commit c052b07662e153a729ae3deaea7ffed165bd86a2
Author: defiQUG
Date:   Mon Feb 9 21:51:42 2026 -0800

    Initial commit: add .gitignore and README

diff --git a/.cursor/rules/fusionagi-standards.mdc b/.cursor/rules/fusionagi-standards.mdc
new file mode 100644
index 0000000..58420ef
--- /dev/null
+++ b/.cursor/rules/fusionagi-standards.mdc
@@ -0,0 +1,28 @@
+---
+description: Core coding standards and module boundaries for FusionAGI
+alwaysApply: true
+---
+
+# FusionAGI Coding Standards
+
+## General
+
+- Use type hints for all function signatures and public attributes.
+- Use docstrings (Google or NumPy style) for public modules, classes, and functions.
+- Prefer Pydantic models for all structured data (tasks, messages, config).
+
+## Module Boundaries
+
+- **core/** — Orchestrator, event bus, state manager only. No LLM or tool logic.
+- **agents/** — Agents depend on schemas and core; they receive adapters/tools by injection.
+- **adapters/** — LLM provider code only; expose a single abstract interface.
+- **schemas/** — Single source of truth for Task, Message, Plan, etc. No business logic.
+- **tools/** — Tool definitions and safe runner; governance may wrap invocation.
+- **memory/** — Storage interfaces; no agent logic.
+- **governance/** — Guardrails, rate limits, access control; called by orchestrator/tools.
+
+## Conventions
+
+- Agents communicate only via structured message envelopes (from schemas).
+- Replaceable components: use abstract base classes or protocols; inject implementations.
+- Log decisions and state transitions for determinism and replay.
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
new file mode 100644
index 0000000..2cd89a0
--- /dev/null
+++ b/.github/workflows/tests.yml
@@ -0,0 +1,68 @@
+name: Tests
+
+on:
+  push:
+    branches: [main, master]
+  pull_request:
+    branches: [main, master]
+
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: ["3.10", "3.11", "3.12"]
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -e ".[dev,api]"
+
+      - name: Run pytest
+        run: pytest tests/ -v --tb=short
+
+  lint:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.12"
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -e ".[dev]"
+
+      - name: Ruff check
+        run: ruff check fusionagi tests/
+        continue-on-error: true
+
+      - name: Ruff format check
+        run: ruff format --check fusionagi tests/
+        continue-on-error: true
+
+  docker:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Build image
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          push: false
+          tags: fusionagi:test
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..4c64e16
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,49 @@
+# Dependencies
+node_modules/
+.pnpm-store/
+vendor/
+
+# Package manager lock files (optional: uncomment to ignore)
+# package-lock.json
+# yarn.lock
+
+# Environment and secrets
+.env
+.env.local
+.env.*.local
+*.env.backup
+.env.backup.*
+
+# Logs and temp
+*.log
+logs/
+*.tmp
+*.temp
+*.tmp.*
+
+# OS
+.DS_Store
+Thumbs.db
+
+# IDE
+.vscode/
+.idea/
+*.swp
+*.swo
+*~
+
+# Build / output
+dist/
+build/
+.next/
+out/
+*.pyc
+__pycache__/
+.eggs/
+*.egg-info/
+.coverage
+htmlcov/
+
+# Optional
+.reports/
+reports/
diff --git a/.venv/bin/Activate.ps1 b/.venv/bin/Activate.ps1
new file mode 100644
index 0000000..b49d77b
--- /dev/null
+++ b/.venv/bin/Activate.ps1
@@ -0,0 +1,247 @@
+<#
+.Synopsis
+Activate a Python virtual environment for the current PowerShell session.
+
+.Description
+Pushes the python executable for a virtual environment to the front of the
+$Env:PATH environment variable and sets the prompt to signify that you are
+in a Python virtual environment. Makes use of the command line switches as
+well as the `pyvenv.cfg` file values present in the virtual environment.
+
+.Parameter VenvDir
+Path to the directory that contains the virtual environment to activate. The
+default value for this is the parent of the directory that the Activate.ps1
+script is located within.
+
+.Parameter Prompt
+The prompt prefix to display when this virtual environment is activated. By
+default, this prompt is the name of the virtual environment folder (VenvDir)
+surrounded by parentheses and followed by a single space (ie. '(.venv) ').
+
+.Example
+Activate.ps1
+Activates the Python virtual environment that contains the Activate.ps1 script.
+
+.Example
+Activate.ps1 -Verbose
+Activates the Python virtual environment that contains the Activate.ps1 script,
+and shows extra information about the activation as it executes.
+
+.Example
+Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
+Activates the Python virtual environment located in the specified location.
+
+.Example
+Activate.ps1 -Prompt "MyPython"
+Activates the Python virtual environment that contains the Activate.ps1 script,
+and prefixes the current prompt with the specified string (surrounded in
+parentheses) while the virtual environment is active.
+
+.Notes
+On Windows, it may be required to enable this Activate.ps1 script by setting the
+execution policy for the user. You can do this by issuing the following PowerShell
+command:
+
+PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
+
+For more information on Execution Policies:
+https://go.microsoft.com/fwlink/?LinkID=135170
+
+#>
+Param(
+    [Parameter(Mandatory = $false)]
+    [String]
+    $VenvDir,
+    [Parameter(Mandatory = $false)]
+    [String]
+    $Prompt
+)
+
+<# Function declarations --------------------------------------------------- #>
+
+<#
+.Synopsis
+Remove all shell session elements added by the Activate script, including the
+addition of the virtual environment's Python executable from the beginning of
+the PATH variable.
+
+.Parameter NonDestructive
+If present, do not remove this function from the global namespace for the
+session.
+ +#> +function global:deactivate ([switch]$NonDestructive) { + # Revert to original values + + # The prior prompt: + if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) { + Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt + Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT + } + + # The prior PYTHONHOME: + if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) { + Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME + Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME + } + + # The prior PATH: + if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) { + Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH + Remove-Item -Path Env:_OLD_VIRTUAL_PATH + } + + # Just remove the VIRTUAL_ENV altogether: + if (Test-Path -Path Env:VIRTUAL_ENV) { + Remove-Item -Path env:VIRTUAL_ENV + } + + # Just remove VIRTUAL_ENV_PROMPT altogether. + if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) { + Remove-Item -Path env:VIRTUAL_ENV_PROMPT + } + + # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether: + if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) { + Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force + } + + # Leave deactivate function in the global namespace if requested: + if (-not $NonDestructive) { + Remove-Item -Path function:deactivate + } +} + +<# +.Description +Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the +given folder, and returns them in a map. + +For each line in the pyvenv.cfg file, if that line can be parsed into exactly +two strings separated by `=` (with any amount of whitespace surrounding the =) +then it is considered a `key = value` line. The left hand string is the key, +the right hand is the value. + +If the value starts with a `'` or a `"` then the first and last character is +stripped from the value before being captured. + +.Parameter ConfigDir +Path to the directory that contains the `pyvenv.cfg` file. +#> +function Get-PyVenvConfig( + [String] + $ConfigDir +) { + Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg" + + # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue). + $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue + + # An empty map will be returned if no config file is found. + $pyvenvConfig = @{ } + + if ($pyvenvConfigPath) { + + Write-Verbose "File exists, parse `key = value` lines" + $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath + + $pyvenvConfigContent | ForEach-Object { + $keyval = $PSItem -split "\s*=\s*", 2 + if ($keyval[0] -and $keyval[1]) { + $val = $keyval[1] + + # Remove extraneous quotations around a string value. 
+ if ("'""".Contains($val.Substring(0, 1))) { + $val = $val.Substring(1, $val.Length - 2) + } + + $pyvenvConfig[$keyval[0]] = $val + Write-Verbose "Adding Key: '$($keyval[0])'='$val'" + } + } + } + return $pyvenvConfig +} + + +<# Begin Activate script --------------------------------------------------- #> + +# Determine the containing directory of this script +$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition +$VenvExecDir = Get-Item -Path $VenvExecPath + +Write-Verbose "Activation script is located in path: '$VenvExecPath'" +Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)" +Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)" + +# Set values required in priority: CmdLine, ConfigFile, Default +# First, get the location of the virtual environment, it might not be +# VenvExecDir if specified on the command line. +if ($VenvDir) { + Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values" +} +else { + Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir." + $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/") + Write-Verbose "VenvDir=$VenvDir" +} + +# Next, read the `pyvenv.cfg` file to determine any required value such +# as `prompt`. +$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir + +# Next, set the prompt from the command line, or the config file, or +# just use the name of the virtual environment folder. +if ($Prompt) { + Write-Verbose "Prompt specified as argument, using '$Prompt'" +} +else { + Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value" + if ($pyvenvCfg -and $pyvenvCfg['prompt']) { + Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'" + $Prompt = $pyvenvCfg['prompt']; + } + else { + Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)" + Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'" + $Prompt = Split-Path -Path $venvDir -Leaf + } +} + +Write-Verbose "Prompt = '$Prompt'" +Write-Verbose "VenvDir='$VenvDir'" + +# Deactivate any currently active virtual environment, but leave the +# deactivate function in place. +deactivate -nondestructive + +# Now set the environment variable VIRTUAL_ENV, used by many tools to determine +# that there is an activated venv. 
+$env:VIRTUAL_ENV = $VenvDir + +if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) { + + Write-Verbose "Setting prompt to '$Prompt'" + + # Set the prompt to include the env name + # Make sure _OLD_VIRTUAL_PROMPT is global + function global:_OLD_VIRTUAL_PROMPT { "" } + Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT + New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt + + function global:prompt { + Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) " + _OLD_VIRTUAL_PROMPT + } + $env:VIRTUAL_ENV_PROMPT = $Prompt +} + +# Clear PYTHONHOME +if (Test-Path -Path Env:PYTHONHOME) { + Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME + Remove-Item -Path Env:PYTHONHOME +} + +# Add the venv to the PATH +Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH +$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" diff --git a/.venv/bin/activate b/.venv/bin/activate new file mode 100644 index 0000000..c546def --- /dev/null +++ b/.venv/bin/activate @@ -0,0 +1,70 @@ +# This file must be used with "source bin/activate" *from bash* +# You cannot run it directly + +deactivate () { + # reset old environment variables + if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then + PATH="${_OLD_VIRTUAL_PATH:-}" + export PATH + unset _OLD_VIRTUAL_PATH + fi + if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then + PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" + export PYTHONHOME + unset _OLD_VIRTUAL_PYTHONHOME + fi + + # Call hash to forget past commands. Without forgetting + # past commands the $PATH changes we made may not be respected + hash -r 2> /dev/null + + if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then + PS1="${_OLD_VIRTUAL_PS1:-}" + export PS1 + unset _OLD_VIRTUAL_PS1 + fi + + unset VIRTUAL_ENV + unset VIRTUAL_ENV_PROMPT + if [ ! "${1:-}" = "nondestructive" ] ; then + # Self destruct! + unset -f deactivate + fi +} + +# unset irrelevant variables +deactivate nondestructive + +# on Windows, a path can contain colons and backslashes and has to be converted: +if [ "${OSTYPE:-}" = "cygwin" ] || [ "${OSTYPE:-}" = "msys" ] ; then + # transform D:\path\to\venv to /d/path/to/venv on MSYS + # and to /cygdrive/d/path/to/venv on Cygwin + export VIRTUAL_ENV=$(cygpath /home/intlc/projects/FusionAGI/.venv) +else + # use the path as-is + export VIRTUAL_ENV=/home/intlc/projects/FusionAGI/.venv +fi + +_OLD_VIRTUAL_PATH="$PATH" +PATH="$VIRTUAL_ENV/"bin":$PATH" +export PATH + +# unset PYTHONHOME if set +# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) +# could use `if (set -u; : $PYTHONHOME) ;` in bash +if [ -n "${PYTHONHOME:-}" ] ; then + _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" + unset PYTHONHOME +fi + +if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then + _OLD_VIRTUAL_PS1="${PS1:-}" + PS1='(.venv) '"${PS1:-}" + export PS1 + VIRTUAL_ENV_PROMPT='(.venv) ' + export VIRTUAL_ENV_PROMPT +fi + +# Call hash to forget past commands. Without forgetting +# past commands the $PATH changes we made may not be respected +hash -r 2> /dev/null diff --git a/.venv/bin/activate.csh b/.venv/bin/activate.csh new file mode 100644 index 0000000..b86f92b --- /dev/null +++ b/.venv/bin/activate.csh @@ -0,0 +1,27 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. + +# Created by Davide Di Blasi . 
+# Ported to Python 3.3 venv by Andrew Svetlov + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate' + +# Unset irrelevant variables. +deactivate nondestructive + +setenv VIRTUAL_ENV /home/intlc/projects/FusionAGI/.venv + +set _OLD_VIRTUAL_PATH="$PATH" +setenv PATH "$VIRTUAL_ENV/"bin":$PATH" + + +set _OLD_VIRTUAL_PROMPT="$prompt" + +if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then + set prompt = '(.venv) '"$prompt" + setenv VIRTUAL_ENV_PROMPT '(.venv) ' +endif + +alias pydoc python -m pydoc + +rehash diff --git a/.venv/bin/activate.fish b/.venv/bin/activate.fish new file mode 100644 index 0000000..2e9e4e7 --- /dev/null +++ b/.venv/bin/activate.fish @@ -0,0 +1,69 @@ +# This file must be used with "source /bin/activate.fish" *from fish* +# (https://fishshell.com/). You cannot run it directly. + +function deactivate -d "Exit virtual environment and return to normal shell environment" + # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + set -gx PATH $_OLD_VIRTUAL_PATH + set -e _OLD_VIRTUAL_PATH + end + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + set -e _OLD_FISH_PROMPT_OVERRIDE + # prevents error when using nested fish instances (Issue #93858) + if functions -q _old_fish_prompt + functions -e fish_prompt + functions -c _old_fish_prompt fish_prompt + functions -e _old_fish_prompt + end + end + + set -e VIRTUAL_ENV + set -e VIRTUAL_ENV_PROMPT + if test "$argv[1]" != "nondestructive" + # Self-destruct! + functions -e deactivate + end +end + +# Unset irrelevant variables. +deactivate nondestructive + +set -gx VIRTUAL_ENV /home/intlc/projects/FusionAGI/.venv + +set -gx _OLD_VIRTUAL_PATH $PATH +set -gx PATH "$VIRTUAL_ENV/"bin $PATH + +# Unset PYTHONHOME if set. +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # fish uses a function instead of an env var to generate the prompt. + + # Save the current fish_prompt function as the function _old_fish_prompt. + functions -c fish_prompt _old_fish_prompt + + # With the original prompt function renamed, we can override with our own. + function fish_prompt + # Save the return status of the last command. + set -l old_status $status + + # Output the venv prompt; color taken from the blue of the Python logo. + printf "%s%s%s" (set_color 4B8BBE) '(.venv) ' (set_color normal) + + # Restore the return status of the previous command. + echo "exit $old_status" | . + # Output the original/"old" prompt. 
+ _old_fish_prompt + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" + set -gx VIRTUAL_ENV_PROMPT '(.venv) ' +end diff --git a/.venv/bin/dmypy b/.venv/bin/dmypy new file mode 100755 index 0000000..1ae909d --- /dev/null +++ b/.venv/bin/dmypy @@ -0,0 +1,8 @@ +#!/home/intlc/projects/FusionAGI/.venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from mypy.dmypy.client import console_entry +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(console_entry()) diff --git a/.venv/bin/fastapi b/.venv/bin/fastapi new file mode 100755 index 0000000..f13d8aa --- /dev/null +++ b/.venv/bin/fastapi @@ -0,0 +1,8 @@ +#!/home/intlc/projects/FusionAGI/.venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from fastapi.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/.venv/bin/httpx b/.venv/bin/httpx new file mode 100755 index 0000000..5073d3d --- /dev/null +++ b/.venv/bin/httpx @@ -0,0 +1,8 @@ +#!/home/intlc/projects/FusionAGI/.venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from httpx import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/.venv/bin/mypy b/.venv/bin/mypy new file mode 100755 index 0000000..8c4b7a2 --- /dev/null +++ b/.venv/bin/mypy @@ -0,0 +1,8 @@ +#!/home/intlc/projects/FusionAGI/.venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from mypy.__main__ import console_entry +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(console_entry()) diff --git a/.venv/bin/mypyc b/.venv/bin/mypyc new file mode 100755 index 0000000..4d39aba --- /dev/null +++ b/.venv/bin/mypyc @@ -0,0 +1,8 @@ +#!/home/intlc/projects/FusionAGI/.venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from mypyc.__main__ import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/.venv/bin/pip b/.venv/bin/pip new file mode 100755 index 0000000..c635b4b --- /dev/null +++ b/.venv/bin/pip @@ -0,0 +1,8 @@ +#!/home/intlc/projects/FusionAGI/.venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/.venv/bin/pip3 b/.venv/bin/pip3 new file mode 100755 index 0000000..c635b4b --- /dev/null +++ b/.venv/bin/pip3 @@ -0,0 +1,8 @@ +#!/home/intlc/projects/FusionAGI/.venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/.venv/bin/pip3.12 b/.venv/bin/pip3.12 new file mode 100755 index 0000000..c635b4b --- /dev/null +++ b/.venv/bin/pip3.12 @@ -0,0 +1,8 @@ +#!/home/intlc/projects/FusionAGI/.venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/.venv/bin/py.test b/.venv/bin/py.test new file mode 100755 index 0000000..03ec21a --- /dev/null +++ b/.venv/bin/py.test @@ -0,0 +1,8 @@ +#!/home/intlc/projects/FusionAGI/.venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pytest 
import console_main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(console_main()) diff --git a/.venv/bin/pygmentize b/.venv/bin/pygmentize new file mode 100755 index 0000000..e19fb3a --- /dev/null +++ b/.venv/bin/pygmentize @@ -0,0 +1,8 @@ +#!/home/intlc/projects/FusionAGI/.venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pygments.cmdline import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/.venv/bin/pytest b/.venv/bin/pytest new file mode 100755 index 0000000..03ec21a --- /dev/null +++ b/.venv/bin/pytest @@ -0,0 +1,8 @@ +#!/home/intlc/projects/FusionAGI/.venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pytest import console_main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(console_main()) diff --git a/.venv/bin/python b/.venv/bin/python new file mode 120000 index 0000000..b8a0adb --- /dev/null +++ b/.venv/bin/python @@ -0,0 +1 @@ +python3 \ No newline at end of file diff --git a/.venv/bin/python3 b/.venv/bin/python3 new file mode 120000 index 0000000..ae65fda --- /dev/null +++ b/.venv/bin/python3 @@ -0,0 +1 @@ +/usr/bin/python3 \ No newline at end of file diff --git a/.venv/bin/python3.12 b/.venv/bin/python3.12 new file mode 120000 index 0000000..b8a0adb --- /dev/null +++ b/.venv/bin/python3.12 @@ -0,0 +1 @@ +python3 \ No newline at end of file diff --git a/.venv/bin/ruff b/.venv/bin/ruff new file mode 100755 index 0000000..ead2012 Binary files /dev/null and b/.venv/bin/ruff differ diff --git a/.venv/bin/stubgen b/.venv/bin/stubgen new file mode 100755 index 0000000..306a691 --- /dev/null +++ b/.venv/bin/stubgen @@ -0,0 +1,8 @@ +#!/home/intlc/projects/FusionAGI/.venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from mypy.stubgen import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/.venv/bin/stubtest b/.venv/bin/stubtest new file mode 100755 index 0000000..0c01ef0 --- /dev/null +++ b/.venv/bin/stubtest @@ -0,0 +1,8 @@ +#!/home/intlc/projects/FusionAGI/.venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from mypy.stubtest import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/.venv/bin/uvicorn b/.venv/bin/uvicorn new file mode 100755 index 0000000..89d4e7c --- /dev/null +++ b/.venv/bin/uvicorn @@ -0,0 +1,8 @@ +#!/home/intlc/projects/FusionAGI/.venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from uvicorn.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/.venv/lib/python3.12/site-packages/4c842c94c09923bae9e4__mypyc.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/4c842c94c09923bae9e4__mypyc.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..676e4d2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/4c842c94c09923bae9e4__mypyc.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/__editable__.fusionagi-0.1.0.pth b/.venv/lib/python3.12/site-packages/__editable__.fusionagi-0.1.0.pth new file mode 100644 index 0000000..a0f7992 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/__editable__.fusionagi-0.1.0.pth @@ -0,0 +1 @@ +import 
__editable___fusionagi_0_1_0_finder; __editable___fusionagi_0_1_0_finder.install() \ No newline at end of file diff --git a/.venv/lib/python3.12/site-packages/__editable___fusionagi_0_1_0_finder.py b/.venv/lib/python3.12/site-packages/__editable___fusionagi_0_1_0_finder.py new file mode 100644 index 0000000..6d2d4e7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/__editable___fusionagi_0_1_0_finder.py @@ -0,0 +1,85 @@ +from __future__ import annotations +import sys +from importlib.machinery import ModuleSpec, PathFinder +from importlib.machinery import all_suffixes as module_suffixes +from importlib.util import spec_from_file_location +from itertools import chain +from pathlib import Path + +MAPPING: dict[str, str] = {'fusionagi': '/home/intlc/projects/FusionAGI/fusionagi'} +NAMESPACES: dict[str, list[str]] = {} +PATH_PLACEHOLDER = '__editable__.fusionagi-0.1.0.finder' + ".__path_hook__" + + +class _EditableFinder: # MetaPathFinder + @classmethod + def find_spec(cls, fullname: str, path=None, target=None) -> ModuleSpec | None: # type: ignore + # Top-level packages and modules (we know these exist in the FS) + if fullname in MAPPING: + pkg_path = MAPPING[fullname] + return cls._find_spec(fullname, Path(pkg_path)) + + # Handle immediate children modules (required for namespaces to work) + # To avoid problems with case sensitivity in the file system we delegate + # to the importlib.machinery implementation. + parent, _, child = fullname.rpartition(".") + if parent and parent in MAPPING: + return PathFinder.find_spec(fullname, path=[MAPPING[parent]]) + + # Other levels of nesting should be handled automatically by importlib + # using the parent path. + return None + + @classmethod + def _find_spec(cls, fullname: str, candidate_path: Path) -> ModuleSpec | None: + init = candidate_path / "__init__.py" + candidates = (candidate_path.with_suffix(x) for x in module_suffixes()) + for candidate in chain([init], candidates): + if candidate.exists(): + return spec_from_file_location(fullname, candidate) + return None + + +class _EditableNamespaceFinder: # PathEntryFinder + @classmethod + def _path_hook(cls, path) -> type[_EditableNamespaceFinder]: + if path == PATH_PLACEHOLDER: + return cls + raise ImportError + + @classmethod + def _paths(cls, fullname: str) -> list[str]: + paths = NAMESPACES[fullname] + if not paths and fullname in MAPPING: + paths = [MAPPING[fullname]] + # Always add placeholder, for 2 reasons: + # 1. __path__ cannot be empty for the spec to be considered namespace. + # 2. In the case of nested namespaces, we need to force + # import machinery to query _EditableNamespaceFinder again. 
+ return [*paths, PATH_PLACEHOLDER] + + @classmethod + def find_spec(cls, fullname: str, target=None) -> ModuleSpec | None: # type: ignore + if fullname in NAMESPACES: + spec = ModuleSpec(fullname, None, is_package=True) + spec.submodule_search_locations = cls._paths(fullname) + return spec + return None + + @classmethod + def find_module(cls, _fullname) -> None: + return None + + +def install(): + if not any(finder == _EditableFinder for finder in sys.meta_path): + sys.meta_path.append(_EditableFinder) + + if not NAMESPACES: + return + + if not any(hook == _EditableNamespaceFinder._path_hook for hook in sys.path_hooks): + # PathEntryFinder is needed to create NamespaceSpec without private APIS + sys.path_hooks.append(_EditableNamespaceFinder._path_hook) + if PATH_PLACEHOLDER not in sys.path: + sys.path.append(PATH_PLACEHOLDER) # Used just to trigger the path hook diff --git a/.venv/lib/python3.12/site-packages/_pytest/__init__.py b/.venv/lib/python3.12/site-packages/_pytest/__init__.py new file mode 100644 index 0000000..8eb8ec9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/__init__.py @@ -0,0 +1,13 @@ +from __future__ import annotations + + +__all__ = ["__version__", "version_tuple"] + +try: + from ._version import version as __version__ + from ._version import version_tuple +except ImportError: # pragma: no cover + # broken installation, we don't even try + # unknown only works because we do poor mans version compare + __version__ = "unknown" + version_tuple = (0, 0, "unknown") diff --git a/.venv/lib/python3.12/site-packages/_pytest/_argcomplete.py b/.venv/lib/python3.12/site-packages/_pytest/_argcomplete.py new file mode 100644 index 0000000..59426ef --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/_argcomplete.py @@ -0,0 +1,117 @@ +"""Allow bash-completion for argparse with argcomplete if installed. + +Needs argcomplete>=0.5.6 for python 3.2/3.3 (older versions fail +to find the magic string, so _ARGCOMPLETE env. var is never set, and +this does not need special code). + +Function try_argcomplete(parser) should be called directly before +the call to ArgumentParser.parse_args(). + +The filescompleter is what you normally would use on the positional +arguments specification, in order to get "dirname/" after "dirn" +instead of the default "dirname ": + + optparser.add_argument(Config._file_or_dir, nargs='*').completer=filescompleter + +Other, application specific, completers should go in the file +doing the add_argument calls as they need to be specified as .completer +attributes as well. (If argcomplete is not installed, the function the +attribute points to will not be used). + +SPEEDUP +======= + +The generic argcomplete script for bash-completion +(/etc/bash_completion.d/python-argcomplete.sh) +uses a python program to determine startup script generated by pip. +You can speed up completion somewhat by changing this script to include + # PYTHON_ARGCOMPLETE_OK +so the python-argcomplete-check-easy-install-script does not +need to be called to find the entry point of the code and see if that is +marked with PYTHON_ARGCOMPLETE_OK. + +INSTALL/DEBUGGING +================= + +To include this support in another application that has setup.py generated +scripts: + +- Add the line: + # PYTHON_ARGCOMPLETE_OK + near the top of the main python entry point. 
+ +- Include in the file calling parse_args(): + from _argcomplete import try_argcomplete, filescompleter + Call try_argcomplete just before parse_args(), and optionally add + filescompleter to the positional arguments' add_argument(). + +If things do not work right away: + +- Switch on argcomplete debugging with (also helpful when doing custom + completers): + export _ARC_DEBUG=1 + +- Run: + python-argcomplete-check-easy-install-script $(which appname) + echo $? + will echo 0 if the magic line has been found, 1 if not. + +- Sometimes it helps to find early on errors using: + _ARGCOMPLETE=1 _ARC_DEBUG=1 appname + which should throw a KeyError: 'COMPLINE' (which is properly set by the + global argcomplete script). +""" + +from __future__ import annotations + +import argparse +from glob import glob +import os +import sys +from typing import Any + + +class FastFilesCompleter: + """Fast file completer class.""" + + def __init__(self, directories: bool = True) -> None: + self.directories = directories + + def __call__(self, prefix: str, **kwargs: Any) -> list[str]: + # Only called on non option completions. + if os.sep in prefix[1:]: + prefix_dir = len(os.path.dirname(prefix) + os.sep) + else: + prefix_dir = 0 + completion = [] + globbed = [] + if "*" not in prefix and "?" not in prefix: + # We are on unix, otherwise no bash. + if not prefix or prefix[-1] == os.sep: + globbed.extend(glob(prefix + ".*")) + prefix += "*" + globbed.extend(glob(prefix)) + for x in sorted(globbed): + if os.path.isdir(x): + x += "/" + # Append stripping the prefix (like bash, not like compgen). + completion.append(x[prefix_dir:]) + return completion + + +if os.environ.get("_ARGCOMPLETE"): + try: + import argcomplete.completers + except ImportError: + sys.exit(-1) + filescompleter: FastFilesCompleter | None = FastFilesCompleter() + + def try_argcomplete(parser: argparse.ArgumentParser) -> None: + argcomplete.autocomplete(parser, always_complete_options=False) + +else: + + def try_argcomplete(parser: argparse.ArgumentParser) -> None: + pass + + filescompleter = None diff --git a/.venv/lib/python3.12/site-packages/_pytest/_code/__init__.py b/.venv/lib/python3.12/site-packages/_pytest/_code/__init__.py new file mode 100644 index 0000000..7f67a2e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/_code/__init__.py @@ -0,0 +1,26 @@ +"""Python inspection/code generation API.""" + +from __future__ import annotations + +from .code import Code +from .code import ExceptionInfo +from .code import filter_traceback +from .code import Frame +from .code import getfslineno +from .code import Traceback +from .code import TracebackEntry +from .source import getrawcode +from .source import Source + + +__all__ = [ + "Code", + "ExceptionInfo", + "Frame", + "Source", + "Traceback", + "TracebackEntry", + "filter_traceback", + "getfslineno", + "getrawcode", +] diff --git a/.venv/lib/python3.12/site-packages/_pytest/_code/code.py b/.venv/lib/python3.12/site-packages/_pytest/_code/code.py new file mode 100644 index 0000000..add2a49 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/_code/code.py @@ -0,0 +1,1565 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +import ast +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Mapping +from collections.abc import Sequence +import dataclasses +import inspect +from inspect import CO_VARARGS +from inspect import CO_VARKEYWORDS +from io import StringIO +import os +from pathlib import Path +import re +import 
sys +from traceback import extract_tb +from traceback import format_exception +from traceback import format_exception_only +from traceback import FrameSummary +from types import CodeType +from types import FrameType +from types import TracebackType +from typing import Any +from typing import ClassVar +from typing import Final +from typing import final +from typing import Generic +from typing import Literal +from typing import overload +from typing import SupportsIndex +from typing import TypeAlias +from typing import TypeVar + +import pluggy + +import _pytest +from _pytest._code.source import findsource +from _pytest._code.source import getrawcode +from _pytest._code.source import getstatementrange_ast +from _pytest._code.source import Source +from _pytest._io import TerminalWriter +from _pytest._io.saferepr import safeformat +from _pytest._io.saferepr import saferepr +from _pytest.compat import get_real_func +from _pytest.deprecated import check_ispytest +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath + + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + +TracebackStyle = Literal["long", "short", "line", "no", "native", "value", "auto"] + +EXCEPTION_OR_MORE = type[BaseException] | tuple[type[BaseException], ...] + + +class Code: + """Wrapper around Python code objects.""" + + __slots__ = ("raw",) + + def __init__(self, obj: CodeType) -> None: + self.raw = obj + + @classmethod + def from_function(cls, obj: object) -> Code: + return cls(getrawcode(obj)) + + def __eq__(self, other): + return self.raw == other.raw + + # Ignore type because of https://github.com/python/mypy/issues/4266. + __hash__ = None # type: ignore + + @property + def firstlineno(self) -> int: + return self.raw.co_firstlineno - 1 + + @property + def name(self) -> str: + return self.raw.co_name + + @property + def path(self) -> Path | str: + """Return a path object pointing to source code, or an ``str`` in + case of ``OSError`` / non-existing file.""" + if not self.raw.co_filename: + return "" + try: + p = absolutepath(self.raw.co_filename) + # maybe don't try this checking + if not p.exists(): + raise OSError("path check failed.") + return p + except OSError: + # XXX maybe try harder like the weird logic + # in the standard lib [linecache.updatecache] does? + return self.raw.co_filename + + @property + def fullsource(self) -> Source | None: + """Return a _pytest._code.Source object for the full source file of the code.""" + full, _ = findsource(self.raw) + return full + + def source(self) -> Source: + """Return a _pytest._code.Source object for the code object's source only.""" + # return source only for that part of code + return Source(self.raw) + + def getargs(self, var: bool = False) -> tuple[str, ...]: + """Return a tuple with the argument names for the code object. + + If 'var' is set True also return the names of the variable and + keyword arguments when present. + """ + # Handy shortcut for getting args. 
+ raw = self.raw + argcount = raw.co_argcount + if var: + argcount += raw.co_flags & CO_VARARGS + argcount += raw.co_flags & CO_VARKEYWORDS + return raw.co_varnames[:argcount] + + +class Frame: + """Wrapper around a Python frame holding f_locals and f_globals + in which expressions can be evaluated.""" + + __slots__ = ("raw",) + + def __init__(self, frame: FrameType) -> None: + self.raw = frame + + @property + def lineno(self) -> int: + return self.raw.f_lineno - 1 + + @property + def f_globals(self) -> dict[str, Any]: + return self.raw.f_globals + + @property + def f_locals(self) -> dict[str, Any]: + return self.raw.f_locals + + @property + def code(self) -> Code: + return Code(self.raw.f_code) + + @property + def statement(self) -> Source: + """Statement this frame is at.""" + if self.code.fullsource is None: + return Source("") + return self.code.fullsource.getstatement(self.lineno) + + def eval(self, code, **vars): + """Evaluate 'code' in the frame. + + 'vars' are optional additional local variables. + + Returns the result of the evaluation. + """ + f_locals = self.f_locals.copy() + f_locals.update(vars) + return eval(code, self.f_globals, f_locals) + + def repr(self, object: object) -> str: + """Return a 'safe' (non-recursive, one-line) string repr for 'object'.""" + return saferepr(object) + + def getargs(self, var: bool = False): + """Return a list of tuples (name, value) for all arguments. + + If 'var' is set True, also include the variable and keyword arguments + when present. + """ + retval = [] + for arg in self.code.getargs(var): + try: + retval.append((arg, self.f_locals[arg])) + except KeyError: + pass # this can occur when using Psyco + return retval + + +class TracebackEntry: + """A single entry in a Traceback.""" + + __slots__ = ("_rawentry", "_repr_style") + + def __init__( + self, + rawentry: TracebackType, + repr_style: Literal["short", "long"] | None = None, + ) -> None: + self._rawentry: Final = rawentry + self._repr_style: Final = repr_style + + def with_repr_style( + self, repr_style: Literal["short", "long"] | None + ) -> TracebackEntry: + return TracebackEntry(self._rawentry, repr_style) + + @property + def lineno(self) -> int: + return self._rawentry.tb_lineno - 1 + + def get_python_framesummary(self) -> FrameSummary: + # Python's built-in traceback module implements all the nitty gritty + # details to get column numbers of out frames. 
+ stack_summary = extract_tb(self._rawentry, limit=1) + return stack_summary[0] + + # Column and end line numbers introduced in python 3.11 + if sys.version_info < (3, 11): + + @property + def end_lineno_relative(self) -> int | None: + return None + + @property + def colno(self) -> int | None: + return None + + @property + def end_colno(self) -> int | None: + return None + else: + + @property + def end_lineno_relative(self) -> int | None: + frame_summary = self.get_python_framesummary() + if frame_summary.end_lineno is None: # pragma: no cover + return None + return frame_summary.end_lineno - 1 - self.frame.code.firstlineno + + @property + def colno(self) -> int | None: + """Starting byte offset of the expression in the traceback entry.""" + return self.get_python_framesummary().colno + + @property + def end_colno(self) -> int | None: + """Ending byte offset of the expression in the traceback entry.""" + return self.get_python_framesummary().end_colno + + @property + def frame(self) -> Frame: + return Frame(self._rawentry.tb_frame) + + @property + def relline(self) -> int: + return self.lineno - self.frame.code.firstlineno + + def __repr__(self) -> str: + return f"" + + @property + def statement(self) -> Source: + """_pytest._code.Source object for the current statement.""" + source = self.frame.code.fullsource + assert source is not None + return source.getstatement(self.lineno) + + @property + def path(self) -> Path | str: + """Path to the source code.""" + return self.frame.code.path + + @property + def locals(self) -> dict[str, Any]: + """Locals of underlying frame.""" + return self.frame.f_locals + + def getfirstlinesource(self) -> int: + return self.frame.code.firstlineno + + def getsource( + self, astcache: dict[str | Path, ast.AST] | None = None + ) -> Source | None: + """Return failing source code.""" + # we use the passed in astcache to not reparse asttrees + # within exception info printing + source = self.frame.code.fullsource + if source is None: + return None + key = astnode = None + if astcache is not None: + key = self.frame.code.path + if key is not None: + astnode = astcache.get(key, None) + start = self.getfirstlinesource() + try: + astnode, _, end = getstatementrange_ast( + self.lineno, source, astnode=astnode + ) + except SyntaxError: + end = self.lineno + 1 + else: + if key is not None and astcache is not None: + astcache[key] = astnode + return source[start:end] + + source = property(getsource) + + def ishidden(self, excinfo: ExceptionInfo[BaseException] | None) -> bool: + """Return True if the current frame has a var __tracebackhide__ + resolving to True. + + If __tracebackhide__ is a callable, it gets called with the + ExceptionInfo instance and can decide whether to hide the traceback. + + Mostly for internal use. + """ + tbh: bool | Callable[[ExceptionInfo[BaseException] | None], bool] = False + for maybe_ns_dct in (self.frame.f_locals, self.frame.f_globals): + # in normal cases, f_locals and f_globals are dictionaries + # however via `exec(...)` / `eval(...)` they can be other types + # (even incorrect types!). + # as such, we suppress all exceptions while accessing __tracebackhide__ + try: + tbh = maybe_ns_dct["__tracebackhide__"] + except Exception: + pass + else: + break + if tbh and callable(tbh): + return tbh(excinfo) + return tbh + + def __str__(self) -> str: + name = self.frame.code.name + try: + line = str(self.statement).lstrip() + except KeyboardInterrupt: + raise + except BaseException: + line = "???" 
+ # This output does not quite match Python's repr for traceback entries, + # but changing it to do so would break certain plugins. See + # https://github.com/pytest-dev/pytest/pull/7535/ for details. + return f" File '{self.path}':{self.lineno + 1} in {name}\n {line}\n" + + @property + def name(self) -> str: + """co_name of underlying code.""" + return self.frame.code.raw.co_name + + +class Traceback(list[TracebackEntry]): + """Traceback objects encapsulate and offer higher level access to Traceback entries.""" + + def __init__( + self, + tb: TracebackType | Iterable[TracebackEntry], + ) -> None: + """Initialize from given python traceback object and ExceptionInfo.""" + if isinstance(tb, TracebackType): + + def f(cur: TracebackType) -> Iterable[TracebackEntry]: + cur_: TracebackType | None = cur + while cur_ is not None: + yield TracebackEntry(cur_) + cur_ = cur_.tb_next + + super().__init__(f(tb)) + else: + super().__init__(tb) + + def cut( + self, + path: os.PathLike[str] | str | None = None, + lineno: int | None = None, + firstlineno: int | None = None, + excludepath: os.PathLike[str] | None = None, + ) -> Traceback: + """Return a Traceback instance wrapping part of this Traceback. + + By providing any combination of path, lineno and firstlineno, the + first frame to start the to-be-returned traceback is determined. + + This allows cutting the first part of a Traceback instance e.g. + for formatting reasons (removing some uninteresting bits that deal + with handling of the exception/traceback). + """ + path_ = None if path is None else os.fspath(path) + excludepath_ = None if excludepath is None else os.fspath(excludepath) + for x in self: + code = x.frame.code + codepath = code.path + if path is not None and str(codepath) != path_: + continue + if ( + excludepath is not None + and isinstance(codepath, Path) + and excludepath_ in (str(p) for p in codepath.parents) # type: ignore[operator] + ): + continue + if lineno is not None and x.lineno != lineno: + continue + if firstlineno is not None and x.frame.code.firstlineno != firstlineno: + continue + return Traceback(x._rawentry) + return self + + @overload + def __getitem__(self, key: SupportsIndex) -> TracebackEntry: ... + + @overload + def __getitem__(self, key: slice) -> Traceback: ... + + def __getitem__(self, key: SupportsIndex | slice) -> TracebackEntry | Traceback: + if isinstance(key, slice): + return self.__class__(super().__getitem__(key)) + else: + return super().__getitem__(key) + + def filter( + self, + excinfo_or_fn: ExceptionInfo[BaseException] | Callable[[TracebackEntry], bool], + /, + ) -> Traceback: + """Return a Traceback instance with certain items removed. + + If the filter is an `ExceptionInfo`, removes all the ``TracebackEntry``s + which are hidden (see ishidden() above). + + Otherwise, the filter is a function that gets a single argument, a + ``TracebackEntry`` instance, and should return True when the item should + be added to the ``Traceback``, False when not. 
+ """ + if isinstance(excinfo_or_fn, ExceptionInfo): + fn = lambda x: not x.ishidden(excinfo_or_fn) # noqa: E731 + else: + fn = excinfo_or_fn + return Traceback(filter(fn, self)) + + def recursionindex(self) -> int | None: + """Return the index of the frame/TracebackEntry where recursion originates if + appropriate, None if no recursion occurred.""" + cache: dict[tuple[Any, int, int], list[dict[str, Any]]] = {} + for i, entry in enumerate(self): + # id for the code.raw is needed to work around + # the strange metaprogramming in the decorator lib from pypi + # which generates code objects that have hash/value equality + # XXX needs a test + key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno + values = cache.setdefault(key, []) + # Since Python 3.13 f_locals is a proxy, freeze it. + loc = dict(entry.frame.f_locals) + if values: + for otherloc in values: + if otherloc == loc: + return i + values.append(loc) + return None + + +def stringify_exception( + exc: BaseException, include_subexception_msg: bool = True +) -> str: + try: + notes = getattr(exc, "__notes__", []) + except KeyError: + # Workaround for https://github.com/python/cpython/issues/98778 on + # some 3.10 and 3.11 patch versions. + HTTPError = getattr(sys.modules.get("urllib.error", None), "HTTPError", ()) + if sys.version_info < (3, 12) and isinstance(exc, HTTPError): + notes = [] + else: # pragma: no cover + # exception not related to above bug, reraise + raise + if not include_subexception_msg and isinstance(exc, BaseExceptionGroup): + message = exc.message + else: + message = str(exc) + + return "\n".join( + [ + message, + *notes, + ] + ) + + +E = TypeVar("E", bound=BaseException, covariant=True) + + +@final +@dataclasses.dataclass +class ExceptionInfo(Generic[E]): + """Wraps sys.exc_info() objects and offers help for navigating the traceback.""" + + _assert_start_repr: ClassVar = "AssertionError('assert " + + _excinfo: tuple[type[E], E, TracebackType] | None + _striptext: str + _traceback: Traceback | None + + def __init__( + self, + excinfo: tuple[type[E], E, TracebackType] | None, + striptext: str = "", + traceback: Traceback | None = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._excinfo = excinfo + self._striptext = striptext + self._traceback = traceback + + @classmethod + def from_exception( + cls, + # Ignoring error: "Cannot use a covariant type variable as a parameter". + # This is OK to ignore because this class is (conceptually) readonly. + # See https://github.com/python/mypy/issues/7049. + exception: E, # type: ignore[misc] + exprinfo: str | None = None, + ) -> ExceptionInfo[E]: + """Return an ExceptionInfo for an existing exception. + + The exception must have a non-``None`` ``__traceback__`` attribute, + otherwise this function fails with an assertion error. This means that + the exception must have been raised, or added a traceback with the + :py:meth:`~BaseException.with_traceback()` method. + + :param exprinfo: + A text string helping to determine if we should strip + ``AssertionError`` from the output. Defaults to the exception + message/``__str__()``. + + .. versionadded:: 7.4 + """ + assert exception.__traceback__, ( + "Exceptions passed to ExcInfo.from_exception(...)" + " must have a non-None __traceback__." 
+ ) + exc_info = (type(exception), exception, exception.__traceback__) + return cls.from_exc_info(exc_info, exprinfo) + + @classmethod + def from_exc_info( + cls, + exc_info: tuple[type[E], E, TracebackType], + exprinfo: str | None = None, + ) -> ExceptionInfo[E]: + """Like :func:`from_exception`, but using old-style exc_info tuple.""" + _striptext = "" + if exprinfo is None and isinstance(exc_info[1], AssertionError): + exprinfo = getattr(exc_info[1], "msg", None) + if exprinfo is None: + exprinfo = saferepr(exc_info[1]) + if exprinfo and exprinfo.startswith(cls._assert_start_repr): + _striptext = "AssertionError: " + + return cls(exc_info, _striptext, _ispytest=True) + + @classmethod + def from_current(cls, exprinfo: str | None = None) -> ExceptionInfo[BaseException]: + """Return an ExceptionInfo matching the current traceback. + + .. warning:: + + Experimental API + + :param exprinfo: + A text string helping to determine if we should strip + ``AssertionError`` from the output. Defaults to the exception + message/``__str__()``. + """ + tup = sys.exc_info() + assert tup[0] is not None, "no current exception" + assert tup[1] is not None, "no current exception" + assert tup[2] is not None, "no current exception" + exc_info = (tup[0], tup[1], tup[2]) + return ExceptionInfo.from_exc_info(exc_info, exprinfo) + + @classmethod + def for_later(cls) -> ExceptionInfo[E]: + """Return an unfilled ExceptionInfo.""" + return cls(None, _ispytest=True) + + def fill_unfilled(self, exc_info: tuple[type[E], E, TracebackType]) -> None: + """Fill an unfilled ExceptionInfo created with ``for_later()``.""" + assert self._excinfo is None, "ExceptionInfo was already filled" + self._excinfo = exc_info + + @property + def type(self) -> type[E]: + """The exception class.""" + assert self._excinfo is not None, ( + ".type can only be used after the context manager exits" + ) + return self._excinfo[0] + + @property + def value(self) -> E: + """The exception value.""" + assert self._excinfo is not None, ( + ".value can only be used after the context manager exits" + ) + return self._excinfo[1] + + @property + def tb(self) -> TracebackType: + """The exception raw traceback.""" + assert self._excinfo is not None, ( + ".tb can only be used after the context manager exits" + ) + return self._excinfo[2] + + @property + def typename(self) -> str: + """The type name of the exception.""" + assert self._excinfo is not None, ( + ".typename can only be used after the context manager exits" + ) + return self.type.__name__ + + @property + def traceback(self) -> Traceback: + """The traceback.""" + if self._traceback is None: + self._traceback = Traceback(self.tb) + return self._traceback + + @traceback.setter + def traceback(self, value: Traceback) -> None: + self._traceback = value + + def __repr__(self) -> str: + if self._excinfo is None: + return "" + return f"<{self.__class__.__name__} {saferepr(self._excinfo[1])} tblen={len(self.traceback)}>" + + def exconly(self, tryshort: bool = False) -> str: + """Return the exception as a string. + + When 'tryshort' resolves to True, and the exception is an + AssertionError, only the actual exception part of the exception + representation is returned (so 'AssertionError: ' is removed from + the beginning). 
+ """ + + def _get_single_subexc( + eg: BaseExceptionGroup[BaseException], + ) -> BaseException | None: + if len(eg.exceptions) != 1: + return None + if isinstance(e := eg.exceptions[0], BaseExceptionGroup): + return _get_single_subexc(e) + return e + + if ( + tryshort + and isinstance(self.value, BaseExceptionGroup) + and (subexc := _get_single_subexc(self.value)) is not None + ): + return f"{subexc!r} [single exception in {type(self.value).__name__}]" + + lines = format_exception_only(self.type, self.value) + text = "".join(lines) + text = text.rstrip() + if tryshort: + if text.startswith(self._striptext): + text = text[len(self._striptext) :] + return text + + def errisinstance(self, exc: EXCEPTION_OR_MORE) -> bool: + """Return True if the exception is an instance of exc. + + Consider using ``isinstance(excinfo.value, exc)`` instead. + """ + return isinstance(self.value, exc) + + def _getreprcrash(self) -> ReprFileLocation | None: + # Find last non-hidden traceback entry that led to the exception of the + # traceback, or None if all hidden. + for i in range(-1, -len(self.traceback) - 1, -1): + entry = self.traceback[i] + if not entry.ishidden(self): + path, lineno = entry.frame.code.raw.co_filename, entry.lineno + exconly = self.exconly(tryshort=True) + return ReprFileLocation(path, lineno + 1, exconly) + return None + + def getrepr( + self, + showlocals: bool = False, + style: TracebackStyle = "long", + abspath: bool = False, + tbfilter: bool | Callable[[ExceptionInfo[BaseException]], Traceback] = True, + funcargs: bool = False, + truncate_locals: bool = True, + truncate_args: bool = True, + chain: bool = True, + ) -> ReprExceptionInfo | ExceptionChainRepr: + """Return str()able representation of this exception info. + + :param bool showlocals: + Show locals per traceback entry. + Ignored if ``style=="native"``. + + :param str style: + long|short|line|no|native|value traceback style. + + :param bool abspath: + If paths should be changed to absolute or left unchanged. + + :param tbfilter: + A filter for traceback entries. + + * If false, don't hide any entries. + * If true, hide internal entries and entries that contain a local + variable ``__tracebackhide__ = True``. + * If a callable, delegates the filtering to the callable. + + Ignored if ``style`` is ``"native"``. + + :param bool funcargs: + Show fixtures ("funcargs" for legacy purposes) per traceback entry. + + :param bool truncate_locals: + With ``showlocals==True``, make sure locals can be safely represented as strings. + + :param bool truncate_args: + With ``showargs==True``, make sure args can be safely represented as strings. + + :param bool chain: + If chained exceptions in Python 3 should be shown. + + .. versionchanged:: 3.9 + + Added the ``chain`` parameter. + """ + if style == "native": + return ReprExceptionInfo( + reprtraceback=ReprTracebackNative( + format_exception( + self.type, + self.value, + self.traceback[0]._rawentry if self.traceback else None, + ) + ), + reprcrash=self._getreprcrash(), + ) + + fmt = FormattedExcinfo( + showlocals=showlocals, + style=style, + abspath=abspath, + tbfilter=tbfilter, + funcargs=funcargs, + truncate_locals=truncate_locals, + truncate_args=truncate_args, + chain=chain, + ) + return fmt.repr_excinfo(self) + + def match(self, regexp: str | re.Pattern[str]) -> Literal[True]: + """Check whether the regular expression `regexp` matches the string + representation of the exception using :func:`python:re.search`. + + If it matches `True` is returned, otherwise an `AssertionError` is raised. 
+ """ + __tracebackhide__ = True + value = stringify_exception(self.value) + msg = ( + f"Regex pattern did not match.\n" + f" Expected regex: {regexp!r}\n" + f" Actual message: {value!r}" + ) + if regexp == value: + msg += "\n Did you mean to `re.escape()` the regex?" + assert re.search(regexp, value), msg + # Return True to allow for "assert excinfo.match()". + return True + + def _group_contains( + self, + exc_group: BaseExceptionGroup[BaseException], + expected_exception: EXCEPTION_OR_MORE, + match: str | re.Pattern[str] | None, + target_depth: int | None = None, + current_depth: int = 1, + ) -> bool: + """Return `True` if a `BaseExceptionGroup` contains a matching exception.""" + if (target_depth is not None) and (current_depth > target_depth): + # already descended past the target depth + return False + for exc in exc_group.exceptions: + if isinstance(exc, BaseExceptionGroup): + if self._group_contains( + exc, expected_exception, match, target_depth, current_depth + 1 + ): + return True + if (target_depth is not None) and (current_depth != target_depth): + # not at the target depth, no match + continue + if not isinstance(exc, expected_exception): + continue + if match is not None: + value = stringify_exception(exc) + if not re.search(match, value): + continue + return True + return False + + def group_contains( + self, + expected_exception: EXCEPTION_OR_MORE, + *, + match: str | re.Pattern[str] | None = None, + depth: int | None = None, + ) -> bool: + """Check whether a captured exception group contains a matching exception. + + :param Type[BaseException] | Tuple[Type[BaseException]] expected_exception: + The expected exception type, or a tuple if one of multiple possible + exception types are expected. + + :param str | re.Pattern[str] | None match: + If specified, a string containing a regular expression, + or a regular expression object, that is tested against the string + representation of the exception and its `PEP-678 ` `__notes__` + using :func:`re.search`. + + To match a literal string that may contain :ref:`special characters + `, the pattern can first be escaped with :func:`re.escape`. + + :param Optional[int] depth: + If `None`, will search for a matching exception at any nesting depth. + If >= 1, will only match an exception if it's at the specified depth (depth = 1 being + the exceptions contained within the topmost exception group). + + .. versionadded:: 8.0 + + .. warning:: + This helper makes it easy to check for the presence of specific exceptions, + but it is very bad for checking that the group does *not* contain + *any other exceptions*. + You should instead consider using :class:`pytest.RaisesGroup` + + """ + msg = "Captured exception is not an instance of `BaseExceptionGroup`" + assert isinstance(self.value, BaseExceptionGroup), msg + msg = "`depth` must be >= 1 if specified" + assert (depth is None) or (depth >= 1), msg + return self._group_contains(self.value, expected_exception, match, depth) + + +# Type alias for the `tbfilter` setting: +# bool: If True, it should be filtered using Traceback.filter() +# callable: A callable that takes an ExceptionInfo and returns the filtered traceback. 
+TracebackFilter: TypeAlias = bool | Callable[[ExceptionInfo[BaseException]], Traceback] + + +@dataclasses.dataclass +class FormattedExcinfo: + """Presenting information about failing Functions and Generators.""" + + # for traceback entries + flow_marker: ClassVar = ">" + fail_marker: ClassVar = "E" + + showlocals: bool = False + style: TracebackStyle = "long" + abspath: bool = True + tbfilter: TracebackFilter = True + funcargs: bool = False + truncate_locals: bool = True + truncate_args: bool = True + chain: bool = True + astcache: dict[str | Path, ast.AST] = dataclasses.field( + default_factory=dict, init=False, repr=False + ) + + def _getindent(self, source: Source) -> int: + # Figure out indent for the given source. + try: + s = str(source.getstatement(len(source) - 1)) + except KeyboardInterrupt: + raise + except BaseException: + try: + s = str(source[-1]) + except KeyboardInterrupt: + raise + except BaseException: + return 0 + return 4 + (len(s) - len(s.lstrip())) + + def _getentrysource(self, entry: TracebackEntry) -> Source | None: + source = entry.getsource(self.astcache) + if source is not None: + source = source.deindent() + return source + + def repr_args(self, entry: TracebackEntry) -> ReprFuncArgs | None: + if self.funcargs: + args = [] + for argname, argvalue in entry.frame.getargs(var=True): + if self.truncate_args: + str_repr = saferepr(argvalue) + else: + str_repr = saferepr(argvalue, maxsize=None) + args.append((argname, str_repr)) + return ReprFuncArgs(args) + return None + + def get_source( + self, + source: Source | None, + line_index: int = -1, + excinfo: ExceptionInfo[BaseException] | None = None, + short: bool = False, + end_line_index: int | None = None, + colno: int | None = None, + end_colno: int | None = None, + ) -> list[str]: + """Return formatted and marked up source lines.""" + lines = [] + if source is not None and line_index < 0: + line_index += len(source) + if source is None or line_index >= len(source.lines) or line_index < 0: + # `line_index` could still be outside `range(len(source.lines))` if + # we're processing AST with pathological position attributes. + source = Source("???") + line_index = 0 + space_prefix = " " + if short: + lines.append(space_prefix + source.lines[line_index].strip()) + lines.extend( + self.get_highlight_arrows_for_line( + raw_line=source.raw_lines[line_index], + line=source.lines[line_index].strip(), + lineno=line_index, + end_lineno=end_line_index, + colno=colno, + end_colno=end_colno, + ) + ) + else: + for line in source.lines[:line_index]: + lines.append(space_prefix + line) + lines.append(self.flow_marker + " " + source.lines[line_index]) + lines.extend( + self.get_highlight_arrows_for_line( + raw_line=source.raw_lines[line_index], + line=source.lines[line_index], + lineno=line_index, + end_lineno=end_line_index, + colno=colno, + end_colno=end_colno, + ) + ) + for line in source.lines[line_index + 1 :]: + lines.append(space_prefix + line) + if excinfo is not None: + indent = 4 if short else self._getindent(source) + lines.extend(self.get_exconly(excinfo, indent=indent, markall=True)) + return lines + + def get_highlight_arrows_for_line( + self, + line: str, + raw_line: str, + lineno: int | None, + end_lineno: int | None, + colno: int | None, + end_colno: int | None, + ) -> list[str]: + """Return characters highlighting a source line. 
+
+ Example with colno and end_colno pointing to the bar expression:
+ "foo() + bar()"
+ returns " ^^^^^"
+ """
+ if lineno != end_lineno:
+ # Don't handle expressions that span multiple lines.
+ return []
+ if colno is None or end_colno is None:
+ # Can't do anything without column information.
+ return []
+
+ num_stripped_chars = len(raw_line) - len(line)
+
+ start_char_offset = _byte_offset_to_character_offset(raw_line, colno)
+ end_char_offset = _byte_offset_to_character_offset(raw_line, end_colno)
+ num_carets = end_char_offset - start_char_offset
+ # If the highlight would span the whole line, it is redundant, don't
+ # show it.
+ if num_carets >= len(line.strip()):
+ return []
+
+ highlights = " "
+ highlights += " " * (start_char_offset - num_stripped_chars + 1)
+ highlights += "^" * num_carets
+ return [highlights]
+
+ def get_exconly(
+ self,
+ excinfo: ExceptionInfo[BaseException],
+ indent: int = 4,
+ markall: bool = False,
+ ) -> list[str]:
+ lines = []
+ indentstr = " " * indent
+ # Get the real exception information out.
+ exlines = excinfo.exconly(tryshort=True).split("\n")
+ failindent = self.fail_marker + indentstr[1:]
+ for line in exlines:
+ lines.append(failindent + line)
+ if not markall:
+ failindent = indentstr
+ return lines
+
+ def repr_locals(self, locals: Mapping[str, object]) -> ReprLocals | None:
+ if self.showlocals:
+ lines = []
+ keys = [loc for loc in locals if loc[0] != "@"]
+ keys.sort()
+ for name in keys:
+ value = locals[name]
+ if name == "__builtins__":
+ lines.append("__builtins__ = <builtins>")
+ else:
+ # This formatting could all be handled by the
+ # _repr() function, which is only reprlib.Repr in
+ # disguise, so is very configurable.
+ if self.truncate_locals:
+ str_repr = saferepr(value)
+ else:
+ str_repr = safeformat(value)
+ # if len(str_repr) < 70 or not isinstance(value, (list, tuple, dict)):
+ lines.append(f"{name:<10} = {str_repr}")
+ # else:
+ # self._line("%-10s =\\" % (name,))
+ # # XXX
+ # pprint.pprint(value, stream=self.excinfowriter)
+ return ReprLocals(lines)
+ return None
+
+ def repr_traceback_entry(
+ self,
+ entry: TracebackEntry | None,
+ excinfo: ExceptionInfo[BaseException] | None = None,
+ ) -> ReprEntry:
+ lines: list[str] = []
+ style = (
+ entry._repr_style
+ if entry is not None and entry._repr_style is not None
+ else self.style
+ )
+ if style in ("short", "long") and entry is not None:
+ source = self._getentrysource(entry)
+ if source is None:
+ source = Source("???")
+ line_index = 0
+ end_line_index, colno, end_colno = None, None, None
+ else:
+ line_index = entry.relline
+ end_line_index = entry.end_lineno_relative
+ colno = entry.colno
+ end_colno = entry.end_colno
+ short = style == "short"
+ reprargs = self.repr_args(entry) if not short else None
+ s = self.get_source(
+ source=source,
+ line_index=line_index,
+ excinfo=excinfo,
+ short=short,
+ end_line_index=end_line_index,
+ colno=colno,
+ end_colno=end_colno,
+ )
+ lines.extend(s)
+ if short:
+ message = f"in {entry.name}"
+ else:
+ message = (excinfo and excinfo.typename) or ""
+ entry_path = entry.path
+ path = self._makepath(entry_path)
+ reprfileloc = ReprFileLocation(path, entry.lineno + 1, message)
+ localsrepr = self.repr_locals(entry.locals)
+ return ReprEntry(lines, reprargs, localsrepr, reprfileloc, style)
+ elif style == "value":
+ if excinfo:
+ lines.extend(str(excinfo.value).split("\n"))
+ return ReprEntry(lines, None, None, None, style)
+ else:
+ if excinfo:
+ lines.extend(self.get_exconly(excinfo, indent=4))
+ return ReprEntry(lines, None, None,
None, style) + + def _makepath(self, path: Path | str) -> str: + if not self.abspath and isinstance(path, Path): + try: + np = bestrelpath(Path.cwd(), path) + except OSError: + return str(path) + if len(np) < len(str(path)): + return np + return str(path) + + def repr_traceback(self, excinfo: ExceptionInfo[BaseException]) -> ReprTraceback: + traceback = filter_excinfo_traceback(self.tbfilter, excinfo) + + if isinstance(excinfo.value, RecursionError): + traceback, extraline = self._truncate_recursive_traceback(traceback) + else: + extraline = None + + if not traceback: + if extraline is None: + extraline = "All traceback entries are hidden. Pass `--full-trace` to see hidden and internal frames." + entries = [self.repr_traceback_entry(None, excinfo)] + return ReprTraceback(entries, extraline, style=self.style) + + last = traceback[-1] + if self.style == "value": + entries = [self.repr_traceback_entry(last, excinfo)] + return ReprTraceback(entries, None, style=self.style) + + entries = [ + self.repr_traceback_entry(entry, excinfo if last == entry else None) + for entry in traceback + ] + return ReprTraceback(entries, extraline, style=self.style) + + def _truncate_recursive_traceback( + self, traceback: Traceback + ) -> tuple[Traceback, str | None]: + """Truncate the given recursive traceback trying to find the starting + point of the recursion. + + The detection is done by going through each traceback entry and + finding the point in which the locals of the frame are equal to the + locals of a previous frame (see ``recursionindex()``). + + Handle the situation where the recursion process might raise an + exception (for example comparing numpy arrays using equality raises a + TypeError), in which case we do our best to warn the user of the + error and show a limited traceback. + """ + try: + recursionindex = traceback.recursionindex() + except Exception as e: + max_frames = 10 + extraline: str | None = ( + "!!! Recursion error detected, but an error occurred locating the origin of recursion.\n" + " The following exception happened when comparing locals in the stack frame:\n" + f" {type(e).__name__}: {e!s}\n" + f" Displaying first and last {max_frames} stack frames out of {len(traceback)}." + ) + # Type ignored because adding two instances of a List subtype + # currently incorrectly has type List instead of the subtype. + traceback = traceback[:max_frames] + traceback[-max_frames:] # type: ignore + else: + if recursionindex is not None: + extraline = "!!! Recursion detected (same locals & position)" + traceback = traceback[: recursionindex + 1] + else: + extraline = None + + return traceback, extraline + + def repr_excinfo(self, excinfo: ExceptionInfo[BaseException]) -> ExceptionChainRepr: + repr_chain: list[tuple[ReprTraceback, ReprFileLocation | None, str | None]] = [] + e: BaseException | None = excinfo.value + excinfo_: ExceptionInfo[BaseException] | None = excinfo + descr = None + seen: set[int] = set() + while e is not None and id(e) not in seen: + seen.add(id(e)) + + if excinfo_: + # Fall back to native traceback as a temporary workaround until + # full support for exception groups added to ExceptionInfo. 
+ # See https://github.com/pytest-dev/pytest/issues/9159 + reprtraceback: ReprTraceback | ReprTracebackNative + if isinstance(e, BaseExceptionGroup): + # don't filter any sub-exceptions since they shouldn't have any internal frames + traceback = filter_excinfo_traceback(self.tbfilter, excinfo) + reprtraceback = ReprTracebackNative( + format_exception( + type(excinfo.value), + excinfo.value, + traceback[0]._rawentry, + ) + ) + else: + reprtraceback = self.repr_traceback(excinfo_) + reprcrash = excinfo_._getreprcrash() + else: + # Fallback to native repr if the exception doesn't have a traceback: + # ExceptionInfo objects require a full traceback to work. + reprtraceback = ReprTracebackNative(format_exception(type(e), e, None)) + reprcrash = None + repr_chain += [(reprtraceback, reprcrash, descr)] + + if e.__cause__ is not None and self.chain: + e = e.__cause__ + excinfo_ = ExceptionInfo.from_exception(e) if e.__traceback__ else None + descr = "The above exception was the direct cause of the following exception:" + elif ( + e.__context__ is not None and not e.__suppress_context__ and self.chain + ): + e = e.__context__ + excinfo_ = ExceptionInfo.from_exception(e) if e.__traceback__ else None + descr = "During handling of the above exception, another exception occurred:" + else: + e = None + repr_chain.reverse() + return ExceptionChainRepr(repr_chain) + + +@dataclasses.dataclass(eq=False) +class TerminalRepr: + def __str__(self) -> str: + # FYI this is called from pytest-xdist's serialization of exception + # information. + io = StringIO() + tw = TerminalWriter(file=io) + self.toterminal(tw) + return io.getvalue().strip() + + def __repr__(self) -> str: + return f"<{self.__class__} instance at {id(self):0x}>" + + def toterminal(self, tw: TerminalWriter) -> None: + raise NotImplementedError() + + +# This class is abstract -- only subclasses are instantiated. +@dataclasses.dataclass(eq=False) +class ExceptionRepr(TerminalRepr): + # Provided by subclasses. + reprtraceback: ReprTraceback + reprcrash: ReprFileLocation | None + sections: list[tuple[str, str, str]] = dataclasses.field( + init=False, default_factory=list + ) + + def addsection(self, name: str, content: str, sep: str = "-") -> None: + self.sections.append((name, content, sep)) + + def toterminal(self, tw: TerminalWriter) -> None: + for name, content, sep in self.sections: + tw.sep(sep, name) + tw.line(content) + + +@dataclasses.dataclass(eq=False) +class ExceptionChainRepr(ExceptionRepr): + chain: Sequence[tuple[ReprTraceback, ReprFileLocation | None, str | None]] + + def __init__( + self, + chain: Sequence[tuple[ReprTraceback, ReprFileLocation | None, str | None]], + ) -> None: + # reprcrash and reprtraceback of the outermost (the newest) exception + # in the chain. 
+ super().__init__( + reprtraceback=chain[-1][0], + reprcrash=chain[-1][1], + ) + self.chain = chain + + def toterminal(self, tw: TerminalWriter) -> None: + for element in self.chain: + element[0].toterminal(tw) + if element[2] is not None: + tw.line("") + tw.line(element[2], yellow=True) + super().toterminal(tw) + + +@dataclasses.dataclass(eq=False) +class ReprExceptionInfo(ExceptionRepr): + reprtraceback: ReprTraceback + reprcrash: ReprFileLocation | None + + def toterminal(self, tw: TerminalWriter) -> None: + self.reprtraceback.toterminal(tw) + super().toterminal(tw) + + +@dataclasses.dataclass(eq=False) +class ReprTraceback(TerminalRepr): + reprentries: Sequence[ReprEntry | ReprEntryNative] + extraline: str | None + style: TracebackStyle + + entrysep: ClassVar = "_ " + + def toterminal(self, tw: TerminalWriter) -> None: + # The entries might have different styles. + for i, entry in enumerate(self.reprentries): + if entry.style == "long": + tw.line("") + entry.toterminal(tw) + if i < len(self.reprentries) - 1: + next_entry = self.reprentries[i + 1] + if entry.style == "long" or ( + entry.style == "short" and next_entry.style == "long" + ): + tw.sep(self.entrysep) + + if self.extraline: + tw.line(self.extraline) + + +class ReprTracebackNative(ReprTraceback): + def __init__(self, tblines: Sequence[str]) -> None: + self.reprentries = [ReprEntryNative(tblines)] + self.extraline = None + self.style = "native" + + +@dataclasses.dataclass(eq=False) +class ReprEntryNative(TerminalRepr): + lines: Sequence[str] + + style: ClassVar[TracebackStyle] = "native" + + def toterminal(self, tw: TerminalWriter) -> None: + tw.write("".join(self.lines)) + + +@dataclasses.dataclass(eq=False) +class ReprEntry(TerminalRepr): + lines: Sequence[str] + reprfuncargs: ReprFuncArgs | None + reprlocals: ReprLocals | None + reprfileloc: ReprFileLocation | None + style: TracebackStyle + + def _write_entry_lines(self, tw: TerminalWriter) -> None: + """Write the source code portions of a list of traceback entries with syntax highlighting. + + Usually entries are lines like these: + + " x = 1" + "> assert x == 2" + "E assert 1 == 2" + + This function takes care of rendering the "source" portions of it (the lines without + the "E" prefix) using syntax highlighting, taking care to not highlighting the ">" + character, as doing so might break line continuations. 
+ """ + if not self.lines: + return + + if self.style == "value": + # Using tw.write instead of tw.line for testing purposes due to TWMock implementation; + # lines written with TWMock.line and TWMock._write_source cannot be distinguished + # from each other, whereas lines written with TWMock.write are marked with TWMock.WRITE + for line in self.lines: + tw.write(line) + tw.write("\n") + return + + # separate indents and source lines that are not failures: we want to + # highlight the code but not the indentation, which may contain markers + # such as "> assert 0" + fail_marker = f"{FormattedExcinfo.fail_marker} " + indent_size = len(fail_marker) + indents: list[str] = [] + source_lines: list[str] = [] + failure_lines: list[str] = [] + for index, line in enumerate(self.lines): + is_failure_line = line.startswith(fail_marker) + if is_failure_line: + # from this point on all lines are considered part of the failure + failure_lines.extend(self.lines[index:]) + break + else: + indents.append(line[:indent_size]) + source_lines.append(line[indent_size:]) + + tw._write_source(source_lines, indents) + + # failure lines are always completely red and bold + for line in failure_lines: + tw.line(line, bold=True, red=True) + + def toterminal(self, tw: TerminalWriter) -> None: + if self.style == "short": + if self.reprfileloc: + self.reprfileloc.toterminal(tw) + self._write_entry_lines(tw) + if self.reprlocals: + self.reprlocals.toterminal(tw, indent=" " * 8) + return + + if self.reprfuncargs: + self.reprfuncargs.toterminal(tw) + + self._write_entry_lines(tw) + + if self.reprlocals: + tw.line("") + self.reprlocals.toterminal(tw) + if self.reprfileloc: + if self.lines: + tw.line("") + self.reprfileloc.toterminal(tw) + + def __str__(self) -> str: + return "{}\n{}\n{}".format( + "\n".join(self.lines), self.reprlocals, self.reprfileloc + ) + + +@dataclasses.dataclass(eq=False) +class ReprFileLocation(TerminalRepr): + path: str + lineno: int + message: str + + def __post_init__(self) -> None: + self.path = str(self.path) + + def toterminal(self, tw: TerminalWriter) -> None: + # Filename and lineno output for each entry, using an output format + # that most editors understand. + msg = self.message + i = msg.find("\n") + if i != -1: + msg = msg[:i] + tw.write(self.path, bold=True, red=True) + tw.line(f":{self.lineno}: {msg}") + + +@dataclasses.dataclass(eq=False) +class ReprLocals(TerminalRepr): + lines: Sequence[str] + + def toterminal(self, tw: TerminalWriter, indent="") -> None: + for line in self.lines: + tw.line(indent + line) + + +@dataclasses.dataclass(eq=False) +class ReprFuncArgs(TerminalRepr): + args: Sequence[tuple[str, object]] + + def toterminal(self, tw: TerminalWriter) -> None: + if self.args: + linesofar = "" + for name, value in self.args: + ns = f"{name} = {value}" + if len(ns) + len(linesofar) + 2 > tw.fullwidth: + if linesofar: + tw.line(linesofar) + linesofar = ns + else: + if linesofar: + linesofar += ", " + ns + else: + linesofar = ns + if linesofar: + tw.line(linesofar) + tw.line("") + + +def getfslineno(obj: object) -> tuple[str | Path, int]: + """Return source location (path, lineno) for the given object. + + If the source cannot be determined return ("", -1). + + The line number is 0-based. + """ + # xxx let decorators etc specify a sane ordering + # NOTE: this used to be done in _pytest.compat.getfslineno, initially added + # in 6ec13a2b9. It ("place_as") appears to be something very custom. 
+ obj = get_real_func(obj) + if hasattr(obj, "place_as"): + obj = obj.place_as + + try: + code = Code.from_function(obj) + except TypeError: + try: + fn = inspect.getsourcefile(obj) or inspect.getfile(obj) # type: ignore[arg-type] + except TypeError: + return "", -1 + + fspath = (fn and absolutepath(fn)) or "" + lineno = -1 + if fspath: + try: + _, lineno = findsource(obj) + except OSError: + pass + return fspath, lineno + + return code.path, code.firstlineno + + +def _byte_offset_to_character_offset(str, offset): + """Converts a byte based offset in a string to a code-point.""" + as_utf8 = str.encode("utf-8") + return len(as_utf8[:offset].decode("utf-8", errors="replace")) + + +# Relative paths that we use to filter traceback entries from appearing to the user; +# see filter_traceback. +# note: if we need to add more paths than what we have now we should probably use a list +# for better maintenance. + +_PLUGGY_DIR = Path(pluggy.__file__.rstrip("oc")) +# pluggy is either a package or a single module depending on the version +if _PLUGGY_DIR.name == "__init__.py": + _PLUGGY_DIR = _PLUGGY_DIR.parent +_PYTEST_DIR = Path(_pytest.__file__).parent + + +def filter_traceback(entry: TracebackEntry) -> bool: + """Return True if a TracebackEntry instance should be included in tracebacks. + + We hide traceback entries of: + + * dynamically generated code (no code to show up for it); + * internal traceback from pytest or its internal libraries, py and pluggy. + """ + # entry.path might sometimes return a str object when the entry + # points to dynamically generated code. + # See https://bitbucket.org/pytest-dev/py/issues/71. + raw_filename = entry.frame.code.raw.co_filename + is_generated = "<" in raw_filename and ">" in raw_filename + if is_generated: + return False + + # entry.path might point to a non-existing file, in which case it will + # also return a str object. See #1133. + p = Path(entry.path) + + parents = p.parents + if _PLUGGY_DIR in parents: + return False + if _PYTEST_DIR in parents: + return False + + return True + + +def filter_excinfo_traceback( + tbfilter: TracebackFilter, excinfo: ExceptionInfo[BaseException] +) -> Traceback: + """Filter the exception traceback in ``excinfo`` according to ``tbfilter``.""" + if callable(tbfilter): + return tbfilter(excinfo) + elif tbfilter: + return excinfo.traceback.filter(excinfo) + else: + return excinfo.traceback diff --git a/.venv/lib/python3.12/site-packages/_pytest/_code/source.py b/.venv/lib/python3.12/site-packages/_pytest/_code/source.py new file mode 100644 index 0000000..99c242d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/_code/source.py @@ -0,0 +1,225 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +import ast +from bisect import bisect_right +from collections.abc import Iterable +from collections.abc import Iterator +import inspect +import textwrap +import tokenize +import types +from typing import overload +import warnings + + +class Source: + """An immutable object holding a source code fragment. + + When using Source(...), the source lines are deindented. 
+ """ + + def __init__(self, obj: object = None) -> None: + if not obj: + self.lines: list[str] = [] + self.raw_lines: list[str] = [] + elif isinstance(obj, Source): + self.lines = obj.lines + self.raw_lines = obj.raw_lines + elif isinstance(obj, tuple | list): + self.lines = deindent(x.rstrip("\n") for x in obj) + self.raw_lines = list(x.rstrip("\n") for x in obj) + elif isinstance(obj, str): + self.lines = deindent(obj.split("\n")) + self.raw_lines = obj.split("\n") + else: + try: + rawcode = getrawcode(obj) + src = inspect.getsource(rawcode) + except TypeError: + src = inspect.getsource(obj) # type: ignore[arg-type] + self.lines = deindent(src.split("\n")) + self.raw_lines = src.split("\n") + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Source): + return NotImplemented + return self.lines == other.lines + + # Ignore type because of https://github.com/python/mypy/issues/4266. + __hash__ = None # type: ignore + + @overload + def __getitem__(self, key: int) -> str: ... + + @overload + def __getitem__(self, key: slice) -> Source: ... + + def __getitem__(self, key: int | slice) -> str | Source: + if isinstance(key, int): + return self.lines[key] + else: + if key.step not in (None, 1): + raise IndexError("cannot slice a Source with a step") + newsource = Source() + newsource.lines = self.lines[key.start : key.stop] + newsource.raw_lines = self.raw_lines[key.start : key.stop] + return newsource + + def __iter__(self) -> Iterator[str]: + return iter(self.lines) + + def __len__(self) -> int: + return len(self.lines) + + def strip(self) -> Source: + """Return new Source object with trailing and leading blank lines removed.""" + start, end = 0, len(self) + while start < end and not self.lines[start].strip(): + start += 1 + while end > start and not self.lines[end - 1].strip(): + end -= 1 + source = Source() + source.raw_lines = self.raw_lines + source.lines[:] = self.lines[start:end] + return source + + def indent(self, indent: str = " " * 4) -> Source: + """Return a copy of the source object with all lines indented by the + given indent-string.""" + newsource = Source() + newsource.raw_lines = self.raw_lines + newsource.lines = [(indent + line) for line in self.lines] + return newsource + + def getstatement(self, lineno: int) -> Source: + """Return Source statement which contains the given linenumber + (counted from 0).""" + start, end = self.getstatementrange(lineno) + return self[start:end] + + def getstatementrange(self, lineno: int) -> tuple[int, int]: + """Return (start, end) tuple which spans the minimal statement region + which containing the given lineno.""" + if not (0 <= lineno < len(self)): + raise IndexError("lineno out of range") + _ast, start, end = getstatementrange_ast(lineno, self) + return start, end + + def deindent(self) -> Source: + """Return a new Source object deindented.""" + newsource = Source() + newsource.lines[:] = deindent(self.lines) + newsource.raw_lines = self.raw_lines + return newsource + + def __str__(self) -> str: + return "\n".join(self.lines) + + +# +# helper functions +# + + +def findsource(obj) -> tuple[Source | None, int]: + try: + sourcelines, lineno = inspect.findsource(obj) + except Exception: + return None, -1 + source = Source() + source.lines = [line.rstrip() for line in sourcelines] + source.raw_lines = sourcelines + return source, lineno + + +def getrawcode(obj: object, trycall: bool = True) -> types.CodeType: + """Return code object for given function.""" + try: + return obj.__code__ # type: 
ignore[attr-defined,no-any-return] + except AttributeError: + pass + if trycall: + call = getattr(obj, "__call__", None) + if call and not isinstance(obj, type): + return getrawcode(call, trycall=False) + raise TypeError(f"could not get code object for {obj!r}") + + +def deindent(lines: Iterable[str]) -> list[str]: + return textwrap.dedent("\n".join(lines)).splitlines() + + +def get_statement_startend2(lineno: int, node: ast.AST) -> tuple[int, int | None]: + # Flatten all statements and except handlers into one lineno-list. + # AST's line numbers start indexing at 1. + values: list[int] = [] + for x in ast.walk(node): + if isinstance(x, ast.stmt | ast.ExceptHandler): + # The lineno points to the class/def, so need to include the decorators. + if isinstance(x, ast.ClassDef | ast.FunctionDef | ast.AsyncFunctionDef): + for d in x.decorator_list: + values.append(d.lineno - 1) + values.append(x.lineno - 1) + for name in ("finalbody", "orelse"): + val: list[ast.stmt] | None = getattr(x, name, None) + if val: + # Treat the finally/orelse part as its own statement. + values.append(val[0].lineno - 1 - 1) + values.sort() + insert_index = bisect_right(values, lineno) + start = values[insert_index - 1] + if insert_index >= len(values): + end = None + else: + end = values[insert_index] + return start, end + + +def getstatementrange_ast( + lineno: int, + source: Source, + assertion: bool = False, + astnode: ast.AST | None = None, +) -> tuple[ast.AST, int, int]: + if astnode is None: + content = str(source) + # See #4260: + # Don't produce duplicate warnings when compiling source to find AST. + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + astnode = ast.parse(content, "source", "exec") + + start, end = get_statement_startend2(lineno, astnode) + # We need to correct the end: + # - ast-parsing strips comments + # - there might be empty lines + # - we might have lesser indented code blocks at the end + if end is None: + end = len(source.lines) + + if end > start + 1: + # Make sure we don't span differently indented code blocks + # by using the BlockFinder helper used which inspect.getsource() uses itself. + block_finder = inspect.BlockFinder() + # If we start with an indented line, put blockfinder to "started" mode. + block_finder.started = ( + bool(source.lines[start]) and source.lines[start][0].isspace() + ) + it = ((x + "\n") for x in source.lines[start:end]) + try: + for tok in tokenize.generate_tokens(lambda: next(it)): + block_finder.tokeneater(*tok) + except (inspect.EndOfBlock, IndentationError): + end = block_finder.last + start + except Exception: + pass + + # The end might still point to a comment or empty line, correct it. 
+ while end: + line = source.lines[end - 1].lstrip() + if line.startswith("#") or not line: + end -= 1 + else: + break + return astnode, start, end diff --git a/.venv/lib/python3.12/site-packages/_pytest/_io/__init__.py b/.venv/lib/python3.12/site-packages/_pytest/_io/__init__.py new file mode 100644 index 0000000..b0155b1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/_io/__init__.py @@ -0,0 +1,10 @@ +from __future__ import annotations + +from .terminalwriter import get_terminal_width +from .terminalwriter import TerminalWriter + + +__all__ = [ + "TerminalWriter", + "get_terminal_width", +] diff --git a/.venv/lib/python3.12/site-packages/_pytest/_io/pprint.py b/.venv/lib/python3.12/site-packages/_pytest/_io/pprint.py new file mode 100644 index 0000000..28f0690 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/_io/pprint.py @@ -0,0 +1,673 @@ +# mypy: allow-untyped-defs +# This module was imported from the cpython standard library +# (https://github.com/python/cpython/) at commit +# c5140945c723ae6c4b7ee81ff720ac8ea4b52cfd (python3.12). +# +# +# Original Author: Fred L. Drake, Jr. +# fdrake@acm.org +# +# This is a simple little module I wrote to make life easier. I didn't +# see anything quite like it in the library, though I may have overlooked +# something. I wrote this when I was trying to read some heavily nested +# tuples with fairly non-descriptive content. This is modeled very much +# after Lisp/Scheme - style pretty-printing of lists. If you find it +# useful, thank small children who sleep at night. +from __future__ import annotations + +import collections as _collections +from collections.abc import Callable +from collections.abc import Iterator +import dataclasses as _dataclasses +from io import StringIO as _StringIO +import re +import types as _types +from typing import Any +from typing import IO + + +class _safe_key: + """Helper function for key functions when sorting unorderable objects. + + The wrapped-object will fallback to a Py2.x style comparison for + unorderable types (sorting first comparing the type name and then by + the obj ids). Does not work recursively, so dict.items() must have + _safe_key applied to both the key and the value. + + """ + + __slots__ = ["obj"] + + def __init__(self, obj): + self.obj = obj + + def __lt__(self, other): + try: + return self.obj < other.obj + except TypeError: + return (str(type(self.obj)), id(self.obj)) < ( + str(type(other.obj)), + id(other.obj), + ) + + +def _safe_tuple(t): + """Helper function for comparing 2-tuples""" + return _safe_key(t[0]), _safe_key(t[1]) + + +class PrettyPrinter: + def __init__( + self, + indent: int = 4, + width: int = 80, + depth: int | None = None, + ) -> None: + """Handle pretty printing operations onto a stream using a set of + configured parameters. + + indent + Number of spaces to indent for each level of nesting. + + width + Attempted maximum number of columns in the output. + + depth + The maximum depth to print out nested structures. 
+ + """ + if indent < 0: + raise ValueError("indent must be >= 0") + if depth is not None and depth <= 0: + raise ValueError("depth must be > 0") + if not width: + raise ValueError("width must be != 0") + self._depth = depth + self._indent_per_level = indent + self._width = width + + def pformat(self, object: Any) -> str: + sio = _StringIO() + self._format(object, sio, 0, 0, set(), 0) + return sio.getvalue() + + def _format( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + objid = id(object) + if objid in context: + stream.write(_recursion(object)) + return + + p = self._dispatch.get(type(object).__repr__, None) + if p is not None: + context.add(objid) + p(self, object, stream, indent, allowance, context, level + 1) + context.remove(objid) + elif ( + _dataclasses.is_dataclass(object) + and not isinstance(object, type) + and object.__dataclass_params__.repr # type:ignore[attr-defined] + and + # Check dataclass has generated repr method. + hasattr(object.__repr__, "__wrapped__") + and "__create_fn__" in object.__repr__.__wrapped__.__qualname__ + ): + context.add(objid) + self._pprint_dataclass( + object, stream, indent, allowance, context, level + 1 + ) + context.remove(objid) + else: + stream.write(self._repr(object, context, level)) + + def _pprint_dataclass( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + cls_name = object.__class__.__name__ + items = [ + (f.name, getattr(object, f.name)) + for f in _dataclasses.fields(object) + if f.repr + ] + stream.write(cls_name + "(") + self._format_namespace_items(items, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch: dict[ + Callable[..., str], + Callable[[PrettyPrinter, Any, IO[str], int, int, set[int], int], None], + ] = {} + + def _pprint_dict( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + write = stream.write + write("{") + items = sorted(object.items(), key=_safe_tuple) + self._format_dict_items(items, stream, indent, allowance, context, level) + write("}") + + _dispatch[dict.__repr__] = _pprint_dict + + def _pprint_ordered_dict( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if not len(object): + stream.write(repr(object)) + return + cls = object.__class__ + stream.write(cls.__name__ + "(") + self._pprint_dict(object, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_collections.OrderedDict.__repr__] = _pprint_ordered_dict + + def _pprint_list( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + stream.write("[") + self._format_items(object, stream, indent, allowance, context, level) + stream.write("]") + + _dispatch[list.__repr__] = _pprint_list + + def _pprint_tuple( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + stream.write("(") + self._format_items(object, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[tuple.__repr__] = _pprint_tuple + + def _pprint_set( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if not len(object): + stream.write(repr(object)) + return + typ = object.__class__ + if typ is set: + stream.write("{") + 
endchar = "}" + else: + stream.write(typ.__name__ + "({") + endchar = "})" + object = sorted(object, key=_safe_key) + self._format_items(object, stream, indent, allowance, context, level) + stream.write(endchar) + + _dispatch[set.__repr__] = _pprint_set + _dispatch[frozenset.__repr__] = _pprint_set + + def _pprint_str( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + write = stream.write + if not len(object): + write(repr(object)) + return + chunks = [] + lines = object.splitlines(True) + if level == 1: + indent += 1 + allowance += 1 + max_width1 = max_width = self._width - indent + for i, line in enumerate(lines): + rep = repr(line) + if i == len(lines) - 1: + max_width1 -= allowance + if len(rep) <= max_width1: + chunks.append(rep) + else: + # A list of alternating (non-space, space) strings + parts = re.findall(r"\S*\s*", line) + assert parts + assert not parts[-1] + parts.pop() # drop empty last part + max_width2 = max_width + current = "" + for j, part in enumerate(parts): + candidate = current + part + if j == len(parts) - 1 and i == len(lines) - 1: + max_width2 -= allowance + if len(repr(candidate)) > max_width2: + if current: + chunks.append(repr(current)) + current = part + else: + current = candidate + if current: + chunks.append(repr(current)) + if len(chunks) == 1: + write(rep) + return + if level == 1: + write("(") + for i, rep in enumerate(chunks): + if i > 0: + write("\n" + " " * indent) + write(rep) + if level == 1: + write(")") + + _dispatch[str.__repr__] = _pprint_str + + def _pprint_bytes( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + write = stream.write + if len(object) <= 4: + write(repr(object)) + return + parens = level == 1 + if parens: + indent += 1 + allowance += 1 + write("(") + delim = "" + for rep in _wrap_bytes_repr(object, self._width - indent, allowance): + write(delim) + write(rep) + if not delim: + delim = "\n" + " " * indent + if parens: + write(")") + + _dispatch[bytes.__repr__] = _pprint_bytes + + def _pprint_bytearray( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + write = stream.write + write("bytearray(") + self._pprint_bytes( + bytes(object), stream, indent + 10, allowance + 1, context, level + 1 + ) + write(")") + + _dispatch[bytearray.__repr__] = _pprint_bytearray + + def _pprint_mappingproxy( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + stream.write("mappingproxy(") + self._format(object.copy(), stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_types.MappingProxyType.__repr__] = _pprint_mappingproxy + + def _pprint_simplenamespace( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if type(object) is _types.SimpleNamespace: + # The SimpleNamespace repr is "namespace" instead of the class + # name, so we do the same here. For subclasses; use the class name. 
+ cls_name = "namespace" + else: + cls_name = object.__class__.__name__ + items = object.__dict__.items() + stream.write(cls_name + "(") + self._format_namespace_items(items, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_types.SimpleNamespace.__repr__] = _pprint_simplenamespace + + def _format_dict_items( + self, + items: list[tuple[Any, Any]], + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if not items: + return + + write = stream.write + item_indent = indent + self._indent_per_level + delimnl = "\n" + " " * item_indent + for key, ent in items: + write(delimnl) + write(self._repr(key, context, level)) + write(": ") + self._format(ent, stream, item_indent, 1, context, level) + write(",") + + write("\n" + " " * indent) + + def _format_namespace_items( + self, + items: list[tuple[Any, Any]], + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if not items: + return + + write = stream.write + item_indent = indent + self._indent_per_level + delimnl = "\n" + " " * item_indent + for key, ent in items: + write(delimnl) + write(key) + write("=") + if id(ent) in context: + # Special-case representation of recursion to match standard + # recursive dataclass repr. + write("...") + else: + self._format( + ent, + stream, + item_indent + len(key) + 1, + 1, + context, + level, + ) + + write(",") + + write("\n" + " " * indent) + + def _format_items( + self, + items: list[Any], + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if not items: + return + + write = stream.write + item_indent = indent + self._indent_per_level + delimnl = "\n" + " " * item_indent + + for item in items: + write(delimnl) + self._format(item, stream, item_indent, 1, context, level) + write(",") + + write("\n" + " " * indent) + + def _repr(self, object: Any, context: set[int], level: int) -> str: + return self._safe_repr(object, context.copy(), self._depth, level) + + def _pprint_default_dict( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + rdf = self._repr(object.default_factory, context, level) + stream.write(f"{object.__class__.__name__}({rdf}, ") + self._pprint_dict(object, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_collections.defaultdict.__repr__] = _pprint_default_dict + + def _pprint_counter( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + stream.write(object.__class__.__name__ + "(") + + if object: + stream.write("{") + items = object.most_common() + self._format_dict_items(items, stream, indent, allowance, context, level) + stream.write("}") + + stream.write(")") + + _dispatch[_collections.Counter.__repr__] = _pprint_counter + + def _pprint_chain_map( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if not len(object.maps) or (len(object.maps) == 1 and not len(object.maps[0])): + stream.write(repr(object)) + return + + stream.write(object.__class__.__name__ + "(") + self._format_items(object.maps, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_collections.ChainMap.__repr__] = _pprint_chain_map + + def _pprint_deque( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + 
stream.write(object.__class__.__name__ + "(") + if object.maxlen is not None: + stream.write(f"maxlen={object.maxlen}, ") + stream.write("[") + + self._format_items(object, stream, indent, allowance + 1, context, level) + stream.write("])") + + _dispatch[_collections.deque.__repr__] = _pprint_deque + + def _pprint_user_dict( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + self._format(object.data, stream, indent, allowance, context, level - 1) + + _dispatch[_collections.UserDict.__repr__] = _pprint_user_dict + + def _pprint_user_list( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + self._format(object.data, stream, indent, allowance, context, level - 1) + + _dispatch[_collections.UserList.__repr__] = _pprint_user_list + + def _pprint_user_string( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + self._format(object.data, stream, indent, allowance, context, level - 1) + + _dispatch[_collections.UserString.__repr__] = _pprint_user_string + + def _safe_repr( + self, object: Any, context: set[int], maxlevels: int | None, level: int + ) -> str: + typ = type(object) + if typ in _builtin_scalars: + return repr(object) + + r = getattr(typ, "__repr__", None) + + if issubclass(typ, dict) and r is dict.__repr__: + if not object: + return "{}" + objid = id(object) + if maxlevels and level >= maxlevels: + return "{...}" + if objid in context: + return _recursion(object) + context.add(objid) + components: list[str] = [] + append = components.append + level += 1 + for k, v in sorted(object.items(), key=_safe_tuple): + krepr = self._safe_repr(k, context, maxlevels, level) + vrepr = self._safe_repr(v, context, maxlevels, level) + append(f"{krepr}: {vrepr}") + context.remove(objid) + return "{{{}}}".format(", ".join(components)) + + if (issubclass(typ, list) and r is list.__repr__) or ( + issubclass(typ, tuple) and r is tuple.__repr__ + ): + if issubclass(typ, list): + if not object: + return "[]" + format = "[%s]" + elif len(object) == 1: + format = "(%s,)" + else: + if not object: + return "()" + format = "(%s)" + objid = id(object) + if maxlevels and level >= maxlevels: + return format % "..." 
+ if objid in context:
+ return _recursion(object)
+ context.add(objid)
+ components = []
+ append = components.append
+ level += 1
+ for o in object:
+ orepr = self._safe_repr(o, context, maxlevels, level)
+ append(orepr)
+ context.remove(objid)
+ return format % ", ".join(components)
+
+ return repr(object)
+
+
+_builtin_scalars = frozenset(
+ {str, bytes, bytearray, float, complex, bool, type(None), int}
+)
+
+
+def _recursion(object: Any) -> str:
+ return f"<Recursion on {type(object).__name__} with id={id(object)}>"
+
+
+def _wrap_bytes_repr(object: Any, width: int, allowance: int) -> Iterator[str]:
+ current = b""
+ last = len(object) // 4 * 4
+ for i in range(0, len(object), 4):
+ part = object[i : i + 4]
+ candidate = current + part
+ if i == last:
+ width -= allowance
+ if len(repr(candidate)) > width:
+ if current:
+ yield repr(current)
+ current = part
+ else:
+ current = candidate
+ if current:
+ yield repr(current) diff --git a/.venv/lib/python3.12/site-packages/_pytest/_io/saferepr.py b/.venv/lib/python3.12/site-packages/_pytest/_io/saferepr.py new file mode 100644 index 0000000..cee70e3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/_io/saferepr.py @@ -0,0 +1,130 @@ +from __future__ import annotations
+
+import pprint
+import reprlib
+
+
+def _try_repr_or_str(obj: object) -> str:
+ try:
+ return repr(obj)
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except BaseException:
+ return f'{type(obj).__name__}("{obj}")'
+
+
+def _format_repr_exception(exc: BaseException, obj: object) -> str:
+ try:
+ exc_info = _try_repr_or_str(exc)
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except BaseException as inner_exc:
+ exc_info = f"unpresentable exception ({_try_repr_or_str(inner_exc)})"
+ return (
+ f"<[{exc_info} raised in repr()] {type(obj).__name__} object at 0x{id(obj):x}>"
+ )
+
+
+def _ellipsize(s: str, maxsize: int) -> str:
+ if len(s) > maxsize:
+ i = max(0, (maxsize - 3) // 2)
+ j = max(0, maxsize - 3 - i)
+ return s[:i] + "..." + s[len(s) - j :]
+ return s
+
+
+class SafeRepr(reprlib.Repr):
+ """
+ repr.Repr that limits the resulting size of repr() and includes
+ information on exceptions raised during the call.
+ """
+
+ def __init__(self, maxsize: int | None, use_ascii: bool = False) -> None:
+ """
+ :param maxsize:
+ If not None, will truncate the resulting repr to that specific size, using ellipsis
+ somewhere in the middle to hide the extra text.
+ If None, will not impose any size limits on the returning repr.
+ """
+ super().__init__()
+ # ``maxstring`` is used by the superclass, and needs to be an int; using a
+ # very large number in case maxsize is None, meaning we want to disable
+ # truncation.
+ self.maxstring = maxsize if maxsize is not None else 1_000_000_000
+ self.maxsize = maxsize
+ self.use_ascii = use_ascii
+
+ def repr(self, x: object) -> str:
+ try:
+ if self.use_ascii:
+ s = ascii(x)
+ else:
+ s = super().repr(x)
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except BaseException as exc:
+ s = _format_repr_exception(exc, x)
+ if self.maxsize is not None:
+ s = _ellipsize(s, self.maxsize)
+ return s
+
+ def repr_instance(self, x: object, level: int) -> str:
+ try:
+ s = repr(x)
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except BaseException as exc:
+ s = _format_repr_exception(exc, x)
+ if self.maxsize is not None:
+ s = _ellipsize(s, self.maxsize)
+ return s
+
+
+def safeformat(obj: object) -> str:
+ """Return a pretty printed string for the given object.
+
+ Failing __repr__ functions of user instances will be represented
+ with a short exception info.
+ """ + try: + return pprint.pformat(obj) + except Exception as exc: + return _format_repr_exception(exc, obj) + + +# Maximum size of overall repr of objects to display during assertion errors. +DEFAULT_REPR_MAX_SIZE = 240 + + +def saferepr( + obj: object, maxsize: int | None = DEFAULT_REPR_MAX_SIZE, use_ascii: bool = False +) -> str: + """Return a size-limited safe repr-string for the given object. + + Failing __repr__ functions of user instances will be represented + with a short exception info and 'saferepr' generally takes + care to never raise exceptions itself. + + This function is a wrapper around the Repr/reprlib functionality of the + stdlib. + """ + return SafeRepr(maxsize, use_ascii).repr(obj) + + +def saferepr_unlimited(obj: object, use_ascii: bool = True) -> str: + """Return an unlimited-size safe repr-string for the given object. + + As with saferepr, failing __repr__ functions of user instances + will be represented with a short exception info. + + This function is a wrapper around simple repr. + + Note: a cleaner solution would be to alter ``saferepr``this way + when maxsize=None, but that might affect some other code. + """ + try: + if use_ascii: + return ascii(obj) + return repr(obj) + except Exception as exc: + return _format_repr_exception(exc, obj) diff --git a/.venv/lib/python3.12/site-packages/_pytest/_io/terminalwriter.py b/.venv/lib/python3.12/site-packages/_pytest/_io/terminalwriter.py new file mode 100644 index 0000000..9191b4e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/_io/terminalwriter.py @@ -0,0 +1,258 @@ +"""Helper functions for writing to terminals and files.""" + +from __future__ import annotations + +from collections.abc import Sequence +import os +import shutil +import sys +from typing import final +from typing import Literal +from typing import TextIO + +import pygments +from pygments.formatters.terminal import TerminalFormatter +from pygments.lexer import Lexer +from pygments.lexers.diff import DiffLexer +from pygments.lexers.python import PythonLexer + +from ..compat import assert_never +from .wcwidth import wcswidth + + +# This code was initially copied from py 1.8.1, file _io/terminalwriter.py. + + +def get_terminal_width() -> int: + width, _ = shutil.get_terminal_size(fallback=(80, 24)) + + # The Windows get_terminal_size may be bogus, let's sanify a bit. 
+ if width < 40: + width = 80 + + return width + + +def should_do_markup(file: TextIO) -> bool: + if os.environ.get("PY_COLORS") == "1": + return True + if os.environ.get("PY_COLORS") == "0": + return False + if os.environ.get("NO_COLOR"): + return False + if os.environ.get("FORCE_COLOR"): + return True + return ( + hasattr(file, "isatty") and file.isatty() and os.environ.get("TERM") != "dumb" + ) + + +@final +class TerminalWriter: + _esctable = dict( + black=30, + red=31, + green=32, + yellow=33, + blue=34, + purple=35, + cyan=36, + white=37, + Black=40, + Red=41, + Green=42, + Yellow=43, + Blue=44, + Purple=45, + Cyan=46, + White=47, + bold=1, + light=2, + blink=5, + invert=7, + ) + + def __init__(self, file: TextIO | None = None) -> None: + if file is None: + file = sys.stdout + if hasattr(file, "isatty") and file.isatty() and sys.platform == "win32": + try: + import colorama + except ImportError: + pass + else: + file = colorama.AnsiToWin32(file).stream + assert file is not None + self._file = file + self.hasmarkup = should_do_markup(file) + self._current_line = "" + self._terminal_width: int | None = None + self.code_highlight = True + + @property + def fullwidth(self) -> int: + if self._terminal_width is not None: + return self._terminal_width + return get_terminal_width() + + @fullwidth.setter + def fullwidth(self, value: int) -> None: + self._terminal_width = value + + @property + def width_of_current_line(self) -> int: + """Return an estimate of the width so far in the current line.""" + return wcswidth(self._current_line) + + def markup(self, text: str, **markup: bool) -> str: + for name in markup: + if name not in self._esctable: + raise ValueError(f"unknown markup: {name!r}") + if self.hasmarkup: + esc = [self._esctable[name] for name, on in markup.items() if on] + if esc: + text = "".join(f"\x1b[{cod}m" for cod in esc) + text + "\x1b[0m" + return text + + def sep( + self, + sepchar: str, + title: str | None = None, + fullwidth: int | None = None, + **markup: bool, + ) -> None: + if fullwidth is None: + fullwidth = self.fullwidth + # The goal is to have the line be as long as possible + # under the condition that len(line) <= fullwidth. + if sys.platform == "win32": + # If we print in the last column on windows we are on a + # new line but there is no way to verify/neutralize this + # (we may not know the exact line width). + # So let's be defensive to avoid empty lines in the output. + fullwidth -= 1 + if title is not None: + # we want 2 + 2*len(fill) + len(title) <= fullwidth + # i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth + # 2*len(sepchar)*N <= fullwidth - len(title) - 2 + # N <= (fullwidth - len(title) - 2) // (2*len(sepchar)) + N = max((fullwidth - len(title) - 2) // (2 * len(sepchar)), 1) + fill = sepchar * N + line = f"{fill} {title} {fill}" + else: + # we want len(sepchar)*N <= fullwidth + # i.e. N <= fullwidth // len(sepchar) + line = sepchar * (fullwidth // len(sepchar)) + # In some situations there is room for an extra sepchar at the right, + # in particular if we consider that with a sepchar like "_ " the + # trailing space is not important at the end of the line. 
+ if len(line) + len(sepchar.rstrip()) <= fullwidth: + line += sepchar.rstrip() + + self.line(line, **markup) + + def write(self, msg: str, *, flush: bool = False, **markup: bool) -> None: + if msg: + current_line = msg.rsplit("\n", 1)[-1] + if "\n" in msg: + self._current_line = current_line + else: + self._current_line += current_line + + msg = self.markup(msg, **markup) + + self.write_raw(msg, flush=flush) + + def write_raw(self, msg: str, *, flush: bool = False) -> None: + try: + self._file.write(msg) + except UnicodeEncodeError: + # Some environments don't support printing general Unicode + # strings, due to misconfiguration or otherwise; in that case, + # print the string escaped to ASCII. + # When the Unicode situation improves we should consider + # letting the error propagate instead of masking it (see #7475 + # for one brief attempt). + msg = msg.encode("unicode-escape").decode("ascii") + self._file.write(msg) + + if flush: + self.flush() + + def line(self, s: str = "", **markup: bool) -> None: + self.write(s, **markup) + self.write("\n") + + def flush(self) -> None: + self._file.flush() + + def _write_source(self, lines: Sequence[str], indents: Sequence[str] = ()) -> None: + """Write lines of source code possibly highlighted. + + Keeping this private for now because the API is clunky. We should discuss how + to evolve the terminal writer so we can have more precise color support, for example + being able to write part of a line in one color and the rest in another, and so on. + """ + if indents and len(indents) != len(lines): + raise ValueError( + f"indents size ({len(indents)}) should have same size as lines ({len(lines)})" + ) + if not indents: + indents = [""] * len(lines) + source = "\n".join(lines) + new_lines = self._highlight(source).splitlines() + # Would be better to strict=True but that fails some CI jobs. + for indent, new_line in zip(indents, new_lines, strict=False): + self.line(indent + new_line) + + def _get_pygments_lexer(self, lexer: Literal["python", "diff"]) -> Lexer: + if lexer == "python": + return PythonLexer() + elif lexer == "diff": + return DiffLexer() + else: + assert_never(lexer) + + def _get_pygments_formatter(self) -> TerminalFormatter: + from _pytest.config.exceptions import UsageError + + theme = os.getenv("PYTEST_THEME") + theme_mode = os.getenv("PYTEST_THEME_MODE", "dark") + + try: + return TerminalFormatter(bg=theme_mode, style=theme) + except pygments.util.ClassNotFound as e: + raise UsageError( + f"PYTEST_THEME environment variable has an invalid value: '{theme}'. " + "Hint: See available pygments styles with `pygmentize -L styles`." + ) from e + except pygments.util.OptionError as e: + raise UsageError( + f"PYTEST_THEME_MODE environment variable has an invalid value: '{theme_mode}'. " + "The allowed values are 'dark' (default) and 'light'." + ) from e + + def _highlight( + self, source: str, lexer: Literal["diff", "python"] = "python" + ) -> str: + """Highlight the given source if we have markup support.""" + if not source or not self.hasmarkup or not self.code_highlight: + return source + + pygments_lexer = self._get_pygments_lexer(lexer) + pygments_formatter = self._get_pygments_formatter() + + highlighted: str = pygments.highlight( + source, pygments_lexer, pygments_formatter + ) + # pygments terminal formatter may add a newline when there wasn't one. + # We don't want this, remove. 
+ if highlighted[-1] == "\n" and source[-1] != "\n": + highlighted = highlighted[:-1] + + # Some lexers will not set the initial color explicitly + # which may lead to the previous color being propagated to the + # start of the expression, so reset first. + highlighted = "\x1b[0m" + highlighted + + return highlighted diff --git a/.venv/lib/python3.12/site-packages/_pytest/_io/wcwidth.py b/.venv/lib/python3.12/site-packages/_pytest/_io/wcwidth.py new file mode 100644 index 0000000..23886ff --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/_io/wcwidth.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +from functools import lru_cache +import unicodedata + + +@lru_cache(100) +def wcwidth(c: str) -> int: + """Determine how many columns are needed to display a character in a terminal. + + Returns -1 if the character is not printable. + Returns 0, 1 or 2 for other characters. + """ + o = ord(c) + + # ASCII fast path. + if 0x20 <= o < 0x07F: + return 1 + + # Some Cf/Zp/Zl characters which should be zero-width. + if ( + o == 0x0000 + or 0x200B <= o <= 0x200F + or 0x2028 <= o <= 0x202E + or 0x2060 <= o <= 0x2063 + ): + return 0 + + category = unicodedata.category(c) + + # Control characters. + if category == "Cc": + return -1 + + # Combining characters with zero width. + if category in ("Me", "Mn"): + return 0 + + # Full/Wide east asian characters. + if unicodedata.east_asian_width(c) in ("F", "W"): + return 2 + + return 1 + + +def wcswidth(s: str) -> int: + """Determine how many columns are needed to display a string in a terminal. + + Returns -1 if the string contains non-printable characters. + """ + width = 0 + for c in unicodedata.normalize("NFC", s): + wc = wcwidth(c) + if wc < 0: + return -1 + width += wc + return width diff --git a/.venv/lib/python3.12/site-packages/_pytest/_py/__init__.py b/.venv/lib/python3.12/site-packages/_pytest/_py/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/_pytest/_py/error.py b/.venv/lib/python3.12/site-packages/_pytest/_py/error.py new file mode 100644 index 0000000..dace237 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/_py/error.py @@ -0,0 +1,119 @@ +"""create errno-specific classes for IO or os calls.""" + +from __future__ import annotations + +from collections.abc import Callable +import errno +import os +import sys +from typing import TYPE_CHECKING +from typing import TypeVar + + +if TYPE_CHECKING: + from typing_extensions import ParamSpec + + P = ParamSpec("P") + +R = TypeVar("R") + + +class Error(EnvironmentError): + def __repr__(self) -> str: + return "{}.{} {!r}: {} ".format( + self.__class__.__module__, + self.__class__.__name__, + self.__class__.__doc__, + " ".join(map(str, self.args)), + # repr(self.args) + ) + + def __str__(self) -> str: + s = "[{}]: {}".format( + self.__class__.__doc__, + " ".join(map(str, self.args)), + ) + return s + + +_winerrnomap = { + 2: errno.ENOENT, + 3: errno.ENOENT, + 17: errno.EEXIST, + 18: errno.EXDEV, + 13: errno.EBUSY, # empty cd drive, but ENOMEDIUM seems unavailable + 22: errno.ENOTDIR, + 20: errno.ENOTDIR, + 267: errno.ENOTDIR, + 5: errno.EACCES, # anything better? +} + + +class ErrorMaker: + """lazily provides Exception classes for each possible POSIX errno + (as defined per the 'errno' module). All such instances + subclass EnvironmentError. 
+ """ + + _errno2class: dict[int, type[Error]] = {} + + def __getattr__(self, name: str) -> type[Error]: + if name[0] == "_": + raise AttributeError(name) + eno = getattr(errno, name) + cls = self._geterrnoclass(eno) + setattr(self, name, cls) + return cls + + def _geterrnoclass(self, eno: int) -> type[Error]: + try: + return self._errno2class[eno] + except KeyError: + clsname = errno.errorcode.get(eno, f"UnknownErrno{eno}") + errorcls = type( + clsname, + (Error,), + {"__module__": "py.error", "__doc__": os.strerror(eno)}, + ) + self._errno2class[eno] = errorcls + return errorcls + + def checked_call( + self, func: Callable[P, R], *args: P.args, **kwargs: P.kwargs + ) -> R: + """Call a function and raise an errno-exception if applicable.""" + __tracebackhide__ = True + try: + return func(*args, **kwargs) + except Error: + raise + except OSError as value: + if not hasattr(value, "errno"): + raise + if sys.platform == "win32": + try: + # error: Invalid index type "Optional[int]" for "dict[int, int]"; expected type "int" [index] + # OK to ignore because we catch the KeyError below. + cls = self._geterrnoclass(_winerrnomap[value.errno]) # type:ignore[index] + except KeyError: + raise value + else: + # we are not on Windows, or we got a proper OSError + if value.errno is None: + cls = type( + "UnknownErrnoNone", + (Error,), + {"__module__": "py.error", "__doc__": None}, + ) + else: + cls = self._geterrnoclass(value.errno) + + raise cls(f"{func.__name__}{args!r}") + + +_error_maker = ErrorMaker() +checked_call = _error_maker.checked_call + + +def __getattr__(attr: str) -> type[Error]: + return getattr(_error_maker, attr) # type: ignore[no-any-return] diff --git a/.venv/lib/python3.12/site-packages/_pytest/_py/path.py b/.venv/lib/python3.12/site-packages/_pytest/_py/path.py new file mode 100644 index 0000000..b7131b0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/_py/path.py @@ -0,0 +1,1475 @@ +# mypy: allow-untyped-defs +"""local path implementation.""" + +from __future__ import annotations + +import atexit +from collections.abc import Callable +from contextlib import contextmanager +import fnmatch +import importlib.util +import io +import os +from os.path import abspath +from os.path import dirname +from os.path import exists +from os.path import isabs +from os.path import isdir +from os.path import isfile +from os.path import islink +from os.path import normpath +import posixpath +from stat import S_ISDIR +from stat import S_ISLNK +from stat import S_ISREG +import sys +from typing import Any +from typing import cast +from typing import Literal +from typing import overload +from typing import TYPE_CHECKING +import uuid +import warnings + +from . import error + + +# Moved from local.py. +iswin32 = sys.platform == "win32" or (getattr(os, "_name", False) == "nt") + + +class Checkers: + _depend_on_existence = "exists", "link", "dir", "file" + + def __init__(self, path): + self.path = path + + def dotfile(self): + return self.path.basename.startswith(".") + + def ext(self, arg): + if not arg.startswith("."): + arg = "." 
+ arg + return self.path.ext == arg + + def basename(self, arg): + return self.path.basename == arg + + def basestarts(self, arg): + return self.path.basename.startswith(arg) + + def relto(self, arg): + return self.path.relto(arg) + + def fnmatch(self, arg): + return self.path.fnmatch(arg) + + def endswith(self, arg): + return str(self.path).endswith(arg) + + def _evaluate(self, kw): + from .._code.source import getrawcode + + for name, value in kw.items(): + invert = False + meth = None + try: + meth = getattr(self, name) + except AttributeError: + if name[:3] == "not": + invert = True + try: + meth = getattr(self, name[3:]) + except AttributeError: + pass + if meth is None: + raise TypeError(f"no {name!r} checker available for {self.path!r}") + try: + if getrawcode(meth).co_argcount > 1: + if (not meth(value)) ^ invert: + return False + else: + if bool(value) ^ bool(meth()) ^ invert: + return False + except (error.ENOENT, error.ENOTDIR, error.EBUSY): + # EBUSY feels not entirely correct, + # but its kind of necessary since ENOMEDIUM + # is not accessible in python + for name in self._depend_on_existence: + if name in kw: + if kw.get(name): + return False + name = "not" + name + if name in kw: + if not kw.get(name): + return False + return True + + _statcache: Stat + + def _stat(self) -> Stat: + try: + return self._statcache + except AttributeError: + try: + self._statcache = self.path.stat() + except error.ELOOP: + self._statcache = self.path.lstat() + return self._statcache + + def dir(self): + return S_ISDIR(self._stat().mode) + + def file(self): + return S_ISREG(self._stat().mode) + + def exists(self): + return self._stat() + + def link(self): + st = self.path.lstat() + return S_ISLNK(st.mode) + + +class NeverRaised(Exception): + pass + + +class Visitor: + def __init__(self, fil, rec, ignore, bf, sort): + if isinstance(fil, str): + fil = FNMatcher(fil) + if isinstance(rec, str): + self.rec: Callable[[LocalPath], bool] = FNMatcher(rec) + elif not hasattr(rec, "__call__") and rec: + self.rec = lambda path: True + else: + self.rec = rec + self.fil = fil + self.ignore = ignore + self.breadthfirst = bf + self.optsort = cast(Callable[[Any], Any], sorted) if sort else (lambda x: x) + + def gen(self, path): + try: + entries = path.listdir() + except self.ignore: + return + rec = self.rec + dirs = self.optsort( + [p for p in entries if p.check(dir=1) and (rec is None or rec(p))] + ) + if not self.breadthfirst: + for subdir in dirs: + yield from self.gen(subdir) + for p in self.optsort(entries): + if self.fil is None or self.fil(p): + yield p + if self.breadthfirst: + for subdir in dirs: + yield from self.gen(subdir) + + +class FNMatcher: + def __init__(self, pattern): + self.pattern = pattern + + def __call__(self, path): + pattern = self.pattern + + if ( + pattern.find(path.sep) == -1 + and iswin32 + and pattern.find(posixpath.sep) != -1 + ): + # Running on Windows, the pattern has no Windows path separators, + # and the pattern has one or more Posix path separators. Replace + # the Posix path separators with the Windows path separator. + pattern = pattern.replace(posixpath.sep, path.sep) + + if pattern.find(path.sep) == -1: + name = path.basename + else: + name = str(path) # path.strpath # XXX svn? + if not os.path.isabs(pattern): + pattern = "*" + path.sep + pattern + return fnmatch.fnmatch(name, pattern) + + +def map_as_list(func, iter): + return list(map(func, iter)) + + +class Stat: + if TYPE_CHECKING: + + @property + def size(self) -> int: ... 
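# (Annotation, not part of the upstream file.) The `size` and `mtime` members here are
# declared only for type checkers; at runtime the `__getattr__` below forwards any
# attribute lookup to the wrapped os.stat_result, so e.g. `stat.size` reads `st_size`
# and `stat.mtime` reads `st_mtime`.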
+ + @property + def mtime(self) -> float: ... + + def __getattr__(self, name: str) -> Any: + return getattr(self._osstatresult, "st_" + name) + + def __init__(self, path, osstatresult): + self.path = path + self._osstatresult = osstatresult + + @property + def owner(self): + if iswin32: + raise NotImplementedError("XXX win32") + import pwd + + entry = error.checked_call(pwd.getpwuid, self.uid) # type:ignore[attr-defined,unused-ignore] + return entry[0] + + @property + def group(self): + """Return group name of file.""" + if iswin32: + raise NotImplementedError("XXX win32") + import grp + + entry = error.checked_call(grp.getgrgid, self.gid) # type:ignore[attr-defined,unused-ignore] + return entry[0] + + def isdir(self): + return S_ISDIR(self._osstatresult.st_mode) + + def isfile(self): + return S_ISREG(self._osstatresult.st_mode) + + def islink(self): + self.path.lstat() + return S_ISLNK(self._osstatresult.st_mode) + + +def getuserid(user): + import pwd + + if not isinstance(user, int): + user = pwd.getpwnam(user)[2] # type:ignore[attr-defined,unused-ignore] + return user + + +def getgroupid(group): + import grp + + if not isinstance(group, int): + group = grp.getgrnam(group)[2] # type:ignore[attr-defined,unused-ignore] + return group + + +class LocalPath: + """Object oriented interface to os.path and other local filesystem + related information. + """ + + class ImportMismatchError(ImportError): + """raised on pyimport() if there is a mismatch of __file__'s""" + + sep = os.sep + + def __init__(self, path=None, expanduser=False): + """Initialize and return a local Path instance. + + Path can be relative to the current directory. + If path is None it defaults to the current working directory. + If expanduser is True, tilde-expansion is performed. + Note that Path instances always carry an absolute path. + Note also that passing in a local path object will simply return + the exact same path object. Use new() to get a new copy. + """ + if path is None: + self.strpath = error.checked_call(os.getcwd) + else: + try: + path = os.fspath(path) + except TypeError: + raise ValueError( + "can only pass None, Path instances " + "or non-empty strings to LocalPath" + ) + if expanduser: + path = os.path.expanduser(path) + self.strpath = abspath(path) + + if sys.platform != "win32": + + def chown(self, user, group, rec=0): + """Change ownership to the given user and group. + user and group may be specified by a number or + by a name. if rec is True change ownership + recursively. 
+ """ + uid = getuserid(user) + gid = getgroupid(group) + if rec: + for x in self.visit(rec=lambda x: x.check(link=0)): + if x.check(link=0): + error.checked_call(os.chown, str(x), uid, gid) + error.checked_call(os.chown, str(self), uid, gid) + + def readlink(self) -> str: + """Return value of a symbolic link.""" + # https://github.com/python/mypy/issues/12278 + return error.checked_call(os.readlink, self.strpath) # type: ignore[arg-type,return-value,unused-ignore] + + def mklinkto(self, oldname): + """Posix style hard link to another name.""" + error.checked_call(os.link, str(oldname), str(self)) + + def mksymlinkto(self, value, absolute=1): + """Create a symbolic link with the given value (pointing to another name).""" + if absolute: + error.checked_call(os.symlink, str(value), self.strpath) + else: + base = self.common(value) + # with posix local paths '/' is always a common base + relsource = self.__class__(value).relto(base) + reldest = self.relto(base) + n = reldest.count(self.sep) + target = self.sep.join(("..",) * n + (relsource,)) + error.checked_call(os.symlink, target, self.strpath) + + def __div__(self, other): + return self.join(os.fspath(other)) + + __truediv__ = __div__ # py3k + + @property + def basename(self): + """Basename part of path.""" + return self._getbyspec("basename")[0] + + @property + def dirname(self): + """Dirname part of path.""" + return self._getbyspec("dirname")[0] + + @property + def purebasename(self): + """Pure base name of the path.""" + return self._getbyspec("purebasename")[0] + + @property + def ext(self): + """Extension of the path (including the '.').""" + return self._getbyspec("ext")[0] + + def read_binary(self): + """Read and return a bytestring from reading the path.""" + with self.open("rb") as f: + return f.read() + + def read_text(self, encoding): + """Read and return a Unicode string from reading the path.""" + with self.open("r", encoding=encoding) as f: + return f.read() + + def read(self, mode="r"): + """Read and return a bytestring from reading the path.""" + with self.open(mode) as f: + return f.read() + + def readlines(self, cr=1): + """Read and return a list of lines from the path. if cr is False, the + newline will be removed from the end of each line.""" + mode = "r" + + if not cr: + content = self.read(mode) + return content.split("\n") + else: + f = self.open(mode) + try: + return f.readlines() + finally: + f.close() + + def load(self): + """(deprecated) return object unpickled from self.read()""" + f = self.open("rb") + try: + import pickle + + return error.checked_call(pickle.load, f) + finally: + f.close() + + def move(self, target): + """Move this path to target.""" + if target.relto(self): + raise error.EINVAL(target, "cannot move path into a subdirectory of itself") + try: + self.rename(target) + except error.EXDEV: # invalid cross-device link + self.copy(target) + self.remove() + + def fnmatch(self, pattern): + """Return true if the basename/fullname matches the glob-'pattern'. + + valid pattern characters:: + + * matches everything + ? matches any single character + [seq] matches any character in seq + [!seq] matches any char not in seq + + If the pattern contains a path-separator then the full path + is used for pattern matching and a '*' is prepended to the + pattern. + + if the pattern doesn't contain a path-separator the pattern + is only matched against the basename. 
+ """ + return FNMatcher(pattern)(self) + + def relto(self, relpath): + """Return a string which is the relative part of the path + to the given 'relpath'. + """ + if not isinstance(relpath, str | LocalPath): + raise TypeError(f"{relpath!r}: not a string or path object") + strrelpath = str(relpath) + if strrelpath and strrelpath[-1] != self.sep: + strrelpath += self.sep + # assert strrelpath[-1] == self.sep + # assert strrelpath[-2] != self.sep + strself = self.strpath + if sys.platform == "win32" or getattr(os, "_name", None) == "nt": + if os.path.normcase(strself).startswith(os.path.normcase(strrelpath)): + return strself[len(strrelpath) :] + elif strself.startswith(strrelpath): + return strself[len(strrelpath) :] + return "" + + def ensure_dir(self, *args): + """Ensure the path joined with args is a directory.""" + return self.ensure(*args, dir=True) + + def bestrelpath(self, dest): + """Return a string which is a relative path from self + (assumed to be a directory) to dest such that + self.join(bestrelpath) == dest and if not such + path can be determined return dest. + """ + try: + if self == dest: + return os.curdir + base = self.common(dest) + if not base: # can be the case on windows + return str(dest) + self2base = self.relto(base) + reldest = dest.relto(base) + if self2base: + n = self2base.count(self.sep) + 1 + else: + n = 0 + lst = [os.pardir] * n + if reldest: + lst.append(reldest) + target = dest.sep.join(lst) + return target + except AttributeError: + return str(dest) + + def exists(self): + return self.check() + + def isdir(self): + return self.check(dir=1) + + def isfile(self): + return self.check(file=1) + + def parts(self, reverse=False): + """Return a root-first list of all ancestor directories + plus the path itself. + """ + current = self + lst = [self] + while 1: + last = current + current = current.dirpath() + if last == current: + break + lst.append(current) + if not reverse: + lst.reverse() + return lst + + def common(self, other): + """Return the common part shared with the other path + or None if there is no common part. + """ + last = None + for x, y in zip(self.parts(), other.parts()): + if x != y: + return last + last = x + return last + + def __add__(self, other): + """Return new path object with 'other' added to the basename""" + return self.new(basename=self.basename + str(other)) + + def visit(self, fil=None, rec=None, ignore=NeverRaised, bf=False, sort=False): + """Yields all paths below the current one + + fil is a filter (glob pattern or callable), if not matching the + path will not be yielded, defaulting to None (everything is + returned) + + rec is a filter (glob pattern or callable) that controls whether + a node is descended, defaulting to None + + ignore is an Exception class that is ignoredwhen calling dirlist() + on any of the paths (by default, all exceptions are reported) + + bf if True will cause a breadthfirst search instead of the + default depthfirst. Default: False + + sort if True will sort entries within each directory level. 
+ """ + yield from Visitor(fil, rec, ignore, bf, sort).gen(self) + + def _sortlist(self, res, sort): + if sort: + if hasattr(sort, "__call__"): + warnings.warn( + DeprecationWarning( + "listdir(sort=callable) is deprecated and breaks on python3" + ), + stacklevel=3, + ) + res.sort(sort) + else: + res.sort() + + def __fspath__(self): + return self.strpath + + def __hash__(self): + s = self.strpath + if iswin32: + s = s.lower() + return hash(s) + + def __eq__(self, other): + s1 = os.fspath(self) + try: + s2 = os.fspath(other) + except TypeError: + return False + if iswin32: + s1 = s1.lower() + try: + s2 = s2.lower() + except AttributeError: + return False + return s1 == s2 + + def __ne__(self, other): + return not (self == other) + + def __lt__(self, other): + return os.fspath(self) < os.fspath(other) + + def __gt__(self, other): + return os.fspath(self) > os.fspath(other) + + def samefile(self, other): + """Return True if 'other' references the same file as 'self'.""" + other = os.fspath(other) + if not isabs(other): + other = abspath(other) + if self == other: + return True + if not hasattr(os.path, "samefile"): + return False + return error.checked_call(os.path.samefile, self.strpath, other) + + def remove(self, rec=1, ignore_errors=False): + """Remove a file or directory (or a directory tree if rec=1). + if ignore_errors is True, errors while removing directories will + be ignored. + """ + if self.check(dir=1, link=0): + if rec: + # force remove of readonly files on windows + if iswin32: + self.chmod(0o700, rec=1) + import shutil + + error.checked_call( + shutil.rmtree, self.strpath, ignore_errors=ignore_errors + ) + else: + error.checked_call(os.rmdir, self.strpath) + else: + if iswin32: + self.chmod(0o700) + error.checked_call(os.remove, self.strpath) + + def computehash(self, hashtype="md5", chunksize=524288): + """Return hexdigest of hashvalue for this file.""" + try: + try: + import hashlib as mod + except ImportError: + if hashtype == "sha1": + hashtype = "sha" + mod = __import__(hashtype) + hash = getattr(mod, hashtype)() + except (AttributeError, ImportError): + raise ValueError(f"Don't know how to compute {hashtype!r} hash") + f = self.open("rb") + try: + while 1: + buf = f.read(chunksize) + if not buf: + return hash.hexdigest() + hash.update(buf) + finally: + f.close() + + def new(self, **kw): + """Create a modified version of this path. + the following keyword arguments modify various path parts:: + + a:/some/path/to/a/file.ext + xx drive + xxxxxxxxxxxxxxxxx dirname + xxxxxxxx basename + xxxx purebasename + xxx ext + """ + obj = object.__new__(self.__class__) + if not kw: + obj.strpath = self.strpath + return obj + drive, dirname, _basename, purebasename, ext = self._getbyspec( + "drive,dirname,basename,purebasename,ext" + ) + if "basename" in kw: + if "purebasename" in kw or "ext" in kw: + raise ValueError(f"invalid specification {kw!r}") + else: + pb = kw.setdefault("purebasename", purebasename) + try: + ext = kw["ext"] + except KeyError: + pass + else: + if ext and not ext.startswith("."): + ext = "." 
+ ext + kw["basename"] = pb + ext + + if "dirname" in kw and not kw["dirname"]: + kw["dirname"] = drive + else: + kw.setdefault("dirname", dirname) + kw.setdefault("sep", self.sep) + obj.strpath = normpath("{dirname}{sep}{basename}".format(**kw)) + return obj + + def _getbyspec(self, spec: str) -> list[str]: + """See new for what 'spec' can be.""" + res = [] + parts = self.strpath.split(self.sep) + + args = filter(None, spec.split(",")) + for name in args: + if name == "drive": + res.append(parts[0]) + elif name == "dirname": + res.append(self.sep.join(parts[:-1])) + else: + basename = parts[-1] + if name == "basename": + res.append(basename) + else: + i = basename.rfind(".") + if i == -1: + purebasename, ext = basename, "" + else: + purebasename, ext = basename[:i], basename[i:] + if name == "purebasename": + res.append(purebasename) + elif name == "ext": + res.append(ext) + else: + raise ValueError(f"invalid part specification {name!r}") + return res + + def dirpath(self, *args, **kwargs): + """Return the directory path joined with any given path arguments.""" + if not kwargs: + path = object.__new__(self.__class__) + path.strpath = dirname(self.strpath) + if args: + path = path.join(*args) + return path + return self.new(basename="").join(*args, **kwargs) + + def join(self, *args: os.PathLike[str], abs: bool = False) -> LocalPath: + """Return a new path by appending all 'args' as path + components. if abs=1 is used restart from root if any + of the args is an absolute path. + """ + sep = self.sep + strargs = [os.fspath(arg) for arg in args] + strpath = self.strpath + if abs: + newargs: list[str] = [] + for arg in reversed(strargs): + if isabs(arg): + strpath = arg + strargs = newargs + break + newargs.insert(0, arg) + # special case for when we have e.g. strpath == "/" + actual_sep = "" if strpath.endswith(sep) else sep + for arg in strargs: + arg = arg.strip(sep) + if iswin32: + # allow unix style paths even on windows. + arg = arg.strip("/") + arg = arg.replace("/", sep) + strpath = strpath + actual_sep + arg + actual_sep = sep + obj = object.__new__(self.__class__) + obj.strpath = normpath(strpath) + return obj + + def open(self, mode="r", ensure=False, encoding=None): + """Return an opened file with the given mode. + + If ensure is True, create parent directories if needed. + """ + if ensure: + self.dirpath().ensure(dir=1) + if encoding: + return error.checked_call( + io.open, + self.strpath, + mode, + encoding=encoding, + ) + return error.checked_call(open, self.strpath, mode) + + def _fastjoin(self, name): + child = object.__new__(self.__class__) + child.strpath = self.strpath + self.sep + name + return child + + def islink(self): + return islink(self.strpath) + + def check(self, **kw): + """Check a path for existence and properties. + + Without arguments, return True if the path exists, otherwise False. 
+ + valid checkers:: + + file = 1 # is a file + file = 0 # is not a file (may not even exist) + dir = 1 # is a dir + link = 1 # is a link + exists = 1 # exists + + You can specify multiple checker definitions, for example:: + + path.check(file=1, link=1) # a link pointing to a file + """ + if not kw: + return exists(self.strpath) + if len(kw) == 1: + if "dir" in kw: + return not kw["dir"] ^ isdir(self.strpath) + if "file" in kw: + return not kw["file"] ^ isfile(self.strpath) + if not kw: + kw = {"exists": 1} + return Checkers(self)._evaluate(kw) + + _patternchars = set("*?[" + os.sep) + + def listdir(self, fil=None, sort=None): + """List directory contents, possibly filter by the given fil func + and possibly sorted. + """ + if fil is None and sort is None: + names = error.checked_call(os.listdir, self.strpath) + return map_as_list(self._fastjoin, names) + if isinstance(fil, str): + if not self._patternchars.intersection(fil): + child = self._fastjoin(fil) + if exists(child.strpath): + return [child] + return [] + fil = FNMatcher(fil) + names = error.checked_call(os.listdir, self.strpath) + res = [] + for name in names: + child = self._fastjoin(name) + if fil is None or fil(child): + res.append(child) + self._sortlist(res, sort) + return res + + def size(self) -> int: + """Return size of the underlying file object""" + return self.stat().size + + def mtime(self) -> float: + """Return last modification time of the path.""" + return self.stat().mtime + + def copy(self, target, mode=False, stat=False): + """Copy path to target. + + If mode is True, will copy permission from path to target. + If stat is True, copy permission, last modification + time, last access time, and flags from path to target. + """ + if self.check(file=1): + if target.check(dir=1): + target = target.join(self.basename) + assert self != target + copychunked(self, target) + if mode: + copymode(self.strpath, target.strpath) + if stat: + copystat(self, target) + else: + + def rec(p): + return p.check(link=0) + + for x in self.visit(rec=rec): + relpath = x.relto(self) + newx = target.join(relpath) + newx.dirpath().ensure(dir=1) + if x.check(link=1): + newx.mksymlinkto(x.readlink()) + continue + elif x.check(file=1): + copychunked(x, newx) + elif x.check(dir=1): + newx.ensure(dir=1) + if mode: + copymode(x.strpath, newx.strpath) + if stat: + copystat(x, newx) + + def rename(self, target): + """Rename this path to target.""" + target = os.fspath(target) + return error.checked_call(os.rename, self.strpath, target) + + def dump(self, obj, bin=1): + """Pickle object into path location""" + f = self.open("wb") + import pickle + + try: + error.checked_call(pickle.dump, obj, f, bin) + finally: + f.close() + + def mkdir(self, *args): + """Create & return the directory joined with args.""" + p = self.join(*args) + error.checked_call(os.mkdir, os.fspath(p)) + return p + + def write_binary(self, data, ensure=False): + """Write binary data into path. If ensure is True create + missing parent directories. + """ + if ensure: + self.dirpath().ensure(dir=1) + with self.open("wb") as f: + f.write(data) + + def write_text(self, data, encoding, ensure=False): + """Write text data into path using the specified encoding. + If ensure is True create missing parent directories. + """ + if ensure: + self.dirpath().ensure(dir=1) + with self.open("w", encoding=encoding) as f: + f.write(data) + + def write(self, data, mode="w", ensure=False): + """Write data into path. If ensure is True create + missing parent directories. 
+ """ + if ensure: + self.dirpath().ensure(dir=1) + if "b" in mode: + if not isinstance(data, bytes): + raise ValueError("can only process bytes") + else: + if not isinstance(data, str): + if not isinstance(data, bytes): + data = str(data) + else: + data = data.decode(sys.getdefaultencoding()) + f = self.open(mode) + try: + f.write(data) + finally: + f.close() + + def _ensuredirs(self): + parent = self.dirpath() + if parent == self: + return self + if parent.check(dir=0): + parent._ensuredirs() + if self.check(dir=0): + try: + self.mkdir() + except error.EEXIST: + # race condition: file/dir created by another thread/process. + # complain if it is not a dir + if self.check(dir=0): + raise + return self + + def ensure(self, *args, **kwargs): + """Ensure that an args-joined path exists (by default as + a file). if you specify a keyword argument 'dir=True' + then the path is forced to be a directory path. + """ + p = self.join(*args) + if kwargs.get("dir", 0): + return p._ensuredirs() + else: + p.dirpath()._ensuredirs() + if not p.check(file=1): + p.open("wb").close() + return p + + @overload + def stat(self, raising: Literal[True] = ...) -> Stat: ... + + @overload + def stat(self, raising: Literal[False]) -> Stat | None: ... + + def stat(self, raising: bool = True) -> Stat | None: + """Return an os.stat() tuple.""" + if raising: + return Stat(self, error.checked_call(os.stat, self.strpath)) + try: + return Stat(self, os.stat(self.strpath)) + except KeyboardInterrupt: + raise + except Exception: + return None + + def lstat(self) -> Stat: + """Return an os.lstat() tuple.""" + return Stat(self, error.checked_call(os.lstat, self.strpath)) + + def setmtime(self, mtime=None): + """Set modification time for the given path. if 'mtime' is None + (the default) then the file's mtime is set to current time. + + Note that the resolution for 'mtime' is platform dependent. + """ + if mtime is None: + return error.checked_call(os.utime, self.strpath, mtime) + try: + return error.checked_call(os.utime, self.strpath, (-1, mtime)) + except error.EINVAL: + return error.checked_call(os.utime, self.strpath, (self.atime(), mtime)) + + def chdir(self): + """Change directory to self and return old current directory""" + try: + old = self.__class__() + except error.ENOENT: + old = None + error.checked_call(os.chdir, self.strpath) + return old + + @contextmanager + def as_cwd(self): + """ + Return a context manager, which changes to the path's dir during the + managed "with" context. + On __enter__ it returns the old dir, which might be ``None``. + """ + old = self.chdir() + try: + yield old + finally: + if old is not None: + old.chdir() + + def realpath(self): + """Return a new path which contains no symbolic links.""" + return self.__class__(os.path.realpath(self.strpath)) + + def atime(self): + """Return last access time of the path.""" + return self.stat().atime + + def __repr__(self): + return f"local({self.strpath!r})" + + def __str__(self): + """Return string representation of the Path.""" + return self.strpath + + def chmod(self, mode, rec=0): + """Change permissions to the given mode. If mode is an + integer it directly encodes the os-specific modes. + if rec is True perform recursively. 
+ """ + if not isinstance(mode, int): + raise TypeError(f"mode {mode!r} must be an integer") + if rec: + for x in self.visit(rec=rec): + error.checked_call(os.chmod, str(x), mode) + error.checked_call(os.chmod, self.strpath, mode) + + def pypkgpath(self): + """Return the Python package path by looking for the last + directory upwards which still contains an __init__.py. + Return None if a pkgpath cannot be determined. + """ + pkgpath = None + for parent in self.parts(reverse=True): + if parent.isdir(): + if not parent.join("__init__.py").exists(): + break + if not isimportable(parent.basename): + break + pkgpath = parent + return pkgpath + + def _ensuresyspath(self, ensuremode, path): + if ensuremode: + s = str(path) + if ensuremode == "append": + if s not in sys.path: + sys.path.append(s) + else: + if s != sys.path[0]: + sys.path.insert(0, s) + + def pyimport(self, modname=None, ensuresyspath=True): + """Return path as an imported python module. + + If modname is None, look for the containing package + and construct an according module name. + The module will be put/looked up in sys.modules. + if ensuresyspath is True then the root dir for importing + the file (taking __init__.py files into account) will + be prepended to sys.path if it isn't there already. + If ensuresyspath=="append" the root dir will be appended + if it isn't already contained in sys.path. + if ensuresyspath is False no modification of syspath happens. + + Special value of ensuresyspath=="importlib" is intended + purely for using in pytest, it is capable only of importing + separate .py files outside packages, e.g. for test suite + without any __init__.py file. It effectively allows having + same-named test modules in different places and offers + mild opt-in via this option. Note that it works only in + recent versions of python. + """ + if not self.check(): + raise error.ENOENT(self) + + if ensuresyspath == "importlib": + if modname is None: + modname = self.purebasename + spec = importlib.util.spec_from_file_location(modname, str(self)) + if spec is None or spec.loader is None: + raise ImportError(f"Can't find module {modname} at location {self!s}") + mod = importlib.util.module_from_spec(spec) + spec.loader.exec_module(mod) + return mod + + pkgpath = None + if modname is None: + pkgpath = self.pypkgpath() + if pkgpath is not None: + pkgroot = pkgpath.dirpath() + names = self.new(ext="").relto(pkgroot).split(self.sep) + if names[-1] == "__init__": + names.pop() + modname = ".".join(names) + else: + pkgroot = self.dirpath() + modname = self.purebasename + + self._ensuresyspath(ensuresyspath, pkgroot) + __import__(modname) + mod = sys.modules[modname] + if self.basename == "__init__.py": + return mod # we don't check anything as we might + # be in a namespace package ... 
too icky to check + modfile = mod.__file__ + assert modfile is not None + if modfile[-4:] in (".pyc", ".pyo"): + modfile = modfile[:-1] + elif modfile.endswith("$py.class"): + modfile = modfile[:-9] + ".py" + if modfile.endswith(os.sep + "__init__.py"): + if self.basename != "__init__.py": + modfile = modfile[:-12] + try: + issame = self.samefile(modfile) + except error.ENOENT: + issame = False + if not issame: + ignore = os.getenv("PY_IGNORE_IMPORTMISMATCH") + if ignore != "1": + raise self.ImportMismatchError(modname, modfile, self) + return mod + else: + try: + return sys.modules[modname] + except KeyError: + # we have a custom modname, do a pseudo-import + import types + + mod = types.ModuleType(modname) + mod.__file__ = str(self) + sys.modules[modname] = mod + try: + with open(str(self), "rb") as f: + exec(f.read(), mod.__dict__) + except BaseException: + del sys.modules[modname] + raise + return mod + + def sysexec(self, *argv: os.PathLike[str], **popen_opts: Any) -> str: + """Return stdout text from executing a system child process, + where the 'self' path points to executable. + The process is directly invoked and not through a system shell. + """ + from subprocess import PIPE + from subprocess import Popen + + popen_opts.pop("stdout", None) + popen_opts.pop("stderr", None) + proc = Popen( + [str(self)] + [str(arg) for arg in argv], + **popen_opts, + stdout=PIPE, + stderr=PIPE, + ) + stdout: str | bytes + stdout, stderr = proc.communicate() + ret = proc.wait() + if isinstance(stdout, bytes): + stdout = stdout.decode(sys.getdefaultencoding()) + if ret != 0: + if isinstance(stderr, bytes): + stderr = stderr.decode(sys.getdefaultencoding()) + raise RuntimeError( + ret, + ret, + str(self), + stdout, + stderr, + ) + return stdout + + @classmethod + def sysfind(cls, name, checker=None, paths=None): + """Return a path object found by looking at the systems + underlying PATH specification. If the checker is not None + it will be invoked to filter matching paths. If a binary + cannot be found, None is returned + Note: This is probably not working on plain win32 systems + but may work on cygwin. + """ + if isabs(name): + p = local(name) + if p.check(file=1): + return p + else: + if paths is None: + if iswin32: + paths = os.environ["Path"].split(";") + if "" not in paths and "." not in paths: + paths.append(".") + try: + systemroot = os.environ["SYSTEMROOT"] + except KeyError: + pass + else: + paths = [ + path.replace("%SystemRoot%", systemroot) for path in paths + ] + else: + paths = os.environ["PATH"].split(":") + tryadd = [] + if iswin32: + tryadd += os.environ["PATHEXT"].split(os.pathsep) + tryadd.append("") + + for x in paths: + for addext in tryadd: + p = local(x).join(name, abs=True) + addext + try: + if p.check(file=1): + if checker: + if not checker(p): + continue + return p + except error.EACCES: + pass + return None + + @classmethod + def _gethomedir(cls): + try: + x = os.environ["HOME"] + except KeyError: + try: + x = os.environ["HOMEDRIVE"] + os.environ["HOMEPATH"] + except KeyError: + return None + return cls(x) + + # """ + # special class constructors for local filesystem paths + # """ + @classmethod + def get_temproot(cls): + """Return the system's temporary directory + (where tempfiles are usually created in) + """ + import tempfile + + return local(tempfile.gettempdir()) + + @classmethod + def mkdtemp(cls, rootdir=None): + """Return a Path object pointing to a fresh new temporary directory + (which we created ourselves). 
+ """ + import tempfile + + if rootdir is None: + rootdir = cls.get_temproot() + path = error.checked_call(tempfile.mkdtemp, dir=str(rootdir)) + return cls(path) + + @classmethod + def make_numbered_dir( + cls, prefix="session-", rootdir=None, keep=3, lock_timeout=172800 + ): # two days + """Return unique directory with a number greater than the current + maximum one. The number is assumed to start directly after prefix. + if keep is true directories with a number less than (maxnum-keep) + will be removed. If .lock files are used (lock_timeout non-zero), + algorithm is multi-process safe. + """ + if rootdir is None: + rootdir = cls.get_temproot() + + nprefix = prefix.lower() + + def parse_num(path): + """Parse the number out of a path (if it matches the prefix)""" + nbasename = path.basename.lower() + if nbasename.startswith(nprefix): + try: + return int(nbasename[len(nprefix) :]) + except ValueError: + pass + + def create_lockfile(path): + """Exclusively create lockfile. Throws when failed""" + mypid = os.getpid() + lockfile = path.join(".lock") + if hasattr(lockfile, "mksymlinkto"): + lockfile.mksymlinkto(str(mypid)) + else: + fd = error.checked_call( + os.open, str(lockfile), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644 + ) + with os.fdopen(fd, "w") as f: + f.write(str(mypid)) + return lockfile + + def atexit_remove_lockfile(lockfile): + """Ensure lockfile is removed at process exit""" + mypid = os.getpid() + + def try_remove_lockfile(): + # in a fork() situation, only the last process should + # remove the .lock, otherwise the other processes run the + # risk of seeing their temporary dir disappear. For now + # we remove the .lock in the parent only (i.e. we assume + # that the children finish before the parent). + if os.getpid() != mypid: + return + try: + lockfile.remove() + except error.Error: + pass + + atexit.register(try_remove_lockfile) + + # compute the maximum number currently in use with the prefix + lastmax = None + while True: + maxnum = -1 + for path in rootdir.listdir(): + num = parse_num(path) + if num is not None: + maxnum = max(maxnum, num) + + # make the new directory + try: + udir = rootdir.mkdir(prefix + str(maxnum + 1)) + if lock_timeout: + lockfile = create_lockfile(udir) + atexit_remove_lockfile(lockfile) + except (error.EEXIST, error.ENOENT, error.EBUSY): + # race condition (1): another thread/process created the dir + # in the meantime - try again + # race condition (2): another thread/process spuriously acquired + # lock treating empty directory as candidate + # for removal - try again + # race condition (3): another thread/process tried to create the lock at + # the same time (happened in Python 3.3 on Windows) + # https://ci.appveyor.com/project/pytestbot/py/build/1.0.21/job/ffi85j4c0lqwsfwa + if lastmax == maxnum: + raise + lastmax = maxnum + continue + break + + def get_mtime(path): + """Read file modification time""" + try: + return path.lstat().mtime + except error.Error: + pass + + garbage_prefix = prefix + "garbage-" + + def is_garbage(path): + """Check if path denotes directory scheduled for removal""" + bn = path.basename + return bn.startswith(garbage_prefix) + + # prune old directories + udir_time = get_mtime(udir) + if keep and udir_time: + for path in rootdir.listdir(): + num = parse_num(path) + if num is not None and num <= (maxnum - keep): + try: + # try acquiring lock to remove directory as exclusive user + if lock_timeout: + create_lockfile(path) + except (error.EEXIST, error.ENOENT, error.EBUSY): + path_time = get_mtime(path) + if not 
path_time: + # assume directory doesn't exist now + continue + if abs(udir_time - path_time) < lock_timeout: + # assume directory with lockfile exists + # and lock timeout hasn't expired yet + continue + + # path dir locked for exclusive use + # and scheduled for removal to avoid another thread/process + # treating it as a new directory or removal candidate + garbage_path = rootdir.join(garbage_prefix + str(uuid.uuid4())) + try: + path.rename(garbage_path) + garbage_path.remove(rec=1) + except KeyboardInterrupt: + raise + except Exception: # this might be error.Error, WindowsError ... + pass + if is_garbage(path): + try: + path.remove(rec=1) + except KeyboardInterrupt: + raise + except Exception: # this might be error.Error, WindowsError ... + pass + + # make link... + try: + username = os.environ["USER"] # linux, et al + except KeyError: + try: + username = os.environ["USERNAME"] # windows + except KeyError: + username = "current" + + src = str(udir) + dest = src[: src.rfind("-")] + "-" + username + try: + os.unlink(dest) + except OSError: + pass + try: + os.symlink(src, dest) + except (OSError, AttributeError, NotImplementedError): + pass + + return udir + + +def copymode(src, dest): + """Copy permission from src to dst.""" + import shutil + + shutil.copymode(src, dest) + + +def copystat(src, dest): + """Copy permission, last modification time, + last access time, and flags from src to dst.""" + import shutil + + shutil.copystat(str(src), str(dest)) + + +def copychunked(src, dest): + chunksize = 524288 # half a meg of bytes + fsrc = src.open("rb") + try: + fdest = dest.open("wb") + try: + while 1: + buf = fsrc.read(chunksize) + if not buf: + break + fdest.write(buf) + finally: + fdest.close() + finally: + fsrc.close() + + +def isimportable(name): + if name and (name[0].isalpha() or name[0] == "_"): + name = name.replace("_", "") + return not name or name.isalnum() + + +local = LocalPath diff --git a/.venv/lib/python3.12/site-packages/_pytest/_version.py b/.venv/lib/python3.12/site-packages/_pytest/_version.py new file mode 100644 index 0000000..e5c1257 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/_version.py @@ -0,0 +1,34 @@ +# file generated by setuptools-scm +# don't change, don't track in version control + +__all__ = [ + "__version__", + "__version_tuple__", + "version", + "version_tuple", + "__commit_id__", + "commit_id", +] + +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple + from typing import Union + + VERSION_TUPLE = Tuple[Union[int, str], ...] 
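# (Annotation, not part of the generated file.) The aliases in this TYPE_CHECKING
# branch are only seen by type checkers; at runtime the `else` branch below rebinds
# both VERSION_TUPLE and COMMIT_ID to plain `object`, so the annotations further down
# stay valid without importing typing at runtime.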
+ COMMIT_ID = Union[str, None] +else: + VERSION_TUPLE = object + COMMIT_ID = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE +commit_id: COMMIT_ID +__commit_id__: COMMIT_ID + +__version__ = version = '9.0.2' +__version_tuple__ = version_tuple = (9, 0, 2) + +__commit_id__ = commit_id = None diff --git a/.venv/lib/python3.12/site-packages/_pytest/assertion/__init__.py b/.venv/lib/python3.12/site-packages/_pytest/assertion/__init__.py new file mode 100644 index 0000000..22f3ca8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/assertion/__init__.py @@ -0,0 +1,208 @@ +# mypy: allow-untyped-defs +"""Support for presenting detailed information in failing assertions.""" + +from __future__ import annotations + +from collections.abc import Generator +import sys +from typing import Any +from typing import Protocol +from typing import TYPE_CHECKING + +from _pytest.assertion import rewrite +from _pytest.assertion import truncate +from _pytest.assertion import util +from _pytest.assertion.rewrite import assertstate_key +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.nodes import Item + + +if TYPE_CHECKING: + from _pytest.main import Session + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("debugconfig") + group.addoption( + "--assert", + action="store", + dest="assertmode", + choices=("rewrite", "plain"), + default="rewrite", + metavar="MODE", + help=( + "Control assertion debugging tools.\n" + "'plain' performs no assertion debugging.\n" + "'rewrite' (the default) rewrites assert statements in test modules" + " on import to provide assert expression information." + ), + ) + parser.addini( + "enable_assertion_pass_hook", + type="bool", + default=False, + help="Enables the pytest_assertion_pass hook. " + "Make sure to delete any previously generated pyc cache files.", + ) + + parser.addini( + "truncation_limit_lines", + default=None, + help="Set threshold of LINES after which truncation will take effect", + ) + parser.addini( + "truncation_limit_chars", + default=None, + help=("Set threshold of CHARS after which truncation will take effect"), + ) + + Config._add_verbosity_ini( + parser, + Config.VERBOSITY_ASSERTIONS, + help=( + "Specify a verbosity level for assertions, overriding the main level. " + "Higher levels will provide more detailed explanation when an assertion fails." + ), + ) + + +def register_assert_rewrite(*names: str) -> None: + """Register one or more module names to be rewritten on import. + + This function will make sure that this module or all modules inside + the package will get their assert statements rewritten. + Thus you should make sure to call this before the module is + actually imported, usually in your __init__.py if you are a plugin + using a package. + + :param names: The module names to register. + """ + for name in names: + if not isinstance(name, str): + msg = "expected module names as *args, got {0} instead" # type: ignore[unreachable] + raise TypeError(msg.format(repr(names))) + rewrite_hook: RewriteHook + for hook in sys.meta_path: + if isinstance(hook, rewrite.AssertionRewritingHook): + rewrite_hook = hook + break + else: + rewrite_hook = DummyRewriteHook() + rewrite_hook.mark_rewrite(*names) + + +class RewriteHook(Protocol): + def mark_rewrite(self, *names: str) -> None: ... 
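# (Illustrative sketch, not part of the upstream file.) As the register_assert_rewrite()
# docstring above describes, a plugin shipped as a package would typically register its
# helper modules before importing them, usually from its own __init__.py; the name
# "myplugin.helpers" below is hypothetical:
#
#     import pytest
#
#     pytest.register_assert_rewrite("myplugin.helpers")
#     from myplugin import helpers  # asserts in helpers are now rewritten on import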
+ + +class DummyRewriteHook: + """A no-op import hook for when rewriting is disabled.""" + + def mark_rewrite(self, *names: str) -> None: + pass + + +class AssertionState: + """State for the assertion plugin.""" + + def __init__(self, config: Config, mode) -> None: + self.mode = mode + self.trace = config.trace.root.get("assertion") + self.hook: rewrite.AssertionRewritingHook | None = None + + +def install_importhook(config: Config) -> rewrite.AssertionRewritingHook: + """Try to install the rewrite hook, raise SystemError if it fails.""" + config.stash[assertstate_key] = AssertionState(config, "rewrite") + config.stash[assertstate_key].hook = hook = rewrite.AssertionRewritingHook(config) + sys.meta_path.insert(0, hook) + config.stash[assertstate_key].trace("installed rewrite import hook") + + def undo() -> None: + hook = config.stash[assertstate_key].hook + if hook is not None and hook in sys.meta_path: + sys.meta_path.remove(hook) + + config.add_cleanup(undo) + return hook + + +def pytest_collection(session: Session) -> None: + # This hook is only called when test modules are collected + # so for example not in the managing process of pytest-xdist + # (which does not collect test modules). + assertstate = session.config.stash.get(assertstate_key, None) + if assertstate: + if assertstate.hook is not None: + assertstate.hook.set_session(session) + + +@hookimpl(wrapper=True, tryfirst=True) +def pytest_runtest_protocol(item: Item) -> Generator[None, object, object]: + """Setup the pytest_assertrepr_compare and pytest_assertion_pass hooks. + + The rewrite module will use util._reprcompare if it exists to use custom + reporting via the pytest_assertrepr_compare hook. This sets up this custom + comparison for the test. + """ + ihook = item.ihook + + def callbinrepr(op, left: object, right: object) -> str | None: + """Call the pytest_assertrepr_compare hook and prepare the result. + + This uses the first result from the hook and then ensures the + following: + * Overly verbose explanations are truncated unless configured otherwise + (eg. if running in verbose mode). + * Embedded newlines are escaped to help util.format_explanation() + later. + * If the rewrite mode is used embedded %-characters are replaced + to protect later % formatting. + + The result can be formatted by util.format_explanation() for + pretty printing. 
+ """ + hook_result = ihook.pytest_assertrepr_compare( + config=item.config, op=op, left=left, right=right + ) + for new_expl in hook_result: + if new_expl: + new_expl = truncate.truncate_if_required(new_expl, item) + new_expl = [line.replace("\n", "\\n") for line in new_expl] + res = "\n~".join(new_expl) + if item.config.getvalue("assertmode") == "rewrite": + res = res.replace("%", "%%") + return res + return None + + saved_assert_hooks = util._reprcompare, util._assertion_pass + util._reprcompare = callbinrepr + util._config = item.config + + if ihook.pytest_assertion_pass.get_hookimpls(): + + def call_assertion_pass_hook(lineno: int, orig: str, expl: str) -> None: + ihook.pytest_assertion_pass(item=item, lineno=lineno, orig=orig, expl=expl) + + util._assertion_pass = call_assertion_pass_hook + + try: + return (yield) + finally: + util._reprcompare, util._assertion_pass = saved_assert_hooks + util._config = None + + +def pytest_sessionfinish(session: Session) -> None: + assertstate = session.config.stash.get(assertstate_key, None) + if assertstate: + if assertstate.hook is not None: + assertstate.hook.set_session(None) + + +def pytest_assertrepr_compare( + config: Config, op: str, left: Any, right: Any +) -> list[str] | None: + return util.assertrepr_compare(config=config, op=op, left=left, right=right) diff --git a/.venv/lib/python3.12/site-packages/_pytest/assertion/rewrite.py b/.venv/lib/python3.12/site-packages/_pytest/assertion/rewrite.py new file mode 100644 index 0000000..566549d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/assertion/rewrite.py @@ -0,0 +1,1202 @@ +"""Rewrite assertion AST to produce nice error messages.""" + +from __future__ import annotations + +import ast +from collections import defaultdict +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Sequence +import errno +import functools +import importlib.abc +import importlib.machinery +import importlib.util +import io +import itertools +import marshal +import os +from pathlib import Path +from pathlib import PurePath +import struct +import sys +import tokenize +import types +from typing import IO +from typing import TYPE_CHECKING + + +if sys.version_info >= (3, 12): + from importlib.resources.abc import TraversableResources +else: + from importlib.abc import TraversableResources +if sys.version_info < (3, 11): + from importlib.readers import FileReader +else: + from importlib.resources.readers import FileReader + + +from _pytest._io.saferepr import DEFAULT_REPR_MAX_SIZE +from _pytest._io.saferepr import saferepr +from _pytest._io.saferepr import saferepr_unlimited +from _pytest._version import version +from _pytest.assertion import util +from _pytest.config import Config +from _pytest.fixtures import FixtureFunctionDefinition +from _pytest.main import Session +from _pytest.pathlib import absolutepath +from _pytest.pathlib import fnmatch_ex +from _pytest.stash import StashKey + + +# fmt: off +from _pytest.assertion.util import format_explanation as _format_explanation # noqa:F401, isort:skip +# fmt:on + +if TYPE_CHECKING: + from _pytest.assertion import AssertionState + + +class Sentinel: + pass + + +assertstate_key = StashKey["AssertionState"]() + +# pytest caches rewritten pycs in pycache dirs +PYTEST_TAG = f"{sys.implementation.cache_tag}-pytest-{version}" +PYC_EXT = ".py" + ((__debug__ and "c") or "o") +PYC_TAIL = "." 
+ PYTEST_TAG + PYC_EXT + +# Special marker that denotes we have just left a scope definition +_SCOPE_END_MARKER = Sentinel() + + +class AssertionRewritingHook(importlib.abc.MetaPathFinder, importlib.abc.Loader): + """PEP302/PEP451 import hook which rewrites asserts.""" + + def __init__(self, config: Config) -> None: + self.config = config + try: + self.fnpats = config.getini("python_files") + except ValueError: + self.fnpats = ["test_*.py", "*_test.py"] + self.session: Session | None = None + self._rewritten_names: dict[str, Path] = {} + self._must_rewrite: set[str] = set() + # flag to guard against trying to rewrite a pyc file while we are already writing another pyc file, + # which might result in infinite recursion (#3506) + self._writing_pyc = False + self._basenames_to_check_rewrite = {"conftest"} + self._marked_for_rewrite_cache: dict[str, bool] = {} + self._session_paths_checked = False + + def set_session(self, session: Session | None) -> None: + self.session = session + self._session_paths_checked = False + + # Indirection so we can mock calls to find_spec originated from the hook during testing + _find_spec = importlib.machinery.PathFinder.find_spec + + def find_spec( + self, + name: str, + path: Sequence[str | bytes] | None = None, + target: types.ModuleType | None = None, + ) -> importlib.machinery.ModuleSpec | None: + if self._writing_pyc: + return None + state = self.config.stash[assertstate_key] + if self._early_rewrite_bailout(name, state): + return None + state.trace(f"find_module called for: {name}") + + # Type ignored because mypy is confused about the `self` binding here. + spec = self._find_spec(name, path) # type: ignore + + if spec is None and path is not None: + # With --import-mode=importlib, PathFinder cannot find spec without modifying `sys.path`, + # causing inability to assert rewriting (#12659). + # At this point, try using the file path to find the module spec. + for _path_str in path: + spec = importlib.util.spec_from_file_location(name, _path_str) + if spec is not None: + break + + if ( + # the import machinery could not find a file to import + spec is None + # this is a namespace package (without `__init__.py`) + # there's nothing to rewrite there + or spec.origin is None + # we can only rewrite source files + or not isinstance(spec.loader, importlib.machinery.SourceFileLoader) + # if the file doesn't exist, we can't rewrite it + or not os.path.exists(spec.origin) + ): + return None + else: + fn = spec.origin + + if not self._should_rewrite(name, fn, state): + return None + + return importlib.util.spec_from_file_location( + name, + fn, + loader=self, + submodule_search_locations=spec.submodule_search_locations, + ) + + def create_module( + self, spec: importlib.machinery.ModuleSpec + ) -> types.ModuleType | None: + return None # default behaviour is fine + + def exec_module(self, module: types.ModuleType) -> None: + assert module.__spec__ is not None + assert module.__spec__.origin is not None + fn = Path(module.__spec__.origin) + state = self.config.stash[assertstate_key] + + self._rewritten_names[module.__name__] = fn + + # The requested module looks like a test file, so rewrite it. This is + # the most magical part of the process: load the source, rewrite the + # asserts, and load the rewritten source. We also cache the rewritten + # module code in a special pyc. We must be aware of the possibility of + # concurrent pytest processes rewriting and loading pycs. 
To avoid + # tricky race conditions, we maintain the following invariant: The + # cached pyc is always a complete, valid pyc. Operations on it must be + # atomic. POSIX's atomic rename comes in handy. + write = not sys.dont_write_bytecode + cache_dir = get_cache_dir(fn) + if write: + ok = try_makedirs(cache_dir) + if not ok: + write = False + state.trace(f"read only directory: {cache_dir}") + + cache_name = fn.name[:-3] + PYC_TAIL + pyc = cache_dir / cache_name + # Notice that even if we're in a read-only directory, I'm going + # to check for a cached pyc. This may not be optimal... + co = _read_pyc(fn, pyc, state.trace) + if co is None: + state.trace(f"rewriting {fn!r}") + source_stat, co = _rewrite_test(fn, self.config) + if write: + self._writing_pyc = True + try: + _write_pyc(state, co, source_stat, pyc) + finally: + self._writing_pyc = False + else: + state.trace(f"found cached rewritten pyc for {fn}") + exec(co, module.__dict__) + + def _early_rewrite_bailout(self, name: str, state: AssertionState) -> bool: + """A fast way to get out of rewriting modules. + + Profiling has shown that the call to PathFinder.find_spec (inside of + the find_spec from this class) is a major slowdown, so, this method + tries to filter what we're sure won't be rewritten before getting to + it. + """ + if self.session is not None and not self._session_paths_checked: + self._session_paths_checked = True + for initial_path in self.session._initialpaths: + # Make something as c:/projects/my_project/path.py -> + # ['c:', 'projects', 'my_project', 'path.py'] + parts = str(initial_path).split(os.sep) + # add 'path' to basenames to be checked. + self._basenames_to_check_rewrite.add(os.path.splitext(parts[-1])[0]) + + # Note: conftest already by default in _basenames_to_check_rewrite. + parts = name.split(".") + if parts[-1] in self._basenames_to_check_rewrite: + return False + + # For matching the name it must be as if it was a filename. 
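# (Annotation, not part of the upstream file.) e.g. a module name such as
# "pkg.sub.test_mod" becomes PurePath("pkg/sub/test_mod.py"), which can then be matched
# against the configured python_files patterns such as "test_*.py" or "*_test.py".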
+ path = PurePath(*parts).with_suffix(".py") + + for pat in self.fnpats: + # if the pattern contains subdirectories ("tests/**.py" for example) we can't bail out based + # on the name alone because we need to match against the full path + if os.path.dirname(pat): + return False + if fnmatch_ex(pat, path): + return False + + if self._is_marked_for_rewrite(name, state): + return False + + state.trace(f"early skip of rewriting module: {name}") + return True + + def _should_rewrite(self, name: str, fn: str, state: AssertionState) -> bool: + # always rewrite conftest files + if os.path.basename(fn) == "conftest.py": + state.trace(f"rewriting conftest file: {fn!r}") + return True + + if self.session is not None: + if self.session.isinitpath(absolutepath(fn)): + state.trace(f"matched test file (was specified on cmdline): {fn!r}") + return True + + # modules not passed explicitly on the command line are only + # rewritten if they match the naming convention for test files + fn_path = PurePath(fn) + for pat in self.fnpats: + if fnmatch_ex(pat, fn_path): + state.trace(f"matched test file {fn!r}") + return True + + return self._is_marked_for_rewrite(name, state) + + def _is_marked_for_rewrite(self, name: str, state: AssertionState) -> bool: + try: + return self._marked_for_rewrite_cache[name] + except KeyError: + for marked in self._must_rewrite: + if name == marked or name.startswith(marked + "."): + state.trace(f"matched marked file {name!r} (from {marked!r})") + self._marked_for_rewrite_cache[name] = True + return True + + self._marked_for_rewrite_cache[name] = False + return False + + def mark_rewrite(self, *names: str) -> None: + """Mark import names as needing to be rewritten. + + The named module or package as well as any nested modules will + be rewritten on import. + """ + already_imported = ( + set(names).intersection(sys.modules).difference(self._rewritten_names) + ) + for name in already_imported: + mod = sys.modules[name] + if not AssertionRewriter.is_rewrite_disabled( + mod.__doc__ or "" + ) and not isinstance(mod.__loader__, type(self)): + self._warn_already_imported(name) + self._must_rewrite.update(names) + self._marked_for_rewrite_cache.clear() + + def _warn_already_imported(self, name: str) -> None: + from _pytest.warning_types import PytestAssertRewriteWarning + + self.config.issue_config_time_warning( + PytestAssertRewriteWarning( + f"Module already imported so cannot be rewritten; {name}" + ), + stacklevel=5, + ) + + def get_data(self, pathname: str | bytes) -> bytes: + """Optional PEP302 get_data API.""" + with open(pathname, "rb") as f: + return f.read() + + def get_resource_reader(self, name: str) -> TraversableResources: + return FileReader(types.SimpleNamespace(path=self._rewritten_names[name])) # type: ignore[arg-type] + + +def _write_pyc_fp( + fp: IO[bytes], source_stat: os.stat_result, co: types.CodeType +) -> None: + # Technically, we don't have to have the same pyc format as + # (C)Python, since these "pycs" should never be seen by builtin + # import. However, there's little reason to deviate. 
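# (Annotation, not part of the upstream file.) The 16-byte header written below mirrors
# what _read_pyc() checks further down: 4 bytes of import magic number, 4 zero flag
# bytes (timestamp-based invalidation per PEP 552), then the source mtime and size as
# 32-bit little-endian integers.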
+ fp.write(importlib.util.MAGIC_NUMBER) + # https://www.python.org/dev/peps/pep-0552/ + flags = b"\x00\x00\x00\x00" + fp.write(flags) + # as of now, bytecode header expects 32-bit numbers for size and mtime (#4903) + mtime = int(source_stat.st_mtime) & 0xFFFFFFFF + size = source_stat.st_size & 0xFFFFFFFF + # " bool: + proc_pyc = f"{pyc}.{os.getpid()}" + try: + with open(proc_pyc, "wb") as fp: + _write_pyc_fp(fp, source_stat, co) + except OSError as e: + state.trace(f"error writing pyc file at {proc_pyc}: errno={e.errno}") + return False + + try: + os.replace(proc_pyc, pyc) + except OSError as e: + state.trace(f"error writing pyc file at {pyc}: {e}") + # we ignore any failure to write the cache file + # there are many reasons, permission-denied, pycache dir being a + # file etc. + return False + return True + + +def _rewrite_test(fn: Path, config: Config) -> tuple[os.stat_result, types.CodeType]: + """Read and rewrite *fn* and return the code object.""" + stat = os.stat(fn) + source = fn.read_bytes() + strfn = str(fn) + tree = ast.parse(source, filename=strfn) + rewrite_asserts(tree, source, strfn, config) + co = compile(tree, strfn, "exec", dont_inherit=True) + return stat, co + + +def _read_pyc( + source: Path, pyc: Path, trace: Callable[[str], None] = lambda x: None +) -> types.CodeType | None: + """Possibly read a pytest pyc containing rewritten code. + + Return rewritten code if successful or None if not. + """ + try: + fp = open(pyc, "rb") + except OSError: + return None + with fp: + try: + stat_result = os.stat(source) + mtime = int(stat_result.st_mtime) + size = stat_result.st_size + data = fp.read(16) + except OSError as e: + trace(f"_read_pyc({source}): OSError {e}") + return None + # Check for invalid or out of date pyc file. + if len(data) != (16): + trace(f"_read_pyc({source}): invalid pyc (too short)") + return None + if data[:4] != importlib.util.MAGIC_NUMBER: + trace(f"_read_pyc({source}): invalid pyc (bad magic number)") + return None + if data[4:8] != b"\x00\x00\x00\x00": + trace(f"_read_pyc({source}): invalid pyc (unsupported flags)") + return None + mtime_data = data[8:12] + if int.from_bytes(mtime_data, "little") != mtime & 0xFFFFFFFF: + trace(f"_read_pyc({source}): out of date") + return None + size_data = data[12:16] + if int.from_bytes(size_data, "little") != size & 0xFFFFFFFF: + trace(f"_read_pyc({source}): invalid pyc (incorrect size)") + return None + try: + co = marshal.load(fp) + except Exception as e: + trace(f"_read_pyc({source}): marshal.load error {e}") + return None + if not isinstance(co, types.CodeType): + trace(f"_read_pyc({source}): not a code object") + return None + return co + + +def rewrite_asserts( + mod: ast.Module, + source: bytes, + module_path: str | None = None, + config: Config | None = None, +) -> None: + """Rewrite the assert statements in mod.""" + AssertionRewriter(module_path, config, source).run(mod) + + +def _saferepr(obj: object) -> str: + r"""Get a safe repr of an object for assertion error messages. + + The assertion formatting (util.format_explanation()) requires + newlines to be escaped since they are a special character for it. + Normally assertion.util.format_explanation() does this but for a + custom repr it is possible to contain one of the special escape + sequences, especially '\n{' and '\n}' are likely to be present in + JSON reprs. 
+ """ + if isinstance(obj, types.MethodType): + # for bound methods, skip redundant information + return obj.__name__ + + maxsize = _get_maxsize_for_saferepr(util._config) + if not maxsize: + return saferepr_unlimited(obj).replace("\n", "\\n") + return saferepr(obj, maxsize=maxsize).replace("\n", "\\n") + + +def _get_maxsize_for_saferepr(config: Config | None) -> int | None: + """Get `maxsize` configuration for saferepr based on the given config object.""" + if config is None: + verbosity = 0 + else: + verbosity = config.get_verbosity(Config.VERBOSITY_ASSERTIONS) + if verbosity >= 2: + return None + if verbosity >= 1: + return DEFAULT_REPR_MAX_SIZE * 10 + return DEFAULT_REPR_MAX_SIZE + + +def _format_assertmsg(obj: object) -> str: + r"""Format the custom assertion message given. + + For strings this simply replaces newlines with '\n~' so that + util.format_explanation() will preserve them instead of escaping + newlines. For other objects saferepr() is used first. + """ + # reprlib appears to have a bug which means that if a string + # contains a newline it gets escaped, however if an object has a + # .__repr__() which contains newlines it does not get escaped. + # However in either case we want to preserve the newline. + replaces = [("\n", "\n~"), ("%", "%%")] + if not isinstance(obj, str): + obj = saferepr(obj, _get_maxsize_for_saferepr(util._config)) + replaces.append(("\\n", "\n~")) + + for r1, r2 in replaces: + obj = obj.replace(r1, r2) + + return obj + + +def _should_repr_global_name(obj: object) -> bool: + if callable(obj): + # For pytest fixtures the __repr__ method provides more information than the function name. + return isinstance(obj, FixtureFunctionDefinition) + + try: + return not hasattr(obj, "__name__") + except Exception: + return True + + +def _format_boolop(explanations: Iterable[str], is_or: bool) -> str: + explanation = "(" + ((is_or and " or ") or " and ").join(explanations) + ")" + return explanation.replace("%", "%%") + + +def _call_reprcompare( + ops: Sequence[str], + results: Sequence[bool], + expls: Sequence[str], + each_obj: Sequence[object], +) -> str: + for i, res, expl in zip(range(len(ops)), results, expls, strict=True): + try: + done = not res + except Exception: + done = True + if done: + break + if util._reprcompare is not None: + custom = util._reprcompare(ops[i], each_obj[i], each_obj[i + 1]) + if custom is not None: + return custom + return expl + + +def _call_assertion_pass(lineno: int, orig: str, expl: str) -> None: + if util._assertion_pass is not None: + util._assertion_pass(lineno, orig, expl) + + +def _check_if_assertion_pass_impl() -> bool: + """Check if any plugins implement the pytest_assertion_pass hook + in order not to generate explanation unnecessarily (might be expensive).""" + return True if util._assertion_pass else False + + +UNARY_MAP = {ast.Not: "not %s", ast.Invert: "~%s", ast.USub: "-%s", ast.UAdd: "+%s"} + +BINOP_MAP = { + ast.BitOr: "|", + ast.BitXor: "^", + ast.BitAnd: "&", + ast.LShift: "<<", + ast.RShift: ">>", + ast.Add: "+", + ast.Sub: "-", + ast.Mult: "*", + ast.Div: "/", + ast.FloorDiv: "//", + ast.Mod: "%%", # escaped for string formatting + ast.Eq: "==", + ast.NotEq: "!=", + ast.Lt: "<", + ast.LtE: "<=", + ast.Gt: ">", + ast.GtE: ">=", + ast.Pow: "**", + ast.Is: "is", + ast.IsNot: "is not", + ast.In: "in", + ast.NotIn: "not in", + ast.MatMult: "@", +} + + +def traverse_node(node: ast.AST) -> Iterator[ast.AST]: + """Recursively yield node and all its children in depth-first order.""" + yield node + for child in 
ast.iter_child_nodes(node): + yield from traverse_node(child) + + +@functools.lru_cache(maxsize=1) +def _get_assertion_exprs(src: bytes) -> dict[int, str]: + """Return a mapping from {lineno: "assertion test expression"}.""" + ret: dict[int, str] = {} + + depth = 0 + lines: list[str] = [] + assert_lineno: int | None = None + seen_lines: set[int] = set() + + def _write_and_reset() -> None: + nonlocal depth, lines, assert_lineno, seen_lines + assert assert_lineno is not None + ret[assert_lineno] = "".join(lines).rstrip().rstrip("\\") + depth = 0 + lines = [] + assert_lineno = None + seen_lines = set() + + tokens = tokenize.tokenize(io.BytesIO(src).readline) + for tp, source, (lineno, offset), _, line in tokens: + if tp == tokenize.NAME and source == "assert": + assert_lineno = lineno + elif assert_lineno is not None: + # keep track of depth for the assert-message `,` lookup + if tp == tokenize.OP and source in "([{": + depth += 1 + elif tp == tokenize.OP and source in ")]}": + depth -= 1 + + if not lines: + lines.append(line[offset:]) + seen_lines.add(lineno) + # a non-nested comma separates the expression from the message + elif depth == 0 and tp == tokenize.OP and source == ",": + # one line assert with message + if lineno in seen_lines and len(lines) == 1: + offset_in_trimmed = offset + len(lines[-1]) - len(line) + lines[-1] = lines[-1][:offset_in_trimmed] + # multi-line assert with message + elif lineno in seen_lines: + lines[-1] = lines[-1][:offset] + # multi line assert with escaped newline before message + else: + lines.append(line[:offset]) + _write_and_reset() + elif tp in {tokenize.NEWLINE, tokenize.ENDMARKER}: + _write_and_reset() + elif lines and lineno not in seen_lines: + lines.append(line) + seen_lines.add(lineno) + + return ret + + +class AssertionRewriter(ast.NodeVisitor): + """Assertion rewriting implementation. + + The main entrypoint is to call .run() with an ast.Module instance, + this will then find all the assert statements and rewrite them to + provide intermediate values and a detailed assertion error. See + http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html + for an overview of how this works. + + The entry point here is .run() which will iterate over all the + statements in an ast.Module and for each ast.Assert statement it + finds call .visit() with it. Then .visit_Assert() takes over and + is responsible for creating new ast statements to replace the + original assert statement: it rewrites the test of an assertion + to provide intermediate values and replace it with an if statement + which raises an assertion error with a detailed explanation in + case the expression is false and calls pytest_assertion_pass hook + if expression is true. + + For this .visit_Assert() uses the visitor pattern to visit all the + AST nodes of the ast.Assert.test field, each visit call returning + an AST node and the corresponding explanation string. During this + state is kept in several instance attributes: + + :statements: All the AST statements which will replace the assert + statement. + + :variables: This is populated by .variable() with each variable + used by the statements so that they can all be set to None at + the end of the statements. + + :variable_counter: Counter to create new unique variables needed + by statements. Variables are created using .variable() and + have the form of "@py_assert0". + + :expl_stmts: The AST statements which will be executed to get + data from the assertion. 
This is the code which will construct + the detailed assertion message that is used in the AssertionError + or for the pytest_assertion_pass hook. + + :explanation_specifiers: A dict filled by .explanation_param() + with %-formatting placeholders and their corresponding + expressions to use in the building of an assertion message. + This is used by .pop_format_context() to build a message. + + :stack: A stack of the explanation_specifiers dicts maintained by + .push_format_context() and .pop_format_context() which allows + to build another %-formatted string while already building one. + + :scope: A tuple containing the current scope used for variables_overwrite. + + :variables_overwrite: A dict filled with references to variables + that change value within an assert. This happens when a variable is + reassigned with the walrus operator + + This state, except the variables_overwrite, is reset on every new assert + statement visited and used by the other visitors. + """ + + def __init__( + self, module_path: str | None, config: Config | None, source: bytes + ) -> None: + super().__init__() + self.module_path = module_path + self.config = config + if config is not None: + self.enable_assertion_pass_hook = config.getini( + "enable_assertion_pass_hook" + ) + else: + self.enable_assertion_pass_hook = False + self.source = source + self.scope: tuple[ast.AST, ...] = () + self.variables_overwrite: defaultdict[tuple[ast.AST, ...], dict[str, str]] = ( + defaultdict(dict) + ) + + def run(self, mod: ast.Module) -> None: + """Find all assert statements in *mod* and rewrite them.""" + if not mod.body: + # Nothing to do. + return + + # We'll insert some special imports at the top of the module, but after any + # docstrings and __future__ imports, so first figure out where that is. + doc = getattr(mod, "docstring", None) + expect_docstring = doc is None + if doc is not None and self.is_rewrite_disabled(doc): + return + pos = 0 + for item in mod.body: + match item: + case ast.Expr(value=ast.Constant(value=str() as doc)) if ( + expect_docstring + ): + if self.is_rewrite_disabled(doc): + return + expect_docstring = False + case ast.ImportFrom(level=0, module="__future__"): + pass + case _: + break + pos += 1 + # Special case: for a decorated function, set the lineno to that of the + # first decorator, not the `def`. Issue #4984. + if isinstance(item, ast.FunctionDef) and item.decorator_list: + lineno = item.decorator_list[0].lineno + else: + lineno = item.lineno + # Now actually insert the special imports. + aliases = [ + ast.alias("builtins", "@py_builtins", lineno=lineno, col_offset=0), + ast.alias( + "_pytest.assertion.rewrite", + "@pytest_ar", + lineno=lineno, + col_offset=0, + ), + ] + imports = [ + ast.Import([alias], lineno=lineno, col_offset=0) for alias in aliases + ] + mod.body[pos:pos] = imports + + # Collect asserts. + self.scope = (mod,) + nodes: list[ast.AST | Sentinel] = [mod] + while nodes: + node = nodes.pop() + if isinstance(node, ast.FunctionDef | ast.AsyncFunctionDef | ast.ClassDef): + self.scope = tuple((*self.scope, node)) + nodes.append(_SCOPE_END_MARKER) + if node == _SCOPE_END_MARKER: + self.scope = self.scope[:-1] + continue + assert isinstance(node, ast.AST) + for name, field in ast.iter_fields(node): + if isinstance(field, list): + new: list[ast.AST] = [] + for i, child in enumerate(field): + if isinstance(child, ast.Assert): + # Transform assert. 
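+                            # Editor note (sketch): self.visit() dispatches to
+                            # visit_Assert(), which returns the list of
+                            # statements replacing this one assert node, so
+                            # extend() splices them into the rewritten body.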
+ new.extend(self.visit(child)) + else: + new.append(child) + if isinstance(child, ast.AST): + nodes.append(child) + setattr(node, name, new) + elif ( + isinstance(field, ast.AST) + # Don't recurse into expressions as they can't contain + # asserts. + and not isinstance(field, ast.expr) + ): + nodes.append(field) + + @staticmethod + def is_rewrite_disabled(docstring: str) -> bool: + return "PYTEST_DONT_REWRITE" in docstring + + def variable(self) -> str: + """Get a new variable.""" + # Use a character invalid in python identifiers to avoid clashing. + name = "@py_assert" + str(next(self.variable_counter)) + self.variables.append(name) + return name + + def assign(self, expr: ast.expr) -> ast.Name: + """Give *expr* a name.""" + name = self.variable() + self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr)) + return ast.copy_location(ast.Name(name, ast.Load()), expr) + + def display(self, expr: ast.expr) -> ast.expr: + """Call saferepr on the expression.""" + return self.helper("_saferepr", expr) + + def helper(self, name: str, *args: ast.expr) -> ast.expr: + """Call a helper in this module.""" + py_name = ast.Name("@pytest_ar", ast.Load()) + attr = ast.Attribute(py_name, name, ast.Load()) + return ast.Call(attr, list(args), []) + + def builtin(self, name: str) -> ast.Attribute: + """Return the builtin called *name*.""" + builtin_name = ast.Name("@py_builtins", ast.Load()) + return ast.Attribute(builtin_name, name, ast.Load()) + + def explanation_param(self, expr: ast.expr) -> str: + """Return a new named %-formatting placeholder for expr. + + This creates a %-formatting placeholder for expr in the + current formatting context, e.g. ``%(py0)s``. The placeholder + and expr are placed in the current format context so that it + can be used on the next call to .pop_format_context(). + """ + specifier = "py" + str(next(self.variable_counter)) + self.explanation_specifiers[specifier] = expr + return "%(" + specifier + ")s" + + def push_format_context(self) -> None: + """Create a new formatting context. + + The format context is used for when an explanation wants to + have a variable value formatted in the assertion message. In + this case the value required can be added using + .explanation_param(). Finally .pop_format_context() is used + to format a string of %-formatted values as added by + .explanation_param(). + """ + self.explanation_specifiers: dict[str, ast.expr] = {} + self.stack.append(self.explanation_specifiers) + + def pop_format_context(self, expl_expr: ast.expr) -> ast.Name: + """Format the %-formatted string with current format context. + + The expl_expr should be an str ast.expr instance constructed from + the %-placeholders created by .explanation_param(). This will + add the required code to format said string to .expl_stmts and + return the ast.Name instance of the formatted string. 
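+
+        Editor sketch (illustrative, names invented): the statement appended
+        to .expl_stmts looks roughly like
+        ``@py_format2 = explanation_string % {'py0': expr0, ...}``.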
+ """ + current = self.stack.pop() + if self.stack: + self.explanation_specifiers = self.stack[-1] + keys: list[ast.expr | None] = [ast.Constant(key) for key in current.keys()] + format_dict = ast.Dict(keys, list(current.values())) + form = ast.BinOp(expl_expr, ast.Mod(), format_dict) + name = "@py_format" + str(next(self.variable_counter)) + if self.enable_assertion_pass_hook: + self.format_variables.append(name) + self.expl_stmts.append(ast.Assign([ast.Name(name, ast.Store())], form)) + return ast.Name(name, ast.Load()) + + def generic_visit(self, node: ast.AST) -> tuple[ast.Name, str]: + """Handle expressions we don't have custom code for.""" + assert isinstance(node, ast.expr) + res = self.assign(node) + return res, self.explanation_param(self.display(res)) + + def visit_Assert(self, assert_: ast.Assert) -> list[ast.stmt]: + """Return the AST statements to replace the ast.Assert instance. + + This rewrites the test of an assertion to provide + intermediate values and replace it with an if statement which + raises an assertion error with a detailed explanation in case + the expression is false. + """ + if isinstance(assert_.test, ast.Tuple) and len(assert_.test.elts) >= 1: + import warnings + + from _pytest.warning_types import PytestAssertRewriteWarning + + # TODO: This assert should not be needed. + assert self.module_path is not None + warnings.warn_explicit( + PytestAssertRewriteWarning( + "assertion is always true, perhaps remove parentheses?" + ), + category=None, + filename=self.module_path, + lineno=assert_.lineno, + ) + + self.statements: list[ast.stmt] = [] + self.variables: list[str] = [] + self.variable_counter = itertools.count() + + if self.enable_assertion_pass_hook: + self.format_variables: list[str] = [] + + self.stack: list[dict[str, ast.expr]] = [] + self.expl_stmts: list[ast.stmt] = [] + self.push_format_context() + # Rewrite assert into a bunch of statements. 
+ top_condition, explanation = self.visit(assert_.test) + + negation = ast.UnaryOp(ast.Not(), top_condition) + + if self.enable_assertion_pass_hook: # Experimental pytest_assertion_pass hook + msg = self.pop_format_context(ast.Constant(explanation)) + + # Failed + if assert_.msg: + assertmsg = self.helper("_format_assertmsg", assert_.msg) + gluestr = "\n>assert " + else: + assertmsg = ast.Constant("") + gluestr = "assert " + err_explanation = ast.BinOp(ast.Constant(gluestr), ast.Add(), msg) + err_msg = ast.BinOp(assertmsg, ast.Add(), err_explanation) + err_name = ast.Name("AssertionError", ast.Load()) + fmt = self.helper("_format_explanation", err_msg) + exc = ast.Call(err_name, [fmt], []) + raise_ = ast.Raise(exc, None) + statements_fail = [] + statements_fail.extend(self.expl_stmts) + statements_fail.append(raise_) + + # Passed + fmt_pass = self.helper("_format_explanation", msg) + orig = _get_assertion_exprs(self.source)[assert_.lineno] + hook_call_pass = ast.Expr( + self.helper( + "_call_assertion_pass", + ast.Constant(assert_.lineno), + ast.Constant(orig), + fmt_pass, + ) + ) + # If any hooks implement assert_pass hook + hook_impl_test = ast.If( + self.helper("_check_if_assertion_pass_impl"), + [*self.expl_stmts, hook_call_pass], + [], + ) + statements_pass: list[ast.stmt] = [hook_impl_test] + + # Test for assertion condition + main_test = ast.If(negation, statements_fail, statements_pass) + self.statements.append(main_test) + if self.format_variables: + variables: list[ast.expr] = [ + ast.Name(name, ast.Store()) for name in self.format_variables + ] + clear_format = ast.Assign(variables, ast.Constant(None)) + self.statements.append(clear_format) + + else: # Original assertion rewriting + # Create failure message. + body = self.expl_stmts + self.statements.append(ast.If(negation, body, [])) + if assert_.msg: + assertmsg = self.helper("_format_assertmsg", assert_.msg) + explanation = "\n>assert " + explanation + else: + assertmsg = ast.Constant("") + explanation = "assert " + explanation + template = ast.BinOp(assertmsg, ast.Add(), ast.Constant(explanation)) + msg = self.pop_format_context(template) + fmt = self.helper("_format_explanation", msg) + err_name = ast.Name("AssertionError", ast.Load()) + exc = ast.Call(err_name, [fmt], []) + raise_ = ast.Raise(exc, None) + + body.append(raise_) + + # Clear temporary variables by setting them to None. + if self.variables: + variables = [ast.Name(name, ast.Store()) for name in self.variables] + clear = ast.Assign(variables, ast.Constant(None)) + self.statements.append(clear) + # Fix locations (line numbers/column offsets). + for stmt in self.statements: + for node in traverse_node(stmt): + if getattr(node, "lineno", None) is None: + # apply the assertion location to all generated ast nodes without source location + # and preserve the location of existing nodes or generated nodes with an correct location. + ast.copy_location(node, assert_) + return self.statements + + def visit_NamedExpr(self, name: ast.NamedExpr) -> tuple[ast.NamedExpr, str]: + # This method handles the 'walrus operator' repr of the target + # name if it's a local variable or _should_repr_global_name() + # thinks it's acceptable. 
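+        # Editor note (illustrative): for "assert (n := compute()) > 0" the
+        # walrus target "n" is displayed just like a plain name, using locals()
+        # and _should_repr_global_name() exactly as visit_Name() does below.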
+ locs = ast.Call(self.builtin("locals"), [], []) + target_id = name.target.id + inlocs = ast.Compare(ast.Constant(target_id), [ast.In()], [locs]) + dorepr = self.helper("_should_repr_global_name", name) + test = ast.BoolOp(ast.Or(), [inlocs, dorepr]) + expr = ast.IfExp(test, self.display(name), ast.Constant(target_id)) + return name, self.explanation_param(expr) + + def visit_Name(self, name: ast.Name) -> tuple[ast.Name, str]: + # Display the repr of the name if it's a local variable or + # _should_repr_global_name() thinks it's acceptable. + locs = ast.Call(self.builtin("locals"), [], []) + inlocs = ast.Compare(ast.Constant(name.id), [ast.In()], [locs]) + dorepr = self.helper("_should_repr_global_name", name) + test = ast.BoolOp(ast.Or(), [inlocs, dorepr]) + expr = ast.IfExp(test, self.display(name), ast.Constant(name.id)) + return name, self.explanation_param(expr) + + def visit_BoolOp(self, boolop: ast.BoolOp) -> tuple[ast.Name, str]: + res_var = self.variable() + expl_list = self.assign(ast.List([], ast.Load())) + app = ast.Attribute(expl_list, "append", ast.Load()) + is_or = int(isinstance(boolop.op, ast.Or)) + body = save = self.statements + fail_save = self.expl_stmts + levels = len(boolop.values) - 1 + self.push_format_context() + # Process each operand, short-circuiting if needed. + for i, v in enumerate(boolop.values): + if i: + fail_inner: list[ast.stmt] = [] + # cond is set in a prior loop iteration below + self.expl_stmts.append(ast.If(cond, fail_inner, [])) # noqa: F821 + self.expl_stmts = fail_inner + match v: + # Check if the left operand is an ast.NamedExpr and the value has already been visited + case ast.Compare( + left=ast.NamedExpr(target=ast.Name(id=target_id)) + ) if target_id in [ + e.id for e in boolop.values[:i] if hasattr(e, "id") + ]: + pytest_temp = self.variable() + self.variables_overwrite[self.scope][target_id] = v.left # type:ignore[assignment] + # mypy's false positive, we're checking that the 'target' attribute exists. 
+ v.left.target.id = pytest_temp # type:ignore[attr-defined] + self.push_format_context() + res, expl = self.visit(v) + body.append(ast.Assign([ast.Name(res_var, ast.Store())], res)) + expl_format = self.pop_format_context(ast.Constant(expl)) + call = ast.Call(app, [expl_format], []) + self.expl_stmts.append(ast.Expr(call)) + if i < levels: + cond: ast.expr = res + if is_or: + cond = ast.UnaryOp(ast.Not(), cond) + inner: list[ast.stmt] = [] + self.statements.append(ast.If(cond, inner, [])) + self.statements = body = inner + self.statements = save + self.expl_stmts = fail_save + expl_template = self.helper("_format_boolop", expl_list, ast.Constant(is_or)) + expl = self.pop_format_context(expl_template) + return ast.Name(res_var, ast.Load()), self.explanation_param(expl) + + def visit_UnaryOp(self, unary: ast.UnaryOp) -> tuple[ast.Name, str]: + pattern = UNARY_MAP[unary.op.__class__] + operand_res, operand_expl = self.visit(unary.operand) + res = self.assign(ast.copy_location(ast.UnaryOp(unary.op, operand_res), unary)) + return res, pattern % (operand_expl,) + + def visit_BinOp(self, binop: ast.BinOp) -> tuple[ast.Name, str]: + symbol = BINOP_MAP[binop.op.__class__] + left_expr, left_expl = self.visit(binop.left) + right_expr, right_expl = self.visit(binop.right) + explanation = f"({left_expl} {symbol} {right_expl})" + res = self.assign( + ast.copy_location(ast.BinOp(left_expr, binop.op, right_expr), binop) + ) + return res, explanation + + def visit_Call(self, call: ast.Call) -> tuple[ast.Name, str]: + new_func, func_expl = self.visit(call.func) + arg_expls = [] + new_args = [] + new_kwargs = [] + for arg in call.args: + if isinstance(arg, ast.Name) and arg.id in self.variables_overwrite.get( + self.scope, {} + ): + arg = self.variables_overwrite[self.scope][arg.id] # type:ignore[assignment] + res, expl = self.visit(arg) + arg_expls.append(expl) + new_args.append(res) + for keyword in call.keywords: + match keyword.value: + case ast.Name(id=id) if id in self.variables_overwrite.get( + self.scope, {} + ): + keyword.value = self.variables_overwrite[self.scope][id] # type:ignore[assignment] + res, expl = self.visit(keyword.value) + new_kwargs.append(ast.keyword(keyword.arg, res)) + if keyword.arg: + arg_expls.append(keyword.arg + "=" + expl) + else: # **args have `arg` keywords with an .arg of None + arg_expls.append("**" + expl) + + expl = "{}({})".format(func_expl, ", ".join(arg_expls)) + new_call = ast.copy_location(ast.Call(new_func, new_args, new_kwargs), call) + res = self.assign(new_call) + res_expl = self.explanation_param(self.display(res)) + outer_expl = f"{res_expl}\n{{{res_expl} = {expl}\n}}" + return res, outer_expl + + def visit_Starred(self, starred: ast.Starred) -> tuple[ast.Starred, str]: + # A Starred node can appear in a function call. 
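+        # Editor note (illustrative): e.g. in "assert f(*args)" the unpacked
+        # operand is visited normally and "*" is prepended to its explanation.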
+ res, expl = self.visit(starred.value) + new_starred = ast.Starred(res, starred.ctx) + return new_starred, "*" + expl + + def visit_Attribute(self, attr: ast.Attribute) -> tuple[ast.Name, str]: + if not isinstance(attr.ctx, ast.Load): + return self.generic_visit(attr) + value, value_expl = self.visit(attr.value) + res = self.assign( + ast.copy_location(ast.Attribute(value, attr.attr, ast.Load()), attr) + ) + res_expl = self.explanation_param(self.display(res)) + pat = "%s\n{%s = %s.%s\n}" + expl = pat % (res_expl, res_expl, value_expl, attr.attr) + return res, expl + + def visit_Compare(self, comp: ast.Compare) -> tuple[ast.expr, str]: + self.push_format_context() + # We first check if we have overwritten a variable in the previous assert + match comp.left: + case ast.Name(id=name_id) if name_id in self.variables_overwrite.get( + self.scope, {} + ): + comp.left = self.variables_overwrite[self.scope][name_id] # type: ignore[assignment] + case ast.NamedExpr(target=ast.Name(id=target_id)): + self.variables_overwrite[self.scope][target_id] = comp.left # type: ignore[assignment] + left_res, left_expl = self.visit(comp.left) + if isinstance(comp.left, ast.Compare | ast.BoolOp): + left_expl = f"({left_expl})" + res_variables = [self.variable() for i in range(len(comp.ops))] + load_names: list[ast.expr] = [ast.Name(v, ast.Load()) for v in res_variables] + store_names = [ast.Name(v, ast.Store()) for v in res_variables] + it = zip(range(len(comp.ops)), comp.ops, comp.comparators, strict=True) + expls: list[ast.expr] = [] + syms: list[ast.expr] = [] + results = [left_res] + for i, op, next_operand in it: + match (next_operand, left_res): + case ( + ast.NamedExpr(target=ast.Name(id=target_id)), + ast.Name(id=name_id), + ) if target_id == name_id: + next_operand.target.id = self.variable() + self.variables_overwrite[self.scope][name_id] = next_operand # type: ignore[assignment] + + next_res, next_expl = self.visit(next_operand) + if isinstance(next_operand, ast.Compare | ast.BoolOp): + next_expl = f"({next_expl})" + results.append(next_res) + sym = BINOP_MAP[op.__class__] + syms.append(ast.Constant(sym)) + expl = f"{left_expl} {sym} {next_expl}" + expls.append(ast.Constant(expl)) + res_expr = ast.copy_location(ast.Compare(left_res, [op], [next_res]), comp) + self.statements.append(ast.Assign([store_names[i]], res_expr)) + left_res, left_expl = next_res, next_expl + # Use pytest.assertion.util._reprcompare if that's available. + expl_call = self.helper( + "_call_reprcompare", + ast.Tuple(syms, ast.Load()), + ast.Tuple(load_names, ast.Load()), + ast.Tuple(expls, ast.Load()), + ast.Tuple(results, ast.Load()), + ) + if len(comp.ops) > 1: + res: ast.expr = ast.BoolOp(ast.And(), load_names) + else: + res = load_names[0] + + return res, self.explanation_param(self.pop_format_context(expl_call)) + + +def try_makedirs(cache_dir: Path) -> bool: + """Attempt to create the given directory and sub-directories exist. + + Returns True if successful or if it already exists. 
+ """ + try: + os.makedirs(cache_dir, exist_ok=True) + except (FileNotFoundError, NotADirectoryError, FileExistsError): + # One of the path components was not a directory: + # - we're in a zip file + # - it is a file + return False + except PermissionError: + return False + except OSError as e: + # as of now, EROFS doesn't have an equivalent OSError-subclass + # + # squashfuse_ll returns ENOSYS "OSError: [Errno 38] Function not + # implemented" for a read-only error + if e.errno in {errno.EROFS, errno.ENOSYS}: + return False + raise + return True + + +def get_cache_dir(file_path: Path) -> Path: + """Return the cache directory to write .pyc files for the given .py file path.""" + if sys.pycache_prefix: + # given: + # prefix = '/tmp/pycs' + # path = '/home/user/proj/test_app.py' + # we want: + # '/tmp/pycs/home/user/proj' + return Path(sys.pycache_prefix) / Path(*file_path.parts[1:-1]) + else: + # classic pycache directory + return file_path.parent / "__pycache__" diff --git a/.venv/lib/python3.12/site-packages/_pytest/assertion/truncate.py b/.venv/lib/python3.12/site-packages/_pytest/assertion/truncate.py new file mode 100644 index 0000000..5820e6e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/assertion/truncate.py @@ -0,0 +1,137 @@ +"""Utilities for truncating assertion output. + +Current default behaviour is to truncate assertion explanations at +terminal lines, unless running with an assertions verbosity level of at least 2 or running on CI. +""" + +from __future__ import annotations + +from _pytest.compat import running_on_ci +from _pytest.config import Config +from _pytest.nodes import Item + + +DEFAULT_MAX_LINES = 8 +DEFAULT_MAX_CHARS = DEFAULT_MAX_LINES * 80 +USAGE_MSG = "use '-vv' to show" + + +def truncate_if_required(explanation: list[str], item: Item) -> list[str]: + """Truncate this assertion explanation if the given test item is eligible.""" + should_truncate, max_lines, max_chars = _get_truncation_parameters(item) + if should_truncate: + return _truncate_explanation( + explanation, + max_lines=max_lines, + max_chars=max_chars, + ) + return explanation + + +def _get_truncation_parameters(item: Item) -> tuple[bool, int, int]: + """Return the truncation parameters related to the given item, as (should truncate, max lines, max chars).""" + # We do not need to truncate if one of conditions is met: + # 1. Verbosity level is 2 or more; + # 2. Test is being run in CI environment; + # 3. Both truncation_limit_lines and truncation_limit_chars + # .ini parameters are set to 0 explicitly. + max_lines = item.config.getini("truncation_limit_lines") + max_lines = int(max_lines if max_lines is not None else DEFAULT_MAX_LINES) + + max_chars = item.config.getini("truncation_limit_chars") + max_chars = int(max_chars if max_chars is not None else DEFAULT_MAX_CHARS) + + verbose = item.config.get_verbosity(Config.VERBOSITY_ASSERTIONS) + + should_truncate = verbose < 2 and not running_on_ci() + should_truncate = should_truncate and (max_lines > 0 or max_chars > 0) + + return should_truncate, max_lines, max_chars + + +def _truncate_explanation( + input_lines: list[str], + max_lines: int, + max_chars: int, +) -> list[str]: + """Truncate given list of strings that makes up the assertion explanation. + + Truncates to either max_lines, or max_chars - whichever the input reaches + first, taking the truncation explanation into account. The remaining lines + will be replaced by a usage message. 
+ """ + # Check if truncation required + input_char_count = len("".join(input_lines)) + # The length of the truncation explanation depends on the number of lines + # removed but is at least 68 characters: + # The real value is + # 64 (for the base message: + # '...\n...Full output truncated (1 line hidden), use '-vv' to show")' + # ) + # + 1 (for plural) + # + int(math.log10(len(input_lines) - max_lines)) (number of hidden line, at least 1) + # + 3 for the '...' added to the truncated line + # But if there's more than 100 lines it's very likely that we're going to + # truncate, so we don't need the exact value using log10. + tolerable_max_chars = ( + max_chars + 70 # 64 + 1 (for plural) + 2 (for '99') + 3 for '...' + ) + # The truncation explanation add two lines to the output + tolerable_max_lines = max_lines + 2 + if ( + len(input_lines) <= tolerable_max_lines + and input_char_count <= tolerable_max_chars + ): + return input_lines + # Truncate first to max_lines, and then truncate to max_chars if necessary + if max_lines > 0: + truncated_explanation = input_lines[:max_lines] + else: + truncated_explanation = input_lines + truncated_char = True + # We reevaluate the need to truncate chars following removal of some lines + if len("".join(truncated_explanation)) > tolerable_max_chars and max_chars > 0: + truncated_explanation = _truncate_by_char_count( + truncated_explanation, max_chars + ) + else: + truncated_char = False + + if truncated_explanation == input_lines: + # No truncation happened, so we do not need to add any explanations + return truncated_explanation + + truncated_line_count = len(input_lines) - len(truncated_explanation) + if truncated_explanation[-1]: + # Add ellipsis and take into account part-truncated final line + truncated_explanation[-1] = truncated_explanation[-1] + "..." + if truncated_char: + # It's possible that we did not remove any char from this line + truncated_line_count += 1 + else: + # Add proper ellipsis when we were able to fit a full line exactly + truncated_explanation[-1] = "..." 
+ return [ + *truncated_explanation, + "", + f"...Full output truncated ({truncated_line_count} line" + f"{'' if truncated_line_count == 1 else 's'} hidden), {USAGE_MSG}", + ] + + +def _truncate_by_char_count(input_lines: list[str], max_chars: int) -> list[str]: + # Find point at which input length exceeds total allowed length + iterated_char_count = 0 + for iterated_index, input_line in enumerate(input_lines): + if iterated_char_count + len(input_line) > max_chars: + break + iterated_char_count += len(input_line) + + # Create truncated explanation with modified final line + truncated_result = input_lines[:iterated_index] + final_line = input_lines[iterated_index] + if final_line: + final_line_truncate_point = max_chars - iterated_char_count + final_line = final_line[:final_line_truncate_point] + truncated_result.append(final_line) + return truncated_result diff --git a/.venv/lib/python3.12/site-packages/_pytest/assertion/util.py b/.venv/lib/python3.12/site-packages/_pytest/assertion/util.py new file mode 100644 index 0000000..f35d83a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/assertion/util.py @@ -0,0 +1,615 @@ +# mypy: allow-untyped-defs +"""Utilities for assertion debugging.""" + +from __future__ import annotations + +import collections.abc +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Mapping +from collections.abc import Sequence +from collections.abc import Set as AbstractSet +import pprint +from typing import Any +from typing import Literal +from typing import Protocol +from unicodedata import normalize + +from _pytest import outcomes +import _pytest._code +from _pytest._io.pprint import PrettyPrinter +from _pytest._io.saferepr import saferepr +from _pytest._io.saferepr import saferepr_unlimited +from _pytest.compat import running_on_ci +from _pytest.config import Config + + +# The _reprcompare attribute on the util module is used by the new assertion +# interpretation code and assertion rewriter to detect this plugin was +# loaded and in turn call the hooks defined here as part of the +# DebugInterpreter. +_reprcompare: Callable[[str, object, object], str | None] | None = None + +# Works similarly as _reprcompare attribute. Is populated with the hook call +# when pytest_runtest_setup is called. +_assertion_pass: Callable[[int, str, str], None] | None = None + +# Config object which is assigned during pytest_runtest_protocol. +_config: Config | None = None + + +class _HighlightFunc(Protocol): + def __call__(self, source: str, lexer: Literal["diff", "python"] = "python") -> str: + """Apply highlighting to the given source.""" + + +def dummy_highlighter(source: str, lexer: Literal["diff", "python"] = "python") -> str: + """Dummy highlighter that returns the text unprocessed. + + Needed for _notin_text, as the diff gets post-processed to only show the "+" part. + """ + return source + + +def format_explanation(explanation: str) -> str: + r"""Format an explanation. + + Normally all embedded newlines are escaped, however there are + three exceptions: \n{, \n} and \n~. The first two are intended + cover nested explanations, see function and attribute explanations + for examples (.visit_Call(), visit_Attribute()). The last one is + for when one explanation needs to span multiple lines, e.g. when + displaying diffs. 
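+
+    Editor sketch (illustrative): an explanation such as
+    "assert x\n{x = f()\n}" is rendered roughly as "assert x" followed by an
+    indented " + where x = f()" continuation line.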
+ """ + lines = _split_explanation(explanation) + result = _format_lines(lines) + return "\n".join(result) + + +def _split_explanation(explanation: str) -> list[str]: + r"""Return a list of individual lines in the explanation. + + This will return a list of lines split on '\n{', '\n}' and '\n~'. + Any other newlines will be escaped and appear in the line as the + literal '\n' characters. + """ + raw_lines = (explanation or "").split("\n") + lines = [raw_lines[0]] + for values in raw_lines[1:]: + if values and values[0] in ["{", "}", "~", ">"]: + lines.append(values) + else: + lines[-1] += "\\n" + values + return lines + + +def _format_lines(lines: Sequence[str]) -> list[str]: + """Format the individual lines. + + This will replace the '{', '}' and '~' characters of our mini formatting + language with the proper 'where ...', 'and ...' and ' + ...' text, taking + care of indentation along the way. + + Return a list of formatted lines. + """ + result = list(lines[:1]) + stack = [0] + stackcnt = [0] + for line in lines[1:]: + if line.startswith("{"): + if stackcnt[-1]: + s = "and " + else: + s = "where " + stack.append(len(result)) + stackcnt[-1] += 1 + stackcnt.append(0) + result.append(" +" + " " * (len(stack) - 1) + s + line[1:]) + elif line.startswith("}"): + stack.pop() + stackcnt.pop() + result[stack[-1]] += line[1:] + else: + assert line[0] in ["~", ">"] + stack[-1] += 1 + indent = len(stack) if line.startswith("~") else len(stack) - 1 + result.append(" " * indent + line[1:]) + assert len(stack) == 1 + return result + + +def issequence(x: Any) -> bool: + return isinstance(x, collections.abc.Sequence) and not isinstance(x, str) + + +def istext(x: Any) -> bool: + return isinstance(x, str) + + +def isdict(x: Any) -> bool: + return isinstance(x, dict) + + +def isset(x: Any) -> bool: + return isinstance(x, set | frozenset) + + +def isnamedtuple(obj: Any) -> bool: + return isinstance(obj, tuple) and getattr(obj, "_fields", None) is not None + + +def isdatacls(obj: Any) -> bool: + return getattr(obj, "__dataclass_fields__", None) is not None + + +def isattrs(obj: Any) -> bool: + return getattr(obj, "__attrs_attrs__", None) is not None + + +def isiterable(obj: Any) -> bool: + try: + iter(obj) + return not istext(obj) + except Exception: + return False + + +def has_default_eq( + obj: object, +) -> bool: + """Check if an instance of an object contains the default eq + + First, we check if the object's __eq__ attribute has __code__, + if so, we check the equally of the method code filename (__code__.co_filename) + to the default one generated by the dataclass and attr module + for dataclasses the default co_filename is , for attrs class, the __eq__ should contain "attrs eq generated" + """ + # inspired from https://github.com/willmcgugan/rich/blob/07d51ffc1aee6f16bd2e5a25b4e82850fb9ed778/rich/pretty.py#L68 + if hasattr(obj.__eq__, "__code__") and hasattr(obj.__eq__.__code__, "co_filename"): + code_filename = obj.__eq__.__code__.co_filename + + if isattrs(obj): + return "attrs generated " in code_filename + + return code_filename == "" # data class + return True + + +def assertrepr_compare( + config, op: str, left: Any, right: Any, use_ascii: bool = False +) -> list[str] | None: + """Return specialised explanations for some operators/operands.""" + verbose = config.get_verbosity(Config.VERBOSITY_ASSERTIONS) + + # Strings which normalize equal are often hard to distinguish when printed; use ascii() to make this easier. + # See issue #3246. 
+ use_ascii = ( + isinstance(left, str) + and isinstance(right, str) + and normalize("NFD", left) == normalize("NFD", right) + ) + + if verbose > 1: + left_repr = saferepr_unlimited(left, use_ascii=use_ascii) + right_repr = saferepr_unlimited(right, use_ascii=use_ascii) + else: + # XXX: "15 chars indentation" is wrong + # ("E AssertionError: assert "); should use term width. + maxsize = ( + 80 - 15 - len(op) - 2 + ) // 2 # 15 chars indentation, 1 space around op + + left_repr = saferepr(left, maxsize=maxsize, use_ascii=use_ascii) + right_repr = saferepr(right, maxsize=maxsize, use_ascii=use_ascii) + + summary = f"{left_repr} {op} {right_repr}" + highlighter = config.get_terminal_writer()._highlight + + explanation = None + try: + if op == "==": + explanation = _compare_eq_any(left, right, highlighter, verbose) + elif op == "not in": + if istext(left) and istext(right): + explanation = _notin_text(left, right, verbose) + elif op == "!=": + if isset(left) and isset(right): + explanation = ["Both sets are equal"] + elif op == ">=": + if isset(left) and isset(right): + explanation = _compare_gte_set(left, right, highlighter, verbose) + elif op == "<=": + if isset(left) and isset(right): + explanation = _compare_lte_set(left, right, highlighter, verbose) + elif op == ">": + if isset(left) and isset(right): + explanation = _compare_gt_set(left, right, highlighter, verbose) + elif op == "<": + if isset(left) and isset(right): + explanation = _compare_lt_set(left, right, highlighter, verbose) + + except outcomes.Exit: + raise + except Exception: + repr_crash = _pytest._code.ExceptionInfo.from_current()._getreprcrash() + explanation = [ + f"(pytest_assertion plugin: representation of details failed: {repr_crash}.", + " Probably an object has a faulty __repr__.)", + ] + + if not explanation: + return None + + if explanation[0] != "": + explanation = ["", *explanation] + return [summary, *explanation] + + +def _compare_eq_any( + left: Any, right: Any, highlighter: _HighlightFunc, verbose: int = 0 +) -> list[str]: + explanation = [] + if istext(left) and istext(right): + explanation = _diff_text(left, right, highlighter, verbose) + else: + from _pytest.python_api import ApproxBase + + if isinstance(left, ApproxBase) or isinstance(right, ApproxBase): + # Although the common order should be obtained == expected, this ensures both ways + approx_side = left if isinstance(left, ApproxBase) else right + other_side = right if isinstance(left, ApproxBase) else left + + explanation = approx_side._repr_compare(other_side) + elif type(left) is type(right) and ( + isdatacls(left) or isattrs(left) or isnamedtuple(left) + ): + # Note: unlike dataclasses/attrs, namedtuples compare only the + # field values, not the type or field names. But this branch + # intentionally only handles the same-type case, which was often + # used in older code bases before dataclasses/attrs were available. 
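+            # Editor note (illustrative): e.g. two same-type Point(x=..., y=...)
+            # namedtuples are compared field by field by _compare_eq_cls() below.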
+ explanation = _compare_eq_cls(left, right, highlighter, verbose) + elif issequence(left) and issequence(right): + explanation = _compare_eq_sequence(left, right, highlighter, verbose) + elif isset(left) and isset(right): + explanation = _compare_eq_set(left, right, highlighter, verbose) + elif isdict(left) and isdict(right): + explanation = _compare_eq_dict(left, right, highlighter, verbose) + + if isiterable(left) and isiterable(right): + expl = _compare_eq_iterable(left, right, highlighter, verbose) + explanation.extend(expl) + + return explanation + + +def _diff_text( + left: str, right: str, highlighter: _HighlightFunc, verbose: int = 0 +) -> list[str]: + """Return the explanation for the diff between text. + + Unless --verbose is used this will skip leading and trailing + characters which are identical to keep the diff minimal. + """ + from difflib import ndiff + + explanation: list[str] = [] + + if verbose < 1: + i = 0 # just in case left or right has zero length + for i in range(min(len(left), len(right))): + if left[i] != right[i]: + break + if i > 42: + i -= 10 # Provide some context + explanation = [ + f"Skipping {i} identical leading characters in diff, use -v to show" + ] + left = left[i:] + right = right[i:] + if len(left) == len(right): + for i in range(len(left)): + if left[-i] != right[-i]: + break + if i > 42: + i -= 10 # Provide some context + explanation += [ + f"Skipping {i} identical trailing " + "characters in diff, use -v to show" + ] + left = left[:-i] + right = right[:-i] + keepends = True + if left.isspace() or right.isspace(): + left = repr(str(left)) + right = repr(str(right)) + explanation += ["Strings contain only whitespace, escaping them using repr()"] + # "right" is the expected base against which we compare "left", + # see https://github.com/pytest-dev/pytest/issues/3333 + explanation.extend( + highlighter( + "\n".join( + line.strip("\n") + for line in ndiff(right.splitlines(keepends), left.splitlines(keepends)) + ), + lexer="diff", + ).splitlines() + ) + return explanation + + +def _compare_eq_iterable( + left: Iterable[Any], + right: Iterable[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + if verbose <= 0 and not running_on_ci(): + return ["Use -v to get more diff"] + # dynamic import to speedup pytest + import difflib + + left_formatting = PrettyPrinter().pformat(left).splitlines() + right_formatting = PrettyPrinter().pformat(right).splitlines() + + explanation = ["", "Full diff:"] + # "right" is the expected base against which we compare "left", + # see https://github.com/pytest-dev/pytest/issues/3333 + explanation.extend( + highlighter( + "\n".join( + line.rstrip() + for line in difflib.ndiff(right_formatting, left_formatting) + ), + lexer="diff", + ).splitlines() + ) + return explanation + + +def _compare_eq_sequence( + left: Sequence[Any], + right: Sequence[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + comparing_bytes = isinstance(left, bytes) and isinstance(right, bytes) + explanation: list[str] = [] + len_left = len(left) + len_right = len(right) + for i in range(min(len_left, len_right)): + if left[i] != right[i]: + if comparing_bytes: + # when comparing bytes, we want to see their ascii representation + # instead of their numeric values (#5260) + # using a slice gives us the ascii representation: + # >>> s = b'foo' + # >>> s[0] + # 102 + # >>> s[0:1] + # b'f' + left_value = left[i : i + 1] + right_value = right[i : i + 1] + else: + left_value = left[i] + right_value = right[i] + + 
explanation.append( + f"At index {i} diff:" + f" {highlighter(repr(left_value))} != {highlighter(repr(right_value))}" + ) + break + + if comparing_bytes: + # when comparing bytes, it doesn't help to show the "sides contain one or more + # items" longer explanation, so skip it + + return explanation + + len_diff = len_left - len_right + if len_diff: + if len_diff > 0: + dir_with_more = "Left" + extra = saferepr(left[len_right]) + else: + len_diff = 0 - len_diff + dir_with_more = "Right" + extra = saferepr(right[len_left]) + + if len_diff == 1: + explanation += [ + f"{dir_with_more} contains one more item: {highlighter(extra)}" + ] + else: + explanation += [ + f"{dir_with_more} contains {len_diff} more items, first extra item: {highlighter(extra)}" + ] + return explanation + + +def _compare_eq_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + explanation = [] + explanation.extend(_set_one_sided_diff("left", left, right, highlighter)) + explanation.extend(_set_one_sided_diff("right", right, left, highlighter)) + return explanation + + +def _compare_gt_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + explanation = _compare_gte_set(left, right, highlighter) + if not explanation: + return ["Both sets are equal"] + return explanation + + +def _compare_lt_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + explanation = _compare_lte_set(left, right, highlighter) + if not explanation: + return ["Both sets are equal"] + return explanation + + +def _compare_gte_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + return _set_one_sided_diff("right", right, left, highlighter) + + +def _compare_lte_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + return _set_one_sided_diff("left", left, right, highlighter) + + +def _set_one_sided_diff( + posn: str, + set1: AbstractSet[Any], + set2: AbstractSet[Any], + highlighter: _HighlightFunc, +) -> list[str]: + explanation = [] + diff = set1 - set2 + if diff: + explanation.append(f"Extra items in the {posn} set:") + for item in diff: + explanation.append(highlighter(saferepr(item))) + return explanation + + +def _compare_eq_dict( + left: Mapping[Any, Any], + right: Mapping[Any, Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + explanation: list[str] = [] + set_left = set(left) + set_right = set(right) + common = set_left.intersection(set_right) + same = {k: left[k] for k in common if left[k] == right[k]} + if same and verbose < 2: + explanation += [f"Omitting {len(same)} identical items, use -vv to show"] + elif same: + explanation += ["Common items:"] + explanation += highlighter(pprint.pformat(same)).splitlines() + diff = {k for k in common if left[k] != right[k]} + if diff: + explanation += ["Differing items:"] + for k in diff: + explanation += [ + highlighter(saferepr({k: left[k]})) + + " != " + + highlighter(saferepr({k: right[k]})) + ] + extra_left = set_left - set_right + len_extra_left = len(extra_left) + if len_extra_left: + explanation.append( + f"Left contains {len_extra_left} more item{'' if len_extra_left == 1 else 's'}:" + ) + explanation.extend( + highlighter(pprint.pformat({k: left[k] for k in extra_left})).splitlines() + ) + extra_right = 
set_right - set_left + len_extra_right = len(extra_right) + if len_extra_right: + explanation.append( + f"Right contains {len_extra_right} more item{'' if len_extra_right == 1 else 's'}:" + ) + explanation.extend( + highlighter(pprint.pformat({k: right[k] for k in extra_right})).splitlines() + ) + return explanation + + +def _compare_eq_cls( + left: Any, right: Any, highlighter: _HighlightFunc, verbose: int +) -> list[str]: + if not has_default_eq(left): + return [] + if isdatacls(left): + import dataclasses + + all_fields = dataclasses.fields(left) + fields_to_check = [info.name for info in all_fields if info.compare] + elif isattrs(left): + all_fields = left.__attrs_attrs__ + fields_to_check = [field.name for field in all_fields if getattr(field, "eq")] + elif isnamedtuple(left): + fields_to_check = left._fields + else: + assert False + + indent = " " + same = [] + diff = [] + for field in fields_to_check: + if getattr(left, field) == getattr(right, field): + same.append(field) + else: + diff.append(field) + + explanation = [] + if same or diff: + explanation += [""] + if same and verbose < 2: + explanation.append(f"Omitting {len(same)} identical items, use -vv to show") + elif same: + explanation += ["Matching attributes:"] + explanation += highlighter(pprint.pformat(same)).splitlines() + if diff: + explanation += ["Differing attributes:"] + explanation += highlighter(pprint.pformat(diff)).splitlines() + for field in diff: + field_left = getattr(left, field) + field_right = getattr(right, field) + explanation += [ + "", + f"Drill down into differing attribute {field}:", + f"{indent}{field}: {highlighter(repr(field_left))} != {highlighter(repr(field_right))}", + ] + explanation += [ + indent + line + for line in _compare_eq_any( + field_left, field_right, highlighter, verbose + ) + ] + return explanation + + +def _notin_text(term: str, text: str, verbose: int = 0) -> list[str]: + index = text.find(term) + head = text[:index] + tail = text[index + len(term) :] + correct_text = head + tail + diff = _diff_text(text, correct_text, dummy_highlighter, verbose) + newdiff = [f"{saferepr(term, maxsize=42)} is contained here:"] + for line in diff: + if line.startswith("Skipping"): + continue + if line.startswith("- "): + continue + if line.startswith("+ "): + newdiff.append(" " + line[2:]) + else: + newdiff.append(line) + return newdiff diff --git a/.venv/lib/python3.12/site-packages/_pytest/cacheprovider.py b/.venv/lib/python3.12/site-packages/_pytest/cacheprovider.py new file mode 100644 index 0000000..4383f10 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/cacheprovider.py @@ -0,0 +1,646 @@ +# mypy: allow-untyped-defs +"""Implementation of the cache provider.""" + +# This plugin was not named "cache" to avoid conflicts with the external +# pytest-cache version. 
+from __future__ import annotations + +from collections.abc import Generator +from collections.abc import Iterable +import dataclasses +import errno +import json +import os +from pathlib import Path +import tempfile +from typing import final + +from .pathlib import resolve_from_str +from .pathlib import rm_rf +from .reports import CollectReport +from _pytest import nodes +from _pytest._io import TerminalWriter +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.main import Session +from _pytest.nodes import Directory +from _pytest.nodes import File +from _pytest.reports import TestReport + + +README_CONTENT = """\ +# pytest cache directory # + +This directory contains data from the pytest's cache plugin, +which provides the `--lf` and `--ff` options, as well as the `cache` fixture. + +**Do not** commit this to version control. + +See [the docs](https://docs.pytest.org/en/stable/how-to/cache.html) for more information. +""" + +CACHEDIR_TAG_CONTENT = b"""\ +Signature: 8a477f597d28d172789f06886806bc55 +# This file is a cache directory tag created by pytest. +# For information about cache directory tags, see: +# https://bford.info/cachedir/spec.html +""" + + +@final +@dataclasses.dataclass +class Cache: + """Instance of the `cache` fixture.""" + + _cachedir: Path = dataclasses.field(repr=False) + _config: Config = dataclasses.field(repr=False) + + # Sub-directory under cache-dir for directories created by `mkdir()`. + _CACHE_PREFIX_DIRS = "d" + + # Sub-directory under cache-dir for values created by `set()`. + _CACHE_PREFIX_VALUES = "v" + + def __init__( + self, cachedir: Path, config: Config, *, _ispytest: bool = False + ) -> None: + check_ispytest(_ispytest) + self._cachedir = cachedir + self._config = config + + @classmethod + def for_config(cls, config: Config, *, _ispytest: bool = False) -> Cache: + """Create the Cache instance for a Config. + + :meta private: + """ + check_ispytest(_ispytest) + cachedir = cls.cache_dir_from_config(config, _ispytest=True) + if config.getoption("cacheclear") and cachedir.is_dir(): + cls.clear_cache(cachedir, _ispytest=True) + return cls(cachedir, config, _ispytest=True) + + @classmethod + def clear_cache(cls, cachedir: Path, _ispytest: bool = False) -> None: + """Clear the sub-directories used to hold cached directories and values. + + :meta private: + """ + check_ispytest(_ispytest) + for prefix in (cls._CACHE_PREFIX_DIRS, cls._CACHE_PREFIX_VALUES): + d = cachedir / prefix + if d.is_dir(): + rm_rf(d) + + @staticmethod + def cache_dir_from_config(config: Config, *, _ispytest: bool = False) -> Path: + """Get the path to the cache directory for a Config. + + :meta private: + """ + check_ispytest(_ispytest) + return resolve_from_str(config.getini("cache_dir"), config.rootpath) + + def warn(self, fmt: str, *, _ispytest: bool = False, **args: object) -> None: + """Issue a cache warning. 
+ + :meta private: + """ + check_ispytest(_ispytest) + import warnings + + from _pytest.warning_types import PytestCacheWarning + + warnings.warn( + PytestCacheWarning(fmt.format(**args) if args else fmt), + self._config.hook, + stacklevel=3, + ) + + def _mkdir(self, path: Path) -> None: + self._ensure_cache_dir_and_supporting_files() + path.mkdir(exist_ok=True, parents=True) + + def mkdir(self, name: str) -> Path: + """Return a directory path object with the given name. + + If the directory does not yet exist, it will be created. You can use + it to manage files to e.g. store/retrieve database dumps across test + sessions. + + .. versionadded:: 7.0 + + :param name: + Must be a string not containing a ``/`` separator. + Make sure the name contains your plugin or application + identifiers to prevent clashes with other cache users. + """ + path = Path(name) + if len(path.parts) > 1: + raise ValueError("name is not allowed to contain path separators") + res = self._cachedir.joinpath(self._CACHE_PREFIX_DIRS, path) + self._mkdir(res) + return res + + def _getvaluepath(self, key: str) -> Path: + return self._cachedir.joinpath(self._CACHE_PREFIX_VALUES, Path(key)) + + def get(self, key: str, default): + """Return the cached value for the given key. + + If no value was yet cached or the value cannot be read, the specified + default is returned. + + :param key: + Must be a ``/`` separated value. Usually the first + name is the name of your plugin or your application. + :param default: + The value to return in case of a cache-miss or invalid cache value. + """ + path = self._getvaluepath(key) + try: + with path.open("r", encoding="UTF-8") as f: + return json.load(f) + except (ValueError, OSError): + return default + + def set(self, key: str, value: object) -> None: + """Save value for the given key. + + :param key: + Must be a ``/`` separated value. Usually the first + name is the name of your plugin or your application. + :param value: + Must be of any combination of basic python types, + including nested types like lists of dictionaries. + """ + path = self._getvaluepath(key) + try: + self._mkdir(path.parent) + except OSError as exc: + self.warn( + f"could not create cache path {path}: {exc}", + _ispytest=True, + ) + return + data = json.dumps(value, ensure_ascii=False, indent=2) + try: + f = path.open("w", encoding="UTF-8") + except OSError as exc: + self.warn( + f"cache could not write path {path}: {exc}", + _ispytest=True, + ) + else: + with f: + f.write(data) + + def _ensure_cache_dir_and_supporting_files(self) -> None: + """Create the cache dir and its supporting files.""" + if self._cachedir.is_dir(): + return + + self._cachedir.parent.mkdir(parents=True, exist_ok=True) + with tempfile.TemporaryDirectory( + prefix="pytest-cache-files-", + dir=self._cachedir.parent, + ) as newpath: + path = Path(newpath) + + # Reset permissions to the default, see #12308. + # Note: there's no way to get the current umask atomically, eek. + umask = os.umask(0o022) + os.umask(umask) + path.chmod(0o777 - umask) + + with open(path.joinpath("README.md"), "x", encoding="UTF-8") as f: + f.write(README_CONTENT) + with open(path.joinpath(".gitignore"), "x", encoding="UTF-8") as f: + f.write("# Created by pytest automatically.\n*\n") + with open(path.joinpath("CACHEDIR.TAG"), "xb") as f: + f.write(CACHEDIR_TAG_CONTENT) + + try: + path.rename(self._cachedir) + except OSError as e: + # If 2 concurrent pytests both race to the rename, the loser + # gets "Directory not empty" from the rename. 
In this case, + # everything is handled so just continue (while letting the + # temporary directory be cleaned up). + # On Windows, the error is a FileExistsError which translates to EEXIST. + if e.errno not in (errno.ENOTEMPTY, errno.EEXIST): + raise + else: + # Create a directory in place of the one we just moved so that + # `TemporaryDirectory`'s cleanup doesn't complain. + # + # TODO: pass ignore_cleanup_errors=True when we no longer support python < 3.10. + # See https://github.com/python/cpython/issues/74168. Note that passing + # delete=False would do the wrong thing in case of errors and isn't supported + # until python 3.12. + path.mkdir() + + +class LFPluginCollWrapper: + def __init__(self, lfplugin: LFPlugin) -> None: + self.lfplugin = lfplugin + self._collected_at_least_one_failure = False + + @hookimpl(wrapper=True) + def pytest_make_collect_report( + self, collector: nodes.Collector + ) -> Generator[None, CollectReport, CollectReport]: + res = yield + if isinstance(collector, Session | Directory): + # Sort any lf-paths to the beginning. + lf_paths = self.lfplugin._last_failed_paths + + # Use stable sort to prioritize last failed. + def sort_key(node: nodes.Item | nodes.Collector) -> bool: + return node.path in lf_paths + + res.result = sorted( + res.result, + key=sort_key, + reverse=True, + ) + + elif isinstance(collector, File): + if collector.path in self.lfplugin._last_failed_paths: + result = res.result + lastfailed = self.lfplugin.lastfailed + + # Only filter with known failures. + if not self._collected_at_least_one_failure: + if not any(x.nodeid in lastfailed for x in result): + return res + self.lfplugin.config.pluginmanager.register( + LFPluginCollSkipfiles(self.lfplugin), "lfplugin-collskip" + ) + self._collected_at_least_one_failure = True + + session = collector.session + result[:] = [ + x + for x in result + if x.nodeid in lastfailed + # Include any passed arguments (not trivial to filter). + or session.isinitpath(x.path) + # Keep all sub-collectors. 
+ or isinstance(x, nodes.Collector) + ] + + return res + + +class LFPluginCollSkipfiles: + def __init__(self, lfplugin: LFPlugin) -> None: + self.lfplugin = lfplugin + + @hookimpl + def pytest_make_collect_report( + self, collector: nodes.Collector + ) -> CollectReport | None: + if isinstance(collector, File): + if collector.path not in self.lfplugin._last_failed_paths: + self.lfplugin._skipped_files += 1 + + return CollectReport( + collector.nodeid, "passed", longrepr=None, result=[] + ) + return None + + +class LFPlugin: + """Plugin which implements the --lf (run last-failing) option.""" + + def __init__(self, config: Config) -> None: + self.config = config + active_keys = "lf", "failedfirst" + self.active = any(config.getoption(key) for key in active_keys) + assert config.cache + self.lastfailed: dict[str, bool] = config.cache.get("cache/lastfailed", {}) + self._previously_failed_count: int | None = None + self._report_status: str | None = None + self._skipped_files = 0 # count skipped files during collection due to --lf + + if config.getoption("lf"): + self._last_failed_paths = self.get_last_failed_paths() + config.pluginmanager.register( + LFPluginCollWrapper(self), "lfplugin-collwrapper" + ) + + def get_last_failed_paths(self) -> set[Path]: + """Return a set with all Paths of the previously failed nodeids and + their parents.""" + rootpath = self.config.rootpath + result = set() + for nodeid in self.lastfailed: + path = rootpath / nodeid.split("::")[0] + result.add(path) + result.update(path.parents) + return {x for x in result if x.exists()} + + def pytest_report_collectionfinish(self) -> str | None: + if self.active and self.config.get_verbosity() >= 0: + return f"run-last-failure: {self._report_status}" + return None + + def pytest_runtest_logreport(self, report: TestReport) -> None: + if (report.when == "call" and report.passed) or report.skipped: + self.lastfailed.pop(report.nodeid, None) + elif report.failed: + self.lastfailed[report.nodeid] = True + + def pytest_collectreport(self, report: CollectReport) -> None: + passed = report.outcome in ("passed", "skipped") + if passed: + if report.nodeid in self.lastfailed: + self.lastfailed.pop(report.nodeid) + self.lastfailed.update((item.nodeid, True) for item in report.result) + else: + self.lastfailed[report.nodeid] = True + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_collection_modifyitems( + self, config: Config, items: list[nodes.Item] + ) -> Generator[None]: + res = yield + + if not self.active: + return res + + if self.lastfailed: + previously_failed = [] + previously_passed = [] + for item in items: + if item.nodeid in self.lastfailed: + previously_failed.append(item) + else: + previously_passed.append(item) + self._previously_failed_count = len(previously_failed) + + if not previously_failed: + # Running a subset of all tests with recorded failures + # only outside of it. 
+ self._report_status = ( + f"{len(self.lastfailed)} known failures not in selected tests" + ) + else: + if self.config.getoption("lf"): + items[:] = previously_failed + config.hook.pytest_deselected(items=previously_passed) + else: # --failedfirst + items[:] = previously_failed + previously_passed + + noun = "failure" if self._previously_failed_count == 1 else "failures" + suffix = " first" if self.config.getoption("failedfirst") else "" + self._report_status = ( + f"rerun previous {self._previously_failed_count} {noun}{suffix}" + ) + + if self._skipped_files > 0: + files_noun = "file" if self._skipped_files == 1 else "files" + self._report_status += f" (skipped {self._skipped_files} {files_noun})" + else: + self._report_status = "no previously failed tests, " + if self.config.getoption("last_failed_no_failures") == "none": + self._report_status += "deselecting all items." + config.hook.pytest_deselected(items=items[:]) + items[:] = [] + else: + self._report_status += "not deselecting items." + + return res + + def pytest_sessionfinish(self, session: Session) -> None: + config = self.config + if config.getoption("cacheshow") or hasattr(config, "workerinput"): + return + + assert config.cache is not None + saved_lastfailed = config.cache.get("cache/lastfailed", {}) + if saved_lastfailed != self.lastfailed: + config.cache.set("cache/lastfailed", self.lastfailed) + + +class NFPlugin: + """Plugin which implements the --nf (run new-first) option.""" + + def __init__(self, config: Config) -> None: + self.config = config + self.active = config.option.newfirst + assert config.cache is not None + self.cached_nodeids = set(config.cache.get("cache/nodeids", [])) + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_collection_modifyitems(self, items: list[nodes.Item]) -> Generator[None]: + res = yield + + if self.active: + new_items: dict[str, nodes.Item] = {} + other_items: dict[str, nodes.Item] = {} + for item in items: + if item.nodeid not in self.cached_nodeids: + new_items[item.nodeid] = item + else: + other_items[item.nodeid] = item + + items[:] = self._get_increasing_order( + new_items.values() + ) + self._get_increasing_order(other_items.values()) + self.cached_nodeids.update(new_items) + else: + self.cached_nodeids.update(item.nodeid for item in items) + + return res + + def _get_increasing_order(self, items: Iterable[nodes.Item]) -> list[nodes.Item]: + return sorted(items, key=lambda item: item.path.stat().st_mtime, reverse=True) + + def pytest_sessionfinish(self) -> None: + config = self.config + if config.getoption("cacheshow") or hasattr(config, "workerinput"): + return + + if config.getoption("collectonly"): + return + + assert config.cache is not None + config.cache.set("cache/nodeids", sorted(self.cached_nodeids)) + + +def pytest_addoption(parser: Parser) -> None: + """Add command-line options for cache functionality. + + :param parser: Parser object to add command-line options to. + """ + group = parser.getgroup("general") + group.addoption( + "--lf", + "--last-failed", + action="store_true", + dest="lf", + help="Rerun only the tests that failed at the last run (or all if none failed)", + ) + group.addoption( + "--ff", + "--failed-first", + action="store_true", + dest="failedfirst", + help="Run all tests, but run the last failures first. 
" + "This may re-order tests and thus lead to " + "repeated fixture setup/teardown.", + ) + group.addoption( + "--nf", + "--new-first", + action="store_true", + dest="newfirst", + help="Run tests from new files first, then the rest of the tests " + "sorted by file mtime", + ) + group.addoption( + "--cache-show", + action="append", + nargs="?", + dest="cacheshow", + help=( + "Show cache contents, don't perform collection or tests. " + "Optional argument: glob (default: '*')." + ), + ) + group.addoption( + "--cache-clear", + action="store_true", + dest="cacheclear", + help="Remove all cache contents at start of test run", + ) + cache_dir_default = ".pytest_cache" + if "TOX_ENV_DIR" in os.environ: + cache_dir_default = os.path.join(os.environ["TOX_ENV_DIR"], cache_dir_default) + parser.addini("cache_dir", default=cache_dir_default, help="Cache directory path") + group.addoption( + "--lfnf", + "--last-failed-no-failures", + action="store", + dest="last_failed_no_failures", + choices=("all", "none"), + default="all", + help="With ``--lf``, determines whether to execute tests when there " + "are no previously (known) failures or when no " + "cached ``lastfailed`` data was found. " + "``all`` (the default) runs the full test suite again. " + "``none`` just emits a message about no known failures and exits successfully.", + ) + + +def pytest_cmdline_main(config: Config) -> int | ExitCode | None: + if config.option.cacheshow and not config.option.help: + from _pytest.main import wrap_session + + return wrap_session(config, cacheshow) + return None + + +@hookimpl(tryfirst=True) +def pytest_configure(config: Config) -> None: + """Configure cache system and register related plugins. + + Creates the Cache instance and registers the last-failed (LFPlugin) + and new-first (NFPlugin) plugins with the plugin manager. + + :param config: pytest configuration object. + """ + config.cache = Cache.for_config(config, _ispytest=True) + config.pluginmanager.register(LFPlugin(config), "lfplugin") + config.pluginmanager.register(NFPlugin(config), "nfplugin") + + +@fixture +def cache(request: FixtureRequest) -> Cache: + """Return a cache object that can persist state between testing sessions. + + cache.get(key, default) + cache.set(key, value) + + Keys must be ``/`` separated strings, where the first part is usually the + name of your plugin or application to avoid clashes with other cache users. + + Values can be any object handled by the json stdlib module. + """ + assert request.config.cache is not None + return request.config.cache + + +def pytest_report_header(config: Config) -> str | None: + """Display cachedir with --cache-show and if non-default.""" + if config.option.verbose > 0 or config.getini("cache_dir") != ".pytest_cache": + assert config.cache is not None + cachedir = config.cache._cachedir + # TODO: evaluate generating upward relative paths + # starting with .., ../.. if sensible + + try: + displaypath = cachedir.relative_to(config.rootpath) + except ValueError: + displaypath = cachedir + return f"cachedir: {displaypath}" + return None + + +def cacheshow(config: Config, session: Session) -> int: + """Display cache contents when --cache-show is used. + + Shows cached values and directories matching the specified glob pattern + (default: '*'). Displays cache location, cached test results, and + any cached directories created by plugins. + + :param config: pytest configuration object. + :param session: pytest session object. + :returns: Exit code (0 for success). 
+ """ + from pprint import pformat + + assert config.cache is not None + + tw = TerminalWriter() + tw.line("cachedir: " + str(config.cache._cachedir)) + if not config.cache._cachedir.is_dir(): + tw.line("cache is empty") + return 0 + + glob = config.option.cacheshow[0] + if glob is None: + glob = "*" + + dummy = object() + basedir = config.cache._cachedir + vdir = basedir / Cache._CACHE_PREFIX_VALUES + tw.sep("-", f"cache values for {glob!r}") + for valpath in sorted(x for x in vdir.rglob(glob) if x.is_file()): + key = str(valpath.relative_to(vdir)) + val = config.cache.get(key, dummy) + if val is dummy: + tw.line(f"{key} contains unreadable content, will be ignored") + else: + tw.line(f"{key} contains:") + for line in pformat(val).splitlines(): + tw.line(" " + line) + + ddir = basedir / Cache._CACHE_PREFIX_DIRS + if ddir.is_dir(): + contents = sorted(ddir.rglob(glob)) + tw.sep("-", f"cache directories for {glob!r}") + for p in contents: + # if p.is_dir(): + # print("%s/" % p.relative_to(basedir)) + if p.is_file(): + key = str(p.relative_to(basedir)) + tw.line(f"{key} is a file of length {p.stat().st_size}") + return 0 diff --git a/.venv/lib/python3.12/site-packages/_pytest/capture.py b/.venv/lib/python3.12/site-packages/_pytest/capture.py new file mode 100644 index 0000000..6d98676 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/capture.py @@ -0,0 +1,1144 @@ +# mypy: allow-untyped-defs +"""Per-test stdout/stderr capturing mechanism.""" + +from __future__ import annotations + +import abc +import collections +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Iterator +import contextlib +import io +from io import UnsupportedOperation +import os +import sys +from tempfile import TemporaryFile +from types import TracebackType +from typing import Any +from typing import AnyStr +from typing import BinaryIO +from typing import cast +from typing import Final +from typing import final +from typing import Generic +from typing import Literal +from typing import NamedTuple +from typing import TextIO +from typing import TYPE_CHECKING + + +if TYPE_CHECKING: + from typing_extensions import Self + +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import SubRequest +from _pytest.nodes import Collector +from _pytest.nodes import File +from _pytest.nodes import Item +from _pytest.reports import CollectReport + + +_CaptureMethod = Literal["fd", "sys", "no", "tee-sys"] + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--capture", + action="store", + default="fd", + metavar="method", + choices=["fd", "sys", "no", "tee-sys"], + help="Per-test capturing method: one of fd|sys|no|tee-sys", + ) + group._addoption( # private to use reserved lower-case short option + "-s", + action="store_const", + const="no", + dest="capture", + help="Shortcut for --capture=no", + ) + + +def _colorama_workaround() -> None: + """Ensure colorama is imported so that it attaches to the correct stdio + handles on Windows. + + colorama uses the terminal on import time. So if something does the + first import of colorama while I/O capture is active, colorama will + fail in various ways. 
+ """ + if sys.platform.startswith("win32"): + try: + import colorama # noqa: F401 + except ImportError: + pass + + +def _readline_workaround() -> None: + """Ensure readline is imported early so it attaches to the correct stdio handles. + + This isn't a problem with the default GNU readline implementation, but in + some configurations, Python uses libedit instead (on macOS, and for prebuilt + binaries such as used by uv). + + In theory this is only needed if readline.backend == "libedit", but the + workaround consists of importing readline here, so we already worked around + the issue by the time we could check if we need to. + """ + try: + import readline # noqa: F401 + except ImportError: + pass + + +def _windowsconsoleio_workaround(stream: TextIO) -> None: + """Workaround for Windows Unicode console handling. + + Python 3.6 implemented Unicode console handling for Windows. This works + by reading/writing to the raw console handle using + ``{Read,Write}ConsoleW``. + + The problem is that we are going to ``dup2`` over the stdio file + descriptors when doing ``FDCapture`` and this will ``CloseHandle`` the + handles used by Python to write to the console. Though there is still some + weirdness and the console handle seems to only be closed randomly and not + on the first call to ``CloseHandle``, or maybe it gets reopened with the + same handle value when we suspend capturing. + + The workaround in this case will reopen stdio with a different fd which + also means a different handle by replicating the logic in + "Py_lifecycle.c:initstdio/create_stdio". + + :param stream: + In practice ``sys.stdout`` or ``sys.stderr``, but given + here as parameter for unittesting purposes. + + See https://github.com/pytest-dev/py/issues/103. + """ + if not sys.platform.startswith("win32") or hasattr(sys, "pypy_version_info"): + return + + # Bail out if ``stream`` doesn't seem like a proper ``io`` stream (#2666). + if not hasattr(stream, "buffer"): # type: ignore[unreachable,unused-ignore] + return + + raw_stdout = stream.buffer.raw if hasattr(stream.buffer, "raw") else stream.buffer + + if not isinstance(raw_stdout, io._WindowsConsoleIO): # type: ignore[attr-defined,unused-ignore] + return + + def _reopen_stdio(f, mode): + if not hasattr(stream.buffer, "raw") and mode[0] == "w": + buffering = 0 + else: + buffering = -1 + + return io.TextIOWrapper( + open(os.dup(f.fileno()), mode, buffering), + f.encoding, + f.errors, + f.newlines, + f.line_buffering, + ) + + sys.stdin = _reopen_stdio(sys.stdin, "rb") + sys.stdout = _reopen_stdio(sys.stdout, "wb") + sys.stderr = _reopen_stdio(sys.stderr, "wb") + + +@hookimpl(wrapper=True) +def pytest_load_initial_conftests(early_config: Config) -> Generator[None]: + ns = early_config.known_args_namespace + if ns.capture == "fd": + _windowsconsoleio_workaround(sys.stdout) + _colorama_workaround() + _readline_workaround() + pluginmanager = early_config.pluginmanager + capman = CaptureManager(ns.capture) + pluginmanager.register(capman, "capturemanager") + + # Make sure that capturemanager is properly reset at final shutdown. + early_config.add_cleanup(capman.stop_global_capturing) + + # Finally trigger conftest loading but while capturing (issue #93). + capman.start_global_capturing() + try: + try: + yield + finally: + capman.suspend_global_capture() + except BaseException: + out, err = capman.read_global_capture() + sys.stdout.write(out) + sys.stderr.write(err) + raise + + +# IO Helpers. 
+ + +class EncodedFile(io.TextIOWrapper): + __slots__ = () + + @property + def name(self) -> str: + # Ensure that file.name is a string. Workaround for a Python bug + # fixed in >=3.7.4: https://bugs.python.org/issue36015 + return repr(self.buffer) + + @property + def mode(self) -> str: + # TextIOWrapper doesn't expose a mode, but at least some of our + # tests check it. + assert hasattr(self.buffer, "mode") + return cast(str, self.buffer.mode.replace("b", "")) + + +class CaptureIO(io.TextIOWrapper): + def __init__(self) -> None: + super().__init__(io.BytesIO(), encoding="UTF-8", newline="", write_through=True) + + def getvalue(self) -> str: + assert isinstance(self.buffer, io.BytesIO) + return self.buffer.getvalue().decode("UTF-8") + + +class TeeCaptureIO(CaptureIO): + def __init__(self, other: TextIO) -> None: + self._other = other + super().__init__() + + def write(self, s: str) -> int: + super().write(s) + return self._other.write(s) + + +class DontReadFromInput(TextIO): + @property + def encoding(self) -> str: + assert sys.__stdin__ is not None + return sys.__stdin__.encoding + + def read(self, size: int = -1) -> str: + raise OSError( + "pytest: reading from stdin while output is captured! Consider using `-s`." + ) + + readline = read + + def __next__(self) -> str: + return self.readline() + + def readlines(self, hint: int | None = -1) -> list[str]: + raise OSError( + "pytest: reading from stdin while output is captured! Consider using `-s`." + ) + + def __iter__(self) -> Iterator[str]: + return self + + def fileno(self) -> int: + raise UnsupportedOperation("redirected stdin is pseudofile, has no fileno()") + + def flush(self) -> None: + raise UnsupportedOperation("redirected stdin is pseudofile, has no flush()") + + def isatty(self) -> bool: + return False + + def close(self) -> None: + pass + + def readable(self) -> bool: + return False + + def seek(self, offset: int, whence: int = 0) -> int: + raise UnsupportedOperation("redirected stdin is pseudofile, has no seek(int)") + + def seekable(self) -> bool: + return False + + def tell(self) -> int: + raise UnsupportedOperation("redirected stdin is pseudofile, has no tell()") + + def truncate(self, size: int | None = None) -> int: + raise UnsupportedOperation("cannot truncate stdin") + + def write(self, data: str) -> int: + raise UnsupportedOperation("cannot write to stdin") + + def writelines(self, lines: Iterable[str]) -> None: + raise UnsupportedOperation("Cannot write to stdin") + + def writable(self) -> bool: + return False + + def __enter__(self) -> Self: + return self + + def __exit__( + self, + type: type[BaseException] | None, + value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + pass + + @property + def buffer(self) -> BinaryIO: + # The str/bytes doesn't actually matter in this type, so OK to fake. + return self # type: ignore[return-value] + + +# Capture classes. 
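+#
+# Roughly, the `--capture` modes documented above map onto these classes
+# (see `_get_multicapture` further down):
+#
+#     fd       -> FDCapture(0), FDCapture(1), FDCapture(2)
+#     sys      -> SysCapture(0), SysCapture(1), SysCapture(2)
+#     tee-sys  -> SysCapture(1, tee=True), SysCapture(2, tee=True); stdin not captured
+#     no       -> no capture objects at all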
+ + +class CaptureBase(abc.ABC, Generic[AnyStr]): + EMPTY_BUFFER: AnyStr + + @abc.abstractmethod + def __init__(self, fd: int) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def start(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def done(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def suspend(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def resume(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def writeorg(self, data: AnyStr) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def snap(self) -> AnyStr: + raise NotImplementedError() + + +patchsysdict = {0: "stdin", 1: "stdout", 2: "stderr"} + + +class NoCapture(CaptureBase[str]): + EMPTY_BUFFER = "" + + def __init__(self, fd: int) -> None: + pass + + def start(self) -> None: + pass + + def done(self) -> None: + pass + + def suspend(self) -> None: + pass + + def resume(self) -> None: + pass + + def snap(self) -> str: + return "" + + def writeorg(self, data: str) -> None: + pass + + +class SysCaptureBase(CaptureBase[AnyStr]): + def __init__( + self, fd: int, tmpfile: TextIO | None = None, *, tee: bool = False + ) -> None: + name = patchsysdict[fd] + self._old: TextIO = getattr(sys, name) + self.name = name + if tmpfile is None: + if name == "stdin": + tmpfile = DontReadFromInput() + else: + tmpfile = CaptureIO() if not tee else TeeCaptureIO(self._old) + self.tmpfile = tmpfile + self._state = "initialized" + + def repr(self, class_name: str) -> str: + return "<{} {} _old={} _state={!r} tmpfile={!r}>".format( + class_name, + self.name, + (hasattr(self, "_old") and repr(self._old)) or "", + self._state, + self.tmpfile, + ) + + def __repr__(self) -> str: + return "<{} {} _old={} _state={!r} tmpfile={!r}>".format( + self.__class__.__name__, + self.name, + (hasattr(self, "_old") and repr(self._old)) or "", + self._state, + self.tmpfile, + ) + + def _assert_state(self, op: str, states: tuple[str, ...]) -> None: + assert self._state in states, ( + "cannot {} in state {!r}: expected one of {}".format( + op, self._state, ", ".join(states) + ) + ) + + def start(self) -> None: + self._assert_state("start", ("initialized",)) + setattr(sys, self.name, self.tmpfile) + self._state = "started" + + def done(self) -> None: + self._assert_state("done", ("initialized", "started", "suspended", "done")) + if self._state == "done": + return + setattr(sys, self.name, self._old) + del self._old + self.tmpfile.close() + self._state = "done" + + def suspend(self) -> None: + self._assert_state("suspend", ("started", "suspended")) + setattr(sys, self.name, self._old) + self._state = "suspended" + + def resume(self) -> None: + self._assert_state("resume", ("started", "suspended")) + if self._state == "started": + return + setattr(sys, self.name, self.tmpfile) + self._state = "started" + + +class SysCaptureBinary(SysCaptureBase[bytes]): + EMPTY_BUFFER = b"" + + def snap(self) -> bytes: + self._assert_state("snap", ("started", "suspended")) + self.tmpfile.seek(0) + res = self.tmpfile.buffer.read() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res + + def writeorg(self, data: bytes) -> None: + self._assert_state("writeorg", ("started", "suspended")) + self._old.flush() + self._old.buffer.write(data) + self._old.buffer.flush() + + +class SysCapture(SysCaptureBase[str]): + EMPTY_BUFFER = "" + + def snap(self) -> str: + self._assert_state("snap", ("started", "suspended")) + assert isinstance(self.tmpfile, CaptureIO) + res = 
self.tmpfile.getvalue() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res + + def writeorg(self, data: str) -> None: + self._assert_state("writeorg", ("started", "suspended")) + self._old.write(data) + self._old.flush() + + +class FDCaptureBase(CaptureBase[AnyStr]): + def __init__(self, targetfd: int) -> None: + self.targetfd = targetfd + + try: + os.fstat(targetfd) + except OSError: + # FD capturing is conceptually simple -- create a temporary file, + # redirect the FD to it, redirect back when done. But when the + # target FD is invalid it throws a wrench into this lovely scheme. + # + # Tests themselves shouldn't care if the FD is valid, FD capturing + # should work regardless of external circumstances. So falling back + # to just sys capturing is not a good option. + # + # Further complications are the need to support suspend() and the + # possibility of FD reuse (e.g. the tmpfile getting the very same + # target FD). The following approach is robust, I believe. + self.targetfd_invalid: int | None = os.open(os.devnull, os.O_RDWR) + os.dup2(self.targetfd_invalid, targetfd) + else: + self.targetfd_invalid = None + self.targetfd_save = os.dup(targetfd) + + if targetfd == 0: + self.tmpfile = open(os.devnull, encoding="utf-8") + self.syscapture: CaptureBase[str] = SysCapture(targetfd) + else: + self.tmpfile = EncodedFile( + TemporaryFile(buffering=0), + encoding="utf-8", + errors="replace", + newline="", + write_through=True, + ) + if targetfd in patchsysdict: + self.syscapture = SysCapture(targetfd, self.tmpfile) + else: + self.syscapture = NoCapture(targetfd) + + self._state = "initialized" + + def __repr__(self) -> str: + return ( + f"<{self.__class__.__name__} {self.targetfd} oldfd={self.targetfd_save} " + f"_state={self._state!r} tmpfile={self.tmpfile!r}>" + ) + + def _assert_state(self, op: str, states: tuple[str, ...]) -> None: + assert self._state in states, ( + "cannot {} in state {!r}: expected one of {}".format( + op, self._state, ", ".join(states) + ) + ) + + def start(self) -> None: + """Start capturing on targetfd using memorized tmpfile.""" + self._assert_state("start", ("initialized",)) + os.dup2(self.tmpfile.fileno(), self.targetfd) + self.syscapture.start() + self._state = "started" + + def done(self) -> None: + """Stop capturing, restore streams, return original capture file, + seeked to position zero.""" + self._assert_state("done", ("initialized", "started", "suspended", "done")) + if self._state == "done": + return + os.dup2(self.targetfd_save, self.targetfd) + os.close(self.targetfd_save) + if self.targetfd_invalid is not None: + if self.targetfd_invalid != self.targetfd: + os.close(self.targetfd) + os.close(self.targetfd_invalid) + self.syscapture.done() + self.tmpfile.close() + self._state = "done" + + def suspend(self) -> None: + self._assert_state("suspend", ("started", "suspended")) + if self._state == "suspended": + return + self.syscapture.suspend() + os.dup2(self.targetfd_save, self.targetfd) + self._state = "suspended" + + def resume(self) -> None: + self._assert_state("resume", ("started", "suspended")) + if self._state == "started": + return + self.syscapture.resume() + os.dup2(self.tmpfile.fileno(), self.targetfd) + self._state = "started" + + +class FDCaptureBinary(FDCaptureBase[bytes]): + """Capture IO to/from a given OS-level file descriptor. + + snap() produces `bytes`. 
+ """ + + EMPTY_BUFFER = b"" + + def snap(self) -> bytes: + self._assert_state("snap", ("started", "suspended")) + self.tmpfile.seek(0) + res = self.tmpfile.buffer.read() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res # type: ignore[return-value] + + def writeorg(self, data: bytes) -> None: + """Write to original file descriptor.""" + self._assert_state("writeorg", ("started", "suspended")) + os.write(self.targetfd_save, data) + + +class FDCapture(FDCaptureBase[str]): + """Capture IO to/from a given OS-level file descriptor. + + snap() produces text. + """ + + EMPTY_BUFFER = "" + + def snap(self) -> str: + self._assert_state("snap", ("started", "suspended")) + self.tmpfile.seek(0) + res = self.tmpfile.read() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res + + def writeorg(self, data: str) -> None: + """Write to original file descriptor.""" + self._assert_state("writeorg", ("started", "suspended")) + # XXX use encoding of original stream + os.write(self.targetfd_save, data.encode("utf-8")) + + +# MultiCapture + + +# Generic NamedTuple only supported since Python 3.11. +if sys.version_info >= (3, 11) or TYPE_CHECKING: + + @final + class CaptureResult(NamedTuple, Generic[AnyStr]): + """The result of :method:`caplog.readouterr() `.""" + + out: AnyStr + err: AnyStr + +else: + + class CaptureResult( + collections.namedtuple("CaptureResult", ["out", "err"]), # noqa: PYI024 + Generic[AnyStr], + ): + """The result of :method:`caplog.readouterr() `.""" + + __slots__ = () + + +class MultiCapture(Generic[AnyStr]): + _state = None + _in_suspended = False + + def __init__( + self, + in_: CaptureBase[AnyStr] | None, + out: CaptureBase[AnyStr] | None, + err: CaptureBase[AnyStr] | None, + ) -> None: + self.in_: CaptureBase[AnyStr] | None = in_ + self.out: CaptureBase[AnyStr] | None = out + self.err: CaptureBase[AnyStr] | None = err + + def __repr__(self) -> str: + return ( + f"" + ) + + def start_capturing(self) -> None: + self._state = "started" + if self.in_: + self.in_.start() + if self.out: + self.out.start() + if self.err: + self.err.start() + + def pop_outerr_to_orig(self) -> tuple[AnyStr, AnyStr]: + """Pop current snapshot out/err capture and flush to orig streams.""" + out, err = self.readouterr() + if out: + assert self.out is not None + self.out.writeorg(out) + if err: + assert self.err is not None + self.err.writeorg(err) + return out, err + + def suspend_capturing(self, in_: bool = False) -> None: + self._state = "suspended" + if self.out: + self.out.suspend() + if self.err: + self.err.suspend() + if in_ and self.in_: + self.in_.suspend() + self._in_suspended = True + + def resume_capturing(self) -> None: + self._state = "started" + if self.out: + self.out.resume() + if self.err: + self.err.resume() + if self._in_suspended: + assert self.in_ is not None + self.in_.resume() + self._in_suspended = False + + def stop_capturing(self) -> None: + """Stop capturing and reset capturing streams.""" + if self._state == "stopped": + raise ValueError("was already stopped") + self._state = "stopped" + if self.out: + self.out.done() + if self.err: + self.err.done() + if self.in_: + self.in_.done() + + def is_started(self) -> bool: + """Whether actively capturing -- not suspended or stopped.""" + return self._state == "started" + + def readouterr(self) -> CaptureResult[AnyStr]: + out = self.out.snap() if self.out else "" + err = self.err.snap() if self.err else "" + # TODO: This type error is real, need to fix. 
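+        # The fallback above is the empty *str* "", which does not match
+        # AnyStr when this is a MultiCapture[bytes] -- hence the ignore below.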
+ return CaptureResult(out, err) # type: ignore[arg-type] + + +def _get_multicapture(method: _CaptureMethod) -> MultiCapture[str]: + if method == "fd": + return MultiCapture(in_=FDCapture(0), out=FDCapture(1), err=FDCapture(2)) + elif method == "sys": + return MultiCapture(in_=SysCapture(0), out=SysCapture(1), err=SysCapture(2)) + elif method == "no": + return MultiCapture(in_=None, out=None, err=None) + elif method == "tee-sys": + return MultiCapture( + in_=None, out=SysCapture(1, tee=True), err=SysCapture(2, tee=True) + ) + raise ValueError(f"unknown capturing method: {method!r}") + + +# CaptureManager and CaptureFixture + + +class CaptureManager: + """The capture plugin. + + Manages that the appropriate capture method is enabled/disabled during + collection and each test phase (setup, call, teardown). After each of + those points, the captured output is obtained and attached to the + collection/runtest report. + + There are two levels of capture: + + * global: enabled by default and can be suppressed by the ``-s`` + option. This is always enabled/disabled during collection and each test + phase. + + * fixture: when a test function or one of its fixture depend on the + ``capsys`` or ``capfd`` fixtures. In this case special handling is + needed to ensure the fixtures take precedence over the global capture. + """ + + def __init__(self, method: _CaptureMethod) -> None: + self._method: Final = method + self._global_capturing: MultiCapture[str] | None = None + self._capture_fixture: CaptureFixture[Any] | None = None + + def __repr__(self) -> str: + return ( + f"" + ) + + def is_capturing(self) -> str | bool: + if self.is_globally_capturing(): + return "global" + if self._capture_fixture: + return f"fixture {self._capture_fixture.request.fixturename}" + return False + + # Global capturing control + + def is_globally_capturing(self) -> bool: + return self._method != "no" + + def start_global_capturing(self) -> None: + assert self._global_capturing is None + self._global_capturing = _get_multicapture(self._method) + self._global_capturing.start_capturing() + + def stop_global_capturing(self) -> None: + if self._global_capturing is not None: + self._global_capturing.pop_outerr_to_orig() + self._global_capturing.stop_capturing() + self._global_capturing = None + + def resume_global_capture(self) -> None: + # During teardown of the python process, and on rare occasions, capture + # attributes can be `None` while trying to resume global capture. + if self._global_capturing is not None: + self._global_capturing.resume_capturing() + + def suspend_global_capture(self, in_: bool = False) -> None: + if self._global_capturing is not None: + self._global_capturing.suspend_capturing(in_=in_) + + def suspend(self, in_: bool = False) -> None: + # Need to undo local capsys-et-al if it exists before disabling global capture. 
+ self.suspend_fixture() + self.suspend_global_capture(in_) + + def resume(self) -> None: + self.resume_global_capture() + self.resume_fixture() + + def read_global_capture(self) -> CaptureResult[str]: + assert self._global_capturing is not None + return self._global_capturing.readouterr() + + # Fixture Control + + def set_fixture(self, capture_fixture: CaptureFixture[Any]) -> None: + if self._capture_fixture: + current_fixture = self._capture_fixture.request.fixturename + requested_fixture = capture_fixture.request.fixturename + capture_fixture.request.raiseerror( + f"cannot use {requested_fixture} and {current_fixture} at the same time" + ) + self._capture_fixture = capture_fixture + + def unset_fixture(self) -> None: + self._capture_fixture = None + + def activate_fixture(self) -> None: + """If the current item is using ``capsys`` or ``capfd``, activate + them so they take precedence over the global capture.""" + if self._capture_fixture: + self._capture_fixture._start() + + def deactivate_fixture(self) -> None: + """Deactivate the ``capsys`` or ``capfd`` fixture of this item, if any.""" + if self._capture_fixture: + self._capture_fixture.close() + + def suspend_fixture(self) -> None: + if self._capture_fixture: + self._capture_fixture._suspend() + + def resume_fixture(self) -> None: + if self._capture_fixture: + self._capture_fixture._resume() + + # Helper context managers + + @contextlib.contextmanager + def global_and_fixture_disabled(self) -> Generator[None]: + """Context manager to temporarily disable global and current fixture capturing.""" + do_fixture = self._capture_fixture and self._capture_fixture._is_started() + if do_fixture: + self.suspend_fixture() + do_global = self._global_capturing and self._global_capturing.is_started() + if do_global: + self.suspend_global_capture() + try: + yield + finally: + if do_global: + self.resume_global_capture() + if do_fixture: + self.resume_fixture() + + @contextlib.contextmanager + def item_capture(self, when: str, item: Item) -> Generator[None]: + self.resume_global_capture() + self.activate_fixture() + try: + yield + finally: + self.deactivate_fixture() + self.suspend_global_capture(in_=False) + + out, err = self.read_global_capture() + item.add_report_section(when, "stdout", out) + item.add_report_section(when, "stderr", err) + + # Hooks + + @hookimpl(wrapper=True) + def pytest_make_collect_report( + self, collector: Collector + ) -> Generator[None, CollectReport, CollectReport]: + if isinstance(collector, File): + self.resume_global_capture() + try: + rep = yield + finally: + self.suspend_global_capture() + out, err = self.read_global_capture() + if out: + rep.sections.append(("Captured stdout", out)) + if err: + rep.sections.append(("Captured stderr", err)) + else: + rep = yield + return rep + + @hookimpl(wrapper=True) + def pytest_runtest_setup(self, item: Item) -> Generator[None]: + with self.item_capture("setup", item): + return (yield) + + @hookimpl(wrapper=True) + def pytest_runtest_call(self, item: Item) -> Generator[None]: + with self.item_capture("call", item): + return (yield) + + @hookimpl(wrapper=True) + def pytest_runtest_teardown(self, item: Item) -> Generator[None]: + with self.item_capture("teardown", item): + return (yield) + + @hookimpl(tryfirst=True) + def pytest_keyboard_interrupt(self) -> None: + self.stop_global_capturing() + + @hookimpl(tryfirst=True) + def pytest_internalerror(self) -> None: + self.stop_global_capturing() + + +class CaptureFixture(Generic[AnyStr]): + """Object returned by the :fixture:`capsys`, 
:fixture:`capsysbinary`, + :fixture:`capfd` and :fixture:`capfdbinary` fixtures.""" + + def __init__( + self, + captureclass: type[CaptureBase[AnyStr]], + request: SubRequest, + *, + config: dict[str, Any] | None = None, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self.captureclass: type[CaptureBase[AnyStr]] = captureclass + self.request = request + self._config = config if config else {} + self._capture: MultiCapture[AnyStr] | None = None + self._captured_out: AnyStr = self.captureclass.EMPTY_BUFFER + self._captured_err: AnyStr = self.captureclass.EMPTY_BUFFER + + def _start(self) -> None: + if self._capture is None: + self._capture = MultiCapture( + in_=None, + out=self.captureclass(1, **self._config), + err=self.captureclass(2, **self._config), + ) + self._capture.start_capturing() + + def close(self) -> None: + if self._capture is not None: + out, err = self._capture.pop_outerr_to_orig() + self._captured_out += out + self._captured_err += err + self._capture.stop_capturing() + self._capture = None + + def readouterr(self) -> CaptureResult[AnyStr]: + """Read and return the captured output so far, resetting the internal + buffer. + + :returns: + The captured content as a namedtuple with ``out`` and ``err`` + string attributes. + """ + captured_out, captured_err = self._captured_out, self._captured_err + if self._capture is not None: + out, err = self._capture.readouterr() + captured_out += out + captured_err += err + self._captured_out = self.captureclass.EMPTY_BUFFER + self._captured_err = self.captureclass.EMPTY_BUFFER + return CaptureResult(captured_out, captured_err) + + def _suspend(self) -> None: + """Suspend this fixture's own capturing temporarily.""" + if self._capture is not None: + self._capture.suspend_capturing() + + def _resume(self) -> None: + """Resume this fixture's own capturing temporarily.""" + if self._capture is not None: + self._capture.resume_capturing() + + def _is_started(self) -> bool: + """Whether actively capturing -- not disabled or closed.""" + if self._capture is not None: + return self._capture.is_started() + return False + + @contextlib.contextmanager + def disabled(self) -> Generator[None]: + """Temporarily disable capturing while inside the ``with`` block.""" + capmanager: CaptureManager = self.request.config.pluginmanager.getplugin( + "capturemanager" + ) + with capmanager.global_and_fixture_disabled(): + yield + + +# The fixtures. + + +@fixture +def capsys(request: SubRequest) -> Generator[CaptureFixture[str]]: + r"""Enable text capturing of writes to ``sys.stdout`` and ``sys.stderr``. + + The captured output is made available via ``capsys.readouterr()`` method + calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``text`` objects. + + Returns an instance of :class:`CaptureFixture[str] `. + + Example: + + .. code-block:: python + + def test_output(capsys): + print("hello") + captured = capsys.readouterr() + assert captured.out == "hello\n" + """ + capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager") + capture_fixture = CaptureFixture(SysCapture, request, _ispytest=True) + capman.set_fixture(capture_fixture) + capture_fixture._start() + yield capture_fixture + capture_fixture.close() + capman.unset_fixture() + + +@fixture +def capteesys(request: SubRequest) -> Generator[CaptureFixture[str]]: + r"""Enable simultaneous text capturing and pass-through of writes + to ``sys.stdout`` and ``sys.stderr`` as defined by ``--capture=``. 
+ + + The captured output is made available via ``capteesys.readouterr()`` method + calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``text`` objects. + + The output is also passed-through, allowing it to be "live-printed", + reported, or both as defined by ``--capture=``. + + Returns an instance of :class:`CaptureFixture[str] `. + + Example: + + .. code-block:: python + + def test_output(capteesys): + print("hello") + captured = capteesys.readouterr() + assert captured.out == "hello\n" + """ + capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager") + capture_fixture = CaptureFixture( + SysCapture, request, config=dict(tee=True), _ispytest=True + ) + capman.set_fixture(capture_fixture) + capture_fixture._start() + yield capture_fixture + capture_fixture.close() + capman.unset_fixture() + + +@fixture +def capsysbinary(request: SubRequest) -> Generator[CaptureFixture[bytes]]: + r"""Enable bytes capturing of writes to ``sys.stdout`` and ``sys.stderr``. + + The captured output is made available via ``capsysbinary.readouterr()`` + method calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``bytes`` objects. + + Returns an instance of :class:`CaptureFixture[bytes] `. + + Example: + + .. code-block:: python + + def test_output(capsysbinary): + print("hello") + captured = capsysbinary.readouterr() + assert captured.out == b"hello\n" + """ + capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager") + capture_fixture = CaptureFixture(SysCaptureBinary, request, _ispytest=True) + capman.set_fixture(capture_fixture) + capture_fixture._start() + yield capture_fixture + capture_fixture.close() + capman.unset_fixture() + + +@fixture +def capfd(request: SubRequest) -> Generator[CaptureFixture[str]]: + r"""Enable text capturing of writes to file descriptors ``1`` and ``2``. + + The captured output is made available via ``capfd.readouterr()`` method + calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``text`` objects. + + Returns an instance of :class:`CaptureFixture[str] `. + + Example: + + .. code-block:: python + + def test_system_echo(capfd): + os.system('echo "hello"') + captured = capfd.readouterr() + assert captured.out == "hello\n" + """ + capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager") + capture_fixture = CaptureFixture(FDCapture, request, _ispytest=True) + capman.set_fixture(capture_fixture) + capture_fixture._start() + yield capture_fixture + capture_fixture.close() + capman.unset_fixture() + + +@fixture +def capfdbinary(request: SubRequest) -> Generator[CaptureFixture[bytes]]: + r"""Enable bytes capturing of writes to file descriptors ``1`` and ``2``. + + The captured output is made available via ``capfd.readouterr()`` method + calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``byte`` objects. + + Returns an instance of :class:`CaptureFixture[bytes] `. + + Example: + + .. 
code-block:: python + + def test_system_echo(capfdbinary): + os.system('echo "hello"') + captured = capfdbinary.readouterr() + assert captured.out == b"hello\n" + + """ + capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager") + capture_fixture = CaptureFixture(FDCaptureBinary, request, _ispytest=True) + capman.set_fixture(capture_fixture) + capture_fixture._start() + yield capture_fixture + capture_fixture.close() + capman.unset_fixture() diff --git a/.venv/lib/python3.12/site-packages/_pytest/compat.py b/.venv/lib/python3.12/site-packages/_pytest/compat.py new file mode 100644 index 0000000..72c3d09 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/compat.py @@ -0,0 +1,314 @@ +# mypy: allow-untyped-defs +"""Python version compatibility code and random general utilities.""" + +from __future__ import annotations + +from collections.abc import Callable +import enum +import functools +import inspect +from inspect import Parameter +from inspect import Signature +import os +from pathlib import Path +import sys +from typing import Any +from typing import Final +from typing import NoReturn + +import py + + +if sys.version_info >= (3, 14): + from annotationlib import Format + + +#: constant to prepare valuing pylib path replacements/lazy proxies later on +# intended for removal in pytest 8.0 or 9.0 + +# fmt: off +# intentional space to create a fake difference for the verification +LEGACY_PATH = py.path. local +# fmt: on + + +def legacy_path(path: str | os.PathLike[str]) -> LEGACY_PATH: + """Internal wrapper to prepare lazy proxies for legacy_path instances""" + return LEGACY_PATH(path) + + +# fmt: off +# Singleton type for NOTSET, as described in: +# https://www.python.org/dev/peps/pep-0484/#support-for-singleton-types-in-unions +class NotSetType(enum.Enum): + token = 0 +NOTSET: Final = NotSetType.token +# fmt: on + + +def iscoroutinefunction(func: object) -> bool: + """Return True if func is a coroutine function (a function defined with async + def syntax, and doesn't contain yield), or a function decorated with + @asyncio.coroutine. + + Note: copied and modified from Python 3.5's builtin coroutines.py to avoid + importing asyncio directly, which in turns also initializes the "logging" + module as a side-effect (see issue #8). 
+ """ + return inspect.iscoroutinefunction(func) or getattr(func, "_is_coroutine", False) + + +def is_async_function(func: object) -> bool: + """Return True if the given function seems to be an async function or + an async generator.""" + return iscoroutinefunction(func) or inspect.isasyncgenfunction(func) + + +def signature(obj: Callable[..., Any]) -> Signature: + """Return signature without evaluating annotations.""" + if sys.version_info >= (3, 14): + return inspect.signature(obj, annotation_format=Format.STRING) + return inspect.signature(obj) + + +def getlocation(function, curdir: str | os.PathLike[str] | None = None) -> str: + function = get_real_func(function) + fn = Path(inspect.getfile(function)) + lineno = function.__code__.co_firstlineno + if curdir is not None: + try: + relfn = fn.relative_to(curdir) + except ValueError: + pass + else: + return f"{relfn}:{lineno + 1}" + return f"{fn}:{lineno + 1}" + + +def num_mock_patch_args(function) -> int: + """Return number of arguments used up by mock arguments (if any).""" + patchings = getattr(function, "patchings", None) + if not patchings: + return 0 + + mock_sentinel = getattr(sys.modules.get("mock"), "DEFAULT", object()) + ut_mock_sentinel = getattr(sys.modules.get("unittest.mock"), "DEFAULT", object()) + + return len( + [ + p + for p in patchings + if not p.attribute_name + and (p.new is mock_sentinel or p.new is ut_mock_sentinel) + ] + ) + + +def getfuncargnames( + function: Callable[..., object], + *, + name: str = "", + cls: type | None = None, +) -> tuple[str, ...]: + """Return the names of a function's mandatory arguments. + + Should return the names of all function arguments that: + * Aren't bound to an instance or type as in instance or class methods. + * Don't have default values. + * Aren't bound with functools.partial. + * Aren't replaced with mocks. + + The cls arguments indicate that the function should be treated as a bound + method even though it's not unless the function is a static method. + + The name parameter should be the original name in which the function was collected. + """ + # TODO(RonnyPfannschmidt): This function should be refactored when we + # revisit fixtures. The fixture mechanism should ask the node for + # the fixture names, and not try to obtain directly from the + # function object well after collection has occurred. + + # The parameters attribute of a Signature object contains an + # ordered mapping of parameter names to Parameter instances. This + # creates a tuple of the names of the parameters that don't have + # defaults. + try: + parameters = signature(function).parameters.values() + except (ValueError, TypeError) as e: + from _pytest.outcomes import fail + + fail( + f"Could not determine arguments of {function!r}: {e}", + pytrace=False, + ) + + arg_names = tuple( + p.name + for p in parameters + if ( + p.kind is Parameter.POSITIONAL_OR_KEYWORD + or p.kind is Parameter.KEYWORD_ONLY + ) + and p.default is Parameter.empty + ) + if not name: + name = function.__name__ + + # If this function should be treated as a bound method even though + # it's passed as an unbound method or function, and its first parameter + # wasn't defined as positional only, remove the first parameter name. + if not any(p.kind is Parameter.POSITIONAL_ONLY for p in parameters) and ( + # Not using `getattr` because we don't want to resolve the staticmethod. + # Not using `cls.__dict__` because we want to check the entire MRO. 
+ cls + and not isinstance( + inspect.getattr_static(cls, name, default=None), staticmethod + ) + ): + arg_names = arg_names[1:] + # Remove any names that will be replaced with mocks. + if hasattr(function, "__wrapped__"): + arg_names = arg_names[num_mock_patch_args(function) :] + return arg_names + + +def get_default_arg_names(function: Callable[..., Any]) -> tuple[str, ...]: + # Note: this code intentionally mirrors the code at the beginning of + # getfuncargnames, to get the arguments which were excluded from its result + # because they had default values. + return tuple( + p.name + for p in signature(function).parameters.values() + if p.kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY) + and p.default is not Parameter.empty + ) + + +_non_printable_ascii_translate_table = { + i: f"\\x{i:02x}" for i in range(128) if i not in range(32, 127) +} +_non_printable_ascii_translate_table.update( + {ord("\t"): "\\t", ord("\r"): "\\r", ord("\n"): "\\n"} +) + + +def ascii_escaped(val: bytes | str) -> str: + r"""If val is pure ASCII, return it as an str, otherwise, escape + bytes objects into a sequence of escaped bytes: + + b'\xc3\xb4\xc5\xd6' -> r'\xc3\xb4\xc5\xd6' + + and escapes strings into a sequence of escaped unicode ids, e.g.: + + r'4\nV\U00043efa\x0eMXWB\x1e\u3028\u15fd\xcd\U0007d944' + + Note: + The obvious "v.decode('unicode-escape')" will return + valid UTF-8 unicode if it finds them in bytes, but we + want to return escaped bytes for any byte, even if they match + a UTF-8 string. + """ + if isinstance(val, bytes): + ret = val.decode("ascii", "backslashreplace") + else: + ret = val.encode("unicode_escape").decode("ascii") + return ret.translate(_non_printable_ascii_translate_table) + + +def get_real_func(obj): + """Get the real function object of the (possibly) wrapped object by + :func:`functools.wraps`, or :func:`functools.partial`.""" + obj = inspect.unwrap(obj) + + if isinstance(obj, functools.partial): + obj = obj.func + return obj + + +def getimfunc(func): + try: + return func.__func__ + except AttributeError: + return func + + +def safe_getattr(object: Any, name: str, default: Any) -> Any: + """Like getattr but return default upon any Exception or any OutcomeException. + + Attribute access can potentially fail for 'evil' Python objects. + See issue #214. + It catches OutcomeException because of #2490 (issue #580), new outcomes + are derived from BaseException instead of Exception (for more details + check #2707). + """ + from _pytest.outcomes import TEST_OUTCOME + + try: + return getattr(object, name, default) + except TEST_OUTCOME: + return default + + +def safe_isclass(obj: object) -> bool: + """Ignore any exception via isinstance on Python 3.""" + try: + return inspect.isclass(obj) + except Exception: + return False + + +def get_user_id() -> int | None: + """Return the current process's real user id or None if it could not be + determined. + + :return: The user id or None if it could not be determined. + """ + # mypy follows the version and platform checking expectation of PEP 484: + # https://mypy.readthedocs.io/en/stable/common_issues.html?highlight=platform#python-version-and-system-platform-checks + # Containment checks are too complex for mypy v1.5.0 and cause failure. + if sys.platform == "win32" or sys.platform == "emscripten": + # win32 does not have a getuid() function. + # Emscripten has a return 0 stub. 
+ return None + else: + # On other platforms, a return value of -1 is assumed to indicate that + # the current process's real user id could not be determined. + ERROR = -1 + uid = os.getuid() + return uid if uid != ERROR else None + + +if sys.version_info >= (3, 11): + from typing import assert_never +else: + + def assert_never(value: NoReturn) -> NoReturn: + assert False, f"Unhandled value: {value} ({type(value).__name__})" + + +class CallableBool: + """ + A bool-like object that can also be called, returning its true/false value. + + Used for backwards compatibility in cases where something was supposed to be a method + but was implemented as a simple attribute by mistake (see `TerminalReporter.isatty`). + + Do not use in new code. + """ + + def __init__(self, value: bool) -> None: + self._value = value + + def __bool__(self) -> bool: + return self._value + + def __call__(self) -> bool: + return self._value + + +def running_on_ci() -> bool: + """Check if we're currently running on a CI system.""" + # Only enable CI mode if one of these env variables is defined and non-empty. + # Note: review `regendoc` tox env in case this list is changed. + env_vars = ["CI", "BUILD_NUMBER"] + return any(os.environ.get(var) for var in env_vars) diff --git a/.venv/lib/python3.12/site-packages/_pytest/config/__init__.py b/.venv/lib/python3.12/site-packages/_pytest/config/__init__.py new file mode 100644 index 0000000..6b02e16 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/config/__init__.py @@ -0,0 +1,2197 @@ +# mypy: allow-untyped-defs +"""Command line options, config-file and conftest.py processing.""" + +from __future__ import annotations + +import argparse +import builtins +import collections.abc +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Mapping +from collections.abc import MutableMapping +from collections.abc import Sequence +import contextlib +import copy +import dataclasses +import enum +from functools import lru_cache +import glob +import importlib.metadata +import inspect +import os +import pathlib +import re +import shlex +import sys +from textwrap import dedent +import types +from types import FunctionType +from typing import Any +from typing import cast +from typing import Final +from typing import final +from typing import IO +from typing import TextIO +from typing import TYPE_CHECKING +import warnings + +import pluggy +from pluggy import HookimplMarker +from pluggy import HookimplOpts +from pluggy import HookspecMarker +from pluggy import HookspecOpts +from pluggy import PluginManager + +from .compat import PathAwareHookProxy +from .exceptions import PrintHelp as PrintHelp +from .exceptions import UsageError as UsageError +from .findpaths import ConfigValue +from .findpaths import determine_setup +from _pytest import __version__ +import _pytest._code +from _pytest._code import ExceptionInfo +from _pytest._code import filter_traceback +from _pytest._code.code import TracebackStyle +from _pytest._io import TerminalWriter +from _pytest.compat import assert_never +from _pytest.config.argparsing import Argument +from _pytest.config.argparsing import FILE_OR_DIR +from _pytest.config.argparsing import Parser +import _pytest.deprecated +import _pytest.hookspec +from _pytest.outcomes import fail +from _pytest.outcomes import Skipped +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath +from _pytest.pathlib import 
import_path +from _pytest.pathlib import ImportMode +from _pytest.pathlib import resolve_package_path +from _pytest.pathlib import safe_exists +from _pytest.stash import Stash +from _pytest.warning_types import PytestConfigWarning +from _pytest.warning_types import warn_explicit_for + + +if TYPE_CHECKING: + from _pytest.assertion.rewrite import AssertionRewritingHook + from _pytest.cacheprovider import Cache + from _pytest.terminal import TerminalReporter + +_PluggyPlugin = object +"""A type to represent plugin objects. + +Plugins can be any namespace, so we can't narrow it down much, but we use an +alias to make the intent clear. + +Ideally this type would be provided by pluggy itself. +""" + + +hookimpl = HookimplMarker("pytest") +hookspec = HookspecMarker("pytest") + + +@final +class ExitCode(enum.IntEnum): + """Encodes the valid exit codes by pytest. + + Currently users and plugins may supply other exit codes as well. + + .. versionadded:: 5.0 + """ + + #: Tests passed. + OK = 0 + #: Tests failed. + TESTS_FAILED = 1 + #: pytest was interrupted. + INTERRUPTED = 2 + #: An internal error got in the way. + INTERNAL_ERROR = 3 + #: pytest was misused. + USAGE_ERROR = 4 + #: pytest couldn't find tests. + NO_TESTS_COLLECTED = 5 + + __module__ = "pytest" + + +class ConftestImportFailure(Exception): + def __init__( + self, + path: pathlib.Path, + *, + cause: Exception, + ) -> None: + self.path = path + self.cause = cause + + def __str__(self) -> str: + return f"{type(self.cause).__name__}: {self.cause} (from {self.path})" + + +def filter_traceback_for_conftest_import_failure( + entry: _pytest._code.TracebackEntry, +) -> bool: + """Filter tracebacks entries which point to pytest internals or importlib. + + Make a special case for importlib because we use it to import test modules and conftest files + in _pytest.pathlib.import_path. + """ + return filter_traceback(entry) and "importlib" not in str(entry.path).split(os.sep) + + +def print_conftest_import_error(e: ConftestImportFailure, file: TextIO) -> None: + exc_info = ExceptionInfo.from_exception(e.cause) + tw = TerminalWriter(file) + tw.line(f"ImportError while loading conftest '{e.path}'.", red=True) + exc_info.traceback = exc_info.traceback.filter( + filter_traceback_for_conftest_import_failure + ) + exc_repr = ( + exc_info.getrepr(style="short", chain=False) + if exc_info.traceback + else exc_info.exconly() + ) + formatted_tb = str(exc_repr) + for line in formatted_tb.splitlines(): + tw.line(line.rstrip(), red=True) + + +def print_usage_error(e: UsageError, file: TextIO) -> None: + tw = TerminalWriter(file) + for msg in e.args: + tw.line(f"ERROR: {msg}\n", red=True) + + +def main( + args: list[str] | os.PathLike[str] | None = None, + plugins: Sequence[str | _PluggyPlugin] | None = None, +) -> int | ExitCode: + """Perform an in-process test run. + + :param args: + List of command line arguments. If `None` or not given, defaults to reading + arguments directly from the process command line (:data:`sys.argv`). + :param plugins: List of plugin objects to be auto-registered during initialization. + + :returns: An exit code. + """ + # Handle a single `--version` argument early to avoid starting up the entire pytest infrastructure. 
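# A hedged usage sketch of the in-process entry point documented above; "tests/" is
# a placeholder path, and the return value maps onto the ExitCode enum defined above.
import pytest

code = pytest.main(["-q", "tests/"])
if code == pytest.ExitCode.NO_TESTS_COLLECTED:
    print("no tests were collected")
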
+ new_args = sys.argv[1:] if args is None else args + if isinstance(new_args, Sequence) and new_args.count("--version") == 1: + sys.stdout.write(f"pytest {__version__}\n") + return ExitCode.OK + + old_pytest_version = os.environ.get("PYTEST_VERSION") + try: + os.environ["PYTEST_VERSION"] = __version__ + try: + config = _prepareconfig(new_args, plugins) + except ConftestImportFailure as e: + print_conftest_import_error(e, file=sys.stderr) + return ExitCode.USAGE_ERROR + + try: + ret: ExitCode | int = config.hook.pytest_cmdline_main(config=config) + try: + return ExitCode(ret) + except ValueError: + return ret + finally: + config._ensure_unconfigure() + except UsageError as e: + print_usage_error(e, file=sys.stderr) + return ExitCode.USAGE_ERROR + finally: + if old_pytest_version is None: + os.environ.pop("PYTEST_VERSION", None) + else: + os.environ["PYTEST_VERSION"] = old_pytest_version + + +def console_main() -> int: + """The CLI entry point of pytest. + + This function is not meant for programmable use; use `main()` instead. + """ + # https://docs.python.org/3/library/signal.html#note-on-sigpipe + try: + code = main() + sys.stdout.flush() + return code + except BrokenPipeError: + # Python flushes standard streams on exit; redirect remaining output + # to devnull to avoid another BrokenPipeError at shutdown + devnull = os.open(os.devnull, os.O_WRONLY) + os.dup2(devnull, sys.stdout.fileno()) + return 1 # Python exits with error code 1 on EPIPE + + +class cmdline: # compatibility namespace + main = staticmethod(main) + + +def filename_arg(path: str, optname: str) -> str: + """Argparse type validator for filename arguments. + + :path: Path of filename. + :optname: Name of the option. + """ + if os.path.isdir(path): + raise UsageError(f"{optname} must be a filename, given: {path}") + return path + + +def directory_arg(path: str, optname: str) -> str: + """Argparse type validator for directory arguments. + + :path: Path of directory. + :optname: Name of the option. + """ + if not os.path.isdir(path): + raise UsageError(f"{optname} must be a directory, given: {path}") + return path + + +# Plugins that cannot be disabled via "-p no:X" currently. +essential_plugins = ( + "mark", + "main", + "runner", + "fixtures", + "helpconfig", # Provides -p. +) + +default_plugins = ( + *essential_plugins, + "python", + "terminal", + "debugging", + "unittest", + "capture", + "skipping", + "legacypath", + "tmpdir", + "monkeypatch", + "recwarn", + "pastebin", + "assertion", + "junitxml", + "doctest", + "cacheprovider", + "setuponly", + "setupplan", + "stepwise", + "unraisableexception", + "threadexception", + "warnings", + "logging", + "reports", + "faulthandler", + "subtests", +) + +builtin_plugins = { + *default_plugins, + "pytester", + "pytester_assertions", + "terminalprogress", +} + + +def get_config( + args: Iterable[str] | None = None, + plugins: Sequence[str | _PluggyPlugin] | None = None, +) -> Config: + # Subsequent calls to main will create a fresh instance. + pluginmanager = PytestPluginManager() + invocation_params = Config.InvocationParams( + args=args or (), + plugins=plugins, + dir=pathlib.Path.cwd(), + ) + config = Config(pluginmanager, invocation_params=invocation_params) + + if invocation_params.args: + # Handle any "-p no:plugin" args. 
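# Sketch of the "-p no:NAME" preparse handling mentioned in the comment above:
# non-essential plugins can be blocked per run, essential ones cannot.
import pytest

pytest.main(["-p", "no:cacheprovider", "-q"])   # runs without the cache plugin
# pytest.main(["-p", "no:mark"]) would instead fail with
# "ERROR: plugin mark cannot be disabled" (see consider_pluginarg below).
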
+ pluginmanager.consider_preparse(invocation_params.args, exclude_only=True) + + for spec in default_plugins: + pluginmanager.import_plugin(spec) + + return config + + +def get_plugin_manager() -> PytestPluginManager: + """Obtain a new instance of the + :py:class:`pytest.PytestPluginManager`, with default plugins + already loaded. + + This function can be used by integration with other tools, like hooking + into pytest to run tests into an IDE. + """ + return get_config().pluginmanager + + +def _prepareconfig( + args: list[str] | os.PathLike[str], + plugins: Sequence[str | _PluggyPlugin] | None = None, +) -> Config: + if isinstance(args, os.PathLike): + args = [os.fspath(args)] + elif not isinstance(args, list): + msg = ( # type:ignore[unreachable] + "`args` parameter expected to be a list of strings, got: {!r} (type: {})" + ) + raise TypeError(msg.format(args, type(args))) + + initial_config = get_config(args, plugins) + pluginmanager = initial_config.pluginmanager + try: + if plugins: + for plugin in plugins: + if isinstance(plugin, str): + pluginmanager.consider_pluginarg(plugin) + else: + pluginmanager.register(plugin) + config: Config = pluginmanager.hook.pytest_cmdline_parse( + pluginmanager=pluginmanager, args=args + ) + return config + except BaseException: + initial_config._ensure_unconfigure() + raise + + +def _get_directory(path: pathlib.Path) -> pathlib.Path: + """Get the directory of a path - itself if already a directory.""" + if path.is_file(): + return path.parent + else: + return path + + +def _get_legacy_hook_marks( + method: Any, + hook_type: str, + opt_names: tuple[str, ...], +) -> dict[str, bool]: + if TYPE_CHECKING: + # abuse typeguard from importlib to avoid massive method type union that's lacking an alias + assert inspect.isroutine(method) + known_marks: set[str] = {m.name for m in getattr(method, "pytestmark", [])} + must_warn: list[str] = [] + opts: dict[str, bool] = {} + for opt_name in opt_names: + opt_attr = getattr(method, opt_name, AttributeError) + if opt_attr is not AttributeError: + must_warn.append(f"{opt_name}={opt_attr}") + opts[opt_name] = True + elif opt_name in known_marks: + must_warn.append(f"{opt_name}=True") + opts[opt_name] = True + else: + opts[opt_name] = False + if must_warn: + hook_opts = ", ".join(must_warn) + message = _pytest.deprecated.HOOK_LEGACY_MARKING.format( + type=hook_type, + fullname=method.__qualname__, + hook_opts=hook_opts, + ) + warn_explicit_for(cast(FunctionType, method), message) + return opts + + +@final +class PytestPluginManager(PluginManager): + """A :py:class:`pluggy.PluginManager ` with + additional pytest-specific functionality: + + * Loading plugins from the command line, ``PYTEST_PLUGINS`` env variable and + ``pytest_plugins`` global variables found in plugins being loaded. + * ``conftest.py`` loading during start-up. + """ + + def __init__(self) -> None: + from _pytest.assertion import DummyRewriteHook + from _pytest.assertion import RewriteHook + + super().__init__("pytest") + + # -- State related to local conftest plugins. + # All loaded conftest modules. + self._conftest_plugins: set[types.ModuleType] = set() + # All conftest modules applicable for a directory. + # This includes the directory's own conftest modules as well + # as those of its parent directories. + self._dirpath2confmods: dict[pathlib.Path, list[types.ModuleType]] = {} + # Cutoff directory above which conftests are no longer discovered. + self._confcutdir: pathlib.Path | None = None + # If set, conftest loading is skipped. 
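# Integration sketch for get_plugin_manager() defined above, assuming this module
# is importable as _pytest.config in the running environment.
from _pytest.config import get_plugin_manager

pm = get_plugin_manager()              # default plugins are already imported
assert pm.hasplugin("terminal")        # builtin plugins register under their basename
assert pm.get_plugin("not-a-plugin") is None
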
+ self._noconftest = False + + # _getconftestmodules()'s call to _get_directory() causes a stat + # storm when it's called potentially thousands of times in a test + # session (#9478), often with the same path, so cache it. + self._get_directory = lru_cache(256)(_get_directory) + + # plugins that were explicitly skipped with pytest.skip + # list of (module name, skip reason) + # previously we would issue a warning when a plugin was skipped, but + # since we refactored warnings as first citizens of Config, they are + # just stored here to be used later. + self.skipped_plugins: list[tuple[str, str]] = [] + + self.add_hookspecs(_pytest.hookspec) + self.register(self) + if os.environ.get("PYTEST_DEBUG"): + err: IO[str] = sys.stderr + encoding: str = getattr(err, "encoding", "utf8") + try: + err = open( + os.dup(err.fileno()), + mode=err.mode, + buffering=1, + encoding=encoding, + ) + except Exception: + pass + self.trace.root.setwriter(err.write) + self.enable_tracing() + + # Config._consider_importhook will set a real object if required. + self.rewrite_hook: RewriteHook = DummyRewriteHook() + # Used to know when we are importing conftests after the pytest_configure stage. + self._configured = False + + def parse_hookimpl_opts( + self, plugin: _PluggyPlugin, name: str + ) -> HookimplOpts | None: + """:meta private:""" + # pytest hooks are always prefixed with "pytest_", + # so we avoid accessing possibly non-readable attributes + # (see issue #1073). + if not name.startswith("pytest_"): + return None + # Ignore names which cannot be hooks. + if name == "pytest_plugins": + return None + + opts = super().parse_hookimpl_opts(plugin, name) + if opts is not None: + return opts + + method = getattr(plugin, name) + # Consider only actual functions for hooks (#3775). + if not inspect.isroutine(method): + return None + # Collect unmarked hooks as long as they have the `pytest_' prefix. + legacy = _get_legacy_hook_marks( + method, "impl", ("tryfirst", "trylast", "optionalhook", "hookwrapper") + ) + return cast(HookimplOpts, legacy) + + def parse_hookspec_opts(self, module_or_class, name: str) -> HookspecOpts | None: + """:meta private:""" + opts = super().parse_hookspec_opts(module_or_class, name) + if opts is None: + method = getattr(module_or_class, name) + if name.startswith("pytest_"): + legacy = _get_legacy_hook_marks( + method, "spec", ("firstresult", "historic") + ) + opts = cast(HookspecOpts, legacy) + return opts + + def register(self, plugin: _PluggyPlugin, name: str | None = None) -> str | None: + if name in _pytest.deprecated.DEPRECATED_EXTERNAL_PLUGINS: + warnings.warn( + PytestConfigWarning( + "{} plugin has been merged into the core, " + "please remove it from your requirements.".format( + name.replace("_", "-") + ) + ) + ) + return None + plugin_name = super().register(plugin, name) + if plugin_name is not None: + self.hook.pytest_plugin_registered.call_historic( + kwargs=dict( + plugin=plugin, + plugin_name=plugin_name, + manager=self, + ) + ) + + if isinstance(plugin, types.ModuleType): + self.consider_module(plugin) + return plugin_name + + def getplugin(self, name: str): + # Support deprecated naming because plugins (xdist e.g.) use it. 
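# The marker-based style that parse_hookimpl_opts() below maps the legacy attributes
# onto; a conftest.py-level hook, with the sort key chosen only for illustration.
import pytest

@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(items):
    # Runs as early as possible; replaces the deprecated `tryfirst` mark/attribute.
    items.sort(key=lambda item: item.name)
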
+ plugin: _PluggyPlugin | None = self.get_plugin(name) + return plugin + + def hasplugin(self, name: str) -> bool: + """Return whether a plugin with the given name is registered.""" + return bool(self.get_plugin(name)) + + def pytest_configure(self, config: Config) -> None: + """:meta private:""" + # XXX now that the pluginmanager exposes hookimpl(tryfirst...) + # we should remove tryfirst/trylast as markers. + config.addinivalue_line( + "markers", + "tryfirst: mark a hook implementation function such that the " + "plugin machinery will try to call it first/as early as possible. " + "DEPRECATED, use @pytest.hookimpl(tryfirst=True) instead.", + ) + config.addinivalue_line( + "markers", + "trylast: mark a hook implementation function such that the " + "plugin machinery will try to call it last/as late as possible. " + "DEPRECATED, use @pytest.hookimpl(trylast=True) instead.", + ) + self._configured = True + + # + # Internal API for local conftest plugin handling. + # + def _set_initial_conftests( + self, + args: Sequence[str | pathlib.Path], + pyargs: bool, + noconftest: bool, + rootpath: pathlib.Path, + confcutdir: pathlib.Path | None, + invocation_dir: pathlib.Path, + importmode: ImportMode | str, + *, + consider_namespace_packages: bool, + ) -> None: + """Load initial conftest files given a preparsed "namespace". + + As conftest files may add their own command line options which have + arguments ('--my-opt somepath') we might get some false positives. + All builtin and 3rd party plugins will have been loaded, however, so + common options will not confuse our logic here. + """ + self._confcutdir = ( + absolutepath(invocation_dir / confcutdir) if confcutdir else None + ) + self._noconftest = noconftest + self._using_pyargs = pyargs + foundanchor = False + for initial_path in args: + path = str(initial_path) + # remove node-id syntax + i = path.find("::") + if i != -1: + path = path[:i] + anchor = absolutepath(invocation_dir / path) + + # Ensure we do not break if what appears to be an anchor + # is in fact a very long option (#10169, #11394). + if safe_exists(anchor): + self._try_load_conftest( + anchor, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + foundanchor = True + if not foundanchor: + self._try_load_conftest( + invocation_dir, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + + def _is_in_confcutdir(self, path: pathlib.Path) -> bool: + """Whether to consider the given path to load conftests from.""" + if self._confcutdir is None: + return True + # The semantics here are literally: + # Do not load a conftest if it is found upwards from confcut dir. + # But this is *not* the same as: + # Load only conftests from confcutdir or below. + # At first glance they might seem the same thing, however we do support use cases where + # we want to load conftests that are not found in confcutdir or below, but are found + # in completely different directory hierarchies like packages installed + # in out-of-source trees. + # (see #9767 for a regression where the logic was inverted). 
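# Worked illustration of the parents-based check below, using hypothetical paths:
# only directories at or below the cutoff (or in unrelated trees) load conftests.
from pathlib import Path

confcutdir = Path("/repo/tests")
assert Path("/repo") in confcutdir.parents                    # above the cutoff: skipped
assert Path("/repo/tests/unit") not in confcutdir.parents     # at/below: considered
assert Path("/opt/site/other_pkg") not in confcutdir.parents  # out-of-tree: considered
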
+ return path not in self._confcutdir.parents + + def _try_load_conftest( + self, + anchor: pathlib.Path, + importmode: str | ImportMode, + rootpath: pathlib.Path, + *, + consider_namespace_packages: bool, + ) -> None: + self._loadconftestmodules( + anchor, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + # let's also consider test* subdirs + if anchor.is_dir(): + for x in anchor.glob("test*"): + if x.is_dir(): + self._loadconftestmodules( + x, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + + def _loadconftestmodules( + self, + path: pathlib.Path, + importmode: str | ImportMode, + rootpath: pathlib.Path, + *, + consider_namespace_packages: bool, + ) -> None: + if self._noconftest: + return + + directory = self._get_directory(path) + + # Optimization: avoid repeated searches in the same directory. + # Assumes always called with same importmode and rootpath. + if directory in self._dirpath2confmods: + return + + clist = [] + for parent in reversed((directory, *directory.parents)): + if self._is_in_confcutdir(parent): + conftestpath = parent / "conftest.py" + if conftestpath.is_file(): + mod = self._importconftest( + conftestpath, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + clist.append(mod) + self._dirpath2confmods[directory] = clist + + def _getconftestmodules(self, path: pathlib.Path) -> Sequence[types.ModuleType]: + directory = self._get_directory(path) + return self._dirpath2confmods.get(directory, ()) + + def _rget_with_confmod( + self, + name: str, + path: pathlib.Path, + ) -> tuple[types.ModuleType, Any]: + modules = self._getconftestmodules(path) + for mod in reversed(modules): + try: + return mod, getattr(mod, name) + except AttributeError: + continue + raise KeyError(name) + + def _importconftest( + self, + conftestpath: pathlib.Path, + importmode: str | ImportMode, + rootpath: pathlib.Path, + *, + consider_namespace_packages: bool, + ) -> types.ModuleType: + conftestpath_plugin_name = str(conftestpath) + existing = self.get_plugin(conftestpath_plugin_name) + if existing is not None: + return cast(types.ModuleType, existing) + + # conftest.py files there are not in a Python package all have module + # name "conftest", and thus conflict with each other. Clear the existing + # before loading the new one, otherwise the existing one will be + # returned from the module cache. + pkgpath = resolve_package_path(conftestpath) + if pkgpath is None: + try: + del sys.modules[conftestpath.stem] + except KeyError: + pass + + try: + mod = import_path( + conftestpath, + mode=importmode, + root=rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + except Exception as e: + assert e.__traceback__ is not None + raise ConftestImportFailure(conftestpath, cause=e) from e + + self._check_non_top_pytest_plugins(mod, conftestpath) + + self._conftest_plugins.add(mod) + dirpath = conftestpath.parent + if dirpath in self._dirpath2confmods: + for path, mods in self._dirpath2confmods.items(): + if dirpath in path.parents or path == dirpath: + if mod in mods: + raise AssertionError( + f"While trying to load conftest path {conftestpath!s}, " + f"found that the module {mod} is already loaded with path {mod.__file__}. " + "This is not supposed to happen. Please report this issue to pytest." 
+ ) + mods.append(mod) + self.trace(f"loading conftestmodule {mod!r}") + self.consider_conftest(mod, registration_name=conftestpath_plugin_name) + return mod + + def _check_non_top_pytest_plugins( + self, + mod: types.ModuleType, + conftestpath: pathlib.Path, + ) -> None: + if ( + hasattr(mod, "pytest_plugins") + and self._configured + and not self._using_pyargs + ): + msg = ( + "Defining 'pytest_plugins' in a non-top-level conftest is no longer supported:\n" + "It affects the entire test suite instead of just below the conftest as expected.\n" + " {}\n" + "Please move it to a top level conftest file at the rootdir:\n" + " {}\n" + "For more information, visit:\n" + " https://docs.pytest.org/en/stable/deprecations.html#pytest-plugins-in-non-top-level-conftest-files" + ) + fail(msg.format(conftestpath, self._confcutdir), pytrace=False) + + # + # API for bootstrapping plugin loading + # + # + + def consider_preparse( + self, args: Sequence[str], *, exclude_only: bool = False + ) -> None: + """:meta private:""" + i = 0 + n = len(args) + while i < n: + opt = args[i] + i += 1 + if isinstance(opt, str): + if opt == "-p": + try: + parg = args[i] + except IndexError: + return + i += 1 + elif opt.startswith("-p"): + parg = opt[2:] + else: + continue + parg = parg.strip() + if exclude_only and not parg.startswith("no:"): + continue + self.consider_pluginarg(parg) + + def consider_pluginarg(self, arg: str) -> None: + """:meta private:""" + if arg.startswith("no:"): + name = arg[3:] + if name in essential_plugins: + raise UsageError(f"plugin {name} cannot be disabled") + + # PR #4304: remove stepwise if cacheprovider is blocked. + if name == "cacheprovider": + self.set_blocked("stepwise") + self.set_blocked("pytest_stepwise") + + self.set_blocked(name) + if not name.startswith("pytest_"): + self.set_blocked("pytest_" + name) + else: + name = arg + # Unblock the plugin. + self.unblock(name) + if not name.startswith("pytest_"): + self.unblock("pytest_" + name) + self.import_plugin(arg, consider_entry_points=True) + + def consider_conftest( + self, conftestmodule: types.ModuleType, registration_name: str + ) -> None: + """:meta private:""" + self.register(conftestmodule, name=registration_name) + + def consider_env(self) -> None: + """:meta private:""" + self._import_plugin_specs(os.environ.get("PYTEST_PLUGINS")) + + def consider_module(self, mod: types.ModuleType) -> None: + """:meta private:""" + self._import_plugin_specs(getattr(mod, "pytest_plugins", [])) + + def _import_plugin_specs( + self, spec: None | types.ModuleType | str | Sequence[str] + ) -> None: + plugins = _get_plugin_specs_as_list(spec) + for import_spec in plugins: + self.import_plugin(import_spec) + + def import_plugin(self, modname: str, consider_entry_points: bool = False) -> None: + """Import a plugin with ``modname``. + + If ``consider_entry_points`` is True, entry point names are also + considered to find a plugin. + """ + # Most often modname refers to builtin modules, e.g. "pytester", + # "terminal" or "capture". Those plugins are registered under their + # basename for historic purposes but must be imported with the + # _pytest prefix. + assert isinstance(modname, str), ( + f"module name as text required, got {modname!r}" + ) + if self.is_blocked(modname) or self.get_plugin(modname) is not None: + return + + importspec = "_pytest." 
+ modname if modname in builtin_plugins else modname + self.rewrite_hook.mark_rewrite(importspec) + + if consider_entry_points: + loaded = self.load_setuptools_entrypoints("pytest11", name=modname) + if loaded: + return + + try: + __import__(importspec) + except ImportError as e: + raise ImportError( + f'Error importing plugin "{modname}": {e.args[0]}' + ).with_traceback(e.__traceback__) from e + + except Skipped as e: + self.skipped_plugins.append((modname, e.msg or "")) + else: + mod = sys.modules[importspec] + self.register(mod, modname) + + +def _get_plugin_specs_as_list( + specs: None | types.ModuleType | str | Sequence[str], +) -> list[str]: + """Parse a plugins specification into a list of plugin names.""" + # None means empty. + if specs is None: + return [] + # Workaround for #3899 - a submodule which happens to be called "pytest_plugins". + if isinstance(specs, types.ModuleType): + return [] + # Comma-separated list. + if isinstance(specs, str): + return specs.split(",") if specs else [] + # Direct specification. + if isinstance(specs, collections.abc.Sequence): + return list(specs) + raise UsageError( + f"Plugins may be specified as a sequence or a ','-separated string of plugin names. Got: {specs!r}" + ) + + +class Notset: + def __repr__(self): + return "" + + +notset = Notset() + + +def _iter_rewritable_modules(package_files: Iterable[str]) -> Iterator[str]: + """Given an iterable of file names in a source distribution, return the "names" that should + be marked for assertion rewrite. + + For example the package "pytest_mock/__init__.py" should be added as "pytest_mock" in + the assertion rewrite mechanism. + + This function has to deal with dist-info based distributions and egg based distributions + (which are still very much in use for "editable" installs). + + Here are the file names as seen in a dist-info based distribution: + + pytest_mock/__init__.py + pytest_mock/_version.py + pytest_mock/plugin.py + pytest_mock.egg-info/PKG-INFO + + Here are the file names as seen in an egg based distribution: + + src/pytest_mock/__init__.py + src/pytest_mock/_version.py + src/pytest_mock/plugin.py + src/pytest_mock.egg-info/PKG-INFO + LICENSE + setup.py + + We have to take in account those two distribution flavors in order to determine which + names should be considered for assertion rewriting. + + More information: + https://github.com/pytest-dev/pytest-mock/issues/167 + """ + package_files = list(package_files) + seen_some = False + for fn in package_files: + is_simple_module = "/" not in fn and fn.endswith(".py") + is_package = fn.count("/") == 1 and fn.endswith("__init__.py") + if is_simple_module: + module_name, _ = os.path.splitext(fn) + # we ignore "setup.py" at the root of the distribution + # as well as editable installation finder modules made by setuptools + if module_name != "setup" and not module_name.startswith("__editable__"): + seen_some = True + yield module_name + elif is_package: + package_name = os.path.dirname(fn) + seen_some = True + yield package_name + + if not seen_some: + # At this point we did not find any packages or modules suitable for assertion + # rewriting, so we try again by stripping the first path component (to account for + # "src" based source trees for example). + # This approach lets us have the common case continue to be fast, as egg-distributions + # are rarer. 
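# Worked example of the two layouts described in the docstring above, assuming the
# module-level generator in this file is called directly.
dist_info_files = ["pytest_mock/__init__.py", "pytest_mock/plugin.py"]
egg_files = ["src/pytest_mock/__init__.py", "setup.py"]

assert list(_iter_rewritable_modules(dist_info_files)) == ["pytest_mock"]
assert list(_iter_rewritable_modules(egg_files)) == ["pytest_mock"]  # falls back to stripping "src/"
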
+ new_package_files = [] + for fn in package_files: + parts = fn.split("/") + new_fn = "/".join(parts[1:]) + if new_fn: + new_package_files.append(new_fn) + if new_package_files: + yield from _iter_rewritable_modules(new_package_files) + + +class _DeprecatedInicfgProxy(MutableMapping[str, Any]): + """Compatibility proxy for the deprecated Config.inicfg.""" + + __slots__ = ("_config",) + + def __init__(self, config: Config) -> None: + self._config = config + + def __getitem__(self, key: str) -> Any: + return self._config._inicfg[key].value + + def __setitem__(self, key: str, value: Any) -> None: + self._config._inicfg[key] = ConfigValue(value, origin="override", mode="toml") + + def __delitem__(self, key: str) -> None: + del self._config._inicfg[key] + + def __iter__(self) -> Iterator[str]: + return iter(self._config._inicfg) + + def __len__(self) -> int: + return len(self._config._inicfg) + + +@final +class Config: + """Access to configuration values, pluginmanager and plugin hooks. + + :param PytestPluginManager pluginmanager: + A pytest PluginManager. + + :param InvocationParams invocation_params: + Object containing parameters regarding the :func:`pytest.main` + invocation. + """ + + @final + @dataclasses.dataclass(frozen=True) + class InvocationParams: + """Holds parameters passed during :func:`pytest.main`. + + The object attributes are read-only. + + .. versionadded:: 5.1 + + .. note:: + + Note that the environment variable ``PYTEST_ADDOPTS`` and the ``addopts`` + configuration option are handled by pytest, not being included in the ``args`` attribute. + + Plugins accessing ``InvocationParams`` must be aware of that. + """ + + args: tuple[str, ...] + """The command-line arguments as passed to :func:`pytest.main`.""" + plugins: Sequence[str | _PluggyPlugin] | None + """Extra plugins, might be `None`.""" + dir: pathlib.Path + """The directory from which :func:`pytest.main` was invoked.""" + + def __init__( + self, + *, + args: Iterable[str], + plugins: Sequence[str | _PluggyPlugin] | None, + dir: pathlib.Path, + ) -> None: + object.__setattr__(self, "args", tuple(args)) + object.__setattr__(self, "plugins", plugins) + object.__setattr__(self, "dir", dir) + + class ArgsSource(enum.Enum): + """Indicates the source of the test arguments. + + .. versionadded:: 7.2 + """ + + #: Command line arguments. + ARGS = enum.auto() + #: Invocation directory. + INVOCATION_DIR = enum.auto() + INCOVATION_DIR = INVOCATION_DIR # backwards compatibility alias + #: 'testpaths' configuration value. + TESTPATHS = enum.auto() + + # Set by cacheprovider plugin. + cache: Cache + + def __init__( + self, + pluginmanager: PytestPluginManager, + *, + invocation_params: InvocationParams | None = None, + ) -> None: + if invocation_params is None: + invocation_params = self.InvocationParams( + args=(), plugins=None, dir=pathlib.Path.cwd() + ) + + self.option = argparse.Namespace() + """Access to command line option as attributes. + + :type: argparse.Namespace + """ + + self.invocation_params = invocation_params + """The parameters with which pytest was invoked. + + :type: InvocationParams + """ + + self._parser = Parser( + usage=f"%(prog)s [options] [{FILE_OR_DIR}] [{FILE_OR_DIR}] [...]", + processopt=self._processopt, + _ispytest=True, + ) + self.pluginmanager = pluginmanager + """The plugin manager handles plugin registration and hook invocation. + + :type: PytestPluginManager + """ + + self.stash = Stash() + """A place where plugins can store information on the config for their + own use. 
+ + :type: Stash + """ + # Deprecated alias. Was never public. Can be removed in a few releases. + self._store = self.stash + + self.trace = self.pluginmanager.trace.root.get("config") + self.hook: pluggy.HookRelay = PathAwareHookProxy(self.pluginmanager.hook) # type: ignore[assignment] + self._inicache: dict[str, Any] = {} + self._opt2dest: dict[str, str] = {} + self._cleanup_stack = contextlib.ExitStack() + self.pluginmanager.register(self, "pytestconfig") + self._configured = False + self.hook.pytest_addoption.call_historic( + kwargs=dict(parser=self._parser, pluginmanager=self.pluginmanager) + ) + self.args_source = Config.ArgsSource.ARGS + self.args: list[str] = [] + + @property + def inicfg(self) -> _DeprecatedInicfgProxy: + return _DeprecatedInicfgProxy(self) + + @property + def rootpath(self) -> pathlib.Path: + """The path to the :ref:`rootdir `. + + .. versionadded:: 6.1 + """ + return self._rootpath + + @property + def inipath(self) -> pathlib.Path | None: + """The path to the :ref:`configfile `. + + .. versionadded:: 6.1 + """ + return self._inipath + + def add_cleanup(self, func: Callable[[], None]) -> None: + """Add a function to be called when the config object gets out of + use (usually coinciding with pytest_unconfigure). + """ + self._cleanup_stack.callback(func) + + def _do_configure(self) -> None: + assert not self._configured + self._configured = True + self.hook.pytest_configure.call_historic(kwargs=dict(config=self)) + + def _ensure_unconfigure(self) -> None: + try: + if self._configured: + self._configured = False + try: + self.hook.pytest_unconfigure(config=self) + finally: + self.hook.pytest_configure._call_history = [] + finally: + try: + self._cleanup_stack.close() + finally: + self._cleanup_stack = contextlib.ExitStack() + + def get_terminal_writer(self) -> TerminalWriter: + terminalreporter: TerminalReporter | None = self.pluginmanager.get_plugin( + "terminalreporter" + ) + assert terminalreporter is not None + return terminalreporter._tw + + def pytest_cmdline_parse( + self, pluginmanager: PytestPluginManager, args: list[str] + ) -> Config: + try: + self.parse(args) + except UsageError: + # Handle `--version --version` and `--help` here in a minimal fashion. + # This gets done via helpconfig normally, but its + # pytest_cmdline_main is not called in case of errors. + if getattr(self.option, "version", False) or "--version" in args: + from _pytest.helpconfig import show_version_verbose + + # Note that `--version` (single argument) is handled early by `Config.main()`, so the only + # way we are reaching this point is via `--version --version`. 
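# Plugin-side sketch of the attributes set up in Config.__init__ above (stash,
# add_cleanup) plus rootpath/inipath; the stash key name is made up.
import pytest

_ACME_KEY = pytest.StashKey[str]()   # hypothetical plugin-private key

def pytest_configure(config: pytest.Config) -> None:
    config.stash[_ACME_KEY] = f"rootdir={config.rootpath}, configfile={config.inipath}"
    config.add_cleanup(lambda: print("cleaning up", config.stash[_ACME_KEY]))
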
+ show_version_verbose(self) + elif ( + getattr(self.option, "help", False) or "--help" in args or "-h" in args + ): + self._parser.optparser.print_help() + sys.stdout.write( + "\nNOTE: displaying only minimal help due to UsageError.\n\n" + ) + + raise + + return self + + def notify_exception( + self, + excinfo: ExceptionInfo[BaseException], + option: argparse.Namespace | None = None, + ) -> None: + if option and getattr(option, "fulltrace", False): + style: TracebackStyle = "long" + else: + style = "native" + excrepr = excinfo.getrepr( + funcargs=True, showlocals=getattr(option, "showlocals", False), style=style + ) + res = self.hook.pytest_internalerror(excrepr=excrepr, excinfo=excinfo) + if not any(res): + for line in str(excrepr).split("\n"): + sys.stderr.write(f"INTERNALERROR> {line}\n") + sys.stderr.flush() + + def cwd_relative_nodeid(self, nodeid: str) -> str: + # nodeid's are relative to the rootpath, compute relative to cwd. + if self.invocation_params.dir != self.rootpath: + base_path_part, *nodeid_part = nodeid.split("::") + # Only process path part + fullpath = self.rootpath / base_path_part + relative_path = bestrelpath(self.invocation_params.dir, fullpath) + + nodeid = "::".join([relative_path, *nodeid_part]) + return nodeid + + @classmethod + def fromdictargs(cls, option_dict: Mapping[str, Any], args: list[str]) -> Config: + """Constructor usable for subprocesses.""" + config = get_config(args) + config.option.__dict__.update(option_dict) + config.parse(args, addopts=False) + for x in config.option.plugins: + config.pluginmanager.consider_pluginarg(x) + return config + + def _processopt(self, opt: Argument) -> None: + for name in opt._short_opts + opt._long_opts: + self._opt2dest[name] = opt.dest + + if hasattr(opt, "default"): + if not hasattr(self.option, opt.dest): + setattr(self.option, opt.dest, opt.default) + + @hookimpl(trylast=True) + def pytest_load_initial_conftests(self, early_config: Config) -> None: + # We haven't fully parsed the command line arguments yet, so + # early_config.args it not set yet. But we need it for + # discovering the initial conftests. So "pre-run" the logic here. + # It will be done for real in `parse()`. + args, _args_source = early_config._decide_args( + args=early_config.known_args_namespace.file_or_dir, + pyargs=early_config.known_args_namespace.pyargs, + testpaths=early_config.getini("testpaths"), + invocation_dir=early_config.invocation_params.dir, + rootpath=early_config.rootpath, + warn=False, + ) + self.pluginmanager._set_initial_conftests( + args=args, + pyargs=early_config.known_args_namespace.pyargs, + noconftest=early_config.known_args_namespace.noconftest, + rootpath=early_config.rootpath, + confcutdir=early_config.known_args_namespace.confcutdir, + invocation_dir=early_config.invocation_params.dir, + importmode=early_config.known_args_namespace.importmode, + consider_namespace_packages=early_config.getini( + "consider_namespace_packages" + ), + ) + + def _consider_importhook(self) -> None: + """Install the PEP 302 import hook if using assertion rewriting. + + Needs to parse the --assert= option from the commandline + and find all the installed plugins to mark them for rewriting + by the importhook. 
+ """ + mode = getattr(self.known_args_namespace, "assertmode", "plain") + + disable_autoload = getattr( + self.known_args_namespace, "disable_plugin_autoload", False + ) or bool(os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD")) + if mode == "rewrite": + import _pytest.assertion + + try: + hook = _pytest.assertion.install_importhook(self) + except SystemError: + mode = "plain" + else: + self._mark_plugins_for_rewrite(hook, disable_autoload) + self._warn_about_missing_assertion(mode) + + def _mark_plugins_for_rewrite( + self, hook: AssertionRewritingHook, disable_autoload: bool + ) -> None: + """Given an importhook, mark for rewrite any top-level + modules or packages in the distribution package for + all pytest plugins.""" + self.pluginmanager.rewrite_hook = hook + + if disable_autoload: + # We don't autoload from distribution package entry points, + # no need to continue. + return + + package_files = ( + str(file) + for dist in importlib.metadata.distributions() + if any(ep.group == "pytest11" for ep in dist.entry_points) + for file in dist.files or [] + ) + + for name in _iter_rewritable_modules(package_files): + hook.mark_rewrite(name) + + def _configure_python_path(self) -> None: + # `pythonpath = a b` will set `sys.path` to `[a, b, x, y, z, ...]` + for path in reversed(self.getini("pythonpath")): + sys.path.insert(0, str(path)) + self.add_cleanup(self._unconfigure_python_path) + + def _unconfigure_python_path(self) -> None: + for path in self.getini("pythonpath"): + path_str = str(path) + if path_str in sys.path: + sys.path.remove(path_str) + + def _validate_args(self, args: list[str], via: str) -> list[str]: + """Validate known args.""" + self._parser.extra_info["config source"] = via + try: + self._parser.parse_known_and_unknown_args( + args, namespace=copy.copy(self.option) + ) + finally: + self._parser.extra_info.pop("config source", None) + + return args + + def _decide_args( + self, + *, + args: list[str], + pyargs: bool, + testpaths: list[str], + invocation_dir: pathlib.Path, + rootpath: pathlib.Path, + warn: bool, + ) -> tuple[list[str], ArgsSource]: + """Decide the args (initial paths/nodeids) to use given the relevant inputs. + + :param warn: Whether can issue warnings. + + :returns: The args and the args source. Guaranteed to be non-empty. + """ + if args: + source = Config.ArgsSource.ARGS + result = args + else: + if invocation_dir == rootpath: + source = Config.ArgsSource.TESTPATHS + if pyargs: + result = testpaths + else: + result = [] + for path in testpaths: + result.extend(sorted(glob.iglob(path, recursive=True))) + if testpaths and not result: + if warn: + warning_text = ( + "No files were found in testpaths; " + "consider removing or adjusting your testpaths configuration. " + "Searching recursively from the current directory instead." + ) + self.issue_config_time_warning( + PytestConfigWarning(warning_text), stacklevel=3 + ) + else: + result = [] + if not result: + source = Config.ArgsSource.INVOCATION_DIR + result = [str(invocation_dir)] + return result, source + + @hookimpl(wrapper=True) + def pytest_collection(self) -> Generator[None, object, object]: + # Validate invalid configuration keys after collection is done so we + # take in account options added by late-loading conftest files. 
+ try: + return (yield) + finally: + self._validate_config_options() + + def _checkversion(self) -> None: + import pytest + + minver_ini_value = self._inicfg.get("minversion", None) + minver = minver_ini_value.value if minver_ini_value is not None else None + if minver: + # Imported lazily to improve start-up time. + from packaging.version import Version + + if not isinstance(minver, str): + raise pytest.UsageError( + f"{self.inipath}: 'minversion' must be a single value" + ) + + if Version(minver) > Version(pytest.__version__): + raise pytest.UsageError( + f"{self.inipath}: 'minversion' requires pytest-{minver}, actual pytest-{pytest.__version__}'" + ) + + def _validate_config_options(self) -> None: + for key in sorted(self._get_unknown_ini_keys()): + self._warn_or_fail_if_strict(f"Unknown config option: {key}\n") + + def _validate_plugins(self) -> None: + required_plugins = sorted(self.getini("required_plugins")) + if not required_plugins: + return + + # Imported lazily to improve start-up time. + from packaging.requirements import InvalidRequirement + from packaging.requirements import Requirement + from packaging.version import Version + + plugin_info = self.pluginmanager.list_plugin_distinfo() + plugin_dist_info = {dist.project_name: dist.version for _, dist in plugin_info} + + missing_plugins = [] + for required_plugin in required_plugins: + try: + req = Requirement(required_plugin) + except InvalidRequirement: + missing_plugins.append(required_plugin) + continue + + if req.name not in plugin_dist_info: + missing_plugins.append(required_plugin) + elif not req.specifier.contains( + Version(plugin_dist_info[req.name]), prereleases=True + ): + missing_plugins.append(required_plugin) + + if missing_plugins: + raise UsageError( + "Missing required plugins: {}".format(", ".join(missing_plugins)), + ) + + def _warn_or_fail_if_strict(self, message: str) -> None: + strict_config = self.getini("strict_config") + if strict_config is None: + strict_config = self.getini("strict") + if strict_config: + raise UsageError(message) + + self.issue_config_time_warning(PytestConfigWarning(message), stacklevel=3) + + def _get_unknown_ini_keys(self) -> set[str]: + known_keys = self._parser._inidict.keys() | self._parser._ini_aliases.keys() + return self._inicfg.keys() - known_keys + + def parse(self, args: list[str], addopts: bool = True) -> None: + # Parse given cmdline arguments into this config object. 
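# Minimal sketch of the version check _validate_plugins() performs above; the
# installed-distribution mapping and the requirement string are made up.
from packaging.requirements import Requirement
from packaging.version import Version

installed = {"pytest-xdist": "3.5.0"}
req = Requirement("pytest-xdist>=3.0")
satisfied = req.name in installed and req.specifier.contains(
    Version(installed[req.name]), prereleases=True
)
assert satisfied
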
+ assert self.args == [], ( + "can only parse cmdline args at most once per Config object" + ) + + self.hook.pytest_addhooks.call_historic( + kwargs=dict(pluginmanager=self.pluginmanager) + ) + + if addopts: + env_addopts = os.environ.get("PYTEST_ADDOPTS", "") + if len(env_addopts): + args[:] = ( + self._validate_args(shlex.split(env_addopts), "via PYTEST_ADDOPTS") + + args + ) + + ns = self._parser.parse_known_args(args, namespace=copy.copy(self.option)) + rootpath, inipath, inicfg, ignored_config_files = determine_setup( + inifile=ns.inifilename, + override_ini=ns.override_ini, + args=ns.file_or_dir, + rootdir_cmd_arg=ns.rootdir or None, + invocation_dir=self.invocation_params.dir, + ) + self._rootpath = rootpath + self._inipath = inipath + self._ignored_config_files = ignored_config_files + self._inicfg = inicfg + self._parser.extra_info["rootdir"] = str(self.rootpath) + self._parser.extra_info["inifile"] = str(self.inipath) + + self._parser.addini("addopts", "Extra command line options", "args") + self._parser.addini("minversion", "Minimally required pytest version") + self._parser.addini( + "pythonpath", type="paths", help="Add paths to sys.path", default=[] + ) + self._parser.addini( + "required_plugins", + "Plugins that must be present for pytest to run", + type="args", + default=[], + ) + + if addopts: + args[:] = ( + self._validate_args(self.getini("addopts"), "via addopts config") + args + ) + + self.known_args_namespace = self._parser.parse_known_args( + args, namespace=copy.copy(self.option) + ) + self._checkversion() + self._consider_importhook() + self._configure_python_path() + self.pluginmanager.consider_preparse(args, exclude_only=False) + if ( + not os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD") + and not self.known_args_namespace.disable_plugin_autoload + ): + # Autoloading from distribution package entry point has + # not been disabled. + self.pluginmanager.load_setuptools_entrypoints("pytest11") + # Otherwise only plugins explicitly specified in PYTEST_PLUGINS + # are going to be loaded. + self.pluginmanager.consider_env() + + self._parser.parse_known_args(args, namespace=self.known_args_namespace) + + self._validate_plugins() + self._warn_about_skipped_plugins() + + if self.known_args_namespace.confcutdir is None: + if self.inipath is not None: + confcutdir = str(self.inipath.parent) + else: + confcutdir = str(self.rootpath) + self.known_args_namespace.confcutdir = confcutdir + try: + self.hook.pytest_load_initial_conftests( + early_config=self, args=args, parser=self._parser + ) + except ConftestImportFailure as e: + if self.known_args_namespace.help or self.known_args_namespace.version: + # we don't want to prevent --help/--version to work + # so just let it pass and print a warning at the end + self.issue_config_time_warning( + PytestConfigWarning(f"could not load initial conftests: {e.path}"), + stacklevel=2, + ) + else: + raise + + try: + self._parser.parse(args, namespace=self.option) + except PrintHelp: + return + + self.args, self.args_source = self._decide_args( + args=getattr(self.option, FILE_OR_DIR), + pyargs=self.option.pyargs, + testpaths=self.getini("testpaths"), + invocation_dir=self.invocation_params.dir, + rootpath=self.rootpath, + warn=True, + ) + + def issue_config_time_warning(self, warning: Warning, stacklevel: int) -> None: + """Issue and handle a warning during the "configure" stage. 
+ + During ``pytest_configure`` we can't capture warnings using the ``catch_warnings_for_item`` + function because it is not possible to have hook wrappers around ``pytest_configure``. + + This function is mainly intended for plugins that need to issue warnings during + ``pytest_configure`` (or similar stages). + + :param warning: The warning instance. + :param stacklevel: stacklevel forwarded to warnings.warn. + """ + if self.pluginmanager.is_blocked("warnings"): + return + + cmdline_filters = self.known_args_namespace.pythonwarnings or [] + config_filters = self.getini("filterwarnings") + + with warnings.catch_warnings(record=True) as records: + warnings.simplefilter("always", type(warning)) + apply_warning_filters(config_filters, cmdline_filters) + warnings.warn(warning, stacklevel=stacklevel) + + if records: + frame = sys._getframe(stacklevel - 1) + location = frame.f_code.co_filename, frame.f_lineno, frame.f_code.co_name + self.hook.pytest_warning_recorded.call_historic( + kwargs=dict( + warning_message=records[0], + when="config", + nodeid="", + location=location, + ) + ) + + def addinivalue_line(self, name: str, line: str) -> None: + """Add a line to a configuration option. The option must have been + declared but might not yet be set in which case the line becomes + the first line in its value.""" + x = self.getini(name) + assert isinstance(x, list) + x.append(line) # modifies the cached list inline + + def getini(self, name: str) -> Any: + """Return configuration value the an :ref:`configuration file `. + + If a configuration value is not defined in a + :ref:`configuration file `, then the ``default`` value + provided while registering the configuration through + :func:`parser.addini ` will be returned. + Please note that you can even provide ``None`` as a valid + default value. + + If ``default`` is not provided while registering using + :func:`parser.addini `, then a default value + based on the ``type`` parameter passed to + :func:`parser.addini ` will be returned. + The default values based on ``type`` are: + ``paths``, ``pathlist``, ``args`` and ``linelist`` : empty list ``[]`` + ``bool`` : ``False`` + ``string`` : empty string ``""`` + ``int`` : ``0`` + ``float`` : ``0.0`` + + If neither the ``default`` nor the ``type`` parameter is passed + while registering the configuration through + :func:`parser.addini `, then the configuration + is treated as a string and a default empty string '' is returned. + + If the specified name hasn't been registered through a prior + :func:`parser.addini ` call (usually from a + plugin), a ValueError is raised. + """ + canonical_name = self._parser._ini_aliases.get(name, name) + try: + return self._inicache[canonical_name] + except KeyError: + pass + self._inicache[canonical_name] = val = self._getini(canonical_name) + return val + + # Meant for easy monkeypatching by legacypath plugin. + # Can be inlined back (with no cover removed) once legacypath is gone. + def _getini_unknown_type(self, name: str, type: str, value: object): + msg = ( + f"Option {name} has unknown configuration type {type} with value {value!r}" + ) + raise ValueError(msg) # pragma: no cover + + def _getini(self, name: str): + # If this is an alias, resolve to canonical name. 
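# Conftest-level sketch of registering and reading an option as described in the
# getini() docstring above; "acme_timeout" is a hypothetical option name.
import pytest

def pytest_addoption(parser: pytest.Parser) -> None:
    parser.addini("acme_timeout", help="per-test timeout in seconds", default="10")

def pytest_configure(config: pytest.Config) -> None:
    # Registered without an explicit type, so the value comes back as a string.
    timeout = float(config.getini("acme_timeout"))
    print("timeout:", timeout)
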
+ canonical_name = self._parser._ini_aliases.get(name, name) + + try: + _description, type, default = self._parser._inidict[canonical_name] + except KeyError as e: + raise ValueError(f"unknown configuration value: {name!r}") from e + + # Collect all possible values (canonical name + aliases) from _inicfg. + # Each candidate is (ConfigValue, is_canonical). + candidates = [] + if canonical_name in self._inicfg: + candidates.append((self._inicfg[canonical_name], True)) + for alias, target in self._parser._ini_aliases.items(): + if target == canonical_name and alias in self._inicfg: + candidates.append((self._inicfg[alias], False)) + + if not candidates: + return default + + # Pick the best candidate based on precedence: + # 1. CLI override takes precedence over file, then + # 2. Canonical name takes precedence over alias. + selected = max(candidates, key=lambda x: (x[0].origin == "override", x[1]))[0] + value = selected.value + mode = selected.mode + + if mode == "ini": + # In ini mode, values are always str | list[str]. + assert isinstance(value, (str, list)) + return self._getini_ini(name, canonical_name, type, value, default) + elif mode == "toml": + return self._getini_toml(name, canonical_name, type, value, default) + else: + assert_never(mode) + + def _getini_ini( + self, + name: str, + canonical_name: str, + type: str, + value: str | list[str], + default: Any, + ): + """Handle config values read in INI mode. + + In INI mode, values are stored as str or list[str] only, and coerced + from string based on the registered type. + """ + # Note: some coercions are only required if we are reading from .ini + # files, because the file format doesn't contain type information, but + # when reading from toml (in ini mode) we will get either str or list of + # str values (see load_config_dict_from_file). For example: + # + # ini: + # a_line_list = "tests acceptance" + # + # in this case, we need to split the string to obtain a list of strings. + # + # toml (ini mode): + # a_line_list = ["tests", "acceptance"] + # + # in this case, we already have a list ready to use. + if type == "paths": + dp = ( + self.inipath.parent + if self.inipath is not None + else self.invocation_params.dir + ) + input_values = shlex.split(value) if isinstance(value, str) else value + return [dp / x for x in input_values] + elif type == "args": + return shlex.split(value) if isinstance(value, str) else value + elif type == "linelist": + if isinstance(value, str): + return [t for t in map(lambda x: x.strip(), value.split("\n")) if t] + else: + return value + elif type == "bool": + return _strtobool(str(value).strip()) + elif type == "string": + return value + elif type == "int": + if not isinstance(value, str): + raise TypeError( + f"Expected an int string for option {name} of type integer, but got: {value!r}" + ) from None + return int(value) + elif type == "float": + if not isinstance(value, str): + raise TypeError( + f"Expected a float string for option {name} of type float, but got: {value!r}" + ) from None + return float(value) + else: + return self._getini_unknown_type(name, type, value) + + def _getini_toml( + self, + name: str, + canonical_name: str, + type: str, + value: object, + default: Any, + ): + """Handle TOML config values with strict type validation and no coercion. + + In TOML mode, values already have native types from TOML parsing. + We validate types match expectations exactly, including list items. + """ + value_type = builtins.type(value).__name__ + if type == "paths": + # Expect a list of strings. 
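# Contrast sketch for the INI-mode coercion implemented in _getini_ini() above:
# string values for "args"/"paths" are split like a shell command line, whereas the
# TOML handling below requires a native list of strings.
import shlex

assert shlex.split("src tests 'integration tests'") == ["src", "tests", "integration tests"]
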
+ if not isinstance(value, list): + raise TypeError( + f"{self.inipath}: config option '{name}' expects a list for type 'paths', " + f"got {value_type}: {value!r}" + ) + for i, item in enumerate(value): + if not isinstance(item, str): + item_type = builtins.type(item).__name__ + raise TypeError( + f"{self.inipath}: config option '{name}' expects a list of strings, " + f"but item at index {i} is {item_type}: {item!r}" + ) + dp = ( + self.inipath.parent + if self.inipath is not None + else self.invocation_params.dir + ) + return [dp / x for x in value] + elif type in {"args", "linelist"}: + # Expect a list of strings. + if not isinstance(value, list): + raise TypeError( + f"{self.inipath}: config option '{name}' expects a list for type '{type}', " + f"got {value_type}: {value!r}" + ) + for i, item in enumerate(value): + if not isinstance(item, str): + item_type = builtins.type(item).__name__ + raise TypeError( + f"{self.inipath}: config option '{name}' expects a list of strings, " + f"but item at index {i} is {item_type}: {item!r}" + ) + return list(value) + elif type == "bool": + # Expect a boolean. + if not isinstance(value, bool): + raise TypeError( + f"{self.inipath}: config option '{name}' expects a bool, " + f"got {value_type}: {value!r}" + ) + return value + elif type == "int": + # Expect an integer (but not bool, which is a subclass of int). + if not isinstance(value, int) or isinstance(value, bool): + raise TypeError( + f"{self.inipath}: config option '{name}' expects an int, " + f"got {value_type}: {value!r}" + ) + return value + elif type == "float": + # Expect a float or integer only. + if not isinstance(value, (float, int)) or isinstance(value, bool): + raise TypeError( + f"{self.inipath}: config option '{name}' expects a float, " + f"got {value_type}: {value!r}" + ) + return value + elif type == "string": + # Expect a string. + if not isinstance(value, str): + raise TypeError( + f"{self.inipath}: config option '{name}' expects a string, " + f"got {value_type}: {value!r}" + ) + return value + else: + return self._getini_unknown_type(name, type, value) + + def _getconftest_pathlist( + self, name: str, path: pathlib.Path + ) -> list[pathlib.Path] | None: + try: + mod, relroots = self.pluginmanager._rget_with_confmod(name, path) + except KeyError: + return None + assert mod.__file__ is not None + modpath = pathlib.Path(mod.__file__).parent + values: list[pathlib.Path] = [] + for relroot in relroots: + if isinstance(relroot, os.PathLike): + relroot = pathlib.Path(relroot) + else: + relroot = relroot.replace("/", os.sep) + relroot = absolutepath(modpath / relroot) + values.append(relroot) + return values + + def getoption(self, name: str, default: Any = notset, skip: bool = False): + """Return command line option value. + + :param name: Name of the option. You may also specify + the literal ``--OPT`` option instead of the "dest" option name. + :param default: Fallback value if no option of that name is **declared** via :hook:`pytest_addoption`. + Note this parameter will be ignored when the option is **declared** even if the option's value is ``None``. + :param skip: If ``True``, raise :func:`pytest.skip` if option is undeclared or has a ``None`` value. + Note that even if ``True``, if a default was specified it will be returned instead of a skip. 
+ """ + name = self._opt2dest.get(name, name) + try: + val = getattr(self.option, name) + if val is None and skip: + raise AttributeError(name) + return val + except AttributeError as e: + if default is not notset: + return default + if skip: + import pytest + + pytest.skip(f"no {name!r} option found") + raise ValueError(f"no option named {name!r}") from e + + def getvalue(self, name: str, path=None): + """Deprecated, use getoption() instead.""" + return self.getoption(name) + + def getvalueorskip(self, name: str, path=None): + """Deprecated, use getoption(skip=True) instead.""" + return self.getoption(name, skip=True) + + #: Verbosity type for failed assertions (see :confval:`verbosity_assertions`). + VERBOSITY_ASSERTIONS: Final = "assertions" + #: Verbosity type for test case execution (see :confval:`verbosity_test_cases`). + VERBOSITY_TEST_CASES: Final = "test_cases" + #: Verbosity type for failed subtests (see :confval:`verbosity_subtests`). + VERBOSITY_SUBTESTS: Final = "subtests" + + _VERBOSITY_INI_DEFAULT: Final = "auto" + + def get_verbosity(self, verbosity_type: str | None = None) -> int: + r"""Retrieve the verbosity level for a fine-grained verbosity type. + + :param verbosity_type: Verbosity type to get level for. If a level is + configured for the given type, that value will be returned. If the + given type is not a known verbosity type, the global verbosity + level will be returned. If the given type is None (default), the + global verbosity level will be returned. + + To configure a level for a fine-grained verbosity type, the + configuration file should have a setting for the configuration name + and a numeric value for the verbosity level. A special value of "auto" + can be used to explicitly use the global verbosity level. + + Example: + + .. tab:: toml + + .. code-block:: toml + + [tool.pytest] + verbosity_assertions = 2 + + .. tab:: ini + + .. code-block:: ini + + [pytest] + verbosity_assertions = 2 + + .. code-block:: console + + pytest -v + + .. code-block:: python + + print(config.get_verbosity()) # 1 + print(config.get_verbosity(Config.VERBOSITY_ASSERTIONS)) # 2 + """ + global_level = self.getoption("verbose", default=0) + assert isinstance(global_level, int) + if verbosity_type is None: + return global_level + + ini_name = Config._verbosity_ini_name(verbosity_type) + if ini_name not in self._parser._inidict: + return global_level + + level = self.getini(ini_name) + if level == Config._VERBOSITY_INI_DEFAULT: + return global_level + + return int(level) + + @staticmethod + def _verbosity_ini_name(verbosity_type: str) -> str: + return f"verbosity_{verbosity_type}" + + @staticmethod + def _add_verbosity_ini(parser: Parser, verbosity_type: str, help: str) -> None: + """Add a output verbosity configuration option for the given output type. + + :param parser: Parser for command line arguments and config-file values. + :param verbosity_type: Fine-grained verbosity category. + :param help: Description of the output this type controls. + + The value should be retrieved via a call to + :py:func:`config.get_verbosity(type) `. + """ + parser.addini( + Config._verbosity_ini_name(verbosity_type), + help=help, + type="string", + default=Config._VERBOSITY_INI_DEFAULT, + ) + + def _warn_about_missing_assertion(self, mode: str) -> None: + if not _assertion_supported(): + if mode == "plain": + warning_text = ( + "ASSERTIONS ARE NOT EXECUTED" + " and FAILING TESTS WILL PASS. Are you" + " using python -O?" 
+ ) + else: + warning_text = ( + "assertions not in test modules or" + " plugins will be ignored" + " because assert statements are not executed " + "by the underlying Python interpreter " + "(are you using python -O?)\n" + ) + self.issue_config_time_warning( + PytestConfigWarning(warning_text), + stacklevel=3, + ) + + def _warn_about_skipped_plugins(self) -> None: + for module_name, msg in self.pluginmanager.skipped_plugins: + self.issue_config_time_warning( + PytestConfigWarning(f"skipped plugin {module_name!r}: {msg}"), + stacklevel=2, + ) + + +def _assertion_supported() -> bool: + try: + assert False + except AssertionError: + return True + else: + return False # type: ignore[unreachable] + + +def create_terminal_writer( + config: Config, file: TextIO | None = None +) -> TerminalWriter: + """Create a TerminalWriter instance configured according to the options + in the config object. + + Every code which requires a TerminalWriter object and has access to a + config object should use this function. + """ + tw = TerminalWriter(file=file) + + if config.option.color == "yes": + tw.hasmarkup = True + elif config.option.color == "no": + tw.hasmarkup = False + + if config.option.code_highlight == "yes": + tw.code_highlight = True + elif config.option.code_highlight == "no": + tw.code_highlight = False + + return tw + + +def _strtobool(val: str) -> bool: + """Convert a string representation of truth to True or False. + + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. + + .. note:: Copied from distutils.util. + """ + val = val.lower() + if val in ("y", "yes", "t", "true", "on", "1"): + return True + elif val in ("n", "no", "f", "false", "off", "0"): + return False + else: + raise ValueError(f"invalid truth value {val!r}") + + +@lru_cache(maxsize=50) +def parse_warning_filter( + arg: str, *, escape: bool +) -> tuple[warnings._ActionKind, str, type[Warning], str, int]: + """Parse a warnings filter string. + + This is copied from warnings._setoption with the following changes: + + * Does not apply the filter. + * Escaping is optional. + * Raises UsageError so we get nice error messages on failure. 
+ """ + __tracebackhide__ = True + error_template = dedent( + f"""\ + while parsing the following warning configuration: + + {arg} + + This error occurred: + + {{error}} + """ + ) + + parts = arg.split(":") + if len(parts) > 5: + doc_url = ( + "https://docs.python.org/3/library/warnings.html#describing-warning-filters" + ) + error = dedent( + f"""\ + Too many fields ({len(parts)}), expected at most 5 separated by colons: + + action:message:category:module:line + + For more information please consult: {doc_url} + """ + ) + raise UsageError(error_template.format(error=error)) + + while len(parts) < 5: + parts.append("") + action_, message, category_, module, lineno_ = (s.strip() for s in parts) + try: + action: warnings._ActionKind = warnings._getaction(action_) # type: ignore[attr-defined] + except warnings._OptionError as e: + raise UsageError(error_template.format(error=str(e))) from None + try: + category: type[Warning] = _resolve_warning_category(category_) + except ImportError: + raise + except Exception: + exc_info = ExceptionInfo.from_current() + exception_text = exc_info.getrepr(style="native") + raise UsageError(error_template.format(error=exception_text)) from None + if message and escape: + message = re.escape(message) + if module and escape: + module = re.escape(module) + r"\Z" + if lineno_: + try: + lineno = int(lineno_) + if lineno < 0: + raise ValueError("number is negative") + except ValueError as e: + raise UsageError( + error_template.format(error=f"invalid lineno {lineno_!r}: {e}") + ) from None + else: + lineno = 0 + try: + re.compile(message) + re.compile(module) + except re.error as e: + raise UsageError( + error_template.format(error=f"Invalid regex {e.pattern!r}: {e}") + ) from None + return action, message, category, module, lineno + + +def _resolve_warning_category(category: str) -> type[Warning]: + """ + Copied from warnings._getcategory, but changed so it lets exceptions (specially ImportErrors) + propagate so we can get access to their tracebacks (#9218). + """ + __tracebackhide__ = True + if not category: + return Warning + + if "." not in category: + import builtins as m + + klass = category + else: + module, _, klass = category.rpartition(".") + m = __import__(module, None, None, [klass]) + cat = getattr(m, klass) + if not issubclass(cat, Warning): + raise UsageError(f"{cat} is not a Warning subclass") + return cast(type[Warning], cat) + + +def apply_warning_filters( + config_filters: Iterable[str], cmdline_filters: Iterable[str] +) -> None: + """Applies pytest-configured filters to the warnings module""" + # Filters should have this precedence: cmdline options, config. + # Filters should be applied in the inverse order of precedence. 
+ for arg in config_filters: + try: + warnings.filterwarnings(*parse_warning_filter(arg, escape=False)) + except ImportError as e: + warnings.warn( + f"Failed to import filter module '{e.name}': {arg}", PytestConfigWarning + ) + continue + + for arg in cmdline_filters: + try: + warnings.filterwarnings(*parse_warning_filter(arg, escape=True)) + except ImportError as e: + warnings.warn( + f"Failed to import filter module '{e.name}': {arg}", PytestConfigWarning + ) + continue diff --git a/.venv/lib/python3.12/site-packages/_pytest/config/argparsing.py b/.venv/lib/python3.12/site-packages/_pytest/config/argparsing.py new file mode 100644 index 0000000..8216ad8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/config/argparsing.py @@ -0,0 +1,578 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +import argparse +from collections.abc import Callable +from collections.abc import Mapping +from collections.abc import Sequence +import os +import sys +from typing import Any +from typing import final +from typing import Literal +from typing import NoReturn + +from .exceptions import UsageError +import _pytest._io +from _pytest.deprecated import check_ispytest + + +FILE_OR_DIR = "file_or_dir" + + +class NotSet: + def __repr__(self) -> str: + return "" + + +NOT_SET = NotSet() + + +@final +class Parser: + """Parser for command line arguments and config-file values. + + :ivar extra_info: Dict of generic param -> value to display in case + there's an error processing the command line arguments. + """ + + def __init__( + self, + usage: str | None = None, + processopt: Callable[[Argument], None] | None = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + + from _pytest._argcomplete import filescompleter + + self._processopt = processopt + self.extra_info: dict[str, Any] = {} + self.optparser = PytestArgumentParser(self, usage, self.extra_info) + anonymous_arggroup = self.optparser.add_argument_group("Custom options") + self._anonymous = OptionGroup( + anonymous_arggroup, "_anonymous", self, _ispytest=True + ) + self._groups = [self._anonymous] + file_or_dir_arg = self.optparser.add_argument(FILE_OR_DIR, nargs="*") + file_or_dir_arg.completer = filescompleter # type: ignore + + self._inidict: dict[str, tuple[str, str, Any]] = {} + # Maps alias -> canonical name. + self._ini_aliases: dict[str, str] = {} + + @property + def prog(self) -> str: + return self.optparser.prog + + @prog.setter + def prog(self, value: str) -> None: + self.optparser.prog = value + + def processoption(self, option: Argument) -> None: + if self._processopt: + if option.dest: + self._processopt(option) + + def getgroup( + self, name: str, description: str = "", after: str | None = None + ) -> OptionGroup: + """Get (or create) a named option Group. + + :param name: Name of the option group. + :param description: Long description for --help output. + :param after: Name of another group, used for ordering --help output. + :returns: The option group. + + The returned group object has an ``addoption`` method with the same + signature as :func:`parser.addoption ` but + will be shown in the respective group in the output of + ``pytest --help``. 
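+
+        Illustrative usage sketch (added in this document, not part of the
+        upstream docstring), e.g. from a plugin's ``pytest_addoption`` hook::
+
+            def pytest_addoption(parser):
+                group = parser.getgroup("myplugin", "my plugin options")
+                group.addoption(
+                    "--my-flag", action="store_true", help="enable my behaviour"
+                )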
+ """ + for group in self._groups: + if group.name == name: + return group + + arggroup = self.optparser.add_argument_group(description or name) + group = OptionGroup(arggroup, name, self, _ispytest=True) + i = 0 + for i, grp in enumerate(self._groups): + if grp.name == after: + break + self._groups.insert(i + 1, group) + # argparse doesn't provide a way to control `--help` order, so must + # access its internals ☹. + self.optparser._action_groups.insert(i + 1, self.optparser._action_groups.pop()) + return group + + def addoption(self, *opts: str, **attrs: Any) -> None: + """Register a command line option. + + :param opts: + Option names, can be short or long options. + :param attrs: + Same attributes as the argparse library's :meth:`add_argument() + ` function accepts. + + After command line parsing, options are available on the pytest config + object via ``config.option.NAME`` where ``NAME`` is usually set + by passing a ``dest`` attribute, for example + ``addoption("--long", dest="NAME", ...)``. + """ + self._anonymous.addoption(*opts, **attrs) + + def parse( + self, + args: Sequence[str | os.PathLike[str]], + namespace: argparse.Namespace | None = None, + ) -> argparse.Namespace: + """Parse the arguments. + + Unlike ``parse_known_args`` and ``parse_known_and_unknown_args``, + raises PrintHelp on `--help` and UsageError on unknown flags + + :meta private: + """ + from _pytest._argcomplete import try_argcomplete + + try_argcomplete(self.optparser) + strargs = [os.fspath(x) for x in args] + if namespace is None: + namespace = argparse.Namespace() + try: + namespace._raise_print_help = True + return self.optparser.parse_intermixed_args(strargs, namespace=namespace) + finally: + del namespace._raise_print_help + + def parse_known_args( + self, + args: Sequence[str | os.PathLike[str]], + namespace: argparse.Namespace | None = None, + ) -> argparse.Namespace: + """Parse the known arguments at this point. + + :returns: An argparse namespace object. + """ + return self.parse_known_and_unknown_args(args, namespace=namespace)[0] + + def parse_known_and_unknown_args( + self, + args: Sequence[str | os.PathLike[str]], + namespace: argparse.Namespace | None = None, + ) -> tuple[argparse.Namespace, list[str]]: + """Parse the known arguments at this point, and also return the + remaining unknown flag arguments. + + :returns: + A tuple containing an argparse namespace object for the known + arguments, and a list of unknown flag arguments. + """ + strargs = [os.fspath(x) for x in args] + if sys.version_info < (3, 12, 8) or (3, 13) <= sys.version_info < (3, 13, 1): + # Older argparse have a bugged parse_known_intermixed_args. + namespace, unknown = self.optparser.parse_known_args(strargs, namespace) + assert namespace is not None + file_or_dir = getattr(namespace, FILE_OR_DIR) + unknown_flags: list[str] = [] + for arg in unknown: + (unknown_flags if arg.startswith("-") else file_or_dir).append(arg) + return namespace, unknown_flags + else: + return self.optparser.parse_known_intermixed_args(strargs, namespace) + + def addini( + self, + name: str, + help: str, + type: Literal[ + "string", "paths", "pathlist", "args", "linelist", "bool", "int", "float" + ] + | None = None, + default: Any = NOT_SET, + *, + aliases: Sequence[str] = (), + ) -> None: + """Register a configuration file option. + + :param name: + Name of the configuration. + :param type: + Type of the configuration. 
Can be: + + * ``string``: a string + * ``bool``: a boolean + * ``args``: a list of strings, separated as in a shell + * ``linelist``: a list of strings, separated by line breaks + * ``paths``: a list of :class:`pathlib.Path`, separated as in a shell + * ``pathlist``: a list of ``py.path``, separated as in a shell + * ``int``: an integer + * ``float``: a floating-point number + + .. versionadded:: 8.4 + + The ``float`` and ``int`` types. + + For ``paths`` and ``pathlist`` types, they are considered relative to the config-file. + In case the execution is happening without a config-file defined, + they will be considered relative to the current working directory (for example with ``--override-ini``). + + .. versionadded:: 7.0 + The ``paths`` variable type. + + .. versionadded:: 8.1 + Use the current working directory to resolve ``paths`` and ``pathlist`` in the absence of a config-file. + + Defaults to ``string`` if ``None`` or not passed. + :param default: + Default value if no config-file option exists but is queried. + :param aliases: + Additional names by which this option can be referenced. + Aliases resolve to the canonical name. + + .. versionadded:: 9.0 + The ``aliases`` parameter. + + The value of configuration keys can be retrieved via a call to + :py:func:`config.getini(name) `. + """ + assert type in ( + None, + "string", + "paths", + "pathlist", + "args", + "linelist", + "bool", + "int", + "float", + ) + if type is None: + type = "string" + if default is NOT_SET: + default = get_ini_default_for_type(type) + + self._inidict[name] = (help, type, default) + + for alias in aliases: + if alias in self._inidict: + raise ValueError( + f"alias {alias!r} conflicts with existing configuration option" + ) + if (already := self._ini_aliases.get(alias)) is not None: + raise ValueError(f"{alias!r} is already an alias of {already!r}") + self._ini_aliases[alias] = name + + +def get_ini_default_for_type( + type: Literal[ + "string", "paths", "pathlist", "args", "linelist", "bool", "int", "float" + ], +) -> Any: + """ + Used by addini to get the default value for a given config option type, when + default is not supplied. + """ + if type in ("paths", "pathlist", "args", "linelist"): + return [] + elif type == "bool": + return False + elif type == "int": + return 0 + elif type == "float": + return 0.0 + else: + return "" + + +class ArgumentError(Exception): + """Raised if an Argument instance is created with invalid or + inconsistent arguments.""" + + def __init__(self, msg: str, option: Argument | str) -> None: + self.msg = msg + self.option_id = str(option) + + def __str__(self) -> str: + if self.option_id: + return f"option {self.option_id}: {self.msg}" + else: + return self.msg + + +class Argument: + """Class that mimics the necessary behaviour of optparse.Option. + + It's currently a least effort implementation and ignoring choices + and integer prefixes. + + https://docs.python.org/3/library/optparse.html#optparse-standard-option-types + """ + + def __init__(self, *names: str, **attrs: Any) -> None: + """Store params in private vars for use in add_argument.""" + self._attrs = attrs + self._short_opts: list[str] = [] + self._long_opts: list[str] = [] + try: + self.type = attrs["type"] + except KeyError: + pass + try: + # Attribute existence is tested in Config._processopt. 
+ self.default = attrs["default"] + except KeyError: + pass + self._set_opt_strings(names) + dest: str | None = attrs.get("dest") + if dest: + self.dest = dest + elif self._long_opts: + self.dest = self._long_opts[0][2:].replace("-", "_") + else: + try: + self.dest = self._short_opts[0][1:] + except IndexError as e: + self.dest = "???" # Needed for the error repr. + raise ArgumentError("need a long or short option", self) from e + + def names(self) -> list[str]: + return self._short_opts + self._long_opts + + def attrs(self) -> Mapping[str, Any]: + # Update any attributes set by processopt. + for attr in ("default", "dest", "help", self.dest): + try: + self._attrs[attr] = getattr(self, attr) + except AttributeError: + pass + return self._attrs + + def _set_opt_strings(self, opts: Sequence[str]) -> None: + """Directly from optparse. + + Might not be necessary as this is passed to argparse later on. + """ + for opt in opts: + if len(opt) < 2: + raise ArgumentError( + f"invalid option string {opt!r}: " + "must be at least two characters long", + self, + ) + elif len(opt) == 2: + if not (opt[0] == "-" and opt[1] != "-"): + raise ArgumentError( + f"invalid short option string {opt!r}: " + "must be of the form -x, (x any non-dash char)", + self, + ) + self._short_opts.append(opt) + else: + if not (opt[0:2] == "--" and opt[2] != "-"): + raise ArgumentError( + f"invalid long option string {opt!r}: " + "must start with --, followed by non-dash", + self, + ) + self._long_opts.append(opt) + + def __repr__(self) -> str: + args: list[str] = [] + if self._short_opts: + args += ["_short_opts: " + repr(self._short_opts)] + if self._long_opts: + args += ["_long_opts: " + repr(self._long_opts)] + args += ["dest: " + repr(self.dest)] + if hasattr(self, "type"): + args += ["type: " + repr(self.type)] + if hasattr(self, "default"): + args += ["default: " + repr(self.default)] + return "Argument({})".format(", ".join(args)) + + +class OptionGroup: + """A group of options shown in its own section.""" + + def __init__( + self, + arggroup: argparse._ArgumentGroup, + name: str, + parser: Parser | None, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._arggroup = arggroup + self.name = name + self.options: list[Argument] = [] + self.parser = parser + + def addoption(self, *opts: str, **attrs: Any) -> None: + """Add an option to this group. + + If a shortened version of a long option is specified, it will + be suppressed in the help. ``addoption('--twowords', '--two-words')`` + results in help showing ``--two-words`` only, but ``--twowords`` gets + accepted **and** the automatic destination is in ``args.twowords``. + + :param opts: + Option names, can be short or long options. + :param attrs: + Same attributes as the argparse library's :meth:`add_argument() + ` function accepts. 
+ """ + conflict = set(opts).intersection( + name for opt in self.options for name in opt.names() + ) + if conflict: + raise ValueError(f"option names {conflict} already added") + option = Argument(*opts, **attrs) + self._addoption_instance(option, shortupper=False) + + def _addoption(self, *opts: str, **attrs: Any) -> None: + option = Argument(*opts, **attrs) + self._addoption_instance(option, shortupper=True) + + def _addoption_instance(self, option: Argument, shortupper: bool = False) -> None: + if not shortupper: + for opt in option._short_opts: + if opt[0] == "-" and opt[1].islower(): + raise ValueError("lowercase shortoptions reserved") + + if self.parser: + self.parser.processoption(option) + + self._arggroup.add_argument(*option.names(), **option.attrs()) + self.options.append(option) + + +class PytestArgumentParser(argparse.ArgumentParser): + def __init__( + self, + parser: Parser, + usage: str | None, + extra_info: dict[str, str], + ) -> None: + self._parser = parser + super().__init__( + usage=usage, + add_help=False, + formatter_class=DropShorterLongHelpFormatter, + allow_abbrev=False, + fromfile_prefix_chars="@", + ) + # extra_info is a dict of (param -> value) to display if there's + # an usage error to provide more contextual information to the user. + self.extra_info = extra_info + + def error(self, message: str) -> NoReturn: + """Transform argparse error message into UsageError.""" + msg = f"{self.prog}: error: {message}" + if self.extra_info: + msg += "\n" + "\n".join( + f" {k}: {v}" for k, v in sorted(self.extra_info.items()) + ) + raise UsageError(self.format_usage() + msg) + + +class DropShorterLongHelpFormatter(argparse.HelpFormatter): + """Shorten help for long options that differ only in extra hyphens. + + - Collapse **long** options that are the same except for extra hyphens. + - Shortcut if there are only two options and one of them is a short one. + - Cache result on the action object as this is called at least 2 times. + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + # Use more accurate terminal width. 
+ if "width" not in kwargs: + kwargs["width"] = _pytest._io.get_terminal_width() + super().__init__(*args, **kwargs) + + def _format_action_invocation(self, action: argparse.Action) -> str: + orgstr = super()._format_action_invocation(action) + if orgstr and orgstr[0] != "-": # only optional arguments + return orgstr + res: str | None = getattr(action, "_formatted_action_invocation", None) + if res: + return res + options = orgstr.split(", ") + if len(options) == 2 and (len(options[0]) == 2 or len(options[1]) == 2): + # a shortcut for '-h, --help' or '--abc', '-a' + action._formatted_action_invocation = orgstr # type: ignore + return orgstr + return_list = [] + short_long: dict[str, str] = {} + for option in options: + if len(option) == 2 or option[2] == " ": + continue + if not option.startswith("--"): + raise ArgumentError( + f'long optional argument without "--": [{option}]', option + ) + xxoption = option[2:] + shortened = xxoption.replace("-", "") + if shortened not in short_long or len(short_long[shortened]) < len( + xxoption + ): + short_long[shortened] = xxoption + # now short_long has been filled out to the longest with dashes + # **and** we keep the right option ordering from add_argument + for option in options: + if len(option) == 2 or option[2] == " ": + return_list.append(option) + if option[2:] == short_long.get(option.replace("-", "")): + return_list.append(option.replace(" ", "=", 1)) + formatted_action_invocation = ", ".join(return_list) + action._formatted_action_invocation = formatted_action_invocation # type: ignore + return formatted_action_invocation + + def _split_lines(self, text, width): + """Wrap lines after splitting on original newlines. + + This allows to have explicit line breaks in the help text. + """ + import textwrap + + lines = [] + for line in text.splitlines(): + lines.extend(textwrap.wrap(line.strip(), width)) + return lines + + +class OverrideIniAction(argparse.Action): + """Custom argparse action that makes a CLI flag equivalent to overriding an + option, in addition to behaving like `store_true`. + + This can simplify things since code only needs to inspect the config option + and not consider the CLI flag. 
+ """ + + def __init__( + self, + option_strings: Sequence[str], + dest: str, + nargs: int | str | None = None, + *args, + ini_option: str, + ini_value: str, + **kwargs, + ) -> None: + super().__init__(option_strings, dest, 0, *args, **kwargs) + self.ini_option = ini_option + self.ini_value = ini_value + + def __call__( + self, + parser: argparse.ArgumentParser, + namespace: argparse.Namespace, + *args, + **kwargs, + ) -> None: + setattr(namespace, self.dest, True) + current_overrides = getattr(namespace, "override_ini", None) + if current_overrides is None: + current_overrides = [] + current_overrides.append(f"{self.ini_option}={self.ini_value}") + setattr(namespace, "override_ini", current_overrides) diff --git a/.venv/lib/python3.12/site-packages/_pytest/config/compat.py b/.venv/lib/python3.12/site-packages/_pytest/config/compat.py new file mode 100644 index 0000000..21eab4c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/config/compat.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +from collections.abc import Mapping +import functools +from pathlib import Path +from typing import Any +import warnings + +import pluggy + +from ..compat import LEGACY_PATH +from ..compat import legacy_path +from ..deprecated import HOOK_LEGACY_PATH_ARG + + +# hookname: (Path, LEGACY_PATH) +imply_paths_hooks: Mapping[str, tuple[str, str]] = { + "pytest_ignore_collect": ("collection_path", "path"), + "pytest_collect_file": ("file_path", "path"), + "pytest_pycollect_makemodule": ("module_path", "path"), + "pytest_report_header": ("start_path", "startdir"), + "pytest_report_collectionfinish": ("start_path", "startdir"), +} + + +def _check_path(path: Path, fspath: LEGACY_PATH) -> None: + if Path(fspath) != path: + raise ValueError( + f"Path({fspath!r}) != {path!r}\n" + "if both path and fspath are given they need to be equal" + ) + + +class PathAwareHookProxy: + """ + this helper wraps around hook callers + until pluggy supports fixingcalls, this one will do + + it currently doesn't return full hook caller proxies for fixed hooks, + this may have to be changed later depending on bugs + """ + + def __init__(self, hook_relay: pluggy.HookRelay) -> None: + self._hook_relay = hook_relay + + def __dir__(self) -> list[str]: + return dir(self._hook_relay) + + def __getattr__(self, key: str) -> pluggy.HookCaller: + hook: pluggy.HookCaller = getattr(self._hook_relay, key) + if key not in imply_paths_hooks: + self.__dict__[key] = hook + return hook + else: + path_var, fspath_var = imply_paths_hooks[key] + + @functools.wraps(hook) + def fixed_hook(**kw: Any) -> Any: + path_value: Path | None = kw.pop(path_var, None) + fspath_value: LEGACY_PATH | None = kw.pop(fspath_var, None) + if fspath_value is not None: + warnings.warn( + HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg=fspath_var, pathlib_path_arg=path_var + ), + stacklevel=2, + ) + if path_value is not None: + if fspath_value is not None: + _check_path(path_value, fspath_value) + else: + fspath_value = legacy_path(path_value) + else: + assert fspath_value is not None + path_value = Path(fspath_value) + + kw[path_var] = path_value + kw[fspath_var] = fspath_value + return hook(**kw) + + fixed_hook.name = hook.name # type: ignore[attr-defined] + fixed_hook.spec = hook.spec # type: ignore[attr-defined] + fixed_hook.__name__ = key + self.__dict__[key] = fixed_hook + return fixed_hook # type: ignore[return-value] diff --git a/.venv/lib/python3.12/site-packages/_pytest/config/exceptions.py 
b/.venv/lib/python3.12/site-packages/_pytest/config/exceptions.py new file mode 100644 index 0000000..d84a9ea --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/config/exceptions.py @@ -0,0 +1,15 @@ +from __future__ import annotations + +from typing import final + + +@final +class UsageError(Exception): + """Error in pytest usage or invocation.""" + + __module__ = "pytest" + + +class PrintHelp(Exception): + """Raised when pytest should print its help to skip the rest of the + argument parsing and validation.""" diff --git a/.venv/lib/python3.12/site-packages/_pytest/config/findpaths.py b/.venv/lib/python3.12/site-packages/_pytest/config/findpaths.py new file mode 100644 index 0000000..3c628a0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/config/findpaths.py @@ -0,0 +1,350 @@ +from __future__ import annotations + +from collections.abc import Iterable +from collections.abc import Sequence +from dataclasses import dataclass +from dataclasses import KW_ONLY +import os +from pathlib import Path +import sys +from typing import Literal +from typing import TypeAlias + +import iniconfig + +from .exceptions import UsageError +from _pytest.outcomes import fail +from _pytest.pathlib import absolutepath +from _pytest.pathlib import commonpath +from _pytest.pathlib import safe_exists + + +@dataclass(frozen=True) +class ConfigValue: + """Represents a configuration value with its origin and parsing mode. + + This allows tracking whether a value came from a configuration file + or from a CLI override (--override-ini), which is important for + determining precedence when dealing with ini option aliases. + + The mode tracks the parsing mode/data model used for the value: + - "ini": from INI files or [tool.pytest.ini_options], where the only + supported value types are `str` or `list[str]`. + - "toml": from TOML files (not in INI mode), where native TOML types + are preserved. + """ + + value: object + _: KW_ONLY + origin: Literal["file", "override"] + mode: Literal["ini", "toml"] + + +ConfigDict: TypeAlias = dict[str, ConfigValue] + + +def _parse_ini_config(path: Path) -> iniconfig.IniConfig: + """Parse the given generic '.ini' file using legacy IniConfig parser, returning + the parsed object. + + Raise UsageError if the file cannot be parsed. + """ + try: + return iniconfig.IniConfig(str(path)) + except iniconfig.ParseError as exc: + raise UsageError(str(exc)) from exc + + +def load_config_dict_from_file( + filepath: Path, +) -> ConfigDict | None: + """Load pytest configuration from the given file path, if supported. + + Return None if the file does not contain valid pytest configuration. + """ + # Configuration from ini files are obtained from the [pytest] section, if present. + if filepath.suffix == ".ini": + iniconfig = _parse_ini_config(filepath) + + if "pytest" in iniconfig: + return { + k: ConfigValue(v, origin="file", mode="ini") + for k, v in iniconfig["pytest"].items() + } + else: + # "pytest.ini" files are always the source of configuration, even if empty. + if filepath.name in {"pytest.ini", ".pytest.ini"}: + return {} + + # '.cfg' files are considered if they contain a "[tool:pytest]" section. 
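+    # Illustrative example (comment added for this document, not upstream code):
+    # a setup.cfg containing
+    #     [tool:pytest]
+    #     addopts = -ra
+    # yields {"addopts": ConfigValue("-ra", origin="file", mode="ini")}.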
+ elif filepath.suffix == ".cfg": + iniconfig = _parse_ini_config(filepath) + + if "tool:pytest" in iniconfig.sections: + return { + k: ConfigValue(v, origin="file", mode="ini") + for k, v in iniconfig["tool:pytest"].items() + } + elif "pytest" in iniconfig.sections: + # If a setup.cfg contains a "[pytest]" section, we raise a failure to indicate users that + # plain "[pytest]" sections in setup.cfg files is no longer supported (#3086). + fail(CFG_PYTEST_SECTION.format(filename="setup.cfg"), pytrace=False) + + # '.toml' files are considered if they contain a [tool.pytest] table (toml mode) + # or [tool.pytest.ini_options] table (ini mode) for pyproject.toml, + # or [pytest] table (toml mode) for pytest.toml/.pytest.toml. + elif filepath.suffix == ".toml": + if sys.version_info >= (3, 11): + import tomllib + else: + import tomli as tomllib + + toml_text = filepath.read_text(encoding="utf-8") + try: + config = tomllib.loads(toml_text) + except tomllib.TOMLDecodeError as exc: + raise UsageError(f"{filepath}: {exc}") from exc + + # pytest.toml and .pytest.toml use [pytest] table directly. + if filepath.name in ("pytest.toml", ".pytest.toml"): + pytest_config = config.get("pytest", {}) + if pytest_config: + # TOML mode - preserve native TOML types. + return { + k: ConfigValue(v, origin="file", mode="toml") + for k, v in pytest_config.items() + } + # "pytest.toml" files are always the source of configuration, even if empty. + return {} + + # pyproject.toml uses [tool.pytest] or [tool.pytest.ini_options]. + else: + tool_pytest = config.get("tool", {}).get("pytest", {}) + + # Check for toml mode config: [tool.pytest] with content outside of ini_options. + toml_config = {k: v for k, v in tool_pytest.items() if k != "ini_options"} + # Check for ini mode config: [tool.pytest.ini_options]. + ini_config = tool_pytest.get("ini_options", None) + + if toml_config and ini_config: + raise UsageError( + f"{filepath}: Cannot use both [tool.pytest] (native TOML types) and " + "[tool.pytest.ini_options] (string-based INI format) simultaneously. " + "Please use [tool.pytest] with native TOML types (recommended) " + "or [tool.pytest.ini_options] for backwards compatibility." + ) + + if toml_config: + # TOML mode - preserve native TOML types. + return { + k: ConfigValue(v, origin="file", mode="toml") + for k, v in toml_config.items() + } + + elif ini_config is not None: + # INI mode - TOML supports richer data types than INI files, but we need to + # convert all scalar values to str for compatibility with the INI system. 
+ def make_scalar(v: object) -> str | list[str]: + return v if isinstance(v, list) else str(v) + + return { + k: ConfigValue(make_scalar(v), origin="file", mode="ini") + for k, v in ini_config.items() + } + + return None + + +def locate_config( + invocation_dir: Path, + args: Iterable[Path], +) -> tuple[Path | None, Path | None, ConfigDict, Sequence[str]]: + """Search in the list of arguments for a valid ini-file for pytest, + and return a tuple of (rootdir, inifile, cfg-dict, ignored-config-files), where + ignored-config-files is a list of config basenames found that contain + pytest configuration but were ignored.""" + config_names = [ + "pytest.toml", + ".pytest.toml", + "pytest.ini", + ".pytest.ini", + "pyproject.toml", + "tox.ini", + "setup.cfg", + ] + args = [x for x in args if not str(x).startswith("-")] + if not args: + args = [invocation_dir] + found_pyproject_toml: Path | None = None + ignored_config_files: list[str] = [] + + for arg in args: + argpath = absolutepath(arg) + for base in (argpath, *argpath.parents): + for config_name in config_names: + p = base / config_name + if p.is_file(): + if p.name == "pyproject.toml" and found_pyproject_toml is None: + found_pyproject_toml = p + ini_config = load_config_dict_from_file(p) + if ini_config is not None: + index = config_names.index(config_name) + for remainder in config_names[index + 1 :]: + p2 = base / remainder + if ( + p2.is_file() + and load_config_dict_from_file(p2) is not None + ): + ignored_config_files.append(remainder) + return base, p, ini_config, ignored_config_files + if found_pyproject_toml is not None: + return found_pyproject_toml.parent, found_pyproject_toml, {}, [] + return None, None, {}, [] + + +def get_common_ancestor( + invocation_dir: Path, + paths: Iterable[Path], +) -> Path: + common_ancestor: Path | None = None + for path in paths: + if not path.exists(): + continue + if common_ancestor is None: + common_ancestor = path + else: + if common_ancestor in path.parents or path == common_ancestor: + continue + elif path in common_ancestor.parents: + common_ancestor = path + else: + shared = commonpath(path, common_ancestor) + if shared is not None: + common_ancestor = shared + if common_ancestor is None: + common_ancestor = invocation_dir + elif common_ancestor.is_file(): + common_ancestor = common_ancestor.parent + return common_ancestor + + +def get_dirs_from_args(args: Iterable[str]) -> list[Path]: + def is_option(x: str) -> bool: + return x.startswith("-") + + def get_file_part_from_node_id(x: str) -> str: + return x.split("::")[0] + + def get_dir_from_path(path: Path) -> Path: + if path.is_dir(): + return path + return path.parent + + # These look like paths but may not exist + possible_paths = ( + absolutepath(get_file_part_from_node_id(arg)) + for arg in args + if not is_option(arg) + ) + + return [get_dir_from_path(path) for path in possible_paths if safe_exists(path)] + + +def parse_override_ini(override_ini: Sequence[str] | None) -> ConfigDict: + """Parse the -o/--override-ini command line arguments and return the overrides. + + :raises UsageError: + If one of the values is malformed. + """ + overrides = {} + # override_ini is a list of "ini=value" options. + # Always use the last item if multiple values are set for same ini-name, + # e.g. -o foo=bar1 -o foo=bar2 will set foo to bar2. + for ini_config in override_ini or (): + try: + key, user_ini_value = ini_config.split("=", 1) + except ValueError as e: + raise UsageError( + f"-o/--override-ini expects option=value style (got: {ini_config!r})." 
+ ) from e + else: + overrides[key] = ConfigValue(user_ini_value, origin="override", mode="ini") + return overrides + + +CFG_PYTEST_SECTION = "[pytest] section in {filename} files is no longer supported, change to [tool:pytest] instead." + + +def determine_setup( + *, + inifile: str | None, + override_ini: Sequence[str] | None, + args: Sequence[str], + rootdir_cmd_arg: str | None, + invocation_dir: Path, +) -> tuple[Path, Path | None, ConfigDict, Sequence[str]]: + """Determine the rootdir, inifile and ini configuration values from the + command line arguments. + + :param inifile: + The `--inifile` command line argument, if given. + :param override_ini: + The -o/--override-ini command line arguments, if given. + :param args: + The free command line arguments. + :param rootdir_cmd_arg: + The `--rootdir` command line argument, if given. + :param invocation_dir: + The working directory when pytest was invoked. + + :raises UsageError: + """ + rootdir = None + dirs = get_dirs_from_args(args) + ignored_config_files: Sequence[str] = [] + + if inifile: + inipath_ = absolutepath(inifile) + inipath: Path | None = inipath_ + inicfg = load_config_dict_from_file(inipath_) or {} + if rootdir_cmd_arg is None: + rootdir = inipath_.parent + else: + ancestor = get_common_ancestor(invocation_dir, dirs) + rootdir, inipath, inicfg, ignored_config_files = locate_config( + invocation_dir, [ancestor] + ) + if rootdir is None and rootdir_cmd_arg is None: + for possible_rootdir in (ancestor, *ancestor.parents): + if (possible_rootdir / "setup.py").is_file(): + rootdir = possible_rootdir + break + else: + if dirs != [ancestor]: + rootdir, inipath, inicfg, _ = locate_config(invocation_dir, dirs) + if rootdir is None: + rootdir = get_common_ancestor( + invocation_dir, [invocation_dir, ancestor] + ) + if is_fs_root(rootdir): + rootdir = ancestor + if rootdir_cmd_arg: + rootdir = absolutepath(os.path.expandvars(rootdir_cmd_arg)) + if not rootdir.is_dir(): + raise UsageError( + f"Directory '{rootdir}' not found. Check your '--rootdir' option." + ) + + ini_overrides = parse_override_ini(override_ini) + inicfg.update(ini_overrides) + + assert rootdir is not None + return rootdir, inipath, inicfg, ignored_config_files + + +def is_fs_root(p: Path) -> bool: + r""" + Return True if the given path is pointing to the root of the + file system ("/" on Unix and "C:\\" on Windows for example). 
+ """ + return os.path.splitdrive(str(p))[1] == os.sep diff --git a/.venv/lib/python3.12/site-packages/_pytest/debugging.py b/.venv/lib/python3.12/site-packages/_pytest/debugging.py new file mode 100644 index 0000000..de1b268 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/debugging.py @@ -0,0 +1,407 @@ +# mypy: allow-untyped-defs +# ruff: noqa: T100 +"""Interactive debugging with PDB, the Python Debugger.""" + +from __future__ import annotations + +import argparse +from collections.abc import Callable +from collections.abc import Generator +import functools +import sys +import types +from typing import Any +import unittest + +from _pytest import outcomes +from _pytest._code import ExceptionInfo +from _pytest.capture import CaptureManager +from _pytest.config import Config +from _pytest.config import ConftestImportFailure +from _pytest.config import hookimpl +from _pytest.config import PytestPluginManager +from _pytest.config.argparsing import Parser +from _pytest.config.exceptions import UsageError +from _pytest.nodes import Node +from _pytest.reports import BaseReport +from _pytest.runner import CallInfo + + +def _validate_usepdb_cls(value: str) -> tuple[str, str]: + """Validate syntax of --pdbcls option.""" + try: + modname, classname = value.split(":") + except ValueError as e: + raise argparse.ArgumentTypeError( + f"{value!r} is not in the format 'modname:classname'" + ) from e + return (modname, classname) + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--pdb", + dest="usepdb", + action="store_true", + help="Start the interactive Python debugger on errors or KeyboardInterrupt", + ) + group.addoption( + "--pdbcls", + dest="usepdb_cls", + metavar="modulename:classname", + type=_validate_usepdb_cls, + help="Specify a custom interactive Python debugger for use with --pdb." + "For example: --pdbcls=IPython.terminal.debugger:TerminalPdb", + ) + group.addoption( + "--trace", + dest="trace", + action="store_true", + help="Immediately break when running each test", + ) + + +def pytest_configure(config: Config) -> None: + import pdb + + if config.getvalue("trace"): + config.pluginmanager.register(PdbTrace(), "pdbtrace") + if config.getvalue("usepdb"): + config.pluginmanager.register(PdbInvoke(), "pdbinvoke") + + pytestPDB._saved.append( + (pdb.set_trace, pytestPDB._pluginmanager, pytestPDB._config) + ) + pdb.set_trace = pytestPDB.set_trace + pytestPDB._pluginmanager = config.pluginmanager + pytestPDB._config = config + + # NOTE: not using pytest_unconfigure, since it might get called although + # pytest_configure was not (if another plugin raises UsageError). + def fin() -> None: + ( + pdb.set_trace, + pytestPDB._pluginmanager, + pytestPDB._config, + ) = pytestPDB._saved.pop() + + config.add_cleanup(fin) + + +class pytestPDB: + """Pseudo PDB that defers to the real pdb.""" + + _pluginmanager: PytestPluginManager | None = None + _config: Config | None = None + _saved: list[ + tuple[Callable[..., None], PytestPluginManager | None, Config | None] + ] = [] + _recursive_debug = 0 + _wrapped_pdb_cls: tuple[type[Any], type[Any]] | None = None + + @classmethod + def _is_capturing(cls, capman: CaptureManager | None) -> str | bool: + if capman: + return capman.is_capturing() + return False + + @classmethod + def _import_pdb_cls(cls, capman: CaptureManager | None): + if not cls._config: + import pdb + + # Happens when using pytest.set_trace outside of a test. 
+ return pdb.Pdb + + usepdb_cls = cls._config.getvalue("usepdb_cls") + + if cls._wrapped_pdb_cls and cls._wrapped_pdb_cls[0] == usepdb_cls: + return cls._wrapped_pdb_cls[1] + + if usepdb_cls: + modname, classname = usepdb_cls + + try: + __import__(modname) + mod = sys.modules[modname] + + # Handle --pdbcls=pdb:pdb.Pdb (useful e.g. with pdbpp). + parts = classname.split(".") + pdb_cls = getattr(mod, parts[0]) + for part in parts[1:]: + pdb_cls = getattr(pdb_cls, part) + except Exception as exc: + value = ":".join((modname, classname)) + raise UsageError( + f"--pdbcls: could not import {value!r}: {exc}" + ) from exc + else: + import pdb + + pdb_cls = pdb.Pdb + + wrapped_cls = cls._get_pdb_wrapper_class(pdb_cls, capman) + cls._wrapped_pdb_cls = (usepdb_cls, wrapped_cls) + return wrapped_cls + + @classmethod + def _get_pdb_wrapper_class(cls, pdb_cls, capman: CaptureManager | None): + import _pytest.config + + class PytestPdbWrapper(pdb_cls): + _pytest_capman = capman + _continued = False + + def do_debug(self, arg): + cls._recursive_debug += 1 + ret = super().do_debug(arg) + cls._recursive_debug -= 1 + return ret + + if hasattr(pdb_cls, "do_debug"): + do_debug.__doc__ = pdb_cls.do_debug.__doc__ + + def do_continue(self, arg): + ret = super().do_continue(arg) + if cls._recursive_debug == 0: + assert cls._config is not None + tw = _pytest.config.create_terminal_writer(cls._config) + tw.line() + + capman = self._pytest_capman + capturing = pytestPDB._is_capturing(capman) + if capturing: + if capturing == "global": + tw.sep(">", "PDB continue (IO-capturing resumed)") + else: + tw.sep( + ">", + f"PDB continue (IO-capturing resumed for {capturing})", + ) + assert capman is not None + capman.resume() + else: + tw.sep(">", "PDB continue") + assert cls._pluginmanager is not None + cls._pluginmanager.hook.pytest_leave_pdb(config=cls._config, pdb=self) + self._continued = True + return ret + + if hasattr(pdb_cls, "do_continue"): + do_continue.__doc__ = pdb_cls.do_continue.__doc__ + + do_c = do_cont = do_continue + + def do_quit(self, arg): + # Raise Exit outcome when quit command is used in pdb. + # + # This is a bit of a hack - it would be better if BdbQuit + # could be handled, but this would require to wrap the + # whole pytest run, and adjust the report etc. + ret = super().do_quit(arg) + + if cls._recursive_debug == 0: + outcomes.exit("Quitting debugger") + + return ret + + if hasattr(pdb_cls, "do_quit"): + do_quit.__doc__ = pdb_cls.do_quit.__doc__ + + do_q = do_quit + do_exit = do_quit + + def setup(self, f, tb): + """Suspend on setup(). + + Needed after do_continue resumed, and entering another + breakpoint again. + """ + ret = super().setup(f, tb) + if not ret and self._continued: + # pdb.setup() returns True if the command wants to exit + # from the interaction: do not suspend capturing then. + if self._pytest_capman: + self._pytest_capman.suspend_global_capture(in_=True) + return ret + + def get_stack(self, f, t): + stack, i = super().get_stack(f, t) + if f is None: + # Find last non-hidden frame. 
+ i = max(0, len(stack) - 1) + while i and stack[i][0].f_locals.get("__tracebackhide__", False): + i -= 1 + return stack, i + + return PytestPdbWrapper + + @classmethod + def _init_pdb(cls, method, *args, **kwargs): + """Initialize PDB debugging, dropping any IO capturing.""" + import _pytest.config + + if cls._pluginmanager is None: + capman: CaptureManager | None = None + else: + capman = cls._pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend(in_=True) + + if cls._config: + tw = _pytest.config.create_terminal_writer(cls._config) + tw.line() + + if cls._recursive_debug == 0: + # Handle header similar to pdb.set_trace in py37+. + header = kwargs.pop("header", None) + if header is not None: + tw.sep(">", header) + else: + capturing = cls._is_capturing(capman) + if capturing == "global": + tw.sep(">", f"PDB {method} (IO-capturing turned off)") + elif capturing: + tw.sep( + ">", + f"PDB {method} (IO-capturing turned off for {capturing})", + ) + else: + tw.sep(">", f"PDB {method}") + + _pdb = cls._import_pdb_cls(capman)(**kwargs) + + if cls._pluginmanager: + cls._pluginmanager.hook.pytest_enter_pdb(config=cls._config, pdb=_pdb) + return _pdb + + @classmethod + def set_trace(cls, *args, **kwargs) -> None: + """Invoke debugging via ``Pdb.set_trace``, dropping any IO capturing.""" + frame = sys._getframe().f_back + _pdb = cls._init_pdb("set_trace", *args, **kwargs) + _pdb.set_trace(frame) + + +class PdbInvoke: + def pytest_exception_interact( + self, node: Node, call: CallInfo[Any], report: BaseReport + ) -> None: + capman = node.config.pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend_global_capture(in_=True) + out, err = capman.read_global_capture() + sys.stdout.write(out) + sys.stdout.write(err) + assert call.excinfo is not None + + if not isinstance(call.excinfo.value, unittest.SkipTest): + _enter_pdb(node, call.excinfo, report) + + def pytest_internalerror(self, excinfo: ExceptionInfo[BaseException]) -> None: + exc_or_tb = _postmortem_exc_or_tb(excinfo) + post_mortem(exc_or_tb) + + +class PdbTrace: + @hookimpl(wrapper=True) + def pytest_pyfunc_call(self, pyfuncitem) -> Generator[None, object, object]: + wrap_pytest_function_for_tracing(pyfuncitem) + return (yield) + + +def wrap_pytest_function_for_tracing(pyfuncitem) -> None: + """Change the Python function object of the given Function item by a + wrapper which actually enters pdb before calling the python function + itself, effectively leaving the user in the pdb prompt in the first + statement of the function.""" + _pdb = pytestPDB._init_pdb("runcall") + testfunction = pyfuncitem.obj + + # we can't just return `partial(pdb.runcall, testfunction)` because (on + # python < 3.7.4) runcall's first param is `func`, which means we'd get + # an exception if one of the kwargs to testfunction was called `func`. + @functools.wraps(testfunction) + def wrapper(*args, **kwargs) -> None: + func = functools.partial(testfunction, *args, **kwargs) + _pdb.runcall(func) + + pyfuncitem.obj = wrapper + + +def maybe_wrap_pytest_function_for_tracing(pyfuncitem) -> None: + """Wrap the given pytestfunct item for tracing support if --trace was given in + the command line.""" + if pyfuncitem.config.getvalue("trace"): + wrap_pytest_function_for_tracing(pyfuncitem) + + +def _enter_pdb( + node: Node, excinfo: ExceptionInfo[BaseException], rep: BaseReport +) -> BaseReport: + # XXX we reuse the TerminalReporter's terminalwriter + # because this seems to avoid some encoding related troubles + # for not completely clear reasons. 
+ tw = node.config.pluginmanager.getplugin("terminalreporter")._tw + tw.line() + + showcapture = node.config.option.showcapture + + for sectionname, content in ( + ("stdout", rep.capstdout), + ("stderr", rep.capstderr), + ("log", rep.caplog), + ): + if showcapture in (sectionname, "all") and content: + tw.sep(">", "captured " + sectionname) + if content[-1:] == "\n": + content = content[:-1] + tw.line(content) + + tw.sep(">", "traceback") + rep.toterminal(tw) + tw.sep(">", "entering PDB") + tb_or_exc = _postmortem_exc_or_tb(excinfo) + rep._pdbshown = True # type: ignore[attr-defined] + post_mortem(tb_or_exc) + return rep + + +def _postmortem_exc_or_tb( + excinfo: ExceptionInfo[BaseException], +) -> types.TracebackType | BaseException: + from doctest import UnexpectedException + + get_exc = sys.version_info >= (3, 13) + if isinstance(excinfo.value, UnexpectedException): + # A doctest.UnexpectedException is not useful for post_mortem. + # Use the underlying exception instead: + underlying_exc = excinfo.value + if get_exc: + return underlying_exc.exc_info[1] + + return underlying_exc.exc_info[2] + elif isinstance(excinfo.value, ConftestImportFailure): + # A config.ConftestImportFailure is not useful for post_mortem. + # Use the underlying exception instead: + cause = excinfo.value.cause + if get_exc: + return cause + + assert cause.__traceback__ is not None + return cause.__traceback__ + else: + assert excinfo._excinfo is not None + if get_exc: + return excinfo._excinfo[1] + + return excinfo._excinfo[2] + + +def post_mortem(tb_or_exc: types.TracebackType | BaseException) -> None: + p = pytestPDB._init_pdb("post_mortem") + p.reset() + p.interaction(None, tb_or_exc) + if p.quitting: + outcomes.exit("Quitting debugger") diff --git a/.venv/lib/python3.12/site-packages/_pytest/deprecated.py b/.venv/lib/python3.12/site-packages/_pytest/deprecated.py new file mode 100644 index 0000000..cb5d2e9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/deprecated.py @@ -0,0 +1,99 @@ +"""Deprecation messages and bits of code used elsewhere in the codebase that +is planned to be removed in the next pytest release. + +Keeping it in a central location makes it easy to track what is deprecated and should +be removed when the time comes. + +All constants defined in this module should be either instances of +:class:`PytestWarning`, or :class:`UnformattedWarning` +in case of warnings which need to format their messages. +""" + +from __future__ import annotations + +from warnings import warn + +from _pytest.warning_types import PytestDeprecationWarning +from _pytest.warning_types import PytestRemovedIn9Warning +from _pytest.warning_types import PytestRemovedIn10Warning +from _pytest.warning_types import UnformattedWarning + + +# set of plugins which have been integrated into the core; we use this list to ignore +# them during registration to avoid conflicts +DEPRECATED_EXTERNAL_PLUGINS = { + "pytest_catchlog", + "pytest_capturelog", + "pytest_faulthandler", + "pytest_subtests", +} + + +# This could have been removed pytest 8, but it's harmless and common, so no rush to remove. +YIELD_FIXTURE = PytestDeprecationWarning( + "@pytest.yield_fixture is deprecated.\n" + "Use @pytest.fixture instead; they are the same." +) + +# This deprecation is never really meant to be removed. 
+PRIVATE = PytestDeprecationWarning("A private pytest class or function was used.") + + +HOOK_LEGACY_PATH_ARG = UnformattedWarning( + PytestRemovedIn9Warning, + "The ({pylib_path_arg}: py.path.local) argument is deprecated, please use ({pathlib_path_arg}: pathlib.Path)\n" + "see https://docs.pytest.org/en/latest/deprecations.html" + "#py-path-local-arguments-for-hooks-replaced-with-pathlib-path", +) + +NODE_CTOR_FSPATH_ARG = UnformattedWarning( + PytestRemovedIn9Warning, + "The (fspath: py.path.local) argument to {node_type_name} is deprecated. " + "Please use the (path: pathlib.Path) argument instead.\n" + "See https://docs.pytest.org/en/latest/deprecations.html" + "#fspath-argument-for-node-constructors-replaced-with-pathlib-path", +) + +HOOK_LEGACY_MARKING = UnformattedWarning( + PytestDeprecationWarning, + "The hook{type} {fullname} uses old-style configuration options (marks or attributes).\n" + "Please use the pytest.hook{type}({hook_opts}) decorator instead\n" + " to configure the hooks.\n" + " See https://docs.pytest.org/en/latest/deprecations.html" + "#configuring-hook-specs-impls-using-markers", +) + +MARKED_FIXTURE = PytestRemovedIn9Warning( + "Marks applied to fixtures have no effect\n" + "See docs: https://docs.pytest.org/en/stable/deprecations.html#applying-a-mark-to-a-fixture-function" +) + +MONKEYPATCH_LEGACY_NAMESPACE_PACKAGES = PytestRemovedIn10Warning( + "monkeypatch.syspath_prepend() called with pkg_resources legacy namespace packages detected.\n" + "Legacy namespace packages (using pkg_resources.declare_namespace) are deprecated.\n" + "Please use native namespace packages (PEP 420) instead.\n" + "See https://docs.pytest.org/en/stable/deprecations.html#monkeypatch-fixup-namespace-packages" +) + +# You want to make some `__init__` or function "private". +# +# def my_private_function(some, args): +# ... +# +# Do this: +# +# def my_private_function(some, args, *, _ispytest: bool = False): +# check_ispytest(_ispytest) +# ... +# +# Change all internal/allowed calls to +# +# my_private_function(some, args, _ispytest=True) +# +# All other calls will get the default _ispytest=False and trigger +# the warning (possibly error in the future). 
+ + +def check_ispytest(ispytest: bool) -> None: + if not ispytest: + warn(PRIVATE, stacklevel=3) diff --git a/.venv/lib/python3.12/site-packages/_pytest/doctest.py b/.venv/lib/python3.12/site-packages/_pytest/doctest.py new file mode 100644 index 0000000..cd255f5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/doctest.py @@ -0,0 +1,736 @@ +# mypy: allow-untyped-defs +"""Discover and run doctests in modules and test files.""" + +from __future__ import annotations + +import bdb +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Sequence +from contextlib import contextmanager +import functools +import inspect +import os +from pathlib import Path +import platform +import re +import sys +import traceback +import types +from typing import Any +from typing import TYPE_CHECKING +import warnings + +from _pytest import outcomes +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import ReprFileLocation +from _pytest._code.code import TerminalRepr +from _pytest._io import TerminalWriter +from _pytest.compat import safe_getattr +from _pytest.config import Config +from _pytest.config.argparsing import Parser +from _pytest.fixtures import fixture +from _pytest.fixtures import TopRequest +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.outcomes import OutcomeException +from _pytest.outcomes import skip +from _pytest.pathlib import fnmatch_ex +from _pytest.python import Module +from _pytest.python_api import approx +from _pytest.warning_types import PytestWarning + + +if TYPE_CHECKING: + import doctest + + from typing_extensions import Self + +DOCTEST_REPORT_CHOICE_NONE = "none" +DOCTEST_REPORT_CHOICE_CDIFF = "cdiff" +DOCTEST_REPORT_CHOICE_NDIFF = "ndiff" +DOCTEST_REPORT_CHOICE_UDIFF = "udiff" +DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE = "only_first_failure" + +DOCTEST_REPORT_CHOICES = ( + DOCTEST_REPORT_CHOICE_NONE, + DOCTEST_REPORT_CHOICE_CDIFF, + DOCTEST_REPORT_CHOICE_NDIFF, + DOCTEST_REPORT_CHOICE_UDIFF, + DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE, +) + +# Lazy definition of runner class +RUNNER_CLASS = None +# Lazy definition of output checker class +CHECKER_CLASS: type[doctest.OutputChecker] | None = None + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "doctest_optionflags", + "Option flags for doctests", + type="args", + default=["ELLIPSIS"], + ) + parser.addini( + "doctest_encoding", "Encoding used for doctest files", default="utf-8" + ) + group = parser.getgroup("collect") + group.addoption( + "--doctest-modules", + action="store_true", + default=False, + help="Run doctests in all .py modules", + dest="doctestmodules", + ) + group.addoption( + "--doctest-report", + type=str.lower, + default="udiff", + help="Choose another output format for diffs on doctest failure", + choices=DOCTEST_REPORT_CHOICES, + dest="doctestreport", + ) + group.addoption( + "--doctest-glob", + action="append", + default=[], + metavar="pat", + help="Doctests file matching pattern, default: test*.txt", + dest="doctestglob", + ) + group.addoption( + "--doctest-ignore-import-errors", + action="store_true", + default=False, + help="Ignore doctest collection errors", + dest="doctest_ignore_import_errors", + ) + group.addoption( + "--doctest-continue-on-failure", + action="store_true", + default=False, + help="For a given doctest, continue to run after the first failure", + dest="doctest_continue_on_failure", + ) + + +def pytest_unconfigure() -> None: 
+ global RUNNER_CLASS + + RUNNER_CLASS = None + + +def pytest_collect_file( + file_path: Path, + parent: Collector, +) -> DoctestModule | DoctestTextfile | None: + config = parent.config + if file_path.suffix == ".py": + if config.option.doctestmodules and not any( + (_is_setup_py(file_path), _is_main_py(file_path)) + ): + return DoctestModule.from_parent(parent, path=file_path) + elif _is_doctest(config, file_path, parent): + return DoctestTextfile.from_parent(parent, path=file_path) + return None + + +def _is_setup_py(path: Path) -> bool: + if path.name != "setup.py": + return False + contents = path.read_bytes() + return b"setuptools" in contents or b"distutils" in contents + + +def _is_doctest(config: Config, path: Path, parent: Collector) -> bool: + if path.suffix in (".txt", ".rst") and parent.session.isinitpath(path): + return True + globs = config.getoption("doctestglob") or ["test*.txt"] + return any(fnmatch_ex(glob, path) for glob in globs) + + +def _is_main_py(path: Path) -> bool: + return path.name == "__main__.py" + + +class ReprFailDoctest(TerminalRepr): + def __init__( + self, reprlocation_lines: Sequence[tuple[ReprFileLocation, Sequence[str]]] + ) -> None: + self.reprlocation_lines = reprlocation_lines + + def toterminal(self, tw: TerminalWriter) -> None: + for reprlocation, lines in self.reprlocation_lines: + for line in lines: + tw.line(line) + reprlocation.toterminal(tw) + + +class MultipleDoctestFailures(Exception): + def __init__(self, failures: Sequence[doctest.DocTestFailure]) -> None: + super().__init__() + self.failures = failures + + +def _init_runner_class() -> type[doctest.DocTestRunner]: + import doctest + + class PytestDoctestRunner(doctest.DebugRunner): + """Runner to collect failures. + + Note that the out variable in this case is a list instead of a + stdout-like object. + """ + + def __init__( + self, + checker: doctest.OutputChecker | None = None, + verbose: bool | None = None, + optionflags: int = 0, + continue_on_failure: bool = True, + ) -> None: + super().__init__(checker=checker, verbose=verbose, optionflags=optionflags) + self.continue_on_failure = continue_on_failure + + def report_failure( + self, + out, + test: doctest.DocTest, + example: doctest.Example, + got: str, + ) -> None: + failure = doctest.DocTestFailure(test, example, got) + if self.continue_on_failure: + out.append(failure) + else: + raise failure + + def report_unexpected_exception( + self, + out, + test: doctest.DocTest, + example: doctest.Example, + exc_info: tuple[type[BaseException], BaseException, types.TracebackType], + ) -> None: + if isinstance(exc_info[1], OutcomeException): + raise exc_info[1] + if isinstance(exc_info[1], bdb.BdbQuit): + outcomes.exit("Quitting debugger") + failure = doctest.UnexpectedException(test, example, exc_info) + if self.continue_on_failure: + out.append(failure) + else: + raise failure + + return PytestDoctestRunner + + +def _get_runner( + checker: doctest.OutputChecker | None = None, + verbose: bool | None = None, + optionflags: int = 0, + continue_on_failure: bool = True, +) -> doctest.DocTestRunner: + # We need this in order to do a lazy import on doctest + global RUNNER_CLASS + if RUNNER_CLASS is None: + RUNNER_CLASS = _init_runner_class() + # Type ignored because the continue_on_failure argument is only defined on + # PytestDoctestRunner, which is lazily defined so can't be used as a type. 
+ return RUNNER_CLASS( # type: ignore + checker=checker, + verbose=verbose, + optionflags=optionflags, + continue_on_failure=continue_on_failure, + ) + + +class DoctestItem(Item): + def __init__( + self, + name: str, + parent: DoctestTextfile | DoctestModule, + runner: doctest.DocTestRunner, + dtest: doctest.DocTest, + ) -> None: + super().__init__(name, parent) + self.runner = runner + self.dtest = dtest + + # Stuff needed for fixture support. + self.obj = None + fm = self.session._fixturemanager + fixtureinfo = fm.getfixtureinfo(node=self, func=None, cls=None) + self._fixtureinfo = fixtureinfo + self.fixturenames = fixtureinfo.names_closure + self._initrequest() + + @classmethod + def from_parent( # type: ignore[override] + cls, + parent: DoctestTextfile | DoctestModule, + *, + name: str, + runner: doctest.DocTestRunner, + dtest: doctest.DocTest, + ) -> Self: + # incompatible signature due to imposed limits on subclass + """The public named constructor.""" + return super().from_parent(name=name, parent=parent, runner=runner, dtest=dtest) + + def _initrequest(self) -> None: + self.funcargs: dict[str, object] = {} + self._request = TopRequest(self, _ispytest=True) # type: ignore[arg-type] + + def setup(self) -> None: + self._request._fillfixtures() + globs = dict(getfixture=self._request.getfixturevalue) + for name, value in self._request.getfixturevalue("doctest_namespace").items(): + globs[name] = value + self.dtest.globs.update(globs) + + def runtest(self) -> None: + _check_all_skipped(self.dtest) + self._disable_output_capturing_for_darwin() + failures: list[doctest.DocTestFailure] = [] + # Type ignored because we change the type of `out` from what + # doctest expects. + self.runner.run(self.dtest, out=failures) # type: ignore[arg-type] + if failures: + raise MultipleDoctestFailures(failures) + + def _disable_output_capturing_for_darwin(self) -> None: + """Disable output capturing. Otherwise, stdout is lost to doctest (#985).""" + if platform.system() != "Darwin": + return + capman = self.config.pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend_global_capture(in_=True) + out, err = capman.read_global_capture() + sys.stdout.write(out) + sys.stderr.write(err) + + # TODO: Type ignored -- breaks Liskov Substitution. + def repr_failure( # type: ignore[override] + self, + excinfo: ExceptionInfo[BaseException], + ) -> str | TerminalRepr: + import doctest + + failures: ( + Sequence[doctest.DocTestFailure | doctest.UnexpectedException] | None + ) = None + if isinstance( + excinfo.value, doctest.DocTestFailure | doctest.UnexpectedException + ): + failures = [excinfo.value] + elif isinstance(excinfo.value, MultipleDoctestFailures): + failures = excinfo.value.failures + + if failures is None: + return super().repr_failure(excinfo) + + reprlocation_lines = [] + for failure in failures: + example = failure.example + test = failure.test + filename = test.filename + if test.lineno is None: + lineno = None + else: + lineno = test.lineno + example.lineno + 1 + message = type(failure).__name__ + # TODO: ReprFileLocation doesn't expect a None lineno. 
+ reprlocation = ReprFileLocation(filename, lineno, message) # type: ignore[arg-type] + checker = _get_checker() + report_choice = _get_report_choice(self.config.getoption("doctestreport")) + if lineno is not None: + assert failure.test.docstring is not None + lines = failure.test.docstring.splitlines(False) + # add line numbers to the left of the error message + assert test.lineno is not None + lines = [ + f"{i + test.lineno + 1:03d} {x}" for (i, x) in enumerate(lines) + ] + # trim docstring error lines to 10 + lines = lines[max(example.lineno - 9, 0) : example.lineno + 1] + else: + lines = [ + "EXAMPLE LOCATION UNKNOWN, not showing all tests of that example" + ] + indent = ">>>" + for line in example.source.splitlines(): + lines.append(f"??? {indent} {line}") + indent = "..." + if isinstance(failure, doctest.DocTestFailure): + lines += checker.output_difference( + example, failure.got, report_choice + ).split("\n") + else: + inner_excinfo = ExceptionInfo.from_exc_info(failure.exc_info) + lines += [f"UNEXPECTED EXCEPTION: {inner_excinfo.value!r}"] + lines += [ + x.strip("\n") for x in traceback.format_exception(*failure.exc_info) + ] + reprlocation_lines.append((reprlocation, lines)) + return ReprFailDoctest(reprlocation_lines) + + def reportinfo(self) -> tuple[os.PathLike[str] | str, int | None, str]: + return self.path, self.dtest.lineno, f"[doctest] {self.name}" + + +def _get_flag_lookup() -> dict[str, int]: + import doctest + + return dict( + DONT_ACCEPT_TRUE_FOR_1=doctest.DONT_ACCEPT_TRUE_FOR_1, + DONT_ACCEPT_BLANKLINE=doctest.DONT_ACCEPT_BLANKLINE, + NORMALIZE_WHITESPACE=doctest.NORMALIZE_WHITESPACE, + ELLIPSIS=doctest.ELLIPSIS, + IGNORE_EXCEPTION_DETAIL=doctest.IGNORE_EXCEPTION_DETAIL, + COMPARISON_FLAGS=doctest.COMPARISON_FLAGS, + ALLOW_UNICODE=_get_allow_unicode_flag(), + ALLOW_BYTES=_get_allow_bytes_flag(), + NUMBER=_get_number_flag(), + ) + + +def get_optionflags(config: Config) -> int: + optionflags_str = config.getini("doctest_optionflags") + flag_lookup_table = _get_flag_lookup() + flag_acc = 0 + for flag in optionflags_str: + flag_acc |= flag_lookup_table[flag] + return flag_acc + + +def _get_continue_on_failure(config: Config) -> bool: + continue_on_failure: bool = config.getvalue("doctest_continue_on_failure") + if continue_on_failure: + # We need to turn off this if we use pdb since we should stop at + # the first failure. + if config.getvalue("usepdb"): + continue_on_failure = False + return continue_on_failure + + +class DoctestTextfile(Module): + obj = None + + def collect(self) -> Iterable[DoctestItem]: + import doctest + + # Inspired by doctest.testfile; ideally we would use it directly, + # but it doesn't support passing a custom checker. 
+ encoding = self.config.getini("doctest_encoding") + text = self.path.read_text(encoding) + filename = str(self.path) + name = self.path.name + globs = {"__name__": "__main__"} + + optionflags = get_optionflags(self.config) + + runner = _get_runner( + verbose=False, + optionflags=optionflags, + checker=_get_checker(), + continue_on_failure=_get_continue_on_failure(self.config), + ) + + parser = doctest.DocTestParser() + test = parser.get_doctest(text, globs, name, filename, 0) + if test.examples: + yield DoctestItem.from_parent( + self, name=test.name, runner=runner, dtest=test + ) + + +def _check_all_skipped(test: doctest.DocTest) -> None: + """Raise pytest.skip() if all examples in the given DocTest have the SKIP + option set.""" + import doctest + + all_skipped = all(x.options.get(doctest.SKIP, False) for x in test.examples) + if all_skipped: + skip("all tests skipped by +SKIP option") + + +def _is_mocked(obj: object) -> bool: + """Return if an object is possibly a mock object by checking the + existence of a highly improbable attribute.""" + return ( + safe_getattr(obj, "pytest_mock_example_attribute_that_shouldnt_exist", None) + is not None + ) + + +@contextmanager +def _patch_unwrap_mock_aware() -> Generator[None]: + """Context manager which replaces ``inspect.unwrap`` with a version + that's aware of mock objects and doesn't recurse into them.""" + real_unwrap = inspect.unwrap + + def _mock_aware_unwrap( + func: Callable[..., Any], *, stop: Callable[[Any], Any] | None = None + ) -> Any: + try: + if stop is None or stop is _is_mocked: + return real_unwrap(func, stop=_is_mocked) + _stop = stop + return real_unwrap(func, stop=lambda obj: _is_mocked(obj) or _stop(func)) + except Exception as e: + warnings.warn( + f"Got {e!r} when unwrapping {func!r}. This is usually caused " + "by a violation of Python's object protocol; see e.g. " + "https://github.com/pytest-dev/pytest/issues/5080", + PytestWarning, + ) + raise + + inspect.unwrap = _mock_aware_unwrap + try: + yield + finally: + inspect.unwrap = real_unwrap + + +class DoctestModule(Module): + def collect(self) -> Iterable[DoctestItem]: + import doctest + + class MockAwareDocTestFinder(doctest.DocTestFinder): + py_ver_info_minor = sys.version_info[:2] + is_find_lineno_broken = ( + py_ver_info_minor < (3, 11) + or (py_ver_info_minor == (3, 11) and sys.version_info.micro < 9) + or (py_ver_info_minor == (3, 12) and sys.version_info.micro < 3) + ) + if is_find_lineno_broken: + + def _find_lineno(self, obj, source_lines): + """On older Pythons, doctest code does not take into account + `@property`. https://github.com/python/cpython/issues/61648 + + Moreover, wrapped Doctests need to be unwrapped so the correct + line number is returned. #8796 + """ + if isinstance(obj, property): + obj = getattr(obj, "fget", obj) + + if hasattr(obj, "__wrapped__"): + # Get the main obj in case of it being wrapped + obj = inspect.unwrap(obj) + + # Type ignored because this is a private function. + return super()._find_lineno( # type:ignore[misc] + obj, + source_lines, + ) + + if sys.version_info < (3, 13): + + def _from_module(self, module, object): + """`cached_property` objects are never considered a part + of the 'current module'. As such they are skipped by doctest. + Here we override `_from_module` to check the underlying + function instead. https://github.com/python/cpython/issues/107995 + """ + if isinstance(object, functools.cached_property): + object = object.func + + # Type ignored because this is a private function. 
+                return super()._from_module(module, object)  # type: ignore[misc]
+
+        try:
+            module = self.obj
+        except Collector.CollectError:
+            if self.config.getvalue("doctest_ignore_import_errors"):
+                skip(f"unable to import module {self.path!r}")
+            else:
+                raise
+
+        # While doctests currently don't support fixtures directly, we still
+        # need to pick up autouse fixtures.
+        self.session._fixturemanager.parsefactories(self)
+
+        # Uses internal doctest module parsing mechanism.
+        finder = MockAwareDocTestFinder()
+        optionflags = get_optionflags(self.config)
+        runner = _get_runner(
+            verbose=False,
+            optionflags=optionflags,
+            checker=_get_checker(),
+            continue_on_failure=_get_continue_on_failure(self.config),
+        )
+
+        for test in finder.find(module, module.__name__):
+            if test.examples:  # skip empty doctests
+                yield DoctestItem.from_parent(
+                    self, name=test.name, runner=runner, dtest=test
+                )
+
+
+def _init_checker_class() -> type[doctest.OutputChecker]:
+    import doctest
+
+    class LiteralsOutputChecker(doctest.OutputChecker):
+        # Based on doctest_nose_plugin.py from the nltk project
+        # (https://github.com/nltk/nltk) and on the "numtest" doctest extension
+        # by Sebastien Boisgerault (https://github.com/boisgera/numtest).
+
+        _unicode_literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE)
+        _bytes_literal_re = re.compile(r"(\W|^)[bB]([rR]?[\'\"])", re.UNICODE)
+        _number_re = re.compile(
+            r"""
+            (?P<number>
+              (?P<mantissa>
+                (?P<integer1> [+-]?\d*)\.(?P<fraction>\d+)
+                |
+                (?P<integer2> [+-]?\d+)\.
+              )
+              (?:
+                [Ee]
+                (?P<exponent1> [+-]?\d+)
+              )?
+              |
+              (?P<integer3> [+-]?\d+)
+              (?:
+                [Ee]
+                (?P<exponent2> [+-]?\d+)
+              )
+            )
+            """,
+            re.VERBOSE,
+        )
+
+        def check_output(self, want: str, got: str, optionflags: int) -> bool:
+            if super().check_output(want, got, optionflags):
+                return True
+
+            allow_unicode = optionflags & _get_allow_unicode_flag()
+            allow_bytes = optionflags & _get_allow_bytes_flag()
+            allow_number = optionflags & _get_number_flag()
+
+            if not allow_unicode and not allow_bytes and not allow_number:
+                return False
+
+            def remove_prefixes(regex: re.Pattern[str], txt: str) -> str:
+                return re.sub(regex, r"\1\2", txt)
+
+            if allow_unicode:
+                want = remove_prefixes(self._unicode_literal_re, want)
+                got = remove_prefixes(self._unicode_literal_re, got)
+
+            if allow_bytes:
+                want = remove_prefixes(self._bytes_literal_re, want)
+                got = remove_prefixes(self._bytes_literal_re, got)
+
+            if allow_number:
+                got = self._remove_unwanted_precision(want, got)
+
+            return super().check_output(want, got, optionflags)
+
+        def _remove_unwanted_precision(self, want: str, got: str) -> str:
+            wants = list(self._number_re.finditer(want))
+            gots = list(self._number_re.finditer(got))
+            if len(wants) != len(gots):
+                return got
+            offset = 0
+            for w, g in zip(wants, gots, strict=True):
+                fraction: str | None = w.group("fraction")
+                exponent: str | None = w.group("exponent1")
+                if exponent is None:
+                    exponent = w.group("exponent2")
+                precision = 0 if fraction is None else len(fraction)
+                if exponent is not None:
+                    precision -= int(exponent)
+                if float(w.group()) == approx(float(g.group()), abs=10**-precision):
+                    # They're close enough. Replace the text we actually
+                    # got with the text we want, so that it will match when we
+                    # check the string literally.
+ got = ( + got[: g.start() + offset] + w.group() + got[g.end() + offset :] + ) + offset += w.end() - w.start() - (g.end() - g.start()) + return got + + return LiteralsOutputChecker + + +def _get_checker() -> doctest.OutputChecker: + """Return a doctest.OutputChecker subclass that supports some + additional options: + + * ALLOW_UNICODE and ALLOW_BYTES options to ignore u'' and b'' + prefixes (respectively) in string literals. Useful when the same + doctest should run in Python 2 and Python 3. + + * NUMBER to ignore floating-point differences smaller than the + precision of the literal number in the doctest. + + An inner class is used to avoid importing "doctest" at the module + level. + """ + global CHECKER_CLASS + if CHECKER_CLASS is None: + CHECKER_CLASS = _init_checker_class() + return CHECKER_CLASS() + + +def _get_allow_unicode_flag() -> int: + """Register and return the ALLOW_UNICODE flag.""" + import doctest + + return doctest.register_optionflag("ALLOW_UNICODE") + + +def _get_allow_bytes_flag() -> int: + """Register and return the ALLOW_BYTES flag.""" + import doctest + + return doctest.register_optionflag("ALLOW_BYTES") + + +def _get_number_flag() -> int: + """Register and return the NUMBER flag.""" + import doctest + + return doctest.register_optionflag("NUMBER") + + +def _get_report_choice(key: str) -> int: + """Return the actual `doctest` module flag value. + + We want to do it as late as possible to avoid importing `doctest` and all + its dependencies when parsing options, as it adds overhead and breaks tests. + """ + import doctest + + return { + DOCTEST_REPORT_CHOICE_UDIFF: doctest.REPORT_UDIFF, + DOCTEST_REPORT_CHOICE_CDIFF: doctest.REPORT_CDIFF, + DOCTEST_REPORT_CHOICE_NDIFF: doctest.REPORT_NDIFF, + DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE: doctest.REPORT_ONLY_FIRST_FAILURE, + DOCTEST_REPORT_CHOICE_NONE: 0, + }[key] + + +@fixture(scope="session") +def doctest_namespace() -> dict[str, Any]: + """Fixture that returns a :py:class:`dict` that will be injected into the + namespace of doctests. + + Usually this fixture is used in conjunction with another ``autouse`` fixture: + + .. code-block:: python + + @pytest.fixture(autouse=True) + def add_np(doctest_namespace): + doctest_namespace["np"] = numpy + + For more details: :ref:`doctest_namespace`. 
+ """ + return dict() diff --git a/.venv/lib/python3.12/site-packages/_pytest/faulthandler.py b/.venv/lib/python3.12/site-packages/_pytest/faulthandler.py new file mode 100644 index 0000000..080cf58 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/faulthandler.py @@ -0,0 +1,119 @@ +from __future__ import annotations + +from collections.abc import Generator +import os +import sys + +from _pytest.config import Config +from _pytest.config.argparsing import Parser +from _pytest.nodes import Item +from _pytest.stash import StashKey +import pytest + + +fault_handler_original_stderr_fd_key = StashKey[int]() +fault_handler_stderr_fd_key = StashKey[int]() + + +def pytest_addoption(parser: Parser) -> None: + help_timeout = ( + "Dump the traceback of all threads if a test takes " + "more than TIMEOUT seconds to finish" + ) + help_exit_on_timeout = ( + "Exit the test process if a test takes more than " + "faulthandler_timeout seconds to finish" + ) + parser.addini("faulthandler_timeout", help_timeout, default=0.0) + parser.addini( + "faulthandler_exit_on_timeout", help_exit_on_timeout, type="bool", default=False + ) + + +def pytest_configure(config: Config) -> None: + import faulthandler + + # at teardown we want to restore the original faulthandler fileno + # but faulthandler has no api to return the original fileno + # so here we stash the stderr fileno to be used at teardown + # sys.stderr and sys.__stderr__ may be closed or patched during the session + # so we can't rely on their values being good at that point (#11572). + stderr_fileno = get_stderr_fileno() + if faulthandler.is_enabled(): + config.stash[fault_handler_original_stderr_fd_key] = stderr_fileno + config.stash[fault_handler_stderr_fd_key] = os.dup(stderr_fileno) + faulthandler.enable(file=config.stash[fault_handler_stderr_fd_key]) + + +def pytest_unconfigure(config: Config) -> None: + import faulthandler + + faulthandler.disable() + # Close the dup file installed during pytest_configure. + if fault_handler_stderr_fd_key in config.stash: + os.close(config.stash[fault_handler_stderr_fd_key]) + del config.stash[fault_handler_stderr_fd_key] + # Re-enable the faulthandler if it was originally enabled. + if fault_handler_original_stderr_fd_key in config.stash: + faulthandler.enable(config.stash[fault_handler_original_stderr_fd_key]) + del config.stash[fault_handler_original_stderr_fd_key] + + +def get_stderr_fileno() -> int: + try: + fileno = sys.stderr.fileno() + # The Twisted Logger will return an invalid file descriptor since it is not backed + # by an FD. So, let's also forward this to the same code path as with pytest-xdist. + if fileno == -1: + raise AttributeError() + return fileno + except (AttributeError, ValueError): + # pytest-xdist monkeypatches sys.stderr with an object that is not an actual file. + # https://docs.python.org/3/library/faulthandler.html#issue-with-file-descriptors + # This is potentially dangerous, but the best we can do. 
+ assert sys.__stderr__ is not None + return sys.__stderr__.fileno() + + +def get_timeout_config_value(config: Config) -> float: + return float(config.getini("faulthandler_timeout") or 0.0) + + +def get_exit_on_timeout_config_value(config: Config) -> bool: + exit_on_timeout = config.getini("faulthandler_exit_on_timeout") + assert isinstance(exit_on_timeout, bool) + return exit_on_timeout + + +@pytest.hookimpl(wrapper=True, trylast=True) +def pytest_runtest_protocol(item: Item) -> Generator[None, object, object]: + timeout = get_timeout_config_value(item.config) + exit_on_timeout = get_exit_on_timeout_config_value(item.config) + if timeout > 0: + import faulthandler + + stderr = item.config.stash[fault_handler_stderr_fd_key] + faulthandler.dump_traceback_later(timeout, file=stderr, exit=exit_on_timeout) + try: + return (yield) + finally: + faulthandler.cancel_dump_traceback_later() + else: + return (yield) + + +@pytest.hookimpl(tryfirst=True) +def pytest_enter_pdb() -> None: + """Cancel any traceback dumping due to timeout before entering pdb.""" + import faulthandler + + faulthandler.cancel_dump_traceback_later() + + +@pytest.hookimpl(tryfirst=True) +def pytest_exception_interact() -> None: + """Cancel any traceback dumping due to an interactive exception being + raised.""" + import faulthandler + + faulthandler.cancel_dump_traceback_later() diff --git a/.venv/lib/python3.12/site-packages/_pytest/fixtures.py b/.venv/lib/python3.12/site-packages/_pytest/fixtures.py new file mode 100644 index 0000000..27846db --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/fixtures.py @@ -0,0 +1,2047 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +import abc +from collections import defaultdict +from collections import deque +from collections import OrderedDict +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Mapping +from collections.abc import MutableMapping +from collections.abc import Sequence +from collections.abc import Set as AbstractSet +import dataclasses +import functools +import inspect +import os +from pathlib import Path +import sys +import types +from typing import Any +from typing import cast +from typing import Final +from typing import final +from typing import Generic +from typing import NoReturn +from typing import overload +from typing import TYPE_CHECKING +from typing import TypeVar +import warnings + +import _pytest +from _pytest import nodes +from _pytest._code import getfslineno +from _pytest._code import Source +from _pytest._code.code import FormattedExcinfo +from _pytest._code.code import TerminalRepr +from _pytest._io import TerminalWriter +from _pytest.compat import assert_never +from _pytest.compat import get_real_func +from _pytest.compat import getfuncargnames +from _pytest.compat import getimfunc +from _pytest.compat import getlocation +from _pytest.compat import NOTSET +from _pytest.compat import NotSetType +from _pytest.compat import safe_getattr +from _pytest.compat import safe_isclass +from _pytest.compat import signature +from _pytest.config import _PluggyPlugin +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.deprecated import MARKED_FIXTURE +from _pytest.deprecated import YIELD_FIXTURE +from _pytest.main import Session +from _pytest.mark import Mark +from _pytest.mark import 
ParameterSet +from _pytest.mark.structures import MarkDecorator +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.outcomes import TEST_OUTCOME +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath +from _pytest.scope import _ScopeName +from _pytest.scope import HIGH_SCOPES +from _pytest.scope import Scope +from _pytest.warning_types import PytestRemovedIn9Warning +from _pytest.warning_types import PytestWarning + + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + + +if TYPE_CHECKING: + from _pytest.python import CallSpec2 + from _pytest.python import Function + from _pytest.python import Metafunc + + +# The value of the fixture -- return/yield of the fixture function (type variable). +FixtureValue = TypeVar("FixtureValue", covariant=True) +# The type of the fixture function (type variable). +FixtureFunction = TypeVar("FixtureFunction", bound=Callable[..., object]) +# The type of a fixture function (type alias generic in fixture value). +_FixtureFunc = Callable[..., FixtureValue] | Callable[..., Generator[FixtureValue]] +# The type of FixtureDef.cached_result (type alias generic in fixture value). +_FixtureCachedResult = ( + tuple[ + # The result. + FixtureValue, + # Cache key. + object, + None, + ] + | tuple[ + None, + # Cache key. + object, + # The exception and the original traceback. + tuple[BaseException, types.TracebackType | None], + ] +) + + +def pytest_sessionstart(session: Session) -> None: + session._fixturemanager = FixtureManager(session) + + +def get_scope_package( + node: nodes.Item, + fixturedef: FixtureDef[object], +) -> nodes.Node | None: + from _pytest.python import Package + + for parent in node.iter_parents(): + if isinstance(parent, Package) and parent.nodeid == fixturedef.baseid: + return parent + return node.session + + +def get_scope_node(node: nodes.Node, scope: Scope) -> nodes.Node | None: + """Get the closest parent node (including self) which matches the given + scope. + + If there is no parent node for the scope (e.g. asking for class scope on a + Module, or on a Function when not defined in a class), returns None. + """ + import _pytest.python + + if scope is Scope.Function: + # Type ignored because this is actually safe, see: + # https://github.com/python/mypy/issues/4717 + return node.getparent(nodes.Item) # type: ignore[type-abstract] + elif scope is Scope.Class: + return node.getparent(_pytest.python.Class) + elif scope is Scope.Module: + return node.getparent(_pytest.python.Module) + elif scope is Scope.Package: + return node.getparent(_pytest.python.Package) + elif scope is Scope.Session: + return node.getparent(_pytest.main.Session) + else: + assert_never(scope) + + +# TODO: Try to use FixtureFunctionDefinition instead of the marker +def getfixturemarker(obj: object) -> FixtureFunctionMarker | None: + """Return fixturemarker or None if it doesn't exist""" + if isinstance(obj, FixtureFunctionDefinition): + return obj._fixture_function_marker + return None + + +# Algorithm for sorting on a per-parametrized resource setup basis. +# It is called for Session scope first and performs sorting +# down to the lower scopes such as to minimize number of "high scope" +# setups and teardowns. + + +@dataclasses.dataclass(frozen=True) +class ParamArgKey: + """A key for a high-scoped parameter used by an item. + + For use as a hashable key in `reorder_items`. 
The combination of fields + is meant to uniquely identify a particular "instance" of a param, + potentially shared by multiple items in a scope. + """ + + #: The param name. + argname: str + param_index: int + #: For scopes Package, Module, Class, the path to the file (directory in + #: Package's case) of the package/module/class where the item is defined. + scoped_item_path: Path | None + #: For Class scope, the class where the item is defined. + item_cls: type | None + + +_V = TypeVar("_V") +OrderedSet = dict[_V, None] + + +def get_param_argkeys(item: nodes.Item, scope: Scope) -> Iterator[ParamArgKey]: + """Return all ParamArgKeys for item matching the specified high scope.""" + assert scope is not Scope.Function + + try: + callspec: CallSpec2 = item.callspec # type: ignore[attr-defined] + except AttributeError: + return + + item_cls = None + if scope is Scope.Session: + scoped_item_path = None + elif scope is Scope.Package: + # Package key = module's directory. + scoped_item_path = item.path.parent + elif scope is Scope.Module: + scoped_item_path = item.path + elif scope is Scope.Class: + scoped_item_path = item.path + item_cls = item.cls # type: ignore[attr-defined] + else: + assert_never(scope) + + for argname in callspec.indices: + if callspec._arg2scope[argname] != scope: + continue + param_index = callspec.indices[argname] + yield ParamArgKey(argname, param_index, scoped_item_path, item_cls) + + +def reorder_items(items: Sequence[nodes.Item]) -> list[nodes.Item]: + argkeys_by_item: dict[Scope, dict[nodes.Item, OrderedSet[ParamArgKey]]] = {} + items_by_argkey: dict[Scope, dict[ParamArgKey, OrderedDict[nodes.Item, None]]] = {} + for scope in HIGH_SCOPES: + scoped_argkeys_by_item = argkeys_by_item[scope] = {} + scoped_items_by_argkey = items_by_argkey[scope] = defaultdict(OrderedDict) + for item in items: + argkeys = dict.fromkeys(get_param_argkeys(item, scope)) + if argkeys: + scoped_argkeys_by_item[item] = argkeys + for argkey in argkeys: + scoped_items_by_argkey[argkey][item] = None + + items_set = dict.fromkeys(items) + return list( + reorder_items_atscope( + items_set, argkeys_by_item, items_by_argkey, Scope.Session + ) + ) + + +def reorder_items_atscope( + items: OrderedSet[nodes.Item], + argkeys_by_item: Mapping[Scope, Mapping[nodes.Item, OrderedSet[ParamArgKey]]], + items_by_argkey: Mapping[ + Scope, Mapping[ParamArgKey, OrderedDict[nodes.Item, None]] + ], + scope: Scope, +) -> OrderedSet[nodes.Item]: + if scope is Scope.Function or len(items) < 3: + return items + + scoped_items_by_argkey = items_by_argkey[scope] + scoped_argkeys_by_item = argkeys_by_item[scope] + + ignore: set[ParamArgKey] = set() + items_deque = deque(items) + items_done: OrderedSet[nodes.Item] = {} + while items_deque: + no_argkey_items: OrderedSet[nodes.Item] = {} + slicing_argkey = None + while items_deque: + item = items_deque.popleft() + if item in items_done or item in no_argkey_items: + continue + argkeys = dict.fromkeys( + k for k in scoped_argkeys_by_item.get(item, ()) if k not in ignore + ) + if not argkeys: + no_argkey_items[item] = None + else: + slicing_argkey, _ = argkeys.popitem() + # We don't have to remove relevant items from later in the + # deque because they'll just be ignored. + matching_items = [ + i for i in scoped_items_by_argkey[slicing_argkey] if i in items + ] + for i in reversed(matching_items): + items_deque.appendleft(i) + # Fix items_by_argkey order. 
+ for other_scope in HIGH_SCOPES: + other_scoped_items_by_argkey = items_by_argkey[other_scope] + for argkey in argkeys_by_item[other_scope].get(i, ()): + argkey_dict = other_scoped_items_by_argkey[argkey] + if not hasattr(sys, "pypy_version_info"): + argkey_dict[i] = None + argkey_dict.move_to_end(i, last=False) + else: + # Work around a bug in PyPy: + # https://github.com/pypy/pypy/issues/5257 + # https://github.com/pytest-dev/pytest/issues/13312 + bkp = argkey_dict.copy() + argkey_dict.clear() + argkey_dict[i] = None + argkey_dict.update(bkp) + break + if no_argkey_items: + reordered_no_argkey_items = reorder_items_atscope( + no_argkey_items, argkeys_by_item, items_by_argkey, scope.next_lower() + ) + items_done.update(reordered_no_argkey_items) + if slicing_argkey is not None: + ignore.add(slicing_argkey) + return items_done + + +@dataclasses.dataclass(frozen=True) +class FuncFixtureInfo: + """Fixture-related information for a fixture-requesting item (e.g. test + function). + + This is used to examine the fixtures which an item requests statically + (known during collection). This includes autouse fixtures, fixtures + requested by the `usefixtures` marker, fixtures requested in the function + parameters, and the transitive closure of these. + + An item may also request fixtures dynamically (using `request.getfixturevalue`); + these are not reflected here. + """ + + __slots__ = ("argnames", "initialnames", "name2fixturedefs", "names_closure") + + # Fixture names that the item requests directly by function parameters. + argnames: tuple[str, ...] + # Fixture names that the item immediately requires. These include + # argnames + fixture names specified via usefixtures and via autouse=True in + # fixture definitions. + initialnames: tuple[str, ...] + # The transitive closure of the fixture names that the item requires. + # Note: can't include dynamic dependencies (`request.getfixturevalue` calls). + names_closure: list[str] + # A map from a fixture name in the transitive closure to the FixtureDefs + # matching the name which are applicable to this function. + # There may be multiple overriding fixtures with the same name. The + # sequence is ordered from furthest to closes to the function. + name2fixturedefs: dict[str, Sequence[FixtureDef[Any]]] + + def prune_dependency_tree(self) -> None: + """Recompute names_closure from initialnames and name2fixturedefs. + + Can only reduce names_closure, which means that the new closure will + always be a subset of the old one. The order is preserved. + + This method is needed because direct parametrization may shadow some + of the fixtures that were included in the originally built dependency + tree. In this way the dependency tree can get pruned, and the closure + of argnames may get reduced. + """ + closure: set[str] = set() + working_set = set(self.initialnames) + while working_set: + argname = working_set.pop() + # Argname may be something not included in the original names_closure, + # in which case we ignore it. This currently happens with pseudo + # FixtureDefs which wrap 'get_direct_param_fixture_func(request)'. + # So they introduce the new dependency 'request' which might have + # been missing in the original tree (closure). 
+ if argname not in closure and argname in self.names_closure: + closure.add(argname) + if argname in self.name2fixturedefs: + working_set.update(self.name2fixturedefs[argname][-1].argnames) + + self.names_closure[:] = sorted(closure, key=self.names_closure.index) + + +class FixtureRequest(abc.ABC): + """The type of the ``request`` fixture. + + A request object gives access to the requesting test context and has a + ``param`` attribute in case the fixture is parametrized. + """ + + def __init__( + self, + pyfuncitem: Function, + fixturename: str | None, + arg2fixturedefs: dict[str, Sequence[FixtureDef[Any]]], + fixture_defs: dict[str, FixtureDef[Any]], + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + #: Fixture for which this request is being performed. + self.fixturename: Final = fixturename + self._pyfuncitem: Final = pyfuncitem + # The FixtureDefs for each fixture name requested by this item. + # Starts from the statically-known fixturedefs resolved during + # collection. Dynamically requested fixtures (using + # `request.getfixturevalue("foo")`) are added dynamically. + self._arg2fixturedefs: Final = arg2fixturedefs + # The evaluated argnames so far, mapping to the FixtureDef they resolved + # to. + self._fixture_defs: Final = fixture_defs + # Notes on the type of `param`: + # -`request.param` is only defined in parametrized fixtures, and will raise + # AttributeError otherwise. Python typing has no notion of "undefined", so + # this cannot be reflected in the type. + # - Technically `param` is only (possibly) defined on SubRequest, not + # FixtureRequest, but the typing of that is still in flux so this cheats. + # - In the future we might consider using a generic for the param type, but + # for now just using Any. + self.param: Any + + @property + def _fixturemanager(self) -> FixtureManager: + return self._pyfuncitem.session._fixturemanager + + @property + @abc.abstractmethod + def _scope(self) -> Scope: + raise NotImplementedError() + + @property + def scope(self) -> _ScopeName: + """Scope string, one of "function", "class", "module", "package", "session".""" + return self._scope.value + + @abc.abstractmethod + def _check_scope( + self, + requested_fixturedef: FixtureDef[object], + requested_scope: Scope, + ) -> None: + raise NotImplementedError() + + @property + def fixturenames(self) -> list[str]: + """Names of all active fixtures in this request.""" + result = list(self._pyfuncitem.fixturenames) + result.extend(set(self._fixture_defs).difference(result)) + return result + + @property + @abc.abstractmethod + def node(self): + """Underlying collection node (depends on current request scope).""" + raise NotImplementedError() + + @property + def config(self) -> Config: + """The pytest config object associated with this request.""" + return self._pyfuncitem.config + + @property + def function(self): + """Test function object if the request has a per-function scope.""" + if self.scope != "function": + raise AttributeError( + f"function not available in {self.scope}-scoped context" + ) + return self._pyfuncitem.obj + + @property + def cls(self): + """Class (can be None) where the test function was collected.""" + if self.scope not in ("class", "function"): + raise AttributeError(f"cls not available in {self.scope}-scoped context") + clscol = self._pyfuncitem.getparent(_pytest.python.Class) + if clscol: + return clscol.obj + + @property + def instance(self): + """Instance (can be None) on which test function was collected.""" + if self.scope != "function": + 
return None + return getattr(self._pyfuncitem, "instance", None) + + @property + def module(self): + """Python module object where the test function was collected.""" + if self.scope not in ("function", "class", "module"): + raise AttributeError(f"module not available in {self.scope}-scoped context") + mod = self._pyfuncitem.getparent(_pytest.python.Module) + assert mod is not None + return mod.obj + + @property + def path(self) -> Path: + """Path where the test function was collected.""" + if self.scope not in ("function", "class", "module", "package"): + raise AttributeError(f"path not available in {self.scope}-scoped context") + return self._pyfuncitem.path + + @property + def keywords(self) -> MutableMapping[str, Any]: + """Keywords/markers dictionary for the underlying node.""" + node: nodes.Node = self.node + return node.keywords + + @property + def session(self) -> Session: + """Pytest session object.""" + return self._pyfuncitem.session + + @abc.abstractmethod + def addfinalizer(self, finalizer: Callable[[], object]) -> None: + """Add finalizer/teardown function to be called without arguments after + the last test within the requesting test context finished execution.""" + raise NotImplementedError() + + def applymarker(self, marker: str | MarkDecorator) -> None: + """Apply a marker to a single test function invocation. + + This method is useful if you don't want to have a keyword/marker + on all function invocations. + + :param marker: + An object created by a call to ``pytest.mark.NAME(...)``. + """ + self.node.add_marker(marker) + + def raiseerror(self, msg: str | None) -> NoReturn: + """Raise a FixtureLookupError exception. + + :param msg: + An optional custom error message. + """ + raise FixtureLookupError(None, self, msg) + + def getfixturevalue(self, argname: str) -> Any: + """Dynamically run a named fixture function. + + Declaring fixtures via function argument is recommended where possible. + But if you can only decide whether to use another fixture at test + setup time, you may use this function to retrieve it inside a fixture + or test function body. + + This method can be used during the test setup phase or the test run + phase, but during the test teardown phase a fixture's value may not + be available. + + :param argname: + The fixture name. + :raises pytest.FixtureLookupError: + If the given fixture could not be found. + """ + # Note that in addition to the use case described in the docstring, + # getfixturevalue() is also called by pytest itself during item and fixture + # setup to evaluate the fixtures that are requested statically + # (using function parameters, autouse, etc). + + fixturedef = self._get_active_fixturedef(argname) + assert fixturedef.cached_result is not None, ( + f'The fixture value for "{argname}" is not available. ' + "This can happen when the fixture has already been torn down." + ) + return fixturedef.cached_result[0] + + def _iter_chain(self) -> Iterator[SubRequest]: + """Yield all SubRequests in the chain, from self up. + + Note: does *not* yield the TopRequest. + """ + current = self + while isinstance(current, SubRequest): + yield current + current = current._parent_request + + def _get_active_fixturedef(self, argname: str) -> FixtureDef[object]: + if argname == "request": + return RequestFixtureDef(self) + + # If we already finished computing a fixture by this name in this item, + # return it. 
+ fixturedef = self._fixture_defs.get(argname) + if fixturedef is not None: + self._check_scope(fixturedef, fixturedef._scope) + return fixturedef + + # Find the appropriate fixturedef. + fixturedefs = self._arg2fixturedefs.get(argname, None) + if fixturedefs is None: + # We arrive here because of a dynamic call to + # getfixturevalue(argname) which was naturally + # not known at parsing/collection time. + fixturedefs = self._fixturemanager.getfixturedefs(argname, self._pyfuncitem) + if fixturedefs is not None: + self._arg2fixturedefs[argname] = fixturedefs + # No fixtures defined with this name. + if fixturedefs is None: + raise FixtureLookupError(argname, self) + # The are no fixtures with this name applicable for the function. + if not fixturedefs: + raise FixtureLookupError(argname, self) + + # A fixture may override another fixture with the same name, e.g. a + # fixture in a module can override a fixture in a conftest, a fixture in + # a class can override a fixture in the module, and so on. + # An overriding fixture can request its own name (possibly indirectly); + # in this case it gets the value of the fixture it overrides, one level + # up. + # Check how many `argname`s deep we are, and take the next one. + # `fixturedefs` is sorted from furthest to closest, so use negative + # indexing to go in reverse. + index = -1 + for request in self._iter_chain(): + if request.fixturename == argname: + index -= 1 + # If already consumed all of the available levels, fail. + if -index > len(fixturedefs): + raise FixtureLookupError(argname, self) + fixturedef = fixturedefs[index] + + # Prepare a SubRequest object for calling the fixture. + try: + callspec = self._pyfuncitem.callspec + except AttributeError: + callspec = None + if callspec is not None and argname in callspec.params: + param = callspec.params[argname] + param_index = callspec.indices[argname] + # The parametrize invocation scope overrides the fixture's scope. + scope = callspec._arg2scope[argname] + else: + param = NOTSET + param_index = 0 + scope = fixturedef._scope + self._check_fixturedef_without_param(fixturedef) + # The parametrize invocation scope only controls caching behavior while + # allowing wider-scoped fixtures to keep depending on the parametrized + # fixture. Scope control is enforced for parametrized fixtures + # by recreating the whole fixture tree on parameter change. + # Hence `fixturedef._scope`, not `scope`. 
+ self._check_scope(fixturedef, fixturedef._scope) + subrequest = SubRequest( + self, scope, param, param_index, fixturedef, _ispytest=True + ) + + # Make sure the fixture value is cached, running it if it isn't + fixturedef.execute(request=subrequest) + + self._fixture_defs[argname] = fixturedef + return fixturedef + + def _check_fixturedef_without_param(self, fixturedef: FixtureDef[object]) -> None: + """Check that this request is allowed to execute this fixturedef without + a param.""" + funcitem = self._pyfuncitem + has_params = fixturedef.params is not None + fixtures_not_supported = getattr(funcitem, "nofuncargs", False) + if has_params and fixtures_not_supported: + msg = ( + f"{funcitem.name} does not support fixtures, maybe unittest.TestCase subclass?\n" + f"Node id: {funcitem.nodeid}\n" + f"Function type: {type(funcitem).__name__}" + ) + fail(msg, pytrace=False) + if has_params: + frame = inspect.stack()[3] + frameinfo = inspect.getframeinfo(frame[0]) + source_path = absolutepath(frameinfo.filename) + source_lineno = frameinfo.lineno + try: + source_path_str = str(source_path.relative_to(funcitem.config.rootpath)) + except ValueError: + source_path_str = str(source_path) + location = getlocation(fixturedef.func, funcitem.config.rootpath) + msg = ( + "The requested fixture has no parameter defined for test:\n" + f" {funcitem.nodeid}\n\n" + f"Requested fixture '{fixturedef.argname}' defined in:\n" + f"{location}\n\n" + f"Requested here:\n" + f"{source_path_str}:{source_lineno}" + ) + fail(msg, pytrace=False) + + def _get_fixturestack(self) -> list[FixtureDef[Any]]: + values = [request._fixturedef for request in self._iter_chain()] + values.reverse() + return values + + +@final +class TopRequest(FixtureRequest): + """The type of the ``request`` fixture in a test function.""" + + def __init__(self, pyfuncitem: Function, *, _ispytest: bool = False) -> None: + super().__init__( + fixturename=None, + pyfuncitem=pyfuncitem, + arg2fixturedefs=pyfuncitem._fixtureinfo.name2fixturedefs.copy(), + fixture_defs={}, + _ispytest=_ispytest, + ) + + @property + def _scope(self) -> Scope: + return Scope.Function + + def _check_scope( + self, + requested_fixturedef: FixtureDef[object], + requested_scope: Scope, + ) -> None: + # TopRequest always has function scope so always valid. 
+        pass
+
+    @property
+    def node(self):
+        return self._pyfuncitem
+
+    def __repr__(self) -> str:
+        return f"<FixtureRequest for {self.node!r}>"
+
+    def _fillfixtures(self) -> None:
+        item = self._pyfuncitem
+        for argname in item.fixturenames:
+            if argname not in item.funcargs:
+                item.funcargs[argname] = self.getfixturevalue(argname)
+
+    def addfinalizer(self, finalizer: Callable[[], object]) -> None:
+        self.node.addfinalizer(finalizer)
+
+
+@final
+class SubRequest(FixtureRequest):
+    """The type of the ``request`` fixture in a fixture function requested
+    (transitively) by a test function."""
+
+    def __init__(
+        self,
+        request: FixtureRequest,
+        scope: Scope,
+        param: Any,
+        param_index: int,
+        fixturedef: FixtureDef[object],
+        *,
+        _ispytest: bool = False,
+    ) -> None:
+        super().__init__(
+            pyfuncitem=request._pyfuncitem,
+            fixturename=fixturedef.argname,
+            fixture_defs=request._fixture_defs,
+            arg2fixturedefs=request._arg2fixturedefs,
+            _ispytest=_ispytest,
+        )
+        self._parent_request: Final[FixtureRequest] = request
+        self._scope_field: Final = scope
+        self._fixturedef: Final[FixtureDef[object]] = fixturedef
+        if param is not NOTSET:
+            self.param = param
+        self.param_index: Final = param_index
+
+    def __repr__(self) -> str:
+        return f"<SubRequest {self.fixturename!r} for {self._pyfuncitem!r}>"
+
+    @property
+    def _scope(self) -> Scope:
+        return self._scope_field
+
+    @property
+    def node(self):
+        scope = self._scope
+        if scope is Scope.Function:
+            # This might also be a non-function Item despite its attribute name.
+            node: nodes.Node | None = self._pyfuncitem
+        elif scope is Scope.Package:
+            node = get_scope_package(self._pyfuncitem, self._fixturedef)
+        else:
+            node = get_scope_node(self._pyfuncitem, scope)
+            if node is None and scope is Scope.Class:
+                # Fallback to function item itself.
+                node = self._pyfuncitem
+        assert node, (
+            f'Could not obtain a node for scope "{scope}" for function {self._pyfuncitem!r}'
+        )
+        return node
+
+    def _check_scope(
+        self,
+        requested_fixturedef: FixtureDef[object],
+        requested_scope: Scope,
+    ) -> None:
+        if self._scope > requested_scope:
+            # Try to report something helpful.
+            argname = requested_fixturedef.argname
+            fixture_stack = "\n".join(
+                self._format_fixturedef_line(fixturedef)
+                for fixturedef in self._get_fixturestack()
+            )
+            requested_fixture = self._format_fixturedef_line(requested_fixturedef)
+            fail(
+                f"ScopeMismatch: You tried to access the {requested_scope.value} scoped "
+                f"fixture {argname} with a {self._scope.value} scoped request object.
" + f"Requesting fixture stack:\n{fixture_stack}\n" + f"Requested fixture:\n{requested_fixture}", + pytrace=False, + ) + + def _format_fixturedef_line(self, fixturedef: FixtureDef[object]) -> str: + factory = fixturedef.func + path, lineno = getfslineno(factory) + if isinstance(path, Path): + path = bestrelpath(self._pyfuncitem.session.path, path) + sig = signature(factory) + return f"{path}:{lineno + 1}: def {factory.__name__}{sig}" + + def addfinalizer(self, finalizer: Callable[[], object]) -> None: + self._fixturedef.addfinalizer(finalizer) + + +@final +class FixtureLookupError(LookupError): + """Could not return a requested fixture (missing or invalid).""" + + def __init__( + self, argname: str | None, request: FixtureRequest, msg: str | None = None + ) -> None: + self.argname = argname + self.request = request + self.fixturestack = request._get_fixturestack() + self.msg = msg + + def formatrepr(self) -> FixtureLookupErrorRepr: + tblines: list[str] = [] + addline = tblines.append + stack = [self.request._pyfuncitem.obj] + stack.extend(map(lambda x: x.func, self.fixturestack)) + msg = self.msg + # This function currently makes an assumption that a non-None msg means we + # have a non-empty `self.fixturestack`. This is currently true, but if + # somebody at some point want to extend the use of FixtureLookupError to + # new cases it might break. + # Add the assert to make it clearer to developer that this will fail, otherwise + # it crashes because `fspath` does not get set due to `stack` being empty. + assert self.msg is None or self.fixturestack, ( + "formatrepr assumptions broken, rewrite it to handle it" + ) + if msg is not None: + # The last fixture raise an error, let's present + # it at the requesting side. + stack = stack[:-1] + for function in stack: + fspath, lineno = getfslineno(function) + try: + lines, _ = inspect.getsourcelines(get_real_func(function)) + except (OSError, IndexError, TypeError): + error_msg = "file %s, line %s: source code not available" + addline(error_msg % (fspath, lineno + 1)) + else: + addline(f"file {fspath}, line {lineno + 1}") + for i, line in enumerate(lines): + line = line.rstrip() + addline(" " + line) + if line.lstrip().startswith("def"): + break + + if msg is None: + fm = self.request._fixturemanager + available = set() + parent = self.request._pyfuncitem.parent + assert parent is not None + for name, fixturedefs in fm._arg2fixturedefs.items(): + faclist = list(fm._matchfactories(fixturedefs, parent)) + if faclist: + available.add(name) + if self.argname in available: + msg = ( + f" recursive dependency involving fixture '{self.argname}' detected" + ) + else: + msg = f"fixture '{self.argname}' not found" + msg += "\n available fixtures: {}".format(", ".join(sorted(available))) + msg += "\n use 'pytest --fixtures [testpath]' for help on them." 
+ + return FixtureLookupErrorRepr(fspath, lineno, tblines, msg, self.argname) + + +class FixtureLookupErrorRepr(TerminalRepr): + def __init__( + self, + filename: str | os.PathLike[str], + firstlineno: int, + tblines: Sequence[str], + errorstring: str, + argname: str | None, + ) -> None: + self.tblines = tblines + self.errorstring = errorstring + self.filename = filename + self.firstlineno = firstlineno + self.argname = argname + + def toterminal(self, tw: TerminalWriter) -> None: + # tw.line("FixtureLookupError: %s" %(self.argname), red=True) + for tbline in self.tblines: + tw.line(tbline.rstrip()) + lines = self.errorstring.split("\n") + if lines: + tw.line( + f"{FormattedExcinfo.fail_marker} {lines[0].strip()}", + red=True, + ) + for line in lines[1:]: + tw.line( + f"{FormattedExcinfo.flow_marker} {line.strip()}", + red=True, + ) + tw.line() + tw.line(f"{os.fspath(self.filename)}:{self.firstlineno + 1}") + + +def call_fixture_func( + fixturefunc: _FixtureFunc[FixtureValue], request: FixtureRequest, kwargs +) -> FixtureValue: + if inspect.isgeneratorfunction(fixturefunc): + fixturefunc = cast(Callable[..., Generator[FixtureValue]], fixturefunc) + generator = fixturefunc(**kwargs) + try: + fixture_result = next(generator) + except StopIteration: + raise ValueError(f"{request.fixturename} did not yield a value") from None + finalizer = functools.partial(_teardown_yield_fixture, fixturefunc, generator) + request.addfinalizer(finalizer) + else: + fixturefunc = cast(Callable[..., FixtureValue], fixturefunc) + fixture_result = fixturefunc(**kwargs) + return fixture_result + + +def _teardown_yield_fixture(fixturefunc, it) -> None: + """Execute the teardown of a fixture function by advancing the iterator + after the yield and ensure the iteration ends (if not it means there is + more than one yield in the function).""" + try: + next(it) + except StopIteration: + pass + else: + fs, lineno = getfslineno(fixturefunc) + fail( + f"fixture function has more than one 'yield':\n\n" + f"{Source(fixturefunc).indent()}\n" + f"{fs}:{lineno + 1}", + pytrace=False, + ) + + +def _eval_scope_callable( + scope_callable: Callable[[str, Config], _ScopeName], + fixture_name: str, + config: Config, +) -> _ScopeName: + try: + # Type ignored because there is no typing mechanism to specify + # keyword arguments, currently. + result = scope_callable(fixture_name=fixture_name, config=config) # type: ignore[call-arg] + except Exception as e: + raise TypeError( + f"Error evaluating {scope_callable} while defining fixture '{fixture_name}'.\n" + "Expected a function with the signature (*, fixture_name, config)" + ) from e + if not isinstance(result, str): + fail( + f"Expected {scope_callable} to return a 'str' while defining fixture '{fixture_name}', but it returned:\n" + f"{result!r}", + pytrace=False, + ) + return result + + +class FixtureDef(Generic[FixtureValue]): + """A container for a fixture definition. + + Note: At this time, only explicitly documented fields and methods are + considered public stable API. + """ + + def __init__( + self, + config: Config, + baseid: str | None, + argname: str, + func: _FixtureFunc[FixtureValue], + scope: Scope | _ScopeName | Callable[[str, Config], _ScopeName] | None, + params: Sequence[object] | None, + ids: tuple[object | None, ...] 
| Callable[[Any], object | None] | None = None, + *, + _ispytest: bool = False, + # only used in a deprecationwarning msg, can be removed in pytest9 + _autouse: bool = False, + ) -> None: + check_ispytest(_ispytest) + # The "base" node ID for the fixture. + # + # This is a node ID prefix. A fixture is only available to a node (e.g. + # a `Function` item) if the fixture's baseid is a nodeid of a parent of + # node. + # + # For a fixture found in a Collector's object (e.g. a `Module`s module, + # a `Class`'s class), the baseid is the Collector's nodeid. + # + # For a fixture found in a conftest plugin, the baseid is the conftest's + # directory path relative to the rootdir. + # + # For other plugins, the baseid is the empty string (always matches). + self.baseid: Final = baseid or "" + # Whether the fixture was found from a node or a conftest in the + # collection tree. Will be false for fixtures defined in non-conftest + # plugins. + self.has_location: Final = baseid is not None + # The fixture factory function. + self.func: Final = func + # The name by which the fixture may be requested. + self.argname: Final = argname + if scope is None: + scope = Scope.Function + elif callable(scope): + scope = _eval_scope_callable(scope, argname, config) + if isinstance(scope, str): + scope = Scope.from_user( + scope, descr=f"Fixture '{func.__name__}'", where=baseid + ) + self._scope: Final = scope + # If the fixture is directly parametrized, the parameter values. + self.params: Final = params + # If the fixture is directly parametrized, a tuple of explicit IDs to + # assign to the parameter values, or a callable to generate an ID given + # a parameter value. + self.ids: Final = ids + # The names requested by the fixtures. + self.argnames: Final = getfuncargnames(func, name=argname) + # If the fixture was executed, the current value of the fixture. + # Can change if the fixture is executed with different parameters. + self.cached_result: _FixtureCachedResult[FixtureValue] | None = None + self._finalizers: Final[list[Callable[[], object]]] = [] + + # only used to emit a deprecationwarning, can be removed in pytest9 + self._autouse = _autouse + + @property + def scope(self) -> _ScopeName: + """Scope string, one of "function", "class", "module", "package", "session".""" + return self._scope.value + + def addfinalizer(self, finalizer: Callable[[], object]) -> None: + self._finalizers.append(finalizer) + + def finish(self, request: SubRequest) -> None: + exceptions: list[BaseException] = [] + while self._finalizers: + fin = self._finalizers.pop() + try: + fin() + except BaseException as e: + exceptions.append(e) + node = request.node + node.ihook.pytest_fixture_post_finalizer(fixturedef=self, request=request) + # Even if finalization fails, we invalidate the cached fixture + # value and remove all finalizers because they may be bound methods + # which will keep instances alive. + self.cached_result = None + self._finalizers.clear() + if len(exceptions) == 1: + raise exceptions[0] + elif len(exceptions) > 1: + msg = f'errors while tearing down fixture "{self.argname}" of {node}' + raise BaseExceptionGroup(msg, exceptions[::-1]) + + def execute(self, request: SubRequest) -> FixtureValue: + """Return the value of this fixture, executing it if not cached.""" + # Ensure that the dependent fixtures requested by this fixture are loaded. + # This needs to be done before checking if we have a cached value, since + # if a dependent fixture has their cache invalidated, e.g. 
due to + # parametrization, they finalize themselves and fixtures depending on it + # (which will likely include this fixture) setting `self.cached_result = None`. + # See #4871 + requested_fixtures_that_should_finalize_us = [] + for argname in self.argnames: + fixturedef = request._get_active_fixturedef(argname) + # Saves requested fixtures in a list so we later can add our finalizer + # to them, ensuring that if a requested fixture gets torn down we get torn + # down first. This is generally handled by SetupState, but still currently + # needed when this fixture is not parametrized but depends on a parametrized + # fixture. + requested_fixtures_that_should_finalize_us.append(fixturedef) + + # Check for (and return) cached value/exception. + if self.cached_result is not None: + request_cache_key = self.cache_key(request) + cache_key = self.cached_result[1] + try: + # Attempt to make a normal == check: this might fail for objects + # which do not implement the standard comparison (like numpy arrays -- #6497). + cache_hit = bool(request_cache_key == cache_key) + except (ValueError, RuntimeError): + # If the comparison raises, use 'is' as fallback. + cache_hit = request_cache_key is cache_key + + if cache_hit: + if self.cached_result[2] is not None: + exc, exc_tb = self.cached_result[2] + raise exc.with_traceback(exc_tb) + else: + return self.cached_result[0] + # We have a previous but differently parametrized fixture instance + # so we need to tear it down before creating a new one. + self.finish(request) + assert self.cached_result is None + + # Add finalizer to requested fixtures we saved previously. + # We make sure to do this after checking for cached value to avoid + # adding our finalizer multiple times. (#12135) + finalizer = functools.partial(self.finish, request=request) + for parent_fixture in requested_fixtures_that_should_finalize_us: + parent_fixture.addfinalizer(finalizer) + + ihook = request.node.ihook + try: + # Setup the fixture, run the code in it, and cache the value + # in self.cached_result. + result: FixtureValue = ihook.pytest_fixture_setup( + fixturedef=self, request=request + ) + finally: + # Schedule our finalizer, even if the setup failed. + request.node.addfinalizer(finalizer) + + return result + + def cache_key(self, request: SubRequest) -> object: + return getattr(request, "param", None) + + def __repr__(self) -> str: + return f"" + + +class RequestFixtureDef(FixtureDef[FixtureRequest]): + """A custom FixtureDef for the special "request" fixture. + + A new one is generated on-demand whenever "request" is requested. + """ + + def __init__(self, request: FixtureRequest) -> None: + super().__init__( + config=request.config, + baseid=None, + argname="request", + func=lambda: request, + scope=Scope.Function, + params=None, + _ispytest=True, + ) + self.cached_result = (request, [0], None) + + def addfinalizer(self, finalizer: Callable[[], object]) -> None: + pass + + +def resolve_fixture_function( + fixturedef: FixtureDef[FixtureValue], request: FixtureRequest +) -> _FixtureFunc[FixtureValue]: + """Get the actual callable that can be called to obtain the fixture + value.""" + fixturefunc = fixturedef.func + # The fixture function needs to be bound to the actual + # request.instance so that code working with "fixturedef" behaves + # as expected. + instance = request.instance + if instance is not None: + # Handle the case where fixture is defined not in a test class, but some other class + # (for example a plugin class with a fixture), see #2270. 
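+        # Added clarifying comment: ``fixturefunc`` may already be a method bound
+        # to some non-test object (e.g. a plugin instance). If the current test
+        # instance is not an instance of that object's class, the bound method is
+        # returned as-is; otherwise the underlying function is re-bound to the
+        # test instance below.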
+ if hasattr(fixturefunc, "__self__") and not isinstance( + instance, + fixturefunc.__self__.__class__, + ): + return fixturefunc + fixturefunc = getimfunc(fixturedef.func) + if fixturefunc != fixturedef.func: + fixturefunc = fixturefunc.__get__(instance) + return fixturefunc + + +def pytest_fixture_setup( + fixturedef: FixtureDef[FixtureValue], request: SubRequest +) -> FixtureValue: + """Execution of fixture setup.""" + kwargs = {} + for argname in fixturedef.argnames: + kwargs[argname] = request.getfixturevalue(argname) + + fixturefunc = resolve_fixture_function(fixturedef, request) + my_cache_key = fixturedef.cache_key(request) + + if inspect.isasyncgenfunction(fixturefunc) or inspect.iscoroutinefunction( + fixturefunc + ): + auto_str = " with autouse=True" if fixturedef._autouse else "" + + warnings.warn( + PytestRemovedIn9Warning( + f"{request.node.name!r} requested an async fixture " + f"{request.fixturename!r}{auto_str}, with no plugin or hook that " + "handled it. This is usually an error, as pytest does not natively " + "support it. " + "This will turn into an error in pytest 9.\n" + "See: https://docs.pytest.org/en/stable/deprecations.html#sync-test-depending-on-async-fixture" + ), + # no stacklevel will point at users code, so we just point here + stacklevel=1, + ) + + try: + result = call_fixture_func(fixturefunc, request, kwargs) + except TEST_OUTCOME as e: + if isinstance(e, skip.Exception): + # The test requested a fixture which caused a skip. + # Don't show the fixture as the skip location, as then the user + # wouldn't know which test skipped. + e._use_item_location = True + fixturedef.cached_result = (None, my_cache_key, (e, e.__traceback__)) + raise + fixturedef.cached_result = (result, my_cache_key, None) + return result + + +@final +@dataclasses.dataclass(frozen=True) +class FixtureFunctionMarker: + scope: _ScopeName | Callable[[str, Config], _ScopeName] + params: tuple[object, ...] | None + autouse: bool = False + ids: tuple[object | None, ...] | Callable[[Any], object | None] | None = None + name: str | None = None + + _ispytest: dataclasses.InitVar[bool] = False + + def __post_init__(self, _ispytest: bool) -> None: + check_ispytest(_ispytest) + + def __call__(self, function: FixtureFunction) -> FixtureFunctionDefinition: + if inspect.isclass(function): + raise ValueError("class fixtures not supported (maybe in the future)") + + if isinstance(function, FixtureFunctionDefinition): + raise ValueError( + f"@pytest.fixture is being applied more than once to the same function {function.__name__!r}" + ) + + if hasattr(function, "pytestmark"): + warnings.warn(MARKED_FIXTURE, stacklevel=2) + + fixture_definition = FixtureFunctionDefinition( + function=function, fixture_function_marker=self, _ispytest=True + ) + + name = self.name or function.__name__ + if name == "request": + location = getlocation(function) + fail( + f"'request' is a reserved word for fixtures, use another name:\n {location}", + pytrace=False, + ) + + return fixture_definition + + +# TODO: paramspec/return type annotation tracking and storing +class FixtureFunctionDefinition: + def __init__( + self, + *, + function: Callable[..., Any], + fixture_function_marker: FixtureFunctionMarker, + instance: object | None = None, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self.name = fixture_function_marker.name or function.__name__ + # In order to show the function that this fixture contains in messages. + # Set the __name__ to be same as the function __name__ or the given fixture name. 
+ self.__name__ = self.name + self._fixture_function_marker = fixture_function_marker + if instance is not None: + self._fixture_function = cast( + Callable[..., Any], function.__get__(instance) + ) + else: + self._fixture_function = function + functools.update_wrapper(self, function) + + def __repr__(self) -> str: + return f"" + + def __get__(self, instance, owner=None): + """Behave like a method if the function it was applied to was a method.""" + return FixtureFunctionDefinition( + function=self._fixture_function, + fixture_function_marker=self._fixture_function_marker, + instance=instance, + _ispytest=True, + ) + + def __call__(self, *args: Any, **kwds: Any) -> Any: + message = ( + f'Fixture "{self.name}" called directly. Fixtures are not meant to be called directly,\n' + "but are created automatically when test functions request them as parameters.\n" + "See https://docs.pytest.org/en/stable/explanation/fixtures.html for more information about fixtures, and\n" + "https://docs.pytest.org/en/stable/deprecations.html#calling-fixtures-directly" + ) + fail(message, pytrace=False) + + def _get_wrapped_function(self) -> Callable[..., Any]: + return self._fixture_function + + +@overload +def fixture( + fixture_function: Callable[..., object], + *, + scope: _ScopeName | Callable[[str, Config], _ScopeName] = ..., + params: Iterable[object] | None = ..., + autouse: bool = ..., + ids: Sequence[object | None] | Callable[[Any], object | None] | None = ..., + name: str | None = ..., +) -> FixtureFunctionDefinition: ... + + +@overload +def fixture( + fixture_function: None = ..., + *, + scope: _ScopeName | Callable[[str, Config], _ScopeName] = ..., + params: Iterable[object] | None = ..., + autouse: bool = ..., + ids: Sequence[object | None] | Callable[[Any], object | None] | None = ..., + name: str | None = None, +) -> FixtureFunctionMarker: ... + + +def fixture( + fixture_function: FixtureFunction | None = None, + *, + scope: _ScopeName | Callable[[str, Config], _ScopeName] = "function", + params: Iterable[object] | None = None, + autouse: bool = False, + ids: Sequence[object | None] | Callable[[Any], object | None] | None = None, + name: str | None = None, +) -> FixtureFunctionMarker | FixtureFunctionDefinition: + """Decorator to mark a fixture factory function. + + This decorator can be used, with or without parameters, to define a + fixture function. + + The name of the fixture function can later be referenced to cause its + invocation ahead of running tests: test modules or classes can use the + ``pytest.mark.usefixtures(fixturename)`` marker. + + Test functions can directly use fixture names as input arguments in which + case the fixture instance returned from the fixture function will be + injected. + + Fixtures can provide their values to test functions using ``return`` or + ``yield`` statements. When using ``yield`` the code block after the + ``yield`` statement is executed as teardown code regardless of the test + outcome, and must yield exactly once. + + :param scope: + The scope for which this fixture is shared; one of ``"function"`` + (default), ``"class"``, ``"module"``, ``"package"`` or ``"session"``. + + This parameter may also be a callable which receives ``(fixture_name, config)`` + as parameters, and must return a ``str`` with one of the values mentioned above. + + See :ref:`dynamic scope` in the docs for more information. + + :param params: + An optional list of parameters which will cause multiple invocations + of the fixture function and all of the tests using it. 
The current + parameter is available in ``request.param``. + + :param autouse: + If True, the fixture func is activated for all tests that can see it. + If False (the default), an explicit reference is needed to activate + the fixture. + + :param ids: + Sequence of ids each corresponding to the params so that they are + part of the test id. If no ids are provided they will be generated + automatically from the params. + + :param name: + The name of the fixture. This defaults to the name of the decorated + function. If a fixture is used in the same module in which it is + defined, the function name of the fixture will be shadowed by the + function arg that requests the fixture; one way to resolve this is to + name the decorated function ``fixture_`` and then use + ``@pytest.fixture(name='')``. + """ + fixture_marker = FixtureFunctionMarker( + scope=scope, + params=tuple(params) if params is not None else None, + autouse=autouse, + ids=None if ids is None else ids if callable(ids) else tuple(ids), + name=name, + _ispytest=True, + ) + + # Direct decoration. + if fixture_function: + return fixture_marker(fixture_function) + + return fixture_marker + + +def yield_fixture( + fixture_function=None, + *args, + scope="function", + params=None, + autouse=False, + ids=None, + name=None, +): + """(Return a) decorator to mark a yield-fixture factory function. + + .. deprecated:: 3.0 + Use :py:func:`pytest.fixture` directly instead. + """ + warnings.warn(YIELD_FIXTURE, stacklevel=2) + return fixture( + fixture_function, + *args, + scope=scope, + params=params, + autouse=autouse, + ids=ids, + name=name, + ) + + +@fixture(scope="session") +def pytestconfig(request: FixtureRequest) -> Config: + """Session-scoped fixture that returns the session's :class:`pytest.Config` + object. + + Example:: + + def test_foo(pytestconfig): + if pytestconfig.get_verbosity() > 0: + ... + + """ + return request.config + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "usefixtures", + type="args", + default=[], + help="List of default fixtures to be used with this project", + ) + group = parser.getgroup("general") + group.addoption( + "--fixtures", + "--funcargs", + action="store_true", + dest="showfixtures", + default=False, + help="Show available fixtures, sorted by plugin appearance " + "(fixtures with leading '_' are only shown with '-v')", + ) + group.addoption( + "--fixtures-per-test", + action="store_true", + dest="show_fixtures_per_test", + default=False, + help="Show fixtures per test", + ) + + +def pytest_cmdline_main(config: Config) -> int | ExitCode | None: + if config.option.showfixtures: + showfixtures(config) + return 0 + if config.option.show_fixtures_per_test: + show_fixtures_per_test(config) + return 0 + return None + + +def _get_direct_parametrize_args(node: nodes.Node) -> set[str]: + """Return all direct parametrization arguments of a node, so we don't + mistake them for fixtures. + + Check https://github.com/pytest-dev/pytest/issues/5036. + + These things are done later as well when dealing with parametrization + so this could be improved. 
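+
+    Example (illustrative sketch added for clarity; ``test_foo`` is hypothetical)::
+
+        @pytest.mark.parametrize("x", [1, 2])
+        @pytest.mark.parametrize("y", [3], indirect=True)
+        def test_foo(x, y): ...
+
+        # -> {"x"}: only argnames from non-indirect parametrize marks are returned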
+ """ + parametrize_argnames: set[str] = set() + for marker in node.iter_markers(name="parametrize"): + if not marker.kwargs.get("indirect", False): + p_argnames, _ = ParameterSet._parse_parametrize_args( + *marker.args, **marker.kwargs + ) + parametrize_argnames.update(p_argnames) + return parametrize_argnames + + +def deduplicate_names(*seqs: Iterable[str]) -> tuple[str, ...]: + """De-duplicate the sequence of names while keeping the original order.""" + # Ideally we would use a set, but it does not preserve insertion order. + return tuple(dict.fromkeys(name for seq in seqs for name in seq)) + + +class FixtureManager: + """pytest fixture definitions and information is stored and managed + from this class. + + During collection fm.parsefactories() is called multiple times to parse + fixture function definitions into FixtureDef objects and internal + data structures. + + During collection of test functions, metafunc-mechanics instantiate + a FuncFixtureInfo object which is cached per node/func-name. + This FuncFixtureInfo object is later retrieved by Function nodes + which themselves offer a fixturenames attribute. + + The FuncFixtureInfo object holds information about fixtures and FixtureDefs + relevant for a particular function. An initial list of fixtures is + assembled like this: + + - config-defined usefixtures + - autouse-marked fixtures along the collection chain up from the function + - usefixtures markers at module/class/function level + - test function funcargs + + Subsequently the funcfixtureinfo.fixturenames attribute is computed + as the closure of the fixtures needed to setup the initial fixtures, + i.e. fixtures needed by fixture functions themselves are appended + to the fixturenames list. + + Upon the test-setup phases all fixturenames are instantiated, retrieved + by a lookup of their FuncFixtureInfo. + """ + + def __init__(self, session: Session) -> None: + self.session = session + self.config: Config = session.config + # Maps a fixture name (argname) to all of the FixtureDefs in the test + # suite/plugins defined with this name. Populated by parsefactories(). + # TODO: The order of the FixtureDefs list of each arg is significant, + # explain. + self._arg2fixturedefs: Final[dict[str, list[FixtureDef[Any]]]] = {} + self._holderobjseen: Final[set[object]] = set() + # A mapping from a nodeid to a list of autouse fixtures it defines. + self._nodeid_autousenames: Final[dict[str, list[str]]] = { + "": self.config.getini("usefixtures"), + } + session.config.pluginmanager.register(self, "funcmanage") + + def getfixtureinfo( + self, + node: nodes.Item, + func: Callable[..., object] | None, + cls: type | None, + ) -> FuncFixtureInfo: + """Calculate the :class:`FuncFixtureInfo` for an item. + + If ``func`` is None, or if the item sets an attribute + ``nofuncargs = True``, then ``func`` is not examined at all. + + :param node: + The item requesting the fixtures. + :param func: + The item's function. + :param cls: + If the function is a method, the method's class. 
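+
+        Example (illustrative; the test and fixture names are hypothetical)::
+
+            def test_upload(tmp_path, caplog): ...
+
+            # ``argnames`` for this item would be ("tmp_path", "caplog"), and the
+            # returned FuncFixtureInfo's closure additionally contains whatever
+            # fixtures those fixtures themselves request.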
+ """ + if func is not None and not getattr(node, "nofuncargs", False): + argnames = getfuncargnames(func, name=node.name, cls=cls) + else: + argnames = () + usefixturesnames = self._getusefixturesnames(node) + autousenames = self._getautousenames(node) + initialnames = deduplicate_names(autousenames, usefixturesnames, argnames) + + direct_parametrize_args = _get_direct_parametrize_args(node) + + names_closure, arg2fixturedefs = self.getfixtureclosure( + parentnode=node, + initialnames=initialnames, + ignore_args=direct_parametrize_args, + ) + + return FuncFixtureInfo(argnames, initialnames, names_closure, arg2fixturedefs) + + def pytest_plugin_registered(self, plugin: _PluggyPlugin, plugin_name: str) -> None: + # Fixtures defined in conftest plugins are only visible to within the + # conftest's directory. This is unlike fixtures in non-conftest plugins + # which have global visibility. So for conftests, construct the base + # nodeid from the plugin name (which is the conftest path). + if plugin_name and plugin_name.endswith("conftest.py"): + # Note: we explicitly do *not* use `plugin.__file__` here -- The + # difference is that plugin_name has the correct capitalization on + # case-insensitive systems (Windows) and other normalization issues + # (issue #11816). + conftestpath = absolutepath(plugin_name) + try: + nodeid = str(conftestpath.parent.relative_to(self.config.rootpath)) + except ValueError: + nodeid = "" + if nodeid == ".": + nodeid = "" + if os.sep != nodes.SEP: + nodeid = nodeid.replace(os.sep, nodes.SEP) + else: + nodeid = None + + self.parsefactories(plugin, nodeid) + + def _getautousenames(self, node: nodes.Node) -> Iterator[str]: + """Return the names of autouse fixtures applicable to node.""" + for parentnode in node.listchain(): + basenames = self._nodeid_autousenames.get(parentnode.nodeid) + if basenames: + yield from basenames + + def _getusefixturesnames(self, node: nodes.Item) -> Iterator[str]: + """Return the names of usefixtures fixtures applicable to node.""" + for marker_node, mark in node.iter_markers_with_node(name="usefixtures"): + if not mark.args: + marker_node.warn( + PytestWarning( + f"usefixtures() in {node.nodeid} without arguments has no effect" + ) + ) + yield from mark.args + + def getfixtureclosure( + self, + parentnode: nodes.Node, + initialnames: tuple[str, ...], + ignore_args: AbstractSet[str], + ) -> tuple[list[str], dict[str, Sequence[FixtureDef[Any]]]]: + # Collect the closure of all fixtures, starting with the given + # fixturenames as the initial set. As we have to visit all + # factory definitions anyway, we also return an arg2fixturedefs + # mapping so that the caller can reuse it and does not have + # to re-discover fixturedefs again for each fixturename + # (discovering matching fixtures for a given name/node is expensive). + + fixturenames_closure = list(initialnames) + + arg2fixturedefs: dict[str, Sequence[FixtureDef[Any]]] = {} + + # Track the index for each fixture name in the simulated stack. + # Needed for handling override chains correctly, similar to _get_active_fixturedef. + # Using negative indices: -1 is the most specific (last), -2 is second to last, etc. + current_indices: dict[str, int] = {} + + def process_argname(argname: str) -> None: + # Optimization: already processed this argname. 
+ if current_indices.get(argname) == -1: + return + + if argname not in fixturenames_closure: + fixturenames_closure.append(argname) + + if argname in ignore_args: + return + + fixturedefs = arg2fixturedefs.get(argname) + if not fixturedefs: + fixturedefs = self.getfixturedefs(argname, parentnode) + if not fixturedefs: + # Fixture not defined or not visible (will error during runtest). + return + arg2fixturedefs[argname] = fixturedefs + + index = current_indices.get(argname, -1) + if -index > len(fixturedefs): + # Exhausted the override chain (will error during runtest). + return + fixturedef = fixturedefs[index] + + current_indices[argname] = index - 1 + for dep in fixturedef.argnames: + process_argname(dep) + current_indices[argname] = index + + for name in initialnames: + process_argname(name) + + def sort_by_scope(arg_name: str) -> Scope: + try: + fixturedefs = arg2fixturedefs[arg_name] + except KeyError: + return Scope.Function + else: + return fixturedefs[-1]._scope + + fixturenames_closure.sort(key=sort_by_scope, reverse=True) + return fixturenames_closure, arg2fixturedefs + + def pytest_generate_tests(self, metafunc: Metafunc) -> None: + """Generate new tests based on parametrized fixtures used by the given metafunc""" + + def get_parametrize_mark_argnames(mark: Mark) -> Sequence[str]: + args, _ = ParameterSet._parse_parametrize_args(*mark.args, **mark.kwargs) + return args + + for argname in metafunc.fixturenames: + # Get the FixtureDefs for the argname. + fixture_defs = metafunc._arg2fixturedefs.get(argname) + if not fixture_defs: + # Will raise FixtureLookupError at setup time if not parametrized somewhere + # else (e.g @pytest.mark.parametrize) + continue + + # If the test itself parametrizes using this argname, give it + # precedence. + if any( + argname in get_parametrize_mark_argnames(mark) + for mark in metafunc.definition.iter_markers("parametrize") + ): + continue + + # In the common case we only look at the fixture def with the + # closest scope (last in the list). But if the fixture overrides + # another fixture, while requesting the super fixture, keep going + # in case the super fixture is parametrized (#1953). + for fixturedef in reversed(fixture_defs): + # Fixture is parametrized, apply it and stop. + if fixturedef.params is not None: + metafunc.parametrize( + argname, + fixturedef.params, + indirect=True, + scope=fixturedef.scope, + ids=fixturedef.ids, + ) + break + + # Not requesting the overridden super fixture, stop. + if argname not in fixturedef.argnames: + break + + # Try next super fixture, if any. + + def pytest_collection_modifyitems(self, items: list[nodes.Item]) -> None: + # Separate parametrized setups. + items[:] = reorder_items(items) + + def _register_fixture( + self, + *, + name: str, + func: _FixtureFunc[object], + nodeid: str | None, + scope: Scope | _ScopeName | Callable[[str, Config], _ScopeName] = "function", + params: Sequence[object] | None = None, + ids: tuple[object | None, ...] | Callable[[Any], object | None] | None = None, + autouse: bool = False, + ) -> None: + """Register a fixture + + :param name: + The fixture's name. + :param func: + The fixture's implementation function. + :param nodeid: + The visibility of the fixture. The fixture will be available to the + node with this nodeid and its children in the collection tree. + None means that the fixture is visible to the entire collection tree, + e.g. a fixture defined for general use in a plugin. + :param scope: + The fixture's scope. 
+ :param params: + The fixture's parametrization params. + :param ids: + The fixture's IDs. + :param autouse: + Whether this is an autouse fixture. + """ + fixture_def = FixtureDef( + config=self.config, + baseid=nodeid, + argname=name, + func=func, + scope=scope, + params=params, + ids=ids, + _ispytest=True, + _autouse=autouse, + ) + + faclist = self._arg2fixturedefs.setdefault(name, []) + if fixture_def.has_location: + faclist.append(fixture_def) + else: + # fixturedefs with no location are at the front + # so this inserts the current fixturedef after the + # existing fixturedefs from external plugins but + # before the fixturedefs provided in conftests. + i = len([f for f in faclist if not f.has_location]) + faclist.insert(i, fixture_def) + if autouse: + self._nodeid_autousenames.setdefault(nodeid or "", []).append(name) + + @overload + def parsefactories( + self, + node_or_obj: nodes.Node, + ) -> None: + raise NotImplementedError() + + @overload + def parsefactories( + self, + node_or_obj: object, + nodeid: str | None, + ) -> None: + raise NotImplementedError() + + def parsefactories( + self, + node_or_obj: nodes.Node | object, + nodeid: str | NotSetType | None = NOTSET, + ) -> None: + """Collect fixtures from a collection node or object. + + Found fixtures are parsed into `FixtureDef`s and saved. + + If `node_or_object` is a collection node (with an underlying Python + object), the node's object is traversed and the node's nodeid is used to + determine the fixtures' visibility. `nodeid` must not be specified in + this case. + + If `node_or_object` is an object (e.g. a plugin), the object is + traversed and the given `nodeid` is used to determine the fixtures' + visibility. `nodeid` must be specified in this case; None and "" mean + total visibility. + """ + if nodeid is not NOTSET: + holderobj = node_or_obj + else: + assert isinstance(node_or_obj, nodes.Node) + holderobj = cast(object, node_or_obj.obj) # type: ignore[attr-defined] + assert isinstance(node_or_obj.nodeid, str) + nodeid = node_or_obj.nodeid + if holderobj in self._holderobjseen: + return + + # Avoid accessing `@property` (and other descriptors) when iterating fixtures. + if not safe_isclass(holderobj) and not isinstance(holderobj, types.ModuleType): + holderobj_tp: object = type(holderobj) + else: + holderobj_tp = holderobj + + self._holderobjseen.add(holderobj) + for name in dir(holderobj): + # The attribute can be an arbitrary descriptor, so the attribute + # access below can raise. safe_getattr() ignores such exceptions. + obj_ub = safe_getattr(holderobj_tp, name, None) + if type(obj_ub) is FixtureFunctionDefinition: + marker = obj_ub._fixture_function_marker + if marker.name: + fixture_name = marker.name + else: + fixture_name = name + + # OK we know it is a fixture -- now safe to look up on the _instance_. + try: + obj = getattr(holderobj, name) + # if the fixture is named in the decorator we cannot find it in the module + except AttributeError: + obj = obj_ub + + func = obj._get_wrapped_function() + + self._register_fixture( + name=fixture_name, + nodeid=nodeid, + func=func, + scope=marker.scope, + params=marker.params, + ids=marker.ids, + autouse=marker.autouse, + ) + + def getfixturedefs( + self, argname: str, node: nodes.Node + ) -> Sequence[FixtureDef[Any]] | None: + """Get FixtureDefs for a fixture name which are applicable + to a given node. + + Returns None if there are no fixtures at all defined with the given + name. 
(This is different from the case in which there are fixtures + with the given name, but none applicable to the node. In this case, + an empty result is returned). + + :param argname: Name of the fixture to search for. + :param node: The requesting Node. + """ + try: + fixturedefs = self._arg2fixturedefs[argname] + except KeyError: + return None + return tuple(self._matchfactories(fixturedefs, node)) + + def _matchfactories( + self, fixturedefs: Iterable[FixtureDef[Any]], node: nodes.Node + ) -> Iterator[FixtureDef[Any]]: + parentnodeids = {n.nodeid for n in node.iter_parents()} + for fixturedef in fixturedefs: + if fixturedef.baseid in parentnodeids: + yield fixturedef + + +def show_fixtures_per_test(config: Config) -> int | ExitCode: + from _pytest.main import wrap_session + + return wrap_session(config, _show_fixtures_per_test) + + +_PYTEST_DIR = Path(_pytest.__file__).parent + + +def _pretty_fixture_path(invocation_dir: Path, func) -> str: + loc = Path(getlocation(func, invocation_dir)) + prefix = Path("...", "_pytest") + try: + return str(prefix / loc.relative_to(_PYTEST_DIR)) + except ValueError: + return bestrelpath(invocation_dir, loc) + + +def _show_fixtures_per_test(config: Config, session: Session) -> None: + import _pytest.config + + session.perform_collect() + invocation_dir = config.invocation_params.dir + tw = _pytest.config.create_terminal_writer(config) + verbose = config.get_verbosity() + + def get_best_relpath(func) -> str: + loc = getlocation(func, invocation_dir) + return bestrelpath(invocation_dir, Path(loc)) + + def write_fixture(fixture_def: FixtureDef[object]) -> None: + argname = fixture_def.argname + if verbose <= 0 and argname.startswith("_"): + return + prettypath = _pretty_fixture_path(invocation_dir, fixture_def.func) + tw.write(f"{argname}", green=True) + tw.write(f" -- {prettypath}", yellow=True) + tw.write("\n") + fixture_doc = inspect.getdoc(fixture_def.func) + if fixture_doc: + write_docstring( + tw, + fixture_doc.split("\n\n", maxsplit=1)[0] + if verbose <= 0 + else fixture_doc, + ) + else: + tw.line(" no docstring available", red=True) + + def write_item(item: nodes.Item) -> None: + # Not all items have _fixtureinfo attribute. + info: FuncFixtureInfo | None = getattr(item, "_fixtureinfo", None) + if info is None or not info.name2fixturedefs: + # This test item does not use any fixtures. + return + tw.line() + tw.sep("-", f"fixtures used by {item.name}") + # TODO: Fix this type ignore. + tw.sep("-", f"({get_best_relpath(item.function)})") # type: ignore[attr-defined] + # dict key not used in loop but needed for sorting. + for _, fixturedefs in sorted(info.name2fixturedefs.items()): + assert fixturedefs is not None + if not fixturedefs: + continue + # Last item is expected to be the one used by the test item. 
+ write_fixture(fixturedefs[-1]) + + for session_item in session.items: + write_item(session_item) + + +def showfixtures(config: Config) -> int | ExitCode: + from _pytest.main import wrap_session + + return wrap_session(config, _showfixtures_main) + + +def _showfixtures_main(config: Config, session: Session) -> None: + import _pytest.config + + session.perform_collect() + invocation_dir = config.invocation_params.dir + tw = _pytest.config.create_terminal_writer(config) + verbose = config.get_verbosity() + + fm = session._fixturemanager + + available = [] + seen: set[tuple[str, str]] = set() + + for argname, fixturedefs in fm._arg2fixturedefs.items(): + assert fixturedefs is not None + if not fixturedefs: + continue + for fixturedef in fixturedefs: + loc = getlocation(fixturedef.func, invocation_dir) + if (fixturedef.argname, loc) in seen: + continue + seen.add((fixturedef.argname, loc)) + available.append( + ( + len(fixturedef.baseid), + fixturedef.func.__module__, + _pretty_fixture_path(invocation_dir, fixturedef.func), + fixturedef.argname, + fixturedef, + ) + ) + + available.sort() + currentmodule = None + for baseid, module, prettypath, argname, fixturedef in available: + if currentmodule != module: + if not module.startswith("_pytest."): + tw.line() + tw.sep("-", f"fixtures defined from {module}") + currentmodule = module + if verbose <= 0 and argname.startswith("_"): + continue + tw.write(f"{argname}", green=True) + if fixturedef.scope != "function": + tw.write(f" [{fixturedef.scope} scope]", cyan=True) + tw.write(f" -- {prettypath}", yellow=True) + tw.write("\n") + doc = inspect.getdoc(fixturedef.func) + if doc: + write_docstring( + tw, doc.split("\n\n", maxsplit=1)[0] if verbose <= 0 else doc + ) + else: + tw.line(" no docstring available", red=True) + tw.line() + + +def write_docstring(tw: TerminalWriter, doc: str, indent: str = " ") -> None: + for line in doc.split("\n"): + tw.line(indent + line) diff --git a/.venv/lib/python3.12/site-packages/_pytest/freeze_support.py b/.venv/lib/python3.12/site-packages/_pytest/freeze_support.py new file mode 100644 index 0000000..959ff07 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/freeze_support.py @@ -0,0 +1,45 @@ +"""Provides a function to report all internal modules for using freezing +tools.""" + +from __future__ import annotations + +from collections.abc import Iterator +import types + + +def freeze_includes() -> list[str]: + """Return a list of module names used by pytest that should be + included by cx_freeze.""" + import _pytest + + result = list(_iter_all_modules(_pytest)) + return result + + +def _iter_all_modules( + package: str | types.ModuleType, + prefix: str = "", +) -> Iterator[str]: + """Iterate over the names of all modules that can be found in the given + package, recursively. + + >>> import _pytest + >>> list(_iter_all_modules(_pytest)) + ['_pytest._argcomplete', '_pytest._code.code', ...] + """ + import os + import pkgutil + + if isinstance(package, str): + path = package + else: + # Type ignored because typeshed doesn't define ModuleType.__path__ + # (only defined on packages). + package_path = package.__path__ + path, prefix = package_path[0], package.__name__ + "." 
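+    # Added clarifying comment: at this point ``path`` is the package's directory
+    # on disk and ``prefix`` is the dotted prefix (e.g. "_pytest.") prepended to
+    # the module names yielded below.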
+ for _, name, is_package in pkgutil.iter_modules([path]): + if is_package: + for m in _iter_all_modules(os.path.join(path, name), prefix=name + "."): + yield prefix + m + else: + yield prefix + name diff --git a/.venv/lib/python3.12/site-packages/_pytest/helpconfig.py b/.venv/lib/python3.12/site-packages/_pytest/helpconfig.py new file mode 100644 index 0000000..6a22c9f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/helpconfig.py @@ -0,0 +1,293 @@ +# mypy: allow-untyped-defs +"""Version info, help messages, tracing configuration.""" + +from __future__ import annotations + +import argparse +from collections.abc import Generator +from collections.abc import Sequence +import os +import sys +from typing import Any + +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import PrintHelp +from _pytest.config.argparsing import Parser +from _pytest.terminal import TerminalReporter +import pytest + + +class HelpAction(argparse.Action): + """An argparse Action that will raise a PrintHelp exception in order to skip + the rest of the argument parsing when --help is passed. + + This prevents argparse from raising UsageError when `--help` is used along + with missing required arguments when any are defined, for example by + ``pytest_addoption``. This is similar to the way that the builtin argparse + --help option is implemented by raising SystemExit. + + To opt in to this behavior, the parse caller must set + `namespace._raise_print_help = True`. Otherwise it just sets the option. + """ + + def __init__( + self, option_strings: Sequence[str], dest: str, *, help: str | None = None + ) -> None: + super().__init__( + option_strings=option_strings, + dest=dest, + nargs=0, + const=True, + default=False, + help=help, + ) + + def __call__( + self, + parser: argparse.ArgumentParser, + namespace: argparse.Namespace, + values: str | Sequence[Any] | None, + option_string: str | None = None, + ) -> None: + setattr(namespace, self.dest, self.const) + + if getattr(namespace, "_raise_print_help", False): + raise PrintHelp + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("debugconfig") + group.addoption( + "--version", + "-V", + action="count", + default=0, + dest="version", + help="Display pytest version and information about plugins. " + "When given twice, also display information about plugins.", + ) + group._addoption( # private to use reserved lower-case short option + "-h", + "--help", + action=HelpAction, + dest="help", + help="Show help message and configuration info", + ) + group._addoption( # private to use reserved lower-case short option + "-p", + action="append", + dest="plugins", + default=[], + metavar="name", + help="Early-load given plugin module name or entry point (multi-allowed). " + "To avoid loading of plugins, use the `no:` prefix, e.g. " + "`no:doctest`. See also --disable-plugin-autoload.", + ) + group.addoption( + "--disable-plugin-autoload", + action="store_true", + default=False, + help="Disable plugin auto-loading through entry point packaging metadata. " + "Only plugins explicitly specified in -p or env var PYTEST_PLUGINS will be loaded.", + ) + group.addoption( + "--traceconfig", + "--trace-config", + action="store_true", + default=False, + help="Trace considerations of conftest.py files", + ) + group.addoption( + "--debug", + action="store", + nargs="?", + const="pytestdebug.log", + dest="debug", + metavar="DEBUG_FILE_NAME", + help="Store internal tracing debug information in this log file. 
" + "This file is opened with 'w' and truncated as a result, care advised. " + "Default: pytestdebug.log.", + ) + group._addoption( # private to use reserved lower-case short option + "-o", + "--override-ini", + dest="override_ini", + action="append", + help='Override configuration option with "option=value" style, ' + "e.g. `-o strict_xfail=True -o cache_dir=cache`.", + ) + + +@pytest.hookimpl(wrapper=True) +def pytest_cmdline_parse() -> Generator[None, Config, Config]: + config = yield + + if config.option.debug: + # --debug | --debug was provided. + path = config.option.debug + debugfile = open(path, "w", encoding="utf-8") + debugfile.write( + "versions pytest-{}, " + "python-{}\ninvocation_dir={}\ncwd={}\nargs={}\n\n".format( + pytest.__version__, + ".".join(map(str, sys.version_info)), + config.invocation_params.dir, + os.getcwd(), + config.invocation_params.args, + ) + ) + config.trace.root.setwriter(debugfile.write) + undo_tracing = config.pluginmanager.enable_tracing() + sys.stderr.write(f"writing pytest debug information to {path}\n") + + def unset_tracing() -> None: + debugfile.close() + sys.stderr.write(f"wrote pytest debug information to {debugfile.name}\n") + config.trace.root.setwriter(None) + undo_tracing() + + config.add_cleanup(unset_tracing) + + return config + + +def show_version_verbose(config: Config) -> None: + """Show verbose pytest version installation, including plugins.""" + sys.stdout.write( + f"This is pytest version {pytest.__version__}, imported from {pytest.__file__}\n" + ) + plugininfo = getpluginversioninfo(config) + if plugininfo: + for line in plugininfo: + sys.stdout.write(line + "\n") + + +def pytest_cmdline_main(config: Config) -> int | ExitCode | None: + # Note: a single `--version` argument is handled directly by `Config.main()` to avoid starting up the entire + # pytest infrastructure just to display the version (#13574). + if config.option.version > 1: + show_version_verbose(config) + return ExitCode.OK + elif config.option.help: + config._do_configure() + showhelp(config) + config._ensure_unconfigure() + return ExitCode.OK + return None + + +def showhelp(config: Config) -> None: + import textwrap + + reporter: TerminalReporter | None = config.pluginmanager.get_plugin( + "terminalreporter" + ) + assert reporter is not None + tw = reporter._tw + tw.write(config._parser.optparser.format_help()) + tw.line() + tw.line( + "[pytest] configuration options in the first " + "pytest.toml|pytest.ini|tox.ini|setup.cfg|pyproject.toml file found:" + ) + tw.line() + + columns = tw.fullwidth # costly call + indent_len = 24 # based on argparse's max_help_position=24 + indent = " " * indent_len + for name in config._parser._inidict: + help, type, _default = config._parser._inidict[name] + if help is None: + raise TypeError(f"help argument cannot be None for {name}") + spec = f"{name} ({type}):" + tw.write(f" {spec}") + spec_len = len(spec) + if spec_len > (indent_len - 3): + # Display help starting at a new line. + tw.line() + helplines = textwrap.wrap( + help, + columns, + initial_indent=indent, + subsequent_indent=indent, + break_on_hyphens=False, + ) + + for line in helplines: + tw.line(line) + else: + # Display help starting after the spec, following lines indented. 
+ tw.write(" " * (indent_len - spec_len - 2)) + wrapped = textwrap.wrap(help, columns - indent_len, break_on_hyphens=False) + + if wrapped: + tw.line(wrapped[0]) + for line in wrapped[1:]: + tw.line(indent + line) + + tw.line() + tw.line("Environment variables:") + vars = [ + ( + "CI", + "When set to a non-empty value, pytest knows it is running in a " + "CI process and does not truncate summary info", + ), + ("BUILD_NUMBER", "Equivalent to CI"), + ("PYTEST_ADDOPTS", "Extra command line options"), + ("PYTEST_PLUGINS", "Comma-separated plugins to load during startup"), + ("PYTEST_DISABLE_PLUGIN_AUTOLOAD", "Set to disable plugin auto-loading"), + ("PYTEST_DEBUG", "Set to enable debug tracing of pytest's internals"), + ("PYTEST_DEBUG_TEMPROOT", "Override the system temporary directory"), + ("PYTEST_THEME", "The Pygments style to use for code output"), + ("PYTEST_THEME_MODE", "Set the PYTEST_THEME to be either 'dark' or 'light'"), + ] + for name, help in vars: + tw.line(f" {name:<24} {help}") + tw.line() + tw.line() + + tw.line("to see available markers type: pytest --markers") + tw.line("to see available fixtures type: pytest --fixtures") + tw.line( + "(shown according to specified file_or_dir or current dir " + "if not specified; fixtures with leading '_' are only shown " + "with the '-v' option" + ) + + for warningreport in reporter.stats.get("warnings", []): + tw.line("warning : " + warningreport.message, red=True) + + +def getpluginversioninfo(config: Config) -> list[str]: + lines = [] + plugininfo = config.pluginmanager.list_plugin_distinfo() + if plugininfo: + lines.append("registered third-party plugins:") + for plugin, dist in plugininfo: + loc = getattr(plugin, "__file__", repr(plugin)) + content = f"{dist.project_name}-{dist.version} at {loc}" + lines.append(" " + content) + return lines + + +def pytest_report_header(config: Config) -> list[str]: + lines = [] + if config.option.debug or config.option.traceconfig: + lines.append(f"using: pytest-{pytest.__version__}") + + verinfo = getpluginversioninfo(config) + if verinfo: + lines.extend(verinfo) + + if config.option.traceconfig: + lines.append("active plugins:") + items = config.pluginmanager.list_name_plugin() + for name, plugin in items: + if hasattr(plugin, "__file__"): + r = plugin.__file__ + else: + r = repr(plugin) + lines.append(f" {name:<20}: {r}") + return lines diff --git a/.venv/lib/python3.12/site-packages/_pytest/hookspec.py b/.venv/lib/python3.12/site-packages/_pytest/hookspec.py new file mode 100644 index 0000000..c5bcc36 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/hookspec.py @@ -0,0 +1,1342 @@ +# mypy: allow-untyped-defs +# ruff: noqa: T100 +"""Hook specifications for pytest plugins which are invoked by pytest itself +and by builtin plugins.""" + +from __future__ import annotations + +from collections.abc import Mapping +from collections.abc import Sequence +from pathlib import Path +from typing import Any +from typing import TYPE_CHECKING + +from pluggy import HookspecMarker + +from .deprecated import HOOK_LEGACY_PATH_ARG + + +if TYPE_CHECKING: + import pdb + from typing import Literal + import warnings + + from _pytest._code.code import ExceptionInfo + from _pytest._code.code import ExceptionRepr + from _pytest.compat import LEGACY_PATH + from _pytest.config import _PluggyPlugin + from _pytest.config import Config + from _pytest.config import ExitCode + from _pytest.config import PytestPluginManager + from _pytest.config.argparsing import Parser + from _pytest.fixtures import FixtureDef + from 
_pytest.fixtures import SubRequest + from _pytest.main import Session + from _pytest.nodes import Collector + from _pytest.nodes import Item + from _pytest.outcomes import Exit + from _pytest.python import Class + from _pytest.python import Function + from _pytest.python import Metafunc + from _pytest.python import Module + from _pytest.reports import CollectReport + from _pytest.reports import TestReport + from _pytest.runner import CallInfo + from _pytest.terminal import TerminalReporter + from _pytest.terminal import TestShortLogReport + + +hookspec = HookspecMarker("pytest") + +# ------------------------------------------------------------------------- +# Initialization hooks called for every plugin +# ------------------------------------------------------------------------- + + +@hookspec(historic=True) +def pytest_addhooks(pluginmanager: PytestPluginManager) -> None: + """Called at plugin registration time to allow adding new hooks via a call to + :func:`pluginmanager.add_hookspecs(module_or_class, prefix) `. + + :param pluginmanager: The pytest plugin manager. + + .. note:: + This hook is incompatible with hook wrappers. + + Use in conftest plugins + ======================= + + If a conftest plugin implements this hook, it will be called immediately + when the conftest is registered. + """ + + +@hookspec(historic=True) +def pytest_plugin_registered( + plugin: _PluggyPlugin, + plugin_name: str, + manager: PytestPluginManager, +) -> None: + """A new pytest plugin got registered. + + :param plugin: The plugin module or instance. + :param plugin_name: The name by which the plugin is registered. + :param manager: The pytest plugin manager. + + .. note:: + This hook is incompatible with hook wrappers. + + Use in conftest plugins + ======================= + + If a conftest plugin implements this hook, it will be called immediately + when the conftest is registered, once for each plugin registered thus far + (including itself!), and for all plugins thereafter when they are + registered. + """ + + +@hookspec(historic=True) +def pytest_addoption(parser: Parser, pluginmanager: PytestPluginManager) -> None: + """Register argparse-style options and config-style config values, + called once at the beginning of a test run. + + :param parser: + To add command line options, call + :py:func:`parser.addoption(...) `. + To add config-file values call :py:func:`parser.addini(...) + `. + + :param pluginmanager: + The pytest plugin manager, which can be used to install :py:func:`~pytest.hookspec`'s + or :py:func:`~pytest.hookimpl`'s and allow one plugin to call another plugin's hooks + to change how command line options are added. + + Options can later be accessed through the + :py:class:`config ` object, respectively: + + - :py:func:`config.getoption(name) ` to + retrieve the value of a command line option. + + - :py:func:`config.getini(name) ` to retrieve + a value read from a configuration file. + + The config object is passed around on many internal objects via the ``.config`` + attribute or can be retrieved as the ``pytestconfig`` fixture. + + .. note:: + This hook is incompatible with hook wrappers. + + Use in conftest plugins + ======================= + + If a conftest plugin implements this hook, it will be called immediately + when the conftest is registered. + + This hook is only called for :ref:`initial conftests `. + """ + + +@hookspec(historic=True) +def pytest_configure(config: Config) -> None: + """Allow plugins and conftest files to perform initial configuration. + + .. 
note:: + This hook is incompatible with hook wrappers. + + :param config: The pytest config object. + + Use in conftest plugins + ======================= + + This hook is called for every :ref:`initial conftest ` file + after command line options have been parsed. After that, the hook is called + for other conftest files as they are registered. + """ + + +# ------------------------------------------------------------------------- +# Bootstrapping hooks called for plugins registered early enough: +# internal and 3rd party plugins. +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_cmdline_parse( + pluginmanager: PytestPluginManager, args: list[str] +) -> Config | None: + """Return an initialized :class:`~pytest.Config`, parsing the specified args. + + Stops at first non-None result, see :ref:`firstresult`. + + .. note:: + This hook is only called for plugin classes passed to the + ``plugins`` arg when using `pytest.main`_ to perform an in-process + test run. + + :param pluginmanager: The pytest plugin manager. + :param args: List of arguments passed on the command line. + :returns: A pytest config object. + + Use in conftest plugins + ======================= + + This hook is not called for conftest files. + """ + + +def pytest_load_initial_conftests( + early_config: Config, parser: Parser, args: list[str] +) -> None: + """Called to implement the loading of :ref:`initial conftest files + ` ahead of command line option parsing. + + :param early_config: The pytest config object. + :param args: Arguments passed on the command line. + :param parser: To add command line options. + + Use in conftest plugins + ======================= + + This hook is not called for conftest files. + """ + + +@hookspec(firstresult=True) +def pytest_cmdline_main(config: Config) -> ExitCode | int | None: + """Called for performing the main command line action. + + The default implementation will invoke the configure hooks and + :hook:`pytest_runtestloop`. + + Stops at first non-None result, see :ref:`firstresult`. + + :param config: The pytest config object. + :returns: The exit code. + + Use in conftest plugins + ======================= + + This hook is only called for :ref:`initial conftests `. + """ + + +# ------------------------------------------------------------------------- +# collection hooks +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_collection(session: Session) -> object | None: + """Perform the collection phase for the given session. + + Stops at first non-None result, see :ref:`firstresult`. + The return value is not used, but only stops further processing. + + The default collection phase is this (see individual hooks for full details): + + 1. Starting from ``session`` as the initial collector: + + 1. ``pytest_collectstart(collector)`` + 2. ``report = pytest_make_collect_report(collector)`` + 3. ``pytest_exception_interact(collector, call, report)`` if an interactive exception occurred + 4. For each collected node: + + 1. If an item, ``pytest_itemcollected(item)`` + 2. If a collector, recurse into it. + + 5. ``pytest_collectreport(report)`` + + 2. ``pytest_collection_modifyitems(session, config, items)`` + + 1. ``pytest_deselected(items)`` for any deselected items (may be called multiple times) + + 3. ``pytest_collection_finish(session)`` + 4. Set ``session.items`` to the list of collected items + 5. 
Set ``session.testscollected`` to the number of collected items + + You can implement this hook to only perform some action before collection, + for example the terminal plugin uses it to start displaying the collection + counter (and returns `None`). + + :param session: The pytest session object. + + Use in conftest plugins + ======================= + + This hook is only called for :ref:`initial conftests `. + """ + + +def pytest_collection_modifyitems( + session: Session, config: Config, items: list[Item] +) -> None: + """Called after collection has been performed. May filter or re-order + the items in-place. + + When items are deselected (filtered out from ``items``), + the hook :hook:`pytest_deselected` must be called explicitly + with the deselected items to properly notify other plugins, + e.g. with ``config.hook.pytest_deselected(items=deselected_items)``. + + :param session: The pytest session object. + :param config: The pytest config object. + :param items: List of item objects. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +def pytest_collection_finish(session: Session) -> None: + """Called after collection has been performed and modified. + + :param session: The pytest session object. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +@hookspec( + firstresult=True, + warn_on_impl_args={ + "path": HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg="path", pathlib_path_arg="collection_path" + ), + }, +) +def pytest_ignore_collect( + collection_path: Path, path: LEGACY_PATH, config: Config +) -> bool | None: + """Return ``True`` to ignore this path for collection. + + Return ``None`` to let other plugins ignore the path for collection. + + Returning ``False`` will forcefully *not* ignore this path for collection, + without giving a chance for other plugins to ignore this path. + + This hook is consulted for all files and directories prior to calling + more specific hooks. + + Stops at first non-None result, see :ref:`firstresult`. + + :param collection_path: The path to analyze. + :type collection_path: pathlib.Path + :param path: The path to analyze (deprecated). + :param config: The pytest config object. + + .. versionchanged:: 7.0.0 + The ``collection_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``path`` parameter. The ``path`` parameter + has been deprecated. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collection path, only + conftest files in parent directories of the collection path are consulted + (if the path is a directory, its own conftest file is *not* consulted - a + directory cannot ignore itself!). + """ + + +@hookspec(firstresult=True) +def pytest_collect_directory(path: Path, parent: Collector) -> Collector | None: + """Create a :class:`~pytest.Collector` for the given directory, or None if + not relevant. + + .. versionadded:: 8.0 + + For best results, the returned collector should be a subclass of + :class:`~pytest.Directory`, but this is not required. + + The new node needs to have the specified ``parent`` as a parent. + + Stops at first non-None result, see :ref:`firstresult`. + + :param path: The path to analyze. + :type path: pathlib.Path + + See :ref:`custom directory collectors` for a simple example of use of this + hook. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. 
For a given collection path, only + conftest files in parent directories of the collection path are consulted + (if the path is a directory, its own conftest file is *not* consulted - a + directory cannot collect itself!). + """ + + +@hookspec( + warn_on_impl_args={ + "path": HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg="path", pathlib_path_arg="file_path" + ), + }, +) +def pytest_collect_file( + file_path: Path, path: LEGACY_PATH, parent: Collector +) -> Collector | None: + """Create a :class:`~pytest.Collector` for the given path, or None if not relevant. + + For best results, the returned collector should be a subclass of + :class:`~pytest.File`, but this is not required. + + The new node needs to have the specified ``parent`` as a parent. + + :param file_path: The path to analyze. + :type file_path: pathlib.Path + :param path: The path to collect (deprecated). + + .. versionchanged:: 7.0.0 + The ``file_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``path`` parameter. The ``path`` parameter + has been deprecated. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given file path, only + conftest files in parent directories of the file path are consulted. + """ + + +# logging hooks for collection + + +def pytest_collectstart(collector: Collector) -> None: + """Collector starts collecting. + + :param collector: + The collector. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collector, only + conftest files in the collector's directory and its parent directories are + consulted. + """ + + +def pytest_itemcollected(item: Item) -> None: + """We just collected a test item. + + :param item: + The item. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_collectreport(report: CollectReport) -> None: + """Collector finished collecting. + + :param report: + The collect report. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collector, only + conftest files in the collector's directory and its parent directories are + consulted. + """ + + +def pytest_deselected(items: Sequence[Item]) -> None: + """Called for deselected test items, e.g. by keyword. + + Note that this hook has two integration aspects for plugins: + + - it can be *implemented* to be notified of deselected items + - it must be *called* from :hook:`pytest_collection_modifyitems` + implementations when items are deselected (to properly notify other plugins). + + May be called multiple times. + + :param items: + The items. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +@hookspec(firstresult=True) +def pytest_make_collect_report(collector: Collector) -> CollectReport | None: + """Perform :func:`collector.collect() ` and return + a :class:`~pytest.CollectReport`. + + Stops at first non-None result, see :ref:`firstresult`. + + :param collector: + The collector. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collector, only + conftest files in the collector's directory and its parent directories are + consulted. 
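+
+    Example (an illustrative conftest sketch, not from the official docs)::
+
+        import pytest
+
+        @pytest.hookimpl(wrapper=True)
+        def pytest_make_collect_report(collector):
+            report = yield  # the CollectReport produced by the default impl
+            if report.outcome == "failed":
+                print(f"collection failed for {collector.nodeid}")
+            return report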
+ """ + + +# ------------------------------------------------------------------------- +# Python test function related hooks +# ------------------------------------------------------------------------- + + +@hookspec( + firstresult=True, + warn_on_impl_args={ + "path": HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg="path", pathlib_path_arg="module_path" + ), + }, +) +def pytest_pycollect_makemodule( + module_path: Path, path: LEGACY_PATH, parent +) -> Module | None: + """Return a :class:`pytest.Module` collector or None for the given path. + + This hook will be called for each matching test module path. + The :hook:`pytest_collect_file` hook needs to be used if you want to + create test modules for files that do not match as a test module. + + Stops at first non-None result, see :ref:`firstresult`. + + :param module_path: The path of the module to collect. + :type module_path: pathlib.Path + :param path: The path of the module to collect (deprecated). + + .. versionchanged:: 7.0.0 + The ``module_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``path`` parameter. + + The ``path`` parameter has been deprecated in favor of ``fspath``. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given parent collector, + only conftest files in the collector's directory and its parent directories + are consulted. + """ + + +@hookspec(firstresult=True) +def pytest_pycollect_makeitem( + collector: Module | Class, name: str, obj: object +) -> None | Item | Collector | list[Item | Collector]: + """Return a custom item/collector for a Python object in a module, or None. + + Stops at first non-None result, see :ref:`firstresult`. + + :param collector: + The module/class collector. + :param name: + The name of the object in the module/class. + :param obj: + The object. + :returns: + The created items/collectors. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collector, only + conftest files in the collector's directory and its parent directories + are consulted. + """ + + +@hookspec(firstresult=True) +def pytest_pyfunc_call(pyfuncitem: Function) -> object | None: + """Call underlying test function. + + Stops at first non-None result, see :ref:`firstresult`. + + :param pyfuncitem: + The function item. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only + conftest files in the item's directory and its parent directories + are consulted. + """ + + +def pytest_generate_tests(metafunc: Metafunc) -> None: + """Generate (multiple) parametrized calls to a test function. + + :param metafunc: + The :class:`~pytest.Metafunc` helper for the test function. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given function definition, + only conftest files in the functions's directory and its parent directories + are consulted. + """ + + +@hookspec(firstresult=True) +def pytest_make_parametrize_id(config: Config, val: object, argname: str) -> str | None: + """Return a user-friendly string representation of the given ``val`` + that will be used by @pytest.mark.parametrize calls, or None if the hook + doesn't know about ``val``. + + The parameter name is available as ``argname``, if required. + + Stops at first non-None result, see :ref:`firstresult`. + + :param config: The pytest config object. + :param val: The parametrized value. 
+ :param argname: The automatic parameter name produced by pytest. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +# ------------------------------------------------------------------------- +# runtest related hooks +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_runtestloop(session: Session) -> object | None: + """Perform the main runtest loop (after collection finished). + + The default hook implementation performs the runtest protocol for all items + collected in the session (``session.items``), unless the collection failed + or the ``collectonly`` pytest option is set. + + If at any point :py:func:`pytest.exit` is called, the loop is + terminated immediately. + + If at any point ``session.shouldfail`` or ``session.shouldstop`` are set, the + loop is terminated after the runtest protocol for the current item is finished. + + :param session: The pytest session object. + + Stops at first non-None result, see :ref:`firstresult`. + The return value is not used, but only stops further processing. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +@hookspec(firstresult=True) +def pytest_runtest_protocol(item: Item, nextitem: Item | None) -> object | None: + """Perform the runtest protocol for a single test item. + + The default runtest protocol is this (see individual hooks for full details): + + - ``pytest_runtest_logstart(nodeid, location)`` + + - Setup phase: + - ``call = pytest_runtest_setup(item)`` (wrapped in ``CallInfo(when="setup")``) + - ``report = pytest_runtest_makereport(item, call)`` + - ``pytest_runtest_logreport(report)`` + - ``pytest_exception_interact(call, report)`` if an interactive exception occurred + + - Call phase, if the setup passed and the ``setuponly`` pytest option is not set: + - ``call = pytest_runtest_call(item)`` (wrapped in ``CallInfo(when="call")``) + - ``report = pytest_runtest_makereport(item, call)`` + - ``pytest_runtest_logreport(report)`` + - ``pytest_exception_interact(call, report)`` if an interactive exception occurred + + - Teardown phase: + - ``call = pytest_runtest_teardown(item, nextitem)`` (wrapped in ``CallInfo(when="teardown")``) + - ``report = pytest_runtest_makereport(item, call)`` + - ``pytest_runtest_logreport(report)`` + - ``pytest_exception_interact(call, report)`` if an interactive exception occurred + + - ``pytest_runtest_logfinish(nodeid, location)`` + + :param item: Test item for which the runtest protocol is performed. + :param nextitem: The scheduled-to-be-next test item (or None if this is the end my friend). + + Stops at first non-None result, see :ref:`firstresult`. + The return value is not used, but only stops further processing. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +def pytest_runtest_logstart(nodeid: str, location: tuple[str, int | None, str]) -> None: + """Called at the start of running the runtest protocol for a single item. + + See :hook:`pytest_runtest_protocol` for a description of the runtest protocol. + + :param nodeid: Full node ID of the item. + :param location: A tuple of ``(filename, lineno, testname)`` + where ``filename`` is a file path relative to ``config.rootpath`` + and ``lineno`` is 0-based. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. 
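+
+    Example (an illustrative conftest sketch; the ``print`` output format is
+    arbitrary)::
+
+        def pytest_runtest_logstart(nodeid, location):
+            filename, lineno, testname = location
+            print(f"running {nodeid} ({filename}:{lineno})")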
For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_runtest_logfinish( + nodeid: str, location: tuple[str, int | None, str] +) -> None: + """Called at the end of running the runtest protocol for a single item. + + See :hook:`pytest_runtest_protocol` for a description of the runtest protocol. + + :param nodeid: Full node ID of the item. + :param location: A tuple of ``(filename, lineno, testname)`` + where ``filename`` is a file path relative to ``config.rootpath`` + and ``lineno`` is 0-based. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_runtest_setup(item: Item) -> None: + """Called to perform the setup phase for a test item. + + The default implementation runs ``setup()`` on ``item`` and all of its + parents (which haven't been setup yet). This includes obtaining the + values of fixtures required by the item (which haven't been obtained + yet). + + :param item: + The item. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_runtest_call(item: Item) -> None: + """Called to run the test for test item (the call phase). + + The default implementation calls ``item.runtest()``. + + :param item: + The item. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_runtest_teardown(item: Item, nextitem: Item | None) -> None: + """Called to perform the teardown phase for a test item. + + The default implementation runs the finalizers and calls ``teardown()`` + on ``item`` and all of its parents (which need to be torn down). This + includes running the teardown phase of fixtures required by the item (if + they go out of scope). + + :param item: + The item. + :param nextitem: + The scheduled-to-be-next test item (None if no further test item is + scheduled). This argument is used to perform exact teardowns, i.e. + calling just enough finalizers so that nextitem only needs to call + setup functions. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +@hookspec(firstresult=True) +def pytest_runtest_makereport(item: Item, call: CallInfo[None]) -> TestReport | None: + """Called to create a :class:`~pytest.TestReport` for each of + the setup, call and teardown runtest phases of a test item. + + See :hook:`pytest_runtest_protocol` for a description of the runtest protocol. + + :param item: The item. + :param call: The :class:`~pytest.CallInfo` for the phase. + + Stops at first non-None result, see :ref:`firstresult`. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_runtest_logreport(report: TestReport) -> None: + """Process the :class:`~pytest.TestReport` produced for each + of the setup, call and teardown runtest phases of an item. 
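+
+    Example (a minimal conftest sketch; the module-level list is only
+    illustrative)::
+
+        failed_nodeids = []
+
+        def pytest_runtest_logreport(report):
+            if report.when == "call" and report.failed:
+                failed_nodeids.append(report.nodeid)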
+ + See :hook:`pytest_runtest_protocol` for a description of the runtest protocol. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +@hookspec(firstresult=True) +def pytest_report_to_serializable( + config: Config, + report: CollectReport | TestReport, +) -> dict[str, Any] | None: + """Serialize the given report object into a data structure suitable for + sending over the wire, e.g. converted to JSON. + + :param config: The pytest config object. + :param report: The report. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. The exact details may depend + on the plugin which calls the hook. + """ + + +@hookspec(firstresult=True) +def pytest_report_from_serializable( + config: Config, + data: dict[str, Any], +) -> CollectReport | TestReport | None: + """Restore a report object previously serialized with + :hook:`pytest_report_to_serializable`. + + :param config: The pytest config object. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. The exact details may depend + on the plugin which calls the hook. + """ + + +# ------------------------------------------------------------------------- +# Fixture related hooks +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_fixture_setup( + fixturedef: FixtureDef[Any], request: SubRequest +) -> object | None: + """Perform fixture setup execution. + + :param fixturedef: + The fixture definition object. + :param request: + The fixture request object. + :returns: + The return value of the call to the fixture function. + + Stops at first non-None result, see :ref:`firstresult`. + + .. note:: + If the fixture function returns None, other implementations of + this hook function will continue to be called, according to the + behavior of the :ref:`firstresult` option. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given fixture, only + conftest files in the fixture scope's directory and its parent directories + are consulted. + """ + + +def pytest_fixture_post_finalizer( + fixturedef: FixtureDef[Any], request: SubRequest +) -> None: + """Called after fixture teardown, but before the cache is cleared, so + the fixture result ``fixturedef.cached_result`` is still available (not + ``None``). + + :param fixturedef: + The fixture definition object. + :param request: + The fixture request object. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given fixture, only + conftest files in the fixture scope's directory and its parent directories + are consulted. + """ + + +# ------------------------------------------------------------------------- +# test session related hooks +# ------------------------------------------------------------------------- + + +def pytest_sessionstart(session: Session) -> None: + """Called after the ``Session`` object has been created and before performing collection + and entering the run test loop. + + :param session: The pytest session object. + + Use in conftest plugins + ======================= + + This hook is only called for :ref:`initial conftests `. 
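+
+    Example (a minimal conftest sketch; the attribute name
+    ``_my_session_start`` is arbitrary and not part of the pytest API)::
+
+        import time
+
+        def pytest_sessionstart(session):
+            # record when the run started, for use by later hooks
+            session._my_session_start = time.time()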
+ """ + + +def pytest_sessionfinish( + session: Session, + exitstatus: int | ExitCode, +) -> None: + """Called after whole test run finished, right before returning the exit status to the system. + + :param session: The pytest session object. + :param exitstatus: The status which pytest will return to the system. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +def pytest_unconfigure(config: Config) -> None: + """Called before test process is exited. + + :param config: The pytest config object. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +# ------------------------------------------------------------------------- +# hooks for customizing the assert methods +# ------------------------------------------------------------------------- + + +def pytest_assertrepr_compare( + config: Config, op: str, left: object, right: object +) -> list[str] | None: + """Return explanation for comparisons in failing assert expressions. + + Return None for no custom explanation, otherwise return a list + of strings. The strings will be joined by newlines but any newlines + *in* a string will be escaped. Note that all but the first line will + be indented slightly, the intention is for the first line to be a summary. + + :param config: The pytest config object. + :param op: The operator, e.g. `"=="`, `"!="`, `"not in"`. + :param left: The left operand. + :param right: The right operand. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_assertion_pass(item: Item, lineno: int, orig: str, expl: str) -> None: + """Called whenever an assertion passes. + + .. versionadded:: 5.0 + + Use this hook to do some processing after a passing assertion. + The original assertion information is available in the `orig` string + and the pytest introspected assertion information is available in the + `expl` string. + + This hook must be explicitly enabled by the :confval:`enable_assertion_pass_hook` + configuration option: + + .. tab:: toml + + .. code-block:: toml + + [pytest] + enable_assertion_pass_hook = true + + .. tab:: ini + + .. code-block:: ini + + [pytest] + enable_assertion_pass_hook = true + + You need to **clean the .pyc** files in your project directory and interpreter libraries + when enabling this option, as assertions will require to be re-written. + + :param item: pytest item object of current test. + :param lineno: Line number of the assert statement. + :param orig: String with the original assertion. + :param expl: String with the assert explanation. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +# ------------------------------------------------------------------------- +# Hooks for influencing reporting (invoked from _pytest_terminal). 
+# ------------------------------------------------------------------------- + + +@hookspec( + warn_on_impl_args={ + "startdir": HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg="startdir", pathlib_path_arg="start_path" + ), + }, +) +def pytest_report_header( # type:ignore[empty-body] + config: Config, start_path: Path, startdir: LEGACY_PATH +) -> str | list[str]: + """Return a string or list of strings to be displayed as header info for terminal reporting. + + :param config: The pytest config object. + :param start_path: The starting dir. + :type start_path: pathlib.Path + :param startdir: The starting dir (deprecated). + + .. note:: + + Lines returned by a plugin are displayed before those of plugins which + ran before it. + If you want to have your line(s) displayed first, use + :ref:`trylast=True `. + + .. versionchanged:: 7.0.0 + The ``start_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``startdir`` parameter. The ``startdir`` parameter + has been deprecated. + + Use in conftest plugins + ======================= + + This hook is only called for :ref:`initial conftests `. + """ + + +@hookspec( + warn_on_impl_args={ + "startdir": HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg="startdir", pathlib_path_arg="start_path" + ), + }, +) +def pytest_report_collectionfinish( # type:ignore[empty-body] + config: Config, + start_path: Path, + startdir: LEGACY_PATH, + items: Sequence[Item], +) -> str | list[str]: + """Return a string or list of strings to be displayed after collection + has finished successfully. + + These strings will be displayed after the standard "collected X items" message. + + .. versionadded:: 3.2 + + :param config: The pytest config object. + :param start_path: The starting dir. + :type start_path: pathlib.Path + :param startdir: The starting dir (deprecated). + :param items: List of pytest items that are going to be executed; this list should not be modified. + + .. note:: + + Lines returned by a plugin are displayed before those of plugins which + ran before it. + If you want to have your line(s) displayed first, use + :ref:`trylast=True `. + + .. versionchanged:: 7.0.0 + The ``start_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``startdir`` parameter. The ``startdir`` parameter + has been deprecated. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +@hookspec(firstresult=True) +def pytest_report_teststatus( # type:ignore[empty-body] + report: CollectReport | TestReport, config: Config +) -> TestShortLogReport | tuple[str, str, str | tuple[str, Mapping[str, bool]]]: + """Return result-category, shortletter and verbose word for status + reporting. + + The result-category is a category in which to count the result, for + example "passed", "skipped", "error" or the empty string. + + The shortletter is shown as testing progresses, for example ".", "s", + "E" or the empty string. + + The verbose word is shown as testing progresses in verbose mode, for + example "PASSED", "SKIPPED", "ERROR" or the empty string. + + pytest may style these implicitly according to the report outcome. + To provide explicit styling, return a tuple for the verbose word, + for example ``"rerun", "R", ("RERUN", {"yellow": True})``. + + :param report: The report object whose status is to be returned. + :param config: The pytest config object. + :returns: The test status. + + Stops at first non-None result, see :ref:`firstresult`. 
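+
+    Example (an illustrative conftest sketch; the 5 second threshold and the
+    "SLOW PASS" wording are arbitrary)::
+
+        def pytest_report_teststatus(report, config):
+            if report.when == "call" and report.passed and report.duration > 5.0:
+                return "passed", "P", ("SLOW PASS", {"yellow": True})
+            return None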
+ + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +def pytest_terminal_summary( + terminalreporter: TerminalReporter, + exitstatus: ExitCode, + config: Config, +) -> None: + """Add a section to terminal summary reporting. + + :param terminalreporter: The internal terminal reporter object. + :param exitstatus: The exit status that will be reported back to the OS. + :param config: The pytest config object. + + .. versionadded:: 4.2 + The ``config`` parameter. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +@hookspec(historic=True) +def pytest_warning_recorded( + warning_message: warnings.WarningMessage, + when: Literal["config", "collect", "runtest"], + nodeid: str, + location: tuple[str, int, str] | None, +) -> None: + """Process a warning captured by the internal pytest warnings plugin. + + :param warning_message: + The captured warning. This is the same object produced by :class:`warnings.catch_warnings`, + and contains the same attributes as the parameters of :py:func:`warnings.showwarning`. + + :param when: + Indicates when the warning was captured. Possible values: + + * ``"config"``: during pytest configuration/initialization stage. + * ``"collect"``: during test collection. + * ``"runtest"``: during test execution. + + :param nodeid: + Full id of the item. Empty string for warnings that are not specific to + a particular node. + + :param location: + When available, holds information about the execution context of the captured + warning (filename, linenumber, function). ``function`` evaluates to + when the execution context is at the module level. + + .. versionadded:: 6.0 + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. If the warning is specific to a + particular node, only conftest files in parent directories of the node are + consulted. + """ + + +# ------------------------------------------------------------------------- +# Hooks for influencing skipping +# ------------------------------------------------------------------------- + + +def pytest_markeval_namespace( # type:ignore[empty-body] + config: Config, +) -> dict[str, Any]: + """Called when constructing the globals dictionary used for + evaluating string conditions in xfail/skipif markers. + + This is useful when the condition for a marker requires + objects that are expensive or impossible to obtain during + collection time, which is required by normal boolean + conditions. + + .. versionadded:: 6.2 + + :param config: The pytest config object. + :returns: A dictionary of additional globals to add. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in parent directories of the item are consulted. + """ + + +# ------------------------------------------------------------------------- +# error handling and internal debugging hooks +# ------------------------------------------------------------------------- + + +def pytest_internalerror( + excrepr: ExceptionRepr, + excinfo: ExceptionInfo[BaseException], +) -> bool | None: + """Called for internal errors. + + Return True to suppress the fallback handling of printing an + INTERNALERROR message directly to sys.stderr. + + :param excrepr: The exception repr object. + :param excinfo: The exception info. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. 
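+
+    Example (a minimal conftest sketch; the log file name is arbitrary)::
+
+        def pytest_internalerror(excrepr, excinfo):
+            # keep a copy of the internal error, but let pytest still print it
+            with open("internal-errors.log", "a", encoding="utf-8") as fh:
+                fh.write(str(excrepr) + "\n")
+            return None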
+ """ + + +def pytest_keyboard_interrupt( + excinfo: ExceptionInfo[KeyboardInterrupt | Exit], +) -> None: + """Called for keyboard interrupt. + + :param excinfo: The exception info. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +def pytest_exception_interact( + node: Item | Collector, + call: CallInfo[Any], + report: CollectReport | TestReport, +) -> None: + """Called when an exception was raised which can potentially be + interactively handled. + + May be called during collection (see :hook:`pytest_make_collect_report`), + in which case ``report`` is a :class:`~pytest.CollectReport`. + + May be called during runtest of an item (see :hook:`pytest_runtest_protocol`), + in which case ``report`` is a :class:`~pytest.TestReport`. + + This hook is not called if the exception that was raised is an internal + exception like ``skip.Exception``. + + :param node: + The item or collector. + :param call: + The call information. Contains the exception. + :param report: + The collection or test report. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given node, only conftest + files in parent directories of the node are consulted. + """ + + +def pytest_enter_pdb(config: Config, pdb: pdb.Pdb) -> None: + """Called upon pdb.set_trace(). + + Can be used by plugins to take special action just before the python + debugger enters interactive mode. + + :param config: The pytest config object. + :param pdb: The Pdb instance. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +def pytest_leave_pdb(config: Config, pdb: pdb.Pdb) -> None: + """Called when leaving pdb (e.g. with continue after pdb.set_trace()). + + Can be used by plugins to take special action just after the python + debugger leaves interactive mode. + + :param config: The pytest config object. + :param pdb: The Pdb instance. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ diff --git a/.venv/lib/python3.12/site-packages/_pytest/junitxml.py b/.venv/lib/python3.12/site-packages/_pytest/junitxml.py new file mode 100644 index 0000000..ae8d2b9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/junitxml.py @@ -0,0 +1,695 @@ +# mypy: allow-untyped-defs +"""Report test results in JUnit-XML format, for use with Jenkins and build +integration servers. + +Based on initial code from Ross Lawley. + +Output conforms to +https://github.com/jenkinsci/xunit-plugin/blob/master/src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd +""" + +from __future__ import annotations + +from collections.abc import Callable +import functools +import os +import platform +import re +import xml.etree.ElementTree as ET + +from _pytest import nodes +from _pytest import timing +from _pytest._code.code import ExceptionRepr +from _pytest._code.code import ReprFileLocation +from _pytest.config import Config +from _pytest.config import filename_arg +from _pytest.config.argparsing import Parser +from _pytest.fixtures import FixtureRequest +from _pytest.reports import TestReport +from _pytest.stash import StashKey +from _pytest.terminal import TerminalReporter +import pytest + + +xml_key = StashKey["LogXML"]() + + +def bin_xml_escape(arg: object) -> str: + r"""Visually escape invalid XML characters. 
+ + For example, transforms + 'hello\aworld\b' + into + 'hello#x07world#x08' + Note that the #xABs are *not* XML escapes - missing the ampersand «. + The idea is to escape visually for the user rather than for XML itself. + """ + + def repl(matchobj: re.Match[str]) -> str: + i = ord(matchobj.group()) + if i <= 0xFF: + return f"#x{i:02X}" + else: + return f"#x{i:04X}" + + # The spec range of valid chars is: + # Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF] + # For an unknown(?) reason, we disallow #x7F (DEL) as well. + illegal_xml_re = ( + "[^\u0009\u000a\u000d\u0020-\u007e\u0080-\ud7ff\ue000-\ufffd\u10000-\u10ffff]" + ) + return re.sub(illegal_xml_re, repl, str(arg)) + + +def merge_family(left, right) -> None: + result = {} + for kl, vl in left.items(): + for kr, vr in right.items(): + if not isinstance(vl, list): + raise TypeError(type(vl)) + result[kl] = vl + vr + left.update(result) + + +families = { # pylint: disable=dict-init-mutate + "_base": {"testcase": ["classname", "name"]}, + "_base_legacy": {"testcase": ["file", "line", "url"]}, +} +# xUnit 1.x inherits legacy attributes. +families["xunit1"] = families["_base"].copy() +merge_family(families["xunit1"], families["_base_legacy"]) + +# xUnit 2.x uses strict base attributes. +families["xunit2"] = families["_base"] + + +class _NodeReporter: + def __init__(self, nodeid: str | TestReport, xml: LogXML) -> None: + self.id = nodeid + self.xml = xml + self.add_stats = self.xml.add_stats + self.family = self.xml.family + self.duration = 0.0 + self.properties: list[tuple[str, str]] = [] + self.nodes: list[ET.Element] = [] + self.attrs: dict[str, str] = {} + + def append(self, node: ET.Element) -> None: + self.xml.add_stats(node.tag) + self.nodes.append(node) + + def add_property(self, name: str, value: object) -> None: + self.properties.append((str(name), bin_xml_escape(value))) + + def add_attribute(self, name: str, value: object) -> None: + self.attrs[str(name)] = bin_xml_escape(value) + + def make_properties_node(self) -> ET.Element | None: + """Return a Junit node containing custom properties, if any.""" + if self.properties: + properties = ET.Element("properties") + for name, value in self.properties: + properties.append(ET.Element("property", name=name, value=value)) + return properties + return None + + def record_testreport(self, testreport: TestReport) -> None: + names = mangle_test_address(testreport.nodeid) + existing_attrs = self.attrs + classnames = names[:-1] + if self.xml.prefix: + classnames.insert(0, self.xml.prefix) + attrs: dict[str, str] = { + "classname": ".".join(classnames), + "name": bin_xml_escape(names[-1]), + "file": testreport.location[0], + } + if testreport.location[1] is not None: + attrs["line"] = str(testreport.location[1]) + if hasattr(testreport, "url"): + attrs["url"] = testreport.url + self.attrs = attrs + self.attrs.update(existing_attrs) # Restore any user-defined attributes. + + # Preserve legacy testcase behavior. + if self.family == "xunit1": + return + + # Filter out attributes not permitted by this test family. + # Including custom attributes because they are not valid here. 
+ temp_attrs = {} + for key in self.attrs: + if key in families[self.family]["testcase"]: + temp_attrs[key] = self.attrs[key] + self.attrs = temp_attrs + + def to_xml(self) -> ET.Element: + testcase = ET.Element("testcase", self.attrs, time=f"{self.duration:.3f}") + properties = self.make_properties_node() + if properties is not None: + testcase.append(properties) + testcase.extend(self.nodes) + return testcase + + def _add_simple(self, tag: str, message: str, data: str | None = None) -> None: + node = ET.Element(tag, message=message) + node.text = bin_xml_escape(data) + self.append(node) + + def write_captured_output(self, report: TestReport) -> None: + if not self.xml.log_passing_tests and report.passed: + return + + content_out = report.capstdout + content_log = report.caplog + content_err = report.capstderr + if self.xml.logging == "no": + return + content_all = "" + if self.xml.logging in ["log", "all"]: + content_all = self._prepare_content(content_log, " Captured Log ") + if self.xml.logging in ["system-out", "out-err", "all"]: + content_all += self._prepare_content(content_out, " Captured Out ") + self._write_content(report, content_all, "system-out") + content_all = "" + if self.xml.logging in ["system-err", "out-err", "all"]: + content_all += self._prepare_content(content_err, " Captured Err ") + self._write_content(report, content_all, "system-err") + content_all = "" + if content_all: + self._write_content(report, content_all, "system-out") + + def _prepare_content(self, content: str, header: str) -> str: + return "\n".join([header.center(80, "-"), content, ""]) + + def _write_content(self, report: TestReport, content: str, jheader: str) -> None: + tag = ET.Element(jheader) + tag.text = bin_xml_escape(content) + self.append(tag) + + def append_pass(self, report: TestReport) -> None: + self.add_stats("passed") + + def append_failure(self, report: TestReport) -> None: + # msg = str(report.longrepr.reprtraceback.extraline) + if hasattr(report, "wasxfail"): + self._add_simple("skipped", "xfail-marked test passes unexpectedly") + else: + assert report.longrepr is not None + reprcrash: ReprFileLocation | None = getattr( + report.longrepr, "reprcrash", None + ) + if reprcrash is not None: + message = reprcrash.message + else: + message = str(report.longrepr) + message = bin_xml_escape(message) + self._add_simple("failure", message, str(report.longrepr)) + + def append_collect_error(self, report: TestReport) -> None: + # msg = str(report.longrepr.reprtraceback.extraline) + assert report.longrepr is not None + self._add_simple("error", "collection failure", str(report.longrepr)) + + def append_collect_skipped(self, report: TestReport) -> None: + self._add_simple("skipped", "collection skipped", str(report.longrepr)) + + def append_error(self, report: TestReport) -> None: + assert report.longrepr is not None + reprcrash: ReprFileLocation | None = getattr(report.longrepr, "reprcrash", None) + if reprcrash is not None: + reason = reprcrash.message + else: + reason = str(report.longrepr) + + if report.when == "teardown": + msg = f'failed on teardown with "{reason}"' + else: + msg = f'failed on setup with "{reason}"' + self._add_simple("error", bin_xml_escape(msg), str(report.longrepr)) + + def append_skipped(self, report: TestReport) -> None: + if hasattr(report, "wasxfail"): + xfailreason = report.wasxfail + if xfailreason.startswith("reason: "): + xfailreason = xfailreason[8:] + xfailreason = bin_xml_escape(xfailreason) + skipped = ET.Element("skipped", type="pytest.xfail", 
message=xfailreason) + self.append(skipped) + else: + assert isinstance(report.longrepr, tuple) + filename, lineno, skipreason = report.longrepr + if skipreason.startswith("Skipped: "): + skipreason = skipreason[9:] + details = f"{filename}:{lineno}: {skipreason}" + + skipped = ET.Element( + "skipped", type="pytest.skip", message=bin_xml_escape(skipreason) + ) + skipped.text = bin_xml_escape(details) + self.append(skipped) + self.write_captured_output(report) + + def finalize(self) -> None: + data = self.to_xml() + self.__dict__.clear() + # Type ignored because mypy doesn't like overriding a method. + # Also the return value doesn't match... + self.to_xml = lambda: data # type: ignore[method-assign] + + +def _warn_incompatibility_with_xunit2( + request: FixtureRequest, fixture_name: str +) -> None: + """Emit a PytestWarning about the given fixture being incompatible with newer xunit revisions.""" + from _pytest.warning_types import PytestWarning + + xml = request.config.stash.get(xml_key, None) + if xml is not None and xml.family not in ("xunit1", "legacy"): + request.node.warn( + PytestWarning( + f"{fixture_name} is incompatible with junit_family '{xml.family}' (use 'legacy' or 'xunit1')" + ) + ) + + +@pytest.fixture +def record_property(request: FixtureRequest) -> Callable[[str, object], None]: + """Add extra properties to the calling test. + + User properties become part of the test report and are available to the + configured reporters, like JUnit XML. + + The fixture is callable with ``name, value``. The value is automatically + XML-encoded. + + Example:: + + def test_function(record_property): + record_property("example_key", 1) + """ + _warn_incompatibility_with_xunit2(request, "record_property") + + def append_property(name: str, value: object) -> None: + request.node.user_properties.append((name, value)) + + return append_property + + +@pytest.fixture +def record_xml_attribute(request: FixtureRequest) -> Callable[[str, object], None]: + """Add extra xml attributes to the tag for the calling test. + + The fixture is callable with ``name, value``. The value is + automatically XML-encoded. + """ + from _pytest.warning_types import PytestExperimentalApiWarning + + request.node.warn( + PytestExperimentalApiWarning("record_xml_attribute is an experimental feature") + ) + + _warn_incompatibility_with_xunit2(request, "record_xml_attribute") + + # Declare noop + def add_attr_noop(name: str, value: object) -> None: + pass + + attr_func = add_attr_noop + + xml = request.config.stash.get(xml_key, None) + if xml is not None: + node_reporter = xml.node_reporter(request.node.nodeid) + attr_func = node_reporter.add_attribute + + return attr_func + + +def _check_record_param_type(param: str, v: str) -> None: + """Used by record_testsuite_property to check that the given parameter name is of the proper + type.""" + __tracebackhide__ = True + if not isinstance(v, str): + msg = "{param} parameter needs to be a string, but {g} given" # type: ignore[unreachable] + raise TypeError(msg.format(param=param, g=type(v).__name__)) + + +@pytest.fixture(scope="session") +def record_testsuite_property(request: FixtureRequest) -> Callable[[str, object], None]: + """Record a new ```` tag as child of the root ````. + + This is suitable to writing global information regarding the entire test + suite, and is compatible with ``xunit2`` JUnit family. + + This is a ``session``-scoped fixture which is called with ``(name, value)``. Example: + + .. 
code-block:: python + + def test_foo(record_testsuite_property): + record_testsuite_property("ARCH", "PPC") + record_testsuite_property("STORAGE_TYPE", "CEPH") + + :param name: + The property name. + :param value: + The property value. Will be converted to a string. + + .. warning:: + + Currently this fixture **does not work** with the + `pytest-xdist `__ plugin. See + :issue:`7767` for details. + """ + __tracebackhide__ = True + + def record_func(name: str, value: object) -> None: + """No-op function in case --junit-xml was not passed in the command-line.""" + __tracebackhide__ = True + _check_record_param_type("name", name) + + xml = request.config.stash.get(xml_key, None) + if xml is not None: + record_func = xml.add_global_property + return record_func + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("terminal reporting") + group.addoption( + "--junitxml", + "--junit-xml", + action="store", + dest="xmlpath", + metavar="path", + type=functools.partial(filename_arg, optname="--junitxml"), + default=None, + help="Create junit-xml style report file at given path", + ) + group.addoption( + "--junitprefix", + "--junit-prefix", + action="store", + metavar="str", + default=None, + help="Prepend prefix to classnames in junit-xml output", + ) + parser.addini( + "junit_suite_name", "Test suite name for JUnit report", default="pytest" + ) + parser.addini( + "junit_logging", + "Write captured log messages to JUnit report: " + "one of no|log|system-out|system-err|out-err|all", + default="no", + ) + parser.addini( + "junit_log_passing_tests", + "Capture log information for passing tests to JUnit report: ", + type="bool", + default=True, + ) + parser.addini( + "junit_duration_report", + "Duration time to report: one of total|call", + default="total", + ) # choices=['total', 'call']) + parser.addini( + "junit_family", + "Emit XML for schema: one of legacy|xunit1|xunit2", + default="xunit2", + ) + + +def pytest_configure(config: Config) -> None: + xmlpath = config.option.xmlpath + # Prevent opening xmllog on worker nodes (xdist). + if xmlpath and not hasattr(config, "workerinput"): + junit_family = config.getini("junit_family") + config.stash[xml_key] = LogXML( + xmlpath, + config.option.junitprefix, + config.getini("junit_suite_name"), + config.getini("junit_logging"), + config.getini("junit_duration_report"), + junit_family, + config.getini("junit_log_passing_tests"), + ) + config.pluginmanager.register(config.stash[xml_key]) + + +def pytest_unconfigure(config: Config) -> None: + xml = config.stash.get(xml_key, None) + if xml: + del config.stash[xml_key] + config.pluginmanager.unregister(xml) + + +def mangle_test_address(address: str) -> list[str]: + path, possible_open_bracket, params = address.partition("[") + names = path.split("::") + # Convert file path to dotted path. + names[0] = names[0].replace(nodes.SEP, ".") + names[0] = re.sub(r"\.py$", "", names[0]) + # Put any params back. 
+ names[-1] += possible_open_bracket + params + return names + + +class LogXML: + def __init__( + self, + logfile, + prefix: str | None, + suite_name: str = "pytest", + logging: str = "no", + report_duration: str = "total", + family="xunit1", + log_passing_tests: bool = True, + ) -> None: + logfile = os.path.expanduser(os.path.expandvars(logfile)) + self.logfile = os.path.normpath(os.path.abspath(logfile)) + self.prefix = prefix + self.suite_name = suite_name + self.logging = logging + self.log_passing_tests = log_passing_tests + self.report_duration = report_duration + self.family = family + self.stats: dict[str, int] = dict.fromkeys( + ["error", "passed", "failure", "skipped"], 0 + ) + self.node_reporters: dict[tuple[str | TestReport, object], _NodeReporter] = {} + self.node_reporters_ordered: list[_NodeReporter] = [] + self.global_properties: list[tuple[str, str]] = [] + + # List of reports that failed on call but teardown is pending. + self.open_reports: list[TestReport] = [] + self.cnt_double_fail_tests = 0 + + # Replaces convenience family with real family. + if self.family == "legacy": + self.family = "xunit1" + + def finalize(self, report: TestReport) -> None: + nodeid = getattr(report, "nodeid", report) + # Local hack to handle xdist report order. + workernode = getattr(report, "node", None) + reporter = self.node_reporters.pop((nodeid, workernode)) + + for propname, propvalue in report.user_properties: + reporter.add_property(propname, str(propvalue)) + + if reporter is not None: + reporter.finalize() + + def node_reporter(self, report: TestReport | str) -> _NodeReporter: + nodeid: str | TestReport = getattr(report, "nodeid", report) + # Local hack to handle xdist report order. + workernode = getattr(report, "node", None) + + key = nodeid, workernode + + if key in self.node_reporters: + # TODO: breaks for --dist=each + return self.node_reporters[key] + + reporter = _NodeReporter(nodeid, self) + + self.node_reporters[key] = reporter + self.node_reporters_ordered.append(reporter) + + return reporter + + def add_stats(self, key: str) -> None: + if key in self.stats: + self.stats[key] += 1 + + def _opentestcase(self, report: TestReport) -> _NodeReporter: + reporter = self.node_reporter(report) + reporter.record_testreport(report) + return reporter + + def pytest_runtest_logreport(self, report: TestReport) -> None: + """Handle a setup/call/teardown report, generating the appropriate + XML tags as necessary. + + Note: due to plugins like xdist, this hook may be called in interlaced + order with reports from other nodes. For example: + + Usual call order: + -> setup node1 + -> call node1 + -> teardown node1 + -> setup node2 + -> call node2 + -> teardown node2 + + Possible call order in xdist: + -> setup node1 + -> call node1 + -> setup node2 + -> call node2 + -> teardown node2 + -> teardown node1 + """ + close_report = None + if report.passed: + if report.when == "call": # ignore setup/teardown + reporter = self._opentestcase(report) + reporter.append_pass(report) + elif report.failed: + if report.when == "teardown": + # The following vars are needed when xdist plugin is used. 
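+                # worker_id and item_index identify the originating xdist worker and
+                # test, so the matching call-phase report can be found in self.open_reports.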
+ report_wid = getattr(report, "worker_id", None) + report_ii = getattr(report, "item_index", None) + close_report = next( + ( + rep + for rep in self.open_reports + if ( + rep.nodeid == report.nodeid + and getattr(rep, "item_index", None) == report_ii + and getattr(rep, "worker_id", None) == report_wid + ) + ), + None, + ) + if close_report: + # We need to open new testcase in case we have failure in + # call and error in teardown in order to follow junit + # schema. + self.finalize(close_report) + self.cnt_double_fail_tests += 1 + reporter = self._opentestcase(report) + if report.when == "call": + reporter.append_failure(report) + self.open_reports.append(report) + if not self.log_passing_tests: + reporter.write_captured_output(report) + else: + reporter.append_error(report) + elif report.skipped: + reporter = self._opentestcase(report) + reporter.append_skipped(report) + self.update_testcase_duration(report) + if report.when == "teardown": + reporter = self._opentestcase(report) + reporter.write_captured_output(report) + + self.finalize(report) + report_wid = getattr(report, "worker_id", None) + report_ii = getattr(report, "item_index", None) + close_report = next( + ( + rep + for rep in self.open_reports + if ( + rep.nodeid == report.nodeid + and getattr(rep, "item_index", None) == report_ii + and getattr(rep, "worker_id", None) == report_wid + ) + ), + None, + ) + if close_report: + self.open_reports.remove(close_report) + + def update_testcase_duration(self, report: TestReport) -> None: + """Accumulate total duration for nodeid from given report and update + the Junit.testcase with the new total if already created.""" + if self.report_duration in {"total", report.when}: + reporter = self.node_reporter(report) + reporter.duration += getattr(report, "duration", 0.0) + + def pytest_collectreport(self, report: TestReport) -> None: + if not report.passed: + reporter = self._opentestcase(report) + if report.failed: + reporter.append_collect_error(report) + else: + reporter.append_collect_skipped(report) + + def pytest_internalerror(self, excrepr: ExceptionRepr) -> None: + reporter = self.node_reporter("internal") + reporter.attrs.update(classname="pytest", name="internal") + reporter._add_simple("error", "internal error", str(excrepr)) + + def pytest_sessionstart(self) -> None: + self.suite_start = timing.Instant() + + def pytest_sessionfinish(self) -> None: + dirname = os.path.dirname(os.path.abspath(self.logfile)) + # exist_ok avoids filesystem race conditions between checking path existence and requesting creation + os.makedirs(dirname, exist_ok=True) + + with open(self.logfile, "w", encoding="utf-8") as logfile: + duration = self.suite_start.elapsed() + + numtests = ( + self.stats["passed"] + + self.stats["failure"] + + self.stats["skipped"] + + self.stats["error"] + - self.cnt_double_fail_tests + ) + logfile.write('') + + suite_node = ET.Element( + "testsuite", + name=self.suite_name, + errors=str(self.stats["error"]), + failures=str(self.stats["failure"]), + skipped=str(self.stats["skipped"]), + tests=str(numtests), + time=f"{duration.seconds:.3f}", + timestamp=self.suite_start.as_utc().astimezone().isoformat(), + hostname=platform.node(), + ) + global_properties = self._get_global_properties_node() + if global_properties is not None: + suite_node.append(global_properties) + for node_reporter in self.node_reporters_ordered: + suite_node.append(node_reporter.to_xml()) + testsuites = ET.Element("testsuites") + testsuites.set("name", "pytest tests") + testsuites.append(suite_node) + 
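+            # The single testsuite element is wrapped in a testsuites root element before writing.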
logfile.write(ET.tostring(testsuites, encoding="unicode")) + + def pytest_terminal_summary( + self, terminalreporter: TerminalReporter, config: pytest.Config + ) -> None: + if config.get_verbosity() >= 0: + terminalreporter.write_sep("-", f"generated xml file: {self.logfile}") + + def add_global_property(self, name: str, value: object) -> None: + __tracebackhide__ = True + _check_record_param_type("name", name) + self.global_properties.append((name, bin_xml_escape(value))) + + def _get_global_properties_node(self) -> ET.Element | None: + """Return a Junit node containing custom properties, if any.""" + if self.global_properties: + properties = ET.Element("properties") + for name, value in self.global_properties: + properties.append(ET.Element("property", name=name, value=value)) + return properties + return None diff --git a/.venv/lib/python3.12/site-packages/_pytest/legacypath.py b/.venv/lib/python3.12/site-packages/_pytest/legacypath.py new file mode 100644 index 0000000..59e8ef6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/legacypath.py @@ -0,0 +1,468 @@ +# mypy: allow-untyped-defs +"""Add backward compatibility support for the legacy py path type.""" + +from __future__ import annotations + +import dataclasses +from pathlib import Path +import shlex +import subprocess +from typing import Final +from typing import final +from typing import TYPE_CHECKING + +from iniconfig import SectionWrapper + +from _pytest.cacheprovider import Cache +from _pytest.compat import LEGACY_PATH +from _pytest.compat import legacy_path +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config import PytestPluginManager +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.main import Session +from _pytest.monkeypatch import MonkeyPatch +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.nodes import Node +from _pytest.pytester import HookRecorder +from _pytest.pytester import Pytester +from _pytest.pytester import RunResult +from _pytest.terminal import TerminalReporter +from _pytest.tmpdir import TempPathFactory + + +if TYPE_CHECKING: + import pexpect + + +@final +class Testdir: + """ + Similar to :class:`Pytester`, but this class works with legacy legacy_path objects instead. + + All methods just forward to an internal :class:`Pytester` instance, converting results + to `legacy_path` objects as necessary. 
+ """ + + __test__ = False + + CLOSE_STDIN: Final = Pytester.CLOSE_STDIN + TimeoutExpired: Final = Pytester.TimeoutExpired + + def __init__(self, pytester: Pytester, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + self._pytester = pytester + + @property + def tmpdir(self) -> LEGACY_PATH: + """Temporary directory where tests are executed.""" + return legacy_path(self._pytester.path) + + @property + def test_tmproot(self) -> LEGACY_PATH: + return legacy_path(self._pytester._test_tmproot) + + @property + def request(self): + return self._pytester._request + + @property + def plugins(self): + return self._pytester.plugins + + @plugins.setter + def plugins(self, plugins): + self._pytester.plugins = plugins + + @property + def monkeypatch(self) -> MonkeyPatch: + return self._pytester._monkeypatch + + def make_hook_recorder(self, pluginmanager) -> HookRecorder: + """See :meth:`Pytester.make_hook_recorder`.""" + return self._pytester.make_hook_recorder(pluginmanager) + + def chdir(self) -> None: + """See :meth:`Pytester.chdir`.""" + return self._pytester.chdir() + + def finalize(self) -> None: + return self._pytester._finalize() + + def makefile(self, ext, *args, **kwargs) -> LEGACY_PATH: + """See :meth:`Pytester.makefile`.""" + if ext and not ext.startswith("."): + # pytester.makefile is going to throw a ValueError in a way that + # testdir.makefile did not, because + # pathlib.Path is stricter suffixes than py.path + # This ext arguments is likely user error, but since testdir has + # allowed this, we will prepend "." as a workaround to avoid breaking + # testdir usage that worked before + ext = "." + ext + return legacy_path(self._pytester.makefile(ext, *args, **kwargs)) + + def makeconftest(self, source) -> LEGACY_PATH: + """See :meth:`Pytester.makeconftest`.""" + return legacy_path(self._pytester.makeconftest(source)) + + def makeini(self, source) -> LEGACY_PATH: + """See :meth:`Pytester.makeini`.""" + return legacy_path(self._pytester.makeini(source)) + + def getinicfg(self, source: str) -> SectionWrapper: + """See :meth:`Pytester.getinicfg`.""" + return self._pytester.getinicfg(source) + + def makepyprojecttoml(self, source) -> LEGACY_PATH: + """See :meth:`Pytester.makepyprojecttoml`.""" + return legacy_path(self._pytester.makepyprojecttoml(source)) + + def makepyfile(self, *args, **kwargs) -> LEGACY_PATH: + """See :meth:`Pytester.makepyfile`.""" + return legacy_path(self._pytester.makepyfile(*args, **kwargs)) + + def maketxtfile(self, *args, **kwargs) -> LEGACY_PATH: + """See :meth:`Pytester.maketxtfile`.""" + return legacy_path(self._pytester.maketxtfile(*args, **kwargs)) + + def syspathinsert(self, path=None) -> None: + """See :meth:`Pytester.syspathinsert`.""" + return self._pytester.syspathinsert(path) + + def mkdir(self, name) -> LEGACY_PATH: + """See :meth:`Pytester.mkdir`.""" + return legacy_path(self._pytester.mkdir(name)) + + def mkpydir(self, name) -> LEGACY_PATH: + """See :meth:`Pytester.mkpydir`.""" + return legacy_path(self._pytester.mkpydir(name)) + + def copy_example(self, name=None) -> LEGACY_PATH: + """See :meth:`Pytester.copy_example`.""" + return legacy_path(self._pytester.copy_example(name)) + + def getnode(self, config: Config, arg) -> Item | Collector | None: + """See :meth:`Pytester.getnode`.""" + return self._pytester.getnode(config, arg) + + def getpathnode(self, path): + """See :meth:`Pytester.getpathnode`.""" + return self._pytester.getpathnode(path) + + def genitems(self, colitems: list[Item | Collector]) -> list[Item]: + """See 
:meth:`Pytester.genitems`.""" + return self._pytester.genitems(colitems) + + def runitem(self, source): + """See :meth:`Pytester.runitem`.""" + return self._pytester.runitem(source) + + def inline_runsource(self, source, *cmdlineargs): + """See :meth:`Pytester.inline_runsource`.""" + return self._pytester.inline_runsource(source, *cmdlineargs) + + def inline_genitems(self, *args): + """See :meth:`Pytester.inline_genitems`.""" + return self._pytester.inline_genitems(*args) + + def inline_run(self, *args, plugins=(), no_reraise_ctrlc: bool = False): + """See :meth:`Pytester.inline_run`.""" + return self._pytester.inline_run( + *args, plugins=plugins, no_reraise_ctrlc=no_reraise_ctrlc + ) + + def runpytest_inprocess(self, *args, **kwargs) -> RunResult: + """See :meth:`Pytester.runpytest_inprocess`.""" + return self._pytester.runpytest_inprocess(*args, **kwargs) + + def runpytest(self, *args, **kwargs) -> RunResult: + """See :meth:`Pytester.runpytest`.""" + return self._pytester.runpytest(*args, **kwargs) + + def parseconfig(self, *args) -> Config: + """See :meth:`Pytester.parseconfig`.""" + return self._pytester.parseconfig(*args) + + def parseconfigure(self, *args) -> Config: + """See :meth:`Pytester.parseconfigure`.""" + return self._pytester.parseconfigure(*args) + + def getitem(self, source, funcname="test_func"): + """See :meth:`Pytester.getitem`.""" + return self._pytester.getitem(source, funcname) + + def getitems(self, source): + """See :meth:`Pytester.getitems`.""" + return self._pytester.getitems(source) + + def getmodulecol(self, source, configargs=(), withinit=False): + """See :meth:`Pytester.getmodulecol`.""" + return self._pytester.getmodulecol( + source, configargs=configargs, withinit=withinit + ) + + def collect_by_name(self, modcol: Collector, name: str) -> Item | Collector | None: + """See :meth:`Pytester.collect_by_name`.""" + return self._pytester.collect_by_name(modcol, name) + + def popen( + self, + cmdargs, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + stdin=CLOSE_STDIN, + **kw, + ): + """See :meth:`Pytester.popen`.""" + return self._pytester.popen(cmdargs, stdout, stderr, stdin, **kw) + + def run(self, *cmdargs, timeout=None, stdin=CLOSE_STDIN) -> RunResult: + """See :meth:`Pytester.run`.""" + return self._pytester.run(*cmdargs, timeout=timeout, stdin=stdin) + + def runpython(self, script) -> RunResult: + """See :meth:`Pytester.runpython`.""" + return self._pytester.runpython(script) + + def runpython_c(self, command): + """See :meth:`Pytester.runpython_c`.""" + return self._pytester.runpython_c(command) + + def runpytest_subprocess(self, *args, timeout=None) -> RunResult: + """See :meth:`Pytester.runpytest_subprocess`.""" + return self._pytester.runpytest_subprocess(*args, timeout=timeout) + + def spawn_pytest(self, string: str, expect_timeout: float = 10.0) -> pexpect.spawn: + """See :meth:`Pytester.spawn_pytest`.""" + return self._pytester.spawn_pytest(string, expect_timeout=expect_timeout) + + def spawn(self, cmd: str, expect_timeout: float = 10.0) -> pexpect.spawn: + """See :meth:`Pytester.spawn`.""" + return self._pytester.spawn(cmd, expect_timeout=expect_timeout) + + def __repr__(self) -> str: + return f"" + + def __str__(self) -> str: + return str(self.tmpdir) + + +class LegacyTestdirPlugin: + @staticmethod + @fixture + def testdir(pytester: Pytester) -> Testdir: + """ + Identical to :fixture:`pytester`, and provides an instance whose methods return + legacy ``LEGACY_PATH`` objects instead when applicable. 
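+
+        Example (a minimal sketch of typical usage)::
+
+            def test_anything(testdir):
+                testdir.makepyfile("def test_ok(): pass")
+                result = testdir.runpytest()
+                result.assert_outcomes(passed=1)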
+ + New code should avoid using :fixture:`testdir` in favor of :fixture:`pytester`. + """ + return Testdir(pytester, _ispytest=True) + + +@final +@dataclasses.dataclass +class TempdirFactory: + """Backward compatibility wrapper that implements ``py.path.local`` + for :class:`TempPathFactory`. + + .. note:: + These days, it is preferred to use ``tmp_path_factory``. + + :ref:`About the tmpdir and tmpdir_factory fixtures`. + + """ + + _tmppath_factory: TempPathFactory + + def __init__( + self, tmppath_factory: TempPathFactory, *, _ispytest: bool = False + ) -> None: + check_ispytest(_ispytest) + self._tmppath_factory = tmppath_factory + + def mktemp(self, basename: str, numbered: bool = True) -> LEGACY_PATH: + """Same as :meth:`TempPathFactory.mktemp`, but returns a ``py.path.local`` object.""" + return legacy_path(self._tmppath_factory.mktemp(basename, numbered).resolve()) + + def getbasetemp(self) -> LEGACY_PATH: + """Same as :meth:`TempPathFactory.getbasetemp`, but returns a ``py.path.local`` object.""" + return legacy_path(self._tmppath_factory.getbasetemp().resolve()) + + +class LegacyTmpdirPlugin: + @staticmethod + @fixture(scope="session") + def tmpdir_factory(request: FixtureRequest) -> TempdirFactory: + """Return a :class:`pytest.TempdirFactory` instance for the test session.""" + # Set dynamically by pytest_configure(). + return request.config._tmpdirhandler # type: ignore + + @staticmethod + @fixture + def tmpdir(tmp_path: Path) -> LEGACY_PATH: + """Return a temporary directory (as `legacy_path`_ object) + which is unique to each test function invocation. + The temporary directory is created as a subdirectory + of the base temporary directory, with configurable retention, + as discussed in :ref:`temporary directory location and retention`. + + .. note:: + These days, it is preferred to use ``tmp_path``. + + :ref:`About the tmpdir and tmpdir_factory fixtures`. + + .. _legacy_path: https://py.readthedocs.io/en/latest/path.html + """ + return legacy_path(tmp_path) + + +def Cache_makedir(self: Cache, name: str) -> LEGACY_PATH: + """Return a directory path object with the given name. + + Same as :func:`mkdir`, but returns a legacy py path instance. + """ + return legacy_path(self.mkdir(name)) + + +def FixtureRequest_fspath(self: FixtureRequest) -> LEGACY_PATH: + """(deprecated) The file system path of the test module which collected this test.""" + return legacy_path(self.path) + + +def TerminalReporter_startdir(self: TerminalReporter) -> LEGACY_PATH: + """The directory from which pytest was invoked. + + Prefer to use ``startpath`` which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(self.startpath) + + +def Config_invocation_dir(self: Config) -> LEGACY_PATH: + """The directory from which pytest was invoked. + + Prefer to use :attr:`invocation_params.dir `, + which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(str(self.invocation_params.dir)) + + +def Config_rootdir(self: Config) -> LEGACY_PATH: + """The path to the :ref:`rootdir `. + + Prefer to use :attr:`rootpath`, which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(str(self.rootpath)) + + +def Config_inifile(self: Config) -> LEGACY_PATH | None: + """The path to the :ref:`configfile `. + + Prefer to use :attr:`inipath`, which is a :class:`pathlib.Path`. 
+ + :type: Optional[LEGACY_PATH] + """ + return legacy_path(str(self.inipath)) if self.inipath else None + + +def Session_startdir(self: Session) -> LEGACY_PATH: + """The path from which pytest was invoked. + + Prefer to use ``startpath`` which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(self.startpath) + + +def Config__getini_unknown_type(self, name: str, type: str, value: str | list[str]): + if type == "pathlist": + # TODO: This assert is probably not valid in all cases. + assert self.inipath is not None + dp = self.inipath.parent + input_values = shlex.split(value) if isinstance(value, str) else value + return [legacy_path(str(dp / x)) for x in input_values] + else: + raise ValueError(f"unknown configuration type: {type}", value) + + +def Node_fspath(self: Node) -> LEGACY_PATH: + """(deprecated) returns a legacy_path copy of self.path""" + return legacy_path(self.path) + + +def Node_fspath_set(self: Node, value: LEGACY_PATH) -> None: + self.path = Path(value) + + +@hookimpl(tryfirst=True) +def pytest_load_initial_conftests(early_config: Config) -> None: + """Monkeypatch legacy path attributes in several classes, as early as possible.""" + mp = MonkeyPatch() + early_config.add_cleanup(mp.undo) + + # Add Cache.makedir(). + mp.setattr(Cache, "makedir", Cache_makedir, raising=False) + + # Add FixtureRequest.fspath property. + mp.setattr(FixtureRequest, "fspath", property(FixtureRequest_fspath), raising=False) + + # Add TerminalReporter.startdir property. + mp.setattr( + TerminalReporter, "startdir", property(TerminalReporter_startdir), raising=False + ) + + # Add Config.{invocation_dir,rootdir,inifile} properties. + mp.setattr(Config, "invocation_dir", property(Config_invocation_dir), raising=False) + mp.setattr(Config, "rootdir", property(Config_rootdir), raising=False) + mp.setattr(Config, "inifile", property(Config_inifile), raising=False) + + # Add Session.startdir property. + mp.setattr(Session, "startdir", property(Session_startdir), raising=False) + + # Add pathlist configuration type. + mp.setattr(Config, "_getini_unknown_type", Config__getini_unknown_type) + + # Add Node.fspath property. + mp.setattr(Node, "fspath", property(Node_fspath, Node_fspath_set), raising=False) + + +@hookimpl +def pytest_configure(config: Config) -> None: + """Installs the LegacyTmpdirPlugin if the ``tmpdir`` plugin is also installed.""" + if config.pluginmanager.has_plugin("tmpdir"): + mp = MonkeyPatch() + config.add_cleanup(mp.undo) + # Create TmpdirFactory and attach it to the config object. + # + # This is to comply with existing plugins which expect the handler to be + # available at pytest_configure time, but ideally should be moved entirely + # to the tmpdir_factory session fixture. + try: + tmp_path_factory = config._tmp_path_factory # type: ignore[attr-defined] + except AttributeError: + # tmpdir plugin is blocked. + pass + else: + _tmpdirhandler = TempdirFactory(tmp_path_factory, _ispytest=True) + mp.setattr(config, "_tmpdirhandler", _tmpdirhandler, raising=False) + + config.pluginmanager.register(LegacyTmpdirPlugin, "legacypath-tmpdir") + + +@hookimpl +def pytest_plugin_registered(plugin: object, manager: PytestPluginManager) -> None: + # pytester is not loaded by default and is commonly loaded from a conftest, + # so checking for it in `pytest_configure` is not enough. 
+ is_pytester = plugin is manager.get_plugin("pytester") + if is_pytester and not manager.is_registered(LegacyTestdirPlugin): + manager.register(LegacyTestdirPlugin, "legacypath-pytester") diff --git a/.venv/lib/python3.12/site-packages/_pytest/logging.py b/.venv/lib/python3.12/site-packages/_pytest/logging.py new file mode 100644 index 0000000..e4fed57 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/logging.py @@ -0,0 +1,960 @@ +# mypy: allow-untyped-defs +"""Access and control log capturing.""" + +from __future__ import annotations + +from collections.abc import Generator +from collections.abc import Mapping +from collections.abc import Set as AbstractSet +from contextlib import contextmanager +from contextlib import nullcontext +from datetime import datetime +from datetime import timedelta +from datetime import timezone +import io +from io import StringIO +import logging +from logging import LogRecord +import os +from pathlib import Path +import re +from types import TracebackType +from typing import final +from typing import Generic +from typing import Literal +from typing import TYPE_CHECKING +from typing import TypeVar + +from _pytest import nodes +from _pytest._io import TerminalWriter +from _pytest.capture import CaptureManager +from _pytest.config import _strtobool +from _pytest.config import Config +from _pytest.config import create_terminal_writer +from _pytest.config import hookimpl +from _pytest.config import UsageError +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.main import Session +from _pytest.stash import StashKey +from _pytest.terminal import TerminalReporter + + +if TYPE_CHECKING: + logging_StreamHandler = logging.StreamHandler[StringIO] +else: + logging_StreamHandler = logging.StreamHandler + +DEFAULT_LOG_FORMAT = "%(levelname)-8s %(name)s:%(filename)s:%(lineno)d %(message)s" +DEFAULT_LOG_DATE_FORMAT = "%H:%M:%S" +_ANSI_ESCAPE_SEQ = re.compile(r"\x1b\[[\d;]+m") +caplog_handler_key = StashKey["LogCaptureHandler"]() +caplog_records_key = StashKey[dict[str, list[logging.LogRecord]]]() + + +def _remove_ansi_escape_sequences(text: str) -> str: + return _ANSI_ESCAPE_SEQ.sub("", text) + + +class DatetimeFormatter(logging.Formatter): + """A logging formatter which formats record with + :func:`datetime.datetime.strftime` formatter instead of + :func:`time.strftime` in case of microseconds in format string. + """ + + def formatTime(self, record: LogRecord, datefmt: str | None = None) -> str: + if datefmt and "%f" in datefmt: + ct = self.converter(record.created) + tz = timezone(timedelta(seconds=ct.tm_gmtoff), ct.tm_zone) + # Construct `datetime.datetime` object from `struct_time` + # and msecs information from `record` + # Using int() instead of round() to avoid it exceeding 1_000_000 and causing a ValueError (#11861). 
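+            # For example (illustrative values only): datefmt="%H:%M:%S.%f"
+            # would render a record created at 14:03:52.123456 as
+            # "14:03:52.123456".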
+ dt = datetime(*ct[0:6], microsecond=int(record.msecs * 1000), tzinfo=tz) + return dt.strftime(datefmt) + # Use `logging.Formatter` for non-microsecond formats + return super().formatTime(record, datefmt) + + +class ColoredLevelFormatter(DatetimeFormatter): + """A logging formatter which colorizes the %(levelname)..s part of the + log format passed to __init__.""" + + LOGLEVEL_COLOROPTS: Mapping[int, AbstractSet[str]] = { + logging.CRITICAL: {"red"}, + logging.ERROR: {"red", "bold"}, + logging.WARNING: {"yellow"}, + logging.WARN: {"yellow"}, + logging.INFO: {"green"}, + logging.DEBUG: {"purple"}, + logging.NOTSET: set(), + } + LEVELNAME_FMT_REGEX = re.compile(r"%\(levelname\)([+-.]?\d*(?:\.\d+)?s)") + + def __init__(self, terminalwriter: TerminalWriter, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self._terminalwriter = terminalwriter + self._original_fmt = self._style._fmt + self._level_to_fmt_mapping: dict[int, str] = {} + + for level, color_opts in self.LOGLEVEL_COLOROPTS.items(): + self.add_color_level(level, *color_opts) + + def add_color_level(self, level: int, *color_opts: str) -> None: + """Add or update color opts for a log level. + + :param level: + Log level to apply a style to, e.g. ``logging.INFO``. + :param color_opts: + ANSI escape sequence color options. Capitalized colors indicates + background color, i.e. ``'green', 'Yellow', 'bold'`` will give bold + green text on yellow background. + + .. warning:: + This is an experimental API. + """ + assert self._fmt is not None + levelname_fmt_match = self.LEVELNAME_FMT_REGEX.search(self._fmt) + if not levelname_fmt_match: + return + levelname_fmt = levelname_fmt_match.group() + + formatted_levelname = levelname_fmt % {"levelname": logging.getLevelName(level)} + + # add ANSI escape sequences around the formatted levelname + color_kwargs = {name: True for name in color_opts} + colorized_formatted_levelname = self._terminalwriter.markup( + formatted_levelname, **color_kwargs + ) + self._level_to_fmt_mapping[level] = self.LEVELNAME_FMT_REGEX.sub( + colorized_formatted_levelname, self._fmt + ) + + def format(self, record: logging.LogRecord) -> str: + fmt = self._level_to_fmt_mapping.get(record.levelno, self._original_fmt) + self._style._fmt = fmt + return super().format(record) + + +class PercentStyleMultiline(logging.PercentStyle): + """A logging style with special support for multiline messages. + + If the message of a record consists of multiple lines, this style + formats the message as if each line were logged separately. + """ + + def __init__(self, fmt: str, auto_indent: int | str | bool | None) -> None: + super().__init__(fmt) + self._auto_indent = self._get_auto_indent(auto_indent) + + @staticmethod + def _get_auto_indent(auto_indent_option: int | str | bool | None) -> int: + """Determine the current auto indentation setting. + + Specify auto indent behavior (on/off/fixed) by passing in + extra={"auto_indent": [value]} to the call to logging.log() or + using a --log-auto-indent [value] command line or the + log_auto_indent [value] config option. + + Default behavior is auto-indent off. + + Using the string "True" or "on" or the boolean True as the value + turns auto indent on, using the string "False" or "off" or the + boolean False or the int 0 turns it off, and specifying a + positive integer fixes the indentation position to the value + specified. + + Any other values for the option are invalid, and will silently be + converted to the default. 
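+
+        For example, ``--log-auto-indent=on`` aligns the continuation lines of
+        a multiline message under the start of the message text, while
+        ``--log-auto-indent=8`` indents them by a fixed 8 columns.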
+ + :param None|bool|int|str auto_indent_option: + User specified option for indentation from command line, config + or extra kwarg. Accepts int, bool or str. str option accepts the + same range of values as boolean config options, as well as + positive integers represented in str form. + + :returns: + Indentation value, which can be + -1 (automatically determine indentation) or + 0 (auto-indent turned off) or + >0 (explicitly set indentation position). + """ + if auto_indent_option is None: + return 0 + elif isinstance(auto_indent_option, bool): + if auto_indent_option: + return -1 + else: + return 0 + elif isinstance(auto_indent_option, int): + return int(auto_indent_option) + elif isinstance(auto_indent_option, str): + try: + return int(auto_indent_option) + except ValueError: + pass + try: + if _strtobool(auto_indent_option): + return -1 + except ValueError: + return 0 + + return 0 + + def format(self, record: logging.LogRecord) -> str: + if "\n" in record.message: + if hasattr(record, "auto_indent"): + # Passed in from the "extra={}" kwarg on the call to logging.log(). + auto_indent = self._get_auto_indent(record.auto_indent) + else: + auto_indent = self._auto_indent + + if auto_indent: + lines = record.message.splitlines() + formatted = self._fmt % {**record.__dict__, "message": lines[0]} + + if auto_indent < 0: + indentation = _remove_ansi_escape_sequences(formatted).find( + lines[0] + ) + else: + # Optimizes logging by allowing a fixed indentation. + indentation = auto_indent + lines[0] = formatted + return ("\n" + " " * indentation).join(lines) + return self._fmt % record.__dict__ + + +def get_option_ini(config: Config, *names: str): + for name in names: + ret = config.getoption(name) # 'default' arg won't work as expected + if ret is None: + ret = config.getini(name) + if ret: + return ret + + +def pytest_addoption(parser: Parser) -> None: + """Add options to control log capturing.""" + group = parser.getgroup("logging") + + def add_option_ini(option, dest, default=None, type=None, **kwargs): + parser.addini( + dest, default=default, type=type, help="Default value for " + option + ) + group.addoption(option, dest=dest, **kwargs) + + add_option_ini( + "--log-level", + dest="log_level", + default=None, + metavar="LEVEL", + help=( + "Level of messages to catch/display." + " Not set by default, so it depends on the root/parent log handler's" + ' effective level, where it is "WARNING" by default.' 
+ ), + ) + add_option_ini( + "--log-format", + dest="log_format", + default=DEFAULT_LOG_FORMAT, + help="Log format used by the logging module", + ) + add_option_ini( + "--log-date-format", + dest="log_date_format", + default=DEFAULT_LOG_DATE_FORMAT, + help="Log date format used by the logging module", + ) + parser.addini( + "log_cli", + default=False, + type="bool", + help='Enable log display during test run (also known as "live logging")', + ) + add_option_ini( + "--log-cli-level", dest="log_cli_level", default=None, help="CLI logging level" + ) + add_option_ini( + "--log-cli-format", + dest="log_cli_format", + default=None, + help="Log format used by the logging module", + ) + add_option_ini( + "--log-cli-date-format", + dest="log_cli_date_format", + default=None, + help="Log date format used by the logging module", + ) + add_option_ini( + "--log-file", + dest="log_file", + default=None, + help="Path to a file when logging will be written to", + ) + add_option_ini( + "--log-file-mode", + dest="log_file_mode", + default="w", + choices=["w", "a"], + help="Log file open mode", + ) + add_option_ini( + "--log-file-level", + dest="log_file_level", + default=None, + help="Log file logging level", + ) + add_option_ini( + "--log-file-format", + dest="log_file_format", + default=None, + help="Log format used by the logging module", + ) + add_option_ini( + "--log-file-date-format", + dest="log_file_date_format", + default=None, + help="Log date format used by the logging module", + ) + add_option_ini( + "--log-auto-indent", + dest="log_auto_indent", + default=None, + help="Auto-indent multiline messages passed to the logging module. Accepts true|on, false|off or an integer.", + ) + group.addoption( + "--log-disable", + action="append", + default=[], + dest="logger_disable", + help="Disable a logger by name. Can be passed multiple times.", + ) + + +_HandlerType = TypeVar("_HandlerType", bound=logging.Handler) + + +# Not using @contextmanager for performance reasons. 
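+# Rough @contextmanager equivalent of the class below, shown as a sketch for
+# orientation only (not used by this module):
+#
+#     @contextmanager
+#     def _catching_logs_sketch(handler, level=None):
+#         root = logging.getLogger()
+#         if level is not None:
+#             handler.setLevel(level)
+#         root.addHandler(handler)
+#         if level is not None:
+#             orig_level = root.level
+#             root.setLevel(min(orig_level, level))
+#         try:
+#             yield handler
+#         finally:
+#             if level is not None:
+#                 root.setLevel(orig_level)
+#             root.removeHandler(handler)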
+class catching_logs(Generic[_HandlerType]): + """Context manager that prepares the whole logging machinery properly.""" + + __slots__ = ("handler", "level", "orig_level") + + def __init__(self, handler: _HandlerType, level: int | None = None) -> None: + self.handler = handler + self.level = level + + def __enter__(self) -> _HandlerType: + root_logger = logging.getLogger() + if self.level is not None: + self.handler.setLevel(self.level) + root_logger.addHandler(self.handler) + if self.level is not None: + self.orig_level = root_logger.level + root_logger.setLevel(min(self.orig_level, self.level)) + return self.handler + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + root_logger = logging.getLogger() + if self.level is not None: + root_logger.setLevel(self.orig_level) + root_logger.removeHandler(self.handler) + + +class LogCaptureHandler(logging_StreamHandler): + """A logging handler that stores log records and the log text.""" + + def __init__(self) -> None: + """Create a new log handler.""" + super().__init__(StringIO()) + self.records: list[logging.LogRecord] = [] + + def emit(self, record: logging.LogRecord) -> None: + """Keep the log records in a list in addition to the log text.""" + self.records.append(record) + super().emit(record) + + def reset(self) -> None: + self.records = [] + self.stream = StringIO() + + def clear(self) -> None: + self.records.clear() + self.stream = StringIO() + + def handleError(self, record: logging.LogRecord) -> None: + if logging.raiseExceptions: + # Fail the test if the log message is bad (emit failed). + # The default behavior of logging is to print "Logging error" + # to stderr with the call stack and some extra details. + # pytest wants to make such mistakes visible during testing. + raise # noqa: PLE0704 + + +@final +class LogCaptureFixture: + """Provides access and control of log capturing.""" + + def __init__(self, item: nodes.Node, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + self._item = item + self._initial_handler_level: int | None = None + # Dict of log name -> log level. + self._initial_logger_levels: dict[str | None, int] = {} + self._initial_disabled_logging_level: int | None = None + + def _finalize(self) -> None: + """Finalize the fixture. + + This restores the log levels and the disabled logging levels changed by :meth:`set_level`. + """ + # Restore log levels. + if self._initial_handler_level is not None: + self.handler.setLevel(self._initial_handler_level) + for logger_name, level in self._initial_logger_levels.items(): + logger = logging.getLogger(logger_name) + logger.setLevel(level) + # Disable logging at the original disabled logging level. + if self._initial_disabled_logging_level is not None: + logging.disable(self._initial_disabled_logging_level) + self._initial_disabled_logging_level = None + + @property + def handler(self) -> LogCaptureHandler: + """Get the logging handler used by the fixture.""" + return self._item.stash[caplog_handler_key] + + def get_records( + self, when: Literal["setup", "call", "teardown"] + ) -> list[logging.LogRecord]: + """Get the logging records for one of the possible test phases. + + :param when: + Which test phase to obtain the records from. + Valid values are: "setup", "call" and "teardown". + + :returns: The list of captured records at the given stage. + + .. 
versionadded:: 3.4 + """ + return self._item.stash[caplog_records_key].get(when, []) + + @property + def text(self) -> str: + """The formatted log text.""" + return _remove_ansi_escape_sequences(self.handler.stream.getvalue()) + + @property + def records(self) -> list[logging.LogRecord]: + """The list of log records.""" + return self.handler.records + + @property + def record_tuples(self) -> list[tuple[str, int, str]]: + """A list of a stripped down version of log records intended + for use in assertion comparison. + + The format of the tuple is: + + (logger_name, log_level, message) + """ + return [(r.name, r.levelno, r.getMessage()) for r in self.records] + + @property + def messages(self) -> list[str]: + """A list of format-interpolated log messages. + + Unlike 'records', which contains the format string and parameters for + interpolation, log messages in this list are all interpolated. + + Unlike 'text', which contains the output from the handler, log + messages in this list are unadorned with levels, timestamps, etc, + making exact comparisons more reliable. + + Note that traceback or stack info (from :func:`logging.exception` or + the `exc_info` or `stack_info` arguments to the logging functions) is + not included, as this is added by the formatter in the handler. + + .. versionadded:: 3.7 + """ + return [r.getMessage() for r in self.records] + + def clear(self) -> None: + """Reset the list of log records and the captured log text.""" + self.handler.clear() + + def _force_enable_logging( + self, level: int | str, logger_obj: logging.Logger + ) -> int: + """Enable the desired logging level if the global level was disabled via ``logging.disabled``. + + Only enables logging levels greater than or equal to the requested ``level``. + + Does nothing if the desired ``level`` wasn't disabled. + + :param level: + The logger level caplog should capture. + All logging is enabled if a non-standard logging level string is supplied. + Valid level strings are in :data:`logging._nameToLevel`. + :param logger_obj: The logger object to check. + + :return: The original disabled logging level. + """ + original_disable_level: int = logger_obj.manager.disable + + if isinstance(level, str): + # Try to translate the level string to an int for `logging.disable()` + level = logging.getLevelName(level) + + if not isinstance(level, int): + # The level provided was not valid, so just un-disable all logging. + logging.disable(logging.NOTSET) + elif not logger_obj.isEnabledFor(level): + # Each level is `10` away from other levels. + # https://docs.python.org/3/library/logging.html#logging-levels + disable_level = max(level - 10, logging.NOTSET) + logging.disable(disable_level) + + return original_disable_level + + def set_level(self, level: int | str, logger: str | None = None) -> None: + """Set the threshold level of a logger for the duration of a test. + + Logging messages which are less severe than this level will not be captured. + + .. versionchanged:: 3.4 + The levels of the loggers changed by this function will be + restored to their initial values at the end of the test. + + Will enable the requested logging level if it was disabled via :func:`logging.disable`. + + :param level: The level. + :param logger: The logger to update. If not given, the root logger. + """ + logger_obj = logging.getLogger(logger) + # Save the original log-level to restore it during teardown. 
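+        # Typical use from a test, for orientation (an illustrative sketch;
+        # "app.db" and the assertion text are made-up examples):
+        #
+        #     def test_logs_slow_query(caplog):
+        #         caplog.set_level(logging.DEBUG, logger="app.db")
+        #         ...  # exercise the code under test
+        #         assert "slow query" in caplog.text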
+ self._initial_logger_levels.setdefault(logger, logger_obj.level) + logger_obj.setLevel(level) + if self._initial_handler_level is None: + self._initial_handler_level = self.handler.level + self.handler.setLevel(level) + initial_disabled_logging_level = self._force_enable_logging(level, logger_obj) + if self._initial_disabled_logging_level is None: + self._initial_disabled_logging_level = initial_disabled_logging_level + + @contextmanager + def at_level(self, level: int | str, logger: str | None = None) -> Generator[None]: + """Context manager that sets the level for capturing of logs. After + the end of the 'with' statement the level is restored to its original + value. + + Will enable the requested logging level if it was disabled via :func:`logging.disable`. + + :param level: The level. + :param logger: The logger to update. If not given, the root logger. + """ + logger_obj = logging.getLogger(logger) + orig_level = logger_obj.level + logger_obj.setLevel(level) + handler_orig_level = self.handler.level + self.handler.setLevel(level) + original_disable_level = self._force_enable_logging(level, logger_obj) + try: + yield + finally: + logger_obj.setLevel(orig_level) + self.handler.setLevel(handler_orig_level) + logging.disable(original_disable_level) + + @contextmanager + def filtering(self, filter_: logging.Filter) -> Generator[None]: + """Context manager that temporarily adds the given filter to the caplog's + :meth:`handler` for the 'with' statement block, and removes that filter at the + end of the block. + + :param filter_: A custom :class:`logging.Filter` object. + + .. versionadded:: 7.5 + """ + self.handler.addFilter(filter_) + try: + yield + finally: + self.handler.removeFilter(filter_) + + +@fixture +def caplog(request: FixtureRequest) -> Generator[LogCaptureFixture]: + """Access and control log capturing. + + Captured logs are available through the following properties/methods:: + + * caplog.messages -> list of format-interpolated log messages + * caplog.text -> string containing formatted log output + * caplog.records -> list of logging.LogRecord instances + * caplog.record_tuples -> list of (logger_name, level, message) tuples + * caplog.clear() -> clear captured records and formatted log output string + """ + result = LogCaptureFixture(request.node, _ispytest=True) + yield result + result._finalize() + + +def get_log_level_for_setting(config: Config, *setting_names: str) -> int | None: + for setting_name in setting_names: + log_level = config.getoption(setting_name) + if log_level is None: + log_level = config.getini(setting_name) + if log_level: + break + else: + return None + + if isinstance(log_level, str): + log_level = log_level.upper() + try: + return int(getattr(logging, log_level, log_level)) + except ValueError as e: + # Python logging does not recognise this as a logging level + raise UsageError( + f"'{log_level}' is not recognized as a logging level name for " + f"'{setting_name}'. Please consider passing the " + "logging level num instead." + ) from e + + +# run after terminalreporter/capturemanager are configured +@hookimpl(trylast=True) +def pytest_configure(config: Config) -> None: + config.pluginmanager.register(LoggingPlugin(config), "logging-plugin") + + +class LoggingPlugin: + """Attaches to the logging module and captures log messages for each test.""" + + def __init__(self, config: Config) -> None: + """Create a new plugin to capture log messages. 
+ + The formatter can be safely shared across all handlers so + create a single one for the entire test session here. + """ + self._config = config + + # Report logging. + self.formatter = self._create_formatter( + get_option_ini(config, "log_format"), + get_option_ini(config, "log_date_format"), + get_option_ini(config, "log_auto_indent"), + ) + self.log_level = get_log_level_for_setting(config, "log_level") + self.caplog_handler = LogCaptureHandler() + self.caplog_handler.setFormatter(self.formatter) + self.report_handler = LogCaptureHandler() + self.report_handler.setFormatter(self.formatter) + + # File logging. + self.log_file_level = get_log_level_for_setting( + config, "log_file_level", "log_level" + ) + log_file = get_option_ini(config, "log_file") or os.devnull + if log_file != os.devnull: + directory = os.path.dirname(os.path.abspath(log_file)) + if not os.path.isdir(directory): + os.makedirs(directory) + + self.log_file_mode = get_option_ini(config, "log_file_mode") or "w" + self.log_file_handler = _FileHandler( + log_file, mode=self.log_file_mode, encoding="UTF-8" + ) + log_file_format = get_option_ini(config, "log_file_format", "log_format") + log_file_date_format = get_option_ini( + config, "log_file_date_format", "log_date_format" + ) + + log_file_formatter = DatetimeFormatter( + log_file_format, datefmt=log_file_date_format + ) + self.log_file_handler.setFormatter(log_file_formatter) + + # CLI/live logging. + self.log_cli_level = get_log_level_for_setting( + config, "log_cli_level", "log_level" + ) + if self._log_cli_enabled(): + terminal_reporter = config.pluginmanager.get_plugin("terminalreporter") + # Guaranteed by `_log_cli_enabled()`. + assert terminal_reporter is not None + capture_manager = config.pluginmanager.get_plugin("capturemanager") + # if capturemanager plugin is disabled, live logging still works. + self.log_cli_handler: ( + _LiveLoggingStreamHandler | _LiveLoggingNullHandler + ) = _LiveLoggingStreamHandler(terminal_reporter, capture_manager) + else: + self.log_cli_handler = _LiveLoggingNullHandler() + log_cli_formatter = self._create_formatter( + get_option_ini(config, "log_cli_format", "log_format"), + get_option_ini(config, "log_cli_date_format", "log_date_format"), + get_option_ini(config, "log_auto_indent"), + ) + self.log_cli_handler.setFormatter(log_cli_formatter) + self._disable_loggers(loggers_to_disable=config.option.logger_disable) + + def _disable_loggers(self, loggers_to_disable: list[str]) -> None: + if not loggers_to_disable: + return + + for name in loggers_to_disable: + logger = logging.getLogger(name) + logger.disabled = True + + def _create_formatter(self, log_format, log_date_format, auto_indent): + # Color option doesn't exist if terminal plugin is disabled. + color = getattr(self._config.option, "color", "no") + if color != "no" and ColoredLevelFormatter.LEVELNAME_FMT_REGEX.search( + log_format + ): + formatter: logging.Formatter = ColoredLevelFormatter( + create_terminal_writer(self._config), log_format, log_date_format + ) + else: + formatter = DatetimeFormatter(log_format, log_date_format) + + formatter._style = PercentStyleMultiline( + formatter._style._fmt, auto_indent=auto_indent + ) + + return formatter + + def set_log_path(self, fname: str) -> None: + """Set the filename parameter for Logging.FileHandler(). + + Creates parent directory if it does not exist. + + .. warning:: + This is an experimental API. 
+ """ + fpath = Path(fname) + + if not fpath.is_absolute(): + fpath = self._config.rootpath / fpath + + if not fpath.parent.exists(): + fpath.parent.mkdir(exist_ok=True, parents=True) + + # https://github.com/python/mypy/issues/11193 + stream: io.TextIOWrapper = fpath.open(mode=self.log_file_mode, encoding="UTF-8") # type: ignore[assignment] + old_stream = self.log_file_handler.setStream(stream) + if old_stream: + old_stream.close() + + def _log_cli_enabled(self) -> bool: + """Return whether live logging is enabled.""" + enabled = self._config.getoption( + "--log-cli-level" + ) is not None or self._config.getini("log_cli") + if not enabled: + return False + + terminal_reporter = self._config.pluginmanager.get_plugin("terminalreporter") + if terminal_reporter is None: + # terminal reporter is disabled e.g. by pytest-xdist. + return False + + return True + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_sessionstart(self) -> Generator[None]: + self.log_cli_handler.set_when("sessionstart") + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + return (yield) + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_collection(self) -> Generator[None]: + self.log_cli_handler.set_when("collection") + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + return (yield) + + @hookimpl(wrapper=True) + def pytest_runtestloop(self, session: Session) -> Generator[None, object, object]: + if session.config.option.collectonly: + return (yield) + + if self._log_cli_enabled() and self._config.get_verbosity() < 1: + # The verbose flag is needed to avoid messy test progress output. + self._config.option.verbose = 1 + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + return (yield) # Run all the tests. 
+ + @hookimpl + def pytest_runtest_logstart(self) -> None: + self.log_cli_handler.reset() + self.log_cli_handler.set_when("start") + + @hookimpl + def pytest_runtest_logreport(self) -> None: + self.log_cli_handler.set_when("logreport") + + @contextmanager + def _runtest_for(self, item: nodes.Item, when: str) -> Generator[None]: + """Implement the internals of the pytest_runtest_xxx() hooks.""" + with ( + catching_logs( + self.caplog_handler, + level=self.log_level, + ) as caplog_handler, + catching_logs( + self.report_handler, + level=self.log_level, + ) as report_handler, + ): + caplog_handler.reset() + report_handler.reset() + item.stash[caplog_records_key][when] = caplog_handler.records + item.stash[caplog_handler_key] = caplog_handler + + try: + yield + finally: + log = report_handler.stream.getvalue().strip() + item.add_report_section(when, "log", log) + + @hookimpl(wrapper=True) + def pytest_runtest_setup(self, item: nodes.Item) -> Generator[None]: + self.log_cli_handler.set_when("setup") + + empty: dict[str, list[logging.LogRecord]] = {} + item.stash[caplog_records_key] = empty + with self._runtest_for(item, "setup"): + yield + + @hookimpl(wrapper=True) + def pytest_runtest_call(self, item: nodes.Item) -> Generator[None]: + self.log_cli_handler.set_when("call") + + with self._runtest_for(item, "call"): + yield + + @hookimpl(wrapper=True) + def pytest_runtest_teardown(self, item: nodes.Item) -> Generator[None]: + self.log_cli_handler.set_when("teardown") + + try: + with self._runtest_for(item, "teardown"): + yield + finally: + del item.stash[caplog_records_key] + del item.stash[caplog_handler_key] + + @hookimpl + def pytest_runtest_logfinish(self) -> None: + self.log_cli_handler.set_when("finish") + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_sessionfinish(self) -> Generator[None]: + self.log_cli_handler.set_when("sessionfinish") + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + return (yield) + + @hookimpl + def pytest_unconfigure(self) -> None: + # Close the FileHandler explicitly. + # (logging.shutdown might have lost the weakref?!) + self.log_file_handler.close() + + +class _FileHandler(logging.FileHandler): + """A logging FileHandler with pytest tweaks.""" + + def handleError(self, record: logging.LogRecord) -> None: + # Handled by LogCaptureHandler. + pass + + +class _LiveLoggingStreamHandler(logging_StreamHandler): + """A logging StreamHandler used by the live logging feature: it will + write a newline before the first log message in each test. + + During live logging we must also explicitly disable stdout/stderr + capturing otherwise it will get captured and won't appear in the + terminal. + """ + + # Officially stream needs to be a IO[str], but TerminalReporter + # isn't. So force it. 
+ stream: TerminalReporter = None # type: ignore + + def __init__( + self, + terminal_reporter: TerminalReporter, + capture_manager: CaptureManager | None, + ) -> None: + super().__init__(stream=terminal_reporter) # type: ignore[arg-type] + self.capture_manager = capture_manager + self.reset() + self.set_when(None) + self._test_outcome_written = False + + def reset(self) -> None: + """Reset the handler; should be called before the start of each test.""" + self._first_record_emitted = False + + def set_when(self, when: str | None) -> None: + """Prepare for the given test phase (setup/call/teardown).""" + self._when = when + self._section_name_shown = False + if when == "start": + self._test_outcome_written = False + + def emit(self, record: logging.LogRecord) -> None: + ctx_manager = ( + self.capture_manager.global_and_fixture_disabled() + if self.capture_manager + else nullcontext() + ) + with ctx_manager: + if not self._first_record_emitted: + self.stream.write("\n") + self._first_record_emitted = True + elif self._when in ("teardown", "finish"): + if not self._test_outcome_written: + self._test_outcome_written = True + self.stream.write("\n") + if not self._section_name_shown and self._when: + self.stream.section("live log " + self._when, sep="-", bold=True) + self._section_name_shown = True + super().emit(record) + + def handleError(self, record: logging.LogRecord) -> None: + # Handled by LogCaptureHandler. + pass + + +class _LiveLoggingNullHandler(logging.NullHandler): + """A logging handler used when live logging is disabled.""" + + def reset(self) -> None: + pass + + def set_when(self, when: str) -> None: + pass + + def handleError(self, record: logging.LogRecord) -> None: + # Handled by LogCaptureHandler. + pass diff --git a/.venv/lib/python3.12/site-packages/_pytest/main.py b/.venv/lib/python3.12/site-packages/_pytest/main.py new file mode 100644 index 0000000..9bc930d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/main.py @@ -0,0 +1,1203 @@ +"""Core implementation of the testing process: init, session, runtest loop.""" + +from __future__ import annotations + +import argparse +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Sequence +from collections.abc import Set as AbstractSet +import dataclasses +import fnmatch +import functools +import importlib +import importlib.util +import os +from pathlib import Path +import sys +from typing import final +from typing import Literal +from typing import overload +from typing import TYPE_CHECKING +import warnings + +import pluggy + +from _pytest import nodes +import _pytest._code +from _pytest.config import Config +from _pytest.config import directory_arg +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config import PytestPluginManager +from _pytest.config import UsageError +from _pytest.config.argparsing import OverrideIniAction +from _pytest.config.argparsing import Parser +from _pytest.config.compat import PathAwareHookProxy +from _pytest.outcomes import exit +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath +from _pytest.pathlib import fnmatch_ex +from _pytest.pathlib import safe_exists +from _pytest.pathlib import samefile_nofollow +from _pytest.pathlib import scandir +from _pytest.reports import CollectReport +from _pytest.reports import TestReport +from _pytest.runner import collect_one_node +from _pytest.runner import SetupState +from 
_pytest.warning_types import PytestWarning + + +if TYPE_CHECKING: + from typing_extensions import Self + + from _pytest.fixtures import FixtureManager + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group._addoption( # private to use reserved lower-case short option + "-x", + "--exitfirst", + action="store_const", + dest="maxfail", + const=1, + help="Exit instantly on first error or failed test", + ) + group.addoption( + "--maxfail", + metavar="num", + action="store", + type=int, + dest="maxfail", + default=0, + help="Exit after first num failures or errors", + ) + group.addoption( + "--strict-config", + action=OverrideIniAction, + ini_option="strict_config", + ini_value="true", + help="Enables the strict_config option", + ) + group.addoption( + "--strict-markers", + action=OverrideIniAction, + ini_option="strict_markers", + ini_value="true", + help="Enables the strict_markers option", + ) + group.addoption( + "--strict", + action=OverrideIniAction, + ini_option="strict", + ini_value="true", + help="Enables the strict option", + ) + parser.addini( + "strict_config", + "Any warnings encountered while parsing the `pytest` section of the " + "configuration file raise errors", + type="bool", + # None => fallback to `strict`. + default=None, + ) + parser.addini( + "strict_markers", + "Markers not registered in the `markers` section of the configuration " + "file raise errors", + type="bool", + # None => fallback to `strict`. + default=None, + ) + parser.addini( + "strict", + "Enables all strictness options, currently: " + "strict_config, strict_markers, strict_xfail, strict_parametrization_ids", + type="bool", + default=False, + ) + + group = parser.getgroup("pytest-warnings") + group.addoption( + "-W", + "--pythonwarnings", + action="append", + help="Set which warnings to report, see -W option of Python itself", + ) + parser.addini( + "filterwarnings", + type="linelist", + help="Each line specifies a pattern for " + "warnings.filterwarnings. 
" + "Processed after -W/--pythonwarnings.", + ) + + group = parser.getgroup("collect", "collection") + group.addoption( + "--collectonly", + "--collect-only", + "--co", + action="store_true", + help="Only collect tests, don't execute them", + ) + group.addoption( + "--pyargs", + action="store_true", + help="Try to interpret all arguments as Python packages", + ) + group.addoption( + "--ignore", + action="append", + metavar="path", + help="Ignore path during collection (multi-allowed)", + ) + group.addoption( + "--ignore-glob", + action="append", + metavar="path", + help="Ignore path pattern during collection (multi-allowed)", + ) + group.addoption( + "--deselect", + action="append", + metavar="nodeid_prefix", + help="Deselect item (via node id prefix) during collection (multi-allowed)", + ) + group.addoption( + "--confcutdir", + dest="confcutdir", + default=None, + metavar="dir", + type=functools.partial(directory_arg, optname="--confcutdir"), + help="Only load conftest.py's relative to specified dir", + ) + group.addoption( + "--noconftest", + action="store_true", + dest="noconftest", + default=False, + help="Don't load any conftest.py files", + ) + group.addoption( + "--keepduplicates", + "--keep-duplicates", + action="store_true", + dest="keepduplicates", + default=False, + help="Keep duplicate tests", + ) + group.addoption( + "--collect-in-virtualenv", + action="store_true", + dest="collect_in_virtualenv", + default=False, + help="Don't ignore tests in a local virtualenv directory", + ) + group.addoption( + "--continue-on-collection-errors", + action="store_true", + default=False, + dest="continue_on_collection_errors", + help="Force test execution even if collection errors occur", + ) + group.addoption( + "--import-mode", + default="prepend", + choices=["prepend", "append", "importlib"], + dest="importmode", + help="Prepend/append to sys.path when importing test modules and conftest " + "files. Default: prepend.", + ) + parser.addini( + "norecursedirs", + "Directory patterns to avoid for recursion", + type="args", + default=[ + "*.egg", + ".*", + "_darcs", + "build", + "CVS", + "dist", + "node_modules", + "venv", + "{arch}", + ], + ) + parser.addini( + "testpaths", + "Directories to search for tests when no files or directories are given on the " + "command line", + type="args", + default=[], + ) + parser.addini( + "collect_imported_tests", + "Whether to collect tests in imported modules outside `testpaths`", + type="bool", + default=True, + ) + parser.addini( + "consider_namespace_packages", + type="bool", + default=False, + help="Consider namespace packages when resolving module names during import", + ) + + group = parser.getgroup("debugconfig", "test session debugging and configuration") + group._addoption( # private to use reserved lower-case short option + "-c", + "--config-file", + metavar="FILE", + type=str, + dest="inifilename", + help="Load configuration from `FILE` instead of trying to locate one of the " + "implicit configuration files.", + ) + group.addoption( + "--rootdir", + action="store", + dest="rootdir", + help="Define root directory for tests. Can be relative path: 'root_dir', './root_dir', " + "'root_dir/another_dir/'; absolute path: '/home/user/root_dir'; path with variables: " + "'$HOME/root_dir'.", + ) + group.addoption( + "--basetemp", + dest="basetemp", + default=None, + type=validate_basetemp, + metavar="dir", + help=( + "Base temporary directory for this test run. 
" + "(Warning: this directory is removed if it exists.)" + ), + ) + + +def validate_basetemp(path: str) -> str: + # GH 7119 + msg = "basetemp must not be empty, the current working directory or any parent directory of it" + + # empty path + if not path: + raise argparse.ArgumentTypeError(msg) + + def is_ancestor(base: Path, query: Path) -> bool: + """Return whether query is an ancestor of base.""" + if base == query: + return True + return query in base.parents + + # check if path is an ancestor of cwd + if is_ancestor(Path.cwd(), Path(path).absolute()): + raise argparse.ArgumentTypeError(msg) + + # check symlinks for ancestors + if is_ancestor(Path.cwd().resolve(), Path(path).resolve()): + raise argparse.ArgumentTypeError(msg) + + return path + + +def wrap_session( + config: Config, doit: Callable[[Config, Session], int | ExitCode | None] +) -> int | ExitCode: + """Skeleton command line program.""" + session = Session.from_config(config) + session.exitstatus = ExitCode.OK + initstate = 0 + try: + try: + config._do_configure() + initstate = 1 + config.hook.pytest_sessionstart(session=session) + initstate = 2 + session.exitstatus = doit(config, session) or 0 + except UsageError: + session.exitstatus = ExitCode.USAGE_ERROR + raise + except Failed: + session.exitstatus = ExitCode.TESTS_FAILED + except (KeyboardInterrupt, exit.Exception): + excinfo = _pytest._code.ExceptionInfo.from_current() + exitstatus: int | ExitCode = ExitCode.INTERRUPTED + if isinstance(excinfo.value, exit.Exception): + if excinfo.value.returncode is not None: + exitstatus = excinfo.value.returncode + if initstate < 2: + sys.stderr.write(f"{excinfo.typename}: {excinfo.value.msg}\n") + config.hook.pytest_keyboard_interrupt(excinfo=excinfo) + session.exitstatus = exitstatus + except BaseException: + session.exitstatus = ExitCode.INTERNAL_ERROR + excinfo = _pytest._code.ExceptionInfo.from_current() + try: + config.notify_exception(excinfo, config.option) + except exit.Exception as exc: + if exc.returncode is not None: + session.exitstatus = exc.returncode + sys.stderr.write(f"{type(exc).__name__}: {exc}\n") + else: + if isinstance(excinfo.value, SystemExit): + sys.stderr.write("mainloop: caught unexpected SystemExit!\n") + + finally: + # Explicitly break reference cycle. 
+ excinfo = None # type: ignore + os.chdir(session.startpath) + if initstate >= 2: + try: + config.hook.pytest_sessionfinish( + session=session, exitstatus=session.exitstatus + ) + except exit.Exception as exc: + if exc.returncode is not None: + session.exitstatus = exc.returncode + sys.stderr.write(f"{type(exc).__name__}: {exc}\n") + config._ensure_unconfigure() + return session.exitstatus + + +def pytest_cmdline_main(config: Config) -> int | ExitCode: + return wrap_session(config, _main) + + +def _main(config: Config, session: Session) -> int | ExitCode | None: + """Default command line protocol for initialization, session, + running tests and reporting.""" + config.hook.pytest_collection(session=session) + config.hook.pytest_runtestloop(session=session) + + if session.testsfailed: + return ExitCode.TESTS_FAILED + elif session.testscollected == 0: + return ExitCode.NO_TESTS_COLLECTED + return None + + +def pytest_collection(session: Session) -> None: + session.perform_collect() + + +def pytest_runtestloop(session: Session) -> bool: + if session.testsfailed and not session.config.option.continue_on_collection_errors: + raise session.Interrupted( + f"{session.testsfailed} error{'s' if session.testsfailed != 1 else ''} during collection" + ) + + if session.config.option.collectonly: + return True + + for i, item in enumerate(session.items): + nextitem = session.items[i + 1] if i + 1 < len(session.items) else None + item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) + if session.shouldfail: + raise session.Failed(session.shouldfail) + if session.shouldstop: + raise session.Interrupted(session.shouldstop) + return True + + +def _in_venv(path: Path) -> bool: + """Attempt to detect if ``path`` is the root of a Virtual Environment by + checking for the existence of the pyvenv.cfg file. + + [https://peps.python.org/pep-0405/] + + For regression protection we also check for conda environments that do not include pyenv.cfg yet -- + https://github.com/conda/conda/issues/13337 is the conda issue tracking adding pyenv.cfg. + + Checking for the `conda-meta/history` file per https://github.com/pytest-dev/pytest/issues/12652#issuecomment-2246336902. 
+ + """ + try: + return ( + path.joinpath("pyvenv.cfg").is_file() + or path.joinpath("conda-meta", "history").is_file() + ) + except OSError: + return False + + +def pytest_ignore_collect(collection_path: Path, config: Config) -> bool | None: + if collection_path.name == "__pycache__": + return True + + ignore_paths = config._getconftest_pathlist( + "collect_ignore", path=collection_path.parent + ) + ignore_paths = ignore_paths or [] + excludeopt = config.getoption("ignore") + if excludeopt: + ignore_paths.extend(absolutepath(x) for x in excludeopt) + + if collection_path in ignore_paths: + return True + + ignore_globs = config._getconftest_pathlist( + "collect_ignore_glob", path=collection_path.parent + ) + ignore_globs = ignore_globs or [] + excludeglobopt = config.getoption("ignore_glob") + if excludeglobopt: + ignore_globs.extend(absolutepath(x) for x in excludeglobopt) + + if any(fnmatch.fnmatch(str(collection_path), str(glob)) for glob in ignore_globs): + return True + + allow_in_venv = config.getoption("collect_in_virtualenv") + if not allow_in_venv and _in_venv(collection_path): + return True + + if collection_path.is_dir(): + norecursepatterns = config.getini("norecursedirs") + if any(fnmatch_ex(pat, collection_path) for pat in norecursepatterns): + return True + + return None + + +def pytest_collect_directory( + path: Path, parent: nodes.Collector +) -> nodes.Collector | None: + return Dir.from_parent(parent, path=path) + + +def pytest_collection_modifyitems(items: list[nodes.Item], config: Config) -> None: + deselect_prefixes = tuple(config.getoption("deselect") or []) + if not deselect_prefixes: + return + + remaining = [] + deselected = [] + for colitem in items: + if colitem.nodeid.startswith(deselect_prefixes): + deselected.append(colitem) + else: + remaining.append(colitem) + + if deselected: + config.hook.pytest_deselected(items=deselected) + items[:] = remaining + + +class FSHookProxy: + def __init__( + self, + pm: PytestPluginManager, + remove_mods: AbstractSet[object], + ) -> None: + self.pm = pm + self.remove_mods = remove_mods + + def __getattr__(self, name: str) -> pluggy.HookCaller: + x = self.pm.subset_hook_caller(name, remove_plugins=self.remove_mods) + self.__dict__[name] = x + return x + + +class Interrupted(KeyboardInterrupt): + """Signals that the test run was interrupted.""" + + __module__ = "builtins" # For py3. + + +class Failed(Exception): + """Signals a stop as failed test run.""" + + +@dataclasses.dataclass +class _bestrelpath_cache(dict[Path, str]): + __slots__ = ("path",) + + path: Path + + def __missing__(self, path: Path) -> str: + r = bestrelpath(self.path, path) + self[path] = r + return r + + +@final +class Dir(nodes.Directory): + """Collector of files in a file system directory. + + .. versionadded:: 8.0 + + .. note:: + + Python directories with an `__init__.py` file are instead collected by + :class:`~pytest.Package` by default. Both are :class:`~pytest.Directory` + collectors. + """ + + @classmethod + def from_parent( # type: ignore[override] + cls, + parent: nodes.Collector, + *, + path: Path, + ) -> Self: + """The public constructor. + + :param parent: The parent collector of this Dir. + :param path: The directory's path. 
+ :type path: pathlib.Path + """ + return super().from_parent(parent=parent, path=path) + + def collect(self) -> Iterable[nodes.Item | nodes.Collector]: + config = self.config + col: nodes.Collector | None + cols: Sequence[nodes.Collector] + ihook = self.ihook + for direntry in scandir(self.path): + if direntry.is_dir(): + path = Path(direntry.path) + if not self.session.isinitpath(path, with_parents=True): + if ihook.pytest_ignore_collect(collection_path=path, config=config): + continue + col = ihook.pytest_collect_directory(path=path, parent=self) + if col is not None: + yield col + + elif direntry.is_file(): + path = Path(direntry.path) + if not self.session.isinitpath(path): + if ihook.pytest_ignore_collect(collection_path=path, config=config): + continue + cols = ihook.pytest_collect_file(file_path=path, parent=self) + yield from cols + + +@final +class Session(nodes.Collector): + """The root of the collection tree. + + ``Session`` collects the initial paths given as arguments to pytest. + """ + + Interrupted = Interrupted + Failed = Failed + # Set on the session by runner.pytest_sessionstart. + _setupstate: SetupState + # Set on the session by fixtures.pytest_sessionstart. + _fixturemanager: FixtureManager + exitstatus: int | ExitCode + + def __init__(self, config: Config) -> None: + super().__init__( + name="", + path=config.rootpath, + fspath=None, + parent=None, + config=config, + session=self, + nodeid="", + ) + self.testsfailed = 0 + self.testscollected = 0 + self._shouldstop: bool | str = False + self._shouldfail: bool | str = False + self.trace = config.trace.root.get("collection") + self._initialpaths: frozenset[Path] = frozenset() + self._initialpaths_with_parents: frozenset[Path] = frozenset() + self._notfound: list[tuple[str, Sequence[nodes.Collector]]] = [] + self._initial_parts: list[CollectionArgument] = [] + self._collection_cache: dict[nodes.Collector, CollectReport] = {} + self.items: list[nodes.Item] = [] + + self._bestrelpathcache: dict[Path, str] = _bestrelpath_cache(config.rootpath) + + self.config.pluginmanager.register(self, name="session") + + @classmethod + def from_config(cls, config: Config) -> Session: + session: Session = cls._create(config=config) + return session + + def __repr__(self) -> str: + return ( + f"<{self.__class__.__name__} {self.name} " + f"exitstatus=%r " + f"testsfailed={self.testsfailed} " + f"testscollected={self.testscollected}>" + ) % getattr(self, "exitstatus", "") + + @property + def shouldstop(self) -> bool | str: + return self._shouldstop + + @shouldstop.setter + def shouldstop(self, value: bool | str) -> None: + # The runner checks shouldfail and assumes that if it is set we are + # definitely stopping, so prevent unsetting it. + if value is False and self._shouldstop: + warnings.warn( + PytestWarning( + "session.shouldstop cannot be unset after it has been set; ignoring." + ), + stacklevel=2, + ) + return + self._shouldstop = value + + @property + def shouldfail(self) -> bool | str: + return self._shouldfail + + @shouldfail.setter + def shouldfail(self, value: bool | str) -> None: + # The runner checks shouldfail and assumes that if it is set we are + # definitely stopping, so prevent unsetting it. + if value is False and self._shouldfail: + warnings.warn( + PytestWarning( + "session.shouldfail cannot be unset after it has been set; ignoring." + ), + stacklevel=2, + ) + return + self._shouldfail = value + + @property + def startpath(self) -> Path: + """The path from which pytest was invoked. + + .. 
versionadded:: 7.0.0 + """ + return self.config.invocation_params.dir + + def _node_location_to_relpath(self, node_path: Path) -> str: + # bestrelpath is a quite slow function. + return self._bestrelpathcache[node_path] + + @hookimpl(tryfirst=True) + def pytest_collectstart(self) -> None: + if self.shouldfail: + raise self.Failed(self.shouldfail) + if self.shouldstop: + raise self.Interrupted(self.shouldstop) + + @hookimpl(tryfirst=True) + def pytest_runtest_logreport(self, report: TestReport | CollectReport) -> None: + if report.failed and not hasattr(report, "wasxfail"): + self.testsfailed += 1 + maxfail = self.config.getvalue("maxfail") + if maxfail and self.testsfailed >= maxfail: + self.shouldfail = f"stopping after {self.testsfailed} failures" + + pytest_collectreport = pytest_runtest_logreport + + def isinitpath( + self, + path: str | os.PathLike[str], + *, + with_parents: bool = False, + ) -> bool: + """Is path an initial path? + + An initial path is a path explicitly given to pytest on the command + line. + + :param with_parents: + If set, also return True if the path is a parent of an initial path. + + .. versionchanged:: 8.0 + Added the ``with_parents`` parameter. + """ + # Optimization: Path(Path(...)) is much slower than isinstance. + path_ = path if isinstance(path, Path) else Path(path) + if with_parents: + return path_ in self._initialpaths_with_parents + else: + return path_ in self._initialpaths + + def gethookproxy(self, fspath: os.PathLike[str]) -> pluggy.HookRelay: + # Optimization: Path(Path(...)) is much slower than isinstance. + path = fspath if isinstance(fspath, Path) else Path(fspath) + pm = self.config.pluginmanager + # Check if we have the common case of running + # hooks with all conftest.py files. + my_conftestmodules = pm._getconftestmodules(path) + remove_mods = pm._conftest_plugins.difference(my_conftestmodules) + proxy: pluggy.HookRelay + if remove_mods: + # One or more conftests are not in use at this path. + proxy = PathAwareHookProxy(FSHookProxy(pm, remove_mods)) # type: ignore[arg-type,assignment] + else: + # All plugins are active for this fspath. + proxy = self.config.hook + return proxy + + def _collect_path( + self, + path: Path, + path_cache: dict[Path, Sequence[nodes.Collector]], + ) -> Sequence[nodes.Collector]: + """Create a Collector for the given path. + + `path_cache` makes it so the same Collectors are returned for the same + path. + """ + if path in path_cache: + return path_cache[path] + + if path.is_dir(): + ihook = self.gethookproxy(path.parent) + col: nodes.Collector | None = ihook.pytest_collect_directory( + path=path, parent=self + ) + cols: Sequence[nodes.Collector] = (col,) if col is not None else () + + elif path.is_file(): + ihook = self.gethookproxy(path) + cols = ihook.pytest_collect_file(file_path=path, parent=self) + + else: + # Broken symlink or invalid/missing file. + cols = () + + path_cache[path] = cols + return cols + + @overload + def perform_collect( + self, args: Sequence[str] | None = ..., genitems: Literal[True] = ... + ) -> Sequence[nodes.Item]: ... + + @overload + def perform_collect( + self, args: Sequence[str] | None = ..., genitems: bool = ... + ) -> Sequence[nodes.Item | nodes.Collector]: ... + + def perform_collect( + self, args: Sequence[str] | None = None, genitems: bool = True + ) -> Sequence[nodes.Item | nodes.Collector]: + """Perform the collection phase for this session. 
+ + This is called by the default :hook:`pytest_collection` hook + implementation; see the documentation of this hook for more details. + For testing purposes, it may also be called directly on a fresh + ``Session``. + + This function normally recursively expands any collectors collected + from the session to their items, and only items are returned. For + testing purposes, this may be suppressed by passing ``genitems=False``, + in which case the return value contains these collectors unexpanded, + and ``session.items`` is empty. + """ + if args is None: + args = self.config.args + + self.trace("perform_collect", self, args) + self.trace.root.indent += 1 + + hook = self.config.hook + + self._notfound = [] + self._initial_parts = [] + self._collection_cache = {} + self.items = [] + items: Sequence[nodes.Item | nodes.Collector] = self.items + consider_namespace_packages: bool = self.config.getini( + "consider_namespace_packages" + ) + try: + initialpaths: list[Path] = [] + initialpaths_with_parents: list[Path] = [] + + collection_args = [ + resolve_collection_argument( + self.config.invocation_params.dir, + arg, + i, + as_pypath=self.config.option.pyargs, + consider_namespace_packages=consider_namespace_packages, + ) + for i, arg in enumerate(args) + ] + + if not self.config.getoption("keepduplicates"): + # Normalize the collection arguments -- remove duplicates and overlaps. + self._initial_parts = normalize_collection_arguments(collection_args) + else: + self._initial_parts = collection_args + + for collection_argument in self._initial_parts: + initialpaths.append(collection_argument.path) + initialpaths_with_parents.append(collection_argument.path) + initialpaths_with_parents.extend(collection_argument.path.parents) + self._initialpaths = frozenset(initialpaths) + self._initialpaths_with_parents = frozenset(initialpaths_with_parents) + + rep = collect_one_node(self) + self.ihook.pytest_collectreport(report=rep) + self.trace.root.indent -= 1 + if self._notfound: + errors = [] + for arg, collectors in self._notfound: + if collectors: + errors.append( + f"not found: {arg}\n(no match in any of {collectors!r})" + ) + else: + errors.append(f"found no collectors for {arg}") + + raise UsageError(*errors) + + if not genitems: + items = rep.result + else: + if rep.passed: + for node in rep.result: + self.items.extend(self.genitems(node)) + + self.config.pluginmanager.check_pending() + hook.pytest_collection_modifyitems( + session=self, config=self.config, items=items + ) + finally: + self._notfound = [] + self._initial_parts = [] + self._collection_cache = {} + hook.pytest_collection_finish(session=self) + + if genitems: + self.testscollected = len(items) + + return items + + def _collect_one_node( + self, + node: nodes.Collector, + handle_dupes: bool = True, + ) -> tuple[CollectReport, bool]: + if node in self._collection_cache and handle_dupes: + rep = self._collection_cache[node] + return rep, True + else: + rep = collect_one_node(node) + self._collection_cache[node] = rep + return rep, False + + def collect(self) -> Iterator[nodes.Item | nodes.Collector]: + # This is a cache for the root directories of the initial paths. + # We can't use collection_cache for Session because of its special + # role as the bootstrapping collector. 
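For illustration, a minimal sketch of driving the collection phase implemented by ``perform_collect()`` from the outside; ``--collect-only`` reports the resolved node ids without running any tests, and the ``tests/`` path is hypothetical::

    import pytest

    # Resolve the command-line arguments into collected node ids,
    # but do not execute the tests themselves.
    pytest.main(["--collect-only", "-q", "tests/"])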
+ path_cache: dict[Path, Sequence[nodes.Collector]] = {} + + pm = self.config.pluginmanager + + for collection_argument in self._initial_parts: + self.trace("processing argument", collection_argument) + self.trace.root.indent += 1 + + argpath = collection_argument.path + names = collection_argument.parts + parametrization = collection_argument.parametrization + module_name = collection_argument.module_name + + # resolve_collection_argument() ensures this. + if argpath.is_dir(): + assert not names, f"invalid arg {(argpath, names)!r}" + + paths = [argpath] + # Add relevant parents of the path, from the root, e.g. + # /a/b/c.py -> [/, /a, /a/b, /a/b/c.py] + if module_name is None: + # Paths outside of the confcutdir should not be considered. + for path in argpath.parents: + if not pm._is_in_confcutdir(path): + break + paths.insert(0, path) + else: + # For --pyargs arguments, only consider paths matching the module + # name. Paths beyond the package hierarchy are not included. + module_name_parts = module_name.split(".") + for i, path in enumerate(argpath.parents, 2): + if i > len(module_name_parts) or path.stem != module_name_parts[-i]: + break + paths.insert(0, path) + + # Start going over the parts from the root, collecting each level + # and discarding all nodes which don't match the level's part. + any_matched_in_initial_part = False + notfound_collectors = [] + work: list[tuple[nodes.Collector | nodes.Item, list[Path | str]]] = [ + (self, [*paths, *names]) + ] + while work: + matchnode, matchparts = work.pop() + + # Pop'd all of the parts, this is a match. + if not matchparts: + yield matchnode + any_matched_in_initial_part = True + continue + + # Should have been matched by now, discard. + if not isinstance(matchnode, nodes.Collector): + continue + + # Collect this level of matching. + # Collecting Session (self) is done directly to avoid endless + # recursion to this function. + subnodes: Sequence[nodes.Collector | nodes.Item] + if isinstance(matchnode, Session): + assert isinstance(matchparts[0], Path) + subnodes = matchnode._collect_path(matchparts[0], path_cache) + else: + # For backward compat, files given directly multiple + # times on the command line should not be deduplicated. + handle_dupes = not ( + len(matchparts) == 1 + and isinstance(matchparts[0], Path) + and matchparts[0].is_file() + ) + rep, duplicate = self._collect_one_node(matchnode, handle_dupes) + if not duplicate and not rep.passed: + # Report collection failures here to avoid failing to + # run some test specified in the command line because + # the module could not be imported (#134). + matchnode.ihook.pytest_collectreport(report=rep) + if not rep.passed: + continue + subnodes = rep.result + + # Prune this level. + any_matched_in_collector = False + for node in reversed(subnodes): + # Path part e.g. `/a/b/` in `/a/b/test_file.py::TestIt::test_it`. + if isinstance(matchparts[0], Path): + is_match = node.path == matchparts[0] + if sys.platform == "win32" and not is_match: + # In case the file paths do not match, fallback to samefile() to + # account for short-paths on Windows (#11895). But use a version + # which doesn't resolve symlinks, otherwise we might match the + # same file more than once (#12039). + is_match = samefile_nofollow(node.path, matchparts[0]) + + # Name part e.g. `TestIt` in `/a/b/test_file.py::TestIt::test_it`. + else: + if len(matchparts) == 1: + # This the last part, one parametrization goes. + if parametrization is not None: + # A parametrized arg must match exactly. 
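The part-by-part matching here is what gives command-line node ids their semantics. As a sketch (file name and parametrization ids are hypothetical), a bare test name selects every parametrization, while a bracketed id must match one generated id exactly::

    import pytest

    # Matches test_add[2-3], test_add[0-0], ... (all parametrizations).
    pytest.main(["tests/test_math.py::test_add"])

    # Matches only the parametrization whose generated id is "2-3".
    pytest.main(["tests/test_math.py::test_add[2-3]"])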
+ is_match = node.name == matchparts[0] + parametrization + else: + # A non-parameterized arg matches all parametrizations (if any). + # TODO: Remove the hacky split once the collection structure + # contains parametrization. + is_match = node.name.split("[")[0] == matchparts[0] + else: + is_match = node.name == matchparts[0] + if is_match: + work.append((node, matchparts[1:])) + any_matched_in_collector = True + + if not any_matched_in_collector: + notfound_collectors.append(matchnode) + + if not any_matched_in_initial_part: + report_arg = "::".join((str(argpath), *names)) + self._notfound.append((report_arg, notfound_collectors)) + + self.trace.root.indent -= 1 + + def genitems(self, node: nodes.Item | nodes.Collector) -> Iterator[nodes.Item]: + self.trace("genitems", node) + if isinstance(node, nodes.Item): + node.ihook.pytest_itemcollected(item=node) + yield node + else: + assert isinstance(node, nodes.Collector) + # For backward compat, dedup only applies to files. + handle_dupes = not isinstance(node, nodes.File) + rep, duplicate = self._collect_one_node(node, handle_dupes) + if rep.passed: + for subnode in rep.result: + yield from self.genitems(subnode) + if not duplicate: + node.ihook.pytest_collectreport(report=rep) + + +def search_pypath( + module_name: str, *, consider_namespace_packages: bool = False +) -> str | None: + """Search sys.path for the given a dotted module name, and return its file + system path if found.""" + try: + spec = importlib.util.find_spec(module_name) + # AttributeError: looks like package module, but actually filename + # ImportError: module does not exist + # ValueError: not a module name + except (AttributeError, ImportError, ValueError): + return None + + if spec is None: + return None + + if ( + spec.submodule_search_locations is None + or len(spec.submodule_search_locations) == 0 + ): + # Must be a simple module. + return spec.origin + + if consider_namespace_packages: + # If submodule_search_locations is set, it's a package (regular or namespace). + # Typically there is a single entry, but documentation claims it can be empty too + # (e.g. if the package has no physical location). + return spec.submodule_search_locations[0] + + if spec.origin is None: + # This is only the case for namespace packages + return None + + return os.path.dirname(spec.origin) + + +@dataclasses.dataclass(frozen=True) +class CollectionArgument: + """A resolved collection argument.""" + + path: Path + parts: Sequence[str] + parametrization: str | None + module_name: str | None + original_index: int + + +def resolve_collection_argument( + invocation_path: Path, + arg: str, + arg_index: int, + *, + as_pypath: bool = False, + consider_namespace_packages: bool = False, +) -> CollectionArgument: + """Parse path arguments optionally containing selection parts and return (fspath, names). 
+ + Command-line arguments can point to files and/or directories, and optionally contain + parts for specific tests selection, for example: + + "pkg/tests/test_foo.py::TestClass::test_foo" + + This function ensures the path exists, and returns a resolved `CollectionArgument`: + + CollectionArgument( + path=Path("/full/path/to/pkg/tests/test_foo.py"), + parts=["TestClass", "test_foo"], + module_name=None, + ) + + When as_pypath is True, expects that the command-line argument actually contains + module paths instead of file-system paths: + + "pkg.tests.test_foo::TestClass::test_foo[a,b]" + + In which case we search sys.path for a matching module, and then return the *path* to the + found module, which may look like this: + + CollectionArgument( + path=Path("/home/u/myvenv/lib/site-packages/pkg/tests/test_foo.py"), + parts=["TestClass", "test_foo"], + parametrization="[a,b]", + module_name="pkg.tests.test_foo", + ) + + If the path doesn't exist, raise UsageError. + If the path is a directory and selection parts are present, raise UsageError. + """ + base, squacket, rest = arg.partition("[") + strpath, *parts = base.split("::") + if squacket and not parts: + raise UsageError(f"path cannot contain [] parametrization: {arg}") + parametrization = f"{squacket}{rest}" if squacket else None + module_name = None + if as_pypath: + pyarg_strpath = search_pypath( + strpath, consider_namespace_packages=consider_namespace_packages + ) + if pyarg_strpath is not None: + module_name = strpath + strpath = pyarg_strpath + fspath = invocation_path / strpath + fspath = absolutepath(fspath) + if not safe_exists(fspath): + msg = ( + "module or package not found: {arg} (missing __init__.py?)" + if as_pypath + else "file or directory not found: {arg}" + ) + raise UsageError(msg.format(arg=arg)) + if parts and fspath.is_dir(): + msg = ( + "package argument cannot contain :: selection parts: {arg}" + if as_pypath + else "directory argument cannot contain :: selection parts: {arg}" + ) + raise UsageError(msg.format(arg=arg)) + return CollectionArgument( + path=fspath, + parts=parts, + parametrization=parametrization, + module_name=module_name, + original_index=arg_index, + ) + + +def is_collection_argument_subsumed_by( + arg: CollectionArgument, by: CollectionArgument +) -> bool: + """Check if `arg` is subsumed (contained) by `by`.""" + # First check path subsumption. + if by.path != arg.path: + # `by` subsumes `arg` if `by` is a parent directory of `arg` and has no + # parts (collects everything in that directory). + if not by.parts: + return arg.path.is_relative_to(by.path) + return False + # Paths are equal, check parts. + # For example: ("TestClass",) is a prefix of ("TestClass", "test_method"). + if len(by.parts) > len(arg.parts) or arg.parts[: len(by.parts)] != by.parts: + return False + # Paths and parts are equal, check parametrization. + # A `by` without parametrization (None) matches everything, e.g. + # `pytest x.py::test_it` matches `x.py::test_it[0]`. Otherwise must be + # exactly equal. + if by.parametrization is not None and by.parametrization != arg.parametrization: + return False + return True + + +def normalize_collection_arguments( + collection_args: Sequence[CollectionArgument], +) -> list[CollectionArgument]: + """Normalize collection arguments to eliminate overlapping paths and parts. + + Detects when collection arguments overlap in either paths or parts and only + keeps the shorter prefix, or the earliest argument if duplicate, preserving + order. The result is prefix-free. 
+ """ + # A quadratic algorithm is not acceptable since large inputs are possible. + # So this uses an O(n*log(n)) algorithm which takes advantage of the + # property that after sorting, a collection argument will immediately + # precede collection arguments it subsumes. An O(n) algorithm is not worth + # it. + collection_args_sorted = sorted( + collection_args, + key=lambda arg: (arg.path, arg.parts, arg.parametrization or ""), + ) + normalized: list[CollectionArgument] = [] + last_kept = None + for arg in collection_args_sorted: + if last_kept is None or not is_collection_argument_subsumed_by(arg, last_kept): + normalized.append(arg) + last_kept = arg + normalized.sort(key=lambda arg: arg.original_index) + return normalized diff --git a/.venv/lib/python3.12/site-packages/_pytest/mark/__init__.py b/.venv/lib/python3.12/site-packages/_pytest/mark/__init__.py new file mode 100644 index 0000000..841d781 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/mark/__init__.py @@ -0,0 +1,301 @@ +"""Generic mechanism for marking and selecting python functions.""" + +from __future__ import annotations + +import collections +from collections.abc import Collection +from collections.abc import Iterable +from collections.abc import Set as AbstractSet +import dataclasses +from typing import TYPE_CHECKING + +from .expression import Expression +from .structures import _HiddenParam +from .structures import EMPTY_PARAMETERSET_OPTION +from .structures import get_empty_parameterset_mark +from .structures import HIDDEN_PARAM +from .structures import Mark +from .structures import MARK_GEN +from .structures import MarkDecorator +from .structures import MarkGenerator +from .structures import ParameterSet +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config import UsageError +from _pytest.config.argparsing import NOT_SET +from _pytest.config.argparsing import Parser +from _pytest.stash import StashKey + + +if TYPE_CHECKING: + from _pytest.nodes import Item + + +__all__ = [ + "HIDDEN_PARAM", + "MARK_GEN", + "Mark", + "MarkDecorator", + "MarkGenerator", + "ParameterSet", + "get_empty_parameterset_mark", +] + + +old_mark_config_key = StashKey[Config | None]() + + +def param( + *values: object, + marks: MarkDecorator | Collection[MarkDecorator | Mark] = (), + id: str | _HiddenParam | None = None, +) -> ParameterSet: + """Specify a parameter in `pytest.mark.parametrize`_ calls or + :ref:`parametrized fixtures `. + + .. code-block:: python + + @pytest.mark.parametrize( + "test_input,expected", + [ + ("3+5", 8), + pytest.param("6*9", 42, marks=pytest.mark.xfail), + ], + ) + def test_eval(test_input, expected): + assert eval(test_input) == expected + + :param values: Variable args of the values of the parameter set, in order. + + :param marks: + A single mark or a list of marks to be applied to this parameter set. + + :ref:`pytest.mark.usefixtures ` cannot be added via this parameter. + + :type id: str | Literal[pytest.HIDDEN_PARAM] | None + :param id: + The id to attribute to this parameter set. + + .. versionadded:: 8.4 + :ref:`hidden-param` means to hide the parameter set + from the test name. Can only be used at most 1 time, as + test names need to be unique. 
+ """ + return ParameterSet.param(*values, marks=marks, id=id) + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group._addoption( # private to use reserved lower-case short option + "-k", + action="store", + dest="keyword", + default="", + metavar="EXPRESSION", + help="Only run tests which match the given substring expression. " + "An expression is a Python evaluable expression " + "where all names are substring-matched against test names " + "and their parent classes. Example: -k 'test_method or test_" + "other' matches all test functions and classes whose name " + "contains 'test_method' or 'test_other', while -k 'not test_method' " + "matches those that don't contain 'test_method' in their names. " + "-k 'not test_method and not test_other' will eliminate the matches. " + "Additionally keywords are matched to classes and functions " + "containing extra names in their 'extra_keyword_matches' set, " + "as well as functions which have names assigned directly to them. " + "The matching is case-insensitive.", + ) + + group._addoption( # private to use reserved lower-case short option + "-m", + action="store", + dest="markexpr", + default="", + metavar="MARKEXPR", + help="Only run tests matching given mark expression. " + "For example: -m 'mark1 and not mark2'.", + ) + + group.addoption( + "--markers", + action="store_true", + help="show markers (builtin, plugin and per-project ones).", + ) + + parser.addini("markers", "Register new markers for test functions", "linelist") + parser.addini(EMPTY_PARAMETERSET_OPTION, "Default marker for empty parametersets") + + +@hookimpl(tryfirst=True) +def pytest_cmdline_main(config: Config) -> int | ExitCode | None: + import _pytest.config + + if config.option.markers: + config._do_configure() + tw = _pytest.config.create_terminal_writer(config) + for line in config.getini("markers"): + parts = line.split(":", 1) + name = parts[0] + rest = parts[1] if len(parts) == 2 else "" + tw.write(f"@pytest.mark.{name}:", bold=True) + tw.line(rest) + tw.line() + config._ensure_unconfigure() + return 0 + + return None + + +@dataclasses.dataclass +class KeywordMatcher: + """A matcher for keywords. + + Given a list of names, matches any substring of one of these names. The + string inclusion check is case-insensitive. + + Will match on the name of colitem, including the names of its parents. + Only matches names of items which are either a :class:`Class` or a + :class:`Function`. + + Additionally, matches on names in the 'extra_keyword_matches' set of + any item, as well as names directly assigned to test functions. + """ + + __slots__ = ("_names",) + + _names: AbstractSet[str] + + @classmethod + def from_item(cls, item: Item) -> KeywordMatcher: + mapped_names = set() + + # Add the names of the current item and any parent items, + # except the Session and root Directory's which are not + # interesting for matching. + import pytest + + for node in item.listchain(): + if isinstance(node, pytest.Session): + continue + if isinstance(node, pytest.Directory) and isinstance( + node.parent, pytest.Session + ): + continue + mapped_names.add(node.name) + + # Add the names added as extra keywords to current or parent items. + mapped_names.update(item.listextrakeywords()) + + # Add the names attached to the current function through direct assignment. + function_obj = getattr(item, "function", None) + if function_obj: + mapped_names.update(function_obj.__dict__) + + # Add the markers to the keywords as we no longer handle them correctly. 
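As a usage sketch of what this matcher ends up evaluating (test names are hypothetical): ``-k`` takes a boolean expression over case-insensitive substrings of item names, parent class names, and extra keywords::

    import pytest

    # Run tests whose names mention http or smtp, but skip the slow ones.
    pytest.main(["-k", "(http or smtp) and not slow", "tests/"])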
+ mapped_names.update(mark.name for mark in item.iter_markers()) + + return cls(mapped_names) + + def __call__(self, subname: str, /, **kwargs: str | int | bool | None) -> bool: + if kwargs: + raise UsageError("Keyword expressions do not support call parameters.") + subname = subname.lower() + return any(subname in name.lower() for name in self._names) + + +def deselect_by_keyword(items: list[Item], config: Config) -> None: + keywordexpr = config.option.keyword.lstrip() + if not keywordexpr: + return + + expr = _parse_expression(keywordexpr, "Wrong expression passed to '-k'") + + remaining = [] + deselected = [] + for colitem in items: + if not expr.evaluate(KeywordMatcher.from_item(colitem)): + deselected.append(colitem) + else: + remaining.append(colitem) + + if deselected: + config.hook.pytest_deselected(items=deselected) + items[:] = remaining + + +@dataclasses.dataclass +class MarkMatcher: + """A matcher for markers which are present. + + Tries to match on any marker names, attached to the given colitem. + """ + + __slots__ = ("own_mark_name_mapping",) + + own_mark_name_mapping: dict[str, list[Mark]] + + @classmethod + def from_markers(cls, markers: Iterable[Mark]) -> MarkMatcher: + mark_name_mapping = collections.defaultdict(list) + for mark in markers: + mark_name_mapping[mark.name].append(mark) + return cls(mark_name_mapping) + + def __call__(self, name: str, /, **kwargs: str | int | bool | None) -> bool: + if not (matches := self.own_mark_name_mapping.get(name, [])): + return False + + for mark in matches: # pylint: disable=consider-using-any-or-all + if all(mark.kwargs.get(k, NOT_SET) == v for k, v in kwargs.items()): + return True + return False + + +def deselect_by_mark(items: list[Item], config: Config) -> None: + matchexpr = config.option.markexpr + if not matchexpr: + return + + expr = _parse_expression(matchexpr, "Wrong expression passed to '-m'") + remaining: list[Item] = [] + deselected: list[Item] = [] + for item in items: + if expr.evaluate(MarkMatcher.from_markers(item.iter_markers())): + remaining.append(item) + else: + deselected.append(item) + if deselected: + config.hook.pytest_deselected(items=deselected) + items[:] = remaining + + +def _parse_expression(expr: str, exc_message: str) -> Expression: + try: + return Expression.compile(expr) + except SyntaxError as e: + raise UsageError( + f"{exc_message}: {e.text}: at column {e.offset}: {e.msg}" + ) from None + + +def pytest_collection_modifyitems(items: list[Item], config: Config) -> None: + deselect_by_keyword(items, config) + deselect_by_mark(items, config) + + +def pytest_configure(config: Config) -> None: + config.stash[old_mark_config_key] = MARK_GEN._config + MARK_GEN._config = config + + empty_parameterset = config.getini(EMPTY_PARAMETERSET_OPTION) + + if empty_parameterset not in ("skip", "xfail", "fail_at_collect", None, ""): + raise UsageError( + f"{EMPTY_PARAMETERSET_OPTION!s} must be one of skip, xfail or fail_at_collect" + f" but it is {empty_parameterset!r}" + ) + + +def pytest_unconfigure(config: Config) -> None: + MARK_GEN._config = config.stash.get(old_mark_config_key, None) diff --git a/.venv/lib/python3.12/site-packages/_pytest/mark/expression.py b/.venv/lib/python3.12/site-packages/_pytest/mark/expression.py new file mode 100644 index 0000000..3bdbd03 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/mark/expression.py @@ -0,0 +1,353 @@ +r"""Evaluate match expressions, as used by `-k` and `-m`. + +The grammar is: + +expression: expr? 
EOF +expr: and_expr ('or' and_expr)* +and_expr: not_expr ('and' not_expr)* +not_expr: 'not' not_expr | '(' expr ')' | ident kwargs? + +ident: (\w|:|\+|-|\.|\[|\]|\\|/)+ +kwargs: ('(' name '=' value ( ', ' name '=' value )* ')') +name: a valid ident, but not a reserved keyword +value: (unescaped) string literal | (-)?[0-9]+ | 'False' | 'True' | 'None' + +The semantics are: + +- Empty expression evaluates to False. +- ident evaluates to True or False according to a provided matcher function. +- ident with parentheses and keyword arguments evaluates to True or False according to a provided matcher function. +- or/and/not evaluate according to the usual boolean semantics. +""" + +from __future__ import annotations + +import ast +from collections.abc import Iterator +from collections.abc import Mapping +from collections.abc import Sequence +import dataclasses +import enum +import keyword +import re +import types +from typing import Final +from typing import final +from typing import Literal +from typing import NoReturn +from typing import overload +from typing import Protocol + + +__all__ = [ + "Expression", + "ExpressionMatcher", +] + + +FILE_NAME: Final = "" + + +class TokenType(enum.Enum): + LPAREN = "left parenthesis" + RPAREN = "right parenthesis" + OR = "or" + AND = "and" + NOT = "not" + IDENT = "identifier" + EOF = "end of input" + EQUAL = "=" + STRING = "string literal" + COMMA = "," + + +@dataclasses.dataclass(frozen=True) +class Token: + __slots__ = ("pos", "type", "value") + type: TokenType + value: str + pos: int + + +class Scanner: + __slots__ = ("current", "input", "tokens") + + def __init__(self, input: str) -> None: + self.input = input + self.tokens = self.lex(input) + self.current = next(self.tokens) + + def lex(self, input: str) -> Iterator[Token]: + pos = 0 + while pos < len(input): + if input[pos] in (" ", "\t"): + pos += 1 + elif input[pos] == "(": + yield Token(TokenType.LPAREN, "(", pos) + pos += 1 + elif input[pos] == ")": + yield Token(TokenType.RPAREN, ")", pos) + pos += 1 + elif input[pos] == "=": + yield Token(TokenType.EQUAL, "=", pos) + pos += 1 + elif input[pos] == ",": + yield Token(TokenType.COMMA, ",", pos) + pos += 1 + elif (quote_char := input[pos]) in ("'", '"'): + end_quote_pos = input.find(quote_char, pos + 1) + if end_quote_pos == -1: + raise SyntaxError( + f'closing quote "{quote_char}" is missing', + (FILE_NAME, 1, pos + 1, input), + ) + value = input[pos : end_quote_pos + 1] + if (backslash_pos := input.find("\\")) != -1: + raise SyntaxError( + r'escaping with "\" not supported in marker expression', + (FILE_NAME, 1, backslash_pos + 1, input), + ) + yield Token(TokenType.STRING, value, pos) + pos += len(value) + else: + match = re.match(r"(:?\w|:|\+|-|\.|\[|\]|\\|/)+", input[pos:]) + if match: + value = match.group(0) + if value == "or": + yield Token(TokenType.OR, value, pos) + elif value == "and": + yield Token(TokenType.AND, value, pos) + elif value == "not": + yield Token(TokenType.NOT, value, pos) + else: + yield Token(TokenType.IDENT, value, pos) + pos += len(value) + else: + raise SyntaxError( + f'unexpected character "{input[pos]}"', + (FILE_NAME, 1, pos + 1, input), + ) + yield Token(TokenType.EOF, "", pos) + + @overload + def accept(self, type: TokenType, *, reject: Literal[True]) -> Token: ... + + @overload + def accept( + self, type: TokenType, *, reject: Literal[False] = False + ) -> Token | None: ... 
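For orientation, a small sketch of how the scanner and parser are exercised end to end through the ``Expression`` class defined further below; this is internal pytest API and the identifiers are hypothetical, shown for illustration only::

    from _pytest.mark.expression import Expression

    expr = Expression.compile("linux and not slow")

    # The matcher callback decides whether a bare identifier "matches".
    names = {"linux", "fast"}
    assert expr.evaluate(lambda name, **kwargs: name in names)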
+ + def accept(self, type: TokenType, *, reject: bool = False) -> Token | None: + if self.current.type is type: + token = self.current + if token.type is not TokenType.EOF: + self.current = next(self.tokens) + return token + if reject: + self.reject((type,)) + return None + + def reject(self, expected: Sequence[TokenType]) -> NoReturn: + raise SyntaxError( + "expected {}; got {}".format( + " OR ".join(type.value for type in expected), + self.current.type.value, + ), + (FILE_NAME, 1, self.current.pos + 1, self.input), + ) + + +# True, False and None are legal match expression identifiers, +# but illegal as Python identifiers. To fix this, this prefix +# is added to identifiers in the conversion to Python AST. +IDENT_PREFIX = "$" + + +def expression(s: Scanner) -> ast.Expression: + if s.accept(TokenType.EOF): + ret: ast.expr = ast.Constant(False) + else: + ret = expr(s) + s.accept(TokenType.EOF, reject=True) + return ast.fix_missing_locations(ast.Expression(ret)) + + +def expr(s: Scanner) -> ast.expr: + ret = and_expr(s) + while s.accept(TokenType.OR): + rhs = and_expr(s) + ret = ast.BoolOp(ast.Or(), [ret, rhs]) + return ret + + +def and_expr(s: Scanner) -> ast.expr: + ret = not_expr(s) + while s.accept(TokenType.AND): + rhs = not_expr(s) + ret = ast.BoolOp(ast.And(), [ret, rhs]) + return ret + + +def not_expr(s: Scanner) -> ast.expr: + if s.accept(TokenType.NOT): + return ast.UnaryOp(ast.Not(), not_expr(s)) + if s.accept(TokenType.LPAREN): + ret = expr(s) + s.accept(TokenType.RPAREN, reject=True) + return ret + ident = s.accept(TokenType.IDENT) + if ident: + name = ast.Name(IDENT_PREFIX + ident.value, ast.Load()) + if s.accept(TokenType.LPAREN): + ret = ast.Call(func=name, args=[], keywords=all_kwargs(s)) + s.accept(TokenType.RPAREN, reject=True) + else: + ret = name + return ret + + s.reject((TokenType.NOT, TokenType.LPAREN, TokenType.IDENT)) + + +BUILTIN_MATCHERS = {"True": True, "False": False, "None": None} + + +def single_kwarg(s: Scanner) -> ast.keyword: + keyword_name = s.accept(TokenType.IDENT, reject=True) + if not keyword_name.value.isidentifier(): + raise SyntaxError( + f"not a valid python identifier {keyword_name.value}", + (FILE_NAME, 1, keyword_name.pos + 1, s.input), + ) + if keyword.iskeyword(keyword_name.value): + raise SyntaxError( + f"unexpected reserved python keyword `{keyword_name.value}`", + (FILE_NAME, 1, keyword_name.pos + 1, s.input), + ) + s.accept(TokenType.EQUAL, reject=True) + + if value_token := s.accept(TokenType.STRING): + value: str | int | bool | None = value_token.value[1:-1] # strip quotes + else: + value_token = s.accept(TokenType.IDENT, reject=True) + if (number := value_token.value).isdigit() or ( + number.startswith("-") and number[1:].isdigit() + ): + value = int(number) + elif value_token.value in BUILTIN_MATCHERS: + value = BUILTIN_MATCHERS[value_token.value] + else: + raise SyntaxError( + f'unexpected character/s "{value_token.value}"', + (FILE_NAME, 1, value_token.pos + 1, s.input), + ) + + ret = ast.keyword(keyword_name.value, ast.Constant(value)) + return ret + + +def all_kwargs(s: Scanner) -> list[ast.keyword]: + ret = [single_kwarg(s)] + while s.accept(TokenType.COMMA): + ret.append(single_kwarg(s)) + return ret + + +class ExpressionMatcher(Protocol): + """A callable which, given an identifier and optional kwargs, should return + whether it matches in an :class:`Expression` evaluation. + + Should be prepared to handle arbitrary strings as input. + + If no kwargs are provided, the expression of the form `foo`. 
+ If kwargs are provided, the expression is of the form `foo(1, b=True, "s")`. + + If the expression is not supported (e.g. don't want to accept the kwargs + syntax variant), should raise :class:`~pytest.UsageError`. + + Example:: + + def matcher(name: str, /, **kwargs: str | int | bool | None) -> bool: + # Match `cat`. + if name == "cat" and not kwargs: + return True + # Match `dog(barks=True)`. + if name == "dog" and kwargs == {"barks": False}: + return True + return False + """ + + def __call__(self, name: str, /, **kwargs: str | int | bool | None) -> bool: ... + + +@dataclasses.dataclass +class MatcherNameAdapter: + matcher: ExpressionMatcher + name: str + + def __bool__(self) -> bool: + return self.matcher(self.name) + + def __call__(self, **kwargs: str | int | bool | None) -> bool: + return self.matcher(self.name, **kwargs) + + +class MatcherAdapter(Mapping[str, MatcherNameAdapter]): + """Adapts a matcher function to a locals mapping as required by eval().""" + + def __init__(self, matcher: ExpressionMatcher) -> None: + self.matcher = matcher + + def __getitem__(self, key: str) -> MatcherNameAdapter: + return MatcherNameAdapter(matcher=self.matcher, name=key[len(IDENT_PREFIX) :]) + + def __iter__(self) -> Iterator[str]: + raise NotImplementedError() + + def __len__(self) -> int: + raise NotImplementedError() + + +@final +class Expression: + """A compiled match expression as used by -k and -m. + + The expression can be evaluated against different matchers. + """ + + __slots__ = ("_code", "input") + + def __init__(self, input: str, code: types.CodeType) -> None: + #: The original input line, as a string. + self.input: Final = input + self._code: Final = code + + @classmethod + def compile(cls, input: str) -> Expression: + """Compile a match expression. + + :param input: The input expression - one line. + + :raises SyntaxError: If the expression is malformed. + """ + astexpr = expression(Scanner(input)) + code = compile( + astexpr, + filename="", + mode="eval", + ) + return Expression(input, code) + + def evaluate(self, matcher: ExpressionMatcher) -> bool: + """Evaluate the match expression. + + :param matcher: + A callback which determines whether an identifier matches or not. + See the :class:`ExpressionMatcher` protocol for details and example. + + :returns: Whether the expression matches or not. + + :raises UsageError: + If the matcher doesn't support the expression. Cannot happen if the + matcher supports all expressions. 
+ """ + return bool(eval(self._code, {"__builtins__": {}}, MatcherAdapter(matcher))) diff --git a/.venv/lib/python3.12/site-packages/_pytest/mark/structures.py b/.venv/lib/python3.12/site-packages/_pytest/mark/structures.py new file mode 100644 index 0000000..16bb6d8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/mark/structures.py @@ -0,0 +1,664 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +import collections.abc +from collections.abc import Callable +from collections.abc import Collection +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Mapping +from collections.abc import MutableMapping +from collections.abc import Sequence +import dataclasses +import enum +import inspect +from typing import Any +from typing import final +from typing import NamedTuple +from typing import overload +from typing import TYPE_CHECKING +from typing import TypeVar +import warnings + +from .._code import getfslineno +from ..compat import NOTSET +from ..compat import NotSetType +from _pytest.config import Config +from _pytest.deprecated import check_ispytest +from _pytest.deprecated import MARKED_FIXTURE +from _pytest.outcomes import fail +from _pytest.raises import AbstractRaises +from _pytest.scope import _ScopeName +from _pytest.warning_types import PytestUnknownMarkWarning + + +if TYPE_CHECKING: + from ..nodes import Node + + +EMPTY_PARAMETERSET_OPTION = "empty_parameter_set_mark" + + +# Singleton type for HIDDEN_PARAM, as described in: +# https://www.python.org/dev/peps/pep-0484/#support-for-singleton-types-in-unions +class _HiddenParam(enum.Enum): + token = 0 + + +#: Can be used as a parameter set id to hide it from the test name. +HIDDEN_PARAM = _HiddenParam.token + + +def istestfunc(func) -> bool: + return callable(func) and getattr(func, "__name__", "") != "" + + +def get_empty_parameterset_mark( + config: Config, argnames: Sequence[str], func +) -> MarkDecorator: + from ..nodes import Collector + + argslisting = ", ".join(argnames) + + _fs, lineno = getfslineno(func) + reason = f"got empty parameter set for ({argslisting})" + requested_mark = config.getini(EMPTY_PARAMETERSET_OPTION) + if requested_mark in ("", None, "skip"): + mark = MARK_GEN.skip(reason=reason) + elif requested_mark == "xfail": + mark = MARK_GEN.xfail(reason=reason, run=False) + elif requested_mark == "fail_at_collect": + raise Collector.CollectError( + f"Empty parameter set in '{func.__name__}' at line {lineno + 1}" + ) + else: + raise LookupError(requested_mark) + return mark + + +class ParameterSet(NamedTuple): + """A set of values for a set of parameters along with associated marks and + an optional ID for the set. 
+ + Examples:: + + pytest.param(1, 2, 3) + # ParameterSet(values=(1, 2, 3), marks=(), id=None) + + pytest.param("hello", id="greeting") + # ParameterSet(values=("hello",), marks=(), id="greeting") + + # Parameter set with marks + pytest.param(42, marks=pytest.mark.xfail) + # ParameterSet(values=(42,), marks=(MarkDecorator(...),), id=None) + + # From parametrize mark (parameter names + list of parameter sets) + pytest.mark.parametrize( + ("a", "b", "expected"), + [ + (1, 2, 3), + pytest.param(40, 2, 42, id="everything"), + ], + ) + # ParameterSet(values=(1, 2, 3), marks=(), id=None) + # ParameterSet(values=(40, 2, 42), marks=(), id="everything") + """ + + values: Sequence[object | NotSetType] + marks: Collection[MarkDecorator | Mark] + id: str | _HiddenParam | None + + @classmethod + def param( + cls, + *values: object, + marks: MarkDecorator | Collection[MarkDecorator | Mark] = (), + id: str | _HiddenParam | None = None, + ) -> ParameterSet: + if isinstance(marks, MarkDecorator): + marks = (marks,) + else: + assert isinstance(marks, collections.abc.Collection) + if any(i.name == "usefixtures" for i in marks): + raise ValueError( + "pytest.param cannot add pytest.mark.usefixtures; see " + "https://docs.pytest.org/en/stable/reference/reference.html#pytest-param" + ) + + if id is not None: + if not isinstance(id, str) and id is not HIDDEN_PARAM: + raise TypeError( + "Expected id to be a string or a `pytest.HIDDEN_PARAM` sentinel, " + f"got {type(id)}: {id!r}", + ) + return cls(values, marks, id) + + @classmethod + def extract_from( + cls, + parameterset: ParameterSet | Sequence[object] | object, + force_tuple: bool = False, + ) -> ParameterSet: + """Extract from an object or objects. + + :param parameterset: + A legacy style parameterset that may or may not be a tuple, + and may or may not be wrapped into a mess of mark objects. + + :param force_tuple: + Enforce tuple wrapping so single argument tuple values + don't get decomposed and break tests. + """ + if isinstance(parameterset, cls): + return parameterset + if force_tuple: + return cls.param(parameterset) + else: + # TODO: Refactor to fix this type-ignore. Currently the following + # passes type-checking but crashes: + # + # @pytest.mark.parametrize(('x', 'y'), [1, 2]) + # def test_foo(x, y): pass + return cls(parameterset, marks=[], id=None) # type: ignore[arg-type] + + @staticmethod + def _parse_parametrize_args( + argnames: str | Sequence[str], + argvalues: Iterable[ParameterSet | Sequence[object] | object], + *args, + **kwargs, + ) -> tuple[Sequence[str], bool]: + if isinstance(argnames, str): + argnames = [x.strip() for x in argnames.split(",") if x.strip()] + force_tuple = len(argnames) == 1 + else: + force_tuple = False + return argnames, force_tuple + + @staticmethod + def _parse_parametrize_parameters( + argvalues: Iterable[ParameterSet | Sequence[object] | object], + force_tuple: bool, + ) -> list[ParameterSet]: + return [ + ParameterSet.extract_from(x, force_tuple=force_tuple) for x in argvalues + ] + + @classmethod + def _for_parametrize( + cls, + argnames: str | Sequence[str], + argvalues: Iterable[ParameterSet | Sequence[object] | object], + func, + config: Config, + nodeid: str, + ) -> tuple[Sequence[str], list[ParameterSet]]: + argnames, force_tuple = cls._parse_parametrize_args(argnames, argvalues) + parameters = cls._parse_parametrize_parameters(argvalues, force_tuple) + del argvalues + + if parameters: + # Check all parameter sets have the correct number of values. 
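A short sketch of the invariant checked here (argument names and values are hypothetical): every parameter set must supply exactly one value per declared argument name::

    import pytest

    @pytest.mark.parametrize(
        ("a", "b", "expected"),           # three names ...
        [
            (1, 2, 3),                    # ... so each set needs three values
            pytest.param(2, 2, 4, id="twos"),
        ],
    )
    def test_add(a, b, expected):
        assert a + b == expected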
+ for param in parameters: + if len(param.values) != len(argnames): + msg = ( + '{nodeid}: in "parametrize" the number of names ({names_len}):\n' + " {names}\n" + "must be equal to the number of values ({values_len}):\n" + " {values}" + ) + fail( + msg.format( + nodeid=nodeid, + values=param.values, + names=argnames, + names_len=len(argnames), + values_len=len(param.values), + ), + pytrace=False, + ) + else: + # Empty parameter set (likely computed at runtime): create a single + # parameter set with NOTSET values, with the "empty parameter set" mark applied to it. + mark = get_empty_parameterset_mark(config, argnames, func) + parameters.append( + ParameterSet( + values=(NOTSET,) * len(argnames), marks=[mark], id="NOTSET" + ) + ) + return argnames, parameters + + +@final +@dataclasses.dataclass(frozen=True) +class Mark: + """A pytest mark.""" + + #: Name of the mark. + name: str + #: Positional arguments of the mark decorator. + args: tuple[Any, ...] + #: Keyword arguments of the mark decorator. + kwargs: Mapping[str, Any] + + #: Source Mark for ids with parametrize Marks. + _param_ids_from: Mark | None = dataclasses.field(default=None, repr=False) + #: Resolved/generated ids with parametrize Marks. + _param_ids_generated: Sequence[str] | None = dataclasses.field( + default=None, repr=False + ) + + def __init__( + self, + name: str, + args: tuple[Any, ...], + kwargs: Mapping[str, Any], + param_ids_from: Mark | None = None, + param_ids_generated: Sequence[str] | None = None, + *, + _ispytest: bool = False, + ) -> None: + """:meta private:""" + check_ispytest(_ispytest) + # Weirdness to bypass frozen=True. + object.__setattr__(self, "name", name) + object.__setattr__(self, "args", args) + object.__setattr__(self, "kwargs", kwargs) + object.__setattr__(self, "_param_ids_from", param_ids_from) + object.__setattr__(self, "_param_ids_generated", param_ids_generated) + + def _has_param_ids(self) -> bool: + return "ids" in self.kwargs or len(self.args) >= 4 + + def combined_with(self, other: Mark) -> Mark: + """Return a new Mark which is a combination of this + Mark and another Mark. + + Combines by appending args and merging kwargs. + + :param Mark other: The mark to combine with. + :rtype: Mark + """ + assert self.name == other.name + + # Remember source of ids with parametrize Marks. + param_ids_from: Mark | None = None + if self.name == "parametrize": + if other._has_param_ids(): + param_ids_from = other + elif self._has_param_ids(): + param_ids_from = self + + return Mark( + self.name, + self.args + other.args, + dict(self.kwargs, **other.kwargs), + param_ids_from=param_ids_from, + _ispytest=True, + ) + + +# A generic parameter designating an object to which a Mark may +# be applied -- a test function (callable) or class. +# Note: a lambda is not allowed, but this can't be represented. +Markable = TypeVar("Markable", bound=Callable[..., object] | type) + + +@dataclasses.dataclass +class MarkDecorator: + """A decorator for applying a mark on test functions and classes. + + ``MarkDecorators`` are created with ``pytest.mark``:: + + mark1 = pytest.mark.NAME # Simple MarkDecorator + mark2 = pytest.mark.NAME(name1=value) # Parametrized MarkDecorator + + and can then be applied as decorators to test functions:: + + @mark2 + def test_function(): + pass + + When a ``MarkDecorator`` is called, it does the following: + + 1. 
If called with a single class as its only positional argument and no + additional keyword arguments, it attaches the mark to the class so it + gets applied automatically to all test cases found in that class. + + 2. If called with a single function as its only positional argument and + no additional keyword arguments, it attaches the mark to the function, + containing all the arguments already stored internally in the + ``MarkDecorator``. + + 3. When called in any other case, it returns a new ``MarkDecorator`` + instance with the original ``MarkDecorator``'s content updated with + the arguments passed to this call. + + Note: The rules above prevent a ``MarkDecorator`` from storing only a + single function or class reference as its positional argument with no + additional keyword or positional arguments. You can work around this by + using `with_args()`. + """ + + mark: Mark + + def __init__(self, mark: Mark, *, _ispytest: bool = False) -> None: + """:meta private:""" + check_ispytest(_ispytest) + self.mark = mark + + @property + def name(self) -> str: + """Alias for mark.name.""" + return self.mark.name + + @property + def args(self) -> tuple[Any, ...]: + """Alias for mark.args.""" + return self.mark.args + + @property + def kwargs(self) -> Mapping[str, Any]: + """Alias for mark.kwargs.""" + return self.mark.kwargs + + @property + def markname(self) -> str: + """:meta private:""" + return self.name # for backward-compat (2.4.1 had this attr) + + def with_args(self, *args: object, **kwargs: object) -> MarkDecorator: + """Return a MarkDecorator with extra arguments added. + + Unlike calling the MarkDecorator, with_args() can be used even + if the sole argument is a callable/class. + """ + mark = Mark(self.name, args, kwargs, _ispytest=True) + return MarkDecorator(self.mark.combined_with(mark), _ispytest=True) + + # Type ignored because the overloads overlap with an incompatible + # return type. Not much we can do about that. Thankfully mypy picks + # the first match so it works out even if we break the rules. + @overload + def __call__(self, arg: Markable) -> Markable: # type: ignore[overload-overlap] + pass + + @overload + def __call__(self, *args: object, **kwargs: object) -> MarkDecorator: + pass + + def __call__(self, *args: object, **kwargs: object): + """Call the MarkDecorator.""" + if args and not kwargs: + func = args[0] + is_class = inspect.isclass(func) + # For staticmethods/classmethods, the marks are eventually fetched from the + # function object, not the descriptor, so unwrap. + unwrapped_func = func + if isinstance(func, staticmethod | classmethod): + unwrapped_func = func.__func__ + if len(args) == 1 and (istestfunc(unwrapped_func) or is_class): + store_mark(unwrapped_func, self.mark, stacklevel=3) + return func + return self.with_args(*args, **kwargs) + + +def get_unpacked_marks( + obj: object | type, + *, + consider_mro: bool = True, +) -> list[Mark]: + """Obtain the unpacked marks that are stored on an object. + + If obj is a class and consider_mro is true, return marks applied to + this class and all of its super-classes in MRO order. If consider_mro + is false, only return marks applied directly to this class. 
+ """ + if isinstance(obj, type): + if not consider_mro: + mark_lists = [obj.__dict__.get("pytestmark", [])] + else: + mark_lists = [ + x.__dict__.get("pytestmark", []) for x in reversed(obj.__mro__) + ] + mark_list = [] + for item in mark_lists: + if isinstance(item, list): + mark_list.extend(item) + else: + mark_list.append(item) + else: + mark_attribute = getattr(obj, "pytestmark", []) + if isinstance(mark_attribute, list): + mark_list = mark_attribute + else: + mark_list = [mark_attribute] + return list(normalize_mark_list(mark_list)) + + +def normalize_mark_list( + mark_list: Iterable[Mark | MarkDecorator], +) -> Iterable[Mark]: + """ + Normalize an iterable of Mark or MarkDecorator objects into a list of marks + by retrieving the `mark` attribute on MarkDecorator instances. + + :param mark_list: marks to normalize + :returns: A new list of the extracted Mark objects + """ + for mark in mark_list: + mark_obj = getattr(mark, "mark", mark) + if not isinstance(mark_obj, Mark): + raise TypeError(f"got {mark_obj!r} instead of Mark") + yield mark_obj + + +def store_mark(obj, mark: Mark, *, stacklevel: int = 2) -> None: + """Store a Mark on an object. + + This is used to implement the Mark declarations/decorators correctly. + """ + assert isinstance(mark, Mark), mark + + from ..fixtures import getfixturemarker + + if getfixturemarker(obj) is not None: + warnings.warn(MARKED_FIXTURE, stacklevel=stacklevel) + + # Always reassign name to avoid updating pytestmark in a reference that + # was only borrowed. + obj.pytestmark = [*get_unpacked_marks(obj, consider_mro=False), mark] + + +# Typing for builtin pytest marks. This is cheating; it gives builtin marks +# special privilege, and breaks modularity. But practicality beats purity... +if TYPE_CHECKING: + + class _SkipMarkDecorator(MarkDecorator): + @overload # type: ignore[override,no-overload-impl] + def __call__(self, arg: Markable) -> Markable: ... + + @overload + def __call__(self, reason: str = ...) -> MarkDecorator: ... + + class _SkipifMarkDecorator(MarkDecorator): + def __call__( # type: ignore[override] + self, + condition: str | bool = ..., + *conditions: str | bool, + reason: str = ..., + ) -> MarkDecorator: ... + + class _XfailMarkDecorator(MarkDecorator): + @overload # type: ignore[override,no-overload-impl] + def __call__(self, arg: Markable) -> Markable: ... + + @overload + def __call__( + self, + condition: str | bool = False, + *conditions: str | bool, + reason: str = ..., + run: bool = ..., + raises: None + | type[BaseException] + | tuple[type[BaseException], ...] + | AbstractRaises[BaseException] = ..., + strict: bool = ..., + ) -> MarkDecorator: ... + + class _ParametrizeMarkDecorator(MarkDecorator): + def __call__( # type: ignore[override] + self, + argnames: str | Sequence[str], + argvalues: Iterable[ParameterSet | Sequence[object] | object], + *, + indirect: bool | Sequence[str] = ..., + ids: Iterable[None | str | float | int | bool] + | Callable[[Any], object | None] + | None = ..., + scope: _ScopeName | None = ..., + ) -> MarkDecorator: ... + + class _UsefixturesMarkDecorator(MarkDecorator): + def __call__(self, *fixtures: str) -> MarkDecorator: # type: ignore[override] + ... + + class _FilterwarningsMarkDecorator(MarkDecorator): + def __call__(self, *filters: str) -> MarkDecorator: # type: ignore[override] + ... + + +@final +class MarkGenerator: + """Factory for :class:`MarkDecorator` objects - exposed as + a ``pytest.mark`` singleton instance. 
+ + Example:: + + import pytest + + + @pytest.mark.slowtest + def test_function(): + pass + + applies a 'slowtest' :class:`Mark` on ``test_function``. + """ + + # See TYPE_CHECKING above. + if TYPE_CHECKING: + skip: _SkipMarkDecorator + skipif: _SkipifMarkDecorator + xfail: _XfailMarkDecorator + parametrize: _ParametrizeMarkDecorator + usefixtures: _UsefixturesMarkDecorator + filterwarnings: _FilterwarningsMarkDecorator + + def __init__(self, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + self._config: Config | None = None + self._markers: set[str] = set() + + def __getattr__(self, name: str) -> MarkDecorator: + """Generate a new :class:`MarkDecorator` with the given name.""" + if name[0] == "_": + raise AttributeError("Marker name must NOT start with underscore") + + if self._config is not None: + # We store a set of markers as a performance optimisation - if a mark + # name is in the set we definitely know it, but a mark may be known and + # not in the set. We therefore start by updating the set! + if name not in self._markers: + for line in self._config.getini("markers"): + # example lines: "skipif(condition): skip the given test if..." + # or "hypothesis: tests which use Hypothesis", so to get the + # marker name we split on both `:` and `(`. + marker = line.split(":")[0].split("(")[0].strip() + self._markers.add(marker) + + # If the name is not in the set of known marks after updating, + # then it really is time to issue a warning or an error. + if name not in self._markers: + # Raise a specific error for common misspellings of "parametrize". + if name in ["parameterize", "parametrise", "parameterise"]: + __tracebackhide__ = True + fail(f"Unknown '{name}' mark, did you mean 'parametrize'?") + + strict_markers = self._config.getini("strict_markers") + if strict_markers is None: + strict_markers = self._config.getini("strict") + if strict_markers: + fail( + f"{name!r} not found in `markers` configuration option", + pytrace=False, + ) + + warnings.warn( + f"Unknown pytest.mark.{name} - is this a typo? You can register " + "custom marks to avoid this warning - for details, see " + "https://docs.pytest.org/en/stable/how-to/mark.html", + PytestUnknownMarkWarning, + 2, + ) + + return MarkDecorator(Mark(name, (), {}, _ispytest=True), _ispytest=True) + + +MARK_GEN = MarkGenerator(_ispytest=True) + + +@final +class NodeKeywords(MutableMapping[str, Any]): + __slots__ = ("_markers", "node", "parent") + + def __init__(self, node: Node) -> None: + self.node = node + self.parent = node.parent + self._markers = {node.name: True} + + def __getitem__(self, key: str) -> Any: + try: + return self._markers[key] + except KeyError: + if self.parent is None: + raise + return self.parent.keywords[key] + + def __setitem__(self, key: str, value: Any) -> None: + self._markers[key] = value + + # Note: we could've avoided explicitly implementing some of the methods + # below and use the collections.abc fallback, but that would be slow. + + def __contains__(self, key: object) -> bool: + return key in self._markers or ( + self.parent is not None and key in self.parent.keywords + ) + + def update( # type: ignore[override] + self, + other: Mapping[str, Any] | Iterable[tuple[str, Any]] = (), + **kwds: Any, + ) -> None: + self._markers.update(other) + self._markers.update(kwds) + + def __delitem__(self, key: str) -> None: + raise ValueError("cannot delete key in keywords dict") + + def __iter__(self) -> Iterator[str]: + # Doesn't need to be fast. 
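The ``MarkGenerator`` lookup above is why unregistered marks warn (or fail under ``strict_markers``). A minimal sketch, assuming a ``slow`` marker has been registered through the ``markers`` ini option (for example ``markers = slow: marks tests as slow`` under ``[pytest]`` in pytest.ini)::

    import pytest

    @pytest.mark.slow            # registered mark: no PytestUnknownMarkWarning
    def test_expensive_path():
        ...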
+ yield from self._markers + if self.parent is not None: + for keyword in self.parent.keywords: + # self._marks and self.parent.keywords can have duplicates. + if keyword not in self._markers: + yield keyword + + def __len__(self) -> int: + # Doesn't need to be fast. + return sum(1 for keyword in self) + + def __repr__(self) -> str: + return f"" diff --git a/.venv/lib/python3.12/site-packages/_pytest/monkeypatch.py b/.venv/lib/python3.12/site-packages/_pytest/monkeypatch.py new file mode 100644 index 0000000..07cc3fc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/monkeypatch.py @@ -0,0 +1,435 @@ +# mypy: allow-untyped-defs +"""Monkeypatching and mocking functionality.""" + +from __future__ import annotations + +from collections.abc import Generator +from collections.abc import Mapping +from collections.abc import MutableMapping +from contextlib import contextmanager +import os +from pathlib import Path +import re +import sys +from typing import Any +from typing import final +from typing import overload +from typing import TypeVar +import warnings + +from _pytest.deprecated import MONKEYPATCH_LEGACY_NAMESPACE_PACKAGES +from _pytest.fixtures import fixture +from _pytest.warning_types import PytestWarning + + +RE_IMPORT_ERROR_NAME = re.compile(r"^No module named (.*)$") + + +K = TypeVar("K") +V = TypeVar("V") + + +@fixture +def monkeypatch() -> Generator[MonkeyPatch]: + """A convenient fixture for monkey-patching. + + The fixture provides these methods to modify objects, dictionaries, or + :data:`os.environ`: + + * :meth:`monkeypatch.setattr(obj, name, value, raising=True) ` + * :meth:`monkeypatch.delattr(obj, name, raising=True) ` + * :meth:`monkeypatch.setitem(mapping, name, value) ` + * :meth:`monkeypatch.delitem(obj, name, raising=True) ` + * :meth:`monkeypatch.setenv(name, value, prepend=None) ` + * :meth:`monkeypatch.delenv(name, raising=True) ` + * :meth:`monkeypatch.syspath_prepend(path) ` + * :meth:`monkeypatch.chdir(path) ` + * :meth:`monkeypatch.context() ` + + All modifications will be undone after the requesting test function or + fixture has finished. The ``raising`` parameter determines if a :class:`KeyError` + or :class:`AttributeError` will be raised if the set/deletion operation does not have the + specified target. + + To undo modifications done by the fixture in a contained scope, + use :meth:`context() `. + """ + mpatch = MonkeyPatch() + yield mpatch + mpatch.undo() + + +def resolve(name: str) -> object: + # Simplified from zope.dottedname. + parts = name.split(".") + + used = parts.pop(0) + found: object = __import__(used) + for part in parts: + used += "." + part + try: + found = getattr(found, part) + except AttributeError: + pass + else: + continue + # We use explicit un-nesting of the handling block in order + # to avoid nested exceptions. + try: + __import__(used) + except ImportError as ex: + expected = str(ex).split()[-1] + if expected == used: + raise + else: + raise ImportError(f"import error in {used}: {ex}") from ex + found = annotated_getattr(found, part, used) + return found + + +def annotated_getattr(obj: object, name: str, ann: str) -> object: + try: + obj = getattr(obj, name) + except AttributeError as e: + raise AttributeError( + f"{type(obj).__name__!r} object at {ann} has no attribute {name!r}" + ) from e + return obj + + +def derive_importpath(import_path: str, raising: bool) -> tuple[str, object]: + if not isinstance(import_path, str) or "." 
not in import_path: + raise TypeError(f"must be absolute import path string, not {import_path!r}") + module, attr = import_path.rsplit(".", 1) + target = resolve(module) + if raising: + annotated_getattr(target, attr, ann=module) + return attr, target + + +class Notset: + def __repr__(self) -> str: + return "" + + +notset = Notset() + + +@final +class MonkeyPatch: + """Helper to conveniently monkeypatch attributes/items/environment + variables/syspath. + + Returned by the :fixture:`monkeypatch` fixture. + + .. versionchanged:: 6.2 + Can now also be used directly as `pytest.MonkeyPatch()`, for when + the fixture is not available. In this case, use + :meth:`with MonkeyPatch.context() as mp: ` or remember to call + :meth:`undo` explicitly. + """ + + def __init__(self) -> None: + self._setattr: list[tuple[object, str, object]] = [] + self._setitem: list[tuple[Mapping[Any, Any], object, object]] = [] + self._cwd: str | None = None + self._savesyspath: list[str] | None = None + + @classmethod + @contextmanager + def context(cls) -> Generator[MonkeyPatch]: + """Context manager that returns a new :class:`MonkeyPatch` object + which undoes any patching done inside the ``with`` block upon exit. + + Example: + + .. code-block:: python + + import functools + + + def test_partial(monkeypatch): + with monkeypatch.context() as m: + m.setattr(functools, "partial", 3) + + Useful in situations where it is desired to undo some patches before the test ends, + such as mocking ``stdlib`` functions that might break pytest itself if mocked (for examples + of this see :issue:`3290`). + """ + m = cls() + try: + yield m + finally: + m.undo() + + @overload + def setattr( + self, + target: str, + name: object, + value: Notset = ..., + raising: bool = ..., + ) -> None: ... + + @overload + def setattr( + self, + target: object, + name: str, + value: object, + raising: bool = ..., + ) -> None: ... + + def setattr( + self, + target: str | object, + name: object | str, + value: object = notset, + raising: bool = True, + ) -> None: + """ + Set attribute value on target, memorizing the old value. + + For example: + + .. code-block:: python + + import os + + monkeypatch.setattr(os, "getcwd", lambda: "/") + + The code above replaces the :func:`os.getcwd` function by a ``lambda`` which + always returns ``"/"``. + + For convenience, you can specify a string as ``target`` which + will be interpreted as a dotted import path, with the last part + being the attribute name: + + .. code-block:: python + + monkeypatch.setattr("os.getcwd", lambda: "/") + + Raises :class:`AttributeError` if the attribute does not exist, unless + ``raising`` is set to False. + + **Where to patch** + + ``monkeypatch.setattr`` works by (temporarily) changing the object that a name points to with another one. + There can be many names pointing to any individual object, so for patching to work you must ensure + that you patch the name used by the system under test. + + See the section :ref:`Where to patch ` in the :mod:`unittest.mock` + docs for a complete explanation, which is meant for :func:`unittest.mock.patch` but + applies to ``monkeypatch.setattr`` as well. 
+ """ + __tracebackhide__ = True + import inspect + + if isinstance(value, Notset): + if not isinstance(target, str): + raise TypeError( + "use setattr(target, name, value) or " + "setattr(target, value) with target being a dotted " + "import string" + ) + value = name + name, target = derive_importpath(target, raising) + else: + if not isinstance(name, str): + raise TypeError( + "use setattr(target, name, value) with name being a string or " + "setattr(target, value) with target being a dotted " + "import string" + ) + + oldval = getattr(target, name, notset) + if raising and oldval is notset: + raise AttributeError(f"{target!r} has no attribute {name!r}") + + # avoid class descriptors like staticmethod/classmethod + if inspect.isclass(target): + oldval = target.__dict__.get(name, notset) + self._setattr.append((target, name, oldval)) + setattr(target, name, value) + + def delattr( + self, + target: object | str, + name: str | Notset = notset, + raising: bool = True, + ) -> None: + """Delete attribute ``name`` from ``target``. + + If no ``name`` is specified and ``target`` is a string + it will be interpreted as a dotted import path with the + last part being the attribute name. + + Raises AttributeError it the attribute does not exist, unless + ``raising`` is set to False. + """ + __tracebackhide__ = True + import inspect + + if isinstance(name, Notset): + if not isinstance(target, str): + raise TypeError( + "use delattr(target, name) or " + "delattr(target) with target being a dotted " + "import string" + ) + name, target = derive_importpath(target, raising) + + if not hasattr(target, name): + if raising: + raise AttributeError(name) + else: + oldval = getattr(target, name, notset) + # Avoid class descriptors like staticmethod/classmethod. + if inspect.isclass(target): + oldval = target.__dict__.get(name, notset) + self._setattr.append((target, name, oldval)) + delattr(target, name) + + def setitem(self, dic: Mapping[K, V], name: K, value: V) -> None: + """Set dictionary entry ``name`` to value.""" + self._setitem.append((dic, name, dic.get(name, notset))) + # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict + dic[name] = value # type: ignore[index] + + def delitem(self, dic: Mapping[K, V], name: K, raising: bool = True) -> None: + """Delete ``name`` from dict. + + Raises ``KeyError`` if it doesn't exist, unless ``raising`` is set to + False. + """ + if name not in dic: + if raising: + raise KeyError(name) + else: + self._setitem.append((dic, name, dic.get(name, notset))) + # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict + del dic[name] # type: ignore[attr-defined] + + def setenv(self, name: str, value: str, prepend: str | None = None) -> None: + """Set environment variable ``name`` to ``value``. + + If ``prepend`` is a character, read the current environment variable + value and prepend the ``value`` adjoined with the ``prepend`` + character. + """ + if not isinstance(value, str): + warnings.warn( # type: ignore[unreachable] + PytestWarning( + f"Value of environment variable {name} type should be str, but got " + f"{value!r} (type: {type(value).__name__}); converted to str implicitly" + ), + stacklevel=2, + ) + value = str(value) + if prepend and name in os.environ: + value = value + prepend + os.environ[name] + self.setitem(os.environ, name, value) + + def delenv(self, name: str, raising: bool = True) -> None: + """Delete ``name`` from the environment. 
+ + Raises ``KeyError`` if it does not exist, unless ``raising`` is set to + False. + """ + environ: MutableMapping[str, str] = os.environ + self.delitem(environ, name, raising=raising) + + def syspath_prepend(self, path) -> None: + """Prepend ``path`` to ``sys.path`` list of import locations.""" + if self._savesyspath is None: + self._savesyspath = sys.path[:] + sys.path.insert(0, str(path)) + + # https://github.com/pypa/setuptools/blob/d8b901bc/docs/pkg_resources.txt#L162-L171 + # this is only needed when pkg_resources was already loaded by the namespace package + if "pkg_resources" in sys.modules: + import pkg_resources + from pkg_resources import fixup_namespace_packages + + # Only issue deprecation warning if this call would actually have an + # effect for this specific path. + if ( + hasattr(pkg_resources, "_namespace_packages") + and pkg_resources._namespace_packages + ): + path_obj = Path(str(path)) + for ns_pkg in pkg_resources._namespace_packages: + if ns_pkg is None: + continue + ns_pkg_path = path_obj / ns_pkg.replace(".", os.sep) + if ns_pkg_path.is_dir(): + warnings.warn( + MONKEYPATCH_LEGACY_NAMESPACE_PACKAGES, stacklevel=2 + ) + break + + fixup_namespace_packages(str(path)) + + # A call to syspathinsert() usually means that the caller wants to + # import some dynamically created files, thus with python3 we + # invalidate its import caches. + # This is especially important when any namespace package is in use, + # since then the mtime based FileFinder cache (that gets created in + # this case already) gets not invalidated when writing the new files + # quickly afterwards. + from importlib import invalidate_caches + + invalidate_caches() + + def chdir(self, path: str | os.PathLike[str]) -> None: + """Change the current working directory to the specified path. + + :param path: + The path to change into. + """ + if self._cwd is None: + self._cwd = os.getcwd() + os.chdir(path) + + def undo(self) -> None: + """Undo previous changes. + + This call consumes the undo stack. Calling it a second time has no + effect unless you do more monkeypatching after the undo call. + + There is generally no need to call `undo()`, since it is + called automatically during tear-down. + + .. note:: + The same `monkeypatch` fixture is used across a + single test function invocation. If `monkeypatch` is used both by + the test function itself and one of the test fixtures, + calling `undo()` will undo all of the changes made in + both functions. + + Prefer to use :meth:`context() ` instead. + """ + for obj, name, value in reversed(self._setattr): + if value is not notset: + setattr(obj, name, value) + else: + delattr(obj, name) + self._setattr[:] = [] + for dictionary, key, value in reversed(self._setitem): + if value is notset: + try: + # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict + del dictionary[key] # type: ignore[attr-defined] + except KeyError: + pass # Was already deleted, so we have the desired state. 
+ else: + # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict + dictionary[key] = value # type: ignore[index] + self._setitem[:] = [] + if self._savesyspath is not None: + sys.path[:] = self._savesyspath + self._savesyspath = None + + if self._cwd is not None: + os.chdir(self._cwd) + self._cwd = None diff --git a/.venv/lib/python3.12/site-packages/_pytest/nodes.py b/.venv/lib/python3.12/site-packages/_pytest/nodes.py new file mode 100644 index 0000000..6690f6a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/nodes.py @@ -0,0 +1,772 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +import abc +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import MutableMapping +from functools import cached_property +from functools import lru_cache +import os +import pathlib +from pathlib import Path +from typing import Any +from typing import cast +from typing import NoReturn +from typing import overload +from typing import TYPE_CHECKING +from typing import TypeVar +import warnings + +import pluggy + +import _pytest._code +from _pytest._code import getfslineno +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import TerminalRepr +from _pytest._code.code import Traceback +from _pytest._code.code import TracebackStyle +from _pytest.compat import LEGACY_PATH +from _pytest.compat import signature +from _pytest.config import Config +from _pytest.config import ConftestImportFailure +from _pytest.config.compat import _check_path +from _pytest.deprecated import NODE_CTOR_FSPATH_ARG +from _pytest.mark.structures import Mark +from _pytest.mark.structures import MarkDecorator +from _pytest.mark.structures import NodeKeywords +from _pytest.outcomes import fail +from _pytest.pathlib import absolutepath +from _pytest.stash import Stash +from _pytest.warning_types import PytestWarning + + +if TYPE_CHECKING: + from typing_extensions import Self + + # Imported here due to circular import. + from _pytest.main import Session + + +SEP = "/" + +tracebackcutdir = Path(_pytest.__file__).parent + + +_T = TypeVar("_T") + + +def _imply_path( + node_type: type[Node], + path: Path | None, + fspath: LEGACY_PATH | None, +) -> Path: + if fspath is not None: + warnings.warn( + NODE_CTOR_FSPATH_ARG.format( + node_type_name=node_type.__name__, + ), + stacklevel=6, + ) + if path is not None: + if fspath is not None: + _check_path(path, fspath) + return path + else: + assert fspath is not None + return Path(fspath) + + +_NodeType = TypeVar("_NodeType", bound="Node") + + +class NodeMeta(abc.ABCMeta): + """Metaclass used by :class:`Node` to enforce that direct construction raises + :class:`Failed`. + + This behaviour supports the indirection introduced with :meth:`Node.from_parent`, + the named constructor to be used instead of direct construction. The design + decision to enforce indirection with :class:`NodeMeta` was made as a + temporary aid for refactoring the collection tree, which was diagnosed to + have :class:`Node` objects whose creational patterns were overly entangled. + Once the refactoring is complete, this metaclass can be removed. + + See https://github.com/pytest-dev/pytest/projects/3 for an overview of the + progress on detangling the :class:`Node` classes. 
+ """ + + def __call__(cls, *k, **kw) -> NoReturn: + msg = ( + "Direct construction of {name} has been deprecated, please use {name}.from_parent.\n" + "See " + "https://docs.pytest.org/en/stable/deprecations.html#node-construction-changed-to-node-from-parent" + " for more details." + ).format(name=f"{cls.__module__}.{cls.__name__}") + fail(msg, pytrace=False) + + def _create(cls: type[_T], *k, **kw) -> _T: + try: + return super().__call__(*k, **kw) # type: ignore[no-any-return,misc] + except TypeError: + sig = signature(getattr(cls, "__init__")) + known_kw = {k: v for k, v in kw.items() if k in sig.parameters} + from .warning_types import PytestDeprecationWarning + + warnings.warn( + PytestDeprecationWarning( + f"{cls} is not using a cooperative constructor and only takes {set(known_kw)}.\n" + "See https://docs.pytest.org/en/stable/deprecations.html" + "#constructors-of-custom-pytest-node-subclasses-should-take-kwargs " + "for more details." + ) + ) + + return super().__call__(*k, **known_kw) # type: ignore[no-any-return,misc] + + +class Node(abc.ABC, metaclass=NodeMeta): + r"""Base class of :class:`Collector` and :class:`Item`, the components of + the test collection tree. + + ``Collector``\'s are the internal nodes of the tree, and ``Item``\'s are the + leaf nodes. + """ + + # Implemented in the legacypath plugin. + #: A ``LEGACY_PATH`` copy of the :attr:`path` attribute. Intended for usage + #: for methods not migrated to ``pathlib.Path`` yet, such as + #: :meth:`Item.reportinfo `. Will be deprecated in + #: a future release, prefer using :attr:`path` instead. + fspath: LEGACY_PATH + + # Use __slots__ to make attribute access faster. + # Note that __dict__ is still available. + __slots__ = ( + "__dict__", + "_nodeid", + "_store", + "config", + "name", + "parent", + "path", + "session", + ) + + def __init__( + self, + name: str, + parent: Node | None = None, + config: Config | None = None, + session: Session | None = None, + fspath: LEGACY_PATH | None = None, + path: Path | None = None, + nodeid: str | None = None, + ) -> None: + #: A unique name within the scope of the parent node. + self.name: str = name + + #: The parent collector node. + self.parent = parent + + if config: + #: The pytest config object. + self.config: Config = config + else: + if not parent: + raise TypeError("config or parent must be provided") + self.config = parent.config + + if session: + #: The pytest session this node is part of. + self.session: Session = session + else: + if not parent: + raise TypeError("session or parent must be provided") + self.session = parent.session + + if path is None and fspath is None: + path = getattr(parent, "path", None) + #: Filesystem path where this node was collected from (can be None). + self.path: pathlib.Path = _imply_path(type(self), path, fspath=fspath) + + # The explicit annotation is to avoid publicly exposing NodeKeywords. + #: Keywords/markers collected from all scopes. + self.keywords: MutableMapping[str, Any] = NodeKeywords(self) + + #: The marker objects belonging to this node. + self.own_markers: list[Mark] = [] + + #: Allow adding of extra keywords to use for matching. + self.extra_keyword_matches: set[str] = set() + + if nodeid is not None: + assert "::()" not in nodeid + self._nodeid = nodeid + else: + if not self.parent: + raise TypeError("nodeid or parent must be provided") + self._nodeid = self.parent.nodeid + "::" + self.name + + #: A place where plugins can store information on the node for their + #: own use. 
+ self.stash: Stash = Stash() + # Deprecated alias. Was never public. Can be removed in a few releases. + self._store = self.stash + + @classmethod + def from_parent(cls, parent: Node, **kw) -> Self: + """Public constructor for Nodes. + + This indirection got introduced in order to enable removing + the fragile logic from the node constructors. + + Subclasses can use ``super().from_parent(...)`` when overriding the + construction. + + :param parent: The parent node of this Node. + """ + if "config" in kw: + raise TypeError("config is not a valid argument for from_parent") + if "session" in kw: + raise TypeError("session is not a valid argument for from_parent") + return cls._create(parent=parent, **kw) + + @property + def ihook(self) -> pluggy.HookRelay: + """fspath-sensitive hook proxy used to call pytest hooks.""" + return self.session.gethookproxy(self.path) + + def __repr__(self) -> str: + return "<{} {}>".format(self.__class__.__name__, getattr(self, "name", None)) + + def warn(self, warning: Warning) -> None: + """Issue a warning for this Node. + + Warnings will be displayed after the test session, unless explicitly suppressed. + + :param Warning warning: + The warning instance to issue. + + :raises ValueError: If ``warning`` instance is not a subclass of Warning. + + Example usage: + + .. code-block:: python + + node.warn(PytestWarning("some message")) + node.warn(UserWarning("some message")) + + .. versionchanged:: 6.2 + Any subclass of :class:`Warning` is now accepted, rather than only + :class:`PytestWarning ` subclasses. + """ + # enforce type checks here to avoid getting a generic type error later otherwise. + if not isinstance(warning, Warning): + raise ValueError( + f"warning must be an instance of Warning or subclass, got {warning!r}" + ) + path, lineno = get_fslocation_from_item(self) + assert lineno is not None + warnings.warn_explicit( + warning, + category=None, + filename=str(path), + lineno=lineno + 1, + ) + + # Methods for ordering nodes. + + @property + def nodeid(self) -> str: + """A ::-separated string denoting its collection tree address.""" + return self._nodeid + + def __hash__(self) -> int: + return hash(self._nodeid) + + def setup(self) -> None: + pass + + def teardown(self) -> None: + pass + + def iter_parents(self) -> Iterator[Node]: + """Iterate over all parent collectors starting from and including self + up to the root of the collection tree. + + .. versionadded:: 8.1 + """ + parent: Node | None = self + while parent is not None: + yield parent + parent = parent.parent + + def listchain(self) -> list[Node]: + """Return a list of all parent collectors starting from the root of the + collection tree down to and including self.""" + chain = [] + item: Node | None = self + while item is not None: + chain.append(item) + item = item.parent + chain.reverse() + return chain + + def add_marker(self, marker: str | MarkDecorator, append: bool = True) -> None: + """Dynamically add a marker object to the node. + + :param marker: + The marker. + :param append: + Whether to append the marker, or prepend it. 
+ """ + from _pytest.mark import MARK_GEN + + if isinstance(marker, MarkDecorator): + marker_ = marker + elif isinstance(marker, str): + marker_ = getattr(MARK_GEN, marker) + else: + raise ValueError("is not a string or pytest.mark.* Marker") + self.keywords[marker_.name] = marker_ + if append: + self.own_markers.append(marker_.mark) + else: + self.own_markers.insert(0, marker_.mark) + + def iter_markers(self, name: str | None = None) -> Iterator[Mark]: + """Iterate over all markers of the node. + + :param name: If given, filter the results by the name attribute. + :returns: An iterator of the markers of the node. + """ + return (x[1] for x in self.iter_markers_with_node(name=name)) + + def iter_markers_with_node( + self, name: str | None = None + ) -> Iterator[tuple[Node, Mark]]: + """Iterate over all markers of the node. + + :param name: If given, filter the results by the name attribute. + :returns: An iterator of (node, mark) tuples. + """ + for node in self.iter_parents(): + for mark in node.own_markers: + if name is None or getattr(mark, "name", None) == name: + yield node, mark + + @overload + def get_closest_marker(self, name: str) -> Mark | None: ... + + @overload + def get_closest_marker(self, name: str, default: Mark) -> Mark: ... + + def get_closest_marker(self, name: str, default: Mark | None = None) -> Mark | None: + """Return the first marker matching the name, from closest (for + example function) to farther level (for example module level). + + :param default: Fallback return value if no marker was found. + :param name: Name to filter by. + """ + return next(self.iter_markers(name=name), default) + + def listextrakeywords(self) -> set[str]: + """Return a set of all extra keywords in self and any parents.""" + extra_keywords: set[str] = set() + for item in self.listchain(): + extra_keywords.update(item.extra_keyword_matches) + return extra_keywords + + def listnames(self) -> list[str]: + return [x.name for x in self.listchain()] + + def addfinalizer(self, fin: Callable[[], object]) -> None: + """Register a function to be called without arguments when this node is + finalized. + + This method can only be called when this node is active + in a setup chain, for example during self.setup(). + """ + self.session._setupstate.addfinalizer(fin, self) + + def getparent(self, cls: type[_NodeType]) -> _NodeType | None: + """Get the closest parent node (including self) which is an instance of + the given class. + + :param cls: The node class to search for. + :returns: The node, if found. + """ + for node in self.iter_parents(): + if isinstance(node, cls): + return node + return None + + def _traceback_filter(self, excinfo: ExceptionInfo[BaseException]) -> Traceback: + return excinfo.traceback + + def _repr_failure_py( + self, + excinfo: ExceptionInfo[BaseException], + style: TracebackStyle | None = None, + ) -> TerminalRepr: + from _pytest.fixtures import FixtureLookupError + + if isinstance(excinfo.value, ConftestImportFailure): + excinfo = ExceptionInfo.from_exception(excinfo.value.cause) + if isinstance(excinfo.value, fail.Exception): + if not excinfo.value.pytrace: + style = "value" + if isinstance(excinfo.value, FixtureLookupError): + return excinfo.value.formatrepr() + + tbfilter: bool | Callable[[ExceptionInfo[BaseException]], Traceback] + if self.config.getoption("fulltrace", False): + style = "long" + tbfilter = False + else: + tbfilter = self._traceback_filter + if style == "auto": + style = "long" + # XXX should excinfo.getrepr record all data and toterminal() process it? 
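(Illustrative aside, not part of the vendored file.) The marker API above — ``add_marker``, ``iter_markers`` and ``get_closest_marker`` — is what fixtures and hooks normally use to read markers at run time. A minimal sketch, assuming a hypothetical ``@pytest.mark.max_duration(seconds)`` marker; the fixture and marker names are made up for illustration:

    import time

    import pytest

    @pytest.fixture(autouse=True)
    def enforce_max_duration(request):
        # get_closest_marker() resolves the nearest matching Mark, walking from
        # the test function up to class/module level, or returns None if absent.
        marker = request.node.get_closest_marker("max_duration")
        limit = marker.args[0] if marker is not None and marker.args else None
        start = time.monotonic()
        yield
        if limit is not None and time.monotonic() - start > limit:
            pytest.fail(f"test exceeded max_duration of {limit}s")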
+ if style is None: + if self.config.getoption("tbstyle", "auto") == "short": + style = "short" + else: + style = "long" + + if self.config.get_verbosity() > 1: + truncate_locals = False + else: + truncate_locals = True + + truncate_args = False if self.config.get_verbosity() > 2 else True + + # excinfo.getrepr() formats paths relative to the CWD if `abspath` is False. + # It is possible for a fixture/test to change the CWD while this code runs, which + # would then result in the user seeing confusing paths in the failure message. + # To fix this, if the CWD changed, always display the full absolute path. + # It will be better to just always display paths relative to invocation_dir, but + # this requires a lot of plumbing (#6428). + try: + abspath = Path(os.getcwd()) != self.config.invocation_params.dir + except OSError: + abspath = True + + return excinfo.getrepr( + funcargs=True, + abspath=abspath, + showlocals=self.config.getoption("showlocals", False), + style=style, + tbfilter=tbfilter, + truncate_locals=truncate_locals, + truncate_args=truncate_args, + ) + + def repr_failure( + self, + excinfo: ExceptionInfo[BaseException], + style: TracebackStyle | None = None, + ) -> str | TerminalRepr: + """Return a representation of a collection or test failure. + + .. seealso:: :ref:`non-python tests` + + :param excinfo: Exception information for the failure. + """ + return self._repr_failure_py(excinfo, style) + + +def get_fslocation_from_item(node: Node) -> tuple[str | Path, int | None]: + """Try to extract the actual location from a node, depending on available attributes: + + * "location": a pair (path, lineno) + * "obj": a Python object that the node wraps. + * "path": just a path + + :rtype: A tuple of (str|Path, int) with filename and 0-based line number. + """ + # See Item.location. + location: tuple[str, int | None, str] | None = getattr(node, "location", None) + if location is not None: + return location[:2] + obj = getattr(node, "obj", None) + if obj is not None: + return getfslineno(obj) + return getattr(node, "path", "unknown location"), -1 + + +class Collector(Node, abc.ABC): + """Base class of all collectors. + + Collector create children through `collect()` and thus iteratively build + the collection tree. + """ + + class CollectError(Exception): + """An error during collection, contains a custom message.""" + + @abc.abstractmethod + def collect(self) -> Iterable[Item | Collector]: + """Collect children (items and collectors) for this collector.""" + raise NotImplementedError("abstract") + + # TODO: This omits the style= parameter which breaks Liskov Substitution. + def repr_failure( # type: ignore[override] + self, excinfo: ExceptionInfo[BaseException] + ) -> str | TerminalRepr: + """Return a representation of a collection failure. + + :param excinfo: Exception information for the failure. + """ + if isinstance(excinfo.value, self.CollectError) and not self.config.getoption( + "fulltrace", False + ): + exc = excinfo.value + return str(exc.args[0]) + + # Respect explicit tbstyle option, but default to "short" + # (_repr_failure_py uses "long" with "fulltrace" option always). 
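(Illustrative aside, not part of the vendored file.) ``Collector.collect()`` together with ``Node.from_parent()`` above is the pattern plugins use to extend the collection tree; nodes are never constructed directly, which is exactly what ``NodeMeta.__call__`` enforces. A minimal sketch with hypothetical ``SimpleFile``/``SimpleItem`` classes, loosely following the non-python-tests example in the pytest docs:

    import pytest

    class SimpleItem(pytest.Item):
        def __init__(self, *, expr, **kwargs):
            super().__init__(**kwargs)  # cooperative constructor, as _create() expects
            self.expr = expr

        def runtest(self):
            assert eval(self.expr)

    class SimpleFile(pytest.File):
        def collect(self):
            for i, line in enumerate(self.path.read_text().splitlines()):
                if not line.strip():
                    continue
                # from_parent() is the supported way to build nodes; calling
                # SimpleItem(...) directly would be rejected by NodeMeta.__call__.
                yield SimpleItem.from_parent(self, name=f"line{i}", expr=line)

    def pytest_collect_file(parent, file_path):
        if file_path.suffix == ".simple":
            return SimpleFile.from_parent(parent, path=file_path)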
+ tbstyle = self.config.getoption("tbstyle", "auto") + if tbstyle == "auto": + tbstyle = "short" + + return self._repr_failure_py(excinfo, style=tbstyle) + + def _traceback_filter(self, excinfo: ExceptionInfo[BaseException]) -> Traceback: + if hasattr(self, "path"): + traceback = excinfo.traceback + ntraceback = traceback.cut(path=self.path) + if ntraceback == traceback: + ntraceback = ntraceback.cut(excludepath=tracebackcutdir) + return ntraceback.filter(excinfo) + return excinfo.traceback + + +@lru_cache(maxsize=1000) +def _check_initialpaths_for_relpath( + initial_paths: frozenset[Path], path: Path +) -> str | None: + if path in initial_paths: + return "" + + for parent in path.parents: + if parent in initial_paths: + return str(path.relative_to(parent)) + + return None + + +class FSCollector(Collector, abc.ABC): + """Base class for filesystem collectors.""" + + def __init__( + self, + fspath: LEGACY_PATH | None = None, + path_or_parent: Path | Node | None = None, + path: Path | None = None, + name: str | None = None, + parent: Node | None = None, + config: Config | None = None, + session: Session | None = None, + nodeid: str | None = None, + ) -> None: + if path_or_parent: + if isinstance(path_or_parent, Node): + assert parent is None + parent = cast(FSCollector, path_or_parent) + elif isinstance(path_or_parent, Path): + assert path is None + path = path_or_parent + + path = _imply_path(type(self), path, fspath=fspath) + if name is None: + name = path.name + if parent is not None and parent.path != path: + try: + rel = path.relative_to(parent.path) + except ValueError: + pass + else: + name = str(rel) + name = name.replace(os.sep, SEP) + self.path = path + + if session is None: + assert parent is not None + session = parent.session + + if nodeid is None: + try: + nodeid = str(self.path.relative_to(session.config.rootpath)) + except ValueError: + nodeid = _check_initialpaths_for_relpath(session._initialpaths, path) + + if nodeid and os.sep != SEP: + nodeid = nodeid.replace(os.sep, SEP) + + super().__init__( + name=name, + parent=parent, + config=config, + session=session, + nodeid=nodeid, + path=path, + ) + + @classmethod + def from_parent( + cls, + parent, + *, + fspath: LEGACY_PATH | None = None, + path: Path | None = None, + **kw, + ) -> Self: + """The public constructor.""" + return super().from_parent(parent=parent, fspath=fspath, path=path, **kw) + + +class File(FSCollector, abc.ABC): + """Base class for collecting tests from a file. + + :ref:`non-python tests`. + """ + + +class Directory(FSCollector, abc.ABC): + """Base class for collecting files from a directory. + + A basic directory collector does the following: goes over the files and + sub-directories in the directory and creates collectors for them by calling + the hooks :hook:`pytest_collect_directory` and :hook:`pytest_collect_file`, + after checking that they are not ignored using + :hook:`pytest_ignore_collect`. + + The default directory collectors are :class:`~pytest.Dir` and + :class:`~pytest.Package`. + + .. versionadded:: 8.0 + + :ref:`custom directory collectors`. + """ + + +class Item(Node, abc.ABC): + """Base class of all test invocation items. + + Note that for a single function there might be multiple test invocation items. 
+ """ + + nextitem = None + + def __init__( + self, + name, + parent=None, + config: Config | None = None, + session: Session | None = None, + nodeid: str | None = None, + **kw, + ) -> None: + # The first two arguments are intentionally passed positionally, + # to keep plugins who define a node type which inherits from + # (pytest.Item, pytest.File) working (see issue #8435). + # They can be made kwargs when the deprecation above is done. + super().__init__( + name, + parent, + config=config, + session=session, + nodeid=nodeid, + **kw, + ) + self._report_sections: list[tuple[str, str, str]] = [] + + #: A list of tuples (name, value) that holds user defined properties + #: for this test. + self.user_properties: list[tuple[str, object]] = [] + + self._check_item_and_collector_diamond_inheritance() + + def _check_item_and_collector_diamond_inheritance(self) -> None: + """ + Check if the current type inherits from both File and Collector + at the same time, emitting a warning accordingly (#8447). + """ + cls = type(self) + + # We inject an attribute in the type to avoid issuing this warning + # for the same class more than once, which is not helpful. + # It is a hack, but was deemed acceptable in order to avoid + # flooding the user in the common case. + attr_name = "_pytest_diamond_inheritance_warning_shown" + if getattr(cls, attr_name, False): + return + setattr(cls, attr_name, True) + + problems = ", ".join( + base.__name__ for base in cls.__bases__ if issubclass(base, Collector) + ) + if problems: + warnings.warn( + f"{cls.__name__} is an Item subclass and should not be a collector, " + f"however its bases {problems} are collectors.\n" + "Please split the Collectors and the Item into separate node types.\n" + "Pytest Doc example: https://docs.pytest.org/en/latest/example/nonpython.html\n" + "example pull request on a plugin: https://github.com/asmeurer/pytest-flakes/pull/40/", + PytestWarning, + ) + + @abc.abstractmethod + def runtest(self) -> None: + """Run the test case for this item. + + Must be implemented by subclasses. + + .. seealso:: :ref:`non-python tests` + """ + raise NotImplementedError("runtest must be implemented by Item subclass") + + def add_report_section(self, when: str, key: str, content: str) -> None: + """Add a new report section, similar to what's done internally to add + stdout and stderr captured output:: + + item.add_report_section("call", "stdout", "report section contents") + + :param str when: + One of the possible capture states, ``"setup"``, ``"call"``, ``"teardown"``. + :param str key: + Name of the section, can be customized at will. Pytest uses ``"stdout"`` and + ``"stderr"`` internally. + :param str content: + The full contents as a string. + """ + if content: + self._report_sections.append((when, key, content)) + + def reportinfo(self) -> tuple[os.PathLike[str] | str, int | None, str]: + """Get location information for this item for test reports. + + Returns a tuple with three elements: + + - The path of the test (default ``self.path``) + - The 0-based line number of the test (default ``None``) + - A name of the test to be shown (default ``""``) + + .. seealso:: :ref:`non-python tests` + """ + return self.path, None, "" + + @cached_property + def location(self) -> tuple[str, int | None, str]: + """ + Returns a tuple of ``(relfspath, lineno, testname)`` for this item + where ``relfspath`` is file path relative to ``config.rootpath`` + and lineno is a 0-based line number. 
+ """ + location = self.reportinfo() + path = absolutepath(location[0]) + relfspath = self.session._node_location_to_relpath(path) + assert type(location[2]) is str + return (relfspath, location[1], location[2]) diff --git a/.venv/lib/python3.12/site-packages/_pytest/outcomes.py b/.venv/lib/python3.12/site-packages/_pytest/outcomes.py new file mode 100644 index 0000000..766be95 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/outcomes.py @@ -0,0 +1,308 @@ +"""Exception classes and constants handling test outcomes as well as +functions creating them.""" + +from __future__ import annotations + +import sys +from typing import Any +from typing import ClassVar +from typing import NoReturn + +from .warning_types import PytestDeprecationWarning + + +class OutcomeException(BaseException): + """OutcomeException and its subclass instances indicate and contain info + about test and collection outcomes.""" + + def __init__(self, msg: str | None = None, pytrace: bool = True) -> None: + if msg is not None and not isinstance(msg, str): + error_msg = ( # type: ignore[unreachable] + "{} expected string as 'msg' parameter, got '{}' instead.\n" + "Perhaps you meant to use a mark?" + ) + raise TypeError(error_msg.format(type(self).__name__, type(msg).__name__)) + super().__init__(msg) + self.msg = msg + self.pytrace = pytrace + + def __repr__(self) -> str: + if self.msg is not None: + return self.msg + return f"<{self.__class__.__name__} instance>" + + __str__ = __repr__ + + +TEST_OUTCOME = (OutcomeException, Exception) + + +class Skipped(OutcomeException): + # XXX hackish: on 3k we fake to live in the builtins + # in order to have Skipped exception printing shorter/nicer + __module__ = "builtins" + + def __init__( + self, + msg: str | None = None, + pytrace: bool = True, + allow_module_level: bool = False, + *, + _use_item_location: bool = False, + ) -> None: + super().__init__(msg=msg, pytrace=pytrace) + self.allow_module_level = allow_module_level + # If true, the skip location is reported as the item's location, + # instead of the place that raises the exception/calls skip(). + self._use_item_location = _use_item_location + + +class Failed(OutcomeException): + """Raised from an explicit call to pytest.fail().""" + + __module__ = "builtins" + + +class Exit(Exception): + """Raised for immediate program exits (no tracebacks/summaries).""" + + def __init__( + self, msg: str = "unknown reason", returncode: int | None = None + ) -> None: + self.msg = msg + self.returncode = returncode + super().__init__(msg) + + +class XFailed(Failed): + """Raised from an explicit call to pytest.xfail().""" + + +class _Exit: + """Exit testing process. + + :param reason: + The message to show as the reason for exiting pytest. reason has a default value + only because `msg` is deprecated. + + :param returncode: + Return code to be used when exiting pytest. None means the same as ``0`` (no error), + same as :func:`sys.exit`. + + :raises pytest.exit.Exception: + The exception that is raised. + """ + + Exception: ClassVar[type[Exit]] = Exit + + def __call__(self, reason: str = "", returncode: int | None = None) -> NoReturn: + __tracebackhide__ = True + raise Exit(msg=reason, returncode=returncode) + + +exit: _Exit = _Exit() + + +class _Skip: + """Skip an executing test with the given message. + + This function should be called only during testing (setup, call or teardown) or + during collection by using the ``allow_module_level`` flag. This function can + be called in doctests as well. 
+ + :param reason: + The message to show the user as reason for the skip. + + :param allow_module_level: + Allows this function to be called at module level. + Raising the skip exception at module level will stop + the execution of the module and prevent the collection of all tests in the module, + even those defined before the `skip` call. + + Defaults to False. + + :raises pytest.skip.Exception: + The exception that is raised. + + .. note:: + It is better to use the :ref:`pytest.mark.skipif ref` marker when + possible to declare a test to be skipped under certain conditions + like mismatching platforms or dependencies. + Similarly, use the ``# doctest: +SKIP`` directive (see :py:data:`doctest.SKIP`) + to skip a doctest statically. + """ + + Exception: ClassVar[type[Skipped]] = Skipped + + def __call__(self, reason: str = "", allow_module_level: bool = False) -> NoReturn: + __tracebackhide__ = True + raise Skipped(msg=reason, allow_module_level=allow_module_level) + + +skip: _Skip = _Skip() + + +class _Fail: + """Explicitly fail an executing test with the given message. + + :param reason: + The message to show the user as reason for the failure. + + :param pytrace: + If False, msg represents the full failure information and no + python traceback will be reported. + + :raises pytest.fail.Exception: + The exception that is raised. + """ + + Exception: ClassVar[type[Failed]] = Failed + + def __call__(self, reason: str = "", pytrace: bool = True) -> NoReturn: + __tracebackhide__ = True + raise Failed(msg=reason, pytrace=pytrace) + + +fail: _Fail = _Fail() + + +class _XFail: + """Imperatively xfail an executing test or setup function with the given reason. + + This function should be called only during testing (setup, call or teardown). + + No other code is executed after using ``xfail()`` (it is implemented + internally by raising an exception). + + :param reason: + The message to show the user as reason for the xfail. + + .. note:: + It is better to use the :ref:`pytest.mark.xfail ref` marker when + possible to declare a test to be xfailed under certain conditions + like known bugs or missing features. + + :raises pytest.xfail.Exception: + The exception that is raised. + """ + + Exception: ClassVar[type[XFailed]] = XFailed + + def __call__(self, reason: str = "") -> NoReturn: + __tracebackhide__ = True + raise XFailed(msg=reason) + + +xfail: _XFail = _XFail() + + +def importorskip( + modname: str, + minversion: str | None = None, + reason: str | None = None, + *, + exc_type: type[ImportError] | None = None, +) -> Any: + """Import and return the requested module ``modname``, or skip the + current test if the module cannot be imported. + + :param modname: + The name of the module to import. + :param minversion: + If given, the imported module's ``__version__`` attribute must be at + least this minimal version, otherwise the test is still skipped. + :param reason: + If given, this reason is shown as the message when the module cannot + be imported. + :param exc_type: + The exception that should be captured in order to skip modules. + Must be :py:class:`ImportError` or a subclass. + + If the module can be imported but raises :class:`ImportError`, pytest will + issue a warning to the user, as often users expect the module not to be + found (which would raise :class:`ModuleNotFoundError` instead). + + This warning can be suppressed by passing ``exc_type=ImportError`` explicitly. + + See :ref:`import-or-skip-import-error` for details. + + + :returns: + The imported module. 
This should be assigned to its canonical name. + + :raises pytest.skip.Exception: + If the module cannot be imported. + + Example:: + + docutils = pytest.importorskip("docutils") + + .. versionadded:: 8.2 + + The ``exc_type`` parameter. + """ + import warnings + + __tracebackhide__ = True + compile(modname, "", "eval") # to catch syntaxerrors + + # Until pytest 9.1, we will warn the user if we catch ImportError (instead of ModuleNotFoundError), + # as this might be hiding an installation/environment problem, which is not usually what is intended + # when using importorskip() (#11523). + # In 9.1, to keep the function signature compatible, we just change the code below to: + # 1. Use `exc_type = ModuleNotFoundError` if `exc_type` is not given. + # 2. Remove `warn_on_import` and the warning handling. + if exc_type is None: + exc_type = ImportError + warn_on_import_error = True + else: + warn_on_import_error = False + + skipped: Skipped | None = None + warning: Warning | None = None + + with warnings.catch_warnings(): + # Make sure to ignore ImportWarnings that might happen because + # of existing directories with the same name we're trying to + # import but without a __init__.py file. + warnings.simplefilter("ignore") + + try: + __import__(modname) + except exc_type as exc: + # Do not raise or issue warnings inside the catch_warnings() block. + if reason is None: + reason = f"could not import {modname!r}: {exc}" + skipped = Skipped(reason, allow_module_level=True) + + if warn_on_import_error and not isinstance(exc, ModuleNotFoundError): + lines = [ + "", + f"Module '{modname}' was found, but when imported by pytest it raised:", + f" {exc!r}", + "In pytest 9.1 this warning will become an error by default.", + "You can fix the underlying problem, or alternatively overwrite this behavior and silence this " + "warning by passing exc_type=ImportError explicitly.", + "See https://docs.pytest.org/en/stable/deprecations.html#pytest-importorskip-default-behavior-regarding-importerror", + ] + warning = PytestDeprecationWarning("\n".join(lines)) + + if warning: + warnings.warn(warning, stacklevel=2) + if skipped: + raise skipped + + mod = sys.modules[modname] + if minversion is None: + return mod + verattr = getattr(mod, "__version__", None) + if minversion is not None: + # Imported lazily to improve start-up time. 
+ from packaging.version import Version + + if verattr is None or Version(verattr) < Version(minversion): + raise Skipped( + f"module {modname!r} has __version__ {verattr!r}, required is: {minversion!r}", + allow_module_level=True, + ) + return mod diff --git a/.venv/lib/python3.12/site-packages/_pytest/pastebin.py b/.venv/lib/python3.12/site-packages/_pytest/pastebin.py new file mode 100644 index 0000000..c7b39d9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/pastebin.py @@ -0,0 +1,117 @@ +# mypy: allow-untyped-defs +"""Submit failure or test session information to a pastebin service.""" + +from __future__ import annotations + +from io import StringIO +import tempfile +from typing import IO + +from _pytest.config import Config +from _pytest.config import create_terminal_writer +from _pytest.config.argparsing import Parser +from _pytest.stash import StashKey +from _pytest.terminal import TerminalReporter +import pytest + + +pastebinfile_key = StashKey[IO[bytes]]() + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("terminal reporting") + group.addoption( + "--pastebin", + metavar="mode", + action="store", + dest="pastebin", + default=None, + choices=["failed", "all"], + help="Send failed|all info to bpaste.net pastebin service", + ) + + +@pytest.hookimpl(trylast=True) +def pytest_configure(config: Config) -> None: + if config.option.pastebin == "all": + tr = config.pluginmanager.getplugin("terminalreporter") + # If no terminal reporter plugin is present, nothing we can do here; + # this can happen when this function executes in a worker node + # when using pytest-xdist, for example. + if tr is not None: + # pastebin file will be UTF-8 encoded binary file. + config.stash[pastebinfile_key] = tempfile.TemporaryFile("w+b") + oldwrite = tr._tw.write + + def tee_write(s, **kwargs): + oldwrite(s, **kwargs) + if isinstance(s, str): + s = s.encode("utf-8") + config.stash[pastebinfile_key].write(s) + + tr._tw.write = tee_write + + +def pytest_unconfigure(config: Config) -> None: + if pastebinfile_key in config.stash: + pastebinfile = config.stash[pastebinfile_key] + # Get terminal contents and delete file. + pastebinfile.seek(0) + sessionlog = pastebinfile.read() + pastebinfile.close() + del config.stash[pastebinfile_key] + # Undo our patching in the terminal reporter. + tr = config.pluginmanager.getplugin("terminalreporter") + del tr._tw.__dict__["write"] + # Write summary. + tr.write_sep("=", "Sending information to Paste Service") + pastebinurl = create_new_paste(sessionlog) + tr.write_line(f"pastebin session-log: {pastebinurl}\n") + + +def create_new_paste(contents: str | bytes) -> str: + """Create a new paste using the bpaste.net service. + + :contents: Paste contents string. + :returns: URL to the pasted contents, or an error message. + """ + import re + from urllib.error import HTTPError + from urllib.parse import urlencode + from urllib.request import urlopen + + params = {"code": contents, "lexer": "text", "expiry": "1week"} + url = "https://bpa.st" + try: + response: str = ( + urlopen(url, data=urlencode(params).encode("ascii")).read().decode("utf-8") + ) + except HTTPError as e: + with e: # HTTPErrors are also http responses that must be closed! 
+ return f"bad response: {e}" + except OSError as e: # eg urllib.error.URLError + return f"bad response: {e}" + m = re.search(r'href="/raw/(\w+)"', response) + if m: + return f"{url}/show/{m.group(1)}" + else: + return "bad response: invalid format ('" + response + "')" + + +def pytest_terminal_summary(terminalreporter: TerminalReporter) -> None: + if terminalreporter.config.option.pastebin != "failed": + return + if "failed" in terminalreporter.stats: + terminalreporter.write_sep("=", "Sending information to Paste Service") + for rep in terminalreporter.stats["failed"]: + try: + msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc + except AttributeError: + msg = terminalreporter._getfailureheadline(rep) + file = StringIO() + tw = create_terminal_writer(terminalreporter.config, file) + rep.toterminal(tw) + s = file.getvalue() + assert len(s) + pastebinurl = create_new_paste(s) + terminalreporter.write_line(f"{msg} --> {pastebinurl}") diff --git a/.venv/lib/python3.12/site-packages/_pytest/pathlib.py b/.venv/lib/python3.12/site-packages/_pytest/pathlib.py new file mode 100644 index 0000000..cd15434 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/pathlib.py @@ -0,0 +1,1063 @@ +from __future__ import annotations + +import atexit +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Iterator +import contextlib +from enum import Enum +from errno import EBADF +from errno import ELOOP +from errno import ENOENT +from errno import ENOTDIR +import fnmatch +from functools import partial +from importlib.machinery import ModuleSpec +from importlib.machinery import PathFinder +import importlib.util +import itertools +import os +from os.path import expanduser +from os.path import expandvars +from os.path import isabs +from os.path import sep +from pathlib import Path +from pathlib import PurePath +from posixpath import sep as posix_sep +import shutil +import sys +import types +from types import ModuleType +from typing import Any +from typing import TypeVar +import uuid +import warnings + +from _pytest.compat import assert_never +from _pytest.outcomes import skip +from _pytest.warning_types import PytestWarning + + +if sys.version_info < (3, 11): + from importlib._bootstrap_external import _NamespaceLoader as NamespaceLoader +else: + from importlib.machinery import NamespaceLoader + +LOCK_TIMEOUT = 60 * 60 * 24 * 3 + +_AnyPurePath = TypeVar("_AnyPurePath", bound=PurePath) + +# The following function, variables and comments were +# copied from cpython 3.9 Lib/pathlib.py file. + +# EBADF - guard against macOS `stat` throwing EBADF +_IGNORED_ERRORS = (ENOENT, ENOTDIR, EBADF, ELOOP) + +_IGNORED_WINERRORS = ( + 21, # ERROR_NOT_READY - drive exists but is not accessible + 1921, # ERROR_CANT_RESOLVE_FILENAME - fix for broken symlink pointing to itself +) + + +def _ignore_error(exception: Exception) -> bool: + return ( + getattr(exception, "errno", None) in _IGNORED_ERRORS + or getattr(exception, "winerror", None) in _IGNORED_WINERRORS + ) + + +def get_lock_path(path: _AnyPurePath) -> _AnyPurePath: + return path.joinpath(".lock") + + +def on_rm_rf_error( + func: Callable[..., Any] | None, + path: str, + excinfo: BaseException + | tuple[type[BaseException], BaseException, types.TracebackType | None], + *, + start_path: Path, +) -> bool: + """Handle known read-only errors during rmtree. + + The returned value is used only by our own tests. 
+ """ + if isinstance(excinfo, BaseException): + exc = excinfo + else: + exc = excinfo[1] + + # Another process removed the file in the middle of the "rm_rf" (xdist for example). + # More context: https://github.com/pytest-dev/pytest/issues/5974#issuecomment-543799018 + if isinstance(exc, FileNotFoundError): + return False + + if not isinstance(exc, PermissionError): + warnings.warn( + PytestWarning(f"(rm_rf) error removing {path}\n{type(exc)}: {exc}") + ) + return False + + if func not in (os.rmdir, os.remove, os.unlink): + if func not in (os.open,): + warnings.warn( + PytestWarning( + f"(rm_rf) unknown function {func} when removing {path}:\n{type(exc)}: {exc}" + ) + ) + return False + + # Chmod + retry. + import stat + + def chmod_rw(p: str) -> None: + mode = os.stat(p).st_mode + os.chmod(p, mode | stat.S_IRUSR | stat.S_IWUSR) + + # For files, we need to recursively go upwards in the directories to + # ensure they all are also writable. + p = Path(path) + if p.is_file(): + for parent in p.parents: + chmod_rw(str(parent)) + # Stop when we reach the original path passed to rm_rf. + if parent == start_path: + break + chmod_rw(str(path)) + + func(path) + return True + + +def ensure_extended_length_path(path: Path) -> Path: + """Get the extended-length version of a path (Windows). + + On Windows, by default, the maximum length of a path (MAX_PATH) is 260 + characters, and operations on paths longer than that fail. But it is possible + to overcome this by converting the path to "extended-length" form before + performing the operation: + https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file#maximum-path-length-limitation + + On Windows, this function returns the extended-length absolute version of path. + On other platforms it returns path unchanged. + """ + if sys.platform.startswith("win32"): + path = path.resolve() + path = Path(get_extended_length_path_str(str(path))) + return path + + +def get_extended_length_path_str(path: str) -> str: + """Convert a path to a Windows extended length path.""" + long_path_prefix = "\\\\?\\" + unc_long_path_prefix = "\\\\?\\UNC\\" + if path.startswith((long_path_prefix, unc_long_path_prefix)): + return path + # UNC + if path.startswith("\\\\"): + return unc_long_path_prefix + path[2:] + return long_path_prefix + path + + +def rm_rf(path: Path) -> None: + """Remove the path contents recursively, even if some elements + are read-only.""" + path = ensure_extended_length_path(path) + onerror = partial(on_rm_rf_error, start_path=path) + if sys.version_info >= (3, 12): + shutil.rmtree(str(path), onexc=onerror) + else: + shutil.rmtree(str(path), onerror=onerror) + + +def find_prefixed(root: Path, prefix: str) -> Iterator[os.DirEntry[str]]: + """Find all elements in root that begin with the prefix, case-insensitive.""" + l_prefix = prefix.lower() + for x in os.scandir(root): + if x.name.lower().startswith(l_prefix): + yield x + + +def extract_suffixes(iter: Iterable[os.DirEntry[str]], prefix: str) -> Iterator[str]: + """Return the parts of the paths following the prefix. + + :param iter: Iterator over path names. + :param prefix: Expected prefix of the path names. 
+ """ + p_len = len(prefix) + for entry in iter: + yield entry.name[p_len:] + + +def find_suffixes(root: Path, prefix: str) -> Iterator[str]: + """Combine find_prefixes and extract_suffixes.""" + return extract_suffixes(find_prefixed(root, prefix), prefix) + + +def parse_num(maybe_num: str) -> int: + """Parse number path suffixes, returns -1 on error.""" + try: + return int(maybe_num) + except ValueError: + return -1 + + +def _force_symlink(root: Path, target: str | PurePath, link_to: str | Path) -> None: + """Helper to create the current symlink. + + It's full of race conditions that are reasonably OK to ignore + for the context of best effort linking to the latest test run. + + The presumption being that in case of much parallelism + the inaccuracy is going to be acceptable. + """ + current_symlink = root.joinpath(target) + try: + current_symlink.unlink() + except OSError: + pass + try: + current_symlink.symlink_to(link_to) + except Exception: + pass + + +def make_numbered_dir(root: Path, prefix: str, mode: int = 0o700) -> Path: + """Create a directory with an increased number as suffix for the given prefix.""" + for i in range(10): + # try up to 10 times to create the folder + max_existing = max(map(parse_num, find_suffixes(root, prefix)), default=-1) + new_number = max_existing + 1 + new_path = root.joinpath(f"{prefix}{new_number}") + try: + new_path.mkdir(mode=mode) + except Exception: + pass + else: + _force_symlink(root, prefix + "current", new_path) + return new_path + else: + raise OSError( + "could not create numbered dir with prefix " + f"{prefix} in {root} after 10 tries" + ) + + +def create_cleanup_lock(p: Path) -> Path: + """Create a lock to prevent premature folder cleanup.""" + lock_path = get_lock_path(p) + try: + fd = os.open(str(lock_path), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644) + except FileExistsError as e: + raise OSError(f"cannot create lockfile in {p}") from e + else: + pid = os.getpid() + spid = str(pid).encode() + os.write(fd, spid) + os.close(fd) + if not lock_path.is_file(): + raise OSError("lock path got renamed after successful creation") + return lock_path + + +def register_cleanup_lock_removal( + lock_path: Path, register: Any = atexit.register +) -> Any: + """Register a cleanup function for removing a lock, by default on atexit.""" + pid = os.getpid() + + def cleanup_on_exit(lock_path: Path = lock_path, original_pid: int = pid) -> None: + current_pid = os.getpid() + if current_pid != original_pid: + # fork + return + try: + lock_path.unlink() + except OSError: + pass + + return register(cleanup_on_exit) + + +def maybe_delete_a_numbered_dir(path: Path) -> None: + """Remove a numbered directory if its lock can be obtained and it does + not seem to be in use.""" + path = ensure_extended_length_path(path) + lock_path = None + try: + lock_path = create_cleanup_lock(path) + parent = path.parent + + garbage = parent.joinpath(f"garbage-{uuid.uuid4()}") + path.rename(garbage) + rm_rf(garbage) + except OSError: + # known races: + # * other process did a cleanup at the same time + # * deletable folder was found + # * process cwd (Windows) + return + finally: + # If we created the lock, ensure we remove it even if we failed + # to properly remove the numbered dir. 
+ if lock_path is not None: + try: + lock_path.unlink() + except OSError: + pass + + +def ensure_deletable(path: Path, consider_lock_dead_if_created_before: float) -> bool: + """Check if `path` is deletable based on whether the lock file is expired.""" + if path.is_symlink(): + return False + lock = get_lock_path(path) + try: + if not lock.is_file(): + return True + except OSError: + # we might not have access to the lock file at all, in this case assume + # we don't have access to the entire directory (#7491). + return False + try: + lock_time = lock.stat().st_mtime + except Exception: + return False + else: + if lock_time < consider_lock_dead_if_created_before: + # We want to ignore any errors while trying to remove the lock such as: + # - PermissionDenied, like the file permissions have changed since the lock creation; + # - FileNotFoundError, in case another pytest process got here first; + # and any other cause of failure. + with contextlib.suppress(OSError): + lock.unlink() + return True + return False + + +def try_cleanup(path: Path, consider_lock_dead_if_created_before: float) -> None: + """Try to cleanup a folder if we can ensure it's deletable.""" + if ensure_deletable(path, consider_lock_dead_if_created_before): + maybe_delete_a_numbered_dir(path) + + +def cleanup_candidates(root: Path, prefix: str, keep: int) -> Iterator[Path]: + """List candidates for numbered directories to be removed - follows py.path.""" + max_existing = max(map(parse_num, find_suffixes(root, prefix)), default=-1) + max_delete = max_existing - keep + entries = find_prefixed(root, prefix) + entries, entries2 = itertools.tee(entries) + numbers = map(parse_num, extract_suffixes(entries2, prefix)) + for entry, number in zip(entries, numbers, strict=True): + if number <= max_delete: + yield Path(entry) + + +def cleanup_dead_symlinks(root: Path) -> None: + for left_dir in root.iterdir(): + if left_dir.is_symlink(): + if not left_dir.resolve().exists(): + left_dir.unlink() + + +def cleanup_numbered_dir( + root: Path, prefix: str, keep: int, consider_lock_dead_if_created_before: float +) -> None: + """Cleanup for lock driven numbered directories.""" + if not root.exists(): + return + for path in cleanup_candidates(root, prefix, keep): + try_cleanup(path, consider_lock_dead_if_created_before) + for path in root.glob("garbage-*"): + try_cleanup(path, consider_lock_dead_if_created_before) + + cleanup_dead_symlinks(root) + + +def make_numbered_dir_with_cleanup( + root: Path, + prefix: str, + keep: int, + lock_timeout: float, + mode: int, +) -> Path: + """Create a numbered dir with a cleanup lock and remove old ones.""" + e = None + for i in range(10): + try: + p = make_numbered_dir(root, prefix, mode) + # Only lock the current dir when keep is not 0 + if keep != 0: + lock_path = create_cleanup_lock(p) + register_cleanup_lock_removal(lock_path) + except Exception as exc: + e = exc + else: + consider_lock_dead_if_created_before = p.stat().st_mtime - lock_timeout + # Register a cleanup for program exit + atexit.register( + cleanup_numbered_dir, + root, + prefix, + keep, + consider_lock_dead_if_created_before, + ) + return p + assert e is not None + raise e + + +def resolve_from_str(input: str, rootpath: Path) -> Path: + input = expanduser(input) + input = expandvars(input) + if isabs(input): + return Path(input) + else: + return rootpath.joinpath(input) + + +def fnmatch_ex(pattern: str, path: str | os.PathLike[str]) -> bool: + """A port of FNMatcher from py.path.common which works with PurePath() instances. 
+ + The difference between this algorithm and PurePath.match() is that the + latter matches "**" glob expressions for each part of the path, while + this algorithm uses the whole path instead. + + For example: + "tests/foo/bar/doc/test_foo.py" matches pattern "tests/**/doc/test*.py" + with this algorithm, but not with PurePath.match(). + + This algorithm was ported to keep backward-compatibility with existing + settings which assume paths match according this logic. + + References: + * https://bugs.python.org/issue29249 + * https://bugs.python.org/issue34731 + """ + path = PurePath(path) + iswin32 = sys.platform.startswith("win") + + if iswin32 and sep not in pattern and posix_sep in pattern: + # Running on Windows, the pattern has no Windows path separators, + # and the pattern has one or more Posix path separators. Replace + # the Posix path separators with the Windows path separator. + pattern = pattern.replace(posix_sep, sep) + + if sep not in pattern: + name = path.name + else: + name = str(path) + if path.is_absolute() and not os.path.isabs(pattern): + pattern = f"*{os.sep}{pattern}" + return fnmatch.fnmatch(name, pattern) + + +def parts(s: str) -> set[str]: + parts = s.split(sep) + return {sep.join(parts[: i + 1]) or sep for i in range(len(parts))} + + +def symlink_or_skip( + src: os.PathLike[str] | str, + dst: os.PathLike[str] | str, + **kwargs: Any, +) -> None: + """Make a symlink, or skip the test in case symlinks are not supported.""" + try: + os.symlink(src, dst, **kwargs) + except OSError as e: + skip(f"symlinks not supported: {e}") + + +class ImportMode(Enum): + """Possible values for `mode` parameter of `import_path`.""" + + prepend = "prepend" + append = "append" + importlib = "importlib" + + +class ImportPathMismatchError(ImportError): + """Raised on import_path() if there is a mismatch of __file__'s. + + This can happen when `import_path` is called multiple times with different filenames that has + the same basename but reside in packages + (for example "/tests1/test_foo.py" and "/tests2/test_foo.py"). + """ + + +def import_path( + path: str | os.PathLike[str], + *, + mode: str | ImportMode = ImportMode.prepend, + root: Path, + consider_namespace_packages: bool, +) -> ModuleType: + """ + Import and return a module from the given path, which can be a file (a module) or + a directory (a package). + + :param path: + Path to the file to import. + + :param mode: + Controls the underlying import mechanism that will be used: + + * ImportMode.prepend: the directory containing the module (or package, taking + `__init__.py` files into account) will be put at the *start* of `sys.path` before + being imported with `importlib.import_module`. + + * ImportMode.append: same as `prepend`, but the directory will be appended + to the end of `sys.path`, if not already in `sys.path`. + + * ImportMode.importlib: uses more fine control mechanisms provided by `importlib` + to import the module, which avoids having to muck with `sys.path` at all. It effectively + allows having same-named test modules in different places. + + :param root: + Used as an anchor when mode == ImportMode.importlib to obtain + a unique name for the module being imported so it can safely be stored + into ``sys.modules``. + + :param consider_namespace_packages: + If True, consider namespace packages when resolving module names. + + :raises ImportPathMismatchError: + If after importing the given `path` and the module `__file__` + are different. Only raised in `prepend` and `append` modes. 
+ """ + path = Path(path) + mode = ImportMode(mode) + + if not path.exists(): + raise ImportError(path) + + if mode is ImportMode.importlib: + # Try to import this module using the standard import mechanisms, but + # without touching sys.path. + try: + pkg_root, module_name = resolve_pkg_root_and_module_name( + path, consider_namespace_packages=consider_namespace_packages + ) + except CouldNotResolvePathError: + pass + else: + # If the given module name is already in sys.modules, do not import it again. + with contextlib.suppress(KeyError): + return sys.modules[module_name] + + mod = _import_module_using_spec( + module_name, path, pkg_root, insert_modules=False + ) + if mod is not None: + return mod + + # Could not import the module with the current sys.path, so we fall back + # to importing the file as a single module, not being a part of a package. + module_name = module_name_from_path(path, root) + with contextlib.suppress(KeyError): + return sys.modules[module_name] + + mod = _import_module_using_spec( + module_name, path, path.parent, insert_modules=True + ) + if mod is None: + raise ImportError(f"Can't find module {module_name} at location {path}") + return mod + + try: + pkg_root, module_name = resolve_pkg_root_and_module_name( + path, consider_namespace_packages=consider_namespace_packages + ) + except CouldNotResolvePathError: + pkg_root, module_name = path.parent, path.stem + + # Change sys.path permanently: restoring it at the end of this function would cause surprising + # problems because of delayed imports: for example, a conftest.py file imported by this function + # might have local imports, which would fail at runtime if we restored sys.path. + if mode is ImportMode.append: + if str(pkg_root) not in sys.path: + sys.path.append(str(pkg_root)) + elif mode is ImportMode.prepend: + if str(pkg_root) != sys.path[0]: + sys.path.insert(0, str(pkg_root)) + else: + assert_never(mode) + + importlib.import_module(module_name) + + mod = sys.modules[module_name] + if path.name == "__init__.py": + return mod + + ignore = os.environ.get("PY_IGNORE_IMPORTMISMATCH", "") + if ignore != "1": + module_file = mod.__file__ + if module_file is None: + raise ImportPathMismatchError(module_name, module_file, path) + + if module_file.endswith((".pyc", ".pyo")): + module_file = module_file[:-1] + if module_file.endswith(os.sep + "__init__.py"): + module_file = module_file[: -(len(os.sep + "__init__.py"))] + + try: + is_same = _is_same(str(path), module_file) + except FileNotFoundError: + is_same = False + + if not is_same: + raise ImportPathMismatchError(module_name, module_file, path) + + return mod + + +def _import_module_using_spec( + module_name: str, module_path: Path, module_location: Path, *, insert_modules: bool +) -> ModuleType | None: + """ + Tries to import a module by its canonical name, path, and its parent location. + + :param module_name: + The expected module name, will become the key of `sys.modules`. + + :param module_path: + The file path of the module, for example `/foo/bar/test_demo.py`. + If module is a package, pass the path to the `__init__.py` of the package. + If module is a namespace package, pass directory path. + + :param module_location: + The parent location of the module. + If module is a package, pass the directory containing the `__init__.py` file. + + :param insert_modules: + If True, will call `insert_missing_modules` to create empty intermediate modules + with made-up module names (when importing test files not reachable from `sys.path`). 
+ + Example 1 of parent_module_*: + + module_name: "a.b.c.demo" + module_path: Path("a/b/c/demo.py") + module_location: Path("a/b/c/") + if "a.b.c" is package ("a/b/c/__init__.py" exists), then + parent_module_name: "a.b.c" + parent_module_path: Path("a/b/c/__init__.py") + parent_module_location: Path("a/b/c/") + else: + parent_module_name: "a.b.c" + parent_module_path: Path("a/b/c") + parent_module_location: Path("a/b/") + + Example 2 of parent_module_*: + + module_name: "a.b.c" + module_path: Path("a/b/c/__init__.py") + module_location: Path("a/b/c/") + if "a.b" is package ("a/b/__init__.py" exists), then + parent_module_name: "a.b" + parent_module_path: Path("a/b/__init__.py") + parent_module_location: Path("a/b/") + else: + parent_module_name: "a.b" + parent_module_path: Path("a/b/") + parent_module_location: Path("a/") + """ + # Attempt to import the parent module, seems is our responsibility: + # https://github.com/python/cpython/blob/73906d5c908c1e0b73c5436faeff7d93698fc074/Lib/importlib/_bootstrap.py#L1308-L1311 + parent_module_name, _, name = module_name.rpartition(".") + parent_module: ModuleType | None = None + if parent_module_name: + parent_module = sys.modules.get(parent_module_name) + # If the parent_module lacks the `__path__` attribute, AttributeError when finding a submodule's spec, + # requiring re-import according to the path. + need_reimport = not hasattr(parent_module, "__path__") + if parent_module is None or need_reimport: + # Get parent_location based on location, get parent_path based on path. + if module_path.name == "__init__.py": + # If the current module is in a package, + # need to leave the package first and then enter the parent module. + parent_module_path = module_path.parent.parent + else: + parent_module_path = module_path.parent + + if (parent_module_path / "__init__.py").is_file(): + # If the parent module is a package, loading by __init__.py file. + parent_module_path = parent_module_path / "__init__.py" + + parent_module = _import_module_using_spec( + parent_module_name, + parent_module_path, + parent_module_path.parent, + insert_modules=insert_modules, + ) + + # Checking with sys.meta_path first in case one of its hooks can import this module, + # such as our own assertion-rewrite hook. + for meta_importer in sys.meta_path: + module_name_of_meta = getattr(meta_importer.__class__, "__module__", "") + if module_name_of_meta == "_pytest.assertion.rewrite" and module_path.is_file(): + # Import modules in subdirectories by module_path + # to ensure assertion rewrites are not missed (#12659). + find_spec_path = [str(module_location), str(module_path)] + else: + find_spec_path = [str(module_location)] + + spec = meta_importer.find_spec(module_name, find_spec_path) + + if spec_matches_module_path(spec, module_path): + break + else: + loader = None + if module_path.is_dir(): + # The `spec_from_file_location` matches a loader based on the file extension by default. + # For a namespace package, need to manually specify a loader. + loader = NamespaceLoader(name, module_path, PathFinder()) # type: ignore[arg-type] + + spec = importlib.util.spec_from_file_location( + module_name, str(module_path), loader=loader + ) + + if spec_matches_module_path(spec, module_path): + assert spec is not None + # Find spec and import this module. + mod = importlib.util.module_from_spec(spec) + sys.modules[module_name] = mod + spec.loader.exec_module(mod) # type: ignore[union-attr] + + # Set this module as an attribute of the parent module (#12194). 
+ if parent_module is not None: + setattr(parent_module, name, mod) + + if insert_modules: + insert_missing_modules(sys.modules, module_name) + return mod + + return None + + +def spec_matches_module_path(module_spec: ModuleSpec | None, module_path: Path) -> bool: + """Return true if the given ModuleSpec can be used to import the given module path.""" + if module_spec is None: + return False + + if module_spec.origin: + return Path(module_spec.origin) == module_path + + # Compare the path with the `module_spec.submodule_Search_Locations` in case + # the module is part of a namespace package. + # https://docs.python.org/3/library/importlib.html#importlib.machinery.ModuleSpec.submodule_search_locations + if module_spec.submodule_search_locations: # can be None. + for path in module_spec.submodule_search_locations: + if Path(path) == module_path: + return True + + return False + + +# Implement a special _is_same function on Windows which returns True if the two filenames +# compare equal, to circumvent os.path.samefile returning False for mounts in UNC (#7678). +if sys.platform.startswith("win"): + + def _is_same(f1: str, f2: str) -> bool: + return Path(f1) == Path(f2) or os.path.samefile(f1, f2) + +else: + + def _is_same(f1: str, f2: str) -> bool: + return os.path.samefile(f1, f2) + + +def module_name_from_path(path: Path, root: Path) -> str: + """ + Return a dotted module name based on the given path, anchored on root. + + For example: path="projects/src/tests/test_foo.py" and root="/projects", the + resulting module name will be "src.tests.test_foo". + """ + path = path.with_suffix("") + try: + relative_path = path.relative_to(root) + except ValueError: + # If we can't get a relative path to root, use the full path, except + # for the first part ("d:\\" or "/" depending on the platform, for example). + path_parts = path.parts[1:] + else: + # Use the parts for the relative path to the root path. + path_parts = relative_path.parts + + # Module name for packages do not contain the __init__ file, unless + # the `__init__.py` file is at the root. + if len(path_parts) >= 2 and path_parts[-1] == "__init__": + path_parts = path_parts[:-1] + + # Module names cannot contain ".", normalize them to "_". This prevents + # a directory having a "." in the name (".env.310" for example) causing extra intermediate modules. + # Also, important to replace "." at the start of paths, as those are considered relative imports. + path_parts = tuple(x.replace(".", "_") for x in path_parts) + + return ".".join(path_parts) + + +def insert_missing_modules(modules: dict[str, ModuleType], module_name: str) -> None: + """ + Used by ``import_path`` to create intermediate modules when using mode=importlib. + + When we want to import a module as "src.tests.test_foo" for example, we need + to create empty modules "src" and "src.tests" after inserting "src.tests.test_foo", + otherwise "src.tests.test_foo" is not importable by ``__import__``. + """ + module_parts = module_name.split(".") + while module_name: + parent_module_name, _, child_name = module_name.rpartition(".") + if parent_module_name: + parent_module = modules.get(parent_module_name) + if parent_module is None: + try: + # If sys.meta_path is empty, calling import_module will issue + # a warning and raise ModuleNotFoundError. To avoid the + # warning, we check sys.meta_path explicitly and raise the error + # ourselves to fall back to creating a dummy module. 
+ if not sys.meta_path: + raise ModuleNotFoundError + parent_module = importlib.import_module(parent_module_name) + except ModuleNotFoundError: + parent_module = ModuleType( + module_name, + doc="Empty module created by pytest's importmode=importlib.", + ) + modules[parent_module_name] = parent_module + + # Add child attribute to the parent that can reference the child + # modules. + if not hasattr(parent_module, child_name): + setattr(parent_module, child_name, modules[module_name]) + + module_parts.pop(-1) + module_name = ".".join(module_parts) + + +def resolve_package_path(path: Path) -> Path | None: + """Return the Python package path by looking for the last + directory upwards which still contains an __init__.py. + + Returns None if it cannot be determined. + """ + result = None + for parent in itertools.chain((path,), path.parents): + if parent.is_dir(): + if not (parent / "__init__.py").is_file(): + break + if not parent.name.isidentifier(): + break + result = parent + return result + + +def resolve_pkg_root_and_module_name( + path: Path, *, consider_namespace_packages: bool = False +) -> tuple[Path, str]: + """ + Return the path to the directory of the root package that contains the + given Python file, and its module name: + + src/ + app/ + __init__.py + core/ + __init__.py + models.py + + Passing the full path to `models.py` will yield Path("src") and "app.core.models". + + If consider_namespace_packages is True, then we additionally check upwards in the hierarchy + for namespace packages: + + https://packaging.python.org/en/latest/guides/packaging-namespace-packages + + Raises CouldNotResolvePathError if the given path does not belong to a package (missing any __init__.py files). + """ + pkg_root: Path | None = None + pkg_path = resolve_package_path(path) + if pkg_path is not None: + pkg_root = pkg_path.parent + if consider_namespace_packages: + start = pkg_root if pkg_root is not None else path.parent + for candidate in (start, *start.parents): + module_name = compute_module_name(candidate, path) + if module_name and is_importable(module_name, path): + # Point the pkg_root to the root of the namespace package. + pkg_root = candidate + break + + if pkg_root is not None: + module_name = compute_module_name(pkg_root, path) + if module_name: + return pkg_root, module_name + + raise CouldNotResolvePathError(f"Could not resolve for {path}") + + +def is_importable(module_name: str, module_path: Path) -> bool: + """ + Return if the given module path could be imported normally by Python, akin to the user + entering the REPL and importing the corresponding module name directly, and corresponds + to the module_path specified. + + :param module_name: + Full module name that we want to check if is importable. + For example, "app.models". + + :param module_path: + Full path to the python module/package we want to check if is importable. + For example, "/projects/src/app/models.py". + """ + try: + # Note this is different from what we do in ``_import_module_using_spec``, where we explicitly search through + # sys.meta_path to be able to pass the path of the module that we want to import (``meta_importer.find_spec``). + # Using importlib.util.find_spec() is different, it gives the same results as trying to import + # the module normally in the REPL. 
+ spec = importlib.util.find_spec(module_name) + except (ImportError, ValueError, ImportWarning): + return False + else: + return spec_matches_module_path(spec, module_path) + + +def compute_module_name(root: Path, module_path: Path) -> str | None: + """Compute a module name based on a path and a root anchor.""" + try: + path_without_suffix = module_path.with_suffix("") + except ValueError: + # Empty paths (such as Path.cwd()) might break meta_path hooks (like our own assertion rewriter). + return None + + try: + relative = path_without_suffix.relative_to(root) + except ValueError: # pragma: no cover + return None + names = list(relative.parts) + if not names: + return None + if names[-1] == "__init__": + names.pop() + return ".".join(names) + + +class CouldNotResolvePathError(Exception): + """Custom exception raised by resolve_pkg_root_and_module_name.""" + + +def scandir( + path: str | os.PathLike[str], + sort_key: Callable[[os.DirEntry[str]], object] = lambda entry: entry.name, +) -> list[os.DirEntry[str]]: + """Scan a directory recursively, in breadth-first order. + + The returned entries are sorted according to the given key. + The default is to sort by name. + If the directory does not exist, return an empty list. + """ + entries = [] + # Attempt to create a scandir iterator for the given path. + try: + scandir_iter = os.scandir(path) + except FileNotFoundError: + # If the directory does not exist, return an empty list. + return [] + # Use the scandir iterator in a context manager to ensure it is properly closed. + with scandir_iter as s: + for entry in s: + try: + entry.is_file() + except OSError as err: + if _ignore_error(err): + continue + # Reraise non-ignorable errors to avoid hiding issues. + raise + entries.append(entry) + entries.sort(key=sort_key) # type: ignore[arg-type] + return entries + + +def visit( + path: str | os.PathLike[str], recurse: Callable[[os.DirEntry[str]], bool] +) -> Iterator[os.DirEntry[str]]: + """Walk a directory recursively, in breadth-first order. + + The `recurse` predicate determines whether a directory is recursed. + + Entries at each directory level are sorted. + """ + entries = scandir(path) + yield from entries + for entry in entries: + if entry.is_dir() and recurse(entry): + yield from visit(entry.path, recurse) + + +def absolutepath(path: str | os.PathLike[str]) -> Path: + """Convert a path to an absolute path using os.path.abspath. + + Prefer this over Path.resolve() (see #6523). + Prefer this over Path.absolute() (not public, doesn't normalize). + """ + return Path(os.path.abspath(path)) + + +def commonpath(path1: Path, path2: Path) -> Path | None: + """Return the common part shared with the other path, or None if there is + no common part. + + If one path is relative and one is absolute, returns None. + """ + try: + return Path(os.path.commonpath((str(path1), str(path2)))) + except ValueError: + return None + + +def bestrelpath(directory: Path, dest: Path) -> str: + """Return a string which is a relative path from directory to dest such + that directory/bestrelpath == dest. + + The paths must be either both absolute or both relative. + + If no such path can be determined, returns dest. + """ + assert isinstance(directory, Path) + assert isinstance(dest, Path) + if dest == directory: + return os.curdir + # Find the longest common directory. + base = commonpath(directory, dest) + # Can be the case on Windows for two absolute paths on different drives. + # Can be the case for two relative paths without common prefix. 
+ # Can be the case for a relative path and an absolute path. + if not base: + return str(dest) + reldirectory = directory.relative_to(base) + reldest = dest.relative_to(base) + return os.path.join( + # Back from directory to base. + *([os.pardir] * len(reldirectory.parts)), + # Forward from base to dest. + *reldest.parts, + ) + + +def safe_exists(p: Path) -> bool: + """Like Path.exists(), but account for input arguments that might be too long (#11394).""" + try: + return p.exists() + except (ValueError, OSError): + # ValueError: stat: path too long for Windows + # OSError: [WinError 123] The filename, directory name, or volume label syntax is incorrect + return False + + +def samefile_nofollow(p1: Path, p2: Path) -> bool: + """Test whether two paths reference the same actual file or directory. + + Unlike Path.samefile(), does not resolve symlinks. + """ + return os.path.samestat(p1.lstat(), p2.lstat()) diff --git a/.venv/lib/python3.12/site-packages/_pytest/py.typed b/.venv/lib/python3.12/site-packages/_pytest/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/_pytest/pytester.py b/.venv/lib/python3.12/site-packages/_pytest/pytester.py new file mode 100644 index 0000000..1cd5f05 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/pytester.py @@ -0,0 +1,1791 @@ +# mypy: allow-untyped-defs +"""(Disabled by default) support for testing pytest and pytest plugins. + +PYTEST_DONT_REWRITE +""" + +from __future__ import annotations + +import collections.abc +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Sequence +import contextlib +from fnmatch import fnmatch +import gc +import importlib +from io import StringIO +import locale +import os +from pathlib import Path +import platform +import re +import shutil +import subprocess +import sys +import traceback +from typing import Any +from typing import Final +from typing import final +from typing import IO +from typing import Literal +from typing import overload +from typing import TextIO +from typing import TYPE_CHECKING +from weakref import WeakKeyDictionary + +from iniconfig import IniConfig +from iniconfig import SectionWrapper + +from _pytest import timing +from _pytest._code import Source +from _pytest.capture import _get_multicapture +from _pytest.compat import NOTSET +from _pytest.compat import NotSetType +from _pytest.config import _PluggyPlugin +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config import main +from _pytest.config import PytestPluginManager +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.main import Session +from _pytest.monkeypatch import MonkeyPatch +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.outcomes import fail +from _pytest.outcomes import importorskip +from _pytest.outcomes import skip +from _pytest.pathlib import bestrelpath +from _pytest.pathlib import make_numbered_dir +from _pytest.reports import CollectReport +from _pytest.reports import TestReport +from _pytest.tmpdir import TempPathFactory +from _pytest.warning_types import PytestFDWarning + + +if TYPE_CHECKING: + import pexpect + + +pytest_plugins = ["pytester_assertions"] + + +IGNORE_PAM = [ # filenames added when obtaining details about the current user 
+ "/var/lib/sss/mc/passwd" +] + + +def pytest_addoption(parser: Parser) -> None: + parser.addoption( + "--lsof", + action="store_true", + dest="lsof", + default=False, + help="Run FD checks if lsof is available", + ) + + parser.addoption( + "--runpytest", + default="inprocess", + dest="runpytest", + choices=("inprocess", "subprocess"), + help=( + "Run pytest sub runs in tests using an 'inprocess' " + "or 'subprocess' (python -m main) method" + ), + ) + + parser.addini( + "pytester_example_dir", help="Directory to take the pytester example files from" + ) + + +def pytest_configure(config: Config) -> None: + if config.getvalue("lsof"): + checker = LsofFdLeakChecker() + if checker.matching_platform(): + config.pluginmanager.register(checker) + + config.addinivalue_line( + "markers", + "pytester_example_path(*path_segments): join the given path " + "segments to `pytester_example_dir` for this test.", + ) + + +class LsofFdLeakChecker: + def get_open_files(self) -> list[tuple[str, str]]: + if sys.version_info >= (3, 11): + # New in Python 3.11, ignores utf-8 mode + encoding = locale.getencoding() + else: + encoding = locale.getpreferredencoding(False) + out = subprocess.run( + ("lsof", "-Ffn0", "-p", str(os.getpid())), + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + check=True, + text=True, + encoding=encoding, + ).stdout + + def isopen(line: str) -> bool: + return line.startswith("f") and ( + "deleted" not in line + and "mem" not in line + and "txt" not in line + and "cwd" not in line + ) + + open_files = [] + + for line in out.split("\n"): + if isopen(line): + fields = line.split("\0") + fd = fields[0][1:] + filename = fields[1][1:] + if filename in IGNORE_PAM: + continue + if filename.startswith("/"): + open_files.append((fd, filename)) + + return open_files + + def matching_platform(self) -> bool: + try: + subprocess.run(("lsof", "-v"), check=True) + except (OSError, subprocess.CalledProcessError): + return False + else: + return True + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_runtest_protocol(self, item: Item) -> Generator[None, object, object]: + lines1 = self.get_open_files() + try: + return (yield) + finally: + if hasattr(sys, "pypy_version_info"): + gc.collect() + lines2 = self.get_open_files() + + new_fds = {t[0] for t in lines2} - {t[0] for t in lines1} + leaked_files = [t for t in lines2 if t[0] in new_fds] + if leaked_files: + error = [ + f"***** {len(leaked_files)} FD leakage detected", + *(str(f) for f in leaked_files), + "*** Before:", + *(str(f) for f in lines1), + "*** After:", + *(str(f) for f in lines2), + f"***** {len(leaked_files)} FD leakage detected", + "*** function {}:{}: {} ".format(*item.location), + "See issue #2366", + ] + item.warn(PytestFDWarning("\n".join(error))) + + +# used at least by pytest-xdist plugin + + +@fixture +def _pytest(request: FixtureRequest) -> PytestArg: + """Return a helper which offers a gethookrecorder(hook) method which + returns a HookRecorder instance which helps to make assertions about called + hooks.""" + return PytestArg(request) + + +class PytestArg: + def __init__(self, request: FixtureRequest) -> None: + self._request = request + + def gethookrecorder(self, hook) -> HookRecorder: + hookrecorder = HookRecorder(hook._pm) + self._request.addfinalizer(hookrecorder.finish_recording) + return hookrecorder + + +def get_public_names(values: Iterable[str]) -> list[str]: + """Only return names from iterator values without a leading underscore.""" + return [x for x in values if x[0] != "_"] + + +@final +class 
RecordedHookCall: + """A recorded call to a hook. + + The arguments to the hook call are set as attributes. + For example: + + .. code-block:: python + + calls = hook_recorder.getcalls("pytest_runtest_setup") + # Suppose pytest_runtest_setup was called once with `item=an_item`. + assert calls[0].item is an_item + """ + + def __init__(self, name: str, kwargs) -> None: + self.__dict__.update(kwargs) + self._name = name + + def __repr__(self) -> str: + d = self.__dict__.copy() + del d["_name"] + return f"" + + if TYPE_CHECKING: + # The class has undetermined attributes, this tells mypy about it. + def __getattr__(self, key: str): ... + + +@final +class HookRecorder: + """Record all hooks called in a plugin manager. + + Hook recorders are created by :class:`Pytester`. + + This wraps all the hook calls in the plugin manager, recording each call + before propagating the normal calls. + """ + + def __init__( + self, pluginmanager: PytestPluginManager, *, _ispytest: bool = False + ) -> None: + check_ispytest(_ispytest) + + self._pluginmanager = pluginmanager + self.calls: list[RecordedHookCall] = [] + self.ret: int | ExitCode | None = None + + def before(hook_name: str, hook_impls, kwargs) -> None: + self.calls.append(RecordedHookCall(hook_name, kwargs)) + + def after(outcome, hook_name: str, hook_impls, kwargs) -> None: + pass + + self._undo_wrapping = pluginmanager.add_hookcall_monitoring(before, after) + + def finish_recording(self) -> None: + self._undo_wrapping() + + def getcalls(self, names: str | Iterable[str]) -> list[RecordedHookCall]: + """Get all recorded calls to hooks with the given names (or name).""" + if isinstance(names, str): + names = names.split() + return [call for call in self.calls if call._name in names] + + def assert_contains(self, entries: Sequence[tuple[str, str]]) -> None: + __tracebackhide__ = True + i = 0 + entries = list(entries) + # Since Python 3.13, f_locals is not a dict, but eval requires a dict. + backlocals = dict(sys._getframe(1).f_locals) + while entries: + name, check = entries.pop(0) + for ind, call in enumerate(self.calls[i:]): + if call._name == name: + print("NAMEMATCH", name, call) + if eval(check, backlocals, call.__dict__): + print("CHECKERMATCH", repr(check), "->", call) + else: + print("NOCHECKERMATCH", repr(check), "-", call) + continue + i += ind + 1 + break + print("NONAMEMATCH", name, "with", call) + else: + fail(f"could not find {name!r} check {check!r}") + + def popcall(self, name: str) -> RecordedHookCall: + __tracebackhide__ = True + for i, call in enumerate(self.calls): + if call._name == name: + del self.calls[i] + return call + lines = [f"could not find call {name!r}, in:"] + lines.extend([f" {x}" for x in self.calls]) + fail("\n".join(lines)) + + def getcall(self, name: str) -> RecordedHookCall: + values = self.getcalls(name) + assert len(values) == 1, (name, values) + return values[0] + + # functionality for test reports + + @overload + def getreports( + self, + names: Literal["pytest_collectreport"], + ) -> Sequence[CollectReport]: ... + + @overload + def getreports( + self, + names: Literal["pytest_runtest_logreport"], + ) -> Sequence[TestReport]: ... + + @overload + def getreports( + self, + names: str | Iterable[str] = ( + "pytest_collectreport", + "pytest_runtest_logreport", + ), + ) -> Sequence[CollectReport | TestReport]: ... 
+ + def getreports( + self, + names: str | Iterable[str] = ( + "pytest_collectreport", + "pytest_runtest_logreport", + ), + ) -> Sequence[CollectReport | TestReport]: + return [x.report for x in self.getcalls(names)] + + def matchreport( + self, + inamepart: str = "", + names: str | Iterable[str] = ( + "pytest_runtest_logreport", + "pytest_collectreport", + ), + when: str | None = None, + ) -> CollectReport | TestReport: + """Return a testreport whose dotted import path matches.""" + values = [] + for rep in self.getreports(names=names): + if not when and rep.when != "call" and rep.passed: + # setup/teardown passing reports - let's ignore those + continue + if when and rep.when != when: + continue + if not inamepart or inamepart in rep.nodeid.split("::"): + values.append(rep) + if not values: + raise ValueError( + f"could not find test report matching {inamepart!r}: " + "no test reports at all!" + ) + if len(values) > 1: + raise ValueError( + f"found 2 or more testreports matching {inamepart!r}: {values}" + ) + return values[0] + + @overload + def getfailures( + self, + names: Literal["pytest_collectreport"], + ) -> Sequence[CollectReport]: ... + + @overload + def getfailures( + self, + names: Literal["pytest_runtest_logreport"], + ) -> Sequence[TestReport]: ... + + @overload + def getfailures( + self, + names: str | Iterable[str] = ( + "pytest_collectreport", + "pytest_runtest_logreport", + ), + ) -> Sequence[CollectReport | TestReport]: ... + + def getfailures( + self, + names: str | Iterable[str] = ( + "pytest_collectreport", + "pytest_runtest_logreport", + ), + ) -> Sequence[CollectReport | TestReport]: + return [rep for rep in self.getreports(names) if rep.failed] + + def getfailedcollections(self) -> Sequence[CollectReport]: + return self.getfailures("pytest_collectreport") + + def listoutcomes( + self, + ) -> tuple[ + Sequence[TestReport], + Sequence[CollectReport | TestReport], + Sequence[CollectReport | TestReport], + ]: + passed = [] + skipped = [] + failed = [] + for rep in self.getreports( + ("pytest_collectreport", "pytest_runtest_logreport") + ): + if rep.passed: + if rep.when == "call": + assert isinstance(rep, TestReport) + passed.append(rep) + elif rep.skipped: + skipped.append(rep) + else: + assert rep.failed, f"Unexpected outcome: {rep!r}" + failed.append(rep) + return passed, skipped, failed + + def countoutcomes(self) -> list[int]: + return [len(x) for x in self.listoutcomes()] + + def assertoutcome(self, passed: int = 0, skipped: int = 0, failed: int = 0) -> None: + __tracebackhide__ = True + from _pytest.pytester_assertions import assertoutcome + + outcomes = self.listoutcomes() + assertoutcome( + outcomes, + passed=passed, + skipped=skipped, + failed=failed, + ) + + def clear(self) -> None: + self.calls[:] = [] + + +@fixture +def linecomp() -> LineComp: + """A :class: `LineComp` instance for checking that an input linearly + contains a sequence of strings.""" + return LineComp() + + +@fixture(name="LineMatcher") +def LineMatcher_fixture(request: FixtureRequest) -> type[LineMatcher]: + """A reference to the :class: `LineMatcher`. + + This is instantiable with a list of lines (without their trailing newlines). + This is useful for testing large texts, such as the output of commands. 
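    A short sketch of how the returned class is typically used (the sample
    lines are hypothetical):

    .. code-block:: python

        def test_summary_lines(LineMatcher):
            # Instantiate with a list of lines (no trailing newlines) and
            # assert that the expected fnmatch patterns appear in order.
            matcher = LineMatcher(["collected 3 items", "3 passed in 0.12s"])
            matcher.fnmatch_lines(["*collected 3 items*", "*3 passed*"])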
+ """ + return LineMatcher + + +@fixture +def pytester( + request: FixtureRequest, tmp_path_factory: TempPathFactory, monkeypatch: MonkeyPatch +) -> Pytester: + """ + Facilities to write tests/configuration files, execute pytest in isolation, and match + against expected output, perfect for black-box testing of pytest plugins. + + It attempts to isolate the test run from external factors as much as possible, modifying + the current working directory to ``path`` and environment variables during initialization. + + It is particularly useful for testing plugins. It is similar to the :fixture:`tmp_path` + fixture but provides methods which aid in testing pytest itself. + """ + return Pytester(request, tmp_path_factory, monkeypatch, _ispytest=True) + + +@fixture +def _sys_snapshot() -> Generator[None]: + snappaths = SysPathsSnapshot() + snapmods = SysModulesSnapshot() + yield + snapmods.restore() + snappaths.restore() + + +@fixture +def _config_for_test() -> Generator[Config]: + from _pytest.config import get_config + + config = get_config() + yield config + config._ensure_unconfigure() # cleanup, e.g. capman closing tmpfiles. + + +# Regex to match the session duration string in the summary: "74.34s". +rex_session_duration = re.compile(r"\d+\.\d\ds") +# Regex to match all the counts and phrases in the summary line: "34 passed, 111 skipped". +rex_outcome = re.compile(r"(\d+) (\w+)") + + +@final +class RunResult: + """The result of running a command from :class:`~pytest.Pytester`.""" + + def __init__( + self, + ret: int | ExitCode, + outlines: list[str], + errlines: list[str], + duration: float, + ) -> None: + try: + self.ret: int | ExitCode = ExitCode(ret) + """The return value.""" + except ValueError: + self.ret = ret + self.outlines = outlines + """List of lines captured from stdout.""" + self.errlines = errlines + """List of lines captured from stderr.""" + self.stdout = LineMatcher(outlines) + """:class:`~pytest.LineMatcher` of stdout. + + Use e.g. :func:`str(stdout) ` to reconstruct stdout, or the commonly used + :func:`stdout.fnmatch_lines() ` method. + """ + self.stderr = LineMatcher(errlines) + """:class:`~pytest.LineMatcher` of stderr.""" + self.duration = duration + """Duration in seconds.""" + + def __repr__(self) -> str: + return ( + f"" + ) + + def parseoutcomes(self) -> dict[str, int]: + """Return a dictionary of outcome noun -> count from parsing the terminal + output that the test process produced. + + The returned nouns will always be in plural form:: + + ======= 1 failed, 1 passed, 1 warning, 1 error in 0.13s ==== + + Will return ``{"failed": 1, "passed": 1, "warnings": 1, "errors": 1}``. + """ + return self.parse_summary_nouns(self.outlines) + + @classmethod + def parse_summary_nouns(cls, lines) -> dict[str, int]: + """Extract the nouns from a pytest terminal summary line. + + It always returns the plural noun for consistency:: + + ======= 1 failed, 1 passed, 1 warning, 1 error in 0.13s ==== + + Will return ``{"failed": 1, "passed": 1, "warnings": 1, "errors": 1}``. 
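        The parsing relies on the two module-level regexes defined above; a
        small illustrative check of how they behave on such a summary line:

        .. code-block:: python

            import re

            line = "======= 1 failed, 2 passed, 1 warning in 0.13s ========"
            # rex_session_duration identifies the summary line by its duration...
            assert re.search(r"\d+\.\d\ds", line)
            # ...and rex_outcome extracts the (count, noun) pairs from it.
            assert re.findall(r"(\d+) (\w+)", line) == [
                ("1", "failed"), ("2", "passed"), ("1", "warning")
            ]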
+ """ + for line in reversed(lines): + if rex_session_duration.search(line): + outcomes = rex_outcome.findall(line) + ret = {noun: int(count) for (count, noun) in outcomes} + break + else: + raise ValueError("Pytest terminal summary report not found") + + to_plural = { + "warning": "warnings", + "error": "errors", + } + return {to_plural.get(k, k): v for k, v in ret.items()} + + def assert_outcomes( + self, + passed: int = 0, + skipped: int = 0, + failed: int = 0, + errors: int = 0, + xpassed: int = 0, + xfailed: int = 0, + warnings: int | None = None, + deselected: int | None = None, + ) -> None: + """ + Assert that the specified outcomes appear with the respective + numbers (0 means it didn't occur) in the text output from a test run. + + ``warnings`` and ``deselected`` are only checked if not None. + """ + __tracebackhide__ = True + from _pytest.pytester_assertions import assert_outcomes + + outcomes = self.parseoutcomes() + assert_outcomes( + outcomes, + passed=passed, + skipped=skipped, + failed=failed, + errors=errors, + xpassed=xpassed, + xfailed=xfailed, + warnings=warnings, + deselected=deselected, + ) + + +class SysModulesSnapshot: + def __init__(self, preserve: Callable[[str], bool] | None = None) -> None: + self.__preserve = preserve + self.__saved = dict(sys.modules) + + def restore(self) -> None: + if self.__preserve: + self.__saved.update( + (k, m) for k, m in sys.modules.items() if self.__preserve(k) + ) + sys.modules.clear() + sys.modules.update(self.__saved) + + +class SysPathsSnapshot: + def __init__(self) -> None: + self.__saved = list(sys.path), list(sys.meta_path) + + def restore(self) -> None: + sys.path[:], sys.meta_path[:] = self.__saved + + +@final +class Pytester: + """ + Facilities to write tests/configuration files, execute pytest in isolation, and match + against expected output, perfect for black-box testing of pytest plugins. + + It attempts to isolate the test run from external factors as much as possible, modifying + the current working directory to :attr:`path` and environment variables during initialization. + """ + + __test__ = False + + CLOSE_STDIN: Final = NOTSET + + class TimeoutExpired(Exception): + pass + + def __init__( + self, + request: FixtureRequest, + tmp_path_factory: TempPathFactory, + monkeypatch: MonkeyPatch, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._request = request + self._mod_collections: WeakKeyDictionary[Collector, list[Item | Collector]] = ( + WeakKeyDictionary() + ) + if request.function: + name: str = request.function.__name__ + else: + name = request.node.name + self._name = name + self._path: Path = tmp_path_factory.mktemp(name, numbered=True) + #: A list of plugins to use with :py:meth:`parseconfig` and + #: :py:meth:`runpytest`. Initially this is an empty list but plugins can + #: be added to the list. + #: + #: When running in subprocess mode, specify plugins by name (str) - adding + #: plugin objects directly is not supported. + self.plugins: list[str | _PluggyPlugin] = [] + self._sys_path_snapshot = SysPathsSnapshot() + self._sys_modules_snapshot = self.__take_sys_modules_snapshot() + self._request.addfinalizer(self._finalize) + self._method = self._request.config.getoption("--runpytest") + self._test_tmproot = tmp_path_factory.mktemp(f"tmp-{name}", numbered=True) + + self._monkeypatch = mp = monkeypatch + self.chdir() + mp.setenv("PYTEST_DEBUG_TEMPROOT", str(self._test_tmproot)) + # Ensure no unexpected caching via tox. 
+ mp.delenv("TOX_ENV_DIR", raising=False) + # Discard outer pytest options. + mp.delenv("PYTEST_ADDOPTS", raising=False) + # Ensure no user config is used. + tmphome = str(self.path) + mp.setenv("HOME", tmphome) + mp.setenv("USERPROFILE", tmphome) + # Do not use colors for inner runs by default. + mp.setenv("PY_COLORS", "0") + + @property + def path(self) -> Path: + """Temporary directory path used to create files/run tests from, etc.""" + return self._path + + def __repr__(self) -> str: + return f"" + + def _finalize(self) -> None: + """ + Clean up global state artifacts. + + Some methods modify the global interpreter state and this tries to + clean this up. It does not remove the temporary directory however so + it can be looked at after the test run has finished. + """ + self._sys_modules_snapshot.restore() + self._sys_path_snapshot.restore() + + def __take_sys_modules_snapshot(self) -> SysModulesSnapshot: + # Some zope modules used by twisted-related tests keep internal state + # and can't be deleted; we had some trouble in the past with + # `zope.interface` for example. + # + # Preserve readline due to https://bugs.python.org/issue41033. + # pexpect issues a SIGWINCH. + def preserve_module(name): + return name.startswith(("zope", "readline")) + + return SysModulesSnapshot(preserve=preserve_module) + + def make_hook_recorder(self, pluginmanager: PytestPluginManager) -> HookRecorder: + """Create a new :class:`HookRecorder` for a :class:`PytestPluginManager`.""" + pluginmanager.reprec = reprec = HookRecorder(pluginmanager, _ispytest=True) # type: ignore[attr-defined] + self._request.addfinalizer(reprec.finish_recording) + return reprec + + def chdir(self) -> None: + """Cd into the temporary directory. + + This is done automatically upon instantiation. + """ + self._monkeypatch.chdir(self.path) + + def _makefile( + self, + ext: str, + lines: Sequence[Any | bytes], + files: dict[str, str], + encoding: str = "utf-8", + ) -> Path: + items = list(files.items()) + + if ext is None: + raise TypeError("ext must not be None") + + if ext and not ext.startswith("."): + raise ValueError( + f"pytester.makefile expects a file extension, try .{ext} instead of {ext}" + ) + + def to_text(s: Any | bytes) -> str: + return s.decode(encoding) if isinstance(s, bytes) else str(s) + + if lines: + source = "\n".join(to_text(x) for x in lines) + basename = self._name + items.insert(0, (basename, source)) + + ret = None + for basename, value in items: + p = self.path.joinpath(basename).with_suffix(ext) + p.parent.mkdir(parents=True, exist_ok=True) + source_ = Source(value) + source = "\n".join(to_text(line) for line in source_.lines) + p.write_text(source.strip(), encoding=encoding) + if ret is None: + ret = p + assert ret is not None + return ret + + def makefile(self, ext: str, *args: str, **kwargs: str) -> Path: + r"""Create new text file(s) in the test directory. + + :param ext: + The extension the file(s) should use, including the dot, e.g. `.py`. + :param args: + All args are treated as strings and joined using newlines. + The result is written as contents to the file. The name of the + file is based on the test function requesting this fixture. + :param kwargs: + Each keyword is the name of a file, while the value of it will + be written as contents of the file. + :returns: + The first created file. + + Examples: + + .. 
code-block:: python + + pytester.makefile(".txt", "line1", "line2") + + pytester.makefile(".ini", pytest="[pytest]\naddopts=-rs\n") + + To create binary files, use :meth:`pathlib.Path.write_bytes` directly: + + .. code-block:: python + + filename = pytester.path.joinpath("foo.bin") + filename.write_bytes(b"...") + """ + return self._makefile(ext, args, kwargs) + + def makeconftest(self, source: str) -> Path: + """Write a conftest.py file. + + :param source: The contents. + :returns: The conftest.py file. + """ + return self.makepyfile(conftest=source) + + def makeini(self, source: str) -> Path: + """Write a tox.ini file. + + :param source: The contents. + :returns: The tox.ini file. + """ + return self.makefile(".ini", tox=source) + + def maketoml(self, source: str) -> Path: + """Write a pytest.toml file. + + :param source: The contents. + :returns: The pytest.toml file. + + .. versionadded:: 9.0 + """ + return self.makefile(".toml", pytest=source) + + def getinicfg(self, source: str) -> SectionWrapper: + """Return the pytest section from the tox.ini config file.""" + p = self.makeini(source) + return IniConfig(str(p))["pytest"] + + def makepyprojecttoml(self, source: str) -> Path: + """Write a pyproject.toml file. + + :param source: The contents. + :returns: The pyproject.ini file. + + .. versionadded:: 6.0 + """ + return self.makefile(".toml", pyproject=source) + + def makepyfile(self, *args, **kwargs) -> Path: + r"""Shortcut for .makefile() with a .py extension. + + Defaults to the test name with a '.py' extension, e.g test_foobar.py, overwriting + existing files. + + Examples: + + .. code-block:: python + + def test_something(pytester): + # Initial file is created test_something.py. + pytester.makepyfile("foobar") + # To create multiple files, pass kwargs accordingly. + pytester.makepyfile(custom="foobar") + # At this point, both 'test_something.py' & 'custom.py' exist in the test directory. + + """ + return self._makefile(".py", args, kwargs) + + def maketxtfile(self, *args, **kwargs) -> Path: + r"""Shortcut for .makefile() with a .txt extension. + + Defaults to the test name with a '.txt' extension, e.g test_foobar.txt, overwriting + existing files. + + Examples: + + .. code-block:: python + + def test_something(pytester): + # Initial file is created test_something.txt. + pytester.maketxtfile("foobar") + # To create multiple files, pass kwargs accordingly. + pytester.maketxtfile(custom="foobar") + # At this point, both 'test_something.txt' & 'custom.txt' exist in the test directory. + + """ + return self._makefile(".txt", args, kwargs) + + def syspathinsert(self, path: str | os.PathLike[str] | None = None) -> None: + """Prepend a directory to sys.path, defaults to :attr:`path`. + + This is undone automatically when this object dies at the end of each + test. + + :param path: + The path. + """ + if path is None: + path = self.path + + self._monkeypatch.syspath_prepend(str(path)) + + def mkdir(self, name: str | os.PathLike[str]) -> Path: + """Create a new (sub)directory. + + :param name: + The name of the directory, relative to the pytester path. + :returns: + The created directory. + :rtype: pathlib.Path + """ + p = self.path / name + p.mkdir() + return p + + def mkpydir(self, name: str | os.PathLike[str]) -> Path: + """Create a new python package. + + This creates a (sub)directory with an empty ``__init__.py`` file so it + gets recognised as a Python package. 
+ """ + p = self.path / name + p.mkdir() + p.joinpath("__init__.py").touch() + return p + + def copy_example(self, name: str | None = None) -> Path: + """Copy file from project's directory into the testdir. + + :param name: + The name of the file to copy. + :return: + Path to the copied directory (inside ``self.path``). + :rtype: pathlib.Path + """ + example_dir_ = self._request.config.getini("pytester_example_dir") + if example_dir_ is None: + raise ValueError("pytester_example_dir is unset, can't copy examples") + example_dir: Path = self._request.config.rootpath / example_dir_ + + for extra_element in self._request.node.iter_markers("pytester_example_path"): + assert extra_element.args + example_dir = example_dir.joinpath(*extra_element.args) + + if name is None: + func_name = self._name + maybe_dir = example_dir / func_name + maybe_file = example_dir / (func_name + ".py") + + if maybe_dir.is_dir(): + example_path = maybe_dir + elif maybe_file.is_file(): + example_path = maybe_file + else: + raise LookupError( + f"{func_name} can't be found as module or package in {example_dir}" + ) + else: + example_path = example_dir.joinpath(name) + + if example_path.is_dir() and not example_path.joinpath("__init__.py").is_file(): + shutil.copytree(example_path, self.path, symlinks=True, dirs_exist_ok=True) + return self.path + elif example_path.is_file(): + result = self.path.joinpath(example_path.name) + shutil.copy(example_path, result) + return result + else: + raise LookupError( + f'example "{example_path}" is not found as a file or directory' + ) + + def getnode(self, config: Config, arg: str | os.PathLike[str]) -> Collector | Item: + """Get the collection node of a file. + + :param config: + A pytest config. + See :py:meth:`parseconfig` and :py:meth:`parseconfigure` for creating it. + :param arg: + Path to the file. + :returns: + The node. + """ + session = Session.from_config(config) + assert "::" not in str(arg) + p = Path(os.path.abspath(arg)) + config.hook.pytest_sessionstart(session=session) + res = session.perform_collect([str(p)], genitems=False)[0] + config.hook.pytest_sessionfinish(session=session, exitstatus=ExitCode.OK) + return res + + def getpathnode(self, path: str | os.PathLike[str]) -> Collector | Item: + """Return the collection node of a file. + + This is like :py:meth:`getnode` but uses :py:meth:`parseconfigure` to + create the (configured) pytest Config instance. + + :param path: + Path to the file. + :returns: + The node. + """ + path = Path(path) + config = self.parseconfigure(path) + session = Session.from_config(config) + x = bestrelpath(session.path, path) + config.hook.pytest_sessionstart(session=session) + res = session.perform_collect([x], genitems=False)[0] + config.hook.pytest_sessionfinish(session=session, exitstatus=ExitCode.OK) + return res + + def genitems(self, colitems: Sequence[Item | Collector]) -> list[Item]: + """Generate all test items from a collection node. + + This recurses into the collection node and returns a list of all the + test items contained within. + + :param colitems: + The collection nodes. + :returns: + The collected items. + """ + session = colitems[0].session + result: list[Item] = [] + for colitem in colitems: + result.extend(session.genitems(colitem)) + return result + + def runitem(self, source: str) -> Any: + """Run the "test_func" Item. + + The calling test instance (class containing the test method) must + provide a ``.getrunner()`` method which should return a runner which + can run the test protocol for a single item, e.g. 
+ ``_pytest.runner.runtestprotocol``. + """ + # used from runner functional tests + item = self.getitem(source) + # the test class where we are called from wants to provide the runner + testclassinstance = self._request.instance + runner = testclassinstance.getrunner() + return runner(item) + + def inline_runsource(self, source: str, *cmdlineargs) -> HookRecorder: + """Run a test module in process using ``pytest.main()``. + + This run writes "source" into a temporary file and runs + ``pytest.main()`` on it, returning a :py:class:`HookRecorder` instance + for the result. + + :param source: The source code of the test module. + :param cmdlineargs: Any extra command line arguments to use. + """ + p = self.makepyfile(source) + values = [*list(cmdlineargs), p] + return self.inline_run(*values) + + def inline_genitems(self, *args) -> tuple[list[Item], HookRecorder]: + """Run ``pytest.main(['--collect-only'])`` in-process. + + Runs the :py:func:`pytest.main` function to run all of pytest inside + the test process itself like :py:meth:`inline_run`, but returns a + tuple of the collected items and a :py:class:`HookRecorder` instance. + """ + rec = self.inline_run("--collect-only", *args) + items = [x.item for x in rec.getcalls("pytest_itemcollected")] + return items, rec + + def inline_run( + self, + *args: str | os.PathLike[str], + plugins=(), + no_reraise_ctrlc: bool = False, + ) -> HookRecorder: + """Run ``pytest.main()`` in-process, returning a HookRecorder. + + Runs the :py:func:`pytest.main` function to run all of pytest inside + the test process itself. This means it can return a + :py:class:`HookRecorder` instance which gives more detailed results + from that run than can be done by matching stdout/stderr from + :py:meth:`runpytest`. + + :param args: + Command line arguments to pass to :py:func:`pytest.main`. + :param plugins: + Extra plugin instances the ``pytest.main()`` instance should use. + :param no_reraise_ctrlc: + Typically we reraise keyboard interrupts from the child run. If + True, the KeyboardInterrupt exception is captured. + """ + from _pytest.unraisableexception import gc_collect_iterations_key + + # (maybe a cpython bug?) the importlib cache sometimes isn't updated + # properly between file creation and inline_run (especially if imports + # are interspersed with file creation) + importlib.invalidate_caches() + + plugins = list(plugins) + finalizers = [] + try: + # Any sys.module or sys.path changes done while running pytest + # inline should be reverted after the test run completes to avoid + # clashing with later inline tests run within the same pytest test, + # e.g. just because they use matching test module names. + finalizers.append(self.__take_sys_modules_snapshot().restore) + finalizers.append(SysPathsSnapshot().restore) + + # Important note: + # - our tests should not leave any other references/registrations + # laying around other than possibly loaded test modules + # referenced from sys.modules, as nothing will clean those up + # automatically + + rec = [] + + class PytesterHelperPlugin: + @staticmethod + def pytest_configure(config: Config) -> None: + rec.append(self.make_hook_recorder(config.pluginmanager)) + + # The unraisable plugin GC collect slows down inline + # pytester runs too much. 
+ config.stash[gc_collect_iterations_key] = 0 + + plugins.append(PytesterHelperPlugin()) + ret = main([str(x) for x in args], plugins=plugins) + if len(rec) == 1: + reprec = rec.pop() + else: + + class reprec: # type: ignore + pass + + reprec.ret = ret + + # Typically we reraise keyboard interrupts from the child run + # because it's our user requesting interruption of the testing. + if ret == ExitCode.INTERRUPTED and not no_reraise_ctrlc: + calls = reprec.getcalls("pytest_keyboard_interrupt") + if calls and calls[-1].excinfo.type == KeyboardInterrupt: + raise KeyboardInterrupt() + return reprec + finally: + for finalizer in finalizers: + finalizer() + + def runpytest_inprocess( + self, *args: str | os.PathLike[str], **kwargs: Any + ) -> RunResult: + """Return result of running pytest in-process, providing a similar + interface to what self.runpytest() provides.""" + syspathinsert = kwargs.pop("syspathinsert", False) + + if syspathinsert: + self.syspathinsert() + instant = timing.Instant() + capture = _get_multicapture("sys") + capture.start_capturing() + try: + try: + reprec = self.inline_run(*args, **kwargs) + except SystemExit as e: + ret = e.args[0] + try: + ret = ExitCode(e.args[0]) + except ValueError: + pass + + class reprec: # type: ignore + ret = ret + + except Exception: + traceback.print_exc() + + class reprec: # type: ignore + ret = ExitCode(3) + + finally: + out, err = capture.readouterr() + capture.stop_capturing() + sys.stdout.write(out) + sys.stderr.write(err) + + assert reprec.ret is not None + res = RunResult( + reprec.ret, out.splitlines(), err.splitlines(), instant.elapsed().seconds + ) + res.reprec = reprec # type: ignore + return res + + def runpytest(self, *args: str | os.PathLike[str], **kwargs: Any) -> RunResult: + """Run pytest inline or in a subprocess, depending on the command line + option "--runpytest" and return a :py:class:`~pytest.RunResult`.""" + new_args = self._ensure_basetemp(args) + if self._method == "inprocess": + return self.runpytest_inprocess(*new_args, **kwargs) + elif self._method == "subprocess": + return self.runpytest_subprocess(*new_args, **kwargs) + raise RuntimeError(f"Unrecognized runpytest option: {self._method}") + + def _ensure_basetemp( + self, args: Sequence[str | os.PathLike[str]] + ) -> list[str | os.PathLike[str]]: + new_args = list(args) + for x in new_args: + if str(x).startswith("--basetemp"): + break + else: + new_args.append( + "--basetemp={}".format(self.path.parent.joinpath("basetemp")) + ) + return new_args + + def parseconfig(self, *args: str | os.PathLike[str]) -> Config: + """Return a new pytest :class:`pytest.Config` instance from given + commandline args. + + This invokes the pytest bootstrapping code in _pytest.config to create a + new :py:class:`pytest.PytestPluginManager` and call the + :hook:`pytest_cmdline_parse` hook to create a new :class:`pytest.Config` + instance. + + If :attr:`plugins` has been populated they should be plugin modules + to be registered with the plugin manager. 
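        A minimal sketch (the command-line options shown are arbitrary):

        .. code-block:: python

            def test_color_option(pytester):
                config = pytester.parseconfig("-q", "--color=no")
                assert config.getoption("color") == "no"
                # Use parseconfigure(...) instead when the pytest_configure
                # hook must also have run on the returned Config.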
+ """ + import _pytest.config + + new_args = [str(x) for x in self._ensure_basetemp(args)] + + config = _pytest.config._prepareconfig(new_args, self.plugins) + # we don't know what the test will do with this half-setup config + # object and thus we make sure it gets unconfigured properly in any + # case (otherwise capturing could still be active, for example) + self._request.addfinalizer(config._ensure_unconfigure) + return config + + def parseconfigure(self, *args: str | os.PathLike[str]) -> Config: + """Return a new pytest configured Config instance. + + Returns a new :py:class:`pytest.Config` instance like + :py:meth:`parseconfig`, but also calls the :hook:`pytest_configure` + hook. + """ + config = self.parseconfig(*args) + config._do_configure() + return config + + def getitem( + self, source: str | os.PathLike[str], funcname: str = "test_func" + ) -> Item: + """Return the test item for a test function. + + Writes the source to a python file and runs pytest's collection on + the resulting module, returning the test item for the requested + function name. + + :param source: + The module source. + :param funcname: + The name of the test function for which to return a test item. + :returns: + The test item. + """ + items = self.getitems(source) + for item in items: + if item.name == funcname: + return item + assert 0, f"{funcname!r} item not found in module:\n{source}\nitems: {items}" + + def getitems(self, source: str | os.PathLike[str]) -> list[Item]: + """Return all test items collected from the module. + + Writes the source to a Python file and runs pytest's collection on + the resulting module, returning all test items contained within. + """ + modcol = self.getmodulecol(source) + return self.genitems([modcol]) + + def getmodulecol( + self, + source: str | os.PathLike[str], + configargs=(), + *, + withinit: bool = False, + ): + """Return the module collection node for ``source``. + + Writes ``source`` to a file using :py:meth:`makepyfile` and then + runs the pytest collection on it, returning the collection node for the + test module. + + :param source: + The source code of the module to collect. + + :param configargs: + Any extra arguments to pass to :py:meth:`parseconfigure`. + + :param withinit: + Whether to also write an ``__init__.py`` file to the same + directory to ensure it is a package. + """ + if isinstance(source, os.PathLike): + path = self.path.joinpath(source) + assert not withinit, "not supported for paths" + else: + kw = {self._name: str(source)} + path = self.makepyfile(**kw) + if withinit: + self.makepyfile(__init__="#") + self.config = config = self.parseconfigure(path, *configargs) + return self.getnode(config, path) + + def collect_by_name(self, modcol: Collector, name: str) -> Item | Collector | None: + """Return the collection node for name from the module collection. + + Searches a module collection node for a collection node matching the + given name. + + :param modcol: A module collection node; see :py:meth:`getmodulecol`. + :param name: The name of the node to return. + """ + if modcol not in self._mod_collections: + self._mod_collections[modcol] = list(modcol.collect()) + for colitem in self._mod_collections[modcol]: + if colitem.name == name: + return colitem + return None + + def popen( + self, + cmdargs: Sequence[str | os.PathLike[str]], + stdout: int | TextIO = subprocess.PIPE, + stderr: int | TextIO = subprocess.PIPE, + stdin: NotSetType | bytes | IO[Any] | int = CLOSE_STDIN, + **kw, + ): + """Invoke :py:class:`subprocess.Popen`. 
+ + Calls :py:class:`subprocess.Popen` making sure the current working + directory is in ``PYTHONPATH``. + + You probably want to use :py:meth:`run` instead. + """ + env = os.environ.copy() + env["PYTHONPATH"] = os.pathsep.join( + filter(None, [os.getcwd(), env.get("PYTHONPATH", "")]) + ) + kw["env"] = env + + if stdin is self.CLOSE_STDIN: + kw["stdin"] = subprocess.PIPE + elif isinstance(stdin, bytes): + kw["stdin"] = subprocess.PIPE + else: + kw["stdin"] = stdin + + popen = subprocess.Popen(cmdargs, stdout=stdout, stderr=stderr, **kw) + if stdin is self.CLOSE_STDIN: + assert popen.stdin is not None + popen.stdin.close() + elif isinstance(stdin, bytes): + assert popen.stdin is not None + popen.stdin.write(stdin) + + return popen + + def run( + self, + *cmdargs: str | os.PathLike[str], + timeout: float | None = None, + stdin: NotSetType | bytes | IO[Any] | int = CLOSE_STDIN, + ) -> RunResult: + """Run a command with arguments. + + Run a process using :py:class:`subprocess.Popen` saving the stdout and + stderr. + + :param cmdargs: + The sequence of arguments to pass to :py:class:`subprocess.Popen`, + with path-like objects being converted to :py:class:`str` + automatically. + :param timeout: + The period in seconds after which to timeout and raise + :py:class:`Pytester.TimeoutExpired`. + :param stdin: + Optional standard input. + + - If it is ``CLOSE_STDIN`` (Default), then this method calls + :py:class:`subprocess.Popen` with ``stdin=subprocess.PIPE``, and + the standard input is closed immediately after the new command is + started. + + - If it is of type :py:class:`bytes`, these bytes are sent to the + standard input of the command. + + - Otherwise, it is passed through to :py:class:`subprocess.Popen`. + For further information in this case, consult the document of the + ``stdin`` parameter in :py:class:`subprocess.Popen`. + :type stdin: _pytest.compat.NotSetType | bytes | IO[Any] | int + :returns: + The result. 
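+
+        A minimal illustrative sketch, assuming a test that receives the
+        ``pytester`` fixture (the printed text and expected exit code are
+        assumptions made for the example)::
+
+            import sys
+
+            result = pytester.run(sys.executable, "-c", "print('hello')")
+            assert result.ret == 0
+            result.stdout.fnmatch_lines(["hello"])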
+ + """ + __tracebackhide__ = True + + cmdargs = tuple(os.fspath(arg) for arg in cmdargs) + p1 = self.path.joinpath("stdout") + p2 = self.path.joinpath("stderr") + print("running:", *cmdargs) + print(" in:", Path.cwd()) + + with p1.open("w", encoding="utf8") as f1, p2.open("w", encoding="utf8") as f2: + instant = timing.Instant() + popen = self.popen( + cmdargs, + stdin=stdin, + stdout=f1, + stderr=f2, + ) + if popen.stdin is not None: + popen.stdin.close() + + def handle_timeout() -> None: + __tracebackhide__ = True + + timeout_message = f"{timeout} second timeout expired running: {cmdargs}" + + popen.kill() + popen.wait() + raise self.TimeoutExpired(timeout_message) + + if timeout is None: + ret = popen.wait() + else: + try: + ret = popen.wait(timeout) + except subprocess.TimeoutExpired: + handle_timeout() + f1.flush() + f2.flush() + + with p1.open(encoding="utf8") as f1, p2.open(encoding="utf8") as f2: + out = f1.read().splitlines() + err = f2.read().splitlines() + + self._dump_lines(out, sys.stdout) + self._dump_lines(err, sys.stderr) + + with contextlib.suppress(ValueError): + ret = ExitCode(ret) + return RunResult(ret, out, err, instant.elapsed().seconds) + + def _dump_lines(self, lines, fp): + try: + for line in lines: + print(line, file=fp) + except UnicodeEncodeError: + print(f"couldn't print to {fp} because of encoding") + + def _getpytestargs(self) -> tuple[str, ...]: + return sys.executable, "-mpytest" + + def runpython(self, script: os.PathLike[str]) -> RunResult: + """Run a python script using sys.executable as interpreter.""" + return self.run(sys.executable, script) + + def runpython_c(self, command: str) -> RunResult: + """Run ``python -c "command"``.""" + return self.run(sys.executable, "-c", command) + + def runpytest_subprocess( + self, *args: str | os.PathLike[str], timeout: float | None = None + ) -> RunResult: + """Run pytest as a subprocess with given arguments. + + Any plugins added to the :py:attr:`plugins` list will be added using the + ``-p`` command line option. Additionally ``--basetemp`` is used to put + any temporary files and directories in a numbered directory prefixed + with "runpytest-" to not conflict with the normal numbered pytest + location for temporary files and directories. + + :param args: + The sequence of arguments to pass to the pytest subprocess. + :param timeout: + The period in seconds after which to timeout and raise + :py:class:`Pytester.TimeoutExpired`. + :returns: + The result. + """ + __tracebackhide__ = True + p = make_numbered_dir(root=self.path, prefix="runpytest-", mode=0o700) + args = (f"--basetemp={p}", *args) + for plugin in self.plugins: + if not isinstance(plugin, str): + raise ValueError( + f"Specifying plugins as objects is not supported in pytester subprocess mode; " + f"specify by name instead: {plugin}" + ) + args = ("-p", plugin, *args) + args = self._getpytestargs() + args + return self.run(*args, timeout=timeout) + + def spawn_pytest(self, string: str, expect_timeout: float = 10.0) -> pexpect.spawn: + """Run pytest using pexpect. + + This makes sure to use the right pytest and sets up the temporary + directory locations. + + The pexpect child is returned. + """ + basetemp = self.path / "temp-pexpect" + basetemp.mkdir(mode=0o700) + invoke = " ".join(map(str, self._getpytestargs())) + cmd = f"{invoke} --basetemp={basetemp} {string}" + return self.spawn(cmd, expect_timeout=expect_timeout) + + def spawn(self, cmd: str, expect_timeout: float = 10.0) -> pexpect.spawn: + """Run a command using pexpect. 
+ + The pexpect child is returned. + """ + pexpect = importorskip("pexpect", "3.0") + if hasattr(sys, "pypy_version_info") and "64" in platform.machine(): + skip("pypy-64 bit not supported") + if not hasattr(pexpect, "spawn"): + skip("pexpect.spawn not available") + logfile = self.path.joinpath("spawn.out").open("wb") + + child = pexpect.spawn(cmd, logfile=logfile, timeout=expect_timeout) + self._request.addfinalizer(logfile.close) + return child + + +class LineComp: + def __init__(self) -> None: + self.stringio = StringIO() + """:class:`python:io.StringIO()` instance used for input.""" + + def assert_contains_lines(self, lines2: Sequence[str]) -> None: + """Assert that ``lines2`` are contained (linearly) in :attr:`stringio`'s value. + + Lines are matched using :func:`LineMatcher.fnmatch_lines `. + """ + __tracebackhide__ = True + val = self.stringio.getvalue() + self.stringio.truncate(0) + self.stringio.seek(0) + lines1 = val.split("\n") + LineMatcher(lines1).fnmatch_lines(lines2) + + +class LineMatcher: + """Flexible matching of text. + + This is a convenience class to test large texts like the output of + commands. + + The constructor takes a list of lines without their trailing newlines, i.e. + ``text.splitlines()``. + """ + + def __init__(self, lines: list[str]) -> None: + self.lines = lines + self._log_output: list[str] = [] + + def __str__(self) -> str: + """Return the entire original text. + + .. versionadded:: 6.2 + You can use :meth:`str` in older versions. + """ + return "\n".join(self.lines) + + def _getlines(self, lines2: str | Sequence[str] | Source) -> Sequence[str]: + if isinstance(lines2, str): + lines2 = Source(lines2) + if isinstance(lines2, Source): + lines2 = lines2.strip().lines + return lines2 + + def fnmatch_lines_random(self, lines2: Sequence[str]) -> None: + """Check lines exist in the output in any order (using :func:`python:fnmatch.fnmatch`).""" + __tracebackhide__ = True + self._match_lines_random(lines2, fnmatch) + + def re_match_lines_random(self, lines2: Sequence[str]) -> None: + """Check lines exist in the output in any order (using :func:`python:re.match`).""" + __tracebackhide__ = True + self._match_lines_random(lines2, lambda name, pat: bool(re.match(pat, name))) + + def _match_lines_random( + self, lines2: Sequence[str], match_func: Callable[[str, str], bool] + ) -> None: + __tracebackhide__ = True + lines2 = self._getlines(lines2) + for line in lines2: + for x in self.lines: + if line == x or match_func(x, line): + self._log("matched: ", repr(line)) + break + else: + msg = f"line {line!r} not found in output" + self._log(msg) + self._fail(msg) + + def get_lines_after(self, fnline: str) -> Sequence[str]: + """Return all lines following the given line in the text. + + The given line can contain glob wildcards. + """ + for i, line in enumerate(self.lines): + if fnline == line or fnmatch(line, fnline): + return self.lines[i + 1 :] + raise ValueError(f"line {fnline!r} not found in output") + + def _log(self, *args) -> None: + self._log_output.append(" ".join(str(x) for x in args)) + + @property + def _log_text(self) -> str: + return "\n".join(self._log_output) + + def fnmatch_lines( + self, lines2: Sequence[str], *, consecutive: bool = False + ) -> None: + """Check lines exist in the output (using :func:`python:fnmatch.fnmatch`). + + The argument is a list of lines which have to match and can use glob + wildcards. If they do not match a pytest.fail() is called. The + matches and non-matches are also shown as part of the error message. 
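+
+        An illustrative sketch (the captured lines are made up)::
+
+            matcher = LineMatcher(["collected 2 items", "2 passed in 0.01s"])
+            matcher.fnmatch_lines(["collected * items", "* passed *"])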
+ + :param lines2: String patterns to match. + :param consecutive: Match lines consecutively? + """ + __tracebackhide__ = True + self._match_lines(lines2, fnmatch, "fnmatch", consecutive=consecutive) + + def re_match_lines( + self, lines2: Sequence[str], *, consecutive: bool = False + ) -> None: + """Check lines exist in the output (using :func:`python:re.match`). + + The argument is a list of lines which have to match using ``re.match``. + If they do not match a pytest.fail() is called. + + The matches and non-matches are also shown as part of the error message. + + :param lines2: string patterns to match. + :param consecutive: match lines consecutively? + """ + __tracebackhide__ = True + self._match_lines( + lines2, + lambda name, pat: bool(re.match(pat, name)), + "re.match", + consecutive=consecutive, + ) + + def _match_lines( + self, + lines2: Sequence[str], + match_func: Callable[[str, str], bool], + match_nickname: str, + *, + consecutive: bool = False, + ) -> None: + """Underlying implementation of ``fnmatch_lines`` and ``re_match_lines``. + + :param Sequence[str] lines2: + List of string patterns to match. The actual format depends on + ``match_func``. + :param match_func: + A callable ``match_func(line, pattern)`` where line is the + captured line from stdout/stderr and pattern is the matching + pattern. + :param str match_nickname: + The nickname for the match function that will be logged to stdout + when a match occurs. + :param consecutive: + Match lines consecutively? + """ + if not isinstance(lines2, collections.abc.Sequence): + raise TypeError(f"invalid type for lines2: {type(lines2).__name__}") + lines2 = self._getlines(lines2) + lines1 = self.lines[:] + extralines = [] + __tracebackhide__ = True + wnick = len(match_nickname) + 1 + started = False + for line in lines2: + nomatchprinted = False + while lines1: + nextline = lines1.pop(0) + if line == nextline: + self._log("exact match:", repr(line)) + started = True + break + elif match_func(nextline, line): + self._log(f"{match_nickname}:", repr(line)) + self._log( + "{:>{width}}".format("with:", width=wnick), repr(nextline) + ) + started = True + break + else: + if consecutive and started: + msg = f"no consecutive match: {line!r}" + self._log(msg) + self._log( + "{:>{width}}".format("with:", width=wnick), repr(nextline) + ) + self._fail(msg) + if not nomatchprinted: + self._log( + "{:>{width}}".format("nomatch:", width=wnick), repr(line) + ) + nomatchprinted = True + self._log("{:>{width}}".format("and:", width=wnick), repr(nextline)) + extralines.append(nextline) + else: + msg = f"remains unmatched: {line!r}" + self._log(msg) + self._fail(msg) + self._log_output = [] + + def no_fnmatch_line(self, pat: str) -> None: + """Ensure captured lines do not match the given pattern, using ``fnmatch.fnmatch``. + + :param str pat: The pattern to match lines. + """ + __tracebackhide__ = True + self._no_match_line(pat, fnmatch, "fnmatch") + + def no_re_match_line(self, pat: str) -> None: + """Ensure captured lines do not match the given pattern, using ``re.match``. + + :param str pat: The regular expression to match lines. + """ + __tracebackhide__ = True + self._no_match_line( + pat, lambda name, pat: bool(re.match(pat, name)), "re.match" + ) + + def _no_match_line( + self, pat: str, match_func: Callable[[str, str], bool], match_nickname: str + ) -> None: + """Ensure captured lines does not have a the given pattern, using ``fnmatch.fnmatch``. + + :param str pat: The pattern to match lines. 
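+
+        The public entry points are ``no_fnmatch_line`` and
+        ``no_re_match_line``; an illustrative sketch with an arbitrary
+        pattern::
+
+            result.stdout.no_fnmatch_line("*unexpected error*")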
+ """ + __tracebackhide__ = True + nomatch_printed = False + wnick = len(match_nickname) + 1 + for line in self.lines: + if match_func(line, pat): + msg = f"{match_nickname}: {pat!r}" + self._log(msg) + self._log("{:>{width}}".format("with:", width=wnick), repr(line)) + self._fail(msg) + else: + if not nomatch_printed: + self._log("{:>{width}}".format("nomatch:", width=wnick), repr(pat)) + nomatch_printed = True + self._log("{:>{width}}".format("and:", width=wnick), repr(line)) + self._log_output = [] + + def _fail(self, msg: str) -> None: + __tracebackhide__ = True + log_text = self._log_text + self._log_output = [] + fail(log_text) + + def str(self) -> str: + """Return the entire original text.""" + return str(self) diff --git a/.venv/lib/python3.12/site-packages/_pytest/pytester_assertions.py b/.venv/lib/python3.12/site-packages/_pytest/pytester_assertions.py new file mode 100644 index 0000000..915cc8a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/pytester_assertions.py @@ -0,0 +1,74 @@ +"""Helper plugin for pytester; should not be loaded on its own.""" + +# This plugin contains assertions used by pytester. pytester cannot +# contain them itself, since it is imported by the `pytest` module, +# hence cannot be subject to assertion rewriting, which requires a +# module to not be already imported. +from __future__ import annotations + +from collections.abc import Sequence + +from _pytest.reports import CollectReport +from _pytest.reports import TestReport + + +def assertoutcome( + outcomes: tuple[ + Sequence[TestReport], + Sequence[CollectReport | TestReport], + Sequence[CollectReport | TestReport], + ], + passed: int = 0, + skipped: int = 0, + failed: int = 0, +) -> None: + __tracebackhide__ = True + + realpassed, realskipped, realfailed = outcomes + obtained = { + "passed": len(realpassed), + "skipped": len(realskipped), + "failed": len(realfailed), + } + expected = {"passed": passed, "skipped": skipped, "failed": failed} + assert obtained == expected, outcomes + + +def assert_outcomes( + outcomes: dict[str, int], + passed: int = 0, + skipped: int = 0, + failed: int = 0, + errors: int = 0, + xpassed: int = 0, + xfailed: int = 0, + warnings: int | None = None, + deselected: int | None = None, +) -> None: + """Assert that the specified outcomes appear with the respective + numbers (0 means it didn't occur) in the text output from a test run.""" + __tracebackhide__ = True + + obtained = { + "passed": outcomes.get("passed", 0), + "skipped": outcomes.get("skipped", 0), + "failed": outcomes.get("failed", 0), + "errors": outcomes.get("errors", 0), + "xpassed": outcomes.get("xpassed", 0), + "xfailed": outcomes.get("xfailed", 0), + } + expected = { + "passed": passed, + "skipped": skipped, + "failed": failed, + "errors": errors, + "xpassed": xpassed, + "xfailed": xfailed, + } + if warnings is not None: + obtained["warnings"] = outcomes.get("warnings", 0) + expected["warnings"] = warnings + if deselected is not None: + obtained["deselected"] = outcomes.get("deselected", 0) + expected["deselected"] = deselected + assert obtained == expected diff --git a/.venv/lib/python3.12/site-packages/_pytest/python.py b/.venv/lib/python3.12/site-packages/_pytest/python.py new file mode 100644 index 0000000..e637518 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/python.py @@ -0,0 +1,1772 @@ +# mypy: allow-untyped-defs +"""Python test discovery, setup and run of test functions.""" + +from __future__ import annotations + +import abc +from collections import Counter +from 
collections import defaultdict +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Mapping +from collections.abc import Sequence +import dataclasses +import enum +import fnmatch +from functools import partial +import inspect +import itertools +import os +from pathlib import Path +import re +import textwrap +import types +from typing import Any +from typing import cast +from typing import final +from typing import Literal +from typing import NoReturn +from typing import TYPE_CHECKING +import warnings + +import _pytest +from _pytest import fixtures +from _pytest import nodes +from _pytest._code import filter_traceback +from _pytest._code import getfslineno +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import TerminalRepr +from _pytest._code.code import Traceback +from _pytest._io.saferepr import saferepr +from _pytest.compat import ascii_escaped +from _pytest.compat import get_default_arg_names +from _pytest.compat import get_real_func +from _pytest.compat import getimfunc +from _pytest.compat import is_async_function +from _pytest.compat import LEGACY_PATH +from _pytest.compat import NOTSET +from _pytest.compat import safe_getattr +from _pytest.compat import safe_isclass +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import FixtureDef +from _pytest.fixtures import FixtureRequest +from _pytest.fixtures import FuncFixtureInfo +from _pytest.fixtures import get_scope_node +from _pytest.main import Session +from _pytest.mark import ParameterSet +from _pytest.mark.structures import _HiddenParam +from _pytest.mark.structures import get_unpacked_marks +from _pytest.mark.structures import HIDDEN_PARAM +from _pytest.mark.structures import Mark +from _pytest.mark.structures import MarkDecorator +from _pytest.mark.structures import normalize_mark_list +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.pathlib import fnmatch_ex +from _pytest.pathlib import import_path +from _pytest.pathlib import ImportPathMismatchError +from _pytest.pathlib import scandir +from _pytest.scope import _ScopeName +from _pytest.scope import Scope +from _pytest.stash import StashKey +from _pytest.warning_types import PytestCollectionWarning +from _pytest.warning_types import PytestReturnNotNoneWarning + + +if TYPE_CHECKING: + from typing_extensions import Self + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "python_files", + type="args", + # NOTE: default is also used in AssertionRewritingHook. + default=["test_*.py", "*_test.py"], + help="Glob-style file patterns for Python test module discovery", + ) + parser.addini( + "python_classes", + type="args", + default=["Test"], + help="Prefixes or glob names for Python test class discovery", + ) + parser.addini( + "python_functions", + type="args", + default=["test"], + help="Prefixes or glob names for Python test function and method discovery", + ) + parser.addini( + "disable_test_id_escaping_and_forfeit_all_rights_to_community_support", + type="bool", + default=False, + help="Disable string escape non-ASCII characters, might cause unwanted " + "side effects(use at your own risk)", + ) + parser.addini( + "strict_parametrization_ids", + type="bool", + # None => fallback to `strict`. 
+ default=None, + help="Emit an error if non-unique parameter set IDs are detected", + ) + + +def pytest_generate_tests(metafunc: Metafunc) -> None: + for marker in metafunc.definition.iter_markers(name="parametrize"): + metafunc.parametrize(*marker.args, **marker.kwargs, _param_mark=marker) + + +def pytest_configure(config: Config) -> None: + config.addinivalue_line( + "markers", + "parametrize(argnames, argvalues): call a test function multiple " + "times passing in different arguments in turn. argvalues generally " + "needs to be a list of values if argnames specifies only one name " + "or a list of tuples of values if argnames specifies multiple names. " + "Example: @parametrize('arg1', [1,2]) would lead to two calls of the " + "decorated test function, one with arg1=1 and another with arg1=2." + "see https://docs.pytest.org/en/stable/how-to/parametrize.html for more info " + "and examples.", + ) + config.addinivalue_line( + "markers", + "usefixtures(fixturename1, fixturename2, ...): mark tests as needing " + "all of the specified fixtures. see " + "https://docs.pytest.org/en/stable/explanation/fixtures.html#usefixtures ", + ) + + +def async_fail(nodeid: str) -> None: + msg = ( + "async def functions are not natively supported.\n" + "You need to install a suitable plugin for your async framework, for example:\n" + " - anyio\n" + " - pytest-asyncio\n" + " - pytest-tornasync\n" + " - pytest-trio\n" + " - pytest-twisted" + ) + fail(msg, pytrace=False) + + +@hookimpl(trylast=True) +def pytest_pyfunc_call(pyfuncitem: Function) -> object | None: + testfunction = pyfuncitem.obj + if is_async_function(testfunction): + async_fail(pyfuncitem.nodeid) + funcargs = pyfuncitem.funcargs + testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} + result = testfunction(**testargs) + if hasattr(result, "__await__") or hasattr(result, "__aiter__"): + async_fail(pyfuncitem.nodeid) + elif result is not None: + warnings.warn( + PytestReturnNotNoneWarning( + f"Test functions should return None, but {pyfuncitem.nodeid} returned {type(result)!r}.\n" + "Did you mean to use `assert` instead of `return`?\n" + "See https://docs.pytest.org/en/stable/how-to/assert.html#return-not-none for more information." + ) + ) + return True + + +def pytest_collect_directory( + path: Path, parent: nodes.Collector +) -> nodes.Collector | None: + pkginit = path / "__init__.py" + try: + has_pkginit = pkginit.is_file() + except PermissionError: + # See https://github.com/pytest-dev/pytest/issues/12120#issuecomment-2106349096. 
+ return None + if has_pkginit: + return Package.from_parent(parent, path=path) + return None + + +def pytest_collect_file(file_path: Path, parent: nodes.Collector) -> Module | None: + if file_path.suffix == ".py": + if not parent.session.isinitpath(file_path): + if not path_matches_patterns( + file_path, parent.config.getini("python_files") + ): + return None + ihook = parent.session.gethookproxy(file_path) + module: Module = ihook.pytest_pycollect_makemodule( + module_path=file_path, parent=parent + ) + return module + return None + + +def path_matches_patterns(path: Path, patterns: Iterable[str]) -> bool: + """Return whether path matches any of the patterns in the list of globs given.""" + return any(fnmatch_ex(pattern, path) for pattern in patterns) + + +def pytest_pycollect_makemodule(module_path: Path, parent) -> Module: + return Module.from_parent(parent, path=module_path) + + +@hookimpl(trylast=True) +def pytest_pycollect_makeitem( + collector: Module | Class, name: str, obj: object +) -> None | nodes.Item | nodes.Collector | list[nodes.Item | nodes.Collector]: + assert isinstance(collector, Class | Module), type(collector) + # Nothing was collected elsewhere, let's do it here. + if safe_isclass(obj): + if collector.istestclass(obj, name): + return Class.from_parent(collector, name=name, obj=obj) + elif collector.istestfunction(obj, name): + # mock seems to store unbound methods (issue473), normalize it. + obj = getattr(obj, "__func__", obj) + # We need to try and unwrap the function if it's a functools.partial + # or a functools.wrapped. + # We mustn't if it's been wrapped with mock.patch (python 2 only). + if not (inspect.isfunction(obj) or inspect.isfunction(get_real_func(obj))): + filename, lineno = getfslineno(obj) + warnings.warn_explicit( + message=PytestCollectionWarning( + f"cannot collect {name!r} because it is not a function." + ), + category=None, + filename=str(filename), + lineno=lineno + 1, + ) + elif getattr(obj, "__test__", True): + if inspect.isgeneratorfunction(obj): + fail( + f"'yield' keyword is allowed in fixtures, but not in tests ({name})", + pytrace=False, + ) + return list(collector._genfunctions(name, obj)) + return None + return None + + +class PyobjMixin(nodes.Node): + """this mix-in inherits from Node to carry over the typing information + + as its intended to always mix in before a node + its position in the mro is unaffected""" + + _ALLOW_MARKERS = True + + @property + def module(self): + """Python module object this node was collected from (can be None).""" + node = self.getparent(Module) + return node.obj if node is not None else None + + @property + def cls(self): + """Python class object this node was collected from (can be None).""" + node = self.getparent(Class) + return node.obj if node is not None else None + + @property + def instance(self): + """Python instance object the function is bound to. + + Returns None if not a test method, e.g. for a standalone test function, + a class or a module. + """ + # Overridden by Function. + return None + + @property + def obj(self): + """Underlying Python object.""" + obj = getattr(self, "_obj", None) + if obj is None: + self._obj = obj = self._getobj() + # XXX evil hack + # used to avoid Function marker duplication + if self._ALLOW_MARKERS: + self.own_markers.extend(get_unpacked_marks(self.obj)) + # This assumes that `obj` is called before there is a chance + # to add custom keys to `self.keywords`, so no fear of overriding. 
+ self.keywords.update((mark.name, mark) for mark in self.own_markers) + return obj + + @obj.setter + def obj(self, value): + self._obj = value + + def _getobj(self): + """Get the underlying Python object. May be overwritten by subclasses.""" + # TODO: Improve the type of `parent` such that assert/ignore aren't needed. + assert self.parent is not None + obj = self.parent.obj # type: ignore[attr-defined] + return getattr(obj, self.name) + + def getmodpath(self, stopatmodule: bool = True, includemodule: bool = False) -> str: + """Return Python path relative to the containing module.""" + parts = [] + for node in self.iter_parents(): + name = node.name + if isinstance(node, Module): + name = os.path.splitext(name)[0] + if stopatmodule: + if includemodule: + parts.append(name) + break + parts.append(name) + parts.reverse() + return ".".join(parts) + + def reportinfo(self) -> tuple[os.PathLike[str] | str, int | None, str]: + # XXX caching? + path, lineno = getfslineno(self.obj) + modpath = self.getmodpath() + return path, lineno, modpath + + +# As an optimization, these builtin attribute names are pre-ignored when +# iterating over an object during collection -- the pytest_pycollect_makeitem +# hook is not called for them. +# fmt: off +class _EmptyClass: pass # noqa: E701 +IGNORED_ATTRIBUTES = frozenset.union( + frozenset(), + # Module. + dir(types.ModuleType("empty_module")), + # Some extra module attributes the above doesn't catch. + {"__builtins__", "__file__", "__cached__"}, + # Class. + dir(_EmptyClass), + # Instance. + dir(_EmptyClass()), +) +del _EmptyClass +# fmt: on + + +class PyCollector(PyobjMixin, nodes.Collector, abc.ABC): + def funcnamefilter(self, name: str) -> bool: + return self._matches_prefix_or_glob_option("python_functions", name) + + def isnosetest(self, obj: object) -> bool: + """Look for the __test__ attribute, which is applied by the + @nose.tools.istest decorator. + """ + # We explicitly check for "is True" here to not mistakenly treat + # classes with a custom __getattr__ returning something truthy (like a + # function) as test classes. + return safe_getattr(obj, "__test__", False) is True + + def classnamefilter(self, name: str) -> bool: + return self._matches_prefix_or_glob_option("python_classes", name) + + def istestfunction(self, obj: object, name: str) -> bool: + if self.funcnamefilter(name) or self.isnosetest(obj): + if isinstance(obj, staticmethod | classmethod): + # staticmethods and classmethods need to be unwrapped. + obj = safe_getattr(obj, "__func__", False) + return callable(obj) and fixtures.getfixturemarker(obj) is None + else: + return False + + def istestclass(self, obj: object, name: str) -> bool: + if not (self.classnamefilter(name) or self.isnosetest(obj)): + return False + if inspect.isabstract(obj): + return False + return True + + def _matches_prefix_or_glob_option(self, option_name: str, name: str) -> bool: + """Check if the given name matches the prefix or glob-pattern defined + in configuration.""" + for option in self.config.getini(option_name): + if name.startswith(option): + return True + # Check that name looks like a glob-string before calling fnmatch + # because this is called for every name in each collected module, + # and fnmatch is somewhat expensive to call. + elif ("*" in option or "?" 
in option or "[" in option) and fnmatch.fnmatch( + name, option + ): + return True + return False + + def collect(self) -> Iterable[nodes.Item | nodes.Collector]: + if not getattr(self.obj, "__test__", True): + return [] + + # Avoid random getattrs and peek in the __dict__ instead. + dicts = [getattr(self.obj, "__dict__", {})] + if isinstance(self.obj, type): + for basecls in self.obj.__mro__: + dicts.append(basecls.__dict__) + + # In each class, nodes should be definition ordered. + # __dict__ is definition ordered. + seen: set[str] = set() + dict_values: list[list[nodes.Item | nodes.Collector]] = [] + collect_imported_tests = self.session.config.getini("collect_imported_tests") + ihook = self.ihook + for dic in dicts: + values: list[nodes.Item | nodes.Collector] = [] + # Note: seems like the dict can change during iteration - + # be careful not to remove the list() without consideration. + for name, obj in list(dic.items()): + if name in IGNORED_ATTRIBUTES: + continue + if name in seen: + continue + seen.add(name) + + if not collect_imported_tests and isinstance(self, Module): + # Do not collect functions and classes from other modules. + if inspect.isfunction(obj) or inspect.isclass(obj): + if obj.__module__ != self._getobj().__name__: + continue + + res = ihook.pytest_pycollect_makeitem( + collector=self, name=name, obj=obj + ) + if res is None: + continue + elif isinstance(res, list): + values.extend(res) + else: + values.append(res) + dict_values.append(values) + + # Between classes in the class hierarchy, reverse-MRO order -- nodes + # inherited from base classes should come before subclasses. + result = [] + for values in reversed(dict_values): + result.extend(values) + return result + + def _genfunctions(self, name: str, funcobj) -> Iterator[Function]: + modulecol = self.getparent(Module) + assert modulecol is not None + module = modulecol.obj + clscol = self.getparent(Class) + cls = (clscol and clscol.obj) or None + + definition = FunctionDefinition.from_parent(self, name=name, callobj=funcobj) + fixtureinfo = definition._fixtureinfo + + # pytest_generate_tests impls call metafunc.parametrize() which fills + # metafunc._calls, the outcome of the hook. + metafunc = Metafunc( + definition=definition, + fixtureinfo=fixtureinfo, + config=self.config, + cls=cls, + module=module, + _ispytest=True, + ) + methods = [] + if hasattr(module, "pytest_generate_tests"): + methods.append(module.pytest_generate_tests) + if cls is not None and hasattr(cls, "pytest_generate_tests"): + methods.append(cls().pytest_generate_tests) + self.ihook.pytest_generate_tests.call_extra(methods, dict(metafunc=metafunc)) + + if not metafunc._calls: + yield Function.from_parent(self, name=name, fixtureinfo=fixtureinfo) + else: + metafunc._recompute_direct_params_indices() + # Direct parametrizations taking place in module/class-specific + # `metafunc.parametrize` calls may have shadowed some fixtures, so make sure + # we update what the function really needs a.k.a its fixture closure. Note that + # direct parametrizations using `@pytest.mark.parametrize` have already been considered + # into making the closure using `ignore_args` arg to `getfixtureclosure`. 
+ fixtureinfo.prune_dependency_tree() + + for callspec in metafunc._calls: + subname = f"{name}[{callspec.id}]" if callspec._idlist else name + yield Function.from_parent( + self, + name=subname, + callspec=callspec, + fixtureinfo=fixtureinfo, + keywords={callspec.id: True}, + originalname=name, + ) + + +def importtestmodule( + path: Path, + config: Config, +): + # We assume we are only called once per module. + importmode = config.getoption("--import-mode") + try: + mod = import_path( + path, + mode=importmode, + root=config.rootpath, + consider_namespace_packages=config.getini("consider_namespace_packages"), + ) + except SyntaxError as e: + raise nodes.Collector.CollectError( + ExceptionInfo.from_current().getrepr(style="short") + ) from e + except ImportPathMismatchError as e: + raise nodes.Collector.CollectError( + "import file mismatch:\n" + "imported module {!r} has this __file__ attribute:\n" + " {}\n" + "which is not the same as the test file we want to collect:\n" + " {}\n" + "HINT: remove __pycache__ / .pyc files and/or use a " + "unique basename for your test file modules".format(*e.args) + ) from e + except ImportError as e: + exc_info = ExceptionInfo.from_current() + if config.get_verbosity() < 2: + exc_info.traceback = exc_info.traceback.filter(filter_traceback) + exc_repr = ( + exc_info.getrepr(style="short") + if exc_info.traceback + else exc_info.exconly() + ) + formatted_tb = str(exc_repr) + raise nodes.Collector.CollectError( + f"ImportError while importing test module '{path}'.\n" + "Hint: make sure your test modules/packages have valid Python names.\n" + "Traceback:\n" + f"{formatted_tb}" + ) from e + except skip.Exception as e: + if e.allow_module_level: + raise + raise nodes.Collector.CollectError( + "Using pytest.skip outside of a test will skip the entire module. " + "If that's your intention, pass `allow_module_level=True`. " + "If you want to skip a specific test or an entire class, " + "use the @pytest.mark.skip or @pytest.mark.skipif decorators." + ) from e + config.pluginmanager.consider_module(mod) + return mod + + +class Module(nodes.File, PyCollector): + """Collector for test classes and functions in a Python module.""" + + def _getobj(self): + return importtestmodule(self.path, self.config) + + def collect(self) -> Iterable[nodes.Item | nodes.Collector]: + self._register_setup_module_fixture() + self._register_setup_function_fixture() + self.session._fixturemanager.parsefactories(self) + return super().collect() + + def _register_setup_module_fixture(self) -> None: + """Register an autouse, module-scoped fixture for the collected module object + that invokes setUpModule/tearDownModule if either or both are available. + + Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + setup_module = _get_first_non_fixture_func( + self.obj, ("setUpModule", "setup_module") + ) + teardown_module = _get_first_non_fixture_func( + self.obj, ("tearDownModule", "teardown_module") + ) + + if setup_module is None and teardown_module is None: + return + + def xunit_setup_module_fixture(request) -> Generator[None]: + module = request.module + if setup_module is not None: + _call_with_optional_argument(setup_module, module) + yield + if teardown_module is not None: + _call_with_optional_argument(teardown_module, module) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. 
+ name=f"_xunit_setup_module_fixture_{self.obj.__name__}", + func=xunit_setup_module_fixture, + nodeid=self.nodeid, + scope="module", + autouse=True, + ) + + def _register_setup_function_fixture(self) -> None: + """Register an autouse, function-scoped fixture for the collected module object + that invokes setup_function/teardown_function if either or both are available. + + Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + setup_function = _get_first_non_fixture_func(self.obj, ("setup_function",)) + teardown_function = _get_first_non_fixture_func( + self.obj, ("teardown_function",) + ) + if setup_function is None and teardown_function is None: + return + + def xunit_setup_function_fixture(request) -> Generator[None]: + if request.instance is not None: + # in this case we are bound to an instance, so we need to let + # setup_method handle this + yield + return + function = request.function + if setup_function is not None: + _call_with_optional_argument(setup_function, function) + yield + if teardown_function is not None: + _call_with_optional_argument(teardown_function, function) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. + name=f"_xunit_setup_function_fixture_{self.obj.__name__}", + func=xunit_setup_function_fixture, + nodeid=self.nodeid, + scope="function", + autouse=True, + ) + + +class Package(nodes.Directory): + """Collector for files and directories in a Python packages -- directories + with an `__init__.py` file. + + .. note:: + + Directories without an `__init__.py` file are instead collected by + :class:`~pytest.Dir` by default. Both are :class:`~pytest.Directory` + collectors. + + .. versionchanged:: 8.0 + + Now inherits from :class:`~pytest.Directory`. + """ + + def __init__( + self, + fspath: LEGACY_PATH | None, + parent: nodes.Collector, + # NOTE: following args are unused: + config=None, + session=None, + nodeid=None, + path: Path | None = None, + ) -> None: + # NOTE: Could be just the following, but kept as-is for compat. + # super().__init__(self, fspath, parent=parent) + session = parent.session + super().__init__( + fspath=fspath, + path=path, + parent=parent, + config=config, + session=session, + nodeid=nodeid, + ) + + def setup(self) -> None: + init_mod = importtestmodule(self.path / "__init__.py", self.config) + + # Not using fixtures to call setup_module here because autouse fixtures + # from packages are not called automatically (#4085). + setup_module = _get_first_non_fixture_func( + init_mod, ("setUpModule", "setup_module") + ) + if setup_module is not None: + _call_with_optional_argument(setup_module, init_mod) + + teardown_module = _get_first_non_fixture_func( + init_mod, ("tearDownModule", "teardown_module") + ) + if teardown_module is not None: + func = partial(_call_with_optional_argument, teardown_module, init_mod) + self.addfinalizer(func) + + def collect(self) -> Iterable[nodes.Item | nodes.Collector]: + # Always collect __init__.py first. 
+ def sort_key(entry: os.DirEntry[str]) -> object: + return (entry.name != "__init__.py", entry.name) + + config = self.config + col: nodes.Collector | None + cols: Sequence[nodes.Collector] + ihook = self.ihook + for direntry in scandir(self.path, sort_key): + if direntry.is_dir(): + path = Path(direntry.path) + if not self.session.isinitpath(path, with_parents=True): + if ihook.pytest_ignore_collect(collection_path=path, config=config): + continue + col = ihook.pytest_collect_directory(path=path, parent=self) + if col is not None: + yield col + + elif direntry.is_file(): + path = Path(direntry.path) + if not self.session.isinitpath(path): + if ihook.pytest_ignore_collect(collection_path=path, config=config): + continue + cols = ihook.pytest_collect_file(file_path=path, parent=self) + yield from cols + + +def _call_with_optional_argument(func, arg) -> None: + """Call the given function with the given argument if func accepts one argument, otherwise + calls func without arguments.""" + arg_count = func.__code__.co_argcount + if inspect.ismethod(func): + arg_count -= 1 + if arg_count: + func(arg) + else: + func() + + +def _get_first_non_fixture_func(obj: object, names: Iterable[str]) -> object | None: + """Return the attribute from the given object to be used as a setup/teardown + xunit-style function, but only if not marked as a fixture to avoid calling it twice. + """ + for name in names: + meth: object | None = getattr(obj, name, None) + if meth is not None and fixtures.getfixturemarker(meth) is None: + return meth + return None + + +class Class(PyCollector): + """Collector for test methods (and nested classes) in a Python class.""" + + @classmethod + def from_parent(cls, parent, *, name, obj=None, **kw) -> Self: # type: ignore[override] + """The public constructor.""" + return super().from_parent(name=name, parent=parent, **kw) + + def newinstance(self): + return self.obj() + + def collect(self) -> Iterable[nodes.Item | nodes.Collector]: + if not safe_getattr(self.obj, "__test__", True): + return [] + if hasinit(self.obj): + assert self.parent is not None + self.warn( + PytestCollectionWarning( + f"cannot collect test class {self.obj.__name__!r} because it has a " + f"__init__ constructor (from: {self.parent.nodeid})" + ) + ) + return [] + elif hasnew(self.obj): + assert self.parent is not None + self.warn( + PytestCollectionWarning( + f"cannot collect test class {self.obj.__name__!r} because it has a " + f"__new__ constructor (from: {self.parent.nodeid})" + ) + ) + return [] + + self._register_setup_class_fixture() + self._register_setup_method_fixture() + + self.session._fixturemanager.parsefactories(self.newinstance(), self.nodeid) + + return super().collect() + + def _register_setup_class_fixture(self) -> None: + """Register an autouse, class scoped fixture into the collected class object + that invokes setup_class/teardown_class if either or both are available. + + Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with + other fixtures (#517). 
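+
+        The xunit-style hooks supported here look like the following
+        illustrative sketch (the class and attribute names are hypothetical)::
+
+            class TestResource:
+                @classmethod
+                def setup_class(cls):
+                    cls.resource = object()  # hypothetical shared resource
+
+                @classmethod
+                def teardown_class(cls):
+                    cls.resource = None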
+ """ + setup_class = _get_first_non_fixture_func(self.obj, ("setup_class",)) + teardown_class = _get_first_non_fixture_func(self.obj, ("teardown_class",)) + if setup_class is None and teardown_class is None: + return + + def xunit_setup_class_fixture(request) -> Generator[None]: + cls = request.cls + if setup_class is not None: + func = getimfunc(setup_class) + _call_with_optional_argument(func, cls) + yield + if teardown_class is not None: + func = getimfunc(teardown_class) + _call_with_optional_argument(func, cls) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. + name=f"_xunit_setup_class_fixture_{self.obj.__qualname__}", + func=xunit_setup_class_fixture, + nodeid=self.nodeid, + scope="class", + autouse=True, + ) + + def _register_setup_method_fixture(self) -> None: + """Register an autouse, function scoped fixture into the collected class object + that invokes setup_method/teardown_method if either or both are available. + + Using a fixture to invoke these methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + setup_name = "setup_method" + setup_method = _get_first_non_fixture_func(self.obj, (setup_name,)) + teardown_name = "teardown_method" + teardown_method = _get_first_non_fixture_func(self.obj, (teardown_name,)) + if setup_method is None and teardown_method is None: + return + + def xunit_setup_method_fixture(request) -> Generator[None]: + instance = request.instance + method = request.function + if setup_method is not None: + func = getattr(instance, setup_name) + _call_with_optional_argument(func, method) + yield + if teardown_method is not None: + func = getattr(instance, teardown_name) + _call_with_optional_argument(func, method) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. + name=f"_xunit_setup_method_fixture_{self.obj.__qualname__}", + func=xunit_setup_method_fixture, + nodeid=self.nodeid, + scope="function", + autouse=True, + ) + + +def hasinit(obj: object) -> bool: + init: object = getattr(obj, "__init__", None) + if init: + return init != object.__init__ + return False + + +def hasnew(obj: object) -> bool: + new: object = getattr(obj, "__new__", None) + if new: + return new != object.__new__ + return False + + +@final +@dataclasses.dataclass(frozen=True) +class IdMaker: + """Make IDs for a parametrization.""" + + __slots__ = ( + "argnames", + "config", + "func_name", + "idfn", + "ids", + "nodeid", + "parametersets", + ) + + # The argnames of the parametrization. + argnames: Sequence[str] + # The ParameterSets of the parametrization. + parametersets: Sequence[ParameterSet] + # Optionally, a user-provided callable to make IDs for parameters in a + # ParameterSet. + idfn: Callable[[Any], object | None] | None + # Optionally, explicit IDs for ParameterSets by index. + ids: Sequence[object | None] | None + # Optionally, the pytest config. + # Used for controlling ASCII escaping, determining parametrization ID + # strictness, and for calling the :hook:`pytest_make_parametrize_id` hook. + config: Config | None + # Optionally, the ID of the node being parametrized. + # Used only for clearer error messages. + nodeid: str | None + # Optionally, the ID of the function being parametrized. + # Used only for clearer error messages. + func_name: str | None + + def make_unique_parameterset_ids(self) -> list[str | _HiddenParam]: + """Make a unique identifier for each ParameterSet, that may be used to + identify the parametrization in a node ID. 
+ + If strict_parametrization_ids is enabled, and duplicates are detected, + raises CollectError. Otherwise makes the IDs unique as follows: + + Format is -...-[counter], where prm_x_token is + - user-provided id, if given + - else an id derived from the value, applicable for certain types + - else + The counter suffix is appended only in case a string wouldn't be unique + otherwise. + """ + resolved_ids = list(self._resolve_ids()) + # All IDs must be unique! + if len(resolved_ids) != len(set(resolved_ids)): + # Record the number of occurrences of each ID. + id_counts = Counter(resolved_ids) + + if self._strict_parametrization_ids_enabled(): + parameters = ", ".join(self.argnames) + parametersets = ", ".join( + [saferepr(list(param.values)) for param in self.parametersets] + ) + ids = ", ".join( + id if id is not HIDDEN_PARAM else "" for id in resolved_ids + ) + duplicates = ", ".join( + id if id is not HIDDEN_PARAM else "" + for id, count in id_counts.items() + if count > 1 + ) + msg = textwrap.dedent(f""" + Duplicate parametrization IDs detected, but strict_parametrization_ids is set. + + Test name: {self.nodeid} + Parameters: {parameters} + Parameter sets: {parametersets} + IDs: {ids} + Duplicates: {duplicates} + + You can fix this problem using `@pytest.mark.parametrize(..., ids=...)` or `pytest.param(..., id=...)`. + """).strip() # noqa: E501 + raise nodes.Collector.CollectError(msg) + + # Map the ID to its next suffix. + id_suffixes: dict[str, int] = defaultdict(int) + # Suffix non-unique IDs to make them unique. + for index, id in enumerate(resolved_ids): + if id_counts[id] > 1: + if id is HIDDEN_PARAM: + self._complain_multiple_hidden_parameter_sets() + suffix = "" + if id and id[-1].isdigit(): + suffix = "_" + new_id = f"{id}{suffix}{id_suffixes[id]}" + while new_id in set(resolved_ids): + id_suffixes[id] += 1 + new_id = f"{id}{suffix}{id_suffixes[id]}" + resolved_ids[index] = new_id + id_suffixes[id] += 1 + assert len(resolved_ids) == len(set(resolved_ids)), ( + f"Internal error: {resolved_ids=}" + ) + return resolved_ids + + def _strict_parametrization_ids_enabled(self) -> bool: + if self.config is None: + return False + strict_parametrization_ids = self.config.getini("strict_parametrization_ids") + if strict_parametrization_ids is None: + strict_parametrization_ids = self.config.getini("strict") + return cast(bool, strict_parametrization_ids) + + def _resolve_ids(self) -> Iterable[str | _HiddenParam]: + """Resolve IDs for all ParameterSets (may contain duplicates).""" + for idx, parameterset in enumerate(self.parametersets): + if parameterset.id is not None: + # ID provided directly - pytest.param(..., id="...") + if parameterset.id is HIDDEN_PARAM: + yield HIDDEN_PARAM + else: + yield _ascii_escaped_by_config(parameterset.id, self.config) + elif self.ids and idx < len(self.ids) and self.ids[idx] is not None: + # ID provided in the IDs list - parametrize(..., ids=[...]). + if self.ids[idx] is HIDDEN_PARAM: + yield HIDDEN_PARAM + else: + yield self._idval_from_value_required(self.ids[idx], idx) + else: + # ID not provided - generate it. 
+ yield "-".join( + self._idval(val, argname, idx) + for val, argname in zip( + parameterset.values, self.argnames, strict=True + ) + ) + + def _idval(self, val: object, argname: str, idx: int) -> str: + """Make an ID for a parameter in a ParameterSet.""" + idval = self._idval_from_function(val, argname, idx) + if idval is not None: + return idval + idval = self._idval_from_hook(val, argname) + if idval is not None: + return idval + idval = self._idval_from_value(val) + if idval is not None: + return idval + return self._idval_from_argname(argname, idx) + + def _idval_from_function(self, val: object, argname: str, idx: int) -> str | None: + """Try to make an ID for a parameter in a ParameterSet using the + user-provided id callable, if given.""" + if self.idfn is None: + return None + try: + id = self.idfn(val) + except Exception as e: + prefix = f"{self.nodeid}: " if self.nodeid is not None else "" + msg = "error raised while trying to determine id of parameter '{}' at position {}" + msg = prefix + msg.format(argname, idx) + raise ValueError(msg) from e + if id is None: + return None + return self._idval_from_value(id) + + def _idval_from_hook(self, val: object, argname: str) -> str | None: + """Try to make an ID for a parameter in a ParameterSet by calling the + :hook:`pytest_make_parametrize_id` hook.""" + if self.config: + id: str | None = self.config.hook.pytest_make_parametrize_id( + config=self.config, val=val, argname=argname + ) + return id + return None + + def _idval_from_value(self, val: object) -> str | None: + """Try to make an ID for a parameter in a ParameterSet from its value, + if the value type is supported.""" + if isinstance(val, str | bytes): + return _ascii_escaped_by_config(val, self.config) + elif val is None or isinstance(val, float | int | bool | complex): + return str(val) + elif isinstance(val, re.Pattern): + return ascii_escaped(val.pattern) + elif val is NOTSET: + # Fallback to default. Note that NOTSET is an enum.Enum. + pass + elif isinstance(val, enum.Enum): + return str(val) + elif isinstance(getattr(val, "__name__", None), str): + # Name of a class, function, module, etc. + name: str = getattr(val, "__name__") + return name + return None + + def _idval_from_value_required(self, val: object, idx: int) -> str: + """Like _idval_from_value(), but fails if the type is not supported.""" + id = self._idval_from_value(val) + if id is not None: + return id + + # Fail. + prefix = self._make_error_prefix() + msg = ( + f"{prefix}ids contains unsupported value {saferepr(val)} (type: {type(val)!r}) at index {idx}. " + "Supported types are: str, bytes, int, float, complex, bool, enum, regex or anything with a __name__." + ) + fail(msg, pytrace=False) + + @staticmethod + def _idval_from_argname(argname: str, idx: int) -> str: + """Make an ID for a parameter in a ParameterSet from the argument name + and the index of the ParameterSet.""" + return str(argname) + str(idx) + + def _complain_multiple_hidden_parameter_sets(self) -> NoReturn: + fail( + f"{self._make_error_prefix()}multiple instances of HIDDEN_PARAM " + "cannot be used in the same parametrize call, " + "because the tests names need to be unique." + ) + + def _make_error_prefix(self) -> str: + if self.func_name is not None: + return f"In {self.func_name}: " + elif self.nodeid is not None: + return f"In {self.nodeid}: " + else: + return "" + + +@final +@dataclasses.dataclass(frozen=True) +class CallSpec2: + """A planned parameterized invocation of a test function. 
+ + Calculated during collection for a given test function's Metafunc. + Once collection is over, each callspec is turned into a single Item + and stored in item.callspec. + """ + + # arg name -> arg value which will be passed to a fixture or pseudo-fixture + # of the same name. (indirect or direct parametrization respectively) + params: dict[str, object] = dataclasses.field(default_factory=dict) + # arg name -> arg index. + indices: dict[str, int] = dataclasses.field(default_factory=dict) + # arg name -> parameter scope. + # Used for sorting parametrized resources. + _arg2scope: Mapping[str, Scope] = dataclasses.field(default_factory=dict) + # Parts which will be added to the item's name in `[..]` separated by "-". + _idlist: Sequence[str] = dataclasses.field(default_factory=tuple) + # Marks which will be applied to the item. + marks: list[Mark] = dataclasses.field(default_factory=list) + + def setmulti( + self, + *, + argnames: Iterable[str], + valset: Iterable[object], + id: str | _HiddenParam, + marks: Iterable[Mark | MarkDecorator], + scope: Scope, + param_index: int, + nodeid: str, + ) -> CallSpec2: + params = self.params.copy() + indices = self.indices.copy() + arg2scope = dict(self._arg2scope) + for arg, val in zip(argnames, valset, strict=True): + if arg in params: + raise nodes.Collector.CollectError( + f"{nodeid}: duplicate parametrization of {arg!r}" + ) + params[arg] = val + indices[arg] = param_index + arg2scope[arg] = scope + return CallSpec2( + params=params, + indices=indices, + _arg2scope=arg2scope, + _idlist=self._idlist if id is HIDDEN_PARAM else [*self._idlist, id], + marks=[*self.marks, *normalize_mark_list(marks)], + ) + + def getparam(self, name: str) -> object: + try: + return self.params[name] + except KeyError as e: + raise ValueError(name) from e + + @property + def id(self) -> str: + return "-".join(self._idlist) + + +def get_direct_param_fixture_func(request: FixtureRequest) -> Any: + return request.param + + +# Used for storing pseudo fixturedefs for direct parametrization. +name2pseudofixturedef_key = StashKey[dict[str, FixtureDef[Any]]]() + + +@final +class Metafunc: + """Objects passed to the :hook:`pytest_generate_tests` hook. + + They help to inspect a test function and to generate tests according to + test configuration or values specified in the class or module where a + test function is defined. + """ + + def __init__( + self, + definition: FunctionDefinition, + fixtureinfo: fixtures.FuncFixtureInfo, + config: Config, + cls=None, + module=None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + + #: Access to the underlying :class:`_pytest.python.FunctionDefinition`. + self.definition = definition + + #: Access to the :class:`pytest.Config` object for the test session. + self.config = config + + #: The module object where the test function is defined in. + self.module = module + + #: Underlying Python test function. + self.function = definition.obj + + #: Set of fixture names required by the test function. + self.fixturenames = fixtureinfo.names_closure + + #: Class object where the test function is defined in or ``None``. + self.cls = cls + + self._arg2fixturedefs = fixtureinfo.name2fixturedefs + + # Result of parametrize(). 
+ self._calls: list[CallSpec2] = [] + + self._params_directness: dict[str, Literal["indirect", "direct"]] = {} + + def parametrize( + self, + argnames: str | Sequence[str], + argvalues: Iterable[ParameterSet | Sequence[object] | object], + indirect: bool | Sequence[str] = False, + ids: Iterable[object | None] | Callable[[Any], object | None] | None = None, + scope: _ScopeName | None = None, + *, + _param_mark: Mark | None = None, + ) -> None: + """Add new invocations to the underlying test function using the list + of argvalues for the given argnames. Parametrization is performed + during the collection phase. If you need to setup expensive resources + see about setting ``indirect`` to do it at test setup time instead. + + Can be called multiple times per test function (but only on different + argument names), in which case each call parametrizes all previous + parametrizations, e.g. + + :: + + unparametrized: t + parametrize ["x", "y"]: t[x], t[y] + parametrize [1, 2]: t[x-1], t[x-2], t[y-1], t[y-2] + + :param argnames: + A comma-separated string denoting one or more argument names, or + a list/tuple of argument strings. + + :param argvalues: + The list of argvalues determines how often a test is invoked with + different argument values. + + If only one argname was specified argvalues is a list of values. + If N argnames were specified, argvalues must be a list of + N-tuples, where each tuple-element specifies a value for its + respective argname. + + :param indirect: + A list of arguments' names (subset of argnames) or a boolean. + If True the list contains all names from the argnames. Each + argvalue corresponding to an argname in this list will + be passed as request.param to its respective argname fixture + function so that it can perform more expensive setups during the + setup phase of a test rather than at collection time. + + :param ids: + Sequence of (or generator for) ids for ``argvalues``, + or a callable to return part of the id for each argvalue. + + With sequences (and generators like ``itertools.count()``) the + returned ids should be of type ``string``, ``int``, ``float``, + ``bool``, or ``None``. + They are mapped to the corresponding index in ``argvalues``. + ``None`` means to use the auto-generated id. + + .. versionadded:: 8.4 + :ref:`hidden-param` means to hide the parameter set + from the test name. Can only be used at most 1 time, as + test names need to be unique. + + If it is a callable it will be called for each entry in + ``argvalues``, and the return value is used as part of the + auto-generated id for the whole set (where parts are joined with + dashes ("-")). + This is useful to provide more specific ids for certain items, e.g. + dates. Returning ``None`` will use an auto-generated id. + + If no ids are provided they will be generated automatically from + the argvalues. + + :param scope: + If specified it denotes the scope of the parameters. + The scope is used for grouping tests by parameter instances. + It will also override any fixture-function defined scope, allowing + to set a dynamic scope using test context or configuration. 
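+
+        A typical use from a :hook:`pytest_generate_tests` hook; the fixture
+        name and values below are illustrative::
+
+            def pytest_generate_tests(metafunc):
+                if "backend" in metafunc.fixturenames:
+                    metafunc.parametrize("backend", ["sqlite", "postgres"])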
+ """ + nodeid = self.definition.nodeid + + argnames, parametersets = ParameterSet._for_parametrize( + argnames, + argvalues, + self.function, + self.config, + nodeid=self.definition.nodeid, + ) + del argvalues + + if "request" in argnames: + fail( + f"{nodeid}: 'request' is a reserved name and cannot be used in @pytest.mark.parametrize", + pytrace=False, + ) + + if scope is not None: + scope_ = Scope.from_user( + scope, descr=f"parametrize() call in {self.function.__name__}" + ) + else: + scope_ = _find_parametrized_scope(argnames, self._arg2fixturedefs, indirect) + + self._validate_if_using_arg_names(argnames, indirect) + + # Use any already (possibly) generated ids with parametrize Marks. + if _param_mark and _param_mark._param_ids_from: + generated_ids = _param_mark._param_ids_from._param_ids_generated + if generated_ids is not None: + ids = generated_ids + + ids = self._resolve_parameter_set_ids( + argnames, ids, parametersets, nodeid=self.definition.nodeid + ) + + # Store used (possibly generated) ids with parametrize Marks. + if _param_mark and _param_mark._param_ids_from and generated_ids is None: + object.__setattr__(_param_mark._param_ids_from, "_param_ids_generated", ids) + + # Calculate directness. + arg_directness = self._resolve_args_directness(argnames, indirect) + self._params_directness.update(arg_directness) + + # Add direct parametrizations as fixturedefs to arg2fixturedefs by + # registering artificial "pseudo" FixtureDef's such that later at test + # setup time we can rely on FixtureDefs to exist for all argnames. + node = None + # For scopes higher than function, a "pseudo" FixtureDef might have + # already been created for the scope. We thus store and cache the + # FixtureDef on the node related to the scope. + if scope_ is Scope.Function: + name2pseudofixturedef = None + else: + collector = self.definition.parent + assert collector is not None + node = get_scope_node(collector, scope_) + if node is None: + # If used class scope and there is no class, use module-level + # collector (for now). + if scope_ is Scope.Class: + assert isinstance(collector, Module) + node = collector + # If used package scope and there is no package, use session + # (for now). + elif scope_ is Scope.Package: + node = collector.session + else: + assert False, f"Unhandled missing scope: {scope}" + default: dict[str, FixtureDef[Any]] = {} + name2pseudofixturedef = node.stash.setdefault( + name2pseudofixturedef_key, default + ) + for argname in argnames: + if arg_directness[argname] == "indirect": + continue + if name2pseudofixturedef is not None and argname in name2pseudofixturedef: + fixturedef = name2pseudofixturedef[argname] + else: + fixturedef = FixtureDef( + config=self.config, + baseid="", + argname=argname, + func=get_direct_param_fixture_func, + scope=scope_, + params=None, + ids=None, + _ispytest=True, + ) + if name2pseudofixturedef is not None: + name2pseudofixturedef[argname] = fixturedef + self._arg2fixturedefs[argname] = [fixturedef] + + # Create the new calls: if we are parametrize() multiple times (by applying the decorator + # more than once) then we accumulate those calls generating the cartesian product + # of all calls. 
+ newcalls = [] + for callspec in self._calls or [CallSpec2()]: + for param_index, (param_id, param_set) in enumerate( + zip(ids, parametersets, strict=True) + ): + newcallspec = callspec.setmulti( + argnames=argnames, + valset=param_set.values, + id=param_id, + marks=param_set.marks, + scope=scope_, + param_index=param_index, + nodeid=nodeid, + ) + newcalls.append(newcallspec) + self._calls = newcalls + + def _resolve_parameter_set_ids( + self, + argnames: Sequence[str], + ids: Iterable[object | None] | Callable[[Any], object | None] | None, + parametersets: Sequence[ParameterSet], + nodeid: str, + ) -> list[str | _HiddenParam]: + """Resolve the actual ids for the given parameter sets. + + :param argnames: + Argument names passed to ``parametrize()``. + :param ids: + The `ids` parameter of the ``parametrize()`` call (see docs). + :param parametersets: + The parameter sets, each containing a set of values corresponding + to ``argnames``. + :param nodeid str: + The nodeid of the definition item that generated this + parametrization. + :returns: + List with ids for each parameter set given. + """ + if ids is None: + idfn = None + ids_ = None + elif callable(ids): + idfn = ids + ids_ = None + else: + idfn = None + ids_ = self._validate_ids(ids, parametersets, self.function.__name__) + id_maker = IdMaker( + argnames, + parametersets, + idfn, + ids_, + self.config, + nodeid=nodeid, + func_name=self.function.__name__, + ) + return id_maker.make_unique_parameterset_ids() + + def _validate_ids( + self, + ids: Iterable[object | None], + parametersets: Sequence[ParameterSet], + func_name: str, + ) -> list[object | None]: + try: + num_ids = len(ids) # type: ignore[arg-type] + except TypeError: + try: + iter(ids) + except TypeError as e: + raise TypeError("ids must be a callable or an iterable") from e + num_ids = len(parametersets) + + # num_ids == 0 is a special case: https://github.com/pytest-dev/pytest/issues/1849 + if num_ids != len(parametersets) and num_ids != 0: + msg = "In {}: {} parameter sets specified, with different number of ids: {}" + fail(msg.format(func_name, len(parametersets), num_ids), pytrace=False) + + return list(itertools.islice(ids, num_ids)) + + def _resolve_args_directness( + self, + argnames: Sequence[str], + indirect: bool | Sequence[str], + ) -> dict[str, Literal["indirect", "direct"]]: + """Resolve if each parametrized argument must be considered an indirect + parameter to a fixture of the same name, or a direct parameter to the + parametrized function, based on the ``indirect`` parameter of the + parametrized() call. + + :param argnames: + List of argument names passed to ``parametrize()``. + :param indirect: + Same as the ``indirect`` parameter of ``parametrize()``. + :returns + A dict mapping each arg name to either "indirect" or "direct". 
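+
+ For example (an illustrative note, not part of the upstream docstring):
+ ``argnames=["x", "y"]`` with ``indirect=["x"]`` resolves to
+ ``{"x": "indirect", "y": "direct"}``, while ``indirect=True`` marks both
+ arguments as ``"indirect"``.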
+ """ + arg_directness: dict[str, Literal["indirect", "direct"]] + if isinstance(indirect, bool): + arg_directness = dict.fromkeys( + argnames, "indirect" if indirect else "direct" + ) + elif isinstance(indirect, Sequence): + arg_directness = dict.fromkeys(argnames, "direct") + for arg in indirect: + if arg not in argnames: + fail( + f"In {self.function.__name__}: indirect fixture '{arg}' doesn't exist", + pytrace=False, + ) + arg_directness[arg] = "indirect" + else: + fail( + f"In {self.function.__name__}: expected Sequence or boolean" + f" for indirect, got {type(indirect).__name__}", + pytrace=False, + ) + return arg_directness + + def _validate_if_using_arg_names( + self, + argnames: Sequence[str], + indirect: bool | Sequence[str], + ) -> None: + """Check if all argnames are being used, by default values, or directly/indirectly. + + :param List[str] argnames: List of argument names passed to ``parametrize()``. + :param indirect: Same as the ``indirect`` parameter of ``parametrize()``. + :raises ValueError: If validation fails. + """ + default_arg_names = set(get_default_arg_names(self.function)) + func_name = self.function.__name__ + for arg in argnames: + if arg not in self.fixturenames: + if arg in default_arg_names: + fail( + f"In {func_name}: function already takes an argument '{arg}' with a default value", + pytrace=False, + ) + else: + if isinstance(indirect, Sequence): + name = "fixture" if arg in indirect else "argument" + else: + name = "fixture" if indirect else "argument" + fail( + f"In {func_name}: function uses no {name} '{arg}'", + pytrace=False, + ) + + def _recompute_direct_params_indices(self) -> None: + for argname, param_type in self._params_directness.items(): + if param_type == "direct": + for i, callspec in enumerate(self._calls): + callspec.indices[argname] = i + + +def _find_parametrized_scope( + argnames: Sequence[str], + arg2fixturedefs: Mapping[str, Sequence[fixtures.FixtureDef[object]]], + indirect: bool | Sequence[str], +) -> Scope: + """Find the most appropriate scope for a parametrized call based on its arguments. + + When there's at least one direct argument, always use "function" scope. + + When a test function is parametrized and all its arguments are indirect + (e.g. fixtures), return the most narrow scope based on the fixtures used. + + Related to issue #1832, based on code posted by @Kingdread. + """ + if isinstance(indirect, Sequence): + all_arguments_are_fixtures = len(indirect) == len(argnames) + else: + all_arguments_are_fixtures = bool(indirect) + + if all_arguments_are_fixtures: + fixturedefs = arg2fixturedefs or {} + used_scopes = [ + fixturedef[-1]._scope + for name, fixturedef in fixturedefs.items() + if name in argnames + ] + # Takes the most narrow scope from used fixtures. + return min(used_scopes, default=Scope.Function) + + return Scope.Function + + +def _ascii_escaped_by_config(val: str | bytes, config: Config | None) -> str: + if config is None: + escape_option = False + else: + escape_option = config.getini( + "disable_test_id_escaping_and_forfeit_all_rights_to_community_support" + ) + # TODO: If escaping is turned off and the user passes bytes, + # will return a bytes. For now we ignore this but the + # code *probably* doesn't handle this case. + return val if escape_option else ascii_escaped(val) # type: ignore + + +class Function(PyobjMixin, nodes.Item): + """Item responsible for setting up and executing a Python test function. 
+ + :param name: + The full function name, including any decorations like those + added by parametrization (``my_func[my_param]``). + :param parent: + The parent Node. + :param config: + The pytest Config object. + :param callspec: + If given, this function has been parametrized and the callspec contains + meta information about the parametrization. + :param callobj: + If given, the object which will be called when the Function is invoked, + otherwise the callobj will be obtained from ``parent`` using ``originalname``. + :param keywords: + Keywords bound to the function object for "-k" matching. + :param session: + The pytest Session object. + :param fixtureinfo: + Fixture information already resolved at this fixture node.. + :param originalname: + The attribute name to use for accessing the underlying function object. + Defaults to ``name``. Set this if name is different from the original name, + for example when it contains decorations like those added by parametrization + (``my_func[my_param]``). + """ + + # Disable since functions handle it themselves. + _ALLOW_MARKERS = False + + def __init__( + self, + name: str, + parent, + config: Config | None = None, + callspec: CallSpec2 | None = None, + callobj=NOTSET, + keywords: Mapping[str, Any] | None = None, + session: Session | None = None, + fixtureinfo: FuncFixtureInfo | None = None, + originalname: str | None = None, + ) -> None: + super().__init__(name, parent, config=config, session=session) + + if callobj is not NOTSET: + self._obj = callobj + self._instance = getattr(callobj, "__self__", None) + + #: Original function name, without any decorations (for example + #: parametrization adds a ``"[...]"`` suffix to function names), used to access + #: the underlying function object from ``parent`` (in case ``callobj`` is not given + #: explicitly). + #: + #: .. versionadded:: 3.0 + self.originalname = originalname or name + + # Note: when FunctionDefinition is introduced, we should change ``originalname`` + # to a readonly property that returns FunctionDefinition.name. + + self.own_markers.extend(get_unpacked_marks(self.obj)) + if callspec: + self.callspec = callspec + self.own_markers.extend(callspec.marks) + + # todo: this is a hell of a hack + # https://github.com/pytest-dev/pytest/issues/4569 + # Note: the order of the updates is important here; indicates what + # takes priority (ctor argument over function attributes over markers). + # Take own_markers only; NodeKeywords handles parent traversal on its own. + self.keywords.update((mark.name, mark) for mark in self.own_markers) + self.keywords.update(self.obj.__dict__) + if keywords: + self.keywords.update(keywords) + + if fixtureinfo is None: + fm = self.session._fixturemanager + fixtureinfo = fm.getfixtureinfo(self, self.obj, self.cls) + self._fixtureinfo: FuncFixtureInfo = fixtureinfo + self.fixturenames = fixtureinfo.names_closure + self._initrequest() + + # todo: determine sound type limitations + @classmethod + def from_parent(cls, parent, **kw) -> Self: + """The public constructor.""" + return super().from_parent(parent=parent, **kw) + + def _initrequest(self) -> None: + self.funcargs: dict[str, object] = {} + self._request = fixtures.TopRequest(self, _ispytest=True) + + @property + def function(self): + """Underlying python 'function' object.""" + return getimfunc(self.obj) + + @property + def instance(self): + try: + return self._instance + except AttributeError: + if isinstance(self.parent, Class): + # Each Function gets a fresh class instance. 
+ self._instance = self._getinstance() + else: + self._instance = None + return self._instance + + def _getinstance(self): + if isinstance(self.parent, Class): + # Each Function gets a fresh class instance. + return self.parent.newinstance() + else: + return None + + def _getobj(self): + instance = self.instance + if instance is not None: + parent_obj = instance + else: + assert self.parent is not None + parent_obj = self.parent.obj # type: ignore[attr-defined] + return getattr(parent_obj, self.originalname) + + @property + def _pyfuncitem(self): + """(compatonly) for code expecting pytest-2.2 style request objects.""" + return self + + def runtest(self) -> None: + """Execute the underlying test function.""" + self.ihook.pytest_pyfunc_call(pyfuncitem=self) + + def setup(self) -> None: + self._request._fillfixtures() + + def _traceback_filter(self, excinfo: ExceptionInfo[BaseException]) -> Traceback: + if hasattr(self, "_obj") and not self.config.getoption("fulltrace", False): + code = _pytest._code.Code.from_function(get_real_func(self.obj)) + path, firstlineno = code.path, code.firstlineno + traceback = excinfo.traceback + ntraceback = traceback.cut(path=path, firstlineno=firstlineno) + if ntraceback == traceback: + ntraceback = ntraceback.cut(path=path) + if ntraceback == traceback: + ntraceback = ntraceback.filter(filter_traceback) + if not ntraceback: + ntraceback = traceback + ntraceback = ntraceback.filter(excinfo) + + # issue364: mark all but first and last frames to + # only show a single-line message for each frame. + if self.config.getoption("tbstyle", "auto") == "auto": + if len(ntraceback) > 2: + ntraceback = Traceback( + ( + ntraceback[0], + *(t.with_repr_style("short") for t in ntraceback[1:-1]), + ntraceback[-1], + ) + ) + + return ntraceback + return excinfo.traceback + + # TODO: Type ignored -- breaks Liskov Substitution. 
+ def repr_failure( # type: ignore[override] + self, + excinfo: ExceptionInfo[BaseException], + ) -> str | TerminalRepr: + style = self.config.getoption("tbstyle", "auto") + if style == "auto": + style = "long" + return self._repr_failure_py(excinfo, style=style) + + +class FunctionDefinition(Function): + """This class is a stop gap solution until we evolve to have actual function + definition nodes and manage to get rid of ``metafunc``.""" + + def runtest(self) -> None: + raise RuntimeError("function definitions are not supposed to be run as tests") + + setup = runtest diff --git a/.venv/lib/python3.12/site-packages/_pytest/python_api.py b/.venv/lib/python3.12/site-packages/_pytest/python_api.py new file mode 100644 index 0000000..1e389eb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/python_api.py @@ -0,0 +1,820 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +from collections.abc import Collection +from collections.abc import Mapping +from collections.abc import Sequence +from collections.abc import Sized +from decimal import Decimal +import math +from numbers import Complex +import pprint +import sys +from typing import Any +from typing import TYPE_CHECKING + + +if TYPE_CHECKING: + from numpy import ndarray + + +def _compare_approx( + full_object: object, + message_data: Sequence[tuple[str, str, str]], + number_of_elements: int, + different_ids: Sequence[object], + max_abs_diff: float, + max_rel_diff: float, +) -> list[str]: + message_list = list(message_data) + message_list.insert(0, ("Index", "Obtained", "Expected")) + max_sizes = [0, 0, 0] + for index, obtained, expected in message_list: + max_sizes[0] = max(max_sizes[0], len(index)) + max_sizes[1] = max(max_sizes[1], len(obtained)) + max_sizes[2] = max(max_sizes[2], len(expected)) + explanation = [ + f"comparison failed. Mismatched elements: {len(different_ids)} / {number_of_elements}:", + f"Max absolute difference: {max_abs_diff}", + f"Max relative difference: {max_rel_diff}", + ] + [ + f"{indexes:<{max_sizes[0]}} | {obtained:<{max_sizes[1]}} | {expected:<{max_sizes[2]}}" + for indexes, obtained, expected in message_list + ] + return explanation + + +# builtin pytest.approx helper + + +class ApproxBase: + """Provide shared utilities for making approximate comparisons between + numbers or sequences of numbers.""" + + # Tell numpy to use our `__eq__` operator instead of its. + __array_ufunc__ = None + __array_priority__ = 100 + + def __init__(self, expected, rel=None, abs=None, nan_ok: bool = False) -> None: + __tracebackhide__ = True + self.expected = expected + self.abs = abs + self.rel = rel + self.nan_ok = nan_ok + self._check_type() + + def __repr__(self) -> str: + raise NotImplementedError + + def _repr_compare(self, other_side: Any) -> list[str]: + return [ + "comparison failed", + f"Obtained: {other_side}", + f"Expected: {self}", + ] + + def __eq__(self, actual) -> bool: + return all( + a == self._approx_scalar(x) for a, x in self._yield_comparisons(actual) + ) + + def __bool__(self): + __tracebackhide__ = True + raise AssertionError( + "approx() is not supported in a boolean context.\nDid you mean: `assert a == approx(b)`?" + ) + + # Ignore type because of https://github.com/python/mypy/issues/4266. 
+ __hash__ = None # type: ignore + + def __ne__(self, actual) -> bool: + return not (actual == self) + + def _approx_scalar(self, x) -> ApproxScalar: + if isinstance(x, Decimal): + return ApproxDecimal(x, rel=self.rel, abs=self.abs, nan_ok=self.nan_ok) + return ApproxScalar(x, rel=self.rel, abs=self.abs, nan_ok=self.nan_ok) + + def _yield_comparisons(self, actual): + """Yield all the pairs of numbers to be compared. + + This is used to implement the `__eq__` method. + """ + raise NotImplementedError + + def _check_type(self) -> None: + """Raise a TypeError if the expected value is not a valid type.""" + # This is only a concern if the expected value is a sequence. In every + # other case, the approx() function ensures that the expected value has + # a numeric type. For this reason, the default is to do nothing. The + # classes that deal with sequences should reimplement this method to + # raise if there are any non-numeric elements in the sequence. + + +def _recursive_sequence_map(f, x): + """Recursively map a function over a sequence of arbitrary depth""" + if isinstance(x, list | tuple): + seq_type = type(x) + return seq_type(_recursive_sequence_map(f, xi) for xi in x) + elif _is_sequence_like(x): + return [_recursive_sequence_map(f, xi) for xi in x] + else: + return f(x) + + +class ApproxNumpy(ApproxBase): + """Perform approximate comparisons where the expected value is numpy array.""" + + def __repr__(self) -> str: + list_scalars = _recursive_sequence_map( + self._approx_scalar, self.expected.tolist() + ) + return f"approx({list_scalars!r})" + + def _repr_compare(self, other_side: ndarray | list[Any]) -> list[str]: + import itertools + import math + + def get_value_from_nested_list( + nested_list: list[Any], nd_index: tuple[Any, ...] + ) -> Any: + """ + Helper function to get the value out of a nested list, given an n-dimensional index. + This mimics numpy's indexing, but for raw nested python lists. 
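+
+ For example (illustrative, not part of the upstream docstring):
+ ``get_value_from_nested_list([[1, 2], [3, 4]], (1, 0))`` returns ``3``,
+ just like ``array[1, 0]`` would for the equivalent 2x2 ndarray.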
+ """ + value: Any = nested_list + for i in nd_index: + value = value[i] + return value + + np_array_shape = self.expected.shape + approx_side_as_seq = _recursive_sequence_map( + self._approx_scalar, self.expected.tolist() + ) + + # convert other_side to numpy array to ensure shape attribute is available + other_side_as_array = _as_numpy_array(other_side) + assert other_side_as_array is not None + + if np_array_shape != other_side_as_array.shape: + return [ + "Impossible to compare arrays with different shapes.", + f"Shapes: {np_array_shape} and {other_side_as_array.shape}", + ] + + number_of_elements = self.expected.size + max_abs_diff = -math.inf + max_rel_diff = -math.inf + different_ids = [] + for index in itertools.product(*(range(i) for i in np_array_shape)): + approx_value = get_value_from_nested_list(approx_side_as_seq, index) + other_value = get_value_from_nested_list(other_side_as_array, index) + if approx_value != other_value: + abs_diff = abs(approx_value.expected - other_value) + max_abs_diff = max(max_abs_diff, abs_diff) + if other_value == 0.0: + max_rel_diff = math.inf + else: + max_rel_diff = max(max_rel_diff, abs_diff / abs(other_value)) + different_ids.append(index) + + message_data = [ + ( + str(index), + str(get_value_from_nested_list(other_side_as_array, index)), + str(get_value_from_nested_list(approx_side_as_seq, index)), + ) + for index in different_ids + ] + return _compare_approx( + self.expected, + message_data, + number_of_elements, + different_ids, + max_abs_diff, + max_rel_diff, + ) + + def __eq__(self, actual) -> bool: + import numpy as np + + # self.expected is supposed to always be an array here. + + if not np.isscalar(actual): + try: + actual = np.asarray(actual) + except Exception as e: + raise TypeError(f"cannot compare '{actual}' to numpy.ndarray") from e + + if not np.isscalar(actual) and actual.shape != self.expected.shape: + return False + + return super().__eq__(actual) + + def _yield_comparisons(self, actual): + import numpy as np + + # `actual` can either be a numpy array or a scalar, it is treated in + # `__eq__` before being passed to `ApproxBase.__eq__`, which is the + # only method that calls this one. 
+ + if np.isscalar(actual): + for i in np.ndindex(self.expected.shape): + yield actual, self.expected[i].item() + else: + for i in np.ndindex(self.expected.shape): + yield actual[i].item(), self.expected[i].item() + + +class ApproxMapping(ApproxBase): + """Perform approximate comparisons where the expected value is a mapping + with numeric values (the keys can be anything).""" + + def __repr__(self) -> str: + return f"approx({ ({k: self._approx_scalar(v) for k, v in self.expected.items()})!r})" + + def _repr_compare(self, other_side: Mapping[object, float]) -> list[str]: + import math + + if len(self.expected) != len(other_side): + return [ + "Impossible to compare mappings with different sizes.", + f"Lengths: {len(self.expected)} and {len(other_side)}", + ] + + if set(self.expected.keys()) != set(other_side.keys()): + return [ + "comparison failed.", + f"Mappings has different keys: expected {self.expected.keys()} but got {other_side.keys()}", + ] + + approx_side_as_map = { + k: self._approx_scalar(v) for k, v in self.expected.items() + } + + number_of_elements = len(approx_side_as_map) + max_abs_diff = -math.inf + max_rel_diff = -math.inf + different_ids = [] + for (approx_key, approx_value), other_value in zip( + approx_side_as_map.items(), other_side.values(), strict=True + ): + if approx_value != other_value: + if approx_value.expected is not None and other_value is not None: + try: + max_abs_diff = max( + max_abs_diff, abs(approx_value.expected - other_value) + ) + if approx_value.expected == 0.0: + max_rel_diff = math.inf + else: + max_rel_diff = max( + max_rel_diff, + abs( + (approx_value.expected - other_value) + / approx_value.expected + ), + ) + except ZeroDivisionError: + pass + different_ids.append(approx_key) + + message_data = [ + (str(key), str(other_side[key]), str(approx_side_as_map[key])) + for key in different_ids + ] + + return _compare_approx( + self.expected, + message_data, + number_of_elements, + different_ids, + max_abs_diff, + max_rel_diff, + ) + + def __eq__(self, actual) -> bool: + try: + if set(actual.keys()) != set(self.expected.keys()): + return False + except AttributeError: + return False + + return super().__eq__(actual) + + def _yield_comparisons(self, actual): + for k in self.expected.keys(): + yield actual[k], self.expected[k] + + def _check_type(self) -> None: + __tracebackhide__ = True + for key, value in self.expected.items(): + if isinstance(value, type(self.expected)): + msg = "pytest.approx() does not support nested dictionaries: key={!r} value={!r}\n full mapping={}" + raise TypeError(msg.format(key, value, pprint.pformat(self.expected))) + + +class ApproxSequenceLike(ApproxBase): + """Perform approximate comparisons where the expected value is a sequence of numbers.""" + + def __repr__(self) -> str: + seq_type = type(self.expected) + if seq_type not in (tuple, list): + seq_type = list + return f"approx({seq_type(self._approx_scalar(x) for x in self.expected)!r})" + + def _repr_compare(self, other_side: Sequence[float]) -> list[str]: + import math + + if len(self.expected) != len(other_side): + return [ + "Impossible to compare lists with different sizes.", + f"Lengths: {len(self.expected)} and {len(other_side)}", + ] + + approx_side_as_map = _recursive_sequence_map(self._approx_scalar, self.expected) + + number_of_elements = len(approx_side_as_map) + max_abs_diff = -math.inf + max_rel_diff = -math.inf + different_ids = [] + for i, (approx_value, other_value) in enumerate( + zip(approx_side_as_map, other_side, strict=True) + ): + if approx_value 
!= other_value: + try: + abs_diff = abs(approx_value.expected - other_value) + max_abs_diff = max(max_abs_diff, abs_diff) + # Ignore non-numbers for the diff calculations (#13012). + except TypeError: + pass + else: + if other_value == 0.0: + max_rel_diff = math.inf + else: + max_rel_diff = max(max_rel_diff, abs_diff / abs(other_value)) + different_ids.append(i) + message_data = [ + (str(i), str(other_side[i]), str(approx_side_as_map[i])) + for i in different_ids + ] + + return _compare_approx( + self.expected, + message_data, + number_of_elements, + different_ids, + max_abs_diff, + max_rel_diff, + ) + + def __eq__(self, actual) -> bool: + try: + if len(actual) != len(self.expected): + return False + except TypeError: + return False + return super().__eq__(actual) + + def _yield_comparisons(self, actual): + return zip(actual, self.expected, strict=True) + + def _check_type(self) -> None: + __tracebackhide__ = True + for index, x in enumerate(self.expected): + if isinstance(x, type(self.expected)): + msg = "pytest.approx() does not support nested data structures: {!r} at index {}\n full sequence: {}" + raise TypeError(msg.format(x, index, pprint.pformat(self.expected))) + + +class ApproxScalar(ApproxBase): + """Perform approximate comparisons where the expected value is a single number.""" + + # Using Real should be better than this Union, but not possible yet: + # https://github.com/python/typeshed/pull/3108 + DEFAULT_ABSOLUTE_TOLERANCE: float | Decimal = 1e-12 + DEFAULT_RELATIVE_TOLERANCE: float | Decimal = 1e-6 + + def __repr__(self) -> str: + """Return a string communicating both the expected value and the + tolerance for the comparison being made. + + For example, ``1.0 ± 1e-6``, ``(3+4j) ± 5e-6 ∠ ±180°``. + """ + # Don't show a tolerance for values that aren't compared using + # tolerances, i.e. non-numerics and infinities. Need to call abs to + # handle complex numbers, e.g. (inf + 1j). + if ( + isinstance(self.expected, bool) + or (not isinstance(self.expected, Complex | Decimal)) + or math.isinf(abs(self.expected) or isinstance(self.expected, bool)) + ): + return str(self.expected) + + # If a sensible tolerance can't be calculated, self.tolerance will + # raise a ValueError. In this case, display '???'. + try: + if 1e-3 <= self.tolerance < 1e3: + vetted_tolerance = f"{self.tolerance:n}" + else: + vetted_tolerance = f"{self.tolerance:.1e}" + + if ( + isinstance(self.expected, Complex) + and self.expected.imag + and not math.isinf(self.tolerance) + ): + vetted_tolerance += " ∠ ±180°" + except ValueError: + vetted_tolerance = "???" + + return f"{self.expected} ± {vetted_tolerance}" + + def __eq__(self, actual) -> bool: + """Return whether the given value is equal to the expected value + within the pre-specified tolerance.""" + + def is_bool(val: Any) -> bool: + # Check if `val` is a native bool or numpy bool. + if isinstance(val, bool): + return True + if np := sys.modules.get("numpy"): + return isinstance(val, np.bool_) + return False + + asarray = _as_numpy_array(actual) + if asarray is not None: + # Call ``__eq__()`` manually to prevent infinite-recursion with + # numpy<1.13. See #3748. + return all(self.__eq__(a) for a in asarray.flat) + + # Short-circuit exact equality, except for bool and np.bool_ + if is_bool(self.expected) and not is_bool(actual): + return False + elif actual == self.expected: + return True + + # If either type is non-numeric, fall back to strict equality. 
+ # NB: we need Complex, rather than just Number, to ensure that __abs__, + # __sub__, and __float__ are defined. Also, consider bool to be + # non-numeric, even though it has the required arithmetic. + if is_bool(self.expected) or not ( + isinstance(self.expected, Complex | Decimal) + and isinstance(actual, Complex | Decimal) + ): + return False + + # Allow the user to control whether NaNs are considered equal to each + # other or not. The abs() calls are for compatibility with complex + # numbers. + if math.isnan(abs(self.expected)): + return self.nan_ok and math.isnan(abs(actual)) + + # Infinity shouldn't be approximately equal to anything but itself, but + # if there's a relative tolerance, it will be infinite and infinity + # will seem approximately equal to everything. The equal-to-itself + # case would have been short circuited above, so here we can just + # return false if the expected value is infinite. The abs() call is + # for compatibility with complex numbers. + if math.isinf(abs(self.expected)): + return False + + # Return true if the two numbers are within the tolerance. + result: bool = abs(self.expected - actual) <= self.tolerance + return result + + __hash__ = None + + @property + def tolerance(self): + """Return the tolerance for the comparison. + + This could be either an absolute tolerance or a relative tolerance, + depending on what the user specified or which would be larger. + """ + + def set_default(x, default): + return x if x is not None else default + + # Figure out what the absolute tolerance should be. ``self.abs`` is + # either None or a value specified by the user. + absolute_tolerance = set_default(self.abs, self.DEFAULT_ABSOLUTE_TOLERANCE) + + if absolute_tolerance < 0: + raise ValueError( + f"absolute tolerance can't be negative: {absolute_tolerance}" + ) + if math.isnan(absolute_tolerance): + raise ValueError("absolute tolerance can't be NaN.") + + # If the user specified an absolute tolerance but not a relative one, + # just return the absolute tolerance. + if self.rel is None: + if self.abs is not None: + return absolute_tolerance + + # Figure out what the relative tolerance should be. ``self.rel`` is + # either None or a value specified by the user. This is done after + # we've made sure the user didn't ask for an absolute tolerance only, + # because we don't want to raise errors about the relative tolerance if + # we aren't even going to use it. + relative_tolerance = set_default( + self.rel, self.DEFAULT_RELATIVE_TOLERANCE + ) * abs(self.expected) + + if relative_tolerance < 0: + raise ValueError( + f"relative tolerance can't be negative: {relative_tolerance}" + ) + if math.isnan(relative_tolerance): + raise ValueError("relative tolerance can't be NaN.") + + # Return the larger of the relative and absolute tolerances. + return max(relative_tolerance, absolute_tolerance) + + +class ApproxDecimal(ApproxScalar): + """Perform approximate comparisons where the expected value is a Decimal.""" + + DEFAULT_ABSOLUTE_TOLERANCE = Decimal("1e-12") + DEFAULT_RELATIVE_TOLERANCE = Decimal("1e-6") + + def __repr__(self) -> str: + if isinstance(self.rel, float): + rel = Decimal.from_float(self.rel) + else: + rel = self.rel + + if isinstance(self.abs, float): + abs_ = Decimal.from_float(self.abs) + else: + abs_ = self.abs + + tol_str = "???" 
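+ # Comment added for clarity (not upstream): show the relative tolerance when
+ # it falls in a readable range, otherwise the absolute tolerance if one was
+ # given, and leave "???" when neither applies.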
+ if rel is not None and Decimal("1e-3") <= rel <= Decimal("1e3"): + tol_str = f"{rel:.1e}" + elif abs_ is not None: + tol_str = f"{abs_:.1e}" + + return f"{self.expected} ± {tol_str}" + + +def approx(expected, rel=None, abs=None, nan_ok: bool = False) -> ApproxBase: + """Assert that two numbers (or two ordered sequences of numbers) are equal to each other + within some tolerance. + + Due to the :doc:`python:tutorial/floatingpoint`, numbers that we + would intuitively expect to be equal are not always so:: + + >>> 0.1 + 0.2 == 0.3 + False + + This problem is commonly encountered when writing tests, e.g. when making + sure that floating-point values are what you expect them to be. One way to + deal with this problem is to assert that two floating-point numbers are + equal to within some appropriate tolerance:: + + >>> abs((0.1 + 0.2) - 0.3) < 1e-6 + True + + However, comparisons like this are tedious to write and difficult to + understand. Furthermore, absolute comparisons like the one above are + usually discouraged because there's no tolerance that works well for all + situations. ``1e-6`` is good for numbers around ``1``, but too small for + very big numbers and too big for very small ones. It's better to express + the tolerance as a fraction of the expected value, but relative comparisons + like that are even more difficult to write correctly and concisely. + + The ``approx`` class performs floating-point comparisons using a syntax + that's as intuitive as possible:: + + >>> from pytest import approx + >>> 0.1 + 0.2 == approx(0.3) + True + + The same syntax also works for ordered sequences of numbers:: + + >>> (0.1 + 0.2, 0.2 + 0.4) == approx((0.3, 0.6)) + True + + ``numpy`` arrays:: + + >>> import numpy as np # doctest: +SKIP + >>> np.array([0.1, 0.2]) + np.array([0.2, 0.4]) == approx(np.array([0.3, 0.6])) # doctest: +SKIP + True + + And for a ``numpy`` array against a scalar:: + + >>> import numpy as np # doctest: +SKIP + >>> np.array([0.1, 0.2]) + np.array([0.2, 0.1]) == approx(0.3) # doctest: +SKIP + True + + Only ordered sequences are supported, because ``approx`` needs + to infer the relative position of the sequences without ambiguity. This means + ``sets`` and other unordered sequences are not supported. + + Finally, dictionary *values* can also be compared:: + + >>> {'a': 0.1 + 0.2, 'b': 0.2 + 0.4} == approx({'a': 0.3, 'b': 0.6}) + True + + The comparison will be true if both mappings have the same keys and their + respective values match the expected tolerances. + + **Tolerances** + + By default, ``approx`` considers numbers within a relative tolerance of + ``1e-6`` (i.e. one part in a million) of its expected value to be equal. + This treatment would lead to surprising results if the expected value was + ``0.0``, because nothing but ``0.0`` itself is relatively close to ``0.0``. + To handle this case less surprisingly, ``approx`` also considers numbers + within an absolute tolerance of ``1e-12`` of its expected value to be + equal. Infinity and NaN are special cases. Infinity is only considered + equal to itself, regardless of the relative tolerance. NaN is not + considered equal to anything by default, but you can make it be equal to + itself by setting the ``nan_ok`` argument to True. (This is meant to + facilitate comparing arrays that use NaN to mean "no data".) 
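+
+ For example (an illustrative addition, not part of the upstream docstring)::
+
+ >>> import math
+ >>> math.nan == approx(math.nan)
+ False
+ >>> math.nan == approx(math.nan, nan_ok=True)
+ True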
+ + Both the relative and absolute tolerances can be changed by passing + arguments to the ``approx`` constructor:: + + >>> 1.0001 == approx(1) + False + >>> 1.0001 == approx(1, rel=1e-3) + True + >>> 1.0001 == approx(1, abs=1e-3) + True + + If you specify ``abs`` but not ``rel``, the comparison will not consider + the relative tolerance at all. In other words, two numbers that are within + the default relative tolerance of ``1e-6`` will still be considered unequal + if they exceed the specified absolute tolerance. If you specify both + ``abs`` and ``rel``, the numbers will be considered equal if either + tolerance is met:: + + >>> 1 + 1e-8 == approx(1) + True + >>> 1 + 1e-8 == approx(1, abs=1e-12) + False + >>> 1 + 1e-8 == approx(1, rel=1e-6, abs=1e-12) + True + + **Non-numeric types** + + You can also use ``approx`` to compare non-numeric types, or dicts and + sequences containing non-numeric types, in which case it falls back to + strict equality. This can be useful for comparing dicts and sequences that + can contain optional values:: + + >>> {"required": 1.0000005, "optional": None} == approx({"required": 1, "optional": None}) + True + >>> [None, 1.0000005] == approx([None,1]) + True + >>> ["foo", 1.0000005] == approx([None,1]) + False + + If you're thinking about using ``approx``, then you might want to know how + it compares to other good ways of comparing floating-point numbers. All of + these algorithms are based on relative and absolute tolerances and should + agree for the most part, but they do have meaningful differences: + + - ``math.isclose(a, b, rel_tol=1e-9, abs_tol=0.0)``: True if the relative + tolerance is met w.r.t. either ``a`` or ``b`` or if the absolute + tolerance is met. Because the relative tolerance is calculated w.r.t. + both ``a`` and ``b``, this test is symmetric (i.e. neither ``a`` nor + ``b`` is a "reference value"). You have to specify an absolute tolerance + if you want to compare to ``0.0`` because there is no tolerance by + default. More information: :py:func:`math.isclose`. + + - ``numpy.isclose(a, b, rtol=1e-5, atol=1e-8)``: True if the difference + between ``a`` and ``b`` is less that the sum of the relative tolerance + w.r.t. ``b`` and the absolute tolerance. Because the relative tolerance + is only calculated w.r.t. ``b``, this test is asymmetric and you can + think of ``b`` as the reference value. Support for comparing sequences + is provided by :py:func:`numpy.allclose`. More information: + :std:doc:`numpy:reference/generated/numpy.isclose`. + + - ``unittest.TestCase.assertAlmostEqual(a, b)``: True if ``a`` and ``b`` + are within an absolute tolerance of ``1e-7``. No relative tolerance is + considered , so this function is not appropriate for very large or very + small numbers. Also, it's only available in subclasses of ``unittest.TestCase`` + and it's ugly because it doesn't follow PEP8. More information: + :py:meth:`unittest.TestCase.assertAlmostEqual`. + + - ``a == pytest.approx(b, rel=1e-6, abs=1e-12)``: True if the relative + tolerance is met w.r.t. ``b`` or if the absolute tolerance is met. + Because the relative tolerance is only calculated w.r.t. ``b``, this test + is asymmetric and you can think of ``b`` as the reference value. In the + special case that you explicitly specify an absolute tolerance but not a + relative tolerance, only the absolute tolerance is considered. + + .. 
note:: + + ``approx`` can handle numpy arrays, but we recommend the + specialised test helpers in :std:doc:`numpy:reference/routines.testing` + if you need support for comparisons, NaNs, or ULP-based tolerances. + + To match strings using regex, you can use + `Matches `_ + from the + `re_assert package `_. + + + .. note:: + + Unlike built-in equality, this function considers + booleans unequal to numeric zero or one. For example:: + + >>> 1 == approx(True) + False + + .. warning:: + + .. versionchanged:: 3.2 + + In order to avoid inconsistent behavior, :py:exc:`TypeError` is + raised for ``>``, ``>=``, ``<`` and ``<=`` comparisons. + The example below illustrates the problem:: + + assert approx(0.1) > 0.1 + 1e-10 # calls approx(0.1).__gt__(0.1 + 1e-10) + assert 0.1 + 1e-10 > approx(0.1) # calls approx(0.1).__lt__(0.1 + 1e-10) + + In the second example one expects ``approx(0.1).__le__(0.1 + 1e-10)`` + to be called. But instead, ``approx(0.1).__lt__(0.1 + 1e-10)`` is used to + comparison. This is because the call hierarchy of rich comparisons + follows a fixed behavior. More information: :py:meth:`object.__ge__` + + .. versionchanged:: 3.7.1 + ``approx`` raises ``TypeError`` when it encounters a dict value or + sequence element of non-numeric type. + + .. versionchanged:: 6.1.0 + ``approx`` falls back to strict equality for non-numeric types instead + of raising ``TypeError``. + """ + # Delegate the comparison to a class that knows how to deal with the type + # of the expected value (e.g. int, float, list, dict, numpy.array, etc). + # + # The primary responsibility of these classes is to implement ``__eq__()`` + # and ``__repr__()``. The former is used to actually check if some + # "actual" value is equivalent to the given expected value within the + # allowed tolerance. The latter is used to show the user the expected + # value and tolerance, in the case that a test failed. + # + # The actual logic for making approximate comparisons can be found in + # ApproxScalar, which is used to compare individual numbers. All of the + # other Approx classes eventually delegate to this class. The ApproxBase + # class provides some convenient methods and overloads, but isn't really + # essential. + + __tracebackhide__ = True + + if isinstance(expected, Decimal): + cls: type[ApproxBase] = ApproxDecimal + elif isinstance(expected, Mapping): + cls = ApproxMapping + elif _is_numpy_array(expected): + expected = _as_numpy_array(expected) + cls = ApproxNumpy + elif _is_sequence_like(expected): + cls = ApproxSequenceLike + elif isinstance(expected, Collection) and not isinstance(expected, str | bytes): + msg = f"pytest.approx() only supports ordered sequences, but got: {expected!r}" + raise TypeError(msg) + else: + cls = ApproxScalar + + return cls(expected, rel, abs, nan_ok) + + +def _is_sequence_like(expected: object) -> bool: + return ( + hasattr(expected, "__getitem__") + and isinstance(expected, Sized) + and not isinstance(expected, str | bytes) + ) + + +def _is_numpy_array(obj: object) -> bool: + """ + Return true if the given object is implicitly convertible to ndarray, + and numpy is already imported. + """ + return _as_numpy_array(obj) is not None + + +def _as_numpy_array(obj: object) -> ndarray | None: + """ + Return an ndarray if the given object is implicitly convertible to ndarray, + and numpy is already imported, otherwise None. 
+ """ + np: Any = sys.modules.get("numpy") + if np is not None: + # avoid infinite recursion on numpy scalars, which have __array__ + if np.isscalar(obj): + return None + elif isinstance(obj, np.ndarray): + return obj + elif hasattr(obj, "__array__") or hasattr("obj", "__array_interface__"): + return np.asarray(obj) + return None diff --git a/.venv/lib/python3.12/site-packages/_pytest/raises.py b/.venv/lib/python3.12/site-packages/_pytest/raises.py new file mode 100644 index 0000000..7c246fd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/raises.py @@ -0,0 +1,1517 @@ +from __future__ import annotations + +from abc import ABC +from abc import abstractmethod +import re +from re import Pattern +import sys +from textwrap import indent +from typing import Any +from typing import cast +from typing import final +from typing import Generic +from typing import get_args +from typing import get_origin +from typing import Literal +from typing import overload +from typing import TYPE_CHECKING +import warnings + +from _pytest._code import ExceptionInfo +from _pytest._code.code import stringify_exception +from _pytest.outcomes import fail +from _pytest.warning_types import PytestWarning + + +if TYPE_CHECKING: + from collections.abc import Callable + from collections.abc import Sequence + + # for some reason Sphinx does not play well with 'from types import TracebackType' + import types + from typing import TypeGuard + + from typing_extensions import ParamSpec + from typing_extensions import TypeVar + + P = ParamSpec("P") + + # this conditional definition is because we want to allow a TypeVar default + BaseExcT_co_default = TypeVar( + "BaseExcT_co_default", + bound=BaseException, + default=BaseException, + covariant=True, + ) + + # Use short name because it shows up in docs. + E = TypeVar("E", bound=BaseException, default=BaseException) +else: + from typing import TypeVar + + BaseExcT_co_default = TypeVar( + "BaseExcT_co_default", bound=BaseException, covariant=True + ) + +# RaisesGroup doesn't work with a default. +BaseExcT_co = TypeVar("BaseExcT_co", bound=BaseException, covariant=True) +BaseExcT_1 = TypeVar("BaseExcT_1", bound=BaseException) +BaseExcT_2 = TypeVar("BaseExcT_2", bound=BaseException) +ExcT_1 = TypeVar("ExcT_1", bound=Exception) +ExcT_2 = TypeVar("ExcT_2", bound=Exception) + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + from exceptiongroup import ExceptionGroup + + +# String patterns default to including the unicode flag. +_REGEX_NO_FLAGS = re.compile(r"").flags + + +# pytest.raises helper +@overload +def raises( + expected_exception: type[E] | tuple[type[E], ...], + *, + match: str | re.Pattern[str] | None = ..., + check: Callable[[E], bool] = ..., +) -> RaisesExc[E]: ... + + +@overload +def raises( + *, + match: str | re.Pattern[str], + # If exception_type is not provided, check() must do any typechecks itself. + check: Callable[[BaseException], bool] = ..., +) -> RaisesExc[BaseException]: ... + + +@overload +def raises(*, check: Callable[[BaseException], bool]) -> RaisesExc[BaseException]: ... + + +@overload +def raises( + expected_exception: type[E] | tuple[type[E], ...], + func: Callable[..., Any], + *args: Any, + **kwargs: Any, +) -> ExceptionInfo[E]: ... + + +def raises( + expected_exception: type[E] | tuple[type[E], ...] | None = None, + *args: Any, + **kwargs: Any, +) -> RaisesExc[BaseException] | ExceptionInfo[E]: + r"""Assert that a code block/function call raises an exception type, or one of its subclasses. 
+ + :param expected_exception: + The expected exception type, or a tuple if one of multiple possible + exception types are expected. Note that subclasses of the passed exceptions + will also match. + + This is not a required parameter, you may opt to only use ``match`` and/or + ``check`` for verifying the raised exception. + + :kwparam str | re.Pattern[str] | None match: + If specified, a string containing a regular expression, + or a regular expression object, that is tested against the string + representation of the exception and its :pep:`678` `__notes__` + using :func:`re.search`. + + To match a literal string that may contain :ref:`special characters + `, the pattern can first be escaped with :func:`re.escape`. + + (This is only used when ``pytest.raises`` is used as a context manager, + and passed through to the function otherwise. + When using ``pytest.raises`` as a function, you can use: + ``pytest.raises(Exc, func, match="passed on").match("my pattern")``.) + + :kwparam Callable[[BaseException], bool] check: + + .. versionadded:: 8.4 + + If specified, a callable that will be called with the exception as a parameter + after checking the type and the match regex if specified. + If it returns ``True`` it will be considered a match, if not it will + be considered a failed match. + + + Use ``pytest.raises`` as a context manager, which will capture the exception of the given + type, or any of its subclasses:: + + >>> import pytest + >>> with pytest.raises(ZeroDivisionError): + ... 1/0 + + If the code block does not raise the expected exception (:class:`ZeroDivisionError` in the example + above), or no exception at all, the check will fail instead. + + You can also use the keyword argument ``match`` to assert that the + exception matches a text or regex:: + + >>> with pytest.raises(ValueError, match='must be 0 or None'): + ... raise ValueError("value must be 0 or None") + + >>> with pytest.raises(ValueError, match=r'must be \d+$'): + ... raise ValueError("value must be 42") + + The ``match`` argument searches the formatted exception string, which includes any + `PEP-678 `__ ``__notes__``: + + >>> with pytest.raises(ValueError, match=r"had a note added"): # doctest: +SKIP + ... e = ValueError("value must be 42") + ... e.add_note("had a note added") + ... raise e + + The ``check`` argument, if provided, must return True when passed the raised exception + for the match to be successful, otherwise an :exc:`AssertionError` is raised. + + >>> import errno + >>> with pytest.raises(OSError, check=lambda e: e.errno == errno.EACCES): + ... raise OSError(errno.EACCES, "no permission to view") + + The context manager produces an :class:`ExceptionInfo` object which can be used to inspect the + details of the captured exception:: + + >>> with pytest.raises(ValueError) as exc_info: + ... raise ValueError("value must be 42") + >>> assert exc_info.type is ValueError + >>> assert exc_info.value.args[0] == "value must be 42" + + .. warning:: + + Given that ``pytest.raises`` matches subclasses, be wary of using it to match :class:`Exception` like this:: + + # Careful, this will catch ANY exception raised. + with pytest.raises(Exception): + some_function() + + Because :class:`Exception` is the base class of almost all exceptions, it is easy for this to hide + real bugs, where the user wrote this expecting a specific exception, but some other exception is being + raised due to a bug introduced during a refactoring. 
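+
+ If a broad type is unavoidable, a ``match`` pattern at least pins the check
+ to a specific message (an illustrative addition, not part of the upstream
+ docstring; the message text is hypothetical)::
+
+ with pytest.raises(Exception, match="disk full"):
+ some_function()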
+ + Avoid using ``pytest.raises`` to catch :class:`Exception` unless certain that you really want to catch + **any** exception raised. + + .. note:: + + When using ``pytest.raises`` as a context manager, it's worthwhile to + note that normal context manager rules apply and that the exception + raised *must* be the final line in the scope of the context manager. + Lines of code after that, within the scope of the context manager will + not be executed. For example:: + + >>> value = 15 + >>> with pytest.raises(ValueError) as exc_info: + ... if value > 10: + ... raise ValueError("value must be <= 10") + ... assert exc_info.type is ValueError # This will not execute. + + Instead, the following approach must be taken (note the difference in + scope):: + + >>> with pytest.raises(ValueError) as exc_info: + ... if value > 10: + ... raise ValueError("value must be <= 10") + ... + >>> assert exc_info.type is ValueError + + **Expecting exception groups** + + When expecting exceptions wrapped in :exc:`BaseExceptionGroup` or + :exc:`ExceptionGroup`, you should instead use :class:`pytest.RaisesGroup`. + + **Using with** ``pytest.mark.parametrize`` + + When using :ref:`pytest.mark.parametrize ref` + it is possible to parametrize tests such that + some runs raise an exception and others do not. + + See :ref:`parametrizing_conditional_raising` for an example. + + .. seealso:: + + :ref:`assertraises` for more examples and detailed discussion. + + **Legacy form** + + It is possible to specify a callable by passing a to-be-called lambda:: + + >>> raises(ZeroDivisionError, lambda: 1/0) + + + or you can specify an arbitrary callable with arguments:: + + >>> def f(x): return 1/x + ... + >>> raises(ZeroDivisionError, f, 0) + + >>> raises(ZeroDivisionError, f, x=0) + + + The form above is fully supported but discouraged for new code because the + context manager form is regarded as more readable and less error-prone. + + .. note:: + Similar to caught exception objects in Python, explicitly clearing + local references to returned ``ExceptionInfo`` objects can + help the Python interpreter speed up its garbage collection. + + Clearing those references breaks a reference cycle + (``ExceptionInfo`` --> caught exception --> frame stack raising + the exception --> current frame stack --> local variables --> + ``ExceptionInfo``) which makes Python keep all objects referenced + from that cycle (including all local variables in the current + frame) alive until the next cyclic garbage collection run. + More detailed information can be found in the official Python + documentation for :ref:`the try statement `. + """ + __tracebackhide__ = True + + if not args: + if set(kwargs) - {"match", "check", "expected_exception"}: + msg = "Unexpected keyword arguments passed to pytest.raises: " + msg += ", ".join(sorted(kwargs)) + msg += "\nUse context-manager form instead?" + raise TypeError(msg) + + if expected_exception is None: + return RaisesExc(**kwargs) + return RaisesExc(expected_exception, **kwargs) + + if not expected_exception: + raise ValueError( + f"Expected an exception type or a tuple of exception types, but got `{expected_exception!r}`. " + f"Raising exceptions is already understood as failing the test, so you don't need " + f"any special code to say 'this should never raise an exception'." 
+ ) + func = args[0] + if not callable(func): + raise TypeError(f"{func!r} object (type: {type(func)}) must be callable") + with RaisesExc(expected_exception) as excinfo: + func(*args[1:], **kwargs) + try: + return excinfo + finally: + del excinfo + + +# note: RaisesExc/RaisesGroup uses fail() internally, so this alias +# indicates (to [internal] plugins?) that `pytest.raises` will +# raise `_pytest.outcomes.Failed`, where +# `outcomes.Failed is outcomes.fail.Exception is raises.Exception` +# note: this is *not* the same as `_pytest.main.Failed` +# note: mypy does not recognize this attribute, and it's not possible +# to use a protocol/decorator like the others in outcomes due to +# https://github.com/python/mypy/issues/18715 +raises.Exception = fail.Exception # type: ignore[attr-defined] + + +def _match_pattern(match: Pattern[str]) -> str | Pattern[str]: + """Helper function to remove redundant `re.compile` calls when printing regex""" + return match.pattern if match.flags == _REGEX_NO_FLAGS else match + + +def repr_callable(fun: Callable[[BaseExcT_1], bool]) -> str: + """Get the repr of a ``check`` parameter. + + Split out so it can be monkeypatched (e.g. by hypothesis) + """ + return repr(fun) + + +def backquote(s: str) -> str: + return "`" + s + "`" + + +def _exception_type_name( + e: type[BaseException] | tuple[type[BaseException], ...], +) -> str: + if isinstance(e, type): + return e.__name__ + if len(e) == 1: + return e[0].__name__ + return "(" + ", ".join(ee.__name__ for ee in e) + ")" + + +def _check_raw_type( + expected_type: type[BaseException] | tuple[type[BaseException], ...] | None, + exception: BaseException, +) -> str | None: + if expected_type is None or expected_type == (): + return None + + if not isinstance( + exception, + expected_type, + ): + actual_type_str = backquote(_exception_type_name(type(exception)) + "()") + expected_type_str = backquote(_exception_type_name(expected_type)) + if ( + isinstance(exception, BaseExceptionGroup) + and isinstance(expected_type, type) + and not issubclass(expected_type, BaseExceptionGroup) + ): + return f"Unexpected nested {actual_type_str}, expected {expected_type_str}" + return f"{actual_type_str} is not an instance of {expected_type_str}" + return None + + +def is_fully_escaped(s: str) -> bool: + # we know we won't compile with re.VERBOSE, so whitespace doesn't need to be escaped + metacharacters = "{}()+.*?^$[]" + return not any( + c in metacharacters and (i == 0 or s[i - 1] != "\\") for (i, c) in enumerate(s) + ) + + +def unescape(s: str) -> str: + return re.sub(r"\\([{}()+-.*?^$\[\]\s\\])", r"\1", s) + + +# These classes conceptually differ from ExceptionInfo in that ExceptionInfo is tied, and +# constructed from, a particular exception - whereas these are constructed with expected +# exceptions, and later allow matching towards particular exceptions. +# But there's overlap in `ExceptionInfo.match` and `AbstractRaises._check_match`, as with +# `AbstractRaises.matches` and `ExceptionInfo.errisinstance`+`ExceptionInfo.group_contains`. +# The interaction between these classes should perhaps be improved. +class AbstractRaises(ABC, Generic[BaseExcT_co]): + """ABC with common functionality shared between RaisesExc and RaisesGroup""" + + def __init__( + self, + *, + match: str | Pattern[str] | None, + check: Callable[[BaseExcT_co], bool] | None, + ) -> None: + if isinstance(match, str): + # juggle error in order to avoid context to fail (necessary?) 
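+ # (Clarifying note added here, not upstream: the pattern is compiled inside
+ # try/except and fail() is only called afterwards, so the original re.error
+ # does not show up as the __context__ of the resulting test failure.)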
+ re_error = None + try: + self.match: Pattern[str] | None = re.compile(match) + except re.error as e: + re_error = e + if re_error is not None: + fail(f"Invalid regex pattern provided to 'match': {re_error}") + if match == "": + warnings.warn( + PytestWarning( + "matching against an empty string will *always* pass. If you want " + "to check for an empty message you need to pass '^$'. If you don't " + "want to match you should pass `None` or leave out the parameter." + ), + stacklevel=2, + ) + else: + self.match = match + + # check if this is a fully escaped regex and has ^$ to match fully + # in which case we can do a proper diff on error + self.rawmatch: str | None = None + if isinstance(match, str) or ( + isinstance(match, Pattern) and match.flags == _REGEX_NO_FLAGS + ): + if isinstance(match, Pattern): + match = match.pattern + if ( + match + and match[0] == "^" + and match[-1] == "$" + and is_fully_escaped(match[1:-1]) + ): + self.rawmatch = unescape(match[1:-1]) + + self.check = check + self._fail_reason: str | None = None + + # used to suppress repeated printing of `repr(self.check)` + self._nested: bool = False + + # set in self._parse_exc + self.is_baseexception = False + + def _parse_exc( + self, exc: type[BaseExcT_1] | types.GenericAlias, expected: str + ) -> type[BaseExcT_1]: + if isinstance(exc, type) and issubclass(exc, BaseException): + if not issubclass(exc, Exception): + self.is_baseexception = True + return exc + # because RaisesGroup does not support variable number of exceptions there's + # still a use for RaisesExc(ExceptionGroup[Exception]). + origin_exc: type[BaseException] | None = get_origin(exc) + if origin_exc and issubclass(origin_exc, BaseExceptionGroup): + exc_type = get_args(exc)[0] + if ( + issubclass(origin_exc, ExceptionGroup) and exc_type in (Exception, Any) + ) or ( + issubclass(origin_exc, BaseExceptionGroup) + and exc_type in (BaseException, Any) + ): + if not issubclass(origin_exc, ExceptionGroup): + self.is_baseexception = True + return cast(type[BaseExcT_1], origin_exc) + else: + raise ValueError( + f"Only `ExceptionGroup[Exception]` or `BaseExceptionGroup[BaseException]` " + f"are accepted as generic types but got `{exc}`. " + f"As `raises` will catch all instances of the specified group regardless of the " + f"generic argument specific nested exceptions has to be checked " + f"with `RaisesGroup`." + ) + # unclear if the Type/ValueError distinction is even helpful here + msg = f"Expected {expected}, but got " + if isinstance(exc, type): # type: ignore[unreachable] + raise ValueError(msg + f"{exc.__name__!r}") + if isinstance(exc, BaseException): # type: ignore[unreachable] + raise TypeError(msg + f"an exception instance: {type(exc).__name__}") + raise TypeError(msg + repr(type(exc).__name__)) + + @property + def fail_reason(self) -> str | None: + """Set after a call to :meth:`matches` to give a human-readable reason for why the match failed. 
+ When used as a context manager the string will be printed as the reason for the + test failing.""" + return self._fail_reason + + def _check_check( + self: AbstractRaises[BaseExcT_1], + exception: BaseExcT_1, + ) -> bool: + if self.check is None: + return True + + if self.check(exception): + return True + + check_repr = "" if self._nested else " " + repr_callable(self.check) + self._fail_reason = f"check{check_repr} did not return True" + return False + + # TODO: harmonize with ExceptionInfo.match + def _check_match(self, e: BaseException) -> bool: + if self.match is None or re.search( + self.match, + stringified_exception := stringify_exception( + e, include_subexception_msg=False + ), + ): + return True + + # if we're matching a group, make sure we're explicit to reduce confusion + # if they're trying to match an exception contained within the group + maybe_specify_type = ( + f" the `{_exception_type_name(type(e))}()`" + if isinstance(e, BaseExceptionGroup) + else "" + ) + if isinstance(self.rawmatch, str): + # TODO: it instructs to use `-v` to print leading text, but that doesn't work + # I also don't know if this is the proper entry point, or tool to use at all + from _pytest.assertion.util import _diff_text + from _pytest.assertion.util import dummy_highlighter + + diff = _diff_text(self.rawmatch, stringified_exception, dummy_highlighter) + self._fail_reason = ("\n" if diff[0][0] == "-" else "") + "\n".join(diff) + return False + + self._fail_reason = ( + f"Regex pattern did not match{maybe_specify_type}.\n" + f" Expected regex: {_match_pattern(self.match)!r}\n" + f" Actual message: {stringified_exception!r}" + ) + if _match_pattern(self.match) == stringified_exception: + self._fail_reason += "\n Did you mean to `re.escape()` the regex?" + return False + + @abstractmethod + def matches( + self: AbstractRaises[BaseExcT_1], exception: BaseException + ) -> TypeGuard[BaseExcT_1]: + """Check if an exception matches the requirements of this AbstractRaises. + If it fails, :meth:`AbstractRaises.fail_reason` should be set. + """ + + +@final +class RaisesExc(AbstractRaises[BaseExcT_co_default]): + """ + .. versionadded:: 8.4 + + + This is the class constructed when calling :func:`pytest.raises`, but may be used + directly as a helper class with :class:`RaisesGroup` when you want to specify + requirements on sub-exceptions. + + You don't need this if you only want to specify the type, since :class:`RaisesGroup` + accepts ``type[BaseException]``. + + :param type[BaseException] | tuple[type[BaseException]] | None expected_exception: + The expected type, or one of several possible types. + May be ``None`` in order to only make use of ``match`` and/or ``check`` + + The type is checked with :func:`isinstance`, and does not need to be an exact match. + If that is wanted you can use the ``check`` parameter. + + :kwparam str | Pattern[str] match: + A regex to match. + + :kwparam Callable[[BaseException], bool] check: + If specified, a callable that will be called with the exception as a parameter + after checking the type and the match regex if specified. + If it returns ``True`` it will be considered a match, if not it will + be considered a failed match. + + :meth:`RaisesExc.matches` can also be used standalone to check individual exceptions. + + Examples:: + + with RaisesGroup(RaisesExc(ValueError, match="string")) + ... + with RaisesGroup(RaisesExc(check=lambda x: x.args == (3, "hello"))): + ... + with RaisesGroup(RaisesExc(check=lambda x: type(x) is ValueError)): + ... 
+ """ + + # Trio bundled hypothesis monkeypatching, we will probably instead assume that + # hypothesis will handle that in their pytest plugin by the time this is released. + # Alternatively we could add a version of get_pretty_function_description ourselves + # https://github.com/HypothesisWorks/hypothesis/blob/8ced2f59f5c7bea3344e35d2d53e1f8f8eb9fcd8/hypothesis-python/src/hypothesis/internal/reflection.py#L439 + + # At least one of the three parameters must be passed. + @overload + def __init__( + self, + expected_exception: ( + type[BaseExcT_co_default] | tuple[type[BaseExcT_co_default], ...] + ), + /, + *, + match: str | Pattern[str] | None = ..., + check: Callable[[BaseExcT_co_default], bool] | None = ..., + ) -> None: ... + + @overload + def __init__( + self: RaisesExc[BaseException], # Give E a value. + /, + *, + match: str | Pattern[str] | None, + # If exception_type is not provided, check() must do any typechecks itself. + check: Callable[[BaseException], bool] | None = ..., + ) -> None: ... + + @overload + def __init__(self, /, *, check: Callable[[BaseException], bool]) -> None: ... + + def __init__( + self, + expected_exception: ( + type[BaseExcT_co_default] | tuple[type[BaseExcT_co_default], ...] | None + ) = None, + /, + *, + match: str | Pattern[str] | None = None, + check: Callable[[BaseExcT_co_default], bool] | None = None, + ): + super().__init__(match=match, check=check) + if isinstance(expected_exception, tuple): + expected_exceptions = expected_exception + elif expected_exception is None: + expected_exceptions = () + else: + expected_exceptions = (expected_exception,) + + if (expected_exceptions == ()) and match is None and check is None: + raise ValueError("You must specify at least one parameter to match on.") + + self.expected_exceptions = tuple( + self._parse_exc(e, expected="a BaseException type") + for e in expected_exceptions + ) + + self._just_propagate = False + + def matches( + self, + exception: BaseException | None, + ) -> TypeGuard[BaseExcT_co_default]: + """Check if an exception matches the requirements of this :class:`RaisesExc`. + If it fails, :attr:`RaisesExc.fail_reason` will be set. + + Examples:: + + assert RaisesExc(ValueError).matches(my_exception): + # is equivalent to + assert isinstance(my_exception, ValueError) + + # this can be useful when checking e.g. the ``__cause__`` of an exception. + with pytest.raises(ValueError) as excinfo: + ... + assert RaisesExc(SyntaxError, match="foo").matches(excinfo.value.__cause__) + # above line is equivalent to + assert isinstance(excinfo.value.__cause__, SyntaxError) + assert re.search("foo", str(excinfo.value.__cause__) + + """ + self._just_propagate = False + if exception is None: + self._fail_reason = "exception is None" + return False + if not self._check_type(exception): + self._just_propagate = True + return False + + if not self._check_match(exception): + return False + + return self._check_check(exception) + + def __repr__(self) -> str: + parameters = [] + if self.expected_exceptions: + parameters.append(_exception_type_name(self.expected_exceptions)) + if self.match is not None: + # If no flags were specified, discard the redundant re.compile() here. 
+ parameters.append( + f"match={_match_pattern(self.match)!r}", + ) + if self.check is not None: + parameters.append(f"check={repr_callable(self.check)}") + return f"RaisesExc({', '.join(parameters)})" + + def _check_type(self, exception: BaseException) -> TypeGuard[BaseExcT_co_default]: + self._fail_reason = _check_raw_type(self.expected_exceptions, exception) + return self._fail_reason is None + + def __enter__(self) -> ExceptionInfo[BaseExcT_co_default]: + self.excinfo: ExceptionInfo[BaseExcT_co_default] = ExceptionInfo.for_later() + return self.excinfo + + # TODO: move common code into superclass + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> bool: + __tracebackhide__ = True + if exc_type is None: + if not self.expected_exceptions: + fail("DID NOT RAISE any exception") + if len(self.expected_exceptions) > 1: + fail(f"DID NOT RAISE any of {self.expected_exceptions!r}") + + fail(f"DID NOT RAISE {self.expected_exceptions[0]!r}") + + assert self.excinfo is not None, ( + "Internal error - should have been constructed in __enter__" + ) + + if not self.matches(exc_val): + if self._just_propagate: + return False + raise AssertionError(self._fail_reason) + + # Cast to narrow the exception type now that it's verified.... + # even though the TypeGuard in self.matches should be narrowing + exc_info = cast( + "tuple[type[BaseExcT_co_default], BaseExcT_co_default, types.TracebackType]", + (exc_type, exc_val, exc_tb), + ) + self.excinfo.fill_unfilled(exc_info) + return True + + +@final +class RaisesGroup(AbstractRaises[BaseExceptionGroup[BaseExcT_co]]): + """ + .. versionadded:: 8.4 + + Contextmanager for checking for an expected :exc:`ExceptionGroup`. + This works similar to :func:`pytest.raises`, but allows for specifying the structure of an :exc:`ExceptionGroup`. + :meth:`ExceptionInfo.group_contains` also tries to handle exception groups, + but it is very bad at checking that you *didn't* get unexpected exceptions. + + The catching behaviour differs from :ref:`except* `, being much + stricter about the structure by default. + By using ``allow_unwrapped=True`` and ``flatten_subgroups=True`` you can match + :ref:`except* ` fully when expecting a single exception. + + :param args: + Any number of exception types, :class:`RaisesGroup` or :class:`RaisesExc` + to specify the exceptions contained in this exception. + All specified exceptions must be present in the raised group, *and no others*. + + If you expect a variable number of exceptions you need to use + :func:`pytest.raises(ExceptionGroup) ` and manually check + the contained exceptions. Consider making use of :meth:`RaisesExc.matches`. + + It does not care about the order of the exceptions, so + ``RaisesGroup(ValueError, TypeError)`` + is equivalent to + ``RaisesGroup(TypeError, ValueError)``. + :kwparam str | re.Pattern[str] | None match: + If specified, a string containing a regular expression, + or a regular expression object, that is tested against the string + representation of the exception group and its :pep:`678` `__notes__` + using :func:`re.search`. + + To match a literal string that may contain :ref:`special characters + `, the pattern can first be escaped with :func:`re.escape`. + + Note that " (5 subgroups)" will be stripped from the ``repr`` before matching. + :kwparam Callable[[E], bool] check: + If specified, a callable that will be called with the group as a parameter + after successfully matching the expected exceptions. 
If it returns ``True`` + it will be considered a match, if not it will be considered a failed match. + :kwparam bool allow_unwrapped: + If expecting a single exception or :class:`RaisesExc` it will match even + if the exception is not inside an exceptiongroup. + + Using this together with ``match``, ``check`` or expecting multiple exceptions + will raise an error. + :kwparam bool flatten_subgroups: + "flatten" any groups inside the raised exception group, extracting all exceptions + inside any nested groups, before matching. Without this it expects you to + fully specify the nesting structure by passing :class:`RaisesGroup` as expected + parameter. + + Examples:: + + with RaisesGroup(ValueError): + raise ExceptionGroup("", (ValueError(),)) + # match + with RaisesGroup( + ValueError, + ValueError, + RaisesExc(TypeError, match="^expected int$"), + match="^my group$", + ): + raise ExceptionGroup( + "my group", + [ + ValueError(), + TypeError("expected int"), + ValueError(), + ], + ) + # check + with RaisesGroup( + KeyboardInterrupt, + match="^hello$", + check=lambda x: isinstance(x.__cause__, ValueError), + ): + raise BaseExceptionGroup("hello", [KeyboardInterrupt()]) from ValueError + # nested groups + with RaisesGroup(RaisesGroup(ValueError)): + raise ExceptionGroup("", (ExceptionGroup("", (ValueError(),)),)) + + # flatten_subgroups + with RaisesGroup(ValueError, flatten_subgroups=True): + raise ExceptionGroup("", (ExceptionGroup("", (ValueError(),)),)) + + # allow_unwrapped + with RaisesGroup(ValueError, allow_unwrapped=True): + raise ValueError + + + :meth:`RaisesGroup.matches` can also be used directly to check a standalone exception group. + + + The matching algorithm is greedy, which means cases such as this may fail:: + + with RaisesGroup(ValueError, RaisesExc(ValueError, match="hello")): + raise ExceptionGroup("", (ValueError("hello"), ValueError("goodbye"))) + + even though it generally does not care about the order of the exceptions in the group. + To avoid the above you should specify the first :exc:`ValueError` with a :class:`RaisesExc` as well. + + .. note:: + When raised exceptions don't match the expected ones, you'll get a detailed error + message explaining why. This includes ``repr(check)`` if set, which in Python can be + overly verbose, showing memory locations etc etc. + + If installed and imported (in e.g. ``conftest.py``), the ``hypothesis`` library will + monkeypatch this output to provide shorter & more readable repr's. + """ + + # allow_unwrapped=True requires: singular exception, exception not being + # RaisesGroup instance, match is None, check is None + @overload + def __init__( + self, + expected_exception: type[BaseExcT_co] | RaisesExc[BaseExcT_co], + /, + *, + allow_unwrapped: Literal[True], + flatten_subgroups: bool = False, + ) -> None: ... + + # flatten_subgroups = True also requires no nested RaisesGroup + @overload + def __init__( + self, + expected_exception: type[BaseExcT_co] | RaisesExc[BaseExcT_co], + /, + *other_exceptions: type[BaseExcT_co] | RaisesExc[BaseExcT_co], + flatten_subgroups: Literal[True], + match: str | Pattern[str] | None = None, + check: Callable[[BaseExceptionGroup[BaseExcT_co]], bool] | None = None, + ) -> None: ... + + # simplify the typevars if possible (the following 3 are equivalent but go simpler->complicated) + # ... the first handles RaisesGroup[ValueError], the second RaisesGroup[ExceptionGroup[ValueError]], + # the third RaisesGroup[ValueError | ExceptionGroup[ValueError]]. + # ... 
otherwise, we will get results like RaisesGroup[ValueError | ExceptionGroup[Never]] (I think) + # (technically correct but misleading) + @overload + def __init__( + self: RaisesGroup[ExcT_1], + expected_exception: type[ExcT_1] | RaisesExc[ExcT_1], + /, + *other_exceptions: type[ExcT_1] | RaisesExc[ExcT_1], + match: str | Pattern[str] | None = None, + check: Callable[[ExceptionGroup[ExcT_1]], bool] | None = None, + ) -> None: ... + + @overload + def __init__( + self: RaisesGroup[ExceptionGroup[ExcT_2]], + expected_exception: RaisesGroup[ExcT_2], + /, + *other_exceptions: RaisesGroup[ExcT_2], + match: str | Pattern[str] | None = None, + check: Callable[[ExceptionGroup[ExceptionGroup[ExcT_2]]], bool] | None = None, + ) -> None: ... + + @overload + def __init__( + self: RaisesGroup[ExcT_1 | ExceptionGroup[ExcT_2]], + expected_exception: type[ExcT_1] | RaisesExc[ExcT_1] | RaisesGroup[ExcT_2], + /, + *other_exceptions: type[ExcT_1] | RaisesExc[ExcT_1] | RaisesGroup[ExcT_2], + match: str | Pattern[str] | None = None, + check: ( + Callable[[ExceptionGroup[ExcT_1 | ExceptionGroup[ExcT_2]]], bool] | None + ) = None, + ) -> None: ... + + # same as the above 3 but handling BaseException + @overload + def __init__( + self: RaisesGroup[BaseExcT_1], + expected_exception: type[BaseExcT_1] | RaisesExc[BaseExcT_1], + /, + *other_exceptions: type[BaseExcT_1] | RaisesExc[BaseExcT_1], + match: str | Pattern[str] | None = None, + check: Callable[[BaseExceptionGroup[BaseExcT_1]], bool] | None = None, + ) -> None: ... + + @overload + def __init__( + self: RaisesGroup[BaseExceptionGroup[BaseExcT_2]], + expected_exception: RaisesGroup[BaseExcT_2], + /, + *other_exceptions: RaisesGroup[BaseExcT_2], + match: str | Pattern[str] | None = None, + check: ( + Callable[[BaseExceptionGroup[BaseExceptionGroup[BaseExcT_2]]], bool] | None + ) = None, + ) -> None: ... + + @overload + def __init__( + self: RaisesGroup[BaseExcT_1 | BaseExceptionGroup[BaseExcT_2]], + expected_exception: type[BaseExcT_1] + | RaisesExc[BaseExcT_1] + | RaisesGroup[BaseExcT_2], + /, + *other_exceptions: type[BaseExcT_1] + | RaisesExc[BaseExcT_1] + | RaisesGroup[BaseExcT_2], + match: str | Pattern[str] | None = None, + check: ( + Callable[ + [BaseExceptionGroup[BaseExcT_1 | BaseExceptionGroup[BaseExcT_2]]], + bool, + ] + | None + ) = None, + ) -> None: ... 
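The overloads above exist only to keep the inferred type parameter readable. As a hedged sketch of what they are intended to produce for typical constructions (assuming pytest >= 8.4, where RaisesGroup/RaisesExc are exported from `pytest`; the types in the comments are approximations of what the overloads aim for, not output copied from a type checker):

    # Illustrative only; requires Python 3.11+ for the ExceptionGroup builtin.
    import pytest

    def test_flat() -> None:
        # excinfo is roughly ExceptionInfo[ExceptionGroup[ValueError]]
        with pytest.RaisesGroup(ValueError) as excinfo:
            raise ExceptionGroup("msg", [ValueError("boom")])
        assert isinstance(excinfo.value.exceptions[0], ValueError)

    def test_nested() -> None:
        # excinfo is roughly ExceptionInfo[ExceptionGroup[ExceptionGroup[ValueError]]]
        with pytest.RaisesGroup(pytest.RaisesGroup(ValueError)):
            raise ExceptionGroup("outer", [ExceptionGroup("inner", [ValueError()])])

    def test_mixed() -> None:
        # excinfo is roughly ExceptionInfo[ExceptionGroup[TypeError | ExceptionGroup[ValueError]]]
        with pytest.RaisesGroup(TypeError, pytest.RaisesGroup(ValueError)):
            raise ExceptionGroup(
                "outer", [TypeError(), ExceptionGroup("inner", [ValueError()])]
            )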
+ + def __init__( + self: RaisesGroup[ExcT_1 | BaseExcT_1 | BaseExceptionGroup[BaseExcT_2]], + expected_exception: type[BaseExcT_1] + | RaisesExc[BaseExcT_1] + | RaisesGroup[BaseExcT_2], + /, + *other_exceptions: type[BaseExcT_1] + | RaisesExc[BaseExcT_1] + | RaisesGroup[BaseExcT_2], + allow_unwrapped: bool = False, + flatten_subgroups: bool = False, + match: str | Pattern[str] | None = None, + check: ( + Callable[[BaseExceptionGroup[BaseExcT_1]], bool] + | Callable[[ExceptionGroup[ExcT_1]], bool] + | None + ) = None, + ): + # The type hint on the `self` and `check` parameters uses different formats + # that are *very* hard to reconcile while adhering to the overloads, so we cast + # it to avoid an error when passing it to super().__init__ + check = cast( + "Callable[[BaseExceptionGroup[ExcT_1|BaseExcT_1|BaseExceptionGroup[BaseExcT_2]]], bool]", + check, + ) + super().__init__(match=match, check=check) + self.allow_unwrapped = allow_unwrapped + self.flatten_subgroups: bool = flatten_subgroups + self.is_baseexception = False + + if allow_unwrapped and other_exceptions: + raise ValueError( + "You cannot specify multiple exceptions with `allow_unwrapped=True.`" + " If you want to match one of multiple possible exceptions you should" + " use a `RaisesExc`." + " E.g. `RaisesExc(check=lambda e: isinstance(e, (...)))`", + ) + if allow_unwrapped and isinstance(expected_exception, RaisesGroup): + raise ValueError( + "`allow_unwrapped=True` has no effect when expecting a `RaisesGroup`." + " You might want it in the expected `RaisesGroup`, or" + " `flatten_subgroups=True` if you don't care about the structure.", + ) + if allow_unwrapped and (match is not None or check is not None): + raise ValueError( + "`allow_unwrapped=True` bypasses the `match` and `check` parameters" + " if the exception is unwrapped. If you intended to match/check the" + " exception you should use a `RaisesExc` object. If you want to match/check" + " the exceptiongroup when the exception *is* wrapped you need to" + " do e.g. `if isinstance(exc.value, ExceptionGroup):" + " assert RaisesGroup(...).matches(exc.value)` afterwards.", + ) + + self.expected_exceptions: tuple[ + type[BaseExcT_co] | RaisesExc[BaseExcT_co] | RaisesGroup[BaseException], ... + ] = tuple( + self._parse_excgroup(e, "a BaseException type, RaisesExc, or RaisesGroup") + for e in ( + expected_exception, + *other_exceptions, + ) + ) + + def _parse_excgroup( + self, + exc: ( + type[BaseExcT_co] + | types.GenericAlias + | RaisesExc[BaseExcT_1] + | RaisesGroup[BaseExcT_2] + ), + expected: str, + ) -> type[BaseExcT_co] | RaisesExc[BaseExcT_1] | RaisesGroup[BaseExcT_2]: + # verify exception type and set `self.is_baseexception` + if isinstance(exc, RaisesGroup): + if self.flatten_subgroups: + raise ValueError( + "You cannot specify a nested structure inside a RaisesGroup with" + " `flatten_subgroups=True`. 
The parameter will flatten subgroups" + " in the raised exceptiongroup before matching, which would never" + " match a nested structure.", + ) + self.is_baseexception |= exc.is_baseexception + exc._nested = True + return exc + elif isinstance(exc, RaisesExc): + self.is_baseexception |= exc.is_baseexception + exc._nested = True + return exc + elif isinstance(exc, tuple): + raise TypeError( + f"Expected {expected}, but got {type(exc).__name__!r}.\n" + "RaisesGroup does not support tuples of exception types when expecting one of " + "several possible exception types like RaisesExc.\n" + "If you meant to expect a group with multiple exceptions, list them as separate arguments." + ) + else: + return super()._parse_exc(exc, expected) + + @overload + def __enter__( + self: RaisesGroup[ExcT_1], + ) -> ExceptionInfo[ExceptionGroup[ExcT_1]]: ... + @overload + def __enter__( + self: RaisesGroup[BaseExcT_1], + ) -> ExceptionInfo[BaseExceptionGroup[BaseExcT_1]]: ... + + def __enter__(self) -> ExceptionInfo[BaseExceptionGroup[BaseException]]: + self.excinfo: ExceptionInfo[BaseExceptionGroup[BaseExcT_co]] = ( + ExceptionInfo.for_later() + ) + return self.excinfo + + def __repr__(self) -> str: + reqs = [ + e.__name__ if isinstance(e, type) else repr(e) + for e in self.expected_exceptions + ] + if self.allow_unwrapped: + reqs.append(f"allow_unwrapped={self.allow_unwrapped}") + if self.flatten_subgroups: + reqs.append(f"flatten_subgroups={self.flatten_subgroups}") + if self.match is not None: + # If no flags were specified, discard the redundant re.compile() here. + reqs.append(f"match={_match_pattern(self.match)!r}") + if self.check is not None: + reqs.append(f"check={repr_callable(self.check)}") + return f"RaisesGroup({', '.join(reqs)})" + + def _unroll_exceptions( + self, + exceptions: Sequence[BaseException], + ) -> Sequence[BaseException]: + """Used if `flatten_subgroups=True`.""" + res: list[BaseException] = [] + for exc in exceptions: + if isinstance(exc, BaseExceptionGroup): + res.extend(self._unroll_exceptions(exc.exceptions)) + + else: + res.append(exc) + return res + + @overload + def matches( + self: RaisesGroup[ExcT_1], + exception: BaseException | None, + ) -> TypeGuard[ExceptionGroup[ExcT_1]]: ... + @overload + def matches( + self: RaisesGroup[BaseExcT_1], + exception: BaseException | None, + ) -> TypeGuard[BaseExceptionGroup[BaseExcT_1]]: ... + + def matches( + self, + exception: BaseException | None, + ) -> bool: + """Check if an exception matches the requirements of this RaisesGroup. + If it fails, `RaisesGroup.fail_reason` will be set. + + Example:: + + with pytest.raises(TypeError) as excinfo: + ... 
+ assert RaisesGroup(ValueError).matches(excinfo.value.__cause__) + # the above line is equivalent to + myexc = excinfo.value.__cause + assert isinstance(myexc, BaseExceptionGroup) + assert len(myexc.exceptions) == 1 + assert isinstance(myexc.exceptions[0], ValueError) + """ + self._fail_reason = None + if exception is None: + self._fail_reason = "exception is None" + return False + if not isinstance(exception, BaseExceptionGroup): + # we opt to only print type of the exception here, as the repr would + # likely be quite long + not_group_msg = f"`{type(exception).__name__}()` is not an exception group" + if len(self.expected_exceptions) > 1: + self._fail_reason = not_group_msg + return False + # if we have 1 expected exception, check if it would work even if + # allow_unwrapped is not set + res = self._check_expected(self.expected_exceptions[0], exception) + if res is None and self.allow_unwrapped: + return True + + if res is None: + self._fail_reason = ( + f"{not_group_msg}, but would match with `allow_unwrapped=True`" + ) + elif self.allow_unwrapped: + self._fail_reason = res + else: + self._fail_reason = not_group_msg + return False + + actual_exceptions: Sequence[BaseException] = exception.exceptions + if self.flatten_subgroups: + actual_exceptions = self._unroll_exceptions(actual_exceptions) + + if not self._check_match(exception): + self._fail_reason = cast(str, self._fail_reason) + old_reason = self._fail_reason + if ( + len(actual_exceptions) == len(self.expected_exceptions) == 1 + and isinstance(expected := self.expected_exceptions[0], type) + and isinstance(actual := actual_exceptions[0], expected) + and self._check_match(actual) + ): + assert self.match is not None, "can't be None if _check_match failed" + assert self._fail_reason is old_reason is not None + self._fail_reason += ( + f"\n" + f" but matched the expected `{self._repr_expected(expected)}`.\n" + f" You might want " + f"`RaisesGroup(RaisesExc({expected.__name__}, match={_match_pattern(self.match)!r}))`" + ) + else: + self._fail_reason = old_reason + return False + + # do the full check on expected exceptions + if not self._check_exceptions( + exception, + actual_exceptions, + ): + self._fail_reason = cast(str, self._fail_reason) + assert self._fail_reason is not None + old_reason = self._fail_reason + # if we're not expecting a nested structure, and there is one, do a second + # pass where we try flattening it + if ( + not self.flatten_subgroups + and not any( + isinstance(e, RaisesGroup) for e in self.expected_exceptions + ) + and any(isinstance(e, BaseExceptionGroup) for e in actual_exceptions) + and self._check_exceptions( + exception, + self._unroll_exceptions(exception.exceptions), + ) + ): + # only indent if it's a single-line reason. In a multi-line there's already + # indented lines that this does not belong to. + indent = " " if "\n" not in self._fail_reason else "" + self._fail_reason = ( + old_reason + + f"\n{indent}Did you mean to use `flatten_subgroups=True`?" + ) + else: + self._fail_reason = old_reason + return False + + # Only run `self.check` once we know `exception` is of the correct type. 
+ if not self._check_check(exception): + reason = ( + cast(str, self._fail_reason) + f" on the {type(exception).__name__}" + ) + if ( + len(actual_exceptions) == len(self.expected_exceptions) == 1 + and isinstance(expected := self.expected_exceptions[0], type) + # we explicitly break typing here :) + and self._check_check(actual_exceptions[0]) # type: ignore[arg-type] + ): + self._fail_reason = reason + ( + f", but did return True for the expected {self._repr_expected(expected)}." + f" You might want RaisesGroup(RaisesExc({expected.__name__}, check=<...>))" + ) + else: + self._fail_reason = reason + return False + + return True + + @staticmethod + def _check_expected( + expected_type: ( + type[BaseException] | RaisesExc[BaseException] | RaisesGroup[BaseException] + ), + exception: BaseException, + ) -> str | None: + """Helper method for `RaisesGroup.matches` and `RaisesGroup._check_exceptions` + to check one of potentially several expected exceptions.""" + if isinstance(expected_type, type): + return _check_raw_type(expected_type, exception) + res = expected_type.matches(exception) + if res: + return None + assert expected_type.fail_reason is not None + if expected_type.fail_reason.startswith("\n"): + return f"\n{expected_type!r}: {indent(expected_type.fail_reason, ' ')}" + return f"{expected_type!r}: {expected_type.fail_reason}" + + @staticmethod + def _repr_expected(e: type[BaseException] | AbstractRaises[BaseException]) -> str: + """Get the repr of an expected type/RaisesExc/RaisesGroup, but we only want + the name if it's a type""" + if isinstance(e, type): + return _exception_type_name(e) + return repr(e) + + @overload + def _check_exceptions( + self: RaisesGroup[ExcT_1], + _exception: Exception, + actual_exceptions: Sequence[Exception], + ) -> TypeGuard[ExceptionGroup[ExcT_1]]: ... + @overload + def _check_exceptions( + self: RaisesGroup[BaseExcT_1], + _exception: BaseException, + actual_exceptions: Sequence[BaseException], + ) -> TypeGuard[BaseExceptionGroup[BaseExcT_1]]: ... 
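A minimal sketch of the behaviour that `_check_exceptions` reports on when `matches()` is used standalone, i.e. outside a `with` block (assuming pytest >= 8.4; the exact wording of `fail_reason` is an implementation detail and is not reproduced here):

    # ExceptionGroup is a builtin on Python 3.11+.
    import pytest

    group = ExceptionGroup("msg", [ValueError("a"), TypeError("b")])

    # Both contained exceptions are accounted for, so this matches.
    assert pytest.RaisesGroup(ValueError, TypeError).matches(group)

    # The TypeError is left unmatched, so this fails and records why.
    too_narrow = pytest.RaisesGroup(ValueError)
    assert not too_narrow.matches(group)
    assert too_narrow.fail_reason is not None  # human-readable mismatch description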
+ + def _check_exceptions( + self, + _exception: BaseException, + actual_exceptions: Sequence[BaseException], + ) -> bool: + """Helper method for RaisesGroup.matches that attempts to pair up expected and actual exceptions""" + # The _exception parameter is not used, but necessary for the TypeGuard + + # full table with all results + results = ResultHolder(self.expected_exceptions, actual_exceptions) + + # (indexes of) raised exceptions that haven't (yet) found an expected + remaining_actual = list(range(len(actual_exceptions))) + # (indexes of) expected exceptions that haven't found a matching raised + failed_expected: list[int] = [] + # successful greedy matches + matches: dict[int, int] = {} + + # loop over expected exceptions first to get a more predictable result + for i_exp, expected in enumerate(self.expected_exceptions): + for i_rem in remaining_actual: + res = self._check_expected(expected, actual_exceptions[i_rem]) + results.set_result(i_exp, i_rem, res) + if res is None: + remaining_actual.remove(i_rem) + matches[i_exp] = i_rem + break + else: + failed_expected.append(i_exp) + + # All exceptions matched up successfully + if not remaining_actual and not failed_expected: + return True + + # in case of a single expected and single raised we simplify the output + if 1 == len(actual_exceptions) == len(self.expected_exceptions): + assert not matches + self._fail_reason = res + return False + + # The test case is failing, so we can do a slow and exhaustive check to find + # duplicate matches etc that will be helpful in debugging + for i_exp, expected in enumerate(self.expected_exceptions): + for i_actual, actual in enumerate(actual_exceptions): + if results.has_result(i_exp, i_actual): + continue + results.set_result( + i_exp, i_actual, self._check_expected(expected, actual) + ) + + successful_str = ( + f"{len(matches)} matched exception{'s' if len(matches) > 1 else ''}. " + if matches + else "" + ) + + # all expected were found + if not failed_expected and results.no_match_for_actual(remaining_actual): + self._fail_reason = ( + f"{successful_str}Unexpected exception(s):" + f" {[actual_exceptions[i] for i in remaining_actual]!r}" + ) + return False + # all raised exceptions were expected + if not remaining_actual and results.no_match_for_expected(failed_expected): + no_match_for_str = ", ".join( + self._repr_expected(self.expected_exceptions[i]) + for i in failed_expected + ) + self._fail_reason = f"{successful_str}Too few exceptions raised, found no match for: [{no_match_for_str}]" + return False + + # if there's only one remaining and one failed, and the unmatched didn't match anything else, + # we elect to only print why the remaining and the failed didn't match. + if ( + 1 == len(remaining_actual) == len(failed_expected) + and results.no_match_for_actual(remaining_actual) + and results.no_match_for_expected(failed_expected) + ): + self._fail_reason = f"{successful_str}{results.get_result(failed_expected[0], remaining_actual[0])}" + return False + + # there's both expected and raised exceptions without matches + s = "" + if matches: + s += f"\n{successful_str}" + indent_1 = " " * 2 + indent_2 = " " * 4 + + if not remaining_actual: + s += "\nToo few exceptions raised!" + elif not failed_expected: + s += "\nUnexpected exception(s)!" 
+ + if failed_expected: + s += "\nThe following expected exceptions did not find a match:" + rev_matches = {v: k for k, v in matches.items()} + for i_failed in failed_expected: + s += ( + f"\n{indent_1}{self._repr_expected(self.expected_exceptions[i_failed])}" + ) + for i_actual, actual in enumerate(actual_exceptions): + if results.get_result(i_exp, i_actual) is None: + # we print full repr of match target + s += ( + f"\n{indent_2}It matches {backquote(repr(actual))} which was paired with " + + backquote( + self._repr_expected( + self.expected_exceptions[rev_matches[i_actual]] + ) + ) + ) + + if remaining_actual: + s += "\nThe following raised exceptions did not find a match" + for i_actual in remaining_actual: + s += f"\n{indent_1}{actual_exceptions[i_actual]!r}:" + for i_exp, expected in enumerate(self.expected_exceptions): + res = results.get_result(i_exp, i_actual) + if i_exp in failed_expected: + assert res is not None + if res[0] != "\n": + s += "\n" + s += indent(res, indent_2) + if res is None: + # we print full repr of match target + s += ( + f"\n{indent_2}It matches {backquote(self._repr_expected(expected))} " + f"which was paired with {backquote(repr(actual_exceptions[matches[i_exp]]))}" + ) + + if len(self.expected_exceptions) == len(actual_exceptions) and possible_match( + results + ): + s += ( + "\nThere exist a possible match when attempting an exhaustive check," + " but RaisesGroup uses a greedy algorithm. " + "Please make your expected exceptions more stringent with `RaisesExc` etc" + " so the greedy algorithm can function." + ) + self._fail_reason = s + return False + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> bool: + __tracebackhide__ = True + if exc_type is None: + fail(f"DID NOT RAISE any exception, expected `{self.expected_type()}`") + + assert self.excinfo is not None, ( + "Internal error - should have been constructed in __enter__" + ) + + # group_str is the only thing that differs between RaisesExc and RaisesGroup... + # I might just scrap it? Or make it part of fail_reason + group_str = ( + "(group)" + if self.allow_unwrapped and not issubclass(exc_type, BaseExceptionGroup) + else "group" + ) + + if not self.matches(exc_val): + fail(f"Raised exception {group_str} did not match: {self._fail_reason}") + + # Cast to narrow the exception type now that it's verified.... + # even though the TypeGuard in self.matches should be narrowing + exc_info = cast( + "tuple[type[BaseExceptionGroup[BaseExcT_co]], BaseExceptionGroup[BaseExcT_co], types.TracebackType]", + (exc_type, exc_val, exc_tb), + ) + self.excinfo.fill_unfilled(exc_info) + return True + + def expected_type(self) -> str: + subexcs = [] + for e in self.expected_exceptions: + if isinstance(e, RaisesExc): + subexcs.append(repr(e)) + elif isinstance(e, RaisesGroup): + subexcs.append(e.expected_type()) + elif isinstance(e, type): + subexcs.append(e.__name__) + else: # pragma: no cover + raise AssertionError("unknown type") + group_type = "Base" if self.is_baseexception else "" + return f"{group_type}ExceptionGroup({', '.join(subexcs)})" + + +@final +class NotChecked: + """Singleton for unchecked values in ResultHolder""" + + +class ResultHolder: + """Container for results of checking exceptions. + Used in RaisesGroup._check_exceptions and possible_match. + """ + + def __init__( + self, + expected_exceptions: tuple[ + type[BaseException] | AbstractRaises[BaseException], ... 
+ ], + actual_exceptions: Sequence[BaseException], + ) -> None: + self.results: list[list[str | type[NotChecked] | None]] = [ + [NotChecked for _ in expected_exceptions] for _ in actual_exceptions + ] + + def set_result(self, expected: int, actual: int, result: str | None) -> None: + self.results[actual][expected] = result + + def get_result(self, expected: int, actual: int) -> str | None: + res = self.results[actual][expected] + assert res is not NotChecked + # mypy doesn't support identity checking against anything but None + return res # type: ignore[return-value] + + def has_result(self, expected: int, actual: int) -> bool: + return self.results[actual][expected] is not NotChecked + + def no_match_for_expected(self, expected: list[int]) -> bool: + for i in expected: + for actual_results in self.results: + assert actual_results[i] is not NotChecked + if actual_results[i] is None: + return False + return True + + def no_match_for_actual(self, actual: list[int]) -> bool: + for i in actual: + for res in self.results[i]: + assert res is not NotChecked + if res is None: + return False + return True + + +def possible_match(results: ResultHolder, used: set[int] | None = None) -> bool: + if used is None: + used = set() + curr_row = len(used) + if curr_row == len(results.results): + return True + return any( + val is None and i not in used and possible_match(results, used | {i}) + for (i, val) in enumerate(results.results[curr_row]) + ) diff --git a/.venv/lib/python3.12/site-packages/_pytest/recwarn.py b/.venv/lib/python3.12/site-packages/_pytest/recwarn.py new file mode 100644 index 0000000..e3db717 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/recwarn.py @@ -0,0 +1,367 @@ +# mypy: allow-untyped-defs +"""Record warnings during test function execution.""" + +from __future__ import annotations + +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterator +from pprint import pformat +import re +from types import TracebackType +from typing import Any +from typing import final +from typing import overload +from typing import TYPE_CHECKING +from typing import TypeVar + + +if TYPE_CHECKING: + from typing_extensions import Self + +import warnings + +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.outcomes import Exit +from _pytest.outcomes import fail + + +T = TypeVar("T") + + +@fixture +def recwarn() -> Generator[WarningsRecorder]: + """Return a :class:`WarningsRecorder` instance that records all warnings emitted by test functions. + + See :ref:`warnings` for information on warning categories. + """ + wrec = WarningsRecorder(_ispytest=True) + with wrec: + warnings.simplefilter("default") + yield wrec + + +@overload +def deprecated_call( + *, match: str | re.Pattern[str] | None = ... +) -> WarningsRecorder: ... + + +@overload +def deprecated_call(func: Callable[..., T], *args: Any, **kwargs: Any) -> T: ... + + +def deprecated_call( + func: Callable[..., Any] | None = None, *args: Any, **kwargs: Any +) -> WarningsRecorder | Any: + """Assert that code produces a ``DeprecationWarning`` or ``PendingDeprecationWarning`` or ``FutureWarning``. + + This function can be used as a context manager:: + + >>> import warnings + >>> def api_call_v2(): + ... warnings.warn('use v3 of this api', DeprecationWarning) + ... return 200 + + >>> import pytest + >>> with pytest.deprecated_call(): + ... 
assert api_call_v2() == 200 + + It can also be used by passing a function and ``*args`` and ``**kwargs``, + in which case it will ensure calling ``func(*args, **kwargs)`` produces one of + the warnings types above. The return value is the return value of the function. + + In the context manager form you may use the keyword argument ``match`` to assert + that the warning matches a text or regex. + + The context manager produces a list of :class:`warnings.WarningMessage` objects, + one for each warning raised. + """ + __tracebackhide__ = True + if func is not None: + args = (func, *args) + return warns( + (DeprecationWarning, PendingDeprecationWarning, FutureWarning), *args, **kwargs + ) + + +@overload +def warns( + expected_warning: type[Warning] | tuple[type[Warning], ...] = ..., + *, + match: str | re.Pattern[str] | None = ..., +) -> WarningsChecker: ... + + +@overload +def warns( + expected_warning: type[Warning] | tuple[type[Warning], ...], + func: Callable[..., T], + *args: Any, + **kwargs: Any, +) -> T: ... + + +def warns( + expected_warning: type[Warning] | tuple[type[Warning], ...] = Warning, + *args: Any, + match: str | re.Pattern[str] | None = None, + **kwargs: Any, +) -> WarningsChecker | Any: + r"""Assert that code raises a particular class of warning. + + Specifically, the parameter ``expected_warning`` can be a warning class or tuple + of warning classes, and the code inside the ``with`` block must issue at least one + warning of that class or classes. + + This helper produces a list of :class:`warnings.WarningMessage` objects, one for + each warning emitted (regardless of whether it is an ``expected_warning`` or not). + Since pytest 8.0, unmatched warnings are also re-emitted when the context closes. + + This function can be used as a context manager:: + + >>> import pytest + >>> with pytest.warns(RuntimeWarning): + ... warnings.warn("my warning", RuntimeWarning) + + In the context manager form you may use the keyword argument ``match`` to assert + that the warning matches a text or regex:: + + >>> with pytest.warns(UserWarning, match='must be 0 or None'): + ... warnings.warn("value must be 0 or None", UserWarning) + + >>> with pytest.warns(UserWarning, match=r'must be \d+$'): + ... warnings.warn("value must be 42", UserWarning) + + >>> with pytest.warns(UserWarning): # catch re-emitted warning + ... with pytest.warns(UserWarning, match=r'must be \d+$'): + ... warnings.warn("this is not here", UserWarning) + Traceback (most recent call last): + ... + Failed: DID NOT WARN. No warnings of type ...UserWarning... were emitted... + + **Using with** ``pytest.mark.parametrize`` + + When using :ref:`pytest.mark.parametrize ref` it is possible to parametrize tests + such that some runs raise a warning and others do not. + + This could be achieved in the same way as with exceptions, see + :ref:`parametrizing_conditional_raising` for an example. + + """ + __tracebackhide__ = True + if not args: + if kwargs: + argnames = ", ".join(sorted(kwargs)) + raise TypeError( + f"Unexpected keyword arguments passed to pytest.warns: {argnames}" + "\nUse context-manager form instead?" + ) + return WarningsChecker(expected_warning, match_expr=match, _ispytest=True) + else: + func = args[0] + if not callable(func): + raise TypeError(f"{func!r} object (type: {type(func)}) must be callable") + with WarningsChecker(expected_warning, _ispytest=True): + return func(*args[1:], **kwargs) + + +class WarningsRecorder(warnings.catch_warnings): + """A context manager to record raised warnings. 
+ + Each recorded warning is an instance of :class:`warnings.WarningMessage`. + + Adapted from `warnings.catch_warnings`. + + .. note:: + ``DeprecationWarning`` and ``PendingDeprecationWarning`` are treated + differently; see :ref:`ensuring_function_triggers`. + + """ + + def __init__(self, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + super().__init__(record=True) + self._entered = False + self._list: list[warnings.WarningMessage] = [] + + @property + def list(self) -> list[warnings.WarningMessage]: + """The list of recorded warnings.""" + return self._list + + def __getitem__(self, i: int) -> warnings.WarningMessage: + """Get a recorded warning by index.""" + return self._list[i] + + def __iter__(self) -> Iterator[warnings.WarningMessage]: + """Iterate through the recorded warnings.""" + return iter(self._list) + + def __len__(self) -> int: + """The number of recorded warnings.""" + return len(self._list) + + def pop(self, cls: type[Warning] = Warning) -> warnings.WarningMessage: + """Pop the first recorded warning which is an instance of ``cls``, + but not an instance of a child class of any other match. + Raises ``AssertionError`` if there is no match. + """ + best_idx: int | None = None + for i, w in enumerate(self._list): + if w.category == cls: + return self._list.pop(i) # exact match, stop looking + if issubclass(w.category, cls) and ( + best_idx is None + or not issubclass(w.category, self._list[best_idx].category) + ): + best_idx = i + if best_idx is not None: + return self._list.pop(best_idx) + __tracebackhide__ = True + raise AssertionError(f"{cls!r} not found in warning list") + + def clear(self) -> None: + """Clear the list of recorded warnings.""" + self._list[:] = [] + + # Type ignored because we basically want the `catch_warnings` generic type + # parameter to be ourselves but that is not possible(?). + def __enter__(self) -> Self: # type: ignore[override] + if self._entered: + __tracebackhide__ = True + raise RuntimeError(f"Cannot enter {self!r} twice") + _list = super().__enter__() + # record=True means it's None. + assert _list is not None + self._list = _list + warnings.simplefilter("always") + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if not self._entered: + __tracebackhide__ = True + raise RuntimeError(f"Cannot exit {self!r} without entering first") + + super().__exit__(exc_type, exc_val, exc_tb) + + # Built-in catch_warnings does not reset entered state so we do it + # manually here for this context manager to become reusable. + self._entered = False + + +@final +class WarningsChecker(WarningsRecorder): + def __init__( + self, + expected_warning: type[Warning] | tuple[type[Warning], ...] 
= Warning, + match_expr: str | re.Pattern[str] | None = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + super().__init__(_ispytest=True) + + msg = "exceptions must be derived from Warning, not %s" + if isinstance(expected_warning, tuple): + for exc in expected_warning: + if not issubclass(exc, Warning): + raise TypeError(msg % type(exc)) + expected_warning_tup = expected_warning + elif isinstance(expected_warning, type) and issubclass( + expected_warning, Warning + ): + expected_warning_tup = (expected_warning,) + else: + raise TypeError(msg % type(expected_warning)) + + self.expected_warning = expected_warning_tup + self.match_expr = match_expr + + def matches(self, warning: warnings.WarningMessage) -> bool: + assert self.expected_warning is not None + return issubclass(warning.category, self.expected_warning) and bool( + self.match_expr is None or re.search(self.match_expr, str(warning.message)) + ) + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + super().__exit__(exc_type, exc_val, exc_tb) + + __tracebackhide__ = True + + # BaseExceptions like pytest.{skip,fail,xfail,exit} or Ctrl-C within + # pytest.warns should *not* trigger "DID NOT WARN" and get suppressed + # when the warning doesn't happen. Control-flow exceptions should always + # propagate. + if exc_val is not None and ( + not isinstance(exc_val, Exception) + # Exit is an Exception, not a BaseException, for some reason. + or isinstance(exc_val, Exit) + ): + return + + def found_str() -> str: + return pformat([record.message for record in self], indent=2) + + try: + if not any(issubclass(w.category, self.expected_warning) for w in self): + fail( + f"DID NOT WARN. No warnings of type {self.expected_warning} were emitted.\n" + f" Emitted warnings: {found_str()}." + ) + elif not any(self.matches(w) for w in self): + fail( + f"DID NOT WARN. No warnings of type {self.expected_warning} matching the regex were emitted.\n" + f" Regex: {self.match_expr}\n" + f" Emitted warnings: {found_str()}." + ) + finally: + # Whether or not any warnings matched, we want to re-emit all unmatched warnings. + for w in self: + if not self.matches(w): + warnings.warn_explicit( + message=w.message, + category=w.category, + filename=w.filename, + lineno=w.lineno, + module=w.__module__, + source=w.source, + ) + + # Currently in Python it is possible to pass other types than an + # `str` message when creating `Warning` instances, however this + # causes an exception when :func:`warnings.filterwarnings` is used + # to filter those warnings. See + # https://github.com/python/cpython/issues/103577 for a discussion. + # While this can be considered a bug in CPython, we put guards in + # pytest as the error message produced without this check in place + # is confusing (#10865). + for w in self: + if type(w.message) is not UserWarning: + # If the warning was of an incorrect type then `warnings.warn()` + # creates a UserWarning. Any other warning must have been specified + # explicitly. + continue + if not w.message.args: + # UserWarning() without arguments must have been specified explicitly. + continue + msg = w.message.args[0] + if isinstance(msg, str): + continue + # It's possible that UserWarning was explicitly specified, and + # its first argument was not a string. But that case can't be + # distinguished from an invalid type. 
+ raise TypeError( + f"Warning must be str or Warning, got {msg!r} (type {type(msg).__name__})" + ) diff --git a/.venv/lib/python3.12/site-packages/_pytest/reports.py b/.venv/lib/python3.12/site-packages/_pytest/reports.py new file mode 100644 index 0000000..011a69d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/reports.py @@ -0,0 +1,694 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Mapping +from collections.abc import Sequence +import dataclasses +from io import StringIO +import os +from pprint import pprint +import sys +from typing import Any +from typing import cast +from typing import final +from typing import Literal +from typing import NoReturn +from typing import TYPE_CHECKING + +from _pytest._code.code import ExceptionChainRepr +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import ExceptionRepr +from _pytest._code.code import ReprEntry +from _pytest._code.code import ReprEntryNative +from _pytest._code.code import ReprExceptionInfo +from _pytest._code.code import ReprFileLocation +from _pytest._code.code import ReprFuncArgs +from _pytest._code.code import ReprLocals +from _pytest._code.code import ReprTraceback +from _pytest._code.code import TerminalRepr +from _pytest._io import TerminalWriter +from _pytest.config import Config +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.outcomes import fail +from _pytest.outcomes import skip + + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + + +if TYPE_CHECKING: + from typing_extensions import Self + + from _pytest.runner import CallInfo + + +def getworkerinfoline(node): + try: + return node._workerinfocache + except AttributeError: + d = node.workerinfo + ver = "{}.{}.{}".format(*d["version_info"][:3]) + node._workerinfocache = s = "[{}] {} -- Python {} {}".format( + d["id"], d["sysplatform"], ver, d["executable"] + ) + return s + + +class BaseReport: + when: str | None + location: tuple[str, int | None, str] | None + longrepr: ( + None | ExceptionInfo[BaseException] | tuple[str, int, str] | str | TerminalRepr + ) + sections: list[tuple[str, str]] + nodeid: str + outcome: Literal["passed", "failed", "skipped"] + + def __init__(self, **kw: Any) -> None: + self.__dict__.update(kw) + + if TYPE_CHECKING: + # Can have arbitrary fields given to __init__(). + def __getattr__(self, key: str) -> Any: ... + + def toterminal(self, out: TerminalWriter) -> None: + if hasattr(self, "node"): + worker_info = getworkerinfoline(self.node) + if worker_info: + out.line(worker_info) + + longrepr = self.longrepr + if longrepr is None: + return + + if hasattr(longrepr, "toterminal"): + longrepr_terminal = cast(TerminalRepr, longrepr) + longrepr_terminal.toterminal(out) + else: + try: + s = str(longrepr) + except UnicodeEncodeError: + s = "" + out.line(s) + + def get_sections(self, prefix: str) -> Iterator[tuple[str, str]]: + for name, content in self.sections: + if name.startswith(prefix): + yield prefix, content + + @property + def longreprtext(self) -> str: + """Read-only property that returns the full string representation of + ``longrepr``. + + .. versionadded:: 3.0 + """ + file = StringIO() + tw = TerminalWriter(file) + tw.hasmarkup = False + self.toterminal(tw) + exc = file.getvalue() + return exc.strip() + + @property + def caplog(self) -> str: + """Return captured log lines, if log capturing is enabled. + + .. 
versionadded:: 3.5 + """ + return "\n".join( + content for (prefix, content) in self.get_sections("Captured log") + ) + + @property + def capstdout(self) -> str: + """Return captured text from stdout, if capturing is enabled. + + .. versionadded:: 3.0 + """ + return "".join( + content for (prefix, content) in self.get_sections("Captured stdout") + ) + + @property + def capstderr(self) -> str: + """Return captured text from stderr, if capturing is enabled. + + .. versionadded:: 3.0 + """ + return "".join( + content for (prefix, content) in self.get_sections("Captured stderr") + ) + + @property + def passed(self) -> bool: + """Whether the outcome is passed.""" + return self.outcome == "passed" + + @property + def failed(self) -> bool: + """Whether the outcome is failed.""" + return self.outcome == "failed" + + @property + def skipped(self) -> bool: + """Whether the outcome is skipped.""" + return self.outcome == "skipped" + + @property + def fspath(self) -> str: + """The path portion of the reported node, as a string.""" + return self.nodeid.split("::")[0] + + @property + def count_towards_summary(self) -> bool: + """**Experimental** Whether this report should be counted towards the + totals shown at the end of the test session: "1 passed, 1 failure, etc". + + .. note:: + + This function is considered **experimental**, so beware that it is subject to changes + even in patch releases. + """ + return True + + @property + def head_line(self) -> str | None: + """**Experimental** The head line shown with longrepr output for this + report, more commonly during traceback representation during + failures:: + + ________ Test.foo ________ + + + In the example above, the head_line is "Test.foo". + + .. note:: + + This function is considered **experimental**, so beware that it is subject to changes + even in patch releases. + """ + if self.location is not None: + _fspath, _lineno, domain = self.location + return domain + return None + + def _get_verbose_word_with_markup( + self, config: Config, default_markup: Mapping[str, bool] + ) -> tuple[str, Mapping[str, bool]]: + _category, _short, verbose = config.hook.pytest_report_teststatus( + report=self, config=config + ) + + if isinstance(verbose, str): + return verbose, default_markup + + if isinstance(verbose, Sequence) and len(verbose) == 2: + word, markup = verbose + if isinstance(word, str) and isinstance(markup, Mapping): + return word, markup + + fail( # pragma: no cover + "pytest_report_teststatus() hook (from a plugin) returned " + f"an invalid verbose value: {verbose!r}.\nExpected either a string " + "or a tuple of (word, markup)." + ) + + def _to_json(self) -> dict[str, Any]: + """Return the contents of this report as a dict of builtin entries, + suitable for serialization. + + This was originally the serialize_report() function from xdist (ca03269). + + Experimental method. + """ + return _report_to_json(self) + + @classmethod + def _from_json(cls, reportdict: dict[str, object]) -> Self: + """Create either a TestReport or CollectReport, depending on the calling class. + + It is the callers responsibility to know which class to pass here. + + This was originally the serialize_report() function from xdist (ca03269). + + Experimental method. 
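As a hedged illustration of the serialization round-trip described here (both methods are documented as experimental; `TestReport` is shown, but the caller could equally pass `CollectReport` when that is the type that was serialized):

    from _pytest.reports import TestReport

    def roundtrip(report: TestReport) -> TestReport:
        data = report._to_json()            # builtins only, e.g. safe to send to a worker
        return TestReport._from_json(data)  # the caller must pick the matching class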
+ """ + kwargs = _report_kwargs_from_json(reportdict) + return cls(**kwargs) + + +def _report_unserialization_failure( + type_name: str, report_class: type[BaseReport], reportdict +) -> NoReturn: + url = "https://github.com/pytest-dev/pytest/issues" + stream = StringIO() + pprint("-" * 100, stream=stream) + pprint(f"INTERNALERROR: Unknown entry type returned: {type_name}", stream=stream) + pprint(f"report_name: {report_class}", stream=stream) + pprint(reportdict, stream=stream) + pprint(f"Please report this bug at {url}", stream=stream) + pprint("-" * 100, stream=stream) + raise RuntimeError(stream.getvalue()) + + +def _format_failed_longrepr( + item: Item, call: CallInfo[None], excinfo: ExceptionInfo[BaseException] +): + if call.when == "call": + longrepr = item.repr_failure(excinfo) + else: + # Exception in setup or teardown. + longrepr = item._repr_failure_py( + excinfo, style=item.config.getoption("tbstyle", "auto") + ) + return longrepr + + +def _format_exception_group_all_skipped_longrepr( + item: Item, + excinfo: ExceptionInfo[BaseExceptionGroup[BaseException | BaseExceptionGroup]], +) -> tuple[str, int, str]: + r = excinfo._getreprcrash() + assert r is not None, ( + "There should always be a traceback entry for skipping a test." + ) + if all( + getattr(skip, "_use_item_location", False) for skip in excinfo.value.exceptions + ): + path, line = item.reportinfo()[:2] + assert line is not None + loc = (os.fspath(path), line + 1) + default_msg = "skipped" + else: + loc = (str(r.path), r.lineno) + default_msg = r.message + + # Get all unique skip messages. + msgs: list[str] = [] + for exception in excinfo.value.exceptions: + m = getattr(exception, "msg", None) or ( + exception.args[0] if exception.args else None + ) + if m and m not in msgs: + msgs.append(m) + + reason = "; ".join(msgs) if msgs else default_msg + longrepr = (*loc, reason) + return longrepr + + +class TestReport(BaseReport): + """Basic test report object (also used for setup and teardown calls if + they fail). + + Reports can contain arbitrary extra attributes. + """ + + __test__ = False + + # Defined by skipping plugin. + # xfail reason if xfailed, otherwise not defined. Use hasattr to distinguish. + wasxfail: str + + def __init__( + self, + nodeid: str, + location: tuple[str, int | None, str], + keywords: Mapping[str, Any], + outcome: Literal["passed", "failed", "skipped"], + longrepr: None + | ExceptionInfo[BaseException] + | tuple[str, int, str] + | str + | TerminalRepr, + when: Literal["setup", "call", "teardown"], + sections: Iterable[tuple[str, str]] = (), + duration: float = 0, + start: float = 0, + stop: float = 0, + user_properties: Iterable[tuple[str, object]] | None = None, + **extra, + ) -> None: + #: Normalized collection nodeid. + self.nodeid = nodeid + + #: A (filesystempath, lineno, domaininfo) tuple indicating the + #: actual location of a test item - it might be different from the + #: collected one e.g. if a method is inherited from a different module. + #: The filesystempath may be relative to ``config.rootdir``. + #: The line number is 0-based. + self.location: tuple[str, int | None, str] = location + + #: A name -> value dictionary containing all keywords and + #: markers associated with a test invocation. + self.keywords: Mapping[str, Any] = keywords + + #: Test outcome, always one of "passed", "failed", "skipped". + self.outcome = outcome + + #: None or a failure representation. + self.longrepr = longrepr + + #: One of 'setup', 'call', 'teardown' to indicate runtest phase. 
+ self.when: Literal["setup", "call", "teardown"] = when + + #: User properties is a list of tuples (name, value) that holds user + #: defined properties of the test. + self.user_properties = list(user_properties or []) + + #: Tuples of str ``(heading, content)`` with extra information + #: for the test report. Used by pytest to add text captured + #: from ``stdout``, ``stderr``, and intercepted logging events. May + #: be used by other plugins to add arbitrary information to reports. + self.sections = list(sections) + + #: Time it took to run just the test. + self.duration: float = duration + + #: The system time when the call started, in seconds since the epoch. + self.start: float = start + #: The system time when the call ended, in seconds since the epoch. + self.stop: float = stop + + self.__dict__.update(extra) + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.nodeid!r} when={self.when!r} outcome={self.outcome!r}>" + + @classmethod + def from_item_and_call(cls, item: Item, call: CallInfo[None]) -> TestReport: + """Create and fill a TestReport with standard item and call info. + + :param item: The item. + :param call: The call info. + """ + when = call.when + # Remove "collect" from the Literal type -- only for collection calls. + assert when != "collect" + duration = call.duration + start = call.start + stop = call.stop + keywords = {x: 1 for x in item.keywords} + excinfo = call.excinfo + sections = [] + if not call.excinfo: + outcome: Literal["passed", "failed", "skipped"] = "passed" + longrepr: ( + None + | ExceptionInfo[BaseException] + | tuple[str, int, str] + | str + | TerminalRepr + ) = None + else: + if not isinstance(excinfo, ExceptionInfo): + outcome = "failed" + longrepr = excinfo + elif isinstance(excinfo.value, skip.Exception): + outcome = "skipped" + r = excinfo._getreprcrash() + assert r is not None, ( + "There should always be a traceback entry for skipping a test." + ) + if excinfo.value._use_item_location: + path, line = item.reportinfo()[:2] + assert line is not None + longrepr = (os.fspath(path), line + 1, r.message) + else: + longrepr = (str(r.path), r.lineno, r.message) + elif isinstance(excinfo.value, BaseExceptionGroup) and ( + excinfo.value.split(skip.Exception)[1] is None + ): + # All exceptions in the group are skip exceptions. + outcome = "skipped" + excinfo = cast( + ExceptionInfo[ + BaseExceptionGroup[BaseException | BaseExceptionGroup] + ], + excinfo, + ) + longrepr = _format_exception_group_all_skipped_longrepr(item, excinfo) + else: + outcome = "failed" + longrepr = _format_failed_longrepr(item, call, excinfo) + for rwhen, key, content in item._report_sections: + sections.append((f"Captured {key} {rwhen}", content)) + return cls( + item.nodeid, + item.location, + keywords, + outcome, + longrepr, + when, + sections, + duration, + start, + stop, + user_properties=item.user_properties, + ) + + +@final +class CollectReport(BaseReport): + """Collection report object. + + Reports can contain arbitrary extra attributes. + """ + + when = "collect" + + def __init__( + self, + nodeid: str, + outcome: Literal["passed", "failed", "skipped"], + longrepr: None + | ExceptionInfo[BaseException] + | tuple[str, int, str] + | str + | TerminalRepr, + result: list[Item | Collector] | None, + sections: Iterable[tuple[str, str]] = (), + **extra, + ) -> None: + #: Normalized collection nodeid. + self.nodeid = nodeid + + #: Test outcome, always one of "passed", "failed", "skipped". + self.outcome = outcome + + #: None or a failure representation. 
+        self.longrepr = longrepr
+
+        #: The collected items and collection nodes.
+        self.result = result or []
+
+        #: Tuples of str ``(heading, content)`` with extra information
+        #: for the test report. Used by pytest to add text captured
+        #: from ``stdout``, ``stderr``, and intercepted logging events. May
+        #: be used by other plugins to add arbitrary information to reports.
+        self.sections = list(sections)
+
+        self.__dict__.update(extra)
+
+    @property
+    def location(  # type:ignore[override]
+        self,
+    ) -> tuple[str, int | None, str] | None:
+        return (self.fspath, None, self.fspath)
+
+    def __repr__(self) -> str:
+        return f"<CollectReport {self.nodeid!r} lenresult={len(self.result)} outcome={self.outcome!r}>"
+
+
+class CollectErrorRepr(TerminalRepr):
+    def __init__(self, msg: str) -> None:
+        self.longrepr = msg
+
+    def toterminal(self, out: TerminalWriter) -> None:
+        out.line(self.longrepr, red=True)
+
+
+def pytest_report_to_serializable(
+    report: CollectReport | TestReport,
+) -> dict[str, Any] | None:
+    if isinstance(report, TestReport | CollectReport):
+        data = report._to_json()
+        data["$report_type"] = report.__class__.__name__
+        return data
+    # TODO: Check if this is actually reachable.
+    return None  # type: ignore[unreachable]
+
+
+def pytest_report_from_serializable(
+    data: dict[str, Any],
+) -> CollectReport | TestReport | None:
+    if "$report_type" in data:
+        if data["$report_type"] == "TestReport":
+            return TestReport._from_json(data)
+        elif data["$report_type"] == "CollectReport":
+            return CollectReport._from_json(data)
+        assert False, "Unknown report_type unserialize data: {}".format(
+            data["$report_type"]
+        )
+    return None
+
+
+def _report_to_json(report: BaseReport) -> dict[str, Any]:
+    """Return the contents of this report as a dict of builtin entries,
+    suitable for serialization.
+
+    This was originally the serialize_report() function from xdist (ca03269).
+    """
+
+    def serialize_repr_entry(
+        entry: ReprEntry | ReprEntryNative,
+    ) -> dict[str, Any]:
+        data = dataclasses.asdict(entry)
+        for key, value in data.items():
+            if hasattr(value, "__dict__"):
+                data[key] = dataclasses.asdict(value)
+        entry_data = {"type": type(entry).__name__, "data": data}
+        return entry_data
+
+    def serialize_repr_traceback(reprtraceback: ReprTraceback) -> dict[str, Any]:
+        result = dataclasses.asdict(reprtraceback)
+        result["reprentries"] = [
+            serialize_repr_entry(x) for x in reprtraceback.reprentries
+        ]
+        return result
+
+    def serialize_repr_crash(
+        reprcrash: ReprFileLocation | None,
+    ) -> dict[str, Any] | None:
+        if reprcrash is not None:
+            return dataclasses.asdict(reprcrash)
+        else:
+            return None
+
+    def serialize_exception_longrepr(rep: BaseReport) -> dict[str, Any]:
+        assert rep.longrepr is not None
+        # TODO: Investigate whether the duck typing is really necessary here.
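+        # Illustrative sketch (not part of the upstream module): the dict built
+        # below has roughly this shape, which is what plugins such as
+        # pytest-xdist ship between processes:
+        #
+        #   {
+        #       "reprcrash": {"path": "...", "lineno": 3, "message": "..."},
+        #       "reprtraceback": {"reprentries": [...], ...},
+        #       "sections": [...],
+        #       "chain": [...] or None,
+        #   }
+        #
+        # _report_kwargs_from_json() further down reverses the mapping.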
+ longrepr = cast(ExceptionRepr, rep.longrepr) + result: dict[str, Any] = { + "reprcrash": serialize_repr_crash(longrepr.reprcrash), + "reprtraceback": serialize_repr_traceback(longrepr.reprtraceback), + "sections": longrepr.sections, + } + if isinstance(longrepr, ExceptionChainRepr): + result["chain"] = [] + for repr_traceback, repr_crash, description in longrepr.chain: + result["chain"].append( + ( + serialize_repr_traceback(repr_traceback), + serialize_repr_crash(repr_crash), + description, + ) + ) + else: + result["chain"] = None + return result + + d = report.__dict__.copy() + if hasattr(report.longrepr, "toterminal"): + if hasattr(report.longrepr, "reprtraceback") and hasattr( + report.longrepr, "reprcrash" + ): + d["longrepr"] = serialize_exception_longrepr(report) + else: + d["longrepr"] = str(report.longrepr) + else: + d["longrepr"] = report.longrepr + for name in d: + if isinstance(d[name], os.PathLike): + d[name] = os.fspath(d[name]) + elif name == "result": + d[name] = None # for now + return d + + +def _report_kwargs_from_json(reportdict: dict[str, Any]) -> dict[str, Any]: + """Return **kwargs that can be used to construct a TestReport or + CollectReport instance. + + This was originally the serialize_report() function from xdist (ca03269). + """ + + def deserialize_repr_entry(entry_data): + data = entry_data["data"] + entry_type = entry_data["type"] + if entry_type == "ReprEntry": + reprfuncargs = None + reprfileloc = None + reprlocals = None + if data["reprfuncargs"]: + reprfuncargs = ReprFuncArgs(**data["reprfuncargs"]) + if data["reprfileloc"]: + reprfileloc = ReprFileLocation(**data["reprfileloc"]) + if data["reprlocals"]: + reprlocals = ReprLocals(data["reprlocals"]["lines"]) + + reprentry: ReprEntry | ReprEntryNative = ReprEntry( + lines=data["lines"], + reprfuncargs=reprfuncargs, + reprlocals=reprlocals, + reprfileloc=reprfileloc, + style=data["style"], + ) + elif entry_type == "ReprEntryNative": + reprentry = ReprEntryNative(data["lines"]) + else: + _report_unserialization_failure(entry_type, TestReport, reportdict) + return reprentry + + def deserialize_repr_traceback(repr_traceback_dict): + repr_traceback_dict["reprentries"] = [ + deserialize_repr_entry(x) for x in repr_traceback_dict["reprentries"] + ] + return ReprTraceback(**repr_traceback_dict) + + def deserialize_repr_crash(repr_crash_dict: dict[str, Any] | None): + if repr_crash_dict is not None: + return ReprFileLocation(**repr_crash_dict) + else: + return None + + if ( + reportdict["longrepr"] + and "reprcrash" in reportdict["longrepr"] + and "reprtraceback" in reportdict["longrepr"] + ): + reprtraceback = deserialize_repr_traceback( + reportdict["longrepr"]["reprtraceback"] + ) + reprcrash = deserialize_repr_crash(reportdict["longrepr"]["reprcrash"]) + if reportdict["longrepr"]["chain"]: + chain = [] + for repr_traceback_data, repr_crash_data, description in reportdict[ + "longrepr" + ]["chain"]: + chain.append( + ( + deserialize_repr_traceback(repr_traceback_data), + deserialize_repr_crash(repr_crash_data), + description, + ) + ) + exception_info: ExceptionChainRepr | ReprExceptionInfo = ExceptionChainRepr( + chain + ) + else: + exception_info = ReprExceptionInfo( + reprtraceback=reprtraceback, + reprcrash=reprcrash, + ) + + for section in reportdict["longrepr"]["sections"]: + exception_info.addsection(*section) + reportdict["longrepr"] = exception_info + + return reportdict diff --git a/.venv/lib/python3.12/site-packages/_pytest/runner.py b/.venv/lib/python3.12/site-packages/_pytest/runner.py new file 
mode 100644 index 0000000..9c20ff9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/runner.py @@ -0,0 +1,580 @@ +# mypy: allow-untyped-defs +"""Basic collect and runtest protocol implementations.""" + +from __future__ import annotations + +import bdb +from collections.abc import Callable +import dataclasses +import os +import sys +import types +from typing import cast +from typing import final +from typing import Generic +from typing import Literal +from typing import TYPE_CHECKING +from typing import TypeVar + +from .config import Config +from .reports import BaseReport +from .reports import CollectErrorRepr +from .reports import CollectReport +from .reports import TestReport +from _pytest import timing +from _pytest._code.code import ExceptionChainRepr +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import TerminalRepr +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.nodes import Collector +from _pytest.nodes import Directory +from _pytest.nodes import Item +from _pytest.nodes import Node +from _pytest.outcomes import Exit +from _pytest.outcomes import OutcomeException +from _pytest.outcomes import Skipped +from _pytest.outcomes import TEST_OUTCOME + + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + +if TYPE_CHECKING: + from _pytest.main import Session + from _pytest.terminal import TerminalReporter + +# +# pytest plugin hooks. + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("terminal reporting", "Reporting", after="general") + group.addoption( + "--durations", + action="store", + type=int, + default=None, + metavar="N", + help="Show N slowest setup/test durations (N=0 for all)", + ) + group.addoption( + "--durations-min", + action="store", + type=float, + default=None, + metavar="N", + help="Minimal duration in seconds for inclusion in slowest list. " + "Default: 0.005 (or 0.0 if -vv is given).", + ) + + +def pytest_terminal_summary(terminalreporter: TerminalReporter) -> None: + durations = terminalreporter.config.option.durations + durations_min = terminalreporter.config.option.durations_min + verbose = terminalreporter.config.get_verbosity() + if durations is None: + return + if durations_min is None: + durations_min = 0.005 if verbose < 2 else 0.0 + tr = terminalreporter + dlist = [] + for replist in tr.stats.values(): + for rep in replist: + if hasattr(rep, "duration"): + dlist.append(rep) + if not dlist: + return + dlist.sort(key=lambda x: x.duration, reverse=True) + if not durations: + tr.write_sep("=", "slowest durations") + else: + tr.write_sep("=", f"slowest {durations} durations") + dlist = dlist[:durations] + + for i, rep in enumerate(dlist): + if rep.duration < durations_min: + tr.write_line("") + message = f"({len(dlist) - i} durations < {durations_min:g}s hidden." + if terminalreporter.config.option.durations_min is None: + message += " Use -vv to show these durations." 
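+            # Illustrative sketch (not part of the upstream module): with e.g.
+            #
+            #   pytest --durations=10 --durations-min=0.5
+            #
+            # the summary prints a "slowest 10 durations" block, and entries
+            # faster than 0.5s are folded into the "... hidden" line assembled here.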
+ message += ")" + tr.write_line(message) + break + tr.write_line(f"{rep.duration:02.2f}s {rep.when:<8} {rep.nodeid}") + + +def pytest_sessionstart(session: Session) -> None: + session._setupstate = SetupState() + + +def pytest_sessionfinish(session: Session) -> None: + session._setupstate.teardown_exact(None) + + +def pytest_runtest_protocol(item: Item, nextitem: Item | None) -> bool: + ihook = item.ihook + ihook.pytest_runtest_logstart(nodeid=item.nodeid, location=item.location) + runtestprotocol(item, nextitem=nextitem) + ihook.pytest_runtest_logfinish(nodeid=item.nodeid, location=item.location) + return True + + +def runtestprotocol( + item: Item, log: bool = True, nextitem: Item | None = None +) -> list[TestReport]: + hasrequest = hasattr(item, "_request") + if hasrequest and not item._request: # type: ignore[attr-defined] + # This only happens if the item is re-run, as is done by + # pytest-rerunfailures. + item._initrequest() # type: ignore[attr-defined] + rep = call_and_report(item, "setup", log) + reports = [rep] + if rep.passed: + if item.config.getoption("setupshow", False): + show_test_item(item) + if not item.config.getoption("setuponly", False): + reports.append(call_and_report(item, "call", log)) + # If the session is about to fail or stop, teardown everything - this is + # necessary to correctly report fixture teardown errors (see #11706) + if item.session.shouldfail or item.session.shouldstop: + nextitem = None + reports.append(call_and_report(item, "teardown", log, nextitem=nextitem)) + # After all teardown hooks have been called + # want funcargs and request info to go away. + if hasrequest: + item._request = False # type: ignore[attr-defined] + item.funcargs = None # type: ignore[attr-defined] + return reports + + +def show_test_item(item: Item) -> None: + """Show test function, parameters and the fixtures of the test item.""" + tw = item.config.get_terminal_writer() + tw.line() + tw.write(" " * 8) + tw.write(item.nodeid) + used_fixtures = sorted(getattr(item, "fixturenames", [])) + if used_fixtures: + tw.write(" (fixtures used: {})".format(", ".join(used_fixtures))) + tw.flush() + + +def pytest_runtest_setup(item: Item) -> None: + _update_current_test_var(item, "setup") + item.session._setupstate.setup(item) + + +def pytest_runtest_call(item: Item) -> None: + _update_current_test_var(item, "call") + try: + del sys.last_type + del sys.last_value + del sys.last_traceback + if sys.version_info >= (3, 12, 0): + del sys.last_exc # type:ignore[attr-defined] + except AttributeError: + pass + try: + item.runtest() + except Exception as e: + # Store trace info to allow postmortem debugging + sys.last_type = type(e) + sys.last_value = e + if sys.version_info >= (3, 12, 0): + sys.last_exc = e # type:ignore[attr-defined] + assert e.__traceback__ is not None + # Skip *this* frame + sys.last_traceback = e.__traceback__.tb_next + raise + + +def pytest_runtest_teardown(item: Item, nextitem: Item | None) -> None: + _update_current_test_var(item, "teardown") + item.session._setupstate.teardown_exact(nextitem) + _update_current_test_var(item, None) + + +def _update_current_test_var( + item: Item, when: Literal["setup", "call", "teardown"] | None +) -> None: + """Update :envvar:`PYTEST_CURRENT_TEST` to reflect the current item and stage. + + If ``when`` is None, delete ``PYTEST_CURRENT_TEST`` from the environment. 
+ """ + var_name = "PYTEST_CURRENT_TEST" + if when: + value = f"{item.nodeid} ({when})" + # don't allow null bytes on environment variables (see #2644, #2957) + value = value.replace("\x00", "(null)") + os.environ[var_name] = value + else: + os.environ.pop(var_name) + + +def pytest_report_teststatus(report: BaseReport) -> tuple[str, str, str] | None: + if report.when in ("setup", "teardown"): + if report.failed: + # category, shortletter, verbose-word + return "error", "E", "ERROR" + elif report.skipped: + return "skipped", "s", "SKIPPED" + else: + return "", "", "" + return None + + +# +# Implementation + + +def call_and_report( + item: Item, when: Literal["setup", "call", "teardown"], log: bool = True, **kwds +) -> TestReport: + ihook = item.ihook + if when == "setup": + runtest_hook: Callable[..., None] = ihook.pytest_runtest_setup + elif when == "call": + runtest_hook = ihook.pytest_runtest_call + elif when == "teardown": + runtest_hook = ihook.pytest_runtest_teardown + else: + assert False, f"Unhandled runtest hook case: {when}" + + call = CallInfo.from_call( + lambda: runtest_hook(item=item, **kwds), + when=when, + reraise=get_reraise_exceptions(item.config), + ) + report: TestReport = ihook.pytest_runtest_makereport(item=item, call=call) + if log: + ihook.pytest_runtest_logreport(report=report) + if check_interactive_exception(call, report): + ihook.pytest_exception_interact(node=item, call=call, report=report) + return report + + +def get_reraise_exceptions(config: Config) -> tuple[type[BaseException], ...]: + """Return exception types that should not be suppressed in general.""" + reraise: tuple[type[BaseException], ...] = (Exit,) + if not config.getoption("usepdb", False): + reraise += (KeyboardInterrupt,) + return reraise + + +def check_interactive_exception(call: CallInfo[object], report: BaseReport) -> bool: + """Check whether the call raised an exception that should be reported as + interactive.""" + if call.excinfo is None: + # Didn't raise. + return False + if hasattr(report, "wasxfail"): + # Exception was expected. + return False + if isinstance(call.excinfo.value, Skipped | bdb.BdbQuit): + # Special control flow exception. + return False + return True + + +TResult = TypeVar("TResult", covariant=True) + + +@final +@dataclasses.dataclass +class CallInfo(Generic[TResult]): + """Result/Exception info of a function invocation.""" + + _result: TResult | None + #: The captured exception of the call, if it raised. + excinfo: ExceptionInfo[BaseException] | None + #: The system time when the call started, in seconds since the epoch. + start: float + #: The system time when the call ended, in seconds since the epoch. + stop: float + #: The call duration, in seconds. + duration: float + #: The context of invocation: "collect", "setup", "call" or "teardown". + when: Literal["collect", "setup", "call", "teardown"] + + def __init__( + self, + result: TResult | None, + excinfo: ExceptionInfo[BaseException] | None, + start: float, + stop: float, + duration: float, + when: Literal["collect", "setup", "call", "teardown"], + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._result = result + self.excinfo = excinfo + self.start = start + self.stop = stop + self.duration = duration + self.when = when + + @property + def result(self) -> TResult: + """The return value of the call, if it didn't raise. + + Can only be accessed if excinfo is None. 
+ """ + if self.excinfo is not None: + raise AttributeError(f"{self!r} has no valid result") + # The cast is safe because an exception wasn't raised, hence + # _result has the expected function return type (which may be + # None, that's why a cast and not an assert). + return cast(TResult, self._result) + + @classmethod + def from_call( + cls, + func: Callable[[], TResult], + when: Literal["collect", "setup", "call", "teardown"], + reraise: type[BaseException] | tuple[type[BaseException], ...] | None = None, + ) -> CallInfo[TResult]: + """Call func, wrapping the result in a CallInfo. + + :param func: + The function to call. Called without arguments. + :type func: Callable[[], _pytest.runner.TResult] + :param when: + The phase in which the function is called. + :param reraise: + Exception or exceptions that shall propagate if raised by the + function, instead of being wrapped in the CallInfo. + """ + excinfo = None + instant = timing.Instant() + try: + result: TResult | None = func() + except BaseException: + excinfo = ExceptionInfo.from_current() + if reraise is not None and isinstance(excinfo.value, reraise): + raise + result = None + duration = instant.elapsed() + return cls( + start=duration.start.time, + stop=duration.stop.time, + duration=duration.seconds, + when=when, + result=result, + excinfo=excinfo, + _ispytest=True, + ) + + def __repr__(self) -> str: + if self.excinfo is None: + return f"" + return f"" + + +def pytest_runtest_makereport(item: Item, call: CallInfo[None]) -> TestReport: + return TestReport.from_item_and_call(item, call) + + +def pytest_make_collect_report(collector: Collector) -> CollectReport: + def collect() -> list[Item | Collector]: + # Before collecting, if this is a Directory, load the conftests. + # If a conftest import fails to load, it is considered a collection + # error of the Directory collector. This is why it's done inside of the + # CallInfo wrapper. + # + # Note: initial conftests are loaded early, not here. + if isinstance(collector, Directory): + collector.config.pluginmanager._loadconftestmodules( + collector.path, + collector.config.getoption("importmode"), + rootpath=collector.config.rootpath, + consider_namespace_packages=collector.config.getini( + "consider_namespace_packages" + ), + ) + + return list(collector.collect()) + + call = CallInfo.from_call( + collect, "collect", reraise=(KeyboardInterrupt, SystemExit) + ) + longrepr: None | tuple[str, int, str] | str | TerminalRepr = None + if not call.excinfo: + outcome: Literal["passed", "skipped", "failed"] = "passed" + else: + skip_exceptions = [Skipped] + unittest = sys.modules.get("unittest") + if unittest is not None: + skip_exceptions.append(unittest.SkipTest) + if isinstance(call.excinfo.value, tuple(skip_exceptions)): + outcome = "skipped" + r_ = collector._repr_failure_py(call.excinfo, "line") + assert isinstance(r_, ExceptionChainRepr), repr(r_) + r = r_.reprcrash + assert r + longrepr = (str(r.path), r.lineno, r.message) + else: + outcome = "failed" + errorinfo = collector.repr_failure(call.excinfo) + if not hasattr(errorinfo, "toterminal"): + assert isinstance(errorinfo, str) + errorinfo = CollectErrorRepr(errorinfo) + longrepr = errorinfo + result = call.result if not call.excinfo else None + rep = CollectReport(collector.nodeid, outcome, longrepr, result) + rep.call = call # type: ignore # see collect_one_node + return rep + + +class SetupState: + """Shared state for setting up/tearing down test items or collectors + in a session. 
+
+    Suppose we have a collection tree as follows:
+
+    <Session session>
+        <Module mod1>
+            <Function item1>
+        <Module mod2>
+            <Function item2>
+
+    The SetupState maintains a stack. The stack starts out empty:
+
+        []
+
+    During the setup phase of item1, setup(item1) is called. What it does
+    is:
+
+        push session to stack, run session.setup()
+        push mod1 to stack, run mod1.setup()
+        push item1 to stack, run item1.setup()
+
+    The stack is:
+
+        [session, mod1, item1]
+
+    While the stack is in this shape, it is allowed to add finalizers to
+    each of session, mod1, item1 using addfinalizer().
+
+    During the teardown phase of item1, teardown_exact(item2) is called,
+    where item2 is the next item to item1. What it does is:
+
+        pop item1 from stack, run its teardowns
+        pop mod1 from stack, run its teardowns
+
+    mod1 was popped because it ended its purpose with item1. The stack is:
+
+        [session]
+
+    During the setup phase of item2, setup(item2) is called. What it does
+    is:
+
+        push mod2 to stack, run mod2.setup()
+        push item2 to stack, run item2.setup()
+
+    Stack:
+
+        [session, mod2, item2]
+
+    During the teardown phase of item2, teardown_exact(None) is called,
+    because item2 is the last item. What it does is:
+
+        pop item2 from stack, run its teardowns
+        pop mod2 from stack, run its teardowns
+        pop session from stack, run its teardowns
+
+    Stack:
+
+        []
+
+    The end!
+    """
+
+    def __init__(self) -> None:
+        # The stack is in the dict insertion order.
+        self.stack: dict[
+            Node,
+            tuple[
+                # Node's finalizers.
+                list[Callable[[], object]],
+                # Node's exception and original traceback, if its setup raised.
+                tuple[OutcomeException | Exception, types.TracebackType | None] | None,
+            ],
+        ] = {}
+
+    def setup(self, item: Item) -> None:
+        """Setup objects along the collector chain to the item."""
+        needed_collectors = item.listchain()
+
+        # If a collector fails its setup, fail its entire subtree of items.
+        # The setup is not retried for each item - the same exception is used.
+        for col, (finalizers, exc) in self.stack.items():
+            assert col in needed_collectors, "previous item was not torn down properly"
+            if exc:
+                raise exc[0].with_traceback(exc[1])
+
+        for col in needed_collectors[len(self.stack) :]:
+            assert col not in self.stack
+            # Push onto the stack.
+            self.stack[col] = ([col.teardown], None)
+            try:
+                col.setup()
+            except TEST_OUTCOME as exc:
+                self.stack[col] = (self.stack[col][0], (exc, exc.__traceback__))
+                raise
+
+    def addfinalizer(self, finalizer: Callable[[], object], node: Node) -> None:
+        """Attach a finalizer to the given node.
+
+        The node must be currently active in the stack.
+        """
+        assert node and not isinstance(node, tuple)
+        assert callable(finalizer)
+        assert node in self.stack, (node, self.stack)
+        self.stack[node][0].append(finalizer)
+
+    def teardown_exact(self, nextitem: Item | None) -> None:
+        """Teardown the current stack up until reaching nodes that nextitem
+        also descends from.
+
+        When nextitem is None (meaning we're at the last item), the entire
+        stack is torn down.
+ """ + needed_collectors = (nextitem and nextitem.listchain()) or [] + exceptions: list[BaseException] = [] + while self.stack: + if list(self.stack.keys()) == needed_collectors[: len(self.stack)]: + break + node, (finalizers, _) = self.stack.popitem() + these_exceptions = [] + while finalizers: + fin = finalizers.pop() + try: + fin() + except TEST_OUTCOME as e: + these_exceptions.append(e) + + if len(these_exceptions) == 1: + exceptions.extend(these_exceptions) + elif these_exceptions: + msg = f"errors while tearing down {node!r}" + exceptions.append(BaseExceptionGroup(msg, these_exceptions[::-1])) + + if len(exceptions) == 1: + raise exceptions[0] + elif exceptions: + raise BaseExceptionGroup("errors during test teardown", exceptions[::-1]) + if nextitem is None: + assert not self.stack + + +def collect_one_node(collector: Collector) -> CollectReport: + ihook = collector.ihook + ihook.pytest_collectstart(collector=collector) + rep: CollectReport = ihook.pytest_make_collect_report(collector=collector) + call = rep.__dict__.pop("call", None) + if call and check_interactive_exception(call, rep): + ihook.pytest_exception_interact(node=collector, call=call, report=rep) + return rep diff --git a/.venv/lib/python3.12/site-packages/_pytest/scope.py b/.venv/lib/python3.12/site-packages/_pytest/scope.py new file mode 100644 index 0000000..2b007e8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/scope.py @@ -0,0 +1,91 @@ +""" +Scope definition and related utilities. + +Those are defined here, instead of in the 'fixtures' module because +their use is spread across many other pytest modules, and centralizing it in 'fixtures' +would cause circular references. + +Also this makes the module light to import, as it should. +""" + +from __future__ import annotations + +from enum import Enum +from functools import total_ordering +from typing import Literal + + +_ScopeName = Literal["session", "package", "module", "class", "function"] + + +@total_ordering +class Scope(Enum): + """ + Represents one of the possible fixture scopes in pytest. + + Scopes are ordered from lower to higher, that is: + + ->>> higher ->>> + + Function < Class < Module < Package < Session + + <<<- lower <<<- + """ + + # Scopes need to be listed from lower to higher. + Function = "function" + Class = "class" + Module = "module" + Package = "package" + Session = "session" + + def next_lower(self) -> Scope: + """Return the next lower scope.""" + index = _SCOPE_INDICES[self] + if index == 0: + raise ValueError(f"{self} is the lower-most scope") + return _ALL_SCOPES[index - 1] + + def next_higher(self) -> Scope: + """Return the next higher scope.""" + index = _SCOPE_INDICES[self] + if index == len(_SCOPE_INDICES) - 1: + raise ValueError(f"{self} is the upper-most scope") + return _ALL_SCOPES[index + 1] + + def __lt__(self, other: Scope) -> bool: + self_index = _SCOPE_INDICES[self] + other_index = _SCOPE_INDICES[other] + return self_index < other_index + + @classmethod + def from_user( + cls, scope_name: _ScopeName, descr: str, where: str | None = None + ) -> Scope: + """ + Given a scope name from the user, return the equivalent Scope enum. Should be used + whenever we want to convert a user provided scope name to its enum object. + + If the scope name is invalid, construct a user friendly message and call pytest.fail. + """ + from _pytest.outcomes import fail + + try: + # Holding this reference is necessary for mypy at the moment. 
+ scope = Scope(scope_name) + except ValueError: + fail( + "{} {}got an unexpected scope value '{}'".format( + descr, f"from {where} " if where else "", scope_name + ), + pytrace=False, + ) + return scope + + +_ALL_SCOPES = list(Scope) +_SCOPE_INDICES = {scope: index for index, scope in enumerate(_ALL_SCOPES)} + + +# Ordered list of scopes which can contain many tests (in practice all except Function). +HIGH_SCOPES = [x for x in Scope if x is not Scope.Function] diff --git a/.venv/lib/python3.12/site-packages/_pytest/setuponly.py b/.venv/lib/python3.12/site-packages/_pytest/setuponly.py new file mode 100644 index 0000000..7e6b46b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/setuponly.py @@ -0,0 +1,98 @@ +from __future__ import annotations + +from collections.abc import Generator + +from _pytest._io.saferepr import saferepr +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config.argparsing import Parser +from _pytest.fixtures import FixtureDef +from _pytest.fixtures import SubRequest +from _pytest.scope import Scope +import pytest + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("debugconfig") + group.addoption( + "--setuponly", + "--setup-only", + action="store_true", + help="Only setup fixtures, do not execute tests", + ) + group.addoption( + "--setupshow", + "--setup-show", + action="store_true", + help="Show setup of fixtures while executing tests", + ) + + +@pytest.hookimpl(wrapper=True) +def pytest_fixture_setup( + fixturedef: FixtureDef[object], request: SubRequest +) -> Generator[None, object, object]: + try: + return (yield) + finally: + if request.config.option.setupshow: + if hasattr(request, "param"): + # Save the fixture parameter so ._show_fixture_action() can + # display it now and during the teardown (in .finish()). + if fixturedef.ids: + if callable(fixturedef.ids): + param = fixturedef.ids(request.param) + else: + param = fixturedef.ids[request.param_index] + else: + param = request.param + fixturedef.cached_param = param # type: ignore[attr-defined] + _show_fixture_action(fixturedef, request.config, "SETUP") + + +def pytest_fixture_post_finalizer( + fixturedef: FixtureDef[object], request: SubRequest +) -> None: + if fixturedef.cached_result is not None: + config = request.config + if config.option.setupshow: + _show_fixture_action(fixturedef, request.config, "TEARDOWN") + if hasattr(fixturedef, "cached_param"): + del fixturedef.cached_param + + +def _show_fixture_action( + fixturedef: FixtureDef[object], config: Config, msg: str +) -> None: + capman = config.pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend_global_capture() + + tw = config.get_terminal_writer() + tw.line() + # Use smaller indentation the higher the scope: Session = 0, Package = 1, etc. 
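+    # Illustrative sketch (not part of the upstream module, fixture names are
+    # hypothetical): with --setup-show the lines written here look roughly like
+    #
+    #   SETUP    S db
+    #           SETUP    F tmp_path
+    #           TEARDOWN F tmp_path
+    #   TEARDOWN S db
+    #
+    # where the letter is the first character of the fixture scope and the
+    # indentation computed below grows for narrower scopes.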
+ scope_indent = list(reversed(Scope)).index(fixturedef._scope) + tw.write(" " * 2 * scope_indent) + + scopename = fixturedef.scope[0].upper() + tw.write(f"{msg:<8} {scopename} {fixturedef.argname}") + + if msg == "SETUP": + deps = sorted(arg for arg in fixturedef.argnames if arg != "request") + if deps: + tw.write(" (fixtures used: {})".format(", ".join(deps))) + + if hasattr(fixturedef, "cached_param"): + tw.write(f"[{saferepr(fixturedef.cached_param, maxsize=42)}]") + + tw.flush() + + if capman: + capman.resume_global_capture() + + +@pytest.hookimpl(tryfirst=True) +def pytest_cmdline_main(config: Config) -> int | ExitCode | None: + if config.option.setuponly: + config.option.setupshow = True + return None diff --git a/.venv/lib/python3.12/site-packages/_pytest/setupplan.py b/.venv/lib/python3.12/site-packages/_pytest/setupplan.py new file mode 100644 index 0000000..4e124cc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/setupplan.py @@ -0,0 +1,39 @@ +from __future__ import annotations + +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config.argparsing import Parser +from _pytest.fixtures import FixtureDef +from _pytest.fixtures import SubRequest +import pytest + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("debugconfig") + group.addoption( + "--setupplan", + "--setup-plan", + action="store_true", + help="Show what fixtures and tests would be executed but " + "don't execute anything", + ) + + +@pytest.hookimpl(tryfirst=True) +def pytest_fixture_setup( + fixturedef: FixtureDef[object], request: SubRequest +) -> object | None: + # Will return a dummy fixture if the setuponly option is provided. + if request.config.option.setupplan: + my_cache_key = fixturedef.cache_key(request) + fixturedef.cached_result = (None, my_cache_key, None) + return fixturedef.cached_result + return None + + +@pytest.hookimpl(tryfirst=True) +def pytest_cmdline_main(config: Config) -> int | ExitCode | None: + if config.option.setupplan: + config.option.setuponly = True + config.option.setupshow = True + return None diff --git a/.venv/lib/python3.12/site-packages/_pytest/skipping.py b/.venv/lib/python3.12/site-packages/_pytest/skipping.py new file mode 100644 index 0000000..3b06762 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/skipping.py @@ -0,0 +1,321 @@ +# mypy: allow-untyped-defs +"""Support for skip/xfail functions and markers.""" + +from __future__ import annotations + +from collections.abc import Generator +from collections.abc import Mapping +import dataclasses +import os +import platform +import sys +import traceback + +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.mark.structures import Mark +from _pytest.nodes import Item +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.outcomes import xfail +from _pytest.raises import AbstractRaises +from _pytest.reports import BaseReport +from _pytest.reports import TestReport +from _pytest.runner import CallInfo +from _pytest.stash import StashKey + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--runxfail", + action="store_true", + dest="runxfail", + default=False, + help="Report the results of xfail tests as if they were not marked", + ) + + parser.addini( + "strict_xfail", + "Default for the strict parameter of xfail " + "markers when not given explicitly (default: False) (alias: 
xfail_strict)", + type="bool", + # None => fallback to `strict`. + default=None, + aliases=["xfail_strict"], + ) + + +def pytest_configure(config: Config) -> None: + if config.option.runxfail: + # yay a hack + import pytest + + old = pytest.xfail + config.add_cleanup(lambda: setattr(pytest, "xfail", old)) + + def nop(*args, **kwargs): + pass + + nop.Exception = xfail.Exception # type: ignore[attr-defined] + setattr(pytest, "xfail", nop) + + config.addinivalue_line( + "markers", + "skip(reason=None): skip the given test function with an optional reason. " + 'Example: skip(reason="no way of currently testing this") skips the ' + "test.", + ) + config.addinivalue_line( + "markers", + "skipif(condition, ..., *, reason=...): " + "skip the given test function if any of the conditions evaluate to True. " + "Example: skipif(sys.platform == 'win32') skips the test if we are on the win32 platform. " + "See https://docs.pytest.org/en/stable/reference/reference.html#pytest-mark-skipif", + ) + config.addinivalue_line( + "markers", + "xfail(condition, ..., *, reason=..., run=True, raises=None, strict=strict_xfail): " + "mark the test function as an expected failure if any of the conditions " + "evaluate to True. Optionally specify a reason for better reporting " + "and run=False if you don't even want to execute the test function. " + "If only specific exception(s) are expected, you can list them in " + "raises, and if the test fails in other ways, it will be reported as " + "a true failure. See https://docs.pytest.org/en/stable/reference/reference.html#pytest-mark-xfail", + ) + + +def evaluate_condition(item: Item, mark: Mark, condition: object) -> tuple[bool, str]: + """Evaluate a single skipif/xfail condition. + + If an old-style string condition is given, it is eval()'d, otherwise the + condition is bool()'d. If this fails, an appropriately formatted pytest.fail + is raised. + + Returns (result, reason). The reason is only relevant if the result is True. + """ + # String condition. + if isinstance(condition, str): + globals_ = { + "os": os, + "sys": sys, + "platform": platform, + "config": item.config, + } + for dictionary in reversed( + item.ihook.pytest_markeval_namespace(config=item.config) + ): + if not isinstance(dictionary, Mapping): + raise ValueError( + f"pytest_markeval_namespace() needs to return a dict, got {dictionary!r}" + ) + globals_.update(dictionary) + if hasattr(item, "obj"): + globals_.update(item.obj.__globals__) + try: + filename = f"<{mark.name} condition>" + condition_code = compile(condition, filename, "eval") + result = eval(condition_code, globals_) + except SyntaxError as exc: + msglines = [ + f"Error evaluating {mark.name!r} condition", + " " + condition, + " " + " " * (exc.offset or 0) + "^", + "SyntaxError: invalid syntax", + ] + fail("\n".join(msglines), pytrace=False) + except Exception as exc: + msglines = [ + f"Error evaluating {mark.name!r} condition", + " " + condition, + *traceback.format_exception_only(type(exc), exc), + ] + fail("\n".join(msglines), pytrace=False) + + # Boolean condition. 
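+    # Illustrative sketch (not part of the upstream module): both marker styles
+    # funnel through this function, e.g.
+    #
+    #   @pytest.mark.skipif("sys.platform == 'win32'", reason="POSIX-only test")
+    #
+    # is handled by the string/eval branch above, while
+    #
+    #   @pytest.mark.skipif(sys.platform == "win32", reason="POSIX-only test")
+    #
+    # takes the bool() branch below and must always supply reason=.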
+ else: + try: + result = bool(condition) + except Exception as exc: + msglines = [ + f"Error evaluating {mark.name!r} condition as a boolean", + *traceback.format_exception_only(type(exc), exc), + ] + fail("\n".join(msglines), pytrace=False) + + reason = mark.kwargs.get("reason", None) + if reason is None: + if isinstance(condition, str): + reason = "condition: " + condition + else: + # XXX better be checked at collection time + msg = ( + f"Error evaluating {mark.name!r}: " + + "you need to specify reason=STRING when using booleans as conditions." + ) + fail(msg, pytrace=False) + + return result, reason + + +@dataclasses.dataclass(frozen=True) +class Skip: + """The result of evaluate_skip_marks().""" + + reason: str = "unconditional skip" + + +def evaluate_skip_marks(item: Item) -> Skip | None: + """Evaluate skip and skipif marks on item, returning Skip if triggered.""" + for mark in item.iter_markers(name="skipif"): + if "condition" not in mark.kwargs: + conditions = mark.args + else: + conditions = (mark.kwargs["condition"],) + + # Unconditional. + if not conditions: + reason = mark.kwargs.get("reason", "") + return Skip(reason) + + # If any of the conditions are true. + for condition in conditions: + result, reason = evaluate_condition(item, mark, condition) + if result: + return Skip(reason) + + for mark in item.iter_markers(name="skip"): + try: + return Skip(*mark.args, **mark.kwargs) + except TypeError as e: + raise TypeError(str(e) + " - maybe you meant pytest.mark.skipif?") from None + + return None + + +@dataclasses.dataclass(frozen=True) +class Xfail: + """The result of evaluate_xfail_marks().""" + + __slots__ = ("raises", "reason", "run", "strict") + + reason: str + run: bool + strict: bool + raises: ( + type[BaseException] + | tuple[type[BaseException], ...] + | AbstractRaises[BaseException] + | None + ) + + +def evaluate_xfail_marks(item: Item) -> Xfail | None: + """Evaluate xfail marks on item, returning Xfail if triggered.""" + for mark in item.iter_markers(name="xfail"): + run = mark.kwargs.get("run", True) + strict = mark.kwargs.get("strict") + if strict is None: + strict = item.config.getini("strict_xfail") + if strict is None: + strict = item.config.getini("strict") + raises = mark.kwargs.get("raises", None) + if "condition" not in mark.kwargs: + conditions = mark.args + else: + conditions = (mark.kwargs["condition"],) + + # Unconditional. + if not conditions: + reason = mark.kwargs.get("reason", "") + return Xfail(reason, run, strict, raises) + + # If any of the conditions are true. + for condition in conditions: + result, reason = evaluate_condition(item, mark, condition) + if result: + return Xfail(reason, run, strict, raises) + + return None + + +# Saves the xfail mark evaluation. Can be refreshed during call if None. 
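+# Illustrative sketch (not part of the upstream module): for a test marked e.g.
+#
+#   @pytest.mark.xfail(sys.version_info < (3, 11), reason="needs 3.11", raises=TypeError)
+#
+# evaluate_xfail_marks() above returns Xfail(reason="needs 3.11", run=True,
+# strict=<ini default>, raises=TypeError) when the condition holds; the result
+# is cached per item under the stash key defined below.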
+xfailed_key = StashKey[Xfail | None]() + + +@hookimpl(tryfirst=True) +def pytest_runtest_setup(item: Item) -> None: + skipped = evaluate_skip_marks(item) + if skipped: + raise skip.Exception(skipped.reason, _use_item_location=True) + + item.stash[xfailed_key] = xfailed = evaluate_xfail_marks(item) + if xfailed and not item.config.option.runxfail and not xfailed.run: + xfail("[NOTRUN] " + xfailed.reason) + + +@hookimpl(wrapper=True) +def pytest_runtest_call(item: Item) -> Generator[None]: + xfailed = item.stash.get(xfailed_key, None) + if xfailed is None: + item.stash[xfailed_key] = xfailed = evaluate_xfail_marks(item) + + if xfailed and not item.config.option.runxfail and not xfailed.run: + xfail("[NOTRUN] " + xfailed.reason) + + try: + return (yield) + finally: + # The test run may have added an xfail mark dynamically. + xfailed = item.stash.get(xfailed_key, None) + if xfailed is None: + item.stash[xfailed_key] = xfailed = evaluate_xfail_marks(item) + + +@hookimpl(wrapper=True) +def pytest_runtest_makereport( + item: Item, call: CallInfo[None] +) -> Generator[None, TestReport, TestReport]: + rep = yield + xfailed = item.stash.get(xfailed_key, None) + if item.config.option.runxfail: + pass # don't interfere + elif call.excinfo and isinstance(call.excinfo.value, xfail.Exception): + assert call.excinfo.value.msg is not None + rep.wasxfail = call.excinfo.value.msg + rep.outcome = "skipped" + elif not rep.skipped and xfailed: + if call.excinfo: + raises = xfailed.raises + if raises is None or ( + ( + isinstance(raises, type | tuple) + and isinstance(call.excinfo.value, raises) + ) + or ( + isinstance(raises, AbstractRaises) + and raises.matches(call.excinfo.value) + ) + ): + rep.outcome = "skipped" + rep.wasxfail = xfailed.reason + else: + rep.outcome = "failed" + elif call.when == "call": + if xfailed.strict: + rep.outcome = "failed" + rep.longrepr = "[XPASS(strict)] " + xfailed.reason + else: + rep.outcome = "passed" + rep.wasxfail = xfailed.reason + return rep + + +def pytest_report_teststatus(report: BaseReport) -> tuple[str, str, str] | None: + if hasattr(report, "wasxfail"): + if report.skipped: + return "xfailed", "x", "XFAIL" + elif report.passed: + return "xpassed", "X", "XPASS" + return None diff --git a/.venv/lib/python3.12/site-packages/_pytest/stash.py b/.venv/lib/python3.12/site-packages/_pytest/stash.py new file mode 100644 index 0000000..6a9ff88 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/stash.py @@ -0,0 +1,116 @@ +from __future__ import annotations + +from typing import Any +from typing import cast +from typing import Generic +from typing import TypeVar + + +__all__ = ["Stash", "StashKey"] + + +T = TypeVar("T") +D = TypeVar("D") + + +class StashKey(Generic[T]): + """``StashKey`` is an object used as a key to a :class:`Stash`. + + A ``StashKey`` is associated with the type ``T`` of the value of the key. + + A ``StashKey`` is unique and cannot conflict with another key. + + .. versionadded:: 7.0 + """ + + __slots__ = () + + +class Stash: + r"""``Stash`` is a type-safe heterogeneous mutable mapping that + allows keys and value types to be defined separately from + where it (the ``Stash``) is created. + + Usually you will be given an object which has a ``Stash``, for example + :class:`~pytest.Config` or a :class:`~_pytest.nodes.Node`: + + .. code-block:: python + + stash: Stash = some_object.stash + + If a module or plugin wants to store data in this ``Stash``, it creates + :class:`StashKey`\s for its keys (at the module level): + + .. 
code-block:: python + + # At the top-level of the module + some_str_key = StashKey[str]() + some_bool_key = StashKey[bool]() + + To store information: + + .. code-block:: python + + # Value type must match the key. + stash[some_str_key] = "value" + stash[some_bool_key] = True + + To retrieve the information: + + .. code-block:: python + + # The static type of some_str is str. + some_str = stash[some_str_key] + # The static type of some_bool is bool. + some_bool = stash[some_bool_key] + + .. versionadded:: 7.0 + """ + + __slots__ = ("_storage",) + + def __init__(self) -> None: + self._storage: dict[StashKey[Any], object] = {} + + def __setitem__(self, key: StashKey[T], value: T) -> None: + """Set a value for key.""" + self._storage[key] = value + + def __getitem__(self, key: StashKey[T]) -> T: + """Get the value for key. + + Raises ``KeyError`` if the key wasn't set before. + """ + return cast(T, self._storage[key]) + + def get(self, key: StashKey[T], default: D) -> T | D: + """Get the value for key, or return default if the key wasn't set + before.""" + try: + return self[key] + except KeyError: + return default + + def setdefault(self, key: StashKey[T], default: T) -> T: + """Return the value of key if already set, otherwise set the value + of key to default and return default.""" + try: + return self[key] + except KeyError: + self[key] = default + return default + + def __delitem__(self, key: StashKey[T]) -> None: + """Delete the value for key. + + Raises ``KeyError`` if the key wasn't set before. + """ + del self._storage[key] + + def __contains__(self, key: StashKey[T]) -> bool: + """Return whether key was set.""" + return key in self._storage + + def __len__(self) -> int: + """Return how many items exist in the stash.""" + return len(self._storage) diff --git a/.venv/lib/python3.12/site-packages/_pytest/stepwise.py b/.venv/lib/python3.12/site-packages/_pytest/stepwise.py new file mode 100644 index 0000000..8901540 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/stepwise.py @@ -0,0 +1,209 @@ +from __future__ import annotations + +import dataclasses +from datetime import datetime +from datetime import timedelta +from typing import Any +from typing import TYPE_CHECKING + +from _pytest import nodes +from _pytest.cacheprovider import Cache +from _pytest.config import Config +from _pytest.config.argparsing import Parser +from _pytest.main import Session +from _pytest.reports import TestReport + + +if TYPE_CHECKING: + from typing_extensions import Self + +STEPWISE_CACHE_DIR = "cache/stepwise" + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--sw", + "--stepwise", + action="store_true", + default=False, + dest="stepwise", + help="Exit on test failure and continue from last failing test next time", + ) + group.addoption( + "--sw-skip", + "--stepwise-skip", + action="store_true", + default=False, + dest="stepwise_skip", + help="Ignore the first failing test but stop on the next failing test. " + "Implicitly enables --stepwise.", + ) + group.addoption( + "--sw-reset", + "--stepwise-reset", + action="store_true", + default=False, + dest="stepwise_reset", + help="Resets stepwise state, restarting the stepwise workflow. " + "Implicitly enables --stepwise.", + ) + + +def pytest_configure(config: Config) -> None: + # --stepwise-skip/--stepwise-reset implies stepwise. 
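+    # Illustrative sketch (not part of the upstream module): a typical stepwise
+    # workflow looks like
+    #
+    #   pytest --sw            # stop at the first failure, remember its nodeid
+    #   pytest --sw            # deselect already-passed items, resume at the failure
+    #   pytest --sw --sw-skip  # ignore that failure once, stop at the next one
+    #   pytest --sw-reset      # discard the recorded state and start over
+    #
+    # The state is kept in the cache under STEPWISE_CACHE_DIR ("cache/stepwise").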
+ if config.option.stepwise_skip or config.option.stepwise_reset: + config.option.stepwise = True + if config.getoption("stepwise"): + config.pluginmanager.register(StepwisePlugin(config), "stepwiseplugin") + + +def pytest_sessionfinish(session: Session) -> None: + if not session.config.getoption("stepwise"): + assert session.config.cache is not None + if hasattr(session.config, "workerinput"): + # Do not update cache if this process is a xdist worker to prevent + # race conditions (#10641). + return + + +@dataclasses.dataclass +class StepwiseCacheInfo: + # The nodeid of the last failed test. + last_failed: str | None + + # The number of tests in the last time --stepwise was run. + # We use this information as a simple way to invalidate the cache information, avoiding + # confusing behavior in case the cache is stale. + last_test_count: int | None + + # The date when the cache was last updated, for information purposes only. + last_cache_date_str: str + + @property + def last_cache_date(self) -> datetime: + return datetime.fromisoformat(self.last_cache_date_str) + + @classmethod + def empty(cls) -> Self: + return cls( + last_failed=None, + last_test_count=None, + last_cache_date_str=datetime.now().isoformat(), + ) + + def update_date_to_now(self) -> None: + self.last_cache_date_str = datetime.now().isoformat() + + +class StepwisePlugin: + def __init__(self, config: Config) -> None: + self.config = config + self.session: Session | None = None + self.report_status: list[str] = [] + assert config.cache is not None + self.cache: Cache = config.cache + self.skip: bool = config.getoption("stepwise_skip") + self.reset: bool = config.getoption("stepwise_reset") + self.cached_info = self._load_cached_info() + + def _load_cached_info(self) -> StepwiseCacheInfo: + cached_dict: dict[str, Any] | None = self.cache.get(STEPWISE_CACHE_DIR, None) + if cached_dict: + try: + return StepwiseCacheInfo( + cached_dict["last_failed"], + cached_dict["last_test_count"], + cached_dict["last_cache_date_str"], + ) + except (KeyError, TypeError) as e: + error = f"{type(e).__name__}: {e}" + self.report_status.append(f"error reading cache, discarding ({error})") + + # Cache not found or error during load, return a new cache. + return StepwiseCacheInfo.empty() + + def pytest_sessionstart(self, session: Session) -> None: + self.session = session + + def pytest_collection_modifyitems( + self, config: Config, items: list[nodes.Item] + ) -> None: + last_test_count = self.cached_info.last_test_count + self.cached_info.last_test_count = len(items) + + if self.reset: + self.report_status.append("resetting state, not skipping.") + self.cached_info.last_failed = None + return + + if not self.cached_info.last_failed: + self.report_status.append("no previously failed tests, not skipping.") + return + + if last_test_count is not None and last_test_count != len(items): + self.report_status.append( + f"test count changed, not skipping (now {len(items)} tests, previously {last_test_count})." + ) + self.cached_info.last_failed = None + return + + # Check all item nodes until we find a match on last failed. + failed_index = None + for index, item in enumerate(items): + if item.nodeid == self.cached_info.last_failed: + failed_index = index + break + + # If the previously failed test was not found among the test items, + # do not skip any tests. 
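+        # Illustrative sketch (not part of the upstream module): with collected
+        # items [test_a, test_b, test_c] and last_failed pointing at test_c,
+        # failed_index becomes 2, so test_a and test_b are deselected below via
+        # config.hook.pytest_deselected() and the run resumes at test_c.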
+ if failed_index is None: + self.report_status.append("previously failed test not found, not skipping.") + else: + cache_age = datetime.now() - self.cached_info.last_cache_date + # Round up to avoid showing microseconds. + cache_age = timedelta(seconds=int(cache_age.total_seconds())) + self.report_status.append( + f"skipping {failed_index} already passed items (cache from {cache_age} ago," + f" use --sw-reset to discard)." + ) + deselected = items[:failed_index] + del items[:failed_index] + config.hook.pytest_deselected(items=deselected) + + def pytest_runtest_logreport(self, report: TestReport) -> None: + if report.failed: + if self.skip: + # Remove test from the failed ones (if it exists) and unset the skip option + # to make sure the following tests will not be skipped. + if report.nodeid == self.cached_info.last_failed: + self.cached_info.last_failed = None + + self.skip = False + else: + # Mark test as the last failing and interrupt the test session. + self.cached_info.last_failed = report.nodeid + assert self.session is not None + self.session.shouldstop = ( + "Test failed, continuing from this test next run." + ) + + else: + # If the test was actually run and did pass. + if report.when == "call": + # Remove test from the failed ones, if exists. + if report.nodeid == self.cached_info.last_failed: + self.cached_info.last_failed = None + + def pytest_report_collectionfinish(self) -> list[str] | None: + if self.config.get_verbosity() >= 0 and self.report_status: + return [f"stepwise: {x}" for x in self.report_status] + return None + + def pytest_sessionfinish(self) -> None: + if hasattr(self.config, "workerinput"): + # Do not update cache if this process is a xdist worker to prevent + # race conditions (#10641). + return + self.cached_info.update_date_to_now() + self.cache.set(STEPWISE_CACHE_DIR, dataclasses.asdict(self.cached_info)) diff --git a/.venv/lib/python3.12/site-packages/_pytest/subtests.py b/.venv/lib/python3.12/site-packages/_pytest/subtests.py new file mode 100644 index 0000000..e0ceb27 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/subtests.py @@ -0,0 +1,411 @@ +"""Builtin plugin that adds subtests support.""" + +from __future__ import annotations + +from collections import defaultdict +from collections.abc import Callable +from collections.abc import Iterator +from collections.abc import Mapping +from contextlib import AbstractContextManager +from contextlib import contextmanager +from contextlib import ExitStack +from contextlib import nullcontext +import dataclasses +import time +from types import TracebackType +from typing import Any +from typing import TYPE_CHECKING + +import pluggy + +from _pytest._code import ExceptionInfo +from _pytest._io.saferepr import saferepr +from _pytest.capture import CaptureFixture +from _pytest.capture import FDCapture +from _pytest.capture import SysCapture +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import SubRequest +from _pytest.logging import catching_logs +from _pytest.logging import LogCaptureHandler +from _pytest.logging import LoggingPlugin +from _pytest.reports import TestReport +from _pytest.runner import CallInfo +from _pytest.runner import check_interactive_exception +from _pytest.runner import get_reraise_exceptions +from _pytest.stash import StashKey + + +if TYPE_CHECKING: + from typing_extensions import Self + + +def 
pytest_addoption(parser: Parser) -> None: + Config._add_verbosity_ini( + parser, + Config.VERBOSITY_SUBTESTS, + help=( + "Specify verbosity level for subtests. " + "Higher levels will generate output for passed subtests. Failed subtests are always reported." + ), + ) + + +@dataclasses.dataclass(frozen=True, slots=True, kw_only=True) +class SubtestContext: + """The values passed to Subtests.test() that are included in the test report.""" + + msg: str | None + kwargs: Mapping[str, Any] + + def _to_json(self) -> dict[str, Any]: + return dataclasses.asdict(self) + + @classmethod + def _from_json(cls, d: dict[str, Any]) -> Self: + return cls(msg=d["msg"], kwargs=d["kwargs"]) + + +@dataclasses.dataclass(init=False) +class SubtestReport(TestReport): + context: SubtestContext + + @property + def head_line(self) -> str: + _, _, domain = self.location + return f"{domain} {self._sub_test_description()}" + + def _sub_test_description(self) -> str: + parts = [] + if self.context.msg is not None: + parts.append(f"[{self.context.msg}]") + if self.context.kwargs: + params_desc = ", ".join( + f"{k}={saferepr(v)}" for (k, v) in self.context.kwargs.items() + ) + parts.append(f"({params_desc})") + return " ".join(parts) or "()" + + def _to_json(self) -> dict[str, Any]: + data = super()._to_json() + del data["context"] + data["_report_type"] = "SubTestReport" + data["_subtest.context"] = self.context._to_json() + return data + + @classmethod + def _from_json(cls, reportdict: dict[str, Any]) -> SubtestReport: + report = super()._from_json(reportdict) + report.context = SubtestContext._from_json(reportdict["_subtest.context"]) + return report + + @classmethod + def _new( + cls, + test_report: TestReport, + context: SubtestContext, + captured_output: Captured | None, + captured_logs: CapturedLogs | None, + ) -> Self: + result = super()._from_json(test_report._to_json()) + result.context = context + + if captured_output: + if captured_output.out: + result.sections.append(("Captured stdout call", captured_output.out)) + if captured_output.err: + result.sections.append(("Captured stderr call", captured_output.err)) + + if captured_logs and (log := captured_logs.handler.stream.getvalue()): + result.sections.append(("Captured log call", log)) + + return result + + +@fixture +def subtests(request: SubRequest) -> Subtests: + """Provides subtests functionality.""" + capmam = request.node.config.pluginmanager.get_plugin("capturemanager") + suspend_capture_ctx = ( + capmam.global_and_fixture_disabled if capmam is not None else nullcontext + ) + return Subtests(request.node.ihook, suspend_capture_ctx, request, _ispytest=True) + + +class Subtests: + """Subtests fixture, enables declaring subtests inside test functions via the :meth:`test` method.""" + + def __init__( + self, + ihook: pluggy.HookRelay, + suspend_capture_ctx: Callable[[], AbstractContextManager[None]], + request: SubRequest, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._ihook = ihook + self._suspend_capture_ctx = suspend_capture_ctx + self._request = request + + def test( + self, + msg: str | None = None, + **kwargs: Any, + ) -> _SubTestContextManager: + """ + Context manager for subtests, capturing exceptions raised inside the subtest scope and + reporting assertion failures and errors individually. + + Usage + ----- + + .. 
code-block:: python + + def test(subtests): + for i in range(5): + with subtests.test("custom message", i=i): + assert i % 2 == 0 + + :param msg: + If given, the message will be shown in the test report in case of subtest failure. + + :param kwargs: + Arbitrary values that are also added to the subtest report. + """ + return _SubTestContextManager( + self._ihook, + msg, + kwargs, + request=self._request, + suspend_capture_ctx=self._suspend_capture_ctx, + config=self._request.config, + ) + + +@dataclasses.dataclass +class _SubTestContextManager: + """ + Context manager for subtests, capturing exceptions raised inside the subtest scope and handling + them through the pytest machinery. + """ + + # Note: initially the logic for this context manager was implemented directly + # in Subtests.test() as a @contextmanager, however, it is not possible to control the output fully when + # exiting from it due to an exception when in `--exitfirst` mode, so this was refactored into an + # explicit context manager class (pytest-dev/pytest-subtests#134). + + ihook: pluggy.HookRelay + msg: str | None + kwargs: dict[str, Any] + suspend_capture_ctx: Callable[[], AbstractContextManager[None]] + request: SubRequest + config: Config + + def __enter__(self) -> None: + __tracebackhide__ = True + + self._start = time.time() + self._precise_start = time.perf_counter() + self._exc_info = None + + self._exit_stack = ExitStack() + self._captured_output = self._exit_stack.enter_context( + capturing_output(self.request) + ) + self._captured_logs = self._exit_stack.enter_context( + capturing_logs(self.request) + ) + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + __tracebackhide__ = True + if exc_val is not None: + exc_info = ExceptionInfo.from_exception(exc_val) + else: + exc_info = None + + self._exit_stack.close() + + precise_stop = time.perf_counter() + duration = precise_stop - self._precise_start + stop = time.time() + + call_info = CallInfo[None]( + None, + exc_info, + start=self._start, + stop=stop, + duration=duration, + when="call", + _ispytest=True, + ) + report = self.ihook.pytest_runtest_makereport( + item=self.request.node, call=call_info + ) + sub_report = SubtestReport._new( + report, + SubtestContext(msg=self.msg, kwargs=self.kwargs), + captured_output=self._captured_output, + captured_logs=self._captured_logs, + ) + + if sub_report.failed: + failed_subtests = self.config.stash[failed_subtests_key] + failed_subtests[self.request.node.nodeid] += 1 + + with self.suspend_capture_ctx(): + self.ihook.pytest_runtest_logreport(report=sub_report) + + if check_interactive_exception(call_info, sub_report): + self.ihook.pytest_exception_interact( + node=self.request.node, call=call_info, report=sub_report + ) + + if exc_val is not None: + if isinstance(exc_val, get_reraise_exceptions(self.config)): + return False + if self.request.session.shouldfail: + return False + return True + + +@contextmanager +def capturing_output(request: SubRequest) -> Iterator[Captured]: + option = request.config.getoption("capture", None) + + capman = request.config.pluginmanager.getplugin("capturemanager") + if getattr(capman, "_capture_fixture", None): + # capsys or capfd are active, subtest should not capture. 
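+        # In that case no per-subtest CaptureFixture is started: output keeps
+        # flowing to the already-active capsys/capfd fixture, so the subtest
+        # report gets no separate "Captured stdout/stderr call" sections.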
+ fixture = None + elif option == "sys": + fixture = CaptureFixture(SysCapture, request, _ispytest=True) + elif option == "fd": + fixture = CaptureFixture(FDCapture, request, _ispytest=True) + else: + fixture = None + + if fixture is not None: + fixture._start() + + captured = Captured() + try: + yield captured + finally: + if fixture is not None: + out, err = fixture.readouterr() + fixture.close() + captured.out = out + captured.err = err + + +@contextmanager +def capturing_logs( + request: SubRequest, +) -> Iterator[CapturedLogs | None]: + logging_plugin: LoggingPlugin | None = request.config.pluginmanager.getplugin( + "logging-plugin" + ) + if logging_plugin is None: + yield None + else: + handler = LogCaptureHandler() + handler.setFormatter(logging_plugin.formatter) + + captured_logs = CapturedLogs(handler) + with catching_logs(handler, level=logging_plugin.log_level): + yield captured_logs + + +@dataclasses.dataclass +class Captured: + out: str = "" + err: str = "" + + +@dataclasses.dataclass +class CapturedLogs: + handler: LogCaptureHandler + + +def pytest_report_to_serializable(report: TestReport) -> dict[str, Any] | None: + if isinstance(report, SubtestReport): + return report._to_json() + return None + + +def pytest_report_from_serializable(data: dict[str, Any]) -> SubtestReport | None: + if data.get("_report_type") == "SubTestReport": + return SubtestReport._from_json(data) + return None + + +# Dict of nodeid -> number of failed subtests. +# Used to fail top-level tests that passed but contain failed subtests. +failed_subtests_key = StashKey[defaultdict[str, int]]() + + +def pytest_configure(config: Config) -> None: + config.stash[failed_subtests_key] = defaultdict(lambda: 0) + + +@hookimpl(tryfirst=True) +def pytest_report_teststatus( + report: TestReport, + config: Config, +) -> tuple[str, str, str | Mapping[str, bool]] | None: + if report.when != "call": + return None + + quiet = config.get_verbosity(Config.VERBOSITY_SUBTESTS) == 0 + if isinstance(report, SubtestReport): + outcome = report.outcome + description = report._sub_test_description() + + if hasattr(report, "wasxfail"): + if quiet: + return "", "", "" + elif outcome == "skipped": + category = "xfailed" + short = "y" # x letter is used for regular xfail, y for subtest xfail + status = "SUBXFAIL" + # outcome == "passed" in an xfail is only possible via a @pytest.mark.xfail mark, which + # is not applicable to a subtest, which only handles pytest.xfail(). + else: # pragma: no cover + # This should not normally happen, unless some plugin is setting wasxfail without + # the correct outcome. Pytest expects the call outcome to be either skipped or + # passed in case of xfail. + # Let's pass this report to the next hook. + return None + return category, short, f"{status}{description}" + + if report.failed: + return outcome, "u", f"SUBFAILED{description}" + else: + if report.passed: + if quiet: + return "", "", "" + else: + return f"subtests {outcome}", "u", f"SUBPASSED{description}" + elif report.skipped: + if quiet: + return "", "", "" + else: + return outcome, "-", f"SUBSKIPPED{description}" + + else: + failed_subtests_count = config.stash[failed_subtests_key][report.nodeid] + # Top-level test, fail if it contains failed subtests and it has passed. 
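+        # The count was accumulated in _SubTestContextManager.__exit__ via the
+        # stash, so a test whose own body passed is still flipped to "failed"
+        # below when any of its subtests failed.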
+ if report.passed and failed_subtests_count > 0: + report.outcome = "failed" + suffix = "s" if failed_subtests_count > 1 else "" + report.longrepr = f"contains {failed_subtests_count} failed subtest{suffix}" + + return None diff --git a/.venv/lib/python3.12/site-packages/_pytest/terminal.py b/.venv/lib/python3.12/site-packages/_pytest/terminal.py new file mode 100644 index 0000000..e66e4f4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/terminal.py @@ -0,0 +1,1763 @@ +# mypy: allow-untyped-defs +"""Terminal reporting of the full testing process. + +This is a good source for looking at the various reporting hooks. +""" + +from __future__ import annotations + +import argparse +from collections import Counter +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Mapping +from collections.abc import Sequence +import dataclasses +import datetime +from functools import partial +import inspect +from pathlib import Path +import platform +import sys +import textwrap +from typing import Any +from typing import ClassVar +from typing import final +from typing import Literal +from typing import NamedTuple +from typing import TextIO +from typing import TYPE_CHECKING +import warnings + +import pluggy + +from _pytest import compat +from _pytest import nodes +from _pytest import timing +from _pytest._code import ExceptionInfo +from _pytest._code.code import ExceptionRepr +from _pytest._io import TerminalWriter +from _pytest._io.wcwidth import wcswidth +import _pytest._version +from _pytest.compat import running_on_ci +from _pytest.config import _PluggyPlugin +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.nodes import Item +from _pytest.nodes import Node +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath +from _pytest.reports import BaseReport +from _pytest.reports import CollectReport +from _pytest.reports import TestReport + + +if TYPE_CHECKING: + from _pytest.main import Session + + +REPORT_COLLECTING_RESOLUTION = 0.5 + +KNOWN_TYPES = ( + "failed", + "passed", + "skipped", + "deselected", + "xfailed", + "xpassed", + "warnings", + "error", + "subtests passed", + "subtests failed", + "subtests skipped", +) + +_REPORTCHARS_DEFAULT = "fE" + + +class MoreQuietAction(argparse.Action): + """A modified copy of the argparse count action which counts down and updates + the legacy quiet attribute at the same time. + + Used to unify verbosity handling. + """ + + def __init__( + self, + option_strings: Sequence[str], + dest: str, + default: object = None, + required: bool = False, + help: str | None = None, + ) -> None: + super().__init__( + option_strings=option_strings, + dest=dest, + nargs=0, + default=default, + required=required, + help=help, + ) + + def __call__( + self, + parser: argparse.ArgumentParser, + namespace: argparse.Namespace, + values: str | Sequence[object] | None, + option_string: str | None = None, + ) -> None: + new_count = getattr(namespace, self.dest, 0) - 1 + setattr(namespace, self.dest, new_count) + # todo Deprecate config.quiet + namespace.quiet = getattr(namespace, "quiet", 0) + 1 + + +class TestShortLogReport(NamedTuple): + """Used to store the test status result category, shortletter and verbose word. + For example ``"rerun", "R", ("RERUN", {"yellow": True})``. 
+ + :ivar category: + The class of result, for example ``“passed”``, ``“skipped”``, ``“error”``, or the empty string. + + :ivar letter: + The short letter shown as testing progresses, for example ``"."``, ``"s"``, ``"E"``, or the empty string. + + :ivar word: + Verbose word is shown as testing progresses in verbose mode, for example ``"PASSED"``, ``"SKIPPED"``, + ``"ERROR"``, or the empty string. + """ + + category: str + letter: str + word: str | tuple[str, Mapping[str, bool]] + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("terminal reporting", "Reporting", after="general") + group._addoption( # private to use reserved lower-case short option + "-v", + "--verbose", + action="count", + default=0, + dest="verbose", + help="Increase verbosity", + ) + group.addoption( + "--no-header", + action="store_true", + default=False, + dest="no_header", + help="Disable header", + ) + group.addoption( + "--no-summary", + action="store_true", + default=False, + dest="no_summary", + help="Disable summary", + ) + group.addoption( + "--no-fold-skipped", + action="store_false", + dest="fold_skipped", + default=True, + help="Do not fold skipped tests in short summary.", + ) + group.addoption( + "--force-short-summary", + action="store_true", + dest="force_short_summary", + default=False, + help="Force condensed summary output regardless of verbosity level.", + ) + group._addoption( # private to use reserved lower-case short option + "-q", + "--quiet", + action=MoreQuietAction, + default=0, + dest="verbose", + help="Decrease verbosity", + ) + group.addoption( + "--verbosity", + dest="verbose", + type=int, + default=0, + help="Set verbosity. Default: 0.", + ) + group._addoption( # private to use reserved lower-case short option + "-r", + action="store", + dest="reportchars", + default=_REPORTCHARS_DEFAULT, + metavar="chars", + help="Show extra test summary info as specified by chars: (f)ailed, " + "(E)rror, (s)kipped, (x)failed, (X)passed, " + "(p)assed, (P)assed with output, (a)ll except passed (p/P), or (A)ll. " + "(w)arnings are enabled by default (see --disable-warnings), " + "'N' can be used to reset the list. (default: 'fE').", + ) + group.addoption( + "--disable-warnings", + "--disable-pytest-warnings", + default=False, + dest="disable_warnings", + action="store_true", + help="Disable warnings summary", + ) + group._addoption( # private to use reserved lower-case short option + "-l", + "--showlocals", + action="store_true", + dest="showlocals", + default=False, + help="Show locals in tracebacks (disabled by default)", + ) + group.addoption( + "--no-showlocals", + action="store_false", + dest="showlocals", + help="Hide locals in tracebacks (negate --showlocals passed through addopts)", + ) + group.addoption( + "--tb", + metavar="style", + action="store", + dest="tbstyle", + default="auto", + choices=["auto", "long", "short", "no", "line", "native"], + help="Traceback print mode (auto/long/short/line/native/no)", + ) + group.addoption( + "--xfail-tb", + action="store_true", + dest="xfail_tb", + default=False, + help="Show tracebacks for xfail (as long as --tb != no)", + ) + group.addoption( + "--show-capture", + action="store", + dest="showcapture", + choices=["no", "stdout", "stderr", "log", "all"], + default="all", + help="Controls how captured stdout/stderr/log is shown on failed tests. 
" + "Default: all.", + ) + group.addoption( + "--fulltrace", + "--full-trace", + action="store_true", + default=False, + help="Don't cut any tracebacks (default is to cut)", + ) + group.addoption( + "--color", + metavar="color", + action="store", + dest="color", + default="auto", + choices=["yes", "no", "auto"], + help="Color terminal output (yes/no/auto)", + ) + group.addoption( + "--code-highlight", + default="yes", + choices=["yes", "no"], + help="Whether code should be highlighted (only if --color is also enabled). " + "Default: yes.", + ) + + parser.addini( + "console_output_style", + help='Console output: "classic", or with additional progress information ' + '("progress" (percentage) | "count" | "progress-even-when-capture-no" (forces ' + "progress even when capture=no)", + default="progress", + ) + Config._add_verbosity_ini( + parser, + Config.VERBOSITY_TEST_CASES, + help=( + "Specify a verbosity level for test case execution, overriding the main level. " + "Higher levels will provide more detailed information about each test case executed." + ), + ) + + +def pytest_configure(config: Config) -> None: + reporter = TerminalReporter(config, sys.stdout) + config.pluginmanager.register(reporter, "terminalreporter") + if config.option.debug or config.option.traceconfig: + + def mywriter(tags, args): + msg = " ".join(map(str, args)) + reporter.write_line("[traceconfig] " + msg) + + config.trace.root.setprocessor("pytest:config", mywriter) + + # See terminalprogress.py. + # On Windows it's safe to load by default. + if sys.platform == "win32": + config.pluginmanager.import_plugin("terminalprogress") + + +def getreportopt(config: Config) -> str: + reportchars: str = config.option.reportchars + + old_aliases = {"F", "S"} + reportopts = "" + for char in reportchars: + if char in old_aliases: + char = char.lower() + if char == "a": + reportopts = "sxXEf" + elif char == "A": + reportopts = "PpsxXEf" + elif char == "N": + reportopts = "" + elif char not in reportopts: + reportopts += char + + if not config.option.disable_warnings and "w" not in reportopts: + reportopts = "w" + reportopts + elif config.option.disable_warnings and "w" in reportopts: + reportopts = reportopts.replace("w", "") + + return reportopts + + +@hookimpl(trylast=True) # after _pytest.runner +def pytest_report_teststatus(report: BaseReport) -> tuple[str, str, str]: + letter = "F" + if report.passed: + letter = "." + elif report.skipped: + letter = "s" + + outcome: str = report.outcome + if report.when in ("collect", "setup", "teardown") and outcome == "failed": + outcome = "error" + letter = "E" + + return outcome, letter, outcome.upper() + + +@dataclasses.dataclass +class WarningReport: + """Simple structure to hold warnings information captured by ``pytest_warning_recorded``. + + :ivar str message: + User friendly message about the warning. + :ivar str|None nodeid: + nodeid that generated the warning (see ``get_location``). + :ivar tuple fslocation: + File system location of the source of the warning (see ``get_location``). 
+ """ + + message: str + nodeid: str | None = None + fslocation: tuple[str, int] | None = None + + count_towards_summary: ClassVar = True + + def get_location(self, config: Config) -> str | None: + """Return the more user-friendly information about the location of a warning, or None.""" + if self.nodeid: + return self.nodeid + if self.fslocation: + filename, linenum = self.fslocation + relpath = bestrelpath(config.invocation_params.dir, absolutepath(filename)) + return f"{relpath}:{linenum}" + return None + + +@final +class TerminalReporter: + def __init__(self, config: Config, file: TextIO | None = None) -> None: + import _pytest.config + + self.config = config + self._numcollected = 0 + self._session: Session | None = None + self._showfspath: bool | None = None + + self.stats: dict[str, list[Any]] = {} + self._main_color: str | None = None + self._known_types: list[str] | None = None + self.startpath = config.invocation_params.dir + if file is None: + file = sys.stdout + self._tw = _pytest.config.create_terminal_writer(config, file) + self._screen_width = self._tw.fullwidth + self.currentfspath: None | Path | str | int = None + self.reportchars = getreportopt(config) + self.foldskipped = config.option.fold_skipped + self.hasmarkup = self._tw.hasmarkup + # isatty should be a method but was wrongly implemented as a boolean. + # We use CallableBool here to support both. + self.isatty = compat.CallableBool(file.isatty()) + self._progress_nodeids_reported: set[str] = set() + self._timing_nodeids_reported: set[str] = set() + self._show_progress_info = self._determine_show_progress_info() + self._collect_report_last_write = timing.Instant() + self._already_displayed_warnings: int | None = None + self._keyboardinterrupt_memo: ExceptionRepr | None = None + + def _determine_show_progress_info( + self, + ) -> Literal["progress", "count", "times", False]: + """Return whether we should display progress information based on the current config.""" + # do not show progress if we are not capturing output (#3038) unless explicitly + # overridden by progress-even-when-capture-no + if ( + self.config.getoption("capture", "no") == "no" + and self.config.getini("console_output_style") + != "progress-even-when-capture-no" + ): + return False + # do not show progress if we are showing fixture setup/teardown + if self.config.getoption("setupshow", False): + return False + cfg: str = self.config.getini("console_output_style") + if cfg in {"progress", "progress-even-when-capture-no"}: + return "progress" + elif cfg == "count": + return "count" + elif cfg == "times": + return "times" + else: + return False + + @property + def verbosity(self) -> int: + verbosity: int = self.config.option.verbose + return verbosity + + @property + def showheader(self) -> bool: + return self.verbosity >= 0 + + @property + def no_header(self) -> bool: + return bool(self.config.option.no_header) + + @property + def no_summary(self) -> bool: + return bool(self.config.option.no_summary) + + @property + def showfspath(self) -> bool: + if self._showfspath is None: + return self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) >= 0 + return self._showfspath + + @showfspath.setter + def showfspath(self, value: bool | None) -> None: + self._showfspath = value + + @property + def showlongtestinfo(self) -> bool: + return self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) > 0 + + @property + def reported_progress(self) -> int: + """The amount of items reported in the progress so far. 
+ + :meta private: + """ + return len(self._progress_nodeids_reported) + + def hasopt(self, char: str) -> bool: + char = {"xfailed": "x", "skipped": "s"}.get(char, char) + return char in self.reportchars + + def write_fspath_result(self, nodeid: str, res: str, **markup: bool) -> None: + fspath = self.config.rootpath / nodeid.split("::")[0] + if self.currentfspath is None or fspath != self.currentfspath: + if self.currentfspath is not None and self._show_progress_info: + self._write_progress_information_filling_space() + self.currentfspath = fspath + relfspath = bestrelpath(self.startpath, fspath) + self._tw.line() + self._tw.write(relfspath + " ") + self._tw.write(res, flush=True, **markup) + + def write_ensure_prefix(self, prefix: str, extra: str = "", **kwargs) -> None: + if self.currentfspath != prefix: + self._tw.line() + self.currentfspath = prefix + self._tw.write(prefix) + if extra: + self._tw.write(extra, **kwargs) + self.currentfspath = -2 + + def ensure_newline(self) -> None: + if self.currentfspath: + self._tw.line() + self.currentfspath = None + + def wrap_write( + self, + content: str, + *, + flush: bool = False, + margin: int = 8, + line_sep: str = "\n", + **markup: bool, + ) -> None: + """Wrap message with margin for progress info.""" + width_of_current_line = self._tw.width_of_current_line + wrapped = line_sep.join( + textwrap.wrap( + " " * width_of_current_line + content, + width=self._screen_width - margin, + drop_whitespace=True, + replace_whitespace=False, + ), + ) + wrapped = wrapped[width_of_current_line:] + self._tw.write(wrapped, flush=flush, **markup) + + def write(self, content: str, *, flush: bool = False, **markup: bool) -> None: + self._tw.write(content, flush=flush, **markup) + + def write_raw(self, content: str, *, flush: bool = False) -> None: + self._tw.write_raw(content, flush=flush) + + def flush(self) -> None: + self._tw.flush() + + def write_line(self, line: str | bytes, **markup: bool) -> None: + if not isinstance(line, str): + line = str(line, errors="replace") + self.ensure_newline() + self._tw.line(line, **markup) + + def rewrite(self, line: str, **markup: bool) -> None: + """Rewinds the terminal cursor to the beginning and writes the given line. + + :param erase: + If True, will also add spaces until the full terminal width to ensure + previous lines are properly erased. + + The rest of the keyword arguments are markup instructions. 
+ """ + erase = markup.pop("erase", False) + if erase: + fill_count = self._tw.fullwidth - len(line) - 1 + fill = " " * fill_count + else: + fill = "" + line = str(line) + self._tw.write("\r" + line + fill, **markup) + + def write_sep( + self, + sep: str, + title: str | None = None, + fullwidth: int | None = None, + **markup: bool, + ) -> None: + self.ensure_newline() + self._tw.sep(sep, title, fullwidth, **markup) + + def section(self, title: str, sep: str = "=", **kw: bool) -> None: + self._tw.sep(sep, title, **kw) + + def line(self, msg: str, **kw: bool) -> None: + self._tw.line(msg, **kw) + + def _add_stats(self, category: str, items: Sequence[Any]) -> None: + set_main_color = category not in self.stats + self.stats.setdefault(category, []).extend(items) + if set_main_color: + self._set_main_color() + + def pytest_internalerror(self, excrepr: ExceptionRepr) -> bool: + for line in str(excrepr).split("\n"): + self.write_line("INTERNALERROR> " + line) + return True + + def pytest_warning_recorded( + self, + warning_message: warnings.WarningMessage, + nodeid: str, + ) -> None: + from _pytest.warnings import warning_record_to_str + + fslocation = warning_message.filename, warning_message.lineno + message = warning_record_to_str(warning_message) + + warning_report = WarningReport( + fslocation=fslocation, message=message, nodeid=nodeid + ) + self._add_stats("warnings", [warning_report]) + + def pytest_plugin_registered(self, plugin: _PluggyPlugin) -> None: + if self.config.option.traceconfig: + msg = f"PLUGIN registered: {plugin}" + # XXX This event may happen during setup/teardown time + # which unfortunately captures our output here + # which garbles our output if we use self.write_line. + self.write_line(msg) + + def pytest_deselected(self, items: Sequence[Item]) -> None: + self._add_stats("deselected", items) + + def pytest_runtest_logstart( + self, nodeid: str, location: tuple[str, int | None, str] + ) -> None: + fspath, lineno, domain = location + # Ensure that the path is printed before the + # 1st test of a module starts running. + if self.showlongtestinfo: + line = self._locationline(nodeid, fspath, lineno, domain) + self.write_ensure_prefix(line, "") + self.flush() + elif self.showfspath: + self.write_fspath_result(nodeid, "") + self.flush() + + def pytest_runtest_logreport(self, report: TestReport) -> None: + self._tests_ran = True + rep = report + + res = TestShortLogReport( + *self.config.hook.pytest_report_teststatus(report=rep, config=self.config) + ) + category, letter, word = res.category, res.letter, res.word + if not isinstance(word, tuple): + markup = None + else: + word, markup = word + self._add_stats(category, [rep]) + if not letter and not word: + # Probably passed setup/teardown. + return + if markup is None: + was_xfail = hasattr(report, "wasxfail") + if rep.passed and not was_xfail: + markup = {"green": True} + elif rep.passed and was_xfail: + markup = {"yellow": True} + elif rep.failed: + markup = {"red": True} + elif rep.skipped: + markup = {"yellow": True} + else: + markup = {} + self._progress_nodeids_reported.add(rep.nodeid) + if self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) <= 0: + self._tw.write(letter, **markup) + # When running in xdist, the logreport and logfinish of multiple + # items are interspersed, e.g. `logreport`, `logreport`, + # `logfinish`, `logfinish`. 
To avoid the "past edge" calculation + # from getting confused and overflowing (#7166), do the past edge + # printing here and not in logfinish, except for the 100% which + # should only be printed after all teardowns are finished. + if self._show_progress_info and not self._is_last_item: + self._write_progress_information_if_past_edge() + else: + line = self._locationline(rep.nodeid, *rep.location) + running_xdist = hasattr(rep, "node") + if not running_xdist: + self.write_ensure_prefix(line, word, **markup) + if rep.skipped or hasattr(report, "wasxfail"): + reason = _get_raw_skip_reason(rep) + if self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) < 2: + available_width = ( + (self._tw.fullwidth - self._tw.width_of_current_line) + - len(" [100%]") + - 1 + ) + formatted_reason = _format_trimmed( + " ({})", reason, available_width + ) + else: + formatted_reason = f" ({reason})" + + if reason and formatted_reason is not None: + self.wrap_write(formatted_reason) + if self._show_progress_info: + self._write_progress_information_filling_space() + else: + self.ensure_newline() + self._tw.write(f"[{rep.node.gateway.id}]") + if self._show_progress_info: + self._tw.write( + self._get_progress_information_message() + " ", cyan=True + ) + else: + self._tw.write(" ") + self._tw.write(word, **markup) + self._tw.write(" " + line) + self.currentfspath = -2 + self.flush() + + @property + def _is_last_item(self) -> bool: + assert self._session is not None + return self.reported_progress == self._session.testscollected + + @hookimpl(wrapper=True) + def pytest_runtestloop(self) -> Generator[None, object, object]: + result = yield + + # Write the final/100% progress -- deferred until the loop is complete. + if ( + self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) <= 0 + and self._show_progress_info + and self.reported_progress + ): + self._write_progress_information_filling_space() + + return result + + def _get_progress_information_message(self) -> str: + assert self._session + collected = self._session.testscollected + if self._show_progress_info == "count": + if collected: + progress = self.reported_progress + counter_format = f"{{:{len(str(collected))}d}}" + format_string = f" [{counter_format}/{{}}]" + return format_string.format(progress, collected) + return f" [ {collected} / {collected} ]" + if self._show_progress_info == "times": + if not collected: + return "" + all_reports = ( + self._get_reports_to_display("passed") + + self._get_reports_to_display("xpassed") + + self._get_reports_to_display("failed") + + self._get_reports_to_display("xfailed") + + self._get_reports_to_display("skipped") + + self._get_reports_to_display("error") + + self._get_reports_to_display("") + ) + current_location = all_reports[-1].location[0] + not_reported = [ + r for r in all_reports if r.nodeid not in self._timing_nodeids_reported + ] + tests_in_module = sum( + i.location[0] == current_location for i in self._session.items + ) + tests_completed = sum( + r.when == "setup" + for r in not_reported + if r.location[0] == current_location + ) + last_in_module = tests_completed == tests_in_module + if self.showlongtestinfo or last_in_module: + self._timing_nodeids_reported.update(r.nodeid for r in not_reported) + return format_node_duration( + sum(r.duration for r in not_reported if isinstance(r, TestReport)) + ) + return "" + if collected: + return f" [{self.reported_progress * 100 // collected:3d}%]" + return " [100%]" + + def _write_progress_information_if_past_edge(self) -> None: + w = self._width_of_current_line 
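+        # Estimate the width of the progress suffix for the active style; if the
+        # current line plus that suffix would run past the screen width, write
+        # the progress information now on its own line rather than in logfinish.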
+ if self._show_progress_info == "count": + assert self._session + num_tests = self._session.testscollected + progress_length = len(f" [{num_tests}/{num_tests}]") + elif self._show_progress_info == "times": + progress_length = len(" 99h 59m") + else: + progress_length = len(" [100%]") + past_edge = w + progress_length + 1 >= self._screen_width + if past_edge: + main_color, _ = self._get_main_color() + msg = self._get_progress_information_message() + self._tw.write(msg + "\n", **{main_color: True}) + + def _write_progress_information_filling_space(self) -> None: + color, _ = self._get_main_color() + msg = self._get_progress_information_message() + w = self._width_of_current_line + fill = self._tw.fullwidth - w - 1 + self.write(msg.rjust(fill), flush=True, **{color: True}) + + @property + def _width_of_current_line(self) -> int: + """Return the width of the current line.""" + return self._tw.width_of_current_line + + def pytest_collection(self) -> None: + if self.isatty(): + if self.config.option.verbose >= 0: + self.write("collecting ... ", flush=True, bold=True) + elif self.config.option.verbose >= 1: + self.write("collecting ... ", flush=True, bold=True) + + def pytest_collectreport(self, report: CollectReport) -> None: + if report.failed: + self._add_stats("error", [report]) + elif report.skipped: + self._add_stats("skipped", [report]) + items = [x for x in report.result if isinstance(x, Item)] + self._numcollected += len(items) + if self.isatty(): + self.report_collect() + + def report_collect(self, final: bool = False) -> None: + if self.config.option.verbose < 0: + return + + if not final: + # Only write the "collecting" report every `REPORT_COLLECTING_RESOLUTION`. + if ( + self._collect_report_last_write.elapsed().seconds + < REPORT_COLLECTING_RESOLUTION + ): + return + self._collect_report_last_write = timing.Instant() + + errors = len(self.stats.get("error", [])) + skipped = len(self.stats.get("skipped", [])) + deselected = len(self.stats.get("deselected", [])) + selected = self._numcollected - deselected + line = "collected " if final else "collecting " + line += ( + str(self._numcollected) + " item" + ("" if self._numcollected == 1 else "s") + ) + if errors: + line += f" / {errors} error{'s' if errors != 1 else ''}" + if deselected: + line += f" / {deselected} deselected" + if skipped: + line += f" / {skipped} skipped" + if self._numcollected > selected: + line += f" / {selected} selected" + if self.isatty(): + self.rewrite(line, bold=True, erase=True) + if final: + self.write("\n") + else: + self.write_line(line) + + @hookimpl(trylast=True) + def pytest_sessionstart(self, session: Session) -> None: + self._session = session + self._session_start = timing.Instant() + if not self.showheader: + return + self.write_sep("=", "test session starts", bold=True) + verinfo = platform.python_version() + if not self.no_header: + msg = f"platform {sys.platform} -- Python {verinfo}" + pypy_version_info = getattr(sys, "pypy_version_info", None) + if pypy_version_info: + verinfo = ".".join(map(str, pypy_version_info[:3])) + msg += f"[pypy-{verinfo}-{pypy_version_info[3]}]" + msg += f", pytest-{_pytest._version.version}, pluggy-{pluggy.__version__}" + if ( + self.verbosity > 0 + or self.config.option.debug + or getattr(self.config.option, "pastebin", None) + ): + msg += " -- " + str(sys.executable) + self.write_line(msg) + lines = self.config.hook.pytest_report_header( + config=self.config, start_path=self.startpath + ) + self._write_report_lines_from_hooks(lines) + + def 
_write_report_lines_from_hooks( + self, lines: Sequence[str | Sequence[str]] + ) -> None: + for line_or_lines in reversed(lines): + if isinstance(line_or_lines, str): + self.write_line(line_or_lines) + else: + for line in line_or_lines: + self.write_line(line) + + def pytest_report_header(self, config: Config) -> list[str]: + result = [f"rootdir: {config.rootpath}"] + + if config.inipath: + warning = "" + if config._ignored_config_files: + warning = f" (WARNING: ignoring pytest config in {', '.join(config._ignored_config_files)}!)" + result.append( + "configfile: " + bestrelpath(config.rootpath, config.inipath) + warning + ) + + if config.args_source == Config.ArgsSource.TESTPATHS: + testpaths: list[str] = config.getini("testpaths") + result.append("testpaths: {}".format(", ".join(testpaths))) + + plugininfo = config.pluginmanager.list_plugin_distinfo() + if plugininfo: + result.append( + "plugins: {}".format(", ".join(_plugin_nameversions(plugininfo))) + ) + return result + + def pytest_collection_finish(self, session: Session) -> None: + self.report_collect(True) + + lines = self.config.hook.pytest_report_collectionfinish( + config=self.config, + start_path=self.startpath, + items=session.items, + ) + self._write_report_lines_from_hooks(lines) + + if self.config.getoption("collectonly"): + if session.items: + if self.config.option.verbose > -1: + self._tw.line("") + self._printcollecteditems(session.items) + + failed = self.stats.get("failed") + if failed: + self._tw.sep("!", "collection failures") + for rep in failed: + rep.toterminal(self._tw) + + def _printcollecteditems(self, items: Sequence[Item]) -> None: + test_cases_verbosity = self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) + if test_cases_verbosity < 0: + if test_cases_verbosity < -1: + counts = Counter(item.nodeid.split("::", 1)[0] for item in items) + for name, count in sorted(counts.items()): + self._tw.line(f"{name}: {count}") + else: + for item in items: + self._tw.line(item.nodeid) + return + stack: list[Node] = [] + indent = "" + for item in items: + needed_collectors = item.listchain()[1:] # strip root node + while stack: + if stack == needed_collectors[: len(stack)]: + break + stack.pop() + for col in needed_collectors[len(stack) :]: + stack.append(col) + indent = (len(stack) - 1) * " " + self._tw.line(f"{indent}{col}") + if test_cases_verbosity >= 1: + obj = getattr(col, "obj", None) + doc = inspect.getdoc(obj) if obj else None + if doc: + for line in doc.splitlines(): + self._tw.line("{}{}".format(indent + " ", line)) + + @hookimpl(wrapper=True) + def pytest_sessionfinish( + self, session: Session, exitstatus: int | ExitCode + ) -> Generator[None]: + result = yield + self._tw.line("") + summary_exit_codes = ( + ExitCode.OK, + ExitCode.TESTS_FAILED, + ExitCode.INTERRUPTED, + ExitCode.USAGE_ERROR, + ExitCode.NO_TESTS_COLLECTED, + ) + if exitstatus in summary_exit_codes and not self.no_summary: + self.config.hook.pytest_terminal_summary( + terminalreporter=self, exitstatus=exitstatus, config=self.config + ) + if session.shouldfail: + self.write_sep("!", str(session.shouldfail), red=True) + if exitstatus == ExitCode.INTERRUPTED: + self._report_keyboardinterrupt() + self._keyboardinterrupt_memo = None + elif session.shouldstop: + self.write_sep("!", str(session.shouldstop), red=True) + self.summary_stats() + return result + + @hookimpl(wrapper=True) + def pytest_terminal_summary(self) -> Generator[None]: + self.summary_errors() + self.summary_failures() + self.summary_xfailures() + self.summary_warnings() + 
self.summary_passes() + self.summary_xpasses() + try: + return (yield) + finally: + self.short_test_summary() + # Display any extra warnings from teardown here (if any). + self.summary_warnings() + + def pytest_keyboard_interrupt(self, excinfo: ExceptionInfo[BaseException]) -> None: + self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True) + + def pytest_unconfigure(self) -> None: + if self._keyboardinterrupt_memo is not None: + self._report_keyboardinterrupt() + + def _report_keyboardinterrupt(self) -> None: + excrepr = self._keyboardinterrupt_memo + assert excrepr is not None + assert excrepr.reprcrash is not None + msg = excrepr.reprcrash.message + self.write_sep("!", msg) + if "KeyboardInterrupt" in msg: + if self.config.option.fulltrace: + excrepr.toterminal(self._tw) + else: + excrepr.reprcrash.toterminal(self._tw) + self._tw.line( + "(to show a full traceback on KeyboardInterrupt use --full-trace)", + yellow=True, + ) + + def _locationline( + self, nodeid: str, fspath: str, lineno: int | None, domain: str + ) -> str: + def mkrel(nodeid: str) -> str: + line = self.config.cwd_relative_nodeid(nodeid) + if domain and line.endswith(domain): + line = line[: -len(domain)] + values = domain.split("[") + values[0] = values[0].replace(".", "::") # don't replace '.' in params + line += "[".join(values) + return line + + # fspath comes from testid which has a "/"-normalized path. + if fspath: + res = mkrel(nodeid) + if self.verbosity >= 2 and nodeid.split("::")[0] != fspath.replace( + "\\", nodes.SEP + ): + res += " <- " + bestrelpath(self.startpath, Path(fspath)) + else: + res = "[location]" + return res + " " + + def _getfailureheadline(self, rep): + head_line = rep.head_line + if head_line: + return head_line + return "test session" # XXX? + + def _getcrashline(self, rep): + try: + return str(rep.longrepr.reprcrash) + except AttributeError: + try: + return str(rep.longrepr)[:50] + except AttributeError: + return "" + + # + # Summaries for sessionfinish. 
+ # + def getreports(self, name: str): + return [x for x in self.stats.get(name, ()) if not hasattr(x, "_pdbshown")] + + def summary_warnings(self) -> None: + if self.hasopt("w"): + all_warnings: list[WarningReport] | None = self.stats.get("warnings") + if not all_warnings: + return + + final = self._already_displayed_warnings is not None + if final: + warning_reports = all_warnings[self._already_displayed_warnings :] + else: + warning_reports = all_warnings + self._already_displayed_warnings = len(warning_reports) + if not warning_reports: + return + + reports_grouped_by_message: dict[str, list[WarningReport]] = {} + for wr in warning_reports: + reports_grouped_by_message.setdefault(wr.message, []).append(wr) + + def collapsed_location_report(reports: list[WarningReport]) -> str: + locations = [] + for w in reports: + location = w.get_location(self.config) + if location: + locations.append(location) + + if len(locations) < 10: + return "\n".join(map(str, locations)) + + counts_by_filename = Counter( + str(loc).split("::", 1)[0] for loc in locations + ) + return "\n".join( + "{}: {} warning{}".format(k, v, "s" if v > 1 else "") + for k, v in counts_by_filename.items() + ) + + title = "warnings summary (final)" if final else "warnings summary" + self.write_sep("=", title, yellow=True, bold=False) + for message, message_reports in reports_grouped_by_message.items(): + maybe_location = collapsed_location_report(message_reports) + if maybe_location: + self._tw.line(maybe_location) + lines = message.splitlines() + indented = "\n".join(" " + x for x in lines) + message = indented.rstrip() + else: + message = message.rstrip() + self._tw.line(message) + self._tw.line() + self._tw.line( + "-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html" + ) + + def summary_passes(self) -> None: + self.summary_passes_combined("passed", "PASSES", "P") + + def summary_xpasses(self) -> None: + self.summary_passes_combined("xpassed", "XPASSES", "X") + + def summary_passes_combined( + self, which_reports: str, sep_title: str, needed_opt: str + ) -> None: + if self.config.option.tbstyle != "no": + if self.hasopt(needed_opt): + reports: list[TestReport] = self.getreports(which_reports) + if not reports: + return + self.write_sep("=", sep_title) + for rep in reports: + if rep.sections: + msg = self._getfailureheadline(rep) + self.write_sep("_", msg, green=True, bold=True) + self._outrep_summary(rep) + self._handle_teardown_sections(rep.nodeid) + + def _get_teardown_reports(self, nodeid: str) -> list[TestReport]: + reports = self.getreports("") + return [ + report + for report in reports + if report.when == "teardown" and report.nodeid == nodeid + ] + + def _handle_teardown_sections(self, nodeid: str) -> None: + for report in self._get_teardown_reports(nodeid): + self.print_teardown_sections(report) + + def print_teardown_sections(self, rep: TestReport) -> None: + showcapture = self.config.option.showcapture + if showcapture == "no": + return + for secname, content in rep.sections: + if showcapture != "all" and showcapture not in secname: + continue + if "teardown" in secname: + self._tw.sep("-", secname) + if content[-1:] == "\n": + content = content[:-1] + self._tw.line(content) + + def summary_failures(self) -> None: + style = self.config.option.tbstyle + self.summary_failures_combined("failed", "FAILURES", style=style) + + def summary_xfailures(self) -> None: + show_tb = self.config.option.xfail_tb + style = self.config.option.tbstyle if show_tb else "no" + 
self.summary_failures_combined("xfailed", "XFAILURES", style=style) + + def summary_failures_combined( + self, + which_reports: str, + sep_title: str, + *, + style: str, + needed_opt: str | None = None, + ) -> None: + if style != "no": + if not needed_opt or self.hasopt(needed_opt): + reports: list[BaseReport] = self.getreports(which_reports) + if not reports: + return + self.write_sep("=", sep_title) + if style == "line": + for rep in reports: + line = self._getcrashline(rep) + self._outrep_summary(rep) + self.write_line(line) + else: + for rep in reports: + msg = self._getfailureheadline(rep) + self.write_sep("_", msg, red=True, bold=True) + self._outrep_summary(rep) + self._handle_teardown_sections(rep.nodeid) + + def summary_errors(self) -> None: + if self.config.option.tbstyle != "no": + reports: list[BaseReport] = self.getreports("error") + if not reports: + return + self.write_sep("=", "ERRORS") + for rep in self.stats["error"]: + msg = self._getfailureheadline(rep) + if rep.when == "collect": + msg = "ERROR collecting " + msg + else: + msg = f"ERROR at {rep.when} of {msg}" + self.write_sep("_", msg, red=True, bold=True) + self._outrep_summary(rep) + + def _outrep_summary(self, rep: BaseReport) -> None: + rep.toterminal(self._tw) + showcapture = self.config.option.showcapture + if showcapture == "no": + return + for secname, content in rep.sections: + if showcapture != "all" and showcapture not in secname: + continue + self._tw.sep("-", secname) + if content[-1:] == "\n": + content = content[:-1] + self._tw.line(content) + + def summary_stats(self) -> None: + if self.verbosity < -1: + return + + session_duration = self._session_start.elapsed() + (parts, main_color) = self.build_summary_stats_line() + line_parts = [] + + display_sep = self.verbosity >= 0 + if display_sep: + fullwidth = self._tw.fullwidth + for text, markup in parts: + with_markup = self._tw.markup(text, **markup) + if display_sep: + fullwidth += len(with_markup) - len(text) + line_parts.append(with_markup) + msg = ", ".join(line_parts) + + main_markup = {main_color: True} + duration = f" in {format_session_duration(session_duration.seconds)}" + duration_with_markup = self._tw.markup(duration, **main_markup) + if display_sep: + fullwidth += len(duration_with_markup) - len(duration) + msg += duration_with_markup + + if display_sep: + markup_for_end_sep = self._tw.markup("", **main_markup) + if markup_for_end_sep.endswith("\x1b[0m"): + markup_for_end_sep = markup_for_end_sep[:-4] + fullwidth += len(markup_for_end_sep) + msg += markup_for_end_sep + + if display_sep: + self.write_sep("=", msg, fullwidth=fullwidth, **main_markup) + else: + self.write_line(msg, **main_markup) + + def short_test_summary(self) -> None: + if not self.reportchars: + return + + def show_simple(lines: list[str], *, stat: str) -> None: + failed = self.stats.get(stat, []) + if not failed: + return + config = self.config + for rep in failed: + color = _color_for_type.get(stat, _color_for_type_default) + line = _get_line_with_reprcrash_message( + config, rep, self._tw, {color: True} + ) + lines.append(line) + + def show_xfailed(lines: list[str]) -> None: + xfailed = self.stats.get("xfailed", []) + for rep in xfailed: + verbose_word, verbose_markup = rep._get_verbose_word_with_markup( + self.config, {_color_for_type["warnings"]: True} + ) + markup_word = self._tw.markup(verbose_word, **verbose_markup) + nodeid = _get_node_id_with_markup(self._tw, self.config, rep) + line = f"{markup_word} {nodeid}" + reason = rep.wasxfail + if reason: + line += " - " 
+ str(reason) + + lines.append(line) + + def show_xpassed(lines: list[str]) -> None: + xpassed = self.stats.get("xpassed", []) + for rep in xpassed: + verbose_word, verbose_markup = rep._get_verbose_word_with_markup( + self.config, {_color_for_type["warnings"]: True} + ) + markup_word = self._tw.markup(verbose_word, **verbose_markup) + nodeid = _get_node_id_with_markup(self._tw, self.config, rep) + line = f"{markup_word} {nodeid}" + reason = rep.wasxfail + if reason: + line += " - " + str(reason) + lines.append(line) + + def show_skipped_folded(lines: list[str]) -> None: + skipped: list[CollectReport] = self.stats.get("skipped", []) + fskips = _folded_skips(self.startpath, skipped) if skipped else [] + if not fskips: + return + verbose_word, verbose_markup = skipped[0]._get_verbose_word_with_markup( + self.config, {_color_for_type["warnings"]: True} + ) + markup_word = self._tw.markup(verbose_word, **verbose_markup) + prefix = "Skipped: " + for num, fspath, lineno, reason in fskips: + if reason.startswith(prefix): + reason = reason[len(prefix) :] + if lineno is not None: + lines.append(f"{markup_word} [{num}] {fspath}:{lineno}: {reason}") + else: + lines.append(f"{markup_word} [{num}] {fspath}: {reason}") + + def show_skipped_unfolded(lines: list[str]) -> None: + skipped: list[CollectReport] = self.stats.get("skipped", []) + + for rep in skipped: + assert rep.longrepr is not None + assert isinstance(rep.longrepr, tuple), (rep, rep.longrepr) + assert len(rep.longrepr) == 3, (rep, rep.longrepr) + + verbose_word, verbose_markup = rep._get_verbose_word_with_markup( + self.config, {_color_for_type["warnings"]: True} + ) + markup_word = self._tw.markup(verbose_word, **verbose_markup) + nodeid = _get_node_id_with_markup(self._tw, self.config, rep) + line = f"{markup_word} {nodeid}" + reason = rep.longrepr[2] + if reason: + line += " - " + str(reason) + lines.append(line) + + def show_skipped(lines: list[str]) -> None: + if self.foldskipped: + show_skipped_folded(lines) + else: + show_skipped_unfolded(lines) + + REPORTCHAR_ACTIONS: Mapping[str, Callable[[list[str]], None]] = { + "x": show_xfailed, + "X": show_xpassed, + "f": partial(show_simple, stat="failed"), + "s": show_skipped, + "p": partial(show_simple, stat="passed"), + "E": partial(show_simple, stat="error"), + } + + lines: list[str] = [] + for char in self.reportchars: + action = REPORTCHAR_ACTIONS.get(char) + if action: # skipping e.g. "P" (passed with output) here. 
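+                # Each handler appends ready-formatted one-line entries for its
+                # category (failed, skipped, xfailed, ...) to `lines`.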
+ action(lines) + + if lines: + self.write_sep("=", "short test summary info", cyan=True, bold=True) + for line in lines: + self.write_line(line) + + def _get_main_color(self) -> tuple[str, list[str]]: + if self._main_color is None or self._known_types is None or self._is_last_item: + self._set_main_color() + assert self._main_color + assert self._known_types + return self._main_color, self._known_types + + def _determine_main_color(self, unknown_type_seen: bool) -> str: + stats = self.stats + if "failed" in stats or "error" in stats: + main_color = "red" + elif "warnings" in stats or "xpassed" in stats or unknown_type_seen: + main_color = "yellow" + elif "passed" in stats or not self._is_last_item: + main_color = "green" + else: + main_color = "yellow" + return main_color + + def _set_main_color(self) -> None: + unknown_types: list[str] = [] + for found_type in self.stats: + if found_type: # setup/teardown reports have an empty key, ignore them + if found_type not in KNOWN_TYPES and found_type not in unknown_types: + unknown_types.append(found_type) + self._known_types = list(KNOWN_TYPES) + unknown_types + self._main_color = self._determine_main_color(bool(unknown_types)) + + def build_summary_stats_line(self) -> tuple[list[tuple[str, dict[str, bool]]], str]: + """ + Build the parts used in the last summary stats line. + + The summary stats line is the line shown at the end, "=== 12 passed, 2 errors in Xs===". + + This function builds a list of the "parts" that make up for the text in that line, in + the example above it would be:: + + [ + ("12 passed", {"green": True}), + ("2 errors", {"red": True} + ] + + That last dict for each line is a "markup dictionary", used by TerminalWriter to + color output. + + The final color of the line is also determined by this function, and is the second + element of the returned tuple. 
+ """ + if self.config.getoption("collectonly"): + return self._build_collect_only_summary_stats_line() + else: + return self._build_normal_summary_stats_line() + + def _get_reports_to_display(self, key: str) -> list[Any]: + """Get test/collection reports for the given status key, such as `passed` or `error`.""" + reports = self.stats.get(key, []) + return [x for x in reports if getattr(x, "count_towards_summary", True)] + + def _build_normal_summary_stats_line( + self, + ) -> tuple[list[tuple[str, dict[str, bool]]], str]: + main_color, known_types = self._get_main_color() + parts = [] + + for key in known_types: + reports = self._get_reports_to_display(key) + if reports: + count = len(reports) + color = _color_for_type.get(key, _color_for_type_default) + markup = {color: True, "bold": color == main_color} + parts.append(("%d %s" % pluralize(count, key), markup)) # noqa: UP031 + + if not parts: + parts = [("no tests ran", {_color_for_type_default: True})] + + return parts, main_color + + def _build_collect_only_summary_stats_line( + self, + ) -> tuple[list[tuple[str, dict[str, bool]]], str]: + deselected = len(self._get_reports_to_display("deselected")) + errors = len(self._get_reports_to_display("error")) + + if self._numcollected == 0: + parts = [("no tests collected", {"yellow": True})] + main_color = "yellow" + + elif deselected == 0: + main_color = "green" + collected_output = "%d %s collected" % pluralize(self._numcollected, "test") # noqa: UP031 + parts = [(collected_output, {main_color: True})] + else: + all_tests_were_deselected = self._numcollected == deselected + if all_tests_were_deselected: + main_color = "yellow" + collected_output = f"no tests collected ({deselected} deselected)" + else: + main_color = "green" + selected = self._numcollected - deselected + collected_output = f"{selected}/{self._numcollected} tests collected ({deselected} deselected)" + + parts = [(collected_output, {main_color: True})] + + if errors: + main_color = _color_for_type["error"] + parts += [("%d %s" % pluralize(errors, "error"), {main_color: True})] # noqa: UP031 + + return parts, main_color + + +def _get_node_id_with_markup(tw: TerminalWriter, config: Config, rep: BaseReport): + nodeid = config.cwd_relative_nodeid(rep.nodeid) + path, *parts = nodeid.split("::") + if parts: + parts_markup = tw.markup("::".join(parts), bold=True) + return path + "::" + parts_markup + else: + return path + + +def _format_trimmed(format: str, msg: str, available_width: int) -> str | None: + """Format msg into format, ellipsizing it if doesn't fit in available_width. + + Returns None if even the ellipsis can't fit. + """ + # Only use the first line. + i = msg.find("\n") + if i != -1: + msg = msg[:i] + + ellipsis = "..." 
+ format_width = wcswidth(format.format("")) + if format_width + len(ellipsis) > available_width: + return None + + if format_width + wcswidth(msg) > available_width: + available_width -= len(ellipsis) + msg = msg[:available_width] + while format_width + wcswidth(msg) > available_width: + msg = msg[:-1] + msg += ellipsis + + return format.format(msg) + + +def _get_line_with_reprcrash_message( + config: Config, rep: BaseReport, tw: TerminalWriter, word_markup: dict[str, bool] +) -> str: + """Get summary line for a report, trying to add reprcrash message.""" + verbose_word, verbose_markup = rep._get_verbose_word_with_markup( + config, word_markup + ) + word = tw.markup(verbose_word, **verbose_markup) + node = _get_node_id_with_markup(tw, config, rep) + + line = f"{word} {node}" + line_width = wcswidth(line) + + msg: str | None + try: + if isinstance(rep.longrepr, str): + msg = rep.longrepr + else: + # Type ignored intentionally -- possible AttributeError expected. + msg = rep.longrepr.reprcrash.message # type: ignore[union-attr] + except AttributeError: + pass + else: + if ( + running_on_ci() or config.option.verbose >= 2 + ) and not config.option.force_short_summary: + msg = f" - {msg}" + else: + available_width = tw.fullwidth - line_width + msg = _format_trimmed(" - {}", msg, available_width) + if msg is not None: + line += msg + + return line + + +def _folded_skips( + startpath: Path, + skipped: Sequence[CollectReport], +) -> list[tuple[int, str, int | None, str]]: + d: dict[tuple[str, int | None, str], list[CollectReport]] = {} + for event in skipped: + assert event.longrepr is not None + assert isinstance(event.longrepr, tuple), (event, event.longrepr) + assert len(event.longrepr) == 3, (event, event.longrepr) + fspath, lineno, reason = event.longrepr + # For consistency, report all fspaths in relative form. + fspath = bestrelpath(startpath, Path(fspath)) + keywords = getattr(event, "keywords", {}) + # Folding reports with global pytestmark variable. + # This is a workaround, because for now we cannot identify the scope of a skip marker + # TODO: Revisit after marks scope would be fixed. + if ( + event.when == "setup" + and "skip" in keywords + and "pytestmark" not in keywords + ): + key: tuple[str, int | None, str] = (fspath, None, reason) + else: + key = (fspath, lineno, reason) + d.setdefault(key, []).append(event) + values: list[tuple[int, str, int | None, str]] = [] + for key, events in d.items(): + values.append((len(events), *key)) + return values + + +_color_for_type = { + "failed": "red", + "error": "red", + "warnings": "yellow", + "passed": "green", + "subtests passed": "green", + "subtests failed": "red", +} +_color_for_type_default = "yellow" + + +def pluralize(count: int, noun: str) -> tuple[int, str]: + # No need to pluralize words such as `failed` or `passed`. + if noun not in ["error", "warnings", "test"]: + return count, noun + + # The `warnings` key is plural. To avoid API breakage, we keep it that way but + # set it to singular here so we can determine plurality in the same way as we do + # for `error`. + noun = noun.replace("warnings", "warning") + + return count, noun + "s" if count != 1 else noun + + +def _plugin_nameversions(plugininfo) -> list[str]: + values: list[str] = [] + for plugin, dist in plugininfo: + # Gets us name and version! + name = f"{dist.project_name}-{dist.version}" + # Questionable convenience, but it keeps things short. 
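+        # Drop the "pytest-" prefix so the header line lists e.g.
+        # "cov-<version>" rather than "pytest-cov-<version>".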
+ if name.startswith("pytest-"): + name = name[7:] + # We decided to print python package names they can have more than one plugin. + if name not in values: + values.append(name) + return values + + +def format_session_duration(seconds: float) -> str: + """Format the given seconds in a human readable manner to show in the final summary.""" + if seconds < 60: + return f"{seconds:.2f}s" + else: + dt = datetime.timedelta(seconds=int(seconds)) + return f"{seconds:.2f}s ({dt})" + + +def format_node_duration(seconds: float) -> str: + """Format the given seconds in a human readable manner to show in the test progress.""" + # The formatting is designed to be compact and readable, with at most 7 characters + # for durations below 100 hours. + if seconds < 0.00001: + return f" {seconds * 1000000:.3f}us" + if seconds < 0.0001: + return f" {seconds * 1000000:.2f}us" + if seconds < 0.001: + return f" {seconds * 1000000:.1f}us" + if seconds < 0.01: + return f" {seconds * 1000:.3f}ms" + if seconds < 0.1: + return f" {seconds * 1000:.2f}ms" + if seconds < 1: + return f" {seconds * 1000:.1f}ms" + if seconds < 60: + return f" {seconds:.3f}s" + if seconds < 3600: + return f" {seconds // 60:.0f}m {seconds % 60:.0f}s" + return f" {seconds // 3600:.0f}h {(seconds % 3600) // 60:.0f}m" + + +def _get_raw_skip_reason(report: TestReport) -> str: + """Get the reason string of a skip/xfail/xpass test report. + + The string is just the part given by the user. + """ + if hasattr(report, "wasxfail"): + reason = report.wasxfail + if reason.startswith("reason: "): + reason = reason[len("reason: ") :] + return reason + else: + assert report.skipped + assert isinstance(report.longrepr, tuple) + _, _, reason = report.longrepr + if reason.startswith("Skipped: "): + reason = reason[len("Skipped: ") :] + elif reason == "Skipped": + reason = "" + return reason + + +class TerminalProgressPlugin: + """Terminal progress reporting plugin using OSC 9;4 ANSI sequences. + + Emits OSC 9;4 sequences to indicate test progress to terminal + tabs/windows/etc. + + Not all terminal emulators support this feature. + + Ref: https://conemu.github.io/en/AnsiEscapeCodes.html#ConEmu_specific_OSC + """ + + def __init__(self, tr: TerminalReporter) -> None: + self._tr = tr + self._session: Session | None = None + self._has_failures = False + + def _emit_progress( + self, + state: Literal["remove", "normal", "error", "indeterminate", "paused"], + progress: int | None = None, + ) -> None: + """Emit OSC 9;4 sequence for indicating progress to the terminal. + + :param state: + Progress state to set. + :param progress: + Progress value 0-100. Required for "normal", optional for "error" + and "paused", otherwise ignored. + """ + assert progress is None or 0 <= progress <= 100 + + # OSC 9;4 sequence: ESC ] 9 ; 4 ; state ; progress ST + # ST can be ESC \ or BEL. ESC \ seems better supported. + match state: + case "remove": + sequence = "\x1b]9;4;0;\x1b\\" + case "normal": + assert progress is not None + sequence = f"\x1b]9;4;1;{progress}\x1b\\" + case "error": + if progress is not None: + sequence = f"\x1b]9;4;2;{progress}\x1b\\" + else: + sequence = "\x1b]9;4;2;\x1b\\" + case "indeterminate": + sequence = "\x1b]9;4;3;\x1b\\" + case "paused": + if progress is not None: + sequence = f"\x1b]9;4;4;{progress}\x1b\\" + else: + sequence = "\x1b]9;4;4;\x1b\\" + + self._tr.write_raw(sequence, flush=True) + + @hookimpl + def pytest_sessionstart(self, session: Session) -> None: + self._session = session + # Show indeterminate progress during collection. 
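+        # The total number of tests is not known yet, so OSC 9;4 state 3
+        # (indeterminate) is shown until pytest_collection_finish switches to a
+        # determinate 0% bar (when any tests were collected).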
+ self._emit_progress("indeterminate") + + @hookimpl + def pytest_collection_finish(self) -> None: + assert self._session is not None + if self._session.testscollected > 0: + # Switch from indeterminate to 0% progress. + self._emit_progress("normal", 0) + + @hookimpl + def pytest_runtest_logreport(self, report: TestReport) -> None: + if report.failed: + self._has_failures = True + + # Let's consider the "call" phase for progress. + if report.when != "call": + return + + # Calculate and emit progress. + assert self._session is not None + collected = self._session.testscollected + if collected > 0: + reported = self._tr.reported_progress + progress = min(reported * 100 // collected, 100) + self._emit_progress("error" if self._has_failures else "normal", progress) + + @hookimpl + def pytest_sessionfinish(self) -> None: + self._emit_progress("remove") diff --git a/.venv/lib/python3.12/site-packages/_pytest/terminalprogress.py b/.venv/lib/python3.12/site-packages/_pytest/terminalprogress.py new file mode 100644 index 0000000..287f0d5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/terminalprogress.py @@ -0,0 +1,30 @@ +# A plugin to register the TerminalProgressPlugin plugin. +# +# This plugin is not loaded by default due to compatibility issues (#13896), +# but can be enabled in one of these ways: +# - The terminal plugin enables it in a few cases where it's safe, and not +# blocked by the user (using e.g. `-p no:terminalprogress`). +# - The user explicitly requests it, e.g. using `-p terminalprogress`. +# +# In a few years, if it's safe, we can consider enabling it by default. Then, +# this file will become unnecessary and can be inlined into terminal.py. + +from __future__ import annotations + +import os + +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.terminal import TerminalProgressPlugin +from _pytest.terminal import TerminalReporter + + +@hookimpl(trylast=True) +def pytest_configure(config: Config) -> None: + reporter: TerminalReporter | None = config.pluginmanager.get_plugin( + "terminalreporter" + ) + + if reporter is not None and reporter.isatty() and os.environ.get("TERM") != "dumb": + plugin = TerminalProgressPlugin(reporter) + config.pluginmanager.register(plugin, name="terminalprogress-plugin") diff --git a/.venv/lib/python3.12/site-packages/_pytest/threadexception.py b/.venv/lib/python3.12/site-packages/_pytest/threadexception.py new file mode 100644 index 0000000..eb57783 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/threadexception.py @@ -0,0 +1,152 @@ +from __future__ import annotations + +import collections +from collections.abc import Callable +import functools +import sys +import threading +import traceback +from typing import NamedTuple +from typing import TYPE_CHECKING +import warnings + +from _pytest.config import Config +from _pytest.nodes import Item +from _pytest.stash import StashKey +from _pytest.tracemalloc import tracemalloc_message +import pytest + + +if TYPE_CHECKING: + pass + +if sys.version_info < (3, 11): + from exceptiongroup import ExceptionGroup + + +class ThreadExceptionMeta(NamedTuple): + msg: str + cause_msg: str + exc_value: BaseException | None + + +thread_exceptions: StashKey[collections.deque[ThreadExceptionMeta | BaseException]] = ( + StashKey() +) + + +def collect_thread_exception(config: Config) -> None: + pop_thread_exception = config.stash[thread_exceptions].pop + errors: list[pytest.PytestUnhandledThreadExceptionWarning | RuntimeError] = [] + meta = None + hook_error = None 
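As the registration module above spells out, the progress plugin is opt-in for now. A minimal way to toggle it (sketch only; the `tests/` path is a placeholder) is to pass the documented `-p` option, either on the command line (`pytest -p terminalprogress`) or programmatically:

```python
import pytest

# Opt in to OSC 9;4 terminal progress for a single run.
pytest.main(["-p", "terminalprogress", "tests/"])

# Or block it outright, even if something else tries to enable it.
pytest.main(["-p", "no:terminalprogress", "tests/"])
```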
+ try: + while True: + try: + meta = pop_thread_exception() + except IndexError: + break + + if isinstance(meta, BaseException): + hook_error = RuntimeError("Failed to process thread exception") + hook_error.__cause__ = meta + errors.append(hook_error) + continue + + msg = meta.msg + try: + warnings.warn(pytest.PytestUnhandledThreadExceptionWarning(msg)) + except pytest.PytestUnhandledThreadExceptionWarning as e: + # This except happens when the warning is treated as an error (e.g. `-Werror`). + if meta.exc_value is not None: + # Exceptions have a better way to show the traceback, but + # warnings do not, so hide the traceback from the msg and + # set the cause so the traceback shows up in the right place. + e.args = (meta.cause_msg,) + e.__cause__ = meta.exc_value + errors.append(e) + + if len(errors) == 1: + raise errors[0] + if errors: + raise ExceptionGroup("multiple thread exception warnings", errors) + finally: + del errors, meta, hook_error + + +def cleanup( + *, config: Config, prev_hook: Callable[[threading.ExceptHookArgs], object] +) -> None: + try: + try: + # We don't join threads here, so exceptions raised from any + # threads still running by the time _threading_atexits joins them + # do not get captured (see #13027). + collect_thread_exception(config) + finally: + threading.excepthook = prev_hook + finally: + del config.stash[thread_exceptions] + + +def thread_exception_hook( + args: threading.ExceptHookArgs, + /, + *, + append: Callable[[ThreadExceptionMeta | BaseException], object], +) -> None: + try: + # we need to compute these strings here as they might change after + # the excepthook finishes and before the metadata object is + # collected by a pytest hook + thread_name = "" if args.thread is None else args.thread.name + summary = f"Exception in thread {thread_name}" + traceback_message = "\n\n" + "".join( + traceback.format_exception( + args.exc_type, + args.exc_value, + args.exc_traceback, + ) + ) + tracemalloc_tb = "\n" + tracemalloc_message(args.thread) + msg = summary + traceback_message + tracemalloc_tb + cause_msg = summary + tracemalloc_tb + + append( + ThreadExceptionMeta( + # Compute these strings here as they might change later + msg=msg, + cause_msg=cause_msg, + exc_value=args.exc_value, + ) + ) + except BaseException as e: + append(e) + # Raising this will cause the exception to be logged twice, once in our + # collect_thread_exception and once by sys.excepthook + # which is fine - this should never happen anyway and if it does + # it should probably be reported as a pytest bug. 
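The excepthook above only records metadata; `collect_thread_exception` later turns each entry into a `PytestUnhandledThreadExceptionWarning` at the next setup/call/teardown hook. A sketch of a test that would trigger it (file name hypothetical):

```python
# test_worker.py -- illustrative only
import threading

def _worker() -> None:
    raise RuntimeError("boom in a background thread")

def test_thread_crash_is_reported() -> None:
    t = threading.Thread(target=_worker)
    t.start()
    t.join()
    # The exception never reaches the test body; the replaced
    # threading.excepthook records it, and pytest later emits
    # PytestUnhandledThreadExceptionWarning for this item.
```

Running with the warning escalated to an error (e.g. `-W error::pytest.PytestUnhandledThreadExceptionWarning`) exercises the `except` branch above that rewrites the warning's args and cause.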
+ raise + + +def pytest_configure(config: Config) -> None: + prev_hook = threading.excepthook + deque: collections.deque[ThreadExceptionMeta | BaseException] = collections.deque() + config.stash[thread_exceptions] = deque + config.add_cleanup(functools.partial(cleanup, config=config, prev_hook=prev_hook)) + threading.excepthook = functools.partial(thread_exception_hook, append=deque.append) + + +@pytest.hookimpl(trylast=True) +def pytest_runtest_setup(item: Item) -> None: + collect_thread_exception(item.config) + + +@pytest.hookimpl(trylast=True) +def pytest_runtest_call(item: Item) -> None: + collect_thread_exception(item.config) + + +@pytest.hookimpl(trylast=True) +def pytest_runtest_teardown(item: Item) -> None: + collect_thread_exception(item.config) diff --git a/.venv/lib/python3.12/site-packages/_pytest/timing.py b/.venv/lib/python3.12/site-packages/_pytest/timing.py new file mode 100644 index 0000000..51c3db2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/timing.py @@ -0,0 +1,95 @@ +"""Indirection for time functions. + +We intentionally grab some "time" functions internally to avoid tests mocking "time" to affect +pytest runtime information (issue #185). + +Fixture "mock_timing" also interacts with this module for pytest's own tests. +""" + +from __future__ import annotations + +import dataclasses +from datetime import datetime +from datetime import timezone +from time import perf_counter +from time import sleep +from time import time +from typing import TYPE_CHECKING + + +if TYPE_CHECKING: + from pytest import MonkeyPatch + + +@dataclasses.dataclass(frozen=True) +class Instant: + """ + Represents an instant in time, used to both get the timestamp value and to measure + the duration of a time span. + + Inspired by Rust's `std::time::Instant`. + """ + + # Creation time of this instant, using time.time(), to measure actual time. + # Note: using a `lambda` to correctly get the mocked time via `MockTiming`. + time: float = dataclasses.field(default_factory=lambda: time(), init=False) + + # Performance counter tick of the instant, used to measure precise elapsed time. + # Note: using a `lambda` to correctly get the mocked time via `MockTiming`. + perf_count: float = dataclasses.field( + default_factory=lambda: perf_counter(), init=False + ) + + def elapsed(self) -> Duration: + """Measure the duration since `Instant` was created.""" + return Duration(start=self, stop=Instant()) + + def as_utc(self) -> datetime: + """Instant as UTC datetime.""" + return datetime.fromtimestamp(self.time, timezone.utc) + + +@dataclasses.dataclass(frozen=True) +class Duration: + """A span of time as measured by `Instant.elapsed()`.""" + + start: Instant + stop: Instant + + @property + def seconds(self) -> float: + """Elapsed time of the duration in seconds, measured using a performance counter for precise timing.""" + return self.stop.perf_count - self.start.perf_count + + +@dataclasses.dataclass +class MockTiming: + """Mocks _pytest.timing with a known object that can be used to control timing in tests + deterministically. + + pytest itself should always use functions from `_pytest.timing` instead of `time` directly. + + This then allows us more control over time during testing, if testing code also + uses `_pytest.timing` functions. 
+ + Time is static, and only advances through `sleep` calls, thus tests might sleep over large + numbers and obtain accurate time() calls at the end, making tests reliable and instant.""" + + _current_time: float = datetime(2020, 5, 22, 14, 20, 50).timestamp() + + def sleep(self, seconds: float) -> None: + self._current_time += seconds + + def time(self) -> float: + return self._current_time + + def patch(self, monkeypatch: MonkeyPatch) -> None: + # pylint: disable-next=import-self + from _pytest import timing # noqa: PLW0406 + + monkeypatch.setattr(timing, "sleep", self.sleep) + monkeypatch.setattr(timing, "time", self.time) + monkeypatch.setattr(timing, "perf_counter", self.time) + + +__all__ = ["perf_counter", "sleep", "time"] diff --git a/.venv/lib/python3.12/site-packages/_pytest/tmpdir.py b/.venv/lib/python3.12/site-packages/_pytest/tmpdir.py new file mode 100644 index 0000000..855ad27 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/tmpdir.py @@ -0,0 +1,315 @@ +# mypy: allow-untyped-defs +"""Support for providing temporary directories to test functions.""" + +from __future__ import annotations + +from collections.abc import Generator +import dataclasses +import os +from pathlib import Path +import re +from shutil import rmtree +import tempfile +from typing import Any +from typing import final +from typing import Literal + +from .pathlib import cleanup_dead_symlinks +from .pathlib import LOCK_TIMEOUT +from .pathlib import make_numbered_dir +from .pathlib import make_numbered_dir_with_cleanup +from .pathlib import rm_rf +from _pytest.compat import get_user_id +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.monkeypatch import MonkeyPatch +from _pytest.nodes import Item +from _pytest.reports import TestReport +from _pytest.stash import StashKey + + +tmppath_result_key = StashKey[dict[str, bool]]() +RetentionType = Literal["all", "failed", "none"] + + +@final +@dataclasses.dataclass +class TempPathFactory: + """Factory for temporary directories under the common base temp directory, + as discussed at :ref:`temporary directory location and retention`. + """ + + _given_basetemp: Path | None + # pluggy TagTracerSub, not currently exposed, so Any. + _trace: Any + _basetemp: Path | None + _retention_count: int + _retention_policy: RetentionType + + def __init__( + self, + given_basetemp: Path | None, + retention_count: int, + retention_policy: RetentionType, + trace, + basetemp: Path | None = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + if given_basetemp is None: + self._given_basetemp = None + else: + # Use os.path.abspath() to get absolute path instead of resolve() as it + # does not work the same in all platforms (see #4427). + # Path.absolute() exists, but it is not public (see https://bugs.python.org/issue25012). + self._given_basetemp = Path(os.path.abspath(str(given_basetemp))) + self._trace = trace + self._retention_count = retention_count + self._retention_policy = retention_policy + self._basetemp = basetemp + + @classmethod + def from_config( + cls, + config: Config, + *, + _ispytest: bool = False, + ) -> TempPathFactory: + """Create a factory according to pytest configuration. 
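A short sketch of how the `Instant`/`Duration` pair and `MockTiming` are meant to be used together, assuming the module is imported as `_pytest.timing` as in this tree (the test below is illustrative and relies on the standard `monkeypatch` fixture):

```python
from _pytest import timing

# Real timing: measure a span with a perf_counter-backed Duration.
start = timing.Instant()
timing.sleep(0.01)
span = start.elapsed()
assert span.seconds > 0
print(start.as_utc(), span.seconds)

# Deterministic timing: patch sleep/time/perf_counter in one go.
def test_mocked_clock(monkeypatch) -> None:
    mock = timing.MockTiming()
    mock.patch(monkeypatch)
    before = timing.Instant()
    timing.sleep(60)  # advances the fake clock instantly, no real wait
    assert before.elapsed().seconds == 60
```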
+ + :meta private: + """ + check_ispytest(_ispytest) + count = int(config.getini("tmp_path_retention_count")) + if count < 0: + raise ValueError( + f"tmp_path_retention_count must be >= 0. Current input: {count}." + ) + + policy = config.getini("tmp_path_retention_policy") + if policy not in ("all", "failed", "none"): + raise ValueError( + f"tmp_path_retention_policy must be either all, failed, none. Current input: {policy}." + ) + + return cls( + given_basetemp=config.option.basetemp, + trace=config.trace.get("tmpdir"), + retention_count=count, + retention_policy=policy, + _ispytest=True, + ) + + def _ensure_relative_to_basetemp(self, basename: str) -> str: + basename = os.path.normpath(basename) + if (self.getbasetemp() / basename).resolve().parent != self.getbasetemp(): + raise ValueError(f"{basename} is not a normalized and relative path") + return basename + + def mktemp(self, basename: str, numbered: bool = True) -> Path: + """Create a new temporary directory managed by the factory. + + :param basename: + Directory base name, must be a relative path. + + :param numbered: + If ``True``, ensure the directory is unique by adding a numbered + suffix greater than any existing one: ``basename="foo-"`` and ``numbered=True`` + means that this function will create directories named ``"foo-0"``, + ``"foo-1"``, ``"foo-2"`` and so on. + + :returns: + The path to the new directory. + """ + basename = self._ensure_relative_to_basetemp(basename) + if not numbered: + p = self.getbasetemp().joinpath(basename) + p.mkdir(mode=0o700) + else: + p = make_numbered_dir(root=self.getbasetemp(), prefix=basename, mode=0o700) + self._trace("mktemp", p) + return p + + def getbasetemp(self) -> Path: + """Return the base temporary directory, creating it if needed. + + :returns: + The base temporary directory. + """ + if self._basetemp is not None: + return self._basetemp + + if self._given_basetemp is not None: + basetemp = self._given_basetemp + if basetemp.exists(): + rm_rf(basetemp) + basetemp.mkdir(mode=0o700) + basetemp = basetemp.resolve() + else: + from_env = os.environ.get("PYTEST_DEBUG_TEMPROOT") + temproot = Path(from_env or tempfile.gettempdir()).resolve() + user = get_user() or "unknown" + # use a sub-directory in the temproot to speed-up + # make_numbered_dir() call + rootdir = temproot.joinpath(f"pytest-of-{user}") + try: + rootdir.mkdir(mode=0o700, exist_ok=True) + except OSError: + # getuser() likely returned illegal characters for the platform, use unknown back off mechanism + rootdir = temproot.joinpath("pytest-of-unknown") + rootdir.mkdir(mode=0o700, exist_ok=True) + # Because we use exist_ok=True with a predictable name, make sure + # we are the owners, to prevent any funny business (on unix, where + # temproot is usually shared). + # Also, to keep things private, fixup any world-readable temp + # rootdir's permissions. Historically 0o755 was used, so we can't + # just error out on this, at least for a while. + uid = get_user_id() + if uid is not None: + rootdir_stat = rootdir.stat() + if rootdir_stat.st_uid != uid: + raise OSError( + f"The temporary directory {rootdir} is not owned by the current user. " + "Fix this and try again." 
+ ) + if (rootdir_stat.st_mode & 0o077) != 0: + os.chmod(rootdir, rootdir_stat.st_mode & ~0o077) + keep = self._retention_count + if self._retention_policy == "none": + keep = 0 + basetemp = make_numbered_dir_with_cleanup( + prefix="pytest-", + root=rootdir, + keep=keep, + lock_timeout=LOCK_TIMEOUT, + mode=0o700, + ) + assert basetemp is not None, basetemp + self._basetemp = basetemp + self._trace("new basetemp", basetemp) + return basetemp + + +def get_user() -> str | None: + """Return the current user name, or None if getuser() does not work + in the current environment (see #1010).""" + try: + # In some exotic environments, getpass may not be importable. + import getpass + + return getpass.getuser() + except (ImportError, OSError, KeyError): + return None + + +def pytest_configure(config: Config) -> None: + """Create a TempPathFactory and attach it to the config object. + + This is to comply with existing plugins which expect the handler to be + available at pytest_configure time, but ideally should be moved entirely + to the tmp_path_factory session fixture. + """ + mp = MonkeyPatch() + config.add_cleanup(mp.undo) + _tmp_path_factory = TempPathFactory.from_config(config, _ispytest=True) + mp.setattr(config, "_tmp_path_factory", _tmp_path_factory, raising=False) + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "tmp_path_retention_count", + help="How many sessions should we keep the `tmp_path` directories, according to `tmp_path_retention_policy`.", + default="3", + # NOTE: Would have been better as an `int` but can't change it now. + type="string", + ) + + parser.addini( + "tmp_path_retention_policy", + help="Controls which directories created by the `tmp_path` fixture are kept around, based on test outcome. " + "(all/failed/none)", + type="string", + default="all", + ) + + +@fixture(scope="session") +def tmp_path_factory(request: FixtureRequest) -> TempPathFactory: + """Return a :class:`pytest.TempPathFactory` instance for the test session.""" + # Set dynamically by pytest_configure() above. + return request.config._tmp_path_factory # type: ignore + + +def _mk_tmp(request: FixtureRequest, factory: TempPathFactory) -> Path: + name = request.node.name + name = re.sub(r"[\W]", "_", name) + MAXVAL = 30 + name = name[:MAXVAL] + return factory.mktemp(name, numbered=True) + + +@fixture +def tmp_path( + request: FixtureRequest, tmp_path_factory: TempPathFactory +) -> Generator[Path]: + """Return a temporary directory (as :class:`pathlib.Path` object) + which is unique to each test function invocation. + The temporary directory is created as a subdirectory + of the base temporary directory, with configurable retention, + as discussed in :ref:`temporary directory location and retention`. + """ + path = _mk_tmp(request, tmp_path_factory) + yield path + + # Remove the tmpdir if the policy is "failed" and the test passed. + policy = tmp_path_factory._retention_policy + result_dict = request.node.stash[tmppath_result_key] + + if policy == "failed" and result_dict.get("call", True): + # We do a "best effort" to remove files, but it might not be possible due to some leaked resource, + # permissions, etc, in which case we ignore it. + rmtree(path, ignore_errors=True) + + del request.node.stash[tmppath_result_key] + + +def pytest_sessionfinish(session, exitstatus: int | ExitCode): + """After each session, remove base directory if all the tests passed, + the policy is "failed", and the basetemp is not specified by a user. 
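Putting the pieces above together: the fixtures are used directly from test functions, and the two `tmp_path_retention_*` ini options control what survives the run. Everything below is an illustrative sketch (file name and test bodies are made up):

```python
# test_tmp.py -- illustrative only
from pathlib import Path

import pytest

def test_writes_a_file(tmp_path: Path) -> None:
    out = tmp_path / "report.txt"
    out.write_text("hello")
    assert out.read_text() == "hello"

def test_shared_dir(tmp_path_factory: pytest.TempPathFactory) -> None:
    # Numbered directories: data-0, data-1, ... under the session basetemp.
    data_dir = tmp_path_factory.mktemp("data-")
    assert data_dir.is_dir()

# The retention knobs registered above, as pytest.ini entries:
#   tmp_path_retention_count = 3        (default)
#   tmp_path_retention_policy = failed  (keep directories only for failing tests)
```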
+ """ + tmp_path_factory: TempPathFactory = session.config._tmp_path_factory + basetemp = tmp_path_factory._basetemp + if basetemp is None: + return + + policy = tmp_path_factory._retention_policy + if ( + exitstatus == 0 + and policy == "failed" + and tmp_path_factory._given_basetemp is None + ): + if basetemp.is_dir(): + # We do a "best effort" to remove files, but it might not be possible due to some leaked resource, + # permissions, etc, in which case we ignore it. + rmtree(basetemp, ignore_errors=True) + + # Remove dead symlinks. + if basetemp.is_dir(): + cleanup_dead_symlinks(basetemp) + + +@hookimpl(wrapper=True, tryfirst=True) +def pytest_runtest_makereport( + item: Item, call +) -> Generator[None, TestReport, TestReport]: + rep = yield + assert rep.when is not None + empty: dict[str, bool] = {} + item.stash.setdefault(tmppath_result_key, empty)[rep.when] = rep.passed + return rep diff --git a/.venv/lib/python3.12/site-packages/_pytest/tracemalloc.py b/.venv/lib/python3.12/site-packages/_pytest/tracemalloc.py new file mode 100644 index 0000000..5d0b198 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/tracemalloc.py @@ -0,0 +1,24 @@ +from __future__ import annotations + + +def tracemalloc_message(source: object) -> str: + if source is None: + return "" + + try: + import tracemalloc + except ImportError: + return "" + + tb = tracemalloc.get_object_traceback(source) + if tb is not None: + formatted_tb = "\n".join(tb.format()) + # Use a leading new line to better separate the (large) output + # from the traceback to the previous warning text. + return f"\nObject allocated at:\n{formatted_tb}" + # No need for a leading new line. + url = "https://docs.pytest.org/en/stable/how-to/capture-warnings.html#resource-warnings" + return ( + "Enable tracemalloc to get traceback where the object was allocated.\n" + f"See {url} for more info." 
+ ) diff --git a/.venv/lib/python3.12/site-packages/_pytest/unittest.py b/.venv/lib/python3.12/site-packages/_pytest/unittest.py new file mode 100644 index 0000000..23b9272 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/unittest.py @@ -0,0 +1,628 @@ +# mypy: allow-untyped-defs +"""Discover and run std-library "unittest" style tests.""" + +from __future__ import annotations + +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Iterator +from enum import auto +from enum import Enum +import inspect +import sys +import traceback +import types +from typing import Any +from typing import TYPE_CHECKING +from unittest import TestCase + +import _pytest._code +from _pytest._code import ExceptionInfo +from _pytest.compat import assert_never +from _pytest.compat import is_async_function +from _pytest.config import hookimpl +from _pytest.fixtures import FixtureRequest +from _pytest.monkeypatch import MonkeyPatch +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.outcomes import exit +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.outcomes import xfail +from _pytest.python import Class +from _pytest.python import Function +from _pytest.python import Module +from _pytest.runner import CallInfo +from _pytest.runner import check_interactive_exception +from _pytest.subtests import SubtestContext +from _pytest.subtests import SubtestReport + + +if sys.version_info[:2] < (3, 11): + from exceptiongroup import ExceptionGroup + +if TYPE_CHECKING: + from types import TracebackType + import unittest + + import twisted.trial.unittest + + +_SysExcInfoType = ( + tuple[type[BaseException], BaseException, types.TracebackType] + | tuple[None, None, None] +) + + +def pytest_pycollect_makeitem( + collector: Module | Class, name: str, obj: object +) -> UnitTestCase | None: + try: + # Has unittest been imported? + ut = sys.modules["unittest"] + # Is obj a subclass of unittest.TestCase? + # Type ignored because `ut` is an opaque module. + if not issubclass(obj, ut.TestCase): # type: ignore + return None + except Exception: + return None + # Is obj a concrete class? + # Abstract classes can't be instantiated so no point collecting them. + if inspect.isabstract(obj): + return None + # Yes, so let's collect it. + return UnitTestCase.from_parent(collector, name=name, obj=obj) + + +class UnitTestCase(Class): + # Marker for fixturemanger.getfixtureinfo() + # to declare that our children do not support funcargs. + nofuncargs = True + + def newinstance(self): + # TestCase __init__ takes the method (test) name. The TestCase + # constructor treats the name "runTest" as a special no-op, so it can be + # used when a dummy instance is needed. While unittest.TestCase has a + # default, some subclasses omit the default (#9610), so always supply + # it. 
+ return self.obj("runTest") + + def collect(self) -> Iterable[Item | Collector]: + from unittest import TestLoader + + cls = self.obj + if not getattr(cls, "__test__", True): + return + + skipped = _is_skipped(cls) + if not skipped: + self._register_unittest_setup_method_fixture(cls) + self._register_unittest_setup_class_fixture(cls) + self._register_setup_class_fixture() + + self.session._fixturemanager.parsefactories(self.newinstance(), self.nodeid) + + loader = TestLoader() + foundsomething = False + for name in loader.getTestCaseNames(self.obj): + x = getattr(self.obj, name) + if not getattr(x, "__test__", True): + continue + yield TestCaseFunction.from_parent(self, name=name) + foundsomething = True + + if not foundsomething: + runtest = getattr(self.obj, "runTest", None) + if runtest is not None: + ut = sys.modules.get("twisted.trial.unittest", None) + if ut is None or runtest != ut.TestCase.runTest: + yield TestCaseFunction.from_parent(self, name="runTest") + + def _register_unittest_setup_class_fixture(self, cls: type) -> None: + """Register an auto-use fixture to invoke setUpClass and + tearDownClass (#517).""" + setup = getattr(cls, "setUpClass", None) + teardown = getattr(cls, "tearDownClass", None) + if setup is None and teardown is None: + return None + cleanup = getattr(cls, "doClassCleanups", lambda: None) + + def process_teardown_exceptions() -> None: + # tearDown_exceptions is a list set in the class containing exc_infos for errors during + # teardown for the class. + exc_infos = getattr(cls, "tearDown_exceptions", None) + if not exc_infos: + return + exceptions = [exc for (_, exc, _) in exc_infos] + # If a single exception, raise it directly as this provides a more readable + # error (hopefully this will improve in #12255). + if len(exceptions) == 1: + raise exceptions[0] + else: + raise ExceptionGroup("Unittest class cleanup errors", exceptions) + + def unittest_setup_class_fixture( + request: FixtureRequest, + ) -> Generator[None]: + cls = request.cls + if _is_skipped(cls): + reason = cls.__unittest_skip_why__ + raise skip.Exception(reason, _use_item_location=True) + if setup is not None: + try: + setup() + # unittest does not call the cleanup function for every BaseException, so we + # follow this here. + except Exception: + cleanup() + process_teardown_exceptions() + raise + yield + try: + if teardown is not None: + teardown() + finally: + cleanup() + process_teardown_exceptions() + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. + name=f"_unittest_setUpClass_fixture_{cls.__qualname__}", + func=unittest_setup_class_fixture, + nodeid=self.nodeid, + scope="class", + autouse=True, + ) + + def _register_unittest_setup_method_fixture(self, cls: type) -> None: + """Register an auto-use fixture to invoke setup_method and + teardown_method (#517).""" + setup = getattr(cls, "setup_method", None) + teardown = getattr(cls, "teardown_method", None) + if setup is None and teardown is None: + return None + + def unittest_setup_method_fixture( + request: FixtureRequest, + ) -> Generator[None]: + self = request.instance + if _is_skipped(self): + reason = self.__unittest_skip_why__ + raise skip.Exception(reason, _use_item_location=True) + if setup is not None: + setup(self, request.function) + yield + if teardown is not None: + teardown(self, request.function) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. 
+ name=f"_unittest_setup_method_fixture_{cls.__qualname__}", + func=unittest_setup_method_fixture, + nodeid=self.nodeid, + scope="function", + autouse=True, + ) + + +class TestCaseFunction(Function): + nofuncargs = True + failfast = False + _excinfo: list[_pytest._code.ExceptionInfo[BaseException]] | None = None + + def _getinstance(self): + assert isinstance(self.parent, UnitTestCase) + return self.parent.obj(self.name) + + # Backward compat for pytest-django; can be removed after pytest-django + # updates + some slack. + @property + def _testcase(self): + return self.instance + + def setup(self) -> None: + # A bound method to be called during teardown() if set (see 'runtest()'). + self._explicit_tearDown: Callable[[], None] | None = None + super().setup() + if sys.version_info < (3, 11): + # A cache of the subTest errors and non-subtest skips in self._outcome. + # Compute and cache these lists once, instead of computing them again and again for each subtest (#13965). + self._cached_errors_and_skips: tuple[list[Any], list[Any]] | None = None + + def teardown(self) -> None: + if self._explicit_tearDown is not None: + self._explicit_tearDown() + self._explicit_tearDown = None + self._obj = None + del self._instance + super().teardown() + + def startTest(self, testcase: unittest.TestCase) -> None: + pass + + def _addexcinfo(self, rawexcinfo: _SysExcInfoType) -> None: + rawexcinfo = _handle_twisted_exc_info(rawexcinfo) + try: + excinfo = _pytest._code.ExceptionInfo[BaseException].from_exc_info( + rawexcinfo # type: ignore[arg-type] + ) + # Invoke the attributes to trigger storing the traceback + # trial causes some issue there. + _ = excinfo.value + _ = excinfo.traceback + except TypeError: + try: + try: + values = traceback.format_exception(*rawexcinfo) + values.insert( + 0, + "NOTE: Incompatible Exception Representation, " + "displaying natively:\n\n", + ) + fail("".join(values), pytrace=False) + except (fail.Exception, KeyboardInterrupt): + raise + except BaseException: + fail( + "ERROR: Unknown Incompatible Exception " + f"representation:\n{rawexcinfo!r}", + pytrace=False, + ) + except KeyboardInterrupt: + raise + except fail.Exception: + excinfo = _pytest._code.ExceptionInfo.from_current() + self.__dict__.setdefault("_excinfo", []).append(excinfo) + + def addError( + self, testcase: unittest.TestCase, rawexcinfo: _SysExcInfoType + ) -> None: + try: + if isinstance(rawexcinfo[1], exit.Exception): + exit(rawexcinfo[1].msg) + except TypeError: + pass + self._addexcinfo(rawexcinfo) + + def addFailure( + self, testcase: unittest.TestCase, rawexcinfo: _SysExcInfoType + ) -> None: + self._addexcinfo(rawexcinfo) + + def addSkip( + self, testcase: unittest.TestCase, reason: str, *, handle_subtests: bool = True + ) -> None: + from unittest.case import _SubTest # type: ignore[attr-defined] + + def add_skip() -> None: + try: + raise skip.Exception(reason, _use_item_location=True) + except skip.Exception: + self._addexcinfo(sys.exc_info()) + + if not handle_subtests: + add_skip() + return + + if isinstance(testcase, _SubTest): + add_skip() + if self._excinfo is not None: + exc_info = self._excinfo[-1] + self.addSubTest(testcase.test_case, testcase, exc_info) + else: + # For python < 3.11: the non-subtest skips have to be added by `add_skip` only after all subtest + # failures are processed by `_addSubTest`: `self.instance._outcome` has no attribute + # `skipped/errors` anymore. 
+ # We also need to check if `self.instance._outcome` is `None` (this happens if the test + # class/method is decorated with `unittest.skip`, see pytest-dev/pytest-subtests#173). + if sys.version_info < (3, 11) and self.instance._outcome is not None: + subtest_errors, _ = self._obtain_errors_and_skips() + if len(subtest_errors) == 0: + add_skip() + else: + add_skip() + + def addExpectedFailure( + self, + testcase: unittest.TestCase, + rawexcinfo: _SysExcInfoType, + reason: str = "", + ) -> None: + try: + xfail(str(reason)) + except xfail.Exception: + self._addexcinfo(sys.exc_info()) + + def addUnexpectedSuccess( + self, + testcase: unittest.TestCase, + reason: twisted.trial.unittest.Todo | None = None, + ) -> None: + msg = "Unexpected success" + if reason: + msg += f": {reason.reason}" + # Preserve unittest behaviour - fail the test. Explicitly not an XPASS. + try: + fail(msg, pytrace=False) + except fail.Exception: + self._addexcinfo(sys.exc_info()) + + def addSuccess(self, testcase: unittest.TestCase) -> None: + pass + + def stopTest(self, testcase: unittest.TestCase) -> None: + pass + + def addDuration(self, testcase: unittest.TestCase, elapsed: float) -> None: + pass + + def runtest(self) -> None: + from _pytest.debugging import maybe_wrap_pytest_function_for_tracing + + testcase = self.instance + assert testcase is not None + + maybe_wrap_pytest_function_for_tracing(self) + + # Let the unittest framework handle async functions. + if is_async_function(self.obj): + testcase(result=self) + else: + # When --pdb is given, we want to postpone calling tearDown() otherwise + # when entering the pdb prompt, tearDown() would have probably cleaned up + # instance variables, which makes it difficult to debug. + # Arguably we could always postpone tearDown(), but this changes the moment where the + # TestCase instance interacts with the results object, so better to only do it + # when absolutely needed. + # We need to consider if the test itself is skipped, or the whole class. + assert isinstance(self.parent, UnitTestCase) + skipped = _is_skipped(self.obj) or _is_skipped(self.parent.obj) + if self.config.getoption("usepdb") and not skipped: + self._explicit_tearDown = testcase.tearDown + setattr(testcase, "tearDown", lambda *args: None) + + # We need to update the actual bound method with self.obj, because + # wrap_pytest_function_for_tracing replaces self.obj by a wrapper. 
+ setattr(testcase, self.name, self.obj) + try: + testcase(result=self) + finally: + delattr(testcase, self.name) + + def _traceback_filter( + self, excinfo: _pytest._code.ExceptionInfo[BaseException] + ) -> _pytest._code.Traceback: + traceback = super()._traceback_filter(excinfo) + ntraceback = traceback.filter( + lambda x: not x.frame.f_globals.get("__unittest"), + ) + if not ntraceback: + ntraceback = traceback + return ntraceback + + def addSubTest( + self, + test_case: Any, + test: TestCase, + exc_info: ExceptionInfo[BaseException] + | tuple[type[BaseException], BaseException, TracebackType] + | None, + ) -> None: + exception_info: ExceptionInfo[BaseException] | None + match exc_info: + case tuple(): + exception_info = ExceptionInfo(exc_info, _ispytest=True) + case ExceptionInfo() | None: + exception_info = exc_info + case unreachable: + assert_never(unreachable) + + call_info = CallInfo[None]( + None, + exception_info, + start=0, + stop=0, + duration=0, + when="call", + _ispytest=True, + ) + msg = test._message if isinstance(test._message, str) else None # type: ignore[attr-defined] + report = self.ihook.pytest_runtest_makereport(item=self, call=call_info) + sub_report = SubtestReport._new( + report, + SubtestContext(msg=msg, kwargs=dict(test.params)), # type: ignore[attr-defined] + captured_output=None, + captured_logs=None, + ) + self.ihook.pytest_runtest_logreport(report=sub_report) + if check_interactive_exception(call_info, sub_report): + self.ihook.pytest_exception_interact( + node=self, call=call_info, report=sub_report + ) + + # For python < 3.11: add non-subtest skips once all subtest failures are processed by # `_addSubTest`. + if sys.version_info < (3, 11): + subtest_errors, non_subtest_skip = self._obtain_errors_and_skips() + + # Check if we have non-subtest skips: if there are also sub failures, non-subtest skips are not treated in + # `_addSubTest` and have to be added using `add_skip` after all subtest failures are processed. + if len(non_subtest_skip) > 0 and len(subtest_errors) > 0: + # Make sure we have processed the last subtest failure + last_subset_error = subtest_errors[-1] + if exc_info is last_subset_error[-1]: + # Add non-subtest skips (as they could not be treated in `_addSkip`) + for testcase, reason in non_subtest_skip: + self.addSkip(testcase, reason, handle_subtests=False) + + def _obtain_errors_and_skips(self) -> tuple[list[Any], list[Any]]: + """Compute or obtain the cached values for subtest errors and non-subtest skips.""" + from unittest.case import _SubTest # type: ignore[attr-defined] + + assert sys.version_info < (3, 11), ( + "This workaround only should be used in Python 3.10" + ) + if self._cached_errors_and_skips is not None: + return self._cached_errors_and_skips + + subtest_errors = [ + (x, y) + for x, y in self.instance._outcome.errors + if isinstance(x, _SubTest) and y is not None + ] + + non_subtest_skips = [ + (x, y) + for x, y in self.instance._outcome.skipped + if not isinstance(x, _SubTest) + ] + self._cached_errors_and_skips = (subtest_errors, non_subtest_skips) + return subtest_errors, non_subtest_skips + + +@hookimpl(tryfirst=True) +def pytest_runtest_makereport(item: Item, call: CallInfo[None]) -> None: + if isinstance(item, TestCaseFunction): + if item._excinfo: + call.excinfo = item._excinfo.pop(0) + try: + del call.result + except AttributeError: + pass + + # Convert unittest.SkipTest to pytest.skip. + # This covers explicit `raise unittest.SkipTest`. 
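For reference, this is the kind of test case the code above collects and runs, including the `subTest` path that `addSubTest` turns into `SubtestReport`s; the example is plain stdlib `unittest`, nothing pytest-specific:

```python
# test_legacy.py -- illustrative only
import unittest

class TestMath(unittest.TestCase):
    def setUp(self) -> None:
        self.base = 10

    def test_addition(self) -> None:
        self.assertEqual(self.base + 5, 15)

    def test_many_inputs(self) -> None:
        for n in (1, 2, 3):
            with self.subTest(n=n):
                self.assertGreater(self.base + n, 10)
```

pytest collects `TestMath` through `UnitTestCase`, runs each method as a `TestCaseFunction`, and routes every `subTest` failure or skip through the report machinery shown above.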
+ unittest = sys.modules.get("unittest") + if unittest and call.excinfo and isinstance(call.excinfo.value, unittest.SkipTest): + excinfo = call.excinfo + call2 = CallInfo[None].from_call(lambda: skip(str(excinfo.value)), call.when) + call.excinfo = call2.excinfo + + +def _is_skipped(obj) -> bool: + """Return True if the given object has been marked with @unittest.skip.""" + return bool(getattr(obj, "__unittest_skip__", False)) + + +def pytest_configure() -> None: + """Register the TestCaseFunction class as an IReporter if twisted.trial is available.""" + if _get_twisted_version() is not TwistedVersion.NotInstalled: + from twisted.trial.itrial import IReporter + from zope.interface import classImplements + + classImplements(TestCaseFunction, IReporter) + + +class TwistedVersion(Enum): + """ + The Twisted version installed in the environment. + + We have different workarounds in place for different versions of Twisted. + """ + + # Twisted version 24 or prior. + Version24 = auto() + # Twisted version 25 or later. + Version25 = auto() + # Twisted version is not available. + NotInstalled = auto() + + +def _get_twisted_version() -> TwistedVersion: + # We need to check if "twisted.trial.unittest" is specifically present in sys.modules. + # This is because we intend to integrate with Trial only when it's actively running + # the test suite, but not needed when only other Twisted components are in use. + if "twisted.trial.unittest" not in sys.modules: + return TwistedVersion.NotInstalled + + import importlib.metadata + + import packaging.version + + version_str = importlib.metadata.version("twisted") + version = packaging.version.parse(version_str) + if version.major <= 24: + return TwistedVersion.Version24 + else: + return TwistedVersion.Version25 + + +# Name of the attribute in `twisted.python.Failure` instances that stores +# the `sys.exc_info()` tuple. +# See twisted.trial support in `pytest_runtest_protocol`. +TWISTED_RAW_EXCINFO_ATTR = "_twisted_raw_excinfo" + + +@hookimpl(wrapper=True) +def pytest_runtest_protocol(item: Item) -> Iterator[None]: + if _get_twisted_version() is TwistedVersion.Version24: + import twisted.python.failure as ut + + # Monkeypatch `Failure.__init__` to store the raw exception info. + original__init__ = ut.Failure.__init__ + + def store_raw_exception_info( + self, exc_value=None, exc_type=None, exc_tb=None, captureVars=None + ): # pragma: no cover + if exc_value is None: + raw_exc_info = sys.exc_info() + else: + if exc_type is None: + exc_type = type(exc_value) + if exc_tb is None: + exc_tb = sys.exc_info()[2] + raw_exc_info = (exc_type, exc_value, exc_tb) + setattr(self, TWISTED_RAW_EXCINFO_ATTR, tuple(raw_exc_info)) + try: + original__init__( + self, exc_value, exc_type, exc_tb, captureVars=captureVars + ) + except TypeError: # pragma: no cover + original__init__(self, exc_value, exc_type, exc_tb) + + with MonkeyPatch.context() as patcher: + patcher.setattr(ut.Failure, "__init__", store_raw_exception_info) + return (yield) + else: + return (yield) + + +def _handle_twisted_exc_info( + rawexcinfo: _SysExcInfoType | BaseException, +) -> _SysExcInfoType: + """ + Twisted passes a custom Failure instance to `addError()` instead of using `sys.exc_info()`. + Therefore, if `rawexcinfo` is a `Failure` instance, convert it into the equivalent `sys.exc_info()` tuple + as expected by pytest. 
+ """ + twisted_version = _get_twisted_version() + if twisted_version is TwistedVersion.NotInstalled: + # Unfortunately, because we cannot import `twisted.python.failure` at the top of the file + # and use it in the signature, we need to use `type:ignore` here because we cannot narrow + # the type properly in the `if` statement above. + return rawexcinfo # type:ignore[return-value] + elif twisted_version is TwistedVersion.Version24: + # Twisted calls addError() passing its own classes (like `twisted.python.Failure`), which violates + # the `addError()` signature, so we extract the original `sys.exc_info()` tuple which is stored + # in the object. + if hasattr(rawexcinfo, TWISTED_RAW_EXCINFO_ATTR): + saved_exc_info = getattr(rawexcinfo, TWISTED_RAW_EXCINFO_ATTR) + # Delete the attribute from the original object to avoid leaks. + delattr(rawexcinfo, TWISTED_RAW_EXCINFO_ATTR) + return saved_exc_info # type:ignore[no-any-return] + return rawexcinfo # type:ignore[return-value] + elif twisted_version is TwistedVersion.Version25: + if isinstance(rawexcinfo, BaseException): + import twisted.python.failure + + if isinstance(rawexcinfo, twisted.python.failure.Failure): + tb = rawexcinfo.__traceback__ + if tb is None: + tb = sys.exc_info()[2] + return type(rawexcinfo.value), rawexcinfo.value, tb + + return rawexcinfo # type:ignore[return-value] + else: + # Ideally we would use assert_never() here, but it is not available in all Python versions + # we support, plus we do not require `type_extensions` currently. + assert False, f"Unexpected Twisted version: {twisted_version}" diff --git a/.venv/lib/python3.12/site-packages/_pytest/unraisableexception.py b/.venv/lib/python3.12/site-packages/_pytest/unraisableexception.py new file mode 100644 index 0000000..0faca36 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/unraisableexception.py @@ -0,0 +1,163 @@ +from __future__ import annotations + +import collections +from collections.abc import Callable +import functools +import gc +import sys +import traceback +from typing import NamedTuple +from typing import TYPE_CHECKING +import warnings + +from _pytest.config import Config +from _pytest.nodes import Item +from _pytest.stash import StashKey +from _pytest.tracemalloc import tracemalloc_message +import pytest + + +if TYPE_CHECKING: + pass + +if sys.version_info < (3, 11): + from exceptiongroup import ExceptionGroup + + +# This is a stash item and not a simple constant to allow pytester to override it. +gc_collect_iterations_key = StashKey[int]() + + +def gc_collect_harder(iterations: int) -> None: + for _ in range(iterations): + gc.collect() + + +class UnraisableMeta(NamedTuple): + msg: str + cause_msg: str + exc_value: BaseException | None + + +unraisable_exceptions: StashKey[collections.deque[UnraisableMeta | BaseException]] = ( + StashKey() +) + + +def collect_unraisable(config: Config) -> None: + pop_unraisable = config.stash[unraisable_exceptions].pop + errors: list[pytest.PytestUnraisableExceptionWarning | RuntimeError] = [] + meta = None + hook_error = None + try: + while True: + try: + meta = pop_unraisable() + except IndexError: + break + + if isinstance(meta, BaseException): + hook_error = RuntimeError("Failed to process unraisable exception") + hook_error.__cause__ = meta + errors.append(hook_error) + continue + + msg = meta.msg + try: + warnings.warn(pytest.PytestUnraisableExceptionWarning(msg)) + except pytest.PytestUnraisableExceptionWarning as e: + # This except happens when the warning is treated as an error (e.g. `-Werror`). 
+ if meta.exc_value is not None: + # Exceptions have a better way to show the traceback, but + # warnings do not, so hide the traceback from the msg and + # set the cause so the traceback shows up in the right place. + e.args = (meta.cause_msg,) + e.__cause__ = meta.exc_value + errors.append(e) + + if len(errors) == 1: + raise errors[0] + if errors: + raise ExceptionGroup("multiple unraisable exception warnings", errors) + finally: + del errors, meta, hook_error + + +def cleanup( + *, config: Config, prev_hook: Callable[[sys.UnraisableHookArgs], object] +) -> None: + # A single collection doesn't necessarily collect everything. + # Constant determined experimentally by the Trio project. + gc_collect_iterations = config.stash.get(gc_collect_iterations_key, 5) + try: + try: + gc_collect_harder(gc_collect_iterations) + collect_unraisable(config) + finally: + sys.unraisablehook = prev_hook + finally: + del config.stash[unraisable_exceptions] + + +def unraisable_hook( + unraisable: sys.UnraisableHookArgs, + /, + *, + append: Callable[[UnraisableMeta | BaseException], object], +) -> None: + try: + # we need to compute these strings here as they might change after + # the unraisablehook finishes and before the metadata object is + # collected by a pytest hook + err_msg = ( + "Exception ignored in" if unraisable.err_msg is None else unraisable.err_msg + ) + summary = f"{err_msg}: {unraisable.object!r}" + traceback_message = "\n\n" + "".join( + traceback.format_exception( + unraisable.exc_type, + unraisable.exc_value, + unraisable.exc_traceback, + ) + ) + tracemalloc_tb = "\n" + tracemalloc_message(unraisable.object) + msg = summary + traceback_message + tracemalloc_tb + cause_msg = summary + tracemalloc_tb + + append( + UnraisableMeta( + msg=msg, + cause_msg=cause_msg, + exc_value=unraisable.exc_value, + ) + ) + except BaseException as e: + append(e) + # Raising this will cause the exception to be logged twice, once in our + # collect_unraisable and once by the unraisablehook calling machinery + # which is fine - this should never happen anyway and if it does + # it should probably be reported as a pytest bug. 
+ raise + + +def pytest_configure(config: Config) -> None: + prev_hook = sys.unraisablehook + deque: collections.deque[UnraisableMeta | BaseException] = collections.deque() + config.stash[unraisable_exceptions] = deque + config.add_cleanup(functools.partial(cleanup, config=config, prev_hook=prev_hook)) + sys.unraisablehook = functools.partial(unraisable_hook, append=deque.append) + + +@pytest.hookimpl(trylast=True) +def pytest_runtest_setup(item: Item) -> None: + collect_unraisable(item.config) + + +@pytest.hookimpl(trylast=True) +def pytest_runtest_call(item: Item) -> None: + collect_unraisable(item.config) + + +@pytest.hookimpl(trylast=True) +def pytest_runtest_teardown(item: Item) -> None: + collect_unraisable(item.config) diff --git a/.venv/lib/python3.12/site-packages/_pytest/warning_types.py b/.venv/lib/python3.12/site-packages/_pytest/warning_types.py new file mode 100644 index 0000000..93071b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/warning_types.py @@ -0,0 +1,172 @@ +from __future__ import annotations + +import dataclasses +import inspect +from types import FunctionType +from typing import Any +from typing import final +from typing import Generic +from typing import TypeVar +import warnings + + +class PytestWarning(UserWarning): + """Base class for all warnings emitted by pytest.""" + + __module__ = "pytest" + + +@final +class PytestAssertRewriteWarning(PytestWarning): + """Warning emitted by the pytest assert rewrite module.""" + + __module__ = "pytest" + + +@final +class PytestCacheWarning(PytestWarning): + """Warning emitted by the cache plugin in various situations.""" + + __module__ = "pytest" + + +@final +class PytestConfigWarning(PytestWarning): + """Warning emitted for configuration issues.""" + + __module__ = "pytest" + + +@final +class PytestCollectionWarning(PytestWarning): + """Warning emitted when pytest is not able to collect a file or symbol in a module.""" + + __module__ = "pytest" + + +class PytestDeprecationWarning(PytestWarning, DeprecationWarning): + """Warning class for features that will be removed in a future version.""" + + __module__ = "pytest" + + +class PytestRemovedIn9Warning(PytestDeprecationWarning): + """Warning class for features that will be removed in pytest 9.""" + + __module__ = "pytest" + + +class PytestRemovedIn10Warning(PytestDeprecationWarning): + """Warning class for features that will be removed in pytest 10.""" + + __module__ = "pytest" + + +@final +class PytestExperimentalApiWarning(PytestWarning, FutureWarning): + """Warning category used to denote experiments in pytest. + + Use sparingly as the API might change or even be removed completely in a + future version. + """ + + __module__ = "pytest" + + @classmethod + def simple(cls, apiname: str) -> PytestExperimentalApiWarning: + return cls(f"{apiname} is an experimental api that may change over time") + + +@final +class PytestReturnNotNoneWarning(PytestWarning): + """ + Warning emitted when a test function returns a value other than ``None``. + + See :ref:`return-not-none` for details. + """ + + __module__ = "pytest" + + +@final +class PytestUnknownMarkWarning(PytestWarning): + """Warning emitted on use of unknown markers. + + See :ref:`mark` for details. + """ + + __module__ = "pytest" + + +@final +class PytestUnraisableExceptionWarning(PytestWarning): + """An unraisable exception was reported. + + Unraisable exceptions are exceptions raised in :meth:`__del__ ` + implementations and similar situations when the exception cannot be raised + as normal. 
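`PytestUnraisableExceptionWarning`, defined here, is what the `sys.unraisablehook` wrapper registered earlier in `unraisableexception.py` ultimately surfaces. A sketch of a test that would trigger it (file name hypothetical):

```python
# test_finalizer.py -- illustrative only
import gc

class Leaky:
    def __del__(self) -> None:
        raise ValueError("failed while finalizing")

def test_del_failure_is_reported() -> None:
    obj = Leaky()
    del obj
    gc.collect()
    # __del__ exceptions cannot propagate; CPython hands them to
    # sys.unraisablehook, which pytest replaced, so the run later emits
    # PytestUnraisableExceptionWarning for this item.
```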
+ """ + + __module__ = "pytest" + + +@final +class PytestUnhandledThreadExceptionWarning(PytestWarning): + """An unhandled exception occurred in a :class:`~threading.Thread`. + + Such exceptions don't propagate normally. + """ + + __module__ = "pytest" + + +_W = TypeVar("_W", bound=PytestWarning) + + +@final +@dataclasses.dataclass +class UnformattedWarning(Generic[_W]): + """A warning meant to be formatted during runtime. + + This is used to hold warnings that need to format their message at runtime, + as opposed to a direct message. + """ + + category: type[_W] + template: str + + def format(self, **kwargs: Any) -> _W: + """Return an instance of the warning category, formatted with given kwargs.""" + return self.category(self.template.format(**kwargs)) + + +@final +class PytestFDWarning(PytestWarning): + """When the lsof plugin finds leaked fds.""" + + __module__ = "pytest" + + +def warn_explicit_for(method: FunctionType, message: PytestWarning) -> None: + """ + Issue the warning :param:`message` for the definition of the given :param:`method` + + this helps to log warnings for functions defined prior to finding an issue with them + (like hook wrappers being marked in a legacy mechanism) + """ + lineno = method.__code__.co_firstlineno + filename = inspect.getfile(method) + module = method.__module__ + mod_globals = method.__globals__ + try: + warnings.warn_explicit( + message, + type(message), + filename=filename, + module=module, + registry=mod_globals.setdefault("__warningregistry__", {}), + lineno=lineno, + ) + except Warning as w: + # If warnings are errors (e.g. -Werror), location information gets lost, so we add it to the message. + raise type(w)(f"{w}\n at {filename}:{lineno}") from None diff --git a/.venv/lib/python3.12/site-packages/_pytest/warnings.py b/.venv/lib/python3.12/site-packages/_pytest/warnings.py new file mode 100644 index 0000000..1dbf002 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/_pytest/warnings.py @@ -0,0 +1,151 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +from collections.abc import Generator +from contextlib import contextmanager +from contextlib import ExitStack +import sys +from typing import Literal +import warnings + +from _pytest.config import apply_warning_filters +from _pytest.config import Config +from _pytest.config import parse_warning_filter +from _pytest.main import Session +from _pytest.nodes import Item +from _pytest.terminal import TerminalReporter +from _pytest.tracemalloc import tracemalloc_message +import pytest + + +@contextmanager +def catch_warnings_for_item( + config: Config, + ihook, + when: Literal["config", "collect", "runtest"], + item: Item | None, + *, + record: bool = True, +) -> Generator[None]: + """Context manager that catches warnings generated in the contained execution block. + + ``item`` can be None if we are not in the context of an item execution. + + Each warning captured triggers the ``pytest_warning_recorded`` hook. + """ + config_filters = config.getini("filterwarnings") + cmdline_filters = config.known_args_namespace.pythonwarnings or [] + with warnings.catch_warnings(record=record) as log: + if not sys.warnoptions: + # If user is not explicitly configuring warning filters, show deprecation warnings by default (#2908). 
+ warnings.filterwarnings("always", category=DeprecationWarning) + warnings.filterwarnings("always", category=PendingDeprecationWarning) + + warnings.filterwarnings("error", category=pytest.PytestRemovedIn9Warning) + + apply_warning_filters(config_filters, cmdline_filters) + + # apply filters from "filterwarnings" marks + nodeid = "" if item is None else item.nodeid + if item is not None: + for mark in item.iter_markers(name="filterwarnings"): + for arg in mark.args: + warnings.filterwarnings(*parse_warning_filter(arg, escape=False)) + + try: + yield + finally: + if record: + # mypy can't infer that record=True means log is not None; help it. + assert log is not None + + for warning_message in log: + ihook.pytest_warning_recorded.call_historic( + kwargs=dict( + warning_message=warning_message, + nodeid=nodeid, + when=when, + location=None, + ) + ) + + +def warning_record_to_str(warning_message: warnings.WarningMessage) -> str: + """Convert a warnings.WarningMessage to a string.""" + return warnings.formatwarning( + str(warning_message.message), + warning_message.category, + warning_message.filename, + warning_message.lineno, + warning_message.line, + ) + tracemalloc_message(warning_message.source) + + +@pytest.hookimpl(wrapper=True, tryfirst=True) +def pytest_runtest_protocol(item: Item) -> Generator[None, object, object]: + with catch_warnings_for_item( + config=item.config, ihook=item.ihook, when="runtest", item=item + ): + return (yield) + + +@pytest.hookimpl(wrapper=True, tryfirst=True) +def pytest_collection(session: Session) -> Generator[None, object, object]: + config = session.config + with catch_warnings_for_item( + config=config, ihook=config.hook, when="collect", item=None + ): + return (yield) + + +@pytest.hookimpl(wrapper=True) +def pytest_terminal_summary( + terminalreporter: TerminalReporter, +) -> Generator[None]: + config = terminalreporter.config + with catch_warnings_for_item( + config=config, ihook=config.hook, when="config", item=None + ): + return (yield) + + +@pytest.hookimpl(wrapper=True) +def pytest_sessionfinish(session: Session) -> Generator[None]: + config = session.config + with catch_warnings_for_item( + config=config, ihook=config.hook, when="config", item=None + ): + return (yield) + + +@pytest.hookimpl(wrapper=True) +def pytest_load_initial_conftests( + early_config: Config, +) -> Generator[None]: + with catch_warnings_for_item( + config=early_config, ihook=early_config.hook, when="config", item=None + ): + return (yield) + + +def pytest_configure(config: Config) -> None: + with ExitStack() as stack: + stack.enter_context( + catch_warnings_for_item( + config=config, + ihook=config.hook, + when="config", + item=None, + # this disables recording because the terminalreporter has + # finished by the time it comes to reporting logged warnings + # from the end of config cleanup. So for now, this is only + # useful for setting a warning filter with an 'error' action. + record=False, + ) + ) + config.addinivalue_line( + "markers", + "filterwarnings(warning): add a warning filter to the given test. 
" + "see https://docs.pytest.org/en/stable/how-to/capture-warnings.html#pytest-mark-filterwarnings ", + ) + config.add_cleanup(stack.pop_all().close) diff --git a/.venv/lib/python3.12/site-packages/annotated_doc-0.0.4.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/annotated_doc-0.0.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/annotated_doc-0.0.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/annotated_doc-0.0.4.dist-info/METADATA b/.venv/lib/python3.12/site-packages/annotated_doc-0.0.4.dist-info/METADATA new file mode 100644 index 0000000..9bf7a9e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/annotated_doc-0.0.4.dist-info/METADATA @@ -0,0 +1,145 @@ +Metadata-Version: 2.4 +Name: annotated-doc +Version: 0.0.4 +Summary: Document parameters, class attributes, return types, and variables inline, with Annotated. +Author-Email: =?utf-8?q?Sebasti=C3=A1n_Ram=C3=ADrez?= +License-Expression: MIT +License-File: LICENSE +Classifier: Intended Audience :: Information Technology +Classifier: Intended Audience :: System Administrators +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python +Classifier: Topic :: Internet +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development +Classifier: Typing :: Typed +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Project-URL: Homepage, https://github.com/fastapi/annotated-doc +Project-URL: Documentation, https://github.com/fastapi/annotated-doc +Project-URL: Repository, https://github.com/fastapi/annotated-doc +Project-URL: Issues, https://github.com/fastapi/annotated-doc/issues +Project-URL: Changelog, https://github.com/fastapi/annotated-doc/release-notes.md +Requires-Python: >=3.8 +Description-Content-Type: text/markdown + +# Annotated Doc + +Document parameters, class attributes, return types, and variables inline, with `Annotated`. + + + Test + + + Coverage + + + Package version + + + Supported Python versions + + +## Installation + +```bash +pip install annotated-doc +``` + +Or with `uv`: + +```Python +uv add annotated-doc +``` + +## Usage + +Import `Doc` and pass a single literal string with the documentation for the specific parameter, class attribute, return type, or variable. 
+ +For example, to document a parameter `name` in a function `hi` you could do: + +```Python +from typing import Annotated + +from annotated_doc import Doc + +def hi(name: Annotated[str, Doc("Who to say hi to")]) -> None: + print(f"Hi, {name}!") +``` + +You can also use it to document class attributes: + +```Python +from typing import Annotated + +from annotated_doc import Doc + +class User: + name: Annotated[str, Doc("The user's name")] + age: Annotated[int, Doc("The user's age")] +``` + +The same way, you could document return types and variables, or anything that could have a type annotation with `Annotated`. + +## Who Uses This + +`annotated-doc` was made for: + +* [FastAPI](https://fastapi.tiangolo.com/) +* [Typer](https://typer.tiangolo.com/) +* [SQLModel](https://sqlmodel.tiangolo.com/) +* [Asyncer](https://asyncer.tiangolo.com/) + +`annotated-doc` is supported by [griffe-typingdoc](https://github.com/mkdocstrings/griffe-typingdoc), which powers reference documentation like the one in the [FastAPI Reference](https://fastapi.tiangolo.com/reference/). + +## Reasons not to use `annotated-doc` + +You are already comfortable with one of the existing docstring formats, like: + +* Sphinx +* numpydoc +* Google +* Keras + +Your team is already comfortable using them. + +You prefer having the documentation about parameters all together in a docstring, separated from the code defining them. + +You care about a specific set of users, using one specific editor, and that editor already has support for the specific docstring format you use. + +## Reasons to use `annotated-doc` + +* No micro-syntax to learn for newcomers, it’s **just Python** syntax. +* **Editing** would be already fully supported by default by any editor (current or future) supporting Python syntax, including syntax errors, syntax highlighting, etc. +* **Rendering** would be relatively straightforward to implement by static tools (tools that don't need runtime execution), as the information can be extracted from the AST they normally already create. +* **Deduplication of information**: the name of a parameter would be defined in a single place, not duplicated inside of a docstring. +* **Elimination** of the possibility of having **inconsistencies** when removing a parameter or class variable and **forgetting to remove** its documentation. +* **Minimization** of the probability of adding a new parameter or class variable and **forgetting to add its documentation**. +* **Elimination** of the possibility of having **inconsistencies** between the **name** of a parameter in the **signature** and the name in the docstring when it is renamed. +* **Access** to the documentation string for each symbol at **runtime**, including existing (older) Python versions. +* A more formalized way to document other symbols, like type aliases, that could use Annotated. +* **Support** for apps using FastAPI, Typer and others. +* **AI Accessibility**: AI tools will have an easier way understanding each parameter as the distance from documentation to parameter is much closer. + +## History + +I ([@tiangolo](https://github.com/tiangolo)) originally wanted for this to be part of the Python standard library (in [PEP 727](https://peps.python.org/pep-0727/)), but the proposal was withdrawn as there was a fair amount of negative feedback and opposition. + +The conclusion was that this was better done as an external effort, in a third-party library. 
+ +So, here it is, with a simpler approach, as a third-party library, in a way that can be used by others, starting with FastAPI and friends. + +## License + +This project is licensed under the terms of the MIT license. diff --git a/.venv/lib/python3.12/site-packages/annotated_doc-0.0.4.dist-info/RECORD b/.venv/lib/python3.12/site-packages/annotated_doc-0.0.4.dist-info/RECORD new file mode 100644 index 0000000..549e005 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/annotated_doc-0.0.4.dist-info/RECORD @@ -0,0 +1,11 @@ +annotated_doc-0.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +annotated_doc-0.0.4.dist-info/METADATA,sha256=Irm5KJua33dY2qKKAjJ-OhKaVBVIfwFGej_dSe3Z1TU,6566 +annotated_doc-0.0.4.dist-info/RECORD,, +annotated_doc-0.0.4.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90 +annotated_doc-0.0.4.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34 +annotated_doc-0.0.4.dist-info/licenses/LICENSE,sha256=__Fwd5pqy_ZavbQFwIfxzuF4ZpHkqWpANFF-SlBKDN8,1086 +annotated_doc/__init__.py,sha256=VuyxxUe80kfEyWnOrCx_Bk8hybo3aKo6RYBlkBBYW8k,52 +annotated_doc/__pycache__/__init__.cpython-312.pyc,, +annotated_doc/__pycache__/main.cpython-312.pyc,, +annotated_doc/main.py,sha256=5Zfvxv80SwwLqpRW73AZyZyiM4bWma9QWRbp_cgD20s,1075 +annotated_doc/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/.venv/lib/python3.12/site-packages/annotated_doc-0.0.4.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/annotated_doc-0.0.4.dist-info/WHEEL new file mode 100644 index 0000000..045c8ac --- /dev/null +++ b/.venv/lib/python3.12/site-packages/annotated_doc-0.0.4.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: pdm-backend (2.4.5) +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.venv/lib/python3.12/site-packages/annotated_doc-0.0.4.dist-info/entry_points.txt b/.venv/lib/python3.12/site-packages/annotated_doc-0.0.4.dist-info/entry_points.txt new file mode 100644 index 0000000..c3ad472 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/annotated_doc-0.0.4.dist-info/entry_points.txt @@ -0,0 +1,4 @@ +[console_scripts] + +[gui_scripts] + diff --git a/.venv/lib/python3.12/site-packages/annotated_doc-0.0.4.dist-info/licenses/LICENSE b/.venv/lib/python3.12/site-packages/annotated_doc-0.0.4.dist-info/licenses/LICENSE new file mode 100644 index 0000000..7a25446 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/annotated_doc-0.0.4.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2025 Sebastián Ramírez + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/.venv/lib/python3.12/site-packages/annotated_doc/__init__.py b/.venv/lib/python3.12/site-packages/annotated_doc/__init__.py new file mode 100644 index 0000000..a0152a7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/annotated_doc/__init__.py @@ -0,0 +1,3 @@ +from .main import Doc as Doc + +__version__ = "0.0.4" diff --git a/.venv/lib/python3.12/site-packages/annotated_doc/main.py b/.venv/lib/python3.12/site-packages/annotated_doc/main.py new file mode 100644 index 0000000..7063c59 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/annotated_doc/main.py @@ -0,0 +1,36 @@ +class Doc: + """Define the documentation of a type annotation using `Annotated`, to be + used in class attributes, function and method parameters, return values, + and variables. + + The value should be a positional-only string literal to allow static tools + like editors and documentation generators to use it. + + This complements docstrings. + + The string value passed is available in the attribute `documentation`. + + Example: + + ```Python + from typing import Annotated + from annotated_doc import Doc + + def hi(name: Annotated[str, Doc("Who to say hi to")]) -> None: + print(f"Hi, {name}!") + ``` + """ + + def __init__(self, documentation: str, /) -> None: + self.documentation = documentation + + def __repr__(self) -> str: + return f"Doc({self.documentation!r})" + + def __hash__(self) -> int: + return hash(self.documentation) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Doc): + return NotImplemented + return self.documentation == other.documentation diff --git a/.venv/lib/python3.12/site-packages/annotated_doc/py.typed b/.venv/lib/python3.12/site-packages/annotated_doc/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/METADATA b/.venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/METADATA new file mode 100644 index 0000000..3ac05cf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/METADATA @@ -0,0 +1,295 @@ +Metadata-Version: 2.3 +Name: annotated-types +Version: 0.7.0 +Summary: Reusable constraint types to use with typing.Annotated +Project-URL: Homepage, https://github.com/annotated-types/annotated-types +Project-URL: Source, https://github.com/annotated-types/annotated-types +Project-URL: Changelog, https://github.com/annotated-types/annotated-types/releases +Author-email: Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>, Samuel Colvin , Zac Hatfield-Dodds +License-File: LICENSE +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Console +Classifier: Environment :: MacOS X +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Information Technology +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: Unix 
+Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Typing :: Typed +Requires-Python: >=3.8 +Requires-Dist: typing-extensions>=4.0.0; python_version < '3.9' +Description-Content-Type: text/markdown + +# annotated-types + +[![CI](https://github.com/annotated-types/annotated-types/workflows/CI/badge.svg?event=push)](https://github.com/annotated-types/annotated-types/actions?query=event%3Apush+branch%3Amain+workflow%3ACI) +[![pypi](https://img.shields.io/pypi/v/annotated-types.svg)](https://pypi.python.org/pypi/annotated-types) +[![versions](https://img.shields.io/pypi/pyversions/annotated-types.svg)](https://github.com/annotated-types/annotated-types) +[![license](https://img.shields.io/github/license/annotated-types/annotated-types.svg)](https://github.com/annotated-types/annotated-types/blob/main/LICENSE) + +[PEP-593](https://peps.python.org/pep-0593/) added `typing.Annotated` as a way of +adding context-specific metadata to existing types, and specifies that +`Annotated[T, x]` _should_ be treated as `T` by any tool or library without special +logic for `x`. + +This package provides metadata objects which can be used to represent common +constraints such as upper and lower bounds on scalar values and collection sizes, +a `Predicate` marker for runtime checks, and +descriptions of how we intend these metadata to be interpreted. In some cases, +we also note alternative representations which do not require this package. + +## Install + +```bash +pip install annotated-types +``` + +## Examples + +```python +from typing import Annotated +from annotated_types import Gt, Len, Predicate + +class MyClass: + age: Annotated[int, Gt(18)] # Valid: 19, 20, ... + # Invalid: 17, 18, "19", 19.0, ... + factors: list[Annotated[int, Predicate(is_prime)]] # Valid: 2, 3, 5, 7, 11, ... + # Invalid: 4, 8, -2, 5.0, "prime", ... + + my_list: Annotated[list[int], Len(0, 10)] # Valid: [], [10, 20, 30, 40, 50] + # Invalid: (1, 2), ["abc"], [0] * 20 +``` + +## Documentation + +_While `annotated-types` avoids runtime checks for performance, users should not +construct invalid combinations such as `MultipleOf("non-numeric")` or `Annotated[int, Len(3)]`. +Downstream implementors may choose to raise an error, emit a warning, silently ignore +a metadata item, etc., if the metadata objects described below are used with an +incompatible type - or for any other reason!_ + +### Gt, Ge, Lt, Le + +Express inclusive and/or exclusive bounds on orderable values - which may be numbers, +dates, times, strings, sets, etc. Note that the boundary value need not be of the +same type that was annotated, so long as they can be compared: `Annotated[int, Gt(1.5)]` +is fine, for example, and implies that the value is an integer x such that `x > 1.5`. + +We suggest that implementors may also interpret `functools.partial(operator.le, 1.5)` +as being equivalent to `Gt(1.5)`, for users who wish to avoid a runtime dependency on +the `annotated-types` package. 
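+
+To make this concrete before spelling out each bound, here is a small, illustrative sketch (the `satisfies_gt` helper is invented for this example and is not part of the package) of how a consumer might enforce a `Gt` bound whose boundary value has a different type than the annotation:
+
+```python
+from typing import Annotated, get_args
+
+from annotated_types import Gt
+
+Age = Annotated[int, Gt(1.5)]  # the bound need not share the annotated type
+
+
+def satisfies_gt(value: int, tp: object) -> bool:
+    # hypothetical helper: check every Gt constraint attached to the annotation
+    _, *metadata = get_args(tp)
+    return all(value > m.gt for m in metadata if isinstance(m, Gt))
+
+
+assert satisfies_gt(2, Age)
+assert not satisfies_gt(1, Age)
+```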
+
+To be explicit, these types have the following meanings:
+
+* `Gt(x)` - value must be "Greater Than" `x` - equivalent to exclusive minimum
+* `Ge(x)` - value must be "Greater than or Equal" to `x` - equivalent to inclusive minimum
+* `Lt(x)` - value must be "Less Than" `x` - equivalent to exclusive maximum
+* `Le(x)` - value must be "Less than or Equal" to `x` - equivalent to inclusive maximum
+
+### Interval
+
+`Interval(gt, ge, lt, le)` allows you to specify an upper and lower bound with a single
+metadata object. `None` attributes should be ignored, and non-`None` attributes
+treated as per the single bounds above.
+
+### MultipleOf
+
+`MultipleOf(multiple_of=x)` might be interpreted in two ways:
+
+1. Python semantics, implying `value % multiple_of == 0`, or
+2. [JSONschema semantics](https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.2.1),
+   where `int(value / multiple_of) == value / multiple_of`.
+
+We encourage users to be aware of these two common interpretations and their
+distinct behaviours, especially since very large or non-integer numbers make
+it easy to cause silent data corruption due to floating-point imprecision.
+
+We encourage libraries to carefully document which interpretation they implement.
+
+### MinLen, MaxLen, Len
+
+`Len()` implies that `min_length <= len(value) <= max_length` - lower and upper bounds are inclusive.
+
+As well as `Len()` which can optionally include upper and lower bounds, we also
+provide `MinLen(x)` and `MaxLen(y)` which are equivalent to `Len(min_length=x)`
+and `Len(max_length=y)` respectively.
+
+`Len`, `MinLen`, and `MaxLen` may be used with any type which supports `len(value)`.
+
+Examples of usage:
+
+* `Annotated[list, MaxLen(10)]` (or `Annotated[list, Len(max_length=10)]`) - list must have a length of 10 or less
+* `Annotated[str, MaxLen(10)]` - string must have a length of 10 or less
+* `Annotated[list, MinLen(3)]` (or `Annotated[list, Len(min_length=3)]`) - list must have a length of 3 or more
+* `Annotated[list, Len(4, 6)]` - list must have a length of 4, 5, or 6
+* `Annotated[list, Len(8, 8)]` - list must have a length of exactly 8
+
+#### Changed in v0.4.0
+
+* `min_inclusive` has been renamed to `min_length`, no change in meaning
+* `max_exclusive` has been renamed to `max_length`, upper bound is now **inclusive** instead of **exclusive**
+* The recommendation that slices are interpreted as `Len` has been removed due to ambiguity and different semantic
+  meaning of the upper bound in slices vs. `Len`
+
+See [issue #23](https://github.com/annotated-types/annotated-types/issues/23) for discussion.
+
+### Timezone
+
+`Timezone` can be used with a `datetime` or a `time` to express which timezones
+are allowed. `Annotated[datetime, Timezone(None)]` must be a naive datetime.
+`Timezone(...)` ([literal ellipsis](https://docs.python.org/3/library/constants.html#Ellipsis))
+expresses that any timezone-aware datetime is allowed. You may also pass a specific
+timezone string or [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects)
+object such as `Timezone(timezone.utc)` or `Timezone("Africa/Abidjan")` to express that you only
+allow a specific timezone, though we note that this is often a symptom of fragile design.
+
+#### Changed in v0.x.x
+
+* `Timezone` accepts [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects) objects instead of
+  `timezone`, extending compatibility to [`zoneinfo`](https://docs.python.org/3/library/zoneinfo.html) and third party libraries.
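+
+For concreteness, a minimal sketch of the three forms described above (the alias names are invented for the example):
+
+```python
+from datetime import datetime, timezone
+from typing import Annotated
+
+from annotated_types import Timezone
+
+NaiveDt = Annotated[datetime, Timezone(None)]         # must be a naive datetime
+AwareDt = Annotated[datetime, Timezone(...)]          # any timezone-aware datetime
+UtcDt = Annotated[datetime, Timezone(timezone.utc)]   # only datetimes aware in this specific timezone
+```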
+
+### Unit
+
+`Unit(unit: str)` expresses that the annotated numeric value is the magnitude of
+a quantity with the specified unit. For example, `Annotated[float, Unit("m/s")]`
+would be a float representing a velocity in meters per second.
+
+Please note that `annotated_types` itself makes no attempt to parse or validate
+the unit string in any way. That is left entirely to downstream libraries,
+such as [`pint`](https://pint.readthedocs.io) or
+[`astropy.units`](https://docs.astropy.org/en/stable/units/).
+
+An example of how a library might use this metadata:
+
+```python
+from annotated_types import Unit
+from typing import Annotated, TypeVar, Callable, Any, get_origin, get_args
+
+# given a type annotated with a unit:
+Meters = Annotated[float, Unit("m")]
+
+
+# you can cast the annotation to a specific unit type with any
+# callable that accepts a string and returns the desired type
+T = TypeVar("T")
+def cast_unit(tp: Any, unit_cls: Callable[[str], T]) -> T | None:
+    if get_origin(tp) is Annotated:
+        for arg in get_args(tp):
+            if isinstance(arg, Unit):
+                return unit_cls(arg.unit)
+    return None
+
+
+# using `pint`
+import pint
+pint_unit = cast_unit(Meters, pint.Unit)
+
+
+# using `astropy.units`
+import astropy.units as u
+astropy_unit = cast_unit(Meters, u.Unit)
+```
+
+### Predicate
+
+`Predicate(func: Callable)` expresses that `func(value)` is truthy for valid values.
+Users should prefer the statically inspectable metadata above, but if you need
+the full power and flexibility of arbitrary runtime predicates... here it is.
+
+For some common constraints, we provide generic types:
+
+* `IsLower = Annotated[T, Predicate(str.islower)]`
+* `IsUpper = Annotated[T, Predicate(str.isupper)]`
+* `IsDigit = Annotated[T, Predicate(str.isdigit)]`
+* `IsFinite = Annotated[T, Predicate(math.isfinite)]`
+* `IsNotFinite = Annotated[T, Predicate(Not(math.isfinite))]`
+* `IsNan = Annotated[T, Predicate(math.isnan)]`
+* `IsNotNan = Annotated[T, Predicate(Not(math.isnan))]`
+* `IsInfinite = Annotated[T, Predicate(math.isinf)]`
+* `IsNotInfinite = Annotated[T, Predicate(Not(math.isinf))]`
+
+so that you can write e.g. `x: IsFinite[float] = 2.0` instead of the longer
+(but exactly equivalent) `x: Annotated[float, Predicate(math.isfinite)] = 2.0`.
+
+Some libraries might have special logic to handle known or understandable predicates,
+for example by checking for `str.isdigit` and using its presence to both call custom
+logic to enforce digit-only strings, and customise some generated external schema.
+Users are therefore encouraged to avoid indirection like `lambda s: s.lower()`, in
+favor of introspectable methods such as `str.lower` or `re.compile("pattern").search`.
+
+To enable basic negation of commonly used predicates like `math.isnan` without
+introducing indirection that makes it impossible for implementers to introspect
+the predicate, we provide a `Not` wrapper that simply negates the predicate in an
+introspectable manner. Several of the predicates listed above are created in this manner.
+
+We do not specify what behaviour should be expected for predicates that raise
+an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
+skip invalid constraints, or statically raise an error; or it might try calling it
+and then propagate or discard the resulting
+`TypeError: descriptor 'isdigit' for 'str' objects doesn't apply to a 'int' object`
+exception. We encourage libraries to document the behaviour they choose.
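+
+As a short, illustrative sketch (alias names such as `Slug` are invented for this example), the predefined aliases and the `Not` wrapper compose like any other `Annotated` metadata:
+
+```python
+import math
+from typing import Annotated
+
+from annotated_types import IsFinite, LowerCase, Not, Predicate
+
+Slug = Annotated[str, Predicate(str.isidentifier)]      # custom, introspectable predicate
+Username = LowerCase[str]                               # predefined alias
+Velocity = IsFinite[float]                              # same as Annotated[float, Predicate(math.isfinite)]
+NotNan = Annotated[float, Predicate(Not(math.isnan))]   # Not(...) keeps the negation introspectable
+```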
+
+### Doc
+
+`doc()` can be used to add documentation information in `Annotated`, for function and method parameters, variables, class attributes, return types, and any place where `Annotated` can be used.
+
+It expects a value that can be statically analyzed, as the main use case is for static analysis, editors, documentation generators, and similar tools.
+
+It returns a `DocInfo` class with a single attribute `documentation` containing the value passed to `doc()`.
+
+This is the early adopter's alternative form of the [`typing-doc` proposal](https://github.com/tiangolo/fastapi/blob/typing-doc/typing_doc.md).
+
+### Integrating downstream types with `GroupedMetadata`
+
+Implementers may choose to provide a convenience wrapper that groups multiple pieces of metadata.
+This can help reduce verbosity and cognitive overhead for users.
+For example, an implementer like Pydantic might provide a `Field` or `Meta` type that accepts keyword arguments and transforms these into low-level metadata:
+
+```python
+from dataclasses import dataclass
+from typing import Iterator
+from annotated_types import GroupedMetadata, Ge
+
+@dataclass
+class Field(GroupedMetadata):
+    ge: int | None = None
+    description: str | None = None
+
+    def __iter__(self) -> Iterator[object]:
+        # Iterating over a GroupedMetadata object should yield annotated-types
+        # constraint metadata objects which describe it as fully as possible,
+        # and may include other unknown objects too.
+        if self.ge is not None:
+            yield Ge(self.ge)
+        if self.description is not None:
+            yield Description(self.description)
+```
+
+Libraries consuming annotated-types constraints should check for `GroupedMetadata` and unpack it by iterating over the object and treating the results as if they had been "unpacked" in the `Annotated` type. The same logic should be applied to the [PEP 646 `Unpack` type](https://peps.python.org/pep-0646/), so that `Annotated[T, Field(...)]`, `Annotated[T, Unpack[Field(...)]]` and `Annotated[T, *Field(...)]` are all treated consistently.
+
+Libraries consuming annotated-types should also ignore any metadata they do not recognize that came from unpacking a `GroupedMetadata`, just like they ignore unrecognized metadata in `Annotated` itself.
+
+Our own `annotated_types.Interval` class is a `GroupedMetadata` which unpacks itself into `Gt`, `Lt`, etc., so this is not an abstract concern. Similarly, `annotated_types.Len` is a `GroupedMetadata` which unpacks itself into `MinLen` (optionally) and `MaxLen`.
+
+### Consuming metadata
+
+We intend to not be prescriptive as to _how_ the metadata and constraints are used, but as an example of how one might parse constraints from type annotations see our [implementation in `test_main.py`](https://github.com/annotated-types/annotated-types/blob/f59cf6d1b5255a0fe359b93896759a180bec30ae/tests/test_main.py#L94-L103).
+
+It is up to the implementer to determine how this metadata is used.
+You could use the metadata for runtime type checking, for generating schemas or to generate example data, amongst other use cases.
+
+## Design & History
+
+This package was designed at the PyCon 2022 sprints by the maintainers of Pydantic
+and Hypothesis, with the goal of making it as easy as possible for end-users to
+provide more informative annotations for use by runtime libraries.
+
+It is deliberately minimal, and following PEP-593 allows considerable downstream
+discretion in what (if anything!) they choose to support.
Nonetheless, we expect +that staying simple and covering _only_ the most common use-cases will give users +and maintainers the best experience we can. If you'd like more constraints for your +types - follow our lead, by defining them and documenting them downstream! diff --git a/.venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/RECORD b/.venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/RECORD new file mode 100644 index 0000000..a66e278 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/RECORD @@ -0,0 +1,10 @@ +annotated_types-0.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +annotated_types-0.7.0.dist-info/METADATA,sha256=7ltqxksJJ0wCYFGBNIQCWTlWQGeAH0hRFdnK3CB895E,15046 +annotated_types-0.7.0.dist-info/RECORD,, +annotated_types-0.7.0.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87 +annotated_types-0.7.0.dist-info/licenses/LICENSE,sha256=_hBJiEsaDZNCkB6I4H8ykl0ksxIdmXK2poBfuYJLCV0,1083 +annotated_types/__init__.py,sha256=RynLsRKUEGI0KimXydlD1fZEfEzWwDo0Uon3zOKhG1Q,13819 +annotated_types/__pycache__/__init__.cpython-312.pyc,, +annotated_types/__pycache__/test_cases.cpython-312.pyc,, +annotated_types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +annotated_types/test_cases.py,sha256=zHFX6EpcMbGJ8FzBYDbO56bPwx_DYIVSKbZM-4B3_lg,6421 diff --git a/.venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/WHEEL new file mode 100644 index 0000000..516596c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.24.2 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE b/.venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..d99323a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2022 the contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/.venv/lib/python3.12/site-packages/annotated_types/__init__.py b/.venv/lib/python3.12/site-packages/annotated_types/__init__.py new file mode 100644 index 0000000..74e0dee --- /dev/null +++ b/.venv/lib/python3.12/site-packages/annotated_types/__init__.py @@ -0,0 +1,432 @@ +import math +import sys +import types +from dataclasses import dataclass +from datetime import tzinfo +from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, SupportsFloat, SupportsIndex, TypeVar, Union + +if sys.version_info < (3, 8): + from typing_extensions import Protocol, runtime_checkable +else: + from typing import Protocol, runtime_checkable + +if sys.version_info < (3, 9): + from typing_extensions import Annotated, Literal +else: + from typing import Annotated, Literal + +if sys.version_info < (3, 10): + EllipsisType = type(Ellipsis) + KW_ONLY = {} + SLOTS = {} +else: + from types import EllipsisType + + KW_ONLY = {"kw_only": True} + SLOTS = {"slots": True} + + +__all__ = ( + 'BaseMetadata', + 'GroupedMetadata', + 'Gt', + 'Ge', + 'Lt', + 'Le', + 'Interval', + 'MultipleOf', + 'MinLen', + 'MaxLen', + 'Len', + 'Timezone', + 'Predicate', + 'LowerCase', + 'UpperCase', + 'IsDigits', + 'IsFinite', + 'IsNotFinite', + 'IsNan', + 'IsNotNan', + 'IsInfinite', + 'IsNotInfinite', + 'doc', + 'DocInfo', + '__version__', +) + +__version__ = '0.7.0' + + +T = TypeVar('T') + + +# arguments that start with __ are considered +# positional only +# see https://peps.python.org/pep-0484/#positional-only-arguments + + +class SupportsGt(Protocol): + def __gt__(self: T, __other: T) -> bool: + ... + + +class SupportsGe(Protocol): + def __ge__(self: T, __other: T) -> bool: + ... + + +class SupportsLt(Protocol): + def __lt__(self: T, __other: T) -> bool: + ... + + +class SupportsLe(Protocol): + def __le__(self: T, __other: T) -> bool: + ... + + +class SupportsMod(Protocol): + def __mod__(self: T, __other: T) -> T: + ... + + +class SupportsDiv(Protocol): + def __div__(self: T, __other: T) -> T: + ... + + +class BaseMetadata: + """Base class for all metadata. + + This exists mainly so that implementers + can do `isinstance(..., BaseMetadata)` while traversing field annotations. + """ + + __slots__ = () + + +@dataclass(frozen=True, **SLOTS) +class Gt(BaseMetadata): + """Gt(gt=x) implies that the value must be greater than x. + + It can be used with any type that supports the ``>`` operator, + including numbers, dates and times, strings, sets, and so on. + """ + + gt: SupportsGt + + +@dataclass(frozen=True, **SLOTS) +class Ge(BaseMetadata): + """Ge(ge=x) implies that the value must be greater than or equal to x. + + It can be used with any type that supports the ``>=`` operator, + including numbers, dates and times, strings, sets, and so on. + """ + + ge: SupportsGe + + +@dataclass(frozen=True, **SLOTS) +class Lt(BaseMetadata): + """Lt(lt=x) implies that the value must be less than x. + + It can be used with any type that supports the ``<`` operator, + including numbers, dates and times, strings, sets, and so on. + """ + + lt: SupportsLt + + +@dataclass(frozen=True, **SLOTS) +class Le(BaseMetadata): + """Le(le=x) implies that the value must be less than or equal to x. + + It can be used with any type that supports the ``<=`` operator, + including numbers, dates and times, strings, sets, and so on. + """ + + le: SupportsLe + + +@runtime_checkable +class GroupedMetadata(Protocol): + """A grouping of multiple objects, like typing.Unpack. + + `GroupedMetadata` on its own is not metadata and has no meaning. 
+
+    All of the constraints and metadata should be fully expressible
+    in terms of the `BaseMetadata`'s returned by `GroupedMetadata.__iter__()`.
+
+    Concrete implementations should override `GroupedMetadata.__iter__()`
+    to add their own metadata.
+    For example:
+
+    >>> @dataclass
+    >>> class Field(GroupedMetadata):
+    >>>     gt: float | None = None
+    >>>     description: str | None = None
+    ...
+    >>>     def __iter__(self) -> Iterable[object]:
+    >>>         if self.gt is not None:
+    >>>             yield Gt(self.gt)
+    >>>         if self.description is not None:
+    >>>             yield Description(self.description)
+
+    Also see the implementation of `Interval` below for an example.
+
+    Parsers should recognize this and unpack it so that it can be used
+    both with and without unpacking:
+
+    - `Annotated[int, Field(...)]` (parser must unpack Field)
+    - `Annotated[int, *Field(...)]` (PEP-646)
+    """  # noqa: trailing-whitespace
+
+    @property
+    def __is_annotated_types_grouped_metadata__(self) -> Literal[True]:
+        return True
+
+    def __iter__(self) -> Iterator[object]:
+        ...
+
+    if not TYPE_CHECKING:
+        __slots__ = ()  # allow subclasses to use slots
+
+        def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
+            # Basic ABC like functionality without the complexity of an ABC
+            super().__init_subclass__(*args, **kwargs)
+            if cls.__iter__ is GroupedMetadata.__iter__:
+                raise TypeError("Can't subclass GroupedMetadata without implementing __iter__")
+
+        def __iter__(self) -> Iterator[object]:  # noqa: F811
+            raise NotImplementedError  # more helpful than "None has no attribute..." type errors
+
+
+@dataclass(frozen=True, **KW_ONLY, **SLOTS)
+class Interval(GroupedMetadata):
+    """Interval can express inclusive or exclusive bounds with a single object.
+
+    It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which
+    are interpreted the same way as the single-bound constraints.
+    """
+
+    gt: Union[SupportsGt, None] = None
+    ge: Union[SupportsGe, None] = None
+    lt: Union[SupportsLt, None] = None
+    le: Union[SupportsLe, None] = None
+
+    def __iter__(self) -> Iterator[BaseMetadata]:
+        """Unpack an Interval into zero or more single-bounds."""
+        if self.gt is not None:
+            yield Gt(self.gt)
+        if self.ge is not None:
+            yield Ge(self.ge)
+        if self.lt is not None:
+            yield Lt(self.lt)
+        if self.le is not None:
+            yield Le(self.le)
+
+
+@dataclass(frozen=True, **SLOTS)
+class MultipleOf(BaseMetadata):
+    """MultipleOf(multiple_of=x) might be interpreted in two ways:
+
+    1. Python semantics, implying ``value % multiple_of == 0``, or
+    2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of``
+
+    We encourage users to be aware of these two common interpretations,
+    and libraries to carefully document which they implement.
+    """
+
+    multiple_of: Union[SupportsDiv, SupportsMod]
+
+
+@dataclass(frozen=True, **SLOTS)
+class MinLen(BaseMetadata):
+    """
+    MinLen() implies minimum inclusive length,
+    e.g. ``len(value) >= min_length``.
+    """
+
+    min_length: Annotated[int, Ge(0)]
+
+
+@dataclass(frozen=True, **SLOTS)
+class MaxLen(BaseMetadata):
+    """
+    MaxLen() implies maximum inclusive length,
+    e.g. ``len(value) <= max_length``.
+    """
+
+    max_length: Annotated[int, Ge(0)]
+
+
+@dataclass(frozen=True, **SLOTS)
+class Len(GroupedMetadata):
+    """
+    Len() implies that ``min_length <= len(value) <= max_length``.
+
+    Upper bound may be omitted or ``None`` to indicate no upper length bound.
+    """
+
+    min_length: Annotated[int, Ge(0)] = 0
+    max_length: Optional[Annotated[int, Ge(0)]] = None
+
+    def __iter__(self) -> Iterator[BaseMetadata]:
+        """Unpack a Len into zero or more single-bounds."""
+        if self.min_length > 0:
+            yield MinLen(self.min_length)
+        if self.max_length is not None:
+            yield MaxLen(self.max_length)
+
+
+@dataclass(frozen=True, **SLOTS)
+class Timezone(BaseMetadata):
+    """Timezone(tz=...) requires a datetime to be aware (or ``tz=None``, naive).
+
+    ``Annotated[datetime, Timezone(None)]`` must be a naive datetime.
+    ``Timezone(...)`` (the ellipsis literal) expresses that the datetime must be
+    tz-aware but any timezone is allowed.
+
+    You may also pass a specific timezone string or tzinfo object such as
+    ``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that
+    you only allow a specific timezone, though we note that this is often
+    a symptom of poor design.
+    """
+
+    tz: Union[str, tzinfo, EllipsisType, None]
+
+
+@dataclass(frozen=True, **SLOTS)
+class Unit(BaseMetadata):
+    """Indicates that the value is a physical quantity with the specified unit.
+
+    It is intended for usage with numeric types, where the value represents the
+    magnitude of the quantity. For example, ``distance: Annotated[float, Unit('m')]``
+    or ``speed: Annotated[float, Unit('m/s')]``.
+
+    Interpretation of the unit string is left to the discretion of the consumer.
+    It is suggested to follow conventions established by python libraries that work
+    with physical quantities, such as
+
+    - ``pint`` : <https://pint.readthedocs.io>
+    - ``astropy.units``: <https://docs.astropy.org/en/stable/units/>
+
+    For indicating a quantity with a certain dimensionality but without a specific unit
+    it is recommended to use square brackets, e.g. `Annotated[float, Unit('[time]')]`.
+    Note, however, ``annotated_types`` itself makes no use of the unit string.
+    """
+
+    unit: str
+
+
+@dataclass(frozen=True, **SLOTS)
+class Predicate(BaseMetadata):
+    """``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values.
+
+    Users should prefer statically inspectable metadata, but if you need the full
+    power and flexibility of arbitrary runtime predicates... here it is.
+
+    We provide a few predefined predicates for common string constraints:
+    ``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and
+    ``IsDigits = Predicate(str.isdigit)``. Users are encouraged to use methods which
+    can be given special handling, and avoid indirection like ``lambda s: s.lower()``.
+
+    Some libraries might have special logic to handle certain predicates, e.g. by
+    checking for `str.isdigit` and using its presence to both call custom logic to
+    enforce digit-only strings, and customise some generated external schema.
+
+    We do not specify what behaviour should be expected for predicates that raise
+    an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
+    skip invalid constraints, or statically raise an error; or it might try calling it
+    and then propagate or discard the resulting exception.
+    """
+
+    func: Callable[[Any], bool]
+
+    def __repr__(self) -> str:
+        if getattr(self.func, "__name__", "<lambda>") == "<lambda>":
+            return f"{self.__class__.__name__}({self.func!r})"
+        if isinstance(self.func, (types.MethodType, types.BuiltinMethodType)) and (
+            namespace := getattr(self.func.__self__, "__name__", None)
+        ):
+            return f"{self.__class__.__name__}({namespace}.{self.func.__name__})"
+        if isinstance(self.func, type(str.isascii)):  # method descriptor
+            return f"{self.__class__.__name__}({self.func.__qualname__})"
+        return f"{self.__class__.__name__}({self.func.__name__})"
+
+
+@dataclass
+class Not:
+    func: Callable[[Any], bool]
+
+    def __call__(self, __v: Any) -> bool:
+        return not self.func(__v)
+
+
+_StrType = TypeVar("_StrType", bound=str)
+
+LowerCase = Annotated[_StrType, Predicate(str.islower)]
+"""
+Return True if the string is a lowercase string, False otherwise.
+
+A string is lowercase if all cased characters in the string are lowercase and there is at least one cased character in the string.
+"""  # noqa: E501
+UpperCase = Annotated[_StrType, Predicate(str.isupper)]
+"""
+Return True if the string is an uppercase string, False otherwise.
+
+A string is uppercase if all cased characters in the string are uppercase and there is at least one cased character in the string.
+"""  # noqa: E501
+IsDigit = Annotated[_StrType, Predicate(str.isdigit)]
+IsDigits = IsDigit  # type: ignore  # plural for backwards compatibility, see #63
+"""
+Return True if the string is a digit string, False otherwise.
+
+A string is a digit string if all characters in the string are digits and there is at least one character in the string.
+"""  # noqa: E501
+IsAscii = Annotated[_StrType, Predicate(str.isascii)]
+"""
+Return True if all characters in the string are ASCII, False otherwise.
+
+ASCII characters have code points in the range U+0000-U+007F. Empty string is ASCII too.
+"""
+
+_NumericType = TypeVar('_NumericType', bound=Union[SupportsFloat, SupportsIndex])
+IsFinite = Annotated[_NumericType, Predicate(math.isfinite)]
+"""Return True if x is neither an infinity nor a NaN, and False otherwise."""
+IsNotFinite = Annotated[_NumericType, Predicate(Not(math.isfinite))]
+"""Return True if x is one of infinity or NaN, and False otherwise"""
+IsNan = Annotated[_NumericType, Predicate(math.isnan)]
+"""Return True if x is a NaN (not a number), and False otherwise."""
+IsNotNan = Annotated[_NumericType, Predicate(Not(math.isnan))]
+"""Return True if x is anything but NaN (not a number), and False otherwise."""
+IsInfinite = Annotated[_NumericType, Predicate(math.isinf)]
+"""Return True if x is a positive or negative infinity, and False otherwise."""
+IsNotInfinite = Annotated[_NumericType, Predicate(Not(math.isinf))]
+"""Return True if x is neither a positive or negative infinity, and False otherwise."""
+
+try:
+    from typing_extensions import DocInfo, doc  # type: ignore [attr-defined]
+except ImportError:
+
+    @dataclass(frozen=True, **SLOTS)
+    class DocInfo:  # type: ignore [no-redef]
+        """ "
+        The return value of doc(), mainly to be used by tools that want to extract the
+        Annotated documentation at runtime.
+        """
+
+        documentation: str
+        """The documentation string passed to doc()."""
+
+    def doc(
+        documentation: str,
+    ) -> DocInfo:
+        """
+        Add documentation to a type annotation inside of Annotated.
+
+        For example:
+
+        >>> def hi(name: Annotated[int, doc("The name of the user")]) -> None: ...
+ """ + return DocInfo(documentation) diff --git a/.venv/lib/python3.12/site-packages/annotated_types/py.typed b/.venv/lib/python3.12/site-packages/annotated_types/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/annotated_types/test_cases.py b/.venv/lib/python3.12/site-packages/annotated_types/test_cases.py new file mode 100644 index 0000000..d9164d6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/annotated_types/test_cases.py @@ -0,0 +1,151 @@ +import math +import sys +from datetime import date, datetime, timedelta, timezone +from decimal import Decimal +from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple + +if sys.version_info < (3, 9): + from typing_extensions import Annotated +else: + from typing import Annotated + +import annotated_types as at + + +class Case(NamedTuple): + """ + A test case for `annotated_types`. + """ + + annotation: Any + valid_cases: Iterable[Any] + invalid_cases: Iterable[Any] + + +def cases() -> Iterable[Case]: + # Gt, Ge, Lt, Le + yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1)) + yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1)) + yield Case( + Annotated[datetime, at.Gt(datetime(2000, 1, 1))], + [datetime(2000, 1, 2), datetime(2000, 1, 3)], + [datetime(2000, 1, 1), datetime(1999, 12, 31)], + ) + yield Case( + Annotated[datetime, at.Gt(date(2000, 1, 1))], + [date(2000, 1, 2), date(2000, 1, 3)], + [date(2000, 1, 1), date(1999, 12, 31)], + ) + yield Case( + Annotated[datetime, at.Gt(Decimal('1.123'))], + [Decimal('1.1231'), Decimal('123')], + [Decimal('1.123'), Decimal('0')], + ) + + yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1)) + yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1)) + yield Case( + Annotated[datetime, at.Ge(datetime(2000, 1, 1))], + [datetime(2000, 1, 2), datetime(2000, 1, 3)], + [datetime(1998, 1, 1), datetime(1999, 12, 31)], + ) + + yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4)) + yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 0.9)) + yield Case( + Annotated[datetime, at.Lt(datetime(2000, 1, 1))], + [datetime(1999, 12, 31), datetime(1999, 12, 31)], + [datetime(2000, 1, 2), datetime(2000, 1, 3)], + ) + + yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000)) + yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9)) + yield Case( + Annotated[datetime, at.Le(datetime(2000, 1, 1))], + [datetime(2000, 1, 1), datetime(1999, 12, 31)], + [datetime(2000, 1, 2), datetime(2000, 1, 3)], + ) + + # Interval + yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1)) + yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1)) + yield Case(Annotated[float, at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1)) + yield Case( + Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))], + [datetime(2000, 1, 2), datetime(2000, 1, 3)], + [datetime(2000, 1, 1), datetime(2000, 1, 4)], + ) + + yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4)) + yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1)) + + # lengths + + yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12')) + yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12')) + yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], 
[1, 2])) + yield Case(Annotated[List[int], at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2])) + + yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10)) + yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10)) + yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10)) + yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10)) + + yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 'x' * 10)) + yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234')) + + yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}]) + yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4})) + yield Case(Annotated[Tuple[int, ...], at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4))) + + # Timezone + + yield Case( + Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)] + ) + yield Case( + Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)] + ) + yield Case( + Annotated[datetime, at.Timezone(timezone.utc)], + [datetime(2000, 1, 1, tzinfo=timezone.utc)], + [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))], + ) + yield Case( + Annotated[datetime, at.Timezone('Europe/London')], + [datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))], + [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))], + ) + + # Quantity + + yield Case(Annotated[float, at.Unit(unit='m')], (5, 4.2), ('5m', '4.2m')) + + # predicate types + + yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom']) + yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC']) + yield Case(at.IsDigit[str], ['123'], ['', 'ab', 'a1b2']) + yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀']) + + yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5]) + + yield Case(at.IsFinite[float], [1.23], [math.nan, math.inf, -math.inf]) + yield Case(at.IsNotFinite[float], [math.nan, math.inf], [1.23]) + yield Case(at.IsNan[float], [math.nan], [1.23, math.inf]) + yield Case(at.IsNotNan[float], [1.23, math.inf], [math.nan]) + yield Case(at.IsInfinite[float], [math.inf], [math.nan, 1.23]) + yield Case(at.IsNotInfinite[float], [math.nan, 1.23], [math.inf]) + + # check stacked predicates + yield Case(at.IsInfinite[Annotated[float, at.Predicate(lambda x: x > 0)]], [math.inf], [-math.inf, 1.23, math.nan]) + + # doc + yield Case(Annotated[int, at.doc("A number")], [1, 2], []) + + # custom GroupedMetadata + class MyCustomGroupedMetadata(at.GroupedMetadata): + def __iter__(self) -> Iterator[at.Predicate]: + yield at.Predicate(lambda x: float(x).is_integer()) + + yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5]) diff --git a/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/METADATA b/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/METADATA new file mode 100644 index 0000000..dbeb198 --- 
/dev/null +++ b/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/METADATA @@ -0,0 +1,96 @@ +Metadata-Version: 2.4 +Name: anyio +Version: 4.12.1 +Summary: High-level concurrency and networking framework on top of asyncio or Trio +Author-email: Alex Grönholm +License-Expression: MIT +Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/ +Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html +Project-URL: Source code, https://github.com/agronholm/anyio +Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Framework :: AnyIO +Classifier: Typing :: Typed +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Requires-Python: >=3.9 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: exceptiongroup>=1.0.2; python_version < "3.11" +Requires-Dist: idna>=2.8 +Requires-Dist: typing_extensions>=4.5; python_version < "3.13" +Provides-Extra: trio +Requires-Dist: trio>=0.32.0; python_version >= "3.10" and extra == "trio" +Requires-Dist: trio>=0.31.0; python_version < "3.10" and extra == "trio" +Dynamic: license-file + +.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg + :target: https://github.com/agronholm/anyio/actions/workflows/test.yml + :alt: Build Status +.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master + :target: https://coveralls.io/github/agronholm/anyio?branch=master + :alt: Code Coverage +.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest + :target: https://anyio.readthedocs.io/en/latest/?badge=latest + :alt: Documentation +.. image:: https://badges.gitter.im/gitterHQ/gitter.svg + :target: https://gitter.im/python-trio/AnyIO + :alt: Gitter chat + +AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or +Trio_. It implements Trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony +with the native SC of Trio itself. + +Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or +Trio_. AnyIO can also be adopted into a library or application incrementally – bit by bit, no full +refactoring necessary. It will blend in with the native libraries of your chosen backend. + +To find out why you might want to use AnyIO's APIs instead of asyncio's, you can read about it +`here `_. 
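+
+As a quick, illustrative sketch (not taken verbatim from the AnyIO documentation), the
+structured-concurrency style described above looks roughly like this:
+
+.. code-block:: python
+
+    import anyio
+
+    async def greet(name: str, delay: float) -> None:
+        await anyio.sleep(delay)
+        print(f"hello, {name}")
+
+    async def main() -> None:
+        # every task started in the group is awaited before the block exits
+        async with anyio.create_task_group() as tg:
+            tg.start_soon(greet, "trio", 0.2)
+            tg.start_soon(greet, "asyncio", 0.1)
+
+    anyio.run(main)                     # runs on the asyncio backend by default
+    # anyio.run(main, backend="trio")   # the same code runs unmodified on Trio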
+ +Documentation +------------- + +View full documentation at: https://anyio.readthedocs.io/ + +Features +-------- + +AnyIO offers the following functionality: + +* Task groups (nurseries_ in trio terminology) +* High-level networking (TCP, UDP and UNIX sockets) + + * `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python + 3.8) + * async/await style UDP sockets (unlike asyncio where you still have to use Transports and + Protocols) + +* A versatile API for byte streams and object streams +* Inter-task synchronization and communication (locks, conditions, events, semaphores, object + streams) +* Worker threads +* Subprocesses +* Subinterpreter support for code parallelization (on Python 3.13 and later) +* Asynchronous file I/O (using worker threads) +* Signal handling +* Asynchronous version of the functools_ module + +AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures. +It even works with the popular Hypothesis_ library. + +.. _asyncio: https://docs.python.org/3/library/asyncio.html +.. _Trio: https://github.com/python-trio/trio +.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency +.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning +.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs +.. _pytest: https://docs.pytest.org/en/latest/ +.. _functools: https://docs.python.org/3/library/functools.html +.. _Hypothesis: https://hypothesis.works/ diff --git a/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/RECORD b/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/RECORD new file mode 100644 index 0000000..4b3b57c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/RECORD @@ -0,0 +1,92 @@ +anyio-4.12.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +anyio-4.12.1.dist-info/METADATA,sha256=DfiDab9Tmmcfy802lOLTMEHJQShkOSbopCwqCYbLuJk,4277 +anyio-4.12.1.dist-info/RECORD,, +anyio-4.12.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 +anyio-4.12.1.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39 +anyio-4.12.1.dist-info/licenses/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081 +anyio-4.12.1.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6 +anyio/__init__.py,sha256=7iDVqMUprUuKNY91FuoKqayAhR-OY136YDPI6P78HHk,6170 +anyio/__pycache__/__init__.cpython-312.pyc,, +anyio/__pycache__/from_thread.cpython-312.pyc,, +anyio/__pycache__/functools.cpython-312.pyc,, +anyio/__pycache__/lowlevel.cpython-312.pyc,, +anyio/__pycache__/pytest_plugin.cpython-312.pyc,, +anyio/__pycache__/to_interpreter.cpython-312.pyc,, +anyio/__pycache__/to_process.cpython-312.pyc,, +anyio/__pycache__/to_thread.cpython-312.pyc,, +anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_backends/__pycache__/__init__.cpython-312.pyc,, +anyio/_backends/__pycache__/_asyncio.cpython-312.pyc,, +anyio/_backends/__pycache__/_trio.cpython-312.pyc,, +anyio/_backends/_asyncio.py,sha256=xG6qv60mgGnL0mK82dxjH2b8hlkMlJ-x2BqIq3qv70Y,98863 +anyio/_backends/_trio.py,sha256=30Rctb7lm8g63ZHljVPVnj5aH-uK6oQvphjwUBoAzuI,41456 +anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_core/__pycache__/__init__.cpython-312.pyc,, +anyio/_core/__pycache__/_asyncio_selector_thread.cpython-312.pyc,, +anyio/_core/__pycache__/_contextmanagers.cpython-312.pyc,, 
+anyio/_core/__pycache__/_eventloop.cpython-312.pyc,, +anyio/_core/__pycache__/_exceptions.cpython-312.pyc,, +anyio/_core/__pycache__/_fileio.cpython-312.pyc,, +anyio/_core/__pycache__/_resources.cpython-312.pyc,, +anyio/_core/__pycache__/_signals.cpython-312.pyc,, +anyio/_core/__pycache__/_sockets.cpython-312.pyc,, +anyio/_core/__pycache__/_streams.cpython-312.pyc,, +anyio/_core/__pycache__/_subprocesses.cpython-312.pyc,, +anyio/_core/__pycache__/_synchronization.cpython-312.pyc,, +anyio/_core/__pycache__/_tasks.cpython-312.pyc,, +anyio/_core/__pycache__/_tempfile.cpython-312.pyc,, +anyio/_core/__pycache__/_testing.cpython-312.pyc,, +anyio/_core/__pycache__/_typedattr.cpython-312.pyc,, +anyio/_core/_asyncio_selector_thread.py,sha256=2PdxFM3cs02Kp6BSppbvmRT7q7asreTW5FgBxEsflBo,5626 +anyio/_core/_contextmanagers.py,sha256=YInBCabiEeS-UaP_Jdxa1CaFC71ETPW8HZTHIM8Rsc8,7215 +anyio/_core/_eventloop.py,sha256=c2EdcBX-xnKwxPcC4Pjn3_qG9I-x4IWFO2R9RqCGjM4,6448 +anyio/_core/_exceptions.py,sha256=Y3aq-Wxd7Q2HqwSg7nZPvRsHEuGazv_qeet6gqEBdPk,4407 +anyio/_core/_fileio.py,sha256=uc7t10Vb-If7GbdWM_zFf-ajUe6uek63fSt7IBLlZW0,25731 +anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435 +anyio/_core/_signals.py,sha256=mjTBB2hTKNPRlU0IhnijeQedpWOGERDiMjSlJQsFrug,1016 +anyio/_core/_sockets.py,sha256=RBXHcUqZt5gg_-OOfgHVv8uq2FSKk1uVUzTdpjBoI1o,34977 +anyio/_core/_streams.py,sha256=FczFwIgDpnkK0bODWJXMpsUJYdvAD04kaUaGzJU8DK0,1806 +anyio/_core/_subprocesses.py,sha256=EXm5igL7dj55iYkPlbYVAqtbqxJxjU-6OndSTIx9SRg,8047 +anyio/_core/_synchronization.py,sha256=MgVVqFzvt580tHC31LiOcq1G6aryut--xRG4Ff8KwxQ,20869 +anyio/_core/_tasks.py,sha256=pVB7K6AAulzUM8YgXAeqNZG44nSyZ1bYJjH8GznC00I,5435 +anyio/_core/_tempfile.py,sha256=lHb7CW4FyIlpkf5ADAf4VmLHCKwEHF9nxqNyBCFFUiA,19697 +anyio/_core/_testing.py,sha256=u7MPqGXwpTxqI7hclSdNA30z2GH1Nw258uwKvy_RfBg,2340 +anyio/_core/_typedattr.py,sha256=P4ozZikn3-DbpoYcvyghS_FOYAgbmUxeoU8-L_07pZM,2508 +anyio/abc/__init__.py,sha256=6mWhcl_pGXhrgZVHP_TCfMvIXIOp9mroEFM90fYCU_U,2869 +anyio/abc/__pycache__/__init__.cpython-312.pyc,, +anyio/abc/__pycache__/_eventloop.cpython-312.pyc,, +anyio/abc/__pycache__/_resources.cpython-312.pyc,, +anyio/abc/__pycache__/_sockets.cpython-312.pyc,, +anyio/abc/__pycache__/_streams.cpython-312.pyc,, +anyio/abc/__pycache__/_subprocesses.cpython-312.pyc,, +anyio/abc/__pycache__/_tasks.cpython-312.pyc,, +anyio/abc/__pycache__/_testing.cpython-312.pyc,, +anyio/abc/_eventloop.py,sha256=GlzgB3UJGgG6Kr7olpjOZ-o00PghecXuofVDQ_5611Q,10749 +anyio/abc/_resources.py,sha256=DrYvkNN1hH6Uvv5_5uKySvDsnknGVDe8FCKfko0VtN8,783 +anyio/abc/_sockets.py,sha256=ECTY0jLEF18gryANHR3vFzXzGdZ-xPwELq1QdgOb0Jo,13258 +anyio/abc/_streams.py,sha256=005GKSCXGprxnhucILboSqc2JFovECZk9m3p-qqxXVc,7640 +anyio/abc/_subprocesses.py,sha256=cumAPJTktOQtw63IqG0lDpyZqu_l1EElvQHMiwJgL08,2067 +anyio/abc/_tasks.py,sha256=KC7wrciE48AINOI-AhPutnFhe1ewfP7QnamFlDzqesQ,3721 +anyio/abc/_testing.py,sha256=tBJUzkSfOXJw23fe8qSJ03kJlShOYjjaEyFB6k6MYT8,1821 +anyio/from_thread.py,sha256=L-0w1HxJ6BSb-KuVi57k5Tkc3yzQrx3QK5tAxMPcY-0,19141 +anyio/functools.py,sha256=HWj7GBEmc0Z-mZg3uok7Z7ZJn0rEC_0Pzbt0nYUDaTQ,10973 +anyio/lowlevel.py,sha256=AyKLVK3LaWSoK39LkCKxE4_GDMLKZBNqTrLUgk63y80,5158 +anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/pytest_plugin.py,sha256=3jAFQn0jv_pyoWE2GBBlHaj9sqXj4e8vob0_hgrsXE8,10244 +anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/streams/__pycache__/__init__.cpython-312.pyc,, 
+anyio/streams/__pycache__/buffered.cpython-312.pyc,, +anyio/streams/__pycache__/file.cpython-312.pyc,, +anyio/streams/__pycache__/memory.cpython-312.pyc,, +anyio/streams/__pycache__/stapled.cpython-312.pyc,, +anyio/streams/__pycache__/text.cpython-312.pyc,, +anyio/streams/__pycache__/tls.cpython-312.pyc,, +anyio/streams/buffered.py,sha256=2R3PeJhe4EXrdYqz44Y6-Eg9R6DrmlsYrP36Ir43-po,6263 +anyio/streams/file.py,sha256=4WZ7XGz5WNu39FQHvqbe__TQ0HDP9OOhgO1mk9iVpVU,4470 +anyio/streams/memory.py,sha256=F0zwzvFJKAhX_LRZGoKzzqDC2oMM-f-yyTBrEYEGOaU,10740 +anyio/streams/stapled.py,sha256=T8Xqwf8K6EgURPxbt1N4i7A8BAk-gScv-GRhjLXIf_o,4390 +anyio/streams/text.py,sha256=BcVAGJw1VRvtIqnv-o0Rb0pwH7p8vwlvl21xHq522ag,5765 +anyio/streams/tls.py,sha256=Jpxy0Mfbcp1BxHCwE-YjSSFaLnIBbnnwur-excYThs4,15368 +anyio/to_interpreter.py,sha256=_mLngrMy97TMR6VbW4Y6YzDUk9ZuPcQMPlkuyRh3C9k,7100 +anyio/to_process.py,sha256=J7gAA_YOuoHqnpDAf5fm1Qu6kOmTzdFbiDNvnV755vk,9798 +anyio/to_thread.py,sha256=menEgXYmUV7Fjg_9WqCV95P9MAtQS8BzPGGcWB_QnfQ,2687 diff --git a/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/WHEEL new file mode 100644 index 0000000..e7fa31b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/entry_points.txt b/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/entry_points.txt new file mode 100644 index 0000000..44dd9bd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[pytest11] +anyio = anyio.pytest_plugin diff --git a/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/licenses/LICENSE b/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/licenses/LICENSE new file mode 100644 index 0000000..104eebf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/licenses/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2018 Alex Grönholm + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
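For orientation, the features listed in the anyio README excerpt above (task groups, structured concurrency, async sleep/checkpoints) map onto a small public API. The following is an editorial, minimal sketch using only documented anyio calls (anyio.run, create_task_group, start_soon, sleep); it assumes the anyio package is importable from this environment and is not itself part of any file in the diff::

    import anyio


    async def greet(name: str, delay: float) -> None:
        # sleep() is a checkpoint: it yields to the event loop so sibling tasks can run.
        await anyio.sleep(delay)
        print(f"hello from {name}")


    async def main() -> None:
        # A task group ("nursery" in trio terminology) owns its children: the
        # async with block only exits once every task started inside it has finished.
        async with anyio.create_task_group() as tg:
            tg.start_soon(greet, "first", 0.1)
            tg.start_soon(greet, "second", 0.2)


    if __name__ == "__main__":
        anyio.run(main)  # runs on the asyncio backend by default; backend="trio" is also supported
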
diff --git a/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/top_level.txt b/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/top_level.txt new file mode 100644 index 0000000..c77c069 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio-4.12.1.dist-info/top_level.txt @@ -0,0 +1 @@ +anyio diff --git a/.venv/lib/python3.12/site-packages/anyio/__init__.py b/.venv/lib/python3.12/site-packages/anyio/__init__.py new file mode 100644 index 0000000..d23c5a5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/__init__.py @@ -0,0 +1,111 @@ +from __future__ import annotations + +from ._core._contextmanagers import AsyncContextManagerMixin as AsyncContextManagerMixin +from ._core._contextmanagers import ContextManagerMixin as ContextManagerMixin +from ._core._eventloop import current_time as current_time +from ._core._eventloop import get_all_backends as get_all_backends +from ._core._eventloop import get_available_backends as get_available_backends +from ._core._eventloop import get_cancelled_exc_class as get_cancelled_exc_class +from ._core._eventloop import run as run +from ._core._eventloop import sleep as sleep +from ._core._eventloop import sleep_forever as sleep_forever +from ._core._eventloop import sleep_until as sleep_until +from ._core._exceptions import BrokenResourceError as BrokenResourceError +from ._core._exceptions import BrokenWorkerInterpreter as BrokenWorkerInterpreter +from ._core._exceptions import BrokenWorkerProcess as BrokenWorkerProcess +from ._core._exceptions import BusyResourceError as BusyResourceError +from ._core._exceptions import ClosedResourceError as ClosedResourceError +from ._core._exceptions import ConnectionFailed as ConnectionFailed +from ._core._exceptions import DelimiterNotFound as DelimiterNotFound +from ._core._exceptions import EndOfStream as EndOfStream +from ._core._exceptions import IncompleteRead as IncompleteRead +from ._core._exceptions import NoEventLoopError as NoEventLoopError +from ._core._exceptions import RunFinishedError as RunFinishedError +from ._core._exceptions import TypedAttributeLookupError as TypedAttributeLookupError +from ._core._exceptions import WouldBlock as WouldBlock +from ._core._fileio import AsyncFile as AsyncFile +from ._core._fileio import Path as Path +from ._core._fileio import open_file as open_file +from ._core._fileio import wrap_file as wrap_file +from ._core._resources import aclose_forcefully as aclose_forcefully +from ._core._signals import open_signal_receiver as open_signal_receiver +from ._core._sockets import TCPConnectable as TCPConnectable +from ._core._sockets import UNIXConnectable as UNIXConnectable +from ._core._sockets import as_connectable as as_connectable +from ._core._sockets import connect_tcp as connect_tcp +from ._core._sockets import connect_unix as connect_unix +from ._core._sockets import create_connected_udp_socket as create_connected_udp_socket +from ._core._sockets import ( + create_connected_unix_datagram_socket as create_connected_unix_datagram_socket, +) +from ._core._sockets import create_tcp_listener as create_tcp_listener +from ._core._sockets import create_udp_socket as create_udp_socket +from ._core._sockets import create_unix_datagram_socket as create_unix_datagram_socket +from ._core._sockets import create_unix_listener as create_unix_listener +from ._core._sockets import getaddrinfo as getaddrinfo +from ._core._sockets import getnameinfo as getnameinfo +from ._core._sockets import notify_closing as notify_closing +from ._core._sockets import 
wait_readable as wait_readable +from ._core._sockets import wait_socket_readable as wait_socket_readable +from ._core._sockets import wait_socket_writable as wait_socket_writable +from ._core._sockets import wait_writable as wait_writable +from ._core._streams import create_memory_object_stream as create_memory_object_stream +from ._core._subprocesses import open_process as open_process +from ._core._subprocesses import run_process as run_process +from ._core._synchronization import CapacityLimiter as CapacityLimiter +from ._core._synchronization import ( + CapacityLimiterStatistics as CapacityLimiterStatistics, +) +from ._core._synchronization import Condition as Condition +from ._core._synchronization import ConditionStatistics as ConditionStatistics +from ._core._synchronization import Event as Event +from ._core._synchronization import EventStatistics as EventStatistics +from ._core._synchronization import Lock as Lock +from ._core._synchronization import LockStatistics as LockStatistics +from ._core._synchronization import ResourceGuard as ResourceGuard +from ._core._synchronization import Semaphore as Semaphore +from ._core._synchronization import SemaphoreStatistics as SemaphoreStatistics +from ._core._tasks import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED +from ._core._tasks import CancelScope as CancelScope +from ._core._tasks import create_task_group as create_task_group +from ._core._tasks import current_effective_deadline as current_effective_deadline +from ._core._tasks import fail_after as fail_after +from ._core._tasks import move_on_after as move_on_after +from ._core._tempfile import NamedTemporaryFile as NamedTemporaryFile +from ._core._tempfile import SpooledTemporaryFile as SpooledTemporaryFile +from ._core._tempfile import TemporaryDirectory as TemporaryDirectory +from ._core._tempfile import TemporaryFile as TemporaryFile +from ._core._tempfile import gettempdir as gettempdir +from ._core._tempfile import gettempdirb as gettempdirb +from ._core._tempfile import mkdtemp as mkdtemp +from ._core._tempfile import mkstemp as mkstemp +from ._core._testing import TaskInfo as TaskInfo +from ._core._testing import get_current_task as get_current_task +from ._core._testing import get_running_tasks as get_running_tasks +from ._core._testing import wait_all_tasks_blocked as wait_all_tasks_blocked +from ._core._typedattr import TypedAttributeProvider as TypedAttributeProvider +from ._core._typedattr import TypedAttributeSet as TypedAttributeSet +from ._core._typedattr import typed_attribute as typed_attribute + +# Re-export imports so they look like they live directly in this package +for __value in list(locals().values()): + if getattr(__value, "__module__", "").startswith("anyio."): + __value.__module__ = __name__ + + +del __value + + +def __getattr__(attr: str) -> type[BrokenWorkerInterpreter]: + """Support deprecated aliases.""" + if attr == "BrokenWorkerIntepreter": + import warnings + + warnings.warn( + "The 'BrokenWorkerIntepreter' alias is deprecated, use 'BrokenWorkerInterpreter' instead.", + DeprecationWarning, + stacklevel=2, + ) + return BrokenWorkerInterpreter + + raise AttributeError(f"module {__name__!r} has no attribute {attr!r}") diff --git a/.venv/lib/python3.12/site-packages/anyio/_backends/__init__.py b/.venv/lib/python3.12/site-packages/anyio/_backends/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py b/.venv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py new file mode 
100644 index 0000000..8ff009e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py @@ -0,0 +1,2980 @@ +from __future__ import annotations + +import array +import asyncio +import concurrent.futures +import contextvars +import math +import os +import socket +import sys +import threading +import weakref +from asyncio import ( + AbstractEventLoop, + CancelledError, + all_tasks, + create_task, + current_task, + get_running_loop, + sleep, +) +from asyncio.base_events import _run_until_complete_cb # type: ignore[attr-defined] +from collections import OrderedDict, deque +from collections.abc import ( + AsyncGenerator, + AsyncIterator, + Awaitable, + Callable, + Collection, + Coroutine, + Iterable, + Sequence, +) +from concurrent.futures import Future +from contextlib import AbstractContextManager, suppress +from contextvars import Context, copy_context +from dataclasses import dataclass, field +from functools import partial, wraps +from inspect import ( + CORO_RUNNING, + CORO_SUSPENDED, + getcoroutinestate, + iscoroutine, +) +from io import IOBase +from os import PathLike +from queue import Queue +from signal import Signals +from socket import AddressFamily, SocketKind +from threading import Thread +from types import CodeType, TracebackType +from typing import ( + IO, + TYPE_CHECKING, + Any, + Optional, + TypeVar, + cast, +) +from weakref import WeakKeyDictionary + +from .. import ( + CapacityLimiterStatistics, + EventStatistics, + LockStatistics, + TaskInfo, + abc, +) +from .._core._eventloop import ( + claim_worker_thread, + set_current_async_library, + threadlocals, +) +from .._core._exceptions import ( + BrokenResourceError, + BusyResourceError, + ClosedResourceError, + EndOfStream, + RunFinishedError, + WouldBlock, + iterate_exceptions, +) +from .._core._sockets import convert_ipv6_sockaddr +from .._core._streams import create_memory_object_stream +from .._core._synchronization import ( + CapacityLimiter as BaseCapacityLimiter, +) +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import Lock as BaseLock +from .._core._synchronization import ( + ResourceGuard, + SemaphoreStatistics, +) +from .._core._synchronization import Semaphore as BaseSemaphore +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import ( + AsyncBackend, + IPSockAddrType, + SocketListener, + UDPPacketType, + UNIXDatagramPacketType, +) +from ..abc._eventloop import StrOrBytesPath +from ..lowlevel import RunVar +from ..streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream + +if TYPE_CHECKING: + from _typeshed import FileDescriptorLike +else: + FileDescriptorLike = object + +if sys.version_info >= (3, 10): + from typing import ParamSpec +else: + from typing_extensions import ParamSpec + +if sys.version_info >= (3, 11): + from asyncio import Runner + from typing import TypeVarTuple, Unpack +else: + import contextvars + import enum + import signal + from asyncio import coroutines, events, exceptions, tasks + + from exceptiongroup import BaseExceptionGroup + from typing_extensions import TypeVarTuple, Unpack + + class _State(enum.Enum): + CREATED = "created" + INITIALIZED = "initialized" + CLOSED = "closed" + + class Runner: + # Copied from CPython 3.11 + def __init__( + self, + *, + debug: bool | None = None, + loop_factory: Callable[[], AbstractEventLoop] | None = None, + ): + self._state = _State.CREATED + self._debug = debug + self._loop_factory = loop_factory + self._loop: AbstractEventLoop | None = None + self._context 
= None + self._interrupt_count = 0 + self._set_event_loop = False + + def __enter__(self) -> Runner: + self._lazy_init() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def close(self) -> None: + """Shutdown and close event loop.""" + loop = self._loop + if self._state is not _State.INITIALIZED or loop is None: + return + try: + _cancel_all_tasks(loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + if hasattr(loop, "shutdown_default_executor"): + loop.run_until_complete(loop.shutdown_default_executor()) + else: + loop.run_until_complete(_shutdown_default_executor(loop)) + finally: + if self._set_event_loop: + events.set_event_loop(None) + loop.close() + self._loop = None + self._state = _State.CLOSED + + def get_loop(self) -> AbstractEventLoop: + """Return embedded event loop.""" + self._lazy_init() + return self._loop + + def run(self, coro: Coroutine[T_Retval], *, context=None) -> T_Retval: + """Run a coroutine inside the embedded event loop.""" + if not coroutines.iscoroutine(coro): + raise ValueError(f"a coroutine was expected, got {coro!r}") + + if events._get_running_loop() is not None: + # fail fast with short traceback + raise RuntimeError( + "Runner.run() cannot be called from a running event loop" + ) + + self._lazy_init() + + if context is None: + context = self._context + task = context.run(self._loop.create_task, coro) + + if ( + threading.current_thread() is threading.main_thread() + and signal.getsignal(signal.SIGINT) is signal.default_int_handler + ): + sigint_handler = partial(self._on_sigint, main_task=task) + try: + signal.signal(signal.SIGINT, sigint_handler) + except ValueError: + # `signal.signal` may throw if `threading.main_thread` does + # not support signals (e.g. 
embedded interpreter with signals + # not registered - see gh-91880) + sigint_handler = None + else: + sigint_handler = None + + self._interrupt_count = 0 + try: + return self._loop.run_until_complete(task) + except exceptions.CancelledError: + if self._interrupt_count > 0: + uncancel = getattr(task, "uncancel", None) + if uncancel is not None and uncancel() == 0: + raise KeyboardInterrupt # noqa: B904 + raise # CancelledError + finally: + if ( + sigint_handler is not None + and signal.getsignal(signal.SIGINT) is sigint_handler + ): + signal.signal(signal.SIGINT, signal.default_int_handler) + + def _lazy_init(self) -> None: + if self._state is _State.CLOSED: + raise RuntimeError("Runner is closed") + if self._state is _State.INITIALIZED: + return + if self._loop_factory is None: + self._loop = events.new_event_loop() + if not self._set_event_loop: + # Call set_event_loop only once to avoid calling + # attach_loop multiple times on child watchers + events.set_event_loop(self._loop) + self._set_event_loop = True + else: + self._loop = self._loop_factory() + if self._debug is not None: + self._loop.set_debug(self._debug) + self._context = contextvars.copy_context() + self._state = _State.INITIALIZED + + def _on_sigint(self, signum, frame, main_task: asyncio.Task) -> None: + self._interrupt_count += 1 + if self._interrupt_count == 1 and not main_task.done(): + main_task.cancel() + # wakeup loop if it is blocked by select() with long timeout + self._loop.call_soon_threadsafe(lambda: None) + return + raise KeyboardInterrupt() + + def _cancel_all_tasks(loop: AbstractEventLoop) -> None: + to_cancel = tasks.all_tasks(loop) + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + loop.run_until_complete(tasks.gather(*to_cancel, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler( + { + "message": "unhandled exception during asyncio.run() shutdown", + "exception": task.exception(), + "task": task, + } + ) + + async def _shutdown_default_executor(loop: AbstractEventLoop) -> None: + """Schedule the shutdown of the default executor.""" + + def _do_shutdown(future: asyncio.futures.Future) -> None: + try: + loop._default_executor.shutdown(wait=True) # type: ignore[attr-defined] + loop.call_soon_threadsafe(future.set_result, None) + except Exception as ex: + loop.call_soon_threadsafe(future.set_exception, ex) + + loop._executor_shutdown_called = True + if loop._default_executor is None: + return + future = loop.create_future() + thread = threading.Thread(target=_do_shutdown, args=(future,)) + thread.start() + try: + await future + finally: + thread.join() + + +T_Retval = TypeVar("T_Retval") +T_contra = TypeVar("T_contra", contravariant=True) +PosArgsT = TypeVarTuple("PosArgsT") +P = ParamSpec("P") + +_root_task: RunVar[asyncio.Task | None] = RunVar("_root_task") + + +def find_root_task() -> asyncio.Task: + root_task = _root_task.get(None) + if root_task is not None and not root_task.done(): + return root_task + + # Look for a task that has been started via run_until_complete() + for task in all_tasks(): + if task._callbacks and not task.done(): + callbacks = [cb for cb, context in task._callbacks] + for cb in callbacks: + if ( + cb is _run_until_complete_cb + or getattr(cb, "__module__", None) == "uvloop.loop" + ): + _root_task.set(task) + return task + + # Look up the topmost task in the AnyIO task tree, if possible + task = cast(asyncio.Task, current_task()) + state = 
_task_states.get(task) + if state: + cancel_scope = state.cancel_scope + while cancel_scope and cancel_scope._parent_scope is not None: + cancel_scope = cancel_scope._parent_scope + + if cancel_scope is not None: + return cast(asyncio.Task, cancel_scope._host_task) + + return task + + +def get_callable_name(func: Callable) -> str: + module = getattr(func, "__module__", None) + qualname = getattr(func, "__qualname__", None) + return ".".join([x for x in (module, qualname) if x]) + + +# +# Event loop +# + +_run_vars: WeakKeyDictionary[asyncio.AbstractEventLoop, Any] = WeakKeyDictionary() + + +def _task_started(task: asyncio.Task) -> bool: + """Return ``True`` if the task has been started and has not finished.""" + # The task coro should never be None here, as we never add finished tasks to the + # task list + coro = task.get_coro() + assert coro is not None + try: + return getcoroutinestate(coro) in (CORO_RUNNING, CORO_SUSPENDED) + except AttributeError: + # task coro is async_genenerator_asend https://bugs.python.org/issue37771 + raise Exception(f"Cannot determine if task {task} has started or not") from None + + +# +# Timeouts and cancellation +# + + +def is_anyio_cancellation(exc: CancelledError) -> bool: + # Sometimes third party frameworks catch a CancelledError and raise a new one, so as + # a workaround we have to look at the previous ones in __context__ too for a + # matching cancel message + while True: + if ( + exc.args + and isinstance(exc.args[0], str) + and exc.args[0].startswith("Cancelled via cancel scope ") + ): + return True + + if isinstance(exc.__context__, CancelledError): + exc = exc.__context__ + continue + + return False + + +class CancelScope(BaseCancelScope): + def __new__( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return object.__new__(cls) + + def __init__(self, deadline: float = math.inf, shield: bool = False): + self._deadline = deadline + self._shield = shield + self._parent_scope: CancelScope | None = None + self._child_scopes: set[CancelScope] = set() + self._cancel_called = False + self._cancel_reason: str | None = None + self._cancelled_caught = False + self._active = False + self._timeout_handle: asyncio.TimerHandle | None = None + self._cancel_handle: asyncio.Handle | None = None + self._tasks: set[asyncio.Task] = set() + self._host_task: asyncio.Task | None = None + if sys.version_info >= (3, 11): + self._pending_uncancellations: int | None = 0 + else: + self._pending_uncancellations = None + + def __enter__(self) -> CancelScope: + if self._active: + raise RuntimeError( + "Each CancelScope may only be used for a single 'with' block" + ) + + self._host_task = host_task = cast(asyncio.Task, current_task()) + self._tasks.add(host_task) + try: + task_state = _task_states[host_task] + except KeyError: + task_state = TaskState(None, self) + _task_states[host_task] = task_state + else: + self._parent_scope = task_state.cancel_scope + task_state.cancel_scope = self + if self._parent_scope is not None: + # If using an eager task factory, the parent scope may not even contain + # the host task + self._parent_scope._child_scopes.add(self) + self._parent_scope._tasks.discard(host_task) + + self._timeout() + self._active = True + + # Start cancelling the host task if the scope was cancelled before entering + if self._cancel_called: + self._deliver_cancellation(self) + + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + del 
exc_tb + + if not self._active: + raise RuntimeError("This cancel scope is not active") + if current_task() is not self._host_task: + raise RuntimeError( + "Attempted to exit cancel scope in a different task than it was " + "entered in" + ) + + assert self._host_task is not None + host_task_state = _task_states.get(self._host_task) + if host_task_state is None or host_task_state.cancel_scope is not self: + raise RuntimeError( + "Attempted to exit a cancel scope that isn't the current tasks's " + "current cancel scope" + ) + + try: + self._active = False + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._tasks.remove(self._host_task) + if self._parent_scope is not None: + self._parent_scope._child_scopes.remove(self) + self._parent_scope._tasks.add(self._host_task) + + host_task_state.cancel_scope = self._parent_scope + + # Restart the cancellation effort in the closest visible, cancelled parent + # scope if necessary + self._restart_cancellation_in_parent() + + # We only swallow the exception iff it was an AnyIO CancelledError, either + # directly as exc_val or inside an exception group and there are no cancelled + # parent cancel scopes visible to us here + if self._cancel_called and not self._parent_cancellation_is_visible_to_us: + # For each level-cancel() call made on the host task, call uncancel() + while self._pending_uncancellations: + self._host_task.uncancel() + self._pending_uncancellations -= 1 + + # Update cancelled_caught and check for exceptions we must not swallow + cannot_swallow_exc_val = False + if exc_val is not None: + for exc in iterate_exceptions(exc_val): + if isinstance(exc, CancelledError) and is_anyio_cancellation( + exc + ): + self._cancelled_caught = True + else: + cannot_swallow_exc_val = True + + return self._cancelled_caught and not cannot_swallow_exc_val + else: + if self._pending_uncancellations: + assert self._parent_scope is not None + assert self._parent_scope._pending_uncancellations is not None + self._parent_scope._pending_uncancellations += ( + self._pending_uncancellations + ) + self._pending_uncancellations = 0 + + return False + finally: + self._host_task = None + del exc_val + + @property + def _effectively_cancelled(self) -> bool: + cancel_scope: CancelScope | None = self + while cancel_scope is not None: + if cancel_scope._cancel_called: + return True + + if cancel_scope.shield: + return False + + cancel_scope = cancel_scope._parent_scope + + return False + + @property + def _parent_cancellation_is_visible_to_us(self) -> bool: + return ( + self._parent_scope is not None + and not self.shield + and self._parent_scope._effectively_cancelled + ) + + def _timeout(self) -> None: + if self._deadline != math.inf: + loop = get_running_loop() + if loop.time() >= self._deadline: + self.cancel("deadline exceeded") + else: + self._timeout_handle = loop.call_at(self._deadline, self._timeout) + + def _deliver_cancellation(self, origin: CancelScope) -> bool: + """ + Deliver cancellation to directly contained tasks and nested cancel scopes. + + Schedule another run at the end if we still have tasks eligible for + cancellation. 
+ + :param origin: the cancel scope that originated the cancellation + :return: ``True`` if the delivery needs to be retried on the next cycle + + """ + should_retry = False + current = current_task() + for task in self._tasks: + should_retry = True + if task._must_cancel: # type: ignore[attr-defined] + continue + + # The task is eligible for cancellation if it has started + if task is not current and (task is self._host_task or _task_started(task)): + waiter = task._fut_waiter # type: ignore[attr-defined] + if not isinstance(waiter, asyncio.Future) or not waiter.done(): + task.cancel(origin._cancel_reason) + if ( + task is origin._host_task + and origin._pending_uncancellations is not None + ): + origin._pending_uncancellations += 1 + + # Deliver cancellation to child scopes that aren't shielded or running their own + # cancellation callbacks + for scope in self._child_scopes: + if not scope._shield and not scope.cancel_called: + should_retry = scope._deliver_cancellation(origin) or should_retry + + # Schedule another callback if there are still tasks left + if origin is self: + if should_retry: + self._cancel_handle = get_running_loop().call_soon( + self._deliver_cancellation, origin + ) + else: + self._cancel_handle = None + + return should_retry + + def _restart_cancellation_in_parent(self) -> None: + """ + Restart the cancellation effort in the closest directly cancelled parent scope. + + """ + scope = self._parent_scope + while scope is not None: + if scope._cancel_called: + if scope._cancel_handle is None: + scope._deliver_cancellation(scope) + + break + + # No point in looking beyond any shielded scope + if scope._shield: + break + + scope = scope._parent_scope + + def cancel(self, reason: str | None = None) -> None: + if not self._cancel_called: + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._cancel_called = True + self._cancel_reason = f"Cancelled via cancel scope {id(self):x}" + if task := current_task(): + self._cancel_reason += f" by {task}" + + if reason: + self._cancel_reason += f"; reason: {reason}" + + if self._host_task is not None: + self._deliver_cancellation(self) + + @property + def deadline(self) -> float: + return self._deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self._deadline = float(value) + if self._timeout_handle is not None: + self._timeout_handle.cancel() + self._timeout_handle = None + + if self._active and not self._cancel_called: + self._timeout() + + @property + def cancel_called(self) -> bool: + return self._cancel_called + + @property + def cancelled_caught(self) -> bool: + return self._cancelled_caught + + @property + def shield(self) -> bool: + return self._shield + + @shield.setter + def shield(self, value: bool) -> None: + if self._shield != value: + self._shield = value + if not value: + self._restart_cancellation_in_parent() + + +# +# Task states +# + + +class TaskState: + """ + Encapsulates auxiliary task information that cannot be added to the Task instance + itself because there are no guarantees about its implementation. 
+ """ + + __slots__ = "parent_id", "cancel_scope", "__weakref__" + + def __init__(self, parent_id: int | None, cancel_scope: CancelScope | None): + self.parent_id = parent_id + self.cancel_scope = cancel_scope + + +_task_states: WeakKeyDictionary[asyncio.Task, TaskState] = WeakKeyDictionary() + + +# +# Task groups +# + + +class _AsyncioTaskStatus(abc.TaskStatus): + def __init__(self, future: asyncio.Future, parent_id: int): + self._future = future + self._parent_id = parent_id + + def started(self, value: T_contra | None = None) -> None: + try: + self._future.set_result(value) + except asyncio.InvalidStateError: + if not self._future.cancelled(): + raise RuntimeError( + "called 'started' twice on the same task status" + ) from None + + task = cast(asyncio.Task, current_task()) + _task_states[task].parent_id = self._parent_id + + +if sys.version_info >= (3, 12): + _eager_task_factory_code: CodeType | None = asyncio.eager_task_factory.__code__ +else: + _eager_task_factory_code = None + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self.cancel_scope: CancelScope = CancelScope() + self._active = False + self._exceptions: list[BaseException] = [] + self._tasks: set[asyncio.Task] = set() + self._on_completed_fut: asyncio.Future[None] | None = None + + async def __aenter__(self) -> TaskGroup: + self.cancel_scope.__enter__() + self._active = True + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + try: + if exc_val is not None: + self.cancel_scope.cancel() + if not isinstance(exc_val, CancelledError): + self._exceptions.append(exc_val) + + loop = get_running_loop() + try: + if self._tasks: + with CancelScope() as wait_scope: + while self._tasks: + self._on_completed_fut = loop.create_future() + + try: + await self._on_completed_fut + except CancelledError as exc: + # Shield the scope against further cancellation attempts, + # as they're not productive (#695) + wait_scope.shield = True + self.cancel_scope.cancel() + + # Set exc_val from the cancellation exception if it was + # previously unset. However, we should not replace a native + # cancellation exception with one raise by a cancel scope. + if exc_val is None or ( + isinstance(exc_val, CancelledError) + and not is_anyio_cancellation(exc) + ): + exc_val = exc + + self._on_completed_fut = None + else: + # If there are no child tasks to wait on, run at least one checkpoint + # anyway + await AsyncIOBackend.cancel_shielded_checkpoint() + + self._active = False + if self._exceptions: + # The exception that got us here should already have been + # added to self._exceptions so it's ok to break exception + # chaining and avoid adding a "During handling of above..." + # for each nesting level. 
+ raise BaseExceptionGroup( + "unhandled errors in a TaskGroup", self._exceptions + ) from None + elif exc_val: + raise exc_val + except BaseException as exc: + if self.cancel_scope.__exit__(type(exc), exc, exc.__traceback__): + return True + + raise + + return self.cancel_scope.__exit__(exc_type, exc_val, exc_tb) + finally: + del exc_val, exc_tb, self._exceptions + + def _spawn( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + args: tuple[Unpack[PosArgsT]], + name: object, + task_status_future: asyncio.Future | None = None, + ) -> asyncio.Task: + def task_done(_task: asyncio.Task) -> None: + if sys.version_info >= (3, 14) and self.cancel_scope._host_task is not None: + asyncio.future_discard_from_awaited_by( + _task, self.cancel_scope._host_task + ) + + task_state = _task_states[_task] + assert task_state.cancel_scope is not None + assert _task in task_state.cancel_scope._tasks + task_state.cancel_scope._tasks.remove(_task) + self._tasks.remove(task) + del _task_states[_task] + + if self._on_completed_fut is not None and not self._tasks: + try: + self._on_completed_fut.set_result(None) + except asyncio.InvalidStateError: + pass + + try: + exc = _task.exception() + except CancelledError as e: + while isinstance(e.__context__, CancelledError): + e = e.__context__ + + exc = e + + if exc is not None: + # The future can only be in the cancelled state if the host task was + # cancelled, so return immediately instead of adding one more + # CancelledError to the exceptions list + if task_status_future is not None and task_status_future.cancelled(): + return + + if task_status_future is None or task_status_future.done(): + if not isinstance(exc, CancelledError): + self._exceptions.append(exc) + + if not self.cancel_scope._effectively_cancelled: + self.cancel_scope.cancel() + else: + task_status_future.set_exception(exc) + elif task_status_future is not None and not task_status_future.done(): + task_status_future.set_exception( + RuntimeError("Child exited without calling task_status.started()") + ) + + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + kwargs = {} + if task_status_future: + parent_id = id(current_task()) + kwargs["task_status"] = _AsyncioTaskStatus( + task_status_future, id(self.cancel_scope._host_task) + ) + else: + parent_id = id(self.cancel_scope._host_task) + + coro = func(*args, **kwargs) + if not iscoroutine(coro): + prefix = f"{func.__module__}." 
if hasattr(func, "__module__") else "" + raise TypeError( + f"Expected {prefix}{func.__qualname__}() to return a coroutine, but " + f"the return value ({coro!r}) is not a coroutine object" + ) + + name = get_callable_name(func) if name is None else str(name) + loop = asyncio.get_running_loop() + if ( + (factory := loop.get_task_factory()) + and getattr(factory, "__code__", None) is _eager_task_factory_code + and (closure := getattr(factory, "__closure__", None)) + ): + custom_task_constructor = closure[0].cell_contents + task = custom_task_constructor(coro, loop=loop, name=name) + else: + task = create_task(coro, name=name) + + # Make the spawned task inherit the task group's cancel scope + _task_states[task] = TaskState( + parent_id=parent_id, cancel_scope=self.cancel_scope + ) + self.cancel_scope._tasks.add(task) + self._tasks.add(task) + if sys.version_info >= (3, 14) and self.cancel_scope._host_task is not None: + asyncio.future_add_to_awaited_by(task, self.cancel_scope._host_task) + + task.add_done_callback(task_done) + return task + + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + self._spawn(func, args, name) + + async def start( + self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None + ) -> Any: + future: asyncio.Future = asyncio.Future() + task = self._spawn(func, args, name, future) + + # If the task raises an exception after sending a start value without a switch + # point between, the task group is cancelled and this method never proceeds to + # process the completed future. That's why we have to have a shielded cancel + # scope here. + try: + return await future + except CancelledError: + # Cancel the task and wait for it to exit before returning + task.cancel() + with CancelScope(shield=True), suppress(CancelledError): + await task + + raise + + +# +# Threads +# + +_Retval_Queue_Type = tuple[Optional[T_Retval], Optional[BaseException]] + + +class WorkerThread(Thread): + MAX_IDLE_TIME = 10 # seconds + + def __init__( + self, + root_task: asyncio.Task, + workers: set[WorkerThread], + idle_workers: deque[WorkerThread], + ): + super().__init__(name="AnyIO worker thread") + self.root_task = root_task + self.workers = workers + self.idle_workers = idle_workers + self.loop = root_task._loop + self.queue: Queue[ + tuple[Context, Callable, tuple, asyncio.Future, CancelScope] | None + ] = Queue(2) + self.idle_since = AsyncIOBackend.current_time() + self.stopping = False + + def _report_result( + self, future: asyncio.Future, result: Any, exc: BaseException | None + ) -> None: + self.idle_since = AsyncIOBackend.current_time() + if not self.stopping: + self.idle_workers.append(self) + + if not future.cancelled(): + if exc is not None: + if isinstance(exc, StopIteration): + new_exc = RuntimeError("coroutine raised StopIteration") + new_exc.__cause__ = exc + exc = new_exc + + future.set_exception(exc) + else: + future.set_result(result) + + def run(self) -> None: + with claim_worker_thread(AsyncIOBackend, self.loop): + while True: + item = self.queue.get() + if item is None: + # Shutdown command received + return + + context, func, args, future, cancel_scope = item + if not future.cancelled(): + result = None + exception: BaseException | None = None + threadlocals.current_cancel_scope = cancel_scope + try: + result = context.run(func, *args) + except BaseException as exc: + exception = exc + finally: + del threadlocals.current_cancel_scope + + if not 
self.loop.is_closed(): + self.loop.call_soon_threadsafe( + self._report_result, future, result, exception + ) + + del result, exception + + self.queue.task_done() + del item, context, func, args, future, cancel_scope + + def stop(self, f: asyncio.Task | None = None) -> None: + self.stopping = True + self.queue.put_nowait(None) + self.workers.discard(self) + try: + self.idle_workers.remove(self) + except ValueError: + pass + + +_threadpool_idle_workers: RunVar[deque[WorkerThread]] = RunVar( + "_threadpool_idle_workers" +) +_threadpool_workers: RunVar[set[WorkerThread]] = RunVar("_threadpool_workers") + + +# +# Subprocesses +# + + +@dataclass(eq=False) +class StreamReaderWrapper(abc.ByteReceiveStream): + _stream: asyncio.StreamReader + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._stream.read(max_bytes) + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + self._stream.set_exception(ClosedResourceError()) + await AsyncIOBackend.checkpoint() + + +@dataclass(eq=False) +class StreamWriterWrapper(abc.ByteSendStream): + _stream: asyncio.StreamWriter + _closed: bool = field(init=False, default=False) + + async def send(self, item: bytes) -> None: + await AsyncIOBackend.checkpoint_if_cancelled() + stream_paused = self._stream._protocol._paused # type: ignore[attr-defined] + try: + self._stream.write(item) + await self._stream.drain() + except (ConnectionResetError, BrokenPipeError, RuntimeError) as exc: + # If closed by us and/or the peer: + # * on stdlib, drain() raises ConnectionResetError or BrokenPipeError + # * on uvloop and Winloop, write() eventually starts raising RuntimeError + if self._closed: + raise ClosedResourceError from exc + elif self._stream.is_closing(): + raise BrokenResourceError from exc + + raise + + if not stream_paused: + await AsyncIOBackend.cancel_shielded_checkpoint() + + async def aclose(self) -> None: + self._closed = True + self._stream.close() + await AsyncIOBackend.checkpoint() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: asyncio.subprocess.Process + _stdin: StreamWriterWrapper | None + _stdout: StreamReaderWrapper | None + _stderr: StreamReaderWrapper | None + + async def aclose(self) -> None: + with CancelScope(shield=True) as scope: + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + scope.shield = False + try: + await self.wait() + except BaseException: + scope.shield = True + self.kill() + await self.wait() + raise + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: int) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> int | None: + return self._process.returncode + + @property + def stdin(self) -> abc.ByteSendStream | None: + return self._stdin + + @property + def stdout(self) -> abc.ByteReceiveStream | None: + return self._stdout + + @property + def stderr(self) -> abc.ByteReceiveStream | None: + return self._stderr + + +def _forcibly_shutdown_process_pool_on_exit( + workers: set[Process], _task: object +) -> None: + """ + Forcibly shuts down worker processes belonging to this event loop.""" + child_watcher: asyncio.AbstractChildWatcher | None = None # type: ignore[name-defined] + if sys.version_info < (3, 12): + 
try: + child_watcher = asyncio.get_event_loop_policy().get_child_watcher() + except NotImplementedError: + pass + + # Close as much as possible (w/o async/await) to avoid warnings + for process in workers.copy(): + if process.returncode is None: + continue + + process._stdin._stream._transport.close() # type: ignore[union-attr] + process._stdout._stream._transport.close() # type: ignore[union-attr] + process._stderr._stream._transport.close() # type: ignore[union-attr] + process.kill() + if child_watcher: + child_watcher.remove_child_handler(process.pid) + + +async def _shutdown_process_pool_on_exit(workers: set[abc.Process]) -> None: + """ + Shuts down worker processes belonging to this event loop. + + NOTE: this only works when the event loop was started using asyncio.run() or + anyio.run(). + + """ + process: abc.Process + try: + await sleep(math.inf) + except asyncio.CancelledError: + workers = workers.copy() + for process in workers: + if process.returncode is None: + process.kill() + + for process in workers: + await process.aclose() + + +# +# Sockets and networking +# + + +class StreamProtocol(asyncio.Protocol): + read_queue: deque[bytes] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Exception | None = None + is_at_eof: bool = False + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque() + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + cast(asyncio.Transport, transport).set_write_buffer_limits(0) + + def connection_lost(self, exc: Exception | None) -> None: + if exc: + self.exception = BrokenResourceError() + self.exception.__cause__ = exc + + self.read_event.set() + self.write_event.set() + + def data_received(self, data: bytes) -> None: + # ProactorEventloop sometimes sends bytearray instead of bytes + self.read_queue.append(bytes(data)) + self.read_event.set() + + def eof_received(self) -> bool | None: + self.is_at_eof = True + self.read_event.set() + return True + + def pause_writing(self) -> None: + self.write_event = asyncio.Event() + + def resume_writing(self) -> None: + self.write_event.set() + + +class DatagramProtocol(asyncio.DatagramProtocol): + read_queue: deque[tuple[bytes, IPSockAddrType]] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Exception | None = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque(maxlen=100) # arbitrary value + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + + def connection_lost(self, exc: Exception | None) -> None: + self.read_event.set() + self.write_event.set() + + def datagram_received(self, data: bytes, addr: IPSockAddrType) -> None: + addr = convert_ipv6_sockaddr(addr) + self.read_queue.append((data, addr)) + self.read_event.set() + + def error_received(self, exc: Exception) -> None: + self.exception = exc + + def pause_writing(self) -> None: + self.write_event.clear() + + def resume_writing(self) -> None: + self.write_event.set() + + +class SocketStream(abc.SocketStream): + def __init__(self, transport: asyncio.Transport, protocol: StreamProtocol): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def receive(self, max_bytes: int = 65536) -> bytes: + 
with self._receive_guard: + if ( + not self._protocol.read_event.is_set() + and not self._transport.is_closing() + and not self._protocol.is_at_eof + ): + self._transport.resume_reading() + await self._protocol.read_event.wait() + self._transport.pause_reading() + else: + await AsyncIOBackend.checkpoint() + + try: + chunk = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + elif self._protocol.exception: + raise self._protocol.exception from None + else: + raise EndOfStream from None + + if len(chunk) > max_bytes: + # Split the oversized chunk + chunk, leftover = chunk[:max_bytes], chunk[max_bytes:] + self._protocol.read_queue.appendleft(leftover) + + # If the read queue is empty, clear the flag so that the next call will + # block until data is available + if not self._protocol.read_queue: + self._protocol.read_event.clear() + + return chunk + + async def send(self, item: bytes) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + + if self._closed: + raise ClosedResourceError + elif self._protocol.exception is not None: + raise self._protocol.exception + + try: + self._transport.write(item) + except RuntimeError as exc: + if self._transport.is_closing(): + raise BrokenResourceError from exc + else: + raise + + await self._protocol.write_event.wait() + + async def send_eof(self) -> None: + try: + self._transport.write_eof() + except OSError: + pass + + async def aclose(self) -> None: + self._closed = True + if not self._transport.is_closing(): + try: + self._transport.write_eof() + except OSError: + pass + + self._transport.close() + await sleep(0) + self._transport.abort() + + +class _RawSocketMixin: + _receive_future: asyncio.Future | None = None + _send_future: asyncio.Future | None = None + _closing = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + def _wait_until_readable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._receive_future + loop.remove_reader(self.__raw_socket) + + f = self._receive_future = asyncio.Future() + loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + def _wait_until_writable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._send_future + loop.remove_writer(self.__raw_socket) + + f = self._send_future = asyncio.Future() + loop.add_writer(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + async def aclose(self) -> None: + if not self._closing: + self._closing = True + if self.__raw_socket.fileno() != -1: + self.__raw_socket.close() + + if self._receive_future: + self._receive_future.set_result(None) + if self._send_future: + self._send_future.set_result(None) + + +class UNIXSocketStream(_RawSocketMixin, abc.UNIXSocketStream): + async def send_eof(self) -> None: + with self._send_guard: + self._raw_socket.shutdown(socket.SHUT_WR) + + async def receive(self, max_bytes: int = 65536) -> bytes: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recv(max_bytes) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise 
ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = self._raw_socket.send(view) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + view = view[bytes_sent:] + + async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError("msglen must be a non-negative integer") + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError("maxfds must be a positive integer") + + loop = get_running_loop() + fds = array.array("i") + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + message, ancdata, flags, addr = self._raw_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize) + ) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError( + f"Received unexpected ancillary data; message = {message!r}, " + f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" + ) + + fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: + if not message: + raise ValueError("message must not be empty") + if not fds: + raise ValueError("fds must not be empty") + + loop = get_running_loop() + filenos: list[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + # The ignore can be removed after mypy picks up + # https://github.com/python/typeshed/pull/5545 + self._raw_socket.sendmsg( + [message], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fdarray)] + ) + break + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + +class TCPSocketListener(abc.SocketListener): + _accept_scope: CancelScope | None = None + _closed = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = cast(asyncio.BaseEventLoop, get_running_loop()) + self._accept_guard = ResourceGuard("accepting connections from") + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + async def accept(self) -> abc.SocketStream: + if self._closed: + raise ClosedResourceError + + with self._accept_guard: + await AsyncIOBackend.checkpoint() + with CancelScope() as self._accept_scope: + try: + client_sock, _addr = await self._loop.sock_accept(self._raw_socket) + except asyncio.CancelledError: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, 
NotImplementedError): + pass + + if self._closed: + raise ClosedResourceError from None + + raise + finally: + self._accept_scope = None + + client_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + transport, protocol = await self._loop.connect_accepted_socket( + StreamProtocol, client_sock + ) + return SocketStream(transport, protocol) + + async def aclose(self) -> None: + if self._closed: + return + + self._closed = True + if self._accept_scope: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + self._accept_scope.cancel() + await sleep(0) + + self._raw_socket.close() + + +class UNIXSocketListener(abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = get_running_loop() + self._accept_guard = ResourceGuard("accepting connections from") + self._closed = False + + async def accept(self) -> abc.SocketStream: + await AsyncIOBackend.checkpoint() + with self._accept_guard: + while True: + try: + client_sock, _ = self.__raw_socket.accept() + client_sock.setblocking(False) + return UNIXSocketStream(client_sock) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + self._loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback( + lambda _: self._loop.remove_reader(self.__raw_socket) + ) + await f + except OSError as exc: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + async def aclose(self) -> None: + self._closed = True + self.__raw_socket.close() + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + +class UDPSocket(abc.UDPSocket): + def __init__( + self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol + ): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def aclose(self) -> None: + self._closed = True + if not self._transport.is_closing(): + self._transport.close() + + async def receive(self) -> tuple[bytes, IPSockAddrType]: + with self._receive_guard: + await AsyncIOBackend.checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + return self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(*item) + + +class ConnectedUDPSocket(abc.ConnectedUDPSocket): + def __init__( + self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol + ): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def aclose(self) -> None: + 
self._closed = True + if not self._transport.is_closing(): + self._transport.close() + + async def receive(self) -> bytes: + with self._receive_guard: + await AsyncIOBackend.checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + packet = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + return packet[0] + + async def send(self, item: bytes) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(item) + + +class UNIXDatagramSocket(_RawSocketMixin, abc.UNIXDatagramSocket): + async def receive(self) -> UNIXDatagramPacketType: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recvfrom(65536) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return data + + async def send(self, item: UNIXDatagramPacketType) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + self._raw_socket.sendto(*item) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return + + +class ConnectedUNIXDatagramSocket(_RawSocketMixin, abc.ConnectedUNIXDatagramSocket): + async def receive(self) -> bytes: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recv(65536) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return data + + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + self._raw_socket.send(item) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return + + +_read_events: RunVar[dict[int, asyncio.Future[bool]]] = RunVar("read_events") +_write_events: RunVar[dict[int, asyncio.Future[bool]]] = RunVar("write_events") + + +# +# Synchronization +# + + +class Event(BaseEvent): + def __new__(cls) -> Event: + return object.__new__(cls) + + def __init__(self) -> None: + self._event = asyncio.Event() + + def set(self) -> None: + self._event.set() + + def is_set(self) -> bool: + return self._event.is_set() + + async def wait(self) -> None: + if self.is_set(): + await AsyncIOBackend.checkpoint() + else: + await self._event.wait() + + def statistics(self) -> EventStatistics: + return EventStatistics(len(self._event._waiters)) + + +class Lock(BaseLock): + def __new__(cls, *, fast_acquire: bool = False) -> Lock: + return object.__new__(cls) + + def __init__(self, *, fast_acquire: bool = False) -> None: + 
self._fast_acquire = fast_acquire + self._owner_task: asyncio.Task | None = None + self._waiters: deque[tuple[asyncio.Task, asyncio.Future]] = deque() + + async def acquire(self) -> None: + task = cast(asyncio.Task, current_task()) + if self._owner_task is None and not self._waiters: + await AsyncIOBackend.checkpoint_if_cancelled() + self._owner_task = task + + # Unless on the "fast path", yield control of the event loop so that other + # tasks can run too + if not self._fast_acquire: + try: + await AsyncIOBackend.cancel_shielded_checkpoint() + except CancelledError: + self.release() + raise + + return + + if self._owner_task == task: + raise RuntimeError("Attempted to acquire an already held Lock") + + fut: asyncio.Future[None] = asyncio.Future() + item = task, fut + self._waiters.append(item) + try: + await fut + except CancelledError: + self._waiters.remove(item) + if self._owner_task is task: + self.release() + + raise + + self._waiters.remove(item) + + def acquire_nowait(self) -> None: + task = cast(asyncio.Task, current_task()) + if self._owner_task is None and not self._waiters: + self._owner_task = task + return + + if self._owner_task is task: + raise RuntimeError("Attempted to acquire an already held Lock") + + raise WouldBlock + + def locked(self) -> bool: + return self._owner_task is not None + + def release(self) -> None: + if self._owner_task != current_task(): + raise RuntimeError("The current task is not holding this lock") + + for task, fut in self._waiters: + if not fut.cancelled(): + self._owner_task = task + fut.set_result(None) + return + + self._owner_task = None + + def statistics(self) -> LockStatistics: + task_info = AsyncIOTaskInfo(self._owner_task) if self._owner_task else None + return LockStatistics(self.locked(), task_info, len(self._waiters)) + + +class Semaphore(BaseSemaphore): + def __new__( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> Semaphore: + return object.__new__(cls) + + def __init__( + self, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ): + super().__init__(initial_value, max_value=max_value) + self._value = initial_value + self._max_value = max_value + self._fast_acquire = fast_acquire + self._waiters: deque[asyncio.Future[None]] = deque() + + async def acquire(self) -> None: + if self._value > 0 and not self._waiters: + await AsyncIOBackend.checkpoint_if_cancelled() + self._value -= 1 + + # Unless on the "fast path", yield control of the event loop so that other + # tasks can run too + if not self._fast_acquire: + try: + await AsyncIOBackend.cancel_shielded_checkpoint() + except CancelledError: + self.release() + raise + + return + + fut: asyncio.Future[None] = asyncio.Future() + self._waiters.append(fut) + try: + await fut + except CancelledError: + try: + self._waiters.remove(fut) + except ValueError: + self.release() + + raise + + def acquire_nowait(self) -> None: + if self._value == 0: + raise WouldBlock + + self._value -= 1 + + def release(self) -> None: + if self._max_value is not None and self._value == self._max_value: + raise ValueError("semaphore released too many times") + + for fut in self._waiters: + if not fut.cancelled(): + fut.set_result(None) + self._waiters.remove(fut) + return + + self._value += 1 + + @property + def value(self) -> int: + return self._value + + @property + def max_value(self) -> int | None: + return self._max_value + + def statistics(self) -> SemaphoreStatistics: + return 
SemaphoreStatistics(len(self._waiters)) + + +class CapacityLimiter(BaseCapacityLimiter): + _total_tokens: float = 0 + + def __new__(cls, total_tokens: float) -> CapacityLimiter: + return object.__new__(cls) + + def __init__(self, total_tokens: float): + self._borrowers: set[Any] = set() + self._wait_queue: OrderedDict[Any, asyncio.Event] = OrderedDict() + self.total_tokens = total_tokens + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + @property + def total_tokens(self) -> float: + return self._total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and not math.isinf(value): + raise TypeError("total_tokens must be an int or math.inf") + + if value < 0: + raise ValueError("total_tokens must be >= 0") + + waiters_to_notify = max(value - self._total_tokens, 0) + self._total_tokens = value + + # Notify waiting tasks that they have acquired the limiter + while self._wait_queue and waiters_to_notify: + event = self._wait_queue.popitem(last=False)[1] + event.set() + waiters_to_notify -= 1 + + @property + def borrowed_tokens(self) -> int: + return len(self._borrowers) + + @property + def available_tokens(self) -> float: + return self._total_tokens - len(self._borrowers) + + def _notify_next_waiter(self) -> None: + """Notify the next task in line if this limiter has free capacity now.""" + if self._wait_queue and len(self._borrowers) < self._total_tokens: + event = self._wait_queue.popitem(last=False)[1] + event.set() + + def acquire_nowait(self) -> None: + self.acquire_on_behalf_of_nowait(current_task()) + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + if borrower in self._borrowers: + raise RuntimeError( + "this borrower is already holding one of this CapacityLimiter's tokens" + ) + + if self._wait_queue or len(self._borrowers) >= self._total_tokens: + raise WouldBlock + + self._borrowers.add(borrower) + + async def acquire(self) -> None: + return await self.acquire_on_behalf_of(current_task()) + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await AsyncIOBackend.checkpoint_if_cancelled() + try: + self.acquire_on_behalf_of_nowait(borrower) + except WouldBlock: + event = asyncio.Event() + self._wait_queue[borrower] = event + try: + await event.wait() + except BaseException: + self._wait_queue.pop(borrower, None) + if event.is_set(): + self._notify_next_waiter() + + raise + + self._borrowers.add(borrower) + else: + try: + await AsyncIOBackend.cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def release(self) -> None: + self.release_on_behalf_of(current_task()) + + def release_on_behalf_of(self, borrower: object) -> None: + try: + self._borrowers.remove(borrower) + except KeyError: + raise RuntimeError( + "this borrower isn't holding any of this CapacityLimiter's tokens" + ) from None + + self._notify_next_waiter() + + def statistics(self) -> CapacityLimiterStatistics: + return CapacityLimiterStatistics( + self.borrowed_tokens, + self.total_tokens, + tuple(self._borrowers), + len(self._wait_queue), + ) + + +_default_thread_limiter: RunVar[CapacityLimiter] = RunVar("_default_thread_limiter") + + +# +# Operating system signals +# + + +class _SignalReceiver: + def __init__(self, signals: tuple[Signals, ...]): + self._signals = signals + self._loop = get_running_loop() + 
self._signal_queue: deque[Signals] = deque() + self._future: asyncio.Future = asyncio.Future() + self._handled_signals: set[Signals] = set() + + def _deliver(self, signum: Signals) -> None: + self._signal_queue.append(signum) + if not self._future.done(): + self._future.set_result(None) + + def __enter__(self) -> _SignalReceiver: + for sig in set(self._signals): + self._loop.add_signal_handler(sig, self._deliver, sig) + self._handled_signals.add(sig) + + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + for sig in self._handled_signals: + self._loop.remove_signal_handler(sig) + + def __aiter__(self) -> _SignalReceiver: + return self + + async def __anext__(self) -> Signals: + await AsyncIOBackend.checkpoint() + if not self._signal_queue: + self._future = asyncio.Future() + await self._future + + return self._signal_queue.popleft() + + +# +# Testing and debugging +# + + +class AsyncIOTaskInfo(TaskInfo): + def __init__(self, task: asyncio.Task): + task_state = _task_states.get(task) + if task_state is None: + parent_id = None + else: + parent_id = task_state.parent_id + + coro = task.get_coro() + assert coro is not None, "created TaskInfo from a completed Task" + super().__init__(id(task), parent_id, task.get_name(), coro) + self._task = weakref.ref(task) + + def has_pending_cancellation(self) -> bool: + if not (task := self._task()): + # If the task isn't around anymore, it won't have a pending cancellation + return False + + if task._must_cancel: # type: ignore[attr-defined] + return True + elif ( + isinstance(task._fut_waiter, asyncio.Future) # type: ignore[attr-defined] + and task._fut_waiter.cancelled() # type: ignore[attr-defined] + ): + return True + + if task_state := _task_states.get(task): + if cancel_scope := task_state.cancel_scope: + return cancel_scope._effectively_cancelled + + return False + + +class TestRunner(abc.TestRunner): + _send_stream: MemoryObjectSendStream[tuple[Awaitable[Any], asyncio.Future[Any]]] + + def __init__( + self, + *, + debug: bool | None = None, + use_uvloop: bool = False, + loop_factory: Callable[[], AbstractEventLoop] | None = None, + ) -> None: + if use_uvloop and loop_factory is None: + if sys.platform != "win32": + import uvloop + + loop_factory = uvloop.new_event_loop + else: + import winloop + + loop_factory = winloop.new_event_loop + + self._runner = Runner(debug=debug, loop_factory=loop_factory) + self._exceptions: list[BaseException] = [] + self._runner_task: asyncio.Task | None = None + + def __enter__(self) -> TestRunner: + self._runner.__enter__() + self.get_loop().set_exception_handler(self._exception_handler) + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self._runner.__exit__(exc_type, exc_val, exc_tb) + + def get_loop(self) -> AbstractEventLoop: + return self._runner.get_loop() + + def _exception_handler( + self, loop: asyncio.AbstractEventLoop, context: dict[str, Any] + ) -> None: + if isinstance(context.get("exception"), Exception): + self._exceptions.append(context["exception"]) + else: + loop.default_exception_handler(context) + + def _raise_async_exceptions(self) -> None: + # Re-raise any exceptions raised in asynchronous callbacks + if self._exceptions: + exceptions, self._exceptions = self._exceptions, [] + if len(exceptions) == 1: + raise exceptions[0] + elif exceptions: + raise BaseExceptionGroup( + "Multiple 
exceptions occurred in asynchronous callbacks", exceptions + ) + + async def _run_tests_and_fixtures( + self, + receive_stream: MemoryObjectReceiveStream[ + tuple[Awaitable[T_Retval], asyncio.Future[T_Retval]] + ], + ) -> None: + from _pytest.outcomes import OutcomeException + + with receive_stream, self._send_stream: + async for coro, future in receive_stream: + try: + retval = await coro + except CancelledError as exc: + if not future.cancelled(): + future.cancel(*exc.args) + + raise + except BaseException as exc: + if not future.cancelled(): + future.set_exception(exc) + + if not isinstance(exc, (Exception, OutcomeException)): + raise + else: + if not future.cancelled(): + future.set_result(retval) + + async def _call_in_runner_task( + self, + func: Callable[P, Awaitable[T_Retval]], + *args: P.args, + **kwargs: P.kwargs, + ) -> T_Retval: + if not self._runner_task: + self._send_stream, receive_stream = create_memory_object_stream[ + tuple[Awaitable[Any], asyncio.Future] + ](1) + self._runner_task = self.get_loop().create_task( + self._run_tests_and_fixtures(receive_stream) + ) + + coro = func(*args, **kwargs) + future: asyncio.Future[T_Retval] = self.get_loop().create_future() + self._send_stream.send_nowait((coro, future)) + return await future + + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], + kwargs: dict[str, Any], + ) -> Iterable[T_Retval]: + asyncgen = fixture_func(**kwargs) + fixturevalue: T_Retval = self.get_loop().run_until_complete( + self._call_in_runner_task(asyncgen.asend, None) + ) + self._raise_async_exceptions() + + yield fixturevalue + + try: + self.get_loop().run_until_complete( + self._call_in_runner_task(asyncgen.asend, None) + ) + except StopAsyncIteration: + self._raise_async_exceptions() + else: + self.get_loop().run_until_complete(asyncgen.aclose()) + raise RuntimeError("Async generator fixture did not stop") + + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], + kwargs: dict[str, Any], + ) -> T_Retval: + retval = self.get_loop().run_until_complete( + self._call_in_runner_task(fixture_func, **kwargs) + ) + self._raise_async_exceptions() + return retval + + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + try: + self.get_loop().run_until_complete( + self._call_in_runner_task(test_func, **kwargs) + ) + except Exception as exc: + self._exceptions.append(exc) + + self._raise_async_exceptions() + + +class AsyncIOBackend(AsyncBackend): + @classmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + @wraps(func) + async def wrapper() -> T_Retval: + task = cast(asyncio.Task, current_task()) + task.set_name(get_callable_name(func)) + _task_states[task] = TaskState(None, None) + + try: + return await func(*args) + finally: + del _task_states[task] + + debug = options.get("debug", None) + loop_factory = options.get("loop_factory", None) + if loop_factory is None and options.get("use_uvloop", False): + if sys.platform != "win32": + import uvloop + + loop_factory = uvloop.new_event_loop + else: + import winloop + + loop_factory = winloop.new_event_loop + + with Runner(debug=debug, loop_factory=loop_factory) as runner: + return runner.run(wrapper()) + + @classmethod + def current_token(cls) -> object: + return get_running_loop() + + @classmethod + def current_time(cls) -> float: + return 
get_running_loop().time() + + @classmethod + def cancelled_exception_class(cls) -> type[BaseException]: + return CancelledError + + @classmethod + async def checkpoint(cls) -> None: + await sleep(0) + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + task = current_task() + if task is None: + return + + try: + cancel_scope = _task_states[task].cancel_scope + except KeyError: + return + + while cancel_scope: + if cancel_scope.cancel_called: + await sleep(0) + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + with CancelScope(shield=True): + await sleep(0) + + @classmethod + async def sleep(cls, delay: float) -> None: + await sleep(delay) + + @classmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return CancelScope(deadline=deadline, shield=shield) + + @classmethod + def current_effective_deadline(cls) -> float: + if (task := current_task()) is None: + return math.inf + + try: + cancel_scope = _task_states[task].cancel_scope + except KeyError: + return math.inf + + deadline = math.inf + while cancel_scope: + deadline = min(deadline, cancel_scope.deadline) + if cancel_scope._cancel_called: + deadline = -math.inf + break + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + return deadline + + @classmethod + def create_task_group(cls) -> abc.TaskGroup: + return TaskGroup() + + @classmethod + def create_event(cls) -> abc.Event: + return Event() + + @classmethod + def create_lock(cls, *, fast_acquire: bool) -> abc.Lock: + return Lock(fast_acquire=fast_acquire) + + @classmethod + def create_semaphore( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> abc.Semaphore: + return Semaphore(initial_value, max_value=max_value, fast_acquire=fast_acquire) + + @classmethod + def create_capacity_limiter(cls, total_tokens: float) -> abc.CapacityLimiter: + return CapacityLimiter(total_tokens) + + @classmethod + async def run_sync_in_worker_thread( # type: ignore[return] + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: abc.CapacityLimiter | None = None, + ) -> T_Retval: + await cls.checkpoint() + + # If this is the first run in this event loop thread, set up the necessary + # variables + try: + idle_workers = _threadpool_idle_workers.get() + workers = _threadpool_workers.get() + except LookupError: + idle_workers = deque() + workers = set() + _threadpool_idle_workers.set(idle_workers) + _threadpool_workers.set(workers) + + async with limiter or cls.current_default_thread_limiter(): + with CancelScope(shield=not abandon_on_cancel) as scope: + future = asyncio.Future[T_Retval]() + root_task = find_root_task() + if not idle_workers: + worker = WorkerThread(root_task, workers, idle_workers) + worker.start() + workers.add(worker) + root_task.add_done_callback( + worker.stop, context=contextvars.Context() + ) + else: + worker = idle_workers.pop() + + # Prune any other workers that have been idle for MAX_IDLE_TIME + # seconds or longer + now = cls.current_time() + while idle_workers: + if ( + now - idle_workers[0].idle_since + < WorkerThread.MAX_IDLE_TIME + ): + break + + expired_worker = idle_workers.popleft() + expired_worker.root_task.remove_done_callback( + expired_worker.stop + ) + expired_worker.stop() + + context = copy_context() + 
context.run(set_current_async_library, None) + if abandon_on_cancel or scope._parent_scope is None: + worker_scope = scope + else: + worker_scope = scope._parent_scope + + worker.queue.put_nowait((context, func, args, future, worker_scope)) + return await future + + @classmethod + def check_cancelled(cls) -> None: + scope: CancelScope | None = threadlocals.current_cancel_scope + while scope is not None: + if scope.cancel_called: + raise CancelledError(f"Cancelled by cancel scope {id(scope):x}") + + if scope.shield: + return + + scope = scope._parent_scope + + @classmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + async def task_wrapper() -> T_Retval: + __tracebackhide__ = True + if scope is not None: + task = cast(asyncio.Task, current_task()) + _task_states[task] = TaskState(None, scope) + scope._tasks.add(task) + try: + return await func(*args) + except CancelledError as exc: + raise concurrent.futures.CancelledError(str(exc)) from None + finally: + if scope is not None: + scope._tasks.discard(task) + + loop = cast( + "AbstractEventLoop", token or threadlocals.current_token.native_token + ) + if loop.is_closed(): + raise RunFinishedError + + context = copy_context() + context.run(set_current_async_library, "asyncio") + scope = getattr(threadlocals, "current_cancel_scope", None) + f: concurrent.futures.Future[T_Retval] = context.run( + asyncio.run_coroutine_threadsafe, task_wrapper(), loop=loop + ) + return f.result() + + @classmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + @wraps(func) + def wrapper() -> None: + try: + set_current_async_library("asyncio") + f.set_result(func(*args)) + except BaseException as exc: + f.set_exception(exc) + if not isinstance(exc, Exception): + raise + + loop = cast( + "AbstractEventLoop", token or threadlocals.current_token.native_token + ) + if loop.is_closed(): + raise RunFinishedError + + f: concurrent.futures.Future[T_Retval] = Future() + loop.call_soon_threadsafe(wrapper) + return f.result() + + @classmethod + async def open_process( + cls, + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + **kwargs: Any, + ) -> Process: + await cls.checkpoint() + if isinstance(command, PathLike): + command = os.fspath(command) + + if isinstance(command, (str, bytes)): + process = await asyncio.create_subprocess_shell( + command, + stdin=stdin, + stdout=stdout, + stderr=stderr, + **kwargs, + ) + else: + process = await asyncio.create_subprocess_exec( + *command, + stdin=stdin, + stdout=stdout, + stderr=stderr, + **kwargs, + ) + + stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin else None + stdout_stream = StreamReaderWrapper(process.stdout) if process.stdout else None + stderr_stream = StreamReaderWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + @classmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: + create_task( + _shutdown_process_pool_on_exit(workers), + name="AnyIO process pool shutdown task", + ) + find_root_task().add_done_callback( + partial(_forcibly_shutdown_process_pool_on_exit, workers) # type:ignore[arg-type] + ) + + @classmethod + async def connect_tcp( + cls, host: str, port: int, 
local_address: IPSockAddrType | None = None + ) -> abc.SocketStream: + transport, protocol = cast( + tuple[asyncio.Transport, StreamProtocol], + await get_running_loop().create_connection( + StreamProtocol, host, port, local_addr=local_address + ), + ) + transport.pause_reading() + return SocketStream(transport, protocol) + + @classmethod + async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: + await cls.checkpoint() + loop = get_running_loop() + raw_socket = socket.socket(socket.AF_UNIX) + raw_socket.setblocking(False) + while True: + try: + raw_socket.connect(path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return UNIXSocketStream(raw_socket) + + @classmethod + def create_tcp_listener(cls, sock: socket.socket) -> SocketListener: + return TCPSocketListener(sock) + + @classmethod + def create_unix_listener(cls, sock: socket.socket) -> SocketListener: + return UNIXSocketListener(sock) + + @classmethod + async def create_udp_socket( + cls, + family: AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + transport, protocol = await get_running_loop().create_datagram_endpoint( + DatagramProtocol, + local_addr=local_address, + remote_addr=remote_address, + family=family, + reuse_port=reuse_port, + ) + if protocol.exception: + transport.close() + raise protocol.exception + + if not remote_address: + return UDPSocket(transport, protocol) + else: + return ConnectedUDPSocket(transport, protocol) + + @classmethod + async def create_unix_datagram_socket( # type: ignore[override] + cls, raw_socket: socket.socket, remote_path: str | bytes | None + ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: + await cls.checkpoint() + loop = get_running_loop() + + if remote_path: + while True: + try: + raw_socket.connect(remote_path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return ConnectedUNIXDatagramSocket(raw_socket) + else: + return UNIXDatagramSocket(raw_socket) + + @classmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> Sequence[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], + ] + ]: + return await get_running_loop().getaddrinfo( + host, port, family=family, type=type, proto=proto, flags=flags + ) + + @classmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + return await get_running_loop().getnameinfo(sockaddr, flags) + + @classmethod + async def wait_readable(cls, obj: FileDescriptorLike) -> None: + try: + read_events = _read_events.get() + except LookupError: + read_events = {} + _read_events.set(read_events) + + fd = obj if isinstance(obj, int) else obj.fileno() + if read_events.get(fd): + raise BusyResourceError("reading from") + + loop = get_running_loop() + fut: asyncio.Future[bool] = loop.create_future() + + def cb() -> None: + try: + del read_events[fd] + except KeyError: + 
pass + else: + remove_reader(fd) + + try: + fut.set_result(True) + except asyncio.InvalidStateError: + pass + + try: + loop.add_reader(fd, cb) + except NotImplementedError: + from anyio._core._asyncio_selector_thread import get_selector + + selector = get_selector() + selector.add_reader(fd, cb) + remove_reader = selector.remove_reader + else: + remove_reader = loop.remove_reader + + read_events[fd] = fut + try: + success = await fut + finally: + try: + del read_events[fd] + except KeyError: + pass + else: + remove_reader(fd) + + if not success: + raise ClosedResourceError + + @classmethod + async def wait_writable(cls, obj: FileDescriptorLike) -> None: + try: + write_events = _write_events.get() + except LookupError: + write_events = {} + _write_events.set(write_events) + + fd = obj if isinstance(obj, int) else obj.fileno() + if write_events.get(fd): + raise BusyResourceError("writing to") + + loop = get_running_loop() + fut: asyncio.Future[bool] = loop.create_future() + + def cb() -> None: + try: + del write_events[fd] + except KeyError: + pass + else: + remove_writer(fd) + + try: + fut.set_result(True) + except asyncio.InvalidStateError: + pass + + try: + loop.add_writer(fd, cb) + except NotImplementedError: + from anyio._core._asyncio_selector_thread import get_selector + + selector = get_selector() + selector.add_writer(fd, cb) + remove_writer = selector.remove_writer + else: + remove_writer = loop.remove_writer + + write_events[fd] = fut + try: + success = await fut + finally: + try: + del write_events[fd] + except KeyError: + pass + else: + remove_writer(fd) + + if not success: + raise ClosedResourceError + + @classmethod + def notify_closing(cls, obj: FileDescriptorLike) -> None: + fd = obj if isinstance(obj, int) else obj.fileno() + loop = get_running_loop() + + try: + write_events = _write_events.get() + except LookupError: + pass + else: + try: + fut = write_events.pop(fd) + except KeyError: + pass + else: + try: + fut.set_result(False) + except asyncio.InvalidStateError: + pass + + try: + loop.remove_writer(fd) + except NotImplementedError: + from anyio._core._asyncio_selector_thread import get_selector + + get_selector().remove_writer(fd) + + try: + read_events = _read_events.get() + except LookupError: + pass + else: + try: + fut = read_events.pop(fd) + except KeyError: + pass + else: + try: + fut.set_result(False) + except asyncio.InvalidStateError: + pass + + try: + loop.remove_reader(fd) + except NotImplementedError: + from anyio._core._asyncio_selector_thread import get_selector + + get_selector().remove_reader(fd) + + @classmethod + async def wrap_listener_socket(cls, sock: socket.socket) -> SocketListener: + return TCPSocketListener(sock) + + @classmethod + async def wrap_stream_socket(cls, sock: socket.socket) -> SocketStream: + transport, protocol = await get_running_loop().create_connection( + StreamProtocol, sock=sock + ) + return SocketStream(transport, protocol) + + @classmethod + async def wrap_unix_stream_socket(cls, sock: socket.socket) -> UNIXSocketStream: + return UNIXSocketStream(sock) + + @classmethod + async def wrap_udp_socket(cls, sock: socket.socket) -> UDPSocket: + transport, protocol = await get_running_loop().create_datagram_endpoint( + DatagramProtocol, sock=sock + ) + return UDPSocket(transport, protocol) + + @classmethod + async def wrap_connected_udp_socket(cls, sock: socket.socket) -> ConnectedUDPSocket: + transport, protocol = await get_running_loop().create_datagram_endpoint( + DatagramProtocol, sock=sock + ) + return 
ConnectedUDPSocket(transport, protocol) + + @classmethod + async def wrap_unix_datagram_socket(cls, sock: socket.socket) -> UNIXDatagramSocket: + return UNIXDatagramSocket(sock) + + @classmethod + async def wrap_connected_unix_datagram_socket( + cls, sock: socket.socket + ) -> ConnectedUNIXDatagramSocket: + return ConnectedUNIXDatagramSocket(sock) + + @classmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + try: + return _default_thread_limiter.get() + except LookupError: + limiter = CapacityLimiter(40) + _default_thread_limiter.set(limiter) + return limiter + + @classmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> AbstractContextManager[AsyncIterator[Signals]]: + return _SignalReceiver(signals) + + @classmethod + def get_current_task(cls) -> TaskInfo: + return AsyncIOTaskInfo(current_task()) # type: ignore[arg-type] + + @classmethod + def get_running_tasks(cls) -> Sequence[TaskInfo]: + return [AsyncIOTaskInfo(task) for task in all_tasks() if not task.done()] + + @classmethod + async def wait_all_tasks_blocked(cls) -> None: + await cls.checkpoint() + this_task = current_task() + while True: + for task in all_tasks(): + if task is this_task: + continue + + waiter = task._fut_waiter # type: ignore[attr-defined] + if waiter is None or waiter.done(): + await sleep(0.1) + break + else: + return + + @classmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + return TestRunner(**options) + + +backend_class = AsyncIOBackend diff --git a/.venv/lib/python3.12/site-packages/anyio/_backends/_trio.py b/.venv/lib/python3.12/site-packages/anyio/_backends/_trio.py new file mode 100644 index 0000000..f460a7f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/_backends/_trio.py @@ -0,0 +1,1346 @@ +from __future__ import annotations + +import array +import math +import os +import socket +import sys +import types +import weakref +from collections.abc import ( + AsyncGenerator, + AsyncIterator, + Awaitable, + Callable, + Collection, + Coroutine, + Iterable, + Sequence, +) +from contextlib import AbstractContextManager +from dataclasses import dataclass +from io import IOBase +from os import PathLike +from signal import Signals +from socket import AddressFamily, SocketKind +from types import TracebackType +from typing import ( + IO, + TYPE_CHECKING, + Any, + Generic, + NoReturn, + TypeVar, + cast, + overload, +) + +import trio.from_thread +import trio.lowlevel +from outcome import Error, Outcome, Value +from trio.lowlevel import ( + current_root_task, + current_task, + notify_closing, + wait_readable, + wait_writable, +) +from trio.socket import SocketType as TrioSocketType +from trio.to_thread import run_sync + +from .. 
import ( + CapacityLimiterStatistics, + EventStatistics, + LockStatistics, + RunFinishedError, + TaskInfo, + WouldBlock, + abc, +) +from .._core._eventloop import claim_worker_thread +from .._core._exceptions import ( + BrokenResourceError, + BusyResourceError, + ClosedResourceError, + EndOfStream, +) +from .._core._sockets import convert_ipv6_sockaddr +from .._core._streams import create_memory_object_stream +from .._core._synchronization import ( + CapacityLimiter as BaseCapacityLimiter, +) +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import Lock as BaseLock +from .._core._synchronization import ( + ResourceGuard, + SemaphoreStatistics, +) +from .._core._synchronization import Semaphore as BaseSemaphore +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import IPSockAddrType, UDPPacketType, UNIXDatagramPacketType +from ..abc._eventloop import AsyncBackend, StrOrBytesPath +from ..streams.memory import MemoryObjectSendStream + +if TYPE_CHECKING: + from _typeshed import FileDescriptorLike + +if sys.version_info >= (3, 10): + from typing import ParamSpec +else: + from typing_extensions import ParamSpec + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from exceptiongroup import BaseExceptionGroup + from typing_extensions import TypeVarTuple, Unpack + +T = TypeVar("T") +T_Retval = TypeVar("T_Retval") +T_SockAddr = TypeVar("T_SockAddr", str, IPSockAddrType) +PosArgsT = TypeVarTuple("PosArgsT") +P = ParamSpec("P") + + +# +# Event loop +# + +RunVar = trio.lowlevel.RunVar + + +# +# Timeouts and cancellation +# + + +class CancelScope(BaseCancelScope): + def __new__( + cls, original: trio.CancelScope | None = None, **kwargs: object + ) -> CancelScope: + return object.__new__(cls) + + def __init__(self, original: trio.CancelScope | None = None, **kwargs: Any) -> None: + self.__original = original or trio.CancelScope(**kwargs) + + def __enter__(self) -> CancelScope: + self.__original.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + return self.__original.__exit__(exc_type, exc_val, exc_tb) + + def cancel(self, reason: str | None = None) -> None: + self.__original.cancel(reason) + + @property + def deadline(self) -> float: + return self.__original.deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self.__original.deadline = value + + @property + def cancel_called(self) -> bool: + return self.__original.cancel_called + + @property + def cancelled_caught(self) -> bool: + return self.__original.cancelled_caught + + @property + def shield(self) -> bool: + return self.__original.shield + + @shield.setter + def shield(self, value: bool) -> None: + self.__original.shield = value + + +# +# Task groups +# + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self._active = False + self._nursery_manager = trio.open_nursery(strict_exception_groups=True) + self.cancel_scope = None # type: ignore[assignment] + + async def __aenter__(self) -> TaskGroup: + self._active = True + self._nursery = await self._nursery_manager.__aenter__() + self.cancel_scope = CancelScope(self._nursery.cancel_scope) + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + try: + # trio.Nursery.__exit__ returns bool; .open_nursery has wrong type + return await 
self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb) # type: ignore[return-value] + except BaseExceptionGroup as exc: + if not exc.split(trio.Cancelled)[1]: + raise trio.Cancelled._create() from exc + + raise + finally: + del exc_val, exc_tb + self._active = False + + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + self._nursery.start_soon(func, *args, name=name) + + async def start( + self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None + ) -> Any: + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + return await self._nursery.start(func, *args, name=name) + + +# +# Subprocesses +# + + +@dataclass(eq=False) +class ReceiveStreamWrapper(abc.ByteReceiveStream): + _stream: trio.abc.ReceiveStream + + async def receive(self, max_bytes: int | None = None) -> bytes: + try: + data = await self._stream.receive_some(max_bytes) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + if data: + return bytes(data) + else: + raise EndOfStream + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class SendStreamWrapper(abc.ByteSendStream): + _stream: trio.abc.SendStream + + async def send(self, item: bytes) -> None: + try: + await self._stream.send_all(item) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: trio.Process + _stdin: abc.ByteSendStream | None + _stdout: abc.ByteReceiveStream | None + _stderr: abc.ByteReceiveStream | None + + async def aclose(self) -> None: + with CancelScope(shield=True): + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + try: + await self.wait() + except BaseException: + self.kill() + with CancelScope(shield=True): + await self.wait() + raise + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: Signals) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> int | None: + return self._process.returncode + + @property + def stdin(self) -> abc.ByteSendStream | None: + return self._stdin + + @property + def stdout(self) -> abc.ByteReceiveStream | None: + return self._stdout + + @property + def stderr(self) -> abc.ByteReceiveStream | None: + return self._stderr + + +class _ProcessPoolShutdownInstrument(trio.abc.Instrument): + def after_run(self) -> None: + super().after_run() + + +current_default_worker_process_limiter: trio.lowlevel.RunVar = RunVar( + "current_default_worker_process_limiter" +) + + +async def _shutdown_process_pool(workers: set[abc.Process]) -> None: + try: + await trio.sleep(math.inf) + except trio.Cancelled: + for process in workers: + if process.returncode is None: + process.kill() + + with 
CancelScope(shield=True): + for process in workers: + await process.aclose() + + +# +# Sockets and networking +# + + +class _TrioSocketMixin(Generic[T_SockAddr]): + def __init__(self, trio_socket: TrioSocketType) -> None: + self._trio_socket = trio_socket + self._closed = False + + def _check_closed(self) -> None: + if self._closed: + raise ClosedResourceError + if self._trio_socket.fileno() < 0: + raise BrokenResourceError + + @property + def _raw_socket(self) -> socket.socket: + return self._trio_socket._sock # type: ignore[attr-defined] + + async def aclose(self) -> None: + if self._trio_socket.fileno() >= 0: + self._closed = True + self._trio_socket.close() + + def _convert_socket_error(self, exc: BaseException) -> NoReturn: + if isinstance(exc, trio.ClosedResourceError): + raise ClosedResourceError from exc + elif self._trio_socket.fileno() < 0 and self._closed: + raise ClosedResourceError from None + elif isinstance(exc, OSError): + raise BrokenResourceError from exc + else: + raise exc + + +class SocketStream(_TrioSocketMixin, abc.SocketStream): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + try: + data = await self._trio_socket.recv(max_bytes) + except BaseException as exc: + self._convert_socket_error(exc) + + if data: + return data + else: + raise EndOfStream + + async def send(self, item: bytes) -> None: + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = await self._trio_socket.send(view) + except BaseException as exc: + self._convert_socket_error(exc) + + view = view[bytes_sent:] + + async def send_eof(self) -> None: + self._trio_socket.shutdown(socket.SHUT_WR) + + +class UNIXSocketStream(SocketStream, abc.UNIXSocketStream): + async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError("msglen must be a non-negative integer") + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError("maxfds must be a positive integer") + + fds = array.array("i") + await trio.lowlevel.checkpoint() + with self._receive_guard: + while True: + try: + message, ancdata, flags, addr = await self._trio_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize) + ) + except BaseException as exc: + self._convert_socket_error(exc) + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError( + f"Received unexpected ancillary data; message = {message!r}, " + f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" + ) + + fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: + if not message: + raise ValueError("message must not be empty") + if not fds: + raise ValueError("fds must not be empty") + + filenos: list[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await trio.lowlevel.checkpoint() + with self._send_guard: + while True: + try: + await self._trio_socket.sendmsg( + [message], + [ + ( + socket.SOL_SOCKET, + 
socket.SCM_RIGHTS, + fdarray, + ) + ], + ) + break + except BaseException as exc: + self._convert_socket_error(exc) + + +class TCPSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard("accepting connections from") + + async def accept(self) -> SocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + return SocketStream(trio_socket) + + +class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard("accepting connections from") + + async def accept(self) -> UNIXSocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + return UNIXSocketStream(trio_socket) + + +class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> tuple[bytes, IPSockAddrType]: + with self._receive_guard: + try: + data, addr = await self._trio_socket.recvfrom(65536) + return data, convert_ipv6_sockaddr(addr) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class UNIXDatagramSocket(_TrioSocketMixin[str], abc.UNIXDatagramSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> UNIXDatagramPacketType: + with self._receive_guard: + try: + data, addr = await self._trio_socket.recvfrom(65536) + return data, addr + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: UNIXDatagramPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class ConnectedUNIXDatagramSocket( + _TrioSocketMixin[str], abc.ConnectedUNIXDatagramSocket +): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> 
bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) + + +# +# Synchronization +# + + +class Event(BaseEvent): + def __new__(cls) -> Event: + return object.__new__(cls) + + def __init__(self) -> None: + self.__original = trio.Event() + + def is_set(self) -> bool: + return self.__original.is_set() + + async def wait(self) -> None: + return await self.__original.wait() + + def statistics(self) -> EventStatistics: + orig_statistics = self.__original.statistics() + return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting) + + def set(self) -> None: + self.__original.set() + + +class Lock(BaseLock): + def __new__(cls, *, fast_acquire: bool = False) -> Lock: + return object.__new__(cls) + + def __init__(self, *, fast_acquire: bool = False) -> None: + self._fast_acquire = fast_acquire + self.__original = trio.Lock() + + @staticmethod + def _convert_runtime_error_msg(exc: RuntimeError) -> None: + if exc.args == ("attempt to re-acquire an already held Lock",): + exc.args = ("Attempted to acquire an already held Lock",) + + async def acquire(self) -> None: + if not self._fast_acquire: + try: + await self.__original.acquire() + except RuntimeError as exc: + self._convert_runtime_error_msg(exc) + raise + + return + + # This is the "fast path" where we don't let other tasks run + await trio.lowlevel.checkpoint_if_cancelled() + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + await self.__original._lot.park() + except RuntimeError as exc: + self._convert_runtime_error_msg(exc) + raise + + def acquire_nowait(self) -> None: + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + raise WouldBlock from None + except RuntimeError as exc: + self._convert_runtime_error_msg(exc) + raise + + def locked(self) -> bool: + return self.__original.locked() + + def release(self) -> None: + self.__original.release() + + def statistics(self) -> LockStatistics: + orig_statistics = self.__original.statistics() + owner = TrioTaskInfo(orig_statistics.owner) if orig_statistics.owner else None + return LockStatistics( + orig_statistics.locked, owner, orig_statistics.tasks_waiting + ) + + +class Semaphore(BaseSemaphore): + def __new__( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> Semaphore: + return object.__new__(cls) + + def __init__( + self, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> None: + super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire) + self.__original = trio.Semaphore(initial_value, max_value=max_value) + + async def acquire(self) -> None: + if not self._fast_acquire: + await self.__original.acquire() + return + + # This is the "fast path" where we don't let other tasks run + await trio.lowlevel.checkpoint_if_cancelled() + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + await self.__original._lot.park() + + def acquire_nowait(self) -> None: + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + raise WouldBlock from None + + @property + def max_value(self) -> int | None: + return self.__original.max_value + + @property + def value(self) -> int: + return self.__original.value + + def release(self) -> None: + 
self.__original.release() + + def statistics(self) -> SemaphoreStatistics: + orig_statistics = self.__original.statistics() + return SemaphoreStatistics(orig_statistics.tasks_waiting) + + +class CapacityLimiter(BaseCapacityLimiter): + def __new__( + cls, + total_tokens: float | None = None, + *, + original: trio.CapacityLimiter | None = None, + ) -> CapacityLimiter: + return object.__new__(cls) + + def __init__( + self, + total_tokens: float | None = None, + *, + original: trio.CapacityLimiter | None = None, + ) -> None: + if original is not None: + self.__original = original + else: + assert total_tokens is not None + self.__original = trio.CapacityLimiter(total_tokens) + + async def __aenter__(self) -> None: + return await self.__original.__aenter__() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.__original.__aexit__(exc_type, exc_val, exc_tb) + + @property + def total_tokens(self) -> float: + return self.__original.total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + self.__original.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + return self.__original.borrowed_tokens + + @property + def available_tokens(self) -> float: + return self.__original.available_tokens + + def acquire_nowait(self) -> None: + self.__original.acquire_nowait() + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + self.__original.acquire_on_behalf_of_nowait(borrower) + + async def acquire(self) -> None: + await self.__original.acquire() + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await self.__original.acquire_on_behalf_of(borrower) + + def release(self) -> None: + return self.__original.release() + + def release_on_behalf_of(self, borrower: object) -> None: + return self.__original.release_on_behalf_of(borrower) + + def statistics(self) -> CapacityLimiterStatistics: + orig = self.__original.statistics() + return CapacityLimiterStatistics( + borrowed_tokens=orig.borrowed_tokens, + total_tokens=orig.total_tokens, + borrowers=tuple(orig.borrowers), + tasks_waiting=orig.tasks_waiting, + ) + + +_capacity_limiter_wrapper: trio.lowlevel.RunVar = RunVar("_capacity_limiter_wrapper") + + +# +# Signal handling +# + + +class _SignalReceiver: + _iterator: AsyncIterator[int] + + def __init__(self, signals: tuple[Signals, ...]): + self._signals = signals + + def __enter__(self) -> _SignalReceiver: + self._cm = trio.open_signal_receiver(*self._signals) + self._iterator = self._cm.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + return self._cm.__exit__(exc_type, exc_val, exc_tb) + + def __aiter__(self) -> _SignalReceiver: + return self + + async def __anext__(self) -> Signals: + signum = await self._iterator.__anext__() + return Signals(signum) + + +# +# Testing and debugging +# + + +class TestRunner(abc.TestRunner): + def __init__(self, **options: Any) -> None: + from queue import Queue + + self._call_queue: Queue[Callable[[], object]] = Queue() + self._send_stream: MemoryObjectSendStream | None = None + self._options = options + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> None: + if self._send_stream: + self._send_stream.close() + while self._send_stream is not None: + 
self._call_queue.get()() + + async def _run_tests_and_fixtures(self) -> None: + self._send_stream, receive_stream = create_memory_object_stream(1) + with receive_stream: + async for coro, outcome_holder in receive_stream: + try: + retval = await coro + except BaseException as exc: + outcome_holder.append(Error(exc)) + else: + outcome_holder.append(Value(retval)) + + def _main_task_finished(self, outcome: object) -> None: + self._send_stream = None + + def _call_in_runner_task( + self, + func: Callable[P, Awaitable[T_Retval]], + *args: P.args, + **kwargs: P.kwargs, + ) -> T_Retval: + if self._send_stream is None: + trio.lowlevel.start_guest_run( + self._run_tests_and_fixtures, + run_sync_soon_threadsafe=self._call_queue.put, + done_callback=self._main_task_finished, + **self._options, + ) + while self._send_stream is None: + self._call_queue.get()() + + outcome_holder: list[Outcome] = [] + self._send_stream.send_nowait((func(*args, **kwargs), outcome_holder)) + while not outcome_holder: + self._call_queue.get()() + + return outcome_holder[0].unwrap() + + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], + kwargs: dict[str, Any], + ) -> Iterable[T_Retval]: + asyncgen = fixture_func(**kwargs) + fixturevalue: T_Retval = self._call_in_runner_task(asyncgen.asend, None) + + yield fixturevalue + + try: + self._call_in_runner_task(asyncgen.asend, None) + except StopAsyncIteration: + pass + else: + self._call_in_runner_task(asyncgen.aclose) + raise RuntimeError("Async generator fixture did not stop") + + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], + kwargs: dict[str, Any], + ) -> T_Retval: + return self._call_in_runner_task(fixture_func, **kwargs) + + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + self._call_in_runner_task(test_func, **kwargs) + + +class TrioTaskInfo(TaskInfo): + def __init__(self, task: trio.lowlevel.Task): + parent_id = None + if task.parent_nursery and task.parent_nursery.parent_task: + parent_id = id(task.parent_nursery.parent_task) + + super().__init__(id(task), parent_id, task.name, task.coro) + self._task = weakref.proxy(task) + + def has_pending_cancellation(self) -> bool: + try: + return self._task._cancel_status.effectively_cancelled + except ReferenceError: + # If the task is no longer around, it surely doesn't have a cancellation + # pending + return False + + +class TrioBackend(AsyncBackend): + @classmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + return trio.run(func, *args) + + @classmethod + def current_token(cls) -> object: + return trio.lowlevel.current_trio_token() + + @classmethod + def current_time(cls) -> float: + return trio.current_time() + + @classmethod + def cancelled_exception_class(cls) -> type[BaseException]: + return trio.Cancelled + + @classmethod + async def checkpoint(cls) -> None: + await trio.lowlevel.checkpoint() + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + await trio.lowlevel.checkpoint_if_cancelled() + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + await trio.lowlevel.cancel_shielded_checkpoint() + + @classmethod + async def sleep(cls, delay: float) -> None: + await trio.sleep(delay) + + @classmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> abc.CancelScope: 
+ return CancelScope(deadline=deadline, shield=shield) + + @classmethod + def current_effective_deadline(cls) -> float: + return trio.current_effective_deadline() + + @classmethod + def create_task_group(cls) -> abc.TaskGroup: + return TaskGroup() + + @classmethod + def create_event(cls) -> abc.Event: + return Event() + + @classmethod + def create_lock(cls, *, fast_acquire: bool) -> Lock: + return Lock(fast_acquire=fast_acquire) + + @classmethod + def create_semaphore( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> abc.Semaphore: + return Semaphore(initial_value, max_value=max_value, fast_acquire=fast_acquire) + + @classmethod + def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: + return CapacityLimiter(total_tokens) + + @classmethod + async def run_sync_in_worker_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: abc.CapacityLimiter | None = None, + ) -> T_Retval: + def wrapper() -> T_Retval: + with claim_worker_thread(TrioBackend, token): + return func(*args) + + token = TrioBackend.current_token() + return await run_sync( + wrapper, + abandon_on_cancel=abandon_on_cancel, + limiter=cast(trio.CapacityLimiter, limiter), + ) + + @classmethod + def check_cancelled(cls) -> None: + trio.from_thread.check_cancelled() + + @classmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + trio_token = cast("trio.lowlevel.TrioToken | None", token) + try: + return trio.from_thread.run(func, *args, trio_token=trio_token) + except trio.RunFinishedError: + raise RunFinishedError from None + + @classmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + trio_token = cast("trio.lowlevel.TrioToken | None", token) + try: + return trio.from_thread.run_sync(func, *args, trio_token=trio_token) + except trio.RunFinishedError: + raise RunFinishedError from None + + @classmethod + async def open_process( + cls, + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + **kwargs: Any, + ) -> Process: + def convert_item(item: StrOrBytesPath) -> str: + str_or_bytes = os.fspath(item) + if isinstance(str_or_bytes, str): + return str_or_bytes + else: + return os.fsdecode(str_or_bytes) + + if isinstance(command, (str, bytes, PathLike)): + process = await trio.lowlevel.open_process( + convert_item(command), + stdin=stdin, + stdout=stdout, + stderr=stderr, + shell=True, + **kwargs, + ) + else: + process = await trio.lowlevel.open_process( + [convert_item(item) for item in command], + stdin=stdin, + stdout=stdout, + stderr=stderr, + shell=False, + **kwargs, + ) + + stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None + stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None + stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + @classmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: + trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers) + + @classmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + 
) -> SocketStream: + family = socket.AF_INET6 if ":" in host else socket.AF_INET + trio_socket = trio.socket.socket(family) + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + if local_address: + await trio_socket.bind(local_address) + + try: + await trio_socket.connect((host, port)) + except BaseException: + trio_socket.close() + raise + + return SocketStream(trio_socket) + + @classmethod + async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: + trio_socket = trio.socket.socket(socket.AF_UNIX) + try: + await trio_socket.connect(path) + except BaseException: + trio_socket.close() + raise + + return UNIXSocketStream(trio_socket) + + @classmethod + def create_tcp_listener(cls, sock: socket.socket) -> abc.SocketListener: + return TCPSocketListener(sock) + + @classmethod + def create_unix_listener(cls, sock: socket.socket) -> abc.SocketListener: + return UNIXSocketListener(sock) + + @classmethod + async def create_udp_socket( + cls, + family: socket.AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM) + + if reuse_port: + trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + if local_address: + await trio_socket.bind(local_address) + + if remote_address: + await trio_socket.connect(remote_address) + return ConnectedUDPSocket(trio_socket) + else: + return UDPSocket(trio_socket) + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: None + ) -> abc.UNIXDatagramSocket: ... + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: str | bytes + ) -> abc.ConnectedUNIXDatagramSocket: ... 
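# A minimal, hedged usage sketch of anyio's public UDP API, which the backend
# methods above (create_udp_socket and friends) implement; the loopback host
# and port 9999 are illustrative assumptions, not values from the diff.
import anyio

async def udp_example() -> None:
    # An unconnected UDP socket: sendto()/receive() carry an explicit peer address.
    async with await anyio.create_udp_socket(local_host="127.0.0.1", local_port=9999) as udp:
        await udp.sendto(b"ping", "127.0.0.1", 9999)  # send a datagram to ourselves
        data, addr = await udp.receive()
        print(data, addr)

anyio.run(udp_example)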
+ + @classmethod + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: str | bytes | None + ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: + trio_socket = trio.socket.from_stdlib_socket(raw_socket) + + if remote_path: + await trio_socket.connect(remote_path) + return ConnectedUNIXDatagramSocket(trio_socket) + else: + return UNIXDatagramSocket(trio_socket) + + @classmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> Sequence[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], + ] + ]: + return await trio.socket.getaddrinfo(host, port, family, type, proto, flags) + + @classmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + return await trio.socket.getnameinfo(sockaddr, flags) + + @classmethod + async def wait_readable(cls, obj: FileDescriptorLike) -> None: + try: + await wait_readable(obj) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError("reading from") from None + + @classmethod + async def wait_writable(cls, obj: FileDescriptorLike) -> None: + try: + await wait_writable(obj) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError("writing to") from None + + @classmethod + def notify_closing(cls, obj: FileDescriptorLike) -> None: + notify_closing(obj) + + @classmethod + async def wrap_listener_socket(cls, sock: socket.socket) -> abc.SocketListener: + return TCPSocketListener(sock) + + @classmethod + async def wrap_stream_socket(cls, sock: socket.socket) -> SocketStream: + trio_sock = trio.socket.from_stdlib_socket(sock) + return SocketStream(trio_sock) + + @classmethod + async def wrap_unix_stream_socket(cls, sock: socket.socket) -> UNIXSocketStream: + trio_sock = trio.socket.from_stdlib_socket(sock) + return UNIXSocketStream(trio_sock) + + @classmethod + async def wrap_udp_socket(cls, sock: socket.socket) -> UDPSocket: + trio_sock = trio.socket.from_stdlib_socket(sock) + return UDPSocket(trio_sock) + + @classmethod + async def wrap_connected_udp_socket(cls, sock: socket.socket) -> ConnectedUDPSocket: + trio_sock = trio.socket.from_stdlib_socket(sock) + return ConnectedUDPSocket(trio_sock) + + @classmethod + async def wrap_unix_datagram_socket(cls, sock: socket.socket) -> UNIXDatagramSocket: + trio_sock = trio.socket.from_stdlib_socket(sock) + return UNIXDatagramSocket(trio_sock) + + @classmethod + async def wrap_connected_unix_datagram_socket( + cls, sock: socket.socket + ) -> ConnectedUNIXDatagramSocket: + trio_sock = trio.socket.from_stdlib_socket(sock) + return ConnectedUNIXDatagramSocket(trio_sock) + + @classmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + try: + return _capacity_limiter_wrapper.get() + except LookupError: + limiter = CapacityLimiter( + original=trio.to_thread.current_default_thread_limiter() + ) + _capacity_limiter_wrapper.set(limiter) + return limiter + + @classmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> AbstractContextManager[AsyncIterator[Signals]]: + return _SignalReceiver(signals) + + @classmethod + def get_current_task(cls) -> TaskInfo: + task = 
current_task() + return TrioTaskInfo(task) + + @classmethod + def get_running_tasks(cls) -> Sequence[TaskInfo]: + root_task = current_root_task() + assert root_task + task_infos = [TrioTaskInfo(root_task)] + nurseries = root_task.child_nurseries + while nurseries: + new_nurseries: list[trio.Nursery] = [] + for nursery in nurseries: + for task in nursery.child_tasks: + task_infos.append(TrioTaskInfo(task)) + new_nurseries.extend(task.child_nurseries) + + nurseries = new_nurseries + + return task_infos + + @classmethod + async def wait_all_tasks_blocked(cls) -> None: + from trio.testing import wait_all_tasks_blocked + + await wait_all_tasks_blocked() + + @classmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + return TestRunner(**options) + + +backend_class = TrioBackend diff --git a/.venv/lib/python3.12/site-packages/anyio/_core/__init__.py b/.venv/lib/python3.12/site-packages/anyio/_core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/anyio/_core/_asyncio_selector_thread.py b/.venv/lib/python3.12/site-packages/anyio/_core/_asyncio_selector_thread.py new file mode 100644 index 0000000..9f35bae --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/_core/_asyncio_selector_thread.py @@ -0,0 +1,167 @@ +from __future__ import annotations + +import asyncio +import socket +import threading +from collections.abc import Callable +from selectors import EVENT_READ, EVENT_WRITE, DefaultSelector +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from _typeshed import FileDescriptorLike + +_selector_lock = threading.Lock() +_selector: Selector | None = None + + +class Selector: + def __init__(self) -> None: + self._thread = threading.Thread(target=self.run, name="AnyIO socket selector") + self._selector = DefaultSelector() + self._send, self._receive = socket.socketpair() + self._send.setblocking(False) + self._receive.setblocking(False) + # This somewhat reduces the amount of memory wasted queueing up data + # for wakeups. With these settings, maximum number of 1-byte sends + # before getting BlockingIOError: + # Linux 4.8: 6 + # macOS (darwin 15.5): 1 + # Windows 10: 525347 + # Windows you're weird. (And on Windows setting SNDBUF to 0 makes send + # blocking, even on non-blocking sockets, so don't do that.) + self._receive.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 1) + self._send.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1) + # On Windows this is a TCP socket so this might matter. On other + # platforms this fails b/c AF_UNIX sockets aren't actually TCP. 
+ try: + self._send.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + except OSError: + pass + + self._selector.register(self._receive, EVENT_READ) + self._closed = False + + def start(self) -> None: + self._thread.start() + threading._register_atexit(self._stop) # type: ignore[attr-defined] + + def _stop(self) -> None: + global _selector + self._closed = True + self._notify_self() + self._send.close() + self._thread.join() + self._selector.unregister(self._receive) + self._receive.close() + self._selector.close() + _selector = None + assert not self._selector.get_map(), ( + "selector still has registered file descriptors after shutdown" + ) + + def _notify_self(self) -> None: + try: + self._send.send(b"\x00") + except BlockingIOError: + pass + + def add_reader(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None: + loop = asyncio.get_running_loop() + try: + key = self._selector.get_key(fd) + except KeyError: + self._selector.register(fd, EVENT_READ, {EVENT_READ: (loop, callback)}) + else: + if EVENT_READ in key.data: + raise ValueError( + "this file descriptor is already registered for reading" + ) + + key.data[EVENT_READ] = loop, callback + self._selector.modify(fd, key.events | EVENT_READ, key.data) + + self._notify_self() + + def add_writer(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None: + loop = asyncio.get_running_loop() + try: + key = self._selector.get_key(fd) + except KeyError: + self._selector.register(fd, EVENT_WRITE, {EVENT_WRITE: (loop, callback)}) + else: + if EVENT_WRITE in key.data: + raise ValueError( + "this file descriptor is already registered for writing" + ) + + key.data[EVENT_WRITE] = loop, callback + self._selector.modify(fd, key.events | EVENT_WRITE, key.data) + + self._notify_self() + + def remove_reader(self, fd: FileDescriptorLike) -> bool: + try: + key = self._selector.get_key(fd) + except KeyError: + return False + + if new_events := key.events ^ EVENT_READ: + del key.data[EVENT_READ] + self._selector.modify(fd, new_events, key.data) + else: + self._selector.unregister(fd) + + return True + + def remove_writer(self, fd: FileDescriptorLike) -> bool: + try: + key = self._selector.get_key(fd) + except KeyError: + return False + + if new_events := key.events ^ EVENT_WRITE: + del key.data[EVENT_WRITE] + self._selector.modify(fd, new_events, key.data) + else: + self._selector.unregister(fd) + + return True + + def run(self) -> None: + while not self._closed: + for key, events in self._selector.select(): + if key.fileobj is self._receive: + try: + while self._receive.recv(4096): + pass + except BlockingIOError: + pass + + continue + + if events & EVENT_READ: + loop, callback = key.data[EVENT_READ] + self.remove_reader(key.fd) + try: + loop.call_soon_threadsafe(callback) + except RuntimeError: + pass # the loop was already closed + + if events & EVENT_WRITE: + loop, callback = key.data[EVENT_WRITE] + self.remove_writer(key.fd) + try: + loop.call_soon_threadsafe(callback) + except RuntimeError: + pass # the loop was already closed + + +def get_selector() -> Selector: + global _selector + + with _selector_lock: + if _selector is None: + _selector = Selector() + _selector.start() + + return _selector diff --git a/.venv/lib/python3.12/site-packages/anyio/_core/_contextmanagers.py b/.venv/lib/python3.12/site-packages/anyio/_core/_contextmanagers.py new file mode 100644 index 0000000..302f32b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/_core/_contextmanagers.py @@ -0,0 +1,200 @@ +from __future__ import annotations + 
+from abc import abstractmethod +from contextlib import AbstractAsyncContextManager, AbstractContextManager +from inspect import isasyncgen, iscoroutine, isgenerator +from types import TracebackType +from typing import Protocol, TypeVar, cast, final + +_T_co = TypeVar("_T_co", covariant=True) +_ExitT_co = TypeVar("_ExitT_co", covariant=True, bound="bool | None") + + +class _SupportsCtxMgr(Protocol[_T_co, _ExitT_co]): + def __contextmanager__(self) -> AbstractContextManager[_T_co, _ExitT_co]: ... + + +class _SupportsAsyncCtxMgr(Protocol[_T_co, _ExitT_co]): + def __asynccontextmanager__( + self, + ) -> AbstractAsyncContextManager[_T_co, _ExitT_co]: ... + + +class ContextManagerMixin: + """ + Mixin class providing context manager functionality via a generator-based + implementation. + + This class allows you to implement a context manager via :meth:`__contextmanager__` + which should return a generator. The mechanics are meant to mirror those of + :func:`@contextmanager `. + + .. note:: Classes using this mix-in are not reentrant as context managers, meaning + that once you enter it, you can't re-enter before first exiting it. + + .. seealso:: :doc:`contextmanagers` + """ + + __cm: AbstractContextManager[object, bool | None] | None = None + + @final + def __enter__(self: _SupportsCtxMgr[_T_co, bool | None]) -> _T_co: + # Needed for mypy to assume self still has the __cm member + assert isinstance(self, ContextManagerMixin) + if self.__cm is not None: + raise RuntimeError( + f"this {self.__class__.__qualname__} has already been entered" + ) + + cm = self.__contextmanager__() + if not isinstance(cm, AbstractContextManager): + if isgenerator(cm): + raise TypeError( + "__contextmanager__() returned a generator object instead of " + "a context manager. Did you forget to add the @contextmanager " + "decorator?" + ) + + raise TypeError( + f"__contextmanager__() did not return a context manager object, " + f"but {cm.__class__!r}" + ) + + if cm is self: + raise TypeError( + f"{self.__class__.__qualname__}.__contextmanager__() returned " + f"self. Did you forget to add the @contextmanager decorator and a " + f"'yield' statement?" + ) + + value = cm.__enter__() + self.__cm = cm + return value + + @final + def __exit__( + self: _SupportsCtxMgr[object, _ExitT_co], + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> _ExitT_co: + # Needed for mypy to assume self still has the __cm member + assert isinstance(self, ContextManagerMixin) + if self.__cm is None: + raise RuntimeError( + f"this {self.__class__.__qualname__} has not been entered yet" + ) + + # Prevent circular references + cm = self.__cm + del self.__cm + + return cast(_ExitT_co, cm.__exit__(exc_type, exc_val, exc_tb)) + + @abstractmethod + def __contextmanager__(self) -> AbstractContextManager[object, bool | None]: + """ + Implement your context manager logic here. + + This method **must** be decorated with + :func:`@contextmanager `. + + .. note:: Remember that the ``yield`` will raise any exception raised in the + enclosed context block, so use a ``finally:`` block to clean up resources! + + :return: a context manager object + """ + + +class AsyncContextManagerMixin: + """ + Mixin class providing async context manager functionality via a generator-based + implementation. + + This class allows you to implement a context manager via + :meth:`__asynccontextmanager__`. The mechanics are meant to mirror those of + :func:`@asynccontextmanager `. + + .. 
note:: Classes using this mix-in are not reentrant as context managers, meaning + that once you enter it, you can't re-enter before first exiting it. + + .. seealso:: :doc:`contextmanagers` + """ + + __cm: AbstractAsyncContextManager[object, bool | None] | None = None + + @final + async def __aenter__(self: _SupportsAsyncCtxMgr[_T_co, bool | None]) -> _T_co: + # Needed for mypy to assume self still has the __cm member + assert isinstance(self, AsyncContextManagerMixin) + if self.__cm is not None: + raise RuntimeError( + f"this {self.__class__.__qualname__} has already been entered" + ) + + cm = self.__asynccontextmanager__() + if not isinstance(cm, AbstractAsyncContextManager): + if isasyncgen(cm): + raise TypeError( + "__asynccontextmanager__() returned an async generator instead of " + "an async context manager. Did you forget to add the " + "@asynccontextmanager decorator?" + ) + elif iscoroutine(cm): + cm.close() + raise TypeError( + "__asynccontextmanager__() returned a coroutine object instead of " + "an async context manager. Did you forget to add the " + "@asynccontextmanager decorator and a 'yield' statement?" + ) + + raise TypeError( + f"__asynccontextmanager__() did not return an async context manager, " + f"but {cm.__class__!r}" + ) + + if cm is self: + raise TypeError( + f"{self.__class__.__qualname__}.__asynccontextmanager__() returned " + f"self. Did you forget to add the @asynccontextmanager decorator and a " + f"'yield' statement?" + ) + + value = await cm.__aenter__() + self.__cm = cm + return value + + @final + async def __aexit__( + self: _SupportsAsyncCtxMgr[object, _ExitT_co], + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> _ExitT_co: + assert isinstance(self, AsyncContextManagerMixin) + if self.__cm is None: + raise RuntimeError( + f"this {self.__class__.__qualname__} has not been entered yet" + ) + + # Prevent circular references + cm = self.__cm + del self.__cm + + return cast(_ExitT_co, await cm.__aexit__(exc_type, exc_val, exc_tb)) + + @abstractmethod + def __asynccontextmanager__( + self, + ) -> AbstractAsyncContextManager[object, bool | None]: + """ + Implement your async context manager logic here. + + This method **must** be decorated with + :func:`@asynccontextmanager `. + + .. note:: Remember that the ``yield`` will raise any exception raised in the + enclosed context block, so use a ``finally:`` block to clean up resources! 
+ + :return: an async context manager object + """ diff --git a/.venv/lib/python3.12/site-packages/anyio/_core/_eventloop.py b/.venv/lib/python3.12/site-packages/anyio/_core/_eventloop.py new file mode 100644 index 0000000..59a69cc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/_core/_eventloop.py @@ -0,0 +1,234 @@ +from __future__ import annotations + +import math +import sys +import threading +from collections.abc import Awaitable, Callable, Generator +from contextlib import contextmanager +from contextvars import Token +from importlib import import_module +from typing import TYPE_CHECKING, Any, TypeVar + +from ._exceptions import NoEventLoopError + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +sniffio: Any +try: + import sniffio +except ModuleNotFoundError: + sniffio = None + +if TYPE_CHECKING: + from ..abc import AsyncBackend + +# This must be updated when new backends are introduced +BACKENDS = "asyncio", "trio" + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + +threadlocals = threading.local() +loaded_backends: dict[str, type[AsyncBackend]] = {} + + +def run( + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + backend: str = "asyncio", + backend_options: dict[str, Any] | None = None, +) -> T_Retval: + """ + Run the given coroutine function in an asynchronous event loop. + + The current thread must not be already running an event loop. + + :param func: a coroutine function + :param args: positional arguments to ``func`` + :param backend: name of the asynchronous event loop implementation – currently + either ``asyncio`` or ``trio`` + :param backend_options: keyword arguments to call the backend ``run()`` + implementation with (documented :ref:`here `) + :return: the return value of the coroutine function + :raises RuntimeError: if an asynchronous event loop is already running in this + thread + :raises LookupError: if the named backend is not found + + """ + if asynclib_name := current_async_library(): + raise RuntimeError(f"Already running {asynclib_name} in this thread") + + try: + async_backend = get_async_backend(backend) + except ImportError as exc: + raise LookupError(f"No such backend: {backend}") from exc + + token = None + if asynclib_name is None: + # Since we're in control of the event loop, we can cache the name of the async + # library + token = set_current_async_library(backend) + + try: + backend_options = backend_options or {} + return async_backend.run(func, args, {}, backend_options) + finally: + reset_current_async_library(token) + + +async def sleep(delay: float) -> None: + """ + Pause the current task for the specified duration. + + :param delay: the duration, in seconds + + """ + return await get_async_backend().sleep(delay) + + +async def sleep_forever() -> None: + """ + Pause the current task until it's cancelled. + + This is a shortcut for ``sleep(math.inf)``. + + .. versionadded:: 3.1 + + """ + await sleep(math.inf) + + +async def sleep_until(deadline: float) -> None: + """ + Pause the current task until the given time. + + :param deadline: the absolute time to wake up at (according to the internal + monotonic clock of the event loop) + + .. versionadded:: 3.1 + + """ + now = current_time() + await sleep(max(deadline - now, 0)) + + +def current_time() -> float: + """ + Return the current value of the event loop's internal clock. 
+ + :return: the clock value (seconds) + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + return get_async_backend().current_time() + + +def get_all_backends() -> tuple[str, ...]: + """Return a tuple of the names of all built-in backends.""" + return BACKENDS + + +def get_available_backends() -> tuple[str, ...]: + """ + Test for the availability of built-in backends. + + :return a tuple of the built-in backend names that were successfully imported + + .. versionadded:: 4.12 + + """ + available_backends: list[str] = [] + for backend_name in get_all_backends(): + try: + get_async_backend(backend_name) + except ImportError: + continue + + available_backends.append(backend_name) + + return tuple(available_backends) + + +def get_cancelled_exc_class() -> type[BaseException]: + """ + Return the current async library's cancellation exception class. + + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + return get_async_backend().cancelled_exception_class() + + +# +# Private API +# + + +@contextmanager +def claim_worker_thread( + backend_class: type[AsyncBackend], token: object +) -> Generator[Any, None, None]: + from ..lowlevel import EventLoopToken + + threadlocals.current_token = EventLoopToken(backend_class, token) + try: + yield + finally: + del threadlocals.current_token + + +def get_async_backend(asynclib_name: str | None = None) -> type[AsyncBackend]: + if asynclib_name is None: + asynclib_name = current_async_library() + if not asynclib_name: + raise NoEventLoopError( + f"Not currently running on any asynchronous event loop. " + f"Available async backends: {', '.join(get_all_backends())}" + ) + + # We use our own dict instead of sys.modules to get the already imported back-end + # class because the appropriate modules in sys.modules could potentially be only + # partially initialized + try: + return loaded_backends[asynclib_name] + except KeyError: + module = import_module(f"anyio._backends._{asynclib_name}") + loaded_backends[asynclib_name] = module.backend_class + return module.backend_class + + +def current_async_library() -> str | None: + if sniffio is None: + # If sniffio is not installed, we assume we're either running asyncio or nothing + import asyncio + + try: + asyncio.get_running_loop() + return "asyncio" + except RuntimeError: + pass + else: + try: + return sniffio.current_async_library() + except sniffio.AsyncLibraryNotFoundError: + pass + + return None + + +def set_current_async_library(asynclib_name: str | None) -> Token | None: + # no-op if sniffio is not installed + if sniffio is None: + return None + + return sniffio.current_async_library_cvar.set(asynclib_name) + + +def reset_current_async_library(token: Token | None) -> None: + if token is not None: + sniffio.current_async_library_cvar.reset(token) diff --git a/.venv/lib/python3.12/site-packages/anyio/_core/_exceptions.py b/.venv/lib/python3.12/site-packages/anyio/_core/_exceptions.py new file mode 100644 index 0000000..3776bed --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/_core/_exceptions.py @@ -0,0 +1,156 @@ +from __future__ import annotations + +import sys +from collections.abc import Generator +from textwrap import dedent +from typing import Any + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + + +class BrokenResourceError(Exception): + """ + Raised when trying to use a resource that has been rendered unusable due to external + causes (e.g. 
a send stream whose peer has disconnected). + """ + + +class BrokenWorkerProcess(Exception): + """ + Raised by :meth:`~anyio.to_process.run_sync` if the worker process terminates abruptly or + otherwise misbehaves. + """ + + +class BrokenWorkerInterpreter(Exception): + """ + Raised by :meth:`~anyio.to_interpreter.run_sync` if an unexpected exception is + raised in the subinterpreter. + """ + + def __init__(self, excinfo: Any): + # This was adapted from concurrent.futures.interpreter.ExecutionFailed + msg = excinfo.formatted + if not msg: + if excinfo.type and excinfo.msg: + msg = f"{excinfo.type.__name__}: {excinfo.msg}" + else: + msg = excinfo.type.__name__ or excinfo.msg + + super().__init__(msg) + self.excinfo = excinfo + + def __str__(self) -> str: + try: + formatted = self.excinfo.errdisplay + except Exception: + return super().__str__() + else: + return dedent( + f""" + {super().__str__()} + + Uncaught in the interpreter: + + {formatted} + """.strip() + ) + + +class BusyResourceError(Exception): + """ + Raised when two tasks are trying to read from or write to the same resource + concurrently. + """ + + def __init__(self, action: str): + super().__init__(f"Another task is already {action} this resource") + + +class ClosedResourceError(Exception): + """Raised when trying to use a resource that has been closed.""" + + +class ConnectionFailed(OSError): + """ + Raised when a connection attempt fails. + + .. note:: This class inherits from :exc:`OSError` for backwards compatibility. + """ + + +def iterate_exceptions( + exception: BaseException, +) -> Generator[BaseException, None, None]: + if isinstance(exception, BaseExceptionGroup): + for exc in exception.exceptions: + yield from iterate_exceptions(exc) + else: + yield exception + + +class DelimiterNotFound(Exception): + """ + Raised during + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + maximum number of bytes has been read without the delimiter being found. + """ + + def __init__(self, max_bytes: int) -> None: + super().__init__( + f"The delimiter was not found among the first {max_bytes} bytes" + ) + + +class EndOfStream(Exception): + """ + Raised when trying to read from a stream that has been closed from the other end. + """ + + +class IncompleteRead(Exception): + """ + Raised during + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + connection is closed before the requested amount of bytes has been read. + """ + + def __init__(self) -> None: + super().__init__( + "The stream was closed before the read operation could be completed" + ) + + +class TypedAttributeLookupError(LookupError): + """ + Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute + is not found and no default value has been given. + """ + + +class WouldBlock(Exception): + """Raised by ``X_nowait`` functions if ``X()`` would block.""" + + +class NoEventLoopError(RuntimeError): + """ + Raised by several functions that require an event loop to be running in the current + thread when there is no running event loop. + + This is also raised by :func:`.from_thread.run` and :func:`.from_thread.run_sync` + if not calling from an AnyIO worker thread, and no ``token`` was passed. + """ + + +class RunFinishedError(RuntimeError): + """ + Raised by :func:`.from_thread.run` and :func:`.from_thread.run_sync` if the event + loop associated with the explicitly passed token has already finished. 
+ """ + + def __init__(self) -> None: + super().__init__( + "The event loop associated with the given token has already finished" + ) diff --git a/.venv/lib/python3.12/site-packages/anyio/_core/_fileio.py b/.venv/lib/python3.12/site-packages/anyio/_core/_fileio.py new file mode 100644 index 0000000..061f0d7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/_core/_fileio.py @@ -0,0 +1,797 @@ +from __future__ import annotations + +import os +import pathlib +import sys +from collections.abc import ( + AsyncIterator, + Callable, + Iterable, + Iterator, + Sequence, +) +from dataclasses import dataclass +from functools import partial +from os import PathLike +from typing import ( + IO, + TYPE_CHECKING, + Any, + AnyStr, + ClassVar, + Final, + Generic, + overload, +) + +from .. import to_thread +from ..abc import AsyncResource + +if TYPE_CHECKING: + from types import ModuleType + + from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer +else: + ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object + + +class AsyncFile(AsyncResource, Generic[AnyStr]): + """ + An asynchronous file object. + + This class wraps a standard file object and provides async friendly versions of the + following blocking methods (where available on the original file object): + + * read + * read1 + * readline + * readlines + * readinto + * readinto1 + * write + * writelines + * truncate + * seek + * tell + * flush + + All other methods are directly passed through. + + This class supports the asynchronous context manager protocol which closes the + underlying file at the end of the context block. + + This class also supports asynchronous iteration:: + + async with await open_file(...) as f: + async for line in f: + print(line) + """ + + def __init__(self, fp: IO[AnyStr]) -> None: + self._fp: Any = fp + + def __getattr__(self, name: str) -> object: + return getattr(self._fp, name) + + @property + def wrapped(self) -> IO[AnyStr]: + """The wrapped file object.""" + return self._fp + + async def __aiter__(self) -> AsyncIterator[AnyStr]: + while True: + line = await self.readline() + if line: + yield line + else: + break + + async def aclose(self) -> None: + return await to_thread.run_sync(self._fp.close) + + async def read(self, size: int = -1) -> AnyStr: + return await to_thread.run_sync(self._fp.read, size) + + async def read1(self: AsyncFile[bytes], size: int = -1) -> bytes: + return await to_thread.run_sync(self._fp.read1, size) + + async def readline(self) -> AnyStr: + return await to_thread.run_sync(self._fp.readline) + + async def readlines(self) -> list[AnyStr]: + return await to_thread.run_sync(self._fp.readlines) + + async def readinto(self: AsyncFile[bytes], b: WriteableBuffer) -> int: + return await to_thread.run_sync(self._fp.readinto, b) + + async def readinto1(self: AsyncFile[bytes], b: WriteableBuffer) -> int: + return await to_thread.run_sync(self._fp.readinto1, b) + + @overload + async def write(self: AsyncFile[bytes], b: ReadableBuffer) -> int: ... + + @overload + async def write(self: AsyncFile[str], b: str) -> int: ... + + async def write(self, b: ReadableBuffer | str) -> int: + return await to_thread.run_sync(self._fp.write, b) + + @overload + async def writelines( + self: AsyncFile[bytes], lines: Iterable[ReadableBuffer] + ) -> None: ... + + @overload + async def writelines(self: AsyncFile[str], lines: Iterable[str]) -> None: ... 
+ + async def writelines(self, lines: Iterable[ReadableBuffer] | Iterable[str]) -> None: + return await to_thread.run_sync(self._fp.writelines, lines) + + async def truncate(self, size: int | None = None) -> int: + return await to_thread.run_sync(self._fp.truncate, size) + + async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int: + return await to_thread.run_sync(self._fp.seek, offset, whence) + + async def tell(self) -> int: + return await to_thread.run_sync(self._fp.tell) + + async def flush(self) -> None: + return await to_thread.run_sync(self._fp.flush) + + +@overload +async def open_file( + file: str | PathLike[str] | int, + mode: OpenBinaryMode, + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: Callable[[str, int], int] | None = ..., +) -> AsyncFile[bytes]: ... + + +@overload +async def open_file( + file: str | PathLike[str] | int, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: Callable[[str, int], int] | None = ..., +) -> AsyncFile[str]: ... + + +async def open_file( + file: str | PathLike[str] | int, + mode: str = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + closefd: bool = True, + opener: Callable[[str, int], int] | None = None, +) -> AsyncFile[Any]: + """ + Open a file asynchronously. + + The arguments are exactly the same as for the builtin :func:`open`. + + :return: an asynchronous file object + + """ + fp = await to_thread.run_sync( + open, file, mode, buffering, encoding, errors, newline, closefd, opener + ) + return AsyncFile(fp) + + +def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]: + """ + Wrap an existing file as an asynchronous file. + + :param file: an existing file-like object + :return: an asynchronous file object + + """ + return AsyncFile(file) + + +@dataclass(eq=False) +class _PathIterator(AsyncIterator["Path"]): + iterator: Iterator[PathLike[str]] + + async def __anext__(self) -> Path: + nextval = await to_thread.run_sync( + next, self.iterator, None, abandon_on_cancel=True + ) + if nextval is None: + raise StopAsyncIteration from None + + return Path(nextval) + + +class Path: + """ + An asynchronous version of :class:`pathlib.Path`. + + This class cannot be substituted for :class:`pathlib.Path` or + :class:`pathlib.PurePath`, but it is compatible with the :class:`os.PathLike` + interface. + + It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for + the deprecated :meth:`~pathlib.Path.link_to` method. 
+ + Some methods may be unavailable or have limited functionality, based on the Python + version: + + * :meth:`~pathlib.Path.copy` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.copy_into` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.from_uri` (available on Python 3.13 or later) + * :meth:`~pathlib.PurePath.full_match` (available on Python 3.13 or later) + * :attr:`~pathlib.Path.info` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.is_junction` (available on Python 3.12 or later) + * :meth:`~pathlib.PurePath.match` (the ``case_sensitive`` parameter is only + available on Python 3.13 or later) + * :meth:`~pathlib.Path.move` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.move_into` (available on Python 3.14 or later) + * :meth:`~pathlib.PurePath.relative_to` (the ``walk_up`` parameter is only available + on Python 3.12 or later) + * :meth:`~pathlib.Path.walk` (available on Python 3.12 or later) + + Any methods that do disk I/O need to be awaited on. These methods are: + + * :meth:`~pathlib.Path.absolute` + * :meth:`~pathlib.Path.chmod` + * :meth:`~pathlib.Path.cwd` + * :meth:`~pathlib.Path.exists` + * :meth:`~pathlib.Path.expanduser` + * :meth:`~pathlib.Path.group` + * :meth:`~pathlib.Path.hardlink_to` + * :meth:`~pathlib.Path.home` + * :meth:`~pathlib.Path.is_block_device` + * :meth:`~pathlib.Path.is_char_device` + * :meth:`~pathlib.Path.is_dir` + * :meth:`~pathlib.Path.is_fifo` + * :meth:`~pathlib.Path.is_file` + * :meth:`~pathlib.Path.is_junction` + * :meth:`~pathlib.Path.is_mount` + * :meth:`~pathlib.Path.is_socket` + * :meth:`~pathlib.Path.is_symlink` + * :meth:`~pathlib.Path.lchmod` + * :meth:`~pathlib.Path.lstat` + * :meth:`~pathlib.Path.mkdir` + * :meth:`~pathlib.Path.open` + * :meth:`~pathlib.Path.owner` + * :meth:`~pathlib.Path.read_bytes` + * :meth:`~pathlib.Path.read_text` + * :meth:`~pathlib.Path.readlink` + * :meth:`~pathlib.Path.rename` + * :meth:`~pathlib.Path.replace` + * :meth:`~pathlib.Path.resolve` + * :meth:`~pathlib.Path.rmdir` + * :meth:`~pathlib.Path.samefile` + * :meth:`~pathlib.Path.stat` + * :meth:`~pathlib.Path.symlink_to` + * :meth:`~pathlib.Path.touch` + * :meth:`~pathlib.Path.unlink` + * :meth:`~pathlib.Path.walk` + * :meth:`~pathlib.Path.write_bytes` + * :meth:`~pathlib.Path.write_text` + + Additionally, the following methods return an async iterator yielding + :class:`~.Path` objects: + + * :meth:`~pathlib.Path.glob` + * :meth:`~pathlib.Path.iterdir` + * :meth:`~pathlib.Path.rglob` + """ + + __slots__ = "_path", "__weakref__" + + __weakref__: Any + + def __init__(self, *args: str | PathLike[str]) -> None: + self._path: Final[pathlib.Path] = pathlib.Path(*args) + + def __fspath__(self) -> str: + return self._path.__fspath__() + + def __str__(self) -> str: + return self._path.__str__() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.as_posix()!r})" + + def __bytes__(self) -> bytes: + return self._path.__bytes__() + + def __hash__(self) -> int: + return self._path.__hash__() + + def __eq__(self, other: object) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__eq__(target) + + def __lt__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__lt__(target) + + def __le__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__le__(target) + + def __gt__(self, other: pathlib.PurePath | Path) -> 
bool: + target = other._path if isinstance(other, Path) else other + return self._path.__gt__(target) + + def __ge__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__ge__(target) + + def __truediv__(self, other: str | PathLike[str]) -> Path: + return Path(self._path / other) + + def __rtruediv__(self, other: str | PathLike[str]) -> Path: + return Path(other) / self + + @property + def parts(self) -> tuple[str, ...]: + return self._path.parts + + @property + def drive(self) -> str: + return self._path.drive + + @property + def root(self) -> str: + return self._path.root + + @property + def anchor(self) -> str: + return self._path.anchor + + @property + def parents(self) -> Sequence[Path]: + return tuple(Path(p) for p in self._path.parents) + + @property + def parent(self) -> Path: + return Path(self._path.parent) + + @property + def name(self) -> str: + return self._path.name + + @property + def suffix(self) -> str: + return self._path.suffix + + @property + def suffixes(self) -> list[str]: + return self._path.suffixes + + @property + def stem(self) -> str: + return self._path.stem + + async def absolute(self) -> Path: + path = await to_thread.run_sync(self._path.absolute) + return Path(path) + + def as_posix(self) -> str: + return self._path.as_posix() + + def as_uri(self) -> str: + return self._path.as_uri() + + if sys.version_info >= (3, 13): + parser: ClassVar[ModuleType] = pathlib.Path.parser + + @classmethod + def from_uri(cls, uri: str) -> Path: + return Path(pathlib.Path.from_uri(uri)) + + def full_match( + self, path_pattern: str, *, case_sensitive: bool | None = None + ) -> bool: + return self._path.full_match(path_pattern, case_sensitive=case_sensitive) + + def match( + self, path_pattern: str, *, case_sensitive: bool | None = None + ) -> bool: + return self._path.match(path_pattern, case_sensitive=case_sensitive) + else: + + def match(self, path_pattern: str) -> bool: + return self._path.match(path_pattern) + + if sys.version_info >= (3, 14): + + @property + def info(self) -> Any: # TODO: add return type annotation when Typeshed gets it + return self._path.info + + async def copy( + self, + target: str | os.PathLike[str], + *, + follow_symlinks: bool = True, + preserve_metadata: bool = False, + ) -> Path: + func = partial( + self._path.copy, + follow_symlinks=follow_symlinks, + preserve_metadata=preserve_metadata, + ) + return Path(await to_thread.run_sync(func, pathlib.Path(target))) + + async def copy_into( + self, + target_dir: str | os.PathLike[str], + *, + follow_symlinks: bool = True, + preserve_metadata: bool = False, + ) -> Path: + func = partial( + self._path.copy_into, + follow_symlinks=follow_symlinks, + preserve_metadata=preserve_metadata, + ) + return Path(await to_thread.run_sync(func, pathlib.Path(target_dir))) + + async def move(self, target: str | os.PathLike[str]) -> Path: + # Upstream does not handle anyio.Path properly as a PathLike + target = pathlib.Path(target) + return Path(await to_thread.run_sync(self._path.move, target)) + + async def move_into( + self, + target_dir: str | os.PathLike[str], + ) -> Path: + return Path(await to_thread.run_sync(self._path.move_into, target_dir)) + + def is_relative_to(self, other: str | PathLike[str]) -> bool: + try: + self.relative_to(other) + return True + except ValueError: + return False + + async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: + func = partial(os.chmod, follow_symlinks=follow_symlinks) + return await 
to_thread.run_sync(func, self._path, mode) + + @classmethod + async def cwd(cls) -> Path: + path = await to_thread.run_sync(pathlib.Path.cwd) + return cls(path) + + async def exists(self) -> bool: + return await to_thread.run_sync(self._path.exists, abandon_on_cancel=True) + + async def expanduser(self) -> Path: + return Path( + await to_thread.run_sync(self._path.expanduser, abandon_on_cancel=True) + ) + + if sys.version_info < (3, 12): + # Python 3.11 and earlier + def glob(self, pattern: str) -> AsyncIterator[Path]: + gen = self._path.glob(pattern) + return _PathIterator(gen) + elif (3, 12) <= sys.version_info < (3, 13): + # changed in Python 3.12: + # - The case_sensitive parameter was added. + def glob( + self, + pattern: str, + *, + case_sensitive: bool | None = None, + ) -> AsyncIterator[Path]: + gen = self._path.glob(pattern, case_sensitive=case_sensitive) + return _PathIterator(gen) + elif sys.version_info >= (3, 13): + # Changed in Python 3.13: + # - The recurse_symlinks parameter was added. + # - The pattern parameter accepts a path-like object. + def glob( # type: ignore[misc] # mypy doesn't allow for differing signatures in a conditional block + self, + pattern: str | PathLike[str], + *, + case_sensitive: bool | None = None, + recurse_symlinks: bool = False, + ) -> AsyncIterator[Path]: + gen = self._path.glob( + pattern, # type: ignore[arg-type] + case_sensitive=case_sensitive, + recurse_symlinks=recurse_symlinks, + ) + return _PathIterator(gen) + + async def group(self) -> str: + return await to_thread.run_sync(self._path.group, abandon_on_cancel=True) + + async def hardlink_to( + self, target: str | bytes | PathLike[str] | PathLike[bytes] + ) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(os.link, target, self) + + @classmethod + async def home(cls) -> Path: + home_path = await to_thread.run_sync(pathlib.Path.home) + return cls(home_path) + + def is_absolute(self) -> bool: + return self._path.is_absolute() + + async def is_block_device(self) -> bool: + return await to_thread.run_sync( + self._path.is_block_device, abandon_on_cancel=True + ) + + async def is_char_device(self) -> bool: + return await to_thread.run_sync( + self._path.is_char_device, abandon_on_cancel=True + ) + + async def is_dir(self) -> bool: + return await to_thread.run_sync(self._path.is_dir, abandon_on_cancel=True) + + async def is_fifo(self) -> bool: + return await to_thread.run_sync(self._path.is_fifo, abandon_on_cancel=True) + + async def is_file(self) -> bool: + return await to_thread.run_sync(self._path.is_file, abandon_on_cancel=True) + + if sys.version_info >= (3, 12): + + async def is_junction(self) -> bool: + return await to_thread.run_sync(self._path.is_junction) + + async def is_mount(self) -> bool: + return await to_thread.run_sync( + os.path.ismount, self._path, abandon_on_cancel=True + ) + + def is_reserved(self) -> bool: + return self._path.is_reserved() + + async def is_socket(self) -> bool: + return await to_thread.run_sync(self._path.is_socket, abandon_on_cancel=True) + + async def is_symlink(self) -> bool: + return await to_thread.run_sync(self._path.is_symlink, abandon_on_cancel=True) + + async def iterdir(self) -> AsyncIterator[Path]: + gen = ( + self._path.iterdir() + if sys.version_info < (3, 13) + else await to_thread.run_sync(self._path.iterdir, abandon_on_cancel=True) + ) + async for path in _PathIterator(gen): + yield path + + def joinpath(self, *args: str | PathLike[str]) -> Path: + return Path(self._path.joinpath(*args)) + + async 
def lchmod(self, mode: int) -> None: + await to_thread.run_sync(self._path.lchmod, mode) + + async def lstat(self) -> os.stat_result: + return await to_thread.run_sync(self._path.lstat, abandon_on_cancel=True) + + async def mkdir( + self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False + ) -> None: + await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok) + + @overload + async def open( + self, + mode: OpenBinaryMode, + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> AsyncFile[bytes]: ... + + @overload + async def open( + self, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> AsyncFile[str]: ... + + async def open( + self, + mode: str = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> AsyncFile[Any]: + fp = await to_thread.run_sync( + self._path.open, mode, buffering, encoding, errors, newline + ) + return AsyncFile(fp) + + async def owner(self) -> str: + return await to_thread.run_sync(self._path.owner, abandon_on_cancel=True) + + async def read_bytes(self) -> bytes: + return await to_thread.run_sync(self._path.read_bytes) + + async def read_text( + self, encoding: str | None = None, errors: str | None = None + ) -> str: + return await to_thread.run_sync(self._path.read_text, encoding, errors) + + if sys.version_info >= (3, 12): + + def relative_to( + self, *other: str | PathLike[str], walk_up: bool = False + ) -> Path: + # relative_to() should work with any PathLike but it doesn't + others = [pathlib.Path(other) for other in other] + return Path(self._path.relative_to(*others, walk_up=walk_up)) + + else: + + def relative_to(self, *other: str | PathLike[str]) -> Path: + return Path(self._path.relative_to(*other)) + + async def readlink(self) -> Path: + target = await to_thread.run_sync(os.readlink, self._path) + return Path(target) + + async def rename(self, target: str | pathlib.PurePath | Path) -> Path: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.rename, target) + return Path(target) + + async def replace(self, target: str | pathlib.PurePath | Path) -> Path: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.replace, target) + return Path(target) + + async def resolve(self, strict: bool = False) -> Path: + func = partial(self._path.resolve, strict=strict) + return Path(await to_thread.run_sync(func, abandon_on_cancel=True)) + + if sys.version_info < (3, 12): + # Pre Python 3.12 + def rglob(self, pattern: str) -> AsyncIterator[Path]: + gen = self._path.rglob(pattern) + return _PathIterator(gen) + elif (3, 12) <= sys.version_info < (3, 13): + # Changed in Python 3.12: + # - The case_sensitive parameter was added. + def rglob( + self, pattern: str, *, case_sensitive: bool | None = None + ) -> AsyncIterator[Path]: + gen = self._path.rglob(pattern, case_sensitive=case_sensitive) + return _PathIterator(gen) + elif sys.version_info >= (3, 13): + # Changed in Python 3.13: + # - The recurse_symlinks parameter was added. + # - The pattern parameter accepts a path-like object. 
+ def rglob( # type: ignore[misc] # mypy doesn't allow for differing signatures in a conditional block + self, + pattern: str | PathLike[str], + *, + case_sensitive: bool | None = None, + recurse_symlinks: bool = False, + ) -> AsyncIterator[Path]: + gen = self._path.rglob( + pattern, # type: ignore[arg-type] + case_sensitive=case_sensitive, + recurse_symlinks=recurse_symlinks, + ) + return _PathIterator(gen) + + async def rmdir(self) -> None: + await to_thread.run_sync(self._path.rmdir) + + async def samefile(self, other_path: str | PathLike[str]) -> bool: + if isinstance(other_path, Path): + other_path = other_path._path + + return await to_thread.run_sync( + self._path.samefile, other_path, abandon_on_cancel=True + ) + + async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result: + func = partial(os.stat, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, abandon_on_cancel=True) + + async def symlink_to( + self, + target: str | bytes | PathLike[str] | PathLike[bytes], + target_is_directory: bool = False, + ) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.symlink_to, target, target_is_directory) + + async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: + await to_thread.run_sync(self._path.touch, mode, exist_ok) + + async def unlink(self, missing_ok: bool = False) -> None: + try: + await to_thread.run_sync(self._path.unlink) + except FileNotFoundError: + if not missing_ok: + raise + + if sys.version_info >= (3, 12): + + async def walk( + self, + top_down: bool = True, + on_error: Callable[[OSError], object] | None = None, + follow_symlinks: bool = False, + ) -> AsyncIterator[tuple[Path, list[str], list[str]]]: + def get_next_value() -> tuple[pathlib.Path, list[str], list[str]] | None: + try: + return next(gen) + except StopIteration: + return None + + gen = self._path.walk(top_down, on_error, follow_symlinks) + while True: + value = await to_thread.run_sync(get_next_value) + if value is None: + return + + root, dirs, paths = value + yield Path(root), dirs, paths + + def with_name(self, name: str) -> Path: + return Path(self._path.with_name(name)) + + def with_stem(self, stem: str) -> Path: + return Path(self._path.with_name(stem + self._path.suffix)) + + def with_suffix(self, suffix: str) -> Path: + return Path(self._path.with_suffix(suffix)) + + def with_segments(self, *pathsegments: str | PathLike[str]) -> Path: + return Path(*pathsegments) + + async def write_bytes(self, data: bytes) -> int: + return await to_thread.run_sync(self._path.write_bytes, data) + + async def write_text( + self, + data: str, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> int: + # Path.write_text() does not support the "newline" parameter before Python 3.10 + def sync_write_text() -> int: + with self._path.open( + "w", encoding=encoding, errors=errors, newline=newline + ) as fp: + return fp.write(data) + + return await to_thread.run_sync(sync_write_text) + + +PathLike.register(Path) diff --git a/.venv/lib/python3.12/site-packages/anyio/_core/_resources.py b/.venv/lib/python3.12/site-packages/anyio/_core/_resources.py new file mode 100644 index 0000000..b9a5344 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/_core/_resources.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from ..abc import AsyncResource +from ._tasks import CancelScope + + +async def aclose_forcefully(resource: AsyncResource) -> None: + """ + Close an 
asynchronous resource in a cancelled scope. + + Doing this closes the resource without waiting on anything. + + :param resource: the resource to close + + """ + with CancelScope() as scope: + scope.cancel() + await resource.aclose() diff --git a/.venv/lib/python3.12/site-packages/anyio/_core/_signals.py b/.venv/lib/python3.12/site-packages/anyio/_core/_signals.py new file mode 100644 index 0000000..e24c79e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/_core/_signals.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +from collections.abc import AsyncIterator +from contextlib import AbstractContextManager +from signal import Signals + +from ._eventloop import get_async_backend + + +def open_signal_receiver( + *signals: Signals, +) -> AbstractContextManager[AsyncIterator[Signals]]: + """ + Start receiving operating system signals. + + :param signals: signals to receive (e.g. ``signal.SIGINT``) + :return: an asynchronous context manager for an asynchronous iterator which yields + signal numbers + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + .. warning:: Windows does not support signals natively so it is best to avoid + relying on this in cross-platform applications. + + .. warning:: On asyncio, this permanently replaces any previous signal handler for + the given signals, as set via :meth:`~asyncio.loop.add_signal_handler`. + + """ + return get_async_backend().open_signal_receiver(*signals) diff --git a/.venv/lib/python3.12/site-packages/anyio/_core/_sockets.py b/.venv/lib/python3.12/site-packages/anyio/_core/_sockets.py new file mode 100644 index 0000000..6c99b3a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/_core/_sockets.py @@ -0,0 +1,1003 @@ +from __future__ import annotations + +import errno +import os +import socket +import ssl +import stat +import sys +from collections.abc import Awaitable +from dataclasses import dataclass +from ipaddress import IPv4Address, IPv6Address, ip_address +from os import PathLike, chmod +from socket import AddressFamily, SocketKind +from typing import TYPE_CHECKING, Any, Literal, cast, overload + +from .. 
import ConnectionFailed, to_thread +from ..abc import ( + ByteStreamConnectable, + ConnectedUDPSocket, + ConnectedUNIXDatagramSocket, + IPAddressType, + IPSockAddrType, + SocketListener, + SocketStream, + UDPSocket, + UNIXDatagramSocket, + UNIXSocketStream, +) +from ..streams.stapled import MultiListener +from ..streams.tls import TLSConnectable, TLSStream +from ._eventloop import get_async_backend +from ._resources import aclose_forcefully +from ._synchronization import Event +from ._tasks import create_task_group, move_on_after + +if TYPE_CHECKING: + from _typeshed import FileDescriptorLike +else: + FileDescriptorLike = object + +if sys.version_info < (3, 11): + from exceptiongroup import ExceptionGroup + +if sys.version_info >= (3, 12): + from typing import override +else: + from typing_extensions import override + +if sys.version_info < (3, 13): + from typing_extensions import deprecated +else: + from warnings import deprecated + +IPPROTO_IPV6 = getattr(socket, "IPPROTO_IPV6", 41) # https://bugs.python.org/issue29515 + +AnyIPAddressFamily = Literal[ + AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, AddressFamily.AF_INET6 +] +IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6] + + +# tls_hostname given +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + ssl_context: ssl.SSLContext | None = ..., + tls_standard_compatible: bool = ..., + tls_hostname: str, + happy_eyeballs_delay: float = ..., +) -> TLSStream: ... + + +# ssl_context given +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + ssl_context: ssl.SSLContext, + tls_standard_compatible: bool = ..., + tls_hostname: str | None = ..., + happy_eyeballs_delay: float = ..., +) -> TLSStream: ... + + +# tls=True +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + tls: Literal[True], + ssl_context: ssl.SSLContext | None = ..., + tls_standard_compatible: bool = ..., + tls_hostname: str | None = ..., + happy_eyeballs_delay: float = ..., +) -> TLSStream: ... + + +# tls=False +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + tls: Literal[False], + ssl_context: ssl.SSLContext | None = ..., + tls_standard_compatible: bool = ..., + tls_hostname: str | None = ..., + happy_eyeballs_delay: float = ..., +) -> SocketStream: ... + + +# No TLS arguments +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + happy_eyeballs_delay: float = ..., +) -> SocketStream: ... + + +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = None, + tls: bool = False, + ssl_context: ssl.SSLContext | None = None, + tls_standard_compatible: bool = True, + tls_hostname: str | None = None, + happy_eyeballs_delay: float = 0.25, +) -> SocketStream | TLSStream: + """ + Connect to a host using the TCP protocol. + + This function implements the stateless version of the Happy Eyeballs algorithm (RFC + 6555). If ``remote_host`` is a host name that resolves to multiple IP addresses, + each one is tried until one connection attempt succeeds. If the first attempt does + not connected within 250 milliseconds, a second attempt is started using the next + address in the list, and so on. 
On IPv6 enabled systems, an IPv6 address (if + available) is tried first. + + When the connection has been established, a TLS handshake will be done if either + ``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``. + + :param remote_host: the IP address or host name to connect to + :param remote_port: port on the target host to connect to + :param local_host: the interface address or name to bind the socket to before + connecting + :param tls: ``True`` to do a TLS handshake with the connected stream and return a + :class:`~anyio.streams.tls.TLSStream` instead + :param ssl_context: the SSL context object to use (if omitted, a default context is + created) + :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake + before closing the stream and requires that the server does this as well. + Otherwise, :exc:`~ssl.SSLEOFError` may be raised during reads from the stream. + Some protocols, such as HTTP, require this option to be ``False``. + See :meth:`~ssl.SSLContext.wrap_socket` for details. + :param tls_hostname: host name to check the server certificate against (defaults to + the value of ``remote_host``) + :param happy_eyeballs_delay: delay (in seconds) before starting the next connection + attempt + :return: a socket stream object if no TLS handshake was done, otherwise a TLS stream + :raises ConnectionFailed: if the connection fails + + """ + # Placed here due to https://github.com/python/mypy/issues/7057 + connected_stream: SocketStream | None = None + + async def try_connect(remote_host: str, event: Event) -> None: + nonlocal connected_stream + try: + stream = await asynclib.connect_tcp(remote_host, remote_port, local_address) + except OSError as exc: + oserrors.append(exc) + return + else: + if connected_stream is None: + connected_stream = stream + tg.cancel_scope.cancel() + else: + await stream.aclose() + finally: + event.set() + + asynclib = get_async_backend() + local_address: IPSockAddrType | None = None + family = socket.AF_UNSPEC + if local_host: + gai_res = await getaddrinfo(str(local_host), None) + family, *_, local_address = gai_res[0] + + target_host = str(remote_host) + try: + addr_obj = ip_address(remote_host) + except ValueError: + addr_obj = None + + if addr_obj is not None: + if isinstance(addr_obj, IPv6Address): + target_addrs = [(socket.AF_INET6, addr_obj.compressed)] + else: + target_addrs = [(socket.AF_INET, addr_obj.compressed)] + else: + # getaddrinfo() will raise an exception if name resolution fails + gai_res = await getaddrinfo( + target_host, remote_port, family=family, type=socket.SOCK_STREAM + ) + + # Organize the list so that the first address is an IPv6 address (if available) + # and the second one is an IPv4 addresses. The rest can be in whatever order. 
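        # For example, if getaddrinfo() returned [v4a, v6a, v4b], the loop below
        # reorders it to [v6a, v4b, v4a], so the first two attempts (spaced by
        # happy_eyeballs_delay) cover both address families.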
+ v6_found = v4_found = False + target_addrs = [] + for af, *_, sa in gai_res: + if af == socket.AF_INET6 and not v6_found: + v6_found = True + target_addrs.insert(0, (af, sa[0])) + elif af == socket.AF_INET and not v4_found and v6_found: + v4_found = True + target_addrs.insert(1, (af, sa[0])) + else: + target_addrs.append((af, sa[0])) + + oserrors: list[OSError] = [] + try: + async with create_task_group() as tg: + for _af, addr in target_addrs: + event = Event() + tg.start_soon(try_connect, addr, event) + with move_on_after(happy_eyeballs_delay): + await event.wait() + + if connected_stream is None: + cause = ( + oserrors[0] + if len(oserrors) == 1 + else ExceptionGroup("multiple connection attempts failed", oserrors) + ) + raise OSError("All connection attempts failed") from cause + finally: + oserrors.clear() + + if tls or tls_hostname or ssl_context: + try: + return await TLSStream.wrap( + connected_stream, + server_side=False, + hostname=tls_hostname or str(remote_host), + ssl_context=ssl_context, + standard_compatible=tls_standard_compatible, + ) + except BaseException: + await aclose_forcefully(connected_stream) + raise + + return connected_stream + + +async def connect_unix(path: str | bytes | PathLike[Any]) -> UNIXSocketStream: + """ + Connect to the given UNIX socket. + + Not available on Windows. + + :param path: path to the socket + :return: a socket stream object + :raises ConnectionFailed: if the connection fails + + """ + path = os.fspath(path) + return await get_async_backend().connect_unix(path) + + +async def create_tcp_listener( + *, + local_host: IPAddressType | None = None, + local_port: int = 0, + family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC, + backlog: int = 65536, + reuse_port: bool = False, +) -> MultiListener[SocketStream]: + """ + Create a TCP socket listener. + + :param local_port: port number to listen on + :param local_host: IP address of the interface to listen on. If omitted, listen on + all IPv4 and IPv6 interfaces. To listen on all interfaces on a specific address + family, use ``0.0.0.0`` for IPv4 or ``::`` for IPv6. + :param family: address family (used if ``local_host`` was omitted) + :param backlog: maximum number of queued incoming connections (up to a maximum of + 2**16, or 65536) + :param reuse_port: ``True`` to allow multiple sockets to bind to the same + address/port (not supported on Windows) + :return: a multi-listener object containing one or more socket listeners + :raises OSError: if there's an error creating a socket, or binding to one or more + interfaces failed + + """ + asynclib = get_async_backend() + backlog = min(backlog, 65536) + local_host = str(local_host) if local_host is not None else None + + def setup_raw_socket( + fam: AddressFamily, + bind_addr: tuple[str, int] | tuple[str, int, int, int], + *, + v6only: bool = True, + ) -> socket.socket: + sock = socket.socket(fam) + try: + sock.setblocking(False) + + if fam == AddressFamily.AF_INET6: + sock.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, v6only) + + # For Windows, enable exclusive address use. For others, enable address + # reuse. 
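            # (SO_EXCLUSIVEADDRUSE prevents other processes from binding the same
            # port on Windows, while SO_REUSEADDR on POSIX lets a restarted
            # listener rebind while old connections linger in TIME_WAIT.)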
+ if sys.platform == "win32": + sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) + else: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + + if reuse_port: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + # Workaround for #554 + if fam == socket.AF_INET6 and "%" in bind_addr[0]: + addr, scope_id = bind_addr[0].split("%", 1) + bind_addr = (addr, bind_addr[1], 0, int(scope_id)) + + sock.bind(bind_addr) + sock.listen(backlog) + except BaseException: + sock.close() + raise + + return sock + + # We passing type=0 on non-Windows platforms as a workaround for a uvloop bug + # where we don't get the correct scope ID for IPv6 link-local addresses when passing + # type=socket.SOCK_STREAM to getaddrinfo(): + # https://github.com/MagicStack/uvloop/issues/539 + gai_res = await getaddrinfo( + local_host, + local_port, + family=family, + type=socket.SOCK_STREAM if sys.platform == "win32" else 0, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, + ) + + # The set comprehension is here to work around a glibc bug: + # https://sourceware.org/bugzilla/show_bug.cgi?id=14969 + sockaddrs = sorted({res for res in gai_res if res[1] == SocketKind.SOCK_STREAM}) + + # Special case for dual-stack binding on the "any" interface + if ( + local_host is None + and family == AddressFamily.AF_UNSPEC + and socket.has_dualstack_ipv6() + and any(fam == AddressFamily.AF_INET6 for fam, *_ in gai_res) + ): + raw_socket = setup_raw_socket( + AddressFamily.AF_INET6, ("::", local_port), v6only=False + ) + listener = asynclib.create_tcp_listener(raw_socket) + return MultiListener([listener]) + + errors: list[OSError] = [] + try: + for _ in range(len(sockaddrs)): + listeners: list[SocketListener] = [] + bound_ephemeral_port = local_port + try: + for fam, *_, sockaddr in sockaddrs: + sockaddr = sockaddr[0], bound_ephemeral_port, *sockaddr[2:] + raw_socket = setup_raw_socket(fam, sockaddr) + + # Store the assigned port if an ephemeral port was requested, so + # we'll bind to the same port on all interfaces + if local_port == 0 and len(gai_res) > 1: + bound_ephemeral_port = raw_socket.getsockname()[1] + + listeners.append(asynclib.create_tcp_listener(raw_socket)) + except BaseException as exc: + for listener in listeners: + await listener.aclose() + + # If an ephemeral port was requested but binding the assigned port + # failed for another interface, rotate the address list and try again + if ( + isinstance(exc, OSError) + and exc.errno == errno.EADDRINUSE + and local_port == 0 + and bound_ephemeral_port + ): + errors.append(exc) + sockaddrs.append(sockaddrs.pop(0)) + continue + + raise + + return MultiListener(listeners) + + raise OSError( + f"Could not create {len(sockaddrs)} listeners with a consistent port" + ) from ExceptionGroup("Several bind attempts failed", errors) + finally: + del errors # Prevent reference cycles + + +async def create_unix_listener( + path: str | bytes | PathLike[Any], + *, + mode: int | None = None, + backlog: int = 65536, +) -> SocketListener: + """ + Create a UNIX socket listener. + + Not available on Windows. + + :param path: path of the socket + :param mode: permissions to set on the socket + :param backlog: maximum number of queued incoming connections (up to a maximum of + 2**16, or 65536) + :return: a listener object + + .. versionchanged:: 3.0 + If a socket already exists on the file system in the given path, it will be + removed first. 
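# A minimal usage sketch for the TCP listener factory above, assuming the usual
# top-level re-exports (anyio.create_tcp_listener, anyio.run, anyio.EndOfStream),
# a free local port 8765 and a trivial echo handler (both hypothetical):
import anyio
from anyio.abc import SocketStream

async def echo(stream: SocketStream) -> None:
    async with stream:
        try:
            while True:
                await stream.send(await stream.receive())
        except anyio.EndOfStream:
            pass

async def serve_echo() -> None:
    listener = await anyio.create_tcp_listener(local_port=8765)
    await listener.serve(echo)  # runs each connection handler in its own task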
+ + """ + backlog = min(backlog, 65536) + raw_socket = await setup_unix_local_socket(path, mode, socket.SOCK_STREAM) + try: + raw_socket.listen(backlog) + return get_async_backend().create_unix_listener(raw_socket) + except BaseException: + raw_socket.close() + raise + + +async def create_udp_socket( + family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, + *, + local_host: IPAddressType | None = None, + local_port: int = 0, + reuse_port: bool = False, +) -> UDPSocket: + """ + Create a UDP socket. + + If ``port`` has been given, the socket will be bound to this port on the local + machine, making this socket suitable for providing UDP based services. + + :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically + determined from ``local_host`` if omitted + :param local_host: IP address or host name of the local interface to bind to + :param local_port: local port to bind to + :param reuse_port: ``True`` to allow multiple sockets to bind to the same + address/port (not supported on Windows) + :return: a UDP socket + + """ + if family is AddressFamily.AF_UNSPEC and not local_host: + raise ValueError('Either "family" or "local_host" must be given') + + if local_host: + gai_res = await getaddrinfo( + str(local_host), + local_port, + family=family, + type=socket.SOCK_DGRAM, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, + ) + family = cast(AnyIPAddressFamily, gai_res[0][0]) + local_address = gai_res[0][-1] + elif family is AddressFamily.AF_INET6: + local_address = ("::", 0) + else: + local_address = ("0.0.0.0", 0) + + sock = await get_async_backend().create_udp_socket( + family, local_address, None, reuse_port + ) + return cast(UDPSocket, sock) + + +async def create_connected_udp_socket( + remote_host: IPAddressType, + remote_port: int, + *, + family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, + local_host: IPAddressType | None = None, + local_port: int = 0, + reuse_port: bool = False, +) -> ConnectedUDPSocket: + """ + Create a connected UDP socket. + + Connected UDP sockets can only communicate with the specified remote host/port, an + any packets sent from other sources are dropped. + + :param remote_host: remote host to set as the default target + :param remote_port: port on the remote host to set as the default target + :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically + determined from ``local_host`` or ``remote_host`` if omitted + :param local_host: IP address or host name of the local interface to bind to + :param local_port: local port to bind to + :param reuse_port: ``True`` to allow multiple sockets to bind to the same + address/port (not supported on Windows) + :return: a connected UDP socket + + """ + local_address = None + if local_host: + gai_res = await getaddrinfo( + str(local_host), + local_port, + family=family, + type=socket.SOCK_DGRAM, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, + ) + family = cast(AnyIPAddressFamily, gai_res[0][0]) + local_address = gai_res[0][-1] + + gai_res = await getaddrinfo( + str(remote_host), remote_port, family=family, type=socket.SOCK_DGRAM + ) + family = cast(AnyIPAddressFamily, gai_res[0][0]) + remote_address = gai_res[0][-1] + + sock = await get_async_backend().create_udp_socket( + family, local_address, remote_address, reuse_port + ) + return cast(ConnectedUDPSocket, sock) + + +async def create_unix_datagram_socket( + *, + local_path: None | str | bytes | PathLike[Any] = None, + local_mode: int | None = None, +) -> UNIXDatagramSocket: + """ + Create a UNIX datagram socket. 
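# A minimal usage sketch for the UDP factories above, assuming a reachable
# datagram service on 127.0.0.1:9999 (hypothetical address and payload):
import anyio

async def udp_example() -> None:
    # Unconnected socket: each sendto()/receive() carries an explicit address.
    async with await anyio.create_udp_socket(local_host="127.0.0.1") as udp:
        await udp.sendto(b"ping", "127.0.0.1", 9999)

    # Connected socket: the remote address is fixed when the socket is created.
    async with await anyio.create_connected_udp_socket("127.0.0.1", 9999) as conn:
        await conn.send(b"ping")
        reply = await conn.receive()  # waits for a datagram from 127.0.0.1:9999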
+ + Not available on Windows. + + If ``local_path`` has been given, the socket will be bound to this path, making this + socket suitable for receiving datagrams from other processes. Other processes can + send datagrams to this socket only if ``local_path`` is set. + + If a socket already exists on the file system in the ``local_path``, it will be + removed first. + + :param local_path: the path on which to bind to + :param local_mode: permissions to set on the local socket + :return: a UNIX datagram socket + + """ + raw_socket = await setup_unix_local_socket( + local_path, local_mode, socket.SOCK_DGRAM + ) + return await get_async_backend().create_unix_datagram_socket(raw_socket, None) + + +async def create_connected_unix_datagram_socket( + remote_path: str | bytes | PathLike[Any], + *, + local_path: None | str | bytes | PathLike[Any] = None, + local_mode: int | None = None, +) -> ConnectedUNIXDatagramSocket: + """ + Create a connected UNIX datagram socket. + + Connected datagram sockets can only communicate with the specified remote path. + + If ``local_path`` has been given, the socket will be bound to this path, making + this socket suitable for receiving datagrams from other processes. Other processes + can send datagrams to this socket only if ``local_path`` is set. + + If a socket already exists on the file system in the ``local_path``, it will be + removed first. + + :param remote_path: the path to set as the default target + :param local_path: the path on which to bind to + :param local_mode: permissions to set on the local socket + :return: a connected UNIX datagram socket + + """ + remote_path = os.fspath(remote_path) + raw_socket = await setup_unix_local_socket( + local_path, local_mode, socket.SOCK_DGRAM + ) + return await get_async_backend().create_unix_datagram_socket( + raw_socket, remote_path + ) + + +async def getaddrinfo( + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, +) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int]]]: + """ + Look up a numeric IP address given a host name. + + Internationalized domain names are translated according to the (non-transitional) + IDNA 2008 standard. + + .. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of + (host, port), unlike what :func:`socket.getaddrinfo` does. + + :param host: host name + :param port: port number + :param family: socket family (`'AF_INET``, ...) + :param type: socket type (``SOCK_STREAM``, ...) + :param proto: protocol number + :param flags: flags to pass to upstream ``getaddrinfo()`` + :return: list of tuples containing (family, type, proto, canonname, sockaddr) + + .. seealso:: :func:`socket.getaddrinfo` + + """ + # Handle unicode hostnames + if isinstance(host, str): + try: + encoded_host: bytes | None = host.encode("ascii") + except UnicodeEncodeError: + import idna + + encoded_host = idna.encode(host, uts46=True) + else: + encoded_host = host + + gai_res = await get_async_backend().getaddrinfo( + encoded_host, port, family=family, type=type, proto=proto, flags=flags + ) + return [ + (family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr)) + for family, type, proto, canonname, sockaddr in gai_res + # filter out IPv6 results when IPv6 is disabled + if not isinstance(sockaddr[0], int) + ] + + +def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[tuple[str, str]]: + """ + Look up the host name of an IP address. 
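# A small illustration of the getaddrinfo() wrapper above: it awaits the lookup
# without blocking the event loop and flattens IPv6 sockaddrs to (host, port)
# 2-tuples (the host name passed in is hypothetical):
import socket
import anyio

async def resolve_https_addresses(host: str) -> list[str]:
    results = await anyio.getaddrinfo(host, 443, type=socket.SOCK_STREAM)
    return [sockaddr[0] for _family, _type, _proto, _canon, sockaddr in results]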
+ + :param sockaddr: socket address (e.g. (ipaddress, port) for IPv4) + :param flags: flags to pass to upstream ``getnameinfo()`` + :return: a tuple of (host name, service name) + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + .. seealso:: :func:`socket.getnameinfo` + + """ + return get_async_backend().getnameinfo(sockaddr, flags) + + +@deprecated("This function is deprecated; use `wait_readable` instead") +def wait_socket_readable(sock: socket.socket) -> Awaitable[None]: + """ + .. deprecated:: 4.7.0 + Use :func:`wait_readable` instead. + + Wait until the given socket has data to be read. + + .. warning:: Only use this on raw sockets that have not been wrapped by any higher + level constructs like socket streams! + + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become readable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become readable + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + return get_async_backend().wait_readable(sock.fileno()) + + +@deprecated("This function is deprecated; use `wait_writable` instead") +def wait_socket_writable(sock: socket.socket) -> Awaitable[None]: + """ + .. deprecated:: 4.7.0 + Use :func:`wait_writable` instead. + + Wait until the given socket can be written to. + + This does **NOT** work on Windows when using the asyncio backend with a proactor + event loop (default on py3.8+). + + .. warning:: Only use this on raw sockets that have not been wrapped by any higher + level constructs like socket streams! + + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become writable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become writable + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + return get_async_backend().wait_writable(sock.fileno()) + + +def wait_readable(obj: FileDescriptorLike) -> Awaitable[None]: + """ + Wait until the given object has data to be read. + + On Unix systems, ``obj`` must either be an integer file descriptor, or else an + object with a ``.fileno()`` method which returns an integer file descriptor. Any + kind of file descriptor can be passed, though the exact semantics will depend on + your kernel. For example, this probably won't do anything useful for on-disk files. + + On Windows systems, ``obj`` must either be an integer ``SOCKET`` handle, or else an + object with a ``.fileno()`` method which returns an integer ``SOCKET`` handle. File + descriptors aren't supported, and neither are handles that refer to anything besides + a ``SOCKET``. + + On backends where this functionality is not natively provided (asyncio + ``ProactorEventLoop`` on Windows), it is provided using a separate selector thread + which is set to shut down when the interpreter shuts down. + + .. warning:: Don't use this on raw sockets that have been wrapped by any higher + level constructs like socket streams! 
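# A minimal sketch of wait_readable() on a raw, non-blocking socket; `sock` is
# assumed to be an already-connected socket.socket not wrapped by any stream:
import socket
import anyio

async def recv_when_ready(sock: socket.socket) -> bytes:
    await anyio.wait_readable(sock)  # suspends this task until the fd is readable
    return sock.recv(4096)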
+ + :param obj: an object with a ``.fileno()`` method or an integer handle + :raises ~anyio.ClosedResourceError: if the object was closed while waiting for the + object to become readable + :raises ~anyio.BusyResourceError: if another task is already waiting for the object + to become readable + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + return get_async_backend().wait_readable(obj) + + +def wait_writable(obj: FileDescriptorLike) -> Awaitable[None]: + """ + Wait until the given object can be written to. + + :param obj: an object with a ``.fileno()`` method or an integer handle + :raises ~anyio.ClosedResourceError: if the object was closed while waiting for the + object to become writable + :raises ~anyio.BusyResourceError: if another task is already waiting for the object + to become writable + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + .. seealso:: See the documentation of :func:`wait_readable` for the definition of + ``obj`` and notes on backend compatibility. + + .. warning:: Don't use this on raw sockets that have been wrapped by any higher + level constructs like socket streams! + + """ + return get_async_backend().wait_writable(obj) + + +def notify_closing(obj: FileDescriptorLike) -> None: + """ + Call this before closing a file descriptor (on Unix) or socket (on + Windows). This will cause any `wait_readable` or `wait_writable` + calls on the given object to immediately wake up and raise + `~anyio.ClosedResourceError`. + + This doesn't actually close the object – you still have to do that + yourself afterwards. Also, you want to be careful to make sure no + new tasks start waiting on the object in between when you call this + and when it's actually closed. So to close something properly, you + usually want to do these steps in order: + + 1. Explicitly mark the object as closed, so that any new attempts + to use it will abort before they start. + 2. Call `notify_closing` to wake up any already-existing users. + 3. Actually close the object. + + It's also possible to do them in a different order if that's more + convenient, *but only if* you make sure not to have any checkpoints in + between the steps. This way they all happen in a single atomic + step, so other tasks won't be able to tell what order they happened + in anyway. + + :param obj: an object with a ``.fileno()`` method or an integer handle + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + get_async_backend().notify_closing(obj) + + +# +# Private API +# + + +def convert_ipv6_sockaddr( + sockaddr: tuple[str, int, int, int] | tuple[str, int], +) -> tuple[str, int]: + """ + Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format. + + If the scope ID is nonzero, it is added to the address, separated with ``%``. + Otherwise the flow id and scope id are simply cut off from the tuple. + Any other kinds of socket addresses are returned as-is. 
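# A sketch of the three-step close sequence described for notify_closing() above,
# assuming a hypothetical connection object that owns a raw socket and a `closed`
# flag checked by its users:
import anyio

def close_connection(conn) -> None:
    conn.closed = True                # 1. new users see the object as closed
    anyio.notify_closing(conn.sock)   # 2. wake tasks blocked in wait_readable/wait_writable
    conn.sock.close()                 # 3. actually close the underlying socket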
+ + :param sockaddr: the result of :meth:`~socket.socket.getsockname` + :return: the converted socket address + + """ + # This is more complicated than it should be because of MyPy + if isinstance(sockaddr, tuple) and len(sockaddr) == 4: + host, port, flowinfo, scope_id = sockaddr + if scope_id: + # PyPy (as of v7.3.11) leaves the interface name in the result, so + # we discard it and only get the scope ID from the end + # (https://foss.heptapod.net/pypy/pypy/-/issues/3938) + host = host.split("%")[0] + + # Add scope_id to the address + return f"{host}%{scope_id}", port + else: + return host, port + else: + return sockaddr + + +async def setup_unix_local_socket( + path: None | str | bytes | PathLike[Any], + mode: int | None, + socktype: int, +) -> socket.socket: + """ + Create a UNIX local socket object, deleting the socket at the given path if it + exists. + + Not available on Windows. + + :param path: path of the socket + :param mode: permissions to set on the socket + :param socktype: socket.SOCK_STREAM or socket.SOCK_DGRAM + + """ + path_str: str | None + if path is not None: + path_str = os.fsdecode(path) + + # Linux abstract namespace sockets aren't backed by a concrete file so skip stat call + if not path_str.startswith("\0"): + # Copied from pathlib... + try: + stat_result = os.stat(path) + except OSError as e: + if e.errno not in ( + errno.ENOENT, + errno.ENOTDIR, + errno.EBADF, + errno.ELOOP, + ): + raise + else: + if stat.S_ISSOCK(stat_result.st_mode): + os.unlink(path) + else: + path_str = None + + raw_socket = socket.socket(socket.AF_UNIX, socktype) + raw_socket.setblocking(False) + + if path_str is not None: + try: + await to_thread.run_sync(raw_socket.bind, path_str, abandon_on_cancel=True) + if mode is not None: + await to_thread.run_sync(chmod, path_str, mode, abandon_on_cancel=True) + except BaseException: + raw_socket.close() + raise + + return raw_socket + + +@dataclass +class TCPConnectable(ByteStreamConnectable): + """ + Connects to a TCP server at the given host and port. + + :param host: host name or IP address of the server + :param port: TCP port number of the server + """ + + host: str | IPv4Address | IPv6Address + port: int + + def __post_init__(self) -> None: + if self.port < 1 or self.port > 65535: + raise ValueError("TCP port number out of range") + + @override + async def connect(self) -> SocketStream: + try: + return await connect_tcp(self.host, self.port) + except OSError as exc: + raise ConnectionFailed( + f"error connecting to {self.host}:{self.port}: {exc}" + ) from exc + + +@dataclass +class UNIXConnectable(ByteStreamConnectable): + """ + Connects to a UNIX domain socket at the given path. + + :param path: the file system path of the socket + """ + + path: str | bytes | PathLike[str] | PathLike[bytes] + + @override + async def connect(self) -> UNIXSocketStream: + try: + return await connect_unix(self.path) + except OSError as exc: + raise ConnectionFailed(f"error connecting to {self.path!r}: {exc}") from exc + + +def as_connectable( + remote: ByteStreamConnectable + | tuple[str | IPv4Address | IPv6Address, int] + | str + | bytes + | PathLike[str], + /, + *, + tls: bool = False, + ssl_context: ssl.SSLContext | None = None, + tls_hostname: str | None = None, + tls_standard_compatible: bool = True, +) -> ByteStreamConnectable: + """ + Return a byte stream connectable from the given object. + + If a bytestream connectable is given, it is returned unchanged. + If a tuple of (host, port) is given, a TCP connectable is returned. 
+ If a string or bytes path is given, a UNIX connectable is returned. + + If ``tls=True``, the connectable will be wrapped in a + :class:`~.streams.tls.TLSConnectable`. + + :param remote: a connectable, a tuple of (host, port) or a path to a UNIX socket + :param tls: if ``True``, wrap the plaintext connectable in a + :class:`~.streams.tls.TLSConnectable`, using the provided TLS settings) + :param ssl_context: if ``tls=True``, the SSLContext object to use (if not provided, + a secure default will be created) + :param tls_hostname: if ``tls=True``, host name of the server to use for checking + the server certificate (defaults to the host portion of the address for TCP + connectables) + :param tls_standard_compatible: if ``False`` and ``tls=True``, makes the TLS stream + skip the closing handshake when closing the connection, so it won't raise an + exception if the server does the same + + """ + connectable: TCPConnectable | UNIXConnectable | TLSConnectable + if isinstance(remote, ByteStreamConnectable): + return remote + elif isinstance(remote, tuple) and len(remote) == 2: + connectable = TCPConnectable(*remote) + elif isinstance(remote, (str, bytes, PathLike)): + connectable = UNIXConnectable(remote) + else: + raise TypeError(f"cannot convert {remote!r} to a connectable") + + if tls: + if not tls_hostname and isinstance(connectable, TCPConnectable): + tls_hostname = str(connectable.host) + + connectable = TLSConnectable( + connectable, + ssl_context=ssl_context, + hostname=tls_hostname, + standard_compatible=tls_standard_compatible, + ) + + return connectable diff --git a/.venv/lib/python3.12/site-packages/anyio/_core/_streams.py b/.venv/lib/python3.12/site-packages/anyio/_core/_streams.py new file mode 100644 index 0000000..2b9c7df --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/_core/_streams.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +import math +from typing import TypeVar +from warnings import warn + +from ..streams.memory import ( + MemoryObjectReceiveStream, + MemoryObjectSendStream, + _MemoryObjectStreamState, +) + +T_Item = TypeVar("T_Item") + + +class create_memory_object_stream( + tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]], +): + """ + Create a memory object stream. + + The stream's item type can be annotated like + :func:`create_memory_object_stream[T_Item]`. + + :param max_buffer_size: number of items held in the buffer until ``send()`` starts + blocking + :param item_type: old way of marking the streams with the right generic type for + static typing (does nothing on AnyIO 4) + + .. deprecated:: 4.0 + Use ``create_memory_object_stream[YourItemType](...)`` instead. + :return: a tuple of (send stream, receive stream) + + """ + + def __new__( # type: ignore[misc] + cls, max_buffer_size: float = 0, item_type: object = None + ) -> tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]: + if max_buffer_size != math.inf and not isinstance(max_buffer_size, int): + raise ValueError("max_buffer_size must be either an integer or math.inf") + if max_buffer_size < 0: + raise ValueError("max_buffer_size cannot be negative") + if item_type is not None: + warn( + "The item_type argument has been deprecated in AnyIO 4.0. " + "Use create_memory_object_stream[YourItemType](...) 
instead.", + DeprecationWarning, + stacklevel=2, + ) + + state = _MemoryObjectStreamState[T_Item](max_buffer_size) + return (MemoryObjectSendStream(state), MemoryObjectReceiveStream(state)) diff --git a/.venv/lib/python3.12/site-packages/anyio/_core/_subprocesses.py b/.venv/lib/python3.12/site-packages/anyio/_core/_subprocesses.py new file mode 100644 index 0000000..36d9b30 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/_core/_subprocesses.py @@ -0,0 +1,202 @@ +from __future__ import annotations + +import sys +from collections.abc import AsyncIterable, Iterable, Mapping, Sequence +from io import BytesIO +from os import PathLike +from subprocess import PIPE, CalledProcessError, CompletedProcess +from typing import IO, Any, Union, cast + +from ..abc import Process +from ._eventloop import get_async_backend +from ._tasks import create_task_group + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + +StrOrBytesPath: TypeAlias = Union[str, bytes, "PathLike[str]", "PathLike[bytes]"] + + +async def run_process( + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + input: bytes | None = None, + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = PIPE, + stderr: int | IO[Any] | None = PIPE, + check: bool = True, + cwd: StrOrBytesPath | None = None, + env: Mapping[str, str] | None = None, + startupinfo: Any = None, + creationflags: int = 0, + start_new_session: bool = False, + pass_fds: Sequence[int] = (), + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, +) -> CompletedProcess[bytes]: + """ + Run an external command in a subprocess and wait until it completes. + + .. seealso:: :func:`subprocess.run` + + :param command: either a string to pass to the shell, or an iterable of strings + containing the executable name or path and its arguments + :param input: bytes passed to the standard input of the subprocess + :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + a file-like object, or `None`; ``input`` overrides this + :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + a file-like object, or `None` + :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + :data:`subprocess.STDOUT`, a file-like object, or `None` + :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the + process terminates with a return code other than 0 + :param cwd: If not ``None``, change the working directory to this before running the + command + :param env: if not ``None``, this mapping replaces the inherited environment + variables from the parent process + :param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used + to specify process startup parameters (Windows only) + :param creationflags: flags that can be used to control the creation of the + subprocess (see :class:`subprocess.Popen` for the specifics) + :param start_new_session: if ``true`` the setsid() system call will be made in the + child process prior to the execution of the subprocess. (POSIX only) + :param pass_fds: sequence of file descriptors to keep open between the parent and + child processes. 
(POSIX only) + :param user: effective user to run the process as (Python >= 3.9, POSIX only) + :param group: effective group to run the process as (Python >= 3.9, POSIX only) + :param extra_groups: supplementary groups to set in the subprocess (Python >= 3.9, + POSIX only) + :param umask: if not negative, this umask is applied in the child process before + running the given command (Python >= 3.9, POSIX only) + :return: an object representing the completed process + :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process + exits with a nonzero return code + + """ + + async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None: + buffer = BytesIO() + async for chunk in stream: + buffer.write(chunk) + + stream_contents[index] = buffer.getvalue() + + if stdin is not None and input is not None: + raise ValueError("only one of stdin and input is allowed") + + async with await open_process( + command, + stdin=PIPE if input else stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + startupinfo=startupinfo, + creationflags=creationflags, + start_new_session=start_new_session, + pass_fds=pass_fds, + user=user, + group=group, + extra_groups=extra_groups, + umask=umask, + ) as process: + stream_contents: list[bytes | None] = [None, None] + async with create_task_group() as tg: + if process.stdout: + tg.start_soon(drain_stream, process.stdout, 0) + + if process.stderr: + tg.start_soon(drain_stream, process.stderr, 1) + + if process.stdin and input: + await process.stdin.send(input) + await process.stdin.aclose() + + await process.wait() + + output, errors = stream_contents + if check and process.returncode != 0: + raise CalledProcessError(cast(int, process.returncode), command, output, errors) + + return CompletedProcess(command, cast(int, process.returncode), output, errors) + + +async def open_process( + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None = PIPE, + stdout: int | IO[Any] | None = PIPE, + stderr: int | IO[Any] | None = PIPE, + cwd: StrOrBytesPath | None = None, + env: Mapping[str, str] | None = None, + startupinfo: Any = None, + creationflags: int = 0, + start_new_session: bool = False, + pass_fds: Sequence[int] = (), + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, +) -> Process: + """ + Start an external command in a subprocess. + + .. 
seealso:: :class:`subprocess.Popen`
+
+    :param command: either a string to pass to the shell, or an iterable of strings
+        containing the executable name or path and its arguments
+    :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a
+        file-like object, or ``None``
+    :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
+        a file-like object, or ``None``
+    :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
+        :data:`subprocess.STDOUT`, a file-like object, or ``None``
+    :param cwd: If not ``None``, the working directory is changed before executing
+    :param env: If env is not ``None``, it must be a mapping that defines the
+        environment variables for the new process
+    :param creationflags: flags that can be used to control the creation of the
+        subprocess (see :class:`subprocess.Popen` for the specifics)
+    :param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used
+        to specify process startup parameters (Windows only)
+    :param start_new_session: if ``true`` the setsid() system call will be made in the
+        child process prior to the execution of the subprocess. (POSIX only)
+    :param pass_fds: sequence of file descriptors to keep open between the parent and
+        child processes. (POSIX only)
+    :param user: effective user to run the process as (POSIX only)
+    :param group: effective group to run the process as (POSIX only)
+    :param extra_groups: supplementary groups to set in the subprocess (POSIX only)
+    :param umask: if not negative, this umask is applied in the child process before
+        running the given command (POSIX only)
+    :return: an asynchronous process object
+
+    """
+    kwargs: dict[str, Any] = {}
+    if user is not None:
+        kwargs["user"] = user
+
+    if group is not None:
+        kwargs["group"] = group
+
+    if extra_groups is not None:
+        kwargs["extra_groups"] = extra_groups
+
+    if umask >= 0:
+        kwargs["umask"] = umask
+
+    return await get_async_backend().open_process(
+        command,
+        stdin=stdin,
+        stdout=stdout,
+        stderr=stderr,
+        cwd=cwd,
+        env=env,
+        startupinfo=startupinfo,
+        creationflags=creationflags,
+        start_new_session=start_new_session,
+        pass_fds=pass_fds,
+        **kwargs,
+    )
diff --git a/.venv/lib/python3.12/site-packages/anyio/_core/_synchronization.py b/.venv/lib/python3.12/site-packages/anyio/_core/_synchronization.py
new file mode 100644
index 0000000..c0ef27a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/anyio/_core/_synchronization.py
@@ -0,0 +1,753 @@
+from __future__ import annotations
+
+import math
+from collections import deque
+from collections.abc import Callable
+from dataclasses import dataclass
+from types import TracebackType
+from typing import TypeVar
+
+from ..lowlevel import checkpoint_if_cancelled
+from ._eventloop import get_async_backend
+from ._exceptions import BusyResourceError, NoEventLoopError
+from ._tasks import CancelScope
+from ._testing import TaskInfo, get_current_task
+
+T = TypeVar("T")
+
+
+@dataclass(frozen=True)
+class EventStatistics:
+    """
+    :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait`
+    """
+
+    tasks_waiting: int
+
+
+@dataclass(frozen=True)
+class CapacityLimiterStatistics:
+    """
+    :ivar int borrowed_tokens: number of tokens currently borrowed by tasks
+    :ivar float total_tokens: total number of available tokens
+    :ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from
+        this limiter
+    :ivar int tasks_waiting: number of tasks waiting on
+        :meth:`~.CapacityLimiter.acquire` or
:meth:`~.CapacityLimiter.acquire_on_behalf_of` + """ + + borrowed_tokens: int + total_tokens: float + borrowers: tuple[object, ...] + tasks_waiting: int + + +@dataclass(frozen=True) +class LockStatistics: + """ + :ivar bool locked: flag indicating if this lock is locked or not + :ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the + lock is not held by any task) + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire` + """ + + locked: bool + owner: TaskInfo | None + tasks_waiting: int + + +@dataclass(frozen=True) +class ConditionStatistics: + """ + :ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait` + :ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying + :class:`~.Lock` + """ + + tasks_waiting: int + lock_statistics: LockStatistics + + +@dataclass(frozen=True) +class SemaphoreStatistics: + """ + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire` + + """ + + tasks_waiting: int + + +class Event: + def __new__(cls) -> Event: + try: + return get_async_backend().create_event() + except NoEventLoopError: + return EventAdapter() + + def set(self) -> None: + """Set the flag, notifying all listeners.""" + raise NotImplementedError + + def is_set(self) -> bool: + """Return ``True`` if the flag is set, ``False`` if not.""" + raise NotImplementedError + + async def wait(self) -> None: + """ + Wait until the flag has been set. + + If the flag has already been set when this method is called, it returns + immediately. + + """ + raise NotImplementedError + + def statistics(self) -> EventStatistics: + """Return statistics about the current state of this event.""" + raise NotImplementedError + + +class EventAdapter(Event): + _internal_event: Event | None = None + _is_set: bool = False + + def __new__(cls) -> EventAdapter: + return object.__new__(cls) + + @property + def _event(self) -> Event: + if self._internal_event is None: + self._internal_event = get_async_backend().create_event() + if self._is_set: + self._internal_event.set() + + return self._internal_event + + def set(self) -> None: + if self._internal_event is None: + self._is_set = True + else: + self._event.set() + + def is_set(self) -> bool: + if self._internal_event is None: + return self._is_set + + return self._internal_event.is_set() + + async def wait(self) -> None: + await self._event.wait() + + def statistics(self) -> EventStatistics: + if self._internal_event is None: + return EventStatistics(tasks_waiting=0) + + return self._internal_event.statistics() + + +class Lock: + def __new__(cls, *, fast_acquire: bool = False) -> Lock: + try: + return get_async_backend().create_lock(fast_acquire=fast_acquire) + except NoEventLoopError: + return LockAdapter(fast_acquire=fast_acquire) + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + async def acquire(self) -> None: + """Acquire the lock.""" + raise NotImplementedError + + def acquire_nowait(self) -> None: + """ + Acquire the lock, without blocking. 
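# A minimal sketch of Lock used as an async context manager (shown above),
# serializing updates to shared state across tasks; the counter is hypothetical:
import anyio

async def count_to_ten() -> int:
    lock = anyio.Lock()
    counter = 0

    async def bump() -> None:
        nonlocal counter
        async with lock:   # acquire() on entry, release() on exit
            counter += 1

    async with anyio.create_task_group() as tg:
        for _ in range(10):
            tg.start_soon(bump)

    return counter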
+ + :raises ~anyio.WouldBlock: if the operation would block + + """ + raise NotImplementedError + + def release(self) -> None: + """Release the lock.""" + raise NotImplementedError + + def locked(self) -> bool: + """Return True if the lock is currently held.""" + raise NotImplementedError + + def statistics(self) -> LockStatistics: + """ + Return statistics about the current state of this lock. + + .. versionadded:: 3.0 + """ + raise NotImplementedError + + +class LockAdapter(Lock): + _internal_lock: Lock | None = None + + def __new__(cls, *, fast_acquire: bool = False) -> LockAdapter: + return object.__new__(cls) + + def __init__(self, *, fast_acquire: bool = False): + self._fast_acquire = fast_acquire + + @property + def _lock(self) -> Lock: + if self._internal_lock is None: + self._internal_lock = get_async_backend().create_lock( + fast_acquire=self._fast_acquire + ) + + return self._internal_lock + + async def __aenter__(self) -> None: + await self._lock.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if self._internal_lock is not None: + self._internal_lock.release() + + async def acquire(self) -> None: + """Acquire the lock.""" + await self._lock.acquire() + + def acquire_nowait(self) -> None: + """ + Acquire the lock, without blocking. + + :raises ~anyio.WouldBlock: if the operation would block + + """ + self._lock.acquire_nowait() + + def release(self) -> None: + """Release the lock.""" + self._lock.release() + + def locked(self) -> bool: + """Return True if the lock is currently held.""" + return self._lock.locked() + + def statistics(self) -> LockStatistics: + """ + Return statistics about the current state of this lock. + + .. versionadded:: 3.0 + + """ + if self._internal_lock is None: + return LockStatistics(False, None, 0) + + return self._internal_lock.statistics() + + +class Condition: + _owner_task: TaskInfo | None = None + + def __init__(self, lock: Lock | None = None): + self._lock = lock or Lock() + self._waiters: deque[Event] = deque() + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + def _check_acquired(self) -> None: + if self._owner_task != get_current_task(): + raise RuntimeError("The current task is not holding the underlying lock") + + async def acquire(self) -> None: + """Acquire the underlying lock.""" + await self._lock.acquire() + self._owner_task = get_current_task() + + def acquire_nowait(self) -> None: + """ + Acquire the underlying lock, without blocking. 
+ + :raises ~anyio.WouldBlock: if the operation would block + + """ + self._lock.acquire_nowait() + self._owner_task = get_current_task() + + def release(self) -> None: + """Release the underlying lock.""" + self._lock.release() + + def locked(self) -> bool: + """Return True if the lock is set.""" + return self._lock.locked() + + def notify(self, n: int = 1) -> None: + """Notify exactly n listeners.""" + self._check_acquired() + for _ in range(n): + try: + event = self._waiters.popleft() + except IndexError: + break + + event.set() + + def notify_all(self) -> None: + """Notify all the listeners.""" + self._check_acquired() + for event in self._waiters: + event.set() + + self._waiters.clear() + + async def wait(self) -> None: + """Wait for a notification.""" + await checkpoint_if_cancelled() + self._check_acquired() + event = Event() + self._waiters.append(event) + self.release() + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(event) + + raise + finally: + with CancelScope(shield=True): + await self.acquire() + + async def wait_for(self, predicate: Callable[[], T]) -> T: + """ + Wait until a predicate becomes true. + + :param predicate: a callable that returns a truthy value when the condition is + met + :return: the result of the predicate + + .. versionadded:: 4.11.0 + + """ + while not (result := predicate()): + await self.wait() + + return result + + def statistics(self) -> ConditionStatistics: + """ + Return statistics about the current state of this condition. + + .. versionadded:: 3.0 + """ + return ConditionStatistics(len(self._waiters), self._lock.statistics()) + + +class Semaphore: + def __new__( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> Semaphore: + try: + return get_async_backend().create_semaphore( + initial_value, max_value=max_value, fast_acquire=fast_acquire + ) + except NoEventLoopError: + return SemaphoreAdapter(initial_value, max_value=max_value) + + def __init__( + self, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ): + if not isinstance(initial_value, int): + raise TypeError("initial_value must be an integer") + if initial_value < 0: + raise ValueError("initial_value must be >= 0") + if max_value is not None: + if not isinstance(max_value, int): + raise TypeError("max_value must be an integer or None") + if max_value < initial_value: + raise ValueError( + "max_value must be equal to or higher than initial_value" + ) + + self._fast_acquire = fast_acquire + + async def __aenter__(self) -> Semaphore: + await self.acquire() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + async def acquire(self) -> None: + """Decrement the semaphore value, blocking if necessary.""" + raise NotImplementedError + + def acquire_nowait(self) -> None: + """ + Acquire the underlying lock, without blocking. 
+ + :raises ~anyio.WouldBlock: if the operation would block + + """ + raise NotImplementedError + + def release(self) -> None: + """Increment the semaphore value.""" + raise NotImplementedError + + @property + def value(self) -> int: + """The current value of the semaphore.""" + raise NotImplementedError + + @property + def max_value(self) -> int | None: + """The maximum value of the semaphore.""" + raise NotImplementedError + + def statistics(self) -> SemaphoreStatistics: + """ + Return statistics about the current state of this semaphore. + + .. versionadded:: 3.0 + """ + raise NotImplementedError + + +class SemaphoreAdapter(Semaphore): + _internal_semaphore: Semaphore | None = None + + def __new__( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> SemaphoreAdapter: + return object.__new__(cls) + + def __init__( + self, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> None: + super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire) + self._initial_value = initial_value + self._max_value = max_value + + @property + def _semaphore(self) -> Semaphore: + if self._internal_semaphore is None: + self._internal_semaphore = get_async_backend().create_semaphore( + self._initial_value, max_value=self._max_value + ) + + return self._internal_semaphore + + async def acquire(self) -> None: + await self._semaphore.acquire() + + def acquire_nowait(self) -> None: + self._semaphore.acquire_nowait() + + def release(self) -> None: + self._semaphore.release() + + @property + def value(self) -> int: + if self._internal_semaphore is None: + return self._initial_value + + return self._semaphore.value + + @property + def max_value(self) -> int | None: + return self._max_value + + def statistics(self) -> SemaphoreStatistics: + if self._internal_semaphore is None: + return SemaphoreStatistics(tasks_waiting=0) + + return self._semaphore.statistics() + + +class CapacityLimiter: + def __new__(cls, total_tokens: float) -> CapacityLimiter: + try: + return get_async_backend().create_capacity_limiter(total_tokens) + except NoEventLoopError: + return CapacityLimiterAdapter(total_tokens) + + async def __aenter__(self) -> None: + raise NotImplementedError + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + raise NotImplementedError + + @property + def total_tokens(self) -> float: + """ + The total number of tokens available for borrowing. + + This is a read-write property. If the total number of tokens is increased, the + proportionate number of tasks waiting on this limiter will be granted their + tokens. + + .. versionchanged:: 3.0 + The property is now writable. + .. versionchanged:: 4.12 + The value can now be set to 0. + + """ + raise NotImplementedError + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + raise NotImplementedError + + @property + def borrowed_tokens(self) -> int: + """The number of tokens that have currently been borrowed.""" + raise NotImplementedError + + @property + def available_tokens(self) -> float: + """The number of tokens currently available to be borrowed""" + raise NotImplementedError + + def acquire_nowait(self) -> None: + """ + Acquire a token for the current task without waiting for one to become + available. 
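# A minimal sketch of CapacityLimiter bounding concurrency; the URL list and the
# sleep standing in for the real download are hypothetical:
import anyio

async def download_all(urls: list[str]) -> None:
    limiter = anyio.CapacityLimiter(4)   # at most four tokens borrowed at a time

    async def fetch(url: str) -> None:
        async with limiter:              # waits here when all tokens are borrowed
            await anyio.sleep(1)         # stand-in for the actual download

    async with anyio.create_task_group() as tg:
        for url in urls:
            tg.start_soon(fetch, url)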
+ + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + """ + Acquire a token without waiting for one to become available. + + :param borrower: the entity borrowing a token + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + async def acquire(self) -> None: + """ + Acquire a token for the current task, waiting if necessary for one to become + available. + + """ + raise NotImplementedError + + async def acquire_on_behalf_of(self, borrower: object) -> None: + """ + Acquire a token, waiting if necessary for one to become available. + + :param borrower: the entity borrowing a token + + """ + raise NotImplementedError + + def release(self) -> None: + """ + Release the token held by the current task. + + :raises RuntimeError: if the current task has not borrowed a token from this + limiter. + + """ + raise NotImplementedError + + def release_on_behalf_of(self, borrower: object) -> None: + """ + Release the token held by the given borrower. + + :raises RuntimeError: if the borrower has not borrowed a token from this + limiter. + + """ + raise NotImplementedError + + def statistics(self) -> CapacityLimiterStatistics: + """ + Return statistics about the current state of this limiter. + + .. versionadded:: 3.0 + + """ + raise NotImplementedError + + +class CapacityLimiterAdapter(CapacityLimiter): + _internal_limiter: CapacityLimiter | None = None + + def __new__(cls, total_tokens: float) -> CapacityLimiterAdapter: + return object.__new__(cls) + + def __init__(self, total_tokens: float) -> None: + self.total_tokens = total_tokens + + @property + def _limiter(self) -> CapacityLimiter: + if self._internal_limiter is None: + self._internal_limiter = get_async_backend().create_capacity_limiter( + self._total_tokens + ) + + return self._internal_limiter + + async def __aenter__(self) -> None: + await self._limiter.__aenter__() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + return await self._limiter.__aexit__(exc_type, exc_val, exc_tb) + + @property + def total_tokens(self) -> float: + if self._internal_limiter is None: + return self._total_tokens + + return self._internal_limiter.total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and value is not math.inf: + raise TypeError("total_tokens must be an int or math.inf") + elif value < 1: + raise ValueError("total_tokens must be >= 1") + + if self._internal_limiter is None: + self._total_tokens = value + return + + self._limiter.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + if self._internal_limiter is None: + return 0 + + return self._internal_limiter.borrowed_tokens + + @property + def available_tokens(self) -> float: + if self._internal_limiter is None: + return self._total_tokens + + return self._internal_limiter.available_tokens + + def acquire_nowait(self) -> None: + self._limiter.acquire_nowait() + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + self._limiter.acquire_on_behalf_of_nowait(borrower) + + async def acquire(self) -> None: + await self._limiter.acquire() + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await self._limiter.acquire_on_behalf_of(borrower) + + def release(self) -> None: + self._limiter.release() + + def 
release_on_behalf_of(self, borrower: object) -> None: + self._limiter.release_on_behalf_of(borrower) + + def statistics(self) -> CapacityLimiterStatistics: + if self._internal_limiter is None: + return CapacityLimiterStatistics( + borrowed_tokens=0, + total_tokens=self.total_tokens, + borrowers=(), + tasks_waiting=0, + ) + + return self._internal_limiter.statistics() + + +class ResourceGuard: + """ + A context manager for ensuring that a resource is only used by a single task at a + time. + + Entering this context manager while the previous has not exited it yet will trigger + :exc:`BusyResourceError`. + + :param action: the action to guard against (visible in the :exc:`BusyResourceError` + when triggered, e.g. "Another task is already {action} this resource") + + .. versionadded:: 4.1 + """ + + __slots__ = "action", "_guarded" + + def __init__(self, action: str = "using"): + self.action: str = action + self._guarded = False + + def __enter__(self) -> None: + if self._guarded: + raise BusyResourceError(self.action) + + self._guarded = True + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self._guarded = False diff --git a/.venv/lib/python3.12/site-packages/anyio/_core/_tasks.py b/.venv/lib/python3.12/site-packages/anyio/_core/_tasks.py new file mode 100644 index 0000000..0688bfe --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/_core/_tasks.py @@ -0,0 +1,173 @@ +from __future__ import annotations + +import math +from collections.abc import Generator +from contextlib import contextmanager +from types import TracebackType + +from ..abc._tasks import TaskGroup, TaskStatus +from ._eventloop import get_async_backend + + +class _IgnoredTaskStatus(TaskStatus[object]): + def started(self, value: object = None) -> None: + pass + + +TASK_STATUS_IGNORED = _IgnoredTaskStatus() + + +class CancelScope: + """ + Wraps a unit of work that can be made separately cancellable. + + :param deadline: The time (clock value) when this scope is cancelled automatically + :param shield: ``True`` to shield the cancel scope from external cancellation + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + """ + + def __new__( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return get_async_backend().create_cancel_scope(shield=shield, deadline=deadline) + + def cancel(self, reason: str | None = None) -> None: + """ + Cancel this scope immediately. + + :param reason: a message describing the reason for the cancellation + + """ + raise NotImplementedError + + @property + def deadline(self) -> float: + """ + The time (clock value) when this scope is cancelled automatically. + + Will be ``float('inf')`` if no timeout has been set. + + """ + raise NotImplementedError + + @deadline.setter + def deadline(self, value: float) -> None: + raise NotImplementedError + + @property + def cancel_called(self) -> bool: + """``True`` if :meth:`cancel` has been called.""" + raise NotImplementedError + + @property + def cancelled_caught(self) -> bool: + """ + ``True`` if this scope suppressed a cancellation exception it itself raised. + + This is typically used to check if any work was interrupted, or to see if the + scope was cancelled due to its deadline being reached. The value will, however, + only be ``True`` if the cancellation was triggered by the scope itself (and not + an outer scope). 
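# A short sketch of the pattern this property supports, using move_on_after()
# (defined later in this module) with a hypothetical byte receive stream:
import anyio
from anyio.abc import ByteReceiveStream

async def receive_with_timeout(stream: ByteReceiveStream) -> bytes | None:
    data: bytes | None = None
    with anyio.move_on_after(5) as scope:
        data = await stream.receive()
    if scope.cancelled_caught:   # True only if the 5-second deadline expired
        return None
    return data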
+ + """ + raise NotImplementedError + + @property + def shield(self) -> bool: + """ + ``True`` if this scope is shielded from external cancellation. + + While a scope is shielded, it will not receive cancellations from outside. + + """ + raise NotImplementedError + + @shield.setter + def shield(self, value: bool) -> None: + raise NotImplementedError + + def __enter__(self) -> CancelScope: + raise NotImplementedError + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + raise NotImplementedError + + +@contextmanager +def fail_after( + delay: float | None, shield: bool = False +) -> Generator[CancelScope, None, None]: + """ + Create a context manager which raises a :class:`TimeoutError` if does not finish in + time. + + :param delay: maximum allowed time (in seconds) before raising the exception, or + ``None`` to disable the timeout + :param shield: ``True`` to shield the cancel scope from external cancellation + :return: a context manager that yields a cancel scope + :rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.CancelScope`\\] + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + current_time = get_async_backend().current_time + deadline = (current_time() + delay) if delay is not None else math.inf + with get_async_backend().create_cancel_scope( + deadline=deadline, shield=shield + ) as cancel_scope: + yield cancel_scope + + if cancel_scope.cancelled_caught and current_time() >= cancel_scope.deadline: + raise TimeoutError + + +def move_on_after(delay: float | None, shield: bool = False) -> CancelScope: + """ + Create a cancel scope with a deadline that expires after the given delay. + + :param delay: maximum allowed time (in seconds) before exiting the context block, or + ``None`` to disable the timeout + :param shield: ``True`` to shield the cancel scope from external cancellation + :return: a cancel scope + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + deadline = ( + (get_async_backend().current_time() + delay) if delay is not None else math.inf + ) + return get_async_backend().create_cancel_scope(deadline=deadline, shield=shield) + + +def current_effective_deadline() -> float: + """ + Return the nearest deadline among all the cancel scopes effective for the current + task. + + :return: a clock value from the event loop's internal clock (or ``float('inf')`` if + there is no deadline in effect, or ``float('-inf')`` if the current scope has + been cancelled) + :rtype: float + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + return get_async_backend().current_effective_deadline() + + +def create_task_group() -> TaskGroup: + """ + Create a task group. 
+ + :return: a task group + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + return get_async_backend().create_task_group() diff --git a/.venv/lib/python3.12/site-packages/anyio/_core/_tempfile.py b/.venv/lib/python3.12/site-packages/anyio/_core/_tempfile.py new file mode 100644 index 0000000..fbb6b14 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/_core/_tempfile.py @@ -0,0 +1,616 @@ +from __future__ import annotations + +import os +import sys +import tempfile +from collections.abc import Iterable +from io import BytesIO, TextIOWrapper +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + AnyStr, + Generic, + overload, +) + +from .. import to_thread +from .._core._fileio import AsyncFile +from ..lowlevel import checkpoint_if_cancelled + +if TYPE_CHECKING: + from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer + + +class TemporaryFile(Generic[AnyStr]): + """ + An asynchronous temporary file that is automatically created and cleaned up. + + This class provides an asynchronous context manager interface to a temporary file. + The file is created using Python's standard `tempfile.TemporaryFile` function in a + background thread, and is wrapped as an asynchronous file using `AsyncFile`. + + :param mode: The mode in which the file is opened. Defaults to "w+b". + :param buffering: The buffering policy (-1 means the default buffering). + :param encoding: The encoding used to decode or encode the file. Only applicable in + text mode. + :param newline: Controls how universal newlines mode works (only applicable in text + mode). + :param suffix: The suffix for the temporary file name. + :param prefix: The prefix for the temporary file name. + :param dir: The directory in which the temporary file is created. + :param errors: The error handling scheme used for encoding/decoding errors. + """ + + _async_file: AsyncFile[AnyStr] + + @overload + def __init__( + self: TemporaryFile[bytes], + mode: OpenBinaryMode = ..., + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + *, + errors: str | None = ..., + ): ... + @overload + def __init__( + self: TemporaryFile[str], + mode: OpenTextMode, + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + *, + errors: str | None = ..., + ): ... 
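# A minimal usage sketch of the cancellation helpers defined in _tasks.py above
# (fail_after, move_on_after, create_task_group), assuming only the public API that
# anyio re-exports at the top level:

import anyio


async def main() -> None:
    # Hard timeout: fail_after() raises TimeoutError if the block overruns the delay.
    try:
        with anyio.fail_after(1):
            await anyio.sleep(5)
    except TimeoutError:
        print("timed out")

    # Soft timeout: move_on_after() just cancels the block; inspect the scope afterwards.
    with anyio.move_on_after(1) as scope:
        await anyio.sleep(5)
    if scope.cancelled_caught:
        print("gave up at the deadline")

    # Structured concurrency: child tasks cannot outlive the task group.
    async with anyio.create_task_group() as tg:
        tg.start_soon(anyio.sleep, 0.1)


anyio.run(main)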
+ + def __init__( + self, + mode: OpenTextMode | OpenBinaryMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + *, + errors: str | None = None, + ) -> None: + self.mode = mode + self.buffering = buffering + self.encoding = encoding + self.newline = newline + self.suffix: str | None = suffix + self.prefix: str | None = prefix + self.dir: str | None = dir + self.errors = errors + + async def __aenter__(self) -> AsyncFile[AnyStr]: + fp = await to_thread.run_sync( + lambda: tempfile.TemporaryFile( + self.mode, + self.buffering, + self.encoding, + self.newline, + self.suffix, + self.prefix, + self.dir, + errors=self.errors, + ) + ) + self._async_file = AsyncFile(fp) + return self._async_file + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + await self._async_file.aclose() + + +class NamedTemporaryFile(Generic[AnyStr]): + """ + An asynchronous named temporary file that is automatically created and cleaned up. + + This class provides an asynchronous context manager for a temporary file with a + visible name in the file system. It uses Python's standard + :func:`~tempfile.NamedTemporaryFile` function and wraps the file object with + :class:`AsyncFile` for asynchronous operations. + + :param mode: The mode in which the file is opened. Defaults to "w+b". + :param buffering: The buffering policy (-1 means the default buffering). + :param encoding: The encoding used to decode or encode the file. Only applicable in + text mode. + :param newline: Controls how universal newlines mode works (only applicable in text + mode). + :param suffix: The suffix for the temporary file name. + :param prefix: The prefix for the temporary file name. + :param dir: The directory in which the temporary file is created. + :param delete: Whether to delete the file when it is closed. + :param errors: The error handling scheme used for encoding/decoding errors. + :param delete_on_close: (Python 3.12+) Whether to delete the file on close. + """ + + _async_file: AsyncFile[AnyStr] + + @overload + def __init__( + self: NamedTemporaryFile[bytes], + mode: OpenBinaryMode = ..., + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + delete: bool = ..., + *, + errors: str | None = ..., + delete_on_close: bool = ..., + ): ... + @overload + def __init__( + self: NamedTemporaryFile[str], + mode: OpenTextMode, + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + delete: bool = ..., + *, + errors: str | None = ..., + delete_on_close: bool = ..., + ): ... 
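# A minimal usage sketch of the TemporaryFile and NamedTemporaryFile wrappers defined
# in _tempfile.py above. Both are async context managers that yield an AsyncFile, so
# all file operations are awaited; this assumes the classes are re-exported from the
# top-level anyio package, as in this vendored version.

import anyio
from anyio import NamedTemporaryFile, TemporaryFile


async def demo_tempfiles() -> None:
    # Anonymous temporary file: created in a worker thread, wrapped as an AsyncFile.
    async with TemporaryFile() as f:
        await f.write(b"scratch data")
        await f.seek(0)
        assert await f.read() == b"scratch data"

    # Named variant: the file has a visible name in the filesystem while open.
    async with NamedTemporaryFile(suffix=".log") as f:
        await f.write(b"hello\n")


anyio.run(demo_tempfiles)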
+ + def __init__( + self, + mode: OpenBinaryMode | OpenTextMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + delete: bool = True, + *, + errors: str | None = None, + delete_on_close: bool = True, + ) -> None: + self._params: dict[str, Any] = { + "mode": mode, + "buffering": buffering, + "encoding": encoding, + "newline": newline, + "suffix": suffix, + "prefix": prefix, + "dir": dir, + "delete": delete, + "errors": errors, + } + if sys.version_info >= (3, 12): + self._params["delete_on_close"] = delete_on_close + + async def __aenter__(self) -> AsyncFile[AnyStr]: + fp = await to_thread.run_sync( + lambda: tempfile.NamedTemporaryFile(**self._params) + ) + self._async_file = AsyncFile(fp) + return self._async_file + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + await self._async_file.aclose() + + +class SpooledTemporaryFile(AsyncFile[AnyStr]): + """ + An asynchronous spooled temporary file that starts in memory and is spooled to disk. + + This class provides an asynchronous interface to a spooled temporary file, much like + Python's standard :class:`~tempfile.SpooledTemporaryFile`. It supports asynchronous + write operations and provides a method to force a rollover to disk. + + :param max_size: Maximum size in bytes before the file is rolled over to disk. + :param mode: The mode in which the file is opened. Defaults to "w+b". + :param buffering: The buffering policy (-1 means the default buffering). + :param encoding: The encoding used to decode or encode the file (text mode only). + :param newline: Controls how universal newlines mode works (text mode only). + :param suffix: The suffix for the temporary file name. + :param prefix: The prefix for the temporary file name. + :param dir: The directory in which the temporary file is created. + :param errors: The error handling scheme used for encoding/decoding errors. + """ + + _rolled: bool = False + + @overload + def __init__( + self: SpooledTemporaryFile[bytes], + max_size: int = ..., + mode: OpenBinaryMode = ..., + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + *, + errors: str | None = ..., + ): ... + @overload + def __init__( + self: SpooledTemporaryFile[str], + max_size: int = ..., + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + *, + errors: str | None = ..., + ): ... 
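# A minimal usage sketch of the SpooledTemporaryFile wrapper defined above: writes are
# buffered in memory and spill over to a real temporary file once max_size is exceeded
# (or when rollover() is called explicitly). Again assuming the class is re-exported
# from the top-level anyio package, as in this vendored version.

import anyio
from anyio import SpooledTemporaryFile


async def demo_spooled() -> None:
    async with SpooledTemporaryFile(max_size=1024) as f:
        await f.write(b"x" * 10)     # small write stays in the in-memory buffer
        await f.write(b"y" * 2048)   # exceeds max_size, so the file rolls over to disk
        await f.seek(0)
        assert len(await f.read()) == 2058


anyio.run(demo_spooled)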
+ + def __init__( + self, + max_size: int = 0, + mode: OpenBinaryMode | OpenTextMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + *, + errors: str | None = None, + ) -> None: + self._tempfile_params: dict[str, Any] = { + "mode": mode, + "buffering": buffering, + "encoding": encoding, + "newline": newline, + "suffix": suffix, + "prefix": prefix, + "dir": dir, + "errors": errors, + } + self._max_size = max_size + if "b" in mode: + super().__init__(BytesIO()) # type: ignore[arg-type] + else: + super().__init__( + TextIOWrapper( # type: ignore[arg-type] + BytesIO(), + encoding=encoding, + errors=errors, + newline=newline, + write_through=True, + ) + ) + + async def aclose(self) -> None: + if not self._rolled: + self._fp.close() + return + + await super().aclose() + + async def _check(self) -> None: + if self._rolled or self._fp.tell() <= self._max_size: + return + + await self.rollover() + + async def rollover(self) -> None: + if self._rolled: + return + + self._rolled = True + buffer = self._fp + buffer.seek(0) + self._fp = await to_thread.run_sync( + lambda: tempfile.TemporaryFile(**self._tempfile_params) + ) + await self.write(buffer.read()) + buffer.close() + + @property + def closed(self) -> bool: + return self._fp.closed + + async def read(self, size: int = -1) -> AnyStr: + if not self._rolled: + await checkpoint_if_cancelled() + return self._fp.read(size) + + return await super().read(size) # type: ignore[return-value] + + async def read1(self: SpooledTemporaryFile[bytes], size: int = -1) -> bytes: + if not self._rolled: + await checkpoint_if_cancelled() + return self._fp.read1(size) + + return await super().read1(size) + + async def readline(self) -> AnyStr: + if not self._rolled: + await checkpoint_if_cancelled() + return self._fp.readline() + + return await super().readline() # type: ignore[return-value] + + async def readlines(self) -> list[AnyStr]: + if not self._rolled: + await checkpoint_if_cancelled() + return self._fp.readlines() + + return await super().readlines() # type: ignore[return-value] + + async def readinto(self: SpooledTemporaryFile[bytes], b: WriteableBuffer) -> int: + if not self._rolled: + await checkpoint_if_cancelled() + self._fp.readinto(b) + + return await super().readinto(b) + + async def readinto1(self: SpooledTemporaryFile[bytes], b: WriteableBuffer) -> int: + if not self._rolled: + await checkpoint_if_cancelled() + self._fp.readinto(b) + + return await super().readinto1(b) + + async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int: + if not self._rolled: + await checkpoint_if_cancelled() + return self._fp.seek(offset, whence) + + return await super().seek(offset, whence) + + async def tell(self) -> int: + if not self._rolled: + await checkpoint_if_cancelled() + return self._fp.tell() + + return await super().tell() + + async def truncate(self, size: int | None = None) -> int: + if not self._rolled: + await checkpoint_if_cancelled() + return self._fp.truncate(size) + + return await super().truncate(size) + + @overload + async def write(self: SpooledTemporaryFile[bytes], b: ReadableBuffer) -> int: ... + @overload + async def write(self: SpooledTemporaryFile[str], b: str) -> int: ... + + async def write(self, b: ReadableBuffer | str) -> int: + """ + Asynchronously write data to the spooled temporary file. 
+ + If the file has not yet been rolled over, the data is written synchronously, + and a rollover is triggered if the size exceeds the maximum size. + + :param s: The data to write. + :return: The number of bytes written. + :raises RuntimeError: If the underlying file is not initialized. + + """ + if not self._rolled: + await checkpoint_if_cancelled() + result = self._fp.write(b) + await self._check() + return result + + return await super().write(b) # type: ignore[misc] + + @overload + async def writelines( + self: SpooledTemporaryFile[bytes], lines: Iterable[ReadableBuffer] + ) -> None: ... + @overload + async def writelines( + self: SpooledTemporaryFile[str], lines: Iterable[str] + ) -> None: ... + + async def writelines(self, lines: Iterable[str] | Iterable[ReadableBuffer]) -> None: + """ + Asynchronously write a list of lines to the spooled temporary file. + + If the file has not yet been rolled over, the lines are written synchronously, + and a rollover is triggered if the size exceeds the maximum size. + + :param lines: An iterable of lines to write. + :raises RuntimeError: If the underlying file is not initialized. + + """ + if not self._rolled: + await checkpoint_if_cancelled() + result = self._fp.writelines(lines) + await self._check() + return result + + return await super().writelines(lines) # type: ignore[misc] + + +class TemporaryDirectory(Generic[AnyStr]): + """ + An asynchronous temporary directory that is created and cleaned up automatically. + + This class provides an asynchronous context manager for creating a temporary + directory. It wraps Python's standard :class:`~tempfile.TemporaryDirectory` to + perform directory creation and cleanup operations in a background thread. + + :param suffix: Suffix to be added to the temporary directory name. + :param prefix: Prefix to be added to the temporary directory name. + :param dir: The parent directory where the temporary directory is created. + :param ignore_cleanup_errors: Whether to ignore errors during cleanup + (Python 3.10+). + :param delete: Whether to delete the directory upon closing (Python 3.12+). 
+ """ + + def __init__( + self, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: AnyStr | None = None, + *, + ignore_cleanup_errors: bool = False, + delete: bool = True, + ) -> None: + self.suffix: AnyStr | None = suffix + self.prefix: AnyStr | None = prefix + self.dir: AnyStr | None = dir + self.ignore_cleanup_errors = ignore_cleanup_errors + self.delete = delete + + self._tempdir: tempfile.TemporaryDirectory | None = None + + async def __aenter__(self) -> str: + params: dict[str, Any] = { + "suffix": self.suffix, + "prefix": self.prefix, + "dir": self.dir, + } + if sys.version_info >= (3, 10): + params["ignore_cleanup_errors"] = self.ignore_cleanup_errors + + if sys.version_info >= (3, 12): + params["delete"] = self.delete + + self._tempdir = await to_thread.run_sync( + lambda: tempfile.TemporaryDirectory(**params) + ) + return await to_thread.run_sync(self._tempdir.__enter__) + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + if self._tempdir is not None: + await to_thread.run_sync( + self._tempdir.__exit__, exc_type, exc_value, traceback + ) + + async def cleanup(self) -> None: + if self._tempdir is not None: + await to_thread.run_sync(self._tempdir.cleanup) + + +@overload +async def mkstemp( + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + text: bool = False, +) -> tuple[int, str]: ... + + +@overload +async def mkstemp( + suffix: bytes | None = None, + prefix: bytes | None = None, + dir: bytes | None = None, + text: bool = False, +) -> tuple[int, bytes]: ... + + +async def mkstemp( + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: AnyStr | None = None, + text: bool = False, +) -> tuple[int, str | bytes]: + """ + Asynchronously create a temporary file and return an OS-level handle and the file + name. + + This function wraps `tempfile.mkstemp` and executes it in a background thread. + + :param suffix: Suffix to be added to the file name. + :param prefix: Prefix to be added to the file name. + :param dir: Directory in which the temporary file is created. + :param text: Whether the file is opened in text mode. + :return: A tuple containing the file descriptor and the file name. + + """ + return await to_thread.run_sync(tempfile.mkstemp, suffix, prefix, dir, text) + + +@overload +async def mkdtemp( + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, +) -> str: ... + + +@overload +async def mkdtemp( + suffix: bytes | None = None, + prefix: bytes | None = None, + dir: bytes | None = None, +) -> bytes: ... + + +async def mkdtemp( + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: AnyStr | None = None, +) -> str | bytes: + """ + Asynchronously create a temporary directory and return its path. + + This function wraps `tempfile.mkdtemp` and executes it in a background thread. + + :param suffix: Suffix to be added to the directory name. + :param prefix: Prefix to be added to the directory name. + :param dir: Parent directory where the temporary directory is created. + :return: The path of the created temporary directory. + + """ + return await to_thread.run_sync(tempfile.mkdtemp, suffix, prefix, dir) + + +async def gettempdir() -> str: + """ + Asynchronously return the name of the directory used for temporary files. + + This function wraps `tempfile.gettempdir` and executes it in a background thread. + + :return: The path of the temporary directory as a string. 
+ + """ + return await to_thread.run_sync(tempfile.gettempdir) + + +async def gettempdirb() -> bytes: + """ + Asynchronously return the name of the directory used for temporary files in bytes. + + This function wraps `tempfile.gettempdirb` and executes it in a background thread. + + :return: The path of the temporary directory as bytes. + + """ + return await to_thread.run_sync(tempfile.gettempdirb) diff --git a/.venv/lib/python3.12/site-packages/anyio/_core/_testing.py b/.venv/lib/python3.12/site-packages/anyio/_core/_testing.py new file mode 100644 index 0000000..369e65c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/_core/_testing.py @@ -0,0 +1,82 @@ +from __future__ import annotations + +from collections.abc import Awaitable, Generator +from typing import Any, cast + +from ._eventloop import get_async_backend + + +class TaskInfo: + """ + Represents an asynchronous task. + + :ivar int id: the unique identifier of the task + :ivar parent_id: the identifier of the parent task, if any + :vartype parent_id: Optional[int] + :ivar str name: the description of the task (if any) + :ivar ~collections.abc.Coroutine coro: the coroutine object of the task + """ + + __slots__ = "_name", "id", "parent_id", "name", "coro" + + def __init__( + self, + id: int, + parent_id: int | None, + name: str | None, + coro: Generator[Any, Any, Any] | Awaitable[Any], + ): + func = get_current_task + self._name = f"{func.__module__}.{func.__qualname__}" + self.id: int = id + self.parent_id: int | None = parent_id + self.name: str | None = name + self.coro: Generator[Any, Any, Any] | Awaitable[Any] = coro + + def __eq__(self, other: object) -> bool: + if isinstance(other, TaskInfo): + return self.id == other.id + + return NotImplemented + + def __hash__(self) -> int: + return hash(self.id) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}(id={self.id!r}, name={self.name!r})" + + def has_pending_cancellation(self) -> bool: + """ + Return ``True`` if the task has a cancellation pending, ``False`` otherwise. + + """ + return False + + +def get_current_task() -> TaskInfo: + """ + Return the current task. + + :return: a representation of the current task + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + return get_async_backend().get_current_task() + + +def get_running_tasks() -> list[TaskInfo]: + """ + Return a list of running tasks in the current event loop. 
+ + :return: a list of task info objects + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + return cast("list[TaskInfo]", get_async_backend().get_running_tasks()) + + +async def wait_all_tasks_blocked() -> None: + """Wait until all other tasks are waiting for something.""" + await get_async_backend().wait_all_tasks_blocked() diff --git a/.venv/lib/python3.12/site-packages/anyio/_core/_typedattr.py b/.venv/lib/python3.12/site-packages/anyio/_core/_typedattr.py new file mode 100644 index 0000000..f358a44 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/_core/_typedattr.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +from collections.abc import Callable, Mapping +from typing import Any, TypeVar, final, overload + +from ._exceptions import TypedAttributeLookupError + +T_Attr = TypeVar("T_Attr") +T_Default = TypeVar("T_Default") +undefined = object() + + +def typed_attribute() -> Any: + """Return a unique object, used to mark typed attributes.""" + return object() + + +class TypedAttributeSet: + """ + Superclass for typed attribute collections. + + Checks that every public attribute of every subclass has a type annotation. + """ + + def __init_subclass__(cls) -> None: + annotations: dict[str, Any] = getattr(cls, "__annotations__", {}) + for attrname in dir(cls): + if not attrname.startswith("_") and attrname not in annotations: + raise TypeError( + f"Attribute {attrname!r} is missing its type annotation" + ) + + super().__init_subclass__() + + +class TypedAttributeProvider: + """Base class for classes that wish to provide typed extra attributes.""" + + @property + def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]: + """ + A mapping of the extra attributes to callables that return the corresponding + values. + + If the provider wraps another provider, the attributes from that wrapper should + also be included in the returned mapping (but the wrapper may override the + callables from the wrapped instance). + + """ + return {} + + @overload + def extra(self, attribute: T_Attr) -> T_Attr: ... + + @overload + def extra(self, attribute: T_Attr, default: T_Default) -> T_Attr | T_Default: ... + + @final + def extra(self, attribute: Any, default: object = undefined) -> object: + """ + extra(attribute, default=undefined) + + Return the value of the given typed extra attribute. 
+ + :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to + look for + :param default: the value that should be returned if no value is found for the + attribute + :raises ~anyio.TypedAttributeLookupError: if the search failed and no default + value was given + + """ + try: + getter = self.extra_attributes[attribute] + except KeyError: + if default is undefined: + raise TypedAttributeLookupError("Attribute not found") from None + else: + return default + + return getter() diff --git a/.venv/lib/python3.12/site-packages/anyio/abc/__init__.py b/.venv/lib/python3.12/site-packages/anyio/abc/__init__.py new file mode 100644 index 0000000..d560ce3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/abc/__init__.py @@ -0,0 +1,58 @@ +from __future__ import annotations + +from ._eventloop import AsyncBackend as AsyncBackend +from ._resources import AsyncResource as AsyncResource +from ._sockets import ConnectedUDPSocket as ConnectedUDPSocket +from ._sockets import ConnectedUNIXDatagramSocket as ConnectedUNIXDatagramSocket +from ._sockets import IPAddressType as IPAddressType +from ._sockets import IPSockAddrType as IPSockAddrType +from ._sockets import SocketAttribute as SocketAttribute +from ._sockets import SocketListener as SocketListener +from ._sockets import SocketStream as SocketStream +from ._sockets import UDPPacketType as UDPPacketType +from ._sockets import UDPSocket as UDPSocket +from ._sockets import UNIXDatagramPacketType as UNIXDatagramPacketType +from ._sockets import UNIXDatagramSocket as UNIXDatagramSocket +from ._sockets import UNIXSocketStream as UNIXSocketStream +from ._streams import AnyByteReceiveStream as AnyByteReceiveStream +from ._streams import AnyByteSendStream as AnyByteSendStream +from ._streams import AnyByteStream as AnyByteStream +from ._streams import AnyByteStreamConnectable as AnyByteStreamConnectable +from ._streams import AnyUnreliableByteReceiveStream as AnyUnreliableByteReceiveStream +from ._streams import AnyUnreliableByteSendStream as AnyUnreliableByteSendStream +from ._streams import AnyUnreliableByteStream as AnyUnreliableByteStream +from ._streams import ByteReceiveStream as ByteReceiveStream +from ._streams import ByteSendStream as ByteSendStream +from ._streams import ByteStream as ByteStream +from ._streams import ByteStreamConnectable as ByteStreamConnectable +from ._streams import Listener as Listener +from ._streams import ObjectReceiveStream as ObjectReceiveStream +from ._streams import ObjectSendStream as ObjectSendStream +from ._streams import ObjectStream as ObjectStream +from ._streams import ObjectStreamConnectable as ObjectStreamConnectable +from ._streams import UnreliableObjectReceiveStream as UnreliableObjectReceiveStream +from ._streams import UnreliableObjectSendStream as UnreliableObjectSendStream +from ._streams import UnreliableObjectStream as UnreliableObjectStream +from ._subprocesses import Process as Process +from ._tasks import TaskGroup as TaskGroup +from ._tasks import TaskStatus as TaskStatus +from ._testing import TestRunner as TestRunner + +# Re-exported here, for backwards compatibility +# isort: off +from .._core._synchronization import ( + CapacityLimiter as CapacityLimiter, + Condition as Condition, + Event as Event, + Lock as Lock, + Semaphore as Semaphore, +) +from .._core._tasks import CancelScope as CancelScope +from ..from_thread import BlockingPortal as BlockingPortal + +# Re-export imports so they look like they live directly in this package +for __value in 
list(locals().values()): + if getattr(__value, "__module__", "").startswith("anyio.abc."): + __value.__module__ = __name__ + +del __value diff --git a/.venv/lib/python3.12/site-packages/anyio/abc/_eventloop.py b/.venv/lib/python3.12/site-packages/anyio/abc/_eventloop.py new file mode 100644 index 0000000..b1bd085 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/abc/_eventloop.py @@ -0,0 +1,414 @@ +from __future__ import annotations + +import math +import sys +from abc import ABCMeta, abstractmethod +from collections.abc import AsyncIterator, Awaitable, Callable, Sequence +from contextlib import AbstractContextManager +from os import PathLike +from signal import Signals +from socket import AddressFamily, SocketKind, socket +from typing import ( + IO, + TYPE_CHECKING, + Any, + TypeVar, + Union, + overload, +) + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + +if TYPE_CHECKING: + from _typeshed import FileDescriptorLike + + from .._core._synchronization import CapacityLimiter, Event, Lock, Semaphore + from .._core._tasks import CancelScope + from .._core._testing import TaskInfo + from ._sockets import ( + ConnectedUDPSocket, + ConnectedUNIXDatagramSocket, + IPSockAddrType, + SocketListener, + SocketStream, + UDPSocket, + UNIXDatagramSocket, + UNIXSocketStream, + ) + from ._subprocesses import Process + from ._tasks import TaskGroup + from ._testing import TestRunner + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") +StrOrBytesPath: TypeAlias = Union[str, bytes, "PathLike[str]", "PathLike[bytes]"] + + +class AsyncBackend(metaclass=ABCMeta): + @classmethod + @abstractmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + """ + Run the given coroutine function in an asynchronous event loop. + + The current thread must not be already running an event loop. + + :param func: a coroutine function + :param args: positional arguments to ``func`` + :param kwargs: positional arguments to ``func`` + :param options: keyword arguments to call the backend ``run()`` implementation + with + :return: the return value of the coroutine function + """ + + @classmethod + @abstractmethod + def current_token(cls) -> object: + """ + Return an object that allows other threads to run code inside the event loop. + + :return: a token object, specific to the event loop running in the current + thread + """ + + @classmethod + @abstractmethod + def current_time(cls) -> float: + """ + Return the current value of the event loop's internal clock. + + :return: the clock value (seconds) + """ + + @classmethod + @abstractmethod + def cancelled_exception_class(cls) -> type[BaseException]: + """Return the exception class that is raised in a task if it's cancelled.""" + + @classmethod + @abstractmethod + async def checkpoint(cls) -> None: + """ + Check if the task has been cancelled, and allow rescheduling of other tasks. + + This is effectively the same as running :meth:`checkpoint_if_cancelled` and then + :meth:`cancel_shielded_checkpoint`. + """ + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + """ + Check if the current task group has been cancelled. 
+ + This will check if the task has been cancelled, but will not allow other tasks + to be scheduled if not. + + """ + if cls.current_effective_deadline() == -math.inf: + await cls.checkpoint() + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + """ + Allow the rescheduling of other tasks. + + This will give other tasks the opportunity to run, but without checking if the + current task group has been cancelled, unlike with :meth:`checkpoint`. + + """ + with cls.create_cancel_scope(shield=True): + await cls.sleep(0) + + @classmethod + @abstractmethod + async def sleep(cls, delay: float) -> None: + """ + Pause the current task for the specified duration. + + :param delay: the duration, in seconds + """ + + @classmethod + @abstractmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + pass + + @classmethod + @abstractmethod + def current_effective_deadline(cls) -> float: + """ + Return the nearest deadline among all the cancel scopes effective for the + current task. + + :return: + - a clock value from the event loop's internal clock + - ``inf`` if there is no deadline in effect + - ``-inf`` if the current scope has been cancelled + :rtype: float + """ + + @classmethod + @abstractmethod + def create_task_group(cls) -> TaskGroup: + pass + + @classmethod + @abstractmethod + def create_event(cls) -> Event: + pass + + @classmethod + @abstractmethod + def create_lock(cls, *, fast_acquire: bool) -> Lock: + pass + + @classmethod + @abstractmethod + def create_semaphore( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> Semaphore: + pass + + @classmethod + @abstractmethod + def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: + pass + + @classmethod + @abstractmethod + async def run_sync_in_worker_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: CapacityLimiter | None = None, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def check_cancelled(cls) -> None: + pass + + @classmethod + @abstractmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + async def open_process( + cls, + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + **kwargs: Any, + ) -> Process: + pass + + @classmethod + @abstractmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[Process]) -> None: + pass + + @classmethod + @abstractmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> SocketStream: + pass + + @classmethod + @abstractmethod + async def connect_unix(cls, path: str | bytes) -> UNIXSocketStream: + pass + + @classmethod + @abstractmethod + def create_tcp_listener(cls, sock: socket) -> SocketListener: + pass + + @classmethod + @abstractmethod + def create_unix_listener(cls, sock: socket) -> SocketListener: + pass + + @classmethod + @abstractmethod + async def create_udp_socket( + cls, + family: AddressFamily, + local_address: IPSockAddrType | None, + 
remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + pass + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: None + ) -> UNIXDatagramSocket: ... + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: str | bytes + ) -> ConnectedUNIXDatagramSocket: ... + + @classmethod + @abstractmethod + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: str | bytes | None + ) -> UNIXDatagramSocket | ConnectedUNIXDatagramSocket: + pass + + @classmethod + @abstractmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> Sequence[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], + ] + ]: + pass + + @classmethod + @abstractmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + pass + + @classmethod + @abstractmethod + async def wait_readable(cls, obj: FileDescriptorLike) -> None: + pass + + @classmethod + @abstractmethod + async def wait_writable(cls, obj: FileDescriptorLike) -> None: + pass + + @classmethod + @abstractmethod + def notify_closing(cls, obj: FileDescriptorLike) -> None: + pass + + @classmethod + @abstractmethod + async def wrap_listener_socket(cls, sock: socket) -> SocketListener: + pass + + @classmethod + @abstractmethod + async def wrap_stream_socket(cls, sock: socket) -> SocketStream: + pass + + @classmethod + @abstractmethod + async def wrap_unix_stream_socket(cls, sock: socket) -> UNIXSocketStream: + pass + + @classmethod + @abstractmethod + async def wrap_udp_socket(cls, sock: socket) -> UDPSocket: + pass + + @classmethod + @abstractmethod + async def wrap_connected_udp_socket(cls, sock: socket) -> ConnectedUDPSocket: + pass + + @classmethod + @abstractmethod + async def wrap_unix_datagram_socket(cls, sock: socket) -> UNIXDatagramSocket: + pass + + @classmethod + @abstractmethod + async def wrap_connected_unix_datagram_socket( + cls, sock: socket + ) -> ConnectedUNIXDatagramSocket: + pass + + @classmethod + @abstractmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + pass + + @classmethod + @abstractmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> AbstractContextManager[AsyncIterator[Signals]]: + pass + + @classmethod + @abstractmethod + def get_current_task(cls) -> TaskInfo: + pass + + @classmethod + @abstractmethod + def get_running_tasks(cls) -> Sequence[TaskInfo]: + pass + + @classmethod + @abstractmethod + async def wait_all_tasks_blocked(cls) -> None: + pass + + @classmethod + @abstractmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + pass diff --git a/.venv/lib/python3.12/site-packages/anyio/abc/_resources.py b/.venv/lib/python3.12/site-packages/anyio/abc/_resources.py new file mode 100644 index 0000000..10df115 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/abc/_resources.py @@ -0,0 +1,33 @@ +from __future__ import annotations + +from abc import ABCMeta, abstractmethod +from types import TracebackType +from typing import TypeVar + +T = TypeVar("T") + + +class AsyncResource(metaclass=ABCMeta): + """ + Abstract base class for all closeable asynchronous resources. 
+ + Works as an asynchronous context manager which returns the instance itself on enter, + and calls :meth:`aclose` on exit. + """ + + __slots__ = () + + async def __aenter__(self: T) -> T: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.aclose() + + @abstractmethod + async def aclose(self) -> None: + """Close the resource.""" diff --git a/.venv/lib/python3.12/site-packages/anyio/abc/_sockets.py b/.venv/lib/python3.12/site-packages/anyio/abc/_sockets.py new file mode 100644 index 0000000..3ff60d4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/abc/_sockets.py @@ -0,0 +1,405 @@ +from __future__ import annotations + +import errno +import socket +import sys +from abc import abstractmethod +from collections.abc import Callable, Collection, Mapping +from contextlib import AsyncExitStack +from io import IOBase +from ipaddress import IPv4Address, IPv6Address +from socket import AddressFamily +from typing import Any, TypeVar, Union + +from .._core._eventloop import get_async_backend +from .._core._typedattr import ( + TypedAttributeProvider, + TypedAttributeSet, + typed_attribute, +) +from ._streams import ByteStream, Listener, UnreliableObjectStream +from ._tasks import TaskGroup + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + +IPAddressType: TypeAlias = Union[str, IPv4Address, IPv6Address] +IPSockAddrType: TypeAlias = tuple[str, int] +SockAddrType: TypeAlias = Union[IPSockAddrType, str] +UDPPacketType: TypeAlias = tuple[bytes, IPSockAddrType] +UNIXDatagramPacketType: TypeAlias = tuple[bytes, str] +T_Retval = TypeVar("T_Retval") + + +def _validate_socket( + sock_or_fd: socket.socket | int, + sock_type: socket.SocketKind, + addr_family: socket.AddressFamily = socket.AF_UNSPEC, + *, + require_connected: bool = False, + require_bound: bool = False, +) -> socket.socket: + if isinstance(sock_or_fd, int): + try: + sock = socket.socket(fileno=sock_or_fd) + except OSError as exc: + if exc.errno == errno.ENOTSOCK: + raise ValueError( + "the file descriptor does not refer to a socket" + ) from exc + elif require_connected: + raise ValueError("the socket must be connected") from exc + elif require_bound: + raise ValueError("the socket must be bound to a local address") from exc + else: + raise + elif isinstance(sock_or_fd, socket.socket): + sock = sock_or_fd + else: + raise TypeError( + f"expected an int or socket, got {type(sock_or_fd).__qualname__} instead" + ) + + try: + if require_connected: + try: + sock.getpeername() + except OSError as exc: + raise ValueError("the socket must be connected") from exc + + if require_bound: + try: + if sock.family in (socket.AF_INET, socket.AF_INET6): + bound_addr = sock.getsockname()[1] + else: + bound_addr = sock.getsockname() + except OSError: + bound_addr = None + + if not bound_addr: + raise ValueError("the socket must be bound to a local address") + + if addr_family != socket.AF_UNSPEC and sock.family != addr_family: + raise ValueError( + f"address family mismatch: expected {addr_family.name}, got " + f"{sock.family.name}" + ) + + if sock.type != sock_type: + raise ValueError( + f"socket type mismatch: expected {sock_type.name}, got {sock.type.name}" + ) + except BaseException: + # Avoid ResourceWarning from the locally constructed socket object + if isinstance(sock_or_fd, int): + sock.detach() + + raise + + sock.setblocking(False) + return sock + + +class 
SocketAttribute(TypedAttributeSet): + """ + .. attribute:: family + :type: socket.AddressFamily + + the address family of the underlying socket + + .. attribute:: local_address + :type: tuple[str, int] | str + + the local address the underlying socket is connected to + + .. attribute:: local_port + :type: int + + for IP based sockets, the local port the underlying socket is bound to + + .. attribute:: raw_socket + :type: socket.socket + + the underlying stdlib socket object + + .. attribute:: remote_address + :type: tuple[str, int] | str + + the remote address the underlying socket is connected to + + .. attribute:: remote_port + :type: int + + for IP based sockets, the remote port the underlying socket is connected to + """ + + family: AddressFamily = typed_attribute() + local_address: SockAddrType = typed_attribute() + local_port: int = typed_attribute() + raw_socket: socket.socket = typed_attribute() + remote_address: SockAddrType = typed_attribute() + remote_port: int = typed_attribute() + + +class _SocketProvider(TypedAttributeProvider): + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + from .._core._sockets import convert_ipv6_sockaddr as convert + + attributes: dict[Any, Callable[[], Any]] = { + SocketAttribute.family: lambda: self._raw_socket.family, + SocketAttribute.local_address: lambda: convert( + self._raw_socket.getsockname() + ), + SocketAttribute.raw_socket: lambda: self._raw_socket, + } + try: + peername: tuple[str, int] | None = convert(self._raw_socket.getpeername()) + except OSError: + peername = None + + # Provide the remote address for connected sockets + if peername is not None: + attributes[SocketAttribute.remote_address] = lambda: peername + + # Provide local and remote ports for IP based sockets + if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6): + attributes[SocketAttribute.local_port] = ( + lambda: self._raw_socket.getsockname()[1] + ) + if peername is not None: + remote_port = peername[1] + attributes[SocketAttribute.remote_port] = lambda: remote_port + + return attributes + + @property + @abstractmethod + def _raw_socket(self) -> socket.socket: + pass + + +class SocketStream(ByteStream, _SocketProvider): + """ + Transports bytes over a socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + @classmethod + async def from_socket(cls, sock_or_fd: socket.socket | int) -> SocketStream: + """ + Wrap an existing socket object or file descriptor as a socket stream. + + The newly created socket wrapper takes ownership of the socket being passed in. + The existing socket must already be connected. + + :param sock_or_fd: a socket object or file descriptor + :return: a socket stream + + """ + sock = _validate_socket(sock_or_fd, socket.SOCK_STREAM, require_connected=True) + return await get_async_backend().wrap_stream_socket(sock) + + +class UNIXSocketStream(SocketStream): + @classmethod + async def from_socket(cls, sock_or_fd: socket.socket | int) -> UNIXSocketStream: + """ + Wrap an existing socket object or file descriptor as a UNIX socket stream. + + The newly created socket wrapper takes ownership of the socket being passed in. + The existing socket must already be connected. 
+ + :param sock_or_fd: a socket object or file descriptor + :return: a UNIX socket stream + + """ + sock = _validate_socket( + sock_or_fd, socket.SOCK_STREAM, socket.AF_UNIX, require_connected=True + ) + return await get_async_backend().wrap_unix_stream_socket(sock) + + @abstractmethod + async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: + """ + Send file descriptors along with a message to the peer. + + :param message: a non-empty bytestring + :param fds: a collection of files (either numeric file descriptors or open file + or socket objects) + """ + + @abstractmethod + async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: + """ + Receive file descriptors along with a message from the peer. + + :param msglen: length of the message to expect from the peer + :param maxfds: maximum number of file descriptors to expect from the peer + :return: a tuple of (message, file descriptors) + """ + + +class SocketListener(Listener[SocketStream], _SocketProvider): + """ + Listens to incoming socket connections. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + @classmethod + async def from_socket( + cls, + sock_or_fd: socket.socket | int, + ) -> SocketListener: + """ + Wrap an existing socket object or file descriptor as a socket listener. + + The newly created listener takes ownership of the socket being passed in. + + :param sock_or_fd: a socket object or file descriptor + :return: a socket listener + + """ + sock = _validate_socket(sock_or_fd, socket.SOCK_STREAM, require_bound=True) + return await get_async_backend().wrap_listener_socket(sock) + + @abstractmethod + async def accept(self) -> SocketStream: + """Accept an incoming connection.""" + + async def serve( + self, + handler: Callable[[SocketStream], Any], + task_group: TaskGroup | None = None, + ) -> None: + from .. import create_task_group + + async with AsyncExitStack() as stack: + if task_group is None: + task_group = await stack.enter_async_context(create_task_group()) + + while True: + stream = await self.accept() + task_group.start_soon(handler, stream) + + +class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider): + """ + Represents an unconnected UDP socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + @classmethod + async def from_socket(cls, sock_or_fd: socket.socket | int) -> UDPSocket: + """ + Wrap an existing socket object or file descriptor as a UDP socket. + + The newly created socket wrapper takes ownership of the socket being passed in. + The existing socket must be bound to a local address. + + :param sock_or_fd: a socket object or file descriptor + :return: a UDP socket + + """ + sock = _validate_socket(sock_or_fd, socket.SOCK_DGRAM, require_bound=True) + return await get_async_backend().wrap_udp_socket(sock) + + async def sendto(self, data: bytes, host: str, port: int) -> None: + """ + Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port))). + + """ + return await self.send((data, (host, port))) + + +class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider): + """ + Represents an connected UDP socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + @classmethod + async def from_socket(cls, sock_or_fd: socket.socket | int) -> ConnectedUDPSocket: + """ + Wrap an existing socket object or file descriptor as a connected UDP socket. 
+ + The newly created socket wrapper takes ownership of the socket being passed in. + The existing socket must already be connected. + + :param sock_or_fd: a socket object or file descriptor + :return: a connected UDP socket + + """ + sock = _validate_socket( + sock_or_fd, + socket.SOCK_DGRAM, + require_connected=True, + ) + return await get_async_backend().wrap_connected_udp_socket(sock) + + +class UNIXDatagramSocket( + UnreliableObjectStream[UNIXDatagramPacketType], _SocketProvider +): + """ + Represents an unconnected Unix datagram socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + @classmethod + async def from_socket( + cls, + sock_or_fd: socket.socket | int, + ) -> UNIXDatagramSocket: + """ + Wrap an existing socket object or file descriptor as a UNIX datagram + socket. + + The newly created socket wrapper takes ownership of the socket being passed in. + + :param sock_or_fd: a socket object or file descriptor + :return: a UNIX datagram socket + + """ + sock = _validate_socket(sock_or_fd, socket.SOCK_DGRAM, socket.AF_UNIX) + return await get_async_backend().wrap_unix_datagram_socket(sock) + + async def sendto(self, data: bytes, path: str) -> None: + """Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, path)).""" + return await self.send((data, path)) + + +class ConnectedUNIXDatagramSocket(UnreliableObjectStream[bytes], _SocketProvider): + """ + Represents a connected Unix datagram socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + @classmethod + async def from_socket( + cls, + sock_or_fd: socket.socket | int, + ) -> ConnectedUNIXDatagramSocket: + """ + Wrap an existing socket object or file descriptor as a connected UNIX datagram + socket. + + The newly created socket wrapper takes ownership of the socket being passed in. + The existing socket must already be connected. + + :param sock_or_fd: a socket object or file descriptor + :return: a connected UNIX datagram socket + + """ + sock = _validate_socket( + sock_or_fd, socket.SOCK_DGRAM, socket.AF_UNIX, require_connected=True + ) + return await get_async_backend().wrap_connected_unix_datagram_socket(sock) diff --git a/.venv/lib/python3.12/site-packages/anyio/abc/_streams.py b/.venv/lib/python3.12/site-packages/anyio/abc/_streams.py new file mode 100644 index 0000000..369df3f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/abc/_streams.py @@ -0,0 +1,239 @@ +from __future__ import annotations + +import sys +from abc import ABCMeta, abstractmethod +from collections.abc import Callable +from typing import Any, Generic, TypeVar, Union + +from .._core._exceptions import EndOfStream +from .._core._typedattr import TypedAttributeProvider +from ._resources import AsyncResource +from ._tasks import TaskGroup + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + +T_Item = TypeVar("T_Item") +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) + + +class UnreliableObjectReceiveStream( + Generic[T_co], AsyncResource, TypedAttributeProvider +): + """ + An interface for receiving objects. + + This interface makes no guarantees that the received messages arrive in the order in + which they were sent, or that no messages are missed. + + Asynchronously iterating over objects of this type will yield objects matching the + given type parameter. 
+ """ + + def __aiter__(self) -> UnreliableObjectReceiveStream[T_co]: + return self + + async def __anext__(self) -> T_co: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration from None + + @abstractmethod + async def receive(self) -> T_co: + """ + Receive the next item. + + :raises ~anyio.ClosedResourceError: if the receive stream has been explicitly + closed + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectSendStream( + Generic[T_contra], AsyncResource, TypedAttributeProvider +): + """ + An interface for sending objects. + + This interface makes no guarantees that the messages sent will reach the + recipient(s) in the same order in which they were sent, or at all. + """ + + @abstractmethod + async def send(self, item: T_contra) -> None: + """ + Send an item to the peer(s). + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if the send stream has been explicitly + closed + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectStream( + UnreliableObjectReceiveStream[T_Item], UnreliableObjectSendStream[T_Item] +): + """ + A bidirectional message stream which does not guarantee the order or reliability of + message delivery. + """ + + +class ObjectReceiveStream(UnreliableObjectReceiveStream[T_co]): + """ + A receive message stream which guarantees that messages are received in the same + order in which they were sent, and that no messages are missed. + """ + + +class ObjectSendStream(UnreliableObjectSendStream[T_contra]): + """ + A send message stream which guarantees that messages are delivered in the same order + in which they were sent, without missing any messages in the middle. + """ + + +class ObjectStream( + ObjectReceiveStream[T_Item], + ObjectSendStream[T_Item], + UnreliableObjectStream[T_Item], +): + """ + A bidirectional message stream which guarantees the order and reliability of message + delivery. + """ + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this + method. This method is idempotent (does nothing on successive calls). + """ + + +class ByteReceiveStream(AsyncResource, TypedAttributeProvider): + """ + An interface for receiving bytes from a single peer. + + Iterating this byte stream will yield a byte string of arbitrary length, but no more + than 65536 bytes. + """ + + def __aiter__(self) -> ByteReceiveStream: + return self + + async def __anext__(self) -> bytes: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration from None + + @abstractmethod + async def receive(self, max_bytes: int = 65536) -> bytes: + """ + Receive at most ``max_bytes`` bytes from the peer. + + .. note:: Implementers of this interface should not return an empty + :class:`bytes` object, and users should ignore them. + + :param max_bytes: maximum number of bytes to receive + :return: the received bytes + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + """ + + +class ByteSendStream(AsyncResource, TypedAttributeProvider): + """An interface for sending bytes to a single peer.""" + + @abstractmethod + async def send(self, item: bytes) -> None: + """ + Send the given bytes to the peer. 
+ + :param item: the bytes to send + """ + + +class ByteStream(ByteReceiveStream, ByteSendStream): + """A bidirectional byte stream.""" + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this + method. This method is idempotent (does nothing on successive calls). + """ + + +#: Type alias for all unreliable bytes-oriented receive streams. +AnyUnreliableByteReceiveStream: TypeAlias = Union[ + UnreliableObjectReceiveStream[bytes], ByteReceiveStream +] +#: Type alias for all unreliable bytes-oriented send streams. +AnyUnreliableByteSendStream: TypeAlias = Union[ + UnreliableObjectSendStream[bytes], ByteSendStream +] +#: Type alias for all unreliable bytes-oriented streams. +AnyUnreliableByteStream: TypeAlias = Union[UnreliableObjectStream[bytes], ByteStream] +#: Type alias for all bytes-oriented receive streams. +AnyByteReceiveStream: TypeAlias = Union[ObjectReceiveStream[bytes], ByteReceiveStream] +#: Type alias for all bytes-oriented send streams. +AnyByteSendStream: TypeAlias = Union[ObjectSendStream[bytes], ByteSendStream] +#: Type alias for all bytes-oriented streams. +AnyByteStream: TypeAlias = Union[ObjectStream[bytes], ByteStream] + + +class Listener(Generic[T_co], AsyncResource, TypedAttributeProvider): + """An interface for objects that let you accept incoming connections.""" + + @abstractmethod + async def serve( + self, handler: Callable[[T_co], Any], task_group: TaskGroup | None = None + ) -> None: + """ + Accept incoming connections as they come in and start tasks to handle them. + + :param handler: a callable that will be used to handle each accepted connection + :param task_group: the task group that will be used to start tasks for handling + each accepted connection (if omitted, an ad-hoc task group will be created) + """ + + +class ObjectStreamConnectable(Generic[T_co], metaclass=ABCMeta): + @abstractmethod + async def connect(self) -> ObjectStream[T_co]: + """ + Connect to the remote endpoint. + + :return: an object stream connected to the remote end + :raises ConnectionFailed: if the connection fails + """ + + +class ByteStreamConnectable(metaclass=ABCMeta): + @abstractmethod + async def connect(self) -> ByteStream: + """ + Connect to the remote endpoint. + + :return: a bytestream connected to the remote end + :raises ConnectionFailed: if the connection fails + """ + + +#: Type alias for all connectables returning bytestreams or bytes-oriented object streams +AnyByteStreamConnectable: TypeAlias = Union[ + ObjectStreamConnectable[bytes], ByteStreamConnectable +] diff --git a/.venv/lib/python3.12/site-packages/anyio/abc/_subprocesses.py b/.venv/lib/python3.12/site-packages/anyio/abc/_subprocesses.py new file mode 100644 index 0000000..ce0564c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/abc/_subprocesses.py @@ -0,0 +1,79 @@ +from __future__ import annotations + +from abc import abstractmethod +from signal import Signals + +from ._resources import AsyncResource +from ._streams import ByteReceiveStream, ByteSendStream + + +class Process(AsyncResource): + """An asynchronous version of :class:`subprocess.Popen`.""" + + @abstractmethod + async def wait(self) -> int: + """ + Wait until the process exits. + + :return: the exit code of the process + """ + + @abstractmethod + def terminate(self) -> None: + """ + Terminates the process, gracefully if possible. + + On Windows, this calls ``TerminateProcess()``. 
+ On POSIX systems, this sends ``SIGTERM`` to the process. + + .. seealso:: :meth:`subprocess.Popen.terminate` + """ + + @abstractmethod + def kill(self) -> None: + """ + Kills the process. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGKILL`` to the process. + + .. seealso:: :meth:`subprocess.Popen.kill` + """ + + @abstractmethod + def send_signal(self, signal: Signals) -> None: + """ + Send a signal to the subprocess. + + .. seealso:: :meth:`subprocess.Popen.send_signal` + + :param signal: the signal number (e.g. :data:`signal.SIGHUP`) + """ + + @property + @abstractmethod + def pid(self) -> int: + """The process ID of the process.""" + + @property + @abstractmethod + def returncode(self) -> int | None: + """ + The return code of the process. If the process has not yet terminated, this will + be ``None``. + """ + + @property + @abstractmethod + def stdin(self) -> ByteSendStream | None: + """The stream for the standard input of the process.""" + + @property + @abstractmethod + def stdout(self) -> ByteReceiveStream | None: + """The stream for the standard output of the process.""" + + @property + @abstractmethod + def stderr(self) -> ByteReceiveStream | None: + """The stream for the standard error output of the process.""" diff --git a/.venv/lib/python3.12/site-packages/anyio/abc/_tasks.py b/.venv/lib/python3.12/site-packages/anyio/abc/_tasks.py new file mode 100644 index 0000000..516b3ec --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/abc/_tasks.py @@ -0,0 +1,117 @@ +from __future__ import annotations + +import sys +from abc import ABCMeta, abstractmethod +from collections.abc import Awaitable, Callable +from types import TracebackType +from typing import TYPE_CHECKING, Any, Protocol, overload + +if sys.version_info >= (3, 13): + from typing import TypeVar +else: + from typing_extensions import TypeVar + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if TYPE_CHECKING: + from .._core._tasks import CancelScope + +T_Retval = TypeVar("T_Retval") +T_contra = TypeVar("T_contra", contravariant=True, default=None) +PosArgsT = TypeVarTuple("PosArgsT") + + +class TaskStatus(Protocol[T_contra]): + @overload + def started(self: TaskStatus[None]) -> None: ... + + @overload + def started(self, value: T_contra) -> None: ... + + def started(self, value: T_contra | None = None) -> None: + """ + Signal that the task has started. + + :param value: object passed back to the starter of the task + """ + + +class TaskGroup(metaclass=ABCMeta): + """ + Groups several asynchronous tasks together. + + :ivar cancel_scope: the cancel scope inherited by all child tasks + :vartype cancel_scope: CancelScope + + .. note:: On asyncio, support for eager task factories is considered to be + **experimental**. In particular, they don't follow the usual semantics of new + tasks being scheduled on the next iteration of the event loop, and may thus + cause unexpected behavior in code that wasn't written with such semantics in + mind. + """ + + cancel_scope: CancelScope + + @abstractmethod + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + """ + Start a new task in this task group. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + + .. 
versionadded:: 3.0 + """ + + @abstractmethod + async def start( + self, + func: Callable[..., Awaitable[Any]], + *args: object, + name: object = None, + ) -> Any: + """ + Start a new task and wait until it signals for readiness. + + The target callable must accept a keyword argument ``task_status`` (of type + :class:`TaskStatus`). Awaiting on this method will return whatever was passed to + ``task_status.started()`` (``None`` by default). + + .. note:: The :class:`TaskStatus` class is generic, and the type argument should + indicate the type of the value that will be passed to + ``task_status.started()``. + + :param func: a coroutine function that accepts the ``task_status`` keyword + argument + :param args: positional arguments to call the function with + :param name: an optional name for the task, for introspection and debugging + :return: the value passed to ``task_status.started()`` + :raises RuntimeError: if the task finishes without calling + ``task_status.started()`` + + .. seealso:: :ref:`start_initialize` + + .. versionadded:: 3.0 + """ + + @abstractmethod + async def __aenter__(self) -> TaskGroup: + """Enter the task group context and allow starting new tasks.""" + + @abstractmethod + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + """Exit the task group context waiting for all tasks to finish.""" diff --git a/.venv/lib/python3.12/site-packages/anyio/abc/_testing.py b/.venv/lib/python3.12/site-packages/anyio/abc/_testing.py new file mode 100644 index 0000000..7c50ed7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/abc/_testing.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +import types +from abc import ABCMeta, abstractmethod +from collections.abc import AsyncGenerator, Callable, Coroutine, Iterable +from typing import Any, TypeVar + +_T = TypeVar("_T") + + +class TestRunner(metaclass=ABCMeta): + """ + Encapsulates a running event loop. Every call made through this object will use the + same event loop. + """ + + def __enter__(self) -> TestRunner: + return self + + @abstractmethod + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> bool | None: ... + + @abstractmethod + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[_T, Any]], + kwargs: dict[str, Any], + ) -> Iterable[_T]: + """ + Run an async generator fixture. + + :param fixture_func: the fixture function + :param kwargs: keyword arguments to call the fixture function with + :return: an iterator yielding the value yielded from the async generator + """ + + @abstractmethod + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, _T]], + kwargs: dict[str, Any], + ) -> _T: + """ + Run an async fixture. + + :param fixture_func: the fixture function + :param kwargs: keyword arguments to call the fixture function with + :return: the return value of the fixture function + """ + + @abstractmethod + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + """ + Run an async test function. 
+ + :param test_func: the test function + :param kwargs: keyword arguments to call the test function with + """ diff --git a/.venv/lib/python3.12/site-packages/anyio/from_thread.py b/.venv/lib/python3.12/site-packages/anyio/from_thread.py new file mode 100644 index 0000000..837de5e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/from_thread.py @@ -0,0 +1,578 @@ +from __future__ import annotations + +__all__ = ( + "BlockingPortal", + "BlockingPortalProvider", + "check_cancelled", + "run", + "run_sync", + "start_blocking_portal", +) + +import sys +from collections.abc import Awaitable, Callable, Generator +from concurrent.futures import Future +from contextlib import ( + AbstractAsyncContextManager, + AbstractContextManager, + contextmanager, +) +from dataclasses import dataclass, field +from functools import partial +from inspect import isawaitable +from threading import Lock, Thread, current_thread, get_ident +from types import TracebackType +from typing import ( + Any, + Generic, + TypeVar, + cast, + overload, +) + +from ._core._eventloop import ( + get_cancelled_exc_class, + threadlocals, +) +from ._core._eventloop import run as run_eventloop +from ._core._exceptions import NoEventLoopError +from ._core._synchronization import Event +from ._core._tasks import CancelScope, create_task_group +from .abc._tasks import TaskStatus +from .lowlevel import EventLoopToken, current_token + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +T_Retval = TypeVar("T_Retval") +T_co = TypeVar("T_co", covariant=True) +PosArgsT = TypeVarTuple("PosArgsT") + + +def _token_or_error(token: EventLoopToken | None) -> EventLoopToken: + if token is not None: + return token + + try: + return threadlocals.current_token + except AttributeError: + raise NoEventLoopError( + "Not running inside an AnyIO worker thread, and no event loop token was " + "provided" + ) from None + + +def run( + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + token: EventLoopToken | None = None, +) -> T_Retval: + """ + Call a coroutine function from a worker thread. + + :param func: a coroutine function + :param args: positional arguments for the callable + :param token: an event loop token to use to get back to the event loop thread + (required if calling this function from outside an AnyIO worker thread) + :return: the return value of the coroutine function + :raises MissingTokenError: if no token was provided and called from outside an + AnyIO worker thread + :raises RunFinishedError: if the event loop tied to ``token`` is no longer running + + .. versionchanged:: 4.11.0 + Added the ``token`` parameter. + + """ + explicit_token = token is not None + token = _token_or_error(token) + return token.backend_class.run_async_from_thread( + func, args, token=token.native_token if explicit_token else None + ) + + +def run_sync( + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + token: EventLoopToken | None = None, +) -> T_Retval: + """ + Call a function in the event loop thread from a worker thread. 
+ + :param func: a callable + :param args: positional arguments for the callable + :param token: an event loop token to use to get back to the event loop thread + (required if calling this function from outside an AnyIO worker thread) + :return: the return value of the callable + :raises MissingTokenError: if no token was provided and called from outside an + AnyIO worker thread + :raises RunFinishedError: if the event loop tied to ``token`` is no longer running + + .. versionchanged:: 4.11.0 + Added the ``token`` parameter. + + """ + explicit_token = token is not None + token = _token_or_error(token) + return token.backend_class.run_sync_from_thread( + func, args, token=token.native_token if explicit_token else None + ) + + +class _BlockingAsyncContextManager(Generic[T_co], AbstractContextManager): + _enter_future: Future[T_co] + _exit_future: Future[bool | None] + _exit_event: Event + _exit_exc_info: tuple[ + type[BaseException] | None, BaseException | None, TracebackType | None + ] = (None, None, None) + + def __init__( + self, async_cm: AbstractAsyncContextManager[T_co], portal: BlockingPortal + ): + self._async_cm = async_cm + self._portal = portal + + async def run_async_cm(self) -> bool | None: + try: + self._exit_event = Event() + value = await self._async_cm.__aenter__() + except BaseException as exc: + self._enter_future.set_exception(exc) + raise + else: + self._enter_future.set_result(value) + + try: + # Wait for the sync context manager to exit. + # This next statement can raise `get_cancelled_exc_class()` if + # something went wrong in a task group in this async context + # manager. + await self._exit_event.wait() + finally: + # In case of cancellation, it could be that we end up here before + # `_BlockingAsyncContextManager.__exit__` is called, and an + # `_exit_exc_info` has been set. + result = await self._async_cm.__aexit__(*self._exit_exc_info) + + return result + + def __enter__(self) -> T_co: + self._enter_future = Future() + self._exit_future = self._portal.start_task_soon(self.run_async_cm) + return self._enter_future.result() + + def __exit__( + self, + __exc_type: type[BaseException] | None, + __exc_value: BaseException | None, + __traceback: TracebackType | None, + ) -> bool | None: + self._exit_exc_info = __exc_type, __exc_value, __traceback + self._portal.call(self._exit_event.set) + return self._exit_future.result() + + +class _BlockingPortalTaskStatus(TaskStatus): + def __init__(self, future: Future): + self._future = future + + def started(self, value: object = None) -> None: + self._future.set_result(value) + + +class BlockingPortal: + """ + An object that lets external threads run code in an asynchronous event loop. 
+ + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + """ + + def __init__(self) -> None: + self._token = current_token() + self._event_loop_thread_id: int | None = get_ident() + self._stop_event = Event() + self._task_group = create_task_group() + + async def __aenter__(self) -> BlockingPortal: + await self._task_group.__aenter__() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + await self.stop() + return await self._task_group.__aexit__(exc_type, exc_val, exc_tb) + + def _check_running(self) -> None: + if self._event_loop_thread_id is None: + raise RuntimeError("This portal is not running") + if self._event_loop_thread_id == get_ident(): + raise RuntimeError( + "This method cannot be called from the event loop thread" + ) + + async def sleep_until_stopped(self) -> None: + """Sleep until :meth:`stop` is called.""" + await self._stop_event.wait() + + async def stop(self, cancel_remaining: bool = False) -> None: + """ + Signal the portal to shut down. + + This marks the portal as no longer accepting new calls and exits from + :meth:`sleep_until_stopped`. + + :param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` + to let them finish before returning + + """ + self._event_loop_thread_id = None + self._stop_event.set() + if cancel_remaining: + self._task_group.cancel_scope.cancel("the blocking portal is shutting down") + + async def _call_func( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + future: Future[T_Retval], + ) -> None: + def callback(f: Future[T_Retval]) -> None: + if f.cancelled(): + if self._event_loop_thread_id == get_ident(): + scope.cancel("the future was cancelled") + elif self._event_loop_thread_id is not None: + self.call(scope.cancel, "the future was cancelled") + + try: + retval_or_awaitable = func(*args, **kwargs) + if isawaitable(retval_or_awaitable): + with CancelScope() as scope: + future.add_done_callback(callback) + retval = await retval_or_awaitable + else: + retval = retval_or_awaitable + except get_cancelled_exc_class(): + future.cancel() + future.set_running_or_notify_cancel() + except BaseException as exc: + if not future.cancelled(): + future.set_exception(exc) + + # Let base exceptions fall through + if not isinstance(exc, Exception): + raise + else: + if not future.cancelled(): + future.set_result(retval) + finally: + scope = None # type: ignore[assignment] + + def _spawn_task_from_thread( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + name: object, + future: Future[T_Retval], + ) -> None: + """ + Spawn a new task using the given callable. + + :param func: a callable + :param args: positional arguments to be passed to the callable + :param kwargs: keyword arguments to be passed to the callable + :param name: name of the task (will be coerced to a string if not ``None``) + :param future: a future that will resolve to the return value of the callable, + or the exception raised during its execution + + """ + run_sync( + partial(self._task_group.start_soon, name=name), + self._call_func, + func, + args, + kwargs, + future, + token=self._token, + ) + + @overload + def call( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + ) -> T_Retval: ... 
+ + @overload + def call( + self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] + ) -> T_Retval: ... + + def call( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + *args: Unpack[PosArgsT], + ) -> T_Retval: + """ + Call the given function in the event loop thread. + + If the callable returns a coroutine object, it is awaited on. + + :param func: any callable + :raises RuntimeError: if the portal is not running or if this method is called + from within the event loop thread + + """ + return cast(T_Retval, self.start_task_soon(func, *args).result()) + + @overload + def start_task_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> Future[T_Retval]: ... + + @overload + def start_task_soon( + self, + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + name: object = None, + ) -> Future[T_Retval]: ... + + def start_task_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + *args: Unpack[PosArgsT], + name: object = None, + ) -> Future[T_Retval]: + """ + Start a task in the portal's task group. + + The task will be run inside a cancel scope which can be cancelled by cancelling + the returned future. + + :param func: the target function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a future that resolves with the return value of the callable if the + task completes successfully, or with the exception raised in the task + :raises RuntimeError: if the portal is not running or if this method is called + from within the event loop thread + :rtype: concurrent.futures.Future[T_Retval] + + .. versionadded:: 3.0 + + """ + self._check_running() + f: Future[T_Retval] = Future() + self._spawn_task_from_thread(func, args, {}, name, f) + return f + + def start_task( + self, + func: Callable[..., Awaitable[T_Retval]], + *args: object, + name: object = None, + ) -> tuple[Future[T_Retval], Any]: + """ + Start a task in the portal's task group and wait until it signals for readiness. + + This method works the same way as :meth:`.abc.TaskGroup.start`. + + :param func: the target function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a tuple of (future, task_status_value) where the ``task_status_value`` + is the value passed to ``task_status.started()`` from within the target + function + :rtype: tuple[concurrent.futures.Future[T_Retval], Any] + + .. versionadded:: 3.0 + + """ + + def task_done(future: Future[T_Retval]) -> None: + if not task_status_future.done(): + if future.cancelled(): + task_status_future.cancel() + elif future.exception(): + task_status_future.set_exception(future.exception()) + else: + exc = RuntimeError( + "Task exited without calling task_status.started()" + ) + task_status_future.set_exception(exc) + + self._check_running() + task_status_future: Future = Future() + task_status = _BlockingPortalTaskStatus(task_status_future) + f: Future = Future() + f.add_done_callback(task_done) + self._spawn_task_from_thread(func, args, {"task_status": task_status}, name, f) + return f, task_status_future.result() + + def wrap_async_context_manager( + self, cm: AbstractAsyncContextManager[T_co] + ) -> AbstractContextManager[T_co]: + """ + Wrap an async context manager as a synchronous context manager via this portal. 
+ + Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping + in the middle until the synchronous context manager exits. + + :param cm: an asynchronous context manager + :return: a synchronous context manager + + .. versionadded:: 2.1 + + """ + return _BlockingAsyncContextManager(cm, self) + + +@dataclass +class BlockingPortalProvider: + """ + A manager for a blocking portal. Used as a context manager. The first thread to + enter this context manager causes a blocking portal to be started with the specific + parameters, and the last thread to exit causes the portal to be shut down. Thus, + there will be exactly one blocking portal running in this context as long as at + least one thread has entered this context manager. + + The parameters are the same as for :func:`~anyio.run`. + + :param backend: name of the backend + :param backend_options: backend options + + .. versionadded:: 4.4 + """ + + backend: str = "asyncio" + backend_options: dict[str, Any] | None = None + _lock: Lock = field(init=False, default_factory=Lock) + _leases: int = field(init=False, default=0) + _portal: BlockingPortal = field(init=False) + _portal_cm: AbstractContextManager[BlockingPortal] | None = field( + init=False, default=None + ) + + def __enter__(self) -> BlockingPortal: + with self._lock: + if self._portal_cm is None: + self._portal_cm = start_blocking_portal( + self.backend, self.backend_options + ) + self._portal = self._portal_cm.__enter__() + + self._leases += 1 + return self._portal + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + portal_cm: AbstractContextManager[BlockingPortal] | None = None + with self._lock: + assert self._portal_cm + assert self._leases > 0 + self._leases -= 1 + if not self._leases: + portal_cm = self._portal_cm + self._portal_cm = None + del self._portal + + if portal_cm: + portal_cm.__exit__(None, None, None) + + +@contextmanager +def start_blocking_portal( + backend: str = "asyncio", + backend_options: dict[str, Any] | None = None, + *, + name: str | None = None, +) -> Generator[BlockingPortal, Any, None]: + """ + Start a new event loop in a new thread and run a blocking portal in its main task. + + The parameters are the same as for :func:`~anyio.run`. + + :param backend: name of the backend + :param backend_options: backend options + :param name: name of the thread + :return: a context manager that yields a blocking portal + + .. versionchanged:: 3.0 + Usage as a context manager is now required. 
+ + """ + + async def run_portal() -> None: + async with BlockingPortal() as portal_: + if name is None: + current_thread().name = f"{backend}-portal-{id(portal_):x}" + + future.set_result(portal_) + await portal_.sleep_until_stopped() + + def run_blocking_portal() -> None: + if future.set_running_or_notify_cancel(): + try: + run_eventloop( + run_portal, backend=backend, backend_options=backend_options + ) + except BaseException as exc: + if not future.done(): + future.set_exception(exc) + + future: Future[BlockingPortal] = Future() + thread = Thread(target=run_blocking_portal, daemon=True, name=name) + thread.start() + try: + cancel_remaining_tasks = False + portal = future.result() + try: + yield portal + except BaseException: + cancel_remaining_tasks = True + raise + finally: + try: + portal.call(portal.stop, cancel_remaining_tasks) + except RuntimeError: + pass + finally: + thread.join() + + +def check_cancelled() -> None: + """ + Check if the cancel scope of the host task's running the current worker thread has + been cancelled. + + If the host task's current cancel scope has indeed been cancelled, the + backend-specific cancellation exception will be raised. + + :raises RuntimeError: if the current thread was not spawned by + :func:`.to_thread.run_sync` + + """ + try: + token: EventLoopToken = threadlocals.current_token + except AttributeError: + raise NoEventLoopError( + "This function can only be called inside an AnyIO worker thread" + ) from None + + token.backend_class.check_cancelled() diff --git a/.venv/lib/python3.12/site-packages/anyio/functools.py b/.venv/lib/python3.12/site-packages/anyio/functools.py new file mode 100644 index 0000000..b80afe6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/functools.py @@ -0,0 +1,375 @@ +from __future__ import annotations + +__all__ = ( + "AsyncCacheInfo", + "AsyncCacheParameters", + "AsyncLRUCacheWrapper", + "cache", + "lru_cache", + "reduce", +) + +import functools +import sys +from collections import OrderedDict +from collections.abc import ( + AsyncIterable, + Awaitable, + Callable, + Coroutine, + Hashable, + Iterable, +) +from functools import update_wrapper +from inspect import iscoroutinefunction +from typing import ( + Any, + Generic, + NamedTuple, + TypedDict, + TypeVar, + cast, + final, + overload, +) +from weakref import WeakKeyDictionary + +from ._core._synchronization import Lock +from .lowlevel import RunVar, checkpoint + +if sys.version_info >= (3, 11): + from typing import ParamSpec +else: + from typing_extensions import ParamSpec + +T = TypeVar("T") +S = TypeVar("S") +P = ParamSpec("P") +lru_cache_items: RunVar[ + WeakKeyDictionary[ + AsyncLRUCacheWrapper[Any, Any], + OrderedDict[Hashable, tuple[_InitialMissingType, Lock] | tuple[Any, None]], + ] +] = RunVar("lru_cache_items") + + +class _InitialMissingType: + pass + + +initial_missing: _InitialMissingType = _InitialMissingType() + + +class AsyncCacheInfo(NamedTuple): + hits: int + misses: int + maxsize: int | None + currsize: int + + +class AsyncCacheParameters(TypedDict): + maxsize: int | None + typed: bool + always_checkpoint: bool + + +class _LRUMethodWrapper(Generic[T]): + def __init__(self, wrapper: AsyncLRUCacheWrapper[..., T], instance: object): + self.__wrapper = wrapper + self.__instance = instance + + def cache_info(self) -> AsyncCacheInfo: + return self.__wrapper.cache_info() + + def cache_parameters(self) -> AsyncCacheParameters: + return self.__wrapper.cache_parameters() + + def cache_clear(self) -> None: + self.__wrapper.cache_clear() + + async 
def __call__(self, *args: Any, **kwargs: Any) -> T: + if self.__instance is None: + return await self.__wrapper(*args, **kwargs) + + return await self.__wrapper(self.__instance, *args, **kwargs) + + +@final +class AsyncLRUCacheWrapper(Generic[P, T]): + def __init__( + self, + func: Callable[P, Awaitable[T]], + maxsize: int | None, + typed: bool, + always_checkpoint: bool, + ): + self.__wrapped__ = func + self._hits: int = 0 + self._misses: int = 0 + self._maxsize = max(maxsize, 0) if maxsize is not None else None + self._currsize: int = 0 + self._typed = typed + self._always_checkpoint = always_checkpoint + update_wrapper(self, func) + + def cache_info(self) -> AsyncCacheInfo: + return AsyncCacheInfo(self._hits, self._misses, self._maxsize, self._currsize) + + def cache_parameters(self) -> AsyncCacheParameters: + return { + "maxsize": self._maxsize, + "typed": self._typed, + "always_checkpoint": self._always_checkpoint, + } + + def cache_clear(self) -> None: + if cache := lru_cache_items.get(None): + cache.pop(self, None) + self._hits = self._misses = self._currsize = 0 + + async def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: + # Easy case first: if maxsize == 0, no caching is done + if self._maxsize == 0: + value = await self.__wrapped__(*args, **kwargs) + self._misses += 1 + return value + + # The key is constructed as a flat tuple to avoid memory overhead + key: tuple[Any, ...] = args + if kwargs: + # initial_missing is used as a separator + key += (initial_missing,) + sum(kwargs.items(), ()) + + if self._typed: + key += tuple(type(arg) for arg in args) + if kwargs: + key += (initial_missing,) + tuple(type(val) for val in kwargs.values()) + + try: + cache = lru_cache_items.get() + except LookupError: + cache = WeakKeyDictionary() + lru_cache_items.set(cache) + + try: + cache_entry = cache[self] + except KeyError: + cache_entry = cache[self] = OrderedDict() + + cached_value: T | _InitialMissingType + try: + cached_value, lock = cache_entry[key] + except KeyError: + # We're the first task to call this function + cached_value, lock = ( + initial_missing, + Lock(fast_acquire=not self._always_checkpoint), + ) + cache_entry[key] = cached_value, lock + + if lock is None: + # The value was already cached + self._hits += 1 + cache_entry.move_to_end(key) + if self._always_checkpoint: + await checkpoint() + + return cast(T, cached_value) + + async with lock: + # Check if another task filled the cache while we acquired the lock + if (cached_value := cache_entry[key][0]) is initial_missing: + self._misses += 1 + if self._maxsize is not None and self._currsize >= self._maxsize: + cache_entry.popitem(last=False) + else: + self._currsize += 1 + + value = await self.__wrapped__(*args, **kwargs) + cache_entry[key] = value, None + else: + # Another task filled the cache while we were waiting for the lock + self._hits += 1 + cache_entry.move_to_end(key) + value = cast(T, cached_value) + + return value + + def __get__( + self, instance: object, owner: type | None = None + ) -> _LRUMethodWrapper[T]: + wrapper = _LRUMethodWrapper(self, instance) + update_wrapper(wrapper, self.__wrapped__) + return wrapper + + +class _LRUCacheWrapper(Generic[T]): + def __init__(self, maxsize: int | None, typed: bool, always_checkpoint: bool): + self._maxsize = maxsize + self._typed = typed + self._always_checkpoint = always_checkpoint + + @overload + def __call__( # type: ignore[overload-overlap] + self, func: Callable[P, Coroutine[Any, Any, T]], / + ) -> AsyncLRUCacheWrapper[P, T]: ... 
+ + @overload + def __call__( + self, func: Callable[..., T], / + ) -> functools._lru_cache_wrapper[T]: ... + + def __call__( + self, f: Callable[P, Coroutine[Any, Any, T]] | Callable[..., T], / + ) -> AsyncLRUCacheWrapper[P, T] | functools._lru_cache_wrapper[T]: + if iscoroutinefunction(f): + return AsyncLRUCacheWrapper( + f, self._maxsize, self._typed, self._always_checkpoint + ) + + return functools.lru_cache(maxsize=self._maxsize, typed=self._typed)(f) # type: ignore[arg-type] + + +@overload +def cache( # type: ignore[overload-overlap] + func: Callable[P, Coroutine[Any, Any, T]], / +) -> AsyncLRUCacheWrapper[P, T]: ... + + +@overload +def cache(func: Callable[..., T], /) -> functools._lru_cache_wrapper[T]: ... + + +def cache( + func: Callable[..., T] | Callable[P, Coroutine[Any, Any, T]], / +) -> AsyncLRUCacheWrapper[P, T] | functools._lru_cache_wrapper[T]: + """ + A convenient shortcut for :func:`lru_cache` with ``maxsize=None``. + + This is the asynchronous equivalent to :func:`functools.cache`. + + """ + return lru_cache(maxsize=None)(func) + + +@overload +def lru_cache( + *, maxsize: int | None = ..., typed: bool = ..., always_checkpoint: bool = ... +) -> _LRUCacheWrapper[Any]: ... + + +@overload +def lru_cache( # type: ignore[overload-overlap] + func: Callable[P, Coroutine[Any, Any, T]], / +) -> AsyncLRUCacheWrapper[P, T]: ... + + +@overload +def lru_cache(func: Callable[..., T], /) -> functools._lru_cache_wrapper[T]: ... + + +def lru_cache( + func: Callable[P, Coroutine[Any, Any, T]] | Callable[..., T] | None = None, + /, + *, + maxsize: int | None = 128, + typed: bool = False, + always_checkpoint: bool = False, +) -> ( + AsyncLRUCacheWrapper[P, T] | functools._lru_cache_wrapper[T] | _LRUCacheWrapper[Any] +): + """ + An asynchronous version of :func:`functools.lru_cache`. + + If a synchronous function is passed, the standard library + :func:`functools.lru_cache` is applied instead. + + :param always_checkpoint: if ``True``, every call to the cached function will be + guaranteed to yield control to the event loop at least once + + .. note:: Caches and locks are managed on a per-event loop basis. + + """ + if func is None: + return _LRUCacheWrapper[Any](maxsize, typed, always_checkpoint) + + if not callable(func): + raise TypeError("the first argument must be callable") + + return _LRUCacheWrapper[T](maxsize, typed, always_checkpoint)(func) + + +@overload +async def reduce( + function: Callable[[T, S], Awaitable[T]], + iterable: Iterable[S] | AsyncIterable[S], + /, + initial: T, +) -> T: ... + + +@overload +async def reduce( + function: Callable[[T, T], Awaitable[T]], + iterable: Iterable[T] | AsyncIterable[T], + /, +) -> T: ... + + +async def reduce( # type: ignore[misc] + function: Callable[[T, T], Awaitable[T]] | Callable[[T, S], Awaitable[T]], + iterable: Iterable[T] | Iterable[S] | AsyncIterable[T] | AsyncIterable[S], + /, + initial: T | _InitialMissingType = initial_missing, +) -> T: + """ + Asynchronous version of :func:`functools.reduce`. 
+ + :param function: a coroutine function that takes two arguments: the accumulated + value and the next element from the iterable + :param iterable: an iterable or async iterable + :param initial: the initial value (if missing, the first element of the iterable is + used as the initial value) + + """ + element: Any + function_called = False + if isinstance(iterable, AsyncIterable): + async_it = iterable.__aiter__() + if initial is initial_missing: + try: + value = cast(T, await async_it.__anext__()) + except StopAsyncIteration: + raise TypeError( + "reduce() of empty sequence with no initial value" + ) from None + else: + value = cast(T, initial) + + async for element in async_it: + value = await function(value, element) + function_called = True + elif isinstance(iterable, Iterable): + it = iter(iterable) + if initial is initial_missing: + try: + value = cast(T, next(it)) + except StopIteration: + raise TypeError( + "reduce() of empty sequence with no initial value" + ) from None + else: + value = cast(T, initial) + + for element in it: + value = await function(value, element) + function_called = True + else: + raise TypeError("reduce() argument 2 must be an iterable or async iterable") + + # Make sure there is at least one checkpoint, even if an empty iterable and an + # initial value were given + if not function_called: + await checkpoint() + + return value diff --git a/.venv/lib/python3.12/site-packages/anyio/lowlevel.py b/.venv/lib/python3.12/site-packages/anyio/lowlevel.py new file mode 100644 index 0000000..ffbb75a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/lowlevel.py @@ -0,0 +1,196 @@ +from __future__ import annotations + +__all__ = ( + "EventLoopToken", + "RunvarToken", + "RunVar", + "checkpoint", + "checkpoint_if_cancelled", + "cancel_shielded_checkpoint", + "current_token", +) + +import enum +from dataclasses import dataclass +from types import TracebackType +from typing import Any, Generic, Literal, TypeVar, final, overload +from weakref import WeakKeyDictionary + +from ._core._eventloop import get_async_backend +from .abc import AsyncBackend + +T = TypeVar("T") +D = TypeVar("D") + + +async def checkpoint() -> None: + """ + Check for cancellation and allow the scheduler to switch to another task. + + Equivalent to (but more efficient than):: + + await checkpoint_if_cancelled() + await cancel_shielded_checkpoint() + + .. versionadded:: 3.0 + + """ + await get_async_backend().checkpoint() + + +async def checkpoint_if_cancelled() -> None: + """ + Enter a checkpoint if the enclosing cancel scope has been cancelled. + + This does not allow the scheduler to switch to a different task. + + .. versionadded:: 3.0 + + """ + await get_async_backend().checkpoint_if_cancelled() + + +async def cancel_shielded_checkpoint() -> None: + """ + Allow the scheduler to switch to another task but without checking for cancellation. + + Equivalent to (but potentially more efficient than):: + + with CancelScope(shield=True): + await checkpoint() + + .. versionadded:: 3.0 + + """ + await get_async_backend().cancel_shielded_checkpoint() + + +@final +@dataclass(frozen=True, repr=False) +class EventLoopToken: + """ + An opaque object that holds a reference to an event loop. + + .. versionadded:: 4.11.0 + """ + + backend_class: type[AsyncBackend] + native_token: object + + +def current_token() -> EventLoopToken: + """ + Return a token object that can be used to call code in the current event loop from + another thread. 
+ + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + .. versionadded:: 4.11.0 + + """ + backend_class = get_async_backend() + raw_token = backend_class.current_token() + return EventLoopToken(backend_class, raw_token) + + +_run_vars: WeakKeyDictionary[object, dict[RunVar[Any], Any]] = WeakKeyDictionary() + + +class _NoValueSet(enum.Enum): + NO_VALUE_SET = enum.auto() + + +class RunvarToken(Generic[T]): + __slots__ = "_var", "_value", "_redeemed" + + def __init__(self, var: RunVar[T], value: T | Literal[_NoValueSet.NO_VALUE_SET]): + self._var = var + self._value: T | Literal[_NoValueSet.NO_VALUE_SET] = value + self._redeemed = False + + def __enter__(self) -> RunvarToken[T]: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self._var.reset(self) + + +class RunVar(Generic[T]): + """ + Like a :class:`~contextvars.ContextVar`, except scoped to the running event loop. + + Can be used as a context manager, Just like :class:`~contextvars.ContextVar`, that + will reset the variable to its previous value when the context block is exited. + """ + + __slots__ = "_name", "_default" + + NO_VALUE_SET: Literal[_NoValueSet.NO_VALUE_SET] = _NoValueSet.NO_VALUE_SET + + def __init__( + self, name: str, default: T | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET + ): + self._name = name + self._default = default + + @property + def _current_vars(self) -> dict[RunVar[T], T]: + native_token = current_token().native_token + try: + return _run_vars[native_token] + except KeyError: + run_vars = _run_vars[native_token] = {} + return run_vars + + @overload + def get(self, default: D) -> T | D: ... + + @overload + def get(self) -> T: ... 
+
+    def get(
+        self, default: D | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET
+    ) -> T | D:
+        try:
+            return self._current_vars[self]
+        except KeyError:
+            if default is not RunVar.NO_VALUE_SET:
+                return default
+            elif self._default is not RunVar.NO_VALUE_SET:
+                return self._default
+
+            raise LookupError(
+                f'Run variable "{self._name}" has no value and no default set'
+            )
+
+    def set(self, value: T) -> RunvarToken[T]:
+        current_vars = self._current_vars
+        token = RunvarToken(self, current_vars.get(self, RunVar.NO_VALUE_SET))
+        current_vars[self] = value
+        return token
+
+    def reset(self, token: RunvarToken[T]) -> None:
+        if token._var is not self:
+            raise ValueError("This token does not belong to this RunVar")
+
+        if token._redeemed:
+            raise ValueError("This token has already been used")
+
+        if token._value is _NoValueSet.NO_VALUE_SET:
+            try:
+                del self._current_vars[self]
+            except KeyError:
+                pass
+        else:
+            self._current_vars[self] = token._value
+
+        token._redeemed = True
+
+    def __repr__(self) -> str:
+        return f"<RunVar name={self._name!r}>"
diff --git a/.venv/lib/python3.12/site-packages/anyio/py.typed b/.venv/lib/python3.12/site-packages/anyio/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/.venv/lib/python3.12/site-packages/anyio/pytest_plugin.py b/.venv/lib/python3.12/site-packages/anyio/pytest_plugin.py
new file mode 100644
index 0000000..4222816
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/anyio/pytest_plugin.py
@@ -0,0 +1,302 @@
+from __future__ import annotations
+
+import socket
+import sys
+from collections.abc import Callable, Generator, Iterator
+from contextlib import ExitStack, contextmanager
+from inspect import isasyncgenfunction, iscoroutinefunction, ismethod
+from typing import Any, cast
+
+import pytest
+from _pytest.fixtures import SubRequest
+from _pytest.outcomes import Exit
+
+from . 
import get_available_backends +from ._core._eventloop import ( + current_async_library, + get_async_backend, + reset_current_async_library, + set_current_async_library, +) +from ._core._exceptions import iterate_exceptions +from .abc import TestRunner + +if sys.version_info < (3, 11): + from exceptiongroup import ExceptionGroup + +_current_runner: TestRunner | None = None +_runner_stack: ExitStack | None = None +_runner_leases = 0 + + +def extract_backend_and_options(backend: object) -> tuple[str, dict[str, Any]]: + if isinstance(backend, str): + return backend, {} + elif isinstance(backend, tuple) and len(backend) == 2: + if isinstance(backend[0], str) and isinstance(backend[1], dict): + return cast(tuple[str, dict[str, Any]], backend) + + raise TypeError("anyio_backend must be either a string or tuple of (string, dict)") + + +@contextmanager +def get_runner( + backend_name: str, backend_options: dict[str, Any] +) -> Iterator[TestRunner]: + global _current_runner, _runner_leases, _runner_stack + if _current_runner is None: + asynclib = get_async_backend(backend_name) + _runner_stack = ExitStack() + if current_async_library() is None: + # Since we're in control of the event loop, we can cache the name of the + # async library + token = set_current_async_library(backend_name) + _runner_stack.callback(reset_current_async_library, token) + + backend_options = backend_options or {} + _current_runner = _runner_stack.enter_context( + asynclib.create_test_runner(backend_options) + ) + + _runner_leases += 1 + try: + yield _current_runner + finally: + _runner_leases -= 1 + if not _runner_leases: + assert _runner_stack is not None + _runner_stack.close() + _runner_stack = _current_runner = None + + +def pytest_addoption(parser: pytest.Parser) -> None: + parser.addini( + "anyio_mode", + default="strict", + help='AnyIO plugin mode (either "strict" or "auto")', + ) + + +def pytest_configure(config: pytest.Config) -> None: + config.addinivalue_line( + "markers", + "anyio: mark the (coroutine function) test to be run asynchronously via anyio.", + ) + if ( + config.getini("anyio_mode") == "auto" + and config.pluginmanager.has_plugin("asyncio") + and config.getini("asyncio_mode") == "auto" + ): + config.issue_config_time_warning( + pytest.PytestConfigWarning( + "AnyIO auto mode has been enabled together with pytest-asyncio auto " + "mode. This may cause unexpected behavior." 
+ ), + 1, + ) + + +@pytest.hookimpl(hookwrapper=True) +def pytest_fixture_setup(fixturedef: Any, request: Any) -> Generator[Any]: + def wrapper(anyio_backend: Any, request: SubRequest, **kwargs: Any) -> Any: + # Rebind any fixture methods to the request instance + if ( + request.instance + and ismethod(func) + and type(func.__self__) is type(request.instance) + ): + local_func = func.__func__.__get__(request.instance) + else: + local_func = func + + backend_name, backend_options = extract_backend_and_options(anyio_backend) + if has_backend_arg: + kwargs["anyio_backend"] = anyio_backend + + if has_request_arg: + kwargs["request"] = request + + with get_runner(backend_name, backend_options) as runner: + if isasyncgenfunction(local_func): + yield from runner.run_asyncgen_fixture(local_func, kwargs) + else: + yield runner.run_fixture(local_func, kwargs) + + # Only apply this to coroutine functions and async generator functions in requests + # that involve the anyio_backend fixture + func = fixturedef.func + if isasyncgenfunction(func) or iscoroutinefunction(func): + if "anyio_backend" in request.fixturenames: + fixturedef.func = wrapper + original_argname = fixturedef.argnames + + if not (has_backend_arg := "anyio_backend" in fixturedef.argnames): + fixturedef.argnames += ("anyio_backend",) + + if not (has_request_arg := "request" in fixturedef.argnames): + fixturedef.argnames += ("request",) + + try: + return (yield) + finally: + fixturedef.func = func + fixturedef.argnames = original_argname + + return (yield) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pycollect_makeitem( + collector: pytest.Module | pytest.Class, name: str, obj: object +) -> None: + if collector.istestfunction(obj, name): + inner_func = obj.hypothesis.inner_test if hasattr(obj, "hypothesis") else obj + if iscoroutinefunction(inner_func): + anyio_auto_mode = collector.config.getini("anyio_mode") == "auto" + marker = collector.get_closest_marker("anyio") + own_markers = getattr(obj, "pytestmark", ()) + if ( + anyio_auto_mode + or marker + or any(marker.name == "anyio" for marker in own_markers) + ): + pytest.mark.usefixtures("anyio_backend")(obj) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pyfunc_call(pyfuncitem: Any) -> bool | None: + def run_with_hypothesis(**kwargs: Any) -> None: + with get_runner(backend_name, backend_options) as runner: + runner.run_test(original_func, kwargs) + + backend = pyfuncitem.funcargs.get("anyio_backend") + if backend: + backend_name, backend_options = extract_backend_and_options(backend) + + if hasattr(pyfuncitem.obj, "hypothesis"): + # Wrap the inner test function unless it's already wrapped + original_func = pyfuncitem.obj.hypothesis.inner_test + if original_func.__qualname__ != run_with_hypothesis.__qualname__: + if iscoroutinefunction(original_func): + pyfuncitem.obj.hypothesis.inner_test = run_with_hypothesis + + return None + + if iscoroutinefunction(pyfuncitem.obj): + funcargs = pyfuncitem.funcargs + testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} + with get_runner(backend_name, backend_options) as runner: + try: + runner.run_test(pyfuncitem.obj, testargs) + except ExceptionGroup as excgrp: + for exc in iterate_exceptions(excgrp): + if isinstance(exc, (Exit, KeyboardInterrupt, SystemExit)): + raise exc from excgrp + + raise + + return True + + return None + + +@pytest.fixture(scope="module", params=get_available_backends()) +def anyio_backend(request: Any) -> Any: + return request.param + + +@pytest.fixture +def anyio_backend_name(anyio_backend: 
Any) -> str: + if isinstance(anyio_backend, str): + return anyio_backend + else: + return anyio_backend[0] + + +@pytest.fixture +def anyio_backend_options(anyio_backend: Any) -> dict[str, Any]: + if isinstance(anyio_backend, str): + return {} + else: + return anyio_backend[1] + + +class FreePortFactory: + """ + Manages port generation based on specified socket kind, ensuring no duplicate + ports are generated. + + This class provides functionality for generating available free ports on the + system. It is initialized with a specific socket kind and can generate ports + for given address families while avoiding reuse of previously generated ports. + + Users should not instantiate this class directly, but use the + ``free_tcp_port_factory`` and ``free_udp_port_factory`` fixtures instead. For simple + uses cases, ``free_tcp_port`` and ``free_udp_port`` can be used instead. + """ + + def __init__(self, kind: socket.SocketKind) -> None: + self._kind = kind + self._generated = set[int]() + + @property + def kind(self) -> socket.SocketKind: + """ + The type of socket connection (e.g., :data:`~socket.SOCK_STREAM` or + :data:`~socket.SOCK_DGRAM`) used to bind for checking port availability + + """ + return self._kind + + def __call__(self, family: socket.AddressFamily | None = None) -> int: + """ + Return an unbound port for the given address family. + + :param family: if omitted, both IPv4 and IPv6 addresses will be tried + :return: a port number + + """ + if family is not None: + families = [family] + else: + families = [socket.AF_INET] + if socket.has_ipv6: + families.append(socket.AF_INET6) + + while True: + port = 0 + with ExitStack() as stack: + for family in families: + sock = stack.enter_context(socket.socket(family, self._kind)) + addr = "::1" if family == socket.AF_INET6 else "127.0.0.1" + try: + sock.bind((addr, port)) + except OSError: + break + + if not port: + port = sock.getsockname()[1] + else: + if port not in self._generated: + self._generated.add(port) + return port + + +@pytest.fixture(scope="session") +def free_tcp_port_factory() -> FreePortFactory: + return FreePortFactory(socket.SOCK_STREAM) + + +@pytest.fixture(scope="session") +def free_udp_port_factory() -> FreePortFactory: + return FreePortFactory(socket.SOCK_DGRAM) + + +@pytest.fixture +def free_tcp_port(free_tcp_port_factory: Callable[[], int]) -> int: + return free_tcp_port_factory() + + +@pytest.fixture +def free_udp_port(free_udp_port_factory: Callable[[], int]) -> int: + return free_udp_port_factory() diff --git a/.venv/lib/python3.12/site-packages/anyio/streams/__init__.py b/.venv/lib/python3.12/site-packages/anyio/streams/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/anyio/streams/buffered.py b/.venv/lib/python3.12/site-packages/anyio/streams/buffered.py new file mode 100644 index 0000000..57c7cd7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/streams/buffered.py @@ -0,0 +1,188 @@ +from __future__ import annotations + +__all__ = ( + "BufferedByteReceiveStream", + "BufferedByteStream", + "BufferedConnectable", +) + +import sys +from collections.abc import Callable, Iterable, Mapping +from dataclasses import dataclass, field +from typing import Any, SupportsIndex + +from .. 
import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead +from ..abc import ( + AnyByteReceiveStream, + AnyByteStream, + AnyByteStreamConnectable, + ByteReceiveStream, + ByteStream, + ByteStreamConnectable, +) + +if sys.version_info >= (3, 12): + from typing import override +else: + from typing_extensions import override + + +@dataclass(eq=False) +class BufferedByteReceiveStream(ByteReceiveStream): + """ + Wraps any bytes-based receive stream and uses a buffer to provide sophisticated + receiving capabilities in the form of a byte stream. + """ + + receive_stream: AnyByteReceiveStream + _buffer: bytearray = field(init=False, default_factory=bytearray) + _closed: bool = field(init=False, default=False) + + async def aclose(self) -> None: + await self.receive_stream.aclose() + self._closed = True + + @property + def buffer(self) -> bytes: + """The bytes currently in the buffer.""" + return bytes(self._buffer) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.receive_stream.extra_attributes + + def feed_data(self, data: Iterable[SupportsIndex], /) -> None: + """ + Append data directly into the buffer. + + Any data in the buffer will be consumed by receive operations before receiving + anything from the wrapped stream. + + :param data: the data to append to the buffer (can be bytes or anything else + that supports ``__index__()``) + + """ + self._buffer.extend(data) + + async def receive(self, max_bytes: int = 65536) -> bytes: + if self._closed: + raise ClosedResourceError + + if self._buffer: + chunk = bytes(self._buffer[:max_bytes]) + del self._buffer[:max_bytes] + return chunk + elif isinstance(self.receive_stream, ByteReceiveStream): + return await self.receive_stream.receive(max_bytes) + else: + # With a bytes-oriented object stream, we need to handle any surplus bytes + # we get from the receive() call + chunk = await self.receive_stream.receive() + if len(chunk) > max_bytes: + # Save the surplus bytes in the buffer + self._buffer.extend(chunk[max_bytes:]) + return chunk[:max_bytes] + else: + return chunk + + async def receive_exactly(self, nbytes: int) -> bytes: + """ + Read exactly the given amount of bytes from the stream. + + :param nbytes: the number of bytes to read + :return: the bytes read + :raises ~anyio.IncompleteRead: if the stream was closed before the requested + amount of bytes could be read from the stream + + """ + while True: + remaining = nbytes - len(self._buffer) + if remaining <= 0: + retval = self._buffer[:nbytes] + del self._buffer[:nbytes] + return bytes(retval) + + try: + if isinstance(self.receive_stream, ByteReceiveStream): + chunk = await self.receive_stream.receive(remaining) + else: + chunk = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + self._buffer.extend(chunk) + + async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes: + """ + Read from the stream until the delimiter is found or max_bytes have been read. 
+ + :param delimiter: the marker to look for in the stream + :param max_bytes: maximum number of bytes that will be read before raising + :exc:`~anyio.DelimiterNotFound` + :return: the bytes read (not including the delimiter) + :raises ~anyio.IncompleteRead: if the stream was closed before the delimiter + was found + :raises ~anyio.DelimiterNotFound: if the delimiter is not found within the + bytes read up to the maximum allowed + + """ + delimiter_size = len(delimiter) + offset = 0 + while True: + # Check if the delimiter can be found in the current buffer + index = self._buffer.find(delimiter, offset) + if index >= 0: + found = self._buffer[:index] + del self._buffer[: index + len(delimiter) :] + return bytes(found) + + # Check if the buffer is already at or over the limit + if len(self._buffer) >= max_bytes: + raise DelimiterNotFound(max_bytes) + + # Read more data into the buffer from the socket + try: + data = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + # Move the offset forward and add the new data to the buffer + offset = max(len(self._buffer) - delimiter_size + 1, 0) + self._buffer.extend(data) + + +class BufferedByteStream(BufferedByteReceiveStream, ByteStream): + """ + A full-duplex variant of :class:`BufferedByteReceiveStream`. All writes are passed + through to the wrapped stream as-is. + """ + + def __init__(self, stream: AnyByteStream): + """ + :param stream: the stream to be wrapped + + """ + super().__init__(stream) + self._stream = stream + + @override + async def send_eof(self) -> None: + await self._stream.send_eof() + + @override + async def send(self, item: bytes) -> None: + await self._stream.send(item) + + +class BufferedConnectable(ByteStreamConnectable): + def __init__(self, connectable: AnyByteStreamConnectable): + """ + :param connectable: the connectable to wrap + + """ + self.connectable = connectable + + @override + async def connect(self) -> BufferedByteStream: + stream = await self.connectable.connect() + return BufferedByteStream(stream) diff --git a/.venv/lib/python3.12/site-packages/anyio/streams/file.py b/.venv/lib/python3.12/site-packages/anyio/streams/file.py new file mode 100644 index 0000000..82d2da8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/streams/file.py @@ -0,0 +1,154 @@ +from __future__ import annotations + +__all__ = ( + "FileReadStream", + "FileStreamAttribute", + "FileWriteStream", +) + +from collections.abc import Callable, Mapping +from io import SEEK_SET, UnsupportedOperation +from os import PathLike +from pathlib import Path +from typing import Any, BinaryIO, cast + +from .. 
import ( + BrokenResourceError, + ClosedResourceError, + EndOfStream, + TypedAttributeSet, + to_thread, + typed_attribute, +) +from ..abc import ByteReceiveStream, ByteSendStream + + +class FileStreamAttribute(TypedAttributeSet): + #: the open file descriptor + file: BinaryIO = typed_attribute() + #: the path of the file on the file system, if available (file must be a real file) + path: Path = typed_attribute() + #: the file number, if available (file must be a real file or a TTY) + fileno: int = typed_attribute() + + +class _BaseFileStream: + def __init__(self, file: BinaryIO): + self._file = file + + async def aclose(self) -> None: + await to_thread.run_sync(self._file.close) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: dict[Any, Callable[[], Any]] = { + FileStreamAttribute.file: lambda: self._file, + } + + if hasattr(self._file, "name"): + attributes[FileStreamAttribute.path] = lambda: Path(self._file.name) + + try: + self._file.fileno() + except UnsupportedOperation: + pass + else: + attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno() + + return attributes + + +class FileReadStream(_BaseFileStream, ByteReceiveStream): + """ + A byte stream that reads from a file in the file system. + + :param file: a file that has been opened for reading in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path(cls, path: str | PathLike[str]) -> FileReadStream: + """ + Create a file read stream by opening the given file. + + :param path: path of the file to read from + + """ + file = await to_thread.run_sync(Path(path).open, "rb") + return cls(cast(BinaryIO, file)) + + async def receive(self, max_bytes: int = 65536) -> bytes: + try: + data = await to_thread.run_sync(self._file.read, max_bytes) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc + + if data: + return data + else: + raise EndOfStream + + async def seek(self, position: int, whence: int = SEEK_SET) -> int: + """ + Seek the file to the given position. + + .. seealso:: :meth:`io.IOBase.seek` + + .. note:: Not all file descriptors are seekable. + + :param position: position to seek the file to + :param whence: controls how ``position`` is interpreted + :return: the new absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.seek, position, whence) + + async def tell(self) -> int: + """ + Return the current stream position. + + .. note:: Not all file descriptors are seekable. + + :return: the current absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.tell) + + +class FileWriteStream(_BaseFileStream, ByteSendStream): + """ + A byte stream that writes to a file in the file system. + + :param file: a file that has been opened for writing in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path( + cls, path: str | PathLike[str], append: bool = False + ) -> FileWriteStream: + """ + Create a file write stream by opening the given file for writing. 
+ + :param path: path of the file to write to + :param append: if ``True``, open the file for appending; if ``False``, any + existing file at the given path will be truncated + + """ + mode = "ab" if append else "wb" + file = await to_thread.run_sync(Path(path).open, mode) + return cls(cast(BinaryIO, file)) + + async def send(self, item: bytes) -> None: + try: + await to_thread.run_sync(self._file.write, item) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc diff --git a/.venv/lib/python3.12/site-packages/anyio/streams/memory.py b/.venv/lib/python3.12/site-packages/anyio/streams/memory.py new file mode 100644 index 0000000..a3fa0c3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/streams/memory.py @@ -0,0 +1,325 @@ +from __future__ import annotations + +__all__ = ( + "MemoryObjectReceiveStream", + "MemoryObjectSendStream", + "MemoryObjectStreamStatistics", +) + +import warnings +from collections import OrderedDict, deque +from dataclasses import dataclass, field +from types import TracebackType +from typing import Generic, NamedTuple, TypeVar + +from .. import ( + BrokenResourceError, + ClosedResourceError, + EndOfStream, + WouldBlock, +) +from .._core._testing import TaskInfo, get_current_task +from ..abc import Event, ObjectReceiveStream, ObjectSendStream +from ..lowlevel import checkpoint + +T_Item = TypeVar("T_Item") +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) + + +class MemoryObjectStreamStatistics(NamedTuple): + current_buffer_used: int #: number of items stored in the buffer + #: maximum number of items that can be stored on this stream (or :data:`math.inf`) + max_buffer_size: float + open_send_streams: int #: number of unclosed clones of the send stream + open_receive_streams: int #: number of unclosed clones of the receive stream + #: number of tasks blocked on :meth:`MemoryObjectSendStream.send` + tasks_waiting_send: int + #: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive` + tasks_waiting_receive: int + + +@dataclass(eq=False) +class _MemoryObjectItemReceiver(Generic[T_Item]): + task_info: TaskInfo = field(init=False, default_factory=get_current_task) + item: T_Item = field(init=False) + + def __repr__(self) -> str: + # When item is not defined, we get following error with default __repr__: + # AttributeError: 'MemoryObjectItemReceiver' object has no attribute 'item' + item = getattr(self, "item", None) + return f"{self.__class__.__name__}(task_info={self.task_info}, item={item!r})" + + +@dataclass(eq=False) +class _MemoryObjectStreamState(Generic[T_Item]): + max_buffer_size: float = field() + buffer: deque[T_Item] = field(init=False, default_factory=deque) + open_send_channels: int = field(init=False, default=0) + open_receive_channels: int = field(init=False, default=0) + waiting_receivers: OrderedDict[Event, _MemoryObjectItemReceiver[T_Item]] = field( + init=False, default_factory=OrderedDict + ) + waiting_senders: OrderedDict[Event, T_Item] = field( + init=False, default_factory=OrderedDict + ) + + def statistics(self) -> MemoryObjectStreamStatistics: + return MemoryObjectStreamStatistics( + len(self.buffer), + self.max_buffer_size, + self.open_send_channels, + self.open_receive_channels, + len(self.waiting_senders), + len(self.waiting_receivers), + ) + + +@dataclass(eq=False) +class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]): + _state: _MemoryObjectStreamState[T_co] + _closed: bool = 
field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_receive_channels += 1 + + def receive_nowait(self) -> T_co: + """ + Receive the next item if it can be done without waiting. + + :return: the received item + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.EndOfStream: if the buffer is empty and this stream has been + closed from the sending end + :raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks + waiting to send + + """ + if self._closed: + raise ClosedResourceError + + if self._state.waiting_senders: + # Get the item from the next sender + send_event, item = self._state.waiting_senders.popitem(last=False) + self._state.buffer.append(item) + send_event.set() + + if self._state.buffer: + return self._state.buffer.popleft() + elif not self._state.open_send_channels: + raise EndOfStream + + raise WouldBlock + + async def receive(self) -> T_co: + await checkpoint() + try: + return self.receive_nowait() + except WouldBlock: + # Add ourselves in the queue + receive_event = Event() + receiver = _MemoryObjectItemReceiver[T_co]() + self._state.waiting_receivers[receive_event] = receiver + + try: + await receive_event.wait() + finally: + self._state.waiting_receivers.pop(receive_event, None) + + try: + return receiver.item + except AttributeError: + raise EndOfStream from None + + def clone(self) -> MemoryObjectReceiveStream[T_co]: + """ + Create a clone of this receive stream. + + Each clone can be closed separately. Only when all clones have been closed will + the receiving end of the memory stream be considered closed by the sending ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectReceiveStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special + case for the benefit of synchronous callbacks. + + """ + if not self._closed: + self._closed = True + self._state.open_receive_channels -= 1 + if self._state.open_receive_channels == 0: + send_events = list(self._state.waiting_senders.keys()) + for event in send_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> MemoryObjectReceiveStream[T_co]: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def __del__(self) -> None: + if not self._closed: + warnings.warn( + f"Unclosed <{self.__class__.__name__} at {id(self):x}>", + ResourceWarning, + stacklevel=1, + source=self, + ) + + +@dataclass(eq=False) +class MemoryObjectSendStream(Generic[T_contra], ObjectSendStream[T_contra]): + _state: _MemoryObjectStreamState[T_contra] + _closed: bool = field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_send_channels += 1 + + def send_nowait(self, item: T_contra) -> None: + """ + Send an item immediately if it can be done without waiting. 
+ + :param item: the item to send + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.BrokenResourceError: if the stream has been closed from the + receiving end + :raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting + to receive + + """ + if self._closed: + raise ClosedResourceError + if not self._state.open_receive_channels: + raise BrokenResourceError + + while self._state.waiting_receivers: + receive_event, receiver = self._state.waiting_receivers.popitem(last=False) + if not receiver.task_info.has_pending_cancellation(): + receiver.item = item + receive_event.set() + return + + if len(self._state.buffer) < self._state.max_buffer_size: + self._state.buffer.append(item) + else: + raise WouldBlock + + async def send(self, item: T_contra) -> None: + """ + Send an item to the stream. + + If the buffer is full, this method blocks until there is again room in the + buffer or the item can be sent directly to a receiver. + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.BrokenResourceError: if the stream has been closed from the + receiving end + + """ + await checkpoint() + try: + self.send_nowait(item) + except WouldBlock: + # Wait until there's someone on the receiving end + send_event = Event() + self._state.waiting_senders[send_event] = item + try: + await send_event.wait() + except BaseException: + self._state.waiting_senders.pop(send_event, None) + raise + + if send_event in self._state.waiting_senders: + del self._state.waiting_senders[send_event] + raise BrokenResourceError from None + + def clone(self) -> MemoryObjectSendStream[T_contra]: + """ + Create a clone of this send stream. + + Each clone can be closed separately. Only when all clones have been closed will + the sending end of the memory stream be considered closed by the receiving ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectSendStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special + case for the benefit of synchronous callbacks. + + """ + if not self._closed: + self._closed = True + self._state.open_send_channels -= 1 + if self._state.open_send_channels == 0: + receive_events = list(self._state.waiting_receivers.keys()) + self._state.waiting_receivers.clear() + for event in receive_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. 
versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> MemoryObjectSendStream[T_contra]: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def __del__(self) -> None: + if not self._closed: + warnings.warn( + f"Unclosed <{self.__class__.__name__} at {id(self):x}>", + ResourceWarning, + stacklevel=1, + source=self, + ) diff --git a/.venv/lib/python3.12/site-packages/anyio/streams/stapled.py b/.venv/lib/python3.12/site-packages/anyio/streams/stapled.py new file mode 100644 index 0000000..9248b68 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/streams/stapled.py @@ -0,0 +1,147 @@ +from __future__ import annotations + +__all__ = ( + "MultiListener", + "StapledByteStream", + "StapledObjectStream", +) + +from collections.abc import Callable, Mapping, Sequence +from dataclasses import dataclass +from typing import Any, Generic, TypeVar + +from ..abc import ( + ByteReceiveStream, + ByteSendStream, + ByteStream, + Listener, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, + TaskGroup, +) + +T_Item = TypeVar("T_Item") +T_Stream = TypeVar("T_Stream") + + +@dataclass(eq=False) +class StapledByteStream(ByteStream): + """ + Combines two byte streams into a single, bidirectional byte stream. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. + + :param ByteSendStream send_stream: the sending byte stream + :param ByteReceiveStream receive_stream: the receiving byte stream + """ + + send_stream: ByteSendStream + receive_stream: ByteReceiveStream + + async def receive(self, max_bytes: int = 65536) -> bytes: + return await self.receive_stream.receive(max_bytes) + + async def send(self, item: bytes) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.send_stream.extra_attributes, + **self.receive_stream.extra_attributes, + } + + +@dataclass(eq=False) +class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]): + """ + Combines two object streams into a single, bidirectional object stream. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. + + :param ObjectSendStream send_stream: the sending object stream + :param ObjectReceiveStream receive_stream: the receiving object stream + """ + + send_stream: ObjectSendStream[T_Item] + receive_stream: ObjectReceiveStream[T_Item] + + async def receive(self) -> T_Item: + return await self.receive_stream.receive() + + async def send(self, item: T_Item) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.send_stream.extra_attributes, + **self.receive_stream.extra_attributes, + } + + +@dataclass(eq=False) +class MultiListener(Generic[T_Stream], Listener[T_Stream]): + """ + Combines multiple listeners into one, serving connections from all of them at once. 
+ + Any MultiListeners in the given collection of listeners will have their listeners + moved into this one. + + Extra attributes are provided from each listener, with each successive listener + overriding any conflicting attributes from the previous one. + + :param listeners: listeners to serve + :type listeners: Sequence[Listener[T_Stream]] + """ + + listeners: Sequence[Listener[T_Stream]] + + def __post_init__(self) -> None: + listeners: list[Listener[T_Stream]] = [] + for listener in self.listeners: + if isinstance(listener, MultiListener): + listeners.extend(listener.listeners) + del listener.listeners[:] # type: ignore[attr-defined] + else: + listeners.append(listener) + + self.listeners = listeners + + async def serve( + self, handler: Callable[[T_Stream], Any], task_group: TaskGroup | None = None + ) -> None: + from .. import create_task_group + + async with create_task_group() as tg: + for listener in self.listeners: + tg.start_soon(listener.serve, handler, task_group) + + async def aclose(self) -> None: + for listener in self.listeners: + await listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: dict = {} + for listener in self.listeners: + attributes.update(listener.extra_attributes) + + return attributes diff --git a/.venv/lib/python3.12/site-packages/anyio/streams/text.py b/.venv/lib/python3.12/site-packages/anyio/streams/text.py new file mode 100644 index 0000000..296cd25 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/streams/text.py @@ -0,0 +1,176 @@ +from __future__ import annotations + +__all__ = ( + "TextConnectable", + "TextReceiveStream", + "TextSendStream", + "TextStream", +) + +import codecs +import sys +from collections.abc import Callable, Mapping +from dataclasses import InitVar, dataclass, field +from typing import Any + +from ..abc import ( + AnyByteReceiveStream, + AnyByteSendStream, + AnyByteStream, + AnyByteStreamConnectable, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, + ObjectStreamConnectable, +) + +if sys.version_info >= (3, 12): + from typing import override +else: + from typing_extensions import override + + +@dataclass(eq=False) +class TextReceiveStream(ObjectReceiveStream[str]): + """ + Stream wrapper that decodes bytes to strings using the given encoding. + + Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any + completely received unicode characters as soon as they come in. + + :param transport_stream: any bytes-based receive stream + :param encoding: character encoding to use for decoding bytes to strings (defaults + to ``utf-8``) + :param errors: handling scheme for decoding errors (defaults to ``strict``; see the + `codecs module documentation`_ for a comprehensive list of options) + + .. 
_codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteReceiveStream + encoding: InitVar[str] = "utf-8" + errors: InitVar[str] = "strict" + _decoder: codecs.IncrementalDecoder = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + decoder_class = codecs.getincrementaldecoder(encoding) + self._decoder = decoder_class(errors=errors) + + async def receive(self) -> str: + while True: + chunk = await self.transport_stream.receive() + decoded = self._decoder.decode(chunk) + if decoded: + return decoded + + async def aclose(self) -> None: + await self.transport_stream.aclose() + self._decoder.reset() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextSendStream(ObjectSendStream[str]): + """ + Sends strings to the wrapped stream as bytes using the given encoding. + + :param AnyByteSendStream transport_stream: any bytes-based send stream + :param str encoding: character encoding to use for encoding strings to bytes + (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see + the `codecs module documentation`_ for a comprehensive list of options) + + .. _codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteSendStream + encoding: InitVar[str] = "utf-8" + errors: str = "strict" + _encoder: Callable[..., tuple[bytes, int]] = field(init=False) + + def __post_init__(self, encoding: str) -> None: + self._encoder = codecs.getencoder(encoding) + + async def send(self, item: str) -> None: + encoded = self._encoder(item, self.errors)[0] + await self.transport_stream.send(encoded) + + async def aclose(self) -> None: + await self.transport_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextStream(ObjectStream[str]): + """ + A bidirectional stream that decodes bytes to strings on receive and encodes strings + to bytes on send. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. + + :param AnyByteStream transport_stream: any bytes-based stream + :param str encoding: character encoding to use for encoding/decoding strings to/from + bytes (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see + the `codecs module documentation`_ for a comprehensive list of options) + + .. 
_codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteStream + encoding: InitVar[str] = "utf-8" + errors: InitVar[str] = "strict" + _receive_stream: TextReceiveStream = field(init=False) + _send_stream: TextSendStream = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + self._receive_stream = TextReceiveStream( + self.transport_stream, encoding=encoding, errors=errors + ) + self._send_stream = TextSendStream( + self.transport_stream, encoding=encoding, errors=errors + ) + + async def receive(self) -> str: + return await self._receive_stream.receive() + + async def send(self, item: str) -> None: + await self._send_stream.send(item) + + async def send_eof(self) -> None: + await self.transport_stream.send_eof() + + async def aclose(self) -> None: + await self._send_stream.aclose() + await self._receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self._send_stream.extra_attributes, + **self._receive_stream.extra_attributes, + } + + +class TextConnectable(ObjectStreamConnectable[str]): + def __init__(self, connectable: AnyByteStreamConnectable): + """ + :param connectable: the bytestream endpoint to wrap + + """ + self.connectable = connectable + + @override + async def connect(self) -> TextStream: + stream = await self.connectable.connect() + return TextStream(stream) diff --git a/.venv/lib/python3.12/site-packages/anyio/streams/tls.py b/.venv/lib/python3.12/site-packages/anyio/streams/tls.py new file mode 100644 index 0000000..b507488 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/streams/tls.py @@ -0,0 +1,424 @@ +from __future__ import annotations + +__all__ = ( + "TLSAttribute", + "TLSConnectable", + "TLSListener", + "TLSStream", +) + +import logging +import re +import ssl +import sys +from collections.abc import Callable, Mapping +from dataclasses import dataclass +from functools import wraps +from ssl import SSLContext +from typing import Any, TypeVar + +from .. import ( + BrokenResourceError, + EndOfStream, + aclose_forcefully, + get_cancelled_exc_class, + to_thread, +) +from .._core._typedattr import TypedAttributeSet, typed_attribute +from ..abc import ( + AnyByteStream, + AnyByteStreamConnectable, + ByteStream, + ByteStreamConnectable, + Listener, + TaskGroup, +) + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if sys.version_info >= (3, 12): + from typing import override +else: + from typing_extensions import override + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") +_PCTRTT: TypeAlias = tuple[tuple[str, str], ...] +_PCTRTTT: TypeAlias = tuple[_PCTRTT, ...] 
+ + +class TLSAttribute(TypedAttributeSet): + """Contains Transport Layer Security related attributes.""" + + #: the selected ALPN protocol + alpn_protocol: str | None = typed_attribute() + #: the channel binding for type ``tls-unique`` + channel_binding_tls_unique: bytes = typed_attribute() + #: the selected cipher + cipher: tuple[str, str, int] = typed_attribute() + #: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert` + # for more information) + peer_certificate: None | (dict[str, str | _PCTRTTT | _PCTRTT]) = typed_attribute() + #: the peer certificate in binary form + peer_certificate_binary: bytes | None = typed_attribute() + #: ``True`` if this is the server side of the connection + server_side: bool = typed_attribute() + #: ciphers shared by the client during the TLS handshake (``None`` if this is the + #: client side) + shared_ciphers: list[tuple[str, str, int]] | None = typed_attribute() + #: the :class:`~ssl.SSLObject` used for encryption + ssl_object: ssl.SSLObject = typed_attribute() + #: ``True`` if this stream does (and expects) a closing TLS handshake when the + #: stream is being closed + standard_compatible: bool = typed_attribute() + #: the TLS protocol version (e.g. ``TLSv1.2``) + tls_version: str = typed_attribute() + + +@dataclass(eq=False) +class TLSStream(ByteStream): + """ + A stream wrapper that encrypts all sent data and decrypts received data. + + This class has no public initializer; use :meth:`wrap` instead. + All extra attributes from :class:`~TLSAttribute` are supported. + + :var AnyByteStream transport_stream: the wrapped stream + + """ + + transport_stream: AnyByteStream + standard_compatible: bool + _ssl_object: ssl.SSLObject + _read_bio: ssl.MemoryBIO + _write_bio: ssl.MemoryBIO + + @classmethod + async def wrap( + cls, + transport_stream: AnyByteStream, + *, + server_side: bool | None = None, + hostname: str | None = None, + ssl_context: ssl.SSLContext | None = None, + standard_compatible: bool = True, + ) -> TLSStream: + """ + Wrap an existing stream with Transport Layer Security. + + This performs a TLS handshake with the peer. + + :param transport_stream: a bytes-transporting stream to wrap + :param server_side: ``True`` if this is the server side of the connection, + ``False`` if this is the client side (if omitted, will be set to ``False`` + if ``hostname`` has been provided, ``False`` otherwise). Used only to create + a default context when an explicit context has not been provided. 
+ :param hostname: host name of the peer (if host name checking is desired) + :param ssl_context: the SSLContext object to use (if not provided, a secure + default will be created) + :param standard_compatible: if ``False``, skip the closing handshake when + closing the connection, and don't raise an exception if the peer does the + same + :raises ~ssl.SSLError: if the TLS handshake fails + + """ + if server_side is None: + server_side = not hostname + + if not ssl_context: + purpose = ( + ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH + ) + ssl_context = ssl.create_default_context(purpose) + + # Re-enable detection of unexpected EOFs if it was disabled by Python + if hasattr(ssl, "OP_IGNORE_UNEXPECTED_EOF"): + ssl_context.options &= ~ssl.OP_IGNORE_UNEXPECTED_EOF + + bio_in = ssl.MemoryBIO() + bio_out = ssl.MemoryBIO() + + # External SSLContext implementations may do blocking I/O in wrap_bio(), + # but the standard library implementation won't + if type(ssl_context) is ssl.SSLContext: + ssl_object = ssl_context.wrap_bio( + bio_in, bio_out, server_side=server_side, server_hostname=hostname + ) + else: + ssl_object = await to_thread.run_sync( + ssl_context.wrap_bio, + bio_in, + bio_out, + server_side, + hostname, + None, + ) + + wrapper = cls( + transport_stream=transport_stream, + standard_compatible=standard_compatible, + _ssl_object=ssl_object, + _read_bio=bio_in, + _write_bio=bio_out, + ) + await wrapper._call_sslobject_method(ssl_object.do_handshake) + return wrapper + + async def _call_sslobject_method( + self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] + ) -> T_Retval: + while True: + try: + result = func(*args) + except ssl.SSLWantReadError: + try: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + data = await self.transport_stream.receive() + except EndOfStream: + self._read_bio.write_eof() + except OSError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + raise BrokenResourceError from exc + else: + self._read_bio.write(data) + except ssl.SSLWantWriteError: + await self.transport_stream.send(self._write_bio.read()) + except ssl.SSLSyscallError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + raise BrokenResourceError from exc + except ssl.SSLError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + if isinstance(exc, ssl.SSLEOFError) or ( + exc.strerror and "UNEXPECTED_EOF_WHILE_READING" in exc.strerror + ): + if self.standard_compatible: + raise BrokenResourceError from exc + else: + raise EndOfStream from None + + raise + else: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + return result + + async def unwrap(self) -> tuple[AnyByteStream, bytes]: + """ + Does the TLS closing handshake. 
+ + :return: a tuple of (wrapped byte stream, bytes left in the read buffer) + + """ + await self._call_sslobject_method(self._ssl_object.unwrap) + self._read_bio.write_eof() + self._write_bio.write_eof() + return self.transport_stream, self._read_bio.read() + + async def aclose(self) -> None: + if self.standard_compatible: + try: + await self.unwrap() + except BaseException: + await aclose_forcefully(self.transport_stream) + raise + + await self.transport_stream.aclose() + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._call_sslobject_method(self._ssl_object.read, max_bytes) + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + await self._call_sslobject_method(self._ssl_object.write, item) + + async def send_eof(self) -> None: + tls_version = self.extra(TLSAttribute.tls_version) + match = re.match(r"TLSv(\d+)(?:\.(\d+))?", tls_version) + if match: + major, minor = int(match.group(1)), int(match.group(2) or 0) + if (major, minor) < (1, 3): + raise NotImplementedError( + f"send_eof() requires at least TLSv1.3; current " + f"session uses {tls_version}" + ) + + raise NotImplementedError( + "send_eof() has not yet been implemented for TLS streams" + ) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.transport_stream.extra_attributes, + TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol, + TLSAttribute.channel_binding_tls_unique: ( + self._ssl_object.get_channel_binding + ), + TLSAttribute.cipher: self._ssl_object.cipher, + TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False), + TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert( + True + ), + TLSAttribute.server_side: lambda: self._ssl_object.server_side, + TLSAttribute.shared_ciphers: lambda: self._ssl_object.shared_ciphers() + if self._ssl_object.server_side + else None, + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + TLSAttribute.ssl_object: lambda: self._ssl_object, + TLSAttribute.tls_version: self._ssl_object.version, + } + + +@dataclass(eq=False) +class TLSListener(Listener[TLSStream]): + """ + A convenience listener that wraps another listener and auto-negotiates a TLS session + on every accepted connection. + + If the TLS handshake times out or raises an exception, + :meth:`handle_handshake_error` is called to do whatever post-mortem processing is + deemed necessary. + + Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute. + + :param Listener listener: the listener to wrap + :param ssl_context: the SSL context object + :param standard_compatible: a flag passed through to :meth:`TLSStream.wrap` + :param handshake_timeout: time limit for the TLS handshake + (passed to :func:`~anyio.fail_after`) + """ + + listener: Listener[Any] + ssl_context: ssl.SSLContext + standard_compatible: bool = True + handshake_timeout: float = 30 + + @staticmethod + async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None: + """ + Handle an exception raised during the TLS handshake. + + This method does 3 things: + + #. Forcefully closes the original stream + #. Logs the exception (unless it was a cancellation exception) using the + ``anyio.streams.tls`` logger + #. 
Reraises the exception if it was a base exception or a cancellation exception + + :param exc: the exception + :param stream: the original stream + + """ + await aclose_forcefully(stream) + + # Log all except cancellation exceptions + if not isinstance(exc, get_cancelled_exc_class()): + # CPython (as of 3.11.5) returns incorrect `sys.exc_info()` here when using + # any asyncio implementation, so we explicitly pass the exception to log + # (https://github.com/python/cpython/issues/108668). Trio does not have this + # issue because it works around the CPython bug. + logging.getLogger(__name__).exception( + "Error during TLS handshake", exc_info=exc + ) + + # Only reraise base exceptions and cancellation exceptions + if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()): + raise + + async def serve( + self, + handler: Callable[[TLSStream], Any], + task_group: TaskGroup | None = None, + ) -> None: + @wraps(handler) + async def handler_wrapper(stream: AnyByteStream) -> None: + from .. import fail_after + + try: + with fail_after(self.handshake_timeout): + wrapped_stream = await TLSStream.wrap( + stream, + ssl_context=self.ssl_context, + standard_compatible=self.standard_compatible, + ) + except BaseException as exc: + await self.handle_handshake_error(exc, stream) + else: + await handler(wrapped_stream) + + await self.listener.serve(handler_wrapper, task_group) + + async def aclose(self) -> None: + await self.listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + } + + +class TLSConnectable(ByteStreamConnectable): + """ + Wraps another connectable and does TLS negotiation after a successful connection. + + :param connectable: the connectable to wrap + :param hostname: host name of the server (if host name checking is desired) + :param ssl_context: the SSLContext object to use (if not provided, a secure default + will be created) + :param standard_compatible: if ``False``, skip the closing handshake when closing + the connection, and don't raise an exception if the server does the same + """ + + def __init__( + self, + connectable: AnyByteStreamConnectable, + *, + hostname: str | None = None, + ssl_context: ssl.SSLContext | None = None, + standard_compatible: bool = True, + ) -> None: + self.connectable = connectable + self.ssl_context: SSLContext = ssl_context or ssl.create_default_context( + ssl.Purpose.SERVER_AUTH + ) + if not isinstance(self.ssl_context, ssl.SSLContext): + raise TypeError( + "ssl_context must be an instance of ssl.SSLContext, not " + f"{type(self.ssl_context).__name__}" + ) + self.hostname = hostname + self.standard_compatible = standard_compatible + + @override + async def connect(self) -> TLSStream: + stream = await self.connectable.connect() + try: + return await TLSStream.wrap( + stream, + hostname=self.hostname, + ssl_context=self.ssl_context, + standard_compatible=self.standard_compatible, + ) + except BaseException: + await aclose_forcefully(stream) + raise diff --git a/.venv/lib/python3.12/site-packages/anyio/to_interpreter.py b/.venv/lib/python3.12/site-packages/anyio/to_interpreter.py new file mode 100644 index 0000000..694dbe7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/to_interpreter.py @@ -0,0 +1,246 @@ +from __future__ import annotations + +__all__ = ( + "run_sync", + "current_default_interpreter_limiter", +) + +import atexit +import os +import sys +from collections import deque +from 
collections.abc import Callable +from typing import Any, Final, TypeVar + +from . import current_time, to_thread +from ._core._exceptions import BrokenWorkerInterpreter +from ._core._synchronization import CapacityLimiter +from .lowlevel import RunVar + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if sys.version_info >= (3, 14): + from concurrent.interpreters import ExecutionFailed, create + + def _interp_call( + func: Callable[..., Any], args: tuple[Any, ...] + ) -> tuple[Any, bool]: + try: + retval = func(*args) + except BaseException as exc: + return exc, True + else: + return retval, False + + class _Worker: + last_used: float = 0 + + def __init__(self) -> None: + self._interpreter = create() + + def destroy(self) -> None: + self._interpreter.close() + + def call( + self, + func: Callable[..., T_Retval], + args: tuple[Any, ...], + ) -> T_Retval: + try: + res, is_exception = self._interpreter.call(_interp_call, func, args) + except ExecutionFailed as exc: + raise BrokenWorkerInterpreter(exc.excinfo) from exc + + if is_exception: + raise res + + return res +elif sys.version_info >= (3, 13): + import _interpqueues + import _interpreters + + UNBOUND: Final = 2 # I have no clue how this works, but it was used in the stdlib + FMT_UNPICKLED: Final = 0 + FMT_PICKLED: Final = 1 + QUEUE_PICKLE_ARGS: Final = (FMT_PICKLED, UNBOUND) + QUEUE_UNPICKLE_ARGS: Final = (FMT_UNPICKLED, UNBOUND) + + _run_func = compile( + """ +import _interpqueues +from _interpreters import NotShareableError +from pickle import loads, dumps, HIGHEST_PROTOCOL + +QUEUE_PICKLE_ARGS = (1, 2) +QUEUE_UNPICKLE_ARGS = (0, 2) + +item = _interpqueues.get(queue_id)[0] +try: + func, args = loads(item) + retval = func(*args) +except BaseException as exc: + is_exception = True + retval = exc +else: + is_exception = False + +try: + _interpqueues.put(queue_id, (retval, is_exception), *QUEUE_UNPICKLE_ARGS) +except NotShareableError: + retval = dumps(retval, HIGHEST_PROTOCOL) + _interpqueues.put(queue_id, (retval, is_exception), *QUEUE_PICKLE_ARGS) + """, + "", + "exec", + ) + + class _Worker: + last_used: float = 0 + + def __init__(self) -> None: + self._interpreter_id = _interpreters.create() + self._queue_id = _interpqueues.create(1, *QUEUE_UNPICKLE_ARGS) + _interpreters.set___main___attrs( + self._interpreter_id, {"queue_id": self._queue_id} + ) + + def destroy(self) -> None: + _interpqueues.destroy(self._queue_id) + _interpreters.destroy(self._interpreter_id) + + def call( + self, + func: Callable[..., T_Retval], + args: tuple[Any, ...], + ) -> T_Retval: + import pickle + + item = pickle.dumps((func, args), pickle.HIGHEST_PROTOCOL) + _interpqueues.put(self._queue_id, item, *QUEUE_PICKLE_ARGS) + exc_info = _interpreters.exec(self._interpreter_id, _run_func) + if exc_info: + raise BrokenWorkerInterpreter(exc_info) + + res = _interpqueues.get(self._queue_id) + (res, is_exception), fmt = res[:2] + if fmt == FMT_PICKLED: + res = pickle.loads(res) + + if is_exception: + raise res + + return res +else: + + class _Worker: + last_used: float = 0 + + def __init__(self) -> None: + raise RuntimeError("subinterpreters require at least Python 3.13") + + def call( + self, + func: Callable[..., T_Retval], + args: tuple[Any, ...], + ) -> T_Retval: + raise NotImplementedError + + def destroy(self) -> None: + pass + + +DEFAULT_CPU_COUNT: Final = 8 # this is just an arbitrarily selected value +MAX_WORKER_IDLE_TIME = ( + 30 # seconds a subinterpreter can be idle 
before becoming eligible for pruning +) + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + +_idle_workers = RunVar[deque[_Worker]]("_available_workers") +_default_interpreter_limiter = RunVar[CapacityLimiter]("_default_interpreter_limiter") + + +def _stop_workers(workers: deque[_Worker]) -> None: + for worker in workers: + worker.destroy() + + workers.clear() + + +async def run_sync( + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + limiter: CapacityLimiter | None = None, +) -> T_Retval: + """ + Call the given function with the given arguments in a subinterpreter. + + .. warning:: On Python 3.13, the :mod:`concurrent.interpreters` module was not yet + available, so the code path for that Python version relies on an undocumented, + private API. As such, it is recommended to not rely on this function for anything + mission-critical on Python 3.13. + + :param func: a callable + :param args: the positional arguments for the callable + :param limiter: capacity limiter to use to limit the total number of subinterpreters + running (if omitted, the default limiter is used) + :return: the result of the call + :raises BrokenWorkerInterpreter: if there's an internal error in a subinterpreter + + """ + if limiter is None: + limiter = current_default_interpreter_limiter() + + try: + idle_workers = _idle_workers.get() + except LookupError: + idle_workers = deque() + _idle_workers.set(idle_workers) + atexit.register(_stop_workers, idle_workers) + + async with limiter: + try: + worker = idle_workers.pop() + except IndexError: + worker = _Worker() + + try: + return await to_thread.run_sync( + worker.call, + func, + args, + limiter=limiter, + ) + finally: + # Prune workers that have been idle for too long + now = current_time() + while idle_workers: + if now - idle_workers[0].last_used <= MAX_WORKER_IDLE_TIME: + break + + await to_thread.run_sync(idle_workers.popleft().destroy, limiter=limiter) + + worker.last_used = current_time() + idle_workers.append(worker) + + +def current_default_interpreter_limiter() -> CapacityLimiter: + """ + Return the capacity limiter used by default to limit the number of concurrently + running subinterpreters. + + Defaults to the number of CPU cores. 
+ + :return: a capacity limiter object + + """ + try: + return _default_interpreter_limiter.get() + except LookupError: + limiter = CapacityLimiter(os.cpu_count() or DEFAULT_CPU_COUNT) + _default_interpreter_limiter.set(limiter) + return limiter diff --git a/.venv/lib/python3.12/site-packages/anyio/to_process.py b/.venv/lib/python3.12/site-packages/anyio/to_process.py new file mode 100644 index 0000000..b289234 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/to_process.py @@ -0,0 +1,266 @@ +from __future__ import annotations + +__all__ = ( + "current_default_process_limiter", + "process_worker", + "run_sync", +) + +import os +import pickle +import subprocess +import sys +from collections import deque +from collections.abc import Callable +from importlib.util import module_from_spec, spec_from_file_location +from typing import TypeVar, cast + +from ._core._eventloop import current_time, get_async_backend, get_cancelled_exc_class +from ._core._exceptions import BrokenWorkerProcess +from ._core._subprocesses import open_process +from ._core._synchronization import CapacityLimiter +from ._core._tasks import CancelScope, fail_after +from .abc import ByteReceiveStream, ByteSendStream, Process +from .lowlevel import RunVar, checkpoint_if_cancelled +from .streams.buffered import BufferedByteReceiveStream + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +WORKER_MAX_IDLE_TIME = 300 # 5 minutes + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + +_process_pool_workers: RunVar[set[Process]] = RunVar("_process_pool_workers") +_process_pool_idle_workers: RunVar[deque[tuple[Process, float]]] = RunVar( + "_process_pool_idle_workers" +) +_default_process_limiter: RunVar[CapacityLimiter] = RunVar("_default_process_limiter") + + +async def run_sync( # type: ignore[return] + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + cancellable: bool = False, + limiter: CapacityLimiter | None = None, +) -> T_Retval: + """ + Call the given function with the given arguments in a worker process. + + If the ``cancellable`` option is enabled and the task waiting for its completion is + cancelled, the worker process running it will be abruptly terminated using SIGKILL + (or ``terminateProcess()`` on Windows). + + :param func: a callable + :param args: positional arguments for the callable + :param cancellable: ``True`` to allow cancellation of the operation while it's + running + :param limiter: capacity limiter to use to limit the total amount of processes + running (if omitted, the default limiter is used) + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + :return: an awaitable that yields the return value of the function. 
+ + """ + + async def send_raw_command(pickled_cmd: bytes) -> object: + try: + await stdin.send(pickled_cmd) + response = await buffered.receive_until(b"\n", 50) + status, length = response.split(b" ") + if status not in (b"RETURN", b"EXCEPTION"): + raise RuntimeError( + f"Worker process returned unexpected response: {response!r}" + ) + + pickled_response = await buffered.receive_exactly(int(length)) + except BaseException as exc: + workers.discard(process) + try: + process.kill() + with CancelScope(shield=True): + await process.aclose() + except ProcessLookupError: + pass + + if isinstance(exc, get_cancelled_exc_class()): + raise + else: + raise BrokenWorkerProcess from exc + + retval = pickle.loads(pickled_response) + if status == b"EXCEPTION": + assert isinstance(retval, BaseException) + raise retval + else: + return retval + + # First pickle the request before trying to reserve a worker process + await checkpoint_if_cancelled() + request = pickle.dumps(("run", func, args), protocol=pickle.HIGHEST_PROTOCOL) + + # If this is the first run in this event loop thread, set up the necessary variables + try: + workers = _process_pool_workers.get() + idle_workers = _process_pool_idle_workers.get() + except LookupError: + workers = set() + idle_workers = deque() + _process_pool_workers.set(workers) + _process_pool_idle_workers.set(idle_workers) + get_async_backend().setup_process_pool_exit_at_shutdown(workers) + + async with limiter or current_default_process_limiter(): + # Pop processes from the pool (starting from the most recently used) until we + # find one that hasn't exited yet + process: Process + while idle_workers: + process, idle_since = idle_workers.pop() + if process.returncode is None: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream( + cast(ByteReceiveStream, process.stdout) + ) + + # Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME + # seconds or longer + now = current_time() + killed_processes: list[Process] = [] + while idle_workers: + if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME: + break + + process_to_kill, idle_since = idle_workers.popleft() + process_to_kill.kill() + workers.remove(process_to_kill) + killed_processes.append(process_to_kill) + + with CancelScope(shield=True): + for killed_process in killed_processes: + await killed_process.aclose() + + break + + workers.remove(process) + else: + command = [sys.executable, "-u", "-m", __name__] + process = await open_process( + command, stdin=subprocess.PIPE, stdout=subprocess.PIPE + ) + try: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream( + cast(ByteReceiveStream, process.stdout) + ) + with fail_after(20): + message = await buffered.receive(6) + + if message != b"READY\n": + raise BrokenWorkerProcess( + f"Worker process returned unexpected response: {message!r}" + ) + + main_module_path = getattr(sys.modules["__main__"], "__file__", None) + pickled = pickle.dumps( + ("init", sys.path, main_module_path), + protocol=pickle.HIGHEST_PROTOCOL, + ) + await send_raw_command(pickled) + except (BrokenWorkerProcess, get_cancelled_exc_class()): + raise + except BaseException as exc: + process.kill() + raise BrokenWorkerProcess( + "Error during worker process initialization" + ) from exc + + workers.add(process) + + with CancelScope(shield=not cancellable): + try: + return cast(T_Retval, await send_raw_command(request)) + finally: + if process in workers: + idle_workers.append((process, current_time())) + + +def 
current_default_process_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to limit the number of worker + processes. + + :return: a capacity limiter object + + """ + try: + return _default_process_limiter.get() + except LookupError: + limiter = CapacityLimiter(os.cpu_count() or 2) + _default_process_limiter.set(limiter) + return limiter + + +def process_worker() -> None: + # Redirect standard streams to os.devnull so that user code won't interfere with the + # parent-worker communication + stdin = sys.stdin + stdout = sys.stdout + sys.stdin = open(os.devnull) + sys.stdout = open(os.devnull, "w") + + stdout.buffer.write(b"READY\n") + while True: + retval = exception = None + try: + command, *args = pickle.load(stdin.buffer) + except EOFError: + return + except BaseException as exc: + exception = exc + else: + if command == "run": + func, args = args + try: + retval = func(*args) + except BaseException as exc: + exception = exc + elif command == "init": + main_module_path: str | None + sys.path, main_module_path = args + del sys.modules["__main__"] + if main_module_path and os.path.isfile(main_module_path): + # Load the parent's main module but as __mp_main__ instead of + # __main__ (like multiprocessing does) to avoid infinite recursion + try: + spec = spec_from_file_location("__mp_main__", main_module_path) + if spec and spec.loader: + main = module_from_spec(spec) + spec.loader.exec_module(main) + sys.modules["__main__"] = main + except BaseException as exc: + exception = exc + try: + if exception is not None: + status = b"EXCEPTION" + pickled = pickle.dumps(exception, pickle.HIGHEST_PROTOCOL) + else: + status = b"RETURN" + pickled = pickle.dumps(retval, pickle.HIGHEST_PROTOCOL) + except BaseException as exc: + exception = exc + status = b"EXCEPTION" + pickled = pickle.dumps(exc, pickle.HIGHEST_PROTOCOL) + + stdout.buffer.write(b"%s %d\n" % (status, len(pickled))) + stdout.buffer.write(pickled) + + # Respect SIGTERM + if isinstance(exception, SystemExit): + raise exception + + +if __name__ == "__main__": + process_worker() diff --git a/.venv/lib/python3.12/site-packages/anyio/to_thread.py b/.venv/lib/python3.12/site-packages/anyio/to_thread.py new file mode 100644 index 0000000..4be5b71 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/anyio/to_thread.py @@ -0,0 +1,78 @@ +from __future__ import annotations + +__all__ = ( + "run_sync", + "current_default_thread_limiter", +) + +import sys +from collections.abc import Callable +from typing import TypeVar +from warnings import warn + +from ._core._eventloop import get_async_backend +from .abc import CapacityLimiter + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + + +async def run_sync( + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + abandon_on_cancel: bool = False, + cancellable: bool | None = None, + limiter: CapacityLimiter | None = None, +) -> T_Retval: + """ + Call the given function with the given arguments in a worker thread. + + If the ``cancellable`` option is enabled and the task waiting for its completion is + cancelled, the thread will still run its course but its return value (or any raised + exception) will be ignored. 
+ + :param func: a callable + :param args: positional arguments for the callable + :param abandon_on_cancel: ``True`` to abandon the thread (leaving it to run + unchecked on own) if the host task is cancelled, ``False`` to ignore + cancellations in the host task until the operation has completed in the worker + thread + :param cancellable: deprecated alias of ``abandon_on_cancel``; will override + ``abandon_on_cancel`` if both parameters are passed + :param limiter: capacity limiter to use to limit the total amount of threads running + (if omitted, the default limiter is used) + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + :return: an awaitable that yields the return value of the function. + + """ + if cancellable is not None: + abandon_on_cancel = cancellable + warn( + "The `cancellable=` keyword argument to `anyio.to_thread.run_sync` is " + "deprecated since AnyIO 4.1.0; use `abandon_on_cancel=` instead", + DeprecationWarning, + stacklevel=2, + ) + + return await get_async_backend().run_sync_in_worker_thread( + func, args, abandon_on_cancel=abandon_on_cancel, limiter=limiter + ) + + +def current_default_thread_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to limit the number of + concurrent threads. + + :return: a capacity limiter object + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + return get_async_backend().current_default_thread_limiter() diff --git a/.venv/lib/python3.12/site-packages/certifi-2026.1.4.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/certifi-2026.1.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/certifi-2026.1.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/certifi-2026.1.4.dist-info/METADATA b/.venv/lib/python3.12/site-packages/certifi-2026.1.4.dist-info/METADATA new file mode 100644 index 0000000..d1bc526 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/certifi-2026.1.4.dist-info/METADATA @@ -0,0 +1,78 @@ +Metadata-Version: 2.4 +Name: certifi +Version: 2026.1.4 +Summary: Python package for providing Mozilla's CA Bundle. 
+Home-page: https://github.com/certifi/python-certifi +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: MPL-2.0 +Project-URL: Source, https://github.com/certifi/python-certifi +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Requires-Python: >=3.7 +License-File: LICENSE +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: home-page +Dynamic: license +Dynamic: license-file +Dynamic: project-url +Dynamic: requires-python +Dynamic: summary + +Certifi: Python SSL Certificates +================================ + +Certifi provides Mozilla's carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' + +Or from the command line:: + + $ python -m certifi + /usr/local/lib/python3.7/site-packages/certifi/cacert.pem + +Enjoy! + +.. _`Requests`: https://requests.readthedocs.io/en/master/ + +Addition/Removal of Certificates +-------------------------------- + +Certifi does not support any addition/removal or other modification of the +CA trust store content. This project is intended to provide a reliable and +highly portable root of trust to python deployments. Look to upstream projects +for methods to use alternate trust. 
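As a minimal usage sketch (not part of certifi's packaged files above), the bundle path returned by ``certifi.where()`` is typically handed to the standard library's ``ssl`` module when building a client-side TLS context; the host name below is illustrative only::

    import ssl
    import http.client

    import certifi

    # Build a client TLS context that verifies peers against Mozilla's CA bundle
    context = ssl.create_default_context(cafile=certifi.where())

    # Any TLS-capable client can then use the context, e.g. http.client
    conn = http.client.HTTPSConnection("example.org", context=context)
    conn.request("GET", "/")
    print(conn.getresponse().status)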
diff --git a/.venv/lib/python3.12/site-packages/certifi-2026.1.4.dist-info/RECORD b/.venv/lib/python3.12/site-packages/certifi-2026.1.4.dist-info/RECORD new file mode 100644 index 0000000..abef4a1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/certifi-2026.1.4.dist-info/RECORD @@ -0,0 +1,14 @@ +certifi-2026.1.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi-2026.1.4.dist-info/METADATA,sha256=FSfJEfKuMo6bJlofUrtRpn4PFTYtbYyXpHN_A3ZFpIY,2473 +certifi-2026.1.4.dist-info/RECORD,, +certifi-2026.1.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 +certifi-2026.1.4.dist-info/licenses/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989 +certifi-2026.1.4.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi/__init__.py,sha256=969deMMS7Uchipr0oO4dbRBUvRi0uNYCn07VmG1aTrg,94 +certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 +certifi/__pycache__/__init__.cpython-312.pyc,, +certifi/__pycache__/__main__.cpython-312.pyc,, +certifi/__pycache__/core.cpython-312.pyc,, +certifi/cacert.pem,sha256=Tzl1_zCrvzVEO0hgZK6Ly0Hf9wf_31dsdtKS-0WKoKk,270954 +certifi/core.py,sha256=XFXycndG5pf37ayeF8N32HUuDafsyhkVMbO4BAPWHa0,3394 +certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/.venv/lib/python3.12/site-packages/certifi-2026.1.4.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/certifi-2026.1.4.dist-info/WHEEL new file mode 100644 index 0000000..e7fa31b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/certifi-2026.1.4.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/lib/python3.12/site-packages/certifi-2026.1.4.dist-info/licenses/LICENSE b/.venv/lib/python3.12/site-packages/certifi-2026.1.4.dist-info/licenses/LICENSE new file mode 100644 index 0000000..62b076c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/certifi-2026.1.4.dist-info/licenses/LICENSE @@ -0,0 +1,20 @@ +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. 
+ +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/.venv/lib/python3.12/site-packages/certifi-2026.1.4.dist-info/top_level.txt b/.venv/lib/python3.12/site-packages/certifi-2026.1.4.dist-info/top_level.txt new file mode 100644 index 0000000..963eac5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/certifi-2026.1.4.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff --git a/.venv/lib/python3.12/site-packages/certifi/__init__.py b/.venv/lib/python3.12/site-packages/certifi/__init__.py new file mode 100644 index 0000000..090fd58 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/certifi/__init__.py @@ -0,0 +1,4 @@ +from .core import contents, where + +__all__ = ["contents", "where"] +__version__ = "2026.01.04" diff --git a/.venv/lib/python3.12/site-packages/certifi/__main__.py b/.venv/lib/python3.12/site-packages/certifi/__main__.py new file mode 100644 index 0000000..8945b5d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/certifi/__main__.py @@ -0,0 +1,12 @@ +import argparse + +from certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/.venv/lib/python3.12/site-packages/certifi/cacert.pem b/.venv/lib/python3.12/site-packages/certifi/cacert.pem new file mode 100644 index 0000000..132db0d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/certifi/cacert.pem @@ -0,0 +1,4468 @@ + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K 
+WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 
17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm +NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: 
CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 
+lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg +JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 +6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg 
+Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ +DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 
17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG +9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. 
OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT +AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv 
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
+# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + 
+# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. +# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE +BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 +jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt 
+ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 +jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr +6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: 
ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv +033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 
+A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ +1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV +BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR 
+p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw +ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. +# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal +TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os +zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 
93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 +4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls +ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx 
+EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT +VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm +4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl 
+dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx +0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ 
+SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ +4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 
09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga +W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl 
+sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q +A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw 
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg +RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF 
+K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO +Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- 
+MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT +EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT 
+79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT +Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE +Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD 
+QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc +8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw +MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN 
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN +MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj +t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd 
+BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy +P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: "Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg +IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ +XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: 
b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm ++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ +q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud 
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq +M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC 
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ +ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt +Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE 
WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV +57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- + +# Issuer: CN=UCA Global G2 Root O=UniTrust +# Subject: CN=UCA Global G2 Root O=UniTrust +# Label: "UCA Global G2 Root" +# Serial: 124779693093741543919145257850076631279 +# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 +# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a +# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH +bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x +CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds +b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr +b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 +kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm +VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R +VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc +C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj +tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY +D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv +j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl +NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 +iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP +O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV +ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj +L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 +1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl +1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU +b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV +PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj +y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb +EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg +DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI ++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy 
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS +sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi +c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp +4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO 
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k +JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L +6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB +lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby 
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 +WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: 
b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: "Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa 
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV +UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC 
+gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB +RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF +dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ 
+d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk +PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB +aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
+# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe +lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- + +# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" +# Serial: 131542671362353147877283741781055151509 +# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb +# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a +# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb +-----BEGIN CERTIFICATE----- +MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw +CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw +FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S +Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 +MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL +DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS +QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK +Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu +SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC +MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy +v+c= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Label: "GlobalSign Root R46" +# Serial: 1552617688466950547958867513931858518042577 +# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef +# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 +# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA +MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD +VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy +MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt +c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ +OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG +vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud +316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo +0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE +y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF +zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE ++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN +I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs +x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa +ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC +4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 +7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg +JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti +2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk +pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF +FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt +rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk +ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 +u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP +4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 +N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 +vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Label: "GlobalSign Root E46" +# Serial: 1552617690338932563915843282459653771421763 +# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f +# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 +# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 +-----BEGIN CERTIFICATE----- +MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx +CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD +ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw +MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq +R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd +yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 ++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= +-----END CERTIFICATE----- + +# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Label: "ANF Secure Server Root CA" +# Serial: 996390341000653745 +# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 +# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 +# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 +-----BEGIN CERTIFICATE----- +MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV +BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk +YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV +BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN +MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF +UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD +VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v +dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj +cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q +yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH +2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX +H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL +zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR +p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz +W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ +SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn +LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 +n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B +u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj +o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L +9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej +rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK +pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 +vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq +OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ +/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 +2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI ++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 +MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo +tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= +-----END CERTIFICATE----- + +# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority +# Label: "Certum EC-384 CA" +# Serial: 160250656287871593594747141429395092468 +# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 +# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed +# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 +-----BEGIN CERTIFICATE----- +MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw +CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw +JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT +EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 +WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT +LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX +BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE +KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm +Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 +EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J +UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn +nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Root CA" +# Serial: 40870380103424195783807378461123655149 +# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 +# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 +# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd +-----BEGIN CERTIFICATE----- +MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 +MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu +MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV +BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw +MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg +U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ +n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q +p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq +NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF +8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 +HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa +mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi +7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF +ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P +qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ +v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 +Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 +vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD +ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 +WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo +zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR +5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ +GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq +0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D +P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM +qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP +0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf +E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb +-----END CERTIFICATE----- + +# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Label: "TunTrust Root CA" +# Serial: 108534058042236574382096126452369648152337120275 +# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 +# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb +# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL +BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg +Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv +b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG +EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u +IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ +n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd +2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF +VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ +GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF +li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU +r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 +eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb +MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg +jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB +7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW +5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE +ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 +90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z +xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu +QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 +FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH +22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP +xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn +dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 +Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b +nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ +CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH +u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj +d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS RSA Root CA 2021" +# Serial: 76817823531813593706434026085292783742 +# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 +# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d +# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d +-----BEGIN 
CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs +MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg +Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL +MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl +YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv +b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l +mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE +4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv +a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M +pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw +Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b +LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY +AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB +AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq +E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr +W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ +CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU +X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 +f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja +H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP +JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P +zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt +jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 +/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT +BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 +aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW +xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU +63ZTGI0RmLo= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS ECC Root CA 2021" +# Serial: 137515985548005187474074462014555733966 +# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 +# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 +# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 +-----BEGIN CERTIFICATE----- +MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw +CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh +cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v +dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG +A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj +aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg +Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 +KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y +STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw +SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN +nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: 
CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 1977337328857672817 +# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 +# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe +# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 +MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc +tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd +IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j +b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC +AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw +ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m +iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF +Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ +hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P +Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE +EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV +1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t +CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR +5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw +f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 +ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK +GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV +-----END CERTIFICATE----- + +# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. 
+# Label: "vTrus ECC Root CA" +# Serial: 630369271402956006249506845124680065938238527194 +# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 +# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 +# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 +-----BEGIN CERTIFICATE----- +MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw +RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY +BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz +MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u +LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 +v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd +e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw +V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA +AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG +GJTO +-----END CERTIFICATE----- + +# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Label: "vTrus Root CA" +# Serial: 387574501246983434957692974888460947164905180485 +# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc +# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 +# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 +-----BEGIN CERTIFICATE----- +MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL +BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x +FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx +MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s +THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD +ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc +IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU +AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ +GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 +8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH +flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt +J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim +0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN +pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ +UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW +OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB +AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet +8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd +nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j +bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM +Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv +TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS +S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr +I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 +b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB +UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P +Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven +sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= +-----END CERTIFICATE----- + +# 
Issuer: CN=ISRG Root X2 O=Internet Security Research Group +# Subject: CN=ISRG Root X2 O=Internet Security Research Group +# Label: "ISRG Root X2" +# Serial: 87493402998870891108772069816698636114 +# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 +# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af +# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 +-----BEGIN CERTIFICATE----- +MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw +CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg +R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 +MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT +ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw +EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW ++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 +ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI +zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW +tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 +/q4AaOeMSQ+2b1tbFfLn +-----END CERTIFICATE----- + +# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Label: "HiPKI Root CA - G1" +# Serial: 60966262342023497858655262305426234976 +# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 +# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 +# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc +-----BEGIN CERTIFICATE----- +MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa +Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 +YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw +qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv +Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 +lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz +Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ +KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK +FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj +HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr +y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ +/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM +a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 +fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG +SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi +7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc +SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza +ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc +XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg +iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho +L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF +Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr 
+kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ +vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU +YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 159662223612894884239637590694 +# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc +# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 +# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 +-----BEGIN CERTIFICATE----- +MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD +VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw +MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g +UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT +BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx +uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV +HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ ++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 +bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 159662320309726417404178440727 +# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 +# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a +# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo +27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w +Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw +TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl +qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH +szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 +Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk +MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 +wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p +aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN +VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb +C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe +QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy +h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 +7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J +ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef +MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ 
+Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT +6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ +0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm +2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb +bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 159662449406622349769042896298 +# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc +# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 +# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt +nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY +6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu +MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k +RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg +f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV ++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo +dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW +Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa +G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq +gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H +vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 +0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC +B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u +NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg +yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev +HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 +xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR +TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg +JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV +7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl +6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 159662495401136852707857743206 +# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 +# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 +# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi 
+AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G +jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 +4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 +VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm +ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 159662532700760215368942768210 +# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 +# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 +# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi +QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR +HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D +9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 +p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD +-----END CERTIFICATE----- + +# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj +# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj +# Label: "Telia Root CA v2" +# Serial: 7288924052977061235122729490515358 +# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 +# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd +# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx +CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE +AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 +NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ +MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq +AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 +vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 +lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD +n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT +7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o +6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC +TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 +WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R +DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI +pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj +YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy +rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ +8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi +0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM 
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS +SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K +TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF +6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er +3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt +Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT +VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW +ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA +rBPuUBQemMc= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 1 2020" +# Serial: 165870826978392376648679885835942448534 +# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed +# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 +# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 +NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS +zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 +QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ +VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW +wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV +dWNbFJWcHwHP2NVypw87 +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 1 2020" +# Serial: 126288379621884218666039612629459926992 +# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e +# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 +# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 +NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC +/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD +wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 +OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
+y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb +gfM0agPnIjhQW+0ZT0MW +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS ECC P384 Root G5" +# Serial: 13129116028163249804115411775095713523 +# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed +# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee +# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 +-----BEGIN CERTIFICATE----- +MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp +Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 +MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ +bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS +7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp +0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS +B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 +BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ +LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 +DXZDjC5Ty3zfDBeWUA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS RSA4096 Root G5" +# Serial: 11930366277458970227240571539258396554 +# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 +# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 +# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN +MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT +HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN +NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs +IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ +ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 +2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp +wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM +pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD +nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po +sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx +Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd +Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX +KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe +XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL +tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv +TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN +AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw +GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H +PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF +O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ +REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik +AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ +p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw +MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF +qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK +ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root R1 O=Certainly +# Subject: CN=Certainly Root R1 O=Certainly +# Label: "Certainly Root R1" +# Serial: 188833316161142517227353805653483829216 +# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 +# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af +# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw +PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy +dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 +YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 +1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT +vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed +aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 +1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 +r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 +cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ +wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ +6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA +2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH +Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR +eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB +/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u +d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr +PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d +8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi +1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd +rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di +taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 +lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj +yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn +Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy +yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n +wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 +OV+KmalBWQewLK8= +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root E1 O=Certainly +# Subject: CN=Certainly Root E1 O=Certainly +# Label: "Certainly Root E1" +# Serial: 8168531406727139161245376702891150584 +# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 +# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b +# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 +-----BEGIN CERTIFICATE----- +MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw +CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu +bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ +BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s +eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 +QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 +hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm +ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG +BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Label: "Security Communication ECC RootCA1" +# Serial: 15446673492073852651 +# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 +# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 +# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 +-----BEGIN CERTIFICATE----- +MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT +AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD +VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx +NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT +HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 +IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl +dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK +ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu +9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O +be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA1" +# Serial: 113562791157148395269083148143378328608 +# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 +# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a +# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU +MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI +T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz +MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF +SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh +bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z +xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ +spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 +58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR +at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll +5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq +nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK +V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ +pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO +z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn +jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ +WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF +7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE +AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 
+YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli +awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u ++2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88 +X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN +SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo +P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI ++pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz +znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9 +eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2 +YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy +r/6zcCwupvI= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA2" +# Serial: 58605626836079930195615843123109055211 +# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c +# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6 +# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82 +-----BEGIN CERTIFICATE----- +MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw +CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ +VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy +MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ +TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS +b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B +IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+ ++kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK +sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA +94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B +43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root E46" +# Serial: 88989738453351742415770396670917916916 +# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 +# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a +# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 +-----BEGIN CERTIFICATE----- +MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw +CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T +ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN +MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG +A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT +ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC +WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ +6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B +Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa +qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q +4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# 
Label: "Sectigo Public Server Authentication Root R46" +# Serial: 156256931880233212765902055439220583700 +# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 +# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 +# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 +-----BEGIN CERTIFICATE----- +MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf +MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD +Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw +HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY +MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp +YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa +ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz +SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf +iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X +ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 +IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS +VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE +SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu ++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt +8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L +HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt +zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P +AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c +mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ +YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 +gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA +Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB +JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX +DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui +TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 +dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 +LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp +0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY +QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS RSA Root CA 2022" +# Serial: 148535279242832292258835760425842727825 +# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da +# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca +# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed +-----BEGIN CERTIFICATE----- +MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO +MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD +DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX +DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw +b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC +AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP +L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY +t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins +S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 
+PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO +L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 +R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w +dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS ++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS +d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG +AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f +gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j +BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z +NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt +hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM +QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf +R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ +DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW +P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy +lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq +bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w +AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q +r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji +Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU +98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS ECC Root CA 2022" +# Serial: 26605119622390491762507526719404364228 +# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 +# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 +# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 +-----BEGIN CERTIFICATE----- +MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT +U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 +MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh +dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm +acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN +SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME +GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW +uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp +15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN +b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA ECC TLS 2021" +# Serial: 81873346711060652204712539181482831616 +# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 +# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd +# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 +-----BEGIN CERTIFICATE----- +MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w +LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w +CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 +MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF 
+Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI +zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X +tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 +AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 +KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD +aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu +CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo +9H1/IISpQuQo +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA RSA TLS 2021" +# Serial: 111436099570196163832749341232207667876 +# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 +# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 +# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f +-----BEGIN CERTIFICATE----- +MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM +MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx +MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 +MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD +QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z +4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv +Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ +kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs +GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln +nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh +3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD +0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy +geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 +ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB +c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI +pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB +DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS +4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs +o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ +qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw +xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM +rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 +AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR +0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY +o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 +dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE +oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G3" +# Serial: 576386314500428537169965010905813481816650257167 +# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04 +# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7 +# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08 +-----BEGIN CERTIFICATE----- +MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM +BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp +ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe +Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw +IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU +cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC +DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS +T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK +AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1 +nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep +qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA +yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs +hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX +zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv +kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT +f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA +uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB +o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih +MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E +BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4 +wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2 +XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1 +JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j +ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV +VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx +xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on +AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d +7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj +gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV ++Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo +FGWsJwt0ivKH +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G4" +# Serial: 451799571007117016466790293371524403291602933463 +# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb +# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a +# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c +-----BEGIN CERTIFICATE----- +MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw +WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs +IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y +MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD +VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz +dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx +s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw +LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij +YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD +pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE +AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR +UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj +/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA== +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS ECC Root 2020" +# Serial: 72082518505882327255703894282316633856 +# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd +# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec +# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1 +-----BEGIN CERTIFICATE----- +MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw +CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH +bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw +MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx +JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE +AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O +tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP +f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA +MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di +z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn +27iQ7t0l +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS RSA Root 2023" +# Serial: 44676229530606711399881795178081572759 +# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2 +# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93 +# SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj +MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0 +eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy +MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC +REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG 
+A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9 +cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV +cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA +U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6 +Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug +BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy +8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J +co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg +8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8 +rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12 +mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg ++y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX +gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2 +p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ +pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm +9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw +M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd +GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+ +CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t +xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+ +w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK +L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj +X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q +ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm +dTdmQRCsu/WU48IxK63nI1bMNSWSs1A= +-----END CERTIFICATE----- + +# Issuer: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA +# Subject: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA +# Label: "FIRMAPROFESIONAL CA ROOT-A WEB" +# Serial: 65916896770016886708751106294915943533 +# MD5 Fingerprint: 82:b2:ad:45:00:82:b0:66:63:f8:5f:c3:67:4e:ce:a3 +# SHA1 Fingerprint: a8:31:11:74:a6:14:15:0d:ca:77:dd:0e:e4:0c:5d:58:fc:a0:72:a5 +# SHA256 Fingerprint: be:f2:56:da:f2:6e:9c:69:bd:ec:16:02:35:97:98:f3:ca:f7:18:21:a0:3e:01:82:57:c5:3c:65:61:7f:3d:4a +-----BEGIN CERTIFICATE----- +MIICejCCAgCgAwIBAgIQMZch7a+JQn81QYehZ1ZMbTAKBggqhkjOPQQDAzBuMQsw +CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE +YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB +IFJPT1QtQSBXRUIwHhcNMjIwNDA2MDkwMTM2WhcNNDcwMzMxMDkwMTM2WjBuMQsw +CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE +YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB +IFJPT1QtQSBXRUIwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARHU+osEaR3xyrq89Zf +e9MEkVz6iMYiuYMQYneEMy3pA4jU4DP37XcsSmDq5G+tbbT4TIqk5B/K6k84Si6C +cyvHZpsKjECcfIr28jlgst7L7Ljkb+qbXbdTkBgyVcUgt5SjYzBhMA8GA1UdEwEB +/wQFMAMBAf8wHwYDVR0jBBgwFoAUk+FDY1w8ndYn81LsF7Kpryz3dvgwHQYDVR0O +BBYEFJPhQ2NcPJ3WJ/NS7Beyqa8s93b4MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjO +PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw +hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG +XSaQpYXFuXqUPoeovQA= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA CYBER Root CA" +# Serial: 85076849864375384482682434040119489222 +# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51 +# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66 +# SHA256 Fingerprint: 
3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58 +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ +MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290 +IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5 +WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO +LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg +Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P +40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF +avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/ +34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i +JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu +j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf +Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP +2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA +S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA +oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC +kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW +5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd +BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB +AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t +tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn +68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn +TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t +RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx +f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI +Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz +8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4 +NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX +xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6 +t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. 
+# Label: "SecureSign Root CA12" +# Serial: 587887345431707215246142177076162061960426065942 +# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8 +# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4 +# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u +LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw +NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD +eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS +b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF +KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt +p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd +J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur +FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J +hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K +h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF +AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld +mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ +mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA +8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV +55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/ +yOPiZwud9AzqVN/Ssq+xIvEg37xEHA== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. +# Label: "SecureSign Root CA14" +# Serial: 575790784512929437950770173562378038616896959179 +# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5 +# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f +# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38 +-----BEGIN CERTIFICATE----- +MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM +BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u +LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw +NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD +eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS +b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/ +FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg +vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy +6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo +/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J +kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ +0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib +y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac +18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs +0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB +SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL +ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk +86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E +rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib +ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT 
+zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS +DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4 +2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo +FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy +K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6 +dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl +Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB +365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c +JRNItX+S +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. +# Label: "SecureSign Root CA15" +# Serial: 126083514594751269499665114766174399806381178503 +# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47 +# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d +# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a +-----BEGIN CERTIFICATE----- +MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw +UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM +dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy +NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl +cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290 +IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4 +wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR +ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT +9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp +4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6 +bkU6iYAZezKYVWOr62Nuk22rGwlgMU4= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 2 2023" +# Serial: 153168538924886464690566649552453098598 +# MD5 Fingerprint: e1:09:ed:d3:60:d4:56:1b:47:1f:b7:0c:5f:1b:5f:85 +# SHA1 Fingerprint: 2d:b0:70:ee:71:94:af:69:68:17:db:79:ce:58:9f:a0:6b:96:f7:87 +# SHA256 Fingerprint: 05:52:e6:f8:3f:df:65:e8:fa:96:70:e6:66:df:28:a4:e2:13:40:b5:10:cb:e5:25:66:f9:7c:4f:b9:4b:2b:d1 +-----BEGIN CERTIFICATE----- +MIIFqTCCA5GgAwIBAgIQczswBEhb2U14LnNLyaHcZjANBgkqhkiG9w0BAQ0FADBI +MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE +LVRSVVNUIEJSIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA4NTYzMVoXDTM4MDUw +OTA4NTYzMFowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi +MCAGA1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAK7/CVmRgApKaOYkP7in5Mg6CjoWzckjYaCTcfKr +i3OPoGdlYNJUa2NRb0kz4HIHE304zQaSBylSa053bATTlfrdTIzZXcFhfUvnKLNE +gXtRr90zsWh81k5M/itoucpmacTsXld/9w3HnDY25QdgrMBM6ghs7wZ8T1soegj8 +k12b9py0i4a6Ibn08OhZWiihNIQaJZG2tY/vsvmA+vk9PBFy2OMvhnbFeSzBqZCT +Rphny4NqoFAjpzv2gTng7fC5v2Xx2Mt6++9zA84A9H3X4F07ZrjcjrqDy4d2A/wl +2ecjbwb9Z/Pg/4S8R7+1FhhGaRTMBffb00msa8yr5LULQyReS2tNZ9/WtT5PeB+U +cSTq3nD88ZP+npNa5JRal1QMNXtfbO4AHyTsA7oC9Xb0n9Sa7YUsOCIvx9gvdhFP +/Wxc6PWOJ4d/GUohR5AdeY0cW/jPSoXk7bNbjb7EZChdQcRurDhaTyN0dKkSw/bS +uREVMweR2Ds3OmMwBtHFIjYoYiMQ4EbMl6zWK11kJNXuHA7e+whadSr2Y23OC0K+ +0bpwHJwh5Q8xaRfX/Aq03u2AnMuStIv13lmiWAmlY0cL4UEyNEHZmrHZqLAbWt4N +DfTisl01gLmB1IRpkQLLddCNxbU9CZEJjxShFHR5PtbJFR2kWVki3PaKRT08EtY+ +XTIvAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUZ5Dw1t61 
+GNVGKX5cq/ieCLxklRAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG +OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfYnJfcm9vdF9jYV8y +XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQA097N3U9swFrktpSHxQCF16+tI +FoE9c+CeJyrrd6kTpGoKWloUMz1oH4Guaf2Mn2VsNELZLdB/eBaxOqwjMa1ef67n +riv6uvw8l5VAk1/DLQOj7aRvU9f6QA4w9QAgLABMjDu0ox+2v5Eyq6+SmNMW5tTR +VFxDWy6u71cqqLRvpO8NVhTaIasgdp4D/Ca4nj8+AybmTNudX0KEPUUDAxxZiMrc +LmEkWqTqJwtzEr5SswrPMhfiHocaFpVIbVrg0M8JkiZmkdijYQ6qgYF/6FKC0ULn +4B0Y+qSFNueG4A3rvNTJ1jxD8V1Jbn6Bm2m1iWKPiFLY1/4nwSPFyysCu7Ff/vtD +hQNGvl3GyiEm/9cCnnRK3PgTFbGBVzbLZVzRHTF36SXDw7IyN9XxmAnkbWOACKsG +koHU6XCPpz+y7YaMgmo1yEJagtFSGkUPFaUA8JR7ZSdXOUPPfH/mvTWze/EZTN46 +ls/pdu4D58JDUjxqgejBWoC9EV2Ta/vH5mQ/u2kc6d0li690yVRAysuTEwrt+2aS +Ecr1wPrYg1UDfNPFIkZ1cGt5SAYqgpq/5usWDiJFAbzdNpQ0qTUmiteXue4Icr80 +knCDgKs4qllo3UCkGJCy89UDyibK79XH4I9TjvAA46jtn/mtd+ArY0+ew+43u3gJ +hJ65bvspmZDogNOfJA== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia TLS ECC Root CA O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia TLS ECC Root CA O=TrustAsia Technologies, Inc. +# Label: "TrustAsia TLS ECC Root CA" +# Serial: 310892014698942880364840003424242768478804666567 +# MD5 Fingerprint: 09:48:04:77:d2:fc:65:93:71:66:b1:11:95:4f:06:8c +# SHA1 Fingerprint: b5:ec:39:f3:a1:66:37:ae:c3:05:94:57:e2:be:11:be:b7:a1:7f:36 +# SHA256 Fingerprint: c0:07:6b:9e:f0:53:1f:b1:a6:56:d6:7c:4e:be:97:cd:5d:ba:a4:1e:f4:45:98:ac:c2:48:98:78:c9:2d:87:11 +-----BEGIN CERTIFICATE----- +MIICMTCCAbegAwIBAgIUNnThTXxlE8msg1UloD5Sfi9QaMcwCgYIKoZIzj0EAwMw +WDELMAkGA1UEBhMCQ04xJTAjBgNVBAoTHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs +IEluYy4xIjAgBgNVBAMTGVRydXN0QXNpYSBUTFMgRUNDIFJvb3QgQ0EwHhcNMjQw +NTE1MDU0MTU2WhcNNDQwNTE1MDU0MTU1WjBYMQswCQYDVQQGEwJDTjElMCMGA1UE +ChMcVHJ1c3RBc2lhIFRlY2hub2xvZ2llcywgSW5jLjEiMCAGA1UEAxMZVHJ1c3RB +c2lhIFRMUyBFQ0MgUm9vdCBDQTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLh/pVs/ +AT598IhtrimY4ZtcU5nb9wj/1WrgjstEpvDBjL1P1M7UiFPoXlfXTr4sP/MSpwDp +guMqWzJ8S5sUKZ74LYO1644xST0mYekdcouJtgq7nDM1D9rs3qlKH8kzsaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQULIVTu7FDzTLqnqOH/qKYqKaT6RAw +DgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2gAMGUCMFRH18MtYYZI9HlaVQ01 +L18N9mdsd0AaRuf4aFtOJx24mH1/k78ITcTaRTChD15KeAIxAKORh/IRM4PDwYqR +OkwrULG9IpRdNYlzg8WbGf60oenUoWa2AaU2+dhoYSi3dOGiMQ== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia TLS RSA Root CA O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia TLS RSA Root CA O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia TLS RSA Root CA" +# Serial: 160405846464868906657516898462547310235378010780 +# MD5 Fingerprint: 3b:9e:c3:86:0f:34:3c:6b:c5:46:c4:8e:1d:e7:19:12 +# SHA1 Fingerprint: a5:46:50:c5:62:ea:95:9a:1a:a7:04:6f:17:58:c7:29:53:3d:03:fa +# SHA256 Fingerprint: 06:c0:8d:7d:af:d8:76:97:1e:b1:12:4f:e6:7f:84:7e:c0:c7:a1:58:d3:ea:53:cb:e9:40:e2:ea:97:91:f4:c3 +-----BEGIN CERTIFICATE----- +MIIFgDCCA2igAwIBAgIUHBjYz+VTPyI1RlNUJDxsR9FcSpwwDQYJKoZIhvcNAQEM +BQAwWDELMAkGA1UEBhMCQ04xJTAjBgNVBAoTHFRydXN0QXNpYSBUZWNobm9sb2dp +ZXMsIEluYy4xIjAgBgNVBAMTGVRydXN0QXNpYSBUTFMgUlNBIFJvb3QgQ0EwHhcN +MjQwNTE1MDU0MTU3WhcNNDQwNTE1MDU0MTU2WjBYMQswCQYDVQQGEwJDTjElMCMG +A1UEChMcVHJ1c3RBc2lhIFRlY2hub2xvZ2llcywgSW5jLjEiMCAGA1UEAxMZVHJ1 +c3RBc2lhIFRMUyBSU0EgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCC +AgoCggIBAMMWuBtqpERz5dZO9LnPWwvB0ZqB9WOwj0PBuwhaGnrhB3YmH49pVr7+ +NmDQDIPNlOrnxS1cLwUWAp4KqC/lYCZUlviYQB2srp10Zy9U+5RjmOMmSoPGlbYJ +Q1DNDX3eRA5gEk9bNb2/mThtfWza4mhzH/kxpRkQcwUqwzIZheo0qt1CHjCNP561 +HmHVb70AcnKtEj+qpklz8oYVlQwQX1Fkzv93uMltrOXVmPGZLmzjyUT5tUMnCE32 +ft5EebuyjBza00tsLtbDeLdM1aTk2tyKjg7/D8OmYCYozza/+lcK7Fs/6TAWe8Tb +xNRkoDD75f0dcZLdKY9BWN4ArTr9PXwaqLEX8E40eFgl1oUh63kd0Nyrz2I8sMeX +i9bQn9P+PN7F4/w6g3CEIR0JwqH8uyghZVNgepBtljhb//HXeltt08lwSUq6HTrQ +UNoyIBnkiz/r1RYmNzz7dZ6wB3C4FGB33PYPXFIKvF1tjVEK2sUYyJtt3LCDs3+j +TnhMmCWr8n4uIF6CFabW2I+s5c0yhsj55NqJ4js+k8UTav/H9xj8Z7XvGCxUq0DT +bE3txci3OE9kxJRMT6DNrqXGJyV1J23G2pyOsAWZ1SgRxSHUuPzHlqtKZFlhaxP8 +S8ySpg+kUb8OWJDZgoM5pl+z+m6Ss80zDoWo8SnTq1mt1tve1CuBAgMBAAGjQjBA +MA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFLgHkXlcBvRG/XtZylomkadFK/hT +MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQwFAAOCAgEAIZtqBSBdGBanEqT3 +Rz/NyjuujsCCztxIJXgXbODgcMTWltnZ9r96nBO7U5WS/8+S4PPFJzVXqDuiGev4 +iqME3mmL5Dw8veWv0BIb5Ylrc5tvJQJLkIKvQMKtuppgJFqBTQUYo+IzeXoLH5Pt +7DlK9RME7I10nYEKqG/odv6LTytpEoYKNDbdgptvT+Bz3Ul/KD7JO6NXBNiT2Twp +2xIQaOHEibgGIOcberyxk2GaGUARtWqFVwHxtlotJnMnlvm5P1vQiJ3koP26TpUJ +g3933FEFlJ0gcXax7PqJtZwuhfG5WyRasQmr2soaB82G39tp27RIGAAtvKLEiUUj +pQ7hRGU+isFqMB3iYPg6qocJQrmBktwliJiJ8Xw18WLK7nn4GS/+X/jbh87qqA8M +pugLoDzga5SYnH+tBuYc6kIQX+ImFTw3OffXvO645e8D7r0i+yiGNFjEWn9hongP +XvPKnbwbPKfILfanIhHKA9jnZwqKDss1jjQ52MjqjZ9k4DewbNfFj8GQYSbbJIwe +SsCI3zWQzj8C9GRh3sfIB5XeMhg6j6JCQCTl1jNdfK7vsU1P1FeQNWrcrgSXSYk0 +ly4wBOeY99sLAZDBHwo/+ML+TvrbmnNzFrwFuHnYWa8G5z9nODmxfKuU4CkUpijy +323imttUQ/hHWKNddBWcwauwxzQ= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 2 2023" +# Serial: 139766439402180512324132425437959641711 +# MD5 Fingerprint: 96:b4:78:09:f0:09:cb:77:eb:bb:1b:4d:6f:36:bc:b6 +# SHA1 Fingerprint: a5:5b:d8:47:6c:8f:19:f7:4c:f4:6d:6b:b6:c2:79:82:22:df:54:8b +# SHA256 Fingerprint: 8e:82:21:b2:e7:d4:00:78:36:a1:67:2f:0d:cc:29:9c:33:bc:07:d3:16:f1:32:fa:1a:20:6d:58:71:50:f1:ce +-----BEGIN CERTIFICATE----- +MIIFqTCCA5GgAwIBAgIQaSYJfoBLTKCnjHhiU19abzANBgkqhkiG9w0BAQ0FADBI +MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE +LVRSVVNUIEVWIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA5MTAzM1oXDTM4MDUw +OTA5MTAzMlowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi +MCAGA1UEAxMZRC1UUlVTVCBFViBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBANiOo4mAC7JXUtypU0w3uX9jFxPvp1sjW2l1sJkK +F8GLxNuo4MwxusLyzV3pt/gdr2rElYfXR8mV2IIEUD2BCP/kPbOx1sWy/YgJ25yE +7CUXFId/MHibaljJtnMoPDT3mfd/06b4HEV8rSyMlD/YZxBTfiLNTiVR8CUkNRFe +EMbsh2aJgWi6zCudR3Mfvc2RpHJqnKIbGKBv7FD0fUDCqDDPvXPIEysQEx6Lmqg6 +lHPTGGkKSv/BAQP/eX+1SH977ugpbzZMlWGG2Pmic4ruri+W7mjNPU0oQvlFKzIb 
+RlUWaqZLKfm7lVa/Rh3sHZMdwGWyH6FDrlaeoLGPaxK3YG14C8qKXO0elg6DpkiV +jTujIcSuWMYAsoS0I6SWhjW42J7YrDRJmGOVxcttSEfi8i4YHtAxq9107PncjLgc +jmgjutDzUNzPZY9zOjLHfP7KgiJPvo5iR2blzYfi6NUPGJ/lBHJLRjwQ8kTCZFZx +TnXonMkmdMV9WdEKWw9t/p51HBjGGjp82A0EzM23RWV6sY+4roRIPrN6TagD4uJ+ +ARZZaBhDM7DS3LAaQzXupdqpRlyuhoFBAUp0JuyfBr/CBTdkdXgpaP3F9ev+R/nk +hbDhezGdpn9yo7nELC7MmVcOIQxFAZRl62UJxmMiCzNJkkg8/M3OsD6Onov4/knF +NXJHAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUqvyREBuH +kV8Wub9PS5FeAByxMoAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG +OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfZXZfcm9vdF9jYV8y +XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQCTy6UfmRHsmg1fLBWTxj++EI14 +QvBukEdHjqOSMo1wj/Zbjb6JzkcBahsgIIlbyIIQbODnmaprxiqgYzWRaoUlrRc4 +pZt+UPJ26oUFKidBK7GB0aL2QHWpDsvxVUjY7NHss+jOFKE17MJeNRqrphYBBo7q +3C+jisosketSjl8MmxfPy3MHGcRqwnNU73xDUmPBEcrCRbH0O1P1aa4846XerOhU +t7KR/aypH/KH5BfGSah82ApB9PI+53c0BFLd6IHyTS9URZ0V4U/M5d40VxDJI3IX +cI1QcB9WbMy5/zpaT2N6w25lBx2Eof+pDGOJbbJAiDnXH3dotfyc1dZnaVuodNv8 +ifYbMvekJKZ2t0dT741Jj6m2g1qllpBFYfXeA08mD6iL8AOWsKwV0HFaanuU5nCT +2vFp4LJiTZ6P/4mdm13NRemUAiKN4DV/6PEEeXFsVIP4M7kFMhtYVRFP0OUnR3Hs +7dpn1mKmS00PaaLJvOwiS5THaJQXfuKOKD62xur1NGyfN4gHONuGcfrNlUhDbqNP +gofXNJhuS5N5YHVpD/Aa1VP6IQzCP+k/HxiMkl14p3ZnGbuy6n/pcAlWVqOwDAst +Nl7F6cTVg8uGF5csbBNvh1qvSaYd2804BC5f4ko1Di1L+KIkBI3Y4WNeApI02phh +XBxvWHZks/wCuPWdCg== +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign RSA TLS Root CA 2022 - 1 O=SwissSign AG +# Subject: CN=SwissSign RSA TLS Root CA 2022 - 1 O=SwissSign AG +# Label: "SwissSign RSA TLS Root CA 2022 - 1" +# Serial: 388078645722908516278762308316089881486363258315 +# MD5 Fingerprint: 16:2e:e4:19:76:81:85:ba:8e:91:58:f1:15:ef:72:39 +# SHA1 Fingerprint: 81:34:0a:be:4c:cd:ce:cc:e7:7d:cc:8a:d4:57:e2:45:a0:77:5d:ce +# SHA256 Fingerprint: 19:31:44:f4:31:e0:fd:db:74:07:17:d4:de:92:6a:57:11:33:88:4b:43:60:d3:0e:27:29:13:cb:e6:60:ce:41 +-----BEGIN CERTIFICATE----- +MIIFkzCCA3ugAwIBAgIUQ/oMX04bgBhE79G0TzUfRPSA7cswDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzErMCkGA1UE +AxMiU3dpc3NTaWduIFJTQSBUTFMgUm9vdCBDQSAyMDIyIC0gMTAeFw0yMjA2MDgx +MTA4MjJaFw00NzA2MDgxMTA4MjJaMFExCzAJBgNVBAYTAkNIMRUwEwYDVQQKEwxT +d2lzc1NpZ24gQUcxKzApBgNVBAMTIlN3aXNzU2lnbiBSU0EgVExTIFJvb3QgQ0Eg +MjAyMiAtIDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDLKmjiC8NX +vDVjvHClO/OMPE5Xlm7DTjak9gLKHqquuN6orx122ro10JFwB9+zBvKK8i5VUXu7 +LCTLf5ImgKO0lPaCoaTo+nUdWfMHamFk4saMla+ju45vVs9xzF6BYQ1t8qsCLqSX +5XH8irCRIFucdFJtrhUnWXjyCcplDn/L9Ovn3KlMd/YrFgSVrpxxpT8q2kFC5zyE +EPThPYxr4iuRR1VPuFa+Rd4iUU1OKNlfGUEGjw5NBuBwQCMBauTLE5tzrE0USJIt +/m2n+IdreXXhvhCxqohAWVTXz8TQm0SzOGlkjIHRI36qOTw7D59Ke4LKa2/KIj4x +0LDQKhySio/YGZxH5D4MucLNvkEM+KRHBdvBFzA4OmnczcNpI/2aDwLOEGrOyvi5 +KaM2iYauC8BPY7kGWUleDsFpswrzd34unYyzJ5jSmY0lpx+Gs6ZUcDj8fV3oT4MM +0ZPlEuRU2j7yrTrePjxF8CgPBrnh25d7mUWe3f6VWQQvdT/TromZhqwUtKiE+shd +OxtYk8EXlFXIC+OCeYSf8wCENO7cMdWP8vpPlkwGqnj73mSiI80fPsWMvDdUDrta +clXvyFu1cvh43zcgTFeRc5JzrBh3Q4IgaezprClG5QtO+DdziZaKHG29777YtvTK +wP1H8K4LWCDFyB02rpeNUIMmJCn3nTsPBQIDAQABo2MwYTAPBgNVHRMBAf8EBTAD +AQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBRvjmKLk0Ow4UD2p8P98Q+4 +DxU4pTAdBgNVHQ4EFgQUb45ii5NDsOFA9qfD/fEPuA8VOKUwDQYJKoZIhvcNAQEL +BQADggIBAKwsKUF9+lz1GpUYvyypiqkkVHX1uECry6gkUSsYP2OprphWKwVDIqO3 +10aewCoSPY6WlkDfDDOLazeROpW7OSltwAJsipQLBwJNGD77+3v1dj2b9l4wBlgz +Hqp41eZUBDqyggmNzhYzWUUo8aWjlw5DI/0LIICQ/+Mmz7hkkeUFjxOgdg3XNwwQ +iJb0Pr6VvfHDffCjw3lHC1ySFWPtUnWK50Zpy1FVCypM9fJkT6lc/2cyjlUtMoIc +gC9qkfjLvH4YoiaoLqNTKIftV+Vlek4ASltOU8liNr3CjlvrzG4ngRhZi0Rjn9UM +ZfQpZX+RLOV/fuiJz48gy20HQhFRJjKKLjpHE7iNvUcNCfAWpO2Whi4Z2L6MOuhF 
+LhG6rlrnub+xzI/goP+4s9GFe3lmozm1O2bYQL7Pt2eLSMkZJVX8vY3PXtpOpvJp +zv1/THfQwUY1mFwjmwJFQ5Ra3bxHrSL+ul4vkSkphnsh3m5kt8sNjzdbowhq6/Td +Ao9QAwKxuDdollDruF/UKIqlIgyKhPBZLtU30WHlQnNYKoH3dtvi4k0NX/a3vgW0 +rk4N3hY9A4GzJl5LuEsAz/+MF7psYC0nhzck5npgL7XTgwSqT0N1osGDsieYK7EO +gLrAhV5Cud+xYJHT6xh+cHiudoO+cVrQkOPKwRYlZ0rwtnu64ZzZ +-----END CERTIFICATE----- + +# Issuer: CN=OISTE Server Root ECC G1 O=OISTE Foundation +# Subject: CN=OISTE Server Root ECC G1 O=OISTE Foundation +# Label: "OISTE Server Root ECC G1" +# Serial: 47819833811561661340092227008453318557 +# MD5 Fingerprint: 42:a7:d2:35:ae:02:92:db:19:76:08:de:2f:05:b4:d4 +# SHA1 Fingerprint: 3b:f6:8b:09:ae:2a:92:7b:ba:e3:8d:3f:11:95:d9:e6:44:0c:45:e2 +# SHA256 Fingerprint: ee:c9:97:c0:c3:0f:21:6f:7e:3b:8b:30:7d:2b:ae:42:41:2d:75:3f:c8:21:9d:af:d1:52:0b:25:72:85:0f:49 +-----BEGIN CERTIFICATE----- +MIICNTCCAbqgAwIBAgIQI/nD1jWvjyhLH/BU6n6XnTAKBggqhkjOPQQDAzBLMQsw +CQYDVQQGEwJDSDEZMBcGA1UECgwQT0lTVEUgRm91bmRhdGlvbjEhMB8GA1UEAwwY +T0lTVEUgU2VydmVyIFJvb3QgRUNDIEcxMB4XDTIzMDUzMTE0NDIyOFoXDTQ4MDUy +NDE0NDIyN1owSzELMAkGA1UEBhMCQ0gxGTAXBgNVBAoMEE9JU1RFIEZvdW5kYXRp +b24xITAfBgNVBAMMGE9JU1RFIFNlcnZlciBSb290IEVDQyBHMTB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABBcv+hK8rBjzCvRE1nZCnrPoH7d5qVi2+GXROiFPqOujvqQy +cvO2Ackr/XeFblPdreqqLiWStukhEaivtUwL85Zgmjvn6hp4LrQ95SjeHIC6XG4N +2xml4z+cKrhAS93mT6NjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBQ3 +TYhlz/w9itWj8UnATgwQb0K0nDAdBgNVHQ4EFgQUN02IZc/8PYrVo/FJwE4MEG9C +tJwwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2kAMGYCMQCpKjAd0MKfkFFR +QD6VVCHNFmb3U2wIFjnQEnx/Yxvf4zgAOdktUyBFCxxgZzFDJe0CMQCSia7pXGKD +YmH5LVerVrkR3SW+ak5KGoJr3M/TvEqzPNcum9v4KGm8ay3sMaE641c= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE Server Root RSA G1 O=OISTE Foundation +# Subject: CN=OISTE Server Root RSA G1 O=OISTE Foundation +# Label: " OISTE Server Root RSA G1" +# Serial: 113845518112613905024960613408179309848 +# MD5 Fingerprint: 23:a7:9e:d4:70:b8:b9:14:57:41:8a:7e:44:59:e2:68 +# SHA1 Fingerprint: f7:00:34:25:94:88:68:31:e4:34:87:3f:70:fe:86:b3:86:9f:f0:6e +# SHA256 Fingerprint: 9a:e3:62:32:a5:18:9f:fd:db:35:3d:fd:26:52:0c:01:53:95:d2:27:77:da:c5:9d:b5:7b:98:c0:89:a6:51:e6 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIQVaXZZ5Qoxu0M+ifdWwFNGDANBgkqhkiG9w0BAQwFADBL +MQswCQYDVQQGEwJDSDEZMBcGA1UECgwQT0lTVEUgRm91bmRhdGlvbjEhMB8GA1UE +AwwYT0lTVEUgU2VydmVyIFJvb3QgUlNBIEcxMB4XDTIzMDUzMTE0MzcxNloXDTQ4 +MDUyNDE0MzcxNVowSzELMAkGA1UEBhMCQ0gxGTAXBgNVBAoMEE9JU1RFIEZvdW5k +YXRpb24xITAfBgNVBAMMGE9JU1RFIFNlcnZlciBSb290IFJTQSBHMTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAKqu9KuCz/vlNwvn1ZatkOhLKdxVYOPM +vLO8LZK55KN68YG0nnJyQ98/qwsmtO57Gmn7KNByXEptaZnwYx4M0rH/1ow00O7b +rEi56rAUjtgHqSSY3ekJvqgiG1k50SeH3BzN+Puz6+mTeO0Pzjd8JnduodgsIUzk +ik/HEzxux9UTl7Ko2yRpg1bTacuCErudG/L4NPKYKyqOBGf244ehHa1uzjZ0Dl4z +O8vbUZeUapU8zhhabkvG/AePLhq5SvdkNCncpo1Q4Y2LS+VIG24ugBA/5J8bZT8R +tOpXaZ+0AOuFJJkk9SGdl6r7NH8CaxWQrbueWhl/pIzY+m0o/DjH40ytas7ZTpOS +jswMZ78LS5bOZmdTaMsXEY5Z96ycG7mOaES3GK/m5Q9l3JUJsJMStR8+lKXHiHUh +sd4JJCpM4rzsTGdHwimIuQq6+cF0zowYJmXa92/GjHtoXAvuY8BeS/FOzJ8vD+Ho +mnqT8eDI278n5mUpezbgMxVz8p1rhAhoKzYHKyfMeNhqhw5HdPSqoBNdZH702xSu ++zrkL8Fl47l6QGzwBrd7KJvX4V84c5Ss2XCTLdyEr0YconosP4EmQufU2MVshGYR +i3drVByjtdgQ8K4p92cIiBdcuJd5z+orKu5YM+Vt6SmqZQENghPsJQtdLEByFSnT +kCz3GkPVavBpAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAU +8snBDw1jALvsRQ5KH7WxszbNDo0wHQYDVR0OBBYEFPLJwQ8NYwC77EUOSh+1sbM2 +zQ6NMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQwFAAOCAgEANGd5sjrG5T33 +I3K5Ce+SrScfoE4KsvXaFwyihdJ+klH9FWXXXGtkFu6KRcoMQzZENdl//nk6HOjG +5D1rd9QhEOP28yBOqb6J8xycqd+8MDoX0TJD0KqKchxRKEzdNsjkLWd9kYccnbz8 
+qyiWXmFcuCIzGEgWUOrKL+mlSdx/PKQZvDatkuK59EvV6wit53j+F8Bdh3foZ3dP +AGav9LEDOr4SfEE15fSmG0eLy3n31r8Xbk5l8PjaV8GUgeV6Vg27Rn9vkf195hfk +gSe7BYhW3SCl95gtkRlpMV+bMPKZrXJAlszYd2abtNUOshD+FKrDgHGdPY3ofRRs +YWSGRqbXVMW215AWRqWFyp464+YTFrYVI8ypKVL9AMb2kI5Wj4kI3Zaq5tNqqYY1 +9tVFeEJKRvwDyF7YZvZFZSS0vod7VSCd9521Kvy5YhnLbDuv0204bKt7ph6N/Ome +/msVuduCmsuY33OhkKCgxeDoAaijFJzIwZqsFVAzje18KotzlUBDJvyBpCpfOZC3 +J8tRd/iWkx7P8nd9H0aTolkelUTFLXVksNb54Dxp6gS1HAviRkRNQzuXSXERvSS2 +wq1yVAb+axj5d9spLFKebXd7Yv0PTY6YMjAwcRLWJTXjn/hvnLXrahut6hDTlhZy +BiElxky8j3C7DOReIoMt0r7+hVu05L0= +-----END CERTIFICATE----- diff --git a/.venv/lib/python3.12/site-packages/certifi/core.py b/.venv/lib/python3.12/site-packages/certifi/core.py new file mode 100644 index 0000000..1c9661c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/certifi/core.py @@ -0,0 +1,83 @@ +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem or its contents. +""" +import sys +import atexit + +def exit_cacert_ctx() -> None: + _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr] + + +if sys.version_info >= (3, 11): + + from importlib.resources import as_file, files + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) + + return _CACERT_PATH + + def contents() -> str: + return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") + +else: + + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the + # file in cases where we're inside of a zipimport situation until + # someone actually calls where(), but we don't want to re-extract + # the file on every call of where(), so we'll do it once then store + # it in a global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you + # to manage the cleanup of this file, so it doesn't actually + # return a path, it returns a context manager that will give + # you the path when you enter it and will do any cleanup when + # you leave it. In the common case of not needing a temporary + # file, it will just return the file system location and the + # __exit__() is a no-op. 
+ # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = get_path("certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) + + return _CACERT_PATH + + def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/.venv/lib/python3.12/site-packages/certifi/py.typed b/.venv/lib/python3.12/site-packages/certifi/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/METADATA b/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/METADATA new file mode 100644 index 0000000..3f433af --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/METADATA @@ -0,0 +1,84 @@ +Metadata-Version: 2.4 +Name: click +Version: 8.3.1 +Summary: Composable command line interface toolkit +Maintainer-email: Pallets +Requires-Python: >=3.10 +Description-Content-Type: text/markdown +License-Expression: BSD-3-Clause +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Typing :: Typed +License-File: LICENSE.txt +Requires-Dist: colorama; platform_system == 'Windows' +Project-URL: Changes, https://click.palletsprojects.com/page/changes/ +Project-URL: Chat, https://discord.gg/pallets +Project-URL: Documentation, https://click.palletsprojects.com/ +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Source, https://github.com/pallets/click/ + +
+ +# Click + +Click is a Python package for creating beautiful command line interfaces +in a composable way with as little code as necessary. It's the "Command +Line Interface Creation Kit". It's highly configurable but comes with +sensible defaults out of the box. + +It aims to make the process of writing command line tools quick and fun +while also preventing any frustration caused by the inability to +implement an intended CLI API. + +Click in three points: + +- Arbitrary nesting of commands +- Automatic help page generation +- Supports lazy loading of subcommands at runtime + + +## A Simple Example + +```python +import click + +@click.command() +@click.option("--count", default=1, help="Number of greetings.") +@click.option("--name", prompt="Your name", help="The person to greet.") +def hello(count, name): + """Simple program that greets NAME for a total of COUNT times.""" + for _ in range(count): + click.echo(f"Hello, {name}!") + +if __name__ == '__main__': + hello() +``` + +``` +$ python hello.py --count=3 +Your name: Click +Hello, Click! +Hello, Click! +Hello, Click! +``` + + +## Donate + +The Pallets organization develops and supports Click and other popular +packages. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, [please +donate today][]. + +[please donate today]: https://palletsprojects.com/donate + +## Contributing + +See our [detailed contributing documentation][contrib] for many ways to +contribute, including reporting issues, requesting features, asking or answering +questions, and making PRs. + +[contrib]: https://palletsprojects.com/contributing/ + diff --git a/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/RECORD b/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/RECORD new file mode 100644 index 0000000..77e5c98 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/RECORD @@ -0,0 +1,40 @@ +click-8.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +click-8.3.1.dist-info/METADATA,sha256=XZeBrMAE0ghTE88SjfrSDuSyNCpBPplxJR1tbwD9oZg,2621 +click-8.3.1.dist-info/RECORD,, +click-8.3.1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 +click-8.3.1.dist-info/licenses/LICENSE.txt,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 +click/__init__.py,sha256=6YyS1aeyknZ0LYweWozNZy0A9nZ_11wmYIhv3cbQrYo,4473 +click/__pycache__/__init__.cpython-312.pyc,, +click/__pycache__/_compat.cpython-312.pyc,, +click/__pycache__/_termui_impl.cpython-312.pyc,, +click/__pycache__/_textwrap.cpython-312.pyc,, +click/__pycache__/_utils.cpython-312.pyc,, +click/__pycache__/_winconsole.cpython-312.pyc,, +click/__pycache__/core.cpython-312.pyc,, +click/__pycache__/decorators.cpython-312.pyc,, +click/__pycache__/exceptions.cpython-312.pyc,, +click/__pycache__/formatting.cpython-312.pyc,, +click/__pycache__/globals.cpython-312.pyc,, +click/__pycache__/parser.cpython-312.pyc,, +click/__pycache__/shell_completion.cpython-312.pyc,, +click/__pycache__/termui.cpython-312.pyc,, +click/__pycache__/testing.cpython-312.pyc,, +click/__pycache__/types.cpython-312.pyc,, +click/__pycache__/utils.cpython-312.pyc,, +click/_compat.py,sha256=v3xBZkFbvA1BXPRkFfBJc6-pIwPI7345m-kQEnpVAs4,18693 +click/_termui_impl.py,sha256=rgCb3On8X5A4200rA5L6i13u5iapmFer7sru57Jy6zA,27093 +click/_textwrap.py,sha256=BOae0RQ6vg3FkNgSJyOoGzG1meGMxJ_ukWVZKx_v-0o,1400 +click/_utils.py,sha256=kZwtTf5gMuCilJJceS2iTCvRvCY-0aN5rJq8gKw7p8g,943 
+click/_winconsole.py,sha256=_vxUuUaxwBhoR0vUWCNuHY8VUefiMdCIyU2SXPqoF-A,8465 +click/core.py,sha256=U6Bfxt8GkjNDqyJ0HqXvluJHtyZ4sY5USAvM1Cdq7mQ,132105 +click/decorators.py,sha256=5P7abhJtAQYp_KHgjUvhMv464ERwOzrv2enNknlwHyQ,18461 +click/exceptions.py,sha256=8utf8w6V5hJXMnO_ic1FNrtbwuEn1NUu1aDwV8UqnG4,9954 +click/formatting.py,sha256=RVfwwr0rwWNpgGr8NaHodPzkIr7_tUyVh_nDdanLMNc,9730 +click/globals.py,sha256=gM-Nh6A4M0HB_SgkaF5M4ncGGMDHc_flHXu9_oh4GEU,1923 +click/parser.py,sha256=Q31pH0FlQZEq-UXE_ABRzlygEfvxPTuZbWNh4xfXmzw,19010 +click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +click/shell_completion.py,sha256=Cc4GQUFuWpfQBa9sF5qXeeYI7n3tI_1k6ZdSn4BZbT0,20994 +click/termui.py,sha256=hqCEjNndU-nzW08nRAkBaVgfZp_FdCA9KxfIWlKYaMc,31037 +click/testing.py,sha256=EERbzcl1br0mW0qBS9EqkknfNfXB9WQEW0ELIpkvuSs,19102 +click/types.py,sha256=ek54BNSFwPKsqtfT7jsqcc4WHui8AIFVMKM4oVZIXhc,39927 +click/utils.py,sha256=gCUoewdAhA-QLBUUHxrLh4uj6m7T1WjZZMNPvR0I7YA,20257 diff --git a/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/WHEEL new file mode 100644 index 0000000..d8b9936 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.12.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/licenses/LICENSE.txt b/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/licenses/LICENSE.txt new file mode 100644 index 0000000..d12a849 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/licenses/LICENSE.txt @@ -0,0 +1,28 @@ +Copyright 2014 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/lib/python3.12/site-packages/click/__init__.py b/.venv/lib/python3.12/site-packages/click/__init__.py new file mode 100644 index 0000000..1aa547c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/__init__.py @@ -0,0 +1,123 @@ +""" +Click is a simple Python module inspired by the stdlib optparse to make +writing command line scripts fun. 
Unlike other modules, it's based +around a simple API that does not come with too much magic and is +composable. +""" + +from __future__ import annotations + +from .core import Argument as Argument +from .core import Command as Command +from .core import CommandCollection as CommandCollection +from .core import Context as Context +from .core import Group as Group +from .core import Option as Option +from .core import Parameter as Parameter +from .decorators import argument as argument +from .decorators import command as command +from .decorators import confirmation_option as confirmation_option +from .decorators import group as group +from .decorators import help_option as help_option +from .decorators import make_pass_decorator as make_pass_decorator +from .decorators import option as option +from .decorators import pass_context as pass_context +from .decorators import pass_obj as pass_obj +from .decorators import password_option as password_option +from .decorators import version_option as version_option +from .exceptions import Abort as Abort +from .exceptions import BadArgumentUsage as BadArgumentUsage +from .exceptions import BadOptionUsage as BadOptionUsage +from .exceptions import BadParameter as BadParameter +from .exceptions import ClickException as ClickException +from .exceptions import FileError as FileError +from .exceptions import MissingParameter as MissingParameter +from .exceptions import NoSuchOption as NoSuchOption +from .exceptions import UsageError as UsageError +from .formatting import HelpFormatter as HelpFormatter +from .formatting import wrap_text as wrap_text +from .globals import get_current_context as get_current_context +from .termui import clear as clear +from .termui import confirm as confirm +from .termui import echo_via_pager as echo_via_pager +from .termui import edit as edit +from .termui import getchar as getchar +from .termui import launch as launch +from .termui import pause as pause +from .termui import progressbar as progressbar +from .termui import prompt as prompt +from .termui import secho as secho +from .termui import style as style +from .termui import unstyle as unstyle +from .types import BOOL as BOOL +from .types import Choice as Choice +from .types import DateTime as DateTime +from .types import File as File +from .types import FLOAT as FLOAT +from .types import FloatRange as FloatRange +from .types import INT as INT +from .types import IntRange as IntRange +from .types import ParamType as ParamType +from .types import Path as Path +from .types import STRING as STRING +from .types import Tuple as Tuple +from .types import UNPROCESSED as UNPROCESSED +from .types import UUID as UUID +from .utils import echo as echo +from .utils import format_filename as format_filename +from .utils import get_app_dir as get_app_dir +from .utils import get_binary_stream as get_binary_stream +from .utils import get_text_stream as get_text_stream +from .utils import open_file as open_file + + +def __getattr__(name: str) -> object: + import warnings + + if name == "BaseCommand": + from .core import _BaseCommand + + warnings.warn( + "'BaseCommand' is deprecated and will be removed in Click 9.0. Use" + " 'Command' instead.", + DeprecationWarning, + stacklevel=2, + ) + return _BaseCommand + + if name == "MultiCommand": + from .core import _MultiCommand + + warnings.warn( + "'MultiCommand' is deprecated and will be removed in Click 9.0. 
Use" + " 'Group' instead.", + DeprecationWarning, + stacklevel=2, + ) + return _MultiCommand + + if name == "OptionParser": + from .parser import _OptionParser + + warnings.warn( + "'OptionParser' is deprecated and will be removed in Click 9.0. The" + " old parser is available in 'optparse'.", + DeprecationWarning, + stacklevel=2, + ) + return _OptionParser + + if name == "__version__": + import importlib.metadata + import warnings + + warnings.warn( + "The '__version__' attribute is deprecated and will be removed in" + " Click 9.1. Use feature detection or" + " 'importlib.metadata.version(\"click\")' instead.", + DeprecationWarning, + stacklevel=2, + ) + return importlib.metadata.version("click") + + raise AttributeError(name) diff --git a/.venv/lib/python3.12/site-packages/click/_compat.py b/.venv/lib/python3.12/site-packages/click/_compat.py new file mode 100644 index 0000000..f2726b9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/_compat.py @@ -0,0 +1,622 @@ +from __future__ import annotations + +import codecs +import collections.abc as cabc +import io +import os +import re +import sys +import typing as t +from types import TracebackType +from weakref import WeakKeyDictionary + +CYGWIN = sys.platform.startswith("cygwin") +WIN = sys.platform.startswith("win") +auto_wrap_for_ansi: t.Callable[[t.TextIO], t.TextIO] | None = None +_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]") + + +def _make_text_stream( + stream: t.BinaryIO, + encoding: str | None, + errors: str | None, + force_readable: bool = False, + force_writable: bool = False, +) -> t.TextIO: + if encoding is None: + encoding = get_best_encoding(stream) + if errors is None: + errors = "replace" + return _NonClosingTextIOWrapper( + stream, + encoding, + errors, + line_buffering=True, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def is_ascii_encoding(encoding: str) -> bool: + """Checks if a given encoding is ascii.""" + try: + return codecs.lookup(encoding).name == "ascii" + except LookupError: + return False + + +def get_best_encoding(stream: t.IO[t.Any]) -> str: + """Returns the default stream encoding if not found.""" + rv = getattr(stream, "encoding", None) or sys.getdefaultencoding() + if is_ascii_encoding(rv): + return "utf-8" + return rv + + +class _NonClosingTextIOWrapper(io.TextIOWrapper): + def __init__( + self, + stream: t.BinaryIO, + encoding: str | None, + errors: str | None, + force_readable: bool = False, + force_writable: bool = False, + **extra: t.Any, + ) -> None: + self._stream = stream = t.cast( + t.BinaryIO, _FixupStream(stream, force_readable, force_writable) + ) + super().__init__(stream, encoding, errors, **extra) + + def __del__(self) -> None: + try: + self.detach() + except Exception: + pass + + def isatty(self) -> bool: + # https://bitbucket.org/pypy/pypy/issue/1803 + return self._stream.isatty() + + +class _FixupStream: + """The new io interface needs more from streams than streams + traditionally implement. As such, this fix-up code is necessary in + some circumstances. + + The forcing of readable and writable flags are there because some tools + put badly patched objects on sys (one such offender are certain version + of jupyter notebook). 
+ """ + + def __init__( + self, + stream: t.BinaryIO, + force_readable: bool = False, + force_writable: bool = False, + ): + self._stream = stream + self._force_readable = force_readable + self._force_writable = force_writable + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._stream, name) + + def read1(self, size: int) -> bytes: + f = getattr(self._stream, "read1", None) + + if f is not None: + return t.cast(bytes, f(size)) + + return self._stream.read(size) + + def readable(self) -> bool: + if self._force_readable: + return True + x = getattr(self._stream, "readable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.read(0) + except Exception: + return False + return True + + def writable(self) -> bool: + if self._force_writable: + return True + x = getattr(self._stream, "writable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.write(b"") + except Exception: + try: + self._stream.write(b"") + except Exception: + return False + return True + + def seekable(self) -> bool: + x = getattr(self._stream, "seekable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.seek(self._stream.tell()) + except Exception: + return False + return True + + +def _is_binary_reader(stream: t.IO[t.Any], default: bool = False) -> bool: + try: + return isinstance(stream.read(0), bytes) + except Exception: + return default + # This happens in some cases where the stream was already + # closed. In this case, we assume the default. + + +def _is_binary_writer(stream: t.IO[t.Any], default: bool = False) -> bool: + try: + stream.write(b"") + except Exception: + try: + stream.write("") + return False + except Exception: + pass + return default + return True + + +def _find_binary_reader(stream: t.IO[t.Any]) -> t.BinaryIO | None: + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_reader(stream, False): + return t.cast(t.BinaryIO, stream) + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_reader(buf, True): + return t.cast(t.BinaryIO, buf) + + return None + + +def _find_binary_writer(stream: t.IO[t.Any]) -> t.BinaryIO | None: + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_writer(stream, False): + return t.cast(t.BinaryIO, stream) + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_writer(buf, True): + return t.cast(t.BinaryIO, buf) + + return None + + +def _stream_is_misconfigured(stream: t.TextIO) -> bool: + """A stream is misconfigured if its encoding is ASCII.""" + # If the stream does not have an encoding set, we assume it's set + # to ASCII. This appears to happen in certain unittest + # environments. It's not quite clear what the correct behavior is + # but this at least will force Click to recover somehow. 
+ return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii") + + +def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: str | None) -> bool: + """A stream attribute is compatible if it is equal to the + desired value or the desired value is unset and the attribute + has a value. + """ + stream_value = getattr(stream, attr, None) + return stream_value == value or (value is None and stream_value is not None) + + +def _is_compatible_text_stream( + stream: t.TextIO, encoding: str | None, errors: str | None +) -> bool: + """Check if a stream's encoding and errors attributes are + compatible with the desired values. + """ + return _is_compat_stream_attr( + stream, "encoding", encoding + ) and _is_compat_stream_attr(stream, "errors", errors) + + +def _force_correct_text_stream( + text_stream: t.IO[t.Any], + encoding: str | None, + errors: str | None, + is_binary: t.Callable[[t.IO[t.Any], bool], bool], + find_binary: t.Callable[[t.IO[t.Any]], t.BinaryIO | None], + force_readable: bool = False, + force_writable: bool = False, +) -> t.TextIO: + if is_binary(text_stream, False): + binary_reader = t.cast(t.BinaryIO, text_stream) + else: + text_stream = t.cast(t.TextIO, text_stream) + # If the stream looks compatible, and won't default to a + # misconfigured ascii encoding, return it as-is. + if _is_compatible_text_stream(text_stream, encoding, errors) and not ( + encoding is None and _stream_is_misconfigured(text_stream) + ): + return text_stream + + # Otherwise, get the underlying binary reader. + possible_binary_reader = find_binary(text_stream) + + # If that's not possible, silently use the original reader + # and get mojibake instead of exceptions. + if possible_binary_reader is None: + return text_stream + + binary_reader = possible_binary_reader + + # Default errors to replace instead of strict in order to get + # something that works. + if errors is None: + errors = "replace" + + # Wrap the binary stream in a text stream with the correct + # encoding parameters. 
+ return _make_text_stream( + binary_reader, + encoding, + errors, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def _force_correct_text_reader( + text_reader: t.IO[t.Any], + encoding: str | None, + errors: str | None, + force_readable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_reader, + encoding, + errors, + _is_binary_reader, + _find_binary_reader, + force_readable=force_readable, + ) + + +def _force_correct_text_writer( + text_writer: t.IO[t.Any], + encoding: str | None, + errors: str | None, + force_writable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_writer, + encoding, + errors, + _is_binary_writer, + _find_binary_writer, + force_writable=force_writable, + ) + + +def get_binary_stdin() -> t.BinaryIO: + reader = _find_binary_reader(sys.stdin) + if reader is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdin.") + return reader + + +def get_binary_stdout() -> t.BinaryIO: + writer = _find_binary_writer(sys.stdout) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdout.") + return writer + + +def get_binary_stderr() -> t.BinaryIO: + writer = _find_binary_writer(sys.stderr) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stderr.") + return writer + + +def get_text_stdin(encoding: str | None = None, errors: str | None = None) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdin, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True) + + +def get_text_stdout(encoding: str | None = None, errors: str | None = None) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdout, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True) + + +def get_text_stderr(encoding: str | None = None, errors: str | None = None) -> t.TextIO: + rv = _get_windows_console_stream(sys.stderr, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True) + + +def _wrap_io_open( + file: str | os.PathLike[str] | int, + mode: str, + encoding: str | None, + errors: str | None, +) -> t.IO[t.Any]: + """Handles not passing ``encoding`` and ``errors`` in binary mode.""" + if "b" in mode: + return open(file, mode) + + return open(file, mode, encoding=encoding, errors=errors) + + +def open_stream( + filename: str | os.PathLike[str], + mode: str = "r", + encoding: str | None = None, + errors: str | None = "strict", + atomic: bool = False, +) -> tuple[t.IO[t.Any], bool]: + binary = "b" in mode + filename = os.fspath(filename) + + # Standard streams first. These are simple because they ignore the + # atomic flag. Use fsdecode to handle Path("-"). + if os.fsdecode(filename) == "-": + if any(m in mode for m in ["w", "a", "x"]): + if binary: + return get_binary_stdout(), False + return get_text_stdout(encoding=encoding, errors=errors), False + if binary: + return get_binary_stdin(), False + return get_text_stdin(encoding=encoding, errors=errors), False + + # Non-atomic writes directly go out through the regular open functions. 
+ if not atomic: + return _wrap_io_open(filename, mode, encoding, errors), True + + # Some usability stuff for atomic writes + if "a" in mode: + raise ValueError( + "Appending to an existing file is not supported, because that" + " would involve an expensive `copy`-operation to a temporary" + " file. Open the file in normal `w`-mode and copy explicitly" + " if that's what you're after." + ) + if "x" in mode: + raise ValueError("Use the `overwrite`-parameter instead.") + if "w" not in mode: + raise ValueError("Atomic writes only make sense with `w`-mode.") + + # Atomic writes are more complicated. They work by opening a file + # as a proxy in the same folder and then using the fdopen + # functionality to wrap it in a Python file. Then we wrap it in an + # atomic file that moves the file over on close. + import errno + import random + + try: + perm: int | None = os.stat(filename).st_mode + except OSError: + perm = None + + flags = os.O_RDWR | os.O_CREAT | os.O_EXCL + + if binary: + flags |= getattr(os, "O_BINARY", 0) + + while True: + tmp_filename = os.path.join( + os.path.dirname(filename), + f".__atomic-write{random.randrange(1 << 32):08x}", + ) + try: + fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm) + break + except OSError as e: + if e.errno == errno.EEXIST or ( + os.name == "nt" + and e.errno == errno.EACCES + and os.path.isdir(e.filename) + and os.access(e.filename, os.W_OK) + ): + continue + raise + + if perm is not None: + os.chmod(tmp_filename, perm) # in case perm includes bits in umask + + f = _wrap_io_open(fd, mode, encoding, errors) + af = _AtomicFile(f, tmp_filename, os.path.realpath(filename)) + return t.cast(t.IO[t.Any], af), True + + +class _AtomicFile: + def __init__(self, f: t.IO[t.Any], tmp_filename: str, real_filename: str) -> None: + self._f = f + self._tmp_filename = tmp_filename + self._real_filename = real_filename + self.closed = False + + @property + def name(self) -> str: + return self._real_filename + + def close(self, delete: bool = False) -> None: + if self.closed: + return + self._f.close() + os.replace(self._tmp_filename, self._real_filename) + self.closed = True + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._f, name) + + def __enter__(self) -> _AtomicFile: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + tb: TracebackType | None, + ) -> None: + self.close(delete=exc_type is not None) + + def __repr__(self) -> str: + return repr(self._f) + + +def strip_ansi(value: str) -> str: + return _ansi_re.sub("", value) + + +def _is_jupyter_kernel_output(stream: t.IO[t.Any]) -> bool: + while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)): + stream = stream._stream + + return stream.__class__.__module__.startswith("ipykernel.") + + +def should_strip_ansi( + stream: t.IO[t.Any] | None = None, color: bool | None = None +) -> bool: + if color is None: + if stream is None: + stream = sys.stdin + return not isatty(stream) and not _is_jupyter_kernel_output(stream) + return not color + + +# On Windows, wrap the output streams with colorama to support ANSI +# color codes. 
+# NOTE: double check is needed so mypy does not analyze this on Linux +if sys.platform.startswith("win") and WIN: + from ._winconsole import _get_windows_console_stream + + def _get_argv_encoding() -> str: + import locale + + return locale.getpreferredencoding() + + _ansi_stream_wrappers: cabc.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def auto_wrap_for_ansi(stream: t.TextIO, color: bool | None = None) -> t.TextIO: + """Support ANSI color and style codes on Windows by wrapping a + stream with colorama. + """ + try: + cached = _ansi_stream_wrappers.get(stream) + except Exception: + cached = None + + if cached is not None: + return cached + + import colorama + + strip = should_strip_ansi(stream, color) + ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) + rv = t.cast(t.TextIO, ansi_wrapper.stream) + _write = rv.write + + def _safe_write(s: str) -> int: + try: + return _write(s) + except BaseException: + ansi_wrapper.reset_all() + raise + + rv.write = _safe_write # type: ignore[method-assign] + + try: + _ansi_stream_wrappers[stream] = rv + except Exception: + pass + + return rv + +else: + + def _get_argv_encoding() -> str: + return getattr(sys.stdin, "encoding", None) or sys.getfilesystemencoding() + + def _get_windows_console_stream( + f: t.TextIO, encoding: str | None, errors: str | None + ) -> t.TextIO | None: + return None + + +def term_len(x: str) -> int: + return len(strip_ansi(x)) + + +def isatty(stream: t.IO[t.Any]) -> bool: + try: + return stream.isatty() + except Exception: + return False + + +def _make_cached_stream_func( + src_func: t.Callable[[], t.TextIO | None], + wrapper_func: t.Callable[[], t.TextIO], +) -> t.Callable[[], t.TextIO | None]: + cache: cabc.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def func() -> t.TextIO | None: + stream = src_func() + + if stream is None: + return None + + try: + rv = cache.get(stream) + except Exception: + rv = None + if rv is not None: + return rv + rv = wrapper_func() + try: + cache[stream] = rv + except Exception: + pass + return rv + + return func + + +_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin) +_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout) +_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr) + + +binary_streams: cabc.Mapping[str, t.Callable[[], t.BinaryIO]] = { + "stdin": get_binary_stdin, + "stdout": get_binary_stdout, + "stderr": get_binary_stderr, +} + +text_streams: cabc.Mapping[str, t.Callable[[str | None, str | None], t.TextIO]] = { + "stdin": get_text_stdin, + "stdout": get_text_stdout, + "stderr": get_text_stderr, +} diff --git a/.venv/lib/python3.12/site-packages/click/_termui_impl.py b/.venv/lib/python3.12/site-packages/click/_termui_impl.py new file mode 100644 index 0000000..ee8225c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/_termui_impl.py @@ -0,0 +1,852 @@ +""" +This module contains implementations for the termui module. To keep the +import time of Click down, some infrequently used functionality is +placed in this module and only imported as needed. 
+""" + +from __future__ import annotations + +import collections.abc as cabc +import contextlib +import math +import os +import shlex +import sys +import time +import typing as t +from gettext import gettext as _ +from io import StringIO +from pathlib import Path +from types import TracebackType + +from ._compat import _default_text_stdout +from ._compat import CYGWIN +from ._compat import get_best_encoding +from ._compat import isatty +from ._compat import open_stream +from ._compat import strip_ansi +from ._compat import term_len +from ._compat import WIN +from .exceptions import ClickException +from .utils import echo + +V = t.TypeVar("V") + +if os.name == "nt": + BEFORE_BAR = "\r" + AFTER_BAR = "\n" +else: + BEFORE_BAR = "\r\033[?25l" + AFTER_BAR = "\033[?25h\n" + + +class ProgressBar(t.Generic[V]): + def __init__( + self, + iterable: cabc.Iterable[V] | None, + length: int | None = None, + fill_char: str = "#", + empty_char: str = " ", + bar_template: str = "%(bar)s", + info_sep: str = " ", + hidden: bool = False, + show_eta: bool = True, + show_percent: bool | None = None, + show_pos: bool = False, + item_show_func: t.Callable[[V | None], str | None] | None = None, + label: str | None = None, + file: t.TextIO | None = None, + color: bool | None = None, + update_min_steps: int = 1, + width: int = 30, + ) -> None: + self.fill_char = fill_char + self.empty_char = empty_char + self.bar_template = bar_template + self.info_sep = info_sep + self.hidden = hidden + self.show_eta = show_eta + self.show_percent = show_percent + self.show_pos = show_pos + self.item_show_func = item_show_func + self.label: str = label or "" + + if file is None: + file = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. + if file is None: + file = StringIO() + + self.file = file + self.color = color + self.update_min_steps = update_min_steps + self._completed_intervals = 0 + self.width: int = width + self.autowidth: bool = width == 0 + + if length is None: + from operator import length_hint + + length = length_hint(iterable, -1) + + if length == -1: + length = None + if iterable is None: + if length is None: + raise TypeError("iterable or length is required") + iterable = t.cast("cabc.Iterable[V]", range(length)) + self.iter: cabc.Iterable[V] = iter(iterable) + self.length = length + self.pos: int = 0 + self.avg: list[float] = [] + self.last_eta: float + self.start: float + self.start = self.last_eta = time.time() + self.eta_known: bool = False + self.finished: bool = False + self.max_width: int | None = None + self.entered: bool = False + self.current_item: V | None = None + self._is_atty = isatty(self.file) + self._last_line: str | None = None + + def __enter__(self) -> ProgressBar[V]: + self.entered = True + self.render_progress() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + tb: TracebackType | None, + ) -> None: + self.render_finish() + + def __iter__(self) -> cabc.Iterator[V]: + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + self.render_progress() + return self.generator() + + def __next__(self) -> V: + # Iteration is defined in terms of a generator function, + # returned by iter(self); use that to define next(). This works + # because `self.iter` is an iterable consumed by that generator, + # so it is re-entry safe. Calling `next(self.generator())` + # twice works and does "what you want". 
+ return next(iter(self)) + + def render_finish(self) -> None: + if self.hidden or not self._is_atty: + return + self.file.write(AFTER_BAR) + self.file.flush() + + @property + def pct(self) -> float: + if self.finished: + return 1.0 + return min(self.pos / (float(self.length or 1) or 1), 1.0) + + @property + def time_per_iteration(self) -> float: + if not self.avg: + return 0.0 + return sum(self.avg) / float(len(self.avg)) + + @property + def eta(self) -> float: + if self.length is not None and not self.finished: + return self.time_per_iteration * (self.length - self.pos) + return 0.0 + + def format_eta(self) -> str: + if self.eta_known: + t = int(self.eta) + seconds = t % 60 + t //= 60 + minutes = t % 60 + t //= 60 + hours = t % 24 + t //= 24 + if t > 0: + return f"{t}d {hours:02}:{minutes:02}:{seconds:02}" + else: + return f"{hours:02}:{minutes:02}:{seconds:02}" + return "" + + def format_pos(self) -> str: + pos = str(self.pos) + if self.length is not None: + pos += f"/{self.length}" + return pos + + def format_pct(self) -> str: + return f"{int(self.pct * 100): 4}%"[1:] + + def format_bar(self) -> str: + if self.length is not None: + bar_length = int(self.pct * self.width) + bar = self.fill_char * bar_length + bar += self.empty_char * (self.width - bar_length) + elif self.finished: + bar = self.fill_char * self.width + else: + chars = list(self.empty_char * (self.width or 1)) + if self.time_per_iteration != 0: + chars[ + int( + (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) + * self.width + ) + ] = self.fill_char + bar = "".join(chars) + return bar + + def format_progress_line(self) -> str: + show_percent = self.show_percent + + info_bits = [] + if self.length is not None and show_percent is None: + show_percent = not self.show_pos + + if self.show_pos: + info_bits.append(self.format_pos()) + if show_percent: + info_bits.append(self.format_pct()) + if self.show_eta and self.eta_known and not self.finished: + info_bits.append(self.format_eta()) + if self.item_show_func is not None: + item_info = self.item_show_func(self.current_item) + if item_info is not None: + info_bits.append(item_info) + + return ( + self.bar_template + % { + "label": self.label, + "bar": self.format_bar(), + "info": self.info_sep.join(info_bits), + } + ).rstrip() + + def render_progress(self) -> None: + if self.hidden: + return + + if not self._is_atty: + # Only output the label once if the output is not a TTY. + if self._last_line != self.label: + self._last_line = self.label + echo(self.label, file=self.file, color=self.color) + return + + buf = [] + # Update width in case the terminal has been resized + if self.autowidth: + import shutil + + old_width = self.width + self.width = 0 + clutter_length = term_len(self.format_progress_line()) + new_width = max(0, shutil.get_terminal_size().columns - clutter_length) + if new_width < old_width and self.max_width is not None: + buf.append(BEFORE_BAR) + buf.append(" " * self.max_width) + self.max_width = new_width + self.width = new_width + + clear_width = self.width + if self.max_width is not None: + clear_width = self.max_width + + buf.append(BEFORE_BAR) + line = self.format_progress_line() + line_len = term_len(line) + if self.max_width is None or self.max_width < line_len: + self.max_width = line_len + + buf.append(line) + buf.append(" " * (clear_width - line_len)) + line = "".join(buf) + # Render the line only if it changed. 
+ + if line != self._last_line: + self._last_line = line + echo(line, file=self.file, color=self.color, nl=False) + self.file.flush() + + def make_step(self, n_steps: int) -> None: + self.pos += n_steps + if self.length is not None and self.pos >= self.length: + self.finished = True + + if (time.time() - self.last_eta) < 1.0: + return + + self.last_eta = time.time() + + # self.avg is a rolling list of length <= 7 of steps where steps are + # defined as time elapsed divided by the total progress through + # self.length. + if self.pos: + step = (time.time() - self.start) / self.pos + else: + step = time.time() - self.start + + self.avg = self.avg[-6:] + [step] + + self.eta_known = self.length is not None + + def update(self, n_steps: int, current_item: V | None = None) -> None: + """Update the progress bar by advancing a specified number of + steps, and optionally set the ``current_item`` for this new + position. + + :param n_steps: Number of steps to advance. + :param current_item: Optional item to set as ``current_item`` + for the updated position. + + .. versionchanged:: 8.0 + Added the ``current_item`` optional parameter. + + .. versionchanged:: 8.0 + Only render when the number of steps meets the + ``update_min_steps`` threshold. + """ + if current_item is not None: + self.current_item = current_item + + self._completed_intervals += n_steps + + if self._completed_intervals >= self.update_min_steps: + self.make_step(self._completed_intervals) + self.render_progress() + self._completed_intervals = 0 + + def finish(self) -> None: + self.eta_known = False + self.current_item = None + self.finished = True + + def generator(self) -> cabc.Iterator[V]: + """Return a generator which yields the items added to the bar + during construction, and updates the progress bar *after* the + yielded block returns. + """ + # WARNING: the iterator interface for `ProgressBar` relies on + # this and only works because this is a simple generator which + # doesn't create or manage additional state. If this function + # changes, the impact should be evaluated both against + # `iter(bar)` and `next(bar)`. `next()` in particular may call + # `self.generator()` repeatedly, and this must remain safe in + # order for that interface to work. + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + + if not self._is_atty: + yield from self.iter + else: + for rv in self.iter: + self.current_item = rv + + # This allows show_item_func to be updated before the + # item is processed. Only trigger at the beginning of + # the update interval. + if self._completed_intervals == 0: + self.render_progress() + + yield rv + self.update(1) + + self.finish() + self.render_progress() + + +def pager(generator: cabc.Iterable[str], color: bool | None = None) -> None: + """Decide what method to use for paging through text.""" + stdout = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. + if stdout is None: + stdout = StringIO() + + if not isatty(sys.stdin) or not isatty(stdout): + return _nullpager(stdout, generator, color) + + # Split and normalize the pager command into parts. 
+ pager_cmd_parts = shlex.split(os.environ.get("PAGER", ""), posix=False) + if pager_cmd_parts: + if WIN: + if _tempfilepager(generator, pager_cmd_parts, color): + return + elif _pipepager(generator, pager_cmd_parts, color): + return + + if os.environ.get("TERM") in ("dumb", "emacs"): + return _nullpager(stdout, generator, color) + if (WIN or sys.platform.startswith("os2")) and _tempfilepager( + generator, ["more"], color + ): + return + if _pipepager(generator, ["less"], color): + return + + import tempfile + + fd, filename = tempfile.mkstemp() + os.close(fd) + try: + if _pipepager(generator, ["more"], color): + return + return _nullpager(stdout, generator, color) + finally: + os.unlink(filename) + + +def _pipepager( + generator: cabc.Iterable[str], cmd_parts: list[str], color: bool | None +) -> bool: + """Page through text by feeding it to another program. Invoking a + pager through this might support colors. + + Returns `True` if the command was found, `False` otherwise and thus another + pager should be attempted. + """ + # Split the command into the invoked CLI and its parameters. + if not cmd_parts: + return False + + import shutil + + cmd = cmd_parts[0] + cmd_params = cmd_parts[1:] + + cmd_filepath = shutil.which(cmd) + if not cmd_filepath: + return False + + # Produces a normalized absolute path string. + # multi-call binaries such as busybox derive their identity from the symlink + # less -> busybox. resolve() causes them to misbehave. (eg. less becomes busybox) + cmd_path = Path(cmd_filepath).absolute() + cmd_name = cmd_path.name + + import subprocess + + # Make a local copy of the environment to not affect the global one. + env = dict(os.environ) + + # If we're piping to less and the user hasn't decided on colors, we enable + # them by default we find the -R flag in the command line arguments. + if color is None and cmd_name == "less": + less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_params)}" + if not less_flags: + env["LESS"] = "-R" + color = True + elif "r" in less_flags or "R" in less_flags: + color = True + + c = subprocess.Popen( + [str(cmd_path)] + cmd_params, + shell=False, + stdin=subprocess.PIPE, + env=env, + errors="replace", + text=True, + ) + assert c.stdin is not None + try: + for text in generator: + if not color: + text = strip_ansi(text) + + c.stdin.write(text) + except BrokenPipeError: + # In case the pager exited unexpectedly, ignore the broken pipe error. + pass + except Exception as e: + # In case there is an exception we want to close the pager immediately + # and let the caller handle it. + # Otherwise the pager will keep running, and the user may not notice + # the error message, or worse yet it may leave the terminal in a broken state. + c.terminate() + raise e + finally: + # We must close stdin and wait for the pager to exit before we continue + try: + c.stdin.close() + # Close implies flush, so it might throw a BrokenPipeError if the pager + # process exited already. + except BrokenPipeError: + pass + + # Less doesn't respect ^C, but catches it for its own UI purposes (aborting + # search or other commands inside less). + # + # That means when the user hits ^C, the parent process (click) terminates, + # but less is still alive, paging the output and messing up the terminal. + # + # If the user wants to make the pager exit on ^C, they should set + # `LESS='-K'`. It's not our decision to make. 
+ while True: + try: + c.wait() + except KeyboardInterrupt: + pass + else: + break + + return True + + +def _tempfilepager( + generator: cabc.Iterable[str], cmd_parts: list[str], color: bool | None +) -> bool: + """Page through text by invoking a program on a temporary file. + + Returns `True` if the command was found, `False` otherwise and thus another + pager should be attempted. + """ + # Split the command into the invoked CLI and its parameters. + if not cmd_parts: + return False + + import shutil + + cmd = cmd_parts[0] + + cmd_filepath = shutil.which(cmd) + if not cmd_filepath: + return False + # Produces a normalized absolute path string. + # multi-call binaries such as busybox derive their identity from the symlink + # less -> busybox. resolve() causes them to misbehave. (eg. less becomes busybox) + cmd_path = Path(cmd_filepath).absolute() + + import subprocess + import tempfile + + fd, filename = tempfile.mkstemp() + # TODO: This never terminates if the passed generator never terminates. + text = "".join(generator) + if not color: + text = strip_ansi(text) + encoding = get_best_encoding(sys.stdout) + with open_stream(filename, "wb")[0] as f: + f.write(text.encode(encoding)) + try: + subprocess.call([str(cmd_path), filename]) + except OSError: + # Command not found + pass + finally: + os.close(fd) + os.unlink(filename) + + return True + + +def _nullpager( + stream: t.TextIO, generator: cabc.Iterable[str], color: bool | None +) -> None: + """Simply print unformatted text. This is the ultimate fallback.""" + for text in generator: + if not color: + text = strip_ansi(text) + stream.write(text) + + +class Editor: + def __init__( + self, + editor: str | None = None, + env: cabc.Mapping[str, str] | None = None, + require_save: bool = True, + extension: str = ".txt", + ) -> None: + self.editor = editor + self.env = env + self.require_save = require_save + self.extension = extension + + def get_editor(self) -> str: + if self.editor is not None: + return self.editor + for key in "VISUAL", "EDITOR": + rv = os.environ.get(key) + if rv: + return rv + if WIN: + return "notepad" + + from shutil import which + + for editor in "sensible-editor", "vim", "nano": + if which(editor) is not None: + return editor + return "vi" + + def edit_files(self, filenames: cabc.Iterable[str]) -> None: + import subprocess + + editor = self.get_editor() + environ: dict[str, str] | None = None + + if self.env: + environ = os.environ.copy() + environ.update(self.env) + + exc_filename = " ".join(f'"{filename}"' for filename in filenames) + + try: + c = subprocess.Popen( + args=f"{editor} {exc_filename}", env=environ, shell=True + ) + exit_code = c.wait() + if exit_code != 0: + raise ClickException( + _("{editor}: Editing failed").format(editor=editor) + ) + except OSError as e: + raise ClickException( + _("{editor}: Editing failed: {e}").format(editor=editor, e=e) + ) from e + + @t.overload + def edit(self, text: bytes | bytearray) -> bytes | None: ... + + # We cannot know whether or not the type expected is str or bytes when None + # is passed, so str is returned as that was what was done before. + @t.overload + def edit(self, text: str | None) -> str | None: ... 
+ + def edit(self, text: str | bytes | bytearray | None) -> str | bytes | None: + import tempfile + + if text is None: + data: bytes | bytearray = b"" + elif isinstance(text, (bytes, bytearray)): + data = text + else: + if text and not text.endswith("\n"): + text += "\n" + + if WIN: + data = text.replace("\n", "\r\n").encode("utf-8-sig") + else: + data = text.encode("utf-8") + + fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) + f: t.BinaryIO + + try: + with os.fdopen(fd, "wb") as f: + f.write(data) + + # If the filesystem resolution is 1 second, like Mac OS + # 10.12 Extended, or 2 seconds, like FAT32, and the editor + # closes very fast, require_save can fail. Set the modified + # time to be 2 seconds in the past to work around this. + os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2)) + # Depending on the resolution, the exact value might not be + # recorded, so get the new recorded value. + timestamp = os.path.getmtime(name) + + self.edit_files((name,)) + + if self.require_save and os.path.getmtime(name) == timestamp: + return None + + with open(name, "rb") as f: + rv = f.read() + + if isinstance(text, (bytes, bytearray)): + return rv + + return rv.decode("utf-8-sig").replace("\r\n", "\n") + finally: + os.unlink(name) + + +def open_url(url: str, wait: bool = False, locate: bool = False) -> int: + import subprocess + + def _unquote_file(url: str) -> str: + from urllib.parse import unquote + + if url.startswith("file://"): + url = unquote(url[7:]) + + return url + + if sys.platform == "darwin": + args = ["open"] + if wait: + args.append("-W") + if locate: + args.append("-R") + args.append(_unquote_file(url)) + null = open("/dev/null", "w") + try: + return subprocess.Popen(args, stderr=null).wait() + finally: + null.close() + elif WIN: + if locate: + url = _unquote_file(url) + args = ["explorer", f"/select,{url}"] + else: + args = ["start"] + if wait: + args.append("/WAIT") + args.append("") + args.append(url) + try: + return subprocess.call(args) + except OSError: + # Command not found + return 127 + elif CYGWIN: + if locate: + url = _unquote_file(url) + args = ["cygstart", os.path.dirname(url)] + else: + args = ["cygstart"] + if wait: + args.append("-w") + args.append(url) + try: + return subprocess.call(args) + except OSError: + # Command not found + return 127 + + try: + if locate: + url = os.path.dirname(_unquote_file(url)) or "." + else: + url = _unquote_file(url) + c = subprocess.Popen(["xdg-open", url]) + if wait: + return c.wait() + return 0 + except OSError: + if url.startswith(("http://", "https://")) and not locate and not wait: + import webbrowser + + webbrowser.open(url) + return 0 + return 1 + + +def _translate_ch_to_exc(ch: str) -> None: + if ch == "\x03": + raise KeyboardInterrupt() + + if ch == "\x04" and not WIN: # Unix-like, Ctrl+D + raise EOFError() + + if ch == "\x1a" and WIN: # Windows, Ctrl+Z + raise EOFError() + + return None + + +if sys.platform == "win32": + import msvcrt + + @contextlib.contextmanager + def raw_terminal() -> cabc.Iterator[int]: + yield -1 + + def getchar(echo: bool) -> str: + # The function `getch` will return a bytes object corresponding to + # the pressed character. Since Windows 10 build 1803, it will also + # return \x00 when called a second time after pressing a regular key. + # + # `getwch` does not share this probably-bugged behavior. Moreover, it + # returns a Unicode object by default, which is what we want. 
+ # + # Either of these functions will return \x00 or \xe0 to indicate + # a special key, and you need to call the same function again to get + # the "rest" of the code. The fun part is that \u00e0 is + # "latin small letter a with grave", so if you type that on a French + # keyboard, you _also_ get a \xe0. + # E.g., consider the Up arrow. This returns \xe0 and then \x48. The + # resulting Unicode string reads as "a with grave" + "capital H". + # This is indistinguishable from when the user actually types + # "a with grave" and then "capital H". + # + # When \xe0 is returned, we assume it's part of a special-key sequence + # and call `getwch` again, but that means that when the user types + # the \u00e0 character, `getchar` doesn't return until a second + # character is typed. + # The alternative is returning immediately, but that would mess up + # cross-platform handling of arrow keys and others that start with + # \xe0. Another option is using `getch`, but then we can't reliably + # read non-ASCII characters, because return values of `getch` are + # limited to the current 8-bit codepage. + # + # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` + # is doing the right thing in more situations than with `getch`. + + if echo: + func = t.cast(t.Callable[[], str], msvcrt.getwche) + else: + func = t.cast(t.Callable[[], str], msvcrt.getwch) + + rv = func() + + if rv in ("\x00", "\xe0"): + # \x00 and \xe0 are control characters that indicate special key, + # see above. + rv += func() + + _translate_ch_to_exc(rv) + return rv + +else: + import termios + import tty + + @contextlib.contextmanager + def raw_terminal() -> cabc.Iterator[int]: + f: t.TextIO | None + fd: int + + if not isatty(sys.stdin): + f = open("/dev/tty") + fd = f.fileno() + else: + fd = sys.stdin.fileno() + f = None + + try: + old_settings = termios.tcgetattr(fd) + + try: + tty.setraw(fd) + yield fd + finally: + termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) + sys.stdout.flush() + + if f is not None: + f.close() + except termios.error: + pass + + def getchar(echo: bool) -> str: + with raw_terminal() as fd: + ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace") + + if echo and isatty(sys.stdout): + sys.stdout.write(ch) + + _translate_ch_to_exc(ch) + return ch diff --git a/.venv/lib/python3.12/site-packages/click/_textwrap.py b/.venv/lib/python3.12/site-packages/click/_textwrap.py new file mode 100644 index 0000000..97fbee3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/_textwrap.py @@ -0,0 +1,51 @@ +from __future__ import annotations + +import collections.abc as cabc +import textwrap +from contextlib import contextmanager + + +class TextWrapper(textwrap.TextWrapper): + def _handle_long_word( + self, + reversed_chunks: list[str], + cur_line: list[str], + cur_len: int, + width: int, + ) -> None: + space_left = max(width - cur_len, 1) + + if self.break_long_words: + last = reversed_chunks[-1] + cut = last[:space_left] + res = last[space_left:] + cur_line.append(cut) + reversed_chunks[-1] = res + elif not cur_line: + cur_line.append(reversed_chunks.pop()) + + @contextmanager + def extra_indent(self, indent: str) -> cabc.Iterator[None]: + old_initial_indent = self.initial_indent + old_subsequent_indent = self.subsequent_indent + self.initial_indent += indent + self.subsequent_indent += indent + + try: + yield + finally: + self.initial_indent = old_initial_indent + self.subsequent_indent = old_subsequent_indent + + def indent_only(self, text: str) -> str: + rv = [] + + for idx, line 
in enumerate(text.splitlines()): + indent = self.initial_indent + + if idx > 0: + indent = self.subsequent_indent + + rv.append(f"{indent}{line}") + + return "\n".join(rv) diff --git a/.venv/lib/python3.12/site-packages/click/_utils.py b/.venv/lib/python3.12/site-packages/click/_utils.py new file mode 100644 index 0000000..09fb008 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/_utils.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +import enum +import typing as t + + +class Sentinel(enum.Enum): + """Enum used to define sentinel values. + + .. seealso:: + + `PEP 661 - Sentinel Values `_. + """ + + UNSET = object() + FLAG_NEEDS_VALUE = object() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}.{self.name}" + + +UNSET = Sentinel.UNSET +"""Sentinel used to indicate that a value is not set.""" + +FLAG_NEEDS_VALUE = Sentinel.FLAG_NEEDS_VALUE +"""Sentinel used to indicate an option was passed as a flag without a +value but is not a flag option. + +``Option.consume_value`` uses this to prompt or use the ``flag_value``. +""" + +T_UNSET = t.Literal[UNSET] # type: ignore[valid-type] +"""Type hint for the :data:`UNSET` sentinel value.""" + +T_FLAG_NEEDS_VALUE = t.Literal[FLAG_NEEDS_VALUE] # type: ignore[valid-type] +"""Type hint for the :data:`FLAG_NEEDS_VALUE` sentinel value.""" diff --git a/.venv/lib/python3.12/site-packages/click/_winconsole.py b/.venv/lib/python3.12/site-packages/click/_winconsole.py new file mode 100644 index 0000000..e56c7c6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/_winconsole.py @@ -0,0 +1,296 @@ +# This module is based on the excellent work by Adam Bartoš who +# provided a lot of what went into the implementation here in +# the discussion to issue1602 in the Python bug tracker. +# +# There are some general differences in regards to how this works +# compared to the original patches as we do not need to patch +# the entire interpreter but just work in our little world of +# echo and prompt. 
+from __future__ import annotations + +import collections.abc as cabc +import io +import sys +import time +import typing as t +from ctypes import Array +from ctypes import byref +from ctypes import c_char +from ctypes import c_char_p +from ctypes import c_int +from ctypes import c_ssize_t +from ctypes import c_ulong +from ctypes import c_void_p +from ctypes import POINTER +from ctypes import py_object +from ctypes import Structure +from ctypes.wintypes import DWORD +from ctypes.wintypes import HANDLE +from ctypes.wintypes import LPCWSTR +from ctypes.wintypes import LPWSTR + +from ._compat import _NonClosingTextIOWrapper + +assert sys.platform == "win32" +import msvcrt # noqa: E402 +from ctypes import windll # noqa: E402 +from ctypes import WINFUNCTYPE # noqa: E402 + +c_ssize_p = POINTER(c_ssize_t) + +kernel32 = windll.kernel32 +GetStdHandle = kernel32.GetStdHandle +ReadConsoleW = kernel32.ReadConsoleW +WriteConsoleW = kernel32.WriteConsoleW +GetConsoleMode = kernel32.GetConsoleMode +GetLastError = kernel32.GetLastError +GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) +CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( + ("CommandLineToArgvW", windll.shell32) +) +LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32)) + +STDIN_HANDLE = GetStdHandle(-10) +STDOUT_HANDLE = GetStdHandle(-11) +STDERR_HANDLE = GetStdHandle(-12) + +PyBUF_SIMPLE = 0 +PyBUF_WRITABLE = 1 + +ERROR_SUCCESS = 0 +ERROR_NOT_ENOUGH_MEMORY = 8 +ERROR_OPERATION_ABORTED = 995 + +STDIN_FILENO = 0 +STDOUT_FILENO = 1 +STDERR_FILENO = 2 + +EOF = b"\x1a" +MAX_BYTES_WRITTEN = 32767 + +if t.TYPE_CHECKING: + try: + # Using `typing_extensions.Buffer` instead of `collections.abc` + # on Windows for some reason does not have `Sized` implemented. + from collections.abc import Buffer # type: ignore + except ImportError: + from typing_extensions import Buffer + +try: + from ctypes import pythonapi +except ImportError: + # On PyPy we cannot get buffers so our ability to operate here is + # severely limited. 
+ get_buffer = None +else: + + class Py_buffer(Structure): + _fields_ = [ # noqa: RUF012 + ("buf", c_void_p), + ("obj", py_object), + ("len", c_ssize_t), + ("itemsize", c_ssize_t), + ("readonly", c_int), + ("ndim", c_int), + ("format", c_char_p), + ("shape", c_ssize_p), + ("strides", c_ssize_p), + ("suboffsets", c_ssize_p), + ("internal", c_void_p), + ] + + PyObject_GetBuffer = pythonapi.PyObject_GetBuffer + PyBuffer_Release = pythonapi.PyBuffer_Release + + def get_buffer(obj: Buffer, writable: bool = False) -> Array[c_char]: + buf = Py_buffer() + flags: int = PyBUF_WRITABLE if writable else PyBUF_SIMPLE + PyObject_GetBuffer(py_object(obj), byref(buf), flags) + + try: + buffer_type = c_char * buf.len + out: Array[c_char] = buffer_type.from_address(buf.buf) + return out + finally: + PyBuffer_Release(byref(buf)) + + +class _WindowsConsoleRawIOBase(io.RawIOBase): + def __init__(self, handle: int | None) -> None: + self.handle = handle + + def isatty(self) -> t.Literal[True]: + super().isatty() + return True + + +class _WindowsConsoleReader(_WindowsConsoleRawIOBase): + def readable(self) -> t.Literal[True]: + return True + + def readinto(self, b: Buffer) -> int: + bytes_to_be_read = len(b) + if not bytes_to_be_read: + return 0 + elif bytes_to_be_read % 2: + raise ValueError( + "cannot read odd number of bytes from UTF-16-LE encoded console" + ) + + buffer = get_buffer(b, writable=True) + code_units_to_be_read = bytes_to_be_read // 2 + code_units_read = c_ulong() + + rv = ReadConsoleW( + HANDLE(self.handle), + buffer, + code_units_to_be_read, + byref(code_units_read), + None, + ) + if GetLastError() == ERROR_OPERATION_ABORTED: + # wait for KeyboardInterrupt + time.sleep(0.1) + if not rv: + raise OSError(f"Windows error: {GetLastError()}") + + if buffer[0] == EOF: + return 0 + return 2 * code_units_read.value + + +class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): + def writable(self) -> t.Literal[True]: + return True + + @staticmethod + def _get_error_message(errno: int) -> str: + if errno == ERROR_SUCCESS: + return "ERROR_SUCCESS" + elif errno == ERROR_NOT_ENOUGH_MEMORY: + return "ERROR_NOT_ENOUGH_MEMORY" + return f"Windows error {errno}" + + def write(self, b: Buffer) -> int: + bytes_to_be_written = len(b) + buf = get_buffer(b) + code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2 + code_units_written = c_ulong() + + WriteConsoleW( + HANDLE(self.handle), + buf, + code_units_to_be_written, + byref(code_units_written), + None, + ) + bytes_written = 2 * code_units_written.value + + if bytes_written == 0 and bytes_to_be_written > 0: + raise OSError(self._get_error_message(GetLastError())) + return bytes_written + + +class ConsoleStream: + def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None: + self._text_stream = text_stream + self.buffer = byte_stream + + @property + def name(self) -> str: + return self.buffer.name + + def write(self, x: t.AnyStr) -> int: + if isinstance(x, str): + return self._text_stream.write(x) + try: + self.flush() + except Exception: + pass + return self.buffer.write(x) + + def writelines(self, lines: cabc.Iterable[t.AnyStr]) -> None: + for line in lines: + self.write(line) + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._text_stream, name) + + def isatty(self) -> bool: + return self.buffer.isatty() + + def __repr__(self) -> str: + return f"" + + +def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + 
io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +_stream_factories: cabc.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = { + 0: _get_text_stdin, + 1: _get_text_stdout, + 2: _get_text_stderr, +} + + +def _is_console(f: t.TextIO) -> bool: + if not hasattr(f, "fileno"): + return False + + try: + fileno = f.fileno() + except (OSError, io.UnsupportedOperation): + return False + + handle = msvcrt.get_osfhandle(fileno) + return bool(GetConsoleMode(handle, byref(DWORD()))) + + +def _get_windows_console_stream( + f: t.TextIO, encoding: str | None, errors: str | None +) -> t.TextIO | None: + if ( + get_buffer is None + or encoding not in {"utf-16-le", None} + or errors not in {"strict", None} + or not _is_console(f) + ): + return None + + func = _stream_factories.get(f.fileno()) + if func is None: + return None + + b = getattr(f, "buffer", None) + + if b is None: + return None + + return func(b) diff --git a/.venv/lib/python3.12/site-packages/click/core.py b/.venv/lib/python3.12/site-packages/click/core.py new file mode 100644 index 0000000..57f549c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/core.py @@ -0,0 +1,3415 @@ +from __future__ import annotations + +import collections.abc as cabc +import enum +import errno +import inspect +import os +import sys +import typing as t +from collections import abc +from collections import Counter +from contextlib import AbstractContextManager +from contextlib import contextmanager +from contextlib import ExitStack +from functools import update_wrapper +from gettext import gettext as _ +from gettext import ngettext +from itertools import repeat +from types import TracebackType + +from . 
import types +from ._utils import FLAG_NEEDS_VALUE +from ._utils import UNSET +from .exceptions import Abort +from .exceptions import BadParameter +from .exceptions import ClickException +from .exceptions import Exit +from .exceptions import MissingParameter +from .exceptions import NoArgsIsHelpError +from .exceptions import UsageError +from .formatting import HelpFormatter +from .formatting import join_options +from .globals import pop_context +from .globals import push_context +from .parser import _OptionParser +from .parser import _split_opt +from .termui import confirm +from .termui import prompt +from .termui import style +from .utils import _detect_program_name +from .utils import _expand_args +from .utils import echo +from .utils import make_default_short_help +from .utils import make_str +from .utils import PacifyFlushWrapper + +if t.TYPE_CHECKING: + from .shell_completion import CompletionItem + +F = t.TypeVar("F", bound="t.Callable[..., t.Any]") +V = t.TypeVar("V") + + +def _complete_visible_commands( + ctx: Context, incomplete: str +) -> cabc.Iterator[tuple[str, Command]]: + """List all the subcommands of a group that start with the + incomplete value and aren't hidden. + + :param ctx: Invocation context for the group. + :param incomplete: Value being completed. May be empty. + """ + multi = t.cast(Group, ctx.command) + + for name in multi.list_commands(ctx): + if name.startswith(incomplete): + command = multi.get_command(ctx, name) + + if command is not None and not command.hidden: + yield name, command + + +def _check_nested_chain( + base_command: Group, cmd_name: str, cmd: Command, register: bool = False +) -> None: + if not base_command.chain or not isinstance(cmd, Group): + return + + if register: + message = ( + f"It is not possible to add the group {cmd_name!r} to another" + f" group {base_command.name!r} that is in chain mode." + ) + else: + message = ( + f"Found the group {cmd_name!r} as subcommand to another group " + f" {base_command.name!r} that is in chain mode. This is not supported." + ) + + raise RuntimeError(message) + + +def batch(iterable: cabc.Iterable[V], batch_size: int) -> list[tuple[V, ...]]: + return list(zip(*repeat(iter(iterable), batch_size), strict=False)) + + +@contextmanager +def augment_usage_errors( + ctx: Context, param: Parameter | None = None +) -> cabc.Iterator[None]: + """Context manager that attaches extra information to exceptions.""" + try: + yield + except BadParameter as e: + if e.ctx is None: + e.ctx = ctx + if param is not None and e.param is None: + e.param = param + raise + except UsageError as e: + if e.ctx is None: + e.ctx = ctx + raise + + +def iter_params_for_processing( + invocation_order: cabc.Sequence[Parameter], + declaration_order: cabc.Sequence[Parameter], +) -> list[Parameter]: + """Returns all declared parameters in the order they should be processed. + + The declared parameters are re-shuffled depending on the order in which + they were invoked, as well as the eagerness of each parameters. + + The invocation order takes precedence over the declaration order. I.e. the + order in which the user provided them to the CLI is respected. 
+ + This behavior and its effect on callback evaluation is detailed at: + https://click.palletsprojects.com/en/stable/advanced/#callback-evaluation-order + """ + + def sort_key(item: Parameter) -> tuple[bool, float]: + try: + idx: float = invocation_order.index(item) + except ValueError: + idx = float("inf") + + return not item.is_eager, idx + + return sorted(declaration_order, key=sort_key) + + +class ParameterSource(enum.Enum): + """This is an :class:`~enum.Enum` that indicates the source of a + parameter's value. + + Use :meth:`click.Context.get_parameter_source` to get the + source for a parameter by name. + + .. versionchanged:: 8.0 + Use :class:`~enum.Enum` and drop the ``validate`` method. + + .. versionchanged:: 8.0 + Added the ``PROMPT`` value. + """ + + COMMANDLINE = enum.auto() + """The value was provided by the command line args.""" + ENVIRONMENT = enum.auto() + """The value was provided with an environment variable.""" + DEFAULT = enum.auto() + """Used the default specified by the parameter.""" + DEFAULT_MAP = enum.auto() + """Used a default provided by :attr:`Context.default_map`.""" + PROMPT = enum.auto() + """Used a prompt to confirm a default or provide a value.""" + + +class Context: + """The context is a special internal object that holds state relevant + for the script execution at every single level. It's normally invisible + to commands unless they opt-in to getting access to it. + + The context is useful as it can pass internal objects around and can + control special execution features such as reading data from + environment variables. + + A context can be used as context manager in which case it will call + :meth:`close` on teardown. + + :param command: the command class for this context. + :param parent: the parent context. + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it is usually + the name of the script, for commands below it it's + the name of the script. + :param obj: an arbitrary object of user data. + :param auto_envvar_prefix: the prefix to use for automatic environment + variables. If this is `None` then reading + from environment variables is disabled. This + does not affect manually set environment + variables which are always read. + :param default_map: a dictionary (like object) with default values + for parameters. + :param terminal_width: the width of the terminal. The default is + inherit from parent context. If no context + defines the terminal width then auto + detection will be applied. + :param max_content_width: the maximum width for content rendered by + Click (this currently only affects help + pages). This defaults to 80 characters if + not overridden. In other words: even if the + terminal is larger than that, Click will not + format things wider than 80 characters by + default. In addition to that, formatters might + add some safety mapping on the right. + :param resilient_parsing: if this flag is enabled then Click will + parse without any interactivity or callback + invocation. Default values will also be + ignored. This is useful for implementing + things such as completion support. + :param allow_extra_args: if this is set to `True` then extra arguments + at the end will not raise an error and will be + kept on the context. The default is to inherit + from the command. + :param allow_interspersed_args: if this is set to `False` then options + and arguments cannot be mixed. The + default is to inherit from the command. 
+ :param ignore_unknown_options: instructs click to ignore options it does + not know and keeps them for later + processing. + :param help_option_names: optionally a list of strings that define how + the default help parameter is named. The + default is ``['--help']``. + :param token_normalize_func: an optional function that is used to + normalize tokens (options, choices, + etc.). This for instance can be used to + implement case insensitive behavior. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are used in texts that Click prints which is by + default not the case. This for instance would affect + help output. + :param show_default: Show the default value for commands. If this + value is not set, it defaults to the value from the parent + context. ``Command.show_default`` overrides this default for the + specific command. + + .. versionchanged:: 8.2 + The ``protected_args`` attribute is deprecated and will be removed in + Click 9.0. ``args`` will contain remaining unparsed tokens. + + .. versionchanged:: 8.1 + The ``show_default`` parameter is overridden by + ``Command.show_default``, instead of the other way around. + + .. versionchanged:: 8.0 + The ``show_default`` parameter defaults to the value from the + parent context. + + .. versionchanged:: 7.1 + Added the ``show_default`` parameter. + + .. versionchanged:: 4.0 + Added the ``color``, ``ignore_unknown_options``, and + ``max_content_width`` parameters. + + .. versionchanged:: 3.0 + Added the ``allow_extra_args`` and ``allow_interspersed_args`` + parameters. + + .. versionchanged:: 2.0 + Added the ``resilient_parsing``, ``help_option_names``, and + ``token_normalize_func`` parameters. + """ + + #: The formatter class to create with :meth:`make_formatter`. + #: + #: .. versionadded:: 8.0 + formatter_class: type[HelpFormatter] = HelpFormatter + + def __init__( + self, + command: Command, + parent: Context | None = None, + info_name: str | None = None, + obj: t.Any | None = None, + auto_envvar_prefix: str | None = None, + default_map: cabc.MutableMapping[str, t.Any] | None = None, + terminal_width: int | None = None, + max_content_width: int | None = None, + resilient_parsing: bool = False, + allow_extra_args: bool | None = None, + allow_interspersed_args: bool | None = None, + ignore_unknown_options: bool | None = None, + help_option_names: list[str] | None = None, + token_normalize_func: t.Callable[[str], str] | None = None, + color: bool | None = None, + show_default: bool | None = None, + ) -> None: + #: the parent context or `None` if none exists. + self.parent = parent + #: the :class:`Command` for this context. + self.command = command + #: the descriptive information name + self.info_name = info_name + #: Map of parameter names to their parsed values. Parameters + #: with ``expose_value=False`` are not stored. + self.params: dict[str, t.Any] = {} + #: the leftover arguments. + self.args: list[str] = [] + #: protected arguments. These are arguments that are prepended + #: to `args` when certain parsing scenarios are encountered but + #: must be never propagated to another arguments. This is used + #: to implement nested parsing. + self._protected_args: list[str] = [] + #: the collected prefixes of the command's options. + self._opt_prefixes: set[str] = set(parent._opt_prefixes) if parent else set() + + if obj is None and parent is not None: + obj = parent.obj + + #: the user object stored. 
+ self.obj: t.Any = obj + self._meta: dict[str, t.Any] = getattr(parent, "meta", {}) + + #: A dictionary (-like object) with defaults for parameters. + if ( + default_map is None + and info_name is not None + and parent is not None + and parent.default_map is not None + ): + default_map = parent.default_map.get(info_name) + + self.default_map: cabc.MutableMapping[str, t.Any] | None = default_map + + #: This flag indicates if a subcommand is going to be executed. A + #: group callback can use this information to figure out if it's + #: being executed directly or because the execution flow passes + #: onwards to a subcommand. By default it's None, but it can be + #: the name of the subcommand to execute. + #: + #: If chaining is enabled this will be set to ``'*'`` in case + #: any commands are executed. It is however not possible to + #: figure out which ones. If you require this knowledge you + #: should use a :func:`result_callback`. + self.invoked_subcommand: str | None = None + + if terminal_width is None and parent is not None: + terminal_width = parent.terminal_width + + #: The width of the terminal (None is autodetection). + self.terminal_width: int | None = terminal_width + + if max_content_width is None and parent is not None: + max_content_width = parent.max_content_width + + #: The maximum width of formatted content (None implies a sensible + #: default which is 80 for most things). + self.max_content_width: int | None = max_content_width + + if allow_extra_args is None: + allow_extra_args = command.allow_extra_args + + #: Indicates if the context allows extra args or if it should + #: fail on parsing. + #: + #: .. versionadded:: 3.0 + self.allow_extra_args = allow_extra_args + + if allow_interspersed_args is None: + allow_interspersed_args = command.allow_interspersed_args + + #: Indicates if the context allows mixing of arguments and + #: options or not. + #: + #: .. versionadded:: 3.0 + self.allow_interspersed_args: bool = allow_interspersed_args + + if ignore_unknown_options is None: + ignore_unknown_options = command.ignore_unknown_options + + #: Instructs click to ignore options that a command does not + #: understand and will store it on the context for later + #: processing. This is primarily useful for situations where you + #: want to call into external programs. Generally this pattern is + #: strongly discouraged because it's not possibly to losslessly + #: forward all arguments. + #: + #: .. versionadded:: 4.0 + self.ignore_unknown_options: bool = ignore_unknown_options + + if help_option_names is None: + if parent is not None: + help_option_names = parent.help_option_names + else: + help_option_names = ["--help"] + + #: The names for the help options. + self.help_option_names: list[str] = help_option_names + + if token_normalize_func is None and parent is not None: + token_normalize_func = parent.token_normalize_func + + #: An optional normalization function for tokens. This is + #: options, choices, commands etc. + self.token_normalize_func: t.Callable[[str], str] | None = token_normalize_func + + #: Indicates if resilient parsing is enabled. In that case Click + #: will do its best to not cause any failures and default values + #: will be ignored. Useful for completion. + self.resilient_parsing: bool = resilient_parsing + + # If there is no envvar prefix yet, but the parent has one and + # the command on this level has a name, we can expand the envvar + # prefix automatically. 
+ if auto_envvar_prefix is None: + if ( + parent is not None + and parent.auto_envvar_prefix is not None + and self.info_name is not None + ): + auto_envvar_prefix = ( + f"{parent.auto_envvar_prefix}_{self.info_name.upper()}" + ) + else: + auto_envvar_prefix = auto_envvar_prefix.upper() + + if auto_envvar_prefix is not None: + auto_envvar_prefix = auto_envvar_prefix.replace("-", "_") + + self.auto_envvar_prefix: str | None = auto_envvar_prefix + + if color is None and parent is not None: + color = parent.color + + #: Controls if styling output is wanted or not. + self.color: bool | None = color + + if show_default is None and parent is not None: + show_default = parent.show_default + + #: Show option default values when formatting help text. + self.show_default: bool | None = show_default + + self._close_callbacks: list[t.Callable[[], t.Any]] = [] + self._depth = 0 + self._parameter_source: dict[str, ParameterSource] = {} + self._exit_stack = ExitStack() + + @property + def protected_args(self) -> list[str]: + import warnings + + warnings.warn( + "'protected_args' is deprecated and will be removed in Click 9.0." + " 'args' will contain remaining unparsed tokens.", + DeprecationWarning, + stacklevel=2, + ) + return self._protected_args + + def to_info_dict(self) -> dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. This traverses the entire CLI + structure. + + .. code-block:: python + + with Context(cli) as ctx: + info = ctx.to_info_dict() + + .. versionadded:: 8.0 + """ + return { + "command": self.command.to_info_dict(self), + "info_name": self.info_name, + "allow_extra_args": self.allow_extra_args, + "allow_interspersed_args": self.allow_interspersed_args, + "ignore_unknown_options": self.ignore_unknown_options, + "auto_envvar_prefix": self.auto_envvar_prefix, + } + + def __enter__(self) -> Context: + self._depth += 1 + push_context(self) + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + tb: TracebackType | None, + ) -> bool | None: + self._depth -= 1 + exit_result: bool | None = None + if self._depth == 0: + exit_result = self._close_with_exception_info(exc_type, exc_value, tb) + pop_context() + + return exit_result + + @contextmanager + def scope(self, cleanup: bool = True) -> cabc.Iterator[Context]: + """This helper method can be used with the context object to promote + it to the current thread local (see :func:`get_current_context`). + The default behavior of this is to invoke the cleanup functions which + can be disabled by setting `cleanup` to `False`. The cleanup + functions are typically used for things such as closing file handles. + + If the cleanup is intended the context object can also be directly + used as a context manager. + + Example usage:: + + with ctx.scope(): + assert get_current_context() is ctx + + This is equivalent:: + + with ctx: + assert get_current_context() is ctx + + .. versionadded:: 5.0 + + :param cleanup: controls if the cleanup functions should be run or + not. The default is to run these functions. In + some situations the context only wants to be + temporarily pushed in which case this can be disabled. + Nested pushes automatically defer the cleanup. + """ + if not cleanup: + self._depth += 1 + try: + with self as rv: + yield rv + finally: + if not cleanup: + self._depth -= 1 + + @property + def meta(self) -> dict[str, t.Any]: + """This is a dictionary which is shared with all the contexts + that are nested. 
It exists so that click utilities can store some + state here if they need to. It is however the responsibility of + that code to manage this dictionary well. + + The keys are supposed to be unique dotted strings. For instance + module paths are a good choice for it. What is stored in there is + irrelevant for the operation of click. However what is important is + that code that places data here adheres to the general semantics of + the system. + + Example usage:: + + LANG_KEY = f'{__name__}.lang' + + def set_language(value): + ctx = get_current_context() + ctx.meta[LANG_KEY] = value + + def get_language(): + return get_current_context().meta.get(LANG_KEY, 'en_US') + + .. versionadded:: 5.0 + """ + return self._meta + + def make_formatter(self) -> HelpFormatter: + """Creates the :class:`~click.HelpFormatter` for the help and + usage output. + + To quickly customize the formatter class used without overriding + this method, set the :attr:`formatter_class` attribute. + + .. versionchanged:: 8.0 + Added the :attr:`formatter_class` attribute. + """ + return self.formatter_class( + width=self.terminal_width, max_width=self.max_content_width + ) + + def with_resource(self, context_manager: AbstractContextManager[V]) -> V: + """Register a resource as if it were used in a ``with`` + statement. The resource will be cleaned up when the context is + popped. + + Uses :meth:`contextlib.ExitStack.enter_context`. It calls the + resource's ``__enter__()`` method and returns the result. When + the context is popped, it closes the stack, which calls the + resource's ``__exit__()`` method. + + To register a cleanup function for something that isn't a + context manager, use :meth:`call_on_close`. Or use something + from :mod:`contextlib` to turn it into a context manager first. + + .. code-block:: python + + @click.group() + @click.option("--name") + @click.pass_context + def cli(ctx): + ctx.obj = ctx.with_resource(connect_db(name)) + + :param context_manager: The context manager to enter. + :return: Whatever ``context_manager.__enter__()`` returns. + + .. versionadded:: 8.0 + """ + return self._exit_stack.enter_context(context_manager) + + def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: + """Register a function to be called when the context tears down. + + This can be used to close resources opened during the script + execution. Resources that support Python's context manager + protocol which would be used in a ``with`` statement should be + registered with :meth:`with_resource` instead. + + :param f: The function to execute on teardown. + """ + return self._exit_stack.callback(f) + + def close(self) -> None: + """Invoke all close callbacks registered with + :meth:`call_on_close`, and exit all context managers entered + with :meth:`with_resource`. + """ + self._close_with_exception_info(None, None, None) + + def _close_with_exception_info( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + tb: TracebackType | None, + ) -> bool | None: + """Unwind the exit stack by calling its :meth:`__exit__` providing the exception + information to allow for exception handling by the various resources registered + using :meth;`with_resource` + + :return: Whatever ``exit_stack.__exit__()`` returns. + """ + exit_result = self._exit_stack.__exit__(exc_type, exc_value, tb) + # In case the context is reused, create a new exit stack. + self._exit_stack = ExitStack() + + return exit_result + + @property + def command_path(self) -> str: + """The computed command path. 
This is used for the ``usage`` + information on the help page. It's automatically created by + combining the info names of the chain of contexts to the root. + """ + rv = "" + if self.info_name is not None: + rv = self.info_name + if self.parent is not None: + parent_command_path = [self.parent.command_path] + + if isinstance(self.parent.command, Command): + for param in self.parent.command.get_params(self): + parent_command_path.extend(param.get_usage_pieces(self)) + + rv = f"{' '.join(parent_command_path)} {rv}" + return rv.lstrip() + + def find_root(self) -> Context: + """Finds the outermost context.""" + node = self + while node.parent is not None: + node = node.parent + return node + + def find_object(self, object_type: type[V]) -> V | None: + """Finds the closest object of a given type.""" + node: Context | None = self + + while node is not None: + if isinstance(node.obj, object_type): + return node.obj + + node = node.parent + + return None + + def ensure_object(self, object_type: type[V]) -> V: + """Like :meth:`find_object` but sets the innermost object to a + new instance of `object_type` if it does not exist. + """ + rv = self.find_object(object_type) + if rv is None: + self.obj = rv = object_type() + return rv + + @t.overload + def lookup_default( + self, name: str, call: t.Literal[True] = True + ) -> t.Any | None: ... + + @t.overload + def lookup_default( + self, name: str, call: t.Literal[False] = ... + ) -> t.Any | t.Callable[[], t.Any] | None: ... + + def lookup_default(self, name: str, call: bool = True) -> t.Any | None: + """Get the default for a parameter from :attr:`default_map`. + + :param name: Name of the parameter. + :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. + """ + if self.default_map is not None: + value = self.default_map.get(name, UNSET) + + if call and callable(value): + return value() + + return value + + return UNSET + + def fail(self, message: str) -> t.NoReturn: + """Aborts the execution of the program with a specific error + message. + + :param message: the error message to fail with. + """ + raise UsageError(message, self) + + def abort(self) -> t.NoReturn: + """Aborts the script.""" + raise Abort() + + def exit(self, code: int = 0) -> t.NoReturn: + """Exits the application with a given exit code. + + .. versionchanged:: 8.2 + Callbacks and context managers registered with :meth:`call_on_close` + and :meth:`with_resource` are closed before exiting. + """ + self.close() + raise Exit(code) + + def get_usage(self) -> str: + """Helper method to get formatted usage string for the current + context and command. + """ + return self.command.get_usage(self) + + def get_help(self) -> str: + """Helper method to get formatted help page for the current + context and command. + """ + return self.command.get_help(self) + + def _make_sub_context(self, command: Command) -> Context: + """Create a new context of the same type as this context, but + for a new command. + + :meta private: + """ + return type(self)(command, info_name=command.name, parent=self) + + @t.overload + def invoke( + self, callback: t.Callable[..., V], /, *args: t.Any, **kwargs: t.Any + ) -> V: ... + + @t.overload + def invoke(self, callback: Command, /, *args: t.Any, **kwargs: t.Any) -> t.Any: ... + + def invoke( + self, callback: Command | t.Callable[..., V], /, *args: t.Any, **kwargs: t.Any + ) -> t.Any | V: + """Invokes a command callback in exactly the way it expects. 
There + are two ways to invoke this method: + + 1. the first argument can be a callback and all other arguments and + keyword arguments are forwarded directly to the function. + 2. the first argument is a click command object. In that case all + arguments are forwarded as well but proper click parameters + (options and click arguments) must be keyword arguments and Click + will fill in defaults. + + .. versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if :meth:`forward` is called at multiple levels. + + .. versionchanged:: 3.2 + A new context is created, and missing arguments use default values. + """ + if isinstance(callback, Command): + other_cmd = callback + + if other_cmd.callback is None: + raise TypeError( + "The given command does not have a callback that can be invoked." + ) + else: + callback = t.cast("t.Callable[..., V]", other_cmd.callback) + + ctx = self._make_sub_context(other_cmd) + + for param in other_cmd.params: + if param.name not in kwargs and param.expose_value: + default_value = param.get_default(ctx) + # We explicitly hide the :attr:`UNSET` value to the user, as we + # choose to make it an implementation detail. And because ``invoke`` + # has been designed as part of Click public API, we return ``None`` + # instead. Refs: + # https://github.com/pallets/click/issues/3066 + # https://github.com/pallets/click/issues/3065 + # https://github.com/pallets/click/pull/3068 + if default_value is UNSET: + default_value = None + kwargs[param.name] = param.type_cast_value( # type: ignore + ctx, default_value + ) + + # Track all kwargs as params, so that forward() will pass + # them on in subsequent calls. + ctx.params.update(kwargs) + else: + ctx = self + + with augment_usage_errors(self): + with ctx: + return callback(*args, **kwargs) + + def forward(self, cmd: Command, /, *args: t.Any, **kwargs: t.Any) -> t.Any: + """Similar to :meth:`invoke` but fills in default keyword + arguments from the current context if the other command expects + it. This cannot invoke callbacks directly, only other commands. + + .. versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if ``forward`` is called at multiple levels. + """ + # Can only forward to other commands, not direct callbacks. + if not isinstance(cmd, Command): + raise TypeError("Callback is not a command.") + + for param in self.params: + if param not in kwargs: + kwargs[param] = self.params[param] + + return self.invoke(cmd, *args, **kwargs) + + def set_parameter_source(self, name: str, source: ParameterSource) -> None: + """Set the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + :param name: The name of the parameter. + :param source: A member of :class:`~click.core.ParameterSource`. + """ + self._parameter_source[name] = source + + def get_parameter_source(self, name: str) -> ParameterSource | None: + """Get the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + This can be useful for determining when a user specified a value + on the command line that is the same as the default value. It + will be :attr:`~click.core.ParameterSource.DEFAULT` only if the + value was actually taken from the default. + + :param name: The name of the parameter. + :rtype: ParameterSource + + .. versionchanged:: 8.0 + Returns ``None`` if the parameter was not provided from any + source. 
+ """ + return self._parameter_source.get(name) + + +class Command: + """Commands are the basic building block of command line interfaces in + Click. A basic command handles command line parsing and might dispatch + more parsing to commands nested below it. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + :param callback: the callback to invoke. This is optional. + :param params: the parameters to register with this command. This can + be either :class:`Option` or :class:`Argument` objects. + :param help: the help string to use for this command. + :param epilog: like the help string but it's printed at the end of the + help page after everything else. + :param short_help: the short help to use for this command. This is + shown on the command listing of the parent command. + :param add_help_option: by default each command registers a ``--help`` + option. This can be disabled by this parameter. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is disabled by default. + If enabled this will add ``--help`` as argument + if no arguments are passed + :param hidden: hide this command from help outputs. + :param deprecated: If ``True`` or non-empty string, issues a message + indicating that the command is deprecated and highlights + its deprecation in --help. The message can be customized + by using a string as the value. + + .. versionchanged:: 8.2 + This is the base class for all commands, not ``BaseCommand``. + ``deprecated`` can be set to a string as well to customize the + deprecation message. + + .. versionchanged:: 8.1 + ``help``, ``epilog``, and ``short_help`` are stored unprocessed, + all formatting is done when outputting help text, not at init, + and is done even if not using the ``@command`` decorator. + + .. versionchanged:: 8.0 + Added a ``repr`` showing the command name. + + .. versionchanged:: 7.1 + Added the ``no_args_is_help`` parameter. + + .. versionchanged:: 2.0 + Added the ``context_settings`` parameter. + """ + + #: The context class to create with :meth:`make_context`. + #: + #: .. versionadded:: 8.0 + context_class: type[Context] = Context + + #: the default for the :attr:`Context.allow_extra_args` flag. + allow_extra_args = False + + #: the default for the :attr:`Context.allow_interspersed_args` flag. + allow_interspersed_args = True + + #: the default for the :attr:`Context.ignore_unknown_options` flag. + ignore_unknown_options = False + + def __init__( + self, + name: str | None, + context_settings: cabc.MutableMapping[str, t.Any] | None = None, + callback: t.Callable[..., t.Any] | None = None, + params: list[Parameter] | None = None, + help: str | None = None, + epilog: str | None = None, + short_help: str | None = None, + options_metavar: str | None = "[OPTIONS]", + add_help_option: bool = True, + no_args_is_help: bool = False, + hidden: bool = False, + deprecated: bool | str = False, + ) -> None: + #: the name the command thinks it has. Upon registering a command + #: on a :class:`Group` the group will default the command name + #: with this information. You should instead use the + #: :class:`Context`\'s :attr:`~Context.info_name` attribute. + self.name = name + + if context_settings is None: + context_settings = {} + + #: an optional dictionary with defaults passed to the context. 
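# --- Editorial example sketch (not part of the vendored click source) ---
# A Command can be assembled directly from the plumbing described in the
# class docstring above, instead of via the @click.command decorator.
# The callback and option names below are hypothetical.
import click

def greet(name):
    click.echo(f"Hello, {name}!")

hello = click.Command(
    name="hello",
    callback=greet,
    params=[click.Option(["--name"], default="world", show_default=True)],
    help="Greet somebody by name.",
)

if __name__ == "__main__":
    hello()  # calling the instance is an alias for main()
# --- end editorial example ---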
+ self.context_settings: cabc.MutableMapping[str, t.Any] = context_settings + + #: the callback to execute when the command fires. This might be + #: `None` in which case nothing happens. + self.callback = callback + #: the list of parameters for this command in the order they + #: should show up in the help page and execute. Eager parameters + #: will automatically be handled before non eager ones. + self.params: list[Parameter] = params or [] + self.help = help + self.epilog = epilog + self.options_metavar = options_metavar + self.short_help = short_help + self.add_help_option = add_help_option + self._help_option = None + self.no_args_is_help = no_args_is_help + self.hidden = hidden + self.deprecated = deprecated + + def to_info_dict(self, ctx: Context) -> dict[str, t.Any]: + return { + "name": self.name, + "params": [param.to_info_dict() for param in self.get_params(ctx)], + "help": self.help, + "epilog": self.epilog, + "short_help": self.short_help, + "hidden": self.hidden, + "deprecated": self.deprecated, + } + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def get_usage(self, ctx: Context) -> str: + """Formats the usage line into a string and returns it. + + Calls :meth:`format_usage` internally. + """ + formatter = ctx.make_formatter() + self.format_usage(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_params(self, ctx: Context) -> list[Parameter]: + params = self.params + help_option = self.get_help_option(ctx) + + if help_option is not None: + params = [*params, help_option] + + if __debug__: + import warnings + + opts = [opt for param in params for opt in param.opts] + opts_counter = Counter(opts) + duplicate_opts = (opt for opt, count in opts_counter.items() if count > 1) + + for duplicate_opt in duplicate_opts: + warnings.warn( + ( + f"The parameter {duplicate_opt} is used more than once. " + "Remove its duplicate as parameters should be unique." + ), + stacklevel=3, + ) + + return params + + def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the usage line into the formatter. + + This is a low-level method called by :meth:`get_usage`. + """ + pieces = self.collect_usage_pieces(ctx) + formatter.write_usage(ctx.command_path, " ".join(pieces)) + + def collect_usage_pieces(self, ctx: Context) -> list[str]: + """Returns all the pieces that go into the usage line and returns + it as a list of strings. + """ + rv = [self.options_metavar] if self.options_metavar else [] + + for param in self.get_params(ctx): + rv.extend(param.get_usage_pieces(ctx)) + + return rv + + def get_help_option_names(self, ctx: Context) -> list[str]: + """Returns the names for the help option.""" + all_names = set(ctx.help_option_names) + for param in self.params: + all_names.difference_update(param.opts) + all_names.difference_update(param.secondary_opts) + return list(all_names) + + def get_help_option(self, ctx: Context) -> Option | None: + """Returns the help option object. + + Skipped if :attr:`add_help_option` is ``False``. + + .. versionchanged:: 8.1.8 + The help option is now cached to avoid creating it multiple times. + """ + help_option_names = self.get_help_option_names(ctx) + + if not help_option_names or not self.add_help_option: + return None + + # Cache the help option object in private _help_option attribute to + # avoid creating it multiple times. Not doing this will break the + # callback odering by iter_params_for_processing(), which relies on + # object comparison. 
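# --- Editorial example sketch (not part of the vendored click source) ---
# get_usage() and get_help() shown above can be driven outside a normal
# invocation by creating a Context for the command. Hypothetical command.
import click

@click.command()
@click.option("--name", default="world")
def hello(name):
    click.echo(f"Hello, {name}!")

with click.Context(hello, info_name="hello") as ctx:
    print(hello.get_usage(ctx))  # e.g. "Usage: hello [OPTIONS]"
    print(hello.get_help(ctx))   # full help page, including --name and --help
# --- end editorial example ---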
+ if self._help_option is None: + # Avoid circular import. + from .decorators import help_option + + # Apply help_option decorator and pop resulting option + help_option(*help_option_names)(self) + self._help_option = self.params.pop() # type: ignore[assignment] + + return self._help_option + + def make_parser(self, ctx: Context) -> _OptionParser: + """Creates the underlying option parser for this command.""" + parser = _OptionParser(ctx) + for param in self.get_params(ctx): + param.add_to_parser(parser, ctx) + return parser + + def get_help(self, ctx: Context) -> str: + """Formats the help into a string and returns it. + + Calls :meth:`format_help` internally. + """ + formatter = ctx.make_formatter() + self.format_help(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_short_help_str(self, limit: int = 45) -> str: + """Gets short help for the command or makes it by shortening the + long help string. + """ + if self.short_help: + text = inspect.cleandoc(self.short_help) + elif self.help: + text = make_default_short_help(self.help, limit) + else: + text = "" + + if self.deprecated: + deprecated_message = ( + f"(DEPRECATED: {self.deprecated})" + if isinstance(self.deprecated, str) + else "(DEPRECATED)" + ) + text = _("{text} {deprecated_message}").format( + text=text, deprecated_message=deprecated_message + ) + + return text.strip() + + def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help into the formatter if it exists. + + This is a low-level method called by :meth:`get_help`. + + This calls the following methods: + + - :meth:`format_usage` + - :meth:`format_help_text` + - :meth:`format_options` + - :meth:`format_epilog` + """ + self.format_usage(ctx, formatter) + self.format_help_text(ctx, formatter) + self.format_options(ctx, formatter) + self.format_epilog(ctx, formatter) + + def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help text to the formatter if it exists.""" + if self.help is not None: + # truncate the help text to the first form feed + text = inspect.cleandoc(self.help).partition("\f")[0] + else: + text = "" + + if self.deprecated: + deprecated_message = ( + f"(DEPRECATED: {self.deprecated})" + if isinstance(self.deprecated, str) + else "(DEPRECATED)" + ) + text = _("{text} {deprecated_message}").format( + text=text, deprecated_message=deprecated_message + ) + + if text: + formatter.write_paragraph() + + with formatter.indentation(): + formatter.write_text(text) + + def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes all the options into the formatter if they exist.""" + opts = [] + for param in self.get_params(ctx): + rv = param.get_help_record(ctx) + if rv is not None: + opts.append(rv) + + if opts: + with formatter.section(_("Options")): + formatter.write_dl(opts) + + def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the epilog into the formatter if it exists.""" + if self.epilog: + epilog = inspect.cleandoc(self.epilog) + formatter.write_paragraph() + + with formatter.indentation(): + formatter.write_text(epilog) + + def make_context( + self, + info_name: str | None, + args: list[str], + parent: Context | None = None, + **extra: t.Any, + ) -> Context: + """This function when given an info name and arguments will kick + off the parsing and create a new :class:`Context`. It does not + invoke the actual command callback though. 
+ + To quickly customize the context class used without overriding + this method, set the :attr:`context_class` attribute. + + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it's usually + the name of the script, for commands below it's + the name of the command. + :param args: the arguments to parse as list of strings. + :param parent: the parent context if available. + :param extra: extra keyword arguments forwarded to the context + constructor. + + .. versionchanged:: 8.0 + Added the :attr:`context_class` attribute. + """ + for key, value in self.context_settings.items(): + if key not in extra: + extra[key] = value + + ctx = self.context_class(self, info_name=info_name, parent=parent, **extra) + + with ctx.scope(cleanup=False): + self.parse_args(ctx, args) + return ctx + + def parse_args(self, ctx: Context, args: list[str]) -> list[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + raise NoArgsIsHelpError(ctx) + + parser = self.make_parser(ctx) + opts, args, param_order = parser.parse_args(args=args) + + for param in iter_params_for_processing(param_order, self.get_params(ctx)): + _, args = param.handle_parse_result(ctx, opts, args) + + # We now have all parameters' values into `ctx.params`, but the data may contain + # the `UNSET` sentinel. + # Convert `UNSET` to `None` to ensure that the user doesn't see `UNSET`. + # + # Waiting until after the initial parse to convert allows us to treat `UNSET` + # more like a missing value when multiple params use the same name. + # Refs: + # https://github.com/pallets/click/issues/3071 + # https://github.com/pallets/click/pull/3079 + for name, value in ctx.params.items(): + if value is UNSET: + ctx.params[name] = None + + if args and not ctx.allow_extra_args and not ctx.resilient_parsing: + ctx.fail( + ngettext( + "Got unexpected extra argument ({args})", + "Got unexpected extra arguments ({args})", + len(args), + ).format(args=" ".join(map(str, args))) + ) + + ctx.args = args + ctx._opt_prefixes.update(parser._opt_prefixes) + return args + + def invoke(self, ctx: Context) -> t.Any: + """Given a context, this invokes the attached callback (if it exists) + in the right way. + """ + if self.deprecated: + extra_message = ( + f" {self.deprecated}" if isinstance(self.deprecated, str) else "" + ) + message = _( + "DeprecationWarning: The command {name!r} is deprecated.{extra_message}" + ).format(name=self.name, extra_message=extra_message) + echo(style(message, fg="red"), err=True) + + if self.callback is not None: + return ctx.invoke(self.callback, **ctx.params) + + def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]: + """Return a list of completions for the incomplete value. Looks + at the names of options and chained multi-commands. + + Any command could be part of a chained multi-command, so sibling + commands are valid at any point during command completion. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results: list[CompletionItem] = [] + + if incomplete and not incomplete[0].isalnum(): + for param in self.get_params(ctx): + if ( + not isinstance(param, Option) + or param.hidden + or ( + not param.multiple + and ctx.get_parameter_source(param.name) # type: ignore + is ParameterSource.COMMANDLINE + ) + ): + continue + + results.extend( + CompletionItem(name, help=param.help) + for name in [*param.opts, *param.secondary_opts] + if name.startswith(incomplete) + ) + + while ctx.parent is not None: + ctx = ctx.parent + + if isinstance(ctx.command, Group) and ctx.command.chain: + results.extend( + CompletionItem(name, help=command.get_short_help_str()) + for name, command in _complete_visible_commands(ctx, incomplete) + if name not in ctx._protected_args + ) + + return results + + @t.overload + def main( + self, + args: cabc.Sequence[str] | None = None, + prog_name: str | None = None, + complete_var: str | None = None, + standalone_mode: t.Literal[True] = True, + **extra: t.Any, + ) -> t.NoReturn: ... + + @t.overload + def main( + self, + args: cabc.Sequence[str] | None = None, + prog_name: str | None = None, + complete_var: str | None = None, + standalone_mode: bool = ..., + **extra: t.Any, + ) -> t.Any: ... + + def main( + self, + args: cabc.Sequence[str] | None = None, + prog_name: str | None = None, + complete_var: str | None = None, + standalone_mode: bool = True, + windows_expand_args: bool = True, + **extra: t.Any, + ) -> t.Any: + """This is the way to invoke a script with all the bells and + whistles as a command line application. This will always terminate + the application after a call. If this is not wanted, ``SystemExit`` + needs to be caught. + + This method is also available by directly calling the instance of + a :class:`Command`. + + :param args: the arguments that should be used for parsing. If not + provided, ``sys.argv[1:]`` is used. + :param prog_name: the program name that should be used. By default + the program name is constructed by taking the file + name from ``sys.argv[0]``. + :param complete_var: the environment variable that controls the + bash completion support. The default is + ``"__COMPLETE"`` with prog_name in + uppercase. + :param standalone_mode: the default behavior is to invoke the script + in standalone mode. Click will then + handle exceptions and convert them into + error messages and the function will never + return but shut down the interpreter. If + this is set to `False` they will be + propagated to the caller and the return + value of this function is the return value + of :meth:`invoke`. + :param windows_expand_args: Expand glob patterns, user dir, and + env vars in command line args on Windows. + :param extra: extra keyword arguments are forwarded to the context + constructor. See :class:`Context` for more information. + + .. versionchanged:: 8.0.1 + Added the ``windows_expand_args`` parameter to allow + disabling command line arg expansion on Windows. + + .. versionchanged:: 8.0 + When taking arguments from ``sys.argv`` on Windows, glob + patterns, user dir, and env vars are expanded. + + .. versionchanged:: 3.0 + Added the ``standalone_mode`` parameter. + """ + if args is None: + args = sys.argv[1:] + + if os.name == "nt" and windows_expand_args: + args = _expand_args(args) + else: + args = list(args) + + if prog_name is None: + prog_name = _detect_program_name() + + # Process shell completion requests and exit early. 
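# --- Editorial example sketch (not part of the vendored click source) ---
# With ``standalone_mode=False`` main() returns the callback's return value
# and lets exceptions propagate instead of exiting, as described above.
# Hypothetical command for illustration.
import click

@click.command()
@click.option("--n", default=2, type=int)
def double(n):
    return n * 2

result = double.main(["--n", "21"], standalone_mode=False)
assert result == 42
# --- end editorial example ---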
+ self._main_shell_completion(extra, prog_name, complete_var) + + try: + try: + with self.make_context(prog_name, args, **extra) as ctx: + rv = self.invoke(ctx) + if not standalone_mode: + return rv + # it's not safe to `ctx.exit(rv)` here! + # note that `rv` may actually contain data like "1" which + # has obvious effects + # more subtle case: `rv=[None, None]` can come out of + # chained commands which all returned `None` -- so it's not + # even always obvious that `rv` indicates success/failure + # by its truthiness/falsiness + ctx.exit() + except (EOFError, KeyboardInterrupt) as e: + echo(file=sys.stderr) + raise Abort() from e + except ClickException as e: + if not standalone_mode: + raise + e.show() + sys.exit(e.exit_code) + except OSError as e: + if e.errno == errno.EPIPE: + sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout)) + sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr)) + sys.exit(1) + else: + raise + except Exit as e: + if standalone_mode: + sys.exit(e.exit_code) + else: + # in non-standalone mode, return the exit code + # note that this is only reached if `self.invoke` above raises + # an Exit explicitly -- thus bypassing the check there which + # would return its result + # the results of non-standalone execution may therefore be + # somewhat ambiguous: if there are codepaths which lead to + # `ctx.exit(1)` and to `return 1`, the caller won't be able to + # tell the difference between the two + return e.exit_code + except Abort: + if not standalone_mode: + raise + echo(_("Aborted!"), file=sys.stderr) + sys.exit(1) + + def _main_shell_completion( + self, + ctx_args: cabc.MutableMapping[str, t.Any], + prog_name: str, + complete_var: str | None = None, + ) -> None: + """Check if the shell is asking for tab completion, process + that, then exit early. Called from :meth:`main` before the + program is invoked. + + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. Defaults to + ``_{PROG_NAME}_COMPLETE``. + + .. versionchanged:: 8.2.0 + Dots (``.``) in ``prog_name`` are replaced with underscores (``_``). + """ + if complete_var is None: + complete_name = prog_name.replace("-", "_").replace(".", "_") + complete_var = f"_{complete_name}_COMPLETE".upper() + + instruction = os.environ.get(complete_var) + + if not instruction: + return + + from .shell_completion import shell_complete + + rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction) + sys.exit(rv) + + def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: + """Alias for :meth:`main`.""" + return self.main(*args, **kwargs) + + +class _FakeSubclassCheck(type): + def __subclasscheck__(cls, subclass: type) -> bool: + return issubclass(subclass, cls.__bases__[0]) + + def __instancecheck__(cls, instance: t.Any) -> bool: + return isinstance(instance, cls.__bases__[0]) + + +class _BaseCommand(Command, metaclass=_FakeSubclassCheck): + """ + .. deprecated:: 8.2 + Will be removed in Click 9.0. Use ``Command`` instead. + """ + + +class Group(Command): + """A group is a command that nests other commands (or more groups). + + :param name: The name of the group command. + :param commands: Map names to :class:`Command` objects. Can be a list, which + will use :attr:`Command.name` as the keys. + :param invoke_without_command: Invoke the group's callback even if a + subcommand is not given. + :param no_args_is_help: If no arguments are given, show the group's help and + exit. 
Defaults to the opposite of ``invoke_without_command``. + :param subcommand_metavar: How to represent the subcommand argument in help. + The default will represent whether ``chain`` is set or not. + :param chain: Allow passing more than one subcommand argument. After parsing + a command's arguments, if any arguments remain another command will be + matched, and so on. + :param result_callback: A function to call after the group's and + subcommand's callbacks. The value returned by the subcommand is passed. + If ``chain`` is enabled, the value will be a list of values returned by + all the commands. If ``invoke_without_command`` is enabled, the value + will be the value returned by the group's callback, or an empty list if + ``chain`` is enabled. + :param kwargs: Other arguments passed to :class:`Command`. + + .. versionchanged:: 8.0 + The ``commands`` argument can be a list of command objects. + + .. versionchanged:: 8.2 + Merged with and replaces the ``MultiCommand`` base class. + """ + + allow_extra_args = True + allow_interspersed_args = False + + #: If set, this is used by the group's :meth:`command` decorator + #: as the default :class:`Command` class. This is useful to make all + #: subcommands use a custom command class. + #: + #: .. versionadded:: 8.0 + command_class: type[Command] | None = None + + #: If set, this is used by the group's :meth:`group` decorator + #: as the default :class:`Group` class. This is useful to make all + #: subgroups use a custom group class. + #: + #: If set to the special value :class:`type` (literally + #: ``group_class = type``), this group's class will be used as the + #: default class. This makes a custom group class continue to make + #: custom groups. + #: + #: .. versionadded:: 8.0 + group_class: type[Group] | type[type] | None = None + # Literal[type] isn't valid, so use Type[type] + + def __init__( + self, + name: str | None = None, + commands: cabc.MutableMapping[str, Command] + | cabc.Sequence[Command] + | None = None, + invoke_without_command: bool = False, + no_args_is_help: bool | None = None, + subcommand_metavar: str | None = None, + chain: bool = False, + result_callback: t.Callable[..., t.Any] | None = None, + **kwargs: t.Any, + ) -> None: + super().__init__(name, **kwargs) + + if commands is None: + commands = {} + elif isinstance(commands, abc.Sequence): + commands = {c.name: c for c in commands if c.name is not None} + + #: The registered subcommands by their exported names. + self.commands: cabc.MutableMapping[str, Command] = commands + + if no_args_is_help is None: + no_args_is_help = not invoke_without_command + + self.no_args_is_help = no_args_is_help + self.invoke_without_command = invoke_without_command + + if subcommand_metavar is None: + if chain: + subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..." + else: + subcommand_metavar = "COMMAND [ARGS]..." + + self.subcommand_metavar = subcommand_metavar + self.chain = chain + # The result callback that is stored. This can be set or + # overridden with the :func:`result_callback` decorator. + self._result_callback = result_callback + + if self.chain: + for param in self.params: + if isinstance(param, Argument) and not param.required: + raise RuntimeError( + "A group in chain mode cannot have optional arguments." 
+ ) + + def to_info_dict(self, ctx: Context) -> dict[str, t.Any]: + info_dict = super().to_info_dict(ctx) + commands = {} + + for name in self.list_commands(ctx): + command = self.get_command(ctx, name) + + if command is None: + continue + + sub_ctx = ctx._make_sub_context(command) + + with sub_ctx.scope(cleanup=False): + commands[name] = command.to_info_dict(sub_ctx) + + info_dict.update(commands=commands, chain=self.chain) + return info_dict + + def add_command(self, cmd: Command, name: str | None = None) -> None: + """Registers another :class:`Command` with this group. If the name + is not provided, the name of the command is used. + """ + name = name or cmd.name + if name is None: + raise TypeError("Command has no name.") + _check_nested_chain(self, name, cmd, register=True) + self.commands[name] = cmd + + @t.overload + def command(self, __func: t.Callable[..., t.Any]) -> Command: ... + + @t.overload + def command( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], Command]: ... + + def command( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], Command] | Command: + """A shortcut decorator for declaring and attaching a command to + the group. This takes the same arguments as :func:`command` and + immediately registers the created command with this group by + calling :meth:`add_command`. + + To customize the command class used, set the + :attr:`command_class` attribute. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.0 + Added the :attr:`command_class` attribute. + """ + from .decorators import command + + func: t.Callable[..., t.Any] | None = None + + if args and callable(args[0]): + assert len(args) == 1 and not kwargs, ( + "Use 'command(**kwargs)(callable)' to provide arguments." + ) + (func,) = args + args = () + + if self.command_class and kwargs.get("cls") is None: + kwargs["cls"] = self.command_class + + def decorator(f: t.Callable[..., t.Any]) -> Command: + cmd: Command = command(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + if func is not None: + return decorator(func) + + return decorator + + @t.overload + def group(self, __func: t.Callable[..., t.Any]) -> Group: ... + + @t.overload + def group( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], Group]: ... + + def group( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], Group] | Group: + """A shortcut decorator for declaring and attaching a group to + the group. This takes the same arguments as :func:`group` and + immediately registers the created group with this group by + calling :meth:`add_command`. + + To customize the group class used, set the :attr:`group_class` + attribute. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.0 + Added the :attr:`group_class` attribute. + """ + from .decorators import group + + func: t.Callable[..., t.Any] | None = None + + if args and callable(args[0]): + assert len(args) == 1 and not kwargs, ( + "Use 'group(**kwargs)(callable)' to provide arguments." 
+ ) + (func,) = args + args = () + + if self.group_class is not None and kwargs.get("cls") is None: + if self.group_class is type: + kwargs["cls"] = type(self) + else: + kwargs["cls"] = self.group_class + + def decorator(f: t.Callable[..., t.Any]) -> Group: + cmd: Group = group(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + if func is not None: + return decorator(func) + + return decorator + + def result_callback(self, replace: bool = False) -> t.Callable[[F], F]: + """Adds a result callback to the command. By default if a + result callback is already registered this will chain them but + this can be disabled with the `replace` parameter. The result + callback is invoked with the return value of the subcommand + (or the list of return values from all subcommands if chaining + is enabled) as well as the parameters as they would be passed + to the main callback. + + Example:: + + @click.group() + @click.option('-i', '--input', default=23) + def cli(input): + return 42 + + @cli.result_callback() + def process_result(result, input): + return result + input + + :param replace: if set to `True` an already existing result + callback will be removed. + + .. versionchanged:: 8.0 + Renamed from ``resultcallback``. + + .. versionadded:: 3.0 + """ + + def decorator(f: F) -> F: + old_callback = self._result_callback + + if old_callback is None or replace: + self._result_callback = f + return f + + def function(value: t.Any, /, *args: t.Any, **kwargs: t.Any) -> t.Any: + inner = old_callback(value, *args, **kwargs) + return f(inner, *args, **kwargs) + + self._result_callback = rv = update_wrapper(t.cast(F, function), f) + return rv # type: ignore[return-value] + + return decorator + + def get_command(self, ctx: Context, cmd_name: str) -> Command | None: + """Given a context and a command name, this returns a :class:`Command` + object if it exists or returns ``None``. + """ + return self.commands.get(cmd_name) + + def list_commands(self, ctx: Context) -> list[str]: + """Returns a list of subcommand names in the order they should appear.""" + return sorted(self.commands) + + def collect_usage_pieces(self, ctx: Context) -> list[str]: + rv = super().collect_usage_pieces(ctx) + rv.append(self.subcommand_metavar) + return rv + + def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: + super().format_options(ctx, formatter) + self.format_commands(ctx, formatter) + + def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None: + """Extra format methods for multi methods that adds all the commands + after the options. + """ + commands = [] + for subcommand in self.list_commands(ctx): + cmd = self.get_command(ctx, subcommand) + # What is this, the tool lied about a command. 
Ignore it + if cmd is None: + continue + if cmd.hidden: + continue + + commands.append((subcommand, cmd)) + + # allow for 3 times the default spacing + if len(commands): + limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) + + rows = [] + for subcommand, cmd in commands: + help = cmd.get_short_help_str(limit) + rows.append((subcommand, help)) + + if rows: + with formatter.section(_("Commands")): + formatter.write_dl(rows) + + def parse_args(self, ctx: Context, args: list[str]) -> list[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + raise NoArgsIsHelpError(ctx) + + rest = super().parse_args(ctx, args) + + if self.chain: + ctx._protected_args = rest + ctx.args = [] + elif rest: + ctx._protected_args, ctx.args = rest[:1], rest[1:] + + return ctx.args + + def invoke(self, ctx: Context) -> t.Any: + def _process_result(value: t.Any) -> t.Any: + if self._result_callback is not None: + value = ctx.invoke(self._result_callback, value, **ctx.params) + return value + + if not ctx._protected_args: + if self.invoke_without_command: + # No subcommand was invoked, so the result callback is + # invoked with the group return value for regular + # groups, or an empty list for chained groups. + with ctx: + rv = super().invoke(ctx) + return _process_result([] if self.chain else rv) + ctx.fail(_("Missing command.")) + + # Fetch args back out + args = [*ctx._protected_args, *ctx.args] + ctx.args = [] + ctx._protected_args = [] + + # If we're not in chain mode, we only allow the invocation of a + # single command but we also inform the current context about the + # name of the command to invoke. + if not self.chain: + # Make sure the context is entered so we do not clean up + # resources until the result processor has worked. + with ctx: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + ctx.invoked_subcommand = cmd_name + super().invoke(ctx) + sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) + with sub_ctx: + return _process_result(sub_ctx.command.invoke(sub_ctx)) + + # In chain mode we create the contexts step by step, but after the + # base command has been invoked. Because at that point we do not + # know the subcommands yet, the invoked subcommand attribute is + # set to ``*`` to inform the command that subcommands are executed + # but nothing else. + with ctx: + ctx.invoked_subcommand = "*" if args else None + super().invoke(ctx) + + # Otherwise we make every single context and invoke them in a + # chain. In that case the return value to the result processor + # is the list of all invoked subcommand's results. + contexts = [] + while args: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + sub_ctx = cmd.make_context( + cmd_name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + ) + contexts.append(sub_ctx) + args, sub_ctx.args = sub_ctx.args, [] + + rv = [] + for sub_ctx in contexts: + with sub_ctx: + rv.append(sub_ctx.command.invoke(sub_ctx)) + return _process_result(rv) + + def resolve_command( + self, ctx: Context, args: list[str] + ) -> tuple[str | None, Command | None, list[str]]: + cmd_name = make_str(args[0]) + original_cmd_name = cmd_name + + # Get the command + cmd = self.get_command(ctx, cmd_name) + + # If we can't find the command but there is a normalization + # function available, we try with that one. 
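# --- Editorial example sketch (not part of the vendored click source) ---
# Shows the Group.invoke() control flow above from the user's side:
# ``invoke_without_command`` plus ``ctx.invoked_subcommand``. Names are
# hypothetical.
import click

@click.group(invoke_without_command=True)
@click.pass_context
def cli(ctx):
    if ctx.invoked_subcommand is None:
        click.echo("Running without a subcommand")
    else:
        click.echo(f"About to invoke {ctx.invoked_subcommand}")

@cli.command()
def sync():
    click.echo("Syncing")
# --- end editorial example ---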
+ if cmd is None and ctx.token_normalize_func is not None: + cmd_name = ctx.token_normalize_func(cmd_name) + cmd = self.get_command(ctx, cmd_name) + + # If we don't find the command we want to show an error message + # to the user that it was not provided. However, there is + # something else we should do: if the first argument looks like + # an option we want to kick off parsing again for arguments to + # resolve things like --help which now should go to the main + # place. + if cmd is None and not ctx.resilient_parsing: + if _split_opt(cmd_name)[0]: + self.parse_args(ctx, args) + ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name)) + return cmd_name if cmd else None, cmd, args[1:] + + def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]: + """Return a list of completions for the incomplete value. Looks + at the names of options, subcommands, and chained + multi-commands. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results = [ + CompletionItem(name, help=command.get_short_help_str()) + for name, command in _complete_visible_commands(ctx, incomplete) + ] + results.extend(super().shell_complete(ctx, incomplete)) + return results + + +class _MultiCommand(Group, metaclass=_FakeSubclassCheck): + """ + .. deprecated:: 8.2 + Will be removed in Click 9.0. Use ``Group`` instead. + """ + + +class CommandCollection(Group): + """A :class:`Group` that looks up subcommands on other groups. If a command + is not found on this group, each registered source is checked in order. + Parameters on a source are not added to this group, and a source's callback + is not invoked when invoking its commands. In other words, this "flattens" + commands in many groups into this one group. + + :param name: The name of the group command. + :param sources: A list of :class:`Group` objects to look up commands from. + :param kwargs: Other arguments passed to :class:`Group`. + + .. versionchanged:: 8.2 + This is a subclass of ``Group``. Commands are looked up first on this + group, then each of its sources. + """ + + def __init__( + self, + name: str | None = None, + sources: list[Group] | None = None, + **kwargs: t.Any, + ) -> None: + super().__init__(name, **kwargs) + #: The list of registered groups. + self.sources: list[Group] = sources or [] + + def add_source(self, group: Group) -> None: + """Add a group as a source of commands.""" + self.sources.append(group) + + def get_command(self, ctx: Context, cmd_name: str) -> Command | None: + rv = super().get_command(ctx, cmd_name) + + if rv is not None: + return rv + + for source in self.sources: + rv = source.get_command(ctx, cmd_name) + + if rv is not None: + if self.chain: + _check_nested_chain(self, cmd_name, rv) + + return rv + + return None + + def list_commands(self, ctx: Context) -> list[str]: + rv: set[str] = set(super().list_commands(ctx)) + + for source in self.sources: + rv.update(source.list_commands(ctx)) + + return sorted(rv) + + +def _check_iter(value: t.Any) -> cabc.Iterator[t.Any]: + """Check if the value is iterable but not a string. Raises a type + error, or return an iterator over the value. + """ + if isinstance(value, str): + raise TypeError + + return iter(value) + + +class Parameter: + r"""A parameter to a command comes in two versions: they are either + :class:`Option`\s or :class:`Argument`\s. 
Other subclasses are currently + not supported by design as some of the internals for parsing are + intentionally not finalized. + + Some settings are supported by both options and arguments. + + :param param_decls: the parameter declarations for this option or + argument. This is a list of flags or argument + names. + :param type: the type that should be used. Either a :class:`ParamType` + or a Python type. The latter is converted into the former + automatically if supported. + :param required: controls if this is optional or not. + :param default: the default value if omitted. This can also be a callable, + in which case it's invoked when the default is needed + without any arguments. + :param callback: A function to further process or validate the value + after type conversion. It is called as ``f(ctx, param, value)`` + and must return the value. It is called for all sources, + including prompts. + :param nargs: the number of arguments to match. If not ``1`` the return + value is a tuple instead of single value. The default for + nargs is ``1`` (except if the type is a tuple, then it's + the arity of the tuple). If ``nargs=-1``, all remaining + parameters are collected. + :param metavar: how the value is represented in the help page. + :param expose_value: if this is `True` then the value is passed onwards + to the command callback and stored on the context, + otherwise it's skipped. + :param is_eager: eager values are processed before non eager ones. This + should not be set for arguments or it will inverse the + order of processing. + :param envvar: environment variable(s) that are used to provide a default value for + this parameter. This can be a string or a sequence of strings. If a sequence is + given, only the first non-empty environment variable is used for the parameter. + :param shell_complete: A function that returns custom shell + completions. Used instead of the param's type completion if + given. Takes ``ctx, param, incomplete`` and must return a list + of :class:`~click.shell_completion.CompletionItem` or a list of + strings. + :param deprecated: If ``True`` or non-empty string, issues a message + indicating that the argument is deprecated and highlights + its deprecation in --help. The message can be customized + by using a string as the value. A deprecated parameter + cannot be required, a ValueError will be raised otherwise. + + .. versionchanged:: 8.2.0 + Introduction of ``deprecated``. + + .. versionchanged:: 8.2 + Adding duplicate parameter names to a :class:`~click.core.Command` will + result in a ``UserWarning`` being shown. + + .. versionchanged:: 8.2 + Adding duplicate parameter names to a :class:`~click.core.Command` will + result in a ``UserWarning`` being shown. + + .. versionchanged:: 8.0 + ``process_value`` validates required parameters and bounded + ``nargs``, and invokes the parameter callback before returning + the value. This allows the callback to validate prompts. + ``full_process_value`` is removed. + + .. versionchanged:: 8.0 + ``autocompletion`` is renamed to ``shell_complete`` and has new + semantics described above. The old name is deprecated and will + be removed in 8.1, until then it will be wrapped to match the + new requirements. + + .. versionchanged:: 8.0 + For ``multiple=True, nargs>1``, the default must be a list of + tuples. + + .. versionchanged:: 8.0 + Setting a default is no longer required for ``nargs>1``, it will + default to ``None``. ``multiple=True`` or ``nargs=-1`` will + default to ``()``. + + .. 
versionchanged:: 7.1 + Empty environment variables are ignored rather than taking the + empty string value. This makes it possible for scripts to clear + variables if they can't unset them. + + .. versionchanged:: 2.0 + Changed signature for parameter callback to also be passed the + parameter. The old callback format will still work, but it will + raise a warning to give you a chance to migrate the code easier. + """ + + param_type_name = "parameter" + + def __init__( + self, + param_decls: cabc.Sequence[str] | None = None, + type: types.ParamType | t.Any | None = None, + required: bool = False, + # XXX The default historically embed two concepts: + # - the declaration of a Parameter object carrying the default (handy to + # arbitrage the default value of coupled Parameters sharing the same + # self.name, like flag options), + # - and the actual value of the default. + # It is confusing and is the source of many issues discussed in: + # https://github.com/pallets/click/pull/3030 + # In the future, we might think of splitting it in two, not unlike + # Option.is_flag and Option.flag_value: we could have something like + # Parameter.is_default and Parameter.default_value. + default: t.Any | t.Callable[[], t.Any] | None = UNSET, + callback: t.Callable[[Context, Parameter, t.Any], t.Any] | None = None, + nargs: int | None = None, + multiple: bool = False, + metavar: str | None = None, + expose_value: bool = True, + is_eager: bool = False, + envvar: str | cabc.Sequence[str] | None = None, + shell_complete: t.Callable[ + [Context, Parameter, str], list[CompletionItem] | list[str] + ] + | None = None, + deprecated: bool | str = False, + ) -> None: + self.name: str | None + self.opts: list[str] + self.secondary_opts: list[str] + self.name, self.opts, self.secondary_opts = self._parse_decls( + param_decls or (), expose_value + ) + self.type: types.ParamType = types.convert_type(type, default) + + # Default nargs to what the type tells us if we have that + # information available. + if nargs is None: + if self.type.is_composite: + nargs = self.type.arity + else: + nargs = 1 + + self.required = required + self.callback = callback + self.nargs = nargs + self.multiple = multiple + self.expose_value = expose_value + self.default: t.Any | t.Callable[[], t.Any] | None = default + self.is_eager = is_eager + self.metavar = metavar + self.envvar = envvar + self._custom_shell_complete = shell_complete + self.deprecated = deprecated + + if __debug__: + if self.type.is_composite and nargs != self.type.arity: + raise ValueError( + f"'nargs' must be {self.type.arity} (or None) for" + f" type {self.type!r}, but it was {nargs}." + ) + + if required and deprecated: + raise ValueError( + f"The {self.param_type_name} '{self.human_readable_name}' " + "is deprecated and still required. A deprecated " + f"{self.param_type_name} cannot be required." + ) + + def to_info_dict(self) -> dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionchanged:: 8.3.0 + Returns ``None`` for the :attr:`default` if it was not set. + + .. 
versionadded:: 8.0 + """ + return { + "name": self.name, + "param_type_name": self.param_type_name, + "opts": self.opts, + "secondary_opts": self.secondary_opts, + "type": self.type.to_info_dict(), + "required": self.required, + "nargs": self.nargs, + "multiple": self.multiple, + # We explicitly hide the :attr:`UNSET` value to the user, as we choose to + # make it an implementation detail. And because ``to_info_dict`` has been + # designed for documentation purposes, we return ``None`` instead. + "default": self.default if self.default is not UNSET else None, + "envvar": self.envvar, + } + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def _parse_decls( + self, decls: cabc.Sequence[str], expose_value: bool + ) -> tuple[str | None, list[str], list[str]]: + raise NotImplementedError() + + @property + def human_readable_name(self) -> str: + """Returns the human readable name of this parameter. This is the + same as the name for options, but the metavar for arguments. + """ + return self.name # type: ignore + + def make_metavar(self, ctx: Context) -> str: + if self.metavar is not None: + return self.metavar + + metavar = self.type.get_metavar(param=self, ctx=ctx) + + if metavar is None: + metavar = self.type.name.upper() + + if self.nargs != 1: + metavar += "..." + + return metavar + + @t.overload + def get_default( + self, ctx: Context, call: t.Literal[True] = True + ) -> t.Any | None: ... + + @t.overload + def get_default( + self, ctx: Context, call: bool = ... + ) -> t.Any | t.Callable[[], t.Any] | None: ... + + def get_default( + self, ctx: Context, call: bool = True + ) -> t.Any | t.Callable[[], t.Any] | None: + """Get the default for the parameter. Tries + :meth:`Context.lookup_default` first, then the local default. + + :param ctx: Current context. + :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0.2 + Type casting is no longer performed when getting a default. + + .. versionchanged:: 8.0.1 + Type casting can fail in resilient parsing mode. Invalid + defaults will not prevent showing help text. + + .. versionchanged:: 8.0 + Looks at ``ctx.default_map`` first. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. + """ + value = ctx.lookup_default(self.name, call=False) # type: ignore + + if value is UNSET: + value = self.default + + if call and callable(value): + value = value() + + return value + + def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None: + raise NotImplementedError() + + def consume_value( + self, ctx: Context, opts: cabc.Mapping[str, t.Any] + ) -> tuple[t.Any, ParameterSource]: + """Returns the parameter value produced by the parser. + + If the parser did not produce a value from user input, the value is either + sourced from the environment variable, the default map, or the parameter's + default value. In that order of precedence. + + If no value is found, an internal sentinel value is returned. + + :meta private: + """ + # Collect from the parse the value passed by the user to the CLI. + value = opts.get(self.name, UNSET) # type: ignore + # If the value is set, it means it was sourced from the command line by the + # parser, otherwise it left unset by default. 
+ source = ( + ParameterSource.COMMANDLINE + if value is not UNSET + else ParameterSource.DEFAULT + ) + + if value is UNSET: + envvar_value = self.value_from_envvar(ctx) + if envvar_value is not None: + value = envvar_value + source = ParameterSource.ENVIRONMENT + + if value is UNSET: + default_map_value = ctx.lookup_default(self.name) # type: ignore + if default_map_value is not UNSET: + value = default_map_value + source = ParameterSource.DEFAULT_MAP + + if value is UNSET: + default_value = self.get_default(ctx) + if default_value is not UNSET: + value = default_value + source = ParameterSource.DEFAULT + + return value, source + + def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any: + """Convert and validate a value against the parameter's + :attr:`type`, :attr:`multiple`, and :attr:`nargs`. + """ + if value is None: + if self.multiple or self.nargs == -1: + return () + else: + return value + + def check_iter(value: t.Any) -> cabc.Iterator[t.Any]: + try: + return _check_iter(value) + except TypeError: + # This should only happen when passing in args manually, + # the parser should construct an iterable when parsing + # the command line. + raise BadParameter( + _("Value must be an iterable."), ctx=ctx, param=self + ) from None + + # Define the conversion function based on nargs and type. + + if self.nargs == 1 or self.type.is_composite: + + def convert(value: t.Any) -> t.Any: + return self.type(value, param=self, ctx=ctx) + + elif self.nargs == -1: + + def convert(value: t.Any) -> t.Any: # tuple[t.Any, ...] + return tuple(self.type(x, self, ctx) for x in check_iter(value)) + + else: # nargs > 1 + + def convert(value: t.Any) -> t.Any: # tuple[t.Any, ...] + value = tuple(check_iter(value)) + + if len(value) != self.nargs: + raise BadParameter( + ngettext( + "Takes {nargs} values but 1 was given.", + "Takes {nargs} values but {len} were given.", + len(value), + ).format(nargs=self.nargs, len=len(value)), + ctx=ctx, + param=self, + ) + + return tuple(self.type(x, self, ctx) for x in value) + + if self.multiple: + return tuple(convert(x) for x in check_iter(value)) + + return convert(value) + + def value_is_missing(self, value: t.Any) -> bool: + """A value is considered missing if: + + - it is :attr:`UNSET`, + - or if it is an empty sequence while the parameter is suppose to have + non-single value (i.e. :attr:`nargs` is not ``1`` or :attr:`multiple` is + set). + + :meta private: + """ + if value is UNSET: + return True + + if (self.nargs != 1 or self.multiple) and value == (): + return True + + return False + + def process_value(self, ctx: Context, value: t.Any) -> t.Any: + """Process the value of this parameter: + + 1. Type cast the value using :meth:`type_cast_value`. + 2. Check if the value is missing (see: :meth:`value_is_missing`), and raise + :exc:`MissingParameter` if it is required. + 3. If a :attr:`callback` is set, call it to have the value replaced by the + result of the callback. If the value was not set, the callback receive + ``None``. This keep the legacy behavior as it was before the introduction of + the :attr:`UNSET` sentinel. 
+ + :meta private: + """ + # shelter `type_cast_value` from ever seeing an `UNSET` value by handling the + # cases in which `UNSET` gets special treatment explicitly at this layer + # + # Refs: + # https://github.com/pallets/click/issues/3069 + if value is UNSET: + if self.multiple or self.nargs == -1: + value = () + else: + value = self.type_cast_value(ctx, value) + + if self.required and self.value_is_missing(value): + raise MissingParameter(ctx=ctx, param=self) + + if self.callback is not None: + # Legacy case: UNSET is not exposed directly to the callback, but converted + # to None. + if value is UNSET: + value = None + + # Search for parameters with UNSET values in the context. + unset_keys = {k: None for k, v in ctx.params.items() if v is UNSET} + # No UNSET values, call the callback as usual. + if not unset_keys: + value = self.callback(ctx, self, value) + + # Legacy case: provide a temporarily manipulated context to the callback + # to hide UNSET values as None. + # + # Refs: + # https://github.com/pallets/click/issues/3136 + # https://github.com/pallets/click/pull/3137 + else: + # Add another layer to the context stack to clearly hint that the + # context is temporarily modified. + with ctx: + # Update the context parameters to replace UNSET with None. + ctx.params.update(unset_keys) + # Feed these fake context parameters to the callback. + value = self.callback(ctx, self, value) + # Restore the UNSET values in the context parameters. + ctx.params.update( + { + k: UNSET + for k in unset_keys + # Only restore keys that are present and still None, in case + # the callback modified other parameters. + if k in ctx.params and ctx.params[k] is None + } + ) + + return value + + def resolve_envvar_value(self, ctx: Context) -> str | None: + """Returns the value found in the environment variable(s) attached to this + parameter. + + Environment variables values are `always returned as strings + `_. + + This method returns ``None`` if: + + - the :attr:`envvar` property is not set on the :class:`Parameter`, + - the environment variable is not found in the environment, + - the variable is found in the environment but its value is empty (i.e. the + environment variable is present but has an empty string). + + If :attr:`envvar` is setup with multiple environment variables, + then only the first non-empty value is returned. + + .. caution:: + + The raw value extracted from the environment is not normalized and is + returned as-is. Any normalization or reconciliation is performed later by + the :class:`Parameter`'s :attr:`type`. + + :meta private: + """ + if not self.envvar: + return None + + if isinstance(self.envvar, str): + rv = os.environ.get(self.envvar) + + if rv: + return rv + else: + for envvar in self.envvar: + rv = os.environ.get(envvar) + + # Return the first non-empty value of the list of environment variables. + if rv: + return rv + # Else, absence of value is interpreted as an environment variable that + # is not set, so proceed to the next one. + + return None + + def value_from_envvar(self, ctx: Context) -> str | cabc.Sequence[str] | None: + """Process the raw environment variable string for this parameter. + + Returns the string as-is or splits it into a sequence of strings if the + parameter is expecting multiple values (i.e. its :attr:`nargs` property is set + to a value other than ``1``). 
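# --- Illustrative usage sketch (editorial aside, not part of the vendored Click source). ---
# Shows the value-resolution order described in consume_value above: a value given on
# the command line wins over the environment variable named by `envvar`, which in turn
# wins over the declared default. The command and option names are made up for the example.
import click
from click.testing import CliRunner

@click.command()
@click.option("--region", envvar="APP_REGION", default="us-east-1")
def deploy(region):
    click.echo(region)

runner = CliRunner()
print(runner.invoke(deploy, []).output)                                   # us-east-1  (default)
print(runner.invoke(deploy, [], env={"APP_REGION": "eu-west-1"}).output)  # eu-west-1  (envvar)
print(runner.invoke(deploy, ["--region", "ap-south-1"],
                    env={"APP_REGION": "eu-west-1"}).output)              # ap-south-1 (command line)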
+ + :meta private: + """ + rv = self.resolve_envvar_value(ctx) + + if rv is not None and self.nargs != 1: + return self.type.split_envvar_value(rv) + + return rv + + def handle_parse_result( + self, ctx: Context, opts: cabc.Mapping[str, t.Any], args: list[str] + ) -> tuple[t.Any, list[str]]: + """Process the value produced by the parser from user input. + + Always process the value through the Parameter's :attr:`type`, wherever it + comes from. + + If the parameter is deprecated, this method warn the user about it. But only if + the value has been explicitly set by the user (and as such, is not coming from + a default). + + :meta private: + """ + with augment_usage_errors(ctx, param=self): + value, source = self.consume_value(ctx, opts) + + ctx.set_parameter_source(self.name, source) # type: ignore + + # Display a deprecation warning if necessary. + if ( + self.deprecated + and value is not UNSET + and source not in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP) + ): + extra_message = ( + f" {self.deprecated}" if isinstance(self.deprecated, str) else "" + ) + message = _( + "DeprecationWarning: The {param_type} {name!r} is deprecated." + "{extra_message}" + ).format( + param_type=self.param_type_name, + name=self.human_readable_name, + extra_message=extra_message, + ) + echo(style(message, fg="red"), err=True) + + # Process the value through the parameter's type. + try: + value = self.process_value(ctx, value) + except Exception: + if not ctx.resilient_parsing: + raise + # In resilient parsing mode, we do not want to fail the command if the + # value is incompatible with the parameter type, so we reset the value + # to UNSET, which will be interpreted as a missing value. + value = UNSET + + # Add parameter's value to the context. + if ( + self.expose_value + # We skip adding the value if it was previously set by another parameter + # targeting the same variable name. This prevents parameters competing for + # the same name to override each other. + and (self.name not in ctx.params or ctx.params[self.name] is UNSET) + ): + # Click is logically enforcing that the name is None if the parameter is + # not to be exposed. We still assert it here to please the type checker. + assert self.name is not None, ( + f"{self!r} parameter's name should not be None when exposing value." + ) + ctx.params[self.name] = value + + return value, args + + def get_help_record(self, ctx: Context) -> tuple[str, str] | None: + pass + + def get_usage_pieces(self, ctx: Context) -> list[str]: + return [] + + def get_error_hint(self, ctx: Context) -> str: + """Get a stringified version of the param for use in error messages to + indicate which param caused the error. + """ + hint_list = self.opts or [self.human_readable_name] + return " / ".join(f"'{x}'" for x in hint_list) + + def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]: + """Return a list of completions for the incomplete value. If a + ``shell_complete`` function was given during init, it is used. + Otherwise, the :attr:`type` + :meth:`~click.types.ParamType.shell_complete` function is used. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + if self._custom_shell_complete is not None: + results = self._custom_shell_complete(ctx, self, incomplete) + + if results and isinstance(results[0], str): + from click.shell_completion import CompletionItem + + results = [CompletionItem(c) for c in results] + + return t.cast("list[CompletionItem]", results) + + return self.type.shell_complete(ctx, self, incomplete) + + +class Option(Parameter): + """Options are usually optional values on the command line and + have some extra features that arguments don't have. + + All other parameters are passed onwards to the parameter constructor. + + :param show_default: Show the default value for this option in its + help text. Values are not shown by default, unless + :attr:`Context.show_default` is ``True``. If this value is a + string, it shows that string in parentheses instead of the + actual value. This is particularly useful for dynamic options. + For single option boolean flags, the default remains hidden if + its value is ``False``. + :param show_envvar: Controls if an environment variable should be + shown on the help page and error messages. + Normally, environment variables are not shown. + :param prompt: If set to ``True`` or a non empty string then the + user will be prompted for input. If set to ``True`` the prompt + will be the option name capitalized. A deprecated option cannot be + prompted. + :param confirmation_prompt: Prompt a second time to confirm the + value if it was prompted for. Can be set to a string instead of + ``True`` to customize the message. + :param prompt_required: If set to ``False``, the user will be + prompted for input only when the option was specified as a flag + without a value. + :param hide_input: If this is ``True`` then the input on the prompt + will be hidden from the user. This is useful for password input. + :param is_flag: forces this option to act as a flag. The default is + auto detection. + :param flag_value: which value should be used for this flag if it's + enabled. This is set to a boolean automatically if + the option string contains a slash to mark two options. + :param multiple: if this is set to `True` then the argument is accepted + multiple times and recorded. This is similar to ``nargs`` + in how it works but supports arbitrary number of + arguments. + :param count: this flag makes an option increment an integer. + :param allow_from_autoenv: if this is enabled then the value of this + parameter will be pulled from an environment + variable in case a prefix is defined on the + context. + :param help: the help string. + :param hidden: hide this option from help outputs. + :param attrs: Other command arguments described in :class:`Parameter`. + + .. versionchanged:: 8.2 + ``envvar`` used with ``flag_value`` will always use the ``flag_value``, + previously it would use the value of the environment variable. + + .. versionchanged:: 8.1 + Help text indentation is cleaned here instead of only in the + ``@option`` decorator. + + .. versionchanged:: 8.1 + The ``show_default`` parameter overrides + ``Context.show_default``. + + .. versionchanged:: 8.1 + The default of a single option boolean flag is not shown if the + default value is ``False``. + + .. versionchanged:: 8.0.1 + ``type`` is detected from ``flag_value`` if given. 
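# --- Illustrative usage sketch (editorial aside, not part of the vendored Click source). ---
# Exercises several Option features documented above: prompting, hidden/confirmed input,
# show_default, counting, repeated options, and an on/off boolean flag declared with a
# slash. All names here are hypothetical.
import click

@click.command()
@click.option("--username", prompt=True)
@click.option("--password", prompt=True, hide_input=True, confirmation_prompt=True)
@click.option("--retries", default=3, show_default=True)
@click.option("-v", "--verbose", count=True, help="Repeat for more verbosity.")
@click.option("--tag", multiple=True, help="May be given several times.")
@click.option("--color/--no-color", default=True, show_default=True)
def connect(username, password, retries, verbose, tag, color):
    click.echo(f"{username} retries={retries} verbosity={verbose} tags={list(tag)} color={color}")

if __name__ == "__main__":
    connect()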
+ """ + + param_type_name = "option" + + def __init__( + self, + param_decls: cabc.Sequence[str] | None = None, + show_default: bool | str | None = None, + prompt: bool | str = False, + confirmation_prompt: bool | str = False, + prompt_required: bool = True, + hide_input: bool = False, + is_flag: bool | None = None, + flag_value: t.Any = UNSET, + multiple: bool = False, + count: bool = False, + allow_from_autoenv: bool = True, + type: types.ParamType | t.Any | None = None, + help: str | None = None, + hidden: bool = False, + show_choices: bool = True, + show_envvar: bool = False, + deprecated: bool | str = False, + **attrs: t.Any, + ) -> None: + if help: + help = inspect.cleandoc(help) + + super().__init__( + param_decls, type=type, multiple=multiple, deprecated=deprecated, **attrs + ) + + if prompt is True: + if self.name is None: + raise TypeError("'name' is required with 'prompt=True'.") + + prompt_text: str | None = self.name.replace("_", " ").capitalize() + elif prompt is False: + prompt_text = None + else: + prompt_text = prompt + + if deprecated: + deprecated_message = ( + f"(DEPRECATED: {deprecated})" + if isinstance(deprecated, str) + else "(DEPRECATED)" + ) + help = help + deprecated_message if help is not None else deprecated_message + + self.prompt = prompt_text + self.confirmation_prompt = confirmation_prompt + self.prompt_required = prompt_required + self.hide_input = hide_input + self.hidden = hidden + + # The _flag_needs_value property tells the parser that this option is a flag + # that cannot be used standalone and needs a value. With this information, the + # parser can determine whether to consider the next user-provided argument in + # the CLI as a value for this flag or as a new option. + # If prompt is enabled but not required, then it opens the possibility for the + # option to gets its value from the user. + self._flag_needs_value = self.prompt is not None and not self.prompt_required + + # Auto-detect if this is a flag or not. + if is_flag is None: + # Implicitly a flag because flag_value was set. + if flag_value is not UNSET: + is_flag = True + # Not a flag, but when used as a flag it shows a prompt. + elif self._flag_needs_value: + is_flag = False + # Implicitly a flag because secondary options names were given. + elif self.secondary_opts: + is_flag = True + # The option is explicitly not a flag. But we do not know yet if it needs a + # value or not. So we look at the default value to determine it. + elif is_flag is False and not self._flag_needs_value: + self._flag_needs_value = self.default is UNSET + + if is_flag: + # Set missing default for flags if not explicitly required or prompted. + if self.default is UNSET and not self.required and not self.prompt: + if multiple: + self.default = () + + # Auto-detect the type of the flag based on the flag_value. + if type is None: + # A flag without a flag_value is a boolean flag. + if flag_value is UNSET: + self.type: types.ParamType = types.BoolParamType() + # If the flag value is a boolean, use BoolParamType. + elif isinstance(flag_value, bool): + self.type = types.BoolParamType() + # Otherwise, guess the type from the flag value. + else: + self.type = types.convert_type(None, flag_value) + + self.is_flag: bool = bool(is_flag) + self.is_bool_flag: bool = bool( + is_flag and isinstance(self.type, types.BoolParamType) + ) + self.flag_value: t.Any = flag_value + + # Set boolean flag default to False if unset and not required. 
+ if self.is_bool_flag: + if self.default is UNSET and not self.required: + self.default = False + + # Support the special case of aligning the default value with the flag_value + # for flags whose default is explicitly set to True. Note that as long as we + # have this condition, there is no way a flag can have a default set to True, + # and a flag_value set to something else. Refs: + # https://github.com/pallets/click/issues/3024#issuecomment-3146199461 + # https://github.com/pallets/click/pull/3030/commits/06847da + if self.default is True and self.flag_value is not UNSET: + self.default = self.flag_value + + # Set the default flag_value if it is not set. + if self.flag_value is UNSET: + if self.is_flag: + self.flag_value = True + else: + self.flag_value = None + + # Counting. + self.count = count + if count: + if type is None: + self.type = types.IntRange(min=0) + if self.default is UNSET: + self.default = 0 + + self.allow_from_autoenv = allow_from_autoenv + self.help = help + self.show_default = show_default + self.show_choices = show_choices + self.show_envvar = show_envvar + + if __debug__: + if deprecated and prompt: + raise ValueError("`deprecated` options cannot use `prompt`.") + + if self.nargs == -1: + raise TypeError("nargs=-1 is not supported for options.") + + if not self.is_bool_flag and self.secondary_opts: + raise TypeError("Secondary flag is not valid for non-boolean flag.") + + if self.is_bool_flag and self.hide_input and self.prompt is not None: + raise TypeError( + "'prompt' with 'hide_input' is not valid for boolean flag." + ) + + if self.count: + if self.multiple: + raise TypeError("'count' is not valid with 'multiple'.") + + if self.is_flag: + raise TypeError("'count' is not valid with 'is_flag'.") + + def to_info_dict(self) -> dict[str, t.Any]: + """ + .. versionchanged:: 8.3.0 + Returns ``None`` for the :attr:`flag_value` if it was not set. + """ + info_dict = super().to_info_dict() + info_dict.update( + help=self.help, + prompt=self.prompt, + is_flag=self.is_flag, + # We explicitly hide the :attr:`UNSET` value to the user, as we choose to + # make it an implementation detail. And because ``to_info_dict`` has been + # designed for documentation purposes, we return ``None`` instead. + flag_value=self.flag_value if self.flag_value is not UNSET else None, + count=self.count, + hidden=self.hidden, + ) + return info_dict + + def get_error_hint(self, ctx: Context) -> str: + result = super().get_error_hint(ctx) + if self.show_envvar and self.envvar is not None: + result += f" (env var: '{self.envvar}')" + return result + + def _parse_decls( + self, decls: cabc.Sequence[str], expose_value: bool + ) -> tuple[str | None, list[str], list[str]]: + opts = [] + secondary_opts = [] + name = None + possible_names = [] + + for decl in decls: + if decl.isidentifier(): + if name is not None: + raise TypeError(f"Name '{name}' defined twice") + name = decl + else: + split_char = ";" if decl[:1] == "/" else "/" + if split_char in decl: + first, second = decl.split(split_char, 1) + first = first.rstrip() + if first: + possible_names.append(_split_opt(first)) + opts.append(first) + second = second.lstrip() + if second: + secondary_opts.append(second.lstrip()) + if first == second: + raise ValueError( + f"Boolean option {decl!r} cannot use the" + " same flag for true/false." 
+ ) + else: + possible_names.append(_split_opt(decl)) + opts.append(decl) + + if name is None and possible_names: + possible_names.sort(key=lambda x: -len(x[0])) # group long options first + name = possible_names[0][1].replace("-", "_").lower() + if not name.isidentifier(): + name = None + + if name is None: + if not expose_value: + return None, opts, secondary_opts + raise TypeError( + f"Could not determine name for option with declarations {decls!r}" + ) + + if not opts and not secondary_opts: + raise TypeError( + f"No options defined but a name was passed ({name})." + " Did you mean to declare an argument instead? Did" + f" you mean to pass '--{name}'?" + ) + + return name, opts, secondary_opts + + def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None: + if self.multiple: + action = "append" + elif self.count: + action = "count" + else: + action = "store" + + if self.is_flag: + action = f"{action}_const" + + if self.is_bool_flag and self.secondary_opts: + parser.add_option( + obj=self, opts=self.opts, dest=self.name, action=action, const=True + ) + parser.add_option( + obj=self, + opts=self.secondary_opts, + dest=self.name, + action=action, + const=False, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + const=self.flag_value, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + nargs=self.nargs, + ) + + def get_help_record(self, ctx: Context) -> tuple[str, str] | None: + if self.hidden: + return None + + any_prefix_is_slash = False + + def _write_opts(opts: cabc.Sequence[str]) -> str: + nonlocal any_prefix_is_slash + + rv, any_slashes = join_options(opts) + + if any_slashes: + any_prefix_is_slash = True + + if not self.is_flag and not self.count: + rv += f" {self.make_metavar(ctx=ctx)}" + + return rv + + rv = [_write_opts(self.opts)] + + if self.secondary_opts: + rv.append(_write_opts(self.secondary_opts)) + + help = self.help or "" + + extra = self.get_help_extra(ctx) + extra_items = [] + if "envvars" in extra: + extra_items.append( + _("env var: {var}").format(var=", ".join(extra["envvars"])) + ) + if "default" in extra: + extra_items.append(_("default: {default}").format(default=extra["default"])) + if "range" in extra: + extra_items.append(extra["range"]) + if "required" in extra: + extra_items.append(_(extra["required"])) + + if extra_items: + extra_str = "; ".join(extra_items) + help = f"{help} [{extra_str}]" if help else f"[{extra_str}]" + + return ("; " if any_prefix_is_slash else " / ").join(rv), help + + def get_help_extra(self, ctx: Context) -> types.OptionHelpExtra: + extra: types.OptionHelpExtra = {} + + if self.show_envvar: + envvar = self.envvar + + if envvar is None: + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + + if envvar is not None: + if isinstance(envvar, str): + extra["envvars"] = (envvar,) + else: + extra["envvars"] = tuple(str(d) for d in envvar) + + # Temporarily enable resilient parsing to avoid type casting + # failing for the default. Might be possible to extend this to + # help formatting in general. 
+ resilient = ctx.resilient_parsing + ctx.resilient_parsing = True + + try: + default_value = self.get_default(ctx, call=False) + finally: + ctx.resilient_parsing = resilient + + show_default = False + show_default_is_str = False + + if self.show_default is not None: + if isinstance(self.show_default, str): + show_default_is_str = show_default = True + else: + show_default = self.show_default + elif ctx.show_default is not None: + show_default = ctx.show_default + + if show_default_is_str or ( + show_default and (default_value not in (None, UNSET)) + ): + if show_default_is_str: + default_string = f"({self.show_default})" + elif isinstance(default_value, (list, tuple)): + default_string = ", ".join(str(d) for d in default_value) + elif isinstance(default_value, enum.Enum): + default_string = default_value.name + elif inspect.isfunction(default_value): + default_string = _("(dynamic)") + elif self.is_bool_flag and self.secondary_opts: + # For boolean flags that have distinct True/False opts, + # use the opt without prefix instead of the value. + default_string = _split_opt( + (self.opts if default_value else self.secondary_opts)[0] + )[1] + elif self.is_bool_flag and not self.secondary_opts and not default_value: + default_string = "" + elif default_value == "": + default_string = '""' + else: + default_string = str(default_value) + + if default_string: + extra["default"] = default_string + + if ( + isinstance(self.type, types._NumberRangeBase) + # skip count with default range type + and not (self.count and self.type.min == 0 and self.type.max is None) + ): + range_str = self.type._describe_range() + + if range_str: + extra["range"] = range_str + + if self.required: + extra["required"] = "required" + + return extra + + def prompt_for_value(self, ctx: Context) -> t.Any: + """This is an alternative flow that can be activated in the full + value processing if a value does not exist. It will prompt the + user until a valid value exists and then returns the processed + value as result. + """ + assert self.prompt is not None + + # Calculate the default before prompting anything to lock in the value before + # attempting any user interaction. + default = self.get_default(ctx) + + # A boolean flag can use a simplified [y/n] confirmation prompt. + if self.is_bool_flag: + # If we have no boolean default, we force the user to explicitly provide + # one. + if default in (UNSET, None): + default = None + # Nothing prevent you to declare an option that is simultaneously: + # 1) auto-detected as a boolean flag, + # 2) allowed to prompt, and + # 3) still declare a non-boolean default. + # This forced casting into a boolean is necessary to align any non-boolean + # default to the prompt, which is going to be a [y/n]-style confirmation + # because the option is still a boolean flag. That way, instead of [y/n], + # we get [Y/n] or [y/N] depending on the truthy value of the default. + # Refs: https://github.com/pallets/click/pull/3030#discussion_r2289180249 + else: + default = bool(default) + return confirm(self.prompt, default) + + # If show_default is set to True/False, provide this to `prompt` as well. For + # non-bool values of `show_default`, we use `prompt`'s default behavior + prompt_kwargs: t.Any = {} + if isinstance(self.show_default, bool): + prompt_kwargs["show_default"] = self.show_default + + return prompt( + self.prompt, + # Use ``None`` to inform the prompt() function to reiterate until a valid + # value is provided by the user if we have no default. 
+ default=None if default is UNSET else default, + type=self.type, + hide_input=self.hide_input, + show_choices=self.show_choices, + confirmation_prompt=self.confirmation_prompt, + value_proc=lambda x: self.process_value(ctx, x), + **prompt_kwargs, + ) + + def resolve_envvar_value(self, ctx: Context) -> str | None: + """:class:`Option` resolves its environment variable the same way as + :func:`Parameter.resolve_envvar_value`, but it also supports + :attr:`Context.auto_envvar_prefix`. If we could not find an environment from + the :attr:`envvar` property, we fallback on :attr:`Context.auto_envvar_prefix` + to build dynamiccaly the environment variable name using the + :python:`{ctx.auto_envvar_prefix}_{self.name.upper()}` template. + + :meta private: + """ + rv = super().resolve_envvar_value(ctx) + + if rv is not None: + return rv + + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + rv = os.environ.get(envvar) + + if rv: + return rv + + return None + + def value_from_envvar(self, ctx: Context) -> t.Any: + """For :class:`Option`, this method processes the raw environment variable + string the same way as :func:`Parameter.value_from_envvar` does. + + But in the case of non-boolean flags, the value is analyzed to determine if the + flag is activated or not, and returns a boolean of its activation, or the + :attr:`flag_value` if the latter is set. + + This method also takes care of repeated options (i.e. options with + :attr:`multiple` set to ``True``). + + :meta private: + """ + rv = self.resolve_envvar_value(ctx) + + # Absent environment variable or an empty string is interpreted as unset. + if rv is None: + return None + + # Non-boolean flags are more liberal in what they accept. But a flag being a + # flag, its envvar value still needs to be analyzed to determine if the flag is + # activated or not. + if self.is_flag and not self.is_bool_flag: + # If the flag_value is set and match the envvar value, return it + # directly. + if self.flag_value is not UNSET and rv == self.flag_value: + return self.flag_value + # Analyze the envvar value as a boolean to know if the flag is + # activated or not. + return types.BoolParamType.str_to_bool(rv) + + # Split the envvar value if it is allowed to be repeated. + value_depth = (self.nargs != 1) + bool(self.multiple) + if value_depth > 0: + multi_rv = self.type.split_envvar_value(rv) + if self.multiple and self.nargs != 1: + multi_rv = batch(multi_rv, self.nargs) # type: ignore[assignment] + + return multi_rv + + return rv + + def consume_value( + self, ctx: Context, opts: cabc.Mapping[str, Parameter] + ) -> tuple[t.Any, ParameterSource]: + """For :class:`Option`, the value can be collected from an interactive prompt + if the option is a flag that needs a value (and the :attr:`prompt` property is + set). + + Additionally, this method handles flag option that are activated without a + value, in which case the :attr:`flag_value` is returned. + + :meta private: + """ + value, source = super().consume_value(ctx, opts) + + # The parser will emit a sentinel value if the option is allowed to as a flag + # without a value. + if value is FLAG_NEEDS_VALUE: + # If the option allows for a prompt, we start an interaction with the user. + if self.prompt is not None and not ctx.resilient_parsing: + value = self.prompt_for_value(ctx) + source = ParameterSource.PROMPT + # Else the flag takes its flag_value as value. 
+ else: + value = self.flag_value + source = ParameterSource.COMMANDLINE + + # A flag which is activated always returns the flag value, unless the value + # comes from the explicitly sets default. + elif ( + self.is_flag + and value is True + and not self.is_bool_flag + and source not in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP) + ): + value = self.flag_value + + # Re-interpret a multiple option which has been sent as-is by the parser. + # Here we replace each occurrence of value-less flags (marked by the + # FLAG_NEEDS_VALUE sentinel) with the flag_value. + elif ( + self.multiple + and value is not UNSET + and source not in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP) + and any(v is FLAG_NEEDS_VALUE for v in value) + ): + value = [self.flag_value if v is FLAG_NEEDS_VALUE else v for v in value] + source = ParameterSource.COMMANDLINE + + # The value wasn't set, or used the param's default, prompt for one to the user + # if prompting is enabled. + elif ( + ( + value is UNSET + or source in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP) + ) + and self.prompt is not None + and (self.required or self.prompt_required) + and not ctx.resilient_parsing + ): + value = self.prompt_for_value(ctx) + source = ParameterSource.PROMPT + + return value, source + + def process_value(self, ctx: Context, value: t.Any) -> t.Any: + # process_value has to be overridden on Options in order to capture + # `value == UNSET` cases before `type_cast_value()` gets called. + # + # Refs: + # https://github.com/pallets/click/issues/3069 + if self.is_flag and not self.required and self.is_bool_flag and value is UNSET: + value = False + + if self.callback is not None: + value = self.callback(ctx, self, value) + + return value + + # in the normal case, rely on Parameter.process_value + return super().process_value(ctx, value) + + +class Argument(Parameter): + """Arguments are positional parameters to a command. They generally + provide fewer features than options but can have infinite ``nargs`` + and are required by default. + + All parameters are passed onwards to the constructor of :class:`Parameter`. + """ + + param_type_name = "argument" + + def __init__( + self, + param_decls: cabc.Sequence[str], + required: bool | None = None, + **attrs: t.Any, + ) -> None: + # Auto-detect the requirement status of the argument if not explicitly set. + if required is None: + # The argument gets automatically required if it has no explicit default + # value set and is setup to match at least one value. + if attrs.get("default", UNSET) is UNSET: + required = attrs.get("nargs", 1) > 0 + # If the argument has a default value, it is not required. + else: + required = False + + if "multiple" in attrs: + raise TypeError("__init__() got an unexpected keyword argument 'multiple'.") + + super().__init__(param_decls, required=required, **attrs) + + @property + def human_readable_name(self) -> str: + if self.metavar is not None: + return self.metavar + return self.name.upper() # type: ignore + + def make_metavar(self, ctx: Context) -> str: + if self.metavar is not None: + return self.metavar + var = self.type.get_metavar(param=self, ctx=ctx) + if not var: + var = self.name.upper() # type: ignore + if self.deprecated: + var += "!" + if not self.required: + var = f"[{var}]" + if self.nargs != 1: + var += "..." 
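# --- Illustrative usage sketch (editorial aside, not part of the vendored Click source). ---
# Arguments are required by default unless a default is supplied; nargs=-1 collects the
# remaining values into a tuple and is not required, so the usage line renders as
# "[SRC]... DST" via make_metavar. The command name is hypothetical.
import click

@click.command()
@click.argument("src", nargs=-1)
@click.argument("dst")
def copy(src, dst):
    for path in src:
        click.echo(f"copy {path} -> {dst}")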
+ return var + + def _parse_decls( + self, decls: cabc.Sequence[str], expose_value: bool + ) -> tuple[str | None, list[str], list[str]]: + if not decls: + if not expose_value: + return None, [], [] + raise TypeError("Argument is marked as exposed, but does not have a name.") + if len(decls) == 1: + name = arg = decls[0] + name = name.replace("-", "_").lower() + else: + raise TypeError( + "Arguments take exactly one parameter declaration, got" + f" {len(decls)}: {decls}." + ) + return name, [arg], [] + + def get_usage_pieces(self, ctx: Context) -> list[str]: + return [self.make_metavar(ctx)] + + def get_error_hint(self, ctx: Context) -> str: + return f"'{self.make_metavar(ctx)}'" + + def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None: + parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) + + +def __getattr__(name: str) -> object: + import warnings + + if name == "BaseCommand": + warnings.warn( + "'BaseCommand' is deprecated and will be removed in Click 9.0. Use" + " 'Command' instead.", + DeprecationWarning, + stacklevel=2, + ) + return _BaseCommand + + if name == "MultiCommand": + warnings.warn( + "'MultiCommand' is deprecated and will be removed in Click 9.0. Use" + " 'Group' instead.", + DeprecationWarning, + stacklevel=2, + ) + return _MultiCommand + + raise AttributeError(name) diff --git a/.venv/lib/python3.12/site-packages/click/decorators.py b/.venv/lib/python3.12/site-packages/click/decorators.py new file mode 100644 index 0000000..21f4c34 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/decorators.py @@ -0,0 +1,551 @@ +from __future__ import annotations + +import inspect +import typing as t +from functools import update_wrapper +from gettext import gettext as _ + +from .core import Argument +from .core import Command +from .core import Context +from .core import Group +from .core import Option +from .core import Parameter +from .globals import get_current_context +from .utils import echo + +if t.TYPE_CHECKING: + import typing_extensions as te + + P = te.ParamSpec("P") + +R = t.TypeVar("R") +T = t.TypeVar("T") +_AnyCallable = t.Callable[..., t.Any] +FC = t.TypeVar("FC", bound="_AnyCallable | Command") + + +def pass_context(f: t.Callable[te.Concatenate[Context, P], R]) -> t.Callable[P, R]: + """Marks a callback as wanting to receive the current context + object as first argument. + """ + + def new_func(*args: P.args, **kwargs: P.kwargs) -> R: + return f(get_current_context(), *args, **kwargs) + + return update_wrapper(new_func, f) + + +def pass_obj(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]: + """Similar to :func:`pass_context`, but only pass the object on the + context onwards (:attr:`Context.obj`). This is useful if that object + represents the state of a nested system. + """ + + def new_func(*args: P.args, **kwargs: P.kwargs) -> R: + return f(get_current_context().obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + +def make_pass_decorator( + object_type: type[T], ensure: bool = False +) -> t.Callable[[t.Callable[te.Concatenate[T, P], R]], t.Callable[P, R]]: + """Given an object type this creates a decorator that will work + similar to :func:`pass_obj` but instead of passing the object of the + current context, it will find the innermost context of type + :func:`object_type`. 
+ + This generates a decorator that works roughly like this:: + + from functools import update_wrapper + + def decorator(f): + @pass_context + def new_func(ctx, *args, **kwargs): + obj = ctx.find_object(object_type) + return ctx.invoke(f, obj, *args, **kwargs) + return update_wrapper(new_func, f) + return decorator + + :param object_type: the type of the object to pass. + :param ensure: if set to `True`, a new object will be created and + remembered on the context if it's not there yet. + """ + + def decorator(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]: + def new_func(*args: P.args, **kwargs: P.kwargs) -> R: + ctx = get_current_context() + + obj: T | None + if ensure: + obj = ctx.ensure_object(object_type) + else: + obj = ctx.find_object(object_type) + + if obj is None: + raise RuntimeError( + "Managed to invoke callback without a context" + f" object of type {object_type.__name__!r}" + " existing." + ) + + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + return decorator + + +def pass_meta_key( + key: str, *, doc_description: str | None = None +) -> t.Callable[[t.Callable[te.Concatenate[T, P], R]], t.Callable[P, R]]: + """Create a decorator that passes a key from + :attr:`click.Context.meta` as the first argument to the decorated + function. + + :param key: Key in ``Context.meta`` to pass. + :param doc_description: Description of the object being passed, + inserted into the decorator's docstring. Defaults to "the 'key' + key from Context.meta". + + .. versionadded:: 8.0 + """ + + def decorator(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]: + def new_func(*args: P.args, **kwargs: P.kwargs) -> R: + ctx = get_current_context() + obj = ctx.meta[key] + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + if doc_description is None: + doc_description = f"the {key!r} key from :attr:`click.Context.meta`" + + decorator.__doc__ = ( + f"Decorator that passes {doc_description} as the first argument" + " to the decorated function." + ) + return decorator + + +CmdType = t.TypeVar("CmdType", bound=Command) + + +# variant: no call, directly as decorator for a function. +@t.overload +def command(name: _AnyCallable) -> Command: ... + + +# variant: with positional name and with positional or keyword cls argument: +# @command(namearg, CommandCls, ...) or @command(namearg, cls=CommandCls, ...) +@t.overload +def command( + name: str | None, + cls: type[CmdType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], CmdType]: ... + + +# variant: name omitted, cls _must_ be a keyword argument, @command(cls=CommandCls, ...) +@t.overload +def command( + name: None = None, + *, + cls: type[CmdType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], CmdType]: ... + + +# variant: with optional string name, no cls argument provided. +@t.overload +def command( + name: str | None = ..., cls: None = None, **attrs: t.Any +) -> t.Callable[[_AnyCallable], Command]: ... + + +def command( + name: str | _AnyCallable | None = None, + cls: type[CmdType] | None = None, + **attrs: t.Any, +) -> Command | t.Callable[[_AnyCallable], Command | CmdType]: + r"""Creates a new :class:`Command` and uses the decorated function as + callback. This will also automatically attach all decorated + :func:`option`\s and :func:`argument`\s as parameters to the command. 
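# --- Illustrative usage sketch (editorial aside, not part of the vendored Click source). ---
# make_pass_decorator builds a decorator equivalent to the inline example shown in its
# docstring above: it finds (or, with ensure=True, creates) the innermost context object
# of the given type and passes it as the first argument. Repo/pass_repo are made-up names.
import click

class Repo:
    def __init__(self, home: str = ".") -> None:
        self.home = home

pass_repo = click.make_pass_decorator(Repo, ensure=True)

@click.group()
@click.option("--home", default=".")
@click.pass_context
def cli(ctx, home):
    ctx.obj = Repo(home)

@cli.command()
@pass_repo
def status(repo):
    click.echo(f"repo home: {repo.home}")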
+ + The name of the command defaults to the name of the function, converted to + lowercase, with underscores ``_`` replaced by dashes ``-``, and the suffixes + ``_command``, ``_cmd``, ``_group``, and ``_grp`` are removed. For example, + ``init_data_command`` becomes ``init-data``. + + All keyword arguments are forwarded to the underlying command class. + For the ``params`` argument, any decorated params are appended to + the end of the list. + + Once decorated the function turns into a :class:`Command` instance + that can be invoked as a command line utility or be attached to a + command :class:`Group`. + + :param name: The name of the command. Defaults to modifying the function's + name as described above. + :param cls: The command class to create. Defaults to :class:`Command`. + + .. versionchanged:: 8.2 + The suffixes ``_command``, ``_cmd``, ``_group``, and ``_grp`` are + removed when generating the name. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.1 + The ``params`` argument can be used. Decorated params are + appended to the end of the list. + """ + + func: t.Callable[[_AnyCallable], t.Any] | None = None + + if callable(name): + func = name + name = None + assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class." + assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments." + + if cls is None: + cls = t.cast("type[CmdType]", Command) + + def decorator(f: _AnyCallable) -> CmdType: + if isinstance(f, Command): + raise TypeError("Attempted to convert a callback into a command twice.") + + attr_params = attrs.pop("params", None) + params = attr_params if attr_params is not None else [] + + try: + decorator_params = f.__click_params__ # type: ignore + except AttributeError: + pass + else: + del f.__click_params__ # type: ignore + params.extend(reversed(decorator_params)) + + if attrs.get("help") is None: + attrs["help"] = f.__doc__ + + if t.TYPE_CHECKING: + assert cls is not None + assert not callable(name) + + if name is not None: + cmd_name = name + else: + cmd_name = f.__name__.lower().replace("_", "-") + cmd_left, sep, suffix = cmd_name.rpartition("-") + + if sep and suffix in {"command", "cmd", "group", "grp"}: + cmd_name = cmd_left + + cmd = cls(name=cmd_name, callback=f, params=params, **attrs) + cmd.__doc__ = f.__doc__ + return cmd + + if func is not None: + return decorator(func) + + return decorator + + +GrpType = t.TypeVar("GrpType", bound=Group) + + +# variant: no call, directly as decorator for a function. +@t.overload +def group(name: _AnyCallable) -> Group: ... + + +# variant: with positional name and with positional or keyword cls argument: +# @group(namearg, GroupCls, ...) or @group(namearg, cls=GroupCls, ...) +@t.overload +def group( + name: str | None, + cls: type[GrpType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], GrpType]: ... + + +# variant: name omitted, cls _must_ be a keyword argument, @group(cmd=GroupCls, ...) +@t.overload +def group( + name: None = None, + *, + cls: type[GrpType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], GrpType]: ... + + +# variant: with optional string name, no cls argument provided. +@t.overload +def group( + name: str | None = ..., cls: None = None, **attrs: t.Any +) -> t.Callable[[_AnyCallable], Group]: ... 
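# --- Illustrative usage sketch (editorial aside, not part of the vendored Click source). ---
# Demonstrates the command-name derivation described above: the function name is
# lowercased, underscores become dashes, and a trailing "_command"/"_cmd"/"_group"/"_grp"
# suffix is dropped, so this command is invoked as "init-data".
import click

@click.command()
def init_data_command():
    """Initialise the data store."""
    click.echo("initialised")

print(init_data_command.name)  # init-data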
+ + +def group( + name: str | _AnyCallable | None = None, + cls: type[GrpType] | None = None, + **attrs: t.Any, +) -> Group | t.Callable[[_AnyCallable], Group | GrpType]: + """Creates a new :class:`Group` with a function as callback. This + works otherwise the same as :func:`command` just that the `cls` + parameter is set to :class:`Group`. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + """ + if cls is None: + cls = t.cast("type[GrpType]", Group) + + if callable(name): + return command(cls=cls, **attrs)(name) + + return command(name, cls, **attrs) + + +def _param_memo(f: t.Callable[..., t.Any], param: Parameter) -> None: + if isinstance(f, Command): + f.params.append(param) + else: + if not hasattr(f, "__click_params__"): + f.__click_params__ = [] # type: ignore + + f.__click_params__.append(param) # type: ignore + + +def argument( + *param_decls: str, cls: type[Argument] | None = None, **attrs: t.Any +) -> t.Callable[[FC], FC]: + """Attaches an argument to the command. All positional arguments are + passed as parameter declarations to :class:`Argument`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Argument` instance manually + and attaching it to the :attr:`Command.params` list. + + For the default argument class, refer to :class:`Argument` and + :class:`Parameter` for descriptions of parameters. + + :param cls: the argument class to instantiate. This defaults to + :class:`Argument`. + :param param_decls: Passed as positional arguments to the constructor of + ``cls``. + :param attrs: Passed as keyword arguments to the constructor of ``cls``. + """ + if cls is None: + cls = Argument + + def decorator(f: FC) -> FC: + _param_memo(f, cls(param_decls, **attrs)) + return f + + return decorator + + +def option( + *param_decls: str, cls: type[Option] | None = None, **attrs: t.Any +) -> t.Callable[[FC], FC]: + """Attaches an option to the command. All positional arguments are + passed as parameter declarations to :class:`Option`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Option` instance manually + and attaching it to the :attr:`Command.params` list. + + For the default option class, refer to :class:`Option` and + :class:`Parameter` for descriptions of parameters. + + :param cls: the option class to instantiate. This defaults to + :class:`Option`. + :param param_decls: Passed as positional arguments to the constructor of + ``cls``. + :param attrs: Passed as keyword arguments to the constructor of ``cls``. + """ + if cls is None: + cls = Option + + def decorator(f: FC) -> FC: + _param_memo(f, cls(param_decls, **attrs)) + return f + + return decorator + + +def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--yes`` option which shows a prompt before continuing if + not passed. If the prompt is declined, the program will exit. + + :param param_decls: One or more option names. Defaults to the single + value ``"--yes"``. + :param kwargs: Extra arguments are passed to :func:`option`. 
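# --- Illustrative usage sketch (editorial aside, not part of the vendored Click source). ---
# Typical use of the pre-built options defined here: confirmation_option adds a --yes
# flag that aborts unless confirmed, password_option prompts twice with hidden input.
# The command name and prompt text are hypothetical.
import click

@click.command()
@click.confirmation_option(prompt="Drop all tables?")
@click.password_option()
def drop_db(password):
    click.echo("dropped")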
+ """ + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value: + ctx.abort() + + if not param_decls: + param_decls = ("--yes",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("callback", callback) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("prompt", "Do you want to continue?") + kwargs.setdefault("help", "Confirm the action without prompting.") + return option(*param_decls, **kwargs) + + +def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--password`` option which prompts for a password, hiding + input and asking to enter the value again for confirmation. + + :param param_decls: One or more option names. Defaults to the single + value ``"--password"``. + :param kwargs: Extra arguments are passed to :func:`option`. + """ + if not param_decls: + param_decls = ("--password",) + + kwargs.setdefault("prompt", True) + kwargs.setdefault("confirmation_prompt", True) + kwargs.setdefault("hide_input", True) + return option(*param_decls, **kwargs) + + +def version_option( + version: str | None = None, + *param_decls: str, + package_name: str | None = None, + prog_name: str | None = None, + message: str | None = None, + **kwargs: t.Any, +) -> t.Callable[[FC], FC]: + """Add a ``--version`` option which immediately prints the version + number and exits the program. + + If ``version`` is not provided, Click will try to detect it using + :func:`importlib.metadata.version` to get the version for the + ``package_name``. + + If ``package_name`` is not provided, Click will try to detect it by + inspecting the stack frames. This will be used to detect the + version, so it must match the name of the installed package. + + :param version: The version number to show. If not provided, Click + will try to detect it. + :param param_decls: One or more option names. Defaults to the single + value ``"--version"``. + :param package_name: The package name to detect the version from. If + not provided, Click will try to detect it. + :param prog_name: The name of the CLI to show in the message. If not + provided, it will be detected from the command. + :param message: The message to show. The values ``%(prog)s``, + ``%(package)s``, and ``%(version)s`` are available. Defaults to + ``"%(prog)s, version %(version)s"``. + :param kwargs: Extra arguments are passed to :func:`option`. + :raise RuntimeError: ``version`` could not be detected. + + .. versionchanged:: 8.0 + Add the ``package_name`` parameter, and the ``%(package)s`` + value for messages. + + .. versionchanged:: 8.0 + Use :mod:`importlib.metadata` instead of ``pkg_resources``. The + version is detected based on the package name, not the entry + point name. The Python package name must match the installed + package name, or be passed with ``package_name=``. 
+ """ + if message is None: + message = _("%(prog)s, version %(version)s") + + if version is None and package_name is None: + frame = inspect.currentframe() + f_back = frame.f_back if frame is not None else None + f_globals = f_back.f_globals if f_back is not None else None + # break reference cycle + # https://docs.python.org/3/library/inspect.html#the-interpreter-stack + del frame + + if f_globals is not None: + package_name = f_globals.get("__name__") + + if package_name == "__main__": + package_name = f_globals.get("__package__") + + if package_name: + package_name = package_name.partition(".")[0] + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + + nonlocal prog_name + nonlocal version + + if prog_name is None: + prog_name = ctx.find_root().info_name + + if version is None and package_name is not None: + import importlib.metadata + + try: + version = importlib.metadata.version(package_name) + except importlib.metadata.PackageNotFoundError: + raise RuntimeError( + f"{package_name!r} is not installed. Try passing" + " 'package_name' instead." + ) from None + + if version is None: + raise RuntimeError( + f"Could not determine the version for {package_name!r} automatically." + ) + + echo( + message % {"prog": prog_name, "package": package_name, "version": version}, + color=ctx.color, + ) + ctx.exit() + + if not param_decls: + param_decls = ("--version",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show the version and exit.")) + kwargs["callback"] = callback + return option(*param_decls, **kwargs) + + +def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Pre-configured ``--help`` option which immediately prints the help page + and exits the program. + + :param param_decls: One or more option names. Defaults to the single + value ``"--help"``. + :param kwargs: Extra arguments are passed to :func:`option`. 
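# --- Illustrative usage sketch (editorial aside, not part of the vendored Click source). ---
# version_option looks up the installed distribution's version via importlib.metadata
# when no explicit version is given; "mypkg" and "mycli" are placeholder names.
import click

@click.group()
@click.version_option(package_name="mypkg", prog_name="mycli")
def cli():
    pass

# $ mycli --version
# mycli, version <whatever importlib.metadata reports for "mypkg">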
+ """ + + def show_help(ctx: Context, param: Parameter, value: bool) -> None: + """Callback that print the help page on ```` and exits.""" + if value and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + if not param_decls: + param_decls = ("--help",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show this message and exit.")) + kwargs.setdefault("callback", show_help) + + return option(*param_decls, **kwargs) diff --git a/.venv/lib/python3.12/site-packages/click/exceptions.py b/.venv/lib/python3.12/site-packages/click/exceptions.py new file mode 100644 index 0000000..4d782ee --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/exceptions.py @@ -0,0 +1,308 @@ +from __future__ import annotations + +import collections.abc as cabc +import typing as t +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import get_text_stderr +from .globals import resolve_color_default +from .utils import echo +from .utils import format_filename + +if t.TYPE_CHECKING: + from .core import Command + from .core import Context + from .core import Parameter + + +def _join_param_hints(param_hint: cabc.Sequence[str] | str | None) -> str | None: + if param_hint is not None and not isinstance(param_hint, str): + return " / ".join(repr(x) for x in param_hint) + + return param_hint + + +class ClickException(Exception): + """An exception that Click can handle and show to the user.""" + + #: The exit code for this exception. + exit_code = 1 + + def __init__(self, message: str) -> None: + super().__init__(message) + # The context will be removed by the time we print the message, so cache + # the color settings here to be used later on (in `show`) + self.show_color: bool | None = resolve_color_default() + self.message = message + + def format_message(self) -> str: + return self.message + + def __str__(self) -> str: + return self.message + + def show(self, file: t.IO[t.Any] | None = None) -> None: + if file is None: + file = get_text_stderr() + + echo( + _("Error: {message}").format(message=self.format_message()), + file=file, + color=self.show_color, + ) + + +class UsageError(ClickException): + """An internal exception that signals a usage error. This typically + aborts any further handling. + + :param message: the error message to display. + :param ctx: optionally the context that caused this error. Click will + fill in the context automatically in some situations. + """ + + exit_code = 2 + + def __init__(self, message: str, ctx: Context | None = None) -> None: + super().__init__(message) + self.ctx = ctx + self.cmd: Command | None = self.ctx.command if self.ctx else None + + def show(self, file: t.IO[t.Any] | None = None) -> None: + if file is None: + file = get_text_stderr() + color = None + hint = "" + if ( + self.ctx is not None + and self.ctx.command.get_help_option(self.ctx) is not None + ): + hint = _("Try '{command} {option}' for help.").format( + command=self.ctx.command_path, option=self.ctx.help_option_names[0] + ) + hint = f"{hint}\n" + if self.ctx is not None: + color = self.ctx.color + echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color) + echo( + _("Error: {message}").format(message=self.format_message()), + file=file, + color=color, + ) + + +class BadParameter(UsageError): + """An exception that formats out a standardized error message for a + bad parameter. 
This is useful when thrown from a callback or type as + Click will attach contextual information to it (for instance, which + parameter it is). + + .. versionadded:: 2.0 + + :param param: the parameter object that caused this error. This can + be left out, and Click will attach this info itself + if possible. + :param param_hint: a string that shows up as parameter name. This + can be used as alternative to `param` in cases + where custom validation should happen. If it is + a string it's used as such, if it's a list then + each item is quoted and separated. + """ + + def __init__( + self, + message: str, + ctx: Context | None = None, + param: Parameter | None = None, + param_hint: cabc.Sequence[str] | str | None = None, + ) -> None: + super().__init__(message, ctx) + self.param = param + self.param_hint = param_hint + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + return _("Invalid value: {message}").format(message=self.message) + + return _("Invalid value for {param_hint}: {message}").format( + param_hint=_join_param_hints(param_hint), message=self.message + ) + + +class MissingParameter(BadParameter): + """Raised if click required an option or argument but it was not + provided when invoking the script. + + .. versionadded:: 4.0 + + :param param_type: a string that indicates the type of the parameter. + The default is to inherit the parameter type from + the given `param`. Valid values are ``'parameter'``, + ``'option'`` or ``'argument'``. + """ + + def __init__( + self, + message: str | None = None, + ctx: Context | None = None, + param: Parameter | None = None, + param_hint: cabc.Sequence[str] | str | None = None, + param_type: str | None = None, + ) -> None: + super().__init__(message or "", ctx, param, param_hint) + self.param_type = param_type + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint: cabc.Sequence[str] | str | None = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + param_hint = None + + param_hint = _join_param_hints(param_hint) + param_hint = f" {param_hint}" if param_hint else "" + + param_type = self.param_type + if param_type is None and self.param is not None: + param_type = self.param.param_type_name + + msg = self.message + if self.param is not None: + msg_extra = self.param.type.get_missing_message( + param=self.param, ctx=self.ctx + ) + if msg_extra: + if msg: + msg += f". {msg_extra}" + else: + msg = msg_extra + + msg = f" {msg}" if msg else "" + + # Translate param_type for known types. + if param_type == "argument": + missing = _("Missing argument") + elif param_type == "option": + missing = _("Missing option") + elif param_type == "parameter": + missing = _("Missing parameter") + else: + missing = _("Missing {param_type}").format(param_type=param_type) + + return f"{missing}{param_hint}.{msg}" + + def __str__(self) -> str: + if not self.message: + param_name = self.param.name if self.param else None + return _("Missing parameter: {param_name}").format(param_name=param_name) + else: + return self.message + + +class NoSuchOption(UsageError): + """Raised if click attempted to handle an option that does not + exist. + + .. 
versionadded:: 4.0 + """ + + def __init__( + self, + option_name: str, + message: str | None = None, + possibilities: cabc.Sequence[str] | None = None, + ctx: Context | None = None, + ) -> None: + if message is None: + message = _("No such option: {name}").format(name=option_name) + + super().__init__(message, ctx) + self.option_name = option_name + self.possibilities = possibilities + + def format_message(self) -> str: + if not self.possibilities: + return self.message + + possibility_str = ", ".join(sorted(self.possibilities)) + suggest = ngettext( + "Did you mean {possibility}?", + "(Possible options: {possibilities})", + len(self.possibilities), + ).format(possibility=possibility_str, possibilities=possibility_str) + return f"{self.message} {suggest}" + + +class BadOptionUsage(UsageError): + """Raised if an option is generally supplied but the use of the option + was incorrect. This is for instance raised if the number of arguments + for an option is not correct. + + .. versionadded:: 4.0 + + :param option_name: the name of the option being used incorrectly. + """ + + def __init__( + self, option_name: str, message: str, ctx: Context | None = None + ) -> None: + super().__init__(message, ctx) + self.option_name = option_name + + +class BadArgumentUsage(UsageError): + """Raised if an argument is generally supplied but the use of the argument + was incorrect. This is for instance raised if the number of values + for an argument is not correct. + + .. versionadded:: 6.0 + """ + + +class NoArgsIsHelpError(UsageError): + def __init__(self, ctx: Context) -> None: + self.ctx: Context + super().__init__(ctx.get_help(), ctx=ctx) + + def show(self, file: t.IO[t.Any] | None = None) -> None: + echo(self.format_message(), file=file, err=True, color=self.ctx.color) + + +class FileError(ClickException): + """Raised if a file cannot be opened.""" + + def __init__(self, filename: str, hint: str | None = None) -> None: + if hint is None: + hint = _("unknown error") + + super().__init__(hint) + self.ui_filename: str = format_filename(filename) + self.filename = filename + + def format_message(self) -> str: + return _("Could not open file {filename!r}: {message}").format( + filename=self.ui_filename, message=self.message + ) + + +class Abort(RuntimeError): + """An internal signalling exception that signals Click to abort.""" + + +class Exit(RuntimeError): + """An exception that indicates that the application should exit with some + status code. + + :param code: the status code to exit with. + """ + + __slots__ = ("exit_code",) + + def __init__(self, code: int = 0) -> None: + self.exit_code: int = code diff --git a/.venv/lib/python3.12/site-packages/click/formatting.py b/.venv/lib/python3.12/site-packages/click/formatting.py new file mode 100644 index 0000000..0b64f83 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/formatting.py @@ -0,0 +1,301 @@ +from __future__ import annotations + +import collections.abc as cabc +from contextlib import contextmanager +from gettext import gettext as _ + +from ._compat import term_len +from .parser import _split_opt + +# Can force a width. 
This is used by the test system +FORCED_WIDTH: int | None = None + + +def measure_table(rows: cabc.Iterable[tuple[str, str]]) -> tuple[int, ...]: + widths: dict[int, int] = {} + + for row in rows: + for idx, col in enumerate(row): + widths[idx] = max(widths.get(idx, 0), term_len(col)) + + return tuple(y for x, y in sorted(widths.items())) + + +def iter_rows( + rows: cabc.Iterable[tuple[str, str]], col_count: int +) -> cabc.Iterator[tuple[str, ...]]: + for row in rows: + yield row + ("",) * (col_count - len(row)) + + +def wrap_text( + text: str, + width: int = 78, + initial_indent: str = "", + subsequent_indent: str = "", + preserve_paragraphs: bool = False, +) -> str: + """A helper function that intelligently wraps text. By default, it + assumes that it operates on a single paragraph of text but if the + `preserve_paragraphs` parameter is provided it will intelligently + handle paragraphs (defined by two empty lines). + + If paragraphs are handled, a paragraph can be prefixed with an empty + line containing the ``\\b`` character (``\\x08``) to indicate that + no rewrapping should happen in that block. + + :param text: the text that should be rewrapped. + :param width: the maximum width for the text. + :param initial_indent: the initial indent that should be placed on the + first line as a string. + :param subsequent_indent: the indent string that should be placed on + each consecutive line. + :param preserve_paragraphs: if this flag is set then the wrapping will + intelligently handle paragraphs. + """ + from ._textwrap import TextWrapper + + text = text.expandtabs() + wrapper = TextWrapper( + width, + initial_indent=initial_indent, + subsequent_indent=subsequent_indent, + replace_whitespace=False, + ) + if not preserve_paragraphs: + return wrapper.fill(text) + + p: list[tuple[int, bool, str]] = [] + buf: list[str] = [] + indent = None + + def _flush_par() -> None: + if not buf: + return + if buf[0].strip() == "\b": + p.append((indent or 0, True, "\n".join(buf[1:]))) + else: + p.append((indent or 0, False, " ".join(buf))) + del buf[:] + + for line in text.splitlines(): + if not line: + _flush_par() + indent = None + else: + if indent is None: + orig_len = term_len(line) + line = line.lstrip() + indent = orig_len - term_len(line) + buf.append(line) + _flush_par() + + rv = [] + for indent, raw, text in p: + with wrapper.extra_indent(" " * indent): + if raw: + rv.append(wrapper.indent_only(text)) + else: + rv.append(wrapper.fill(text)) + + return "\n\n".join(rv) + + +class HelpFormatter: + """This class helps with formatting text-based help pages. It's + usually just needed for very special internal cases, but it's also + exposed so that developers can write their own fancy outputs. + + At present, it always writes into memory. + + :param indent_increment: the additional increment for each level. + :param width: the width for the text. This defaults to the terminal + width clamped to a maximum of 78. 
+ """ + + def __init__( + self, + indent_increment: int = 2, + width: int | None = None, + max_width: int | None = None, + ) -> None: + self.indent_increment = indent_increment + if max_width is None: + max_width = 80 + if width is None: + import shutil + + width = FORCED_WIDTH + if width is None: + width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50) + self.width = width + self.current_indent: int = 0 + self.buffer: list[str] = [] + + def write(self, string: str) -> None: + """Writes a unicode string into the internal buffer.""" + self.buffer.append(string) + + def indent(self) -> None: + """Increases the indentation.""" + self.current_indent += self.indent_increment + + def dedent(self) -> None: + """Decreases the indentation.""" + self.current_indent -= self.indent_increment + + def write_usage(self, prog: str, args: str = "", prefix: str | None = None) -> None: + """Writes a usage line into the buffer. + + :param prog: the program name. + :param args: whitespace separated list of arguments. + :param prefix: The prefix for the first line. Defaults to + ``"Usage: "``. + """ + if prefix is None: + prefix = f"{_('Usage:')} " + + usage_prefix = f"{prefix:>{self.current_indent}}{prog} " + text_width = self.width - self.current_indent + + if text_width >= (term_len(usage_prefix) + 20): + # The arguments will fit to the right of the prefix. + indent = " " * term_len(usage_prefix) + self.write( + wrap_text( + args, + text_width, + initial_indent=usage_prefix, + subsequent_indent=indent, + ) + ) + else: + # The prefix is too long, put the arguments on the next line. + self.write(usage_prefix) + self.write("\n") + indent = " " * (max(self.current_indent, term_len(prefix)) + 4) + self.write( + wrap_text( + args, text_width, initial_indent=indent, subsequent_indent=indent + ) + ) + + self.write("\n") + + def write_heading(self, heading: str) -> None: + """Writes a heading into the buffer.""" + self.write(f"{'':>{self.current_indent}}{heading}:\n") + + def write_paragraph(self) -> None: + """Writes a paragraph into the buffer.""" + if self.buffer: + self.write("\n") + + def write_text(self, text: str) -> None: + """Writes re-indented text into the buffer. This rewraps and + preserves paragraphs. + """ + indent = " " * self.current_indent + self.write( + wrap_text( + text, + self.width, + initial_indent=indent, + subsequent_indent=indent, + preserve_paragraphs=True, + ) + ) + self.write("\n") + + def write_dl( + self, + rows: cabc.Sequence[tuple[str, str]], + col_max: int = 30, + col_spacing: int = 2, + ) -> None: + """Writes a definition list into the buffer. This is how options + and commands are usually formatted. + + :param rows: a list of two item tuples for the terms and values. + :param col_max: the maximum width of the first column. + :param col_spacing: the number of spaces between the first and + second column. 
+ """ + rows = list(rows) + widths = measure_table(rows) + if len(widths) != 2: + raise TypeError("Expected two columns for definition list") + + first_col = min(widths[0], col_max) + col_spacing + + for first, second in iter_rows(rows, len(widths)): + self.write(f"{'':>{self.current_indent}}{first}") + if not second: + self.write("\n") + continue + if term_len(first) <= first_col - col_spacing: + self.write(" " * (first_col - term_len(first))) + else: + self.write("\n") + self.write(" " * (first_col + self.current_indent)) + + text_width = max(self.width - first_col - 2, 10) + wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) + lines = wrapped_text.splitlines() + + if lines: + self.write(f"{lines[0]}\n") + + for line in lines[1:]: + self.write(f"{'':>{first_col + self.current_indent}}{line}\n") + else: + self.write("\n") + + @contextmanager + def section(self, name: str) -> cabc.Iterator[None]: + """Helpful context manager that writes a paragraph, a heading, + and the indents. + + :param name: the section name that is written as heading. + """ + self.write_paragraph() + self.write_heading(name) + self.indent() + try: + yield + finally: + self.dedent() + + @contextmanager + def indentation(self) -> cabc.Iterator[None]: + """A context manager that increases the indentation.""" + self.indent() + try: + yield + finally: + self.dedent() + + def getvalue(self) -> str: + """Returns the buffer contents.""" + return "".join(self.buffer) + + +def join_options(options: cabc.Sequence[str]) -> tuple[str, bool]: + """Given a list of option strings this joins them in the most appropriate + way and returns them in the form ``(formatted_string, + any_prefix_is_slash)`` where the second item in the tuple is a flag that + indicates if any of the option prefixes was a slash. + """ + rv = [] + any_prefix_is_slash = False + + for opt in options: + prefix = _split_opt(opt)[0] + + if prefix == "/": + any_prefix_is_slash = True + + rv.append((len(prefix), opt)) + + rv.sort(key=lambda x: x[0]) + return ", ".join(x[1] for x in rv), any_prefix_is_slash diff --git a/.venv/lib/python3.12/site-packages/click/globals.py b/.venv/lib/python3.12/site-packages/click/globals.py new file mode 100644 index 0000000..a2f9172 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/globals.py @@ -0,0 +1,67 @@ +from __future__ import annotations + +import typing as t +from threading import local + +if t.TYPE_CHECKING: + from .core import Context + +_local = local() + + +@t.overload +def get_current_context(silent: t.Literal[False] = False) -> Context: ... + + +@t.overload +def get_current_context(silent: bool = ...) -> Context | None: ... + + +def get_current_context(silent: bool = False) -> Context | None: + """Returns the current click context. This can be used as a way to + access the current context object from anywhere. This is a more implicit + alternative to the :func:`pass_context` decorator. This function is + primarily useful for helpers such as :func:`echo` which might be + interested in changing its behavior based on the current context. + + To push the current context, :meth:`Context.scope` can be used. + + .. versionadded:: 5.0 + + :param silent: if set to `True` the return value is `None` if no context + is available. The default behavior is to raise a + :exc:`RuntimeError`. 
+ """ + try: + return t.cast("Context", _local.stack[-1]) + except (AttributeError, IndexError) as e: + if not silent: + raise RuntimeError("There is no active click context.") from e + + return None + + +def push_context(ctx: Context) -> None: + """Pushes a new context to the current stack.""" + _local.__dict__.setdefault("stack", []).append(ctx) + + +def pop_context() -> None: + """Removes the top level from the stack.""" + _local.stack.pop() + + +def resolve_color_default(color: bool | None = None) -> bool | None: + """Internal helper to get the default value of the color flag. If a + value is passed it's returned unchanged, otherwise it's looked up from + the current context. + """ + if color is not None: + return color + + ctx = get_current_context(silent=True) + + if ctx is not None: + return ctx.color + + return None diff --git a/.venv/lib/python3.12/site-packages/click/parser.py b/.venv/lib/python3.12/site-packages/click/parser.py new file mode 100644 index 0000000..1ea1f71 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/parser.py @@ -0,0 +1,532 @@ +""" +This module started out as largely a copy paste from the stdlib's +optparse module with the features removed that we do not need from +optparse because we implement them in Click on a higher level (for +instance type handling, help formatting and a lot more). + +The plan is to remove more and more from here over time. + +The reason this is a different module and not optparse from the stdlib +is that there are differences in 2.x and 3.x about the error messages +generated and optparse in the stdlib uses gettext for no good reason +and might cause us issues. + +Click uses parts of optparse written by Gregory P. Ward and maintained +by the Python Software Foundation. This is limited to code in parser.py. + +Copyright 2001-2006 Gregory P. Ward. All rights reserved. +Copyright 2002-2006 Python Software Foundation. All rights reserved. +""" + +# This code uses parts of optparse written by Gregory P. Ward and +# maintained by the Python Software Foundation. +# Copyright 2001-2006 Gregory P. Ward +# Copyright 2002-2006 Python Software Foundation +from __future__ import annotations + +import collections.abc as cabc +import typing as t +from collections import deque +from gettext import gettext as _ +from gettext import ngettext + +from ._utils import FLAG_NEEDS_VALUE +from ._utils import UNSET +from .exceptions import BadArgumentUsage +from .exceptions import BadOptionUsage +from .exceptions import NoSuchOption +from .exceptions import UsageError + +if t.TYPE_CHECKING: + from ._utils import T_FLAG_NEEDS_VALUE + from ._utils import T_UNSET + from .core import Argument as CoreArgument + from .core import Context + from .core import Option as CoreOption + from .core import Parameter as CoreParameter + +V = t.TypeVar("V") + + +def _unpack_args( + args: cabc.Sequence[str], nargs_spec: cabc.Sequence[int] +) -> tuple[cabc.Sequence[str | cabc.Sequence[str | None] | None], list[str]]: + """Given an iterable of arguments and an iterable of nargs specifications, + it returns a tuple with all the unpacked arguments at the first index + and all remaining arguments as the second. + + The nargs specification is the number of arguments that should be consumed + or `-1` to indicate that this position should eat up all the remainders. + + Missing items are filled with ``UNSET``. + """ + args = deque(args) + nargs_spec = deque(nargs_spec) + rv: list[str | tuple[str | T_UNSET, ...] 
| T_UNSET] = [] + spos: int | None = None + + def _fetch(c: deque[V]) -> V | T_UNSET: + try: + if spos is None: + return c.popleft() + else: + return c.pop() + except IndexError: + return UNSET + + while nargs_spec: + nargs = _fetch(nargs_spec) + + if nargs is None: + continue + + if nargs == 1: + rv.append(_fetch(args)) # type: ignore[arg-type] + elif nargs > 1: + x = [_fetch(args) for _ in range(nargs)] + + # If we're reversed, we're pulling in the arguments in reverse, + # so we need to turn them around. + if spos is not None: + x.reverse() + + rv.append(tuple(x)) + elif nargs < 0: + if spos is not None: + raise TypeError("Cannot have two nargs < 0") + + spos = len(rv) + rv.append(UNSET) + + # spos is the position of the wildcard (star). If it's not `None`, + # we fill it with the remainder. + if spos is not None: + rv[spos] = tuple(args) + args = [] + rv[spos + 1 :] = reversed(rv[spos + 1 :]) + + return tuple(rv), list(args) + + +def _split_opt(opt: str) -> tuple[str, str]: + first = opt[:1] + if first.isalnum(): + return "", opt + if opt[1:2] == first: + return opt[:2], opt[2:] + return first, opt[1:] + + +def _normalize_opt(opt: str, ctx: Context | None) -> str: + if ctx is None or ctx.token_normalize_func is None: + return opt + prefix, opt = _split_opt(opt) + return f"{prefix}{ctx.token_normalize_func(opt)}" + + +class _Option: + def __init__( + self, + obj: CoreOption, + opts: cabc.Sequence[str], + dest: str | None, + action: str | None = None, + nargs: int = 1, + const: t.Any | None = None, + ): + self._short_opts = [] + self._long_opts = [] + self.prefixes: set[str] = set() + + for opt in opts: + prefix, value = _split_opt(opt) + if not prefix: + raise ValueError(f"Invalid start character for option ({opt})") + self.prefixes.add(prefix[0]) + if len(prefix) == 1 and len(value) == 1: + self._short_opts.append(opt) + else: + self._long_opts.append(opt) + self.prefixes.add(prefix) + + if action is None: + action = "store" + + self.dest = dest + self.action = action + self.nargs = nargs + self.const = const + self.obj = obj + + @property + def takes_value(self) -> bool: + return self.action in ("store", "append") + + def process(self, value: t.Any, state: _ParsingState) -> None: + if self.action == "store": + state.opts[self.dest] = value # type: ignore + elif self.action == "store_const": + state.opts[self.dest] = self.const # type: ignore + elif self.action == "append": + state.opts.setdefault(self.dest, []).append(value) # type: ignore + elif self.action == "append_const": + state.opts.setdefault(self.dest, []).append(self.const) # type: ignore + elif self.action == "count": + state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore + else: + raise ValueError(f"unknown action '{self.action}'") + state.order.append(self.obj) + + +class _Argument: + def __init__(self, obj: CoreArgument, dest: str | None, nargs: int = 1): + self.dest = dest + self.nargs = nargs + self.obj = obj + + def process( + self, + value: str | cabc.Sequence[str | None] | None | T_UNSET, + state: _ParsingState, + ) -> None: + if self.nargs > 1: + assert isinstance(value, cabc.Sequence) + holes = sum(1 for x in value if x is UNSET) + if holes == len(value): + value = UNSET + elif holes != 0: + raise BadArgumentUsage( + _("Argument {name!r} takes {nargs} values.").format( + name=self.dest, nargs=self.nargs + ) + ) + + # We failed to collect any argument value so we consider the argument as unset. 
+ if value == (): + value = UNSET + + state.opts[self.dest] = value # type: ignore + state.order.append(self.obj) + + +class _ParsingState: + def __init__(self, rargs: list[str]) -> None: + self.opts: dict[str, t.Any] = {} + self.largs: list[str] = [] + self.rargs = rargs + self.order: list[CoreParameter] = [] + + +class _OptionParser: + """The option parser is an internal class that is ultimately used to + parse options and arguments. It's modelled after optparse and brings + a similar but vastly simplified API. It should generally not be used + directly as the high level Click classes wrap it for you. + + It's not nearly as extensible as optparse or argparse as it does not + implement features that are implemented on a higher level (such as + types or defaults). + + :param ctx: optionally the :class:`~click.Context` where this parser + should go with. + + .. deprecated:: 8.2 + Will be removed in Click 9.0. + """ + + def __init__(self, ctx: Context | None = None) -> None: + #: The :class:`~click.Context` for this parser. This might be + #: `None` for some advanced use cases. + self.ctx = ctx + #: This controls how the parser deals with interspersed arguments. + #: If this is set to `False`, the parser will stop on the first + #: non-option. Click uses this to implement nested subcommands + #: safely. + self.allow_interspersed_args: bool = True + #: This tells the parser how to deal with unknown options. By + #: default it will error out (which is sensible), but there is a + #: second mode where it will ignore it and continue processing + #: after shifting all the unknown options into the resulting args. + self.ignore_unknown_options: bool = False + + if ctx is not None: + self.allow_interspersed_args = ctx.allow_interspersed_args + self.ignore_unknown_options = ctx.ignore_unknown_options + + self._short_opt: dict[str, _Option] = {} + self._long_opt: dict[str, _Option] = {} + self._opt_prefixes = {"-", "--"} + self._args: list[_Argument] = [] + + def add_option( + self, + obj: CoreOption, + opts: cabc.Sequence[str], + dest: str | None, + action: str | None = None, + nargs: int = 1, + const: t.Any | None = None, + ) -> None: + """Adds a new option named `dest` to the parser. The destination + is not inferred (unlike with optparse) and needs to be explicitly + provided. Action can be any of ``store``, ``store_const``, + ``append``, ``append_const`` or ``count``. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + opts = [_normalize_opt(opt, self.ctx) for opt in opts] + option = _Option(obj, opts, dest, action=action, nargs=nargs, const=const) + self._opt_prefixes.update(option.prefixes) + for opt in option._short_opts: + self._short_opt[opt] = option + for opt in option._long_opts: + self._long_opt[opt] = option + + def add_argument(self, obj: CoreArgument, dest: str | None, nargs: int = 1) -> None: + """Adds a positional argument named `dest` to the parser. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + self._args.append(_Argument(obj, dest=dest, nargs=nargs)) + + def parse_args( + self, args: list[str] + ) -> tuple[dict[str, t.Any], list[str], list[CoreParameter]]: + """Parses positional arguments and returns ``(values, args, order)`` + for the parsed options and arguments as well as the leftover + arguments if there are any. The order is a list of objects as they + appear on the command line. 
If arguments appear multiple times they + will be memorized multiple times as well. + """ + state = _ParsingState(args) + try: + self._process_args_for_options(state) + self._process_args_for_args(state) + except UsageError: + if self.ctx is None or not self.ctx.resilient_parsing: + raise + return state.opts, state.largs, state.order + + def _process_args_for_args(self, state: _ParsingState) -> None: + pargs, args = _unpack_args( + state.largs + state.rargs, [x.nargs for x in self._args] + ) + + for idx, arg in enumerate(self._args): + arg.process(pargs[idx], state) + + state.largs = args + state.rargs = [] + + def _process_args_for_options(self, state: _ParsingState) -> None: + while state.rargs: + arg = state.rargs.pop(0) + arglen = len(arg) + # Double dashes always handled explicitly regardless of what + # prefixes are valid. + if arg == "--": + return + elif arg[:1] in self._opt_prefixes and arglen > 1: + self._process_opts(arg, state) + elif self.allow_interspersed_args: + state.largs.append(arg) + else: + state.rargs.insert(0, arg) + return + + # Say this is the original argument list: + # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] + # ^ + # (we are about to process arg(i)). + # + # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of + # [arg0, ..., arg(i-1)] (any options and their arguments will have + # been removed from largs). + # + # The while loop will usually consume 1 or more arguments per pass. + # If it consumes 1 (eg. arg is an option that takes no arguments), + # then after _process_arg() is done the situation is: + # + # largs = subset of [arg0, ..., arg(i)] + # rargs = [arg(i+1), ..., arg(N-1)] + # + # If allow_interspersed_args is false, largs will always be + # *empty* -- still a subset of [arg0, ..., arg(i-1)], but + # not a very interesting subset! + + def _match_long_opt( + self, opt: str, explicit_value: str | None, state: _ParsingState + ) -> None: + if opt not in self._long_opt: + from difflib import get_close_matches + + possibilities = get_close_matches(opt, self._long_opt) + raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) + + option = self._long_opt[opt] + if option.takes_value: + # At this point it's safe to modify rargs by injecting the + # explicit value, because no exception is raised in this + # branch. This means that the inserted value will be fully + # consumed. + if explicit_value is not None: + state.rargs.insert(0, explicit_value) + + value = self._get_value_from_state(opt, option, state) + + elif explicit_value is not None: + raise BadOptionUsage( + opt, _("Option {name!r} does not take a value.").format(name=opt) + ) + + else: + value = UNSET + + option.process(value, state) + + def _match_short_opt(self, arg: str, state: _ParsingState) -> None: + stop = False + i = 1 + prefix = arg[0] + unknown_options = [] + + for ch in arg[1:]: + opt = _normalize_opt(f"{prefix}{ch}", self.ctx) + option = self._short_opt.get(opt) + i += 1 + + if not option: + if self.ignore_unknown_options: + unknown_options.append(ch) + continue + raise NoSuchOption(opt, ctx=self.ctx) + if option.takes_value: + # Any characters left in arg? Pretend they're the + # next arg, and stop consuming characters of arg. 
+ if i < len(arg): + state.rargs.insert(0, arg[i:]) + stop = True + + value = self._get_value_from_state(opt, option, state) + + else: + value = UNSET + + option.process(value, state) + + if stop: + break + + # If we got any unknown options we recombine the string of the + # remaining options and re-attach the prefix, then report that + # to the state as new larg. This way there is basic combinatorics + # that can be achieved while still ignoring unknown arguments. + if self.ignore_unknown_options and unknown_options: + state.largs.append(f"{prefix}{''.join(unknown_options)}") + + def _get_value_from_state( + self, option_name: str, option: _Option, state: _ParsingState + ) -> str | cabc.Sequence[str] | T_FLAG_NEEDS_VALUE: + nargs = option.nargs + + value: str | cabc.Sequence[str] | T_FLAG_NEEDS_VALUE + + if len(state.rargs) < nargs: + if option.obj._flag_needs_value: + # Option allows omitting the value. + value = FLAG_NEEDS_VALUE + else: + raise BadOptionUsage( + option_name, + ngettext( + "Option {name!r} requires an argument.", + "Option {name!r} requires {nargs} arguments.", + nargs, + ).format(name=option_name, nargs=nargs), + ) + elif nargs == 1: + next_rarg = state.rargs[0] + + if ( + option.obj._flag_needs_value + and isinstance(next_rarg, str) + and next_rarg[:1] in self._opt_prefixes + and len(next_rarg) > 1 + ): + # The next arg looks like the start of an option, don't + # use it as the value if omitting the value is allowed. + value = FLAG_NEEDS_VALUE + else: + value = state.rargs.pop(0) + else: + value = tuple(state.rargs[:nargs]) + del state.rargs[:nargs] + + return value + + def _process_opts(self, arg: str, state: _ParsingState) -> None: + explicit_value = None + # Long option handling happens in two parts. The first part is + # supporting explicitly attached values. In any case, we will try + # to long match the option first. + if "=" in arg: + long_opt, explicit_value = arg.split("=", 1) + else: + long_opt = arg + norm_long_opt = _normalize_opt(long_opt, self.ctx) + + # At this point we will match the (assumed) long option through + # the long option matching code. Note that this allows options + # like "-foo" to be matched as long options. + try: + self._match_long_opt(norm_long_opt, explicit_value, state) + except NoSuchOption: + # At this point the long option matching failed, and we need + # to try with short options. However there is a special rule + # which says, that if we have a two character options prefix + # (applies to "--foo" for instance), we do not dispatch to the + # short option code and will instead raise the no option + # error. + if arg[:2] not in self._opt_prefixes: + self._match_short_opt(arg, state) + return + + if not self.ignore_unknown_options: + raise + + state.largs.append(arg) + + +def __getattr__(name: str) -> object: + import warnings + + if name in { + "OptionParser", + "Argument", + "Option", + "split_opt", + "normalize_opt", + "ParsingState", + }: + warnings.warn( + f"'parser.{name}' is deprecated and will be removed in Click 9.0." 
+ " The old parser is available in 'optparse'.", + DeprecationWarning, + stacklevel=2, + ) + return globals()[f"_{name}"] + + if name == "split_arg_string": + from .shell_completion import split_arg_string + + warnings.warn( + "Importing 'parser.split_arg_string' is deprecated, it will only be" + " available in 'shell_completion' in Click 9.0.", + DeprecationWarning, + stacklevel=2, + ) + return split_arg_string + + raise AttributeError(name) diff --git a/.venv/lib/python3.12/site-packages/click/py.typed b/.venv/lib/python3.12/site-packages/click/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/click/shell_completion.py b/.venv/lib/python3.12/site-packages/click/shell_completion.py new file mode 100644 index 0000000..8f1564c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/shell_completion.py @@ -0,0 +1,667 @@ +from __future__ import annotations + +import collections.abc as cabc +import os +import re +import typing as t +from gettext import gettext as _ + +from .core import Argument +from .core import Command +from .core import Context +from .core import Group +from .core import Option +from .core import Parameter +from .core import ParameterSource +from .utils import echo + + +def shell_complete( + cli: Command, + ctx_args: cabc.MutableMapping[str, t.Any], + prog_name: str, + complete_var: str, + instruction: str, +) -> int: + """Perform shell completion for the given CLI program. + + :param cli: Command being called. + :param ctx_args: Extra arguments to pass to + ``cli.make_context``. + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. + :param instruction: Value of ``complete_var`` with the completion + instruction and shell, in the form ``instruction_shell``. + :return: Status code to exit with. + """ + shell, _, instruction = instruction.partition("_") + comp_cls = get_completion_class(shell) + + if comp_cls is None: + return 1 + + comp = comp_cls(cli, ctx_args, prog_name, complete_var) + + if instruction == "source": + echo(comp.source()) + return 0 + + if instruction == "complete": + echo(comp.complete()) + return 0 + + return 1 + + +class CompletionItem: + """Represents a completion value and metadata about the value. The + default metadata is ``type`` to indicate special shell handling, + and ``help`` if a shell supports showing a help string next to the + value. + + Arbitrary parameters can be passed when creating the object, and + accessed using ``item.attr``. If an attribute wasn't passed, + accessing it returns ``None``. + + :param value: The completion suggestion. + :param type: Tells the shell script to provide special completion + support for the type. Click uses ``"dir"`` and ``"file"``. + :param help: String shown next to the value if supported. + :param kwargs: Arbitrary metadata. The built-in implementations + don't use this, but custom type completions paired with custom + shell support could use it. + """ + + __slots__ = ("value", "type", "help", "_info") + + def __init__( + self, + value: t.Any, + type: str = "plain", + help: str | None = None, + **kwargs: t.Any, + ) -> None: + self.value: t.Any = value + self.type: str = type + self.help: str | None = help + self._info = kwargs + + def __getattr__(self, name: str) -> t.Any: + return self._info.get(name) + + +# Only Bash >= 4.4 has the nosort option. 
+_SOURCE_BASH = """\ +%(complete_func)s() { + local IFS=$'\\n' + local response + + response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \ +%(complete_var)s=bash_complete $1) + + for completion in $response; do + IFS=',' read type value <<< "$completion" + + if [[ $type == 'dir' ]]; then + COMPREPLY=() + compopt -o dirnames + elif [[ $type == 'file' ]]; then + COMPREPLY=() + compopt -o default + elif [[ $type == 'plain' ]]; then + COMPREPLY+=($value) + fi + done + + return 0 +} + +%(complete_func)s_setup() { + complete -o nosort -F %(complete_func)s %(prog_name)s +} + +%(complete_func)s_setup; +""" + +# See ZshComplete.format_completion below, and issue #2703, before +# changing this script. +# +# (TL;DR: _describe is picky about the format, but this Zsh script snippet +# is already widely deployed. So freeze this script, and use clever-ish +# handling of colons in ZshComplet.format_completion.) +_SOURCE_ZSH = """\ +#compdef %(prog_name)s + +%(complete_func)s() { + local -a completions + local -a completions_with_descriptions + local -a response + (( ! $+commands[%(prog_name)s] )) && return 1 + + response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \ +%(complete_var)s=zsh_complete %(prog_name)s)}") + + for type key descr in ${response}; do + if [[ "$type" == "plain" ]]; then + if [[ "$descr" == "_" ]]; then + completions+=("$key") + else + completions_with_descriptions+=("$key":"$descr") + fi + elif [[ "$type" == "dir" ]]; then + _path_files -/ + elif [[ "$type" == "file" ]]; then + _path_files -f + fi + done + + if [ -n "$completions_with_descriptions" ]; then + _describe -V unsorted completions_with_descriptions -U + fi + + if [ -n "$completions" ]; then + compadd -U -V unsorted -a completions + fi +} + +if [[ $zsh_eval_context[-1] == loadautofunc ]]; then + # autoload from fpath, call function directly + %(complete_func)s "$@" +else + # eval/source/. command, register function for later + compdef %(complete_func)s %(prog_name)s +fi +""" + +_SOURCE_FISH = """\ +function %(complete_func)s; + set -l response (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \ +COMP_CWORD=(commandline -t) %(prog_name)s); + + for completion in $response; + set -l metadata (string split "," $completion); + + if test $metadata[1] = "dir"; + __fish_complete_directories $metadata[2]; + else if test $metadata[1] = "file"; + __fish_complete_path $metadata[2]; + else if test $metadata[1] = "plain"; + echo $metadata[2]; + end; + end; +end; + +complete --no-files --command %(prog_name)s --arguments \ +"(%(complete_func)s)"; +""" + + +class ShellComplete: + """Base class for providing shell completion support. A subclass for + a given shell will override attributes and methods to implement the + completion instructions (``source`` and ``complete``). + + :param cli: Command being called. + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. + + .. versionadded:: 8.0 + """ + + name: t.ClassVar[str] + """Name to register the shell as with :func:`add_completion_class`. + This is used in completion instructions (``{name}_source`` and + ``{name}_complete``). + """ + + source_template: t.ClassVar[str] + """Completion script template formatted by :meth:`source`. This must + be provided by subclasses. 
+ """ + + def __init__( + self, + cli: Command, + ctx_args: cabc.MutableMapping[str, t.Any], + prog_name: str, + complete_var: str, + ) -> None: + self.cli = cli + self.ctx_args = ctx_args + self.prog_name = prog_name + self.complete_var = complete_var + + @property + def func_name(self) -> str: + """The name of the shell function defined by the completion + script. + """ + safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), flags=re.ASCII) + return f"_{safe_name}_completion" + + def source_vars(self) -> dict[str, t.Any]: + """Vars for formatting :attr:`source_template`. + + By default this provides ``complete_func``, ``complete_var``, + and ``prog_name``. + """ + return { + "complete_func": self.func_name, + "complete_var": self.complete_var, + "prog_name": self.prog_name, + } + + def source(self) -> str: + """Produce the shell script that defines the completion + function. By default this ``%``-style formats + :attr:`source_template` with the dict returned by + :meth:`source_vars`. + """ + return self.source_template % self.source_vars() + + def get_completion_args(self) -> tuple[list[str], str]: + """Use the env vars defined by the shell script to return a + tuple of ``args, incomplete``. This must be implemented by + subclasses. + """ + raise NotImplementedError + + def get_completions(self, args: list[str], incomplete: str) -> list[CompletionItem]: + """Determine the context and last complete command or parameter + from the complete args. Call that object's ``shell_complete`` + method to get the completions for the incomplete value. + + :param args: List of complete args before the incomplete value. + :param incomplete: Value being completed. May be empty. + """ + ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args) + obj, incomplete = _resolve_incomplete(ctx, args, incomplete) + return obj.shell_complete(ctx, incomplete) + + def format_completion(self, item: CompletionItem) -> str: + """Format a completion item into the form recognized by the + shell script. This must be implemented by subclasses. + + :param item: Completion item to format. + """ + raise NotImplementedError + + def complete(self) -> str: + """Produce the completion data to send back to the shell. + + By default this calls :meth:`get_completion_args`, gets the + completions, then calls :meth:`format_completion` for each + completion. + """ + args, incomplete = self.get_completion_args() + completions = self.get_completions(args, incomplete) + out = [self.format_completion(item) for item in completions] + return "\n".join(out) + + +class BashComplete(ShellComplete): + """Shell completion for Bash.""" + + name = "bash" + source_template = _SOURCE_BASH + + @staticmethod + def _check_version() -> None: + import shutil + import subprocess + + bash_exe = shutil.which("bash") + + if bash_exe is None: + match = None + else: + output = subprocess.run( + [bash_exe, "--norc", "-c", 'echo "${BASH_VERSION}"'], + stdout=subprocess.PIPE, + ) + match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode()) + + if match is not None: + major, minor = match.groups() + + if major < "4" or major == "4" and minor < "4": + echo( + _( + "Shell completion is not supported for Bash" + " versions older than 4.4." 
+ ), + err=True, + ) + else: + echo( + _("Couldn't detect Bash version, shell completion is not supported."), + err=True, + ) + + def source(self) -> str: + self._check_version() + return super().source() + + def get_completion_args(self) -> tuple[list[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + return f"{item.type},{item.value}" + + +class ZshComplete(ShellComplete): + """Shell completion for Zsh.""" + + name = "zsh" + source_template = _SOURCE_ZSH + + def get_completion_args(self) -> tuple[list[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + help_ = item.help or "_" + # The zsh completion script uses `_describe` on items with help + # texts (which splits the item help from the item value at the + # first unescaped colon) and `compadd` on items without help + # text (which uses the item value as-is and does not support + # colon escaping). So escape colons in the item value if and + # only if the item help is not the sentinel "_" value, as used + # by the completion script. + # + # (The zsh completion script is potentially widely deployed, and + # thus harder to fix than this method.) + # + # See issue #1812 and issue #2703 for further context. + value = item.value.replace(":", r"\:") if help_ != "_" else item.value + return f"{item.type}\n{value}\n{help_}" + + +class FishComplete(ShellComplete): + """Shell completion for Fish.""" + + name = "fish" + source_template = _SOURCE_FISH + + def get_completion_args(self) -> tuple[list[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + incomplete = os.environ["COMP_CWORD"] + if incomplete: + incomplete = split_arg_string(incomplete)[0] + args = cwords[1:] + + # Fish stores the partial word in both COMP_WORDS and + # COMP_CWORD, remove it from complete args. + if incomplete and args and args[-1] == incomplete: + args.pop() + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + if item.help: + return f"{item.type},{item.value}\t{item.help}" + + return f"{item.type},{item.value}" + + +ShellCompleteType = t.TypeVar("ShellCompleteType", bound="type[ShellComplete]") + + +_available_shells: dict[str, type[ShellComplete]] = { + "bash": BashComplete, + "fish": FishComplete, + "zsh": ZshComplete, +} + + +def add_completion_class( + cls: ShellCompleteType, name: str | None = None +) -> ShellCompleteType: + """Register a :class:`ShellComplete` subclass under the given name. + The name will be provided by the completion instruction environment + variable during completion. + + :param cls: The completion class that will handle completion for the + shell. + :param name: Name to register the class under. Defaults to the + class's ``name`` attribute. + """ + if name is None: + name = cls.name + + _available_shells[name] = cls + + return cls + + +def get_completion_class(shell: str) -> type[ShellComplete] | None: + """Look up a registered :class:`ShellComplete` subclass by the name + provided by the completion instruction environment variable. If the + name isn't registered, returns ``None``. 
+ + :param shell: Name the class is registered under. + """ + return _available_shells.get(shell) + + +def split_arg_string(string: str) -> list[str]: + """Split an argument string as with :func:`shlex.split`, but don't + fail if the string is incomplete. Ignores a missing closing quote or + incomplete escape sequence and uses the partial token as-is. + + .. code-block:: python + + split_arg_string("example 'my file") + ["example", "my file"] + + split_arg_string("example my\\") + ["example", "my"] + + :param string: String to split. + + .. versionchanged:: 8.2 + Moved to ``shell_completion`` from ``parser``. + """ + import shlex + + lex = shlex.shlex(string, posix=True) + lex.whitespace_split = True + lex.commenters = "" + out = [] + + try: + for token in lex: + out.append(token) + except ValueError: + # Raised when end-of-string is reached in an invalid state. Use + # the partial token as-is. The quote or escape character is in + # lex.state, not lex.token. + out.append(lex.token) + + return out + + +def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool: + """Determine if the given parameter is an argument that can still + accept values. + + :param ctx: Invocation context for the command represented by the + parsed complete args. + :param param: Argument object being checked. + """ + if not isinstance(param, Argument): + return False + + assert param.name is not None + # Will be None if expose_value is False. + value = ctx.params.get(param.name) + return ( + param.nargs == -1 + or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE + or ( + param.nargs > 1 + and isinstance(value, (tuple, list)) + and len(value) < param.nargs + ) + ) + + +def _start_of_option(ctx: Context, value: str) -> bool: + """Check if the value looks like the start of an option.""" + if not value: + return False + + c = value[0] + return c in ctx._opt_prefixes + + +def _is_incomplete_option(ctx: Context, args: list[str], param: Parameter) -> bool: + """Determine if the given parameter is an option that needs a value. + + :param args: List of complete args before the incomplete value. + :param param: Option object being checked. + """ + if not isinstance(param, Option): + return False + + if param.is_flag or param.count: + return False + + last_option = None + + for index, arg in enumerate(reversed(args)): + if index + 1 > param.nargs: + break + + if _start_of_option(ctx, arg): + last_option = arg + break + + return last_option is not None and last_option in param.opts + + +def _resolve_context( + cli: Command, + ctx_args: cabc.MutableMapping[str, t.Any], + prog_name: str, + args: list[str], +) -> Context: + """Produce the context hierarchy starting with the command and + traversing the complete arguments. This only follows the commands, + it doesn't trigger input prompts or callbacks. + + :param cli: Command being called. + :param prog_name: Name of the executable in the shell. + :param args: List of complete args before the incomplete value. 
+ """ + ctx_args["resilient_parsing"] = True + with cli.make_context(prog_name, args.copy(), **ctx_args) as ctx: + args = ctx._protected_args + ctx.args + + while args: + command = ctx.command + + if isinstance(command, Group): + if not command.chain: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + with cmd.make_context( + name, args, parent=ctx, resilient_parsing=True + ) as sub_ctx: + ctx = sub_ctx + args = ctx._protected_args + ctx.args + else: + sub_ctx = ctx + + while args: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + with cmd.make_context( + name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + resilient_parsing=True, + ) as sub_sub_ctx: + sub_ctx = sub_sub_ctx + args = sub_ctx.args + + ctx = sub_ctx + args = [*sub_ctx._protected_args, *sub_ctx.args] + else: + break + + return ctx + + +def _resolve_incomplete( + ctx: Context, args: list[str], incomplete: str +) -> tuple[Command | Parameter, str]: + """Find the Click object that will handle the completion of the + incomplete value. Return the object and the incomplete value. + + :param ctx: Invocation context for the command represented by + the parsed complete args. + :param args: List of complete args before the incomplete value. + :param incomplete: Value being completed. May be empty. + """ + # Different shells treat an "=" between a long option name and + # value differently. Might keep the value joined, return the "=" + # as a separate item, or return the split name and value. Always + # split and discard the "=" to make completion easier. + if incomplete == "=": + incomplete = "" + elif "=" in incomplete and _start_of_option(ctx, incomplete): + name, _, incomplete = incomplete.partition("=") + args.append(name) + + # The "--" marker tells Click to stop treating values as options + # even if they start with the option character. If it hasn't been + # given and the incomplete arg looks like an option, the current + # command will provide option name completions. + if "--" not in args and _start_of_option(ctx, incomplete): + return ctx.command, incomplete + + params = ctx.command.get_params(ctx) + + # If the last complete arg is an option name with an incomplete + # value, the option will provide value completions. + for param in params: + if _is_incomplete_option(ctx, args, param): + return param, incomplete + + # It's not an option name or value. The first argument without a + # parsed value will provide value completions. + for param in params: + if _is_incomplete_argument(ctx, param): + return param, incomplete + + # There were no unparsed arguments, the command may be a group that + # will provide command name completions. 
+ return ctx.command, incomplete diff --git a/.venv/lib/python3.12/site-packages/click/termui.py b/.venv/lib/python3.12/site-packages/click/termui.py new file mode 100644 index 0000000..2e98a07 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/termui.py @@ -0,0 +1,883 @@ +from __future__ import annotations + +import collections.abc as cabc +import inspect +import io +import itertools +import sys +import typing as t +from contextlib import AbstractContextManager +from gettext import gettext as _ + +from ._compat import isatty +from ._compat import strip_ansi +from .exceptions import Abort +from .exceptions import UsageError +from .globals import resolve_color_default +from .types import Choice +from .types import convert_type +from .types import ParamType +from .utils import echo +from .utils import LazyFile + +if t.TYPE_CHECKING: + from ._termui_impl import ProgressBar + +V = t.TypeVar("V") + +# The prompt functions to use. The doc tools currently override these +# functions to customize how they work. +visible_prompt_func: t.Callable[[str], str] = input + +_ansi_colors = { + "black": 30, + "red": 31, + "green": 32, + "yellow": 33, + "blue": 34, + "magenta": 35, + "cyan": 36, + "white": 37, + "reset": 39, + "bright_black": 90, + "bright_red": 91, + "bright_green": 92, + "bright_yellow": 93, + "bright_blue": 94, + "bright_magenta": 95, + "bright_cyan": 96, + "bright_white": 97, +} +_ansi_reset_all = "\033[0m" + + +def hidden_prompt_func(prompt: str) -> str: + import getpass + + return getpass.getpass(prompt) + + +def _build_prompt( + text: str, + suffix: str, + show_default: bool = False, + default: t.Any | None = None, + show_choices: bool = True, + type: ParamType | None = None, +) -> str: + prompt = text + if type is not None and show_choices and isinstance(type, Choice): + prompt += f" ({', '.join(map(str, type.choices))})" + if default is not None and show_default: + prompt = f"{prompt} [{_format_default(default)}]" + return f"{prompt}{suffix}" + + +def _format_default(default: t.Any) -> t.Any: + if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): + return default.name + + return default + + +def prompt( + text: str, + default: t.Any | None = None, + hide_input: bool = False, + confirmation_prompt: bool | str = False, + type: ParamType | t.Any | None = None, + value_proc: t.Callable[[str], t.Any] | None = None, + prompt_suffix: str = ": ", + show_default: bool = True, + err: bool = False, + show_choices: bool = True, +) -> t.Any: + """Prompts a user for input. This is a convenience function that can + be used to prompt a user for input later. + + If the user aborts the input by sending an interrupt signal, this + function will catch it and raise a :exc:`Abort` exception. + + :param text: the text to show for the prompt. + :param default: the default value to use if no input happens. If this + is not given it will prompt until it's aborted. + :param hide_input: if this is set to true then the input value will + be hidden. + :param confirmation_prompt: Prompt a second time to confirm the + value. Can be set to a string instead of ``True`` to customize + the message. + :param type: the type to use to check the value against. + :param value_proc: if this parameter is provided it's a function that + is invoked instead of the type conversion to + convert a value. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. 
+ :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + :param show_choices: Show or hide choices if the passed type is a Choice. + For example if type is a Choice of either day or week, + show_choices is true and text is "Group by" then the + prompt will be "Group by (day, week): ". + + .. versionchanged:: 8.3.1 + A space is no longer appended to the prompt. + + .. versionadded:: 8.0 + ``confirmation_prompt`` can be a custom string. + + .. versionadded:: 7.0 + Added the ``show_choices`` parameter. + + .. versionadded:: 6.0 + Added unicode support for cmd.exe on Windows. + + .. versionadded:: 4.0 + Added the `err` parameter. + + """ + + def prompt_func(text: str) -> str: + f = hidden_prompt_func if hide_input else visible_prompt_func + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(text[:-1], nl=False, err=err) + # Echo the last character to stdout to work around an issue where + # readline causes backspace to clear the whole line. + return f(text[-1:]) + except (KeyboardInterrupt, EOFError): + # getpass doesn't print a newline if the user aborts input with ^C. + # Allegedly this behavior is inherited from getpass(3). + # A doc bug has been filed at https://bugs.python.org/issue24711 + if hide_input: + echo(None, err=err) + raise Abort() from None + + if value_proc is None: + value_proc = convert_type(type, default) + + prompt = _build_prompt( + text, prompt_suffix, show_default, default, show_choices, type + ) + + if confirmation_prompt: + if confirmation_prompt is True: + confirmation_prompt = _("Repeat for confirmation") + + confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix) + + while True: + while True: + value = prompt_func(prompt) + if value: + break + elif default is not None: + value = default + break + try: + result = value_proc(value) + except UsageError as e: + if hide_input: + echo(_("Error: The value you entered was invalid."), err=err) + else: + echo(_("Error: {e.message}").format(e=e), err=err) + continue + if not confirmation_prompt: + return result + while True: + value2 = prompt_func(confirmation_prompt) + is_empty = not value and not value2 + if value2 or is_empty: + break + if value == value2: + return result + echo(_("Error: The two entered values do not match."), err=err) + + +def confirm( + text: str, + default: bool | None = False, + abort: bool = False, + prompt_suffix: str = ": ", + show_default: bool = True, + err: bool = False, +) -> bool: + """Prompts for confirmation (yes/no question). + + If the user aborts the input by sending a interrupt signal this + function will catch it and raise a :exc:`Abort` exception. + + :param text: the question to ask. + :param default: The default value to use when no input is given. If + ``None``, repeat until input is given. + :param abort: if this is set to `True` a negative answer aborts the + exception by raising :exc:`Abort`. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + + .. versionchanged:: 8.3.1 + A space is no longer appended to the prompt. + + .. versionchanged:: 8.0 + Repeat until input is given if ``default`` is ``None``. + + .. versionadded:: 4.0 + Added the ``err`` parameter. 
+ """ + prompt = _build_prompt( + text, + prompt_suffix, + show_default, + "y/n" if default is None else ("Y/n" if default else "y/N"), + ) + + while True: + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(prompt[:-1], nl=False, err=err) + # Echo the last character to stdout to work around an issue where + # readline causes backspace to clear the whole line. + value = visible_prompt_func(prompt[-1:]).lower().strip() + except (KeyboardInterrupt, EOFError): + raise Abort() from None + if value in ("y", "yes"): + rv = True + elif value in ("n", "no"): + rv = False + elif default is not None and value == "": + rv = default + else: + echo(_("Error: invalid input"), err=err) + continue + break + if abort and not rv: + raise Abort() + return rv + + +def echo_via_pager( + text_or_generator: cabc.Iterable[str] | t.Callable[[], cabc.Iterable[str]] | str, + color: bool | None = None, +) -> None: + """This function takes a text and shows it via an environment specific + pager on stdout. + + .. versionchanged:: 3.0 + Added the `color` flag. + + :param text_or_generator: the text to page, or alternatively, a + generator emitting the text to page. + :param color: controls if the pager supports ANSI colors or not. The + default is autodetection. + """ + color = resolve_color_default(color) + + if inspect.isgeneratorfunction(text_or_generator): + i = t.cast("t.Callable[[], cabc.Iterable[str]]", text_or_generator)() + elif isinstance(text_or_generator, str): + i = [text_or_generator] + else: + i = iter(t.cast("cabc.Iterable[str]", text_or_generator)) + + # convert every element of i to a text type if necessary + text_generator = (el if isinstance(el, str) else str(el) for el in i) + + from ._termui_impl import pager + + return pager(itertools.chain(text_generator, "\n"), color) + + +@t.overload +def progressbar( + *, + length: int, + label: str | None = None, + hidden: bool = False, + show_eta: bool = True, + show_percent: bool | None = None, + show_pos: bool = False, + fill_char: str = "#", + empty_char: str = "-", + bar_template: str = "%(label)s [%(bar)s] %(info)s", + info_sep: str = " ", + width: int = 36, + file: t.TextIO | None = None, + color: bool | None = None, + update_min_steps: int = 1, +) -> ProgressBar[int]: ... + + +@t.overload +def progressbar( + iterable: cabc.Iterable[V] | None = None, + length: int | None = None, + label: str | None = None, + hidden: bool = False, + show_eta: bool = True, + show_percent: bool | None = None, + show_pos: bool = False, + item_show_func: t.Callable[[V | None], str | None] | None = None, + fill_char: str = "#", + empty_char: str = "-", + bar_template: str = "%(label)s [%(bar)s] %(info)s", + info_sep: str = " ", + width: int = 36, + file: t.TextIO | None = None, + color: bool | None = None, + update_min_steps: int = 1, +) -> ProgressBar[V]: ... 
+ + +def progressbar( + iterable: cabc.Iterable[V] | None = None, + length: int | None = None, + label: str | None = None, + hidden: bool = False, + show_eta: bool = True, + show_percent: bool | None = None, + show_pos: bool = False, + item_show_func: t.Callable[[V | None], str | None] | None = None, + fill_char: str = "#", + empty_char: str = "-", + bar_template: str = "%(label)s [%(bar)s] %(info)s", + info_sep: str = " ", + width: int = 36, + file: t.TextIO | None = None, + color: bool | None = None, + update_min_steps: int = 1, +) -> ProgressBar[V]: + """This function creates an iterable context manager that can be used + to iterate over something while showing a progress bar. It will + either iterate over the `iterable` or `length` items (that are counted + up). While iteration happens, this function will print a rendered + progress bar to the given `file` (defaults to stdout) and will attempt + to calculate remaining time and more. By default, this progress bar + will not be rendered if the file is not a terminal. + + The context manager creates the progress bar. When the context + manager is entered the progress bar is already created. With every + iteration over the progress bar, the iterable passed to the bar is + advanced and the bar is updated. When the context manager exits, + a newline is printed and the progress bar is finalized on screen. + + Note: The progress bar is currently designed for use cases where the + total progress can be expected to take at least several seconds. + Because of this, the ProgressBar class object won't display + progress that is considered too fast, and progress where the time + between steps is less than a second. + + No printing must happen or the progress bar will be unintentionally + destroyed. + + Example usage:: + + with progressbar(items) as bar: + for item in bar: + do_something_with(item) + + Alternatively, if no iterable is specified, one can manually update the + progress bar through the `update()` method instead of directly + iterating over the progress bar. The update method accepts the number + of steps to increment the bar with:: + + with progressbar(length=chunks.total_bytes) as bar: + for chunk in chunks: + process_chunk(chunk) + bar.update(chunks.bytes) + + The ``update()`` method also takes an optional value specifying the + ``current_item`` at the new position. This is useful when used + together with ``item_show_func`` to customize the output for each + manual step:: + + with click.progressbar( + length=total_size, + label='Unzipping archive', + item_show_func=lambda a: a.filename + ) as bar: + for archive in zip_file: + archive.extract() + bar.update(archive.size, archive) + + :param iterable: an iterable to iterate over. If not provided the length + is required. + :param length: the number of items to iterate over. By default the + progressbar will attempt to ask the iterator about its + length, which might or might not work. If an iterable is + also provided this parameter can be used to override the + length. If an iterable is not provided the progress bar + will iterate over a range of that length. + :param label: the label to show next to the progress bar. + :param hidden: hide the progressbar. Defaults to ``False``. When no tty is + detected, it will only print the progressbar label. Setting this to + ``False`` also disables that. + :param show_eta: enables or disables the estimated time display. This is + automatically disabled if the length cannot be + determined. 
+ :param show_percent: enables or disables the percentage display. The + default is `True` if the iterable has a length or + `False` if not. + :param show_pos: enables or disables the absolute position display. The + default is `False`. + :param item_show_func: A function called with the current item which + can return a string to show next to the progress bar. If the + function returns ``None`` nothing is shown. The current item can + be ``None``, such as when entering and exiting the bar. + :param fill_char: the character to use to show the filled part of the + progress bar. + :param empty_char: the character to use to show the non-filled part of + the progress bar. + :param bar_template: the format string to use as template for the bar. + The parameters in it are ``label`` for the label, + ``bar`` for the progress bar and ``info`` for the + info section. + :param info_sep: the separator between multiple info items (eta etc.) + :param width: the width of the progress bar in characters, 0 means full + terminal width + :param file: The file to write to. If this is not a terminal then + only the label is printed. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are included anywhere in the progress bar output + which is not the case by default. + :param update_min_steps: Render only when this many updates have + completed. This allows tuning for very fast iterators. + + .. versionadded:: 8.2 + The ``hidden`` argument. + + .. versionchanged:: 8.0 + Output is shown even if execution time is less than 0.5 seconds. + + .. versionchanged:: 8.0 + ``item_show_func`` shows the current item, not the previous one. + + .. versionchanged:: 8.0 + Labels are echoed if the output is not a TTY. Reverts a change + in 7.0 that removed all output. + + .. versionadded:: 8.0 + The ``update_min_steps`` parameter. + + .. versionadded:: 4.0 + The ``color`` parameter and ``update`` method. + + .. versionadded:: 2.0 + """ + from ._termui_impl import ProgressBar + + color = resolve_color_default(color) + return ProgressBar( + iterable=iterable, + length=length, + hidden=hidden, + show_eta=show_eta, + show_percent=show_percent, + show_pos=show_pos, + item_show_func=item_show_func, + fill_char=fill_char, + empty_char=empty_char, + bar_template=bar_template, + info_sep=info_sep, + file=file, + label=label, + width=width, + color=color, + update_min_steps=update_min_steps, + ) + + +def clear() -> None: + """Clears the terminal screen. This will have the effect of clearing + the whole visible space of the terminal and moving the cursor to the + top left. This does not do anything if not connected to a terminal. + + .. 
versionadded:: 2.0 + """ + if not isatty(sys.stdout): + return + + # ANSI escape \033[2J clears the screen, \033[1;1H moves the cursor + echo("\033[2J\033[1;1H", nl=False) + + +def _interpret_color(color: int | tuple[int, int, int] | str, offset: int = 0) -> str: + if isinstance(color, int): + return f"{38 + offset};5;{color:d}" + + if isinstance(color, (tuple, list)): + r, g, b = color + return f"{38 + offset};2;{r:d};{g:d};{b:d}" + + return str(_ansi_colors[color] + offset) + + +def style( + text: t.Any, + fg: int | tuple[int, int, int] | str | None = None, + bg: int | tuple[int, int, int] | str | None = None, + bold: bool | None = None, + dim: bool | None = None, + underline: bool | None = None, + overline: bool | None = None, + italic: bool | None = None, + blink: bool | None = None, + reverse: bool | None = None, + strikethrough: bool | None = None, + reset: bool = True, +) -> str: + """Styles a text with ANSI styles and returns the new string. By + default the styling is self contained which means that at the end + of the string a reset code is issued. This can be prevented by + passing ``reset=False``. + + Examples:: + + click.echo(click.style('Hello World!', fg='green')) + click.echo(click.style('ATTENTION!', blink=True)) + click.echo(click.style('Some things', reverse=True, fg='cyan')) + click.echo(click.style('More colors', fg=(255, 12, 128), bg=117)) + + Supported color names: + + * ``black`` (might be a gray) + * ``red`` + * ``green`` + * ``yellow`` (might be an orange) + * ``blue`` + * ``magenta`` + * ``cyan`` + * ``white`` (might be light gray) + * ``bright_black`` + * ``bright_red`` + * ``bright_green`` + * ``bright_yellow`` + * ``bright_blue`` + * ``bright_magenta`` + * ``bright_cyan`` + * ``bright_white`` + * ``reset`` (reset the color code only) + + If the terminal supports it, color may also be specified as: + + - An integer in the interval [0, 255]. The terminal must support + 8-bit/256-color mode. + - An RGB tuple of three integers in [0, 255]. The terminal must + support 24-bit/true-color mode. + + See https://en.wikipedia.org/wiki/ANSI_color and + https://gist.github.com/XVilka/8346728 for more information. + + :param text: the string to style with ansi codes. + :param fg: if provided this will become the foreground color. + :param bg: if provided this will become the background color. + :param bold: if provided this will enable or disable bold mode. + :param dim: if provided this will enable or disable dim mode. This is + badly supported. + :param underline: if provided this will enable or disable underline. + :param overline: if provided this will enable or disable overline. + :param italic: if provided this will enable or disable italic. + :param blink: if provided this will enable or disable blinking. + :param reverse: if provided this will enable or disable inverse + rendering (foreground becomes background and the + other way round). + :param strikethrough: if provided this will enable or disable + striking through text. + :param reset: by default a reset-all code is added at the end of the + string which means that styles do not carry over. This + can be disabled to compose styles. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. + + .. versionchanged:: 8.0 + Added support for 256 and RGB color codes. + + .. versionchanged:: 8.0 + Added the ``strikethrough``, ``italic``, and ``overline`` + parameters. + + .. versionchanged:: 7.0 + Added support for bright colors. + + .. 
versionadded:: 2.0 + """ + if not isinstance(text, str): + text = str(text) + + bits = [] + + if fg: + try: + bits.append(f"\033[{_interpret_color(fg)}m") + except KeyError: + raise TypeError(f"Unknown color {fg!r}") from None + + if bg: + try: + bits.append(f"\033[{_interpret_color(bg, 10)}m") + except KeyError: + raise TypeError(f"Unknown color {bg!r}") from None + + if bold is not None: + bits.append(f"\033[{1 if bold else 22}m") + if dim is not None: + bits.append(f"\033[{2 if dim else 22}m") + if underline is not None: + bits.append(f"\033[{4 if underline else 24}m") + if overline is not None: + bits.append(f"\033[{53 if overline else 55}m") + if italic is not None: + bits.append(f"\033[{3 if italic else 23}m") + if blink is not None: + bits.append(f"\033[{5 if blink else 25}m") + if reverse is not None: + bits.append(f"\033[{7 if reverse else 27}m") + if strikethrough is not None: + bits.append(f"\033[{9 if strikethrough else 29}m") + bits.append(text) + if reset: + bits.append(_ansi_reset_all) + return "".join(bits) + + +def unstyle(text: str) -> str: + """Removes ANSI styling information from a string. Usually it's not + necessary to use this function as Click's echo function will + automatically remove styling if necessary. + + .. versionadded:: 2.0 + + :param text: the text to remove style information from. + """ + return strip_ansi(text) + + +def secho( + message: t.Any | None = None, + file: t.IO[t.AnyStr] | None = None, + nl: bool = True, + err: bool = False, + color: bool | None = None, + **styles: t.Any, +) -> None: + """This function combines :func:`echo` and :func:`style` into one + call. As such the following two calls are the same:: + + click.secho('Hello World!', fg='green') + click.echo(click.style('Hello World!', fg='green')) + + All keyword arguments are forwarded to the underlying functions + depending on which one they go with. + + Non-string types will be converted to :class:`str`. However, + :class:`bytes` are passed directly to :meth:`echo` without applying + style. If you want to style bytes that represent text, call + :meth:`bytes.decode` first. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. Bytes are + passed through without style applied. + + .. versionadded:: 2.0 + """ + if message is not None and not isinstance(message, (bytes, bytearray)): + message = style(message, **styles) + + return echo(message, file=file, nl=nl, err=err, color=color) + + +@t.overload +def edit( + text: bytes | bytearray, + editor: str | None = None, + env: cabc.Mapping[str, str] | None = None, + require_save: bool = False, + extension: str = ".txt", +) -> bytes | None: ... + + +@t.overload +def edit( + text: str, + editor: str | None = None, + env: cabc.Mapping[str, str] | None = None, + require_save: bool = True, + extension: str = ".txt", +) -> str | None: ... + + +@t.overload +def edit( + text: None = None, + editor: str | None = None, + env: cabc.Mapping[str, str] | None = None, + require_save: bool = True, + extension: str = ".txt", + filename: str | cabc.Iterable[str] | None = None, +) -> None: ... + + +def edit( + text: str | bytes | bytearray | None = None, + editor: str | None = None, + env: cabc.Mapping[str, str] | None = None, + require_save: bool = True, + extension: str = ".txt", + filename: str | cabc.Iterable[str] | None = None, +) -> str | bytes | bytearray | None: + r"""Edits the given text in the defined editor. 
If an editor is given + (should be the full path to the executable but the regular operating + system search path is used for finding the executable) it overrides + the detected editor. Optionally, some environment variables can be + used. If the editor is closed without changes, `None` is returned. In + case a file is edited directly the return value is always `None` and + `require_save` and `extension` are ignored. + + If the editor cannot be opened a :exc:`UsageError` is raised. + + Note for Windows: to simplify cross-platform usage, the newlines are + automatically converted from POSIX to Windows and vice versa. As such, + the message here will have ``\n`` as newline markers. + + :param text: the text to edit. + :param editor: optionally the editor to use. Defaults to automatic + detection. + :param env: environment variables to forward to the editor. + :param require_save: if this is true, then not saving in the editor + will make the return value become `None`. + :param extension: the extension to tell the editor about. This defaults + to `.txt` but changing this might change syntax + highlighting. + :param filename: if provided it will edit this file instead of the + provided text contents. It will not use a temporary + file as an indirection in that case. If the editor supports + editing multiple files at once, a sequence of files may be + passed as well. Invoke `click.file` once per file instead + if multiple files cannot be managed at once or editing the + files serially is desired. + + .. versionchanged:: 8.2.0 + ``filename`` now accepts any ``Iterable[str]`` in addition to a ``str`` + if the ``editor`` supports editing multiple files at once. + + """ + from ._termui_impl import Editor + + ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension) + + if filename is None: + return ed.edit(text) + + if isinstance(filename, str): + filename = (filename,) + + ed.edit_files(filenames=filename) + return None + + +def launch(url: str, wait: bool = False, locate: bool = False) -> int: + """This function launches the given URL (or filename) in the default + viewer application for this file type. If this is an executable, it + might launch the executable in a new session. The return value is + the exit code of the launched application. Usually, ``0`` indicates + success. + + Examples:: + + click.launch('https://click.palletsprojects.com/') + click.launch('/my/downloaded/file', locate=True) + + .. versionadded:: 2.0 + + :param url: URL or filename of the thing to launch. + :param wait: Wait for the program to exit before returning. This + only works if the launched program blocks. In particular, + ``xdg-open`` on Linux does not block. + :param locate: if this is set to `True` then instead of launching the + application associated with the URL it will attempt to + launch a file manager with the file located. This + might have weird effects if the URL does not point to + the filesystem. + """ + from ._termui_impl import open_url + + return open_url(url, wait=wait, locate=locate) + + +# If this is provided, getchar() calls into this instead. This is used +# for unittesting purposes. +_getchar: t.Callable[[bool], str] | None = None + + +def getchar(echo: bool = False) -> str: + """Fetches a single character from the terminal and returns it. This + will always return a unicode character and under certain rare + circumstances this might return more than one character. 
The + situations which more than one character is returned is when for + whatever reason multiple characters end up in the terminal buffer or + standard input was not actually a terminal. + + Note that this will always read from the terminal, even if something + is piped into the standard input. + + Note for Windows: in rare cases when typing non-ASCII characters, this + function might wait for a second character and then return both at once. + This is because certain Unicode characters look like special-key markers. + + .. versionadded:: 2.0 + + :param echo: if set to `True`, the character read will also show up on + the terminal. The default is to not show it. + """ + global _getchar + + if _getchar is None: + from ._termui_impl import getchar as f + + _getchar = f + + return _getchar(echo) + + +def raw_terminal() -> AbstractContextManager[int]: + from ._termui_impl import raw_terminal as f + + return f() + + +def pause(info: str | None = None, err: bool = False) -> None: + """This command stops execution and waits for the user to press any + key to continue. This is similar to the Windows batch "pause" + command. If the program is not run through a terminal, this command + will instead do nothing. + + .. versionadded:: 2.0 + + .. versionadded:: 4.0 + Added the `err` parameter. + + :param info: The message to print before pausing. Defaults to + ``"Press any key to continue..."``. + :param err: if set to message goes to ``stderr`` instead of + ``stdout``, the same as with echo. + """ + if not isatty(sys.stdin) or not isatty(sys.stdout): + return + + if info is None: + info = _("Press any key to continue...") + + try: + if info: + echo(info, nl=False, err=err) + try: + getchar() + except (KeyboardInterrupt, EOFError): + pass + finally: + if info: + echo(err=err) diff --git a/.venv/lib/python3.12/site-packages/click/testing.py b/.venv/lib/python3.12/site-packages/click/testing.py new file mode 100644 index 0000000..f6f60b8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/testing.py @@ -0,0 +1,577 @@ +from __future__ import annotations + +import collections.abc as cabc +import contextlib +import io +import os +import shlex +import sys +import tempfile +import typing as t +from types import TracebackType + +from . import _compat +from . import formatting +from . import termui +from . 
import utils
+from ._compat import _find_binary_reader
+
+if t.TYPE_CHECKING:
+    from _typeshed import ReadableBuffer
+
+    from .core import Command
+
+
+class EchoingStdin:
+    def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None:
+        self._input = input
+        self._output = output
+        self._paused = False
+
+    def __getattr__(self, x: str) -> t.Any:
+        return getattr(self._input, x)
+
+    def _echo(self, rv: bytes) -> bytes:
+        if not self._paused:
+            self._output.write(rv)
+
+        return rv
+
+    def read(self, n: int = -1) -> bytes:
+        return self._echo(self._input.read(n))
+
+    def read1(self, n: int = -1) -> bytes:
+        return self._echo(self._input.read1(n))  # type: ignore
+
+    def readline(self, n: int = -1) -> bytes:
+        return self._echo(self._input.readline(n))
+
+    def readlines(self) -> list[bytes]:
+        return [self._echo(x) for x in self._input.readlines()]
+
+    def __iter__(self) -> cabc.Iterator[bytes]:
+        return iter(self._echo(x) for x in self._input)
+
+    def __repr__(self) -> str:
+        return repr(self._input)
+
+
+@contextlib.contextmanager
+def _pause_echo(stream: EchoingStdin | None) -> cabc.Iterator[None]:
+    if stream is None:
+        yield
+    else:
+        stream._paused = True
+        yield
+        stream._paused = False
+
+
+class BytesIOCopy(io.BytesIO):
+    """Patch ``io.BytesIO`` to let the written stream be copied to another.
+
+    .. versionadded:: 8.2
+    """
+
+    def __init__(self, copy_to: io.BytesIO) -> None:
+        super().__init__()
+        self.copy_to = copy_to
+
+    def flush(self) -> None:
+        super().flush()
+        self.copy_to.flush()
+
+    def write(self, b: ReadableBuffer) -> int:
+        self.copy_to.write(b)
+        return super().write(b)
+
+
+class StreamMixer:
+    """Mixes `<stdout>` and `<stderr>` streams.
+
+    The result is available in the ``output`` attribute.
+
+    .. versionadded:: 8.2
+    """
+
+    def __init__(self) -> None:
+        self.output: io.BytesIO = io.BytesIO()
+        self.stdout: io.BytesIO = BytesIOCopy(copy_to=self.output)
+        self.stderr: io.BytesIO = BytesIOCopy(copy_to=self.output)
+
+    def __del__(self) -> None:
+        """
+        Guarantee that embedded file-like objects are closed in a
+        predictable order, protecting against races between
+        self.output being closed and other streams being flushed on close
+
+        .. versionadded:: 8.2.2
+        """
+        self.stderr.close()
+        self.stdout.close()
+        self.output.close()
+
+
+class _NamedTextIOWrapper(io.TextIOWrapper):
+    def __init__(
+        self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any
+    ) -> None:
+        super().__init__(buffer, **kwargs)
+        self._name = name
+        self._mode = mode
+
+    @property
+    def name(self) -> str:
+        return self._name
+
+    @property
+    def mode(self) -> str:
+        return self._mode
+
+
+def make_input_stream(
+    input: str | bytes | t.IO[t.Any] | None, charset: str
+) -> t.BinaryIO:
+    # Is already an input stream.
+    if hasattr(input, "read"):
+        rv = _find_binary_reader(t.cast("t.IO[t.Any]", input))
+
+        if rv is not None:
+            return rv
+
+        raise TypeError("Could not find binary reader for input stream.")
+
+    if input is None:
+        input = b""
+    elif isinstance(input, str):
+        input = input.encode(charset)
+
+    return io.BytesIO(input)
+
+
+class Result:
+    """Holds the captured result of an invoked CLI script.
+
+    :param runner: The runner that created the result
+    :param stdout_bytes: The standard output as bytes.
+    :param stderr_bytes: The standard error as bytes.
+    :param output_bytes: A mix of ``stdout_bytes`` and ``stderr_bytes``, as the
+        user would see it in its terminal.
+    :param return_value: The value returned from the invoked command.
+    :param exit_code: The exit code as integer.
+    :param exception: The exception that happened if one did.
+    :param exc_info: Exception information (exception type, exception instance,
+        traceback type).
+
+    .. versionchanged:: 8.2
+        ``stderr_bytes`` no longer optional, ``output_bytes`` introduced and
+        ``mix_stderr`` has been removed.
+
+    .. versionadded:: 8.0
+        Added ``return_value``.
+    """
+
+    def __init__(
+        self,
+        runner: CliRunner,
+        stdout_bytes: bytes,
+        stderr_bytes: bytes,
+        output_bytes: bytes,
+        return_value: t.Any,
+        exit_code: int,
+        exception: BaseException | None,
+        exc_info: tuple[type[BaseException], BaseException, TracebackType]
+        | None = None,
+    ):
+        self.runner = runner
+        self.stdout_bytes = stdout_bytes
+        self.stderr_bytes = stderr_bytes
+        self.output_bytes = output_bytes
+        self.return_value = return_value
+        self.exit_code = exit_code
+        self.exception = exception
+        self.exc_info = exc_info
+
+    @property
+    def output(self) -> str:
+        """The terminal output as unicode string, as the user would see it.
+
+        .. versionchanged:: 8.2
+            No longer a proxy for ``self.stdout``. Now has its own independent stream
+            that is mixing `<stdout>` and `<stderr>`, in the order they were written.
+        """
+        return self.output_bytes.decode(self.runner.charset, "replace").replace(
+            "\r\n", "\n"
+        )
+
+    @property
+    def stdout(self) -> str:
+        """The standard output as unicode string."""
+        return self.stdout_bytes.decode(self.runner.charset, "replace").replace(
+            "\r\n", "\n"
+        )
+
+    @property
+    def stderr(self) -> str:
+        """The standard error as unicode string.
+
+        .. versionchanged:: 8.2
+            No longer raise an exception, always returns the `<stderr>` string.
+        """
+        return self.stderr_bytes.decode(self.runner.charset, "replace").replace(
+            "\r\n", "\n"
+        )
+
+    def __repr__(self) -> str:
+        exc_str = repr(self.exception) if self.exception else "okay"
+        return f"<{type(self).__name__} {exc_str}>"
+
+
+class CliRunner:
+    """The CLI runner provides functionality to invoke a Click command line
+    script for unittesting purposes in a isolated environment. This only
+    works in single-threaded systems without any concurrency as it changes the
+    global interpreter state.
+
+    :param charset: the character set for the input and output data.
+    :param env: a dictionary with environment variables for overriding.
+    :param echo_stdin: if this is set to `True`, then reading from `<stdin>` writes
+        to `<stdout>`. This is useful for showing examples in
+        some circumstances. Note that regular prompts
+        will automatically echo the input.
+    :param catch_exceptions: Whether to catch any exceptions other than
+        ``SystemExit`` when running :meth:`~CliRunner.invoke`.
+
+    .. versionchanged:: 8.2
+        Added the ``catch_exceptions`` parameter.
+
+    .. versionchanged:: 8.2
+        ``mix_stderr`` parameter has been removed.
+    """
+
+    def __init__(
+        self,
+        charset: str = "utf-8",
+        env: cabc.Mapping[str, str | None] | None = None,
+        echo_stdin: bool = False,
+        catch_exceptions: bool = True,
+    ) -> None:
+        self.charset = charset
+        self.env: cabc.Mapping[str, str | None] = env or {}
+        self.echo_stdin = echo_stdin
+        self.catch_exceptions = catch_exceptions
+
+    def get_default_prog_name(self, cli: Command) -> str:
+        """Given a command object it will return the default program name
+        for it. The default is the `name` attribute or ``"root"`` if not
+        set.
+ """ + return cli.name or "root" + + def make_env( + self, overrides: cabc.Mapping[str, str | None] | None = None + ) -> cabc.Mapping[str, str | None]: + """Returns the environment overrides for invoking a script.""" + rv = dict(self.env) + if overrides: + rv.update(overrides) + return rv + + @contextlib.contextmanager + def isolation( + self, + input: str | bytes | t.IO[t.Any] | None = None, + env: cabc.Mapping[str, str | None] | None = None, + color: bool = False, + ) -> cabc.Iterator[tuple[io.BytesIO, io.BytesIO, io.BytesIO]]: + """A context manager that sets up the isolation for invoking of a + command line tool. This sets up `` with the given input data + and `os.environ` with the overrides from the given dictionary. + This also rebinds some internals in Click to be mocked (like the + prompt functionality). + + This is automatically done in the :meth:`invoke` method. + + :param input: the input stream to put into `sys.stdin`. + :param env: the environment overrides as dictionary. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + + .. versionadded:: 8.2 + An additional output stream is returned, which is a mix of + `` and `` streams. + + .. versionchanged:: 8.2 + Always returns the `` stream. + + .. versionchanged:: 8.0 + `` is opened with ``errors="backslashreplace"`` + instead of the default ``"strict"``. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + """ + bytes_input = make_input_stream(input, self.charset) + echo_input = None + + old_stdin = sys.stdin + old_stdout = sys.stdout + old_stderr = sys.stderr + old_forced_width = formatting.FORCED_WIDTH + formatting.FORCED_WIDTH = 80 + + env = self.make_env(env) + + stream_mixer = StreamMixer() + + if self.echo_stdin: + bytes_input = echo_input = t.cast( + t.BinaryIO, EchoingStdin(bytes_input, stream_mixer.stdout) + ) + + sys.stdin = text_input = _NamedTextIOWrapper( + bytes_input, encoding=self.charset, name="", mode="r" + ) + + if self.echo_stdin: + # Force unbuffered reads, otherwise TextIOWrapper reads a + # large chunk which is echoed early. 
+            text_input._CHUNK_SIZE = 1  # type: ignore
+
+        sys.stdout = _NamedTextIOWrapper(
+            stream_mixer.stdout, encoding=self.charset, name="<stdout>", mode="w"
+        )
+
+        sys.stderr = _NamedTextIOWrapper(
+            stream_mixer.stderr,
+            encoding=self.charset,
+            name="<stderr>",
+            mode="w",
+            errors="backslashreplace",
+        )
+
+        @_pause_echo(echo_input)  # type: ignore
+        def visible_input(prompt: str | None = None) -> str:
+            sys.stdout.write(prompt or "")
+            try:
+                val = next(text_input).rstrip("\r\n")
+            except StopIteration as e:
+                raise EOFError() from e
+            sys.stdout.write(f"{val}\n")
+            sys.stdout.flush()
+            return val
+
+        @_pause_echo(echo_input)  # type: ignore
+        def hidden_input(prompt: str | None = None) -> str:
+            sys.stdout.write(f"{prompt or ''}\n")
+            sys.stdout.flush()
+            try:
+                return next(text_input).rstrip("\r\n")
+            except StopIteration as e:
+                raise EOFError() from e
+
+        @_pause_echo(echo_input)  # type: ignore
+        def _getchar(echo: bool) -> str:
+            char = sys.stdin.read(1)
+
+            if echo:
+                sys.stdout.write(char)
+
+            sys.stdout.flush()
+            return char
+
+        default_color = color
+
+        def should_strip_ansi(
+            stream: t.IO[t.Any] | None = None, color: bool | None = None
+        ) -> bool:
+            if color is None:
+                return not default_color
+            return not color
+
+        old_visible_prompt_func = termui.visible_prompt_func
+        old_hidden_prompt_func = termui.hidden_prompt_func
+        old__getchar_func = termui._getchar
+        old_should_strip_ansi = utils.should_strip_ansi  # type: ignore
+        old__compat_should_strip_ansi = _compat.should_strip_ansi
+        termui.visible_prompt_func = visible_input
+        termui.hidden_prompt_func = hidden_input
+        termui._getchar = _getchar
+        utils.should_strip_ansi = should_strip_ansi  # type: ignore
+        _compat.should_strip_ansi = should_strip_ansi
+
+        old_env = {}
+        try:
+            for key, value in env.items():
+                old_env[key] = os.environ.get(key)
+                if value is None:
+                    try:
+                        del os.environ[key]
+                    except Exception:
+                        pass
+                else:
+                    os.environ[key] = value
+            yield (stream_mixer.stdout, stream_mixer.stderr, stream_mixer.output)
+        finally:
+            for key, value in old_env.items():
+                if value is None:
+                    try:
+                        del os.environ[key]
+                    except Exception:
+                        pass
+                else:
+                    os.environ[key] = value
+            sys.stdout = old_stdout
+            sys.stderr = old_stderr
+            sys.stdin = old_stdin
+            termui.visible_prompt_func = old_visible_prompt_func
+            termui.hidden_prompt_func = old_hidden_prompt_func
+            termui._getchar = old__getchar_func
+            utils.should_strip_ansi = old_should_strip_ansi  # type: ignore
+            _compat.should_strip_ansi = old__compat_should_strip_ansi
+            formatting.FORCED_WIDTH = old_forced_width
+
+    def invoke(
+        self,
+        cli: Command,
+        args: str | cabc.Sequence[str] | None = None,
+        input: str | bytes | t.IO[t.Any] | None = None,
+        env: cabc.Mapping[str, str | None] | None = None,
+        catch_exceptions: bool | None = None,
+        color: bool = False,
+        **extra: t.Any,
+    ) -> Result:
+        """Invokes a command in an isolated environment. The arguments are
+        forwarded directly to the command line script, the `extra` keyword
+        arguments are passed to the :meth:`~clickpkg.Command.main` function of
+        the command.
+
+        This returns a :class:`Result` object.
+
+        :param cli: the command to invoke
+        :param args: the arguments to invoke. It may be given as an iterable
+            or a string. When given as string it will be interpreted
+            as a Unix shell command. More details at
+            :func:`shlex.split`.
+        :param input: the input data for `sys.stdin`.
+        :param env: the environment overrides.
+        :param catch_exceptions: Whether to catch any other exceptions than
+            ``SystemExit``.
If :data:`None`, the value + from :class:`CliRunner` is used. + :param extra: the keyword arguments to pass to :meth:`main`. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + + .. versionadded:: 8.2 + The result object has the ``output_bytes`` attribute with + the mix of ``stdout_bytes`` and ``stderr_bytes``, as the user would + see it in its terminal. + + .. versionchanged:: 8.2 + The result object always returns the ``stderr_bytes`` stream. + + .. versionchanged:: 8.0 + The result object has the ``return_value`` attribute with + the value returned from the invoked command. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionchanged:: 3.0 + Added the ``catch_exceptions`` parameter. + + .. versionchanged:: 3.0 + The result object has the ``exc_info`` attribute with the + traceback if available. + """ + exc_info = None + if catch_exceptions is None: + catch_exceptions = self.catch_exceptions + + with self.isolation(input=input, env=env, color=color) as outstreams: + return_value = None + exception: BaseException | None = None + exit_code = 0 + + if isinstance(args, str): + args = shlex.split(args) + + try: + prog_name = extra.pop("prog_name") + except KeyError: + prog_name = self.get_default_prog_name(cli) + + try: + return_value = cli.main(args=args or (), prog_name=prog_name, **extra) + except SystemExit as e: + exc_info = sys.exc_info() + e_code = t.cast("int | t.Any | None", e.code) + + if e_code is None: + e_code = 0 + + if e_code != 0: + exception = e + + if not isinstance(e_code, int): + sys.stdout.write(str(e_code)) + sys.stdout.write("\n") + e_code = 1 + + exit_code = e_code + + except Exception as e: + if not catch_exceptions: + raise + exception = e + exit_code = 1 + exc_info = sys.exc_info() + finally: + sys.stdout.flush() + sys.stderr.flush() + stdout = outstreams[0].getvalue() + stderr = outstreams[1].getvalue() + output = outstreams[2].getvalue() + + return Result( + runner=self, + stdout_bytes=stdout, + stderr_bytes=stderr, + output_bytes=output, + return_value=return_value, + exit_code=exit_code, + exception=exception, + exc_info=exc_info, # type: ignore + ) + + @contextlib.contextmanager + def isolated_filesystem( + self, temp_dir: str | os.PathLike[str] | None = None + ) -> cabc.Iterator[str]: + """A context manager that creates a temporary directory and + changes the current working directory to it. This isolates tests + that affect the contents of the CWD to prevent them from + interfering with each other. + + :param temp_dir: Create the temporary directory under this + directory. If given, the created directory is not removed + when exiting. + + .. versionchanged:: 8.0 + Added the ``temp_dir`` parameter. 
+ """ + cwd = os.getcwd() + dt = tempfile.mkdtemp(dir=temp_dir) + os.chdir(dt) + + try: + yield dt + finally: + os.chdir(cwd) + + if temp_dir is None: + import shutil + + try: + shutil.rmtree(dt) + except OSError: + pass diff --git a/.venv/lib/python3.12/site-packages/click/types.py b/.venv/lib/python3.12/site-packages/click/types.py new file mode 100644 index 0000000..e71c1c2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/types.py @@ -0,0 +1,1209 @@ +from __future__ import annotations + +import collections.abc as cabc +import enum +import os +import stat +import sys +import typing as t +from datetime import datetime +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import _get_argv_encoding +from ._compat import open_stream +from .exceptions import BadParameter +from .utils import format_filename +from .utils import LazyFile +from .utils import safecall + +if t.TYPE_CHECKING: + import typing_extensions as te + + from .core import Context + from .core import Parameter + from .shell_completion import CompletionItem + +ParamTypeValue = t.TypeVar("ParamTypeValue") + + +class ParamType: + """Represents the type of a parameter. Validates and converts values + from the command line or Python into the correct type. + + To implement a custom type, subclass and implement at least the + following: + + - The :attr:`name` class attribute must be set. + - Calling an instance of the type with ``None`` must return + ``None``. This is already implemented by default. + - :meth:`convert` must convert string values to the correct type. + - :meth:`convert` must accept values that are already the correct + type. + - It must be able to convert a value if the ``ctx`` and ``param`` + arguments are ``None``. This can occur when converting prompt + input. + """ + + is_composite: t.ClassVar[bool] = False + arity: t.ClassVar[int] = 1 + + #: the descriptive name of this type + name: str + + #: if a list of this type is expected and the value is pulled from a + #: string environment variable, this is what splits it up. `None` + #: means any whitespace. For all parameters the general rule is that + #: whitespace splits them up. The exception are paths and files which + #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on + #: Windows). + envvar_list_splitter: t.ClassVar[str | None] = None + + def to_info_dict(self) -> dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionadded:: 8.0 + """ + # The class name without the "ParamType" suffix. + param_type = type(self).__name__.partition("ParamType")[0] + param_type = param_type.partition("ParameterType")[0] + + # Custom subclasses might not remember to set a name. + if hasattr(self, "name"): + name = self.name + else: + name = param_type + + return {"param_type": param_type, "name": name} + + def __call__( + self, + value: t.Any, + param: Parameter | None = None, + ctx: Context | None = None, + ) -> t.Any: + if value is not None: + return self.convert(value, param, ctx) + + def get_metavar(self, param: Parameter, ctx: Context) -> str | None: + """Returns the metavar default for this param if it provides one.""" + + def get_missing_message(self, param: Parameter, ctx: Context | None) -> str | None: + """Optionally might return extra information about a missing + parameter. + + .. 
versionadded:: 2.0 + """ + + def convert( + self, value: t.Any, param: Parameter | None, ctx: Context | None + ) -> t.Any: + """Convert the value to the correct type. This is not called if + the value is ``None`` (the missing value). + + This must accept string values from the command line, as well as + values that are already the correct type. It may also convert + other compatible types. + + The ``param`` and ``ctx`` arguments may be ``None`` in certain + situations, such as when converting prompt input. + + If the value cannot be converted, call :meth:`fail` with a + descriptive message. + + :param value: The value to convert. + :param param: The parameter that is using this type to convert + its value. May be ``None``. + :param ctx: The current context that arrived at this value. May + be ``None``. + """ + return value + + def split_envvar_value(self, rv: str) -> cabc.Sequence[str]: + """Given a value from an environment variable this splits it up + into small chunks depending on the defined envvar list splitter. + + If the splitter is set to `None`, which means that whitespace splits, + then leading and trailing whitespace is ignored. Otherwise, leading + and trailing splitters usually lead to empty items being included. + """ + return (rv or "").split(self.envvar_list_splitter) + + def fail( + self, + message: str, + param: Parameter | None = None, + ctx: Context | None = None, + ) -> t.NoReturn: + """Helper method to fail with an invalid value message.""" + raise BadParameter(message, ctx=ctx, param=param) + + def shell_complete( + self, ctx: Context, param: Parameter, incomplete: str + ) -> list[CompletionItem]: + """Return a list of + :class:`~click.shell_completion.CompletionItem` objects for the + incomplete value. Most types do not provide completions, but + some do, and this allows custom types to provide custom + completions as well. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + return [] + + +class CompositeParamType(ParamType): + is_composite = True + + @property + def arity(self) -> int: # type: ignore + raise NotImplementedError() + + +class FuncParamType(ParamType): + def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None: + self.name: str = func.__name__ + self.func = func + + def to_info_dict(self) -> dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["func"] = self.func + return info_dict + + def convert( + self, value: t.Any, param: Parameter | None, ctx: Context | None + ) -> t.Any: + try: + return self.func(value) + except ValueError: + try: + value = str(value) + except UnicodeError: + value = value.decode("utf-8", "replace") + + self.fail(value, param, ctx) + + +class UnprocessedParamType(ParamType): + name = "text" + + def convert( + self, value: t.Any, param: Parameter | None, ctx: Context | None + ) -> t.Any: + return value + + def __repr__(self) -> str: + return "UNPROCESSED" + + +class StringParamType(ParamType): + name = "text" + + def convert( + self, value: t.Any, param: Parameter | None, ctx: Context | None + ) -> t.Any: + if isinstance(value, bytes): + enc = _get_argv_encoding() + try: + value = value.decode(enc) + except UnicodeError: + fs_enc = sys.getfilesystemencoding() + if fs_enc != enc: + try: + value = value.decode(fs_enc) + except UnicodeError: + value = value.decode("utf-8", "replace") + else: + value = value.decode("utf-8", "replace") + return value + return str(value) + + def __repr__(self) -> str: + return "STRING" + + +class Choice(ParamType, t.Generic[ParamTypeValue]): + """The choice type allows a value to be checked against a fixed set + of supported values. + + You may pass any iterable value which will be converted to a tuple + and thus will only be iterated once. + + The resulting value will always be one of the originally passed choices. + See :meth:`normalize_choice` for more info on the mapping of strings + to choices. See :ref:`choice-opts` for an example. + + :param case_sensitive: Set to false to make choices case + insensitive. Defaults to true. + + .. versionchanged:: 8.2.0 + Non-``str`` ``choices`` are now supported. It can additionally be any + iterable. Before you were not recommended to pass anything but a list or + tuple. + + .. versionadded:: 8.2.0 + Choice normalization can be overridden via :meth:`normalize_choice`. + """ + + name = "choice" + + def __init__( + self, choices: cabc.Iterable[ParamTypeValue], case_sensitive: bool = True + ) -> None: + self.choices: cabc.Sequence[ParamTypeValue] = tuple(choices) + self.case_sensitive = case_sensitive + + def to_info_dict(self) -> dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["choices"] = self.choices + info_dict["case_sensitive"] = self.case_sensitive + return info_dict + + def _normalized_mapping( + self, ctx: Context | None = None + ) -> cabc.Mapping[ParamTypeValue, str]: + """ + Returns mapping where keys are the original choices and the values are + the normalized values that are accepted via the command line. + + This is a simple wrapper around :meth:`normalize_choice`, use that + instead which is supported. + """ + return { + choice: self.normalize_choice( + choice=choice, + ctx=ctx, + ) + for choice in self.choices + } + + def normalize_choice(self, choice: ParamTypeValue, ctx: Context | None) -> str: + """ + Normalize a choice value, used to map a passed string to a choice. + Each choice must have a unique normalized value. 
+ + By default uses :meth:`Context.token_normalize_func` and if not case + sensitive, convert it to a casefolded value. + + .. versionadded:: 8.2.0 + """ + normed_value = choice.name if isinstance(choice, enum.Enum) else str(choice) + + if ctx is not None and ctx.token_normalize_func is not None: + normed_value = ctx.token_normalize_func(normed_value) + + if not self.case_sensitive: + normed_value = normed_value.casefold() + + return normed_value + + def get_metavar(self, param: Parameter, ctx: Context) -> str | None: + if param.param_type_name == "option" and not param.show_choices: # type: ignore + choice_metavars = [ + convert_type(type(choice)).name.upper() for choice in self.choices + ] + choices_str = "|".join([*dict.fromkeys(choice_metavars)]) + else: + choices_str = "|".join( + [str(i) for i in self._normalized_mapping(ctx=ctx).values()] + ) + + # Use curly braces to indicate a required argument. + if param.required and param.param_type_name == "argument": + return f"{{{choices_str}}}" + + # Use square braces to indicate an option or optional argument. + return f"[{choices_str}]" + + def get_missing_message(self, param: Parameter, ctx: Context | None) -> str: + """ + Message shown when no choice is passed. + + .. versionchanged:: 8.2.0 Added ``ctx`` argument. + """ + return _("Choose from:\n\t{choices}").format( + choices=",\n\t".join(self._normalized_mapping(ctx=ctx).values()) + ) + + def convert( + self, value: t.Any, param: Parameter | None, ctx: Context | None + ) -> ParamTypeValue: + """ + For a given value from the parser, normalize it and find its + matching normalized value in the list of choices. Then return the + matched "original" choice. + """ + normed_value = self.normalize_choice(choice=value, ctx=ctx) + normalized_mapping = self._normalized_mapping(ctx=ctx) + + try: + return next( + original + for original, normalized in normalized_mapping.items() + if normalized == normed_value + ) + except StopIteration: + self.fail( + self.get_invalid_choice_message(value=value, ctx=ctx), + param=param, + ctx=ctx, + ) + + def get_invalid_choice_message(self, value: t.Any, ctx: Context | None) -> str: + """Get the error message when the given choice is invalid. + + :param value: The invalid value. + + .. versionadded:: 8.2 + """ + choices_str = ", ".join(map(repr, self._normalized_mapping(ctx=ctx).values())) + return ngettext( + "{value!r} is not {choice}.", + "{value!r} is not one of {choices}.", + len(self.choices), + ).format(value=value, choice=choices_str, choices=choices_str) + + def __repr__(self) -> str: + return f"Choice({list(self.choices)})" + + def shell_complete( + self, ctx: Context, param: Parameter, incomplete: str + ) -> list[CompletionItem]: + """Complete choices that start with the incomplete value. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + str_choices = map(str, self.choices) + + if self.case_sensitive: + matched = (c for c in str_choices if c.startswith(incomplete)) + else: + incomplete = incomplete.lower() + matched = (c for c in str_choices if c.lower().startswith(incomplete)) + + return [CompletionItem(c) for c in matched] + + +class DateTime(ParamType): + """The DateTime type converts date strings into `datetime` objects. 
+ + The format strings which are checked are configurable, but default to some + common (non-timezone aware) ISO 8601 formats. + + When specifying *DateTime* formats, you should only pass a list or a tuple. + Other iterables, like generators, may lead to surprising results. + + The format strings are processed using ``datetime.strptime``, and this + consequently defines the format strings which are allowed. + + Parsing is tried using each format, in order, and the first format which + parses successfully is used. + + :param formats: A list or tuple of date format strings, in the order in + which they should be tried. Defaults to + ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, + ``'%Y-%m-%d %H:%M:%S'``. + """ + + name = "datetime" + + def __init__(self, formats: cabc.Sequence[str] | None = None): + self.formats: cabc.Sequence[str] = formats or [ + "%Y-%m-%d", + "%Y-%m-%dT%H:%M:%S", + "%Y-%m-%d %H:%M:%S", + ] + + def to_info_dict(self) -> dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["formats"] = self.formats + return info_dict + + def get_metavar(self, param: Parameter, ctx: Context) -> str | None: + return f"[{'|'.join(self.formats)}]" + + def _try_to_convert_date(self, value: t.Any, format: str) -> datetime | None: + try: + return datetime.strptime(value, format) + except ValueError: + return None + + def convert( + self, value: t.Any, param: Parameter | None, ctx: Context | None + ) -> t.Any: + if isinstance(value, datetime): + return value + + for format in self.formats: + converted = self._try_to_convert_date(value, format) + + if converted is not None: + return converted + + formats_str = ", ".join(map(repr, self.formats)) + self.fail( + ngettext( + "{value!r} does not match the format {format}.", + "{value!r} does not match the formats {formats}.", + len(self.formats), + ).format(value=value, format=formats_str, formats=formats_str), + param, + ctx, + ) + + def __repr__(self) -> str: + return "DateTime" + + +class _NumberParamTypeBase(ParamType): + _number_class: t.ClassVar[type[t.Any]] + + def convert( + self, value: t.Any, param: Parameter | None, ctx: Context | None + ) -> t.Any: + try: + return self._number_class(value) + except ValueError: + self.fail( + _("{value!r} is not a valid {number_type}.").format( + value=value, number_type=self.name + ), + param, + ctx, + ) + + +class _NumberRangeBase(_NumberParamTypeBase): + def __init__( + self, + min: float | None = None, + max: float | None = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + self.min = min + self.max = max + self.min_open = min_open + self.max_open = max_open + self.clamp = clamp + + def to_info_dict(self) -> dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + min=self.min, + max=self.max, + min_open=self.min_open, + max_open=self.max_open, + clamp=self.clamp, + ) + return info_dict + + def convert( + self, value: t.Any, param: Parameter | None, ctx: Context | None + ) -> t.Any: + import operator + + rv = super().convert(value, param, ctx) + lt_min: bool = self.min is not None and ( + operator.le if self.min_open else operator.lt + )(rv, self.min) + gt_max: bool = self.max is not None and ( + operator.ge if self.max_open else operator.gt + )(rv, self.max) + + if self.clamp: + if lt_min: + return self._clamp(self.min, 1, self.min_open) # type: ignore + + if gt_max: + return self._clamp(self.max, -1, self.max_open) # type: ignore + + if lt_min or gt_max: + self.fail( + _("{value} is not in the range {range}.").format( + value=rv, 
range=self._describe_range() + ), + param, + ctx, + ) + + return rv + + def _clamp(self, bound: float, dir: t.Literal[1, -1], open: bool) -> float: + """Find the valid value to clamp to bound in the given + direction. + + :param bound: The boundary value. + :param dir: 1 or -1 indicating the direction to move. + :param open: If true, the range does not include the bound. + """ + raise NotImplementedError + + def _describe_range(self) -> str: + """Describe the range for use in help text.""" + if self.min is None: + op = "<" if self.max_open else "<=" + return f"x{op}{self.max}" + + if self.max is None: + op = ">" if self.min_open else ">=" + return f"x{op}{self.min}" + + lop = "<" if self.min_open else "<=" + rop = "<" if self.max_open else "<=" + return f"{self.min}{lop}x{rop}{self.max}" + + def __repr__(self) -> str: + clamp = " clamped" if self.clamp else "" + return f"<{type(self).__name__} {self._describe_range()}{clamp}>" + + +class IntParamType(_NumberParamTypeBase): + name = "integer" + _number_class = int + + def __repr__(self) -> str: + return "INT" + + +class IntRange(_NumberRangeBase, IntParamType): + """Restrict an :data:`click.INT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "integer range" + + def _clamp( # type: ignore + self, bound: int, dir: t.Literal[1, -1], open: bool + ) -> int: + if not open: + return bound + + return bound + dir + + +class FloatParamType(_NumberParamTypeBase): + name = "float" + _number_class = float + + def __repr__(self) -> str: + return "FLOAT" + + +class FloatRange(_NumberRangeBase, FloatParamType): + """Restrict a :data:`click.FLOAT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. This is not supported if either + boundary is marked ``open``. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "float range" + + def __init__( + self, + min: float | None = None, + max: float | None = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + super().__init__( + min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp + ) + + if (min_open or max_open) and clamp: + raise TypeError("Clamping is not supported for open bounds.") + + def _clamp(self, bound: float, dir: t.Literal[1, -1], open: bool) -> float: + if not open: + return bound + + # Could use math.nextafter here, but clamping an + # open float range doesn't seem to be particularly useful. It's + # left up to the user to write a callback to do it if needed. 
+ raise RuntimeError("Clamping is not supported for open bounds.") + + +class BoolParamType(ParamType): + name = "boolean" + + bool_states: dict[str, bool] = { + "1": True, + "0": False, + "yes": True, + "no": False, + "true": True, + "false": False, + "on": True, + "off": False, + "t": True, + "f": False, + "y": True, + "n": False, + # Absence of value is considered False. + "": False, + } + """A mapping of string values to boolean states. + + Mapping is inspired by :py:attr:`configparser.ConfigParser.BOOLEAN_STATES` + and extends it. + + .. caution:: + String values are lower-cased, as the ``str_to_bool`` comparison function + below is case-insensitive. + + .. warning:: + The mapping is not exhaustive, and does not cover all possible boolean strings + representations. It will remains as it is to avoid endless bikeshedding. + + Future work my be considered to make this mapping user-configurable from public + API. + """ + + @staticmethod + def str_to_bool(value: str | bool) -> bool | None: + """Convert a string to a boolean value. + + If the value is already a boolean, it is returned as-is. If the value is a + string, it is stripped of whitespaces and lower-cased, then checked against + the known boolean states pre-defined in the `BoolParamType.bool_states` mapping + above. + + Returns `None` if the value does not match any known boolean state. + """ + if isinstance(value, bool): + return value + return BoolParamType.bool_states.get(value.strip().lower()) + + def convert( + self, value: t.Any, param: Parameter | None, ctx: Context | None + ) -> bool: + normalized = self.str_to_bool(value) + if normalized is None: + self.fail( + _( + "{value!r} is not a valid boolean. Recognized values: {states}" + ).format(value=value, states=", ".join(sorted(self.bool_states))), + param, + ctx, + ) + return normalized + + def __repr__(self) -> str: + return "BOOL" + + +class UUIDParameterType(ParamType): + name = "uuid" + + def convert( + self, value: t.Any, param: Parameter | None, ctx: Context | None + ) -> t.Any: + import uuid + + if isinstance(value, uuid.UUID): + return value + + value = value.strip() + + try: + return uuid.UUID(value) + except ValueError: + self.fail( + _("{value!r} is not a valid UUID.").format(value=value), param, ctx + ) + + def __repr__(self) -> str: + return "UUID" + + +class File(ParamType): + """Declares a parameter to be a file for reading or writing. The file + is automatically closed once the context tears down (after the command + finished working). + + Files can be opened for reading or writing. The special value ``-`` + indicates stdin or stdout depending on the mode. + + By default, the file is opened for reading text data, but it can also be + opened in binary mode or for writing. The encoding parameter can be used + to force a specific encoding. + + The `lazy` flag controls if the file should be opened immediately or upon + first IO. The default is to be non-lazy for standard input and output + streams as well as files opened for reading, `lazy` otherwise. When opening a + file lazily for reading, it is still opened temporarily for validation, but + will not be held open until first IO. lazy is mainly useful when opening + for writing to avoid creating the file until it is needed. + + Files can also be opened atomically in which case all writes go into a + separate file in the same folder and upon completion the file will + be moved over to the original location. This is useful if a file + regularly read by other users is modified. 
+ + See :ref:`file-args` for more information. + + .. versionchanged:: 2.0 + Added the ``atomic`` parameter. + """ + + name = "filename" + envvar_list_splitter: t.ClassVar[str] = os.path.pathsep + + def __init__( + self, + mode: str = "r", + encoding: str | None = None, + errors: str | None = "strict", + lazy: bool | None = None, + atomic: bool = False, + ) -> None: + self.mode = mode + self.encoding = encoding + self.errors = errors + self.lazy = lazy + self.atomic = atomic + + def to_info_dict(self) -> dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update(mode=self.mode, encoding=self.encoding) + return info_dict + + def resolve_lazy_flag(self, value: str | os.PathLike[str]) -> bool: + if self.lazy is not None: + return self.lazy + if os.fspath(value) == "-": + return False + elif "w" in self.mode: + return True + return False + + def convert( + self, + value: str | os.PathLike[str] | t.IO[t.Any], + param: Parameter | None, + ctx: Context | None, + ) -> t.IO[t.Any]: + if _is_file_like(value): + return value + + value = t.cast("str | os.PathLike[str]", value) + + try: + lazy = self.resolve_lazy_flag(value) + + if lazy: + lf = LazyFile( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + + if ctx is not None: + ctx.call_on_close(lf.close_intelligently) + + return t.cast("t.IO[t.Any]", lf) + + f, should_close = open_stream( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + + # If a context is provided, we automatically close the file + # at the end of the context execution (or flush out). If a + # context does not exist, it's the caller's responsibility to + # properly close the file. This for instance happens when the + # type is used with prompts. + if ctx is not None: + if should_close: + ctx.call_on_close(safecall(f.close)) + else: + ctx.call_on_close(safecall(f.flush)) + + return f + except OSError as e: + self.fail(f"'{format_filename(value)}': {e.strerror}", param, ctx) + + def shell_complete( + self, ctx: Context, param: Parameter, incomplete: str + ) -> list[CompletionItem]: + """Return a special completion marker that tells the completion + system to use the shell to provide file path completions. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + return [CompletionItem(incomplete, type="file")] + + +def _is_file_like(value: t.Any) -> te.TypeGuard[t.IO[t.Any]]: + return hasattr(value, "read") or hasattr(value, "write") + + +class Path(ParamType): + """The ``Path`` type is similar to the :class:`File` type, but + returns the filename instead of an open file. Various checks can be + enabled to validate the type of file and permissions. + + :param exists: The file or directory needs to exist for the value to + be valid. If this is not set to ``True``, and the file does not + exist, then all further checks are silently skipped. + :param file_okay: Allow a file as a value. + :param dir_okay: Allow a directory as a value. + :param readable: if true, a readable check is performed. + :param writable: if true, a writable check is performed. + :param executable: if true, an executable check is performed. + :param resolve_path: Make the value absolute and resolve any + symlinks. A ``~`` is not expanded, as this is supposed to be + done by the shell only. 
+ :param allow_dash: Allow a single dash as a value, which indicates + a standard stream (but does not open it). Use + :func:`~click.open_file` to handle opening this value. + :param path_type: Convert the incoming path value to this type. If + ``None``, keep Python's default, which is ``str``. Useful to + convert to :class:`pathlib.Path`. + + .. versionchanged:: 8.1 + Added the ``executable`` parameter. + + .. versionchanged:: 8.0 + Allow passing ``path_type=pathlib.Path``. + + .. versionchanged:: 6.0 + Added the ``allow_dash`` parameter. + """ + + envvar_list_splitter: t.ClassVar[str] = os.path.pathsep + + def __init__( + self, + exists: bool = False, + file_okay: bool = True, + dir_okay: bool = True, + writable: bool = False, + readable: bool = True, + resolve_path: bool = False, + allow_dash: bool = False, + path_type: type[t.Any] | None = None, + executable: bool = False, + ): + self.exists = exists + self.file_okay = file_okay + self.dir_okay = dir_okay + self.readable = readable + self.writable = writable + self.executable = executable + self.resolve_path = resolve_path + self.allow_dash = allow_dash + self.type = path_type + + if self.file_okay and not self.dir_okay: + self.name: str = _("file") + elif self.dir_okay and not self.file_okay: + self.name = _("directory") + else: + self.name = _("path") + + def to_info_dict(self) -> dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + exists=self.exists, + file_okay=self.file_okay, + dir_okay=self.dir_okay, + writable=self.writable, + readable=self.readable, + allow_dash=self.allow_dash, + ) + return info_dict + + def coerce_path_result( + self, value: str | os.PathLike[str] + ) -> str | bytes | os.PathLike[str]: + if self.type is not None and not isinstance(value, self.type): + if self.type is str: + return os.fsdecode(value) + elif self.type is bytes: + return os.fsencode(value) + else: + return t.cast("os.PathLike[str]", self.type(value)) + + return value + + def convert( + self, + value: str | os.PathLike[str], + param: Parameter | None, + ctx: Context | None, + ) -> str | bytes | os.PathLike[str]: + rv = value + + is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") + + if not is_dash: + if self.resolve_path: + rv = os.path.realpath(rv) + + try: + st = os.stat(rv) + except OSError: + if not self.exists: + return self.coerce_path_result(rv) + self.fail( + _("{name} {filename!r} does not exist.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if not self.file_okay and stat.S_ISREG(st.st_mode): + self.fail( + _("{name} {filename!r} is a file.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + if not self.dir_okay and stat.S_ISDIR(st.st_mode): + self.fail( + _("{name} {filename!r} is a directory.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.readable and not os.access(rv, os.R_OK): + self.fail( + _("{name} {filename!r} is not readable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.writable and not os.access(rv, os.W_OK): + self.fail( + _("{name} {filename!r} is not writable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.executable and not os.access(value, os.X_OK): + self.fail( + _("{name} {filename!r} is not executable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + return 
self.coerce_path_result(rv) + + def shell_complete( + self, ctx: Context, param: Parameter, incomplete: str + ) -> list[CompletionItem]: + """Return a special completion marker that tells the completion + system to use the shell to provide path completions for only + directories or any paths. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + type = "dir" if self.dir_okay and not self.file_okay else "file" + return [CompletionItem(incomplete, type=type)] + + +class Tuple(CompositeParamType): + """The default behavior of Click is to apply a type on a value directly. + This works well in most cases, except for when `nargs` is set to a fixed + count and different types should be used for different items. In this + case the :class:`Tuple` type can be used. This type can only be used + if `nargs` is set to a fixed number. + + For more information see :ref:`tuple-type`. + + This can be selected by using a Python tuple literal as a type. + + :param types: a list of types that should be used for the tuple items. + """ + + def __init__(self, types: cabc.Sequence[type[t.Any] | ParamType]) -> None: + self.types: cabc.Sequence[ParamType] = [convert_type(ty) for ty in types] + + def to_info_dict(self) -> dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["types"] = [t.to_info_dict() for t in self.types] + return info_dict + + @property + def name(self) -> str: # type: ignore + return f"<{' '.join(ty.name for ty in self.types)}>" + + @property + def arity(self) -> int: # type: ignore + return len(self.types) + + def convert( + self, value: t.Any, param: Parameter | None, ctx: Context | None + ) -> t.Any: + len_type = len(self.types) + len_value = len(value) + + if len_value != len_type: + self.fail( + ngettext( + "{len_type} values are required, but {len_value} was given.", + "{len_type} values are required, but {len_value} were given.", + len_value, + ).format(len_type=len_type, len_value=len_value), + param=param, + ctx=ctx, + ) + + return tuple( + ty(x, param, ctx) for ty, x in zip(self.types, value, strict=False) + ) + + +def convert_type(ty: t.Any | None, default: t.Any | None = None) -> ParamType: + """Find the most appropriate :class:`ParamType` for the given Python + type. If the type isn't provided, it can be inferred from a default + value. + """ + guessed_type = False + + if ty is None and default is not None: + if isinstance(default, (tuple, list)): + # If the default is empty, ty will remain None and will + # return STRING. + if default: + item = default[0] + + # A tuple of tuples needs to detect the inner types. + # Can't call convert recursively because that would + # incorrectly unwind the tuple to a single type. + if isinstance(item, (tuple, list)): + ty = tuple(map(type, item)) + else: + ty = type(item) + else: + ty = type(default) + + guessed_type = True + + if isinstance(ty, tuple): + return Tuple(ty) + + if isinstance(ty, ParamType): + return ty + + if ty is str or ty is None: + return STRING + + if ty is int: + return INT + + if ty is float: + return FLOAT + + if ty is bool: + return BOOL + + if guessed_type: + return STRING + + if __debug__: + try: + if issubclass(ty, ParamType): + raise AssertionError( + f"Attempted to use an uninstantiated parameter type ({ty})." + ) + except TypeError: + # ty is an instance (correct), so issubclass fails. 
+ pass + + return FuncParamType(ty) + + +#: A dummy parameter type that just does nothing. From a user's +#: perspective this appears to just be the same as `STRING` but +#: internally no string conversion takes place if the input was bytes. +#: This is usually useful when working with file paths as they can +#: appear in bytes and unicode. +#: +#: For path related uses the :class:`Path` type is a better choice but +#: there are situations where an unprocessed type is useful which is why +#: it is is provided. +#: +#: .. versionadded:: 4.0 +UNPROCESSED = UnprocessedParamType() + +#: A unicode string parameter type which is the implicit default. This +#: can also be selected by using ``str`` as type. +STRING = StringParamType() + +#: An integer parameter. This can also be selected by using ``int`` as +#: type. +INT = IntParamType() + +#: A floating point value parameter. This can also be selected by using +#: ``float`` as type. +FLOAT = FloatParamType() + +#: A boolean parameter. This is the default for boolean flags. This can +#: also be selected by using ``bool`` as a type. +BOOL = BoolParamType() + +#: A UUID parameter. +UUID = UUIDParameterType() + + +class OptionHelpExtra(t.TypedDict, total=False): + envvars: tuple[str, ...] + default: str + range: str + required: str diff --git a/.venv/lib/python3.12/site-packages/click/utils.py b/.venv/lib/python3.12/site-packages/click/utils.py new file mode 100644 index 0000000..beae26f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/click/utils.py @@ -0,0 +1,627 @@ +from __future__ import annotations + +import collections.abc as cabc +import os +import re +import sys +import typing as t +from functools import update_wrapper +from types import ModuleType +from types import TracebackType + +from ._compat import _default_text_stderr +from ._compat import _default_text_stdout +from ._compat import _find_binary_writer +from ._compat import auto_wrap_for_ansi +from ._compat import binary_streams +from ._compat import open_stream +from ._compat import should_strip_ansi +from ._compat import strip_ansi +from ._compat import text_streams +from ._compat import WIN +from .globals import resolve_color_default + +if t.TYPE_CHECKING: + import typing_extensions as te + + P = te.ParamSpec("P") + +R = t.TypeVar("R") + + +def _posixify(name: str) -> str: + return "-".join(name.split()).lower() + + +def safecall(func: t.Callable[P, R]) -> t.Callable[P, R | None]: + """Wraps a function so that it swallows exceptions.""" + + def wrapper(*args: P.args, **kwargs: P.kwargs) -> R | None: + try: + return func(*args, **kwargs) + except Exception: + pass + return None + + return update_wrapper(wrapper, func) + + +def make_str(value: t.Any) -> str: + """Converts a value into a valid string.""" + if isinstance(value, bytes): + try: + return value.decode(sys.getfilesystemencoding()) + except UnicodeError: + return value.decode("utf-8", "replace") + return str(value) + + +def make_default_short_help(help: str, max_length: int = 45) -> str: + """Returns a condensed version of help string.""" + # Consider only the first paragraph. + paragraph_end = help.find("\n\n") + + if paragraph_end != -1: + help = help[:paragraph_end] + + # Collapse newlines, tabs, and spaces. + words = help.split() + + if not words: + return "" + + # The first paragraph started with a "no rewrap" marker, ignore it. 
+    if words[0] == "\b":
+        words = words[1:]
+
+    total_length = 0
+    last_index = len(words) - 1
+
+    for i, word in enumerate(words):
+        total_length += len(word) + (i > 0)
+
+        if total_length > max_length:  # too long, truncate
+            break
+
+        if word[-1] == ".":  # sentence end, truncate without "..."
+            return " ".join(words[: i + 1])
+
+        if total_length == max_length and i != last_index:
+            break  # not at sentence end, truncate with "..."
+    else:
+        return " ".join(words)  # no truncation needed
+
+    # Account for the length of the suffix.
+    total_length += len("...")
+
+    # remove words until the length is short enough
+    while i > 0:
+        total_length -= len(words[i]) + (i > 0)
+
+        if total_length <= max_length:
+            break
+
+        i -= 1
+
+    return " ".join(words[:i]) + "..."
+
+
+class LazyFile:
+    """A lazy file works like a regular file but it does not fully open
+    the file but it does perform some basic checks early to see if the
+    filename parameter does make sense. This is useful for safely opening
+    files for writing.
+    """
+
+    def __init__(
+        self,
+        filename: str | os.PathLike[str],
+        mode: str = "r",
+        encoding: str | None = None,
+        errors: str | None = "strict",
+        atomic: bool = False,
+    ):
+        self.name: str = os.fspath(filename)
+        self.mode = mode
+        self.encoding = encoding
+        self.errors = errors
+        self.atomic = atomic
+        self._f: t.IO[t.Any] | None
+        self.should_close: bool
+
+        if self.name == "-":
+            self._f, self.should_close = open_stream(filename, mode, encoding, errors)
+        else:
+            if "r" in mode:
+                # Open and close the file in case we're opening it for
+                # reading so that we can catch at least some errors in
+                # some cases early.
+                open(filename, mode).close()
+            self._f = None
+            self.should_close = True
+
+    def __getattr__(self, name: str) -> t.Any:
+        return getattr(self.open(), name)
+
+    def __repr__(self) -> str:
+        if self._f is not None:
+            return repr(self._f)
+        return f"<unopened file '{self.name}' {self.mode}>"
+
+    def open(self) -> t.IO[t.Any]:
+        """Opens the file if it's not yet open. This call might fail with
+        a :exc:`FileError`. Not handling this error will produce an error
+        that Click shows.
+        """
+        if self._f is not None:
+            return self._f
+        try:
+            rv, self.should_close = open_stream(
+                self.name, self.mode, self.encoding, self.errors, atomic=self.atomic
+            )
+        except OSError as e:
+            from .exceptions import FileError
+
+            raise FileError(self.name, hint=e.strerror) from e
+        self._f = rv
+        return rv
+
+    def close(self) -> None:
+        """Closes the underlying file, no matter what."""
+        if self._f is not None:
+            self._f.close()
+
+    def close_intelligently(self) -> None:
+        """This function only closes the file if it was opened by the lazy
+        file wrapper. For instance this will never close stdin.
+ """ + if self.should_close: + self.close() + + def __enter__(self) -> LazyFile: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + tb: TracebackType | None, + ) -> None: + self.close_intelligently() + + def __iter__(self) -> cabc.Iterator[t.AnyStr]: + self.open() + return iter(self._f) # type: ignore + + +class KeepOpenFile: + def __init__(self, file: t.IO[t.Any]) -> None: + self._file: t.IO[t.Any] = file + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._file, name) + + def __enter__(self) -> KeepOpenFile: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + tb: TracebackType | None, + ) -> None: + pass + + def __repr__(self) -> str: + return repr(self._file) + + def __iter__(self) -> cabc.Iterator[t.AnyStr]: + return iter(self._file) + + +def echo( + message: t.Any | None = None, + file: t.IO[t.Any] | None = None, + nl: bool = True, + err: bool = False, + color: bool | None = None, +) -> None: + """Print a message and newline to stdout or a file. This should be + used instead of :func:`print` because it provides better support + for different data, files, and environments. + + Compared to :func:`print`, this does the following: + + - Ensures that the output encoding is not misconfigured on Linux. + - Supports Unicode in the Windows console. + - Supports writing to binary outputs, and supports writing bytes + to text outputs. + - Supports colors and styles on Windows. + - Removes ANSI color and style codes if the output does not look + like an interactive terminal. + - Always flushes the output. + + :param message: The string or bytes to output. Other objects are + converted to strings. + :param file: The file to write to. Defaults to ``stdout``. + :param err: Write to ``stderr`` instead of ``stdout``. + :param nl: Print a newline after the message. Enabled by default. + :param color: Force showing or hiding colors and other styles. By + default Click will remove color if the output does not look like + an interactive terminal. + + .. versionchanged:: 6.0 + Support Unicode output on the Windows console. Click does not + modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()`` + will still not support Unicode. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionadded:: 3.0 + Added the ``err`` parameter. + + .. versionchanged:: 2.0 + Support colors on Windows if colorama is installed. + """ + if file is None: + if err: + file = _default_text_stderr() + else: + file = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. + if file is None: + return + + # Convert non bytes/text into the native string type. + if message is not None and not isinstance(message, (str, bytes, bytearray)): + out: str | bytes | bytearray | None = str(message) + else: + out = message + + if nl: + out = out or "" + if isinstance(out, str): + out += "\n" + else: + out += b"\n" + + if not out: + file.flush() + return + + # If there is a message and the value looks like bytes, we manually + # need to find the binary stream and write the message in there. + # This is done separately so that most stream types will work as you + # would expect. Eg: you can write to StringIO for other cases. 
+ if isinstance(out, (bytes, bytearray)): + binary_file = _find_binary_writer(file) + + if binary_file is not None: + file.flush() + binary_file.write(out) + binary_file.flush() + return + + # ANSI style code support. For no message or bytes, nothing happens. + # When outputting to a file instead of a terminal, strip codes. + else: + color = resolve_color_default(color) + + if should_strip_ansi(file, color): + out = strip_ansi(out) + elif WIN: + if auto_wrap_for_ansi is not None: + file = auto_wrap_for_ansi(file, color) # type: ignore + elif not color: + out = strip_ansi(out) + + file.write(out) # type: ignore + file.flush() + + +def get_binary_stream(name: t.Literal["stdin", "stdout", "stderr"]) -> t.BinaryIO: + """Returns a system stream for byte processing. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + """ + opener = binary_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener() + + +def get_text_stream( + name: t.Literal["stdin", "stdout", "stderr"], + encoding: str | None = None, + errors: str | None = "strict", +) -> t.TextIO: + """Returns a system stream for text processing. This usually returns + a wrapped stream around a binary stream returned from + :func:`get_binary_stream` but it also can take shortcuts for already + correctly configured streams. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + :param encoding: overrides the detected default encoding. + :param errors: overrides the default error mode. + """ + opener = text_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener(encoding, errors) + + +def open_file( + filename: str | os.PathLike[str], + mode: str = "r", + encoding: str | None = None, + errors: str | None = "strict", + lazy: bool = False, + atomic: bool = False, +) -> t.IO[t.Any]: + """Open a file, with extra behavior to handle ``'-'`` to indicate + a standard stream, lazy open on write, and atomic write. Similar to + the behavior of the :class:`~click.File` param type. + + If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is + wrapped so that using it in a context manager will not close it. + This makes it possible to use the function without accidentally + closing a standard stream: + + .. code-block:: python + + with open_file(filename) as f: + ... + + :param filename: The name or Path of the file to open, or ``'-'`` for + ``stdin``/``stdout``. + :param mode: The mode in which to open the file. + :param encoding: The encoding to decode or encode a file opened in + text mode. + :param errors: The error handling mode. + :param lazy: Wait to open the file until it is accessed. For read + mode, the file is temporarily opened to raise access errors + early, then closed until it is read again. + :param atomic: Write to a temporary file and replace the given file + on close. + + .. versionadded:: 3.0 + """ + if lazy: + return t.cast( + "t.IO[t.Any]", LazyFile(filename, mode, encoding, errors, atomic=atomic) + ) + + f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic) + + if not should_close: + f = t.cast("t.IO[t.Any]", KeepOpenFile(f)) + + return f + + +def format_filename( + filename: str | bytes | os.PathLike[str] | os.PathLike[bytes], + shorten: bool = False, +) -> str: + """Format a filename as a string for display. 
Ensures the filename can be
+    displayed by replacing any invalid bytes or surrogate escapes in the name
+    with the replacement character ``�``.
+
+    Invalid bytes or surrogate escapes will raise an error when written to a
+    stream with ``errors="strict"``. This will typically happen with ``stdout``
+    when the locale is something like ``en_GB.UTF-8``.
+
+    Many scenarios *are* safe to write surrogates though, due to PEP 538 and
+    PEP 540, including:
+
+    - Writing to ``stderr``, which uses ``errors="backslashreplace"``.
+    - The system has ``LANG=C.UTF-8``, ``C``, or ``POSIX``. Python opens
+      stdout and stderr with ``errors="surrogateescape"``.
+    - None of ``LANG/LC_*`` are set. Python assumes ``LANG=C.UTF-8``.
+    - Python is started in UTF-8 mode with ``PYTHONUTF8=1`` or ``-X utf8``.
+      Python opens stdout and stderr with ``errors="surrogateescape"``.
+
+    :param filename: formats a filename for UI display. This will also convert
+        the filename into unicode without failing.
+    :param shorten: this optionally shortens the filename to strip off the
+        path that leads up to it.
+    """
+    if shorten:
+        filename = os.path.basename(filename)
+    else:
+        filename = os.fspath(filename)
+
+    if isinstance(filename, bytes):
+        filename = filename.decode(sys.getfilesystemencoding(), "replace")
+    else:
+        filename = filename.encode("utf-8", "surrogateescape").decode(
+            "utf-8", "replace"
+        )
+
+    return filename
+
+
+def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str:
+    r"""Returns the config folder for the application. The default behavior
+    is to return whatever is most appropriate for the operating system.
+
+    To give you an idea, for an app called ``"Foo Bar"``, something like
+    the following folders could be returned:
+
+    Mac OS X:
+      ``~/Library/Application Support/Foo Bar``
+    Mac OS X (POSIX):
+      ``~/.foo-bar``
+    Unix:
+      ``~/.config/foo-bar``
+    Unix (POSIX):
+      ``~/.foo-bar``
+    Windows (roaming):
+      ``C:\Users\<user>\AppData\Roaming\Foo Bar``
+    Windows (not roaming):
+      ``C:\Users\<user>\AppData\Local\Foo Bar``
+
+    .. versionadded:: 2.0
+
+    :param app_name: the application name. This should be properly capitalized
+        and can contain whitespace.
+    :param roaming: controls if the folder should be roaming or not on Windows.
+        Has no effect otherwise.
+    :param force_posix: if this is set to `True` then on any POSIX system the
+        folder will be stored in the home folder with a leading
+        dot instead of the XDG config home or darwin's
+        application support folder.
+    """
+    if WIN:
+        key = "APPDATA" if roaming else "LOCALAPPDATA"
+        folder = os.environ.get(key)
+        if folder is None:
+            folder = os.path.expanduser("~")
+        return os.path.join(folder, app_name)
+    if force_posix:
+        return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}"))
+    if sys.platform == "darwin":
+        return os.path.join(
+            os.path.expanduser("~/Library/Application Support"), app_name
+        )
+    return os.path.join(
+        os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
+        _posixify(app_name),
+    )
+
+
+class PacifyFlushWrapper:
+    """This wrapper is used to catch and suppress BrokenPipeErrors resulting
+    from ``.flush()`` being called on broken pipe during the shutdown/final-GC
+    of the Python interpreter. Notably ``.flush()`` is always called on
+    ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any
+    other cleanup code, and the case where the underlying file is not a broken
+    pipe, all calls and attributes are proxied.
+ """ + + def __init__(self, wrapped: t.IO[t.Any]) -> None: + self.wrapped = wrapped + + def flush(self) -> None: + try: + self.wrapped.flush() + except OSError as e: + import errno + + if e.errno != errno.EPIPE: + raise + + def __getattr__(self, attr: str) -> t.Any: + return getattr(self.wrapped, attr) + + +def _detect_program_name( + path: str | None = None, _main: ModuleType | None = None +) -> str: + """Determine the command used to run the program, for use in help + text. If a file or entry point was executed, the file name is + returned. If ``python -m`` was used to execute a module or package, + ``python -m name`` is returned. + + This doesn't try to be too precise, the goal is to give a concise + name for help text. Files are only shown as their name without the + path. ``python`` is only shown for modules, and the full path to + ``sys.executable`` is not shown. + + :param path: The Python file being executed. Python puts this in + ``sys.argv[0]``, which is used by default. + :param _main: The ``__main__`` module. This should only be passed + during internal testing. + + .. versionadded:: 8.0 + Based on command args detection in the Werkzeug reloader. + + :meta private: + """ + if _main is None: + _main = sys.modules["__main__"] + + if not path: + path = sys.argv[0] + + # The value of __package__ indicates how Python was called. It may + # not exist if a setuptools script is installed as an egg. It may be + # set incorrectly for entry points created with pip on Windows. + # It is set to "" inside a Shiv or PEX zipapp. + if getattr(_main, "__package__", None) in {None, ""} or ( + os.name == "nt" + and _main.__package__ == "" + and not os.path.exists(path) + and os.path.exists(f"{path}.exe") + ): + # Executed a file, like "python app.py". + return os.path.basename(path) + + # Executed a module, like "python -m example". + # Rewritten by Python from "-m script" to "/path/to/script.py". + # Need to look at main module to determine how it was executed. + py_module = t.cast(str, _main.__package__) + name = os.path.splitext(os.path.basename(path))[0] + + # A submodule like "example.cli". + if name != "__main__": + py_module = f"{py_module}.{name}" + + return f"python -m {py_module.lstrip('.')}" + + +def _expand_args( + args: cabc.Iterable[str], + *, + user: bool = True, + env: bool = True, + glob_recursive: bool = True, +) -> list[str]: + """Simulate Unix shell expansion with Python functions. + + See :func:`glob.glob`, :func:`os.path.expanduser`, and + :func:`os.path.expandvars`. + + This is intended for use on Windows, where the shell does not do any + expansion. It may not exactly match what a Unix shell would do. + + :param args: List of command line arguments to expand. + :param user: Expand user home directory. + :param env: Expand environment variables. + :param glob_recursive: ``**`` matches directories recursively. + + .. versionchanged:: 8.1 + Invalid glob patterns are treated as empty expansions rather + than raising an error. + + .. 
versionadded:: 8.0 + + :meta private: + """ + from glob import glob + + out = [] + + for arg in args: + if user: + arg = os.path.expanduser(arg) + + if env: + arg = os.path.expandvars(arg) + + try: + matches = glob(arg, recursive=glob_recursive) + except re.error: + matches = [] + + if not matches: + out.append(arg) + else: + out.extend(matches) + + return out diff --git a/.venv/lib/python3.12/site-packages/fastapi-0.128.0.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/fastapi-0.128.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi-0.128.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/fastapi-0.128.0.dist-info/METADATA b/.venv/lib/python3.12/site-packages/fastapi-0.128.0.dist-info/METADATA new file mode 100644 index 0000000..f5f8bd9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi-0.128.0.dist-info/METADATA @@ -0,0 +1,645 @@ +Metadata-Version: 2.4 +Name: fastapi +Version: 0.128.0 +Summary: FastAPI framework, high performance, easy to learn, fast to code, ready for production +Author-Email: =?utf-8?q?Sebasti=C3=A1n_Ram=C3=ADrez?= +License-Expression: MIT +License-File: LICENSE +Classifier: Intended Audience :: Information Technology +Classifier: Intended Audience :: System Administrators +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python +Classifier: Topic :: Internet +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development +Classifier: Typing :: Typed +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Web Environment +Classifier: Framework :: AsyncIO +Classifier: Framework :: FastAPI +Classifier: Framework :: Pydantic +Classifier: Framework :: Pydantic :: 2 +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Topic :: Internet :: WWW/HTTP :: HTTP Servers +Classifier: Topic :: Internet :: WWW/HTTP +Project-URL: Homepage, https://github.com/fastapi/fastapi +Project-URL: Documentation, https://fastapi.tiangolo.com/ +Project-URL: Repository, https://github.com/fastapi/fastapi +Project-URL: Issues, https://github.com/fastapi/fastapi/issues +Project-URL: Changelog, https://fastapi.tiangolo.com/release-notes/ +Requires-Python: >=3.9 +Requires-Dist: starlette<0.51.0,>=0.40.0 +Requires-Dist: pydantic>=2.7.0 +Requires-Dist: typing-extensions>=4.8.0 +Requires-Dist: annotated-doc>=0.0.2 +Provides-Extra: standard +Requires-Dist: fastapi-cli[standard]>=0.0.8; extra == "standard" +Requires-Dist: httpx<1.0.0,>=0.23.0; extra == "standard" +Requires-Dist: jinja2>=3.1.5; extra == "standard" +Requires-Dist: python-multipart>=0.0.18; extra == "standard" +Requires-Dist: email-validator>=2.0.0; extra == "standard" +Requires-Dist: uvicorn[standard]>=0.12.0; extra == "standard" +Requires-Dist: pydantic-settings>=2.0.0; extra == "standard" +Requires-Dist: pydantic-extra-types>=2.0.0; extra == "standard" 
+Provides-Extra: standard-no-fastapi-cloud-cli +Requires-Dist: fastapi-cli[standard-no-fastapi-cloud-cli]>=0.0.8; extra == "standard-no-fastapi-cloud-cli" +Requires-Dist: httpx<1.0.0,>=0.23.0; extra == "standard-no-fastapi-cloud-cli" +Requires-Dist: jinja2>=3.1.5; extra == "standard-no-fastapi-cloud-cli" +Requires-Dist: python-multipart>=0.0.18; extra == "standard-no-fastapi-cloud-cli" +Requires-Dist: email-validator>=2.0.0; extra == "standard-no-fastapi-cloud-cli" +Requires-Dist: uvicorn[standard]>=0.12.0; extra == "standard-no-fastapi-cloud-cli" +Requires-Dist: pydantic-settings>=2.0.0; extra == "standard-no-fastapi-cloud-cli" +Requires-Dist: pydantic-extra-types>=2.0.0; extra == "standard-no-fastapi-cloud-cli" +Provides-Extra: all +Requires-Dist: fastapi-cli[standard]>=0.0.8; extra == "all" +Requires-Dist: httpx<1.0.0,>=0.23.0; extra == "all" +Requires-Dist: jinja2>=3.1.5; extra == "all" +Requires-Dist: python-multipart>=0.0.18; extra == "all" +Requires-Dist: itsdangerous>=1.1.0; extra == "all" +Requires-Dist: pyyaml>=5.3.1; extra == "all" +Requires-Dist: ujson!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,>=4.0.1; extra == "all" +Requires-Dist: orjson>=3.2.1; extra == "all" +Requires-Dist: email-validator>=2.0.0; extra == "all" +Requires-Dist: uvicorn[standard]>=0.12.0; extra == "all" +Requires-Dist: pydantic-settings>=2.0.0; extra == "all" +Requires-Dist: pydantic-extra-types>=2.0.0; extra == "all" +Description-Content-Type: text/markdown + +

+# FastAPI
+
+FastAPI framework, high performance, easy to learn, fast to code, ready for production
+
+Test | Coverage | Package version | Supported Python versions
+
+---
+
+**Documentation**: https://fastapi.tiangolo.com
+
+**Source Code**: https://github.com/fastapi/fastapi
+
+---
+
+FastAPI is a modern, fast (high-performance), web framework for building APIs with Python based on standard Python type hints.
+
+The key features are:
+
+* **Fast**: Very high performance, on par with **NodeJS** and **Go** (thanks to Starlette and Pydantic). [One of the fastest Python frameworks available](#performance).
+* **Fast to code**: Increase the speed to develop features by about 200% to 300%. *
+* **Fewer bugs**: Reduce about 40% of human (developer) induced errors. *
+* **Intuitive**: Great editor support. Completion everywhere. Less time debugging.
+* **Easy**: Designed to be easy to use and learn. Less time reading docs.
+* **Short**: Minimize code duplication. Multiple features from each parameter declaration. Fewer bugs.
+* **Robust**: Get production-ready code. With automatic interactive documentation.
+* **Standards-based**: Based on (and fully compatible with) the open standards for APIs: OpenAPI (previously known as Swagger) and JSON Schema.
+
+* estimation based on tests conducted by an internal development team, building production applications.
+
+## Sponsors
+
+### Keystone Sponsor
+
+### Gold and Silver Sponsors
+
+Other sponsors
+
+## Opinions
+
+"_[...] I'm using **FastAPI** a ton these days. [...] I'm actually planning to use it for all of my team's **ML services at Microsoft**. Some of them are getting integrated into the core **Windows** product and some **Office** products._"
+
+
Kabir Khan - Microsoft (ref)
+ +--- + +"_We adopted the **FastAPI** library to spawn a **REST** server that can be queried to obtain **predictions**. [for Ludwig]_" + +
Piero Molino, Yaroslav Dudin, and Sai Sumanth Miryala - Uber (ref)
+ +--- + +"_**Netflix** is pleased to announce the open-source release of our **crisis management** orchestration framework: **Dispatch**! [built with **FastAPI**]_" + +
Kevin Glisson, Marc Vilanova, Forest Monsen - Netflix (ref)
+ +--- + +"_I’m over the moon excited about **FastAPI**. It’s so fun!_" + +
Brian Okken - Python Bytes podcast host (ref)
+ +--- + +"_Honestly, what you've built looks super solid and polished. In many ways, it's what I wanted **Hug** to be - it's really inspiring to see someone build that._" + +
Timothy Crosley - Hug creator (ref)
+ +--- + +"_If you're looking to learn one **modern framework** for building REST APIs, check out **FastAPI** [...] It's fast, easy to use and easy to learn [...]_" + +"_We've switched over to **FastAPI** for our **APIs** [...] I think you'll like it [...]_" + +
Ines Montani - Matthew Honnibal - Explosion AI founders - spaCy creators (ref) - (ref)
+ +--- + +"_If anyone is looking to build a production Python API, I would highly recommend **FastAPI**. It is **beautifully designed**, **simple to use** and **highly scalable**, it has become a **key component** in our API first development strategy and is driving many automations and services such as our Virtual TAC Engineer._" + +
Deon Pillsbury - Cisco (ref)
+ +--- + +## FastAPI mini documentary + +There's a FastAPI mini documentary released at the end of 2025, you can watch it online: + +FastAPI Mini Documentary + +## **Typer**, the FastAPI of CLIs + + + +If you are building a CLI app to be used in the terminal instead of a web API, check out **Typer**. + +**Typer** is FastAPI's little sibling. And it's intended to be the **FastAPI of CLIs**. ⌨️ 🚀 + +## Requirements + +FastAPI stands on the shoulders of giants: + +* Starlette for the web parts. +* Pydantic for the data parts. + +## Installation + +Create and activate a virtual environment and then install FastAPI: + +
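+
+For example, a virtual environment can be created and activated with the standard library `venv` module; a minimal sketch for a Unix-like shell, using `.venv` as the environment name (on Windows, run `.venv\Scripts\activate` instead):
+
+```console
+$ python -m venv .venv
+$ source .venv/bin/activate
+```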
+ +```console +$ pip install "fastapi[standard]" + +---> 100% +``` + +
+ +**Note**: Make sure you put `"fastapi[standard]"` in quotes to ensure it works in all terminals. + +## Example + +### Create it + +Create a file `main.py` with: + +```Python +from typing import Union + +from fastapi import FastAPI + +app = FastAPI() + + +@app.get("/") +def read_root(): + return {"Hello": "World"} + + +@app.get("/items/{item_id}") +def read_item(item_id: int, q: Union[str, None] = None): + return {"item_id": item_id, "q": q} +``` + +
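+
+As a quick aside (not part of the upstream quickstart), the app above can also be exercised in-process with the bundled test client; a minimal sketch, assuming `httpx` is installed (it is included in the `standard` extra):
+
+```Python
+from fastapi.testclient import TestClient
+
+from main import app
+
+# TestClient wraps the app so requests run in-process, no server needed.
+client = TestClient(app)
+
+response = client.get("/items/5", params={"q": "somequery"})
+assert response.status_code == 200
+assert response.json() == {"item_id": 5, "q": "somequery"}
+```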
+Or use async def... + +If your code uses `async` / `await`, use `async def`: + +```Python hl_lines="9 14" +from typing import Union + +from fastapi import FastAPI + +app = FastAPI() + + +@app.get("/") +async def read_root(): + return {"Hello": "World"} + + +@app.get("/items/{item_id}") +async def read_item(item_id: int, q: Union[str, None] = None): + return {"item_id": item_id, "q": q} +``` + +**Note**: + +If you don't know, check the _"In a hurry?"_ section about `async` and `await` in the docs. + +
+ +### Run it + +Run the server with: + +
+ +```console +$ fastapi dev main.py + + ╭────────── FastAPI CLI - Development mode ───────────╮ + │ │ + │ Serving at: http://127.0.0.1:8000 │ + │ │ + │ API docs: http://127.0.0.1:8000/docs │ + │ │ + │ Running in development mode, for production use: │ + │ │ + │ fastapi run │ + │ │ + ╰─────────────────────────────────────────────────────╯ + +INFO: Will watch for changes in these directories: ['/home/user/code/awesomeapp'] +INFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit) +INFO: Started reloader process [2248755] using WatchFiles +INFO: Started server process [2248757] +INFO: Waiting for application startup. +INFO: Application startup complete. +``` + +
+ +
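+
+The app can also be served directly with Uvicorn, which `fastapi dev` uses under the hood; a roughly equivalent sketch, assuming the same `main.py`:
+
+```console
+$ uvicorn main:app --reload
+```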
+About the command fastapi dev main.py... + +The command `fastapi dev` reads your `main.py` file, detects the **FastAPI** app in it, and starts a server using Uvicorn. + +By default, `fastapi dev` will start with auto-reload enabled for local development. + +You can read more about it in the FastAPI CLI docs. + +
+ +### Check it + +Open your browser at http://127.0.0.1:8000/items/5?q=somequery. + +You will see the JSON response as: + +```JSON +{"item_id": 5, "q": "somequery"} +``` + +You already created an API that: + +* Receives HTTP requests in the _paths_ `/` and `/items/{item_id}`. +* Both _paths_ take `GET` operations (also known as HTTP _methods_). +* The _path_ `/items/{item_id}` has a _path parameter_ `item_id` that should be an `int`. +* The _path_ `/items/{item_id}` has an optional `str` _query parameter_ `q`. + +### Interactive API docs + +Now go to http://127.0.0.1:8000/docs. + +You will see the automatic interactive API documentation (provided by Swagger UI): + +![Swagger UI](https://fastapi.tiangolo.com/img/index/index-01-swagger-ui-simple.png) + +### Alternative API docs + +And now, go to http://127.0.0.1:8000/redoc. + +You will see the alternative automatic documentation (provided by ReDoc): + +![ReDoc](https://fastapi.tiangolo.com/img/index/index-02-redoc-simple.png) + +## Example upgrade + +Now modify the file `main.py` to receive a body from a `PUT` request. + +Declare the body using standard Python types, thanks to Pydantic. + +```Python hl_lines="4 9-12 25-27" +from typing import Union + +from fastapi import FastAPI +from pydantic import BaseModel + +app = FastAPI() + + +class Item(BaseModel): + name: str + price: float + is_offer: Union[bool, None] = None + + +@app.get("/") +def read_root(): + return {"Hello": "World"} + + +@app.get("/items/{item_id}") +def read_item(item_id: int, q: Union[str, None] = None): + return {"item_id": item_id, "q": q} + + +@app.put("/items/{item_id}") +def update_item(item_id: int, item: Item): + return {"item_name": item.name, "item_id": item_id} +``` + +The `fastapi dev` server should reload automatically. + +### Interactive API docs upgrade + +Now go to http://127.0.0.1:8000/docs. + +* The interactive API documentation will be automatically updated, including the new body: + +![Swagger UI](https://fastapi.tiangolo.com/img/index/index-03-swagger-02.png) + +* Click on the button "Try it out", it allows you to fill the parameters and directly interact with the API: + +![Swagger UI interaction](https://fastapi.tiangolo.com/img/index/index-04-swagger-03.png) + +* Then click on the "Execute" button, the user interface will communicate with your API, send the parameters, get the results and show them on the screen: + +![Swagger UI interaction](https://fastapi.tiangolo.com/img/index/index-05-swagger-04.png) + +### Alternative API docs upgrade + +And now, go to http://127.0.0.1:8000/redoc. + +* The alternative documentation will also reflect the new query parameter and body: + +![ReDoc](https://fastapi.tiangolo.com/img/index/index-06-redoc-02.png) + +### Recap + +In summary, you declare **once** the types of parameters, body, etc. as function parameters. + +You do that with standard modern Python types. + +You don't have to learn a new syntax, the methods or classes of a specific library, etc. + +Just standard **Python**. + +For example, for an `int`: + +```Python +item_id: int +``` + +or for a more complex `Item` model: + +```Python +item: Item +``` + +...and with that single declaration you get: + +* Editor support, including: + * Completion. + * Type checks. +* Validation of data: + * Automatic and clear errors when the data is invalid. + * Validation even for deeply nested JSON objects. +* Conversion of input data: coming from the network to Python data and types. Reading from: + * JSON. + * Path parameters. + * Query parameters. + * Cookies. 
+ * Headers. + * Forms. + * Files. +* Conversion of output data: converting from Python data and types to network data (as JSON): + * Convert Python types (`str`, `int`, `float`, `bool`, `list`, etc). + * `datetime` objects. + * `UUID` objects. + * Database models. + * ...and many more. +* Automatic interactive API documentation, including 2 alternative user interfaces: + * Swagger UI. + * ReDoc. + +--- + +Coming back to the previous code example, **FastAPI** will: + +* Validate that there is an `item_id` in the path for `GET` and `PUT` requests. +* Validate that the `item_id` is of type `int` for `GET` and `PUT` requests. + * If it is not, the client will see a useful, clear error. +* Check if there is an optional query parameter named `q` (as in `http://127.0.0.1:8000/items/foo?q=somequery`) for `GET` requests. + * As the `q` parameter is declared with `= None`, it is optional. + * Without the `None` it would be required (as is the body in the case with `PUT`). +* For `PUT` requests to `/items/{item_id}`, read the body as JSON: + * Check that it has a required attribute `name` that should be a `str`. + * Check that it has a required attribute `price` that has to be a `float`. + * Check that it has an optional attribute `is_offer`, that should be a `bool`, if present. + * All this would also work for deeply nested JSON objects. +* Convert from and to JSON automatically. +* Document everything with OpenAPI, that can be used by: + * Interactive documentation systems. + * Automatic client code generation systems, for many languages. +* Provide 2 interactive documentation web interfaces directly. + +--- + +We just scratched the surface, but you already get the idea of how it all works. + +Try changing the line with: + +```Python + return {"item_name": item.name, "item_id": item_id} +``` + +...from: + +```Python + ... "item_name": item.name ... +``` + +...to: + +```Python + ... "item_price": item.price ... +``` + +...and see how your editor will auto-complete the attributes and know their types: + +![editor support](https://fastapi.tiangolo.com/img/vscode-completion.png) + +For a more complete example including more features, see the Tutorial - User Guide. + +**Spoiler alert**: the tutorial - user guide includes: + +* Declaration of **parameters** from other different places as: **headers**, **cookies**, **form fields** and **files**. +* How to set **validation constraints** as `maximum_length` or `regex`. +* A very powerful and easy to use **Dependency Injection** system. +* Security and authentication, including support for **OAuth2** with **JWT tokens** and **HTTP Basic** auth. +* More advanced (but equally easy) techniques for declaring **deeply nested JSON models** (thanks to Pydantic). +* **GraphQL** integration with Strawberry and other libraries. +* Many extra features (thanks to Starlette) as: + * **WebSockets** + * extremely easy tests based on HTTPX and `pytest` + * **CORS** + * **Cookie Sessions** + * ...and more. + +### Deploy your app (optional) + +You can optionally deploy your FastAPI app to FastAPI Cloud, go and join the waiting list if you haven't. 🚀 + +If you already have a **FastAPI Cloud** account (we invited you from the waiting list 😉), you can deploy your application with one command. + +Before deploying, make sure you are logged in: + +
+ +```console +$ fastapi login + +You are logged in to FastAPI Cloud 🚀 +``` + +
+ +Then deploy your app: + +
+ +```console +$ fastapi deploy + +Deploying to FastAPI Cloud... + +✅ Deployment successful! + +🐔 Ready the chicken! Your app is ready at https://myapp.fastapicloud.dev +``` + +
+ +That's it! Now you can access your app at that URL. ✨ + +#### About FastAPI Cloud + +**FastAPI Cloud** is built by the same author and team behind **FastAPI**. + +It streamlines the process of **building**, **deploying**, and **accessing** an API with minimal effort. + +It brings the same **developer experience** of building apps with FastAPI to **deploying** them to the cloud. 🎉 + +FastAPI Cloud is the primary sponsor and funding provider for the *FastAPI and friends* open source projects. ✨ + +#### Deploy to other cloud providers + +FastAPI is open source and based on standards. You can deploy FastAPI apps to any cloud provider you choose. + +Follow your cloud provider's guides to deploy FastAPI apps with them. 🤓 + +## Performance + +Independent TechEmpower benchmarks show **FastAPI** applications running under Uvicorn as one of the fastest Python frameworks available, only below Starlette and Uvicorn themselves (used internally by FastAPI). (*) + +To understand more about it, see the section Benchmarks. + +## Dependencies + +FastAPI depends on Pydantic and Starlette. + +### `standard` Dependencies + +When you install FastAPI with `pip install "fastapi[standard]"` it comes with the `standard` group of optional dependencies: + +Used by Pydantic: + +* email-validator - for email validation. + +Used by Starlette: + +* httpx - Required if you want to use the `TestClient`. +* jinja2 - Required if you want to use the default template configuration. +* python-multipart - Required if you want to support form "parsing", with `request.form()`. + +Used by FastAPI: + +* uvicorn - for the server that loads and serves your application. This includes `uvicorn[standard]`, which includes some dependencies (e.g. `uvloop`) needed for high performance serving. +* `fastapi-cli[standard]` - to provide the `fastapi` command. + * This includes `fastapi-cloud-cli`, which allows you to deploy your FastAPI application to FastAPI Cloud. + +### Without `standard` Dependencies + +If you don't want to include the `standard` optional dependencies, you can install with `pip install fastapi` instead of `pip install "fastapi[standard]"`. + +### Without `fastapi-cloud-cli` + +If you want to install FastAPI with the standard dependencies but without the `fastapi-cloud-cli`, you can install with `pip install "fastapi[standard-no-fastapi-cloud-cli]"`. + +### Additional Optional Dependencies + +There are some additional dependencies you might want to install. + +Additional optional Pydantic dependencies: + +* pydantic-settings - for settings management. +* pydantic-extra-types - for extra types to be used with Pydantic. + +Additional optional FastAPI dependencies: + +* orjson - Required if you want to use `ORJSONResponse`. +* ujson - Required if you want to use `UJSONResponse`. + +## License + +This project is licensed under the terms of the MIT license. 
diff --git a/.venv/lib/python3.12/site-packages/fastapi-0.128.0.dist-info/RECORD b/.venv/lib/python3.12/site-packages/fastapi-0.128.0.dist-info/RECORD new file mode 100644 index 0000000..39e49c5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi-0.128.0.dist-info/RECORD @@ -0,0 +1,102 @@ +../../../bin/fastapi,sha256=e3_n1rDyB4zeuggAT7-lBzrqEOObd7gUej0B93Rcncw,242 +fastapi-0.128.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +fastapi-0.128.0.dist-info/METADATA,sha256=hKL7LtKoBl4ojHSJ_u5MsihJRMPBU5AsppXt75m24FY,30977 +fastapi-0.128.0.dist-info/RECORD,, +fastapi-0.128.0.dist-info/WHEEL,sha256=tsUv_t7BDeJeRHaSrczbGeuK-TtDpGsWi_JfpzD255I,90 +fastapi-0.128.0.dist-info/entry_points.txt,sha256=GCf-WbIZxyGT4MUmrPGj1cOHYZoGsNPHAvNkT6hnGeA,61 +fastapi-0.128.0.dist-info/licenses/LICENSE,sha256=Tsif_IFIW5f-xYSy1KlhAy7v_oNEU4lP2cEnSQbMdE4,1086 +fastapi/__init__.py,sha256=wlYVfo3p5m4F8JxyVpV1aFS5XKRzgCXBfB9s231GveY,1081 +fastapi/__main__.py,sha256=bKePXLdO4SsVSM6r9SVoLickJDcR2c0cTOxZRKq26YQ,37 +fastapi/__pycache__/__init__.cpython-312.pyc,, +fastapi/__pycache__/__main__.cpython-312.pyc,, +fastapi/__pycache__/applications.cpython-312.pyc,, +fastapi/__pycache__/background.cpython-312.pyc,, +fastapi/__pycache__/cli.cpython-312.pyc,, +fastapi/__pycache__/concurrency.cpython-312.pyc,, +fastapi/__pycache__/datastructures.cpython-312.pyc,, +fastapi/__pycache__/encoders.cpython-312.pyc,, +fastapi/__pycache__/exception_handlers.cpython-312.pyc,, +fastapi/__pycache__/exceptions.cpython-312.pyc,, +fastapi/__pycache__/logger.cpython-312.pyc,, +fastapi/__pycache__/param_functions.cpython-312.pyc,, +fastapi/__pycache__/params.cpython-312.pyc,, +fastapi/__pycache__/requests.cpython-312.pyc,, +fastapi/__pycache__/responses.cpython-312.pyc,, +fastapi/__pycache__/routing.cpython-312.pyc,, +fastapi/__pycache__/staticfiles.cpython-312.pyc,, +fastapi/__pycache__/templating.cpython-312.pyc,, +fastapi/__pycache__/testclient.cpython-312.pyc,, +fastapi/__pycache__/types.cpython-312.pyc,, +fastapi/__pycache__/utils.cpython-312.pyc,, +fastapi/__pycache__/websockets.cpython-312.pyc,, +fastapi/_compat/__init__.py,sha256=o3dg67W5LlwA52_1Y9Re_JhelcG0oj5ke_GdQHcwBnw,2226 +fastapi/_compat/__pycache__/__init__.cpython-312.pyc,, +fastapi/_compat/__pycache__/shared.cpython-312.pyc,, +fastapi/_compat/__pycache__/v2.cpython-312.pyc,, +fastapi/_compat/shared.py,sha256=yFZWOnzG1JRIPuLOk0eaBcrwP2bap8UkP5I0XBuVZck,6842 +fastapi/_compat/v2.py,sha256=B5OawqkcpTaaDLNZDHFlasGQyYFS7VFbwyDVwUTydE0,19597 +fastapi/applications.py,sha256=IO5F5FdRacBFXYxGPk7zPbwRCa-cxN74HHf0eMEp7xE,180536 +fastapi/background.py,sha256=fDNVXWBZniIQIxW3v-Sc99FT2p4RDKOOWW2fhOe4Nko,1793 +fastapi/cli.py,sha256=OYhZb0NR_deuT5ofyPF2NoNBzZDNOP8Salef2nk-HqA,418 +fastapi/concurrency.py,sha256=xHGDEOQAA6cvFEDX46oq3r2t1Zd4sVvreaRgdIE4juM,1489 +fastapi/datastructures.py,sha256=41qs2ZhTzORMGn7JSAF9qsiPY9XP4uGyGOMKhfzg4i4,5205 +fastapi/dependencies/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fastapi/dependencies/__pycache__/__init__.cpython-312.pyc,, +fastapi/dependencies/__pycache__/models.cpython-312.pyc,, +fastapi/dependencies/__pycache__/utils.cpython-312.pyc,, +fastapi/dependencies/models.py,sha256=TjJB2l6m-vhFkau7ysLdgcymZ6SdIJmlrJjqMJs5TZc,7317 +fastapi/dependencies/utils.py,sha256=rJQFCFUC7q765yUcOoSV78GCyTGfS2ePLbMvJ2B-yLc,38549 +fastapi/encoders.py,sha256=uQfXjliV2O93wy7sng3pUuLZw9UOw8HNsAHb2B1ZfEs,11004 +fastapi/exception_handlers.py,sha256=YVcT8Zy021VYYeecgdyh5YEUjEIHKcLspbkSf4OfbJI,1275 
+fastapi/exceptions.py,sha256=enNT5h_wDyzY90qA4a_VqMDRSUNHTd1LX_vihMNa-LE,6973 +fastapi/logger.py,sha256=I9NNi3ov8AcqbsbC9wl1X-hdItKgYt2XTrx1f99Zpl4,54 +fastapi/middleware/__init__.py,sha256=oQDxiFVcc1fYJUOIFvphnK7pTT5kktmfL32QXpBFvvo,58 +fastapi/middleware/__pycache__/__init__.cpython-312.pyc,, +fastapi/middleware/__pycache__/asyncexitstack.cpython-312.pyc,, +fastapi/middleware/__pycache__/cors.cpython-312.pyc,, +fastapi/middleware/__pycache__/gzip.cpython-312.pyc,, +fastapi/middleware/__pycache__/httpsredirect.cpython-312.pyc,, +fastapi/middleware/__pycache__/trustedhost.cpython-312.pyc,, +fastapi/middleware/__pycache__/wsgi.cpython-312.pyc,, +fastapi/middleware/asyncexitstack.py,sha256=RKGlQpGzg3GLosqVhrxBy_NCZ9qJS7zQeNHt5Y3x-00,637 +fastapi/middleware/cors.py,sha256=ynwjWQZoc_vbhzZ3_ZXceoaSrslHFHPdoM52rXr0WUU,79 +fastapi/middleware/gzip.py,sha256=xM5PcsH8QlAimZw4VDvcmTnqQamslThsfe3CVN2voa0,79 +fastapi/middleware/httpsredirect.py,sha256=rL8eXMnmLijwVkH7_400zHri1AekfeBd6D6qs8ix950,115 +fastapi/middleware/trustedhost.py,sha256=eE5XGRxGa7c5zPnMJDGp3BxaL25k5iVQlhnv-Pk0Pss,109 +fastapi/middleware/wsgi.py,sha256=Z3Ue-7wni4lUZMvH3G9ek__acgYdJstbnpZX_HQAboY,79 +fastapi/openapi/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fastapi/openapi/__pycache__/__init__.cpython-312.pyc,, +fastapi/openapi/__pycache__/constants.cpython-312.pyc,, +fastapi/openapi/__pycache__/docs.cpython-312.pyc,, +fastapi/openapi/__pycache__/models.cpython-312.pyc,, +fastapi/openapi/__pycache__/utils.cpython-312.pyc,, +fastapi/openapi/constants.py,sha256=adGzmis1L1HJRTE3kJ5fmHS_Noq6tIY6pWv_SFzoFDU,153 +fastapi/openapi/docs.py,sha256=wqcXZOhBdnf2pilVNyAIfjKAhfH9MXaQiitwZeJCR7I,10335 +fastapi/openapi/models.py,sha256=xJfPRE7DqNvtqgdouXbtMCCLBrZ-4Bd87QaA_WPUVTA,15419 +fastapi/openapi/utils.py,sha256=HsOqZ8uWSTUYL18jhYI0gU-A1nLHCNu3k3UEIAVzRyY,23795 +fastapi/param_functions.py,sha256=O8bsr2xM8XODE0wjtev8sZLl3Tt_glpdQteM2relVrU,64466 +fastapi/params.py,sha256=YS7z57t0N4H8Rdogx1sU6R-KZDcLn5y46SGz8z5lX-s,26982 +fastapi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fastapi/requests.py,sha256=zayepKFcienBllv3snmWI20Gk0oHNVLU4DDhqXBb4LU,142 +fastapi/responses.py,sha256=QNQQlwpKhQoIPZTTWkpc9d_QGeGZ_aVQPaDV3nQ8m7c,1761 +fastapi/routing.py,sha256=dHZW6NLzbvD7jZT2N-V6fj1Irbe8HR-FDeiBNSuTHHs,178746 +fastapi/security/__init__.py,sha256=bO8pNmxqVRXUjfl2mOKiVZLn0FpBQ61VUYVjmppnbJw,881 +fastapi/security/__pycache__/__init__.cpython-312.pyc,, +fastapi/security/__pycache__/api_key.cpython-312.pyc,, +fastapi/security/__pycache__/base.cpython-312.pyc,, +fastapi/security/__pycache__/http.cpython-312.pyc,, +fastapi/security/__pycache__/oauth2.cpython-312.pyc,, +fastapi/security/__pycache__/open_id_connect_url.cpython-312.pyc,, +fastapi/security/__pycache__/utils.cpython-312.pyc,, +fastapi/security/api_key.py,sha256=5AriUhrA_KgdtJRJ_BtCDgcTFOUlUUvDSultdIfdApc,9799 +fastapi/security/base.py,sha256=dl4pvbC-RxjfbWgPtCWd8MVU-7CB2SZ22rJDXVCXO6c,141 +fastapi/security/http.py,sha256=gckOhSa1ubLpARU819pxKiZZnmnyg_co6AwQyNE8yxw,13518 +fastapi/security/oauth2.py,sha256=8sU0yRncO_1mK8rdUES1GRijPawi2ZGwLGWphWeS02w,22477 +fastapi/security/open_id_connect_url.py,sha256=pFvSVESThhjYXSDWPlFGtm9bN62JXHzuwnVfmtyNcZE,3158 +fastapi/security/utils.py,sha256=Gk6KGztJnYqvYFTmuQO7ow_icayiqP3HL762ZFRQjfU,286 +fastapi/staticfiles.py,sha256=iirGIt3sdY2QZXd36ijs3Cj-T0FuGFda3cd90kM9Ikw,69 +fastapi/templating.py,sha256=4zsuTWgcjcEainMJFAlW6-gnslm6AgOS1SiiDWfmQxk,76 +fastapi/testclient.py,sha256=nBvaAmX66YldReJNZXPOk1sfuo2Q6hs8bOvIaCep6LQ,66 
+fastapi/types.py,sha256=W0HOmfeZw_3PcMDOa6GA-Or9okP9hf_260UkbCfKHY4,455 +fastapi/utils.py,sha256=sl-ddHjWbQiLUNLFiG93IXStAZgp8NSFwJsjo41mb04,5230 +fastapi/websockets.py,sha256=419uncYObEKZG0YcrXscfQQYLSWoE10jqxVMetGdR98,222 diff --git a/.venv/lib/python3.12/site-packages/fastapi-0.128.0.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/fastapi-0.128.0.dist-info/WHEEL new file mode 100644 index 0000000..2efd4ed --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi-0.128.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: pdm-backend (2.4.6) +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.venv/lib/python3.12/site-packages/fastapi-0.128.0.dist-info/entry_points.txt b/.venv/lib/python3.12/site-packages/fastapi-0.128.0.dist-info/entry_points.txt new file mode 100644 index 0000000..b81849e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi-0.128.0.dist-info/entry_points.txt @@ -0,0 +1,5 @@ +[console_scripts] +fastapi = fastapi.cli:main + +[gui_scripts] + diff --git a/.venv/lib/python3.12/site-packages/fastapi-0.128.0.dist-info/licenses/LICENSE b/.venv/lib/python3.12/site-packages/fastapi-0.128.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..3e92463 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi-0.128.0.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2018 Sebastián Ramírez + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
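Each RECORD line above pairs an installed path with a `sha256=` digest and a size in bytes; per the wheel RECORD format, the digest is the urlsafe-base64 encoding of the file's SHA-256 with the trailing `=` padding stripped (entries such as the `__pycache__` ones legitimately leave the hash and size columns empty). A minimal sketch of recomputing such a digest for verification follows — the helper name and the example path are illustrative, not part of the package:

```python
import base64
import hashlib


def record_hash(path: str) -> str:
    """Recompute a RECORD-style digest: SHA-256, urlsafe-base64, padding stripped."""
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")


# Illustrative usage: compare against the corresponding line in RECORD above.
# print(record_hash(".venv/lib/python3.12/site-packages/fastapi/__init__.py"))
```

The `[console_scripts]` table in entry_points.txt above works alongside this metadata: installing the wheel generates a `fastapi` launcher on PATH that simply imports and calls `fastapi.cli:main`.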
diff --git a/.venv/lib/python3.12/site-packages/fastapi/__init__.py b/.venv/lib/python3.12/site-packages/fastapi/__init__.py new file mode 100644 index 0000000..6133787 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/__init__.py @@ -0,0 +1,25 @@ +"""FastAPI framework, high performance, easy to learn, fast to code, ready for production""" + +__version__ = "0.128.0" + +from starlette import status as status + +from .applications import FastAPI as FastAPI +from .background import BackgroundTasks as BackgroundTasks +from .datastructures import UploadFile as UploadFile +from .exceptions import HTTPException as HTTPException +from .exceptions import WebSocketException as WebSocketException +from .param_functions import Body as Body +from .param_functions import Cookie as Cookie +from .param_functions import Depends as Depends +from .param_functions import File as File +from .param_functions import Form as Form +from .param_functions import Header as Header +from .param_functions import Path as Path +from .param_functions import Query as Query +from .param_functions import Security as Security +from .requests import Request as Request +from .responses import Response as Response +from .routing import APIRouter as APIRouter +from .websockets import WebSocket as WebSocket +from .websockets import WebSocketDisconnect as WebSocketDisconnect diff --git a/.venv/lib/python3.12/site-packages/fastapi/__main__.py b/.venv/lib/python3.12/site-packages/fastapi/__main__.py new file mode 100644 index 0000000..fc36465 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/__main__.py @@ -0,0 +1,3 @@ +from fastapi.cli import main + +main() diff --git a/.venv/lib/python3.12/site-packages/fastapi/_compat/__init__.py b/.venv/lib/python3.12/site-packages/fastapi/_compat/__init__.py new file mode 100644 index 0000000..3dfaf9b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/_compat/__init__.py @@ -0,0 +1,41 @@ +from .shared import PYDANTIC_V2 as PYDANTIC_V2 +from .shared import PYDANTIC_VERSION_MINOR_TUPLE as PYDANTIC_VERSION_MINOR_TUPLE +from .shared import annotation_is_pydantic_v1 as annotation_is_pydantic_v1 +from .shared import field_annotation_is_scalar as field_annotation_is_scalar +from .shared import is_pydantic_v1_model_class as is_pydantic_v1_model_class +from .shared import is_pydantic_v1_model_instance as is_pydantic_v1_model_instance +from .shared import ( + is_uploadfile_or_nonable_uploadfile_annotation as is_uploadfile_or_nonable_uploadfile_annotation, +) +from .shared import ( + is_uploadfile_sequence_annotation as is_uploadfile_sequence_annotation, +) +from .shared import lenient_issubclass as lenient_issubclass +from .shared import sequence_types as sequence_types +from .shared import value_is_sequence as value_is_sequence +from .v2 import BaseConfig as BaseConfig +from .v2 import ModelField as ModelField +from .v2 import PydanticSchemaGenerationError as PydanticSchemaGenerationError +from .v2 import RequiredParam as RequiredParam +from .v2 import Undefined as Undefined +from .v2 import UndefinedType as UndefinedType +from .v2 import Url as Url +from .v2 import Validator as Validator +from .v2 import _regenerate_error_with_loc as _regenerate_error_with_loc +from .v2 import copy_field_info as copy_field_info +from .v2 import create_body_model as create_body_model +from .v2 import evaluate_forwardref as evaluate_forwardref +from .v2 import get_cached_model_fields as get_cached_model_fields +from .v2 import get_compat_model_name_map as get_compat_model_name_map 
+from .v2 import get_definitions as get_definitions +from .v2 import get_missing_field_error as get_missing_field_error +from .v2 import get_schema_from_model_field as get_schema_from_model_field +from .v2 import is_bytes_field as is_bytes_field +from .v2 import is_bytes_sequence_field as is_bytes_sequence_field +from .v2 import is_scalar_field as is_scalar_field +from .v2 import is_scalar_sequence_field as is_scalar_sequence_field +from .v2 import is_sequence_field as is_sequence_field +from .v2 import serialize_sequence_value as serialize_sequence_value +from .v2 import ( + with_info_plain_validator_function as with_info_plain_validator_function, +) diff --git a/.venv/lib/python3.12/site-packages/fastapi/_compat/shared.py b/.venv/lib/python3.12/site-packages/fastapi/_compat/shared.py new file mode 100644 index 0000000..419b58f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/_compat/shared.py @@ -0,0 +1,206 @@ +import sys +import types +import typing +import warnings +from collections import deque +from collections.abc import Mapping, Sequence +from dataclasses import is_dataclass +from typing import ( + Annotated, + Any, + Union, +) + +from fastapi.types import UnionType +from pydantic import BaseModel +from pydantic.version import VERSION as PYDANTIC_VERSION +from starlette.datastructures import UploadFile +from typing_extensions import get_args, get_origin + +# Copy from Pydantic v2, compatible with v1 +if sys.version_info < (3, 10): + WithArgsTypes: tuple[Any, ...] = (typing._GenericAlias, types.GenericAlias) # type: ignore[attr-defined] +else: + WithArgsTypes: tuple[Any, ...] = ( + typing._GenericAlias, # type: ignore[attr-defined] + types.GenericAlias, + types.UnionType, + ) # pyright: ignore[reportAttributeAccessIssue] + +PYDANTIC_VERSION_MINOR_TUPLE = tuple(int(x) for x in PYDANTIC_VERSION.split(".")[:2]) +PYDANTIC_V2 = PYDANTIC_VERSION_MINOR_TUPLE[0] == 2 + + +sequence_annotation_to_type = { + Sequence: list, + list: list, + tuple: tuple, + set: set, + frozenset: frozenset, + deque: deque, +} + +sequence_types = tuple(sequence_annotation_to_type.keys()) + +Url: type[Any] + + +# Copy of Pydantic v2, compatible with v1 +def lenient_issubclass( + cls: Any, class_or_tuple: Union[type[Any], tuple[type[Any], ...], None] +) -> bool: + try: + return isinstance(cls, type) and issubclass(cls, class_or_tuple) # type: ignore[arg-type] + except TypeError: # pragma: no cover + if isinstance(cls, WithArgsTypes): + return False + raise # pragma: no cover + + +def _annotation_is_sequence(annotation: Union[type[Any], None]) -> bool: + if lenient_issubclass(annotation, (str, bytes)): + return False + return lenient_issubclass(annotation, sequence_types) + + +def field_annotation_is_sequence(annotation: Union[type[Any], None]) -> bool: + origin = get_origin(annotation) + if origin is Union or origin is UnionType: + for arg in get_args(annotation): + if field_annotation_is_sequence(arg): + return True + return False + return _annotation_is_sequence(annotation) or _annotation_is_sequence( + get_origin(annotation) + ) + + +def value_is_sequence(value: Any) -> bool: + return isinstance(value, sequence_types) and not isinstance(value, (str, bytes)) + + +def _annotation_is_complex(annotation: Union[type[Any], None]) -> bool: + return ( + lenient_issubclass(annotation, (BaseModel, Mapping, UploadFile)) + or _annotation_is_sequence(annotation) + or is_dataclass(annotation) + ) + + +def field_annotation_is_complex(annotation: Union[type[Any], None]) -> bool: + origin = get_origin(annotation) + 
if origin is Union or origin is UnionType: + return any(field_annotation_is_complex(arg) for arg in get_args(annotation)) + + if origin is Annotated: + return field_annotation_is_complex(get_args(annotation)[0]) + + return ( + _annotation_is_complex(annotation) + or _annotation_is_complex(origin) + or hasattr(origin, "__pydantic_core_schema__") + or hasattr(origin, "__get_pydantic_core_schema__") + ) + + +def field_annotation_is_scalar(annotation: Any) -> bool: + # handle Ellipsis here to make tuple[int, ...] work nicely + return annotation is Ellipsis or not field_annotation_is_complex(annotation) + + +def field_annotation_is_scalar_sequence(annotation: Union[type[Any], None]) -> bool: + origin = get_origin(annotation) + if origin is Union or origin is UnionType: + at_least_one_scalar_sequence = False + for arg in get_args(annotation): + if field_annotation_is_scalar_sequence(arg): + at_least_one_scalar_sequence = True + continue + elif not field_annotation_is_scalar(arg): + return False + return at_least_one_scalar_sequence + return field_annotation_is_sequence(annotation) and all( + field_annotation_is_scalar(sub_annotation) + for sub_annotation in get_args(annotation) + ) + + +def is_bytes_or_nonable_bytes_annotation(annotation: Any) -> bool: + if lenient_issubclass(annotation, bytes): + return True + origin = get_origin(annotation) + if origin is Union or origin is UnionType: + for arg in get_args(annotation): + if lenient_issubclass(arg, bytes): + return True + return False + + +def is_uploadfile_or_nonable_uploadfile_annotation(annotation: Any) -> bool: + if lenient_issubclass(annotation, UploadFile): + return True + origin = get_origin(annotation) + if origin is Union or origin is UnionType: + for arg in get_args(annotation): + if lenient_issubclass(arg, UploadFile): + return True + return False + + +def is_bytes_sequence_annotation(annotation: Any) -> bool: + origin = get_origin(annotation) + if origin is Union or origin is UnionType: + at_least_one = False + for arg in get_args(annotation): + if is_bytes_sequence_annotation(arg): + at_least_one = True + continue + return at_least_one + return field_annotation_is_sequence(annotation) and all( + is_bytes_or_nonable_bytes_annotation(sub_annotation) + for sub_annotation in get_args(annotation) + ) + + +def is_uploadfile_sequence_annotation(annotation: Any) -> bool: + origin = get_origin(annotation) + if origin is Union or origin is UnionType: + at_least_one = False + for arg in get_args(annotation): + if is_uploadfile_sequence_annotation(arg): + at_least_one = True + continue + return at_least_one + return field_annotation_is_sequence(annotation) and all( + is_uploadfile_or_nonable_uploadfile_annotation(sub_annotation) + for sub_annotation in get_args(annotation) + ) + + +def is_pydantic_v1_model_instance(obj: Any) -> bool: + with warnings.catch_warnings(): + warnings.simplefilter("ignore", UserWarning) + from pydantic import v1 + return isinstance(obj, v1.BaseModel) + + +def is_pydantic_v1_model_class(cls: Any) -> bool: + with warnings.catch_warnings(): + warnings.simplefilter("ignore", UserWarning) + from pydantic import v1 + return lenient_issubclass(cls, v1.BaseModel) + + +def annotation_is_pydantic_v1(annotation: Any) -> bool: + if is_pydantic_v1_model_class(annotation): + return True + origin = get_origin(annotation) + if origin is Union or origin is UnionType: + for arg in get_args(annotation): + if is_pydantic_v1_model_class(arg): + return True + if field_annotation_is_sequence(annotation): + for sub_annotation in 
get_args(annotation): + if annotation_is_pydantic_v1(sub_annotation): + return True + return False diff --git a/.venv/lib/python3.12/site-packages/fastapi/_compat/v2.py b/.venv/lib/python3.12/site-packages/fastapi/_compat/v2.py new file mode 100644 index 0000000..25b6814 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/_compat/v2.py @@ -0,0 +1,568 @@ +import re +import warnings +from collections.abc import Sequence +from copy import copy, deepcopy +from dataclasses import dataclass, is_dataclass +from enum import Enum +from functools import lru_cache +from typing import ( + Annotated, + Any, + Union, + cast, +) + +from fastapi._compat import shared +from fastapi.openapi.constants import REF_TEMPLATE +from fastapi.types import IncEx, ModelNameMap, UnionType +from pydantic import BaseModel, ConfigDict, Field, TypeAdapter, create_model +from pydantic import PydanticSchemaGenerationError as PydanticSchemaGenerationError +from pydantic import PydanticUndefinedAnnotation as PydanticUndefinedAnnotation +from pydantic import ValidationError as ValidationError +from pydantic._internal._schema_generation_shared import ( # type: ignore[attr-defined] + GetJsonSchemaHandler as GetJsonSchemaHandler, +) +from pydantic._internal._typing_extra import eval_type_lenient +from pydantic._internal._utils import lenient_issubclass as lenient_issubclass +from pydantic.fields import FieldInfo as FieldInfo +from pydantic.json_schema import GenerateJsonSchema as GenerateJsonSchema +from pydantic.json_schema import JsonSchemaValue as JsonSchemaValue +from pydantic_core import CoreSchema as CoreSchema +from pydantic_core import PydanticUndefined, PydanticUndefinedType +from pydantic_core import Url as Url +from typing_extensions import Literal, get_args, get_origin + +try: + from pydantic_core.core_schema import ( + with_info_plain_validator_function as with_info_plain_validator_function, + ) +except ImportError: # pragma: no cover + from pydantic_core.core_schema import ( + general_plain_validator_function as with_info_plain_validator_function, # noqa: F401 + ) + +RequiredParam = PydanticUndefined +Undefined = PydanticUndefined +UndefinedType = PydanticUndefinedType +evaluate_forwardref = eval_type_lenient +Validator = Any + +# TODO: remove when dropping support for Pydantic < v2.12.3 +_Attrs = { + "default": ..., + "default_factory": None, + "alias": None, + "alias_priority": None, + "validation_alias": None, + "serialization_alias": None, + "title": None, + "field_title_generator": None, + "description": None, + "examples": None, + "exclude": None, + "exclude_if": None, + "discriminator": None, + "deprecated": None, + "json_schema_extra": None, + "frozen": None, + "validate_default": None, + "repr": True, + "init": None, + "init_var": None, + "kw_only": None, +} + + +# TODO: remove when dropping support for Pydantic < v2.12.3 +def asdict(field_info: FieldInfo) -> dict[str, Any]: + attributes = {} + for attr in _Attrs: + value = getattr(field_info, attr, Undefined) + if value is not Undefined: + attributes[attr] = value + return { + "annotation": field_info.annotation, + "metadata": field_info.metadata, + "attributes": attributes, + } + + +class BaseConfig: + pass + + +class ErrorWrapper(Exception): + pass + + +@dataclass +class ModelField: + field_info: FieldInfo + name: str + mode: Literal["validation", "serialization"] = "validation" + config: Union[ConfigDict, None] = None + + @property + def alias(self) -> str: + a = self.field_info.alias + return a if a is not None else self.name + + @property + 
def validation_alias(self) -> Union[str, None]: + va = self.field_info.validation_alias + if isinstance(va, str) and va: + return va + return None + + @property + def serialization_alias(self) -> Union[str, None]: + sa = self.field_info.serialization_alias + return sa or None + + @property + def required(self) -> bool: + return self.field_info.is_required() + + @property + def default(self) -> Any: + return self.get_default() + + @property + def type_(self) -> Any: + return self.field_info.annotation + + def __post_init__(self) -> None: + with warnings.catch_warnings(): + # Pydantic >= 2.12.0 warns about field specific metadata that is unused + # (e.g. `TypeAdapter(Annotated[int, Field(alias='b')])`). In some cases, we + # end up building the type adapter from a model field annotation so we + # need to ignore the warning: + if shared.PYDANTIC_VERSION_MINOR_TUPLE >= (2, 12): + from pydantic.warnings import UnsupportedFieldAttributeWarning + + warnings.simplefilter( + "ignore", category=UnsupportedFieldAttributeWarning + ) + # TODO: remove after dropping support for Python 3.8 and + # setting the min Pydantic to v2.12.3 that adds asdict() + field_dict = asdict(self.field_info) + annotated_args = ( + field_dict["annotation"], + *field_dict["metadata"], + # this FieldInfo needs to be created again so that it doesn't include + # the old field info metadata and only the rest of the attributes + Field(**field_dict["attributes"]), + ) + self._type_adapter: TypeAdapter[Any] = TypeAdapter( + Annotated[annotated_args], + config=self.config, + ) + + def get_default(self) -> Any: + if self.field_info.is_required(): + return Undefined + return self.field_info.get_default(call_default_factory=True) + + def validate( + self, + value: Any, + values: dict[str, Any] = {}, # noqa: B006 + *, + loc: tuple[Union[int, str], ...] = (), + ) -> tuple[Any, Union[list[dict[str, Any]], None]]: + try: + return ( + self._type_adapter.validate_python(value, from_attributes=True), + None, + ) + except ValidationError as exc: + return None, _regenerate_error_with_loc( + errors=exc.errors(include_url=False), loc_prefix=loc + ) + + def serialize( + self, + value: Any, + *, + mode: Literal["json", "python"] = "json", + include: Union[IncEx, None] = None, + exclude: Union[IncEx, None] = None, + by_alias: bool = True, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + ) -> Any: + # What calls this code passes a value that already called + # self._type_adapter.validate_python(value) + return self._type_adapter.dump_python( + value, + mode=mode, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + + def __hash__(self) -> int: + # Each ModelField is unique for our purposes, to allow making a dict from + # ModelField to its JSON Schema. 
+ return id(self) + + +def _has_computed_fields(field: ModelField) -> bool: + computed_fields = field._type_adapter.core_schema.get("schema", {}).get( + "computed_fields", [] + ) + return len(computed_fields) > 0 + + +def get_schema_from_model_field( + *, + field: ModelField, + model_name_map: ModelNameMap, + field_mapping: dict[ + tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue + ], + separate_input_output_schemas: bool = True, +) -> dict[str, Any]: + override_mode: Union[Literal["validation"], None] = ( + None + if (separate_input_output_schemas or _has_computed_fields(field)) + else "validation" + ) + field_alias = ( + (field.validation_alias or field.alias) + if field.mode == "validation" + else (field.serialization_alias or field.alias) + ) + + # This expects that GenerateJsonSchema was already used to generate the definitions + json_schema = field_mapping[(field, override_mode or field.mode)] + if "$ref" not in json_schema: + # TODO remove when deprecating Pydantic v1 + # Ref: https://github.com/pydantic/pydantic/blob/d61792cc42c80b13b23e3ffa74bc37ec7c77f7d1/pydantic/schema.py#L207 + json_schema["title"] = field.field_info.title or field_alias.title().replace( + "_", " " + ) + return json_schema + + +def get_definitions( + *, + fields: Sequence[ModelField], + model_name_map: ModelNameMap, + separate_input_output_schemas: bool = True, +) -> tuple[ + dict[tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue], + dict[str, dict[str, Any]], +]: + schema_generator = GenerateJsonSchema(ref_template=REF_TEMPLATE) + validation_fields = [field for field in fields if field.mode == "validation"] + serialization_fields = [field for field in fields if field.mode == "serialization"] + flat_validation_models = get_flat_models_from_fields( + validation_fields, known_models=set() + ) + flat_serialization_models = get_flat_models_from_fields( + serialization_fields, known_models=set() + ) + flat_validation_model_fields = [ + ModelField( + field_info=FieldInfo(annotation=model), + name=model.__name__, + mode="validation", + ) + for model in flat_validation_models + ] + flat_serialization_model_fields = [ + ModelField( + field_info=FieldInfo(annotation=model), + name=model.__name__, + mode="serialization", + ) + for model in flat_serialization_models + ] + flat_model_fields = flat_validation_model_fields + flat_serialization_model_fields + input_types = {f.type_ for f in fields} + unique_flat_model_fields = { + f for f in flat_model_fields if f.type_ not in input_types + } + inputs = [ + ( + field, + ( + field.mode + if (separate_input_output_schemas or _has_computed_fields(field)) + else "validation" + ), + field._type_adapter.core_schema, + ) + for field in list(fields) + list(unique_flat_model_fields) + ] + field_mapping, definitions = schema_generator.generate_definitions(inputs=inputs) + for item_def in cast(dict[str, dict[str, Any]], definitions).values(): + if "description" in item_def: + item_description = cast(str, item_def["description"]).split("\f")[0] + item_def["description"] = item_description + new_mapping, new_definitions = _remap_definitions_and_field_mappings( + model_name_map=model_name_map, + definitions=definitions, # type: ignore[arg-type] + field_mapping=field_mapping, + ) + return new_mapping, new_definitions + + +def _replace_refs( + *, + schema: dict[str, Any], + old_name_to_new_name_map: dict[str, str], +) -> dict[str, Any]: + new_schema = deepcopy(schema) + for key, value in new_schema.items(): + if key == "$ref": + value = 
schema["$ref"] + if isinstance(value, str): + ref_name = schema["$ref"].split("/")[-1] + if ref_name in old_name_to_new_name_map: + new_name = old_name_to_new_name_map[ref_name] + new_schema["$ref"] = REF_TEMPLATE.format(model=new_name) + continue + if isinstance(value, dict): + new_schema[key] = _replace_refs( + schema=value, + old_name_to_new_name_map=old_name_to_new_name_map, + ) + elif isinstance(value, list): + new_value = [] + for item in value: + if isinstance(item, dict): + new_item = _replace_refs( + schema=item, + old_name_to_new_name_map=old_name_to_new_name_map, + ) + new_value.append(new_item) + + else: + new_value.append(item) + new_schema[key] = new_value + return new_schema + + +def _remap_definitions_and_field_mappings( + *, + model_name_map: ModelNameMap, + definitions: dict[str, Any], + field_mapping: dict[ + tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue + ], +) -> tuple[ + dict[tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue], + dict[str, Any], +]: + old_name_to_new_name_map = {} + for field_key, schema in field_mapping.items(): + model = field_key[0].type_ + if model not in model_name_map or "$ref" not in schema: + continue + new_name = model_name_map[model] + old_name = schema["$ref"].split("/")[-1] + if old_name in {f"{new_name}-Input", f"{new_name}-Output"}: + continue + old_name_to_new_name_map[old_name] = new_name + + new_field_mapping: dict[ + tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue + ] = {} + for field_key, schema in field_mapping.items(): + new_schema = _replace_refs( + schema=schema, + old_name_to_new_name_map=old_name_to_new_name_map, + ) + new_field_mapping[field_key] = new_schema + + new_definitions = {} + for key, value in definitions.items(): + if key in old_name_to_new_name_map: + new_key = old_name_to_new_name_map[key] + else: + new_key = key + new_value = _replace_refs( + schema=value, + old_name_to_new_name_map=old_name_to_new_name_map, + ) + new_definitions[new_key] = new_value + return new_field_mapping, new_definitions + + +def is_scalar_field(field: ModelField) -> bool: + from fastapi import params + + return shared.field_annotation_is_scalar( + field.field_info.annotation + ) and not isinstance(field.field_info, params.Body) + + +def is_sequence_field(field: ModelField) -> bool: + return shared.field_annotation_is_sequence(field.field_info.annotation) + + +def is_scalar_sequence_field(field: ModelField) -> bool: + return shared.field_annotation_is_scalar_sequence(field.field_info.annotation) + + +def is_bytes_field(field: ModelField) -> bool: + return shared.is_bytes_or_nonable_bytes_annotation(field.type_) + + +def is_bytes_sequence_field(field: ModelField) -> bool: + return shared.is_bytes_sequence_annotation(field.type_) + + +def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo: + cls = type(field_info) + merged_field_info = cls.from_annotation(annotation) + new_field_info = copy(field_info) + new_field_info.metadata = merged_field_info.metadata + new_field_info.annotation = merged_field_info.annotation + return new_field_info + + +def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]: + origin_type = get_origin(field.field_info.annotation) or field.field_info.annotation + if origin_type is Union or origin_type is UnionType: # Handle optional sequences + union_args = get_args(field.field_info.annotation) + for union_arg in union_args: + if union_arg is type(None): + continue + origin_type = 
get_origin(union_arg) or union_arg + break + assert issubclass(origin_type, shared.sequence_types) # type: ignore[arg-type] + return shared.sequence_annotation_to_type[origin_type](value) # type: ignore[no-any-return,index] + + +def get_missing_field_error(loc: tuple[str, ...]) -> dict[str, Any]: + error = ValidationError.from_exception_data( + "Field required", [{"type": "missing", "loc": loc, "input": {}}] + ).errors(include_url=False)[0] + error["input"] = None + return error # type: ignore[return-value] + + +def create_body_model( + *, fields: Sequence[ModelField], model_name: str +) -> type[BaseModel]: + field_params = {f.name: (f.field_info.annotation, f.field_info) for f in fields} + BodyModel: type[BaseModel] = create_model(model_name, **field_params) # type: ignore[call-overload] + return BodyModel + + +def get_model_fields(model: type[BaseModel]) -> list[ModelField]: + model_fields: list[ModelField] = [] + for name, field_info in model.model_fields.items(): + type_ = field_info.annotation + if lenient_issubclass(type_, (BaseModel, dict)) or is_dataclass(type_): + model_config = None + else: + model_config = model.model_config + model_fields.append( + ModelField( + field_info=field_info, + name=name, + config=model_config, + ) + ) + return model_fields + + +@lru_cache +def get_cached_model_fields(model: type[BaseModel]) -> list[ModelField]: + return get_model_fields(model) # type: ignore[return-value] + + +# Duplicate of several schema functions from Pydantic v1 to make them compatible with +# Pydantic v2 and allow mixing the models + +TypeModelOrEnum = Union[type["BaseModel"], type[Enum]] +TypeModelSet = set[TypeModelOrEnum] + + +def normalize_name(name: str) -> str: + return re.sub(r"[^a-zA-Z0-9.\-_]", "_", name) + + +def get_model_name_map(unique_models: TypeModelSet) -> dict[TypeModelOrEnum, str]: + name_model_map = {} + for model in unique_models: + model_name = normalize_name(model.__name__) + name_model_map[model_name] = model + return {v: k for k, v in name_model_map.items()} + + +def get_compat_model_name_map(fields: list[ModelField]) -> ModelNameMap: + all_flat_models = set() + + v2_model_fields = [field for field in fields if isinstance(field, ModelField)] + v2_flat_models = get_flat_models_from_fields(v2_model_fields, known_models=set()) + all_flat_models = all_flat_models.union(v2_flat_models) # type: ignore[arg-type] + + model_name_map = get_model_name_map(all_flat_models) # type: ignore[arg-type] + return model_name_map + + +def get_flat_models_from_model( + model: type["BaseModel"], known_models: Union[TypeModelSet, None] = None +) -> TypeModelSet: + known_models = known_models or set() + fields = get_model_fields(model) + get_flat_models_from_fields(fields, known_models=known_models) + return known_models + + +def get_flat_models_from_annotation( + annotation: Any, known_models: TypeModelSet +) -> TypeModelSet: + origin = get_origin(annotation) + if origin is not None: + for arg in get_args(annotation): + if lenient_issubclass(arg, (BaseModel, Enum)) and arg not in known_models: + known_models.add(arg) + if lenient_issubclass(arg, BaseModel): + get_flat_models_from_model(arg, known_models=known_models) + else: + get_flat_models_from_annotation(arg, known_models=known_models) + return known_models + + +def get_flat_models_from_field( + field: ModelField, known_models: TypeModelSet +) -> TypeModelSet: + field_type = field.type_ + if lenient_issubclass(field_type, BaseModel): + if field_type in known_models: + return known_models + known_models.add(field_type) + 
get_flat_models_from_model(field_type, known_models=known_models) + elif lenient_issubclass(field_type, Enum): + known_models.add(field_type) + else: + get_flat_models_from_annotation(field_type, known_models=known_models) + return known_models + + +def get_flat_models_from_fields( + fields: Sequence[ModelField], known_models: TypeModelSet +) -> TypeModelSet: + for field in fields: + get_flat_models_from_field(field, known_models=known_models) + return known_models + + +def _regenerate_error_with_loc( + *, errors: Sequence[Any], loc_prefix: tuple[Union[str, int], ...] +) -> list[dict[str, Any]]: + updated_loc_errors: list[Any] = [ + {**err, "loc": loc_prefix + err.get("loc", ())} for err in errors + ] + + return updated_loc_errors diff --git a/.venv/lib/python3.12/site-packages/fastapi/applications.py b/.venv/lib/python3.12/site-packages/fastapi/applications.py new file mode 100644 index 0000000..54175cb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/applications.py @@ -0,0 +1,4669 @@ +from collections.abc import Awaitable, Coroutine, Sequence +from enum import Enum +from typing import ( + Annotated, + Any, + Callable, + Optional, + TypeVar, + Union, +) + +from annotated_doc import Doc +from fastapi import routing +from fastapi.datastructures import Default, DefaultPlaceholder +from fastapi.exception_handlers import ( + http_exception_handler, + request_validation_exception_handler, + websocket_request_validation_exception_handler, +) +from fastapi.exceptions import RequestValidationError, WebSocketRequestValidationError +from fastapi.logger import logger +from fastapi.middleware.asyncexitstack import AsyncExitStackMiddleware +from fastapi.openapi.docs import ( + get_redoc_html, + get_swagger_ui_html, + get_swagger_ui_oauth2_redirect_html, +) +from fastapi.openapi.utils import get_openapi +from fastapi.params import Depends +from fastapi.types import DecoratedCallable, IncEx +from fastapi.utils import generate_unique_id +from starlette.applications import Starlette +from starlette.datastructures import State +from starlette.exceptions import HTTPException +from starlette.middleware import Middleware +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.middleware.errors import ServerErrorMiddleware +from starlette.middleware.exceptions import ExceptionMiddleware +from starlette.requests import Request +from starlette.responses import HTMLResponse, JSONResponse, Response +from starlette.routing import BaseRoute +from starlette.types import ASGIApp, ExceptionHandler, Lifespan, Receive, Scope, Send +from typing_extensions import deprecated + +AppType = TypeVar("AppType", bound="FastAPI") + + +class FastAPI(Starlette): + """ + `FastAPI` app class, the main entrypoint to use FastAPI. + + Read more in the + [FastAPI docs for First Steps](https://fastapi.tiangolo.com/tutorial/first-steps/). + + ## Example + + ```python + from fastapi import FastAPI + + app = FastAPI() + ``` + """ + + def __init__( + self: AppType, + *, + debug: Annotated[ + bool, + Doc( + """ + Boolean indicating if debug tracebacks should be returned on server + errors. + + Read more in the + [Starlette docs for Applications](https://www.starlette.dev/applications/#instantiating-the-application). + """ + ), + ] = False, + routes: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited + from Starlette and supported for compatibility. + + --- + + A list of routes to serve incoming HTTP and WebSocket requests. 
+ """ + ), + deprecated( + """ + You normally wouldn't use this parameter with FastAPI, it is inherited + from Starlette and supported for compatibility. + + In FastAPI, you normally would use the *path operation methods*, + like `app.get()`, `app.post()`, etc. + """ + ), + ] = None, + title: Annotated[ + str, + Doc( + """ + The title of the API. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more in the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI(title="ChimichangApp") + ``` + """ + ), + ] = "FastAPI", + summary: Annotated[ + Optional[str], + Doc( + """ + A short summary of the API. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more in the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI(summary="Deadpond's favorite app. Nuff said.") + ``` + """ + ), + ] = None, + description: Annotated[ + str, + Doc( + ''' + A description of the API. Supports Markdown (using + [CommonMark syntax](https://commonmark.org/)). + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more in the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI( + description=""" + ChimichangApp API helps you do awesome stuff. 🚀 + + ## Items + + You can **read items**. + + ## Users + + You will be able to: + + * **Create users** (_not implemented_). + * **Read users** (_not implemented_). + + """ + ) + ``` + ''' + ), + ] = "", + version: Annotated[ + str, + Doc( + """ + The version of the API. + + **Note** This is the version of your application, not the version of + the OpenAPI specification nor the version of FastAPI being used. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more in the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI(version="0.0.1") + ``` + """ + ), + ] = "0.1.0", + openapi_url: Annotated[ + Optional[str], + Doc( + """ + The URL where the OpenAPI schema will be served from. + + If you set it to `None`, no OpenAPI schema will be served publicly, and + the default automatic endpoints `/docs` and `/redoc` will also be + disabled. + + Read more in the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#openapi-url). + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI(openapi_url="/api/v1/openapi.json") + ``` + """ + ), + ] = "/openapi.json", + openapi_tags: Annotated[ + Optional[list[dict[str, Any]]], + Doc( + """ + A list of tags used by OpenAPI, these are the same `tags` you can set + in the *path operations*, like: + + * `@app.get("/users/", tags=["users"])` + * `@app.get("/items/", tags=["items"])` + + The order of the tags can be used to specify the order shown in + tools like Swagger UI, used in the automatic path `/docs`. + + It's not required to specify all the tags used. + + The tags that are not declared MAY be organized randomly or based + on the tools' logic. Each tag name in the list MUST be unique. 
+ + The value of each item is a `dict` containing: + + * `name`: The name of the tag. + * `description`: A short description of the tag. + [CommonMark syntax](https://commonmark.org/) MAY be used for rich + text representation. + * `externalDocs`: Additional external documentation for this tag. If + provided, it would contain a `dict` with: + * `description`: A short description of the target documentation. + [CommonMark syntax](https://commonmark.org/) MAY be used for + rich text representation. + * `url`: The URL for the target documentation. Value MUST be in + the form of a URL. + + Read more in the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-tags). + + **Example** + + ```python + from fastapi import FastAPI + + tags_metadata = [ + { + "name": "users", + "description": "Operations with users. The **login** logic is also here.", + }, + { + "name": "items", + "description": "Manage items. So _fancy_ they have their own docs.", + "externalDocs": { + "description": "Items external docs", + "url": "https://fastapi.tiangolo.com/", + }, + }, + ] + + app = FastAPI(openapi_tags=tags_metadata) + ``` + """ + ), + ] = None, + servers: Annotated[ + Optional[list[dict[str, Union[str, Any]]]], + Doc( + """ + A `list` of `dict`s with connectivity information to a target server. + + You would use it, for example, if your application is served from + different domains and you want to use the same Swagger UI in the + browser to interact with each of them (instead of having multiple + browser tabs open). Or if you want to leave fixed the possible URLs. + + If the servers `list` is not provided, or is an empty `list`, the + `servers` property in the generated OpenAPI will be: + + * a `dict` with a `url` value of the application's mounting point + (`root_path`) if it's different from `/`. + * otherwise, the `servers` property will be omitted from the OpenAPI + schema. + + Each item in the `list` is a `dict` containing: + + * `url`: A URL to the target host. This URL supports Server Variables + and MAY be relative, to indicate that the host location is relative + to the location where the OpenAPI document is being served. Variable + substitutions will be made when a variable is named in `{`brackets`}`. + * `description`: An optional string describing the host designated by + the URL. [CommonMark syntax](https://commonmark.org/) MAY be used for + rich text representation. + * `variables`: A `dict` between a variable name and its value. The value + is used for substitution in the server's URL template. + + Read more in the + [FastAPI docs for Behind a Proxy](https://fastapi.tiangolo.com/advanced/behind-a-proxy/#additional-servers). + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI( + servers=[ + {"url": "https://stag.example.com", "description": "Staging environment"}, + {"url": "https://prod.example.com", "description": "Production environment"}, + ] + ) + ``` + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of global dependencies, they will be applied to each + *path operation*, including in sub-routers. + + Read more about it in the + [FastAPI docs for Global Dependencies](https://fastapi.tiangolo.com/tutorial/dependencies/global-dependencies/). 
+ + **Example** + + ```python + from fastapi import Depends, FastAPI + + from .dependencies import func_dep_1, func_dep_2 + + app = FastAPI(dependencies=[Depends(func_dep_1), Depends(func_dep_2)]) + ``` + """ + ), + ] = None, + default_response_class: Annotated[ + type[Response], + Doc( + """ + The default response class to be used. + + Read more in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class). + + **Example** + + ```python + from fastapi import FastAPI + from fastapi.responses import ORJSONResponse + + app = FastAPI(default_response_class=ORJSONResponse) + ``` + """ + ), + ] = Default(JSONResponse), + redirect_slashes: Annotated[ + bool, + Doc( + """ + Whether to detect and redirect slashes in URLs when the client doesn't + use the same format. + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI(redirect_slashes=True) # the default + + @app.get("/items/") + async def read_items(): + return [{"item_id": "Foo"}] + ``` + + With this app, if a client goes to `/items` (without a trailing slash), + they will be automatically redirected with an HTTP status code of 307 + to `/items/`. + """ + ), + ] = True, + docs_url: Annotated[ + Optional[str], + Doc( + """ + The path to the automatic interactive API documentation. + It is handled in the browser by Swagger UI. + + The default URL is `/docs`. You can disable it by setting it to `None`. + + If `openapi_url` is set to `None`, this will be automatically disabled. + + Read more in the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#docs-urls). + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI(docs_url="/documentation", redoc_url=None) + ``` + """ + ), + ] = "/docs", + redoc_url: Annotated[ + Optional[str], + Doc( + """ + The path to the alternative automatic interactive API documentation + provided by ReDoc. + + The default URL is `/redoc`. You can disable it by setting it to `None`. + + If `openapi_url` is set to `None`, this will be automatically disabled. + + Read more in the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#docs-urls). + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI(docs_url="/documentation", redoc_url="redocumentation") + ``` + """ + ), + ] = "/redoc", + swagger_ui_oauth2_redirect_url: Annotated[ + Optional[str], + Doc( + """ + The OAuth2 redirect endpoint for the Swagger UI. + + By default it is `/docs/oauth2-redirect`. + + This is only used if you use OAuth2 (with the "Authorize" button) + with Swagger UI. + """ + ), + ] = "/docs/oauth2-redirect", + swagger_ui_init_oauth: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + OAuth2 configuration for the Swagger UI, by default shown at `/docs`. + + Read more about the available configuration options in the + [Swagger UI docs](https://swagger.io/docs/open-source-tools/swagger-ui/usage/oauth2/). + """ + ), + ] = None, + middleware: Annotated[ + Optional[Sequence[Middleware]], + Doc( + """ + List of middleware to be added when creating the application. + + In FastAPI you would normally do this with `app.add_middleware()` + instead. + + Read more in the + [FastAPI docs for Middleware](https://fastapi.tiangolo.com/tutorial/middleware/). 
+ """ + ), + ] = None, + exception_handlers: Annotated[ + Optional[ + dict[ + Union[int, type[Exception]], + Callable[[Request, Any], Coroutine[Any, Any, Response]], + ] + ], + Doc( + """ + A dictionary with handlers for exceptions. + + In FastAPI, you would normally use the decorator + `@app.exception_handler()`. + + Read more in the + [FastAPI docs for Handling Errors](https://fastapi.tiangolo.com/tutorial/handling-errors/). + """ + ), + ] = None, + on_startup: Annotated[ + Optional[Sequence[Callable[[], Any]]], + Doc( + """ + A list of startup event handler functions. + + You should instead use the `lifespan` handlers. + + Read more in the [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). + """ + ), + ] = None, + on_shutdown: Annotated[ + Optional[Sequence[Callable[[], Any]]], + Doc( + """ + A list of shutdown event handler functions. + + You should instead use the `lifespan` handlers. + + Read more in the + [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). + """ + ), + ] = None, + lifespan: Annotated[ + Optional[Lifespan[AppType]], + Doc( + """ + A `Lifespan` context manager handler. This replaces `startup` and + `shutdown` functions with a single context manager. + + Read more in the + [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). + """ + ), + ] = None, + terms_of_service: Annotated[ + Optional[str], + Doc( + """ + A URL to the Terms of Service for your API. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more at the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). + + **Example** + + ```python + app = FastAPI(terms_of_service="http://example.com/terms/") + ``` + """ + ), + ] = None, + contact: Annotated[ + Optional[dict[str, Union[str, Any]]], + Doc( + """ + A dictionary with the contact information for the exposed API. + + It can contain several fields. + + * `name`: (`str`) The name of the contact person/organization. + * `url`: (`str`) A URL pointing to the contact information. MUST be in + the format of a URL. + * `email`: (`str`) The email address of the contact person/organization. + MUST be in the format of an email address. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more at the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). + + **Example** + + ```python + app = FastAPI( + contact={ + "name": "Deadpoolio the Amazing", + "url": "http://x-force.example.com/contact/", + "email": "dp@x-force.example.com", + } + ) + ``` + """ + ), + ] = None, + license_info: Annotated[ + Optional[dict[str, Union[str, Any]]], + Doc( + """ + A dictionary with the license information for the exposed API. + + It can contain several fields. + + * `name`: (`str`) **REQUIRED** (if a `license_info` is set). The + license name used for the API. + * `identifier`: (`str`) An [SPDX](https://spdx.dev/) license expression + for the API. The `identifier` field is mutually exclusive of the `url` + field. Available since OpenAPI 3.1.0, FastAPI 0.99.0. + * `url`: (`str`) A URL to the license used for the API. This MUST be + the format of a URL. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more at the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). 
+ + **Example** + + ```python + app = FastAPI( + license_info={ + "name": "Apache 2.0", + "url": "https://www.apache.org/licenses/LICENSE-2.0.html", + } + ) + ``` + """ + ), + ] = None, + openapi_prefix: Annotated[ + str, + Doc( + """ + A URL prefix for the OpenAPI URL. + """ + ), + deprecated( + """ + "openapi_prefix" has been deprecated in favor of "root_path", which + follows more closely the ASGI standard, is simpler, and more + automatic. + """ + ), + ] = "", + root_path: Annotated[ + str, + Doc( + """ + A path prefix handled by a proxy that is not seen by the application + but is seen by external clients, which affects things like Swagger UI. + + Read more about it at the + [FastAPI docs for Behind a Proxy](https://fastapi.tiangolo.com/advanced/behind-a-proxy/). + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI(root_path="/api/v1") + ``` + """ + ), + ] = "", + root_path_in_servers: Annotated[ + bool, + Doc( + """ + To disable automatically generating the URLs in the `servers` field + in the autogenerated OpenAPI using the `root_path`. + + Read more about it in the + [FastAPI docs for Behind a Proxy](https://fastapi.tiangolo.com/advanced/behind-a-proxy/#disable-automatic-server-from-root_path). + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI(root_path_in_servers=False) + ``` + """ + ), + ] = True, + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses to be shown in OpenAPI. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Additional Responses in OpenAPI](https://fastapi.tiangolo.com/advanced/additional-responses/). + + And in the + [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). + """ + ), + ] = None, + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + OpenAPI callbacks that should apply to all *path operations*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + webhooks: Annotated[ + Optional[routing.APIRouter], + Doc( + """ + Add OpenAPI webhooks. This is similar to `callbacks` but it doesn't + depend on specific *path operations*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + **Note**: This is available since OpenAPI 3.1.0, FastAPI 0.99.0. + + Read more about it in the + [FastAPI docs for OpenAPI Webhooks](https://fastapi.tiangolo.com/advanced/openapi-webhooks/). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark all *path operations* as deprecated. You probably don't need it, + but it's available. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + To include (or not) all the *path operations* in the generated OpenAPI. + You probably don't need it, but it's available. + + This affects the generated OpenAPI (e.g. visible at `/docs`). 
+ + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). + """ + ), + ] = True, + swagger_ui_parameters: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + Parameters to configure Swagger UI, the autogenerated interactive API + documentation (by default at `/docs`). + + Read more about it in the + [FastAPI docs about how to Configure Swagger UI](https://fastapi.tiangolo.com/how-to/configure-swagger-ui/). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[routing.APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + separate_input_output_schemas: Annotated[ + bool, + Doc( + """ + Whether to generate separate OpenAPI schemas for request body and + response body when the results would be more precise. + + This is particularly useful when automatically generating clients. + + For example, if you have a model like: + + ```python + from pydantic import BaseModel + + class Item(BaseModel): + name: str + tags: list[str] = [] + ``` + + When `Item` is used for input, a request body, `tags` is not required, + the client doesn't have to provide it. + + But when using `Item` for output, for a response body, `tags` is always + available because it has a default value, even if it's just an empty + list. So, the client should be able to always expect it. + + In this case, there would be two different schemas, one for input and + another one for output. + """ + ), + ] = True, + openapi_external_docs: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + This field allows you to provide additional external documentation links. + If provided, it must be a dictionary containing: + + * `description`: A brief description of the external documentation. + * `url`: The URL pointing to the external documentation. The value **MUST** + be a valid URL format. + + **Example**: + + ```python + from fastapi import FastAPI + + external_docs = { + "description": "Detailed API Reference", + "url": "https://example.com/api-docs", + } + + app = FastAPI(openapi_external_docs=external_docs) + ``` + """ + ), + ] = None, + **extra: Annotated[ + Any, + Doc( + """ + Extra keyword arguments to be stored in the app, not used by FastAPI + anywhere. 
+ """ + ), + ], + ) -> None: + self.debug = debug + self.title = title + self.summary = summary + self.description = description + self.version = version + self.terms_of_service = terms_of_service + self.contact = contact + self.license_info = license_info + self.openapi_url = openapi_url + self.openapi_tags = openapi_tags + self.root_path_in_servers = root_path_in_servers + self.docs_url = docs_url + self.redoc_url = redoc_url + self.swagger_ui_oauth2_redirect_url = swagger_ui_oauth2_redirect_url + self.swagger_ui_init_oauth = swagger_ui_init_oauth + self.swagger_ui_parameters = swagger_ui_parameters + self.servers = servers or [] + self.separate_input_output_schemas = separate_input_output_schemas + self.openapi_external_docs = openapi_external_docs + self.extra = extra + self.openapi_version: Annotated[ + str, + Doc( + """ + The version string of OpenAPI. + + FastAPI will generate OpenAPI version 3.1.0, and will output that as + the OpenAPI version. But some tools, even though they might be + compatible with OpenAPI 3.1.0, might not recognize it as a valid. + + So you could override this value to trick those tools into using + the generated OpenAPI. Have in mind that this is a hack. But if you + avoid using features added in OpenAPI 3.1.0, it might work for your + use case. + + This is not passed as a parameter to the `FastAPI` class to avoid + giving the false idea that FastAPI would generate a different OpenAPI + schema. It is only available as an attribute. + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI() + + app.openapi_version = "3.0.2" + ``` + """ + ), + ] = "3.1.0" + self.openapi_schema: Optional[dict[str, Any]] = None + if self.openapi_url: + assert self.title, "A title must be provided for OpenAPI, e.g.: 'My API'" + assert self.version, "A version must be provided for OpenAPI, e.g.: '2.1.0'" + # TODO: remove when discarding the openapi_prefix parameter + if openapi_prefix: + logger.warning( + '"openapi_prefix" has been deprecated in favor of "root_path", which ' + "follows more closely the ASGI standard, is simpler, and more " + "automatic. Check the docs at " + "https://fastapi.tiangolo.com/advanced/sub-applications/" + ) + self.webhooks: Annotated[ + routing.APIRouter, + Doc( + """ + The `app.webhooks` attribute is an `APIRouter` with the *path + operations* that will be used just for documentation of webhooks. + + Read more about it in the + [FastAPI docs for OpenAPI Webhooks](https://fastapi.tiangolo.com/advanced/openapi-webhooks/). + """ + ), + ] = webhooks or routing.APIRouter() + self.root_path = root_path or openapi_prefix + self.state: Annotated[ + State, + Doc( + """ + A state object for the application. This is the same object for the + entire application, it doesn't change from request to request. + + You normally wouldn't use this in FastAPI, for most of the cases you + would instead use FastAPI dependencies. + + This is simply inherited from Starlette. + + Read more about it in the + [Starlette docs for Applications](https://www.starlette.dev/applications/#storing-state-on-the-app-instance). + """ + ), + ] = State() + self.dependency_overrides: Annotated[ + dict[Callable[..., Any], Callable[..., Any]], + Doc( + """ + A dictionary with overrides for the dependencies. + + Each key is the original dependency callable, and the value is the + actual dependency that should be called. + + This is for testing, to replace expensive dependencies with testing + versions. 
+ + Read more about it in the + [FastAPI docs for Testing Dependencies with Overrides](https://fastapi.tiangolo.com/advanced/testing-dependencies/). + """ + ), + ] = {} + self.router: routing.APIRouter = routing.APIRouter( + routes=routes, + redirect_slashes=redirect_slashes, + dependency_overrides_provider=self, + on_startup=on_startup, + on_shutdown=on_shutdown, + lifespan=lifespan, + default_response_class=default_response_class, + dependencies=dependencies, + callbacks=callbacks, + deprecated=deprecated, + include_in_schema=include_in_schema, + responses=responses, + generate_unique_id_function=generate_unique_id_function, + ) + self.exception_handlers: dict[ + Any, Callable[[Request, Any], Union[Response, Awaitable[Response]]] + ] = {} if exception_handlers is None else dict(exception_handlers) + self.exception_handlers.setdefault(HTTPException, http_exception_handler) + self.exception_handlers.setdefault( + RequestValidationError, request_validation_exception_handler + ) + self.exception_handlers.setdefault( + WebSocketRequestValidationError, + # Starlette still has incorrect type specification for the handlers + websocket_request_validation_exception_handler, # type: ignore + ) + + self.user_middleware: list[Middleware] = ( + [] if middleware is None else list(middleware) + ) + self.middleware_stack: Union[ASGIApp, None] = None + self.setup() + + def build_middleware_stack(self) -> ASGIApp: + # Duplicate/override from Starlette to add AsyncExitStackMiddleware + # inside of ExceptionMiddleware, inside of custom user middlewares + debug = self.debug + error_handler = None + exception_handlers: dict[Any, ExceptionHandler] = {} + + for key, value in self.exception_handlers.items(): + if key in (500, Exception): + error_handler = value + else: + exception_handlers[key] = value + + middleware = ( + [Middleware(ServerErrorMiddleware, handler=error_handler, debug=debug)] + + self.user_middleware + + [ + Middleware( + ExceptionMiddleware, handlers=exception_handlers, debug=debug + ), + # Add FastAPI-specific AsyncExitStackMiddleware for closing files. + # Before this was also used for closing dependencies with yield but + # those now have their own AsyncExitStack, to properly support + # streaming responses while keeping compatibility with the previous + # versions (as of writing 0.117.1) that allowed doing + # except HTTPException inside a dependency with yield. + # This needs to happen after user middlewares because those create a + # new contextvars context copy by using a new AnyIO task group. + # This AsyncExitStack preserves the context for contextvars, not + # strictly necessary for closing files but it was one of the original + # intentions. + # If the AsyncExitStack lived outside of the custom middlewares and + # contextvars were set, for example in a dependency with 'yield' + # in that internal contextvars context, the values would not be + # available in the outer context of the AsyncExitStack. + # By placing the middleware and the AsyncExitStack here, inside all + # user middlewares, the same context is used. + # This is currently not needed, only for closing files, but used to be + # important when dependencies with yield were closed here. + Middleware(AsyncExitStackMiddleware), + ] + ) + + app = self.router + for cls, args, kwargs in reversed(middleware): + app = cls(app, *args, **kwargs) + return app + + def openapi(self) -> dict[str, Any]: + """ + Generate the OpenAPI schema of the application. This is called by FastAPI + internally. 
+ + The first time it is called it stores the result in the attribute + `app.openapi_schema`, and next times it is called, it just returns that same + result. To avoid the cost of generating the schema every time. + + If you need to modify the generated OpenAPI schema, you could modify it. + + Read more in the + [FastAPI docs for OpenAPI](https://fastapi.tiangolo.com/how-to/extending-openapi/). + """ + if not self.openapi_schema: + self.openapi_schema = get_openapi( + title=self.title, + version=self.version, + openapi_version=self.openapi_version, + summary=self.summary, + description=self.description, + terms_of_service=self.terms_of_service, + contact=self.contact, + license_info=self.license_info, + routes=self.routes, + webhooks=self.webhooks.routes, + tags=self.openapi_tags, + servers=self.servers, + separate_input_output_schemas=self.separate_input_output_schemas, + external_docs=self.openapi_external_docs, + ) + return self.openapi_schema + + def setup(self) -> None: + if self.openapi_url: + urls = (server_data.get("url") for server_data in self.servers) + server_urls = {url for url in urls if url} + + async def openapi(req: Request) -> JSONResponse: + root_path = req.scope.get("root_path", "").rstrip("/") + if root_path not in server_urls: + if root_path and self.root_path_in_servers: + self.servers.insert(0, {"url": root_path}) + server_urls.add(root_path) + return JSONResponse(self.openapi()) + + self.add_route(self.openapi_url, openapi, include_in_schema=False) + if self.openapi_url and self.docs_url: + + async def swagger_ui_html(req: Request) -> HTMLResponse: + root_path = req.scope.get("root_path", "").rstrip("/") + openapi_url = root_path + self.openapi_url + oauth2_redirect_url = self.swagger_ui_oauth2_redirect_url + if oauth2_redirect_url: + oauth2_redirect_url = root_path + oauth2_redirect_url + return get_swagger_ui_html( + openapi_url=openapi_url, + title=f"{self.title} - Swagger UI", + oauth2_redirect_url=oauth2_redirect_url, + init_oauth=self.swagger_ui_init_oauth, + swagger_ui_parameters=self.swagger_ui_parameters, + ) + + self.add_route(self.docs_url, swagger_ui_html, include_in_schema=False) + + if self.swagger_ui_oauth2_redirect_url: + + async def swagger_ui_redirect(req: Request) -> HTMLResponse: + return get_swagger_ui_oauth2_redirect_html() + + self.add_route( + self.swagger_ui_oauth2_redirect_url, + swagger_ui_redirect, + include_in_schema=False, + ) + if self.openapi_url and self.redoc_url: + + async def redoc_html(req: Request) -> HTMLResponse: + root_path = req.scope.get("root_path", "").rstrip("/") + openapi_url = root_path + self.openapi_url + return get_redoc_html( + openapi_url=openapi_url, title=f"{self.title} - ReDoc" + ) + + self.add_route(self.redoc_url, redoc_html, include_in_schema=False) + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if self.root_path: + scope["root_path"] = self.root_path + await super().__call__(scope, receive, send) + + def add_api_route( + self, + path: str, + endpoint: Callable[..., Any], + *, + response_model: Any = Default(None), + status_code: Optional[int] = None, + tags: Optional[list[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[dict[Union[int, str], dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + methods: Optional[list[str]] = None, + operation_id: Optional[str] = None, + 
response_model_include: Optional[IncEx] = None, + response_model_exclude: Optional[IncEx] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Union[type[Response], DefaultPlaceholder] = Default( + JSONResponse + ), + name: Optional[str] = None, + openapi_extra: Optional[dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> None: + self.router.add_api_route( + path, + endpoint=endpoint, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=methods, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def api_route( + self, + path: str, + *, + response_model: Any = Default(None), + status_code: Optional[int] = None, + tags: Optional[list[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[dict[Union[int, str], dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + methods: Optional[list[str]] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[IncEx] = None, + response_model_exclude: Optional[IncEx] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: type[Response] = Default(JSONResponse), + name: Optional[str] = None, + openapi_extra: Optional[dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.router.add_api_route( + path, + func, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=methods, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + return func + + return decorator + + def 
add_api_websocket_route( + self, + path: str, + endpoint: Callable[..., Any], + name: Optional[str] = None, + *, + dependencies: Optional[Sequence[Depends]] = None, + ) -> None: + self.router.add_api_websocket_route( + path, + endpoint, + name=name, + dependencies=dependencies, + ) + + def websocket( + self, + path: Annotated[ + str, + Doc( + """ + WebSocket path. + """ + ), + ], + name: Annotated[ + Optional[str], + Doc( + """ + A name for the WebSocket. Only used internally. + """ + ), + ] = None, + *, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be used for this + WebSocket. + + Read more about it in the + [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/). + """ + ), + ] = None, + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Decorate a WebSocket function. + + Read more about it in the + [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/). + + **Example** + + ```python + from fastapi import FastAPI, WebSocket + + app = FastAPI() + + @app.websocket("/ws") + async def websocket_endpoint(websocket: WebSocket): + await websocket.accept() + while True: + data = await websocket.receive_text() + await websocket.send_text(f"Message text was: {data}") + ``` + """ + + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_api_websocket_route( + path, + func, + name=name, + dependencies=dependencies, + ) + return func + + return decorator + + def include_router( + self, + router: Annotated[routing.APIRouter, Doc("The `APIRouter` to include.")], + *, + prefix: Annotated[str, Doc("An optional path prefix for the router.")] = "", + tags: Annotated[ + Optional[list[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to all the *path operations* in this + router. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to all the + *path operations* in this router. + + Read more about it in the + [FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). + + **Example** + + ```python + from fastapi import Depends, FastAPI + + from .dependencies import get_token_header + from .internal import admin + + app = FastAPI() + + app.include_router( + admin.router, + dependencies=[Depends(get_token_header)], + ) + ``` + """ + ), + ] = None, + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses to be shown in OpenAPI. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Additional Responses in OpenAPI](https://fastapi.tiangolo.com/advanced/additional-responses/). + + And in the + [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark all the *path operations* in this router as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). 
+ + **Example** + + ```python + from fastapi import FastAPI + + from .internal import old_api + + app = FastAPI() + + app.include_router( + old_api.router, + deprecated=True, + ) + ``` + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include (or not) all the *path operations* in this router in the + generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + **Example** + + ```python + from fastapi import FastAPI + + from .internal import old_api + + app = FastAPI() + + app.include_router( + old_api.router, + include_in_schema=False, + ) + ``` + """ + ), + ] = True, + default_response_class: Annotated[ + type[Response], + Doc( + """ + Default response class to be used for the *path operations* in this + router. + + Read more in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class). + + **Example** + + ```python + from fastapi import FastAPI + from fastapi.responses import ORJSONResponse + + from .internal import old_api + + app = FastAPI() + + app.include_router( + old_api.router, + default_response_class=ORJSONResponse, + ) + ``` + """ + ), + ] = Default(JSONResponse), + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[routing.APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> None: + """ + Include an `APIRouter` in the same app. + + Read more about it in the + [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/). + + ## Example + + ```python + from fastapi import FastAPI + + from .users import users_router + + app = FastAPI() + + app.include_router(users_router) + ``` + """ + self.router.include_router( + router, + prefix=prefix, + tags=tags, + dependencies=dependencies, + responses=responses, + deprecated=deprecated, + include_in_schema=include_in_schema, + default_response_class=default_response_class, + callbacks=callbacks, + generate_unique_id_function=generate_unique_id_function, + ) + + def get( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). 
+ * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[list[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). 
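+
+                **Example**
+
+                A small sketch marking a hypothetical legacy endpoint as
+                deprecated (it still works, it is only flagged as deprecated in
+                the docs):
+
+                ```python
+                from fastapi import FastAPI
+
+                app = FastAPI()
+
+                @app.get("/items/legacy/", deprecated=True)
+                def read_legacy_items():
+                    return [{"name": "Empanada"}]
+                ```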
+ """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. 
+ + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[routing.APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP GET operation. 
+ + ## Example + + ```python + from fastapi import FastAPI + + app = FastAPI() + + @app.get("/items/") + def read_items(): + return [{"name": "Empanada"}, {"name": "Arepa"}] + ``` + """ + return self.router.get( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def put( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[list[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. 
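+
+                **Example**
+
+                A minimal sketch (the `verify_token` dependency and the token
+                value are hypothetical):
+
+                ```python
+                from fastapi import Depends, FastAPI, Header, HTTPException
+
+                app = FastAPI()
+
+                # Dependency that only checks a header; its return value is not used.
+                async def verify_token(x_token: str = Header()):
+                    if x_token != "fake-super-secret-token":
+                        raise HTTPException(status_code=400, detail="Invalid X-Token")
+
+                @app.put("/items/{item_id}", dependencies=[Depends(verify_token)])
+                def replace_item(item_id: str):
+                    return {"id": item_id}
+                ```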
+ + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). 
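+
+                **Example**
+
+                A minimal sketch with a hypothetical `Item` model whose JSON
+                field name differs from the Python attribute name:
+
+                ```python
+                from fastapi import FastAPI
+                from pydantic import BaseModel, Field
+
+                app = FastAPI()
+
+                class Item(BaseModel):
+                    item_name: str = Field(alias="itemName")
+
+                # With response_model_by_alias=False the response uses the
+                # attribute name "item_name" instead of the alias "itemName".
+                @app.put(
+                    "/items/{item_id}",
+                    response_model=Item,
+                    response_model_by_alias=False,
+                )
+                def replace_item(item_id: str, item: Item):
+                    return item
+                ```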
+ """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. 
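+
+                **Example**
+
+                A minimal sketch adding a hypothetical vendor extension to this
+                operation's entry in the OpenAPI schema:
+
+                ```python
+                from fastapi import FastAPI
+
+                app = FastAPI()
+
+                @app.put(
+                    "/items/{item_id}",
+                    openapi_extra={"x-internal-owner": "catalog-team"},
+                )
+                def replace_item(item_id: str):
+                    return {"id": item_id}
+                ```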
+ + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[routing.APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP PUT operation. + + ## Example + + ```python + from fastapi import FastAPI + from pydantic import BaseModel + + class Item(BaseModel): + name: str + description: str | None = None + + app = FastAPI() + + @app.put("/items/{item_id}") + def replace_item(item_id: str, item: Item): + return {"message": "Item replaced", "id": item_id} + ``` + """ + return self.router.put( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def post( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). 
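+
+                **Example**
+
+                A sketch of the filtering behavior described above, using
+                hypothetical `UserIn` and `UserOut` models:
+
+                ```python
+                from fastapi import FastAPI
+                from pydantic import BaseModel
+
+                app = FastAPI()
+
+                class UserIn(BaseModel):
+                    username: str
+                    password: str
+
+                class UserOut(BaseModel):
+                    username: str
+
+                # The returned object is filtered through UserOut, so "password"
+                # never reaches the client.
+                @app.post("/users/", response_model=UserOut)
+                def create_user(user: UserIn):
+                    return user
+                ```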
+ + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[list[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. 
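+
+                **Example**
+
+                A minimal sketch with a hypothetical `Item` model, keeping only
+                two of its fields in the response:
+
+                ```python
+                from fastapi import FastAPI
+                from pydantic import BaseModel
+
+                app = FastAPI()
+
+                class Item(BaseModel):
+                    name: str
+                    description: str = ""
+                    price: float = 0.0
+
+                # Only "name" and "price" are kept in the response data.
+                @app.post(
+                    "/items/",
+                    response_model=Item,
+                    response_model_include={"name", "price"},
+                )
+                def create_item(item: Item):
+                    return item
+                ```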
+ + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. 
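+
+                **Example**
+
+                A minimal sketch using `ORJSONResponse` (this assumes the
+                optional `orjson` package is installed):
+
+                ```python
+                from fastapi import FastAPI
+                from fastapi.responses import ORJSONResponse
+
+                app = FastAPI()
+
+                # Responses from this operation are rendered with ORJSONResponse
+                # instead of the default JSONResponse.
+                @app.post("/items/", response_class=ORJSONResponse)
+                def create_item():
+                    return {"message": "Item created"}
+                ```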
+ + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[routing.APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP POST operation. + + ## Example + + ```python + from fastapi import FastAPI + from pydantic import BaseModel + + class Item(BaseModel): + name: str + description: str | None = None + + app = FastAPI() + + @app.post("/items/") + def create_item(item: Item): + return {"message": "Item created"} + ``` + """ + return self.router.post( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def delete( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). 
+ * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[list[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). 
+ """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. 
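+
+                **Example**
+
+                A minimal sketch with a hypothetical `DeletedItem` model whose
+                optional field is dropped from the response when it is `None`:
+
+                ```python
+                from typing import Optional
+
+                from fastapi import FastAPI
+                from pydantic import BaseModel
+
+                app = FastAPI()
+
+                class DeletedItem(BaseModel):
+                    id: str
+                    note: Optional[str] = None
+
+                @app.delete(
+                    "/items/{item_id}",
+                    response_model=DeletedItem,
+                    response_model_exclude_none=True,
+                )
+                def delete_item(item_id: str):
+                    return DeletedItem(id=item_id)
+                ```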
+ + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[routing.APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP DELETE operation. 
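A short sketch of the `generate_unique_id_function` customization mentioned above (the function name and ID format here are illustrative assumptions): the callable receives the `APIRoute` and returns the operation ID used in the OpenAPI schema and by generated clients.

```python
from fastapi import FastAPI
from fastapi.routing import APIRoute


def custom_generate_unique_id(route: APIRoute) -> str:
    # Prefix the operation ID with the first tag so generated clients get
    # readable method names, e.g. "items-read_items".
    tag = route.tags[0] if route.tags else "default"
    return f"{tag}-{route.name}"


app = FastAPI(generate_unique_id_function=custom_generate_unique_id)
```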
+ + ## Example + + ```python + from fastapi import FastAPI + + app = FastAPI() + + @app.delete("/items/{item_id}") + def delete_item(item_id: str): + return {"message": "Item deleted"} + ``` + """ + return self.router.delete( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def options( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[list[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. 
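The `dependencies` parameter described above runs dependencies before the handler without passing their return values to it. A minimal sketch, assuming a hypothetical `verify_token` dependency:

```python
from fastapi import Depends, FastAPI, Header, HTTPException

app = FastAPI()


async def verify_token(x_token: str = Header()):
    if x_token != "fake-super-secret-token":
        raise HTTPException(status_code=400, detail="X-Token header invalid")


@app.delete("/items/{item_id}", dependencies=[Depends(verify_token)])
def delete_item(item_id: str):
    # verify_token has already run at this point; its return value is discarded.
    return {"message": "Item deleted"}
```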
+ + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). 
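A sketch of `response_model_exclude` from the docstrings above (the `Item` model and field names are assumptions for illustration): named fields are dropped from the serialized response even though the model defines them.

```python
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()


class Item(BaseModel):
    name: str
    description: str = ""
    price: float = 0.0


@app.get("/items/public", response_model=Item, response_model_exclude={"price"})
def read_public_item():
    # "price" is removed from the JSON sent to the client.
    return Item(name="Foo", description="A thing", price=42.0)
```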
+ """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. 
+ + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[routing.APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP OPTIONS operation. + + ## Example + + ```python + from fastapi import FastAPI + + app = FastAPI() + + @app.options("/items/") + def get_item_options(): + return {"additions": ["Aji", "Guacamole"]} + ``` + """ + return self.router.options( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def head( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). 
+ """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[list[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). 
+ """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). 
+ """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[routing.APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP HEAD operation. + + ## Example + + ```python + from fastapi import FastAPI, Response + + app = FastAPI() + + @app.head("/items/", status_code=204) + def get_items_headers(response: Response): + response.headers["X-Cat-Dog"] = "Alone in the world" + ``` + """ + return self.router.head( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def patch( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. 
+ * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[list[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. 
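A sketch of setting a custom `operation_id` as described above (the ID string is an assumed example); the value must stay unique across the whole API:

```python
from fastapi import FastAPI

app = FastAPI()


@app.patch("/items/{item_id}", operation_id="update_item_by_id")
def update_item(item_id: str):
    # Clients generated from the OpenAPI schema will use "update_item_by_id".
    return {"item_id": item_id}
```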
+ + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). 
+ """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[routing.APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP PATCH operation. 
+ + ## Example + + ```python + from fastapi import FastAPI + from pydantic import BaseModel + + class Item(BaseModel): + name: str + description: str | None = None + + app = FastAPI() + + @app.patch("/items/") + def update_item(item: Item): + return {"message": "Item updated in place"} + ``` + """ + return self.router.patch( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def trace( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[list[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). 
+ """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). 
+ """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. 
+ + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[routing.APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP TRACE operation. + + ## Example + + ```python + from fastapi import FastAPI + + app = FastAPI() + + @app.trace("/items/{item_id}") + def trace_item(item_id: str): + return None + ``` + """ + return self.router.trace( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def websocket_route( + self, path: str, name: Union[str, None] = None + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.router.add_websocket_route(path, func, name=name) + return func + + return decorator + + @deprecated( + """ + on_event is deprecated, use lifespan event handlers instead. + + Read more about it in the + [FastAPI docs for Lifespan Events](https://fastapi.tiangolo.com/advanced/events/). + """ + ) + def on_event( + self, + event_type: Annotated[ + str, + Doc( + """ + The type of event. `startup` or `shutdown`. + """ + ), + ], + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add an event handler for the application. + + `on_event` is deprecated, use `lifespan` event handlers instead. + + Read more about it in the + [FastAPI docs for Lifespan Events](https://fastapi.tiangolo.com/advanced/events/#alternative-events-deprecated). + """ + return self.router.on_event(event_type) + + def middleware( + self, + middleware_type: Annotated[ + str, + Doc( + """ + The type of middleware. Currently only supports `http`. + """ + ), + ], + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a middleware to the application. + + Read more about it in the + [FastAPI docs for Middleware](https://fastapi.tiangolo.com/tutorial/middleware/). 
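Since `on_event` above is deprecated in favor of lifespan handlers, a minimal sketch of the recommended replacement (the startup/shutdown work is left as placeholder comments):

```python
from contextlib import asynccontextmanager

from fastapi import FastAPI


@asynccontextmanager
async def lifespan(app: FastAPI):
    # startup work goes here (open pools, load models, ...)
    yield
    # shutdown work goes here (close pools, flush buffers, ...)


app = FastAPI(lifespan=lifespan)
```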
+ + ## Example + + ```python + import time + from typing import Awaitable, Callable + + from fastapi import FastAPI, Request, Response + + app = FastAPI() + + + @app.middleware("http") + async def add_process_time_header( + request: Request, call_next: Callable[[Request], Awaitable[Response]] + ) -> Response: + start_time = time.time() + response = await call_next(request) + process_time = time.time() - start_time + response.headers["X-Process-Time"] = str(process_time) + return response + ``` + """ + + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_middleware(BaseHTTPMiddleware, dispatch=func) + return func + + return decorator + + def exception_handler( + self, + exc_class_or_status_code: Annotated[ + Union[int, type[Exception]], + Doc( + """ + The Exception class this would handle, or a status code. + """ + ), + ], + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add an exception handler to the app. + + Read more about it in the + [FastAPI docs for Handling Errors](https://fastapi.tiangolo.com/tutorial/handling-errors/). + + ## Example + + ```python + from fastapi import FastAPI, Request + from fastapi.responses import JSONResponse + + + class UnicornException(Exception): + def __init__(self, name: str): + self.name = name + + + app = FastAPI() + + + @app.exception_handler(UnicornException) + async def unicorn_exception_handler(request: Request, exc: UnicornException): + return JSONResponse( + status_code=418, + content={"message": f"Oops! {exc.name} did something. There goes a rainbow..."}, + ) + ``` + """ + + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_exception_handler(exc_class_or_status_code, func) + return func + + return decorator diff --git a/.venv/lib/python3.12/site-packages/fastapi/background.py b/.venv/lib/python3.12/site-packages/fastapi/background.py new file mode 100644 index 0000000..20803ba --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/background.py @@ -0,0 +1,60 @@ +from typing import Annotated, Any, Callable + +from annotated_doc import Doc +from starlette.background import BackgroundTasks as StarletteBackgroundTasks +from typing_extensions import ParamSpec + +P = ParamSpec("P") + + +class BackgroundTasks(StarletteBackgroundTasks): + """ + A collection of background tasks that will be called after a response has been + sent to the client. + + Read more about it in the + [FastAPI docs for Background Tasks](https://fastapi.tiangolo.com/tutorial/background-tasks/). + + ## Example + + ```python + from fastapi import BackgroundTasks, FastAPI + + app = FastAPI() + + + def write_notification(email: str, message=""): + with open("log.txt", mode="w") as email_file: + content = f"notification for {email}: {message}" + email_file.write(content) + + + @app.post("/send-notification/{email}") + async def send_notification(email: str, background_tasks: BackgroundTasks): + background_tasks.add_task(write_notification, email, message="some notification") + return {"message": "Notification sent in the background"} + ``` + """ + + def add_task( + self, + func: Annotated[ + Callable[P, Any], + Doc( + """ + The function to call after the response is sent. + + It can be a regular `def` function or an `async def` function. + """ + ), + ], + *args: P.args, + **kwargs: P.kwargs, + ) -> None: + """ + Add a function to be called in the background after the response is sent. + + Read more about it in the + [FastAPI docs for Background Tasks](https://fastapi.tiangolo.com/tutorial/background-tasks/). 
+ """ + return super().add_task(func, *args, **kwargs) diff --git a/.venv/lib/python3.12/site-packages/fastapi/cli.py b/.venv/lib/python3.12/site-packages/fastapi/cli.py new file mode 100644 index 0000000..8d3301e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/cli.py @@ -0,0 +1,13 @@ +try: + from fastapi_cli.cli import main as cli_main + +except ImportError: # pragma: no cover + cli_main = None # type: ignore + + +def main() -> None: + if not cli_main: # type: ignore[truthy-function] + message = 'To use the fastapi command, please install "fastapi[standard]":\n\n\tpip install "fastapi[standard]"\n' + print(message) + raise RuntimeError(message) # noqa: B904 + cli_main() diff --git a/.venv/lib/python3.12/site-packages/fastapi/concurrency.py b/.venv/lib/python3.12/site-packages/fastapi/concurrency.py new file mode 100644 index 0000000..76a5a2e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/concurrency.py @@ -0,0 +1,41 @@ +from collections.abc import AsyncGenerator +from contextlib import AbstractContextManager +from contextlib import asynccontextmanager as asynccontextmanager +from typing import TypeVar + +import anyio.to_thread +from anyio import CapacityLimiter +from starlette.concurrency import iterate_in_threadpool as iterate_in_threadpool # noqa +from starlette.concurrency import run_in_threadpool as run_in_threadpool # noqa +from starlette.concurrency import ( # noqa + run_until_first_complete as run_until_first_complete, +) + +_T = TypeVar("_T") + + +@asynccontextmanager +async def contextmanager_in_threadpool( + cm: AbstractContextManager[_T], +) -> AsyncGenerator[_T, None]: + # blocking __exit__ from running waiting on a free thread + # can create race conditions/deadlocks if the context manager itself + # has its own internal pool (e.g. a database connection pool) + # to avoid this we let __exit__ run without a capacity limit + # since we're creating a new limiter for each call, any non-zero limit + # works (1 is arbitrary) + exit_limiter = CapacityLimiter(1) + try: + yield await run_in_threadpool(cm.__enter__) + except Exception as e: + ok = bool( + await anyio.to_thread.run_sync( + cm.__exit__, type(e), e, e.__traceback__, limiter=exit_limiter + ) + ) + if not ok: + raise e + else: + await anyio.to_thread.run_sync( + cm.__exit__, None, None, None, limiter=exit_limiter + ) diff --git a/.venv/lib/python3.12/site-packages/fastapi/datastructures.py b/.venv/lib/python3.12/site-packages/fastapi/datastructures.py new file mode 100644 index 0000000..2bf5fdb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/datastructures.py @@ -0,0 +1,183 @@ +from collections.abc import Mapping +from typing import ( + Annotated, + Any, + BinaryIO, + Callable, + Optional, + TypeVar, + cast, +) + +from annotated_doc import Doc +from pydantic import GetJsonSchemaHandler +from starlette.datastructures import URL as URL # noqa: F401 +from starlette.datastructures import Address as Address # noqa: F401 +from starlette.datastructures import FormData as FormData # noqa: F401 +from starlette.datastructures import Headers as Headers # noqa: F401 +from starlette.datastructures import QueryParams as QueryParams # noqa: F401 +from starlette.datastructures import State as State # noqa: F401 +from starlette.datastructures import UploadFile as StarletteUploadFile + + +class UploadFile(StarletteUploadFile): + """ + A file uploaded in a request. + + Define it as a *path operation function* (or dependency) parameter. 
+ + If you are using a regular `def` function, you can use the `upload_file.file` + attribute to access the raw standard Python file (blocking, not async), useful and + needed for non-async code. + + Read more about it in the + [FastAPI docs for Request Files](https://fastapi.tiangolo.com/tutorial/request-files/). + + ## Example + + ```python + from typing import Annotated + + from fastapi import FastAPI, File, UploadFile + + app = FastAPI() + + + @app.post("/files/") + async def create_file(file: Annotated[bytes, File()]): + return {"file_size": len(file)} + + + @app.post("/uploadfile/") + async def create_upload_file(file: UploadFile): + return {"filename": file.filename} + ``` + """ + + file: Annotated[ + BinaryIO, + Doc("The standard Python file object (non-async)."), + ] + filename: Annotated[Optional[str], Doc("The original file name.")] + size: Annotated[Optional[int], Doc("The size of the file in bytes.")] + headers: Annotated[Headers, Doc("The headers of the request.")] + content_type: Annotated[ + Optional[str], Doc("The content type of the request, from the headers.") + ] + + async def write( + self, + data: Annotated[ + bytes, + Doc( + """ + The bytes to write to the file. + """ + ), + ], + ) -> None: + """ + Write some bytes to the file. + + You normally wouldn't use this from a file you read in a request. + + To be awaitable, compatible with async, this is run in threadpool. + """ + return await super().write(data) + + async def read( + self, + size: Annotated[ + int, + Doc( + """ + The number of bytes to read from the file. + """ + ), + ] = -1, + ) -> bytes: + """ + Read some bytes from the file. + + To be awaitable, compatible with async, this is run in threadpool. + """ + return await super().read(size) + + async def seek( + self, + offset: Annotated[ + int, + Doc( + """ + The position in bytes to seek to in the file. + """ + ), + ], + ) -> None: + """ + Move to a position in the file. + + Any next read or write will be done from that position. + + To be awaitable, compatible with async, this is run in threadpool. + """ + return await super().seek(offset) + + async def close(self) -> None: + """ + Close the file. + + To be awaitable, compatible with async, this is run in threadpool. + """ + return await super().close() + + @classmethod + def _validate(cls, __input_value: Any, _: Any) -> "UploadFile": + if not isinstance(__input_value, StarletteUploadFile): + raise ValueError(f"Expected UploadFile, received: {type(__input_value)}") + return cast(UploadFile, __input_value) + + @classmethod + def __get_pydantic_json_schema__( + cls, core_schema: Mapping[str, Any], handler: GetJsonSchemaHandler + ) -> dict[str, Any]: + return {"type": "string", "format": "binary"} + + @classmethod + def __get_pydantic_core_schema__( + cls, source: type[Any], handler: Callable[[Any], Mapping[str, Any]] + ) -> Mapping[str, Any]: + from ._compat.v2 import with_info_plain_validator_function + + return with_info_plain_validator_function(cls._validate) + + +class DefaultPlaceholder: + """ + You shouldn't use this class directly. + + It's used internally to recognize when a default value has been overwritten, even + if the overridden default value was truthy. 
+ """ + + def __init__(self, value: Any): + self.value = value + + def __bool__(self) -> bool: + return bool(self.value) + + def __eq__(self, o: object) -> bool: + return isinstance(o, DefaultPlaceholder) and o.value == self.value + + +DefaultType = TypeVar("DefaultType") + + +def Default(value: DefaultType) -> DefaultType: + """ + You shouldn't use this function directly. + + It's used internally to recognize when a default value has been overwritten, even + if the overridden default value was truthy. + """ + return DefaultPlaceholder(value) # type: ignore diff --git a/.venv/lib/python3.12/site-packages/fastapi/dependencies/__init__.py b/.venv/lib/python3.12/site-packages/fastapi/dependencies/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/fastapi/dependencies/models.py b/.venv/lib/python3.12/site-packages/fastapi/dependencies/models.py new file mode 100644 index 0000000..5839232 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/dependencies/models.py @@ -0,0 +1,193 @@ +import inspect +import sys +from dataclasses import dataclass, field +from functools import cached_property, partial +from typing import Any, Callable, Optional, Union + +from fastapi._compat import ModelField +from fastapi.security.base import SecurityBase +from fastapi.types import DependencyCacheKey +from typing_extensions import Literal + +if sys.version_info >= (3, 13): # pragma: no cover + from inspect import iscoroutinefunction +else: # pragma: no cover + from asyncio import iscoroutinefunction + + +def _unwrapped_call(call: Optional[Callable[..., Any]]) -> Any: + if call is None: + return call # pragma: no cover + unwrapped = inspect.unwrap(_impartial(call)) + return unwrapped + + +def _impartial(func: Callable[..., Any]) -> Callable[..., Any]: + while isinstance(func, partial): + func = func.func + return func + + +@dataclass +class Dependant: + path_params: list[ModelField] = field(default_factory=list) + query_params: list[ModelField] = field(default_factory=list) + header_params: list[ModelField] = field(default_factory=list) + cookie_params: list[ModelField] = field(default_factory=list) + body_params: list[ModelField] = field(default_factory=list) + dependencies: list["Dependant"] = field(default_factory=list) + name: Optional[str] = None + call: Optional[Callable[..., Any]] = None + request_param_name: Optional[str] = None + websocket_param_name: Optional[str] = None + http_connection_param_name: Optional[str] = None + response_param_name: Optional[str] = None + background_tasks_param_name: Optional[str] = None + security_scopes_param_name: Optional[str] = None + own_oauth_scopes: Optional[list[str]] = None + parent_oauth_scopes: Optional[list[str]] = None + use_cache: bool = True + path: Optional[str] = None + scope: Union[Literal["function", "request"], None] = None + + @cached_property + def oauth_scopes(self) -> list[str]: + scopes = self.parent_oauth_scopes.copy() if self.parent_oauth_scopes else [] + # This doesn't use a set to preserve order, just in case + for scope in self.own_oauth_scopes or []: + if scope not in scopes: + scopes.append(scope) + return scopes + + @cached_property + def cache_key(self) -> DependencyCacheKey: + scopes_for_cache = ( + tuple(sorted(set(self.oauth_scopes or []))) if self._uses_scopes else () + ) + return ( + self.call, + scopes_for_cache, + self.computed_scope or "", + ) + + @cached_property + def _uses_scopes(self) -> bool: + if self.own_oauth_scopes: + return True + if self.security_scopes_param_name 
is not None: + return True + if self._is_security_scheme: + return True + for sub_dep in self.dependencies: + if sub_dep._uses_scopes: + return True + return False + + @cached_property + def _is_security_scheme(self) -> bool: + if self.call is None: + return False # pragma: no cover + unwrapped = _unwrapped_call(self.call) + return isinstance(unwrapped, SecurityBase) + + # Mainly to get the type of SecurityBase, but it's the same self.call + @cached_property + def _security_scheme(self) -> SecurityBase: + unwrapped = _unwrapped_call(self.call) + assert isinstance(unwrapped, SecurityBase) + return unwrapped + + @cached_property + def _security_dependencies(self) -> list["Dependant"]: + security_deps = [dep for dep in self.dependencies if dep._is_security_scheme] + return security_deps + + @cached_property + def is_gen_callable(self) -> bool: + if self.call is None: + return False # pragma: no cover + if inspect.isgeneratorfunction( + _impartial(self.call) + ) or inspect.isgeneratorfunction(_unwrapped_call(self.call)): + return True + if inspect.isclass(_unwrapped_call(self.call)): + return False + dunder_call = getattr(_impartial(self.call), "__call__", None) # noqa: B004 + if dunder_call is None: + return False # pragma: no cover + if inspect.isgeneratorfunction( + _impartial(dunder_call) + ) or inspect.isgeneratorfunction(_unwrapped_call(dunder_call)): + return True + dunder_unwrapped_call = getattr(_unwrapped_call(self.call), "__call__", None) # noqa: B004 + if dunder_unwrapped_call is None: + return False # pragma: no cover + if inspect.isgeneratorfunction( + _impartial(dunder_unwrapped_call) + ) or inspect.isgeneratorfunction(_unwrapped_call(dunder_unwrapped_call)): + return True + return False + + @cached_property + def is_async_gen_callable(self) -> bool: + if self.call is None: + return False # pragma: no cover + if inspect.isasyncgenfunction( + _impartial(self.call) + ) or inspect.isasyncgenfunction(_unwrapped_call(self.call)): + return True + if inspect.isclass(_unwrapped_call(self.call)): + return False + dunder_call = getattr(_impartial(self.call), "__call__", None) # noqa: B004 + if dunder_call is None: + return False # pragma: no cover + if inspect.isasyncgenfunction( + _impartial(dunder_call) + ) or inspect.isasyncgenfunction(_unwrapped_call(dunder_call)): + return True + dunder_unwrapped_call = getattr(_unwrapped_call(self.call), "__call__", None) # noqa: B004 + if dunder_unwrapped_call is None: + return False # pragma: no cover + if inspect.isasyncgenfunction( + _impartial(dunder_unwrapped_call) + ) or inspect.isasyncgenfunction(_unwrapped_call(dunder_unwrapped_call)): + return True + return False + + @cached_property + def is_coroutine_callable(self) -> bool: + if self.call is None: + return False # pragma: no cover + if inspect.isroutine(_impartial(self.call)) and iscoroutinefunction( + _impartial(self.call) + ): + return True + if inspect.isroutine(_unwrapped_call(self.call)) and iscoroutinefunction( + _unwrapped_call(self.call) + ): + return True + if inspect.isclass(_unwrapped_call(self.call)): + return False + dunder_call = getattr(_impartial(self.call), "__call__", None) # noqa: B004 + if dunder_call is None: + return False # pragma: no cover + if iscoroutinefunction(_impartial(dunder_call)) or iscoroutinefunction( + _unwrapped_call(dunder_call) + ): + return True + dunder_unwrapped_call = getattr(_unwrapped_call(self.call), "__call__", None) # noqa: B004 + if dunder_unwrapped_call is None: + return False # pragma: no cover + if iscoroutinefunction( + 
_impartial(dunder_unwrapped_call) + ) or iscoroutinefunction(_unwrapped_call(dunder_unwrapped_call)): + return True + return False + + @cached_property + def computed_scope(self) -> Union[str, None]: + if self.scope: + return self.scope + if self.is_gen_callable or self.is_async_gen_callable: + return "request" + return None diff --git a/.venv/lib/python3.12/site-packages/fastapi/dependencies/utils.py b/.venv/lib/python3.12/site-packages/fastapi/dependencies/utils.py new file mode 100644 index 0000000..45e1ff3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/dependencies/utils.py @@ -0,0 +1,1021 @@ +import dataclasses +import inspect +import sys +from collections.abc import Coroutine, Mapping, Sequence +from contextlib import AsyncExitStack, contextmanager +from copy import copy, deepcopy +from dataclasses import dataclass +from typing import ( + Annotated, + Any, + Callable, + ForwardRef, + Optional, + Union, + cast, +) + +import anyio +from fastapi import params +from fastapi._compat import ( + ModelField, + RequiredParam, + Undefined, + _regenerate_error_with_loc, + copy_field_info, + create_body_model, + evaluate_forwardref, + field_annotation_is_scalar, + get_cached_model_fields, + get_missing_field_error, + is_bytes_field, + is_bytes_sequence_field, + is_scalar_field, + is_scalar_sequence_field, + is_sequence_field, + is_uploadfile_or_nonable_uploadfile_annotation, + is_uploadfile_sequence_annotation, + lenient_issubclass, + sequence_types, + serialize_sequence_value, + value_is_sequence, +) +from fastapi.background import BackgroundTasks +from fastapi.concurrency import ( + asynccontextmanager, + contextmanager_in_threadpool, +) +from fastapi.dependencies.models import Dependant +from fastapi.exceptions import DependencyScopeError +from fastapi.logger import logger +from fastapi.security.oauth2 import SecurityScopes +from fastapi.types import DependencyCacheKey +from fastapi.utils import create_model_field, get_path_param_names +from pydantic import BaseModel +from pydantic.fields import FieldInfo +from starlette.background import BackgroundTasks as StarletteBackgroundTasks +from starlette.concurrency import run_in_threadpool +from starlette.datastructures import ( + FormData, + Headers, + ImmutableMultiDict, + QueryParams, + UploadFile, +) +from starlette.requests import HTTPConnection, Request +from starlette.responses import Response +from starlette.websockets import WebSocket +from typing_extensions import Literal, get_args, get_origin + +multipart_not_installed_error = ( + 'Form data requires "python-multipart" to be installed. \n' + 'You can install "python-multipart" with: \n\n' + "pip install python-multipart\n" +) +multipart_incorrect_install_error = ( + 'Form data requires "python-multipart" to be installed. ' + 'It seems you installed "multipart" instead. 
\n' + 'You can remove "multipart" with: \n\n' + "pip uninstall multipart\n\n" + 'And then install "python-multipart" with: \n\n' + "pip install python-multipart\n" +) + + +def ensure_multipart_is_installed() -> None: + try: + from python_multipart import __version__ + + # Import an attribute that can be mocked/deleted in testing + assert __version__ > "0.0.12" + except (ImportError, AssertionError): + try: + # __version__ is available in both multiparts, and can be mocked + from multipart import __version__ # type: ignore[no-redef,import-untyped] + + assert __version__ + try: + # parse_options_header is only available in the right multipart + from multipart.multipart import ( # type: ignore[import-untyped] + parse_options_header, + ) + + assert parse_options_header + except ImportError: + logger.error(multipart_incorrect_install_error) + raise RuntimeError(multipart_incorrect_install_error) from None + except ImportError: + logger.error(multipart_not_installed_error) + raise RuntimeError(multipart_not_installed_error) from None + + +def get_parameterless_sub_dependant(*, depends: params.Depends, path: str) -> Dependant: + assert callable(depends.dependency), ( + "A parameter-less dependency must have a callable dependency" + ) + own_oauth_scopes: list[str] = [] + if isinstance(depends, params.Security) and depends.scopes: + own_oauth_scopes.extend(depends.scopes) + return get_dependant( + path=path, + call=depends.dependency, + scope=depends.scope, + own_oauth_scopes=own_oauth_scopes, + ) + + +def get_flat_dependant( + dependant: Dependant, + *, + skip_repeats: bool = False, + visited: Optional[list[DependencyCacheKey]] = None, + parent_oauth_scopes: Optional[list[str]] = None, +) -> Dependant: + if visited is None: + visited = [] + visited.append(dependant.cache_key) + use_parent_oauth_scopes = (parent_oauth_scopes or []) + ( + dependant.oauth_scopes or [] + ) + + flat_dependant = Dependant( + path_params=dependant.path_params.copy(), + query_params=dependant.query_params.copy(), + header_params=dependant.header_params.copy(), + cookie_params=dependant.cookie_params.copy(), + body_params=dependant.body_params.copy(), + name=dependant.name, + call=dependant.call, + request_param_name=dependant.request_param_name, + websocket_param_name=dependant.websocket_param_name, + http_connection_param_name=dependant.http_connection_param_name, + response_param_name=dependant.response_param_name, + background_tasks_param_name=dependant.background_tasks_param_name, + security_scopes_param_name=dependant.security_scopes_param_name, + own_oauth_scopes=dependant.own_oauth_scopes, + parent_oauth_scopes=use_parent_oauth_scopes, + use_cache=dependant.use_cache, + path=dependant.path, + scope=dependant.scope, + ) + for sub_dependant in dependant.dependencies: + if skip_repeats and sub_dependant.cache_key in visited: + continue + flat_sub = get_flat_dependant( + sub_dependant, + skip_repeats=skip_repeats, + visited=visited, + parent_oauth_scopes=flat_dependant.oauth_scopes, + ) + flat_dependant.dependencies.append(flat_sub) + flat_dependant.path_params.extend(flat_sub.path_params) + flat_dependant.query_params.extend(flat_sub.query_params) + flat_dependant.header_params.extend(flat_sub.header_params) + flat_dependant.cookie_params.extend(flat_sub.cookie_params) + flat_dependant.body_params.extend(flat_sub.body_params) + flat_dependant.dependencies.extend(flat_sub.dependencies) + + return flat_dependant + + +def _get_flat_fields_from_params(fields: list[ModelField]) -> list[ModelField]: + if not fields: + 
return fields + first_field = fields[0] + if len(fields) == 1 and lenient_issubclass(first_field.type_, BaseModel): + fields_to_extract = get_cached_model_fields(first_field.type_) + return fields_to_extract + return fields + + +def get_flat_params(dependant: Dependant) -> list[ModelField]: + flat_dependant = get_flat_dependant(dependant, skip_repeats=True) + path_params = _get_flat_fields_from_params(flat_dependant.path_params) + query_params = _get_flat_fields_from_params(flat_dependant.query_params) + header_params = _get_flat_fields_from_params(flat_dependant.header_params) + cookie_params = _get_flat_fields_from_params(flat_dependant.cookie_params) + return path_params + query_params + header_params + cookie_params + + +def _get_signature(call: Callable[..., Any]) -> inspect.Signature: + if sys.version_info >= (3, 10): + try: + signature = inspect.signature(call, eval_str=True) + except NameError: + # Handle type annotations with if TYPE_CHECKING, not used by FastAPI + # e.g. dependency return types + signature = inspect.signature(call) + else: + signature = inspect.signature(call) + return signature + + +def get_typed_signature(call: Callable[..., Any]) -> inspect.Signature: + signature = _get_signature(call) + unwrapped = inspect.unwrap(call) + globalns = getattr(unwrapped, "__globals__", {}) + typed_params = [ + inspect.Parameter( + name=param.name, + kind=param.kind, + default=param.default, + annotation=get_typed_annotation(param.annotation, globalns), + ) + for param in signature.parameters.values() + ] + typed_signature = inspect.Signature(typed_params) + return typed_signature + + +def get_typed_annotation(annotation: Any, globalns: dict[str, Any]) -> Any: + if isinstance(annotation, str): + annotation = ForwardRef(annotation) + annotation = evaluate_forwardref(annotation, globalns, globalns) + if annotation is type(None): + return None + return annotation + + +def get_typed_return_annotation(call: Callable[..., Any]) -> Any: + signature = _get_signature(call) + unwrapped = inspect.unwrap(call) + annotation = signature.return_annotation + + if annotation is inspect.Signature.empty: + return None + + globalns = getattr(unwrapped, "__globals__", {}) + return get_typed_annotation(annotation, globalns) + + +def get_dependant( + *, + path: str, + call: Callable[..., Any], + name: Optional[str] = None, + own_oauth_scopes: Optional[list[str]] = None, + parent_oauth_scopes: Optional[list[str]] = None, + use_cache: bool = True, + scope: Union[Literal["function", "request"], None] = None, +) -> Dependant: + dependant = Dependant( + call=call, + name=name, + path=path, + use_cache=use_cache, + scope=scope, + own_oauth_scopes=own_oauth_scopes, + parent_oauth_scopes=parent_oauth_scopes, + ) + current_scopes = (parent_oauth_scopes or []) + (own_oauth_scopes or []) + path_param_names = get_path_param_names(path) + endpoint_signature = get_typed_signature(call) + signature_params = endpoint_signature.parameters + for param_name, param in signature_params.items(): + is_path_param = param_name in path_param_names + param_details = analyze_param( + param_name=param_name, + annotation=param.annotation, + value=param.default, + is_path_param=is_path_param, + ) + if param_details.depends is not None: + assert param_details.depends.dependency + if ( + (dependant.is_gen_callable or dependant.is_async_gen_callable) + and dependant.computed_scope == "request" + and param_details.depends.scope == "function" + ): + assert dependant.call + raise DependencyScopeError( + f'The dependency 
"{dependant.call.__name__}" has a scope of ' + '"request", it cannot depend on dependencies with scope "function".' + ) + sub_own_oauth_scopes: list[str] = [] + if isinstance(param_details.depends, params.Security): + if param_details.depends.scopes: + sub_own_oauth_scopes = list(param_details.depends.scopes) + sub_dependant = get_dependant( + path=path, + call=param_details.depends.dependency, + name=param_name, + own_oauth_scopes=sub_own_oauth_scopes, + parent_oauth_scopes=current_scopes, + use_cache=param_details.depends.use_cache, + scope=param_details.depends.scope, + ) + dependant.dependencies.append(sub_dependant) + continue + if add_non_field_param_to_dependency( + param_name=param_name, + type_annotation=param_details.type_annotation, + dependant=dependant, + ): + assert param_details.field is None, ( + f"Cannot specify multiple FastAPI annotations for {param_name!r}" + ) + continue + assert param_details.field is not None + if isinstance(param_details.field.field_info, params.Body): + dependant.body_params.append(param_details.field) + else: + add_param_to_fields(field=param_details.field, dependant=dependant) + return dependant + + +def add_non_field_param_to_dependency( + *, param_name: str, type_annotation: Any, dependant: Dependant +) -> Optional[bool]: + if lenient_issubclass(type_annotation, Request): + dependant.request_param_name = param_name + return True + elif lenient_issubclass(type_annotation, WebSocket): + dependant.websocket_param_name = param_name + return True + elif lenient_issubclass(type_annotation, HTTPConnection): + dependant.http_connection_param_name = param_name + return True + elif lenient_issubclass(type_annotation, Response): + dependant.response_param_name = param_name + return True + elif lenient_issubclass(type_annotation, StarletteBackgroundTasks): + dependant.background_tasks_param_name = param_name + return True + elif lenient_issubclass(type_annotation, SecurityScopes): + dependant.security_scopes_param_name = param_name + return True + return None + + +@dataclass +class ParamDetails: + type_annotation: Any + depends: Optional[params.Depends] + field: Optional[ModelField] + + +def analyze_param( + *, + param_name: str, + annotation: Any, + value: Any, + is_path_param: bool, +) -> ParamDetails: + field_info = None + depends = None + type_annotation: Any = Any + use_annotation: Any = Any + if annotation is not inspect.Signature.empty: + use_annotation = annotation + type_annotation = annotation + # Extract Annotated info + if get_origin(use_annotation) is Annotated: + annotated_args = get_args(annotation) + type_annotation = annotated_args[0] + fastapi_annotations = [ + arg + for arg in annotated_args[1:] + if isinstance(arg, (FieldInfo, params.Depends)) + ] + fastapi_specific_annotations = [ + arg + for arg in fastapi_annotations + if isinstance( + arg, + ( + params.Param, + params.Body, + params.Depends, + ), + ) + ] + if fastapi_specific_annotations: + fastapi_annotation: Union[FieldInfo, params.Depends, None] = ( + fastapi_specific_annotations[-1] + ) + else: + fastapi_annotation = None + # Set default for Annotated FieldInfo + if isinstance(fastapi_annotation, FieldInfo): + # Copy `field_info` because we mutate `field_info.default` below. 
+ field_info = copy_field_info( + field_info=fastapi_annotation, # type: ignore[arg-type] + annotation=use_annotation, + ) + assert ( + field_info.default == Undefined or field_info.default == RequiredParam + ), ( + f"`{field_info.__class__.__name__}` default value cannot be set in" + f" `Annotated` for {param_name!r}. Set the default value with `=` instead." + ) + if value is not inspect.Signature.empty: + assert not is_path_param, "Path parameters cannot have default values" + field_info.default = value + else: + field_info.default = RequiredParam + # Get Annotated Depends + elif isinstance(fastapi_annotation, params.Depends): + depends = fastapi_annotation + # Get Depends from default value + if isinstance(value, params.Depends): + assert depends is None, ( + "Cannot specify `Depends` in `Annotated` and default value" + f" together for {param_name!r}" + ) + assert field_info is None, ( + "Cannot specify a FastAPI annotation in `Annotated` and `Depends` as a" + f" default value together for {param_name!r}" + ) + depends = value + # Get FieldInfo from default value + elif isinstance(value, FieldInfo): + assert field_info is None, ( + "Cannot specify FastAPI annotations in `Annotated` and default value" + f" together for {param_name!r}" + ) + field_info = value # type: ignore[assignment] + if isinstance(field_info, FieldInfo): + field_info.annotation = type_annotation + + # Get Depends from type annotation + if depends is not None and depends.dependency is None: + # Copy `depends` before mutating it + depends = copy(depends) + depends = dataclasses.replace(depends, dependency=type_annotation) + + # Handle non-param type annotations like Request + if lenient_issubclass( + type_annotation, + ( + Request, + WebSocket, + HTTPConnection, + Response, + StarletteBackgroundTasks, + SecurityScopes, + ), + ): + assert depends is None, f"Cannot specify `Depends` for type {type_annotation!r}" + assert field_info is None, ( + f"Cannot specify FastAPI annotation for type {type_annotation!r}" + ) + # Handle default assignations, neither field_info nor depends was not found in Annotated nor default value + elif field_info is None and depends is None: + default_value = value if value is not inspect.Signature.empty else RequiredParam + if is_path_param: + # We might check here that `default_value is RequiredParam`, but the fact is that the same + # parameter might sometimes be a path parameter and sometimes not. See + # `tests/test_infer_param_optionality.py` for an example. 
+ field_info = params.Path(annotation=use_annotation) + elif is_uploadfile_or_nonable_uploadfile_annotation( + type_annotation + ) or is_uploadfile_sequence_annotation(type_annotation): + field_info = params.File(annotation=use_annotation, default=default_value) + elif not field_annotation_is_scalar(annotation=type_annotation): + field_info = params.Body(annotation=use_annotation, default=default_value) + else: + field_info = params.Query(annotation=use_annotation, default=default_value) + + field = None + # It's a field_info, not a dependency + if field_info is not None: + # Handle field_info.in_ + if is_path_param: + assert isinstance(field_info, params.Path), ( + f"Cannot use `{field_info.__class__.__name__}` for path param" + f" {param_name!r}" + ) + elif ( + isinstance(field_info, params.Param) + and getattr(field_info, "in_", None) is None + ): + field_info.in_ = params.ParamTypes.query + use_annotation_from_field_info = use_annotation + if isinstance(field_info, params.Form): + ensure_multipart_is_installed() + if not field_info.alias and getattr(field_info, "convert_underscores", None): + alias = param_name.replace("_", "-") + else: + alias = field_info.alias or param_name + field_info.alias = alias + field = create_model_field( + name=param_name, + type_=use_annotation_from_field_info, + default=field_info.default, + alias=alias, + required=field_info.default in (RequiredParam, Undefined), + field_info=field_info, + ) + if is_path_param: + assert is_scalar_field(field=field), ( + "Path params must be of one of the supported types" + ) + elif isinstance(field_info, params.Query): + assert ( + is_scalar_field(field) + or is_scalar_sequence_field(field) + or ( + lenient_issubclass(field.type_, BaseModel) + # For Pydantic v1 + and getattr(field, "shape", 1) == 1 + ) + ) + + return ParamDetails(type_annotation=type_annotation, depends=depends, field=field) + + +def add_param_to_fields(*, field: ModelField, dependant: Dependant) -> None: + field_info = field.field_info + field_info_in = getattr(field_info, "in_", None) + if field_info_in == params.ParamTypes.path: + dependant.path_params.append(field) + elif field_info_in == params.ParamTypes.query: + dependant.query_params.append(field) + elif field_info_in == params.ParamTypes.header: + dependant.header_params.append(field) + else: + assert field_info_in == params.ParamTypes.cookie, ( + f"non-body parameters must be in path, query, header or cookie: {field.name}" + ) + dependant.cookie_params.append(field) + + +async def _solve_generator( + *, dependant: Dependant, stack: AsyncExitStack, sub_values: dict[str, Any] +) -> Any: + assert dependant.call + if dependant.is_async_gen_callable: + cm = asynccontextmanager(dependant.call)(**sub_values) + elif dependant.is_gen_callable: + cm = contextmanager_in_threadpool(contextmanager(dependant.call)(**sub_values)) + return await stack.enter_async_context(cm) + + +@dataclass +class SolvedDependency: + values: dict[str, Any] + errors: list[Any] + background_tasks: Optional[StarletteBackgroundTasks] + response: Response + dependency_cache: dict[DependencyCacheKey, Any] + + +async def solve_dependencies( + *, + request: Union[Request, WebSocket], + dependant: Dependant, + body: Optional[Union[dict[str, Any], FormData]] = None, + background_tasks: Optional[StarletteBackgroundTasks] = None, + response: Optional[Response] = None, + dependency_overrides_provider: Optional[Any] = None, + dependency_cache: Optional[dict[DependencyCacheKey, Any]] = None, + # TODO: remove this parameter later, no longer 
used, not removing it yet as some + # people might be monkey patching this function (although that's not supported) + async_exit_stack: AsyncExitStack, + embed_body_fields: bool, +) -> SolvedDependency: + request_astack = request.scope.get("fastapi_inner_astack") + assert isinstance(request_astack, AsyncExitStack), ( + "fastapi_inner_astack not found in request scope" + ) + function_astack = request.scope.get("fastapi_function_astack") + assert isinstance(function_astack, AsyncExitStack), ( + "fastapi_function_astack not found in request scope" + ) + values: dict[str, Any] = {} + errors: list[Any] = [] + if response is None: + response = Response() + del response.headers["content-length"] + response.status_code = None # type: ignore + if dependency_cache is None: + dependency_cache = {} + for sub_dependant in dependant.dependencies: + sub_dependant.call = cast(Callable[..., Any], sub_dependant.call) + call = sub_dependant.call + use_sub_dependant = sub_dependant + if ( + dependency_overrides_provider + and dependency_overrides_provider.dependency_overrides + ): + original_call = sub_dependant.call + call = getattr( + dependency_overrides_provider, "dependency_overrides", {} + ).get(original_call, original_call) + use_path: str = sub_dependant.path # type: ignore + use_sub_dependant = get_dependant( + path=use_path, + call=call, + name=sub_dependant.name, + parent_oauth_scopes=sub_dependant.oauth_scopes, + scope=sub_dependant.scope, + ) + + solved_result = await solve_dependencies( + request=request, + dependant=use_sub_dependant, + body=body, + background_tasks=background_tasks, + response=response, + dependency_overrides_provider=dependency_overrides_provider, + dependency_cache=dependency_cache, + async_exit_stack=async_exit_stack, + embed_body_fields=embed_body_fields, + ) + background_tasks = solved_result.background_tasks + if solved_result.errors: + errors.extend(solved_result.errors) + continue + if sub_dependant.use_cache and sub_dependant.cache_key in dependency_cache: + solved = dependency_cache[sub_dependant.cache_key] + elif ( + use_sub_dependant.is_gen_callable or use_sub_dependant.is_async_gen_callable + ): + use_astack = request_astack + if sub_dependant.scope == "function": + use_astack = function_astack + solved = await _solve_generator( + dependant=use_sub_dependant, + stack=use_astack, + sub_values=solved_result.values, + ) + elif use_sub_dependant.is_coroutine_callable: + solved = await call(**solved_result.values) + else: + solved = await run_in_threadpool(call, **solved_result.values) + if sub_dependant.name is not None: + values[sub_dependant.name] = solved + if sub_dependant.cache_key not in dependency_cache: + dependency_cache[sub_dependant.cache_key] = solved + path_values, path_errors = request_params_to_args( + dependant.path_params, request.path_params + ) + query_values, query_errors = request_params_to_args( + dependant.query_params, request.query_params + ) + header_values, header_errors = request_params_to_args( + dependant.header_params, request.headers + ) + cookie_values, cookie_errors = request_params_to_args( + dependant.cookie_params, request.cookies + ) + values.update(path_values) + values.update(query_values) + values.update(header_values) + values.update(cookie_values) + errors += path_errors + query_errors + header_errors + cookie_errors + if dependant.body_params: + ( + body_values, + body_errors, + ) = await request_body_to_args( # body_params checked above + body_fields=dependant.body_params, + received_body=body, + 
embed_body_fields=embed_body_fields, + ) + values.update(body_values) + errors.extend(body_errors) + if dependant.http_connection_param_name: + values[dependant.http_connection_param_name] = request + if dependant.request_param_name and isinstance(request, Request): + values[dependant.request_param_name] = request + elif dependant.websocket_param_name and isinstance(request, WebSocket): + values[dependant.websocket_param_name] = request + if dependant.background_tasks_param_name: + if background_tasks is None: + background_tasks = BackgroundTasks() + values[dependant.background_tasks_param_name] = background_tasks + if dependant.response_param_name: + values[dependant.response_param_name] = response + if dependant.security_scopes_param_name: + values[dependant.security_scopes_param_name] = SecurityScopes( + scopes=dependant.oauth_scopes + ) + return SolvedDependency( + values=values, + errors=errors, + background_tasks=background_tasks, + response=response, + dependency_cache=dependency_cache, + ) + + +def _validate_value_with_model_field( + *, field: ModelField, value: Any, values: dict[str, Any], loc: tuple[str, ...] +) -> tuple[Any, list[Any]]: + if value is None: + if field.required: + return None, [get_missing_field_error(loc=loc)] + else: + return deepcopy(field.default), [] + v_, errors_ = field.validate(value, values, loc=loc) + if isinstance(errors_, list): + new_errors = _regenerate_error_with_loc(errors=errors_, loc_prefix=()) + return None, new_errors + else: + return v_, [] + + +def _get_multidict_value( + field: ModelField, values: Mapping[str, Any], alias: Union[str, None] = None +) -> Any: + alias = alias or get_validation_alias(field) + if is_sequence_field(field) and isinstance(values, (ImmutableMultiDict, Headers)): + value = values.getlist(alias) + else: + value = values.get(alias, None) + if ( + value is None + or ( + isinstance(field.field_info, params.Form) + and isinstance(value, str) # For type checks + and value == "" + ) + or (is_sequence_field(field) and len(value) == 0) + ): + if field.required: + return + else: + return deepcopy(field.default) + return value + + +def request_params_to_args( + fields: Sequence[ModelField], + received_params: Union[Mapping[str, Any], QueryParams, Headers], +) -> tuple[dict[str, Any], list[Any]]: + values: dict[str, Any] = {} + errors: list[dict[str, Any]] = [] + + if not fields: + return values, errors + + first_field = fields[0] + fields_to_extract = fields + single_not_embedded_field = False + default_convert_underscores = True + if len(fields) == 1 and lenient_issubclass(first_field.type_, BaseModel): + fields_to_extract = get_cached_model_fields(first_field.type_) + single_not_embedded_field = True + # If headers are in a Pydantic model, the way to disable convert_underscores + # would be with Header(convert_underscores=False) at the Pydantic model level + default_convert_underscores = getattr( + first_field.field_info, "convert_underscores", True + ) + + params_to_process: dict[str, Any] = {} + + processed_keys = set() + + for field in fields_to_extract: + alias = None + if isinstance(received_params, Headers): + # Handle fields extracted from a Pydantic Model for a header, each field + # doesn't have a FieldInfo of type Header with the default convert_underscores=True + convert_underscores = getattr( + field.field_info, "convert_underscores", default_convert_underscores + ) + if convert_underscores: + alias = get_validation_alias(field) + if alias == field.name: + alias = alias.replace("_", "-") + value = 
_get_multidict_value(field, received_params, alias=alias) + if value is not None: + params_to_process[get_validation_alias(field)] = value + processed_keys.add(alias or get_validation_alias(field)) + + for key in received_params.keys(): + if key not in processed_keys: + if hasattr(received_params, "getlist"): + value = received_params.getlist(key) + if isinstance(value, list) and (len(value) == 1): + params_to_process[key] = value[0] + else: + params_to_process[key] = value + else: + params_to_process[key] = received_params.get(key) + + if single_not_embedded_field: + field_info = first_field.field_info + assert isinstance(field_info, params.Param), ( + "Params must be subclasses of Param" + ) + loc: tuple[str, ...] = (field_info.in_.value,) + v_, errors_ = _validate_value_with_model_field( + field=first_field, value=params_to_process, values=values, loc=loc + ) + return {first_field.name: v_}, errors_ + + for field in fields: + value = _get_multidict_value(field, received_params) + field_info = field.field_info + assert isinstance(field_info, params.Param), ( + "Params must be subclasses of Param" + ) + loc = (field_info.in_.value, get_validation_alias(field)) + v_, errors_ = _validate_value_with_model_field( + field=field, value=value, values=values, loc=loc + ) + if errors_: + errors.extend(errors_) + else: + values[field.name] = v_ + return values, errors + + +def is_union_of_base_models(field_type: Any) -> bool: + """Check if field type is a Union where all members are BaseModel subclasses.""" + from fastapi.types import UnionType + + origin = get_origin(field_type) + + # Check if it's a Union type (covers both typing.Union and types.UnionType in Python 3.10+) + if origin is not Union and origin is not UnionType: + return False + + union_args = get_args(field_type) + + for arg in union_args: + if not lenient_issubclass(arg, BaseModel): + return False + + return True + + +def _should_embed_body_fields(fields: list[ModelField]) -> bool: + if not fields: + return False + # More than one dependency could have the same field, it would show up as multiple + # fields but it's the same one, so count them by name + body_param_names_set = {field.name for field in fields} + # A top level field has to be a single field, not multiple + if len(body_param_names_set) > 1: + return True + first_field = fields[0] + # If it explicitly specifies it is embedded, it has to be embedded + if getattr(first_field.field_info, "embed", None): + return True + # If it's a Form (or File) field, it has to be a BaseModel (or a union of BaseModels) to be top level + # otherwise it has to be embedded, so that the key value pair can be extracted + if ( + isinstance(first_field.field_info, params.Form) + and not lenient_issubclass(first_field.type_, BaseModel) + and not is_union_of_base_models(first_field.type_) + ): + return True + return False + + +async def _extract_form_body( + body_fields: list[ModelField], + received_body: FormData, +) -> dict[str, Any]: + values = {} + + for field in body_fields: + value = _get_multidict_value(field, received_body) + field_info = field.field_info + if ( + isinstance(field_info, params.File) + and is_bytes_field(field) + and isinstance(value, UploadFile) + ): + value = await value.read() + elif ( + is_bytes_sequence_field(field) + and isinstance(field_info, params.File) + and value_is_sequence(value) + ): + # For types + assert isinstance(value, sequence_types) + results: list[Union[bytes, str]] = [] + + async def process_fn( + fn: Callable[[], Coroutine[Any, Any, Any]], + ) -> 
None: + result = await fn() + results.append(result) # noqa: B023 + + async with anyio.create_task_group() as tg: + for sub_value in value: + tg.start_soon(process_fn, sub_value.read) + value = serialize_sequence_value(field=field, value=results) + if value is not None: + values[get_validation_alias(field)] = value + field_aliases = {get_validation_alias(field) for field in body_fields} + for key in received_body.keys(): + if key not in field_aliases: + param_values = received_body.getlist(key) + if len(param_values) == 1: + values[key] = param_values[0] + else: + values[key] = param_values + return values + + +async def request_body_to_args( + body_fields: list[ModelField], + received_body: Optional[Union[dict[str, Any], FormData]], + embed_body_fields: bool, +) -> tuple[dict[str, Any], list[dict[str, Any]]]: + values: dict[str, Any] = {} + errors: list[dict[str, Any]] = [] + assert body_fields, "request_body_to_args() should be called with fields" + single_not_embedded_field = len(body_fields) == 1 and not embed_body_fields + first_field = body_fields[0] + body_to_process = received_body + + fields_to_extract: list[ModelField] = body_fields + + if ( + single_not_embedded_field + and lenient_issubclass(first_field.type_, BaseModel) + and isinstance(received_body, FormData) + ): + fields_to_extract = get_cached_model_fields(first_field.type_) + + if isinstance(received_body, FormData): + body_to_process = await _extract_form_body(fields_to_extract, received_body) + + if single_not_embedded_field: + loc: tuple[str, ...] = ("body",) + v_, errors_ = _validate_value_with_model_field( + field=first_field, value=body_to_process, values=values, loc=loc + ) + return {first_field.name: v_}, errors_ + for field in body_fields: + loc = ("body", get_validation_alias(field)) + value: Optional[Any] = None + if body_to_process is not None: + try: + value = body_to_process.get(get_validation_alias(field)) + # If the received body is a list, not a dict + except AttributeError: + errors.append(get_missing_field_error(loc)) + continue + v_, errors_ = _validate_value_with_model_field( + field=field, value=value, values=values, loc=loc + ) + if errors_: + errors.extend(errors_) + else: + values[field.name] = v_ + return values, errors + + +def get_body_field( + *, flat_dependant: Dependant, name: str, embed_body_fields: bool +) -> Optional[ModelField]: + """ + Get a ModelField representing the request body for a path operation, combining + all body parameters into a single field if necessary. + + Used to check if it's form data (with `isinstance(body_field, params.Form)`) + or JSON and to generate the JSON Schema for a request body. + + This is **not** used to validate/parse the request body, that's done with each + individual body parameter. 
+ """ + if not flat_dependant.body_params: + return None + first_param = flat_dependant.body_params[0] + if not embed_body_fields: + return first_param + model_name = "Body_" + name + BodyModel = create_body_model( + fields=flat_dependant.body_params, model_name=model_name + ) + required = any(True for f in flat_dependant.body_params if f.required) + BodyFieldInfo_kwargs: dict[str, Any] = { + "annotation": BodyModel, + "alias": "body", + } + if not required: + BodyFieldInfo_kwargs["default"] = None + if any(isinstance(f.field_info, params.File) for f in flat_dependant.body_params): + BodyFieldInfo: type[params.Body] = params.File + elif any(isinstance(f.field_info, params.Form) for f in flat_dependant.body_params): + BodyFieldInfo = params.Form + else: + BodyFieldInfo = params.Body + + body_param_media_types = [ + f.field_info.media_type + for f in flat_dependant.body_params + if isinstance(f.field_info, params.Body) + ] + if len(set(body_param_media_types)) == 1: + BodyFieldInfo_kwargs["media_type"] = body_param_media_types[0] + final_field = create_model_field( + name="body", + type_=BodyModel, + required=required, + alias="body", + field_info=BodyFieldInfo(**BodyFieldInfo_kwargs), + ) + return final_field + + +def get_validation_alias(field: ModelField) -> str: + va = getattr(field, "validation_alias", None) + return va or field.alias diff --git a/.venv/lib/python3.12/site-packages/fastapi/encoders.py b/.venv/lib/python3.12/site-packages/fastapi/encoders.py new file mode 100644 index 0000000..e8610c9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/encoders.py @@ -0,0 +1,346 @@ +import dataclasses +import datetime +from collections import defaultdict, deque +from decimal import Decimal +from enum import Enum +from ipaddress import ( + IPv4Address, + IPv4Interface, + IPv4Network, + IPv6Address, + IPv6Interface, + IPv6Network, +) +from pathlib import Path, PurePath +from re import Pattern +from types import GeneratorType +from typing import Annotated, Any, Callable, Optional, Union +from uuid import UUID + +from annotated_doc import Doc +from fastapi.exceptions import PydanticV1NotSupportedError +from fastapi.types import IncEx +from pydantic import BaseModel +from pydantic.color import Color +from pydantic.networks import AnyUrl, NameEmail +from pydantic.types import SecretBytes, SecretStr +from pydantic_core import PydanticUndefinedType + +from ._compat import ( + Url, + is_pydantic_v1_model_instance, +) + + +# Taken from Pydantic v1 as is +def isoformat(o: Union[datetime.date, datetime.time]) -> str: + return o.isoformat() + + +# Adapted from Pydantic v1 +# TODO: pv2 should this return strings instead? +def decimal_encoder(dec_value: Decimal) -> Union[int, float]: + """ + Encodes a Decimal as int if there's no exponent, otherwise float + + This is useful when we use ConstrainedDecimal to represent Numeric(x,0) + where an integer (but not int typed) is used. Encoding this as a float + results in failed round-tripping between encode and parse. + Our Id type is a prime example of this. 
+ + >>> decimal_encoder(Decimal("1.0")) + 1.0 + + >>> decimal_encoder(Decimal("1")) + 1 + + >>> decimal_encoder(Decimal("NaN")) + nan + """ + exponent = dec_value.as_tuple().exponent + if isinstance(exponent, int) and exponent >= 0: + return int(dec_value) + else: + return float(dec_value) + + +ENCODERS_BY_TYPE: dict[type[Any], Callable[[Any], Any]] = { + bytes: lambda o: o.decode(), + Color: str, + datetime.date: isoformat, + datetime.datetime: isoformat, + datetime.time: isoformat, + datetime.timedelta: lambda td: td.total_seconds(), + Decimal: decimal_encoder, + Enum: lambda o: o.value, + frozenset: list, + deque: list, + GeneratorType: list, + IPv4Address: str, + IPv4Interface: str, + IPv4Network: str, + IPv6Address: str, + IPv6Interface: str, + IPv6Network: str, + NameEmail: str, + Path: str, + Pattern: lambda o: o.pattern, + SecretBytes: str, + SecretStr: str, + set: list, + UUID: str, + Url: str, + AnyUrl: str, +} + + +def generate_encoders_by_class_tuples( + type_encoder_map: dict[Any, Callable[[Any], Any]], +) -> dict[Callable[[Any], Any], tuple[Any, ...]]: + encoders_by_class_tuples: dict[Callable[[Any], Any], tuple[Any, ...]] = defaultdict( + tuple + ) + for type_, encoder in type_encoder_map.items(): + encoders_by_class_tuples[encoder] += (type_,) + return encoders_by_class_tuples + + +encoders_by_class_tuples = generate_encoders_by_class_tuples(ENCODERS_BY_TYPE) + + +def jsonable_encoder( + obj: Annotated[ + Any, + Doc( + """ + The input object to convert to JSON. + """ + ), + ], + include: Annotated[ + Optional[IncEx], + Doc( + """ + Pydantic's `include` parameter, passed to Pydantic models to set the + fields to include. + """ + ), + ] = None, + exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Pydantic's `exclude` parameter, passed to Pydantic models to set the + fields to exclude. + """ + ), + ] = None, + by_alias: Annotated[ + bool, + Doc( + """ + Pydantic's `by_alias` parameter, passed to Pydantic models to define if + the output should use the alias names (when provided) or the Python + attribute names. In an API, if you set an alias, it's probably because you + want to use it in the result, so you probably want to leave this set to + `True`. + """ + ), + ] = True, + exclude_unset: Annotated[ + bool, + Doc( + """ + Pydantic's `exclude_unset` parameter, passed to Pydantic models to define + if it should exclude from the output the fields that were not explicitly + set (and that only had their default values). + """ + ), + ] = False, + exclude_defaults: Annotated[ + bool, + Doc( + """ + Pydantic's `exclude_defaults` parameter, passed to Pydantic models to define + if it should exclude from the output the fields that had the same default + value, even when they were explicitly set. + """ + ), + ] = False, + exclude_none: Annotated[ + bool, + Doc( + """ + Pydantic's `exclude_none` parameter, passed to Pydantic models to define + if it should exclude from the output any fields that have a `None` value. + """ + ), + ] = False, + custom_encoder: Annotated[ + Optional[dict[Any, Callable[[Any], Any]]], + Doc( + """ + Pydantic's `custom_encoder` parameter, passed to Pydantic models to define + a custom encoder. + """ + ), + ] = None, + sqlalchemy_safe: Annotated[ + bool, + Doc( + """ + Exclude from the output any fields that start with the name `_sa`. + + This is mainly a hack for compatibility with SQLAlchemy objects, they + store internal SQLAlchemy-specific state in attributes named with `_sa`, + and those objects can't (and shouldn't be) serialized to JSON. 
+ """ + ), + ] = True, +) -> Any: + """ + Convert any object to something that can be encoded in JSON. + + This is used internally by FastAPI to make sure anything you return can be + encoded as JSON before it is sent to the client. + + You can also use it yourself, for example to convert objects before saving them + in a database that supports only JSON. + + Read more about it in the + [FastAPI docs for JSON Compatible Encoder](https://fastapi.tiangolo.com/tutorial/encoder/). + """ + custom_encoder = custom_encoder or {} + if custom_encoder: + if type(obj) in custom_encoder: + return custom_encoder[type(obj)](obj) + else: + for encoder_type, encoder_instance in custom_encoder.items(): + if isinstance(obj, encoder_type): + return encoder_instance(obj) + if include is not None and not isinstance(include, (set, dict)): + include = set(include) + if exclude is not None and not isinstance(exclude, (set, dict)): + exclude = set(exclude) + if isinstance(obj, BaseModel): + obj_dict = obj.model_dump( + mode="json", + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_none=exclude_none, + exclude_defaults=exclude_defaults, + ) + return jsonable_encoder( + obj_dict, + exclude_none=exclude_none, + exclude_defaults=exclude_defaults, + sqlalchemy_safe=sqlalchemy_safe, + ) + if dataclasses.is_dataclass(obj): + assert not isinstance(obj, type) + obj_dict = dataclasses.asdict(obj) + return jsonable_encoder( + obj_dict, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + if isinstance(obj, Enum): + return obj.value + if isinstance(obj, PurePath): + return str(obj) + if isinstance(obj, (str, int, float, type(None))): + return obj + if isinstance(obj, PydanticUndefinedType): + return None + if isinstance(obj, dict): + encoded_dict = {} + allowed_keys = set(obj.keys()) + if include is not None: + allowed_keys &= set(include) + if exclude is not None: + allowed_keys -= set(exclude) + for key, value in obj.items(): + if ( + ( + not sqlalchemy_safe + or (not isinstance(key, str)) + or (not key.startswith("_sa")) + ) + and (value is not None or not exclude_none) + and key in allowed_keys + ): + encoded_key = jsonable_encoder( + key, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_none=exclude_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + encoded_value = jsonable_encoder( + value, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_none=exclude_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + encoded_dict[encoded_key] = encoded_value + return encoded_dict + if isinstance(obj, (list, set, frozenset, GeneratorType, tuple, deque)): + encoded_list = [] + for item in obj: + encoded_list.append( + jsonable_encoder( + item, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + ) + return encoded_list + + if type(obj) in ENCODERS_BY_TYPE: + return ENCODERS_BY_TYPE[type(obj)](obj) + for encoder, classes_tuple in encoders_by_class_tuples.items(): + if isinstance(obj, classes_tuple): + return encoder(obj) + if is_pydantic_v1_model_instance(obj): + raise PydanticV1NotSupportedError( + "pydantic.v1 models are no longer supported by FastAPI." 
+ f" Please update the model {obj!r}." + ) + try: + data = dict(obj) + except Exception as e: + errors: list[Exception] = [] + errors.append(e) + try: + data = vars(obj) + except Exception as e: + errors.append(e) + raise ValueError(errors) from e + return jsonable_encoder( + data, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) diff --git a/.venv/lib/python3.12/site-packages/fastapi/exception_handlers.py b/.venv/lib/python3.12/site-packages/fastapi/exception_handlers.py new file mode 100644 index 0000000..475dd7b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/exception_handlers.py @@ -0,0 +1,34 @@ +from fastapi.encoders import jsonable_encoder +from fastapi.exceptions import RequestValidationError, WebSocketRequestValidationError +from fastapi.utils import is_body_allowed_for_status_code +from fastapi.websockets import WebSocket +from starlette.exceptions import HTTPException +from starlette.requests import Request +from starlette.responses import JSONResponse, Response +from starlette.status import WS_1008_POLICY_VIOLATION + + +async def http_exception_handler(request: Request, exc: HTTPException) -> Response: + headers = getattr(exc, "headers", None) + if not is_body_allowed_for_status_code(exc.status_code): + return Response(status_code=exc.status_code, headers=headers) + return JSONResponse( + {"detail": exc.detail}, status_code=exc.status_code, headers=headers + ) + + +async def request_validation_exception_handler( + request: Request, exc: RequestValidationError +) -> JSONResponse: + return JSONResponse( + status_code=422, + content={"detail": jsonable_encoder(exc.errors())}, + ) + + +async def websocket_request_validation_exception_handler( + websocket: WebSocket, exc: WebSocketRequestValidationError +) -> None: + await websocket.close( + code=WS_1008_POLICY_VIOLATION, reason=jsonable_encoder(exc.errors()) + ) diff --git a/.venv/lib/python3.12/site-packages/fastapi/exceptions.py b/.venv/lib/python3.12/site-packages/fastapi/exceptions.py new file mode 100644 index 0000000..1a3abd8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/exceptions.py @@ -0,0 +1,246 @@ +from collections.abc import Sequence +from typing import Annotated, Any, Optional, TypedDict, Union + +from annotated_doc import Doc +from pydantic import BaseModel, create_model +from starlette.exceptions import HTTPException as StarletteHTTPException +from starlette.exceptions import WebSocketException as StarletteWebSocketException + + +class EndpointContext(TypedDict, total=False): + function: str + path: str + file: str + line: int + + +class HTTPException(StarletteHTTPException): + """ + An HTTP exception you can raise in your own code to show errors to the client. + + This is for client errors, invalid authentication, invalid data, etc. Not for server + errors in your code. + + Read more about it in the + [FastAPI docs for Handling Errors](https://fastapi.tiangolo.com/tutorial/handling-errors/). 
+ + ## Example + + ```python + from fastapi import FastAPI, HTTPException + + app = FastAPI() + + items = {"foo": "The Foo Wrestlers"} + + + @app.get("/items/{item_id}") + async def read_item(item_id: str): + if item_id not in items: + raise HTTPException(status_code=404, detail="Item not found") + return {"item": items[item_id]} + ``` + """ + + def __init__( + self, + status_code: Annotated[ + int, + Doc( + """ + HTTP status code to send to the client. + """ + ), + ], + detail: Annotated[ + Any, + Doc( + """ + Any data to be sent to the client in the `detail` key of the JSON + response. + """ + ), + ] = None, + headers: Annotated[ + Optional[dict[str, str]], + Doc( + """ + Any headers to send to the client in the response. + """ + ), + ] = None, + ) -> None: + super().__init__(status_code=status_code, detail=detail, headers=headers) + + +class WebSocketException(StarletteWebSocketException): + """ + A WebSocket exception you can raise in your own code to show errors to the client. + + This is for client errors, invalid authentication, invalid data, etc. Not for server + errors in your code. + + Read more about it in the + [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/). + + ## Example + + ```python + from typing import Annotated + + from fastapi import ( + Cookie, + FastAPI, + WebSocket, + WebSocketException, + status, + ) + + app = FastAPI() + + @app.websocket("/items/{item_id}/ws") + async def websocket_endpoint( + *, + websocket: WebSocket, + session: Annotated[str | None, Cookie()] = None, + item_id: str, + ): + if session is None: + raise WebSocketException(code=status.WS_1008_POLICY_VIOLATION) + await websocket.accept() + while True: + data = await websocket.receive_text() + await websocket.send_text(f"Session cookie is: {session}") + await websocket.send_text(f"Message text was: {data}, for item ID: {item_id}") + ``` + """ + + def __init__( + self, + code: Annotated[ + int, + Doc( + """ + A closing code from the + [valid codes defined in the specification](https://datatracker.ietf.org/doc/html/rfc6455#section-7.4.1). + """ + ), + ], + reason: Annotated[ + Union[str, None], + Doc( + """ + The reason to close the WebSocket connection. + + It is UTF-8-encoded data. The interpretation of the reason is up to the + application, it is not specified by the WebSocket specification. + + It could contain text that could be human-readable or interpretable + by the client code, etc. + """ + ), + ] = None, + ) -> None: + super().__init__(code=code, reason=reason) + + +RequestErrorModel: type[BaseModel] = create_model("Request") +WebSocketErrorModel: type[BaseModel] = create_model("WebSocket") + + +class FastAPIError(RuntimeError): + """ + A generic, FastAPI-specific error. + """ + + +class DependencyScopeError(FastAPIError): + """ + A dependency declared that it depends on another dependency with an invalid + (narrower) scope. 
+ """ + + +class ValidationException(Exception): + def __init__( + self, + errors: Sequence[Any], + *, + endpoint_ctx: Optional[EndpointContext] = None, + ) -> None: + self._errors = errors + self.endpoint_ctx = endpoint_ctx + + ctx = endpoint_ctx or {} + self.endpoint_function = ctx.get("function") + self.endpoint_path = ctx.get("path") + self.endpoint_file = ctx.get("file") + self.endpoint_line = ctx.get("line") + + def errors(self) -> Sequence[Any]: + return self._errors + + def _format_endpoint_context(self) -> str: + if not (self.endpoint_file and self.endpoint_line and self.endpoint_function): + if self.endpoint_path: + return f"\n Endpoint: {self.endpoint_path}" + return "" + + context = f'\n File "{self.endpoint_file}", line {self.endpoint_line}, in {self.endpoint_function}' + if self.endpoint_path: + context += f"\n {self.endpoint_path}" + return context + + def __str__(self) -> str: + message = f"{len(self._errors)} validation error{'s' if len(self._errors) != 1 else ''}:\n" + for err in self._errors: + message += f" {err}\n" + message += self._format_endpoint_context() + return message.rstrip() + + +class RequestValidationError(ValidationException): + def __init__( + self, + errors: Sequence[Any], + *, + body: Any = None, + endpoint_ctx: Optional[EndpointContext] = None, + ) -> None: + super().__init__(errors, endpoint_ctx=endpoint_ctx) + self.body = body + + +class WebSocketRequestValidationError(ValidationException): + def __init__( + self, + errors: Sequence[Any], + *, + endpoint_ctx: Optional[EndpointContext] = None, + ) -> None: + super().__init__(errors, endpoint_ctx=endpoint_ctx) + + +class ResponseValidationError(ValidationException): + def __init__( + self, + errors: Sequence[Any], + *, + body: Any = None, + endpoint_ctx: Optional[EndpointContext] = None, + ) -> None: + super().__init__(errors, endpoint_ctx=endpoint_ctx) + self.body = body + + +class PydanticV1NotSupportedError(FastAPIError): + """ + A pydantic.v1 model is used, which is no longer supported. 
+ """ + + +class FastAPIDeprecationWarning(UserWarning): + """ + A custom deprecation warning as DeprecationWarning is ignored + Ref: https://sethmlarson.dev/deprecations-via-warnings-dont-work-for-python-libraries + """ diff --git a/.venv/lib/python3.12/site-packages/fastapi/logger.py b/.venv/lib/python3.12/site-packages/fastapi/logger.py new file mode 100644 index 0000000..5b2c4ad --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/logger.py @@ -0,0 +1,3 @@ +import logging + +logger = logging.getLogger("fastapi") diff --git a/.venv/lib/python3.12/site-packages/fastapi/middleware/__init__.py b/.venv/lib/python3.12/site-packages/fastapi/middleware/__init__.py new file mode 100644 index 0000000..620296d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/middleware/__init__.py @@ -0,0 +1 @@ +from starlette.middleware import Middleware as Middleware diff --git a/.venv/lib/python3.12/site-packages/fastapi/middleware/asyncexitstack.py b/.venv/lib/python3.12/site-packages/fastapi/middleware/asyncexitstack.py new file mode 100644 index 0000000..4ce3f5a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/middleware/asyncexitstack.py @@ -0,0 +1,18 @@ +from contextlib import AsyncExitStack + +from starlette.types import ASGIApp, Receive, Scope, Send + + +# Used mainly to close files after the request is done, dependencies are closed +# in their own AsyncExitStack +class AsyncExitStackMiddleware: + def __init__( + self, app: ASGIApp, context_name: str = "fastapi_middleware_astack" + ) -> None: + self.app = app + self.context_name = context_name + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + async with AsyncExitStack() as stack: + scope[self.context_name] = stack + await self.app(scope, receive, send) diff --git a/.venv/lib/python3.12/site-packages/fastapi/middleware/cors.py b/.venv/lib/python3.12/site-packages/fastapi/middleware/cors.py new file mode 100644 index 0000000..8dfaad0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/middleware/cors.py @@ -0,0 +1 @@ +from starlette.middleware.cors import CORSMiddleware as CORSMiddleware # noqa diff --git a/.venv/lib/python3.12/site-packages/fastapi/middleware/gzip.py b/.venv/lib/python3.12/site-packages/fastapi/middleware/gzip.py new file mode 100644 index 0000000..bbeb2cc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/middleware/gzip.py @@ -0,0 +1 @@ +from starlette.middleware.gzip import GZipMiddleware as GZipMiddleware # noqa diff --git a/.venv/lib/python3.12/site-packages/fastapi/middleware/httpsredirect.py b/.venv/lib/python3.12/site-packages/fastapi/middleware/httpsredirect.py new file mode 100644 index 0000000..b7a3d8e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/middleware/httpsredirect.py @@ -0,0 +1,3 @@ +from starlette.middleware.httpsredirect import ( # noqa + HTTPSRedirectMiddleware as HTTPSRedirectMiddleware, +) diff --git a/.venv/lib/python3.12/site-packages/fastapi/middleware/trustedhost.py b/.venv/lib/python3.12/site-packages/fastapi/middleware/trustedhost.py new file mode 100644 index 0000000..08d7e03 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/middleware/trustedhost.py @@ -0,0 +1,3 @@ +from starlette.middleware.trustedhost import ( # noqa + TrustedHostMiddleware as TrustedHostMiddleware, +) diff --git a/.venv/lib/python3.12/site-packages/fastapi/middleware/wsgi.py b/.venv/lib/python3.12/site-packages/fastapi/middleware/wsgi.py new file mode 100644 index 0000000..c4c6a79 --- /dev/null +++ 
b/.venv/lib/python3.12/site-packages/fastapi/middleware/wsgi.py @@ -0,0 +1 @@ +from starlette.middleware.wsgi import WSGIMiddleware as WSGIMiddleware # noqa diff --git a/.venv/lib/python3.12/site-packages/fastapi/openapi/__init__.py b/.venv/lib/python3.12/site-packages/fastapi/openapi/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/fastapi/openapi/constants.py b/.venv/lib/python3.12/site-packages/fastapi/openapi/constants.py new file mode 100644 index 0000000..d724ee3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/openapi/constants.py @@ -0,0 +1,3 @@ +METHODS_WITH_BODY = {"GET", "HEAD", "POST", "PUT", "DELETE", "PATCH"} +REF_PREFIX = "#/components/schemas/" +REF_TEMPLATE = "#/components/schemas/{model}" diff --git a/.venv/lib/python3.12/site-packages/fastapi/openapi/docs.py b/.venv/lib/python3.12/site-packages/fastapi/openapi/docs.py new file mode 100644 index 0000000..82380f8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/openapi/docs.py @@ -0,0 +1,344 @@ +import json +from typing import Annotated, Any, Optional + +from annotated_doc import Doc +from fastapi.encoders import jsonable_encoder +from starlette.responses import HTMLResponse + +swagger_ui_default_parameters: Annotated[ + dict[str, Any], + Doc( + """ + Default configurations for Swagger UI. + + You can use it as a template to add any other configurations needed. + """ + ), +] = { + "dom_id": "#swagger-ui", + "layout": "BaseLayout", + "deepLinking": True, + "showExtensions": True, + "showCommonExtensions": True, +} + + +def get_swagger_ui_html( + *, + openapi_url: Annotated[ + str, + Doc( + """ + The OpenAPI URL that Swagger UI should load and use. + + This is normally done automatically by FastAPI using the default URL + `/openapi.json`. + """ + ), + ], + title: Annotated[ + str, + Doc( + """ + The HTML `` content, normally shown in the browser tab. + """ + ), + ], + swagger_js_url: Annotated[ + str, + Doc( + """ + The URL to use to load the Swagger UI JavaScript. + + It is normally set to a CDN URL. + """ + ), + ] = "https://cdn.jsdelivr.net/npm/swagger-ui-dist@5/swagger-ui-bundle.js", + swagger_css_url: Annotated[ + str, + Doc( + """ + The URL to use to load the Swagger UI CSS. + + It is normally set to a CDN URL. + """ + ), + ] = "https://cdn.jsdelivr.net/npm/swagger-ui-dist@5/swagger-ui.css", + swagger_favicon_url: Annotated[ + str, + Doc( + """ + The URL of the favicon to use. It is normally shown in the browser tab. + """ + ), + ] = "https://fastapi.tiangolo.com/img/favicon.png", + oauth2_redirect_url: Annotated[ + Optional[str], + Doc( + """ + The OAuth2 redirect URL, it is normally automatically handled by FastAPI. + """ + ), + ] = None, + init_oauth: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + A dictionary with Swagger UI OAuth2 initialization configurations. + """ + ), + ] = None, + swagger_ui_parameters: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + Configuration parameters for Swagger UI. + + It defaults to [swagger_ui_default_parameters][fastapi.openapi.docs.swagger_ui_default_parameters]. + """ + ), + ] = None, +) -> HTMLResponse: + """ + Generate and return the HTML that loads Swagger UI for the interactive + API docs (normally served at `/docs`). + + You would only call this function yourself if you needed to override some parts, + for example the URLs to use to load Swagger UI's JavaScript and CSS. 
+ + Read more about it in the + [FastAPI docs for Configure Swagger UI](https://fastapi.tiangolo.com/how-to/configure-swagger-ui/) + and the [FastAPI docs for Custom Docs UI Static Assets (Self-Hosting)](https://fastapi.tiangolo.com/how-to/custom-docs-ui-assets/). + """ + current_swagger_ui_parameters = swagger_ui_default_parameters.copy() + if swagger_ui_parameters: + current_swagger_ui_parameters.update(swagger_ui_parameters) + + html = f""" + <!DOCTYPE html> + <html> + <head> + <link type="text/css" rel="stylesheet" href="{swagger_css_url}"> + <link rel="shortcut icon" href="{swagger_favicon_url}"> + <title>{title} + + +
+
+ + + + + + """ + return HTMLResponse(html) + + +def get_redoc_html( + *, + openapi_url: Annotated[ + str, + Doc( + """ + The OpenAPI URL that ReDoc should load and use. + + This is normally done automatically by FastAPI using the default URL + `/openapi.json`. + """ + ), + ], + title: Annotated[ + str, + Doc( + """ + The HTML `` content, normally shown in the browser tab. + """ + ), + ], + redoc_js_url: Annotated[ + str, + Doc( + """ + The URL to use to load the ReDoc JavaScript. + + It is normally set to a CDN URL. + """ + ), + ] = "https://cdn.jsdelivr.net/npm/redoc@2/bundles/redoc.standalone.js", + redoc_favicon_url: Annotated[ + str, + Doc( + """ + The URL of the favicon to use. It is normally shown in the browser tab. + """ + ), + ] = "https://fastapi.tiangolo.com/img/favicon.png", + with_google_fonts: Annotated[ + bool, + Doc( + """ + Load and use Google Fonts. + """ + ), + ] = True, +) -> HTMLResponse: + """ + Generate and return the HTML response that loads ReDoc for the alternative + API docs (normally served at `/redoc`). + + You would only call this function yourself if you needed to override some parts, + for example the URLs to use to load ReDoc's JavaScript and CSS. + + Read more about it in the + [FastAPI docs for Custom Docs UI Static Assets (Self-Hosting)](https://fastapi.tiangolo.com/how-to/custom-docs-ui-assets/). + """ + html = f""" + <!DOCTYPE html> + <html> + <head> + <title>{title} + + + + """ + if with_google_fonts: + html += """ + + """ + html += f""" + + + + + + + + + + + """ + return HTMLResponse(html) + + +def get_swagger_ui_oauth2_redirect_html() -> HTMLResponse: + """ + Generate the HTML response with the OAuth2 redirection for Swagger UI. + + You normally don't need to use or change this. + """ + # copied from https://github.com/swagger-api/swagger-ui/blob/v4.14.0/dist/oauth2-redirect.html + html = """ + + + + Swagger UI: OAuth2 Redirect + + + + + + """ + return HTMLResponse(content=html) diff --git a/.venv/lib/python3.12/site-packages/fastapi/openapi/models.py b/.venv/lib/python3.12/site-packages/fastapi/openapi/models.py new file mode 100644 index 0000000..ac6a6d5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/openapi/models.py @@ -0,0 +1,438 @@ +from collections.abc import Iterable, Mapping +from enum import Enum +from typing import Annotated, Any, Callable, Optional, Union + +from fastapi._compat import with_info_plain_validator_function +from fastapi.logger import logger +from pydantic import ( + AnyUrl, + BaseModel, + Field, + GetJsonSchemaHandler, +) +from typing_extensions import Literal, TypedDict +from typing_extensions import deprecated as typing_deprecated + +try: + import email_validator + + assert email_validator # make autoflake ignore the unused import + from pydantic import EmailStr +except ImportError: # pragma: no cover + + class EmailStr(str): # type: ignore + @classmethod + def __get_validators__(cls) -> Iterable[Callable[..., Any]]: + yield cls.validate + + @classmethod + def validate(cls, v: Any) -> str: + logger.warning( + "email-validator not installed, email fields will be treated as str.\n" + "To install, run: pip install email-validator" + ) + return str(v) + + @classmethod + def _validate(cls, __input_value: Any, _: Any) -> str: + logger.warning( + "email-validator not installed, email fields will be treated as str.\n" + "To install, run: pip install email-validator" + ) + return str(__input_value) + + @classmethod + def __get_pydantic_json_schema__( + cls, core_schema: Mapping[str, Any], handler: 
GetJsonSchemaHandler + ) -> dict[str, Any]: + return {"type": "string", "format": "email"} + + @classmethod + def __get_pydantic_core_schema__( + cls, source: type[Any], handler: Callable[[Any], Mapping[str, Any]] + ) -> Mapping[str, Any]: + return with_info_plain_validator_function(cls._validate) + + +class BaseModelWithConfig(BaseModel): + model_config = {"extra": "allow"} + + +class Contact(BaseModelWithConfig): + name: Optional[str] = None + url: Optional[AnyUrl] = None + email: Optional[EmailStr] = None + + +class License(BaseModelWithConfig): + name: str + identifier: Optional[str] = None + url: Optional[AnyUrl] = None + + +class Info(BaseModelWithConfig): + title: str + summary: Optional[str] = None + description: Optional[str] = None + termsOfService: Optional[str] = None + contact: Optional[Contact] = None + license: Optional[License] = None + version: str + + +class ServerVariable(BaseModelWithConfig): + enum: Annotated[Optional[list[str]], Field(min_length=1)] = None + default: str + description: Optional[str] = None + + +class Server(BaseModelWithConfig): + url: Union[AnyUrl, str] + description: Optional[str] = None + variables: Optional[dict[str, ServerVariable]] = None + + +class Reference(BaseModel): + ref: str = Field(alias="$ref") + + +class Discriminator(BaseModel): + propertyName: str + mapping: Optional[dict[str, str]] = None + + +class XML(BaseModelWithConfig): + name: Optional[str] = None + namespace: Optional[str] = None + prefix: Optional[str] = None + attribute: Optional[bool] = None + wrapped: Optional[bool] = None + + +class ExternalDocumentation(BaseModelWithConfig): + description: Optional[str] = None + url: AnyUrl + + +# Ref JSON Schema 2020-12: https://json-schema.org/draft/2020-12/json-schema-validation#name-type +SchemaType = Literal[ + "array", "boolean", "integer", "null", "number", "object", "string" +] + + +class Schema(BaseModelWithConfig): + # Ref: JSON Schema 2020-12: https://json-schema.org/draft/2020-12/json-schema-core.html#name-the-json-schema-core-vocabu + # Core Vocabulary + schema_: Optional[str] = Field(default=None, alias="$schema") + vocabulary: Optional[str] = Field(default=None, alias="$vocabulary") + id: Optional[str] = Field(default=None, alias="$id") + anchor: Optional[str] = Field(default=None, alias="$anchor") + dynamicAnchor: Optional[str] = Field(default=None, alias="$dynamicAnchor") + ref: Optional[str] = Field(default=None, alias="$ref") + dynamicRef: Optional[str] = Field(default=None, alias="$dynamicRef") + defs: Optional[dict[str, "SchemaOrBool"]] = Field(default=None, alias="$defs") + comment: Optional[str] = Field(default=None, alias="$comment") + # Ref: JSON Schema 2020-12: https://json-schema.org/draft/2020-12/json-schema-core.html#name-a-vocabulary-for-applying-s + # A Vocabulary for Applying Subschemas + allOf: Optional[list["SchemaOrBool"]] = None + anyOf: Optional[list["SchemaOrBool"]] = None + oneOf: Optional[list["SchemaOrBool"]] = None + not_: Optional["SchemaOrBool"] = Field(default=None, alias="not") + if_: Optional["SchemaOrBool"] = Field(default=None, alias="if") + then: Optional["SchemaOrBool"] = None + else_: Optional["SchemaOrBool"] = Field(default=None, alias="else") + dependentSchemas: Optional[dict[str, "SchemaOrBool"]] = None + prefixItems: Optional[list["SchemaOrBool"]] = None + # TODO: uncomment and remove below when deprecating Pydantic v1 + # It generates a list of schemas for tuples, before prefixItems was available + # items: Optional["SchemaOrBool"] = None + items: Optional[Union["SchemaOrBool", 
list["SchemaOrBool"]]] = None + contains: Optional["SchemaOrBool"] = None + properties: Optional[dict[str, "SchemaOrBool"]] = None + patternProperties: Optional[dict[str, "SchemaOrBool"]] = None + additionalProperties: Optional["SchemaOrBool"] = None + propertyNames: Optional["SchemaOrBool"] = None + unevaluatedItems: Optional["SchemaOrBool"] = None + unevaluatedProperties: Optional["SchemaOrBool"] = None + # Ref: JSON Schema Validation 2020-12: https://json-schema.org/draft/2020-12/json-schema-validation.html#name-a-vocabulary-for-structural + # A Vocabulary for Structural Validation + type: Optional[Union[SchemaType, list[SchemaType]]] = None + enum: Optional[list[Any]] = None + const: Optional[Any] = None + multipleOf: Optional[float] = Field(default=None, gt=0) + maximum: Optional[float] = None + exclusiveMaximum: Optional[float] = None + minimum: Optional[float] = None + exclusiveMinimum: Optional[float] = None + maxLength: Optional[int] = Field(default=None, ge=0) + minLength: Optional[int] = Field(default=None, ge=0) + pattern: Optional[str] = None + maxItems: Optional[int] = Field(default=None, ge=0) + minItems: Optional[int] = Field(default=None, ge=0) + uniqueItems: Optional[bool] = None + maxContains: Optional[int] = Field(default=None, ge=0) + minContains: Optional[int] = Field(default=None, ge=0) + maxProperties: Optional[int] = Field(default=None, ge=0) + minProperties: Optional[int] = Field(default=None, ge=0) + required: Optional[list[str]] = None + dependentRequired: Optional[dict[str, set[str]]] = None + # Ref: JSON Schema Validation 2020-12: https://json-schema.org/draft/2020-12/json-schema-validation.html#name-vocabularies-for-semantic-c + # Vocabularies for Semantic Content With "format" + format: Optional[str] = None + # Ref: JSON Schema Validation 2020-12: https://json-schema.org/draft/2020-12/json-schema-validation.html#name-a-vocabulary-for-the-conten + # A Vocabulary for the Contents of String-Encoded Data + contentEncoding: Optional[str] = None + contentMediaType: Optional[str] = None + contentSchema: Optional["SchemaOrBool"] = None + # Ref: JSON Schema Validation 2020-12: https://json-schema.org/draft/2020-12/json-schema-validation.html#name-a-vocabulary-for-basic-meta + # A Vocabulary for Basic Meta-Data Annotations + title: Optional[str] = None + description: Optional[str] = None + default: Optional[Any] = None + deprecated: Optional[bool] = None + readOnly: Optional[bool] = None + writeOnly: Optional[bool] = None + examples: Optional[list[Any]] = None + # Ref: OpenAPI 3.1.0: https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.1.0.md#schema-object + # Schema Object + discriminator: Optional[Discriminator] = None + xml: Optional[XML] = None + externalDocs: Optional[ExternalDocumentation] = None + example: Annotated[ + Optional[Any], + typing_deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." + ), + ] = None + + +# Ref: https://json-schema.org/draft/2020-12/json-schema-core.html#name-json-schema-documents +# A JSON Schema MUST be an object or a boolean. 
+SchemaOrBool = Union[Schema, bool] + + +class Example(TypedDict, total=False): + summary: Optional[str] + description: Optional[str] + value: Optional[Any] + externalValue: Optional[AnyUrl] + + __pydantic_config__ = {"extra": "allow"} # type: ignore[misc] + + +class ParameterInType(Enum): + query = "query" + header = "header" + path = "path" + cookie = "cookie" + + +class Encoding(BaseModelWithConfig): + contentType: Optional[str] = None + headers: Optional[dict[str, Union["Header", Reference]]] = None + style: Optional[str] = None + explode: Optional[bool] = None + allowReserved: Optional[bool] = None + + +class MediaType(BaseModelWithConfig): + schema_: Optional[Union[Schema, Reference]] = Field(default=None, alias="schema") + example: Optional[Any] = None + examples: Optional[dict[str, Union[Example, Reference]]] = None + encoding: Optional[dict[str, Encoding]] = None + + +class ParameterBase(BaseModelWithConfig): + description: Optional[str] = None + required: Optional[bool] = None + deprecated: Optional[bool] = None + # Serialization rules for simple scenarios + style: Optional[str] = None + explode: Optional[bool] = None + allowReserved: Optional[bool] = None + schema_: Optional[Union[Schema, Reference]] = Field(default=None, alias="schema") + example: Optional[Any] = None + examples: Optional[dict[str, Union[Example, Reference]]] = None + # Serialization rules for more complex scenarios + content: Optional[dict[str, MediaType]] = None + + +class Parameter(ParameterBase): + name: str + in_: ParameterInType = Field(alias="in") + + +class Header(ParameterBase): + pass + + +class RequestBody(BaseModelWithConfig): + description: Optional[str] = None + content: dict[str, MediaType] + required: Optional[bool] = None + + +class Link(BaseModelWithConfig): + operationRef: Optional[str] = None + operationId: Optional[str] = None + parameters: Optional[dict[str, Union[Any, str]]] = None + requestBody: Optional[Union[Any, str]] = None + description: Optional[str] = None + server: Optional[Server] = None + + +class Response(BaseModelWithConfig): + description: str + headers: Optional[dict[str, Union[Header, Reference]]] = None + content: Optional[dict[str, MediaType]] = None + links: Optional[dict[str, Union[Link, Reference]]] = None + + +class Operation(BaseModelWithConfig): + tags: Optional[list[str]] = None + summary: Optional[str] = None + description: Optional[str] = None + externalDocs: Optional[ExternalDocumentation] = None + operationId: Optional[str] = None + parameters: Optional[list[Union[Parameter, Reference]]] = None + requestBody: Optional[Union[RequestBody, Reference]] = None + # Using Any for Specification Extensions + responses: Optional[dict[str, Union[Response, Any]]] = None + callbacks: Optional[dict[str, Union[dict[str, "PathItem"], Reference]]] = None + deprecated: Optional[bool] = None + security: Optional[list[dict[str, list[str]]]] = None + servers: Optional[list[Server]] = None + + +class PathItem(BaseModelWithConfig): + ref: Optional[str] = Field(default=None, alias="$ref") + summary: Optional[str] = None + description: Optional[str] = None + get: Optional[Operation] = None + put: Optional[Operation] = None + post: Optional[Operation] = None + delete: Optional[Operation] = None + options: Optional[Operation] = None + head: Optional[Operation] = None + patch: Optional[Operation] = None + trace: Optional[Operation] = None + servers: Optional[list[Server]] = None + parameters: Optional[list[Union[Parameter, Reference]]] = None + + +class SecuritySchemeType(Enum): + apiKey 
= "apiKey" + http = "http" + oauth2 = "oauth2" + openIdConnect = "openIdConnect" + + +class SecurityBase(BaseModelWithConfig): + type_: SecuritySchemeType = Field(alias="type") + description: Optional[str] = None + + +class APIKeyIn(Enum): + query = "query" + header = "header" + cookie = "cookie" + + +class APIKey(SecurityBase): + type_: SecuritySchemeType = Field(default=SecuritySchemeType.apiKey, alias="type") + in_: APIKeyIn = Field(alias="in") + name: str + + +class HTTPBase(SecurityBase): + type_: SecuritySchemeType = Field(default=SecuritySchemeType.http, alias="type") + scheme: str + + +class HTTPBearer(HTTPBase): + scheme: Literal["bearer"] = "bearer" + bearerFormat: Optional[str] = None + + +class OAuthFlow(BaseModelWithConfig): + refreshUrl: Optional[str] = None + scopes: dict[str, str] = {} + + +class OAuthFlowImplicit(OAuthFlow): + authorizationUrl: str + + +class OAuthFlowPassword(OAuthFlow): + tokenUrl: str + + +class OAuthFlowClientCredentials(OAuthFlow): + tokenUrl: str + + +class OAuthFlowAuthorizationCode(OAuthFlow): + authorizationUrl: str + tokenUrl: str + + +class OAuthFlows(BaseModelWithConfig): + implicit: Optional[OAuthFlowImplicit] = None + password: Optional[OAuthFlowPassword] = None + clientCredentials: Optional[OAuthFlowClientCredentials] = None + authorizationCode: Optional[OAuthFlowAuthorizationCode] = None + + +class OAuth2(SecurityBase): + type_: SecuritySchemeType = Field(default=SecuritySchemeType.oauth2, alias="type") + flows: OAuthFlows + + +class OpenIdConnect(SecurityBase): + type_: SecuritySchemeType = Field( + default=SecuritySchemeType.openIdConnect, alias="type" + ) + openIdConnectUrl: str + + +SecurityScheme = Union[APIKey, HTTPBase, OAuth2, OpenIdConnect, HTTPBearer] + + +class Components(BaseModelWithConfig): + schemas: Optional[dict[str, Union[Schema, Reference]]] = None + responses: Optional[dict[str, Union[Response, Reference]]] = None + parameters: Optional[dict[str, Union[Parameter, Reference]]] = None + examples: Optional[dict[str, Union[Example, Reference]]] = None + requestBodies: Optional[dict[str, Union[RequestBody, Reference]]] = None + headers: Optional[dict[str, Union[Header, Reference]]] = None + securitySchemes: Optional[dict[str, Union[SecurityScheme, Reference]]] = None + links: Optional[dict[str, Union[Link, Reference]]] = None + # Using Any for Specification Extensions + callbacks: Optional[dict[str, Union[dict[str, PathItem], Reference, Any]]] = None + pathItems: Optional[dict[str, Union[PathItem, Reference]]] = None + + +class Tag(BaseModelWithConfig): + name: str + description: Optional[str] = None + externalDocs: Optional[ExternalDocumentation] = None + + +class OpenAPI(BaseModelWithConfig): + openapi: str + info: Info + jsonSchemaDialect: Optional[str] = None + servers: Optional[list[Server]] = None + # Using Any for Specification Extensions + paths: Optional[dict[str, Union[PathItem, Any]]] = None + webhooks: Optional[dict[str, Union[PathItem, Reference]]] = None + components: Optional[Components] = None + security: Optional[list[dict[str, list[str]]]] = None + tags: Optional[list[Tag]] = None + externalDocs: Optional[ExternalDocumentation] = None + + +Schema.model_rebuild() +Operation.model_rebuild() +Encoding.model_rebuild() diff --git a/.venv/lib/python3.12/site-packages/fastapi/openapi/utils.py b/.venv/lib/python3.12/site-packages/fastapi/openapi/utils.py new file mode 100644 index 0000000..75ff261 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/openapi/utils.py @@ -0,0 +1,567 @@ +import http.client 
+import inspect +import warnings +from collections.abc import Sequence +from typing import Any, Optional, Union, cast + +from fastapi import routing +from fastapi._compat import ( + ModelField, + Undefined, + get_compat_model_name_map, + get_definitions, + get_schema_from_model_field, + lenient_issubclass, +) +from fastapi.datastructures import DefaultPlaceholder +from fastapi.dependencies.models import Dependant +from fastapi.dependencies.utils import ( + _get_flat_fields_from_params, + get_flat_dependant, + get_flat_params, + get_validation_alias, +) +from fastapi.encoders import jsonable_encoder +from fastapi.exceptions import FastAPIDeprecationWarning +from fastapi.openapi.constants import METHODS_WITH_BODY, REF_PREFIX +from fastapi.openapi.models import OpenAPI +from fastapi.params import Body, ParamTypes +from fastapi.responses import Response +from fastapi.types import ModelNameMap +from fastapi.utils import ( + deep_dict_update, + generate_operation_id_for_path, + is_body_allowed_for_status_code, +) +from pydantic import BaseModel +from starlette.responses import JSONResponse +from starlette.routing import BaseRoute +from typing_extensions import Literal + +validation_error_definition = { + "title": "ValidationError", + "type": "object", + "properties": { + "loc": { + "title": "Location", + "type": "array", + "items": {"anyOf": [{"type": "string"}, {"type": "integer"}]}, + }, + "msg": {"title": "Message", "type": "string"}, + "type": {"title": "Error Type", "type": "string"}, + }, + "required": ["loc", "msg", "type"], +} + +validation_error_response_definition = { + "title": "HTTPValidationError", + "type": "object", + "properties": { + "detail": { + "title": "Detail", + "type": "array", + "items": {"$ref": REF_PREFIX + "ValidationError"}, + } + }, +} + +status_code_ranges: dict[str, str] = { + "1XX": "Information", + "2XX": "Success", + "3XX": "Redirection", + "4XX": "Client Error", + "5XX": "Server Error", + "DEFAULT": "Default Response", +} + + +def get_openapi_security_definitions( + flat_dependant: Dependant, +) -> tuple[dict[str, Any], list[dict[str, Any]]]: + security_definitions = {} + # Use a dict to merge scopes for same security scheme + operation_security_dict: dict[str, list[str]] = {} + for security_dependency in flat_dependant._security_dependencies: + security_definition = jsonable_encoder( + security_dependency._security_scheme.model, + by_alias=True, + exclude_none=True, + ) + security_name = security_dependency._security_scheme.scheme_name + security_definitions[security_name] = security_definition + # Merge scopes for the same security scheme + if security_name not in operation_security_dict: + operation_security_dict[security_name] = [] + for scope in security_dependency.oauth_scopes or []: + if scope not in operation_security_dict[security_name]: + operation_security_dict[security_name].append(scope) + operation_security = [ + {name: scopes} for name, scopes in operation_security_dict.items() + ] + return security_definitions, operation_security + + +def _get_openapi_operation_parameters( + *, + dependant: Dependant, + model_name_map: ModelNameMap, + field_mapping: dict[ + tuple[ModelField, Literal["validation", "serialization"]], dict[str, Any] + ], + separate_input_output_schemas: bool = True, +) -> list[dict[str, Any]]: + parameters = [] + flat_dependant = get_flat_dependant(dependant, skip_repeats=True) + path_params = _get_flat_fields_from_params(flat_dependant.path_params) + query_params = _get_flat_fields_from_params(flat_dependant.query_params) + 
header_params = _get_flat_fields_from_params(flat_dependant.header_params) + cookie_params = _get_flat_fields_from_params(flat_dependant.cookie_params) + parameter_groups = [ + (ParamTypes.path, path_params), + (ParamTypes.query, query_params), + (ParamTypes.header, header_params), + (ParamTypes.cookie, cookie_params), + ] + default_convert_underscores = True + if len(flat_dependant.header_params) == 1: + first_field = flat_dependant.header_params[0] + if lenient_issubclass(first_field.type_, BaseModel): + default_convert_underscores = getattr( + first_field.field_info, "convert_underscores", True + ) + for param_type, param_group in parameter_groups: + for param in param_group: + field_info = param.field_info + # field_info = cast(Param, field_info) + if not getattr(field_info, "include_in_schema", True): + continue + param_schema = get_schema_from_model_field( + field=param, + model_name_map=model_name_map, + field_mapping=field_mapping, + separate_input_output_schemas=separate_input_output_schemas, + ) + name = get_validation_alias(param) + convert_underscores = getattr( + param.field_info, + "convert_underscores", + default_convert_underscores, + ) + if ( + param_type == ParamTypes.header + and name == param.name + and convert_underscores + ): + name = param.name.replace("_", "-") + + parameter = { + "name": name, + "in": param_type.value, + "required": param.required, + "schema": param_schema, + } + if field_info.description: + parameter["description"] = field_info.description + openapi_examples = getattr(field_info, "openapi_examples", None) + example = getattr(field_info, "example", None) + if openapi_examples: + parameter["examples"] = jsonable_encoder(openapi_examples) + elif example != Undefined: + parameter["example"] = jsonable_encoder(example) + if getattr(field_info, "deprecated", None): + parameter["deprecated"] = True + parameters.append(parameter) + return parameters + + +def get_openapi_operation_request_body( + *, + body_field: Optional[ModelField], + model_name_map: ModelNameMap, + field_mapping: dict[ + tuple[ModelField, Literal["validation", "serialization"]], dict[str, Any] + ], + separate_input_output_schemas: bool = True, +) -> Optional[dict[str, Any]]: + if not body_field: + return None + assert isinstance(body_field, ModelField) + body_schema = get_schema_from_model_field( + field=body_field, + model_name_map=model_name_map, + field_mapping=field_mapping, + separate_input_output_schemas=separate_input_output_schemas, + ) + field_info = cast(Body, body_field.field_info) + request_media_type = field_info.media_type + required = body_field.required + request_body_oai: dict[str, Any] = {} + if required: + request_body_oai["required"] = required + request_media_content: dict[str, Any] = {"schema": body_schema} + if field_info.openapi_examples: + request_media_content["examples"] = jsonable_encoder( + field_info.openapi_examples + ) + elif field_info.example != Undefined: + request_media_content["example"] = jsonable_encoder(field_info.example) + request_body_oai["content"] = {request_media_type: request_media_content} + return request_body_oai + + +def generate_operation_id( + *, route: routing.APIRoute, method: str +) -> str: # pragma: nocover + warnings.warn( + message="fastapi.openapi.utils.generate_operation_id() was deprecated, " + "it is not used internally, and will be removed soon", + category=FastAPIDeprecationWarning, + stacklevel=2, + ) + if route.operation_id: + return route.operation_id + path: str = route.path_format + return 
generate_operation_id_for_path(name=route.name, path=path, method=method) + + +def generate_operation_summary(*, route: routing.APIRoute, method: str) -> str: + if route.summary: + return route.summary + return route.name.replace("_", " ").title() + + +def get_openapi_operation_metadata( + *, route: routing.APIRoute, method: str, operation_ids: set[str] +) -> dict[str, Any]: + operation: dict[str, Any] = {} + if route.tags: + operation["tags"] = route.tags + operation["summary"] = generate_operation_summary(route=route, method=method) + if route.description: + operation["description"] = route.description + operation_id = route.operation_id or route.unique_id + if operation_id in operation_ids: + message = ( + f"Duplicate Operation ID {operation_id} for function " + + f"{route.endpoint.__name__}" + ) + file_name = getattr(route.endpoint, "__globals__", {}).get("__file__") + if file_name: + message += f" at {file_name}" + warnings.warn(message, stacklevel=1) + operation_ids.add(operation_id) + operation["operationId"] = operation_id + if route.deprecated: + operation["deprecated"] = route.deprecated + return operation + + +def get_openapi_path( + *, + route: routing.APIRoute, + operation_ids: set[str], + model_name_map: ModelNameMap, + field_mapping: dict[ + tuple[ModelField, Literal["validation", "serialization"]], dict[str, Any] + ], + separate_input_output_schemas: bool = True, +) -> tuple[dict[str, Any], dict[str, Any], dict[str, Any]]: + path = {} + security_schemes: dict[str, Any] = {} + definitions: dict[str, Any] = {} + assert route.methods is not None, "Methods must be a list" + if isinstance(route.response_class, DefaultPlaceholder): + current_response_class: type[Response] = route.response_class.value + else: + current_response_class = route.response_class + assert current_response_class, "A response class is needed to generate OpenAPI" + route_response_media_type: Optional[str] = current_response_class.media_type + if route.include_in_schema: + for method in route.methods: + operation = get_openapi_operation_metadata( + route=route, method=method, operation_ids=operation_ids + ) + parameters: list[dict[str, Any]] = [] + flat_dependant = get_flat_dependant(route.dependant, skip_repeats=True) + security_definitions, operation_security = get_openapi_security_definitions( + flat_dependant=flat_dependant + ) + if operation_security: + operation.setdefault("security", []).extend(operation_security) + if security_definitions: + security_schemes.update(security_definitions) + operation_parameters = _get_openapi_operation_parameters( + dependant=route.dependant, + model_name_map=model_name_map, + field_mapping=field_mapping, + separate_input_output_schemas=separate_input_output_schemas, + ) + parameters.extend(operation_parameters) + if parameters: + all_parameters = { + (param["in"], param["name"]): param for param in parameters + } + required_parameters = { + (param["in"], param["name"]): param + for param in parameters + if param.get("required") + } + # Make sure required definitions of the same parameter take precedence + # over non-required definitions + all_parameters.update(required_parameters) + operation["parameters"] = list(all_parameters.values()) + if method in METHODS_WITH_BODY: + request_body_oai = get_openapi_operation_request_body( + body_field=route.body_field, + model_name_map=model_name_map, + field_mapping=field_mapping, + separate_input_output_schemas=separate_input_output_schemas, + ) + if request_body_oai: + operation["requestBody"] = request_body_oai + if 
route.callbacks: + callbacks = {} + for callback in route.callbacks: + if isinstance(callback, routing.APIRoute): + ( + cb_path, + cb_security_schemes, + cb_definitions, + ) = get_openapi_path( + route=callback, + operation_ids=operation_ids, + model_name_map=model_name_map, + field_mapping=field_mapping, + separate_input_output_schemas=separate_input_output_schemas, + ) + callbacks[callback.name] = {callback.path: cb_path} + operation["callbacks"] = callbacks + if route.status_code is not None: + status_code = str(route.status_code) + else: + # It would probably make more sense for all response classes to have an + # explicit default status_code, and to extract it from them, instead of + # doing this inspection tricks, that would probably be in the future + # TODO: probably make status_code a default class attribute for all + # responses in Starlette + response_signature = inspect.signature(current_response_class.__init__) + status_code_param = response_signature.parameters.get("status_code") + if status_code_param is not None: + if isinstance(status_code_param.default, int): + status_code = str(status_code_param.default) + operation.setdefault("responses", {}).setdefault(status_code, {})[ + "description" + ] = route.response_description + if route_response_media_type and is_body_allowed_for_status_code( + route.status_code + ): + response_schema = {"type": "string"} + if lenient_issubclass(current_response_class, JSONResponse): + if route.response_field: + response_schema = get_schema_from_model_field( + field=route.response_field, + model_name_map=model_name_map, + field_mapping=field_mapping, + separate_input_output_schemas=separate_input_output_schemas, + ) + else: + response_schema = {} + operation.setdefault("responses", {}).setdefault( + status_code, {} + ).setdefault("content", {}).setdefault(route_response_media_type, {})[ + "schema" + ] = response_schema + if route.responses: + operation_responses = operation.setdefault("responses", {}) + for ( + additional_status_code, + additional_response, + ) in route.responses.items(): + process_response = additional_response.copy() + process_response.pop("model", None) + status_code_key = str(additional_status_code).upper() + if status_code_key == "DEFAULT": + status_code_key = "default" + openapi_response = operation_responses.setdefault( + status_code_key, {} + ) + assert isinstance(process_response, dict), ( + "An additional response must be a dict" + ) + field = route.response_fields.get(additional_status_code) + additional_field_schema: Optional[dict[str, Any]] = None + if field: + additional_field_schema = get_schema_from_model_field( + field=field, + model_name_map=model_name_map, + field_mapping=field_mapping, + separate_input_output_schemas=separate_input_output_schemas, + ) + media_type = route_response_media_type or "application/json" + additional_schema = ( + process_response.setdefault("content", {}) + .setdefault(media_type, {}) + .setdefault("schema", {}) + ) + deep_dict_update(additional_schema, additional_field_schema) + status_text: Optional[str] = status_code_ranges.get( + str(additional_status_code).upper() + ) or http.client.responses.get(int(additional_status_code)) + description = ( + process_response.get("description") + or openapi_response.get("description") + or status_text + or "Additional Response" + ) + deep_dict_update(openapi_response, process_response) + openapi_response["description"] = description + http422 = "422" + all_route_params = get_flat_params(route.dependant) + if (all_route_params or 
route.body_field) and not any( + status in operation["responses"] + for status in [http422, "4XX", "default"] + ): + operation["responses"][http422] = { + "description": "Validation Error", + "content": { + "application/json": { + "schema": {"$ref": REF_PREFIX + "HTTPValidationError"} + } + }, + } + if "ValidationError" not in definitions: + definitions.update( + { + "ValidationError": validation_error_definition, + "HTTPValidationError": validation_error_response_definition, + } + ) + if route.openapi_extra: + deep_dict_update(operation, route.openapi_extra) + path[method.lower()] = operation + return path, security_schemes, definitions + + +def get_fields_from_routes( + routes: Sequence[BaseRoute], +) -> list[ModelField]: + body_fields_from_routes: list[ModelField] = [] + responses_from_routes: list[ModelField] = [] + request_fields_from_routes: list[ModelField] = [] + callback_flat_models: list[ModelField] = [] + for route in routes: + if getattr(route, "include_in_schema", None) and isinstance( + route, routing.APIRoute + ): + if route.body_field: + assert isinstance(route.body_field, ModelField), ( + "A request body must be a Pydantic Field" + ) + body_fields_from_routes.append(route.body_field) + if route.response_field: + responses_from_routes.append(route.response_field) + if route.response_fields: + responses_from_routes.extend(route.response_fields.values()) + if route.callbacks: + callback_flat_models.extend(get_fields_from_routes(route.callbacks)) + params = get_flat_params(route.dependant) + request_fields_from_routes.extend(params) + + flat_models = callback_flat_models + list( + body_fields_from_routes + responses_from_routes + request_fields_from_routes + ) + return flat_models + + +def get_openapi( + *, + title: str, + version: str, + openapi_version: str = "3.1.0", + summary: Optional[str] = None, + description: Optional[str] = None, + routes: Sequence[BaseRoute], + webhooks: Optional[Sequence[BaseRoute]] = None, + tags: Optional[list[dict[str, Any]]] = None, + servers: Optional[list[dict[str, Union[str, Any]]]] = None, + terms_of_service: Optional[str] = None, + contact: Optional[dict[str, Union[str, Any]]] = None, + license_info: Optional[dict[str, Union[str, Any]]] = None, + separate_input_output_schemas: bool = True, + external_docs: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + info: dict[str, Any] = {"title": title, "version": version} + if summary: + info["summary"] = summary + if description: + info["description"] = description + if terms_of_service: + info["termsOfService"] = terms_of_service + if contact: + info["contact"] = contact + if license_info: + info["license"] = license_info + output: dict[str, Any] = {"openapi": openapi_version, "info": info} + if servers: + output["servers"] = servers + components: dict[str, dict[str, Any]] = {} + paths: dict[str, dict[str, Any]] = {} + webhook_paths: dict[str, dict[str, Any]] = {} + operation_ids: set[str] = set() + all_fields = get_fields_from_routes(list(routes or []) + list(webhooks or [])) + model_name_map = get_compat_model_name_map(all_fields) + field_mapping, definitions = get_definitions( + fields=all_fields, + model_name_map=model_name_map, + separate_input_output_schemas=separate_input_output_schemas, + ) + for route in routes or []: + if isinstance(route, routing.APIRoute): + result = get_openapi_path( + route=route, + operation_ids=operation_ids, + model_name_map=model_name_map, + field_mapping=field_mapping, + separate_input_output_schemas=separate_input_output_schemas, + ) + if result: + 
path, security_schemes, path_definitions = result + if path: + paths.setdefault(route.path_format, {}).update(path) + if security_schemes: + components.setdefault("securitySchemes", {}).update( + security_schemes + ) + if path_definitions: + definitions.update(path_definitions) + for webhook in webhooks or []: + if isinstance(webhook, routing.APIRoute): + result = get_openapi_path( + route=webhook, + operation_ids=operation_ids, + model_name_map=model_name_map, + field_mapping=field_mapping, + separate_input_output_schemas=separate_input_output_schemas, + ) + if result: + path, security_schemes, path_definitions = result + if path: + webhook_paths.setdefault(webhook.path_format, {}).update(path) + if security_schemes: + components.setdefault("securitySchemes", {}).update( + security_schemes + ) + if path_definitions: + definitions.update(path_definitions) + if definitions: + components["schemas"] = {k: definitions[k] for k in sorted(definitions)} + if components: + output["components"] = components + output["paths"] = paths + if webhook_paths: + output["webhooks"] = webhook_paths + if tags: + output["tags"] = tags + if external_docs: + output["externalDocs"] = external_docs + return jsonable_encoder(OpenAPI(**output), by_alias=True, exclude_none=True) # type: ignore diff --git a/.venv/lib/python3.12/site-packages/fastapi/param_functions.py b/.venv/lib/python3.12/site-packages/fastapi/param_functions.py new file mode 100644 index 0000000..0834fd7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/param_functions.py @@ -0,0 +1,2369 @@ +from collections.abc import Sequence +from typing import Annotated, Any, Callable, Optional, Union + +from annotated_doc import Doc +from fastapi import params +from fastapi._compat import Undefined +from fastapi.openapi.models import Example +from pydantic import AliasChoices, AliasPath +from typing_extensions import Literal, deprecated + +_Unset: Any = Undefined + + +def Path( # noqa: N802 + default: Annotated[ + Any, + Doc( + """ + Default value if the parameter field is not set. + + This doesn't affect `Path` parameters as the value is always required. + The parameter is available only for compatibility. + """ + ), + ] = ..., + *, + default_factory: Annotated[ + Union[Callable[[], Any], None], + Doc( + """ + A callable to generate the default value. + + This doesn't affect `Path` parameters as the value is always required. + The parameter is available only for compatibility. + """ + ), + ] = _Unset, + alias: Annotated[ + Optional[str], + Doc( + """ + An alternative name for the parameter field. + + This will be used to extract the data and for the generated OpenAPI. + It is particularly useful when you can't use the name you want because it + is a Python reserved keyword or similar. + """ + ), + ] = None, + alias_priority: Annotated[ + Union[int, None], + Doc( + """ + Priority of the alias. This affects whether an alias generator is used. + """ + ), + ] = _Unset, + validation_alias: Annotated[ + Union[str, AliasPath, AliasChoices, None], + Doc( + """ + 'Whitelist' validation step. The parameter field will be the single one + allowed by the alias or set of aliases defined. + """ + ), + ] = None, + serialization_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Blacklist' validation step. The vanilla parameter field will be the + single one of the alias' or set of aliases' fields and all the other + fields will be ignored at serialization time. + """ + ), + ] = None, + title: Annotated[ + Optional[str], + Doc( + """ + Human-readable title. 
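Referring back to get_openapi() above: the documented way to customize the generated schema is to call it yourself and cache the result on the application; a sketch of that pattern (the title, version, and summary values are placeholders).

```python
# Sketch: override FastAPI's schema generation by calling get_openapi() directly
# and caching the result on app.openapi_schema.
from fastapi import FastAPI
from fastapi.openapi.utils import get_openapi

app = FastAPI()


def custom_openapi():
    if app.openapi_schema:
        return app.openapi_schema
    app.openapi_schema = get_openapi(
        title="Custom title",  # placeholder values
        version="2.5.0",
        summary="A customized OpenAPI schema",
        routes=app.routes,
    )
    return app.openapi_schema


app.openapi = custom_openapi
```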
+ """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Human-readable description. + """ + ), + ] = None, + gt: Annotated[ + Optional[float], + Doc( + """ + Greater than. If set, value must be greater than this. Only applicable to + numbers. + """ + ), + ] = None, + ge: Annotated[ + Optional[float], + Doc( + """ + Greater than or equal. If set, value must be greater than or equal to + this. Only applicable to numbers. + """ + ), + ] = None, + lt: Annotated[ + Optional[float], + Doc( + """ + Less than. If set, value must be less than this. Only applicable to numbers. + """ + ), + ] = None, + le: Annotated[ + Optional[float], + Doc( + """ + Less than or equal. If set, value must be less than or equal to this. + Only applicable to numbers. + """ + ), + ] = None, + min_length: Annotated[ + Optional[int], + Doc( + """ + Minimum length for strings. + """ + ), + ] = None, + max_length: Annotated[ + Optional[int], + Doc( + """ + Maximum length for strings. + """ + ), + ] = None, + pattern: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + ] = None, + regex: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Annotated[ + Union[str, None], + Doc( + """ + Parameter field name for discriminating the type in a tagged union. + """ + ), + ] = None, + strict: Annotated[ + Union[bool, None], + Doc( + """ + If `True`, strict validation is applied to the field. + """ + ), + ] = _Unset, + multiple_of: Annotated[ + Union[float, None], + Doc( + """ + Value must be a multiple of this. Only applicable to numbers. + """ + ), + ] = _Unset, + allow_inf_nan: Annotated[ + Union[bool, None], + Doc( + """ + Allow `inf`, `-inf`, `nan`. Only applicable to numbers. + """ + ), + ] = _Unset, + max_digits: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of allow digits for strings. + """ + ), + ] = _Unset, + decimal_places: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of decimal places allowed for numbers. + """ + ), + ] = _Unset, + examples: Annotated[ + Optional[list[Any]], + Doc( + """ + Example values for this field. + """ + ), + ] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." + ), + ] = _Unset, + openapi_examples: Annotated[ + Optional[dict[str, Example]], + Doc( + """ + OpenAPI-specific examples. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Swagger UI (that provides the `/docs` interface) has better support for the + OpenAPI-specific examples than the JSON Schema `examples`, that's the main + use case for this. + + Read more about it in the + [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). + """ + ), + ] = None, + deprecated: Annotated[ + Union[deprecated, str, bool, None], + Doc( + """ + Mark this parameter field as deprecated. + + It will affect the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + To include (or not) this parameter field in the generated OpenAPI. + You probably don't need it, but it's available. + + This affects the generated OpenAPI (e.g. visible at `/docs`). 
+ """ + ), + ] = True, + json_schema_extra: Annotated[ + Union[dict[str, Any], None], + Doc( + """ + Any additional JSON schema data. + """ + ), + ] = None, + **extra: Annotated[ + Any, + Doc( + """ + Include extra fields used by the JSON Schema. + """ + ), + deprecated( + """ + The `extra` kwargs is deprecated. Use `json_schema_extra` instead. + """ + ), + ], +) -> Any: + """ + Declare a path parameter for a *path operation*. + + Read more about it in the + [FastAPI docs for Path Parameters and Numeric Validations](https://fastapi.tiangolo.com/tutorial/path-params-numeric-validations/). + + ```python + from typing import Annotated + + from fastapi import FastAPI, Path + + app = FastAPI() + + + @app.get("/items/{item_id}") + async def read_items( + item_id: Annotated[int, Path(title="The ID of the item to get")], + ): + return {"item_id": item_id} + ``` + """ + return params.Path( + default=default, + default_factory=default_factory, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + example=example, + examples=examples, + openapi_examples=openapi_examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +def Query( # noqa: N802 + default: Annotated[ + Any, + Doc( + """ + Default value if the parameter field is not set. + """ + ), + ] = Undefined, + *, + default_factory: Annotated[ + Union[Callable[[], Any], None], + Doc( + """ + A callable to generate the default value. + + This doesn't affect `Path` parameters as the value is always required. + The parameter is available only for compatibility. + """ + ), + ] = _Unset, + alias: Annotated[ + Optional[str], + Doc( + """ + An alternative name for the parameter field. + + This will be used to extract the data and for the generated OpenAPI. + It is particularly useful when you can't use the name you want because it + is a Python reserved keyword or similar. + """ + ), + ] = None, + alias_priority: Annotated[ + Union[int, None], + Doc( + """ + Priority of the alias. This affects whether an alias generator is used. + """ + ), + ] = _Unset, + validation_alias: Annotated[ + Union[str, AliasPath, AliasChoices, None], + Doc( + """ + 'Whitelist' validation step. The parameter field will be the single one + allowed by the alias or set of aliases defined. + """ + ), + ] = None, + serialization_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Blacklist' validation step. The vanilla parameter field will be the + single one of the alias' or set of aliases' fields and all the other + fields will be ignored at serialization time. + """ + ), + ] = None, + title: Annotated[ + Optional[str], + Doc( + """ + Human-readable title. + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Human-readable description. + """ + ), + ] = None, + gt: Annotated[ + Optional[float], + Doc( + """ + Greater than. If set, value must be greater than this. Only applicable to + numbers. + """ + ), + ] = None, + ge: Annotated[ + Optional[float], + Doc( + """ + Greater than or equal. If set, value must be greater than or equal to + this. Only applicable to numbers. 
+ """ + ), + ] = None, + lt: Annotated[ + Optional[float], + Doc( + """ + Less than. If set, value must be less than this. Only applicable to numbers. + """ + ), + ] = None, + le: Annotated[ + Optional[float], + Doc( + """ + Less than or equal. If set, value must be less than or equal to this. + Only applicable to numbers. + """ + ), + ] = None, + min_length: Annotated[ + Optional[int], + Doc( + """ + Minimum length for strings. + """ + ), + ] = None, + max_length: Annotated[ + Optional[int], + Doc( + """ + Maximum length for strings. + """ + ), + ] = None, + pattern: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + ] = None, + regex: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Annotated[ + Union[str, None], + Doc( + """ + Parameter field name for discriminating the type in a tagged union. + """ + ), + ] = None, + strict: Annotated[ + Union[bool, None], + Doc( + """ + If `True`, strict validation is applied to the field. + """ + ), + ] = _Unset, + multiple_of: Annotated[ + Union[float, None], + Doc( + """ + Value must be a multiple of this. Only applicable to numbers. + """ + ), + ] = _Unset, + allow_inf_nan: Annotated[ + Union[bool, None], + Doc( + """ + Allow `inf`, `-inf`, `nan`. Only applicable to numbers. + """ + ), + ] = _Unset, + max_digits: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of allow digits for strings. + """ + ), + ] = _Unset, + decimal_places: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of decimal places allowed for numbers. + """ + ), + ] = _Unset, + examples: Annotated[ + Optional[list[Any]], + Doc( + """ + Example values for this field. + """ + ), + ] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." + ), + ] = _Unset, + openapi_examples: Annotated[ + Optional[dict[str, Example]], + Doc( + """ + OpenAPI-specific examples. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Swagger UI (that provides the `/docs` interface) has better support for the + OpenAPI-specific examples than the JSON Schema `examples`, that's the main + use case for this. + + Read more about it in the + [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). + """ + ), + ] = None, + deprecated: Annotated[ + Union[deprecated, str, bool, None], + Doc( + """ + Mark this parameter field as deprecated. + + It will affect the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + To include (or not) this parameter field in the generated OpenAPI. + You probably don't need it, but it's available. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = True, + json_schema_extra: Annotated[ + Union[dict[str, Any], None], + Doc( + """ + Any additional JSON schema data. + """ + ), + ] = None, + **extra: Annotated[ + Any, + Doc( + """ + Include extra fields used by the JSON Schema. + """ + ), + deprecated( + """ + The `extra` kwargs is deprecated. Use `json_schema_extra` instead. 
+ """ + ), + ], +) -> Any: + return params.Query( + default=default, + default_factory=default_factory, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + example=example, + examples=examples, + openapi_examples=openapi_examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +def Header( # noqa: N802 + default: Annotated[ + Any, + Doc( + """ + Default value if the parameter field is not set. + """ + ), + ] = Undefined, + *, + default_factory: Annotated[ + Union[Callable[[], Any], None], + Doc( + """ + A callable to generate the default value. + + This doesn't affect `Path` parameters as the value is always required. + The parameter is available only for compatibility. + """ + ), + ] = _Unset, + alias: Annotated[ + Optional[str], + Doc( + """ + An alternative name for the parameter field. + + This will be used to extract the data and for the generated OpenAPI. + It is particularly useful when you can't use the name you want because it + is a Python reserved keyword or similar. + """ + ), + ] = None, + alias_priority: Annotated[ + Union[int, None], + Doc( + """ + Priority of the alias. This affects whether an alias generator is used. + """ + ), + ] = _Unset, + validation_alias: Annotated[ + Union[str, AliasPath, AliasChoices, None], + Doc( + """ + 'Whitelist' validation step. The parameter field will be the single one + allowed by the alias or set of aliases defined. + """ + ), + ] = None, + serialization_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Blacklist' validation step. The vanilla parameter field will be the + single one of the alias' or set of aliases' fields and all the other + fields will be ignored at serialization time. + """ + ), + ] = None, + convert_underscores: Annotated[ + bool, + Doc( + """ + Automatically convert underscores to hyphens in the parameter field name. + + Read more about it in the + [FastAPI docs for Header Parameters](https://fastapi.tiangolo.com/tutorial/header-params/#automatic-conversion) + """ + ), + ] = True, + title: Annotated[ + Optional[str], + Doc( + """ + Human-readable title. + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Human-readable description. + """ + ), + ] = None, + gt: Annotated[ + Optional[float], + Doc( + """ + Greater than. If set, value must be greater than this. Only applicable to + numbers. + """ + ), + ] = None, + ge: Annotated[ + Optional[float], + Doc( + """ + Greater than or equal. If set, value must be greater than or equal to + this. Only applicable to numbers. + """ + ), + ] = None, + lt: Annotated[ + Optional[float], + Doc( + """ + Less than. If set, value must be less than this. Only applicable to numbers. + """ + ), + ] = None, + le: Annotated[ + Optional[float], + Doc( + """ + Less than or equal. If set, value must be less than or equal to this. + Only applicable to numbers. + """ + ), + ] = None, + min_length: Annotated[ + Optional[int], + Doc( + """ + Minimum length for strings. + """ + ), + ] = None, + max_length: Annotated[ + Optional[int], + Doc( + """ + Maximum length for strings. 
+ """ + ), + ] = None, + pattern: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + ] = None, + regex: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Annotated[ + Union[str, None], + Doc( + """ + Parameter field name for discriminating the type in a tagged union. + """ + ), + ] = None, + strict: Annotated[ + Union[bool, None], + Doc( + """ + If `True`, strict validation is applied to the field. + """ + ), + ] = _Unset, + multiple_of: Annotated[ + Union[float, None], + Doc( + """ + Value must be a multiple of this. Only applicable to numbers. + """ + ), + ] = _Unset, + allow_inf_nan: Annotated[ + Union[bool, None], + Doc( + """ + Allow `inf`, `-inf`, `nan`. Only applicable to numbers. + """ + ), + ] = _Unset, + max_digits: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of allow digits for strings. + """ + ), + ] = _Unset, + decimal_places: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of decimal places allowed for numbers. + """ + ), + ] = _Unset, + examples: Annotated[ + Optional[list[Any]], + Doc( + """ + Example values for this field. + """ + ), + ] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." + ), + ] = _Unset, + openapi_examples: Annotated[ + Optional[dict[str, Example]], + Doc( + """ + OpenAPI-specific examples. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Swagger UI (that provides the `/docs` interface) has better support for the + OpenAPI-specific examples than the JSON Schema `examples`, that's the main + use case for this. + + Read more about it in the + [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). + """ + ), + ] = None, + deprecated: Annotated[ + Union[deprecated, str, bool, None], + Doc( + """ + Mark this parameter field as deprecated. + + It will affect the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + To include (or not) this parameter field in the generated OpenAPI. + You probably don't need it, but it's available. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = True, + json_schema_extra: Annotated[ + Union[dict[str, Any], None], + Doc( + """ + Any additional JSON schema data. + """ + ), + ] = None, + **extra: Annotated[ + Any, + Doc( + """ + Include extra fields used by the JSON Schema. + """ + ), + deprecated( + """ + The `extra` kwargs is deprecated. Use `json_schema_extra` instead. 
+ """ + ), + ], +) -> Any: + return params.Header( + default=default, + default_factory=default_factory, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + convert_underscores=convert_underscores, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + example=example, + examples=examples, + openapi_examples=openapi_examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +def Cookie( # noqa: N802 + default: Annotated[ + Any, + Doc( + """ + Default value if the parameter field is not set. + """ + ), + ] = Undefined, + *, + default_factory: Annotated[ + Union[Callable[[], Any], None], + Doc( + """ + A callable to generate the default value. + + This doesn't affect `Path` parameters as the value is always required. + The parameter is available only for compatibility. + """ + ), + ] = _Unset, + alias: Annotated[ + Optional[str], + Doc( + """ + An alternative name for the parameter field. + + This will be used to extract the data and for the generated OpenAPI. + It is particularly useful when you can't use the name you want because it + is a Python reserved keyword or similar. + """ + ), + ] = None, + alias_priority: Annotated[ + Union[int, None], + Doc( + """ + Priority of the alias. This affects whether an alias generator is used. + """ + ), + ] = _Unset, + validation_alias: Annotated[ + Union[str, AliasPath, AliasChoices, None], + Doc( + """ + 'Whitelist' validation step. The parameter field will be the single one + allowed by the alias or set of aliases defined. + """ + ), + ] = None, + serialization_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Blacklist' validation step. The vanilla parameter field will be the + single one of the alias' or set of aliases' fields and all the other + fields will be ignored at serialization time. + """ + ), + ] = None, + title: Annotated[ + Optional[str], + Doc( + """ + Human-readable title. + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Human-readable description. + """ + ), + ] = None, + gt: Annotated[ + Optional[float], + Doc( + """ + Greater than. If set, value must be greater than this. Only applicable to + numbers. + """ + ), + ] = None, + ge: Annotated[ + Optional[float], + Doc( + """ + Greater than or equal. If set, value must be greater than or equal to + this. Only applicable to numbers. + """ + ), + ] = None, + lt: Annotated[ + Optional[float], + Doc( + """ + Less than. If set, value must be less than this. Only applicable to numbers. + """ + ), + ] = None, + le: Annotated[ + Optional[float], + Doc( + """ + Less than or equal. If set, value must be less than or equal to this. + Only applicable to numbers. + """ + ), + ] = None, + min_length: Annotated[ + Optional[int], + Doc( + """ + Minimum length for strings. + """ + ), + ] = None, + max_length: Annotated[ + Optional[int], + Doc( + """ + Maximum length for strings. + """ + ), + ] = None, + pattern: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + ] = None, + regex: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. 
+ """ + ), + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Annotated[ + Union[str, None], + Doc( + """ + Parameter field name for discriminating the type in a tagged union. + """ + ), + ] = None, + strict: Annotated[ + Union[bool, None], + Doc( + """ + If `True`, strict validation is applied to the field. + """ + ), + ] = _Unset, + multiple_of: Annotated[ + Union[float, None], + Doc( + """ + Value must be a multiple of this. Only applicable to numbers. + """ + ), + ] = _Unset, + allow_inf_nan: Annotated[ + Union[bool, None], + Doc( + """ + Allow `inf`, `-inf`, `nan`. Only applicable to numbers. + """ + ), + ] = _Unset, + max_digits: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of allow digits for strings. + """ + ), + ] = _Unset, + decimal_places: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of decimal places allowed for numbers. + """ + ), + ] = _Unset, + examples: Annotated[ + Optional[list[Any]], + Doc( + """ + Example values for this field. + """ + ), + ] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." + ), + ] = _Unset, + openapi_examples: Annotated[ + Optional[dict[str, Example]], + Doc( + """ + OpenAPI-specific examples. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Swagger UI (that provides the `/docs` interface) has better support for the + OpenAPI-specific examples than the JSON Schema `examples`, that's the main + use case for this. + + Read more about it in the + [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). + """ + ), + ] = None, + deprecated: Annotated[ + Union[deprecated, str, bool, None], + Doc( + """ + Mark this parameter field as deprecated. + + It will affect the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + To include (or not) this parameter field in the generated OpenAPI. + You probably don't need it, but it's available. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = True, + json_schema_extra: Annotated[ + Union[dict[str, Any], None], + Doc( + """ + Any additional JSON schema data. + """ + ), + ] = None, + **extra: Annotated[ + Any, + Doc( + """ + Include extra fields used by the JSON Schema. + """ + ), + deprecated( + """ + The `extra` kwargs is deprecated. Use `json_schema_extra` instead. + """ + ), + ], +) -> Any: + return params.Cookie( + default=default, + default_factory=default_factory, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + example=example, + examples=examples, + openapi_examples=openapi_examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +def Body( # noqa: N802 + default: Annotated[ + Any, + Doc( + """ + Default value if the parameter field is not set. 
+ """ + ), + ] = Undefined, + *, + default_factory: Annotated[ + Union[Callable[[], Any], None], + Doc( + """ + A callable to generate the default value. + + This doesn't affect `Path` parameters as the value is always required. + The parameter is available only for compatibility. + """ + ), + ] = _Unset, + embed: Annotated[ + Union[bool, None], + Doc( + """ + When `embed` is `True`, the parameter will be expected in a JSON body as a + key instead of being the JSON body itself. + + This happens automatically when more than one `Body` parameter is declared. + + Read more about it in the + [FastAPI docs for Body - Multiple Parameters](https://fastapi.tiangolo.com/tutorial/body-multiple-params/#embed-a-single-body-parameter). + """ + ), + ] = None, + media_type: Annotated[ + str, + Doc( + """ + The media type of this parameter field. Changing it would affect the + generated OpenAPI, but currently it doesn't affect the parsing of the data. + """ + ), + ] = "application/json", + alias: Annotated[ + Optional[str], + Doc( + """ + An alternative name for the parameter field. + + This will be used to extract the data and for the generated OpenAPI. + It is particularly useful when you can't use the name you want because it + is a Python reserved keyword or similar. + """ + ), + ] = None, + alias_priority: Annotated[ + Union[int, None], + Doc( + """ + Priority of the alias. This affects whether an alias generator is used. + """ + ), + ] = _Unset, + validation_alias: Annotated[ + Union[str, AliasPath, AliasChoices, None], + Doc( + """ + 'Whitelist' validation step. The parameter field will be the single one + allowed by the alias or set of aliases defined. + """ + ), + ] = None, + serialization_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Blacklist' validation step. The vanilla parameter field will be the + single one of the alias' or set of aliases' fields and all the other + fields will be ignored at serialization time. + """ + ), + ] = None, + title: Annotated[ + Optional[str], + Doc( + """ + Human-readable title. + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Human-readable description. + """ + ), + ] = None, + gt: Annotated[ + Optional[float], + Doc( + """ + Greater than. If set, value must be greater than this. Only applicable to + numbers. + """ + ), + ] = None, + ge: Annotated[ + Optional[float], + Doc( + """ + Greater than or equal. If set, value must be greater than or equal to + this. Only applicable to numbers. + """ + ), + ] = None, + lt: Annotated[ + Optional[float], + Doc( + """ + Less than. If set, value must be less than this. Only applicable to numbers. + """ + ), + ] = None, + le: Annotated[ + Optional[float], + Doc( + """ + Less than or equal. If set, value must be less than or equal to this. + Only applicable to numbers. + """ + ), + ] = None, + min_length: Annotated[ + Optional[int], + Doc( + """ + Minimum length for strings. + """ + ), + ] = None, + max_length: Annotated[ + Optional[int], + Doc( + """ + Maximum length for strings. + """ + ), + ] = None, + pattern: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + ] = None, + regex: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Annotated[ + Union[str, None], + Doc( + """ + Parameter field name for discriminating the type in a tagged union. 
+ """ + ), + ] = None, + strict: Annotated[ + Union[bool, None], + Doc( + """ + If `True`, strict validation is applied to the field. + """ + ), + ] = _Unset, + multiple_of: Annotated[ + Union[float, None], + Doc( + """ + Value must be a multiple of this. Only applicable to numbers. + """ + ), + ] = _Unset, + allow_inf_nan: Annotated[ + Union[bool, None], + Doc( + """ + Allow `inf`, `-inf`, `nan`. Only applicable to numbers. + """ + ), + ] = _Unset, + max_digits: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of allow digits for strings. + """ + ), + ] = _Unset, + decimal_places: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of decimal places allowed for numbers. + """ + ), + ] = _Unset, + examples: Annotated[ + Optional[list[Any]], + Doc( + """ + Example values for this field. + """ + ), + ] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." + ), + ] = _Unset, + openapi_examples: Annotated[ + Optional[dict[str, Example]], + Doc( + """ + OpenAPI-specific examples. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Swagger UI (that provides the `/docs` interface) has better support for the + OpenAPI-specific examples than the JSON Schema `examples`, that's the main + use case for this. + + Read more about it in the + [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). + """ + ), + ] = None, + deprecated: Annotated[ + Union[deprecated, str, bool, None], + Doc( + """ + Mark this parameter field as deprecated. + + It will affect the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + To include (or not) this parameter field in the generated OpenAPI. + You probably don't need it, but it's available. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = True, + json_schema_extra: Annotated[ + Union[dict[str, Any], None], + Doc( + """ + Any additional JSON schema data. + """ + ), + ] = None, + **extra: Annotated[ + Any, + Doc( + """ + Include extra fields used by the JSON Schema. + """ + ), + deprecated( + """ + The `extra` kwargs is deprecated. Use `json_schema_extra` instead. + """ + ), + ], +) -> Any: + return params.Body( + default=default, + default_factory=default_factory, + embed=embed, + media_type=media_type, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + example=example, + examples=examples, + openapi_examples=openapi_examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +def Form( # noqa: N802 + default: Annotated[ + Any, + Doc( + """ + Default value if the parameter field is not set. + """ + ), + ] = Undefined, + *, + default_factory: Annotated[ + Union[Callable[[], Any], None], + Doc( + """ + A callable to generate the default value. + + This doesn't affect `Path` parameters as the value is always required. 
+ The parameter is available only for compatibility. + """ + ), + ] = _Unset, + media_type: Annotated[ + str, + Doc( + """ + The media type of this parameter field. Changing it would affect the + generated OpenAPI, but currently it doesn't affect the parsing of the data. + """ + ), + ] = "application/x-www-form-urlencoded", + alias: Annotated[ + Optional[str], + Doc( + """ + An alternative name for the parameter field. + + This will be used to extract the data and for the generated OpenAPI. + It is particularly useful when you can't use the name you want because it + is a Python reserved keyword or similar. + """ + ), + ] = None, + alias_priority: Annotated[ + Union[int, None], + Doc( + """ + Priority of the alias. This affects whether an alias generator is used. + """ + ), + ] = _Unset, + validation_alias: Annotated[ + Union[str, AliasPath, AliasChoices, None], + Doc( + """ + 'Whitelist' validation step. The parameter field will be the single one + allowed by the alias or set of aliases defined. + """ + ), + ] = None, + serialization_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Blacklist' validation step. The vanilla parameter field will be the + single one of the alias' or set of aliases' fields and all the other + fields will be ignored at serialization time. + """ + ), + ] = None, + title: Annotated[ + Optional[str], + Doc( + """ + Human-readable title. + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Human-readable description. + """ + ), + ] = None, + gt: Annotated[ + Optional[float], + Doc( + """ + Greater than. If set, value must be greater than this. Only applicable to + numbers. + """ + ), + ] = None, + ge: Annotated[ + Optional[float], + Doc( + """ + Greater than or equal. If set, value must be greater than or equal to + this. Only applicable to numbers. + """ + ), + ] = None, + lt: Annotated[ + Optional[float], + Doc( + """ + Less than. If set, value must be less than this. Only applicable to numbers. + """ + ), + ] = None, + le: Annotated[ + Optional[float], + Doc( + """ + Less than or equal. If set, value must be less than or equal to this. + Only applicable to numbers. + """ + ), + ] = None, + min_length: Annotated[ + Optional[int], + Doc( + """ + Minimum length for strings. + """ + ), + ] = None, + max_length: Annotated[ + Optional[int], + Doc( + """ + Maximum length for strings. + """ + ), + ] = None, + pattern: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + ] = None, + regex: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Annotated[ + Union[str, None], + Doc( + """ + Parameter field name for discriminating the type in a tagged union. + """ + ), + ] = None, + strict: Annotated[ + Union[bool, None], + Doc( + """ + If `True`, strict validation is applied to the field. + """ + ), + ] = _Unset, + multiple_of: Annotated[ + Union[float, None], + Doc( + """ + Value must be a multiple of this. Only applicable to numbers. + """ + ), + ] = _Unset, + allow_inf_nan: Annotated[ + Union[bool, None], + Doc( + """ + Allow `inf`, `-inf`, `nan`. Only applicable to numbers. + """ + ), + ] = _Unset, + max_digits: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of allow digits for strings. + """ + ), + ] = _Unset, + decimal_places: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of decimal places allowed for numbers. 
+ """ + ), + ] = _Unset, + examples: Annotated[ + Optional[list[Any]], + Doc( + """ + Example values for this field. + """ + ), + ] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." + ), + ] = _Unset, + openapi_examples: Annotated[ + Optional[dict[str, Example]], + Doc( + """ + OpenAPI-specific examples. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Swagger UI (that provides the `/docs` interface) has better support for the + OpenAPI-specific examples than the JSON Schema `examples`, that's the main + use case for this. + + Read more about it in the + [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). + """ + ), + ] = None, + deprecated: Annotated[ + Union[deprecated, str, bool, None], + Doc( + """ + Mark this parameter field as deprecated. + + It will affect the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + To include (or not) this parameter field in the generated OpenAPI. + You probably don't need it, but it's available. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = True, + json_schema_extra: Annotated[ + Union[dict[str, Any], None], + Doc( + """ + Any additional JSON schema data. + """ + ), + ] = None, + **extra: Annotated[ + Any, + Doc( + """ + Include extra fields used by the JSON Schema. + """ + ), + deprecated( + """ + The `extra` kwargs is deprecated. Use `json_schema_extra` instead. + """ + ), + ], +) -> Any: + return params.Form( + default=default, + default_factory=default_factory, + media_type=media_type, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + example=example, + examples=examples, + openapi_examples=openapi_examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +def File( # noqa: N802 + default: Annotated[ + Any, + Doc( + """ + Default value if the parameter field is not set. + """ + ), + ] = Undefined, + *, + default_factory: Annotated[ + Union[Callable[[], Any], None], + Doc( + """ + A callable to generate the default value. + + This doesn't affect `Path` parameters as the value is always required. + The parameter is available only for compatibility. + """ + ), + ] = _Unset, + media_type: Annotated[ + str, + Doc( + """ + The media type of this parameter field. Changing it would affect the + generated OpenAPI, but currently it doesn't affect the parsing of the data. + """ + ), + ] = "multipart/form-data", + alias: Annotated[ + Optional[str], + Doc( + """ + An alternative name for the parameter field. + + This will be used to extract the data and for the generated OpenAPI. + It is particularly useful when you can't use the name you want because it + is a Python reserved keyword or similar. + """ + ), + ] = None, + alias_priority: Annotated[ + Union[int, None], + Doc( + """ + Priority of the alias. 
This affects whether an alias generator is used. + """ + ), + ] = _Unset, + validation_alias: Annotated[ + Union[str, AliasPath, AliasChoices, None], + Doc( + """ + 'Whitelist' validation step. The parameter field will be the single one + allowed by the alias or set of aliases defined. + """ + ), + ] = None, + serialization_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Blacklist' validation step. The vanilla parameter field will be the + single one of the alias' or set of aliases' fields and all the other + fields will be ignored at serialization time. + """ + ), + ] = None, + title: Annotated[ + Optional[str], + Doc( + """ + Human-readable title. + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Human-readable description. + """ + ), + ] = None, + gt: Annotated[ + Optional[float], + Doc( + """ + Greater than. If set, value must be greater than this. Only applicable to + numbers. + """ + ), + ] = None, + ge: Annotated[ + Optional[float], + Doc( + """ + Greater than or equal. If set, value must be greater than or equal to + this. Only applicable to numbers. + """ + ), + ] = None, + lt: Annotated[ + Optional[float], + Doc( + """ + Less than. If set, value must be less than this. Only applicable to numbers. + """ + ), + ] = None, + le: Annotated[ + Optional[float], + Doc( + """ + Less than or equal. If set, value must be less than or equal to this. + Only applicable to numbers. + """ + ), + ] = None, + min_length: Annotated[ + Optional[int], + Doc( + """ + Minimum length for strings. + """ + ), + ] = None, + max_length: Annotated[ + Optional[int], + Doc( + """ + Maximum length for strings. + """ + ), + ] = None, + pattern: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + ] = None, + regex: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Annotated[ + Union[str, None], + Doc( + """ + Parameter field name for discriminating the type in a tagged union. + """ + ), + ] = None, + strict: Annotated[ + Union[bool, None], + Doc( + """ + If `True`, strict validation is applied to the field. + """ + ), + ] = _Unset, + multiple_of: Annotated[ + Union[float, None], + Doc( + """ + Value must be a multiple of this. Only applicable to numbers. + """ + ), + ] = _Unset, + allow_inf_nan: Annotated[ + Union[bool, None], + Doc( + """ + Allow `inf`, `-inf`, `nan`. Only applicable to numbers. + """ + ), + ] = _Unset, + max_digits: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of allow digits for strings. + """ + ), + ] = _Unset, + decimal_places: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of decimal places allowed for numbers. + """ + ), + ] = _Unset, + examples: Annotated[ + Optional[list[Any]], + Doc( + """ + Example values for this field. + """ + ), + ] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." + ), + ] = _Unset, + openapi_examples: Annotated[ + Optional[dict[str, Example]], + Doc( + """ + OpenAPI-specific examples. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Swagger UI (that provides the `/docs` interface) has better support for the + OpenAPI-specific examples than the JSON Schema `examples`, that's the main + use case for this. 
+ + Read more about it in the + [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). + """ + ), + ] = None, + deprecated: Annotated[ + Union[deprecated, str, bool, None], + Doc( + """ + Mark this parameter field as deprecated. + + It will affect the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + To include (or not) this parameter field in the generated OpenAPI. + You probably don't need it, but it's available. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = True, + json_schema_extra: Annotated[ + Union[dict[str, Any], None], + Doc( + """ + Any additional JSON schema data. + """ + ), + ] = None, + **extra: Annotated[ + Any, + Doc( + """ + Include extra fields used by the JSON Schema. + """ + ), + deprecated( + """ + The `extra` kwargs is deprecated. Use `json_schema_extra` instead. + """ + ), + ], +) -> Any: + return params.File( + default=default, + default_factory=default_factory, + media_type=media_type, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + example=example, + examples=examples, + openapi_examples=openapi_examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +def Depends( # noqa: N802 + dependency: Annotated[ + Optional[Callable[..., Any]], + Doc( + """ + A "dependable" callable (like a function). + + Don't call it directly, FastAPI will call it for you, just pass the object + directly. + """ + ), + ] = None, + *, + use_cache: Annotated[ + bool, + Doc( + """ + By default, after a dependency is called the first time in a request, if + the dependency is declared again for the rest of the request (for example + if the dependency is needed by several dependencies), the value will be + re-used for the rest of the request. + + Set `use_cache` to `False` to disable this behavior and ensure the + dependency is called again (if declared more than once) in the same request. + """ + ), + ] = True, + scope: Annotated[ + Union[Literal["function", "request"], None], + Doc( + """ + Mainly for dependencies with `yield`, define when the dependency function + should start (the code before `yield`) and when it should end (the code + after `yield`). + + * `"function"`: start the dependency before the *path operation function* + that handles the request, end the dependency after the *path operation + function* ends, but **before** the response is sent back to the client. + So, the dependency function will be executed **around** the *path operation + **function***. + * `"request"`: start the dependency before the *path operation function* + that handles the request (similar to when using `"function"`), but end + **after** the response is sent back to the client. So, the dependency + function will be executed **around** the **request** and response cycle. + """ + ), + ] = None, +) -> Any: + """ + Declare a FastAPI dependency. + + It takes a single "dependable" callable (like a function). 
+ + Don't call it directly, FastAPI will call it for you. + + Read more about it in the + [FastAPI docs for Dependencies](https://fastapi.tiangolo.com/tutorial/dependencies/). + + **Example** + + ```python + from typing import Annotated + + from fastapi import Depends, FastAPI + + app = FastAPI() + + + async def common_parameters(q: str | None = None, skip: int = 0, limit: int = 100): + return {"q": q, "skip": skip, "limit": limit} + + + @app.get("/items/") + async def read_items(commons: Annotated[dict, Depends(common_parameters)]): + return commons + ``` + """ + return params.Depends(dependency=dependency, use_cache=use_cache, scope=scope) + + +def Security( # noqa: N802 + dependency: Annotated[ + Optional[Callable[..., Any]], + Doc( + """ + A "dependable" callable (like a function). + + Don't call it directly, FastAPI will call it for you, just pass the object + directly. + """ + ), + ] = None, + *, + scopes: Annotated[ + Optional[Sequence[str]], + Doc( + """ + OAuth2 scopes required for the *path operation* that uses this Security + dependency. + + The term "scope" comes from the OAuth2 specification, it seems to be + intentionally vague and interpretable. It normally refers to permissions, + in cases to roles. + + These scopes are integrated with OpenAPI (and the API docs at `/docs`). + So they are visible in the OpenAPI specification. + ) + """ + ), + ] = None, + use_cache: Annotated[ + bool, + Doc( + """ + By default, after a dependency is called the first time in a request, if + the dependency is declared again for the rest of the request (for example + if the dependency is needed by several dependencies), the value will be + re-used for the rest of the request. + + Set `use_cache` to `False` to disable this behavior and ensure the + dependency is called again (if declared more than once) in the same request. + """ + ), + ] = True, +) -> Any: + """ + Declare a FastAPI Security dependency. + + The only difference with a regular dependency is that it can declare OAuth2 + scopes that will be integrated with OpenAPI and the automatic UI docs (by default + at `/docs`). + + It takes a single "dependable" callable (like a function). + + Don't call it directly, FastAPI will call it for you. + + Read more about it in the + [FastAPI docs for Security](https://fastapi.tiangolo.com/tutorial/security/) and + in the + [FastAPI docs for OAuth2 scopes](https://fastapi.tiangolo.com/advanced/security/oauth2-scopes/). 
+ + **Example** + + ```python + from typing import Annotated + + from fastapi import Security, FastAPI + + from .db import User + from .security import get_current_active_user + + app = FastAPI() + + @app.get("/users/me/items/") + async def read_own_items( + current_user: Annotated[User, Security(get_current_active_user, scopes=["items"])] + ): + return [{"item_id": "Foo", "owner": current_user.username}] + ``` + """ + return params.Security(dependency=dependency, scopes=scopes, use_cache=use_cache) diff --git a/.venv/lib/python3.12/site-packages/fastapi/params.py b/.venv/lib/python3.12/site-packages/fastapi/params.py new file mode 100644 index 0000000..72e797f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/params.py @@ -0,0 +1,755 @@ +import warnings +from collections.abc import Sequence +from dataclasses import dataclass +from enum import Enum +from typing import Annotated, Any, Callable, Optional, Union + +from fastapi.exceptions import FastAPIDeprecationWarning +from fastapi.openapi.models import Example +from pydantic import AliasChoices, AliasPath +from pydantic.fields import FieldInfo +from typing_extensions import Literal, deprecated + +from ._compat import ( + Undefined, +) + +_Unset: Any = Undefined + + +class ParamTypes(Enum): + query = "query" + header = "header" + path = "path" + cookie = "cookie" + + +class Param(FieldInfo): # type: ignore[misc] + in_: ParamTypes + + def __init__( + self, + default: Any = Undefined, + *, + default_factory: Union[Callable[[], Any], None] = _Unset, + annotation: Optional[Any] = None, + alias: Optional[str] = None, + alias_priority: Union[int, None] = _Unset, + validation_alias: Union[str, AliasPath, AliasChoices, None] = None, + serialization_alias: Union[str, None] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + pattern: Optional[str] = None, + regex: Annotated[ + Optional[str], + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Union[str, None] = None, + strict: Union[bool, None] = _Unset, + multiple_of: Union[float, None] = _Unset, + allow_inf_nan: Union[bool, None] = _Unset, + max_digits: Union[int, None] = _Unset, + decimal_places: Union[int, None] = _Unset, + examples: Optional[list[Any]] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." 
+ ), + ] = _Unset, + openapi_examples: Optional[dict[str, Example]] = None, + deprecated: Union[deprecated, str, bool, None] = None, + include_in_schema: bool = True, + json_schema_extra: Union[dict[str, Any], None] = None, + **extra: Any, + ): + if example is not _Unset: + warnings.warn( + "`example` has been deprecated, please use `examples` instead", + category=FastAPIDeprecationWarning, + stacklevel=4, + ) + self.example = example + self.include_in_schema = include_in_schema + self.openapi_examples = openapi_examples + kwargs = dict( + default=default, + default_factory=default_factory, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + discriminator=discriminator, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + **extra, + ) + if examples is not None: + kwargs["examples"] = examples + if regex is not None: + warnings.warn( + "`regex` has been deprecated, please use `pattern` instead", + category=FastAPIDeprecationWarning, + stacklevel=4, + ) + current_json_schema_extra = json_schema_extra or extra + kwargs["deprecated"] = deprecated + + if serialization_alias in (_Unset, None) and isinstance(alias, str): + serialization_alias = alias + if validation_alias in (_Unset, None): + validation_alias = alias + kwargs.update( + { + "annotation": annotation, + "alias_priority": alias_priority, + "validation_alias": validation_alias, + "serialization_alias": serialization_alias, + "strict": strict, + "json_schema_extra": current_json_schema_extra, + } + ) + kwargs["pattern"] = pattern or regex + + use_kwargs = {k: v for k, v in kwargs.items() if v is not _Unset} + + super().__init__(**use_kwargs) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.default})" + + +class Path(Param): # type: ignore[misc] + in_ = ParamTypes.path + + def __init__( + self, + default: Any = ..., + *, + default_factory: Union[Callable[[], Any], None] = _Unset, + annotation: Optional[Any] = None, + alias: Optional[str] = None, + alias_priority: Union[int, None] = _Unset, + validation_alias: Union[str, AliasPath, AliasChoices, None] = None, + serialization_alias: Union[str, None] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + pattern: Optional[str] = None, + regex: Annotated[ + Optional[str], + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Union[str, None] = None, + strict: Union[bool, None] = _Unset, + multiple_of: Union[float, None] = _Unset, + allow_inf_nan: Union[bool, None] = _Unset, + max_digits: Union[int, None] = _Unset, + decimal_places: Union[int, None] = _Unset, + examples: Optional[list[Any]] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." 
+ ), + ] = _Unset, + openapi_examples: Optional[dict[str, Example]] = None, + deprecated: Union[deprecated, str, bool, None] = None, + include_in_schema: bool = True, + json_schema_extra: Union[dict[str, Any], None] = None, + **extra: Any, + ): + assert default is ..., "Path parameters cannot have a default value" + self.in_ = self.in_ + super().__init__( + default=default, + default_factory=default_factory, + annotation=annotation, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + deprecated=deprecated, + example=example, + examples=examples, + openapi_examples=openapi_examples, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +class Query(Param): # type: ignore[misc] + in_ = ParamTypes.query + + def __init__( + self, + default: Any = Undefined, + *, + default_factory: Union[Callable[[], Any], None] = _Unset, + annotation: Optional[Any] = None, + alias: Optional[str] = None, + alias_priority: Union[int, None] = _Unset, + validation_alias: Union[str, AliasPath, AliasChoices, None] = None, + serialization_alias: Union[str, None] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + pattern: Optional[str] = None, + regex: Annotated[ + Optional[str], + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Union[str, None] = None, + strict: Union[bool, None] = _Unset, + multiple_of: Union[float, None] = _Unset, + allow_inf_nan: Union[bool, None] = _Unset, + max_digits: Union[int, None] = _Unset, + decimal_places: Union[int, None] = _Unset, + examples: Optional[list[Any]] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." 
+ ), + ] = _Unset, + openapi_examples: Optional[dict[str, Example]] = None, + deprecated: Union[deprecated, str, bool, None] = None, + include_in_schema: bool = True, + json_schema_extra: Union[dict[str, Any], None] = None, + **extra: Any, + ): + super().__init__( + default=default, + default_factory=default_factory, + annotation=annotation, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + deprecated=deprecated, + example=example, + examples=examples, + openapi_examples=openapi_examples, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +class Header(Param): # type: ignore[misc] + in_ = ParamTypes.header + + def __init__( + self, + default: Any = Undefined, + *, + default_factory: Union[Callable[[], Any], None] = _Unset, + annotation: Optional[Any] = None, + alias: Optional[str] = None, + alias_priority: Union[int, None] = _Unset, + validation_alias: Union[str, AliasPath, AliasChoices, None] = None, + serialization_alias: Union[str, None] = None, + convert_underscores: bool = True, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + pattern: Optional[str] = None, + regex: Annotated[ + Optional[str], + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Union[str, None] = None, + strict: Union[bool, None] = _Unset, + multiple_of: Union[float, None] = _Unset, + allow_inf_nan: Union[bool, None] = _Unset, + max_digits: Union[int, None] = _Unset, + decimal_places: Union[int, None] = _Unset, + examples: Optional[list[Any]] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." 
+ ), + ] = _Unset, + openapi_examples: Optional[dict[str, Example]] = None, + deprecated: Union[deprecated, str, bool, None] = None, + include_in_schema: bool = True, + json_schema_extra: Union[dict[str, Any], None] = None, + **extra: Any, + ): + self.convert_underscores = convert_underscores + super().__init__( + default=default, + default_factory=default_factory, + annotation=annotation, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + deprecated=deprecated, + example=example, + examples=examples, + openapi_examples=openapi_examples, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +class Cookie(Param): # type: ignore[misc] + in_ = ParamTypes.cookie + + def __init__( + self, + default: Any = Undefined, + *, + default_factory: Union[Callable[[], Any], None] = _Unset, + annotation: Optional[Any] = None, + alias: Optional[str] = None, + alias_priority: Union[int, None] = _Unset, + validation_alias: Union[str, AliasPath, AliasChoices, None] = None, + serialization_alias: Union[str, None] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + pattern: Optional[str] = None, + regex: Annotated[ + Optional[str], + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Union[str, None] = None, + strict: Union[bool, None] = _Unset, + multiple_of: Union[float, None] = _Unset, + allow_inf_nan: Union[bool, None] = _Unset, + max_digits: Union[int, None] = _Unset, + decimal_places: Union[int, None] = _Unset, + examples: Optional[list[Any]] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." 
+ ), + ] = _Unset, + openapi_examples: Optional[dict[str, Example]] = None, + deprecated: Union[deprecated, str, bool, None] = None, + include_in_schema: bool = True, + json_schema_extra: Union[dict[str, Any], None] = None, + **extra: Any, + ): + super().__init__( + default=default, + default_factory=default_factory, + annotation=annotation, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + deprecated=deprecated, + example=example, + examples=examples, + openapi_examples=openapi_examples, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +class Body(FieldInfo): # type: ignore[misc] + def __init__( + self, + default: Any = Undefined, + *, + default_factory: Union[Callable[[], Any], None] = _Unset, + annotation: Optional[Any] = None, + embed: Union[bool, None] = None, + media_type: str = "application/json", + alias: Optional[str] = None, + alias_priority: Union[int, None] = _Unset, + validation_alias: Union[str, AliasPath, AliasChoices, None] = None, + serialization_alias: Union[str, None] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + pattern: Optional[str] = None, + regex: Annotated[ + Optional[str], + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Union[str, None] = None, + strict: Union[bool, None] = _Unset, + multiple_of: Union[float, None] = _Unset, + allow_inf_nan: Union[bool, None] = _Unset, + max_digits: Union[int, None] = _Unset, + decimal_places: Union[int, None] = _Unset, + examples: Optional[list[Any]] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." 
+ ), + ] = _Unset, + openapi_examples: Optional[dict[str, Example]] = None, + deprecated: Union[deprecated, str, bool, None] = None, + include_in_schema: bool = True, + json_schema_extra: Union[dict[str, Any], None] = None, + **extra: Any, + ): + self.embed = embed + self.media_type = media_type + if example is not _Unset: + warnings.warn( + "`example` has been deprecated, please use `examples` instead", + category=FastAPIDeprecationWarning, + stacklevel=4, + ) + self.example = example + self.include_in_schema = include_in_schema + self.openapi_examples = openapi_examples + kwargs = dict( + default=default, + default_factory=default_factory, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + discriminator=discriminator, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + **extra, + ) + if examples is not None: + kwargs["examples"] = examples + if regex is not None: + warnings.warn( + "`regex` has been deprecated, please use `pattern` instead", + category=FastAPIDeprecationWarning, + stacklevel=4, + ) + current_json_schema_extra = json_schema_extra or extra + kwargs["deprecated"] = deprecated + if serialization_alias in (_Unset, None) and isinstance(alias, str): + serialization_alias = alias + if validation_alias in (_Unset, None): + validation_alias = alias + kwargs.update( + { + "annotation": annotation, + "alias_priority": alias_priority, + "validation_alias": validation_alias, + "serialization_alias": serialization_alias, + "strict": strict, + "json_schema_extra": current_json_schema_extra, + } + ) + kwargs["pattern"] = pattern or regex + + use_kwargs = {k: v for k, v in kwargs.items() if v is not _Unset} + + super().__init__(**use_kwargs) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.default})" + + +class Form(Body): # type: ignore[misc] + def __init__( + self, + default: Any = Undefined, + *, + default_factory: Union[Callable[[], Any], None] = _Unset, + annotation: Optional[Any] = None, + media_type: str = "application/x-www-form-urlencoded", + alias: Optional[str] = None, + alias_priority: Union[int, None] = _Unset, + validation_alias: Union[str, AliasPath, AliasChoices, None] = None, + serialization_alias: Union[str, None] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + pattern: Optional[str] = None, + regex: Annotated[ + Optional[str], + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Union[str, None] = None, + strict: Union[bool, None] = _Unset, + multiple_of: Union[float, None] = _Unset, + allow_inf_nan: Union[bool, None] = _Unset, + max_digits: Union[int, None] = _Unset, + decimal_places: Union[int, None] = _Unset, + examples: Optional[list[Any]] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." 
+ ), + ] = _Unset, + openapi_examples: Optional[dict[str, Example]] = None, + deprecated: Union[deprecated, str, bool, None] = None, + include_in_schema: bool = True, + json_schema_extra: Union[dict[str, Any], None] = None, + **extra: Any, + ): + super().__init__( + default=default, + default_factory=default_factory, + annotation=annotation, + media_type=media_type, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + deprecated=deprecated, + example=example, + examples=examples, + openapi_examples=openapi_examples, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +class File(Form): # type: ignore[misc] + def __init__( + self, + default: Any = Undefined, + *, + default_factory: Union[Callable[[], Any], None] = _Unset, + annotation: Optional[Any] = None, + media_type: str = "multipart/form-data", + alias: Optional[str] = None, + alias_priority: Union[int, None] = _Unset, + validation_alias: Union[str, AliasPath, AliasChoices, None] = None, + serialization_alias: Union[str, None] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + pattern: Optional[str] = None, + regex: Annotated[ + Optional[str], + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Union[str, None] = None, + strict: Union[bool, None] = _Unset, + multiple_of: Union[float, None] = _Unset, + allow_inf_nan: Union[bool, None] = _Unset, + max_digits: Union[int, None] = _Unset, + decimal_places: Union[int, None] = _Unset, + examples: Optional[list[Any]] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." 
+ ), + ] = _Unset, + openapi_examples: Optional[dict[str, Example]] = None, + deprecated: Union[deprecated, str, bool, None] = None, + include_in_schema: bool = True, + json_schema_extra: Union[dict[str, Any], None] = None, + **extra: Any, + ): + super().__init__( + default=default, + default_factory=default_factory, + annotation=annotation, + media_type=media_type, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + deprecated=deprecated, + example=example, + examples=examples, + openapi_examples=openapi_examples, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +@dataclass(frozen=True) +class Depends: + dependency: Optional[Callable[..., Any]] = None + use_cache: bool = True + scope: Union[Literal["function", "request"], None] = None + + +@dataclass(frozen=True) +class Security(Depends): + scopes: Optional[Sequence[str]] = None diff --git a/.venv/lib/python3.12/site-packages/fastapi/py.typed b/.venv/lib/python3.12/site-packages/fastapi/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/fastapi/requests.py b/.venv/lib/python3.12/site-packages/fastapi/requests.py new file mode 100644 index 0000000..d16552c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/requests.py @@ -0,0 +1,2 @@ +from starlette.requests import HTTPConnection as HTTPConnection # noqa: F401 +from starlette.requests import Request as Request # noqa: F401 diff --git a/.venv/lib/python3.12/site-packages/fastapi/responses.py b/.venv/lib/python3.12/site-packages/fastapi/responses.py new file mode 100644 index 0000000..6c8db6f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/responses.py @@ -0,0 +1,48 @@ +from typing import Any + +from starlette.responses import FileResponse as FileResponse # noqa +from starlette.responses import HTMLResponse as HTMLResponse # noqa +from starlette.responses import JSONResponse as JSONResponse # noqa +from starlette.responses import PlainTextResponse as PlainTextResponse # noqa +from starlette.responses import RedirectResponse as RedirectResponse # noqa +from starlette.responses import Response as Response # noqa +from starlette.responses import StreamingResponse as StreamingResponse # noqa + +try: + import ujson +except ImportError: # pragma: nocover + ujson = None # type: ignore + + +try: + import orjson +except ImportError: # pragma: nocover + orjson = None # type: ignore + + +class UJSONResponse(JSONResponse): + """ + JSON response using the high-performance ujson library to serialize data to JSON. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/). + """ + + def render(self, content: Any) -> bytes: + assert ujson is not None, "ujson must be installed to use UJSONResponse" + return ujson.dumps(content, ensure_ascii=False).encode("utf-8") + + +class ORJSONResponse(JSONResponse): + """ + JSON response using the high-performance orjson library to serialize data to JSON. 
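+
+ A minimal usage sketch (the route and payload below are illustrative) is to set it
+ as the application's default response class, so every JSON response is rendered
+ with orjson (which must then be installed):
+
+ ```python
+ from fastapi import FastAPI
+ from fastapi.responses import ORJSONResponse
+
+ app = FastAPI(default_response_class=ORJSONResponse)
+
+ @app.get("/items/")
+ async def read_items():
+     return [{"item_id": "Foo"}]  # illustrative payload
+ ```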
+ + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/). + """ + + def render(self, content: Any) -> bytes: + assert orjson is not None, "orjson must be installed to use ORJSONResponse" + return orjson.dumps( + content, option=orjson.OPT_NON_STR_KEYS | orjson.OPT_SERIALIZE_NUMPY + ) diff --git a/.venv/lib/python3.12/site-packages/fastapi/routing.py b/.venv/lib/python3.12/site-packages/fastapi/routing.py new file mode 100644 index 0000000..9ca2f46 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/routing.py @@ -0,0 +1,4508 @@ +import email.message +import functools +import inspect +import json +from collections.abc import ( + AsyncIterator, + Awaitable, + Collection, + Coroutine, + Mapping, + Sequence, +) +from contextlib import AsyncExitStack, asynccontextmanager +from enum import Enum, IntEnum +from typing import ( + Annotated, + Any, + Callable, + Optional, + Union, +) + +from annotated_doc import Doc +from fastapi import params +from fastapi._compat import ( + ModelField, + Undefined, + annotation_is_pydantic_v1, + lenient_issubclass, +) +from fastapi.datastructures import Default, DefaultPlaceholder +from fastapi.dependencies.models import Dependant +from fastapi.dependencies.utils import ( + _should_embed_body_fields, + get_body_field, + get_dependant, + get_flat_dependant, + get_parameterless_sub_dependant, + get_typed_return_annotation, + solve_dependencies, +) +from fastapi.encoders import jsonable_encoder +from fastapi.exceptions import ( + EndpointContext, + FastAPIError, + PydanticV1NotSupportedError, + RequestValidationError, + ResponseValidationError, + WebSocketRequestValidationError, +) +from fastapi.types import DecoratedCallable, IncEx +from fastapi.utils import ( + create_cloned_field, + create_model_field, + generate_unique_id, + get_value_or_default, + is_body_allowed_for_status_code, +) +from starlette import routing +from starlette._exception_handler import wrap_app_handling_exceptions +from starlette._utils import is_async_callable +from starlette.concurrency import run_in_threadpool +from starlette.exceptions import HTTPException +from starlette.requests import Request +from starlette.responses import JSONResponse, Response +from starlette.routing import ( + BaseRoute, + Match, + compile_path, + get_name, +) +from starlette.routing import Mount as Mount # noqa +from starlette.types import AppType, ASGIApp, Lifespan, Receive, Scope, Send +from starlette.websockets import WebSocket +from typing_extensions import deprecated + + +# Copy of starlette.routing.request_response modified to include the +# dependencies' AsyncExitStack +def request_response( + func: Callable[[Request], Union[Awaitable[Response], Response]], +) -> ASGIApp: + """ + Takes a function or coroutine `func(request) -> response`, + and returns an ASGI application. 
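+
+ For illustration (the handler name is arbitrary), wrapping a plain request handler
+ yields an ASGI callable:
+
+     async def homepage(request: Request) -> Response:
+         return JSONResponse({"hello": "world"})
+
+     asgi_app = request_response(homepage)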
+ """ + f: Callable[[Request], Awaitable[Response]] = ( + func if is_async_callable(func) else functools.partial(run_in_threadpool, func) # type:ignore + ) + + async def app(scope: Scope, receive: Receive, send: Send) -> None: + request = Request(scope, receive, send) + + async def app(scope: Scope, receive: Receive, send: Send) -> None: + # Starts customization + response_awaited = False + async with AsyncExitStack() as request_stack: + scope["fastapi_inner_astack"] = request_stack + async with AsyncExitStack() as function_stack: + scope["fastapi_function_astack"] = function_stack + response = await f(request) + await response(scope, receive, send) + # Continues customization + response_awaited = True + if not response_awaited: + raise FastAPIError( + "Response not awaited. There's a high chance that the " + "application code is raising an exception and a dependency with yield " + "has a block with a bare except, or a block with except Exception, " + "and is not raising the exception again. Read more about it in the " + "docs: https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-with-yield/#dependencies-with-yield-and-except" + ) + + # Same as in Starlette + await wrap_app_handling_exceptions(app, request)(scope, receive, send) + + return app + + +# Copy of starlette.routing.websocket_session modified to include the +# dependencies' AsyncExitStack +def websocket_session( + func: Callable[[WebSocket], Awaitable[None]], +) -> ASGIApp: + """ + Takes a coroutine `func(session)`, and returns an ASGI application. + """ + # assert asyncio.iscoroutinefunction(func), "WebSocket endpoints must be async" + + async def app(scope: Scope, receive: Receive, send: Send) -> None: + session = WebSocket(scope, receive=receive, send=send) + + async def app(scope: Scope, receive: Receive, send: Send) -> None: + async with AsyncExitStack() as request_stack: + scope["fastapi_inner_astack"] = request_stack + async with AsyncExitStack() as function_stack: + scope["fastapi_function_astack"] = function_stack + await func(session) + + # Same as in Starlette + await wrap_app_handling_exceptions(app, session)(scope, receive, send) + + return app + + +def _merge_lifespan_context( + original_context: Lifespan[Any], nested_context: Lifespan[Any] +) -> Lifespan[Any]: + @asynccontextmanager + async def merged_lifespan( + app: AppType, + ) -> AsyncIterator[Optional[Mapping[str, Any]]]: + async with original_context(app) as maybe_original_state: + async with nested_context(app) as maybe_nested_state: + if maybe_nested_state is None and maybe_original_state is None: + yield None # old ASGI compatibility + else: + yield {**(maybe_nested_state or {}), **(maybe_original_state or {})} + + return merged_lifespan # type: ignore[return-value] + + +# Cache for endpoint context to avoid re-extracting on every request +_endpoint_context_cache: dict[int, EndpointContext] = {} + + +def _extract_endpoint_context(func: Any) -> EndpointContext: + """Extract endpoint context with caching to avoid repeated file I/O.""" + func_id = id(func) + + if func_id in _endpoint_context_cache: + return _endpoint_context_cache[func_id] + + try: + ctx: EndpointContext = {} + + if (source_file := inspect.getsourcefile(func)) is not None: + ctx["file"] = source_file + if (line_number := inspect.getsourcelines(func)[1]) is not None: + ctx["line"] = line_number + if (func_name := getattr(func, "__name__", None)) is not None: + ctx["function"] = func_name + except Exception: + ctx = EndpointContext() + + _endpoint_context_cache[func_id] = ctx + 
return ctx + + +async def serialize_response( + *, + field: Optional[ModelField] = None, + response_content: Any, + include: Optional[IncEx] = None, + exclude: Optional[IncEx] = None, + by_alias: bool = True, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + is_coroutine: bool = True, + endpoint_ctx: Optional[EndpointContext] = None, +) -> Any: + if field: + errors = [] + if is_coroutine: + value, errors_ = field.validate(response_content, {}, loc=("response",)) + else: + value, errors_ = await run_in_threadpool( + field.validate, response_content, {}, loc=("response",) + ) + if isinstance(errors_, list): + errors.extend(errors_) + if errors: + ctx = endpoint_ctx or EndpointContext() + raise ResponseValidationError( + errors=errors, + body=response_content, + endpoint_ctx=ctx, + ) + + return field.serialize( + value, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + + else: + return jsonable_encoder(response_content) + + +async def run_endpoint_function( + *, dependant: Dependant, values: dict[str, Any], is_coroutine: bool +) -> Any: + # Only called by get_request_handler. Has been split into its own function to + # facilitate profiling endpoints, since inner functions are harder to profile. + assert dependant.call is not None, "dependant.call must be a function" + + if is_coroutine: + return await dependant.call(**values) + else: + return await run_in_threadpool(dependant.call, **values) + + +def get_request_handler( + dependant: Dependant, + body_field: Optional[ModelField] = None, + status_code: Optional[int] = None, + response_class: Union[type[Response], DefaultPlaceholder] = Default(JSONResponse), + response_field: Optional[ModelField] = None, + response_model_include: Optional[IncEx] = None, + response_model_exclude: Optional[IncEx] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + dependency_overrides_provider: Optional[Any] = None, + embed_body_fields: bool = False, +) -> Callable[[Request], Coroutine[Any, Any, Response]]: + assert dependant.call is not None, "dependant.call must be a function" + is_coroutine = dependant.is_coroutine_callable + is_body_form = body_field and isinstance(body_field.field_info, params.Form) + if isinstance(response_class, DefaultPlaceholder): + actual_response_class: type[Response] = response_class.value + else: + actual_response_class = response_class + + async def app(request: Request) -> Response: + response: Union[Response, None] = None + file_stack = request.scope.get("fastapi_middleware_astack") + assert isinstance(file_stack, AsyncExitStack), ( + "fastapi_middleware_astack not found in request scope" + ) + + # Extract endpoint context for error messages + endpoint_ctx = ( + _extract_endpoint_context(dependant.call) + if dependant.call + else EndpointContext() + ) + + if dependant.path: + # For mounted sub-apps, include the mount path prefix + mount_path = request.scope.get("root_path", "").rstrip("/") + endpoint_ctx["path"] = f"{request.method} {mount_path}{dependant.path}" + + # Read body and auto-close files + try: + body: Any = None + if body_field: + if is_body_form: + body = await request.form() + file_stack.push_async_callback(body.close) + else: + body_bytes = await request.body() + if body_bytes: + json_body: Any = Undefined + 
content_type_value = request.headers.get("content-type") + if not content_type_value: + json_body = await request.json() + else: + message = email.message.Message() + message["content-type"] = content_type_value + if message.get_content_maintype() == "application": + subtype = message.get_content_subtype() + if subtype == "json" or subtype.endswith("+json"): + json_body = await request.json() + if json_body != Undefined: + body = json_body + else: + body = body_bytes + except json.JSONDecodeError as e: + validation_error = RequestValidationError( + [ + { + "type": "json_invalid", + "loc": ("body", e.pos), + "msg": "JSON decode error", + "input": {}, + "ctx": {"error": e.msg}, + } + ], + body=e.doc, + endpoint_ctx=endpoint_ctx, + ) + raise validation_error from e + except HTTPException: + # If a middleware raises an HTTPException, it should be raised again + raise + except Exception as e: + http_error = HTTPException( + status_code=400, detail="There was an error parsing the body" + ) + raise http_error from e + + # Solve dependencies and run path operation function, auto-closing dependencies + errors: list[Any] = [] + async_exit_stack = request.scope.get("fastapi_inner_astack") + assert isinstance(async_exit_stack, AsyncExitStack), ( + "fastapi_inner_astack not found in request scope" + ) + solved_result = await solve_dependencies( + request=request, + dependant=dependant, + body=body, + dependency_overrides_provider=dependency_overrides_provider, + async_exit_stack=async_exit_stack, + embed_body_fields=embed_body_fields, + ) + errors = solved_result.errors + if not errors: + raw_response = await run_endpoint_function( + dependant=dependant, + values=solved_result.values, + is_coroutine=is_coroutine, + ) + if isinstance(raw_response, Response): + if raw_response.background is None: + raw_response.background = solved_result.background_tasks + response = raw_response + else: + response_args: dict[str, Any] = { + "background": solved_result.background_tasks + } + # If status_code was set, use it, otherwise use the default from the + # response class, in the case of redirect it's 307 + current_status_code = ( + status_code if status_code else solved_result.response.status_code + ) + if current_status_code is not None: + response_args["status_code"] = current_status_code + if solved_result.response.status_code: + response_args["status_code"] = solved_result.response.status_code + content = await serialize_response( + field=response_field, + response_content=raw_response, + include=response_model_include, + exclude=response_model_exclude, + by_alias=response_model_by_alias, + exclude_unset=response_model_exclude_unset, + exclude_defaults=response_model_exclude_defaults, + exclude_none=response_model_exclude_none, + is_coroutine=is_coroutine, + endpoint_ctx=endpoint_ctx, + ) + response = actual_response_class(content, **response_args) + if not is_body_allowed_for_status_code(response.status_code): + response.body = b"" + response.headers.raw.extend(solved_result.response.headers.raw) + if errors: + validation_error = RequestValidationError( + errors, body=body, endpoint_ctx=endpoint_ctx + ) + raise validation_error + + # Return response + assert response + return response + + return app + + +def get_websocket_app( + dependant: Dependant, + dependency_overrides_provider: Optional[Any] = None, + embed_body_fields: bool = False, +) -> Callable[[WebSocket], Coroutine[Any, Any, Any]]: + async def app(websocket: WebSocket) -> None: + endpoint_ctx = ( + _extract_endpoint_context(dependant.call) + if 
dependant.call + else EndpointContext() + ) + if dependant.path: + # For mounted sub-apps, include the mount path prefix + mount_path = websocket.scope.get("root_path", "").rstrip("/") + endpoint_ctx["path"] = f"WS {mount_path}{dependant.path}" + async_exit_stack = websocket.scope.get("fastapi_inner_astack") + assert isinstance(async_exit_stack, AsyncExitStack), ( + "fastapi_inner_astack not found in request scope" + ) + solved_result = await solve_dependencies( + request=websocket, + dependant=dependant, + dependency_overrides_provider=dependency_overrides_provider, + async_exit_stack=async_exit_stack, + embed_body_fields=embed_body_fields, + ) + if solved_result.errors: + raise WebSocketRequestValidationError( + solved_result.errors, + endpoint_ctx=endpoint_ctx, + ) + assert dependant.call is not None, "dependant.call must be a function" + await dependant.call(**solved_result.values) + + return app + + +class APIWebSocketRoute(routing.WebSocketRoute): + def __init__( + self, + path: str, + endpoint: Callable[..., Any], + *, + name: Optional[str] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + dependency_overrides_provider: Optional[Any] = None, + ) -> None: + self.path = path + self.endpoint = endpoint + self.name = get_name(endpoint) if name is None else name + self.dependencies = list(dependencies or []) + self.path_regex, self.path_format, self.param_convertors = compile_path(path) + self.dependant = get_dependant( + path=self.path_format, call=self.endpoint, scope="function" + ) + for depends in self.dependencies[::-1]: + self.dependant.dependencies.insert( + 0, + get_parameterless_sub_dependant(depends=depends, path=self.path_format), + ) + self._flat_dependant = get_flat_dependant(self.dependant) + self._embed_body_fields = _should_embed_body_fields( + self._flat_dependant.body_params + ) + self.app = websocket_session( + get_websocket_app( + dependant=self.dependant, + dependency_overrides_provider=dependency_overrides_provider, + embed_body_fields=self._embed_body_fields, + ) + ) + + def matches(self, scope: Scope) -> tuple[Match, Scope]: + match, child_scope = super().matches(scope) + if match != Match.NONE: + child_scope["route"] = self + return match, child_scope + + +class APIRoute(routing.Route): + def __init__( + self, + path: str, + endpoint: Callable[..., Any], + *, + response_model: Any = Default(None), + status_code: Optional[int] = None, + tags: Optional[list[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[dict[Union[int, str], dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + name: Optional[str] = None, + methods: Optional[Union[set[str], list[str]]] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[IncEx] = None, + response_model_exclude: Optional[IncEx] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Union[type[Response], DefaultPlaceholder] = Default( + JSONResponse + ), + dependency_overrides_provider: Optional[Any] = None, + callbacks: Optional[list[BaseRoute]] = None, + openapi_extra: Optional[dict[str, Any]] = None, + generate_unique_id_function: Union[ + Callable[["APIRoute"], str], DefaultPlaceholder + ] = 
Default(generate_unique_id), + ) -> None: + self.path = path + self.endpoint = endpoint + if isinstance(response_model, DefaultPlaceholder): + return_annotation = get_typed_return_annotation(endpoint) + if lenient_issubclass(return_annotation, Response): + response_model = None + else: + response_model = return_annotation + self.response_model = response_model + self.summary = summary + self.response_description = response_description + self.deprecated = deprecated + self.operation_id = operation_id + self.response_model_include = response_model_include + self.response_model_exclude = response_model_exclude + self.response_model_by_alias = response_model_by_alias + self.response_model_exclude_unset = response_model_exclude_unset + self.response_model_exclude_defaults = response_model_exclude_defaults + self.response_model_exclude_none = response_model_exclude_none + self.include_in_schema = include_in_schema + self.response_class = response_class + self.dependency_overrides_provider = dependency_overrides_provider + self.callbacks = callbacks + self.openapi_extra = openapi_extra + self.generate_unique_id_function = generate_unique_id_function + self.tags = tags or [] + self.responses = responses or {} + self.name = get_name(endpoint) if name is None else name + self.path_regex, self.path_format, self.param_convertors = compile_path(path) + if methods is None: + methods = ["GET"] + self.methods: set[str] = {method.upper() for method in methods} + if isinstance(generate_unique_id_function, DefaultPlaceholder): + current_generate_unique_id: Callable[[APIRoute], str] = ( + generate_unique_id_function.value + ) + else: + current_generate_unique_id = generate_unique_id_function + self.unique_id = self.operation_id or current_generate_unique_id(self) + # normalize enums e.g. http.HTTPStatus + if isinstance(status_code, IntEnum): + status_code = int(status_code) + self.status_code = status_code + if self.response_model: + assert is_body_allowed_for_status_code(status_code), ( + f"Status code {status_code} must not have a response body" + ) + response_name = "Response_" + self.unique_id + if annotation_is_pydantic_v1(self.response_model): + raise PydanticV1NotSupportedError( + "pydantic.v1 models are no longer supported by FastAPI." + f" Please update the response model {self.response_model!r}." + ) + self.response_field = create_model_field( + name=response_name, + type_=self.response_model, + mode="serialization", + ) + # Create a clone of the field, so that a Pydantic submodel is not returned + # as is just because it's an instance of a subclass of a more limited class + # e.g. UserInDB (containing hashed_password) could be a subclass of User + # that doesn't have the hashed_password. But because it's a subclass, it + # would pass the validation and be returned as is. + # By being a new field, no inheritance will be passed as is. A new model + # will always be created. 
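+ # In other words, response filtering is enforced by re-validating the returned
+ # data against the declared response_model, not by trusting the instance's type.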
+ # TODO: remove when deprecating Pydantic v1 + self.secure_cloned_response_field: Optional[ModelField] = ( + create_cloned_field(self.response_field) + ) + else: + self.response_field = None # type: ignore + self.secure_cloned_response_field = None + self.dependencies = list(dependencies or []) + self.description = description or inspect.cleandoc(self.endpoint.__doc__ or "") + # if a "form feed" character (page break) is found in the description text, + # truncate description text to the content preceding the first "form feed" + self.description = self.description.split("\f")[0].strip() + response_fields = {} + for additional_status_code, response in self.responses.items(): + assert isinstance(response, dict), "An additional response must be a dict" + model = response.get("model") + if model: + assert is_body_allowed_for_status_code(additional_status_code), ( + f"Status code {additional_status_code} must not have a response body" + ) + response_name = f"Response_{additional_status_code}_{self.unique_id}" + if annotation_is_pydantic_v1(model): + raise PydanticV1NotSupportedError( + "pydantic.v1 models are no longer supported by FastAPI." + f" In responses={{}}, please update {model}." + ) + response_field = create_model_field( + name=response_name, type_=model, mode="serialization" + ) + response_fields[additional_status_code] = response_field + if response_fields: + self.response_fields: dict[Union[int, str], ModelField] = response_fields + else: + self.response_fields = {} + + assert callable(endpoint), "An endpoint must be a callable" + self.dependant = get_dependant( + path=self.path_format, call=self.endpoint, scope="function" + ) + for depends in self.dependencies[::-1]: + self.dependant.dependencies.insert( + 0, + get_parameterless_sub_dependant(depends=depends, path=self.path_format), + ) + self._flat_dependant = get_flat_dependant(self.dependant) + self._embed_body_fields = _should_embed_body_fields( + self._flat_dependant.body_params + ) + self.body_field = get_body_field( + flat_dependant=self._flat_dependant, + name=self.unique_id, + embed_body_fields=self._embed_body_fields, + ) + self.app = request_response(self.get_route_handler()) + + def get_route_handler(self) -> Callable[[Request], Coroutine[Any, Any, Response]]: + return get_request_handler( + dependant=self.dependant, + body_field=self.body_field, + status_code=self.status_code, + response_class=self.response_class, + response_field=self.secure_cloned_response_field, + response_model_include=self.response_model_include, + response_model_exclude=self.response_model_exclude, + response_model_by_alias=self.response_model_by_alias, + response_model_exclude_unset=self.response_model_exclude_unset, + response_model_exclude_defaults=self.response_model_exclude_defaults, + response_model_exclude_none=self.response_model_exclude_none, + dependency_overrides_provider=self.dependency_overrides_provider, + embed_body_fields=self._embed_body_fields, + ) + + def matches(self, scope: Scope) -> tuple[Match, Scope]: + match, child_scope = super().matches(scope) + if match != Match.NONE: + child_scope["route"] = self + return match, child_scope + + +class APIRouter(routing.Router): + """ + `APIRouter` class, used to group *path operations*, for example to structure + an app in multiple files. It would then be included in the `FastAPI` app, or + in another `APIRouter` (ultimately included in the app). 
+ + Read more about it in the + [FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/). + + ## Example + + ```python + from fastapi import APIRouter, FastAPI + + app = FastAPI() + router = APIRouter() + + + @router.get("/users/", tags=["users"]) + async def read_users(): + return [{"username": "Rick"}, {"username": "Morty"}] + + + app.include_router(router) + ``` + """ + + def __init__( + self, + *, + prefix: Annotated[str, Doc("An optional path prefix for the router.")] = "", + tags: Annotated[ + Optional[list[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to all the *path operations* in this + router. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to all the + *path operations* in this router. + + Read more about it in the + [FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). + """ + ), + ] = None, + default_response_class: Annotated[ + type[Response], + Doc( + """ + The default response class to be used. + + Read more in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class). + """ + ), + ] = Default(JSONResponse), + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses to be shown in OpenAPI. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Additional Responses in OpenAPI](https://fastapi.tiangolo.com/advanced/additional-responses/). + + And in the + [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). + """ + ), + ] = None, + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + OpenAPI callbacks that should apply to all *path operations* in this + router. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + routes: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited + from Starlette and supported for compatibility. + + --- + + A list of routes to serve incoming HTTP and WebSocket requests. + """ + ), + deprecated( + """ + You normally wouldn't use this parameter with FastAPI, it is inherited + from Starlette and supported for compatibility. + + In FastAPI, you normally would use the *path operation methods*, + like `router.get()`, `router.post()`, etc. + """ + ), + ] = None, + redirect_slashes: Annotated[ + bool, + Doc( + """ + Whether to detect and redirect slashes in URLs when the client doesn't + use the same format. + """ + ), + ] = True, + default: Annotated[ + Optional[ASGIApp], + Doc( + """ + Default function handler for this router. Used to handle + 404 Not Found errors. 
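+
+ For example (a sketch; the handler name is arbitrary), any ASGI callable works:
+
+ ```python
+ from starlette.responses import PlainTextResponse
+ from starlette.types import Receive, Scope, Send
+
+ async def not_found(scope: Scope, receive: Receive, send: Send) -> None:
+     # send a plain 404 for any path this router doesn't handle
+     await PlainTextResponse("Nothing here", status_code=404)(scope, receive, send)
+
+ router = APIRouter(default=not_found)
+ ```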
+ """ + ), + ] = None, + dependency_overrides_provider: Annotated[ + Optional[Any], + Doc( + """ + Only used internally by FastAPI to handle dependency overrides. + + You shouldn't need to use it. It normally points to the `FastAPI` app + object. + """ + ), + ] = None, + route_class: Annotated[ + type[APIRoute], + Doc( + """ + Custom route (*path operation*) class to be used by this router. + + Read more about it in the + [FastAPI docs for Custom Request and APIRoute class](https://fastapi.tiangolo.com/how-to/custom-request-and-route/#custom-apiroute-class-in-a-router). + """ + ), + ] = APIRoute, + on_startup: Annotated[ + Optional[Sequence[Callable[[], Any]]], + Doc( + """ + A list of startup event handler functions. + + You should instead use the `lifespan` handlers. + + Read more in the [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). + """ + ), + ] = None, + on_shutdown: Annotated[ + Optional[Sequence[Callable[[], Any]]], + Doc( + """ + A list of shutdown event handler functions. + + You should instead use the `lifespan` handlers. + + Read more in the + [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). + """ + ), + ] = None, + # the generic to Lifespan[AppType] is the type of the top level application + # which the router cannot know statically, so we use typing.Any + lifespan: Annotated[ + Optional[Lifespan[Any]], + Doc( + """ + A `Lifespan` context manager handler. This replaces `startup` and + `shutdown` functions with a single context manager. + + Read more in the + [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark all *path operations* in this router as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + To include (or not) all the *path operations* in this router in the + generated OpenAPI. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). + """ + ), + ] = True, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). 
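+
+ For example (illustrative; assumes every route declares at least one tag):
+
+ ```python
+ from fastapi import APIRouter
+ from fastapi.routing import APIRoute
+
+ def custom_generate_unique_id(route: APIRoute) -> str:
+     # e.g. "items-read_items"
+     return f"{route.tags[0]}-{route.name}"
+
+ router = APIRouter(generate_unique_id_function=custom_generate_unique_id)
+ ```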
+ """ + ), + ] = Default(generate_unique_id), + ) -> None: + super().__init__( + routes=routes, + redirect_slashes=redirect_slashes, + default=default, + on_startup=on_startup, + on_shutdown=on_shutdown, + lifespan=lifespan, + ) + if prefix: + assert prefix.startswith("/"), "A path prefix must start with '/'" + assert not prefix.endswith("/"), ( + "A path prefix must not end with '/', as the routes will start with '/'" + ) + self.prefix = prefix + self.tags: list[Union[str, Enum]] = tags or [] + self.dependencies = list(dependencies or []) + self.deprecated = deprecated + self.include_in_schema = include_in_schema + self.responses = responses or {} + self.callbacks = callbacks or [] + self.dependency_overrides_provider = dependency_overrides_provider + self.route_class = route_class + self.default_response_class = default_response_class + self.generate_unique_id_function = generate_unique_id_function + + def route( + self, + path: str, + methods: Optional[Collection[str]] = None, + name: Optional[str] = None, + include_in_schema: bool = True, + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_route( + path, + func, + methods=methods, + name=name, + include_in_schema=include_in_schema, + ) + return func + + return decorator + + def add_api_route( + self, + path: str, + endpoint: Callable[..., Any], + *, + response_model: Any = Default(None), + status_code: Optional[int] = None, + tags: Optional[list[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[dict[Union[int, str], dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + methods: Optional[Union[set[str], list[str]]] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[IncEx] = None, + response_model_exclude: Optional[IncEx] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Union[type[Response], DefaultPlaceholder] = Default( + JSONResponse + ), + name: Optional[str] = None, + route_class_override: Optional[type[APIRoute]] = None, + callbacks: Optional[list[BaseRoute]] = None, + openapi_extra: Optional[dict[str, Any]] = None, + generate_unique_id_function: Union[ + Callable[[APIRoute], str], DefaultPlaceholder + ] = Default(generate_unique_id), + ) -> None: + route_class = route_class_override or self.route_class + responses = responses or {} + combined_responses = {**self.responses, **responses} + current_response_class = get_value_or_default( + response_class, self.default_response_class + ) + current_tags = self.tags.copy() + if tags: + current_tags.extend(tags) + current_dependencies = self.dependencies.copy() + if dependencies: + current_dependencies.extend(dependencies) + current_callbacks = self.callbacks.copy() + if callbacks: + current_callbacks.extend(callbacks) + current_generate_unique_id = get_value_or_default( + generate_unique_id_function, self.generate_unique_id_function + ) + route = route_class( + self.prefix + path, + endpoint=endpoint, + response_model=response_model, + status_code=status_code, + tags=current_tags, + dependencies=current_dependencies, + summary=summary, + description=description, + response_description=response_description, 
+ responses=combined_responses, + deprecated=deprecated or self.deprecated, + methods=methods, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema and self.include_in_schema, + response_class=current_response_class, + name=name, + dependency_overrides_provider=self.dependency_overrides_provider, + callbacks=current_callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=current_generate_unique_id, + ) + self.routes.append(route) + + def api_route( + self, + path: str, + *, + response_model: Any = Default(None), + status_code: Optional[int] = None, + tags: Optional[list[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[dict[Union[int, str], dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + methods: Optional[list[str]] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[IncEx] = None, + response_model_exclude: Optional[IncEx] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[list[BaseRoute]] = None, + openapi_extra: Optional[dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_api_route( + path, + func, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=methods, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + return func + + return decorator + + def add_api_websocket_route( + self, + path: str, + endpoint: Callable[..., Any], + name: Optional[str] = None, + *, + dependencies: Optional[Sequence[params.Depends]] = None, + ) -> None: + current_dependencies = self.dependencies.copy() + if dependencies: + current_dependencies.extend(dependencies) + + route = APIWebSocketRoute( + self.prefix + path, + endpoint=endpoint, + name=name, + dependencies=current_dependencies, + dependency_overrides_provider=self.dependency_overrides_provider, + ) + self.routes.append(route) + + def websocket( + self, + path: Annotated[ + str, + Doc( + """ + 
WebSocket path. + """ + ), + ], + name: Annotated[ + Optional[str], + Doc( + """ + A name for the WebSocket. Only used internally. + """ + ), + ] = None, + *, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be used for this + WebSocket. + + Read more about it in the + [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/). + """ + ), + ] = None, + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Decorate a WebSocket function. + + Read more about it in the + [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/). + + **Example** + + ## Example + + ```python + from fastapi import APIRouter, FastAPI, WebSocket + + app = FastAPI() + router = APIRouter() + + @router.websocket("/ws") + async def websocket_endpoint(websocket: WebSocket): + await websocket.accept() + while True: + data = await websocket.receive_text() + await websocket.send_text(f"Message text was: {data}") + + app.include_router(router) + ``` + """ + + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_api_websocket_route( + path, func, name=name, dependencies=dependencies + ) + return func + + return decorator + + def websocket_route( + self, path: str, name: Union[str, None] = None + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_websocket_route(path, func, name=name) + return func + + return decorator + + def include_router( + self, + router: Annotated["APIRouter", Doc("The `APIRouter` to include.")], + *, + prefix: Annotated[str, Doc("An optional path prefix for the router.")] = "", + tags: Annotated[ + Optional[list[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to all the *path operations* in this + router. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to all the + *path operations* in this router. + + Read more about it in the + [FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). + """ + ), + ] = None, + default_response_class: Annotated[ + type[Response], + Doc( + """ + The default response class to be used. + + Read more in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class). + """ + ), + ] = Default(JSONResponse), + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses to be shown in OpenAPI. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Additional Responses in OpenAPI](https://fastapi.tiangolo.com/advanced/additional-responses/). + + And in the + [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). 
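+
+ For example (the sub-router here is illustrative):
+
+ ```python
+ items_router = APIRouter()  # illustrative sub-router
+
+ router.include_router(
+     items_router,
+     prefix="/items",
+     responses={404: {"description": "Not found"}},
+ )
+ ```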
+ """ + ), + ] = None, + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + OpenAPI callbacks that should apply to all *path operations* in this + router. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark all *path operations* in this router as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include (or not) all the *path operations* in this router in the + generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = True, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> None: + """ + Include another `APIRouter` in the same current `APIRouter`. + + Read more about it in the + [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/). + + ## Example + + ```python + from fastapi import APIRouter, FastAPI + + app = FastAPI() + internal_router = APIRouter() + users_router = APIRouter() + + @users_router.get("/users/") + def read_users(): + return [{"name": "Rick"}, {"name": "Morty"}] + + internal_router.include_router(users_router) + app.include_router(internal_router) + ``` + """ + if prefix: + assert prefix.startswith("/"), "A path prefix must start with '/'" + assert not prefix.endswith("/"), ( + "A path prefix must not end with '/', as the routes will start with '/'" + ) + else: + for r in router.routes: + path = getattr(r, "path") # noqa: B009 + name = getattr(r, "name", "unknown") + if path is not None and not path: + raise FastAPIError( + f"Prefix and path cannot be both empty (path operation: {name})" + ) + if responses is None: + responses = {} + for route in router.routes: + if isinstance(route, APIRoute): + combined_responses = {**responses, **route.responses} + use_response_class = get_value_or_default( + route.response_class, + router.default_response_class, + default_response_class, + self.default_response_class, + ) + current_tags = [] + if tags: + current_tags.extend(tags) + if route.tags: + current_tags.extend(route.tags) + current_dependencies: list[params.Depends] = [] + if dependencies: + current_dependencies.extend(dependencies) + if route.dependencies: + current_dependencies.extend(route.dependencies) + current_callbacks = [] + if callbacks: + current_callbacks.extend(callbacks) + if route.callbacks: + current_callbacks.extend(route.callbacks) + current_generate_unique_id = get_value_or_default( + route.generate_unique_id_function, + router.generate_unique_id_function, + generate_unique_id_function, + self.generate_unique_id_function, + ) + self.add_api_route( + prefix + route.path, + route.endpoint, + 
response_model=route.response_model, + status_code=route.status_code, + tags=current_tags, + dependencies=current_dependencies, + summary=route.summary, + description=route.description, + response_description=route.response_description, + responses=combined_responses, + deprecated=route.deprecated or deprecated or self.deprecated, + methods=route.methods, + operation_id=route.operation_id, + response_model_include=route.response_model_include, + response_model_exclude=route.response_model_exclude, + response_model_by_alias=route.response_model_by_alias, + response_model_exclude_unset=route.response_model_exclude_unset, + response_model_exclude_defaults=route.response_model_exclude_defaults, + response_model_exclude_none=route.response_model_exclude_none, + include_in_schema=route.include_in_schema + and self.include_in_schema + and include_in_schema, + response_class=use_response_class, + name=route.name, + route_class_override=type(route), + callbacks=current_callbacks, + openapi_extra=route.openapi_extra, + generate_unique_id_function=current_generate_unique_id, + ) + elif isinstance(route, routing.Route): + methods = list(route.methods or []) + self.add_route( + prefix + route.path, + route.endpoint, + methods=methods, + include_in_schema=route.include_in_schema, + name=route.name, + ) + elif isinstance(route, APIWebSocketRoute): + current_dependencies = [] + if dependencies: + current_dependencies.extend(dependencies) + if route.dependencies: + current_dependencies.extend(route.dependencies) + self.add_api_websocket_route( + prefix + route.path, + route.endpoint, + dependencies=current_dependencies, + name=route.name, + ) + elif isinstance(route, routing.WebSocketRoute): + self.add_websocket_route( + prefix + route.path, route.endpoint, name=route.name + ) + for handler in router.on_startup: + self.add_event_handler("startup", handler) + for handler in router.on_shutdown: + self.add_event_handler("shutdown", handler) + self.lifespan_context = _merge_lifespan_context( + self.lifespan_context, + router.lifespan_context, + ) + + def get( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). 
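+
+ For example (the `Item` model is illustrative):
+
+ ```python
+ from pydantic import BaseModel
+
+ class Item(BaseModel):
+     name: str
+     price: float
+
+ @router.get("/items/", response_model=list[Item])
+ async def read_items():
+     return [{"name": "Empanada", "price": 3.5}]
+ ```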
+ + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[list[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. 
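+
+ For example (the `Item` model and its fields are illustrative):
+
+ ```python
+ from pydantic import BaseModel
+
+ class Item(BaseModel):
+     name: str
+     price: float
+     internal_note: str = ""
+
+ @router.get(
+     "/items/latest",
+     response_model=Item,
+     response_model_include={"name", "price"},
+ )
+ async def read_latest_item() -> Item:
+     # `internal_note` is dropped from the JSON sent to the client
+     return Item(name="Empanada", price=3.5, internal_note="staff only")
+ ```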
+ + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. 
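+
+ For example (the path and content are illustrative):
+
+ ```python
+ from fastapi.responses import HTMLResponse
+
+ @router.get("/page", response_class=HTMLResponse)
+ async def read_page():
+     return "<h1>Hello</h1>"
+ ```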
+ + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP GET operation. + + ## Example + + ```python + from fastapi import APIRouter, FastAPI + + app = FastAPI() + router = APIRouter() + + @router.get("/items/") + def read_items(): + return [{"name": "Empanada"}, {"name": "Arepa"}] + + app.include_router(router) + ``` + """ + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["GET"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def put( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). 
+ * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[list[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. 
visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. 
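+
+ As a short sketch (the `Item` model is illustrative), fields left as
+ `None` are dropped from the JSON response:
+
+ ```python
+ from fastapi import APIRouter
+ from pydantic import BaseModel
+
+ router = APIRouter()
+
+ class Item(BaseModel):
+     name: str
+     description: str | None = None
+
+ @router.put(
+     "/items/{item_id}",
+     response_model=Item,
+     response_model_exclude_none=True,
+ )
+ def replace_item(item_id: str, item: Item):
+     # if "description" is None it is omitted from the response
+     return item
+ ```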
+ + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP PUT operation. 
+ + ## Example + + ```python + from fastapi import APIRouter, FastAPI + from pydantic import BaseModel + + class Item(BaseModel): + name: str + description: str | None = None + + app = FastAPI() + router = APIRouter() + + @router.put("/items/{item_id}") + def replace_item(item_id: str, item: Item): + return {"message": "Item replaced", "id": item_id} + + app.include_router(router) + ``` + """ + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["PUT"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def post( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[list[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). 
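+
+ For example (the tag name and `Item` model are arbitrary), the *path
+ operation* below would be grouped under "items" in the interactive docs:
+
+ ```python
+ from fastapi import APIRouter
+ from pydantic import BaseModel
+
+ router = APIRouter()
+
+ class Item(BaseModel):
+     name: str
+
+ @router.post("/items/", tags=["items"])
+ def create_item(item: Item):
+     return item
+ ```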
+ """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. 
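+
+ A brief sketch (the `Item` model and its alias are illustrative): with
+ `response_model_by_alias=False`, the response uses the Python field
+ name instead of the alias.
+
+ ```python
+ from fastapi import APIRouter
+ from pydantic import BaseModel, Field
+
+ router = APIRouter()
+
+ class Item(BaseModel):
+     item_name: str = Field(alias="itemName")
+
+ @router.post("/items/", response_model=Item, response_model_by_alias=False)
+ def create_item(item: Item):
+     # serialized as {"item_name": ...} instead of {"itemName": ...}
+     return item
+ ```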
+ + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). 
+ """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP POST operation. + + ## Example + + ```python + from fastapi import APIRouter, FastAPI + from pydantic import BaseModel + + class Item(BaseModel): + name: str + description: str | None = None + + app = FastAPI() + router = APIRouter() + + @router.post("/items/") + def create_item(item: Item): + return {"message": "Item created"} + + app.include_router(router) + ``` + """ + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["POST"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def delete( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. 
But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[list[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). 
+ """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). 
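+
+ For example (the path is illustrative), an internal endpoint can be
+ hidden from the generated schema like this:
+
+ ```python
+ from fastapi import APIRouter
+
+ router = APIRouter()
+
+ @router.delete("/internal/cache", include_in_schema=False)
+ def clear_cache():
+     # still callable, but not shown in the OpenAPI schema or /docs
+     return {"cleared": True}
+ ```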
+ """ + ), + ] = True, + response_class: Annotated[ + type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP DELETE operation. + + ## Example + + ```python + from fastapi import APIRouter, FastAPI + + app = FastAPI() + router = APIRouter() + + @router.delete("/items/{item_id}") + def delete_item(item_id: str): + return {"message": "Item deleted"} + + app.include_router(router) + ``` + """ + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["DELETE"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def options( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. 
So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[list[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. 
+ + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. 
+ + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP OPTIONS operation. 
+ + ## Example + + ```python + from fastapi import APIRouter, FastAPI + + app = FastAPI() + router = APIRouter() + + @router.options("/items/") + def get_item_options(): + return {"additions": ["Aji", "Guacamole"]} + + app.include_router(router) + ``` + """ + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["OPTIONS"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def head( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[list[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). 
+ """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. 
+ + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). 
+ """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP HEAD operation. + + ## Example + + ```python + from fastapi import APIRouter, FastAPI + from pydantic import BaseModel + + class Item(BaseModel): + name: str + description: str | None = None + + app = FastAPI() + router = APIRouter() + + @router.head("/items/", status_code=204) + def get_items_headers(response: Response): + response.headers["X-Cat-Dog"] = "Alone in the world" + + app.include_router(router) + ``` + """ + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["HEAD"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def patch( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. 
But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[list[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). 
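+
+ A minimal sketch (not part of the upstream docstring) of pinning an explicit
+ operation ID; the route path and handler name here are hypothetical:
+
+ ```python
+ from fastapi import APIRouter, FastAPI
+
+ app = FastAPI()
+ router = APIRouter()
+
+ @router.patch("/items/{item_id}", operation_id="update_item_partial")
+ def update_item(item_id: str):
+     # generated clients will expose this operation as `update_item_partial`
+     return {"item_id": item_id}
+
+ app.include_router(router)
+ ```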
+ """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). 
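+
+ As an illustration (an assumption, not part of the upstream docstring), an
+ endpoint can stay callable while being hidden from the generated OpenAPI:
+
+ ```python
+ from fastapi import APIRouter, FastAPI
+
+ app = FastAPI()
+ router = APIRouter()
+
+ @router.patch("/internal/cache", include_in_schema=False)
+ def refresh_cache():
+     # reachable over HTTP, but absent from /docs and openapi.json
+     return {"status": "refreshed"}
+
+ app.include_router(router)
+ ```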
+ """ + ), + ] = True, + response_class: Annotated[ + type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP PATCH operation. + + ## Example + + ```python + from fastapi import APIRouter, FastAPI + from pydantic import BaseModel + + class Item(BaseModel): + name: str + description: str | None = None + + app = FastAPI() + router = APIRouter() + + @router.patch("/items/") + def update_item(item: Item): + return {"message": "Item updated in place"} + + app.include_router(router) + ``` + """ + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["PATCH"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def trace( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. 
+ + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[list[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). 
+ """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[dict[Union[int, str], dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). 
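+
+ To make the difference concrete, here is a small illustration with plain
+ Pydantic (assuming Pydantic v2's `model_dump`); the `response_model_exclude_*`
+ flags apply the same filtering to the response data:
+
+ ```python
+ from pydantic import BaseModel
+
+ class Item(BaseModel):
+     name: str
+     tax: float = 10.5
+
+ explicit = Item(name="Foo", tax=10.5)  # default value set explicitly
+ implicit = Item(name="Bar")            # default value left unset
+
+ # exclude_unset keeps explicitly-set defaults; exclude_defaults drops them
+ assert explicit.model_dump(exclude_unset=True) == {"name": "Foo", "tax": 10.5}
+ assert explicit.model_dump(exclude_defaults=True) == {"name": "Foo"}
+ assert implicit.model_dump(exclude_unset=True) == {"name": "Bar"}
+ assert implicit.model_dump(exclude_defaults=True) == {"name": "Bar"}
+ ```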
+ """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[list[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP TRACE operation. 
+ + ## Example + + ```python + from fastapi import APIRouter, FastAPI + from pydantic import BaseModel + + class Item(BaseModel): + name: str + description: str | None = None + + app = FastAPI() + router = APIRouter() + + @router.trace("/items/{item_id}") + def trace_item(item_id: str): + return None + + app.include_router(router) + ``` + """ + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["TRACE"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + @deprecated( + """ + on_event is deprecated, use lifespan event handlers instead. + + Read more about it in the + [FastAPI docs for Lifespan Events](https://fastapi.tiangolo.com/advanced/events/). + """ + ) + def on_event( + self, + event_type: Annotated[ + str, + Doc( + """ + The type of event. `startup` or `shutdown`. + """ + ), + ], + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add an event handler for the router. + + `on_event` is deprecated, use `lifespan` event handlers instead. + + Read more about it in the + [FastAPI docs for Lifespan Events](https://fastapi.tiangolo.com/advanced/events/#alternative-events-deprecated). 
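+
+ A minimal sketch of the recommended `lifespan` replacement (assumed names, not
+ part of the upstream docstring):
+
+ ```python
+ from contextlib import asynccontextmanager
+
+ from fastapi import FastAPI
+
+ @asynccontextmanager
+ async def lifespan(app: FastAPI):
+     # startup logic runs before the application starts receiving requests
+     yield
+     # shutdown logic runs once the application is done handling requests
+
+ app = FastAPI(lifespan=lifespan)
+ ```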
+ """ + + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_event_handler(event_type, func) + return func + + return decorator diff --git a/.venv/lib/python3.12/site-packages/fastapi/security/__init__.py b/.venv/lib/python3.12/site-packages/fastapi/security/__init__.py new file mode 100644 index 0000000..3aa6bf2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/security/__init__.py @@ -0,0 +1,15 @@ +from .api_key import APIKeyCookie as APIKeyCookie +from .api_key import APIKeyHeader as APIKeyHeader +from .api_key import APIKeyQuery as APIKeyQuery +from .http import HTTPAuthorizationCredentials as HTTPAuthorizationCredentials +from .http import HTTPBasic as HTTPBasic +from .http import HTTPBasicCredentials as HTTPBasicCredentials +from .http import HTTPBearer as HTTPBearer +from .http import HTTPDigest as HTTPDigest +from .oauth2 import OAuth2 as OAuth2 +from .oauth2 import OAuth2AuthorizationCodeBearer as OAuth2AuthorizationCodeBearer +from .oauth2 import OAuth2PasswordBearer as OAuth2PasswordBearer +from .oauth2 import OAuth2PasswordRequestForm as OAuth2PasswordRequestForm +from .oauth2 import OAuth2PasswordRequestFormStrict as OAuth2PasswordRequestFormStrict +from .oauth2 import SecurityScopes as SecurityScopes +from .open_id_connect_url import OpenIdConnect as OpenIdConnect diff --git a/.venv/lib/python3.12/site-packages/fastapi/security/api_key.py b/.venv/lib/python3.12/site-packages/fastapi/security/api_key.py new file mode 100644 index 0000000..18dfb8e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/security/api_key.py @@ -0,0 +1,318 @@ +from typing import Annotated, Optional, Union + +from annotated_doc import Doc +from fastapi.openapi.models import APIKey, APIKeyIn +from fastapi.security.base import SecurityBase +from starlette.exceptions import HTTPException +from starlette.requests import Request +from starlette.status import HTTP_401_UNAUTHORIZED + + +class APIKeyBase(SecurityBase): + def __init__( + self, + location: APIKeyIn, + name: str, + description: Union[str, None], + scheme_name: Union[str, None], + auto_error: bool, + ): + self.auto_error = auto_error + + self.model: APIKey = APIKey( + **{"in": location}, + name=name, + description=description, + ) + self.scheme_name = scheme_name or self.__class__.__name__ + + def make_not_authenticated_error(self) -> HTTPException: + """ + The WWW-Authenticate header is not standardized for API Key authentication but + the HTTP specification requires that an error of 401 "Unauthorized" must + include a WWW-Authenticate header. + + Ref: https://datatracker.ietf.org/doc/html/rfc9110#name-401-unauthorized + + For this, this method sends a custom challenge `APIKey`. + """ + return HTTPException( + status_code=HTTP_401_UNAUTHORIZED, + detail="Not authenticated", + headers={"WWW-Authenticate": "APIKey"}, + ) + + def check_api_key(self, api_key: Optional[str]) -> Optional[str]: + if not api_key: + if self.auto_error: + raise self.make_not_authenticated_error() + return None + return api_key + + +class APIKeyQuery(APIKeyBase): + """ + API key authentication using a query parameter. + + This defines the name of the query parameter that should be provided in the request + with the API key and integrates that into the OpenAPI documentation. It extracts + the key value sent in the query parameter automatically and provides it as the + dependency result. But it doesn't define how to send that API key to the client. 
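+
+ For reference (a hypothetical client call, not part of the upstream
+ docstring), a client that already has a key includes it as a regular query
+ parameter:
+
+ ```python
+ import httpx
+
+ # the parameter name must match the `name` given to APIKeyQuery, e.g. "api_key"
+ response = httpx.get("http://localhost:8000/items/", params={"api_key": "secret-value"})
+ ```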
+ + ## Usage + + Create an instance object and use that object as the dependency in `Depends()`. + + The dependency result will be a string containing the key value. + + ## Example + + ```python + from fastapi import Depends, FastAPI + from fastapi.security import APIKeyQuery + + app = FastAPI() + + query_scheme = APIKeyQuery(name="api_key") + + + @app.get("/items/") + async def read_items(api_key: str = Depends(query_scheme)): + return {"api_key": api_key} + ``` + """ + + def __init__( + self, + *, + name: Annotated[ + str, + Doc("Query parameter name."), + ], + scheme_name: Annotated[ + Optional[str], + Doc( + """ + Security scheme name. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Security scheme description. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + auto_error: Annotated[ + bool, + Doc( + """ + By default, if the query parameter is not provided, `APIKeyQuery` will + automatically cancel the request and send the client an error. + + If `auto_error` is set to `False`, when the query parameter is not + available, instead of erroring out, the dependency result will be + `None`. + + This is useful when you want to have optional authentication. + + It is also useful when you want to have authentication that can be + provided in one of multiple optional ways (for example, in a query + parameter or in an HTTP Bearer token). + """ + ), + ] = True, + ): + super().__init__( + location=APIKeyIn.query, + name=name, + scheme_name=scheme_name, + description=description, + auto_error=auto_error, + ) + + async def __call__(self, request: Request) -> Optional[str]: + api_key = request.query_params.get(self.model.name) + return self.check_api_key(api_key) + + +class APIKeyHeader(APIKeyBase): + """ + API key authentication using a header. + + This defines the name of the header that should be provided in the request with + the API key and integrates that into the OpenAPI documentation. It extracts + the key value sent in the header automatically and provides it as the dependency + result. But it doesn't define how to send that key to the client. + + ## Usage + + Create an instance object and use that object as the dependency in `Depends()`. + + The dependency result will be a string containing the key value. + + ## Example + + ```python + from fastapi import Depends, FastAPI + from fastapi.security import APIKeyHeader + + app = FastAPI() + + header_scheme = APIKeyHeader(name="x-key") + + + @app.get("/items/") + async def read_items(key: str = Depends(header_scheme)): + return {"key": key} + ``` + """ + + def __init__( + self, + *, + name: Annotated[str, Doc("Header name.")], + scheme_name: Annotated[ + Optional[str], + Doc( + """ + Security scheme name. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Security scheme description. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + auto_error: Annotated[ + bool, + Doc( + """ + By default, if the header is not provided, `APIKeyHeader` will + automatically cancel the request and send the client an error. + + If `auto_error` is set to `False`, when the header is not available, + instead of erroring out, the dependency result will be `None`. + + This is useful when you want to have optional authentication. 
+ + It is also useful when you want to have authentication that can be + provided in one of multiple optional ways (for example, in a header or + in an HTTP Bearer token). + """ + ), + ] = True, + ): + super().__init__( + location=APIKeyIn.header, + name=name, + scheme_name=scheme_name, + description=description, + auto_error=auto_error, + ) + + async def __call__(self, request: Request) -> Optional[str]: + api_key = request.headers.get(self.model.name) + return self.check_api_key(api_key) + + +class APIKeyCookie(APIKeyBase): + """ + API key authentication using a cookie. + + This defines the name of the cookie that should be provided in the request with + the API key and integrates that into the OpenAPI documentation. It extracts + the key value sent in the cookie automatically and provides it as the dependency + result. But it doesn't define how to set that cookie. + + ## Usage + + Create an instance object and use that object as the dependency in `Depends()`. + + The dependency result will be a string containing the key value. + + ## Example + + ```python + from fastapi import Depends, FastAPI + from fastapi.security import APIKeyCookie + + app = FastAPI() + + cookie_scheme = APIKeyCookie(name="session") + + + @app.get("/items/") + async def read_items(session: str = Depends(cookie_scheme)): + return {"session": session} + ``` + """ + + def __init__( + self, + *, + name: Annotated[str, Doc("Cookie name.")], + scheme_name: Annotated[ + Optional[str], + Doc( + """ + Security scheme name. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Security scheme description. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + auto_error: Annotated[ + bool, + Doc( + """ + By default, if the cookie is not provided, `APIKeyCookie` will + automatically cancel the request and send the client an error. + + If `auto_error` is set to `False`, when the cookie is not available, + instead of erroring out, the dependency result will be `None`. + + This is useful when you want to have optional authentication. + + It is also useful when you want to have authentication that can be + provided in one of multiple optional ways (for example, in a cookie or + in an HTTP Bearer token). 
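+
+ A minimal sketch of optional cookie authentication (the route and cookie name
+ are assumptions, not part of the upstream docstring):
+
+ ```python
+ from typing import Annotated, Optional
+
+ from fastapi import Depends, FastAPI
+ from fastapi.security import APIKeyCookie
+
+ app = FastAPI()
+
+ # auto_error=False: a missing cookie yields None instead of a 401 response
+ optional_cookie = APIKeyCookie(name="session", auto_error=False)
+
+ @app.get("/profile")
+ async def read_profile(session: Annotated[Optional[str], Depends(optional_cookie)]):
+     if session is None:
+         return {"user": "anonymous"}
+     return {"user": "authenticated"}
+ ```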
+ """ + ), + ] = True, + ): + super().__init__( + location=APIKeyIn.cookie, + name=name, + scheme_name=scheme_name, + description=description, + auto_error=auto_error, + ) + + async def __call__(self, request: Request) -> Optional[str]: + api_key = request.cookies.get(self.model.name) + return self.check_api_key(api_key) diff --git a/.venv/lib/python3.12/site-packages/fastapi/security/base.py b/.venv/lib/python3.12/site-packages/fastapi/security/base.py new file mode 100644 index 0000000..c43555d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/security/base.py @@ -0,0 +1,6 @@ +from fastapi.openapi.models import SecurityBase as SecurityBaseModel + + +class SecurityBase: + model: SecurityBaseModel + scheme_name: str diff --git a/.venv/lib/python3.12/site-packages/fastapi/security/http.py b/.venv/lib/python3.12/site-packages/fastapi/security/http.py new file mode 100644 index 0000000..b4c3bc6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/security/http.py @@ -0,0 +1,423 @@ +import binascii +from base64 import b64decode +from typing import Annotated, Optional + +from annotated_doc import Doc +from fastapi.exceptions import HTTPException +from fastapi.openapi.models import HTTPBase as HTTPBaseModel +from fastapi.openapi.models import HTTPBearer as HTTPBearerModel +from fastapi.security.base import SecurityBase +from fastapi.security.utils import get_authorization_scheme_param +from pydantic import BaseModel +from starlette.requests import Request +from starlette.status import HTTP_401_UNAUTHORIZED + + +class HTTPBasicCredentials(BaseModel): + """ + The HTTP Basic credentials given as the result of using `HTTPBasic` in a + dependency. + + Read more about it in the + [FastAPI docs for HTTP Basic Auth](https://fastapi.tiangolo.com/advanced/security/http-basic-auth/). + """ + + username: Annotated[str, Doc("The HTTP Basic username.")] + password: Annotated[str, Doc("The HTTP Basic password.")] + + +class HTTPAuthorizationCredentials(BaseModel): + """ + The HTTP authorization credentials in the result of using `HTTPBearer` or + `HTTPDigest` in a dependency. + + The HTTP authorization header value is split by the first space. + + The first part is the `scheme`, the second part is the `credentials`. + + For example, in an HTTP Bearer token scheme, the client will send a header + like: + + ``` + Authorization: Bearer deadbeef12346 + ``` + + In this case: + + * `scheme` will have the value `"Bearer"` + * `credentials` will have the value `"deadbeef12346"` + """ + + scheme: Annotated[ + str, + Doc( + """ + The HTTP authorization scheme extracted from the header value. + """ + ), + ] + credentials: Annotated[ + str, + Doc( + """ + The HTTP authorization credentials extracted from the header value. 
+ """ + ), + ] + + +class HTTPBase(SecurityBase): + def __init__( + self, + *, + scheme: str, + scheme_name: Optional[str] = None, + description: Optional[str] = None, + auto_error: bool = True, + ): + self.model: HTTPBaseModel = HTTPBaseModel( + scheme=scheme, description=description + ) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + def make_authenticate_headers(self) -> dict[str, str]: + return {"WWW-Authenticate": f"{self.model.scheme.title()}"} + + def make_not_authenticated_error(self) -> HTTPException: + return HTTPException( + status_code=HTTP_401_UNAUTHORIZED, + detail="Not authenticated", + headers=self.make_authenticate_headers(), + ) + + async def __call__( + self, request: Request + ) -> Optional[HTTPAuthorizationCredentials]: + authorization = request.headers.get("Authorization") + scheme, credentials = get_authorization_scheme_param(authorization) + if not (authorization and scheme and credentials): + if self.auto_error: + raise self.make_not_authenticated_error() + else: + return None + return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials) + + +class HTTPBasic(HTTPBase): + """ + HTTP Basic authentication. + + Ref: https://datatracker.ietf.org/doc/html/rfc7617 + + ## Usage + + Create an instance object and use that object as the dependency in `Depends()`. + + The dependency result will be an `HTTPBasicCredentials` object containing the + `username` and the `password`. + + Read more about it in the + [FastAPI docs for HTTP Basic Auth](https://fastapi.tiangolo.com/advanced/security/http-basic-auth/). + + ## Example + + ```python + from typing import Annotated + + from fastapi import Depends, FastAPI + from fastapi.security import HTTPBasic, HTTPBasicCredentials + + app = FastAPI() + + security = HTTPBasic() + + + @app.get("/users/me") + def read_current_user(credentials: Annotated[HTTPBasicCredentials, Depends(security)]): + return {"username": credentials.username, "password": credentials.password} + ``` + """ + + def __init__( + self, + *, + scheme_name: Annotated[ + Optional[str], + Doc( + """ + Security scheme name. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + realm: Annotated[ + Optional[str], + Doc( + """ + HTTP Basic authentication realm. + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Security scheme description. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + auto_error: Annotated[ + bool, + Doc( + """ + By default, if the HTTP Basic authentication is not provided (a + header), `HTTPBasic` will automatically cancel the request and send the + client an error. + + If `auto_error` is set to `False`, when the HTTP Basic authentication + is not available, instead of erroring out, the dependency result will + be `None`. + + This is useful when you want to have optional authentication. + + It is also useful when you want to have authentication that can be + provided in one of multiple optional ways (for example, in HTTP Basic + authentication or in an HTTP Bearer token). 
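+
+ However `auto_error` is configured, the application still has to verify the
+ received credentials itself; a minimal sketch with placeholder values (not
+ part of the upstream docstring):
+
+ ```python
+ import secrets
+ from typing import Annotated
+
+ from fastapi import Depends, FastAPI, HTTPException, status
+ from fastapi.security import HTTPBasic, HTTPBasicCredentials
+
+ app = FastAPI()
+ security = HTTPBasic()
+
+ @app.get("/admin")
+ def read_admin(credentials: Annotated[HTTPBasicCredentials, Depends(security)]):
+     # compare_digest reduces the information leaked through timing differences
+     user_ok = secrets.compare_digest(credentials.username.encode(), b"admin")
+     pass_ok = secrets.compare_digest(credentials.password.encode(), b"swordfish")
+     if not (user_ok and pass_ok):
+         raise HTTPException(
+             status_code=status.HTTP_401_UNAUTHORIZED,
+             detail="Incorrect username or password",
+             headers={"WWW-Authenticate": "Basic"},
+         )
+     return {"username": credentials.username}
+ ```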
+ """ + ), + ] = True, + ): + self.model = HTTPBaseModel(scheme="basic", description=description) + self.scheme_name = scheme_name or self.__class__.__name__ + self.realm = realm + self.auto_error = auto_error + + def make_authenticate_headers(self) -> dict[str, str]: + if self.realm: + return {"WWW-Authenticate": f'Basic realm="{self.realm}"'} + return {"WWW-Authenticate": "Basic"} + + async def __call__( # type: ignore + self, request: Request + ) -> Optional[HTTPBasicCredentials]: + authorization = request.headers.get("Authorization") + scheme, param = get_authorization_scheme_param(authorization) + if not authorization or scheme.lower() != "basic": + if self.auto_error: + raise self.make_not_authenticated_error() + else: + return None + try: + data = b64decode(param).decode("ascii") + except (ValueError, UnicodeDecodeError, binascii.Error) as e: + raise self.make_not_authenticated_error() from e + username, separator, password = data.partition(":") + if not separator: + raise self.make_not_authenticated_error() + return HTTPBasicCredentials(username=username, password=password) + + +class HTTPBearer(HTTPBase): + """ + HTTP Bearer token authentication. + + ## Usage + + Create an instance object and use that object as the dependency in `Depends()`. + + The dependency result will be an `HTTPAuthorizationCredentials` object containing + the `scheme` and the `credentials`. + + ## Example + + ```python + from typing import Annotated + + from fastapi import Depends, FastAPI + from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer + + app = FastAPI() + + security = HTTPBearer() + + + @app.get("/users/me") + def read_current_user( + credentials: Annotated[HTTPAuthorizationCredentials, Depends(security)] + ): + return {"scheme": credentials.scheme, "credentials": credentials.credentials} + ``` + """ + + def __init__( + self, + *, + bearerFormat: Annotated[Optional[str], Doc("Bearer token format.")] = None, + scheme_name: Annotated[ + Optional[str], + Doc( + """ + Security scheme name. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Security scheme description. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + auto_error: Annotated[ + bool, + Doc( + """ + By default, if the HTTP Bearer token is not provided (in an + `Authorization` header), `HTTPBearer` will automatically cancel the + request and send the client an error. + + If `auto_error` is set to `False`, when the HTTP Bearer token + is not available, instead of erroring out, the dependency result will + be `None`. + + This is useful when you want to have optional authentication. + + It is also useful when you want to have authentication that can be + provided in one of multiple optional ways (for example, in an HTTP + Bearer token or in a cookie). 
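+
+ A minimal sketch of that multi-scheme pattern, with both schemes optional
+ (names and routes are assumptions, not part of the upstream docstring):
+
+ ```python
+ from typing import Annotated, Optional
+
+ from fastapi import Depends, FastAPI
+ from fastapi.security import APIKeyCookie, HTTPAuthorizationCredentials, HTTPBearer
+
+ app = FastAPI()
+
+ bearer = HTTPBearer(auto_error=False)
+ cookie = APIKeyCookie(name="session", auto_error=False)
+
+ @app.get("/me")
+ async def read_me(
+     token: Annotated[Optional[HTTPAuthorizationCredentials], Depends(bearer)],
+     session: Annotated[Optional[str], Depends(cookie)],
+ ):
+     # either credential is accepted; both missing means anonymous access
+     if token is not None:
+         return {"auth": "bearer", "credentials": token.credentials}
+     if session is not None:
+         return {"auth": "cookie", "session": session}
+     return {"auth": None}
+ ```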
+ """ + ), + ] = True, + ): + self.model = HTTPBearerModel(bearerFormat=bearerFormat, description=description) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__( + self, request: Request + ) -> Optional[HTTPAuthorizationCredentials]: + authorization = request.headers.get("Authorization") + scheme, credentials = get_authorization_scheme_param(authorization) + if not (authorization and scheme and credentials): + if self.auto_error: + raise self.make_not_authenticated_error() + else: + return None + if scheme.lower() != "bearer": + if self.auto_error: + raise self.make_not_authenticated_error() + else: + return None + return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials) + + +class HTTPDigest(HTTPBase): + """ + HTTP Digest authentication. + + **Warning**: this is only a stub to connect the components with OpenAPI in FastAPI, + but it doesn't implement the full Digest scheme, you would need to to subclass it + and implement it in your code. + + Ref: https://datatracker.ietf.org/doc/html/rfc7616 + + ## Usage + + Create an instance object and use that object as the dependency in `Depends()`. + + The dependency result will be an `HTTPAuthorizationCredentials` object containing + the `scheme` and the `credentials`. + + ## Example + + ```python + from typing import Annotated + + from fastapi import Depends, FastAPI + from fastapi.security import HTTPAuthorizationCredentials, HTTPDigest + + app = FastAPI() + + security = HTTPDigest() + + + @app.get("/users/me") + def read_current_user( + credentials: Annotated[HTTPAuthorizationCredentials, Depends(security)] + ): + return {"scheme": credentials.scheme, "credentials": credentials.credentials} + ``` + """ + + def __init__( + self, + *, + scheme_name: Annotated[ + Optional[str], + Doc( + """ + Security scheme name. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Security scheme description. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + auto_error: Annotated[ + bool, + Doc( + """ + By default, if the HTTP Digest is not provided, `HTTPDigest` will + automatically cancel the request and send the client an error. + + If `auto_error` is set to `False`, when the HTTP Digest is not + available, instead of erroring out, the dependency result will + be `None`. + + This is useful when you want to have optional authentication. + + It is also useful when you want to have authentication that can be + provided in one of multiple optional ways (for example, in HTTP + Digest or in a cookie). 
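+
+ Because `HTTPDigest` is only a stub, real use would subclass it; a rough,
+ hypothetical sketch (not a full Digest implementation):
+
+ ```python
+ from typing import Optional
+
+ from fastapi.security import HTTPAuthorizationCredentials, HTTPDigest
+ from starlette.requests import Request
+
+ class VerifiedHTTPDigest(HTTPDigest):
+     """Hypothetical subclass hook for validating the extracted credentials."""
+
+     async def __call__(
+         self, request: Request
+     ) -> Optional[HTTPAuthorizationCredentials]:
+         credentials = await super().__call__(request)
+         if credentials is not None:
+             # actual RFC 7616 validation (nonce, response hash, etc.) would go here
+             ...
+         return credentials
+ ```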
+ """ + ), + ] = True, + ): + self.model = HTTPBaseModel(scheme="digest", description=description) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__( + self, request: Request + ) -> Optional[HTTPAuthorizationCredentials]: + authorization = request.headers.get("Authorization") + scheme, credentials = get_authorization_scheme_param(authorization) + if not (authorization and scheme and credentials): + if self.auto_error: + raise self.make_not_authenticated_error() + else: + return None + if scheme.lower() != "digest": + if self.auto_error: + raise self.make_not_authenticated_error() + else: + return None + return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials) diff --git a/.venv/lib/python3.12/site-packages/fastapi/security/oauth2.py b/.venv/lib/python3.12/site-packages/fastapi/security/oauth2.py new file mode 100644 index 0000000..fc49ba1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/security/oauth2.py @@ -0,0 +1,663 @@ +from typing import Annotated, Any, Optional, Union, cast + +from annotated_doc import Doc +from fastapi.exceptions import HTTPException +from fastapi.openapi.models import OAuth2 as OAuth2Model +from fastapi.openapi.models import OAuthFlows as OAuthFlowsModel +from fastapi.param_functions import Form +from fastapi.security.base import SecurityBase +from fastapi.security.utils import get_authorization_scheme_param +from starlette.requests import Request +from starlette.status import HTTP_401_UNAUTHORIZED + + +class OAuth2PasswordRequestForm: + """ + This is a dependency class to collect the `username` and `password` as form data + for an OAuth2 password flow. + + The OAuth2 specification dictates that for a password flow the data should be + collected using form data (instead of JSON) and that it should have the specific + fields `username` and `password`. + + All the initialization parameters are extracted from the request. + + Read more about it in the + [FastAPI docs for Simple OAuth2 with Password and Bearer](https://fastapi.tiangolo.com/tutorial/security/simple-oauth2/). + + ## Example + + ```python + from typing import Annotated + + from fastapi import Depends, FastAPI + from fastapi.security import OAuth2PasswordRequestForm + + app = FastAPI() + + + @app.post("/login") + def login(form_data: Annotated[OAuth2PasswordRequestForm, Depends()]): + data = {} + data["scopes"] = [] + for scope in form_data.scopes: + data["scopes"].append(scope) + if form_data.client_id: + data["client_id"] = form_data.client_id + if form_data.client_secret: + data["client_secret"] = form_data.client_secret + return data + ``` + + Note that for OAuth2 the scope `items:read` is a single scope in an opaque string. + You could have custom internal logic to separate it by colon characters (`:`) or + similar, and get the two parts `items` and `read`. Many applications do that to + group and organize permissions, you could do it as well in your application, just + know that that it is application specific, it's not part of the specification. + """ + + def __init__( + self, + *, + grant_type: Annotated[ + Union[str, None], + Form(pattern="^password$"), + Doc( + """ + The OAuth2 spec says it is required and MUST be the fixed string + "password". Nevertheless, this dependency class is permissive and + allows not passing it. If you want to enforce it, use instead the + `OAuth2PasswordRequestFormStrict` dependency. 
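+
+ For reference, a hypothetical client request against a `/login` *path
+ operation* that uses this dependency would send the fields as form data (not
+ part of the upstream docstring):
+
+ ```python
+ import httpx
+
+ response = httpx.post(
+     "http://localhost:8000/login",
+     data={
+         "grant_type": "password",  # optional here; required by the strict variant
+         "username": "johndoe",
+         "password": "secret",
+         "scope": "items:read items:write",
+     },
+ )
+ ```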
+ """ + ), + ] = None, + username: Annotated[ + str, + Form(), + Doc( + """ + `username` string. The OAuth2 spec requires the exact field name + `username`. + """ + ), + ], + password: Annotated[ + str, + Form(json_schema_extra={"format": "password"}), + Doc( + """ + `password` string. The OAuth2 spec requires the exact field name + `password`. + """ + ), + ], + scope: Annotated[ + str, + Form(), + Doc( + """ + A single string with actually several scopes separated by spaces. Each + scope is also a string. + + For example, a single string with: + + ```python + "items:read items:write users:read profile openid" + ```` + + would represent the scopes: + + * `items:read` + * `items:write` + * `users:read` + * `profile` + * `openid` + """ + ), + ] = "", + client_id: Annotated[ + Union[str, None], + Form(), + Doc( + """ + If there's a `client_id`, it can be sent as part of the form fields. + But the OAuth2 specification recommends sending the `client_id` and + `client_secret` (if any) using HTTP Basic auth. + """ + ), + ] = None, + client_secret: Annotated[ + Union[str, None], + Form(json_schema_extra={"format": "password"}), + Doc( + """ + If there's a `client_password` (and a `client_id`), they can be sent + as part of the form fields. But the OAuth2 specification recommends + sending the `client_id` and `client_secret` (if any) using HTTP Basic + auth. + """ + ), + ] = None, + ): + self.grant_type = grant_type + self.username = username + self.password = password + self.scopes = scope.split() + self.client_id = client_id + self.client_secret = client_secret + + +class OAuth2PasswordRequestFormStrict(OAuth2PasswordRequestForm): + """ + This is a dependency class to collect the `username` and `password` as form data + for an OAuth2 password flow. + + The OAuth2 specification dictates that for a password flow the data should be + collected using form data (instead of JSON) and that it should have the specific + fields `username` and `password`. + + All the initialization parameters are extracted from the request. + + The only difference between `OAuth2PasswordRequestFormStrict` and + `OAuth2PasswordRequestForm` is that `OAuth2PasswordRequestFormStrict` requires the + client to send the form field `grant_type` with the value `"password"`, which + is required in the OAuth2 specification (it seems that for no particular reason), + while for `OAuth2PasswordRequestForm` `grant_type` is optional. + + Read more about it in the + [FastAPI docs for Simple OAuth2 with Password and Bearer](https://fastapi.tiangolo.com/tutorial/security/simple-oauth2/). + + ## Example + + ```python + from typing import Annotated + + from fastapi import Depends, FastAPI + from fastapi.security import OAuth2PasswordRequestForm + + app = FastAPI() + + + @app.post("/login") + def login(form_data: Annotated[OAuth2PasswordRequestFormStrict, Depends()]): + data = {} + data["scopes"] = [] + for scope in form_data.scopes: + data["scopes"].append(scope) + if form_data.client_id: + data["client_id"] = form_data.client_id + if form_data.client_secret: + data["client_secret"] = form_data.client_secret + return data + ``` + + Note that for OAuth2 the scope `items:read` is a single scope in an opaque string. + You could have custom internal logic to separate it by colon characters (`:`) or + similar, and get the two parts `items` and `read`. Many applications do that to + group and organize permissions, you could do it as well in your application, just + know that that it is application specific, it's not part of the specification. 
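+
+ A small illustration of that application-specific convention (an assumption,
+ not mandated by the OAuth2 specification):
+
+ ```python
+ scopes = "items:read items:write users:read".split()
+
+ for scope in scopes:
+     # treat the part before ":" as the resource and the rest as the action
+     resource, _, action = scope.partition(":")
+     print(resource, action)  # e.g. "items read"
+ ```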
+ + + grant_type: the OAuth2 spec says it is required and MUST be the fixed string "password". + This dependency is strict about it. If you want to be permissive, use instead the + OAuth2PasswordRequestForm dependency class. + username: username string. The OAuth2 spec requires the exact field name "username". + password: password string. The OAuth2 spec requires the exact field name "password". + scope: Optional string. Several scopes (each one a string) separated by spaces. E.g. + "items:read items:write users:read profile openid" + client_id: optional string. OAuth2 recommends sending the client_id and client_secret (if any) + using HTTP Basic auth, as: client_id:client_secret + client_secret: optional string. OAuth2 recommends sending the client_id and client_secret (if any) + using HTTP Basic auth, as: client_id:client_secret + """ + + def __init__( + self, + grant_type: Annotated[ + str, + Form(pattern="^password$"), + Doc( + """ + The OAuth2 spec says it is required and MUST be the fixed string + "password". This dependency is strict about it. If you want to be + permissive, use instead the `OAuth2PasswordRequestForm` dependency + class. + """ + ), + ], + username: Annotated[ + str, + Form(), + Doc( + """ + `username` string. The OAuth2 spec requires the exact field name + `username`. + """ + ), + ], + password: Annotated[ + str, + Form(), + Doc( + """ + `password` string. The OAuth2 spec requires the exact field name + `password`. + """ + ), + ], + scope: Annotated[ + str, + Form(), + Doc( + """ + A single string with actually several scopes separated by spaces. Each + scope is also a string. + + For example, a single string with: + + ```python + "items:read items:write users:read profile openid" + ```` + + would represent the scopes: + + * `items:read` + * `items:write` + * `users:read` + * `profile` + * `openid` + """ + ), + ] = "", + client_id: Annotated[ + Union[str, None], + Form(), + Doc( + """ + If there's a `client_id`, it can be sent as part of the form fields. + But the OAuth2 specification recommends sending the `client_id` and + `client_secret` (if any) using HTTP Basic auth. + """ + ), + ] = None, + client_secret: Annotated[ + Union[str, None], + Form(), + Doc( + """ + If there's a `client_password` (and a `client_id`), they can be sent + as part of the form fields. But the OAuth2 specification recommends + sending the `client_id` and `client_secret` (if any) using HTTP Basic + auth. + """ + ), + ] = None, + ): + super().__init__( + grant_type=grant_type, + username=username, + password=password, + scope=scope, + client_id=client_id, + client_secret=client_secret, + ) + + +class OAuth2(SecurityBase): + """ + This is the base class for OAuth2 authentication, an instance of it would be used + as a dependency. All other OAuth2 classes inherit from it and customize it for + each OAuth2 flow. + + You normally would not create a new class inheriting from it but use one of the + existing subclasses, and maybe compose them if you want to support multiple flows. + + Read more about it in the + [FastAPI docs for Security](https://fastapi.tiangolo.com/tutorial/security/). + """ + + def __init__( + self, + *, + flows: Annotated[ + Union[OAuthFlowsModel, dict[str, dict[str, Any]]], + Doc( + """ + The dictionary of OAuth2 flows. + """ + ), + ] = OAuthFlowsModel(), + scheme_name: Annotated[ + Optional[str], + Doc( + """ + Security scheme name. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). 
+ """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Security scheme description. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + auto_error: Annotated[ + bool, + Doc( + """ + By default, if no HTTP Authorization header is provided, required for + OAuth2 authentication, it will automatically cancel the request and + send the client an error. + + If `auto_error` is set to `False`, when the HTTP Authorization header + is not available, instead of erroring out, the dependency result will + be `None`. + + This is useful when you want to have optional authentication. + + It is also useful when you want to have authentication that can be + provided in one of multiple optional ways (for example, with OAuth2 + or in a cookie). + """ + ), + ] = True, + ): + self.model = OAuth2Model( + flows=cast(OAuthFlowsModel, flows), description=description + ) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + def make_not_authenticated_error(self) -> HTTPException: + """ + The OAuth 2 specification doesn't define the challenge that should be used, + because a `Bearer` token is not really the only option to authenticate. + + But declaring any other authentication challenge would be application-specific + as it's not defined in the specification. + + For practical reasons, this method uses the `Bearer` challenge by default, as + it's probably the most common one. + + If you are implementing an OAuth2 authentication scheme other than the provided + ones in FastAPI (based on bearer tokens), you might want to override this. + + Ref: https://datatracker.ietf.org/doc/html/rfc6749 + """ + return HTTPException( + status_code=HTTP_401_UNAUTHORIZED, + detail="Not authenticated", + headers={"WWW-Authenticate": "Bearer"}, + ) + + async def __call__(self, request: Request) -> Optional[str]: + authorization = request.headers.get("Authorization") + if not authorization: + if self.auto_error: + raise self.make_not_authenticated_error() + else: + return None + return authorization + + +class OAuth2PasswordBearer(OAuth2): + """ + OAuth2 flow for authentication using a bearer token obtained with a password. + An instance of it would be used as a dependency. + + Read more about it in the + [FastAPI docs for Simple OAuth2 with Password and Bearer](https://fastapi.tiangolo.com/tutorial/security/simple-oauth2/). + """ + + def __init__( + self, + tokenUrl: Annotated[ + str, + Doc( + """ + The URL to obtain the OAuth2 token. This would be the *path operation* + that has `OAuth2PasswordRequestForm` as a dependency. + """ + ), + ], + scheme_name: Annotated[ + Optional[str], + Doc( + """ + Security scheme name. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + scopes: Annotated[ + Optional[dict[str, str]], + Doc( + """ + The OAuth2 scopes that would be required by the *path operations* that + use this dependency. + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Security scheme description. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + auto_error: Annotated[ + bool, + Doc( + """ + By default, if no HTTP Authorization header is provided, required for + OAuth2 authentication, it will automatically cancel the request and + send the client an error. 
+ + If `auto_error` is set to `False`, when the HTTP Authorization header + is not available, instead of erroring out, the dependency result will + be `None`. + + This is useful when you want to have optional authentication. + + It is also useful when you want to have authentication that can be + provided in one of multiple optional ways (for example, with OAuth2 + or in a cookie). + """ + ), + ] = True, + refreshUrl: Annotated[ + Optional[str], + Doc( + """ + The URL to refresh the token and obtain a new one. + """ + ), + ] = None, + ): + if not scopes: + scopes = {} + flows = OAuthFlowsModel( + password=cast( + Any, + { + "tokenUrl": tokenUrl, + "refreshUrl": refreshUrl, + "scopes": scopes, + }, + ) + ) + super().__init__( + flows=flows, + scheme_name=scheme_name, + description=description, + auto_error=auto_error, + ) + + async def __call__(self, request: Request) -> Optional[str]: + authorization = request.headers.get("Authorization") + scheme, param = get_authorization_scheme_param(authorization) + if not authorization or scheme.lower() != "bearer": + if self.auto_error: + raise self.make_not_authenticated_error() + else: + return None + return param + + +class OAuth2AuthorizationCodeBearer(OAuth2): + """ + OAuth2 flow for authentication using a bearer token obtained with an OAuth2 code + flow. An instance of it would be used as a dependency. + """ + + def __init__( + self, + authorizationUrl: str, + tokenUrl: Annotated[ + str, + Doc( + """ + The URL to obtain the OAuth2 token. + """ + ), + ], + refreshUrl: Annotated[ + Optional[str], + Doc( + """ + The URL to refresh the token and obtain a new one. + """ + ), + ] = None, + scheme_name: Annotated[ + Optional[str], + Doc( + """ + Security scheme name. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + scopes: Annotated[ + Optional[dict[str, str]], + Doc( + """ + The OAuth2 scopes that would be required by the *path operations* that + use this dependency. + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Security scheme description. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + auto_error: Annotated[ + bool, + Doc( + """ + By default, if no HTTP Authorization header is provided, required for + OAuth2 authentication, it will automatically cancel the request and + send the client an error. + + If `auto_error` is set to `False`, when the HTTP Authorization header + is not available, instead of erroring out, the dependency result will + be `None`. + + This is useful when you want to have optional authentication. + + It is also useful when you want to have authentication that can be + provided in one of multiple optional ways (for example, with OAuth2 + or in a cookie). 
+ """ + ), + ] = True, + ): + if not scopes: + scopes = {} + flows = OAuthFlowsModel( + authorizationCode=cast( + Any, + { + "authorizationUrl": authorizationUrl, + "tokenUrl": tokenUrl, + "refreshUrl": refreshUrl, + "scopes": scopes, + }, + ) + ) + super().__init__( + flows=flows, + scheme_name=scheme_name, + description=description, + auto_error=auto_error, + ) + + async def __call__(self, request: Request) -> Optional[str]: + authorization = request.headers.get("Authorization") + scheme, param = get_authorization_scheme_param(authorization) + if not authorization or scheme.lower() != "bearer": + if self.auto_error: + raise self.make_not_authenticated_error() + else: + return None # pragma: nocover + return param + + +class SecurityScopes: + """ + This is a special class that you can define in a parameter in a dependency to + obtain the OAuth2 scopes required by all the dependencies in the same chain. + + This way, multiple dependencies can have different scopes, even when used in the + same *path operation*. And with this, you can access all the scopes required in + all those dependencies in a single place. + + Read more about it in the + [FastAPI docs for OAuth2 scopes](https://fastapi.tiangolo.com/advanced/security/oauth2-scopes/). + """ + + def __init__( + self, + scopes: Annotated[ + Optional[list[str]], + Doc( + """ + This will be filled by FastAPI. + """ + ), + ] = None, + ): + self.scopes: Annotated[ + list[str], + Doc( + """ + The list of all the scopes required by dependencies. + """ + ), + ] = scopes or [] + self.scope_str: Annotated[ + str, + Doc( + """ + All the scopes required by all the dependencies in a single string + separated by spaces, as defined in the OAuth2 specification. + """ + ), + ] = " ".join(self.scopes) diff --git a/.venv/lib/python3.12/site-packages/fastapi/security/open_id_connect_url.py b/.venv/lib/python3.12/site-packages/fastapi/security/open_id_connect_url.py new file mode 100644 index 0000000..f4d9533 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/security/open_id_connect_url.py @@ -0,0 +1,94 @@ +from typing import Annotated, Optional + +from annotated_doc import Doc +from fastapi.openapi.models import OpenIdConnect as OpenIdConnectModel +from fastapi.security.base import SecurityBase +from starlette.exceptions import HTTPException +from starlette.requests import Request +from starlette.status import HTTP_401_UNAUTHORIZED + + +class OpenIdConnect(SecurityBase): + """ + OpenID Connect authentication class. An instance of it would be used as a + dependency. + + **Warning**: this is only a stub to connect the components with OpenAPI in FastAPI, + but it doesn't implement the full OpenIdConnect scheme, for example, it doesn't use + the OpenIDConnect URL. You would need to to subclass it and implement it in your + code. + """ + + def __init__( + self, + *, + openIdConnectUrl: Annotated[ + str, + Doc( + """ + The OpenID Connect URL. + """ + ), + ], + scheme_name: Annotated[ + Optional[str], + Doc( + """ + Security scheme name. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Security scheme description. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + auto_error: Annotated[ + bool, + Doc( + """ + By default, if no HTTP Authorization header is provided, required for + OpenID Connect authentication, it will automatically cancel the request + and send the client an error. 
+ + If `auto_error` is set to `False`, when the HTTP Authorization header + is not available, instead of erroring out, the dependency result will + be `None`. + + This is useful when you want to have optional authentication. + + It is also useful when you want to have authentication that can be + provided in one of multiple optional ways (for example, with OpenID + Connect or in a cookie). + """ + ), + ] = True, + ): + self.model = OpenIdConnectModel( + openIdConnectUrl=openIdConnectUrl, description=description + ) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + def make_not_authenticated_error(self) -> HTTPException: + return HTTPException( + status_code=HTTP_401_UNAUTHORIZED, + detail="Not authenticated", + headers={"WWW-Authenticate": "Bearer"}, + ) + + async def __call__(self, request: Request) -> Optional[str]: + authorization = request.headers.get("Authorization") + if not authorization: + if self.auto_error: + raise self.make_not_authenticated_error() + else: + return None + return authorization diff --git a/.venv/lib/python3.12/site-packages/fastapi/security/utils.py b/.venv/lib/python3.12/site-packages/fastapi/security/utils.py new file mode 100644 index 0000000..002e68b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/security/utils.py @@ -0,0 +1,10 @@ +from typing import Optional + + +def get_authorization_scheme_param( + authorization_header_value: Optional[str], +) -> tuple[str, str]: + if not authorization_header_value: + return "", "" + scheme, _, param = authorization_header_value.partition(" ") + return scheme, param diff --git a/.venv/lib/python3.12/site-packages/fastapi/staticfiles.py b/.venv/lib/python3.12/site-packages/fastapi/staticfiles.py new file mode 100644 index 0000000..299015d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/staticfiles.py @@ -0,0 +1 @@ +from starlette.staticfiles import StaticFiles as StaticFiles # noqa diff --git a/.venv/lib/python3.12/site-packages/fastapi/templating.py b/.venv/lib/python3.12/site-packages/fastapi/templating.py new file mode 100644 index 0000000..0cb8684 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/templating.py @@ -0,0 +1 @@ +from starlette.templating import Jinja2Templates as Jinja2Templates # noqa diff --git a/.venv/lib/python3.12/site-packages/fastapi/testclient.py b/.venv/lib/python3.12/site-packages/fastapi/testclient.py new file mode 100644 index 0000000..4012406 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/testclient.py @@ -0,0 +1 @@ +from starlette.testclient import TestClient as TestClient # noqa diff --git a/.venv/lib/python3.12/site-packages/fastapi/types.py b/.venv/lib/python3.12/site-packages/fastapi/types.py new file mode 100644 index 0000000..d3e980c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/types.py @@ -0,0 +1,11 @@ +import types +from enum import Enum +from typing import Any, Callable, Optional, TypeVar, Union + +from pydantic import BaseModel + +DecoratedCallable = TypeVar("DecoratedCallable", bound=Callable[..., Any]) +UnionType = getattr(types, "UnionType", Union) +ModelNameMap = dict[Union[type[BaseModel], type[Enum]], str] +IncEx = Union[set[int], set[str], dict[int, Any], dict[str, Any]] +DependencyCacheKey = tuple[Optional[Callable[..., Any]], tuple[str, ...], str] diff --git a/.venv/lib/python3.12/site-packages/fastapi/utils.py b/.venv/lib/python3.12/site-packages/fastapi/utils.py new file mode 100644 index 0000000..78fdcbb --- /dev/null +++ 
b/.venv/lib/python3.12/site-packages/fastapi/utils.py @@ -0,0 +1,164 @@ +import re +import warnings +from collections.abc import MutableMapping +from typing import ( + TYPE_CHECKING, + Any, + Optional, + Union, +) +from weakref import WeakKeyDictionary + +import fastapi +from fastapi._compat import ( + BaseConfig, + ModelField, + PydanticSchemaGenerationError, + Undefined, + UndefinedType, + Validator, + annotation_is_pydantic_v1, +) +from fastapi.datastructures import DefaultPlaceholder, DefaultType +from fastapi.exceptions import FastAPIDeprecationWarning, PydanticV1NotSupportedError +from pydantic import BaseModel +from pydantic.fields import FieldInfo +from typing_extensions import Literal + +from ._compat import v2 + +if TYPE_CHECKING: # pragma: nocover + from .routing import APIRoute + +# Cache for `create_cloned_field` +_CLONED_TYPES_CACHE: MutableMapping[type[BaseModel], type[BaseModel]] = ( + WeakKeyDictionary() +) + + +def is_body_allowed_for_status_code(status_code: Union[int, str, None]) -> bool: + if status_code is None: + return True + # Ref: https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.1.0.md#patterned-fields-1 + if status_code in { + "default", + "1XX", + "2XX", + "3XX", + "4XX", + "5XX", + }: + return True + current_status_code = int(status_code) + return not (current_status_code < 200 or current_status_code in {204, 205, 304}) + + +def get_path_param_names(path: str) -> set[str]: + return set(re.findall("{(.*?)}", path)) + + +_invalid_args_message = ( + "Invalid args for response field! Hint: " + "check that {type_} is a valid Pydantic field type. " + "If you are using a return type annotation that is not a valid Pydantic " + "field (e.g. Union[Response, dict, None]) you can disable generating the " + "response model from the type annotation with the path operation decorator " + "parameter response_model=None. Read more: " + "https://fastapi.tiangolo.com/tutorial/response-model/" +) + + +def create_model_field( + name: str, + type_: Any, + class_validators: Optional[dict[str, Validator]] = None, + default: Optional[Any] = Undefined, + required: Union[bool, UndefinedType] = Undefined, + model_config: Union[type[BaseConfig], None] = None, + field_info: Optional[FieldInfo] = None, + alias: Optional[str] = None, + mode: Literal["validation", "serialization"] = "validation", + version: Literal["1", "auto"] = "auto", +) -> ModelField: + if annotation_is_pydantic_v1(type_): + raise PydanticV1NotSupportedError( + "pydantic.v1 models are no longer supported by FastAPI." + f" Please update the response model {type_!r}." 
+ ) + class_validators = class_validators or {} + + field_info = field_info or FieldInfo(annotation=type_, default=default, alias=alias) + kwargs = {"mode": mode, "name": name, "field_info": field_info} + try: + return v2.ModelField(**kwargs) # type: ignore[return-value,arg-type] + except PydanticSchemaGenerationError: + raise fastapi.exceptions.FastAPIError( + _invalid_args_message.format(type_=type_) + ) from None + + +def create_cloned_field( + field: ModelField, + *, + cloned_types: Optional[MutableMapping[type[BaseModel], type[BaseModel]]] = None, +) -> ModelField: + return field + + +def generate_operation_id_for_path( + *, name: str, path: str, method: str +) -> str: # pragma: nocover + warnings.warn( + message="fastapi.utils.generate_operation_id_for_path() was deprecated, " + "it is not used internally, and will be removed soon", + category=FastAPIDeprecationWarning, + stacklevel=2, + ) + operation_id = f"{name}{path}" + operation_id = re.sub(r"\W", "_", operation_id) + operation_id = f"{operation_id}_{method.lower()}" + return operation_id + + +def generate_unique_id(route: "APIRoute") -> str: + operation_id = f"{route.name}{route.path_format}" + operation_id = re.sub(r"\W", "_", operation_id) + assert route.methods + operation_id = f"{operation_id}_{list(route.methods)[0].lower()}" + return operation_id + + +def deep_dict_update(main_dict: dict[Any, Any], update_dict: dict[Any, Any]) -> None: + for key, value in update_dict.items(): + if ( + key in main_dict + and isinstance(main_dict[key], dict) + and isinstance(value, dict) + ): + deep_dict_update(main_dict[key], value) + elif ( + key in main_dict + and isinstance(main_dict[key], list) + and isinstance(update_dict[key], list) + ): + main_dict[key] = main_dict[key] + update_dict[key] + else: + main_dict[key] = value + + +def get_value_or_default( + first_item: Union[DefaultPlaceholder, DefaultType], + *extra_items: Union[DefaultPlaceholder, DefaultType], +) -> Union[DefaultPlaceholder, DefaultType]: + """ + Pass items or `DefaultPlaceholder`s by descending priority. + + The first one to _not_ be a `DefaultPlaceholder` will be returned. + + Otherwise, the first item (a `DefaultPlaceholder`) will be returned. 
+ """ + items = (first_item,) + extra_items + for item in items: + if not isinstance(item, DefaultPlaceholder): + return item + return first_item diff --git a/.venv/lib/python3.12/site-packages/fastapi/websockets.py b/.venv/lib/python3.12/site-packages/fastapi/websockets.py new file mode 100644 index 0000000..55a4ac4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fastapi/websockets.py @@ -0,0 +1,3 @@ +from starlette.websockets import WebSocket as WebSocket # noqa +from starlette.websockets import WebSocketDisconnect as WebSocketDisconnect # noqa +from starlette.websockets import WebSocketState as WebSocketState # noqa diff --git a/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/METADATA b/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/METADATA new file mode 100644 index 0000000..8867228 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/METADATA @@ -0,0 +1,209 @@ +Metadata-Version: 2.4 +Name: fusionagi +Version: 0.1.0 +Summary: Modular, agentic AI orchestration framework with reasoning, planning, execution, and memory. +Author: FusionAGI +License: MIT +Project-URL: Repository, https://github.com/fusionagi/fusionagi +Keywords: agi,agents,orchestration,llm +Classifier: Development Status :: 3 - Alpha +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Requires-Python: >=3.10 +Description-Content-Type: text/markdown +Requires-Dist: pydantic<3,>=2.0 +Provides-Extra: openai +Requires-Dist: openai>=1.12; extra == "openai" +Provides-Extra: anthropic +Requires-Dist: anthropic>=0.39; extra == "anthropic" +Provides-Extra: local +Requires-Dist: litellm>=1.40; extra == "local" +Provides-Extra: api +Requires-Dist: fastapi>=0.115; extra == "api" +Requires-Dist: uvicorn>=0.32; extra == "api" +Requires-Dist: httpx>=0.27; extra == "api" +Provides-Extra: maa +Provides-Extra: dev +Requires-Dist: pytest>=7.4; extra == "dev" +Requires-Dist: mypy>=1.8; extra == "dev" +Requires-Dist: ruff>=0.4; extra == "dev" +Provides-Extra: all +Requires-Dist: fusionagi[anthropic,local,openai]; extra == "all" + +# FusionAGI + +The world's most advanced **agentic AGI system**—Artificial General Intelligence, not narrow AI. A modular, composable intelligence framework that supports reasoning, planning, execution, memory, and tool use through coordinated agents, with built-in **self-improvement**, **self-correction**, **auto-recommend/suggest**, and **auto-training**. + +## Features + +- **AGI-first:** General intelligence across domains via composable agents, not single-task AI. +- **Self-improvement:** Learns from outcomes; reflection and heuristic updates improve behavior over time. +- **Self-correction:** Detects failures, runs critique loops, validates outputs, and retries with feedback. +- **Auto recommend / suggest:** Produces actionable recommendations (next actions, training targets, tool additions) from lessons and evaluations. 
+- **Auto training:** Suggests and applies heuristic updates, prompt refinements, and training targets from execution traces and reflection. +- **Modularity:** Reasoning, planning, memory, tooling, and governance are independent, replaceable modules. +- **Agent-oriented:** Agents have roles, goals, and constraints; they communicate via structured messages. +- **Model-agnostic:** LLMs are abstracted behind adapters (OpenAI, Anthropic, local). +- **Determinism:** Explicit state transitions, logged decisions, and replayable execution traces. + +## Installation + +```bash +pip install -e . +# With LLM adapters (optional; MAA and core are built-in): +pip install -e ".[openai]" # OpenAIAdapter +pip install -e ".[anthropic]" +pip install -e ".[local]" +pip install -e ".[all]" # openai + anthropic + local +pip install -e ".[dev]" # pytest +``` + +- **MAA** (Manufacturing Authority Add-On) is built-in; no extra dependency. +- **Optional extras:** `openai`, `anthropic`, `local` are for LLM providers; `dev` is for tests (`pytest`). +- **OpenAIAdapter** requires `fusionagi[openai]`; use `from fusionagi.adapters import OpenAIAdapter` (or `from fusionagi.adapters.openai_adapter import OpenAIAdapter` if the optional import is not used). +- **CachedAdapter** wraps any `LLMAdapter` and caches `complete()` responses; no extra dependency. Use `from fusionagi.adapters import CachedAdapter`. + +## Project Layout + +``` +fusionagi/ +├── core/ # Orchestrator, event bus, state manager, persistence +├── agents/ # Planner, Reasoner, Executor, Critic +├── reasoning/ # Chain-of-thought and tree-of-thought +├── planning/ # Plan graph, dependency resolution, strategies +├── memory/ # Working, episodic, reflective memory +├── tools/ # Tool registry, safe runner, builtins +├── governance/ # Guardrails, rate limiting, access control, override hooks +├── reflection/ # Post-task reflection and heuristic updates +├── self_improvement/ # Self-correction, auto-recommend/suggest, auto-training, FusionAGILoop +├── interfaces/ # Admin panel, multi-modal UI, voice, conversation +├── adapters/ # LLM adapters (OpenAI, stub, cache) +├── schemas/ # Task, message, plan, recommendation schemas +├── tests/ +└── docs/ +``` + +## Usage + +```python +from fusionagi import Orchestrator, EventBus, StateManager +from fusionagi.agents import PlannerAgent + +bus = EventBus() +state = StateManager() +orch = Orchestrator(event_bus=bus, state_manager=state) +planner_agent = PlannerAgent() +orch.register_agent("planner", planner_agent) +task_id = orch.submit_task(goal="Summarize the project README") +``` + +With self-improvement (FusionAGILoop): self-correction, auto-recommend, auto-training: + +```python +from fusionagi import Orchestrator, EventBus, StateManager, FusionAGILoop +from fusionagi.memory import ReflectiveMemory +from fusionagi.agents import CriticAgent + +bus = EventBus() +state = StateManager() +orch = Orchestrator(event_bus=bus, state_manager=state) +reflective = ReflectiveMemory() +critic = CriticAgent(identity="critic") +orch.register_agent("critic", critic) + +agi_loop = FusionAGILoop( + event_bus=bus, + state_manager=state, + orchestrator=orch, + critic_agent=critic, + reflective_memory=reflective, + auto_retry_on_failure=False, + on_recommendations=lambda recs: print("Recommendations:", len(recs)), + on_training_suggestions=lambda sugs: print("Training suggestions:", len(sugs)), +) +# On task_state_changed(FAILED) and reflection_done, AGI loop runs correction, recommend, and training. 
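+
+# Hypothetical follow-up (sketch): submitting a task through the same
+# Orchestrator API from the first Usage example gives the loop something to
+# react to, firing the callbacks configured above on failure or reflection.
+task_id = orch.submit_task(goal="Summarize the latest execution trace")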
+``` + +With an LLM adapter (optional): + +```python +from fusionagi.adapters import StubAdapter, CachedAdapter +# OpenAIAdapter requires: pip install "fusionagi[openai]" +# from fusionagi.adapters import OpenAIAdapter +adapter = CachedAdapter(StubAdapter("response"), max_entries=100) +``` + +With admin control panel and multi-modal UI: + +```python +from fusionagi.interfaces import AdminControlPanel, MultiModalUI +from fusionagi.interfaces import VoiceInterface, VoiceLibrary, ConversationManager + +# Admin panel for system management +admin = AdminControlPanel( + orchestrator=orch, + event_bus=bus, + state_manager=state, +) + +# Add voice profiles +from fusionagi.interfaces.voice import VoiceProfile +voice = VoiceProfile(name="Assistant", language="en-US", style="friendly") +admin.add_voice_profile(voice) + +# Multi-modal user interface +voice_interface = VoiceInterface(stt_provider="whisper", tts_provider="elevenlabs") +ui = MultiModalUI( + orchestrator=orch, + conversation_manager=ConversationManager(), + voice_interface=voice_interface, +) + +# Create user session with text and voice +from fusionagi.interfaces.base import ModalityType +session_id = ui.create_session( + preferred_modalities=[ModalityType.TEXT, ModalityType.VOICE] +) + +# Interactive task submission with real-time feedback +task_id = await ui.submit_task_interactive(session_id, goal="Analyze data") +``` + +## Interfaces + +FusionAGI provides comprehensive interface layers: + +### Admin Control Panel +- Voice library management (TTS/STT configuration) +- Conversation style tuning (personality, formality, verbosity) +- Agent configuration and monitoring +- System health and performance metrics +- Governance policies and audit logs +- Manufacturing authority (MAA) oversight + +### Multi-Modal User Interface +Supports multiple sensory modalities: +- **Text**: Chat, commands, structured input +- **Voice**: Speech-to-text, text-to-speech +- **Visual**: Images, video, AR/VR (extensible) +- **Haptic**: Touch feedback (extensible) +- **Gesture**: Motion control (extensible) +- **Biometric**: Emotion detection (extensible) + +See `docs/interfaces.md` and `examples/` for detailed usage. + +## Development + +- See `docs/architecture.md` for high-level components. +- See `docs/interfaces.md` for UI/UX layer details. +- Use `.cursor/rules` for coding standards. 
+- Run tests: `pytest tests/` +- Examples: `python examples/admin_panel_example.py` + +## License + +MIT diff --git a/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/RECORD b/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/RECORD new file mode 100644 index 0000000..459221d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/RECORD @@ -0,0 +1,10 @@ +__editable__.fusionagi-0.1.0.pth,sha256=OCK6C9z4Px1YqsNsb-jP44tx4thjWydRAS4dI3hldvI,89 +__editable___fusionagi_0_1_0_finder.py,sha256=PdyF5t3wHlZGZo1DDnY1dzqWplNfsRE5rHcDim4khYI,3395 +__pycache__/__editable___fusionagi_0_1_0_finder.cpython-312.pyc,, +fusionagi-0.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +fusionagi-0.1.0.dist-info/METADATA,sha256=3k0njqoF77yeGM3A6gwtQXLkkKINQloo8BcLaW6Sj98,8239 +fusionagi-0.1.0.dist-info/RECORD,, +fusionagi-0.1.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fusionagi-0.1.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92 +fusionagi-0.1.0.dist-info/direct_url.json,sha256=jtPnxUGhrPC1bQr_ZdrmecaunvUo3f7mvaJUXArJM3M,80 +fusionagi-0.1.0.dist-info/top_level.txt,sha256=9ljSXGG3NB6zz93CyUiKVEb_zu6EpcYrLFkpxdTQ-FE,10 diff --git a/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/REQUESTED b/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/WHEEL new file mode 100644 index 0000000..0885d05 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.10.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/direct_url.json b/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/direct_url.json new file mode 100644 index 0000000..4a47770 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/direct_url.json @@ -0,0 +1 @@ +{"dir_info": {"editable": true}, "url": "file:///home/intlc/projects/FusionAGI"} \ No newline at end of file diff --git a/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/top_level.txt b/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/top_level.txt new file mode 100644 index 0000000..21f3246 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/fusionagi-0.1.0.dist-info/top_level.txt @@ -0,0 +1 @@ +fusionagi diff --git a/.venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/METADATA b/.venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/METADATA new file mode 100644 index 0000000..8a2f639 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/METADATA @@ -0,0 +1,202 @@ +Metadata-Version: 2.4 +Name: h11 +Version: 0.16.0 +Summary: A pure-Python, bring-your-own-I/O implementation of HTTP/1.1 +Home-page: https://github.com/python-hyper/h11 +Author: Nathaniel J. 
Smith +Author-email: njs@pobox.com +License: MIT +Classifier: Development Status :: 3 - Alpha +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: System :: Networking +Requires-Python: >=3.8 +License-File: LICENSE.txt +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: home-page +Dynamic: license +Dynamic: license-file +Dynamic: requires-python +Dynamic: summary + +h11 +=== + +.. image:: https://travis-ci.org/python-hyper/h11.svg?branch=master + :target: https://travis-ci.org/python-hyper/h11 + :alt: Automated test status + +.. image:: https://codecov.io/gh/python-hyper/h11/branch/master/graph/badge.svg + :target: https://codecov.io/gh/python-hyper/h11 + :alt: Test coverage + +.. image:: https://readthedocs.org/projects/h11/badge/?version=latest + :target: http://h11.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +This is a little HTTP/1.1 library written from scratch in Python, +heavily inspired by `hyper-h2 `_. + +It's a "bring-your-own-I/O" library; h11 contains no IO code +whatsoever. This means you can hook h11 up to your favorite network +API, and that could be anything you want: synchronous, threaded, +asynchronous, or your own implementation of `RFC 6214 +`_ -- h11 won't judge you. +(Compare this to the current state of the art, where every time a `new +network API `_ comes along then someone +gets to start over reimplementing the entire HTTP protocol from +scratch.) Cory Benfield made an `excellent blog post describing the +benefits of this approach +`_, or if you like video +then here's his `PyCon 2016 talk on the same theme +`_. + +This also means that h11 is not immediately useful out of the box: +it's a toolkit for building programs that speak HTTP, not something +that could directly replace ``requests`` or ``twisted.web`` or +whatever. But h11 makes it much easier to implement something like +``requests`` or ``twisted.web``. + +At a high level, working with h11 goes like this: + +1) First, create an ``h11.Connection`` object to track the state of a + single HTTP/1.1 connection. + +2) When you read data off the network, pass it to + ``conn.receive_data(...)``; you'll get back a list of objects + representing high-level HTTP "events". + +3) When you want to send a high-level HTTP event, create the + corresponding "event" object and pass it to ``conn.send(...)``; + this will give you back some bytes that you can then push out + through the network. + +For example, a client might instantiate and then send a +``h11.Request`` object, then zero or more ``h11.Data`` objects for the +request body (e.g., if this is a POST), and then a +``h11.EndOfMessage`` to indicate the end of the message. Then the +server would then send back a ``h11.Response``, some ``h11.Data``, and +its own ``h11.EndOfMessage``. If either side violates the protocol, +you'll get a ``h11.ProtocolError`` exception. 
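+
+As a minimal client-side sketch of that flow (a plain blocking socket and
+``example.com`` are used purely for illustration; any I/O layer works the
+same way):
+
+.. code-block:: python
+
+   import socket
+
+   import h11
+
+   conn = h11.Connection(our_role=h11.CLIENT)
+   # Turn high-level events into bytes that you write to the wire yourself:
+   to_send = conn.send(
+       h11.Request(method="GET", target="/", headers=[("Host", "example.com")])
+   )
+   to_send += conn.send(h11.EndOfMessage())
+
+   with socket.create_connection(("example.com", 80)) as sock:
+       sock.sendall(to_send)
+       # Feed whatever you read back in, then pull events out one at a time:
+       conn.receive_data(sock.recv(4096))
+       event = conn.next_event()
+       # `event` is typically an h11.Response here; keep calling next_event()
+       # (and receive_data()) until you see h11.EndOfMessage.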
+ +h11 is suitable for implementing both servers and clients, and has a +pleasantly symmetric API: the events you send as a client are exactly +the ones that you receive as a server and vice-versa. + +`Here's an example of a tiny HTTP client +`_ + +It also has `a fine manual `_. + +FAQ +--- + +*Whyyyyy?* + +I wanted to play with HTTP in `Curio +`__ and `Trio +`__, which at the time didn't have any +HTTP libraries. So I thought, no big deal, Python has, like, a dozen +different implementations of HTTP, surely I can find one that's +reusable. I didn't find one, but I did find Cory's call-to-arms +blog-post. So I figured, well, fine, if I have to implement HTTP from +scratch, at least I can make sure no-one *else* has to ever again. + +*Should I use it?* + +Maybe. You should be aware that it's a very young project. But, it's +feature complete and has an exhaustive test-suite and complete docs, +so the next step is for people to try using it and see how it goes +:-). If you do then please let us know -- if nothing else we'll want +to talk to you before making any incompatible changes! + +*What are the features/limitations?* + +Roughly speaking, it's trying to be a robust, complete, and non-hacky +implementation of the first "chapter" of the HTTP/1.1 spec: `RFC 7230: +HTTP/1.1 Message Syntax and Routing +`_. That is, it mostly focuses on +implementing HTTP at the level of taking bytes on and off the wire, +and the headers related to that, and tries to be anal about spec +conformance. It doesn't know about higher-level concerns like URL +routing, conditional GETs, cross-origin cookie policies, or content +negotiation. But it does know how to take care of framing, +cross-version differences in keep-alive handling, and the "obsolete +line folding" rule, so you can focus your energies on the hard / +interesting parts for your application, and it tries to support the +full specification in the sense that any useful HTTP/1.1 conformant +application should be able to use h11. + +It's pure Python, and has no dependencies outside of the standard +library. + +It has a test suite with 100.0% coverage for both statements and +branches. + +Currently it supports Python 3 (testing on 3.8-3.12) and PyPy 3. +The last Python 2-compatible version was h11 0.11.x. +(Originally it had a Cython wrapper for `http-parser +`_ and a beautiful nested state +machine implemented with ``yield from`` to postprocess the output. But +I had to take these out -- the new *parser* needs fewer lines-of-code +than the old *parser wrapper*, is written in pure Python, uses no +exotic language syntax, and has more features. It's sad, really; that +old state machine was really slick. I just need a few sentences here +to mourn that.) + +I don't know how fast it is. I haven't benchmarked or profiled it yet, +so it's probably got a few pointless hot spots, and I've been trying +to err on the side of simplicity and robustness instead of +micro-optimization. But at the architectural level I tried hard to +avoid fundamentally bad decisions, e.g., I believe that all the +parsing algorithms remain linear-time even in the face of pathological +input like slowloris, and there are no byte-by-byte loops. (I also +believe that it maintains bounded memory usage in the face of +arbitrary/pathological input.) + +The whole library is ~800 lines-of-code. You can read and understand +the whole thing in less than an hour. 
Most of the energy invested in +this so far has been spent on trying to keep things simple by +minimizing special-cases and ad hoc state manipulation; even though it +is now quite small and simple, I'm still annoyed that I haven't +figured out how to make it even smaller and simpler. (Unfortunately, +HTTP does not lend itself to simplicity.) + +The API is ~feature complete and I don't expect the general outlines +to change much, but you can't judge an API's ergonomics until you +actually document and use it, so I'd expect some changes in the +details. + +*How do I try it?* + +.. code-block:: sh + + $ pip install h11 + $ git clone git@github.com:python-hyper/h11 + $ cd h11/examples + $ python basic-client.py + +and go from there. + +*License?* + +MIT + +*Code of conduct?* + +Contributors are requested to follow our `code of conduct +`_ in +all project spaces. diff --git a/.venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/RECORD b/.venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/RECORD new file mode 100644 index 0000000..a8f8e63 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/RECORD @@ -0,0 +1,29 @@ +h11-0.16.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +h11-0.16.0.dist-info/METADATA,sha256=KPMmCYrAn8unm48YD5YIfIQf4kViFct7hyqcfVzRnWQ,8348 +h11-0.16.0.dist-info/RECORD,, +h11-0.16.0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91 +h11-0.16.0.dist-info/licenses/LICENSE.txt,sha256=N9tbuFkm2yikJ6JYZ_ELEjIAOuob5pzLhRE4rbjm82E,1124 +h11-0.16.0.dist-info/top_level.txt,sha256=F7dC4jl3zeh8TGHEPaWJrMbeuoWbS379Gwdi-Yvdcis,4 +h11/__init__.py,sha256=iO1KzkSO42yZ6ffg-VMgbx_ZVTWGUY00nRYEWn-s3kY,1507 +h11/__pycache__/__init__.cpython-312.pyc,, +h11/__pycache__/_abnf.cpython-312.pyc,, +h11/__pycache__/_connection.cpython-312.pyc,, +h11/__pycache__/_events.cpython-312.pyc,, +h11/__pycache__/_headers.cpython-312.pyc,, +h11/__pycache__/_readers.cpython-312.pyc,, +h11/__pycache__/_receivebuffer.cpython-312.pyc,, +h11/__pycache__/_state.cpython-312.pyc,, +h11/__pycache__/_util.cpython-312.pyc,, +h11/__pycache__/_version.cpython-312.pyc,, +h11/__pycache__/_writers.cpython-312.pyc,, +h11/_abnf.py,sha256=ybixr0xsupnkA6GFAyMubuXF6Tc1lb_hF890NgCsfNc,4815 +h11/_connection.py,sha256=k9YRVf6koZqbttBW36xSWaJpWdZwa-xQVU9AHEo9DuI,26863 +h11/_events.py,sha256=I97aXoal1Wu7dkL548BANBUCkOIbe-x5CioYA9IBY14,11792 +h11/_headers.py,sha256=P7D-lBNxHwdLZPLimmYwrPG-9ZkjElvvJZJdZAgSP-4,10412 +h11/_readers.py,sha256=a4RypORUCC3d0q_kxPuBIM7jTD8iLt5X91TH0FsduN4,8590 +h11/_receivebuffer.py,sha256=xrspsdsNgWFxRfQcTXxR8RrdjRXXTK0Io5cQYWpJ1Ws,5252 +h11/_state.py,sha256=_5LG_BGR8FCcFQeBPH-TMHgm_-B-EUcWCnQof_9XjFE,13231 +h11/_util.py,sha256=LWkkjXyJaFlAy6Lt39w73UStklFT5ovcvo0TkY7RYuk,4888 +h11/_version.py,sha256=GVSsbPSPDcOuF6ptfIiXnVJoaEm3ygXbMnqlr_Giahw,686 +h11/_writers.py,sha256=oFKm6PtjeHfbj4RLX7VB7KDc1gIY53gXG3_HR9ltmTA,5081 +h11/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 diff --git a/.venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/WHEEL new file mode 100644 index 0000000..1eb3c49 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (78.1.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/licenses/LICENSE.txt b/.venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/licenses/LICENSE.txt new file mode 100644 
index 0000000..8f080ea --- /dev/null +++ b/.venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/licenses/LICENSE.txt @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2016 Nathaniel J. Smith and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/.venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/top_level.txt b/.venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/top_level.txt new file mode 100644 index 0000000..0d24def --- /dev/null +++ b/.venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/top_level.txt @@ -0,0 +1 @@ +h11 diff --git a/.venv/lib/python3.12/site-packages/h11/__init__.py b/.venv/lib/python3.12/site-packages/h11/__init__.py new file mode 100644 index 0000000..989e92c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/h11/__init__.py @@ -0,0 +1,62 @@ +# A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230), +# containing no networking code at all, loosely modelled on hyper-h2's generic +# implementation of HTTP/2 (and in particular the h2.connection.H2Connection +# class). There's still a bunch of subtle details you need to get right if you +# want to make this actually useful, because it doesn't implement all the +# semantics to check that what you're asking to write to the wire is sensible, +# but at least it gets you out of dealing with the wire itself. 
+ +from h11._connection import Connection, NEED_DATA, PAUSED +from h11._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from h11._state import ( + CLIENT, + CLOSED, + DONE, + ERROR, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from h11._util import LocalProtocolError, ProtocolError, RemoteProtocolError +from h11._version import __version__ + +PRODUCT_ID = "python-h11/" + __version__ + + +__all__ = ( + "Connection", + "NEED_DATA", + "PAUSED", + "ConnectionClosed", + "Data", + "EndOfMessage", + "Event", + "InformationalResponse", + "Request", + "Response", + "CLIENT", + "CLOSED", + "DONE", + "ERROR", + "IDLE", + "MUST_CLOSE", + "SEND_BODY", + "SEND_RESPONSE", + "SERVER", + "SWITCHED_PROTOCOL", + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", +) diff --git a/.venv/lib/python3.12/site-packages/h11/_abnf.py b/.venv/lib/python3.12/site-packages/h11/_abnf.py new file mode 100644 index 0000000..933587f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/h11/_abnf.py @@ -0,0 +1,132 @@ +# We use native strings for all the re patterns, to take advantage of string +# formatting, and then convert to bytestrings when compiling the final re +# objects. + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace +# OWS = *( SP / HTAB ) +# ; optional whitespace +OWS = r"[ \t]*" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators +# token = 1*tchar +# +# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" +# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" +# / DIGIT / ALPHA +# ; any VCHAR, except delimiters +token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields +# field-name = token +field_name = token + +# The standard says: +# +# field-value = *( field-content / obs-fold ) +# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] +# field-vchar = VCHAR / obs-text +# obs-fold = CRLF 1*( SP / HTAB ) +# ; obsolete line folding +# ; see Section 3.2.4 +# +# https://tools.ietf.org/html/rfc5234#appendix-B.1 +# +# VCHAR = %x21-7E +# ; visible (printing) characters +# +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string +# obs-text = %x80-FF +# +# However, the standard definition of field-content is WRONG! It disallows +# fields containing a single visible character surrounded by whitespace, +# e.g. "foo a bar". +# +# See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189 +# +# So our definition of field_content attempts to fix it up... +# +# Also, we allow lots of control characters, because apparently people assume +# that they're legal in practice (e.g., google analytics makes cookies with +# \x01 in them!): +# https://github.com/python-hyper/h11/issues/57 +# We still don't allow NUL or whitespace, because those are often treated as +# meta-characters and letting them through can lead to nasty issues like SSRF. +vchar = r"[\x21-\x7e]" +vchar_or_obs_text = r"[^\x00\s]" +field_vchar = vchar_or_obs_text +field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals()) + +# We handle obs-fold at a different level, and our fixed-up field_content +# already grows to swallow the whole value, so ? 
instead of * +field_value = r"({field_content})?".format(**globals()) + +# header-field = field-name ":" OWS field-value OWS +header_field = ( + r"(?P{field_name})" + r":" + r"{OWS}" + r"(?P{field_value})" + r"{OWS}".format(**globals()) +) + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line +# +# request-line = method SP request-target SP HTTP-version CRLF +# method = token +# HTTP-version = HTTP-name "/" DIGIT "." DIGIT +# HTTP-name = %x48.54.54.50 ; "HTTP", case-sensitive +# +# request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full +# URL, host+port (for connect), or even "*", but in any case we are guaranteed +# that it contists of the visible printing characters. +method = token +request_target = r"{vchar}+".format(**globals()) +http_version = r"HTTP/(?P[0-9]\.[0-9])" +request_line = ( + r"(?P{method})" + r" " + r"(?P{request_target})" + r" " + r"{http_version}".format(**globals()) +) + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line +# +# status-line = HTTP-version SP status-code SP reason-phrase CRLF +# status-code = 3DIGIT +# reason-phrase = *( HTAB / SP / VCHAR / obs-text ) +status_code = r"[0-9]{3}" +reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals()) +status_line = ( + r"{http_version}" + r" " + r"(?P{status_code})" + # However, there are apparently a few too many servers out there that just + # leave out the reason phrase: + # https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036 + # https://github.com/seanmonstar/httparse/issues/29 + # so make it optional. ?: is a non-capturing group. + r"(?: (?P{reason_phrase}))?".format(**globals()) +) + +HEXDIG = r"[0-9A-Fa-f]" +# Actually +# +# chunk-size = 1*HEXDIG +# +# but we impose an upper-limit to avoid ridiculosity. len(str(2**64)) == 20 +chunk_size = r"({HEXDIG}){{1,20}}".format(**globals()) +# Actually +# +# chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] ) +# +# but we aren't parsing the things so we don't really care. +chunk_ext = r";.*" +chunk_header = ( + r"(?P{chunk_size})" + r"(?P{chunk_ext})?" + r"{OWS}\r\n".format( + **globals() + ) # Even though the specification does not allow for extra whitespaces, + # we are lenient with trailing whitespaces because some servers on the wild use it. +) diff --git a/.venv/lib/python3.12/site-packages/h11/_connection.py b/.venv/lib/python3.12/site-packages/h11/_connection.py new file mode 100644 index 0000000..e37d82a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/h11/_connection.py @@ -0,0 +1,659 @@ +# This contains the main Connection class. Everything in h11 revolves around +# this. +from typing import ( + Any, + Callable, + cast, + Dict, + List, + Optional, + overload, + Tuple, + Type, + Union, +) + +from ._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from ._headers import get_comma_header, has_expect_100_continue, set_comma_header +from ._readers import READERS, ReadersType +from ._receivebuffer import ReceiveBuffer +from ._state import ( + _SWITCH_CONNECT, + _SWITCH_UPGRADE, + CLIENT, + ConnectionState, + DONE, + ERROR, + MIGHT_SWITCH_PROTOCOL, + SEND_BODY, + SERVER, + SWITCHED_PROTOCOL, +) +from ._util import ( # Import the internal things we need + LocalProtocolError, + RemoteProtocolError, + Sentinel, +) +from ._writers import WRITERS, WritersType + +# Everything in __all__ gets re-exported as part of the h11 public API. 
+__all__ = ["Connection", "NEED_DATA", "PAUSED"] + + +class NEED_DATA(Sentinel, metaclass=Sentinel): + pass + + +class PAUSED(Sentinel, metaclass=Sentinel): + pass + + +# If we ever have this much buffered without it making a complete parseable +# event, we error out. The only time we really buffer is when reading the +# request/response line + headers together, so this is effectively the limit on +# the size of that. +# +# Some precedents for defaults: +# - node.js: 80 * 1024 +# - tomcat: 8 * 1024 +# - IIS: 16 * 1024 +# - Apache: <8 KiB per line> +DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024 + + +# RFC 7230's rules for connection lifecycles: +# - If either side says they want to close the connection, then the connection +# must close. +# - HTTP/1.1 defaults to keep-alive unless someone says Connection: close +# - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive +# (and even this is a mess -- e.g. if you're implementing a proxy then +# sending Connection: keep-alive is forbidden). +# +# We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. So +# our rule is: +# - If someone says Connection: close, we will close +# - If someone uses HTTP/1.0, we will close. +def _keep_alive(event: Union[Request, Response]) -> bool: + connection = get_comma_header(event.headers, b"connection") + if b"close" in connection: + return False + if getattr(event, "http_version", b"1.1") < b"1.1": + return False + return True + + +def _body_framing( + request_method: bytes, event: Union[Request, Response] +) -> Tuple[str, Union[Tuple[()], Tuple[int]]]: + # Called when we enter SEND_BODY to figure out framing information for + # this body. + # + # These are the only two events that can trigger a SEND_BODY state: + assert type(event) in (Request, Response) + # Returns one of: + # + # ("content-length", count) + # ("chunked", ()) + # ("http/1.0", ()) + # + # which are (lookup key, *args) for constructing body reader/writer + # objects. + # + # Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3 + # + # Step 1: some responses always have an empty body, regardless of what the + # headers say. + if type(event) is Response: + if ( + event.status_code in (204, 304) + or request_method == b"HEAD" + or (request_method == b"CONNECT" and 200 <= event.status_code < 300) + ): + return ("content-length", (0,)) + # Section 3.3.3 also lists another case -- responses with status_code + # < 200. For us these are InformationalResponses, not Responses, so + # they can't get into this function in the first place. + assert event.status_code >= 200 + + # Step 2: check for Transfer-Encoding (T-E beats C-L): + transfer_encodings = get_comma_header(event.headers, b"transfer-encoding") + if transfer_encodings: + assert transfer_encodings == [b"chunked"] + return ("chunked", ()) + + # Step 3: check for Content-Length + content_lengths = get_comma_header(event.headers, b"content-length") + if content_lengths: + return ("content-length", (int(content_lengths[0]),)) + + # Step 4: no applicable headers; fallback/default depends on type + if type(event) is Request: + return ("content-length", (0,)) + else: + return ("http/1.0", ()) + + +################################################################ +# +# The main Connection class +# +################################################################ + + +class Connection: + """An object encapsulating the state of an HTTP connection. + + Args: + our_role: If you're implementing a client, pass :data:`h11.CLIENT`. 
If + you're implementing a server, pass :data:`h11.SERVER`. + + max_incomplete_event_size (int): + The maximum number of bytes we're willing to buffer of an + incomplete event. In practice this mostly sets a limit on the + maximum size of the request/response line + headers. If this is + exceeded, then :meth:`next_event` will raise + :exc:`RemoteProtocolError`. + + """ + + def __init__( + self, + our_role: Type[Sentinel], + max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE, + ) -> None: + self._max_incomplete_event_size = max_incomplete_event_size + # State and role tracking + if our_role not in (CLIENT, SERVER): + raise ValueError(f"expected CLIENT or SERVER, not {our_role!r}") + self.our_role = our_role + self.their_role: Type[Sentinel] + if our_role is CLIENT: + self.their_role = SERVER + else: + self.their_role = CLIENT + self._cstate = ConnectionState() + + # Callables for converting data->events or vice-versa given the + # current state + self._writer = self._get_io_object(self.our_role, None, WRITERS) + self._reader = self._get_io_object(self.their_role, None, READERS) + + # Holds any unprocessed received data + self._receive_buffer = ReceiveBuffer() + # If this is true, then it indicates that the incoming connection was + # closed *after* the end of whatever's in self._receive_buffer: + self._receive_buffer_closed = False + + # Extra bits of state that don't fit into the state machine. + # + # These two are only used to interpret framing headers for figuring + # out how to read/write response bodies. their_http_version is also + # made available as a convenient public API. + self.their_http_version: Optional[bytes] = None + self._request_method: Optional[bytes] = None + # This is pure flow-control and doesn't at all affect the set of legal + # transitions, so no need to bother ConnectionState with it: + self.client_is_waiting_for_100_continue = False + + @property + def states(self) -> Dict[Type[Sentinel], Type[Sentinel]]: + """A dictionary like:: + + {CLIENT: , SERVER: } + + See :ref:`state-machine` for details. + + """ + return dict(self._cstate.states) + + @property + def our_state(self) -> Type[Sentinel]: + """The current state of whichever role we are playing. See + :ref:`state-machine` for details. + """ + return self._cstate.states[self.our_role] + + @property + def their_state(self) -> Type[Sentinel]: + """The current state of whichever role we are NOT playing. See + :ref:`state-machine` for details. + """ + return self._cstate.states[self.their_role] + + @property + def they_are_waiting_for_100_continue(self) -> bool: + return self.their_role is CLIENT and self.client_is_waiting_for_100_continue + + def start_next_cycle(self) -> None: + """Attempt to reset our connection state for a new request/response + cycle. + + If both client and server are in :data:`DONE` state, then resets them + both to :data:`IDLE` state in preparation for a new request/response + cycle on this same connection. Otherwise, raises a + :exc:`LocalProtocolError`. + + See :ref:`keepalive-and-pipelining`. 
+ + """ + old_states = dict(self._cstate.states) + self._cstate.start_next_cycle() + self._request_method = None + # self.their_http_version gets left alone, since it presumably lasts + # beyond a single request/response cycle + assert not self.client_is_waiting_for_100_continue + self._respond_to_state_changes(old_states) + + def _process_error(self, role: Type[Sentinel]) -> None: + old_states = dict(self._cstate.states) + self._cstate.process_error(role) + self._respond_to_state_changes(old_states) + + def _server_switch_event(self, event: Event) -> Optional[Type[Sentinel]]: + if type(event) is InformationalResponse and event.status_code == 101: + return _SWITCH_UPGRADE + if type(event) is Response: + if ( + _SWITCH_CONNECT in self._cstate.pending_switch_proposals + and 200 <= event.status_code < 300 + ): + return _SWITCH_CONNECT + return None + + # All events go through here + def _process_event(self, role: Type[Sentinel], event: Event) -> None: + # First, pass the event through the state machine to make sure it + # succeeds. + old_states = dict(self._cstate.states) + if role is CLIENT and type(event) is Request: + if event.method == b"CONNECT": + self._cstate.process_client_switch_proposal(_SWITCH_CONNECT) + if get_comma_header(event.headers, b"upgrade"): + self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE) + server_switch_event = None + if role is SERVER: + server_switch_event = self._server_switch_event(event) + self._cstate.process_event(role, type(event), server_switch_event) + + # Then perform the updates triggered by it. + + if type(event) is Request: + self._request_method = event.method + + if role is self.their_role and type(event) in ( + Request, + Response, + InformationalResponse, + ): + event = cast(Union[Request, Response, InformationalResponse], event) + self.their_http_version = event.http_version + + # Keep alive handling + # + # RFC 7230 doesn't really say what one should do if Connection: close + # shows up on a 1xx InformationalResponse. I think the idea is that + # this is not supposed to happen. In any case, if it does happen, we + # ignore it. + if type(event) in (Request, Response) and not _keep_alive( + cast(Union[Request, Response], event) + ): + self._cstate.process_keep_alive_disabled() + + # 100-continue + if type(event) is Request and has_expect_100_continue(event): + self.client_is_waiting_for_100_continue = True + if type(event) in (InformationalResponse, Response): + self.client_is_waiting_for_100_continue = False + if role is CLIENT and type(event) in (Data, EndOfMessage): + self.client_is_waiting_for_100_continue = False + + self._respond_to_state_changes(old_states, event) + + def _get_io_object( + self, + role: Type[Sentinel], + event: Optional[Event], + io_dict: Union[ReadersType, WritersType], + ) -> Optional[Callable[..., Any]]: + # event may be None; it's only used when entering SEND_BODY + state = self._cstate.states[role] + if state is SEND_BODY: + # Special case: the io_dict has a dict of reader/writer factories + # that depend on the request/response framing. + framing_type, args = _body_framing( + cast(bytes, self._request_method), cast(Union[Request, Response], event) + ) + return io_dict[SEND_BODY][framing_type](*args) # type: ignore[index] + else: + # General case: the io_dict just has the appropriate reader/writer + # for this state + return io_dict.get((role, state)) # type: ignore[return-value] + + # This must be called after any action that might have caused + # self._cstate.states to change. 
+ def _respond_to_state_changes( + self, + old_states: Dict[Type[Sentinel], Type[Sentinel]], + event: Optional[Event] = None, + ) -> None: + # Update reader/writer + if self.our_state != old_states[self.our_role]: + self._writer = self._get_io_object(self.our_role, event, WRITERS) + if self.their_state != old_states[self.their_role]: + self._reader = self._get_io_object(self.their_role, event, READERS) + + @property + def trailing_data(self) -> Tuple[bytes, bool]: + """Data that has been received, but not yet processed, represented as + a tuple with two elements, where the first is a byte-string containing + the unprocessed data itself, and the second is a bool that is True if + the receive connection was closed. + + See :ref:`switching-protocols` for discussion of why you'd want this. + """ + return (bytes(self._receive_buffer), self._receive_buffer_closed) + + def receive_data(self, data: bytes) -> None: + """Add data to our internal receive buffer. + + This does not actually do any processing on the data, just stores + it. To trigger processing, you have to call :meth:`next_event`. + + Args: + data (:term:`bytes-like object`): + The new data that was just received. + + Special case: If *data* is an empty byte-string like ``b""``, + then this indicates that the remote side has closed the + connection (end of file). Normally this is convenient, because + standard Python APIs like :meth:`file.read` or + :meth:`socket.recv` use ``b""`` to indicate end-of-file, while + other failures to read are indicated using other mechanisms + like raising :exc:`TimeoutError`. When using such an API you + can just blindly pass through whatever you get from ``read`` + to :meth:`receive_data`, and everything will work. + + But, if you have an API where reading an empty string is a + valid non-EOF condition, then you need to be aware of this and + make sure to check for such strings and avoid passing them to + :meth:`receive_data`. + + Returns: + Nothing, but after calling this you should call :meth:`next_event` + to parse the newly received data. + + Raises: + RuntimeError: + Raised if you pass an empty *data*, indicating EOF, and then + pass a non-empty *data*, indicating more data that somehow + arrived after the EOF. + + (Calling ``receive_data(b"")`` multiple times is fine, + and equivalent to calling it once.) + + """ + if data: + if self._receive_buffer_closed: + raise RuntimeError("received close, then received more data?") + self._receive_buffer += data + else: + self._receive_buffer_closed = True + + def _extract_next_receive_event( + self, + ) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: + state = self.their_state + # We don't pause immediately when they enter DONE, because even in + # DONE state we can still process a ConnectionClosed() event. But + # if we have data in our buffer, then we definitely aren't getting + # a ConnectionClosed() immediately and we need to pause. + if state is DONE and self._receive_buffer: + return PAUSED + if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL: + return PAUSED + assert self._reader is not None + event = self._reader(self._receive_buffer) + if event is None: + if not self._receive_buffer and self._receive_buffer_closed: + # In some unusual cases (basically just HTTP/1.0 bodies), EOF + # triggers an actual protocol event; in that case, we want to + # return that event, and then the state will change and we'll + # get called again to generate the actual ConnectionClosed(). 
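+                # (For example, Http10Reader.read_eof() returns an
+                # EndOfMessage() here instead of raising an error.)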
+ if hasattr(self._reader, "read_eof"): + event = self._reader.read_eof() + else: + event = ConnectionClosed() + if event is None: + event = NEED_DATA + return event # type: ignore[no-any-return] + + def next_event(self) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: + """Parse the next event out of our receive buffer, update our internal + state, and return it. + + This is a mutating operation -- think of it like calling :func:`next` + on an iterator. + + Returns: + : One of three things: + + 1) An event object -- see :ref:`events`. + + 2) The special constant :data:`NEED_DATA`, which indicates that + you need to read more data from your socket and pass it to + :meth:`receive_data` before this method will be able to return + any more events. + + 3) The special constant :data:`PAUSED`, which indicates that we + are not in a state where we can process incoming data (usually + because the peer has finished their part of the current + request/response cycle, and you have not yet called + :meth:`start_next_cycle`). See :ref:`flow-control` for details. + + Raises: + RemoteProtocolError: + The peer has misbehaved. You should close the connection + (possibly after sending some kind of 4xx response). + + Once this method returns :class:`ConnectionClosed` once, then all + subsequent calls will also return :class:`ConnectionClosed`. + + If this method raises any exception besides :exc:`RemoteProtocolError` + then that's a bug -- if it happens please file a bug report! + + If this method raises any exception then it also sets + :attr:`Connection.their_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. + + """ + + if self.their_state is ERROR: + raise RemoteProtocolError("Can't receive data when peer state is ERROR") + try: + event = self._extract_next_receive_event() + if event not in [NEED_DATA, PAUSED]: + self._process_event(self.their_role, cast(Event, event)) + if event is NEED_DATA: + if len(self._receive_buffer) > self._max_incomplete_event_size: + # 431 is "Request header fields too large" which is pretty + # much the only situation where we can get here + raise RemoteProtocolError( + "Receive buffer too long", error_status_hint=431 + ) + if self._receive_buffer_closed: + # We're still trying to complete some event, but that's + # never going to happen because no more data is coming + raise RemoteProtocolError("peer unexpectedly closed connection") + return event + except BaseException as exc: + self._process_error(self.their_role) + if isinstance(exc, LocalProtocolError): + exc._reraise_as_remote_protocol_error() + else: + raise + + @overload + def send(self, event: ConnectionClosed) -> None: + ... + + @overload + def send( + self, event: Union[Request, InformationalResponse, Response, Data, EndOfMessage] + ) -> bytes: + ... + + @overload + def send(self, event: Event) -> Optional[bytes]: + ... + + def send(self, event: Event) -> Optional[bytes]: + """Convert a high-level event into bytes that can be sent to the peer, + while updating our internal state machine. + + Args: + event: The :ref:`event ` to send. + + Returns: + If ``type(event) is ConnectionClosed``, then returns + ``None``. Otherwise, returns a :term:`bytes-like object`. + + Raises: + LocalProtocolError: + Sending this event at this time would violate our + understanding of the HTTP/1.1 protocol. + + If this method raises any exception then it also sets + :attr:`Connection.our_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. 
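+
+        A hypothetical server-side sketch (assumes ``conn`` is a SERVER
+        :class:`Connection` that has just received a request, and ``sock``
+        is a connected socket -- neither is defined by h11 itself)::
+
+            data = conn.send(h11.Response(status_code=200,
+                                          headers=[("Content-Length", "0")]))
+            data += conn.send(h11.EndOfMessage())
+            sock.sendall(data)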
+ + """ + data_list = self.send_with_data_passthrough(event) + if data_list is None: + return None + else: + return b"".join(data_list) + + def send_with_data_passthrough(self, event: Event) -> Optional[List[bytes]]: + """Identical to :meth:`send`, except that in situations where + :meth:`send` returns a single :term:`bytes-like object`, this instead + returns a list of them -- and when sending a :class:`Data` event, this + list is guaranteed to contain the exact object you passed in as + :attr:`Data.data`. See :ref:`sendfile` for discussion. + + """ + if self.our_state is ERROR: + raise LocalProtocolError("Can't send data when our state is ERROR") + try: + if type(event) is Response: + event = self._clean_up_response_headers_for_sending(event) + # We want to call _process_event before calling the writer, + # because if someone tries to do something invalid then this will + # give a sensible error message, while our writers all just assume + # they will only receive valid events. But, _process_event might + # change self._writer. So we have to do a little dance: + writer = self._writer + self._process_event(self.our_role, event) + if type(event) is ConnectionClosed: + return None + else: + # In any situation where writer is None, process_event should + # have raised ProtocolError + assert writer is not None + data_list: List[bytes] = [] + writer(event, data_list.append) + return data_list + except: + self._process_error(self.our_role) + raise + + def send_failed(self) -> None: + """Notify the state machine that we failed to send the data it gave + us. + + This causes :attr:`Connection.our_state` to immediately become + :data:`ERROR` -- see :ref:`error-handling` for discussion. + + """ + self._process_error(self.our_role) + + # When sending a Response, we take responsibility for a few things: + # + # - Sometimes you MUST set Connection: close. We take care of those + # times. (You can also set it yourself if you want, and if you do then + # we'll respect that and close the connection at the right time. But you + # don't have to worry about that unless you want to.) + # + # - The user has to set Content-Length if they want it. Otherwise, for + # responses that have bodies (e.g. not HEAD), then we will automatically + # select the right mechanism for streaming a body of unknown length, + # which depends on depending on the peer's HTTP version. + # + # This function's *only* responsibility is making sure headers are set up + # right -- everything downstream just looks at the headers. There are no + # side channels. + def _clean_up_response_headers_for_sending(self, response: Response) -> Response: + assert type(response) is Response + + headers = response.headers + need_close = False + + # HEAD requests need some special handling: they always act like they + # have Content-Length: 0, and that's how _body_framing treats + # them. But their headers are supposed to match what we would send if + # the request was a GET. (Technically there is one deviation allowed: + # we're allowed to leave out the framing headers -- see + # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as + # easy to get them right.) + method_for_choosing_headers = cast(bytes, self._request_method) + if method_for_choosing_headers == b"HEAD": + method_for_choosing_headers = b"GET" + framing_type, _ = _body_framing(method_for_choosing_headers, response) + if framing_type in ("chunked", "http/1.0"): + # This response has a body of unknown length. 
+ # If our peer is HTTP/1.1, we use Transfer-Encoding: chunked + # If our peer is HTTP/1.0, we use no framing headers, and close the + # connection afterwards. + # + # Make sure to clear Content-Length (in principle user could have + # set both and then we ignored Content-Length b/c + # Transfer-Encoding overwrote it -- this would be naughty of them, + # but the HTTP spec says that if our peer does this then we have + # to fix it instead of erroring out, so we'll accord the user the + # same respect). + headers = set_comma_header(headers, b"content-length", []) + if self.their_http_version is None or self.their_http_version < b"1.1": + # Either we never got a valid request and are sending back an + # error (their_http_version is None), so we assume the worst; + # or else we did get a valid HTTP/1.0 request, so we know that + # they don't understand chunked encoding. + headers = set_comma_header(headers, b"transfer-encoding", []) + # This is actually redundant ATM, since currently we + # unconditionally disable keep-alive when talking to HTTP/1.0 + # peers. But let's be defensive just in case we add + # Connection: keep-alive support later: + if self._request_method != b"HEAD": + need_close = True + else: + headers = set_comma_header(headers, b"transfer-encoding", [b"chunked"]) + + if not self._cstate.keep_alive or need_close: + # Make sure Connection: close is set + connection = set(get_comma_header(headers, b"connection")) + connection.discard(b"keep-alive") + connection.add(b"close") + headers = set_comma_header(headers, b"connection", sorted(connection)) + + return Response( + headers=headers, + status_code=response.status_code, + http_version=response.http_version, + reason=response.reason, + ) diff --git a/.venv/lib/python3.12/site-packages/h11/_events.py b/.venv/lib/python3.12/site-packages/h11/_events.py new file mode 100644 index 0000000..ca1c3ad --- /dev/null +++ b/.venv/lib/python3.12/site-packages/h11/_events.py @@ -0,0 +1,369 @@ +# High level events that make up HTTP/1.1 conversations. Loosely inspired by +# the corresponding events in hyper-h2: +# +# http://python-hyper.org/h2/en/stable/api.html#events +# +# Don't subclass these. Stuff will break. + +import re +from abc import ABC +from dataclasses import dataclass +from typing import List, Tuple, Union + +from ._abnf import method, request_target +from ._headers import Headers, normalize_and_validate +from ._util import bytesify, LocalProtocolError, validate + +# Everything in __all__ gets re-exported as part of the h11 public API. +__all__ = [ + "Event", + "Request", + "InformationalResponse", + "Response", + "Data", + "EndOfMessage", + "ConnectionClosed", +] + +method_re = re.compile(method.encode("ascii")) +request_target_re = re.compile(request_target.encode("ascii")) + + +class Event(ABC): + """ + Base class for h11 events. + """ + + __slots__ = () + + +@dataclass(init=False, frozen=True) +class Request(Event): + """The beginning of an HTTP request. + + Fields: + + .. attribute:: method + + An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte + string. :term:`Bytes-like objects ` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + .. attribute:: target + + The target of an HTTP request, e.g. ``b"/index.html"``, or one of the + more exotic formats described in `RFC 7320, section 5.3 + `_. Always a byte + string. :term:`Bytes-like objects ` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + .. 
attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules ` for details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + ` for details. + + """ + + __slots__ = ("method", "headers", "target", "http_version") + + method: bytes + headers: Headers + target: bytes + http_version: bytes + + def __init__( + self, + *, + method: Union[bytes, str], + headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], + target: Union[bytes, str], + http_version: Union[bytes, str] = b"1.1", + _parsed: bool = False, + ) -> None: + super().__init__() + if isinstance(headers, Headers): + object.__setattr__(self, "headers", headers) + else: + object.__setattr__( + self, "headers", normalize_and_validate(headers, _parsed=_parsed) + ) + if not _parsed: + object.__setattr__(self, "method", bytesify(method)) + object.__setattr__(self, "target", bytesify(target)) + object.__setattr__(self, "http_version", bytesify(http_version)) + else: + object.__setattr__(self, "method", method) + object.__setattr__(self, "target", target) + object.__setattr__(self, "http_version", http_version) + + # "A server MUST respond with a 400 (Bad Request) status code to any + # HTTP/1.1 request message that lacks a Host header field and to any + # request message that contains more than one Host header field or a + # Host header field with an invalid field-value." + # -- https://tools.ietf.org/html/rfc7230#section-5.4 + host_count = 0 + for name, value in self.headers: + if name == b"host": + host_count += 1 + if self.http_version == b"1.1" and host_count == 0: + raise LocalProtocolError("Missing mandatory Host: header") + if host_count > 1: + raise LocalProtocolError("Found multiple Host: headers") + + validate(method_re, self.method, "Illegal method characters") + validate(request_target_re, self.target, "Illegal target characters") + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class _ResponseBase(Event): + __slots__ = ("headers", "http_version", "reason", "status_code") + + headers: Headers + http_version: bytes + reason: bytes + status_code: int + + def __init__( + self, + *, + headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], + status_code: int, + http_version: Union[bytes, str] = b"1.1", + reason: Union[bytes, str] = b"", + _parsed: bool = False, + ) -> None: + super().__init__() + if isinstance(headers, Headers): + object.__setattr__(self, "headers", headers) + else: + object.__setattr__( + self, "headers", normalize_and_validate(headers, _parsed=_parsed) + ) + if not _parsed: + object.__setattr__(self, "reason", bytesify(reason)) + object.__setattr__(self, "http_version", bytesify(http_version)) + if not isinstance(status_code, int): + raise LocalProtocolError("status code must be integer") + # Because IntEnum objects are instances of int, but aren't + # duck-compatible (sigh), see gh-72. + object.__setattr__(self, "status_code", int(status_code)) + else: + object.__setattr__(self, "reason", reason) + object.__setattr__(self, "http_version", http_version) + object.__setattr__(self, "status_code", status_code) + + self.__post_init__() + + def __post_init__(self) -> None: + pass + + # This is an unhashable type. 
+    __hash__ = None  # type: ignore
+
+
+@dataclass(init=False, frozen=True)
+class InformationalResponse(_ResponseBase):
+    """An HTTP informational response.
+
+    Fields:
+
+    .. attribute:: status_code
+
+       The status code of this response, as an integer. For an
+       :class:`InformationalResponse`, this is always in the range [100,
+       200).
+
+    .. attribute:: headers
+
+       Request headers, represented as a list of (name, value) pairs. See
+       :ref:`the header normalization rules <headers-format>` for details.
+
+    .. attribute:: http_version
+
+       The HTTP protocol version, represented as a byte string like
+       ``b"1.1"``. See :ref:`the HTTP version normalization rules
+       <http_version-format>` for details.
+
+    .. attribute:: reason
+
+       The reason phrase of this response, as a byte string. For example:
+       ``b"OK"``, or ``b"Not Found"``.
+
+    """
+
+    def __post_init__(self) -> None:
+        if not (100 <= self.status_code < 200):
+            raise LocalProtocolError(
+                "InformationalResponse status_code should be in range "
+                "[100, 200), not {}".format(self.status_code)
+            )
+
+    # This is an unhashable type.
+    __hash__ = None  # type: ignore
+
+
+@dataclass(init=False, frozen=True)
+class Response(_ResponseBase):
+    """The beginning of an HTTP response.
+
+    Fields:
+
+    .. attribute:: status_code
+
+       The status code of this response, as an integer. For a
+       :class:`Response`, this is always in the range [200, 1000).
+
+    .. attribute:: headers
+
+       Request headers, represented as a list of (name, value) pairs. See
+       :ref:`the header normalization rules <headers-format>` for details.
+
+    .. attribute:: http_version
+
+       The HTTP protocol version, represented as a byte string like
+       ``b"1.1"``. See :ref:`the HTTP version normalization rules
+       <http_version-format>` for details.
+
+    .. attribute:: reason
+
+       The reason phrase of this response, as a byte string. For example:
+       ``b"OK"``, or ``b"Not Found"``.
+
+    """
+
+    def __post_init__(self) -> None:
+        if not (200 <= self.status_code < 1000):
+            raise LocalProtocolError(
+                "Response status_code should be in range [200, 1000), not {}".format(
+                    self.status_code
+                )
+            )
+
+    # This is an unhashable type.
+    __hash__ = None  # type: ignore
+
+
+@dataclass(init=False, frozen=True)
+class Data(Event):
+    """Part of an HTTP message body.
+
+    Fields:
+
+    .. attribute:: data
+
+       A :term:`bytes-like object` containing part of a message body. Or, if
+       using the ``combine=False`` argument to :meth:`Connection.send`, then
+       any object that your socket writing code knows what to do with, and for
+       which calling :func:`len` returns the number of bytes that will be
+       written -- see :ref:`sendfile` for details.
+
+    .. attribute:: chunk_start
+
+       A marker that indicates whether this data object is from the start of a
+       chunked transfer encoding chunk. This field is ignored when a Data
+       event is provided to :meth:`Connection.send`: it is only valid on
+       events emitted from :meth:`Connection.next_event`. You probably
+       shouldn't use this attribute at all; see
+       :ref:`chunk-delimiters-are-bad` for details.
+
+    .. attribute:: chunk_end
+
+       A marker that indicates whether this data object is the last for a
+       given chunked transfer encoding chunk. This field is ignored when
+       a Data event is provided to :meth:`Connection.send`: it is only valid
+       on events emitted from :meth:`Connection.next_event`. You probably
+       shouldn't use this attribute at all; see
+       :ref:`chunk-delimiters-are-bad` for details.
+ + """ + + __slots__ = ("data", "chunk_start", "chunk_end") + + data: bytes + chunk_start: bool + chunk_end: bool + + def __init__( + self, data: bytes, chunk_start: bool = False, chunk_end: bool = False + ) -> None: + object.__setattr__(self, "data", data) + object.__setattr__(self, "chunk_start", chunk_start) + object.__setattr__(self, "chunk_end", chunk_end) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +# XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that +# are forbidden to be sent in a trailer, since processing them as if they were +# present in the header section might bypass external security filters." +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part +# Unfortunately, the list of forbidden fields is long and vague :-/ +@dataclass(init=False, frozen=True) +class EndOfMessage(Event): + """The end of an HTTP message. + + Fields: + + .. attribute:: headers + + Default value: ``[]`` + + Any trailing headers attached to this message, represented as a list of + (name, value) pairs. See :ref:`the header normalization rules + ` for details. + + Must be empty unless ``Transfer-Encoding: chunked`` is in use. + + """ + + __slots__ = ("headers",) + + headers: Headers + + def __init__( + self, + *, + headers: Union[ + Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]], None + ] = None, + _parsed: bool = False, + ) -> None: + super().__init__() + if headers is None: + headers = Headers([]) + elif not isinstance(headers, Headers): + headers = normalize_and_validate(headers, _parsed=_parsed) + + object.__setattr__(self, "headers", headers) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(frozen=True) +class ConnectionClosed(Event): + """This event indicates that the sender has closed their outgoing + connection. + + Note that this does not necessarily mean that they can't *receive* further + data, because TCP connections are composed to two one-way channels which + can be closed independently. See :ref:`closing` for details. + + No fields. + """ + + pass diff --git a/.venv/lib/python3.12/site-packages/h11/_headers.py b/.venv/lib/python3.12/site-packages/h11/_headers.py new file mode 100644 index 0000000..31da3e2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/h11/_headers.py @@ -0,0 +1,282 @@ +import re +from typing import AnyStr, cast, List, overload, Sequence, Tuple, TYPE_CHECKING, Union + +from ._abnf import field_name, field_value +from ._util import bytesify, LocalProtocolError, validate + +if TYPE_CHECKING: + from ._events import Request + +try: + from typing import Literal +except ImportError: + from typing_extensions import Literal # type: ignore + +CONTENT_LENGTH_MAX_DIGITS = 20 # allow up to 1 billion TB - 1 + + +# Facts +# ----- +# +# Headers are: +# keys: case-insensitive ascii +# values: mixture of ascii and raw bytes +# +# "Historically, HTTP has allowed field content with text in the ISO-8859-1 +# charset [ISO-8859-1], supporting other charsets only through use of +# [RFC2047] encoding. In practice, most HTTP header field values use only a +# subset of the US-ASCII charset [USASCII]. Newly defined header fields SHOULD +# limit their field values to US-ASCII octets. A recipient SHOULD treat other +# octets in field content (obs-text) as opaque data." 
+# And it deprecates all non-ascii values
+#
+# Leading/trailing whitespace in header names is forbidden
+#
+# Values get leading/trailing whitespace stripped
+#
+# Content-Disposition actually needs to contain unicode semantically; to
+# accomplish this it has a terrifically weird way of encoding the filename
+# itself as ascii (and even this still has lots of cross-browser
+# incompatibilities)
+#
+# Order is important:
+# "a proxy MUST NOT change the order of these field values when forwarding a
+# message"
+# (and there are several headers where the order indicates a preference)
+#
+# Multiple occurrences of the same header:
+# "A sender MUST NOT generate multiple header fields with the same field name
+# in a message unless either the entire field value for that header field is
+# defined as a comma-separated list [or the header is Set-Cookie which gets a
+# special exception]" - RFC 7230. (cookies are in RFC 6265)
+#
+# So every header aside from Set-Cookie can be merged by b", ".join if it
+# occurs repeatedly. But, of course, they can't necessarily be split by
+# .split(b","), because quoting.
+#
+# Given all this mess (case insensitive, duplicates allowed, order is
+# important, ...), there doesn't appear to be any standard way to handle
+# headers in Python -- they're almost like dicts, but... actually just
+# aren't. For now we punt and just use a super simple representation: headers
+# are a list of pairs
+#
+#   [(name1, value1), (name2, value2), ...]
+#
+# where all entries are bytestrings, names are lowercase and have no
+# leading/trailing whitespace, and values are bytestrings with no
+# leading/trailing whitespace. Searching and updating are done via naive O(n)
+# methods.
+#
+# Maybe a dict-of-lists would be better?
+
+_content_length_re = re.compile(rb"[0-9]+")
+_field_name_re = re.compile(field_name.encode("ascii"))
+_field_value_re = re.compile(field_value.encode("ascii"))
+
+
+class Headers(Sequence[Tuple[bytes, bytes]]):
+    """
+    A list-like interface that allows iterating over headers as byte-pairs
+    of (lowercased-name, value).
+
+    Internally we actually store the representation as three-tuples,
+    including both the raw original casing, in order to preserve casing
+    over-the-wire, and the lowercased name, for case-insensitive comparisons.
+
+    r = Request(
+        method="GET",
+        target="/",
+        headers=[("Host", "example.org"), ("Connection", "keep-alive")],
+        http_version="1.1",
+    )
+    assert r.headers == [
+        (b"host", b"example.org"),
+        (b"connection", b"keep-alive")
+    ]
+    assert r.headers.raw_items() == [
+        (b"Host", b"example.org"),
+        (b"Connection", b"keep-alive")
+    ]
+    """
+
+    __slots__ = "_full_items"
+
+    def __init__(self, full_items: List[Tuple[bytes, bytes, bytes]]) -> None:
+        self._full_items = full_items
+
+    def __bool__(self) -> bool:
+        return bool(self._full_items)
+
+    def __eq__(self, other: object) -> bool:
+        return list(self) == list(other)  # type: ignore
+
+    def __len__(self) -> int:
+        return len(self._full_items)
+
+    def __repr__(self) -> str:
+        return "<Headers(%s)>" % repr(list(self))
+
+    def __getitem__(self, idx: int) -> Tuple[bytes, bytes]:  # type: ignore[override]
+        _, name, value = self._full_items[idx]
+        return (name, value)
+
+    def raw_items(self) -> List[Tuple[bytes, bytes]]:
+        return [(raw_name, value) for raw_name, _, value in self._full_items]
+
+
+HeaderTypes = Union[
+    List[Tuple[bytes, bytes]],
+    List[Tuple[bytes, str]],
+    List[Tuple[str, bytes]],
+    List[Tuple[str, str]],
+]
+
+
+@overload
+def normalize_and_validate(headers: Headers, _parsed: Literal[True]) -> Headers:
+    ...
+
+
+@overload
+def normalize_and_validate(headers: HeaderTypes, _parsed: Literal[False]) -> Headers:
+    ...
+
+
+@overload
+def normalize_and_validate(
+    headers: Union[Headers, HeaderTypes], _parsed: bool = False
+) -> Headers:
+    ...
+
+
+def normalize_and_validate(
+    headers: Union[Headers, HeaderTypes], _parsed: bool = False
+) -> Headers:
+    new_headers = []
+    seen_content_length = None
+    saw_transfer_encoding = False
+    for name, value in headers:
+        # For headers coming out of the parser, we can safely skip some steps,
+        # because it always returns bytes and has already run these regexes
+        # over the data:
+        if not _parsed:
+            name = bytesify(name)
+            value = bytesify(value)
+            validate(_field_name_re, name, "Illegal header name {!r}", name)
+            validate(_field_value_re, value, "Illegal header value {!r}", value)
+        assert isinstance(name, bytes)
+        assert isinstance(value, bytes)
+
+        raw_name = name
+        name = name.lower()
+        if name == b"content-length":
+            lengths = {length.strip() for length in value.split(b",")}
+            if len(lengths) != 1:
+                raise LocalProtocolError("conflicting Content-Length headers")
+            value = lengths.pop()
+            validate(_content_length_re, value, "bad Content-Length")
+            if len(value) > CONTENT_LENGTH_MAX_DIGITS:
+                raise LocalProtocolError("bad Content-Length")
+            if seen_content_length is None:
+                seen_content_length = value
+                new_headers.append((raw_name, name, value))
+            elif seen_content_length != value:
+                raise LocalProtocolError("conflicting Content-Length headers")
+        elif name == b"transfer-encoding":
+            # "A server that receives a request message with a transfer coding
+            # it does not understand SHOULD respond with 501 (Not
+            # Implemented)."
+            # https://tools.ietf.org/html/rfc7230#section-3.3.1
+            if saw_transfer_encoding:
+                raise LocalProtocolError(
+                    "multiple Transfer-Encoding headers", error_status_hint=501
+                )
+            # "All transfer-coding names are case-insensitive"
+            # -- https://tools.ietf.org/html/rfc7230#section-4
+            value = value.lower()
+            if value != b"chunked":
+                raise LocalProtocolError(
+                    "Only Transfer-Encoding: chunked is supported",
+                    error_status_hint=501,
+                )
+            saw_transfer_encoding = True
+            new_headers.append((raw_name, name, value))
+        else:
+            new_headers.append((raw_name, name, value))
+    return Headers(new_headers)
+
+
+def get_comma_header(headers: Headers, name: bytes) -> List[bytes]:
+    # Should only be used for headers whose value is a list of
+    # comma-separated, case-insensitive values.
+    #
+    # The header name `name` is expected to be lower-case bytes.
+    #
+    # Connection: meets these criteria (including case insensitivity).
+    #
+    # Content-Length: technically is just a single value (1*DIGIT), but the
+    # standard makes reference to implementations that do multiple values, and
+    # using this doesn't hurt. Ditto, case insensitivity doesn't hurt things
+    # either way.
+    #
+    # Transfer-Encoding: is more complex (allows for quoted strings), so
+    # splitting on , is actually wrong. For example, this is legal:
+    #
+    #    Transfer-Encoding: foo; options="1,2", chunked
+    #
+    # and should be parsed as
+    #
+    #    foo; options="1,2"
+    #    chunked
+    #
+    # but this naive function will parse it as
+    #
+    #    foo; options="1
+    #    2"
+    #    chunked
+    #
+    # However, this is okay because the only thing we are going to do with
+    # any Transfer-Encoding is reject ones that aren't just "chunked", so
+    # both of these will be treated the same anyway.
+    #
+    # Expect: the only legal value is the literal string
+    # "100-continue". Splitting on commas is harmless. Case insensitive.
+    #
+    out: List[bytes] = []
+    for _, found_name, found_raw_value in headers._full_items:
+        if found_name == name:
+            found_raw_value = found_raw_value.lower()
+            for found_split_value in found_raw_value.split(b","):
+                found_split_value = found_split_value.strip()
+                if found_split_value:
+                    out.append(found_split_value)
+    return out
+
+
+def set_comma_header(headers: Headers, name: bytes, new_values: List[bytes]) -> Headers:
+    # The header name `name` is expected to be lower-case bytes.
+    #
+    # Note that when we store the header we use title casing for the header
+    # names, in order to match the conventional HTTP header style.
+    #
+    # Simply calling `.title()` is a blunt approach, but it's correct
+    # here given the cases where we're using `set_comma_header`...
+    #
+    # Connection, Content-Length, Transfer-Encoding.
+    new_headers: List[Tuple[bytes, bytes]] = []
+    for found_raw_name, found_name, found_raw_value in headers._full_items:
+        if found_name != name:
+            new_headers.append((found_raw_name, found_raw_value))
+    for new_value in new_values:
+        new_headers.append((name.title(), new_value))
+    return normalize_and_validate(new_headers)
+
+
+def has_expect_100_continue(request: "Request") -> bool:
+    # https://tools.ietf.org/html/rfc7231#section-5.1.1
+    # "A server that receives a 100-continue expectation in an HTTP/1.0 request
+    # MUST ignore that expectation."
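+    # (Hence the check below: for an HTTP/1.0 request this returns False
+    # even if an Expect: 100-continue header is present.)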
+ if request.http_version < b"1.1": + return False + expect = get_comma_header(request.headers, b"expect") + return b"100-continue" in expect diff --git a/.venv/lib/python3.12/site-packages/h11/_readers.py b/.venv/lib/python3.12/site-packages/h11/_readers.py new file mode 100644 index 0000000..576804c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/h11/_readers.py @@ -0,0 +1,250 @@ +# Code to read HTTP data +# +# Strategy: each reader is a callable which takes a ReceiveBuffer object, and +# either: +# 1) consumes some of it and returns an Event +# 2) raises a LocalProtocolError (for consistency -- e.g. we call validate() +# and it might raise a LocalProtocolError, so simpler just to always use +# this) +# 3) returns None, meaning "I need more data" +# +# If they have a .read_eof attribute, then this will be called if an EOF is +# received -- but this is optional. Either way, the actual ConnectionClosed +# event will be generated afterwards. +# +# READERS is a dict describing how to pick a reader. It maps states to either: +# - a reader +# - or, for body readers, a dict of per-framing reader factories + +import re +from typing import Any, Callable, Dict, Iterable, NoReturn, Optional, Tuple, Type, Union + +from ._abnf import chunk_header, header_field, request_line, status_line +from ._events import Data, EndOfMessage, InformationalResponse, Request, Response +from ._receivebuffer import ReceiveBuffer +from ._state import ( + CLIENT, + CLOSED, + DONE, + IDLE, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, +) +from ._util import LocalProtocolError, RemoteProtocolError, Sentinel, validate + +__all__ = ["READERS"] + +header_field_re = re.compile(header_field.encode("ascii")) +obs_fold_re = re.compile(rb"[ \t]+") + + +def _obsolete_line_fold(lines: Iterable[bytes]) -> Iterable[bytes]: + it = iter(lines) + last: Optional[bytes] = None + for line in it: + match = obs_fold_re.match(line) + if match: + if last is None: + raise LocalProtocolError("continuation line at start of headers") + if not isinstance(last, bytearray): + # Cast to a mutable type, avoiding copy on append to ensure O(n) time + last = bytearray(last) + last += b" " + last += line[match.end() :] + else: + if last is not None: + yield last + last = line + if last is not None: + yield last + + +def _decode_header_lines( + lines: Iterable[bytes], +) -> Iterable[Tuple[bytes, bytes]]: + for line in _obsolete_line_fold(lines): + matches = validate(header_field_re, line, "illegal header line: {!r}", line) + yield (matches["field_name"], matches["field_value"]) + + +request_line_re = re.compile(request_line.encode("ascii")) + + +def maybe_read_from_IDLE_client(buf: ReceiveBuffer) -> Optional[Request]: + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no request line received") + matches = validate( + request_line_re, lines[0], "illegal request line: {!r}", lines[0] + ) + return Request( + headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches + ) + + +status_line_re = re.compile(status_line.encode("ascii")) + + +def maybe_read_from_SEND_RESPONSE_server( + buf: ReceiveBuffer, +) -> Union[InformationalResponse, Response, None]: + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise 
LocalProtocolError("no response line received") + matches = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0]) + http_version = ( + b"1.1" if matches["http_version"] is None else matches["http_version"] + ) + reason = b"" if matches["reason"] is None else matches["reason"] + status_code = int(matches["status_code"]) + class_: Union[Type[InformationalResponse], Type[Response]] = ( + InformationalResponse if status_code < 200 else Response + ) + return class_( + headers=list(_decode_header_lines(lines[1:])), + _parsed=True, + status_code=status_code, + reason=reason, + http_version=http_version, + ) + + +class ContentLengthReader: + def __init__(self, length: int) -> None: + self._length = length + self._remaining = length + + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: + if self._remaining == 0: + return EndOfMessage() + data = buf.maybe_extract_at_most(self._remaining) + if data is None: + return None + self._remaining -= len(data) + return Data(data=data) + + def read_eof(self) -> NoReturn: + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(received {} bytes, expected {})".format( + self._length - self._remaining, self._length + ) + ) + + +chunk_header_re = re.compile(chunk_header.encode("ascii")) + + +class ChunkedReader: + def __init__(self) -> None: + self._bytes_in_chunk = 0 + # After reading a chunk, we have to throw away the trailing \r\n. + # This tracks the bytes that we need to match and throw away. + self._bytes_to_discard = b"" + self._reading_trailer = False + + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: + if self._reading_trailer: + lines = buf.maybe_extract_lines() + if lines is None: + return None + return EndOfMessage(headers=list(_decode_header_lines(lines))) + if self._bytes_to_discard: + data = buf.maybe_extract_at_most(len(self._bytes_to_discard)) + if data is None: + return None + if data != self._bytes_to_discard[: len(data)]: + raise LocalProtocolError( + f"malformed chunk footer: {data!r} (expected {self._bytes_to_discard!r})" + ) + self._bytes_to_discard = self._bytes_to_discard[len(data) :] + if self._bytes_to_discard: + return None + # else, fall through and read some more + assert self._bytes_to_discard == b"" + if self._bytes_in_chunk == 0: + # We need to refill our chunk count + chunk_header = buf.maybe_extract_next_line() + if chunk_header is None: + return None + matches = validate( + chunk_header_re, + chunk_header, + "illegal chunk header: {!r}", + chunk_header, + ) + # XX FIXME: we discard chunk extensions. Does anyone care? 
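+            # (For example, a chunk header like b"5;name=value\r\n" matches
+            # with chunk_size == b"5"; the ";name=value" extension is simply
+            # dropped here.)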
+ self._bytes_in_chunk = int(matches["chunk_size"], base=16) + if self._bytes_in_chunk == 0: + self._reading_trailer = True + return self(buf) + chunk_start = True + else: + chunk_start = False + assert self._bytes_in_chunk > 0 + data = buf.maybe_extract_at_most(self._bytes_in_chunk) + if data is None: + return None + self._bytes_in_chunk -= len(data) + if self._bytes_in_chunk == 0: + self._bytes_to_discard = b"\r\n" + chunk_end = True + else: + chunk_end = False + return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end) + + def read_eof(self) -> NoReturn: + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(incomplete chunked read)" + ) + + +class Http10Reader: + def __call__(self, buf: ReceiveBuffer) -> Optional[Data]: + data = buf.maybe_extract_at_most(999999999) + if data is None: + return None + return Data(data=data) + + def read_eof(self) -> EndOfMessage: + return EndOfMessage() + + +def expect_nothing(buf: ReceiveBuffer) -> None: + if buf: + raise LocalProtocolError("Got data when expecting EOF") + return None + + +ReadersType = Dict[ + Union[Type[Sentinel], Tuple[Type[Sentinel], Type[Sentinel]]], + Union[Callable[..., Any], Dict[str, Callable[..., Any]]], +] + +READERS: ReadersType = { + (CLIENT, IDLE): maybe_read_from_IDLE_client, + (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server, + (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server, + (CLIENT, DONE): expect_nothing, + (CLIENT, MUST_CLOSE): expect_nothing, + (CLIENT, CLOSED): expect_nothing, + (SERVER, DONE): expect_nothing, + (SERVER, MUST_CLOSE): expect_nothing, + (SERVER, CLOSED): expect_nothing, + SEND_BODY: { + "chunked": ChunkedReader, + "content-length": ContentLengthReader, + "http/1.0": Http10Reader, + }, +} diff --git a/.venv/lib/python3.12/site-packages/h11/_receivebuffer.py b/.venv/lib/python3.12/site-packages/h11/_receivebuffer.py new file mode 100644 index 0000000..e5c4e08 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/h11/_receivebuffer.py @@ -0,0 +1,153 @@ +import re +import sys +from typing import List, Optional, Union + +__all__ = ["ReceiveBuffer"] + + +# Operations we want to support: +# - find next \r\n or \r\n\r\n (\n or \n\n are also acceptable), +# or wait until there is one +# - read at-most-N bytes +# Goals: +# - on average, do this fast +# - worst case, do this in O(n) where n is the number of bytes processed +# Plan: +# - store bytearray, offset, how far we've searched for a separator token +# - use the how-far-we've-searched data to avoid rescanning +# - while doing a stream of uninterrupted processing, advance offset instead +# of constantly copying +# WARNING: +# - I haven't benchmarked or profiled any of this yet. +# +# Note that starting in Python 3.4, deleting the initial n bytes from a +# bytearray is amortized O(n), thanks to some excellent work by Antoine +# Martin: +# +# https://bugs.python.org/issue19087 +# +# This means that if we only supported 3.4+, we could get rid of the code here +# involving self._start and self.compress, because it's doing exactly the same +# thing that bytearray now does internally. +# +# BUT unfortunately, we still support 2.7, and reading short segments out of a +# long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually +# delete this code. 
Yet: +# +# https://pythonclock.org/ +# +# (Two things to double-check first though: make sure PyPy also has the +# optimization, and benchmark to make sure it's a win, since we do have a +# slightly clever thing where we delay calling compress() until we've +# processed a whole event, which could in theory be slightly more efficient +# than the internal bytearray support.) +blank_line_regex = re.compile(b"\n\r?\n", re.MULTILINE) + + +class ReceiveBuffer: + def __init__(self) -> None: + self._data = bytearray() + self._next_line_search = 0 + self._multiple_lines_search = 0 + + def __iadd__(self, byteslike: Union[bytes, bytearray]) -> "ReceiveBuffer": + self._data += byteslike + return self + + def __bool__(self) -> bool: + return bool(len(self)) + + def __len__(self) -> int: + return len(self._data) + + # for @property unprocessed_data + def __bytes__(self) -> bytes: + return bytes(self._data) + + def _extract(self, count: int) -> bytearray: + # extracting an initial slice of the data buffer and return it + out = self._data[:count] + del self._data[:count] + + self._next_line_search = 0 + self._multiple_lines_search = 0 + + return out + + def maybe_extract_at_most(self, count: int) -> Optional[bytearray]: + """ + Extract a fixed number of bytes from the buffer. + """ + out = self._data[:count] + if not out: + return None + + return self._extract(count) + + def maybe_extract_next_line(self) -> Optional[bytearray]: + """ + Extract the first line, if it is completed in the buffer. + """ + # Only search in buffer space that we've not already looked at. + search_start_index = max(0, self._next_line_search - 1) + partial_idx = self._data.find(b"\r\n", search_start_index) + + if partial_idx == -1: + self._next_line_search = len(self._data) + return None + + # + 2 is to compensate len(b"\r\n") + idx = partial_idx + 2 + + return self._extract(idx) + + def maybe_extract_lines(self) -> Optional[List[bytearray]]: + """ + Extract everything up to the first blank line, and return a list of lines. + """ + # Handle the case where we have an immediate empty line. + if self._data[:1] == b"\n": + self._extract(1) + return [] + + if self._data[:2] == b"\r\n": + self._extract(2) + return [] + + # Only search in buffer space that we've not already looked at. + match = blank_line_regex.search(self._data, self._multiple_lines_search) + if match is None: + self._multiple_lines_search = max(0, len(self._data) - 2) + return None + + # Truncate the buffer and return it. + idx = match.span(0)[-1] + out = self._extract(idx) + lines = out.split(b"\n") + + for line in lines: + if line.endswith(b"\r"): + del line[-1] + + assert lines[-2] == lines[-1] == b"" + + del lines[-2:] + + return lines + + # In theory we should wait until `\r\n` before starting to validate + # incoming data. However it's interesting to detect (very) invalid data + # early given they might not even contain `\r\n` at all (hence only + # timeout will get rid of them). + # This is not a 100% effective detection but more of a cheap sanity check + # allowing for early abort in some useful cases. + # This is especially interesting when peer is messing up with HTTPS and + # sent us a TLS stream where we were expecting plain HTTP given all + # versions of TLS so far start handshake with a 0x16 message type code. 
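+    # (A TLS ClientHello, for instance, starts with byte 0x16, which is below
+    # 0x21 and so gets flagged as obviously invalid by the check below.)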
+ def is_next_line_obviously_invalid_request_line(self) -> bool: + try: + # HTTP header line must not contain non-printable characters + # and should not start with a space + return self._data[0] < 0x21 + except IndexError: + return False diff --git a/.venv/lib/python3.12/site-packages/h11/_state.py b/.venv/lib/python3.12/site-packages/h11/_state.py new file mode 100644 index 0000000..3ad444b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/h11/_state.py @@ -0,0 +1,365 @@ +################################################################ +# The core state machine +################################################################ +# +# Rule 1: everything that affects the state machine and state transitions must +# live here in this file. As much as possible goes into the table-based +# representation, but for the bits that don't quite fit, the actual code and +# state must nonetheless live here. +# +# Rule 2: this file does not know about what role we're playing; it only knows +# about HTTP request/response cycles in the abstract. This ensures that we +# don't cheat and apply different rules to local and remote parties. +# +# +# Theory of operation +# =================== +# +# Possibly the simplest way to think about this is that we actually have 5 +# different state machines here. Yes, 5. These are: +# +# 1) The client state, with its complicated automaton (see the docs) +# 2) The server state, with its complicated automaton (see the docs) +# 3) The keep-alive state, with possible states {True, False} +# 4) The SWITCH_CONNECT state, with possible states {False, True} +# 5) The SWITCH_UPGRADE state, with possible states {False, True} +# +# For (3)-(5), the first state listed is the initial state. +# +# (1)-(3) are stored explicitly in member variables. The last +# two are stored implicitly in the pending_switch_proposals set as: +# (state of 4) == (_SWITCH_CONNECT in pending_switch_proposals) +# (state of 5) == (_SWITCH_UPGRADE in pending_switch_proposals) +# +# And each of these machines has two different kinds of transitions: +# +# a) Event-triggered +# b) State-triggered +# +# Event triggered is the obvious thing that you'd think it is: some event +# happens, and if it's the right event at the right time then a transition +# happens. But there are somewhat complicated rules for which machines can +# "see" which events. (As a rule of thumb, if a machine "sees" an event, this +# means two things: the event can affect the machine, and if the machine is +# not in a state where it expects that event then it's an error.) These rules +# are: +# +# 1) The client machine sees all h11.events objects emitted by the client. +# +# 2) The server machine sees all h11.events objects emitted by the server. +# +# It also sees the client's Request event. +# +# And sometimes, server events are annotated with a _SWITCH_* event. For +# example, we can have a (Response, _SWITCH_CONNECT) event, which is +# different from a regular Response event. +# +# 3) The keep-alive machine sees the process_keep_alive_disabled() event +# (which is derived from Request/Response events), and this event +# transitions it from True -> False, or from False -> False. There's no way +# to transition back. +# +# 4&5) The _SWITCH_* machines transition from False->True when we get a +# Request that proposes the relevant type of switch (via +# process_client_switch_proposals), and they go from True->False when we +# get a Response that has no _SWITCH_* annotation. +# +# So that's event-triggered transitions. 
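+#
+# As a rough illustration of the event-triggered tables below (a hypothetical
+# snippet, not part of the library):
+#
+#     cs = ConnectionState()
+#     cs.process_event(CLIENT, Request)       # CLIENT: IDLE -> SEND_BODY
+#                                             # SERVER: IDLE -> SEND_RESPONSE
+#     cs.process_event(CLIENT, EndOfMessage)  # CLIENT: SEND_BODY -> DONE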
+# +# State-triggered transitions are less standard. What they do here is couple +# the machines together. The way this works is, when certain *joint* +# configurations of states are achieved, then we automatically transition to a +# new *joint* state. So, for example, if we're ever in a joint state with +# +# client: DONE +# keep-alive: False +# +# then the client state immediately transitions to: +# +# client: MUST_CLOSE +# +# This is fundamentally different from an event-based transition, because it +# doesn't matter how we arrived at the {client: DONE, keep-alive: False} state +# -- maybe the client transitioned SEND_BODY -> DONE, or keep-alive +# transitioned True -> False. Either way, once this precondition is satisfied, +# this transition is immediately triggered. +# +# What if two conflicting state-based transitions get enabled at the same +# time? In practice there's only one case where this arises (client DONE -> +# MIGHT_SWITCH_PROTOCOL versus DONE -> MUST_CLOSE), and we resolve it by +# explicitly prioritizing the DONE -> MIGHT_SWITCH_PROTOCOL transition. +# +# Implementation +# -------------- +# +# The event-triggered transitions for the server and client machines are all +# stored explicitly in a table. Ditto for the state-triggered transitions that +# involve just the server and client state. +# +# The transitions for the other machines, and the state-triggered transitions +# that involve the other machines, are written out as explicit Python code. +# +# It'd be nice if there were some cleaner way to do all this. This isn't +# *too* terrible, but I feel like it could probably be better. +# +# WARNING +# ------- +# +# The script that generates the state machine diagrams for the docs knows how +# to read out the EVENT_TRIGGERED_TRANSITIONS and STATE_TRIGGERED_TRANSITIONS +# tables. But it can't automatically read the transitions that are written +# directly in Python code. So if you touch those, you need to also update the +# script to keep it in sync! +from typing import cast, Dict, Optional, Set, Tuple, Type, Union + +from ._events import * +from ._util import LocalProtocolError, Sentinel + +# Everything in __all__ gets re-exported as part of the h11 public API. 
+__all__ = [ + "CLIENT", + "SERVER", + "IDLE", + "SEND_RESPONSE", + "SEND_BODY", + "DONE", + "MUST_CLOSE", + "CLOSED", + "MIGHT_SWITCH_PROTOCOL", + "SWITCHED_PROTOCOL", + "ERROR", +] + + +class CLIENT(Sentinel, metaclass=Sentinel): + pass + + +class SERVER(Sentinel, metaclass=Sentinel): + pass + + +# States +class IDLE(Sentinel, metaclass=Sentinel): + pass + + +class SEND_RESPONSE(Sentinel, metaclass=Sentinel): + pass + + +class SEND_BODY(Sentinel, metaclass=Sentinel): + pass + + +class DONE(Sentinel, metaclass=Sentinel): + pass + + +class MUST_CLOSE(Sentinel, metaclass=Sentinel): + pass + + +class CLOSED(Sentinel, metaclass=Sentinel): + pass + + +class ERROR(Sentinel, metaclass=Sentinel): + pass + + +# Switch types +class MIGHT_SWITCH_PROTOCOL(Sentinel, metaclass=Sentinel): + pass + + +class SWITCHED_PROTOCOL(Sentinel, metaclass=Sentinel): + pass + + +class _SWITCH_UPGRADE(Sentinel, metaclass=Sentinel): + pass + + +class _SWITCH_CONNECT(Sentinel, metaclass=Sentinel): + pass + + +EventTransitionType = Dict[ + Type[Sentinel], + Dict[ + Type[Sentinel], + Dict[Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], Type[Sentinel]], + ], +] + +EVENT_TRIGGERED_TRANSITIONS: EventTransitionType = { + CLIENT: { + IDLE: {Request: SEND_BODY, ConnectionClosed: CLOSED}, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + MIGHT_SWITCH_PROTOCOL: {}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, + SERVER: { + IDLE: { + ConnectionClosed: CLOSED, + Response: SEND_BODY, + # Special case: server sees client Request events, in this form + (Request, CLIENT): SEND_RESPONSE, + }, + SEND_RESPONSE: { + InformationalResponse: SEND_RESPONSE, + Response: SEND_BODY, + (InformationalResponse, _SWITCH_UPGRADE): SWITCHED_PROTOCOL, + (Response, _SWITCH_CONNECT): SWITCHED_PROTOCOL, + }, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, +} + +StateTransitionType = Dict[ + Tuple[Type[Sentinel], Type[Sentinel]], Dict[Type[Sentinel], Type[Sentinel]] +] + +# NB: there are also some special-case state-triggered transitions hard-coded +# into _fire_state_triggered_transitions below. +STATE_TRIGGERED_TRANSITIONS: StateTransitionType = { + # (Client state, Server state) -> new states + # Protocol negotiation + (MIGHT_SWITCH_PROTOCOL, SWITCHED_PROTOCOL): {CLIENT: SWITCHED_PROTOCOL}, + # Socket shutdown + (CLOSED, DONE): {SERVER: MUST_CLOSE}, + (CLOSED, IDLE): {SERVER: MUST_CLOSE}, + (ERROR, DONE): {SERVER: MUST_CLOSE}, + (DONE, CLOSED): {CLIENT: MUST_CLOSE}, + (IDLE, CLOSED): {CLIENT: MUST_CLOSE}, + (DONE, ERROR): {CLIENT: MUST_CLOSE}, +} + + +class ConnectionState: + def __init__(self) -> None: + # Extra bits of state that don't quite fit into the state model. + + # If this is False then it enables the automatic DONE -> MUST_CLOSE + # transition. Don't set this directly; call .keep_alive_disabled() + self.keep_alive = True + + # This is a subset of {UPGRADE, CONNECT}, containing the proposals + # made by the client for switching protocols. 
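+        # (Concretely, it may contain the _SWITCH_UPGRADE and/or
+        # _SWITCH_CONNECT sentinels.)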
+ self.pending_switch_proposals: Set[Type[Sentinel]] = set() + + self.states: Dict[Type[Sentinel], Type[Sentinel]] = {CLIENT: IDLE, SERVER: IDLE} + + def process_error(self, role: Type[Sentinel]) -> None: + self.states[role] = ERROR + self._fire_state_triggered_transitions() + + def process_keep_alive_disabled(self) -> None: + self.keep_alive = False + self._fire_state_triggered_transitions() + + def process_client_switch_proposal(self, switch_event: Type[Sentinel]) -> None: + self.pending_switch_proposals.add(switch_event) + self._fire_state_triggered_transitions() + + def process_event( + self, + role: Type[Sentinel], + event_type: Type[Event], + server_switch_event: Optional[Type[Sentinel]] = None, + ) -> None: + _event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]] = event_type + if server_switch_event is not None: + assert role is SERVER + if server_switch_event not in self.pending_switch_proposals: + raise LocalProtocolError( + "Received server _SWITCH_UPGRADE event without a pending proposal" + ) + _event_type = (event_type, server_switch_event) + if server_switch_event is None and _event_type is Response: + self.pending_switch_proposals = set() + self._fire_event_triggered_transitions(role, _event_type) + # Special case: the server state does get to see Request + # events. + if _event_type is Request: + assert role is CLIENT + self._fire_event_triggered_transitions(SERVER, (Request, CLIENT)) + self._fire_state_triggered_transitions() + + def _fire_event_triggered_transitions( + self, + role: Type[Sentinel], + event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], + ) -> None: + state = self.states[role] + try: + new_state = EVENT_TRIGGERED_TRANSITIONS[role][state][event_type] + except KeyError: + event_type = cast(Type[Event], event_type) + raise LocalProtocolError( + "can't handle event type {} when role={} and state={}".format( + event_type.__name__, role, self.states[role] + ) + ) from None + self.states[role] = new_state + + def _fire_state_triggered_transitions(self) -> None: + # We apply these rules repeatedly until converging on a fixed point + while True: + start_states = dict(self.states) + + # It could happen that both these special-case transitions are + # enabled at the same time: + # + # DONE -> MIGHT_SWITCH_PROTOCOL + # DONE -> MUST_CLOSE + # + # For example, this will always be true of a HTTP/1.0 client + # requesting CONNECT. If this happens, the protocol switch takes + # priority. From there the client will either go to + # SWITCHED_PROTOCOL, in which case it's none of our business when + # they close the connection, or else the server will deny the + # request, in which case the client will go back to DONE and then + # from there to MUST_CLOSE. + if self.pending_switch_proposals: + if self.states[CLIENT] is DONE: + self.states[CLIENT] = MIGHT_SWITCH_PROTOCOL + + if not self.pending_switch_proposals: + if self.states[CLIENT] is MIGHT_SWITCH_PROTOCOL: + self.states[CLIENT] = DONE + + if not self.keep_alive: + for role in (CLIENT, SERVER): + if self.states[role] is DONE: + self.states[role] = MUST_CLOSE + + # Tabular state-triggered transitions + joint_state = (self.states[CLIENT], self.states[SERVER]) + changes = STATE_TRIGGERED_TRANSITIONS.get(joint_state, {}) + self.states.update(changes) + + if self.states == start_states: + # Fixed point reached + return + + def start_next_cycle(self) -> None: + if self.states != {CLIENT: DONE, SERVER: DONE}: + raise LocalProtocolError( + f"not in a reusable state. 
self.states={self.states}" + ) + # Can't reach DONE/DONE with any of these active, but still, let's be + # sure. + assert self.keep_alive + assert not self.pending_switch_proposals + self.states = {CLIENT: IDLE, SERVER: IDLE} diff --git a/.venv/lib/python3.12/site-packages/h11/_util.py b/.venv/lib/python3.12/site-packages/h11/_util.py new file mode 100644 index 0000000..6718445 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/h11/_util.py @@ -0,0 +1,135 @@ +from typing import Any, Dict, NoReturn, Pattern, Tuple, Type, TypeVar, Union + +__all__ = [ + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", + "validate", + "bytesify", +] + + +class ProtocolError(Exception): + """Exception indicating a violation of the HTTP/1.1 protocol. + + This as an abstract base class, with two concrete base classes: + :exc:`LocalProtocolError`, which indicates that you tried to do something + that HTTP/1.1 says is illegal, and :exc:`RemoteProtocolError`, which + indicates that the remote peer tried to do something that HTTP/1.1 says is + illegal. See :ref:`error-handling` for details. + + In addition to the normal :exc:`Exception` features, it has one attribute: + + .. attribute:: error_status_hint + + This gives a suggestion as to what status code a server might use if + this error occurred as part of a request. + + For a :exc:`RemoteProtocolError`, this is useful as a suggestion for + how you might want to respond to a misbehaving peer, if you're + implementing a server. + + For a :exc:`LocalProtocolError`, this can be taken as a suggestion for + how your peer might have responded to *you* if h11 had allowed you to + continue. + + The default is 400 Bad Request, a generic catch-all for protocol + violations. + + """ + + def __init__(self, msg: str, error_status_hint: int = 400) -> None: + if type(self) is ProtocolError: + raise TypeError("tried to directly instantiate ProtocolError") + Exception.__init__(self, msg) + self.error_status_hint = error_status_hint + + +# Strategy: there are a number of public APIs where a LocalProtocolError can +# be raised (send(), all the different event constructors, ...), and only one +# public API where RemoteProtocolError can be raised +# (receive_data()). Therefore we always raise LocalProtocolError internally, +# and then receive_data will translate this into a RemoteProtocolError. +# +# Internally: +# LocalProtocolError is the generic "ProtocolError". +# Externally: +# LocalProtocolError is for local errors and RemoteProtocolError is for +# remote errors. +class LocalProtocolError(ProtocolError): + def _reraise_as_remote_protocol_error(self) -> NoReturn: + # After catching a LocalProtocolError, use this method to re-raise it + # as a RemoteProtocolError. This method must be called from inside an + # except: block. + # + # An easy way to get an equivalent RemoteProtocolError is just to + # modify 'self' in place. + self.__class__ = RemoteProtocolError # type: ignore + # But the re-raising is somewhat non-trivial -- you might think that + # now that we've modified the in-flight exception object, that just + # doing 'raise' to re-raise it would be enough. But it turns out that + # this doesn't work, because Python tracks the exception type + # (exc_info[0]) separately from the exception object (exc_info[1]), + # and we only modified the latter. So we really do need to re-raise + # the new type explicitly. 
+ # On py3, the traceback is part of the exception object, so our + # in-place modification preserved it and we can just re-raise: + raise self + + +class RemoteProtocolError(ProtocolError): + pass + + +def validate( + regex: Pattern[bytes], data: bytes, msg: str = "malformed data", *format_args: Any +) -> Dict[str, bytes]: + match = regex.fullmatch(data) + if not match: + if format_args: + msg = msg.format(*format_args) + raise LocalProtocolError(msg) + return match.groupdict() + + +# Sentinel values +# +# - Inherit identity-based comparison and hashing from object +# - Have a nice repr +# - Have a *bonus property*: type(sentinel) is sentinel +# +# The bonus property is useful if you want to take the return value from +# next_event() and do some sort of dispatch based on type(event). + +_T_Sentinel = TypeVar("_T_Sentinel", bound="Sentinel") + + +class Sentinel(type): + def __new__( + cls: Type[_T_Sentinel], + name: str, + bases: Tuple[type, ...], + namespace: Dict[str, Any], + **kwds: Any + ) -> _T_Sentinel: + assert bases == (Sentinel,) + v = super().__new__(cls, name, bases, namespace, **kwds) + v.__class__ = v # type: ignore + return v + + def __repr__(self) -> str: + return self.__name__ + + +# Used for methods, request targets, HTTP versions, header names, and header +# values. Accepts ascii-strings, or bytes/bytearray/memoryview/..., and always +# returns bytes. +def bytesify(s: Union[bytes, bytearray, memoryview, int, str]) -> bytes: + # Fast-path: + if type(s) is bytes: + return s + if isinstance(s, str): + s = s.encode("ascii") + if isinstance(s, int): + raise TypeError("expected bytes-like object, not int") + return bytes(s) diff --git a/.venv/lib/python3.12/site-packages/h11/_version.py b/.venv/lib/python3.12/site-packages/h11/_version.py new file mode 100644 index 0000000..76e7327 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/h11/_version.py @@ -0,0 +1,16 @@ +# This file must be kept very simple, because it is consumed from several +# places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc. + +# We use a simple scheme: +# 1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev +# where the +dev versions are never released into the wild, they're just what +# we stick into the VCS in between releases. +# +# This is compatible with PEP 440: +# http://legacy.python.org/dev/peps/pep-0440/ +# via the use of the "local suffix" "+dev", which is disallowed on index +# servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we +# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before* +# 1.0.0.) + +__version__ = "0.16.0" diff --git a/.venv/lib/python3.12/site-packages/h11/_writers.py b/.venv/lib/python3.12/site-packages/h11/_writers.py new file mode 100644 index 0000000..939cdb9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/h11/_writers.py @@ -0,0 +1,145 @@ +# Code to read HTTP data +# +# Strategy: each writer takes an event + a write-some-bytes function, which is +# calls. +# +# WRITERS is a dict describing how to pick a reader. 
It maps states to either: +# - a writer +# - or, for body writers, a dict of framin-dependent writer factories + +from typing import Any, Callable, Dict, List, Tuple, Type, Union + +from ._events import Data, EndOfMessage, Event, InformationalResponse, Request, Response +from ._headers import Headers +from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER +from ._util import LocalProtocolError, Sentinel + +__all__ = ["WRITERS"] + +Writer = Callable[[bytes], Any] + + +def write_headers(headers: Headers, write: Writer) -> None: + # "Since the Host field-value is critical information for handling a + # request, a user agent SHOULD generate Host as the first header field + # following the request-line." - RFC 7230 + raw_items = headers._full_items + for raw_name, name, value in raw_items: + if name == b"host": + write(b"%s: %s\r\n" % (raw_name, value)) + for raw_name, name, value in raw_items: + if name != b"host": + write(b"%s: %s\r\n" % (raw_name, value)) + write(b"\r\n") + + +def write_request(request: Request, write: Writer) -> None: + if request.http_version != b"1.1": + raise LocalProtocolError("I only send HTTP/1.1") + write(b"%s %s HTTP/1.1\r\n" % (request.method, request.target)) + write_headers(request.headers, write) + + +# Shared between InformationalResponse and Response +def write_any_response( + response: Union[InformationalResponse, Response], write: Writer +) -> None: + if response.http_version != b"1.1": + raise LocalProtocolError("I only send HTTP/1.1") + status_bytes = str(response.status_code).encode("ascii") + # We don't bother sending ascii status messages like "OK"; they're + # optional and ignored by the protocol. (But the space after the numeric + # status code is mandatory.) + # + # XX FIXME: could at least make an effort to pull out the status message + # from stdlib's http.HTTPStatus table. Or maybe just steal their enums + # (either by import or copy/paste). We already accept them as status codes + # since they're of type IntEnum < int. + write(b"HTTP/1.1 %s %s\r\n" % (status_bytes, response.reason)) + write_headers(response.headers, write) + + +class BodyWriter: + def __call__(self, event: Event, write: Writer) -> None: + if type(event) is Data: + self.send_data(event.data, write) + elif type(event) is EndOfMessage: + self.send_eom(event.headers, write) + else: # pragma: no cover + assert False + + def send_data(self, data: bytes, write: Writer) -> None: + pass + + def send_eom(self, headers: Headers, write: Writer) -> None: + pass + + +# +# These are all careful not to do anything to 'data' except call len(data) and +# write(data). This allows us to transparently pass-through funny objects, +# like placeholder objects referring to files on disk that will be sent via +# sendfile(2). +# +class ContentLengthWriter(BodyWriter): + def __init__(self, length: int) -> None: + self._length = length + + def send_data(self, data: bytes, write: Writer) -> None: + self._length -= len(data) + if self._length < 0: + raise LocalProtocolError("Too much data for declared Content-Length") + write(data) + + def send_eom(self, headers: Headers, write: Writer) -> None: + if self._length != 0: + raise LocalProtocolError("Too little data for declared Content-Length") + if headers: + raise LocalProtocolError("Content-Length and trailers don't mix") + + +class ChunkedWriter(BodyWriter): + def send_data(self, data: bytes, write: Writer) -> None: + # if we encoded 0-length data in the naive way, it would look like an + # end-of-message. 
+ if not data: + return + write(b"%x\r\n" % len(data)) + write(data) + write(b"\r\n") + + def send_eom(self, headers: Headers, write: Writer) -> None: + write(b"0\r\n") + write_headers(headers, write) + + +class Http10Writer(BodyWriter): + def send_data(self, data: bytes, write: Writer) -> None: + write(data) + + def send_eom(self, headers: Headers, write: Writer) -> None: + if headers: + raise LocalProtocolError("can't send trailers to HTTP/1.0 client") + # no need to close the socket ourselves, that will be taken care of by + # Connection: close machinery + + +WritersType = Dict[ + Union[Tuple[Type[Sentinel], Type[Sentinel]], Type[Sentinel]], + Union[ + Dict[str, Type[BodyWriter]], + Callable[[Union[InformationalResponse, Response], Writer], None], + Callable[[Request, Writer], None], + ], +] + +WRITERS: WritersType = { + (CLIENT, IDLE): write_request, + (SERVER, IDLE): write_any_response, + (SERVER, SEND_RESPONSE): write_any_response, + SEND_BODY: { + "chunked": ChunkedWriter, + "content-length": ContentLengthWriter, + "http/1.0": Http10Writer, + }, +} diff --git a/.venv/lib/python3.12/site-packages/h11/py.typed b/.venv/lib/python3.12/site-packages/h11/py.typed new file mode 100644 index 0000000..f5642f7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/h11/py.typed @@ -0,0 +1 @@ +Marker diff --git a/.venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/METADATA b/.venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/METADATA new file mode 100644 index 0000000..8056834 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/METADATA @@ -0,0 +1,625 @@ +Metadata-Version: 2.4 +Name: httpcore +Version: 1.0.9 +Summary: A minimal low-level HTTP client. 
+Project-URL: Documentation, https://www.encode.io/httpcore +Project-URL: Homepage, https://www.encode.io/httpcore/ +Project-URL: Source, https://github.com/encode/httpcore +Author-email: Tom Christie +License-Expression: BSD-3-Clause +License-File: LICENSE.md +Classifier: Development Status :: 3 - Alpha +Classifier: Environment :: Web Environment +Classifier: Framework :: AsyncIO +Classifier: Framework :: Trio +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Internet :: WWW/HTTP +Requires-Python: >=3.8 +Requires-Dist: certifi +Requires-Dist: h11>=0.16 +Provides-Extra: asyncio +Requires-Dist: anyio<5.0,>=4.0; extra == 'asyncio' +Provides-Extra: http2 +Requires-Dist: h2<5,>=3; extra == 'http2' +Provides-Extra: socks +Requires-Dist: socksio==1.*; extra == 'socks' +Provides-Extra: trio +Requires-Dist: trio<1.0,>=0.22.0; extra == 'trio' +Description-Content-Type: text/markdown + +# HTTP Core + +[![Test Suite](https://github.com/encode/httpcore/workflows/Test%20Suite/badge.svg)](https://github.com/encode/httpcore/actions) +[![Package version](https://badge.fury.io/py/httpcore.svg)](https://pypi.org/project/httpcore/) + +> *Do one thing, and do it well.* + +The HTTP Core package provides a minimal low-level HTTP client, which does +one thing only. Sending HTTP requests. + +It does not provide any high level model abstractions over the API, +does not handle redirects, multipart uploads, building authentication headers, +transparent HTTP caching, URL parsing, session cookie handling, +content or charset decoding, handling JSON, environment based configuration +defaults, or any of that Jazz. + +Some things HTTP Core does do: + +* Sending HTTP requests. +* Thread-safe / task-safe connection pooling. +* HTTP(S) proxy & SOCKS proxy support. +* Supports HTTP/1.1 and HTTP/2. +* Provides both sync and async interfaces. +* Async backend support for `asyncio` and `trio`. + +## Requirements + +Python 3.8+ + +## Installation + +For HTTP/1.1 only support, install with: + +```shell +$ pip install httpcore +``` + +There are also a number of optional extras available... + +```shell +$ pip install httpcore['asyncio,trio,http2,socks'] +``` + +## Sending requests + +Send an HTTP request: + +```python +import httpcore + +response = httpcore.request("GET", "https://www.example.com/") + +print(response) +# +print(response.status) +# 200 +print(response.headers) +# [(b'Accept-Ranges', b'bytes'), (b'Age', b'557328'), (b'Cache-Control', b'max-age=604800'), ...] +print(response.content) +# b'\n\n\nExample Domain\n\n\n ...' +``` + +The top-level `httpcore.request()` function is provided for convenience. In practice whenever you're working with `httpcore` you'll want to use the connection pooling functionality that it provides. + +```python +import httpcore + +http = httpcore.ConnectionPool() +response = http.request("GET", "https://www.example.com/") +``` + +Once you're ready to get going, [head over to the documentation](https://www.encode.io/httpcore/). + +## Motivation + +You *probably* don't want to be using HTTP Core directly. 
It might make sense if +you're writing something like a proxy service in Python, and you just want +something at the lowest possible level, but more typically you'll want to use +a higher level client library, such as `httpx`. + +The motivation for `httpcore` is: + +* To provide a reusable low-level client library, that other packages can then build on top of. +* To provide a *really clear interface split* between the networking code and client logic, + so that each is easier to understand and reason about in isolation. + +## Dependencies + +The `httpcore` package has the following dependencies... + +* `h11` +* `certifi` + +And the following optional extras... + +* `anyio` - Required by `pip install httpcore['asyncio']`. +* `trio` - Required by `pip install httpcore['trio']`. +* `h2` - Required by `pip install httpcore['http2']`. +* `socksio` - Required by `pip install httpcore['socks']`. + +## Versioning + +We use [SEMVER for our versioning policy](https://semver.org/). + +For changes between package versions please see our [project changelog](CHANGELOG.md). + +We recommend pinning your requirements either the most current major version, or a more specific version range: + +```python +pip install 'httpcore==1.*' +``` +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). + +## Version 1.0.9 (April 24th, 2025) + +- Resolve https://github.com/advisories/GHSA-vqfr-h8mv-ghfj with h11 dependency update. (#1008) + +## Version 1.0.8 (April 11th, 2025) + +- Fix `AttributeError` when importing on Python 3.14. (#1005) + +## Version 1.0.7 (November 15th, 2024) + +- Support `proxy=…` configuration on `ConnectionPool()`. (#974) + +## Version 1.0.6 (October 1st, 2024) + +- Relax `trio` dependency pinning. (#956) +- Handle `trio` raising `NotImplementedError` on unsupported platforms. (#955) +- Handle mapping `ssl.SSLError` to `httpcore.ConnectError`. (#918) + +## 1.0.5 (March 27th, 2024) + +- Handle `EndOfStream` exception for anyio backend. (#899) +- Allow trio `0.25.*` series in package dependancies. (#903) + +## 1.0.4 (February 21st, 2024) + +- Add `target` request extension. (#888) +- Fix support for connection `Upgrade` and `CONNECT` when some data in the stream has been read. (#882) + +## 1.0.3 (February 13th, 2024) + +- Fix support for async cancellations. (#880) +- Fix trace extension when used with socks proxy. (#849) +- Fix SSL context for connections using the "wss" scheme (#869) + +## 1.0.2 (November 10th, 2023) + +- Fix `float("inf")` timeouts in `Event.wait` function. (#846) + +## 1.0.1 (November 3rd, 2023) + +- Fix pool timeout to account for the total time spent retrying. (#823) +- Raise a neater RuntimeError when the correct async deps are not installed. (#826) +- Add support for synchronous TLS-in-TLS streams. (#840) + +## 1.0.0 (October 6th, 2023) + +From version 1.0 our async support is now optional, as the package has minimal dependencies by default. + +For async support use either `pip install 'httpcore[asyncio]'` or `pip install 'httpcore[trio]'`. + +The project versioning policy is now explicitly governed by SEMVER. See https://semver.org/. + +- Async support becomes fully optional. (#809) +- Add support for Python 3.12. (#807) + +## 0.18.0 (September 8th, 2023) + +- Add support for HTTPS proxies. (#745, #786) +- Drop Python 3.7 support. (#727) +- Handle `sni_hostname` extension with SOCKS proxy. (#774) +- Handle HTTP/1.1 half-closed connections gracefully. 
(#641) +- Change the type of `Extensions` from `Mapping[Str, Any]` to `MutableMapping[Str, Any]`. (#762) + +## 0.17.3 (July 5th, 2023) + +- Support async cancellations, ensuring that the connection pool is left in a clean state when cancellations occur. (#726) +- The networking backend interface has [been added to the public API](https://www.encode.io/httpcore/network-backends). Some classes which were previously private implementation detail are now part of the top-level public API. (#699) +- Graceful handling of HTTP/2 GoAway frames, with requests being transparently retried on a new connection. (#730) +- Add exceptions when a synchronous `trace callback` is passed to an asynchronous request or an asynchronous `trace callback` is passed to a synchronous request. (#717) +- Drop Python 3.7 support. (#727) + +## 0.17.2 (May 23th, 2023) + +- Add `socket_options` argument to `ConnectionPool` and `HTTProxy` classes. (#668) +- Improve logging with per-module logger names. (#690) +- Add `sni_hostname` request extension. (#696) +- Resolve race condition during import of `anyio` package. (#692) +- Enable TCP_NODELAY for all synchronous sockets. (#651) + +## 0.17.1 (May 17th, 2023) + +- If 'retries' is set, then allow retries if an SSL handshake error occurs. (#669) +- Improve correctness of tracebacks on network exceptions, by raising properly chained exceptions. (#678) +- Prevent connection-hanging behaviour when HTTP/2 connections are closed by a server-sent 'GoAway' frame. (#679) +- Fix edge-case exception when removing requests from the connection pool. (#680) +- Fix pool timeout edge-case. (#688) + +## 0.17.0 (March 16th, 2023) + +- Add DEBUG level logging. (#648) +- Respect HTTP/2 max concurrent streams when settings updates are sent by server. (#652) +- Increase the allowable HTTP header size to 100kB. (#647) +- Add `retries` option to SOCKS proxy classes. (#643) + +## 0.16.3 (December 20th, 2022) + +- Allow `ws` and `wss` schemes. Allows us to properly support websocket upgrade connections. (#625) +- Forwarding HTTP proxies use a connection-per-remote-host. Required by some proxy implementations. (#637) +- Don't raise `RuntimeError` when closing a connection pool with active connections. Removes some error cases when cancellations are used. (#631) +- Lazy import `anyio`, so that it's no longer a hard dependancy, and isn't imported if unused. (#639) + +## 0.16.2 (November 25th, 2022) + +- Revert 'Fix async cancellation behaviour', which introduced race conditions. (#627) +- Raise `RuntimeError` if attempting to us UNIX domain sockets on Windows. (#619) + +## 0.16.1 (November 17th, 2022) + +- Fix HTTP/1.1 interim informational responses, such as "100 Continue". (#605) + +## 0.16.0 (October 11th, 2022) + +- Support HTTP/1.1 informational responses. (#581) +- Fix async cancellation behaviour. (#580) +- Support `h11` 0.14. (#579) + +## 0.15.0 (May 17th, 2022) + +- Drop Python 3.6 support (#535) +- Ensure HTTP proxy CONNECT requests include `timeout` configuration. (#506) +- Switch to explicit `typing.Optional` for type hints. (#513) +- For `trio` map OSError exceptions to `ConnectError`. (#543) + +## 0.14.7 (February 4th, 2022) + +- Requests which raise a PoolTimeout need to be removed from the pool queue. (#502) +- Fix AttributeError that happened when Socks5Connection were terminated. (#501) + +## 0.14.6 (February 1st, 2022) + +- Fix SOCKS support for `http://` URLs. (#492) +- Resolve race condition around exceptions during streaming a response. 
(#491) + +## 0.14.5 (January 18th, 2022) + +- SOCKS proxy support. (#478) +- Add proxy_auth argument to HTTPProxy. (#481) +- Improve error message on 'RemoteProtocolError' exception when server disconnects without sending a response. (#479) + +## 0.14.4 (January 5th, 2022) + +- Support HTTP/2 on HTTPS tunnelling proxies. (#468) +- Fix proxy headers missing on HTTP forwarding. (#456) +- Only instantiate SSL context if required. (#457) +- More robust HTTP/2 handling. (#253, #439, #440, #441) + +## 0.14.3 (November 17th, 2021) + +- Fix race condition when removing closed connections from the pool. (#437) + +## 0.14.2 (November 16th, 2021) + +- Failed connections no longer remain in the pool. (Pull #433) + +## 0.14.1 (November 12th, 2021) + +- `max_connections` becomes optional. (Pull #429) +- `certifi` is now included in the install dependancies. (Pull #428) +- `h2` is now strictly optional. (Pull #428) + +## 0.14.0 (November 11th, 2021) + +The 0.14 release is a complete reworking of `httpcore`, comprehensively addressing some underlying issues in the connection pooling, as well as substantially redesigning the API to be more user friendly. + +Some of the lower-level API design also makes the components more easily testable in isolation, and the package now has 100% test coverage. + +See [discussion #419](https://github.com/encode/httpcore/discussions/419) for a little more background. + +There's some other neat bits in there too, such as the "trace" extension, which gives a hook into inspecting the internal events that occur during the request/response cycle. This extension is needed for the HTTPX cli, in order to... + +* Log the point at which the connection is established, and the IP/port on which it is made. +* Determine if the outgoing request should log as HTTP/1.1 or HTTP/2, rather than having to assume it's HTTP/2 if the --http2 flag was passed. (Which may not actually be true.) +* Log SSL version info / certificate info. + +Note that `curio` support is not currently available in 0.14.0. If you're using `httpcore` with `curio` please get in touch, so we can assess if we ought to prioritize it as a feature or not. + +## 0.13.7 (September 13th, 2021) + +- Fix broken error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #403) + +## 0.13.6 (June 15th, 2021) + +### Fixed + +- Close sockets when read or write timeouts occur. (Pull #365) + +## 0.13.5 (June 14th, 2021) + +### Fixed + +- Resolved niggles with AnyIO EOF behaviours. (Pull #358, #362) + +## 0.13.4 (June 9th, 2021) + +### Added + +- Improved error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #354) + +### Fixed + +- Switched to `anyio` as the default backend implementation when running with `asyncio`. Resolves some awkward [TLS timeout issues](https://github.com/encode/httpx/discussions/1511). + +## 0.13.3 (May 6th, 2021) + +### Added + +- Support HTTP/2 prior knowledge, using `httpcore.SyncConnectionPool(http1=False)`. (Pull #333) + +### Fixed + +- Handle cases where environment does not provide `select.poll` support. (Pull #331) + +## 0.13.2 (April 29th, 2021) + +### Added + +- Improve error message for specific case of `RemoteProtocolError` where server disconnects without sending a response. (Pull #313) + +## 0.13.1 (April 28th, 2021) + +### Fixed + +- More resiliant testing for closed connections. (Pull #311) +- Don't raise exceptions on ungraceful connection closes. 
(Pull #310) + +## 0.13.0 (April 21st, 2021) + +The 0.13 release updates the core API in order to match the HTTPX Transport API, +introduced in HTTPX 0.18 onwards. + +An example of making requests with the new interface is: + +```python +with httpcore.SyncConnectionPool() as http: + status_code, headers, stream, extensions = http.handle_request( + method=b'GET', + url=(b'https', b'example.org', 443, b'/'), + headers=[(b'host', b'example.org'), (b'user-agent', b'httpcore')] + stream=httpcore.ByteStream(b''), + extensions={} + ) + body = stream.read() + print(status_code, body) +``` + +### Changed + +- The `.request()` method is now `handle_request()`. (Pull #296) +- The `.arequest()` method is now `.handle_async_request()`. (Pull #296) +- The `headers` argument is no longer optional. (Pull #296) +- The `stream` argument is no longer optional. (Pull #296) +- The `ext` argument is now named `extensions`, and is no longer optional. (Pull #296) +- The `"reason"` extension keyword is now named `"reason_phrase"`. (Pull #296) +- The `"reason_phrase"` and `"http_version"` extensions now use byte strings for their values. (Pull #296) +- The `httpcore.PlainByteStream()` class becomes `httpcore.ByteStream()`. (Pull #296) + +### Added + +- Streams now support a `.read()` interface. (Pull #296) + +### Fixed + +- Task cancellation no longer leaks connections from the connection pool. (Pull #305) + +## 0.12.3 (December 7th, 2020) + +### Fixed + +- Abort SSL connections on close rather than waiting for remote EOF when using `asyncio`. (Pull #167) +- Fix exception raised in case of connect timeouts when using the `anyio` backend. (Pull #236) +- Fix `Host` header precedence for `:authority` in HTTP/2. (Pull #241, #243) +- Handle extra edge case when detecting for socket readability when using `asyncio`. (Pull #242, #244) +- Fix `asyncio` SSL warning when using proxy tunneling. (Pull #249) + +## 0.12.2 (November 20th, 2020) + +### Fixed + +- Properly wrap connect errors on the asyncio backend. (Pull #235) +- Fix `ImportError` occurring on Python 3.9 when using the HTTP/1.1 sync client in a multithreaded context. (Pull #237) + +## 0.12.1 (November 7th, 2020) + +### Added + +- Add connect retries. (Pull #221) + +### Fixed + +- Tweak detection of dropped connections, resolving an issue with open files limits on Linux. (Pull #185) +- Avoid leaking connections when establishing an HTTP tunnel to a proxy has failed. (Pull #223) +- Properly wrap OS errors when using `trio`. (Pull #225) + +## 0.12.0 (October 6th, 2020) + +### Changed + +- HTTP header casing is now preserved, rather than always sent in lowercase. (#216 and python-hyper/h11#104) + +### Added + +- Add Python 3.9 to officially supported versions. + +### Fixed + +- Gracefully handle a stdlib asyncio bug when a connection is closed while it is in a paused-for-reading state. (#201) + +## 0.11.1 (September 28nd, 2020) + +### Fixed + +- Add await to async semaphore release() coroutine (#197) +- Drop incorrect curio classifier (#192) + +## 0.11.0 (September 22nd, 2020) + +The Transport API with 0.11.0 has a couple of significant changes. + +Firstly we've moved changed the request interface in order to allow extensions, which will later enable us to support features +such as trailing headers, HTTP/2 server push, and CONNECT/Upgrade connections. 
+ +The interface changes from: + +```python +def request(method, url, headers, stream, timeout): + return (http_version, status_code, reason, headers, stream) +``` + +To instead including an optional dictionary of extensions on the request and response: + +```python +def request(method, url, headers, stream, ext): + return (status_code, headers, stream, ext) +``` + +Having an open-ended extensions point will allow us to add later support for various optional features, that wouldn't otherwise be supported without these API changes. + +In particular: + +* Trailing headers support. +* HTTP/2 Server Push +* sendfile. +* Exposing raw connection on CONNECT, Upgrade, HTTP/2 bi-di streaming. +* Exposing debug information out of the API, including template name, template context. + +Currently extensions are limited to: + +* request: `timeout` - Optional. Timeout dictionary. +* response: `http_version` - Optional. Include the HTTP version used on the response. +* response: `reason` - Optional. Include the reason phrase used on the response. Only valid with HTTP/1.*. + +See https://github.com/encode/httpx/issues/1274#issuecomment-694884553 for the history behind this. + +Secondly, the async version of `request` is now namespaced as `arequest`. + +This allows concrete transports to support both sync and async implementations on the same class. + +### Added + +- Add curio support. (Pull #168) +- Add anyio support, with `backend="anyio"`. (Pull #169) + +### Changed + +- Update the Transport API to use 'ext' for optional extensions. (Pull #190) +- Update the Transport API to use `.request` and `.arequest` so implementations can support both sync and async. (Pull #189) + +## 0.10.2 (August 20th, 2020) + +### Added + +- Added Unix Domain Socket support. (Pull #139) + +### Fixed + +- Always include the port on proxy CONNECT requests. (Pull #154) +- Fix `max_keepalive_connections` configuration. (Pull #153) +- Fixes behaviour in HTTP/1.1 where server disconnects can be used to signal the end of the response body. (Pull #164) + +## 0.10.1 (August 7th, 2020) + +- Include `max_keepalive_connections` on `AsyncHTTPProxy`/`SyncHTTPProxy` classes. + +## 0.10.0 (August 7th, 2020) + +The most notable change in the 0.10.0 release is that HTTP/2 support is now fully optional. + +Use either `pip install httpcore` for HTTP/1.1 support only, or `pip install httpcore[http2]` for HTTP/1.1 and HTTP/2 support. + +### Added + +- HTTP/2 support becomes optional. (Pull #121, #130) +- Add `local_address=...` support. (Pull #100, #134) +- Add `PlainByteStream`, `IteratorByteStream`, `AsyncIteratorByteStream`. The `AsyncByteSteam` and `SyncByteStream` classes are now pure interface classes. (#133) +- Add `LocalProtocolError`, `RemoteProtocolError` exceptions. (Pull #129) +- Add `UnsupportedProtocol` exception. (Pull #128) +- Add `.get_connection_info()` method. (Pull #102, #137) +- Add better TRACE logs. (Pull #101) + +### Changed + +- `max_keepalive` is deprecated in favour of `max_keepalive_connections`. (Pull #140) + +### Fixed + +- Improve handling of server disconnects. (Pull #112) + +## 0.9.1 (May 27th, 2020) + +### Fixed + +- Proper host resolution for sync case, including IPv6 support. (Pull #97) +- Close outstanding connections when connection pool is closed. (Pull #98) + +## 0.9.0 (May 21th, 2020) + +### Changed + +- URL port becomes an `Optional[int]` instead of `int`. (Pull #92) + +### Fixed + +- Honor HTTP/2 max concurrent streams settings. (Pull #89, #90) +- Remove incorrect debug log. 
(Pull #83) + +## 0.8.4 (May 11th, 2020) + +### Added + +- Logging via HTTPCORE_LOG_LEVEL and HTTPX_LOG_LEVEL environment variables +and TRACE level logging. (Pull #79) + +### Fixed + +- Reuse of connections on HTTP/2 in close concurrency situations. (Pull #81) + +## 0.8.3 (May 6rd, 2020) + +### Fixed + +- Include `Host` and `Accept` headers on proxy "CONNECT" requests. +- De-duplicate any headers also contained in proxy_headers. +- HTTP/2 flag not being passed down to proxy connections. + +## 0.8.2 (May 3rd, 2020) + +### Fixed + +- Fix connections using proxy forwarding requests not being added to the +connection pool properly. (Pull #70) + +## 0.8.1 (April 30th, 2020) + +### Changed + +- Allow inherintance of both `httpcore.AsyncByteStream`, `httpcore.SyncByteStream` without type conflicts. + +## 0.8.0 (April 30th, 2020) + +### Fixed + +- Fixed tunnel proxy support. + +### Added + +- New `TimeoutException` base class. + +## 0.7.0 (March 5th, 2020) + +- First integration with HTTPX. diff --git a/.venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/RECORD b/.venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/RECORD new file mode 100644 index 0000000..44d4769 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/RECORD @@ -0,0 +1,68 @@ +httpcore-1.0.9.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +httpcore-1.0.9.dist-info/METADATA,sha256=_i1P2mGZEol4d54M8n88BFxTGGP83Zh-rMdPOhjUHCE,21529 +httpcore-1.0.9.dist-info/RECORD,, +httpcore-1.0.9.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87 +httpcore-1.0.9.dist-info/licenses/LICENSE.md,sha256=_ctZFUx0y6uhahEkL3dAvqnyPW_rVUeRfYxflKgDkqU,1518 +httpcore/__init__.py,sha256=9kT_kqChCCJUTHww24ZmR_ezcdbpRYWksD-gYNzkZP8,3445 +httpcore/__pycache__/__init__.cpython-312.pyc,, +httpcore/__pycache__/_api.cpython-312.pyc,, +httpcore/__pycache__/_exceptions.cpython-312.pyc,, +httpcore/__pycache__/_models.cpython-312.pyc,, +httpcore/__pycache__/_ssl.cpython-312.pyc,, +httpcore/__pycache__/_synchronization.cpython-312.pyc,, +httpcore/__pycache__/_trace.cpython-312.pyc,, +httpcore/__pycache__/_utils.cpython-312.pyc,, +httpcore/_api.py,sha256=unZmeDschBWCGCPCwkS3Wot9euK6bg_kKxLtGTxw214,3146 +httpcore/_async/__init__.py,sha256=EWdl2v4thnAHzJpqjU4h2a8DUiGAvNiWrkii9pfhTf0,1221 +httpcore/_async/__pycache__/__init__.cpython-312.pyc,, +httpcore/_async/__pycache__/connection.cpython-312.pyc,, +httpcore/_async/__pycache__/connection_pool.cpython-312.pyc,, +httpcore/_async/__pycache__/http11.cpython-312.pyc,, +httpcore/_async/__pycache__/http2.cpython-312.pyc,, +httpcore/_async/__pycache__/http_proxy.cpython-312.pyc,, +httpcore/_async/__pycache__/interfaces.cpython-312.pyc,, +httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc,, +httpcore/_async/connection.py,sha256=6OcPXqMEfc0BU38_-iHUNDd1vKSTc2UVT09XqNb_BOk,8449 +httpcore/_async/connection_pool.py,sha256=DOIQ2s2ZCf9qfwxhzMprTPLqCL8OxGXiKF6qRHxvVyY,17307 +httpcore/_async/http11.py,sha256=-qM9bV7PjSQF5vxs37-eUXOIFwbIjPcZbNliuX9TtBw,13880 +httpcore/_async/http2.py,sha256=azX1fcmtXaIwjputFlZ4vd92J8xwjGOa9ax9QIv4394,23936 +httpcore/_async/http_proxy.py,sha256=2zVkrlv-Ds-rWGaqaXlrhEJiAQFPo23BT3Gq_sWoBXU,14701 +httpcore/_async/interfaces.py,sha256=jTiaWL83pgpGC9ziv90ZfwaKNMmHwmOalzaKiuTxATo,4455 +httpcore/_async/socks_proxy.py,sha256=lLKgLlggPfhFlqi0ODeBkOWvt9CghBBUyqsnsU1tx6Q,13841 +httpcore/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +httpcore/_backends/__pycache__/__init__.cpython-312.pyc,, 
+httpcore/_backends/__pycache__/anyio.cpython-312.pyc,, +httpcore/_backends/__pycache__/auto.cpython-312.pyc,, +httpcore/_backends/__pycache__/base.cpython-312.pyc,, +httpcore/_backends/__pycache__/mock.cpython-312.pyc,, +httpcore/_backends/__pycache__/sync.cpython-312.pyc,, +httpcore/_backends/__pycache__/trio.cpython-312.pyc,, +httpcore/_backends/anyio.py,sha256=x8PgEhXRC8bVqsdzk_YJx8Y6d9Tub06CuUSwnbmtqoY,5252 +httpcore/_backends/auto.py,sha256=zO136PKZmsaTDK-HRk84eA-MUg8_2wJf4NvmK432Aio,1662 +httpcore/_backends/base.py,sha256=aShgRdZnMmRhFWHetjumlM73f8Kz1YOAyCUP_4kHslA,3042 +httpcore/_backends/mock.py,sha256=er9T436uSe7NLrfiLa4x6Nuqg5ivQ693CxWYCWsgbH4,4077 +httpcore/_backends/sync.py,sha256=bhE4d9iK9Umxdsdsgm2EfKnXaBms2WggGYU-7jmUujU,7977 +httpcore/_backends/trio.py,sha256=LHu4_Mr5MswQmmT3yE4oLgf9b_JJfeVS4BjDxeJc7Ro,5996 +httpcore/_exceptions.py,sha256=looCKga3_YVYu3s-d3L9RMPRJyhsY7fiuuGxvkOD0c0,1184 +httpcore/_models.py,sha256=IO2CcXcdpovRcLTdGFGB6RyBZdEm2h_TOmoCc4rEKho,17623 +httpcore/_ssl.py,sha256=srqmSNU4iOUvWF-SrJvb8G_YEbHFELOXQOwdDIBTS9c,187 +httpcore/_sync/__init__.py,sha256=JBDIgXt5la1LCJ1sLQeKhjKFpLnpNr8Svs6z2ni3fgg,1141 +httpcore/_sync/__pycache__/__init__.cpython-312.pyc,, +httpcore/_sync/__pycache__/connection.cpython-312.pyc,, +httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc,, +httpcore/_sync/__pycache__/http11.cpython-312.pyc,, +httpcore/_sync/__pycache__/http2.cpython-312.pyc,, +httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc,, +httpcore/_sync/__pycache__/interfaces.cpython-312.pyc,, +httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc,, +httpcore/_sync/connection.py,sha256=9exGOb3PB-Mp2T1-sckSeL2t-tJ_9-NXomV8ihmWCgU,8238 +httpcore/_sync/connection_pool.py,sha256=a-T8LTsUxc7r0Ww1atfHSDoWPjQ0fA8Ul7S3-F0Mj70,16955 +httpcore/_sync/http11.py,sha256=IFobD1Md5JFlJGKWnh1_Q3epikUryI8qo09v8MiJIEA,13476 +httpcore/_sync/http2.py,sha256=AxU4yhcq68Bn5vqdJYtiXKYUj7nvhYbxz3v4rT4xnvA,23400 +httpcore/_sync/http_proxy.py,sha256=_al_6crKuEZu2wyvu493RZImJdBJnj5oGKNjLOJL2Zo,14463 +httpcore/_sync/interfaces.py,sha256=snXON42vUDHO5JBJvo8D4VWk2Wat44z2OXXHDrjbl94,4344 +httpcore/_sync/socks_proxy.py,sha256=zegZW9Snqj2_992DFJa8_CppOVBkVL4AgwduRkStakQ,13614 +httpcore/_synchronization.py,sha256=zSi13mAColBnknjZBknUC6hKNDQT4C6ijnezZ-r0T2s,9434 +httpcore/_trace.py,sha256=ck6ZoIzYTkdNAIfq5MGeKqBXDtqjOX-qfYwmZFbrGco,3952 +httpcore/_utils.py,sha256=_RLgXYOAYC350ikALV59GZ68IJrdocRZxPs9PjmzdFY,1537 +httpcore/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/.venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/WHEEL new file mode 100644 index 0000000..12228d4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.27.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/licenses/LICENSE.md b/.venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/licenses/LICENSE.md new file mode 100644 index 0000000..311b2b5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/licenses/LICENSE.md @@ -0,0 +1,27 @@ +Copyright © 2020, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/lib/python3.12/site-packages/httpcore/__init__.py b/.venv/lib/python3.12/site-packages/httpcore/__init__.py new file mode 100644 index 0000000..9a92dc4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/__init__.py @@ -0,0 +1,141 @@ +from ._api import request, stream +from ._async import ( + AsyncConnectionInterface, + AsyncConnectionPool, + AsyncHTTP2Connection, + AsyncHTTP11Connection, + AsyncHTTPConnection, + AsyncHTTPProxy, + AsyncSOCKSProxy, +) +from ._backends.base import ( + SOCKET_OPTION, + AsyncNetworkBackend, + AsyncNetworkStream, + NetworkBackend, + NetworkStream, +) +from ._backends.mock import AsyncMockBackend, AsyncMockStream, MockBackend, MockStream +from ._backends.sync import SyncBackend +from ._exceptions import ( + ConnectError, + ConnectionNotAvailable, + ConnectTimeout, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + TimeoutException, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from ._models import URL, Origin, Proxy, Request, Response +from ._ssl import default_ssl_context +from ._sync import ( + ConnectionInterface, + ConnectionPool, + HTTP2Connection, + HTTP11Connection, + HTTPConnection, + HTTPProxy, + SOCKSProxy, +) + +# The 'httpcore.AnyIOBackend' class is conditional on 'anyio' being installed. +try: + from ._backends.anyio import AnyIOBackend +except ImportError: # pragma: nocover + + class AnyIOBackend: # type: ignore + def __init__(self, *args, **kwargs): # type: ignore + msg = ( + "Attempted to use 'httpcore.AnyIOBackend' but 'anyio' is not installed." + ) + raise RuntimeError(msg) + + +# The 'httpcore.TrioBackend' class is conditional on 'trio' being installed. +try: + from ._backends.trio import TrioBackend +except ImportError: # pragma: nocover + + class TrioBackend: # type: ignore + def __init__(self, *args, **kwargs): # type: ignore + msg = "Attempted to use 'httpcore.TrioBackend' but 'trio' is not installed." 
+ raise RuntimeError(msg) + + +__all__ = [ + # top-level requests + "request", + "stream", + # models + "Origin", + "URL", + "Request", + "Response", + "Proxy", + # async + "AsyncHTTPConnection", + "AsyncConnectionPool", + "AsyncHTTPProxy", + "AsyncHTTP11Connection", + "AsyncHTTP2Connection", + "AsyncConnectionInterface", + "AsyncSOCKSProxy", + # sync + "HTTPConnection", + "ConnectionPool", + "HTTPProxy", + "HTTP11Connection", + "HTTP2Connection", + "ConnectionInterface", + "SOCKSProxy", + # network backends, implementations + "SyncBackend", + "AnyIOBackend", + "TrioBackend", + # network backends, mock implementations + "AsyncMockBackend", + "AsyncMockStream", + "MockBackend", + "MockStream", + # network backends, interface + "AsyncNetworkStream", + "AsyncNetworkBackend", + "NetworkStream", + "NetworkBackend", + # util + "default_ssl_context", + "SOCKET_OPTION", + # exceptions + "ConnectionNotAvailable", + "ProxyError", + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", + "UnsupportedProtocol", + "TimeoutException", + "PoolTimeout", + "ConnectTimeout", + "ReadTimeout", + "WriteTimeout", + "NetworkError", + "ConnectError", + "ReadError", + "WriteError", +] + +__version__ = "1.0.9" + + +__locals = locals() +for __name in __all__: + # Exclude SOCKET_OPTION, it causes AttributeError on Python 3.14 + if not __name.startswith(("__", "SOCKET_OPTION")): + setattr(__locals[__name], "__module__", "httpcore") # noqa diff --git a/.venv/lib/python3.12/site-packages/httpcore/_api.py b/.venv/lib/python3.12/site-packages/httpcore/_api.py new file mode 100644 index 0000000..38b961d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_api.py @@ -0,0 +1,94 @@ +from __future__ import annotations + +import contextlib +import typing + +from ._models import URL, Extensions, HeaderTypes, Response +from ._sync.connection_pool import ConnectionPool + + +def request( + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes | typing.Iterator[bytes] | None = None, + extensions: Extensions | None = None, +) -> Response: + """ + Sends an HTTP request, returning the response. + + ``` + response = httpcore.request("GET", "https://www.example.com/") + ``` + + Arguments: + method: The HTTP method for the request. Typically one of `"GET"`, + `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`. + url: The URL of the HTTP request. Either as an instance of `httpcore.URL`, + or as str/bytes. + headers: The HTTP request headers. Either as a dictionary of str/bytes, + or as a list of two-tuples of str/bytes. + content: The content of the request body. Either as bytes, + or as a bytes iterator. + extensions: A dictionary of optional extra information included on the request. + Possible keys include `"timeout"`. + + Returns: + An instance of `httpcore.Response`. + """ + with ConnectionPool() as pool: + return pool.request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + + +@contextlib.contextmanager +def stream( + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes | typing.Iterator[bytes] | None = None, + extensions: Extensions | None = None, +) -> typing.Iterator[Response]: + """ + Sends an HTTP request, returning the response within a content manager. + + ``` + with httpcore.stream("GET", "https://www.example.com/") as response: + ... + ``` + + When using the `stream()` function, the body of the response will not be + automatically read. 
If you want to access the response body you should + either use `content = response.read()`, or `for chunk in response.iter_content()`. + + Arguments: + method: The HTTP method for the request. Typically one of `"GET"`, + `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`. + url: The URL of the HTTP request. Either as an instance of `httpcore.URL`, + or as str/bytes. + headers: The HTTP request headers. Either as a dictionary of str/bytes, + or as a list of two-tuples of str/bytes. + content: The content of the request body. Either as bytes, + or as a bytes iterator. + extensions: A dictionary of optional extra information included on the request. + Possible keys include `"timeout"`. + + Returns: + An instance of `httpcore.Response`. + """ + with ConnectionPool() as pool: + with pool.stream( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) as response: + yield response diff --git a/.venv/lib/python3.12/site-packages/httpcore/_async/__init__.py b/.venv/lib/python3.12/site-packages/httpcore/_async/__init__.py new file mode 100644 index 0000000..88dc7f0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_async/__init__.py @@ -0,0 +1,39 @@ +from .connection import AsyncHTTPConnection +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .http_proxy import AsyncHTTPProxy +from .interfaces import AsyncConnectionInterface + +try: + from .http2 import AsyncHTTP2Connection +except ImportError: # pragma: nocover + + class AsyncHTTP2Connection: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use http2 support, but the `h2` package is not " + "installed. Use 'pip install httpcore[http2]'." + ) + + +try: + from .socks_proxy import AsyncSOCKSProxy +except ImportError: # pragma: nocover + + class AsyncSOCKSProxy: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use SOCKS support, but the `socksio` package is not " + "installed. Use 'pip install httpcore[socks]'." + ) + + +__all__ = [ + "AsyncHTTPConnection", + "AsyncConnectionPool", + "AsyncHTTPProxy", + "AsyncHTTP11Connection", + "AsyncHTTP2Connection", + "AsyncConnectionInterface", + "AsyncSOCKSProxy", +] diff --git a/.venv/lib/python3.12/site-packages/httpcore/_async/connection.py b/.venv/lib/python3.12/site-packages/httpcore/_async/connection.py new file mode 100644 index 0000000..b42581d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_async/connection.py @@ -0,0 +1,222 @@ +from __future__ import annotations + +import itertools +import logging +import ssl +import types +import typing + +from .._backends.auto import AutoBackend +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream +from .._exceptions import ConnectError, ConnectTimeout +from .._models import Origin, Request, Response +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. + + +logger = logging.getLogger("httpcore.connection") + + +def exponential_backoff(factor: float) -> typing.Iterator[float]: + """ + Generate a geometric sequence that has a ratio of 2 and starts with 0. 
+ + For example: + - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` + - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` + """ + yield 0 + for n in itertools.count(): + yield factor * 2**n + + +class AsyncHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + origin: Origin, + ssl_context: ssl.SSLContext | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: AsyncNetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + self._origin = origin + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend: AsyncNetworkBackend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._connection: AsyncConnectionInterface | None = None + self._connect_failed: bool = False + self._request_lock = AsyncLock() + self._socket_options = socket_options + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection to {self._origin}" + ) + + try: + async with self._request_lock: + if self._connection is None: + stream = await self._connect(request) + + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except BaseException as exc: + self._connect_failed = True + raise exc + + return await self._connection.handle_async_request(request) + + async def _connect(self, request: Request) -> AsyncNetworkStream: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + retries_left = self._retries + delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) + + while True: + try: + if self._uds is None: + kwargs = { + "host": self._origin.host.decode("ascii"), + "port": self._origin.port, + "local_address": self._local_address, + "timeout": timeout, + "socket_options": self._socket_options, + } + async with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = await self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + else: + kwargs = { + "path": self._uds, + "timeout": timeout, + "socket_options": self._socket_options, + } + async with Trace( + "connect_unix_socket", logger, request, kwargs + ) as trace: + stream = await self._network_backend.connect_unix_socket( + **kwargs + ) + trace.return_value = stream + + if self._origin.scheme in (b"https", b"wss"): + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or 
self._origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + return stream + except (ConnectError, ConnectTimeout): + if retries_left <= 0: + raise + retries_left -= 1 + delay = next(delays) + async with Trace("retry", logger, request, kwargs) as trace: + await self._network_backend.sleep(delay) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + async def aclose(self) -> None: + if self._connection is not None: + async with Trace("close", logger, None, {}): + await self._connection.aclose() + + def is_available(self) -> bool: + if self._connection is None: + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
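# Illustrative sketch (not part of the vendored httpcore sources): the retry
# delays used by `_connect()` above are drawn from
# `exponential_backoff(RETRIES_BACKOFF_FACTOR)`. With the factor of 0.5 defined
# at the top of this module, the schedule is 0s, 0.5s, 1s, 2s, 4s, ...
import itertools

delays = exponential_backoff(RETRIES_BACKOFF_FACTOR)
print(list(itertools.islice(delays, 5)))  # -> [0, 0.5, 1.0, 2.0, 4.0]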
+ + async def __aenter__(self) -> AsyncHTTPConnection: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + await self.aclose() diff --git a/.venv/lib/python3.12/site-packages/httpcore/_async/connection_pool.py b/.venv/lib/python3.12/site-packages/httpcore/_async/connection_pool.py new file mode 100644 index 0000000..96e973d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_async/connection_pool.py @@ -0,0 +1,420 @@ +from __future__ import annotations + +import ssl +import sys +import types +import typing + +from .._backends.auto import AutoBackend +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend +from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol +from .._models import Origin, Proxy, Request, Response +from .._synchronization import AsyncEvent, AsyncShieldCancellation, AsyncThreadLock +from .connection import AsyncHTTPConnection +from .interfaces import AsyncConnectionInterface, AsyncRequestInterface + + +class AsyncPoolRequest: + def __init__(self, request: Request) -> None: + self.request = request + self.connection: AsyncConnectionInterface | None = None + self._connection_acquired = AsyncEvent() + + def assign_to_connection(self, connection: AsyncConnectionInterface | None) -> None: + self.connection = connection + self._connection_acquired.set() + + def clear_connection(self) -> None: + self.connection = None + self._connection_acquired = AsyncEvent() + + async def wait_for_connection( + self, timeout: float | None = None + ) -> AsyncConnectionInterface: + if self.connection is None: + await self._connection_acquired.wait(timeout=timeout) + assert self.connection is not None + return self.connection + + def is_queued(self) -> bool: + return self.connection is None + + +class AsyncConnectionPool(AsyncRequestInterface): + """ + A connection pool for making HTTP requests. + """ + + def __init__( + self, + ssl_context: ssl.SSLContext | None = None, + proxy: Proxy | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: AsyncNetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish a + connection. + local_address: Local address to connect from. 
Can also be used to connect + using a particular address family. Using `local_address="0.0.0.0"` + will connect using an `AF_INET` address (IPv4), while using + `local_address="::"` will connect using an `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. + socket_options: Socket options that have to be included + in the TCP socket when the connection was established. + """ + self._ssl_context = ssl_context + self._proxy = proxy + self._max_connections = ( + sys.maxsize if max_connections is None else max_connections + ) + self._max_keepalive_connections = ( + sys.maxsize + if max_keepalive_connections is None + else max_keepalive_connections + ) + self._max_keepalive_connections = min( + self._max_connections, self._max_keepalive_connections + ) + + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._socket_options = socket_options + + # The mutable state on a connection pool is the queue of incoming requests, + # and the set of connections that are servicing those requests. + self._connections: list[AsyncConnectionInterface] = [] + self._requests: list[AsyncPoolRequest] = [] + + # We only mutate the state of the connection pool within an 'optional_thread_lock' + # context. This holds a threading lock unless we're running in async mode, + # in which case it is a no-op. + self._optional_thread_lock = AsyncThreadLock() + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + if self._proxy is not None: + if self._proxy.url.scheme in (b"socks5", b"socks5h"): + from .socks_proxy import AsyncSocks5Connection + + return AsyncSocks5Connection( + proxy_origin=self._proxy.url.origin, + proxy_auth=self._proxy.auth, + remote_origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + elif origin.scheme == b"http": + from .http_proxy import AsyncForwardHTTPConnection + + return AsyncForwardHTTPConnection( + proxy_origin=self._proxy.url.origin, + proxy_headers=self._proxy.headers, + proxy_ssl_context=self._proxy.ssl_context, + remote_origin=origin, + keepalive_expiry=self._keepalive_expiry, + network_backend=self._network_backend, + ) + from .http_proxy import AsyncTunnelHTTPConnection + + return AsyncTunnelHTTPConnection( + proxy_origin=self._proxy.url.origin, + proxy_headers=self._proxy.headers, + proxy_ssl_context=self._proxy.ssl_context, + remote_origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + return AsyncHTTPConnection( + origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + retries=self._retries, + local_address=self._local_address, + uds=self._uds, + network_backend=self._network_backend, + socket_options=self._socket_options, + ) + + @property + def connections(self) -> list[AsyncConnectionInterface]: + """ + Return a list of the connections currently in the pool. 
+ + For example: + + ```python + >>> pool.connections + [ + , + , + , + ] + ``` + """ + return list(self._connections) + + async def handle_async_request(self, request: Request) -> Response: + """ + Send an HTTP request, and return an HTTP response. + + This is the core implementation that is called into by `.request()` or `.stream()`. + """ + scheme = request.url.scheme.decode() + if scheme == "": + raise UnsupportedProtocol( + "Request URL is missing an 'http://' or 'https://' protocol." + ) + if scheme not in ("http", "https", "ws", "wss"): + raise UnsupportedProtocol( + f"Request URL has an unsupported protocol '{scheme}://'." + ) + + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("pool", None) + + with self._optional_thread_lock: + # Add the incoming request to our request queue. + pool_request = AsyncPoolRequest(request) + self._requests.append(pool_request) + + try: + while True: + with self._optional_thread_lock: + # Assign incoming requests to available connections, + # closing or creating new connections as required. + closing = self._assign_requests_to_connections() + await self._close_connections(closing) + + # Wait until this request has an assigned connection. + connection = await pool_request.wait_for_connection(timeout=timeout) + + try: + # Send the request on the assigned connection. + response = await connection.handle_async_request( + pool_request.request + ) + except ConnectionNotAvailable: + # In some cases a connection may initially be available to + # handle a request, but then become unavailable. + # + # In this case we clear the connection and try again. + pool_request.clear_connection() + else: + break # pragma: nocover + + except BaseException as exc: + with self._optional_thread_lock: + # For any exception or cancellation we remove the request from + # the queue, and then re-assign requests to connections. + self._requests.remove(pool_request) + closing = self._assign_requests_to_connections() + + await self._close_connections(closing) + raise exc from None + + # Return the response. Note that in this case we still have to manage + # the point at which the response is closed. + assert isinstance(response.stream, typing.AsyncIterable) + return Response( + status=response.status, + headers=response.headers, + content=PoolByteStream( + stream=response.stream, pool_request=pool_request, pool=self + ), + extensions=response.extensions, + ) + + def _assign_requests_to_connections(self) -> list[AsyncConnectionInterface]: + """ + Manage the state of the connection pool, assigning incoming + requests to connections as available. + + Called whenever a new request is added or removed from the pool. + + Any closing connections are returned, allowing the I/O for closing + those connections to be handled seperately. + """ + closing_connections = [] + + # First we handle cleaning up any connections that are closed, + # have expired their keep-alive, or surplus idle connections. 
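# Worked example of the pruning policy below (illustrative, not part of the
# vendored httpcore sources): with the limits shown in this sketch, up to 10
# connections may be open while requests are in flight, but once traffic drains
# the loop below closes idle connections until no more than 5 remain.
pool = AsyncConnectionPool(max_connections=10, max_keepalive_connections=5)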
+ for connection in list(self._connections): + if connection.is_closed(): + # log: "removing closed connection" + self._connections.remove(connection) + elif connection.has_expired(): + # log: "closing expired connection" + self._connections.remove(connection) + closing_connections.append(connection) + elif ( + connection.is_idle() + and len([connection.is_idle() for connection in self._connections]) + > self._max_keepalive_connections + ): + # log: "closing idle connection" + self._connections.remove(connection) + closing_connections.append(connection) + + # Assign queued requests to connections. + queued_requests = [request for request in self._requests if request.is_queued()] + for pool_request in queued_requests: + origin = pool_request.request.url.origin + available_connections = [ + connection + for connection in self._connections + if connection.can_handle_request(origin) and connection.is_available() + ] + idle_connections = [ + connection for connection in self._connections if connection.is_idle() + ] + + # There are three cases for how we may be able to handle the request: + # + # 1. There is an existing connection that can handle the request. + # 2. We can create a new connection to handle the request. + # 3. We can close an idle connection and then create a new connection + # to handle the request. + if available_connections: + # log: "reusing existing connection" + connection = available_connections[0] + pool_request.assign_to_connection(connection) + elif len(self._connections) < self._max_connections: + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + elif idle_connections: + # log: "closing idle connection" + connection = idle_connections[0] + self._connections.remove(connection) + closing_connections.append(connection) + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + + return closing_connections + + async def _close_connections(self, closing: list[AsyncConnectionInterface]) -> None: + # Close connections which have been removed from the pool. + with AsyncShieldCancellation(): + for connection in closing: + await connection.aclose() + + async def aclose(self) -> None: + # Explicitly close the connection pool. + # Clears all existing requests and connections. 
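# Usage sketch (illustrative, not part of the vendored httpcore sources): the
# pool is normally driven as an async context manager (see `__aenter__` /
# `__aexit__` below), so `aclose()` runs automatically when the block exits.
async def _example_pool_usage() -> None:
    async with AsyncConnectionPool() as pool:
        response = await pool.request(b"GET", b"https://www.example.com/")
        print(response.status)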
+ with self._optional_thread_lock: + closing_connections = list(self._connections) + self._connections = [] + await self._close_connections(closing_connections) + + async def __aenter__(self) -> AsyncConnectionPool: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + await self.aclose() + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + with self._optional_thread_lock: + request_is_queued = [request.is_queued() for request in self._requests] + connection_is_idle = [ + connection.is_idle() for connection in self._connections + ] + + num_active_requests = request_is_queued.count(False) + num_queued_requests = request_is_queued.count(True) + num_active_connections = connection_is_idle.count(False) + num_idle_connections = connection_is_idle.count(True) + + requests_info = ( + f"Requests: {num_active_requests} active, {num_queued_requests} queued" + ) + connection_info = ( + f"Connections: {num_active_connections} active, {num_idle_connections} idle" + ) + + return f"<{class_name} [{requests_info} | {connection_info}]>" + + +class PoolByteStream: + def __init__( + self, + stream: typing.AsyncIterable[bytes], + pool_request: AsyncPoolRequest, + pool: AsyncConnectionPool, + ) -> None: + self._stream = stream + self._pool_request = pool_request + self._pool = pool + self._closed = False + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + try: + async for part in self._stream: + yield part + except BaseException as exc: + await self.aclose() + raise exc from None + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + with AsyncShieldCancellation(): + if hasattr(self._stream, "aclose"): + await self._stream.aclose() + + with self._pool._optional_thread_lock: + self._pool._requests.remove(self._pool_request) + closing = self._pool._assign_requests_to_connections() + + await self._pool._close_connections(closing) diff --git a/.venv/lib/python3.12/site-packages/httpcore/_async/http11.py b/.venv/lib/python3.12/site-packages/httpcore/_async/http11.py new file mode 100644 index 0000000..e6d6d70 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_async/http11.py @@ -0,0 +1,379 @@ +from __future__ import annotations + +import enum +import logging +import ssl +import time +import types +import typing + +import h11 + +from .._backends.base import AsyncNetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, + WriteError, + map_exceptions, +) +from .._models import Origin, Request, Response +from .._synchronization import AsyncLock, AsyncShieldCancellation +from .._trace import Trace +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.http11") + + +# A subset of `h11.Event` types supported by `_send_event` +H11SendEvent = typing.Union[ + h11.Request, + h11.Data, + h11.EndOfMessage, +] + + +class HTTPConnectionState(enum.IntEnum): + NEW = 0 + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class AsyncHTTP11Connection(AsyncConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 + + def __init__( + self, + origin: Origin, + stream: AsyncNetworkStream, + keepalive_expiry: float | None = None, + ) -> None: + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: float | None = keepalive_expiry + self._expire_at: float | None = None + self._state = 
HTTPConnectionState.NEW + self._state_lock = AsyncLock() + self._request_count = 0 + self._h11_state = h11.Connection( + our_role=h11.CLIENT, + max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE, + ) + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + async with self._state_lock: + if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE): + self._request_count += 1 + self._state = HTTPConnectionState.ACTIVE + self._expire_at = None + else: + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request} + try: + async with Trace( + "send_request_headers", logger, request, kwargs + ) as trace: + await self._send_request_headers(**kwargs) + async with Trace("send_request_body", logger, request, kwargs) as trace: + await self._send_request_body(**kwargs) + except WriteError: + # If we get a write error while we're writing the request, + # then we supress this error and move on to attempting to + # read the response. Servers can sometimes close the request + # pre-emptively and then respond with a well formed HTTP + # error response. + pass + + async with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + ( + http_version, + status, + reason_phrase, + headers, + trailing_data, + ) = await self._receive_response_headers(**kwargs) + trace.return_value = ( + http_version, + status, + reason_phrase, + headers, + ) + + network_stream = self._network_stream + + # CONNECT or Upgrade request + if (status == 101) or ( + (request.method == b"CONNECT") and (200 <= status < 300) + ): + network_stream = AsyncHTTP11UpgradeStream(network_stream, trailing_data) + + return Response( + status=status, + headers=headers, + content=HTTP11ConnectionByteStream(self, request), + extensions={ + "http_version": http_version, + "reason_phrase": reason_phrase, + "network_stream": network_stream, + }, + ) + except BaseException as exc: + with AsyncShieldCancellation(): + async with Trace("response_closed", logger, request) as trace: + await self._response_closed() + raise exc + + # Sending the request... + + async def _send_request_headers(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): + event = h11.Request( + method=request.method, + target=request.url.target, + headers=request.headers, + ) + await self._send_event(event, timeout=timeout) + + async def _send_request_body(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + assert isinstance(request.stream, typing.AsyncIterable) + async for chunk in request.stream: + event = h11.Data(data=chunk) + await self._send_event(event, timeout=timeout) + + await self._send_event(h11.EndOfMessage(), timeout=timeout) + + async def _send_event(self, event: h11.Event, timeout: float | None = None) -> None: + bytes_to_send = self._h11_state.send(event) + if bytes_to_send is not None: + await self._network_stream.write(bytes_to_send, timeout=timeout) + + # Receiving the response... 
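# Illustrative sketch (not part of the vendored httpcore sources): `_send_event`
# above hands event objects to the `h11` state machine, which serialises them
# into the raw bytes that are written to the network stream. Roughly:
import h11

client_state = h11.Connection(our_role=h11.CLIENT)
wire_bytes = client_state.send(
    h11.Request(method="GET", target="/", headers=[("Host", "example.com")])
)
# `wire_bytes` now holds the serialised request head, e.g. b"GET / HTTP/1.1\r\n..."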
+ + async def _receive_response_headers( + self, request: Request + ) -> tuple[bytes, int, bytes, list[tuple[bytes, bytes]], bytes]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + while True: + event = await self._receive_event(timeout=timeout) + if isinstance(event, h11.Response): + break + if ( + isinstance(event, h11.InformationalResponse) + and event.status_code == 101 + ): + break + + http_version = b"HTTP/" + event.http_version + + # h11 version 0.11+ supports a `raw_items` interface to get the + # raw header casing, rather than the enforced lowercase headers. + headers = event.headers.raw_items() + + trailing_data, _ = self._h11_state.trailing_data + + return http_version, event.status_code, event.reason, headers, trailing_data + + async def _receive_response_body( + self, request: Request + ) -> typing.AsyncIterator[bytes]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + while True: + event = await self._receive_event(timeout=timeout) + if isinstance(event, h11.Data): + yield bytes(event.data) + elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): + break + + async def _receive_event( + self, timeout: float | None = None + ) -> h11.Event | type[h11.PAUSED]: + while True: + with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): + event = self._h11_state.next_event() + + if event is h11.NEED_DATA: + data = await self._network_stream.read( + self.READ_NUM_BYTES, timeout=timeout + ) + + # If we feed this case through h11 we'll raise an exception like: + # + # httpcore.RemoteProtocolError: can't handle event type + # ConnectionClosed when role=SERVER and state=SEND_RESPONSE + # + # Which is accurate, but not very informative from an end-user + # perspective. Instead we handle this case distinctly and treat + # it as a ConnectError. + if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: + msg = "Server disconnected without sending a response." + raise RemoteProtocolError(msg) + + self._h11_state.receive_data(data) + else: + # mypy fails to narrow the type in the above if statement above + return event # type: ignore[return-value] + + async def _response_closed(self) -> None: + async with self._state_lock: + if ( + self._h11_state.our_state is h11.DONE + and self._h11_state.their_state is h11.DONE + ): + self._state = HTTPConnectionState.IDLE + self._h11_state.start_next_cycle() + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + else: + await self.aclose() + + # Once the connection is no longer required... + + async def aclose(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._state = HTTPConnectionState.CLOSED + await self._network_stream.aclose() + + # The AsyncConnectionInterface methods provide information about the state of + # the connection, allowing for a connection pooling implementation to + # determine when to reuse and when to close the connection... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + # Note that HTTP/1.1 connections in the "NEW" state are not treated as + # being "available". The control flow which created the connection will + # be able to send an outgoing request, but the connection will not be + # acquired from the connection pool for any other request. 
+ return self._state == HTTPConnectionState.IDLE + + def has_expired(self) -> bool: + now = time.monotonic() + keepalive_expired = self._expire_at is not None and now > self._expire_at + + # If the HTTP connection is idle but the socket is readable, then the + # only valid state is that the socket is about to return b"", indicating + # a server-initiated disconnect. + server_disconnected = ( + self._state == HTTPConnectionState.IDLE + and self._network_stream.get_extra_info("is_readable") + ) + + return keepalive_expired or server_disconnected + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/1.1, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. + + async def __aenter__(self) -> AsyncHTTP11Connection: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + await self.aclose() + + +class HTTP11ConnectionByteStream: + def __init__(self, connection: AsyncHTTP11Connection, request: Request) -> None: + self._connection = connection + self._request = request + self._closed = False + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + kwargs = {"request": self._request} + try: + async with Trace("receive_response_body", logger, self._request, kwargs): + async for chunk in self._connection._receive_response_body(**kwargs): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. 
+ with AsyncShieldCancellation(): + await self.aclose() + raise exc + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + async with Trace("response_closed", logger, self._request): + await self._connection._response_closed() + + +class AsyncHTTP11UpgradeStream(AsyncNetworkStream): + def __init__(self, stream: AsyncNetworkStream, leading_data: bytes) -> None: + self._stream = stream + self._leading_data = leading_data + + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + if self._leading_data: + buffer = self._leading_data[:max_bytes] + self._leading_data = self._leading_data[max_bytes:] + return buffer + else: + return await self._stream.read(max_bytes, timeout) + + async def write(self, buffer: bytes, timeout: float | None = None) -> None: + await self._stream.write(buffer, timeout) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: + return await self._stream.start_tls(ssl_context, server_hostname, timeout) + + def get_extra_info(self, info: str) -> typing.Any: + return self._stream.get_extra_info(info) diff --git a/.venv/lib/python3.12/site-packages/httpcore/_async/http2.py b/.venv/lib/python3.12/site-packages/httpcore/_async/http2.py new file mode 100644 index 0000000..dbd0bee --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_async/http2.py @@ -0,0 +1,592 @@ +from __future__ import annotations + +import enum +import logging +import time +import types +import typing + +import h2.config +import h2.connection +import h2.events +import h2.exceptions +import h2.settings + +from .._backends.base import AsyncNetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, +) +from .._models import Origin, Request, Response +from .._synchronization import AsyncLock, AsyncSemaphore, AsyncShieldCancellation +from .._trace import Trace +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.http2") + + +def has_body_headers(request: Request) -> bool: + return any( + k.lower() == b"content-length" or k.lower() == b"transfer-encoding" + for k, v in request.headers + ) + + +class HTTPConnectionState(enum.IntEnum): + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class AsyncHTTP2Connection(AsyncConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) + + def __init__( + self, + origin: Origin, + stream: AsyncNetworkStream, + keepalive_expiry: float | None = None, + ): + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: float | None = keepalive_expiry + self._h2_state = h2.connection.H2Connection(config=self.CONFIG) + self._state = HTTPConnectionState.IDLE + self._expire_at: float | None = None + self._request_count = 0 + self._init_lock = AsyncLock() + self._state_lock = AsyncLock() + self._read_lock = AsyncLock() + self._write_lock = AsyncLock() + self._sent_connection_init = False + self._used_all_stream_ids = False + self._connection_error = False + + # Mapping from stream ID to response stream events. + self._events: dict[ + int, + list[ + h2.events.ResponseReceived + | h2.events.DataReceived + | h2.events.StreamEnded + | h2.events.StreamReset, + ], + ] = {} + + # Connection terminated events are stored as state since + # we need to handle them for all streams. 
+ self._connection_terminated: h2.events.ConnectionTerminated | None = None + + self._read_exception: Exception | None = None + self._write_exception: Exception | None = None + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + # This cannot occur in normal operation, since the connection pool + # will only send requests on connections that handle them. + # It's in place simply for resilience as a guard against incorrect + # usage, for anyone working directly with httpcore connections. + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + async with self._state_lock: + if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): + self._request_count += 1 + self._expire_at = None + self._state = HTTPConnectionState.ACTIVE + else: + raise ConnectionNotAvailable() + + async with self._init_lock: + if not self._sent_connection_init: + try: + sci_kwargs = {"request": request} + async with Trace( + "send_connection_init", logger, request, sci_kwargs + ): + await self._send_connection_init(**sci_kwargs) + except BaseException as exc: + with AsyncShieldCancellation(): + await self.aclose() + raise exc + + self._sent_connection_init = True + + # Initially start with just 1 until the remote server provides + # its max_concurrent_streams value + self._max_streams = 1 + + local_settings_max_streams = ( + self._h2_state.local_settings.max_concurrent_streams + ) + self._max_streams_semaphore = AsyncSemaphore(local_settings_max_streams) + + for _ in range(local_settings_max_streams - self._max_streams): + await self._max_streams_semaphore.acquire() + + await self._max_streams_semaphore.acquire() + + try: + stream_id = self._h2_state.get_next_available_stream_id() + self._events[stream_id] = [] + except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover + self._used_all_stream_ids = True + self._request_count -= 1 + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request, "stream_id": stream_id} + async with Trace("send_request_headers", logger, request, kwargs): + await self._send_request_headers(request=request, stream_id=stream_id) + async with Trace("send_request_body", logger, request, kwargs): + await self._send_request_body(request=request, stream_id=stream_id) + async with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + status, headers = await self._receive_response( + request=request, stream_id=stream_id + ) + trace.return_value = (status, headers) + + return Response( + status=status, + headers=headers, + content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), + extensions={ + "http_version": b"HTTP/2", + "network_stream": self._network_stream, + "stream_id": stream_id, + }, + ) + except BaseException as exc: # noqa: PIE786 + with AsyncShieldCancellation(): + kwargs = {"stream_id": stream_id} + async with Trace("response_closed", logger, request, kwargs): + await self._response_closed(stream_id=stream_id) + + if isinstance(exc, h2.exceptions.ProtocolError): + # One case where h2 can raise a protocol error is when a + # closed frame has been seen by the state machine. + # + # This happens when one stream is reading, and encounters + # a GOAWAY event. Other flows of control may then raise + # a protocol error at any point they interact with the 'h2_state'. + # + # In this case we'll have stored the event, and should raise + # it as a RemoteProtocolError. 
+ if self._connection_terminated: # pragma: nocover + raise RemoteProtocolError(self._connection_terminated) + # If h2 raises a protocol error in some other state then we + # must somehow have made a protocol violation. + raise LocalProtocolError(exc) # pragma: nocover + + raise exc + + async def _send_connection_init(self, request: Request) -> None: + """ + The HTTP/2 connection requires some initial setup before we can start + using individual request/response streams on it. + """ + # Need to set these manually here instead of manipulating via + # __setitem__() otherwise the H2Connection will emit SettingsUpdate + # frames in addition to sending the undesired defaults. + self._h2_state.local_settings = h2.settings.Settings( + client=True, + initial_values={ + # Disable PUSH_PROMISE frames from the server since we don't do anything + # with them for now. Maybe when we support caching? + h2.settings.SettingCodes.ENABLE_PUSH: 0, + # These two are taken from h2 for safe defaults + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, + h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, + }, + ) + + # Some websites (*cough* Yahoo *cough*) balk at this setting being + # present in the initial handshake since it's not defined in the original + # RFC despite the RFC mandating ignoring settings you don't know about. + del self._h2_state.local_settings[ + h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL + ] + + self._h2_state.initiate_connection() + self._h2_state.increment_flow_control_window(2**24) + await self._write_outgoing_data(request) + + # Sending the request... + + async def _send_request_headers(self, request: Request, stream_id: int) -> None: + """ + Send the request headers to a given stream ID. + """ + end_stream = not has_body_headers(request) + + # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. + # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require + # HTTP/1.1 style headers, and map them appropriately if we end up on + # an HTTP/2 connection. + authority = [v for k, v in request.headers if k.lower() == b"host"][0] + + headers = [ + (b":method", request.method), + (b":authority", authority), + (b":scheme", request.url.scheme), + (b":path", request.url.target), + ] + [ + (k.lower(), v) + for k, v in request.headers + if k.lower() + not in ( + b"host", + b"transfer-encoding", + ) + ] + + self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) + self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id) + await self._write_outgoing_data(request) + + async def _send_request_body(self, request: Request, stream_id: int) -> None: + """ + Iterate over the request body sending it to a given stream ID. + """ + if not has_body_headers(request): + return + + assert isinstance(request.stream, typing.AsyncIterable) + async for data in request.stream: + await self._send_stream_data(request, stream_id, data) + await self._send_end_stream(request, stream_id) + + async def _send_stream_data( + self, request: Request, stream_id: int, data: bytes + ) -> None: + """ + Send a single chunk of data in one or more data frames. 
+ """ + while data: + max_flow = await self._wait_for_outgoing_flow(request, stream_id) + chunk_size = min(len(data), max_flow) + chunk, data = data[:chunk_size], data[chunk_size:] + self._h2_state.send_data(stream_id, chunk) + await self._write_outgoing_data(request) + + async def _send_end_stream(self, request: Request, stream_id: int) -> None: + """ + Send an empty data frame on on a given stream ID with the END_STREAM flag set. + """ + self._h2_state.end_stream(stream_id) + await self._write_outgoing_data(request) + + # Receiving the response... + + async def _receive_response( + self, request: Request, stream_id: int + ) -> tuple[int, list[tuple[bytes, bytes]]]: + """ + Return the response status code and headers for a given stream ID. + """ + while True: + event = await self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.ResponseReceived): + break + + status_code = 200 + headers = [] + assert event.headers is not None + for k, v in event.headers: + if k == b":status": + status_code = int(v.decode("ascii", errors="ignore")) + elif not k.startswith(b":"): + headers.append((k, v)) + + return (status_code, headers) + + async def _receive_response_body( + self, request: Request, stream_id: int + ) -> typing.AsyncIterator[bytes]: + """ + Iterator that returns the bytes of the response body for a given stream ID. + """ + while True: + event = await self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.DataReceived): + assert event.flow_controlled_length is not None + assert event.data is not None + amount = event.flow_controlled_length + self._h2_state.acknowledge_received_data(amount, stream_id) + await self._write_outgoing_data(request) + yield event.data + elif isinstance(event, h2.events.StreamEnded): + break + + async def _receive_stream_event( + self, request: Request, stream_id: int + ) -> h2.events.ResponseReceived | h2.events.DataReceived | h2.events.StreamEnded: + """ + Return the next available event for a given stream ID. + + Will read more data from the network if required. + """ + while not self._events.get(stream_id): + await self._receive_events(request, stream_id) + event = self._events[stream_id].pop(0) + if isinstance(event, h2.events.StreamReset): + raise RemoteProtocolError(event) + return event + + async def _receive_events( + self, request: Request, stream_id: int | None = None + ) -> None: + """ + Read some data from the network until we see one or more events + for a given stream ID. + """ + async with self._read_lock: + if self._connection_terminated is not None: + last_stream_id = self._connection_terminated.last_stream_id + if stream_id and last_stream_id and stream_id > last_stream_id: + self._request_count -= 1 + raise ConnectionNotAvailable() + raise RemoteProtocolError(self._connection_terminated) + + # This conditional is a bit icky. We don't want to block reading if we've + # actually got an event to return for a given stream. We need to do that + # check *within* the atomic read lock. Though it also need to be optional, + # because when we call it from `_wait_for_outgoing_flow` we *do* want to + # block until we've available flow control, event when we have events + # pending for the stream ID we're attempting to send on. 
+ if stream_id is None or not self._events.get(stream_id): + events = await self._read_incoming_data(request) + for event in events: + if isinstance(event, h2.events.RemoteSettingsChanged): + async with Trace( + "receive_remote_settings", logger, request + ) as trace: + await self._receive_remote_settings_change(event) + trace.return_value = event + + elif isinstance( + event, + ( + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ), + ): + if event.stream_id in self._events: + self._events[event.stream_id].append(event) + + elif isinstance(event, h2.events.ConnectionTerminated): + self._connection_terminated = event + + await self._write_outgoing_data(request) + + async def _receive_remote_settings_change( + self, event: h2.events.RemoteSettingsChanged + ) -> None: + max_concurrent_streams = event.changed_settings.get( + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS + ) + if max_concurrent_streams: + new_max_streams = min( + max_concurrent_streams.new_value, + self._h2_state.local_settings.max_concurrent_streams, + ) + if new_max_streams and new_max_streams != self._max_streams: + while new_max_streams > self._max_streams: + await self._max_streams_semaphore.release() + self._max_streams += 1 + while new_max_streams < self._max_streams: + await self._max_streams_semaphore.acquire() + self._max_streams -= 1 + + async def _response_closed(self, stream_id: int) -> None: + await self._max_streams_semaphore.release() + del self._events[stream_id] + async with self._state_lock: + if self._connection_terminated and not self._events: + await self.aclose() + + elif self._state == HTTPConnectionState.ACTIVE and not self._events: + self._state = HTTPConnectionState.IDLE + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + if self._used_all_stream_ids: # pragma: nocover + await self.aclose() + + async def aclose(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._h2_state.close_connection() + self._state = HTTPConnectionState.CLOSED + await self._network_stream.aclose() + + # Wrappers around network read/write operations... + + async def _read_incoming_data(self, request: Request) -> list[h2.events.Event]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + if self._read_exception is not None: + raise self._read_exception # pragma: nocover + + try: + data = await self._network_stream.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + except Exception as exc: + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future reads. + # (For example, this means that a single read timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. 
+ self._read_exception = exc + self._connection_error = True + raise exc + + events: list[h2.events.Event] = self._h2_state.receive_data(data) + + return events + + async def _write_outgoing_data(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + async with self._write_lock: + data_to_send = self._h2_state.data_to_send() + + if self._write_exception is not None: + raise self._write_exception # pragma: nocover + + try: + await self._network_stream.write(data_to_send, timeout) + except Exception as exc: # pragma: nocover + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future write. + # (For example, this means that a single write timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._write_exception = exc + self._connection_error = True + raise exc + + # Flow control... + + async def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. + https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow: int = self._h2_state.local_flow_control_window(stream_id) + max_frame_size: int = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + while flow == 0: + await self._receive_events(request) + local_flow = self._h2_state.local_flow_control_window(stream_id) + max_frame_size = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + return flow + + # Interface for connection pooling... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + return ( + self._state != HTTPConnectionState.CLOSED + and not self._connection_error + and not self._used_all_stream_ids + and not ( + self._h2_state.state_machine.state + == h2.connection.ConnectionState.CLOSED + ) + ) + + def has_expired(self) -> bool: + now = time.monotonic() + return self._expire_at is not None and now > self._expire_at + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/2, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
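# Usage sketch (illustrative, not part of the vendored httpcore sources): this
# class is rarely constructed directly. Passing `http2=True` to the public
# connection pool lets ALPN negotiation select HTTP/2, after which requests are
# multiplexed as streams over a single connection like this one.
import httpcore

async def _example_http2() -> None:
    async with httpcore.AsyncConnectionPool(http2=True) as pool:
        response = await pool.request(b"GET", b"https://www.example.com/")
        print(response.extensions.get("http_version"))  # b"HTTP/2" when negotiated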
+ + async def __aenter__(self) -> AsyncHTTP2Connection: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + await self.aclose() + + +class HTTP2ConnectionByteStream: + def __init__( + self, connection: AsyncHTTP2Connection, request: Request, stream_id: int + ) -> None: + self._connection = connection + self._request = request + self._stream_id = stream_id + self._closed = False + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + kwargs = {"request": self._request, "stream_id": self._stream_id} + try: + async with Trace("receive_response_body", logger, self._request, kwargs): + async for chunk in self._connection._receive_response_body( + request=self._request, stream_id=self._stream_id + ): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. + with AsyncShieldCancellation(): + await self.aclose() + raise exc + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + kwargs = {"stream_id": self._stream_id} + async with Trace("response_closed", logger, self._request, kwargs): + await self._connection._response_closed(stream_id=self._stream_id) diff --git a/.venv/lib/python3.12/site-packages/httpcore/_async/http_proxy.py b/.venv/lib/python3.12/site-packages/httpcore/_async/http_proxy.py new file mode 100644 index 0000000..cc9d920 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_async/http_proxy.py @@ -0,0 +1,367 @@ +from __future__ import annotations + +import base64 +import logging +import ssl +import typing + +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend +from .._exceptions import ProxyError +from .._models import ( + URL, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, +) +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .connection import AsyncHTTPConnection +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +ByteOrStr = typing.Union[bytes, str] +HeadersAsSequence = typing.Sequence[typing.Tuple[ByteOrStr, ByteOrStr]] +HeadersAsMapping = typing.Mapping[ByteOrStr, ByteOrStr] + + +logger = logging.getLogger("httpcore.proxy") + + +def merge_headers( + default_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, + override_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, +) -> list[tuple[bytes, bytes]]: + """ + Append default_headers and override_headers, de-duplicating if a key exists + in both cases. + """ + default_headers = [] if default_headers is None else list(default_headers) + override_headers = [] if override_headers is None else list(override_headers) + has_override = set(key.lower() for key, value in override_headers) + default_headers = [ + (key, value) + for key, value in default_headers + if key.lower() not in has_override + ] + return default_headers + override_headers + + +class AsyncHTTPProxy(AsyncConnectionPool): # pragma: nocover + """ + A connection pool that sends requests via an HTTP proxy. 
+ """ + + def __init__( + self, + proxy_url: URL | bytes | str, + proxy_auth: tuple[bytes | str, bytes | str] | None = None, + proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None, + ssl_context: ssl.SSLContext | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: AsyncNetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"http://127.0.0.1:8080/"`. + proxy_auth: Any proxy authentication as a two-tuple of + (username, password). May be either bytes or ascii-only str. + proxy_headers: Any HTTP headers to use for the proxy requests. + For example `{"Proxy-Authorization": "Basic :"}`. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + local_address: Local address to connect from. Can also be used to + connect using a particular address family. Using + `local_address="0.0.0.0"` will connect using an `AF_INET` address + (IPv4), while using `local_address="::"` will connect using an + `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. 
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + local_address=local_address, + uds=uds, + socket_options=socket_options, + ) + + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if ( + self._proxy_url.scheme == b"http" and proxy_ssl_context is not None + ): # pragma: no cover + raise RuntimeError( + "The `proxy_ssl_context` argument is not allowed for the http scheme" + ) + + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + if proxy_auth is not None: + username = enforce_bytes(proxy_auth[0], name="proxy_auth") + password = enforce_bytes(proxy_auth[1], name="proxy_auth") + userpass = username + b":" + password + authorization = b"Basic " + base64.b64encode(userpass) + self._proxy_headers = [ + (b"Proxy-Authorization", authorization) + ] + self._proxy_headers + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + if origin.scheme == b"http": + return AsyncForwardHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + keepalive_expiry=self._keepalive_expiry, + network_backend=self._network_backend, + proxy_ssl_context=self._proxy_ssl_context, + ) + return AsyncTunnelHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + ssl_context=self._ssl_context, + proxy_ssl_context=self._proxy_ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class AsyncForwardHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None, + keepalive_expiry: float | None = None, + network_backend: AsyncNetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + ) -> None: + self._connection = AsyncHTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._remote_origin = remote_origin + + async def handle_async_request(self, request: Request) -> Response: + headers = merge_headers(self._proxy_headers, request.headers) + url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=bytes(request.url), + ) + proxy_request = Request( + method=request.method, + url=url, + headers=headers, + content=request.stream, + extensions=request.extensions, + ) + return await self._connection.handle_async_request(proxy_request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + await self._connection.aclose() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return 
self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + +class AsyncTunnelHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + ssl_context: ssl.SSLContext | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + proxy_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + network_backend: AsyncNetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + self._connection: AsyncConnectionInterface = AsyncHTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._connect_lock = AsyncLock() + self._connected = False + + async def handle_async_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("connect", None) + + async with self._connect_lock: + if not self._connected: + target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) + + connect_url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=target, + ) + connect_headers = merge_headers( + [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers + ) + connect_request = Request( + method=b"CONNECT", + url=connect_url, + headers=connect_headers, + extensions=request.extensions, + ) + connect_response = await self._connection.handle_async_request( + connect_request + ) + + if connect_response.status < 200 or connect_response.status > 299: + reason_bytes = connect_response.extensions.get("reason_phrase", b"") + reason_str = reason_bytes.decode("ascii", errors="ignore") + msg = "%d %s" % (connect_response.status, reason_str) + await self._connection.aclose() + raise ProxyError(msg) + + stream = connect_response.extensions["network_stream"] + + # Upgrade the stream to SSL + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + 
origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + + self._connected = True + return await self._connection.handle_async_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + await self._connection.aclose() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/.venv/lib/python3.12/site-packages/httpcore/_async/interfaces.py b/.venv/lib/python3.12/site-packages/httpcore/_async/interfaces.py new file mode 100644 index 0000000..361583b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_async/interfaces.py @@ -0,0 +1,137 @@ +from __future__ import annotations + +import contextlib +import typing + +from .._models import ( + URL, + Extensions, + HeaderTypes, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, + include_request_headers, +) + + +class AsyncRequestInterface: + async def request( + self, + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes | typing.AsyncIterator[bytes] | None = None, + extensions: Extensions | None = None, + ) -> Response: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = await self.handle_async_request(request) + try: + await response.aread() + finally: + await response.aclose() + return response + + @contextlib.asynccontextmanager + async def stream( + self, + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes | typing.AsyncIterator[bytes] | None = None, + extensions: Extensions | None = None, + ) -> typing.AsyncIterator[Response]: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. 
+ headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = await self.handle_async_request(request) + try: + yield response + finally: + await response.aclose() + + async def handle_async_request(self, request: Request) -> Response: + raise NotImplementedError() # pragma: nocover + + +class AsyncConnectionInterface(AsyncRequestInterface): + async def aclose(self) -> None: + raise NotImplementedError() # pragma: nocover + + def info(self) -> str: + raise NotImplementedError() # pragma: nocover + + def can_handle_request(self, origin: Origin) -> bool: + raise NotImplementedError() # pragma: nocover + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an + outgoing request. + + An HTTP/1.1 connection will only be available if it is currently idle. + + An HTTP/2 connection will be available so long as the stream ID space is + not yet exhausted, and the connection is not in an error state. + + While the connection is being established we may not yet know if it is going + to result in an HTTP/1.1 or HTTP/2 connection. The connection should be + treated as being available, but might ultimately raise `NewConnectionRequired` + required exceptions if multiple requests are attempted over a connection + that ends up being established as HTTP/1.1. + """ + raise NotImplementedError() # pragma: nocover + + def has_expired(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + + This either means that the connection is idle and it has passed the + expiry time on its keep-alive, or that server has sent an EOF. + """ + raise NotImplementedError() # pragma: nocover + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + raise NotImplementedError() # pragma: nocover + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + + Used when a response is closed to determine if the connection may be + returned to the connection pool or not. 
+ """ + raise NotImplementedError() # pragma: nocover diff --git a/.venv/lib/python3.12/site-packages/httpcore/_async/socks_proxy.py b/.venv/lib/python3.12/site-packages/httpcore/_async/socks_proxy.py new file mode 100644 index 0000000..b363f55 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_async/socks_proxy.py @@ -0,0 +1,341 @@ +from __future__ import annotations + +import logging +import ssl + +import socksio + +from .._backends.auto import AutoBackend +from .._backends.base import AsyncNetworkBackend, AsyncNetworkStream +from .._exceptions import ConnectionNotAvailable, ProxyError +from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.socks") + + +AUTH_METHODS = { + b"\x00": "NO AUTHENTICATION REQUIRED", + b"\x01": "GSSAPI", + b"\x02": "USERNAME/PASSWORD", + b"\xff": "NO ACCEPTABLE METHODS", +} + +REPLY_CODES = { + b"\x00": "Succeeded", + b"\x01": "General SOCKS server failure", + b"\x02": "Connection not allowed by ruleset", + b"\x03": "Network unreachable", + b"\x04": "Host unreachable", + b"\x05": "Connection refused", + b"\x06": "TTL expired", + b"\x07": "Command not supported", + b"\x08": "Address type not supported", +} + + +async def _init_socks5_connection( + stream: AsyncNetworkStream, + *, + host: bytes, + port: int, + auth: tuple[bytes, bytes] | None = None, +) -> None: + conn = socksio.socks5.SOCKS5Connection() + + # Auth method request + auth_method = ( + socksio.socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED + if auth is None + else socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD + ) + conn.send(socksio.socks5.SOCKS5AuthMethodsRequest([auth_method])) + outgoing_bytes = conn.data_to_send() + await stream.write(outgoing_bytes) + + # Auth method response + incoming_bytes = await stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socksio.socks5.SOCKS5AuthReply) + if response.method != auth_method: + requested = AUTH_METHODS.get(auth_method, "UNKNOWN") + responded = AUTH_METHODS.get(response.method, "UNKNOWN") + raise ProxyError( + f"Requested {requested} from proxy server, but got {responded}." 
+        )
+
+    if response.method == socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD:
+        # Username/password request
+        assert auth is not None
+        username, password = auth
+        conn.send(socksio.socks5.SOCKS5UsernamePasswordRequest(username, password))
+        outgoing_bytes = conn.data_to_send()
+        await stream.write(outgoing_bytes)
+
+        # Username/password response
+        incoming_bytes = await stream.read(max_bytes=4096)
+        response = conn.receive_data(incoming_bytes)
+        assert isinstance(response, socksio.socks5.SOCKS5UsernamePasswordReply)
+        if not response.success:
+            raise ProxyError("Invalid username/password")
+
+    # Connect request
+    conn.send(
+        socksio.socks5.SOCKS5CommandRequest.from_address(
+            socksio.socks5.SOCKS5Command.CONNECT, (host, port)
+        )
+    )
+    outgoing_bytes = conn.data_to_send()
+    await stream.write(outgoing_bytes)
+
+    # Connect response
+    incoming_bytes = await stream.read(max_bytes=4096)
+    response = conn.receive_data(incoming_bytes)
+    assert isinstance(response, socksio.socks5.SOCKS5Reply)
+    if response.reply_code != socksio.socks5.SOCKS5ReplyCode.SUCCEEDED:
+        reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN")
+        raise ProxyError(f"Proxy Server could not connect: {reply_code}.")
+
+
+class AsyncSOCKSProxy(AsyncConnectionPool):  # pragma: nocover
+    """
+    A connection pool that sends requests via a SOCKS5 proxy.
+    """
+
+    def __init__(
+        self,
+        proxy_url: URL | bytes | str,
+        proxy_auth: tuple[bytes | str, bytes | str] | None = None,
+        ssl_context: ssl.SSLContext | None = None,
+        max_connections: int | None = 10,
+        max_keepalive_connections: int | None = None,
+        keepalive_expiry: float | None = None,
+        http1: bool = True,
+        http2: bool = False,
+        retries: int = 0,
+        network_backend: AsyncNetworkBackend | None = None,
+    ) -> None:
+        """
+        A connection pool for making HTTP requests.
+
+        Parameters:
+            proxy_url: The URL to use when connecting to the proxy server.
+                For example `"http://127.0.0.1:8080/"`.
+            ssl_context: An SSL context to use for verifying connections.
+                If not specified, the default `httpcore.default_ssl_context()`
+                will be used.
+            max_connections: The maximum number of concurrent HTTP connections that
+                the pool should allow. Any attempt to send a request on a pool that
+                would exceed this amount will block until a connection is available.
+            max_keepalive_connections: The maximum number of idle HTTP connections
+                that will be maintained in the pool.
+            keepalive_expiry: The duration in seconds that an idle HTTP connection
+                may be maintained for before being expired from the pool.
+            http1: A boolean indicating if HTTP/1.1 requests should be supported
+                by the connection pool. Defaults to True.
+            http2: A boolean indicating if HTTP/2 requests should be supported by
+                the connection pool. Defaults to False.
+            retries: The maximum number of retries when trying to establish
+                a connection.
+            local_address: Local address to connect from. Can also be used to
+                connect using a particular address family. Using
+                `local_address="0.0.0.0"` will connect using an `AF_INET` address
+                (IPv4), while using `local_address="::"` will connect using an
+                `AF_INET6` address (IPv6).
+            uds: Path to a Unix Domain Socket to use instead of TCP sockets.
+            network_backend: A backend instance to use for handling network I/O.
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + ) + self._ssl_context = ssl_context + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if proxy_auth is not None: + username, password = proxy_auth + username_bytes = enforce_bytes(username, name="proxy_auth") + password_bytes = enforce_bytes(password, name="proxy_auth") + self._proxy_auth: tuple[bytes, bytes] | None = ( + username_bytes, + password_bytes, + ) + else: + self._proxy_auth = None + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + return AsyncSocks5Connection( + proxy_origin=self._proxy_url.origin, + remote_origin=origin, + proxy_auth=self._proxy_auth, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class AsyncSocks5Connection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_auth: tuple[bytes, bytes] | None = None, + ssl_context: ssl.SSLContext | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + network_backend: AsyncNetworkBackend | None = None, + ) -> None: + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._proxy_auth = proxy_auth + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + + self._network_backend: AsyncNetworkBackend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._connect_lock = AsyncLock() + self._connection: AsyncConnectionInterface | None = None + self._connect_failed = False + + async def handle_async_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + async with self._connect_lock: + if self._connection is None: + try: + # Connect to the proxy + kwargs = { + "host": self._proxy_origin.host.decode("ascii"), + "port": self._proxy_origin.port, + "timeout": timeout, + } + async with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = await self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + + # Connect to the remote host using socks5 + kwargs = { + "stream": stream, + "host": self._remote_origin.host.decode("ascii"), + "port": self._remote_origin.port, + "auth": self._proxy_auth, + } + async with Trace( + "setup_socks5_connection", logger, request, kwargs + ) as trace: + await _init_socks5_connection(**kwargs) + trace.return_value = stream + + # Upgrade the stream to SSL + if self._remote_origin.scheme == b"https": + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ( + ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ) + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = 
stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or ( + self._http2 and not self._http1 + ): # pragma: nocover + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except Exception as exc: + self._connect_failed = True + raise exc + elif not self._connection.is_available(): # pragma: nocover + raise ConnectionNotAvailable() + + return await self._connection.handle_async_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + if self._connection is not None: + await self._connection.aclose() + + def is_available(self) -> bool: + if self._connection is None: # pragma: nocover + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._remote_origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: # pragma: nocover + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/.venv/lib/python3.12/site-packages/httpcore/_backends/__init__.py b/.venv/lib/python3.12/site-packages/httpcore/_backends/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/httpcore/_backends/anyio.py b/.venv/lib/python3.12/site-packages/httpcore/_backends/anyio.py new file mode 100644 index 0000000..a140095 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_backends/anyio.py @@ -0,0 +1,146 @@ +from __future__ import annotations + +import ssl +import typing + +import anyio + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._utils import is_socket_readable +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class AnyIOStream(AsyncNetworkStream): + def __init__(self, stream: anyio.abc.ByteStream) -> None: + self._stream = stream + + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + exc_map = { + TimeoutError: ReadTimeout, + anyio.BrokenResourceError: ReadError, + anyio.ClosedResourceError: ReadError, + anyio.EndOfStream: ReadError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + try: + return await self._stream.receive(max_bytes=max_bytes) + except anyio.EndOfStream: # pragma: nocover + return b"" + + 
async def write(self, buffer: bytes, timeout: float | None = None) -> None: + if not buffer: + return + + exc_map = { + TimeoutError: WriteTimeout, + anyio.BrokenResourceError: WriteError, + anyio.ClosedResourceError: WriteError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + await self._stream.send(item=buffer) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: + exc_map = { + TimeoutError: ConnectTimeout, + anyio.BrokenResourceError: ConnectError, + anyio.EndOfStream: ConnectError, + ssl.SSLError: ConnectError, + } + with map_exceptions(exc_map): + try: + with anyio.fail_after(timeout): + ssl_stream = await anyio.streams.tls.TLSStream.wrap( + self._stream, + ssl_context=ssl_context, + hostname=server_hostname, + standard_compatible=False, + server_side=False, + ) + except Exception as exc: # pragma: nocover + await self.aclose() + raise exc + return AnyIOStream(ssl_stream) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object": + return self._stream.extra(anyio.streams.tls.TLSAttribute.ssl_object, None) + if info == "client_addr": + return self._stream.extra(anyio.abc.SocketAttribute.local_address, None) + if info == "server_addr": + return self._stream.extra(anyio.abc.SocketAttribute.remote_address, None) + if info == "socket": + return self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) + if info == "is_readable": + sock = self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) + return is_socket_readable(sock) + return None + + +class AnyIOBackend(AsyncNetworkBackend): + async def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] + exc_map = { + TimeoutError: ConnectTimeout, + OSError: ConnectError, + anyio.BrokenResourceError: ConnectError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + stream: anyio.abc.ByteStream = await anyio.connect_tcp( + remote_host=host, + remote_port=port, + local_host=local_address, + ) + # By default TCP sockets opened in `asyncio` include TCP_NODELAY. 
+ for option in socket_options: + stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return AnyIOStream(stream) + + async def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] + exc_map = { + TimeoutError: ConnectTimeout, + OSError: ConnectError, + anyio.BrokenResourceError: ConnectError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + stream: anyio.abc.ByteStream = await anyio.connect_unix(path) + for option in socket_options: + stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return AnyIOStream(stream) + + async def sleep(self, seconds: float) -> None: + await anyio.sleep(seconds) # pragma: nocover diff --git a/.venv/lib/python3.12/site-packages/httpcore/_backends/auto.py b/.venv/lib/python3.12/site-packages/httpcore/_backends/auto.py new file mode 100644 index 0000000..49f0e69 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_backends/auto.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +import typing + +from .._synchronization import current_async_library +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class AutoBackend(AsyncNetworkBackend): + async def _init_backend(self) -> None: + if not (hasattr(self, "_backend")): + backend = current_async_library() + if backend == "trio": + from .trio import TrioBackend + + self._backend: AsyncNetworkBackend = TrioBackend() + else: + from .anyio import AnyIOBackend + + self._backend = AnyIOBackend() + + async def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + await self._init_backend() + return await self._backend.connect_tcp( + host, + port, + timeout=timeout, + local_address=local_address, + socket_options=socket_options, + ) + + async def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: # pragma: nocover + await self._init_backend() + return await self._backend.connect_unix_socket( + path, timeout=timeout, socket_options=socket_options + ) + + async def sleep(self, seconds: float) -> None: # pragma: nocover + await self._init_backend() + return await self._backend.sleep(seconds) diff --git a/.venv/lib/python3.12/site-packages/httpcore/_backends/base.py b/.venv/lib/python3.12/site-packages/httpcore/_backends/base.py new file mode 100644 index 0000000..cf55c8b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_backends/base.py @@ -0,0 +1,101 @@ +from __future__ import annotations + +import ssl +import time +import typing + +SOCKET_OPTION = typing.Union[ + typing.Tuple[int, int, int], + typing.Tuple[int, int, typing.Union[bytes, bytearray]], + typing.Tuple[int, int, None, int], +] + + +class NetworkStream: + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + raise NotImplementedError() # pragma: nocover + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + raise NotImplementedError() # pragma: nocover + + def close(self) -> None: + raise NotImplementedError() # pragma: nocover + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: 
float | None = None, + ) -> NetworkStream: + raise NotImplementedError() # pragma: nocover + + def get_extra_info(self, info: str) -> typing.Any: + return None # pragma: nocover + + +class NetworkBackend: + def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: + raise NotImplementedError() # pragma: nocover + + def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: + raise NotImplementedError() # pragma: nocover + + def sleep(self, seconds: float) -> None: + time.sleep(seconds) # pragma: nocover + + +class AsyncNetworkStream: + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + raise NotImplementedError() # pragma: nocover + + async def write(self, buffer: bytes, timeout: float | None = None) -> None: + raise NotImplementedError() # pragma: nocover + + async def aclose(self) -> None: + raise NotImplementedError() # pragma: nocover + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover + + def get_extra_info(self, info: str) -> typing.Any: + return None # pragma: nocover + + +class AsyncNetworkBackend: + async def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover + + async def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover + + async def sleep(self, seconds: float) -> None: + raise NotImplementedError() # pragma: nocover diff --git a/.venv/lib/python3.12/site-packages/httpcore/_backends/mock.py b/.venv/lib/python3.12/site-packages/httpcore/_backends/mock.py new file mode 100644 index 0000000..9b6edca --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_backends/mock.py @@ -0,0 +1,143 @@ +from __future__ import annotations + +import ssl +import typing + +from .._exceptions import ReadError +from .base import ( + SOCKET_OPTION, + AsyncNetworkBackend, + AsyncNetworkStream, + NetworkBackend, + NetworkStream, +) + + +class MockSSLObject: + def __init__(self, http2: bool): + self._http2 = http2 + + def selected_alpn_protocol(self) -> str: + return "h2" if self._http2 else "http/1.1" + + +class MockStream(NetworkStream): + def __init__(self, buffer: list[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + self._closed = False + + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + if self._closed: + raise ReadError("Connection closed") + if not self._buffer: + return b"" + return self._buffer.pop(0) + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + pass + + def close(self) -> None: + self._closed = True + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: + return self + + def get_extra_info(self, info: str) -> typing.Any: + return MockSSLObject(http2=self._http2) if info == "ssl_object" else None + + def 
__repr__(self) -> str: + return "" + + +class MockBackend(NetworkBackend): + def __init__(self, buffer: list[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + + def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: + return MockStream(list(self._buffer), http2=self._http2) + + def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: + return MockStream(list(self._buffer), http2=self._http2) + + def sleep(self, seconds: float) -> None: + pass + + +class AsyncMockStream(AsyncNetworkStream): + def __init__(self, buffer: list[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + self._closed = False + + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + if self._closed: + raise ReadError("Connection closed") + if not self._buffer: + return b"" + return self._buffer.pop(0) + + async def write(self, buffer: bytes, timeout: float | None = None) -> None: + pass + + async def aclose(self) -> None: + self._closed = True + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: + return self + + def get_extra_info(self, info: str) -> typing.Any: + return MockSSLObject(http2=self._http2) if info == "ssl_object" else None + + def __repr__(self) -> str: + return "" + + +class AsyncMockBackend(AsyncNetworkBackend): + def __init__(self, buffer: list[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + + async def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + return AsyncMockStream(list(self._buffer), http2=self._http2) + + async def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + return AsyncMockStream(list(self._buffer), http2=self._http2) + + async def sleep(self, seconds: float) -> None: + pass diff --git a/.venv/lib/python3.12/site-packages/httpcore/_backends/sync.py b/.venv/lib/python3.12/site-packages/httpcore/_backends/sync.py new file mode 100644 index 0000000..4018a09 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_backends/sync.py @@ -0,0 +1,241 @@ +from __future__ import annotations + +import functools +import socket +import ssl +import sys +import typing + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ExceptionMapping, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._utils import is_socket_readable +from .base import SOCKET_OPTION, NetworkBackend, NetworkStream + + +class TLSinTLSStream(NetworkStream): # pragma: no cover + """ + Because the standard `SSLContext.wrap_socket` method does + not work for `SSLSocket` objects, we need this class + to implement TLS stream using an underlying `SSLObject` + instance in order to support TLS on top of TLS. 
+ """ + + # Defined in RFC 8449 + TLS_RECORD_SIZE = 16384 + + def __init__( + self, + sock: socket.socket, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ): + self._sock = sock + self._incoming = ssl.MemoryBIO() + self._outgoing = ssl.MemoryBIO() + + self.ssl_obj = ssl_context.wrap_bio( + incoming=self._incoming, + outgoing=self._outgoing, + server_hostname=server_hostname, + ) + + self._sock.settimeout(timeout) + self._perform_io(self.ssl_obj.do_handshake) + + def _perform_io( + self, + func: typing.Callable[..., typing.Any], + ) -> typing.Any: + ret = None + + while True: + errno = None + try: + ret = func() + except (ssl.SSLWantReadError, ssl.SSLWantWriteError) as e: + errno = e.errno + + self._sock.sendall(self._outgoing.read()) + + if errno == ssl.SSL_ERROR_WANT_READ: + buf = self._sock.recv(self.TLS_RECORD_SIZE) + + if buf: + self._incoming.write(buf) + else: + self._incoming.write_eof() + if errno is None: + return ret + + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + return typing.cast( + bytes, self._perform_io(functools.partial(self.ssl_obj.read, max_bytes)) + ) + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + while buffer: + nsent = self._perform_io(functools.partial(self.ssl_obj.write, buffer)) + buffer = buffer[nsent:] + + def close(self) -> None: + self._sock.close() + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: + raise NotImplementedError() + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object": + return self.ssl_obj + if info == "client_addr": + return self._sock.getsockname() + if info == "server_addr": + return self._sock.getpeername() + if info == "socket": + return self._sock + if info == "is_readable": + return is_socket_readable(self._sock) + return None + + +class SyncStream(NetworkStream): + def __init__(self, sock: socket.socket) -> None: + self._sock = sock + + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + return self._sock.recv(max_bytes) + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + if not buffer: + return + + exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} + with map_exceptions(exc_map): + while buffer: + self._sock.settimeout(timeout) + n = self._sock.send(buffer) + buffer = buffer[n:] + + def close(self) -> None: + self._sock.close() + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + with map_exceptions(exc_map): + try: + if isinstance(self._sock, ssl.SSLSocket): # pragma: no cover + # If the underlying socket has already been upgraded + # to the TLS layer (i.e. is an instance of SSLSocket), + # we need some additional smarts to support TLS-in-TLS. 
+ return TLSinTLSStream( + self._sock, ssl_context, server_hostname, timeout + ) + else: + self._sock.settimeout(timeout) + sock = ssl_context.wrap_socket( + self._sock, server_hostname=server_hostname + ) + except Exception as exc: # pragma: nocover + self.close() + raise exc + return SyncStream(sock) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object" and isinstance(self._sock, ssl.SSLSocket): + return self._sock._sslobj # type: ignore + if info == "client_addr": + return self._sock.getsockname() + if info == "server_addr": + return self._sock.getpeername() + if info == "socket": + return self._sock + if info == "is_readable": + return is_socket_readable(self._sock) + return None + + +class SyncBackend(NetworkBackend): + def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: + # Note that we automatically include `TCP_NODELAY` + # in addition to any other custom socket options. + if socket_options is None: + socket_options = [] # pragma: no cover + address = (host, port) + source_address = None if local_address is None else (local_address, 0) + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + + with map_exceptions(exc_map): + sock = socket.create_connection( + address, + timeout, + source_address=source_address, + ) + for option in socket_options: + sock.setsockopt(*option) # pragma: no cover + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + return SyncStream(sock) + + def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: # pragma: nocover + if sys.platform == "win32": + raise RuntimeError( + "Attempted to connect to a UNIX socket on a Windows system." 
+ ) + if socket_options is None: + socket_options = [] + + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + with map_exceptions(exc_map): + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + for option in socket_options: + sock.setsockopt(*option) + sock.settimeout(timeout) + sock.connect(path) + return SyncStream(sock) diff --git a/.venv/lib/python3.12/site-packages/httpcore/_backends/trio.py b/.venv/lib/python3.12/site-packages/httpcore/_backends/trio.py new file mode 100644 index 0000000..6f53f5f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_backends/trio.py @@ -0,0 +1,159 @@ +from __future__ import annotations + +import ssl +import typing + +import trio + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ExceptionMapping, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class TrioStream(AsyncNetworkStream): + def __init__(self, stream: trio.abc.Stream) -> None: + self._stream = stream + + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ReadTimeout, + trio.BrokenResourceError: ReadError, + trio.ClosedResourceError: ReadError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + data: bytes = await self._stream.receive_some(max_bytes=max_bytes) + return data + + async def write(self, buffer: bytes, timeout: float | None = None) -> None: + if not buffer: + return + + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: WriteTimeout, + trio.BrokenResourceError: WriteError, + trio.ClosedResourceError: WriteError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + await self._stream.send_all(data=buffer) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + } + ssl_stream = trio.SSLStream( + self._stream, + ssl_context=ssl_context, + server_hostname=server_hostname, + https_compatible=True, + server_side=False, + ) + with map_exceptions(exc_map): + try: + with trio.fail_after(timeout_or_inf): + await ssl_stream.do_handshake() + except Exception as exc: # pragma: nocover + await self.aclose() + raise exc + return TrioStream(ssl_stream) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object" and isinstance(self._stream, trio.SSLStream): + # Type checkers cannot see `_ssl_object` attribute because trio._ssl.SSLStream uses __getattr__/__setattr__. 
+ # Tracked at https://github.com/python-trio/trio/issues/542 + return self._stream._ssl_object # type: ignore[attr-defined] + if info == "client_addr": + return self._get_socket_stream().socket.getsockname() + if info == "server_addr": + return self._get_socket_stream().socket.getpeername() + if info == "socket": + stream = self._stream + while isinstance(stream, trio.SSLStream): + stream = stream.transport_stream + assert isinstance(stream, trio.SocketStream) + return stream.socket + if info == "is_readable": + socket = self.get_extra_info("socket") + return socket.is_readable() + return None + + def _get_socket_stream(self) -> trio.SocketStream: + stream = self._stream + while isinstance(stream, trio.SSLStream): + stream = stream.transport_stream + assert isinstance(stream, trio.SocketStream) + return stream + + +class TrioBackend(AsyncNetworkBackend): + async def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + # By default for TCP sockets, trio enables TCP_NODELAY. + # https://trio.readthedocs.io/en/stable/reference-io.html#trio.SocketStream + if socket_options is None: + socket_options = [] # pragma: no cover + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + OSError: ConnectError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + stream: trio.abc.Stream = await trio.open_tcp_stream( + host=host, port=port, local_address=local_address + ) + for option in socket_options: + stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return TrioStream(stream) + + async def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + OSError: ConnectError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + stream: trio.abc.Stream = await trio.open_unix_socket(path) + for option in socket_options: + stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return TrioStream(stream) + + async def sleep(self, seconds: float) -> None: + await trio.sleep(seconds) # pragma: nocover diff --git a/.venv/lib/python3.12/site-packages/httpcore/_exceptions.py b/.venv/lib/python3.12/site-packages/httpcore/_exceptions.py new file mode 100644 index 0000000..bc28d44 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_exceptions.py @@ -0,0 +1,81 @@ +import contextlib +import typing + +ExceptionMapping = typing.Mapping[typing.Type[Exception], typing.Type[Exception]] + + +@contextlib.contextmanager +def map_exceptions(map: ExceptionMapping) -> typing.Iterator[None]: + try: + yield + except Exception as exc: # noqa: PIE786 + for from_exc, to_exc in map.items(): + if isinstance(exc, from_exc): + raise to_exc(exc) from exc + raise # pragma: nocover + + +class ConnectionNotAvailable(Exception): + pass + + +class ProxyError(Exception): + pass + + +class UnsupportedProtocol(Exception): + pass + + +class ProtocolError(Exception): + pass + + +class RemoteProtocolError(ProtocolError): + pass 
+ + +class LocalProtocolError(ProtocolError): + pass + + +# Timeout errors + + +class TimeoutException(Exception): + pass + + +class PoolTimeout(TimeoutException): + pass + + +class ConnectTimeout(TimeoutException): + pass + + +class ReadTimeout(TimeoutException): + pass + + +class WriteTimeout(TimeoutException): + pass + + +# Network errors + + +class NetworkError(Exception): + pass + + +class ConnectError(NetworkError): + pass + + +class ReadError(NetworkError): + pass + + +class WriteError(NetworkError): + pass diff --git a/.venv/lib/python3.12/site-packages/httpcore/_models.py b/.venv/lib/python3.12/site-packages/httpcore/_models.py new file mode 100644 index 0000000..8a65f13 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_models.py @@ -0,0 +1,516 @@ +from __future__ import annotations + +import base64 +import ssl +import typing +import urllib.parse + +# Functions for typechecking... + + +ByteOrStr = typing.Union[bytes, str] +HeadersAsSequence = typing.Sequence[typing.Tuple[ByteOrStr, ByteOrStr]] +HeadersAsMapping = typing.Mapping[ByteOrStr, ByteOrStr] +HeaderTypes = typing.Union[HeadersAsSequence, HeadersAsMapping, None] + +Extensions = typing.MutableMapping[str, typing.Any] + + +def enforce_bytes(value: bytes | str, *, name: str) -> bytes: + """ + Any arguments that are ultimately represented as bytes can be specified + either as bytes or as strings. + + However we enforce that any string arguments must only contain characters in + the plain ASCII range. chr(0)...chr(127). If you need to use characters + outside that range then be precise, and use a byte-wise argument. + """ + if isinstance(value, str): + try: + return value.encode("ascii") + except UnicodeEncodeError: + raise TypeError(f"{name} strings may not include unicode characters.") + elif isinstance(value, bytes): + return value + + seen_type = type(value).__name__ + raise TypeError(f"{name} must be bytes or str, but got {seen_type}.") + + +def enforce_url(value: URL | bytes | str, *, name: str) -> URL: + """ + Type check for URL parameters. + """ + if isinstance(value, (bytes, str)): + return URL(value) + elif isinstance(value, URL): + return value + + seen_type = type(value).__name__ + raise TypeError(f"{name} must be a URL, bytes, or str, but got {seen_type}.") + + +def enforce_headers( + value: HeadersAsMapping | HeadersAsSequence | None = None, *, name: str +) -> list[tuple[bytes, bytes]]: + """ + Convienence function that ensure all items in request or response headers + are either bytes or strings in the plain ASCII range. + """ + if value is None: + return [] + elif isinstance(value, typing.Mapping): + return [ + ( + enforce_bytes(k, name="header name"), + enforce_bytes(v, name="header value"), + ) + for k, v in value.items() + ] + elif isinstance(value, typing.Sequence): + return [ + ( + enforce_bytes(k, name="header name"), + enforce_bytes(v, name="header value"), + ) + for k, v in value + ] + + seen_type = type(value).__name__ + raise TypeError( + f"{name} must be a mapping or sequence of two-tuples, but got {seen_type}." 
+ ) + + +def enforce_stream( + value: bytes | typing.Iterable[bytes] | typing.AsyncIterable[bytes] | None, + *, + name: str, +) -> typing.Iterable[bytes] | typing.AsyncIterable[bytes]: + if value is None: + return ByteStream(b"") + elif isinstance(value, bytes): + return ByteStream(value) + return value + + +# * https://tools.ietf.org/html/rfc3986#section-3.2.3 +# * https://url.spec.whatwg.org/#url-miscellaneous +# * https://url.spec.whatwg.org/#scheme-state +DEFAULT_PORTS = { + b"ftp": 21, + b"http": 80, + b"https": 443, + b"ws": 80, + b"wss": 443, +} + + +def include_request_headers( + headers: list[tuple[bytes, bytes]], + *, + url: "URL", + content: None | bytes | typing.Iterable[bytes] | typing.AsyncIterable[bytes], +) -> list[tuple[bytes, bytes]]: + headers_set = set(k.lower() for k, v in headers) + + if b"host" not in headers_set: + default_port = DEFAULT_PORTS.get(url.scheme) + if url.port is None or url.port == default_port: + header_value = url.host + else: + header_value = b"%b:%d" % (url.host, url.port) + headers = [(b"Host", header_value)] + headers + + if ( + content is not None + and b"content-length" not in headers_set + and b"transfer-encoding" not in headers_set + ): + if isinstance(content, bytes): + content_length = str(len(content)).encode("ascii") + headers += [(b"Content-Length", content_length)] + else: + headers += [(b"Transfer-Encoding", b"chunked")] # pragma: nocover + + return headers + + +# Interfaces for byte streams... + + +class ByteStream: + """ + A container for non-streaming content, and that supports both sync and async + stream iteration. + """ + + def __init__(self, content: bytes) -> None: + self._content = content + + def __iter__(self) -> typing.Iterator[bytes]: + yield self._content + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + yield self._content + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{len(self._content)} bytes]>" + + +class Origin: + def __init__(self, scheme: bytes, host: bytes, port: int) -> None: + self.scheme = scheme + self.host = host + self.port = port + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, Origin) + and self.scheme == other.scheme + and self.host == other.host + and self.port == other.port + ) + + def __str__(self) -> str: + scheme = self.scheme.decode("ascii") + host = self.host.decode("ascii") + port = str(self.port) + return f"{scheme}://{host}:{port}" + + +class URL: + """ + Represents the URL against which an HTTP request may be made. + + The URL may either be specified as a plain string, for convienence: + + ```python + url = httpcore.URL("https://www.example.com/") + ``` + + Or be constructed with explicitily pre-parsed components: + + ```python + url = httpcore.URL(scheme=b'https', host=b'www.example.com', port=None, target=b'/') + ``` + + Using this second more explicit style allows integrations that are using + `httpcore` to pass through URLs that have already been parsed in order to use + libraries such as `rfc-3986` rather than relying on the stdlib. It also ensures + that URL parsing is treated identically at both the networking level and at any + higher layers of abstraction. + + The four components are important here, as they allow the URL to be precisely + specified in a pre-parsed format. They also allow certain types of request to + be created that could not otherwise be expressed. + + For example, an HTTP request to `http://www.example.com/` forwarded via a proxy + at `http://localhost:8080`... 
+ + ```python + # Constructs an HTTP request with a complete URL as the target: + # GET https://www.example.com/ HTTP/1.1 + url = httpcore.URL( + scheme=b'http', + host=b'localhost', + port=8080, + target=b'https://www.example.com/' + ) + request = httpcore.Request( + method="GET", + url=url + ) + ``` + + Another example is constructing an `OPTIONS *` request... + + ```python + # Constructs an 'OPTIONS *' HTTP request: + # OPTIONS * HTTP/1.1 + url = httpcore.URL(scheme=b'https', host=b'www.example.com', target=b'*') + request = httpcore.Request(method="OPTIONS", url=url) + ``` + + This kind of request is not possible to formulate with a URL string, + because the `/` delimiter is always used to demark the target from the + host/port portion of the URL. + + For convenience, string-like arguments may be specified either as strings or + as bytes. However, once a request is being issue over-the-wire, the URL + components are always ultimately required to be a bytewise representation. + + In order to avoid any ambiguity over character encodings, when strings are used + as arguments, they must be strictly limited to the ASCII range `chr(0)`-`chr(127)`. + If you require a bytewise representation that is outside this range you must + handle the character encoding directly, and pass a bytes instance. + """ + + def __init__( + self, + url: bytes | str = "", + *, + scheme: bytes | str = b"", + host: bytes | str = b"", + port: int | None = None, + target: bytes | str = b"", + ) -> None: + """ + Parameters: + url: The complete URL as a string or bytes. + scheme: The URL scheme as a string or bytes. + Typically either `"http"` or `"https"`. + host: The URL host as a string or bytes. Such as `"www.example.com"`. + port: The port to connect to. Either an integer or `None`. + target: The target of the HTTP request. Such as `"/items?search=red"`. + """ + if url: + parsed = urllib.parse.urlparse(enforce_bytes(url, name="url")) + self.scheme = parsed.scheme + self.host = parsed.hostname or b"" + self.port = parsed.port + self.target = (parsed.path or b"/") + ( + b"?" + parsed.query if parsed.query else b"" + ) + else: + self.scheme = enforce_bytes(scheme, name="scheme") + self.host = enforce_bytes(host, name="host") + self.port = port + self.target = enforce_bytes(target, name="target") + + @property + def origin(self) -> Origin: + default_port = { + b"http": 80, + b"https": 443, + b"ws": 80, + b"wss": 443, + b"socks5": 1080, + b"socks5h": 1080, + }[self.scheme] + return Origin( + scheme=self.scheme, host=self.host, port=self.port or default_port + ) + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, URL) + and other.scheme == self.scheme + and other.host == self.host + and other.port == self.port + and other.target == self.target + ) + + def __bytes__(self) -> bytes: + if self.port is None: + return b"%b://%b%b" % (self.scheme, self.host, self.target) + return b"%b://%b:%d%b" % (self.scheme, self.host, self.port, self.target) + + def __repr__(self) -> str: + return ( + f"{self.__class__.__name__}(scheme={self.scheme!r}, " + f"host={self.host!r}, port={self.port!r}, target={self.target!r})" + ) + + +class Request: + """ + An HTTP request. + """ + + def __init__( + self, + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes + | typing.Iterable[bytes] + | typing.AsyncIterable[bytes] + | None = None, + extensions: Extensions | None = None, + ) -> None: + """ + Parameters: + method: The HTTP request method, either as a string or bytes. 
+ For example: `GET`. + url: The request URL, either as a `URL` instance, or as a string or bytes. + For example: `"https://www.example.com".` + headers: The HTTP request headers. + content: The content of the request body. + extensions: A dictionary of optional extra information included on + the request. Possible keys include `"timeout"`, and `"trace"`. + """ + self.method: bytes = enforce_bytes(method, name="method") + self.url: URL = enforce_url(url, name="url") + self.headers: list[tuple[bytes, bytes]] = enforce_headers( + headers, name="headers" + ) + self.stream: typing.Iterable[bytes] | typing.AsyncIterable[bytes] = ( + enforce_stream(content, name="content") + ) + self.extensions = {} if extensions is None else extensions + + if "target" in self.extensions: + self.url = URL( + scheme=self.url.scheme, + host=self.url.host, + port=self.url.port, + target=self.extensions["target"], + ) + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.method!r}]>" + + +class Response: + """ + An HTTP response. + """ + + def __init__( + self, + status: int, + *, + headers: HeaderTypes = None, + content: bytes + | typing.Iterable[bytes] + | typing.AsyncIterable[bytes] + | None = None, + extensions: Extensions | None = None, + ) -> None: + """ + Parameters: + status: The HTTP status code of the response. For example `200`. + headers: The HTTP response headers. + content: The content of the response body. + extensions: A dictionary of optional extra information included on + the responseself.Possible keys include `"http_version"`, + `"reason_phrase"`, and `"network_stream"`. + """ + self.status: int = status + self.headers: list[tuple[bytes, bytes]] = enforce_headers( + headers, name="headers" + ) + self.stream: typing.Iterable[bytes] | typing.AsyncIterable[bytes] = ( + enforce_stream(content, name="content") + ) + self.extensions = {} if extensions is None else extensions + + self._stream_consumed = False + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + if isinstance(self.stream, typing.Iterable): + raise RuntimeError( + "Attempted to access 'response.content' on a streaming response. " + "Call 'response.read()' first." + ) + else: + raise RuntimeError( + "Attempted to access 'response.content' on a streaming response. " + "Call 'await response.aread()' first." + ) + return self._content + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.status}]>" + + # Sync interface... + + def read(self) -> bytes: + if not isinstance(self.stream, typing.Iterable): # pragma: nocover + raise RuntimeError( + "Attempted to read an asynchronous response using 'response.read()'. " + "You should use 'await response.aread()' instead." + ) + if not hasattr(self, "_content"): + self._content = b"".join([part for part in self.iter_stream()]) + return self._content + + def iter_stream(self) -> typing.Iterator[bytes]: + if not isinstance(self.stream, typing.Iterable): # pragma: nocover + raise RuntimeError( + "Attempted to stream an asynchronous response using 'for ... in " + "response.iter_stream()'. " + "You should use 'async for ... in response.aiter_stream()' instead." + ) + if self._stream_consumed: + raise RuntimeError( + "Attempted to call 'for ... in response.iter_stream()' more than once." 
+ ) + self._stream_consumed = True + for chunk in self.stream: + yield chunk + + def close(self) -> None: + if not isinstance(self.stream, typing.Iterable): # pragma: nocover + raise RuntimeError( + "Attempted to close an asynchronous response using 'response.close()'. " + "You should use 'await response.aclose()' instead." + ) + if hasattr(self.stream, "close"): + self.stream.close() + + # Async interface... + + async def aread(self) -> bytes: + if not isinstance(self.stream, typing.AsyncIterable): # pragma: nocover + raise RuntimeError( + "Attempted to read an synchronous response using " + "'await response.aread()'. " + "You should use 'response.read()' instead." + ) + if not hasattr(self, "_content"): + self._content = b"".join([part async for part in self.aiter_stream()]) + return self._content + + async def aiter_stream(self) -> typing.AsyncIterator[bytes]: + if not isinstance(self.stream, typing.AsyncIterable): # pragma: nocover + raise RuntimeError( + "Attempted to stream an synchronous response using 'async for ... in " + "response.aiter_stream()'. " + "You should use 'for ... in response.iter_stream()' instead." + ) + if self._stream_consumed: + raise RuntimeError( + "Attempted to call 'async for ... in response.aiter_stream()' " + "more than once." + ) + self._stream_consumed = True + async for chunk in self.stream: + yield chunk + + async def aclose(self) -> None: + if not isinstance(self.stream, typing.AsyncIterable): # pragma: nocover + raise RuntimeError( + "Attempted to close a synchronous response using " + "'await response.aclose()'. " + "You should use 'response.close()' instead." + ) + if hasattr(self.stream, "aclose"): + await self.stream.aclose() + + +class Proxy: + def __init__( + self, + url: URL | bytes | str, + auth: tuple[bytes | str, bytes | str] | None = None, + headers: HeadersAsMapping | HeadersAsSequence | None = None, + ssl_context: ssl.SSLContext | None = None, + ): + self.url = enforce_url(url, name="url") + self.headers = enforce_headers(headers, name="headers") + self.ssl_context = ssl_context + + if auth is not None: + username = enforce_bytes(auth[0], name="auth") + password = enforce_bytes(auth[1], name="auth") + userpass = username + b":" + password + authorization = b"Basic " + base64.b64encode(userpass) + self.auth: tuple[bytes, bytes] | None = (username, password) + self.headers = [(b"Proxy-Authorization", authorization)] + self.headers + else: + self.auth = None diff --git a/.venv/lib/python3.12/site-packages/httpcore/_ssl.py b/.venv/lib/python3.12/site-packages/httpcore/_ssl.py new file mode 100644 index 0000000..c99c5a6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_ssl.py @@ -0,0 +1,9 @@ +import ssl + +import certifi + + +def default_ssl_context() -> ssl.SSLContext: + context = ssl.create_default_context() + context.load_verify_locations(certifi.where()) + return context diff --git a/.venv/lib/python3.12/site-packages/httpcore/_sync/__init__.py b/.venv/lib/python3.12/site-packages/httpcore/_sync/__init__.py new file mode 100644 index 0000000..b476d76 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_sync/__init__.py @@ -0,0 +1,39 @@ +from .connection import HTTPConnection +from .connection_pool import ConnectionPool +from .http11 import HTTP11Connection +from .http_proxy import HTTPProxy +from .interfaces import ConnectionInterface + +try: + from .http2 import HTTP2Connection +except ImportError: # pragma: nocover + + class HTTP2Connection: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: 
ignore + raise RuntimeError( + "Attempted to use http2 support, but the `h2` package is not " + "installed. Use 'pip install httpcore[http2]'." + ) + + +try: + from .socks_proxy import SOCKSProxy +except ImportError: # pragma: nocover + + class SOCKSProxy: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use SOCKS support, but the `socksio` package is not " + "installed. Use 'pip install httpcore[socks]'." + ) + + +__all__ = [ + "HTTPConnection", + "ConnectionPool", + "HTTPProxy", + "HTTP11Connection", + "HTTP2Connection", + "ConnectionInterface", + "SOCKSProxy", +] diff --git a/.venv/lib/python3.12/site-packages/httpcore/_sync/connection.py b/.venv/lib/python3.12/site-packages/httpcore/_sync/connection.py new file mode 100644 index 0000000..363f8be --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_sync/connection.py @@ -0,0 +1,222 @@ +from __future__ import annotations + +import itertools +import logging +import ssl +import types +import typing + +from .._backends.sync import SyncBackend +from .._backends.base import SOCKET_OPTION, NetworkBackend, NetworkStream +from .._exceptions import ConnectError, ConnectTimeout +from .._models import Origin, Request, Response +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface + +RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. + + +logger = logging.getLogger("httpcore.connection") + + +def exponential_backoff(factor: float) -> typing.Iterator[float]: + """ + Generate a geometric sequence that has a ratio of 2 and starts with 0. + + For example: + - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` + - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` + """ + yield 0 + for n in itertools.count(): + yield factor * 2**n + + +class HTTPConnection(ConnectionInterface): + def __init__( + self, + origin: Origin, + ssl_context: ssl.SSLContext | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + self._origin = origin + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend: NetworkBackend = ( + SyncBackend() if network_backend is None else network_backend + ) + self._connection: ConnectionInterface | None = None + self._connect_failed: bool = False + self._request_lock = Lock() + self._socket_options = socket_options + + def handle_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection to {self._origin}" + ) + + try: + with self._request_lock: + if self._connection is None: + stream = self._connect(request) + + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + 
self._connection = HTTP11Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except BaseException as exc: + self._connect_failed = True + raise exc + + return self._connection.handle_request(request) + + def _connect(self, request: Request) -> NetworkStream: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + retries_left = self._retries + delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) + + while True: + try: + if self._uds is None: + kwargs = { + "host": self._origin.host.decode("ascii"), + "port": self._origin.port, + "local_address": self._local_address, + "timeout": timeout, + "socket_options": self._socket_options, + } + with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + else: + kwargs = { + "path": self._uds, + "timeout": timeout, + "socket_options": self._socket_options, + } + with Trace( + "connect_unix_socket", logger, request, kwargs + ) as trace: + stream = self._network_backend.connect_unix_socket( + **kwargs + ) + trace.return_value = stream + + if self._origin.scheme in (b"https", b"wss"): + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + return stream + except (ConnectError, ConnectTimeout): + if retries_left <= 0: + raise + retries_left -= 1 + delay = next(delays) + with Trace("retry", logger, request, kwargs) as trace: + self._network_backend.sleep(delay) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def close(self) -> None: + if self._connection is not None: + with Trace("close", logger, None, {}): + self._connection.close() + + def is_available(self) -> bool: + if self._connection is None: + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
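+    #
+    # A minimal usage sketch (illustrative only; the host and URL below are
+    # placeholders, and in the standard flow the connection pool manages
+    # connection instances for you):
+    #
+    #     origin = Origin(scheme=b"https", host=b"example.com", port=443)
+    #     with HTTPConnection(origin=origin) as connection:
+    #         response = connection.request("GET", "https://example.com/")
+    #         print(response.status)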
+ + def __enter__(self) -> HTTPConnection: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self.close() diff --git a/.venv/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py b/.venv/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py new file mode 100644 index 0000000..9ccfa53 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py @@ -0,0 +1,420 @@ +from __future__ import annotations + +import ssl +import sys +import types +import typing + +from .._backends.sync import SyncBackend +from .._backends.base import SOCKET_OPTION, NetworkBackend +from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol +from .._models import Origin, Proxy, Request, Response +from .._synchronization import Event, ShieldCancellation, ThreadLock +from .connection import HTTPConnection +from .interfaces import ConnectionInterface, RequestInterface + + +class PoolRequest: + def __init__(self, request: Request) -> None: + self.request = request + self.connection: ConnectionInterface | None = None + self._connection_acquired = Event() + + def assign_to_connection(self, connection: ConnectionInterface | None) -> None: + self.connection = connection + self._connection_acquired.set() + + def clear_connection(self) -> None: + self.connection = None + self._connection_acquired = Event() + + def wait_for_connection( + self, timeout: float | None = None + ) -> ConnectionInterface: + if self.connection is None: + self._connection_acquired.wait(timeout=timeout) + assert self.connection is not None + return self.connection + + def is_queued(self) -> bool: + return self.connection is None + + +class ConnectionPool(RequestInterface): + """ + A connection pool for making HTTP requests. + """ + + def __init__( + self, + ssl_context: ssl.SSLContext | None = None, + proxy: Proxy | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish a + connection. + local_address: Local address to connect from. Can also be used to connect + using a particular address family. 
Using `local_address="0.0.0.0"` + will connect using an `AF_INET` address (IPv4), while using + `local_address="::"` will connect using an `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. + socket_options: Socket options that have to be included + in the TCP socket when the connection was established. + """ + self._ssl_context = ssl_context + self._proxy = proxy + self._max_connections = ( + sys.maxsize if max_connections is None else max_connections + ) + self._max_keepalive_connections = ( + sys.maxsize + if max_keepalive_connections is None + else max_keepalive_connections + ) + self._max_keepalive_connections = min( + self._max_connections, self._max_keepalive_connections + ) + + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend = ( + SyncBackend() if network_backend is None else network_backend + ) + self._socket_options = socket_options + + # The mutable state on a connection pool is the queue of incoming requests, + # and the set of connections that are servicing those requests. + self._connections: list[ConnectionInterface] = [] + self._requests: list[PoolRequest] = [] + + # We only mutate the state of the connection pool within an 'optional_thread_lock' + # context. This holds a threading lock unless we're running in async mode, + # in which case it is a no-op. + self._optional_thread_lock = ThreadLock() + + def create_connection(self, origin: Origin) -> ConnectionInterface: + if self._proxy is not None: + if self._proxy.url.scheme in (b"socks5", b"socks5h"): + from .socks_proxy import Socks5Connection + + return Socks5Connection( + proxy_origin=self._proxy.url.origin, + proxy_auth=self._proxy.auth, + remote_origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + elif origin.scheme == b"http": + from .http_proxy import ForwardHTTPConnection + + return ForwardHTTPConnection( + proxy_origin=self._proxy.url.origin, + proxy_headers=self._proxy.headers, + proxy_ssl_context=self._proxy.ssl_context, + remote_origin=origin, + keepalive_expiry=self._keepalive_expiry, + network_backend=self._network_backend, + ) + from .http_proxy import TunnelHTTPConnection + + return TunnelHTTPConnection( + proxy_origin=self._proxy.url.origin, + proxy_headers=self._proxy.headers, + proxy_ssl_context=self._proxy.ssl_context, + remote_origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + return HTTPConnection( + origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + retries=self._retries, + local_address=self._local_address, + uds=self._uds, + network_backend=self._network_backend, + socket_options=self._socket_options, + ) + + @property + def connections(self) -> list[ConnectionInterface]: + """ + Return a list of the connections currently in the pool. + + For example: + + ```python + >>> pool.connections + [ + , + , + , + ] + ``` + """ + return list(self._connections) + + def handle_request(self, request: Request) -> Response: + """ + Send an HTTP request, and return an HTTP response. 
+ + This is the core implementation that is called into by `.request()` or `.stream()`. + """ + scheme = request.url.scheme.decode() + if scheme == "": + raise UnsupportedProtocol( + "Request URL is missing an 'http://' or 'https://' protocol." + ) + if scheme not in ("http", "https", "ws", "wss"): + raise UnsupportedProtocol( + f"Request URL has an unsupported protocol '{scheme}://'." + ) + + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("pool", None) + + with self._optional_thread_lock: + # Add the incoming request to our request queue. + pool_request = PoolRequest(request) + self._requests.append(pool_request) + + try: + while True: + with self._optional_thread_lock: + # Assign incoming requests to available connections, + # closing or creating new connections as required. + closing = self._assign_requests_to_connections() + self._close_connections(closing) + + # Wait until this request has an assigned connection. + connection = pool_request.wait_for_connection(timeout=timeout) + + try: + # Send the request on the assigned connection. + response = connection.handle_request( + pool_request.request + ) + except ConnectionNotAvailable: + # In some cases a connection may initially be available to + # handle a request, but then become unavailable. + # + # In this case we clear the connection and try again. + pool_request.clear_connection() + else: + break # pragma: nocover + + except BaseException as exc: + with self._optional_thread_lock: + # For any exception or cancellation we remove the request from + # the queue, and then re-assign requests to connections. + self._requests.remove(pool_request) + closing = self._assign_requests_to_connections() + + self._close_connections(closing) + raise exc from None + + # Return the response. Note that in this case we still have to manage + # the point at which the response is closed. + assert isinstance(response.stream, typing.Iterable) + return Response( + status=response.status, + headers=response.headers, + content=PoolByteStream( + stream=response.stream, pool_request=pool_request, pool=self + ), + extensions=response.extensions, + ) + + def _assign_requests_to_connections(self) -> list[ConnectionInterface]: + """ + Manage the state of the connection pool, assigning incoming + requests to connections as available. + + Called whenever a new request is added or removed from the pool. + + Any closing connections are returned, allowing the I/O for closing + those connections to be handled seperately. + """ + closing_connections = [] + + # First we handle cleaning up any connections that are closed, + # have expired their keep-alive, or surplus idle connections. + for connection in list(self._connections): + if connection.is_closed(): + # log: "removing closed connection" + self._connections.remove(connection) + elif connection.has_expired(): + # log: "closing expired connection" + self._connections.remove(connection) + closing_connections.append(connection) + elif ( + connection.is_idle() + and len([connection.is_idle() for connection in self._connections]) + > self._max_keepalive_connections + ): + # log: "closing idle connection" + self._connections.remove(connection) + closing_connections.append(connection) + + # Assign queued requests to connections. 
+ queued_requests = [request for request in self._requests if request.is_queued()] + for pool_request in queued_requests: + origin = pool_request.request.url.origin + available_connections = [ + connection + for connection in self._connections + if connection.can_handle_request(origin) and connection.is_available() + ] + idle_connections = [ + connection for connection in self._connections if connection.is_idle() + ] + + # There are three cases for how we may be able to handle the request: + # + # 1. There is an existing connection that can handle the request. + # 2. We can create a new connection to handle the request. + # 3. We can close an idle connection and then create a new connection + # to handle the request. + if available_connections: + # log: "reusing existing connection" + connection = available_connections[0] + pool_request.assign_to_connection(connection) + elif len(self._connections) < self._max_connections: + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + elif idle_connections: + # log: "closing idle connection" + connection = idle_connections[0] + self._connections.remove(connection) + closing_connections.append(connection) + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + + return closing_connections + + def _close_connections(self, closing: list[ConnectionInterface]) -> None: + # Close connections which have been removed from the pool. + with ShieldCancellation(): + for connection in closing: + connection.close() + + def close(self) -> None: + # Explicitly close the connection pool. + # Clears all existing requests and connections. 
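+        # In typical usage the pool is used as a context manager, so this
+        # cleanup runs automatically on exit. A minimal sketch (the URL is a
+        # placeholder):
+        #
+        #     with ConnectionPool() as pool:
+        #         response = pool.request("GET", "https://example.com/")
+        #         print(response.status)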
+ with self._optional_thread_lock: + closing_connections = list(self._connections) + self._connections = [] + self._close_connections(closing_connections) + + def __enter__(self) -> ConnectionPool: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self.close() + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + with self._optional_thread_lock: + request_is_queued = [request.is_queued() for request in self._requests] + connection_is_idle = [ + connection.is_idle() for connection in self._connections + ] + + num_active_requests = request_is_queued.count(False) + num_queued_requests = request_is_queued.count(True) + num_active_connections = connection_is_idle.count(False) + num_idle_connections = connection_is_idle.count(True) + + requests_info = ( + f"Requests: {num_active_requests} active, {num_queued_requests} queued" + ) + connection_info = ( + f"Connections: {num_active_connections} active, {num_idle_connections} idle" + ) + + return f"<{class_name} [{requests_info} | {connection_info}]>" + + +class PoolByteStream: + def __init__( + self, + stream: typing.Iterable[bytes], + pool_request: PoolRequest, + pool: ConnectionPool, + ) -> None: + self._stream = stream + self._pool_request = pool_request + self._pool = pool + self._closed = False + + def __iter__(self) -> typing.Iterator[bytes]: + try: + for part in self._stream: + yield part + except BaseException as exc: + self.close() + raise exc from None + + def close(self) -> None: + if not self._closed: + self._closed = True + with ShieldCancellation(): + if hasattr(self._stream, "close"): + self._stream.close() + + with self._pool._optional_thread_lock: + self._pool._requests.remove(self._pool_request) + closing = self._pool._assign_requests_to_connections() + + self._pool._close_connections(closing) diff --git a/.venv/lib/python3.12/site-packages/httpcore/_sync/http11.py b/.venv/lib/python3.12/site-packages/httpcore/_sync/http11.py new file mode 100644 index 0000000..ebd3a97 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_sync/http11.py @@ -0,0 +1,379 @@ +from __future__ import annotations + +import enum +import logging +import ssl +import time +import types +import typing + +import h11 + +from .._backends.base import NetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, + WriteError, + map_exceptions, +) +from .._models import Origin, Request, Response +from .._synchronization import Lock, ShieldCancellation +from .._trace import Trace +from .interfaces import ConnectionInterface + +logger = logging.getLogger("httpcore.http11") + + +# A subset of `h11.Event` types supported by `_send_event` +H11SendEvent = typing.Union[ + h11.Request, + h11.Data, + h11.EndOfMessage, +] + + +class HTTPConnectionState(enum.IntEnum): + NEW = 0 + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class HTTP11Connection(ConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 + + def __init__( + self, + origin: Origin, + stream: NetworkStream, + keepalive_expiry: float | None = None, + ) -> None: + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: float | None = keepalive_expiry + self._expire_at: float | None = None + self._state = HTTPConnectionState.NEW + self._state_lock = Lock() + self._request_count = 0 + self._h11_state = h11.Connection( + our_role=h11.CLIENT, + 
max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE, + ) + + def handle_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + with self._state_lock: + if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE): + self._request_count += 1 + self._state = HTTPConnectionState.ACTIVE + self._expire_at = None + else: + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request} + try: + with Trace( + "send_request_headers", logger, request, kwargs + ) as trace: + self._send_request_headers(**kwargs) + with Trace("send_request_body", logger, request, kwargs) as trace: + self._send_request_body(**kwargs) + except WriteError: + # If we get a write error while we're writing the request, + # then we supress this error and move on to attempting to + # read the response. Servers can sometimes close the request + # pre-emptively and then respond with a well formed HTTP + # error response. + pass + + with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + ( + http_version, + status, + reason_phrase, + headers, + trailing_data, + ) = self._receive_response_headers(**kwargs) + trace.return_value = ( + http_version, + status, + reason_phrase, + headers, + ) + + network_stream = self._network_stream + + # CONNECT or Upgrade request + if (status == 101) or ( + (request.method == b"CONNECT") and (200 <= status < 300) + ): + network_stream = HTTP11UpgradeStream(network_stream, trailing_data) + + return Response( + status=status, + headers=headers, + content=HTTP11ConnectionByteStream(self, request), + extensions={ + "http_version": http_version, + "reason_phrase": reason_phrase, + "network_stream": network_stream, + }, + ) + except BaseException as exc: + with ShieldCancellation(): + with Trace("response_closed", logger, request) as trace: + self._response_closed() + raise exc + + # Sending the request... + + def _send_request_headers(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): + event = h11.Request( + method=request.method, + target=request.url.target, + headers=request.headers, + ) + self._send_event(event, timeout=timeout) + + def _send_request_body(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + assert isinstance(request.stream, typing.Iterable) + for chunk in request.stream: + event = h11.Data(data=chunk) + self._send_event(event, timeout=timeout) + + self._send_event(h11.EndOfMessage(), timeout=timeout) + + def _send_event(self, event: h11.Event, timeout: float | None = None) -> None: + bytes_to_send = self._h11_state.send(event) + if bytes_to_send is not None: + self._network_stream.write(bytes_to_send, timeout=timeout) + + # Receiving the response... 
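+    #
+    # Incoming bytes are fed to the h11 state machine: `_receive_event()`
+    # reads from the network stream until h11 can emit the next event, and
+    # the methods below translate h11.Response / h11.Data / h11.EndOfMessage
+    # events into the response headers and body chunks.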
+ + def _receive_response_headers( + self, request: Request + ) -> tuple[bytes, int, bytes, list[tuple[bytes, bytes]], bytes]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + while True: + event = self._receive_event(timeout=timeout) + if isinstance(event, h11.Response): + break + if ( + isinstance(event, h11.InformationalResponse) + and event.status_code == 101 + ): + break + + http_version = b"HTTP/" + event.http_version + + # h11 version 0.11+ supports a `raw_items` interface to get the + # raw header casing, rather than the enforced lowercase headers. + headers = event.headers.raw_items() + + trailing_data, _ = self._h11_state.trailing_data + + return http_version, event.status_code, event.reason, headers, trailing_data + + def _receive_response_body( + self, request: Request + ) -> typing.Iterator[bytes]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + while True: + event = self._receive_event(timeout=timeout) + if isinstance(event, h11.Data): + yield bytes(event.data) + elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): + break + + def _receive_event( + self, timeout: float | None = None + ) -> h11.Event | type[h11.PAUSED]: + while True: + with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): + event = self._h11_state.next_event() + + if event is h11.NEED_DATA: + data = self._network_stream.read( + self.READ_NUM_BYTES, timeout=timeout + ) + + # If we feed this case through h11 we'll raise an exception like: + # + # httpcore.RemoteProtocolError: can't handle event type + # ConnectionClosed when role=SERVER and state=SEND_RESPONSE + # + # Which is accurate, but not very informative from an end-user + # perspective. Instead we handle this case distinctly and treat + # it as a ConnectError. + if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: + msg = "Server disconnected without sending a response." + raise RemoteProtocolError(msg) + + self._h11_state.receive_data(data) + else: + # mypy fails to narrow the type in the above if statement above + return event # type: ignore[return-value] + + def _response_closed(self) -> None: + with self._state_lock: + if ( + self._h11_state.our_state is h11.DONE + and self._h11_state.their_state is h11.DONE + ): + self._state = HTTPConnectionState.IDLE + self._h11_state.start_next_cycle() + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + else: + self.close() + + # Once the connection is no longer required... + + def close(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._state = HTTPConnectionState.CLOSED + self._network_stream.close() + + # The ConnectionInterface methods provide information about the state of + # the connection, allowing for a connection pooling implementation to + # determine when to reuse and when to close the connection... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + # Note that HTTP/1.1 connections in the "NEW" state are not treated as + # being "available". The control flow which created the connection will + # be able to send an outgoing request, but the connection will not be + # acquired from the connection pool for any other request. 
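+        # In other words, a connection only becomes available for reuse once
+        # a previous request/response cycle has completed and returned it to
+        # the IDLE state.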
+ return self._state == HTTPConnectionState.IDLE + + def has_expired(self) -> bool: + now = time.monotonic() + keepalive_expired = self._expire_at is not None and now > self._expire_at + + # If the HTTP connection is idle but the socket is readable, then the + # only valid state is that the socket is about to return b"", indicating + # a server-initiated disconnect. + server_disconnected = ( + self._state == HTTPConnectionState.IDLE + and self._network_stream.get_extra_info("is_readable") + ) + + return keepalive_expired or server_disconnected + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/1.1, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. + + def __enter__(self) -> HTTP11Connection: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self.close() + + +class HTTP11ConnectionByteStream: + def __init__(self, connection: HTTP11Connection, request: Request) -> None: + self._connection = connection + self._request = request + self._closed = False + + def __iter__(self) -> typing.Iterator[bytes]: + kwargs = {"request": self._request} + try: + with Trace("receive_response_body", logger, self._request, kwargs): + for chunk in self._connection._receive_response_body(**kwargs): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. 
+ with ShieldCancellation(): + self.close() + raise exc + + def close(self) -> None: + if not self._closed: + self._closed = True + with Trace("response_closed", logger, self._request): + self._connection._response_closed() + + +class HTTP11UpgradeStream(NetworkStream): + def __init__(self, stream: NetworkStream, leading_data: bytes) -> None: + self._stream = stream + self._leading_data = leading_data + + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + if self._leading_data: + buffer = self._leading_data[:max_bytes] + self._leading_data = self._leading_data[max_bytes:] + return buffer + else: + return self._stream.read(max_bytes, timeout) + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + self._stream.write(buffer, timeout) + + def close(self) -> None: + self._stream.close() + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: + return self._stream.start_tls(ssl_context, server_hostname, timeout) + + def get_extra_info(self, info: str) -> typing.Any: + return self._stream.get_extra_info(info) diff --git a/.venv/lib/python3.12/site-packages/httpcore/_sync/http2.py b/.venv/lib/python3.12/site-packages/httpcore/_sync/http2.py new file mode 100644 index 0000000..ddcc189 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_sync/http2.py @@ -0,0 +1,592 @@ +from __future__ import annotations + +import enum +import logging +import time +import types +import typing + +import h2.config +import h2.connection +import h2.events +import h2.exceptions +import h2.settings + +from .._backends.base import NetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, +) +from .._models import Origin, Request, Response +from .._synchronization import Lock, Semaphore, ShieldCancellation +from .._trace import Trace +from .interfaces import ConnectionInterface + +logger = logging.getLogger("httpcore.http2") + + +def has_body_headers(request: Request) -> bool: + return any( + k.lower() == b"content-length" or k.lower() == b"transfer-encoding" + for k, v in request.headers + ) + + +class HTTPConnectionState(enum.IntEnum): + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class HTTP2Connection(ConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) + + def __init__( + self, + origin: Origin, + stream: NetworkStream, + keepalive_expiry: float | None = None, + ): + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: float | None = keepalive_expiry + self._h2_state = h2.connection.H2Connection(config=self.CONFIG) + self._state = HTTPConnectionState.IDLE + self._expire_at: float | None = None + self._request_count = 0 + self._init_lock = Lock() + self._state_lock = Lock() + self._read_lock = Lock() + self._write_lock = Lock() + self._sent_connection_init = False + self._used_all_stream_ids = False + self._connection_error = False + + # Mapping from stream ID to response stream events. + self._events: dict[ + int, + list[ + h2.events.ResponseReceived + | h2.events.DataReceived + | h2.events.StreamEnded + | h2.events.StreamReset, + ], + ] = {} + + # Connection terminated events are stored as state since + # we need to handle them for all streams. 
+ self._connection_terminated: h2.events.ConnectionTerminated | None = None + + self._read_exception: Exception | None = None + self._write_exception: Exception | None = None + + def handle_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + # This cannot occur in normal operation, since the connection pool + # will only send requests on connections that handle them. + # It's in place simply for resilience as a guard against incorrect + # usage, for anyone working directly with httpcore connections. + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + with self._state_lock: + if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): + self._request_count += 1 + self._expire_at = None + self._state = HTTPConnectionState.ACTIVE + else: + raise ConnectionNotAvailable() + + with self._init_lock: + if not self._sent_connection_init: + try: + sci_kwargs = {"request": request} + with Trace( + "send_connection_init", logger, request, sci_kwargs + ): + self._send_connection_init(**sci_kwargs) + except BaseException as exc: + with ShieldCancellation(): + self.close() + raise exc + + self._sent_connection_init = True + + # Initially start with just 1 until the remote server provides + # its max_concurrent_streams value + self._max_streams = 1 + + local_settings_max_streams = ( + self._h2_state.local_settings.max_concurrent_streams + ) + self._max_streams_semaphore = Semaphore(local_settings_max_streams) + + for _ in range(local_settings_max_streams - self._max_streams): + self._max_streams_semaphore.acquire() + + self._max_streams_semaphore.acquire() + + try: + stream_id = self._h2_state.get_next_available_stream_id() + self._events[stream_id] = [] + except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover + self._used_all_stream_ids = True + self._request_count -= 1 + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request, "stream_id": stream_id} + with Trace("send_request_headers", logger, request, kwargs): + self._send_request_headers(request=request, stream_id=stream_id) + with Trace("send_request_body", logger, request, kwargs): + self._send_request_body(request=request, stream_id=stream_id) + with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + status, headers = self._receive_response( + request=request, stream_id=stream_id + ) + trace.return_value = (status, headers) + + return Response( + status=status, + headers=headers, + content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), + extensions={ + "http_version": b"HTTP/2", + "network_stream": self._network_stream, + "stream_id": stream_id, + }, + ) + except BaseException as exc: # noqa: PIE786 + with ShieldCancellation(): + kwargs = {"stream_id": stream_id} + with Trace("response_closed", logger, request, kwargs): + self._response_closed(stream_id=stream_id) + + if isinstance(exc, h2.exceptions.ProtocolError): + # One case where h2 can raise a protocol error is when a + # closed frame has been seen by the state machine. + # + # This happens when one stream is reading, and encounters + # a GOAWAY event. Other flows of control may then raise + # a protocol error at any point they interact with the 'h2_state'. + # + # In this case we'll have stored the event, and should raise + # it as a RemoteProtocolError. 
+ if self._connection_terminated: # pragma: nocover + raise RemoteProtocolError(self._connection_terminated) + # If h2 raises a protocol error in some other state then we + # must somehow have made a protocol violation. + raise LocalProtocolError(exc) # pragma: nocover + + raise exc + + def _send_connection_init(self, request: Request) -> None: + """ + The HTTP/2 connection requires some initial setup before we can start + using individual request/response streams on it. + """ + # Need to set these manually here instead of manipulating via + # __setitem__() otherwise the H2Connection will emit SettingsUpdate + # frames in addition to sending the undesired defaults. + self._h2_state.local_settings = h2.settings.Settings( + client=True, + initial_values={ + # Disable PUSH_PROMISE frames from the server since we don't do anything + # with them for now. Maybe when we support caching? + h2.settings.SettingCodes.ENABLE_PUSH: 0, + # These two are taken from h2 for safe defaults + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, + h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, + }, + ) + + # Some websites (*cough* Yahoo *cough*) balk at this setting being + # present in the initial handshake since it's not defined in the original + # RFC despite the RFC mandating ignoring settings you don't know about. + del self._h2_state.local_settings[ + h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL + ] + + self._h2_state.initiate_connection() + self._h2_state.increment_flow_control_window(2**24) + self._write_outgoing_data(request) + + # Sending the request... + + def _send_request_headers(self, request: Request, stream_id: int) -> None: + """ + Send the request headers to a given stream ID. + """ + end_stream = not has_body_headers(request) + + # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. + # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require + # HTTP/1.1 style headers, and map them appropriately if we end up on + # an HTTP/2 connection. + authority = [v for k, v in request.headers if k.lower() == b"host"][0] + + headers = [ + (b":method", request.method), + (b":authority", authority), + (b":scheme", request.url.scheme), + (b":path", request.url.target), + ] + [ + (k.lower(), v) + for k, v in request.headers + if k.lower() + not in ( + b"host", + b"transfer-encoding", + ) + ] + + self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) + self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id) + self._write_outgoing_data(request) + + def _send_request_body(self, request: Request, stream_id: int) -> None: + """ + Iterate over the request body sending it to a given stream ID. + """ + if not has_body_headers(request): + return + + assert isinstance(request.stream, typing.Iterable) + for data in request.stream: + self._send_stream_data(request, stream_id, data) + self._send_end_stream(request, stream_id) + + def _send_stream_data( + self, request: Request, stream_id: int, data: bytes + ) -> None: + """ + Send a single chunk of data in one or more data frames. + """ + while data: + max_flow = self._wait_for_outgoing_flow(request, stream_id) + chunk_size = min(len(data), max_flow) + chunk, data = data[:chunk_size], data[chunk_size:] + self._h2_state.send_data(stream_id, chunk) + self._write_outgoing_data(request) + + def _send_end_stream(self, request: Request, stream_id: int) -> None: + """ + Send an empty data frame on on a given stream ID with the END_STREAM flag set. 
+ """ + self._h2_state.end_stream(stream_id) + self._write_outgoing_data(request) + + # Receiving the response... + + def _receive_response( + self, request: Request, stream_id: int + ) -> tuple[int, list[tuple[bytes, bytes]]]: + """ + Return the response status code and headers for a given stream ID. + """ + while True: + event = self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.ResponseReceived): + break + + status_code = 200 + headers = [] + assert event.headers is not None + for k, v in event.headers: + if k == b":status": + status_code = int(v.decode("ascii", errors="ignore")) + elif not k.startswith(b":"): + headers.append((k, v)) + + return (status_code, headers) + + def _receive_response_body( + self, request: Request, stream_id: int + ) -> typing.Iterator[bytes]: + """ + Iterator that returns the bytes of the response body for a given stream ID. + """ + while True: + event = self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.DataReceived): + assert event.flow_controlled_length is not None + assert event.data is not None + amount = event.flow_controlled_length + self._h2_state.acknowledge_received_data(amount, stream_id) + self._write_outgoing_data(request) + yield event.data + elif isinstance(event, h2.events.StreamEnded): + break + + def _receive_stream_event( + self, request: Request, stream_id: int + ) -> h2.events.ResponseReceived | h2.events.DataReceived | h2.events.StreamEnded: + """ + Return the next available event for a given stream ID. + + Will read more data from the network if required. + """ + while not self._events.get(stream_id): + self._receive_events(request, stream_id) + event = self._events[stream_id].pop(0) + if isinstance(event, h2.events.StreamReset): + raise RemoteProtocolError(event) + return event + + def _receive_events( + self, request: Request, stream_id: int | None = None + ) -> None: + """ + Read some data from the network until we see one or more events + for a given stream ID. + """ + with self._read_lock: + if self._connection_terminated is not None: + last_stream_id = self._connection_terminated.last_stream_id + if stream_id and last_stream_id and stream_id > last_stream_id: + self._request_count -= 1 + raise ConnectionNotAvailable() + raise RemoteProtocolError(self._connection_terminated) + + # This conditional is a bit icky. We don't want to block reading if we've + # actually got an event to return for a given stream. We need to do that + # check *within* the atomic read lock. Though it also need to be optional, + # because when we call it from `_wait_for_outgoing_flow` we *do* want to + # block until we've available flow control, event when we have events + # pending for the stream ID we're attempting to send on. 
+ if stream_id is None or not self._events.get(stream_id): + events = self._read_incoming_data(request) + for event in events: + if isinstance(event, h2.events.RemoteSettingsChanged): + with Trace( + "receive_remote_settings", logger, request + ) as trace: + self._receive_remote_settings_change(event) + trace.return_value = event + + elif isinstance( + event, + ( + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ), + ): + if event.stream_id in self._events: + self._events[event.stream_id].append(event) + + elif isinstance(event, h2.events.ConnectionTerminated): + self._connection_terminated = event + + self._write_outgoing_data(request) + + def _receive_remote_settings_change( + self, event: h2.events.RemoteSettingsChanged + ) -> None: + max_concurrent_streams = event.changed_settings.get( + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS + ) + if max_concurrent_streams: + new_max_streams = min( + max_concurrent_streams.new_value, + self._h2_state.local_settings.max_concurrent_streams, + ) + if new_max_streams and new_max_streams != self._max_streams: + while new_max_streams > self._max_streams: + self._max_streams_semaphore.release() + self._max_streams += 1 + while new_max_streams < self._max_streams: + self._max_streams_semaphore.acquire() + self._max_streams -= 1 + + def _response_closed(self, stream_id: int) -> None: + self._max_streams_semaphore.release() + del self._events[stream_id] + with self._state_lock: + if self._connection_terminated and not self._events: + self.close() + + elif self._state == HTTPConnectionState.ACTIVE and not self._events: + self._state = HTTPConnectionState.IDLE + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + if self._used_all_stream_ids: # pragma: nocover + self.close() + + def close(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._h2_state.close_connection() + self._state = HTTPConnectionState.CLOSED + self._network_stream.close() + + # Wrappers around network read/write operations... + + def _read_incoming_data(self, request: Request) -> list[h2.events.Event]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + if self._read_exception is not None: + raise self._read_exception # pragma: nocover + + try: + data = self._network_stream.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + except Exception as exc: + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future reads. + # (For example, this means that a single read timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. 
+ self._read_exception = exc + self._connection_error = True + raise exc + + events: list[h2.events.Event] = self._h2_state.receive_data(data) + + return events + + def _write_outgoing_data(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + with self._write_lock: + data_to_send = self._h2_state.data_to_send() + + if self._write_exception is not None: + raise self._write_exception # pragma: nocover + + try: + self._network_stream.write(data_to_send, timeout) + except Exception as exc: # pragma: nocover + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future write. + # (For example, this means that a single write timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._write_exception = exc + self._connection_error = True + raise exc + + # Flow control... + + def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. + https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow: int = self._h2_state.local_flow_control_window(stream_id) + max_frame_size: int = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + while flow == 0: + self._receive_events(request) + local_flow = self._h2_state.local_flow_control_window(stream_id) + max_frame_size = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + return flow + + # Interface for connection pooling... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + return ( + self._state != HTTPConnectionState.CLOSED + and not self._connection_error + and not self._used_all_stream_ids + and not ( + self._h2_state.state_machine.state + == h2.connection.ConnectionState.CLOSED + ) + ) + + def has_expired(self) -> bool: + now = time.monotonic() + return self._expire_at is not None and now > self._expire_at + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/2, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
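+    #
+    # Unlike HTTP/1.1, a single HTTP/2 connection can service several
+    # requests concurrently: each request is sent on its own stream ID, and
+    # the semaphore set up in `handle_request` caps the number of in-flight
+    # streams at the negotiated MAX_CONCURRENT_STREAMS limit.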
+ + def __enter__(self) -> HTTP2Connection: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self.close() + + +class HTTP2ConnectionByteStream: + def __init__( + self, connection: HTTP2Connection, request: Request, stream_id: int + ) -> None: + self._connection = connection + self._request = request + self._stream_id = stream_id + self._closed = False + + def __iter__(self) -> typing.Iterator[bytes]: + kwargs = {"request": self._request, "stream_id": self._stream_id} + try: + with Trace("receive_response_body", logger, self._request, kwargs): + for chunk in self._connection._receive_response_body( + request=self._request, stream_id=self._stream_id + ): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. + with ShieldCancellation(): + self.close() + raise exc + + def close(self) -> None: + if not self._closed: + self._closed = True + kwargs = {"stream_id": self._stream_id} + with Trace("response_closed", logger, self._request, kwargs): + self._connection._response_closed(stream_id=self._stream_id) diff --git a/.venv/lib/python3.12/site-packages/httpcore/_sync/http_proxy.py b/.venv/lib/python3.12/site-packages/httpcore/_sync/http_proxy.py new file mode 100644 index 0000000..ecca88f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_sync/http_proxy.py @@ -0,0 +1,367 @@ +from __future__ import annotations + +import base64 +import logging +import ssl +import typing + +from .._backends.base import SOCKET_OPTION, NetworkBackend +from .._exceptions import ProxyError +from .._models import ( + URL, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, +) +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .connection import HTTPConnection +from .connection_pool import ConnectionPool +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface + +ByteOrStr = typing.Union[bytes, str] +HeadersAsSequence = typing.Sequence[typing.Tuple[ByteOrStr, ByteOrStr]] +HeadersAsMapping = typing.Mapping[ByteOrStr, ByteOrStr] + + +logger = logging.getLogger("httpcore.proxy") + + +def merge_headers( + default_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, + override_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, +) -> list[tuple[bytes, bytes]]: + """ + Append default_headers and override_headers, de-duplicating if a key exists + in both cases. + """ + default_headers = [] if default_headers is None else list(default_headers) + override_headers = [] if override_headers is None else list(override_headers) + has_override = set(key.lower() for key, value in override_headers) + default_headers = [ + (key, value) + for key, value in default_headers + if key.lower() not in has_override + ] + return default_headers + override_headers + + +class HTTPProxy(ConnectionPool): # pragma: nocover + """ + A connection pool that sends requests via an HTTP proxy. 
+ """ + + def __init__( + self, + proxy_url: URL | bytes | str, + proxy_auth: tuple[bytes | str, bytes | str] | None = None, + proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None, + ssl_context: ssl.SSLContext | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"http://127.0.0.1:8080/"`. + proxy_auth: Any proxy authentication as a two-tuple of + (username, password). May be either bytes or ascii-only str. + proxy_headers: Any HTTP headers to use for the proxy requests. + For example `{"Proxy-Authorization": "Basic :"}`. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + local_address: Local address to connect from. Can also be used to + connect using a particular address family. Using + `local_address="0.0.0.0"` will connect using an `AF_INET` address + (IPv4), while using `local_address="::"` will connect using an + `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. 
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + local_address=local_address, + uds=uds, + socket_options=socket_options, + ) + + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if ( + self._proxy_url.scheme == b"http" and proxy_ssl_context is not None + ): # pragma: no cover + raise RuntimeError( + "The `proxy_ssl_context` argument is not allowed for the http scheme" + ) + + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + if proxy_auth is not None: + username = enforce_bytes(proxy_auth[0], name="proxy_auth") + password = enforce_bytes(proxy_auth[1], name="proxy_auth") + userpass = username + b":" + password + authorization = b"Basic " + base64.b64encode(userpass) + self._proxy_headers = [ + (b"Proxy-Authorization", authorization) + ] + self._proxy_headers + + def create_connection(self, origin: Origin) -> ConnectionInterface: + if origin.scheme == b"http": + return ForwardHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + keepalive_expiry=self._keepalive_expiry, + network_backend=self._network_backend, + proxy_ssl_context=self._proxy_ssl_context, + ) + return TunnelHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + ssl_context=self._ssl_context, + proxy_ssl_context=self._proxy_ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class ForwardHTTPConnection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None, + keepalive_expiry: float | None = None, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + ) -> None: + self._connection = HTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._remote_origin = remote_origin + + def handle_request(self, request: Request) -> Response: + headers = merge_headers(self._proxy_headers, request.headers) + url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=bytes(request.url), + ) + proxy_request = Request( + method=request.method, + url=url, + headers=headers, + content=request.stream, + extensions=request.extensions, + ) + return self._connection.handle_request(proxy_request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + self._connection.close() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() 
+ + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + +class TunnelHTTPConnection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + ssl_context: ssl.SSLContext | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + proxy_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + self._connection: ConnectionInterface = HTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._connect_lock = Lock() + self._connected = False + + def handle_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("connect", None) + + with self._connect_lock: + if not self._connected: + target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) + + connect_url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=target, + ) + connect_headers = merge_headers( + [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers + ) + connect_request = Request( + method=b"CONNECT", + url=connect_url, + headers=connect_headers, + extensions=request.extensions, + ) + connect_response = self._connection.handle_request( + connect_request + ) + + if connect_response.status < 200 or connect_response.status > 299: + reason_bytes = connect_response.extensions.get("reason_phrase", b"") + reason_str = reason_bytes.decode("ascii", errors="ignore") + msg = "%d %s" % (connect_response.status, reason_str) + self._connection.close() + raise ProxyError(msg) + + stream = connect_response.extensions["network_stream"] + + # Upgrade the stream to SSL + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + + self._connected = True + return self._connection.handle_request(request) + + def can_handle_request(self, 
origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + self._connection.close() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/.venv/lib/python3.12/site-packages/httpcore/_sync/interfaces.py b/.venv/lib/python3.12/site-packages/httpcore/_sync/interfaces.py new file mode 100644 index 0000000..e673d4c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_sync/interfaces.py @@ -0,0 +1,137 @@ +from __future__ import annotations + +import contextlib +import typing + +from .._models import ( + URL, + Extensions, + HeaderTypes, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, + include_request_headers, +) + + +class RequestInterface: + def request( + self, + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes | typing.Iterator[bytes] | None = None, + extensions: Extensions | None = None, + ) -> Response: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = self.handle_request(request) + try: + response.read() + finally: + response.close() + return response + + @contextlib.contextmanager + def stream( + self, + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes | typing.Iterator[bytes] | None = None, + extensions: Extensions | None = None, + ) -> typing.Iterator[Response]: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = self.handle_request(request) + try: + yield response + finally: + response.close() + + def handle_request(self, request: Request) -> Response: + raise NotImplementedError() # pragma: nocover + + +class ConnectionInterface(RequestInterface): + def close(self) -> None: + raise NotImplementedError() # pragma: nocover + + def info(self) -> str: + raise NotImplementedError() # pragma: nocover + + def can_handle_request(self, origin: Origin) -> bool: + raise NotImplementedError() # pragma: nocover + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an + outgoing request. + + An HTTP/1.1 connection will only be available if it is currently idle. + + An HTTP/2 connection will be available so long as the stream ID space is + not yet exhausted, and the connection is not in an error state. 
+ + While the connection is being established we may not yet know if it is going + to result in an HTTP/1.1 or HTTP/2 connection. The connection should be + treated as being available, but might ultimately raise `NewConnectionRequired` + required exceptions if multiple requests are attempted over a connection + that ends up being established as HTTP/1.1. + """ + raise NotImplementedError() # pragma: nocover + + def has_expired(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + + This either means that the connection is idle and it has passed the + expiry time on its keep-alive, or that server has sent an EOF. + """ + raise NotImplementedError() # pragma: nocover + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + raise NotImplementedError() # pragma: nocover + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + + Used when a response is closed to determine if the connection may be + returned to the connection pool or not. + """ + raise NotImplementedError() # pragma: nocover diff --git a/.venv/lib/python3.12/site-packages/httpcore/_sync/socks_proxy.py b/.venv/lib/python3.12/site-packages/httpcore/_sync/socks_proxy.py new file mode 100644 index 0000000..0ca96dd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_sync/socks_proxy.py @@ -0,0 +1,341 @@ +from __future__ import annotations + +import logging +import ssl + +import socksio + +from .._backends.sync import SyncBackend +from .._backends.base import NetworkBackend, NetworkStream +from .._exceptions import ConnectionNotAvailable, ProxyError +from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .connection_pool import ConnectionPool +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface + +logger = logging.getLogger("httpcore.socks") + + +AUTH_METHODS = { + b"\x00": "NO AUTHENTICATION REQUIRED", + b"\x01": "GSSAPI", + b"\x02": "USERNAME/PASSWORD", + b"\xff": "NO ACCEPTABLE METHODS", +} + +REPLY_CODES = { + b"\x00": "Succeeded", + b"\x01": "General SOCKS server failure", + b"\x02": "Connection not allowed by ruleset", + b"\x03": "Network unreachable", + b"\x04": "Host unreachable", + b"\x05": "Connection refused", + b"\x06": "TTL expired", + b"\x07": "Command not supported", + b"\x08": "Address type not supported", +} + + +def _init_socks5_connection( + stream: NetworkStream, + *, + host: bytes, + port: int, + auth: tuple[bytes, bytes] | None = None, +) -> None: + conn = socksio.socks5.SOCKS5Connection() + + # Auth method request + auth_method = ( + socksio.socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED + if auth is None + else socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD + ) + conn.send(socksio.socks5.SOCKS5AuthMethodsRequest([auth_method])) + outgoing_bytes = conn.data_to_send() + stream.write(outgoing_bytes) + + # Auth method response + incoming_bytes = stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socksio.socks5.SOCKS5AuthReply) + if response.method != auth_method: + requested = AUTH_METHODS.get(auth_method, "UNKNOWN") + responded = AUTH_METHODS.get(response.method, "UNKNOWN") + raise ProxyError( + f"Requested {requested} from proxy server, but got {responded}." 
+ ) + + if response.method == socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD: + # Username/password request + assert auth is not None + username, password = auth + conn.send(socksio.socks5.SOCKS5UsernamePasswordRequest(username, password)) + outgoing_bytes = conn.data_to_send() + stream.write(outgoing_bytes) + + # Username/password response + incoming_bytes = stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socksio.socks5.SOCKS5UsernamePasswordReply) + if not response.success: + raise ProxyError("Invalid username/password") + + # Connect request + conn.send( + socksio.socks5.SOCKS5CommandRequest.from_address( + socksio.socks5.SOCKS5Command.CONNECT, (host, port) + ) + ) + outgoing_bytes = conn.data_to_send() + stream.write(outgoing_bytes) + + # Connect response + incoming_bytes = stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socksio.socks5.SOCKS5Reply) + if response.reply_code != socksio.socks5.SOCKS5ReplyCode.SUCCEEDED: + reply_code = REPLY_CODES.get(response.reply_code, "UNKOWN") + raise ProxyError(f"Proxy Server could not connect: {reply_code}.") + + +class SOCKSProxy(ConnectionPool): # pragma: nocover + """ + A connection pool that sends requests via an HTTP proxy. + """ + + def __init__( + self, + proxy_url: URL | bytes | str, + proxy_auth: tuple[bytes | str, bytes | str] | None = None, + ssl_context: ssl.SSLContext | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + network_backend: NetworkBackend | None = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"http://127.0.0.1:8080/"`. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + local_address: Local address to connect from. Can also be used to + connect using a particular address family. Using + `local_address="0.0.0.0"` will connect using an `AF_INET` address + (IPv4), while using `local_address="::"` will connect using an + `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. 
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + ) + self._ssl_context = ssl_context + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if proxy_auth is not None: + username, password = proxy_auth + username_bytes = enforce_bytes(username, name="proxy_auth") + password_bytes = enforce_bytes(password, name="proxy_auth") + self._proxy_auth: tuple[bytes, bytes] | None = ( + username_bytes, + password_bytes, + ) + else: + self._proxy_auth = None + + def create_connection(self, origin: Origin) -> ConnectionInterface: + return Socks5Connection( + proxy_origin=self._proxy_url.origin, + remote_origin=origin, + proxy_auth=self._proxy_auth, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class Socks5Connection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_auth: tuple[bytes, bytes] | None = None, + ssl_context: ssl.SSLContext | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + network_backend: NetworkBackend | None = None, + ) -> None: + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._proxy_auth = proxy_auth + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + + self._network_backend: NetworkBackend = ( + SyncBackend() if network_backend is None else network_backend + ) + self._connect_lock = Lock() + self._connection: ConnectionInterface | None = None + self._connect_failed = False + + def handle_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + with self._connect_lock: + if self._connection is None: + try: + # Connect to the proxy + kwargs = { + "host": self._proxy_origin.host.decode("ascii"), + "port": self._proxy_origin.port, + "timeout": timeout, + } + with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + + # Connect to the remote host using socks5 + kwargs = { + "stream": stream, + "host": self._remote_origin.host.decode("ascii"), + "port": self._remote_origin.port, + "auth": self._proxy_auth, + } + with Trace( + "setup_socks5_connection", logger, request, kwargs + ) as trace: + _init_socks5_connection(**kwargs) + trace.return_value = stream + + # Upgrade the stream to SSL + if self._remote_origin.scheme == b"https": + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ( + ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ) + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and 
ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or ( + self._http2 and not self._http1 + ): # pragma: nocover + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except Exception as exc: + self._connect_failed = True + raise exc + elif not self._connection.is_available(): # pragma: nocover + raise ConnectionNotAvailable() + + return self._connection.handle_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + if self._connection is not None: + self._connection.close() + + def is_available(self) -> bool: + if self._connection is None: # pragma: nocover + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._remote_origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: # pragma: nocover + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/.venv/lib/python3.12/site-packages/httpcore/_synchronization.py b/.venv/lib/python3.12/site-packages/httpcore/_synchronization.py new file mode 100644 index 0000000..2ecc9e9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_synchronization.py @@ -0,0 +1,318 @@ +from __future__ import annotations + +import threading +import types + +from ._exceptions import ExceptionMapping, PoolTimeout, map_exceptions + +# Our async synchronization primatives use either 'anyio' or 'trio' depending +# on if they're running under asyncio or trio. + +try: + import trio +except (ImportError, NotImplementedError): # pragma: nocover + trio = None # type: ignore + +try: + import anyio +except ImportError: # pragma: nocover + anyio = None # type: ignore + + +def current_async_library() -> str: + # Determine if we're running under trio or asyncio. + # See https://sniffio.readthedocs.io/en/latest/ + try: + import sniffio + except ImportError: # pragma: nocover + environment = "asyncio" + else: + environment = sniffio.current_async_library() + + if environment not in ("asyncio", "trio"): # pragma: nocover + raise RuntimeError("Running under an unsupported async environment.") + + if environment == "asyncio" and anyio is None: # pragma: nocover + raise RuntimeError( + "Running with asyncio requires installation of 'httpcore[asyncio]'." 
+ ) + + if environment == "trio" and trio is None: # pragma: nocover + raise RuntimeError( + "Running with trio requires installation of 'httpcore[trio]'." + ) + + return environment + + +class AsyncLock: + """ + This is a standard lock. + + In the sync case `Lock` provides thread locking. + In the async case `AsyncLock` provides async locking. + """ + + def __init__(self) -> None: + self._backend = "" + + def setup(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a lock with the correct implementation. + """ + self._backend = current_async_library() + if self._backend == "trio": + self._trio_lock = trio.Lock() + elif self._backend == "asyncio": + self._anyio_lock = anyio.Lock() + + async def __aenter__(self) -> AsyncLock: + if not self._backend: + self.setup() + + if self._backend == "trio": + await self._trio_lock.acquire() + elif self._backend == "asyncio": + await self._anyio_lock.acquire() + + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + if self._backend == "trio": + self._trio_lock.release() + elif self._backend == "asyncio": + self._anyio_lock.release() + + +class AsyncThreadLock: + """ + This is a threading-only lock for no-I/O contexts. + + In the sync case `ThreadLock` provides thread locking. + In the async case `AsyncThreadLock` is a no-op. + """ + + def __enter__(self) -> AsyncThreadLock: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + pass + + +class AsyncEvent: + def __init__(self) -> None: + self._backend = "" + + def setup(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a lock with the correct implementation. + """ + self._backend = current_async_library() + if self._backend == "trio": + self._trio_event = trio.Event() + elif self._backend == "asyncio": + self._anyio_event = anyio.Event() + + def set(self) -> None: + if not self._backend: + self.setup() + + if self._backend == "trio": + self._trio_event.set() + elif self._backend == "asyncio": + self._anyio_event.set() + + async def wait(self, timeout: float | None = None) -> None: + if not self._backend: + self.setup() + + if self._backend == "trio": + trio_exc_map: ExceptionMapping = {trio.TooSlowError: PoolTimeout} + timeout_or_inf = float("inf") if timeout is None else timeout + with map_exceptions(trio_exc_map): + with trio.fail_after(timeout_or_inf): + await self._trio_event.wait() + elif self._backend == "asyncio": + anyio_exc_map: ExceptionMapping = {TimeoutError: PoolTimeout} + with map_exceptions(anyio_exc_map): + with anyio.fail_after(timeout): + await self._anyio_event.wait() + + +class AsyncSemaphore: + def __init__(self, bound: int) -> None: + self._bound = bound + self._backend = "" + + def setup(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a semaphore with the correct implementation. 
+ """ + self._backend = current_async_library() + if self._backend == "trio": + self._trio_semaphore = trio.Semaphore( + initial_value=self._bound, max_value=self._bound + ) + elif self._backend == "asyncio": + self._anyio_semaphore = anyio.Semaphore( + initial_value=self._bound, max_value=self._bound + ) + + async def acquire(self) -> None: + if not self._backend: + self.setup() + + if self._backend == "trio": + await self._trio_semaphore.acquire() + elif self._backend == "asyncio": + await self._anyio_semaphore.acquire() + + async def release(self) -> None: + if self._backend == "trio": + self._trio_semaphore.release() + elif self._backend == "asyncio": + self._anyio_semaphore.release() + + +class AsyncShieldCancellation: + # For certain portions of our codebase where we're dealing with + # closing connections during exception handling we want to shield + # the operation from being cancelled. + # + # with AsyncShieldCancellation(): + # ... # clean-up operations, shielded from cancellation. + + def __init__(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a shielded scope with the correct implementation. + """ + self._backend = current_async_library() + + if self._backend == "trio": + self._trio_shield = trio.CancelScope(shield=True) + elif self._backend == "asyncio": + self._anyio_shield = anyio.CancelScope(shield=True) + + def __enter__(self) -> AsyncShieldCancellation: + if self._backend == "trio": + self._trio_shield.__enter__() + elif self._backend == "asyncio": + self._anyio_shield.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + if self._backend == "trio": + self._trio_shield.__exit__(exc_type, exc_value, traceback) + elif self._backend == "asyncio": + self._anyio_shield.__exit__(exc_type, exc_value, traceback) + + +# Our thread-based synchronization primitives... + + +class Lock: + """ + This is a standard lock. + + In the sync case `Lock` provides thread locking. + In the async case `AsyncLock` provides async locking. + """ + + def __init__(self) -> None: + self._lock = threading.Lock() + + def __enter__(self) -> Lock: + self._lock.acquire() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self._lock.release() + + +class ThreadLock: + """ + This is a threading-only lock for no-I/O contexts. + + In the sync case `ThreadLock` provides thread locking. + In the async case `AsyncThreadLock` is a no-op. 
+ """ + + def __init__(self) -> None: + self._lock = threading.Lock() + + def __enter__(self) -> ThreadLock: + self._lock.acquire() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self._lock.release() + + +class Event: + def __init__(self) -> None: + self._event = threading.Event() + + def set(self) -> None: + self._event.set() + + def wait(self, timeout: float | None = None) -> None: + if timeout == float("inf"): # pragma: no cover + timeout = None + if not self._event.wait(timeout=timeout): + raise PoolTimeout() # pragma: nocover + + +class Semaphore: + def __init__(self, bound: int) -> None: + self._semaphore = threading.Semaphore(value=bound) + + def acquire(self) -> None: + self._semaphore.acquire() + + def release(self) -> None: + self._semaphore.release() + + +class ShieldCancellation: + # Thread-synchronous codebases don't support cancellation semantics. + # We have this class because we need to mirror the async and sync + # cases within our package, but it's just a no-op. + def __enter__(self) -> ShieldCancellation: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + pass diff --git a/.venv/lib/python3.12/site-packages/httpcore/_trace.py b/.venv/lib/python3.12/site-packages/httpcore/_trace.py new file mode 100644 index 0000000..5f1cd7c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_trace.py @@ -0,0 +1,107 @@ +from __future__ import annotations + +import inspect +import logging +import types +import typing + +from ._models import Request + + +class Trace: + def __init__( + self, + name: str, + logger: logging.Logger, + request: Request | None = None, + kwargs: dict[str, typing.Any] | None = None, + ) -> None: + self.name = name + self.logger = logger + self.trace_extension = ( + None if request is None else request.extensions.get("trace") + ) + self.debug = self.logger.isEnabledFor(logging.DEBUG) + self.kwargs = kwargs or {} + self.return_value: typing.Any = None + self.should_trace = self.debug or self.trace_extension is not None + self.prefix = self.logger.name.split(".")[-1] + + def trace(self, name: str, info: dict[str, typing.Any]) -> None: + if self.trace_extension is not None: + prefix_and_name = f"{self.prefix}.{name}" + ret = self.trace_extension(prefix_and_name, info) + if inspect.iscoroutine(ret): # pragma: no cover + raise TypeError( + "If you are using a synchronous interface, " + "the callback of the `trace` extension should " + "be a normal function instead of an asynchronous function." 
+ ) + + if self.debug: + if not info or "return_value" in info and info["return_value"] is None: + message = name + else: + args = " ".join([f"{key}={value!r}" for key, value in info.items()]) + message = f"{name} {args}" + self.logger.debug(message) + + def __enter__(self) -> Trace: + if self.should_trace: + info = self.kwargs + self.trace(f"{self.name}.started", info) + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + if self.should_trace: + if exc_value is None: + info = {"return_value": self.return_value} + self.trace(f"{self.name}.complete", info) + else: + info = {"exception": exc_value} + self.trace(f"{self.name}.failed", info) + + async def atrace(self, name: str, info: dict[str, typing.Any]) -> None: + if self.trace_extension is not None: + prefix_and_name = f"{self.prefix}.{name}" + coro = self.trace_extension(prefix_and_name, info) + if not inspect.iscoroutine(coro): # pragma: no cover + raise TypeError( + "If you're using an asynchronous interface, " + "the callback of the `trace` extension should " + "be an asynchronous function rather than a normal function." + ) + await coro + + if self.debug: + if not info or "return_value" in info and info["return_value"] is None: + message = name + else: + args = " ".join([f"{key}={value!r}" for key, value in info.items()]) + message = f"{name} {args}" + self.logger.debug(message) + + async def __aenter__(self) -> Trace: + if self.should_trace: + info = self.kwargs + await self.atrace(f"{self.name}.started", info) + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + if self.should_trace: + if exc_value is None: + info = {"return_value": self.return_value} + await self.atrace(f"{self.name}.complete", info) + else: + info = {"exception": exc_value} + await self.atrace(f"{self.name}.failed", info) diff --git a/.venv/lib/python3.12/site-packages/httpcore/_utils.py b/.venv/lib/python3.12/site-packages/httpcore/_utils.py new file mode 100644 index 0000000..c44ff93 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpcore/_utils.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +import select +import socket +import sys + + +def is_socket_readable(sock: socket.socket | None) -> bool: + """ + Return whether a socket, as identifed by its file descriptor, is readable. + "A socket is readable" means that the read buffer isn't empty, i.e. that calling + .recv() on it would immediately return some data. + """ + # NOTE: we want check for readability without actually attempting to read, because + # we don't want to block forever if it's not readable. + + # In the case that the socket no longer exists, or cannot return a file + # descriptor, we treat it as being readable, as if it the next read operation + # on it is ready to return the terminating `b""`. + sock_fd = None if sock is None else sock.fileno() + if sock_fd is None or sock_fd < 0: # pragma: nocover + return True + + # The implementation below was stolen from: + # https://github.com/python-trio/trio/blob/20ee2b1b7376db637435d80e266212a35837ddcc/trio/_socket.py#L471-L478 + # See also: https://github.com/encode/httpcore/pull/193#issuecomment-703129316 + + # Use select.select on Windows, and when poll is unavailable and select.poll + # everywhere else. (E.g. When eventlet is in use. 
See #327) + if ( + sys.platform == "win32" or getattr(select, "poll", None) is None + ): # pragma: nocover + rready, _, _ = select.select([sock_fd], [], [], 0) + return bool(rready) + p = select.poll() + p.register(sock_fd, select.POLLIN) + return bool(p.poll(0)) diff --git a/.venv/lib/python3.12/site-packages/httpcore/py.typed b/.venv/lib/python3.12/site-packages/httpcore/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/METADATA b/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/METADATA new file mode 100644 index 0000000..b0d2b19 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/METADATA @@ -0,0 +1,203 @@ +Metadata-Version: 2.3 +Name: httpx +Version: 0.28.1 +Summary: The next generation HTTP client. +Project-URL: Changelog, https://github.com/encode/httpx/blob/master/CHANGELOG.md +Project-URL: Documentation, https://www.python-httpx.org +Project-URL: Homepage, https://github.com/encode/httpx +Project-URL: Source, https://github.com/encode/httpx +Author-email: Tom Christie +License: BSD-3-Clause +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Web Environment +Classifier: Framework :: AsyncIO +Classifier: Framework :: Trio +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Internet :: WWW/HTTP +Requires-Python: >=3.8 +Requires-Dist: anyio +Requires-Dist: certifi +Requires-Dist: httpcore==1.* +Requires-Dist: idna +Provides-Extra: brotli +Requires-Dist: brotli; (platform_python_implementation == 'CPython') and extra == 'brotli' +Requires-Dist: brotlicffi; (platform_python_implementation != 'CPython') and extra == 'brotli' +Provides-Extra: cli +Requires-Dist: click==8.*; extra == 'cli' +Requires-Dist: pygments==2.*; extra == 'cli' +Requires-Dist: rich<14,>=10; extra == 'cli' +Provides-Extra: http2 +Requires-Dist: h2<5,>=3; extra == 'http2' +Provides-Extra: socks +Requires-Dist: socksio==1.*; extra == 'socks' +Provides-Extra: zstd +Requires-Dist: zstandard>=0.18.0; extra == 'zstd' +Description-Content-Type: text/markdown + +

+(logo: HTTPX)
+
+HTTPX - A next-generation HTTP client for Python.
+
+(badges: Test Suite, Package version)
+
+
+HTTPX is a fully featured HTTP client library for Python 3. It includes **an integrated command line client**, has support for both **HTTP/1.1 and HTTP/2**, and provides both **sync and async APIs**.
+
+---
+
+Install HTTPX using pip:
+
+```shell
+$ pip install httpx
+```
+
+Now, let's get started:
+
+```pycon
+>>> import httpx
+>>> r = httpx.get('https://www.example.org/')
+>>> r
+<Response [200 OK]>
+>>> r.status_code
+200
+>>> r.headers['content-type']
+'text/html; charset=UTF-8'
+>>> r.text
+'<!doctype html>\n<html>\n<head>\n<title>Example Domain</title>...'
+```
+
+Or, using the command-line client.
+
+```shell
+$ pip install 'httpx[cli]' # The command line client is an optional dependency.
+```
+
+Which now allows us to use HTTPX directly from the command-line...
+

+(screenshot: `httpx --help`)
+
+Sending a request...
+
+(screenshot: `httpx http://httpbin.org/json`)
+
+ +## Features + +HTTPX builds on the well-established usability of `requests`, and gives you: + +* A broadly [requests-compatible API](https://www.python-httpx.org/compatibility/). +* An integrated command-line client. +* HTTP/1.1 [and HTTP/2 support](https://www.python-httpx.org/http2/). +* Standard synchronous interface, but with [async support if you need it](https://www.python-httpx.org/async/). +* Ability to make requests directly to [WSGI applications](https://www.python-httpx.org/advanced/transports/#wsgi-transport) or [ASGI applications](https://www.python-httpx.org/advanced/transports/#asgi-transport). +* Strict timeouts everywhere. +* Fully type annotated. +* 100% test coverage. + +Plus all the standard features of `requests`... + +* International Domains and URLs +* Keep-Alive & Connection Pooling +* Sessions with Cookie Persistence +* Browser-style SSL Verification +* Basic/Digest Authentication +* Elegant Key/Value Cookies +* Automatic Decompression +* Automatic Content Decoding +* Unicode Response Bodies +* Multipart File Uploads +* HTTP(S) Proxy Support +* Connection Timeouts +* Streaming Downloads +* .netrc Support +* Chunked Requests + +## Installation + +Install with pip: + +```shell +$ pip install httpx +``` + +Or, to include the optional HTTP/2 support, use: + +```shell +$ pip install httpx[http2] +``` + +HTTPX requires Python 3.8+. + +## Documentation + +Project documentation is available at [https://www.python-httpx.org/](https://www.python-httpx.org/). + +For a run-through of all the basics, head over to the [QuickStart](https://www.python-httpx.org/quickstart/). + +For more advanced topics, see the [Advanced Usage](https://www.python-httpx.org/advanced/) section, the [async support](https://www.python-httpx.org/async/) section, or the [HTTP/2](https://www.python-httpx.org/http2/) section. + +The [Developer Interface](https://www.python-httpx.org/api/) provides a comprehensive API reference. + +To find out about tools that integrate with HTTPX, see [Third Party Packages](https://www.python-httpx.org/third_party_packages/). + +## Contribute + +If you want to contribute with HTTPX check out the [Contributing Guide](https://www.python-httpx.org/contributing/) to learn how to start. + +## Dependencies + +The HTTPX project relies on these excellent libraries: + +* `httpcore` - The underlying transport implementation for `httpx`. + * `h11` - HTTP/1.1 support. +* `certifi` - SSL certificates. +* `idna` - Internationalized domain name support. +* `sniffio` - Async library autodetection. + +As well as these optional installs: + +* `h2` - HTTP/2 support. *(Optional, with `httpx[http2]`)* +* `socksio` - SOCKS proxy support. *(Optional, with `httpx[socks]`)* +* `rich` - Rich terminal support. *(Optional, with `httpx[cli]`)* +* `click` - Command line client support. *(Optional, with `httpx[cli]`)* +* `brotli` or `brotlicffi` - Decoding for "brotli" compressed responses. *(Optional, with `httpx[brotli]`)* +* `zstandard` - Decoding for "zstd" compressed responses. *(Optional, with `httpx[zstd]`)* + +A huge amount of credit is due to `requests` for the API layout that +much of this work follows, as well as to `urllib3` for plenty of design +inspiration around the lower-level networking details. + +--- + +
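A minimal sketch of the async API highlighted in the README above, assuming an asyncio entry point and network access to the example host:

```python
import asyncio

import httpx


async def main() -> None:
    # AsyncClient mirrors the synchronous Client, but is used with async/await.
    async with httpx.AsyncClient() as client:
        response = await client.get("https://www.example.org/")
        print(response.status_code, response.headers["content-type"])


asyncio.run(main())
```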

HTTPX is BSD licensed code.
Designed & crafted with care.

— 🦋 —

+ +## Release Information + +### Fixed + +* Reintroduced supposedly-private `URLTypes` shortcut. (#2673) + + +--- + +[Full changelog](https://github.com/encode/httpx/blob/master/CHANGELOG.md) diff --git a/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/RECORD b/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/RECORD new file mode 100644 index 0000000..99978c5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/RECORD @@ -0,0 +1,55 @@ +../../../bin/httpx,sha256=WGLHRDRsO0KnETx9LRIltoIFi9-i6WrcJisMxJj5_Hg,236 +httpx-0.28.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +httpx-0.28.1.dist-info/METADATA,sha256=_rubD48-gNV8gZnDBPNcQzboWB0dGNeYPJJ2a4J5OyU,7052 +httpx-0.28.1.dist-info/RECORD,, +httpx-0.28.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +httpx-0.28.1.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87 +httpx-0.28.1.dist-info/entry_points.txt,sha256=2lVkdQmxLA1pNMgSN2eV89o90HCZezhmNwsy6ryKDSA,37 +httpx-0.28.1.dist-info/licenses/LICENSE.md,sha256=TsWdVE8StfU5o6cW_TIaxYzNgDC0ZSIfLIgCAM3yjY0,1508 +httpx/__init__.py,sha256=CsaZe6yZj0rHg6322AWKWHGTMVr9txgEfD5P3_Rrz60,2171 +httpx/__pycache__/__init__.cpython-312.pyc,, +httpx/__pycache__/__version__.cpython-312.pyc,, +httpx/__pycache__/_api.cpython-312.pyc,, +httpx/__pycache__/_auth.cpython-312.pyc,, +httpx/__pycache__/_client.cpython-312.pyc,, +httpx/__pycache__/_config.cpython-312.pyc,, +httpx/__pycache__/_content.cpython-312.pyc,, +httpx/__pycache__/_decoders.cpython-312.pyc,, +httpx/__pycache__/_exceptions.cpython-312.pyc,, +httpx/__pycache__/_main.cpython-312.pyc,, +httpx/__pycache__/_models.cpython-312.pyc,, +httpx/__pycache__/_multipart.cpython-312.pyc,, +httpx/__pycache__/_status_codes.cpython-312.pyc,, +httpx/__pycache__/_types.cpython-312.pyc,, +httpx/__pycache__/_urlparse.cpython-312.pyc,, +httpx/__pycache__/_urls.cpython-312.pyc,, +httpx/__pycache__/_utils.cpython-312.pyc,, +httpx/__version__.py,sha256=LoUyYeOXTieGzuP_64UL0wxdtxjuu_QbOvE7NOg-IqU,108 +httpx/_api.py,sha256=r_Zgs4jIpcPJLqK5dbbSayqo_iVMKFaxZCd-oOHxLEs,11743 +httpx/_auth.py,sha256=Yr3QwaUSK17rGYx-7j-FdicFIzz4Y9FFV-1F4-7RXX4,11891 +httpx/_client.py,sha256=xD-UG67-WMkeltAAOeGGj-cZ2RRTAm19sWRxlFY7_40,65714 +httpx/_config.py,sha256=pPp2U-wicfcKsF-KYRE1LYdt3e6ERGeIoXZ8Gjo3LWc,8547 +httpx/_content.py,sha256=LGGzrJTR3OvN4Mb1GVVNLXkXJH-6oKlwAttO9p5w_yg,8161 +httpx/_decoders.py,sha256=p0dX8I0NEHexs3UGp4SsZutiMhsXrrWl6-GnqVb0iKM,12041 +httpx/_exceptions.py,sha256=bxW7fxzgVMAdNTbwT0Vnq04gJDW1_gI_GFiQPuMyjL0,8527 +httpx/_main.py,sha256=Cg9GMabiTT_swaDfUgIRitSwxLRMSwUDOm7LdSGqlA4,15626 +httpx/_models.py,sha256=4__Guyv1gLxuZChwim8kfQNiIOcJ9acreFOSurvZfms,44700 +httpx/_multipart.py,sha256=KOHEZZl6oohg9mPaKyyu345qq1rJLg35TUG3YAzXB3Y,9843 +httpx/_status_codes.py,sha256=DYn-2ufBgMeXy5s8x3_TB7wjAuAAMewTakPrm5rXEsc,5639 +httpx/_transports/__init__.py,sha256=GbUoBSAOp7z-l-9j5YhMhR3DMIcn6FVLhj072O3Nnno,275 +httpx/_transports/__pycache__/__init__.cpython-312.pyc,, +httpx/_transports/__pycache__/asgi.cpython-312.pyc,, +httpx/_transports/__pycache__/base.cpython-312.pyc,, +httpx/_transports/__pycache__/default.cpython-312.pyc,, +httpx/_transports/__pycache__/mock.cpython-312.pyc,, +httpx/_transports/__pycache__/wsgi.cpython-312.pyc,, +httpx/_transports/asgi.py,sha256=HRfiDYMPt4wQH2gFgHZg4c-i3sblo6bL5GTqcET-xz8,5501 +httpx/_transports/base.py,sha256=kZS_VMbViYfF570pogUCJ1bulz-ybfL51Pqs9yktebU,2523 
+httpx/_transports/default.py,sha256=AzeaRUyVwCccTyyNJexDf0n1dFfzzydpdIQgvw7PLnk,13983 +httpx/_transports/mock.py,sha256=PTo0d567RITXxGrki6kN7_67wwAxfwiMDcuXJiZCjEo,1232 +httpx/_transports/wsgi.py,sha256=NcPX3Xap_EwCFZWO_OaSyQNuInCYx1QMNbO8GAei6jY,4825 +httpx/_types.py,sha256=Jyh41GQq7AOev8IOWKDAg7zCbvHAfufmW5g_PiTtErY,2965 +httpx/_urlparse.py,sha256=ZAmH47ONfkxrrj-PPYhGeiHjb6AjKCS-ANWIN4OL_KY,18546 +httpx/_urls.py,sha256=dX99VR1DSOHpgo9Aq7PzYO4FKdxqKjwyNp8grf8dHN0,21550 +httpx/_utils.py,sha256=_TVeqAKvxJkKHdz7dFeb4s0LZqQXgeFkXSgfiHBK_1o,8285 +httpx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/REQUESTED b/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/WHEEL new file mode 100644 index 0000000..21aaa72 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.26.3 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/entry_points.txt b/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/entry_points.txt new file mode 100644 index 0000000..8ae9600 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +httpx = httpx:main diff --git a/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/licenses/LICENSE.md b/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/licenses/LICENSE.md new file mode 100644 index 0000000..ab79d16 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/licenses/LICENSE.md @@ -0,0 +1,12 @@ +Copyright © 2019, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
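The `socksio` extra declared in the httpx metadata above is what backs the `SOCKSProxy` pool shown in the httpcore sources earlier in this diff. A minimal usage sketch, assuming `socksio` is installed, that `SOCKSProxy` is exported from the httpcore package root, and a hypothetical local SOCKS5 proxy on port 1080:

```python
import httpcore

# SOCKSProxy subclasses ConnectionPool, so it exposes the same
# request()/stream() methods defined on httpcore's RequestInterface.
proxy_url = "socks5://127.0.0.1:1080/"  # hypothetical proxy address

with httpcore.SOCKSProxy(proxy_url=proxy_url) as pool:
    response = pool.request("GET", "https://www.example.com/")
    print(response.status, response.headers)
```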
diff --git a/.venv/lib/python3.12/site-packages/httpx/__init__.py b/.venv/lib/python3.12/site-packages/httpx/__init__.py new file mode 100644 index 0000000..e9addde --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/__init__.py @@ -0,0 +1,105 @@ +from .__version__ import __description__, __title__, __version__ +from ._api import * +from ._auth import * +from ._client import * +from ._config import * +from ._content import * +from ._exceptions import * +from ._models import * +from ._status_codes import * +from ._transports import * +from ._types import * +from ._urls import * + +try: + from ._main import main +except ImportError: # pragma: no cover + + def main() -> None: # type: ignore + import sys + + print( + "The httpx command line client could not run because the required " + "dependencies were not installed.\nMake sure you've installed " + "everything with: pip install 'httpx[cli]'" + ) + sys.exit(1) + + +__all__ = [ + "__description__", + "__title__", + "__version__", + "ASGITransport", + "AsyncBaseTransport", + "AsyncByteStream", + "AsyncClient", + "AsyncHTTPTransport", + "Auth", + "BaseTransport", + "BasicAuth", + "ByteStream", + "Client", + "CloseError", + "codes", + "ConnectError", + "ConnectTimeout", + "CookieConflict", + "Cookies", + "create_ssl_context", + "DecodingError", + "delete", + "DigestAuth", + "get", + "head", + "Headers", + "HTTPError", + "HTTPStatusError", + "HTTPTransport", + "InvalidURL", + "Limits", + "LocalProtocolError", + "main", + "MockTransport", + "NetRCAuth", + "NetworkError", + "options", + "patch", + "PoolTimeout", + "post", + "ProtocolError", + "Proxy", + "ProxyError", + "put", + "QueryParams", + "ReadError", + "ReadTimeout", + "RemoteProtocolError", + "request", + "Request", + "RequestError", + "RequestNotRead", + "Response", + "ResponseNotRead", + "stream", + "StreamClosed", + "StreamConsumed", + "StreamError", + "SyncByteStream", + "Timeout", + "TimeoutException", + "TooManyRedirects", + "TransportError", + "UnsupportedProtocol", + "URL", + "USE_CLIENT_DEFAULT", + "WriteError", + "WriteTimeout", + "WSGITransport", +] + + +__locals = locals() +for __name in __all__: + if not __name.startswith("__"): + setattr(__locals[__name], "__module__", "httpx") # noqa diff --git a/.venv/lib/python3.12/site-packages/httpx/__version__.py b/.venv/lib/python3.12/site-packages/httpx/__version__.py new file mode 100644 index 0000000..801bfac --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/__version__.py @@ -0,0 +1,3 @@ +__title__ = "httpx" +__description__ = "A next generation HTTP client, for Python 3." 
+__version__ = "0.28.1" diff --git a/.venv/lib/python3.12/site-packages/httpx/_api.py b/.venv/lib/python3.12/site-packages/httpx/_api.py new file mode 100644 index 0000000..c3cda1e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_api.py @@ -0,0 +1,438 @@ +from __future__ import annotations + +import typing +from contextlib import contextmanager + +from ._client import Client +from ._config import DEFAULT_TIMEOUT_CONFIG +from ._models import Response +from ._types import ( + AuthTypes, + CookieTypes, + HeaderTypes, + ProxyTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestFiles, + TimeoutTypes, +) +from ._urls import URL + +if typing.TYPE_CHECKING: + import ssl # pragma: no cover + + +__all__ = [ + "delete", + "get", + "head", + "options", + "patch", + "post", + "put", + "request", + "stream", +] + + +def request( + method: str, + url: URL | str, + *, + params: QueryParamTypes | None = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + verify: ssl.SSLContext | str | bool = True, + trust_env: bool = True, +) -> Response: + """ + Sends an HTTP request. + + **Parameters:** + + * **method** - HTTP method for the new `Request` object: `GET`, `OPTIONS`, + `HEAD`, `POST`, `PUT`, `PATCH`, or `DELETE`. + * **url** - URL for the new `Request` object. + * **params** - *(optional)* Query parameters to include in the URL, as a + string, dictionary, or sequence of two-tuples. + * **content** - *(optional)* Binary content to include in the body of the + request, as bytes or a byte iterator. + * **data** - *(optional)* Form data to include in the body of the request, + as a dictionary. + * **files** - *(optional)* A dictionary of upload files to include in the + body of the request. + * **json** - *(optional)* A JSON serializable object to include in the body + of the request. + * **headers** - *(optional)* Dictionary of HTTP headers to include in the + request. + * **cookies** - *(optional)* Dictionary of Cookie items to include in the + request. + * **auth** - *(optional)* An authentication class to use when sending the + request. + * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. + * **timeout** - *(optional)* The timeout configuration to use when sending + the request. + * **follow_redirects** - *(optional)* Enables or disables HTTP redirects. + * **verify** - *(optional)* Either `True` to use an SSL context with the + default CA bundle, `False` to disable verification, or an instance of + `ssl.SSLContext` to use a custom context. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. 
+ + **Returns:** `Response` + + Usage: + + ``` + >>> import httpx + >>> response = httpx.request('GET', 'https://httpbin.org/get') + >>> response + + ``` + """ + with Client( + cookies=cookies, + proxy=proxy, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) as client: + return client.request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + auth=auth, + follow_redirects=follow_redirects, + ) + + +@contextmanager +def stream( + method: str, + url: URL | str, + *, + params: QueryParamTypes | None = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + verify: ssl.SSLContext | str | bool = True, + trust_env: bool = True, +) -> typing.Iterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + with Client( + cookies=cookies, + proxy=proxy, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) as client: + with client.stream( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + auth=auth, + follow_redirects=follow_redirects, + ) as response: + yield response + + +def get( + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + follow_redirects: bool = False, + verify: ssl.SSLContext | str | bool = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `GET` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `GET` requests should not include a request body. + """ + return request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + follow_redirects=follow_redirects, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def options( + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + follow_redirects: bool = False, + verify: ssl.SSLContext | str | bool = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `OPTIONS` requests should not include a request body. 
+ """ + return request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + follow_redirects=follow_redirects, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def head( + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + follow_redirects: bool = False, + verify: ssl.SSLContext | str | bool = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `HEAD` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `HEAD` requests should not include a request body. + """ + return request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + follow_redirects=follow_redirects, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def post( + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + follow_redirects: bool = False, + verify: ssl.SSLContext | str | bool = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `POST` request. + + **Parameters**: See `httpx.request`. + """ + return request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + follow_redirects=follow_redirects, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def put( + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + follow_redirects: bool = False, + verify: ssl.SSLContext | str | bool = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `PUT` request. + + **Parameters**: See `httpx.request`. + """ + return request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + follow_redirects=follow_redirects, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def patch( + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + follow_redirects: bool = False, + verify: ssl.SSLContext | str | bool = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `PATCH` request. + + **Parameters**: See `httpx.request`. 
+ """ + return request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + follow_redirects=follow_redirects, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def delete( + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + follow_redirects: bool = False, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + verify: ssl.SSLContext | str | bool = True, + trust_env: bool = True, +) -> Response: + """ + Sends a `DELETE` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `DELETE` requests should not include a request body. + """ + return request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + follow_redirects=follow_redirects, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) diff --git a/.venv/lib/python3.12/site-packages/httpx/_auth.py b/.venv/lib/python3.12/site-packages/httpx/_auth.py new file mode 100644 index 0000000..b03971a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_auth.py @@ -0,0 +1,348 @@ +from __future__ import annotations + +import hashlib +import os +import re +import time +import typing +from base64 import b64encode +from urllib.request import parse_http_list + +from ._exceptions import ProtocolError +from ._models import Cookies, Request, Response +from ._utils import to_bytes, to_str, unquote + +if typing.TYPE_CHECKING: # pragma: no cover + from hashlib import _Hash + + +__all__ = ["Auth", "BasicAuth", "DigestAuth", "NetRCAuth"] + + +class Auth: + """ + Base class for all authentication schemes. + + To implement a custom authentication scheme, subclass `Auth` and override + the `.auth_flow()` method. + + If the authentication scheme does I/O such as disk access or network calls, or uses + synchronization primitives such as locks, you should override `.sync_auth_flow()` + and/or `.async_auth_flow()` instead of `.auth_flow()` to provide specialized + implementations that will be used by `Client` and `AsyncClient` respectively. + """ + + requires_request_body = False + requires_response_body = False + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + """ + Execute the authentication flow. + + To dispatch a request, `yield` it: + + ``` + yield request + ``` + + The client will `.send()` the response back into the flow generator. You can + access it like so: + + ``` + response = yield request + ``` + + A `return` (or reaching the end of the generator) will result in the + client returning the last response obtained from the server. + + You can dispatch as many requests as is necessary. + """ + yield request + + def sync_auth_flow( + self, request: Request + ) -> typing.Generator[Request, Response, None]: + """ + Execute the authentication flow synchronously. + + By default, this defers to `.auth_flow()`. You should override this method + when the authentication scheme does I/O and/or uses concurrency primitives. 
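+
+ For reference, a minimal custom scheme built on the generator-based
+ `.auth_flow()` described above. The header name and token are illustrative,
+ not part of the library:
+
+ ```
+ import httpx
+
+ class TokenAuth(httpx.Auth):
+     def __init__(self, token: str) -> None:
+         self.token = token
+
+     def auth_flow(self, request):
+         # Attach the credential, then dispatch the request unchanged.
+         request.headers["X-API-Token"] = self.token
+         yield request
+
+ client = httpx.Client(auth=TokenAuth("my-secret-token"))
+ ```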
+ """ + if self.requires_request_body: + request.read() + + flow = self.auth_flow(request) + request = next(flow) + + while True: + response = yield request + if self.requires_response_body: + response.read() + + try: + request = flow.send(response) + except StopIteration: + break + + async def async_auth_flow( + self, request: Request + ) -> typing.AsyncGenerator[Request, Response]: + """ + Execute the authentication flow asynchronously. + + By default, this defers to `.auth_flow()`. You should override this method + when the authentication scheme does I/O and/or uses concurrency primitives. + """ + if self.requires_request_body: + await request.aread() + + flow = self.auth_flow(request) + request = next(flow) + + while True: + response = yield request + if self.requires_response_body: + await response.aread() + + try: + request = flow.send(response) + except StopIteration: + break + + +class FunctionAuth(Auth): + """ + Allows the 'auth' argument to be passed as a simple callable function, + that takes the request, and returns a new, modified request. + """ + + def __init__(self, func: typing.Callable[[Request], Request]) -> None: + self._func = func + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + yield self._func(request) + + +class BasicAuth(Auth): + """ + Allows the 'auth' argument to be passed as a (username, password) pair, + and uses HTTP Basic authentication. + """ + + def __init__(self, username: str | bytes, password: str | bytes) -> None: + self._auth_header = self._build_auth_header(username, password) + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + request.headers["Authorization"] = self._auth_header + yield request + + def _build_auth_header(self, username: str | bytes, password: str | bytes) -> str: + userpass = b":".join((to_bytes(username), to_bytes(password))) + token = b64encode(userpass).decode() + return f"Basic {token}" + + +class NetRCAuth(Auth): + """ + Use a 'netrc' file to lookup basic auth credentials based on the url host. + """ + + def __init__(self, file: str | None = None) -> None: + # Lazily import 'netrc'. + # There's no need for us to load this module unless 'NetRCAuth' is being used. + import netrc + + self._netrc_info = netrc.netrc(file) + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + auth_info = self._netrc_info.authenticators(request.url.host) + if auth_info is None or not auth_info[2]: + # The netrc file did not have authentication credentials for this host. + yield request + else: + # Build a basic auth header with credentials from the netrc file. 
+ request.headers["Authorization"] = self._build_auth_header( + username=auth_info[0], password=auth_info[2] + ) + yield request + + def _build_auth_header(self, username: str | bytes, password: str | bytes) -> str: + userpass = b":".join((to_bytes(username), to_bytes(password))) + token = b64encode(userpass).decode() + return f"Basic {token}" + + +class DigestAuth(Auth): + _ALGORITHM_TO_HASH_FUNCTION: dict[str, typing.Callable[[bytes], _Hash]] = { + "MD5": hashlib.md5, + "MD5-SESS": hashlib.md5, + "SHA": hashlib.sha1, + "SHA-SESS": hashlib.sha1, + "SHA-256": hashlib.sha256, + "SHA-256-SESS": hashlib.sha256, + "SHA-512": hashlib.sha512, + "SHA-512-SESS": hashlib.sha512, + } + + def __init__(self, username: str | bytes, password: str | bytes) -> None: + self._username = to_bytes(username) + self._password = to_bytes(password) + self._last_challenge: _DigestAuthChallenge | None = None + self._nonce_count = 1 + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + if self._last_challenge: + request.headers["Authorization"] = self._build_auth_header( + request, self._last_challenge + ) + + response = yield request + + if response.status_code != 401 or "www-authenticate" not in response.headers: + # If the response is not a 401 then we don't + # need to build an authenticated request. + return + + for auth_header in response.headers.get_list("www-authenticate"): + if auth_header.lower().startswith("digest "): + break + else: + # If the response does not include a 'WWW-Authenticate: Digest ...' + # header, then we don't need to build an authenticated request. + return + + self._last_challenge = self._parse_challenge(request, response, auth_header) + self._nonce_count = 1 + + request.headers["Authorization"] = self._build_auth_header( + request, self._last_challenge + ) + if response.cookies: + Cookies(response.cookies).set_cookie_header(request=request) + yield request + + def _parse_challenge( + self, request: Request, response: Response, auth_header: str + ) -> _DigestAuthChallenge: + """ + Returns a challenge from a Digest WWW-Authenticate header. + These take the form of: + `Digest realm="realm@host.com",qop="auth,auth-int",nonce="abc",opaque="xyz"` + """ + scheme, _, fields = auth_header.partition(" ") + + # This method should only ever have been called with a Digest auth header. 
+ assert scheme.lower() == "digest" + + header_dict: dict[str, str] = {} + for field in parse_http_list(fields): + key, value = field.strip().split("=", 1) + header_dict[key] = unquote(value) + + try: + realm = header_dict["realm"].encode() + nonce = header_dict["nonce"].encode() + algorithm = header_dict.get("algorithm", "MD5") + opaque = header_dict["opaque"].encode() if "opaque" in header_dict else None + qop = header_dict["qop"].encode() if "qop" in header_dict else None + return _DigestAuthChallenge( + realm=realm, nonce=nonce, algorithm=algorithm, opaque=opaque, qop=qop + ) + except KeyError as exc: + message = "Malformed Digest WWW-Authenticate header" + raise ProtocolError(message, request=request) from exc + + def _build_auth_header( + self, request: Request, challenge: _DigestAuthChallenge + ) -> str: + hash_func = self._ALGORITHM_TO_HASH_FUNCTION[challenge.algorithm.upper()] + + def digest(data: bytes) -> bytes: + return hash_func(data).hexdigest().encode() + + A1 = b":".join((self._username, challenge.realm, self._password)) + + path = request.url.raw_path + A2 = b":".join((request.method.encode(), path)) + # TODO: implement auth-int + HA2 = digest(A2) + + nc_value = b"%08x" % self._nonce_count + cnonce = self._get_client_nonce(self._nonce_count, challenge.nonce) + self._nonce_count += 1 + + HA1 = digest(A1) + if challenge.algorithm.lower().endswith("-sess"): + HA1 = digest(b":".join((HA1, challenge.nonce, cnonce))) + + qop = self._resolve_qop(challenge.qop, request=request) + if qop is None: + # Following RFC 2069 + digest_data = [HA1, challenge.nonce, HA2] + else: + # Following RFC 2617/7616 + digest_data = [HA1, challenge.nonce, nc_value, cnonce, qop, HA2] + + format_args = { + "username": self._username, + "realm": challenge.realm, + "nonce": challenge.nonce, + "uri": path, + "response": digest(b":".join(digest_data)), + "algorithm": challenge.algorithm.encode(), + } + if challenge.opaque: + format_args["opaque"] = challenge.opaque + if qop: + format_args["qop"] = b"auth" + format_args["nc"] = nc_value + format_args["cnonce"] = cnonce + + return "Digest " + self._get_header_value(format_args) + + def _get_client_nonce(self, nonce_count: int, nonce: bytes) -> bytes: + s = str(nonce_count).encode() + s += nonce + s += time.ctime().encode() + s += os.urandom(8) + + return hashlib.sha1(s).hexdigest()[:16].encode() + + def _get_header_value(self, header_fields: dict[str, bytes]) -> str: + NON_QUOTED_FIELDS = ("algorithm", "qop", "nc") + QUOTED_TEMPLATE = '{}="{}"' + NON_QUOTED_TEMPLATE = "{}={}" + + header_value = "" + for i, (field, value) in enumerate(header_fields.items()): + if i > 0: + header_value += ", " + template = ( + QUOTED_TEMPLATE + if field not in NON_QUOTED_FIELDS + else NON_QUOTED_TEMPLATE + ) + header_value += template.format(field, to_str(value)) + + return header_value + + def _resolve_qop(self, qop: bytes | None, request: Request) -> bytes | None: + if qop is None: + return None + qops = re.split(b", ?", qop) + if b"auth" in qops: + return b"auth" + + if qops == [b"auth-int"]: + raise NotImplementedError("Digest auth-int support is not yet implemented") + + message = f'Unexpected qop value "{qop!r}" in digest auth' + raise ProtocolError(message, request=request) + + +class _DigestAuthChallenge(typing.NamedTuple): + realm: bytes + nonce: bytes + algorithm: str + opaque: bytes | None + qop: bytes | None diff --git a/.venv/lib/python3.12/site-packages/httpx/_client.py b/.venv/lib/python3.12/site-packages/httpx/_client.py new file mode 100644 index 
0000000..2249231 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_client.py @@ -0,0 +1,2019 @@ +from __future__ import annotations + +import datetime +import enum +import logging +import time +import typing +import warnings +from contextlib import asynccontextmanager, contextmanager +from types import TracebackType + +from .__version__ import __version__ +from ._auth import Auth, BasicAuth, FunctionAuth +from ._config import ( + DEFAULT_LIMITS, + DEFAULT_MAX_REDIRECTS, + DEFAULT_TIMEOUT_CONFIG, + Limits, + Proxy, + Timeout, +) +from ._decoders import SUPPORTED_DECODERS +from ._exceptions import ( + InvalidURL, + RemoteProtocolError, + TooManyRedirects, + request_context, +) +from ._models import Cookies, Headers, Request, Response +from ._status_codes import codes +from ._transports.base import AsyncBaseTransport, BaseTransport +from ._transports.default import AsyncHTTPTransport, HTTPTransport +from ._types import ( + AsyncByteStream, + AuthTypes, + CertTypes, + CookieTypes, + HeaderTypes, + ProxyTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestExtensions, + RequestFiles, + SyncByteStream, + TimeoutTypes, +) +from ._urls import URL, QueryParams +from ._utils import URLPattern, get_environment_proxies + +if typing.TYPE_CHECKING: + import ssl # pragma: no cover + +__all__ = ["USE_CLIENT_DEFAULT", "AsyncClient", "Client"] + +# The type annotation for @classmethod and context managers here follows PEP 484 +# https://www.python.org/dev/peps/pep-0484/#annotating-instance-and-class-methods +T = typing.TypeVar("T", bound="Client") +U = typing.TypeVar("U", bound="AsyncClient") + + +def _is_https_redirect(url: URL, location: URL) -> bool: + """ + Return 'True' if 'location' is a HTTPS upgrade of 'url' + """ + if url.host != location.host: + return False + + return ( + url.scheme == "http" + and _port_or_default(url) == 80 + and location.scheme == "https" + and _port_or_default(location) == 443 + ) + + +def _port_or_default(url: URL) -> int | None: + if url.port is not None: + return url.port + return {"http": 80, "https": 443}.get(url.scheme) + + +def _same_origin(url: URL, other: URL) -> bool: + """ + Return 'True' if the given URLs share the same origin. + """ + return ( + url.scheme == other.scheme + and url.host == other.host + and _port_or_default(url) == _port_or_default(other) + ) + + +class UseClientDefault: + """ + For some parameters such as `auth=...` and `timeout=...` we need to be able + to indicate the default "unset" state, in a way that is distinctly different + to using `None`. + + The default "unset" state indicates that whatever default is set on the + client should be used. This is different to setting `None`, which + explicitly disables the parameter, possibly overriding a client default. + + For example we use `timeout=USE_CLIENT_DEFAULT` in the `request()` signature. + Omitting the `timeout` parameter will send a request using whatever default + timeout has been configured on the client. Including `timeout=None` will + ensure no timeout is used. + + Note that user code shouldn't need to use the `USE_CLIENT_DEFAULT` constant, + but it is used internally when a parameter is not included. 
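+
+ A small sketch of the distinction (the timeout value is illustrative):
+
+ ```python
+ import httpx
+
+ client = httpx.Client(timeout=10.0)
+ client.get("https://example.org/")                # omitted: the 10s client default applies
+ client.get("https://example.org/", timeout=None)  # None: the timeout is explicitly disabled
+ ```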
+ """ + + +USE_CLIENT_DEFAULT = UseClientDefault() + + +logger = logging.getLogger("httpx") + +USER_AGENT = f"python-httpx/{__version__}" +ACCEPT_ENCODING = ", ".join( + [key for key in SUPPORTED_DECODERS.keys() if key != "identity"] +) + + +class ClientState(enum.Enum): + # UNOPENED: + # The client has been instantiated, but has not been used to send a request, + # or been opened by entering the context of a `with` block. + UNOPENED = 1 + # OPENED: + # The client has either sent a request, or is within a `with` block. + OPENED = 2 + # CLOSED: + # The client has either exited the `with` block, or `close()` has + # been called explicitly. + CLOSED = 3 + + +class BoundSyncStream(SyncByteStream): + """ + A byte stream that is bound to a given response instance, and that + ensures the `response.elapsed` is set once the response is closed. + """ + + def __init__( + self, stream: SyncByteStream, response: Response, start: float + ) -> None: + self._stream = stream + self._response = response + self._start = start + + def __iter__(self) -> typing.Iterator[bytes]: + for chunk in self._stream: + yield chunk + + def close(self) -> None: + elapsed = time.perf_counter() - self._start + self._response.elapsed = datetime.timedelta(seconds=elapsed) + self._stream.close() + + +class BoundAsyncStream(AsyncByteStream): + """ + An async byte stream that is bound to a given response instance, and that + ensures the `response.elapsed` is set once the response is closed. + """ + + def __init__( + self, stream: AsyncByteStream, response: Response, start: float + ) -> None: + self._stream = stream + self._response = response + self._start = start + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + async for chunk in self._stream: + yield chunk + + async def aclose(self) -> None: + elapsed = time.perf_counter() - self._start + self._response.elapsed = datetime.timedelta(seconds=elapsed) + await self._stream.aclose() + + +EventHook = typing.Callable[..., typing.Any] + + +class BaseClient: + def __init__( + self, + *, + auth: AuthTypes | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, + base_url: URL | str = "", + trust_env: bool = True, + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", + ) -> None: + event_hooks = {} if event_hooks is None else event_hooks + + self._base_url = self._enforce_trailing_slash(URL(base_url)) + + self._auth = self._build_auth(auth) + self._params = QueryParams(params) + self.headers = Headers(headers) + self._cookies = Cookies(cookies) + self._timeout = Timeout(timeout) + self.follow_redirects = follow_redirects + self.max_redirects = max_redirects + self._event_hooks = { + "request": list(event_hooks.get("request", [])), + "response": list(event_hooks.get("response", [])), + } + self._trust_env = trust_env + self._default_encoding = default_encoding + self._state = ClientState.UNOPENED + + @property + def is_closed(self) -> bool: + """ + Check if the client being closed + """ + return self._state == ClientState.CLOSED + + @property + def trust_env(self) -> bool: + return self._trust_env + + def _enforce_trailing_slash(self, url: URL) -> URL: + if url.raw_path.endswith(b"/"): + return url + return url.copy_with(raw_path=url.raw_path + b"/") + + def _get_proxy_map( + self, proxy: 
ProxyTypes | None, allow_env_proxies: bool + ) -> dict[str, Proxy | None]: + if proxy is None: + if allow_env_proxies: + return { + key: None if url is None else Proxy(url=url) + for key, url in get_environment_proxies().items() + } + return {} + else: + proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy + return {"all://": proxy} + + @property + def timeout(self) -> Timeout: + return self._timeout + + @timeout.setter + def timeout(self, timeout: TimeoutTypes) -> None: + self._timeout = Timeout(timeout) + + @property + def event_hooks(self) -> dict[str, list[EventHook]]: + return self._event_hooks + + @event_hooks.setter + def event_hooks(self, event_hooks: dict[str, list[EventHook]]) -> None: + self._event_hooks = { + "request": list(event_hooks.get("request", [])), + "response": list(event_hooks.get("response", [])), + } + + @property + def auth(self) -> Auth | None: + """ + Authentication class used when none is passed at the request-level. + + See also [Authentication][0]. + + [0]: /quickstart/#authentication + """ + return self._auth + + @auth.setter + def auth(self, auth: AuthTypes) -> None: + self._auth = self._build_auth(auth) + + @property + def base_url(self) -> URL: + """ + Base URL to use when sending requests with relative URLs. + """ + return self._base_url + + @base_url.setter + def base_url(self, url: URL | str) -> None: + self._base_url = self._enforce_trailing_slash(URL(url)) + + @property + def headers(self) -> Headers: + """ + HTTP headers to include when sending requests. + """ + return self._headers + + @headers.setter + def headers(self, headers: HeaderTypes) -> None: + client_headers = Headers( + { + b"Accept": b"*/*", + b"Accept-Encoding": ACCEPT_ENCODING.encode("ascii"), + b"Connection": b"keep-alive", + b"User-Agent": USER_AGENT.encode("ascii"), + } + ) + client_headers.update(headers) + self._headers = client_headers + + @property + def cookies(self) -> Cookies: + """ + Cookie values to include when sending requests. + """ + return self._cookies + + @cookies.setter + def cookies(self, cookies: CookieTypes) -> None: + self._cookies = Cookies(cookies) + + @property + def params(self) -> QueryParams: + """ + Query parameters to include in the URL when sending requests. + """ + return self._params + + @params.setter + def params(self, params: QueryParamTypes) -> None: + self._params = QueryParams(params) + + def build_request( + self, + method: str, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Request: + """ + Build and return a request instance. + + * The `params`, `headers` and `cookies` arguments + are merged with any values set on the client. + * The `url` argument is merged with any `base_url` set on the client. 
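+
+ A brief sketch of that merging behaviour (the base URL is illustrative):
+
+ ```python
+ import httpx
+
+ client = httpx.Client(base_url="https://api.example.org/v1")
+ request = client.build_request("GET", "items", params={"page": 1})
+ # request.url is now https://api.example.org/v1/items?page=1
+ response = client.send(request)
+ ```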
+ + See also: [Request instances][0] + + [0]: /advanced/clients/#request-instances + """ + url = self._merge_url(url) + headers = self._merge_headers(headers) + cookies = self._merge_cookies(cookies) + params = self._merge_queryparams(params) + extensions = {} if extensions is None else extensions + if "timeout" not in extensions: + timeout = ( + self.timeout + if isinstance(timeout, UseClientDefault) + else Timeout(timeout) + ) + extensions = dict(**extensions, timeout=timeout.as_dict()) + return Request( + method, + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + extensions=extensions, + ) + + def _merge_url(self, url: URL | str) -> URL: + """ + Merge a URL argument together with any 'base_url' on the client, + to create the URL used for the outgoing request. + """ + merge_url = URL(url) + if merge_url.is_relative_url: + # To merge URLs we always append to the base URL. To get this + # behaviour correct we always ensure the base URL ends in a '/' + # separator, and strip any leading '/' from the merge URL. + # + # So, eg... + # + # >>> client = Client(base_url="https://www.example.com/subpath") + # >>> client.base_url + # URL('https://www.example.com/subpath/') + # >>> client.build_request("GET", "/path").url + # URL('https://www.example.com/subpath/path') + merge_raw_path = self.base_url.raw_path + merge_url.raw_path.lstrip(b"/") + return self.base_url.copy_with(raw_path=merge_raw_path) + return merge_url + + def _merge_cookies(self, cookies: CookieTypes | None = None) -> CookieTypes | None: + """ + Merge a cookies argument together with any cookies on the client, + to create the cookies used for the outgoing request. + """ + if cookies or self.cookies: + merged_cookies = Cookies(self.cookies) + merged_cookies.update(cookies) + return merged_cookies + return cookies + + def _merge_headers(self, headers: HeaderTypes | None = None) -> HeaderTypes | None: + """ + Merge a headers argument together with any headers on the client, + to create the headers used for the outgoing request. + """ + merged_headers = Headers(self.headers) + merged_headers.update(headers) + return merged_headers + + def _merge_queryparams( + self, params: QueryParamTypes | None = None + ) -> QueryParamTypes | None: + """ + Merge a queryparams argument together with any queryparams on the client, + to create the queryparams used for the outgoing request. 
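+
+ For example (the parameter names are illustrative), client-level and
+ request-level parameters with distinct keys are combined rather than replaced:
+
+ ```python
+ import httpx
+
+ client = httpx.Client(params={"api_key": "123"})
+ client.get("https://example.org/search", params={"q": "httpx"})
+ # The outgoing URL carries both api_key=123 and q=httpx.
+ ```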
+ """ + if params or self.params: + merged_queryparams = QueryParams(self.params) + return merged_queryparams.merge(params) + return params + + def _build_auth(self, auth: AuthTypes | None) -> Auth | None: + if auth is None: + return None + elif isinstance(auth, tuple): + return BasicAuth(username=auth[0], password=auth[1]) + elif isinstance(auth, Auth): + return auth + elif callable(auth): + return FunctionAuth(func=auth) + else: + raise TypeError(f'Invalid "auth" argument: {auth!r}') + + def _build_request_auth( + self, + request: Request, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + ) -> Auth: + auth = ( + self._auth if isinstance(auth, UseClientDefault) else self._build_auth(auth) + ) + + if auth is not None: + return auth + + username, password = request.url.username, request.url.password + if username or password: + return BasicAuth(username=username, password=password) + + return Auth() + + def _build_redirect_request(self, request: Request, response: Response) -> Request: + """ + Given a request and a redirect response, return a new request that + should be used to effect the redirect. + """ + method = self._redirect_method(request, response) + url = self._redirect_url(request, response) + headers = self._redirect_headers(request, url, method) + stream = self._redirect_stream(request, method) + cookies = Cookies(self.cookies) + return Request( + method=method, + url=url, + headers=headers, + cookies=cookies, + stream=stream, + extensions=request.extensions, + ) + + def _redirect_method(self, request: Request, response: Response) -> str: + """ + When being redirected we may want to change the method of the request + based on certain specs or browser behavior. + """ + method = request.method + + # https://tools.ietf.org/html/rfc7231#section-6.4.4 + if response.status_code == codes.SEE_OTHER and method != "HEAD": + method = "GET" + + # Do what the browsers do, despite standards... + # Turn 302s into GETs. + if response.status_code == codes.FOUND and method != "HEAD": + method = "GET" + + # If a POST is responded to with a 301, turn it into a GET. + # This bizarre behaviour is explained in 'requests' issue 1704. + if response.status_code == codes.MOVED_PERMANENTLY and method == "POST": + method = "GET" + + return method + + def _redirect_url(self, request: Request, response: Response) -> URL: + """ + Return the URL for the redirect to follow. + """ + location = response.headers["Location"] + + try: + url = URL(location) + except InvalidURL as exc: + raise RemoteProtocolError( + f"Invalid URL in location header: {exc}.", request=request + ) from None + + # Handle malformed 'Location' headers that are "absolute" form, have no host. + # See: https://github.com/encode/httpx/issues/771 + if url.scheme and not url.host: + url = url.copy_with(host=request.url.host) + + # Facilitate relative 'Location' headers, as allowed by RFC 7231. + # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') + if url.is_relative_url: + url = request.url.join(url) + + # Attach previous fragment if needed (RFC 7231 7.1.2) + if request.url.fragment and not url.fragment: + url = url.copy_with(fragment=request.url.fragment) + + return url + + def _redirect_headers(self, request: Request, url: URL, method: str) -> Headers: + """ + Return the headers that should be used for the redirect request. 
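+
+ As an illustration of the redirect handling above (URLs are placeholders): a
+ `POST` answered with a `301` is re-issued as a `GET`, the body is dropped, and
+ the body-specific headers are removed.
+
+ ```python
+ import httpx
+
+ client = httpx.Client(follow_redirects=True)
+ response = client.post("https://example.org/old-endpoint", data={"field": "value"})
+ # If /old-endpoint answers with a 301, the follow-up request is a GET with no
+ # body, and Content-Length / Transfer-Encoding are no longer sent.
+ print(response.history)  # any intermediate redirect responses
+ ```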
+ """ + headers = Headers(request.headers) + + if not _same_origin(url, request.url): + if not _is_https_redirect(request.url, url): + # Strip Authorization headers when responses are redirected + # away from the origin. (Except for direct HTTP to HTTPS redirects.) + headers.pop("Authorization", None) + + # Update the Host header. + headers["Host"] = url.netloc.decode("ascii") + + if method != request.method and method == "GET": + # If we've switch to a 'GET' request, then strip any headers which + # are only relevant to the request body. + headers.pop("Content-Length", None) + headers.pop("Transfer-Encoding", None) + + # We should use the client cookie store to determine any cookie header, + # rather than whatever was on the original outgoing request. + headers.pop("Cookie", None) + + return headers + + def _redirect_stream( + self, request: Request, method: str + ) -> SyncByteStream | AsyncByteStream | None: + """ + Return the body that should be used for the redirect request. + """ + if method != request.method and method == "GET": + return None + + return request.stream + + def _set_timeout(self, request: Request) -> None: + if "timeout" not in request.extensions: + timeout = ( + self.timeout + if isinstance(self.timeout, UseClientDefault) + else Timeout(self.timeout) + ) + request.extensions = dict(**request.extensions, timeout=timeout.as_dict()) + + +class Client(BaseClient): + """ + An HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc. + + It can be shared between threads. + + Usage: + + ```python + >>> client = httpx.Client() + >>> response = client.get('https://example.org') + ``` + + **Parameters:** + + * **auth** - *(optional)* An authentication class to use when sending + requests. + * **params** - *(optional)* Query parameters to include in request URLs, as + a string, dictionary, or sequence of two-tuples. + * **headers** - *(optional)* Dictionary of HTTP headers to include when + sending requests. + * **cookies** - *(optional)* Dictionary of Cookie items to include when + sending requests. + * **verify** - *(optional)* Either `True` to use an SSL context with the + default CA bundle, `False` to disable verification, or an instance of + `ssl.SSLContext` to use a custom context. + * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be + enabled. Defaults to `False`. + * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. + * **timeout** - *(optional)* The timeout configuration to use when sending + requests. + * **limits** - *(optional)* The limits configuration to use. + * **max_redirects** - *(optional)* The maximum number of redirect responses + that should be followed. + * **base_url** - *(optional)* A URL to use as the base when building + request URLs. + * **transport** - *(optional)* A transport class to use for sending requests + over the network. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + * **default_encoding** - *(optional)* The default encoding to use for decoding + response text, if no charset information is included in a response Content-Type + header. Set to a callable for automatic character set detection. Default: "utf-8". 
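+
+ A slightly fuller usage sketch. The base URL and credentials here are
+ illustrative; the `(username, password)` tuple is the shorthand for basic
+ authentication:
+
+ ```python
+ import httpx
+
+ with httpx.Client(
+     base_url="https://api.example.org",
+     auth=("user", "secret"),
+     timeout=10.0,
+     follow_redirects=True,
+ ) as client:
+     response = client.get("/items", params={"page": 1})
+     response.raise_for_status()
+     data = response.json()
+ ```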
+ """ + + def __init__( + self, + *, + auth: AuthTypes | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + verify: ssl.SSLContext | str | bool = True, + cert: CertTypes | None = None, + trust_env: bool = True, + http1: bool = True, + http2: bool = False, + proxy: ProxyTypes | None = None, + mounts: None | (typing.Mapping[str, BaseTransport | None]) = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + limits: Limits = DEFAULT_LIMITS, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, + base_url: URL | str = "", + transport: BaseTransport | None = None, + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", + ) -> None: + super().__init__( + auth=auth, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + follow_redirects=follow_redirects, + max_redirects=max_redirects, + event_hooks=event_hooks, + base_url=base_url, + trust_env=trust_env, + default_encoding=default_encoding, + ) + + if http2: + try: + import h2 # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using http2=True, but the 'h2' package is not installed. " + "Make sure to install httpx using `pip install httpx[http2]`." + ) from None + + allow_env_proxies = trust_env and transport is None + proxy_map = self._get_proxy_map(proxy, allow_env_proxies) + + self._transport = self._init_transport( + verify=verify, + cert=cert, + trust_env=trust_env, + http1=http1, + http2=http2, + limits=limits, + transport=transport, + ) + self._mounts: dict[URLPattern, BaseTransport | None] = { + URLPattern(key): None + if proxy is None + else self._init_proxy_transport( + proxy, + verify=verify, + cert=cert, + trust_env=trust_env, + http1=http1, + http2=http2, + limits=limits, + ) + for key, proxy in proxy_map.items() + } + if mounts is not None: + self._mounts.update( + {URLPattern(key): transport for key, transport in mounts.items()} + ) + + self._mounts = dict(sorted(self._mounts.items())) + + def _init_transport( + self, + verify: ssl.SSLContext | str | bool = True, + cert: CertTypes | None = None, + trust_env: bool = True, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + transport: BaseTransport | None = None, + ) -> BaseTransport: + if transport is not None: + return transport + + return HTTPTransport( + verify=verify, + cert=cert, + trust_env=trust_env, + http1=http1, + http2=http2, + limits=limits, + ) + + def _init_proxy_transport( + self, + proxy: Proxy, + verify: ssl.SSLContext | str | bool = True, + cert: CertTypes | None = None, + trust_env: bool = True, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + ) -> BaseTransport: + return HTTPTransport( + verify=verify, + cert=cert, + trust_env=trust_env, + http1=http1, + http2=http2, + limits=limits, + proxy=proxy, + ) + + def _transport_for_url(self, url: URL) -> BaseTransport: + """ + Returns the transport instance that should be used for a given URL. + This will either be the standard connection pool, or a proxy. 
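+
+ For example, transports can be mounted per URL pattern (the proxy URL and
+ patterns are illustrative); a `None` value falls back to the default
+ connection pool:
+
+ ```python
+ import httpx
+
+ proxy_transport = httpx.HTTPTransport(proxy="http://localhost:8030")
+ client = httpx.Client(mounts={
+     "http://": proxy_transport,  # plain-HTTP requests are routed via the proxy
+     "https://": None,            # HTTPS requests use the default transport
+ })
+ ```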
+ """ + for pattern, transport in self._mounts.items(): + if pattern.matches(url): + return self._transport if transport is None else transport + + return self._transport + + def request( + self, + method: str, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Build and send a request. + + Equivalent to: + + ```python + request = client.build_request(...) + response = client.send(request, ...) + ``` + + See `Client.build_request()`, `Client.send()` and + [Merging of configuration][0] for how the various parameters + are merged with client-level configuration. + + [0]: /advanced/clients/#merging-of-configuration + """ + if cookies is not None: + message = ( + "Setting per-request cookies=<...> is being deprecated, because " + "the expected behaviour on cookie persistence is ambiguous. Set " + "cookies directly on the client instance instead." + ) + warnings.warn(message, DeprecationWarning, stacklevel=2) + + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + return self.send(request, auth=auth, follow_redirects=follow_redirects) + + @contextmanager + def stream( + self, + method: str, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> typing.Iterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + response = self.send( + request=request, + auth=auth, + follow_redirects=follow_redirects, + stream=True, + ) + try: + yield response + finally: + response.close() + + def send( + self, + request: Request, + *, + stream: bool = False, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a request. + + The request is sent as-is, unmodified. + + Typically you'll want to build one with `Client.build_request()` + so that any client-level configuration is merged into the request, + but passing an explicit `httpx.Request()` is supported as well. 
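+
+ A sketch of the build/send split, including the streaming form that
+ `Client.stream()` above wraps (the URL is a placeholder, and `iter_bytes()` is
+ the usual `Response` helper for incremental reads):
+
+ ```python
+ import httpx
+
+ client = httpx.Client()
+ request = client.build_request("GET", "https://example.org/large-file")
+ response = client.send(request, stream=True)
+ try:
+     for chunk in response.iter_bytes():
+         ...  # process the body incrementally
+ finally:
+     response.close()
+ ```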
+ + See also: [Request instances][0] + + [0]: /advanced/clients/#request-instances + """ + if self._state == ClientState.CLOSED: + raise RuntimeError("Cannot send a request, as the client has been closed.") + + self._state = ClientState.OPENED + follow_redirects = ( + self.follow_redirects + if isinstance(follow_redirects, UseClientDefault) + else follow_redirects + ) + + self._set_timeout(request) + + auth = self._build_request_auth(request, auth) + + response = self._send_handling_auth( + request, + auth=auth, + follow_redirects=follow_redirects, + history=[], + ) + try: + if not stream: + response.read() + + return response + + except BaseException as exc: + response.close() + raise exc + + def _send_handling_auth( + self, + request: Request, + auth: Auth, + follow_redirects: bool, + history: list[Response], + ) -> Response: + auth_flow = auth.sync_auth_flow(request) + try: + request = next(auth_flow) + + while True: + response = self._send_handling_redirects( + request, + follow_redirects=follow_redirects, + history=history, + ) + try: + try: + next_request = auth_flow.send(response) + except StopIteration: + return response + + response.history = list(history) + response.read() + request = next_request + history.append(response) + + except BaseException as exc: + response.close() + raise exc + finally: + auth_flow.close() + + def _send_handling_redirects( + self, + request: Request, + follow_redirects: bool, + history: list[Response], + ) -> Response: + while True: + if len(history) > self.max_redirects: + raise TooManyRedirects( + "Exceeded maximum allowed redirects.", request=request + ) + + for hook in self._event_hooks["request"]: + hook(request) + + response = self._send_single_request(request) + try: + for hook in self._event_hooks["response"]: + hook(response) + response.history = list(history) + + if not response.has_redirect_location: + return response + + request = self._build_redirect_request(request, response) + history = history + [response] + + if follow_redirects: + response.read() + else: + response.next_request = request + return response + + except BaseException as exc: + response.close() + raise exc + + def _send_single_request(self, request: Request) -> Response: + """ + Sends a single request, without handling any redirections. + """ + transport = self._transport_for_url(request.url) + start = time.perf_counter() + + if not isinstance(request.stream, SyncByteStream): + raise RuntimeError( + "Attempted to send an async request with a sync Client instance." + ) + + with request_context(request=request): + response = transport.handle_request(request) + + assert isinstance(response.stream, SyncByteStream) + + response.request = request + response.stream = BoundSyncStream( + response.stream, response=response, start=start + ) + self.cookies.extract_cookies(response) + response.default_encoding = self._default_encoding + + logger.info( + 'HTTP Request: %s %s "%s %d %s"', + request.method, + request.url, + response.http_version, + response.status_code, + response.reason_phrase, + ) + + return response + + def get( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `GET` request. 
+ + **Parameters**: See `httpx.request`. + """ + return self.request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def options( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def head( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `HEAD` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def post( + self, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `POST` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def put( + self, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `PUT` request. + + **Parameters**: See `httpx.request`. 
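+
+ The body-carrying helpers (`post`, `put`, `patch`) accept the request body in
+ several forms; a quick sketch (the endpoint and file name are illustrative):
+
+ ```python
+ import httpx
+
+ client = httpx.Client()
+ client.put("https://httpbin.org/put", data={"field": "value"})  # form-encoded body
+ client.put("https://httpbin.org/put", json={"field": "value"})  # JSON body
+ with open("report.csv", "rb") as upload:
+     client.put("https://httpbin.org/put", files={"file": upload})  # multipart upload
+ ```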
+ """ + return self.request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def patch( + self, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `PATCH` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def delete( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `DELETE` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def close(self) -> None: + """ + Close transport and proxies. + """ + if self._state != ClientState.CLOSED: + self._state = ClientState.CLOSED + + self._transport.close() + for transport in self._mounts.values(): + if transport is not None: + transport.close() + + def __enter__(self: T) -> T: + if self._state != ClientState.UNOPENED: + msg = { + ClientState.OPENED: "Cannot open a client instance more than once.", + ClientState.CLOSED: ( + "Cannot reopen a client instance, once it has been closed." + ), + }[self._state] + raise RuntimeError(msg) + + self._state = ClientState.OPENED + + self._transport.__enter__() + for transport in self._mounts.values(): + if transport is not None: + transport.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + self._state = ClientState.CLOSED + + self._transport.__exit__(exc_type, exc_value, traceback) + for transport in self._mounts.values(): + if transport is not None: + transport.__exit__(exc_type, exc_value, traceback) + + +class AsyncClient(BaseClient): + """ + An asynchronous HTTP client, with connection pooling, HTTP/2, redirects, + cookie persistence, etc. + + It can be shared between tasks. + + Usage: + + ```python + >>> async with httpx.AsyncClient() as client: + >>> response = await client.get('https://example.org') + ``` + + **Parameters:** + + * **auth** - *(optional)* An authentication class to use when sending + requests. + * **params** - *(optional)* Query parameters to include in request URLs, as + a string, dictionary, or sequence of two-tuples. 
+ * **headers** - *(optional)* Dictionary of HTTP headers to include when + sending requests. + * **cookies** - *(optional)* Dictionary of Cookie items to include when + sending requests. + * **verify** - *(optional)* Either `True` to use an SSL context with the + default CA bundle, `False` to disable verification, or an instance of + `ssl.SSLContext` to use a custom context. + * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be + enabled. Defaults to `False`. + * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. + * **timeout** - *(optional)* The timeout configuration to use when sending + requests. + * **limits** - *(optional)* The limits configuration to use. + * **max_redirects** - *(optional)* The maximum number of redirect responses + that should be followed. + * **base_url** - *(optional)* A URL to use as the base when building + request URLs. + * **transport** - *(optional)* A transport class to use for sending requests + over the network. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + * **default_encoding** - *(optional)* The default encoding to use for decoding + response text, if no charset information is included in a response Content-Type + header. Set to a callable for automatic character set detection. Default: "utf-8". + """ + + def __init__( + self, + *, + auth: AuthTypes | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + verify: ssl.SSLContext | str | bool = True, + cert: CertTypes | None = None, + http1: bool = True, + http2: bool = False, + proxy: ProxyTypes | None = None, + mounts: None | (typing.Mapping[str, AsyncBaseTransport | None]) = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + limits: Limits = DEFAULT_LIMITS, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, + base_url: URL | str = "", + transport: AsyncBaseTransport | None = None, + trust_env: bool = True, + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", + ) -> None: + super().__init__( + auth=auth, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + follow_redirects=follow_redirects, + max_redirects=max_redirects, + event_hooks=event_hooks, + base_url=base_url, + trust_env=trust_env, + default_encoding=default_encoding, + ) + + if http2: + try: + import h2 # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using http2=True, but the 'h2' package is not installed. " + "Make sure to install httpx using `pip install httpx[http2]`." 
+ ) from None + + allow_env_proxies = trust_env and transport is None + proxy_map = self._get_proxy_map(proxy, allow_env_proxies) + + self._transport = self._init_transport( + verify=verify, + cert=cert, + trust_env=trust_env, + http1=http1, + http2=http2, + limits=limits, + transport=transport, + ) + + self._mounts: dict[URLPattern, AsyncBaseTransport | None] = { + URLPattern(key): None + if proxy is None + else self._init_proxy_transport( + proxy, + verify=verify, + cert=cert, + trust_env=trust_env, + http1=http1, + http2=http2, + limits=limits, + ) + for key, proxy in proxy_map.items() + } + if mounts is not None: + self._mounts.update( + {URLPattern(key): transport for key, transport in mounts.items()} + ) + self._mounts = dict(sorted(self._mounts.items())) + + def _init_transport( + self, + verify: ssl.SSLContext | str | bool = True, + cert: CertTypes | None = None, + trust_env: bool = True, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + transport: AsyncBaseTransport | None = None, + ) -> AsyncBaseTransport: + if transport is not None: + return transport + + return AsyncHTTPTransport( + verify=verify, + cert=cert, + trust_env=trust_env, + http1=http1, + http2=http2, + limits=limits, + ) + + def _init_proxy_transport( + self, + proxy: Proxy, + verify: ssl.SSLContext | str | bool = True, + cert: CertTypes | None = None, + trust_env: bool = True, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + ) -> AsyncBaseTransport: + return AsyncHTTPTransport( + verify=verify, + cert=cert, + trust_env=trust_env, + http1=http1, + http2=http2, + limits=limits, + proxy=proxy, + ) + + def _transport_for_url(self, url: URL) -> AsyncBaseTransport: + """ + Returns the transport instance that should be used for a given URL. + This will either be the standard connection pool, or a proxy. + """ + for pattern, transport in self._mounts.items(): + if pattern.matches(url): + return self._transport if transport is None else transport + + return self._transport + + async def request( + self, + method: str, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Build and send a request. + + Equivalent to: + + ```python + request = client.build_request(...) + response = await client.send(request, ...) + ``` + + See `AsyncClient.build_request()`, `AsyncClient.send()` + and [Merging of configuration][0] for how the various parameters + are merged with client-level configuration. + + [0]: /advanced/clients/#merging-of-configuration + """ + + if cookies is not None: # pragma: no cover + message = ( + "Setting per-request cookies=<...> is being deprecated, because " + "the expected behaviour on cookie persistence is ambiguous. Set " + "cookies directly on the client instance instead." 
+ ) + warnings.warn(message, DeprecationWarning, stacklevel=2) + + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + return await self.send(request, auth=auth, follow_redirects=follow_redirects) + + @asynccontextmanager + async def stream( + self, + method: str, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> typing.AsyncIterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + response = await self.send( + request=request, + auth=auth, + follow_redirects=follow_redirects, + stream=True, + ) + try: + yield response + finally: + await response.aclose() + + async def send( + self, + request: Request, + *, + stream: bool = False, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a request. + + The request is sent as-is, unmodified. + + Typically you'll want to build one with `AsyncClient.build_request()` + so that any client-level configuration is merged into the request, + but passing an explicit `httpx.Request()` is supported as well. 
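+
+ An asynchronous sketch of the same pattern, using the streaming variant shown
+ above (the URL is a placeholder, and `aiter_bytes()` is the usual async
+ `Response` helper for incremental reads):
+
+ ```python
+ import asyncio
+
+ import httpx
+
+ async def main() -> None:
+     async with httpx.AsyncClient() as client:
+         async with client.stream("GET", "https://example.org/large-file") as response:
+             async for chunk in response.aiter_bytes():
+                 ...  # process the body incrementally
+
+ asyncio.run(main())
+ ```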
+ + See also: [Request instances][0] + + [0]: /advanced/clients/#request-instances + """ + if self._state == ClientState.CLOSED: + raise RuntimeError("Cannot send a request, as the client has been closed.") + + self._state = ClientState.OPENED + follow_redirects = ( + self.follow_redirects + if isinstance(follow_redirects, UseClientDefault) + else follow_redirects + ) + + self._set_timeout(request) + + auth = self._build_request_auth(request, auth) + + response = await self._send_handling_auth( + request, + auth=auth, + follow_redirects=follow_redirects, + history=[], + ) + try: + if not stream: + await response.aread() + + return response + + except BaseException as exc: + await response.aclose() + raise exc + + async def _send_handling_auth( + self, + request: Request, + auth: Auth, + follow_redirects: bool, + history: list[Response], + ) -> Response: + auth_flow = auth.async_auth_flow(request) + try: + request = await auth_flow.__anext__() + + while True: + response = await self._send_handling_redirects( + request, + follow_redirects=follow_redirects, + history=history, + ) + try: + try: + next_request = await auth_flow.asend(response) + except StopAsyncIteration: + return response + + response.history = list(history) + await response.aread() + request = next_request + history.append(response) + + except BaseException as exc: + await response.aclose() + raise exc + finally: + await auth_flow.aclose() + + async def _send_handling_redirects( + self, + request: Request, + follow_redirects: bool, + history: list[Response], + ) -> Response: + while True: + if len(history) > self.max_redirects: + raise TooManyRedirects( + "Exceeded maximum allowed redirects.", request=request + ) + + for hook in self._event_hooks["request"]: + await hook(request) + + response = await self._send_single_request(request) + try: + for hook in self._event_hooks["response"]: + await hook(response) + + response.history = list(history) + + if not response.has_redirect_location: + return response + + request = self._build_redirect_request(request, response) + history = history + [response] + + if follow_redirects: + await response.aread() + else: + response.next_request = request + return response + + except BaseException as exc: + await response.aclose() + raise exc + + async def _send_single_request(self, request: Request) -> Response: + """ + Sends a single request, without handling any redirections. + """ + transport = self._transport_for_url(request.url) + start = time.perf_counter() + + if not isinstance(request.stream, AsyncByteStream): + raise RuntimeError( + "Attempted to send an sync request with an AsyncClient instance." 
+ ) + + with request_context(request=request): + response = await transport.handle_async_request(request) + + assert isinstance(response.stream, AsyncByteStream) + response.request = request + response.stream = BoundAsyncStream( + response.stream, response=response, start=start + ) + self.cookies.extract_cookies(response) + response.default_encoding = self._default_encoding + + logger.info( + 'HTTP Request: %s %s "%s %d %s"', + request.method, + request.url, + response.http_version, + response.status_code, + response.reason_phrase, + ) + + return response + + async def get( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `GET` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def options( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def head( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `HEAD` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def post( + self, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `POST` request. + + **Parameters**: See `httpx.request`. 
+ """ + return await self.request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def put( + self, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `PUT` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def patch( + self, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `PATCH` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def delete( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `DELETE` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def aclose(self) -> None: + """ + Close transport and proxies. + """ + if self._state != ClientState.CLOSED: + self._state = ClientState.CLOSED + + await self._transport.aclose() + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.aclose() + + async def __aenter__(self: U) -> U: + if self._state != ClientState.UNOPENED: + msg = { + ClientState.OPENED: "Cannot open a client instance more than once.", + ClientState.CLOSED: ( + "Cannot reopen a client instance, once it has been closed." 
+ ), + }[self._state] + raise RuntimeError(msg) + + self._state = ClientState.OPENED + + await self._transport.__aenter__() + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.__aenter__() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + self._state = ClientState.CLOSED + + await self._transport.__aexit__(exc_type, exc_value, traceback) + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.__aexit__(exc_type, exc_value, traceback) diff --git a/.venv/lib/python3.12/site-packages/httpx/_config.py b/.venv/lib/python3.12/site-packages/httpx/_config.py new file mode 100644 index 0000000..467a6c9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_config.py @@ -0,0 +1,248 @@ +from __future__ import annotations + +import os +import typing + +from ._models import Headers +from ._types import CertTypes, HeaderTypes, TimeoutTypes +from ._urls import URL + +if typing.TYPE_CHECKING: + import ssl # pragma: no cover + +__all__ = ["Limits", "Proxy", "Timeout", "create_ssl_context"] + + +class UnsetType: + pass # pragma: no cover + + +UNSET = UnsetType() + + +def create_ssl_context( + verify: ssl.SSLContext | str | bool = True, + cert: CertTypes | None = None, + trust_env: bool = True, +) -> ssl.SSLContext: + import ssl + import warnings + + import certifi + + if verify is True: + if trust_env and os.environ.get("SSL_CERT_FILE"): # pragma: nocover + ctx = ssl.create_default_context(cafile=os.environ["SSL_CERT_FILE"]) + elif trust_env and os.environ.get("SSL_CERT_DIR"): # pragma: nocover + ctx = ssl.create_default_context(capath=os.environ["SSL_CERT_DIR"]) + else: + # Default case... + ctx = ssl.create_default_context(cafile=certifi.where()) + elif verify is False: + ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + ctx.check_hostname = False + ctx.verify_mode = ssl.CERT_NONE + elif isinstance(verify, str): # pragma: nocover + message = ( + "`verify=` is deprecated. " + "Use `verify=ssl.create_default_context(cafile=...)` " + "or `verify=ssl.create_default_context(capath=...)` instead." + ) + warnings.warn(message, DeprecationWarning) + if os.path.isdir(verify): + return ssl.create_default_context(capath=verify) + return ssl.create_default_context(cafile=verify) + else: + ctx = verify + + if cert: # pragma: nocover + message = ( + "`cert=...` is deprecated. Use `verify=` instead," + "with `.load_cert_chain()` to configure the certificate chain." + ) + warnings.warn(message, DeprecationWarning) + if isinstance(cert, str): + ctx.load_cert_chain(cert) + else: + ctx.load_cert_chain(*cert) + + return ctx + + +class Timeout: + """ + Timeout configuration. + + **Usage**: + + Timeout(None) # No timeouts. + Timeout(5.0) # 5s timeout on all operations. + Timeout(None, connect=5.0) # 5s timeout on connect, no other timeouts. + Timeout(5.0, connect=10.0) # 10s timeout on connect. 5s timeout elsewhere. + Timeout(5.0, pool=None) # No timeout on acquiring connection from pool. + # 5s timeout elsewhere. + """ + + def __init__( + self, + timeout: TimeoutTypes | UnsetType = UNSET, + *, + connect: None | float | UnsetType = UNSET, + read: None | float | UnsetType = UNSET, + write: None | float | UnsetType = UNSET, + pool: None | float | UnsetType = UNSET, + ) -> None: + if isinstance(timeout, Timeout): + # Passed as a single explicit Timeout. 
+ assert connect is UNSET + assert read is UNSET + assert write is UNSET + assert pool is UNSET + self.connect = timeout.connect # type: typing.Optional[float] + self.read = timeout.read # type: typing.Optional[float] + self.write = timeout.write # type: typing.Optional[float] + self.pool = timeout.pool # type: typing.Optional[float] + elif isinstance(timeout, tuple): + # Passed as a tuple. + self.connect = timeout[0] + self.read = timeout[1] + self.write = None if len(timeout) < 3 else timeout[2] + self.pool = None if len(timeout) < 4 else timeout[3] + elif not ( + isinstance(connect, UnsetType) + or isinstance(read, UnsetType) + or isinstance(write, UnsetType) + or isinstance(pool, UnsetType) + ): + self.connect = connect + self.read = read + self.write = write + self.pool = pool + else: + if isinstance(timeout, UnsetType): + raise ValueError( + "httpx.Timeout must either include a default, or set all " + "four parameters explicitly." + ) + self.connect = timeout if isinstance(connect, UnsetType) else connect + self.read = timeout if isinstance(read, UnsetType) else read + self.write = timeout if isinstance(write, UnsetType) else write + self.pool = timeout if isinstance(pool, UnsetType) else pool + + def as_dict(self) -> dict[str, float | None]: + return { + "connect": self.connect, + "read": self.read, + "write": self.write, + "pool": self.pool, + } + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, self.__class__) + and self.connect == other.connect + and self.read == other.read + and self.write == other.write + and self.pool == other.pool + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + if len({self.connect, self.read, self.write, self.pool}) == 1: + return f"{class_name}(timeout={self.connect})" + return ( + f"{class_name}(connect={self.connect}, " + f"read={self.read}, write={self.write}, pool={self.pool})" + ) + + +class Limits: + """ + Configuration for limits to various client behaviors. + + **Parameters:** + + * **max_connections** - The maximum number of concurrent connections that may be + established. + * **max_keepalive_connections** - Allow the connection pool to maintain + keep-alive connections below this point. Should be less than or equal + to `max_connections`. + * **keepalive_expiry** - Time limit on idle keep-alive connections in seconds. 
+ """ + + def __init__( + self, + *, + max_connections: int | None = None, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = 5.0, + ) -> None: + self.max_connections = max_connections + self.max_keepalive_connections = max_keepalive_connections + self.keepalive_expiry = keepalive_expiry + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, self.__class__) + and self.max_connections == other.max_connections + and self.max_keepalive_connections == other.max_keepalive_connections + and self.keepalive_expiry == other.keepalive_expiry + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + return ( + f"{class_name}(max_connections={self.max_connections}, " + f"max_keepalive_connections={self.max_keepalive_connections}, " + f"keepalive_expiry={self.keepalive_expiry})" + ) + + +class Proxy: + def __init__( + self, + url: URL | str, + *, + ssl_context: ssl.SSLContext | None = None, + auth: tuple[str, str] | None = None, + headers: HeaderTypes | None = None, + ) -> None: + url = URL(url) + headers = Headers(headers) + + if url.scheme not in ("http", "https", "socks5", "socks5h"): + raise ValueError(f"Unknown scheme for proxy URL {url!r}") + + if url.username or url.password: + # Remove any auth credentials from the URL. + auth = (url.username, url.password) + url = url.copy_with(username=None, password=None) + + self.url = url + self.auth = auth + self.headers = headers + self.ssl_context = ssl_context + + @property + def raw_auth(self) -> tuple[bytes, bytes] | None: + # The proxy authentication as raw bytes. + return ( + None + if self.auth is None + else (self.auth[0].encode("utf-8"), self.auth[1].encode("utf-8")) + ) + + def __repr__(self) -> str: + # The authentication is represented with the password component masked. + auth = (self.auth[0], "********") if self.auth else None + + # Build a nice concise representation. 
+ url_str = f"{str(self.url)!r}" + auth_str = f", auth={auth!r}" if auth else "" + headers_str = f", headers={dict(self.headers)!r}" if self.headers else "" + return f"Proxy({url_str}{auth_str}{headers_str})" + + +DEFAULT_TIMEOUT_CONFIG = Timeout(timeout=5.0) +DEFAULT_LIMITS = Limits(max_connections=100, max_keepalive_connections=20) +DEFAULT_MAX_REDIRECTS = 20 diff --git a/.venv/lib/python3.12/site-packages/httpx/_content.py b/.venv/lib/python3.12/site-packages/httpx/_content.py new file mode 100644 index 0000000..6f479a0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_content.py @@ -0,0 +1,240 @@ +from __future__ import annotations + +import inspect +import warnings +from json import dumps as json_dumps +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Iterable, + Iterator, + Mapping, +) +from urllib.parse import urlencode + +from ._exceptions import StreamClosed, StreamConsumed +from ._multipart import MultipartStream +from ._types import ( + AsyncByteStream, + RequestContent, + RequestData, + RequestFiles, + ResponseContent, + SyncByteStream, +) +from ._utils import peek_filelike_length, primitive_value_to_str + +__all__ = ["ByteStream"] + + +class ByteStream(AsyncByteStream, SyncByteStream): + def __init__(self, stream: bytes) -> None: + self._stream = stream + + def __iter__(self) -> Iterator[bytes]: + yield self._stream + + async def __aiter__(self) -> AsyncIterator[bytes]: + yield self._stream + + +class IteratorByteStream(SyncByteStream): + CHUNK_SIZE = 65_536 + + def __init__(self, stream: Iterable[bytes]) -> None: + self._stream = stream + self._is_stream_consumed = False + self._is_generator = inspect.isgenerator(stream) + + def __iter__(self) -> Iterator[bytes]: + if self._is_stream_consumed and self._is_generator: + raise StreamConsumed() + + self._is_stream_consumed = True + if hasattr(self._stream, "read"): + # File-like interfaces should use 'read' directly. + chunk = self._stream.read(self.CHUNK_SIZE) + while chunk: + yield chunk + chunk = self._stream.read(self.CHUNK_SIZE) + else: + # Otherwise iterate. + for part in self._stream: + yield part + + +class AsyncIteratorByteStream(AsyncByteStream): + CHUNK_SIZE = 65_536 + + def __init__(self, stream: AsyncIterable[bytes]) -> None: + self._stream = stream + self._is_stream_consumed = False + self._is_generator = inspect.isasyncgen(stream) + + async def __aiter__(self) -> AsyncIterator[bytes]: + if self._is_stream_consumed and self._is_generator: + raise StreamConsumed() + + self._is_stream_consumed = True + if hasattr(self._stream, "aread"): + # File-like interfaces should use 'aread' directly. + chunk = await self._stream.aread(self.CHUNK_SIZE) + while chunk: + yield chunk + chunk = await self._stream.aread(self.CHUNK_SIZE) + else: + # Otherwise iterate. + async for part in self._stream: + yield part + + +class UnattachedStream(AsyncByteStream, SyncByteStream): + """ + If a request or response is serialized using pickle, then it is no longer + attached to a stream for I/O purposes. Any stream operations should result + in `httpx.StreamClosed`. 
+ """ + + def __iter__(self) -> Iterator[bytes]: + raise StreamClosed() + + async def __aiter__(self) -> AsyncIterator[bytes]: + raise StreamClosed() + yield b"" # pragma: no cover + + +def encode_content( + content: str | bytes | Iterable[bytes] | AsyncIterable[bytes], +) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]: + if isinstance(content, (bytes, str)): + body = content.encode("utf-8") if isinstance(content, str) else content + content_length = len(body) + headers = {"Content-Length": str(content_length)} if body else {} + return headers, ByteStream(body) + + elif isinstance(content, Iterable) and not isinstance(content, dict): + # `not isinstance(content, dict)` is a bit oddly specific, but it + # catches a case that's easy for users to make in error, and would + # otherwise pass through here, like any other bytes-iterable, + # because `dict` happens to be iterable. See issue #2491. + content_length_or_none = peek_filelike_length(content) + + if content_length_or_none is None: + headers = {"Transfer-Encoding": "chunked"} + else: + headers = {"Content-Length": str(content_length_or_none)} + return headers, IteratorByteStream(content) # type: ignore + + elif isinstance(content, AsyncIterable): + headers = {"Transfer-Encoding": "chunked"} + return headers, AsyncIteratorByteStream(content) + + raise TypeError(f"Unexpected type for 'content', {type(content)!r}") + + +def encode_urlencoded_data( + data: RequestData, +) -> tuple[dict[str, str], ByteStream]: + plain_data = [] + for key, value in data.items(): + if isinstance(value, (list, tuple)): + plain_data.extend([(key, primitive_value_to_str(item)) for item in value]) + else: + plain_data.append((key, primitive_value_to_str(value))) + body = urlencode(plain_data, doseq=True).encode("utf-8") + content_length = str(len(body)) + content_type = "application/x-www-form-urlencoded" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_multipart_data( + data: RequestData, files: RequestFiles, boundary: bytes | None +) -> tuple[dict[str, str], MultipartStream]: + multipart = MultipartStream(data=data, files=files, boundary=boundary) + headers = multipart.get_headers() + return headers, multipart + + +def encode_text(text: str) -> tuple[dict[str, str], ByteStream]: + body = text.encode("utf-8") + content_length = str(len(body)) + content_type = "text/plain; charset=utf-8" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_html(html: str) -> tuple[dict[str, str], ByteStream]: + body = html.encode("utf-8") + content_length = str(len(body)) + content_type = "text/html; charset=utf-8" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_json(json: Any) -> tuple[dict[str, str], ByteStream]: + body = json_dumps( + json, ensure_ascii=False, separators=(",", ":"), allow_nan=False + ).encode("utf-8") + content_length = str(len(body)) + content_type = "application/json" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_request( + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: Any | None = None, + boundary: bytes | None = None, +) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]: + """ + Handles encoding the given `content`, `data`, `files`, and `json`, + returning 
a two-tuple of (<headers>, <stream>).
+ """
+ if data is not None and not isinstance(data, Mapping):
+ # We prefer to separate `content=<bytes|str|byte iterator|bytes aiterator>`
+ # for raw request content, and `data=<form data>
` for url encoded or + # multipart form content. + # + # However for compat with requests, we *do* still support + # `data=` usages. We deal with that case here, treating it + # as if `content=<...>` had been supplied instead. + message = "Use 'content=<...>' to upload raw bytes/text content." + warnings.warn(message, DeprecationWarning, stacklevel=2) + return encode_content(data) + + if content is not None: + return encode_content(content) + elif files: + return encode_multipart_data(data or {}, files, boundary) + elif data: + return encode_urlencoded_data(data) + elif json is not None: + return encode_json(json) + + return {}, ByteStream(b"") + + +def encode_response( + content: ResponseContent | None = None, + text: str | None = None, + html: str | None = None, + json: Any | None = None, +) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]: + """ + Handles encoding the given `content`, returning a two-tuple of + (, ). + """ + if content is not None: + return encode_content(content) + elif text is not None: + return encode_text(text) + elif html is not None: + return encode_html(html) + elif json is not None: + return encode_json(json) + + return {}, ByteStream(b"") diff --git a/.venv/lib/python3.12/site-packages/httpx/_decoders.py b/.venv/lib/python3.12/site-packages/httpx/_decoders.py new file mode 100644 index 0000000..899dfad --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_decoders.py @@ -0,0 +1,393 @@ +""" +Handlers for Content-Encoding. + +See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding +""" + +from __future__ import annotations + +import codecs +import io +import typing +import zlib + +from ._exceptions import DecodingError + +# Brotli support is optional +try: + # The C bindings in `brotli` are recommended for CPython. + import brotli +except ImportError: # pragma: no cover + try: + # The CFFI bindings in `brotlicffi` are recommended for PyPy + # and other environments. + import brotlicffi as brotli + except ImportError: + brotli = None + + +# Zstandard support is optional +try: + import zstandard +except ImportError: # pragma: no cover + zstandard = None # type: ignore + + +class ContentDecoder: + def decode(self, data: bytes) -> bytes: + raise NotImplementedError() # pragma: no cover + + def flush(self) -> bytes: + raise NotImplementedError() # pragma: no cover + + +class IdentityDecoder(ContentDecoder): + """ + Handle unencoded data. + """ + + def decode(self, data: bytes) -> bytes: + return data + + def flush(self) -> bytes: + return b"" + + +class DeflateDecoder(ContentDecoder): + """ + Handle 'deflate' decoding. + + See: https://stackoverflow.com/questions/1838699 + """ + + def __init__(self) -> None: + self.first_attempt = True + self.decompressor = zlib.decompressobj() + + def decode(self, data: bytes) -> bytes: + was_first_attempt = self.first_attempt + self.first_attempt = False + try: + return self.decompressor.decompress(data) + except zlib.error as exc: + if was_first_attempt: + self.decompressor = zlib.decompressobj(-zlib.MAX_WBITS) + return self.decode(data) + raise DecodingError(str(exc)) from exc + + def flush(self) -> bytes: + try: + return self.decompressor.flush() + except zlib.error as exc: # pragma: no cover + raise DecodingError(str(exc)) from exc + + +class GZipDecoder(ContentDecoder): + """ + Handle 'gzip' decoding. 
+ + See: https://stackoverflow.com/questions/1838699 + """ + + def __init__(self) -> None: + self.decompressor = zlib.decompressobj(zlib.MAX_WBITS | 16) + + def decode(self, data: bytes) -> bytes: + try: + return self.decompressor.decompress(data) + except zlib.error as exc: + raise DecodingError(str(exc)) from exc + + def flush(self) -> bytes: + try: + return self.decompressor.flush() + except zlib.error as exc: # pragma: no cover + raise DecodingError(str(exc)) from exc + + +class BrotliDecoder(ContentDecoder): + """ + Handle 'brotli' decoding. + + Requires `pip install brotlipy`. See: https://brotlipy.readthedocs.io/ + or `pip install brotli`. See https://github.com/google/brotli + Supports both 'brotlipy' and 'Brotli' packages since they share an import + name. The top branches are for 'brotlipy' and bottom branches for 'Brotli' + """ + + def __init__(self) -> None: + if brotli is None: # pragma: no cover + raise ImportError( + "Using 'BrotliDecoder', but neither of the 'brotlicffi' or 'brotli' " + "packages have been installed. " + "Make sure to install httpx using `pip install httpx[brotli]`." + ) from None + + self.decompressor = brotli.Decompressor() + self.seen_data = False + self._decompress: typing.Callable[[bytes], bytes] + if hasattr(self.decompressor, "decompress"): + # The 'brotlicffi' package. + self._decompress = self.decompressor.decompress # pragma: no cover + else: + # The 'brotli' package. + self._decompress = self.decompressor.process # pragma: no cover + + def decode(self, data: bytes) -> bytes: + if not data: + return b"" + self.seen_data = True + try: + return self._decompress(data) + except brotli.error as exc: + raise DecodingError(str(exc)) from exc + + def flush(self) -> bytes: + if not self.seen_data: + return b"" + try: + if hasattr(self.decompressor, "finish"): + # Only available in the 'brotlicffi' package. + + # As the decompressor decompresses eagerly, this + # will never actually emit any data. However, it will potentially throw + # errors if a truncated or damaged data stream has been used. + self.decompressor.finish() # pragma: no cover + return b"" + except brotli.error as exc: # pragma: no cover + raise DecodingError(str(exc)) from exc + + +class ZStandardDecoder(ContentDecoder): + """ + Handle 'zstd' RFC 8878 decoding. + + Requires `pip install zstandard`. + Can be installed as a dependency of httpx using `pip install httpx[zstd]`. + """ + + # inspired by the ZstdDecoder implementation in urllib3 + def __init__(self) -> None: + if zstandard is None: # pragma: no cover + raise ImportError( + "Using 'ZStandardDecoder', ..." + "Make sure to install httpx using `pip install httpx[zstd]`." 
+ ) from None + + self.decompressor = zstandard.ZstdDecompressor().decompressobj() + self.seen_data = False + + def decode(self, data: bytes) -> bytes: + assert zstandard is not None + self.seen_data = True + output = io.BytesIO() + try: + output.write(self.decompressor.decompress(data)) + while self.decompressor.eof and self.decompressor.unused_data: + unused_data = self.decompressor.unused_data + self.decompressor = zstandard.ZstdDecompressor().decompressobj() + output.write(self.decompressor.decompress(unused_data)) + except zstandard.ZstdError as exc: + raise DecodingError(str(exc)) from exc + return output.getvalue() + + def flush(self) -> bytes: + if not self.seen_data: + return b"" + ret = self.decompressor.flush() # note: this is a no-op + if not self.decompressor.eof: + raise DecodingError("Zstandard data is incomplete") # pragma: no cover + return bytes(ret) + + +class MultiDecoder(ContentDecoder): + """ + Handle the case where multiple encodings have been applied. + """ + + def __init__(self, children: typing.Sequence[ContentDecoder]) -> None: + """ + 'children' should be a sequence of decoders in the order in which + each was applied. + """ + # Note that we reverse the order for decoding. + self.children = list(reversed(children)) + + def decode(self, data: bytes) -> bytes: + for child in self.children: + data = child.decode(data) + return data + + def flush(self) -> bytes: + data = b"" + for child in self.children: + data = child.decode(data) + child.flush() + return data + + +class ByteChunker: + """ + Handles returning byte content in fixed-size chunks. + """ + + def __init__(self, chunk_size: int | None = None) -> None: + self._buffer = io.BytesIO() + self._chunk_size = chunk_size + + def decode(self, content: bytes) -> list[bytes]: + if self._chunk_size is None: + return [content] if content else [] + + self._buffer.write(content) + if self._buffer.tell() >= self._chunk_size: + value = self._buffer.getvalue() + chunks = [ + value[i : i + self._chunk_size] + for i in range(0, len(value), self._chunk_size) + ] + if len(chunks[-1]) == self._chunk_size: + self._buffer.seek(0) + self._buffer.truncate() + return chunks + else: + self._buffer.seek(0) + self._buffer.write(chunks[-1]) + self._buffer.truncate() + return chunks[:-1] + else: + return [] + + def flush(self) -> list[bytes]: + value = self._buffer.getvalue() + self._buffer.seek(0) + self._buffer.truncate() + return [value] if value else [] + + +class TextChunker: + """ + Handles returning text content in fixed-size chunks. 
+ """ + + def __init__(self, chunk_size: int | None = None) -> None: + self._buffer = io.StringIO() + self._chunk_size = chunk_size + + def decode(self, content: str) -> list[str]: + if self._chunk_size is None: + return [content] if content else [] + + self._buffer.write(content) + if self._buffer.tell() >= self._chunk_size: + value = self._buffer.getvalue() + chunks = [ + value[i : i + self._chunk_size] + for i in range(0, len(value), self._chunk_size) + ] + if len(chunks[-1]) == self._chunk_size: + self._buffer.seek(0) + self._buffer.truncate() + return chunks + else: + self._buffer.seek(0) + self._buffer.write(chunks[-1]) + self._buffer.truncate() + return chunks[:-1] + else: + return [] + + def flush(self) -> list[str]: + value = self._buffer.getvalue() + self._buffer.seek(0) + self._buffer.truncate() + return [value] if value else [] + + +class TextDecoder: + """ + Handles incrementally decoding bytes into text + """ + + def __init__(self, encoding: str = "utf-8") -> None: + self.decoder = codecs.getincrementaldecoder(encoding)(errors="replace") + + def decode(self, data: bytes) -> str: + return self.decoder.decode(data) + + def flush(self) -> str: + return self.decoder.decode(b"", True) + + +class LineDecoder: + """ + Handles incrementally reading lines from text. + + Has the same behaviour as the stdllib splitlines, + but handling the input iteratively. + """ + + def __init__(self) -> None: + self.buffer: list[str] = [] + self.trailing_cr: bool = False + + def decode(self, text: str) -> list[str]: + # See https://docs.python.org/3/library/stdtypes.html#str.splitlines + NEWLINE_CHARS = "\n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029" + + # We always push a trailing `\r` into the next decode iteration. + if self.trailing_cr: + text = "\r" + text + self.trailing_cr = False + if text.endswith("\r"): + self.trailing_cr = True + text = text[:-1] + + if not text: + # NOTE: the edge case input of empty text doesn't occur in practice, + # because other httpx internals filter out this value + return [] # pragma: no cover + + trailing_newline = text[-1] in NEWLINE_CHARS + lines = text.splitlines() + + if len(lines) == 1 and not trailing_newline: + # No new lines, buffer the input and continue. + self.buffer.append(lines[0]) + return [] + + if self.buffer: + # Include any existing buffer in the first portion of the + # splitlines result. + lines = ["".join(self.buffer) + lines[0]] + lines[1:] + self.buffer = [] + + if not trailing_newline: + # If the last segment of splitlines is not newline terminated, + # then drop it from our output and start a new buffer. 
+ self.buffer = [lines.pop()] + + return lines + + def flush(self) -> list[str]: + if not self.buffer and not self.trailing_cr: + return [] + + lines = ["".join(self.buffer)] + self.buffer = [] + self.trailing_cr = False + return lines + + +SUPPORTED_DECODERS = { + "identity": IdentityDecoder, + "gzip": GZipDecoder, + "deflate": DeflateDecoder, + "br": BrotliDecoder, + "zstd": ZStandardDecoder, +} + + +if brotli is None: + SUPPORTED_DECODERS.pop("br") # pragma: no cover +if zstandard is None: + SUPPORTED_DECODERS.pop("zstd") # pragma: no cover diff --git a/.venv/lib/python3.12/site-packages/httpx/_exceptions.py b/.venv/lib/python3.12/site-packages/httpx/_exceptions.py new file mode 100644 index 0000000..77f45a6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_exceptions.py @@ -0,0 +1,379 @@ +""" +Our exception hierarchy: + +* HTTPError + x RequestError + + TransportError + - TimeoutException + · ConnectTimeout + · ReadTimeout + · WriteTimeout + · PoolTimeout + - NetworkError + · ConnectError + · ReadError + · WriteError + · CloseError + - ProtocolError + · LocalProtocolError + · RemoteProtocolError + - ProxyError + - UnsupportedProtocol + + DecodingError + + TooManyRedirects + x HTTPStatusError +* InvalidURL +* CookieConflict +* StreamError + x StreamConsumed + x StreamClosed + x ResponseNotRead + x RequestNotRead +""" + +from __future__ import annotations + +import contextlib +import typing + +if typing.TYPE_CHECKING: + from ._models import Request, Response # pragma: no cover + +__all__ = [ + "CloseError", + "ConnectError", + "ConnectTimeout", + "CookieConflict", + "DecodingError", + "HTTPError", + "HTTPStatusError", + "InvalidURL", + "LocalProtocolError", + "NetworkError", + "PoolTimeout", + "ProtocolError", + "ProxyError", + "ReadError", + "ReadTimeout", + "RemoteProtocolError", + "RequestError", + "RequestNotRead", + "ResponseNotRead", + "StreamClosed", + "StreamConsumed", + "StreamError", + "TimeoutException", + "TooManyRedirects", + "TransportError", + "UnsupportedProtocol", + "WriteError", + "WriteTimeout", +] + + +class HTTPError(Exception): + """ + Base class for `RequestError` and `HTTPStatusError`. + + Useful for `try...except` blocks when issuing a request, + and then calling `.raise_for_status()`. + + For example: + + ``` + try: + response = httpx.get("https://www.example.com") + response.raise_for_status() + except httpx.HTTPError as exc: + print(f"HTTP Exception for {exc.request.url} - {exc}") + ``` + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + self._request: Request | None = None + + @property + def request(self) -> Request: + if self._request is None: + raise RuntimeError("The .request property has not been set.") + return self._request + + @request.setter + def request(self, request: Request) -> None: + self._request = request + + +class RequestError(HTTPError): + """ + Base class for all exceptions that may occur when issuing a `.request()`. + """ + + def __init__(self, message: str, *, request: Request | None = None) -> None: + super().__init__(message) + # At the point an exception is raised we won't typically have a request + # instance to associate it with. + # + # The 'request_context' context manager is used within the Client and + # Response methods in order to ensure that any raised exceptions + # have a `.request` property set on them. + self._request = request + + +class TransportError(RequestError): + """ + Base class for all exceptions that occur at the level of the Transport API. 
+ """ + + +# Timeout exceptions... + + +class TimeoutException(TransportError): + """ + The base class for timeout errors. + + An operation has timed out. + """ + + +class ConnectTimeout(TimeoutException): + """ + Timed out while connecting to the host. + """ + + +class ReadTimeout(TimeoutException): + """ + Timed out while receiving data from the host. + """ + + +class WriteTimeout(TimeoutException): + """ + Timed out while sending data to the host. + """ + + +class PoolTimeout(TimeoutException): + """ + Timed out waiting to acquire a connection from the pool. + """ + + +# Core networking exceptions... + + +class NetworkError(TransportError): + """ + The base class for network-related errors. + + An error occurred while interacting with the network. + """ + + +class ReadError(NetworkError): + """ + Failed to receive data from the network. + """ + + +class WriteError(NetworkError): + """ + Failed to send data through the network. + """ + + +class ConnectError(NetworkError): + """ + Failed to establish a connection. + """ + + +class CloseError(NetworkError): + """ + Failed to close a connection. + """ + + +# Other transport exceptions... + + +class ProxyError(TransportError): + """ + An error occurred while establishing a proxy connection. + """ + + +class UnsupportedProtocol(TransportError): + """ + Attempted to make a request to an unsupported protocol. + + For example issuing a request to `ftp://www.example.com`. + """ + + +class ProtocolError(TransportError): + """ + The protocol was violated. + """ + + +class LocalProtocolError(ProtocolError): + """ + A protocol was violated by the client. + + For example if the user instantiated a `Request` instance explicitly, + failed to include the mandatory `Host:` header, and then issued it directly + using `client.send()`. + """ + + +class RemoteProtocolError(ProtocolError): + """ + The protocol was violated by the server. + + For example, returning malformed HTTP. + """ + + +# Other request exceptions... + + +class DecodingError(RequestError): + """ + Decoding of the response failed, due to a malformed encoding. + """ + + +class TooManyRedirects(RequestError): + """ + Too many redirects. + """ + + +# Client errors + + +class HTTPStatusError(HTTPError): + """ + The response had an error HTTP status of 4xx or 5xx. + + May be raised when calling `response.raise_for_status()` + """ + + def __init__(self, message: str, *, request: Request, response: Response) -> None: + super().__init__(message) + self.request = request + self.response = response + + +class InvalidURL(Exception): + """ + URL is improperly formed or cannot be parsed. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +class CookieConflict(Exception): + """ + Attempted to lookup a cookie by name, but multiple cookies existed. + + Can occur when calling `response.cookies.get(...)`. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +# Stream exceptions... + +# These may occur as the result of a programming error, by accessing +# the request/response stream in an invalid manner. + + +class StreamError(RuntimeError): + """ + The base class for stream exceptions. + + The developer made an error in accessing the request stream in + an invalid way. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +class StreamConsumed(StreamError): + """ + Attempted to read or stream content, but the content has already + been streamed. 
+ """ + + def __init__(self) -> None: + message = ( + "Attempted to read or stream some content, but the content has " + "already been streamed. For requests, this could be due to passing " + "a generator as request content, and then receiving a redirect " + "response or a secondary request as part of an authentication flow." + "For responses, this could be due to attempting to stream the response " + "content more than once." + ) + super().__init__(message) + + +class StreamClosed(StreamError): + """ + Attempted to read or stream response content, but the request has been + closed. + """ + + def __init__(self) -> None: + message = ( + "Attempted to read or stream content, but the stream has " "been closed." + ) + super().__init__(message) + + +class ResponseNotRead(StreamError): + """ + Attempted to access streaming response content, without having called `read()`. + """ + + def __init__(self) -> None: + message = ( + "Attempted to access streaming response content," + " without having called `read()`." + ) + super().__init__(message) + + +class RequestNotRead(StreamError): + """ + Attempted to access streaming request content, without having called `read()`. + """ + + def __init__(self) -> None: + message = ( + "Attempted to access streaming request content," + " without having called `read()`." + ) + super().__init__(message) + + +@contextlib.contextmanager +def request_context( + request: Request | None = None, +) -> typing.Iterator[None]: + """ + A context manager that can be used to attach the given request context + to any `RequestError` exceptions that are raised within the block. + """ + try: + yield + except RequestError as exc: + if request is not None: + exc.request = request + raise exc diff --git a/.venv/lib/python3.12/site-packages/httpx/_main.py b/.venv/lib/python3.12/site-packages/httpx/_main.py new file mode 100644 index 0000000..cffa4bb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_main.py @@ -0,0 +1,506 @@ +from __future__ import annotations + +import functools +import json +import sys +import typing + +import click +import pygments.lexers +import pygments.util +import rich.console +import rich.markup +import rich.progress +import rich.syntax +import rich.table + +from ._client import Client +from ._exceptions import RequestError +from ._models import Response +from ._status_codes import codes + +if typing.TYPE_CHECKING: + import httpcore # pragma: no cover + + +def print_help() -> None: + console = rich.console.Console() + + console.print("[bold]HTTPX :butterfly:", justify="center") + console.print() + console.print("A next generation HTTP client.", justify="center") + console.print() + console.print( + "Usage: [bold]httpx[/bold] [cyan] [OPTIONS][/cyan] ", justify="left" + ) + console.print() + + table = rich.table.Table.grid(padding=1, pad_edge=True) + table.add_column("Parameter", no_wrap=True, justify="left", style="bold") + table.add_column("Description") + table.add_row( + "-m, --method [cyan]METHOD", + "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD.\n" + "[Default: GET, or POST if a request body is included]", + ) + table.add_row( + "-p, --params [cyan] ...", + "Query parameters to include in the request URL.", + ) + table.add_row( + "-c, --content [cyan]TEXT", "Byte content to include in the request body." + ) + table.add_row( + "-d, --data [cyan] ...", "Form data to include in the request body." 
+ ) + table.add_row( + "-f, --files [cyan] ...", + "Form files to include in the request body.", + ) + table.add_row("-j, --json [cyan]TEXT", "JSON data to include in the request body.") + table.add_row( + "-h, --headers [cyan] ...", + "Include additional HTTP headers in the request.", + ) + table.add_row( + "--cookies [cyan] ...", "Cookies to include in the request." + ) + table.add_row( + "--auth [cyan]", + "Username and password to include in the request. Specify '-' for the password" + " to use a password prompt. Note that using --verbose/-v will expose" + " the Authorization header, including the password encoding" + " in a trivially reversible format.", + ) + + table.add_row( + "--proxy [cyan]URL", + "Send the request via a proxy. Should be the URL giving the proxy address.", + ) + + table.add_row( + "--timeout [cyan]FLOAT", + "Timeout value to use for network operations, such as establishing the" + " connection, reading some data, etc... [Default: 5.0]", + ) + + table.add_row("--follow-redirects", "Automatically follow redirects.") + table.add_row("--no-verify", "Disable SSL verification.") + table.add_row( + "--http2", "Send the request using HTTP/2, if the remote server supports it." + ) + + table.add_row( + "--download [cyan]FILE", + "Save the response content as a file, rather than displaying it.", + ) + + table.add_row("-v, --verbose", "Verbose output. Show request as well as response.") + table.add_row("--help", "Show this message and exit.") + console.print(table) + + +def get_lexer_for_response(response: Response) -> str: + content_type = response.headers.get("Content-Type") + if content_type is not None: + mime_type, _, _ = content_type.partition(";") + try: + return typing.cast( + str, pygments.lexers.get_lexer_for_mimetype(mime_type.strip()).name + ) + except pygments.util.ClassNotFound: # pragma: no cover + pass + return "" # pragma: no cover + + +def format_request_headers(request: httpcore.Request, http2: bool = False) -> str: + version = "HTTP/2" if http2 else "HTTP/1.1" + headers = [ + (name.lower() if http2 else name, value) for name, value in request.headers + ] + method = request.method.decode("ascii") + target = request.url.target.decode("ascii") + lines = [f"{method} {target} {version}"] + [ + f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers + ] + return "\n".join(lines) + + +def format_response_headers( + http_version: bytes, + status: int, + reason_phrase: bytes | None, + headers: list[tuple[bytes, bytes]], +) -> str: + version = http_version.decode("ascii") + reason = ( + codes.get_reason_phrase(status) + if reason_phrase is None + else reason_phrase.decode("ascii") + ) + lines = [f"{version} {status} {reason}"] + [ + f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers + ] + return "\n".join(lines) + + +def print_request_headers(request: httpcore.Request, http2: bool = False) -> None: + console = rich.console.Console() + http_text = format_request_headers(request, http2=http2) + syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + + +def print_response_headers( + http_version: bytes, + status: int, + reason_phrase: bytes | None, + headers: list[tuple[bytes, bytes]], +) -> None: + console = rich.console.Console() + http_text = format_response_headers(http_version, status, reason_phrase, headers) + syntax = rich.syntax.Syntax(http_text, "http", 
theme="ansi_dark", word_wrap=True) + console.print(syntax) + syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + + +def print_response(response: Response) -> None: + console = rich.console.Console() + lexer_name = get_lexer_for_response(response) + if lexer_name: + if lexer_name.lower() == "json": + try: + data = response.json() + text = json.dumps(data, indent=4) + except ValueError: # pragma: no cover + text = response.text + else: + text = response.text + + syntax = rich.syntax.Syntax(text, lexer_name, theme="ansi_dark", word_wrap=True) + console.print(syntax) + else: + console.print(f"<{len(response.content)} bytes of binary data>") + + +_PCTRTT = typing.Tuple[typing.Tuple[str, str], ...] +_PCTRTTT = typing.Tuple[_PCTRTT, ...] +_PeerCertRetDictType = typing.Dict[str, typing.Union[str, _PCTRTTT, _PCTRTT]] + + +def format_certificate(cert: _PeerCertRetDictType) -> str: # pragma: no cover + lines = [] + for key, value in cert.items(): + if isinstance(value, (list, tuple)): + lines.append(f"* {key}:") + for item in value: + if key in ("subject", "issuer"): + for sub_item in item: + lines.append(f"* {sub_item[0]}: {sub_item[1]!r}") + elif isinstance(item, tuple) and len(item) == 2: + lines.append(f"* {item[0]}: {item[1]!r}") + else: + lines.append(f"* {item!r}") + else: + lines.append(f"* {key}: {value!r}") + return "\n".join(lines) + + +def trace( + name: str, info: typing.Mapping[str, typing.Any], verbose: bool = False +) -> None: + console = rich.console.Console() + if name == "connection.connect_tcp.started" and verbose: + host = info["host"] + console.print(f"* Connecting to {host!r}") + elif name == "connection.connect_tcp.complete" and verbose: + stream = info["return_value"] + server_addr = stream.get_extra_info("server_addr") + console.print(f"* Connected to {server_addr[0]!r} on port {server_addr[1]}") + elif name == "connection.start_tls.complete" and verbose: # pragma: no cover + stream = info["return_value"] + ssl_object = stream.get_extra_info("ssl_object") + version = ssl_object.version() + cipher = ssl_object.cipher() + server_cert = ssl_object.getpeercert() + alpn = ssl_object.selected_alpn_protocol() + console.print(f"* SSL established using {version!r} / {cipher[0]!r}") + console.print(f"* Selected ALPN protocol: {alpn!r}") + if server_cert: + console.print("* Server certificate:") + console.print(format_certificate(server_cert)) + elif name == "http11.send_request_headers.started" and verbose: + request = info["request"] + print_request_headers(request, http2=False) + elif name == "http2.send_request_headers.started" and verbose: # pragma: no cover + request = info["request"] + print_request_headers(request, http2=True) + elif name == "http11.receive_response_headers.complete": + http_version, status, reason_phrase, headers = info["return_value"] + print_response_headers(http_version, status, reason_phrase, headers) + elif name == "http2.receive_response_headers.complete": # pragma: no cover + status, headers = info["return_value"] + http_version = b"HTTP/2" + reason_phrase = None + print_response_headers(http_version, status, reason_phrase, headers) + + +def download_response(response: Response, download: typing.BinaryIO) -> None: + console = rich.console.Console() + console.print() + content_length = response.headers.get("Content-Length") + with rich.progress.Progress( + "[progress.description]{task.description}", + "[progress.percentage]{task.percentage:>3.0f}%", + rich.progress.BarColumn(bar_width=None), + 
rich.progress.DownloadColumn(), + rich.progress.TransferSpeedColumn(), + ) as progress: + description = f"Downloading [bold]{rich.markup.escape(download.name)}" + download_task = progress.add_task( + description, + total=int(content_length or 0), + start=content_length is not None, + ) + for chunk in response.iter_bytes(): + download.write(chunk) + progress.update(download_task, completed=response.num_bytes_downloaded) + + +def validate_json( + ctx: click.Context, + param: click.Option | click.Parameter, + value: typing.Any, +) -> typing.Any: + if value is None: + return None + + try: + return json.loads(value) + except json.JSONDecodeError: # pragma: no cover + raise click.BadParameter("Not valid JSON") + + +def validate_auth( + ctx: click.Context, + param: click.Option | click.Parameter, + value: typing.Any, +) -> typing.Any: + if value == (None, None): + return None + + username, password = value + if password == "-": # pragma: no cover + password = click.prompt("Password", hide_input=True) + return (username, password) + + +def handle_help( + ctx: click.Context, + param: click.Option | click.Parameter, + value: typing.Any, +) -> None: + if not value or ctx.resilient_parsing: + return + + print_help() + ctx.exit() + + +@click.command(add_help_option=False) +@click.argument("url", type=str) +@click.option( + "--method", + "-m", + "method", + type=str, + help=( + "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD. " + "[Default: GET, or POST if a request body is included]" + ), +) +@click.option( + "--params", + "-p", + "params", + type=(str, str), + multiple=True, + help="Query parameters to include in the request URL.", +) +@click.option( + "--content", + "-c", + "content", + type=str, + help="Byte content to include in the request body.", +) +@click.option( + "--data", + "-d", + "data", + type=(str, str), + multiple=True, + help="Form data to include in the request body.", +) +@click.option( + "--files", + "-f", + "files", + type=(str, click.File(mode="rb")), + multiple=True, + help="Form files to include in the request body.", +) +@click.option( + "--json", + "-j", + "json", + type=str, + callback=validate_json, + help="JSON data to include in the request body.", +) +@click.option( + "--headers", + "-h", + "headers", + type=(str, str), + multiple=True, + help="Include additional HTTP headers in the request.", +) +@click.option( + "--cookies", + "cookies", + type=(str, str), + multiple=True, + help="Cookies to include in the request.", +) +@click.option( + "--auth", + "auth", + type=(str, str), + default=(None, None), + callback=validate_auth, + help=( + "Username and password to include in the request. " + "Specify '-' for the password to use a password prompt. " + "Note that using --verbose/-v will expose the Authorization header, " + "including the password encoding in a trivially reversible format." + ), +) +@click.option( + "--proxy", + "proxy", + type=str, + default=None, + help="Send the request via a proxy. Should be the URL giving the proxy address.", +) +@click.option( + "--timeout", + "timeout", + type=float, + default=5.0, + help=( + "Timeout value to use for network operations, such as establishing the " + "connection, reading some data, etc... 
[Default: 5.0]" + ), +) +@click.option( + "--follow-redirects", + "follow_redirects", + is_flag=True, + default=False, + help="Automatically follow redirects.", +) +@click.option( + "--no-verify", + "verify", + is_flag=True, + default=True, + help="Disable SSL verification.", +) +@click.option( + "--http2", + "http2", + type=bool, + is_flag=True, + default=False, + help="Send the request using HTTP/2, if the remote server supports it.", +) +@click.option( + "--download", + type=click.File("wb"), + help="Save the response content as a file, rather than displaying it.", +) +@click.option( + "--verbose", + "-v", + type=bool, + is_flag=True, + default=False, + help="Verbose. Show request as well as response.", +) +@click.option( + "--help", + is_flag=True, + is_eager=True, + expose_value=False, + callback=handle_help, + help="Show this message and exit.", +) +def main( + url: str, + method: str, + params: list[tuple[str, str]], + content: str, + data: list[tuple[str, str]], + files: list[tuple[str, click.File]], + json: str, + headers: list[tuple[str, str]], + cookies: list[tuple[str, str]], + auth: tuple[str, str] | None, + proxy: str, + timeout: float, + follow_redirects: bool, + verify: bool, + http2: bool, + download: typing.BinaryIO | None, + verbose: bool, +) -> None: + """ + An HTTP command line client. + Sends a request and displays the response. + """ + if not method: + method = "POST" if content or data or files or json else "GET" + + try: + with Client(proxy=proxy, timeout=timeout, http2=http2, verify=verify) as client: + with client.stream( + method, + url, + params=list(params), + content=content, + data=dict(data), + files=files, # type: ignore + json=json, + headers=headers, + cookies=dict(cookies), + auth=auth, + follow_redirects=follow_redirects, + extensions={"trace": functools.partial(trace, verbose=verbose)}, + ) as response: + if download is not None: + download_response(response, download) + else: + response.read() + if response.content: + print_response(response) + + except RequestError as exc: + console = rich.console.Console() + console.print(f"[red]{type(exc).__name__}[/red]: {exc}") + sys.exit(1) + + sys.exit(0 if response.is_success else 1) diff --git a/.venv/lib/python3.12/site-packages/httpx/_models.py b/.venv/lib/python3.12/site-packages/httpx/_models.py new file mode 100644 index 0000000..67d74bf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_models.py @@ -0,0 +1,1277 @@ +from __future__ import annotations + +import codecs +import datetime +import email.message +import json as jsonlib +import re +import typing +import urllib.request +from collections.abc import Mapping +from http.cookiejar import Cookie, CookieJar + +from ._content import ByteStream, UnattachedStream, encode_request, encode_response +from ._decoders import ( + SUPPORTED_DECODERS, + ByteChunker, + ContentDecoder, + IdentityDecoder, + LineDecoder, + MultiDecoder, + TextChunker, + TextDecoder, +) +from ._exceptions import ( + CookieConflict, + HTTPStatusError, + RequestNotRead, + ResponseNotRead, + StreamClosed, + StreamConsumed, + request_context, +) +from ._multipart import get_multipart_boundary_from_content_type +from ._status_codes import codes +from ._types import ( + AsyncByteStream, + CookieTypes, + HeaderTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestExtensions, + RequestFiles, + ResponseContent, + ResponseExtensions, + SyncByteStream, +) +from ._urls import URL +from ._utils import to_bytes_or_str, to_str + +__all__ = ["Cookies", "Headers", "Request", 
"Response"] + +SENSITIVE_HEADERS = {"authorization", "proxy-authorization"} + + +def _is_known_encoding(encoding: str) -> bool: + """ + Return `True` if `encoding` is a known codec. + """ + try: + codecs.lookup(encoding) + except LookupError: + return False + return True + + +def _normalize_header_key(key: str | bytes, encoding: str | None = None) -> bytes: + """ + Coerce str/bytes into a strictly byte-wise HTTP header key. + """ + return key if isinstance(key, bytes) else key.encode(encoding or "ascii") + + +def _normalize_header_value(value: str | bytes, encoding: str | None = None) -> bytes: + """ + Coerce str/bytes into a strictly byte-wise HTTP header value. + """ + if isinstance(value, bytes): + return value + if not isinstance(value, str): + raise TypeError(f"Header value must be str or bytes, not {type(value)}") + return value.encode(encoding or "ascii") + + +def _parse_content_type_charset(content_type: str) -> str | None: + # We used to use `cgi.parse_header()` here, but `cgi` became a dead battery. + # See: https://peps.python.org/pep-0594/#cgi + msg = email.message.Message() + msg["content-type"] = content_type + return msg.get_content_charset(failobj=None) + + +def _parse_header_links(value: str) -> list[dict[str, str]]: + """ + Returns a list of parsed link headers, for more info see: + https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Link + The generic syntax of those is: + Link: < uri-reference >; param1=value1; param2="value2" + So for instance: + Link; '; type="image/jpeg",;' + would return + [ + {"url": "http:/.../front.jpeg", "type": "image/jpeg"}, + {"url": "http://.../back.jpeg"}, + ] + :param value: HTTP Link entity-header field + :return: list of parsed link headers + """ + links: list[dict[str, str]] = [] + replace_chars = " '\"" + value = value.strip(replace_chars) + if not value: + return links + for val in re.split(", *<", value): + try: + url, params = val.split(";", 1) + except ValueError: + url, params = val, "" + link = {"url": url.strip("<> '\"")} + for param in params.split(";"): + try: + key, value = param.split("=") + except ValueError: + break + link[key.strip(replace_chars)] = value.strip(replace_chars) + links.append(link) + return links + + +def _obfuscate_sensitive_headers( + items: typing.Iterable[tuple[typing.AnyStr, typing.AnyStr]], +) -> typing.Iterator[tuple[typing.AnyStr, typing.AnyStr]]: + for k, v in items: + if to_str(k.lower()) in SENSITIVE_HEADERS: + v = to_bytes_or_str("[secure]", match_type_of=v) + yield k, v + + +class Headers(typing.MutableMapping[str, str]): + """ + HTTP headers, as a case-insensitive multi-dict. + """ + + def __init__( + self, + headers: HeaderTypes | None = None, + encoding: str | None = None, + ) -> None: + self._list = [] # type: typing.List[typing.Tuple[bytes, bytes, bytes]] + + if isinstance(headers, Headers): + self._list = list(headers._list) + elif isinstance(headers, Mapping): + for k, v in headers.items(): + bytes_key = _normalize_header_key(k, encoding) + bytes_value = _normalize_header_value(v, encoding) + self._list.append((bytes_key, bytes_key.lower(), bytes_value)) + elif headers is not None: + for k, v in headers: + bytes_key = _normalize_header_key(k, encoding) + bytes_value = _normalize_header_value(v, encoding) + self._list.append((bytes_key, bytes_key.lower(), bytes_value)) + + self._encoding = encoding + + @property + def encoding(self) -> str: + """ + Header encoding is mandated as ascii, but we allow fallbacks to utf-8 + or iso-8859-1. 
+ """ + if self._encoding is None: + for encoding in ["ascii", "utf-8"]: + for key, value in self.raw: + try: + key.decode(encoding) + value.decode(encoding) + except UnicodeDecodeError: + break + else: + # The else block runs if 'break' did not occur, meaning + # all values fitted the encoding. + self._encoding = encoding + break + else: + # The ISO-8859-1 encoding covers all 256 code points in a byte, + # so will never raise decode errors. + self._encoding = "iso-8859-1" + return self._encoding + + @encoding.setter + def encoding(self, value: str) -> None: + self._encoding = value + + @property + def raw(self) -> list[tuple[bytes, bytes]]: + """ + Returns a list of the raw header items, as byte pairs. + """ + return [(raw_key, value) for raw_key, _, value in self._list] + + def keys(self) -> typing.KeysView[str]: + return {key.decode(self.encoding): None for _, key, value in self._list}.keys() + + def values(self) -> typing.ValuesView[str]: + values_dict: dict[str, str] = {} + for _, key, value in self._list: + str_key = key.decode(self.encoding) + str_value = value.decode(self.encoding) + if str_key in values_dict: + values_dict[str_key] += f", {str_value}" + else: + values_dict[str_key] = str_value + return values_dict.values() + + def items(self) -> typing.ItemsView[str, str]: + """ + Return `(key, value)` items of headers. Concatenate headers + into a single comma separated value when a key occurs multiple times. + """ + values_dict: dict[str, str] = {} + for _, key, value in self._list: + str_key = key.decode(self.encoding) + str_value = value.decode(self.encoding) + if str_key in values_dict: + values_dict[str_key] += f", {str_value}" + else: + values_dict[str_key] = str_value + return values_dict.items() + + def multi_items(self) -> list[tuple[str, str]]: + """ + Return a list of `(key, value)` pairs of headers. Allow multiple + occurrences of the same key without concatenating into a single + comma separated value. + """ + return [ + (key.decode(self.encoding), value.decode(self.encoding)) + for _, key, value in self._list + ] + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Return a header value. If multiple occurrences of the header occur + then concatenate them together with commas. + """ + try: + return self[key] + except KeyError: + return default + + def get_list(self, key: str, split_commas: bool = False) -> list[str]: + """ + Return a list of all header values for a given key. + If `split_commas=True` is passed, then any comma separated header + values are split into multiple return strings. + """ + get_header_key = key.lower().encode(self.encoding) + + values = [ + item_value.decode(self.encoding) + for _, item_key, item_value in self._list + if item_key.lower() == get_header_key + ] + + if not split_commas: + return values + + split_values = [] + for value in values: + split_values.extend([item.strip() for item in value.split(",")]) + return split_values + + def update(self, headers: HeaderTypes | None = None) -> None: # type: ignore + headers = Headers(headers) + for key in headers.keys(): + if key in self: + self.pop(key) + self._list.extend(headers._list) + + def copy(self) -> Headers: + return Headers(self, encoding=self.encoding) + + def __getitem__(self, key: str) -> str: + """ + Return a single header value. + + If there are multiple headers with the same key, then we concatenate + them with commas. 
See: https://tools.ietf.org/html/rfc7230#section-3.2.2 + """ + normalized_key = key.lower().encode(self.encoding) + + items = [ + header_value.decode(self.encoding) + for _, header_key, header_value in self._list + if header_key == normalized_key + ] + + if items: + return ", ".join(items) + + raise KeyError(key) + + def __setitem__(self, key: str, value: str) -> None: + """ + Set the header `key` to `value`, removing any duplicate entries. + Retains insertion order. + """ + set_key = key.encode(self._encoding or "utf-8") + set_value = value.encode(self._encoding or "utf-8") + lookup_key = set_key.lower() + + found_indexes = [ + idx + for idx, (_, item_key, _) in enumerate(self._list) + if item_key == lookup_key + ] + + for idx in reversed(found_indexes[1:]): + del self._list[idx] + + if found_indexes: + idx = found_indexes[0] + self._list[idx] = (set_key, lookup_key, set_value) + else: + self._list.append((set_key, lookup_key, set_value)) + + def __delitem__(self, key: str) -> None: + """ + Remove the header `key`. + """ + del_key = key.lower().encode(self.encoding) + + pop_indexes = [ + idx + for idx, (_, item_key, _) in enumerate(self._list) + if item_key.lower() == del_key + ] + + if not pop_indexes: + raise KeyError(key) + + for idx in reversed(pop_indexes): + del self._list[idx] + + def __contains__(self, key: typing.Any) -> bool: + header_key = key.lower().encode(self.encoding) + return header_key in [key for _, key, _ in self._list] + + def __iter__(self) -> typing.Iterator[typing.Any]: + return iter(self.keys()) + + def __len__(self) -> int: + return len(self._list) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_headers = Headers(other) + except ValueError: + return False + + self_list = [(key, value) for _, key, value in self._list] + other_list = [(key, value) for _, key, value in other_headers._list] + return sorted(self_list) == sorted(other_list) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + + encoding_str = "" + if self.encoding != "ascii": + encoding_str = f", encoding={self.encoding!r}" + + as_list = list(_obfuscate_sensitive_headers(self.multi_items())) + as_dict = dict(as_list) + + no_duplicate_keys = len(as_dict) == len(as_list) + if no_duplicate_keys: + return f"{class_name}({as_dict!r}{encoding_str})" + return f"{class_name}({as_list!r}{encoding_str})" + + +class Request: + def __init__( + self, + method: str, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + stream: SyncByteStream | AsyncByteStream | None = None, + extensions: RequestExtensions | None = None, + ) -> None: + self.method = method.upper() + self.url = URL(url) if params is None else URL(url, params=params) + self.headers = Headers(headers) + self.extensions = {} if extensions is None else dict(extensions) + + if cookies: + Cookies(cookies).set_cookie_header(self) + + if stream is None: + content_type: str | None = self.headers.get("content-type") + headers, stream = encode_request( + content=content, + data=data, + files=files, + json=json, + boundary=get_multipart_boundary_from_content_type( + content_type=content_type.encode(self.headers.encoding) + if content_type + else None + ), + ) + self._prepare(headers) + self.stream = stream + # Load the request body, except for streaming content. 
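+            # Rough illustration of the two paths handled below (hypothetical URL and
+            # payload; the auto-populated headers come from encode_request(), and
+            # `some_byte_stream` stands in for a SyncByteStream/AsyncByteStream):
+            #
+            #     req = Request("POST", "https://example.org/items", json={"name": "demo"})
+            #     # -> body encoded eagerly, Host / Content-Type / Content-Length filled
+            #     #    in by _prepare(), and req.content is readable straight away.
+            #
+            #     req = Request("POST", "https://example.org/items", stream=some_byte_stream)
+            #     # -> no headers auto-populated, body left unread (the `else` branch).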
+ if isinstance(stream, ByteStream): + self.read() + else: + # There's an important distinction between `Request(content=...)`, + # and `Request(stream=...)`. + # + # Using `content=...` implies automatically populated `Host` and content + # headers, of either `Content-Length: ...` or `Transfer-Encoding: chunked`. + # + # Using `stream=...` will not automatically include *any* + # auto-populated headers. + # + # As an end-user you don't really need `stream=...`. It's only + # useful when: + # + # * Preserving the request stream when copying requests, eg for redirects. + # * Creating request instances on the *server-side* of the transport API. + self.stream = stream + + def _prepare(self, default_headers: dict[str, str]) -> None: + for key, value in default_headers.items(): + # Ignore Transfer-Encoding if the Content-Length has been set explicitly. + if key.lower() == "transfer-encoding" and "Content-Length" in self.headers: + continue + self.headers.setdefault(key, value) + + auto_headers: list[tuple[bytes, bytes]] = [] + + has_host = "Host" in self.headers + has_content_length = ( + "Content-Length" in self.headers or "Transfer-Encoding" in self.headers + ) + + if not has_host and self.url.host: + auto_headers.append((b"Host", self.url.netloc)) + if not has_content_length and self.method in ("POST", "PUT", "PATCH"): + auto_headers.append((b"Content-Length", b"0")) + + self.headers = Headers(auto_headers + self.headers.raw) + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + raise RequestNotRead() + return self._content + + def read(self) -> bytes: + """ + Read and return the request content. + """ + if not hasattr(self, "_content"): + assert isinstance(self.stream, typing.Iterable) + self._content = b"".join(self.stream) + if not isinstance(self.stream, ByteStream): + # If a streaming request has been read entirely into memory, then + # we can replace the stream with a raw bytes implementation, + # to ensure that any non-replayable streams can still be used. + self.stream = ByteStream(self._content) + return self._content + + async def aread(self) -> bytes: + """ + Read and return the request content. + """ + if not hasattr(self, "_content"): + assert isinstance(self.stream, typing.AsyncIterable) + self._content = b"".join([part async for part in self.stream]) + if not isinstance(self.stream, ByteStream): + # If a streaming request has been read entirely into memory, then + # we can replace the stream with a raw bytes implementation, + # to ensure that any non-replayable streams can still be used. 
+ self.stream = ByteStream(self._content) + return self._content + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + url = str(self.url) + return f"<{class_name}({self.method!r}, {url!r})>" + + def __getstate__(self) -> dict[str, typing.Any]: + return { + name: value + for name, value in self.__dict__.items() + if name not in ["extensions", "stream"] + } + + def __setstate__(self, state: dict[str, typing.Any]) -> None: + for name, value in state.items(): + setattr(self, name, value) + self.extensions = {} + self.stream = UnattachedStream() + + +class Response: + def __init__( + self, + status_code: int, + *, + headers: HeaderTypes | None = None, + content: ResponseContent | None = None, + text: str | None = None, + html: str | None = None, + json: typing.Any = None, + stream: SyncByteStream | AsyncByteStream | None = None, + request: Request | None = None, + extensions: ResponseExtensions | None = None, + history: list[Response] | None = None, + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", + ) -> None: + self.status_code = status_code + self.headers = Headers(headers) + + self._request: Request | None = request + + # When follow_redirects=False and a redirect is received, + # the client will set `response.next_request`. + self.next_request: Request | None = None + + self.extensions = {} if extensions is None else dict(extensions) + self.history = [] if history is None else list(history) + + self.is_closed = False + self.is_stream_consumed = False + + self.default_encoding = default_encoding + + if stream is None: + headers, stream = encode_response(content, text, html, json) + self._prepare(headers) + self.stream = stream + if isinstance(stream, ByteStream): + # Load the response body, except for streaming content. + self.read() + else: + # There's an important distinction between `Response(content=...)`, + # and `Response(stream=...)`. + # + # Using `content=...` implies automatically populated content headers, + # of either `Content-Length: ...` or `Transfer-Encoding: chunked`. + # + # Using `stream=...` will not automatically include any content headers. + # + # As an end-user you don't really need `stream=...`. It's only + # useful when creating response instances having received a stream + # from the transport API. + self.stream = stream + + self._num_bytes_downloaded = 0 + + def _prepare(self, default_headers: dict[str, str]) -> None: + for key, value in default_headers.items(): + # Ignore Transfer-Encoding if the Content-Length has been set explicitly. + if key.lower() == "transfer-encoding" and "content-length" in self.headers: + continue + self.headers.setdefault(key, value) + + @property + def elapsed(self) -> datetime.timedelta: + """ + Returns the time taken for the complete request/response + cycle to complete. + """ + if not hasattr(self, "_elapsed"): + raise RuntimeError( + "'.elapsed' may only be accessed after the response " + "has been read or closed." + ) + return self._elapsed + + @elapsed.setter + def elapsed(self, elapsed: datetime.timedelta) -> None: + self._elapsed = elapsed + + @property + def request(self) -> Request: + """ + Returns the request instance associated to the current response. + """ + if self._request is None: + raise RuntimeError( + "The request instance has not been set on this response." 
+ ) + return self._request + + @request.setter + def request(self, value: Request) -> None: + self._request = value + + @property + def http_version(self) -> str: + try: + http_version: bytes = self.extensions["http_version"] + except KeyError: + return "HTTP/1.1" + else: + return http_version.decode("ascii", errors="ignore") + + @property + def reason_phrase(self) -> str: + try: + reason_phrase: bytes = self.extensions["reason_phrase"] + except KeyError: + return codes.get_reason_phrase(self.status_code) + else: + return reason_phrase.decode("ascii", errors="ignore") + + @property + def url(self) -> URL: + """ + Returns the URL for which the request was made. + """ + return self.request.url + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + raise ResponseNotRead() + return self._content + + @property + def text(self) -> str: + if not hasattr(self, "_text"): + content = self.content + if not content: + self._text = "" + else: + decoder = TextDecoder(encoding=self.encoding or "utf-8") + self._text = "".join([decoder.decode(self.content), decoder.flush()]) + return self._text + + @property + def encoding(self) -> str | None: + """ + Return an encoding to use for decoding the byte content into text. + The priority for determining this is given by... + + * `.encoding = <>` has been set explicitly. + * The encoding as specified by the charset parameter in the Content-Type header. + * The encoding as determined by `default_encoding`, which may either be + a string like "utf-8" indicating the encoding to use, or may be a callable + which enables charset autodetection. + """ + if not hasattr(self, "_encoding"): + encoding = self.charset_encoding + if encoding is None or not _is_known_encoding(encoding): + if isinstance(self.default_encoding, str): + encoding = self.default_encoding + elif hasattr(self, "_content"): + encoding = self.default_encoding(self._content) + self._encoding = encoding or "utf-8" + return self._encoding + + @encoding.setter + def encoding(self, value: str) -> None: + """ + Set the encoding to use for decoding the byte content into text. + + If the `text` attribute has been accessed, attempting to set the + encoding will throw a ValueError. + """ + if hasattr(self, "_text"): + raise ValueError( + "Setting encoding after `text` has been accessed is not allowed." + ) + self._encoding = value + + @property + def charset_encoding(self) -> str | None: + """ + Return the encoding, as specified by the Content-Type header. + """ + content_type = self.headers.get("Content-Type") + if content_type is None: + return None + + return _parse_content_type_charset(content_type) + + def _get_content_decoder(self) -> ContentDecoder: + """ + Returns a decoder instance which can be used to decode the raw byte + content, depending on the Content-Encoding used in the response. + """ + if not hasattr(self, "_decoder"): + decoders: list[ContentDecoder] = [] + values = self.headers.get_list("content-encoding", split_commas=True) + for value in values: + value = value.strip().lower() + try: + decoder_cls = SUPPORTED_DECODERS[value] + decoders.append(decoder_cls()) + except KeyError: + continue + + if len(decoders) == 1: + self._decoder = decoders[0] + elif len(decoders) > 1: + self._decoder = MultiDecoder(children=decoders) + else: + self._decoder = IdentityDecoder() + + return self._decoder + + @property + def is_informational(self) -> bool: + """ + A property which is `True` for 1xx status codes, `False` otherwise. 
+ """ + return codes.is_informational(self.status_code) + + @property + def is_success(self) -> bool: + """ + A property which is `True` for 2xx status codes, `False` otherwise. + """ + return codes.is_success(self.status_code) + + @property + def is_redirect(self) -> bool: + """ + A property which is `True` for 3xx status codes, `False` otherwise. + + Note that not all responses with a 3xx status code indicate a URL redirect. + + Use `response.has_redirect_location` to determine responses with a properly + formed URL redirection. + """ + return codes.is_redirect(self.status_code) + + @property + def is_client_error(self) -> bool: + """ + A property which is `True` for 4xx status codes, `False` otherwise. + """ + return codes.is_client_error(self.status_code) + + @property + def is_server_error(self) -> bool: + """ + A property which is `True` for 5xx status codes, `False` otherwise. + """ + return codes.is_server_error(self.status_code) + + @property + def is_error(self) -> bool: + """ + A property which is `True` for 4xx and 5xx status codes, `False` otherwise. + """ + return codes.is_error(self.status_code) + + @property + def has_redirect_location(self) -> bool: + """ + Returns True for 3xx responses with a properly formed URL redirection, + `False` otherwise. + """ + return ( + self.status_code + in ( + # 301 (Cacheable redirect. Method may change to GET.) + codes.MOVED_PERMANENTLY, + # 302 (Uncacheable redirect. Method may change to GET.) + codes.FOUND, + # 303 (Client should make a GET or HEAD request.) + codes.SEE_OTHER, + # 307 (Equiv. 302, but retain method) + codes.TEMPORARY_REDIRECT, + # 308 (Equiv. 301, but retain method) + codes.PERMANENT_REDIRECT, + ) + and "Location" in self.headers + ) + + def raise_for_status(self) -> Response: + """ + Raise the `HTTPStatusError` if one occurred. + """ + request = self._request + if request is None: + raise RuntimeError( + "Cannot call `raise_for_status` as the request " + "instance has not been set on this response." 
+ ) + + if self.is_success: + return self + + if self.has_redirect_location: + message = ( + "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n" + "Redirect location: '{0.headers[location]}'\n" + "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}" + ) + else: + message = ( + "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n" + "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}" + ) + + status_class = self.status_code // 100 + error_types = { + 1: "Informational response", + 3: "Redirect response", + 4: "Client error", + 5: "Server error", + } + error_type = error_types.get(status_class, "Invalid status code") + message = message.format(self, error_type=error_type) + raise HTTPStatusError(message, request=request, response=self) + + def json(self, **kwargs: typing.Any) -> typing.Any: + return jsonlib.loads(self.content, **kwargs) + + @property + def cookies(self) -> Cookies: + if not hasattr(self, "_cookies"): + self._cookies = Cookies() + self._cookies.extract_cookies(self) + return self._cookies + + @property + def links(self) -> dict[str | None, dict[str, str]]: + """ + Returns the parsed header links of the response, if any + """ + header = self.headers.get("link") + if header is None: + return {} + + return { + (link.get("rel") or link.get("url")): link + for link in _parse_header_links(header) + } + + @property + def num_bytes_downloaded(self) -> int: + return self._num_bytes_downloaded + + def __repr__(self) -> str: + return f"" + + def __getstate__(self) -> dict[str, typing.Any]: + return { + name: value + for name, value in self.__dict__.items() + if name not in ["extensions", "stream", "is_closed", "_decoder"] + } + + def __setstate__(self, state: dict[str, typing.Any]) -> None: + for name, value in state.items(): + setattr(self, name, value) + self.is_closed = True + self.extensions = {} + self.stream = UnattachedStream() + + def read(self) -> bytes: + """ + Read and return the response content. + """ + if not hasattr(self, "_content"): + self._content = b"".join(self.iter_bytes()) + return self._content + + def iter_bytes(self, chunk_size: int | None = None) -> typing.Iterator[bytes]: + """ + A byte-iterator over the decoded response content. + This allows us to handle gzip, deflate, brotli, and zstd encoded responses. + """ + if hasattr(self, "_content"): + chunk_size = len(self._content) if chunk_size is None else chunk_size + for i in range(0, len(self._content), max(chunk_size, 1)): + yield self._content[i : i + chunk_size] + else: + decoder = self._get_content_decoder() + chunker = ByteChunker(chunk_size=chunk_size) + with request_context(request=self._request): + for raw_bytes in self.iter_raw(): + decoded = decoder.decode(raw_bytes) + for chunk in chunker.decode(decoded): + yield chunk + decoded = decoder.flush() + for chunk in chunker.decode(decoded): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + def iter_text(self, chunk_size: int | None = None) -> typing.Iterator[str]: + """ + A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. 
+ """ + decoder = TextDecoder(encoding=self.encoding or "utf-8") + chunker = TextChunker(chunk_size=chunk_size) + with request_context(request=self._request): + for byte_content in self.iter_bytes(): + text_content = decoder.decode(byte_content) + for chunk in chunker.decode(text_content): + yield chunk + text_content = decoder.flush() + for chunk in chunker.decode(text_content): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + def iter_lines(self) -> typing.Iterator[str]: + decoder = LineDecoder() + with request_context(request=self._request): + for text in self.iter_text(): + for line in decoder.decode(text): + yield line + for line in decoder.flush(): + yield line + + def iter_raw(self, chunk_size: int | None = None) -> typing.Iterator[bytes]: + """ + A byte-iterator over the raw response content. + """ + if self.is_stream_consumed: + raise StreamConsumed() + if self.is_closed: + raise StreamClosed() + if not isinstance(self.stream, SyncByteStream): + raise RuntimeError("Attempted to call a sync iterator on an async stream.") + + self.is_stream_consumed = True + self._num_bytes_downloaded = 0 + chunker = ByteChunker(chunk_size=chunk_size) + + with request_context(request=self._request): + for raw_stream_bytes in self.stream: + self._num_bytes_downloaded += len(raw_stream_bytes) + for chunk in chunker.decode(raw_stream_bytes): + yield chunk + + for chunk in chunker.flush(): + yield chunk + + self.close() + + def close(self) -> None: + """ + Close the response and release the connection. + Automatically called if the response body is read to completion. + """ + if not isinstance(self.stream, SyncByteStream): + raise RuntimeError("Attempted to call an sync close on an async stream.") + + if not self.is_closed: + self.is_closed = True + with request_context(request=self._request): + self.stream.close() + + async def aread(self) -> bytes: + """ + Read and return the response content. + """ + if not hasattr(self, "_content"): + self._content = b"".join([part async for part in self.aiter_bytes()]) + return self._content + + async def aiter_bytes( + self, chunk_size: int | None = None + ) -> typing.AsyncIterator[bytes]: + """ + A byte-iterator over the decoded response content. + This allows us to handle gzip, deflate, brotli, and zstd encoded responses. + """ + if hasattr(self, "_content"): + chunk_size = len(self._content) if chunk_size is None else chunk_size + for i in range(0, len(self._content), max(chunk_size, 1)): + yield self._content[i : i + chunk_size] + else: + decoder = self._get_content_decoder() + chunker = ByteChunker(chunk_size=chunk_size) + with request_context(request=self._request): + async for raw_bytes in self.aiter_raw(): + decoded = decoder.decode(raw_bytes) + for chunk in chunker.decode(decoded): + yield chunk + decoded = decoder.flush() + for chunk in chunker.decode(decoded): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + async def aiter_text( + self, chunk_size: int | None = None + ) -> typing.AsyncIterator[str]: + """ + A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. 
+ """ + decoder = TextDecoder(encoding=self.encoding or "utf-8") + chunker = TextChunker(chunk_size=chunk_size) + with request_context(request=self._request): + async for byte_content in self.aiter_bytes(): + text_content = decoder.decode(byte_content) + for chunk in chunker.decode(text_content): + yield chunk + text_content = decoder.flush() + for chunk in chunker.decode(text_content): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + async def aiter_lines(self) -> typing.AsyncIterator[str]: + decoder = LineDecoder() + with request_context(request=self._request): + async for text in self.aiter_text(): + for line in decoder.decode(text): + yield line + for line in decoder.flush(): + yield line + + async def aiter_raw( + self, chunk_size: int | None = None + ) -> typing.AsyncIterator[bytes]: + """ + A byte-iterator over the raw response content. + """ + if self.is_stream_consumed: + raise StreamConsumed() + if self.is_closed: + raise StreamClosed() + if not isinstance(self.stream, AsyncByteStream): + raise RuntimeError("Attempted to call an async iterator on an sync stream.") + + self.is_stream_consumed = True + self._num_bytes_downloaded = 0 + chunker = ByteChunker(chunk_size=chunk_size) + + with request_context(request=self._request): + async for raw_stream_bytes in self.stream: + self._num_bytes_downloaded += len(raw_stream_bytes) + for chunk in chunker.decode(raw_stream_bytes): + yield chunk + + for chunk in chunker.flush(): + yield chunk + + await self.aclose() + + async def aclose(self) -> None: + """ + Close the response and release the connection. + Automatically called if the response body is read to completion. + """ + if not isinstance(self.stream, AsyncByteStream): + raise RuntimeError("Attempted to call an async close on an sync stream.") + + if not self.is_closed: + self.is_closed = True + with request_context(request=self._request): + await self.stream.aclose() + + +class Cookies(typing.MutableMapping[str, str]): + """ + HTTP Cookies, as a mutable mapping. + """ + + def __init__(self, cookies: CookieTypes | None = None) -> None: + if cookies is None or isinstance(cookies, dict): + self.jar = CookieJar() + if isinstance(cookies, dict): + for key, value in cookies.items(): + self.set(key, value) + elif isinstance(cookies, list): + self.jar = CookieJar() + for key, value in cookies: + self.set(key, value) + elif isinstance(cookies, Cookies): + self.jar = CookieJar() + for cookie in cookies.jar: + self.jar.set_cookie(cookie) + else: + self.jar = cookies + + def extract_cookies(self, response: Response) -> None: + """ + Loads any cookies based on the response `Set-Cookie` headers. + """ + urllib_response = self._CookieCompatResponse(response) + urllib_request = self._CookieCompatRequest(response.request) + + self.jar.extract_cookies(urllib_response, urllib_request) # type: ignore + + def set_cookie_header(self, request: Request) -> None: + """ + Sets an appropriate 'Cookie:' HTTP header on the `Request`. + """ + urllib_request = self._CookieCompatRequest(request) + self.jar.add_cookie_header(urllib_request) + + def set(self, name: str, value: str, domain: str = "", path: str = "/") -> None: + """ + Set a cookie value by name. May optionally include domain and path. 
+ """ + kwargs = { + "version": 0, + "name": name, + "value": value, + "port": None, + "port_specified": False, + "domain": domain, + "domain_specified": bool(domain), + "domain_initial_dot": domain.startswith("."), + "path": path, + "path_specified": bool(path), + "secure": False, + "expires": None, + "discard": True, + "comment": None, + "comment_url": None, + "rest": {"HttpOnly": None}, + "rfc2109": False, + } + cookie = Cookie(**kwargs) # type: ignore + self.jar.set_cookie(cookie) + + def get( # type: ignore + self, + name: str, + default: str | None = None, + domain: str | None = None, + path: str | None = None, + ) -> str | None: + """ + Get a cookie by name. May optionally include domain and path + in order to specify exactly which cookie to retrieve. + """ + value = None + for cookie in self.jar: + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + if value is not None: + message = f"Multiple cookies exist with name={name}" + raise CookieConflict(message) + value = cookie.value + + if value is None: + return default + return value + + def delete( + self, + name: str, + domain: str | None = None, + path: str | None = None, + ) -> None: + """ + Delete a cookie by name. May optionally include domain and path + in order to specify exactly which cookie to delete. + """ + if domain is not None and path is not None: + return self.jar.clear(domain, path, name) + + remove = [ + cookie + for cookie in self.jar + if cookie.name == name + and (domain is None or cookie.domain == domain) + and (path is None or cookie.path == path) + ] + + for cookie in remove: + self.jar.clear(cookie.domain, cookie.path, cookie.name) + + def clear(self, domain: str | None = None, path: str | None = None) -> None: + """ + Delete all cookies. Optionally include a domain and path in + order to only delete a subset of all the cookies. + """ + args = [] + if domain is not None: + args.append(domain) + if path is not None: + assert domain is not None + args.append(path) + self.jar.clear(*args) + + def update(self, cookies: CookieTypes | None = None) -> None: # type: ignore + cookies = Cookies(cookies) + for cookie in cookies.jar: + self.jar.set_cookie(cookie) + + def __setitem__(self, name: str, value: str) -> None: + return self.set(name, value) + + def __getitem__(self, name: str) -> str: + value = self.get(name) + if value is None: + raise KeyError(name) + return value + + def __delitem__(self, name: str) -> None: + return self.delete(name) + + def __len__(self) -> int: + return len(self.jar) + + def __iter__(self) -> typing.Iterator[str]: + return (cookie.name for cookie in self.jar) + + def __bool__(self) -> bool: + for _ in self.jar: + return True + return False + + def __repr__(self) -> str: + cookies_repr = ", ".join( + [ + f"" + for cookie in self.jar + ] + ) + + return f"" + + class _CookieCompatRequest(urllib.request.Request): + """ + Wraps a `Request` instance up in a compatibility interface suitable + for use with `CookieJar` operations. + """ + + def __init__(self, request: Request) -> None: + super().__init__( + url=str(request.url), + headers=dict(request.headers), + method=request.method, + ) + self.request = request + + def add_unredirected_header(self, key: str, value: str) -> None: + super().add_unredirected_header(key, value) + self.request.headers[key] = value + + class _CookieCompatResponse: + """ + Wraps a `Request` instance up in a compatibility interface suitable + for use with `CookieJar` operations. 
+ """ + + def __init__(self, response: Response) -> None: + self.response = response + + def info(self) -> email.message.Message: + info = email.message.Message() + for key, value in self.response.headers.multi_items(): + # Note that setting `info[key]` here is an "append" operation, + # not a "replace" operation. + # https://docs.python.org/3/library/email.compat32-message.html#email.message.Message.__setitem__ + info[key] = value + return info diff --git a/.venv/lib/python3.12/site-packages/httpx/_multipart.py b/.venv/lib/python3.12/site-packages/httpx/_multipart.py new file mode 100644 index 0000000..b4761af --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_multipart.py @@ -0,0 +1,300 @@ +from __future__ import annotations + +import io +import mimetypes +import os +import re +import typing +from pathlib import Path + +from ._types import ( + AsyncByteStream, + FileContent, + FileTypes, + RequestData, + RequestFiles, + SyncByteStream, +) +from ._utils import ( + peek_filelike_length, + primitive_value_to_str, + to_bytes, +) + +_HTML5_FORM_ENCODING_REPLACEMENTS = {'"': "%22", "\\": "\\\\"} +_HTML5_FORM_ENCODING_REPLACEMENTS.update( + {chr(c): "%{:02X}".format(c) for c in range(0x1F + 1) if c != 0x1B} +) +_HTML5_FORM_ENCODING_RE = re.compile( + r"|".join([re.escape(c) for c in _HTML5_FORM_ENCODING_REPLACEMENTS.keys()]) +) + + +def _format_form_param(name: str, value: str) -> bytes: + """ + Encode a name/value pair within a multipart form. + """ + + def replacer(match: typing.Match[str]) -> str: + return _HTML5_FORM_ENCODING_REPLACEMENTS[match.group(0)] + + value = _HTML5_FORM_ENCODING_RE.sub(replacer, value) + return f'{name}="{value}"'.encode() + + +def _guess_content_type(filename: str | None) -> str | None: + """ + Guesses the mimetype based on a filename. Defaults to `application/octet-stream`. + + Returns `None` if `filename` is `None` or empty. + """ + if filename: + return mimetypes.guess_type(filename)[0] or "application/octet-stream" + return None + + +def get_multipart_boundary_from_content_type( + content_type: bytes | None, +) -> bytes | None: + if not content_type or not content_type.startswith(b"multipart/form-data"): + return None + # parse boundary according to + # https://www.rfc-editor.org/rfc/rfc2046#section-5.1.1 + if b";" in content_type: + for section in content_type.split(b";"): + if section.strip().lower().startswith(b"boundary="): + return section.strip()[len(b"boundary=") :].strip(b'"') + return None + + +class DataField: + """ + A single form field item, within a multipart form field. + """ + + def __init__(self, name: str, value: str | bytes | int | float | None) -> None: + if not isinstance(name, str): + raise TypeError( + f"Invalid type for name. Expected str, got {type(name)}: {name!r}" + ) + if value is not None and not isinstance(value, (str, bytes, int, float)): + raise TypeError( + "Invalid type for value. 
Expected primitive type," + f" got {type(value)}: {value!r}" + ) + self.name = name + self.value: str | bytes = ( + value if isinstance(value, bytes) else primitive_value_to_str(value) + ) + + def render_headers(self) -> bytes: + if not hasattr(self, "_headers"): + name = _format_form_param("name", self.name) + self._headers = b"".join( + [b"Content-Disposition: form-data; ", name, b"\r\n\r\n"] + ) + + return self._headers + + def render_data(self) -> bytes: + if not hasattr(self, "_data"): + self._data = to_bytes(self.value) + + return self._data + + def get_length(self) -> int: + headers = self.render_headers() + data = self.render_data() + return len(headers) + len(data) + + def render(self) -> typing.Iterator[bytes]: + yield self.render_headers() + yield self.render_data() + + +class FileField: + """ + A single file field item, within a multipart form field. + """ + + CHUNK_SIZE = 64 * 1024 + + def __init__(self, name: str, value: FileTypes) -> None: + self.name = name + + fileobj: FileContent + + headers: dict[str, str] = {} + content_type: str | None = None + + # This large tuple based API largely mirror's requests' API + # It would be good to think of better APIs for this that we could + # include in httpx 2.0 since variable length tuples(especially of 4 elements) + # are quite unwieldly + if isinstance(value, tuple): + if len(value) == 2: + # neither the 3rd parameter (content_type) nor the 4th (headers) + # was included + filename, fileobj = value + elif len(value) == 3: + filename, fileobj, content_type = value + else: + # all 4 parameters included + filename, fileobj, content_type, headers = value # type: ignore + else: + filename = Path(str(getattr(value, "name", "upload"))).name + fileobj = value + + if content_type is None: + content_type = _guess_content_type(filename) + + has_content_type_header = any("content-type" in key.lower() for key in headers) + if content_type is not None and not has_content_type_header: + # note that unlike requests, we ignore the content_type provided in the 3rd + # tuple element if it is also included in the headers requests does + # the opposite (it overwrites the headerwith the 3rd tuple element) + headers["Content-Type"] = content_type + + if isinstance(fileobj, io.StringIO): + raise TypeError( + "Multipart file uploads require 'io.BytesIO', not 'io.StringIO'." + ) + if isinstance(fileobj, io.TextIOBase): + raise TypeError( + "Multipart file uploads must be opened in binary mode, not text mode." + ) + + self.filename = filename + self.file = fileobj + self.headers = headers + + def get_length(self) -> int | None: + headers = self.render_headers() + + if isinstance(self.file, (str, bytes)): + return len(headers) + len(to_bytes(self.file)) + + file_length = peek_filelike_length(self.file) + + # If we can't determine the filesize without reading it into memory, + # then return `None` here, to indicate an unknown file length. 
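+        # An on-disk file opened in binary mode typically reports its size via
+        # peek_filelike_length(); a non-seekable object (pipe, wrapped socket, ...)
+        # does not, which is what later forces "Transfer-Encoding: chunked" in
+        # MultipartStream.get_headers(). Illustrative sketch with hypothetical values:
+        #
+        #     FileField("upload", ("report.csv", open("report.csv", "rb"))).get_length()  # -> an int
+        #     FileField("upload", ("stream.bin", some_non_seekable_reader)).get_length()  # -> None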
+ if file_length is None: + return None + + return len(headers) + file_length + + def render_headers(self) -> bytes: + if not hasattr(self, "_headers"): + parts = [ + b"Content-Disposition: form-data; ", + _format_form_param("name", self.name), + ] + if self.filename: + filename = _format_form_param("filename", self.filename) + parts.extend([b"; ", filename]) + for header_name, header_value in self.headers.items(): + key, val = f"\r\n{header_name}: ".encode(), header_value.encode() + parts.extend([key, val]) + parts.append(b"\r\n\r\n") + self._headers = b"".join(parts) + + return self._headers + + def render_data(self) -> typing.Iterator[bytes]: + if isinstance(self.file, (str, bytes)): + yield to_bytes(self.file) + return + + if hasattr(self.file, "seek"): + try: + self.file.seek(0) + except io.UnsupportedOperation: + pass + + chunk = self.file.read(self.CHUNK_SIZE) + while chunk: + yield to_bytes(chunk) + chunk = self.file.read(self.CHUNK_SIZE) + + def render(self) -> typing.Iterator[bytes]: + yield self.render_headers() + yield from self.render_data() + + +class MultipartStream(SyncByteStream, AsyncByteStream): + """ + Request content as streaming multipart encoded form data. + """ + + def __init__( + self, + data: RequestData, + files: RequestFiles, + boundary: bytes | None = None, + ) -> None: + if boundary is None: + boundary = os.urandom(16).hex().encode("ascii") + + self.boundary = boundary + self.content_type = "multipart/form-data; boundary=%s" % boundary.decode( + "ascii" + ) + self.fields = list(self._iter_fields(data, files)) + + def _iter_fields( + self, data: RequestData, files: RequestFiles + ) -> typing.Iterator[FileField | DataField]: + for name, value in data.items(): + if isinstance(value, (tuple, list)): + for item in value: + yield DataField(name=name, value=item) + else: + yield DataField(name=name, value=value) + + file_items = files.items() if isinstance(files, typing.Mapping) else files + for name, value in file_items: + yield FileField(name=name, value=value) + + def iter_chunks(self) -> typing.Iterator[bytes]: + for field in self.fields: + yield b"--%s\r\n" % self.boundary + yield from field.render() + yield b"\r\n" + yield b"--%s--\r\n" % self.boundary + + def get_content_length(self) -> int | None: + """ + Return the length of the multipart encoded content, or `None` if + any of the files have a length that cannot be determined upfront. + """ + boundary_length = len(self.boundary) + length = 0 + + for field in self.fields: + field_length = field.get_length() + if field_length is None: + return None + + length += 2 + boundary_length + 2 # b"--{boundary}\r\n" + length += field_length + length += 2 # b"\r\n" + + length += 2 + boundary_length + 4 # b"--{boundary}--\r\n" + return length + + # Content stream interface. 
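+    # Sketch of the choice get_headers() makes below, using a hypothetical fixed
+    # boundary (normally os.urandom(16).hex() as set in __init__):
+    #
+    #     stream = MultipartStream(data={"kind": "demo"},
+    #                              files={"f": ("a.txt", b"hi")},
+    #                              boundary=b"abc123")
+    #     stream.get_headers()
+    #     # -> {"Content-Length": "...", "Content-Type": "multipart/form-data; boundary=abc123"}
+    #
+    # When any file length is unknown, get_content_length() returns None and the
+    # headers instead fall back to {"Transfer-Encoding": "chunked", ...}.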
+ + def get_headers(self) -> dict[str, str]: + content_length = self.get_content_length() + content_type = self.content_type + if content_length is None: + return {"Transfer-Encoding": "chunked", "Content-Type": content_type} + return {"Content-Length": str(content_length), "Content-Type": content_type} + + def __iter__(self) -> typing.Iterator[bytes]: + for chunk in self.iter_chunks(): + yield chunk + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + for chunk in self.iter_chunks(): + yield chunk diff --git a/.venv/lib/python3.12/site-packages/httpx/_status_codes.py b/.venv/lib/python3.12/site-packages/httpx/_status_codes.py new file mode 100644 index 0000000..133a623 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_status_codes.py @@ -0,0 +1,162 @@ +from __future__ import annotations + +from enum import IntEnum + +__all__ = ["codes"] + + +class codes(IntEnum): + """HTTP status codes and reason phrases + + Status codes from the following RFCs are all observed: + + * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616 + * RFC 6585: Additional HTTP Status Codes + * RFC 3229: Delta encoding in HTTP + * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518 + * RFC 5842: Binding Extensions to WebDAV + * RFC 7238: Permanent Redirect + * RFC 2295: Transparent Content Negotiation in HTTP + * RFC 2774: An HTTP Extension Framework + * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2) + * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0) + * RFC 7725: An HTTP Status Code to Report Legal Obstacles + * RFC 8297: An HTTP Status Code for Indicating Hints + * RFC 8470: Using Early Data in HTTP + """ + + def __new__(cls, value: int, phrase: str = "") -> codes: + obj = int.__new__(cls, value) + obj._value_ = value + + obj.phrase = phrase # type: ignore[attr-defined] + return obj + + def __str__(self) -> str: + return str(self.value) + + @classmethod + def get_reason_phrase(cls, value: int) -> str: + try: + return codes(value).phrase # type: ignore + except ValueError: + return "" + + @classmethod + def is_informational(cls, value: int) -> bool: + """ + Returns `True` for 1xx status codes, `False` otherwise. + """ + return 100 <= value <= 199 + + @classmethod + def is_success(cls, value: int) -> bool: + """ + Returns `True` for 2xx status codes, `False` otherwise. + """ + return 200 <= value <= 299 + + @classmethod + def is_redirect(cls, value: int) -> bool: + """ + Returns `True` for 3xx status codes, `False` otherwise. + """ + return 300 <= value <= 399 + + @classmethod + def is_client_error(cls, value: int) -> bool: + """ + Returns `True` for 4xx status codes, `False` otherwise. + """ + return 400 <= value <= 499 + + @classmethod + def is_server_error(cls, value: int) -> bool: + """ + Returns `True` for 5xx status codes, `False` otherwise. + """ + return 500 <= value <= 599 + + @classmethod + def is_error(cls, value: int) -> bool: + """ + Returns `True` for 4xx or 5xx status codes, `False` otherwise. 
+ """ + return 400 <= value <= 599 + + # informational + CONTINUE = 100, "Continue" + SWITCHING_PROTOCOLS = 101, "Switching Protocols" + PROCESSING = 102, "Processing" + EARLY_HINTS = 103, "Early Hints" + + # success + OK = 200, "OK" + CREATED = 201, "Created" + ACCEPTED = 202, "Accepted" + NON_AUTHORITATIVE_INFORMATION = 203, "Non-Authoritative Information" + NO_CONTENT = 204, "No Content" + RESET_CONTENT = 205, "Reset Content" + PARTIAL_CONTENT = 206, "Partial Content" + MULTI_STATUS = 207, "Multi-Status" + ALREADY_REPORTED = 208, "Already Reported" + IM_USED = 226, "IM Used" + + # redirection + MULTIPLE_CHOICES = 300, "Multiple Choices" + MOVED_PERMANENTLY = 301, "Moved Permanently" + FOUND = 302, "Found" + SEE_OTHER = 303, "See Other" + NOT_MODIFIED = 304, "Not Modified" + USE_PROXY = 305, "Use Proxy" + TEMPORARY_REDIRECT = 307, "Temporary Redirect" + PERMANENT_REDIRECT = 308, "Permanent Redirect" + + # client error + BAD_REQUEST = 400, "Bad Request" + UNAUTHORIZED = 401, "Unauthorized" + PAYMENT_REQUIRED = 402, "Payment Required" + FORBIDDEN = 403, "Forbidden" + NOT_FOUND = 404, "Not Found" + METHOD_NOT_ALLOWED = 405, "Method Not Allowed" + NOT_ACCEPTABLE = 406, "Not Acceptable" + PROXY_AUTHENTICATION_REQUIRED = 407, "Proxy Authentication Required" + REQUEST_TIMEOUT = 408, "Request Timeout" + CONFLICT = 409, "Conflict" + GONE = 410, "Gone" + LENGTH_REQUIRED = 411, "Length Required" + PRECONDITION_FAILED = 412, "Precondition Failed" + REQUEST_ENTITY_TOO_LARGE = 413, "Request Entity Too Large" + REQUEST_URI_TOO_LONG = 414, "Request-URI Too Long" + UNSUPPORTED_MEDIA_TYPE = 415, "Unsupported Media Type" + REQUESTED_RANGE_NOT_SATISFIABLE = 416, "Requested Range Not Satisfiable" + EXPECTATION_FAILED = 417, "Expectation Failed" + IM_A_TEAPOT = 418, "I'm a teapot" + MISDIRECTED_REQUEST = 421, "Misdirected Request" + UNPROCESSABLE_ENTITY = 422, "Unprocessable Entity" + LOCKED = 423, "Locked" + FAILED_DEPENDENCY = 424, "Failed Dependency" + TOO_EARLY = 425, "Too Early" + UPGRADE_REQUIRED = 426, "Upgrade Required" + PRECONDITION_REQUIRED = 428, "Precondition Required" + TOO_MANY_REQUESTS = 429, "Too Many Requests" + REQUEST_HEADER_FIELDS_TOO_LARGE = 431, "Request Header Fields Too Large" + UNAVAILABLE_FOR_LEGAL_REASONS = 451, "Unavailable For Legal Reasons" + + # server errors + INTERNAL_SERVER_ERROR = 500, "Internal Server Error" + NOT_IMPLEMENTED = 501, "Not Implemented" + BAD_GATEWAY = 502, "Bad Gateway" + SERVICE_UNAVAILABLE = 503, "Service Unavailable" + GATEWAY_TIMEOUT = 504, "Gateway Timeout" + HTTP_VERSION_NOT_SUPPORTED = 505, "HTTP Version Not Supported" + VARIANT_ALSO_NEGOTIATES = 506, "Variant Also Negotiates" + INSUFFICIENT_STORAGE = 507, "Insufficient Storage" + LOOP_DETECTED = 508, "Loop Detected" + NOT_EXTENDED = 510, "Not Extended" + NETWORK_AUTHENTICATION_REQUIRED = 511, "Network Authentication Required" + + +# Include lower-case styles for `requests` compatibility. 
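+# Usage sketch (these follow directly from the enum definitions above):
+#
+#     codes.NOT_FOUND == 404
+#     codes.get_reason_phrase(404) == "Not Found"
+#     codes.not_found == 404    # lower-case alias created by the loop below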
+for code in codes: + setattr(codes, code._name_.lower(), int(code)) diff --git a/.venv/lib/python3.12/site-packages/httpx/_transports/__init__.py b/.venv/lib/python3.12/site-packages/httpx/_transports/__init__.py new file mode 100644 index 0000000..7a32105 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_transports/__init__.py @@ -0,0 +1,15 @@ +from .asgi import * +from .base import * +from .default import * +from .mock import * +from .wsgi import * + +__all__ = [ + "ASGITransport", + "AsyncBaseTransport", + "BaseTransport", + "AsyncHTTPTransport", + "HTTPTransport", + "MockTransport", + "WSGITransport", +] diff --git a/.venv/lib/python3.12/site-packages/httpx/_transports/asgi.py b/.venv/lib/python3.12/site-packages/httpx/_transports/asgi.py new file mode 100644 index 0000000..2bc4efa --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_transports/asgi.py @@ -0,0 +1,187 @@ +from __future__ import annotations + +import typing + +from .._models import Request, Response +from .._types import AsyncByteStream +from .base import AsyncBaseTransport + +if typing.TYPE_CHECKING: # pragma: no cover + import asyncio + + import trio + + Event = typing.Union[asyncio.Event, trio.Event] + + +_Message = typing.MutableMapping[str, typing.Any] +_Receive = typing.Callable[[], typing.Awaitable[_Message]] +_Send = typing.Callable[ + [typing.MutableMapping[str, typing.Any]], typing.Awaitable[None] +] +_ASGIApp = typing.Callable[ + [typing.MutableMapping[str, typing.Any], _Receive, _Send], typing.Awaitable[None] +] + +__all__ = ["ASGITransport"] + + +def is_running_trio() -> bool: + try: + # sniffio is a dependency of trio. + + # See https://github.com/python-trio/trio/issues/2802 + import sniffio + + if sniffio.current_async_library() == "trio": + return True + except ImportError: # pragma: nocover + pass + + return False + + +def create_event() -> Event: + if is_running_trio(): + import trio + + return trio.Event() + + import asyncio + + return asyncio.Event() + + +class ASGIResponseStream(AsyncByteStream): + def __init__(self, body: list[bytes]) -> None: + self._body = body + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + yield b"".join(self._body) + + +class ASGITransport(AsyncBaseTransport): + """ + A custom AsyncTransport that handles sending requests directly to an ASGI app. + + ```python + transport = httpx.ASGITransport( + app=app, + root_path="/submount", + client=("1.2.3.4", 123) + ) + client = httpx.AsyncClient(transport=transport) + ``` + + Arguments: + + * `app` - The ASGI application. + * `raise_app_exceptions` - Boolean indicating if exceptions in the application + should be raised. Default to `True`. Can be set to `False` for use cases + such as testing the content of a client 500 response. + * `root_path` - The root path on which the ASGI application should be mounted. + * `client` - A two-tuple indicating the client IP and port of incoming requests. + ``` + """ + + def __init__( + self, + app: _ASGIApp, + raise_app_exceptions: bool = True, + root_path: str = "", + client: tuple[str, int] = ("127.0.0.1", 123), + ) -> None: + self.app = app + self.raise_app_exceptions = raise_app_exceptions + self.root_path = root_path + self.client = client + + async def handle_async_request( + self, + request: Request, + ) -> Response: + assert isinstance(request.stream, AsyncByteStream) + + # ASGI scope. 
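+        # For instance, a request issued through a client mounted on this transport as
+        #
+        #     await client.get("http://testserver/users?active=1")   # hypothetical host
+        #
+        # reaches this point with, per the construction below, roughly:
+        #     method="GET", path="/users", query_string=b"active=1", scheme="http",
+        #     server=("testserver", None), client=("127.0.0.1", 123)  # the default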
+ scope = { + "type": "http", + "asgi": {"version": "3.0"}, + "http_version": "1.1", + "method": request.method, + "headers": [(k.lower(), v) for (k, v) in request.headers.raw], + "scheme": request.url.scheme, + "path": request.url.path, + "raw_path": request.url.raw_path.split(b"?")[0], + "query_string": request.url.query, + "server": (request.url.host, request.url.port), + "client": self.client, + "root_path": self.root_path, + } + + # Request. + request_body_chunks = request.stream.__aiter__() + request_complete = False + + # Response. + status_code = None + response_headers = None + body_parts = [] + response_started = False + response_complete = create_event() + + # ASGI callables. + + async def receive() -> dict[str, typing.Any]: + nonlocal request_complete + + if request_complete: + await response_complete.wait() + return {"type": "http.disconnect"} + + try: + body = await request_body_chunks.__anext__() + except StopAsyncIteration: + request_complete = True + return {"type": "http.request", "body": b"", "more_body": False} + return {"type": "http.request", "body": body, "more_body": True} + + async def send(message: typing.MutableMapping[str, typing.Any]) -> None: + nonlocal status_code, response_headers, response_started + + if message["type"] == "http.response.start": + assert not response_started + + status_code = message["status"] + response_headers = message.get("headers", []) + response_started = True + + elif message["type"] == "http.response.body": + assert not response_complete.is_set() + body = message.get("body", b"") + more_body = message.get("more_body", False) + + if body and request.method != "HEAD": + body_parts.append(body) + + if not more_body: + response_complete.set() + + try: + await self.app(scope, receive, send) + except Exception: # noqa: PIE-786 + if self.raise_app_exceptions: + raise + + response_complete.set() + if status_code is None: + status_code = 500 + if response_headers is None: + response_headers = {} + + assert response_complete.is_set() + assert status_code is not None + assert response_headers is not None + + stream = ASGIResponseStream(body_parts) + + return Response(status_code, headers=response_headers, stream=stream) diff --git a/.venv/lib/python3.12/site-packages/httpx/_transports/base.py b/.venv/lib/python3.12/site-packages/httpx/_transports/base.py new file mode 100644 index 0000000..66fd99d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_transports/base.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +import typing +from types import TracebackType + +from .._models import Request, Response + +T = typing.TypeVar("T", bound="BaseTransport") +A = typing.TypeVar("A", bound="AsyncBaseTransport") + +__all__ = ["AsyncBaseTransport", "BaseTransport"] + + +class BaseTransport: + def __enter__(self: T) -> T: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + self.close() + + def handle_request(self, request: Request) -> Response: + """ + Send a single HTTP request and return a response. + + Developers shouldn't typically ever need to call into this API directly, + since the Client class provides all the higher level user-facing API + niceties. 
+ + In order to properly release any network resources, the response + stream should *either* be consumed immediately, with a call to + `response.stream.read()`, or else the `handle_request` call should + be followed with a try/finally block to ensuring the stream is + always closed. + + Example usage: + + with httpx.HTTPTransport() as transport: + req = httpx.Request( + method=b"GET", + url=(b"https", b"www.example.com", 443, b"/"), + headers=[(b"Host", b"www.example.com")], + ) + resp = transport.handle_request(req) + body = resp.stream.read() + print(resp.status_code, resp.headers, body) + + + Takes a `Request` instance as the only argument. + + Returns a `Response` instance. + """ + raise NotImplementedError( + "The 'handle_request' method must be implemented." + ) # pragma: no cover + + def close(self) -> None: + pass + + +class AsyncBaseTransport: + async def __aenter__(self: A) -> A: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + await self.aclose() + + async def handle_async_request( + self, + request: Request, + ) -> Response: + raise NotImplementedError( + "The 'handle_async_request' method must be implemented." + ) # pragma: no cover + + async def aclose(self) -> None: + pass diff --git a/.venv/lib/python3.12/site-packages/httpx/_transports/default.py b/.venv/lib/python3.12/site-packages/httpx/_transports/default.py new file mode 100644 index 0000000..d5aa05f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_transports/default.py @@ -0,0 +1,406 @@ +""" +Custom transports, with nicely configured defaults. + +The following additional keyword arguments are currently supported by httpcore... + +* uds: str +* local_address: str +* retries: int + +Example usages... + +# Disable HTTP/2 on a single specific domain. +mounts = { + "all://": httpx.HTTPTransport(http2=True), + "all://*example.org": httpx.HTTPTransport() +} + +# Using advanced httpcore configuration, with connection retries. +transport = httpx.HTTPTransport(retries=1) +client = httpx.Client(transport=transport) + +# Using advanced httpcore configuration, with unix domain sockets. 
+transport = httpx.HTTPTransport(uds="socket.uds") +client = httpx.Client(transport=transport) +""" + +from __future__ import annotations + +import contextlib +import typing +from types import TracebackType + +if typing.TYPE_CHECKING: + import ssl # pragma: no cover + + import httpx # pragma: no cover + +from .._config import DEFAULT_LIMITS, Limits, Proxy, create_ssl_context +from .._exceptions import ( + ConnectError, + ConnectTimeout, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + TimeoutException, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from .._models import Request, Response +from .._types import AsyncByteStream, CertTypes, ProxyTypes, SyncByteStream +from .._urls import URL +from .base import AsyncBaseTransport, BaseTransport + +T = typing.TypeVar("T", bound="HTTPTransport") +A = typing.TypeVar("A", bound="AsyncHTTPTransport") + +SOCKET_OPTION = typing.Union[ + typing.Tuple[int, int, int], + typing.Tuple[int, int, typing.Union[bytes, bytearray]], + typing.Tuple[int, int, None, int], +] + +__all__ = ["AsyncHTTPTransport", "HTTPTransport"] + +HTTPCORE_EXC_MAP: dict[type[Exception], type[httpx.HTTPError]] = {} + + +def _load_httpcore_exceptions() -> dict[type[Exception], type[httpx.HTTPError]]: + import httpcore + + return { + httpcore.TimeoutException: TimeoutException, + httpcore.ConnectTimeout: ConnectTimeout, + httpcore.ReadTimeout: ReadTimeout, + httpcore.WriteTimeout: WriteTimeout, + httpcore.PoolTimeout: PoolTimeout, + httpcore.NetworkError: NetworkError, + httpcore.ConnectError: ConnectError, + httpcore.ReadError: ReadError, + httpcore.WriteError: WriteError, + httpcore.ProxyError: ProxyError, + httpcore.UnsupportedProtocol: UnsupportedProtocol, + httpcore.ProtocolError: ProtocolError, + httpcore.LocalProtocolError: LocalProtocolError, + httpcore.RemoteProtocolError: RemoteProtocolError, + } + + +@contextlib.contextmanager +def map_httpcore_exceptions() -> typing.Iterator[None]: + global HTTPCORE_EXC_MAP + if len(HTTPCORE_EXC_MAP) == 0: + HTTPCORE_EXC_MAP = _load_httpcore_exceptions() + try: + yield + except Exception as exc: + mapped_exc = None + + for from_exc, to_exc in HTTPCORE_EXC_MAP.items(): + if not isinstance(exc, from_exc): + continue + # We want to map to the most specific exception we can find. + # Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to + # `httpx.ReadTimeout`, not just `httpx.TimeoutException`. 
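+            # Concretely, an httpcore.ReadTimeout matches both httpcore.TimeoutException
+            # and httpcore.ReadTimeout in the map; the issubclass() test below keeps the
+            # narrower httpx.ReadTimeout, which is then raised `from` the original error:
+            #
+            #     with map_httpcore_exceptions():
+            #         raise httpcore.ReadTimeout("timed out")   # hypothetical trigger
+            #     # -> httpx.ReadTimeout("timed out"), chained to the httpcore exception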
+ if mapped_exc is None or issubclass(to_exc, mapped_exc): + mapped_exc = to_exc + + if mapped_exc is None: # pragma: no cover + raise + + message = str(exc) + raise mapped_exc(message) from exc + + +class ResponseStream(SyncByteStream): + def __init__(self, httpcore_stream: typing.Iterable[bytes]) -> None: + self._httpcore_stream = httpcore_stream + + def __iter__(self) -> typing.Iterator[bytes]: + with map_httpcore_exceptions(): + for part in self._httpcore_stream: + yield part + + def close(self) -> None: + if hasattr(self._httpcore_stream, "close"): + self._httpcore_stream.close() + + +class HTTPTransport(BaseTransport): + def __init__( + self, + verify: ssl.SSLContext | str | bool = True, + cert: CertTypes | None = None, + trust_env: bool = True, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + proxy: ProxyTypes | None = None, + uds: str | None = None, + local_address: str | None = None, + retries: int = 0, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + import httpcore + + proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy + ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) + + if proxy is None: + self._pool = httpcore.ConnectionPool( + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + uds=uds, + local_address=local_address, + retries=retries, + socket_options=socket_options, + ) + elif proxy.url.scheme in ("http", "https"): + self._pool = httpcore.HTTPProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + proxy_headers=proxy.headers.raw, + ssl_context=ssl_context, + proxy_ssl_context=proxy.ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + socket_options=socket_options, + ) + elif proxy.url.scheme in ("socks5", "socks5h"): + try: + import socksio # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using SOCKS proxy, but the 'socksio' package is not installed. " + "Make sure to install httpx using `pip install httpx[socks]`." + ) from None + + self._pool = httpcore.SOCKSProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + ) + else: # pragma: no cover + raise ValueError( + "Proxy protocol must be either 'http', 'https', 'socks5', or 'socks5h'," + f" but got {proxy.url.scheme!r}." + ) + + def __enter__(self: T) -> T: # Use generics for subclass support. 
+ self._pool.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + with map_httpcore_exceptions(): + self._pool.__exit__(exc_type, exc_value, traceback) + + def handle_request( + self, + request: Request, + ) -> Response: + assert isinstance(request.stream, SyncByteStream) + import httpcore + + req = httpcore.Request( + method=request.method, + url=httpcore.URL( + scheme=request.url.raw_scheme, + host=request.url.raw_host, + port=request.url.port, + target=request.url.raw_path, + ), + headers=request.headers.raw, + content=request.stream, + extensions=request.extensions, + ) + with map_httpcore_exceptions(): + resp = self._pool.handle_request(req) + + assert isinstance(resp.stream, typing.Iterable) + + return Response( + status_code=resp.status, + headers=resp.headers, + stream=ResponseStream(resp.stream), + extensions=resp.extensions, + ) + + def close(self) -> None: + self._pool.close() + + +class AsyncResponseStream(AsyncByteStream): + def __init__(self, httpcore_stream: typing.AsyncIterable[bytes]) -> None: + self._httpcore_stream = httpcore_stream + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + with map_httpcore_exceptions(): + async for part in self._httpcore_stream: + yield part + + async def aclose(self) -> None: + if hasattr(self._httpcore_stream, "aclose"): + await self._httpcore_stream.aclose() + + +class AsyncHTTPTransport(AsyncBaseTransport): + def __init__( + self, + verify: ssl.SSLContext | str | bool = True, + cert: CertTypes | None = None, + trust_env: bool = True, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + proxy: ProxyTypes | None = None, + uds: str | None = None, + local_address: str | None = None, + retries: int = 0, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + import httpcore + + proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy + ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) + + if proxy is None: + self._pool = httpcore.AsyncConnectionPool( + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + uds=uds, + local_address=local_address, + retries=retries, + socket_options=socket_options, + ) + elif proxy.url.scheme in ("http", "https"): + self._pool = httpcore.AsyncHTTPProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + proxy_headers=proxy.headers.raw, + proxy_ssl_context=proxy.ssl_context, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + socket_options=socket_options, + ) + elif proxy.url.scheme in ("socks5", "socks5h"): + try: + import socksio # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using SOCKS proxy, but the 'socksio' package is not installed. " + "Make sure to install httpx using `pip install httpx[socks]`." 
+                ) from None
+
+            self._pool = httpcore.AsyncSOCKSProxy(
+                proxy_url=httpcore.URL(
+                    scheme=proxy.url.raw_scheme,
+                    host=proxy.url.raw_host,
+                    port=proxy.url.port,
+                    target=proxy.url.raw_path,
+                ),
+                proxy_auth=proxy.raw_auth,
+                ssl_context=ssl_context,
+                max_connections=limits.max_connections,
+                max_keepalive_connections=limits.max_keepalive_connections,
+                keepalive_expiry=limits.keepalive_expiry,
+                http1=http1,
+                http2=http2,
+            )
+        else:  # pragma: no cover
+            raise ValueError(
+                "Proxy protocol must be either 'http', 'https', 'socks5', or 'socks5h',"
+                f" but got {proxy.url.scheme!r}."
+            )
+
+    async def __aenter__(self: A) -> A:  # Use generics for subclass support.
+        await self._pool.__aenter__()
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: TracebackType | None = None,
+    ) -> None:
+        with map_httpcore_exceptions():
+            await self._pool.__aexit__(exc_type, exc_value, traceback)
+
+    async def handle_async_request(
+        self,
+        request: Request,
+    ) -> Response:
+        assert isinstance(request.stream, AsyncByteStream)
+        import httpcore
+
+        req = httpcore.Request(
+            method=request.method,
+            url=httpcore.URL(
+                scheme=request.url.raw_scheme,
+                host=request.url.raw_host,
+                port=request.url.port,
+                target=request.url.raw_path,
+            ),
+            headers=request.headers.raw,
+            content=request.stream,
+            extensions=request.extensions,
+        )
+        with map_httpcore_exceptions():
+            resp = await self._pool.handle_async_request(req)
+
+        assert isinstance(resp.stream, typing.AsyncIterable)
+
+        return Response(
+            status_code=resp.status,
+            headers=resp.headers,
+            stream=AsyncResponseStream(resp.stream),
+            extensions=resp.extensions,
+        )
+
+    async def aclose(self) -> None:
+        await self._pool.aclose()
diff --git a/.venv/lib/python3.12/site-packages/httpx/_transports/mock.py b/.venv/lib/python3.12/site-packages/httpx/_transports/mock.py
new file mode 100644
index 0000000..8c418f5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/httpx/_transports/mock.py
@@ -0,0 +1,43 @@
+from __future__ import annotations
+
+import typing
+
+from .._models import Request, Response
+from .base import AsyncBaseTransport, BaseTransport
+
+SyncHandler = typing.Callable[[Request], Response]
+AsyncHandler = typing.Callable[[Request], typing.Coroutine[None, None, Response]]
+
+
+__all__ = ["MockTransport"]
+
+
+class MockTransport(AsyncBaseTransport, BaseTransport):
+    def __init__(self, handler: SyncHandler | AsyncHandler) -> None:
+        self.handler = handler
+
+    def handle_request(
+        self,
+        request: Request,
+    ) -> Response:
+        request.read()
+        response = self.handler(request)
+        if not isinstance(response, Response):  # pragma: no cover
+            raise TypeError("Cannot use an async handler in a sync Client")
+        return response
+
+    async def handle_async_request(
+        self,
+        request: Request,
+    ) -> Response:
+        await request.aread()
+        response = self.handler(request)
+
+        # Allow handler to *optionally* be an `async` function.
+        # If it is, then the `response` variable needs to be awaited to actually
+        # return the result.
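+        # For illustration (hypothetical handler, not part of this module):
+        #
+        #     async def handler(request: httpx.Request) -> httpx.Response:
+        #         return httpx.Response(200, json={"ok": True})
+        #
+        #     client = httpx.AsyncClient(transport=httpx.MockTransport(handler))
+        #
+        # In that case `self.handler(request)` returns a coroutine rather than a
+        # Response, so it is awaited below.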
+ + if not isinstance(response, Response): + response = await response + + return response diff --git a/.venv/lib/python3.12/site-packages/httpx/_transports/wsgi.py b/.venv/lib/python3.12/site-packages/httpx/_transports/wsgi.py new file mode 100644 index 0000000..8592ffe --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_transports/wsgi.py @@ -0,0 +1,149 @@ +from __future__ import annotations + +import io +import itertools +import sys +import typing + +from .._models import Request, Response +from .._types import SyncByteStream +from .base import BaseTransport + +if typing.TYPE_CHECKING: + from _typeshed import OptExcInfo # pragma: no cover + from _typeshed.wsgi import WSGIApplication # pragma: no cover + +_T = typing.TypeVar("_T") + + +__all__ = ["WSGITransport"] + + +def _skip_leading_empty_chunks(body: typing.Iterable[_T]) -> typing.Iterable[_T]: + body = iter(body) + for chunk in body: + if chunk: + return itertools.chain([chunk], body) + return [] + + +class WSGIByteStream(SyncByteStream): + def __init__(self, result: typing.Iterable[bytes]) -> None: + self._close = getattr(result, "close", None) + self._result = _skip_leading_empty_chunks(result) + + def __iter__(self) -> typing.Iterator[bytes]: + for part in self._result: + yield part + + def close(self) -> None: + if self._close is not None: + self._close() + + +class WSGITransport(BaseTransport): + """ + A custom transport that handles sending requests directly to an WSGI app. + The simplest way to use this functionality is to use the `app` argument. + + ``` + client = httpx.Client(app=app) + ``` + + Alternatively, you can setup the transport instance explicitly. + This allows you to include any additional configuration arguments specific + to the WSGITransport class: + + ``` + transport = httpx.WSGITransport( + app=app, + script_name="/submount", + remote_addr="1.2.3.4" + ) + client = httpx.Client(transport=transport) + ``` + + Arguments: + + * `app` - The WSGI application. + * `raise_app_exceptions` - Boolean indicating if exceptions in the application + should be raised. Default to `True`. Can be set to `False` for use cases + such as testing the content of a client 500 response. + * `script_name` - The root path on which the WSGI application should be mounted. + * `remote_addr` - A string indicating the client IP of incoming requests. 
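+    * `wsgi_errors` - A text stream used for the `wsgi.errors` environ key.
+       Defaults to `sys.stderr` when not provided.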
+ ``` + """ + + def __init__( + self, + app: WSGIApplication, + raise_app_exceptions: bool = True, + script_name: str = "", + remote_addr: str = "127.0.0.1", + wsgi_errors: typing.TextIO | None = None, + ) -> None: + self.app = app + self.raise_app_exceptions = raise_app_exceptions + self.script_name = script_name + self.remote_addr = remote_addr + self.wsgi_errors = wsgi_errors + + def handle_request(self, request: Request) -> Response: + request.read() + wsgi_input = io.BytesIO(request.content) + + port = request.url.port or {"http": 80, "https": 443}[request.url.scheme] + environ = { + "wsgi.version": (1, 0), + "wsgi.url_scheme": request.url.scheme, + "wsgi.input": wsgi_input, + "wsgi.errors": self.wsgi_errors or sys.stderr, + "wsgi.multithread": True, + "wsgi.multiprocess": False, + "wsgi.run_once": False, + "REQUEST_METHOD": request.method, + "SCRIPT_NAME": self.script_name, + "PATH_INFO": request.url.path, + "QUERY_STRING": request.url.query.decode("ascii"), + "SERVER_NAME": request.url.host, + "SERVER_PORT": str(port), + "SERVER_PROTOCOL": "HTTP/1.1", + "REMOTE_ADDR": self.remote_addr, + } + for header_key, header_value in request.headers.raw: + key = header_key.decode("ascii").upper().replace("-", "_") + if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"): + key = "HTTP_" + key + environ[key] = header_value.decode("ascii") + + seen_status = None + seen_response_headers = None + seen_exc_info = None + + def start_response( + status: str, + response_headers: list[tuple[str, str]], + exc_info: OptExcInfo | None = None, + ) -> typing.Callable[[bytes], typing.Any]: + nonlocal seen_status, seen_response_headers, seen_exc_info + seen_status = status + seen_response_headers = response_headers + seen_exc_info = exc_info + return lambda _: None + + result = self.app(environ, start_response) + + stream = WSGIByteStream(result) + + assert seen_status is not None + assert seen_response_headers is not None + if seen_exc_info and seen_exc_info[0] and self.raise_app_exceptions: + raise seen_exc_info[1] + + status_code = int(seen_status.split()[0]) + headers = [ + (key.encode("ascii"), value.encode("ascii")) + for key, value in seen_response_headers + ] + + return Response(status_code, headers=headers, stream=stream) diff --git a/.venv/lib/python3.12/site-packages/httpx/_types.py b/.venv/lib/python3.12/site-packages/httpx/_types.py new file mode 100644 index 0000000..704dfdf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_types.py @@ -0,0 +1,114 @@ +""" +Type definitions for type checking purposes. 
+""" + +from http.cookiejar import CookieJar +from typing import ( + IO, + TYPE_CHECKING, + Any, + AsyncIterable, + AsyncIterator, + Callable, + Dict, + Iterable, + Iterator, + List, + Mapping, + Optional, + Sequence, + Tuple, + Union, +) + +if TYPE_CHECKING: # pragma: no cover + from ._auth import Auth # noqa: F401 + from ._config import Proxy, Timeout # noqa: F401 + from ._models import Cookies, Headers, Request # noqa: F401 + from ._urls import URL, QueryParams # noqa: F401 + + +PrimitiveData = Optional[Union[str, int, float, bool]] + +URLTypes = Union["URL", str] + +QueryParamTypes = Union[ + "QueryParams", + Mapping[str, Union[PrimitiveData, Sequence[PrimitiveData]]], + List[Tuple[str, PrimitiveData]], + Tuple[Tuple[str, PrimitiveData], ...], + str, + bytes, +] + +HeaderTypes = Union[ + "Headers", + Mapping[str, str], + Mapping[bytes, bytes], + Sequence[Tuple[str, str]], + Sequence[Tuple[bytes, bytes]], +] + +CookieTypes = Union["Cookies", CookieJar, Dict[str, str], List[Tuple[str, str]]] + +TimeoutTypes = Union[ + Optional[float], + Tuple[Optional[float], Optional[float], Optional[float], Optional[float]], + "Timeout", +] +ProxyTypes = Union["URL", str, "Proxy"] +CertTypes = Union[str, Tuple[str, str], Tuple[str, str, str]] + +AuthTypes = Union[ + Tuple[Union[str, bytes], Union[str, bytes]], + Callable[["Request"], "Request"], + "Auth", +] + +RequestContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] +ResponseContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] +ResponseExtensions = Mapping[str, Any] + +RequestData = Mapping[str, Any] + +FileContent = Union[IO[bytes], bytes, str] +FileTypes = Union[ + # file (or bytes) + FileContent, + # (filename, file (or bytes)) + Tuple[Optional[str], FileContent], + # (filename, file (or bytes), content_type) + Tuple[Optional[str], FileContent, Optional[str]], + # (filename, file (or bytes), content_type, headers) + Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], +] +RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]] + +RequestExtensions = Mapping[str, Any] + +__all__ = ["AsyncByteStream", "SyncByteStream"] + + +class SyncByteStream: + def __iter__(self) -> Iterator[bytes]: + raise NotImplementedError( + "The '__iter__' method must be implemented." + ) # pragma: no cover + yield b"" # pragma: no cover + + def close(self) -> None: + """ + Subclasses can override this method to release any network resources + after a request/response cycle is complete. + """ + + +class AsyncByteStream: + async def __aiter__(self) -> AsyncIterator[bytes]: + raise NotImplementedError( + "The '__aiter__' method must be implemented." + ) # pragma: no cover + yield b"" # pragma: no cover + + async def aclose(self) -> None: + pass diff --git a/.venv/lib/python3.12/site-packages/httpx/_urlparse.py b/.venv/lib/python3.12/site-packages/httpx/_urlparse.py new file mode 100644 index 0000000..bf190fd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_urlparse.py @@ -0,0 +1,527 @@ +""" +An implementation of `urlparse` that provides URL validation and normalization +as described by RFC3986. + +We rely on this implementation rather than the one in Python's stdlib, because: + +* It provides more complete URL validation. +* It properly differentiates between an empty querystring and an absent querystring, + to distinguish URLs with a trailing '?'. +* It handles scheme, hostname, port, and path normalization. +* It supports IDNA hostnames, normalizing them to their encoded form. 
+* The API supports passing individual components, as well as the complete URL string.
+
+Previously we relied on the excellent `rfc3986` package to handle URL parsing and
+validation, but this module provides a simpler alternative, with less indirection
+required.
+"""
+
+from __future__ import annotations
+
+import ipaddress
+import re
+import typing
+
+import idna
+
+from ._exceptions import InvalidURL
+
+MAX_URL_LENGTH = 65536
+
+# https://datatracker.ietf.org/doc/html/rfc3986.html#section-2.3
+UNRESERVED_CHARACTERS = (
+    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~"
+)
+SUB_DELIMS = "!$&'()*+,;="
+
+PERCENT_ENCODED_REGEX = re.compile("%[A-Fa-f0-9]{2}")
+
+# https://url.spec.whatwg.org/#percent-encoded-bytes
+
+# The fragment percent-encode set is the C0 control percent-encode set
+# and U+0020 SPACE, U+0022 ("), U+003C (<), U+003E (>), and U+0060 (`).
+FRAG_SAFE = "".join(
+    [chr(i) for i in range(0x20, 0x7F) if i not in (0x20, 0x22, 0x3C, 0x3E, 0x60)]
+)
+
+# The query percent-encode set is the C0 control percent-encode set
+# and U+0020 SPACE, U+0022 ("), U+0023 (#), U+003C (<), and U+003E (>).
+QUERY_SAFE = "".join(
+    [chr(i) for i in range(0x20, 0x7F) if i not in (0x20, 0x22, 0x23, 0x3C, 0x3E)]
+)
+
+# The path percent-encode set is the query percent-encode set
+# and U+003F (?), U+0060 (`), U+007B ({), and U+007D (}).
+PATH_SAFE = "".join(
+    [
+        chr(i)
+        for i in range(0x20, 0x7F)
+        if i not in (0x20, 0x22, 0x23, 0x3C, 0x3E) + (0x3F, 0x60, 0x7B, 0x7D)
+    ]
+)
+
+# The userinfo percent-encode set is the path percent-encode set
+# and U+002F (/), U+003A (:), U+003B (;), U+003D (=), U+0040 (@),
+# U+005B ([) to U+005E (^), inclusive, and U+007C (|).
+USERNAME_SAFE = "".join(
+    [
+        chr(i)
+        for i in range(0x20, 0x7F)
+        if i
+        not in (0x20, 0x22, 0x23, 0x3C, 0x3E)
+        + (0x3F, 0x60, 0x7B, 0x7D)
+        + (0x2F, 0x3A, 0x3B, 0x3D, 0x40, 0x5B, 0x5C, 0x5D, 0x5E, 0x7C)
+    ]
+)
+PASSWORD_SAFE = "".join(
+    [
+        chr(i)
+        for i in range(0x20, 0x7F)
+        if i
+        not in (0x20, 0x22, 0x23, 0x3C, 0x3E)
+        + (0x3F, 0x60, 0x7B, 0x7D)
+        + (0x2F, 0x3A, 0x3B, 0x3D, 0x40, 0x5B, 0x5C, 0x5D, 0x5E, 0x7C)
+    ]
+)
+# Note... The terminology 'userinfo' percent-encode set in the WHATWG document
+# is used for the username and password quoting. For the joint userinfo component
+# we remove U+003A (:) from the safe set.
+USERINFO_SAFE = "".join(
+    [
+        chr(i)
+        for i in range(0x20, 0x7F)
+        if i
+        not in (0x20, 0x22, 0x23, 0x3C, 0x3E)
+        + (0x3F, 0x60, 0x7B, 0x7D)
+        + (0x2F, 0x3B, 0x3D, 0x40, 0x5B, 0x5C, 0x5D, 0x5E, 0x7C)
+    ]
+)
+
+
+# {scheme}: (optional)
+# //{authority} (optional)
+# {path}
+# ?{query} (optional)
+# #{fragment} (optional)
+URL_REGEX = re.compile(
+    (
+        r"(?:(?P<scheme>{scheme}):)?"
+        r"(?://(?P<authority>{authority}))?"
+        r"(?P<path>{path})"
+        r"(?:\?(?P<query>{query}))?"
+        r"(?:#(?P<fragment>{fragment}))?"
+    ).format(
+        scheme="([a-zA-Z][a-zA-Z0-9+.-]*)?",
+        authority="[^/?#]*",
+        path="[^?#]*",
+        query="[^#]*",
+        fragment=".*",
+    )
+)
+
+# {userinfo}@ (optional)
+# {host}
+# :{port} (optional)
+AUTHORITY_REGEX = re.compile(
+    (
+        r"(?:(?P<userinfo>{userinfo})@)?" r"(?P<host>{host})" r":?(?P<port>{port})?"
+    ).format(
+        userinfo=".*",  # Any character sequence.
+        host="(\\[.*\\]|[^:@]*)",  # Either any character sequence excluding ':' or '@',
+        # or an IPv6 address enclosed within square brackets.
+        port=".*",  # Any character sequence.
+    )
+)
+
+
+# If we call urlparse with an individual component, then we need to regex
+# validate that component individually.
+# Note that we're duplicating the same strings as above. Shock! Horror!!
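+# For illustration (assumed examples): COMPONENT_REGEX["scheme"].fullmatch("https")
+# succeeds, while COMPONENT_REGEX["port"] accepts any string at this stage; the
+# numeric check happens later, in `normalize_port()`.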
+COMPONENT_REGEX = { + "scheme": re.compile("([a-zA-Z][a-zA-Z0-9+.-]*)?"), + "authority": re.compile("[^/?#]*"), + "path": re.compile("[^?#]*"), + "query": re.compile("[^#]*"), + "fragment": re.compile(".*"), + "userinfo": re.compile("[^@]*"), + "host": re.compile("(\\[.*\\]|[^:]*)"), + "port": re.compile(".*"), +} + + +# We use these simple regexs as a first pass before handing off to +# the stdlib 'ipaddress' module for IP address validation. +IPv4_STYLE_HOSTNAME = re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$") +IPv6_STYLE_HOSTNAME = re.compile(r"^\[.*\]$") + + +class ParseResult(typing.NamedTuple): + scheme: str + userinfo: str + host: str + port: int | None + path: str + query: str | None + fragment: str | None + + @property + def authority(self) -> str: + return "".join( + [ + f"{self.userinfo}@" if self.userinfo else "", + f"[{self.host}]" if ":" in self.host else self.host, + f":{self.port}" if self.port is not None else "", + ] + ) + + @property + def netloc(self) -> str: + return "".join( + [ + f"[{self.host}]" if ":" in self.host else self.host, + f":{self.port}" if self.port is not None else "", + ] + ) + + def copy_with(self, **kwargs: str | None) -> ParseResult: + if not kwargs: + return self + + defaults = { + "scheme": self.scheme, + "authority": self.authority, + "path": self.path, + "query": self.query, + "fragment": self.fragment, + } + defaults.update(kwargs) + return urlparse("", **defaults) + + def __str__(self) -> str: + authority = self.authority + return "".join( + [ + f"{self.scheme}:" if self.scheme else "", + f"//{authority}" if authority else "", + self.path, + f"?{self.query}" if self.query is not None else "", + f"#{self.fragment}" if self.fragment is not None else "", + ] + ) + + +def urlparse(url: str = "", **kwargs: str | None) -> ParseResult: + # Initial basic checks on allowable URLs. + # --------------------------------------- + + # Hard limit the maximum allowable URL length. + if len(url) > MAX_URL_LENGTH: + raise InvalidURL("URL too long") + + # If a URL includes any ASCII control characters including \t, \r, \n, + # then treat it as invalid. + if any(char.isascii() and not char.isprintable() for char in url): + char = next(char for char in url if char.isascii() and not char.isprintable()) + idx = url.find(char) + error = ( + f"Invalid non-printable ASCII character in URL, {char!r} at position {idx}." + ) + raise InvalidURL(error) + + # Some keyword arguments require special handling. + # ------------------------------------------------ + + # Coerce "port" to a string, if it is provided as an integer. + if "port" in kwargs: + port = kwargs["port"] + kwargs["port"] = str(port) if isinstance(port, int) else port + + # Replace "netloc" with "host and "port". + if "netloc" in kwargs: + netloc = kwargs.pop("netloc") or "" + kwargs["host"], _, kwargs["port"] = netloc.partition(":") + + # Replace "username" and/or "password" with "userinfo". + if "username" in kwargs or "password" in kwargs: + username = quote(kwargs.pop("username", "") or "", safe=USERNAME_SAFE) + password = quote(kwargs.pop("password", "") or "", safe=PASSWORD_SAFE) + kwargs["userinfo"] = f"{username}:{password}" if password else username + + # Replace "raw_path" with "path" and "query". + if "raw_path" in kwargs: + raw_path = kwargs.pop("raw_path") or "" + kwargs["path"], seperator, kwargs["query"] = raw_path.partition("?") + if not seperator: + kwargs["query"] = None + + # Ensure that IPv6 "host" addresses are always escaped with "[...]". 
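+    # For example (assumed): host="::ffff:192.168.0.1" becomes "[::ffff:192.168.0.1]",
+    # so the authority regex treats it as a bracketed IP literal.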
+ if "host" in kwargs: + host = kwargs.get("host") or "" + if ":" in host and not (host.startswith("[") and host.endswith("]")): + kwargs["host"] = f"[{host}]" + + # If any keyword arguments are provided, ensure they are valid. + # ------------------------------------------------------------- + + for key, value in kwargs.items(): + if value is not None: + if len(value) > MAX_URL_LENGTH: + raise InvalidURL(f"URL component '{key}' too long") + + # If a component includes any ASCII control characters including \t, \r, \n, + # then treat it as invalid. + if any(char.isascii() and not char.isprintable() for char in value): + char = next( + char for char in value if char.isascii() and not char.isprintable() + ) + idx = value.find(char) + error = ( + f"Invalid non-printable ASCII character in URL {key} component, " + f"{char!r} at position {idx}." + ) + raise InvalidURL(error) + + # Ensure that keyword arguments match as a valid regex. + if not COMPONENT_REGEX[key].fullmatch(value): + raise InvalidURL(f"Invalid URL component '{key}'") + + # The URL_REGEX will always match, but may have empty components. + url_match = URL_REGEX.match(url) + assert url_match is not None + url_dict = url_match.groupdict() + + # * 'scheme', 'authority', and 'path' may be empty strings. + # * 'query' may be 'None', indicating no trailing "?" portion. + # Any string including the empty string, indicates a trailing "?". + # * 'fragment' may be 'None', indicating no trailing "#" portion. + # Any string including the empty string, indicates a trailing "#". + scheme = kwargs.get("scheme", url_dict["scheme"]) or "" + authority = kwargs.get("authority", url_dict["authority"]) or "" + path = kwargs.get("path", url_dict["path"]) or "" + query = kwargs.get("query", url_dict["query"]) + frag = kwargs.get("fragment", url_dict["fragment"]) + + # The AUTHORITY_REGEX will always match, but may have empty components. + authority_match = AUTHORITY_REGEX.match(authority) + assert authority_match is not None + authority_dict = authority_match.groupdict() + + # * 'userinfo' and 'host' may be empty strings. + # * 'port' may be 'None'. + userinfo = kwargs.get("userinfo", authority_dict["userinfo"]) or "" + host = kwargs.get("host", authority_dict["host"]) or "" + port = kwargs.get("port", authority_dict["port"]) + + # Normalize and validate each component. + # We end up with a parsed representation of the URL, + # with components that are plain ASCII bytestrings. + parsed_scheme: str = scheme.lower() + parsed_userinfo: str = quote(userinfo, safe=USERINFO_SAFE) + parsed_host: str = encode_host(host) + parsed_port: int | None = normalize_port(port, scheme) + + has_scheme = parsed_scheme != "" + has_authority = ( + parsed_userinfo != "" or parsed_host != "" or parsed_port is not None + ) + validate_path(path, has_scheme=has_scheme, has_authority=has_authority) + if has_scheme or has_authority: + path = normalize_path(path) + + parsed_path: str = quote(path, safe=PATH_SAFE) + parsed_query: str | None = None if query is None else quote(query, safe=QUERY_SAFE) + parsed_frag: str | None = None if frag is None else quote(frag, safe=FRAG_SAFE) + + # The parsed ASCII bytestrings are our canonical form. + # All properties of the URL are derived from these. 
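+    # Illustrative result (assumed example):
+    #     urlparse("http://EXAMPLE.com:80/a%2Fb")
+    #     -> ParseResult(scheme="http", userinfo="", host="example.com",
+    #                    port=None, path="/a%2Fb", query=None, fragment=None)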
+ return ParseResult( + parsed_scheme, + parsed_userinfo, + parsed_host, + parsed_port, + parsed_path, + parsed_query, + parsed_frag, + ) + + +def encode_host(host: str) -> str: + if not host: + return "" + + elif IPv4_STYLE_HOSTNAME.match(host): + # Validate IPv4 hostnames like #.#.#.# + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet + try: + ipaddress.IPv4Address(host) + except ipaddress.AddressValueError: + raise InvalidURL(f"Invalid IPv4 address: {host!r}") + return host + + elif IPv6_STYLE_HOSTNAME.match(host): + # Validate IPv6 hostnames like [...] + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # "A host identified by an Internet Protocol literal address, version 6 + # [RFC3513] or later, is distinguished by enclosing the IP literal + # within square brackets ("[" and "]"). This is the only place where + # square bracket characters are allowed in the URI syntax." + try: + ipaddress.IPv6Address(host[1:-1]) + except ipaddress.AddressValueError: + raise InvalidURL(f"Invalid IPv6 address: {host!r}") + return host[1:-1] + + elif host.isascii(): + # Regular ASCII hostnames + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # reg-name = *( unreserved / pct-encoded / sub-delims ) + WHATWG_SAFE = '"`{}%|\\' + return quote(host.lower(), safe=SUB_DELIMS + WHATWG_SAFE) + + # IDNA hostnames + try: + return idna.encode(host.lower()).decode("ascii") + except idna.IDNAError: + raise InvalidURL(f"Invalid IDNA hostname: {host!r}") + + +def normalize_port(port: str | int | None, scheme: str) -> int | None: + # From https://tools.ietf.org/html/rfc3986#section-3.2.3 + # + # "A scheme may define a default port. For example, the "http" scheme + # defines a default port of "80", corresponding to its reserved TCP + # port number. The type of port designated by the port number (e.g., + # TCP, UDP, SCTP) is defined by the URI scheme. URI producers and + # normalizers should omit the port component and its ":" delimiter if + # port is empty or if its value would be the same as that of the + # scheme's default." + if port is None or port == "": + return None + + try: + port_as_int = int(port) + except ValueError: + raise InvalidURL(f"Invalid port: {port!r}") + + # See https://url.spec.whatwg.org/#url-miscellaneous + default_port = {"ftp": 21, "http": 80, "https": 443, "ws": 80, "wss": 443}.get( + scheme + ) + if port_as_int == default_port: + return None + return port_as_int + + +def validate_path(path: str, has_scheme: bool, has_authority: bool) -> None: + """ + Path validation rules that depend on if the URL contains + a scheme or authority component. + + See https://datatracker.ietf.org/doc/html/rfc3986.html#section-3.3 + """ + if has_authority: + # If a URI contains an authority component, then the path component + # must either be empty or begin with a slash ("/") character." + if path and not path.startswith("/"): + raise InvalidURL("For absolute URLs, path must be empty or begin with '/'") + + if not has_scheme and not has_authority: + # If a URI does not contain an authority component, then the path cannot begin + # with two slash characters ("//"). + if path.startswith("//"): + raise InvalidURL("Relative URLs cannot have a path starting with '//'") + + # In addition, a URI reference (Section 4.1) may be a relative-path reference, + # in which case the first path segment cannot contain a colon (":") character. 
+ if path.startswith(":"): + raise InvalidURL("Relative URLs cannot have a path starting with ':'") + + +def normalize_path(path: str) -> str: + """ + Drop "." and ".." segments from a URL path. + + For example: + + normalize_path("/path/./to/somewhere/..") == "/path/to" + """ + # Fast return when no '.' characters in the path. + if "." not in path: + return path + + components = path.split("/") + + # Fast return when no '.' or '..' components in the path. + if "." not in components and ".." not in components: + return path + + # https://datatracker.ietf.org/doc/html/rfc3986#section-5.2.4 + output: list[str] = [] + for component in components: + if component == ".": + pass + elif component == "..": + if output and output != [""]: + output.pop() + else: + output.append(component) + return "/".join(output) + + +def PERCENT(string: str) -> str: + return "".join([f"%{byte:02X}" for byte in string.encode("utf-8")]) + + +def percent_encoded(string: str, safe: str) -> str: + """ + Use percent-encoding to quote a string. + """ + NON_ESCAPED_CHARS = UNRESERVED_CHARACTERS + safe + + # Fast path for strings that don't need escaping. + if not string.rstrip(NON_ESCAPED_CHARS): + return string + + return "".join( + [char if char in NON_ESCAPED_CHARS else PERCENT(char) for char in string] + ) + + +def quote(string: str, safe: str) -> str: + """ + Use percent-encoding to quote a string, omitting existing '%xx' escape sequences. + + See: https://www.rfc-editor.org/rfc/rfc3986#section-2.1 + + * `string`: The string to be percent-escaped. + * `safe`: A string containing characters that may be treated as safe, and do not + need to be escaped. Unreserved characters are always treated as safe. + See: https://www.rfc-editor.org/rfc/rfc3986#section-2.3 + """ + parts = [] + current_position = 0 + for match in re.finditer(PERCENT_ENCODED_REGEX, string): + start_position, end_position = match.start(), match.end() + matched_text = match.group(0) + # Add any text up to the '%xx' escape sequence. + if start_position != current_position: + leading_text = string[current_position:start_position] + parts.append(percent_encoded(leading_text, safe=safe)) + + # Add the '%xx' escape sequence. + parts.append(matched_text) + current_position = end_position + + # Add any text after the final '%xx' escape sequence. 
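+    # e.g. (assumed) quote("a b%20c", safe="") appends the trailing "c" here,
+    # giving "a%20b%20c" overall: the literal space is escaped while the
+    # existing "%20" sequence is passed through untouched.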
+ if current_position != len(string): + trailing_text = string[current_position:] + parts.append(percent_encoded(trailing_text, safe=safe)) + + return "".join(parts) diff --git a/.venv/lib/python3.12/site-packages/httpx/_urls.py b/.venv/lib/python3.12/site-packages/httpx/_urls.py new file mode 100644 index 0000000..147a8fa --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_urls.py @@ -0,0 +1,641 @@ +from __future__ import annotations + +import typing +from urllib.parse import parse_qs, unquote, urlencode + +import idna + +from ._types import QueryParamTypes +from ._urlparse import urlparse +from ._utils import primitive_value_to_str + +__all__ = ["URL", "QueryParams"] + + +class URL: + """ + url = httpx.URL("HTTPS://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink") + + assert url.scheme == "https" + assert url.username == "jo@email.com" + assert url.password == "a secret" + assert url.userinfo == b"jo%40email.com:a%20secret" + assert url.host == "müller.de" + assert url.raw_host == b"xn--mller-kva.de" + assert url.port == 1234 + assert url.netloc == b"xn--mller-kva.de:1234" + assert url.path == "/pa th" + assert url.query == b"?search=ab" + assert url.raw_path == b"/pa%20th?search=ab" + assert url.fragment == "anchorlink" + + The components of a URL are broken down like this: + + https://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink + [scheme] [ username ] [password] [ host ][port][ path ] [ query ] [fragment] + [ userinfo ] [ netloc ][ raw_path ] + + Note that: + + * `url.scheme` is normalized to always be lowercased. + + * `url.host` is normalized to always be lowercased. Internationalized domain + names are represented in unicode, without IDNA encoding applied. For instance: + + url = httpx.URL("http://中国.icom.museum") + assert url.host == "中国.icom.museum" + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.host == "中国.icom.museum" + + * `url.raw_host` is normalized to always be lowercased, and is IDNA encoded. + + url = httpx.URL("http://中国.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + + * `url.port` is either None or an integer. URLs that include the default port for + "http", "https", "ws", "wss", and "ftp" schemes have their port + normalized to `None`. + + assert httpx.URL("http://example.com") == httpx.URL("http://example.com:80") + assert httpx.URL("http://example.com").port is None + assert httpx.URL("http://example.com:80").port is None + + * `url.userinfo` is raw bytes, without URL escaping. Usually you'll want to work + with `url.username` and `url.password` instead, which handle the URL escaping. + + * `url.raw_path` is raw bytes of both the path and query, without URL escaping. + This portion is used as the target when constructing HTTP requests. Usually you'll + want to work with `url.path` instead. + + * `url.query` is raw bytes, without URL escaping. A URL query string portion can + only be properly URL escaped when decoding the parameter names and values + themselves. + """ + + def __init__(self, url: URL | str = "", **kwargs: typing.Any) -> None: + if kwargs: + allowed = { + "scheme": str, + "username": str, + "password": str, + "userinfo": bytes, + "host": str, + "port": int, + "netloc": bytes, + "path": str, + "query": bytes, + "raw_path": bytes, + "fragment": str, + "params": object, + } + + # Perform type checking for all supported keyword arguments. 
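+            # For example (assumed): URL("https://example.com", path=123) raises
+            # TypeError, since "path" must be given as `str`.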
+ for key, value in kwargs.items(): + if key not in allowed: + message = f"{key!r} is an invalid keyword argument for URL()" + raise TypeError(message) + if value is not None and not isinstance(value, allowed[key]): + expected = allowed[key].__name__ + seen = type(value).__name__ + message = f"Argument {key!r} must be {expected} but got {seen}" + raise TypeError(message) + if isinstance(value, bytes): + kwargs[key] = value.decode("ascii") + + if "params" in kwargs: + # Replace any "params" keyword with the raw "query" instead. + # + # Ensure that empty params use `kwargs["query"] = None` rather + # than `kwargs["query"] = ""`, so that generated URLs do not + # include an empty trailing "?". + params = kwargs.pop("params") + kwargs["query"] = None if not params else str(QueryParams(params)) + + if isinstance(url, str): + self._uri_reference = urlparse(url, **kwargs) + elif isinstance(url, URL): + self._uri_reference = url._uri_reference.copy_with(**kwargs) + else: + raise TypeError( + "Invalid type for url. Expected str or httpx.URL," + f" got {type(url)}: {url!r}" + ) + + @property + def scheme(self) -> str: + """ + The URL scheme, such as "http", "https". + Always normalised to lowercase. + """ + return self._uri_reference.scheme + + @property + def raw_scheme(self) -> bytes: + """ + The raw bytes representation of the URL scheme, such as b"http", b"https". + Always normalised to lowercase. + """ + return self._uri_reference.scheme.encode("ascii") + + @property + def userinfo(self) -> bytes: + """ + The URL userinfo as a raw bytestring. + For example: b"jo%40email.com:a%20secret". + """ + return self._uri_reference.userinfo.encode("ascii") + + @property + def username(self) -> str: + """ + The URL username as a string, with URL decoding applied. + For example: "jo@email.com" + """ + userinfo = self._uri_reference.userinfo + return unquote(userinfo.partition(":")[0]) + + @property + def password(self) -> str: + """ + The URL password as a string, with URL decoding applied. + For example: "a secret" + """ + userinfo = self._uri_reference.userinfo + return unquote(userinfo.partition(":")[2]) + + @property + def host(self) -> str: + """ + The URL host as a string. + Always normalized to lowercase, with IDNA hosts decoded into unicode. + + Examples: + + url = httpx.URL("http://www.EXAMPLE.org") + assert url.host == "www.example.org" + + url = httpx.URL("http://中国.icom.museum") + assert url.host == "中国.icom.museum" + + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.host == "中国.icom.museum" + + url = httpx.URL("https://[::ffff:192.168.0.1]") + assert url.host == "::ffff:192.168.0.1" + """ + host: str = self._uri_reference.host + + if host.startswith("xn--"): + host = idna.decode(host) + + return host + + @property + def raw_host(self) -> bytes: + """ + The raw bytes representation of the URL host. + Always normalized to lowercase, and IDNA encoded. + + Examples: + + url = httpx.URL("http://www.EXAMPLE.org") + assert url.raw_host == b"www.example.org" + + url = httpx.URL("http://中国.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + + url = httpx.URL("https://[::ffff:192.168.0.1]") + assert url.raw_host == b"::ffff:192.168.0.1" + """ + return self._uri_reference.host.encode("ascii") + + @property + def port(self) -> int | None: + """ + The URL port as an integer. + + Note that the URL class performs port normalization as per the WHATWG spec. 
+ Default ports for "http", "https", "ws", "wss", and "ftp" schemes are always + treated as `None`. + + For example: + + assert httpx.URL("http://www.example.com") == httpx.URL("http://www.example.com:80") + assert httpx.URL("http://www.example.com:80").port is None + """ + return self._uri_reference.port + + @property + def netloc(self) -> bytes: + """ + Either `` or `:` as bytes. + Always normalized to lowercase, and IDNA encoded. + + This property may be used for generating the value of a request + "Host" header. + """ + return self._uri_reference.netloc.encode("ascii") + + @property + def path(self) -> str: + """ + The URL path as a string. Excluding the query string, and URL decoded. + + For example: + + url = httpx.URL("https://example.com/pa%20th") + assert url.path == "/pa th" + """ + path = self._uri_reference.path or "/" + return unquote(path) + + @property + def query(self) -> bytes: + """ + The URL query string, as raw bytes, excluding the leading b"?". + + This is necessarily a bytewise interface, because we cannot + perform URL decoding of this representation until we've parsed + the keys and values into a QueryParams instance. + + For example: + + url = httpx.URL("https://example.com/?filter=some%20search%20terms") + assert url.query == b"filter=some%20search%20terms" + """ + query = self._uri_reference.query or "" + return query.encode("ascii") + + @property + def params(self) -> QueryParams: + """ + The URL query parameters, neatly parsed and packaged into an immutable + multidict representation. + """ + return QueryParams(self._uri_reference.query) + + @property + def raw_path(self) -> bytes: + """ + The complete URL path and query string as raw bytes. + Used as the target when constructing HTTP requests. + + For example: + + GET /users?search=some%20text HTTP/1.1 + Host: www.example.org + Connection: close + """ + path = self._uri_reference.path or "/" + if self._uri_reference.query is not None: + path += "?" + self._uri_reference.query + return path.encode("ascii") + + @property + def fragment(self) -> str: + """ + The URL fragments, as used in HTML anchors. + As a string, without the leading '#'. + """ + return unquote(self._uri_reference.fragment or "") + + @property + def is_absolute_url(self) -> bool: + """ + Return `True` for absolute URLs such as 'http://example.com/path', + and `False` for relative URLs such as '/path'. + """ + # We don't use `.is_absolute` from `rfc3986` because it treats + # URLs with a fragment portion as not absolute. + # What we actually care about is if the URL provides + # a scheme and hostname to which connections should be made. + return bool(self._uri_reference.scheme and self._uri_reference.host) + + @property + def is_relative_url(self) -> bool: + """ + Return `False` for absolute URLs such as 'http://example.com/path', + and `True` for relative URLs such as '/path'. + """ + return not self.is_absolute_url + + def copy_with(self, **kwargs: typing.Any) -> URL: + """ + Copy this URL, returning a new URL with some components altered. + Accepts the same set of parameters as the components that are made + available via properties on the `URL` class. 
+ + For example: + + url = httpx.URL("https://www.example.com").copy_with( + username="jo@gmail.com", password="a secret" + ) + assert url == "https://jo%40email.com:a%20secret@www.example.com" + """ + return URL(self, **kwargs) + + def copy_set_param(self, key: str, value: typing.Any = None) -> URL: + return self.copy_with(params=self.params.set(key, value)) + + def copy_add_param(self, key: str, value: typing.Any = None) -> URL: + return self.copy_with(params=self.params.add(key, value)) + + def copy_remove_param(self, key: str) -> URL: + return self.copy_with(params=self.params.remove(key)) + + def copy_merge_params(self, params: QueryParamTypes) -> URL: + return self.copy_with(params=self.params.merge(params)) + + def join(self, url: URL | str) -> URL: + """ + Return an absolute URL, using this URL as the base. + + Eg. + + url = httpx.URL("https://www.example.com/test") + url = url.join("/new/path") + assert url == "https://www.example.com/new/path" + """ + from urllib.parse import urljoin + + return URL(urljoin(str(self), str(URL(url)))) + + def __hash__(self) -> int: + return hash(str(self)) + + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, (URL, str)) and str(self) == str(URL(other)) + + def __str__(self) -> str: + return str(self._uri_reference) + + def __repr__(self) -> str: + scheme, userinfo, host, port, path, query, fragment = self._uri_reference + + if ":" in userinfo: + # Mask any password component. + userinfo = f'{userinfo.split(":")[0]}:[secure]' + + authority = "".join( + [ + f"{userinfo}@" if userinfo else "", + f"[{host}]" if ":" in host else host, + f":{port}" if port is not None else "", + ] + ) + url = "".join( + [ + f"{self.scheme}:" if scheme else "", + f"//{authority}" if authority else "", + path, + f"?{query}" if query is not None else "", + f"#{fragment}" if fragment is not None else "", + ] + ) + + return f"{self.__class__.__name__}({url!r})" + + @property + def raw(self) -> tuple[bytes, bytes, int, bytes]: # pragma: nocover + import collections + import warnings + + warnings.warn("URL.raw is deprecated.") + RawURL = collections.namedtuple( + "RawURL", ["raw_scheme", "raw_host", "port", "raw_path"] + ) + return RawURL( + raw_scheme=self.raw_scheme, + raw_host=self.raw_host, + port=self.port, + raw_path=self.raw_path, + ) + + +class QueryParams(typing.Mapping[str, str]): + """ + URL query parameters, as a multi-dict. + """ + + def __init__(self, *args: QueryParamTypes | None, **kwargs: typing.Any) -> None: + assert len(args) < 2, "Too many arguments." + assert not (args and kwargs), "Cannot mix named and unnamed arguments." 
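+        # Accepted input forms (illustrative): QueryParams("a=1&b=2"),
+        # QueryParams({"a": "1"}), QueryParams([("a", "1"), ("a", "2")]),
+        # or QueryParams(a="1").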
+ + value = args[0] if args else kwargs + + if value is None or isinstance(value, (str, bytes)): + value = value.decode("ascii") if isinstance(value, bytes) else value + self._dict = parse_qs(value, keep_blank_values=True) + elif isinstance(value, QueryParams): + self._dict = {k: list(v) for k, v in value._dict.items()} + else: + dict_value: dict[typing.Any, list[typing.Any]] = {} + if isinstance(value, (list, tuple)): + # Convert list inputs like: + # [("a", "123"), ("a", "456"), ("b", "789")] + # To a dict representation, like: + # {"a": ["123", "456"], "b": ["789"]} + for item in value: + dict_value.setdefault(item[0], []).append(item[1]) + else: + # Convert dict inputs like: + # {"a": "123", "b": ["456", "789"]} + # To dict inputs where values are always lists, like: + # {"a": ["123"], "b": ["456", "789"]} + dict_value = { + k: list(v) if isinstance(v, (list, tuple)) else [v] + for k, v in value.items() + } + + # Ensure that keys and values are neatly coerced to strings. + # We coerce values `True` and `False` to JSON-like "true" and "false" + # representations, and coerce `None` values to the empty string. + self._dict = { + str(k): [primitive_value_to_str(item) for item in v] + for k, v in dict_value.items() + } + + def keys(self) -> typing.KeysView[str]: + """ + Return all the keys in the query params. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.keys()) == ["a", "b"] + """ + return self._dict.keys() + + def values(self) -> typing.ValuesView[str]: + """ + Return all the values in the query params. If a key occurs more than once + only the first item for that key is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.values()) == ["123", "789"] + """ + return {k: v[0] for k, v in self._dict.items()}.values() + + def items(self) -> typing.ItemsView[str, str]: + """ + Return all items in the query params. If a key occurs more than once + only the first item for that key is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.items()) == [("a", "123"), ("b", "789")] + """ + return {k: v[0] for k, v in self._dict.items()}.items() + + def multi_items(self) -> list[tuple[str, str]]: + """ + Return all items in the query params. Allow duplicate keys to occur. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.multi_items()) == [("a", "123"), ("a", "456"), ("b", "789")] + """ + multi_items: list[tuple[str, str]] = [] + for k, v in self._dict.items(): + multi_items.extend([(k, i) for i in v]) + return multi_items + + def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any: + """ + Get a value from the query param for a given key. If the key occurs + more than once, then only the first value is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert q.get("a") == "123" + """ + if key in self._dict: + return self._dict[str(key)][0] + return default + + def get_list(self, key: str) -> list[str]: + """ + Get all values from the query param for a given key. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert q.get_list("a") == ["123", "456"] + """ + return list(self._dict.get(str(key), [])) + + def set(self, key: str, value: typing.Any = None) -> QueryParams: + """ + Return a new QueryParams instance, setting the value of a key. 
+ + Usage: + + q = httpx.QueryParams("a=123") + q = q.set("a", "456") + assert q == httpx.QueryParams("a=456") + """ + q = QueryParams() + q._dict = dict(self._dict) + q._dict[str(key)] = [primitive_value_to_str(value)] + return q + + def add(self, key: str, value: typing.Any = None) -> QueryParams: + """ + Return a new QueryParams instance, setting or appending the value of a key. + + Usage: + + q = httpx.QueryParams("a=123") + q = q.add("a", "456") + assert q == httpx.QueryParams("a=123&a=456") + """ + q = QueryParams() + q._dict = dict(self._dict) + q._dict[str(key)] = q.get_list(key) + [primitive_value_to_str(value)] + return q + + def remove(self, key: str) -> QueryParams: + """ + Return a new QueryParams instance, removing the value of a key. + + Usage: + + q = httpx.QueryParams("a=123") + q = q.remove("a") + assert q == httpx.QueryParams("") + """ + q = QueryParams() + q._dict = dict(self._dict) + q._dict.pop(str(key), None) + return q + + def merge(self, params: QueryParamTypes | None = None) -> QueryParams: + """ + Return a new QueryParams instance, updated with. + + Usage: + + q = httpx.QueryParams("a=123") + q = q.merge({"b": "456"}) + assert q == httpx.QueryParams("a=123&b=456") + + q = httpx.QueryParams("a=123") + q = q.merge({"a": "456", "b": "789"}) + assert q == httpx.QueryParams("a=456&b=789") + """ + q = QueryParams(params) + q._dict = {**self._dict, **q._dict} + return q + + def __getitem__(self, key: typing.Any) -> str: + return self._dict[key][0] + + def __contains__(self, key: typing.Any) -> bool: + return key in self._dict + + def __iter__(self) -> typing.Iterator[typing.Any]: + return iter(self.keys()) + + def __len__(self) -> int: + return len(self._dict) + + def __bool__(self) -> bool: + return bool(self._dict) + + def __hash__(self) -> int: + return hash(str(self)) + + def __eq__(self, other: typing.Any) -> bool: + if not isinstance(other, self.__class__): + return False + return sorted(self.multi_items()) == sorted(other.multi_items()) + + def __str__(self) -> str: + return urlencode(self.multi_items()) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + query_string = str(self) + return f"{class_name}({query_string!r})" + + def update(self, params: QueryParamTypes | None = None) -> None: + raise RuntimeError( + "QueryParams are immutable since 0.18.0. " + "Use `q = q.merge(...)` to create an updated copy." + ) + + def __setitem__(self, key: str, value: str) -> None: + raise RuntimeError( + "QueryParams are immutable since 0.18.0. " + "Use `q = q.set(key, value)` to create an updated copy." + ) diff --git a/.venv/lib/python3.12/site-packages/httpx/_utils.py b/.venv/lib/python3.12/site-packages/httpx/_utils.py new file mode 100644 index 0000000..7fe827d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/httpx/_utils.py @@ -0,0 +1,242 @@ +from __future__ import annotations + +import ipaddress +import os +import re +import typing +from urllib.request import getproxies + +from ._types import PrimitiveData + +if typing.TYPE_CHECKING: # pragma: no cover + from ._urls import URL + + +def primitive_value_to_str(value: PrimitiveData) -> str: + """ + Coerce a primitive data type into a string value. + + Note that we prefer JSON-style 'true'/'false' for boolean values here. 
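+    For example (illustrative): True -> "true", False -> "false", None -> "".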
+ """ + if value is True: + return "true" + elif value is False: + return "false" + elif value is None: + return "" + return str(value) + + +def get_environment_proxies() -> dict[str, str | None]: + """Gets proxy information from the environment""" + + # urllib.request.getproxies() falls back on System + # Registry and Config for proxies on Windows and macOS. + # We don't want to propagate non-HTTP proxies into + # our configuration such as 'TRAVIS_APT_PROXY'. + proxy_info = getproxies() + mounts: dict[str, str | None] = {} + + for scheme in ("http", "https", "all"): + if proxy_info.get(scheme): + hostname = proxy_info[scheme] + mounts[f"{scheme}://"] = ( + hostname if "://" in hostname else f"http://{hostname}" + ) + + no_proxy_hosts = [host.strip() for host in proxy_info.get("no", "").split(",")] + for hostname in no_proxy_hosts: + # See https://curl.haxx.se/libcurl/c/CURLOPT_NOPROXY.html for details + # on how names in `NO_PROXY` are handled. + if hostname == "*": + # If NO_PROXY=* is used or if "*" occurs as any one of the comma + # separated hostnames, then we should just bypass any information + # from HTTP_PROXY, HTTPS_PROXY, ALL_PROXY, and always ignore + # proxies. + return {} + elif hostname: + # NO_PROXY=.google.com is marked as "all://*.google.com, + # which disables "www.google.com" but not "google.com" + # NO_PROXY=google.com is marked as "all://*google.com, + # which disables "www.google.com" and "google.com". + # (But not "wwwgoogle.com") + # NO_PROXY can include domains, IPv6, IPv4 addresses and "localhost" + # NO_PROXY=example.com,::1,localhost,192.168.0.0/16 + if "://" in hostname: + mounts[hostname] = None + elif is_ipv4_hostname(hostname): + mounts[f"all://{hostname}"] = None + elif is_ipv6_hostname(hostname): + mounts[f"all://[{hostname}]"] = None + elif hostname.lower() == "localhost": + mounts[f"all://{hostname}"] = None + else: + mounts[f"all://*{hostname}"] = None + + return mounts + + +def to_bytes(value: str | bytes, encoding: str = "utf-8") -> bytes: + return value.encode(encoding) if isinstance(value, str) else value + + +def to_str(value: str | bytes, encoding: str = "utf-8") -> str: + return value if isinstance(value, str) else value.decode(encoding) + + +def to_bytes_or_str(value: str, match_type_of: typing.AnyStr) -> typing.AnyStr: + return value if isinstance(match_type_of, str) else value.encode() + + +def unquote(value: str) -> str: + return value[1:-1] if value[0] == value[-1] == '"' else value + + +def peek_filelike_length(stream: typing.Any) -> int | None: + """ + Given a file-like stream object, return its length in number of bytes + without reading it into memory. + """ + try: + # Is it an actual file? + fd = stream.fileno() + # Yup, seems to be an actual file. + length = os.fstat(fd).st_size + except (AttributeError, OSError): + # No... Maybe it's something that supports random access, like `io.BytesIO`? + try: + # Assuming so, go to end of stream to figure out its length, + # then put it back in place. + offset = stream.tell() + length = stream.seek(0, os.SEEK_END) + stream.seek(offset) + except (AttributeError, OSError): + # Not even that? Sorry, we're doomed... + return None + + return length + + +class URLPattern: + """ + A utility class currently used for making lookups against proxy keys... + + # Wildcard matching... + >>> pattern = URLPattern("all://") + >>> pattern.matches(httpx.URL("http://example.com")) + True + + # Witch scheme matching... 
+ >>> pattern = URLPattern("https://") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + False + + # With domain matching... + >>> pattern = URLPattern("https://example.com") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + False + >>> pattern.matches(httpx.URL("https://other.com")) + False + + # Wildcard scheme, with domain matching... + >>> pattern = URLPattern("all://example.com") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + True + >>> pattern.matches(httpx.URL("https://other.com")) + False + + # With port matching... + >>> pattern = URLPattern("https://example.com:1234") + >>> pattern.matches(httpx.URL("https://example.com:1234")) + True + >>> pattern.matches(httpx.URL("https://example.com")) + False + """ + + def __init__(self, pattern: str) -> None: + from ._urls import URL + + if pattern and ":" not in pattern: + raise ValueError( + f"Proxy keys should use proper URL forms rather " + f"than plain scheme strings. " + f'Instead of "{pattern}", use "{pattern}://"' + ) + + url = URL(pattern) + self.pattern = pattern + self.scheme = "" if url.scheme == "all" else url.scheme + self.host = "" if url.host == "*" else url.host + self.port = url.port + if not url.host or url.host == "*": + self.host_regex: typing.Pattern[str] | None = None + elif url.host.startswith("*."): + # *.example.com should match "www.example.com", but not "example.com" + domain = re.escape(url.host[2:]) + self.host_regex = re.compile(f"^.+\\.{domain}$") + elif url.host.startswith("*"): + # *example.com should match "www.example.com" and "example.com" + domain = re.escape(url.host[1:]) + self.host_regex = re.compile(f"^(.+\\.)?{domain}$") + else: + # example.com should match "example.com" but not "www.example.com" + domain = re.escape(url.host) + self.host_regex = re.compile(f"^{domain}$") + + def matches(self, other: URL) -> bool: + if self.scheme and self.scheme != other.scheme: + return False + if ( + self.host + and self.host_regex is not None + and not self.host_regex.match(other.host) + ): + return False + if self.port is not None and self.port != other.port: + return False + return True + + @property + def priority(self) -> tuple[int, int, int]: + """ + The priority allows URLPattern instances to be sortable, so that + we can match from most specific to least specific. + """ + # URLs with a port should take priority over URLs without a port. + port_priority = 0 if self.port is not None else 1 + # Longer hostnames should match first. + host_priority = -len(self.host) + # Longer schemes should match first. 
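+        # (Negating the lengths means an ascending sort over these tuples
+        # visits the most specific patterns first.)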
+ scheme_priority = -len(self.scheme) + return (port_priority, host_priority, scheme_priority) + + def __hash__(self) -> int: + return hash(self.pattern) + + def __lt__(self, other: URLPattern) -> bool: + return self.priority < other.priority + + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, URLPattern) and self.pattern == other.pattern + + +def is_ipv4_hostname(hostname: str) -> bool: + try: + ipaddress.IPv4Address(hostname.split("/")[0]) + except Exception: + return False + return True + + +def is_ipv6_hostname(hostname: str) -> bool: + try: + ipaddress.IPv6Address(hostname.split("/")[0]) + except Exception: + return False + return True diff --git a/.venv/lib/python3.12/site-packages/httpx/py.typed b/.venv/lib/python3.12/site-packages/httpx/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/idna-3.11.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/idna-3.11.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/idna-3.11.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/idna-3.11.dist-info/METADATA b/.venv/lib/python3.12/site-packages/idna-3.11.dist-info/METADATA new file mode 100644 index 0000000..7a4a4b7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/idna-3.11.dist-info/METADATA @@ -0,0 +1,209 @@ +Metadata-Version: 2.4 +Name: idna +Version: 3.11 +Summary: Internationalized Domain Names in Applications (IDNA) +Author-email: Kim Davies +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-Expression: BSD-3-Clause +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: Name Service (DNS) +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Utilities +License-File: LICENSE.md +Requires-Dist: ruff >= 0.6.2 ; extra == "all" +Requires-Dist: mypy >= 1.11.2 ; extra == "all" +Requires-Dist: pytest >= 8.3.2 ; extra == "all" +Requires-Dist: flake8 >= 7.1.1 ; extra == "all" +Project-URL: Changelog, https://github.com/kjd/idna/blob/master/HISTORY.rst +Project-URL: Issue tracker, https://github.com/kjd/idna/issues +Project-URL: Source, https://github.com/kjd/idna +Provides-Extra: all + +Internationalized Domain Names in Applications (IDNA) +===================================================== + +Support for `Internationalized Domain Names in +Applications (IDNA) `_ +and `Unicode IDNA Compatibility Processing +`_. + +The latest versions of these standards supplied here provide +more comprehensive language coverage and reduce the potential of +allowing domains with known security vulnerabilities. 
This library +is a suitable replacement for the “encodings.idna” +module that comes with the Python standard library, but which +only supports an older superseded IDNA specification from 2003. + +Basic functions are simply executed: + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + + +Installation +------------ + +This package is available for installation from PyPI via the +typical mechanisms, such as: + +.. code-block:: bash + + $ python3 -m pip install idna + + +Usage +----- + +For typical usage, the ``encode`` and ``decode`` functions will take a +domain name argument and perform a conversion to ASCII compatible encoding +(known as A-labels), or to Unicode strings (known as U-labels) +respectively. + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + +Conversions can be applied at a per-label basis using the ``ulabel`` or +``alabel`` functions if necessary: + +.. code-block:: pycon + + >>> idna.alabel('测试') + b'xn--0zwm56d' + + +Compatibility Mapping (UTS #46) ++++++++++++++++++++++++++++++++ + +This library provides support for `Unicode IDNA Compatibility +Processing `_ which normalizes input from +different potential ways a user may input a domain prior to performing the IDNA +conversion operations. This functionality, known as a +`mapping `_, is considered by the +specification to be a local user-interface issue distinct from IDNA +conversion functionality. + +For example, “Königsgäßchen” is not a permissible label as *LATIN +CAPITAL LETTER K* is not allowed (nor are capital letters in general). +UTS 46 will convert this into lower case prior to applying the IDNA +conversion. + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('Königsgäßchen') + ... + idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed + >>> idna.encode('Königsgäßchen', uts46=True) + b'xn--knigsgchen-b4a3dun' + >>> print(idna.decode('xn--knigsgchen-b4a3dun')) + königsgäßchen + + +Exceptions +---------- + +All errors raised during the conversion following the specification +should raise an exception derived from the ``idna.IDNAError`` base +class. + +More specific exceptions that may be generated as ``idna.IDNABidiError`` +when the error reflects an illegal combination of left-to-right and +right-to-left characters in a label; ``idna.InvalidCodepoint`` when +a specific codepoint is an illegal character in an IDN label (i.e. +INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is +illegal based on its position in the string (i.e. it is CONTEXTO or CONTEXTJ +but the contextual requirements are not satisfied.) + +Building and Diagnostics +------------------------ + +The IDNA and UTS 46 functionality relies upon pre-calculated lookup +tables for performance. These tables are derived from computing against +eligibility criteria in the respective standards using the command-line +script ``tools/idna-data``. + +This tool will fetch relevant codepoint data from the Unicode repository +and perform the required calculations to identify eligibility. There are +three main modes: + +* ``idna-data make-libdata``. Generates ``idnadata.py`` and + ``uts46data.py``, the pre-calculated lookup tables used for IDNA and + UTS 46 conversions. 
Implementers who wish to track this library against + a different Unicode version may use this tool to manually generate a + different version of the ``idnadata.py`` and ``uts46data.py`` files. + +* ``idna-data make-table``. Generate a table of the IDNA disposition + (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix + B.1 of RFC 5892 and the pre-computed tables published by `IANA + `_. + +* ``idna-data U+0061``. Prints debugging output on the various + properties associated with an individual Unicode codepoint (in this + case, U+0061), that are used to assess the IDNA and UTS 46 status of a + codepoint. This is helpful in debugging or analysis. + +The tool accepts a number of arguments, described using ``idna-data +-h``. Most notably, the ``--version`` argument allows the specification +of the version of Unicode to be used in computing the table data. For +example, ``idna-data --version 9.0.0 make-libdata`` will generate +library data against Unicode 9.0.0. + + +Additional Notes +---------------- + +* **Packages**. The latest tagged release version is published in the + `Python Package Index `_. + +* **Version support**. This library supports Python 3.8 and higher. + As this library serves as a low-level toolkit for a variety of + applications, many of which strive for broad compatibility with older + Python versions, there is no rush to remove older interpreter support. + Support for older versions are likely to be removed from new releases + as automated tests can no longer easily be run, i.e. once the Python + version is officially end-of-life. + +* **Testing**. The library has a test suite based on each rule of the + IDNA specification, as well as tests that are provided as part of the + Unicode Technical Standard 46, `Unicode IDNA Compatibility Processing + `_. + +* **Emoji**. It is an occasional request to support emoji domains in + this library. Encoding of symbols like emoji is expressly prohibited by + the technical standard IDNA 2008 and emoji domains are broadly phased + out across the domain industry due to associated security risks. For + now, applications that need to support these non-compliant labels + may wish to consider trying the encode/decode operation in this library + first, and then falling back to using `encodings.idna`. See `the Github + project `_ for more discussion. + +* **Transitional processing**. Unicode 16.0.0 removed transitional + processing so the `transitional` argument for the encode() method + no longer has any effect and will be removed at a later date. 
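+
+As a closing illustration of the Usage and Exceptions sections above, here is a
+minimal sketch of defensive encoding. The helper name ``to_alabel`` is purely
+illustrative and not part of this package; it only wraps ``encode`` with the
+single ``IDNAError`` except clause described earlier:
+
+.. code-block:: python
+
+    from typing import Optional
+
+    import idna
+
+    def to_alabel(domain: str) -> Optional[bytes]:
+        # All conversion failures derive from idna.IDNAError, so one except
+        # clause is enough when only a yes/no answer is needed.
+        try:
+            return idna.encode(domain, uts46=True)
+        except idna.IDNAError:
+            return None
+
+    assert to_alabel("ドメイン.テスト") == b"xn--eckwd4c7c.xn--zckzah"
+    assert to_alabel("xn--") is None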
+ diff --git a/.venv/lib/python3.12/site-packages/idna-3.11.dist-info/RECORD b/.venv/lib/python3.12/site-packages/idna-3.11.dist-info/RECORD new file mode 100644 index 0000000..8525b6d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/idna-3.11.dist-info/RECORD @@ -0,0 +1,22 @@ +idna-3.11.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +idna-3.11.dist-info/METADATA,sha256=fCwSww9SuiN8TIHllFSASUQCW55hAs8dzKnr9RaEEbA,8378 +idna-3.11.dist-info/RECORD,, +idna-3.11.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 +idna-3.11.dist-info/licenses/LICENSE.md,sha256=t6M2q_OwThgOwGXN0W5wXQeeHMehT5EKpukYfza5zYc,1541 +idna/__init__.py,sha256=MPqNDLZbXqGaNdXxAFhiqFPKEQXju2jNQhCey6-5eJM,868 +idna/__pycache__/__init__.cpython-312.pyc,, +idna/__pycache__/codec.cpython-312.pyc,, +idna/__pycache__/compat.cpython-312.pyc,, +idna/__pycache__/core.cpython-312.pyc,, +idna/__pycache__/idnadata.cpython-312.pyc,, +idna/__pycache__/intranges.cpython-312.pyc,, +idna/__pycache__/package_data.cpython-312.pyc,, +idna/__pycache__/uts46data.cpython-312.pyc,, +idna/codec.py,sha256=M2SGWN7cs_6B32QmKTyTN6xQGZeYQgQ2wiX3_DR6loE,3438 +idna/compat.py,sha256=RzLy6QQCdl9784aFhb2EX9EKGCJjg0P3PilGdeXXcx8,316 +idna/core.py,sha256=P26_XVycuMTZ1R2mNK1ZREVzM5mvTzdabBXfyZVU1Lc,13246 +idna/idnadata.py,sha256=SG8jhaGE53iiD6B49pt2pwTv_UvClciWE-N54oR2p4U,79623 +idna/intranges.py,sha256=amUtkdhYcQG8Zr-CoMM_kVRacxkivC1WgxN1b63KKdU,1898 +idna/package_data.py,sha256=_CUavOxobnbyNG2FLyHoN8QHP3QM9W1tKuw7eq9QwBk,21 +idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +idna/uts46data.py,sha256=H9J35VkD0F9L9mKOqjeNGd2A-Va6FlPoz6Jz4K7h-ps,243725 diff --git a/.venv/lib/python3.12/site-packages/idna-3.11.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/idna-3.11.dist-info/WHEEL new file mode 100644 index 0000000..d8b9936 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/idna-3.11.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.12.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.venv/lib/python3.12/site-packages/idna-3.11.dist-info/licenses/LICENSE.md b/.venv/lib/python3.12/site-packages/idna-3.11.dist-info/licenses/LICENSE.md new file mode 100644 index 0000000..256ba90 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/idna-3.11.dist-info/licenses/LICENSE.md @@ -0,0 +1,31 @@ +BSD 3-Clause License + +Copyright (c) 2013-2025, Kim Davies and contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/lib/python3.12/site-packages/idna/__init__.py b/.venv/lib/python3.12/site-packages/idna/__init__.py new file mode 100644 index 0000000..cfdc030 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/idna/__init__.py @@ -0,0 +1,45 @@ +from .core import ( + IDNABidiError, + IDNAError, + InvalidCodepoint, + InvalidCodepointContext, + alabel, + check_bidi, + check_hyphen_ok, + check_initial_combiner, + check_label, + check_nfc, + decode, + encode, + ulabel, + uts46_remap, + valid_contextj, + valid_contexto, + valid_label_length, + valid_string_length, +) +from .intranges import intranges_contain +from .package_data import __version__ + +__all__ = [ + "__version__", + "IDNABidiError", + "IDNAError", + "InvalidCodepoint", + "InvalidCodepointContext", + "alabel", + "check_bidi", + "check_hyphen_ok", + "check_initial_combiner", + "check_label", + "check_nfc", + "decode", + "encode", + "intranges_contain", + "ulabel", + "uts46_remap", + "valid_contextj", + "valid_contexto", + "valid_label_length", + "valid_string_length", +] diff --git a/.venv/lib/python3.12/site-packages/idna/codec.py b/.venv/lib/python3.12/site-packages/idna/codec.py new file mode 100644 index 0000000..cbc2e4f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/idna/codec.py @@ -0,0 +1,122 @@ +import codecs +import re +from typing import Any, Optional, Tuple + +from .core import IDNAError, alabel, decode, encode, ulabel + +_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") + + +class Codec(codecs.Codec): + def encode(self, data: str, errors: str = "strict") -> Tuple[bytes, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return b"", 0 + + return encode(data), len(data) + + def decode(self, data: bytes, errors: str = "strict") -> Tuple[str, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return "", 0 + + return decode(data), len(data) + + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return b"", 0 + + labels = _unicode_dots_re.split(data) + trailing_dot = b"" + if labels: + if not labels[-1]: + trailing_dot = b"." + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = b"." 
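+        # Encode each remaining label to its A-label form; size counts the
+        # number of input characters consumed, including the separator dots.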
+ + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result_bytes = b".".join(result) + trailing_dot + size += len(trailing_dot) + return result_bytes, size + + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return ("", 0) + + if not isinstance(data, str): + data = str(data, "ascii") + + labels = _unicode_dots_re.split(data) + trailing_dot = "" + if labels: + if not labels[-1]: + trailing_dot = "." + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = "." + + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result_str = ".".join(result) + trailing_dot + size += len(trailing_dot) + return (result_str, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + + +class StreamReader(Codec, codecs.StreamReader): + pass + + +def search_function(name: str) -> Optional[codecs.CodecInfo]: + if name != "idna2008": + return None + return codecs.CodecInfo( + name=name, + encode=Codec().encode, + decode=Codec().decode, # type: ignore + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) + + +codecs.register(search_function) diff --git a/.venv/lib/python3.12/site-packages/idna/compat.py b/.venv/lib/python3.12/site-packages/idna/compat.py new file mode 100644 index 0000000..1df9f2a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/idna/compat.py @@ -0,0 +1,15 @@ +from typing import Any, Union + +from .core import decode, encode + + +def ToASCII(label: str) -> bytes: + return encode(label) + + +def ToUnicode(label: Union[bytes, bytearray]) -> str: + return decode(label) + + +def nameprep(s: Any) -> None: + raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") diff --git a/.venv/lib/python3.12/site-packages/idna/core.py b/.venv/lib/python3.12/site-packages/idna/core.py new file mode 100644 index 0000000..8177bf7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/idna/core.py @@ -0,0 +1,437 @@ +import bisect +import re +import unicodedata +from typing import Optional, Union + +from . 
import idnadata +from .intranges import intranges_contain + +_virama_combining_class = 9 +_alabel_prefix = b"xn--" +_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") + + +class IDNAError(UnicodeError): + """Base exception for all IDNA-encoding related problems""" + + pass + + +class IDNABidiError(IDNAError): + """Exception when bidirectional requirements are not satisfied""" + + pass + + +class InvalidCodepoint(IDNAError): + """Exception when a disallowed or unallocated codepoint is used""" + + pass + + +class InvalidCodepointContext(IDNAError): + """Exception when the codepoint is not valid in the context it is used""" + + pass + + +def _combining_class(cp: int) -> int: + v = unicodedata.combining(chr(cp)) + if v == 0: + if not unicodedata.name(chr(cp)): + raise ValueError("Unknown character in unicodedata") + return v + + +def _is_script(cp: str, script: str) -> bool: + return intranges_contain(ord(cp), idnadata.scripts[script]) + + +def _punycode(s: str) -> bytes: + return s.encode("punycode") + + +def _unot(s: int) -> str: + return "U+{:04X}".format(s) + + +def valid_label_length(label: Union[bytes, str]) -> bool: + if len(label) > 63: + return False + return True + + +def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool: + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label: str, check_ltr: bool = False) -> bool: + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for idx, cp in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == "": + # String likely comes from a newer version of Unicode + raise IDNABidiError("Unknown directionality in label {} at position {}".format(repr(label), idx)) + if direction in ["R", "AL", "AN"]: + bidi_label = True + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ["R", "AL"]: + rtl = True + elif direction == "L": + rtl = False + else: + raise IDNABidiError("First codepoint in label {} must be directionality L, R or AL".format(repr(label))) + + valid_ending = False + number_type: Optional[str] = None + for idx, cp in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if direction not in [ + "R", + "AL", + "AN", + "EN", + "ES", + "CS", + "ET", + "ON", + "BN", + "NSM", + ]: + raise IDNABidiError("Invalid direction for codepoint at position {} in a right-to-left label".format(idx)) + # Bidi rule 3 + if direction in ["R", "AL", "EN", "AN"]: + valid_ending = True + elif direction != "NSM": + valid_ending = False + # Bidi rule 4 + if direction in ["AN", "EN"]: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError("Can not mix numeral types in a right-to-left label") + else: + # Bidi rule 5 + if direction not in ["L", "EN", "ES", "CS", "ET", "ON", "BN", "NSM"]: + raise IDNABidiError("Invalid direction for codepoint at position {} in a left-to-right label".format(idx)) + # Bidi rule 6 + if direction in ["L", "EN"]: + valid_ending = True + elif direction != "NSM": + valid_ending = False + + if not valid_ending: + raise IDNABidiError("Label ends with illegal codepoint directionality") + + return True + + +def check_initial_combiner(label: str) -> bool: + if unicodedata.category(label[0])[0] == "M": + raise IDNAError("Label begins with an illegal combining character") + return True + + +def check_hyphen_ok(label: str) -> bool: 
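+    # RFC 5891 hyphen restrictions: "--" in the 3rd and 4th characters is
+    # reserved for the A-label prefix ("xn--"), and a label may not begin or
+    # end with a hyphen.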
+ if label[2:4] == "--": + raise IDNAError("Label has disallowed hyphens in 3rd and 4th position") + if label[0] == "-" or label[-1] == "-": + raise IDNAError("Label must not start or end with a hyphen") + return True + + +def check_nfc(label: str) -> None: + if unicodedata.normalize("NFC", label) != label: + raise IDNAError("Label must be in Normalization Form C") + + +def valid_contextj(label: str, pos: int) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x200C: + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos - 1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord("T"): + continue + elif joining_type in [ord("L"), ord("D")]: + ok = True + break + else: + break + + if not ok: + return False + + ok = False + for i in range(pos + 1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord("T"): + continue + elif joining_type in [ord("R"), ord("D")]: + ok = True + break + else: + break + return ok + + if cp_value == 0x200D: + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + return False + + +def valid_contexto(label: str, pos: int, exception: bool = False) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x00B7: + if 0 < pos < len(label) - 1: + if ord(label[pos - 1]) == 0x006C and ord(label[pos + 1]) == 0x006C: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label) - 1 and len(label) > 1: + return _is_script(label[pos + 1], "Greek") + return False + + elif cp_value == 0x05F3 or cp_value == 0x05F4: + if pos > 0: + return _is_script(label[pos - 1], "Hebrew") + return False + + elif cp_value == 0x30FB: + for cp in label: + if cp == "\u30fb": + continue + if _is_script(cp, "Hiragana") or _is_script(cp, "Katakana") or _is_script(cp, "Han"): + return True + return False + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6F0 <= ord(cp) <= 0x06F9: + return False + return True + + elif 0x6F0 <= cp_value <= 0x6F9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + return False + + +def check_label(label: Union[str, bytes, bytearray]) -> None: + if isinstance(label, (bytes, bytearray)): + label = label.decode("utf-8") + if len(label) == 0: + raise IDNAError("Empty Label") + + check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for pos, cp in enumerate(label): + cp_value = ord(cp) + if intranges_contain(cp_value, idnadata.codepoint_classes["PVALID"]): + continue + elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTJ"]): + try: + if not valid_contextj(label, pos): + raise InvalidCodepointContext( + "Joiner {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) + ) + except ValueError: + raise IDNAError( + "Unknown codepoint adjacent to joiner {} at position {} in {}".format( + _unot(cp_value), pos + 1, repr(label) + ) + ) + elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTO"]): + if not valid_contexto(label, pos): + raise InvalidCodepointContext( + "Codepoint {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) + ) + else: + raise InvalidCodepoint( + "Codepoint {} at position {} of {} not allowed".format(_unot(cp_value), pos + 1, repr(label)) + ) + + check_bidi(label) + + +def alabel(label: str) -> bytes: + try: + label_bytes = label.encode("ascii") + 
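+        # The label is already ASCII: validate it via ulabel(), enforce the
+        # length limit, and return it unchanged.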
ulabel(label_bytes) + if not valid_label_length(label_bytes): + raise IDNAError("Label too long") + return label_bytes + except UnicodeEncodeError: + pass + + check_label(label) + label_bytes = _alabel_prefix + _punycode(label) + + if not valid_label_length(label_bytes): + raise IDNAError("Label too long") + + return label_bytes + + +def ulabel(label: Union[str, bytes, bytearray]) -> str: + if not isinstance(label, (bytes, bytearray)): + try: + label_bytes = label.encode("ascii") + except UnicodeEncodeError: + check_label(label) + return label + else: + label_bytes = bytes(label) + + label_bytes = label_bytes.lower() + if label_bytes.startswith(_alabel_prefix): + label_bytes = label_bytes[len(_alabel_prefix) :] + if not label_bytes: + raise IDNAError("Malformed A-label, no Punycode eligible content found") + if label_bytes.decode("ascii")[-1] == "-": + raise IDNAError("A-label must not end with a hyphen") + else: + check_label(label_bytes) + return label_bytes.decode("ascii") + + try: + label = label_bytes.decode("punycode") + except UnicodeError: + raise IDNAError("Invalid A-label") + check_label(label) + return label + + +def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str: + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + + output = "" + + for pos, char in enumerate(domain): + code_point = ord(char) + try: + uts46row = uts46data[code_point if code_point < 256 else bisect.bisect_left(uts46data, (code_point, "Z")) - 1] + status = uts46row[1] + replacement: Optional[str] = None + if len(uts46row) == 3: + replacement = uts46row[2] + if ( + status == "V" + or (status == "D" and not transitional) + or (status == "3" and not std3_rules and replacement is None) + ): + output += char + elif replacement is not None and ( + status == "M" or (status == "3" and not std3_rules) or (status == "D" and transitional) + ): + output += replacement + elif status != "I": + raise IndexError() + except IndexError: + raise InvalidCodepoint( + "Codepoint {} not allowed at position {} in {}".format(_unot(code_point), pos + 1, repr(domain)) + ) + + return unicodedata.normalize("NFC", output) + + +def encode( + s: Union[str, bytes, bytearray], + strict: bool = False, + uts46: bool = False, + std3_rules: bool = False, + transitional: bool = False, +) -> bytes: + if not isinstance(s, str): + try: + s = str(s, "ascii") + except UnicodeDecodeError: + raise IDNAError("should pass a unicode string to the function rather than a byte string.") + if uts46: + s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split(".") + else: + labels = _unicode_dots_re.split(s) + if not labels or labels == [""]: + raise IDNAError("Empty domain") + if labels[-1] == "": + del labels[-1] + trailing_dot = True + for label in labels: + s = alabel(label) + if s: + result.append(s) + else: + raise IDNAError("Empty label") + if trailing_dot: + result.append(b"") + s = b".".join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError("Domain too long") + return s + + +def decode( + s: Union[str, bytes, bytearray], + strict: bool = False, + uts46: bool = False, + std3_rules: bool = False, +) -> str: + try: + if not isinstance(s, str): + s = str(s, "ascii") + except UnicodeDecodeError: + raise IDNAError("Invalid ASCII in A-label") + if uts46: + s = uts46_remap(s, std3_rules, False) + trailing_dot = False + result = [] + if not strict: + labels = _unicode_dots_re.split(s) + else: 
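+        # strict mode splits only on U+002E, not on the other IDNA dot
+        # separators.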
+ labels = s.split(".") + if not labels or labels == [""]: + raise IDNAError("Empty domain") + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + s = ulabel(label) + if s: + result.append(s) + else: + raise IDNAError("Empty label") + if trailing_dot: + result.append("") + return ".".join(result) diff --git a/.venv/lib/python3.12/site-packages/idna/idnadata.py b/.venv/lib/python3.12/site-packages/idna/idnadata.py new file mode 100644 index 0000000..ded47ca --- /dev/null +++ b/.venv/lib/python3.12/site-packages/idna/idnadata.py @@ -0,0 +1,4309 @@ +# This file is automatically generated by tools/idna-data + +__version__ = "16.0.0" + +scripts = { + "Greek": ( + 0x37000000374, + 0x37500000378, + 0x37A0000037E, + 0x37F00000380, + 0x38400000385, + 0x38600000387, + 0x3880000038B, + 0x38C0000038D, + 0x38E000003A2, + 0x3A3000003E2, + 0x3F000000400, + 0x1D2600001D2B, + 0x1D5D00001D62, + 0x1D6600001D6B, + 0x1DBF00001DC0, + 0x1F0000001F16, + 0x1F1800001F1E, + 0x1F2000001F46, + 0x1F4800001F4E, + 0x1F5000001F58, + 0x1F5900001F5A, + 0x1F5B00001F5C, + 0x1F5D00001F5E, + 0x1F5F00001F7E, + 0x1F8000001FB5, + 0x1FB600001FC5, + 0x1FC600001FD4, + 0x1FD600001FDC, + 0x1FDD00001FF0, + 0x1FF200001FF5, + 0x1FF600001FFF, + 0x212600002127, + 0xAB650000AB66, + 0x101400001018F, + 0x101A0000101A1, + 0x1D2000001D246, + ), + "Han": ( + 0x2E8000002E9A, + 0x2E9B00002EF4, + 0x2F0000002FD6, + 0x300500003006, + 0x300700003008, + 0x30210000302A, + 0x30380000303C, + 0x340000004DC0, + 0x4E000000A000, + 0xF9000000FA6E, + 0xFA700000FADA, + 0x16FE200016FE4, + 0x16FF000016FF2, + 0x200000002A6E0, + 0x2A7000002B73A, + 0x2B7400002B81E, + 0x2B8200002CEA2, + 0x2CEB00002EBE1, + 0x2EBF00002EE5E, + 0x2F8000002FA1E, + 0x300000003134B, + 0x31350000323B0, + ), + "Hebrew": ( + 0x591000005C8, + 0x5D0000005EB, + 0x5EF000005F5, + 0xFB1D0000FB37, + 0xFB380000FB3D, + 0xFB3E0000FB3F, + 0xFB400000FB42, + 0xFB430000FB45, + 0xFB460000FB50, + ), + "Hiragana": ( + 0x304100003097, + 0x309D000030A0, + 0x1B0010001B120, + 0x1B1320001B133, + 0x1B1500001B153, + 0x1F2000001F201, + ), + "Katakana": ( + 0x30A1000030FB, + 0x30FD00003100, + 0x31F000003200, + 0x32D0000032FF, + 0x330000003358, + 0xFF660000FF70, + 0xFF710000FF9E, + 0x1AFF00001AFF4, + 0x1AFF50001AFFC, + 0x1AFFD0001AFFF, + 0x1B0000001B001, + 0x1B1200001B123, + 0x1B1550001B156, + 0x1B1640001B168, + ), +} +joining_types = { + 0xAD: 84, + 0x300: 84, + 0x301: 84, + 0x302: 84, + 0x303: 84, + 0x304: 84, + 0x305: 84, + 0x306: 84, + 0x307: 84, + 0x308: 84, + 0x309: 84, + 0x30A: 84, + 0x30B: 84, + 0x30C: 84, + 0x30D: 84, + 0x30E: 84, + 0x30F: 84, + 0x310: 84, + 0x311: 84, + 0x312: 84, + 0x313: 84, + 0x314: 84, + 0x315: 84, + 0x316: 84, + 0x317: 84, + 0x318: 84, + 0x319: 84, + 0x31A: 84, + 0x31B: 84, + 0x31C: 84, + 0x31D: 84, + 0x31E: 84, + 0x31F: 84, + 0x320: 84, + 0x321: 84, + 0x322: 84, + 0x323: 84, + 0x324: 84, + 0x325: 84, + 0x326: 84, + 0x327: 84, + 0x328: 84, + 0x329: 84, + 0x32A: 84, + 0x32B: 84, + 0x32C: 84, + 0x32D: 84, + 0x32E: 84, + 0x32F: 84, + 0x330: 84, + 0x331: 84, + 0x332: 84, + 0x333: 84, + 0x334: 84, + 0x335: 84, + 0x336: 84, + 0x337: 84, + 0x338: 84, + 0x339: 84, + 0x33A: 84, + 0x33B: 84, + 0x33C: 84, + 0x33D: 84, + 0x33E: 84, + 0x33F: 84, + 0x340: 84, + 0x341: 84, + 0x342: 84, + 0x343: 84, + 0x344: 84, + 0x345: 84, + 0x346: 84, + 0x347: 84, + 0x348: 84, + 0x349: 84, + 0x34A: 84, + 0x34B: 84, + 0x34C: 84, + 0x34D: 84, + 0x34E: 84, + 0x34F: 84, + 0x350: 84, + 0x351: 84, + 0x352: 84, + 0x353: 84, + 0x354: 84, + 0x355: 84, + 0x356: 84, + 0x357: 84, + 0x358: 84, + 0x359: 
84, + 0x35A: 84, + 0x35B: 84, + 0x35C: 84, + 0x35D: 84, + 0x35E: 84, + 0x35F: 84, + 0x360: 84, + 0x361: 84, + 0x362: 84, + 0x363: 84, + 0x364: 84, + 0x365: 84, + 0x366: 84, + 0x367: 84, + 0x368: 84, + 0x369: 84, + 0x36A: 84, + 0x36B: 84, + 0x36C: 84, + 0x36D: 84, + 0x36E: 84, + 0x36F: 84, + 0x483: 84, + 0x484: 84, + 0x485: 84, + 0x486: 84, + 0x487: 84, + 0x488: 84, + 0x489: 84, + 0x591: 84, + 0x592: 84, + 0x593: 84, + 0x594: 84, + 0x595: 84, + 0x596: 84, + 0x597: 84, + 0x598: 84, + 0x599: 84, + 0x59A: 84, + 0x59B: 84, + 0x59C: 84, + 0x59D: 84, + 0x59E: 84, + 0x59F: 84, + 0x5A0: 84, + 0x5A1: 84, + 0x5A2: 84, + 0x5A3: 84, + 0x5A4: 84, + 0x5A5: 84, + 0x5A6: 84, + 0x5A7: 84, + 0x5A8: 84, + 0x5A9: 84, + 0x5AA: 84, + 0x5AB: 84, + 0x5AC: 84, + 0x5AD: 84, + 0x5AE: 84, + 0x5AF: 84, + 0x5B0: 84, + 0x5B1: 84, + 0x5B2: 84, + 0x5B3: 84, + 0x5B4: 84, + 0x5B5: 84, + 0x5B6: 84, + 0x5B7: 84, + 0x5B8: 84, + 0x5B9: 84, + 0x5BA: 84, + 0x5BB: 84, + 0x5BC: 84, + 0x5BD: 84, + 0x5BF: 84, + 0x5C1: 84, + 0x5C2: 84, + 0x5C4: 84, + 0x5C5: 84, + 0x5C7: 84, + 0x610: 84, + 0x611: 84, + 0x612: 84, + 0x613: 84, + 0x614: 84, + 0x615: 84, + 0x616: 84, + 0x617: 84, + 0x618: 84, + 0x619: 84, + 0x61A: 84, + 0x61C: 84, + 0x620: 68, + 0x622: 82, + 0x623: 82, + 0x624: 82, + 0x625: 82, + 0x626: 68, + 0x627: 82, + 0x628: 68, + 0x629: 82, + 0x62A: 68, + 0x62B: 68, + 0x62C: 68, + 0x62D: 68, + 0x62E: 68, + 0x62F: 82, + 0x630: 82, + 0x631: 82, + 0x632: 82, + 0x633: 68, + 0x634: 68, + 0x635: 68, + 0x636: 68, + 0x637: 68, + 0x638: 68, + 0x639: 68, + 0x63A: 68, + 0x63B: 68, + 0x63C: 68, + 0x63D: 68, + 0x63E: 68, + 0x63F: 68, + 0x640: 67, + 0x641: 68, + 0x642: 68, + 0x643: 68, + 0x644: 68, + 0x645: 68, + 0x646: 68, + 0x647: 68, + 0x648: 82, + 0x649: 68, + 0x64A: 68, + 0x64B: 84, + 0x64C: 84, + 0x64D: 84, + 0x64E: 84, + 0x64F: 84, + 0x650: 84, + 0x651: 84, + 0x652: 84, + 0x653: 84, + 0x654: 84, + 0x655: 84, + 0x656: 84, + 0x657: 84, + 0x658: 84, + 0x659: 84, + 0x65A: 84, + 0x65B: 84, + 0x65C: 84, + 0x65D: 84, + 0x65E: 84, + 0x65F: 84, + 0x66E: 68, + 0x66F: 68, + 0x670: 84, + 0x671: 82, + 0x672: 82, + 0x673: 82, + 0x675: 82, + 0x676: 82, + 0x677: 82, + 0x678: 68, + 0x679: 68, + 0x67A: 68, + 0x67B: 68, + 0x67C: 68, + 0x67D: 68, + 0x67E: 68, + 0x67F: 68, + 0x680: 68, + 0x681: 68, + 0x682: 68, + 0x683: 68, + 0x684: 68, + 0x685: 68, + 0x686: 68, + 0x687: 68, + 0x688: 82, + 0x689: 82, + 0x68A: 82, + 0x68B: 82, + 0x68C: 82, + 0x68D: 82, + 0x68E: 82, + 0x68F: 82, + 0x690: 82, + 0x691: 82, + 0x692: 82, + 0x693: 82, + 0x694: 82, + 0x695: 82, + 0x696: 82, + 0x697: 82, + 0x698: 82, + 0x699: 82, + 0x69A: 68, + 0x69B: 68, + 0x69C: 68, + 0x69D: 68, + 0x69E: 68, + 0x69F: 68, + 0x6A0: 68, + 0x6A1: 68, + 0x6A2: 68, + 0x6A3: 68, + 0x6A4: 68, + 0x6A5: 68, + 0x6A6: 68, + 0x6A7: 68, + 0x6A8: 68, + 0x6A9: 68, + 0x6AA: 68, + 0x6AB: 68, + 0x6AC: 68, + 0x6AD: 68, + 0x6AE: 68, + 0x6AF: 68, + 0x6B0: 68, + 0x6B1: 68, + 0x6B2: 68, + 0x6B3: 68, + 0x6B4: 68, + 0x6B5: 68, + 0x6B6: 68, + 0x6B7: 68, + 0x6B8: 68, + 0x6B9: 68, + 0x6BA: 68, + 0x6BB: 68, + 0x6BC: 68, + 0x6BD: 68, + 0x6BE: 68, + 0x6BF: 68, + 0x6C0: 82, + 0x6C1: 68, + 0x6C2: 68, + 0x6C3: 82, + 0x6C4: 82, + 0x6C5: 82, + 0x6C6: 82, + 0x6C7: 82, + 0x6C8: 82, + 0x6C9: 82, + 0x6CA: 82, + 0x6CB: 82, + 0x6CC: 68, + 0x6CD: 82, + 0x6CE: 68, + 0x6CF: 82, + 0x6D0: 68, + 0x6D1: 68, + 0x6D2: 82, + 0x6D3: 82, + 0x6D5: 82, + 0x6D6: 84, + 0x6D7: 84, + 0x6D8: 84, + 0x6D9: 84, + 0x6DA: 84, + 0x6DB: 84, + 0x6DC: 84, + 0x6DF: 84, + 0x6E0: 84, + 0x6E1: 84, + 0x6E2: 84, + 0x6E3: 84, + 0x6E4: 84, + 0x6E7: 84, + 0x6E8: 84, + 0x6EA: 84, + 
0x6EB: 84, + 0x6EC: 84, + 0x6ED: 84, + 0x6EE: 82, + 0x6EF: 82, + 0x6FA: 68, + 0x6FB: 68, + 0x6FC: 68, + 0x6FF: 68, + 0x70F: 84, + 0x710: 82, + 0x711: 84, + 0x712: 68, + 0x713: 68, + 0x714: 68, + 0x715: 82, + 0x716: 82, + 0x717: 82, + 0x718: 82, + 0x719: 82, + 0x71A: 68, + 0x71B: 68, + 0x71C: 68, + 0x71D: 68, + 0x71E: 82, + 0x71F: 68, + 0x720: 68, + 0x721: 68, + 0x722: 68, + 0x723: 68, + 0x724: 68, + 0x725: 68, + 0x726: 68, + 0x727: 68, + 0x728: 82, + 0x729: 68, + 0x72A: 82, + 0x72B: 68, + 0x72C: 82, + 0x72D: 68, + 0x72E: 68, + 0x72F: 82, + 0x730: 84, + 0x731: 84, + 0x732: 84, + 0x733: 84, + 0x734: 84, + 0x735: 84, + 0x736: 84, + 0x737: 84, + 0x738: 84, + 0x739: 84, + 0x73A: 84, + 0x73B: 84, + 0x73C: 84, + 0x73D: 84, + 0x73E: 84, + 0x73F: 84, + 0x740: 84, + 0x741: 84, + 0x742: 84, + 0x743: 84, + 0x744: 84, + 0x745: 84, + 0x746: 84, + 0x747: 84, + 0x748: 84, + 0x749: 84, + 0x74A: 84, + 0x74D: 82, + 0x74E: 68, + 0x74F: 68, + 0x750: 68, + 0x751: 68, + 0x752: 68, + 0x753: 68, + 0x754: 68, + 0x755: 68, + 0x756: 68, + 0x757: 68, + 0x758: 68, + 0x759: 82, + 0x75A: 82, + 0x75B: 82, + 0x75C: 68, + 0x75D: 68, + 0x75E: 68, + 0x75F: 68, + 0x760: 68, + 0x761: 68, + 0x762: 68, + 0x763: 68, + 0x764: 68, + 0x765: 68, + 0x766: 68, + 0x767: 68, + 0x768: 68, + 0x769: 68, + 0x76A: 68, + 0x76B: 82, + 0x76C: 82, + 0x76D: 68, + 0x76E: 68, + 0x76F: 68, + 0x770: 68, + 0x771: 82, + 0x772: 68, + 0x773: 82, + 0x774: 82, + 0x775: 68, + 0x776: 68, + 0x777: 68, + 0x778: 82, + 0x779: 82, + 0x77A: 68, + 0x77B: 68, + 0x77C: 68, + 0x77D: 68, + 0x77E: 68, + 0x77F: 68, + 0x7A6: 84, + 0x7A7: 84, + 0x7A8: 84, + 0x7A9: 84, + 0x7AA: 84, + 0x7AB: 84, + 0x7AC: 84, + 0x7AD: 84, + 0x7AE: 84, + 0x7AF: 84, + 0x7B0: 84, + 0x7CA: 68, + 0x7CB: 68, + 0x7CC: 68, + 0x7CD: 68, + 0x7CE: 68, + 0x7CF: 68, + 0x7D0: 68, + 0x7D1: 68, + 0x7D2: 68, + 0x7D3: 68, + 0x7D4: 68, + 0x7D5: 68, + 0x7D6: 68, + 0x7D7: 68, + 0x7D8: 68, + 0x7D9: 68, + 0x7DA: 68, + 0x7DB: 68, + 0x7DC: 68, + 0x7DD: 68, + 0x7DE: 68, + 0x7DF: 68, + 0x7E0: 68, + 0x7E1: 68, + 0x7E2: 68, + 0x7E3: 68, + 0x7E4: 68, + 0x7E5: 68, + 0x7E6: 68, + 0x7E7: 68, + 0x7E8: 68, + 0x7E9: 68, + 0x7EA: 68, + 0x7EB: 84, + 0x7EC: 84, + 0x7ED: 84, + 0x7EE: 84, + 0x7EF: 84, + 0x7F0: 84, + 0x7F1: 84, + 0x7F2: 84, + 0x7F3: 84, + 0x7FA: 67, + 0x7FD: 84, + 0x816: 84, + 0x817: 84, + 0x818: 84, + 0x819: 84, + 0x81B: 84, + 0x81C: 84, + 0x81D: 84, + 0x81E: 84, + 0x81F: 84, + 0x820: 84, + 0x821: 84, + 0x822: 84, + 0x823: 84, + 0x825: 84, + 0x826: 84, + 0x827: 84, + 0x829: 84, + 0x82A: 84, + 0x82B: 84, + 0x82C: 84, + 0x82D: 84, + 0x840: 82, + 0x841: 68, + 0x842: 68, + 0x843: 68, + 0x844: 68, + 0x845: 68, + 0x846: 82, + 0x847: 82, + 0x848: 68, + 0x849: 82, + 0x84A: 68, + 0x84B: 68, + 0x84C: 68, + 0x84D: 68, + 0x84E: 68, + 0x84F: 68, + 0x850: 68, + 0x851: 68, + 0x852: 68, + 0x853: 68, + 0x854: 82, + 0x855: 68, + 0x856: 82, + 0x857: 82, + 0x858: 82, + 0x859: 84, + 0x85A: 84, + 0x85B: 84, + 0x860: 68, + 0x862: 68, + 0x863: 68, + 0x864: 68, + 0x865: 68, + 0x867: 82, + 0x868: 68, + 0x869: 82, + 0x86A: 82, + 0x870: 82, + 0x871: 82, + 0x872: 82, + 0x873: 82, + 0x874: 82, + 0x875: 82, + 0x876: 82, + 0x877: 82, + 0x878: 82, + 0x879: 82, + 0x87A: 82, + 0x87B: 82, + 0x87C: 82, + 0x87D: 82, + 0x87E: 82, + 0x87F: 82, + 0x880: 82, + 0x881: 82, + 0x882: 82, + 0x883: 67, + 0x884: 67, + 0x885: 67, + 0x886: 68, + 0x889: 68, + 0x88A: 68, + 0x88B: 68, + 0x88C: 68, + 0x88D: 68, + 0x88E: 82, + 0x897: 84, + 0x898: 84, + 0x899: 84, + 0x89A: 84, + 0x89B: 84, + 0x89C: 84, + 0x89D: 84, + 0x89E: 84, + 0x89F: 84, + 0x8A0: 68, + 0x8A1: 68, + 
0x8A2: 68, + 0x8A3: 68, + 0x8A4: 68, + 0x8A5: 68, + 0x8A6: 68, + 0x8A7: 68, + 0x8A8: 68, + 0x8A9: 68, + 0x8AA: 82, + 0x8AB: 82, + 0x8AC: 82, + 0x8AE: 82, + 0x8AF: 68, + 0x8B0: 68, + 0x8B1: 82, + 0x8B2: 82, + 0x8B3: 68, + 0x8B4: 68, + 0x8B5: 68, + 0x8B6: 68, + 0x8B7: 68, + 0x8B8: 68, + 0x8B9: 82, + 0x8BA: 68, + 0x8BB: 68, + 0x8BC: 68, + 0x8BD: 68, + 0x8BE: 68, + 0x8BF: 68, + 0x8C0: 68, + 0x8C1: 68, + 0x8C2: 68, + 0x8C3: 68, + 0x8C4: 68, + 0x8C5: 68, + 0x8C6: 68, + 0x8C7: 68, + 0x8C8: 68, + 0x8CA: 84, + 0x8CB: 84, + 0x8CC: 84, + 0x8CD: 84, + 0x8CE: 84, + 0x8CF: 84, + 0x8D0: 84, + 0x8D1: 84, + 0x8D2: 84, + 0x8D3: 84, + 0x8D4: 84, + 0x8D5: 84, + 0x8D6: 84, + 0x8D7: 84, + 0x8D8: 84, + 0x8D9: 84, + 0x8DA: 84, + 0x8DB: 84, + 0x8DC: 84, + 0x8DD: 84, + 0x8DE: 84, + 0x8DF: 84, + 0x8E0: 84, + 0x8E1: 84, + 0x8E3: 84, + 0x8E4: 84, + 0x8E5: 84, + 0x8E6: 84, + 0x8E7: 84, + 0x8E8: 84, + 0x8E9: 84, + 0x8EA: 84, + 0x8EB: 84, + 0x8EC: 84, + 0x8ED: 84, + 0x8EE: 84, + 0x8EF: 84, + 0x8F0: 84, + 0x8F1: 84, + 0x8F2: 84, + 0x8F3: 84, + 0x8F4: 84, + 0x8F5: 84, + 0x8F6: 84, + 0x8F7: 84, + 0x8F8: 84, + 0x8F9: 84, + 0x8FA: 84, + 0x8FB: 84, + 0x8FC: 84, + 0x8FD: 84, + 0x8FE: 84, + 0x8FF: 84, + 0x900: 84, + 0x901: 84, + 0x902: 84, + 0x93A: 84, + 0x93C: 84, + 0x941: 84, + 0x942: 84, + 0x943: 84, + 0x944: 84, + 0x945: 84, + 0x946: 84, + 0x947: 84, + 0x948: 84, + 0x94D: 84, + 0x951: 84, + 0x952: 84, + 0x953: 84, + 0x954: 84, + 0x955: 84, + 0x956: 84, + 0x957: 84, + 0x962: 84, + 0x963: 84, + 0x981: 84, + 0x9BC: 84, + 0x9C1: 84, + 0x9C2: 84, + 0x9C3: 84, + 0x9C4: 84, + 0x9CD: 84, + 0x9E2: 84, + 0x9E3: 84, + 0x9FE: 84, + 0xA01: 84, + 0xA02: 84, + 0xA3C: 84, + 0xA41: 84, + 0xA42: 84, + 0xA47: 84, + 0xA48: 84, + 0xA4B: 84, + 0xA4C: 84, + 0xA4D: 84, + 0xA51: 84, + 0xA70: 84, + 0xA71: 84, + 0xA75: 84, + 0xA81: 84, + 0xA82: 84, + 0xABC: 84, + 0xAC1: 84, + 0xAC2: 84, + 0xAC3: 84, + 0xAC4: 84, + 0xAC5: 84, + 0xAC7: 84, + 0xAC8: 84, + 0xACD: 84, + 0xAE2: 84, + 0xAE3: 84, + 0xAFA: 84, + 0xAFB: 84, + 0xAFC: 84, + 0xAFD: 84, + 0xAFE: 84, + 0xAFF: 84, + 0xB01: 84, + 0xB3C: 84, + 0xB3F: 84, + 0xB41: 84, + 0xB42: 84, + 0xB43: 84, + 0xB44: 84, + 0xB4D: 84, + 0xB55: 84, + 0xB56: 84, + 0xB62: 84, + 0xB63: 84, + 0xB82: 84, + 0xBC0: 84, + 0xBCD: 84, + 0xC00: 84, + 0xC04: 84, + 0xC3C: 84, + 0xC3E: 84, + 0xC3F: 84, + 0xC40: 84, + 0xC46: 84, + 0xC47: 84, + 0xC48: 84, + 0xC4A: 84, + 0xC4B: 84, + 0xC4C: 84, + 0xC4D: 84, + 0xC55: 84, + 0xC56: 84, + 0xC62: 84, + 0xC63: 84, + 0xC81: 84, + 0xCBC: 84, + 0xCBF: 84, + 0xCC6: 84, + 0xCCC: 84, + 0xCCD: 84, + 0xCE2: 84, + 0xCE3: 84, + 0xD00: 84, + 0xD01: 84, + 0xD3B: 84, + 0xD3C: 84, + 0xD41: 84, + 0xD42: 84, + 0xD43: 84, + 0xD44: 84, + 0xD4D: 84, + 0xD62: 84, + 0xD63: 84, + 0xD81: 84, + 0xDCA: 84, + 0xDD2: 84, + 0xDD3: 84, + 0xDD4: 84, + 0xDD6: 84, + 0xE31: 84, + 0xE34: 84, + 0xE35: 84, + 0xE36: 84, + 0xE37: 84, + 0xE38: 84, + 0xE39: 84, + 0xE3A: 84, + 0xE47: 84, + 0xE48: 84, + 0xE49: 84, + 0xE4A: 84, + 0xE4B: 84, + 0xE4C: 84, + 0xE4D: 84, + 0xE4E: 84, + 0xEB1: 84, + 0xEB4: 84, + 0xEB5: 84, + 0xEB6: 84, + 0xEB7: 84, + 0xEB8: 84, + 0xEB9: 84, + 0xEBA: 84, + 0xEBB: 84, + 0xEBC: 84, + 0xEC8: 84, + 0xEC9: 84, + 0xECA: 84, + 0xECB: 84, + 0xECC: 84, + 0xECD: 84, + 0xECE: 84, + 0xF18: 84, + 0xF19: 84, + 0xF35: 84, + 0xF37: 84, + 0xF39: 84, + 0xF71: 84, + 0xF72: 84, + 0xF73: 84, + 0xF74: 84, + 0xF75: 84, + 0xF76: 84, + 0xF77: 84, + 0xF78: 84, + 0xF79: 84, + 0xF7A: 84, + 0xF7B: 84, + 0xF7C: 84, + 0xF7D: 84, + 0xF7E: 84, + 0xF80: 84, + 0xF81: 84, + 0xF82: 84, + 0xF83: 84, + 0xF84: 84, + 0xF86: 84, + 0xF87: 84, + 
0xF8D: 84, + 0xF8E: 84, + 0xF8F: 84, + 0xF90: 84, + 0xF91: 84, + 0xF92: 84, + 0xF93: 84, + 0xF94: 84, + 0xF95: 84, + 0xF96: 84, + 0xF97: 84, + 0xF99: 84, + 0xF9A: 84, + 0xF9B: 84, + 0xF9C: 84, + 0xF9D: 84, + 0xF9E: 84, + 0xF9F: 84, + 0xFA0: 84, + 0xFA1: 84, + 0xFA2: 84, + 0xFA3: 84, + 0xFA4: 84, + 0xFA5: 84, + 0xFA6: 84, + 0xFA7: 84, + 0xFA8: 84, + 0xFA9: 84, + 0xFAA: 84, + 0xFAB: 84, + 0xFAC: 84, + 0xFAD: 84, + 0xFAE: 84, + 0xFAF: 84, + 0xFB0: 84, + 0xFB1: 84, + 0xFB2: 84, + 0xFB3: 84, + 0xFB4: 84, + 0xFB5: 84, + 0xFB6: 84, + 0xFB7: 84, + 0xFB8: 84, + 0xFB9: 84, + 0xFBA: 84, + 0xFBB: 84, + 0xFBC: 84, + 0xFC6: 84, + 0x102D: 84, + 0x102E: 84, + 0x102F: 84, + 0x1030: 84, + 0x1032: 84, + 0x1033: 84, + 0x1034: 84, + 0x1035: 84, + 0x1036: 84, + 0x1037: 84, + 0x1039: 84, + 0x103A: 84, + 0x103D: 84, + 0x103E: 84, + 0x1058: 84, + 0x1059: 84, + 0x105E: 84, + 0x105F: 84, + 0x1060: 84, + 0x1071: 84, + 0x1072: 84, + 0x1073: 84, + 0x1074: 84, + 0x1082: 84, + 0x1085: 84, + 0x1086: 84, + 0x108D: 84, + 0x109D: 84, + 0x135D: 84, + 0x135E: 84, + 0x135F: 84, + 0x1712: 84, + 0x1713: 84, + 0x1714: 84, + 0x1732: 84, + 0x1733: 84, + 0x1752: 84, + 0x1753: 84, + 0x1772: 84, + 0x1773: 84, + 0x17B4: 84, + 0x17B5: 84, + 0x17B7: 84, + 0x17B8: 84, + 0x17B9: 84, + 0x17BA: 84, + 0x17BB: 84, + 0x17BC: 84, + 0x17BD: 84, + 0x17C6: 84, + 0x17C9: 84, + 0x17CA: 84, + 0x17CB: 84, + 0x17CC: 84, + 0x17CD: 84, + 0x17CE: 84, + 0x17CF: 84, + 0x17D0: 84, + 0x17D1: 84, + 0x17D2: 84, + 0x17D3: 84, + 0x17DD: 84, + 0x1807: 68, + 0x180A: 67, + 0x180B: 84, + 0x180C: 84, + 0x180D: 84, + 0x180F: 84, + 0x1820: 68, + 0x1821: 68, + 0x1822: 68, + 0x1823: 68, + 0x1824: 68, + 0x1825: 68, + 0x1826: 68, + 0x1827: 68, + 0x1828: 68, + 0x1829: 68, + 0x182A: 68, + 0x182B: 68, + 0x182C: 68, + 0x182D: 68, + 0x182E: 68, + 0x182F: 68, + 0x1830: 68, + 0x1831: 68, + 0x1832: 68, + 0x1833: 68, + 0x1834: 68, + 0x1835: 68, + 0x1836: 68, + 0x1837: 68, + 0x1838: 68, + 0x1839: 68, + 0x183A: 68, + 0x183B: 68, + 0x183C: 68, + 0x183D: 68, + 0x183E: 68, + 0x183F: 68, + 0x1840: 68, + 0x1841: 68, + 0x1842: 68, + 0x1843: 68, + 0x1844: 68, + 0x1845: 68, + 0x1846: 68, + 0x1847: 68, + 0x1848: 68, + 0x1849: 68, + 0x184A: 68, + 0x184B: 68, + 0x184C: 68, + 0x184D: 68, + 0x184E: 68, + 0x184F: 68, + 0x1850: 68, + 0x1851: 68, + 0x1852: 68, + 0x1853: 68, + 0x1854: 68, + 0x1855: 68, + 0x1856: 68, + 0x1857: 68, + 0x1858: 68, + 0x1859: 68, + 0x185A: 68, + 0x185B: 68, + 0x185C: 68, + 0x185D: 68, + 0x185E: 68, + 0x185F: 68, + 0x1860: 68, + 0x1861: 68, + 0x1862: 68, + 0x1863: 68, + 0x1864: 68, + 0x1865: 68, + 0x1866: 68, + 0x1867: 68, + 0x1868: 68, + 0x1869: 68, + 0x186A: 68, + 0x186B: 68, + 0x186C: 68, + 0x186D: 68, + 0x186E: 68, + 0x186F: 68, + 0x1870: 68, + 0x1871: 68, + 0x1872: 68, + 0x1873: 68, + 0x1874: 68, + 0x1875: 68, + 0x1876: 68, + 0x1877: 68, + 0x1878: 68, + 0x1885: 84, + 0x1886: 84, + 0x1887: 68, + 0x1888: 68, + 0x1889: 68, + 0x188A: 68, + 0x188B: 68, + 0x188C: 68, + 0x188D: 68, + 0x188E: 68, + 0x188F: 68, + 0x1890: 68, + 0x1891: 68, + 0x1892: 68, + 0x1893: 68, + 0x1894: 68, + 0x1895: 68, + 0x1896: 68, + 0x1897: 68, + 0x1898: 68, + 0x1899: 68, + 0x189A: 68, + 0x189B: 68, + 0x189C: 68, + 0x189D: 68, + 0x189E: 68, + 0x189F: 68, + 0x18A0: 68, + 0x18A1: 68, + 0x18A2: 68, + 0x18A3: 68, + 0x18A4: 68, + 0x18A5: 68, + 0x18A6: 68, + 0x18A7: 68, + 0x18A8: 68, + 0x18A9: 84, + 0x18AA: 68, + 0x1920: 84, + 0x1921: 84, + 0x1922: 84, + 0x1927: 84, + 0x1928: 84, + 0x1932: 84, + 0x1939: 84, + 0x193A: 84, + 0x193B: 84, + 0x1A17: 84, + 0x1A18: 84, + 0x1A1B: 84, + 0x1A56: 84, + 0x1A58: 84, + 
0x1A59: 84, + 0x1A5A: 84, + 0x1A5B: 84, + 0x1A5C: 84, + 0x1A5D: 84, + 0x1A5E: 84, + 0x1A60: 84, + 0x1A62: 84, + 0x1A65: 84, + 0x1A66: 84, + 0x1A67: 84, + 0x1A68: 84, + 0x1A69: 84, + 0x1A6A: 84, + 0x1A6B: 84, + 0x1A6C: 84, + 0x1A73: 84, + 0x1A74: 84, + 0x1A75: 84, + 0x1A76: 84, + 0x1A77: 84, + 0x1A78: 84, + 0x1A79: 84, + 0x1A7A: 84, + 0x1A7B: 84, + 0x1A7C: 84, + 0x1A7F: 84, + 0x1AB0: 84, + 0x1AB1: 84, + 0x1AB2: 84, + 0x1AB3: 84, + 0x1AB4: 84, + 0x1AB5: 84, + 0x1AB6: 84, + 0x1AB7: 84, + 0x1AB8: 84, + 0x1AB9: 84, + 0x1ABA: 84, + 0x1ABB: 84, + 0x1ABC: 84, + 0x1ABD: 84, + 0x1ABE: 84, + 0x1ABF: 84, + 0x1AC0: 84, + 0x1AC1: 84, + 0x1AC2: 84, + 0x1AC3: 84, + 0x1AC4: 84, + 0x1AC5: 84, + 0x1AC6: 84, + 0x1AC7: 84, + 0x1AC8: 84, + 0x1AC9: 84, + 0x1ACA: 84, + 0x1ACB: 84, + 0x1ACC: 84, + 0x1ACD: 84, + 0x1ACE: 84, + 0x1B00: 84, + 0x1B01: 84, + 0x1B02: 84, + 0x1B03: 84, + 0x1B34: 84, + 0x1B36: 84, + 0x1B37: 84, + 0x1B38: 84, + 0x1B39: 84, + 0x1B3A: 84, + 0x1B3C: 84, + 0x1B42: 84, + 0x1B6B: 84, + 0x1B6C: 84, + 0x1B6D: 84, + 0x1B6E: 84, + 0x1B6F: 84, + 0x1B70: 84, + 0x1B71: 84, + 0x1B72: 84, + 0x1B73: 84, + 0x1B80: 84, + 0x1B81: 84, + 0x1BA2: 84, + 0x1BA3: 84, + 0x1BA4: 84, + 0x1BA5: 84, + 0x1BA8: 84, + 0x1BA9: 84, + 0x1BAB: 84, + 0x1BAC: 84, + 0x1BAD: 84, + 0x1BE6: 84, + 0x1BE8: 84, + 0x1BE9: 84, + 0x1BED: 84, + 0x1BEF: 84, + 0x1BF0: 84, + 0x1BF1: 84, + 0x1C2C: 84, + 0x1C2D: 84, + 0x1C2E: 84, + 0x1C2F: 84, + 0x1C30: 84, + 0x1C31: 84, + 0x1C32: 84, + 0x1C33: 84, + 0x1C36: 84, + 0x1C37: 84, + 0x1CD0: 84, + 0x1CD1: 84, + 0x1CD2: 84, + 0x1CD4: 84, + 0x1CD5: 84, + 0x1CD6: 84, + 0x1CD7: 84, + 0x1CD8: 84, + 0x1CD9: 84, + 0x1CDA: 84, + 0x1CDB: 84, + 0x1CDC: 84, + 0x1CDD: 84, + 0x1CDE: 84, + 0x1CDF: 84, + 0x1CE0: 84, + 0x1CE2: 84, + 0x1CE3: 84, + 0x1CE4: 84, + 0x1CE5: 84, + 0x1CE6: 84, + 0x1CE7: 84, + 0x1CE8: 84, + 0x1CED: 84, + 0x1CF4: 84, + 0x1CF8: 84, + 0x1CF9: 84, + 0x1DC0: 84, + 0x1DC1: 84, + 0x1DC2: 84, + 0x1DC3: 84, + 0x1DC4: 84, + 0x1DC5: 84, + 0x1DC6: 84, + 0x1DC7: 84, + 0x1DC8: 84, + 0x1DC9: 84, + 0x1DCA: 84, + 0x1DCB: 84, + 0x1DCC: 84, + 0x1DCD: 84, + 0x1DCE: 84, + 0x1DCF: 84, + 0x1DD0: 84, + 0x1DD1: 84, + 0x1DD2: 84, + 0x1DD3: 84, + 0x1DD4: 84, + 0x1DD5: 84, + 0x1DD6: 84, + 0x1DD7: 84, + 0x1DD8: 84, + 0x1DD9: 84, + 0x1DDA: 84, + 0x1DDB: 84, + 0x1DDC: 84, + 0x1DDD: 84, + 0x1DDE: 84, + 0x1DDF: 84, + 0x1DE0: 84, + 0x1DE1: 84, + 0x1DE2: 84, + 0x1DE3: 84, + 0x1DE4: 84, + 0x1DE5: 84, + 0x1DE6: 84, + 0x1DE7: 84, + 0x1DE8: 84, + 0x1DE9: 84, + 0x1DEA: 84, + 0x1DEB: 84, + 0x1DEC: 84, + 0x1DED: 84, + 0x1DEE: 84, + 0x1DEF: 84, + 0x1DF0: 84, + 0x1DF1: 84, + 0x1DF2: 84, + 0x1DF3: 84, + 0x1DF4: 84, + 0x1DF5: 84, + 0x1DF6: 84, + 0x1DF7: 84, + 0x1DF8: 84, + 0x1DF9: 84, + 0x1DFA: 84, + 0x1DFB: 84, + 0x1DFC: 84, + 0x1DFD: 84, + 0x1DFE: 84, + 0x1DFF: 84, + 0x200B: 84, + 0x200D: 67, + 0x200E: 84, + 0x200F: 84, + 0x202A: 84, + 0x202B: 84, + 0x202C: 84, + 0x202D: 84, + 0x202E: 84, + 0x2060: 84, + 0x2061: 84, + 0x2062: 84, + 0x2063: 84, + 0x2064: 84, + 0x206A: 84, + 0x206B: 84, + 0x206C: 84, + 0x206D: 84, + 0x206E: 84, + 0x206F: 84, + 0x20D0: 84, + 0x20D1: 84, + 0x20D2: 84, + 0x20D3: 84, + 0x20D4: 84, + 0x20D5: 84, + 0x20D6: 84, + 0x20D7: 84, + 0x20D8: 84, + 0x20D9: 84, + 0x20DA: 84, + 0x20DB: 84, + 0x20DC: 84, + 0x20DD: 84, + 0x20DE: 84, + 0x20DF: 84, + 0x20E0: 84, + 0x20E1: 84, + 0x20E2: 84, + 0x20E3: 84, + 0x20E4: 84, + 0x20E5: 84, + 0x20E6: 84, + 0x20E7: 84, + 0x20E8: 84, + 0x20E9: 84, + 0x20EA: 84, + 0x20EB: 84, + 0x20EC: 84, + 0x20ED: 84, + 0x20EE: 84, + 0x20EF: 84, + 0x20F0: 84, + 0x2CEF: 84, + 0x2CF0: 84, + 0x2CF1: 84, 
+ 0x2D7F: 84, + 0x2DE0: 84, + 0x2DE1: 84, + 0x2DE2: 84, + 0x2DE3: 84, + 0x2DE4: 84, + 0x2DE5: 84, + 0x2DE6: 84, + 0x2DE7: 84, + 0x2DE8: 84, + 0x2DE9: 84, + 0x2DEA: 84, + 0x2DEB: 84, + 0x2DEC: 84, + 0x2DED: 84, + 0x2DEE: 84, + 0x2DEF: 84, + 0x2DF0: 84, + 0x2DF1: 84, + 0x2DF2: 84, + 0x2DF3: 84, + 0x2DF4: 84, + 0x2DF5: 84, + 0x2DF6: 84, + 0x2DF7: 84, + 0x2DF8: 84, + 0x2DF9: 84, + 0x2DFA: 84, + 0x2DFB: 84, + 0x2DFC: 84, + 0x2DFD: 84, + 0x2DFE: 84, + 0x2DFF: 84, + 0x302A: 84, + 0x302B: 84, + 0x302C: 84, + 0x302D: 84, + 0x3099: 84, + 0x309A: 84, + 0xA66F: 84, + 0xA670: 84, + 0xA671: 84, + 0xA672: 84, + 0xA674: 84, + 0xA675: 84, + 0xA676: 84, + 0xA677: 84, + 0xA678: 84, + 0xA679: 84, + 0xA67A: 84, + 0xA67B: 84, + 0xA67C: 84, + 0xA67D: 84, + 0xA69E: 84, + 0xA69F: 84, + 0xA6F0: 84, + 0xA6F1: 84, + 0xA802: 84, + 0xA806: 84, + 0xA80B: 84, + 0xA825: 84, + 0xA826: 84, + 0xA82C: 84, + 0xA840: 68, + 0xA841: 68, + 0xA842: 68, + 0xA843: 68, + 0xA844: 68, + 0xA845: 68, + 0xA846: 68, + 0xA847: 68, + 0xA848: 68, + 0xA849: 68, + 0xA84A: 68, + 0xA84B: 68, + 0xA84C: 68, + 0xA84D: 68, + 0xA84E: 68, + 0xA84F: 68, + 0xA850: 68, + 0xA851: 68, + 0xA852: 68, + 0xA853: 68, + 0xA854: 68, + 0xA855: 68, + 0xA856: 68, + 0xA857: 68, + 0xA858: 68, + 0xA859: 68, + 0xA85A: 68, + 0xA85B: 68, + 0xA85C: 68, + 0xA85D: 68, + 0xA85E: 68, + 0xA85F: 68, + 0xA860: 68, + 0xA861: 68, + 0xA862: 68, + 0xA863: 68, + 0xA864: 68, + 0xA865: 68, + 0xA866: 68, + 0xA867: 68, + 0xA868: 68, + 0xA869: 68, + 0xA86A: 68, + 0xA86B: 68, + 0xA86C: 68, + 0xA86D: 68, + 0xA86E: 68, + 0xA86F: 68, + 0xA870: 68, + 0xA871: 68, + 0xA872: 76, + 0xA8C4: 84, + 0xA8C5: 84, + 0xA8E0: 84, + 0xA8E1: 84, + 0xA8E2: 84, + 0xA8E3: 84, + 0xA8E4: 84, + 0xA8E5: 84, + 0xA8E6: 84, + 0xA8E7: 84, + 0xA8E8: 84, + 0xA8E9: 84, + 0xA8EA: 84, + 0xA8EB: 84, + 0xA8EC: 84, + 0xA8ED: 84, + 0xA8EE: 84, + 0xA8EF: 84, + 0xA8F0: 84, + 0xA8F1: 84, + 0xA8FF: 84, + 0xA926: 84, + 0xA927: 84, + 0xA928: 84, + 0xA929: 84, + 0xA92A: 84, + 0xA92B: 84, + 0xA92C: 84, + 0xA92D: 84, + 0xA947: 84, + 0xA948: 84, + 0xA949: 84, + 0xA94A: 84, + 0xA94B: 84, + 0xA94C: 84, + 0xA94D: 84, + 0xA94E: 84, + 0xA94F: 84, + 0xA950: 84, + 0xA951: 84, + 0xA980: 84, + 0xA981: 84, + 0xA982: 84, + 0xA9B3: 84, + 0xA9B6: 84, + 0xA9B7: 84, + 0xA9B8: 84, + 0xA9B9: 84, + 0xA9BC: 84, + 0xA9BD: 84, + 0xA9E5: 84, + 0xAA29: 84, + 0xAA2A: 84, + 0xAA2B: 84, + 0xAA2C: 84, + 0xAA2D: 84, + 0xAA2E: 84, + 0xAA31: 84, + 0xAA32: 84, + 0xAA35: 84, + 0xAA36: 84, + 0xAA43: 84, + 0xAA4C: 84, + 0xAA7C: 84, + 0xAAB0: 84, + 0xAAB2: 84, + 0xAAB3: 84, + 0xAAB4: 84, + 0xAAB7: 84, + 0xAAB8: 84, + 0xAABE: 84, + 0xAABF: 84, + 0xAAC1: 84, + 0xAAEC: 84, + 0xAAED: 84, + 0xAAF6: 84, + 0xABE5: 84, + 0xABE8: 84, + 0xABED: 84, + 0xFB1E: 84, + 0xFE00: 84, + 0xFE01: 84, + 0xFE02: 84, + 0xFE03: 84, + 0xFE04: 84, + 0xFE05: 84, + 0xFE06: 84, + 0xFE07: 84, + 0xFE08: 84, + 0xFE09: 84, + 0xFE0A: 84, + 0xFE0B: 84, + 0xFE0C: 84, + 0xFE0D: 84, + 0xFE0E: 84, + 0xFE0F: 84, + 0xFE20: 84, + 0xFE21: 84, + 0xFE22: 84, + 0xFE23: 84, + 0xFE24: 84, + 0xFE25: 84, + 0xFE26: 84, + 0xFE27: 84, + 0xFE28: 84, + 0xFE29: 84, + 0xFE2A: 84, + 0xFE2B: 84, + 0xFE2C: 84, + 0xFE2D: 84, + 0xFE2E: 84, + 0xFE2F: 84, + 0xFEFF: 84, + 0xFFF9: 84, + 0xFFFA: 84, + 0xFFFB: 84, + 0x101FD: 84, + 0x102E0: 84, + 0x10376: 84, + 0x10377: 84, + 0x10378: 84, + 0x10379: 84, + 0x1037A: 84, + 0x10A01: 84, + 0x10A02: 84, + 0x10A03: 84, + 0x10A05: 84, + 0x10A06: 84, + 0x10A0C: 84, + 0x10A0D: 84, + 0x10A0E: 84, + 0x10A0F: 84, + 0x10A38: 84, + 0x10A39: 84, + 0x10A3A: 84, + 0x10A3F: 84, + 0x10AC0: 68, + 0x10AC1: 68, + 
0x10AC2: 68, + 0x10AC3: 68, + 0x10AC4: 68, + 0x10AC5: 82, + 0x10AC7: 82, + 0x10AC9: 82, + 0x10ACA: 82, + 0x10ACD: 76, + 0x10ACE: 82, + 0x10ACF: 82, + 0x10AD0: 82, + 0x10AD1: 82, + 0x10AD2: 82, + 0x10AD3: 68, + 0x10AD4: 68, + 0x10AD5: 68, + 0x10AD6: 68, + 0x10AD7: 76, + 0x10AD8: 68, + 0x10AD9: 68, + 0x10ADA: 68, + 0x10ADB: 68, + 0x10ADC: 68, + 0x10ADD: 82, + 0x10ADE: 68, + 0x10ADF: 68, + 0x10AE0: 68, + 0x10AE1: 82, + 0x10AE4: 82, + 0x10AE5: 84, + 0x10AE6: 84, + 0x10AEB: 68, + 0x10AEC: 68, + 0x10AED: 68, + 0x10AEE: 68, + 0x10AEF: 82, + 0x10B80: 68, + 0x10B81: 82, + 0x10B82: 68, + 0x10B83: 82, + 0x10B84: 82, + 0x10B85: 82, + 0x10B86: 68, + 0x10B87: 68, + 0x10B88: 68, + 0x10B89: 82, + 0x10B8A: 68, + 0x10B8B: 68, + 0x10B8C: 82, + 0x10B8D: 68, + 0x10B8E: 82, + 0x10B8F: 82, + 0x10B90: 68, + 0x10B91: 82, + 0x10BA9: 82, + 0x10BAA: 82, + 0x10BAB: 82, + 0x10BAC: 82, + 0x10BAD: 68, + 0x10BAE: 68, + 0x10D00: 76, + 0x10D01: 68, + 0x10D02: 68, + 0x10D03: 68, + 0x10D04: 68, + 0x10D05: 68, + 0x10D06: 68, + 0x10D07: 68, + 0x10D08: 68, + 0x10D09: 68, + 0x10D0A: 68, + 0x10D0B: 68, + 0x10D0C: 68, + 0x10D0D: 68, + 0x10D0E: 68, + 0x10D0F: 68, + 0x10D10: 68, + 0x10D11: 68, + 0x10D12: 68, + 0x10D13: 68, + 0x10D14: 68, + 0x10D15: 68, + 0x10D16: 68, + 0x10D17: 68, + 0x10D18: 68, + 0x10D19: 68, + 0x10D1A: 68, + 0x10D1B: 68, + 0x10D1C: 68, + 0x10D1D: 68, + 0x10D1E: 68, + 0x10D1F: 68, + 0x10D20: 68, + 0x10D21: 68, + 0x10D22: 82, + 0x10D23: 68, + 0x10D24: 84, + 0x10D25: 84, + 0x10D26: 84, + 0x10D27: 84, + 0x10D69: 84, + 0x10D6A: 84, + 0x10D6B: 84, + 0x10D6C: 84, + 0x10D6D: 84, + 0x10EAB: 84, + 0x10EAC: 84, + 0x10EC2: 82, + 0x10EC3: 68, + 0x10EC4: 68, + 0x10EFC: 84, + 0x10EFD: 84, + 0x10EFE: 84, + 0x10EFF: 84, + 0x10F30: 68, + 0x10F31: 68, + 0x10F32: 68, + 0x10F33: 82, + 0x10F34: 68, + 0x10F35: 68, + 0x10F36: 68, + 0x10F37: 68, + 0x10F38: 68, + 0x10F39: 68, + 0x10F3A: 68, + 0x10F3B: 68, + 0x10F3C: 68, + 0x10F3D: 68, + 0x10F3E: 68, + 0x10F3F: 68, + 0x10F40: 68, + 0x10F41: 68, + 0x10F42: 68, + 0x10F43: 68, + 0x10F44: 68, + 0x10F46: 84, + 0x10F47: 84, + 0x10F48: 84, + 0x10F49: 84, + 0x10F4A: 84, + 0x10F4B: 84, + 0x10F4C: 84, + 0x10F4D: 84, + 0x10F4E: 84, + 0x10F4F: 84, + 0x10F50: 84, + 0x10F51: 68, + 0x10F52: 68, + 0x10F53: 68, + 0x10F54: 82, + 0x10F70: 68, + 0x10F71: 68, + 0x10F72: 68, + 0x10F73: 68, + 0x10F74: 82, + 0x10F75: 82, + 0x10F76: 68, + 0x10F77: 68, + 0x10F78: 68, + 0x10F79: 68, + 0x10F7A: 68, + 0x10F7B: 68, + 0x10F7C: 68, + 0x10F7D: 68, + 0x10F7E: 68, + 0x10F7F: 68, + 0x10F80: 68, + 0x10F81: 68, + 0x10F82: 84, + 0x10F83: 84, + 0x10F84: 84, + 0x10F85: 84, + 0x10FB0: 68, + 0x10FB2: 68, + 0x10FB3: 68, + 0x10FB4: 82, + 0x10FB5: 82, + 0x10FB6: 82, + 0x10FB8: 68, + 0x10FB9: 82, + 0x10FBA: 82, + 0x10FBB: 68, + 0x10FBC: 68, + 0x10FBD: 82, + 0x10FBE: 68, + 0x10FBF: 68, + 0x10FC1: 68, + 0x10FC2: 82, + 0x10FC3: 82, + 0x10FC4: 68, + 0x10FC9: 82, + 0x10FCA: 68, + 0x10FCB: 76, + 0x11001: 84, + 0x11038: 84, + 0x11039: 84, + 0x1103A: 84, + 0x1103B: 84, + 0x1103C: 84, + 0x1103D: 84, + 0x1103E: 84, + 0x1103F: 84, + 0x11040: 84, + 0x11041: 84, + 0x11042: 84, + 0x11043: 84, + 0x11044: 84, + 0x11045: 84, + 0x11046: 84, + 0x11070: 84, + 0x11073: 84, + 0x11074: 84, + 0x1107F: 84, + 0x11080: 84, + 0x11081: 84, + 0x110B3: 84, + 0x110B4: 84, + 0x110B5: 84, + 0x110B6: 84, + 0x110B9: 84, + 0x110BA: 84, + 0x110C2: 84, + 0x11100: 84, + 0x11101: 84, + 0x11102: 84, + 0x11127: 84, + 0x11128: 84, + 0x11129: 84, + 0x1112A: 84, + 0x1112B: 84, + 0x1112D: 84, + 0x1112E: 84, + 0x1112F: 84, + 0x11130: 84, + 0x11131: 84, + 0x11132: 84, + 0x11133: 84, + 
0x11134: 84, + 0x11173: 84, + 0x11180: 84, + 0x11181: 84, + 0x111B6: 84, + 0x111B7: 84, + 0x111B8: 84, + 0x111B9: 84, + 0x111BA: 84, + 0x111BB: 84, + 0x111BC: 84, + 0x111BD: 84, + 0x111BE: 84, + 0x111C9: 84, + 0x111CA: 84, + 0x111CB: 84, + 0x111CC: 84, + 0x111CF: 84, + 0x1122F: 84, + 0x11230: 84, + 0x11231: 84, + 0x11234: 84, + 0x11236: 84, + 0x11237: 84, + 0x1123E: 84, + 0x11241: 84, + 0x112DF: 84, + 0x112E3: 84, + 0x112E4: 84, + 0x112E5: 84, + 0x112E6: 84, + 0x112E7: 84, + 0x112E8: 84, + 0x112E9: 84, + 0x112EA: 84, + 0x11300: 84, + 0x11301: 84, + 0x1133B: 84, + 0x1133C: 84, + 0x11340: 84, + 0x11366: 84, + 0x11367: 84, + 0x11368: 84, + 0x11369: 84, + 0x1136A: 84, + 0x1136B: 84, + 0x1136C: 84, + 0x11370: 84, + 0x11371: 84, + 0x11372: 84, + 0x11373: 84, + 0x11374: 84, + 0x113BB: 84, + 0x113BC: 84, + 0x113BD: 84, + 0x113BE: 84, + 0x113BF: 84, + 0x113C0: 84, + 0x113CE: 84, + 0x113D0: 84, + 0x113D2: 84, + 0x113E1: 84, + 0x113E2: 84, + 0x11438: 84, + 0x11439: 84, + 0x1143A: 84, + 0x1143B: 84, + 0x1143C: 84, + 0x1143D: 84, + 0x1143E: 84, + 0x1143F: 84, + 0x11442: 84, + 0x11443: 84, + 0x11444: 84, + 0x11446: 84, + 0x1145E: 84, + 0x114B3: 84, + 0x114B4: 84, + 0x114B5: 84, + 0x114B6: 84, + 0x114B7: 84, + 0x114B8: 84, + 0x114BA: 84, + 0x114BF: 84, + 0x114C0: 84, + 0x114C2: 84, + 0x114C3: 84, + 0x115B2: 84, + 0x115B3: 84, + 0x115B4: 84, + 0x115B5: 84, + 0x115BC: 84, + 0x115BD: 84, + 0x115BF: 84, + 0x115C0: 84, + 0x115DC: 84, + 0x115DD: 84, + 0x11633: 84, + 0x11634: 84, + 0x11635: 84, + 0x11636: 84, + 0x11637: 84, + 0x11638: 84, + 0x11639: 84, + 0x1163A: 84, + 0x1163D: 84, + 0x1163F: 84, + 0x11640: 84, + 0x116AB: 84, + 0x116AD: 84, + 0x116B0: 84, + 0x116B1: 84, + 0x116B2: 84, + 0x116B3: 84, + 0x116B4: 84, + 0x116B5: 84, + 0x116B7: 84, + 0x1171D: 84, + 0x1171F: 84, + 0x11722: 84, + 0x11723: 84, + 0x11724: 84, + 0x11725: 84, + 0x11727: 84, + 0x11728: 84, + 0x11729: 84, + 0x1172A: 84, + 0x1172B: 84, + 0x1182F: 84, + 0x11830: 84, + 0x11831: 84, + 0x11832: 84, + 0x11833: 84, + 0x11834: 84, + 0x11835: 84, + 0x11836: 84, + 0x11837: 84, + 0x11839: 84, + 0x1183A: 84, + 0x1193B: 84, + 0x1193C: 84, + 0x1193E: 84, + 0x11943: 84, + 0x119D4: 84, + 0x119D5: 84, + 0x119D6: 84, + 0x119D7: 84, + 0x119DA: 84, + 0x119DB: 84, + 0x119E0: 84, + 0x11A01: 84, + 0x11A02: 84, + 0x11A03: 84, + 0x11A04: 84, + 0x11A05: 84, + 0x11A06: 84, + 0x11A07: 84, + 0x11A08: 84, + 0x11A09: 84, + 0x11A0A: 84, + 0x11A33: 84, + 0x11A34: 84, + 0x11A35: 84, + 0x11A36: 84, + 0x11A37: 84, + 0x11A38: 84, + 0x11A3B: 84, + 0x11A3C: 84, + 0x11A3D: 84, + 0x11A3E: 84, + 0x11A47: 84, + 0x11A51: 84, + 0x11A52: 84, + 0x11A53: 84, + 0x11A54: 84, + 0x11A55: 84, + 0x11A56: 84, + 0x11A59: 84, + 0x11A5A: 84, + 0x11A5B: 84, + 0x11A8A: 84, + 0x11A8B: 84, + 0x11A8C: 84, + 0x11A8D: 84, + 0x11A8E: 84, + 0x11A8F: 84, + 0x11A90: 84, + 0x11A91: 84, + 0x11A92: 84, + 0x11A93: 84, + 0x11A94: 84, + 0x11A95: 84, + 0x11A96: 84, + 0x11A98: 84, + 0x11A99: 84, + 0x11C30: 84, + 0x11C31: 84, + 0x11C32: 84, + 0x11C33: 84, + 0x11C34: 84, + 0x11C35: 84, + 0x11C36: 84, + 0x11C38: 84, + 0x11C39: 84, + 0x11C3A: 84, + 0x11C3B: 84, + 0x11C3C: 84, + 0x11C3D: 84, + 0x11C3F: 84, + 0x11C92: 84, + 0x11C93: 84, + 0x11C94: 84, + 0x11C95: 84, + 0x11C96: 84, + 0x11C97: 84, + 0x11C98: 84, + 0x11C99: 84, + 0x11C9A: 84, + 0x11C9B: 84, + 0x11C9C: 84, + 0x11C9D: 84, + 0x11C9E: 84, + 0x11C9F: 84, + 0x11CA0: 84, + 0x11CA1: 84, + 0x11CA2: 84, + 0x11CA3: 84, + 0x11CA4: 84, + 0x11CA5: 84, + 0x11CA6: 84, + 0x11CA7: 84, + 0x11CAA: 84, + 0x11CAB: 84, + 0x11CAC: 84, + 0x11CAD: 84, + 0x11CAE: 84, + 0x11CAF: 84, + 
0x11CB0: 84, + 0x11CB2: 84, + 0x11CB3: 84, + 0x11CB5: 84, + 0x11CB6: 84, + 0x11D31: 84, + 0x11D32: 84, + 0x11D33: 84, + 0x11D34: 84, + 0x11D35: 84, + 0x11D36: 84, + 0x11D3A: 84, + 0x11D3C: 84, + 0x11D3D: 84, + 0x11D3F: 84, + 0x11D40: 84, + 0x11D41: 84, + 0x11D42: 84, + 0x11D43: 84, + 0x11D44: 84, + 0x11D45: 84, + 0x11D47: 84, + 0x11D90: 84, + 0x11D91: 84, + 0x11D95: 84, + 0x11D97: 84, + 0x11EF3: 84, + 0x11EF4: 84, + 0x11F00: 84, + 0x11F01: 84, + 0x11F36: 84, + 0x11F37: 84, + 0x11F38: 84, + 0x11F39: 84, + 0x11F3A: 84, + 0x11F40: 84, + 0x11F42: 84, + 0x11F5A: 84, + 0x13430: 84, + 0x13431: 84, + 0x13432: 84, + 0x13433: 84, + 0x13434: 84, + 0x13435: 84, + 0x13436: 84, + 0x13437: 84, + 0x13438: 84, + 0x13439: 84, + 0x1343A: 84, + 0x1343B: 84, + 0x1343C: 84, + 0x1343D: 84, + 0x1343E: 84, + 0x1343F: 84, + 0x13440: 84, + 0x13447: 84, + 0x13448: 84, + 0x13449: 84, + 0x1344A: 84, + 0x1344B: 84, + 0x1344C: 84, + 0x1344D: 84, + 0x1344E: 84, + 0x1344F: 84, + 0x13450: 84, + 0x13451: 84, + 0x13452: 84, + 0x13453: 84, + 0x13454: 84, + 0x13455: 84, + 0x1611E: 84, + 0x1611F: 84, + 0x16120: 84, + 0x16121: 84, + 0x16122: 84, + 0x16123: 84, + 0x16124: 84, + 0x16125: 84, + 0x16126: 84, + 0x16127: 84, + 0x16128: 84, + 0x16129: 84, + 0x1612D: 84, + 0x1612E: 84, + 0x1612F: 84, + 0x16AF0: 84, + 0x16AF1: 84, + 0x16AF2: 84, + 0x16AF3: 84, + 0x16AF4: 84, + 0x16B30: 84, + 0x16B31: 84, + 0x16B32: 84, + 0x16B33: 84, + 0x16B34: 84, + 0x16B35: 84, + 0x16B36: 84, + 0x16F4F: 84, + 0x16F8F: 84, + 0x16F90: 84, + 0x16F91: 84, + 0x16F92: 84, + 0x16FE4: 84, + 0x1BC9D: 84, + 0x1BC9E: 84, + 0x1BCA0: 84, + 0x1BCA1: 84, + 0x1BCA2: 84, + 0x1BCA3: 84, + 0x1CF00: 84, + 0x1CF01: 84, + 0x1CF02: 84, + 0x1CF03: 84, + 0x1CF04: 84, + 0x1CF05: 84, + 0x1CF06: 84, + 0x1CF07: 84, + 0x1CF08: 84, + 0x1CF09: 84, + 0x1CF0A: 84, + 0x1CF0B: 84, + 0x1CF0C: 84, + 0x1CF0D: 84, + 0x1CF0E: 84, + 0x1CF0F: 84, + 0x1CF10: 84, + 0x1CF11: 84, + 0x1CF12: 84, + 0x1CF13: 84, + 0x1CF14: 84, + 0x1CF15: 84, + 0x1CF16: 84, + 0x1CF17: 84, + 0x1CF18: 84, + 0x1CF19: 84, + 0x1CF1A: 84, + 0x1CF1B: 84, + 0x1CF1C: 84, + 0x1CF1D: 84, + 0x1CF1E: 84, + 0x1CF1F: 84, + 0x1CF20: 84, + 0x1CF21: 84, + 0x1CF22: 84, + 0x1CF23: 84, + 0x1CF24: 84, + 0x1CF25: 84, + 0x1CF26: 84, + 0x1CF27: 84, + 0x1CF28: 84, + 0x1CF29: 84, + 0x1CF2A: 84, + 0x1CF2B: 84, + 0x1CF2C: 84, + 0x1CF2D: 84, + 0x1CF30: 84, + 0x1CF31: 84, + 0x1CF32: 84, + 0x1CF33: 84, + 0x1CF34: 84, + 0x1CF35: 84, + 0x1CF36: 84, + 0x1CF37: 84, + 0x1CF38: 84, + 0x1CF39: 84, + 0x1CF3A: 84, + 0x1CF3B: 84, + 0x1CF3C: 84, + 0x1CF3D: 84, + 0x1CF3E: 84, + 0x1CF3F: 84, + 0x1CF40: 84, + 0x1CF41: 84, + 0x1CF42: 84, + 0x1CF43: 84, + 0x1CF44: 84, + 0x1CF45: 84, + 0x1CF46: 84, + 0x1D167: 84, + 0x1D168: 84, + 0x1D169: 84, + 0x1D173: 84, + 0x1D174: 84, + 0x1D175: 84, + 0x1D176: 84, + 0x1D177: 84, + 0x1D178: 84, + 0x1D179: 84, + 0x1D17A: 84, + 0x1D17B: 84, + 0x1D17C: 84, + 0x1D17D: 84, + 0x1D17E: 84, + 0x1D17F: 84, + 0x1D180: 84, + 0x1D181: 84, + 0x1D182: 84, + 0x1D185: 84, + 0x1D186: 84, + 0x1D187: 84, + 0x1D188: 84, + 0x1D189: 84, + 0x1D18A: 84, + 0x1D18B: 84, + 0x1D1AA: 84, + 0x1D1AB: 84, + 0x1D1AC: 84, + 0x1D1AD: 84, + 0x1D242: 84, + 0x1D243: 84, + 0x1D244: 84, + 0x1DA00: 84, + 0x1DA01: 84, + 0x1DA02: 84, + 0x1DA03: 84, + 0x1DA04: 84, + 0x1DA05: 84, + 0x1DA06: 84, + 0x1DA07: 84, + 0x1DA08: 84, + 0x1DA09: 84, + 0x1DA0A: 84, + 0x1DA0B: 84, + 0x1DA0C: 84, + 0x1DA0D: 84, + 0x1DA0E: 84, + 0x1DA0F: 84, + 0x1DA10: 84, + 0x1DA11: 84, + 0x1DA12: 84, + 0x1DA13: 84, + 0x1DA14: 84, + 0x1DA15: 84, + 0x1DA16: 84, + 0x1DA17: 84, + 0x1DA18: 84, + 0x1DA19: 84, + 
0x1DA1A: 84, + 0x1DA1B: 84, + 0x1DA1C: 84, + 0x1DA1D: 84, + 0x1DA1E: 84, + 0x1DA1F: 84, + 0x1DA20: 84, + 0x1DA21: 84, + 0x1DA22: 84, + 0x1DA23: 84, + 0x1DA24: 84, + 0x1DA25: 84, + 0x1DA26: 84, + 0x1DA27: 84, + 0x1DA28: 84, + 0x1DA29: 84, + 0x1DA2A: 84, + 0x1DA2B: 84, + 0x1DA2C: 84, + 0x1DA2D: 84, + 0x1DA2E: 84, + 0x1DA2F: 84, + 0x1DA30: 84, + 0x1DA31: 84, + 0x1DA32: 84, + 0x1DA33: 84, + 0x1DA34: 84, + 0x1DA35: 84, + 0x1DA36: 84, + 0x1DA3B: 84, + 0x1DA3C: 84, + 0x1DA3D: 84, + 0x1DA3E: 84, + 0x1DA3F: 84, + 0x1DA40: 84, + 0x1DA41: 84, + 0x1DA42: 84, + 0x1DA43: 84, + 0x1DA44: 84, + 0x1DA45: 84, + 0x1DA46: 84, + 0x1DA47: 84, + 0x1DA48: 84, + 0x1DA49: 84, + 0x1DA4A: 84, + 0x1DA4B: 84, + 0x1DA4C: 84, + 0x1DA4D: 84, + 0x1DA4E: 84, + 0x1DA4F: 84, + 0x1DA50: 84, + 0x1DA51: 84, + 0x1DA52: 84, + 0x1DA53: 84, + 0x1DA54: 84, + 0x1DA55: 84, + 0x1DA56: 84, + 0x1DA57: 84, + 0x1DA58: 84, + 0x1DA59: 84, + 0x1DA5A: 84, + 0x1DA5B: 84, + 0x1DA5C: 84, + 0x1DA5D: 84, + 0x1DA5E: 84, + 0x1DA5F: 84, + 0x1DA60: 84, + 0x1DA61: 84, + 0x1DA62: 84, + 0x1DA63: 84, + 0x1DA64: 84, + 0x1DA65: 84, + 0x1DA66: 84, + 0x1DA67: 84, + 0x1DA68: 84, + 0x1DA69: 84, + 0x1DA6A: 84, + 0x1DA6B: 84, + 0x1DA6C: 84, + 0x1DA75: 84, + 0x1DA84: 84, + 0x1DA9B: 84, + 0x1DA9C: 84, + 0x1DA9D: 84, + 0x1DA9E: 84, + 0x1DA9F: 84, + 0x1DAA1: 84, + 0x1DAA2: 84, + 0x1DAA3: 84, + 0x1DAA4: 84, + 0x1DAA5: 84, + 0x1DAA6: 84, + 0x1DAA7: 84, + 0x1DAA8: 84, + 0x1DAA9: 84, + 0x1DAAA: 84, + 0x1DAAB: 84, + 0x1DAAC: 84, + 0x1DAAD: 84, + 0x1DAAE: 84, + 0x1DAAF: 84, + 0x1E000: 84, + 0x1E001: 84, + 0x1E002: 84, + 0x1E003: 84, + 0x1E004: 84, + 0x1E005: 84, + 0x1E006: 84, + 0x1E008: 84, + 0x1E009: 84, + 0x1E00A: 84, + 0x1E00B: 84, + 0x1E00C: 84, + 0x1E00D: 84, + 0x1E00E: 84, + 0x1E00F: 84, + 0x1E010: 84, + 0x1E011: 84, + 0x1E012: 84, + 0x1E013: 84, + 0x1E014: 84, + 0x1E015: 84, + 0x1E016: 84, + 0x1E017: 84, + 0x1E018: 84, + 0x1E01B: 84, + 0x1E01C: 84, + 0x1E01D: 84, + 0x1E01E: 84, + 0x1E01F: 84, + 0x1E020: 84, + 0x1E021: 84, + 0x1E023: 84, + 0x1E024: 84, + 0x1E026: 84, + 0x1E027: 84, + 0x1E028: 84, + 0x1E029: 84, + 0x1E02A: 84, + 0x1E08F: 84, + 0x1E130: 84, + 0x1E131: 84, + 0x1E132: 84, + 0x1E133: 84, + 0x1E134: 84, + 0x1E135: 84, + 0x1E136: 84, + 0x1E2AE: 84, + 0x1E2EC: 84, + 0x1E2ED: 84, + 0x1E2EE: 84, + 0x1E2EF: 84, + 0x1E4EC: 84, + 0x1E4ED: 84, + 0x1E4EE: 84, + 0x1E4EF: 84, + 0x1E5EE: 84, + 0x1E5EF: 84, + 0x1E8D0: 84, + 0x1E8D1: 84, + 0x1E8D2: 84, + 0x1E8D3: 84, + 0x1E8D4: 84, + 0x1E8D5: 84, + 0x1E8D6: 84, + 0x1E900: 68, + 0x1E901: 68, + 0x1E902: 68, + 0x1E903: 68, + 0x1E904: 68, + 0x1E905: 68, + 0x1E906: 68, + 0x1E907: 68, + 0x1E908: 68, + 0x1E909: 68, + 0x1E90A: 68, + 0x1E90B: 68, + 0x1E90C: 68, + 0x1E90D: 68, + 0x1E90E: 68, + 0x1E90F: 68, + 0x1E910: 68, + 0x1E911: 68, + 0x1E912: 68, + 0x1E913: 68, + 0x1E914: 68, + 0x1E915: 68, + 0x1E916: 68, + 0x1E917: 68, + 0x1E918: 68, + 0x1E919: 68, + 0x1E91A: 68, + 0x1E91B: 68, + 0x1E91C: 68, + 0x1E91D: 68, + 0x1E91E: 68, + 0x1E91F: 68, + 0x1E920: 68, + 0x1E921: 68, + 0x1E922: 68, + 0x1E923: 68, + 0x1E924: 68, + 0x1E925: 68, + 0x1E926: 68, + 0x1E927: 68, + 0x1E928: 68, + 0x1E929: 68, + 0x1E92A: 68, + 0x1E92B: 68, + 0x1E92C: 68, + 0x1E92D: 68, + 0x1E92E: 68, + 0x1E92F: 68, + 0x1E930: 68, + 0x1E931: 68, + 0x1E932: 68, + 0x1E933: 68, + 0x1E934: 68, + 0x1E935: 68, + 0x1E936: 68, + 0x1E937: 68, + 0x1E938: 68, + 0x1E939: 68, + 0x1E93A: 68, + 0x1E93B: 68, + 0x1E93C: 68, + 0x1E93D: 68, + 0x1E93E: 68, + 0x1E93F: 68, + 0x1E940: 68, + 0x1E941: 68, + 0x1E942: 68, + 0x1E943: 68, + 0x1E944: 84, + 0x1E945: 84, + 0x1E946: 84, + 0x1E947: 84, + 
0x1E948: 84, + 0x1E949: 84, + 0x1E94A: 84, + 0x1E94B: 84, + 0xE0001: 84, + 0xE0020: 84, + 0xE0021: 84, + 0xE0022: 84, + 0xE0023: 84, + 0xE0024: 84, + 0xE0025: 84, + 0xE0026: 84, + 0xE0027: 84, + 0xE0028: 84, + 0xE0029: 84, + 0xE002A: 84, + 0xE002B: 84, + 0xE002C: 84, + 0xE002D: 84, + 0xE002E: 84, + 0xE002F: 84, + 0xE0030: 84, + 0xE0031: 84, + 0xE0032: 84, + 0xE0033: 84, + 0xE0034: 84, + 0xE0035: 84, + 0xE0036: 84, + 0xE0037: 84, + 0xE0038: 84, + 0xE0039: 84, + 0xE003A: 84, + 0xE003B: 84, + 0xE003C: 84, + 0xE003D: 84, + 0xE003E: 84, + 0xE003F: 84, + 0xE0040: 84, + 0xE0041: 84, + 0xE0042: 84, + 0xE0043: 84, + 0xE0044: 84, + 0xE0045: 84, + 0xE0046: 84, + 0xE0047: 84, + 0xE0048: 84, + 0xE0049: 84, + 0xE004A: 84, + 0xE004B: 84, + 0xE004C: 84, + 0xE004D: 84, + 0xE004E: 84, + 0xE004F: 84, + 0xE0050: 84, + 0xE0051: 84, + 0xE0052: 84, + 0xE0053: 84, + 0xE0054: 84, + 0xE0055: 84, + 0xE0056: 84, + 0xE0057: 84, + 0xE0058: 84, + 0xE0059: 84, + 0xE005A: 84, + 0xE005B: 84, + 0xE005C: 84, + 0xE005D: 84, + 0xE005E: 84, + 0xE005F: 84, + 0xE0060: 84, + 0xE0061: 84, + 0xE0062: 84, + 0xE0063: 84, + 0xE0064: 84, + 0xE0065: 84, + 0xE0066: 84, + 0xE0067: 84, + 0xE0068: 84, + 0xE0069: 84, + 0xE006A: 84, + 0xE006B: 84, + 0xE006C: 84, + 0xE006D: 84, + 0xE006E: 84, + 0xE006F: 84, + 0xE0070: 84, + 0xE0071: 84, + 0xE0072: 84, + 0xE0073: 84, + 0xE0074: 84, + 0xE0075: 84, + 0xE0076: 84, + 0xE0077: 84, + 0xE0078: 84, + 0xE0079: 84, + 0xE007A: 84, + 0xE007B: 84, + 0xE007C: 84, + 0xE007D: 84, + 0xE007E: 84, + 0xE007F: 84, + 0xE0100: 84, + 0xE0101: 84, + 0xE0102: 84, + 0xE0103: 84, + 0xE0104: 84, + 0xE0105: 84, + 0xE0106: 84, + 0xE0107: 84, + 0xE0108: 84, + 0xE0109: 84, + 0xE010A: 84, + 0xE010B: 84, + 0xE010C: 84, + 0xE010D: 84, + 0xE010E: 84, + 0xE010F: 84, + 0xE0110: 84, + 0xE0111: 84, + 0xE0112: 84, + 0xE0113: 84, + 0xE0114: 84, + 0xE0115: 84, + 0xE0116: 84, + 0xE0117: 84, + 0xE0118: 84, + 0xE0119: 84, + 0xE011A: 84, + 0xE011B: 84, + 0xE011C: 84, + 0xE011D: 84, + 0xE011E: 84, + 0xE011F: 84, + 0xE0120: 84, + 0xE0121: 84, + 0xE0122: 84, + 0xE0123: 84, + 0xE0124: 84, + 0xE0125: 84, + 0xE0126: 84, + 0xE0127: 84, + 0xE0128: 84, + 0xE0129: 84, + 0xE012A: 84, + 0xE012B: 84, + 0xE012C: 84, + 0xE012D: 84, + 0xE012E: 84, + 0xE012F: 84, + 0xE0130: 84, + 0xE0131: 84, + 0xE0132: 84, + 0xE0133: 84, + 0xE0134: 84, + 0xE0135: 84, + 0xE0136: 84, + 0xE0137: 84, + 0xE0138: 84, + 0xE0139: 84, + 0xE013A: 84, + 0xE013B: 84, + 0xE013C: 84, + 0xE013D: 84, + 0xE013E: 84, + 0xE013F: 84, + 0xE0140: 84, + 0xE0141: 84, + 0xE0142: 84, + 0xE0143: 84, + 0xE0144: 84, + 0xE0145: 84, + 0xE0146: 84, + 0xE0147: 84, + 0xE0148: 84, + 0xE0149: 84, + 0xE014A: 84, + 0xE014B: 84, + 0xE014C: 84, + 0xE014D: 84, + 0xE014E: 84, + 0xE014F: 84, + 0xE0150: 84, + 0xE0151: 84, + 0xE0152: 84, + 0xE0153: 84, + 0xE0154: 84, + 0xE0155: 84, + 0xE0156: 84, + 0xE0157: 84, + 0xE0158: 84, + 0xE0159: 84, + 0xE015A: 84, + 0xE015B: 84, + 0xE015C: 84, + 0xE015D: 84, + 0xE015E: 84, + 0xE015F: 84, + 0xE0160: 84, + 0xE0161: 84, + 0xE0162: 84, + 0xE0163: 84, + 0xE0164: 84, + 0xE0165: 84, + 0xE0166: 84, + 0xE0167: 84, + 0xE0168: 84, + 0xE0169: 84, + 0xE016A: 84, + 0xE016B: 84, + 0xE016C: 84, + 0xE016D: 84, + 0xE016E: 84, + 0xE016F: 84, + 0xE0170: 84, + 0xE0171: 84, + 0xE0172: 84, + 0xE0173: 84, + 0xE0174: 84, + 0xE0175: 84, + 0xE0176: 84, + 0xE0177: 84, + 0xE0178: 84, + 0xE0179: 84, + 0xE017A: 84, + 0xE017B: 84, + 0xE017C: 84, + 0xE017D: 84, + 0xE017E: 84, + 0xE017F: 84, + 0xE0180: 84, + 0xE0181: 84, + 0xE0182: 84, + 0xE0183: 84, + 0xE0184: 84, + 0xE0185: 84, + 0xE0186: 84, + 0xE0187: 84, + 
0xE0188: 84, + 0xE0189: 84, + 0xE018A: 84, + 0xE018B: 84, + 0xE018C: 84, + 0xE018D: 84, + 0xE018E: 84, + 0xE018F: 84, + 0xE0190: 84, + 0xE0191: 84, + 0xE0192: 84, + 0xE0193: 84, + 0xE0194: 84, + 0xE0195: 84, + 0xE0196: 84, + 0xE0197: 84, + 0xE0198: 84, + 0xE0199: 84, + 0xE019A: 84, + 0xE019B: 84, + 0xE019C: 84, + 0xE019D: 84, + 0xE019E: 84, + 0xE019F: 84, + 0xE01A0: 84, + 0xE01A1: 84, + 0xE01A2: 84, + 0xE01A3: 84, + 0xE01A4: 84, + 0xE01A5: 84, + 0xE01A6: 84, + 0xE01A7: 84, + 0xE01A8: 84, + 0xE01A9: 84, + 0xE01AA: 84, + 0xE01AB: 84, + 0xE01AC: 84, + 0xE01AD: 84, + 0xE01AE: 84, + 0xE01AF: 84, + 0xE01B0: 84, + 0xE01B1: 84, + 0xE01B2: 84, + 0xE01B3: 84, + 0xE01B4: 84, + 0xE01B5: 84, + 0xE01B6: 84, + 0xE01B7: 84, + 0xE01B8: 84, + 0xE01B9: 84, + 0xE01BA: 84, + 0xE01BB: 84, + 0xE01BC: 84, + 0xE01BD: 84, + 0xE01BE: 84, + 0xE01BF: 84, + 0xE01C0: 84, + 0xE01C1: 84, + 0xE01C2: 84, + 0xE01C3: 84, + 0xE01C4: 84, + 0xE01C5: 84, + 0xE01C6: 84, + 0xE01C7: 84, + 0xE01C8: 84, + 0xE01C9: 84, + 0xE01CA: 84, + 0xE01CB: 84, + 0xE01CC: 84, + 0xE01CD: 84, + 0xE01CE: 84, + 0xE01CF: 84, + 0xE01D0: 84, + 0xE01D1: 84, + 0xE01D2: 84, + 0xE01D3: 84, + 0xE01D4: 84, + 0xE01D5: 84, + 0xE01D6: 84, + 0xE01D7: 84, + 0xE01D8: 84, + 0xE01D9: 84, + 0xE01DA: 84, + 0xE01DB: 84, + 0xE01DC: 84, + 0xE01DD: 84, + 0xE01DE: 84, + 0xE01DF: 84, + 0xE01E0: 84, + 0xE01E1: 84, + 0xE01E2: 84, + 0xE01E3: 84, + 0xE01E4: 84, + 0xE01E5: 84, + 0xE01E6: 84, + 0xE01E7: 84, + 0xE01E8: 84, + 0xE01E9: 84, + 0xE01EA: 84, + 0xE01EB: 84, + 0xE01EC: 84, + 0xE01ED: 84, + 0xE01EE: 84, + 0xE01EF: 84, +} +codepoint_classes = { + "PVALID": ( + 0x2D0000002E, + 0x300000003A, + 0x610000007B, + 0xDF000000F7, + 0xF800000100, + 0x10100000102, + 0x10300000104, + 0x10500000106, + 0x10700000108, + 0x1090000010A, + 0x10B0000010C, + 0x10D0000010E, + 0x10F00000110, + 0x11100000112, + 0x11300000114, + 0x11500000116, + 0x11700000118, + 0x1190000011A, + 0x11B0000011C, + 0x11D0000011E, + 0x11F00000120, + 0x12100000122, + 0x12300000124, + 0x12500000126, + 0x12700000128, + 0x1290000012A, + 0x12B0000012C, + 0x12D0000012E, + 0x12F00000130, + 0x13100000132, + 0x13500000136, + 0x13700000139, + 0x13A0000013B, + 0x13C0000013D, + 0x13E0000013F, + 0x14200000143, + 0x14400000145, + 0x14600000147, + 0x14800000149, + 0x14B0000014C, + 0x14D0000014E, + 0x14F00000150, + 0x15100000152, + 0x15300000154, + 0x15500000156, + 0x15700000158, + 0x1590000015A, + 0x15B0000015C, + 0x15D0000015E, + 0x15F00000160, + 0x16100000162, + 0x16300000164, + 0x16500000166, + 0x16700000168, + 0x1690000016A, + 0x16B0000016C, + 0x16D0000016E, + 0x16F00000170, + 0x17100000172, + 0x17300000174, + 0x17500000176, + 0x17700000178, + 0x17A0000017B, + 0x17C0000017D, + 0x17E0000017F, + 0x18000000181, + 0x18300000184, + 0x18500000186, + 0x18800000189, + 0x18C0000018E, + 0x19200000193, + 0x19500000196, + 0x1990000019C, + 0x19E0000019F, + 0x1A1000001A2, + 0x1A3000001A4, + 0x1A5000001A6, + 0x1A8000001A9, + 0x1AA000001AC, + 0x1AD000001AE, + 0x1B0000001B1, + 0x1B4000001B5, + 0x1B6000001B7, + 0x1B9000001BC, + 0x1BD000001C4, + 0x1CE000001CF, + 0x1D0000001D1, + 0x1D2000001D3, + 0x1D4000001D5, + 0x1D6000001D7, + 0x1D8000001D9, + 0x1DA000001DB, + 0x1DC000001DE, + 0x1DF000001E0, + 0x1E1000001E2, + 0x1E3000001E4, + 0x1E5000001E6, + 0x1E7000001E8, + 0x1E9000001EA, + 0x1EB000001EC, + 0x1ED000001EE, + 0x1EF000001F1, + 0x1F5000001F6, + 0x1F9000001FA, + 0x1FB000001FC, + 0x1FD000001FE, + 0x1FF00000200, + 0x20100000202, + 0x20300000204, + 0x20500000206, + 0x20700000208, + 0x2090000020A, + 0x20B0000020C, + 0x20D0000020E, + 0x20F00000210, + 
0x21100000212, + 0x21300000214, + 0x21500000216, + 0x21700000218, + 0x2190000021A, + 0x21B0000021C, + 0x21D0000021E, + 0x21F00000220, + 0x22100000222, + 0x22300000224, + 0x22500000226, + 0x22700000228, + 0x2290000022A, + 0x22B0000022C, + 0x22D0000022E, + 0x22F00000230, + 0x23100000232, + 0x2330000023A, + 0x23C0000023D, + 0x23F00000241, + 0x24200000243, + 0x24700000248, + 0x2490000024A, + 0x24B0000024C, + 0x24D0000024E, + 0x24F000002B0, + 0x2B9000002C2, + 0x2C6000002D2, + 0x2EC000002ED, + 0x2EE000002EF, + 0x30000000340, + 0x34200000343, + 0x3460000034F, + 0x35000000370, + 0x37100000372, + 0x37300000374, + 0x37700000378, + 0x37B0000037E, + 0x39000000391, + 0x3AC000003CF, + 0x3D7000003D8, + 0x3D9000003DA, + 0x3DB000003DC, + 0x3DD000003DE, + 0x3DF000003E0, + 0x3E1000003E2, + 0x3E3000003E4, + 0x3E5000003E6, + 0x3E7000003E8, + 0x3E9000003EA, + 0x3EB000003EC, + 0x3ED000003EE, + 0x3EF000003F0, + 0x3F3000003F4, + 0x3F8000003F9, + 0x3FB000003FD, + 0x43000000460, + 0x46100000462, + 0x46300000464, + 0x46500000466, + 0x46700000468, + 0x4690000046A, + 0x46B0000046C, + 0x46D0000046E, + 0x46F00000470, + 0x47100000472, + 0x47300000474, + 0x47500000476, + 0x47700000478, + 0x4790000047A, + 0x47B0000047C, + 0x47D0000047E, + 0x47F00000480, + 0x48100000482, + 0x48300000488, + 0x48B0000048C, + 0x48D0000048E, + 0x48F00000490, + 0x49100000492, + 0x49300000494, + 0x49500000496, + 0x49700000498, + 0x4990000049A, + 0x49B0000049C, + 0x49D0000049E, + 0x49F000004A0, + 0x4A1000004A2, + 0x4A3000004A4, + 0x4A5000004A6, + 0x4A7000004A8, + 0x4A9000004AA, + 0x4AB000004AC, + 0x4AD000004AE, + 0x4AF000004B0, + 0x4B1000004B2, + 0x4B3000004B4, + 0x4B5000004B6, + 0x4B7000004B8, + 0x4B9000004BA, + 0x4BB000004BC, + 0x4BD000004BE, + 0x4BF000004C0, + 0x4C2000004C3, + 0x4C4000004C5, + 0x4C6000004C7, + 0x4C8000004C9, + 0x4CA000004CB, + 0x4CC000004CD, + 0x4CE000004D0, + 0x4D1000004D2, + 0x4D3000004D4, + 0x4D5000004D6, + 0x4D7000004D8, + 0x4D9000004DA, + 0x4DB000004DC, + 0x4DD000004DE, + 0x4DF000004E0, + 0x4E1000004E2, + 0x4E3000004E4, + 0x4E5000004E6, + 0x4E7000004E8, + 0x4E9000004EA, + 0x4EB000004EC, + 0x4ED000004EE, + 0x4EF000004F0, + 0x4F1000004F2, + 0x4F3000004F4, + 0x4F5000004F6, + 0x4F7000004F8, + 0x4F9000004FA, + 0x4FB000004FC, + 0x4FD000004FE, + 0x4FF00000500, + 0x50100000502, + 0x50300000504, + 0x50500000506, + 0x50700000508, + 0x5090000050A, + 0x50B0000050C, + 0x50D0000050E, + 0x50F00000510, + 0x51100000512, + 0x51300000514, + 0x51500000516, + 0x51700000518, + 0x5190000051A, + 0x51B0000051C, + 0x51D0000051E, + 0x51F00000520, + 0x52100000522, + 0x52300000524, + 0x52500000526, + 0x52700000528, + 0x5290000052A, + 0x52B0000052C, + 0x52D0000052E, + 0x52F00000530, + 0x5590000055A, + 0x56000000587, + 0x58800000589, + 0x591000005BE, + 0x5BF000005C0, + 0x5C1000005C3, + 0x5C4000005C6, + 0x5C7000005C8, + 0x5D0000005EB, + 0x5EF000005F3, + 0x6100000061B, + 0x62000000640, + 0x64100000660, + 0x66E00000675, + 0x679000006D4, + 0x6D5000006DD, + 0x6DF000006E9, + 0x6EA000006F0, + 0x6FA00000700, + 0x7100000074B, + 0x74D000007B2, + 0x7C0000007F6, + 0x7FD000007FE, + 0x8000000082E, + 0x8400000085C, + 0x8600000086B, + 0x87000000888, + 0x8890000088F, + 0x897000008E2, + 0x8E300000958, + 0x96000000964, + 0x96600000970, + 0x97100000984, + 0x9850000098D, + 0x98F00000991, + 0x993000009A9, + 0x9AA000009B1, + 0x9B2000009B3, + 0x9B6000009BA, + 0x9BC000009C5, + 0x9C7000009C9, + 0x9CB000009CF, + 0x9D7000009D8, + 0x9E0000009E4, + 0x9E6000009F2, + 0x9FC000009FD, + 0x9FE000009FF, + 0xA0100000A04, + 0xA0500000A0B, + 0xA0F00000A11, + 0xA1300000A29, + 0xA2A00000A31, + 
0xA3200000A33, + 0xA3500000A36, + 0xA3800000A3A, + 0xA3C00000A3D, + 0xA3E00000A43, + 0xA4700000A49, + 0xA4B00000A4E, + 0xA5100000A52, + 0xA5C00000A5D, + 0xA6600000A76, + 0xA8100000A84, + 0xA8500000A8E, + 0xA8F00000A92, + 0xA9300000AA9, + 0xAAA00000AB1, + 0xAB200000AB4, + 0xAB500000ABA, + 0xABC00000AC6, + 0xAC700000ACA, + 0xACB00000ACE, + 0xAD000000AD1, + 0xAE000000AE4, + 0xAE600000AF0, + 0xAF900000B00, + 0xB0100000B04, + 0xB0500000B0D, + 0xB0F00000B11, + 0xB1300000B29, + 0xB2A00000B31, + 0xB3200000B34, + 0xB3500000B3A, + 0xB3C00000B45, + 0xB4700000B49, + 0xB4B00000B4E, + 0xB5500000B58, + 0xB5F00000B64, + 0xB6600000B70, + 0xB7100000B72, + 0xB8200000B84, + 0xB8500000B8B, + 0xB8E00000B91, + 0xB9200000B96, + 0xB9900000B9B, + 0xB9C00000B9D, + 0xB9E00000BA0, + 0xBA300000BA5, + 0xBA800000BAB, + 0xBAE00000BBA, + 0xBBE00000BC3, + 0xBC600000BC9, + 0xBCA00000BCE, + 0xBD000000BD1, + 0xBD700000BD8, + 0xBE600000BF0, + 0xC0000000C0D, + 0xC0E00000C11, + 0xC1200000C29, + 0xC2A00000C3A, + 0xC3C00000C45, + 0xC4600000C49, + 0xC4A00000C4E, + 0xC5500000C57, + 0xC5800000C5B, + 0xC5D00000C5E, + 0xC6000000C64, + 0xC6600000C70, + 0xC8000000C84, + 0xC8500000C8D, + 0xC8E00000C91, + 0xC9200000CA9, + 0xCAA00000CB4, + 0xCB500000CBA, + 0xCBC00000CC5, + 0xCC600000CC9, + 0xCCA00000CCE, + 0xCD500000CD7, + 0xCDD00000CDF, + 0xCE000000CE4, + 0xCE600000CF0, + 0xCF100000CF4, + 0xD0000000D0D, + 0xD0E00000D11, + 0xD1200000D45, + 0xD4600000D49, + 0xD4A00000D4F, + 0xD5400000D58, + 0xD5F00000D64, + 0xD6600000D70, + 0xD7A00000D80, + 0xD8100000D84, + 0xD8500000D97, + 0xD9A00000DB2, + 0xDB300000DBC, + 0xDBD00000DBE, + 0xDC000000DC7, + 0xDCA00000DCB, + 0xDCF00000DD5, + 0xDD600000DD7, + 0xDD800000DE0, + 0xDE600000DF0, + 0xDF200000DF4, + 0xE0100000E33, + 0xE3400000E3B, + 0xE4000000E4F, + 0xE5000000E5A, + 0xE8100000E83, + 0xE8400000E85, + 0xE8600000E8B, + 0xE8C00000EA4, + 0xEA500000EA6, + 0xEA700000EB3, + 0xEB400000EBE, + 0xEC000000EC5, + 0xEC600000EC7, + 0xEC800000ECF, + 0xED000000EDA, + 0xEDE00000EE0, + 0xF0000000F01, + 0xF0B00000F0C, + 0xF1800000F1A, + 0xF2000000F2A, + 0xF3500000F36, + 0xF3700000F38, + 0xF3900000F3A, + 0xF3E00000F43, + 0xF4400000F48, + 0xF4900000F4D, + 0xF4E00000F52, + 0xF5300000F57, + 0xF5800000F5C, + 0xF5D00000F69, + 0xF6A00000F6D, + 0xF7100000F73, + 0xF7400000F75, + 0xF7A00000F81, + 0xF8200000F85, + 0xF8600000F93, + 0xF9400000F98, + 0xF9900000F9D, + 0xF9E00000FA2, + 0xFA300000FA7, + 0xFA800000FAC, + 0xFAD00000FB9, + 0xFBA00000FBD, + 0xFC600000FC7, + 0x10000000104A, + 0x10500000109E, + 0x10D0000010FB, + 0x10FD00001100, + 0x120000001249, + 0x124A0000124E, + 0x125000001257, + 0x125800001259, + 0x125A0000125E, + 0x126000001289, + 0x128A0000128E, + 0x1290000012B1, + 0x12B2000012B6, + 0x12B8000012BF, + 0x12C0000012C1, + 0x12C2000012C6, + 0x12C8000012D7, + 0x12D800001311, + 0x131200001316, + 0x13180000135B, + 0x135D00001360, + 0x138000001390, + 0x13A0000013F6, + 0x14010000166D, + 0x166F00001680, + 0x16810000169B, + 0x16A0000016EB, + 0x16F1000016F9, + 0x170000001716, + 0x171F00001735, + 0x174000001754, + 0x17600000176D, + 0x176E00001771, + 0x177200001774, + 0x1780000017B4, + 0x17B6000017D4, + 0x17D7000017D8, + 0x17DC000017DE, + 0x17E0000017EA, + 0x18100000181A, + 0x182000001879, + 0x1880000018AB, + 0x18B0000018F6, + 0x19000000191F, + 0x19200000192C, + 0x19300000193C, + 0x19460000196E, + 0x197000001975, + 0x1980000019AC, + 0x19B0000019CA, + 0x19D0000019DA, + 0x1A0000001A1C, + 0x1A2000001A5F, + 0x1A6000001A7D, + 0x1A7F00001A8A, + 0x1A9000001A9A, + 0x1AA700001AA8, + 0x1AB000001ABE, + 0x1ABF00001ACF, + 0x1B0000001B4D, + 
0x1B5000001B5A, + 0x1B6B00001B74, + 0x1B8000001BF4, + 0x1C0000001C38, + 0x1C4000001C4A, + 0x1C4D00001C7E, + 0x1C8A00001C8B, + 0x1CD000001CD3, + 0x1CD400001CFB, + 0x1D0000001D2C, + 0x1D2F00001D30, + 0x1D3B00001D3C, + 0x1D4E00001D4F, + 0x1D6B00001D78, + 0x1D7900001D9B, + 0x1DC000001E00, + 0x1E0100001E02, + 0x1E0300001E04, + 0x1E0500001E06, + 0x1E0700001E08, + 0x1E0900001E0A, + 0x1E0B00001E0C, + 0x1E0D00001E0E, + 0x1E0F00001E10, + 0x1E1100001E12, + 0x1E1300001E14, + 0x1E1500001E16, + 0x1E1700001E18, + 0x1E1900001E1A, + 0x1E1B00001E1C, + 0x1E1D00001E1E, + 0x1E1F00001E20, + 0x1E2100001E22, + 0x1E2300001E24, + 0x1E2500001E26, + 0x1E2700001E28, + 0x1E2900001E2A, + 0x1E2B00001E2C, + 0x1E2D00001E2E, + 0x1E2F00001E30, + 0x1E3100001E32, + 0x1E3300001E34, + 0x1E3500001E36, + 0x1E3700001E38, + 0x1E3900001E3A, + 0x1E3B00001E3C, + 0x1E3D00001E3E, + 0x1E3F00001E40, + 0x1E4100001E42, + 0x1E4300001E44, + 0x1E4500001E46, + 0x1E4700001E48, + 0x1E4900001E4A, + 0x1E4B00001E4C, + 0x1E4D00001E4E, + 0x1E4F00001E50, + 0x1E5100001E52, + 0x1E5300001E54, + 0x1E5500001E56, + 0x1E5700001E58, + 0x1E5900001E5A, + 0x1E5B00001E5C, + 0x1E5D00001E5E, + 0x1E5F00001E60, + 0x1E6100001E62, + 0x1E6300001E64, + 0x1E6500001E66, + 0x1E6700001E68, + 0x1E6900001E6A, + 0x1E6B00001E6C, + 0x1E6D00001E6E, + 0x1E6F00001E70, + 0x1E7100001E72, + 0x1E7300001E74, + 0x1E7500001E76, + 0x1E7700001E78, + 0x1E7900001E7A, + 0x1E7B00001E7C, + 0x1E7D00001E7E, + 0x1E7F00001E80, + 0x1E8100001E82, + 0x1E8300001E84, + 0x1E8500001E86, + 0x1E8700001E88, + 0x1E8900001E8A, + 0x1E8B00001E8C, + 0x1E8D00001E8E, + 0x1E8F00001E90, + 0x1E9100001E92, + 0x1E9300001E94, + 0x1E9500001E9A, + 0x1E9C00001E9E, + 0x1E9F00001EA0, + 0x1EA100001EA2, + 0x1EA300001EA4, + 0x1EA500001EA6, + 0x1EA700001EA8, + 0x1EA900001EAA, + 0x1EAB00001EAC, + 0x1EAD00001EAE, + 0x1EAF00001EB0, + 0x1EB100001EB2, + 0x1EB300001EB4, + 0x1EB500001EB6, + 0x1EB700001EB8, + 0x1EB900001EBA, + 0x1EBB00001EBC, + 0x1EBD00001EBE, + 0x1EBF00001EC0, + 0x1EC100001EC2, + 0x1EC300001EC4, + 0x1EC500001EC6, + 0x1EC700001EC8, + 0x1EC900001ECA, + 0x1ECB00001ECC, + 0x1ECD00001ECE, + 0x1ECF00001ED0, + 0x1ED100001ED2, + 0x1ED300001ED4, + 0x1ED500001ED6, + 0x1ED700001ED8, + 0x1ED900001EDA, + 0x1EDB00001EDC, + 0x1EDD00001EDE, + 0x1EDF00001EE0, + 0x1EE100001EE2, + 0x1EE300001EE4, + 0x1EE500001EE6, + 0x1EE700001EE8, + 0x1EE900001EEA, + 0x1EEB00001EEC, + 0x1EED00001EEE, + 0x1EEF00001EF0, + 0x1EF100001EF2, + 0x1EF300001EF4, + 0x1EF500001EF6, + 0x1EF700001EF8, + 0x1EF900001EFA, + 0x1EFB00001EFC, + 0x1EFD00001EFE, + 0x1EFF00001F08, + 0x1F1000001F16, + 0x1F2000001F28, + 0x1F3000001F38, + 0x1F4000001F46, + 0x1F5000001F58, + 0x1F6000001F68, + 0x1F7000001F71, + 0x1F7200001F73, + 0x1F7400001F75, + 0x1F7600001F77, + 0x1F7800001F79, + 0x1F7A00001F7B, + 0x1F7C00001F7D, + 0x1FB000001FB2, + 0x1FB600001FB7, + 0x1FC600001FC7, + 0x1FD000001FD3, + 0x1FD600001FD8, + 0x1FE000001FE3, + 0x1FE400001FE8, + 0x1FF600001FF7, + 0x214E0000214F, + 0x218400002185, + 0x2C3000002C60, + 0x2C6100002C62, + 0x2C6500002C67, + 0x2C6800002C69, + 0x2C6A00002C6B, + 0x2C6C00002C6D, + 0x2C7100002C72, + 0x2C7300002C75, + 0x2C7600002C7C, + 0x2C8100002C82, + 0x2C8300002C84, + 0x2C8500002C86, + 0x2C8700002C88, + 0x2C8900002C8A, + 0x2C8B00002C8C, + 0x2C8D00002C8E, + 0x2C8F00002C90, + 0x2C9100002C92, + 0x2C9300002C94, + 0x2C9500002C96, + 0x2C9700002C98, + 0x2C9900002C9A, + 0x2C9B00002C9C, + 0x2C9D00002C9E, + 0x2C9F00002CA0, + 0x2CA100002CA2, + 0x2CA300002CA4, + 0x2CA500002CA6, + 0x2CA700002CA8, + 0x2CA900002CAA, + 0x2CAB00002CAC, + 0x2CAD00002CAE, + 0x2CAF00002CB0, + 
0x2CB100002CB2, + 0x2CB300002CB4, + 0x2CB500002CB6, + 0x2CB700002CB8, + 0x2CB900002CBA, + 0x2CBB00002CBC, + 0x2CBD00002CBE, + 0x2CBF00002CC0, + 0x2CC100002CC2, + 0x2CC300002CC4, + 0x2CC500002CC6, + 0x2CC700002CC8, + 0x2CC900002CCA, + 0x2CCB00002CCC, + 0x2CCD00002CCE, + 0x2CCF00002CD0, + 0x2CD100002CD2, + 0x2CD300002CD4, + 0x2CD500002CD6, + 0x2CD700002CD8, + 0x2CD900002CDA, + 0x2CDB00002CDC, + 0x2CDD00002CDE, + 0x2CDF00002CE0, + 0x2CE100002CE2, + 0x2CE300002CE5, + 0x2CEC00002CED, + 0x2CEE00002CF2, + 0x2CF300002CF4, + 0x2D0000002D26, + 0x2D2700002D28, + 0x2D2D00002D2E, + 0x2D3000002D68, + 0x2D7F00002D97, + 0x2DA000002DA7, + 0x2DA800002DAF, + 0x2DB000002DB7, + 0x2DB800002DBF, + 0x2DC000002DC7, + 0x2DC800002DCF, + 0x2DD000002DD7, + 0x2DD800002DDF, + 0x2DE000002E00, + 0x2E2F00002E30, + 0x300500003008, + 0x302A0000302E, + 0x303C0000303D, + 0x304100003097, + 0x30990000309B, + 0x309D0000309F, + 0x30A1000030FB, + 0x30FC000030FF, + 0x310500003130, + 0x31A0000031C0, + 0x31F000003200, + 0x340000004DC0, + 0x4E000000A48D, + 0xA4D00000A4FE, + 0xA5000000A60D, + 0xA6100000A62C, + 0xA6410000A642, + 0xA6430000A644, + 0xA6450000A646, + 0xA6470000A648, + 0xA6490000A64A, + 0xA64B0000A64C, + 0xA64D0000A64E, + 0xA64F0000A650, + 0xA6510000A652, + 0xA6530000A654, + 0xA6550000A656, + 0xA6570000A658, + 0xA6590000A65A, + 0xA65B0000A65C, + 0xA65D0000A65E, + 0xA65F0000A660, + 0xA6610000A662, + 0xA6630000A664, + 0xA6650000A666, + 0xA6670000A668, + 0xA6690000A66A, + 0xA66B0000A66C, + 0xA66D0000A670, + 0xA6740000A67E, + 0xA67F0000A680, + 0xA6810000A682, + 0xA6830000A684, + 0xA6850000A686, + 0xA6870000A688, + 0xA6890000A68A, + 0xA68B0000A68C, + 0xA68D0000A68E, + 0xA68F0000A690, + 0xA6910000A692, + 0xA6930000A694, + 0xA6950000A696, + 0xA6970000A698, + 0xA6990000A69A, + 0xA69B0000A69C, + 0xA69E0000A6E6, + 0xA6F00000A6F2, + 0xA7170000A720, + 0xA7230000A724, + 0xA7250000A726, + 0xA7270000A728, + 0xA7290000A72A, + 0xA72B0000A72C, + 0xA72D0000A72E, + 0xA72F0000A732, + 0xA7330000A734, + 0xA7350000A736, + 0xA7370000A738, + 0xA7390000A73A, + 0xA73B0000A73C, + 0xA73D0000A73E, + 0xA73F0000A740, + 0xA7410000A742, + 0xA7430000A744, + 0xA7450000A746, + 0xA7470000A748, + 0xA7490000A74A, + 0xA74B0000A74C, + 0xA74D0000A74E, + 0xA74F0000A750, + 0xA7510000A752, + 0xA7530000A754, + 0xA7550000A756, + 0xA7570000A758, + 0xA7590000A75A, + 0xA75B0000A75C, + 0xA75D0000A75E, + 0xA75F0000A760, + 0xA7610000A762, + 0xA7630000A764, + 0xA7650000A766, + 0xA7670000A768, + 0xA7690000A76A, + 0xA76B0000A76C, + 0xA76D0000A76E, + 0xA76F0000A770, + 0xA7710000A779, + 0xA77A0000A77B, + 0xA77C0000A77D, + 0xA77F0000A780, + 0xA7810000A782, + 0xA7830000A784, + 0xA7850000A786, + 0xA7870000A789, + 0xA78C0000A78D, + 0xA78E0000A790, + 0xA7910000A792, + 0xA7930000A796, + 0xA7970000A798, + 0xA7990000A79A, + 0xA79B0000A79C, + 0xA79D0000A79E, + 0xA79F0000A7A0, + 0xA7A10000A7A2, + 0xA7A30000A7A4, + 0xA7A50000A7A6, + 0xA7A70000A7A8, + 0xA7A90000A7AA, + 0xA7AF0000A7B0, + 0xA7B50000A7B6, + 0xA7B70000A7B8, + 0xA7B90000A7BA, + 0xA7BB0000A7BC, + 0xA7BD0000A7BE, + 0xA7BF0000A7C0, + 0xA7C10000A7C2, + 0xA7C30000A7C4, + 0xA7C80000A7C9, + 0xA7CA0000A7CB, + 0xA7CD0000A7CE, + 0xA7D10000A7D2, + 0xA7D30000A7D4, + 0xA7D50000A7D6, + 0xA7D70000A7D8, + 0xA7D90000A7DA, + 0xA7DB0000A7DC, + 0xA7F60000A7F8, + 0xA7FA0000A828, + 0xA82C0000A82D, + 0xA8400000A874, + 0xA8800000A8C6, + 0xA8D00000A8DA, + 0xA8E00000A8F8, + 0xA8FB0000A8FC, + 0xA8FD0000A92E, + 0xA9300000A954, + 0xA9800000A9C1, + 0xA9CF0000A9DA, + 0xA9E00000A9FF, + 0xAA000000AA37, + 0xAA400000AA4E, + 0xAA500000AA5A, + 0xAA600000AA77, + 
0xAA7A0000AAC3, + 0xAADB0000AADE, + 0xAAE00000AAF0, + 0xAAF20000AAF7, + 0xAB010000AB07, + 0xAB090000AB0F, + 0xAB110000AB17, + 0xAB200000AB27, + 0xAB280000AB2F, + 0xAB300000AB5B, + 0xAB600000AB69, + 0xABC00000ABEB, + 0xABEC0000ABEE, + 0xABF00000ABFA, + 0xAC000000D7A4, + 0xFA0E0000FA10, + 0xFA110000FA12, + 0xFA130000FA15, + 0xFA1F0000FA20, + 0xFA210000FA22, + 0xFA230000FA25, + 0xFA270000FA2A, + 0xFB1E0000FB1F, + 0xFE200000FE30, + 0xFE730000FE74, + 0x100000001000C, + 0x1000D00010027, + 0x100280001003B, + 0x1003C0001003E, + 0x1003F0001004E, + 0x100500001005E, + 0x10080000100FB, + 0x101FD000101FE, + 0x102800001029D, + 0x102A0000102D1, + 0x102E0000102E1, + 0x1030000010320, + 0x1032D00010341, + 0x103420001034A, + 0x103500001037B, + 0x103800001039E, + 0x103A0000103C4, + 0x103C8000103D0, + 0x104280001049E, + 0x104A0000104AA, + 0x104D8000104FC, + 0x1050000010528, + 0x1053000010564, + 0x10597000105A2, + 0x105A3000105B2, + 0x105B3000105BA, + 0x105BB000105BD, + 0x105C0000105F4, + 0x1060000010737, + 0x1074000010756, + 0x1076000010768, + 0x1078000010781, + 0x1080000010806, + 0x1080800010809, + 0x1080A00010836, + 0x1083700010839, + 0x1083C0001083D, + 0x1083F00010856, + 0x1086000010877, + 0x108800001089F, + 0x108E0000108F3, + 0x108F4000108F6, + 0x1090000010916, + 0x109200001093A, + 0x10980000109B8, + 0x109BE000109C0, + 0x10A0000010A04, + 0x10A0500010A07, + 0x10A0C00010A14, + 0x10A1500010A18, + 0x10A1900010A36, + 0x10A3800010A3B, + 0x10A3F00010A40, + 0x10A6000010A7D, + 0x10A8000010A9D, + 0x10AC000010AC8, + 0x10AC900010AE7, + 0x10B0000010B36, + 0x10B4000010B56, + 0x10B6000010B73, + 0x10B8000010B92, + 0x10C0000010C49, + 0x10CC000010CF3, + 0x10D0000010D28, + 0x10D3000010D3A, + 0x10D4000010D50, + 0x10D6900010D6E, + 0x10D6F00010D86, + 0x10E8000010EAA, + 0x10EAB00010EAD, + 0x10EB000010EB2, + 0x10EC200010EC5, + 0x10EFC00010F1D, + 0x10F2700010F28, + 0x10F3000010F51, + 0x10F7000010F86, + 0x10FB000010FC5, + 0x10FE000010FF7, + 0x1100000011047, + 0x1106600011076, + 0x1107F000110BB, + 0x110C2000110C3, + 0x110D0000110E9, + 0x110F0000110FA, + 0x1110000011135, + 0x1113600011140, + 0x1114400011148, + 0x1115000011174, + 0x1117600011177, + 0x11180000111C5, + 0x111C9000111CD, + 0x111CE000111DB, + 0x111DC000111DD, + 0x1120000011212, + 0x1121300011238, + 0x1123E00011242, + 0x1128000011287, + 0x1128800011289, + 0x1128A0001128E, + 0x1128F0001129E, + 0x1129F000112A9, + 0x112B0000112EB, + 0x112F0000112FA, + 0x1130000011304, + 0x113050001130D, + 0x1130F00011311, + 0x1131300011329, + 0x1132A00011331, + 0x1133200011334, + 0x113350001133A, + 0x1133B00011345, + 0x1134700011349, + 0x1134B0001134E, + 0x1135000011351, + 0x1135700011358, + 0x1135D00011364, + 0x113660001136D, + 0x1137000011375, + 0x113800001138A, + 0x1138B0001138C, + 0x1138E0001138F, + 0x11390000113B6, + 0x113B7000113C1, + 0x113C2000113C3, + 0x113C5000113C6, + 0x113C7000113CB, + 0x113CC000113D4, + 0x113E1000113E3, + 0x114000001144B, + 0x114500001145A, + 0x1145E00011462, + 0x11480000114C6, + 0x114C7000114C8, + 0x114D0000114DA, + 0x11580000115B6, + 0x115B8000115C1, + 0x115D8000115DE, + 0x1160000011641, + 0x1164400011645, + 0x116500001165A, + 0x11680000116B9, + 0x116C0000116CA, + 0x116D0000116E4, + 0x117000001171B, + 0x1171D0001172C, + 0x117300001173A, + 0x1174000011747, + 0x118000001183B, + 0x118C0000118EA, + 0x118FF00011907, + 0x119090001190A, + 0x1190C00011914, + 0x1191500011917, + 0x1191800011936, + 0x1193700011939, + 0x1193B00011944, + 0x119500001195A, + 0x119A0000119A8, + 0x119AA000119D8, + 0x119DA000119E2, + 0x119E3000119E5, + 0x11A0000011A3F, + 0x11A4700011A48, + 
0x11A5000011A9A, + 0x11A9D00011A9E, + 0x11AB000011AF9, + 0x11BC000011BE1, + 0x11BF000011BFA, + 0x11C0000011C09, + 0x11C0A00011C37, + 0x11C3800011C41, + 0x11C5000011C5A, + 0x11C7200011C90, + 0x11C9200011CA8, + 0x11CA900011CB7, + 0x11D0000011D07, + 0x11D0800011D0A, + 0x11D0B00011D37, + 0x11D3A00011D3B, + 0x11D3C00011D3E, + 0x11D3F00011D48, + 0x11D5000011D5A, + 0x11D6000011D66, + 0x11D6700011D69, + 0x11D6A00011D8F, + 0x11D9000011D92, + 0x11D9300011D99, + 0x11DA000011DAA, + 0x11EE000011EF7, + 0x11F0000011F11, + 0x11F1200011F3B, + 0x11F3E00011F43, + 0x11F5000011F5B, + 0x11FB000011FB1, + 0x120000001239A, + 0x1248000012544, + 0x12F9000012FF1, + 0x1300000013430, + 0x1344000013456, + 0x13460000143FB, + 0x1440000014647, + 0x161000001613A, + 0x1680000016A39, + 0x16A4000016A5F, + 0x16A6000016A6A, + 0x16A7000016ABF, + 0x16AC000016ACA, + 0x16AD000016AEE, + 0x16AF000016AF5, + 0x16B0000016B37, + 0x16B4000016B44, + 0x16B5000016B5A, + 0x16B6300016B78, + 0x16B7D00016B90, + 0x16D4000016D6D, + 0x16D7000016D7A, + 0x16E6000016E80, + 0x16F0000016F4B, + 0x16F4F00016F88, + 0x16F8F00016FA0, + 0x16FE000016FE2, + 0x16FE300016FE5, + 0x16FF000016FF2, + 0x17000000187F8, + 0x1880000018CD6, + 0x18CFF00018D09, + 0x1AFF00001AFF4, + 0x1AFF50001AFFC, + 0x1AFFD0001AFFF, + 0x1B0000001B123, + 0x1B1320001B133, + 0x1B1500001B153, + 0x1B1550001B156, + 0x1B1640001B168, + 0x1B1700001B2FC, + 0x1BC000001BC6B, + 0x1BC700001BC7D, + 0x1BC800001BC89, + 0x1BC900001BC9A, + 0x1BC9D0001BC9F, + 0x1CCF00001CCFA, + 0x1CF000001CF2E, + 0x1CF300001CF47, + 0x1DA000001DA37, + 0x1DA3B0001DA6D, + 0x1DA750001DA76, + 0x1DA840001DA85, + 0x1DA9B0001DAA0, + 0x1DAA10001DAB0, + 0x1DF000001DF1F, + 0x1DF250001DF2B, + 0x1E0000001E007, + 0x1E0080001E019, + 0x1E01B0001E022, + 0x1E0230001E025, + 0x1E0260001E02B, + 0x1E08F0001E090, + 0x1E1000001E12D, + 0x1E1300001E13E, + 0x1E1400001E14A, + 0x1E14E0001E14F, + 0x1E2900001E2AF, + 0x1E2C00001E2FA, + 0x1E4D00001E4FA, + 0x1E5D00001E5FB, + 0x1E7E00001E7E7, + 0x1E7E80001E7EC, + 0x1E7ED0001E7EF, + 0x1E7F00001E7FF, + 0x1E8000001E8C5, + 0x1E8D00001E8D7, + 0x1E9220001E94C, + 0x1E9500001E95A, + 0x200000002A6E0, + 0x2A7000002B73A, + 0x2B7400002B81E, + 0x2B8200002CEA2, + 0x2CEB00002EBE1, + 0x2EBF00002EE5E, + 0x300000003134B, + 0x31350000323B0, + ), + "CONTEXTJ": (0x200C0000200E,), + "CONTEXTO": ( + 0xB7000000B8, + 0x37500000376, + 0x5F3000005F5, + 0x6600000066A, + 0x6F0000006FA, + 0x30FB000030FC, + ), +} diff --git a/.venv/lib/python3.12/site-packages/idna/intranges.py b/.venv/lib/python3.12/site-packages/idna/intranges.py new file mode 100644 index 0000000..7bfaa8d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/idna/intranges.py @@ -0,0 +1,57 @@ +""" +Given a list of integers, made up of (hopefully) a small number of long runs +of consecutive integers, compute a representation of the form +((start1, end1), (start2, end2) ...). Then answer the question "was x present +in the original list?" in time O(log(# runs)). +""" + +import bisect +from typing import List, Tuple + + +def intranges_from_list(list_: List[int]) -> Tuple[int, ...]: + """Represent a list of integers as a sequence of ranges: + ((start_0, end_0), (start_1, end_1), ...), such that the original + integers are exactly those x such that start_i <= x < end_i for some i. + + Ranges are encoded as single integers (start << 32 | end), not as tuples. 
+ """ + + sorted_list = sorted(list_) + ranges = [] + last_write = -1 + for i in range(len(sorted_list)): + if i + 1 < len(sorted_list): + if sorted_list[i] == sorted_list[i + 1] - 1: + continue + current_range = sorted_list[last_write + 1 : i + 1] + ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) + last_write = i + + return tuple(ranges) + + +def _encode_range(start: int, end: int) -> int: + return (start << 32) | end + + +def _decode_range(r: int) -> Tuple[int, int]: + return (r >> 32), (r & ((1 << 32) - 1)) + + +def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool: + """Determine if `int_` falls into one of the ranges in `ranges`.""" + tuple_ = _encode_range(int_, 0) + pos = bisect.bisect_left(ranges, tuple_) + # we could be immediately ahead of a tuple (start, end) + # with start < int_ <= end + if pos > 0: + left, right = _decode_range(ranges[pos - 1]) + if left <= int_ < right: + return True + # or we could be immediately behind a tuple (int_, end) + if pos < len(ranges): + left, _ = _decode_range(ranges[pos]) + if left == int_: + return True + return False diff --git a/.venv/lib/python3.12/site-packages/idna/package_data.py b/.venv/lib/python3.12/site-packages/idna/package_data.py new file mode 100644 index 0000000..7272c8d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/idna/package_data.py @@ -0,0 +1 @@ +__version__ = "3.11" diff --git a/.venv/lib/python3.12/site-packages/idna/py.typed b/.venv/lib/python3.12/site-packages/idna/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/idna/uts46data.py b/.venv/lib/python3.12/site-packages/idna/uts46data.py new file mode 100644 index 0000000..4610b71 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/idna/uts46data.py @@ -0,0 +1,8841 @@ +# This file is automatically generated by tools/idna-data +# vim: set fileencoding=utf-8 : + +from typing import List, Tuple, Union + +"""IDNA Mapping Table from UTS46.""" + + +__version__ = "16.0.0" + + +def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x0, "V"), + (0x1, "V"), + (0x2, "V"), + (0x3, "V"), + (0x4, "V"), + (0x5, "V"), + (0x6, "V"), + (0x7, "V"), + (0x8, "V"), + (0x9, "V"), + (0xA, "V"), + (0xB, "V"), + (0xC, "V"), + (0xD, "V"), + (0xE, "V"), + (0xF, "V"), + (0x10, "V"), + (0x11, "V"), + (0x12, "V"), + (0x13, "V"), + (0x14, "V"), + (0x15, "V"), + (0x16, "V"), + (0x17, "V"), + (0x18, "V"), + (0x19, "V"), + (0x1A, "V"), + (0x1B, "V"), + (0x1C, "V"), + (0x1D, "V"), + (0x1E, "V"), + (0x1F, "V"), + (0x20, "V"), + (0x21, "V"), + (0x22, "V"), + (0x23, "V"), + (0x24, "V"), + (0x25, "V"), + (0x26, "V"), + (0x27, "V"), + (0x28, "V"), + (0x29, "V"), + (0x2A, "V"), + (0x2B, "V"), + (0x2C, "V"), + (0x2D, "V"), + (0x2E, "V"), + (0x2F, "V"), + (0x30, "V"), + (0x31, "V"), + (0x32, "V"), + (0x33, "V"), + (0x34, "V"), + (0x35, "V"), + (0x36, "V"), + (0x37, "V"), + (0x38, "V"), + (0x39, "V"), + (0x3A, "V"), + (0x3B, "V"), + (0x3C, "V"), + (0x3D, "V"), + (0x3E, "V"), + (0x3F, "V"), + (0x40, "V"), + (0x41, "M", "a"), + (0x42, "M", "b"), + (0x43, "M", "c"), + (0x44, "M", "d"), + (0x45, "M", "e"), + (0x46, "M", "f"), + (0x47, "M", "g"), + (0x48, "M", "h"), + (0x49, "M", "i"), + (0x4A, "M", "j"), + (0x4B, "M", "k"), + (0x4C, "M", "l"), + (0x4D, "M", "m"), + (0x4E, "M", "n"), + (0x4F, "M", "o"), + (0x50, "M", "p"), + (0x51, "M", "q"), + (0x52, "M", "r"), + (0x53, "M", "s"), + (0x54, "M", "t"), + (0x55, "M", "u"), + (0x56, "M", "v"), + (0x57, "M", "w"), + (0x58, "M", "x"), + (0x59, "M", 
"y"), + (0x5A, "M", "z"), + (0x5B, "V"), + (0x5C, "V"), + (0x5D, "V"), + (0x5E, "V"), + (0x5F, "V"), + (0x60, "V"), + (0x61, "V"), + (0x62, "V"), + (0x63, "V"), + ] + + +def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x64, "V"), + (0x65, "V"), + (0x66, "V"), + (0x67, "V"), + (0x68, "V"), + (0x69, "V"), + (0x6A, "V"), + (0x6B, "V"), + (0x6C, "V"), + (0x6D, "V"), + (0x6E, "V"), + (0x6F, "V"), + (0x70, "V"), + (0x71, "V"), + (0x72, "V"), + (0x73, "V"), + (0x74, "V"), + (0x75, "V"), + (0x76, "V"), + (0x77, "V"), + (0x78, "V"), + (0x79, "V"), + (0x7A, "V"), + (0x7B, "V"), + (0x7C, "V"), + (0x7D, "V"), + (0x7E, "V"), + (0x7F, "V"), + (0x80, "X"), + (0x81, "X"), + (0x82, "X"), + (0x83, "X"), + (0x84, "X"), + (0x85, "X"), + (0x86, "X"), + (0x87, "X"), + (0x88, "X"), + (0x89, "X"), + (0x8A, "X"), + (0x8B, "X"), + (0x8C, "X"), + (0x8D, "X"), + (0x8E, "X"), + (0x8F, "X"), + (0x90, "X"), + (0x91, "X"), + (0x92, "X"), + (0x93, "X"), + (0x94, "X"), + (0x95, "X"), + (0x96, "X"), + (0x97, "X"), + (0x98, "X"), + (0x99, "X"), + (0x9A, "X"), + (0x9B, "X"), + (0x9C, "X"), + (0x9D, "X"), + (0x9E, "X"), + (0x9F, "X"), + (0xA0, "M", " "), + (0xA1, "V"), + (0xA2, "V"), + (0xA3, "V"), + (0xA4, "V"), + (0xA5, "V"), + (0xA6, "V"), + (0xA7, "V"), + (0xA8, "M", " ̈"), + (0xA9, "V"), + (0xAA, "M", "a"), + (0xAB, "V"), + (0xAC, "V"), + (0xAD, "I"), + (0xAE, "V"), + (0xAF, "M", " ̄"), + (0xB0, "V"), + (0xB1, "V"), + (0xB2, "M", "2"), + (0xB3, "M", "3"), + (0xB4, "M", " ́"), + (0xB5, "M", "μ"), + (0xB6, "V"), + (0xB7, "V"), + (0xB8, "M", " ̧"), + (0xB9, "M", "1"), + (0xBA, "M", "o"), + (0xBB, "V"), + (0xBC, "M", "1⁄4"), + (0xBD, "M", "1⁄2"), + (0xBE, "M", "3⁄4"), + (0xBF, "V"), + (0xC0, "M", "à"), + (0xC1, "M", "á"), + (0xC2, "M", "â"), + (0xC3, "M", "ã"), + (0xC4, "M", "ä"), + (0xC5, "M", "å"), + (0xC6, "M", "æ"), + (0xC7, "M", "ç"), + ] + + +def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xC8, "M", "è"), + (0xC9, "M", "é"), + (0xCA, "M", "ê"), + (0xCB, "M", "ë"), + (0xCC, "M", "ì"), + (0xCD, "M", "í"), + (0xCE, "M", "î"), + (0xCF, "M", "ï"), + (0xD0, "M", "ð"), + (0xD1, "M", "ñ"), + (0xD2, "M", "ò"), + (0xD3, "M", "ó"), + (0xD4, "M", "ô"), + (0xD5, "M", "õ"), + (0xD6, "M", "ö"), + (0xD7, "V"), + (0xD8, "M", "ø"), + (0xD9, "M", "ù"), + (0xDA, "M", "ú"), + (0xDB, "M", "û"), + (0xDC, "M", "ü"), + (0xDD, "M", "ý"), + (0xDE, "M", "þ"), + (0xDF, "D", "ss"), + (0xE0, "V"), + (0xE1, "V"), + (0xE2, "V"), + (0xE3, "V"), + (0xE4, "V"), + (0xE5, "V"), + (0xE6, "V"), + (0xE7, "V"), + (0xE8, "V"), + (0xE9, "V"), + (0xEA, "V"), + (0xEB, "V"), + (0xEC, "V"), + (0xED, "V"), + (0xEE, "V"), + (0xEF, "V"), + (0xF0, "V"), + (0xF1, "V"), + (0xF2, "V"), + (0xF3, "V"), + (0xF4, "V"), + (0xF5, "V"), + (0xF6, "V"), + (0xF7, "V"), + (0xF8, "V"), + (0xF9, "V"), + (0xFA, "V"), + (0xFB, "V"), + (0xFC, "V"), + (0xFD, "V"), + (0xFE, "V"), + (0xFF, "V"), + (0x100, "M", "ā"), + (0x101, "V"), + (0x102, "M", "ă"), + (0x103, "V"), + (0x104, "M", "ą"), + (0x105, "V"), + (0x106, "M", "ć"), + (0x107, "V"), + (0x108, "M", "ĉ"), + (0x109, "V"), + (0x10A, "M", "ċ"), + (0x10B, "V"), + (0x10C, "M", "č"), + (0x10D, "V"), + (0x10E, "M", "ď"), + (0x10F, "V"), + (0x110, "M", "đ"), + (0x111, "V"), + (0x112, "M", "ē"), + (0x113, "V"), + (0x114, "M", "ĕ"), + (0x115, "V"), + (0x116, "M", "ė"), + (0x117, "V"), + (0x118, "M", "ę"), + (0x119, "V"), + (0x11A, "M", "ě"), + (0x11B, "V"), + (0x11C, "M", "ĝ"), + (0x11D, "V"), + (0x11E, "M", "ğ"), + (0x11F, "V"), + (0x120, "M", "ġ"), + (0x121, "V"), + (0x122, 
"M", "ģ"), + (0x123, "V"), + (0x124, "M", "ĥ"), + (0x125, "V"), + (0x126, "M", "ħ"), + (0x127, "V"), + (0x128, "M", "ĩ"), + (0x129, "V"), + (0x12A, "M", "ī"), + (0x12B, "V"), + ] + + +def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x12C, "M", "ĭ"), + (0x12D, "V"), + (0x12E, "M", "į"), + (0x12F, "V"), + (0x130, "M", "i̇"), + (0x131, "V"), + (0x132, "M", "ij"), + (0x134, "M", "ĵ"), + (0x135, "V"), + (0x136, "M", "ķ"), + (0x137, "V"), + (0x139, "M", "ĺ"), + (0x13A, "V"), + (0x13B, "M", "ļ"), + (0x13C, "V"), + (0x13D, "M", "ľ"), + (0x13E, "V"), + (0x13F, "M", "l·"), + (0x141, "M", "ł"), + (0x142, "V"), + (0x143, "M", "ń"), + (0x144, "V"), + (0x145, "M", "ņ"), + (0x146, "V"), + (0x147, "M", "ň"), + (0x148, "V"), + (0x149, "M", "ʼn"), + (0x14A, "M", "ŋ"), + (0x14B, "V"), + (0x14C, "M", "ō"), + (0x14D, "V"), + (0x14E, "M", "ŏ"), + (0x14F, "V"), + (0x150, "M", "ő"), + (0x151, "V"), + (0x152, "M", "œ"), + (0x153, "V"), + (0x154, "M", "ŕ"), + (0x155, "V"), + (0x156, "M", "ŗ"), + (0x157, "V"), + (0x158, "M", "ř"), + (0x159, "V"), + (0x15A, "M", "ś"), + (0x15B, "V"), + (0x15C, "M", "ŝ"), + (0x15D, "V"), + (0x15E, "M", "ş"), + (0x15F, "V"), + (0x160, "M", "š"), + (0x161, "V"), + (0x162, "M", "ţ"), + (0x163, "V"), + (0x164, "M", "ť"), + (0x165, "V"), + (0x166, "M", "ŧ"), + (0x167, "V"), + (0x168, "M", "ũ"), + (0x169, "V"), + (0x16A, "M", "ū"), + (0x16B, "V"), + (0x16C, "M", "ŭ"), + (0x16D, "V"), + (0x16E, "M", "ů"), + (0x16F, "V"), + (0x170, "M", "ű"), + (0x171, "V"), + (0x172, "M", "ų"), + (0x173, "V"), + (0x174, "M", "ŵ"), + (0x175, "V"), + (0x176, "M", "ŷ"), + (0x177, "V"), + (0x178, "M", "ÿ"), + (0x179, "M", "ź"), + (0x17A, "V"), + (0x17B, "M", "ż"), + (0x17C, "V"), + (0x17D, "M", "ž"), + (0x17E, "V"), + (0x17F, "M", "s"), + (0x180, "V"), + (0x181, "M", "ɓ"), + (0x182, "M", "ƃ"), + (0x183, "V"), + (0x184, "M", "ƅ"), + (0x185, "V"), + (0x186, "M", "ɔ"), + (0x187, "M", "ƈ"), + (0x188, "V"), + (0x189, "M", "ɖ"), + (0x18A, "M", "ɗ"), + (0x18B, "M", "ƌ"), + (0x18C, "V"), + (0x18E, "M", "ǝ"), + (0x18F, "M", "ə"), + (0x190, "M", "ɛ"), + (0x191, "M", "ƒ"), + (0x192, "V"), + (0x193, "M", "ɠ"), + ] + + +def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x194, "M", "ɣ"), + (0x195, "V"), + (0x196, "M", "ɩ"), + (0x197, "M", "ɨ"), + (0x198, "M", "ƙ"), + (0x199, "V"), + (0x19C, "M", "ɯ"), + (0x19D, "M", "ɲ"), + (0x19E, "V"), + (0x19F, "M", "ɵ"), + (0x1A0, "M", "ơ"), + (0x1A1, "V"), + (0x1A2, "M", "ƣ"), + (0x1A3, "V"), + (0x1A4, "M", "ƥ"), + (0x1A5, "V"), + (0x1A6, "M", "ʀ"), + (0x1A7, "M", "ƨ"), + (0x1A8, "V"), + (0x1A9, "M", "ʃ"), + (0x1AA, "V"), + (0x1AC, "M", "ƭ"), + (0x1AD, "V"), + (0x1AE, "M", "ʈ"), + (0x1AF, "M", "ư"), + (0x1B0, "V"), + (0x1B1, "M", "ʊ"), + (0x1B2, "M", "ʋ"), + (0x1B3, "M", "ƴ"), + (0x1B4, "V"), + (0x1B5, "M", "ƶ"), + (0x1B6, "V"), + (0x1B7, "M", "ʒ"), + (0x1B8, "M", "ƹ"), + (0x1B9, "V"), + (0x1BC, "M", "ƽ"), + (0x1BD, "V"), + (0x1C4, "M", "dž"), + (0x1C7, "M", "lj"), + (0x1CA, "M", "nj"), + (0x1CD, "M", "ǎ"), + (0x1CE, "V"), + (0x1CF, "M", "ǐ"), + (0x1D0, "V"), + (0x1D1, "M", "ǒ"), + (0x1D2, "V"), + (0x1D3, "M", "ǔ"), + (0x1D4, "V"), + (0x1D5, "M", "ǖ"), + (0x1D6, "V"), + (0x1D7, "M", "ǘ"), + (0x1D8, "V"), + (0x1D9, "M", "ǚ"), + (0x1DA, "V"), + (0x1DB, "M", "ǜ"), + (0x1DC, "V"), + (0x1DE, "M", "ǟ"), + (0x1DF, "V"), + (0x1E0, "M", "ǡ"), + (0x1E1, "V"), + (0x1E2, "M", "ǣ"), + (0x1E3, "V"), + (0x1E4, "M", "ǥ"), + (0x1E5, "V"), + (0x1E6, "M", "ǧ"), + (0x1E7, "V"), + (0x1E8, "M", "ǩ"), + (0x1E9, "V"), + (0x1EA, "M", "ǫ"), + 
(0x1EB, "V"), + (0x1EC, "M", "ǭ"), + (0x1ED, "V"), + (0x1EE, "M", "ǯ"), + (0x1EF, "V"), + (0x1F1, "M", "dz"), + (0x1F4, "M", "ǵ"), + (0x1F5, "V"), + (0x1F6, "M", "ƕ"), + (0x1F7, "M", "ƿ"), + (0x1F8, "M", "ǹ"), + (0x1F9, "V"), + (0x1FA, "M", "ǻ"), + (0x1FB, "V"), + (0x1FC, "M", "ǽ"), + (0x1FD, "V"), + (0x1FE, "M", "ǿ"), + (0x1FF, "V"), + (0x200, "M", "ȁ"), + (0x201, "V"), + (0x202, "M", "ȃ"), + (0x203, "V"), + (0x204, "M", "ȅ"), + (0x205, "V"), + (0x206, "M", "ȇ"), + (0x207, "V"), + (0x208, "M", "ȉ"), + (0x209, "V"), + (0x20A, "M", "ȋ"), + (0x20B, "V"), + (0x20C, "M", "ȍ"), + ] + + +def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x20D, "V"), + (0x20E, "M", "ȏ"), + (0x20F, "V"), + (0x210, "M", "ȑ"), + (0x211, "V"), + (0x212, "M", "ȓ"), + (0x213, "V"), + (0x214, "M", "ȕ"), + (0x215, "V"), + (0x216, "M", "ȗ"), + (0x217, "V"), + (0x218, "M", "ș"), + (0x219, "V"), + (0x21A, "M", "ț"), + (0x21B, "V"), + (0x21C, "M", "ȝ"), + (0x21D, "V"), + (0x21E, "M", "ȟ"), + (0x21F, "V"), + (0x220, "M", "ƞ"), + (0x221, "V"), + (0x222, "M", "ȣ"), + (0x223, "V"), + (0x224, "M", "ȥ"), + (0x225, "V"), + (0x226, "M", "ȧ"), + (0x227, "V"), + (0x228, "M", "ȩ"), + (0x229, "V"), + (0x22A, "M", "ȫ"), + (0x22B, "V"), + (0x22C, "M", "ȭ"), + (0x22D, "V"), + (0x22E, "M", "ȯ"), + (0x22F, "V"), + (0x230, "M", "ȱ"), + (0x231, "V"), + (0x232, "M", "ȳ"), + (0x233, "V"), + (0x23A, "M", "ⱥ"), + (0x23B, "M", "ȼ"), + (0x23C, "V"), + (0x23D, "M", "ƚ"), + (0x23E, "M", "ⱦ"), + (0x23F, "V"), + (0x241, "M", "ɂ"), + (0x242, "V"), + (0x243, "M", "ƀ"), + (0x244, "M", "ʉ"), + (0x245, "M", "ʌ"), + (0x246, "M", "ɇ"), + (0x247, "V"), + (0x248, "M", "ɉ"), + (0x249, "V"), + (0x24A, "M", "ɋ"), + (0x24B, "V"), + (0x24C, "M", "ɍ"), + (0x24D, "V"), + (0x24E, "M", "ɏ"), + (0x24F, "V"), + (0x2B0, "M", "h"), + (0x2B1, "M", "ɦ"), + (0x2B2, "M", "j"), + (0x2B3, "M", "r"), + (0x2B4, "M", "ɹ"), + (0x2B5, "M", "ɻ"), + (0x2B6, "M", "ʁ"), + (0x2B7, "M", "w"), + (0x2B8, "M", "y"), + (0x2B9, "V"), + (0x2D8, "M", " ̆"), + (0x2D9, "M", " ̇"), + (0x2DA, "M", " ̊"), + (0x2DB, "M", " ̨"), + (0x2DC, "M", " ̃"), + (0x2DD, "M", " ̋"), + (0x2DE, "V"), + (0x2E0, "M", "ɣ"), + (0x2E1, "M", "l"), + (0x2E2, "M", "s"), + (0x2E3, "M", "x"), + (0x2E4, "M", "ʕ"), + (0x2E5, "V"), + (0x340, "M", "̀"), + (0x341, "M", "́"), + (0x342, "V"), + (0x343, "M", "̓"), + (0x344, "M", "̈́"), + (0x345, "M", "ι"), + (0x346, "V"), + (0x34F, "I"), + (0x350, "V"), + (0x370, "M", "ͱ"), + (0x371, "V"), + (0x372, "M", "ͳ"), + (0x373, "V"), + (0x374, "M", "ʹ"), + (0x375, "V"), + (0x376, "M", "ͷ"), + (0x377, "V"), + ] + + +def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x378, "X"), + (0x37A, "M", " ι"), + (0x37B, "V"), + (0x37E, "M", ";"), + (0x37F, "M", "ϳ"), + (0x380, "X"), + (0x384, "M", " ́"), + (0x385, "M", " ̈́"), + (0x386, "M", "ά"), + (0x387, "M", "·"), + (0x388, "M", "έ"), + (0x389, "M", "ή"), + (0x38A, "M", "ί"), + (0x38B, "X"), + (0x38C, "M", "ό"), + (0x38D, "X"), + (0x38E, "M", "ύ"), + (0x38F, "M", "ώ"), + (0x390, "V"), + (0x391, "M", "α"), + (0x392, "M", "β"), + (0x393, "M", "γ"), + (0x394, "M", "δ"), + (0x395, "M", "ε"), + (0x396, "M", "ζ"), + (0x397, "M", "η"), + (0x398, "M", "θ"), + (0x399, "M", "ι"), + (0x39A, "M", "κ"), + (0x39B, "M", "λ"), + (0x39C, "M", "μ"), + (0x39D, "M", "ν"), + (0x39E, "M", "ξ"), + (0x39F, "M", "ο"), + (0x3A0, "M", "π"), + (0x3A1, "M", "ρ"), + (0x3A2, "X"), + (0x3A3, "M", "σ"), + (0x3A4, "M", "τ"), + (0x3A5, "M", "υ"), + (0x3A6, "M", "φ"), + (0x3A7, "M", "χ"), + (0x3A8, "M", "ψ"), + (0x3A9, "M", 
"ω"), + (0x3AA, "M", "ϊ"), + (0x3AB, "M", "ϋ"), + (0x3AC, "V"), + (0x3C2, "D", "σ"), + (0x3C3, "V"), + (0x3CF, "M", "ϗ"), + (0x3D0, "M", "β"), + (0x3D1, "M", "θ"), + (0x3D2, "M", "υ"), + (0x3D3, "M", "ύ"), + (0x3D4, "M", "ϋ"), + (0x3D5, "M", "φ"), + (0x3D6, "M", "π"), + (0x3D7, "V"), + (0x3D8, "M", "ϙ"), + (0x3D9, "V"), + (0x3DA, "M", "ϛ"), + (0x3DB, "V"), + (0x3DC, "M", "ϝ"), + (0x3DD, "V"), + (0x3DE, "M", "ϟ"), + (0x3DF, "V"), + (0x3E0, "M", "ϡ"), + (0x3E1, "V"), + (0x3E2, "M", "ϣ"), + (0x3E3, "V"), + (0x3E4, "M", "ϥ"), + (0x3E5, "V"), + (0x3E6, "M", "ϧ"), + (0x3E7, "V"), + (0x3E8, "M", "ϩ"), + (0x3E9, "V"), + (0x3EA, "M", "ϫ"), + (0x3EB, "V"), + (0x3EC, "M", "ϭ"), + (0x3ED, "V"), + (0x3EE, "M", "ϯ"), + (0x3EF, "V"), + (0x3F0, "M", "κ"), + (0x3F1, "M", "ρ"), + (0x3F2, "M", "σ"), + (0x3F3, "V"), + (0x3F4, "M", "θ"), + (0x3F5, "M", "ε"), + (0x3F6, "V"), + (0x3F7, "M", "ϸ"), + (0x3F8, "V"), + (0x3F9, "M", "σ"), + (0x3FA, "M", "ϻ"), + (0x3FB, "V"), + (0x3FD, "M", "ͻ"), + (0x3FE, "M", "ͼ"), + (0x3FF, "M", "ͽ"), + (0x400, "M", "ѐ"), + (0x401, "M", "ё"), + (0x402, "M", "ђ"), + ] + + +def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x403, "M", "ѓ"), + (0x404, "M", "є"), + (0x405, "M", "ѕ"), + (0x406, "M", "і"), + (0x407, "M", "ї"), + (0x408, "M", "ј"), + (0x409, "M", "љ"), + (0x40A, "M", "њ"), + (0x40B, "M", "ћ"), + (0x40C, "M", "ќ"), + (0x40D, "M", "ѝ"), + (0x40E, "M", "ў"), + (0x40F, "M", "џ"), + (0x410, "M", "а"), + (0x411, "M", "б"), + (0x412, "M", "в"), + (0x413, "M", "г"), + (0x414, "M", "д"), + (0x415, "M", "е"), + (0x416, "M", "ж"), + (0x417, "M", "з"), + (0x418, "M", "и"), + (0x419, "M", "й"), + (0x41A, "M", "к"), + (0x41B, "M", "л"), + (0x41C, "M", "м"), + (0x41D, "M", "н"), + (0x41E, "M", "о"), + (0x41F, "M", "п"), + (0x420, "M", "р"), + (0x421, "M", "с"), + (0x422, "M", "т"), + (0x423, "M", "у"), + (0x424, "M", "ф"), + (0x425, "M", "х"), + (0x426, "M", "ц"), + (0x427, "M", "ч"), + (0x428, "M", "ш"), + (0x429, "M", "щ"), + (0x42A, "M", "ъ"), + (0x42B, "M", "ы"), + (0x42C, "M", "ь"), + (0x42D, "M", "э"), + (0x42E, "M", "ю"), + (0x42F, "M", "я"), + (0x430, "V"), + (0x460, "M", "ѡ"), + (0x461, "V"), + (0x462, "M", "ѣ"), + (0x463, "V"), + (0x464, "M", "ѥ"), + (0x465, "V"), + (0x466, "M", "ѧ"), + (0x467, "V"), + (0x468, "M", "ѩ"), + (0x469, "V"), + (0x46A, "M", "ѫ"), + (0x46B, "V"), + (0x46C, "M", "ѭ"), + (0x46D, "V"), + (0x46E, "M", "ѯ"), + (0x46F, "V"), + (0x470, "M", "ѱ"), + (0x471, "V"), + (0x472, "M", "ѳ"), + (0x473, "V"), + (0x474, "M", "ѵ"), + (0x475, "V"), + (0x476, "M", "ѷ"), + (0x477, "V"), + (0x478, "M", "ѹ"), + (0x479, "V"), + (0x47A, "M", "ѻ"), + (0x47B, "V"), + (0x47C, "M", "ѽ"), + (0x47D, "V"), + (0x47E, "M", "ѿ"), + (0x47F, "V"), + (0x480, "M", "ҁ"), + (0x481, "V"), + (0x48A, "M", "ҋ"), + (0x48B, "V"), + (0x48C, "M", "ҍ"), + (0x48D, "V"), + (0x48E, "M", "ҏ"), + (0x48F, "V"), + (0x490, "M", "ґ"), + (0x491, "V"), + (0x492, "M", "ғ"), + (0x493, "V"), + (0x494, "M", "ҕ"), + (0x495, "V"), + (0x496, "M", "җ"), + (0x497, "V"), + (0x498, "M", "ҙ"), + (0x499, "V"), + (0x49A, "M", "қ"), + (0x49B, "V"), + (0x49C, "M", "ҝ"), + (0x49D, "V"), + ] + + +def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x49E, "M", "ҟ"), + (0x49F, "V"), + (0x4A0, "M", "ҡ"), + (0x4A1, "V"), + (0x4A2, "M", "ң"), + (0x4A3, "V"), + (0x4A4, "M", "ҥ"), + (0x4A5, "V"), + (0x4A6, "M", "ҧ"), + (0x4A7, "V"), + (0x4A8, "M", "ҩ"), + (0x4A9, "V"), + (0x4AA, "M", "ҫ"), + (0x4AB, "V"), + (0x4AC, "M", "ҭ"), + (0x4AD, "V"), + (0x4AE, "M", "ү"), + (0x4AF, 
"V"), + (0x4B0, "M", "ұ"), + (0x4B1, "V"), + (0x4B2, "M", "ҳ"), + (0x4B3, "V"), + (0x4B4, "M", "ҵ"), + (0x4B5, "V"), + (0x4B6, "M", "ҷ"), + (0x4B7, "V"), + (0x4B8, "M", "ҹ"), + (0x4B9, "V"), + (0x4BA, "M", "һ"), + (0x4BB, "V"), + (0x4BC, "M", "ҽ"), + (0x4BD, "V"), + (0x4BE, "M", "ҿ"), + (0x4BF, "V"), + (0x4C0, "M", "ӏ"), + (0x4C1, "M", "ӂ"), + (0x4C2, "V"), + (0x4C3, "M", "ӄ"), + (0x4C4, "V"), + (0x4C5, "M", "ӆ"), + (0x4C6, "V"), + (0x4C7, "M", "ӈ"), + (0x4C8, "V"), + (0x4C9, "M", "ӊ"), + (0x4CA, "V"), + (0x4CB, "M", "ӌ"), + (0x4CC, "V"), + (0x4CD, "M", "ӎ"), + (0x4CE, "V"), + (0x4D0, "M", "ӑ"), + (0x4D1, "V"), + (0x4D2, "M", "ӓ"), + (0x4D3, "V"), + (0x4D4, "M", "ӕ"), + (0x4D5, "V"), + (0x4D6, "M", "ӗ"), + (0x4D7, "V"), + (0x4D8, "M", "ә"), + (0x4D9, "V"), + (0x4DA, "M", "ӛ"), + (0x4DB, "V"), + (0x4DC, "M", "ӝ"), + (0x4DD, "V"), + (0x4DE, "M", "ӟ"), + (0x4DF, "V"), + (0x4E0, "M", "ӡ"), + (0x4E1, "V"), + (0x4E2, "M", "ӣ"), + (0x4E3, "V"), + (0x4E4, "M", "ӥ"), + (0x4E5, "V"), + (0x4E6, "M", "ӧ"), + (0x4E7, "V"), + (0x4E8, "M", "ө"), + (0x4E9, "V"), + (0x4EA, "M", "ӫ"), + (0x4EB, "V"), + (0x4EC, "M", "ӭ"), + (0x4ED, "V"), + (0x4EE, "M", "ӯ"), + (0x4EF, "V"), + (0x4F0, "M", "ӱ"), + (0x4F1, "V"), + (0x4F2, "M", "ӳ"), + (0x4F3, "V"), + (0x4F4, "M", "ӵ"), + (0x4F5, "V"), + (0x4F6, "M", "ӷ"), + (0x4F7, "V"), + (0x4F8, "M", "ӹ"), + (0x4F9, "V"), + (0x4FA, "M", "ӻ"), + (0x4FB, "V"), + (0x4FC, "M", "ӽ"), + (0x4FD, "V"), + (0x4FE, "M", "ӿ"), + (0x4FF, "V"), + (0x500, "M", "ԁ"), + (0x501, "V"), + (0x502, "M", "ԃ"), + ] + + +def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x503, "V"), + (0x504, "M", "ԅ"), + (0x505, "V"), + (0x506, "M", "ԇ"), + (0x507, "V"), + (0x508, "M", "ԉ"), + (0x509, "V"), + (0x50A, "M", "ԋ"), + (0x50B, "V"), + (0x50C, "M", "ԍ"), + (0x50D, "V"), + (0x50E, "M", "ԏ"), + (0x50F, "V"), + (0x510, "M", "ԑ"), + (0x511, "V"), + (0x512, "M", "ԓ"), + (0x513, "V"), + (0x514, "M", "ԕ"), + (0x515, "V"), + (0x516, "M", "ԗ"), + (0x517, "V"), + (0x518, "M", "ԙ"), + (0x519, "V"), + (0x51A, "M", "ԛ"), + (0x51B, "V"), + (0x51C, "M", "ԝ"), + (0x51D, "V"), + (0x51E, "M", "ԟ"), + (0x51F, "V"), + (0x520, "M", "ԡ"), + (0x521, "V"), + (0x522, "M", "ԣ"), + (0x523, "V"), + (0x524, "M", "ԥ"), + (0x525, "V"), + (0x526, "M", "ԧ"), + (0x527, "V"), + (0x528, "M", "ԩ"), + (0x529, "V"), + (0x52A, "M", "ԫ"), + (0x52B, "V"), + (0x52C, "M", "ԭ"), + (0x52D, "V"), + (0x52E, "M", "ԯ"), + (0x52F, "V"), + (0x530, "X"), + (0x531, "M", "ա"), + (0x532, "M", "բ"), + (0x533, "M", "գ"), + (0x534, "M", "դ"), + (0x535, "M", "ե"), + (0x536, "M", "զ"), + (0x537, "M", "է"), + (0x538, "M", "ը"), + (0x539, "M", "թ"), + (0x53A, "M", "ժ"), + (0x53B, "M", "ի"), + (0x53C, "M", "լ"), + (0x53D, "M", "խ"), + (0x53E, "M", "ծ"), + (0x53F, "M", "կ"), + (0x540, "M", "հ"), + (0x541, "M", "ձ"), + (0x542, "M", "ղ"), + (0x543, "M", "ճ"), + (0x544, "M", "մ"), + (0x545, "M", "յ"), + (0x546, "M", "ն"), + (0x547, "M", "շ"), + (0x548, "M", "ո"), + (0x549, "M", "չ"), + (0x54A, "M", "պ"), + (0x54B, "M", "ջ"), + (0x54C, "M", "ռ"), + (0x54D, "M", "ս"), + (0x54E, "M", "վ"), + (0x54F, "M", "տ"), + (0x550, "M", "ր"), + (0x551, "M", "ց"), + (0x552, "M", "ւ"), + (0x553, "M", "փ"), + (0x554, "M", "ք"), + (0x555, "M", "օ"), + (0x556, "M", "ֆ"), + (0x557, "X"), + (0x559, "V"), + (0x587, "M", "եւ"), + (0x588, "V"), + (0x58B, "X"), + (0x58D, "V"), + (0x590, "X"), + (0x591, "V"), + (0x5C8, "X"), + (0x5D0, "V"), + (0x5EB, "X"), + (0x5EF, "V"), + (0x5F5, "X"), + (0x606, "V"), + (0x61C, "X"), + (0x61D, "V"), + ] + + +def _seg_10() -> 
List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x675, "M", "اٴ"), + (0x676, "M", "وٴ"), + (0x677, "M", "ۇٴ"), + (0x678, "M", "يٴ"), + (0x679, "V"), + (0x6DD, "X"), + (0x6DE, "V"), + (0x70E, "X"), + (0x710, "V"), + (0x74B, "X"), + (0x74D, "V"), + (0x7B2, "X"), + (0x7C0, "V"), + (0x7FB, "X"), + (0x7FD, "V"), + (0x82E, "X"), + (0x830, "V"), + (0x83F, "X"), + (0x840, "V"), + (0x85C, "X"), + (0x85E, "V"), + (0x85F, "X"), + (0x860, "V"), + (0x86B, "X"), + (0x870, "V"), + (0x88F, "X"), + (0x897, "V"), + (0x8E2, "X"), + (0x8E3, "V"), + (0x958, "M", "क़"), + (0x959, "M", "ख़"), + (0x95A, "M", "ग़"), + (0x95B, "M", "ज़"), + (0x95C, "M", "ड़"), + (0x95D, "M", "ढ़"), + (0x95E, "M", "फ़"), + (0x95F, "M", "य़"), + (0x960, "V"), + (0x984, "X"), + (0x985, "V"), + (0x98D, "X"), + (0x98F, "V"), + (0x991, "X"), + (0x993, "V"), + (0x9A9, "X"), + (0x9AA, "V"), + (0x9B1, "X"), + (0x9B2, "V"), + (0x9B3, "X"), + (0x9B6, "V"), + (0x9BA, "X"), + (0x9BC, "V"), + (0x9C5, "X"), + (0x9C7, "V"), + (0x9C9, "X"), + (0x9CB, "V"), + (0x9CF, "X"), + (0x9D7, "V"), + (0x9D8, "X"), + (0x9DC, "M", "ড়"), + (0x9DD, "M", "ঢ়"), + (0x9DE, "X"), + (0x9DF, "M", "য়"), + (0x9E0, "V"), + (0x9E4, "X"), + (0x9E6, "V"), + (0x9FF, "X"), + (0xA01, "V"), + (0xA04, "X"), + (0xA05, "V"), + (0xA0B, "X"), + (0xA0F, "V"), + (0xA11, "X"), + (0xA13, "V"), + (0xA29, "X"), + (0xA2A, "V"), + (0xA31, "X"), + (0xA32, "V"), + (0xA33, "M", "ਲ਼"), + (0xA34, "X"), + (0xA35, "V"), + (0xA36, "M", "ਸ਼"), + (0xA37, "X"), + (0xA38, "V"), + (0xA3A, "X"), + (0xA3C, "V"), + (0xA3D, "X"), + (0xA3E, "V"), + (0xA43, "X"), + (0xA47, "V"), + (0xA49, "X"), + (0xA4B, "V"), + (0xA4E, "X"), + (0xA51, "V"), + (0xA52, "X"), + (0xA59, "M", "ਖ਼"), + (0xA5A, "M", "ਗ਼"), + (0xA5B, "M", "ਜ਼"), + (0xA5C, "V"), + (0xA5D, "X"), + ] + + +def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA5E, "M", "ਫ਼"), + (0xA5F, "X"), + (0xA66, "V"), + (0xA77, "X"), + (0xA81, "V"), + (0xA84, "X"), + (0xA85, "V"), + (0xA8E, "X"), + (0xA8F, "V"), + (0xA92, "X"), + (0xA93, "V"), + (0xAA9, "X"), + (0xAAA, "V"), + (0xAB1, "X"), + (0xAB2, "V"), + (0xAB4, "X"), + (0xAB5, "V"), + (0xABA, "X"), + (0xABC, "V"), + (0xAC6, "X"), + (0xAC7, "V"), + (0xACA, "X"), + (0xACB, "V"), + (0xACE, "X"), + (0xAD0, "V"), + (0xAD1, "X"), + (0xAE0, "V"), + (0xAE4, "X"), + (0xAE6, "V"), + (0xAF2, "X"), + (0xAF9, "V"), + (0xB00, "X"), + (0xB01, "V"), + (0xB04, "X"), + (0xB05, "V"), + (0xB0D, "X"), + (0xB0F, "V"), + (0xB11, "X"), + (0xB13, "V"), + (0xB29, "X"), + (0xB2A, "V"), + (0xB31, "X"), + (0xB32, "V"), + (0xB34, "X"), + (0xB35, "V"), + (0xB3A, "X"), + (0xB3C, "V"), + (0xB45, "X"), + (0xB47, "V"), + (0xB49, "X"), + (0xB4B, "V"), + (0xB4E, "X"), + (0xB55, "V"), + (0xB58, "X"), + (0xB5C, "M", "ଡ଼"), + (0xB5D, "M", "ଢ଼"), + (0xB5E, "X"), + (0xB5F, "V"), + (0xB64, "X"), + (0xB66, "V"), + (0xB78, "X"), + (0xB82, "V"), + (0xB84, "X"), + (0xB85, "V"), + (0xB8B, "X"), + (0xB8E, "V"), + (0xB91, "X"), + (0xB92, "V"), + (0xB96, "X"), + (0xB99, "V"), + (0xB9B, "X"), + (0xB9C, "V"), + (0xB9D, "X"), + (0xB9E, "V"), + (0xBA0, "X"), + (0xBA3, "V"), + (0xBA5, "X"), + (0xBA8, "V"), + (0xBAB, "X"), + (0xBAE, "V"), + (0xBBA, "X"), + (0xBBE, "V"), + (0xBC3, "X"), + (0xBC6, "V"), + (0xBC9, "X"), + (0xBCA, "V"), + (0xBCE, "X"), + (0xBD0, "V"), + (0xBD1, "X"), + (0xBD7, "V"), + (0xBD8, "X"), + (0xBE6, "V"), + (0xBFB, "X"), + (0xC00, "V"), + (0xC0D, "X"), + (0xC0E, "V"), + (0xC11, "X"), + (0xC12, "V"), + (0xC29, "X"), + (0xC2A, "V"), + ] + + +def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, 
str, str]]]: + return [ + (0xC3A, "X"), + (0xC3C, "V"), + (0xC45, "X"), + (0xC46, "V"), + (0xC49, "X"), + (0xC4A, "V"), + (0xC4E, "X"), + (0xC55, "V"), + (0xC57, "X"), + (0xC58, "V"), + (0xC5B, "X"), + (0xC5D, "V"), + (0xC5E, "X"), + (0xC60, "V"), + (0xC64, "X"), + (0xC66, "V"), + (0xC70, "X"), + (0xC77, "V"), + (0xC8D, "X"), + (0xC8E, "V"), + (0xC91, "X"), + (0xC92, "V"), + (0xCA9, "X"), + (0xCAA, "V"), + (0xCB4, "X"), + (0xCB5, "V"), + (0xCBA, "X"), + (0xCBC, "V"), + (0xCC5, "X"), + (0xCC6, "V"), + (0xCC9, "X"), + (0xCCA, "V"), + (0xCCE, "X"), + (0xCD5, "V"), + (0xCD7, "X"), + (0xCDD, "V"), + (0xCDF, "X"), + (0xCE0, "V"), + (0xCE4, "X"), + (0xCE6, "V"), + (0xCF0, "X"), + (0xCF1, "V"), + (0xCF4, "X"), + (0xD00, "V"), + (0xD0D, "X"), + (0xD0E, "V"), + (0xD11, "X"), + (0xD12, "V"), + (0xD45, "X"), + (0xD46, "V"), + (0xD49, "X"), + (0xD4A, "V"), + (0xD50, "X"), + (0xD54, "V"), + (0xD64, "X"), + (0xD66, "V"), + (0xD80, "X"), + (0xD81, "V"), + (0xD84, "X"), + (0xD85, "V"), + (0xD97, "X"), + (0xD9A, "V"), + (0xDB2, "X"), + (0xDB3, "V"), + (0xDBC, "X"), + (0xDBD, "V"), + (0xDBE, "X"), + (0xDC0, "V"), + (0xDC7, "X"), + (0xDCA, "V"), + (0xDCB, "X"), + (0xDCF, "V"), + (0xDD5, "X"), + (0xDD6, "V"), + (0xDD7, "X"), + (0xDD8, "V"), + (0xDE0, "X"), + (0xDE6, "V"), + (0xDF0, "X"), + (0xDF2, "V"), + (0xDF5, "X"), + (0xE01, "V"), + (0xE33, "M", "ํา"), + (0xE34, "V"), + (0xE3B, "X"), + (0xE3F, "V"), + (0xE5C, "X"), + (0xE81, "V"), + (0xE83, "X"), + (0xE84, "V"), + (0xE85, "X"), + (0xE86, "V"), + (0xE8B, "X"), + (0xE8C, "V"), + (0xEA4, "X"), + (0xEA5, "V"), + (0xEA6, "X"), + (0xEA7, "V"), + (0xEB3, "M", "ໍາ"), + (0xEB4, "V"), + ] + + +def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xEBE, "X"), + (0xEC0, "V"), + (0xEC5, "X"), + (0xEC6, "V"), + (0xEC7, "X"), + (0xEC8, "V"), + (0xECF, "X"), + (0xED0, "V"), + (0xEDA, "X"), + (0xEDC, "M", "ຫນ"), + (0xEDD, "M", "ຫມ"), + (0xEDE, "V"), + (0xEE0, "X"), + (0xF00, "V"), + (0xF0C, "M", "་"), + (0xF0D, "V"), + (0xF43, "M", "གྷ"), + (0xF44, "V"), + (0xF48, "X"), + (0xF49, "V"), + (0xF4D, "M", "ཌྷ"), + (0xF4E, "V"), + (0xF52, "M", "དྷ"), + (0xF53, "V"), + (0xF57, "M", "བྷ"), + (0xF58, "V"), + (0xF5C, "M", "ཛྷ"), + (0xF5D, "V"), + (0xF69, "M", "ཀྵ"), + (0xF6A, "V"), + (0xF6D, "X"), + (0xF71, "V"), + (0xF73, "M", "ཱི"), + (0xF74, "V"), + (0xF75, "M", "ཱུ"), + (0xF76, "M", "ྲྀ"), + (0xF77, "M", "ྲཱྀ"), + (0xF78, "M", "ླྀ"), + (0xF79, "M", "ླཱྀ"), + (0xF7A, "V"), + (0xF81, "M", "ཱྀ"), + (0xF82, "V"), + (0xF93, "M", "ྒྷ"), + (0xF94, "V"), + (0xF98, "X"), + (0xF99, "V"), + (0xF9D, "M", "ྜྷ"), + (0xF9E, "V"), + (0xFA2, "M", "ྡྷ"), + (0xFA3, "V"), + (0xFA7, "M", "ྦྷ"), + (0xFA8, "V"), + (0xFAC, "M", "ྫྷ"), + (0xFAD, "V"), + (0xFB9, "M", "ྐྵ"), + (0xFBA, "V"), + (0xFBD, "X"), + (0xFBE, "V"), + (0xFCD, "X"), + (0xFCE, "V"), + (0xFDB, "X"), + (0x1000, "V"), + (0x10A0, "M", "ⴀ"), + (0x10A1, "M", "ⴁ"), + (0x10A2, "M", "ⴂ"), + (0x10A3, "M", "ⴃ"), + (0x10A4, "M", "ⴄ"), + (0x10A5, "M", "ⴅ"), + (0x10A6, "M", "ⴆ"), + (0x10A7, "M", "ⴇ"), + (0x10A8, "M", "ⴈ"), + (0x10A9, "M", "ⴉ"), + (0x10AA, "M", "ⴊ"), + (0x10AB, "M", "ⴋ"), + (0x10AC, "M", "ⴌ"), + (0x10AD, "M", "ⴍ"), + (0x10AE, "M", "ⴎ"), + (0x10AF, "M", "ⴏ"), + (0x10B0, "M", "ⴐ"), + (0x10B1, "M", "ⴑ"), + (0x10B2, "M", "ⴒ"), + (0x10B3, "M", "ⴓ"), + (0x10B4, "M", "ⴔ"), + (0x10B5, "M", "ⴕ"), + (0x10B6, "M", "ⴖ"), + (0x10B7, "M", "ⴗ"), + (0x10B8, "M", "ⴘ"), + (0x10B9, "M", "ⴙ"), + (0x10BA, "M", "ⴚ"), + (0x10BB, "M", "ⴛ"), + (0x10BC, "M", "ⴜ"), + (0x10BD, "M", "ⴝ"), + (0x10BE, "M", "ⴞ"), + (0x10BF, "M", "ⴟ"), 
+ (0x10C0, "M", "ⴠ"), + (0x10C1, "M", "ⴡ"), + (0x10C2, "M", "ⴢ"), + (0x10C3, "M", "ⴣ"), + (0x10C4, "M", "ⴤ"), + (0x10C5, "M", "ⴥ"), + ] + + +def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10C6, "X"), + (0x10C7, "M", "ⴧ"), + (0x10C8, "X"), + (0x10CD, "M", "ⴭ"), + (0x10CE, "X"), + (0x10D0, "V"), + (0x10FC, "M", "ნ"), + (0x10FD, "V"), + (0x115F, "I"), + (0x1161, "V"), + (0x1249, "X"), + (0x124A, "V"), + (0x124E, "X"), + (0x1250, "V"), + (0x1257, "X"), + (0x1258, "V"), + (0x1259, "X"), + (0x125A, "V"), + (0x125E, "X"), + (0x1260, "V"), + (0x1289, "X"), + (0x128A, "V"), + (0x128E, "X"), + (0x1290, "V"), + (0x12B1, "X"), + (0x12B2, "V"), + (0x12B6, "X"), + (0x12B8, "V"), + (0x12BF, "X"), + (0x12C0, "V"), + (0x12C1, "X"), + (0x12C2, "V"), + (0x12C6, "X"), + (0x12C8, "V"), + (0x12D7, "X"), + (0x12D8, "V"), + (0x1311, "X"), + (0x1312, "V"), + (0x1316, "X"), + (0x1318, "V"), + (0x135B, "X"), + (0x135D, "V"), + (0x137D, "X"), + (0x1380, "V"), + (0x139A, "X"), + (0x13A0, "V"), + (0x13F6, "X"), + (0x13F8, "M", "Ᏸ"), + (0x13F9, "M", "Ᏹ"), + (0x13FA, "M", "Ᏺ"), + (0x13FB, "M", "Ᏻ"), + (0x13FC, "M", "Ᏼ"), + (0x13FD, "M", "Ᏽ"), + (0x13FE, "X"), + (0x1400, "V"), + (0x1680, "X"), + (0x1681, "V"), + (0x169D, "X"), + (0x16A0, "V"), + (0x16F9, "X"), + (0x1700, "V"), + (0x1716, "X"), + (0x171F, "V"), + (0x1737, "X"), + (0x1740, "V"), + (0x1754, "X"), + (0x1760, "V"), + (0x176D, "X"), + (0x176E, "V"), + (0x1771, "X"), + (0x1772, "V"), + (0x1774, "X"), + (0x1780, "V"), + (0x17B4, "I"), + (0x17B6, "V"), + (0x17DE, "X"), + (0x17E0, "V"), + (0x17EA, "X"), + (0x17F0, "V"), + (0x17FA, "X"), + (0x1800, "V"), + (0x180B, "I"), + (0x1810, "V"), + (0x181A, "X"), + (0x1820, "V"), + (0x1879, "X"), + (0x1880, "V"), + (0x18AB, "X"), + (0x18B0, "V"), + (0x18F6, "X"), + (0x1900, "V"), + (0x191F, "X"), + (0x1920, "V"), + (0x192C, "X"), + (0x1930, "V"), + (0x193C, "X"), + (0x1940, "V"), + (0x1941, "X"), + (0x1944, "V"), + (0x196E, "X"), + ] + + +def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1970, "V"), + (0x1975, "X"), + (0x1980, "V"), + (0x19AC, "X"), + (0x19B0, "V"), + (0x19CA, "X"), + (0x19D0, "V"), + (0x19DB, "X"), + (0x19DE, "V"), + (0x1A1C, "X"), + (0x1A1E, "V"), + (0x1A5F, "X"), + (0x1A60, "V"), + (0x1A7D, "X"), + (0x1A7F, "V"), + (0x1A8A, "X"), + (0x1A90, "V"), + (0x1A9A, "X"), + (0x1AA0, "V"), + (0x1AAE, "X"), + (0x1AB0, "V"), + (0x1ACF, "X"), + (0x1B00, "V"), + (0x1B4D, "X"), + (0x1B4E, "V"), + (0x1BF4, "X"), + (0x1BFC, "V"), + (0x1C38, "X"), + (0x1C3B, "V"), + (0x1C4A, "X"), + (0x1C4D, "V"), + (0x1C80, "M", "в"), + (0x1C81, "M", "д"), + (0x1C82, "M", "о"), + (0x1C83, "M", "с"), + (0x1C84, "M", "т"), + (0x1C86, "M", "ъ"), + (0x1C87, "M", "ѣ"), + (0x1C88, "M", "ꙋ"), + (0x1C89, "M", "ᲊ"), + (0x1C8A, "V"), + (0x1C8B, "X"), + (0x1C90, "M", "ა"), + (0x1C91, "M", "ბ"), + (0x1C92, "M", "გ"), + (0x1C93, "M", "დ"), + (0x1C94, "M", "ე"), + (0x1C95, "M", "ვ"), + (0x1C96, "M", "ზ"), + (0x1C97, "M", "თ"), + (0x1C98, "M", "ი"), + (0x1C99, "M", "კ"), + (0x1C9A, "M", "ლ"), + (0x1C9B, "M", "მ"), + (0x1C9C, "M", "ნ"), + (0x1C9D, "M", "ო"), + (0x1C9E, "M", "პ"), + (0x1C9F, "M", "ჟ"), + (0x1CA0, "M", "რ"), + (0x1CA1, "M", "ს"), + (0x1CA2, "M", "ტ"), + (0x1CA3, "M", "უ"), + (0x1CA4, "M", "ფ"), + (0x1CA5, "M", "ქ"), + (0x1CA6, "M", "ღ"), + (0x1CA7, "M", "ყ"), + (0x1CA8, "M", "შ"), + (0x1CA9, "M", "ჩ"), + (0x1CAA, "M", "ც"), + (0x1CAB, "M", "ძ"), + (0x1CAC, "M", "წ"), + (0x1CAD, "M", "ჭ"), + (0x1CAE, "M", "ხ"), + (0x1CAF, "M", "ჯ"), + (0x1CB0, "M", "ჰ"), + (0x1CB1, 
"M", "ჱ"), + (0x1CB2, "M", "ჲ"), + (0x1CB3, "M", "ჳ"), + (0x1CB4, "M", "ჴ"), + (0x1CB5, "M", "ჵ"), + (0x1CB6, "M", "ჶ"), + (0x1CB7, "M", "ჷ"), + (0x1CB8, "M", "ჸ"), + (0x1CB9, "M", "ჹ"), + (0x1CBA, "M", "ჺ"), + (0x1CBB, "X"), + (0x1CBD, "M", "ჽ"), + (0x1CBE, "M", "ჾ"), + (0x1CBF, "M", "ჿ"), + (0x1CC0, "V"), + (0x1CC8, "X"), + (0x1CD0, "V"), + (0x1CFB, "X"), + (0x1D00, "V"), + (0x1D2C, "M", "a"), + (0x1D2D, "M", "æ"), + (0x1D2E, "M", "b"), + (0x1D2F, "V"), + (0x1D30, "M", "d"), + (0x1D31, "M", "e"), + ] + + +def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D32, "M", "ǝ"), + (0x1D33, "M", "g"), + (0x1D34, "M", "h"), + (0x1D35, "M", "i"), + (0x1D36, "M", "j"), + (0x1D37, "M", "k"), + (0x1D38, "M", "l"), + (0x1D39, "M", "m"), + (0x1D3A, "M", "n"), + (0x1D3B, "V"), + (0x1D3C, "M", "o"), + (0x1D3D, "M", "ȣ"), + (0x1D3E, "M", "p"), + (0x1D3F, "M", "r"), + (0x1D40, "M", "t"), + (0x1D41, "M", "u"), + (0x1D42, "M", "w"), + (0x1D43, "M", "a"), + (0x1D44, "M", "ɐ"), + (0x1D45, "M", "ɑ"), + (0x1D46, "M", "ᴂ"), + (0x1D47, "M", "b"), + (0x1D48, "M", "d"), + (0x1D49, "M", "e"), + (0x1D4A, "M", "ə"), + (0x1D4B, "M", "ɛ"), + (0x1D4C, "M", "ɜ"), + (0x1D4D, "M", "g"), + (0x1D4E, "V"), + (0x1D4F, "M", "k"), + (0x1D50, "M", "m"), + (0x1D51, "M", "ŋ"), + (0x1D52, "M", "o"), + (0x1D53, "M", "ɔ"), + (0x1D54, "M", "ᴖ"), + (0x1D55, "M", "ᴗ"), + (0x1D56, "M", "p"), + (0x1D57, "M", "t"), + (0x1D58, "M", "u"), + (0x1D59, "M", "ᴝ"), + (0x1D5A, "M", "ɯ"), + (0x1D5B, "M", "v"), + (0x1D5C, "M", "ᴥ"), + (0x1D5D, "M", "β"), + (0x1D5E, "M", "γ"), + (0x1D5F, "M", "δ"), + (0x1D60, "M", "φ"), + (0x1D61, "M", "χ"), + (0x1D62, "M", "i"), + (0x1D63, "M", "r"), + (0x1D64, "M", "u"), + (0x1D65, "M", "v"), + (0x1D66, "M", "β"), + (0x1D67, "M", "γ"), + (0x1D68, "M", "ρ"), + (0x1D69, "M", "φ"), + (0x1D6A, "M", "χ"), + (0x1D6B, "V"), + (0x1D78, "M", "н"), + (0x1D79, "V"), + (0x1D9B, "M", "ɒ"), + (0x1D9C, "M", "c"), + (0x1D9D, "M", "ɕ"), + (0x1D9E, "M", "ð"), + (0x1D9F, "M", "ɜ"), + (0x1DA0, "M", "f"), + (0x1DA1, "M", "ɟ"), + (0x1DA2, "M", "ɡ"), + (0x1DA3, "M", "ɥ"), + (0x1DA4, "M", "ɨ"), + (0x1DA5, "M", "ɩ"), + (0x1DA6, "M", "ɪ"), + (0x1DA7, "M", "ᵻ"), + (0x1DA8, "M", "ʝ"), + (0x1DA9, "M", "ɭ"), + (0x1DAA, "M", "ᶅ"), + (0x1DAB, "M", "ʟ"), + (0x1DAC, "M", "ɱ"), + (0x1DAD, "M", "ɰ"), + (0x1DAE, "M", "ɲ"), + (0x1DAF, "M", "ɳ"), + (0x1DB0, "M", "ɴ"), + (0x1DB1, "M", "ɵ"), + (0x1DB2, "M", "ɸ"), + (0x1DB3, "M", "ʂ"), + (0x1DB4, "M", "ʃ"), + (0x1DB5, "M", "ƫ"), + (0x1DB6, "M", "ʉ"), + (0x1DB7, "M", "ʊ"), + (0x1DB8, "M", "ᴜ"), + (0x1DB9, "M", "ʋ"), + (0x1DBA, "M", "ʌ"), + (0x1DBB, "M", "z"), + (0x1DBC, "M", "ʐ"), + (0x1DBD, "M", "ʑ"), + (0x1DBE, "M", "ʒ"), + (0x1DBF, "M", "θ"), + (0x1DC0, "V"), + (0x1E00, "M", "ḁ"), + (0x1E01, "V"), + ] + + +def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E02, "M", "ḃ"), + (0x1E03, "V"), + (0x1E04, "M", "ḅ"), + (0x1E05, "V"), + (0x1E06, "M", "ḇ"), + (0x1E07, "V"), + (0x1E08, "M", "ḉ"), + (0x1E09, "V"), + (0x1E0A, "M", "ḋ"), + (0x1E0B, "V"), + (0x1E0C, "M", "ḍ"), + (0x1E0D, "V"), + (0x1E0E, "M", "ḏ"), + (0x1E0F, "V"), + (0x1E10, "M", "ḑ"), + (0x1E11, "V"), + (0x1E12, "M", "ḓ"), + (0x1E13, "V"), + (0x1E14, "M", "ḕ"), + (0x1E15, "V"), + (0x1E16, "M", "ḗ"), + (0x1E17, "V"), + (0x1E18, "M", "ḙ"), + (0x1E19, "V"), + (0x1E1A, "M", "ḛ"), + (0x1E1B, "V"), + (0x1E1C, "M", "ḝ"), + (0x1E1D, "V"), + (0x1E1E, "M", "ḟ"), + (0x1E1F, "V"), + (0x1E20, "M", "ḡ"), + (0x1E21, "V"), + (0x1E22, "M", "ḣ"), + (0x1E23, "V"), + (0x1E24, "M", "ḥ"), + (0x1E25, "V"), 
+ (0x1E26, "M", "ḧ"), + (0x1E27, "V"), + (0x1E28, "M", "ḩ"), + (0x1E29, "V"), + (0x1E2A, "M", "ḫ"), + (0x1E2B, "V"), + (0x1E2C, "M", "ḭ"), + (0x1E2D, "V"), + (0x1E2E, "M", "ḯ"), + (0x1E2F, "V"), + (0x1E30, "M", "ḱ"), + (0x1E31, "V"), + (0x1E32, "M", "ḳ"), + (0x1E33, "V"), + (0x1E34, "M", "ḵ"), + (0x1E35, "V"), + (0x1E36, "M", "ḷ"), + (0x1E37, "V"), + (0x1E38, "M", "ḹ"), + (0x1E39, "V"), + (0x1E3A, "M", "ḻ"), + (0x1E3B, "V"), + (0x1E3C, "M", "ḽ"), + (0x1E3D, "V"), + (0x1E3E, "M", "ḿ"), + (0x1E3F, "V"), + (0x1E40, "M", "ṁ"), + (0x1E41, "V"), + (0x1E42, "M", "ṃ"), + (0x1E43, "V"), + (0x1E44, "M", "ṅ"), + (0x1E45, "V"), + (0x1E46, "M", "ṇ"), + (0x1E47, "V"), + (0x1E48, "M", "ṉ"), + (0x1E49, "V"), + (0x1E4A, "M", "ṋ"), + (0x1E4B, "V"), + (0x1E4C, "M", "ṍ"), + (0x1E4D, "V"), + (0x1E4E, "M", "ṏ"), + (0x1E4F, "V"), + (0x1E50, "M", "ṑ"), + (0x1E51, "V"), + (0x1E52, "M", "ṓ"), + (0x1E53, "V"), + (0x1E54, "M", "ṕ"), + (0x1E55, "V"), + (0x1E56, "M", "ṗ"), + (0x1E57, "V"), + (0x1E58, "M", "ṙ"), + (0x1E59, "V"), + (0x1E5A, "M", "ṛ"), + (0x1E5B, "V"), + (0x1E5C, "M", "ṝ"), + (0x1E5D, "V"), + (0x1E5E, "M", "ṟ"), + (0x1E5F, "V"), + (0x1E60, "M", "ṡ"), + (0x1E61, "V"), + (0x1E62, "M", "ṣ"), + (0x1E63, "V"), + (0x1E64, "M", "ṥ"), + (0x1E65, "V"), + ] + + +def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E66, "M", "ṧ"), + (0x1E67, "V"), + (0x1E68, "M", "ṩ"), + (0x1E69, "V"), + (0x1E6A, "M", "ṫ"), + (0x1E6B, "V"), + (0x1E6C, "M", "ṭ"), + (0x1E6D, "V"), + (0x1E6E, "M", "ṯ"), + (0x1E6F, "V"), + (0x1E70, "M", "ṱ"), + (0x1E71, "V"), + (0x1E72, "M", "ṳ"), + (0x1E73, "V"), + (0x1E74, "M", "ṵ"), + (0x1E75, "V"), + (0x1E76, "M", "ṷ"), + (0x1E77, "V"), + (0x1E78, "M", "ṹ"), + (0x1E79, "V"), + (0x1E7A, "M", "ṻ"), + (0x1E7B, "V"), + (0x1E7C, "M", "ṽ"), + (0x1E7D, "V"), + (0x1E7E, "M", "ṿ"), + (0x1E7F, "V"), + (0x1E80, "M", "ẁ"), + (0x1E81, "V"), + (0x1E82, "M", "ẃ"), + (0x1E83, "V"), + (0x1E84, "M", "ẅ"), + (0x1E85, "V"), + (0x1E86, "M", "ẇ"), + (0x1E87, "V"), + (0x1E88, "M", "ẉ"), + (0x1E89, "V"), + (0x1E8A, "M", "ẋ"), + (0x1E8B, "V"), + (0x1E8C, "M", "ẍ"), + (0x1E8D, "V"), + (0x1E8E, "M", "ẏ"), + (0x1E8F, "V"), + (0x1E90, "M", "ẑ"), + (0x1E91, "V"), + (0x1E92, "M", "ẓ"), + (0x1E93, "V"), + (0x1E94, "M", "ẕ"), + (0x1E95, "V"), + (0x1E9A, "M", "aʾ"), + (0x1E9B, "M", "ṡ"), + (0x1E9C, "V"), + (0x1E9E, "M", "ß"), + (0x1E9F, "V"), + (0x1EA0, "M", "ạ"), + (0x1EA1, "V"), + (0x1EA2, "M", "ả"), + (0x1EA3, "V"), + (0x1EA4, "M", "ấ"), + (0x1EA5, "V"), + (0x1EA6, "M", "ầ"), + (0x1EA7, "V"), + (0x1EA8, "M", "ẩ"), + (0x1EA9, "V"), + (0x1EAA, "M", "ẫ"), + (0x1EAB, "V"), + (0x1EAC, "M", "ậ"), + (0x1EAD, "V"), + (0x1EAE, "M", "ắ"), + (0x1EAF, "V"), + (0x1EB0, "M", "ằ"), + (0x1EB1, "V"), + (0x1EB2, "M", "ẳ"), + (0x1EB3, "V"), + (0x1EB4, "M", "ẵ"), + (0x1EB5, "V"), + (0x1EB6, "M", "ặ"), + (0x1EB7, "V"), + (0x1EB8, "M", "ẹ"), + (0x1EB9, "V"), + (0x1EBA, "M", "ẻ"), + (0x1EBB, "V"), + (0x1EBC, "M", "ẽ"), + (0x1EBD, "V"), + (0x1EBE, "M", "ế"), + (0x1EBF, "V"), + (0x1EC0, "M", "ề"), + (0x1EC1, "V"), + (0x1EC2, "M", "ể"), + (0x1EC3, "V"), + (0x1EC4, "M", "ễ"), + (0x1EC5, "V"), + (0x1EC6, "M", "ệ"), + (0x1EC7, "V"), + (0x1EC8, "M", "ỉ"), + (0x1EC9, "V"), + (0x1ECA, "M", "ị"), + (0x1ECB, "V"), + (0x1ECC, "M", "ọ"), + (0x1ECD, "V"), + (0x1ECE, "M", "ỏ"), + ] + + +def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1ECF, "V"), + (0x1ED0, "M", "ố"), + (0x1ED1, "V"), + (0x1ED2, "M", "ồ"), + (0x1ED3, "V"), + (0x1ED4, "M", "ổ"), + (0x1ED5, "V"), + (0x1ED6, "M", "ỗ"), + (0x1ED7, "V"), 
+ (0x1ED8, "M", "ộ"), + (0x1ED9, "V"), + (0x1EDA, "M", "ớ"), + (0x1EDB, "V"), + (0x1EDC, "M", "ờ"), + (0x1EDD, "V"), + (0x1EDE, "M", "ở"), + (0x1EDF, "V"), + (0x1EE0, "M", "ỡ"), + (0x1EE1, "V"), + (0x1EE2, "M", "ợ"), + (0x1EE3, "V"), + (0x1EE4, "M", "ụ"), + (0x1EE5, "V"), + (0x1EE6, "M", "ủ"), + (0x1EE7, "V"), + (0x1EE8, "M", "ứ"), + (0x1EE9, "V"), + (0x1EEA, "M", "ừ"), + (0x1EEB, "V"), + (0x1EEC, "M", "ử"), + (0x1EED, "V"), + (0x1EEE, "M", "ữ"), + (0x1EEF, "V"), + (0x1EF0, "M", "ự"), + (0x1EF1, "V"), + (0x1EF2, "M", "ỳ"), + (0x1EF3, "V"), + (0x1EF4, "M", "ỵ"), + (0x1EF5, "V"), + (0x1EF6, "M", "ỷ"), + (0x1EF7, "V"), + (0x1EF8, "M", "ỹ"), + (0x1EF9, "V"), + (0x1EFA, "M", "ỻ"), + (0x1EFB, "V"), + (0x1EFC, "M", "ỽ"), + (0x1EFD, "V"), + (0x1EFE, "M", "ỿ"), + (0x1EFF, "V"), + (0x1F08, "M", "ἀ"), + (0x1F09, "M", "ἁ"), + (0x1F0A, "M", "ἂ"), + (0x1F0B, "M", "ἃ"), + (0x1F0C, "M", "ἄ"), + (0x1F0D, "M", "ἅ"), + (0x1F0E, "M", "ἆ"), + (0x1F0F, "M", "ἇ"), + (0x1F10, "V"), + (0x1F16, "X"), + (0x1F18, "M", "ἐ"), + (0x1F19, "M", "ἑ"), + (0x1F1A, "M", "ἒ"), + (0x1F1B, "M", "ἓ"), + (0x1F1C, "M", "ἔ"), + (0x1F1D, "M", "ἕ"), + (0x1F1E, "X"), + (0x1F20, "V"), + (0x1F28, "M", "ἠ"), + (0x1F29, "M", "ἡ"), + (0x1F2A, "M", "ἢ"), + (0x1F2B, "M", "ἣ"), + (0x1F2C, "M", "ἤ"), + (0x1F2D, "M", "ἥ"), + (0x1F2E, "M", "ἦ"), + (0x1F2F, "M", "ἧ"), + (0x1F30, "V"), + (0x1F38, "M", "ἰ"), + (0x1F39, "M", "ἱ"), + (0x1F3A, "M", "ἲ"), + (0x1F3B, "M", "ἳ"), + (0x1F3C, "M", "ἴ"), + (0x1F3D, "M", "ἵ"), + (0x1F3E, "M", "ἶ"), + (0x1F3F, "M", "ἷ"), + (0x1F40, "V"), + (0x1F46, "X"), + (0x1F48, "M", "ὀ"), + (0x1F49, "M", "ὁ"), + (0x1F4A, "M", "ὂ"), + (0x1F4B, "M", "ὃ"), + (0x1F4C, "M", "ὄ"), + (0x1F4D, "M", "ὅ"), + (0x1F4E, "X"), + (0x1F50, "V"), + (0x1F58, "X"), + (0x1F59, "M", "ὑ"), + (0x1F5A, "X"), + (0x1F5B, "M", "ὓ"), + (0x1F5C, "X"), + (0x1F5D, "M", "ὕ"), + ] + + +def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F5E, "X"), + (0x1F5F, "M", "ὗ"), + (0x1F60, "V"), + (0x1F68, "M", "ὠ"), + (0x1F69, "M", "ὡ"), + (0x1F6A, "M", "ὢ"), + (0x1F6B, "M", "ὣ"), + (0x1F6C, "M", "ὤ"), + (0x1F6D, "M", "ὥ"), + (0x1F6E, "M", "ὦ"), + (0x1F6F, "M", "ὧ"), + (0x1F70, "V"), + (0x1F71, "M", "ά"), + (0x1F72, "V"), + (0x1F73, "M", "έ"), + (0x1F74, "V"), + (0x1F75, "M", "ή"), + (0x1F76, "V"), + (0x1F77, "M", "ί"), + (0x1F78, "V"), + (0x1F79, "M", "ό"), + (0x1F7A, "V"), + (0x1F7B, "M", "ύ"), + (0x1F7C, "V"), + (0x1F7D, "M", "ώ"), + (0x1F7E, "X"), + (0x1F80, "M", "ἀι"), + (0x1F81, "M", "ἁι"), + (0x1F82, "M", "ἂι"), + (0x1F83, "M", "ἃι"), + (0x1F84, "M", "ἄι"), + (0x1F85, "M", "ἅι"), + (0x1F86, "M", "ἆι"), + (0x1F87, "M", "ἇι"), + (0x1F88, "M", "ἀι"), + (0x1F89, "M", "ἁι"), + (0x1F8A, "M", "ἂι"), + (0x1F8B, "M", "ἃι"), + (0x1F8C, "M", "ἄι"), + (0x1F8D, "M", "ἅι"), + (0x1F8E, "M", "ἆι"), + (0x1F8F, "M", "ἇι"), + (0x1F90, "M", "ἠι"), + (0x1F91, "M", "ἡι"), + (0x1F92, "M", "ἢι"), + (0x1F93, "M", "ἣι"), + (0x1F94, "M", "ἤι"), + (0x1F95, "M", "ἥι"), + (0x1F96, "M", "ἦι"), + (0x1F97, "M", "ἧι"), + (0x1F98, "M", "ἠι"), + (0x1F99, "M", "ἡι"), + (0x1F9A, "M", "ἢι"), + (0x1F9B, "M", "ἣι"), + (0x1F9C, "M", "ἤι"), + (0x1F9D, "M", "ἥι"), + (0x1F9E, "M", "ἦι"), + (0x1F9F, "M", "ἧι"), + (0x1FA0, "M", "ὠι"), + (0x1FA1, "M", "ὡι"), + (0x1FA2, "M", "ὢι"), + (0x1FA3, "M", "ὣι"), + (0x1FA4, "M", "ὤι"), + (0x1FA5, "M", "ὥι"), + (0x1FA6, "M", "ὦι"), + (0x1FA7, "M", "ὧι"), + (0x1FA8, "M", "ὠι"), + (0x1FA9, "M", "ὡι"), + (0x1FAA, "M", "ὢι"), + (0x1FAB, "M", "ὣι"), + (0x1FAC, "M", "ὤι"), + (0x1FAD, "M", "ὥι"), + (0x1FAE, "M", "ὦι"), + (0x1FAF, "M", 
"ὧι"), + (0x1FB0, "V"), + (0x1FB2, "M", "ὰι"), + (0x1FB3, "M", "αι"), + (0x1FB4, "M", "άι"), + (0x1FB5, "X"), + (0x1FB6, "V"), + (0x1FB7, "M", "ᾶι"), + (0x1FB8, "M", "ᾰ"), + (0x1FB9, "M", "ᾱ"), + (0x1FBA, "M", "ὰ"), + (0x1FBB, "M", "ά"), + (0x1FBC, "M", "αι"), + (0x1FBD, "M", " ̓"), + (0x1FBE, "M", "ι"), + (0x1FBF, "M", " ̓"), + (0x1FC0, "M", " ͂"), + (0x1FC1, "M", " ̈͂"), + (0x1FC2, "M", "ὴι"), + (0x1FC3, "M", "ηι"), + (0x1FC4, "M", "ήι"), + (0x1FC5, "X"), + (0x1FC6, "V"), + (0x1FC7, "M", "ῆι"), + (0x1FC8, "M", "ὲ"), + (0x1FC9, "M", "έ"), + (0x1FCA, "M", "ὴ"), + ] + + +def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1FCB, "M", "ή"), + (0x1FCC, "M", "ηι"), + (0x1FCD, "M", " ̓̀"), + (0x1FCE, "M", " ̓́"), + (0x1FCF, "M", " ̓͂"), + (0x1FD0, "V"), + (0x1FD3, "M", "ΐ"), + (0x1FD4, "X"), + (0x1FD6, "V"), + (0x1FD8, "M", "ῐ"), + (0x1FD9, "M", "ῑ"), + (0x1FDA, "M", "ὶ"), + (0x1FDB, "M", "ί"), + (0x1FDC, "X"), + (0x1FDD, "M", " ̔̀"), + (0x1FDE, "M", " ̔́"), + (0x1FDF, "M", " ̔͂"), + (0x1FE0, "V"), + (0x1FE3, "M", "ΰ"), + (0x1FE4, "V"), + (0x1FE8, "M", "ῠ"), + (0x1FE9, "M", "ῡ"), + (0x1FEA, "M", "ὺ"), + (0x1FEB, "M", "ύ"), + (0x1FEC, "M", "ῥ"), + (0x1FED, "M", " ̈̀"), + (0x1FEE, "M", " ̈́"), + (0x1FEF, "M", "`"), + (0x1FF0, "X"), + (0x1FF2, "M", "ὼι"), + (0x1FF3, "M", "ωι"), + (0x1FF4, "M", "ώι"), + (0x1FF5, "X"), + (0x1FF6, "V"), + (0x1FF7, "M", "ῶι"), + (0x1FF8, "M", "ὸ"), + (0x1FF9, "M", "ό"), + (0x1FFA, "M", "ὼ"), + (0x1FFB, "M", "ώ"), + (0x1FFC, "M", "ωι"), + (0x1FFD, "M", " ́"), + (0x1FFE, "M", " ̔"), + (0x1FFF, "X"), + (0x2000, "M", " "), + (0x200B, "I"), + (0x200C, "D", ""), + (0x200E, "X"), + (0x2010, "V"), + (0x2011, "M", "‐"), + (0x2012, "V"), + (0x2017, "M", " ̳"), + (0x2018, "V"), + (0x2024, "X"), + (0x2027, "V"), + (0x2028, "X"), + (0x202F, "M", " "), + (0x2030, "V"), + (0x2033, "M", "′′"), + (0x2034, "M", "′′′"), + (0x2035, "V"), + (0x2036, "M", "‵‵"), + (0x2037, "M", "‵‵‵"), + (0x2038, "V"), + (0x203C, "M", "!!"), + (0x203D, "V"), + (0x203E, "M", " ̅"), + (0x203F, "V"), + (0x2047, "M", "??"), + (0x2048, "M", "?!"), + (0x2049, "M", "!?"), + (0x204A, "V"), + (0x2057, "M", "′′′′"), + (0x2058, "V"), + (0x205F, "M", " "), + (0x2060, "I"), + (0x2065, "X"), + (0x206A, "I"), + (0x2070, "M", "0"), + (0x2071, "M", "i"), + (0x2072, "X"), + (0x2074, "M", "4"), + (0x2075, "M", "5"), + (0x2076, "M", "6"), + (0x2077, "M", "7"), + (0x2078, "M", "8"), + (0x2079, "M", "9"), + (0x207A, "M", "+"), + (0x207B, "M", "−"), + (0x207C, "M", "="), + (0x207D, "M", "("), + (0x207E, "M", ")"), + (0x207F, "M", "n"), + (0x2080, "M", "0"), + (0x2081, "M", "1"), + (0x2082, "M", "2"), + (0x2083, "M", "3"), + (0x2084, "M", "4"), + (0x2085, "M", "5"), + (0x2086, "M", "6"), + (0x2087, "M", "7"), + ] + + +def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2088, "M", "8"), + (0x2089, "M", "9"), + (0x208A, "M", "+"), + (0x208B, "M", "−"), + (0x208C, "M", "="), + (0x208D, "M", "("), + (0x208E, "M", ")"), + (0x208F, "X"), + (0x2090, "M", "a"), + (0x2091, "M", "e"), + (0x2092, "M", "o"), + (0x2093, "M", "x"), + (0x2094, "M", "ə"), + (0x2095, "M", "h"), + (0x2096, "M", "k"), + (0x2097, "M", "l"), + (0x2098, "M", "m"), + (0x2099, "M", "n"), + (0x209A, "M", "p"), + (0x209B, "M", "s"), + (0x209C, "M", "t"), + (0x209D, "X"), + (0x20A0, "V"), + (0x20A8, "M", "rs"), + (0x20A9, "V"), + (0x20C1, "X"), + (0x20D0, "V"), + (0x20F1, "X"), + (0x2100, "M", "a/c"), + (0x2101, "M", "a/s"), + (0x2102, "M", "c"), + (0x2103, "M", "°c"), + (0x2104, "V"), + (0x2105, "M", "c/o"), 
+ (0x2106, "M", "c/u"), + (0x2107, "M", "ɛ"), + (0x2108, "V"), + (0x2109, "M", "°f"), + (0x210A, "M", "g"), + (0x210B, "M", "h"), + (0x210F, "M", "ħ"), + (0x2110, "M", "i"), + (0x2112, "M", "l"), + (0x2114, "V"), + (0x2115, "M", "n"), + (0x2116, "M", "no"), + (0x2117, "V"), + (0x2119, "M", "p"), + (0x211A, "M", "q"), + (0x211B, "M", "r"), + (0x211E, "V"), + (0x2120, "M", "sm"), + (0x2121, "M", "tel"), + (0x2122, "M", "tm"), + (0x2123, "V"), + (0x2124, "M", "z"), + (0x2125, "V"), + (0x2126, "M", "ω"), + (0x2127, "V"), + (0x2128, "M", "z"), + (0x2129, "V"), + (0x212A, "M", "k"), + (0x212B, "M", "å"), + (0x212C, "M", "b"), + (0x212D, "M", "c"), + (0x212E, "V"), + (0x212F, "M", "e"), + (0x2131, "M", "f"), + (0x2132, "M", "ⅎ"), + (0x2133, "M", "m"), + (0x2134, "M", "o"), + (0x2135, "M", "א"), + (0x2136, "M", "ב"), + (0x2137, "M", "ג"), + (0x2138, "M", "ד"), + (0x2139, "M", "i"), + (0x213A, "V"), + (0x213B, "M", "fax"), + (0x213C, "M", "π"), + (0x213D, "M", "γ"), + (0x213F, "M", "π"), + (0x2140, "M", "∑"), + (0x2141, "V"), + (0x2145, "M", "d"), + (0x2147, "M", "e"), + (0x2148, "M", "i"), + (0x2149, "M", "j"), + (0x214A, "V"), + (0x2150, "M", "1⁄7"), + (0x2151, "M", "1⁄9"), + (0x2152, "M", "1⁄10"), + (0x2153, "M", "1⁄3"), + (0x2154, "M", "2⁄3"), + (0x2155, "M", "1⁄5"), + (0x2156, "M", "2⁄5"), + (0x2157, "M", "3⁄5"), + (0x2158, "M", "4⁄5"), + (0x2159, "M", "1⁄6"), + (0x215A, "M", "5⁄6"), + (0x215B, "M", "1⁄8"), + ] + + +def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x215C, "M", "3⁄8"), + (0x215D, "M", "5⁄8"), + (0x215E, "M", "7⁄8"), + (0x215F, "M", "1⁄"), + (0x2160, "M", "i"), + (0x2161, "M", "ii"), + (0x2162, "M", "iii"), + (0x2163, "M", "iv"), + (0x2164, "M", "v"), + (0x2165, "M", "vi"), + (0x2166, "M", "vii"), + (0x2167, "M", "viii"), + (0x2168, "M", "ix"), + (0x2169, "M", "x"), + (0x216A, "M", "xi"), + (0x216B, "M", "xii"), + (0x216C, "M", "l"), + (0x216D, "M", "c"), + (0x216E, "M", "d"), + (0x216F, "M", "m"), + (0x2170, "M", "i"), + (0x2171, "M", "ii"), + (0x2172, "M", "iii"), + (0x2173, "M", "iv"), + (0x2174, "M", "v"), + (0x2175, "M", "vi"), + (0x2176, "M", "vii"), + (0x2177, "M", "viii"), + (0x2178, "M", "ix"), + (0x2179, "M", "x"), + (0x217A, "M", "xi"), + (0x217B, "M", "xii"), + (0x217C, "M", "l"), + (0x217D, "M", "c"), + (0x217E, "M", "d"), + (0x217F, "M", "m"), + (0x2180, "V"), + (0x2183, "M", "ↄ"), + (0x2184, "V"), + (0x2189, "M", "0⁄3"), + (0x218A, "V"), + (0x218C, "X"), + (0x2190, "V"), + (0x222C, "M", "∫∫"), + (0x222D, "M", "∫∫∫"), + (0x222E, "V"), + (0x222F, "M", "∮∮"), + (0x2230, "M", "∮∮∮"), + (0x2231, "V"), + (0x2329, "M", "〈"), + (0x232A, "M", "〉"), + (0x232B, "V"), + (0x242A, "X"), + (0x2440, "V"), + (0x244B, "X"), + (0x2460, "M", "1"), + (0x2461, "M", "2"), + (0x2462, "M", "3"), + (0x2463, "M", "4"), + (0x2464, "M", "5"), + (0x2465, "M", "6"), + (0x2466, "M", "7"), + (0x2467, "M", "8"), + (0x2468, "M", "9"), + (0x2469, "M", "10"), + (0x246A, "M", "11"), + (0x246B, "M", "12"), + (0x246C, "M", "13"), + (0x246D, "M", "14"), + (0x246E, "M", "15"), + (0x246F, "M", "16"), + (0x2470, "M", "17"), + (0x2471, "M", "18"), + (0x2472, "M", "19"), + (0x2473, "M", "20"), + (0x2474, "M", "(1)"), + (0x2475, "M", "(2)"), + (0x2476, "M", "(3)"), + (0x2477, "M", "(4)"), + (0x2478, "M", "(5)"), + (0x2479, "M", "(6)"), + (0x247A, "M", "(7)"), + (0x247B, "M", "(8)"), + (0x247C, "M", "(9)"), + (0x247D, "M", "(10)"), + (0x247E, "M", "(11)"), + (0x247F, "M", "(12)"), + (0x2480, "M", "(13)"), + (0x2481, "M", "(14)"), + (0x2482, "M", "(15)"), + (0x2483, "M", 
"(16)"), + (0x2484, "M", "(17)"), + (0x2485, "M", "(18)"), + (0x2486, "M", "(19)"), + (0x2487, "M", "(20)"), + (0x2488, "X"), + (0x249C, "M", "(a)"), + (0x249D, "M", "(b)"), + (0x249E, "M", "(c)"), + (0x249F, "M", "(d)"), + ] + + +def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x24A0, "M", "(e)"), + (0x24A1, "M", "(f)"), + (0x24A2, "M", "(g)"), + (0x24A3, "M", "(h)"), + (0x24A4, "M", "(i)"), + (0x24A5, "M", "(j)"), + (0x24A6, "M", "(k)"), + (0x24A7, "M", "(l)"), + (0x24A8, "M", "(m)"), + (0x24A9, "M", "(n)"), + (0x24AA, "M", "(o)"), + (0x24AB, "M", "(p)"), + (0x24AC, "M", "(q)"), + (0x24AD, "M", "(r)"), + (0x24AE, "M", "(s)"), + (0x24AF, "M", "(t)"), + (0x24B0, "M", "(u)"), + (0x24B1, "M", "(v)"), + (0x24B2, "M", "(w)"), + (0x24B3, "M", "(x)"), + (0x24B4, "M", "(y)"), + (0x24B5, "M", "(z)"), + (0x24B6, "M", "a"), + (0x24B7, "M", "b"), + (0x24B8, "M", "c"), + (0x24B9, "M", "d"), + (0x24BA, "M", "e"), + (0x24BB, "M", "f"), + (0x24BC, "M", "g"), + (0x24BD, "M", "h"), + (0x24BE, "M", "i"), + (0x24BF, "M", "j"), + (0x24C0, "M", "k"), + (0x24C1, "M", "l"), + (0x24C2, "M", "m"), + (0x24C3, "M", "n"), + (0x24C4, "M", "o"), + (0x24C5, "M", "p"), + (0x24C6, "M", "q"), + (0x24C7, "M", "r"), + (0x24C8, "M", "s"), + (0x24C9, "M", "t"), + (0x24CA, "M", "u"), + (0x24CB, "M", "v"), + (0x24CC, "M", "w"), + (0x24CD, "M", "x"), + (0x24CE, "M", "y"), + (0x24CF, "M", "z"), + (0x24D0, "M", "a"), + (0x24D1, "M", "b"), + (0x24D2, "M", "c"), + (0x24D3, "M", "d"), + (0x24D4, "M", "e"), + (0x24D5, "M", "f"), + (0x24D6, "M", "g"), + (0x24D7, "M", "h"), + (0x24D8, "M", "i"), + (0x24D9, "M", "j"), + (0x24DA, "M", "k"), + (0x24DB, "M", "l"), + (0x24DC, "M", "m"), + (0x24DD, "M", "n"), + (0x24DE, "M", "o"), + (0x24DF, "M", "p"), + (0x24E0, "M", "q"), + (0x24E1, "M", "r"), + (0x24E2, "M", "s"), + (0x24E3, "M", "t"), + (0x24E4, "M", "u"), + (0x24E5, "M", "v"), + (0x24E6, "M", "w"), + (0x24E7, "M", "x"), + (0x24E8, "M", "y"), + (0x24E9, "M", "z"), + (0x24EA, "M", "0"), + (0x24EB, "V"), + (0x2A0C, "M", "∫∫∫∫"), + (0x2A0D, "V"), + (0x2A74, "M", "::="), + (0x2A75, "M", "=="), + (0x2A76, "M", "==="), + (0x2A77, "V"), + (0x2ADC, "M", "⫝̸"), + (0x2ADD, "V"), + (0x2B74, "X"), + (0x2B76, "V"), + (0x2B96, "X"), + (0x2B97, "V"), + (0x2C00, "M", "ⰰ"), + (0x2C01, "M", "ⰱ"), + (0x2C02, "M", "ⰲ"), + (0x2C03, "M", "ⰳ"), + (0x2C04, "M", "ⰴ"), + (0x2C05, "M", "ⰵ"), + (0x2C06, "M", "ⰶ"), + (0x2C07, "M", "ⰷ"), + (0x2C08, "M", "ⰸ"), + (0x2C09, "M", "ⰹ"), + (0x2C0A, "M", "ⰺ"), + (0x2C0B, "M", "ⰻ"), + ] + + +def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2C0C, "M", "ⰼ"), + (0x2C0D, "M", "ⰽ"), + (0x2C0E, "M", "ⰾ"), + (0x2C0F, "M", "ⰿ"), + (0x2C10, "M", "ⱀ"), + (0x2C11, "M", "ⱁ"), + (0x2C12, "M", "ⱂ"), + (0x2C13, "M", "ⱃ"), + (0x2C14, "M", "ⱄ"), + (0x2C15, "M", "ⱅ"), + (0x2C16, "M", "ⱆ"), + (0x2C17, "M", "ⱇ"), + (0x2C18, "M", "ⱈ"), + (0x2C19, "M", "ⱉ"), + (0x2C1A, "M", "ⱊ"), + (0x2C1B, "M", "ⱋ"), + (0x2C1C, "M", "ⱌ"), + (0x2C1D, "M", "ⱍ"), + (0x2C1E, "M", "ⱎ"), + (0x2C1F, "M", "ⱏ"), + (0x2C20, "M", "ⱐ"), + (0x2C21, "M", "ⱑ"), + (0x2C22, "M", "ⱒ"), + (0x2C23, "M", "ⱓ"), + (0x2C24, "M", "ⱔ"), + (0x2C25, "M", "ⱕ"), + (0x2C26, "M", "ⱖ"), + (0x2C27, "M", "ⱗ"), + (0x2C28, "M", "ⱘ"), + (0x2C29, "M", "ⱙ"), + (0x2C2A, "M", "ⱚ"), + (0x2C2B, "M", "ⱛ"), + (0x2C2C, "M", "ⱜ"), + (0x2C2D, "M", "ⱝ"), + (0x2C2E, "M", "ⱞ"), + (0x2C2F, "M", "ⱟ"), + (0x2C30, "V"), + (0x2C60, "M", "ⱡ"), + (0x2C61, "V"), + (0x2C62, "M", "ɫ"), + (0x2C63, "M", "ᵽ"), + (0x2C64, "M", "ɽ"), + (0x2C65, "V"), + (0x2C67, 
"M", "ⱨ"), + (0x2C68, "V"), + (0x2C69, "M", "ⱪ"), + (0x2C6A, "V"), + (0x2C6B, "M", "ⱬ"), + (0x2C6C, "V"), + (0x2C6D, "M", "ɑ"), + (0x2C6E, "M", "ɱ"), + (0x2C6F, "M", "ɐ"), + (0x2C70, "M", "ɒ"), + (0x2C71, "V"), + (0x2C72, "M", "ⱳ"), + (0x2C73, "V"), + (0x2C75, "M", "ⱶ"), + (0x2C76, "V"), + (0x2C7C, "M", "j"), + (0x2C7D, "M", "v"), + (0x2C7E, "M", "ȿ"), + (0x2C7F, "M", "ɀ"), + (0x2C80, "M", "ⲁ"), + (0x2C81, "V"), + (0x2C82, "M", "ⲃ"), + (0x2C83, "V"), + (0x2C84, "M", "ⲅ"), + (0x2C85, "V"), + (0x2C86, "M", "ⲇ"), + (0x2C87, "V"), + (0x2C88, "M", "ⲉ"), + (0x2C89, "V"), + (0x2C8A, "M", "ⲋ"), + (0x2C8B, "V"), + (0x2C8C, "M", "ⲍ"), + (0x2C8D, "V"), + (0x2C8E, "M", "ⲏ"), + (0x2C8F, "V"), + (0x2C90, "M", "ⲑ"), + (0x2C91, "V"), + (0x2C92, "M", "ⲓ"), + (0x2C93, "V"), + (0x2C94, "M", "ⲕ"), + (0x2C95, "V"), + (0x2C96, "M", "ⲗ"), + (0x2C97, "V"), + (0x2C98, "M", "ⲙ"), + (0x2C99, "V"), + (0x2C9A, "M", "ⲛ"), + (0x2C9B, "V"), + (0x2C9C, "M", "ⲝ"), + (0x2C9D, "V"), + (0x2C9E, "M", "ⲟ"), + (0x2C9F, "V"), + (0x2CA0, "M", "ⲡ"), + (0x2CA1, "V"), + (0x2CA2, "M", "ⲣ"), + (0x2CA3, "V"), + (0x2CA4, "M", "ⲥ"), + (0x2CA5, "V"), + ] + + +def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2CA6, "M", "ⲧ"), + (0x2CA7, "V"), + (0x2CA8, "M", "ⲩ"), + (0x2CA9, "V"), + (0x2CAA, "M", "ⲫ"), + (0x2CAB, "V"), + (0x2CAC, "M", "ⲭ"), + (0x2CAD, "V"), + (0x2CAE, "M", "ⲯ"), + (0x2CAF, "V"), + (0x2CB0, "M", "ⲱ"), + (0x2CB1, "V"), + (0x2CB2, "M", "ⲳ"), + (0x2CB3, "V"), + (0x2CB4, "M", "ⲵ"), + (0x2CB5, "V"), + (0x2CB6, "M", "ⲷ"), + (0x2CB7, "V"), + (0x2CB8, "M", "ⲹ"), + (0x2CB9, "V"), + (0x2CBA, "M", "ⲻ"), + (0x2CBB, "V"), + (0x2CBC, "M", "ⲽ"), + (0x2CBD, "V"), + (0x2CBE, "M", "ⲿ"), + (0x2CBF, "V"), + (0x2CC0, "M", "ⳁ"), + (0x2CC1, "V"), + (0x2CC2, "M", "ⳃ"), + (0x2CC3, "V"), + (0x2CC4, "M", "ⳅ"), + (0x2CC5, "V"), + (0x2CC6, "M", "ⳇ"), + (0x2CC7, "V"), + (0x2CC8, "M", "ⳉ"), + (0x2CC9, "V"), + (0x2CCA, "M", "ⳋ"), + (0x2CCB, "V"), + (0x2CCC, "M", "ⳍ"), + (0x2CCD, "V"), + (0x2CCE, "M", "ⳏ"), + (0x2CCF, "V"), + (0x2CD0, "M", "ⳑ"), + (0x2CD1, "V"), + (0x2CD2, "M", "ⳓ"), + (0x2CD3, "V"), + (0x2CD4, "M", "ⳕ"), + (0x2CD5, "V"), + (0x2CD6, "M", "ⳗ"), + (0x2CD7, "V"), + (0x2CD8, "M", "ⳙ"), + (0x2CD9, "V"), + (0x2CDA, "M", "ⳛ"), + (0x2CDB, "V"), + (0x2CDC, "M", "ⳝ"), + (0x2CDD, "V"), + (0x2CDE, "M", "ⳟ"), + (0x2CDF, "V"), + (0x2CE0, "M", "ⳡ"), + (0x2CE1, "V"), + (0x2CE2, "M", "ⳣ"), + (0x2CE3, "V"), + (0x2CEB, "M", "ⳬ"), + (0x2CEC, "V"), + (0x2CED, "M", "ⳮ"), + (0x2CEE, "V"), + (0x2CF2, "M", "ⳳ"), + (0x2CF3, "V"), + (0x2CF4, "X"), + (0x2CF9, "V"), + (0x2D26, "X"), + (0x2D27, "V"), + (0x2D28, "X"), + (0x2D2D, "V"), + (0x2D2E, "X"), + (0x2D30, "V"), + (0x2D68, "X"), + (0x2D6F, "M", "ⵡ"), + (0x2D70, "V"), + (0x2D71, "X"), + (0x2D7F, "V"), + (0x2D97, "X"), + (0x2DA0, "V"), + (0x2DA7, "X"), + (0x2DA8, "V"), + (0x2DAF, "X"), + (0x2DB0, "V"), + (0x2DB7, "X"), + (0x2DB8, "V"), + (0x2DBF, "X"), + (0x2DC0, "V"), + (0x2DC7, "X"), + (0x2DC8, "V"), + (0x2DCF, "X"), + (0x2DD0, "V"), + (0x2DD7, "X"), + (0x2DD8, "V"), + (0x2DDF, "X"), + (0x2DE0, "V"), + (0x2E5E, "X"), + ] + + +def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2E80, "V"), + (0x2E9A, "X"), + (0x2E9B, "V"), + (0x2E9F, "M", "母"), + (0x2EA0, "V"), + (0x2EF3, "M", "龟"), + (0x2EF4, "X"), + (0x2F00, "M", "一"), + (0x2F01, "M", "丨"), + (0x2F02, "M", "丶"), + (0x2F03, "M", "丿"), + (0x2F04, "M", "乙"), + (0x2F05, "M", "亅"), + (0x2F06, "M", "二"), + (0x2F07, "M", "亠"), + (0x2F08, "M", "人"), + (0x2F09, "M", "儿"), + (0x2F0A, "M", "入"), + (0x2F0B, 
"M", "八"), + (0x2F0C, "M", "冂"), + (0x2F0D, "M", "冖"), + (0x2F0E, "M", "冫"), + (0x2F0F, "M", "几"), + (0x2F10, "M", "凵"), + (0x2F11, "M", "刀"), + (0x2F12, "M", "力"), + (0x2F13, "M", "勹"), + (0x2F14, "M", "匕"), + (0x2F15, "M", "匚"), + (0x2F16, "M", "匸"), + (0x2F17, "M", "十"), + (0x2F18, "M", "卜"), + (0x2F19, "M", "卩"), + (0x2F1A, "M", "厂"), + (0x2F1B, "M", "厶"), + (0x2F1C, "M", "又"), + (0x2F1D, "M", "口"), + (0x2F1E, "M", "囗"), + (0x2F1F, "M", "土"), + (0x2F20, "M", "士"), + (0x2F21, "M", "夂"), + (0x2F22, "M", "夊"), + (0x2F23, "M", "夕"), + (0x2F24, "M", "大"), + (0x2F25, "M", "女"), + (0x2F26, "M", "子"), + (0x2F27, "M", "宀"), + (0x2F28, "M", "寸"), + (0x2F29, "M", "小"), + (0x2F2A, "M", "尢"), + (0x2F2B, "M", "尸"), + (0x2F2C, "M", "屮"), + (0x2F2D, "M", "山"), + (0x2F2E, "M", "巛"), + (0x2F2F, "M", "工"), + (0x2F30, "M", "己"), + (0x2F31, "M", "巾"), + (0x2F32, "M", "干"), + (0x2F33, "M", "幺"), + (0x2F34, "M", "广"), + (0x2F35, "M", "廴"), + (0x2F36, "M", "廾"), + (0x2F37, "M", "弋"), + (0x2F38, "M", "弓"), + (0x2F39, "M", "彐"), + (0x2F3A, "M", "彡"), + (0x2F3B, "M", "彳"), + (0x2F3C, "M", "心"), + (0x2F3D, "M", "戈"), + (0x2F3E, "M", "戶"), + (0x2F3F, "M", "手"), + (0x2F40, "M", "支"), + (0x2F41, "M", "攴"), + (0x2F42, "M", "文"), + (0x2F43, "M", "斗"), + (0x2F44, "M", "斤"), + (0x2F45, "M", "方"), + (0x2F46, "M", "无"), + (0x2F47, "M", "日"), + (0x2F48, "M", "曰"), + (0x2F49, "M", "月"), + (0x2F4A, "M", "木"), + (0x2F4B, "M", "欠"), + (0x2F4C, "M", "止"), + (0x2F4D, "M", "歹"), + (0x2F4E, "M", "殳"), + (0x2F4F, "M", "毋"), + (0x2F50, "M", "比"), + (0x2F51, "M", "毛"), + (0x2F52, "M", "氏"), + (0x2F53, "M", "气"), + (0x2F54, "M", "水"), + (0x2F55, "M", "火"), + (0x2F56, "M", "爪"), + (0x2F57, "M", "父"), + (0x2F58, "M", "爻"), + (0x2F59, "M", "爿"), + (0x2F5A, "M", "片"), + (0x2F5B, "M", "牙"), + (0x2F5C, "M", "牛"), + ] + + +def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F5D, "M", "犬"), + (0x2F5E, "M", "玄"), + (0x2F5F, "M", "玉"), + (0x2F60, "M", "瓜"), + (0x2F61, "M", "瓦"), + (0x2F62, "M", "甘"), + (0x2F63, "M", "生"), + (0x2F64, "M", "用"), + (0x2F65, "M", "田"), + (0x2F66, "M", "疋"), + (0x2F67, "M", "疒"), + (0x2F68, "M", "癶"), + (0x2F69, "M", "白"), + (0x2F6A, "M", "皮"), + (0x2F6B, "M", "皿"), + (0x2F6C, "M", "目"), + (0x2F6D, "M", "矛"), + (0x2F6E, "M", "矢"), + (0x2F6F, "M", "石"), + (0x2F70, "M", "示"), + (0x2F71, "M", "禸"), + (0x2F72, "M", "禾"), + (0x2F73, "M", "穴"), + (0x2F74, "M", "立"), + (0x2F75, "M", "竹"), + (0x2F76, "M", "米"), + (0x2F77, "M", "糸"), + (0x2F78, "M", "缶"), + (0x2F79, "M", "网"), + (0x2F7A, "M", "羊"), + (0x2F7B, "M", "羽"), + (0x2F7C, "M", "老"), + (0x2F7D, "M", "而"), + (0x2F7E, "M", "耒"), + (0x2F7F, "M", "耳"), + (0x2F80, "M", "聿"), + (0x2F81, "M", "肉"), + (0x2F82, "M", "臣"), + (0x2F83, "M", "自"), + (0x2F84, "M", "至"), + (0x2F85, "M", "臼"), + (0x2F86, "M", "舌"), + (0x2F87, "M", "舛"), + (0x2F88, "M", "舟"), + (0x2F89, "M", "艮"), + (0x2F8A, "M", "色"), + (0x2F8B, "M", "艸"), + (0x2F8C, "M", "虍"), + (0x2F8D, "M", "虫"), + (0x2F8E, "M", "血"), + (0x2F8F, "M", "行"), + (0x2F90, "M", "衣"), + (0x2F91, "M", "襾"), + (0x2F92, "M", "見"), + (0x2F93, "M", "角"), + (0x2F94, "M", "言"), + (0x2F95, "M", "谷"), + (0x2F96, "M", "豆"), + (0x2F97, "M", "豕"), + (0x2F98, "M", "豸"), + (0x2F99, "M", "貝"), + (0x2F9A, "M", "赤"), + (0x2F9B, "M", "走"), + (0x2F9C, "M", "足"), + (0x2F9D, "M", "身"), + (0x2F9E, "M", "車"), + (0x2F9F, "M", "辛"), + (0x2FA0, "M", "辰"), + (0x2FA1, "M", "辵"), + (0x2FA2, "M", "邑"), + (0x2FA3, "M", "酉"), + (0x2FA4, "M", "釆"), + (0x2FA5, "M", "里"), + (0x2FA6, "M", "金"), + (0x2FA7, "M", "長"), + (0x2FA8, "M", "門"), 
+ (0x2FA9, "M", "阜"), + (0x2FAA, "M", "隶"), + (0x2FAB, "M", "隹"), + (0x2FAC, "M", "雨"), + (0x2FAD, "M", "靑"), + (0x2FAE, "M", "非"), + (0x2FAF, "M", "面"), + (0x2FB0, "M", "革"), + (0x2FB1, "M", "韋"), + (0x2FB2, "M", "韭"), + (0x2FB3, "M", "音"), + (0x2FB4, "M", "頁"), + (0x2FB5, "M", "風"), + (0x2FB6, "M", "飛"), + (0x2FB7, "M", "食"), + (0x2FB8, "M", "首"), + (0x2FB9, "M", "香"), + (0x2FBA, "M", "馬"), + (0x2FBB, "M", "骨"), + (0x2FBC, "M", "高"), + (0x2FBD, "M", "髟"), + (0x2FBE, "M", "鬥"), + (0x2FBF, "M", "鬯"), + (0x2FC0, "M", "鬲"), + ] + + +def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2FC1, "M", "鬼"), + (0x2FC2, "M", "魚"), + (0x2FC3, "M", "鳥"), + (0x2FC4, "M", "鹵"), + (0x2FC5, "M", "鹿"), + (0x2FC6, "M", "麥"), + (0x2FC7, "M", "麻"), + (0x2FC8, "M", "黃"), + (0x2FC9, "M", "黍"), + (0x2FCA, "M", "黑"), + (0x2FCB, "M", "黹"), + (0x2FCC, "M", "黽"), + (0x2FCD, "M", "鼎"), + (0x2FCE, "M", "鼓"), + (0x2FCF, "M", "鼠"), + (0x2FD0, "M", "鼻"), + (0x2FD1, "M", "齊"), + (0x2FD2, "M", "齒"), + (0x2FD3, "M", "龍"), + (0x2FD4, "M", "龜"), + (0x2FD5, "M", "龠"), + (0x2FD6, "X"), + (0x3000, "M", " "), + (0x3001, "V"), + (0x3002, "M", "."), + (0x3003, "V"), + (0x3036, "M", "〒"), + (0x3037, "V"), + (0x3038, "M", "十"), + (0x3039, "M", "卄"), + (0x303A, "M", "卅"), + (0x303B, "V"), + (0x3040, "X"), + (0x3041, "V"), + (0x3097, "X"), + (0x3099, "V"), + (0x309B, "M", " ゙"), + (0x309C, "M", " ゚"), + (0x309D, "V"), + (0x309F, "M", "より"), + (0x30A0, "V"), + (0x30FF, "M", "コト"), + (0x3100, "X"), + (0x3105, "V"), + (0x3130, "X"), + (0x3131, "M", "ᄀ"), + (0x3132, "M", "ᄁ"), + (0x3133, "M", "ᆪ"), + (0x3134, "M", "ᄂ"), + (0x3135, "M", "ᆬ"), + (0x3136, "M", "ᆭ"), + (0x3137, "M", "ᄃ"), + (0x3138, "M", "ᄄ"), + (0x3139, "M", "ᄅ"), + (0x313A, "M", "ᆰ"), + (0x313B, "M", "ᆱ"), + (0x313C, "M", "ᆲ"), + (0x313D, "M", "ᆳ"), + (0x313E, "M", "ᆴ"), + (0x313F, "M", "ᆵ"), + (0x3140, "M", "ᄚ"), + (0x3141, "M", "ᄆ"), + (0x3142, "M", "ᄇ"), + (0x3143, "M", "ᄈ"), + (0x3144, "M", "ᄡ"), + (0x3145, "M", "ᄉ"), + (0x3146, "M", "ᄊ"), + (0x3147, "M", "ᄋ"), + (0x3148, "M", "ᄌ"), + (0x3149, "M", "ᄍ"), + (0x314A, "M", "ᄎ"), + (0x314B, "M", "ᄏ"), + (0x314C, "M", "ᄐ"), + (0x314D, "M", "ᄑ"), + (0x314E, "M", "ᄒ"), + (0x314F, "M", "ᅡ"), + (0x3150, "M", "ᅢ"), + (0x3151, "M", "ᅣ"), + (0x3152, "M", "ᅤ"), + (0x3153, "M", "ᅥ"), + (0x3154, "M", "ᅦ"), + (0x3155, "M", "ᅧ"), + (0x3156, "M", "ᅨ"), + (0x3157, "M", "ᅩ"), + (0x3158, "M", "ᅪ"), + (0x3159, "M", "ᅫ"), + (0x315A, "M", "ᅬ"), + (0x315B, "M", "ᅭ"), + (0x315C, "M", "ᅮ"), + (0x315D, "M", "ᅯ"), + (0x315E, "M", "ᅰ"), + (0x315F, "M", "ᅱ"), + (0x3160, "M", "ᅲ"), + (0x3161, "M", "ᅳ"), + (0x3162, "M", "ᅴ"), + (0x3163, "M", "ᅵ"), + (0x3164, "I"), + (0x3165, "M", "ᄔ"), + (0x3166, "M", "ᄕ"), + (0x3167, "M", "ᇇ"), + ] + + +def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3168, "M", "ᇈ"), + (0x3169, "M", "ᇌ"), + (0x316A, "M", "ᇎ"), + (0x316B, "M", "ᇓ"), + (0x316C, "M", "ᇗ"), + (0x316D, "M", "ᇙ"), + (0x316E, "M", "ᄜ"), + (0x316F, "M", "ᇝ"), + (0x3170, "M", "ᇟ"), + (0x3171, "M", "ᄝ"), + (0x3172, "M", "ᄞ"), + (0x3173, "M", "ᄠ"), + (0x3174, "M", "ᄢ"), + (0x3175, "M", "ᄣ"), + (0x3176, "M", "ᄧ"), + (0x3177, "M", "ᄩ"), + (0x3178, "M", "ᄫ"), + (0x3179, "M", "ᄬ"), + (0x317A, "M", "ᄭ"), + (0x317B, "M", "ᄮ"), + (0x317C, "M", "ᄯ"), + (0x317D, "M", "ᄲ"), + (0x317E, "M", "ᄶ"), + (0x317F, "M", "ᅀ"), + (0x3180, "M", "ᅇ"), + (0x3181, "M", "ᅌ"), + (0x3182, "M", "ᇱ"), + (0x3183, "M", "ᇲ"), + (0x3184, "M", "ᅗ"), + (0x3185, "M", "ᅘ"), + (0x3186, "M", "ᅙ"), + (0x3187, "M", "ᆄ"), + (0x3188, "M", 
"ᆅ"), + (0x3189, "M", "ᆈ"), + (0x318A, "M", "ᆑ"), + (0x318B, "M", "ᆒ"), + (0x318C, "M", "ᆔ"), + (0x318D, "M", "ᆞ"), + (0x318E, "M", "ᆡ"), + (0x318F, "X"), + (0x3190, "V"), + (0x3192, "M", "一"), + (0x3193, "M", "二"), + (0x3194, "M", "三"), + (0x3195, "M", "四"), + (0x3196, "M", "上"), + (0x3197, "M", "中"), + (0x3198, "M", "下"), + (0x3199, "M", "甲"), + (0x319A, "M", "乙"), + (0x319B, "M", "丙"), + (0x319C, "M", "丁"), + (0x319D, "M", "天"), + (0x319E, "M", "地"), + (0x319F, "M", "人"), + (0x31A0, "V"), + (0x31E6, "X"), + (0x31F0, "V"), + (0x3200, "M", "(ᄀ)"), + (0x3201, "M", "(ᄂ)"), + (0x3202, "M", "(ᄃ)"), + (0x3203, "M", "(ᄅ)"), + (0x3204, "M", "(ᄆ)"), + (0x3205, "M", "(ᄇ)"), + (0x3206, "M", "(ᄉ)"), + (0x3207, "M", "(ᄋ)"), + (0x3208, "M", "(ᄌ)"), + (0x3209, "M", "(ᄎ)"), + (0x320A, "M", "(ᄏ)"), + (0x320B, "M", "(ᄐ)"), + (0x320C, "M", "(ᄑ)"), + (0x320D, "M", "(ᄒ)"), + (0x320E, "M", "(가)"), + (0x320F, "M", "(나)"), + (0x3210, "M", "(다)"), + (0x3211, "M", "(라)"), + (0x3212, "M", "(마)"), + (0x3213, "M", "(바)"), + (0x3214, "M", "(사)"), + (0x3215, "M", "(아)"), + (0x3216, "M", "(자)"), + (0x3217, "M", "(차)"), + (0x3218, "M", "(카)"), + (0x3219, "M", "(타)"), + (0x321A, "M", "(파)"), + (0x321B, "M", "(하)"), + (0x321C, "M", "(주)"), + (0x321D, "M", "(오전)"), + (0x321E, "M", "(오후)"), + (0x321F, "X"), + (0x3220, "M", "(一)"), + (0x3221, "M", "(二)"), + (0x3222, "M", "(三)"), + (0x3223, "M", "(四)"), + (0x3224, "M", "(五)"), + (0x3225, "M", "(六)"), + (0x3226, "M", "(七)"), + (0x3227, "M", "(八)"), + (0x3228, "M", "(九)"), + (0x3229, "M", "(十)"), + ] + + +def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x322A, "M", "(月)"), + (0x322B, "M", "(火)"), + (0x322C, "M", "(水)"), + (0x322D, "M", "(木)"), + (0x322E, "M", "(金)"), + (0x322F, "M", "(土)"), + (0x3230, "M", "(日)"), + (0x3231, "M", "(株)"), + (0x3232, "M", "(有)"), + (0x3233, "M", "(社)"), + (0x3234, "M", "(名)"), + (0x3235, "M", "(特)"), + (0x3236, "M", "(財)"), + (0x3237, "M", "(祝)"), + (0x3238, "M", "(労)"), + (0x3239, "M", "(代)"), + (0x323A, "M", "(呼)"), + (0x323B, "M", "(学)"), + (0x323C, "M", "(監)"), + (0x323D, "M", "(企)"), + (0x323E, "M", "(資)"), + (0x323F, "M", "(協)"), + (0x3240, "M", "(祭)"), + (0x3241, "M", "(休)"), + (0x3242, "M", "(自)"), + (0x3243, "M", "(至)"), + (0x3244, "M", "問"), + (0x3245, "M", "幼"), + (0x3246, "M", "文"), + (0x3247, "M", "箏"), + (0x3248, "V"), + (0x3250, "M", "pte"), + (0x3251, "M", "21"), + (0x3252, "M", "22"), + (0x3253, "M", "23"), + (0x3254, "M", "24"), + (0x3255, "M", "25"), + (0x3256, "M", "26"), + (0x3257, "M", "27"), + (0x3258, "M", "28"), + (0x3259, "M", "29"), + (0x325A, "M", "30"), + (0x325B, "M", "31"), + (0x325C, "M", "32"), + (0x325D, "M", "33"), + (0x325E, "M", "34"), + (0x325F, "M", "35"), + (0x3260, "M", "ᄀ"), + (0x3261, "M", "ᄂ"), + (0x3262, "M", "ᄃ"), + (0x3263, "M", "ᄅ"), + (0x3264, "M", "ᄆ"), + (0x3265, "M", "ᄇ"), + (0x3266, "M", "ᄉ"), + (0x3267, "M", "ᄋ"), + (0x3268, "M", "ᄌ"), + (0x3269, "M", "ᄎ"), + (0x326A, "M", "ᄏ"), + (0x326B, "M", "ᄐ"), + (0x326C, "M", "ᄑ"), + (0x326D, "M", "ᄒ"), + (0x326E, "M", "가"), + (0x326F, "M", "나"), + (0x3270, "M", "다"), + (0x3271, "M", "라"), + (0x3272, "M", "마"), + (0x3273, "M", "바"), + (0x3274, "M", "사"), + (0x3275, "M", "아"), + (0x3276, "M", "자"), + (0x3277, "M", "차"), + (0x3278, "M", "카"), + (0x3279, "M", "타"), + (0x327A, "M", "파"), + (0x327B, "M", "하"), + (0x327C, "M", "참고"), + (0x327D, "M", "주의"), + (0x327E, "M", "우"), + (0x327F, "V"), + (0x3280, "M", "一"), + (0x3281, "M", "二"), + (0x3282, "M", "三"), + (0x3283, "M", "四"), + (0x3284, "M", "五"), + (0x3285, "M", "六"), 
+ (0x3286, "M", "七"), + (0x3287, "M", "八"), + (0x3288, "M", "九"), + (0x3289, "M", "十"), + (0x328A, "M", "月"), + (0x328B, "M", "火"), + (0x328C, "M", "水"), + (0x328D, "M", "木"), + (0x328E, "M", "金"), + (0x328F, "M", "土"), + (0x3290, "M", "日"), + (0x3291, "M", "株"), + (0x3292, "M", "有"), + (0x3293, "M", "社"), + (0x3294, "M", "名"), + ] + + +def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3295, "M", "特"), + (0x3296, "M", "財"), + (0x3297, "M", "祝"), + (0x3298, "M", "労"), + (0x3299, "M", "秘"), + (0x329A, "M", "男"), + (0x329B, "M", "女"), + (0x329C, "M", "適"), + (0x329D, "M", "優"), + (0x329E, "M", "印"), + (0x329F, "M", "注"), + (0x32A0, "M", "項"), + (0x32A1, "M", "休"), + (0x32A2, "M", "写"), + (0x32A3, "M", "正"), + (0x32A4, "M", "上"), + (0x32A5, "M", "中"), + (0x32A6, "M", "下"), + (0x32A7, "M", "左"), + (0x32A8, "M", "右"), + (0x32A9, "M", "医"), + (0x32AA, "M", "宗"), + (0x32AB, "M", "学"), + (0x32AC, "M", "監"), + (0x32AD, "M", "企"), + (0x32AE, "M", "資"), + (0x32AF, "M", "協"), + (0x32B0, "M", "夜"), + (0x32B1, "M", "36"), + (0x32B2, "M", "37"), + (0x32B3, "M", "38"), + (0x32B4, "M", "39"), + (0x32B5, "M", "40"), + (0x32B6, "M", "41"), + (0x32B7, "M", "42"), + (0x32B8, "M", "43"), + (0x32B9, "M", "44"), + (0x32BA, "M", "45"), + (0x32BB, "M", "46"), + (0x32BC, "M", "47"), + (0x32BD, "M", "48"), + (0x32BE, "M", "49"), + (0x32BF, "M", "50"), + (0x32C0, "M", "1月"), + (0x32C1, "M", "2月"), + (0x32C2, "M", "3月"), + (0x32C3, "M", "4月"), + (0x32C4, "M", "5月"), + (0x32C5, "M", "6月"), + (0x32C6, "M", "7月"), + (0x32C7, "M", "8月"), + (0x32C8, "M", "9月"), + (0x32C9, "M", "10月"), + (0x32CA, "M", "11月"), + (0x32CB, "M", "12月"), + (0x32CC, "M", "hg"), + (0x32CD, "M", "erg"), + (0x32CE, "M", "ev"), + (0x32CF, "M", "ltd"), + (0x32D0, "M", "ア"), + (0x32D1, "M", "イ"), + (0x32D2, "M", "ウ"), + (0x32D3, "M", "エ"), + (0x32D4, "M", "オ"), + (0x32D5, "M", "カ"), + (0x32D6, "M", "キ"), + (0x32D7, "M", "ク"), + (0x32D8, "M", "ケ"), + (0x32D9, "M", "コ"), + (0x32DA, "M", "サ"), + (0x32DB, "M", "シ"), + (0x32DC, "M", "ス"), + (0x32DD, "M", "セ"), + (0x32DE, "M", "ソ"), + (0x32DF, "M", "タ"), + (0x32E0, "M", "チ"), + (0x32E1, "M", "ツ"), + (0x32E2, "M", "テ"), + (0x32E3, "M", "ト"), + (0x32E4, "M", "ナ"), + (0x32E5, "M", "ニ"), + (0x32E6, "M", "ヌ"), + (0x32E7, "M", "ネ"), + (0x32E8, "M", "ノ"), + (0x32E9, "M", "ハ"), + (0x32EA, "M", "ヒ"), + (0x32EB, "M", "フ"), + (0x32EC, "M", "ヘ"), + (0x32ED, "M", "ホ"), + (0x32EE, "M", "マ"), + (0x32EF, "M", "ミ"), + (0x32F0, "M", "ム"), + (0x32F1, "M", "メ"), + (0x32F2, "M", "モ"), + (0x32F3, "M", "ヤ"), + (0x32F4, "M", "ユ"), + (0x32F5, "M", "ヨ"), + (0x32F6, "M", "ラ"), + (0x32F7, "M", "リ"), + (0x32F8, "M", "ル"), + ] + + +def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x32F9, "M", "レ"), + (0x32FA, "M", "ロ"), + (0x32FB, "M", "ワ"), + (0x32FC, "M", "ヰ"), + (0x32FD, "M", "ヱ"), + (0x32FE, "M", "ヲ"), + (0x32FF, "M", "令和"), + (0x3300, "M", "アパート"), + (0x3301, "M", "アルファ"), + (0x3302, "M", "アンペア"), + (0x3303, "M", "アール"), + (0x3304, "M", "イニング"), + (0x3305, "M", "インチ"), + (0x3306, "M", "ウォン"), + (0x3307, "M", "エスクード"), + (0x3308, "M", "エーカー"), + (0x3309, "M", "オンス"), + (0x330A, "M", "オーム"), + (0x330B, "M", "カイリ"), + (0x330C, "M", "カラット"), + (0x330D, "M", "カロリー"), + (0x330E, "M", "ガロン"), + (0x330F, "M", "ガンマ"), + (0x3310, "M", "ギガ"), + (0x3311, "M", "ギニー"), + (0x3312, "M", "キュリー"), + (0x3313, "M", "ギルダー"), + (0x3314, "M", "キロ"), + (0x3315, "M", "キログラム"), + (0x3316, "M", "キロメートル"), + (0x3317, "M", "キロワット"), + (0x3318, "M", "グラム"), + (0x3319, "M", "グラムトン"), + (0x331A, 
"M", "クルゼイロ"), + (0x331B, "M", "クローネ"), + (0x331C, "M", "ケース"), + (0x331D, "M", "コルナ"), + (0x331E, "M", "コーポ"), + (0x331F, "M", "サイクル"), + (0x3320, "M", "サンチーム"), + (0x3321, "M", "シリング"), + (0x3322, "M", "センチ"), + (0x3323, "M", "セント"), + (0x3324, "M", "ダース"), + (0x3325, "M", "デシ"), + (0x3326, "M", "ドル"), + (0x3327, "M", "トン"), + (0x3328, "M", "ナノ"), + (0x3329, "M", "ノット"), + (0x332A, "M", "ハイツ"), + (0x332B, "M", "パーセント"), + (0x332C, "M", "パーツ"), + (0x332D, "M", "バーレル"), + (0x332E, "M", "ピアストル"), + (0x332F, "M", "ピクル"), + (0x3330, "M", "ピコ"), + (0x3331, "M", "ビル"), + (0x3332, "M", "ファラッド"), + (0x3333, "M", "フィート"), + (0x3334, "M", "ブッシェル"), + (0x3335, "M", "フラン"), + (0x3336, "M", "ヘクタール"), + (0x3337, "M", "ペソ"), + (0x3338, "M", "ペニヒ"), + (0x3339, "M", "ヘルツ"), + (0x333A, "M", "ペンス"), + (0x333B, "M", "ページ"), + (0x333C, "M", "ベータ"), + (0x333D, "M", "ポイント"), + (0x333E, "M", "ボルト"), + (0x333F, "M", "ホン"), + (0x3340, "M", "ポンド"), + (0x3341, "M", "ホール"), + (0x3342, "M", "ホーン"), + (0x3343, "M", "マイクロ"), + (0x3344, "M", "マイル"), + (0x3345, "M", "マッハ"), + (0x3346, "M", "マルク"), + (0x3347, "M", "マンション"), + (0x3348, "M", "ミクロン"), + (0x3349, "M", "ミリ"), + (0x334A, "M", "ミリバール"), + (0x334B, "M", "メガ"), + (0x334C, "M", "メガトン"), + (0x334D, "M", "メートル"), + (0x334E, "M", "ヤード"), + (0x334F, "M", "ヤール"), + (0x3350, "M", "ユアン"), + (0x3351, "M", "リットル"), + (0x3352, "M", "リラ"), + (0x3353, "M", "ルピー"), + (0x3354, "M", "ルーブル"), + (0x3355, "M", "レム"), + (0x3356, "M", "レントゲン"), + (0x3357, "M", "ワット"), + (0x3358, "M", "0点"), + (0x3359, "M", "1点"), + (0x335A, "M", "2点"), + (0x335B, "M", "3点"), + (0x335C, "M", "4点"), + ] + + +def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x335D, "M", "5点"), + (0x335E, "M", "6点"), + (0x335F, "M", "7点"), + (0x3360, "M", "8点"), + (0x3361, "M", "9点"), + (0x3362, "M", "10点"), + (0x3363, "M", "11点"), + (0x3364, "M", "12点"), + (0x3365, "M", "13点"), + (0x3366, "M", "14点"), + (0x3367, "M", "15点"), + (0x3368, "M", "16点"), + (0x3369, "M", "17点"), + (0x336A, "M", "18点"), + (0x336B, "M", "19点"), + (0x336C, "M", "20点"), + (0x336D, "M", "21点"), + (0x336E, "M", "22点"), + (0x336F, "M", "23点"), + (0x3370, "M", "24点"), + (0x3371, "M", "hpa"), + (0x3372, "M", "da"), + (0x3373, "M", "au"), + (0x3374, "M", "bar"), + (0x3375, "M", "ov"), + (0x3376, "M", "pc"), + (0x3377, "M", "dm"), + (0x3378, "M", "dm2"), + (0x3379, "M", "dm3"), + (0x337A, "M", "iu"), + (0x337B, "M", "平成"), + (0x337C, "M", "昭和"), + (0x337D, "M", "大正"), + (0x337E, "M", "明治"), + (0x337F, "M", "株式会社"), + (0x3380, "M", "pa"), + (0x3381, "M", "na"), + (0x3382, "M", "μa"), + (0x3383, "M", "ma"), + (0x3384, "M", "ka"), + (0x3385, "M", "kb"), + (0x3386, "M", "mb"), + (0x3387, "M", "gb"), + (0x3388, "M", "cal"), + (0x3389, "M", "kcal"), + (0x338A, "M", "pf"), + (0x338B, "M", "nf"), + (0x338C, "M", "μf"), + (0x338D, "M", "μg"), + (0x338E, "M", "mg"), + (0x338F, "M", "kg"), + (0x3390, "M", "hz"), + (0x3391, "M", "khz"), + (0x3392, "M", "mhz"), + (0x3393, "M", "ghz"), + (0x3394, "M", "thz"), + (0x3395, "M", "μl"), + (0x3396, "M", "ml"), + (0x3397, "M", "dl"), + (0x3398, "M", "kl"), + (0x3399, "M", "fm"), + (0x339A, "M", "nm"), + (0x339B, "M", "μm"), + (0x339C, "M", "mm"), + (0x339D, "M", "cm"), + (0x339E, "M", "km"), + (0x339F, "M", "mm2"), + (0x33A0, "M", "cm2"), + (0x33A1, "M", "m2"), + (0x33A2, "M", "km2"), + (0x33A3, "M", "mm3"), + (0x33A4, "M", "cm3"), + (0x33A5, "M", "m3"), + (0x33A6, "M", "km3"), + (0x33A7, "M", "m∕s"), + (0x33A8, "M", "m∕s2"), + (0x33A9, "M", "pa"), + (0x33AA, "M", "kpa"), + (0x33AB, "M", 
"mpa"), + (0x33AC, "M", "gpa"), + (0x33AD, "M", "rad"), + (0x33AE, "M", "rad∕s"), + (0x33AF, "M", "rad∕s2"), + (0x33B0, "M", "ps"), + (0x33B1, "M", "ns"), + (0x33B2, "M", "μs"), + (0x33B3, "M", "ms"), + (0x33B4, "M", "pv"), + (0x33B5, "M", "nv"), + (0x33B6, "M", "μv"), + (0x33B7, "M", "mv"), + (0x33B8, "M", "kv"), + (0x33B9, "M", "mv"), + (0x33BA, "M", "pw"), + (0x33BB, "M", "nw"), + (0x33BC, "M", "μw"), + (0x33BD, "M", "mw"), + (0x33BE, "M", "kw"), + (0x33BF, "M", "mw"), + (0x33C0, "M", "kω"), + ] + + +def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x33C1, "M", "mω"), + (0x33C2, "X"), + (0x33C3, "M", "bq"), + (0x33C4, "M", "cc"), + (0x33C5, "M", "cd"), + (0x33C6, "M", "c∕kg"), + (0x33C7, "X"), + (0x33C8, "M", "db"), + (0x33C9, "M", "gy"), + (0x33CA, "M", "ha"), + (0x33CB, "M", "hp"), + (0x33CC, "M", "in"), + (0x33CD, "M", "kk"), + (0x33CE, "M", "km"), + (0x33CF, "M", "kt"), + (0x33D0, "M", "lm"), + (0x33D1, "M", "ln"), + (0x33D2, "M", "log"), + (0x33D3, "M", "lx"), + (0x33D4, "M", "mb"), + (0x33D5, "M", "mil"), + (0x33D6, "M", "mol"), + (0x33D7, "M", "ph"), + (0x33D8, "X"), + (0x33D9, "M", "ppm"), + (0x33DA, "M", "pr"), + (0x33DB, "M", "sr"), + (0x33DC, "M", "sv"), + (0x33DD, "M", "wb"), + (0x33DE, "M", "v∕m"), + (0x33DF, "M", "a∕m"), + (0x33E0, "M", "1日"), + (0x33E1, "M", "2日"), + (0x33E2, "M", "3日"), + (0x33E3, "M", "4日"), + (0x33E4, "M", "5日"), + (0x33E5, "M", "6日"), + (0x33E6, "M", "7日"), + (0x33E7, "M", "8日"), + (0x33E8, "M", "9日"), + (0x33E9, "M", "10日"), + (0x33EA, "M", "11日"), + (0x33EB, "M", "12日"), + (0x33EC, "M", "13日"), + (0x33ED, "M", "14日"), + (0x33EE, "M", "15日"), + (0x33EF, "M", "16日"), + (0x33F0, "M", "17日"), + (0x33F1, "M", "18日"), + (0x33F2, "M", "19日"), + (0x33F3, "M", "20日"), + (0x33F4, "M", "21日"), + (0x33F5, "M", "22日"), + (0x33F6, "M", "23日"), + (0x33F7, "M", "24日"), + (0x33F8, "M", "25日"), + (0x33F9, "M", "26日"), + (0x33FA, "M", "27日"), + (0x33FB, "M", "28日"), + (0x33FC, "M", "29日"), + (0x33FD, "M", "30日"), + (0x33FE, "M", "31日"), + (0x33FF, "M", "gal"), + (0x3400, "V"), + (0xA48D, "X"), + (0xA490, "V"), + (0xA4C7, "X"), + (0xA4D0, "V"), + (0xA62C, "X"), + (0xA640, "M", "ꙁ"), + (0xA641, "V"), + (0xA642, "M", "ꙃ"), + (0xA643, "V"), + (0xA644, "M", "ꙅ"), + (0xA645, "V"), + (0xA646, "M", "ꙇ"), + (0xA647, "V"), + (0xA648, "M", "ꙉ"), + (0xA649, "V"), + (0xA64A, "M", "ꙋ"), + (0xA64B, "V"), + (0xA64C, "M", "ꙍ"), + (0xA64D, "V"), + (0xA64E, "M", "ꙏ"), + (0xA64F, "V"), + (0xA650, "M", "ꙑ"), + (0xA651, "V"), + (0xA652, "M", "ꙓ"), + (0xA653, "V"), + (0xA654, "M", "ꙕ"), + (0xA655, "V"), + (0xA656, "M", "ꙗ"), + (0xA657, "V"), + (0xA658, "M", "ꙙ"), + (0xA659, "V"), + (0xA65A, "M", "ꙛ"), + (0xA65B, "V"), + (0xA65C, "M", "ꙝ"), + (0xA65D, "V"), + (0xA65E, "M", "ꙟ"), + ] + + +def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA65F, "V"), + (0xA660, "M", "ꙡ"), + (0xA661, "V"), + (0xA662, "M", "ꙣ"), + (0xA663, "V"), + (0xA664, "M", "ꙥ"), + (0xA665, "V"), + (0xA666, "M", "ꙧ"), + (0xA667, "V"), + (0xA668, "M", "ꙩ"), + (0xA669, "V"), + (0xA66A, "M", "ꙫ"), + (0xA66B, "V"), + (0xA66C, "M", "ꙭ"), + (0xA66D, "V"), + (0xA680, "M", "ꚁ"), + (0xA681, "V"), + (0xA682, "M", "ꚃ"), + (0xA683, "V"), + (0xA684, "M", "ꚅ"), + (0xA685, "V"), + (0xA686, "M", "ꚇ"), + (0xA687, "V"), + (0xA688, "M", "ꚉ"), + (0xA689, "V"), + (0xA68A, "M", "ꚋ"), + (0xA68B, "V"), + (0xA68C, "M", "ꚍ"), + (0xA68D, "V"), + (0xA68E, "M", "ꚏ"), + (0xA68F, "V"), + (0xA690, "M", "ꚑ"), + (0xA691, "V"), + (0xA692, "M", "ꚓ"), + (0xA693, "V"), + (0xA694, "M", "ꚕ"), + 
(0xA695, "V"), + (0xA696, "M", "ꚗ"), + (0xA697, "V"), + (0xA698, "M", "ꚙ"), + (0xA699, "V"), + (0xA69A, "M", "ꚛ"), + (0xA69B, "V"), + (0xA69C, "M", "ъ"), + (0xA69D, "M", "ь"), + (0xA69E, "V"), + (0xA6F8, "X"), + (0xA700, "V"), + (0xA722, "M", "ꜣ"), + (0xA723, "V"), + (0xA724, "M", "ꜥ"), + (0xA725, "V"), + (0xA726, "M", "ꜧ"), + (0xA727, "V"), + (0xA728, "M", "ꜩ"), + (0xA729, "V"), + (0xA72A, "M", "ꜫ"), + (0xA72B, "V"), + (0xA72C, "M", "ꜭ"), + (0xA72D, "V"), + (0xA72E, "M", "ꜯ"), + (0xA72F, "V"), + (0xA732, "M", "ꜳ"), + (0xA733, "V"), + (0xA734, "M", "ꜵ"), + (0xA735, "V"), + (0xA736, "M", "ꜷ"), + (0xA737, "V"), + (0xA738, "M", "ꜹ"), + (0xA739, "V"), + (0xA73A, "M", "ꜻ"), + (0xA73B, "V"), + (0xA73C, "M", "ꜽ"), + (0xA73D, "V"), + (0xA73E, "M", "ꜿ"), + (0xA73F, "V"), + (0xA740, "M", "ꝁ"), + (0xA741, "V"), + (0xA742, "M", "ꝃ"), + (0xA743, "V"), + (0xA744, "M", "ꝅ"), + (0xA745, "V"), + (0xA746, "M", "ꝇ"), + (0xA747, "V"), + (0xA748, "M", "ꝉ"), + (0xA749, "V"), + (0xA74A, "M", "ꝋ"), + (0xA74B, "V"), + (0xA74C, "M", "ꝍ"), + (0xA74D, "V"), + (0xA74E, "M", "ꝏ"), + (0xA74F, "V"), + (0xA750, "M", "ꝑ"), + (0xA751, "V"), + (0xA752, "M", "ꝓ"), + (0xA753, "V"), + (0xA754, "M", "ꝕ"), + (0xA755, "V"), + (0xA756, "M", "ꝗ"), + (0xA757, "V"), + ] + + +def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA758, "M", "ꝙ"), + (0xA759, "V"), + (0xA75A, "M", "ꝛ"), + (0xA75B, "V"), + (0xA75C, "M", "ꝝ"), + (0xA75D, "V"), + (0xA75E, "M", "ꝟ"), + (0xA75F, "V"), + (0xA760, "M", "ꝡ"), + (0xA761, "V"), + (0xA762, "M", "ꝣ"), + (0xA763, "V"), + (0xA764, "M", "ꝥ"), + (0xA765, "V"), + (0xA766, "M", "ꝧ"), + (0xA767, "V"), + (0xA768, "M", "ꝩ"), + (0xA769, "V"), + (0xA76A, "M", "ꝫ"), + (0xA76B, "V"), + (0xA76C, "M", "ꝭ"), + (0xA76D, "V"), + (0xA76E, "M", "ꝯ"), + (0xA76F, "V"), + (0xA770, "M", "ꝯ"), + (0xA771, "V"), + (0xA779, "M", "ꝺ"), + (0xA77A, "V"), + (0xA77B, "M", "ꝼ"), + (0xA77C, "V"), + (0xA77D, "M", "ᵹ"), + (0xA77E, "M", "ꝿ"), + (0xA77F, "V"), + (0xA780, "M", "ꞁ"), + (0xA781, "V"), + (0xA782, "M", "ꞃ"), + (0xA783, "V"), + (0xA784, "M", "ꞅ"), + (0xA785, "V"), + (0xA786, "M", "ꞇ"), + (0xA787, "V"), + (0xA78B, "M", "ꞌ"), + (0xA78C, "V"), + (0xA78D, "M", "ɥ"), + (0xA78E, "V"), + (0xA790, "M", "ꞑ"), + (0xA791, "V"), + (0xA792, "M", "ꞓ"), + (0xA793, "V"), + (0xA796, "M", "ꞗ"), + (0xA797, "V"), + (0xA798, "M", "ꞙ"), + (0xA799, "V"), + (0xA79A, "M", "ꞛ"), + (0xA79B, "V"), + (0xA79C, "M", "ꞝ"), + (0xA79D, "V"), + (0xA79E, "M", "ꞟ"), + (0xA79F, "V"), + (0xA7A0, "M", "ꞡ"), + (0xA7A1, "V"), + (0xA7A2, "M", "ꞣ"), + (0xA7A3, "V"), + (0xA7A4, "M", "ꞥ"), + (0xA7A5, "V"), + (0xA7A6, "M", "ꞧ"), + (0xA7A7, "V"), + (0xA7A8, "M", "ꞩ"), + (0xA7A9, "V"), + (0xA7AA, "M", "ɦ"), + (0xA7AB, "M", "ɜ"), + (0xA7AC, "M", "ɡ"), + (0xA7AD, "M", "ɬ"), + (0xA7AE, "M", "ɪ"), + (0xA7AF, "V"), + (0xA7B0, "M", "ʞ"), + (0xA7B1, "M", "ʇ"), + (0xA7B2, "M", "ʝ"), + (0xA7B3, "M", "ꭓ"), + (0xA7B4, "M", "ꞵ"), + (0xA7B5, "V"), + (0xA7B6, "M", "ꞷ"), + (0xA7B7, "V"), + (0xA7B8, "M", "ꞹ"), + (0xA7B9, "V"), + (0xA7BA, "M", "ꞻ"), + (0xA7BB, "V"), + (0xA7BC, "M", "ꞽ"), + (0xA7BD, "V"), + (0xA7BE, "M", "ꞿ"), + (0xA7BF, "V"), + (0xA7C0, "M", "ꟁ"), + (0xA7C1, "V"), + (0xA7C2, "M", "ꟃ"), + (0xA7C3, "V"), + (0xA7C4, "M", "ꞔ"), + (0xA7C5, "M", "ʂ"), + (0xA7C6, "M", "ᶎ"), + (0xA7C7, "M", "ꟈ"), + (0xA7C8, "V"), + ] + + +def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA7C9, "M", "ꟊ"), + (0xA7CA, "V"), + (0xA7CB, "M", "ɤ"), + (0xA7CC, "M", "ꟍ"), + (0xA7CD, "V"), + (0xA7CE, "X"), + (0xA7D0, "M", "ꟑ"), + (0xA7D1, "V"), 
+ (0xA7D2, "X"), + (0xA7D3, "V"), + (0xA7D4, "X"), + (0xA7D5, "V"), + (0xA7D6, "M", "ꟗ"), + (0xA7D7, "V"), + (0xA7D8, "M", "ꟙ"), + (0xA7D9, "V"), + (0xA7DA, "M", "ꟛ"), + (0xA7DB, "V"), + (0xA7DC, "M", "ƛ"), + (0xA7DD, "X"), + (0xA7F2, "M", "c"), + (0xA7F3, "M", "f"), + (0xA7F4, "M", "q"), + (0xA7F5, "M", "ꟶ"), + (0xA7F6, "V"), + (0xA7F8, "M", "ħ"), + (0xA7F9, "M", "œ"), + (0xA7FA, "V"), + (0xA82D, "X"), + (0xA830, "V"), + (0xA83A, "X"), + (0xA840, "V"), + (0xA878, "X"), + (0xA880, "V"), + (0xA8C6, "X"), + (0xA8CE, "V"), + (0xA8DA, "X"), + (0xA8E0, "V"), + (0xA954, "X"), + (0xA95F, "V"), + (0xA97D, "X"), + (0xA980, "V"), + (0xA9CE, "X"), + (0xA9CF, "V"), + (0xA9DA, "X"), + (0xA9DE, "V"), + (0xA9FF, "X"), + (0xAA00, "V"), + (0xAA37, "X"), + (0xAA40, "V"), + (0xAA4E, "X"), + (0xAA50, "V"), + (0xAA5A, "X"), + (0xAA5C, "V"), + (0xAAC3, "X"), + (0xAADB, "V"), + (0xAAF7, "X"), + (0xAB01, "V"), + (0xAB07, "X"), + (0xAB09, "V"), + (0xAB0F, "X"), + (0xAB11, "V"), + (0xAB17, "X"), + (0xAB20, "V"), + (0xAB27, "X"), + (0xAB28, "V"), + (0xAB2F, "X"), + (0xAB30, "V"), + (0xAB5C, "M", "ꜧ"), + (0xAB5D, "M", "ꬷ"), + (0xAB5E, "M", "ɫ"), + (0xAB5F, "M", "ꭒ"), + (0xAB60, "V"), + (0xAB69, "M", "ʍ"), + (0xAB6A, "V"), + (0xAB6C, "X"), + (0xAB70, "M", "Ꭰ"), + (0xAB71, "M", "Ꭱ"), + (0xAB72, "M", "Ꭲ"), + (0xAB73, "M", "Ꭳ"), + (0xAB74, "M", "Ꭴ"), + (0xAB75, "M", "Ꭵ"), + (0xAB76, "M", "Ꭶ"), + (0xAB77, "M", "Ꭷ"), + (0xAB78, "M", "Ꭸ"), + (0xAB79, "M", "Ꭹ"), + (0xAB7A, "M", "Ꭺ"), + (0xAB7B, "M", "Ꭻ"), + (0xAB7C, "M", "Ꭼ"), + (0xAB7D, "M", "Ꭽ"), + (0xAB7E, "M", "Ꭾ"), + (0xAB7F, "M", "Ꭿ"), + (0xAB80, "M", "Ꮀ"), + (0xAB81, "M", "Ꮁ"), + (0xAB82, "M", "Ꮂ"), + (0xAB83, "M", "Ꮃ"), + (0xAB84, "M", "Ꮄ"), + (0xAB85, "M", "Ꮅ"), + (0xAB86, "M", "Ꮆ"), + (0xAB87, "M", "Ꮇ"), + ] + + +def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xAB88, "M", "Ꮈ"), + (0xAB89, "M", "Ꮉ"), + (0xAB8A, "M", "Ꮊ"), + (0xAB8B, "M", "Ꮋ"), + (0xAB8C, "M", "Ꮌ"), + (0xAB8D, "M", "Ꮍ"), + (0xAB8E, "M", "Ꮎ"), + (0xAB8F, "M", "Ꮏ"), + (0xAB90, "M", "Ꮐ"), + (0xAB91, "M", "Ꮑ"), + (0xAB92, "M", "Ꮒ"), + (0xAB93, "M", "Ꮓ"), + (0xAB94, "M", "Ꮔ"), + (0xAB95, "M", "Ꮕ"), + (0xAB96, "M", "Ꮖ"), + (0xAB97, "M", "Ꮗ"), + (0xAB98, "M", "Ꮘ"), + (0xAB99, "M", "Ꮙ"), + (0xAB9A, "M", "Ꮚ"), + (0xAB9B, "M", "Ꮛ"), + (0xAB9C, "M", "Ꮜ"), + (0xAB9D, "M", "Ꮝ"), + (0xAB9E, "M", "Ꮞ"), + (0xAB9F, "M", "Ꮟ"), + (0xABA0, "M", "Ꮠ"), + (0xABA1, "M", "Ꮡ"), + (0xABA2, "M", "Ꮢ"), + (0xABA3, "M", "Ꮣ"), + (0xABA4, "M", "Ꮤ"), + (0xABA5, "M", "Ꮥ"), + (0xABA6, "M", "Ꮦ"), + (0xABA7, "M", "Ꮧ"), + (0xABA8, "M", "Ꮨ"), + (0xABA9, "M", "Ꮩ"), + (0xABAA, "M", "Ꮪ"), + (0xABAB, "M", "Ꮫ"), + (0xABAC, "M", "Ꮬ"), + (0xABAD, "M", "Ꮭ"), + (0xABAE, "M", "Ꮮ"), + (0xABAF, "M", "Ꮯ"), + (0xABB0, "M", "Ꮰ"), + (0xABB1, "M", "Ꮱ"), + (0xABB2, "M", "Ꮲ"), + (0xABB3, "M", "Ꮳ"), + (0xABB4, "M", "Ꮴ"), + (0xABB5, "M", "Ꮵ"), + (0xABB6, "M", "Ꮶ"), + (0xABB7, "M", "Ꮷ"), + (0xABB8, "M", "Ꮸ"), + (0xABB9, "M", "Ꮹ"), + (0xABBA, "M", "Ꮺ"), + (0xABBB, "M", "Ꮻ"), + (0xABBC, "M", "Ꮼ"), + (0xABBD, "M", "Ꮽ"), + (0xABBE, "M", "Ꮾ"), + (0xABBF, "M", "Ꮿ"), + (0xABC0, "V"), + (0xABEE, "X"), + (0xABF0, "V"), + (0xABFA, "X"), + (0xAC00, "V"), + (0xD7A4, "X"), + (0xD7B0, "V"), + (0xD7C7, "X"), + (0xD7CB, "V"), + (0xD7FC, "X"), + (0xF900, "M", "豈"), + (0xF901, "M", "更"), + (0xF902, "M", "車"), + (0xF903, "M", "賈"), + (0xF904, "M", "滑"), + (0xF905, "M", "串"), + (0xF906, "M", "句"), + (0xF907, "M", "龜"), + (0xF909, "M", "契"), + (0xF90A, "M", "金"), + (0xF90B, "M", "喇"), + (0xF90C, "M", "奈"), + (0xF90D, "M", "懶"), + (0xF90E, "M", 
"癩"), + (0xF90F, "M", "羅"), + (0xF910, "M", "蘿"), + (0xF911, "M", "螺"), + (0xF912, "M", "裸"), + (0xF913, "M", "邏"), + (0xF914, "M", "樂"), + (0xF915, "M", "洛"), + (0xF916, "M", "烙"), + (0xF917, "M", "珞"), + (0xF918, "M", "落"), + (0xF919, "M", "酪"), + (0xF91A, "M", "駱"), + (0xF91B, "M", "亂"), + (0xF91C, "M", "卵"), + (0xF91D, "M", "欄"), + (0xF91E, "M", "爛"), + (0xF91F, "M", "蘭"), + (0xF920, "M", "鸞"), + (0xF921, "M", "嵐"), + (0xF922, "M", "濫"), + ] + + +def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF923, "M", "藍"), + (0xF924, "M", "襤"), + (0xF925, "M", "拉"), + (0xF926, "M", "臘"), + (0xF927, "M", "蠟"), + (0xF928, "M", "廊"), + (0xF929, "M", "朗"), + (0xF92A, "M", "浪"), + (0xF92B, "M", "狼"), + (0xF92C, "M", "郎"), + (0xF92D, "M", "來"), + (0xF92E, "M", "冷"), + (0xF92F, "M", "勞"), + (0xF930, "M", "擄"), + (0xF931, "M", "櫓"), + (0xF932, "M", "爐"), + (0xF933, "M", "盧"), + (0xF934, "M", "老"), + (0xF935, "M", "蘆"), + (0xF936, "M", "虜"), + (0xF937, "M", "路"), + (0xF938, "M", "露"), + (0xF939, "M", "魯"), + (0xF93A, "M", "鷺"), + (0xF93B, "M", "碌"), + (0xF93C, "M", "祿"), + (0xF93D, "M", "綠"), + (0xF93E, "M", "菉"), + (0xF93F, "M", "錄"), + (0xF940, "M", "鹿"), + (0xF941, "M", "論"), + (0xF942, "M", "壟"), + (0xF943, "M", "弄"), + (0xF944, "M", "籠"), + (0xF945, "M", "聾"), + (0xF946, "M", "牢"), + (0xF947, "M", "磊"), + (0xF948, "M", "賂"), + (0xF949, "M", "雷"), + (0xF94A, "M", "壘"), + (0xF94B, "M", "屢"), + (0xF94C, "M", "樓"), + (0xF94D, "M", "淚"), + (0xF94E, "M", "漏"), + (0xF94F, "M", "累"), + (0xF950, "M", "縷"), + (0xF951, "M", "陋"), + (0xF952, "M", "勒"), + (0xF953, "M", "肋"), + (0xF954, "M", "凜"), + (0xF955, "M", "凌"), + (0xF956, "M", "稜"), + (0xF957, "M", "綾"), + (0xF958, "M", "菱"), + (0xF959, "M", "陵"), + (0xF95A, "M", "讀"), + (0xF95B, "M", "拏"), + (0xF95C, "M", "樂"), + (0xF95D, "M", "諾"), + (0xF95E, "M", "丹"), + (0xF95F, "M", "寧"), + (0xF960, "M", "怒"), + (0xF961, "M", "率"), + (0xF962, "M", "異"), + (0xF963, "M", "北"), + (0xF964, "M", "磻"), + (0xF965, "M", "便"), + (0xF966, "M", "復"), + (0xF967, "M", "不"), + (0xF968, "M", "泌"), + (0xF969, "M", "數"), + (0xF96A, "M", "索"), + (0xF96B, "M", "參"), + (0xF96C, "M", "塞"), + (0xF96D, "M", "省"), + (0xF96E, "M", "葉"), + (0xF96F, "M", "說"), + (0xF970, "M", "殺"), + (0xF971, "M", "辰"), + (0xF972, "M", "沈"), + (0xF973, "M", "拾"), + (0xF974, "M", "若"), + (0xF975, "M", "掠"), + (0xF976, "M", "略"), + (0xF977, "M", "亮"), + (0xF978, "M", "兩"), + (0xF979, "M", "凉"), + (0xF97A, "M", "梁"), + (0xF97B, "M", "糧"), + (0xF97C, "M", "良"), + (0xF97D, "M", "諒"), + (0xF97E, "M", "量"), + (0xF97F, "M", "勵"), + (0xF980, "M", "呂"), + (0xF981, "M", "女"), + (0xF982, "M", "廬"), + (0xF983, "M", "旅"), + (0xF984, "M", "濾"), + (0xF985, "M", "礪"), + (0xF986, "M", "閭"), + ] + + +def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF987, "M", "驪"), + (0xF988, "M", "麗"), + (0xF989, "M", "黎"), + (0xF98A, "M", "力"), + (0xF98B, "M", "曆"), + (0xF98C, "M", "歷"), + (0xF98D, "M", "轢"), + (0xF98E, "M", "年"), + (0xF98F, "M", "憐"), + (0xF990, "M", "戀"), + (0xF991, "M", "撚"), + (0xF992, "M", "漣"), + (0xF993, "M", "煉"), + (0xF994, "M", "璉"), + (0xF995, "M", "秊"), + (0xF996, "M", "練"), + (0xF997, "M", "聯"), + (0xF998, "M", "輦"), + (0xF999, "M", "蓮"), + (0xF99A, "M", "連"), + (0xF99B, "M", "鍊"), + (0xF99C, "M", "列"), + (0xF99D, "M", "劣"), + (0xF99E, "M", "咽"), + (0xF99F, "M", "烈"), + (0xF9A0, "M", "裂"), + (0xF9A1, "M", "說"), + (0xF9A2, "M", "廉"), + (0xF9A3, "M", "念"), + (0xF9A4, "M", "捻"), + (0xF9A5, "M", "殮"), + (0xF9A6, "M", "簾"), + (0xF9A7, "M", "獵"), + 
(0xF9A8, "M", "令"), + (0xF9A9, "M", "囹"), + (0xF9AA, "M", "寧"), + (0xF9AB, "M", "嶺"), + (0xF9AC, "M", "怜"), + (0xF9AD, "M", "玲"), + (0xF9AE, "M", "瑩"), + (0xF9AF, "M", "羚"), + (0xF9B0, "M", "聆"), + (0xF9B1, "M", "鈴"), + (0xF9B2, "M", "零"), + (0xF9B3, "M", "靈"), + (0xF9B4, "M", "領"), + (0xF9B5, "M", "例"), + (0xF9B6, "M", "禮"), + (0xF9B7, "M", "醴"), + (0xF9B8, "M", "隸"), + (0xF9B9, "M", "惡"), + (0xF9BA, "M", "了"), + (0xF9BB, "M", "僚"), + (0xF9BC, "M", "寮"), + (0xF9BD, "M", "尿"), + (0xF9BE, "M", "料"), + (0xF9BF, "M", "樂"), + (0xF9C0, "M", "燎"), + (0xF9C1, "M", "療"), + (0xF9C2, "M", "蓼"), + (0xF9C3, "M", "遼"), + (0xF9C4, "M", "龍"), + (0xF9C5, "M", "暈"), + (0xF9C6, "M", "阮"), + (0xF9C7, "M", "劉"), + (0xF9C8, "M", "杻"), + (0xF9C9, "M", "柳"), + (0xF9CA, "M", "流"), + (0xF9CB, "M", "溜"), + (0xF9CC, "M", "琉"), + (0xF9CD, "M", "留"), + (0xF9CE, "M", "硫"), + (0xF9CF, "M", "紐"), + (0xF9D0, "M", "類"), + (0xF9D1, "M", "六"), + (0xF9D2, "M", "戮"), + (0xF9D3, "M", "陸"), + (0xF9D4, "M", "倫"), + (0xF9D5, "M", "崙"), + (0xF9D6, "M", "淪"), + (0xF9D7, "M", "輪"), + (0xF9D8, "M", "律"), + (0xF9D9, "M", "慄"), + (0xF9DA, "M", "栗"), + (0xF9DB, "M", "率"), + (0xF9DC, "M", "隆"), + (0xF9DD, "M", "利"), + (0xF9DE, "M", "吏"), + (0xF9DF, "M", "履"), + (0xF9E0, "M", "易"), + (0xF9E1, "M", "李"), + (0xF9E2, "M", "梨"), + (0xF9E3, "M", "泥"), + (0xF9E4, "M", "理"), + (0xF9E5, "M", "痢"), + (0xF9E6, "M", "罹"), + (0xF9E7, "M", "裏"), + (0xF9E8, "M", "裡"), + (0xF9E9, "M", "里"), + (0xF9EA, "M", "離"), + ] + + +def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF9EB, "M", "匿"), + (0xF9EC, "M", "溺"), + (0xF9ED, "M", "吝"), + (0xF9EE, "M", "燐"), + (0xF9EF, "M", "璘"), + (0xF9F0, "M", "藺"), + (0xF9F1, "M", "隣"), + (0xF9F2, "M", "鱗"), + (0xF9F3, "M", "麟"), + (0xF9F4, "M", "林"), + (0xF9F5, "M", "淋"), + (0xF9F6, "M", "臨"), + (0xF9F7, "M", "立"), + (0xF9F8, "M", "笠"), + (0xF9F9, "M", "粒"), + (0xF9FA, "M", "狀"), + (0xF9FB, "M", "炙"), + (0xF9FC, "M", "識"), + (0xF9FD, "M", "什"), + (0xF9FE, "M", "茶"), + (0xF9FF, "M", "刺"), + (0xFA00, "M", "切"), + (0xFA01, "M", "度"), + (0xFA02, "M", "拓"), + (0xFA03, "M", "糖"), + (0xFA04, "M", "宅"), + (0xFA05, "M", "洞"), + (0xFA06, "M", "暴"), + (0xFA07, "M", "輻"), + (0xFA08, "M", "行"), + (0xFA09, "M", "降"), + (0xFA0A, "M", "見"), + (0xFA0B, "M", "廓"), + (0xFA0C, "M", "兀"), + (0xFA0D, "M", "嗀"), + (0xFA0E, "V"), + (0xFA10, "M", "塚"), + (0xFA11, "V"), + (0xFA12, "M", "晴"), + (0xFA13, "V"), + (0xFA15, "M", "凞"), + (0xFA16, "M", "猪"), + (0xFA17, "M", "益"), + (0xFA18, "M", "礼"), + (0xFA19, "M", "神"), + (0xFA1A, "M", "祥"), + (0xFA1B, "M", "福"), + (0xFA1C, "M", "靖"), + (0xFA1D, "M", "精"), + (0xFA1E, "M", "羽"), + (0xFA1F, "V"), + (0xFA20, "M", "蘒"), + (0xFA21, "V"), + (0xFA22, "M", "諸"), + (0xFA23, "V"), + (0xFA25, "M", "逸"), + (0xFA26, "M", "都"), + (0xFA27, "V"), + (0xFA2A, "M", "飯"), + (0xFA2B, "M", "飼"), + (0xFA2C, "M", "館"), + (0xFA2D, "M", "鶴"), + (0xFA2E, "M", "郞"), + (0xFA2F, "M", "隷"), + (0xFA30, "M", "侮"), + (0xFA31, "M", "僧"), + (0xFA32, "M", "免"), + (0xFA33, "M", "勉"), + (0xFA34, "M", "勤"), + (0xFA35, "M", "卑"), + (0xFA36, "M", "喝"), + (0xFA37, "M", "嘆"), + (0xFA38, "M", "器"), + (0xFA39, "M", "塀"), + (0xFA3A, "M", "墨"), + (0xFA3B, "M", "層"), + (0xFA3C, "M", "屮"), + (0xFA3D, "M", "悔"), + (0xFA3E, "M", "慨"), + (0xFA3F, "M", "憎"), + (0xFA40, "M", "懲"), + (0xFA41, "M", "敏"), + (0xFA42, "M", "既"), + (0xFA43, "M", "暑"), + (0xFA44, "M", "梅"), + (0xFA45, "M", "海"), + (0xFA46, "M", "渚"), + (0xFA47, "M", "漢"), + (0xFA48, "M", "煮"), + (0xFA49, "M", "爫"), + (0xFA4A, "M", "琢"), + (0xFA4B, "M", "碑"), + 
(0xFA4C, "M", "社"), + (0xFA4D, "M", "祉"), + (0xFA4E, "M", "祈"), + (0xFA4F, "M", "祐"), + (0xFA50, "M", "祖"), + (0xFA51, "M", "祝"), + (0xFA52, "M", "禍"), + (0xFA53, "M", "禎"), + ] + + +def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA54, "M", "穀"), + (0xFA55, "M", "突"), + (0xFA56, "M", "節"), + (0xFA57, "M", "練"), + (0xFA58, "M", "縉"), + (0xFA59, "M", "繁"), + (0xFA5A, "M", "署"), + (0xFA5B, "M", "者"), + (0xFA5C, "M", "臭"), + (0xFA5D, "M", "艹"), + (0xFA5F, "M", "著"), + (0xFA60, "M", "褐"), + (0xFA61, "M", "視"), + (0xFA62, "M", "謁"), + (0xFA63, "M", "謹"), + (0xFA64, "M", "賓"), + (0xFA65, "M", "贈"), + (0xFA66, "M", "辶"), + (0xFA67, "M", "逸"), + (0xFA68, "M", "難"), + (0xFA69, "M", "響"), + (0xFA6A, "M", "頻"), + (0xFA6B, "M", "恵"), + (0xFA6C, "M", "𤋮"), + (0xFA6D, "M", "舘"), + (0xFA6E, "X"), + (0xFA70, "M", "並"), + (0xFA71, "M", "况"), + (0xFA72, "M", "全"), + (0xFA73, "M", "侀"), + (0xFA74, "M", "充"), + (0xFA75, "M", "冀"), + (0xFA76, "M", "勇"), + (0xFA77, "M", "勺"), + (0xFA78, "M", "喝"), + (0xFA79, "M", "啕"), + (0xFA7A, "M", "喙"), + (0xFA7B, "M", "嗢"), + (0xFA7C, "M", "塚"), + (0xFA7D, "M", "墳"), + (0xFA7E, "M", "奄"), + (0xFA7F, "M", "奔"), + (0xFA80, "M", "婢"), + (0xFA81, "M", "嬨"), + (0xFA82, "M", "廒"), + (0xFA83, "M", "廙"), + (0xFA84, "M", "彩"), + (0xFA85, "M", "徭"), + (0xFA86, "M", "惘"), + (0xFA87, "M", "慎"), + (0xFA88, "M", "愈"), + (0xFA89, "M", "憎"), + (0xFA8A, "M", "慠"), + (0xFA8B, "M", "懲"), + (0xFA8C, "M", "戴"), + (0xFA8D, "M", "揄"), + (0xFA8E, "M", "搜"), + (0xFA8F, "M", "摒"), + (0xFA90, "M", "敖"), + (0xFA91, "M", "晴"), + (0xFA92, "M", "朗"), + (0xFA93, "M", "望"), + (0xFA94, "M", "杖"), + (0xFA95, "M", "歹"), + (0xFA96, "M", "殺"), + (0xFA97, "M", "流"), + (0xFA98, "M", "滛"), + (0xFA99, "M", "滋"), + (0xFA9A, "M", "漢"), + (0xFA9B, "M", "瀞"), + (0xFA9C, "M", "煮"), + (0xFA9D, "M", "瞧"), + (0xFA9E, "M", "爵"), + (0xFA9F, "M", "犯"), + (0xFAA0, "M", "猪"), + (0xFAA1, "M", "瑱"), + (0xFAA2, "M", "甆"), + (0xFAA3, "M", "画"), + (0xFAA4, "M", "瘝"), + (0xFAA5, "M", "瘟"), + (0xFAA6, "M", "益"), + (0xFAA7, "M", "盛"), + (0xFAA8, "M", "直"), + (0xFAA9, "M", "睊"), + (0xFAAA, "M", "着"), + (0xFAAB, "M", "磌"), + (0xFAAC, "M", "窱"), + (0xFAAD, "M", "節"), + (0xFAAE, "M", "类"), + (0xFAAF, "M", "絛"), + (0xFAB0, "M", "練"), + (0xFAB1, "M", "缾"), + (0xFAB2, "M", "者"), + (0xFAB3, "M", "荒"), + (0xFAB4, "M", "華"), + (0xFAB5, "M", "蝹"), + (0xFAB6, "M", "襁"), + (0xFAB7, "M", "覆"), + (0xFAB8, "M", "視"), + (0xFAB9, "M", "調"), + ] + + +def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFABA, "M", "諸"), + (0xFABB, "M", "請"), + (0xFABC, "M", "謁"), + (0xFABD, "M", "諾"), + (0xFABE, "M", "諭"), + (0xFABF, "M", "謹"), + (0xFAC0, "M", "變"), + (0xFAC1, "M", "贈"), + (0xFAC2, "M", "輸"), + (0xFAC3, "M", "遲"), + (0xFAC4, "M", "醙"), + (0xFAC5, "M", "鉶"), + (0xFAC6, "M", "陼"), + (0xFAC7, "M", "難"), + (0xFAC8, "M", "靖"), + (0xFAC9, "M", "韛"), + (0xFACA, "M", "響"), + (0xFACB, "M", "頋"), + (0xFACC, "M", "頻"), + (0xFACD, "M", "鬒"), + (0xFACE, "M", "龜"), + (0xFACF, "M", "𢡊"), + (0xFAD0, "M", "𢡄"), + (0xFAD1, "M", "𣏕"), + (0xFAD2, "M", "㮝"), + (0xFAD3, "M", "䀘"), + (0xFAD4, "M", "䀹"), + (0xFAD5, "M", "𥉉"), + (0xFAD6, "M", "𥳐"), + (0xFAD7, "M", "𧻓"), + (0xFAD8, "M", "齃"), + (0xFAD9, "M", "龎"), + (0xFADA, "X"), + (0xFB00, "M", "ff"), + (0xFB01, "M", "fi"), + (0xFB02, "M", "fl"), + (0xFB03, "M", "ffi"), + (0xFB04, "M", "ffl"), + (0xFB05, "M", "st"), + (0xFB07, "X"), + (0xFB13, "M", "մն"), + (0xFB14, "M", "մե"), + (0xFB15, "M", "մի"), + (0xFB16, "M", "վն"), + (0xFB17, "M", "մխ"), + (0xFB18, 
"X"), + (0xFB1D, "M", "יִ"), + (0xFB1E, "V"), + (0xFB1F, "M", "ײַ"), + (0xFB20, "M", "ע"), + (0xFB21, "M", "א"), + (0xFB22, "M", "ד"), + (0xFB23, "M", "ה"), + (0xFB24, "M", "כ"), + (0xFB25, "M", "ל"), + (0xFB26, "M", "ם"), + (0xFB27, "M", "ר"), + (0xFB28, "M", "ת"), + (0xFB29, "M", "+"), + (0xFB2A, "M", "שׁ"), + (0xFB2B, "M", "שׂ"), + (0xFB2C, "M", "שּׁ"), + (0xFB2D, "M", "שּׂ"), + (0xFB2E, "M", "אַ"), + (0xFB2F, "M", "אָ"), + (0xFB30, "M", "אּ"), + (0xFB31, "M", "בּ"), + (0xFB32, "M", "גּ"), + (0xFB33, "M", "דּ"), + (0xFB34, "M", "הּ"), + (0xFB35, "M", "וּ"), + (0xFB36, "M", "זּ"), + (0xFB37, "X"), + (0xFB38, "M", "טּ"), + (0xFB39, "M", "יּ"), + (0xFB3A, "M", "ךּ"), + (0xFB3B, "M", "כּ"), + (0xFB3C, "M", "לּ"), + (0xFB3D, "X"), + (0xFB3E, "M", "מּ"), + (0xFB3F, "X"), + (0xFB40, "M", "נּ"), + (0xFB41, "M", "סּ"), + (0xFB42, "X"), + (0xFB43, "M", "ףּ"), + (0xFB44, "M", "פּ"), + (0xFB45, "X"), + (0xFB46, "M", "צּ"), + (0xFB47, "M", "קּ"), + (0xFB48, "M", "רּ"), + (0xFB49, "M", "שּ"), + (0xFB4A, "M", "תּ"), + (0xFB4B, "M", "וֹ"), + (0xFB4C, "M", "בֿ"), + (0xFB4D, "M", "כֿ"), + (0xFB4E, "M", "פֿ"), + (0xFB4F, "M", "אל"), + (0xFB50, "M", "ٱ"), + (0xFB52, "M", "ٻ"), + (0xFB56, "M", "پ"), + ] + + +def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFB5A, "M", "ڀ"), + (0xFB5E, "M", "ٺ"), + (0xFB62, "M", "ٿ"), + (0xFB66, "M", "ٹ"), + (0xFB6A, "M", "ڤ"), + (0xFB6E, "M", "ڦ"), + (0xFB72, "M", "ڄ"), + (0xFB76, "M", "ڃ"), + (0xFB7A, "M", "چ"), + (0xFB7E, "M", "ڇ"), + (0xFB82, "M", "ڍ"), + (0xFB84, "M", "ڌ"), + (0xFB86, "M", "ڎ"), + (0xFB88, "M", "ڈ"), + (0xFB8A, "M", "ژ"), + (0xFB8C, "M", "ڑ"), + (0xFB8E, "M", "ک"), + (0xFB92, "M", "گ"), + (0xFB96, "M", "ڳ"), + (0xFB9A, "M", "ڱ"), + (0xFB9E, "M", "ں"), + (0xFBA0, "M", "ڻ"), + (0xFBA4, "M", "ۀ"), + (0xFBA6, "M", "ہ"), + (0xFBAA, "M", "ھ"), + (0xFBAE, "M", "ے"), + (0xFBB0, "M", "ۓ"), + (0xFBB2, "V"), + (0xFBC3, "X"), + (0xFBD3, "M", "ڭ"), + (0xFBD7, "M", "ۇ"), + (0xFBD9, "M", "ۆ"), + (0xFBDB, "M", "ۈ"), + (0xFBDD, "M", "ۇٴ"), + (0xFBDE, "M", "ۋ"), + (0xFBE0, "M", "ۅ"), + (0xFBE2, "M", "ۉ"), + (0xFBE4, "M", "ې"), + (0xFBE8, "M", "ى"), + (0xFBEA, "M", "ئا"), + (0xFBEC, "M", "ئە"), + (0xFBEE, "M", "ئو"), + (0xFBF0, "M", "ئۇ"), + (0xFBF2, "M", "ئۆ"), + (0xFBF4, "M", "ئۈ"), + (0xFBF6, "M", "ئې"), + (0xFBF9, "M", "ئى"), + (0xFBFC, "M", "ی"), + (0xFC00, "M", "ئج"), + (0xFC01, "M", "ئح"), + (0xFC02, "M", "ئم"), + (0xFC03, "M", "ئى"), + (0xFC04, "M", "ئي"), + (0xFC05, "M", "بج"), + (0xFC06, "M", "بح"), + (0xFC07, "M", "بخ"), + (0xFC08, "M", "بم"), + (0xFC09, "M", "بى"), + (0xFC0A, "M", "بي"), + (0xFC0B, "M", "تج"), + (0xFC0C, "M", "تح"), + (0xFC0D, "M", "تخ"), + (0xFC0E, "M", "تم"), + (0xFC0F, "M", "تى"), + (0xFC10, "M", "تي"), + (0xFC11, "M", "ثج"), + (0xFC12, "M", "ثم"), + (0xFC13, "M", "ثى"), + (0xFC14, "M", "ثي"), + (0xFC15, "M", "جح"), + (0xFC16, "M", "جم"), + (0xFC17, "M", "حج"), + (0xFC18, "M", "حم"), + (0xFC19, "M", "خج"), + (0xFC1A, "M", "خح"), + (0xFC1B, "M", "خم"), + (0xFC1C, "M", "سج"), + (0xFC1D, "M", "سح"), + (0xFC1E, "M", "سخ"), + (0xFC1F, "M", "سم"), + (0xFC20, "M", "صح"), + (0xFC21, "M", "صم"), + (0xFC22, "M", "ضج"), + (0xFC23, "M", "ضح"), + (0xFC24, "M", "ضخ"), + (0xFC25, "M", "ضم"), + (0xFC26, "M", "طح"), + (0xFC27, "M", "طم"), + (0xFC28, "M", "ظم"), + (0xFC29, "M", "عج"), + (0xFC2A, "M", "عم"), + (0xFC2B, "M", "غج"), + (0xFC2C, "M", "غم"), + (0xFC2D, "M", "فج"), + (0xFC2E, "M", "فح"), + (0xFC2F, "M", "فخ"), + (0xFC30, "M", "فم"), + (0xFC31, "M", "فى"), + (0xFC32, "M", "في"), + (0xFC33, "M", "قح"), + ] + + +def 
_seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFC34, "M", "قم"), + (0xFC35, "M", "قى"), + (0xFC36, "M", "قي"), + (0xFC37, "M", "كا"), + (0xFC38, "M", "كج"), + (0xFC39, "M", "كح"), + (0xFC3A, "M", "كخ"), + (0xFC3B, "M", "كل"), + (0xFC3C, "M", "كم"), + (0xFC3D, "M", "كى"), + (0xFC3E, "M", "كي"), + (0xFC3F, "M", "لج"), + (0xFC40, "M", "لح"), + (0xFC41, "M", "لخ"), + (0xFC42, "M", "لم"), + (0xFC43, "M", "لى"), + (0xFC44, "M", "لي"), + (0xFC45, "M", "مج"), + (0xFC46, "M", "مح"), + (0xFC47, "M", "مخ"), + (0xFC48, "M", "مم"), + (0xFC49, "M", "مى"), + (0xFC4A, "M", "مي"), + (0xFC4B, "M", "نج"), + (0xFC4C, "M", "نح"), + (0xFC4D, "M", "نخ"), + (0xFC4E, "M", "نم"), + (0xFC4F, "M", "نى"), + (0xFC50, "M", "ني"), + (0xFC51, "M", "هج"), + (0xFC52, "M", "هم"), + (0xFC53, "M", "هى"), + (0xFC54, "M", "هي"), + (0xFC55, "M", "يج"), + (0xFC56, "M", "يح"), + (0xFC57, "M", "يخ"), + (0xFC58, "M", "يم"), + (0xFC59, "M", "يى"), + (0xFC5A, "M", "يي"), + (0xFC5B, "M", "ذٰ"), + (0xFC5C, "M", "رٰ"), + (0xFC5D, "M", "ىٰ"), + (0xFC5E, "M", " ٌّ"), + (0xFC5F, "M", " ٍّ"), + (0xFC60, "M", " َّ"), + (0xFC61, "M", " ُّ"), + (0xFC62, "M", " ِّ"), + (0xFC63, "M", " ّٰ"), + (0xFC64, "M", "ئر"), + (0xFC65, "M", "ئز"), + (0xFC66, "M", "ئم"), + (0xFC67, "M", "ئن"), + (0xFC68, "M", "ئى"), + (0xFC69, "M", "ئي"), + (0xFC6A, "M", "بر"), + (0xFC6B, "M", "بز"), + (0xFC6C, "M", "بم"), + (0xFC6D, "M", "بن"), + (0xFC6E, "M", "بى"), + (0xFC6F, "M", "بي"), + (0xFC70, "M", "تر"), + (0xFC71, "M", "تز"), + (0xFC72, "M", "تم"), + (0xFC73, "M", "تن"), + (0xFC74, "M", "تى"), + (0xFC75, "M", "تي"), + (0xFC76, "M", "ثر"), + (0xFC77, "M", "ثز"), + (0xFC78, "M", "ثم"), + (0xFC79, "M", "ثن"), + (0xFC7A, "M", "ثى"), + (0xFC7B, "M", "ثي"), + (0xFC7C, "M", "فى"), + (0xFC7D, "M", "في"), + (0xFC7E, "M", "قى"), + (0xFC7F, "M", "قي"), + (0xFC80, "M", "كا"), + (0xFC81, "M", "كل"), + (0xFC82, "M", "كم"), + (0xFC83, "M", "كى"), + (0xFC84, "M", "كي"), + (0xFC85, "M", "لم"), + (0xFC86, "M", "لى"), + (0xFC87, "M", "لي"), + (0xFC88, "M", "ما"), + (0xFC89, "M", "مم"), + (0xFC8A, "M", "نر"), + (0xFC8B, "M", "نز"), + (0xFC8C, "M", "نم"), + (0xFC8D, "M", "نن"), + (0xFC8E, "M", "نى"), + (0xFC8F, "M", "ني"), + (0xFC90, "M", "ىٰ"), + (0xFC91, "M", "ير"), + (0xFC92, "M", "يز"), + (0xFC93, "M", "يم"), + (0xFC94, "M", "ين"), + (0xFC95, "M", "يى"), + (0xFC96, "M", "يي"), + (0xFC97, "M", "ئج"), + ] + + +def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFC98, "M", "ئح"), + (0xFC99, "M", "ئخ"), + (0xFC9A, "M", "ئم"), + (0xFC9B, "M", "ئه"), + (0xFC9C, "M", "بج"), + (0xFC9D, "M", "بح"), + (0xFC9E, "M", "بخ"), + (0xFC9F, "M", "بم"), + (0xFCA0, "M", "به"), + (0xFCA1, "M", "تج"), + (0xFCA2, "M", "تح"), + (0xFCA3, "M", "تخ"), + (0xFCA4, "M", "تم"), + (0xFCA5, "M", "ته"), + (0xFCA6, "M", "ثم"), + (0xFCA7, "M", "جح"), + (0xFCA8, "M", "جم"), + (0xFCA9, "M", "حج"), + (0xFCAA, "M", "حم"), + (0xFCAB, "M", "خج"), + (0xFCAC, "M", "خم"), + (0xFCAD, "M", "سج"), + (0xFCAE, "M", "سح"), + (0xFCAF, "M", "سخ"), + (0xFCB0, "M", "سم"), + (0xFCB1, "M", "صح"), + (0xFCB2, "M", "صخ"), + (0xFCB3, "M", "صم"), + (0xFCB4, "M", "ضج"), + (0xFCB5, "M", "ضح"), + (0xFCB6, "M", "ضخ"), + (0xFCB7, "M", "ضم"), + (0xFCB8, "M", "طح"), + (0xFCB9, "M", "ظم"), + (0xFCBA, "M", "عج"), + (0xFCBB, "M", "عم"), + (0xFCBC, "M", "غج"), + (0xFCBD, "M", "غم"), + (0xFCBE, "M", "فج"), + (0xFCBF, "M", "فح"), + (0xFCC0, "M", "فخ"), + (0xFCC1, "M", "فم"), + (0xFCC2, "M", "قح"), + (0xFCC3, "M", "قم"), + (0xFCC4, "M", "كج"), + (0xFCC5, "M", "كح"), + (0xFCC6, "M", "كخ"), + 
(0xFCC7, "M", "كل"), + (0xFCC8, "M", "كم"), + (0xFCC9, "M", "لج"), + (0xFCCA, "M", "لح"), + (0xFCCB, "M", "لخ"), + (0xFCCC, "M", "لم"), + (0xFCCD, "M", "له"), + (0xFCCE, "M", "مج"), + (0xFCCF, "M", "مح"), + (0xFCD0, "M", "مخ"), + (0xFCD1, "M", "مم"), + (0xFCD2, "M", "نج"), + (0xFCD3, "M", "نح"), + (0xFCD4, "M", "نخ"), + (0xFCD5, "M", "نم"), + (0xFCD6, "M", "نه"), + (0xFCD7, "M", "هج"), + (0xFCD8, "M", "هم"), + (0xFCD9, "M", "هٰ"), + (0xFCDA, "M", "يج"), + (0xFCDB, "M", "يح"), + (0xFCDC, "M", "يخ"), + (0xFCDD, "M", "يم"), + (0xFCDE, "M", "يه"), + (0xFCDF, "M", "ئم"), + (0xFCE0, "M", "ئه"), + (0xFCE1, "M", "بم"), + (0xFCE2, "M", "به"), + (0xFCE3, "M", "تم"), + (0xFCE4, "M", "ته"), + (0xFCE5, "M", "ثم"), + (0xFCE6, "M", "ثه"), + (0xFCE7, "M", "سم"), + (0xFCE8, "M", "سه"), + (0xFCE9, "M", "شم"), + (0xFCEA, "M", "شه"), + (0xFCEB, "M", "كل"), + (0xFCEC, "M", "كم"), + (0xFCED, "M", "لم"), + (0xFCEE, "M", "نم"), + (0xFCEF, "M", "نه"), + (0xFCF0, "M", "يم"), + (0xFCF1, "M", "يه"), + (0xFCF2, "M", "ـَّ"), + (0xFCF3, "M", "ـُّ"), + (0xFCF4, "M", "ـِّ"), + (0xFCF5, "M", "طى"), + (0xFCF6, "M", "طي"), + (0xFCF7, "M", "عى"), + (0xFCF8, "M", "عي"), + (0xFCF9, "M", "غى"), + (0xFCFA, "M", "غي"), + (0xFCFB, "M", "سى"), + ] + + +def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFCFC, "M", "سي"), + (0xFCFD, "M", "شى"), + (0xFCFE, "M", "شي"), + (0xFCFF, "M", "حى"), + (0xFD00, "M", "حي"), + (0xFD01, "M", "جى"), + (0xFD02, "M", "جي"), + (0xFD03, "M", "خى"), + (0xFD04, "M", "خي"), + (0xFD05, "M", "صى"), + (0xFD06, "M", "صي"), + (0xFD07, "M", "ضى"), + (0xFD08, "M", "ضي"), + (0xFD09, "M", "شج"), + (0xFD0A, "M", "شح"), + (0xFD0B, "M", "شخ"), + (0xFD0C, "M", "شم"), + (0xFD0D, "M", "شر"), + (0xFD0E, "M", "سر"), + (0xFD0F, "M", "صر"), + (0xFD10, "M", "ضر"), + (0xFD11, "M", "طى"), + (0xFD12, "M", "طي"), + (0xFD13, "M", "عى"), + (0xFD14, "M", "عي"), + (0xFD15, "M", "غى"), + (0xFD16, "M", "غي"), + (0xFD17, "M", "سى"), + (0xFD18, "M", "سي"), + (0xFD19, "M", "شى"), + (0xFD1A, "M", "شي"), + (0xFD1B, "M", "حى"), + (0xFD1C, "M", "حي"), + (0xFD1D, "M", "جى"), + (0xFD1E, "M", "جي"), + (0xFD1F, "M", "خى"), + (0xFD20, "M", "خي"), + (0xFD21, "M", "صى"), + (0xFD22, "M", "صي"), + (0xFD23, "M", "ضى"), + (0xFD24, "M", "ضي"), + (0xFD25, "M", "شج"), + (0xFD26, "M", "شح"), + (0xFD27, "M", "شخ"), + (0xFD28, "M", "شم"), + (0xFD29, "M", "شر"), + (0xFD2A, "M", "سر"), + (0xFD2B, "M", "صر"), + (0xFD2C, "M", "ضر"), + (0xFD2D, "M", "شج"), + (0xFD2E, "M", "شح"), + (0xFD2F, "M", "شخ"), + (0xFD30, "M", "شم"), + (0xFD31, "M", "سه"), + (0xFD32, "M", "شه"), + (0xFD33, "M", "طم"), + (0xFD34, "M", "سج"), + (0xFD35, "M", "سح"), + (0xFD36, "M", "سخ"), + (0xFD37, "M", "شج"), + (0xFD38, "M", "شح"), + (0xFD39, "M", "شخ"), + (0xFD3A, "M", "طم"), + (0xFD3B, "M", "ظم"), + (0xFD3C, "M", "اً"), + (0xFD3E, "V"), + (0xFD50, "M", "تجم"), + (0xFD51, "M", "تحج"), + (0xFD53, "M", "تحم"), + (0xFD54, "M", "تخم"), + (0xFD55, "M", "تمج"), + (0xFD56, "M", "تمح"), + (0xFD57, "M", "تمخ"), + (0xFD58, "M", "جمح"), + (0xFD5A, "M", "حمي"), + (0xFD5B, "M", "حمى"), + (0xFD5C, "M", "سحج"), + (0xFD5D, "M", "سجح"), + (0xFD5E, "M", "سجى"), + (0xFD5F, "M", "سمح"), + (0xFD61, "M", "سمج"), + (0xFD62, "M", "سمم"), + (0xFD64, "M", "صحح"), + (0xFD66, "M", "صمم"), + (0xFD67, "M", "شحم"), + (0xFD69, "M", "شجي"), + (0xFD6A, "M", "شمخ"), + (0xFD6C, "M", "شمم"), + (0xFD6E, "M", "ضحى"), + (0xFD6F, "M", "ضخم"), + (0xFD71, "M", "طمح"), + (0xFD73, "M", "طمم"), + (0xFD74, "M", "طمي"), + (0xFD75, "M", "عجم"), + (0xFD76, "M", "عمم"), + (0xFD78, "M", "عمى"), + (0xFD79, 
"M", "غمم"), + (0xFD7A, "M", "غمي"), + (0xFD7B, "M", "غمى"), + (0xFD7C, "M", "فخم"), + ] + + +def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFD7E, "M", "قمح"), + (0xFD7F, "M", "قمم"), + (0xFD80, "M", "لحم"), + (0xFD81, "M", "لحي"), + (0xFD82, "M", "لحى"), + (0xFD83, "M", "لجج"), + (0xFD85, "M", "لخم"), + (0xFD87, "M", "لمح"), + (0xFD89, "M", "محج"), + (0xFD8A, "M", "محم"), + (0xFD8B, "M", "محي"), + (0xFD8C, "M", "مجح"), + (0xFD8D, "M", "مجم"), + (0xFD8E, "M", "مخج"), + (0xFD8F, "M", "مخم"), + (0xFD90, "X"), + (0xFD92, "M", "مجخ"), + (0xFD93, "M", "همج"), + (0xFD94, "M", "همم"), + (0xFD95, "M", "نحم"), + (0xFD96, "M", "نحى"), + (0xFD97, "M", "نجم"), + (0xFD99, "M", "نجى"), + (0xFD9A, "M", "نمي"), + (0xFD9B, "M", "نمى"), + (0xFD9C, "M", "يمم"), + (0xFD9E, "M", "بخي"), + (0xFD9F, "M", "تجي"), + (0xFDA0, "M", "تجى"), + (0xFDA1, "M", "تخي"), + (0xFDA2, "M", "تخى"), + (0xFDA3, "M", "تمي"), + (0xFDA4, "M", "تمى"), + (0xFDA5, "M", "جمي"), + (0xFDA6, "M", "جحى"), + (0xFDA7, "M", "جمى"), + (0xFDA8, "M", "سخى"), + (0xFDA9, "M", "صحي"), + (0xFDAA, "M", "شحي"), + (0xFDAB, "M", "ضحي"), + (0xFDAC, "M", "لجي"), + (0xFDAD, "M", "لمي"), + (0xFDAE, "M", "يحي"), + (0xFDAF, "M", "يجي"), + (0xFDB0, "M", "يمي"), + (0xFDB1, "M", "ممي"), + (0xFDB2, "M", "قمي"), + (0xFDB3, "M", "نحي"), + (0xFDB4, "M", "قمح"), + (0xFDB5, "M", "لحم"), + (0xFDB6, "M", "عمي"), + (0xFDB7, "M", "كمي"), + (0xFDB8, "M", "نجح"), + (0xFDB9, "M", "مخي"), + (0xFDBA, "M", "لجم"), + (0xFDBB, "M", "كمم"), + (0xFDBC, "M", "لجم"), + (0xFDBD, "M", "نجح"), + (0xFDBE, "M", "جحي"), + (0xFDBF, "M", "حجي"), + (0xFDC0, "M", "مجي"), + (0xFDC1, "M", "فمي"), + (0xFDC2, "M", "بحي"), + (0xFDC3, "M", "كمم"), + (0xFDC4, "M", "عجم"), + (0xFDC5, "M", "صمم"), + (0xFDC6, "M", "سخي"), + (0xFDC7, "M", "نجي"), + (0xFDC8, "X"), + (0xFDCF, "V"), + (0xFDD0, "X"), + (0xFDF0, "M", "صلے"), + (0xFDF1, "M", "قلے"), + (0xFDF2, "M", "الله"), + (0xFDF3, "M", "اكبر"), + (0xFDF4, "M", "محمد"), + (0xFDF5, "M", "صلعم"), + (0xFDF6, "M", "رسول"), + (0xFDF7, "M", "عليه"), + (0xFDF8, "M", "وسلم"), + (0xFDF9, "M", "صلى"), + (0xFDFA, "M", "صلى الله عليه وسلم"), + (0xFDFB, "M", "جل جلاله"), + (0xFDFC, "M", "ریال"), + (0xFDFD, "V"), + (0xFE00, "I"), + (0xFE10, "M", ","), + (0xFE11, "M", "、"), + (0xFE12, "X"), + (0xFE13, "M", ":"), + (0xFE14, "M", ";"), + (0xFE15, "M", "!"), + (0xFE16, "M", "?"), + (0xFE17, "M", "〖"), + (0xFE18, "M", "〗"), + (0xFE19, "X"), + (0xFE20, "V"), + (0xFE30, "X"), + (0xFE31, "M", "—"), + (0xFE32, "M", "–"), + ] + + +def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFE33, "M", "_"), + (0xFE35, "M", "("), + (0xFE36, "M", ")"), + (0xFE37, "M", "{"), + (0xFE38, "M", "}"), + (0xFE39, "M", "〔"), + (0xFE3A, "M", "〕"), + (0xFE3B, "M", "【"), + (0xFE3C, "M", "】"), + (0xFE3D, "M", "《"), + (0xFE3E, "M", "》"), + (0xFE3F, "M", "〈"), + (0xFE40, "M", "〉"), + (0xFE41, "M", "「"), + (0xFE42, "M", "」"), + (0xFE43, "M", "『"), + (0xFE44, "M", "』"), + (0xFE45, "V"), + (0xFE47, "M", "["), + (0xFE48, "M", "]"), + (0xFE49, "M", " ̅"), + (0xFE4D, "M", "_"), + (0xFE50, "M", ","), + (0xFE51, "M", "、"), + (0xFE52, "X"), + (0xFE54, "M", ";"), + (0xFE55, "M", ":"), + (0xFE56, "M", "?"), + (0xFE57, "M", "!"), + (0xFE58, "M", "—"), + (0xFE59, "M", "("), + (0xFE5A, "M", ")"), + (0xFE5B, "M", "{"), + (0xFE5C, "M", "}"), + (0xFE5D, "M", "〔"), + (0xFE5E, "M", "〕"), + (0xFE5F, "M", "#"), + (0xFE60, "M", "&"), + (0xFE61, "M", "*"), + (0xFE62, "M", "+"), + (0xFE63, "M", "-"), + (0xFE64, "M", "<"), + (0xFE65, "M", ">"), + (0xFE66, "M", 
"="), + (0xFE67, "X"), + (0xFE68, "M", "\\"), + (0xFE69, "M", "$"), + (0xFE6A, "M", "%"), + (0xFE6B, "M", "@"), + (0xFE6C, "X"), + (0xFE70, "M", " ً"), + (0xFE71, "M", "ـً"), + (0xFE72, "M", " ٌ"), + (0xFE73, "V"), + (0xFE74, "M", " ٍ"), + (0xFE75, "X"), + (0xFE76, "M", " َ"), + (0xFE77, "M", "ـَ"), + (0xFE78, "M", " ُ"), + (0xFE79, "M", "ـُ"), + (0xFE7A, "M", " ِ"), + (0xFE7B, "M", "ـِ"), + (0xFE7C, "M", " ّ"), + (0xFE7D, "M", "ـّ"), + (0xFE7E, "M", " ْ"), + (0xFE7F, "M", "ـْ"), + (0xFE80, "M", "ء"), + (0xFE81, "M", "آ"), + (0xFE83, "M", "أ"), + (0xFE85, "M", "ؤ"), + (0xFE87, "M", "إ"), + (0xFE89, "M", "ئ"), + (0xFE8D, "M", "ا"), + (0xFE8F, "M", "ب"), + (0xFE93, "M", "ة"), + (0xFE95, "M", "ت"), + (0xFE99, "M", "ث"), + (0xFE9D, "M", "ج"), + (0xFEA1, "M", "ح"), + (0xFEA5, "M", "خ"), + (0xFEA9, "M", "د"), + (0xFEAB, "M", "ذ"), + (0xFEAD, "M", "ر"), + (0xFEAF, "M", "ز"), + (0xFEB1, "M", "س"), + (0xFEB5, "M", "ش"), + (0xFEB9, "M", "ص"), + (0xFEBD, "M", "ض"), + (0xFEC1, "M", "ط"), + (0xFEC5, "M", "ظ"), + (0xFEC9, "M", "ع"), + (0xFECD, "M", "غ"), + (0xFED1, "M", "ف"), + (0xFED5, "M", "ق"), + (0xFED9, "M", "ك"), + (0xFEDD, "M", "ل"), + (0xFEE1, "M", "م"), + (0xFEE5, "M", "ن"), + (0xFEE9, "M", "ه"), + (0xFEED, "M", "و"), + ] + + +def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFEEF, "M", "ى"), + (0xFEF1, "M", "ي"), + (0xFEF5, "M", "لآ"), + (0xFEF7, "M", "لأ"), + (0xFEF9, "M", "لإ"), + (0xFEFB, "M", "لا"), + (0xFEFD, "X"), + (0xFEFF, "I"), + (0xFF00, "X"), + (0xFF01, "M", "!"), + (0xFF02, "M", '"'), + (0xFF03, "M", "#"), + (0xFF04, "M", "$"), + (0xFF05, "M", "%"), + (0xFF06, "M", "&"), + (0xFF07, "M", "'"), + (0xFF08, "M", "("), + (0xFF09, "M", ")"), + (0xFF0A, "M", "*"), + (0xFF0B, "M", "+"), + (0xFF0C, "M", ","), + (0xFF0D, "M", "-"), + (0xFF0E, "M", "."), + (0xFF0F, "M", "/"), + (0xFF10, "M", "0"), + (0xFF11, "M", "1"), + (0xFF12, "M", "2"), + (0xFF13, "M", "3"), + (0xFF14, "M", "4"), + (0xFF15, "M", "5"), + (0xFF16, "M", "6"), + (0xFF17, "M", "7"), + (0xFF18, "M", "8"), + (0xFF19, "M", "9"), + (0xFF1A, "M", ":"), + (0xFF1B, "M", ";"), + (0xFF1C, "M", "<"), + (0xFF1D, "M", "="), + (0xFF1E, "M", ">"), + (0xFF1F, "M", "?"), + (0xFF20, "M", "@"), + (0xFF21, "M", "a"), + (0xFF22, "M", "b"), + (0xFF23, "M", "c"), + (0xFF24, "M", "d"), + (0xFF25, "M", "e"), + (0xFF26, "M", "f"), + (0xFF27, "M", "g"), + (0xFF28, "M", "h"), + (0xFF29, "M", "i"), + (0xFF2A, "M", "j"), + (0xFF2B, "M", "k"), + (0xFF2C, "M", "l"), + (0xFF2D, "M", "m"), + (0xFF2E, "M", "n"), + (0xFF2F, "M", "o"), + (0xFF30, "M", "p"), + (0xFF31, "M", "q"), + (0xFF32, "M", "r"), + (0xFF33, "M", "s"), + (0xFF34, "M", "t"), + (0xFF35, "M", "u"), + (0xFF36, "M", "v"), + (0xFF37, "M", "w"), + (0xFF38, "M", "x"), + (0xFF39, "M", "y"), + (0xFF3A, "M", "z"), + (0xFF3B, "M", "["), + (0xFF3C, "M", "\\"), + (0xFF3D, "M", "]"), + (0xFF3E, "M", "^"), + (0xFF3F, "M", "_"), + (0xFF40, "M", "`"), + (0xFF41, "M", "a"), + (0xFF42, "M", "b"), + (0xFF43, "M", "c"), + (0xFF44, "M", "d"), + (0xFF45, "M", "e"), + (0xFF46, "M", "f"), + (0xFF47, "M", "g"), + (0xFF48, "M", "h"), + (0xFF49, "M", "i"), + (0xFF4A, "M", "j"), + (0xFF4B, "M", "k"), + (0xFF4C, "M", "l"), + (0xFF4D, "M", "m"), + (0xFF4E, "M", "n"), + (0xFF4F, "M", "o"), + (0xFF50, "M", "p"), + (0xFF51, "M", "q"), + (0xFF52, "M", "r"), + (0xFF53, "M", "s"), + (0xFF54, "M", "t"), + (0xFF55, "M", "u"), + (0xFF56, "M", "v"), + (0xFF57, "M", "w"), + (0xFF58, "M", "x"), + (0xFF59, "M", "y"), + (0xFF5A, "M", "z"), + (0xFF5B, "M", "{"), + ] + + +def _seg_52() -> 
List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFF5C, "M", "|"), + (0xFF5D, "M", "}"), + (0xFF5E, "M", "~"), + (0xFF5F, "M", "⦅"), + (0xFF60, "M", "⦆"), + (0xFF61, "M", "."), + (0xFF62, "M", "「"), + (0xFF63, "M", "」"), + (0xFF64, "M", "、"), + (0xFF65, "M", "・"), + (0xFF66, "M", "ヲ"), + (0xFF67, "M", "ァ"), + (0xFF68, "M", "ィ"), + (0xFF69, "M", "ゥ"), + (0xFF6A, "M", "ェ"), + (0xFF6B, "M", "ォ"), + (0xFF6C, "M", "ャ"), + (0xFF6D, "M", "ュ"), + (0xFF6E, "M", "ョ"), + (0xFF6F, "M", "ッ"), + (0xFF70, "M", "ー"), + (0xFF71, "M", "ア"), + (0xFF72, "M", "イ"), + (0xFF73, "M", "ウ"), + (0xFF74, "M", "エ"), + (0xFF75, "M", "オ"), + (0xFF76, "M", "カ"), + (0xFF77, "M", "キ"), + (0xFF78, "M", "ク"), + (0xFF79, "M", "ケ"), + (0xFF7A, "M", "コ"), + (0xFF7B, "M", "サ"), + (0xFF7C, "M", "シ"), + (0xFF7D, "M", "ス"), + (0xFF7E, "M", "セ"), + (0xFF7F, "M", "ソ"), + (0xFF80, "M", "タ"), + (0xFF81, "M", "チ"), + (0xFF82, "M", "ツ"), + (0xFF83, "M", "テ"), + (0xFF84, "M", "ト"), + (0xFF85, "M", "ナ"), + (0xFF86, "M", "ニ"), + (0xFF87, "M", "ヌ"), + (0xFF88, "M", "ネ"), + (0xFF89, "M", "ノ"), + (0xFF8A, "M", "ハ"), + (0xFF8B, "M", "ヒ"), + (0xFF8C, "M", "フ"), + (0xFF8D, "M", "ヘ"), + (0xFF8E, "M", "ホ"), + (0xFF8F, "M", "マ"), + (0xFF90, "M", "ミ"), + (0xFF91, "M", "ム"), + (0xFF92, "M", "メ"), + (0xFF93, "M", "モ"), + (0xFF94, "M", "ヤ"), + (0xFF95, "M", "ユ"), + (0xFF96, "M", "ヨ"), + (0xFF97, "M", "ラ"), + (0xFF98, "M", "リ"), + (0xFF99, "M", "ル"), + (0xFF9A, "M", "レ"), + (0xFF9B, "M", "ロ"), + (0xFF9C, "M", "ワ"), + (0xFF9D, "M", "ン"), + (0xFF9E, "M", "゙"), + (0xFF9F, "M", "゚"), + (0xFFA0, "I"), + (0xFFA1, "M", "ᄀ"), + (0xFFA2, "M", "ᄁ"), + (0xFFA3, "M", "ᆪ"), + (0xFFA4, "M", "ᄂ"), + (0xFFA5, "M", "ᆬ"), + (0xFFA6, "M", "ᆭ"), + (0xFFA7, "M", "ᄃ"), + (0xFFA8, "M", "ᄄ"), + (0xFFA9, "M", "ᄅ"), + (0xFFAA, "M", "ᆰ"), + (0xFFAB, "M", "ᆱ"), + (0xFFAC, "M", "ᆲ"), + (0xFFAD, "M", "ᆳ"), + (0xFFAE, "M", "ᆴ"), + (0xFFAF, "M", "ᆵ"), + (0xFFB0, "M", "ᄚ"), + (0xFFB1, "M", "ᄆ"), + (0xFFB2, "M", "ᄇ"), + (0xFFB3, "M", "ᄈ"), + (0xFFB4, "M", "ᄡ"), + (0xFFB5, "M", "ᄉ"), + (0xFFB6, "M", "ᄊ"), + (0xFFB7, "M", "ᄋ"), + (0xFFB8, "M", "ᄌ"), + (0xFFB9, "M", "ᄍ"), + (0xFFBA, "M", "ᄎ"), + (0xFFBB, "M", "ᄏ"), + (0xFFBC, "M", "ᄐ"), + (0xFFBD, "M", "ᄑ"), + (0xFFBE, "M", "ᄒ"), + (0xFFBF, "X"), + ] + + +def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFFC2, "M", "ᅡ"), + (0xFFC3, "M", "ᅢ"), + (0xFFC4, "M", "ᅣ"), + (0xFFC5, "M", "ᅤ"), + (0xFFC6, "M", "ᅥ"), + (0xFFC7, "M", "ᅦ"), + (0xFFC8, "X"), + (0xFFCA, "M", "ᅧ"), + (0xFFCB, "M", "ᅨ"), + (0xFFCC, "M", "ᅩ"), + (0xFFCD, "M", "ᅪ"), + (0xFFCE, "M", "ᅫ"), + (0xFFCF, "M", "ᅬ"), + (0xFFD0, "X"), + (0xFFD2, "M", "ᅭ"), + (0xFFD3, "M", "ᅮ"), + (0xFFD4, "M", "ᅯ"), + (0xFFD5, "M", "ᅰ"), + (0xFFD6, "M", "ᅱ"), + (0xFFD7, "M", "ᅲ"), + (0xFFD8, "X"), + (0xFFDA, "M", "ᅳ"), + (0xFFDB, "M", "ᅴ"), + (0xFFDC, "M", "ᅵ"), + (0xFFDD, "X"), + (0xFFE0, "M", "¢"), + (0xFFE1, "M", "£"), + (0xFFE2, "M", "¬"), + (0xFFE3, "M", " ̄"), + (0xFFE4, "M", "¦"), + (0xFFE5, "M", "¥"), + (0xFFE6, "M", "₩"), + (0xFFE7, "X"), + (0xFFE8, "M", "│"), + (0xFFE9, "M", "←"), + (0xFFEA, "M", "↑"), + (0xFFEB, "M", "→"), + (0xFFEC, "M", "↓"), + (0xFFED, "M", "■"), + (0xFFEE, "M", "○"), + (0xFFEF, "X"), + (0x10000, "V"), + (0x1000C, "X"), + (0x1000D, "V"), + (0x10027, "X"), + (0x10028, "V"), + (0x1003B, "X"), + (0x1003C, "V"), + (0x1003E, "X"), + (0x1003F, "V"), + (0x1004E, "X"), + (0x10050, "V"), + (0x1005E, "X"), + (0x10080, "V"), + (0x100FB, "X"), + (0x10100, "V"), + (0x10103, "X"), + (0x10107, "V"), + (0x10134, "X"), + (0x10137, 
"V"), + (0x1018F, "X"), + (0x10190, "V"), + (0x1019D, "X"), + (0x101A0, "V"), + (0x101A1, "X"), + (0x101D0, "V"), + (0x101FE, "X"), + (0x10280, "V"), + (0x1029D, "X"), + (0x102A0, "V"), + (0x102D1, "X"), + (0x102E0, "V"), + (0x102FC, "X"), + (0x10300, "V"), + (0x10324, "X"), + (0x1032D, "V"), + (0x1034B, "X"), + (0x10350, "V"), + (0x1037B, "X"), + (0x10380, "V"), + (0x1039E, "X"), + (0x1039F, "V"), + (0x103C4, "X"), + (0x103C8, "V"), + (0x103D6, "X"), + (0x10400, "M", "𐐨"), + (0x10401, "M", "𐐩"), + (0x10402, "M", "𐐪"), + (0x10403, "M", "𐐫"), + (0x10404, "M", "𐐬"), + (0x10405, "M", "𐐭"), + (0x10406, "M", "𐐮"), + (0x10407, "M", "𐐯"), + (0x10408, "M", "𐐰"), + (0x10409, "M", "𐐱"), + (0x1040A, "M", "𐐲"), + (0x1040B, "M", "𐐳"), + (0x1040C, "M", "𐐴"), + (0x1040D, "M", "𐐵"), + (0x1040E, "M", "𐐶"), + ] + + +def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1040F, "M", "𐐷"), + (0x10410, "M", "𐐸"), + (0x10411, "M", "𐐹"), + (0x10412, "M", "𐐺"), + (0x10413, "M", "𐐻"), + (0x10414, "M", "𐐼"), + (0x10415, "M", "𐐽"), + (0x10416, "M", "𐐾"), + (0x10417, "M", "𐐿"), + (0x10418, "M", "𐑀"), + (0x10419, "M", "𐑁"), + (0x1041A, "M", "𐑂"), + (0x1041B, "M", "𐑃"), + (0x1041C, "M", "𐑄"), + (0x1041D, "M", "𐑅"), + (0x1041E, "M", "𐑆"), + (0x1041F, "M", "𐑇"), + (0x10420, "M", "𐑈"), + (0x10421, "M", "𐑉"), + (0x10422, "M", "𐑊"), + (0x10423, "M", "𐑋"), + (0x10424, "M", "𐑌"), + (0x10425, "M", "𐑍"), + (0x10426, "M", "𐑎"), + (0x10427, "M", "𐑏"), + (0x10428, "V"), + (0x1049E, "X"), + (0x104A0, "V"), + (0x104AA, "X"), + (0x104B0, "M", "𐓘"), + (0x104B1, "M", "𐓙"), + (0x104B2, "M", "𐓚"), + (0x104B3, "M", "𐓛"), + (0x104B4, "M", "𐓜"), + (0x104B5, "M", "𐓝"), + (0x104B6, "M", "𐓞"), + (0x104B7, "M", "𐓟"), + (0x104B8, "M", "𐓠"), + (0x104B9, "M", "𐓡"), + (0x104BA, "M", "𐓢"), + (0x104BB, "M", "𐓣"), + (0x104BC, "M", "𐓤"), + (0x104BD, "M", "𐓥"), + (0x104BE, "M", "𐓦"), + (0x104BF, "M", "𐓧"), + (0x104C0, "M", "𐓨"), + (0x104C1, "M", "𐓩"), + (0x104C2, "M", "𐓪"), + (0x104C3, "M", "𐓫"), + (0x104C4, "M", "𐓬"), + (0x104C5, "M", "𐓭"), + (0x104C6, "M", "𐓮"), + (0x104C7, "M", "𐓯"), + (0x104C8, "M", "𐓰"), + (0x104C9, "M", "𐓱"), + (0x104CA, "M", "𐓲"), + (0x104CB, "M", "𐓳"), + (0x104CC, "M", "𐓴"), + (0x104CD, "M", "𐓵"), + (0x104CE, "M", "𐓶"), + (0x104CF, "M", "𐓷"), + (0x104D0, "M", "𐓸"), + (0x104D1, "M", "𐓹"), + (0x104D2, "M", "𐓺"), + (0x104D3, "M", "𐓻"), + (0x104D4, "X"), + (0x104D8, "V"), + (0x104FC, "X"), + (0x10500, "V"), + (0x10528, "X"), + (0x10530, "V"), + (0x10564, "X"), + (0x1056F, "V"), + (0x10570, "M", "𐖗"), + (0x10571, "M", "𐖘"), + (0x10572, "M", "𐖙"), + (0x10573, "M", "𐖚"), + (0x10574, "M", "𐖛"), + (0x10575, "M", "𐖜"), + (0x10576, "M", "𐖝"), + (0x10577, "M", "𐖞"), + (0x10578, "M", "𐖟"), + (0x10579, "M", "𐖠"), + (0x1057A, "M", "𐖡"), + (0x1057B, "X"), + (0x1057C, "M", "𐖣"), + (0x1057D, "M", "𐖤"), + (0x1057E, "M", "𐖥"), + (0x1057F, "M", "𐖦"), + (0x10580, "M", "𐖧"), + (0x10581, "M", "𐖨"), + (0x10582, "M", "𐖩"), + (0x10583, "M", "𐖪"), + (0x10584, "M", "𐖫"), + (0x10585, "M", "𐖬"), + (0x10586, "M", "𐖭"), + (0x10587, "M", "𐖮"), + (0x10588, "M", "𐖯"), + (0x10589, "M", "𐖰"), + (0x1058A, "M", "𐖱"), + ] + + +def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1058B, "X"), + (0x1058C, "M", "𐖳"), + (0x1058D, "M", "𐖴"), + (0x1058E, "M", "𐖵"), + (0x1058F, "M", "𐖶"), + (0x10590, "M", "𐖷"), + (0x10591, "M", "𐖸"), + (0x10592, "M", "𐖹"), + (0x10593, "X"), + (0x10594, "M", "𐖻"), + (0x10595, "M", "𐖼"), + (0x10596, "X"), + (0x10597, "V"), + (0x105A2, "X"), + (0x105A3, "V"), + (0x105B2, "X"), + 
(0x105B3, "V"), + (0x105BA, "X"), + (0x105BB, "V"), + (0x105BD, "X"), + (0x105C0, "V"), + (0x105F4, "X"), + (0x10600, "V"), + (0x10737, "X"), + (0x10740, "V"), + (0x10756, "X"), + (0x10760, "V"), + (0x10768, "X"), + (0x10780, "V"), + (0x10781, "M", "ː"), + (0x10782, "M", "ˑ"), + (0x10783, "M", "æ"), + (0x10784, "M", "ʙ"), + (0x10785, "M", "ɓ"), + (0x10786, "X"), + (0x10787, "M", "ʣ"), + (0x10788, "M", "ꭦ"), + (0x10789, "M", "ʥ"), + (0x1078A, "M", "ʤ"), + (0x1078B, "M", "ɖ"), + (0x1078C, "M", "ɗ"), + (0x1078D, "M", "ᶑ"), + (0x1078E, "M", "ɘ"), + (0x1078F, "M", "ɞ"), + (0x10790, "M", "ʩ"), + (0x10791, "M", "ɤ"), + (0x10792, "M", "ɢ"), + (0x10793, "M", "ɠ"), + (0x10794, "M", "ʛ"), + (0x10795, "M", "ħ"), + (0x10796, "M", "ʜ"), + (0x10797, "M", "ɧ"), + (0x10798, "M", "ʄ"), + (0x10799, "M", "ʪ"), + (0x1079A, "M", "ʫ"), + (0x1079B, "M", "ɬ"), + (0x1079C, "M", "𝼄"), + (0x1079D, "M", "ꞎ"), + (0x1079E, "M", "ɮ"), + (0x1079F, "M", "𝼅"), + (0x107A0, "M", "ʎ"), + (0x107A1, "M", "𝼆"), + (0x107A2, "M", "ø"), + (0x107A3, "M", "ɶ"), + (0x107A4, "M", "ɷ"), + (0x107A5, "M", "q"), + (0x107A6, "M", "ɺ"), + (0x107A7, "M", "𝼈"), + (0x107A8, "M", "ɽ"), + (0x107A9, "M", "ɾ"), + (0x107AA, "M", "ʀ"), + (0x107AB, "M", "ʨ"), + (0x107AC, "M", "ʦ"), + (0x107AD, "M", "ꭧ"), + (0x107AE, "M", "ʧ"), + (0x107AF, "M", "ʈ"), + (0x107B0, "M", "ⱱ"), + (0x107B1, "X"), + (0x107B2, "M", "ʏ"), + (0x107B3, "M", "ʡ"), + (0x107B4, "M", "ʢ"), + (0x107B5, "M", "ʘ"), + (0x107B6, "M", "ǀ"), + (0x107B7, "M", "ǁ"), + (0x107B8, "M", "ǂ"), + (0x107B9, "M", "𝼊"), + (0x107BA, "M", "𝼞"), + (0x107BB, "X"), + (0x10800, "V"), + (0x10806, "X"), + (0x10808, "V"), + (0x10809, "X"), + (0x1080A, "V"), + (0x10836, "X"), + (0x10837, "V"), + (0x10839, "X"), + (0x1083C, "V"), + (0x1083D, "X"), + (0x1083F, "V"), + (0x10856, "X"), + ] + + +def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10857, "V"), + (0x1089F, "X"), + (0x108A7, "V"), + (0x108B0, "X"), + (0x108E0, "V"), + (0x108F3, "X"), + (0x108F4, "V"), + (0x108F6, "X"), + (0x108FB, "V"), + (0x1091C, "X"), + (0x1091F, "V"), + (0x1093A, "X"), + (0x1093F, "V"), + (0x10940, "X"), + (0x10980, "V"), + (0x109B8, "X"), + (0x109BC, "V"), + (0x109D0, "X"), + (0x109D2, "V"), + (0x10A04, "X"), + (0x10A05, "V"), + (0x10A07, "X"), + (0x10A0C, "V"), + (0x10A14, "X"), + (0x10A15, "V"), + (0x10A18, "X"), + (0x10A19, "V"), + (0x10A36, "X"), + (0x10A38, "V"), + (0x10A3B, "X"), + (0x10A3F, "V"), + (0x10A49, "X"), + (0x10A50, "V"), + (0x10A59, "X"), + (0x10A60, "V"), + (0x10AA0, "X"), + (0x10AC0, "V"), + (0x10AE7, "X"), + (0x10AEB, "V"), + (0x10AF7, "X"), + (0x10B00, "V"), + (0x10B36, "X"), + (0x10B39, "V"), + (0x10B56, "X"), + (0x10B58, "V"), + (0x10B73, "X"), + (0x10B78, "V"), + (0x10B92, "X"), + (0x10B99, "V"), + (0x10B9D, "X"), + (0x10BA9, "V"), + (0x10BB0, "X"), + (0x10C00, "V"), + (0x10C49, "X"), + (0x10C80, "M", "𐳀"), + (0x10C81, "M", "𐳁"), + (0x10C82, "M", "𐳂"), + (0x10C83, "M", "𐳃"), + (0x10C84, "M", "𐳄"), + (0x10C85, "M", "𐳅"), + (0x10C86, "M", "𐳆"), + (0x10C87, "M", "𐳇"), + (0x10C88, "M", "𐳈"), + (0x10C89, "M", "𐳉"), + (0x10C8A, "M", "𐳊"), + (0x10C8B, "M", "𐳋"), + (0x10C8C, "M", "𐳌"), + (0x10C8D, "M", "𐳍"), + (0x10C8E, "M", "𐳎"), + (0x10C8F, "M", "𐳏"), + (0x10C90, "M", "𐳐"), + (0x10C91, "M", "𐳑"), + (0x10C92, "M", "𐳒"), + (0x10C93, "M", "𐳓"), + (0x10C94, "M", "𐳔"), + (0x10C95, "M", "𐳕"), + (0x10C96, "M", "𐳖"), + (0x10C97, "M", "𐳗"), + (0x10C98, "M", "𐳘"), + (0x10C99, "M", "𐳙"), + (0x10C9A, "M", "𐳚"), + (0x10C9B, "M", "𐳛"), + (0x10C9C, "M", "𐳜"), + (0x10C9D, "M", "𐳝"), + (0x10C9E, 
"M", "𐳞"), + (0x10C9F, "M", "𐳟"), + (0x10CA0, "M", "𐳠"), + (0x10CA1, "M", "𐳡"), + (0x10CA2, "M", "𐳢"), + (0x10CA3, "M", "𐳣"), + (0x10CA4, "M", "𐳤"), + (0x10CA5, "M", "𐳥"), + (0x10CA6, "M", "𐳦"), + (0x10CA7, "M", "𐳧"), + (0x10CA8, "M", "𐳨"), + (0x10CA9, "M", "𐳩"), + (0x10CAA, "M", "𐳪"), + (0x10CAB, "M", "𐳫"), + (0x10CAC, "M", "𐳬"), + (0x10CAD, "M", "𐳭"), + ] + + +def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10CAE, "M", "𐳮"), + (0x10CAF, "M", "𐳯"), + (0x10CB0, "M", "𐳰"), + (0x10CB1, "M", "𐳱"), + (0x10CB2, "M", "𐳲"), + (0x10CB3, "X"), + (0x10CC0, "V"), + (0x10CF3, "X"), + (0x10CFA, "V"), + (0x10D28, "X"), + (0x10D30, "V"), + (0x10D3A, "X"), + (0x10D40, "V"), + (0x10D50, "M", "𐵰"), + (0x10D51, "M", "𐵱"), + (0x10D52, "M", "𐵲"), + (0x10D53, "M", "𐵳"), + (0x10D54, "M", "𐵴"), + (0x10D55, "M", "𐵵"), + (0x10D56, "M", "𐵶"), + (0x10D57, "M", "𐵷"), + (0x10D58, "M", "𐵸"), + (0x10D59, "M", "𐵹"), + (0x10D5A, "M", "𐵺"), + (0x10D5B, "M", "𐵻"), + (0x10D5C, "M", "𐵼"), + (0x10D5D, "M", "𐵽"), + (0x10D5E, "M", "𐵾"), + (0x10D5F, "M", "𐵿"), + (0x10D60, "M", "𐶀"), + (0x10D61, "M", "𐶁"), + (0x10D62, "M", "𐶂"), + (0x10D63, "M", "𐶃"), + (0x10D64, "M", "𐶄"), + (0x10D65, "M", "𐶅"), + (0x10D66, "X"), + (0x10D69, "V"), + (0x10D86, "X"), + (0x10D8E, "V"), + (0x10D90, "X"), + (0x10E60, "V"), + (0x10E7F, "X"), + (0x10E80, "V"), + (0x10EAA, "X"), + (0x10EAB, "V"), + (0x10EAE, "X"), + (0x10EB0, "V"), + (0x10EB2, "X"), + (0x10EC2, "V"), + (0x10EC5, "X"), + (0x10EFC, "V"), + (0x10F28, "X"), + (0x10F30, "V"), + (0x10F5A, "X"), + (0x10F70, "V"), + (0x10F8A, "X"), + (0x10FB0, "V"), + (0x10FCC, "X"), + (0x10FE0, "V"), + (0x10FF7, "X"), + (0x11000, "V"), + (0x1104E, "X"), + (0x11052, "V"), + (0x11076, "X"), + (0x1107F, "V"), + (0x110BD, "X"), + (0x110BE, "V"), + (0x110C3, "X"), + (0x110D0, "V"), + (0x110E9, "X"), + (0x110F0, "V"), + (0x110FA, "X"), + (0x11100, "V"), + (0x11135, "X"), + (0x11136, "V"), + (0x11148, "X"), + (0x11150, "V"), + (0x11177, "X"), + (0x11180, "V"), + (0x111E0, "X"), + (0x111E1, "V"), + (0x111F5, "X"), + (0x11200, "V"), + (0x11212, "X"), + (0x11213, "V"), + (0x11242, "X"), + (0x11280, "V"), + (0x11287, "X"), + (0x11288, "V"), + (0x11289, "X"), + (0x1128A, "V"), + (0x1128E, "X"), + (0x1128F, "V"), + (0x1129E, "X"), + (0x1129F, "V"), + (0x112AA, "X"), + (0x112B0, "V"), + (0x112EB, "X"), + (0x112F0, "V"), + (0x112FA, "X"), + ] + + +def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x11300, "V"), + (0x11304, "X"), + (0x11305, "V"), + (0x1130D, "X"), + (0x1130F, "V"), + (0x11311, "X"), + (0x11313, "V"), + (0x11329, "X"), + (0x1132A, "V"), + (0x11331, "X"), + (0x11332, "V"), + (0x11334, "X"), + (0x11335, "V"), + (0x1133A, "X"), + (0x1133B, "V"), + (0x11345, "X"), + (0x11347, "V"), + (0x11349, "X"), + (0x1134B, "V"), + (0x1134E, "X"), + (0x11350, "V"), + (0x11351, "X"), + (0x11357, "V"), + (0x11358, "X"), + (0x1135D, "V"), + (0x11364, "X"), + (0x11366, "V"), + (0x1136D, "X"), + (0x11370, "V"), + (0x11375, "X"), + (0x11380, "V"), + (0x1138A, "X"), + (0x1138B, "V"), + (0x1138C, "X"), + (0x1138E, "V"), + (0x1138F, "X"), + (0x11390, "V"), + (0x113B6, "X"), + (0x113B7, "V"), + (0x113C1, "X"), + (0x113C2, "V"), + (0x113C3, "X"), + (0x113C5, "V"), + (0x113C6, "X"), + (0x113C7, "V"), + (0x113CB, "X"), + (0x113CC, "V"), + (0x113D6, "X"), + (0x113D7, "V"), + (0x113D9, "X"), + (0x113E1, "V"), + (0x113E3, "X"), + (0x11400, "V"), + (0x1145C, "X"), + (0x1145D, "V"), + (0x11462, "X"), + (0x11480, "V"), + (0x114C8, "X"), + (0x114D0, "V"), + (0x114DA, "X"), + 
(0x11580, "V"), + (0x115B6, "X"), + (0x115B8, "V"), + (0x115DE, "X"), + (0x11600, "V"), + (0x11645, "X"), + (0x11650, "V"), + (0x1165A, "X"), + (0x11660, "V"), + (0x1166D, "X"), + (0x11680, "V"), + (0x116BA, "X"), + (0x116C0, "V"), + (0x116CA, "X"), + (0x116D0, "V"), + (0x116E4, "X"), + (0x11700, "V"), + (0x1171B, "X"), + (0x1171D, "V"), + (0x1172C, "X"), + (0x11730, "V"), + (0x11747, "X"), + (0x11800, "V"), + (0x1183C, "X"), + (0x118A0, "M", "𑣀"), + (0x118A1, "M", "𑣁"), + (0x118A2, "M", "𑣂"), + (0x118A3, "M", "𑣃"), + (0x118A4, "M", "𑣄"), + (0x118A5, "M", "𑣅"), + (0x118A6, "M", "𑣆"), + (0x118A7, "M", "𑣇"), + (0x118A8, "M", "𑣈"), + (0x118A9, "M", "𑣉"), + (0x118AA, "M", "𑣊"), + (0x118AB, "M", "𑣋"), + (0x118AC, "M", "𑣌"), + (0x118AD, "M", "𑣍"), + (0x118AE, "M", "𑣎"), + (0x118AF, "M", "𑣏"), + ] + + +def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x118B0, "M", "𑣐"), + (0x118B1, "M", "𑣑"), + (0x118B2, "M", "𑣒"), + (0x118B3, "M", "𑣓"), + (0x118B4, "M", "𑣔"), + (0x118B5, "M", "𑣕"), + (0x118B6, "M", "𑣖"), + (0x118B7, "M", "𑣗"), + (0x118B8, "M", "𑣘"), + (0x118B9, "M", "𑣙"), + (0x118BA, "M", "𑣚"), + (0x118BB, "M", "𑣛"), + (0x118BC, "M", "𑣜"), + (0x118BD, "M", "𑣝"), + (0x118BE, "M", "𑣞"), + (0x118BF, "M", "𑣟"), + (0x118C0, "V"), + (0x118F3, "X"), + (0x118FF, "V"), + (0x11907, "X"), + (0x11909, "V"), + (0x1190A, "X"), + (0x1190C, "V"), + (0x11914, "X"), + (0x11915, "V"), + (0x11917, "X"), + (0x11918, "V"), + (0x11936, "X"), + (0x11937, "V"), + (0x11939, "X"), + (0x1193B, "V"), + (0x11947, "X"), + (0x11950, "V"), + (0x1195A, "X"), + (0x119A0, "V"), + (0x119A8, "X"), + (0x119AA, "V"), + (0x119D8, "X"), + (0x119DA, "V"), + (0x119E5, "X"), + (0x11A00, "V"), + (0x11A48, "X"), + (0x11A50, "V"), + (0x11AA3, "X"), + (0x11AB0, "V"), + (0x11AF9, "X"), + (0x11B00, "V"), + (0x11B0A, "X"), + (0x11BC0, "V"), + (0x11BE2, "X"), + (0x11BF0, "V"), + (0x11BFA, "X"), + (0x11C00, "V"), + (0x11C09, "X"), + (0x11C0A, "V"), + (0x11C37, "X"), + (0x11C38, "V"), + (0x11C46, "X"), + (0x11C50, "V"), + (0x11C6D, "X"), + (0x11C70, "V"), + (0x11C90, "X"), + (0x11C92, "V"), + (0x11CA8, "X"), + (0x11CA9, "V"), + (0x11CB7, "X"), + (0x11D00, "V"), + (0x11D07, "X"), + (0x11D08, "V"), + (0x11D0A, "X"), + (0x11D0B, "V"), + (0x11D37, "X"), + (0x11D3A, "V"), + (0x11D3B, "X"), + (0x11D3C, "V"), + (0x11D3E, "X"), + (0x11D3F, "V"), + (0x11D48, "X"), + (0x11D50, "V"), + (0x11D5A, "X"), + (0x11D60, "V"), + (0x11D66, "X"), + (0x11D67, "V"), + (0x11D69, "X"), + (0x11D6A, "V"), + (0x11D8F, "X"), + (0x11D90, "V"), + (0x11D92, "X"), + (0x11D93, "V"), + (0x11D99, "X"), + (0x11DA0, "V"), + (0x11DAA, "X"), + (0x11EE0, "V"), + (0x11EF9, "X"), + (0x11F00, "V"), + (0x11F11, "X"), + (0x11F12, "V"), + (0x11F3B, "X"), + (0x11F3E, "V"), + (0x11F5B, "X"), + ] + + +def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x11FB0, "V"), + (0x11FB1, "X"), + (0x11FC0, "V"), + (0x11FF2, "X"), + (0x11FFF, "V"), + (0x1239A, "X"), + (0x12400, "V"), + (0x1246F, "X"), + (0x12470, "V"), + (0x12475, "X"), + (0x12480, "V"), + (0x12544, "X"), + (0x12F90, "V"), + (0x12FF3, "X"), + (0x13000, "V"), + (0x13430, "X"), + (0x13440, "V"), + (0x13456, "X"), + (0x13460, "V"), + (0x143FB, "X"), + (0x14400, "V"), + (0x14647, "X"), + (0x16100, "V"), + (0x1613A, "X"), + (0x16800, "V"), + (0x16A39, "X"), + (0x16A40, "V"), + (0x16A5F, "X"), + (0x16A60, "V"), + (0x16A6A, "X"), + (0x16A6E, "V"), + (0x16ABF, "X"), + (0x16AC0, "V"), + (0x16ACA, "X"), + (0x16AD0, "V"), + (0x16AEE, "X"), + (0x16AF0, "V"), + (0x16AF6, "X"), + (0x16B00, 
"V"), + (0x16B46, "X"), + (0x16B50, "V"), + (0x16B5A, "X"), + (0x16B5B, "V"), + (0x16B62, "X"), + (0x16B63, "V"), + (0x16B78, "X"), + (0x16B7D, "V"), + (0x16B90, "X"), + (0x16D40, "V"), + (0x16D7A, "X"), + (0x16E40, "M", "𖹠"), + (0x16E41, "M", "𖹡"), + (0x16E42, "M", "𖹢"), + (0x16E43, "M", "𖹣"), + (0x16E44, "M", "𖹤"), + (0x16E45, "M", "𖹥"), + (0x16E46, "M", "𖹦"), + (0x16E47, "M", "𖹧"), + (0x16E48, "M", "𖹨"), + (0x16E49, "M", "𖹩"), + (0x16E4A, "M", "𖹪"), + (0x16E4B, "M", "𖹫"), + (0x16E4C, "M", "𖹬"), + (0x16E4D, "M", "𖹭"), + (0x16E4E, "M", "𖹮"), + (0x16E4F, "M", "𖹯"), + (0x16E50, "M", "𖹰"), + (0x16E51, "M", "𖹱"), + (0x16E52, "M", "𖹲"), + (0x16E53, "M", "𖹳"), + (0x16E54, "M", "𖹴"), + (0x16E55, "M", "𖹵"), + (0x16E56, "M", "𖹶"), + (0x16E57, "M", "𖹷"), + (0x16E58, "M", "𖹸"), + (0x16E59, "M", "𖹹"), + (0x16E5A, "M", "𖹺"), + (0x16E5B, "M", "𖹻"), + (0x16E5C, "M", "𖹼"), + (0x16E5D, "M", "𖹽"), + (0x16E5E, "M", "𖹾"), + (0x16E5F, "M", "𖹿"), + (0x16E60, "V"), + (0x16E9B, "X"), + (0x16F00, "V"), + (0x16F4B, "X"), + (0x16F4F, "V"), + (0x16F88, "X"), + (0x16F8F, "V"), + (0x16FA0, "X"), + (0x16FE0, "V"), + (0x16FE5, "X"), + (0x16FF0, "V"), + (0x16FF2, "X"), + (0x17000, "V"), + (0x187F8, "X"), + (0x18800, "V"), + (0x18CD6, "X"), + (0x18CFF, "V"), + (0x18D09, "X"), + ] + + +def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1AFF0, "V"), + (0x1AFF4, "X"), + (0x1AFF5, "V"), + (0x1AFFC, "X"), + (0x1AFFD, "V"), + (0x1AFFF, "X"), + (0x1B000, "V"), + (0x1B123, "X"), + (0x1B132, "V"), + (0x1B133, "X"), + (0x1B150, "V"), + (0x1B153, "X"), + (0x1B155, "V"), + (0x1B156, "X"), + (0x1B164, "V"), + (0x1B168, "X"), + (0x1B170, "V"), + (0x1B2FC, "X"), + (0x1BC00, "V"), + (0x1BC6B, "X"), + (0x1BC70, "V"), + (0x1BC7D, "X"), + (0x1BC80, "V"), + (0x1BC89, "X"), + (0x1BC90, "V"), + (0x1BC9A, "X"), + (0x1BC9C, "V"), + (0x1BCA0, "I"), + (0x1BCA4, "X"), + (0x1CC00, "V"), + (0x1CCD6, "M", "a"), + (0x1CCD7, "M", "b"), + (0x1CCD8, "M", "c"), + (0x1CCD9, "M", "d"), + (0x1CCDA, "M", "e"), + (0x1CCDB, "M", "f"), + (0x1CCDC, "M", "g"), + (0x1CCDD, "M", "h"), + (0x1CCDE, "M", "i"), + (0x1CCDF, "M", "j"), + (0x1CCE0, "M", "k"), + (0x1CCE1, "M", "l"), + (0x1CCE2, "M", "m"), + (0x1CCE3, "M", "n"), + (0x1CCE4, "M", "o"), + (0x1CCE5, "M", "p"), + (0x1CCE6, "M", "q"), + (0x1CCE7, "M", "r"), + (0x1CCE8, "M", "s"), + (0x1CCE9, "M", "t"), + (0x1CCEA, "M", "u"), + (0x1CCEB, "M", "v"), + (0x1CCEC, "M", "w"), + (0x1CCED, "M", "x"), + (0x1CCEE, "M", "y"), + (0x1CCEF, "M", "z"), + (0x1CCF0, "M", "0"), + (0x1CCF1, "M", "1"), + (0x1CCF2, "M", "2"), + (0x1CCF3, "M", "3"), + (0x1CCF4, "M", "4"), + (0x1CCF5, "M", "5"), + (0x1CCF6, "M", "6"), + (0x1CCF7, "M", "7"), + (0x1CCF8, "M", "8"), + (0x1CCF9, "M", "9"), + (0x1CCFA, "X"), + (0x1CD00, "V"), + (0x1CEB4, "X"), + (0x1CF00, "V"), + (0x1CF2E, "X"), + (0x1CF30, "V"), + (0x1CF47, "X"), + (0x1CF50, "V"), + (0x1CFC4, "X"), + (0x1D000, "V"), + (0x1D0F6, "X"), + (0x1D100, "V"), + (0x1D127, "X"), + (0x1D129, "V"), + (0x1D15E, "M", "𝅗𝅥"), + (0x1D15F, "M", "𝅘𝅥"), + (0x1D160, "M", "𝅘𝅥𝅮"), + (0x1D161, "M", "𝅘𝅥𝅯"), + (0x1D162, "M", "𝅘𝅥𝅰"), + (0x1D163, "M", "𝅘𝅥𝅱"), + (0x1D164, "M", "𝅘𝅥𝅲"), + (0x1D165, "V"), + (0x1D173, "I"), + (0x1D17B, "V"), + (0x1D1BB, "M", "𝆹𝅥"), + (0x1D1BC, "M", "𝆺𝅥"), + (0x1D1BD, "M", "𝆹𝅥𝅮"), + (0x1D1BE, "M", "𝆺𝅥𝅮"), + (0x1D1BF, "M", "𝆹𝅥𝅯"), + (0x1D1C0, "M", "𝆺𝅥𝅯"), + (0x1D1C1, "V"), + (0x1D1EB, "X"), + (0x1D200, "V"), + (0x1D246, "X"), + ] + + +def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D2C0, "V"), + (0x1D2D4, "X"), + 
(0x1D2E0, "V"), + (0x1D2F4, "X"), + (0x1D300, "V"), + (0x1D357, "X"), + (0x1D360, "V"), + (0x1D379, "X"), + (0x1D400, "M", "a"), + (0x1D401, "M", "b"), + (0x1D402, "M", "c"), + (0x1D403, "M", "d"), + (0x1D404, "M", "e"), + (0x1D405, "M", "f"), + (0x1D406, "M", "g"), + (0x1D407, "M", "h"), + (0x1D408, "M", "i"), + (0x1D409, "M", "j"), + (0x1D40A, "M", "k"), + (0x1D40B, "M", "l"), + (0x1D40C, "M", "m"), + (0x1D40D, "M", "n"), + (0x1D40E, "M", "o"), + (0x1D40F, "M", "p"), + (0x1D410, "M", "q"), + (0x1D411, "M", "r"), + (0x1D412, "M", "s"), + (0x1D413, "M", "t"), + (0x1D414, "M", "u"), + (0x1D415, "M", "v"), + (0x1D416, "M", "w"), + (0x1D417, "M", "x"), + (0x1D418, "M", "y"), + (0x1D419, "M", "z"), + (0x1D41A, "M", "a"), + (0x1D41B, "M", "b"), + (0x1D41C, "M", "c"), + (0x1D41D, "M", "d"), + (0x1D41E, "M", "e"), + (0x1D41F, "M", "f"), + (0x1D420, "M", "g"), + (0x1D421, "M", "h"), + (0x1D422, "M", "i"), + (0x1D423, "M", "j"), + (0x1D424, "M", "k"), + (0x1D425, "M", "l"), + (0x1D426, "M", "m"), + (0x1D427, "M", "n"), + (0x1D428, "M", "o"), + (0x1D429, "M", "p"), + (0x1D42A, "M", "q"), + (0x1D42B, "M", "r"), + (0x1D42C, "M", "s"), + (0x1D42D, "M", "t"), + (0x1D42E, "M", "u"), + (0x1D42F, "M", "v"), + (0x1D430, "M", "w"), + (0x1D431, "M", "x"), + (0x1D432, "M", "y"), + (0x1D433, "M", "z"), + (0x1D434, "M", "a"), + (0x1D435, "M", "b"), + (0x1D436, "M", "c"), + (0x1D437, "M", "d"), + (0x1D438, "M", "e"), + (0x1D439, "M", "f"), + (0x1D43A, "M", "g"), + (0x1D43B, "M", "h"), + (0x1D43C, "M", "i"), + (0x1D43D, "M", "j"), + (0x1D43E, "M", "k"), + (0x1D43F, "M", "l"), + (0x1D440, "M", "m"), + (0x1D441, "M", "n"), + (0x1D442, "M", "o"), + (0x1D443, "M", "p"), + (0x1D444, "M", "q"), + (0x1D445, "M", "r"), + (0x1D446, "M", "s"), + (0x1D447, "M", "t"), + (0x1D448, "M", "u"), + (0x1D449, "M", "v"), + (0x1D44A, "M", "w"), + (0x1D44B, "M", "x"), + (0x1D44C, "M", "y"), + (0x1D44D, "M", "z"), + (0x1D44E, "M", "a"), + (0x1D44F, "M", "b"), + (0x1D450, "M", "c"), + (0x1D451, "M", "d"), + (0x1D452, "M", "e"), + (0x1D453, "M", "f"), + (0x1D454, "M", "g"), + (0x1D455, "X"), + (0x1D456, "M", "i"), + (0x1D457, "M", "j"), + (0x1D458, "M", "k"), + (0x1D459, "M", "l"), + (0x1D45A, "M", "m"), + (0x1D45B, "M", "n"), + ] + + +def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D45C, "M", "o"), + (0x1D45D, "M", "p"), + (0x1D45E, "M", "q"), + (0x1D45F, "M", "r"), + (0x1D460, "M", "s"), + (0x1D461, "M", "t"), + (0x1D462, "M", "u"), + (0x1D463, "M", "v"), + (0x1D464, "M", "w"), + (0x1D465, "M", "x"), + (0x1D466, "M", "y"), + (0x1D467, "M", "z"), + (0x1D468, "M", "a"), + (0x1D469, "M", "b"), + (0x1D46A, "M", "c"), + (0x1D46B, "M", "d"), + (0x1D46C, "M", "e"), + (0x1D46D, "M", "f"), + (0x1D46E, "M", "g"), + (0x1D46F, "M", "h"), + (0x1D470, "M", "i"), + (0x1D471, "M", "j"), + (0x1D472, "M", "k"), + (0x1D473, "M", "l"), + (0x1D474, "M", "m"), + (0x1D475, "M", "n"), + (0x1D476, "M", "o"), + (0x1D477, "M", "p"), + (0x1D478, "M", "q"), + (0x1D479, "M", "r"), + (0x1D47A, "M", "s"), + (0x1D47B, "M", "t"), + (0x1D47C, "M", "u"), + (0x1D47D, "M", "v"), + (0x1D47E, "M", "w"), + (0x1D47F, "M", "x"), + (0x1D480, "M", "y"), + (0x1D481, "M", "z"), + (0x1D482, "M", "a"), + (0x1D483, "M", "b"), + (0x1D484, "M", "c"), + (0x1D485, "M", "d"), + (0x1D486, "M", "e"), + (0x1D487, "M", "f"), + (0x1D488, "M", "g"), + (0x1D489, "M", "h"), + (0x1D48A, "M", "i"), + (0x1D48B, "M", "j"), + (0x1D48C, "M", "k"), + (0x1D48D, "M", "l"), + (0x1D48E, "M", "m"), + (0x1D48F, "M", "n"), + (0x1D490, "M", "o"), + (0x1D491, "M", "p"), + 
(0x1D492, "M", "q"), + (0x1D493, "M", "r"), + (0x1D494, "M", "s"), + (0x1D495, "M", "t"), + (0x1D496, "M", "u"), + (0x1D497, "M", "v"), + (0x1D498, "M", "w"), + (0x1D499, "M", "x"), + (0x1D49A, "M", "y"), + (0x1D49B, "M", "z"), + (0x1D49C, "M", "a"), + (0x1D49D, "X"), + (0x1D49E, "M", "c"), + (0x1D49F, "M", "d"), + (0x1D4A0, "X"), + (0x1D4A2, "M", "g"), + (0x1D4A3, "X"), + (0x1D4A5, "M", "j"), + (0x1D4A6, "M", "k"), + (0x1D4A7, "X"), + (0x1D4A9, "M", "n"), + (0x1D4AA, "M", "o"), + (0x1D4AB, "M", "p"), + (0x1D4AC, "M", "q"), + (0x1D4AD, "X"), + (0x1D4AE, "M", "s"), + (0x1D4AF, "M", "t"), + (0x1D4B0, "M", "u"), + (0x1D4B1, "M", "v"), + (0x1D4B2, "M", "w"), + (0x1D4B3, "M", "x"), + (0x1D4B4, "M", "y"), + (0x1D4B5, "M", "z"), + (0x1D4B6, "M", "a"), + (0x1D4B7, "M", "b"), + (0x1D4B8, "M", "c"), + (0x1D4B9, "M", "d"), + (0x1D4BA, "X"), + (0x1D4BB, "M", "f"), + (0x1D4BC, "X"), + (0x1D4BD, "M", "h"), + (0x1D4BE, "M", "i"), + (0x1D4BF, "M", "j"), + (0x1D4C0, "M", "k"), + (0x1D4C1, "M", "l"), + (0x1D4C2, "M", "m"), + ] + + +def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D4C3, "M", "n"), + (0x1D4C4, "X"), + (0x1D4C5, "M", "p"), + (0x1D4C6, "M", "q"), + (0x1D4C7, "M", "r"), + (0x1D4C8, "M", "s"), + (0x1D4C9, "M", "t"), + (0x1D4CA, "M", "u"), + (0x1D4CB, "M", "v"), + (0x1D4CC, "M", "w"), + (0x1D4CD, "M", "x"), + (0x1D4CE, "M", "y"), + (0x1D4CF, "M", "z"), + (0x1D4D0, "M", "a"), + (0x1D4D1, "M", "b"), + (0x1D4D2, "M", "c"), + (0x1D4D3, "M", "d"), + (0x1D4D4, "M", "e"), + (0x1D4D5, "M", "f"), + (0x1D4D6, "M", "g"), + (0x1D4D7, "M", "h"), + (0x1D4D8, "M", "i"), + (0x1D4D9, "M", "j"), + (0x1D4DA, "M", "k"), + (0x1D4DB, "M", "l"), + (0x1D4DC, "M", "m"), + (0x1D4DD, "M", "n"), + (0x1D4DE, "M", "o"), + (0x1D4DF, "M", "p"), + (0x1D4E0, "M", "q"), + (0x1D4E1, "M", "r"), + (0x1D4E2, "M", "s"), + (0x1D4E3, "M", "t"), + (0x1D4E4, "M", "u"), + (0x1D4E5, "M", "v"), + (0x1D4E6, "M", "w"), + (0x1D4E7, "M", "x"), + (0x1D4E8, "M", "y"), + (0x1D4E9, "M", "z"), + (0x1D4EA, "M", "a"), + (0x1D4EB, "M", "b"), + (0x1D4EC, "M", "c"), + (0x1D4ED, "M", "d"), + (0x1D4EE, "M", "e"), + (0x1D4EF, "M", "f"), + (0x1D4F0, "M", "g"), + (0x1D4F1, "M", "h"), + (0x1D4F2, "M", "i"), + (0x1D4F3, "M", "j"), + (0x1D4F4, "M", "k"), + (0x1D4F5, "M", "l"), + (0x1D4F6, "M", "m"), + (0x1D4F7, "M", "n"), + (0x1D4F8, "M", "o"), + (0x1D4F9, "M", "p"), + (0x1D4FA, "M", "q"), + (0x1D4FB, "M", "r"), + (0x1D4FC, "M", "s"), + (0x1D4FD, "M", "t"), + (0x1D4FE, "M", "u"), + (0x1D4FF, "M", "v"), + (0x1D500, "M", "w"), + (0x1D501, "M", "x"), + (0x1D502, "M", "y"), + (0x1D503, "M", "z"), + (0x1D504, "M", "a"), + (0x1D505, "M", "b"), + (0x1D506, "X"), + (0x1D507, "M", "d"), + (0x1D508, "M", "e"), + (0x1D509, "M", "f"), + (0x1D50A, "M", "g"), + (0x1D50B, "X"), + (0x1D50D, "M", "j"), + (0x1D50E, "M", "k"), + (0x1D50F, "M", "l"), + (0x1D510, "M", "m"), + (0x1D511, "M", "n"), + (0x1D512, "M", "o"), + (0x1D513, "M", "p"), + (0x1D514, "M", "q"), + (0x1D515, "X"), + (0x1D516, "M", "s"), + (0x1D517, "M", "t"), + (0x1D518, "M", "u"), + (0x1D519, "M", "v"), + (0x1D51A, "M", "w"), + (0x1D51B, "M", "x"), + (0x1D51C, "M", "y"), + (0x1D51D, "X"), + (0x1D51E, "M", "a"), + (0x1D51F, "M", "b"), + (0x1D520, "M", "c"), + (0x1D521, "M", "d"), + (0x1D522, "M", "e"), + (0x1D523, "M", "f"), + (0x1D524, "M", "g"), + (0x1D525, "M", "h"), + (0x1D526, "M", "i"), + (0x1D527, "M", "j"), + ] + + +def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D528, "M", "k"), + (0x1D529, "M", "l"), + (0x1D52A, "M", "m"), + (0x1D52B, 
"M", "n"), + (0x1D52C, "M", "o"), + (0x1D52D, "M", "p"), + (0x1D52E, "M", "q"), + (0x1D52F, "M", "r"), + (0x1D530, "M", "s"), + (0x1D531, "M", "t"), + (0x1D532, "M", "u"), + (0x1D533, "M", "v"), + (0x1D534, "M", "w"), + (0x1D535, "M", "x"), + (0x1D536, "M", "y"), + (0x1D537, "M", "z"), + (0x1D538, "M", "a"), + (0x1D539, "M", "b"), + (0x1D53A, "X"), + (0x1D53B, "M", "d"), + (0x1D53C, "M", "e"), + (0x1D53D, "M", "f"), + (0x1D53E, "M", "g"), + (0x1D53F, "X"), + (0x1D540, "M", "i"), + (0x1D541, "M", "j"), + (0x1D542, "M", "k"), + (0x1D543, "M", "l"), + (0x1D544, "M", "m"), + (0x1D545, "X"), + (0x1D546, "M", "o"), + (0x1D547, "X"), + (0x1D54A, "M", "s"), + (0x1D54B, "M", "t"), + (0x1D54C, "M", "u"), + (0x1D54D, "M", "v"), + (0x1D54E, "M", "w"), + (0x1D54F, "M", "x"), + (0x1D550, "M", "y"), + (0x1D551, "X"), + (0x1D552, "M", "a"), + (0x1D553, "M", "b"), + (0x1D554, "M", "c"), + (0x1D555, "M", "d"), + (0x1D556, "M", "e"), + (0x1D557, "M", "f"), + (0x1D558, "M", "g"), + (0x1D559, "M", "h"), + (0x1D55A, "M", "i"), + (0x1D55B, "M", "j"), + (0x1D55C, "M", "k"), + (0x1D55D, "M", "l"), + (0x1D55E, "M", "m"), + (0x1D55F, "M", "n"), + (0x1D560, "M", "o"), + (0x1D561, "M", "p"), + (0x1D562, "M", "q"), + (0x1D563, "M", "r"), + (0x1D564, "M", "s"), + (0x1D565, "M", "t"), + (0x1D566, "M", "u"), + (0x1D567, "M", "v"), + (0x1D568, "M", "w"), + (0x1D569, "M", "x"), + (0x1D56A, "M", "y"), + (0x1D56B, "M", "z"), + (0x1D56C, "M", "a"), + (0x1D56D, "M", "b"), + (0x1D56E, "M", "c"), + (0x1D56F, "M", "d"), + (0x1D570, "M", "e"), + (0x1D571, "M", "f"), + (0x1D572, "M", "g"), + (0x1D573, "M", "h"), + (0x1D574, "M", "i"), + (0x1D575, "M", "j"), + (0x1D576, "M", "k"), + (0x1D577, "M", "l"), + (0x1D578, "M", "m"), + (0x1D579, "M", "n"), + (0x1D57A, "M", "o"), + (0x1D57B, "M", "p"), + (0x1D57C, "M", "q"), + (0x1D57D, "M", "r"), + (0x1D57E, "M", "s"), + (0x1D57F, "M", "t"), + (0x1D580, "M", "u"), + (0x1D581, "M", "v"), + (0x1D582, "M", "w"), + (0x1D583, "M", "x"), + (0x1D584, "M", "y"), + (0x1D585, "M", "z"), + (0x1D586, "M", "a"), + (0x1D587, "M", "b"), + (0x1D588, "M", "c"), + (0x1D589, "M", "d"), + (0x1D58A, "M", "e"), + (0x1D58B, "M", "f"), + (0x1D58C, "M", "g"), + (0x1D58D, "M", "h"), + ] + + +def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D58E, "M", "i"), + (0x1D58F, "M", "j"), + (0x1D590, "M", "k"), + (0x1D591, "M", "l"), + (0x1D592, "M", "m"), + (0x1D593, "M", "n"), + (0x1D594, "M", "o"), + (0x1D595, "M", "p"), + (0x1D596, "M", "q"), + (0x1D597, "M", "r"), + (0x1D598, "M", "s"), + (0x1D599, "M", "t"), + (0x1D59A, "M", "u"), + (0x1D59B, "M", "v"), + (0x1D59C, "M", "w"), + (0x1D59D, "M", "x"), + (0x1D59E, "M", "y"), + (0x1D59F, "M", "z"), + (0x1D5A0, "M", "a"), + (0x1D5A1, "M", "b"), + (0x1D5A2, "M", "c"), + (0x1D5A3, "M", "d"), + (0x1D5A4, "M", "e"), + (0x1D5A5, "M", "f"), + (0x1D5A6, "M", "g"), + (0x1D5A7, "M", "h"), + (0x1D5A8, "M", "i"), + (0x1D5A9, "M", "j"), + (0x1D5AA, "M", "k"), + (0x1D5AB, "M", "l"), + (0x1D5AC, "M", "m"), + (0x1D5AD, "M", "n"), + (0x1D5AE, "M", "o"), + (0x1D5AF, "M", "p"), + (0x1D5B0, "M", "q"), + (0x1D5B1, "M", "r"), + (0x1D5B2, "M", "s"), + (0x1D5B3, "M", "t"), + (0x1D5B4, "M", "u"), + (0x1D5B5, "M", "v"), + (0x1D5B6, "M", "w"), + (0x1D5B7, "M", "x"), + (0x1D5B8, "M", "y"), + (0x1D5B9, "M", "z"), + (0x1D5BA, "M", "a"), + (0x1D5BB, "M", "b"), + (0x1D5BC, "M", "c"), + (0x1D5BD, "M", "d"), + (0x1D5BE, "M", "e"), + (0x1D5BF, "M", "f"), + (0x1D5C0, "M", "g"), + (0x1D5C1, "M", "h"), + (0x1D5C2, "M", "i"), + (0x1D5C3, "M", "j"), + (0x1D5C4, "M", "k"), + 
(0x1D5C5, "M", "l"), + (0x1D5C6, "M", "m"), + (0x1D5C7, "M", "n"), + (0x1D5C8, "M", "o"), + (0x1D5C9, "M", "p"), + (0x1D5CA, "M", "q"), + (0x1D5CB, "M", "r"), + (0x1D5CC, "M", "s"), + (0x1D5CD, "M", "t"), + (0x1D5CE, "M", "u"), + (0x1D5CF, "M", "v"), + (0x1D5D0, "M", "w"), + (0x1D5D1, "M", "x"), + (0x1D5D2, "M", "y"), + (0x1D5D3, "M", "z"), + (0x1D5D4, "M", "a"), + (0x1D5D5, "M", "b"), + (0x1D5D6, "M", "c"), + (0x1D5D7, "M", "d"), + (0x1D5D8, "M", "e"), + (0x1D5D9, "M", "f"), + (0x1D5DA, "M", "g"), + (0x1D5DB, "M", "h"), + (0x1D5DC, "M", "i"), + (0x1D5DD, "M", "j"), + (0x1D5DE, "M", "k"), + (0x1D5DF, "M", "l"), + (0x1D5E0, "M", "m"), + (0x1D5E1, "M", "n"), + (0x1D5E2, "M", "o"), + (0x1D5E3, "M", "p"), + (0x1D5E4, "M", "q"), + (0x1D5E5, "M", "r"), + (0x1D5E6, "M", "s"), + (0x1D5E7, "M", "t"), + (0x1D5E8, "M", "u"), + (0x1D5E9, "M", "v"), + (0x1D5EA, "M", "w"), + (0x1D5EB, "M", "x"), + (0x1D5EC, "M", "y"), + (0x1D5ED, "M", "z"), + (0x1D5EE, "M", "a"), + (0x1D5EF, "M", "b"), + (0x1D5F0, "M", "c"), + (0x1D5F1, "M", "d"), + ] + + +def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D5F2, "M", "e"), + (0x1D5F3, "M", "f"), + (0x1D5F4, "M", "g"), + (0x1D5F5, "M", "h"), + (0x1D5F6, "M", "i"), + (0x1D5F7, "M", "j"), + (0x1D5F8, "M", "k"), + (0x1D5F9, "M", "l"), + (0x1D5FA, "M", "m"), + (0x1D5FB, "M", "n"), + (0x1D5FC, "M", "o"), + (0x1D5FD, "M", "p"), + (0x1D5FE, "M", "q"), + (0x1D5FF, "M", "r"), + (0x1D600, "M", "s"), + (0x1D601, "M", "t"), + (0x1D602, "M", "u"), + (0x1D603, "M", "v"), + (0x1D604, "M", "w"), + (0x1D605, "M", "x"), + (0x1D606, "M", "y"), + (0x1D607, "M", "z"), + (0x1D608, "M", "a"), + (0x1D609, "M", "b"), + (0x1D60A, "M", "c"), + (0x1D60B, "M", "d"), + (0x1D60C, "M", "e"), + (0x1D60D, "M", "f"), + (0x1D60E, "M", "g"), + (0x1D60F, "M", "h"), + (0x1D610, "M", "i"), + (0x1D611, "M", "j"), + (0x1D612, "M", "k"), + (0x1D613, "M", "l"), + (0x1D614, "M", "m"), + (0x1D615, "M", "n"), + (0x1D616, "M", "o"), + (0x1D617, "M", "p"), + (0x1D618, "M", "q"), + (0x1D619, "M", "r"), + (0x1D61A, "M", "s"), + (0x1D61B, "M", "t"), + (0x1D61C, "M", "u"), + (0x1D61D, "M", "v"), + (0x1D61E, "M", "w"), + (0x1D61F, "M", "x"), + (0x1D620, "M", "y"), + (0x1D621, "M", "z"), + (0x1D622, "M", "a"), + (0x1D623, "M", "b"), + (0x1D624, "M", "c"), + (0x1D625, "M", "d"), + (0x1D626, "M", "e"), + (0x1D627, "M", "f"), + (0x1D628, "M", "g"), + (0x1D629, "M", "h"), + (0x1D62A, "M", "i"), + (0x1D62B, "M", "j"), + (0x1D62C, "M", "k"), + (0x1D62D, "M", "l"), + (0x1D62E, "M", "m"), + (0x1D62F, "M", "n"), + (0x1D630, "M", "o"), + (0x1D631, "M", "p"), + (0x1D632, "M", "q"), + (0x1D633, "M", "r"), + (0x1D634, "M", "s"), + (0x1D635, "M", "t"), + (0x1D636, "M", "u"), + (0x1D637, "M", "v"), + (0x1D638, "M", "w"), + (0x1D639, "M", "x"), + (0x1D63A, "M", "y"), + (0x1D63B, "M", "z"), + (0x1D63C, "M", "a"), + (0x1D63D, "M", "b"), + (0x1D63E, "M", "c"), + (0x1D63F, "M", "d"), + (0x1D640, "M", "e"), + (0x1D641, "M", "f"), + (0x1D642, "M", "g"), + (0x1D643, "M", "h"), + (0x1D644, "M", "i"), + (0x1D645, "M", "j"), + (0x1D646, "M", "k"), + (0x1D647, "M", "l"), + (0x1D648, "M", "m"), + (0x1D649, "M", "n"), + (0x1D64A, "M", "o"), + (0x1D64B, "M", "p"), + (0x1D64C, "M", "q"), + (0x1D64D, "M", "r"), + (0x1D64E, "M", "s"), + (0x1D64F, "M", "t"), + (0x1D650, "M", "u"), + (0x1D651, "M", "v"), + (0x1D652, "M", "w"), + (0x1D653, "M", "x"), + (0x1D654, "M", "y"), + (0x1D655, "M", "z"), + ] + + +def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D656, "M", "a"), + (0x1D657, "M", 
"b"), + (0x1D658, "M", "c"), + (0x1D659, "M", "d"), + (0x1D65A, "M", "e"), + (0x1D65B, "M", "f"), + (0x1D65C, "M", "g"), + (0x1D65D, "M", "h"), + (0x1D65E, "M", "i"), + (0x1D65F, "M", "j"), + (0x1D660, "M", "k"), + (0x1D661, "M", "l"), + (0x1D662, "M", "m"), + (0x1D663, "M", "n"), + (0x1D664, "M", "o"), + (0x1D665, "M", "p"), + (0x1D666, "M", "q"), + (0x1D667, "M", "r"), + (0x1D668, "M", "s"), + (0x1D669, "M", "t"), + (0x1D66A, "M", "u"), + (0x1D66B, "M", "v"), + (0x1D66C, "M", "w"), + (0x1D66D, "M", "x"), + (0x1D66E, "M", "y"), + (0x1D66F, "M", "z"), + (0x1D670, "M", "a"), + (0x1D671, "M", "b"), + (0x1D672, "M", "c"), + (0x1D673, "M", "d"), + (0x1D674, "M", "e"), + (0x1D675, "M", "f"), + (0x1D676, "M", "g"), + (0x1D677, "M", "h"), + (0x1D678, "M", "i"), + (0x1D679, "M", "j"), + (0x1D67A, "M", "k"), + (0x1D67B, "M", "l"), + (0x1D67C, "M", "m"), + (0x1D67D, "M", "n"), + (0x1D67E, "M", "o"), + (0x1D67F, "M", "p"), + (0x1D680, "M", "q"), + (0x1D681, "M", "r"), + (0x1D682, "M", "s"), + (0x1D683, "M", "t"), + (0x1D684, "M", "u"), + (0x1D685, "M", "v"), + (0x1D686, "M", "w"), + (0x1D687, "M", "x"), + (0x1D688, "M", "y"), + (0x1D689, "M", "z"), + (0x1D68A, "M", "a"), + (0x1D68B, "M", "b"), + (0x1D68C, "M", "c"), + (0x1D68D, "M", "d"), + (0x1D68E, "M", "e"), + (0x1D68F, "M", "f"), + (0x1D690, "M", "g"), + (0x1D691, "M", "h"), + (0x1D692, "M", "i"), + (0x1D693, "M", "j"), + (0x1D694, "M", "k"), + (0x1D695, "M", "l"), + (0x1D696, "M", "m"), + (0x1D697, "M", "n"), + (0x1D698, "M", "o"), + (0x1D699, "M", "p"), + (0x1D69A, "M", "q"), + (0x1D69B, "M", "r"), + (0x1D69C, "M", "s"), + (0x1D69D, "M", "t"), + (0x1D69E, "M", "u"), + (0x1D69F, "M", "v"), + (0x1D6A0, "M", "w"), + (0x1D6A1, "M", "x"), + (0x1D6A2, "M", "y"), + (0x1D6A3, "M", "z"), + (0x1D6A4, "M", "ı"), + (0x1D6A5, "M", "ȷ"), + (0x1D6A6, "X"), + (0x1D6A8, "M", "α"), + (0x1D6A9, "M", "β"), + (0x1D6AA, "M", "γ"), + (0x1D6AB, "M", "δ"), + (0x1D6AC, "M", "ε"), + (0x1D6AD, "M", "ζ"), + (0x1D6AE, "M", "η"), + (0x1D6AF, "M", "θ"), + (0x1D6B0, "M", "ι"), + (0x1D6B1, "M", "κ"), + (0x1D6B2, "M", "λ"), + (0x1D6B3, "M", "μ"), + (0x1D6B4, "M", "ν"), + (0x1D6B5, "M", "ξ"), + (0x1D6B6, "M", "ο"), + (0x1D6B7, "M", "π"), + (0x1D6B8, "M", "ρ"), + (0x1D6B9, "M", "θ"), + (0x1D6BA, "M", "σ"), + ] + + +def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D6BB, "M", "τ"), + (0x1D6BC, "M", "υ"), + (0x1D6BD, "M", "φ"), + (0x1D6BE, "M", "χ"), + (0x1D6BF, "M", "ψ"), + (0x1D6C0, "M", "ω"), + (0x1D6C1, "M", "∇"), + (0x1D6C2, "M", "α"), + (0x1D6C3, "M", "β"), + (0x1D6C4, "M", "γ"), + (0x1D6C5, "M", "δ"), + (0x1D6C6, "M", "ε"), + (0x1D6C7, "M", "ζ"), + (0x1D6C8, "M", "η"), + (0x1D6C9, "M", "θ"), + (0x1D6CA, "M", "ι"), + (0x1D6CB, "M", "κ"), + (0x1D6CC, "M", "λ"), + (0x1D6CD, "M", "μ"), + (0x1D6CE, "M", "ν"), + (0x1D6CF, "M", "ξ"), + (0x1D6D0, "M", "ο"), + (0x1D6D1, "M", "π"), + (0x1D6D2, "M", "ρ"), + (0x1D6D3, "M", "σ"), + (0x1D6D5, "M", "τ"), + (0x1D6D6, "M", "υ"), + (0x1D6D7, "M", "φ"), + (0x1D6D8, "M", "χ"), + (0x1D6D9, "M", "ψ"), + (0x1D6DA, "M", "ω"), + (0x1D6DB, "M", "∂"), + (0x1D6DC, "M", "ε"), + (0x1D6DD, "M", "θ"), + (0x1D6DE, "M", "κ"), + (0x1D6DF, "M", "φ"), + (0x1D6E0, "M", "ρ"), + (0x1D6E1, "M", "π"), + (0x1D6E2, "M", "α"), + (0x1D6E3, "M", "β"), + (0x1D6E4, "M", "γ"), + (0x1D6E5, "M", "δ"), + (0x1D6E6, "M", "ε"), + (0x1D6E7, "M", "ζ"), + (0x1D6E8, "M", "η"), + (0x1D6E9, "M", "θ"), + (0x1D6EA, "M", "ι"), + (0x1D6EB, "M", "κ"), + (0x1D6EC, "M", "λ"), + (0x1D6ED, "M", "μ"), + (0x1D6EE, "M", "ν"), + (0x1D6EF, "M", "ξ"), + (0x1D6F0, 
"M", "ο"), + (0x1D6F1, "M", "π"), + (0x1D6F2, "M", "ρ"), + (0x1D6F3, "M", "θ"), + (0x1D6F4, "M", "σ"), + (0x1D6F5, "M", "τ"), + (0x1D6F6, "M", "υ"), + (0x1D6F7, "M", "φ"), + (0x1D6F8, "M", "χ"), + (0x1D6F9, "M", "ψ"), + (0x1D6FA, "M", "ω"), + (0x1D6FB, "M", "∇"), + (0x1D6FC, "M", "α"), + (0x1D6FD, "M", "β"), + (0x1D6FE, "M", "γ"), + (0x1D6FF, "M", "δ"), + (0x1D700, "M", "ε"), + (0x1D701, "M", "ζ"), + (0x1D702, "M", "η"), + (0x1D703, "M", "θ"), + (0x1D704, "M", "ι"), + (0x1D705, "M", "κ"), + (0x1D706, "M", "λ"), + (0x1D707, "M", "μ"), + (0x1D708, "M", "ν"), + (0x1D709, "M", "ξ"), + (0x1D70A, "M", "ο"), + (0x1D70B, "M", "π"), + (0x1D70C, "M", "ρ"), + (0x1D70D, "M", "σ"), + (0x1D70F, "M", "τ"), + (0x1D710, "M", "υ"), + (0x1D711, "M", "φ"), + (0x1D712, "M", "χ"), + (0x1D713, "M", "ψ"), + (0x1D714, "M", "ω"), + (0x1D715, "M", "∂"), + (0x1D716, "M", "ε"), + (0x1D717, "M", "θ"), + (0x1D718, "M", "κ"), + (0x1D719, "M", "φ"), + (0x1D71A, "M", "ρ"), + (0x1D71B, "M", "π"), + (0x1D71C, "M", "α"), + (0x1D71D, "M", "β"), + (0x1D71E, "M", "γ"), + (0x1D71F, "M", "δ"), + (0x1D720, "M", "ε"), + ] + + +def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D721, "M", "ζ"), + (0x1D722, "M", "η"), + (0x1D723, "M", "θ"), + (0x1D724, "M", "ι"), + (0x1D725, "M", "κ"), + (0x1D726, "M", "λ"), + (0x1D727, "M", "μ"), + (0x1D728, "M", "ν"), + (0x1D729, "M", "ξ"), + (0x1D72A, "M", "ο"), + (0x1D72B, "M", "π"), + (0x1D72C, "M", "ρ"), + (0x1D72D, "M", "θ"), + (0x1D72E, "M", "σ"), + (0x1D72F, "M", "τ"), + (0x1D730, "M", "υ"), + (0x1D731, "M", "φ"), + (0x1D732, "M", "χ"), + (0x1D733, "M", "ψ"), + (0x1D734, "M", "ω"), + (0x1D735, "M", "∇"), + (0x1D736, "M", "α"), + (0x1D737, "M", "β"), + (0x1D738, "M", "γ"), + (0x1D739, "M", "δ"), + (0x1D73A, "M", "ε"), + (0x1D73B, "M", "ζ"), + (0x1D73C, "M", "η"), + (0x1D73D, "M", "θ"), + (0x1D73E, "M", "ι"), + (0x1D73F, "M", "κ"), + (0x1D740, "M", "λ"), + (0x1D741, "M", "μ"), + (0x1D742, "M", "ν"), + (0x1D743, "M", "ξ"), + (0x1D744, "M", "ο"), + (0x1D745, "M", "π"), + (0x1D746, "M", "ρ"), + (0x1D747, "M", "σ"), + (0x1D749, "M", "τ"), + (0x1D74A, "M", "υ"), + (0x1D74B, "M", "φ"), + (0x1D74C, "M", "χ"), + (0x1D74D, "M", "ψ"), + (0x1D74E, "M", "ω"), + (0x1D74F, "M", "∂"), + (0x1D750, "M", "ε"), + (0x1D751, "M", "θ"), + (0x1D752, "M", "κ"), + (0x1D753, "M", "φ"), + (0x1D754, "M", "ρ"), + (0x1D755, "M", "π"), + (0x1D756, "M", "α"), + (0x1D757, "M", "β"), + (0x1D758, "M", "γ"), + (0x1D759, "M", "δ"), + (0x1D75A, "M", "ε"), + (0x1D75B, "M", "ζ"), + (0x1D75C, "M", "η"), + (0x1D75D, "M", "θ"), + (0x1D75E, "M", "ι"), + (0x1D75F, "M", "κ"), + (0x1D760, "M", "λ"), + (0x1D761, "M", "μ"), + (0x1D762, "M", "ν"), + (0x1D763, "M", "ξ"), + (0x1D764, "M", "ο"), + (0x1D765, "M", "π"), + (0x1D766, "M", "ρ"), + (0x1D767, "M", "θ"), + (0x1D768, "M", "σ"), + (0x1D769, "M", "τ"), + (0x1D76A, "M", "υ"), + (0x1D76B, "M", "φ"), + (0x1D76C, "M", "χ"), + (0x1D76D, "M", "ψ"), + (0x1D76E, "M", "ω"), + (0x1D76F, "M", "∇"), + (0x1D770, "M", "α"), + (0x1D771, "M", "β"), + (0x1D772, "M", "γ"), + (0x1D773, "M", "δ"), + (0x1D774, "M", "ε"), + (0x1D775, "M", "ζ"), + (0x1D776, "M", "η"), + (0x1D777, "M", "θ"), + (0x1D778, "M", "ι"), + (0x1D779, "M", "κ"), + (0x1D77A, "M", "λ"), + (0x1D77B, "M", "μ"), + (0x1D77C, "M", "ν"), + (0x1D77D, "M", "ξ"), + (0x1D77E, "M", "ο"), + (0x1D77F, "M", "π"), + (0x1D780, "M", "ρ"), + (0x1D781, "M", "σ"), + (0x1D783, "M", "τ"), + (0x1D784, "M", "υ"), + (0x1D785, "M", "φ"), + (0x1D786, "M", "χ"), + ] + + +def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, 
str]]]: + return [ + (0x1D787, "M", "ψ"), + (0x1D788, "M", "ω"), + (0x1D789, "M", "∂"), + (0x1D78A, "M", "ε"), + (0x1D78B, "M", "θ"), + (0x1D78C, "M", "κ"), + (0x1D78D, "M", "φ"), + (0x1D78E, "M", "ρ"), + (0x1D78F, "M", "π"), + (0x1D790, "M", "α"), + (0x1D791, "M", "β"), + (0x1D792, "M", "γ"), + (0x1D793, "M", "δ"), + (0x1D794, "M", "ε"), + (0x1D795, "M", "ζ"), + (0x1D796, "M", "η"), + (0x1D797, "M", "θ"), + (0x1D798, "M", "ι"), + (0x1D799, "M", "κ"), + (0x1D79A, "M", "λ"), + (0x1D79B, "M", "μ"), + (0x1D79C, "M", "ν"), + (0x1D79D, "M", "ξ"), + (0x1D79E, "M", "ο"), + (0x1D79F, "M", "π"), + (0x1D7A0, "M", "ρ"), + (0x1D7A1, "M", "θ"), + (0x1D7A2, "M", "σ"), + (0x1D7A3, "M", "τ"), + (0x1D7A4, "M", "υ"), + (0x1D7A5, "M", "φ"), + (0x1D7A6, "M", "χ"), + (0x1D7A7, "M", "ψ"), + (0x1D7A8, "M", "ω"), + (0x1D7A9, "M", "∇"), + (0x1D7AA, "M", "α"), + (0x1D7AB, "M", "β"), + (0x1D7AC, "M", "γ"), + (0x1D7AD, "M", "δ"), + (0x1D7AE, "M", "ε"), + (0x1D7AF, "M", "ζ"), + (0x1D7B0, "M", "η"), + (0x1D7B1, "M", "θ"), + (0x1D7B2, "M", "ι"), + (0x1D7B3, "M", "κ"), + (0x1D7B4, "M", "λ"), + (0x1D7B5, "M", "μ"), + (0x1D7B6, "M", "ν"), + (0x1D7B7, "M", "ξ"), + (0x1D7B8, "M", "ο"), + (0x1D7B9, "M", "π"), + (0x1D7BA, "M", "ρ"), + (0x1D7BB, "M", "σ"), + (0x1D7BD, "M", "τ"), + (0x1D7BE, "M", "υ"), + (0x1D7BF, "M", "φ"), + (0x1D7C0, "M", "χ"), + (0x1D7C1, "M", "ψ"), + (0x1D7C2, "M", "ω"), + (0x1D7C3, "M", "∂"), + (0x1D7C4, "M", "ε"), + (0x1D7C5, "M", "θ"), + (0x1D7C6, "M", "κ"), + (0x1D7C7, "M", "φ"), + (0x1D7C8, "M", "ρ"), + (0x1D7C9, "M", "π"), + (0x1D7CA, "M", "ϝ"), + (0x1D7CC, "X"), + (0x1D7CE, "M", "0"), + (0x1D7CF, "M", "1"), + (0x1D7D0, "M", "2"), + (0x1D7D1, "M", "3"), + (0x1D7D2, "M", "4"), + (0x1D7D3, "M", "5"), + (0x1D7D4, "M", "6"), + (0x1D7D5, "M", "7"), + (0x1D7D6, "M", "8"), + (0x1D7D7, "M", "9"), + (0x1D7D8, "M", "0"), + (0x1D7D9, "M", "1"), + (0x1D7DA, "M", "2"), + (0x1D7DB, "M", "3"), + (0x1D7DC, "M", "4"), + (0x1D7DD, "M", "5"), + (0x1D7DE, "M", "6"), + (0x1D7DF, "M", "7"), + (0x1D7E0, "M", "8"), + (0x1D7E1, "M", "9"), + (0x1D7E2, "M", "0"), + (0x1D7E3, "M", "1"), + (0x1D7E4, "M", "2"), + (0x1D7E5, "M", "3"), + (0x1D7E6, "M", "4"), + (0x1D7E7, "M", "5"), + (0x1D7E8, "M", "6"), + (0x1D7E9, "M", "7"), + (0x1D7EA, "M", "8"), + (0x1D7EB, "M", "9"), + (0x1D7EC, "M", "0"), + (0x1D7ED, "M", "1"), + ] + + +def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D7EE, "M", "2"), + (0x1D7EF, "M", "3"), + (0x1D7F0, "M", "4"), + (0x1D7F1, "M", "5"), + (0x1D7F2, "M", "6"), + (0x1D7F3, "M", "7"), + (0x1D7F4, "M", "8"), + (0x1D7F5, "M", "9"), + (0x1D7F6, "M", "0"), + (0x1D7F7, "M", "1"), + (0x1D7F8, "M", "2"), + (0x1D7F9, "M", "3"), + (0x1D7FA, "M", "4"), + (0x1D7FB, "M", "5"), + (0x1D7FC, "M", "6"), + (0x1D7FD, "M", "7"), + (0x1D7FE, "M", "8"), + (0x1D7FF, "M", "9"), + (0x1D800, "V"), + (0x1DA8C, "X"), + (0x1DA9B, "V"), + (0x1DAA0, "X"), + (0x1DAA1, "V"), + (0x1DAB0, "X"), + (0x1DF00, "V"), + (0x1DF1F, "X"), + (0x1DF25, "V"), + (0x1DF2B, "X"), + (0x1E000, "V"), + (0x1E007, "X"), + (0x1E008, "V"), + (0x1E019, "X"), + (0x1E01B, "V"), + (0x1E022, "X"), + (0x1E023, "V"), + (0x1E025, "X"), + (0x1E026, "V"), + (0x1E02B, "X"), + (0x1E030, "M", "а"), + (0x1E031, "M", "б"), + (0x1E032, "M", "в"), + (0x1E033, "M", "г"), + (0x1E034, "M", "д"), + (0x1E035, "M", "е"), + (0x1E036, "M", "ж"), + (0x1E037, "M", "з"), + (0x1E038, "M", "и"), + (0x1E039, "M", "к"), + (0x1E03A, "M", "л"), + (0x1E03B, "M", "м"), + (0x1E03C, "M", "о"), + (0x1E03D, "M", "п"), + (0x1E03E, "M", "р"), + (0x1E03F, "M", "с"), + 
(0x1E040, "M", "т"), + (0x1E041, "M", "у"), + (0x1E042, "M", "ф"), + (0x1E043, "M", "х"), + (0x1E044, "M", "ц"), + (0x1E045, "M", "ч"), + (0x1E046, "M", "ш"), + (0x1E047, "M", "ы"), + (0x1E048, "M", "э"), + (0x1E049, "M", "ю"), + (0x1E04A, "M", "ꚉ"), + (0x1E04B, "M", "ә"), + (0x1E04C, "M", "і"), + (0x1E04D, "M", "ј"), + (0x1E04E, "M", "ө"), + (0x1E04F, "M", "ү"), + (0x1E050, "M", "ӏ"), + (0x1E051, "M", "а"), + (0x1E052, "M", "б"), + (0x1E053, "M", "в"), + (0x1E054, "M", "г"), + (0x1E055, "M", "д"), + (0x1E056, "M", "е"), + (0x1E057, "M", "ж"), + (0x1E058, "M", "з"), + (0x1E059, "M", "и"), + (0x1E05A, "M", "к"), + (0x1E05B, "M", "л"), + (0x1E05C, "M", "о"), + (0x1E05D, "M", "п"), + (0x1E05E, "M", "с"), + (0x1E05F, "M", "у"), + (0x1E060, "M", "ф"), + (0x1E061, "M", "х"), + (0x1E062, "M", "ц"), + (0x1E063, "M", "ч"), + (0x1E064, "M", "ш"), + (0x1E065, "M", "ъ"), + (0x1E066, "M", "ы"), + (0x1E067, "M", "ґ"), + (0x1E068, "M", "і"), + (0x1E069, "M", "ѕ"), + (0x1E06A, "M", "џ"), + (0x1E06B, "M", "ҫ"), + (0x1E06C, "M", "ꙑ"), + (0x1E06D, "M", "ұ"), + ] + + +def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E06E, "X"), + (0x1E08F, "V"), + (0x1E090, "X"), + (0x1E100, "V"), + (0x1E12D, "X"), + (0x1E130, "V"), + (0x1E13E, "X"), + (0x1E140, "V"), + (0x1E14A, "X"), + (0x1E14E, "V"), + (0x1E150, "X"), + (0x1E290, "V"), + (0x1E2AF, "X"), + (0x1E2C0, "V"), + (0x1E2FA, "X"), + (0x1E2FF, "V"), + (0x1E300, "X"), + (0x1E4D0, "V"), + (0x1E4FA, "X"), + (0x1E5D0, "V"), + (0x1E5FB, "X"), + (0x1E5FF, "V"), + (0x1E600, "X"), + (0x1E7E0, "V"), + (0x1E7E7, "X"), + (0x1E7E8, "V"), + (0x1E7EC, "X"), + (0x1E7ED, "V"), + (0x1E7EF, "X"), + (0x1E7F0, "V"), + (0x1E7FF, "X"), + (0x1E800, "V"), + (0x1E8C5, "X"), + (0x1E8C7, "V"), + (0x1E8D7, "X"), + (0x1E900, "M", "𞤢"), + (0x1E901, "M", "𞤣"), + (0x1E902, "M", "𞤤"), + (0x1E903, "M", "𞤥"), + (0x1E904, "M", "𞤦"), + (0x1E905, "M", "𞤧"), + (0x1E906, "M", "𞤨"), + (0x1E907, "M", "𞤩"), + (0x1E908, "M", "𞤪"), + (0x1E909, "M", "𞤫"), + (0x1E90A, "M", "𞤬"), + (0x1E90B, "M", "𞤭"), + (0x1E90C, "M", "𞤮"), + (0x1E90D, "M", "𞤯"), + (0x1E90E, "M", "𞤰"), + (0x1E90F, "M", "𞤱"), + (0x1E910, "M", "𞤲"), + (0x1E911, "M", "𞤳"), + (0x1E912, "M", "𞤴"), + (0x1E913, "M", "𞤵"), + (0x1E914, "M", "𞤶"), + (0x1E915, "M", "𞤷"), + (0x1E916, "M", "𞤸"), + (0x1E917, "M", "𞤹"), + (0x1E918, "M", "𞤺"), + (0x1E919, "M", "𞤻"), + (0x1E91A, "M", "𞤼"), + (0x1E91B, "M", "𞤽"), + (0x1E91C, "M", "𞤾"), + (0x1E91D, "M", "𞤿"), + (0x1E91E, "M", "𞥀"), + (0x1E91F, "M", "𞥁"), + (0x1E920, "M", "𞥂"), + (0x1E921, "M", "𞥃"), + (0x1E922, "V"), + (0x1E94C, "X"), + (0x1E950, "V"), + (0x1E95A, "X"), + (0x1E95E, "V"), + (0x1E960, "X"), + (0x1EC71, "V"), + (0x1ECB5, "X"), + (0x1ED01, "V"), + (0x1ED3E, "X"), + (0x1EE00, "M", "ا"), + (0x1EE01, "M", "ب"), + (0x1EE02, "M", "ج"), + (0x1EE03, "M", "د"), + (0x1EE04, "X"), + (0x1EE05, "M", "و"), + (0x1EE06, "M", "ز"), + (0x1EE07, "M", "ح"), + (0x1EE08, "M", "ط"), + (0x1EE09, "M", "ي"), + (0x1EE0A, "M", "ك"), + (0x1EE0B, "M", "ل"), + (0x1EE0C, "M", "م"), + (0x1EE0D, "M", "ن"), + (0x1EE0E, "M", "س"), + (0x1EE0F, "M", "ع"), + (0x1EE10, "M", "ف"), + (0x1EE11, "M", "ص"), + (0x1EE12, "M", "ق"), + (0x1EE13, "M", "ر"), + (0x1EE14, "M", "ش"), + ] + + +def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EE15, "M", "ت"), + (0x1EE16, "M", "ث"), + (0x1EE17, "M", "خ"), + (0x1EE18, "M", "ذ"), + (0x1EE19, "M", "ض"), + (0x1EE1A, "M", "ظ"), + (0x1EE1B, "M", "غ"), + (0x1EE1C, "M", "ٮ"), + (0x1EE1D, "M", "ں"), + (0x1EE1E, "M", "ڡ"), + (0x1EE1F, "M", 
"ٯ"), + (0x1EE20, "X"), + (0x1EE21, "M", "ب"), + (0x1EE22, "M", "ج"), + (0x1EE23, "X"), + (0x1EE24, "M", "ه"), + (0x1EE25, "X"), + (0x1EE27, "M", "ح"), + (0x1EE28, "X"), + (0x1EE29, "M", "ي"), + (0x1EE2A, "M", "ك"), + (0x1EE2B, "M", "ل"), + (0x1EE2C, "M", "م"), + (0x1EE2D, "M", "ن"), + (0x1EE2E, "M", "س"), + (0x1EE2F, "M", "ع"), + (0x1EE30, "M", "ف"), + (0x1EE31, "M", "ص"), + (0x1EE32, "M", "ق"), + (0x1EE33, "X"), + (0x1EE34, "M", "ش"), + (0x1EE35, "M", "ت"), + (0x1EE36, "M", "ث"), + (0x1EE37, "M", "خ"), + (0x1EE38, "X"), + (0x1EE39, "M", "ض"), + (0x1EE3A, "X"), + (0x1EE3B, "M", "غ"), + (0x1EE3C, "X"), + (0x1EE42, "M", "ج"), + (0x1EE43, "X"), + (0x1EE47, "M", "ح"), + (0x1EE48, "X"), + (0x1EE49, "M", "ي"), + (0x1EE4A, "X"), + (0x1EE4B, "M", "ل"), + (0x1EE4C, "X"), + (0x1EE4D, "M", "ن"), + (0x1EE4E, "M", "س"), + (0x1EE4F, "M", "ع"), + (0x1EE50, "X"), + (0x1EE51, "M", "ص"), + (0x1EE52, "M", "ق"), + (0x1EE53, "X"), + (0x1EE54, "M", "ش"), + (0x1EE55, "X"), + (0x1EE57, "M", "خ"), + (0x1EE58, "X"), + (0x1EE59, "M", "ض"), + (0x1EE5A, "X"), + (0x1EE5B, "M", "غ"), + (0x1EE5C, "X"), + (0x1EE5D, "M", "ں"), + (0x1EE5E, "X"), + (0x1EE5F, "M", "ٯ"), + (0x1EE60, "X"), + (0x1EE61, "M", "ب"), + (0x1EE62, "M", "ج"), + (0x1EE63, "X"), + (0x1EE64, "M", "ه"), + (0x1EE65, "X"), + (0x1EE67, "M", "ح"), + (0x1EE68, "M", "ط"), + (0x1EE69, "M", "ي"), + (0x1EE6A, "M", "ك"), + (0x1EE6B, "X"), + (0x1EE6C, "M", "م"), + (0x1EE6D, "M", "ن"), + (0x1EE6E, "M", "س"), + (0x1EE6F, "M", "ع"), + (0x1EE70, "M", "ف"), + (0x1EE71, "M", "ص"), + (0x1EE72, "M", "ق"), + (0x1EE73, "X"), + (0x1EE74, "M", "ش"), + (0x1EE75, "M", "ت"), + (0x1EE76, "M", "ث"), + (0x1EE77, "M", "خ"), + (0x1EE78, "X"), + (0x1EE79, "M", "ض"), + (0x1EE7A, "M", "ظ"), + (0x1EE7B, "M", "غ"), + (0x1EE7C, "M", "ٮ"), + (0x1EE7D, "X"), + (0x1EE7E, "M", "ڡ"), + (0x1EE7F, "X"), + (0x1EE80, "M", "ا"), + (0x1EE81, "M", "ب"), + (0x1EE82, "M", "ج"), + (0x1EE83, "M", "د"), + ] + + +def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EE84, "M", "ه"), + (0x1EE85, "M", "و"), + (0x1EE86, "M", "ز"), + (0x1EE87, "M", "ح"), + (0x1EE88, "M", "ط"), + (0x1EE89, "M", "ي"), + (0x1EE8A, "X"), + (0x1EE8B, "M", "ل"), + (0x1EE8C, "M", "م"), + (0x1EE8D, "M", "ن"), + (0x1EE8E, "M", "س"), + (0x1EE8F, "M", "ع"), + (0x1EE90, "M", "ف"), + (0x1EE91, "M", "ص"), + (0x1EE92, "M", "ق"), + (0x1EE93, "M", "ر"), + (0x1EE94, "M", "ش"), + (0x1EE95, "M", "ت"), + (0x1EE96, "M", "ث"), + (0x1EE97, "M", "خ"), + (0x1EE98, "M", "ذ"), + (0x1EE99, "M", "ض"), + (0x1EE9A, "M", "ظ"), + (0x1EE9B, "M", "غ"), + (0x1EE9C, "X"), + (0x1EEA1, "M", "ب"), + (0x1EEA2, "M", "ج"), + (0x1EEA3, "M", "د"), + (0x1EEA4, "X"), + (0x1EEA5, "M", "و"), + (0x1EEA6, "M", "ز"), + (0x1EEA7, "M", "ح"), + (0x1EEA8, "M", "ط"), + (0x1EEA9, "M", "ي"), + (0x1EEAA, "X"), + (0x1EEAB, "M", "ل"), + (0x1EEAC, "M", "م"), + (0x1EEAD, "M", "ن"), + (0x1EEAE, "M", "س"), + (0x1EEAF, "M", "ع"), + (0x1EEB0, "M", "ف"), + (0x1EEB1, "M", "ص"), + (0x1EEB2, "M", "ق"), + (0x1EEB3, "M", "ر"), + (0x1EEB4, "M", "ش"), + (0x1EEB5, "M", "ت"), + (0x1EEB6, "M", "ث"), + (0x1EEB7, "M", "خ"), + (0x1EEB8, "M", "ذ"), + (0x1EEB9, "M", "ض"), + (0x1EEBA, "M", "ظ"), + (0x1EEBB, "M", "غ"), + (0x1EEBC, "X"), + (0x1EEF0, "V"), + (0x1EEF2, "X"), + (0x1F000, "V"), + (0x1F02C, "X"), + (0x1F030, "V"), + (0x1F094, "X"), + (0x1F0A0, "V"), + (0x1F0AF, "X"), + (0x1F0B1, "V"), + (0x1F0C0, "X"), + (0x1F0C1, "V"), + (0x1F0D0, "X"), + (0x1F0D1, "V"), + (0x1F0F6, "X"), + (0x1F101, "M", "0,"), + (0x1F102, "M", "1,"), + (0x1F103, "M", "2,"), + (0x1F104, "M", "3,"), + 
(0x1F105, "M", "4,"), + (0x1F106, "M", "5,"), + (0x1F107, "M", "6,"), + (0x1F108, "M", "7,"), + (0x1F109, "M", "8,"), + (0x1F10A, "M", "9,"), + (0x1F10B, "V"), + (0x1F110, "M", "(a)"), + (0x1F111, "M", "(b)"), + (0x1F112, "M", "(c)"), + (0x1F113, "M", "(d)"), + (0x1F114, "M", "(e)"), + (0x1F115, "M", "(f)"), + (0x1F116, "M", "(g)"), + (0x1F117, "M", "(h)"), + (0x1F118, "M", "(i)"), + (0x1F119, "M", "(j)"), + (0x1F11A, "M", "(k)"), + (0x1F11B, "M", "(l)"), + (0x1F11C, "M", "(m)"), + (0x1F11D, "M", "(n)"), + (0x1F11E, "M", "(o)"), + (0x1F11F, "M", "(p)"), + (0x1F120, "M", "(q)"), + (0x1F121, "M", "(r)"), + (0x1F122, "M", "(s)"), + (0x1F123, "M", "(t)"), + (0x1F124, "M", "(u)"), + (0x1F125, "M", "(v)"), + ] + + +def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F126, "M", "(w)"), + (0x1F127, "M", "(x)"), + (0x1F128, "M", "(y)"), + (0x1F129, "M", "(z)"), + (0x1F12A, "M", "〔s〕"), + (0x1F12B, "M", "c"), + (0x1F12C, "M", "r"), + (0x1F12D, "M", "cd"), + (0x1F12E, "M", "wz"), + (0x1F12F, "V"), + (0x1F130, "M", "a"), + (0x1F131, "M", "b"), + (0x1F132, "M", "c"), + (0x1F133, "M", "d"), + (0x1F134, "M", "e"), + (0x1F135, "M", "f"), + (0x1F136, "M", "g"), + (0x1F137, "M", "h"), + (0x1F138, "M", "i"), + (0x1F139, "M", "j"), + (0x1F13A, "M", "k"), + (0x1F13B, "M", "l"), + (0x1F13C, "M", "m"), + (0x1F13D, "M", "n"), + (0x1F13E, "M", "o"), + (0x1F13F, "M", "p"), + (0x1F140, "M", "q"), + (0x1F141, "M", "r"), + (0x1F142, "M", "s"), + (0x1F143, "M", "t"), + (0x1F144, "M", "u"), + (0x1F145, "M", "v"), + (0x1F146, "M", "w"), + (0x1F147, "M", "x"), + (0x1F148, "M", "y"), + (0x1F149, "M", "z"), + (0x1F14A, "M", "hv"), + (0x1F14B, "M", "mv"), + (0x1F14C, "M", "sd"), + (0x1F14D, "M", "ss"), + (0x1F14E, "M", "ppv"), + (0x1F14F, "M", "wc"), + (0x1F150, "V"), + (0x1F16A, "M", "mc"), + (0x1F16B, "M", "md"), + (0x1F16C, "M", "mr"), + (0x1F16D, "V"), + (0x1F190, "M", "dj"), + (0x1F191, "V"), + (0x1F1AE, "X"), + (0x1F1E6, "V"), + (0x1F200, "M", "ほか"), + (0x1F201, "M", "ココ"), + (0x1F202, "M", "サ"), + (0x1F203, "X"), + (0x1F210, "M", "手"), + (0x1F211, "M", "字"), + (0x1F212, "M", "双"), + (0x1F213, "M", "デ"), + (0x1F214, "M", "二"), + (0x1F215, "M", "多"), + (0x1F216, "M", "解"), + (0x1F217, "M", "天"), + (0x1F218, "M", "交"), + (0x1F219, "M", "映"), + (0x1F21A, "M", "無"), + (0x1F21B, "M", "料"), + (0x1F21C, "M", "前"), + (0x1F21D, "M", "後"), + (0x1F21E, "M", "再"), + (0x1F21F, "M", "新"), + (0x1F220, "M", "初"), + (0x1F221, "M", "終"), + (0x1F222, "M", "生"), + (0x1F223, "M", "販"), + (0x1F224, "M", "声"), + (0x1F225, "M", "吹"), + (0x1F226, "M", "演"), + (0x1F227, "M", "投"), + (0x1F228, "M", "捕"), + (0x1F229, "M", "一"), + (0x1F22A, "M", "三"), + (0x1F22B, "M", "遊"), + (0x1F22C, "M", "左"), + (0x1F22D, "M", "中"), + (0x1F22E, "M", "右"), + (0x1F22F, "M", "指"), + (0x1F230, "M", "走"), + (0x1F231, "M", "打"), + (0x1F232, "M", "禁"), + (0x1F233, "M", "空"), + (0x1F234, "M", "合"), + (0x1F235, "M", "満"), + (0x1F236, "M", "有"), + (0x1F237, "M", "月"), + (0x1F238, "M", "申"), + (0x1F239, "M", "割"), + (0x1F23A, "M", "営"), + (0x1F23B, "M", "配"), + (0x1F23C, "X"), + ] + + +def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F240, "M", "〔本〕"), + (0x1F241, "M", "〔三〕"), + (0x1F242, "M", "〔二〕"), + (0x1F243, "M", "〔安〕"), + (0x1F244, "M", "〔点〕"), + (0x1F245, "M", "〔打〕"), + (0x1F246, "M", "〔盗〕"), + (0x1F247, "M", "〔勝〕"), + (0x1F248, "M", "〔敗〕"), + (0x1F249, "X"), + (0x1F250, "M", "得"), + (0x1F251, "M", "可"), + (0x1F252, "X"), + (0x1F260, "V"), + (0x1F266, "X"), + (0x1F300, "V"), + (0x1F6D8, "X"), + 
(0x1F6DC, "V"), + (0x1F6ED, "X"), + (0x1F6F0, "V"), + (0x1F6FD, "X"), + (0x1F700, "V"), + (0x1F777, "X"), + (0x1F77B, "V"), + (0x1F7DA, "X"), + (0x1F7E0, "V"), + (0x1F7EC, "X"), + (0x1F7F0, "V"), + (0x1F7F1, "X"), + (0x1F800, "V"), + (0x1F80C, "X"), + (0x1F810, "V"), + (0x1F848, "X"), + (0x1F850, "V"), + (0x1F85A, "X"), + (0x1F860, "V"), + (0x1F888, "X"), + (0x1F890, "V"), + (0x1F8AE, "X"), + (0x1F8B0, "V"), + (0x1F8BC, "X"), + (0x1F8C0, "V"), + (0x1F8C2, "X"), + (0x1F900, "V"), + (0x1FA54, "X"), + (0x1FA60, "V"), + (0x1FA6E, "X"), + (0x1FA70, "V"), + (0x1FA7D, "X"), + (0x1FA80, "V"), + (0x1FA8A, "X"), + (0x1FA8F, "V"), + (0x1FAC7, "X"), + (0x1FACE, "V"), + (0x1FADD, "X"), + (0x1FADF, "V"), + (0x1FAEA, "X"), + (0x1FAF0, "V"), + (0x1FAF9, "X"), + (0x1FB00, "V"), + (0x1FB93, "X"), + (0x1FB94, "V"), + (0x1FBF0, "M", "0"), + (0x1FBF1, "M", "1"), + (0x1FBF2, "M", "2"), + (0x1FBF3, "M", "3"), + (0x1FBF4, "M", "4"), + (0x1FBF5, "M", "5"), + (0x1FBF6, "M", "6"), + (0x1FBF7, "M", "7"), + (0x1FBF8, "M", "8"), + (0x1FBF9, "M", "9"), + (0x1FBFA, "X"), + (0x20000, "V"), + (0x2A6E0, "X"), + (0x2A700, "V"), + (0x2B73A, "X"), + (0x2B740, "V"), + (0x2B81E, "X"), + (0x2B820, "V"), + (0x2CEA2, "X"), + (0x2CEB0, "V"), + (0x2EBE1, "X"), + (0x2EBF0, "V"), + (0x2EE5E, "X"), + (0x2F800, "M", "丽"), + (0x2F801, "M", "丸"), + (0x2F802, "M", "乁"), + (0x2F803, "M", "𠄢"), + (0x2F804, "M", "你"), + (0x2F805, "M", "侮"), + (0x2F806, "M", "侻"), + (0x2F807, "M", "倂"), + (0x2F808, "M", "偺"), + (0x2F809, "M", "備"), + (0x2F80A, "M", "僧"), + (0x2F80B, "M", "像"), + (0x2F80C, "M", "㒞"), + (0x2F80D, "M", "𠘺"), + (0x2F80E, "M", "免"), + ] + + +def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F80F, "M", "兔"), + (0x2F810, "M", "兤"), + (0x2F811, "M", "具"), + (0x2F812, "M", "𠔜"), + (0x2F813, "M", "㒹"), + (0x2F814, "M", "內"), + (0x2F815, "M", "再"), + (0x2F816, "M", "𠕋"), + (0x2F817, "M", "冗"), + (0x2F818, "M", "冤"), + (0x2F819, "M", "仌"), + (0x2F81A, "M", "冬"), + (0x2F81B, "M", "况"), + (0x2F81C, "M", "𩇟"), + (0x2F81D, "M", "凵"), + (0x2F81E, "M", "刃"), + (0x2F81F, "M", "㓟"), + (0x2F820, "M", "刻"), + (0x2F821, "M", "剆"), + (0x2F822, "M", "割"), + (0x2F823, "M", "剷"), + (0x2F824, "M", "㔕"), + (0x2F825, "M", "勇"), + (0x2F826, "M", "勉"), + (0x2F827, "M", "勤"), + (0x2F828, "M", "勺"), + (0x2F829, "M", "包"), + (0x2F82A, "M", "匆"), + (0x2F82B, "M", "北"), + (0x2F82C, "M", "卉"), + (0x2F82D, "M", "卑"), + (0x2F82E, "M", "博"), + (0x2F82F, "M", "即"), + (0x2F830, "M", "卽"), + (0x2F831, "M", "卿"), + (0x2F834, "M", "𠨬"), + (0x2F835, "M", "灰"), + (0x2F836, "M", "及"), + (0x2F837, "M", "叟"), + (0x2F838, "M", "𠭣"), + (0x2F839, "M", "叫"), + (0x2F83A, "M", "叱"), + (0x2F83B, "M", "吆"), + (0x2F83C, "M", "咞"), + (0x2F83D, "M", "吸"), + (0x2F83E, "M", "呈"), + (0x2F83F, "M", "周"), + (0x2F840, "M", "咢"), + (0x2F841, "M", "哶"), + (0x2F842, "M", "唐"), + (0x2F843, "M", "啓"), + (0x2F844, "M", "啣"), + (0x2F845, "M", "善"), + (0x2F847, "M", "喙"), + (0x2F848, "M", "喫"), + (0x2F849, "M", "喳"), + (0x2F84A, "M", "嗂"), + (0x2F84B, "M", "圖"), + (0x2F84C, "M", "嘆"), + (0x2F84D, "M", "圗"), + (0x2F84E, "M", "噑"), + (0x2F84F, "M", "噴"), + (0x2F850, "M", "切"), + (0x2F851, "M", "壮"), + (0x2F852, "M", "城"), + (0x2F853, "M", "埴"), + (0x2F854, "M", "堍"), + (0x2F855, "M", "型"), + (0x2F856, "M", "堲"), + (0x2F857, "M", "報"), + (0x2F858, "M", "墬"), + (0x2F859, "M", "𡓤"), + (0x2F85A, "M", "売"), + (0x2F85B, "M", "壷"), + (0x2F85C, "M", "夆"), + (0x2F85D, "M", "多"), + (0x2F85E, "M", "夢"), + (0x2F85F, "M", "奢"), + (0x2F860, "M", "𡚨"), + (0x2F861, "M", "𡛪"), + 
(0x2F862, "M", "姬"), + (0x2F863, "M", "娛"), + (0x2F864, "M", "娧"), + (0x2F865, "M", "姘"), + (0x2F866, "M", "婦"), + (0x2F867, "M", "㛮"), + (0x2F868, "M", "㛼"), + (0x2F869, "M", "嬈"), + (0x2F86A, "M", "嬾"), + (0x2F86C, "M", "𡧈"), + (0x2F86D, "M", "寃"), + (0x2F86E, "M", "寘"), + (0x2F86F, "M", "寧"), + (0x2F870, "M", "寳"), + (0x2F871, "M", "𡬘"), + (0x2F872, "M", "寿"), + (0x2F873, "M", "将"), + (0x2F874, "M", "当"), + (0x2F875, "M", "尢"), + (0x2F876, "M", "㞁"), + ] + + +def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F877, "M", "屠"), + (0x2F878, "M", "屮"), + (0x2F879, "M", "峀"), + (0x2F87A, "M", "岍"), + (0x2F87B, "M", "𡷤"), + (0x2F87C, "M", "嵃"), + (0x2F87D, "M", "𡷦"), + (0x2F87E, "M", "嵮"), + (0x2F87F, "M", "嵫"), + (0x2F880, "M", "嵼"), + (0x2F881, "M", "巡"), + (0x2F882, "M", "巢"), + (0x2F883, "M", "㠯"), + (0x2F884, "M", "巽"), + (0x2F885, "M", "帨"), + (0x2F886, "M", "帽"), + (0x2F887, "M", "幩"), + (0x2F888, "M", "㡢"), + (0x2F889, "M", "𢆃"), + (0x2F88A, "M", "㡼"), + (0x2F88B, "M", "庰"), + (0x2F88C, "M", "庳"), + (0x2F88D, "M", "庶"), + (0x2F88E, "M", "廊"), + (0x2F88F, "M", "𪎒"), + (0x2F890, "M", "廾"), + (0x2F891, "M", "𢌱"), + (0x2F893, "M", "舁"), + (0x2F894, "M", "弢"), + (0x2F896, "M", "㣇"), + (0x2F897, "M", "𣊸"), + (0x2F898, "M", "𦇚"), + (0x2F899, "M", "形"), + (0x2F89A, "M", "彫"), + (0x2F89B, "M", "㣣"), + (0x2F89C, "M", "徚"), + (0x2F89D, "M", "忍"), + (0x2F89E, "M", "志"), + (0x2F89F, "M", "忹"), + (0x2F8A0, "M", "悁"), + (0x2F8A1, "M", "㤺"), + (0x2F8A2, "M", "㤜"), + (0x2F8A3, "M", "悔"), + (0x2F8A4, "M", "𢛔"), + (0x2F8A5, "M", "惇"), + (0x2F8A6, "M", "慈"), + (0x2F8A7, "M", "慌"), + (0x2F8A8, "M", "慎"), + (0x2F8A9, "M", "慌"), + (0x2F8AA, "M", "慺"), + (0x2F8AB, "M", "憎"), + (0x2F8AC, "M", "憲"), + (0x2F8AD, "M", "憤"), + (0x2F8AE, "M", "憯"), + (0x2F8AF, "M", "懞"), + (0x2F8B0, "M", "懲"), + (0x2F8B1, "M", "懶"), + (0x2F8B2, "M", "成"), + (0x2F8B3, "M", "戛"), + (0x2F8B4, "M", "扝"), + (0x2F8B5, "M", "抱"), + (0x2F8B6, "M", "拔"), + (0x2F8B7, "M", "捐"), + (0x2F8B8, "M", "𢬌"), + (0x2F8B9, "M", "挽"), + (0x2F8BA, "M", "拼"), + (0x2F8BB, "M", "捨"), + (0x2F8BC, "M", "掃"), + (0x2F8BD, "M", "揤"), + (0x2F8BE, "M", "𢯱"), + (0x2F8BF, "M", "搢"), + (0x2F8C0, "M", "揅"), + (0x2F8C1, "M", "掩"), + (0x2F8C2, "M", "㨮"), + (0x2F8C3, "M", "摩"), + (0x2F8C4, "M", "摾"), + (0x2F8C5, "M", "撝"), + (0x2F8C6, "M", "摷"), + (0x2F8C7, "M", "㩬"), + (0x2F8C8, "M", "敏"), + (0x2F8C9, "M", "敬"), + (0x2F8CA, "M", "𣀊"), + (0x2F8CB, "M", "旣"), + (0x2F8CC, "M", "書"), + (0x2F8CD, "M", "晉"), + (0x2F8CE, "M", "㬙"), + (0x2F8CF, "M", "暑"), + (0x2F8D0, "M", "㬈"), + (0x2F8D1, "M", "㫤"), + (0x2F8D2, "M", "冒"), + (0x2F8D3, "M", "冕"), + (0x2F8D4, "M", "最"), + (0x2F8D5, "M", "暜"), + (0x2F8D6, "M", "肭"), + (0x2F8D7, "M", "䏙"), + (0x2F8D8, "M", "朗"), + (0x2F8D9, "M", "望"), + (0x2F8DA, "M", "朡"), + (0x2F8DB, "M", "杞"), + (0x2F8DC, "M", "杓"), + ] + + +def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F8DD, "M", "𣏃"), + (0x2F8DE, "M", "㭉"), + (0x2F8DF, "M", "柺"), + (0x2F8E0, "M", "枅"), + (0x2F8E1, "M", "桒"), + (0x2F8E2, "M", "梅"), + (0x2F8E3, "M", "𣑭"), + (0x2F8E4, "M", "梎"), + (0x2F8E5, "M", "栟"), + (0x2F8E6, "M", "椔"), + (0x2F8E7, "M", "㮝"), + (0x2F8E8, "M", "楂"), + (0x2F8E9, "M", "榣"), + (0x2F8EA, "M", "槪"), + (0x2F8EB, "M", "檨"), + (0x2F8EC, "M", "𣚣"), + (0x2F8ED, "M", "櫛"), + (0x2F8EE, "M", "㰘"), + (0x2F8EF, "M", "次"), + (0x2F8F0, "M", "𣢧"), + (0x2F8F1, "M", "歔"), + (0x2F8F2, "M", "㱎"), + (0x2F8F3, "M", "歲"), + (0x2F8F4, "M", "殟"), + (0x2F8F5, "M", "殺"), + (0x2F8F6, "M", "殻"), + (0x2F8F7, "M", 
"𣪍"), + (0x2F8F8, "M", "𡴋"), + (0x2F8F9, "M", "𣫺"), + (0x2F8FA, "M", "汎"), + (0x2F8FB, "M", "𣲼"), + (0x2F8FC, "M", "沿"), + (0x2F8FD, "M", "泍"), + (0x2F8FE, "M", "汧"), + (0x2F8FF, "M", "洖"), + (0x2F900, "M", "派"), + (0x2F901, "M", "海"), + (0x2F902, "M", "流"), + (0x2F903, "M", "浩"), + (0x2F904, "M", "浸"), + (0x2F905, "M", "涅"), + (0x2F906, "M", "𣴞"), + (0x2F907, "M", "洴"), + (0x2F908, "M", "港"), + (0x2F909, "M", "湮"), + (0x2F90A, "M", "㴳"), + (0x2F90B, "M", "滋"), + (0x2F90C, "M", "滇"), + (0x2F90D, "M", "𣻑"), + (0x2F90E, "M", "淹"), + (0x2F90F, "M", "潮"), + (0x2F910, "M", "𣽞"), + (0x2F911, "M", "𣾎"), + (0x2F912, "M", "濆"), + (0x2F913, "M", "瀹"), + (0x2F914, "M", "瀞"), + (0x2F915, "M", "瀛"), + (0x2F916, "M", "㶖"), + (0x2F917, "M", "灊"), + (0x2F918, "M", "災"), + (0x2F919, "M", "灷"), + (0x2F91A, "M", "炭"), + (0x2F91B, "M", "𠔥"), + (0x2F91C, "M", "煅"), + (0x2F91D, "M", "𤉣"), + (0x2F91E, "M", "熜"), + (0x2F91F, "M", "𤎫"), + (0x2F920, "M", "爨"), + (0x2F921, "M", "爵"), + (0x2F922, "M", "牐"), + (0x2F923, "M", "𤘈"), + (0x2F924, "M", "犀"), + (0x2F925, "M", "犕"), + (0x2F926, "M", "𤜵"), + (0x2F927, "M", "𤠔"), + (0x2F928, "M", "獺"), + (0x2F929, "M", "王"), + (0x2F92A, "M", "㺬"), + (0x2F92B, "M", "玥"), + (0x2F92C, "M", "㺸"), + (0x2F92E, "M", "瑇"), + (0x2F92F, "M", "瑜"), + (0x2F930, "M", "瑱"), + (0x2F931, "M", "璅"), + (0x2F932, "M", "瓊"), + (0x2F933, "M", "㼛"), + (0x2F934, "M", "甤"), + (0x2F935, "M", "𤰶"), + (0x2F936, "M", "甾"), + (0x2F937, "M", "𤲒"), + (0x2F938, "M", "異"), + (0x2F939, "M", "𢆟"), + (0x2F93A, "M", "瘐"), + (0x2F93B, "M", "𤾡"), + (0x2F93C, "M", "𤾸"), + (0x2F93D, "M", "𥁄"), + (0x2F93E, "M", "㿼"), + (0x2F93F, "M", "䀈"), + (0x2F940, "M", "直"), + (0x2F941, "M", "𥃳"), + ] + + +def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F942, "M", "𥃲"), + (0x2F943, "M", "𥄙"), + (0x2F944, "M", "𥄳"), + (0x2F945, "M", "眞"), + (0x2F946, "M", "真"), + (0x2F948, "M", "睊"), + (0x2F949, "M", "䀹"), + (0x2F94A, "M", "瞋"), + (0x2F94B, "M", "䁆"), + (0x2F94C, "M", "䂖"), + (0x2F94D, "M", "𥐝"), + (0x2F94E, "M", "硎"), + (0x2F94F, "M", "碌"), + (0x2F950, "M", "磌"), + (0x2F951, "M", "䃣"), + (0x2F952, "M", "𥘦"), + (0x2F953, "M", "祖"), + (0x2F954, "M", "𥚚"), + (0x2F955, "M", "𥛅"), + (0x2F956, "M", "福"), + (0x2F957, "M", "秫"), + (0x2F958, "M", "䄯"), + (0x2F959, "M", "穀"), + (0x2F95A, "M", "穊"), + (0x2F95B, "M", "穏"), + (0x2F95C, "M", "𥥼"), + (0x2F95D, "M", "𥪧"), + (0x2F95F, "M", "竮"), + (0x2F960, "M", "䈂"), + (0x2F961, "M", "𥮫"), + (0x2F962, "M", "篆"), + (0x2F963, "M", "築"), + (0x2F964, "M", "䈧"), + (0x2F965, "M", "𥲀"), + (0x2F966, "M", "糒"), + (0x2F967, "M", "䊠"), + (0x2F968, "M", "糨"), + (0x2F969, "M", "糣"), + (0x2F96A, "M", "紀"), + (0x2F96B, "M", "𥾆"), + (0x2F96C, "M", "絣"), + (0x2F96D, "M", "䌁"), + (0x2F96E, "M", "緇"), + (0x2F96F, "M", "縂"), + (0x2F970, "M", "繅"), + (0x2F971, "M", "䌴"), + (0x2F972, "M", "𦈨"), + (0x2F973, "M", "𦉇"), + (0x2F974, "M", "䍙"), + (0x2F975, "M", "𦋙"), + (0x2F976, "M", "罺"), + (0x2F977, "M", "𦌾"), + (0x2F978, "M", "羕"), + (0x2F979, "M", "翺"), + (0x2F97A, "M", "者"), + (0x2F97B, "M", "𦓚"), + (0x2F97C, "M", "𦔣"), + (0x2F97D, "M", "聠"), + (0x2F97E, "M", "𦖨"), + (0x2F97F, "M", "聰"), + (0x2F980, "M", "𣍟"), + (0x2F981, "M", "䏕"), + (0x2F982, "M", "育"), + (0x2F983, "M", "脃"), + (0x2F984, "M", "䐋"), + (0x2F985, "M", "脾"), + (0x2F986, "M", "媵"), + (0x2F987, "M", "𦞧"), + (0x2F988, "M", "𦞵"), + (0x2F989, "M", "𣎓"), + (0x2F98A, "M", "𣎜"), + (0x2F98B, "M", "舁"), + (0x2F98C, "M", "舄"), + (0x2F98D, "M", "辞"), + (0x2F98E, "M", "䑫"), + (0x2F98F, "M", "芑"), + (0x2F990, "M", "芋"), + 
(0x2F991, "M", "芝"), + (0x2F992, "M", "劳"), + (0x2F993, "M", "花"), + (0x2F994, "M", "芳"), + (0x2F995, "M", "芽"), + (0x2F996, "M", "苦"), + (0x2F997, "M", "𦬼"), + (0x2F998, "M", "若"), + (0x2F999, "M", "茝"), + (0x2F99A, "M", "荣"), + (0x2F99B, "M", "莭"), + (0x2F99C, "M", "茣"), + (0x2F99D, "M", "莽"), + (0x2F99E, "M", "菧"), + (0x2F99F, "M", "著"), + (0x2F9A0, "M", "荓"), + (0x2F9A1, "M", "菊"), + (0x2F9A2, "M", "菌"), + (0x2F9A3, "M", "菜"), + (0x2F9A4, "M", "𦰶"), + (0x2F9A5, "M", "𦵫"), + (0x2F9A6, "M", "𦳕"), + (0x2F9A7, "M", "䔫"), + ] + + +def _seg_82() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F9A8, "M", "蓱"), + (0x2F9A9, "M", "蓳"), + (0x2F9AA, "M", "蔖"), + (0x2F9AB, "M", "𧏊"), + (0x2F9AC, "M", "蕤"), + (0x2F9AD, "M", "𦼬"), + (0x2F9AE, "M", "䕝"), + (0x2F9AF, "M", "䕡"), + (0x2F9B0, "M", "𦾱"), + (0x2F9B1, "M", "𧃒"), + (0x2F9B2, "M", "䕫"), + (0x2F9B3, "M", "虐"), + (0x2F9B4, "M", "虜"), + (0x2F9B5, "M", "虧"), + (0x2F9B6, "M", "虩"), + (0x2F9B7, "M", "蚩"), + (0x2F9B8, "M", "蚈"), + (0x2F9B9, "M", "蜎"), + (0x2F9BA, "M", "蛢"), + (0x2F9BB, "M", "蝹"), + (0x2F9BC, "M", "蜨"), + (0x2F9BD, "M", "蝫"), + (0x2F9BE, "M", "螆"), + (0x2F9BF, "M", "䗗"), + (0x2F9C0, "M", "蟡"), + (0x2F9C1, "M", "蠁"), + (0x2F9C2, "M", "䗹"), + (0x2F9C3, "M", "衠"), + (0x2F9C4, "M", "衣"), + (0x2F9C5, "M", "𧙧"), + (0x2F9C6, "M", "裗"), + (0x2F9C7, "M", "裞"), + (0x2F9C8, "M", "䘵"), + (0x2F9C9, "M", "裺"), + (0x2F9CA, "M", "㒻"), + (0x2F9CB, "M", "𧢮"), + (0x2F9CC, "M", "𧥦"), + (0x2F9CD, "M", "䚾"), + (0x2F9CE, "M", "䛇"), + (0x2F9CF, "M", "誠"), + (0x2F9D0, "M", "諭"), + (0x2F9D1, "M", "變"), + (0x2F9D2, "M", "豕"), + (0x2F9D3, "M", "𧲨"), + (0x2F9D4, "M", "貫"), + (0x2F9D5, "M", "賁"), + (0x2F9D6, "M", "贛"), + (0x2F9D7, "M", "起"), + (0x2F9D8, "M", "𧼯"), + (0x2F9D9, "M", "𠠄"), + (0x2F9DA, "M", "跋"), + (0x2F9DB, "M", "趼"), + (0x2F9DC, "M", "跰"), + (0x2F9DD, "M", "𠣞"), + (0x2F9DE, "M", "軔"), + (0x2F9DF, "M", "輸"), + (0x2F9E0, "M", "𨗒"), + (0x2F9E1, "M", "𨗭"), + (0x2F9E2, "M", "邔"), + (0x2F9E3, "M", "郱"), + (0x2F9E4, "M", "鄑"), + (0x2F9E5, "M", "𨜮"), + (0x2F9E6, "M", "鄛"), + (0x2F9E7, "M", "鈸"), + (0x2F9E8, "M", "鋗"), + (0x2F9E9, "M", "鋘"), + (0x2F9EA, "M", "鉼"), + (0x2F9EB, "M", "鏹"), + (0x2F9EC, "M", "鐕"), + (0x2F9ED, "M", "𨯺"), + (0x2F9EE, "M", "開"), + (0x2F9EF, "M", "䦕"), + (0x2F9F0, "M", "閷"), + (0x2F9F1, "M", "𨵷"), + (0x2F9F2, "M", "䧦"), + (0x2F9F3, "M", "雃"), + (0x2F9F4, "M", "嶲"), + (0x2F9F5, "M", "霣"), + (0x2F9F6, "M", "𩅅"), + (0x2F9F7, "M", "𩈚"), + (0x2F9F8, "M", "䩮"), + (0x2F9F9, "M", "䩶"), + (0x2F9FA, "M", "韠"), + (0x2F9FB, "M", "𩐊"), + (0x2F9FC, "M", "䪲"), + (0x2F9FD, "M", "𩒖"), + (0x2F9FE, "M", "頋"), + (0x2FA00, "M", "頩"), + (0x2FA01, "M", "𩖶"), + (0x2FA02, "M", "飢"), + (0x2FA03, "M", "䬳"), + (0x2FA04, "M", "餩"), + (0x2FA05, "M", "馧"), + (0x2FA06, "M", "駂"), + (0x2FA07, "M", "駾"), + (0x2FA08, "M", "䯎"), + (0x2FA09, "M", "𩬰"), + (0x2FA0A, "M", "鬒"), + (0x2FA0B, "M", "鱀"), + (0x2FA0C, "M", "鳽"), + ] + + +def _seg_83() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2FA0D, "M", "䳎"), + (0x2FA0E, "M", "䳭"), + (0x2FA0F, "M", "鵧"), + (0x2FA10, "M", "𪃎"), + (0x2FA11, "M", "䳸"), + (0x2FA12, "M", "𪄅"), + (0x2FA13, "M", "𪈎"), + (0x2FA14, "M", "𪊑"), + (0x2FA15, "M", "麻"), + (0x2FA16, "M", "䵖"), + (0x2FA17, "M", "黹"), + (0x2FA18, "M", "黾"), + (0x2FA19, "M", "鼅"), + (0x2FA1A, "M", "鼏"), + (0x2FA1B, "M", "鼖"), + (0x2FA1C, "M", "鼻"), + (0x2FA1D, "M", "𪘀"), + (0x2FA1E, "X"), + (0x30000, "V"), + (0x3134B, "X"), + (0x31350, "V"), + (0x323B0, "X"), + (0xE0100, "I"), + (0xE01F0, "X"), + ] + + +uts46data = tuple( + 
_seg_0() + + _seg_1() + + _seg_2() + + _seg_3() + + _seg_4() + + _seg_5() + + _seg_6() + + _seg_7() + + _seg_8() + + _seg_9() + + _seg_10() + + _seg_11() + + _seg_12() + + _seg_13() + + _seg_14() + + _seg_15() + + _seg_16() + + _seg_17() + + _seg_18() + + _seg_19() + + _seg_20() + + _seg_21() + + _seg_22() + + _seg_23() + + _seg_24() + + _seg_25() + + _seg_26() + + _seg_27() + + _seg_28() + + _seg_29() + + _seg_30() + + _seg_31() + + _seg_32() + + _seg_33() + + _seg_34() + + _seg_35() + + _seg_36() + + _seg_37() + + _seg_38() + + _seg_39() + + _seg_40() + + _seg_41() + + _seg_42() + + _seg_43() + + _seg_44() + + _seg_45() + + _seg_46() + + _seg_47() + + _seg_48() + + _seg_49() + + _seg_50() + + _seg_51() + + _seg_52() + + _seg_53() + + _seg_54() + + _seg_55() + + _seg_56() + + _seg_57() + + _seg_58() + + _seg_59() + + _seg_60() + + _seg_61() + + _seg_62() + + _seg_63() + + _seg_64() + + _seg_65() + + _seg_66() + + _seg_67() + + _seg_68() + + _seg_69() + + _seg_70() + + _seg_71() + + _seg_72() + + _seg_73() + + _seg_74() + + _seg_75() + + _seg_76() + + _seg_77() + + _seg_78() + + _seg_79() + + _seg_80() + + _seg_81() + + _seg_82() + + _seg_83() +) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...] diff --git a/.venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/METADATA b/.venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/METADATA new file mode 100644 index 0000000..fc3c00d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/METADATA @@ -0,0 +1,79 @@ +Metadata-Version: 2.4 +Name: iniconfig +Version: 2.3.0 +Summary: brain-dead simple config-ini parsing +Author-email: Ronny Pfannschmidt , Holger Krekel +License-Expression: MIT +Project-URL: Homepage, https://github.com/pytest-dev/iniconfig +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Utilities +Requires-Python: >=3.10 +Description-Content-Type: text/x-rst +License-File: LICENSE +Dynamic: license-file + +iniconfig: brain-dead simple parsing of ini files +======================================================= + +iniconfig is a small and simple INI-file parser module +having a unique set of features: + +* maintains order of sections and entries +* supports multi-line values with or without line-continuations +* supports "#" comments everywhere +* raises errors with proper line-numbers +* no bells and whistles like automatic substitutions +* iniconfig raises an Error if two sections have the same name. 
+ +If you encounter issues or have feature wishes please report them to: + + https://github.com/RonnyPfannschmidt/iniconfig/issues + +Basic Example +=================================== + +If you have an ini file like this: + +.. code-block:: ini + + # content of example.ini + [section1] # comment + name1=value1 # comment + name1b=value1,value2 # comment + + [section2] + name2= + line1 + line2 + +then you can do: + +.. code-block:: pycon + + >>> import iniconfig + >>> ini = iniconfig.IniConfig("example.ini") + >>> ini['section1']['name1'] # raises KeyError if not exists + 'value1' + >>> ini.get('section1', 'name1b', [], lambda x: x.split(",")) + ['value1', 'value2'] + >>> ini.get('section1', 'notexist', [], lambda x: x.split(",")) + [] + >>> [x.name for x in list(ini)] + ['section1', 'section2'] + >>> list(list(ini)[0].items()) + [('name1', 'value1'), ('name1b', 'value1,value2')] + >>> 'section1' in ini + True + >>> 'inexistendsection' in ini + False diff --git a/.venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/RECORD b/.venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/RECORD new file mode 100644 index 0000000..c9899e4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/RECORD @@ -0,0 +1,15 @@ +iniconfig-2.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +iniconfig-2.3.0.dist-info/METADATA,sha256=QNdz-E5OES9JW79PG-nL0tRWwK6271MR910b8yLyFls,2526 +iniconfig-2.3.0.dist-info/RECORD,, +iniconfig-2.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 +iniconfig-2.3.0.dist-info/licenses/LICENSE,sha256=NAn6kfes5VeJRjJnZlbjImT-XvdYFTVyXcmiN3RVG9Q,1098 +iniconfig-2.3.0.dist-info/top_level.txt,sha256=7KfM0fugdlToj9UW7enKXk2HYALQD8qHiyKtjhSzgN8,10 +iniconfig/__init__.py,sha256=XL5eqUYj4mskAOorZ5jfRAinJvJzTI-fJxpP4xfXtaw,7497 +iniconfig/__pycache__/__init__.cpython-312.pyc,, +iniconfig/__pycache__/_parse.cpython-312.pyc,, +iniconfig/__pycache__/_version.cpython-312.pyc,, +iniconfig/__pycache__/exceptions.cpython-312.pyc,, +iniconfig/_parse.py,sha256=5ncBl7MAQiaPNnpRrs9FR4t6G6DkgOUs458OY_1CR28,5223 +iniconfig/_version.py,sha256=KNFYe-Vtdt7Z-oHyl8jmDAQ9qXoCNMAEXigj6BR1QUI,704 +iniconfig/exceptions.py,sha256=mipQ_aMxD9CvSvFWN1oTXY4QuRnKAMZ1f3sCdmjDTU0,399 +iniconfig/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/.venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/WHEEL new file mode 100644 index 0000000..e7fa31b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/licenses/LICENSE b/.venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..46f4b28 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2010 - 2023 Holger Krekel and others + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, 
subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/.venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/top_level.txt b/.venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/top_level.txt new file mode 100644 index 0000000..9dda536 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/iniconfig-2.3.0.dist-info/top_level.txt @@ -0,0 +1 @@ +iniconfig diff --git a/.venv/lib/python3.12/site-packages/iniconfig/__init__.py b/.venv/lib/python3.12/site-packages/iniconfig/__init__.py new file mode 100644 index 0000000..b84809f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/iniconfig/__init__.py @@ -0,0 +1,249 @@ +"""brain-dead simple parser for ini-style files. +(C) Ronny Pfannschmidt, Holger Krekel -- MIT licensed +""" + +import os +from collections.abc import Callable +from collections.abc import Iterator +from collections.abc import Mapping +from typing import Final +from typing import TypeVar +from typing import overload + +__all__ = ["IniConfig", "ParseError", "COMMENTCHARS", "iscommentline"] + +from . import _parse +from ._parse import COMMENTCHARS +from ._parse import iscommentline +from .exceptions import ParseError + +_D = TypeVar("_D") +_T = TypeVar("_T") + + +class SectionWrapper: + config: Final["IniConfig"] + name: Final[str] + + def __init__(self, config: "IniConfig", name: str) -> None: + self.config = config + self.name = name + + def lineof(self, name: str) -> int | None: + return self.config.lineof(self.name, name) + + @overload + def get(self, key: str) -> str | None: ... + + @overload + def get( + self, + key: str, + convert: Callable[[str], _T], + ) -> _T | None: ... + + @overload + def get( + self, + key: str, + default: None, + convert: Callable[[str], _T], + ) -> _T | None: ... + + @overload + def get(self, key: str, default: _D, convert: None = None) -> str | _D: ... + + @overload + def get( + self, + key: str, + default: _D, + convert: Callable[[str], _T], + ) -> _T | _D: ... 
+ + # TODO: investigate possible mypy bug wrt matching the passed over data + def get( # type: ignore [misc] + self, + key: str, + default: _D | None = None, + convert: Callable[[str], _T] | None = None, + ) -> _D | _T | str | None: + return self.config.get(self.name, key, convert=convert, default=default) + + def __getitem__(self, key: str) -> str: + return self.config.sections[self.name][key] + + def __iter__(self) -> Iterator[str]: + section: Mapping[str, str] = self.config.sections.get(self.name, {}) + + def lineof(key: str) -> int: + return self.config.lineof(self.name, key) # type: ignore[return-value] + + yield from sorted(section, key=lineof) + + def items(self) -> Iterator[tuple[str, str]]: + for name in self: + yield name, self[name] + + +class IniConfig: + path: Final[str] + sections: Final[Mapping[str, Mapping[str, str]]] + _sources: Final[Mapping[tuple[str, str | None], int]] + + def __init__( + self, + path: str | os.PathLike[str], + data: str | None = None, + encoding: str = "utf-8", + *, + _sections: Mapping[str, Mapping[str, str]] | None = None, + _sources: Mapping[tuple[str, str | None], int] | None = None, + ) -> None: + self.path = os.fspath(path) + + # Determine sections and sources + if _sections is not None and _sources is not None: + # Use provided pre-parsed data (called from parse()) + sections_data = _sections + sources = _sources + else: + # Parse the data (backward compatible path) + if data is None: + with open(self.path, encoding=encoding) as fp: + data = fp.read() + + # Use old behavior (no stripping) for backward compatibility + sections_data, sources = _parse.parse_ini_data( + self.path, data, strip_inline_comments=False + ) + + # Assign once to Final attributes + self._sources = sources + self.sections = sections_data + + @classmethod + def parse( + cls, + path: str | os.PathLike[str], + data: str | None = None, + encoding: str = "utf-8", + *, + strip_inline_comments: bool = True, + strip_section_whitespace: bool = False, + ) -> "IniConfig": + """Parse an INI file. + + Args: + path: Path to the INI file (used for error messages) + data: Optional INI content as string. If None, reads from path. + encoding: Encoding to use when reading the file (default: utf-8) + strip_inline_comments: Whether to strip inline comments from values + (default: True). When True, comments starting with # or ; are + removed from values, matching the behavior for section comments. + strip_section_whitespace: Whether to strip whitespace from section and key names + (default: False). When True, strips Unicode whitespace from section and key names, + addressing issue #4. When False, preserves existing behavior for backward compatibility. 
+ + Returns: + IniConfig instance with parsed configuration + + Example: + # With comment stripping (default): + config = IniConfig.parse("setup.cfg") + # value = "foo" instead of "foo # comment" + + # Without comment stripping (old behavior): + config = IniConfig.parse("setup.cfg", strip_inline_comments=False) + # value = "foo # comment" + + # With section name stripping (opt-in for issue #4): + config = IniConfig.parse("setup.cfg", strip_section_whitespace=True) + # section names and keys have Unicode whitespace stripped + """ + fspath = os.fspath(path) + + if data is None: + with open(fspath, encoding=encoding) as fp: + data = fp.read() + + sections_data, sources = _parse.parse_ini_data( + fspath, + data, + strip_inline_comments=strip_inline_comments, + strip_section_whitespace=strip_section_whitespace, + ) + + # Call constructor with pre-parsed sections and sources + return cls(path=fspath, _sections=sections_data, _sources=sources) + + def lineof(self, section: str, name: str | None = None) -> int | None: + lineno = self._sources.get((section, name)) + return None if lineno is None else lineno + 1 + + @overload + def get( + self, + section: str, + name: str, + ) -> str | None: ... + + @overload + def get( + self, + section: str, + name: str, + convert: Callable[[str], _T], + ) -> _T | None: ... + + @overload + def get( + self, + section: str, + name: str, + default: None, + convert: Callable[[str], _T], + ) -> _T | None: ... + + @overload + def get( + self, section: str, name: str, default: _D, convert: None = None + ) -> str | _D: ... + + @overload + def get( + self, + section: str, + name: str, + default: _D, + convert: Callable[[str], _T], + ) -> _T | _D: ... + + def get( # type: ignore + self, + section: str, + name: str, + default: _D | None = None, + convert: Callable[[str], _T] | None = None, + ) -> _D | _T | str | None: + try: + value: str = self.sections[section][name] + except KeyError: + return default + else: + if convert is not None: + return convert(value) + else: + return value + + def __getitem__(self, name: str) -> SectionWrapper: + if name not in self.sections: + raise KeyError(name) + return SectionWrapper(self, name) + + def __iter__(self) -> Iterator[SectionWrapper]: + for name in sorted(self.sections, key=self.lineof): # type: ignore + yield SectionWrapper(self, name) + + def __contains__(self, arg: str) -> bool: + return arg in self.sections diff --git a/.venv/lib/python3.12/site-packages/iniconfig/_parse.py b/.venv/lib/python3.12/site-packages/iniconfig/_parse.py new file mode 100644 index 0000000..57b9b44 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/iniconfig/_parse.py @@ -0,0 +1,163 @@ +from collections.abc import Mapping +from typing import NamedTuple + +from .exceptions import ParseError + +COMMENTCHARS = "#;" + + +class ParsedLine(NamedTuple): + lineno: int + section: str | None + name: str | None + value: str | None + + +def parse_ini_data( + path: str, + data: str, + *, + strip_inline_comments: bool, + strip_section_whitespace: bool = False, +) -> tuple[Mapping[str, Mapping[str, str]], Mapping[tuple[str, str | None], int]]: + """Parse INI data and return sections and sources mappings. + + Args: + path: Path for error messages + data: INI content as string + strip_inline_comments: Whether to strip inline comments from values + strip_section_whitespace: Whether to strip whitespace from section and key names + (default: False). When True, addresses issue #4 by stripping Unicode whitespace. 
+ + Returns: + Tuple of (sections_data, sources) where: + - sections_data: mapping of section -> {name -> value} + - sources: mapping of (section, name) -> line number + """ + tokens = parse_lines( + path, + data.splitlines(True), + strip_inline_comments=strip_inline_comments, + strip_section_whitespace=strip_section_whitespace, + ) + + sources: dict[tuple[str, str | None], int] = {} + sections_data: dict[str, dict[str, str]] = {} + + for lineno, section, name, value in tokens: + if section is None: + raise ParseError(path, lineno, "no section header defined") + sources[section, name] = lineno + if name is None: + if section in sections_data: + raise ParseError(path, lineno, f"duplicate section {section!r}") + sections_data[section] = {} + else: + if name in sections_data[section]: + raise ParseError(path, lineno, f"duplicate name {name!r}") + assert value is not None + sections_data[section][name] = value + + return sections_data, sources + + +def parse_lines( + path: str, + line_iter: list[str], + *, + strip_inline_comments: bool = False, + strip_section_whitespace: bool = False, +) -> list[ParsedLine]: + result: list[ParsedLine] = [] + section = None + for lineno, line in enumerate(line_iter): + name, data = _parseline( + path, line, lineno, strip_inline_comments, strip_section_whitespace + ) + # new value + if name is not None and data is not None: + result.append(ParsedLine(lineno, section, name, data)) + # new section + elif name is not None and data is None: + if not name: + raise ParseError(path, lineno, "empty section name") + section = name + result.append(ParsedLine(lineno, section, None, None)) + # continuation + elif name is None and data is not None: + if not result: + raise ParseError(path, lineno, "unexpected value continuation") + last = result.pop() + if last.name is None: + raise ParseError(path, lineno, "unexpected value continuation") + + if last.value: + last = last._replace(value=f"{last.value}\n{data}") + else: + last = last._replace(value=data) + result.append(last) + return result + + +def _parseline( + path: str, + line: str, + lineno: int, + strip_inline_comments: bool, + strip_section_whitespace: bool, +) -> tuple[str | None, str | None]: + # blank lines + if iscommentline(line): + line = "" + else: + line = line.rstrip() + if not line: + return None, None + # section + if line[0] == "[": + realline = line + for c in COMMENTCHARS: + line = line.split(c)[0].rstrip() + if line[-1] == "]": + section_name = line[1:-1] + # Optionally strip whitespace from section name (issue #4) + if strip_section_whitespace: + section_name = section_name.strip() + return section_name, None + return None, realline.strip() + # value + elif not line[0].isspace(): + try: + name, value = line.split("=", 1) + if ":" in name: + raise ValueError() + except ValueError: + try: + name, value = line.split(":", 1) + except ValueError: + raise ParseError(path, lineno, f"unexpected line: {line!r}") from None + + # Strip key name (always for backward compatibility, optionally with unicode awareness) + key_name = name.strip() + + # Strip value + value = value.strip() + # Strip inline comments from values if requested (issue #55) + if strip_inline_comments: + for c in COMMENTCHARS: + value = value.split(c)[0].rstrip() + + return key_name, value + # continuation + else: + line = line.strip() + # Strip inline comments from continuations if requested (issue #55) + if strip_inline_comments: + for c in COMMENTCHARS: + line = line.split(c)[0].rstrip() + return None, line + + +def iscommentline(line: str) 
-> bool: + c = line.lstrip()[:1] + return c in COMMENTCHARS diff --git a/.venv/lib/python3.12/site-packages/iniconfig/_version.py b/.venv/lib/python3.12/site-packages/iniconfig/_version.py new file mode 100644 index 0000000..b982b02 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/iniconfig/_version.py @@ -0,0 +1,34 @@ +# file generated by setuptools-scm +# don't change, don't track in version control + +__all__ = [ + "__version__", + "__version_tuple__", + "version", + "version_tuple", + "__commit_id__", + "commit_id", +] + +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple + from typing import Union + + VERSION_TUPLE = Tuple[Union[int, str], ...] + COMMIT_ID = Union[str, None] +else: + VERSION_TUPLE = object + COMMIT_ID = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE +commit_id: COMMIT_ID +__commit_id__: COMMIT_ID + +__version__ = version = '2.3.0' +__version_tuple__ = version_tuple = (2, 3, 0) + +__commit_id__ = commit_id = None diff --git a/.venv/lib/python3.12/site-packages/iniconfig/exceptions.py b/.venv/lib/python3.12/site-packages/iniconfig/exceptions.py new file mode 100644 index 0000000..d078bc6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/iniconfig/exceptions.py @@ -0,0 +1,16 @@ +from typing import Final + + +class ParseError(Exception): + path: Final[str] + lineno: Final[int] + msg: Final[str] + + def __init__(self, path: str, lineno: int, msg: str) -> None: + super().__init__(path, lineno, msg) + self.path = path + self.lineno = lineno + self.msg = msg + + def __str__(self) -> str: + return f"{self.path}:{self.lineno + 1}: {self.msg}" diff --git a/.venv/lib/python3.12/site-packages/iniconfig/py.typed b/.venv/lib/python3.12/site-packages/iniconfig/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/librt-0.7.8.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/librt-0.7.8.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/librt-0.7.8.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/librt-0.7.8.dist-info/METADATA b/.venv/lib/python3.12/site-packages/librt-0.7.8.dist-info/METADATA new file mode 100644 index 0000000..bc482ec --- /dev/null +++ b/.venv/lib/python3.12/site-packages/librt-0.7.8.dist-info/METADATA @@ -0,0 +1,34 @@ +Metadata-Version: 2.4 +Name: librt +Version: 0.7.8 +Summary: Mypyc runtime library +Author-email: Jukka Lehtosalo , Ivan Levkivskyi +License: MIT +Project-URL: Homepage, https://github.com/mypyc/librt +Project-URL: Issues, https://github.com/mypyc/mypyc/issues +Classifier: Development Status :: 3 - Alpha +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Topic :: Software Development +Classifier: Typing :: Typed +Requires-Python: >=3.9 +Description-Content-Type: text/x-rst +License-File: LICENSE +Dynamic: license-file + +Mypyc runtime library +===================== + +This library contains efficient C implementations of various Python standard +library classes 
and functions. Mypyc can use these fast implementations when +compiling Python code to native extension modules. + +Mypyc compiler is a part of `mypy distribution `__. diff --git a/.venv/lib/python3.12/site-packages/librt-0.7.8.dist-info/RECORD b/.venv/lib/python3.12/site-packages/librt-0.7.8.dist-info/RECORD new file mode 100644 index 0000000..b945bc4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/librt-0.7.8.dist-info/RECORD @@ -0,0 +1,14 @@ +librt-0.7.8.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +librt-0.7.8.dist-info/METADATA,sha256=fpSqK7KeWD5pNoUbmyf-wr8n1Qjj66HiFlLcEKr5Vnc,1337 +librt-0.7.8.dist-info/RECORD,, +librt-0.7.8.dist-info/WHEEL,sha256=DxRnWQz-Kp9-4a4hdDHsSv0KUC3H7sN9Nbef3-8RjXU,190 +librt-0.7.8.dist-info/licenses/LICENSE,sha256=iPnjecaEWABTxY514pbKLrJDtu3MblNX8kcxDpbu74o,12789 +librt-0.7.8.dist-info/top_level.txt,sha256=30um8Di5l6cLfadmd2Amp11XwLlr8b0ES1ckJkZ4Ulg,6 +librt/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +librt/base64.cpython-312-x86_64-linux-gnu.so,sha256=UfhusQHADMF-E85vtpflJ2vgDj7aJrb2mc-8sUdey2U,154736 +librt/base64.pyi,sha256=eg1QM7rzBPf0M6vRyeufBzPEs5IVIdi0Ibz8ZBI75m8,180 +librt/internal.cpython-312-x86_64-linux-gnu.so,sha256=mijwtlfSbnDCshT4fCnihdytQd1iMb4aszYRJRcxy50,244624 +librt/internal.pyi,sha256=RneVcEnP7hwJKqNx7y_6JFzrjbmvycs-o2m58F5AMJw,814 +librt/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +librt/strings.cpython-312-x86_64-linux-gnu.so,sha256=DMPkkuFNrYpO5xionU0LZ-vv6QZCFl3B_zBKOqi6bBc,178568 +librt/strings.pyi,sha256=Xr97g-L0_5kfIA1Y27cSYD1UR93_lMs9qcj0jeNs4rQ,418 diff --git a/.venv/lib/python3.12/site-packages/librt-0.7.8.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/librt-0.7.8.dist-info/WHEEL new file mode 100644 index 0000000..f3e8a97 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/librt-0.7.8.dist-info/WHEEL @@ -0,0 +1,7 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: false +Tag: cp312-cp312-manylinux_2_17_x86_64 +Tag: cp312-cp312-manylinux2014_x86_64 +Tag: cp312-cp312-manylinux_2_28_x86_64 + diff --git a/.venv/lib/python3.12/site-packages/librt-0.7.8.dist-info/licenses/LICENSE b/.venv/lib/python3.12/site-packages/librt-0.7.8.dist-info/licenses/LICENSE new file mode 100644 index 0000000..80e1321 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/librt-0.7.8.dist-info/licenses/LICENSE @@ -0,0 +1,263 @@ +Mypyc runtime library is licensed under the terms of the MIT license, reproduced below. + += = = = = + +The MIT License + +Copyright (c) 2025 Mypyc contributors + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + += = = = = + +Portions of mypy and mypyc are licensed under different licenses. +The files +mypyc/lib-rt/pythonsupport.h, mypyc/lib-rt/getargs.c and +mypyc/lib-rt/getargsfast.c are licensed under the PSF 2 License, reproduced +below. + += = = = = + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012 Python Software Foundation; All Rights Reserved" are retained in Python +alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. 
Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the Internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. 
This +Agreement may also be obtained from a proxy server on the Internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. 
+ +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + += = = = = + +Files under lib-rt/base64 are licensed under the following license. + += = = = = + +Copyright (c) 2005-2007, Nick Galbreath +Copyright (c) 2015-2018, Wojciech Muła +Copyright (c) 2016-2017, Matthieu Darbois +Copyright (c) 2013-2022, Alfred Klomp +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +- Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + +- Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/lib/python3.12/site-packages/librt-0.7.8.dist-info/top_level.txt b/.venv/lib/python3.12/site-packages/librt-0.7.8.dist-info/top_level.txt new file mode 100644 index 0000000..e49e10c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/librt-0.7.8.dist-info/top_level.txt @@ -0,0 +1 @@ +librt diff --git a/.venv/lib/python3.12/site-packages/librt/__init__.pyi b/.venv/lib/python3.12/site-packages/librt/__init__.pyi new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/librt/base64.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/librt/base64.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..7f6a167 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/librt/base64.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/librt/base64.pyi b/.venv/lib/python3.12/site-packages/librt/base64.pyi new file mode 100644 index 0000000..275258a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/librt/base64.pyi @@ -0,0 +1,4 @@ +def b64encode(s: bytes) -> bytes: ... +def b64decode(s: bytes | str) -> bytes: ... +def urlsafe_b64encode(s: bytes) -> bytes: ... +def urlsafe_b64decode(s: bytes | str) -> bytes: ... 
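The base64.pyi stub above declares the full public surface of librt.base64. A minimal usage sketch, assuming the compiled librt extension is importable and mirrors the stdlib base64 semantics that the stub signatures suggest:

    from librt import base64 as librt_b64

    # Round-trip through the standard alphabet.
    encoded = librt_b64.b64encode(b"hello")          # expected: b'aGVsbG8='
    assert librt_b64.b64decode(encoded) == b"hello"

    # URL-safe variants; the stub accepts bytes or str for decoding.
    token = librt_b64.urlsafe_b64encode(b"\xfb\xff")
    assert librt_b64.urlsafe_b64decode(token.decode()) == b"\xfb\xff"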
diff --git a/.venv/lib/python3.12/site-packages/librt/internal.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/librt/internal.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..f93b01f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/librt/internal.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/librt/internal.pyi b/.venv/lib/python3.12/site-packages/librt/internal.pyi new file mode 100644 index 0000000..2969ccf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/librt/internal.pyi @@ -0,0 +1,21 @@ +from mypy_extensions import u8 + +class ReadBuffer: + def __init__(self, source: bytes) -> None: ... + +class WriteBuffer: + def getvalue(self) -> bytes: ... + +def write_bool(data: WriteBuffer, value: bool) -> None: ... +def read_bool(data: ReadBuffer) -> bool: ... +def write_str(data: WriteBuffer, value: str) -> None: ... +def read_str(data: ReadBuffer) -> str: ... +def write_bytes(data: WriteBuffer, value: bytes) -> None: ... +def read_bytes(data: ReadBuffer) -> bytes: ... +def write_float(data: WriteBuffer, value: float) -> None: ... +def read_float(data: ReadBuffer) -> float: ... +def write_int(data: WriteBuffer, value: int) -> None: ... +def read_int(data: ReadBuffer) -> int: ... +def write_tag(data: WriteBuffer, value: u8) -> None: ... +def read_tag(data: ReadBuffer) -> u8: ... +def cache_version() -> u8: ... diff --git a/.venv/lib/python3.12/site-packages/librt/py.typed b/.venv/lib/python3.12/site-packages/librt/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/librt/strings.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/librt/strings.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..ea2d85b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/librt/strings.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/librt/strings.pyi b/.venv/lib/python3.12/site-packages/librt/strings.pyi new file mode 100644 index 0000000..241f6a6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/librt/strings.pyi @@ -0,0 +1,13 @@ +from typing import final + +from mypy_extensions import i64, u8 + +@final +class BytesWriter: + def append(self, /, x: int) -> None: ... + def write(self, /, b: bytes | bytearray) -> None: ... + def getvalue(self) -> bytes: ... + def truncate(self, /, size: i64) -> None: ... + def __len__(self) -> i64: ... + def __getitem__(self, /, i: i64) -> u8: ... + def __setitem__(self, /, i: i64, x: u8) -> None: ... 
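strings.pyi above only declares the BytesWriter methods; its constructor is not shown. A rough sketch of the declared interface, under the assumption of a no-argument constructor (an assumption, not confirmed by the stub):

    from librt.strings import BytesWriter

    w = BytesWriter()          # assumed no-arg constructor; not shown in the stub
    w.write(b"abc")            # append a bytes chunk
    w.append(0x21)             # append a single byte value ('!')
    assert len(w) == 4
    assert w[3] == 0x21        # __getitem__ returns the byte at an index
    assert w.getvalue() == b"abc!"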
diff --git a/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/METADATA b/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/METADATA new file mode 100644 index 0000000..f705cb7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/METADATA @@ -0,0 +1,54 @@ +Metadata-Version: 2.4 +Name: mypy +Version: 1.19.1 +Summary: Optional static typing for Python +Author-email: Jukka Lehtosalo +License: MIT +Project-URL: Homepage, https://www.mypy-lang.org/ +Project-URL: Documentation, https://mypy.readthedocs.io/en/stable/index.html +Project-URL: Repository, https://github.com/python/mypy +Project-URL: Changelog, https://github.com/python/mypy/blob/master/CHANGELOG.md +Project-URL: Issues, https://github.com/python/mypy/issues +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Topic :: Software Development +Classifier: Typing :: Typed +Requires-Python: >=3.9 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: typing_extensions>=4.6.0 +Requires-Dist: mypy_extensions>=1.0.0 +Requires-Dist: pathspec>=0.9.0 +Requires-Dist: tomli>=1.1.0; python_version < "3.11" +Requires-Dist: librt>=0.6.2; platform_python_implementation != "PyPy" +Provides-Extra: dmypy +Requires-Dist: psutil>=4.0; extra == "dmypy" +Provides-Extra: mypyc +Requires-Dist: setuptools>=50; extra == "mypyc" +Provides-Extra: python2 +Provides-Extra: reports +Requires-Dist: lxml; extra == "reports" +Provides-Extra: install-types +Requires-Dist: pip; extra == "install-types" +Provides-Extra: faster-cache +Requires-Dist: orjson; extra == "faster-cache" +Dynamic: license-file + +Mypy -- Optional Static Typing for Python +========================================= + +Add type annotations to your Python programs, and use mypy to type +check them. Mypy is essentially a Python linter on steroids, and it +can catch many programming errors by analyzing your program, without +actually having to run it. Mypy has a powerful type system with +features such as type inference, gradual typing, generics and union +types. 
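The METADATA description above summarizes the workflow: add annotations, then let mypy analyze the file without running it. A minimal illustration, assuming a hypothetical file name of example.py:

    # example.py -- hypothetical file name
    def greet(name: str) -> str:
        return "Hello, " + name

    greet(42)  # `mypy example.py` flags this call: an int argument where str is expected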
diff --git a/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/RECORD b/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/RECORD new file mode 100644 index 0000000..7454e78 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/RECORD @@ -0,0 +1,1672 @@ +../../../bin/dmypy,sha256=NwHH6OI9PA85odoxAZVCQwCIIf0XJ8W2FExMy6o6n64,266 +../../../bin/mypy,sha256=WWILzIMHG_0CnG8JCjiP-Ex2Atpm71ntWIIs9LNizgY,262 +../../../bin/mypyc,sha256=zfo7c10zGKlFePY1XelEYyrDenbyIsCxawqDFykML_k,245 +../../../bin/stubgen,sha256=MRyQprMmgKjy1HZ2S0Ib6-1y3qaKAUr3Tj9bfDzXWho,243 +../../../bin/stubtest,sha256=Ncw_MrbojDtYQfktMxFL9Qa81WThlYBq2XjtNVOskxw,244 +4c842c94c09923bae9e4__mypyc.cpython-312-x86_64-linux-gnu.so,sha256=vIpypjzCP6RDQZ6cVVPj0dYWRsxsfq0T4tq1HuQsit8,33466896 +mypy-1.19.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +mypy-1.19.1.dist-info/METADATA,sha256=MSkWi-WDvJbz-sYqfCPz1P9lF_o64Ei694_aQFEp9nk,2234 +mypy-1.19.1.dist-info/RECORD,, +mypy-1.19.1.dist-info/WHEEL,sha256=DxRnWQz-Kp9-4a4hdDHsSv0KUC3H7sN9Nbef3-8RjXU,190 +mypy-1.19.1.dist-info/entry_points.txt,sha256=DKRnGYlnjnz9_6jxYhHskdeZLwNC69R-ZPVxv3b9dpc,179 +mypy-1.19.1.dist-info/licenses/LICENSE,sha256=_fHNF_UOEwXtjP03RnMS0nqYRkjbpiLO9wVztVEuThs,12842 +mypy-1.19.1.dist-info/top_level.txt,sha256=RwqDIYgjXyLD9BXvPxVD_num2GLz3HSCSXZ82WQEXu0,39 +mypy/__init__.cpython-312-x86_64-linux-gnu.so,sha256=m1XEESj2dzEYfxP8F-4g0BzVr4k7t_P14fvDWQxg6tM,15976 +mypy/__init__.py,sha256=4yp43qNAZZ0ViBpVn56Bc7MA4H2UMXe0WTVPdkODP6k,37 +mypy/__main__.py,sha256=OYmAgQIvrZCCYYZc1L4ZM_ZebZ5ZkcxqNeWkJG4Zg70,1061 +mypy/__pycache__/__init__.cpython-312.pyc,, +mypy/__pycache__/__main__.cpython-312.pyc,, +mypy/__pycache__/api.cpython-312.pyc,, +mypy/__pycache__/applytype.cpython-312.pyc,, +mypy/__pycache__/argmap.cpython-312.pyc,, +mypy/__pycache__/binder.cpython-312.pyc,, +mypy/__pycache__/bogus_type.cpython-312.pyc,, +mypy/__pycache__/build.cpython-312.pyc,, +mypy/__pycache__/cache.cpython-312.pyc,, +mypy/__pycache__/checker.cpython-312.pyc,, +mypy/__pycache__/checker_shared.cpython-312.pyc,, +mypy/__pycache__/checker_state.cpython-312.pyc,, +mypy/__pycache__/checkexpr.cpython-312.pyc,, +mypy/__pycache__/checkmember.cpython-312.pyc,, +mypy/__pycache__/checkpattern.cpython-312.pyc,, +mypy/__pycache__/checkstrformat.cpython-312.pyc,, +mypy/__pycache__/config_parser.cpython-312.pyc,, +mypy/__pycache__/constant_fold.cpython-312.pyc,, +mypy/__pycache__/constraints.cpython-312.pyc,, +mypy/__pycache__/copytype.cpython-312.pyc,, +mypy/__pycache__/defaults.cpython-312.pyc,, +mypy/__pycache__/dmypy_os.cpython-312.pyc,, +mypy/__pycache__/dmypy_server.cpython-312.pyc,, +mypy/__pycache__/dmypy_util.cpython-312.pyc,, +mypy/__pycache__/erasetype.cpython-312.pyc,, +mypy/__pycache__/error_formatter.cpython-312.pyc,, +mypy/__pycache__/errorcodes.cpython-312.pyc,, +mypy/__pycache__/errors.cpython-312.pyc,, +mypy/__pycache__/evalexpr.cpython-312.pyc,, +mypy/__pycache__/expandtype.cpython-312.pyc,, +mypy/__pycache__/exportjson.cpython-312.pyc,, +mypy/__pycache__/exprtotype.cpython-312.pyc,, +mypy/__pycache__/fastparse.cpython-312.pyc,, +mypy/__pycache__/find_sources.cpython-312.pyc,, +mypy/__pycache__/fixup.cpython-312.pyc,, +mypy/__pycache__/freetree.cpython-312.pyc,, +mypy/__pycache__/fscache.cpython-312.pyc,, +mypy/__pycache__/fswatcher.cpython-312.pyc,, +mypy/__pycache__/gclogger.cpython-312.pyc,, +mypy/__pycache__/git.cpython-312.pyc,, +mypy/__pycache__/graph_utils.cpython-312.pyc,, +mypy/__pycache__/indirection.cpython-312.pyc,, 
+mypy/__pycache__/infer.cpython-312.pyc,, +mypy/__pycache__/inspections.cpython-312.pyc,, +mypy/__pycache__/ipc.cpython-312.pyc,, +mypy/__pycache__/join.cpython-312.pyc,, +mypy/__pycache__/literals.cpython-312.pyc,, +mypy/__pycache__/lookup.cpython-312.pyc,, +mypy/__pycache__/main.cpython-312.pyc,, +mypy/__pycache__/maptype.cpython-312.pyc,, +mypy/__pycache__/meet.cpython-312.pyc,, +mypy/__pycache__/memprofile.cpython-312.pyc,, +mypy/__pycache__/message_registry.cpython-312.pyc,, +mypy/__pycache__/messages.cpython-312.pyc,, +mypy/__pycache__/metastore.cpython-312.pyc,, +mypy/__pycache__/mixedtraverser.cpython-312.pyc,, +mypy/__pycache__/modulefinder.cpython-312.pyc,, +mypy/__pycache__/moduleinspect.cpython-312.pyc,, +mypy/__pycache__/mro.cpython-312.pyc,, +mypy/__pycache__/nodes.cpython-312.pyc,, +mypy/__pycache__/operators.cpython-312.pyc,, +mypy/__pycache__/options.cpython-312.pyc,, +mypy/__pycache__/parse.cpython-312.pyc,, +mypy/__pycache__/partially_defined.cpython-312.pyc,, +mypy/__pycache__/patterns.cpython-312.pyc,, +mypy/__pycache__/plugin.cpython-312.pyc,, +mypy/__pycache__/pyinfo.cpython-312.pyc,, +mypy/__pycache__/reachability.cpython-312.pyc,, +mypy/__pycache__/refinfo.cpython-312.pyc,, +mypy/__pycache__/renaming.cpython-312.pyc,, +mypy/__pycache__/report.cpython-312.pyc,, +mypy/__pycache__/scope.cpython-312.pyc,, +mypy/__pycache__/semanal.cpython-312.pyc,, +mypy/__pycache__/semanal_classprop.cpython-312.pyc,, +mypy/__pycache__/semanal_enum.cpython-312.pyc,, +mypy/__pycache__/semanal_infer.cpython-312.pyc,, +mypy/__pycache__/semanal_main.cpython-312.pyc,, +mypy/__pycache__/semanal_namedtuple.cpython-312.pyc,, +mypy/__pycache__/semanal_newtype.cpython-312.pyc,, +mypy/__pycache__/semanal_pass1.cpython-312.pyc,, +mypy/__pycache__/semanal_shared.cpython-312.pyc,, +mypy/__pycache__/semanal_typeargs.cpython-312.pyc,, +mypy/__pycache__/semanal_typeddict.cpython-312.pyc,, +mypy/__pycache__/sharedparse.cpython-312.pyc,, +mypy/__pycache__/solve.cpython-312.pyc,, +mypy/__pycache__/split_namespace.cpython-312.pyc,, +mypy/__pycache__/state.cpython-312.pyc,, +mypy/__pycache__/stats.cpython-312.pyc,, +mypy/__pycache__/strconv.cpython-312.pyc,, +mypy/__pycache__/stubdoc.cpython-312.pyc,, +mypy/__pycache__/stubgen.cpython-312.pyc,, +mypy/__pycache__/stubgenc.cpython-312.pyc,, +mypy/__pycache__/stubinfo.cpython-312.pyc,, +mypy/__pycache__/stubtest.cpython-312.pyc,, +mypy/__pycache__/stubutil.cpython-312.pyc,, +mypy/__pycache__/subtypes.cpython-312.pyc,, +mypy/__pycache__/suggestions.cpython-312.pyc,, +mypy/__pycache__/traverser.cpython-312.pyc,, +mypy/__pycache__/treetransform.cpython-312.pyc,, +mypy/__pycache__/tvar_scope.cpython-312.pyc,, +mypy/__pycache__/type_visitor.cpython-312.pyc,, +mypy/__pycache__/typeanal.cpython-312.pyc,, +mypy/__pycache__/typeops.cpython-312.pyc,, +mypy/__pycache__/types.cpython-312.pyc,, +mypy/__pycache__/types_utils.cpython-312.pyc,, +mypy/__pycache__/typestate.cpython-312.pyc,, +mypy/__pycache__/typetraverser.cpython-312.pyc,, +mypy/__pycache__/typevars.cpython-312.pyc,, +mypy/__pycache__/typevartuples.cpython-312.pyc,, +mypy/__pycache__/util.cpython-312.pyc,, +mypy/__pycache__/version.cpython-312.pyc,, +mypy/__pycache__/visitor.cpython-312.pyc,, +mypy/api.cpython-312-x86_64-linux-gnu.so,sha256=Xxgyb-Z9IbxaJcoR_vERAECBgbyOEIOgIG3nOhbcLdw,15976 +mypy/api.py,sha256=z1YRAJA2Tk5dvAspKo4yCkan0fB6OSBtQq-qKQEMEBM,2922 +mypy/applytype.cpython-312-x86_64-linux-gnu.so,sha256=5HTWNfCVxcvNWWztZV2azr0run7C60BnRn4Zh830M5w,15984 
+mypy/applytype.py,sha256=eMJK6laJsNW00B6yYf7NpHdJH3r598FWblTXUps8688,12050 +mypy/argmap.cpython-312-x86_64-linux-gnu.so,sha256=OlCbId8AVk21xRUCN5yeFSBP9t5xKRNCEPhaXCGOyh4,15984 +mypy/argmap.py,sha256=hmYW-Y33Hi94PZ7PNzDhlIGq9bYij43uUS75Ss5cDkc,11421 +mypy/binder.cpython-312-x86_64-linux-gnu.so,sha256=oqZ76S2DfL2NcHI0TTOxJRK9MolWgPDQZLgrj2DvIm0,15984 +mypy/binder.py,sha256=Ra81MKR49qoulA8MitVEqp0KcJQJ_BSh79XuIbWPV9A,25339 +mypy/bogus_type.py,sha256=w3GrsWoj5FKbfEUsc87OVFO812HC9BvnWnSaV2T4u1c,816 +mypy/build.cpython-312-x86_64-linux-gnu.so,sha256=oVE95K4Wh4X4eqtDX9BzMF20O_Jw4g8nznMu5A-uNDo,15976 +mypy/build.py,sha256=vo8qhmMvR-bnawE55RZ6WsasmHRfqPPcbxyL7N-IaX0,150414 +mypy/cache.cpython-312-x86_64-linux-gnu.so,sha256=KdOO2qEnwBGu-7M5LbebIaobz-XHYF4bHS60O5SiWK0,15976 +mypy/cache.py,sha256=xncDGLICIvP0uvUtKWvnXp5wo67BPZn2CnUdtxyg3ac,17959 +mypy/checker.cpython-312-x86_64-linux-gnu.so,sha256=bCbz3LrhFkaKilMVREiH1KFk11WITsC3qMgEOE-_XAs,15984 +mypy/checker.py,sha256=Ii3h8Eaiq9zhql9TjRnUj1TeWG9GjCdyG2ElhLYht0s,423500 +mypy/checker_shared.cpython-312-x86_64-linux-gnu.so,sha256=QKnCf4pL6s_PQgTNESK-qfvHigxk3HroGh42-E8wzOw,16000 +mypy/checker_shared.py,sha256=5t2iWNPSnFkEI8Ik54017gV-RVZqPJP5-_SFvlVQkU4,10231 +mypy/checker_state.cpython-312-x86_64-linux-gnu.so,sha256=uI2EaIQzKn7znsmfjolbWse0Q0c-6ctEh1gYOw1RV1U,15992 +mypy/checker_state.py,sha256=JqVXZnHaqh0oWmLIWPZdLEct9lUCUe9gUFCTu8WT0hQ,858 +mypy/checkexpr.cpython-312-x86_64-linux-gnu.so,sha256=vISg9zp8LB7lWKeljr3FJRN8-ZePT-QrOO1RbG5326I,15984 +mypy/checkexpr.py,sha256=mfXXaIuHR4Wb2iFvD1904k6dtuO_o5MnxkXmvYVF-yI,303067 +mypy/checkmember.cpython-312-x86_64-linux-gnu.so,sha256=0HjUN3zu-w92byXO9PP52bZmwOgXWVwBpXqMAM8Mv50,15992 +mypy/checkmember.py,sha256=Pf0wcCPSsbm_-BAPK9X4uB5dUjohdKEQtZ0HM6oVJ6E,63927 +mypy/checkpattern.cpython-312-x86_64-linux-gnu.so,sha256=EEu0rtn9lOBwDi8YgXNGnHGG4q04xRaIUUxrD5HuNdk,15992 +mypy/checkpattern.py,sha256=I9Ye1LEzsjdxJin4uwNfJbFDHDjS0pu6ylDt8XcPy34,34405 +mypy/checkstrformat.cpython-312-x86_64-linux-gnu.so,sha256=K8qe6_jTn7kM9KmT6quNHkPs_CzeeGSo0VvSlvQ9UnY,16000 +mypy/checkstrformat.py,sha256=lPli7E3DjV7pXkl1QFk_GctqB4g_7mR-ijd9WYGkHc4,46022 +mypy/config_parser.cpython-312-x86_64-linux-gnu.so,sha256=cADglytAzEz5ejpqGmN1rn_IZGIL09LodMkOokgmID0,15992 +mypy/config_parser.py,sha256=8iT4nFBIWmITV9MFcsG0ySmvUgV-XcgbMnM2JFy6owE,25973 +mypy/constant_fold.cpython-312-x86_64-linux-gnu.so,sha256=CEAVndrU5c3r81-_vRQhmX5RPKO024sDvCWxPkfswqc,15992 +mypy/constant_fold.py,sha256=tAkvl9svLCOKMRZQnnUKdMUhU5bEBZmtBK89dtrPKmo,6071 +mypy/constraints.cpython-312-x86_64-linux-gnu.so,sha256=73JQSACejcIpxCLCbwzSOhuW_KSF2FnMPvW9PTEEthI,15992 +mypy/constraints.py,sha256=zHOfx5SfIqYDOZgAQOpOojUVbASkotOi4-23olRxo-Y,80095 +mypy/copytype.cpython-312-x86_64-linux-gnu.so,sha256=picxdA4eoDqce-tG0N713_Qt-r7XFcrfyLDNAa5Sy9o,15984 +mypy/copytype.py,sha256=AutWOFSpMH1P5eQYS31GusfkAXXbtZzv6I76KBo5Q-4,4480 +mypy/defaults.cpython-312-x86_64-linux-gnu.so,sha256=7uu-eMfXzVw_qBVG3f-UseKYwmDlhwhPmFxvdeSnKeM,15984 +mypy/defaults.py,sha256=C4WViGX7PL5antn2rfGmVYyM2yH7S2HkiSFQIboiwwQ,1493 +mypy/dmypy/__init__.cpython-312-x86_64-linux-gnu.so,sha256=CO8WdfQb72595el8xCJt1FPjGkWrQ8dWjPnzoPa1kGM,15976 +mypy/dmypy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypy/dmypy/__main__.py,sha256=u6ZYw52bfIJ11Oo88mzx7p4WD2WUo1-H9PRsT45eswU,128 +mypy/dmypy/__pycache__/__init__.cpython-312.pyc,, +mypy/dmypy/__pycache__/__main__.cpython-312.pyc,, +mypy/dmypy/__pycache__/client.cpython-312.pyc,, 
+mypy/dmypy/client.cpython-312-x86_64-linux-gnu.so,sha256=uz8PKDysHdwd7ffpZKuWRn4HiuR1yfnfnykM51oJpFI,15984 +mypy/dmypy/client.py,sha256=s_da0HPrspk-YKuSW69zCJby128Knt-JS9eFSjvSNjA,25061 +mypy/dmypy_os.cpython-312-x86_64-linux-gnu.so,sha256=GiGNHoe180Rta92MXB-3e6HT99412Nj8neXlmf6PtfM,15984 +mypy/dmypy_os.py,sha256=nmfi-HdtYmmEa06t2516asBL-ozX048daR1OxaxFU9w,1154 +mypy/dmypy_server.cpython-312-x86_64-linux-gnu.so,sha256=junW6zdrt65Iq-oP2MpfnEK4U_aW9nu_hxcwTaUkjVk,15992 +mypy/dmypy_server.py,sha256=0IJfT-P0b_JzO23RZY3w_Nbi-PaVDlyU5pccTLlM3_4,45625 +mypy/dmypy_util.cpython-312-x86_64-linux-gnu.so,sha256=ZvLaAzUXreTXhd40Qv1dRjq7ODC72LL-Pz-tEdU3rtc,15992 +mypy/dmypy_util.py,sha256=Qmmxof6Jdzy5zD6lX4-DF47dqAJNmoRmLYy_lJag9PY,3006 +mypy/erasetype.cpython-312-x86_64-linux-gnu.so,sha256=HJj5U-0uX0wFSJ7yV5QI-YBObj-sqysmMzFMIIRhJ8o,15984 +mypy/erasetype.py,sha256=6-VNPfOh_4KJAUCz7rnshtfThD6bKrU59AKHprh-D9c,10768 +mypy/error_formatter.cpython-312-x86_64-linux-gnu.so,sha256=tTrVtfZTDFhWKViP6FfdRJ0GRdsq1jK6qymlJKU7Lvs,16000 +mypy/error_formatter.py,sha256=IcZbZr67gf7zR08DxD2K4rLV_Eb37dt6oy7LQSq5ai4,1115 +mypy/errorcodes.cpython-312-x86_64-linux-gnu.so,sha256=zZyWuF1Plb2G2nqHIhWT4RbxmOib6vgJTwqAsM1JyMk,15992 +mypy/errorcodes.py,sha256=j3o5vkuKvYOHdBy8a4RjQilkQzuupFyp-u2wn8nHrcw,11884 +mypy/errors.cpython-312-x86_64-linux-gnu.so,sha256=_nrFF10rV8Un9nf0o9PuNAEDOlbTO2iR_Nz5N5nOoBA,15984 +mypy/errors.py,sha256=jqseQ7ztLHZdQunQGcUuwk7ivFruvFkdSOj6MSzJnys,54259 +mypy/evalexpr.cpython-312-x86_64-linux-gnu.so,sha256=PSPdqkI4Qw6jcHOmvZiEHoGZU7bkp-1DGL7n4nJW3ck,15984 +mypy/evalexpr.py,sha256=I_4zK129Uq3eLhuG3--yubLp2wqWw_yyudi_U_O1g1s,6660 +mypy/expandtype.cpython-312-x86_64-linux-gnu.so,sha256=8qKgK5cj-iMckDkfda1PS9aS1Fv54eLZ_n6tnchR1-0,15992 +mypy/expandtype.py,sha256=RtAFv4w10GzUISChU728P9Y4XxNL5WE2RDdEsS95YPw,25023 +mypy/exportjson.py,sha256=68J4_a6BBx6PtCJ-Wq6hyq--i9WQCr2reeGpkyAyWpU,18956 +mypy/exprtotype.cpython-312-x86_64-linux-gnu.so,sha256=LAOti4BqAf73myXEAgElHDzThQK09Kz6txQzIo9e9gg,15992 +mypy/exprtotype.py,sha256=QfnHWJINWbTgUmK8_2KNGHoYjvEG1t6Rb5ccgeqoIAA,10589 +mypy/fastparse.cpython-312-x86_64-linux-gnu.so,sha256=gqfYq9NMz3LdA0MCHxxKGAqqng1Jlcz_hXXCGPCiqcQ,15984 +mypy/fastparse.py,sha256=5yh0mQmn8hqzS3aKwgV8qfVBg5R_CVO8LRurK_y8Dlc,87829 +mypy/find_sources.cpython-312-x86_64-linux-gnu.so,sha256=zM83-XTHI0k2EoxKC-lbiYFkudMEF2JHJm0cNjqgtl4,15992 +mypy/find_sources.py,sha256=PLaqMG2qbDE_C6XPDJM43u1JlYGTh0DNA5V_QrtbfpU,9630 +mypy/fixup.cpython-312-x86_64-linux-gnu.so,sha256=qCYAelBdTnej9WJzODgoQK3zwE1eJ0LHYGGSjfvt4e0,15976 +mypy/fixup.py,sha256=OCBOWLUaOv38M-7Lp_5CZ6lexfvp6D4-s_vfcy2TiM0,16600 +mypy/freetree.cpython-312-x86_64-linux-gnu.so,sha256=EYfUCMEJ2M86BC7g9bUz2LKrfE4ZorKp31lguIV0eTI,15984 +mypy/freetree.py,sha256=yz4_ZUq8Ja89176nbDEAiBk-Et2nP74_KXyCcaW4inA,617 +mypy/fscache.cpython-312-x86_64-linux-gnu.so,sha256=8cC2pMxNF6axCOfgz3pHZ8g3Ok2S9JoXaHHLFruExQM,15984 +mypy/fscache.py,sha256=9YJBIKHPjk8Vzd9Yube6sXIzxrOdq-b2bG8kebzAUV0,11102 +mypy/fswatcher.cpython-312-x86_64-linux-gnu.so,sha256=gTPWbcxf6fC0szbONrJnAIXQpnU03U04JBm-1g0xPFU,15984 +mypy/fswatcher.py,sha256=FSTEaV9NmgNZArX_A9Wox7wofa5vg9-GPgTEZWqx3yY,3985 +mypy/gclogger.cpython-312-x86_64-linux-gnu.so,sha256=xlz0UV1yBzbi85pOicD8NlSNC_KuyjaIjWe_tOEehuk,15984 +mypy/gclogger.py,sha256=E-xdukA7h0ttgwFquruln_thKmREjbYA3dIkj8fYC-k,1639 +mypy/git.cpython-312-x86_64-linux-gnu.so,sha256=CqnckF9SnbOwYY1BD1cJbfxW7xyZLW_TMMz0JvwdiFg,15976 +mypy/git.py,sha256=FYdMg-3fTtikKjUwfFPXbWiNmpOIMG4rNgMAWIPBsLM,980 
+mypy/graph_utils.cpython-312-x86_64-linux-gnu.so,sha256=Vjzn---oSbue868sfurrezA2PUsT-QlF39vMg2WhS4A,15992 +mypy/graph_utils.py,sha256=W4cTVJceWHzGZAbOu-ceqMfBG9ss6KGc5haqcX0CHEQ,3446 +mypy/indirection.cpython-312-x86_64-linux-gnu.so,sha256=m9B5NgQCaxVH3kgks0PidP3UlbLEWGbY6-6YWSePPhc,15992 +mypy/indirection.py,sha256=Asc2jTbTpbDkoMcYun8G-OTYcq1xgeobeVbgE-1BA7c,6023 +mypy/infer.cpython-312-x86_64-linux-gnu.so,sha256=__65vNR75x8ZEI0dLgaid-5Ri_sYNqf5HiBsOjvhI5k,15976 +mypy/infer.py,sha256=J8bcCjYFX7VZ4UjazbYC9SmAR2LA8eczsLvh1Ay14Yw,2538 +mypy/inspections.cpython-312-x86_64-linux-gnu.so,sha256=RL98zdURWRgcKnFL59MraJPD7XRlDS8MI9lDsx-I-j8,15992 +mypy/inspections.py,sha256=pGC15_FgqICztifun5vPwb8FePsgT1VY4CeFif7Xi_Q,23804 +mypy/ipc.cpython-312-x86_64-linux-gnu.so,sha256=8DIOJGYqV4UuYFjtCEGeK4lDJktPDPfsoNCpXDkDmFA,15976 +mypy/ipc.py,sha256=jKYEifG-WqVCmXurD13ULZZGR8xQuzrDS4RmL2huZkU,11899 +mypy/join.cpython-312-x86_64-linux-gnu.so,sha256=0WGnvozB--RCr9gbyu0gdshv0mjU1qCpitE9sGJm4JI,15976 +mypy/join.py,sha256=tL7xIo9b0ExFhjGhN90Okxjk9KLqFD5JZo9EXkNz_N4,39146 +mypy/literals.cpython-312-x86_64-linux-gnu.so,sha256=v1bPDGmz31RIEW4TzgtNFlG3SyYlf6VPS3u2duhpTY0,15984 +mypy/literals.py,sha256=zeHH95bfJTo4fiRJvC4qbCB7jnGLrqNouacHGBbUtPg,9345 +mypy/lookup.cpython-312-x86_64-linux-gnu.so,sha256=c9dwKtlmu6BEpty_up5PudXsNh3u7LahRMXi4nZpdys,15984 +mypy/lookup.py,sha256=spk-4e6hKwJgodm9Dr9QM7_XqtX2FecWAJgFg56mdQ0,2228 +mypy/main.cpython-312-x86_64-linux-gnu.so,sha256=pS_mF3Rbf9Hiq_V1CA7O-fJfRaISbGik37RPlOOpzxY,15976 +mypy/main.py,sha256=8JU388A9Cc2EJiPoIeh7bUvlQjuf55pPa112SLQg2ys,64296 +mypy/maptype.cpython-312-x86_64-linux-gnu.so,sha256=dqvA2Qn5Zp4hfOxHG1xmAGco-TV5v8Y_zz34Obapmbs,15984 +mypy/maptype.py,sha256=USEg3N_4LCesekOVOLhwFoq65urhcR5CotSkprcJleU,4331 +mypy/meet.cpython-312-x86_64-linux-gnu.so,sha256=1Xnp473ETumcRcFnTarDl1QRBzwtcaeP9z68VWb9_Ww,15976 +mypy/meet.py,sha256=Qan72cxYsjfWZbZLtXtqYHkj-iiVMzWM-pH27vxUmek,53950 +mypy/memprofile.cpython-312-x86_64-linux-gnu.so,sha256=4PcKpCekXc3sW_fM4wLo_g65KE3WM1Jbi7qsyW6tzFw,15992 +mypy/memprofile.py,sha256=Ar4FwaVBON42iT2OHHoj6_G5VL1YNnPAZU_cp4mgll8,4174 +mypy/message_registry.cpython-312-x86_64-linux-gnu.so,sha256=sGeskaK-zRLvlXETMSzYC7p0BdZ8Z4WocvAS2yKy3m8,16000 +mypy/message_registry.py,sha256=S-TozJ0KiYuHD0p6X8pizYT1KCFlmQLd8kc4HUPu4nI,17200 +mypy/messages.cpython-312-x86_64-linux-gnu.so,sha256=_VFA2c5ynyYt76kHQFfA2mFrsjQI6xMQtzBwYTDBqr4,15984 +mypy/messages.py,sha256=_j2CDTesiahqtGOAL1EYgPWa6mKzIzxLfQtgYaJxNoM,136729 +mypy/metastore.cpython-312-x86_64-linux-gnu.so,sha256=4uJZ3DoC8tYdHdIKoH8p9nV2zN15GVhJTdCNg9LJyUs,15984 +mypy/metastore.py,sha256=ZVHGjiLy8eDaNpBQmubUC50g44vmu-7G2mkKwO8zWts,6598 +mypy/mixedtraverser.cpython-312-x86_64-linux-gnu.so,sha256=mI-VRKaRYm2P07er2apWqCTJXylxnZ0ZX_4T8fW-Y84,16000 +mypy/mixedtraverser.py,sha256=arCWrDAQzA36GJA-iQSzbXQThi-Pf2p65JTQlew9RNA,3821 +mypy/modulefinder.cpython-312-x86_64-linux-gnu.so,sha256=27Iv1-KrFDj7bsHzxBmolsdCETLpN6KyLcCXd4u0WKw,15992 +mypy/modulefinder.py,sha256=LJUEosy6zWT6QVq8IVjL4rCcmak56HXGnpvB1nmwN2w,42896 +mypy/moduleinspect.cpython-312-x86_64-linux-gnu.so,sha256=ujcPUS-otwRaiWNoHfO9MjfznTHf8y4Q8mwtweActqg,15992 +mypy/moduleinspect.py,sha256=HCEI7yW61OkMNFqUqjuRB09HcTDpalcmtVBYjlWfxyo,6326 +mypy/mro.cpython-312-x86_64-linux-gnu.so,sha256=_oRSr4LNgUoKDfE1hmS_bGScicHwYDasvfMacALHdCw,15976 +mypy/mro.py,sha256=Mj_6Ke6W-s2ordeoLWS-LAya3-LUNTv-p2iHFcyxF1A,1993 +mypy/nodes.cpython-312-x86_64-linux-gnu.so,sha256=xUONQX2mDFm0wnmDmXvYdulqxGuo__VBDW3I9typ_yY,15976 
+mypy/nodes.py,sha256=hGal95-q-xN1hPJzG2uPfZo9JxdgQYZnz2NEyyCStqE,169274 +mypy/operators.cpython-312-x86_64-linux-gnu.so,sha256=H0tLzg17mq6mp8qg7ZYpUnXfFaFO-zDyrRSGZPlm4nw,15984 +mypy/operators.py,sha256=BHafr2ENZYPmUytEgKOYMS1GwPKFebWBs5pnk8pyZk8,2866 +mypy/options.cpython-312-x86_64-linux-gnu.so,sha256=p6o6_6-OEt1ESiNLP6dJV3lDOYVbQky-s-Ko46_MoXo,15984 +mypy/options.py,sha256=nM9G_1qKxn-1pNazKR0TV1bCYRZiov0VLc29qLWJ02Y,26211 +mypy/parse.cpython-312-x86_64-linux-gnu.so,sha256=jm7sdg8jnnjP7d464K-FY7GUIhIJ1_8yLUOVwwj9gHw,15976 +mypy/parse.py,sha256=jj7RqYXwGzUCeU6s9ynMtSrk6q7PSWCbBhgt_UI6a8U,913 +mypy/partially_defined.cpython-312-x86_64-linux-gnu.so,sha256=xEgoE9BWlokEue916_lmFAHHRH143P-hXnD7zA-Gv34,16000 +mypy/partially_defined.py,sha256=My8WOKKNxxBo_riHLLnpoyp1zPosvt3Pap5igoeuYIE,25609 +mypy/patterns.cpython-312-x86_64-linux-gnu.so,sha256=mP8n0Gb6yFbNHrovZAvTIiOyjHmood1FmXM8wUdRP-I,15984 +mypy/patterns.py,sha256=epS_R9Fv5mnSAGsc6EtUxtmo34_DD1lJ5BevTdMn8Ak,4048 +mypy/plugin.cpython-312-x86_64-linux-gnu.so,sha256=c3mLRrCXWu4_Q87sbfrYkGMjlSZaPpgy8KIJ2YnkKwU,15984 +mypy/plugin.py,sha256=t0opRSnX-2N5lVzHZR8txWH9fn-JhnmjtBd2m-Clx-Y,36223 +mypy/plugins/__init__.cpython-312-x86_64-linux-gnu.so,sha256=Ym5AkE1QBdc2RCNmgpkJH88HMLZRQ0e03rIC7bPFs80,15984 +mypy/plugins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypy/plugins/__pycache__/__init__.cpython-312.pyc,, +mypy/plugins/__pycache__/attrs.cpython-312.pyc,, +mypy/plugins/__pycache__/common.cpython-312.pyc,, +mypy/plugins/__pycache__/constants.cpython-312.pyc,, +mypy/plugins/__pycache__/ctypes.cpython-312.pyc,, +mypy/plugins/__pycache__/dataclasses.cpython-312.pyc,, +mypy/plugins/__pycache__/default.cpython-312.pyc,, +mypy/plugins/__pycache__/enums.cpython-312.pyc,, +mypy/plugins/__pycache__/functools.cpython-312.pyc,, +mypy/plugins/__pycache__/proper_plugin.cpython-312.pyc,, +mypy/plugins/__pycache__/singledispatch.cpython-312.pyc,, +mypy/plugins/attrs.cpython-312-x86_64-linux-gnu.so,sha256=k_-XLGnqbnsOXWNBX1IC0e-gntoCKh8peafcSzhGenw,15976 +mypy/plugins/attrs.py,sha256=BJs7kJfWR0eic-GMhyJhfi89aHYMuTQi-lzb-vdSvNQ,46406 +mypy/plugins/common.cpython-312-x86_64-linux-gnu.so,sha256=UD4fATM3V7PNvTOgx2yKo7BP3zA9Whz-8FSBdfxiEOw,15984 +mypy/plugins/common.py,sha256=ikFQb2netqmmSW_72Ydz7j1TeUqG4teFuP1DnpOOLDY,14117 +mypy/plugins/constants.cpython-312-x86_64-linux-gnu.so,sha256=K893mfXFhmamnFBGIb8l7a6X6qSPokYtL6yPnDM_7vk,15984 +mypy/plugins/constants.py,sha256=EhWnIlkU1lHkF6CwlVqEitQE9dM9PfdZn35bHMI37jc,819 +mypy/plugins/ctypes.cpython-312-x86_64-linux-gnu.so,sha256=B9RCOiyTvNzBrcBTQ_N29y-JhMMxZ1rEQ0QnByY0NNc,15984 +mypy/plugins/ctypes.py,sha256=uB84xNCEzCVfeKjC7FHy7dFF5o55V3L-Rve9OD3YoxM,10675 +mypy/plugins/dataclasses.cpython-312-x86_64-linux-gnu.so,sha256=AC7Xcm2PmX3LZ8WBtF4IosPLvoyMQw5kLUIsiGQ6rMI,15992 +mypy/plugins/dataclasses.py,sha256=6qtMaGvWGFCfOi4UEfiyP98AGTkxQtwbIWGWsESMS5s,47043 +mypy/plugins/default.cpython-312-x86_64-linux-gnu.so,sha256=UG2-xrQlPXwJWET5z277zdzEjM-4yzB5M3HRz7jI1uY,15984 +mypy/plugins/default.py,sha256=7W4F5uJyofiEgUG3VZ8P699eRYRXanej_6JnsyPjwOI,23434 +mypy/plugins/enums.cpython-312-x86_64-linux-gnu.so,sha256=ScEwKJ89Y8G943-zs0S1omRF4WSjOIZx9kc-2hjsO6g,15976 +mypy/plugins/enums.py,sha256=imBuRkH7BA4reJfam0fZMZrftF5SI7E5lR9ph3wzOn4,11946 +mypy/plugins/functools.cpython-312-x86_64-linux-gnu.so,sha256=WivMy5FdYDr_SUzBi_J6GqDKiC1FZ0rKbv61NkcrAGg,15984 +mypy/plugins/functools.py,sha256=Rn7mQbpxYwdfSiCbAUQ58EzNKFT0j2FlD7AhMxxWa7Q,15282 
+mypy/plugins/proper_plugin.cpython-312-x86_64-linux-gnu.so,sha256=uUOg5xolkKx4587q_eASerOG0hQ-tpQrIbEPvCKVo48,15992 +mypy/plugins/proper_plugin.py,sha256=6a6HpkclPn6yMtSSFjhbUpj5T1aboHqcHKGYK1m7Rkw,6565 +mypy/plugins/singledispatch.cpython-312-x86_64-linux-gnu.so,sha256=9stf2DyddIusWx8AH2kEJQAl6gI3mbGWET4aXlmUmm0,16000 +mypy/plugins/singledispatch.py,sha256=WRjVoLYAHYp4h9Iu6yJBIT0S8L6ZTxgvLMpQYUR0IbY,8193 +mypy/py.typed,sha256=zlBhTdAQBRfJxeJXD-QnlXiZYsiAJYYkr3mbEsmwSac,64 +mypy/pyinfo.py,sha256=URtMQq4FxPkrPWB2jd8wQGDegZFoIvO8jM_AWTayOiY,3014 +mypy/reachability.cpython-312-x86_64-linux-gnu.so,sha256=k9a-10m_SMMCOit3IczjTyHmnkO0KJC2MYEtQO1mXRU,15992 +mypy/reachability.py,sha256=nNq3O3fS61NC-jDLaQ3jlGLuM13AEL9j2TmRAPavpAM,13013 +mypy/refinfo.cpython-312-x86_64-linux-gnu.so,sha256=FZFhsxpCdqWLU14oBGNmwx8MECUuGN0Jjb9tRrhVZI0,15984 +mypy/refinfo.py,sha256=qaWKWtkgdlYQVxNucU3KG-4U2OsOBCSG8K8lqupc2S8,2784 +mypy/renaming.cpython-312-x86_64-linux-gnu.so,sha256=zv_ExFyYg80gRxqdfceHtiAz72fnOkxJdhuhJr_J15k,15984 +mypy/renaming.py,sha256=-Ju3NKpUnJyvU8FSzZR-KgipUAIpRhrLiIVE2395NVo,20494 +mypy/report.cpython-312-x86_64-linux-gnu.so,sha256=nP927DDazm3gCXLSvFe706qAwrEncqzgNSxTxCz52LY,15984 +mypy/report.py,sha256=vpTB3Bjiwhe8SYU7HrWHAWkCjP1J9PK4Qrn9dO0IEqA,34525 +mypy/scope.cpython-312-x86_64-linux-gnu.so,sha256=24oAxt2qNBySv-G6jQ1FSF4easj2v-DXQe6vt-ofQeU,15976 +mypy/scope.py,sha256=ckiJe7zPlRx3IGmw7qga7VOrCq8f7SiGvJ0WTaA88vE,4278 +mypy/semanal.cpython-312-x86_64-linux-gnu.so,sha256=qN_1ALzjAFkD8Vs3PJNtaa06Wy9j_ecSwVr-lf8UnPQ,15984 +mypy/semanal.py,sha256=KMvIvdmDyzSbBgWbwiQDZ_gvbZezRNS2hrdpj53Awm8,356731 +mypy/semanal_classprop.cpython-312-x86_64-linux-gnu.so,sha256=ymwdpxIijQeaAyYRaVMWYd6FBCafyTJaJuwGEuut0No,16000 +mypy/semanal_classprop.py,sha256=81ClR1KA27TDEIl04vN3cpLPTVMEJphFiGp4n87kRjY,7673 +mypy/semanal_enum.cpython-312-x86_64-linux-gnu.so,sha256=GCFAvA3djfNDP2b-CmgTQdeOfPpmftTIoss7NkYHbt0,15992 +mypy/semanal_enum.py,sha256=NfHeW7rlHu0qzuXOOWFkCrITCHsNcMJxZDb58hNobCw,10197 +mypy/semanal_infer.cpython-312-x86_64-linux-gnu.so,sha256=y3rKsbGp7-D67emGMYVN6wYlKNva26hifSeW5BUxX00,15992 +mypy/semanal_infer.py,sha256=ysch4tIEiwxefw_1LPSqB2wTyoJbnlOmCfO4Iq-_bC4,5350 +mypy/semanal_main.cpython-312-x86_64-linux-gnu.so,sha256=5Lrc2MJqF6IC6R_72Hh_OIeYuPvbvqRa4jE6Nf928GU,15992 +mypy/semanal_main.py,sha256=BBaAGy8H06Nmv5aXkOHR4L1_SczYMGaQbOWaBiUeizA,23109 +mypy/semanal_namedtuple.cpython-312-x86_64-linux-gnu.so,sha256=ZwhCdmkGjTjj9PtBlZuMKHhCzn2Efzjhv1SljLzlcc0,16008 +mypy/semanal_namedtuple.py,sha256=eJrwRcPiWfw3tgeCqbwsUNLTbQr_ObD4EnYwgtoMrEY,31406 +mypy/semanal_newtype.cpython-312-x86_64-linux-gnu.so,sha256=9CeGCsSlU3-2QFaNcIUatwVL-_YAVUgWZhx1luknE78,16000 +mypy/semanal_newtype.py,sha256=-kKdzbYvTuUpRqKMN9GpFSBqkKGZqFIDKf57pkoscUs,10576 +mypy/semanal_pass1.cpython-312-x86_64-linux-gnu.so,sha256=xnn0H7xA-EfdNqwNdBHJmkLu_aNOtmUEYEgularZWZU,15992 +mypy/semanal_pass1.py,sha256=6rhzEF6V7orOsgjYsnGHz8lqaG3TA-gr8QFA_ytihkc,5584 +mypy/semanal_shared.cpython-312-x86_64-linux-gnu.so,sha256=4ZmKocs5dkxl_5Ei4nzQhrBVzFjYA2cF2AkAMRJ7bi8,16000 +mypy/semanal_shared.py,sha256=2GKXAPJvzoNsD44ykGOu7PWvt0TPpvlKUfeS7izY_Zw,15768 +mypy/semanal_typeargs.cpython-312-x86_64-linux-gnu.so,sha256=s-FuB_RAnPp2wWMQPM3pEf5wY_Y-bxxB3nuHEQf7CHM,16000 +mypy/semanal_typeargs.py,sha256=HbN7bEq355B6ExJCjQ_zQPr84I9_puug6nUcmocI4vk,12876 +mypy/semanal_typeddict.cpython-312-x86_64-linux-gnu.so,sha256=TZEEsPD_aK4OY-7IXQMSJ4QQsizoWE6fGlbTz1mTR4g,16000 +mypy/semanal_typeddict.py,sha256=dbikIKON8YQ2mwqvJfX6zuLRF9vXU5RnJGUDg16V6jM,26079 
+mypy/server/__init__.cpython-312-x86_64-linux-gnu.so,sha256=UKpaCFGrxJGKvboy4YMS2CLFwY0a8Xi37r749c38HxU,15984 +mypy/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypy/server/__pycache__/__init__.cpython-312.pyc,, +mypy/server/__pycache__/astdiff.cpython-312.pyc,, +mypy/server/__pycache__/astmerge.cpython-312.pyc,, +mypy/server/__pycache__/aststrip.cpython-312.pyc,, +mypy/server/__pycache__/deps.cpython-312.pyc,, +mypy/server/__pycache__/mergecheck.cpython-312.pyc,, +mypy/server/__pycache__/objgraph.cpython-312.pyc,, +mypy/server/__pycache__/subexpr.cpython-312.pyc,, +mypy/server/__pycache__/target.cpython-312.pyc,, +mypy/server/__pycache__/trigger.cpython-312.pyc,, +mypy/server/__pycache__/update.cpython-312.pyc,, +mypy/server/astdiff.cpython-312-x86_64-linux-gnu.so,sha256=e3VNL2BMSGSacmZHVDLX-6L565W0-Z-0yGowr_bFOx8,15984 +mypy/server/astdiff.py,sha256=qagHsZVhqdr86dyoeE_EtrgR05_2P91Bz2rQR4Xt11A,21395 +mypy/server/astmerge.cpython-312-x86_64-linux-gnu.so,sha256=F_blPr0ewXsLucLyn5YrVOj9fEnzxtIKhQLtmUL-qBA,15984 +mypy/server/astmerge.py,sha256=4_uLEbB2qQhdorpr8EwrGtBnVk5Dw8PhjEkuBlmHThw,20982 +mypy/server/aststrip.cpython-312-x86_64-linux-gnu.so,sha256=8K4JM_XzsJVFV75p6NavYSXGpj8njE85P0ckKvYiX0w,15984 +mypy/server/aststrip.py,sha256=F0F2V0HdW-YQJP3i9KlzSYe3y3J1JHaQ0kRu4MkHPhk,11289 +mypy/server/deps.cpython-312-x86_64-linux-gnu.so,sha256=xRVqcwY1LGnxCmd6tEnkCrOhVQj5gx25uLycXrw6rQs,15976 +mypy/server/deps.py,sha256=MJxOLZF08Ta9XgzBh982CVtknZ2DJ9QL928v36PBFiM,49683 +mypy/server/mergecheck.cpython-312-x86_64-linux-gnu.so,sha256=nOeScWohS7FQIzG_wXde3x7SB7FHqmsIw3WKytPo11o,15992 +mypy/server/mergecheck.py,sha256=yFpGbyK9JX_5VCB9V0Zz-b3o__PKmejRwgAL9LF0bUc,2757 +mypy/server/objgraph.cpython-312-x86_64-linux-gnu.so,sha256=jJ89JsGTb2EJvKTRfegA5cklVUfSxGGddbxWYULdL64,15984 +mypy/server/objgraph.py,sha256=l2otuEtyy6J67pfWgU17dg8LIWkDqP34YrBNjDrjytc,3230 +mypy/server/subexpr.cpython-312-x86_64-linux-gnu.so,sha256=QDUfmbQxMkV04pZqt48KLHXcAGiQeH8gLQc7TthyXBY,15984 +mypy/server/subexpr.py,sha256=KNfSVV1sAR1T7TOexD2R99lAptQuHh2o0f5A7uZYb4U,5342 +mypy/server/target.cpython-312-x86_64-linux-gnu.so,sha256=dN3bnxLUu0q7D1gocbgQdZ2FcLo2zDeP0_L7CWKXImU,15984 +mypy/server/target.py,sha256=IbuK2qqtMvEPAof83BdgYJv6AGW2q_o4CQxC5TnB-Bg,273 +mypy/server/trigger.cpython-312-x86_64-linux-gnu.so,sha256=Hs18p8JOFvPdrjriXIWlI2t12ayzLgQEHu4fk91DkKI,15984 +mypy/server/trigger.py,sha256=qvo4tCLyrhI48oPTfDO_nWOVZfjMcYjoMdGgWsocEKg,793 +mypy/server/update.cpython-312-x86_64-linux-gnu.so,sha256=oKo8zzb3JM1RgL9JXvcZZ7FiIb9XKIOiJCkbIk1M3n0,15984 +mypy/server/update.py,sha256=JG5uI8AhiS1ygSDJAOP_6y6AKr3ac1QBF70kVMVysT0,53533 +mypy/sharedparse.cpython-312-x86_64-linux-gnu.so,sha256=bnFbe8wIp3ZhsGaJZTenkamo0VFRxPiux7_iMyooOAg,15992 +mypy/sharedparse.py,sha256=tPLG0cUbBkZU7Pc37G80K5s24Fb8hkevp7qJIJBTplk,2146 +mypy/solve.cpython-312-x86_64-linux-gnu.so,sha256=PkJA61mooSftC_IrNeV67NItmE7TcxLex9lZIdWQQG8,15976 +mypy/solve.py,sha256=YoLPBsWgq4LRZYRhCvwfK6aCxYqTzlF7vnKPXEQ216k,24541 +mypy/split_namespace.py,sha256=P67HianSrsMSZoeuS6F9swM5eK-B2fEBU3XJ6RFtYo0,1289 +mypy/state.cpython-312-x86_64-linux-gnu.so,sha256=NcB5qtoJeXiHFEXxZpO3sANXyh9MmaKUwT9HFzJ5tR0,15976 +mypy/state.py,sha256=yGfTdStRI9BJ3MpFvZS89uvVOLuqWxNy9DCY-SDHwcw,850 +mypy/stats.cpython-312-x86_64-linux-gnu.so,sha256=l_wE4fgakzgyVaII6MF0cXDhBDKb30Jff-CvMb95J0M,15976 +mypy/stats.py,sha256=661ysjBeZayvB16uK_bFr98KjqLamNm9BQpFO8iR9zA,16846 +mypy/strconv.cpython-312-x86_64-linux-gnu.so,sha256=2ymn3PcPVyefmSIHK4c6daEkjJXeGqWCeXOojiBMAs8,15984 
+mypy/strconv.py,sha256=l6tJMeCovdwldfhwGV6Lv1FmbRXb07LYG1tPAj3cwLs,24918 +mypy/stubdoc.py,sha256=JT18oLHTiogu40pCjKxF3JLK00qRsQjAMD9g1X3M24A,18780 +mypy/stubgen.cpython-312-x86_64-linux-gnu.so,sha256=x3IC_wJ0qmF6zZ0lH7YSMAIoLPfZcgb21CZ2p9f4yTo,15984 +mypy/stubgen.py,sha256=iGA3YuF1t74YEQQMw9JM85qBlqNVhqGVDGfkJEoyxvU,78478 +mypy/stubgenc.py,sha256=g8yb6sxrg9HLc9sgZZ5TTlMpzXweMjTY53Ix6sbjCWU,39265 +mypy/stubinfo.cpython-312-x86_64-linux-gnu.so,sha256=9Mui7BmmADmWzx1nnTNSxQNAeYGFxNbUwDwrjZ3f5dI,15984 +mypy/stubinfo.py,sha256=U4RPpvqQF35tLOchCYtJFSE9bTMaWwdqsKLdfRXEnMk,11319 +mypy/stubtest.py,sha256=4cpcHJeY80n_4-4A1UDmLY1HcVNHySj04lE92BJ61Zo,98817 +mypy/stubutil.cpython-312-x86_64-linux-gnu.so,sha256=0hUeCigzx-JtFhiVfbELkGtKyk9QdkWekP9M7ral8LM,15984 +mypy/stubutil.py,sha256=PIaWaM8uadNwvzR0s3YPTd0MaNlJ8HxzZaZh-k5J-4A,33478 +mypy/subtypes.cpython-312-x86_64-linux-gnu.so,sha256=-NE8CIrNslrD8LvMjnr4obj0jXbwePbB9fqK8Q8zYz8,15984 +mypy/subtypes.py,sha256=umVeNN-IC4YXU4gQBhcqUCLKFBfgjrxBYfCr7cbxLYU,100605 +mypy/suggestions.cpython-312-x86_64-linux-gnu.so,sha256=5K76Mw2hTDtpvgFA5jNFrH8VZo1dGk-RcMql-NilN-o,15992 +mypy/suggestions.py,sha256=EXq-ObwO9_m428N8TVjkYt8-ARP-zUI_dr2x9x8RkCk,39110 +mypy/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypy/test/__pycache__/__init__.cpython-312.pyc,, +mypy/test/__pycache__/config.cpython-312.pyc,, +mypy/test/__pycache__/data.cpython-312.pyc,, +mypy/test/__pycache__/helpers.cpython-312.pyc,, +mypy/test/__pycache__/test_config_parser.cpython-312.pyc,, +mypy/test/__pycache__/test_find_sources.cpython-312.pyc,, +mypy/test/__pycache__/test_ref_info.cpython-312.pyc,, +mypy/test/__pycache__/testapi.cpython-312.pyc,, +mypy/test/__pycache__/testargs.cpython-312.pyc,, +mypy/test/__pycache__/testcheck.cpython-312.pyc,, +mypy/test/__pycache__/testcmdline.cpython-312.pyc,, +mypy/test/__pycache__/testconstraints.cpython-312.pyc,, +mypy/test/__pycache__/testdaemon.cpython-312.pyc,, +mypy/test/__pycache__/testdeps.cpython-312.pyc,, +mypy/test/__pycache__/testdiff.cpython-312.pyc,, +mypy/test/__pycache__/testerrorstream.cpython-312.pyc,, +mypy/test/__pycache__/testexportjson.cpython-312.pyc,, +mypy/test/__pycache__/testfinegrained.cpython-312.pyc,, +mypy/test/__pycache__/testfinegrainedcache.cpython-312.pyc,, +mypy/test/__pycache__/testformatter.cpython-312.pyc,, +mypy/test/__pycache__/testfscache.cpython-312.pyc,, +mypy/test/__pycache__/testgraph.cpython-312.pyc,, +mypy/test/__pycache__/testinfer.cpython-312.pyc,, +mypy/test/__pycache__/testipc.cpython-312.pyc,, +mypy/test/__pycache__/testmerge.cpython-312.pyc,, +mypy/test/__pycache__/testmodulefinder.cpython-312.pyc,, +mypy/test/__pycache__/testmypyc.cpython-312.pyc,, +mypy/test/__pycache__/testoutput.cpython-312.pyc,, +mypy/test/__pycache__/testparse.cpython-312.pyc,, +mypy/test/__pycache__/testpep561.cpython-312.pyc,, +mypy/test/__pycache__/testpythoneval.cpython-312.pyc,, +mypy/test/__pycache__/testreports.cpython-312.pyc,, +mypy/test/__pycache__/testsemanal.cpython-312.pyc,, +mypy/test/__pycache__/testsolve.cpython-312.pyc,, +mypy/test/__pycache__/teststubgen.cpython-312.pyc,, +mypy/test/__pycache__/teststubinfo.cpython-312.pyc,, +mypy/test/__pycache__/teststubtest.cpython-312.pyc,, +mypy/test/__pycache__/testsubtypes.cpython-312.pyc,, +mypy/test/__pycache__/testtransform.cpython-312.pyc,, +mypy/test/__pycache__/testtypegen.cpython-312.pyc,, +mypy/test/__pycache__/testtypes.cpython-312.pyc,, +mypy/test/__pycache__/testutil.cpython-312.pyc,, +mypy/test/__pycache__/typefixture.cpython-312.pyc,, 
+mypy/test/__pycache__/update_data.cpython-312.pyc,, +mypy/test/__pycache__/visitors.cpython-312.pyc,, +mypy/test/config.py,sha256=VEePvz7BHWcNCQS1qY5H-sOvCgNuIN2yY6zZmXbo9kU,1301 +mypy/test/data.py,sha256=h9oObyIqucGfhUwJkBMPxyi8fECaLXaokQi7g4N89lc,30216 +mypy/test/helpers.py,sha256=Jz_kpVcVOivf52JD5t-pSt9293PNM_cx8qguvpNAwas,16792 +mypy/test/meta/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypy/test/meta/__pycache__/__init__.cpython-312.pyc,, +mypy/test/meta/__pycache__/_pytest.cpython-312.pyc,, +mypy/test/meta/__pycache__/test_diff_helper.cpython-312.pyc,, +mypy/test/meta/__pycache__/test_parse_data.cpython-312.pyc,, +mypy/test/meta/__pycache__/test_update_data.cpython-312.pyc,, +mypy/test/meta/_pytest.py,sha256=BHGoXuST1N2IqVBlWsJPAvBSxc0qVpALDjyLWVVvxPA,2276 +mypy/test/meta/test_diff_helper.py,sha256=ETTk0kyEvdKP_CMIKddY2sX6oSwTeUzEqNgDeBPLI6E,1692 +mypy/test/meta/test_parse_data.py,sha256=pq-pQ5A5-QaOBr7OQGPpAbXUSa_zVI6hOhv-Ch-VoXI,1931 +mypy/test/meta/test_update_data.py,sha256=ywoiRYRr4dyi8gkxw5-uaRACei6NQR6f7NVL1y6UC2w,4814 +mypy/test/test_config_parser.py,sha256=40D_aqRD6QCqJU4BsDBm_47b4X-Dnu05n36I2BY1lOU,4167 +mypy/test/test_find_sources.py,sha256=X_YRHcS6F7sp2MC2YepatigrfxxShIA6q2zj6Yh0JfA,13693 +mypy/test/test_ref_info.py,sha256=hz0P6MOqKTppSCyUXWvGamUDX433v15IpfVIHKgqFJw,1432 +mypy/test/testapi.py,sha256=Xinte9ICqFeoe9AUweIEKiHvjbgD8H_Xv6Leck_sUoA,1447 +mypy/test/testargs.py,sha256=LQy4ZS7hMSdtsgTLiwhWfH_FB4R_DsobMxYpKTYMeH4,3213 +mypy/test/testcheck.py,sha256=8oD8aEb1XR1WP-_PpVKMfLmRd8opgCuMnZC55WLUBU8,13180 +mypy/test/testcmdline.py,sha256=0Q6RiiHMtiOt_yltbgQQZpFrgwavO4eSJh7mjURHhYM,4980 +mypy/test/testconstraints.py,sha256=s3a2C6JcqTzzQeh2IFSKEXHF_OchhtmttX-TTmXYIJ8,5267 +mypy/test/testdaemon.py,sha256=9OACkimdIGIsqx7x7yhl78Zqwz-xpD860kCh0JcfbI0,4511 +mypy/test/testdeps.py,sha256=bYQ_g6sHA2VCWsrapenHOtZRkfBlsYg4PUH6Y5sNFVw,3236 +mypy/test/testdiff.py,sha256=VdM_0vp0NSOxlYifl0_ElvGEHhMoqsp6g9wWBfk5Rt4,2510 +mypy/test/testerrorstream.py,sha256=bEAw3kMIfSJNec8G2iR2VgcsvbupguGxhW71EZ_Cias,1441 +mypy/test/testexportjson.py,sha256=1w3JgZ4I9yR0XhRiLTUO6GMC6JYOJO8R-dq0At9vSbo,2574 +mypy/test/testfinegrained.py,sha256=In8nDEIe8ipivTdPkP8ArkUAaH579gaFHJQHSDXLPZw,17776 +mypy/test/testfinegrainedcache.py,sha256=AocgzZWRs8dlNcaaKzwY3QSBwxbbdwi3xwq5qcH9XTI,580 +mypy/test/testformatter.py,sha256=QwuFdblCF28X2J6K43mSUw95pl_VRdwqrAfOkCQr1xM,2639 +mypy/test/testfscache.py,sha256=1QDTTg7Towo5GI31NIwamXiIhbpS8o1zVmYcRNzHwh8,4465 +mypy/test/testgraph.py,sha256=YpvADzyZs_b27RwQ0-ZJ-VNiOYIEii2Py82fqYrlQUE,3127 +mypy/test/testinfer.py,sha256=d3NV8bTBrNJbaY1aO_IiYfMBeiWIeZjgeyYeQVShQwA,13856 +mypy/test/testipc.py,sha256=pBz9DjZzPK_9l3EZVtrdzvUQr8aenIHCayatDi2YPuY,3966 +mypy/test/testmerge.py,sha256=f9auvLodFe0_CxnnmqkCiGWDOiIm2xqtcnNsm6Hc5qU,8504 +mypy/test/testmodulefinder.py,sha256=T4bQKD0C6SQJsQdkXJ0ZqFKM47Bdaoqe8sUCeIoVp5U,13957 +mypy/test/testmypyc.py,sha256=gaQS_ZFFXh8D8eCi_IPwKPvcIQvlhnxcgX5OwrXBySM,397 +mypy/test/testoutput.py,sha256=YJqb5Utxrl2r18PMacgHr9jTd68I-1EUk2y8Pdp10zg,1980 +mypy/test/testparse.py,sha256=Rkf_4lkGfEMbBbDV0WFE9-SPaTa_J5N85zLdm2n5bWk,3790 +mypy/test/testpep561.py,sha256=hxowZEKsmA5bWLr4lFD1fxLF8aZmX47lf4Oncpe6LlY,6839 +mypy/test/testpythoneval.py,sha256=VrmzHfwenbJ7O0UeBO6WtXeWdhD3PYEBFEdO6ln7uIY,4586 +mypy/test/testreports.py,sha256=AHSNiKtdjwBh3ZI_WGk4dELfy8Bw6iAf1ELYyv_LbZc,1773 +mypy/test/testsemanal.py,sha256=ZKl-CXa77ThHBSLRLlsuFElN_NmT36hDmbJLFmcrUJ4,6643 +mypy/test/testsolve.py,sha256=e_2Yagxy7AsA4SkqwKVYXsjh1z5z6V9D5k2cMzwNdeU,10069 
+mypy/test/teststubgen.py,sha256=Y0rdkFJ2gWbyD04f3nAoAZnYbsN9SQq0mWDm7bG3exA,60843 +mypy/test/teststubinfo.py,sha256=yx1VeSL53qh_MimncCq8gKT9tyD-84ZDeYLk_tOf214,1587 +mypy/test/teststubtest.py,sha256=dJlhHgpTFevfSAurr4asgA7cBIGOr4QrvjfPVdaozjY,93457 +mypy/test/testsubtypes.py,sha256=UqehkYlJVgs-IsQa8XsZUgJqBawHgsDD78pguXKiWYo,12426 +mypy/test/testtransform.py,sha256=WD43JvD-37Cr3UdxW8sPF9JNAELqFKmTtEjBU3loVR4,2151 +mypy/test/testtypegen.py,sha256=YkyCSRqNKYxf3gGvIim8YKeyRRf2UM--TfjcMNaExmU,3101 +mypy/test/testtypes.py,sha256=T5q0owaGUyUzj6u7MEPYOwEkr-GRaA43vqPr2o5bkJM,63689 +mypy/test/testutil.py,sha256=HqmkgHC3TBnm7VUF5059l394PKLhS-YItLheKq8htLU,4233 +mypy/test/typefixture.py,sha256=-qWDr5Wxl8jURtrMVweDBNusUEuJb0nWg5r4iwwgX40,16014 +mypy/test/update_data.py,sha256=IOqTyP5RTOd2SUsj1faveoFKYc3q0i1kqP-_WBLVVmU,3685 +mypy/test/visitors.cpython-312-x86_64-linux-gnu.so,sha256=w6k9SiqWby8LKuWRg2M0I3WRkdw-4z4rU0-qp4PLZKk,15984 +mypy/test/visitors.py,sha256=cfsPawFO9J2UnoeZGzkYbAbZcuZ8HRDc1FKGd9SV1E0,2089 +mypy/traverser.cpython-312-x86_64-linux-gnu.so,sha256=OQj5NbrbQJHim97k028CLOUQgsAYKzAZnE5Zwcf5qPg,15984 +mypy/traverser.py,sha256=4xfKhTUI2W-Bs-MXT4UVqa1vCaXOU2aC81gEnwaKutQ,30603 +mypy/treetransform.cpython-312-x86_64-linux-gnu.so,sha256=j7oteOc3VsueJJOZW7Mfx1wynVC0U0i-DNMANhUdGfs,15992 +mypy/treetransform.py,sha256=Vfz34VWATa6BvGCpYnmK28mFpxKJZmn_9sv4itCAcu8,28700 +mypy/tvar_scope.cpython-312-x86_64-linux-gnu.so,sha256=3w2wsMoA1ZO6Bk9zT1w8-5e8lrQdcl6Fc26Ma-olzb4,15992 +mypy/tvar_scope.py,sha256=Pvk0ZNVugkuvC6Bpsm3uJYjoG-yFCGcwlkBAiKPkkQM,5895 +mypy/type_visitor.cpython-312-x86_64-linux-gnu.so,sha256=ScJ-FB0_PdfMO6t4-_iclLvcgxhgCZzB_L1WZVhG1H0,15992 +mypy/type_visitor.py,sha256=rfjJOstoCbI2ahlF9Hn8EZBoDcS4Cn-LvOuROHRQOic,19975 +mypy/typeanal.cpython-312-x86_64-linux-gnu.so,sha256=rozI_i_U_pDrd9q0wLnyQqUlH6xDQe3HuQChIiWf2Tk,15984 +mypy/typeanal.py,sha256=FtujxYfoqolTL5S51dfAxKjqUDLDzRI9Oc_1lj7OHto,116816 +mypy/typeops.cpython-312-x86_64-linux-gnu.so,sha256=wmay-4vsOhffZq7lL6uBNYxG65HivUdAKN3PJzzrPRk,15984 +mypy/typeops.py,sha256=hhXZLxhJBD2Q57sU9lrQcVp5PK6VyukkUruTsXEM-tc,50009 +mypy/types.cpython-312-x86_64-linux-gnu.so,sha256=jdZxZ9DkY5xnev7nyeaEaTrbI03epQ32NphmTGcDKjo,15976 +mypy/types.py,sha256=O7EzxZCGpraWiFJQIjFOeGOSb-d27noDpCOiFZa_xls,159147 +mypy/types_utils.cpython-312-x86_64-linux-gnu.so,sha256=FTmLeoqkkRC7on0bWzv-732Gt6aQRucYccghH0R8RIE,15992 +mypy/types_utils.py,sha256=4tibUX5YsLLVWXfWX1hK9Dn2lLRy0vG5QWqTA2C4hxo,6126 +mypy/typeshed/LICENSE,sha256=KV-FOMlK5cMEMwHPfP8chS2ranhqjd7kceBhtA1eyr4,12657 +mypy/typeshed/stdlib/VERSIONS,sha256=hlA0-POBC4H63tFwcDLIWXHNARvXe_scyqgt6layi6Q,6412 +mypy/typeshed/stdlib/__future__.pyi,sha256=qIwWDmjaw3XCiulKYoKBQB_eJjLxweesUKwBdpkgQkU,915 +mypy/typeshed/stdlib/__main__.pyi,sha256=hcfHKThQRiibOXGnPeEUHunrtviMdorj0MtdnIwLtl8,53 +mypy/typeshed/stdlib/_ast.pyi,sha256=Hn23xk9t6_M5kuMZHkcgjAWeNcwktovuyMbMLjZbtEQ,3344 +mypy/typeshed/stdlib/_asyncio.pyi,sha256=7yPRcejEUZ1k1CbRFVgPyvx1EHoLqCokMRHiKG0Y0-s,4872 +mypy/typeshed/stdlib/_bisect.pyi,sha256=FbUBdcUSPSGrnXSN89eA0gqCBVWMm8NlpxHKz6guO8Y,2651 +mypy/typeshed/stdlib/_blake2.pyi,sha256=_qavMws5SSi8fWHc3J4WsFll6N2SORWBegj7qQX5_u4,3542 +mypy/typeshed/stdlib/_bootlocale.pyi,sha256=vSVnoBvURsNzi7MPLR1b_wpuh-yySKzPValAwQ3OVT8,64 +mypy/typeshed/stdlib/_bz2.pyi,sha256=rFCr1AYojWvE59rRz5njFVK1m1vMC2wNmTm-F7nrx_E,678 +mypy/typeshed/stdlib/_codecs.pyi,sha256=ryICMIWdfKvRj4jRQTwniDw7bMOdhyCuduSX0nb6Z40,6721 +mypy/typeshed/stdlib/_collections_abc.pyi,sha256=oNlzePvyMeTwBFMX5cuBwwXMNQQuyQ90y-yHQ9HWzY8,3034 
+mypy/typeshed/stdlib/_compat_pickle.pyi,sha256=3WH0XDg8YMQ_EzfT7cnBKrU5WjpXIRJ5aec8h0xrDbU,438 +mypy/typeshed/stdlib/_compression.pyi,sha256=qEdmEzjcfQ9rOKRLjVtkpPSfvNEViEc43FouPF3XAfA,1377 +mypy/typeshed/stdlib/_contextvars.pyi,sha256=CeOt4eddnDTydVq4R1qOE9J7n7hQN6Moo_vstCY0EfQ,2370 +mypy/typeshed/stdlib/_csv.pyi,sha256=AJt3gWebsJJwgTBsKC3aCwWKInuKZz00_5qNlKv4rKw,4041 +mypy/typeshed/stdlib/_ctypes.pyi,sha256=BBt5z7NwkzBJbi9LXybPklwdRf6ekBVIlAaYGRI3eXk,16743 +mypy/typeshed/stdlib/_curses.pyi,sha256=jl9jHbBJlbuMfFvl_ho97IjmwPXo-JUl9iKJ9Nq13MM,16854 +mypy/typeshed/stdlib/_curses_panel.pyi,sha256=w1RxG64bD2C8OGi2sXQmVIrNRv3q9aB7XrfYMGCkPF0,757 +mypy/typeshed/stdlib/_dbm.pyi,sha256=QtTE7l0xah6cJnDhlnIYOiqwsK3UsNIj9xm0WM-uVnw,1775 +mypy/typeshed/stdlib/_decimal.pyi,sha256=mU59vCxZe6aFeK7eCfAfYUUTy87ox8yD4bbpw0S-9sU,2058 +mypy/typeshed/stdlib/_frozen_importlib.pyi,sha256=7r6dPA7W9aOIdRmf4jNDEpqbSQDbT4ITSVmb6qmZQWc,4679 +mypy/typeshed/stdlib/_frozen_importlib_external.pyi,sha256=7NJlCPo5Cfq_ss_FFPEufATqNFrrmOgduK-ICX6U3aI,9339 +mypy/typeshed/stdlib/_gdbm.pyi,sha256=_xrxJzc3WMpbJSc5uzWb1v2-Q2rkyxRFtUg85Ze7r6g,1946 +mypy/typeshed/stdlib/_hashlib.pyi,sha256=Dwc-lt0zwAvmrJ_lvV58s7NKK85zmC19iXT2HkDaYsg,5593 +mypy/typeshed/stdlib/_heapq.pyi,sha256=SQZxb-0v3sujrUDvauFD1kgbQf9OuhRELp3J68-iC50,755 +mypy/typeshed/stdlib/_imp.pyi,sha256=yUAJduUklNqp7sHZ8mV-4MPnacchatn1xd4v23clNdc,1185 +mypy/typeshed/stdlib/_interpchannels.pyi,sha256=WvlfKacmFX89HoVOrEElyAmRsxotgbEiSAKKzUp4S1o,3204 +mypy/typeshed/stdlib/_interpqueues.pyi,sha256=0OTlJA5tszfEGyCralyqa1ZxyVdbd0jbDQLlqB7YeKg,866 +mypy/typeshed/stdlib/_interpreters.pyi,sha256=PR8fkrZPGLvMbTovssuuTn2j0SNhlLNQNUt0nx4eE8M,2654 +mypy/typeshed/stdlib/_io.pyi,sha256=rGC1-_e7nq2oiuFzIx7KOlIVF7626YoxZAFUl6RWoK4,13142 +mypy/typeshed/stdlib/_json.pyi,sha256=yhbNNpPlgcNr7UbLWYlJLRT_GJAs6cec5nR1Ze_lvDo,1532 +mypy/typeshed/stdlib/_locale.pyi,sha256=uK5szB547hvi-ZQ9mIhaQXhDKD5-oO1hWvXAhd2g4fk,3287 +mypy/typeshed/stdlib/_lsprof.pyi,sha256=SnrMRZ7reYUpO4aspszvPl08o-zfyIZwixON_SP97bU,1323 +mypy/typeshed/stdlib/_lzma.pyi,sha256=vhTkKz-psYRTdZS5z31HZ71LtUPiUs5jRnxti7w23VA,2090 +mypy/typeshed/stdlib/_markupbase.pyi,sha256=jXh5gaXQuqr2KmmACztGc8xDjWm48itq1NkaH1eupMA,730 +mypy/typeshed/stdlib/_msi.pyi,sha256=jm1bAk_feToRYsWSLcQknCl2T_X2XwGtTwDzzl32VJE,3652 +mypy/typeshed/stdlib/_multibytecodec.pyi,sha256=48jNSq3IH_FejpnGX0_qMinWaLFOVjox0b28466ywrU,1890 +mypy/typeshed/stdlib/_operator.pyi,sha256=HQ7u-2wGpnzgQgblV-EajBOxM0BHWHVMUvQvgEzfyxQ,4851 +mypy/typeshed/stdlib/_osx_support.pyi,sha256=3cwesRBNoUgiThjIsAiPNKoODAGoaRg9je4-A-QpOU8,1900 +mypy/typeshed/stdlib/_pickle.pyi,sha256=EiGJBPWfyp1VlFuXDsLX2tNAwNKpZc5oAsgdQnJUO2k,3295 +mypy/typeshed/stdlib/_posixsubprocess.pyi,sha256=sszb90KfUWD1Mnk8eJti8-O_PPr95u4vGJh4XSoyEUQ,1836 +mypy/typeshed/stdlib/_py_abc.pyi,sha256=yKisRv9tmwucBsWB1ILLo35NcNrZWwIkKRL6Pu8GH5s,397 +mypy/typeshed/stdlib/_pydecimal.pyi,sha256=wssuLOIKIuBbxUlyTcxx8ech_vGbA1A9EH9hqyHkG-4,995 +mypy/typeshed/stdlib/_queue.pyi,sha256=KX-GSZ_-tpD0mLw1WqfXW5WGCXaR7C10YwaZuexOUho,634 +mypy/typeshed/stdlib/_random.pyi,sha256=uOdUKAg2zvcoUlAwSYR5_1dc8Md0hsvJ6h7HGyvAeAU,571 +mypy/typeshed/stdlib/_sitebuiltins.pyi,sha256=Hw17bWzQybJdwlnQceJ8BMHzSuTYiAn65Ro7sZu5MoI,538 +mypy/typeshed/stdlib/_socket.pyi,sha256=rB2qYMdu0AAgOhZAJmLB32sIo6rLXTh1IGl9j2dpvK4,27964 +mypy/typeshed/stdlib/_sqlite3.pyi,sha256=WAqfvEdYY1Rcn0oByDEF_xh83BxJErT1FrKX7kSLkAE,10622 +mypy/typeshed/stdlib/_ssl.pyi,sha256=X9JlIuSp4SIFxu-Za5YoV0izW_T20Bp3vL-PTQRN8Nw,10060 
+mypy/typeshed/stdlib/_stat.pyi,sha256=hUl5rnhbcV4UkNu4MASQinuAccNDU0MiHrdG8Bh_92Q,3441 +mypy/typeshed/stdlib/_struct.pyi,sha256=HwlQXLJV_jIckbuof-hIacm5RQpGsNyz6kO7L07MjQE,1196 +mypy/typeshed/stdlib/_thread.pyi,sha256=cMlchv8BCtM37Tqw5n5JU2oOs94g6k_X3EEJ4bX-sUs,4213 +mypy/typeshed/stdlib/_threading_local.pyi,sha256=vnzj6ILLO3h6_TkLIXlqQZmKnfRG6wxBfvA2IP1iHFE,880 +mypy/typeshed/stdlib/_tkinter.pyi,sha256=kkdNyPt2JpvPC6vUsMLkm9xkJn60UgJMmat_0vZna4Y,4925 +mypy/typeshed/stdlib/_tracemalloc.pyi,sha256=bPNYXniUfh6u6NCEkkmpNDClCx7JD1EYRdl-87cY8ps,500 +mypy/typeshed/stdlib/_typeshed/__init__.pyi,sha256=KhlPzjpyoFNedT5XCJDPKdvFzAO6eBDV5rvmxFBWMlE,13211 +mypy/typeshed/stdlib/_typeshed/_type_checker_internals.pyi,sha256=aS3E_CYU0Q1szNLqKjyGfub4-0rSBXK-znvd9FVVU08,4157 +mypy/typeshed/stdlib/_typeshed/dbapi.pyi,sha256=DbFvZC7aeSFuw_hopshe-nz6OL_btPB06zIoJ8O-9tA,1636 +mypy/typeshed/stdlib/_typeshed/importlib.pyi,sha256=iSR1SQrIgH39dZwu1o0M0qk8ZsxRUkn4DtG2_K5tO4o,727 +mypy/typeshed/stdlib/_typeshed/wsgi.pyi,sha256=qNH7QQT9Y_i8GxSoS2LUViFSmM4mH3-K5hxh7sGT5K4,1637 +mypy/typeshed/stdlib/_typeshed/xml.pyi,sha256=W4c9PcHw737FUoezcPAkfRuoMB--7Up7uKlZ0ShNIG0,499 +mypy/typeshed/stdlib/_warnings.pyi,sha256=fkbpcOeB7WnJshHYcNvXGB86qDMkDDtM52o_00I6Bzw,1569 +mypy/typeshed/stdlib/_weakref.pyi,sha256=UVIE-iE6GyVOBeCKC0CXABnd7t-PvxC8ZtrTV6IaI8M,643 +mypy/typeshed/stdlib/_weakrefset.pyi,sha256=_n_kXqzC0K_qb4uEtA6WUsgmHD70uCp6YuTUtZxfK3c,2345 +mypy/typeshed/stdlib/_winapi.pyi,sha256=4-6Mp01uvECRnH-UBMks_6XtQlxzLKkpxwnsQd1p4UA,10863 +mypy/typeshed/stdlib/_zstd.pyi,sha256=y4Nt3jnBtFV7nG2edkwtQiZWwYQ4ux1dnU6japhwC_I,3562 +mypy/typeshed/stdlib/abc.pyi,sha256=rEGmN6wFB3A5CTAx46Hid0xycNOKIAsRLAFrz9595gI,2122 +mypy/typeshed/stdlib/aifc.pyi,sha256=j7qx4qEI_YH90FPfekf7fqRBDs2HXALqfv--MuXo0DQ,2986 +mypy/typeshed/stdlib/annotationlib.pyi,sha256=kiftU52i0S5cIqUf_xPiyl6SgnewDWt_0c942HhGsus,5435 +mypy/typeshed/stdlib/antigravity.pyi,sha256=AT_uMXdsZR3AL8NfPU7aH05CAQaYpiM7yv2pBm7F78k,123 +mypy/typeshed/stdlib/argparse.pyi,sha256=bcvR09Y0kF3ZJgFX_WyguaCyIosKkCo8-v8a-csRxVM,30895 +mypy/typeshed/stdlib/array.pyi,sha256=c-TpI56La2GbXLnW9w3WWaaGGLPNkPGdFvurf5Fuz-s,4675 +mypy/typeshed/stdlib/ast.pyi,sha256=O7dOu-BpucY17aIKFEiO0gQPAcwikmCTmb4HMsoni-8,77826 +mypy/typeshed/stdlib/asynchat.pyi,sha256=jFTiOSXClcmhNvWXQc9JdRD44AT5o9Cq7xSC2fbVC2k,787 +mypy/typeshed/stdlib/asyncio/__init__.pyi,sha256=KEWLcIiMBDio8lNUZpHu3wW0G9JlMc-HDjpYh0kL3Cc,44530 +mypy/typeshed/stdlib/asyncio/base_events.pyi,sha256=gWOxOpnAmsjANjK-7QZ6oFEKuRkoN496gkQA2TYqf8Y,19731 +mypy/typeshed/stdlib/asyncio/base_futures.pyi,sha256=Jy9MidrOMwCSy-ZkrbjLoKNNPcuVK9ASOX_RBLDAS3M,609 +mypy/typeshed/stdlib/asyncio/base_subprocess.pyi,sha256=CjBQyvXQcYWcmmVfWAq3z6ZY3MhXntxMh_xgtJhwKUQ,2680 +mypy/typeshed/stdlib/asyncio/base_tasks.pyi,sha256=1qMENIsXTar5-dVXn33qy8hpWzOtFOs_I-kf5I92dsI,404 +mypy/typeshed/stdlib/asyncio/constants.pyi,sha256=-Eu35n-kT7I8W9YNfoY1lXmrZKATdDBojxBOwMiPw6g,556 +mypy/typeshed/stdlib/asyncio/coroutines.pyi,sha256=69Za0yrhcudJqRA3CJ0rN5KpIGvlQai8jMVdQqHliQM,1209 +mypy/typeshed/stdlib/asyncio/events.pyi,sha256=vD9LcAMeK7CjMSrsgC1F9tBunVQorD_Pqr8iwSXHjfU,25540 +mypy/typeshed/stdlib/asyncio/exceptions.pyi,sha256=livPkrVx3OkV5T5BXlmuiI0rQx-aRLCPkrkEOQlalh8,1163 +mypy/typeshed/stdlib/asyncio/format_helpers.pyi,sha256=2woQu8erzdP7jbY3DZcfWyOwEUjkKQbYIvGW5wwu59w,1353 +mypy/typeshed/stdlib/asyncio/futures.pyi,sha256=mcUrrMBgux4VvCIkAYEoXkiU5tb8hBrJsW0DJFcT7DM,721 +mypy/typeshed/stdlib/asyncio/graph.pyi,sha256=EMHKHA_90qYP4OB6lFtG2CGBtNKkefB-wzh8y1AR_WQ,1194 
+mypy/typeshed/stdlib/asyncio/locks.pyi,sha256=QDXBtjjRszT62G52f6icc_qzozDPlr_qrVT7spcnj5E,3514 +mypy/typeshed/stdlib/asyncio/log.pyi,sha256=Ql97njxNKmNn76c8-vomSAM7P-V14o-17SOIgG47V-U,39 +mypy/typeshed/stdlib/asyncio/mixins.pyi,sha256=YqQRvFzqgxJ0BvStd6F56A4DaIEM3KvD4fDELKCYhco,215 +mypy/typeshed/stdlib/asyncio/proactor_events.pyi,sha256=vCZEY77LmyjcjJt_UgGuMqFSCG9BQmOTX-2aqArcYP8,2598 +mypy/typeshed/stdlib/asyncio/protocols.pyi,sha256=MV0bmW9iU_c5qw0vOdG9V7fvMA-RoB0PjxTRP7wIN-4,1927 +mypy/typeshed/stdlib/asyncio/queues.pyi,sha256=QFNRsudiTUFQ9eEVKvgQ4LKCGz7UxRFs3KEvsWqpW0s,1918 +mypy/typeshed/stdlib/asyncio/runners.pyi,sha256=cfkVPSuvnTj6_NVXAGi7oPxKYYGgySs5D3hojhFJj9o,1207 +mypy/typeshed/stdlib/asyncio/selector_events.pyi,sha256=99QJmKi-74k50L6pmkcfO9B716oIJt4uIU8g2nG6pCQ,315 +mypy/typeshed/stdlib/asyncio/sslproto.pyi,sha256=buih3k56xpku7kwDWaioIPO7ibOkN_d6df26ONKGDgo,6489 +mypy/typeshed/stdlib/asyncio/staggered.pyi,sha256=vtlD5Xfya4AEfvkwJmIL9zXXgRlsI8MmGOFitDK9h7g,341 +mypy/typeshed/stdlib/asyncio/streams.pyi,sha256=fTNtnQq_6s6ljW7B4xl-U4uO-zDTNsuS5zQi-ZJ61iw,6007 +mypy/typeshed/stdlib/asyncio/subprocess.pyi,sha256=7hdI_BmLWsvLtzz_mrabrGCibuHNHfbUlRyF2BB7MiE,9295 +mypy/typeshed/stdlib/asyncio/taskgroups.pyi,sha256=Md8DTfLwV_U_QCoPN8mGclbUTnFOIU86mLnu79TYkuM,858 +mypy/typeshed/stdlib/asyncio/tasks.pyi,sha256=mO8Zlqlzcd0mjthSJBRV7sTs0DW_UqW3vSa0eKSyJk4,17095 +mypy/typeshed/stdlib/asyncio/threads.pyi,sha256=mPM3TlwpYs5UUus7d-pob5vcrsehEp6Lp2a8JxwBbqk,330 +mypy/typeshed/stdlib/asyncio/timeouts.pyi,sha256=Py2VPr85sJCC48s63cQvCQQCVsk-T-9znyjQDaIs-o8,717 +mypy/typeshed/stdlib/asyncio/tools.pyi,sha256=zjh-ntujiZ_cIJIAgiFbV4R_CT_cYiOKF-mlP31PmMw,1489 +mypy/typeshed/stdlib/asyncio/transports.pyi,sha256=pzLqWKPGJmQyrtCoCMaK4hKVft-I-FwEmvNJe4rP08k,2390 +mypy/typeshed/stdlib/asyncio/trsock.pyi,sha256=e_tQvZ-kujzaJHG3V4Ngdq46Zsg3G0NZO0q9uf8ELt0,6095 +mypy/typeshed/stdlib/asyncio/unix_events.pyi,sha256=lv_QLe9gpKp7zJWB78MJL9p1hAeMNG0SVtB5EloIRcY,11341 +mypy/typeshed/stdlib/asyncio/windows_events.pyi,sha256=fL5j2FWLD1tixaAFnXOuFoveXgU--PktycxLPhCWAME,5397 +mypy/typeshed/stdlib/asyncio/windows_utils.pyi,sha256=muNyuM_00Nog2RVk6k46NCWwk7C761dbCNOe-HZIpI8,1955 +mypy/typeshed/stdlib/asyncore.pyi,sha256=xRANk6i8v5AshNfEgtRCInPWVEwL1NP40G7aRRqaaWs,3670 +mypy/typeshed/stdlib/atexit.pyi,sha256=YPzhxFxGPqJ1k5G-Iab8lqfJNum1kQ_UsmI84I_5zEk,398 +mypy/typeshed/stdlib/audioop.pyi,sha256=9k9vD1-ArGE3bl0iSGPn6Oh4-XOftsyuN5MbFi1W8xw,2122 +mypy/typeshed/stdlib/base64.pyi,sha256=wxttkEgPJROdDrk55N1uYnzCiTiqWZo54fDHJLKXS2I,2264 +mypy/typeshed/stdlib/bdb.pyi,sha256=b7zH0KfmVkWBje1oo3cwHw6VX2pvfONj300oua42YDs,5866 +mypy/typeshed/stdlib/binascii.pyi,sha256=tq0U58aXORL5lgMkufQHEcwVzYl0BUfJsXX8IR0AJFk,1822 +mypy/typeshed/stdlib/binhex.pyi,sha256=vyLQVbmIET6tr9sHDh-vewAJvpfCcaRIw3h9hRGs4xE,1274 +mypy/typeshed/stdlib/bisect.pyi,sha256=sQn9UUS0Cw5XZMEGcEj8Ka5VKPVobL43Pex_SagjXg8,67 +mypy/typeshed/stdlib/builtins.pyi,sha256=SIagus1oaebwU8W7ACOxImfkwatTg-efYD2DaaGIhAE,91702 +mypy/typeshed/stdlib/bz2.pyi,sha256=xOkSxmwk1lYrayAxjpWi5dY5-WiUmeyE04eBmX5vxkw,4008 +mypy/typeshed/stdlib/cProfile.pyi,sha256=gnkhMSDZOdLpA3atsotilOXWNzqok4SXcsnvAKQH31E,1313 +mypy/typeshed/stdlib/calendar.pyi,sha256=XCTyKj1kTW6hFYMKD2KhxKe4kqziKaUuOX_qia9WqmI,7344 +mypy/typeshed/stdlib/cgi.pyi,sha256=0roTC0MUUXUS4xO7mhzY13Oa0GvlaD6UZF6UheEs6Qc,3810 +mypy/typeshed/stdlib/cgitb.pyi,sha256=l7aliv3yXrfw0MM15pXDdgeNpbIK1N1e84OjSEt2TFU,1394 +mypy/typeshed/stdlib/chunk.pyi,sha256=691YVfWjwx20ngjDSBGS5Pjs7IrLViQinuTBg8ddmX4,614 
+mypy/typeshed/stdlib/cmath.pyi,sha256=jusKnxW_Ppzy9SVRD2H7_uK8OJEdPdvruBR2QU4ngN4,1231 +mypy/typeshed/stdlib/cmd.pyi,sha256=Mbl8vjsuh_FXsT64NErKMK1FdPYlOdbC-jVLg7tiLoc,1783 +mypy/typeshed/stdlib/code.pyi,sha256=UnBJGRyi3C4lNtXYsC4wkMyAcF_k8FuM9aFq3uooSMc,2140 +mypy/typeshed/stdlib/codecs.pyi,sha256=VMDX8fxzC6CgtpSJdhGYXUQcFiP8__i3O7MaNQjiR4I,14045 +mypy/typeshed/stdlib/codeop.pyi,sha256=DAkqqHHsxirkzPomzRmpsL2dVW5FyhLdj27YTZ2jc7o,799 +mypy/typeshed/stdlib/collections/__init__.pyi,sha256=A-DN8bpJee2VdrAMnuWOhmTpP6tNBO-rOV4Uz_dLwv4,23610 +mypy/typeshed/stdlib/collections/abc.pyi,sha256=kBiZAN0VPf8qpkInbjqKZRRe0PXrZ7jxNmCGs4o5UOc,79 +mypy/typeshed/stdlib/colorsys.pyi,sha256=o4IcMv2OA9R-5pwBWvEpW_kOzFsjZWtWZARpwMHTkv8,696 +mypy/typeshed/stdlib/compileall.pyi,sha256=eqS3LbYNS0f_aKhX5g-NPgTaMuF1PIzwxT_TdRm_q7A,2757 +mypy/typeshed/stdlib/compression/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypy/typeshed/stdlib/compression/_common/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypy/typeshed/stdlib/compression/_common/_streams.pyi,sha256=GbmwTLxClok7eRsWhgTX9884eLyPByx_7Czu1XKD9NA,1339 +mypy/typeshed/stdlib/compression/bz2.pyi,sha256=_Tb-V0PrToWQt38Y_XXS0FmJfRWIc2X5jnGeEwH7gmE,18 +mypy/typeshed/stdlib/compression/gzip.pyi,sha256=HRqVg9hDWsKJ83Ur28v1-WB7sLQzxADzxJPP7FTumec,19 +mypy/typeshed/stdlib/compression/lzma.pyi,sha256=nNMih_mDY-jxO3tY65zkpT9nT6r293X7fxwXk1SAJjU,19 +mypy/typeshed/stdlib/compression/zlib.pyi,sha256=UQc5WSd45ZWjhEc1Za5pOtEsoBi-kbrrOidCv7aguBw,19 +mypy/typeshed/stdlib/compression/zstd/__init__.pyi,sha256=i2o9jpuYpEAZ3Pjy1vyoDn0gDAJOHW4qsC5PWNq04Vw,3037 +mypy/typeshed/stdlib/compression/zstd/_zstdfile.pyi,sha256=i3_euUWFMsMhfJkgZz8e4f6xVV3WJmMlmFzv_AeKI5A,3643 +mypy/typeshed/stdlib/concurrent/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypy/typeshed/stdlib/concurrent/futures/__init__.pyi,sha256=Ma8QgM22nhptstCwPahwrLn3yfYIokNZGBfVjRGvVnc,1765 +mypy/typeshed/stdlib/concurrent/futures/_base.pyi,sha256=cpRZfzzvOtgWhtlBGkCiTcps9InisdJIU_qJVDyfh2k,4346 +mypy/typeshed/stdlib/concurrent/futures/interpreter.pyi,sha256=6BNtvdAt0z5BDVO7YZPDepgTn9gV85C0FULjDKj9oOw,3053 +mypy/typeshed/stdlib/concurrent/futures/process.pyi,sha256=cBSyTr_2IMdujvLEIaJCWTzCrtJdQwcxj3nyYpqC_fM,8168 +mypy/typeshed/stdlib/concurrent/futures/thread.pyi,sha256=emnAXhk2GmHQoKx40vC3ihLDacCRzAAZssbT1OtX7CU,4738 +mypy/typeshed/stdlib/concurrent/interpreters/__init__.pyi,sha256=5E7AB0WNoIBkid5LhXlys78KudhHuHKve3dvKGLxbGE,2441 +mypy/typeshed/stdlib/concurrent/interpreters/_crossinterp.pyi,sha256=gIXVsaVcPvE6gAOlmSVCDFzmZ4pdssdmKQZufKikzNM,1257 +mypy/typeshed/stdlib/concurrent/interpreters/_queues.pyi,sha256=vHO4wi4vRMqtMw7GNdHTZXYe3v9FptKu2nhhDoIPSKE,2603 +mypy/typeshed/stdlib/configparser.pyi,sha256=8B8lFsXku9FCcxEgcfTofDajhOJoLjCaT8eg5ckPYX0,19610 +mypy/typeshed/stdlib/contextlib.pyi,sha256=tSub_4q9XEGSMt6YpFYjSbFBJAOMMU8PJicafaeWRxo,9577 +mypy/typeshed/stdlib/contextvars.pyi,sha256=dqUvNxlpq9-0XgvzzKlCz4kWsA7qWEEIXIn73jxpaf0,178 +mypy/typeshed/stdlib/copy.pyi,sha256=e0uVQHOXyuOSyeCibpPIDOjBiZhrc--BU1oYKJyosyQ,856 +mypy/typeshed/stdlib/copyreg.pyi,sha256=59YPSECQJ5ppsEmYJxcvb1NOac6UTAu5CqP3SMd6VL4,983 +mypy/typeshed/stdlib/crypt.pyi,sha256=OA9MXjgoTY0jyd4cn6ZypXL1TqHtOAA_udosFL0OldI,792 +mypy/typeshed/stdlib/csv.pyi,sha256=QoR9Mr8E9wHipM3ehLPHPb3ZWNmJqKFov1sRBGOdRjY,4535 +mypy/typeshed/stdlib/ctypes/__init__.pyi,sha256=b-m3_-qPJEOrITLFFgZSlkhD7qT8uZnhfqY-1cGFDkk,11431 
+mypy/typeshed/stdlib/ctypes/_endian.pyi,sha256=raioVluH0dlcLoayGs1jELpcBkPKmO7Wbwr2OcpZAUE,461 +mypy/typeshed/stdlib/ctypes/macholib/__init__.pyi,sha256=G8DxraRu5jEG-ve06msp9XoHqQwtnFEjVtCSLe7vaTw,50 +mypy/typeshed/stdlib/ctypes/macholib/dyld.pyi,sha256=K0ZDg1MB-_Y_n49CDgrEJytsEVOWgXgHN1THza5UQ9k,467 +mypy/typeshed/stdlib/ctypes/macholib/dylib.pyi,sha256=HVkz1Oyol9QCJcjdnwtkgW5iq-yFJwiQ-jZCAGzPjTU,326 +mypy/typeshed/stdlib/ctypes/macholib/framework.pyi,sha256=bWwjubZ_zKOiGqAlqByzonpxD4AJQemGiFIfS4emGm8,342 +mypy/typeshed/stdlib/ctypes/util.pyi,sha256=4dyWcDlzw6yUtBFC1j17JsnSzhJV8q94ZJfvo-07kng,222 +mypy/typeshed/stdlib/ctypes/wintypes.pyi,sha256=s59HOt-zLzzEeXbTeCSsB-wXiEBL-wxiy5Yff43agtc,6967 +mypy/typeshed/stdlib/curses/__init__.pyi,sha256=YM2Eo7wYTP_QgiElaBzMTWsWJFHsaHFznnp-9o8errs,1284 +mypy/typeshed/stdlib/curses/ascii.pyi,sha256=7xKdmHqbNJu7zXzLc36MKVuEsOoWmqDAFWrdDwVT6Zc,1465 +mypy/typeshed/stdlib/curses/has_key.pyi,sha256=1EoxgUM4xlB7ggY4Ru4eqnSa0Wn2mP7ylUE7p9V7Yc0,40 +mypy/typeshed/stdlib/curses/panel.pyi,sha256=tiz6sEiozlgKp3eC7goXP0irXp9PwWHSfWiMahWMRRs,28 +mypy/typeshed/stdlib/curses/textpad.pyi,sha256=2UsLwIhJh5iwWSN-1SJlzwvn--sJqh8zJYQ8pYCP8f8,422 +mypy/typeshed/stdlib/dataclasses.pyi,sha256=Cl6Lvi5YAzPZ0rcZjnDkOMAOHtHJdJeYRIjBF92HRsw,14451 +mypy/typeshed/stdlib/datetime.pyi,sha256=CiWGFuYlJcGsCDxyBUhYGbRDesevvX0V5Ndi3wol_cM,12256 +mypy/typeshed/stdlib/dbm/__init__.pyi,sha256=KZb9l0guolhsjOyfqZk1_r0-vdN8nt4a7ach5lOXe20,2143 +mypy/typeshed/stdlib/dbm/dumb.pyi,sha256=dsAfzLKJnXAW6xJMnw-47D-xdaiwuXwaQC_-c1If2ZE,1467 +mypy/typeshed/stdlib/dbm/gnu.pyi,sha256=QR25FB7f-Rxi5RzWWki9npyEF1JKu5A5RjOWDmR7T2U,20 +mypy/typeshed/stdlib/dbm/ndbm.pyi,sha256=dc0BCDY0QiGbHA0lcqZL4NqOfCES4htigqb4uM5UaSo,19 +mypy/typeshed/stdlib/dbm/sqlite3.pyi,sha256=v1uUMPBFIH7G34tYJ9HrcoYAt4E1zq8EWYGbVEbIQsQ,1228 +mypy/typeshed/stdlib/decimal.pyi,sha256=TVKrr33muv01oCyW1j1ZcMtkRMDYgmY03R8IUwTqs1Q,14077 +mypy/typeshed/stdlib/difflib.pyi,sha256=pv2JXhDbVzQF1UdfSrblMHZvHFMPcldu-FbRwtdJuJw,4512 +mypy/typeshed/stdlib/dis.pyi,sha256=C-hGrrK552t7znaAYe0NBg5N5KO5AzsV5TTuhHjxjLw,9248 +mypy/typeshed/stdlib/distutils/__init__.pyi,sha256=o-D0LAC_8LmRTahqNjjRUXycRSMyJ537NHeFaduZKVc,351 +mypy/typeshed/stdlib/distutils/_msvccompiler.pyi,sha256=HOTrNPKFYHGnaIggO2_-F2BTCF878cRQf-ge7Ng425k,437 +mypy/typeshed/stdlib/distutils/archive_util.pyi,sha256=E6T3Q7SSWW8UvxEkJlXbW_wr4UaY_ddLEb8MWFN0_KA,1040 +mypy/typeshed/stdlib/distutils/bcppcompiler.pyi,sha256=fge2cMbG4jp--o0I2zNcwykh24tJWZtk6leQgAH2NJw,78 +mypy/typeshed/stdlib/distutils/ccompiler.pyi,sha256=BCSgVAvfMJVh8EyX_HqNcggwpF_NEcTTnVournS-UUk,7358 +mypy/typeshed/stdlib/distutils/cmd.pyi,sha256=Dm_n741c6n7C-8bk3RBLOjW5hPPOy4FvRYpxuATINo4,11116 +mypy/typeshed/stdlib/distutils/command/__init__.pyi,sha256=AtZpmh1mhLqsWO11mGBB-CrtRWprrdDs6ylbXlXeTeQ,711 +mypy/typeshed/stdlib/distutils/command/bdist.pyi,sha256=YLLeluU6gqN_RNDL463SiE3aNQaNKDscuCC_Zm0bj3I,875 +mypy/typeshed/stdlib/distutils/command/bdist_dumb.pyi,sha256=tIuYyjsOwPpl1hSJK24tcjom5dTJ98id0ckXH4wN6ME,614 +mypy/typeshed/stdlib/distutils/command/bdist_msi.pyi,sha256=GTn4n9D6tLwyQyuPtNH-hk1JL37l6Kkr3OHA3TMVv8o,1735 +mypy/typeshed/stdlib/distutils/command/bdist_packager.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypy/typeshed/stdlib/distutils/command/bdist_rpm.pyi,sha256=PeCQM0O6QWBoWD9se0GSzoLPwBf16HK5yANxdJ37qBs,1457 +mypy/typeshed/stdlib/distutils/command/bdist_wininst.pyi,sha256=wON2ucrSRQI8V1q2_wi8U4TW-GQpjUe_XRPvX4Z1lKw,646 
+mypy/typeshed/stdlib/distutils/command/build.pyi,sha256=ufnjRjuH62Pb0e-cBDyvVEw67PFV4cOYv1_gP4MIkVE,1081 +mypy/typeshed/stdlib/distutils/command/build_clib.pyi,sha256=qR3q8TLdF63c0p33peUo284iEiLH9-2VLUJ2DleCpJA,918 +mypy/typeshed/stdlib/distutils/command/build_ext.pyi,sha256=a8kGtFnC6UtXGU7XOG80dfSd9G5zeD1fAQulKS7f1aI,1648 +mypy/typeshed/stdlib/distutils/command/build_py.pyi,sha256=omRTnXsWj7TFKmgC9IwhqGmC5Buh7ER9e3WdOB9l54A,1659 +mypy/typeshed/stdlib/distutils/command/build_scripts.pyi,sha256=Fv03MGtaBwwfdI66zTqJNPeD2BMAF98sdJnDRzBhLnM,703 +mypy/typeshed/stdlib/distutils/command/check.pyi,sha256=b_7HEEEs0Zr7lvIaDDg5yWeQBgoXInV-flLTTeg2KGw,1236 +mypy/typeshed/stdlib/distutils/command/clean.pyi,sha256=wagR3bxqh6UAXnvhsDf7qYQY_1jMTEP30QXuq6mJIoc,513 +mypy/typeshed/stdlib/distutils/command/config.pyi,sha256=d2h1OIMfkadYMGT96DPQEtL39QpLk-rCRjQRAGOGBFI,2781 +mypy/typeshed/stdlib/distutils/command/install.pyi,sha256=rFV8ukandp2i7iE23smdT8xL6jidpw3cY-HTSpzDMXo,2290 +mypy/typeshed/stdlib/distutils/command/install_data.pyi,sha256=09diiWpTZv5g5May_za9UEKw_m2fvwVbevMin63lxBs,558 +mypy/typeshed/stdlib/distutils/command/install_egg_info.pyi,sha256=pXV3L3dEjK0NnGiO7tyXgxIMzx5nwpu5v9OEF4jFyzk,532 +mypy/typeshed/stdlib/distutils/command/install_headers.pyi,sha256=2ZePm_2is9uIck2c7iIwLwJet7Ef6JopFVuHF6D6aGE,488 +mypy/typeshed/stdlib/distutils/command/install_lib.pyi,sha256=8hNzKDsNWLb_T9O0Kc75M4WuXpanTeJB-_CrFHebDnU,765 +mypy/typeshed/stdlib/distutils/command/install_scripts.pyi,sha256=lpExgrCH1wnyLS2S-bZwR12gqQTcHEffqWeezL51qu0,548 +mypy/typeshed/stdlib/distutils/command/register.pyi,sha256=P7m44QOal6qsDtw66FfA_amKyxLouynR7s5XtiBP6wE,697 +mypy/typeshed/stdlib/distutils/command/sdist.pyi,sha256=AxkvvnWR2K6xYmTKXqiDM7DUPaneriPiSgtJiYMG4O0,1517 +mypy/typeshed/stdlib/distutils/command/upload.pyi,sha256=re0EVwgTn6jWVMoOWTfsZStLXozX66LSiXokPEHM_74,511 +mypy/typeshed/stdlib/distutils/config.pyi,sha256=Bmpm5-txSuUYd92XnDnfpAevSl9bk5YfXO-I_wXC2QI,497 +mypy/typeshed/stdlib/distutils/core.pyi,sha256=oc3E79ctJ90TJsLmy88jM5XqkqmVyXNOxXYuDMOYy-E,1973 +mypy/typeshed/stdlib/distutils/cygwinccompiler.pyi,sha256=A22lj-kl_06GMoQcl-M7yhWbmjzsTXml6KDjB3in5gM,586 +mypy/typeshed/stdlib/distutils/debug.pyi,sha256=xsHjfIMduqS9E5C28fFERqXr8Ss8y1GGO1rR9VR8vLs,51 +mypy/typeshed/stdlib/distutils/dep_util.pyi,sha256=G_1dehLB4Nq9vEmNKFqTasQtG-A8Ybpqxs1M2-GZwjI,647 +mypy/typeshed/stdlib/distutils/dir_util.pyi,sha256=pGJrASr0CVE9JqaQMcOhV9rkgsXUCI4qoFpyvF3UB18,875 +mypy/typeshed/stdlib/distutils/dist.pyi,sha256=_NHkVAtGXaru8FyZX8cmB0wy1WUN0uM1fpJdiPCKUBo,15218 +mypy/typeshed/stdlib/distutils/errors.pyi,sha256=l1W_FgoP9L-D-hEPFA2BzZuybjN0lV4WBXl0VJ-k7J8,852 +mypy/typeshed/stdlib/distutils/extension.pyi,sha256=KosWjLSvvyfdQTtOCu3fibblHyiFIXm8iHHWrWk916E,1236 +mypy/typeshed/stdlib/distutils/fancy_getopt.pyi,sha256=WmGW8EhQqL1yd6jPgBcCc9JFo9PMqIS8dvslu2IrWpE,1673 +mypy/typeshed/stdlib/distutils/file_util.pyi,sha256=zikh8dhZ0i8b2Pmw3-OtqqpS-w3gAF5GwN9GXEy1EbM,1323 +mypy/typeshed/stdlib/distutils/filelist.pyi,sha256=RiXyurPBQ_d4U0siqqxHk22qsUqAP2EZbX5LWA40lm0,2292 +mypy/typeshed/stdlib/distutils/log.pyi,sha256=8Fv8JYP-w6djwB7ad2fkWaABI-1xk1loqdEJOZiS_go,940 +mypy/typeshed/stdlib/distutils/msvccompiler.pyi,sha256=qQLr26msfhjz-omJutWcRHik3shLh1CIt7CDI3jBd3I,78 +mypy/typeshed/stdlib/distutils/spawn.pyi,sha256=o36CbAwOl3mVBnlyasqqYIBrYT-3v7fjYjyAyL4dFzk,317 +mypy/typeshed/stdlib/distutils/sysconfig.pyi,sha256=AIHwWAmZHKRcRC4ce6Ti9NWJJKu7P2x6b3xTjjOwPic,1210 
+mypy/typeshed/stdlib/distutils/text_file.pyi,sha256=t-pGs6Li5ySUocSO0CEUoRYDUl2Uk-RhswWeECigR_Y,787 +mypy/typeshed/stdlib/distutils/unixccompiler.pyi,sha256=R3VKldSfFPIPPIhygeq0KEphtTp0gxUzLoOHd0QoWW8,79 +mypy/typeshed/stdlib/distutils/util.pyi,sha256=HJpxYeb-4XG_U5o4-GOUnAtUR776jsgUbs3a3Din2ZU,1736 +mypy/typeshed/stdlib/distutils/version.pyi,sha256=yIGp2uvie77qTBWlT2ffBGNXIKJmPfJLPzaE2zua1fc,1308 +mypy/typeshed/stdlib/doctest.pyi,sha256=ItHbQuL_dcVa4ZYZI9Iy96yv_g94X4qveq1GWGX5k0U,7932 +mypy/typeshed/stdlib/email/__init__.pyi,sha256=36My0D09zCOfT26GrMGtXQcX8V4Xp2o5KIuT4ZLRUwY,2769 +mypy/typeshed/stdlib/email/_header_value_parser.pyi,sha256=ZUG-wlcIfE7myRjJ9W_gDFnt6m3JKQWXt7IFwXtt4DA,11430 +mypy/typeshed/stdlib/email/_policybase.pyi,sha256=swUeIOQDH3lf9NETZ3xa_21iW79bWP3WyDslKBuV5u8,3331 +mypy/typeshed/stdlib/email/base64mime.pyi,sha256=g98A7lvsErIaif8dVjP_LyoVFSXd6lNuJ_pOiTHudqs,559 +mypy/typeshed/stdlib/email/charset.pyi,sha256=K2kZQPiVXjmV9O1dRcRkIG-YkD6zRzimCJ6iy88z2KA,1713 +mypy/typeshed/stdlib/email/contentmanager.pyi,sha256=UwmeUcRuRTCDHXVEDzDASBN4lEtVG1A9BonNaMmv0b8,480 +mypy/typeshed/stdlib/email/encoders.pyi,sha256=dJc5t6R6TtZGffzRC_ji2O2KNj9n_fJHzkAnKWTbfcQ,293 +mypy/typeshed/stdlib/email/errors.pyi,sha256=YytDsUjPrDoI4fpZktf-mhKVCosZOedsS6pQfAFhDg4,1627 +mypy/typeshed/stdlib/email/feedparser.pyi,sha256=tCckWKeyn3VByGY8oWuHWJryiEMgrMI-ehJg0TaQeOI,978 +mypy/typeshed/stdlib/email/generator.pyi,sha256=rPe8JphvX-0-PtPiwWWPabik9A5DdyleteBCXSJi57g,2373 +mypy/typeshed/stdlib/email/header.pyi,sha256=qSEdPSMNtA22vkNbZ82enBddW0sZ6sq7GxBASj1-i6U,1332 +mypy/typeshed/stdlib/email/headerregistry.pyi,sha256=4SheTGAWbLOmWOEH43AU4Y7kotqjRYMCK47VyrU54pw,6284 +mypy/typeshed/stdlib/email/iterators.pyi,sha256=Vou7LSsfU52ckW-lKx4i49KGi0rd54LctjXHimRblrc,648 +mypy/typeshed/stdlib/email/message.pyi,sha256=S0Rj-3C9_AylKvSJEAtTHPLQDEQmpwxN2AoHFpO0vBw,9210 +mypy/typeshed/stdlib/email/mime/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypy/typeshed/stdlib/email/mime/application.pyi,sha256=PkqCQXJMdIRSXBV14unvCnpQTuNcEQO23W8CJ8hhtAc,498 +mypy/typeshed/stdlib/email/mime/audio.pyi,sha256=hsnNC5xAaI2pvS7DYMr58Y46U-hv4MjAKUF0wXWnDfs,482 +mypy/typeshed/stdlib/email/mime/base.pyi,sha256=zMUOzyzRFw00inwMFYk-GG8ap-SM9dtp1GRTxjfAiWU,271 +mypy/typeshed/stdlib/email/mime/image.pyi,sha256=E3zejA7f_g0NY09tvTj8y1jzGQ0IPrhsKDAofd6ZObA,482 +mypy/typeshed/stdlib/email/mime/message.pyi,sha256=bsaprH4pzYNkTvgmycx-y5dLBHIk9jCgnnyiBtVZ0VA,313 +mypy/typeshed/stdlib/email/mime/multipart.pyi,sha256=F2NodkSKHx_3Vzad43ESWUF1LVYzGjpJGszm8NGsb-Q,504 +mypy/typeshed/stdlib/email/mime/nonmultipart.pyi,sha256=YW7_zxIBEwStGGAuw7nQEYYS7Yz_TMuTW4-ZIFpIpM4,108 +mypy/typeshed/stdlib/email/mime/text.pyi,sha256=wgYFMCXnpeiM7zp8gpxkLRLCh-7wrGuZUNvRqSHzbG8,298 +mypy/typeshed/stdlib/email/parser.pyi,sha256=Nf0GvqrZFcpD2sQzUTByKAtvevJVUdnAYSXs7-k-Qs0,1975 +mypy/typeshed/stdlib/email/policy.pyi,sha256=HIZ7t_nZivqGEuZouZg9OwehEOggKg6-it_wsrKda5s,2813 +mypy/typeshed/stdlib/email/quoprimime.pyi,sha256=bSFnFlSadE1pXHmqDzvAEnWwNyeWSLm-i21Kczwrt6A,835 +mypy/typeshed/stdlib/email/utils.pyi,sha256=3vrH3BVda5HW4zJE2SrQjGGJELYOT4Og3M8SYGmIAhg,2930 +mypy/typeshed/stdlib/encodings/__init__.pyi,sha256=ol9IEdxlgg43QiUVMjqoosp3mS8NTdC8A0jNy8bhwIo,435 +mypy/typeshed/stdlib/encodings/aliases.pyi,sha256=NBl4ko1LeUclvHYI0p7ALF_qM5n2aJnXH5HXapTR95E,24 +mypy/typeshed/stdlib/encodings/ascii.pyi,sha256=JXS9tp2DG26Xrrhjf5-3JwR_6EK_qcZuK8WLcw_GCA8,1346 
+mypy/typeshed/stdlib/encodings/base64_codec.pyi,sha256=BPqaBf4QojblNqNABf4bHmapZ9UJE0jlVB2C3depZ9I,1105 +mypy/typeshed/stdlib/encodings/big5.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/big5hkscs.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/bz2_codec.pyi,sha256=tpV7-M_UeQQDj6g3W2yDQz-rKjp2Ji_7A30aCc611t8,1099 +mypy/typeshed/stdlib/encodings/charmap.pyi,sha256=MrYgD5r621vymhH0pMTJOyoD9MBtf3GDmbnAG_7bhcA,1652 +mypy/typeshed/stdlib/encodings/cp037.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp1006.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp1026.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp1125.pyi,sha256=IuKRANFFOqIZW4rgRt7Uqnd2HxyegtYKuYOwVbRR9gY,733 +mypy/typeshed/stdlib/encodings/cp1140.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp1250.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp1251.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp1252.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp1253.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp1254.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp1255.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp1256.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp1257.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp1258.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp273.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp424.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp437.pyi,sha256=IuKRANFFOqIZW4rgRt7Uqnd2HxyegtYKuYOwVbRR9gY,733 +mypy/typeshed/stdlib/encodings/cp500.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp720.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp737.pyi,sha256=IuKRANFFOqIZW4rgRt7Uqnd2HxyegtYKuYOwVbRR9gY,733 +mypy/typeshed/stdlib/encodings/cp775.pyi,sha256=IuKRANFFOqIZW4rgRt7Uqnd2HxyegtYKuYOwVbRR9gY,733 +mypy/typeshed/stdlib/encodings/cp850.pyi,sha256=IuKRANFFOqIZW4rgRt7Uqnd2HxyegtYKuYOwVbRR9gY,733 +mypy/typeshed/stdlib/encodings/cp852.pyi,sha256=IuKRANFFOqIZW4rgRt7Uqnd2HxyegtYKuYOwVbRR9gY,733 +mypy/typeshed/stdlib/encodings/cp855.pyi,sha256=IuKRANFFOqIZW4rgRt7Uqnd2HxyegtYKuYOwVbRR9gY,733 +mypy/typeshed/stdlib/encodings/cp856.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp857.pyi,sha256=IuKRANFFOqIZW4rgRt7Uqnd2HxyegtYKuYOwVbRR9gY,733 +mypy/typeshed/stdlib/encodings/cp858.pyi,sha256=IuKRANFFOqIZW4rgRt7Uqnd2HxyegtYKuYOwVbRR9gY,733 +mypy/typeshed/stdlib/encodings/cp860.pyi,sha256=IuKRANFFOqIZW4rgRt7Uqnd2HxyegtYKuYOwVbRR9gY,733 +mypy/typeshed/stdlib/encodings/cp861.pyi,sha256=IuKRANFFOqIZW4rgRt7Uqnd2HxyegtYKuYOwVbRR9gY,733 +mypy/typeshed/stdlib/encodings/cp862.pyi,sha256=IuKRANFFOqIZW4rgRt7Uqnd2HxyegtYKuYOwVbRR9gY,733 +mypy/typeshed/stdlib/encodings/cp863.pyi,sha256=IuKRANFFOqIZW4rgRt7Uqnd2HxyegtYKuYOwVbRR9gY,733 
+mypy/typeshed/stdlib/encodings/cp864.pyi,sha256=IuKRANFFOqIZW4rgRt7Uqnd2HxyegtYKuYOwVbRR9gY,733 +mypy/typeshed/stdlib/encodings/cp865.pyi,sha256=IuKRANFFOqIZW4rgRt7Uqnd2HxyegtYKuYOwVbRR9gY,733 +mypy/typeshed/stdlib/encodings/cp866.pyi,sha256=IuKRANFFOqIZW4rgRt7Uqnd2HxyegtYKuYOwVbRR9gY,733 +mypy/typeshed/stdlib/encodings/cp869.pyi,sha256=IuKRANFFOqIZW4rgRt7Uqnd2HxyegtYKuYOwVbRR9gY,733 +mypy/typeshed/stdlib/encodings/cp874.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp875.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/cp932.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/cp949.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/cp950.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/euc_jis_2004.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/euc_jisx0213.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/euc_jp.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/euc_kr.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/gb18030.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/gb2312.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/gbk.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/hex_codec.pyi,sha256=6bEBV4unOUo8eopKjspIfrarAnyMTCccp71cZNX9usQ,1099 +mypy/typeshed/stdlib/encodings/hp_roman8.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/hz.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/idna.pyi,sha256=B9b4Xh5OeA3WSMD62Q-0i8N4OOBVjPsoAth6Zwjqpik,924 +mypy/typeshed/stdlib/encodings/iso2022_jp.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/iso2022_jp_1.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/iso2022_jp_2.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/iso2022_jp_2004.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/iso2022_jp_3.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/iso2022_jp_ext.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/iso2022_kr.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/iso8859_1.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/iso8859_10.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/iso8859_11.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/iso8859_13.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/iso8859_14.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/iso8859_15.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/iso8859_16.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/iso8859_2.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 
+mypy/typeshed/stdlib/encodings/iso8859_3.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/iso8859_4.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/iso8859_5.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/iso8859_6.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/iso8859_7.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/iso8859_8.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/iso8859_9.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/johab.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/koi8_r.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/koi8_t.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/koi8_u.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/kz1048.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/latin_1.pyi,sha256=dGwLgYjYgcDVLAqJXzaOltDLDNgQNC-AkFvwfzgT2ls,1354 +mypy/typeshed/stdlib/encodings/mac_arabic.pyi,sha256=IuKRANFFOqIZW4rgRt7Uqnd2HxyegtYKuYOwVbRR9gY,733 +mypy/typeshed/stdlib/encodings/mac_croatian.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/mac_cyrillic.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/mac_farsi.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/mac_greek.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/mac_iceland.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/mac_latin2.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/mac_roman.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/mac_romanian.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/mac_turkish.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/mbcs.pyi,sha256=UiUp0WbMdEMZHIt8SnKjHg69ytiUNItKVAqF_DQMBGc,1091 +mypy/typeshed/stdlib/encodings/oem.pyi,sha256=N9CqMmApOhl7nSiLzjurOoNE2RLhmZNdP6HknNp_fm0,1087 +mypy/typeshed/stdlib/encodings/palmos.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/ptcp154.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 +mypy/typeshed/stdlib/encodings/punycode.pyi,sha256=UkVSNYEFRPDcRIKMbI31RzSlUoE7zrV0CWXsXBHlne8,1593 +mypy/typeshed/stdlib/encodings/quopri_codec.pyi,sha256=vBA4qnjYHR5HgJtXe4fCziiPxHYkivKgS0kV-nppjX8,1105 +mypy/typeshed/stdlib/encodings/raw_unicode_escape.pyi,sha256=0fWozv5f1XYye3i6HZPnrrMUGqyqxJpDxq3KxJcsug0,1000 +mypy/typeshed/stdlib/encodings/rot_13.pyi,sha256=dU8Pz0tT7qe9xXipOvYcO6mcdPPXWvIIUJ7nsJVxkjQ,889 +mypy/typeshed/stdlib/encodings/shift_jis.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/shift_jis_2004.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/shift_jisx0213.pyi,sha256=lWSIa6c1POZybwhXwNOeTmw5ltJiFTiewyGFqrBU3-U,920 +mypy/typeshed/stdlib/encodings/tis_620.pyi,sha256=CGY8VLG2AMheowXmED0BuzUK5ByfDNUrJko2UJdFUb4,730 
+mypy/typeshed/stdlib/encodings/undefined.pyi,sha256=kCUblX0Okd8hRsoTvKG6UvdQUbnLAbCr7z4MrfuhpU8,755 +mypy/typeshed/stdlib/encodings/unicode_escape.pyi,sha256=w981JOse_RTIukRgKiOqbCcktW5eaBUdyLNMoAIeWzA,992 +mypy/typeshed/stdlib/encodings/utf_16.pyi,sha256=SRka2t2ru2-Psn1H98shycpPpmxZhFn69wEFTXSuDck,761 +mypy/typeshed/stdlib/encodings/utf_16_be.pyi,sha256=k-ApL-tptz_Qby4BnAVsmUxfgzA2SDVcZBlgNdIyfes,1004 +mypy/typeshed/stdlib/encodings/utf_16_le.pyi,sha256=ShA30MIHkKSurNzu8CE0d1PxGrXqcO_JIpEshtIOALg,1004 +mypy/typeshed/stdlib/encodings/utf_32.pyi,sha256=PfqJtFEQglw65eSDJnvYofUWiQG-QZ9Z8RY3QHYr0yg,761 +mypy/typeshed/stdlib/encodings/utf_32_be.pyi,sha256=RF2NNlVWIYAzZ4kd97J5GaSYFm0uwiZeWMDdaWbXl0U,1004 +mypy/typeshed/stdlib/encodings/utf_32_le.pyi,sha256=wDx-GPbmCG3z_8VLu4Bm4VIcgBXXcxrmkyDt9-QW_0Y,1004 +mypy/typeshed/stdlib/encodings/utf_7.pyi,sha256=P-9LUl4xTXFeZofgd3n7OeR7euVOs5nM-zmMD_MJhf0,988 +mypy/typeshed/stdlib/encodings/utf_8.pyi,sha256=uENG0zdTfNG6D-Hwpjmtpsn937wGk9LcaXB05dqgKY4,988 +mypy/typeshed/stdlib/encodings/utf_8_sig.pyi,sha256=CAvKrplGLrXKmpdEW4-PjihiA5UICRtcD8YaJX5dhiM,1059 +mypy/typeshed/stdlib/encodings/uu_codec.pyi,sha256=Se3B9axmM6vAb3QORy3eL3ZXR9yrW96-3PhugTE0Ww0,1148 +mypy/typeshed/stdlib/encodings/zlib_codec.pyi,sha256=qVKqhqMfl5_AOj2gr6lfLi_KwOA3kotTpygyaGL6BZk,1101 +mypy/typeshed/stdlib/ensurepip/__init__.pyi,sha256=8tmoDM1Cy7ojGznNaYzp_-zzoTYP_FunKhPvKpsVU4I,264 +mypy/typeshed/stdlib/enum.pyi,sha256=VxVsOElEUiuPtAw-imDgyZGh2mUulzoK_Utc2FDOfTE,13383 +mypy/typeshed/stdlib/errno.pyi,sha256=9mBovztCB7JiBa_Vnqd8IcLEIY1SGQkoiXb0Ua6of_E,5522 +mypy/typeshed/stdlib/faulthandler.pyi,sha256=VNjMuy-w5imAEti29kIizJ9XUuulZS-70qrJ-WJUus4,899 +mypy/typeshed/stdlib/fcntl.pyi,sha256=_qHO3krIV030nllkmIUy9DZwPtus6ETGJ3XGXl02FUQ,5495 +mypy/typeshed/stdlib/filecmp.pyi,sha256=_8kalXH1bxz2bhbKjN37Axjbd-4VZ34Id3uW80IwHIE,2237 +mypy/typeshed/stdlib/fileinput.pyi,sha256=KcfS2Kniq5qq4ll0SDnrxVJX8G6kw4Cy1JwoqBmBkCk,7106 +mypy/typeshed/stdlib/fnmatch.pyi,sha256=a4FS6DccPaFufmc4RaWyx6_YUwJn5EY1Y1dAWiDeN6Q,525 +mypy/typeshed/stdlib/formatter.pyi,sha256=PoCFa7jJ7efz-ZO-IJU73MK_O9t7mjbYwjxBaSppqpU,3711 +mypy/typeshed/stdlib/fractions.pyi,sha256=Ittk8E5Yt-PTbrhkgsoyQkJ9x2JvUo2E0qE7ja0UwsU,5879 +mypy/typeshed/stdlib/ftplib.pyi,sha256=YYkUMUYQdMFZC7RlrDXKnAW3aaaZ3-AsPLFO9JD7WCg,5722 +mypy/typeshed/stdlib/functools.pyi,sha256=ExDYzZ1U9ILiJqtM0gZnJVgNONyVmK2OQHjK0L_YP-I,9859 +mypy/typeshed/stdlib/gc.pyi,sha256=Fvm1rZCpvIQevxa98rYoyYw4Cy6f0oFJJJ93ZE3sFAg,1157 +mypy/typeshed/stdlib/genericpath.pyi,sha256=z8Eq4qv3HvhYTk9WA69SzQ6n8-7eq59Kd0ROXfLkpHk,2384 +mypy/typeshed/stdlib/getopt.pyi,sha256=Gn-k7sstt-bKMRdLzdBORwZuWF12zbz53R4Lyp26NUk,909 +mypy/typeshed/stdlib/getpass.pyi,sha256=ftJHHXPw2nli4yEyEIaavmo2LKxaeuKthi4n0g1qS24,401 +mypy/typeshed/stdlib/gettext.pyi,sha256=z0oFjNUalU5YRjnAnPETfONgWcuMdgiB5eoBy2sk3_s,7574 +mypy/typeshed/stdlib/glob.pyi,sha256=nhNUCpB0XuONiJ7sX9hsRVPbcwo033QG5Dj7w3XR7bg,2201 +mypy/typeshed/stdlib/graphlib.pyi,sha256=3loMDkMk4j-vtp5dGRaOa_RNqyM3FUZCJhTJIyrplzE,917 +mypy/typeshed/stdlib/grp.pyi,sha256=2hJQL4kCKhQ-QBAa87oM83ldvW4WaOkWTlySGzB9VGg,702 +mypy/typeshed/stdlib/gzip.pyi,sha256=nRks2mwqCa2sb8Rw-hSzv4GGpd-z3LfLtGu-qyfamXk,5567 +mypy/typeshed/stdlib/hashlib.pyi,sha256=NYUM3R0PYFxdurLDOQhhn5GJiQ692MIr-PpwOmlzCp4,2207 +mypy/typeshed/stdlib/heapq.pyi,sha256=eynHYl_fbi5Xo-fbV5ON60Z4AwJLycSFosGxfTZf7ko,772 +mypy/typeshed/stdlib/hmac.pyi,sha256=40bupGra_ng8jZ3ACrLf6chzc-vdo9YVWDBPddosgy0,1258 
+mypy/typeshed/stdlib/html/__init__.pyi,sha256=TKNt2K9D-oAvCTmt9_EtgRndcpb--8rawxYFMPHTSC0,157 +mypy/typeshed/stdlib/html/entities.pyi,sha256=RC8NoJ_AMu5IUWVuSXkGvKD5O-6ZNShjHOL0NQgXNpo,236 +mypy/typeshed/stdlib/html/parser.pyi,sha256=WuDpXERSFK3XaJaDI4cIxkFl9Iudvifpm-JpBL2XbyU,2098 +mypy/typeshed/stdlib/http/__init__.pyi,sha256=QP08bjXK5harBEh319rMNwQmB3WrtFYMvSNPGs3CH0w,3030 +mypy/typeshed/stdlib/http/client.pyi,sha256=KL3L_aTSmCOFa8ENSX99TZrZXoxmcyxF9fDVSTflyhY,8713 +mypy/typeshed/stdlib/http/cookiejar.pyi,sha256=K1OKZM_u4Tf-NlITqc6DkMzi63EgJeLyyIrQ6ZEQI1w,6667 +mypy/typeshed/stdlib/http/cookies.pyi,sha256=WrAEKmJJXRbqS6YpeSrieVO_oW7iUbkZSMTu5KhBYjQ,2229 +mypy/typeshed/stdlib/http/server.pyi,sha256=8aCsv0TBG8dfl_9vqf2BdcKDiVYuU2j9d_DRGhLNJ2A,5726 +mypy/typeshed/stdlib/imaplib.pyi,sha256=jypgDuDQY43YDfflARRgPfurWnEen6uLd11TR5j_o5o,8307 +mypy/typeshed/stdlib/imghdr.pyi,sha256=syq6jKngvEHgcopZE0xDIZRok2gbjdCW4mT6DBPDNtI,541 +mypy/typeshed/stdlib/imp.pyi,sha256=EZyon6q6zcsDFy2qM0f5RQqyIGAnQ598mt85lWiTo54,2484 +mypy/typeshed/stdlib/importlib/__init__.pyi,sha256=s5reSVWSp1cZl-U2HSPsg5QXbM1zpY1IcHnnQsrWV5U,724 +mypy/typeshed/stdlib/importlib/_abc.pyi,sha256=CZnLlR8x_ckCsQ4W9qGS-iIv1CiYoOYma6n8aCVTBTM,858 +mypy/typeshed/stdlib/importlib/_bootstrap.pyi,sha256=qdoz8OV6L4bWxFlroAznM-KSVf0bYNCQeYTz-Uk1cUU,129 +mypy/typeshed/stdlib/importlib/_bootstrap_external.pyi,sha256=pfdzy0vceWdL5QBZyMak6yLi9ULR0xR3PgGQbO6M2BI,117 +mypy/typeshed/stdlib/importlib/abc.pyi,sha256=0THKS1Fkt8M1WFd-y-xCJrlK8Hjs_88wInME7eEn-TU,8119 +mypy/typeshed/stdlib/importlib/machinery.pyi,sha256=UsDZB7rAhKUwQw3ZRWHisxYl9VT6sI764eEIVCtzQ_Q,1503 +mypy/typeshed/stdlib/importlib/metadata/__init__.pyi,sha256=rRYAJosHlfIW29h3hvm3NiDXxRi-QwGJES2chNlGbns,10488 +mypy/typeshed/stdlib/importlib/metadata/_meta.pyi,sha256=dtApBQ2RiMU-m2Y1B_7yLfMRlLHEXVD1OACZdPKwGVw,2552 +mypy/typeshed/stdlib/importlib/metadata/diagnose.pyi,sha256=sf4qsMlUFHtdxkUxCQbo-hL0app08cWTq9c-z0HaHy4,59 +mypy/typeshed/stdlib/importlib/readers.pyi,sha256=n-H-gHx8XAaX-q08_izCv7e6dgYdHB0n1QG7_BUGiX8,2729 +mypy/typeshed/stdlib/importlib/resources/__init__.pyi,sha256=juDRPr265zenEXFKtpTHr5RN2IrG7-Dvki77giGYjUE,2771 +mypy/typeshed/stdlib/importlib/resources/_common.pyi,sha256=5NPtrEBMicBxF4ozU69dVj7AtARLyl8Z1zsWSmkBlzU,1600 +mypy/typeshed/stdlib/importlib/resources/_functional.pyi,sha256=2WKLITIOtf0yZkrvsr9BcFB-Rn458TqFhmH7a_tUqzk,1597 +mypy/typeshed/stdlib/importlib/resources/abc.pyi,sha256=ZBS8iFCLoGB4YnqZnGxA7BjZ0U8Si0zqoxnJ5T-F6Oc,2137 +mypy/typeshed/stdlib/importlib/resources/readers.pyi,sha256=L9ISdjyiVx8ppnP2bTSjbdd_dzvr1lY3_aRn6ZfitsM,398 +mypy/typeshed/stdlib/importlib/resources/simple.pyi,sha256=QNzW9FV5ELG2KD3lk6-YrhAyKnzzCY7MiUN1ENqo18I,2200 +mypy/typeshed/stdlib/importlib/simple.pyi,sha256=Px9D1mMPoXrh__Iy1JacqIN2AEUSTLHrV2fVGRRkTZI,354 +mypy/typeshed/stdlib/importlib/util.pyi,sha256=ufNdpvJwZjkC2rnGtIcvcCnCJB02lLj7rFpBhAzYxjE,2832 +mypy/typeshed/stdlib/inspect.pyi,sha256=jzl75GS8bcgUBgbGPVtd7TGwblqy1dbX6FZBIO__O5U,23977 +mypy/typeshed/stdlib/io.pyi,sha256=VVN4o6zv2BYaAAO9tHPKmAMx_-euN2qG6E8A5n02rzo,1944 +mypy/typeshed/stdlib/ipaddress.pyi,sha256=eSUMLrAYTxbHRZpdmTtZoJFIAoYC-aNxgNBLAD380gA,8480 +mypy/typeshed/stdlib/itertools.pyi,sha256=W9Gy2gUshO2uRUIrK--QFf_VWJGnyBmux05FhxOuBWw,13160 +mypy/typeshed/stdlib/json/__init__.pyi,sha256=XhcpH-7ynXInaWJyf2TG0DKKt3fC_1Owvn2s6E6aefY,2061 +mypy/typeshed/stdlib/json/decoder.pyi,sha256=XdU0nhYShlWZbSXpxGdsgurtM3S_l0C9mDYCV9Tfaik,1117 
+mypy/typeshed/stdlib/json/encoder.pyi,sha256=f9FjO4Rjf_lLqLjPkusYRgCdrtG8hq7myq7p6_c3Bec,1323 +mypy/typeshed/stdlib/json/scanner.pyi,sha256=4UhE-14W8W3ciJpWohjD7YmE8NJb5W1z-csoM_de_oY,171 +mypy/typeshed/stdlib/json/tool.pyi,sha256=d4f22QGwpb1ZtDk-1Sn72ftvo4incC5E2JAikmjzfJI,24 +mypy/typeshed/stdlib/keyword.pyi,sha256=eCmwLAJJZkNZG5hADH8mPsiQVmSLO8FGKYctaFcmabo,434 +mypy/typeshed/stdlib/lib2to3/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypy/typeshed/stdlib/lib2to3/btm_matcher.pyi,sha256=zWMSDahNavhi40hkU1rK-3lPsSgvlsDJtwhQfqAlmSU,860 +mypy/typeshed/stdlib/lib2to3/fixer_base.pyi,sha256=NacQW1e6fooBSu5crrweMC0KKcBhXDQmsQbe11U3cj0,1692 +mypy/typeshed/stdlib/lib2to3/fixes/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypy/typeshed/stdlib/lib2to3/fixes/fix_apply.pyi,sha256=xMqbvuWy1ujOd9odCGJi3UpeSLmlYk6jNK9L5jydnAc,215 +mypy/typeshed/stdlib/lib2to3/fixes/fix_asserts.pyi,sha256=UI605ggRRcCzfho7-zYV7NelkKfOxP4pG9518pIgQJM,259 +mypy/typeshed/stdlib/lib2to3/fixes/fix_basestring.pyi,sha256=lY1h20fQ_HpI-54CXXjhRpazbh-I8PMasjxPau1iJjc,240 +mypy/typeshed/stdlib/lib2to3/fixes/fix_buffer.pyi,sha256=RysLZN7QX0ouBHx4bD5sRCTtV_p6GQlF-PKTFpePqHo,224 +mypy/typeshed/stdlib/lib2to3/fixes/fix_dict.pyi,sha256=qcbZRE3X7cFCwAQ-BH_0Nkxk0wEvS04tPUOfJbcae4c,424 +mypy/typeshed/stdlib/lib2to3/fixes/fix_except.pyi,sha256=Df6KW8jrbtYWU_kWAqlY5FRLc8drkCgE8pmpp1S14Lo,415 +mypy/typeshed/stdlib/lib2to3/fixes/fix_exec.pyi,sha256=2qmp1Dmizd6ZgyeC8J5HvjLXpYBYT5oBBGCD9C7idWg,214 +mypy/typeshed/stdlib/lib2to3/fixes/fix_execfile.pyi,sha256=Ms7SLIU2e4kOB_ozEAs7Q5Oo86ka1k214dr8Iw_dQnc,218 +mypy/typeshed/stdlib/lib2to3/fixes/fix_exitfunc.pyi,sha256=gtX9wmo7ARiCUZdcYTFfmNsnbOXTudXM6yUzXf0Hui8,445 +mypy/typeshed/stdlib/lib2to3/fixes/fix_filter.pyi,sha256=J4qoUbRK8teogKnr91NZA17UlcWexU8YhcM8OCpYG60,280 +mypy/typeshed/stdlib/lib2to3/fixes/fix_funcattrs.pyi,sha256=2gPueWngu_FThhvFNXCbHzJzJfdez0m88wdfu_FBrCg,227 +mypy/typeshed/stdlib/lib2to3/fixes/fix_future.pyi,sha256=D2C_mtrnL2BZofMuWARTRX21Is6U1oKTUSy5T7-98d0,216 +mypy/typeshed/stdlib/lib2to3/fixes/fix_getcwdu.pyi,sha256=tredl7rFWBAEJs2ZPtiEd5jD0FP0Hyr7sQkwlms554A,225 +mypy/typeshed/stdlib/lib2to3/fixes/fix_has_key.pyi,sha256=oCI0xQcAIh4X8qW6ZF952bMpHCgp3lOJVFZLVwdTmDA,216 +mypy/typeshed/stdlib/lib2to3/fixes/fix_idioms.pyi,sha256=z73__dxQnDnXOmtl97XsDj0OcwwXVomz161RiJ-XmTY,459 +mypy/typeshed/stdlib/lib2to3/fixes/fix_import.pyi,sha256=atLYbUa9wDEYsm8ImyHmmlfT6WVdiddPK0FUr0jtS8o,507 +mypy/typeshed/stdlib/lib2to3/fixes/fix_imports.pyi,sha256=cBkDJycNw0JcPQ3U63ODaqb2LNVW3MWbvxFD1v7SDmg,653 +mypy/typeshed/stdlib/lib2to3/fixes/fix_imports2.pyi,sha256=QDdVrRYdDeFHQ7d5qar8TK7derNWpJdMrwfUvMm2NTI,150 +mypy/typeshed/stdlib/lib2to3/fixes/fix_input.pyi,sha256=QYFs7CJ4jZZ-JwEVl1tjfFdPZsyACH9VfYoY2GWkW-I,269 +mypy/typeshed/stdlib/lib2to3/fixes/fix_intern.pyi,sha256=kmI0_JFByQJiRnumNO6lHjHpVZqdiw0Qs-vVFoix2nk,252 +mypy/typeshed/stdlib/lib2to3/fixes/fix_isinstance.pyi,sha256=LCDztGYxR0NBoLaNfU6kBxBUUG_VInMqccPxIQ3LOA8,228 +mypy/typeshed/stdlib/lib2to3/fixes/fix_itertools.pyi,sha256=LMCpC8O9x9EmdL2QLy5QUHnCG3nMhx0hrW8KMsNgx60,245 +mypy/typeshed/stdlib/lib2to3/fixes/fix_itertools_imports.pyi,sha256=RSOaZS-pyByKNWYwmD91NHE51PCNZp8BkoR7V8N4K3k,230 +mypy/typeshed/stdlib/lib2to3/fixes/fix_long.pyi,sha256=DFYBWAAkdgf09ftU0Hkdv7X3KPiJgHVf6URHpnyCyCQ,240 +mypy/typeshed/stdlib/lib2to3/fixes/fix_map.pyi,sha256=LPR5jxzCxakicFoFqCKGF1iM4wQbSM1ZfO-gl2OiwRc,274 +mypy/typeshed/stdlib/lib2to3/fixes/fix_metaclass.pyi,sha256=vr0_fJbfJEDXgt0RGMprEsR3Jwq934IHVOYitO-Yvbk,587 
+mypy/typeshed/stdlib/lib2to3/fixes/fix_methodattrs.pyi,sha256=rCRrKsYEcXFsMLayDRnWG_X4tE7vHqxMGgOAbYM26pw,264 +mypy/typeshed/stdlib/lib2to3/fixes/fix_ne.pyi,sha256=za5_699UQ5u7gExpW0rFQWLoz0gBWiaL2bdM-Uk8XkE,217 +mypy/typeshed/stdlib/lib2to3/fixes/fix_next.pyi,sha256=4OdBIkvhFM1Ek3QWe9ACAUW2aqBE48SLKiY69RIG9YA,518 +mypy/typeshed/stdlib/lib2to3/fixes/fix_nonzero.pyi,sha256=BJ-vs3pK9DLUFfA-BjFExiPExmcDwyoNofwrY0ZPu4I,225 +mypy/typeshed/stdlib/lib2to3/fixes/fix_numliterals.pyi,sha256=DA_3aqN1HEiG2MaxlQYmmwGFX3UkxGoTlq2ipJskiTk,226 +mypy/typeshed/stdlib/lib2to3/fixes/fix_operator.pyi,sha256=Qph9PS3cZMsJBpxSnyqvdLXm4Wz6MRoM9OhIChvkBBw,312 +mypy/typeshed/stdlib/lib2to3/fixes/fix_paren.pyi,sha256=LHA6O3-Pc0iVRgFxTnk5SwdrhzD9ibtsb2xYVjxN5zw,223 +mypy/typeshed/stdlib/lib2to3/fixes/fix_print.pyi,sha256=PCnNJjkjn32OyMZDGt9I-tyuNF2rDeulcCHPaCdnN1Y,334 +mypy/typeshed/stdlib/lib2to3/fixes/fix_raise.pyi,sha256=kWLdHKgrCjNgWxy8_S8t1GLE_-RmCxzdV6EUDPGFRLA,215 +mypy/typeshed/stdlib/lib2to3/fixes/fix_raw_input.pyi,sha256=6JYhq6treAVEVMIJWogmw7a_7b7OQnGQCTdSTPaUhJM,226 +mypy/typeshed/stdlib/lib2to3/fixes/fix_reduce.pyi,sha256=gx_ts6bc5MOHO2RI_uK5WsH-ibfTHoqewnOq6bMCttI,264 +mypy/typeshed/stdlib/lib2to3/fixes/fix_reload.pyi,sha256=jDt5tEFPpe5bpW2_xX_K7g2mXZHBgD5daf1YLq1xrgk,252 +mypy/typeshed/stdlib/lib2to3/fixes/fix_renames.pyi,sha256=CE1ZA-tAamCMq6Gft9Kl9z15jtuAr6OBywN3fo5fAJ8,507 +mypy/typeshed/stdlib/lib2to3/fixes/fix_repr.pyi,sha256=91OLJrm0eK1k1FzpE4563_J_RnHfERocHeSrj6DE8qw,214 +mypy/typeshed/stdlib/lib2to3/fixes/fix_set_literal.pyi,sha256=b-wHK26YV2c2R5cBOhIU63gf25JYqXbycta1zHqyrik,224 +mypy/typeshed/stdlib/lib2to3/fixes/fix_standarderror.pyi,sha256=RQP7prg227V7Lm5grhrzD7MHg3ApogLcb5SlgyR7DzI,223 +mypy/typeshed/stdlib/lib2to3/fixes/fix_sys_exc.pyi,sha256=izheZTCqKPd-Fl2aZWzhhcFOtdMIY4F_lVqtOmcdtng,250 +mypy/typeshed/stdlib/lib2to3/fixes/fix_throw.pyi,sha256=Ayft1UP88mwxNDsP3KXuyEUfacJ9myhP07vP3lXHL28,223 +mypy/typeshed/stdlib/lib2to3/fixes/fix_tuple_params.pyi,sha256=YIZEizRdrqqcuJwK6XW87OUSUQHo9aSMdkGMJls1nHk,451 +mypy/typeshed/stdlib/lib2to3/fixes/fix_types.pyi,sha256=p3quhtHggwMg-KDc2dmO8c9oTT6SOZLTG1feVPqXuVw,215 +mypy/typeshed/stdlib/lib2to3/fixes/fix_unicode.pyi,sha256=AVUm0QE0OxwwXgZLWITXQBHrp3WFQGyQo1kPN8tF6io,369 +mypy/typeshed/stdlib/lib2to3/fixes/fix_urllib.pyi,sha256=RMEF00r8pC3wq6PgHKTg52_Iwo8azKPRs5oGKmKAJAA,556 +mypy/typeshed/stdlib/lib2to3/fixes/fix_ws_comma.pyi,sha256=7WECM-TvzSNknzhW7fy7Q1RAAwLNsncFNP_ujjHKPZA,304 +mypy/typeshed/stdlib/lib2to3/fixes/fix_xrange.pyi,sha256=IE05vQKlmMkk9tPbvH-I3FOfSNd-XP1XLvdBX8K0Rb8,726 +mypy/typeshed/stdlib/lib2to3/fixes/fix_xreadlines.pyi,sha256=aXv8cGy3hLrtZerMtHLAiNQaSAQQGke3C6WJR-8A_Ok,228 +mypy/typeshed/stdlib/lib2to3/fixes/fix_zip.pyi,sha256=Qw4i-Jn3A2LoG-jSpHJ8BoUSHk38iegK1FyxtUqngD8,274 +mypy/typeshed/stdlib/lib2to3/main.pyi,sha256=MgUWnovV8WODrjmnR55Xgej8tjBSg3p9gOK1GRbVkJs,1532 +mypy/typeshed/stdlib/lib2to3/pgen2/__init__.pyi,sha256=J1r7O6-RC55RX9XuIU4QcT8sm-7ySY0eowiibNJz0kE,287 +mypy/typeshed/stdlib/lib2to3/pgen2/driver.pyi,sha256=PNvewWFDcgWCmmEwYEKtBrKrHkukMZqkryr6WauQZ1w,1067 +mypy/typeshed/stdlib/lib2to3/pgen2/grammar.pyi,sha256=dG17yFsbtkiDsvKCyWRZvc0zmaCLF83m_naTZzUziRU,682 +mypy/typeshed/stdlib/lib2to3/pgen2/literals.pyi,sha256=TtrXnXJiXUTSBXIP_3hJUoKM2h_rSNg5aTqQcL5tZIc,151 +mypy/typeshed/stdlib/lib2to3/pgen2/parse.pyi,sha256=dSjInOriPq4H6YhXCvsW0lUeCZKMV81mYmYc9ZbEh4Y,1133 +mypy/typeshed/stdlib/lib2to3/pgen2/pgen.pyi,sha256=suHtbvS7x64S7z70EMaFdw-ZJgu8_w7t0WwRvq1AzBo,2273 
+mypy/typeshed/stdlib/lib2to3/pgen2/token.pyi,sha256=9kLlQlmffvLgVeS7cQC-OGDuzwKmP92YOOfqmaIDRUM,1418 +mypy/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi,sha256=mdjbHoIgTIFWGaGKpky1FqxpY6Ugih514SvAlNUT-8k,1972 +mypy/typeshed/stdlib/lib2to3/pygram.pyi,sha256=cMDHpJNWgsy0aJVrG2e2uBDq9DbXd30htXQBMjAO_pA,2253 +mypy/typeshed/stdlib/lib2to3/pytree.pyi,sha256=RowzuYJKhSUKX32E6Vrf_SHu9HS8ezr97-vQ-x2MFWY,4185 +mypy/typeshed/stdlib/lib2to3/refactor.pyi,sha256=vhGguYDE5gxdbUxG_LKxVPZ9KhkbryDjRT_hK7FSZ1U,3946 +mypy/typeshed/stdlib/linecache.pyi,sha256=HKDzUJPKyFXBXceGeeoYpMhxXIYapQBj9D2DyQrJ2R8,852 +mypy/typeshed/stdlib/locale.pyi,sha256=fQYiQf-xr8d02FmoEbMECafNv43Hhu0wJK9fZaT9m8s,4934 +mypy/typeshed/stdlib/logging/__init__.pyi,sha256=U45BxhJpzCxQRv6BUTtgYVF-6w7Ugpg39rrMzhwweG0,20445 +mypy/typeshed/stdlib/logging/config.pyi,sha256=sI5ap7DF4bpk1JItw8jsXGRuYBTj0BH5w11v-7M03io,6257 +mypy/typeshed/stdlib/logging/handlers.pyi,sha256=-tE5aQIqfaG3oNqT8O-bymyKzzMUlI8DFPs8QNueB8k,9192 +mypy/typeshed/stdlib/lzma.pyi,sha256=jW_lZJbwFU3LyHV4jqCm32diG1Qk1G0-2cDFHgJVJdk,4926 +mypy/typeshed/stdlib/mailbox.pyi,sha256=A59sq_iCF1kyKxZhioUeusZkVLfYfTyBnifwvinYGP8,10779 +mypy/typeshed/stdlib/mailcap.pyi,sha256=h3wCqy9SD2DA8-aB5k7vW17ShyhlL-AZV6iYKpRTyP4,388 +mypy/typeshed/stdlib/marshal.pyi,sha256=LqlTQDvNSGJGSKNjSQ832j-3opQFkcaeMVuy2v4RWM0,1605 +mypy/typeshed/stdlib/math.pyi,sha256=pBs28qdJCrqf9GbUFnZpWE3SRQDjnnobRt1z3fXNMS8,6156 +mypy/typeshed/stdlib/mimetypes.pyi,sha256=Eu7lIAaV-NyKth1YT6xkxucFsaMLzih0jQ9GOH-p0XA,2110 +mypy/typeshed/stdlib/mmap.pyi,sha256=TcSr_VYfd1ei9HSRfYmF8AmlM61dLYkJo0q7BG_oUTc,5504 +mypy/typeshed/stdlib/modulefinder.pyi,sha256=IbgQdklMWj-I-DZL4ceI5KzniZ1cNuwdBPLE5ZnD12k,3399 +mypy/typeshed/stdlib/msilib/__init__.pyi,sha256=AMuAaoRE_erzIsqazBOn53UVl2Ss8wIoFX-zgr_YP2Q,5854 +mypy/typeshed/stdlib/msilib/schema.pyi,sha256=2V7cxsSreXKr3UnBQVIgpUEVjXS1gbBGUXsggjFUltM,2173 +mypy/typeshed/stdlib/msilib/sequence.pyi,sha256=6MlkmUtknHyQU535LP_PPXGY5_QCxJo910--XfE0KwM,429 +mypy/typeshed/stdlib/msilib/text.pyi,sha256=1I58yqiApA7hjFdzhrhsw6D8WlVfH8JJtB1b4NkMNL8,216 +mypy/typeshed/stdlib/msvcrt.pyi,sha256=tWT8Hx0Ogy2Fwv_Gt3dVjub0ibtMcXLYoiGPLGiI4MQ,1196 +mypy/typeshed/stdlib/multiprocessing/__init__.pyi,sha256=KafkEHitV2NmXHJS956RawcoMZhXV6_mZ-io0ZpMSv4,3132 +mypy/typeshed/stdlib/multiprocessing/connection.pyi,sha256=QHxUSmeTedOmVlXk_Nn9rvls0rpyZL3XMSujfYuCPtU,3723 +mypy/typeshed/stdlib/multiprocessing/context.pyi,sha256=FCFuO3lfuMSzOeYuBJZJ3K1-PrS9yN5GrPM59i2isWc,8578 +mypy/typeshed/stdlib/multiprocessing/dummy/__init__.pyi,sha256=8Ra_8E5DWqZD_DtarXt3Z5R1kmAIsRJpHEUGJC7aNOc,1935 +mypy/typeshed/stdlib/multiprocessing/dummy/connection.pyi,sha256=WNsr78HeHz67VG14qLrc6xUkFNQkKt18jw95amhFBQg,1282 +mypy/typeshed/stdlib/multiprocessing/forkserver.pyi,sha256=HBYVfnK7F6NSVD6_8FdozMQUW6LbCn5PiDFxRw5pZfU,1424 +mypy/typeshed/stdlib/multiprocessing/heap.pyi,sha256=HTI4VlqHvZBggg775D8DsGkkDCTLsda4zbRqHvGCeL0,1084 +mypy/typeshed/stdlib/multiprocessing/managers.pyi,sha256=K2gcAHXUpnGZhWRnAB7kLYRTz8I_hmfLQFX85hVC4Xc,15832 +mypy/typeshed/stdlib/multiprocessing/pool.pyi,sha256=szTwi4UNMQZ37pSC3MMQuHTDs4MLs8AjEOGM5r9wDG0,3938 +mypy/typeshed/stdlib/multiprocessing/popen_fork.pyi,sha256=RHaJE_47OeQykmnXjxeT52Q7FlxznFVaknStQxZJlP8,810 +mypy/typeshed/stdlib/multiprocessing/popen_forkserver.pyi,sha256=-f851cHQEbM_L9oXaw6PrUHI6bKAVasRR17OirOSd60,353 +mypy/typeshed/stdlib/multiprocessing/popen_spawn_posix.pyi,sha256=kuKZmJxw4id8R5dTTp-B7E-5qDWTSexAOkCqStEMoKo,524 
+mypy/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi,sha256=ZyXdPF2y4wvcgveiWOouv9Y9P9gnhn7D4XXpD9WF46Q,773 +mypy/typeshed/stdlib/multiprocessing/process.pyi,sha256=ys5dydqBBOoSL73rB51ywdRzzQArEhLd087HDtksgK4,1177 +mypy/typeshed/stdlib/multiprocessing/queues.pyi,sha256=e1ei3HzCZHjvlH307-6oBL5LOJX2mLXJCRsW6--t0Mc,1375 +mypy/typeshed/stdlib/multiprocessing/reduction.pyi,sha256=2br3XPuglTD-730a48n1VLgPA43Qx4IOVEwJGBpFl50,3127 +mypy/typeshed/stdlib/multiprocessing/resource_sharer.pyi,sha256=d9OjiE5L4aC3-u2-WC7csArCtkqs_IMOhhOVMEi6UjY,420 +mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi,sha256=zGYELoHtbABO37k1BWl3_LgSXHnFiBnIUj1fciE8Jb8,695 +mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi,sha256=JRimnBHWs_TwG96aTh2FnuVIGc9gtUjieRe4ZYJeFSA,1500 +mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi,sha256=5YVY233PCxvwJaKgWFEiE1D_oyP9HiegMPQG800AUss,5031 +mypy/typeshed/stdlib/multiprocessing/spawn.pyi,sha256=oy8FZLtca2ZmZ1OXzvU-kFGSLioeCghBO1iaqLwfy8c,904 +mypy/typeshed/stdlib/multiprocessing/synchronize.pyi,sha256=24f09k_6rwVgeiNA1cJvsCGphp7QNPXPtg2o9rNs2YU,2440 +mypy/typeshed/stdlib/multiprocessing/util.pyi,sha256=1H7c1aqVS6BpLXB7MCmHxUp53seiuD3O4McMk2Xbq5U,3090 +mypy/typeshed/stdlib/netrc.pyi,sha256=tvfrFw9uqNzt6Xt_fJVlbF2uXIoJy7YXEAOzveB8AEo,745 +mypy/typeshed/stdlib/nis.pyi,sha256=jnKh2Xj3mroOTpZpm-C7BYPVe5M18UAIVeh66AFGyw0,293 +mypy/typeshed/stdlib/nntplib.pyi,sha256=FYFknQ33uku7s3vEFrIPxHlEVA0KY82-a7L66EgPCK0,4279 +mypy/typeshed/stdlib/nt.pyi,sha256=FNIRcLSoTrcRDA2NIYrHiACEr3r3WgkyDwNhSujOlsc,3407 +mypy/typeshed/stdlib/ntpath.pyi,sha256=c_WZ6Pg1gmoyTxjZV5X1HasYC7znrU_9WY5_hYDg5N4,3049 +mypy/typeshed/stdlib/nturl2path.pyi,sha256=MeWnFTlqhvq6m2QsnYDaX_oGiUga6swI7bn_RjP6epE,412 +mypy/typeshed/stdlib/numbers.pyi,sha256=Jhh3FjInAcY76uLHr0aS4zbHOVyydjDR0sGvD8ddnK0,7546 +mypy/typeshed/stdlib/opcode.pyi,sha256=R_U_vPgmY_k42K6-2-WsZ7SKTJZ9C7vgBiTVAOZVufM,1119 +mypy/typeshed/stdlib/operator.pyi,sha256=7l31EfbCCMDMwLWVNn7M7Jgt2WYX2IzenDxtAnkgjv8,4927 +mypy/typeshed/stdlib/optparse.pyi,sha256=Eele1KUuIcC6Zv9c6SL4wD68JP-orWueZXtgFT127Nc,13186 +mypy/typeshed/stdlib/os/__init__.pyi,sha256=l661IJ8_nxsluK8iznbz94JuD9cn2bRQlZA-mJUjfUo,53922 +mypy/typeshed/stdlib/os/path.pyi,sha256=G76tJbvlG1_kzFd8gnCqS4Mht3gPzlC1ihIBqzurxDM,186 +mypy/typeshed/stdlib/ossaudiodev.pyi,sha256=mjHFd2y8BrdeToNwDIUX3YJtRHBgs9vaGo32sQP9uPI,4409 +mypy/typeshed/stdlib/parser.pyi,sha256=qESTvpCIEl06MJlFOeGt6pVTgqDvrwINj3Haj_g26DE,1100 +mypy/typeshed/stdlib/pathlib/__init__.pyi,sha256=SMtuqq0XLr215foBdPXMBhm4NFF2b4pwlxdxO9G5-VA,14096 +mypy/typeshed/stdlib/pathlib/types.pyi,sha256=zNj89KIAok7fdN8Lvm8ujgUnRKS5nZFJsIiSon5LEIY,333 +mypy/typeshed/stdlib/pdb.pyi,sha256=dQSEVcZHfYtZ98gfKhLBl5PAPVO2AOyZM-73uh2I1Qw,10630 +mypy/typeshed/stdlib/pickle.pyi,sha256=bUo6oXSAhLok_3nkRBw9mPQEjWMy0JW6rz1vqrCDfZc,5221 +mypy/typeshed/stdlib/pickletools.pyi,sha256=Avlw2PygqsaMj4zcx-b2jWXzMFFbz96NC82jefeYsYM,4232 +mypy/typeshed/stdlib/pipes.pyi,sha256=FvE1GTA5YU-JHBIO-mCAIfrAARL7g2Ck0HmgJ765gNc,502 +mypy/typeshed/stdlib/pkgutil.pyi,sha256=ApsHouaT21XnCdXiW4WVG_ck5eDv2vqt8RWS2zpnlGI,2591 +mypy/typeshed/stdlib/platform.pyi,sha256=-YjPVuiCJWNL8W4d1S6ceclr8vW-L04i--1--NqosLs,4109 +mypy/typeshed/stdlib/plistlib.pyi,sha256=YSN2FLch6L7BEJrNR-LpZPlNZQy6qBWBQYmwh-c7I4Y,2746 +mypy/typeshed/stdlib/poplib.pyi,sha256=VO_6eQxwmZKGzjP_l2gBZXEzkBQvnmkhWx3oVe9Allc,2497 +mypy/typeshed/stdlib/posix.pyi,sha256=gwKe-gvg6-QOeRCIIpMZmYhmpCybxGGeh54J6qS232I,14052 +mypy/typeshed/stdlib/posixpath.pyi,sha256=dJIklN8TxPNn_5Y-CSL6w5I3IEn5gQXGBXljc_CdhPs,4744 
+mypy/typeshed/stdlib/pprint.pyi,sha256=dZlYga82u0YmsP_K_V6INIkqTdZhskEsFBp79l5Qrlk,4835 +mypy/typeshed/stdlib/profile.pyi,sha256=VENI6_XB1JcY18Kn3bY2Sm02efBuCFMG_beiLlpZOQY,1416 +mypy/typeshed/stdlib/pstats.pyi,sha256=sflUQEiROI_VVZtmYeTahkzcCU_MD-PpX6Qf8Q23kQk,3073 +mypy/typeshed/stdlib/pty.pyi,sha256=A0mqxGnMi5mqKwFeDbkRUC4bDbga0X9TovCDfbMLZt8,1072 +mypy/typeshed/stdlib/pwd.pyi,sha256=rXA9jXtUOJeQ5D06dv5C8twQxrOatqmQrlg1SZFfxUU,905 +mypy/typeshed/stdlib/py_compile.pyi,sha256=pRlpK44H98D9tnHGi5C0eDgOX68dk_82SizC7voWnH4,894 +mypy/typeshed/stdlib/pyclbr.pyi,sha256=xZ2POHrJZT7xe7eaueO6wxdgpFFkdf1BePdF0PuSbqc,2284 +mypy/typeshed/stdlib/pydoc.pyi,sha256=duT8xTK7AByDS3o28bOGk79djNYOwxWtlTYQ1AOmDkY,13928 +mypy/typeshed/stdlib/pydoc_data/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypy/typeshed/stdlib/pydoc_data/topics.pyi,sha256=gqCAY10yIjRcrCCE8hqoUVs5o5cE0NgOOft9X-wg3AE,56 +mypy/typeshed/stdlib/pyexpat/__init__.pyi,sha256=Imn8XvHkFsRH0FO7HRL195v58DZ70I7lwB_GIWcxuZM,3597 +mypy/typeshed/stdlib/pyexpat/errors.pyi,sha256=XA5Ik0oagDYSTmleaMQFSTqkcfMN_1xGwP19va6xeH8,2357 +mypy/typeshed/stdlib/pyexpat/model.pyi,sha256=WLYMeiykYnU6gpN35DItiUEjTPby-vu0DKAZ4afGefU,291 +mypy/typeshed/stdlib/queue.pyi,sha256=_Zq4o820Qh_giAIlnV7hnA9Ox1ws0dRjdGwWVFOUmFE,1910 +mypy/typeshed/stdlib/quopri.pyi,sha256=a-dyP2A0w_xQm3D2KXIZzDA99FPX15Kaq297GJTnK1o,669 +mypy/typeshed/stdlib/random.pyi,sha256=a6Z-7k0SrELimXkCeGf5nEe0764gQmke0mwnC-0tjPk,4990 +mypy/typeshed/stdlib/re.pyi,sha256=A3Er_MgsvnAX2T7YTLDGpIrc_bzIxp9P_cR79ThHWhM,12139 +mypy/typeshed/stdlib/readline.pyi,sha256=JoE2nbK4Fv-UIDbo7lgTp_38524hDx8kRRpL5Qhuo5M,1988 +mypy/typeshed/stdlib/reprlib.pyi,sha256=usKrUhRcCNeOVdYy3S6I8njRORXIQ9rXrGme00uGJd4,1986 +mypy/typeshed/stdlib/resource.pyi,sha256=OAlIxkLvLkdxOWk3nmG7kh0GPvPUPgU3HV8wkGx8Fkc,2972 +mypy/typeshed/stdlib/rlcompleter.pyi,sha256=FtTt0Z1sNrWz6EMCyowIRAq8tzAeTpee6rUVJ5b-Tsw,322 +mypy/typeshed/stdlib/runpy.pyi,sha256=hrHtuhkdU-vJb7E6trWXD-ITI33AOQT_HH5CEsURVdQ,811 +mypy/typeshed/stdlib/sched.pyi,sha256=aS9BsKWepU3NT4zTqocYU7uJvhlTjvCUqxfTNbaByPA,1477 +mypy/typeshed/stdlib/secrets.pyi,sha256=GTDHK_EMcCaMZ9h-8OploY5SQiAaqTDRbh3ROug0M4I,624 +mypy/typeshed/stdlib/select.pyi,sha256=p27dvLweW43Z1A_r4KPIVNmI6p35c_lxscixvBosQnI,5586 +mypy/typeshed/stdlib/selectors.pyi,sha256=2rN66ItZthqaO9SCb6Ix9ImpLfGkdn2-hYk9SexRGt4,2927 +mypy/typeshed/stdlib/shelve.pyi,sha256=iXAfiiZL63IYPfUdIS4g2pMfx6thq4K6NqQ2WfcY6ZE,2343 +mypy/typeshed/stdlib/shlex.pyi,sha256=CNq7RB3jlgSD8lpZVEb42Coh3pfeOpd6tjp3XCVXptQ,2191 +mypy/typeshed/stdlib/shutil.pyi,sha256=TjwVseRzVNWxQgerDoEBwtOWkr5F5nY5GMTPeyovNbU,8362 +mypy/typeshed/stdlib/signal.pyi,sha256=YDVZyjuYVKTZOK7Mvb1LcVyMIqkqDdfyGf_iRPtjED4,6094 +mypy/typeshed/stdlib/site.pyi,sha256=lDIaRFWoJkNeGfPKavcrMtvia4akAMWMR5BIDlDYvx0,1547 +mypy/typeshed/stdlib/smtpd.pyi,sha256=_DHsDMkG8iT4uDal27Ub0-kssEC73bncY-A8f_E_IEE,3082 +mypy/typeshed/stdlib/smtplib.pyi,sha256=fCK81zRuF3prdCVYrW4pxpoxOVULJNUUY2spDB49plo,6506 +mypy/typeshed/stdlib/sndhdr.pyi,sha256=4boTiWWf2o3VW6QhITP8JNEePP734AlxyMeU1cn74CM,353 +mypy/typeshed/stdlib/socket.pyi,sha256=ggtLkpFABhmdC6hptQjq_rHbmQtoXElV3tMHXsbvX2c,45289 +mypy/typeshed/stdlib/socketserver.pyi,sha256=gPN47U1Kh7UPFxuvK-you8aRwvHbtzgvMVnt9xBQM1A,6991 +mypy/typeshed/stdlib/spwd.pyi,sha256=hyZQp0XfNGpN05cq8qpIenyS2sUm6_H3odOvSyxacKo,1299 +mypy/typeshed/stdlib/sqlite3/__init__.pyi,sha256=MmXPD4P7zIjYdryMfhf5SIgztgpQXdWpCwI0PpmOUhg,21866 +mypy/typeshed/stdlib/sqlite3/dbapi2.pyi,sha256=5_auPrbYUGLd6BirR7xmDFUr_Ibdm7ZTAeZ3meU5KEQ,11438 
+mypy/typeshed/stdlib/sqlite3/dump.pyi,sha256=kKrQ2CozgG8GoIXMDMNiMJz__B7tzZ0VQb2jzkH6p5g,90 +mypy/typeshed/stdlib/sre_compile.pyi,sha256=iZEu9IWFg3DwR32Ae8ysNio0sXnNFjs232h5LKQXFEw,346 +mypy/typeshed/stdlib/sre_constants.pyi,sha256=XTz8_VeEEAXJSnMdUviQINrLMOXclnaJbzKifN_zjuU,4781 +mypy/typeshed/stdlib/sre_parse.pyi,sha256=AsBSS835EPpGxhpKlo8Ce2Z_t_cwOB7CNwKmR494On0,3906 +mypy/typeshed/stdlib/ssl.pyi,sha256=iElshHxXHGQZlbABMwEcZr4utmAcnCO-rY40BoF18Fw,22605 +mypy/typeshed/stdlib/stat.pyi,sha256=YbUbmHZzGlieVnKK7uvAjRwv784LjinmOWsnmu05FJM,3354 +mypy/typeshed/stdlib/statistics.pyi,sha256=1eTpimBxGyoV6n0OQcpBfD1mtz3Rw_QwgNLXSWk1XqA,5724 +mypy/typeshed/stdlib/string/__init__.pyi,sha256=aRIqf-qZi4mc_N7u3HAALMWpXttjU48A6a0bcbPLuoY,3095 +mypy/typeshed/stdlib/string/templatelib.pyi,sha256=b2lwTPVujnkpNrmHIfwe1cz_NFhUJPpaM5UbLPWIAzs,1324 +mypy/typeshed/stdlib/stringprep.pyi,sha256=53l-7SuzjSdXnCCb5lHYSHxNWQNBXLQzbL8DX4usXYs,985 +mypy/typeshed/stdlib/struct.pyi,sha256=7xjDbX-Sh1C_E0rFZ-Z0DnwF6P27v088eMM03kL2R2g,155 +mypy/typeshed/stdlib/subprocess.pyi,sha256=-pTcYEBrcasV6XXlHzANEJmq3WeGdg96aSqQ0Ljxb5I,72992 +mypy/typeshed/stdlib/sunau.pyi,sha256=qTvLbam1UgmqH9Hr0mLgIFZ3jQqxSvBceIOq5QcnwMU,2991 +mypy/typeshed/stdlib/symbol.pyi,sha256=ADRkN_rhRV_foCYktb0NHR7jm7h_N4XOHo9VPWF1lFM,2144 +mypy/typeshed/stdlib/symtable.pyi,sha256=cisakBNhFUXqj4qVY5DPVHYhCQne9EXG0xvPFMZY0SI,3095 +mypy/typeshed/stdlib/sys/__init__.pyi,sha256=OYe28JBiW5YJNApqSVC_LcCI4KJw0VEJ2jgA3ynxD6c,17286 +mypy/typeshed/stdlib/sys/_monitoring.pyi,sha256=3Ax5Vwpq0Cp9tUSWmpw5Iq0x6WPSQFIGFhhnBWHkDzs,2112 +mypy/typeshed/stdlib/sysconfig.pyi,sha256=2ZxRzF3LAnFNM7xVELxpaOfZcLzht1S-RDROpGd6BOE,1806 +mypy/typeshed/stdlib/syslog.pyi,sha256=3QAS4AGmMqOxotxdD3CYjvp1iqtqaxznhn57ul8m-6s,1599 +mypy/typeshed/stdlib/tabnanny.pyi,sha256=qBHW9MY44U92xKdFbYgrSXljglOVtAY0GYTa41BHwbE,514 +mypy/typeshed/stdlib/tarfile.pyi,sha256=EQyxrNWv9KbA5ueTMcRmzOta3ooIv9zOvbd9DcLz5O0,27974 +mypy/typeshed/stdlib/telnetlib.pyi,sha256=Sh0M5cJCI6WicuEAhDCsDXYxejYsMcZMcLsu0lo0diY,3640 +mypy/typeshed/stdlib/tempfile.pyi,sha256=cbw3xYs5wg6rbl0YvaAfvTzkn3oPsOpbHlWp1f0gHIo,16591 +mypy/typeshed/stdlib/termios.pyi,sha256=z-dQ4Pz1e7IBBVKQx4GraNjNrHC4OI8vAZU4-FYM4TY,8171 +mypy/typeshed/stdlib/textwrap.pyi,sha256=6eEGWUkmDRU_-fA-aOIWWse9-1GIq8T89S4Vaf9aJ7Y,3233 +mypy/typeshed/stdlib/this.pyi,sha256=qeiwAiqbPK8iEcH4W--jUM_ickhZFNnx8cEvTqVPvCY,25 +mypy/typeshed/stdlib/threading.pyi,sha256=Hf4_lq8NZSg9I5hpEe9wla4_-UylKlNj35b1Gz6VcCM,6789 +mypy/typeshed/stdlib/time.pyi,sha256=5TIhQDkGSKRkp9sr4N6QUd_gVLw4j0dxfGPYqM5mrQQ,3887 +mypy/typeshed/stdlib/timeit.pyi,sha256=4yMgBR4T5Ame22l3SkRnXrq134Jivk3bJIclXNsp6lo,1240 +mypy/typeshed/stdlib/tkinter/__init__.pyi,sha256=4qUrv9_LrJ8Glr6651FrSPNIoSpQEHw6DRj9TDkTMQk,162635 +mypy/typeshed/stdlib/tkinter/colorchooser.pyi,sha256=WigYRTIs27oyuwzPwhvndb3z8DkuLuZngd2MsKM3DNA,360 +mypy/typeshed/stdlib/tkinter/commondialog.pyi,sha256=jJRmyjdOxTouujFvS2YtOg0qh1ngKyHQKPc0dnoumhw,468 +mypy/typeshed/stdlib/tkinter/constants.pyi,sha256=X7zXUbLHPHC-MiCDZoVRRpEX9jFGV3zgj1rBrpWG5l4,1844 +mypy/typeshed/stdlib/tkinter/dialog.pyi,sha256=oEV2mFaVp0ESk_8nFJOuKGlXWG4u5MAYVcRLedjoZik,324 +mypy/typeshed/stdlib/tkinter/dnd.pyi,sha256=hltw1-itHapOgnqlBgcmOgzV1XGMlu6CEsHjwwtqFYE,774 +mypy/typeshed/stdlib/tkinter/filedialog.pyi,sha256=1TUWUWpUtYN7aPDM7ANHPEDIW9tQoTfmY0m9rJhpdI0,5176 +mypy/typeshed/stdlib/tkinter/font.pyi,sha256=raU_THjHyDL6A61VUGyBR1RtojyWuQhhgArYqQ3E-hw,4606 +mypy/typeshed/stdlib/tkinter/messagebox.pyi,sha256=u_s-BST--wes-h8D1d1uSSj-9s-S6hRfJYcwZSHL9Bg,2742 
+mypy/typeshed/stdlib/tkinter/scrolledtext.pyi,sha256=Hp_LlFfwVwR3W4iDZKthreGUofPbIbiOkjl1O-HEL9o,302 +mypy/typeshed/stdlib/tkinter/simpledialog.pyi,sha256=ZZxYKT7uNQ7t1FJ4RqlXX5BCJg9Zcs93e3uFRqt-bSU,1596 +mypy/typeshed/stdlib/tkinter/tix.pyi,sha256=c2OTQkpGaZtIUARkX1drGDhFwdl9ffl52ZNiGvkt39Y,14375 +mypy/typeshed/stdlib/tkinter/ttk.pyi,sha256=fBeogSdwyUuN228A8onSARJNHDpi-vWj47oO7xzq6uI,53705 +mypy/typeshed/stdlib/token.pyi,sha256=45u8j2PqFfchSf-9MPcS-Ep7nL_EKb1jSalRa1qr3Mo,3343 +mypy/typeshed/stdlib/tokenize.pyi,sha256=pB0iu_RYKwen5_ZtDW--R7aKiwH8JprdTQsJzKRCAGY,5445 +mypy/typeshed/stdlib/tomllib.pyi,sha256=v4XWpK6XmcXUQ8R3tQSmQBwtbOuBssM0Z5ZDjhYVIcM,937 +mypy/typeshed/stdlib/trace.pyi,sha256=cwRpCtKhsA0IdW7O8OgQJ3l03k-7f7EwrYTj3jQwrko,3605 +mypy/typeshed/stdlib/traceback.pyi,sha256=iNXxYq6OYLnQfySlzisnJ8PI6aEm4ssBhWzDy2R72eg,11630 +mypy/typeshed/stdlib/tracemalloc.pyi,sha256=8Sv_gFsBOtFFNPH-qnDcpX4dglcqPBqa8U4fwJ_pRCg,4559 +mypy/typeshed/stdlib/tty.pyi,sha256=fwxTycZe6nGL5DBdXGhiCcgA0yAlqF9hXIUKHmDkTWQ,871 +mypy/typeshed/stdlib/turtle.pyi,sha256=oixZtSAwes43tubfJp3oAF5bmVNebZIhBRhNva1y_tc,25277 +mypy/typeshed/stdlib/types.pyi,sha256=3VsMgvmSNsIcdkUKoHSBjNLuNdpqJrD40oqLJeMfy0M,26544 +mypy/typeshed/stdlib/typing.pyi,sha256=sC0TLyCjD4Y-G8i8XdiDwVp7oieNNNCx80CqnPiwHKA,41709 +mypy/typeshed/stdlib/typing_extensions.pyi,sha256=nDVd0WE5hE7CxCIhY-CjEL0RfXwLnN49DihgMFg3UFg,23330 +mypy/typeshed/stdlib/unicodedata.pyi,sha256=UdeHUdLrKlVMWZ1bttS8npUWtPcpDjt3PzS3y36SAZA,2589 +mypy/typeshed/stdlib/unittest/__init__.pyi,sha256=ARla4cG9E5nWe7hRFzZ82kH10g_awzGp55lY16IU6xA,1848 +mypy/typeshed/stdlib/unittest/_log.pyi,sha256=QnmSKoFS_D1bcRLqFAvfJinXn2-0-DjyBSqH_92vr4g,912 +mypy/typeshed/stdlib/unittest/async_case.pyi,sha256=owQLuLwHxWCY6UDv1wI6eVDGUi9xOKk5l_IWVCvGpXI,850 +mypy/typeshed/stdlib/unittest/case.pyi,sha256=fuVLYJh6s5nsGmSWhiWMWElSsOk3wFc1wl4e2T5hk1A,14468 +mypy/typeshed/stdlib/unittest/loader.pyi,sha256=tr-lR3NHul7QaC61k-MPUJnn5RRfqEbZZXri3qUnxes,3332 +mypy/typeshed/stdlib/unittest/main.pyi,sha256=rd2LDQ54q_gIamVlYfLBT97SE6-IYSknHgEpCcn1Xdw,2767 +mypy/typeshed/stdlib/unittest/mock.pyi,sha256=ZVj427F0SwmpvhudLCQSp_823CnjwAihdEQWzmTfwlM,20299 +mypy/typeshed/stdlib/unittest/result.pyi,sha256=HX5DXqQaIentVCiFufZh-tHpSfliUUGDjb1X8iAnk_8,2050 +mypy/typeshed/stdlib/unittest/runner.pyi,sha256=kKBdpnU1ahX_Uky1uL4C-DnkglLC1QkLUL1Eq6XfslU,3502 +mypy/typeshed/stdlib/unittest/signals.pyi,sha256=6rqsVHXOvSPHSkeF_vYPf5sUaLgqqFSmFihkaDqPhSw,488 +mypy/typeshed/stdlib/unittest/suite.pyi,sha256=FhS30BvL4niz3gI5Acnp2TX449CNPs2avEUEqGt14mo,1047 +mypy/typeshed/stdlib/unittest/util.pyi,sha256=or4A8A8A-AzIJWs-74Ofhm-TaEd29zUYzlnIRhwg-dA,1656 +mypy/typeshed/stdlib/urllib/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypy/typeshed/stdlib/urllib/error.pyi,sha256=UqUqcqbz8FPtlixaJP0Gp3ecUc6MpPJaYOPydkGRQN4,978 +mypy/typeshed/stdlib/urllib/parse.pyi,sha256=Y7xJVpKJHohxE6bPifub48wXO8bJAwYA08lTtNAovzM,6544 +mypy/typeshed/stdlib/urllib/request.pyi,sha256=fDi4DP0lDDyLs7ATksrS1EPlYYOCTAncXW5CvOYhsIU,19244 +mypy/typeshed/stdlib/urllib/response.pyi,sha256=Roz9K7VDlO_bOCqtZJz5eOXT3RX6NpTC2T7jx4Zxy2I,1580 +mypy/typeshed/stdlib/urllib/robotparser.pyi,sha256=sA7npNj2rB3b_aFOhXqGlQNh-G7kGmyYaZ3wz__N96o,683 +mypy/typeshed/stdlib/uu.pyi,sha256=yMt5ZRAepWSra-qWti133ZGibCtrJXkMZg5kKJe-MdM,431 +mypy/typeshed/stdlib/uuid.pyi,sha256=aqeMSWDDQ0aKOl8Y-cOT7e5OmfSgymoI4gEjy467v8k,3109 +mypy/typeshed/stdlib/venv/__init__.pyi,sha256=GLWFpMESaRETXfoH7IXs67daBAmbJG0aIfDHMU5Nx24,2951 
+mypy/typeshed/stdlib/warnings.pyi,sha256=AztB0gbwUylgZH2A-T7FA3X_uOeUWwpjDLaDJOe9cQ0,4238 +mypy/typeshed/stdlib/wave.pyi,sha256=bbbWyMa3ag475svSkm6idBLX6cOdN60IINkp-JSZhww,3492 +mypy/typeshed/stdlib/weakref.pyi,sha256=ic0GBe2EkvHbsXXSgpyCY5Cx339sMv21AejZsOrvNXs,8475 +mypy/typeshed/stdlib/webbrowser.pyi,sha256=CURvTjznP2Tvs8sT8_hFAsEKOBXGsr2IYHki02fSBmE,3040 +mypy/typeshed/stdlib/winreg.pyi,sha256=Iu8TKxlLIEaYP8WJuTS7RZMAgcKUVOhqnv123BsUS7M,5567 +mypy/typeshed/stdlib/winsound.pyi,sha256=I4ogNevxTWqwIqNRIeg5WXvcm_aQDAyI6VKXqMG13xw,1259 +mypy/typeshed/stdlib/wsgiref/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypy/typeshed/stdlib/wsgiref/handlers.pyi,sha256=d4qMJ3ZNLNAnDk1I___l8nIUK9uxDol-bXsjtc9BsX0,3068 +mypy/typeshed/stdlib/wsgiref/headers.pyi,sha256=hwXvKMQQHbjmcdsvFZgJ3LeIdNLcmRUDf7IrT94ZCCo,1050 +mypy/typeshed/stdlib/wsgiref/simple_server.pyi,sha256=CGpqJaFTlao6-hnkzCqHv4sHcDQfpBGT0sIueL5jhMQ,1426 +mypy/typeshed/stdlib/wsgiref/types.pyi,sha256=89NSSgpDnuOWMCuBprU210FsnnMh3V6TPmT26md1aYc,1264 +mypy/typeshed/stdlib/wsgiref/util.pyi,sha256=NxqrfAJ7JBdP4BuWs90xyfdSCfnywSXYi80uCXRt21Q,1060 +mypy/typeshed/stdlib/wsgiref/validate.pyi,sha256=NCpbRPP9fTt21peGNlXLgegq6U1yZaeAxFO-SUfBlng,1737 +mypy/typeshed/stdlib/xdrlib.pyi,sha256=wxJVHCfO5rju29ihBF96XgK3dj5b-LbsVGeotGgp15k,2368 +mypy/typeshed/stdlib/xml/__init__.pyi,sha256=m6b7OtCfk4VfTktwgMovrcUyjhCV0671jAktSJMbdwE,249 +mypy/typeshed/stdlib/xml/dom/NodeFilter.pyi,sha256=W8h9o8Jp1CsxU7O3ciu-6GelYeoJQoVuy8jeUs6xRIs,735 +mypy/typeshed/stdlib/xml/dom/__init__.pyi,sha256=4P-wzCUKUSkybVJieQlmIt338OwpdO0Q_kUnlDDZ4Fc,2548 +mypy/typeshed/stdlib/xml/dom/domreg.pyi,sha256=LNRgIl78O0eH3m7E5GFqG0BKQ0JSsHxTBnwr5KznZvI,418 +mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi,sha256=ClpimZ5otjEESOeunAAT1KoM9d2FvbdJ7a8wCOqWowE,6487 +mypy/typeshed/stdlib/xml/dom/minicompat.pyi,sha256=IgywKLWtKGaXnNA5ZzsAd-VsXUKAgjVIzjPQCpklUFc,716 +mypy/typeshed/stdlib/xml/dom/minidom.pyi,sha256=rxqF7PasZaBH6vugDxx_jR7pEkxhTkWIyUlaV6W7iGs,28674 +mypy/typeshed/stdlib/xml/dom/pulldom.pyi,sha256=nlFB0Lbj0kzV7_F0teVTMTykZ3y_-ZmPQ4dL2cMEWHs,4844 +mypy/typeshed/stdlib/xml/dom/xmlbuilder.pyi,sha256=qQK35MzE_YE2hNt90AhQE5jq0jd2bnVTLiGKO0GtEgk,2952 +mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi,sha256=8G_tDfXrvOGm9g3LTVIKnH8JfB1ZzJGcUfVTOA4N-WY,1176 +mypy/typeshed/stdlib/xml/etree/ElementPath.pyi,sha256=TiZrEPQInm35MiIMWr94B77iftOXmBMNvM1WxiwKLYc,2035 +mypy/typeshed/stdlib/xml/etree/ElementTree.pyi,sha256=kOuRe4aoE2oomVcqm-ciGq-K-pKLTXMnNz0IxHa_Hyo,14886 +mypy/typeshed/stdlib/xml/etree/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypy/typeshed/stdlib/xml/etree/cElementTree.pyi,sha256=iYR7ebpdB3g9zfBvICnV1VzvQktMya-Dh6lX4C9u4Uo,36 +mypy/typeshed/stdlib/xml/parsers/__init__.pyi,sha256=PS75lzF6CFuo_xdO83zK-IOQrnoJQ3FkUoMSOMdwWJM,39 +mypy/typeshed/stdlib/xml/parsers/expat/__init__.pyi,sha256=8pm3z3heMEx09A84UjPVQw3lb9cH6X-UK86skDsfEfk,189 +mypy/typeshed/stdlib/xml/parsers/expat/errors.pyi,sha256=mH9YRZuV4quzksDMLEmxiisAFgNhMOhl8p07ZzlS2XE,29 +mypy/typeshed/stdlib/xml/parsers/expat/model.pyi,sha256=M7GVdd-AxOh6oGw6zfONEATLMsxAIYW2y9kROXnn-Zg,28 +mypy/typeshed/stdlib/xml/sax/__init__.pyi,sha256=7TU3Ez8RjqR041xvpGy6eEBajytpO2W8C7L7AxVOwB8,1616 +mypy/typeshed/stdlib/xml/sax/_exceptions.pyi,sha256=Q41LNt4ARdDs5ynBIAGP-YapjU08m5Kah2ZD939fO9c,804 +mypy/typeshed/stdlib/xml/sax/expatreader.pyi,sha256=D77gx2ZCLo4OI11kXiKHUHwidxAv61UfPUI5k3JqFVg,3800 +mypy/typeshed/stdlib/xml/sax/handler.pyi,sha256=rYfSg3SBNou6cpoO7sBwcP6T-kHClkCwzj0QDZW57Zw,4528 
+mypy/typeshed/stdlib/xml/sax/saxutils.pyi,sha256=j-yIPACE-yvjnStAguxaR_HLfJN_Hq8KOLVd8DBGJzo,3804 +mypy/typeshed/stdlib/xml/sax/xmlreader.pyi,sha256=cGP-i2OzR9bUSuZct5ijA8bjWUApxdPVopuJ4yerDlU,4348 +mypy/typeshed/stdlib/xmlrpc/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypy/typeshed/stdlib/xmlrpc/client.pyi,sha256=dIN8JqkYO713oZIdUjdPddwe8-M1Na7v54d4zRR0FdQ,12035 +mypy/typeshed/stdlib/xmlrpc/server.pyi,sha256=OF2v_1XAjRYtbMLb_akHJrkXUKHX27cfzmrX7sfgm7E,6231 +mypy/typeshed/stdlib/xxlimited.pyi,sha256=SCWnX8bML0Kky9tWb8sMQSCrafe7gj7ZA_xhltk9Lik,491 +mypy/typeshed/stdlib/zipapp.pyi,sha256=gOkFhcdfGpy6PIboXe45wODMw-94YtC1ypUTCxBxTfU,553 +mypy/typeshed/stdlib/zipfile/__init__.pyi,sha256=p_Vhd6q1elFSzRsEI365MwZ7ABb_wg1AJ-68FZc1eQA,12859 +mypy/typeshed/stdlib/zipfile/_path/__init__.pyi,sha256=gO9xOHUT2f88yTLxpJmonZBC7Yh8WZ7GablfhsJaw-k,3063 +mypy/typeshed/stdlib/zipfile/_path/glob.pyi,sha256=7w_87VMRxyV1KnR7WaXktZtPbi0ZS8ibd9w0qsLxksc,943 +mypy/typeshed/stdlib/zipimport.pyi,sha256=SM9xql-crUor4aP7fFefFvGITxf0MpQPV3dNfTv9hVA,2677 +mypy/typeshed/stdlib/zlib.pyi,sha256=DIoGakJlb3pOGe_aAbSfPpLfaJq1HhdBCL1FbEijqcE,2395 +mypy/typeshed/stdlib/zoneinfo/__init__.pyi,sha256=qM_vgqSETLRL7D6D5upCnmwbnE_Pk_96i0RDnHjWYS8,1326 +mypy/typeshed/stdlib/zoneinfo/_common.pyi,sha256=QdqQFkqe8f-PN05bSbHLW90qfEqgdVLbIsKiPbW4lno,462 +mypy/typeshed/stdlib/zoneinfo/_tzpath.pyi,sha256=C5ve2ashiiq2Jm0EtWEdJDtcdxNc-_Ewff8I56pFfZE,524 +mypy/typeshed/stubs/librt/librt/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypy/typeshed/stubs/librt/librt/base64.pyi,sha256=zheHOkaBecV-EZJvGXFRKAVmikjuun2NtE2bcHiQLzA,82 +mypy/typeshed/stubs/librt/librt/internal.pyi,sha256=RneVcEnP7hwJKqNx7y_6JFzrjbmvycs-o2m58F5AMJw,814 +mypy/typeshed/stubs/mypy-extensions/mypy_extensions.pyi,sha256=M00bMpf1XZOilHhHPjPrdRK7a9qw35DqOWh0PeT8aj4,8892 +mypy/typestate.cpython-312-x86_64-linux-gnu.so,sha256=nywsAc9cZdkSzFoSOqonn2LjG3wZBhe2J8ZxuKDO8lc,15984 +mypy/typestate.py,sha256=P1GmLnCdnhtO21QR1AbX53tE9a-rIUESCirQ3yXw7qo,15987 +mypy/typetraverser.cpython-312-x86_64-linux-gnu.so,sha256=AcnSHxNw82zByuEZEvCRCj7wzpxaRqXf2OMZ3ELdgxY,15992 +mypy/typetraverser.py,sha256=eswFxl-86t2d9nukXA00-e8kwWsl8UY4wKudQdCDS3c,4456 +mypy/typevars.cpython-312-x86_64-linux-gnu.so,sha256=gb7jnOtVg8jwvJt0E0BwkMtJQ7gxgDI6JWqQ8g-vrpQ,15984 +mypy/typevars.py,sha256=8qw5kAfCaKm5hkk5Ze08HH9yMzsTSZOjBenjydTw5OA,2996 +mypy/typevartuples.cpython-312-x86_64-linux-gnu.so,sha256=h39jAskZDG_0cNxI83X7CBc4JxJg9oi31fSYKhA8dE4,15992 +mypy/typevartuples.py,sha256=jo6F1pu39vcaohI38BbkVhgtmvSy-2aoA3H8WYXzfJI,1058 +mypy/util.cpython-312-x86_64-linux-gnu.so,sha256=8dmhWK45PlBac7Zq8LARO7xp5yoc9voTwHFOin5Dl7Q,15976 +mypy/util.py,sha256=tksHkp1Yz-ApjFm0ZYvfjfTS7HyHZeWPuKjWaRjzCCk,32814 +mypy/version.py,sha256=RpcrQoR9OTKxJcUGch7tBAck5gXFRixMwDygqyMiSto,23 +mypy/visitor.cpython-312-x86_64-linux-gnu.so,sha256=Q5VtYl3TUClT_flHrSMiJ_3FFOAhdGpJNuNMPD2e6IE,15984 +mypy/visitor.py,sha256=AMh0CRXPkBBIWCDBpk9JDBHYm6JS8YFFwezTMJodyHw,18558 +mypy/xml/mypy-html.css,sha256=-e3IQLmSIuw_RVP8BzyIIsgGg-eOsefWawOg2b3H2KY,1409 +mypy/xml/mypy-html.xslt,sha256=19QUoO3-8HArENuzA1n5sgTiIuUHQEl1YuFy9pJCd3M,3824 +mypy/xml/mypy-txt.xslt,sha256=r94I7UBJQRb-QVytQdPlpRVi4R1AZ49vgf1HN-DPp4k,4686 +mypy/xml/mypy.xsd,sha256=RQw6a6mG9eTaXDT5p2xxLX8rRhfDUyCMCeyDrmLIhdE,2173 +mypyc/__init__.cpython-312-x86_64-linux-gnu.so,sha256=YHNAgEaWQujWgja32PY7glFAsxs1rHFmcN9_G5NAPos,15976 +mypyc/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+mypyc/__main__.py,sha256=19Zu2Z6FDpgBZU4_L47hAfYuQS9nvmYgiTr9X0m8c4s,1928 +mypyc/__pycache__/__init__.cpython-312.pyc,, +mypyc/__pycache__/__main__.cpython-312.pyc,, +mypyc/__pycache__/annotate.cpython-312.pyc,, +mypyc/__pycache__/build.cpython-312.pyc,, +mypyc/__pycache__/build_setup.cpython-312.pyc,, +mypyc/__pycache__/common.cpython-312.pyc,, +mypyc/__pycache__/crash.cpython-312.pyc,, +mypyc/__pycache__/errors.cpython-312.pyc,, +mypyc/__pycache__/namegen.cpython-312.pyc,, +mypyc/__pycache__/options.cpython-312.pyc,, +mypyc/__pycache__/rt_subtype.cpython-312.pyc,, +mypyc/__pycache__/sametype.cpython-312.pyc,, +mypyc/__pycache__/subtype.cpython-312.pyc,, +mypyc/analysis/__init__.cpython-312-x86_64-linux-gnu.so,sha256=mERsnltkBfypTkAqazCtXKfffDrUAVm04JDZ58cWYLE,15984 +mypyc/analysis/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypyc/analysis/__pycache__/__init__.cpython-312.pyc,, +mypyc/analysis/__pycache__/attrdefined.cpython-312.pyc,, +mypyc/analysis/__pycache__/blockfreq.cpython-312.pyc,, +mypyc/analysis/__pycache__/capsule_deps.cpython-312.pyc,, +mypyc/analysis/__pycache__/dataflow.cpython-312.pyc,, +mypyc/analysis/__pycache__/ircheck.cpython-312.pyc,, +mypyc/analysis/__pycache__/selfleaks.cpython-312.pyc,, +mypyc/analysis/attrdefined.cpython-312-x86_64-linux-gnu.so,sha256=gg74OUUZcAmQbGWjzxbDy0vTZ5nD0bZ_aSqh5LrVjMw,15992 +mypyc/analysis/attrdefined.py,sha256=6GizEEEnsA_pMmDkBQoxEVFd0beZ-WmbO3alJWUmZZo,15419 +mypyc/analysis/blockfreq.cpython-312-x86_64-linux-gnu.so,sha256=dBzlu6yrkItZaRgfhAJ_E_4H11lnwsDbUpb6bc2c3y0,15984 +mypyc/analysis/blockfreq.py,sha256=CjdVRFXgRdsuksk6e11cqbsFdj4e1z_8GHvvnY_Pgb8,1004 +mypyc/analysis/capsule_deps.cpython-312-x86_64-linux-gnu.so,sha256=OkIDU7eAv4cQ1hRcGfVmMRJs4_JwYKU6-T5g3yyBdDE,15992 +mypyc/analysis/capsule_deps.py,sha256=v2_eSwi_PCqFY8fa6US03P9U_Bqf518IXr4VbKOgtfw,1039 +mypyc/analysis/dataflow.cpython-312-x86_64-linux-gnu.so,sha256=QOj4Vw5GwiVpILtQ3fww8cGd3gIdn53LoWk6dCAnBEQ,15984 +mypyc/analysis/dataflow.py,sha256=pgPEmOrv97rJi83dzye63U6Q1RPgYSCmEIkqxL7kDQ0,19519 +mypyc/analysis/ircheck.cpython-312-x86_64-linux-gnu.so,sha256=10wl9hiLkXYfND0005jQUL41pxPwDPSoQhyRT47Sq2Q,15984 +mypyc/analysis/ircheck.py,sha256=ejC5zC5hQ4l9HirT6opqTfAQd6jXph-tKFzXtiHkVco,13713 +mypyc/analysis/selfleaks.cpython-312-x86_64-linux-gnu.so,sha256=rhm99Z9_nLgXxewSZsXUuLv3_1F9l18eISp_IzGyLpI,15984 +mypyc/analysis/selfleaks.py,sha256=6lHmd6wHvUDqGROMRRRpOIKaROSQZsRSq8OY_3GaELQ,5836 +mypyc/annotate.cpython-312-x86_64-linux-gnu.so,sha256=tTYk-4ksgn0HnlvXzA7bVLejcKwGoTxtjDLvszLvoLs,15984 +mypyc/annotate.py,sha256=XANWJWaGzXkyUn5J1jlnSj8y4OFucH3PUWwfjtMNfU8,17993 +mypyc/build.cpython-312-x86_64-linux-gnu.so,sha256=Xypbyb8OuecGw69hT9JRFcmuAmrtCv-ah-brNsJKE4c,15976 +mypyc/build.py,sha256=wyiZb2jmerHjOtHF3SxyzCn-lftBJV0xAFLGjtjE9Nc,28252 +mypyc/build_setup.py,sha256=qdZLITqcVQI2RkZsWXlsd32rxMxLTC8E9RBh-sDxlxk,2165 +mypyc/codegen/__init__.cpython-312-x86_64-linux-gnu.so,sha256=EfObOG6mISgypL3rpa2fKy28Qy2jyIpE8xYWcXc6bsQ,15984 +mypyc/codegen/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypyc/codegen/__pycache__/__init__.cpython-312.pyc,, +mypyc/codegen/__pycache__/cstring.cpython-312.pyc,, +mypyc/codegen/__pycache__/emit.cpython-312.pyc,, +mypyc/codegen/__pycache__/emitclass.cpython-312.pyc,, +mypyc/codegen/__pycache__/emitfunc.cpython-312.pyc,, +mypyc/codegen/__pycache__/emitmodule.cpython-312.pyc,, +mypyc/codegen/__pycache__/emitwrapper.cpython-312.pyc,, +mypyc/codegen/__pycache__/literals.cpython-312.pyc,, 
+mypyc/codegen/cstring.cpython-312-x86_64-linux-gnu.so,sha256=KU9Fe3bZPktmE4FJDfOi8nHKcDZdt_GdD7gd9-5Imw8,15984 +mypyc/codegen/cstring.py,sha256=yB_SJmahDpTC7Xq3vlCstPZhhyLpRzEy9yHBwdqdIa4,2004 +mypyc/codegen/emit.cpython-312-x86_64-linux-gnu.so,sha256=FQsBXxMDz4rBozEx0oIGWYIBUVMNBZFt4MCGnKxRLC4,15976 +mypyc/codegen/emit.py,sha256=rv0DL7z_2-48RKkxWp5EukK-Sk3uKDw-A20aFOE23cY,49601 +mypyc/codegen/emitclass.cpython-312-x86_64-linux-gnu.so,sha256=P2aKBZEIRI00nqnvLX3qgG0k77v51okNGyt1uG_04Zw,15984 +mypyc/codegen/emitclass.py,sha256=4eXj2FnJtfez_ripXBbOuJDaMK6MD53n-EP2laUsfIk,49348 +mypyc/codegen/emitfunc.cpython-312-x86_64-linux-gnu.so,sha256=bsDemCjyFnPsfCkYSXtaJwaKlwVba59OKk8k9fig39E,15984 +mypyc/codegen/emitfunc.py,sha256=O-Df6ywdg00ZIArx6bCnvuGnr_HIbcBxnSJZ1b3F2v4,37558 +mypyc/codegen/emitmodule.cpython-312-x86_64-linux-gnu.so,sha256=VIt-Egx3iJPrb9gyDZnHKzvMy0mRG1hDjW8658JfQtY,15992 +mypyc/codegen/emitmodule.py,sha256=B1odxP-taRoGR0AsYVhkMFSb0KbGP1I0-9ff8hqeofg,53610 +mypyc/codegen/emitwrapper.cpython-312-x86_64-linux-gnu.so,sha256=YVrFu0fJ55AxbW3dtCDm7vCb9r3ZZeRHINQpkTaeoXo,15992 +mypyc/codegen/emitwrapper.py,sha256=mGSIJkH2d45xJRBi6bMgKp_6QXpUfRXlG0fLWQu_mCo,37926 +mypyc/codegen/literals.cpython-312-x86_64-linux-gnu.so,sha256=OJ02V__9qmEbJ4B-do4BGVsXq7hiU10OvJEMm6ol07Q,15984 +mypyc/codegen/literals.py,sha256=eVwOOb4qH2YOgc19yIbYdevq9F-h-3-9pSAGfS0lVJM,10635 +mypyc/common.cpython-312-x86_64-linux-gnu.so,sha256=5yCrO54YgHJZNcmTSwvFF6jb8aYXU4eMWLZJ36vnUos,15984 +mypyc/common.py,sha256=D8prAhJguYKu6Pcyczj-YknE_sprzvRs1KP1wgh1DB8,4692 +mypyc/crash.cpython-312-x86_64-linux-gnu.so,sha256=kM4Jv3uDeFRRf5aquTD562glnMfESuxexYyFMGxJGvQ,15976 +mypyc/crash.py,sha256=ULZHLQqJqSK8oFBsoDvr1eOCLAIoe2lwkVCDi-f9eww,953 +mypyc/errors.cpython-312-x86_64-linux-gnu.so,sha256=Hx9Eu0SOALD2-hQUKl6riqNUflxSe6WvDd5TwXZma3s,15984 +mypyc/errors.py,sha256=0peshMAH657cILI2cTPGCMrGZIbfy9DchbDdmqVjtWU,945 +mypyc/ir/__init__.cpython-312-x86_64-linux-gnu.so,sha256=MQUYluoGwZXLR90vRZqxb2VVZ9E6IaRwHYz7286FTYQ,15976 +mypyc/ir/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypyc/ir/__pycache__/__init__.cpython-312.pyc,, +mypyc/ir/__pycache__/class_ir.cpython-312.pyc,, +mypyc/ir/__pycache__/func_ir.cpython-312.pyc,, +mypyc/ir/__pycache__/module_ir.cpython-312.pyc,, +mypyc/ir/__pycache__/ops.cpython-312.pyc,, +mypyc/ir/__pycache__/pprint.cpython-312.pyc,, +mypyc/ir/__pycache__/rtypes.cpython-312.pyc,, +mypyc/ir/class_ir.cpython-312-x86_64-linux-gnu.so,sha256=bWazE0J4Wk7ozOZqxsMsr1FE6xchX6HjET2CkCO3o6w,15984 +mypyc/ir/class_ir.py,sha256=FU82tQYf0XzpQhQWBcyZ36yJ_KkjfPqm64a9nRiHoMI,23572 +mypyc/ir/func_ir.cpython-312-x86_64-linux-gnu.so,sha256=iLaPY6ShUEa7bEnMRo0BfClWm-A5qQvbdNTWYh_MbNM,15984 +mypyc/ir/func_ir.py,sha256=ERRu35X1ZWirhIhqbhswpN9cv1-q7ZgWWU7QkPVPf2g,15944 +mypyc/ir/module_ir.cpython-312-x86_64-linux-gnu.so,sha256=p2fi7iU4y-o2KSq9kGccXh5fX1N9SP-usLCsNrcTcio,15984 +mypyc/ir/module_ir.py,sha256=9kf77Qk4RHsCpeMnzI4fe46_jSLPVoQi_R--hmwT7Hk,3717 +mypyc/ir/ops.cpython-312-x86_64-linux-gnu.so,sha256=o16S_VBosKYqTcbHu1SZIg1bmVts2u8SIhHZHqa8ddQ,15976 +mypyc/ir/ops.py,sha256=g6vIB-Bg2Gg6ZRUZ0TQ9asDxj9ecU7tsmhWYPUia10M,60057 +mypyc/ir/pprint.cpython-312-x86_64-linux-gnu.so,sha256=eN5MYsYE0iRl6E_qictxURPB6425S1ncmXk5Dm8Bk1s,15984 +mypyc/ir/pprint.py,sha256=pEFoF_QvpkvlBrwgpfmBp214vjxcl4qszbC2yRZ7e2k,18372 +mypyc/ir/rtypes.cpython-312-x86_64-linux-gnu.so,sha256=TDRKPtGcj-Z3RGm_3beKQPW4SN8EJtL5mDoxBAAlxvQ,15984 +mypyc/ir/rtypes.py,sha256=iT8NYaoDOmJO7iRzhJQT8JfX83IUM5k4hThUxdOpQMI,37650 
+mypyc/irbuild/__init__.cpython-312-x86_64-linux-gnu.so,sha256=7lCD0cW5STMPRwr_pDXjZ2xrbnv_lzbM7t7TmgsXWMI,15984 +mypyc/irbuild/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypyc/irbuild/__pycache__/__init__.cpython-312.pyc,, +mypyc/irbuild/__pycache__/ast_helpers.cpython-312.pyc,, +mypyc/irbuild/__pycache__/builder.cpython-312.pyc,, +mypyc/irbuild/__pycache__/callable_class.cpython-312.pyc,, +mypyc/irbuild/__pycache__/classdef.cpython-312.pyc,, +mypyc/irbuild/__pycache__/constant_fold.cpython-312.pyc,, +mypyc/irbuild/__pycache__/context.cpython-312.pyc,, +mypyc/irbuild/__pycache__/env_class.cpython-312.pyc,, +mypyc/irbuild/__pycache__/expression.cpython-312.pyc,, +mypyc/irbuild/__pycache__/for_helpers.cpython-312.pyc,, +mypyc/irbuild/__pycache__/format_str_tokenizer.cpython-312.pyc,, +mypyc/irbuild/__pycache__/function.cpython-312.pyc,, +mypyc/irbuild/__pycache__/generator.cpython-312.pyc,, +mypyc/irbuild/__pycache__/ll_builder.cpython-312.pyc,, +mypyc/irbuild/__pycache__/main.cpython-312.pyc,, +mypyc/irbuild/__pycache__/mapper.cpython-312.pyc,, +mypyc/irbuild/__pycache__/match.cpython-312.pyc,, +mypyc/irbuild/__pycache__/missingtypevisitor.cpython-312.pyc,, +mypyc/irbuild/__pycache__/nonlocalcontrol.cpython-312.pyc,, +mypyc/irbuild/__pycache__/prebuildvisitor.cpython-312.pyc,, +mypyc/irbuild/__pycache__/prepare.cpython-312.pyc,, +mypyc/irbuild/__pycache__/specialize.cpython-312.pyc,, +mypyc/irbuild/__pycache__/statement.cpython-312.pyc,, +mypyc/irbuild/__pycache__/targets.cpython-312.pyc,, +mypyc/irbuild/__pycache__/util.cpython-312.pyc,, +mypyc/irbuild/__pycache__/visitor.cpython-312.pyc,, +mypyc/irbuild/__pycache__/vtable.cpython-312.pyc,, +mypyc/irbuild/ast_helpers.cpython-312-x86_64-linux-gnu.so,sha256=jNUk117o7FGYkhRzIQycjVryPn6YasL6WwiU0qpRaRA,15992 +mypyc/irbuild/ast_helpers.py,sha256=PO6OY7IezzhTSR34fcJ6fYar3XHsWaL-Gdm_L9rz8fU,4327 +mypyc/irbuild/builder.cpython-312-x86_64-linux-gnu.so,sha256=IN2ovwp2Oh7ttN6uiQ-dzrdDWbb1h0oPJkTb2Exe54s,15984 +mypyc/irbuild/builder.py,sha256=hMjhc88dWh5-3iXih2FRGKVJKKvDalQuAvj1Kto-mrA,65187 +mypyc/irbuild/callable_class.cpython-312-x86_64-linux-gnu.so,sha256=qZ7vFm-s47uPMRDCAb68alW28maxOJNgOpZ_oUhuR90,16000 +mypyc/irbuild/callable_class.py,sha256=nzUmNWVE0ZkVW1_MxrJZ_Cj8rAYdsxABdXjWCD0QM_E,7390 +mypyc/irbuild/classdef.cpython-312-x86_64-linux-gnu.so,sha256=v-1FBMSUidy3kCTZf_cdvEOUnRn66hSo5i7OETBMBX0,15984 +mypyc/irbuild/classdef.py,sha256=E4vnvAze5CBlMHalp4XAw_7oGpzAanziCfuaY3zzuS8,36941 +mypyc/irbuild/constant_fold.cpython-312-x86_64-linux-gnu.so,sha256=7o-VxLiiOkxmDbgyIR5rnqbWpvZdSyJHxD6a1AGY1fg,15992 +mypyc/irbuild/constant_fold.py,sha256=0mzsa5E6ckrVqCBtT4et4PQxJ5_P-I_eFGqMas83UMs,3345 +mypyc/irbuild/context.cpython-312-x86_64-linux-gnu.so,sha256=PJLpvjTdWv6zS33V3GyamE1asPB8jfDJUGMlCcKmQ9o,15984 +mypyc/irbuild/context.py,sha256=vu5uvaWgpcvm_NjShAFLqjO_uIQyofa1zVCdQdVA-bI,7448 +mypyc/irbuild/env_class.cpython-312-x86_64-linux-gnu.so,sha256=_1HX8D-fIfGWB0k6EQmzRtdh3Ah8ZgStpz4RJG5Gsew,15984 +mypyc/irbuild/env_class.py,sha256=ao4e3fNJk5L_4zo2QG4Oylb4j09JRiGiaRfqVZ5KMXQ,11755 +mypyc/irbuild/expression.cpython-312-x86_64-linux-gnu.so,sha256=MUEaM8FEGF0CktlopoRZsuq2sva0nu5-nI_zswS2KlQ,15992 +mypyc/irbuild/expression.py,sha256=WDrAIJ8U3g2N1y5jq7XULrj_K5uqba876rYLaDCcNyI,41220 +mypyc/irbuild/for_helpers.cpython-312-x86_64-linux-gnu.so,sha256=AYxGNVsi0MT7J8oilmrb1Fge4Opj2LyMjXowt6hoBHo,15992 +mypyc/irbuild/for_helpers.py,sha256=wp0Ay0GY6OwYoZBt_ppY6rWVyN6ueOsx-vxJjLSHWCE,49091 
+mypyc/irbuild/format_str_tokenizer.cpython-312-x86_64-linux-gnu.so,sha256=Oi5U-NeOHzOhV-7IXm8fYYrqCCe0sinmRx2ni3ZtkWw,16008 +mypyc/irbuild/format_str_tokenizer.py,sha256=1wAXjyLCWhY4-d3RO7LRlmC0ung62WL4Mxkyg1HYGRI,8896 +mypyc/irbuild/function.cpython-312-x86_64-linux-gnu.so,sha256=Sv2f3AhQFb_4cZfNSjcqhtbDyb7cwaN6VlS14SLJHII,15984 +mypyc/irbuild/function.py,sha256=rk6yN_0epspp_uv66FmKdbix3WriUW8eJ1psrI-cmT8,48659 +mypyc/irbuild/generator.cpython-312-x86_64-linux-gnu.so,sha256=ngBvnbUpzaV-0HmmbSHb5qosXqOaEtmLr8w0339gAfg,15984 +mypyc/irbuild/generator.py,sha256=bpdOMNQ5qcrSyrov1RP2qvuaxGEbDTZfu5dRjNoWlOE,17110 +mypyc/irbuild/ll_builder.cpython-312-x86_64-linux-gnu.so,sha256=BdyjLNhJ65tLDqXTv-0xvkWwLEYqVeLvd8lc-e94qGs,15992 +mypyc/irbuild/ll_builder.py,sha256=oU0vC__m5C8Au3PohyjQqtSX6fbfptp3pGJ5BiFZ7ds,114884 +mypyc/irbuild/main.cpython-312-x86_64-linux-gnu.so,sha256=whz3dkZTBkf2R59k821SPLtvLkVOHckRT5f25BBX-nc,15976 +mypyc/irbuild/main.py,sha256=g2DGBAkLyxp072-0cIt8Xny-Xz8-bVM4vdHXONilqL0,5412 +mypyc/irbuild/mapper.cpython-312-x86_64-linux-gnu.so,sha256=owwjMFUXhHu2D8QGH7iS8FFHG4a4NAVUSQxh1jHa7LA,15984 +mypyc/irbuild/mapper.py,sha256=t6C_cRtXw9k938Ack8Xc5HFMqxCGCDYNAdQ1_US7wJk,9604 +mypyc/irbuild/match.cpython-312-x86_64-linux-gnu.so,sha256=Flzp-1FZwYPh0oI4VcQSIYvrj_ZDBZ__53QBR4mDetA,15976 +mypyc/irbuild/match.py,sha256=YP0wlx-mF4UPCcLpO78XHvnUWUwBhvP9JC-juUcQvv4,12246 +mypyc/irbuild/missingtypevisitor.cpython-312-x86_64-linux-gnu.so,sha256=6sQaHxVh7DEiYBaNOmMFl30CwrHj3LVLcrgWPymSUVo,16008 +mypyc/irbuild/missingtypevisitor.py,sha256=awXMMkRhXZq0AnbSGKfUzs9Lx526gFvDuiYaKjeqAq4,699 +mypyc/irbuild/nonlocalcontrol.cpython-312-x86_64-linux-gnu.so,sha256=0y0IncRJwqH_abg0E1ccDhM6xAL9GLWvhOgJ66qFxtg,16000 +mypyc/irbuild/nonlocalcontrol.py,sha256=lfL55ROwtespJR1vCGzVcfw9ZGBcFIhBDaNMrZibln4,8023 +mypyc/irbuild/prebuildvisitor.cpython-312-x86_64-linux-gnu.so,sha256=HcZfSbYRxFgzxwuyph-D2OA6Y08vZt1DOlCsHPPRkgA,16000 +mypyc/irbuild/prebuildvisitor.py,sha256=Ey0VhaigWy1pgm1tCiKlr_7tuGSu6SByR-eRplI2xsk,8636 +mypyc/irbuild/prepare.cpython-312-x86_64-linux-gnu.so,sha256=4vcWplZldr_Pk804e0aoWgoI1_9jZSrqo54mmb9nsGM,15984 +mypyc/irbuild/prepare.py,sha256=YJisxKSzHFKqj66XvNcIKrigOjql0P6EaOlQeEttFzk,35925 +mypyc/irbuild/specialize.cpython-312-x86_64-linux-gnu.so,sha256=ws9bZKwRxUHLVMUwN-tP_u3PpeZ7OCviZcgVjGzLiGg,15992 +mypyc/irbuild/specialize.py,sha256=_rlVrvOi1KzUqZ_0UmLYmaOHYc0V4xmU2GryKkE4DuM,41708 +mypyc/irbuild/statement.cpython-312-x86_64-linux-gnu.so,sha256=hKe4AUohdg8lR1bC0UpIeIEYWLBKwGV3d0GATJUV7o0,15984 +mypyc/irbuild/statement.py,sha256=X-crL-iagZ9ec2u6U4ZF8eH01o0JNmG0cKiBIlZDxCk,46685 +mypyc/irbuild/targets.cpython-312-x86_64-linux-gnu.so,sha256=jn-hLRf9v_RyXykAngHbBtM83OqbBgWhlPUcZiICei4,15984 +mypyc/irbuild/targets.py,sha256=r12ynPhUaTmIgnmV5c5eNbWJbqf2_L12RtW_PIQXhq8,2313 +mypyc/irbuild/util.cpython-312-x86_64-linux-gnu.so,sha256=0fqPc9T7FIX-N1HfCivk1r6AQ8Yx_AslVLR95YncHAw,15976 +mypyc/irbuild/util.py,sha256=Kfk-Gfr2wWWAZi_-ZX70Xj2erbwnSPPESBfNErv6jTI,10086 +mypyc/irbuild/visitor.cpython-312-x86_64-linux-gnu.so,sha256=A4iZC94LAbas7tYyTq3s0MXVXCffYoi5vwXThO-MUao,15984 +mypyc/irbuild/visitor.py,sha256=8JgG2tImrwMdnv9UK0vbZE_yuLXlXZD_WsC6l4jrCJA,13136 +mypyc/irbuild/vtable.cpython-312-x86_64-linux-gnu.so,sha256=8Oq3kTdN_lNg6LlVIRCaSoyLS3s7cQpgTrtCLsGKGcs,15984 +mypyc/irbuild/vtable.py,sha256=nuibAGp_OVSxX1Mpwq4qRPV92k1d5TrczwGNzkNMQk8,3304 +mypyc/lib-rt/CPy.h,sha256=L8UZXQ9cxq9hhdXzs0UTd12LErOo-v1eyh5jNCmRUig,35417 
+mypyc/lib-rt/base64/arch/avx/codec.c,sha256=B1Zjto9WhvHdFYV9e-F-NO4eUw80dlMs3U9AgJyGzc8,1489 +mypyc/lib-rt/base64/arch/avx/enc_loop_asm.c,sha256=5x7KgCny9gCuv0Oo50lNCo2LDt-tPOYg3t5vyTaW-kY,9314 +mypyc/lib-rt/base64/arch/avx2/codec.c,sha256=HVZTuNLi3va5oyHj5-053Ykqj-XZIV-kBE73zDT7bqI,1194 +mypyc/lib-rt/base64/arch/avx2/dec_loop.c,sha256=hq_yeFr9wrlOXrU4lPUQ--xpW1D0_m6FJkenA1Yt-fE,3229 +mypyc/lib-rt/base64/arch/avx2/dec_reshuffle.c,sha256=_qr6r3_QSicmOgl8tNMjoSX9UAR0tLY2QTk6dpwDt3Q,1304 +mypyc/lib-rt/base64/arch/avx2/enc_loop.c,sha256=-dydoBxKt1X_sTVpSYKZJlR8sfIeYLUJOzv_6HD60XM,2293 +mypyc/lib-rt/base64/arch/avx2/enc_loop_asm.c,sha256=LseS09A_syLXv0C4n5C_loPIGg3B0sjOZZ5UVZ3D-po,10453 +mypyc/lib-rt/base64/arch/avx2/enc_reshuffle.c,sha256=CWOGlvQQEsOXzCjkA5KV5EYlgAyXqyMthfD09qQbJ8w,2714 +mypyc/lib-rt/base64/arch/avx2/enc_translate.c,sha256=ZbxW4tT0V5uZEOqiium6CtRnXaYK1gd-JdG-P77RRSM,1251 +mypyc/lib-rt/base64/arch/avx512/codec.c,sha256=ag2rOWJD2IWz7h8Qu0nbS43xaZy6Ea2RVBtnNhO9Wfc,923 +mypyc/lib-rt/base64/arch/avx512/enc_loop.c,sha256=c264nxVYfl3m72ArgsHGol9-FQW4e4yipzTm1yoSMW8,1506 +mypyc/lib-rt/base64/arch/avx512/enc_reshuffle_translate.c,sha256=lESfOAK6yVviMyCwAQrnpWQJdxds0m109czGSbxeuRw,2277 +mypyc/lib-rt/base64/arch/generic/32/dec_loop.c,sha256=XCo00web4l5L98dDc_4ZPjdwRhwJYJJyv6zTg2ia3kM,2218 +mypyc/lib-rt/base64/arch/generic/32/enc_loop.c,sha256=8WMMHEf-xmo5f6Z7DY5JVaSvAaf80pziA9vm--GvYj0,1932 +mypyc/lib-rt/base64/arch/generic/64/enc_loop.c,sha256=6jBEKBphrKK0f3a4ZUg4pY1YXvEnVW5yvJcXtoMAOyE,2128 +mypyc/lib-rt/base64/arch/generic/codec.c,sha256=7yiK4he-nAvRAMS3RF2bkL3Q0k19OusjyAjcuLniSAM,790 +mypyc/lib-rt/base64/arch/generic/dec_head.c,sha256=XG7_4Z7O5zIWkYbyhiCuyGz8kQOT1P3hd73WJDuGSsk,791 +mypyc/lib-rt/base64/arch/generic/dec_tail.c,sha256=l4QXLn3bJYYhPJzTz2Hi1OuISw0svJAsgLkMllTCX6s,1774 +mypyc/lib-rt/base64/arch/generic/enc_head.c,sha256=gY1NrRIVDBKz1krrqbmBFQCXvW7TsISu6SP2h0Ty1W4,585 +mypyc/lib-rt/base64/arch/generic/enc_tail.c,sha256=oIaNxx1iv54RB_sjfCt9y3iwcKeYVDrXmQdj28RT6sc,637 +mypyc/lib-rt/base64/arch/neon32/codec.c,sha256=OSnQicSBOazJecWFtqLAJswxO4XFQ_6zXUEyYyDzlwo,1984 +mypyc/lib-rt/base64/arch/neon32/dec_loop.c,sha256=RP9UALT25-0h9y0ZZT-UfcUPGPnRAdrCY7DFT_qQwtY,2906 +mypyc/lib-rt/base64/arch/neon32/enc_loop.c,sha256=9OFz_fJlYxFHOEWSqftSE7D1lrK2wYXW0XswNcZ1K1U,4655 +mypyc/lib-rt/base64/arch/neon32/enc_reshuffle.c,sha256=1KiwN8mbkMlHGt1Vdh9ZqFCxmPSGbFeirZF1r_KNr6o,982 +mypyc/lib-rt/base64/arch/neon32/enc_translate.c,sha256=6p36hJofziya1lgosMW2X7Q8Hp6pU01-DJmWsj_iNQw,2116 +mypyc/lib-rt/base64/arch/neon64/codec.c,sha256=bFxPmkQYP_mXvebGKnTHa5GLY_i429L505UbBRIIYXU,2166 +mypyc/lib-rt/base64/arch/neon64/dec_loop.c,sha256=QDwhq_CvCMo6p1JMjNW5S8-Z5woMVDxMeExeZXxO_qA,5263 +mypyc/lib-rt/base64/arch/neon64/enc_loop.c,sha256=blYyAtfGMaOj1jhqaeHbBxRvyXRbJEzXsmv2S5aIL-4,1857 +mypyc/lib-rt/base64/arch/neon64/enc_loop_asm.c,sha256=1EyqW3h__LuKAXbvQjSL7JZolvzoiEVdcgDXwDMalwU,5617 +mypyc/lib-rt/base64/arch/neon64/enc_reshuffle.c,sha256=re6811Zr3Tzd98VPMWfeTD6gi4LTznykA9QIqTxoEpc,988 +mypyc/lib-rt/base64/arch/sse41/codec.c,sha256=HMnJ1nKDppzPNqHOpctjMTDH2rg48z9XVMqpeGperSM,1248 +mypyc/lib-rt/base64/arch/sse42/codec.c,sha256=IYs-hW57DNWTWImv0vHgoCcjcoM3EIOGF53PqFXXvt8,1248 +mypyc/lib-rt/base64/arch/ssse3/codec.c,sha256=yjJ11HRvUV3XmRpLclkebd3fY_bjwRoHbtDhdq5y_8o,1334 +mypyc/lib-rt/base64/arch/ssse3/dec_loop.c,sha256=ijGEu4Sqie-yqQe6SBIsJUCYht___loZE-QWh97LOJk,6891 +mypyc/lib-rt/base64/arch/ssse3/dec_reshuffle.c,sha256=1he_eNgNRnjYzq8ggN_UQhm3iu6EMrfWEILfJ_PVDII,1118 
+mypyc/lib-rt/base64/arch/ssse3/enc_loop.c,sha256=j04w3bGmfUhJfG1lfa5Q7L_R2NztzS96A-r_veHeTLg,1549 +mypyc/lib-rt/base64/arch/ssse3/enc_loop_asm.c,sha256=x8xD-nt8hZbrL40GvwAZHTchbCfBENgW_sBZ_AB3New,9310 +mypyc/lib-rt/base64/arch/ssse3/enc_reshuffle.c,sha256=PmfzUCjYt32d6GfeI-Y7yhISywFON1k63kT34PqRZAI,1514 +mypyc/lib-rt/base64/arch/ssse3/enc_translate.c,sha256=MubLb6H3NQxyBjDnqFednwREHp9BSpGr_uk_riGL62k,1171 +mypyc/lib-rt/base64/codec_choose.c,sha256=bc_oLC_HVv4jNnXKuJ3clxJZHpa8uCDxR_gnri7zg3w,7590 +mypyc/lib-rt/base64/codecs.h,sha256=UJ-aFppCj4u4NoCv9WJtqeBWXYMF_JY8Jg8p1PoerUo,1188 +mypyc/lib-rt/base64/config.h,sha256=syJIoDzie8r-QIbaAcPNN-HL62z5yMnvCSu_Glisv-w,538 +mypyc/lib-rt/base64/env.h,sha256=7BFrEAjO7eymj4U3t-GHkDb3CgDlRTvFOqQLl_ERWvQ,2453 +mypyc/lib-rt/base64/lib.c,sha256=Wsr16GjZknZVF19smY382ZXxbZb94JcL28jkFB0G_Wo,3270 +mypyc/lib-rt/base64/libbase64.h,sha256=NTBKkl1S6J7OGEqOAQiHIwEuNcS9CFC8HM7t4y7YRnY,4789 +mypyc/lib-rt/base64/tables/table_dec_32bit.h,sha256=huHBM0bQhO-qaY4VKQXZd_sPGr-hGuLoCQAsBYULbo0,25258 +mypyc/lib-rt/base64/tables/table_enc_12bit.h,sha256=d5pGp6iJjmRvuy53oS9QDQ_om9Ra4Y9BqnKTyRDA8mE,74858 +mypyc/lib-rt/base64/tables/tables.c,sha256=8EMu3og65__OzaHO_lbIk8ZMtdXyHqe4j7VpLRfPggY,2051 +mypyc/lib-rt/base64/tables/tables.h,sha256=Vnp9YLfiaFlKzM3JWSi3FJFG53rdzrMluwz1XqSRSvI,704 +mypyc/lib-rt/bytes_ops.c,sha256=CxEmjAo9A93x1Z9K8WR1OnkYc034DeuPLEZEiI9fXfA,5550 +mypyc/lib-rt/dict_ops.c,sha256=RcHN1Ye0jIauYJK5-xt3G2GcuhkIRg-jofFOSB_RuV4,13859 +mypyc/lib-rt/exc_ops.c,sha256=HqgqyYOtEjYkn7hoHJa-veGnvdxOqL5zkb3-cLZ6CY0,8315 +mypyc/lib-rt/float_ops.c,sha256=MdcyrPHS44ct8ELfLnGEuUXveD7Kiy4onTYSSHwKCiU,6326 +mypyc/lib-rt/generic_ops.c,sha256=yTHiCG4diW9B5a407M-5wQlIEqiCFs_eKVCMavHC1Zs,2419 +mypyc/lib-rt/getargs.c,sha256=nmMOQVUDFnMNLUu6KQSoC1VpOd6dQLODZpUu4CQRYtI,15779 +mypyc/lib-rt/getargsfast.c,sha256=la16ZxNQafGbzrr28N9vDKcC7kiXZCkt19tvVw9GGDs,18814 +mypyc/lib-rt/init.c,sha256=NLD1n-YUcaQCQrDfJg6mxUjDS9t5ZYGySkHI-pxWQgI,782 +mypyc/lib-rt/int_ops.c,sha256=eL0Gv6FiIQC32c-oBph43YAhl3ANEn-ZM76LC97W5M0,19383 +mypyc/lib-rt/librt_base64.c,sha256=O4kj_UfPxdjjmFUmkBhp6Mb4G6rsctJXYFveR9EQrao,9333 +mypyc/lib-rt/librt_base64.h,sha256=QlVQbVbqVNXWCyJkXD9U4wk4Iul_w0LmiINC8ExedF0,1843 +mypyc/lib-rt/librt_internal.c,sha256=LDTg-NAjSIWUBqvdWCN8X6YtP5aWlNNIKvWki1cZ-kQ,32133 +mypyc/lib-rt/librt_internal.h,sha256=X6D3MFpbWJswSNKa7fMUsj_Mlc3MRMCvSBCECHFp2wk,5181 +mypyc/lib-rt/list_ops.c,sha256=m2GS_7ebbZf096F9NXB9OVHhGdr633y4PmFbdkWR1bg,11864 +mypyc/lib-rt/misc_ops.c,sha256=MXPPuGO5-kl8vM-vuy06Y7UBWjzpBitoRBIZjjz-Lpw,35901 +mypyc/lib-rt/module_shim.tmpl,sha256=HciO4-fZWZ4de_Xjb1P3n20ajJuab5tt5INgt5Pab7g,670 +mypyc/lib-rt/module_shim_no_gil_multiphase.tmpl,sha256=dEyyzaG6xmIgCOyWNX7qC4YjhgSHc_5_6yNG-WbUKUo,1205 +mypyc/lib-rt/mypyc_util.h,sha256=tNFYVI-Pu5ewaGH2eRVY_Lq159nbQZi9bho_LS-cRjE,5529 +mypyc/lib-rt/pythoncapi_compat.h,sha256=TF6SIFdR6KYE_FlWc-2JL8GcZGpsdf_LoQSEj0ua0_o,70628 +mypyc/lib-rt/pythonsupport.c,sha256=9yxvoaiXO3fLB_TQuHmFyzWTVh61kR_xew454l8gnWU,2343 +mypyc/lib-rt/pythonsupport.h,sha256=71VRVBTUecFGYgSJHMEm4OeH0NxqQgwN_weG1-T4LS8,13516 +mypyc/lib-rt/set_ops.c,sha256=-fImDML6Aobq7-NCbb28O19g6C2YyHkGJ6NF1gueHJM,351 +mypyc/lib-rt/str_ops.c,sha256=nnduBPYut1BKa1RwtjidiH8hMZWeuHvvwgvISqFcgLs,20771 +mypyc/lib-rt/tuple_ops.c,sha256=Fe7Tj8ZVF6HTVrflq9um-rDCduF-M846sJ1TYbpX4p0,1956 +mypyc/lower/__init__.cpython-312-x86_64-linux-gnu.so,sha256=sS3XEcyASkZIn3HRzhzuvyEi4taUILhaUgb2lW_9fqU,15976 +mypyc/lower/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+mypyc/lower/__pycache__/__init__.cpython-312.pyc,, +mypyc/lower/__pycache__/int_ops.cpython-312.pyc,, +mypyc/lower/__pycache__/list_ops.cpython-312.pyc,, +mypyc/lower/__pycache__/misc_ops.cpython-312.pyc,, +mypyc/lower/__pycache__/registry.cpython-312.pyc,, +mypyc/lower/int_ops.cpython-312-x86_64-linux-gnu.so,sha256=2vAQnzne9EEnNnxpBJZepdbhT10jL9gUvwHITkndsNA,15984 +mypyc/lower/int_ops.py,sha256=3YiQ1qc5cjNJwp_Xm_8tztYcBcKvxr1_1YPmCqACOnY,4777 +mypyc/lower/list_ops.cpython-312-x86_64-linux-gnu.so,sha256=0TRt0pQ0aMjWdKos_ZLA_WwqAtb89rlkona_cONH9cI,15984 +mypyc/lower/list_ops.py,sha256=OZP8EEVq-IpEUm8lBnQqKNB3BvJDu3ILJtbm5_dgJyc,2522 +mypyc/lower/misc_ops.cpython-312-x86_64-linux-gnu.so,sha256=bqcc9iWhAjFft7nKrMPPeAyf6lKcyfieokMEc4jgeaI,15984 +mypyc/lower/misc_ops.py,sha256=Yx13PN3rOGATc73u0rNDY-GjxRDYmRq-qDDMWXBz4fY,894 +mypyc/lower/registry.cpython-312-x86_64-linux-gnu.so,sha256=kjwB6RUMOGcEAEpZLnqSze0mw6GhbA-wmwwZPJQECIo,15984 +mypyc/lower/registry.py,sha256=GFk4WDAoMTodItePDIz7IAY2xGNHxudGVtfbNI5fo0c,830 +mypyc/namegen.cpython-312-x86_64-linux-gnu.so,sha256=QRekcTlf47TrumPVam4pPzVr0mwVYNfHo_qWNN8sfDs,15984 +mypyc/namegen.py,sha256=c5p7yxxUyWqJRNqKmkVCR1xW0u45LbIrqTPnR0n-I64,4934 +mypyc/options.cpython-312-x86_64-linux-gnu.so,sha256=qMpTgEvGvlEc-EpQFUeuQ40KdjVpHM4eHdUArCdr76M,15984 +mypyc/options.py,sha256=h10pJwzGIFTP3pnUeCR6AvpnTeGkryXd87VR_rr9dYQ,3020 +mypyc/primitives/__init__.cpython-312-x86_64-linux-gnu.so,sha256=4ttKB-B1pDgpUGaO5jFPMlX8yj_jWx2-tkuQMb09mj8,15992 +mypyc/primitives/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypyc/primitives/__pycache__/__init__.cpython-312.pyc,, +mypyc/primitives/__pycache__/bytes_ops.cpython-312.pyc,, +mypyc/primitives/__pycache__/dict_ops.cpython-312.pyc,, +mypyc/primitives/__pycache__/exc_ops.cpython-312.pyc,, +mypyc/primitives/__pycache__/float_ops.cpython-312.pyc,, +mypyc/primitives/__pycache__/generic_ops.cpython-312.pyc,, +mypyc/primitives/__pycache__/int_ops.cpython-312.pyc,, +mypyc/primitives/__pycache__/list_ops.cpython-312.pyc,, +mypyc/primitives/__pycache__/misc_ops.cpython-312.pyc,, +mypyc/primitives/__pycache__/registry.cpython-312.pyc,, +mypyc/primitives/__pycache__/set_ops.cpython-312.pyc,, +mypyc/primitives/__pycache__/str_ops.cpython-312.pyc,, +mypyc/primitives/__pycache__/tuple_ops.cpython-312.pyc,, +mypyc/primitives/__pycache__/weakref_ops.cpython-312.pyc,, +mypyc/primitives/bytes_ops.cpython-312-x86_64-linux-gnu.so,sha256=sBb1bN3YWO4a5RtPazzLJWwQx2HiwTF6OQB7q79RS8w,15984 +mypyc/primitives/bytes_ops.py,sha256=UGPIN4UD_p1KoAeWp9bvRQcQokK7ACrpksdewJbIKYo,3101 +mypyc/primitives/dict_ops.cpython-312-x86_64-linux-gnu.so,sha256=odPnYpuLeiM3Y7jLYVfPTJ09Cw6iYLU4D6rbpUpaQNY,15984 +mypyc/primitives/dict_ops.py,sha256=PWEAZVXfGavfGGGA8kyVCwkD3-vR7Qrks13NACVF4wI,8825 +mypyc/primitives/exc_ops.cpython-312-x86_64-linux-gnu.so,sha256=ebeNGcAypzTy_aaMo8cAYczc7U7fXU0Lc5Bp-mEjKy0,15984 +mypyc/primitives/exc_ops.py,sha256=JMmg41FKPOwGFxNWwIhSFDCqwD7GaItxPmWeDajNMQE,3650 +mypyc/primitives/float_ops.cpython-312-x86_64-linux-gnu.so,sha256=KgursKADx4QSJMHCNEeIr9H8xSgCaY72u-9qbM5_3hQ,15984 +mypyc/primitives/float_ops.py,sha256=uWZfhN6ixM46o-Ryu9nRSLN-zopol8cnudIrnSRRf7I,4090 +mypyc/primitives/generic_ops.cpython-312-x86_64-linux-gnu.so,sha256=Q36SPt3hTzTOA0p2umDTX8bnxFsIqGwxXM7Kx30C2nk,15992 +mypyc/primitives/generic_ops.py,sha256=PkAUT1YlfpwbeDkMNNLTwc01eCqmLPv3-kU0ZtwwWsA,11749 +mypyc/primitives/int_ops.cpython-312-x86_64-linux-gnu.so,sha256=T0zw-mMIpOdLvDZTDMgrxvs2778p8QjVntx28QpT53E,15984 
+mypyc/primitives/int_ops.py,sha256=ig1ojLXn3-3PP0dfLVnQsfw-oFTJ_XRLsKntacjlFjk,9464 +mypyc/primitives/list_ops.cpython-312-x86_64-linux-gnu.so,sha256=c8xhuYnR5hINXrPOYXQ0zuk6BWV7Qfg0vuuAHN2BEbQ,15984 +mypyc/primitives/list_ops.py,sha256=4q00qcasUpFYZOsGsSF5-9EQXgvh9XfD_WdCqC9NngU,9127 +mypyc/primitives/misc_ops.cpython-312-x86_64-linux-gnu.so,sha256=Ki2La_K_I6TZaz_snxwTzXEfxX1I0WOkPxmcNQ3rCDY,15984 +mypyc/primitives/misc_ops.py,sha256=uiF2ryHM-26SfDPYFZBW35OmMpgXNeaYHCWYywKcd8s,13893 +mypyc/primitives/registry.cpython-312-x86_64-linux-gnu.so,sha256=M_LKCWvatMsa99xRowL22vQ0Scfz1pgZ8potH8PPnxc,15984 +mypyc/primitives/registry.py,sha256=PTCFahzaI10GXFoOGM00GvKN7nKMN0z6mA2FKR5GhdE,12789 +mypyc/primitives/set_ops.cpython-312-x86_64-linux-gnu.so,sha256=fHMUxcIjjWAOJnxXa8QK1i5AI2ZCTdOrGXp6WgS7c1U,15984 +mypyc/primitives/set_ops.py,sha256=dgy_6M2ZuQglEYlf-AX1YNQPgaYK9uKQ9Vr3svMf768,3795 +mypyc/primitives/str_ops.cpython-312-x86_64-linux-gnu.so,sha256=4guJw59v-KPmU4CkcYN0WBsddp-UbFaLYh9kzkB8QR8,15984 +mypyc/primitives/str_ops.py,sha256=IBTohTKGPhyVr10uzrSZhlZQ6rd3EV07wXCvdtFYJP8,13347 +mypyc/primitives/tuple_ops.cpython-312-x86_64-linux-gnu.so,sha256=2N_4jhxwi-DBPLeA68-uuZgGSFhUwvlu6mgScbbBDdc,15984 +mypyc/primitives/tuple_ops.py,sha256=PeDVdwZkvOxvnnOP_iUi4c-rzPiF4LqaUqv5yu98-lQ,3752 +mypyc/primitives/weakref_ops.cpython-312-x86_64-linux-gnu.so,sha256=9q1LOSV-o4XXBNBrLBb-sH38a7cmkGI0ibL19VbAVuQ,15992 +mypyc/primitives/weakref_ops.py,sha256=jfh4j2yO5OAFKXsSQeJKPIKWl3j3mYnyDpjwgj9mUHI,1160 +mypyc/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypyc/rt_subtype.cpython-312-x86_64-linux-gnu.so,sha256=yfHLIfHNNV4fGSyKL3xVYZkQibxVpguV_qjEW_eWObk,15992 +mypyc/rt_subtype.py,sha256=rAoZ_IRp7MFVmd_xtbgL6wTeU9h0pxjlYjhldfgZEc4,2448 +mypyc/sametype.cpython-312-x86_64-linux-gnu.so,sha256=9ZXR80rvltmzgMoTmRslMSWPPqv6Uk5_8zc5GzYriQA,15984 +mypyc/sametype.py,sha256=T3wXw8XjNk-W2W2CW9giAjYtFYdrh2HBjsam9-jwvmU,2464 +mypyc/subtype.cpython-312-x86_64-linux-gnu.so,sha256=hhNvttdK4VcCgQP3W5-0MA9sAm9psz73DtvhJJZvNsI,15984 +mypyc/subtype.py,sha256=Tg3pYSXWBiDRMHKnfgDKPFiFyPYHiShnnA1vOhkECbg,2757 +mypyc/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypyc/test/__pycache__/__init__.cpython-312.pyc,, +mypyc/test/__pycache__/config.cpython-312.pyc,, +mypyc/test/__pycache__/test_alwaysdefined.cpython-312.pyc,, +mypyc/test/__pycache__/test_analysis.cpython-312.pyc,, +mypyc/test/__pycache__/test_annotate.cpython-312.pyc,, +mypyc/test/__pycache__/test_cheader.cpython-312.pyc,, +mypyc/test/__pycache__/test_commandline.cpython-312.pyc,, +mypyc/test/__pycache__/test_emit.cpython-312.pyc,, +mypyc/test/__pycache__/test_emitclass.cpython-312.pyc,, +mypyc/test/__pycache__/test_emitfunc.cpython-312.pyc,, +mypyc/test/__pycache__/test_emitwrapper.cpython-312.pyc,, +mypyc/test/__pycache__/test_exceptions.cpython-312.pyc,, +mypyc/test/__pycache__/test_external.cpython-312.pyc,, +mypyc/test/__pycache__/test_irbuild.cpython-312.pyc,, +mypyc/test/__pycache__/test_ircheck.cpython-312.pyc,, +mypyc/test/__pycache__/test_literals.cpython-312.pyc,, +mypyc/test/__pycache__/test_lowering.cpython-312.pyc,, +mypyc/test/__pycache__/test_misc.cpython-312.pyc,, +mypyc/test/__pycache__/test_namegen.cpython-312.pyc,, +mypyc/test/__pycache__/test_optimizations.cpython-312.pyc,, +mypyc/test/__pycache__/test_pprint.cpython-312.pyc,, +mypyc/test/__pycache__/test_rarray.cpython-312.pyc,, +mypyc/test/__pycache__/test_refcount.cpython-312.pyc,, +mypyc/test/__pycache__/test_run.cpython-312.pyc,, 
+mypyc/test/__pycache__/test_serialization.cpython-312.pyc,, +mypyc/test/__pycache__/test_struct.cpython-312.pyc,, +mypyc/test/__pycache__/test_tuplename.cpython-312.pyc,, +mypyc/test/__pycache__/test_typeops.cpython-312.pyc,, +mypyc/test/__pycache__/testutil.cpython-312.pyc,, +mypyc/test/config.py,sha256=ZnruYrojiT_ZG4RrYzoESoNTiZY1bWuk0SQ2CFZHTQA,406 +mypyc/test/test_alwaysdefined.py,sha256=NtJx8cYeU9wblyglViCc1Ww0yRyoEhElW1HV9-7i_ok,1528 +mypyc/test/test_analysis.py,sha256=XOCAxn-pn5a5N_gb02HAtZsLh_eXZDVlkHjVXWOFHWE,3259 +mypyc/test/test_annotate.py,sha256=WgWtYrPHQHu8PVhbHkk4GuG9Xf_r63Vu5adBkkzRPfo,2600 +mypyc/test/test_cheader.py,sha256=oZYAP92NynG-xabhrys7baBlRzBTqLaTjlrfHTTJ7Y8,1540 +mypyc/test/test_commandline.py,sha256=ULYaN9gmgBXwnGUVYIui_x8Ybny3Wy5KKHpuJaeXxFs,2823 +mypyc/test/test_emit.py,sha256=fozAGdzCila7weObkiTDsmdyBLjHRryb7wIpE-5doZE,6585 +mypyc/test/test_emitclass.py,sha256=DE9sG9K-05LjbDvT6CWidDJB-onab7O0t8l2GVhjYlM,1228 +mypyc/test/test_emitfunc.py,sha256=ovDuTvHtdaNFjk5Dx0HVQ-mHea7K-J0xqGF2nXvKbHM,36172 +mypyc/test/test_emitwrapper.py,sha256=yl-uO-yZLeYf44LzMzltCSnIASbZjAWLVlY5kOjbx3w,2213 +mypyc/test/test_exceptions.py,sha256=CvvGhQybOJxcxzH2lwWJPaxAbthE9aJcROpl22bZ5LE,2133 +mypyc/test/test_external.py,sha256=lHBiO7iyZ5G0bHOWdNjkgLHHg-ayg5_zVKkxsrc5obA,1832 +mypyc/test/test_irbuild.py,sha256=ZGwpFSCcuJwwu0n4VjZNuvCG3N2jzrROa-nMyNiSYEA,2871 +mypyc/test/test_ircheck.py,sha256=OxY-wNKtyD9CMvSRuzPLBrboKPlCOUXI1Ai41e1Jutc,6868 +mypyc/test/test_literals.py,sha256=VospqX81-sNRhInwnnwC81Kzk9z1hr7UsnIDjC1NXhs,3325 +mypyc/test/test_lowering.py,sha256=GXWA1AX5SVdOieVeYBPsYuqIr0NHyXj94Jq7kpCMCtQ,2433 +mypyc/test/test_misc.py,sha256=qUivgecP3SysLGw5I-dLMRxSo-39yahV9qHF_z3ZNWM,690 +mypyc/test/test_namegen.py,sha256=GZaE_OGUApOf-RzJKe-5XlVHL5-rwoINEhPotCuwx5Q,2720 +mypyc/test/test_optimizations.py,sha256=irBs4gjdlo3dXgbwQTZXH3xRB-YA0vXz7rNSeUAP7p4,2256 +mypyc/test/test_pprint.py,sha256=6kfSLDyEvNXPGmdbvDojM4wEdWFoi-6Oh23AHOjx-v4,1281 +mypyc/test/test_rarray.py,sha256=eVIfBeR2t6F-16QXznpycEN5DkRGYAvR-hNbkIkaRPw,1488 +mypyc/test/test_refcount.py,sha256=dZbntAtDE5TAv2wxRRRVaUVaR--8PoHQeDjQooDSPEc,2052 +mypyc/test/test_run.py,sha256=fPF7I156jywz3Hf7hgP_VLuhtl1WQOCFHnxCO7KlTsQ,18052 +mypyc/test/test_serialization.py,sha256=RcY1tx44PKApqinIQGnju3jvbZbYzqqBei68JqbiYEY,4059 +mypyc/test/test_struct.py,sha256=EEfu868uSm1wJmwowq1S_g1wInUaURX8tIhoPqGzs8w,3903 +mypyc/test/test_tuplename.py,sha256=P03_NcIw1n-g4vFOig_aKX5RgLqoBkO3xh7M2Zzerkg,1044 +mypyc/test/test_typeops.py,sha256=FQvUfsjTKL_eIPbBxcchG6zrsVJvgWpb5U316NrvFCw,3935 +mypyc/test/testutil.py,sha256=0ceG0Tmhj6ubTL2igZpb6DoALG5e89ky9Rkzyp2uviA,9738 +mypyc/transform/__init__.cpython-312-x86_64-linux-gnu.so,sha256=i968EOyJYCQvuuQNFlZrlemsXKRoct9rWK9XjEYXNn8,15984 +mypyc/transform/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mypyc/transform/__pycache__/__init__.cpython-312.pyc,, +mypyc/transform/__pycache__/copy_propagation.cpython-312.pyc,, +mypyc/transform/__pycache__/exceptions.cpython-312.pyc,, +mypyc/transform/__pycache__/flag_elimination.cpython-312.pyc,, +mypyc/transform/__pycache__/ir_transform.cpython-312.pyc,, +mypyc/transform/__pycache__/log_trace.cpython-312.pyc,, +mypyc/transform/__pycache__/lower.cpython-312.pyc,, +mypyc/transform/__pycache__/refcount.cpython-312.pyc,, +mypyc/transform/__pycache__/spill.cpython-312.pyc,, +mypyc/transform/__pycache__/uninit.cpython-312.pyc,, +mypyc/transform/copy_propagation.cpython-312-x86_64-linux-gnu.so,sha256=XlGRllNymc-fiXCCIOXuYEDXxbmevaxlRyqEiJcoJRQ,16000 
+mypyc/transform/copy_propagation.py,sha256=JrbL3Y-qPlcSGyWI2_jBO-UezHDrMf2pIII9wRu6fJI,3435 +mypyc/transform/exceptions.cpython-312-x86_64-linux-gnu.so,sha256=17hLCp9TR-6WMoJ3vjb89u4-KsBf9B2FNrP_WVRLAK8,15992 +mypyc/transform/exceptions.py,sha256=K2z1piHIamVECHwNNgJLKyVpYZMSjEUDn6vStbR8JUk,6414 +mypyc/transform/flag_elimination.cpython-312-x86_64-linux-gnu.so,sha256=xWBkYWp0wYSJ7F3QM67KeuAZYuwVFK5m96T2okYmzy4,16000 +mypyc/transform/flag_elimination.py,sha256=84M5VUt57iEj-yJR02XVeJsV-nviYO5YoqiDuOK0kxI,3531 +mypyc/transform/ir_transform.cpython-312-x86_64-linux-gnu.so,sha256=u-Xxpdt8XeylHLt8CHMhQCuRIztmyvp3ptdfD9wVMEg,15992 +mypyc/transform/ir_transform.py,sha256=20dNEt2256ARi2dB2GndGQIa9jWIKy1DaJ-A35LrI0s,11501 +mypyc/transform/log_trace.cpython-312-x86_64-linux-gnu.so,sha256=s441ZaZWDyF3StrHx6agffC1TjdrDE_0N2gWnVyPZBY,15984 +mypyc/transform/log_trace.py,sha256=_vLi9moWRpRqi5w85dLZt7Olg0n0HCPwh_NJMu0gX_E,5374 +mypyc/transform/lower.cpython-312-x86_64-linux-gnu.so,sha256=hgKDQWRYGnIaYmlHWvaKZaT2GZ9ksR6Nrbvrbeomf88,15976 +mypyc/transform/lower.py,sha256=LfFFCqN5_XoISoHoAHDggVTo9E45eeBiRnIQzJWWHAg,1344 +mypyc/transform/refcount.cpython-312-x86_64-linux-gnu.so,sha256=Qyzk-7_0HiQZFzlWUGpKrX274RQTs51y7lSiF8tkuOY,15984 +mypyc/transform/refcount.py,sha256=iW00ioXptnZe_CK9yBXe1dIajeHqweo3LNlKqHVMobQ,10109 +mypyc/transform/spill.cpython-312-x86_64-linux-gnu.so,sha256=XyyPDt6TVUx5qY5fpUkVrsjfDsusoLKzCHccEjZXvVg,15976 +mypyc/transform/spill.py,sha256=dO7W2tChyzpb3E_Ut4RD510SIrvAu-mVM45_kR13JLk,4185 +mypyc/transform/uninit.cpython-312-x86_64-linux-gnu.so,sha256=3KbH5bDd1hSX5itgQ_tsgmc1xPxYCq7diEMvzs5gz8Q,15984 +mypyc/transform/uninit.py,sha256=SlQ_n9TZ7zeb5m3XCEevjiUp4FXAglwuIwBklgpjIto,7006 diff --git a/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/WHEEL new file mode 100644 index 0000000..f3e8a97 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/WHEEL @@ -0,0 +1,7 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: false +Tag: cp312-cp312-manylinux_2_17_x86_64 +Tag: cp312-cp312-manylinux2014_x86_64 +Tag: cp312-cp312-manylinux_2_28_x86_64 + diff --git a/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/entry_points.txt b/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/entry_points.txt new file mode 100644 index 0000000..0ece837 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/entry_points.txt @@ -0,0 +1,6 @@ +[console_scripts] +dmypy = mypy.dmypy.client:console_entry +mypy = mypy.__main__:console_entry +mypyc = mypyc.__main__:main +stubgen = mypy.stubgen:main +stubtest = mypy.stubtest:main diff --git a/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/licenses/LICENSE b/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/licenses/LICENSE new file mode 100644 index 0000000..080c5a4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/licenses/LICENSE @@ -0,0 +1,264 @@ +Mypy (and mypyc) are licensed under the terms of the MIT license, reproduced below. + += = = = = + +The MIT License + +Copyright (c) 2012-2023 Jukka Lehtosalo and contributors +Copyright (c) 2015-2023 Dropbox, Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + += = = = = + +Portions of mypy and mypyc are licensed under different licenses. +The files +mypyc/lib-rt/pythonsupport.h, mypyc/lib-rt/getargs.c and +mypyc/lib-rt/getargsfast.c are licensed under the PSF 2 License, reproduced +below. + += = = = = + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012 Python Software Foundation; All Rights Reserved" are retained in Python +alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. 
This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. 
Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the Internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the Internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. 
+ + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + += = = = = + +Files under lib-rt/base64 are licensed under the following license. + += = = = = + +Copyright (c) 2005-2007, Nick Galbreath +Copyright (c) 2015-2018, Wojciech Muła +Copyright (c) 2016-2017, Matthieu Darbois +Copyright (c) 2013-2022, Alfred Klomp +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +- Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + +- Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/top_level.txt b/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/top_level.txt new file mode 100644 index 0000000..8bf9ee8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy-1.19.1.dist-info/top_level.txt @@ -0,0 +1,3 @@ +4c842c94c09923bae9e4__mypyc +mypy +mypyc diff --git a/.venv/lib/python3.12/site-packages/mypy/__init__.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/__init__.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..56c98c1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/__init__.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/__init__.py b/.venv/lib/python3.12/site-packages/mypy/__init__.py new file mode 100644 index 0000000..fa40002 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/__init__.py @@ -0,0 +1 @@ +# This page intentionally left blank diff --git a/.venv/lib/python3.12/site-packages/mypy/__main__.py b/.venv/lib/python3.12/site-packages/mypy/__main__.py new file mode 100644 index 0000000..049553c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/__main__.py @@ -0,0 +1,37 @@ +"""Mypy type checker command line tool.""" + +from __future__ import annotations + +import os +import sys +import traceback + +from mypy.main import main, process_options +from mypy.util import FancyFormatter + + +def console_entry() -> None: + try: + main() + sys.stdout.flush() + sys.stderr.flush() + except BrokenPipeError: + # Python flushes standard streams on exit; redirect remaining output + # to devnull to avoid another BrokenPipeError at shutdown + devnull = os.open(os.devnull, os.O_WRONLY) + os.dup2(devnull, sys.stdout.fileno()) + sys.exit(2) + except KeyboardInterrupt: + _, options = process_options(args=sys.argv[1:]) + if options.show_traceback: + sys.stdout.write(traceback.format_exc()) + formatter = FancyFormatter(sys.stdout, sys.stderr, False) + msg = "Interrupted\n" + sys.stdout.write(formatter.style(msg, color="red", bold=True)) + sys.stdout.flush() + sys.stderr.flush() + sys.exit(2) + + +if __name__ == "__main__": + console_entry() diff --git a/.venv/lib/python3.12/site-packages/mypy/api.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/api.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..82618a0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/api.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/api.py b/.venv/lib/python3.12/site-packages/mypy/api.py new file mode 100644 index 0000000..e2179db --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/api.py @@ -0,0 +1,94 @@ +"""This module makes it possible to use mypy as part of a Python application. + +Since mypy still changes, the API was kept utterly simple and non-intrusive. +It just mimics command line activation without starting a new interpreter. +So the normal docs about the mypy command line apply. +Changes in the command line version of mypy will be immediately usable. + +Just import this module and then call the 'run' function with a parameter of +type List[str], containing what normally would have been the command line +arguments to mypy. + +Function 'run' returns a Tuple[str, str, int], namely +(<normal_report>, <error_report>, <exit_status>), +in which <normal_report> is what mypy normally writes to sys.stdout, +<error_report> is what mypy normally writes to sys.stderr and exit_status is +the exit status mypy normally returns to the operating system. 
+ +Any pretty formatting is left to the caller. + +The 'run_dmypy' function is similar, but instead mimics invocation of +dmypy. Note that run_dmypy is not thread-safe and modifies sys.stdout +and sys.stderr during its invocation. + +Note that these APIs don't support incremental generation of error +messages. + +Trivial example of code using this module: + +import sys +from mypy import api + +result = api.run(sys.argv[1:]) + +if result[0]: + print('\nType checking report:\n') + print(result[0]) # stdout + +if result[1]: + print('\nError report:\n') + print(result[1]) # stderr + +print('\nExit status:', result[2]) + +""" + +from __future__ import annotations + +import sys +from io import StringIO +from typing import Callable, TextIO + + +def _run(main_wrapper: Callable[[TextIO, TextIO], None]) -> tuple[str, str, int]: + stdout = StringIO() + stderr = StringIO() + + try: + main_wrapper(stdout, stderr) + exit_status = 0 + except SystemExit as system_exit: + assert isinstance(system_exit.code, int) + exit_status = system_exit.code + + return stdout.getvalue(), stderr.getvalue(), exit_status + + +def run(args: list[str]) -> tuple[str, str, int]: + # Lazy import to avoid needing to import all of mypy to call run_dmypy + from mypy.main import main + + return _run( + lambda stdout, stderr: main(args=args, stdout=stdout, stderr=stderr, clean_exit=True) + ) + + +def run_dmypy(args: list[str]) -> tuple[str, str, int]: + from mypy.dmypy.client import main + + # A bunch of effort has been put into threading stdout and stderr + # through the main API to avoid the threadsafety problems of + # modifying sys.stdout/sys.stderr, but that hasn't been done for + # the dmypy client, so we just do the non-threadsafe thing. + def f(stdout: TextIO, stderr: TextIO) -> None: + old_stdout = sys.stdout + old_stderr = sys.stderr + try: + sys.stdout = stdout + sys.stderr = stderr + main(args) + finally: + sys.stdout = old_stdout + sys.stderr = old_stderr + + return _run(f) diff --git a/.venv/lib/python3.12/site-packages/mypy/applytype.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/applytype.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..98bd502 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/applytype.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/applytype.py b/.venv/lib/python3.12/site-packages/mypy/applytype.py new file mode 100644 index 0000000..dfeaf77 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/applytype.py @@ -0,0 +1,304 @@ +from __future__ import annotations + +from collections.abc import Iterable, Sequence +from typing import Callable + +import mypy.subtypes +from mypy.erasetype import erase_typevars +from mypy.expandtype import expand_type +from mypy.nodes import Context, TypeInfo +from mypy.type_visitor import TypeTranslator +from mypy.typeops import get_all_type_vars +from mypy.types import ( + AnyType, + CallableType, + Instance, + Parameters, + ParamSpecFlavor, + ParamSpecType, + PartialType, + ProperType, + Type, + TypeAliasType, + TypeVarId, + TypeVarLikeType, + TypeVarTupleType, + TypeVarType, + UninhabitedType, + UnpackType, + get_proper_type, + remove_dups, +) + + +def get_target_type( + tvar: TypeVarLikeType, + type: Type, + callable: CallableType, + report_incompatible_typevar_value: Callable[[CallableType, Type, str, Context], None], + context: Context, + skip_unsatisfied: bool, +) -> Type | None: + p_type = get_proper_type(type) + if isinstance(p_type, UninhabitedType) 
and tvar.has_default(): + return tvar.default + if isinstance(tvar, ParamSpecType): + return type + if isinstance(tvar, TypeVarTupleType): + return type + assert isinstance(tvar, TypeVarType) + values = tvar.values + if values: + if isinstance(p_type, AnyType): + return type + if isinstance(p_type, TypeVarType) and p_type.values: + # Allow substituting T1 for T if every allowed value of T1 + # is also a legal value of T. + if all(any(mypy.subtypes.is_same_type(v, v1) for v in values) for v1 in p_type.values): + return type + matching = [] + for value in values: + if mypy.subtypes.is_subtype(type, value): + matching.append(value) + if matching: + best = matching[0] + # If there are more than one matching value, we select the narrowest + for match in matching[1:]: + if mypy.subtypes.is_subtype(match, best): + best = match + return best + if skip_unsatisfied: + return None + report_incompatible_typevar_value(callable, type, tvar.name, context) + else: + upper_bound = tvar.upper_bound + if tvar.name == "Self": + # Internally constructed Self-types contain class type variables in upper bound, + # so we need to erase them to avoid false positives. This is safe because we do + # not support type variables in upper bounds of user defined types. + upper_bound = erase_typevars(upper_bound) + if not mypy.subtypes.is_subtype(type, upper_bound): + if skip_unsatisfied: + return None + report_incompatible_typevar_value(callable, type, tvar.name, context) + return type + + +def apply_generic_arguments( + callable: CallableType, + orig_types: Sequence[Type | None], + report_incompatible_typevar_value: Callable[[CallableType, Type, str, Context], None], + context: Context, + skip_unsatisfied: bool = False, +) -> CallableType: + """Apply generic type arguments to a callable type. + + For example, applying [int] to 'def [T] (T) -> T' results in + 'def (int) -> int'. + + Note that each type can be None; in this case, it will not be applied. + + If `skip_unsatisfied` is True, then just skip the types that don't satisfy type variable + bound or constraints, instead of giving an error. + """ + tvars = callable.variables + assert len(orig_types) <= len(tvars) + # Check that inferred type variable values are compatible with allowed + # values and bounds. Also, promote subtype values to allowed values. + # Create a map from type variable id to target type. + id_to_type: dict[TypeVarId, Type] = {} + + for tvar, type in zip(tvars, orig_types): + assert not isinstance(type, PartialType), "Internal error: must never apply partial type" + if type is None: + continue + + target_type = get_target_type( + tvar, type, callable, report_incompatible_typevar_value, context, skip_unsatisfied + ) + if target_type is not None: + id_to_type[tvar.id] = target_type + + # TODO: validate arg_kinds/arg_names for ParamSpec and TypeVarTuple replacements, + # not just type variable bounds above. + param_spec = callable.param_spec() + if param_spec is not None: + nt = id_to_type.get(param_spec.id) + if nt is not None: + # ParamSpec expansion is special-cased, so we need to always expand callable + # as a whole, not expanding arguments individually. + callable = expand_type(callable, id_to_type) + assert isinstance(callable, CallableType) + return callable.copy_modified( + variables=[tv for tv in tvars if tv.id not in id_to_type] + ) + + # Apply arguments to argument types. 
+ var_arg = callable.var_arg() + if var_arg is not None and isinstance(var_arg.typ, UnpackType): + # Same as for ParamSpec, callable with variadic types needs to be expanded as a whole. + callable = expand_type(callable, id_to_type) + assert isinstance(callable, CallableType) + return callable.copy_modified(variables=[tv for tv in tvars if tv.id not in id_to_type]) + else: + callable = callable.copy_modified( + arg_types=[expand_type(at, id_to_type) for at in callable.arg_types] + ) + + # Apply arguments to TypeGuard and TypeIs if any. + if callable.type_guard is not None: + type_guard = expand_type(callable.type_guard, id_to_type) + else: + type_guard = None + if callable.type_is is not None: + type_is = expand_type(callable.type_is, id_to_type) + else: + type_is = None + + # The callable may retain some type vars if only some were applied. + # TODO: move apply_poly() logic here when new inference + # becomes universally used (i.e. in all passes + in unification). + # With this new logic we can actually *add* some new free variables. + remaining_tvars: list[TypeVarLikeType] = [] + for tv in tvars: + if tv.id in id_to_type: + continue + if not tv.has_default(): + remaining_tvars.append(tv) + continue + # TypeVarLike isn't in id_to_type mapping. + # Only expand the TypeVar default here. + typ = expand_type(tv, id_to_type) + assert isinstance(typ, TypeVarLikeType) + remaining_tvars.append(typ) + + return callable.copy_modified( + ret_type=expand_type(callable.ret_type, id_to_type), + variables=remaining_tvars, + type_guard=type_guard, + type_is=type_is, + ) + + +def apply_poly(tp: CallableType, poly_tvars: Sequence[TypeVarLikeType]) -> CallableType | None: + """Make free type variables generic in the type if possible. + + This will translate the type `tp` while trying to create valid bindings for + type variables `poly_tvars` while traversing the type. This follows the same rules + as we do during semantic analysis phase, examples: + * Callable[Callable[[T], T], T] -> def [T] (def (T) -> T) -> T + * Callable[[], Callable[[T], T]] -> def () -> def [T] (T -> T) + * List[T] -> None (not possible) + """ + try: + return tp.copy_modified( + arg_types=[t.accept(PolyTranslator(poly_tvars)) for t in tp.arg_types], + ret_type=tp.ret_type.accept(PolyTranslator(poly_tvars)), + variables=[], + ) + except PolyTranslationError: + return None + + +class PolyTranslationError(Exception): + pass + + +class PolyTranslator(TypeTranslator): + """Make free type variables generic in the type if possible. + + See docstring for apply_poly() for details. + """ + + def __init__( + self, + poly_tvars: Iterable[TypeVarLikeType], + bound_tvars: frozenset[TypeVarLikeType] = frozenset(), + seen_aliases: frozenset[TypeInfo] = frozenset(), + ) -> None: + super().__init__() + self.poly_tvars = set(poly_tvars) + # This is a simplified version of TypeVarScope used during semantic analysis. 
+ self.bound_tvars = bound_tvars + self.seen_aliases = seen_aliases + + def collect_vars(self, t: CallableType | Parameters) -> list[TypeVarLikeType]: + found_vars = [] + for arg in t.arg_types: + for tv in get_all_type_vars(arg): + if isinstance(tv, ParamSpecType): + normalized: TypeVarLikeType = tv.copy_modified( + flavor=ParamSpecFlavor.BARE, prefix=Parameters([], [], []) + ) + else: + normalized = tv + if normalized in self.poly_tvars and normalized not in self.bound_tvars: + found_vars.append(normalized) + return remove_dups(found_vars) + + def visit_callable_type(self, t: CallableType) -> Type: + found_vars = self.collect_vars(t) + self.bound_tvars |= set(found_vars) + result = super().visit_callable_type(t) + self.bound_tvars -= set(found_vars) + + assert isinstance(result, ProperType) and isinstance(result, CallableType) + result.variables = result.variables + tuple(found_vars) + return result + + def visit_type_var(self, t: TypeVarType) -> Type: + if t in self.poly_tvars and t not in self.bound_tvars: + raise PolyTranslationError() + return super().visit_type_var(t) + + def visit_param_spec(self, t: ParamSpecType) -> Type: + if t in self.poly_tvars and t not in self.bound_tvars: + raise PolyTranslationError() + return super().visit_param_spec(t) + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type: + if t in self.poly_tvars and t not in self.bound_tvars: + raise PolyTranslationError() + return super().visit_type_var_tuple(t) + + def visit_type_alias_type(self, t: TypeAliasType) -> Type: + if not t.args: + return t.copy_modified() + if not t.is_recursive: + return get_proper_type(t).accept(self) + # We can't handle polymorphic application for recursive generic aliases + # without risking an infinite recursion, just give up for now. + raise PolyTranslationError() + + def visit_instance(self, t: Instance) -> Type: + if t.type.has_param_spec_type: + # We need this special-casing to preserve the possibility to store a + # generic function in an instance type. Things like + # forall T . Foo[[x: T], T] + # are not really expressible in current type system, but this looks like + # a useful feature, so let's keep it. + param_spec_index = next( + i for (i, tv) in enumerate(t.type.defn.type_vars) if isinstance(tv, ParamSpecType) + ) + p = get_proper_type(t.args[param_spec_index]) + if isinstance(p, Parameters): + found_vars = self.collect_vars(p) + self.bound_tvars |= set(found_vars) + new_args = [a.accept(self) for a in t.args] + self.bound_tvars -= set(found_vars) + + repl = new_args[param_spec_index] + assert isinstance(repl, ProperType) and isinstance(repl, Parameters) + repl.variables = list(repl.variables) + list(found_vars) + return t.copy_modified(args=new_args) + # There is the same problem with callback protocols as with aliases + # (callback protocols are essentially more flexible aliases to callables). 
+ if t.args and t.type.is_protocol and t.type.protocol_members == ["__call__"]: + if t.type in self.seen_aliases: + raise PolyTranslationError() + call = mypy.subtypes.find_member("__call__", t, t, is_operator=True) + assert call is not None + return call.accept( + PolyTranslator(self.poly_tvars, self.bound_tvars, self.seen_aliases | {t.type}) + ) + return super().visit_instance(t) diff --git a/.venv/lib/python3.12/site-packages/mypy/argmap.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/argmap.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..42a7a99 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/argmap.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/argmap.py b/.venv/lib/python3.12/site-packages/mypy/argmap.py new file mode 100644 index 0000000..a3e8f7f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/argmap.py @@ -0,0 +1,269 @@ +"""Utilities for mapping between actual and formal arguments (and their types).""" + +from __future__ import annotations + +from collections.abc import Sequence +from typing import TYPE_CHECKING, Callable + +from mypy import nodes +from mypy.maptype import map_instance_to_supertype +from mypy.types import ( + AnyType, + Instance, + ParamSpecType, + TupleType, + Type, + TypedDictType, + TypeOfAny, + TypeVarTupleType, + UnpackType, + get_proper_type, +) + +if TYPE_CHECKING: + from mypy.infer import ArgumentInferContext + + +def map_actuals_to_formals( + actual_kinds: list[nodes.ArgKind], + actual_names: Sequence[str | None] | None, + formal_kinds: list[nodes.ArgKind], + formal_names: Sequence[str | None], + actual_arg_type: Callable[[int], Type], +) -> list[list[int]]: + """Calculate mapping between actual (caller) args and formals. + + The result contains a list of caller argument indexes mapping to each + callee argument index, indexed by callee index. + + The caller_arg_type argument should evaluate to the type of the actual + argument type with the given index. + """ + nformals = len(formal_kinds) + formal_to_actual: list[list[int]] = [[] for i in range(nformals)] + ambiguous_actual_kwargs: list[int] = [] + fi = 0 + for ai, actual_kind in enumerate(actual_kinds): + if actual_kind == nodes.ARG_POS: + if fi < nformals: + if not formal_kinds[fi].is_star(): + formal_to_actual[fi].append(ai) + fi += 1 + elif formal_kinds[fi] == nodes.ARG_STAR: + formal_to_actual[fi].append(ai) + elif actual_kind == nodes.ARG_STAR: + # We need to know the actual type to map varargs. + actualt = get_proper_type(actual_arg_type(ai)) + if isinstance(actualt, TupleType): + # A tuple actual maps to a fixed number of formals. + for _ in range(len(actualt.items)): + if fi < nformals: + if formal_kinds[fi] != nodes.ARG_STAR2: + formal_to_actual[fi].append(ai) + else: + break + if formal_kinds[fi] != nodes.ARG_STAR: + fi += 1 + else: + # Assume that it is an iterable (if it isn't, there will be + # an error later). 
+ while fi < nformals: + if formal_kinds[fi].is_named(star=True): + break + else: + formal_to_actual[fi].append(ai) + if formal_kinds[fi] == nodes.ARG_STAR: + break + fi += 1 + elif actual_kind.is_named(): + assert actual_names is not None, "Internal error: named kinds without names given" + name = actual_names[ai] + if name in formal_names and formal_kinds[formal_names.index(name)] != nodes.ARG_STAR: + formal_to_actual[formal_names.index(name)].append(ai) + elif nodes.ARG_STAR2 in formal_kinds: + formal_to_actual[formal_kinds.index(nodes.ARG_STAR2)].append(ai) + else: + assert actual_kind == nodes.ARG_STAR2 + actualt = get_proper_type(actual_arg_type(ai)) + if isinstance(actualt, TypedDictType): + for name in actualt.items: + if name in formal_names: + formal_to_actual[formal_names.index(name)].append(ai) + elif nodes.ARG_STAR2 in formal_kinds: + formal_to_actual[formal_kinds.index(nodes.ARG_STAR2)].append(ai) + else: + # We don't exactly know which **kwargs are provided by the + # caller, so we'll defer until all the other unambiguous + # actuals have been processed + ambiguous_actual_kwargs.append(ai) + + if ambiguous_actual_kwargs: + # Assume the ambiguous kwargs will fill the remaining arguments. + # + # TODO: If there are also tuple varargs, we might be missing some potential + # matches if the tuple was short enough to not match everything. + unmatched_formals = [ + fi + for fi in range(nformals) + if ( + formal_names[fi] + and ( + not formal_to_actual[fi] + or actual_kinds[formal_to_actual[fi][0]] == nodes.ARG_STAR + ) + and formal_kinds[fi] != nodes.ARG_STAR + ) + or formal_kinds[fi] == nodes.ARG_STAR2 + ] + for ai in ambiguous_actual_kwargs: + for fi in unmatched_formals: + formal_to_actual[fi].append(ai) + + return formal_to_actual + + +def map_formals_to_actuals( + actual_kinds: list[nodes.ArgKind], + actual_names: Sequence[str | None] | None, + formal_kinds: list[nodes.ArgKind], + formal_names: list[str | None], + actual_arg_type: Callable[[int], Type], +) -> list[list[int]]: + """Calculate the reverse mapping of map_actuals_to_formals.""" + formal_to_actual = map_actuals_to_formals( + actual_kinds, actual_names, formal_kinds, formal_names, actual_arg_type + ) + # Now reverse the mapping. + actual_to_formal: list[list[int]] = [[] for _ in actual_kinds] + for formal, actuals in enumerate(formal_to_actual): + for actual in actuals: + actual_to_formal[actual].append(formal) + return actual_to_formal + + +class ArgTypeExpander: + """Utility class for mapping actual argument types to formal arguments. + + One of the main responsibilities is to expand caller tuple *args and TypedDict + **kwargs, and to keep track of which tuple/TypedDict items have already been + consumed. + + Example: + + def f(x: int, *args: str) -> None: ... + f(*(1, 'x', 1.1)) + + We'd call expand_actual_type three times: + + 1. The first call would provide 'int' as the actual type of 'x' (from '1'). + 2. The second call would provide 'str' as one of the actual types for '*args'. + 2. The third call would provide 'float' as one of the actual types for '*args'. + + A single instance can process all the arguments for a single call. Each call + needs a separate instance since instances have per-call state. + """ + + def __init__(self, context: ArgumentInferContext) -> None: + # Next tuple *args index to use. + self.tuple_index = 0 + # Keyword arguments in TypedDict **kwargs used. + self.kwargs_used: set[str] | None = None + # Type context for `*` and `**` arg kinds. 
+ self.context = context + + def expand_actual_type( + self, + actual_type: Type, + actual_kind: nodes.ArgKind, + formal_name: str | None, + formal_kind: nodes.ArgKind, + allow_unpack: bool = False, + ) -> Type: + """Return the actual (caller) type(s) of a formal argument with the given kinds. + + If the actual argument is a tuple *args, return the next individual tuple item that + maps to the formal arg. + + If the actual argument is a TypedDict **kwargs, return the next matching typed dict + value type based on formal argument name and kind. + + This is supposed to be called for each formal, in order. Call multiple times per + formal if multiple actuals map to a formal. + """ + original_actual = actual_type + actual_type = get_proper_type(actual_type) + if actual_kind == nodes.ARG_STAR: + if isinstance(actual_type, TypeVarTupleType): + # This code path is hit when *Ts is passed to a callable and various + # special-handling didn't catch this. The best thing we can do is to use + # the upper bound. + actual_type = get_proper_type(actual_type.upper_bound) + if isinstance(actual_type, Instance) and actual_type.args: + from mypy.subtypes import is_subtype + + if is_subtype(actual_type, self.context.iterable_type): + return map_instance_to_supertype( + actual_type, self.context.iterable_type.type + ).args[0] + else: + # We cannot properly unpack anything other + # than `Iterable` type with `*`. + # Just return `Any`, other parts of code would raise + # a different error for improper use. + return AnyType(TypeOfAny.from_error) + elif isinstance(actual_type, TupleType): + # Get the next tuple item of a tuple *arg. + if self.tuple_index >= len(actual_type.items): + # Exhausted a tuple -- continue to the next *args. + self.tuple_index = 1 + else: + self.tuple_index += 1 + item = actual_type.items[self.tuple_index - 1] + if isinstance(item, UnpackType) and not allow_unpack: + # An unpack item that doesn't have special handling, use upper bound as above. + unpacked = get_proper_type(item.type) + if isinstance(unpacked, TypeVarTupleType): + fallback = get_proper_type(unpacked.upper_bound) + else: + fallback = unpacked + assert ( + isinstance(fallback, Instance) + and fallback.type.fullname == "builtins.tuple" + ) + item = fallback.args[0] + return item + elif isinstance(actual_type, ParamSpecType): + # ParamSpec is valid in *args but it can't be unpacked. + return actual_type + else: + return AnyType(TypeOfAny.from_error) + elif actual_kind == nodes.ARG_STAR2: + from mypy.subtypes import is_subtype + + if isinstance(actual_type, TypedDictType): + if self.kwargs_used is None: + self.kwargs_used = set() + if formal_kind != nodes.ARG_STAR2 and formal_name in actual_type.items: + # Lookup type based on keyword argument name. + assert formal_name is not None + else: + # Pick an arbitrary item if no specified keyword is expected. + formal_name = (set(actual_type.items.keys()) - self.kwargs_used).pop() + self.kwargs_used.add(formal_name) + return actual_type.items[formal_name] + elif isinstance(actual_type, Instance) and is_subtype( + actual_type, self.context.mapping_type + ): + # Only `Mapping` type can be unpacked with `**`. + # Other types will produce an error somewhere else. + return map_instance_to_supertype(actual_type, self.context.mapping_type.type).args[ + 1 + ] + elif isinstance(actual_type, ParamSpecType): + # ParamSpec is valid in **kwargs but it can't be unpacked. + return actual_type + else: + return AnyType(TypeOfAny.from_error) + else: + # No translation for other kinds -- 1:1 mapping. 
+ return original_actual diff --git a/.venv/lib/python3.12/site-packages/mypy/binder.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/binder.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..ccebcad Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/binder.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/binder.py b/.venv/lib/python3.12/site-packages/mypy/binder.py new file mode 100644 index 0000000..a83e652 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/binder.py @@ -0,0 +1,630 @@ +from __future__ import annotations + +from collections import defaultdict +from collections.abc import Iterator +from contextlib import contextmanager +from typing import NamedTuple, Optional, Union +from typing_extensions import TypeAlias as _TypeAlias + +from mypy.erasetype import remove_instance_last_known_values +from mypy.literals import Key, extract_var_from_literal_hash, literal, literal_hash, subkeys +from mypy.nodes import ( + LITERAL_NO, + Expression, + IndexExpr, + MemberExpr, + NameExpr, + RefExpr, + TypeInfo, + Var, +) +from mypy.options import Options +from mypy.subtypes import is_same_type, is_subtype +from mypy.typeops import make_simplified_union +from mypy.types import ( + AnyType, + Instance, + NoneType, + PartialType, + ProperType, + TupleType, + Type, + TypeOfAny, + TypeType, + TypeVarType, + UnionType, + UnpackType, + find_unpack_in_list, + get_proper_type, +) +from mypy.typevars import fill_typevars_with_any + +BindableExpression: _TypeAlias = Union[IndexExpr, MemberExpr, NameExpr] + + +class CurrentType(NamedTuple): + type: Type + from_assignment: bool + + +class Frame: + """A Frame represents a specific point in the execution of a program. + + It carries information about the current types of expressions at + that point, arising either from assignments to those expressions + or the result of isinstance checks and other type narrowing + operations. It also records whether it is possible to reach that + point at all. + + We add a new frame wherenever there is a new scope or control flow + branching. + + This information is not copied into a new Frame when it is pushed + onto the stack, so a given Frame only has information about types + that were assigned in that frame. + + Expressions are stored in dicts using 'literal hashes' as keys (type + "Key"). These are hashable values derived from expression AST nodes + (only those that can be narrowed). literal_hash(expr) is used to + calculate the hashes. Note that this isn't directly related to literal + types -- the concept predates literal types. + """ + + def __init__(self, id: int, conditional_frame: bool = False) -> None: + self.id = id + self.types: dict[Key, CurrentType] = {} + self.unreachable = False + self.conditional_frame = conditional_frame + self.suppress_unreachable_warnings = False + + def __repr__(self) -> str: + return f"Frame({self.id}, {self.types}, {self.unreachable}, {self.conditional_frame})" + + +Assigns = defaultdict[Expression, list[tuple[Type, Optional[Type]]]] + + +class ConditionalTypeBinder: + """Keep track of conditional types of variables. + + NB: Variables are tracked by literal hashes of expressions, so it is + possible to confuse the binder when there is aliasing. 
Example: + + class A: + a: int | str + + x = A() + lst = [x] + reveal_type(x.a) # int | str + x.a = 1 + reveal_type(x.a) # int + reveal_type(lst[0].a) # int | str + lst[0].a = 'a' + reveal_type(x.a) # int + reveal_type(lst[0].a) # str + """ + + # Stored assignments for situations with tuple/list lvalue and rvalue of union type. + # This maps an expression to a list of bound types for every item in the union type. + type_assignments: Assigns | None = None + + def __init__(self, options: Options) -> None: + # Each frame gets an increasing, distinct id. + self.next_id = 1 + + # The stack of frames currently used. These map + # literal_hash(expr) -- literals like 'foo.bar' -- + # to types. The last element of this list is the + # top-most, current frame. Each earlier element + # records the state as of when that frame was last + # on top of the stack. + self.frames = [Frame(self._get_id())] + + # For frames higher in the stack, we record the set of + # Frames that can escape there, either by falling off + # the end of the frame or by a loop control construct + # or raised exception. The last element of self.frames + # has no corresponding element in this list. + self.options_on_return: list[list[Frame]] = [] + + # Maps literal_hash(expr) to get_declaration(expr) + # for every expr stored in the binder + self.declarations: dict[Key, Type | None] = {} + # Set of other keys to invalidate if a key is changed, e.g. x -> {x.a, x[0]} + # Whenever a new key (e.g. x.a.b) is added, we update this + self.dependencies: dict[Key, set[Key]] = {} + + # Whether the last pop changed the newly top frame on exit + self.last_pop_changed = False + + # These are used to track control flow in try statements and loops. + self.try_frames: set[int] = set() + self.break_frames: list[int] = [] + self.continue_frames: list[int] = [] + + # If True, initial assignment to a simple variable (e.g. "x", but not "x.y") + # is added to the binder. This allows more precise narrowing and more + # flexible inference of variable types (--allow-redefinition-new). + self.bind_all = options.allow_redefinition_new + + # This tracks any externally visible changes in binder to invalidate + # expression caches when needed. + self.version = 0 + + def _get_id(self) -> int: + self.next_id += 1 + return self.next_id + + def _add_dependencies(self, key: Key, value: Key | None = None) -> None: + if value is None: + value = key + else: + self.dependencies.setdefault(key, set()).add(value) + for elt in subkeys(key): + self._add_dependencies(elt, value) + + def push_frame(self, conditional_frame: bool = False) -> Frame: + """Push a new frame into the binder.""" + f = Frame(self._get_id(), conditional_frame) + self.frames.append(f) + self.options_on_return.append([]) + return f + + def _put(self, key: Key, type: Type, from_assignment: bool, index: int = -1) -> None: + self.version += 1 + self.frames[index].types[key] = CurrentType(type, from_assignment) + + def _get(self, key: Key, index: int = -1) -> CurrentType | None: + if index < 0: + index += len(self.frames) + for i in range(index, -1, -1): + if key in self.frames[i].types: + return self.frames[i].types[key] + return None + + @classmethod + def can_put_directly(cls, expr: Expression) -> bool: + """Will `.put()` on this expression be successful? + + This is inlined in `.put()` because the logic is rather hot and must be kept + in sync. 
+ """ + return isinstance(expr, (IndexExpr, MemberExpr, NameExpr)) and literal(expr) > LITERAL_NO + + def put(self, expr: Expression, typ: Type, *, from_assignment: bool = True) -> None: + """Directly set the narrowed type of expression (if it supports it). + + This is used for isinstance() etc. Assignments should go through assign_type(). + """ + if not isinstance(expr, (IndexExpr, MemberExpr, NameExpr)): + return + if not literal(expr): + return + key = literal_hash(expr) + assert key is not None, "Internal error: binder tried to put non-literal" + if key not in self.declarations: + self.declarations[key] = get_declaration(expr) + self._add_dependencies(key) + self._put(key, typ, from_assignment) + + def unreachable(self) -> None: + self.version += 1 + self.frames[-1].unreachable = True + + def suppress_unreachable_warnings(self) -> None: + self.frames[-1].suppress_unreachable_warnings = True + + def get(self, expr: Expression) -> Type | None: + key = literal_hash(expr) + assert key is not None, "Internal error: binder tried to get non-literal" + found = self._get(key) + if found is None: + return None + return found.type + + def is_unreachable(self) -> bool: + # TODO: Copy the value of unreachable into new frames to avoid + # this traversal on every statement? + return any(f.unreachable for f in self.frames) + + def is_unreachable_warning_suppressed(self) -> bool: + return any(f.suppress_unreachable_warnings for f in self.frames) + + def cleanse(self, expr: Expression) -> None: + """Remove all references to a Node from the binder.""" + key = literal_hash(expr) + assert key is not None, "Internal error: binder tried cleanse non-literal" + self._cleanse_key(key) + + def _cleanse_key(self, key: Key) -> None: + """Remove all references to a key from the binder.""" + for frame in self.frames: + if key in frame.types: + del frame.types[key] + + def update_from_options(self, frames: list[Frame]) -> bool: + """Update the frame to reflect that each key will be updated + as in one of the frames. Return whether any item changes. + + If a key is declared as AnyType, only update it if all the + options are the same. + """ + all_reachable = all(not f.unreachable for f in frames) + if not all_reachable: + frames = [f for f in frames if not f.unreachable] + changed = False + keys = [key for f in frames for key in f.types] + if len(keys) > 1: + keys = list(set(keys)) + for key in keys: + current_value = self._get(key) + resulting_values = [f.types.get(key, current_value) for f in frames] + # Keys can be narrowed using two different semantics. The new semantics + # is enabled for plain variables when bind_all is true, and it allows + # variable types to be widened using subsequent assignments. This is + # tricky to support for instance attributes (primarily due to deferrals), + # so we don't use it for them. + old_semantics = not self.bind_all or extract_var_from_literal_hash(key) is None + if old_semantics and any(x is None for x in resulting_values): + # We didn't know anything about key before + # (current_value must be None), and we still don't + # know anything about key in at least one possible frame. + continue + + resulting_values = [x for x in resulting_values if x is not None] + + if all_reachable and all( + x is not None and not x.from_assignment for x in resulting_values + ): + # Do not synthesize a new type if we encountered a conditional block + # (if, while or match-case) without assignments. 
+ # See check-isinstance.test::testNoneCheckDoesNotMakeTypeVarOptional + # This is a safe assumption: the fact that we checked something with `is` + # or `isinstance` does not change the type of the value. + continue + + current_type = resulting_values[0] + assert current_type is not None + type = current_type.type + declaration_type = get_proper_type(self.declarations.get(key)) + if isinstance(declaration_type, AnyType): + # At this point resulting values can't contain None, see continue above + if not all( + t is not None and is_same_type(type, t.type) for t in resulting_values[1:] + ): + type = AnyType(TypeOfAny.from_another_any, source_any=declaration_type) + else: + possible_types = [] + for t in resulting_values: + assert t is not None + possible_types.append(t.type) + if len(possible_types) == 1: + # This is to avoid calling get_proper_type() unless needed, as this may + # interfere with our (hacky) TypeGuard support. + type = possible_types[0] + else: + type = make_simplified_union(possible_types) + # Legacy guard for corner case when the original type is TypeVarType. + if isinstance(declaration_type, TypeVarType) and not is_subtype( + type, declaration_type + ): + type = declaration_type + # Try simplifying resulting type for unions involving variadic tuples. + # Technically, everything is still valid without this step, but if we do + # not do this, this may create long unions after exiting an if check like: + # x: tuple[int, ...] + # if len(x) < 10: + # ... + # We want the type of x to be tuple[int, ...] after this block (if it is + # still equivalent to such type). + if isinstance(type, UnionType): + type = collapse_variadic_union(type) + if ( + old_semantics + and isinstance(type, ProperType) + and isinstance(type, UnionType) + ): + # Simplify away any extra Any's that were added to the declared + # type when popping a frame. + simplified = UnionType.make_union( + [t for t in type.items if not isinstance(get_proper_type(t), AnyType)] + ) + if simplified == self.declarations[key]: + type = simplified + if current_value is None or not is_same_type(type, current_value.type): + self._put(key, type, from_assignment=True) + changed = True + + self.frames[-1].unreachable = not frames + + return changed + + def pop_frame(self, can_skip: bool, fall_through: int) -> Frame: + """Pop a frame and return it. + + See frame_context() for documentation of fall_through. + """ + + if fall_through > 0: + self.allow_jump(-fall_through) + + result = self.frames.pop() + options = self.options_on_return.pop() + + if can_skip: + options.insert(0, self.frames[-1]) + + self.last_pop_changed = self.update_from_options(options) + + return result + + @contextmanager + def accumulate_type_assignments(self) -> Iterator[Assigns]: + """Push a new map to collect assigned types in multiassign from union. + + If this map is not None, actual binding is deferred until all items in + the union are processed (a union of collected items is later bound + manually by the caller). + """ + old_assignments = None + if self.type_assignments is not None: + old_assignments = self.type_assignments + self.type_assignments = defaultdict(list) + yield self.type_assignments + self.type_assignments = old_assignments + + def assign_type(self, expr: Expression, type: Type, declared_type: Type | None) -> None: + """Narrow type of expression through an assignment. + + Do nothing if the expression doesn't support narrowing. + + When not narrowing though an assignment (isinstance() etc.), use put() + directly. 
This omits some special-casing logic for assignments. + """ + # We should erase last known value in binder, because if we are using it, + # it means that the target is not final, and therefore can't hold a literal. + type = remove_instance_last_known_values(type) + + if self.type_assignments is not None: + # We are in a multiassign from union, defer the actual binding, + # just collect the types. + self.type_assignments[expr].append((type, declared_type)) + return + if not isinstance(expr, (IndexExpr, MemberExpr, NameExpr)): + return + if not literal(expr): + return + self.invalidate_dependencies(expr) + + if declared_type is None: + # Not sure why this happens. It seems to mainly happen in + # member initialization. + return + if not is_subtype(type, declared_type): + # Pretty sure this is only happens when there's a type error. + + # Ideally this function wouldn't be called if the + # expression has a type error, though -- do other kinds of + # errors cause this function to get called at invalid + # times? + return + + p_declared = get_proper_type(declared_type) + p_type = get_proper_type(type) + if isinstance(p_type, AnyType): + # Any type requires some special casing, for both historical reasons, + # and to optimise user experience without sacrificing correctness too much. + if isinstance(expr, RefExpr) and isinstance(expr.node, Var) and expr.node.is_inferred: + # First case: a local/global variable without explicit annotation, + # in this case we just assign Any (essentially following the SSA logic). + self.put(expr, type) + elif isinstance(p_declared, UnionType) and any( + isinstance(get_proper_type(item), NoneType) for item in p_declared.items + ): + # Second case: explicit optional type, in this case we optimize for a common + # pattern when an untyped value used as a fallback replacing None. + new_items = [ + type if isinstance(get_proper_type(item), NoneType) else item + for item in p_declared.items + ] + self.put(expr, UnionType(new_items)) + elif isinstance(p_declared, UnionType) and any( + isinstance(get_proper_type(item), AnyType) for item in p_declared.items + ): + # Third case: a union already containing Any (most likely from an un-imported + # name), in this case we allow assigning Any as well. + self.put(expr, type) + else: + # In all other cases we don't narrow to Any to minimize false negatives. + self.put(expr, declared_type) + elif isinstance(p_declared, AnyType): + # Mirroring the first case above, we don't narrow to a precise type if the variable + # has an explicit `Any` type annotation. + if isinstance(expr, RefExpr) and isinstance(expr.node, Var) and expr.node.is_inferred: + self.put(expr, type) + else: + self.put(expr, declared_type) + else: + self.put(expr, type) + + for i in self.try_frames: + # XXX This should probably not copy the entire frame, but + # just copy this variable into a single stored frame. + self.allow_jump(i) + + def invalidate_dependencies(self, expr: BindableExpression) -> None: + """Invalidate knowledge of types that include expr, but not expr itself. + + For example, when expr is foo.bar, invalidate foo.bar.baz. + + It is overly conservative: it invalidates globally, including + in code paths unreachable from here. 
+ """ + key = literal_hash(expr) + assert key is not None + for dep in self.dependencies.get(key, set()): + self._cleanse_key(dep) + + def allow_jump(self, index: int) -> None: + # self.frames and self.options_on_return have different lengths + # so make sure the index is positive + if index < 0: + index += len(self.options_on_return) + frame = Frame(self._get_id()) + for f in self.frames[index + 1 :]: + frame.types.update(f.types) + if f.unreachable: + frame.unreachable = True + self.options_on_return[index].append(frame) + + def handle_break(self) -> None: + self.allow_jump(self.break_frames[-1]) + self.unreachable() + + def handle_continue(self) -> None: + self.allow_jump(self.continue_frames[-1]) + self.unreachable() + + @contextmanager + def frame_context( + self, + *, + can_skip: bool, + fall_through: int = 1, + break_frame: int = 0, + continue_frame: int = 0, + conditional_frame: bool = False, + try_frame: bool = False, + ) -> Iterator[Frame]: + """Return a context manager that pushes/pops frames on enter/exit. + + If can_skip is True, control flow is allowed to bypass the + newly-created frame. + + If fall_through > 0, then it will allow control flow that + falls off the end of the frame to escape to its ancestor + `fall_through` levels higher. Otherwise control flow ends + at the end of the frame. + + If break_frame > 0, then 'break' statements within this frame + will jump out to the frame break_frame levels higher than the + frame created by this call to frame_context. Similarly for + continue_frame and 'continue' statements. + + If try_frame is true, then execution is allowed to jump at any + point within the newly created frame (or its descendants) to + its parent (i.e., to the frame that was on top before this + call to frame_context). + + After the context manager exits, self.last_pop_changed indicates + whether any types changed in the newly-topmost frame as a result + of popping this frame. + """ + assert len(self.frames) > 1 + + if break_frame: + self.break_frames.append(len(self.frames) - break_frame) + if continue_frame: + self.continue_frames.append(len(self.frames) - continue_frame) + if try_frame: + self.try_frames.add(len(self.frames) - 1) + + new_frame = self.push_frame(conditional_frame) + if try_frame: + # An exception may occur immediately + self.allow_jump(-1) + yield new_frame + self.pop_frame(can_skip, fall_through) + + if break_frame: + self.break_frames.pop() + if continue_frame: + self.continue_frames.pop() + if try_frame: + self.try_frames.remove(len(self.frames) - 1) + + @contextmanager + def top_frame_context(self) -> Iterator[Frame]: + """A variant of frame_context for use at the top level of + a namespace (module, function, or class). + """ + assert len(self.frames) == 1 + yield self.push_frame() + self.pop_frame(True, 0) + assert len(self.frames) == 1 + + +def get_declaration(expr: BindableExpression) -> Type | None: + """Get the declared or inferred type of a RefExpr expression. + + Return None if there is no type or the expression is not a RefExpr. + This can return None if the type hasn't been inferred yet. + """ + if isinstance(expr, RefExpr): + if isinstance(expr.node, Var): + type = expr.node.type + if not isinstance(get_proper_type(type), PartialType): + return type + elif isinstance(expr.node, TypeInfo): + return TypeType(fill_typevars_with_any(expr.node)) + return None + + +def collapse_variadic_union(typ: UnionType) -> Type: + """Simplify a union involving variadic tuple if possible. + + This will collapse a type like e.g. 
+ tuple[X, Z] | tuple[X, Y, Z] | tuple[X, Y, Y, *tuple[Y, ...], Z] + back to + tuple[X, *tuple[Y, ...], Z] + which is equivalent, but much simpler form of the same type. + """ + tuple_items = [] + other_items = [] + for t in typ.items: + p_t = get_proper_type(t) + if isinstance(p_t, TupleType): + tuple_items.append(p_t) + else: + other_items.append(t) + if len(tuple_items) <= 1: + # This type cannot be simplified further. + return typ + tuple_items = sorted(tuple_items, key=lambda t: len(t.items)) + first = tuple_items[0] + last = tuple_items[-1] + unpack_index = find_unpack_in_list(last.items) + if unpack_index is None: + return typ + unpack = last.items[unpack_index] + assert isinstance(unpack, UnpackType) + unpacked = get_proper_type(unpack.type) + if not isinstance(unpacked, Instance): + return typ + assert unpacked.type.fullname == "builtins.tuple" + suffix = last.items[unpack_index + 1 :] + + # Check that first item matches the expected pattern and infer prefix. + if len(first.items) < len(suffix): + return typ + if suffix and first.items[-len(suffix) :] != suffix: + return typ + if suffix: + prefix = first.items[: -len(suffix)] + else: + prefix = first.items + + # Check that all middle types match the expected pattern as well. + arg = unpacked.args[0] + for i, it in enumerate(tuple_items[1:-1]): + if it.items != prefix + [arg] * (i + 1) + suffix: + return typ + + # Check the last item (the one with unpack), and choose an appropriate simplified type. + if last.items != prefix + [arg] * (len(typ.items) - 1) + [unpack] + suffix: + return typ + if len(first.items) == 0: + simplified: Type = unpacked.copy_modified() + else: + simplified = TupleType(prefix + [unpack] + suffix, fallback=last.partial_fallback) + return UnionType.make_union([simplified] + other_items) diff --git a/.venv/lib/python3.12/site-packages/mypy/bogus_type.py b/.venv/lib/python3.12/site-packages/mypy/bogus_type.py new file mode 100644 index 0000000..1a61aba --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/bogus_type.py @@ -0,0 +1,27 @@ +"""A Bogus[T] type alias for marking when we subvert the type system + +We need this for compiling with mypyc, which inserts runtime +typechecks that cause problems when we subvert the type system. So +when compiling with mypyc, we turn those places into Any, while +keeping the types around for normal typechecks. + +Since this causes the runtime types to be Any, this is best used +in places where efficient access to properties is not important. +For those cases some other technique should be used. +""" + +from __future__ import annotations + +from typing import Any, TypeVar + +from mypy_extensions import FlexibleAlias + +T = TypeVar("T") + +# This won't ever be true at runtime, but we consider it true during +# mypyc compilations. +MYPYC = False +if MYPYC: + Bogus = FlexibleAlias[T, Any] +else: + Bogus = FlexibleAlias[T, T] diff --git a/.venv/lib/python3.12/site-packages/mypy/build.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/build.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..a17870d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/build.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/build.py b/.venv/lib/python3.12/site-packages/mypy/build.py new file mode 100644 index 0000000..aee099f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/build.py @@ -0,0 +1,3693 @@ +"""Facilities to analyze entire programs, including imported modules. 
+ +Parse and analyze the source files of a program in the correct order +(based on file dependencies), and collect the results. + +This module only directs a build, which is performed in multiple passes per +file. The individual passes are implemented in separate modules. + +The function build() is the main interface to this module. +""" + +# TODO: More consistent terminology, e.g. path/fnam, module/id, state/file + +from __future__ import annotations + +import collections +import contextlib +import gc +import json +import os +import platform +import re +import stat +import sys +import time +import types +from collections.abc import Iterator, Mapping, Sequence, Set as AbstractSet +from typing import TYPE_CHECKING, Any, Callable, ClassVar, Final, NoReturn, TextIO, TypedDict +from typing_extensions import TypeAlias as _TypeAlias + +from librt.internal import cache_version + +import mypy.semanal_main +from mypy.cache import ( + CACHE_VERSION, + CacheMeta, + ReadBuffer, + SerializedError, + WriteBuffer, + write_json, +) +from mypy.checker import TypeChecker +from mypy.error_formatter import OUTPUT_CHOICES, ErrorFormatter +from mypy.errors import CompileError, ErrorInfo, Errors, ErrorTuple, report_internal_error +from mypy.graph_utils import prepare_sccs, strongly_connected_components, topsort +from mypy.indirection import TypeIndirectionVisitor +from mypy.messages import MessageBuilder +from mypy.nodes import Import, ImportAll, ImportBase, ImportFrom, MypyFile, SymbolTable +from mypy.partially_defined import PossiblyUndefinedVariableVisitor +from mypy.semanal import SemanticAnalyzer +from mypy.semanal_pass1 import SemanticAnalyzerPreAnalysis +from mypy.util import ( + DecodeError, + decode_python_encoding, + get_mypy_comments, + hash_digest, + hash_digest_bytes, + is_stub_package_file, + is_sub_path_normabs, + is_typeshed_file, + module_prefix, + read_py_file, + time_ref, + time_spent_us, +) + +if TYPE_CHECKING: + from mypy.report import Reports # Avoid unconditional slow import + +from mypy import errorcodes as codes +from mypy.config_parser import parse_mypy_comments +from mypy.fixup import fixup_module +from mypy.freetree import free_tree +from mypy.fscache import FileSystemCache +from mypy.metastore import FilesystemMetadataStore, MetadataStore, SqliteMetadataStore +from mypy.modulefinder import ( + BuildSource as BuildSource, + BuildSourceSet as BuildSourceSet, + FindModuleCache, + ModuleNotFoundReason, + ModuleSearchResult, + SearchPaths, + compute_search_paths, +) +from mypy.nodes import Expression +from mypy.options import Options +from mypy.parse import parse +from mypy.plugin import ChainedPlugin, Plugin, ReportConfigContext +from mypy.plugins.default import DefaultPlugin +from mypy.renaming import LimitedVariableRenameVisitor, VariableRenameVisitor +from mypy.stats import dump_type_stats +from mypy.stubinfo import is_module_from_legacy_bundled_package, stub_distribution_name +from mypy.types import Type, instance_cache +from mypy.typestate import reset_global_state, type_state +from mypy.util import json_dumps, json_loads +from mypy.version import __version__ + +# Switch to True to produce debug output related to fine-grained incremental +# mode only that is useful during development. This produces only a subset of +# output compared to --verbose output. We use a global flag to enable this so +# that it's easy to enable this when running tests. +DEBUG_FINE_GRAINED: Final = False + +# These modules are special and should always come from typeshed. 
+CORE_BUILTIN_MODULES: Final = { + "builtins", + "typing", + "types", + "typing_extensions", + "mypy_extensions", + "_typeshed", + "_collections_abc", + "collections", + "collections.abc", + "sys", + "abc", +} + +# We are careful now, we can increase this in future if safe/useful. +MAX_GC_FREEZE_CYCLES = 1 + +Graph: _TypeAlias = dict[str, "State"] + + +class SCC: + """A simple class that represents a strongly connected component (import cycle).""" + + id_counter: ClassVar[int] = 0 + + def __init__(self, ids: set[str]) -> None: + self.id = SCC.id_counter + SCC.id_counter += 1 + # Ids of modules in this cycle. + self.mod_ids = ids + # Direct dependencies, should be populated by the caller. + self.deps: set[int] = set() + # Direct dependencies that have not been processed yet. + # Should be populated by the caller. This set may change during graph + # processing, while the above stays constant. + self.not_ready_deps: set[int] = set() + # SCCs that (directly) depend on this SCC. Note this is a list to + # make processing order more predictable. Dependents will be notified + # that they may be ready in the order in this list. + self.direct_dependents: list[int] = [] + + +# TODO: Get rid of BuildResult. We might as well return a BuildManager. +class BuildResult: + """The result of a successful build. + + Attributes: + manager: The build manager. + files: Dictionary from module name to related AST node. + types: Dictionary from parse tree node to its inferred type. + used_cache: Whether the build took advantage of a pre-existing cache + errors: List of error messages. + """ + + def __init__(self, manager: BuildManager, graph: Graph) -> None: + self.manager = manager + self.graph = graph + self.files = manager.modules + self.types = manager.all_types # Non-empty if export_types True in options + self.used_cache = manager.cache_enabled + self.errors: list[str] = [] # Filled in by build if desired + + +def build_error(msg: str) -> NoReturn: + raise CompileError([f"mypy: error: {msg}"]) + + +def build( + sources: list[BuildSource], + options: Options, + alt_lib_path: str | None = None, + flush_errors: Callable[[str | None, list[str], bool], None] | None = None, + fscache: FileSystemCache | None = None, + stdout: TextIO | None = None, + stderr: TextIO | None = None, + extra_plugins: Sequence[Plugin] | None = None, +) -> BuildResult: + """Analyze a program. + + A single call to build performs parsing, semantic analysis and optionally + type checking for the program *and* all imported modules, recursively. + + Return BuildResult if successful or only non-blocking errors were found; + otherwise raise CompileError. + + If a flush_errors callback is provided, all error messages will be + passed to it and the errors and messages fields of BuildResult and + CompileError (respectively) will be empty. Otherwise those fields will + report any error messages. + + Args: + sources: list of sources to build + options: build options + alt_lib_path: an additional directory for looking up library modules + (takes precedence over other directories) + flush_errors: optional function to flush errors after a file is processed + fscache: optionally a file-system cacher + + """ + # If we were not given a flush_errors, we use one that will populate those + # fields for callers that want the traditional API. + messages = [] + + # This is mostly for the benefit of tests that use builtins fixtures. 
+ instance_cache.reset() + + def default_flush_errors( + filename: str | None, new_messages: list[str], is_serious: bool + ) -> None: + messages.extend(new_messages) + + flush_errors = flush_errors or default_flush_errors + stdout = stdout or sys.stdout + stderr = stderr or sys.stderr + extra_plugins = extra_plugins or [] + + try: + result = _build( + sources, options, alt_lib_path, flush_errors, fscache, stdout, stderr, extra_plugins + ) + result.errors = messages + return result + except CompileError as e: + # CompileErrors raised from an errors object carry all the + # messages that have not been reported out by error streaming. + # Patch it up to contain either none or all none of the messages, + # depending on whether we are flushing errors. + serious = not e.use_stdout + flush_errors(None, e.messages, serious) + e.messages = messages + raise + + +def _build( + sources: list[BuildSource], + options: Options, + alt_lib_path: str | None, + flush_errors: Callable[[str | None, list[str], bool], None], + fscache: FileSystemCache | None, + stdout: TextIO, + stderr: TextIO, + extra_plugins: Sequence[Plugin], +) -> BuildResult: + if platform.python_implementation() == "CPython": + # Run gc less frequently, as otherwise we can spent a large fraction of + # cpu in gc. This seems the most reasonable place to tune garbage collection. + gc.set_threshold(200 * 1000, 30, 30) + + data_dir = default_data_dir() + fscache = fscache or FileSystemCache() + + search_paths = compute_search_paths(sources, options, data_dir, alt_lib_path) + + reports = None + if options.report_dirs: + # Import lazily to avoid slowing down startup. + from mypy.report import Reports + + reports = Reports(data_dir, options.report_dirs) + + source_set = BuildSourceSet(sources) + cached_read = fscache.read + errors = Errors(options, read_source=lambda path: read_py_file(path, cached_read)) + plugin, snapshot = load_plugins(options, errors, stdout, extra_plugins) + + # Validate error codes after plugins are loaded. + options.process_error_codes(error_callback=build_error) + + # Add catch-all .gitignore to cache dir if we created it + cache_dir_existed = os.path.isdir(options.cache_dir) + + # Construct a build manager object to hold state during the build. + # + # Ignore current directory prefix in error messages. 
+ manager = BuildManager( + data_dir, + search_paths, + ignore_prefix=os.getcwd(), + source_set=source_set, + reports=reports, + options=options, + version_id=__version__, + plugin=plugin, + plugins_snapshot=snapshot, + errors=errors, + error_formatter=None if options.output is None else OUTPUT_CHOICES.get(options.output), + flush_errors=flush_errors, + fscache=fscache, + stdout=stdout, + stderr=stderr, + ) + if manager.verbosity() >= 2: + manager.trace(repr(options)) + + reset_global_state() + try: + graph = dispatch(sources, manager, stdout) + if not options.fine_grained_incremental: + type_state.reset_all_subtype_caches() + if options.timing_stats is not None: + dump_timing_stats(options.timing_stats, graph) + if options.line_checking_stats is not None: + dump_line_checking_stats(options.line_checking_stats, graph) + return BuildResult(manager, graph) + finally: + t0 = time.time() + manager.metastore.commit() + manager.add_stats(cache_commit_time=time.time() - t0) + manager.log( + "Build finished in %.3f seconds with %d modules, and %d errors" + % ( + time.time() - manager.start_time, + len(manager.modules), + manager.errors.num_messages(), + ) + ) + manager.dump_stats() + if reports is not None: + # Finish the HTML or XML reports even if CompileError was raised. + reports.finish() + if not cache_dir_existed and os.path.isdir(options.cache_dir): + add_catch_all_gitignore(options.cache_dir) + exclude_from_backups(options.cache_dir) + if os.path.isdir(options.cache_dir): + record_missing_stub_packages(options.cache_dir, manager.missing_stub_packages) + + +def default_data_dir() -> str: + """Returns directory containing typeshed directory.""" + return os.path.dirname(__file__) + + +def normpath(path: str, options: Options) -> str: + """Convert path to absolute; but to relative in bazel mode. + + (Bazel's distributed cache doesn't like filesystem metadata to + end up in output files.) + """ + # TODO: Could we always use relpath? (A worry in non-bazel + # mode would be that a moved file may change its full module + # name without changing its size, mtime or hash.) + if options.bazel: + return os.path.relpath(path) + else: + return os.path.abspath(path) + + +# NOTE: dependencies + suppressed == all reachable imports; +# suppressed contains those reachable imports that were prevented by +# silent mode or simply not found. + + +# Metadata for the fine-grained dependencies file associated with a module. +class FgDepMeta(TypedDict): + path: str + mtime: int + + +# Priorities used for imports. (Here, top-level includes inside a class.) +# These are used to determine a more predictable order in which the +# nodes in an import cycle are processed. +PRI_HIGH: Final = 5 # top-level "from X import blah" +PRI_MED: Final = 10 # top-level "import X" +PRI_LOW: Final = 20 # either form inside a function +PRI_MYPY: Final = 25 # inside "if MYPY" or "if TYPE_CHECKING" +PRI_INDIRECT: Final = 30 # an indirect dependency +PRI_ALL: Final = 99 # include all priorities + + +def import_priority(imp: ImportBase, toplevel_priority: int) -> int: + """Compute import priority from an import node.""" + if not imp.is_top_level: + # Inside a function + return PRI_LOW + if imp.is_mypy_only: + # Inside "if MYPY" or "if typing.TYPE_CHECKING" + return max(PRI_MYPY, toplevel_priority) + # A regular import; priority determined by argument. + return toplevel_priority + + +def load_plugins_from_config( + options: Options, errors: Errors, stdout: TextIO +) -> tuple[list[Plugin], dict[str, str]]: + """Load all configured plugins. 
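+
+    Each entry in options.plugins may be either an importable module name or a
+    path to a .py file (resolved relative to the config file), optionally
+    followed by ":<function>" naming the entry point (default: "plugin").
+    For example, in a mypy.ini this might look like:
+
+        [mypy]
+        plugins = custom_plugin, ./plugins/other.py:plugin_entry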
+ + Return a list of all the loaded plugins from the config file. + The second return value is a snapshot of versions/hashes of loaded user + plugins (for cache validation). + """ + import importlib + + snapshot: dict[str, str] = {} + + if not options.config_file: + return [], snapshot + + line = find_config_file_line_number(options.config_file, "mypy", "plugins") + if line == -1: + line = 1 # We need to pick some line number that doesn't look too confusing + + def plugin_error(message: str) -> NoReturn: + errors.report(line, 0, message) + errors.raise_error(use_stdout=False) + + custom_plugins: list[Plugin] = [] + errors.set_file(options.config_file, None, options) + for plugin_path in options.plugins: + func_name = "plugin" + plugin_dir: str | None = None + if ":" in os.path.basename(plugin_path): + plugin_path, func_name = plugin_path.rsplit(":", 1) + if plugin_path.endswith(".py"): + # Plugin paths can be relative to the config file location. + plugin_path = os.path.join(os.path.dirname(options.config_file), plugin_path) + if not os.path.isfile(plugin_path): + plugin_error(f'Can\'t find plugin "{plugin_path}"') + # Use an absolute path to avoid populating the cache entry + # for 'tmp' during tests, since it will be different in + # different tests. + plugin_dir = os.path.abspath(os.path.dirname(plugin_path)) + fnam = os.path.basename(plugin_path) + module_name = fnam[:-3] + sys.path.insert(0, plugin_dir) + elif re.search(r"[\\/]", plugin_path): + fnam = os.path.basename(plugin_path) + plugin_error(f'Plugin "{fnam}" does not have a .py extension') + else: + module_name = plugin_path + + try: + module = importlib.import_module(module_name) + except Exception as exc: + plugin_error(f'Error importing plugin "{plugin_path}": {exc}') + finally: + if plugin_dir is not None: + assert sys.path[0] == plugin_dir + del sys.path[0] + + if not hasattr(module, func_name): + plugin_error( + 'Plugin "{}" does not define entry point function "{}"'.format( + plugin_path, func_name + ) + ) + + try: + plugin_type = getattr(module, func_name)(__version__) + except Exception: + print(f"Error calling the plugin(version) entry point of {plugin_path}\n", file=stdout) + raise # Propagate to display traceback + + if not isinstance(plugin_type, type): + plugin_error( + 'Type object expected as the return value of "plugin"; got {!r} (in {})'.format( + plugin_type, plugin_path + ) + ) + if not issubclass(plugin_type, Plugin): + plugin_error( + 'Return value of "plugin" must be a subclass of "mypy.plugin.Plugin" ' + "(in {})".format(plugin_path) + ) + try: + custom_plugins.append(plugin_type(options)) + snapshot[module_name] = take_module_snapshot(module) + except Exception: + print(f"Error constructing plugin instance of {plugin_type.__name__}\n", file=stdout) + raise # Propagate to display traceback + + return custom_plugins, snapshot + + +def load_plugins( + options: Options, errors: Errors, stdout: TextIO, extra_plugins: Sequence[Plugin] +) -> tuple[Plugin, dict[str, str]]: + """Load all configured plugins. + + Return a plugin that encapsulates all plugins chained together. Always + at least include the default plugin (it's last in the chain). + The second return value is a snapshot of versions/hashes of loaded user + plugins (for cache validation). 
+ """ + custom_plugins, snapshot = load_plugins_from_config(options, errors, stdout) + + custom_plugins += extra_plugins + + default_plugin: Plugin = DefaultPlugin(options) + if not custom_plugins: + return default_plugin, snapshot + + # Custom plugins take precedence over the default plugin. + return ChainedPlugin(options, custom_plugins + [default_plugin]), snapshot + + +def take_module_snapshot(module: types.ModuleType) -> str: + """Take plugin module snapshot by recording its version and hash. + + We record _both_ hash and the version to detect more possible changes + (e.g. if there is a change in modules imported by a plugin). + """ + if hasattr(module, "__file__"): + assert module.__file__ is not None + with open(module.__file__, "rb") as f: + digest = hash_digest(f.read()) + else: + digest = "unknown" + ver = getattr(module, "__version__", "none") + return f"{ver}:{digest}" + + +def find_config_file_line_number(path: str, section: str, setting_name: str) -> int: + """Return the approximate location of setting_name within mypy config file. + + Return -1 if can't determine the line unambiguously. + """ + in_desired_section = False + try: + results = [] + with open(path, encoding="UTF-8") as f: + for i, line in enumerate(f): + line = line.strip() + if line.startswith("[") and line.endswith("]"): + current_section = line[1:-1].strip() + in_desired_section = current_section == section + elif in_desired_section and re.match(rf"{setting_name}\s*=", line): + results.append(i + 1) + if len(results) == 1: + return results[0] + except OSError: + pass + return -1 + + +class BuildManager: + """This class holds shared state for building a mypy program. + + It is used to coordinate parsing, import processing, semantic + analysis and type checking. The actual build steps are carried + out by dispatch(). + + Attributes: + data_dir: Mypy data directory (contains stubs) + search_paths: SearchPaths instance indicating where to look for modules + modules: Mapping of module ID to MypyFile (shared by the passes) + semantic_analyzer: + Semantic analyzer, pass 2 + all_types: Map {Expression: Type} from all modules (enabled by export_types) + options: Build options + missing_modules: Set of modules that could not be imported encountered so far + stale_modules: Set of modules that needed to be rechecked (only used by tests) + fg_deps_meta: Metadata for fine-grained dependencies caches associated with modules + fg_deps: A fine-grained dependency map + version_id: The current mypy version (based on commit id when possible) + plugin: Active mypy plugin(s) + plugins_snapshot: + Snapshot of currently active user plugins (versions and hashes) + old_plugins_snapshot: + Plugins snapshot from previous incremental run (or None in + non-incremental mode and if cache was not found) + errors: Used for reporting all errors + flush_errors: A function for processing errors after each SCC + cache_enabled: Whether cache is being read. This is set based on options, + but is disabled if fine-grained cache loading fails + and after an initial fine-grained load. This doesn't + determine whether we write cache files or not. + quickstart_state: + A cache of filename -> mtime/size/hash info used to avoid + needing to hash source files when using a cache with mismatching mtimes + stats: Dict with various instrumentation numbers, it is used + not only for debugging, but also required for correctness, + in particular to check consistency of the fine-grained dependency cache. 
+ fscache: A file system cacher + ast_cache: AST cache to speed up mypy daemon + """ + + def __init__( + self, + data_dir: str, + search_paths: SearchPaths, + ignore_prefix: str, + source_set: BuildSourceSet, + reports: Reports | None, + options: Options, + version_id: str, + plugin: Plugin, + plugins_snapshot: dict[str, str], + errors: Errors, + flush_errors: Callable[[str | None, list[str], bool], None], + fscache: FileSystemCache, + stdout: TextIO, + stderr: TextIO, + error_formatter: ErrorFormatter | None = None, + ) -> None: + self.stats: dict[str, Any] = {} # Values are ints or floats + self.stdout = stdout + self.stderr = stderr + self.start_time = time.time() + self.data_dir = data_dir + self.errors = errors + self.errors.set_ignore_prefix(ignore_prefix) + self.error_formatter = error_formatter + self.search_paths = search_paths + self.source_set = source_set + self.reports = reports + self.options = options + self.version_id = version_id + self.modules: dict[str, MypyFile] = {} + self.import_map: dict[str, set[str]] = {} + self.missing_modules: set[str] = set() + self.fg_deps_meta: dict[str, FgDepMeta] = {} + # fg_deps holds the dependencies of every module that has been + # processed. We store this in BuildManager so that we can compute + # dependencies as we go, which allows us to free ASTs and type information, + # saving a ton of memory on net. + self.fg_deps: dict[str, set[str]] = {} + # Always convert the plugin to a ChainedPlugin so that it can be manipulated if needed + if not isinstance(plugin, ChainedPlugin): + plugin = ChainedPlugin(options, [plugin]) + self.plugin = plugin + # Set of namespaces (module or class) that are being populated during semantic + # analysis and may have missing definitions. + self.incomplete_namespaces: set[str] = set() + self.semantic_analyzer = SemanticAnalyzer( + self.modules, + self.missing_modules, + self.incomplete_namespaces, + self.errors, + self.plugin, + self.import_map, + ) + self.all_types: dict[Expression, Type] = {} # Enabled by export_types + self.indirection_detector = TypeIndirectionVisitor() + self.stale_modules: set[str] = set() + self.rechecked_modules: set[str] = set() + self.flush_errors = flush_errors + has_reporters = reports is not None and reports.reporters + self.cache_enabled = ( + options.incremental + and (not options.fine_grained_incremental or options.use_fine_grained_cache) + and not has_reporters + ) + self.fscache = fscache + self.find_module_cache = FindModuleCache( + self.search_paths, self.fscache, self.options, source_set=self.source_set + ) + for module in CORE_BUILTIN_MODULES: + if options.use_builtins_fixtures: + continue + path = self.find_module_cache.find_module(module, fast_path=True) + if not isinstance(path, str): + raise CompileError( + [f"Failed to find builtin module {module}, perhaps typeshed is broken?"] + ) + if is_typeshed_file(options.abs_custom_typeshed_dir, path) or is_stub_package_file( + path + ): + continue + + raise CompileError( + [ + f'mypy: "{os.path.relpath(path)}" shadows library module "{module}"', + f'note: A user-defined top-level module with name "{module}" is not supported', + ] + ) + + self.metastore = create_metastore(options) + + # a mapping from source files to their corresponding shadow files + # for efficient lookup + self.shadow_map: dict[str, str] = {} + if self.options.shadow_file is not None: + self.shadow_map = dict(self.options.shadow_file) + # a mapping from each file being typechecked to its possible shadow file + self.shadow_equivalence_map: dict[str, str | 
None] = {} + self.plugin = plugin + self.plugins_snapshot = plugins_snapshot + self.old_plugins_snapshot = read_plugins_snapshot(self) + self.quickstart_state = read_quickstart_file(options, self.stdout) + # Fine grained targets (module top levels and top level functions) processed by + # the semantic analyzer, used only for testing. Currently used only by the new + # semantic analyzer. Tuple of module and target name. + self.processed_targets: list[tuple[str, str]] = [] + # Missing stub packages encountered. + self.missing_stub_packages: set[str] = set() + # Cache for mypy ASTs that have completed semantic analysis + # pass 1. When multiple files are added to the build in a + # single daemon increment, only one of the files gets added + # per step and the others are discarded. This gets repeated + # until all the files have been added. This means that a + # new file can be processed O(n**2) times. This cache + # avoids most of this redundant work. + self.ast_cache: dict[str, tuple[MypyFile, list[ErrorInfo]]] = {} + # Number of times we used GC optimization hack for fresh SCCs. + self.gc_freeze_cycles = 0 + # Mapping from SCC id to corresponding SCC instance. This is populated + # in process_graph(). + self.scc_by_id: dict[int, SCC] = {} + # Global topological order for SCCs. This exists to make order of processing + # SCCs more predictable. + self.top_order: list[int] = [] + # Stale SCCs that are queued for processing. Note that as of now we have just + # one worker, that is the same process. In the future, we will support multiple + # parallel worker processes. + self.scc_queue: list[SCC] = [] + # SCCs that have been fully processed. + self.done_sccs: set[int] = set() + + def dump_stats(self) -> None: + if self.options.dump_build_stats: + print("Stats:") + for key, value in sorted(self.stats_summary().items()): + print(f"{key + ':':24}{value}") + + def use_fine_grained_cache(self) -> bool: + return self.cache_enabled and self.options.use_fine_grained_cache + + def maybe_swap_for_shadow_path(self, path: str) -> str: + if not self.shadow_map: + return path + + path = normpath(path, self.options) + + previously_checked = path in self.shadow_equivalence_map + if not previously_checked: + for source, shadow in self.shadow_map.items(): + if self.fscache.samefile(path, source): + self.shadow_equivalence_map[path] = shadow + break + else: + self.shadow_equivalence_map[path] = None + + shadow_file = self.shadow_equivalence_map.get(path) + return shadow_file if shadow_file else path + + def get_stat(self, path: str) -> os.stat_result | None: + return self.fscache.stat_or_none(self.maybe_swap_for_shadow_path(path)) + + def getmtime(self, path: str) -> int: + """Return a file's mtime; but 0 in bazel mode. + + (Bazel's distributed cache doesn't like filesystem metadata to + end up in output files.) + """ + if self.options.bazel: + return 0 + else: + return int(self.metastore.getmtime(path)) + + def correct_rel_imp(self, file: MypyFile, imp: ImportFrom | ImportAll) -> str: + """Function to correct for relative imports.""" + file_id = file.fullname + rel = imp.relative + if rel == 0: + return imp.id + if os.path.basename(file.path).startswith("__init__."): + rel -= 1 + if rel != 0: + file_id = ".".join(file_id.split(".")[:-rel]) + new_id = file_id + "." 
+ imp.id if imp.id else file_id + + if not new_id: + self.errors.set_file(file.path, file.name, self.options) + self.errors.report( + imp.line, 0, "No parent module -- cannot perform relative import", blocker=True + ) + + return new_id + + def all_imported_modules_in_file(self, file: MypyFile) -> list[tuple[int, str, int]]: + """Find all reachable import statements in a file. + + Return list of tuples (priority, module id, import line number) + for all modules imported in file; lower numbers == higher priority. + + Can generate blocking errors on bogus relative imports. + """ + res: list[tuple[int, str, int]] = [] + for imp in file.imports: + if not imp.is_unreachable: + if isinstance(imp, Import): + pri = import_priority(imp, PRI_MED) + ancestor_pri = import_priority(imp, PRI_LOW) + for id, _ in imp.ids: + res.append((pri, id, imp.line)) + ancestor_parts = id.split(".")[:-1] + ancestors = [] + for part in ancestor_parts: + ancestors.append(part) + res.append((ancestor_pri, ".".join(ancestors), imp.line)) + elif isinstance(imp, ImportFrom): + cur_id = self.correct_rel_imp(file, imp) + all_are_submodules = True + # Also add any imported names that are submodules. + pri = import_priority(imp, PRI_MED) + for name, __ in imp.names: + sub_id = cur_id + "." + name + if self.is_module(sub_id): + res.append((pri, sub_id, imp.line)) + else: + all_are_submodules = False + # Add cur_id as a dependency, even if all the + # imports are submodules. Processing import from will try + # to look through cur_id, so we should depend on it. + # As a workaround for some bugs in cycle handling (#4498), + # if all the imports are submodules, do the import at a lower + # priority. + pri = import_priority(imp, PRI_HIGH if not all_are_submodules else PRI_LOW) + res.append((pri, cur_id, imp.line)) + elif isinstance(imp, ImportAll): + pri = import_priority(imp, PRI_HIGH) + res.append((pri, self.correct_rel_imp(file, imp), imp.line)) + + # Sort such that module (e.g. foo.bar.baz) comes before its ancestors (e.g. foo + # and foo.bar) so that, if FindModuleCache finds the target module in a + # package marked with py.typed underneath a namespace package installed in + # site-packages, (gasp), that cache's knowledge of the ancestors + # (aka FindModuleCache.ns_ancestors) can be primed when it is asked to find + # the parent. + res.sort(key=lambda x: -x[1].count(".")) + return res + + def is_module(self, id: str) -> bool: + """Is there a file in the file system corresponding to module id?""" + return find_module_simple(id, self) is not None + + def parse_file( + self, id: str, path: str, source: str, ignore_errors: bool, options: Options + ) -> MypyFile: + """Parse the source of a file with the given name. + + Raise CompileError if there is a parse error. + """ + t0 = time.time() + if ignore_errors: + self.errors.ignored_files.add(path) + tree = parse(source, path, id, self.errors, options=options) + tree._fullname = id + self.add_stats( + files_parsed=1, + modules_parsed=int(not tree.is_stub), + stubs_parsed=int(tree.is_stub), + parse_time=time.time() - t0, + ) + + if self.errors.is_blockers(): + self.log("Bailing due to parse errors") + self.errors.raise_error() + + self.errors.set_file_ignored_lines(path, tree.ignored_lines, ignore_errors) + return tree + + def load_fine_grained_deps(self, id: str) -> dict[str, set[str]]: + t0 = time.time() + if id in self.fg_deps_meta: + # TODO: Assert deps file wasn't changed. 
+ deps = json_loads(self.metastore.read(self.fg_deps_meta[id]["path"])) + else: + deps = {} + val = {k: set(v) for k, v in deps.items()} + self.add_stats(load_fg_deps_time=time.time() - t0) + return val + + def report_file( + self, file: MypyFile, type_map: dict[Expression, Type], options: Options + ) -> None: + if self.reports is not None and self.source_set.is_source(file): + self.reports.file(file, self.modules, type_map, options) + + def verbosity(self) -> int: + return self.options.verbosity + + def log(self, *message: str) -> None: + if self.verbosity() >= 1: + if message: + print("LOG: ", *message, file=self.stderr) + else: + print(file=self.stderr) + self.stderr.flush() + + def log_fine_grained(self, *message: str) -> None: + if self.verbosity() >= 1: + self.log("fine-grained:", *message) + elif mypy.build.DEBUG_FINE_GRAINED: + # Output log in a simplified format that is quick to browse. + if message: + print(*message, file=self.stderr) + else: + print(file=self.stderr) + self.stderr.flush() + + def trace(self, *message: str) -> None: + if self.verbosity() >= 2: + print("TRACE:", *message, file=self.stderr) + self.stderr.flush() + + def add_stats(self, **kwds: Any) -> None: + for key, value in kwds.items(): + if key in self.stats: + self.stats[key] += value + else: + self.stats[key] = value + + def stats_summary(self) -> Mapping[str, object]: + return self.stats + + def submit(self, sccs: list[SCC]) -> None: + """Submit a stale SCC for processing in current process.""" + self.scc_queue.extend(sccs) + + def wait_for_done(self, graph: Graph) -> tuple[list[SCC], bool]: + """Wait for a stale SCC processing (in process) to finish. + + Return next processed SCC and whether we have more in the queue. + This emulates the API we will have for parallel processing + in multiple worker processes. + """ + if not self.scc_queue: + return [], False + next_scc = self.scc_queue.pop(0) + process_stale_scc(graph, next_scc, self) + return [next_scc], bool(self.scc_queue) + + +def deps_to_json(x: dict[str, set[str]]) -> bytes: + return json_dumps({k: list(v) for k, v in x.items()}) + + +# File for storing metadata about all the fine-grained dependency caches +DEPS_META_FILE: Final = "@deps.meta.json" +# File for storing fine-grained dependencies that didn't a parent in the build +DEPS_ROOT_FILE: Final = "@root.deps.json" + +# The name of the fake module used to store fine-grained dependencies that +# have no other place to go. +FAKE_ROOT_MODULE: Final = "@root" + + +def write_deps_cache( + rdeps: dict[str, dict[str, set[str]]], manager: BuildManager, graph: Graph +) -> None: + """Write cache files for fine-grained dependencies. + + Serialize fine-grained dependencies map for fine-grained mode. + + Dependencies on some module 'm' is stored in the dependency cache + file m.deps.json. This entails some spooky action at a distance: + if module 'n' depends on 'm', that produces entries in m.deps.json. + When there is a dependency on a module that does not exist in the + build, it is stored with its first existing parent module. If no + such module exists, it is stored with the fake module FAKE_ROOT_MODULE. 
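+
+    For example, if a target n.f depends on something defined in module m, the
+    trigger for that definition (with n.f among its targets) is written to m's
+    deps file, not n's.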
+ + This means that the validity of the fine-grained dependency caches + are a global property, so we store validity checking information for + fine-grained dependencies in a global cache file: + * We take a snapshot of current sources to later check consistency + between the fine-grained dependency cache and module cache metadata + * We store the mtime of all the dependency files to verify they + haven't changed + """ + metastore = manager.metastore + + error = False + + fg_deps_meta = manager.fg_deps_meta.copy() + + for id in rdeps: + if id != FAKE_ROOT_MODULE: + _, _, deps_json = get_cache_names(id, graph[id].xpath, manager.options) + else: + deps_json = DEPS_ROOT_FILE + assert deps_json + manager.log("Writing deps cache", deps_json) + if not manager.metastore.write(deps_json, deps_to_json(rdeps[id])): + manager.log(f"Error writing fine-grained deps JSON file {deps_json}") + error = True + else: + fg_deps_meta[id] = {"path": deps_json, "mtime": manager.getmtime(deps_json)} + + meta_snapshot: dict[str, str] = {} + for id, st in graph.items(): + # If we didn't parse a file (so it doesn't have a + # source_hash), then it must be a module with a fresh cache, + # so use the hash from that. + if st.source_hash: + hash = st.source_hash + else: + if st.meta: + hash = st.meta.hash + else: + hash = "" + meta_snapshot[id] = hash + + meta = {"snapshot": meta_snapshot, "deps_meta": fg_deps_meta} + + if not metastore.write(DEPS_META_FILE, json_dumps(meta)): + manager.log(f"Error writing fine-grained deps meta JSON file {DEPS_META_FILE}") + error = True + + if error: + manager.errors.set_file(_cache_dir_prefix(manager.options), None, manager.options) + manager.errors.report(0, 0, "Error writing fine-grained dependencies cache", blocker=True) + + +def invert_deps(deps: dict[str, set[str]], graph: Graph) -> dict[str, dict[str, set[str]]]: + """Splits fine-grained dependencies based on the module of the trigger. + + Returns a dictionary from module ids to all dependencies on that + module. Dependencies not associated with a module in the build will be + associated with the nearest parent module that is in the build, or the + fake module FAKE_ROOT_MODULE if none are. + """ + # Lazy import to speed up startup + from mypy.server.target import trigger_to_target + + # Prepopulate the map for all the modules that have been processed, + # so that we always generate files for processed modules (even if + # there aren't any dependencies to them.) + rdeps: dict[str, dict[str, set[str]]] = {id: {} for id, st in graph.items() if st.tree} + for trigger, targets in deps.items(): + module = module_prefix(graph, trigger_to_target(trigger)) + if not module or not graph[module].tree: + module = FAKE_ROOT_MODULE + + mod_rdeps = rdeps.setdefault(module, {}) + mod_rdeps.setdefault(trigger, set()).update(targets) + + return rdeps + + +def generate_deps_for_cache(manager: BuildManager, graph: Graph) -> dict[str, dict[str, set[str]]]: + """Generate fine-grained dependencies into a form suitable for serializing. + + This does a couple things: + 1. Splits fine-grained deps based on the module of the trigger + 2. For each module we generated fine-grained deps for, load any previous + deps and merge them in. + + Returns a dictionary from module ids to all dependencies on that + module. Dependencies not associated with a module in the build will be + associated with the nearest parent module that is in the build, or the + fake module FAKE_ROOT_MODULE if none are. 
+ """ + from mypy.server.deps import merge_dependencies # Lazy import to speed up startup + + # Split the dependencies out into based on the module that is depended on. + rdeps = invert_deps(manager.fg_deps, graph) + + # We can't just clobber existing dependency information, so we + # load the deps for every module we've generated new dependencies + # to and merge the new deps into them. + for module, mdeps in rdeps.items(): + old_deps = manager.load_fine_grained_deps(module) + merge_dependencies(old_deps, mdeps) + + return rdeps + + +PLUGIN_SNAPSHOT_FILE: Final = "@plugins_snapshot.json" + + +def write_plugins_snapshot(manager: BuildManager) -> None: + """Write snapshot of versions and hashes of currently active plugins.""" + snapshot = json_dumps(manager.plugins_snapshot) + if ( + not manager.metastore.write(PLUGIN_SNAPSHOT_FILE, snapshot) + and manager.options.cache_dir != os.devnull + ): + manager.errors.set_file(_cache_dir_prefix(manager.options), None, manager.options) + manager.errors.report(0, 0, "Error writing plugins snapshot", blocker=True) + + +def read_plugins_snapshot(manager: BuildManager) -> dict[str, str] | None: + """Read cached snapshot of versions and hashes of plugins from previous run.""" + snapshot = _load_json_file( + PLUGIN_SNAPSHOT_FILE, + manager, + log_success="Plugins snapshot ", + log_error="Could not load plugins snapshot: ", + ) + if snapshot is None: + return None + if not isinstance(snapshot, dict): + manager.log(f"Could not load plugins snapshot: cache is not a dict: {type(snapshot)}") # type: ignore[unreachable] + return None + return snapshot + + +def read_quickstart_file( + options: Options, stdout: TextIO +) -> dict[str, tuple[float, int, str]] | None: + quickstart: dict[str, tuple[float, int, str]] | None = None + if options.quickstart_file: + # This is very "best effort". If the file is missing or malformed, + # just ignore it. + raw_quickstart: dict[str, Any] = {} + try: + with open(options.quickstart_file, "rb") as f: + raw_quickstart = json_loads(f.read()) + + quickstart = {} + for file, (x, y, z) in raw_quickstart.items(): + quickstart[file] = (x, y, z) + except Exception as e: + print(f"Warning: Failed to load quickstart file: {str(e)}\n", file=stdout) + return quickstart + + +def read_deps_cache(manager: BuildManager, graph: Graph) -> dict[str, FgDepMeta] | None: + """Read and validate the fine-grained dependencies cache. + + See the write_deps_cache documentation for more information on + the details of the cache. + + Returns None if the cache was invalid in some way. + """ + deps_meta = _load_json_file( + DEPS_META_FILE, + manager, + log_success="Deps meta ", + log_error="Could not load fine-grained dependency metadata: ", + ) + if deps_meta is None: + return None + meta_snapshot = deps_meta["snapshot"] + # Take a snapshot of the source hashes from all the metas we found. + # (Including the ones we rejected because they were out of date.) + # We use this to verify that they match up with the proto_deps. + current_meta_snapshot = { + id: st.meta_source_hash for id, st in graph.items() if st.meta_source_hash is not None + } + + common = set(meta_snapshot.keys()) & set(current_meta_snapshot.keys()) + if any(meta_snapshot[id] != current_meta_snapshot[id] for id in common): + # TODO: invalidate also if options changed (like --strict-optional)? 
+ manager.log("Fine-grained dependencies cache inconsistent, ignoring") + return None + + module_deps_metas = deps_meta["deps_meta"] + assert isinstance(module_deps_metas, dict) + if not manager.options.skip_cache_mtime_checks: + for meta in module_deps_metas.values(): + try: + matched = manager.getmtime(meta["path"]) == meta["mtime"] + except FileNotFoundError: + matched = False + if not matched: + manager.log(f"Invalid or missing fine-grained deps cache: {meta['path']}") + return None + + return module_deps_metas + + +def _load_ff_file(file: str, manager: BuildManager, log_error: str) -> bytes | None: + t0 = time.time() + try: + data = manager.metastore.read(file) + except OSError: + manager.log(log_error + file) + return None + manager.add_stats(metastore_read_time=time.time() - t0) + return data + + +def _load_json_file( + file: str, manager: BuildManager, log_success: str, log_error: str +) -> dict[str, Any] | None: + """A simple helper to read a JSON file with logging.""" + t0 = time.time() + try: + data = manager.metastore.read(file) + except OSError: + manager.log(log_error + file) + return None + manager.add_stats(metastore_read_time=time.time() - t0) + # Only bother to compute the log message if we are logging it, since it could be big + if manager.verbosity() >= 2: + manager.trace(log_success + data.rstrip().decode()) + try: + t1 = time.time() + result = json_loads(data) + manager.add_stats(data_file_load_time=time.time() - t1) + except json.JSONDecodeError: + manager.errors.set_file(file, None, manager.options) + manager.errors.report( + -1, + -1, + "Error reading JSON file;" + " you likely have a bad cache.\n" + "Try removing the {cache_dir} directory" + " and run mypy again.".format(cache_dir=manager.options.cache_dir), + blocker=True, + ) + return None + else: + assert isinstance(result, dict) + return result + + +def _cache_dir_prefix(options: Options) -> str: + """Get current cache directory (or file if id is given).""" + if options.bazel: + # This is needed so the cache map works. + return os.curdir + cache_dir = options.cache_dir + pyversion = options.python_version + base = os.path.join(cache_dir, "%d.%d" % pyversion) + return base + + +def add_catch_all_gitignore(target_dir: str) -> None: + """Add catch-all .gitignore to an existing directory. + + No-op if the .gitignore already exists. + """ + gitignore = os.path.join(target_dir, ".gitignore") + try: + with open(gitignore, "x") as f: + print("# Automatically created by mypy", file=f) + print("*", file=f) + except FileExistsError: + pass + + +def exclude_from_backups(target_dir: str) -> None: + """Exclude the directory from various archives and backups supporting CACHEDIR.TAG. + + If the CACHEDIR.TAG file exists the function is a no-op. + """ + cachedir_tag = os.path.join(target_dir, "CACHEDIR.TAG") + try: + with open(cachedir_tag, "x") as f: + f.write( + """Signature: 8a477f597d28d172789f06886806bc55 +# This file is a cache directory tag automatically created by mypy. +# For information about cache directory tags see https://bford.info/cachedir/ +""" + ) + except FileExistsError: + pass + + +def create_metastore(options: Options) -> MetadataStore: + """Create the appropriate metadata store.""" + if options.sqlite_cache: + mds: MetadataStore = SqliteMetadataStore(_cache_dir_prefix(options)) + else: + mds = FilesystemMetadataStore(_cache_dir_prefix(options)) + return mds + + +def get_cache_names(id: str, path: str, options: Options) -> tuple[str, str, str | None]: + """Return the file names for the cache files. 
+ + Args: + id: module ID + path: module path + options: build options + + Returns: + A tuple with the file names to be used for the meta file, the + data file, and the fine-grained deps JSON, respectively. + """ + if options.cache_map: + pair = options.cache_map.get(normpath(path, options)) + else: + pair = None + if pair is not None: + # The cache map paths were specified relative to the base directory, + # but the filesystem metastore APIs operates relative to the cache + # prefix directory. + # Solve this by rewriting the paths as relative to the root dir. + # This only makes sense when using the filesystem backed cache. + root = _cache_dir_prefix(options) + return os.path.relpath(pair[0], root), os.path.relpath(pair[1], root), None + prefix = os.path.join(*id.split(".")) + is_package = os.path.basename(path).startswith("__init__.py") + if is_package: + prefix = os.path.join(prefix, "__init__") + + deps_json = None + if options.cache_fine_grained: + deps_json = prefix + ".deps.json" + if options.fixed_format_cache: + data_suffix = ".data.ff" + meta_suffix = ".meta.ff" + else: + data_suffix = ".data.json" + meta_suffix = ".meta.json" + return prefix + meta_suffix, prefix + data_suffix, deps_json + + +def options_snapshot(id: str, manager: BuildManager) -> dict[str, object]: + """Make compact snapshot of options for a module. + + Separately store only the options we may compare individually, and take a hash + of everything else. If --debug-cache is specified, fall back to full snapshot. + """ + snapshot = manager.options.clone_for_module(id).select_options_affecting_cache() + if manager.options.debug_cache: + return snapshot + platform_opt = snapshot.pop("platform") + return {"platform": platform_opt, "other_options": hash_digest(json_dumps(snapshot))} + + +def find_cache_meta(id: str, path: str, manager: BuildManager) -> CacheMeta | None: + """Find cache data for a module. + + Args: + id: module ID + path: module path + manager: the build manager (for pyversion, log/trace, and build options) + + Returns: + A CacheMeta instance if the cache data was found and appears + valid; otherwise None. + """ + # TODO: May need to take more build options into account + meta_file, data_file, _ = get_cache_names(id, path, manager.options) + manager.trace(f"Looking for {id} at {meta_file}") + meta: bytes | dict[str, Any] | None + t0 = time.time() + if manager.options.fixed_format_cache: + meta = _load_ff_file(meta_file, manager, log_error=f"Could not load cache for {id}: ") + if meta is None: + return None + else: + meta = _load_json_file( + meta_file, + manager, + log_success=f"Meta {id} ", + log_error=f"Could not load cache for {id}: ", + ) + if meta is None: + return None + if not isinstance(meta, dict): + manager.log( # type: ignore[unreachable] + f"Could not load cache for {id}: meta cache is not a dict: {repr(meta)}" + ) + return None + t1 = time.time() + if isinstance(meta, bytes): + # If either low-level buffer format or high-level cache layout changed, we + # cannot use the cache files, even with --skip-version-check. + # TODO: switch to something like librt.internal.read_byte() if this is slow. 
+ if meta[0] != cache_version() or meta[1] != CACHE_VERSION: + manager.log(f"Metadata abandoned for {id}: incompatible cache format") + return None + data_io = ReadBuffer(meta[2:]) + m = CacheMeta.read(data_io, data_file) + else: + m = CacheMeta.deserialize(meta, data_file) + if m is None: + manager.log(f"Metadata abandoned for {id}: cannot deserialize data") + return None + t2 = time.time() + manager.add_stats( + load_meta_time=t2 - t0, load_meta_load_time=t1 - t0, load_meta_from_dict_time=t2 - t1 + ) + + # Ignore cache if generated by an older mypy version. + if m.version_id != manager.version_id and not manager.options.skip_version_check: + manager.log(f"Metadata abandoned for {id}: different mypy version") + return None + + total_deps = len(m.dependencies) + len(m.suppressed) + if len(m.dep_prios) != total_deps or len(m.dep_lines) != total_deps: + manager.log(f"Metadata abandoned for {id}: broken dependencies") + return None + + # Ignore cache if (relevant) options aren't the same. + # Note that it's fine to mutilate cached_options since it's only used here. + cached_options = m.options + current_options = options_snapshot(id, manager) + if manager.options.skip_version_check: + # When we're lax about version we're also lax about platform. + cached_options["platform"] = current_options["platform"] + if "debug_cache" in cached_options: + # Older versions included debug_cache, but it's silly to compare it. + del cached_options["debug_cache"] + if cached_options != current_options: + manager.log(f"Metadata abandoned for {id}: options differ") + if manager.options.verbosity >= 2: + for key in sorted(set(cached_options) | set(current_options)): + if cached_options.get(key) != current_options.get(key): + manager.trace( + " {}: {} != {}".format( + key, cached_options.get(key), current_options.get(key) + ) + ) + return None + if manager.old_plugins_snapshot and manager.plugins_snapshot: + # Check if plugins are still the same. + if manager.plugins_snapshot != manager.old_plugins_snapshot: + manager.log(f"Metadata abandoned for {id}: plugins differ") + return None + # So that plugins can return data with tuples in it without + # things silently always invalidating modules, we round-trip + # the config data. This isn't beautiful. + plugin_data = json_loads( + json_dumps(manager.plugin.report_config_data(ReportConfigContext(id, path, is_check=True))) + ) + if m.plugin_data != plugin_data: + manager.log(f"Metadata abandoned for {id}: plugin configuration differs") + return None + + manager.add_stats(fresh_metas=1) + return m + + +def validate_meta( + meta: CacheMeta | None, id: str, path: str | None, ignore_all: bool, manager: BuildManager +) -> CacheMeta | None: + """Checks whether the cached AST of this module can be used. + + Returns: + None, if the cached AST is unusable. + Original meta, if mtime/size matched. + Meta with mtime updated to match source file, if hash/size matched but mtime/path didn't. + """ + # This requires two steps. The first one is obvious: we check that the module source file + # contents is the same as it was when the cache data file was created. The second one is not + # too obvious: we check that the cache data file mtime has not changed; it is needed because + # we use cache data file mtime to propagate information about changes in the dependencies. 
+ + if meta is None: + manager.log(f"Metadata not found for {id}") + return None + + if meta.ignore_all and not ignore_all: + manager.log(f"Metadata abandoned for {id}: errors were previously ignored") + return None + + t0 = time.time() + bazel = manager.options.bazel + assert path is not None, "Internal error: meta was provided without a path" + if not manager.options.skip_cache_mtime_checks: + # Check data_file; assume if its mtime matches it's good. + try: + data_mtime = manager.getmtime(meta.data_file) + except OSError: + manager.log(f"Metadata abandoned for {id}: failed to stat data_file") + return None + if data_mtime != meta.data_mtime: + manager.log(f"Metadata abandoned for {id}: data cache is modified") + return None + + if bazel: + # Normalize path under bazel to make sure it isn't absolute + path = normpath(path, manager.options) + + st = manager.get_stat(path) + if st is None: + return None + if not stat.S_ISDIR(st.st_mode) and not stat.S_ISREG(st.st_mode): + manager.log(f"Metadata abandoned for {id}: file or directory {path} does not exist") + return None + + manager.add_stats(validate_stat_time=time.time() - t0) + + # When we are using a fine-grained cache, we want our initial + # build() to load all of the cache information and then do a + # fine-grained incremental update to catch anything that has + # changed since the cache was generated. We *don't* want to do a + # coarse-grained incremental rebuild, so we accept the cache + # metadata even if it doesn't match the source file. + # + # We still *do* the mtime/hash checks, however, to enable + # fine-grained mode to take advantage of the mtime-updating + # optimization when mtimes differ but hashes match. There is + # essentially no extra time cost to computing the hash here, since + # it will be cached and will be needed for finding changed files + # later anyways. + fine_grained_cache = manager.use_fine_grained_cache() + + size = st.st_size + # Bazel ensures the cache is valid. + if size != meta.size and not bazel and not fine_grained_cache: + manager.log(f"Metadata abandoned for {id}: file {path} has different size") + return None + + # Bazel ensures the cache is valid. + mtime = 0 if bazel else int(st.st_mtime) + if not bazel and (mtime != meta.mtime or path != meta.path): + if manager.quickstart_state and path in manager.quickstart_state: + # If the mtime and the size of the file recorded in the quickstart dump matches + # what we see on disk, we know (assume) that the hash matches the quickstart + # data as well. If that hash matches the hash in the metadata, then we know + # the file is up to date even though the mtime is wrong, without needing to hash it. 
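+            # quickstart_state maps path -> (mtime, size, hash); see read_quickstart_file().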
+ qmtime, qsize, qhash = manager.quickstart_state[path] + if int(qmtime) == mtime and qsize == size and qhash == meta.hash: + manager.log(f"Metadata fresh (by quickstart) for {id}: file {path}") + meta.mtime = mtime + meta.path = path + return meta + + t0 = time.time() + try: + # dir means it is a namespace package + if stat.S_ISDIR(st.st_mode): + source_hash = "" + else: + source_hash = manager.fscache.hash_digest(path) + except (OSError, UnicodeDecodeError, DecodeError): + return None + manager.add_stats(validate_hash_time=time.time() - t0) + if source_hash != meta.hash: + if fine_grained_cache: + manager.log(f"Using stale metadata for {id}: file {path}") + return meta + else: + manager.log(f"Metadata abandoned for {id}: file {path} has different hash") + return None + else: + t0 = time.time() + # Optimization: update mtime and path (otherwise, this mismatch will reappear). + meta.mtime = mtime + meta.path = path + meta.size = size + meta.options = options_snapshot(id, manager) + meta_file, _, _ = get_cache_names(id, path, manager.options) + manager.log( + "Updating mtime for {}: file {}, meta {}, mtime {}".format( + id, path, meta_file, meta.mtime + ) + ) + write_cache_meta(meta, manager, meta_file) + t1 = time.time() + manager.add_stats(validate_update_time=time.time() - t1, validate_munging_time=t1 - t0) + return meta + + # It's a match on (id, path, size, hash, mtime). + manager.log(f"Metadata fresh for {id}: file {path}") + return meta + + +def compute_hash(text: str) -> str: + # We use a crypto hash instead of the builtin hash(...) function + # because the output of hash(...) can differ between runs due to + # hash randomization (enabled by default in Python 3.3). See the + # note in + # https://docs.python.org/3/reference/datamodel.html#object.__hash__. + return hash_digest(text.encode("utf-8")) + + +def write_cache( + id: str, + path: str, + tree: MypyFile, + dependencies: list[str], + suppressed: list[str], + dep_prios: list[int], + dep_lines: list[int], + old_interface_hash: bytes, + source_hash: str, + ignore_all: bool, + manager: BuildManager, +) -> tuple[bytes, tuple[CacheMeta, str] | None]: + """Write cache files for a module. + + Note that this mypy's behavior is still correct when any given + write_cache() call is replaced with a no-op, so error handling + code that bails without writing anything is okay. + + Args: + id: module ID + path: module path + tree: the fully checked module data + dependencies: module IDs on which this module depends + suppressed: module IDs which were suppressed as dependencies + dep_prios: priorities (parallel array to dependencies) + dep_lines: import line locations (parallel array to dependencies) + old_interface_hash: the hash from the previous version of the data cache file + source_hash: the hash of the source code + ignore_all: the ignore_all flag for this module + manager: the build manager (for pyversion, log/trace) + + Returns: + A tuple containing the interface hash and inner tuple with CacheMeta + that should be written and path to cache file (inner tuple may be None, + if the cache data could not be written). + """ + metastore = manager.metastore + # For Bazel we use relative paths and zero mtimes. + bazel = manager.options.bazel + + # Obtain file paths. + meta_file, data_file, _ = get_cache_names(id, path, manager.options) + manager.log(f"Writing {id} {path} {meta_file} {data_file}") + + # Update tree.path so that in bazel mode it's made relative (since + # sometimes paths leak out). 
+ if bazel: + tree.path = path + + plugin_data = manager.plugin.report_config_data(ReportConfigContext(id, path, is_check=False)) + + # Serialize data and analyze interface + if manager.options.fixed_format_cache: + data_io = WriteBuffer() + tree.write(data_io) + data_bytes = data_io.getvalue() + else: + data = tree.serialize() + data_bytes = json_dumps(data, manager.options.debug_cache) + interface_hash = hash_digest_bytes(data_bytes + json_dumps(plugin_data)) + + # Obtain and set up metadata + st = manager.get_stat(path) + if st is None: + manager.log(f"Cannot get stat for {path}") + # Remove apparently-invalid cache files. + # (This is purely an optimization.) + for filename in [data_file, meta_file]: + try: + os.remove(filename) + except OSError: + pass + # Still return the interface hash we computed. + return interface_hash, None + + # Write data cache file, if applicable + # Note that for Bazel we don't record the data file's mtime. + if old_interface_hash == interface_hash: + manager.trace(f"Interface for {id} is unchanged") + else: + manager.trace(f"Interface for {id} has changed") + if not metastore.write(data_file, data_bytes): + # Most likely the error is the replace() call + # (see https://github.com/python/mypy/issues/3215). + manager.log(f"Error writing cache data file {data_file}") + # Let's continue without writing the meta file. Analysis: + # If the replace failed, we've changed nothing except left + # behind an extraneous temporary file; if the replace + # worked but the getmtime() call failed, the meta file + # will be considered invalid on the next run because the + # data_mtime field won't match the data file's mtime. + # Both have the effect of slowing down the next run a + # little bit due to an out-of-date cache file. + return interface_hash, None + + try: + data_mtime = manager.getmtime(data_file) + except OSError: + manager.log(f"Error in os.stat({data_file!r}), skipping cache write") + return interface_hash, None + + mtime = 0 if bazel else int(st.st_mtime) + size = st.st_size + # Note that the options we store in the cache are the options as + # specified by the command line/config file and *don't* reflect + # updates made by inline config directives in the file. This is + # important, or otherwise the options would never match when + # verifying the cache. + assert source_hash is not None + meta = CacheMeta( + id=id, + path=path, + mtime=mtime, + size=size, + hash=source_hash, + dependencies=dependencies, + data_mtime=data_mtime, + data_file=data_file, + suppressed=suppressed, + options=options_snapshot(id, manager), + dep_prios=dep_prios, + dep_lines=dep_lines, + interface_hash=interface_hash, + version_id=manager.version_id, + ignore_all=ignore_all, + plugin_data=plugin_data, + # These two will be filled by the caller. + dep_hashes=[], + error_lines=[], + ) + return interface_hash, (meta, meta_file) + + +def write_cache_meta(meta: CacheMeta, manager: BuildManager, meta_file: str) -> None: + # Write meta cache file + metastore = manager.metastore + if manager.options.fixed_format_cache: + data_io = WriteBuffer() + meta.write(data_io) + # Prefix with both low- and high-level cache format versions for future validation. + # TODO: switch to something like librt.internal.write_byte() if this is slow. 
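+        # The result is two version bytes followed by the serialized metadata.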
+ meta_bytes = bytes([cache_version(), CACHE_VERSION]) + data_io.getvalue() + else: + meta_dict = meta.serialize() + meta_bytes = json_dumps(meta_dict, manager.options.debug_cache) + if not metastore.write(meta_file, meta_bytes): + # Most likely the error is the replace() call + # (see https://github.com/python/mypy/issues/3215). + # The next run will simply find the cache entry out of date. + manager.log(f"Error writing cache meta file {meta_file}") + + +"""Dependency manager. + +Design +====== + +Ideally +------- + +A. Collapse cycles (each SCC -- strongly connected component -- + becomes one "supernode"). + +B. Topologically sort nodes based on dependencies. + +C. Process from leaves towards roots. + +Wrinkles +-------- + +a. Need to parse source modules to determine dependencies. + +b. Processing order for modules within an SCC. + +c. Must order mtimes of files to decide whether to re-process; depends + on clock never resetting. + +d. from P import M; checks filesystem whether module P.M exists in + filesystem. + +e. Race conditions, where somebody modifies a file while we're + processing. Solved by using a FileSystemCache. + + +Steps +----- + +1. For each explicitly given module find the source file location. + +2. For each such module load and check the cache metadata, and decide + whether it's valid. + +3. Now recursively (or iteratively) find dependencies and add those to + the graph: + + - for cached nodes use the list of dependencies from the cache + metadata (this will be valid even if we later end up re-parsing + the same source); + + - for uncached nodes parse the file and process all imports found, + taking care of (a) above. + +Step 3 should also address (d) above. + +Once step 3 terminates we have the entire dependency graph, and for +each module we've either loaded the cache metadata or parsed the +source code. (However, we may still need to parse those modules for +which we have cache metadata but that depend, directly or indirectly, +on at least one module for which the cache metadata is stale.) + +Now we can execute steps A-C from the first section. Finding SCCs for +step A shouldn't be hard; there's a recipe here: +https://code.activestate.com/recipes/578507/. There's also a plethora +of topsort recipes, e.g. https://code.activestate.com/recipes/577413/. + +For single nodes, processing is simple. If the node was cached, we +deserialize the cache data and fix up cross-references. Otherwise, we +do semantic analysis followed by type checking. Once we (re-)processed +an SCC we check whether its interface (symbol table) is still fresh +(matches previous cached value). If it is not, we consider dependent SCCs +stale so that they need to be re-parsed as well. + +Note on indirect dependencies: normally dependencies are determined from +imports, but since our interfaces are "opaque" (i.e. symbol tables can +contain cross-references as well as types identified by name), these are not +enough. We *must* also add "indirect" dependencies from symbols and types to +their definitions. For this purpose, we record all accessed symbols during +semantic analysis, and after we finished processing a module, we traverse its +type map, and for each type we find (transitively) on which named types it +depends. + +Import cycles +------------- + +Finally we have to decide how to handle (b), import cycles. 
Here +we'll need a modified version of the original state machine +(build.py), but we only need to do this per SCC, and we won't have to +deal with changes to the list of nodes while we're processing it. + +If all nodes in the SCC have valid cache metadata and all dependencies +outside the SCC are still valid, we can proceed as follows: + + 1. Load cache data for all nodes in the SCC. + + 2. Fix up cross-references for all nodes in the SCC. + +Otherwise, the simplest (but potentially slow) way to proceed is to +invalidate all cache data in the SCC and re-parse all nodes in the SCC +from source. We can do this as follows: + + 1. Parse source for all nodes in the SCC. + + 2. Semantic analysis for all nodes in the SCC. + + 3. Type check all nodes in the SCC. + +(If there are more passes the process is the same -- each pass should +be done for all nodes before starting the next pass for any nodes in +the SCC.) + +We could process the nodes in the SCC in any order. For sentimental +reasons, I've decided to process them in the reverse order in which we +encountered them when originally constructing the graph. That's how +the old build.py deals with cycles, and at least this reproduces the +previous implementation more accurately. + +Can we do better than re-parsing all nodes in the SCC when any of its +dependencies are out of date? It's doubtful. The optimization +mentioned at the end of the previous section would require re-parsing +and type-checking a node and then comparing its symbol table to the +cached data; but because the node is part of a cycle we can't +technically type-check it until the semantic analysis of all other +nodes in the cycle has completed. (This is an important issue because +Dropbox has a very large cycle in production code. But I'd like to +deal with it later.) + +Additional wrinkles +------------------- + +During implementation more wrinkles were found. + +- When a submodule of a package (e.g. x.y) is encountered, the parent + package (e.g. x) must also be loaded, but it is not strictly a + dependency. See State.add_ancestors() below. +""" + + +class ModuleNotFound(Exception): + """Control flow exception to signal that a module was not found.""" + + +class State: + """The state for a module. + + The source is only used for the -c command line option; in that + case path is None. Otherwise source is None and path isn't. + """ + + manager: BuildManager + order_counter: ClassVar[int] = 0 + order: int # Order in which modules were encountered + id: str # Fully qualified module name + path: str | None = None # Path to module source + abspath: str | None = None # Absolute path to module source + xpath: str # Path or '' + source: str | None = None # Module source code + source_hash: str | None = None # Hash calculated based on the source code + meta_source_hash: str | None = None # Hash of the source given in the meta, if any + meta: CacheMeta | None = None + data: str | None = None + tree: MypyFile | None = None + # We keep both a list and set of dependencies. A set because it makes it efficient to + # prevent duplicates and the list because I am afraid of changing the order of + # iteration over dependencies. + # They should be managed with add_dependency and suppress_dependency. 
+ dependencies: list[str] # Modules directly imported by the module + dependencies_set: set[str] # The same but as a set for deduplication purposes + suppressed: list[str] # Suppressed/missing dependencies + suppressed_set: set[str] # Suppressed/missing dependencies + priorities: dict[str, int] + + # Map each dependency to the line number where it is first imported + dep_line_map: dict[str, int] + + # Map from dependency id to its last observed interface hash + dep_hashes: dict[str, bytes] = {} + + # List of errors reported for this file last time. + error_lines: list[SerializedError] = [] + + # Parent package, its parent, etc. + ancestors: list[str] | None = None + + # List of (path, line number) tuples giving context for import + import_context: list[tuple[str, int]] + + # If caller_state is set, the line number in the caller where the import occurred + caller_line = 0 + + # Contains a hash of the public interface in incremental mode + interface_hash: bytes = b"" + + # Options, specialized for this file + options: Options + + # Whether to ignore all errors + ignore_all = False + + # Errors reported before semantic analysis, to allow fine-grained + # mode to keep reporting them. + early_errors: list[ErrorInfo] + + # Type checker used for checking this file. Use type_checker() for + # access and to construct this on demand. + _type_checker: TypeChecker | None = None + + fine_grained_deps_loaded = False + + # Cumulative time spent on this file, in microseconds (for profiling stats) + time_spent_us: int = 0 + + # Per-line type-checking time (cumulative time spent type-checking expressions + # on a given source code line). + per_line_checking_time_ns: dict[int, int] + + def __init__( + self, + id: str | None, + path: str | None, + source: str | None, + manager: BuildManager, + caller_state: State | None = None, + caller_line: int = 0, + ancestor_for: State | None = None, + root_source: bool = False, + # If `temporary` is True, this State is being created to just + # quickly parse/load the tree, without an intention to further + # process it. With this flag, any changes to external state as well + # as error reporting should be avoided. + temporary: bool = False, + ) -> None: + if not temporary: + assert id or path or source is not None, "Neither id, path nor source given" + self.manager = manager + State.order_counter += 1 + self.order = State.order_counter + self.caller_line = caller_line + if caller_state: + self.import_context = caller_state.import_context.copy() + self.import_context.append((caller_state.xpath, caller_line)) + else: + self.import_context = [] + self.id = id or "__main__" + self.options = manager.options.clone_for_module(self.id) + self.early_errors = [] + self._type_checker = None + if not path and source is None: + assert id is not None + try: + path, follow_imports = find_module_and_diagnose( + manager, + id, + self.options, + caller_state, + caller_line, + ancestor_for, + root_source, + skip_diagnose=temporary, + ) + except ModuleNotFound: + if not temporary: + manager.missing_modules.add(id) + raise + if follow_imports == "silent": + self.ignore_all = True + elif path and is_silent_import_module(manager, path) and not root_source: + self.ignore_all = True + self.path = path + if path: + self.abspath = os.path.abspath(path) + self.xpath = path or "" + if path and source is None and self.manager.cache_enabled: + self.meta = find_cache_meta(self.id, path, manager) + # TODO: Get mtime if not cached. 
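+            # Seed interface_hash and meta_source_hash from the cached metadata;
+            # validate_meta() below decides whether the metadata can actually be used.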
+ if self.meta is not None: + self.interface_hash = self.meta.interface_hash + self.meta_source_hash = self.meta.hash + if path and source is None and self.manager.fscache.isdir(path): + source = "" + self.source = source + self.add_ancestors() + self.per_line_checking_time_ns = collections.defaultdict(int) + t0 = time.time() + self.meta = validate_meta(self.meta, self.id, self.path, self.ignore_all, manager) + self.manager.add_stats(validate_meta_time=time.time() - t0) + if self.meta: + # Make copies, since we may modify these and want to + # compare them to the originals later. + self.dependencies = list(self.meta.dependencies) + self.dependencies_set = set(self.dependencies) + self.suppressed = list(self.meta.suppressed) + self.suppressed_set = set(self.suppressed) + all_deps = self.dependencies + self.suppressed + assert len(all_deps) == len(self.meta.dep_prios) + self.priorities = {id: pri for id, pri in zip(all_deps, self.meta.dep_prios)} + assert len(all_deps) == len(self.meta.dep_lines) + self.dep_line_map = {id: line for id, line in zip(all_deps, self.meta.dep_lines)} + assert len(self.meta.dep_hashes) == len(self.meta.dependencies) + self.dep_hashes = { + k: v for (k, v) in zip(self.meta.dependencies, self.meta.dep_hashes) + } + self.error_lines = self.meta.error_lines + if temporary: + self.load_tree(temporary=True) + if not manager.use_fine_grained_cache(): + # Special case: if there were a previously missing package imported here + # and it is not present, then we need to re-calculate dependencies. + # This is to support patterns like this: + # from missing_package import missing_module # type: ignore + # At first mypy doesn't know that `missing_module` is a module + # (it may be a variable, a class, or a function), so it is not added to + # suppressed dependencies. Therefore, when the package with module is added, + # we need to re-calculate dependencies. + # NOTE: see comment below for why we skip this in fine grained mode. + if exist_added_packages(self.suppressed, manager, self.options): + self.parse_file() # This is safe because the cache is anyway stale. + self.compute_dependencies() + else: + # When doing a fine-grained cache load, pretend we only + # know about modules that have cache information and defer + # handling new modules until the fine-grained update. + if manager.use_fine_grained_cache(): + manager.log(f"Deferring module to fine-grained update {path} ({id})") + raise ModuleNotFound + + # Parse the file (and then some) to get the dependencies. + self.parse_file(temporary=temporary) + self.compute_dependencies() + + def add_ancestors(self) -> None: + if self.path is not None: + _, name = os.path.split(self.path) + base, _ = os.path.splitext(name) + if "." in base: + # This is just a weird filename, don't add anything + self.ancestors = [] + return + # All parent packages are new ancestors. + ancestors = [] + parent = self.id + while "." in parent: + parent, _ = parent.rsplit(".", 1) + ancestors.append(parent) + self.ancestors = ancestors + + def is_fresh(self) -> bool: + """Return whether the cache data for this file is fresh.""" + # NOTE: self.dependencies may differ from + # self.meta.dependencies when a dependency is dropped due to + # suppression by silent mode. However, when a suppressed + # dependency is added back we find out later in the process. 
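+        # That is: validated cache metadata exists and the dependency list still matches it.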
+ return self.meta is not None and self.dependencies == self.meta.dependencies + + def mark_as_rechecked(self) -> None: + """Marks this module as having been fully re-analyzed by the type-checker.""" + self.manager.rechecked_modules.add(self.id) + + def mark_interface_stale(self) -> None: + """Marks this module as having a stale public interface, and discards the cache data.""" + self.manager.stale_modules.add(self.id) + + def check_blockers(self) -> None: + """Raise CompileError if a blocking error is detected.""" + if self.manager.errors.is_blockers(): + self.manager.log("Bailing due to blocking errors") + self.manager.errors.raise_error() + + @contextlib.contextmanager + def wrap_context(self, check_blockers: bool = True) -> Iterator[None]: + """Temporarily change the error import context to match this state. + + Also report an internal error if an unexpected exception was raised + and raise an exception on a blocking error, unless + check_blockers is False. Skipping blocking error reporting is used + in the semantic analyzer so that we can report all blocking errors + for a file (across multiple targets) to maintain backward + compatibility. + """ + save_import_context = self.manager.errors.import_context() + self.manager.errors.set_import_context(self.import_context) + try: + yield + except CompileError: + raise + except Exception as err: + report_internal_error( + err, + self.path, + 0, + self.manager.errors, + self.options, + self.manager.stdout, + self.manager.stderr, + ) + self.manager.errors.set_import_context(save_import_context) + # TODO: Move this away once we've removed the old semantic analyzer? + if check_blockers: + self.check_blockers() + + def load_fine_grained_deps(self) -> dict[str, set[str]]: + return self.manager.load_fine_grained_deps(self.id) + + def load_tree(self, temporary: bool = False) -> None: + assert ( + self.meta is not None + ), "Internal error: this method must be called only for cached modules" + + data: bytes | dict[str, Any] | None + if self.options.fixed_format_cache: + data = _load_ff_file(self.meta.data_file, self.manager, "Could not load tree: ") + else: + data = _load_json_file( + self.meta.data_file, self.manager, "Load tree ", "Could not load tree: " + ) + if data is None: + return + + t0 = time.time() + # TODO: Assert data file wasn't changed. + if isinstance(data, bytes): + data_io = ReadBuffer(data) + self.tree = MypyFile.read(data_io) + else: + self.tree = MypyFile.deserialize(data) + t1 = time.time() + self.manager.add_stats(deserialize_time=t1 - t0) + if not temporary: + self.manager.modules[self.id] = self.tree + self.manager.add_stats(fresh_trees=1) + + def fix_cross_refs(self) -> None: + assert self.tree is not None, "Internal error: method must be called on parsed file only" + # We need to set allow_missing when doing a fine grained cache + # load because we need to gracefully handle missing modules. + fixup_module(self.tree, self.manager.modules, self.options.use_fine_grained_cache) + + # Methods for processing modules from source code. + + def parse_file(self, *, temporary: bool = False) -> None: + """Parse file and run first pass of semantic analysis. + + Everything done here is local to the file. Don't depend on imported + modules in any way. Also record module dependencies based on imports. + """ + if self.tree is not None: + # The file was already parsed (in __init__()). + return + + manager = self.manager + + # Can we reuse a previously parsed AST? This avoids redundant work in daemon. 
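+        # manager.ast_cache maps module id to the (tree, early_errors) pair
+        # saved at the end of this method.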
+ cached = self.id in manager.ast_cache + modules = manager.modules + if not cached: + manager.log(f"Parsing {self.xpath} ({self.id})") + else: + manager.log(f"Using cached AST for {self.xpath} ({self.id})") + + t0 = time_ref() + + with self.wrap_context(): + source = self.source + self.source = None # We won't need it again. + if self.path and source is None: + try: + path = manager.maybe_swap_for_shadow_path(self.path) + source = decode_python_encoding(manager.fscache.read(path)) + self.source_hash = manager.fscache.hash_digest(path) + except OSError as ioerr: + # ioerr.strerror differs for os.stat failures between Windows and + # other systems, but os.strerror(ioerr.errno) does not, so we use that. + # (We want the error messages to be platform-independent so that the + # tests have predictable output.) + assert ioerr.errno is not None + raise CompileError( + [ + "mypy: can't read file '{}': {}".format( + self.path.replace(os.getcwd() + os.sep, ""), + os.strerror(ioerr.errno), + ) + ], + module_with_blocker=self.id, + ) from ioerr + except (UnicodeDecodeError, DecodeError) as decodeerr: + if self.path.endswith(".pyd"): + err = f"mypy: stubgen does not support .pyd files: '{self.path}'" + else: + err = f"mypy: can't decode file '{self.path}': {str(decodeerr)}" + raise CompileError([err], module_with_blocker=self.id) from decodeerr + elif self.path and self.manager.fscache.isdir(self.path): + source = "" + self.source_hash = "" + else: + assert source is not None + self.source_hash = compute_hash(source) + + self.parse_inline_configuration(source) + if not cached: + self.tree = manager.parse_file( + self.id, + self.xpath, + source, + ignore_errors=self.ignore_all or self.options.ignore_errors, + options=self.options, + ) + + else: + # Reuse a cached AST + self.tree = manager.ast_cache[self.id][0] + manager.errors.set_file_ignored_lines( + self.xpath, + self.tree.ignored_lines, + self.ignore_all or self.options.ignore_errors, + ) + + self.time_spent_us += time_spent_us(t0) + + if not cached: + # Make a copy of any errors produced during parse time so that + # fine-grained mode can repeat them when the module is + # reprocessed. + self.early_errors = list(manager.errors.error_info_map.get(self.xpath, [])) + else: + self.early_errors = manager.ast_cache[self.id][1] + + if not temporary: + modules[self.id] = self.tree + + if not cached: + self.semantic_analysis_pass1() + + if not temporary: + self.check_blockers() + + manager.ast_cache[self.id] = (self.tree, self.early_errors) + + def parse_inline_configuration(self, source: str) -> None: + """Check for inline mypy: options directive and parse them.""" + flags = get_mypy_comments(source) + if flags: + changes, config_errors = parse_mypy_comments(flags, self.options) + self.options = self.options.apply_changes(changes) + self.manager.errors.set_file(self.xpath, self.id, self.options) + for lineno, error in config_errors: + self.manager.errors.report(lineno, 0, error) + + def semantic_analysis_pass1(self) -> None: + """Perform pass 1 of semantic analysis, which happens immediately after parsing. + + This pass can't assume that any other modules have been processed yet. + """ + options = self.options + assert self.tree is not None + + t0 = time_ref() + + # Do the first pass of semantic analysis: analyze the reachability + # of blocks and import statements. We must do this before + # processing imports, since this may mark some import statements as + # unreachable. 
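+        # (Imports inside blocks marked unreachable here therefore never become graph
+        # dependencies; e.g. only the live branch of a sys.version_info check is considered.)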
+ # + # TODO: This should not be considered as a semantic analysis + # pass -- it's an independent pass. + analyzer = SemanticAnalyzerPreAnalysis() + with self.wrap_context(): + analyzer.visit_file(self.tree, self.xpath, self.id, options) + self.manager.errors.set_skipped_lines(self.xpath, self.tree.skipped_lines) + # TODO: Do this while constructing the AST? + self.tree.names = SymbolTable() + if not self.tree.is_stub: + if not self.options.allow_redefinition_new: + # Perform some low-key variable renaming when assignments can't + # widen inferred types + self.tree.accept(LimitedVariableRenameVisitor()) + if options.allow_redefinition: + # Perform more renaming across the AST to allow variable redefinitions + self.tree.accept(VariableRenameVisitor()) + self.time_spent_us += time_spent_us(t0) + + def add_dependency(self, dep: str) -> None: + if dep not in self.dependencies_set: + self.dependencies.append(dep) + self.dependencies_set.add(dep) + if dep in self.suppressed_set: + self.suppressed.remove(dep) + self.suppressed_set.remove(dep) + + def suppress_dependency(self, dep: str) -> None: + if dep in self.dependencies_set: + self.dependencies.remove(dep) + self.dependencies_set.remove(dep) + if dep not in self.suppressed_set: + self.suppressed.append(dep) + self.suppressed_set.add(dep) + + def compute_dependencies(self) -> None: + """Compute a module's dependencies after parsing it. + + This is used when we parse a file that we didn't have + up-to-date cache information for. When we have an up-to-date + cache, we just use the cached info. + """ + manager = self.manager + assert self.tree is not None + + # Compute (direct) dependencies. + # Add all direct imports (this is why we needed the first pass). + # Also keep track of each dependency's source line. + # Missing dependencies will be moved from dependencies to + # suppressed when they fail to be loaded in load_graph. + + self.dependencies = [] + self.dependencies_set = set() + self.suppressed = [] + self.suppressed_set = set() + self.priorities = {} # id -> priority + self.dep_line_map = {} # id -> line + self.dep_hashes = {} + dep_entries = manager.all_imported_modules_in_file( + self.tree + ) + self.manager.plugin.get_additional_deps(self.tree) + for pri, id, line in dep_entries: + self.priorities[id] = min(pri, self.priorities.get(id, PRI_ALL)) + if id == self.id: + continue + self.add_dependency(id) + if id not in self.dep_line_map: + self.dep_line_map[id] = line + # Every module implicitly depends on builtins. + if self.id != "builtins": + self.add_dependency("builtins") + + self.check_blockers() # Can fail due to bogus relative imports + + def type_check_first_pass(self) -> None: + if self.options.semantic_analysis_only: + return + t0 = time_ref() + with self.wrap_context(): + self.type_checker().check_first_pass() + self.time_spent_us += time_spent_us(t0) + + def type_checker(self) -> TypeChecker: + if not self._type_checker: + assert self.tree is not None, "Internal error: must be called on parsed file only" + manager = self.manager + self._type_checker = TypeChecker( + manager.errors, + manager.modules, + self.options, + self.tree, + self.xpath, + manager.plugin, + self.per_line_checking_time_ns, + ) + return self._type_checker + + def type_map(self) -> dict[Expression, Type]: + # We can extract the master type map directly since at this + # point no temporary type maps can be active. 
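+        # The assert below enforces that assumption: only the master map is present.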
+ assert len(self.type_checker()._type_maps) == 1 + return self.type_checker()._type_maps[0] + + def type_check_second_pass(self) -> bool: + if self.options.semantic_analysis_only: + return False + t0 = time_ref() + with self.wrap_context(): + result = self.type_checker().check_second_pass() + self.time_spent_us += time_spent_us(t0) + return result + + def detect_possibly_undefined_vars(self) -> None: + assert self.tree is not None, "Internal error: method must be called on parsed file only" + if self.tree.is_stub: + # We skip stub files because they aren't actually executed. + return + manager = self.manager + manager.errors.set_file(self.xpath, self.tree.fullname, options=self.options) + if manager.errors.is_error_code_enabled( + codes.POSSIBLY_UNDEFINED + ) or manager.errors.is_error_code_enabled(codes.USED_BEFORE_DEF): + self.tree.accept( + PossiblyUndefinedVariableVisitor( + MessageBuilder(manager.errors, manager.modules), + self.type_map(), + self.options, + self.tree.names, + ) + ) + + def finish_passes(self) -> None: + assert self.tree is not None, "Internal error: method must be called on parsed file only" + manager = self.manager + if self.options.semantic_analysis_only: + return + t0 = time_ref() + with self.wrap_context(): + # Some tests (and tools) want to look at the set of all types. + options = manager.options + if options.export_types: + manager.all_types.update(self.type_map()) + + # We should always patch indirect dependencies, even in full (non-incremental) builds, + # because the cache still may be written, and it must be correct. + self.patch_indirect_dependencies( + # Two possible sources of indirect dependencies: + # * Symbols not directly imported in this module but accessed via an attribute + # or via a re-export (vast majority of these recorded in semantic analysis). + # * For each expression type we need to record definitions of type components + # since "meaning" of the type may be updated when definitions are updated. 
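+                # The two arguments below supply exactly those sources: module references
+                # recorded during semantic analysis and type checking, and all expression types.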
+ self.tree.module_refs | self.type_checker().module_refs, + set(self.type_map().values()), + ) + + if self.options.dump_inference_stats: + dump_type_stats( + self.tree, + self.xpath, + modules=self.manager.modules, + inferred=True, + typemap=self.type_map(), + ) + manager.report_file(self.tree, self.type_map(), self.options) + + self.update_fine_grained_deps(self.manager.fg_deps) + + if manager.options.export_ref_info: + write_undocumented_ref_info( + self, manager.metastore, manager.options, self.type_map() + ) + + self.free_state() + if not manager.options.fine_grained_incremental and not manager.options.preserve_asts: + free_tree(self.tree) + self.time_spent_us += time_spent_us(t0) + + def free_state(self) -> None: + if self._type_checker: + self._type_checker.reset() + self._type_checker = None + + def patch_indirect_dependencies(self, module_refs: set[str], types: set[Type]) -> None: + assert self.ancestors is not None + existing_deps = set(self.dependencies + self.suppressed + self.ancestors) + existing_deps.add(self.id) + + encountered = self.manager.indirection_detector.find_modules(types) | module_refs + for dep in sorted(encountered - existing_deps): + if dep not in self.manager.modules: + continue + self.add_dependency(dep) + self.priorities[dep] = PRI_INDIRECT + + def compute_fine_grained_deps(self) -> dict[str, set[str]]: + assert self.tree is not None + if self.id in ("builtins", "typing", "types", "sys", "_typeshed"): + # We don't track changes to core parts of typeshed -- the + # assumption is that they are only changed as part of mypy + # updates, which will invalidate everything anyway. These + # will always be processed in the initial non-fine-grained + # build. Other modules may be brought in as a result of an + # fine-grained increment, and we may need these + # dependencies then to handle cyclic imports. + return {} + from mypy.server.deps import get_dependencies # Lazy import to speed up startup + + return get_dependencies( + target=self.tree, + type_map=self.type_map(), + python_version=self.options.python_version, + options=self.manager.options, + ) + + def update_fine_grained_deps(self, deps: dict[str, set[str]]) -> None: + options = self.manager.options + if options.cache_fine_grained or options.fine_grained_incremental: + from mypy.server.deps import merge_dependencies # Lazy import to speed up startup + + merge_dependencies(self.compute_fine_grained_deps(), deps) + type_state.update_protocol_deps(deps) + + def write_cache(self) -> tuple[CacheMeta, str] | None: + assert self.tree is not None, "Internal error: method must be called on parsed file only" + # We don't support writing cache files in fine-grained incremental mode. 
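+        # Also bail out (returning None) when there is no source path to key the cache on
+        # or when the cache directory is the null device.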
+ if ( + not self.path + or self.options.cache_dir == os.devnull + or self.options.fine_grained_incremental + ): + if self.options.debug_serialize: + try: + if self.manager.options.fixed_format_cache: + data = WriteBuffer() + self.tree.write(data) + else: + self.tree.serialize() + except Exception: + print(f"Error serializing {self.id}", file=self.manager.stdout) + raise # Propagate to display traceback + return None + dep_prios = self.dependency_priorities() + dep_lines = self.dependency_lines() + assert self.source_hash is not None + assert len(set(self.dependencies)) == len( + self.dependencies + ), f"Duplicates in dependencies list for {self.id} ({self.dependencies})" + new_interface_hash, meta_tuple = write_cache( + self.id, + self.path, + self.tree, + list(self.dependencies), + list(self.suppressed), + dep_prios, + dep_lines, + self.interface_hash, + self.source_hash, + self.ignore_all, + self.manager, + ) + if new_interface_hash == self.interface_hash: + self.manager.log(f"Cached module {self.id} has same interface") + else: + self.manager.log(f"Cached module {self.id} has changed interface") + self.mark_interface_stale() + self.interface_hash = new_interface_hash + return meta_tuple + + def verify_dependencies(self, suppressed_only: bool = False) -> None: + """Report errors for import targets in modules that don't exist. + + If suppressed_only is set, only check suppressed dependencies. + """ + manager = self.manager + assert self.ancestors is not None + # Strip out indirect dependencies. See comment in build.load_graph(). + if suppressed_only: + all_deps = [dep for dep in self.suppressed if self.priorities.get(dep) != PRI_INDIRECT] + else: + dependencies = [ + dep + for dep in self.dependencies + self.suppressed + if self.priorities.get(dep) != PRI_INDIRECT + ] + all_deps = dependencies + self.ancestors + for dep in all_deps: + if dep in manager.modules: + continue + options = manager.options.clone_for_module(dep) + if options.ignore_missing_imports: + continue + line = self.dep_line_map.get(dep, 1) + try: + if dep in self.ancestors: + state: State | None = None + ancestor: State | None = self + else: + state, ancestor = self, None + # Called just for its side effects of producing diagnostics. + find_module_and_diagnose( + manager, + dep, + options, + caller_state=state, + caller_line=line, + ancestor_for=ancestor, + ) + except (ModuleNotFound, CompileError): + # Swallow up any ModuleNotFounds or CompilerErrors while generating + # a diagnostic. CompileErrors may get generated in + # fine-grained mode when an __init__.py is deleted, if a module + # that was in that package has targets reprocessed before + # it is renamed. + pass + + def dependency_priorities(self) -> list[int]: + return [self.priorities.get(dep, PRI_HIGH) for dep in self.dependencies + self.suppressed] + + def dependency_lines(self) -> list[int]: + return [self.dep_line_map.get(dep, 1) for dep in self.dependencies + self.suppressed] + + def generate_unused_ignore_notes(self) -> None: + if ( + self.options.warn_unused_ignores + or codes.UNUSED_IGNORE in self.options.enabled_error_codes + ) and codes.UNUSED_IGNORE not in self.options.disabled_error_codes: + # If this file was initially loaded from the cache, it may have suppressed + # dependencies due to imports with ignores on them. We need to generate + # those errors to avoid spuriously flagging them as unused ignores. 
+ if self.meta: + self.verify_dependencies(suppressed_only=True) + self.manager.errors.generate_unused_ignore_errors(self.xpath) + + def generate_ignore_without_code_notes(self) -> None: + if self.manager.errors.is_error_code_enabled(codes.IGNORE_WITHOUT_CODE): + self.manager.errors.generate_ignore_without_code_errors( + self.xpath, self.options.warn_unused_ignores + ) + + +# Module import and diagnostic glue + + +def find_module_and_diagnose( + manager: BuildManager, + id: str, + options: Options, + caller_state: State | None = None, + caller_line: int = 0, + ancestor_for: State | None = None, + root_source: bool = False, + skip_diagnose: bool = False, +) -> tuple[str, str]: + """Find a module by name, respecting follow_imports and producing diagnostics. + + If the module is not found, then the ModuleNotFound exception is raised. + + Args: + id: module to find + options: the options for the module being loaded + caller_state: the state of the importing module, if applicable + caller_line: the line number of the import + ancestor_for: the child module this is an ancestor of, if applicable + root_source: whether this source was specified on the command line + skip_diagnose: skip any error diagnosis and reporting (but ModuleNotFound is + still raised if the module is missing) + + The specified value of follow_imports for a module can be overridden + if the module is specified on the command line or if it is a stub, + so we compute and return the "effective" follow_imports of the module. + + Returns a tuple containing (file path, target's effective follow_imports setting) + """ + result = find_module_with_reason(id, manager) + if isinstance(result, str): + # For non-stubs, look at options.follow_imports: + # - normal (default) -> fully analyze + # - silent -> analyze but silence errors + # - skip -> don't analyze, make the type Any + follow_imports = options.follow_imports + if ( + root_source # Honor top-level modules + or ( + result.endswith(".pyi") # Stubs are always normal + and not options.follow_imports_for_stubs # except when they aren't + ) + or id in CORE_BUILTIN_MODULES # core is always normal + ): + follow_imports = "normal" + if skip_diagnose: + pass + elif follow_imports == "silent": + # Still import it, but silence non-blocker errors. + manager.log(f"Silencing {result} ({id})") + elif follow_imports == "skip" or follow_imports == "error": + # In 'error' mode, produce special error messages. + if id not in manager.missing_modules: + manager.log(f"Skipping {result} ({id})") + if follow_imports == "error": + if ancestor_for: + skipping_ancestor(manager, id, result, ancestor_for) + else: + skipping_module(manager, caller_line, caller_state, id, result) + raise ModuleNotFound + if is_silent_import_module(manager, result) and not root_source: + follow_imports = "silent" + return (result, follow_imports) + else: + # Could not find a module. Typically the reason is a + # misspelled module name, missing stub, module not in + # search path or the module has not been installed. + + ignore_missing_imports = options.ignore_missing_imports + + # Don't honor a global (not per-module) ignore_missing_imports + # setting for modules that used to have bundled stubs, as + # otherwise updating mypy can silently result in new false + # negatives. (Unless there are stubs but they are incomplete.) 
+ global_ignore_missing_imports = manager.options.ignore_missing_imports + if ( + is_module_from_legacy_bundled_package(id) + and global_ignore_missing_imports + and not options.ignore_missing_imports_per_module + and result is ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED + ): + ignore_missing_imports = False + + if skip_diagnose: + raise ModuleNotFound + if caller_state: + if not (ignore_missing_imports or in_partial_package(id, manager)): + module_not_found(manager, caller_line, caller_state, id, result) + raise ModuleNotFound + elif root_source: + # If we can't find a root source it's always fatal. + # TODO: This might hide non-fatal errors from + # root sources processed earlier. + raise CompileError([f"mypy: can't find module '{id}'"]) + else: + raise ModuleNotFound + + +def exist_added_packages(suppressed: list[str], manager: BuildManager, options: Options) -> bool: + """Find if there are any newly added packages that were previously suppressed. + + Exclude everything not in build for follow-imports=skip. + """ + for dep in suppressed: + if dep in manager.source_set.source_modules: + # We don't need to add any special logic for this. If a module + # is added to build, importers will be invalidated by normal mechanism. + continue + path = find_module_simple(dep, manager) + if not path: + continue + if options.follow_imports == "skip" and ( + not path.endswith(".pyi") or options.follow_imports_for_stubs + ): + continue + if "__init__.py" in path: + # It is better to have a bit lenient test, this will only slightly reduce + # performance, while having a too strict test may affect correctness. + return True + return False + + +def find_module_simple(id: str, manager: BuildManager) -> str | None: + """Find a filesystem path for module `id` or `None` if not found.""" + t0 = time.time() + x = manager.find_module_cache.find_module(id, fast_path=True) + manager.add_stats(find_module_time=time.time() - t0, find_module_calls=1) + if isinstance(x, ModuleNotFoundReason): + return None + return x + + +def find_module_with_reason(id: str, manager: BuildManager) -> ModuleSearchResult: + """Find a filesystem path for module `id` or the reason it can't be found.""" + t0 = time.time() + x = manager.find_module_cache.find_module(id, fast_path=False) + manager.add_stats(find_module_time=time.time() - t0, find_module_calls=1) + return x + + +def in_partial_package(id: str, manager: BuildManager) -> bool: + """Check if a missing module can potentially be a part of a package. + + This checks if there is any existing parent __init__.pyi stub that + defines a module-level __getattr__ (a.k.a. partial stub package). + """ + while "." in id: + parent, _ = id.rsplit(".", 1) + if parent in manager.modules: + parent_mod: MypyFile | None = manager.modules[parent] + else: + # Parent is not in build, try quickly if we can find it. 
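+            # A temporary State parses the parent just enough to look at its tree,
+            # without registering modules or reporting errors.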
+ try: + parent_st = State( + id=parent, path=None, source=None, manager=manager, temporary=True + ) + except (ModuleNotFound, CompileError): + parent_mod = None + else: + parent_mod = parent_st.tree + if parent_mod is not None: + # Bail out soon, complete subpackage found + return parent_mod.is_partial_stub_package + id = parent + return False + + +def module_not_found( + manager: BuildManager, + line: int, + caller_state: State, + target: str, + reason: ModuleNotFoundReason, +) -> None: + errors = manager.errors + save_import_context = errors.import_context() + errors.set_import_context(caller_state.import_context) + errors.set_file(caller_state.xpath, caller_state.id, caller_state.options) + if target == "builtins": + errors.report( + line, 0, "Cannot find 'builtins' module. Typeshed appears broken!", blocker=True + ) + errors.raise_error() + else: + daemon = manager.options.fine_grained_incremental + msg, notes = reason.error_message_templates(daemon) + if reason == ModuleNotFoundReason.NOT_FOUND: + code = codes.IMPORT_NOT_FOUND + elif ( + reason == ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS + or reason == ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED + ): + code = codes.IMPORT_UNTYPED + else: + code = codes.IMPORT + errors.report(line, 0, msg.format(module=target), code=code) + + dist = stub_distribution_name(target) + for note in notes: + if "{stub_dist}" in note: + assert dist is not None + note = note.format(stub_dist=dist) + errors.report(line, 0, note, severity="note", only_once=True, code=code) + if reason is ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED: + assert dist is not None + manager.missing_stub_packages.add(dist) + errors.set_import_context(save_import_context) + + +def skipping_module( + manager: BuildManager, line: int, caller_state: State | None, id: str, path: str +) -> None: + """Produce an error for an import ignored due to --follow_imports=error""" + assert caller_state, (id, path) + save_import_context = manager.errors.import_context() + manager.errors.set_import_context(caller_state.import_context) + manager.errors.set_file(caller_state.xpath, caller_state.id, manager.options) + manager.errors.report(line, 0, f'Import of "{id}" ignored', severity="error") + manager.errors.report( + line, + 0, + "(Using --follow-imports=error, module not passed on command line)", + severity="note", + only_once=True, + ) + manager.errors.set_import_context(save_import_context) + + +def skipping_ancestor(manager: BuildManager, id: str, path: str, ancestor_for: State) -> None: + """Produce an error for an ancestor ignored due to --follow_imports=error""" + # TODO: Read the path (the __init__.py file) and return + # immediately if it's empty or only contains comments. + # But beware, some package may be the ancestor of many modules, + # so we'd need to cache the decision. 
+ manager.errors.set_import_context([]) + manager.errors.set_file(ancestor_for.xpath, ancestor_for.id, manager.options) + manager.errors.report( + -1, -1, f'Ancestor package "{id}" ignored', severity="error", only_once=True + ) + manager.errors.report( + -1, + -1, + "(Using --follow-imports=error, submodule passed on command line)", + severity="note", + only_once=True, + ) + + +def log_configuration(manager: BuildManager, sources: list[BuildSource]) -> None: + """Output useful configuration information to LOG and TRACE""" + + config_file = manager.options.config_file + if config_file: + config_file = os.path.abspath(config_file) + + manager.log() + configuration_vars = [ + ("Mypy Version", __version__), + ("Config File", (config_file or "Default")), + ("Configured Executable", manager.options.python_executable or "None"), + ("Current Executable", sys.executable), + ("Cache Dir", manager.options.cache_dir), + ("Compiled", str(not __file__.endswith(".py"))), + ("Exclude", manager.options.exclude), + ] + + for conf_name, conf_value in configuration_vars: + manager.log(f"{conf_name + ':':24}{conf_value}") + + for source in sources: + manager.log(f"{'Found source:':24}{source}") + + # Complete list of searched paths can get very long, put them under TRACE + for path_type, paths in manager.search_paths.asdict().items(): + if not paths: + manager.trace(f"No {path_type}") + continue + + manager.trace(f"{path_type}:") + + for pth in paths: + manager.trace(f" {pth}") + + +# The driver + + +def dispatch(sources: list[BuildSource], manager: BuildManager, stdout: TextIO) -> Graph: + log_configuration(manager, sources) + + t0 = time.time() + graph = load_graph(sources, manager) + + # This is a kind of unfortunate hack to work around some of fine-grained's + # fragility: if we have loaded less than 50% of the specified files from + # cache in fine-grained cache mode, load the graph again honestly. + # In this case, we just turn the cache off entirely, so we don't need + # to worry about some files being loaded and some from cache and so + # that fine-grained mode never *writes* to the cache. + if manager.use_fine_grained_cache() and len(graph) < 0.50 * len(sources): + manager.log("Redoing load_graph without cache because too much was missing") + manager.cache_enabled = False + graph = load_graph(sources, manager) + + for id in graph: + manager.import_map[id] = set(graph[id].dependencies + graph[id].suppressed) + + t1 = time.time() + manager.add_stats( + graph_size=len(graph), + stubs_found=sum(g.path is not None and g.path.endswith(".pyi") for g in graph.values()), + graph_load_time=(t1 - t0), + fm_cache_size=len(manager.find_module_cache.results), + ) + if not graph: + print("Nothing to do?!", file=stdout) + return graph + manager.log(f"Loaded graph with {len(graph)} nodes ({t1 - t0:.3f} sec)") + if manager.options.dump_graph: + dump_graph(graph, stdout) + return graph + + # Fine grained dependencies that didn't have an associated module in the build + # are serialized separately, so we read them after we load the graph. + # We need to read them both for running in daemon mode and if we are generating + # a fine-grained cache (so that we can properly update them incrementally). + # The `read_deps_cache` will also validate + # the deps cache against the loaded individual cache files. 
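+    # If reading or validating the deps cache fails below, the cache is disabled
+    # and dispatch() is redone from scratch without it.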
+ if manager.options.cache_fine_grained or manager.use_fine_grained_cache(): + t2 = time.time() + fg_deps_meta = read_deps_cache(manager, graph) + manager.add_stats(load_fg_deps_time=time.time() - t2) + if fg_deps_meta is not None: + manager.fg_deps_meta = fg_deps_meta + elif manager.stats.get("fresh_metas", 0) > 0: + # Clear the stats so we don't infinite loop because of positive fresh_metas + manager.stats.clear() + # There were some cache files read, but no fine-grained dependencies loaded. + manager.log("Error reading fine-grained dependencies cache -- aborting cache load") + manager.cache_enabled = False + manager.log("Falling back to full run -- reloading graph...") + return dispatch(sources, manager, stdout) + + # If we are loading a fine-grained incremental mode cache, we + # don't want to do a real incremental reprocess of the + # graph---we'll handle it all later. + if not manager.use_fine_grained_cache(): + process_graph(graph, manager) + # Update plugins snapshot. + write_plugins_snapshot(manager) + manager.old_plugins_snapshot = manager.plugins_snapshot + if manager.options.cache_fine_grained or manager.options.fine_grained_incremental: + # If we are running a daemon or are going to write cache for further fine grained use, + # then we need to collect fine grained protocol dependencies. + # Since these are a global property of the program, they are calculated after we + # processed the whole graph. + type_state.add_all_protocol_deps(manager.fg_deps) + if not manager.options.fine_grained_incremental: + rdeps = generate_deps_for_cache(manager, graph) + write_deps_cache(rdeps, manager, graph) + + if manager.options.dump_deps: + # This speeds up startup a little when not using the daemon mode. + from mypy.server.deps import dump_all_dependencies + + dump_all_dependencies( + manager.modules, manager.all_types, manager.options.python_version, manager.options + ) + + return graph + + +class NodeInfo: + """Some info about a node in the graph of SCCs.""" + + def __init__(self, index: int, scc: list[str]) -> None: + self.node_id = "n%d" % index + self.scc = scc + self.sizes: dict[str, int] = {} # mod -> size in bytes + self.deps: dict[str, int] = {} # node_id -> pri + + def dumps(self) -> str: + """Convert to JSON string.""" + total_size = sum(self.sizes.values()) + return "[{}, {}, {},\n {},\n {}]".format( + json.dumps(self.node_id), + json.dumps(total_size), + json.dumps(self.scc), + json.dumps(self.sizes), + json.dumps(self.deps), + ) + + +def dump_timing_stats(path: str, graph: Graph) -> None: + """Dump timing stats for each file in the given graph.""" + with open(path, "w") as f: + for id in sorted(graph): + f.write(f"{id} {graph[id].time_spent_us}\n") + + +def dump_line_checking_stats(path: str, graph: Graph) -> None: + """Dump per-line expression type checking stats.""" + with open(path, "w") as f: + for id in sorted(graph): + if not graph[id].per_line_checking_time_ns: + continue + f.write(f"{id}:\n") + for line in sorted(graph[id].per_line_checking_time_ns): + line_time = graph[id].per_line_checking_time_ns[line] + f.write(f"{line:>5} {line_time/1000:8.1f}\n") + + +def dump_graph(graph: Graph, stdout: TextIO | None = None) -> None: + """Dump the graph as a JSON string to stdout. + + This copies some of the work by process_graph() + (sorted_components() and order_ascc()). 
+ """ + stdout = stdout or sys.stdout + nodes = [] + sccs = sorted_components(graph) + for i, ascc in enumerate(sccs): + scc = order_ascc(graph, ascc.mod_ids) + node = NodeInfo(i, scc) + nodes.append(node) + inv_nodes = {} # module -> node_id + for node in nodes: + for mod in node.scc: + inv_nodes[mod] = node.node_id + for node in nodes: + for mod in node.scc: + state = graph[mod] + size = 0 + if state.path: + try: + size = os.path.getsize(state.path) + except OSError: + pass + node.sizes[mod] = size + for dep in state.dependencies: + if dep in state.priorities: + pri = state.priorities[dep] + if dep in inv_nodes: + dep_id = inv_nodes[dep] + if dep_id != node.node_id and ( + dep_id not in node.deps or pri < node.deps[dep_id] + ): + node.deps[dep_id] = pri + print("[" + ",\n ".join(node.dumps() for node in nodes) + "\n]", file=stdout) + + +def load_graph( + sources: list[BuildSource], + manager: BuildManager, + old_graph: Graph | None = None, + new_modules: list[State] | None = None, +) -> Graph: + """Given some source files, load the full dependency graph. + + If an old_graph is passed in, it is used as the starting point and + modified during graph loading. + + If a new_modules is passed in, any modules that are loaded are + added to the list. This is an argument and not a return value + so that the caller can access it even if load_graph fails. + + As this may need to parse files, this can raise CompileError in case + there are syntax errors. + """ + + graph: Graph = old_graph if old_graph is not None else {} + + # The deque is used to implement breadth-first traversal. + # TODO: Consider whether to go depth-first instead. This may + # affect the order in which we process files within import cycles. + new = new_modules if new_modules is not None else [] + entry_points: set[str] = set() + # Seed the graph with the initial root sources. + for bs in sources: + try: + st = State( + id=bs.module, + path=bs.path, + source=bs.text, + manager=manager, + root_source=not bs.followed, + ) + except ModuleNotFound: + continue + if st.id in graph: + manager.errors.set_file(st.xpath, st.id, manager.options) + manager.errors.report( + -1, + -1, + f'Duplicate module named "{st.id}" (also at "{graph[st.id].xpath}")', + blocker=True, + ) + manager.errors.report( + -1, + -1, + "See https://mypy.readthedocs.io/en/stable/running_mypy.html#mapping-file-paths-to-modules " + "for more info", + severity="note", + ) + manager.errors.report( + -1, + -1, + "Common resolutions include: a) using `--exclude` to avoid checking one of them, " + "b) adding `__init__.py` somewhere, c) using `--explicit-package-bases` or " + "adjusting MYPYPATH", + severity="note", + ) + + manager.errors.raise_error() + graph[st.id] = st + new.append(st) + entry_points.add(bs.module) + + # Note: Running this each time could be slow in the daemon. If it's a problem, we + # can do more work to maintain this incrementally. + seen_files = {st.abspath: st for st in graph.values() if st.path} + + # Collect dependencies. We go breadth-first. + # More nodes might get added to new as we go, but that's fine. + for st in new: + assert st.ancestors is not None + # Strip out indirect dependencies. These will be dealt with + # when they show up as direct dependencies, and there's a + # scenario where they hurt: + # - Suppose A imports B and B imports C. + # - Suppose on the next round: + # - C is deleted; + # - B is updated to remove the dependency on C; + # - A is unchanged. 
+ # - In this case A's cached *direct* dependencies are still valid + # (since direct dependencies reflect the imports found in the source) + # but A's cached *indirect* dependency on C is wrong. + dependencies = [dep for dep in st.dependencies if st.priorities.get(dep) != PRI_INDIRECT] + if not manager.use_fine_grained_cache(): + # TODO: Ideally we could skip here modules that appeared in st.suppressed + # because they are not in build with `follow-imports=skip`. + # This way we could avoid overhead of cloning options in `State.__init__()` + # below to get the option value. This is quite minor performance loss however. + added = [dep for dep in st.suppressed if find_module_simple(dep, manager)] + else: + # During initial loading we don't care about newly added modules, + # they will be taken care of during fine grained update. See also + # comment about this in `State.__init__()`. + added = [] + for dep in st.ancestors + dependencies + st.suppressed: + ignored = dep in st.suppressed_set and dep not in entry_points + if ignored and dep not in added: + manager.missing_modules.add(dep) + elif dep not in graph: + try: + if dep in st.ancestors: + # TODO: Why not 'if dep not in st.dependencies' ? + # Ancestors don't have import context. + newst = State( + id=dep, path=None, source=None, manager=manager, ancestor_for=st + ) + else: + newst = State( + id=dep, + path=None, + source=None, + manager=manager, + caller_state=st, + caller_line=st.dep_line_map.get(dep, 1), + ) + except ModuleNotFound: + if dep in st.dependencies_set: + st.suppress_dependency(dep) + else: + if newst.path: + newst_path = os.path.abspath(newst.path) + + if newst_path in seen_files: + manager.errors.report( + -1, + 0, + "Source file found twice under different module names: " + '"{}" and "{}"'.format(seen_files[newst_path].id, newst.id), + blocker=True, + ) + manager.errors.report( + -1, + 0, + "See https://mypy.readthedocs.io/en/stable/running_mypy.html#mapping-file-paths-to-modules " + "for more info", + severity="note", + ) + manager.errors.report( + -1, + 0, + "Common resolutions include: a) adding `__init__.py` somewhere, " + "b) using `--explicit-package-bases` or adjusting MYPYPATH", + severity="note", + ) + manager.errors.raise_error() + + seen_files[newst_path] = newst + + assert newst.id not in graph, newst.id + graph[newst.id] = newst + new.append(newst) + if dep in graph and dep in st.suppressed_set: + # Previously suppressed file is now visible + st.add_dependency(dep) + # In the loop above we skip indirect dependencies, so to make indirect dependencies behave + # more consistently with regular ones, we suppress them manually here (when needed). + for st in graph.values(): + indirect = [dep for dep in st.dependencies if st.priorities.get(dep) == PRI_INDIRECT] + for dep in indirect: + if dep not in graph: + st.suppress_dependency(dep) + manager.plugin.set_modules(manager.modules) + return graph + + +def order_ascc_ex(graph: Graph, ascc: SCC) -> list[str]: + """Apply extra heuristics on top of order_ascc(). + + This should be used only for actual SCCs, not for "inner" SCCs + we create recursively during ordering of the SCC. Currently, this + has only some special handling for builtin SCC. + """ + scc = order_ascc(graph, ascc.mod_ids) + # Make the order of the SCC that includes 'builtins' and 'typing', + # among other things, predictable. Various things may break if + # the order changes. + if "builtins" in ascc.mod_ids: + scc = sorted(scc, reverse=True) + # If builtins is in the list, move it last. 
(This is a bit of + # a hack, but it's necessary because the builtins module is + # part of a small cycle involving at least {builtins, abc, + # typing}. Of these, builtins must be processed last or else + # some builtin objects will be incompletely processed.) + scc.remove("builtins") + scc.append("builtins") + return scc + + +def find_stale_sccs( + sccs: list[SCC], graph: Graph, manager: BuildManager +) -> tuple[list[SCC], list[SCC]]: + """Split a list of ready SCCs into stale and fresh. + + Fresh SCCs are those where: + * We have valid cache files for all modules in the SCC. + * There are no changes in dependencies (files removed from/added to the build). + * The interface hashes of direct dependents matches those recorded in the cache. + The first and second conditions are verified by is_fresh(). + """ + stale_sccs = [] + fresh_sccs = [] + for ascc in sccs: + stale_scc = {id for id in ascc.mod_ids if not graph[id].is_fresh()} + fresh = not stale_scc + + # Verify that interfaces of dependencies still present in graph are up-to-date (fresh). + stale_deps = set() + for id in ascc.mod_ids: + for dep in graph[id].dep_hashes: + if dep in graph and graph[dep].interface_hash != graph[id].dep_hashes[dep]: + stale_deps.add(dep) + fresh = fresh and not stale_deps + + if fresh: + fresh_msg = "fresh" + elif stale_scc: + fresh_msg = "inherently stale" + if stale_scc != ascc.mod_ids: + fresh_msg += f" ({' '.join(sorted(stale_scc))})" + if stale_deps: + fresh_msg += f" with stale deps ({' '.join(sorted(stale_deps))})" + else: + fresh_msg = f"stale due to deps ({' '.join(sorted(stale_deps))})" + + scc_str = " ".join(ascc.mod_ids) + if fresh: + manager.trace(f"Found {fresh_msg} SCC ({scc_str})") + # If there is at most one file with errors we can skip the ordering to save time. + mods_with_errors = [id for id in ascc.mod_ids if graph[id].error_lines] + if len(mods_with_errors) <= 1: + scc = mods_with_errors + else: + # Use exactly the same order as for stale SCCs for stability. + scc = order_ascc_ex(graph, ascc) + for id in scc: + if graph[id].error_lines: + path = manager.errors.simplify_path(graph[id].xpath) + formatted = manager.errors.format_messages( + path, + deserialize_codes(graph[id].error_lines), + formatter=manager.error_formatter, + ) + manager.flush_errors(path, formatted, False) + fresh_sccs.append(ascc) + else: + size = len(ascc.mod_ids) + if size == 1: + manager.log(f"Scheduling SCC singleton ({scc_str}) as {fresh_msg}") + else: + manager.log("Scheduling SCC of size %d (%s) as %s" % (size, scc_str, fresh_msg)) + stale_sccs.append(ascc) + return stale_sccs, fresh_sccs + + +def process_graph(graph: Graph, manager: BuildManager) -> None: + """Process everything in dependency order.""" + sccs = sorted_components(graph) + manager.log( + "Found %d SCCs; largest has %d nodes" % (len(sccs), max(len(scc.mod_ids) for scc in sccs)) + ) + + scc_by_id = {scc.id: scc for scc in sccs} + manager.scc_by_id = scc_by_id + manager.top_order = [scc.id for scc in sccs] + + # Prime the ready list with leaf SCCs (that have no dependencies). + ready = [] + not_ready = [] + for scc in sccs: + if not scc.deps: + ready.append(scc) + else: + not_ready.append(scc) + + still_working = False + while ready or not_ready or still_working: + stale, fresh = find_stale_sccs(ready, graph, manager) + if stale: + manager.submit(stale) + still_working = True + # We eagerly walk over fresh SCCs to reach as many stale SCCs as soon + # as possible. Only when there are no fresh SCCs, we wait on scheduled stale ones. 
+ # This strategy, similar to a naive strategy in minesweeper game, will allow us + # to leverage parallelism as much as possible. + if fresh: + done = fresh + else: + done, still_working = manager.wait_for_done(graph) + ready = [] + for done_scc in done: + for dependent in done_scc.direct_dependents: + scc_by_id[dependent].not_ready_deps.discard(done_scc.id) + if not scc_by_id[dependent].not_ready_deps: + not_ready.remove(scc_by_id[dependent]) + ready.append(scc_by_id[dependent]) + + +def order_ascc(graph: Graph, ascc: AbstractSet[str], pri_max: int = PRI_INDIRECT) -> list[str]: + """Come up with the ideal processing order within an SCC. + + Using the priorities assigned by all_imported_modules_in_file(), + try to reduce the cycle to a DAG, by omitting arcs representing + dependencies of lower priority. + + In the simplest case, if we have A <--> B where A has a top-level + "import B" (medium priority) but B only has the reverse "import A" + inside a function (low priority), we turn the cycle into a DAG by + dropping the B --> A arc, which leaves only A --> B. + + If all arcs have the same priority, we fall back to sorting by + reverse global order (the order in which modules were first + encountered). + + The algorithm is recursive, as follows: when as arcs of different + priorities are present, drop all arcs of the lowest priority, + identify SCCs in the resulting graph, and apply the algorithm to + each SCC thus found. The recursion is bounded because at each + recursion the spread in priorities is (at least) one less. + + In practice there are only a few priority levels (less than a + dozen) and in the worst case we just carry out the same algorithm + for finding SCCs N times. Thus, the complexity is no worse than + the complexity of the original SCC-finding algorithm -- see + strongly_connected_components() below for a reference. + """ + if len(ascc) == 1: + return list(ascc) + pri_spread = set() + for id in ascc: + state = graph[id] + for dep in state.dependencies: + if dep in ascc: + pri = state.priorities.get(dep, PRI_HIGH) + if pri < pri_max: + pri_spread.add(pri) + if len(pri_spread) == 1: + # Filtered dependencies are uniform -- order by global order. + return sorted(ascc, key=lambda id: -graph[id].order) + pri_max = max(pri_spread) + sccs = sorted_components_inner(graph, ascc, pri_max) + # The recursion is bounded by the len(pri_spread) check above. + return [s for ss in sccs for s in order_ascc(graph, ss, pri_max)] + + +def process_fresh_modules(graph: Graph, modules: list[str], manager: BuildManager) -> None: + """Process the modules in one group of modules from their cached data. + + This can be used to process an SCC of modules. This involves loading the tree (i.e. + module symbol tables) from cache file and then fixing cross-references in the symbols. + """ + t0 = time.time() + for id in modules: + graph[id].load_tree() + t1 = time.time() + for id in modules: + graph[id].fix_cross_refs() + t2 = time.time() + manager.add_stats(process_fresh_time=t2 - t0, load_tree_time=t1 - t0) + + +def process_stale_scc(graph: Graph, ascc: SCC, manager: BuildManager) -> None: + """Process the modules in one SCC from source code.""" + # First verify if all transitive dependencies are loaded in the current process. 
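+    # The loop below walks the SCC dependency graph transitively, starting from
+    # this SCC's direct dependencies: any dependency SCC that is not yet recorded
+    # in manager.done_sccs is collected so that its cached trees can be loaded
+    # before we start type checking this SCC.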
+ missing_sccs = set() + sccs_to_find = ascc.deps.copy() + while sccs_to_find: + dep_scc = sccs_to_find.pop() + if dep_scc in manager.done_sccs or dep_scc in missing_sccs: + continue + missing_sccs.add(dep_scc) + sccs_to_find.update(manager.scc_by_id[dep_scc].deps) + + if missing_sccs: + # Load missing SCCs from cache. + # TODO: speed-up ordering if this causes problems for large builds. + fresh_sccs_to_load = [ + manager.scc_by_id[sid] for sid in manager.top_order if sid in missing_sccs + ] + manager.log(f"Processing {len(fresh_sccs_to_load)} fresh SCCs") + if ( + not manager.options.test_env + and platform.python_implementation() == "CPython" + and manager.gc_freeze_cycles < MAX_GC_FREEZE_CYCLES + ): + # When deserializing cache we create huge amount of new objects, so even + # with our generous GC thresholds, GC is still doing a lot of pointless + # work searching for garbage. So, we temporarily disable it when + # processing fresh SCCs, and then move all the new objects to the oldest + # generation with the freeze()/unfreeze() trick below. This is arguably + # a hack, but it gives huge performance wins for large third-party + # libraries, like torch. + gc.collect() + gc.disable() + for prev_scc in fresh_sccs_to_load: + manager.done_sccs.add(prev_scc.id) + process_fresh_modules(graph, sorted(prev_scc.mod_ids), manager) + if ( + not manager.options.test_env + and platform.python_implementation() == "CPython" + and manager.gc_freeze_cycles < MAX_GC_FREEZE_CYCLES + ): + manager.gc_freeze_cycles += 1 + gc.freeze() + gc.unfreeze() + gc.enable() + + # Process the SCC in stable order. + scc = order_ascc_ex(graph, ascc) + stale = scc + for id in stale: + # We may already have parsed the module, or not. + # If the former, parse_file() is a no-op. + graph[id].parse_file() + if "typing" in scc: + # For historical reasons we need to manually add typing aliases + # for built-in generic collections, see docstring of + # SemanticAnalyzerPass2.add_builtin_aliases for details. + typing_mod = graph["typing"].tree + assert typing_mod, "The typing module was not parsed" + mypy.semanal_main.semantic_analysis_for_scc(graph, scc, manager.errors) + + # Track what modules aren't yet done, so we can finish them as soon + # as possible, saving memory. + unfinished_modules = set(stale) + for id in stale: + graph[id].type_check_first_pass() + if not graph[id].type_checker().deferred_nodes: + unfinished_modules.discard(id) + graph[id].detect_possibly_undefined_vars() + graph[id].finish_passes() + + while unfinished_modules: + for id in stale: + if id not in unfinished_modules: + continue + if not graph[id].type_check_second_pass(): + unfinished_modules.discard(id) + graph[id].detect_possibly_undefined_vars() + graph[id].finish_passes() + for id in stale: + graph[id].generate_unused_ignore_notes() + graph[id].generate_ignore_without_code_notes() + + # Flush errors, and write cache in two phases: first data files, then meta files. 
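+    # The first loop below flushes per-module errors and writes the data files via
+    # write_cache(); the second loop then fills in dep_hashes (the interface hashes
+    # of each module's dependencies, which for dependencies inside this same SCC
+    # are presumably only final once the whole SCC has been written) and writes
+    # the meta files.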
+ meta_tuples = {} + errors_by_id = {} + formatted_by_id = {} + for id in stale: + if graph[id].xpath not in manager.errors.ignored_files: + errors = manager.errors.file_messages(graph[id].xpath) + formatted = manager.errors.format_messages( + graph[id].xpath, errors, formatter=manager.error_formatter + ) + manager.flush_errors(manager.errors.simplify_path(graph[id].xpath), formatted, False) + errors_by_id[id] = errors + formatted_by_id[id] = formatted + meta_tuples[id] = graph[id].write_cache() + graph[id].mark_as_rechecked() + for id in stale: + meta_tuple = meta_tuples[id] + if meta_tuple is None: + continue + meta, meta_file = meta_tuple + meta.dep_hashes = [graph[dep].interface_hash for dep in graph[id].dependencies] + meta.error_lines = serialize_codes(errors_by_id.get(id, [])) + write_cache_meta(meta, manager, meta_file) + manager.done_sccs.add(ascc.id) + + +def prepare_sccs_full( + raw_sccs: Iterator[set[str]], edges: dict[str, list[str]] +) -> dict[SCC, set[SCC]]: + """Turn raw SCC sets into SCC objects and build dependency graph for SCCs.""" + sccs = [SCC(raw_scc) for raw_scc in raw_sccs] + scc_map = {} + for scc in sccs: + for id in scc.mod_ids: + scc_map[id] = scc + scc_deps_map: dict[SCC, set[SCC]] = {} + for scc in sccs: + for id in scc.mod_ids: + scc_deps_map.setdefault(scc, set()).update(scc_map[dep] for dep in edges[id]) + for scc in sccs: + # Remove trivial dependency on itself. + scc_deps_map[scc].discard(scc) + for dep_scc in scc_deps_map[scc]: + scc.deps.add(dep_scc.id) + scc.not_ready_deps.add(dep_scc.id) + return scc_deps_map + + +def sorted_components(graph: Graph) -> list[SCC]: + """Return the graph's SCCs, topologically sorted by dependencies. + + The sort order is from leaves (nodes without dependencies) to + roots (nodes on which no other nodes depend). + """ + # Compute SCCs. + vertices = set(graph) + edges = {id: deps_filtered(graph, vertices, id, PRI_INDIRECT) for id in vertices} + scc_dep_map = prepare_sccs_full(strongly_connected_components(vertices, edges), edges) + # Topsort. + res = [] + for ready in topsort(scc_dep_map): + # Sort the sets in ready by reversed smallest State.order. Examples: + # + # - If ready is [{x}, {y}], x.order == 1, y.order == 2, we get + # [{y}, {x}]. + # + # - If ready is [{a, b}, {c, d}], a.order == 1, b.order == 3, + # c.order == 2, d.order == 4, the sort keys become [1, 2] + # and the result is [{c, d}, {a, b}]. + sorted_ready = sorted(ready, key=lambda scc: -min(graph[id].order for id in scc.mod_ids)) + for scc in sorted_ready: + for dep in scc_dep_map[scc]: + dep.direct_dependents.append(scc.id) + res.extend(sorted_ready) + return res + + +def sorted_components_inner( + graph: Graph, vertices: AbstractSet[str], pri_max: int +) -> list[AbstractSet[str]]: + """Simplified version of sorted_components() to work with sub-graphs. + + This doesn't create SCC objects, and operates with raw sets. This function + also allows filtering dependencies to take into account when building SCCs. + This is used for heuristic ordering of modules within actual SCCs. 
+ """ + edges = {id: deps_filtered(graph, vertices, id, pri_max) for id in vertices} + sccs = list(strongly_connected_components(vertices, edges)) + res = [] + for ready in topsort(prepare_sccs(sccs, edges)): + res.extend(sorted(ready, key=lambda scc: -min(graph[id].order for id in scc))) + return res + + +def deps_filtered(graph: Graph, vertices: AbstractSet[str], id: str, pri_max: int) -> list[str]: + """Filter dependencies for id with pri < pri_max.""" + if id not in vertices: + return [] + state = graph[id] + return [ + dep + for dep in state.dependencies + if dep in vertices and state.priorities.get(dep, PRI_HIGH) < pri_max + ] + + +def missing_stubs_file(cache_dir: str) -> str: + return os.path.join(cache_dir, "missing_stubs") + + +def record_missing_stub_packages(cache_dir: str, missing_stub_packages: set[str]) -> None: + """Write a file containing missing stub packages. + + This allows a subsequent "mypy --install-types" run (without other arguments) + to install missing stub packages. + """ + fnam = missing_stubs_file(cache_dir) + if missing_stub_packages: + with open(fnam, "w") as f: + for pkg in sorted(missing_stub_packages): + f.write(f"{pkg}\n") + else: + if os.path.isfile(fnam): + os.remove(fnam) + + +def is_silent_import_module(manager: BuildManager, path: str) -> bool: + if manager.options.no_silence_site_packages: + return False + # Silence errors in site-package dirs and typeshed + if any(is_sub_path_normabs(path, dir) for dir in manager.search_paths.package_path): + return True + return any(is_sub_path_normabs(path, dir) for dir in manager.search_paths.typeshed_path) + + +def write_undocumented_ref_info( + state: State, metastore: MetadataStore, options: Options, type_map: dict[Expression, Type] +) -> None: + # This exports some dependency information in a rather ad-hoc fashion, which + # can be helpful for some tools. This is all highly experimental and could be + # removed at any time. + + from mypy.refinfo import get_undocumented_ref_info_json + + if not state.tree: + # We need a full AST for this. 
+ return + + _, data_file, _ = get_cache_names(state.id, state.xpath, options) + ref_info_file = ".".join(data_file.split(".")[:-2]) + ".refs.json" + assert not ref_info_file.startswith(".") + + deps_json = get_undocumented_ref_info_json(state.tree, type_map) + metastore.write(ref_info_file, json_dumps(deps_json)) + + +def sources_to_bytes(sources: list[BuildSource]) -> bytes: + source_tuples = [(s.path, s.module, s.text, s.base_dir, s.followed) for s in sources] + buf = WriteBuffer() + write_json(buf, {"sources": source_tuples}) + return buf.getvalue() + + +def sccs_to_bytes(sccs: list[SCC]) -> bytes: + scc_tuples = [(list(scc.mod_ids), scc.id, list(scc.deps)) for scc in sccs] + buf = WriteBuffer() + write_json(buf, {"sccs": scc_tuples}) + return buf.getvalue() + + +def serialize_codes(errs: list[ErrorTuple]) -> list[SerializedError]: + return [ + (path, line, column, end_line, end_column, severity, message, code.code if code else None) + for path, line, column, end_line, end_column, severity, message, code in errs + ] + + +def deserialize_codes(errs: list[SerializedError]) -> list[ErrorTuple]: + return [ + ( + path, + line, + column, + end_line, + end_column, + severity, + message, + codes.error_codes.get(code) if code else None, + ) + for path, line, column, end_line, end_column, severity, message, code in errs + ] diff --git a/.venv/lib/python3.12/site-packages/mypy/cache.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/cache.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..b1c36a2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/cache.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/cache.py b/.venv/lib/python3.12/site-packages/mypy/cache.py new file mode 100644 index 0000000..7755755 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/cache.py @@ -0,0 +1,509 @@ +""" +This module contains high-level logic for fixed format serialization. + +Lower-level parts are implemented in C in mypyc/lib-rt/librt_internal.c +Short summary of low-level functionality: +* integers are automatically serialized as 1, 2, or 4 bytes, or arbitrary length. +* str/bytes are serialized as size (1, 2, or 4 bytes) followed by bytes buffer. +* floats are serialized as C doubles. + +At high-level we add type tags as needed so that our format is self-descriptive. +More precisely: +* False, True, and None are stored as just a tag: 0, 1, 2 correspondingly. +* builtin primitives like int/str/bytes/float are stored as their type tag followed + by bare (low-level) representation of the value. Reserved tag range for primitives is + 3 ... 19. +* generic (heterogeneous) list are stored as tag, followed by bare size, followed by + sequence of tagged values. +* homogeneous lists of primitives are stored as tag, followed by bare size, followed + by sequence of bare values. +* reserved tag range for sequence-like builtins is 20 ... 29 +* currently we have only one mapping-like format: string-keyed dictionary with heterogeneous + values. It is stored as tag, followed by bare size, followed by sequence of pairs: bare + string key followed by tagged value. +* reserved tag range for mapping-like builtins is 30 ... 39 +* there is an additional reserved tag range 40 ... 49 for any other builtin collections. +* custom classes (like types, symbols etc.) are stored as tag, followed by a sequence of + tagged field values, followed by a special end tag 255. 
Names of class fields are + *not* stored, the caller should know the field names and order for the given class tag. +* reserved tag range for symbols (TypeInfo, Var, etc) is 50 ... 79. +* class Instance is the only exception from the above format (since it is the most common one). + It has two extra formats: few most common instances like "builtins.object" are stored as + instance tag followed by a secondary tag, other plain non-generic instances are stored as + instance tag followed by secondary tag followed by fullname as bare string. All generic + readers must handle these. +* reserved tag range for Instance type formats is 80 ... 99, for other types it is 100 ... 149. +* tag 254 is reserved for if we would ever need to extend the tag range to indicated second tag + page. Tags 150 ... 253 are free for everything else (e.g. AST nodes etc). + +General convention is that custom classes implement write() and read() methods for FF +serialization. The write method should write both class tag and end tag. The read method +conventionally *does not* read the start tag (to simplify logic for unions). Known exceptions +are MypyFile.read() and SymbolTableNode.read(), since those two never appear in a union. + +If any of these details change, or if the structure of CacheMeta changes please +bump CACHE_VERSION below. +""" + +from __future__ import annotations + +from collections.abc import Sequence +from typing import Any, Final, Optional, Union +from typing_extensions import TypeAlias as _TypeAlias + +from librt.internal import ( + ReadBuffer as ReadBuffer, + WriteBuffer as WriteBuffer, + read_bool as read_bool, + read_bytes as read_bytes_bare, + read_float as read_float_bare, + read_int as read_int_bare, + read_str as read_str_bare, + read_tag as read_tag, + write_bool as write_bool, + write_bytes as write_bytes_bare, + write_float as write_float_bare, + write_int as write_int_bare, + write_str as write_str_bare, + write_tag as write_tag, +) +from mypy_extensions import u8 + +# High-level cache layout format +CACHE_VERSION: Final = 1 + +SerializedError: _TypeAlias = tuple[Optional[str], int, int, int, int, str, str, Optional[str]] + + +class CacheMeta: + """Class representing cache metadata for a module.""" + + def __init__( + self, + *, + id: str, + path: str, + mtime: int, + size: int, + hash: str, + dependencies: list[str], + data_mtime: int, + data_file: str, + suppressed: list[str], + options: dict[str, object], + dep_prios: list[int], + dep_lines: list[int], + dep_hashes: list[bytes], + interface_hash: bytes, + error_lines: list[SerializedError], + version_id: str, + ignore_all: bool, + plugin_data: Any, + ) -> None: + self.id = id + self.path = path + self.mtime = mtime # source file mtime + self.size = size # source file size + self.hash = hash # source file hash (as a hex string for historical reasons) + self.dependencies = dependencies # names of imported modules + self.data_mtime = data_mtime # mtime of data_file + self.data_file = data_file # path of .data.json or .data.ff + self.suppressed = suppressed # dependencies that weren't imported + self.options = options # build options snapshot + # dep_prios and dep_lines are both aligned with dependencies + suppressed + self.dep_prios = dep_prios + self.dep_lines = dep_lines + # dep_hashes list is aligned with dependencies only + self.dep_hashes = dep_hashes # list of interface_hash for dependencies + self.interface_hash = interface_hash # hash representing the public interface + self.error_lines = error_lines + self.version_id = version_id 
# mypy version for cache invalidation + self.ignore_all = ignore_all # if errors were ignored + self.plugin_data = plugin_data # config data from plugins + + def serialize(self) -> dict[str, Any]: + return { + "id": self.id, + "path": self.path, + "mtime": self.mtime, + "size": self.size, + "hash": self.hash, + "data_mtime": self.data_mtime, + "dependencies": self.dependencies, + "suppressed": self.suppressed, + "options": self.options, + "dep_prios": self.dep_prios, + "dep_lines": self.dep_lines, + "dep_hashes": [dep.hex() for dep in self.dep_hashes], + "interface_hash": self.interface_hash.hex(), + "error_lines": self.error_lines, + "version_id": self.version_id, + "ignore_all": self.ignore_all, + "plugin_data": self.plugin_data, + } + + @classmethod + def deserialize(cls, meta: dict[str, Any], data_file: str) -> CacheMeta | None: + try: + return CacheMeta( + id=meta["id"], + path=meta["path"], + mtime=meta["mtime"], + size=meta["size"], + hash=meta["hash"], + dependencies=meta["dependencies"], + data_mtime=meta["data_mtime"], + data_file=data_file, + suppressed=meta["suppressed"], + options=meta["options"], + dep_prios=meta["dep_prios"], + dep_lines=meta["dep_lines"], + dep_hashes=[bytes.fromhex(dep) for dep in meta["dep_hashes"]], + interface_hash=bytes.fromhex(meta["interface_hash"]), + error_lines=[tuple(err) for err in meta["error_lines"]], + version_id=meta["version_id"], + ignore_all=meta["ignore_all"], + plugin_data=meta["plugin_data"], + ) + except (KeyError, ValueError): + return None + + def write(self, data: WriteBuffer) -> None: + write_str(data, self.id) + write_str(data, self.path) + write_int(data, self.mtime) + write_int(data, self.size) + write_str(data, self.hash) + write_str_list(data, self.dependencies) + write_int(data, self.data_mtime) + write_str_list(data, self.suppressed) + write_json(data, self.options) + write_int_list(data, self.dep_prios) + write_int_list(data, self.dep_lines) + write_bytes_list(data, self.dep_hashes) + write_bytes(data, self.interface_hash) + write_errors(data, self.error_lines) + write_str(data, self.version_id) + write_bool(data, self.ignore_all) + # Plugin data may be not a dictionary, so we use + # a more generic write_json_value() here. + write_json_value(data, self.plugin_data) + + @classmethod + def read(cls, data: ReadBuffer, data_file: str) -> CacheMeta | None: + try: + return CacheMeta( + id=read_str(data), + path=read_str(data), + mtime=read_int(data), + size=read_int(data), + hash=read_str(data), + dependencies=read_str_list(data), + data_mtime=read_int(data), + data_file=data_file, + suppressed=read_str_list(data), + options=read_json(data), + dep_prios=read_int_list(data), + dep_lines=read_int_list(data), + dep_hashes=read_bytes_list(data), + interface_hash=read_bytes(data), + error_lines=read_errors(data), + version_id=read_str(data), + ignore_all=read_bool(data), + plugin_data=read_json_value(data), + ) + except ValueError: + return None + + +# Always use this type alias to refer to type tags. +Tag = u8 + +# Primitives. +LITERAL_FALSE: Final[Tag] = 0 +LITERAL_TRUE: Final[Tag] = 1 +LITERAL_NONE: Final[Tag] = 2 +LITERAL_INT: Final[Tag] = 3 +LITERAL_STR: Final[Tag] = 4 +LITERAL_BYTES: Final[Tag] = 5 +LITERAL_FLOAT: Final[Tag] = 6 +LITERAL_COMPLEX: Final[Tag] = 7 + +# Collections. +LIST_GEN: Final[Tag] = 20 +LIST_INT: Final[Tag] = 21 +LIST_STR: Final[Tag] = 22 +LIST_BYTES: Final[Tag] = 23 +TUPLE_GEN: Final[Tag] = 24 +DICT_STR_GEN: Final[Tag] = 30 + +# Misc classes. 
+EXTRA_ATTRS: Final[Tag] = 150 +DT_SPEC: Final[Tag] = 151 + +END_TAG: Final[Tag] = 255 + + +def read_literal(data: ReadBuffer, tag: Tag) -> int | str | bool | float: + if tag == LITERAL_INT: + return read_int_bare(data) + elif tag == LITERAL_STR: + return read_str_bare(data) + elif tag == LITERAL_FALSE: + return False + elif tag == LITERAL_TRUE: + return True + elif tag == LITERAL_FLOAT: + return read_float_bare(data) + assert False, f"Unknown literal tag {tag}" + + +# There is an intentional asymmetry between read and write for literals because +# None and/or complex values are only allowed in some contexts but not in others. +def write_literal(data: WriteBuffer, value: int | str | bool | float | complex | None) -> None: + if isinstance(value, bool): + write_bool(data, value) + elif isinstance(value, int): + write_tag(data, LITERAL_INT) + write_int_bare(data, value) + elif isinstance(value, str): + write_tag(data, LITERAL_STR) + write_str_bare(data, value) + elif isinstance(value, float): + write_tag(data, LITERAL_FLOAT) + write_float_bare(data, value) + elif isinstance(value, complex): + write_tag(data, LITERAL_COMPLEX) + write_float_bare(data, value.real) + write_float_bare(data, value.imag) + else: + write_tag(data, LITERAL_NONE) + + +def read_int(data: ReadBuffer) -> int: + assert read_tag(data) == LITERAL_INT + return read_int_bare(data) + + +def write_int(data: WriteBuffer, value: int) -> None: + write_tag(data, LITERAL_INT) + write_int_bare(data, value) + + +def read_str(data: ReadBuffer) -> str: + assert read_tag(data) == LITERAL_STR + return read_str_bare(data) + + +def write_str(data: WriteBuffer, value: str) -> None: + write_tag(data, LITERAL_STR) + write_str_bare(data, value) + + +def read_bytes(data: ReadBuffer) -> bytes: + assert read_tag(data) == LITERAL_BYTES + return read_bytes_bare(data) + + +def write_bytes(data: WriteBuffer, value: bytes) -> None: + write_tag(data, LITERAL_BYTES) + write_bytes_bare(data, value) + + +def read_int_opt(data: ReadBuffer) -> int | None: + tag = read_tag(data) + if tag == LITERAL_NONE: + return None + assert tag == LITERAL_INT + return read_int_bare(data) + + +def write_int_opt(data: WriteBuffer, value: int | None) -> None: + if value is not None: + write_tag(data, LITERAL_INT) + write_int_bare(data, value) + else: + write_tag(data, LITERAL_NONE) + + +def read_str_opt(data: ReadBuffer) -> str | None: + tag = read_tag(data) + if tag == LITERAL_NONE: + return None + assert tag == LITERAL_STR + return read_str_bare(data) + + +def write_str_opt(data: WriteBuffer, value: str | None) -> None: + if value is not None: + write_tag(data, LITERAL_STR) + write_str_bare(data, value) + else: + write_tag(data, LITERAL_NONE) + + +def read_int_list(data: ReadBuffer) -> list[int]: + assert read_tag(data) == LIST_INT + size = read_int_bare(data) + return [read_int_bare(data) for _ in range(size)] + + +def write_int_list(data: WriteBuffer, value: list[int]) -> None: + write_tag(data, LIST_INT) + write_int_bare(data, len(value)) + for item in value: + write_int_bare(data, item) + + +def read_str_list(data: ReadBuffer) -> list[str]: + assert read_tag(data) == LIST_STR + size = read_int_bare(data) + return [read_str_bare(data) for _ in range(size)] + + +def write_str_list(data: WriteBuffer, value: Sequence[str]) -> None: + write_tag(data, LIST_STR) + write_int_bare(data, len(value)) + for item in value: + write_str_bare(data, item) + + +def read_bytes_list(data: ReadBuffer) -> list[bytes]: + assert read_tag(data) == LIST_BYTES + size = read_int_bare(data) + return 
[read_bytes_bare(data) for _ in range(size)] + + +def write_bytes_list(data: WriteBuffer, value: Sequence[bytes]) -> None: + write_tag(data, LIST_BYTES) + write_int_bare(data, len(value)) + for item in value: + write_bytes_bare(data, item) + + +def read_str_opt_list(data: ReadBuffer) -> list[str | None]: + assert read_tag(data) == LIST_GEN + size = read_int_bare(data) + return [read_str_opt(data) for _ in range(size)] + + +def write_str_opt_list(data: WriteBuffer, value: list[str | None]) -> None: + write_tag(data, LIST_GEN) + write_int_bare(data, len(value)) + for item in value: + write_str_opt(data, item) + + +Value: _TypeAlias = Union[None, int, str, bool] + +# Our JSON format is somewhat non-standard as we distinguish lists and tuples. +# This is convenient for some internal things, like mypyc plugin and error serialization. +JsonValue: _TypeAlias = Union[ + Value, list["JsonValue"], dict[str, "JsonValue"], tuple["JsonValue", ...] +] + + +def read_json_value(data: ReadBuffer) -> JsonValue: + tag = read_tag(data) + if tag == LITERAL_NONE: + return None + if tag == LITERAL_FALSE: + return False + if tag == LITERAL_TRUE: + return True + if tag == LITERAL_INT: + return read_int_bare(data) + if tag == LITERAL_STR: + return read_str_bare(data) + if tag == LIST_GEN: + size = read_int_bare(data) + return [read_json_value(data) for _ in range(size)] + if tag == TUPLE_GEN: + size = read_int_bare(data) + return tuple(read_json_value(data) for _ in range(size)) + if tag == DICT_STR_GEN: + size = read_int_bare(data) + return {read_str_bare(data): read_json_value(data) for _ in range(size)} + assert False, f"Invalid JSON tag: {tag}" + + +def write_json_value(data: WriteBuffer, value: JsonValue) -> None: + if value is None: + write_tag(data, LITERAL_NONE) + elif isinstance(value, bool): + write_bool(data, value) + elif isinstance(value, int): + write_tag(data, LITERAL_INT) + write_int_bare(data, value) + elif isinstance(value, str): + write_tag(data, LITERAL_STR) + write_str_bare(data, value) + elif isinstance(value, list): + write_tag(data, LIST_GEN) + write_int_bare(data, len(value)) + for val in value: + write_json_value(data, val) + elif isinstance(value, tuple): + write_tag(data, TUPLE_GEN) + write_int_bare(data, len(value)) + for val in value: + write_json_value(data, val) + elif isinstance(value, dict): + write_tag(data, DICT_STR_GEN) + write_int_bare(data, len(value)) + for key in sorted(value): + write_str_bare(data, key) + write_json_value(data, value[key]) + else: + assert False, f"Invalid JSON value: {value}" + + +# These are functions for JSON *dictionaries* specifically. Unfortunately, we +# must use imprecise types here, because the callers use imprecise types. 
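+# For example, write_json(buf, {"a": 1}) emits the DICT_STR_GEN tag, a bare size
+# of 1, the bare key "a", and then the tagged value (a LITERAL_INT tag followed
+# by a bare 1); read_json() below consumes exactly the same sequence.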
+def read_json(data: ReadBuffer) -> dict[str, Any]: + assert read_tag(data) == DICT_STR_GEN + size = read_int_bare(data) + return {read_str_bare(data): read_json_value(data) for _ in range(size)} + + +def write_json(data: WriteBuffer, value: dict[str, Any]) -> None: + write_tag(data, DICT_STR_GEN) + write_int_bare(data, len(value)) + for key in sorted(value): + write_str_bare(data, key) + write_json_value(data, value[key]) + + +def write_errors(data: WriteBuffer, errs: list[SerializedError]) -> None: + write_tag(data, LIST_GEN) + write_int_bare(data, len(errs)) + for path, line, column, end_line, end_column, severity, message, code in errs: + write_tag(data, TUPLE_GEN) + write_str_opt(data, path) + write_int(data, line) + write_int(data, column) + write_int(data, end_line) + write_int(data, end_column) + write_str(data, severity) + write_str(data, message) + write_str_opt(data, code) + + +def read_errors(data: ReadBuffer) -> list[SerializedError]: + assert read_tag(data) == LIST_GEN + result = [] + for _ in range(read_int_bare(data)): + assert read_tag(data) == TUPLE_GEN + result.append( + ( + read_str_opt(data), + read_int(data), + read_int(data), + read_int(data), + read_int(data), + read_str(data), + read_str(data), + read_str_opt(data), + ) + ) + return result diff --git a/.venv/lib/python3.12/site-packages/mypy/checker.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/checker.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..ad4d82d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/checker.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/checker.py b/.venv/lib/python3.12/site-packages/mypy/checker.py new file mode 100644 index 0000000..ad7eb3d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/checker.py @@ -0,0 +1,9445 @@ +"""Mypy type checker.""" + +from __future__ import annotations + +import itertools +from collections import defaultdict +from collections.abc import Iterable, Iterator, Mapping, Sequence, Set as AbstractSet +from contextlib import ExitStack, contextmanager +from typing import ( + Callable, + Final, + Generic, + Literal, + NamedTuple, + Optional, + TypeVar, + Union, + cast, + overload, +) +from typing_extensions import TypeAlias as _TypeAlias, TypeGuard + +import mypy.checkexpr +from mypy import errorcodes as codes, join, message_registry, nodes, operators +from mypy.binder import ConditionalTypeBinder, Frame, get_declaration +from mypy.checker_shared import CheckerScope, TypeCheckerSharedApi, TypeRange +from mypy.checker_state import checker_state +from mypy.checkmember import ( + MemberContext, + analyze_class_attribute_access, + analyze_instance_member_access, + analyze_member_access, + is_instance_var, +) +from mypy.checkpattern import PatternChecker +from mypy.constraints import SUPERTYPE_OF +from mypy.erasetype import erase_type, erase_typevars, remove_instance_last_known_values +from mypy.errorcodes import TYPE_VAR, UNUSED_AWAITABLE, UNUSED_COROUTINE, ErrorCode +from mypy.errors import ( + ErrorInfo, + Errors, + ErrorWatcher, + IterationDependentErrors, + IterationErrorWatcher, + report_internal_error, +) +from mypy.expandtype import expand_type +from mypy.literals import Key, extract_var_from_literal_hash, literal, literal_hash +from mypy.maptype import map_instance_to_supertype +from mypy.meet import is_overlapping_erased_types, is_overlapping_types, meet_types +from mypy.message_registry import ErrorMessage +from mypy.messages import ( + 
SUGGESTED_TEST_FIXTURES, + MessageBuilder, + append_invariance_notes, + append_union_note, + format_type, + format_type_bare, + format_type_distinctly, + make_inferred_type_note, + pretty_seq, +) +from mypy.mro import MroError, calculate_mro +from mypy.nodes import ( + ARG_NAMED, + ARG_POS, + ARG_STAR, + CONTRAVARIANT, + COVARIANT, + FUNC_NO_INFO, + GDEF, + IMPLICITLY_ABSTRACT, + INVARIANT, + IS_ABSTRACT, + LDEF, + LITERAL_TYPE, + MDEF, + NOT_ABSTRACT, + SYMBOL_FUNCBASE_TYPES, + AssertStmt, + AssignmentExpr, + AssignmentStmt, + AwaitExpr, + Block, + BreakStmt, + BytesExpr, + CallExpr, + ClassDef, + ComparisonExpr, + Context, + ContinueStmt, + Decorator, + DelStmt, + DictExpr, + EllipsisExpr, + Expression, + ExpressionStmt, + FloatExpr, + ForStmt, + FuncBase, + FuncDef, + FuncItem, + GlobalDecl, + IfStmt, + Import, + ImportAll, + ImportBase, + ImportFrom, + IndexExpr, + IntExpr, + LambdaExpr, + ListExpr, + Lvalue, + MatchStmt, + MemberExpr, + MypyFile, + NameExpr, + Node, + NonlocalDecl, + OperatorAssignmentStmt, + OpExpr, + OverloadedFuncDef, + OverloadPart, + PassStmt, + PromoteExpr, + RaiseStmt, + RefExpr, + ReturnStmt, + SetExpr, + StarExpr, + Statement, + StrExpr, + SymbolNode, + SymbolTable, + SymbolTableNode, + TempNode, + TryStmt, + TupleExpr, + TypeAlias, + TypeAliasStmt, + TypeInfo, + UnaryExpr, + Var, + WhileStmt, + WithStmt, + YieldExpr, + get_func_def, + is_final_node, +) +from mypy.operators import flip_ops, int_op_to_method, neg_ops +from mypy.options import PRECISE_TUPLE_TYPES, Options +from mypy.patterns import AsPattern, StarredPattern +from mypy.plugin import Plugin +from mypy.plugins import dataclasses as dataclasses_plugin +from mypy.scope import Scope +from mypy.semanal import is_trivial_body, refers_to_fullname, set_callable_name +from mypy.semanal_enum import ENUM_BASES, ENUM_SPECIAL_PROPS +from mypy.semanal_shared import SemanticAnalyzerCoreInterface +from mypy.sharedparse import BINARY_MAGIC_METHODS +from mypy.state import state +from mypy.subtypes import ( + find_member, + infer_class_variances, + is_callable_compatible, + is_equivalent, + is_more_precise, + is_proper_subtype, + is_same_type, + is_subtype, + restrict_subtype_away, + unify_generic_callable, +) +from mypy.traverser import TraverserVisitor, all_return_statements, has_return_statement +from mypy.treetransform import TransformVisitor +from mypy.typeanal import check_for_explicit_any, has_any_from_unimported_type, make_optional_type +from mypy.typeops import ( + bind_self, + can_have_shared_disjoint_base, + coerce_to_literal, + custom_special_method, + erase_def_to_union_or_bound, + erase_to_bound, + erase_to_union_or_bound, + false_only, + fixup_partial_type, + function_type, + is_literal_type_like, + is_singleton_type, + make_simplified_union, + true_only, + try_expanding_sum_type_to_union, + try_getting_int_literals_from_type, + try_getting_str_literals, + try_getting_str_literals_from_type, + tuple_fallback, + type_object_type, +) +from mypy.types import ( + ANY_STRATEGY, + MYPYC_NATIVE_INT_NAMES, + NOT_IMPLEMENTED_TYPE_NAMES, + OVERLOAD_NAMES, + AnyType, + BoolTypeQuery, + CallableType, + DeletedType, + ErasedType, + FunctionLike, + Instance, + LiteralType, + NoneType, + Overloaded, + PartialType, + ProperType, + TupleType, + Type, + TypeAliasType, + TypedDictType, + TypeGuardedType, + TypeOfAny, + TypeTranslator, + TypeType, + TypeVarId, + TypeVarLikeType, + TypeVarTupleType, + TypeVarType, + UnboundType, + UninhabitedType, + UnionType, + UnpackType, + find_unpack_in_list, + flatten_nested_unions, 
+ get_proper_type, + get_proper_types, + instance_cache, + is_literal_type, + is_named_instance, +) +from mypy.types_utils import is_overlapping_none, remove_optional, store_argument_type, strip_type +from mypy.typetraverser import TypeTraverserVisitor +from mypy.typevars import fill_typevars, fill_typevars_with_any, has_no_typevars +from mypy.util import is_dunder, is_sunder +from mypy.visitor import NodeVisitor + +T = TypeVar("T") + +DEFAULT_LAST_PASS: Final = 2 # Pass numbers start at 0 + +# Maximum length of fixed tuple types inferred when narrowing from variadic tuples. +MAX_PRECISE_TUPLE_SIZE: Final = 8 + +DeferredNodeType: _TypeAlias = Union[FuncDef, OverloadedFuncDef, Decorator] +FineGrainedDeferredNodeType: _TypeAlias = Union[FuncDef, MypyFile, OverloadedFuncDef] + + +# A node which is postponed to be processed during the next pass. +# In normal mode one can defer functions and methods (also decorated and/or overloaded) +# but not lambda expressions. Nested functions can't be deferred -- only top-level functions +# and methods of classes not defined within a function can be deferred. +class DeferredNode(NamedTuple): + node: DeferredNodeType + # And its TypeInfo (for semantic analysis self type handling) + active_typeinfo: TypeInfo | None + + +# Same as above, but for fine-grained mode targets. Only top-level functions/methods +# and module top levels are allowed as such. +class FineGrainedDeferredNode(NamedTuple): + node: FineGrainedDeferredNodeType + active_typeinfo: TypeInfo | None + + +# Data structure returned by find_isinstance_check representing +# information learned from the truth or falsehood of a condition. The +# dict maps nodes representing expressions like 'a[0].x' to their +# refined types under the assumption that the condition has a +# particular truth value. A value of None means that the condition can +# never have that truth value. + +# NB: The keys of this dict are nodes in the original source program, +# which are compared by reference equality--effectively, being *the +# same* expression of the program, not just two identical expressions +# (such as two references to the same variable). TODO: it would +# probably be better to have the dict keyed by the nodes' literal_hash +# field instead. +TypeMap: _TypeAlias = Optional[dict[Expression, Type]] + + +# Keeps track of partial types in a single scope. In fine-grained incremental +# mode partial types initially defined at the top level cannot be completed in +# a function, and we use the 'is_function' attribute to enforce this. +class PartialTypeScope(NamedTuple): + map: dict[Var, Context] + is_function: bool + is_local: bool + + +class LocalTypeMap: + """Store inferred types into a temporary type map (returned). + + This can be used to perform type checking "experiments" without + affecting exported types (which are used by mypyc). + """ + + def __init__(self, chk: TypeChecker) -> None: + self.chk = chk + + def __enter__(self) -> dict[Expression, Type]: + temp_type_map: dict[Expression, Type] = {} + self.chk._type_maps.append(temp_type_map) + return temp_type_map + + def __exit__(self, exc_type: object, exc_val: object, exc_tb: object) -> Literal[False]: + self.chk._type_maps.pop() + return False + + +class TypeChecker(NodeVisitor[None], TypeCheckerSharedApi): + """Mypy type checker. + + Type check mypy source files that have been semantically analyzed. + + You must create a separate instance for each source file. + """ + + # Are we type checking a stub? 
+ is_stub = False + # Error message reporter + errors: Errors + # Utility for generating messages + msg: MessageBuilder + # Types of type checked nodes. The first item is the "master" type + # map that will store the final, exported types. Additional items + # are temporary type maps used during type inference, and these + # will be eventually popped and either discarded or merged into + # the master type map. + # + # Avoid accessing this directly, but prefer the lookup_type(), + # has_type() etc. helpers instead. + _type_maps: list[dict[Expression, Type]] + + # Helper for managing conditional types + binder: ConditionalTypeBinder + # Helper for type checking expressions + _expr_checker: mypy.checkexpr.ExpressionChecker + + pattern_checker: PatternChecker + + tscope: Scope + scope: CheckerScope + # Innermost enclosing type + type: TypeInfo | None + # Stack of function return types + return_types: list[Type] + # Flags; true for dynamically typed functions + dynamic_funcs: list[bool] + # Stack of collections of variables with partial types + partial_types: list[PartialTypeScope] + # Vars for which partial type errors are already reported + # (to avoid logically duplicate errors with different error context). + partial_reported: set[Var] + # Short names of Var nodes whose previous inferred type has been widened via assignment. + # NOTE: The names might not be unique, they are only for debugging purposes. + widened_vars: list[str] + globals: SymbolTable + modules: dict[str, MypyFile] + # Nodes that couldn't be checked because some types weren't available. We'll run + # another pass and try these again. + deferred_nodes: list[DeferredNode] + # Type checking pass number (0 = first pass) + pass_num = 0 + # Last pass number to take + last_pass = DEFAULT_LAST_PASS + # Have we deferred the current function? If yes, don't infer additional + # types during this pass within the function. + current_node_deferred = False + # Is this file a typeshed stub? + is_typeshed_stub = False + options: Options + # Used for collecting inferred attribute types so that they can be checked + # for consistency. + inferred_attribute_types: dict[Var, Type] | None = None + # Don't infer partial None types if we are processing assignment from Union + no_partial_types: bool = False + # Extra module references not detected during semantic analysis (these are rare cases + # e.g. access to class-level import via instance). + module_refs: set[str] + # A map from variable nodes to a snapshot of the frame ids of the + # frames that were active when the variable was declared. This can + # be used to determine nearest common ancestor frame of a variable's + # declaration and the current frame, which lets us determine if it + # was declared in a different branch of the same `if` statement + # (if that frame is a conditional_frame). + var_decl_frames: dict[Var, set[int]] + + # Plugin that provides special type checking rules for specific library + # functions such as open(), etc. + plugin: Plugin + + # A helper state to produce unique temporary names on demand. + _unique_id: int + # Fake concrete type used when checking variance + _variance_dummy_type: Instance | None + + def __init__( + self, + errors: Errors, + modules: dict[str, MypyFile], + options: Options, + tree: MypyFile, + path: str, + plugin: Plugin, + per_line_checking_time_ns: dict[int, int], + ) -> None: + """Construct a type checker. + + Use errors to report type check errors. 
+ """ + self.errors = errors + self.modules = modules + self.options = options + self.tree = tree + self.path = path + self.msg = MessageBuilder(errors, modules) + self.plugin = plugin + self.tscope = Scope() + self.scope = CheckerScope(tree) + self.binder = ConditionalTypeBinder(options) + self.globals = tree.names + self.type = None + self.return_types = [] + self.dynamic_funcs = [] + self.partial_types = [] + self.partial_reported = set() + self.var_decl_frames = {} + self.deferred_nodes = [] + self.widened_vars = [] + self._type_maps = [{}] + self.module_refs = set() + self.pass_num = 0 + self.current_node_deferred = False + self.is_stub = tree.is_stub + self.is_typeshed_stub = tree.is_typeshed_file(options) + self.inferred_attribute_types = None + self.allow_constructor_cache = True + self.local_type_map = LocalTypeMap(self) + + # If True, process function definitions. If False, don't. This is used + # for processing module top levels in fine-grained incremental mode. + self.recurse_into_functions = True + # This internal flag is used to track whether we a currently type-checking + # a final declaration (assignment), so that some errors should be suppressed. + # Should not be set manually, use get_final_context/enter_final_context instead. + # NOTE: we use the context manager to avoid "threading" an additional `is_final_def` + # argument through various `checker` and `checkmember` functions. + self._is_final_def = False + + # Track when we enter an overload implementation. Some checks should not be applied + # to the implementation signature when specific overloads are available. + # Use `enter_overload_impl` to modify. + self.overload_impl_stack: list[OverloadPart] = [] + + # This flag is set when we run type-check or attribute access check for the purpose + # of giving a note on possibly missing "await". It is used to avoid infinite recursion. + self.checking_missing_await = False + + # While this is True, allow passing an abstract class where Type[T] is expected. + # although this is technically unsafe, this is desirable in some context, for + # example when type-checking class decorators. + self.allow_abstract_call = False + + # Child checker objects for specific AST node types + self._expr_checker = mypy.checkexpr.ExpressionChecker( + self, self.msg, self.plugin, per_line_checking_time_ns + ) + + self.pattern_checker = PatternChecker(self, self.msg, self.plugin, options) + self._unique_id = 0 + self._variance_dummy_type = None + + @property + def expr_checker(self) -> mypy.checkexpr.ExpressionChecker: + return self._expr_checker + + @property + def type_context(self) -> list[Type | None]: + return self._expr_checker.type_context + + def reset(self) -> None: + """Cleanup stale state that might be left over from a typechecking run. + + This allows us to reuse TypeChecker objects in fine-grained + incremental mode. + """ + # TODO: verify this is still actually worth it over creating new checkers + self.partial_reported.clear() + self.module_refs.clear() + self.binder = ConditionalTypeBinder(self.options) + self._type_maps[1:] = [] + self._type_maps[0].clear() + self.expr_checker.reset() + self.deferred_nodes = [] + self.partial_types = [] + self.inferred_attribute_types = None + self.scope = CheckerScope(self.tree) + + def check_first_pass(self) -> None: + """Type check the entire file, but defer functions with unresolved references. + + Unresolved references are forward references to variables + whose types haven't been inferred yet. 
They may occur later + in the same file or in a different file that's being processed + later (usually due to an import cycle). + + Deferred functions will be processed by check_second_pass(). + """ + self.recurse_into_functions = True + with state.strict_optional_set(self.options.strict_optional), checker_state.set(self): + self.errors.set_file( + self.path, self.tree.fullname, scope=self.tscope, options=self.options + ) + with self.tscope.module_scope(self.tree.fullname): + with self.enter_partial_types(), self.binder.top_frame_context(): + for d in self.tree.defs: + if self.binder.is_unreachable(): + if not self.should_report_unreachable_issues(): + break + if not self.is_noop_for_reachability(d): + self.msg.unreachable_statement(d) + break + else: + self.accept(d) + + assert not self.current_node_deferred + + all_ = self.globals.get("__all__") + if all_ is not None and all_.type is not None: + all_node = all_.node + assert all_node is not None + seq_str = self.named_generic_type( + "typing.Sequence", [self.named_type("builtins.str")] + ) + if not is_subtype(all_.type, seq_str): + str_seq_s, all_s = format_type_distinctly( + seq_str, all_.type, options=self.options + ) + self.fail( + message_registry.ALL_MUST_BE_SEQ_STR.format(str_seq_s, all_s), all_node + ) + + def check_second_pass( + self, + todo: Sequence[DeferredNode | FineGrainedDeferredNode] | None = None, + *, + allow_constructor_cache: bool = True, + ) -> bool: + """Run second or following pass of type checking. + + This goes through deferred nodes, returning True if there were any. + """ + self.allow_constructor_cache = allow_constructor_cache + self.recurse_into_functions = True + with state.strict_optional_set(self.options.strict_optional), checker_state.set(self): + if not todo and not self.deferred_nodes: + return False + self.errors.set_file( + self.path, self.tree.fullname, scope=self.tscope, options=self.options + ) + with self.tscope.module_scope(self.tree.fullname): + self.pass_num += 1 + if not todo: + todo = self.deferred_nodes + else: + assert not self.deferred_nodes + self.deferred_nodes = [] + done: set[DeferredNodeType | FineGrainedDeferredNodeType] = set() + for node, active_typeinfo in todo: + if node in done: + continue + # This is useful for debugging: + # print("XXX in pass %d, class %s, function %s" % + # (self.pass_num, type_name, node.fullname or node.name)) + done.add(node) + with ExitStack() as stack: + if active_typeinfo: + stack.enter_context(self.tscope.class_scope(active_typeinfo)) + stack.enter_context(self.scope.push_class(active_typeinfo)) + self.check_partial(node) + return True + + def check_partial(self, node: DeferredNodeType | FineGrainedDeferredNodeType) -> None: + self.widened_vars = [] + if isinstance(node, MypyFile): + self.check_top_level(node) + else: + self.recurse_into_functions = True + with self.binder.top_frame_context(): + self.accept(node) + + def check_top_level(self, node: MypyFile) -> None: + """Check only the top-level of a module, skipping function definitions.""" + self.recurse_into_functions = False + with self.enter_partial_types(): + with self.binder.top_frame_context(): + for d in node.defs: + d.accept(self) + + assert not self.current_node_deferred + # TODO: Handle __all__ + + def defer_node(self, node: DeferredNodeType, enclosing_class: TypeInfo | None) -> None: + """Defer a node for processing during next type-checking pass. 
+ + Args: + node: function/method being deferred + enclosing_class: for methods, the class where the method is defined + NOTE: this can't handle nested functions/methods. + """ + # We don't freeze the entire scope since only top-level functions and methods + # can be deferred. Only module/class level scope information is needed. + # Module-level scope information is preserved in the TypeChecker instance. + self.deferred_nodes.append(DeferredNode(node, enclosing_class)) + + def handle_cannot_determine_type(self, name: str, context: Context) -> None: + node = self.scope.top_level_function() + if self.pass_num < self.last_pass and isinstance(node, FuncDef): + # Don't report an error yet. Just defer. Note that we don't defer + # lambdas because they are coupled to the surrounding function + # through the binder and the inferred type of the lambda, so it + # would get messy. + enclosing_class = self.scope.enclosing_class(node) + self.defer_node(node, enclosing_class) + # Set a marker so that we won't infer additional types in this + # function. Any inferred types could be bogus, because there's at + # least one type that we don't know. + self.current_node_deferred = True + else: + self.msg.cannot_determine_type(name, context) + + def accept(self, stmt: Statement) -> None: + """Type check a node in the given type context.""" + try: + stmt.accept(self) + except Exception as err: + report_internal_error(err, self.errors.file, stmt.line, self.errors, self.options) + + def accept_loop( + self, + body: Statement, + else_body: Statement | None = None, + *, + exit_condition: Expression | None = None, + on_enter_body: Callable[[], None] | None = None, + ) -> None: + """Repeatedly type check a loop body until the frame doesn't change.""" + + # The outer frame accumulates the results of all iterations: + with self.binder.frame_context(can_skip=False, conditional_frame=True): + # Check for potential decreases in the number of partial types so as not to stop the + # iteration too early: + partials_old = sum(len(pts.map) for pts in self.partial_types) + # Check if assignment widened the inferred type of a variable; in this case we + # need to iterate again (we only do one extra iteration, since this could go + # on without bound otherwise) + widened_old = len(self.widened_vars) + + iter_errors = IterationDependentErrors() + iter = 1 + while True: + with self.binder.frame_context(can_skip=True, break_frame=2, continue_frame=1): + if on_enter_body is not None: + on_enter_body() + + with IterationErrorWatcher(self.msg.errors, iter_errors): + self.accept(body) + + partials_new = sum(len(pts.map) for pts in self.partial_types) + widened_new = len(self.widened_vars) + # Perform multiple iterations if something changed that might affect + # inferred types. Also limit the number of iterations. The limits are + # somewhat arbitrary, but they were chosen to 1) avoid slowdown from + # multiple iterations in common cases and 2) support common, valid use + # cases. Limits are needed since otherwise we could infer infinitely + # complex types. 
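+                # In other words: stop once nothing happened in this iteration that
+                # could still change inferred types: no new partial types, no binder
+                # changes on the last pop (unless we already did more than 3
+                # iterations), and no newly widened variables (unless we already did
+                # more than 1 iteration).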
+ if ( + (partials_new == partials_old) + and (not self.binder.last_pop_changed or iter > 3) + and (widened_new == widened_old or iter > 1) + ): + break + partials_old = partials_new + widened_old = widened_new + iter += 1 + if iter == 20: + raise RuntimeError("Too many iterations when checking a loop") + + self.msg.iteration_dependent_errors(iter_errors) + + # If exit_condition is set, assume it must be False on exit from the loop: + if exit_condition: + _, else_map = self.find_isinstance_check(exit_condition) + self.push_type_map(else_map) + + # Check the else body: + if else_body: + self.accept(else_body) + + # + # Definitions + # + + def visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None: + if not self.recurse_into_functions: + return + with self.tscope.function_scope(defn): + self._visit_overloaded_func_def(defn) + + def _visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None: + num_abstract = 0 + if not defn.items: + # In this case we have already complained about none of these being + # valid overloads. + return + if len(defn.items) == 1: + self.fail(message_registry.MULTIPLE_OVERLOADS_REQUIRED, defn) + + if defn.is_property: + # HACK: Infer the type of the property. + assert isinstance(defn.items[0], Decorator) + self.visit_decorator(defn.items[0]) + if defn.items[0].var.is_settable_property: + # Perform a reduced visit just to infer the actual setter type. + self.visit_decorator_inner(defn.setter, skip_first_item=True) + setter_type = defn.setter.var.type + # Check if the setter can accept two positional arguments. + any_type = AnyType(TypeOfAny.special_form) + fallback_setter_type = CallableType( + arg_types=[any_type, any_type], + arg_kinds=[ARG_POS, ARG_POS], + arg_names=[None, None], + ret_type=any_type, + fallback=self.named_type("builtins.function"), + ) + if setter_type and not is_subtype(setter_type, fallback_setter_type): + self.fail("Invalid property setter signature", defn.setter.func) + setter_type = self.extract_callable_type(setter_type, defn) + if not isinstance(setter_type, CallableType) or len(setter_type.arg_types) != 2: + # TODO: keep precise type for callables with tricky but valid signatures. + setter_type = fallback_setter_type + defn.items[0].var.setter_type = setter_type + if isinstance(defn.type, Overloaded): + # Update legacy property type for decorated properties. + getter_type = self.extract_callable_type(defn.items[0].var.type, defn) + if getter_type is not None: + getter_type.definition = defn.items[0] + defn.type.items[0] = getter_type + for i, fdef in enumerate(defn.items): + assert isinstance(fdef, Decorator) + if defn.is_property: + assert isinstance(defn.items[0], Decorator) + settable = defn.items[0].var.is_settable_property + # Do not visit the second time the items we checked above. + if (settable and i > 1) or (not settable and i > 0): + self.check_func_item(fdef.func, name=fdef.func.name, allow_empty=True) + else: + # Perform full check for real overloads to infer type of all decorated + # overload variants. 
+ self.visit_decorator_inner(fdef, allow_empty=True) + if fdef.func.abstract_status in (IS_ABSTRACT, IMPLICITLY_ABSTRACT): + num_abstract += 1 + if num_abstract not in (0, len(defn.items)): + self.fail(message_registry.INCONSISTENT_ABSTRACT_OVERLOAD, defn) + if defn.impl: + with self.enter_overload_impl(defn.impl): + defn.impl.accept(self) + if not defn.is_property: + self.check_overlapping_overloads(defn) + if defn.type is None: + item_types = [] + for item in defn.items: + assert isinstance(item, Decorator) + item_type = self.extract_callable_type(item.var.type, item) + if item_type is not None: + item_type.definition = item + item_types.append(item_type) + if item_types: + defn.type = Overloaded(item_types) + elif defn.type is None: + # We store the getter type as an overall overload type, as some + # code paths are getting property type this way. + assert isinstance(defn.items[0], Decorator) + var_type = self.extract_callable_type(defn.items[0].var.type, defn) + if not isinstance(var_type, CallableType): + # Construct a fallback type, invalid types should be already reported. + any_type = AnyType(TypeOfAny.special_form) + var_type = CallableType( + arg_types=[any_type], + arg_kinds=[ARG_POS], + arg_names=[None], + ret_type=any_type, + fallback=self.named_type("builtins.function"), + ) + defn.type = Overloaded([var_type]) + # Check override validity after we analyzed current definition. + if defn.info: + found_method_base_classes = self.check_method_override(defn) + if ( + defn.is_explicit_override + and not found_method_base_classes + and found_method_base_classes is not None + # If the class has Any fallback, we can't be certain that a method + # is really missing - it might come from unfollowed import. + and not defn.info.fallback_to_any + ): + self.msg.no_overridable_method(defn.name, defn) + self.check_explicit_override_decorator(defn, found_method_base_classes, defn.impl) + self.check_inplace_operator_method(defn) + + @contextmanager + def enter_overload_impl(self, impl: OverloadPart) -> Iterator[None]: + self.overload_impl_stack.append(impl) + try: + yield + finally: + assert self.overload_impl_stack.pop() == impl + + def extract_callable_type(self, inner_type: Type | None, ctx: Context) -> CallableType | None: + """Get type as seen by an overload item caller.""" + inner_type = get_proper_type(inner_type) + outer_type: FunctionLike | None = None + if inner_type is None or isinstance(inner_type, AnyType): + return None + if isinstance(inner_type, TypeVarLikeType): + inner_type = get_proper_type(inner_type.upper_bound) + if isinstance(inner_type, TypeType): + inner_type = get_proper_type( + self.expr_checker.analyze_type_type_callee(inner_type.item, ctx) + ) + + if isinstance(inner_type, FunctionLike): + outer_type = inner_type + elif isinstance(inner_type, Instance): + inner_call = get_proper_type( + analyze_member_access( + name="__call__", + typ=inner_type, + context=ctx, + is_lvalue=False, + is_super=False, + is_operator=True, + original_type=inner_type, + chk=self, + ) + ) + if isinstance(inner_call, FunctionLike): + outer_type = inner_call + elif isinstance(inner_type, UnionType): + union_type = make_simplified_union(inner_type.items) + if isinstance(union_type, UnionType): + items = [] + for item in union_type.items: + callable_item = self.extract_callable_type(item, ctx) + if callable_item is None: + break + items.append(callable_item) + else: + joined_type = get_proper_type(join.join_type_list(items)) + if isinstance(joined_type, FunctionLike): + outer_type = joined_type + 
else: + return self.extract_callable_type(union_type, ctx) + + if outer_type is None: + self.msg.not_callable(inner_type, ctx) + return None + if isinstance(outer_type, Overloaded): + return None + + assert isinstance(outer_type, CallableType) + return outer_type + + def check_overlapping_overloads(self, defn: OverloadedFuncDef) -> None: + # At this point we should have set the impl already, and all remaining + # items are decorators + + if ( + self.options.ignore_errors + or self.msg.errors.file in self.msg.errors.ignored_files + or (self.is_typeshed_stub and self.options.test_env) + ): + # This is a little hacky, however, the quadratic check here is really expensive, this + # method has no side effects, so we should skip it if we aren't going to report + # anything. In some other places we swallow errors in stubs, but this error is very + # useful for stubs! + return + + # Compute some info about the implementation (if it exists) for use below + impl_type: CallableType | None = None + if defn.impl: + if isinstance(defn.impl, FuncDef): + inner_type: Type | None = defn.impl.type + elif isinstance(defn.impl, Decorator): + inner_type = defn.impl.var.type + else: + assert False, "Impl isn't the right type" + + # This can happen if we've got an overload with a different + # decorator or if the implementation is untyped -- we gave up on the types. + impl_type = self.extract_callable_type(inner_type, defn.impl) + + is_descriptor_get = defn.info and defn.name == "__get__" + for i, item in enumerate(defn.items): + assert isinstance(item, Decorator) + sig1 = self.extract_callable_type(item.var.type, item) + if sig1 is None: + continue + + for j, item2 in enumerate(defn.items[i + 1 :]): + assert isinstance(item2, Decorator) + sig2 = self.extract_callable_type(item2.var.type, item2) + if sig2 is None: + continue + + if not are_argument_counts_overlapping(sig1, sig2): + continue + + if overload_can_never_match(sig1, sig2): + self.msg.overloaded_signature_will_never_match(i + 1, i + j + 2, item2.func) + elif not is_descriptor_get: + # Note: we force mypy to check overload signatures in strict-optional mode + # so we don't incorrectly report errors when a user tries typing an overload + # that happens to have a 'if the argument is None' fallback. + # + # For example, the following is fine in strict-optional mode but would throw + # the unsafe overlap error when strict-optional is disabled: + # + # @overload + # def foo(x: None) -> int: ... + # @overload + # def foo(x: str) -> str: ... + # + # See Python 2's map function for a concrete example of this kind of overload. + current_class = self.scope.active_class() + type_vars = current_class.defn.type_vars if current_class else [] + with state.strict_optional_set(True): + if is_unsafe_overlapping_overload_signatures(sig1, sig2, type_vars): + flip_note = ( + j == 0 + and not is_unsafe_overlapping_overload_signatures( + sig2, sig1, type_vars + ) + and not overload_can_never_match(sig2, sig1) + ) + self.msg.overloaded_signatures_overlap( + i + 1, i + j + 2, flip_note, item.func + ) + + if impl_type is not None: + assert defn.impl is not None + + # This is what we want from implementation, it should accept all arguments + # of an overload, but the return types should go the opposite way. 
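+                # Illustrative example (hypothetical overloads): for
+                #
+                #     @overload
+                #     def f(x: int) -> int: ...
+                #     @overload
+                #     def f(x: str) -> str: ...
+                #     def f(x: int | str) -> int | str: ...
+                #
+                # the implementation is accepted: its parameter is wide enough for
+                # each variant, and each variant's return type is a subtype of the
+                # implementation's return type.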
+ if is_callable_compatible( + impl_type, + sig1, + is_compat=is_subtype, + is_proper_subtype=False, + is_compat_return=lambda l, r: is_subtype(r, l), + ): + continue + # If the above check didn't work, we repeat some key steps in + # is_callable_compatible() to give a better error message. + + # We perform a unification step that's very similar to what + # 'is_callable_compatible' does -- the only difference is that + # we check and see if the impl_type's return value is a + # *supertype* of the overload alternative, not a *subtype*. + # + # This is to match the direction the implementation's return + # needs to be compatible in. + if impl_type.variables: + impl: CallableType | None = unify_generic_callable( + # Normalize both before unifying + impl_type.with_unpacked_kwargs(), + sig1.with_unpacked_kwargs(), + ignore_return=False, + return_constraint_direction=SUPERTYPE_OF, + ) + if impl is None: + self.msg.overloaded_signatures_typevar_specific(i + 1, defn.impl) + continue + else: + impl = impl_type + + # Prevent extra noise from inconsistent use of @classmethod by copying + # the first arg from the method being checked against. + if sig1.arg_types and defn.info: + impl = impl.copy_modified(arg_types=[sig1.arg_types[0]] + impl.arg_types[1:]) + + # Is the overload alternative's arguments subtypes of the implementation's? + if not is_callable_compatible( + impl, sig1, is_compat=is_subtype, is_proper_subtype=False, ignore_return=True + ): + self.msg.overloaded_signatures_arg_specific(i + 1, defn.impl) + + # Is the overload alternative's return type a subtype of the implementation's? + if not ( + is_subtype(sig1.ret_type, impl.ret_type) + or is_subtype(impl.ret_type, sig1.ret_type) + ): + self.msg.overloaded_signatures_ret_specific(i + 1, defn.impl) + + # Here's the scoop about generators and coroutines. + # + # There are two kinds of generators: classic generators (functions + # with `yield` or `yield from` in the body) and coroutines + # (functions declared with `async def`). The latter are specified + # in PEP 492 and only available in Python >= 3.5. + # + # Classic generators can be parameterized with three types: + # - ty is the Yield type (the type of y in `yield y`) + # - tc is the type reCeived by yield (the type of c in `c = yield`). + # - tr is the Return type (the type of r in `return r`) + # + # A classic generator must define a return type that's either + # `Generator[ty, tc, tr]`, Iterator[ty], or Iterable[ty] (or + # object or Any). If tc/tr are not given, both are None. + # + # A coroutine must define a return type corresponding to tr; the + # other two are unconstrained. The "external" return type (seen + # by the caller) is Awaitable[tr]. + # + # In addition, there's the synthetic type AwaitableGenerator: it + # inherits from both Awaitable and Generator and can be used both + # in `yield from` and in `await`. This type is set automatically + # for functions decorated with `@types.coroutine` or + # `@asyncio.coroutine`. Its single parameter corresponds to tr. + # + # PEP 525 adds a new type, the asynchronous generator, which was + # first released in Python 3.6. Async generators are `async def` + # functions that can also `yield` values. They can be parameterized + # with two types, ty and tc, because they cannot return a value. 
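+    #
+    # For illustration (hypothetical functions), with all parameters spelled out:
+    #
+    #     def gen() -> Generator[int, str, bool]:
+    #         received = yield 1      # yields int (ty), receives str (tc)
+    #         return True             # returns bool (tr)
+    #
+    #     async def agen() -> AsyncGenerator[int, str]:
+    #         received = yield 1      # ty is int, tc is str; no return value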
+    #
+    # There are several useful methods, each taking a type t and a
+    # flag c indicating whether it's for a generator or coroutine:
+    #
+    # - is_generator_return_type(t, c) returns whether t is a Generator,
+    #   Iterator, Iterable (if not c), or Awaitable (if c), or
+    #   AwaitableGenerator (regardless of c).
+    # - is_async_generator_return_type(t) returns whether t is an
+    #   AsyncGenerator.
+    # - get_generator_yield_type(t, c) returns ty.
+    # - get_generator_receive_type(t, c) returns tc.
+    # - get_generator_return_type(t, c) returns tr.
+
+    def is_generator_return_type(self, typ: Type, is_coroutine: bool) -> bool:
+        """Is `typ` a valid type for a generator/coroutine?
+
+        True if `typ` is a *supertype* of Generator or Awaitable.
+        Also true if it's *exactly* AwaitableGenerator (modulo type parameters).
+        """
+        typ = get_proper_type(typ)
+        if is_coroutine:
+            # This means we're in Python 3.5 or later.
+            at = self.named_generic_type("typing.Awaitable", [AnyType(TypeOfAny.special_form)])
+            if is_subtype(at, typ):
+                return True
+        else:
+            any_type = AnyType(TypeOfAny.special_form)
+            gt = self.named_generic_type("typing.Generator", [any_type, any_type, any_type])
+            if is_subtype(gt, typ):
+                return True
+        return isinstance(typ, Instance) and typ.type.fullname == "typing.AwaitableGenerator"
+
+    def is_async_generator_return_type(self, typ: Type) -> bool:
+        """Is `typ` a valid type for an async generator?
+
+        True if `typ` is a supertype of AsyncGenerator.
+        """
+        try:
+            any_type = AnyType(TypeOfAny.special_form)
+            agt = self.named_generic_type("typing.AsyncGenerator", [any_type, any_type])
+        except KeyError:
+            # we're running on a version of typing that doesn't have AsyncGenerator yet
+            return False
+        return is_subtype(agt, typ)
+
+    def get_generator_yield_type(self, return_type: Type, is_coroutine: bool) -> Type:
+        """Given the declared return type of a generator (t), return the type it yields (ty)."""
+        return_type = get_proper_type(return_type)
+
+        if isinstance(return_type, AnyType):
+            return AnyType(TypeOfAny.from_another_any, source_any=return_type)
+        elif isinstance(return_type, UnionType):
+            return make_simplified_union(
+                [self.get_generator_yield_type(item, is_coroutine) for item in return_type.items]
+            )
+        elif not self.is_generator_return_type(
+            return_type, is_coroutine
+        ) and not self.is_async_generator_return_type(return_type):
+            # If the function doesn't have a proper Generator (or
+            # Awaitable) return type, anything is permissible.
+            return AnyType(TypeOfAny.from_error)
+        elif not isinstance(return_type, Instance):
+            # Same as above, but written as a separate branch so the typechecker can understand.
+            return AnyType(TypeOfAny.from_error)
+        elif return_type.type.fullname == "typing.Awaitable":
+            # Awaitable: ty is Any.
+            return AnyType(TypeOfAny.special_form)
+        elif return_type.args:
+            # AwaitableGenerator, Generator, AsyncGenerator, Iterator, or Iterable; ty is args[0].
+            ret_type = return_type.args[0]
+            # TODO not best fix, better have dedicated yield token
+            return ret_type
+        else:
+            # If the function's declared supertype of Generator has no type
+            # parameters (i.e. is `object`), then the yielded values can't
+            # be accessed so any type is acceptable. IOW, ty is Any.
+ # (However, see https://github.com/python/mypy/issues/1933) + return AnyType(TypeOfAny.special_form) + + def get_generator_receive_type(self, return_type: Type, is_coroutine: bool) -> Type: + """Given a declared generator return type (t), return the type its yield receives (tc).""" + return_type = get_proper_type(return_type) + + if isinstance(return_type, AnyType): + return AnyType(TypeOfAny.from_another_any, source_any=return_type) + elif isinstance(return_type, UnionType): + return make_simplified_union( + [self.get_generator_receive_type(item, is_coroutine) for item in return_type.items] + ) + elif not self.is_generator_return_type( + return_type, is_coroutine + ) and not self.is_async_generator_return_type(return_type): + # If the function doesn't have a proper Generator (or + # Awaitable) return type, anything is permissible. + return AnyType(TypeOfAny.from_error) + elif not isinstance(return_type, Instance): + # Same as above, but written as a separate branch so the typechecker can understand. + return AnyType(TypeOfAny.from_error) + elif return_type.type.fullname == "typing.Awaitable": + # Awaitable, AwaitableGenerator: tc is Any. + return AnyType(TypeOfAny.special_form) + elif ( + return_type.type.fullname in ("typing.Generator", "typing.AwaitableGenerator") + and len(return_type.args) >= 3 + ): + # Generator: tc is args[1]. + return return_type.args[1] + elif return_type.type.fullname == "typing.AsyncGenerator" and len(return_type.args) >= 2: + return return_type.args[1] + else: + # `return_type` is a supertype of Generator, so callers won't be able to send it + # values. IOW, tc is None. + return NoneType() + + def get_coroutine_return_type(self, return_type: Type) -> Type: + return_type = get_proper_type(return_type) + if isinstance(return_type, AnyType): + return AnyType(TypeOfAny.from_another_any, source_any=return_type) + assert isinstance(return_type, Instance), "Should only be called on coroutine functions." + # Note: return type is the 3rd type parameter of Coroutine. + return return_type.args[2] + + def get_generator_return_type(self, return_type: Type, is_coroutine: bool) -> Type: + """Given the declared return type of a generator (t), return the type it returns (tr).""" + return_type = get_proper_type(return_type) + + if isinstance(return_type, AnyType): + return AnyType(TypeOfAny.from_another_any, source_any=return_type) + elif isinstance(return_type, UnionType): + return make_simplified_union( + [self.get_generator_return_type(item, is_coroutine) for item in return_type.items] + ) + elif not self.is_generator_return_type(return_type, is_coroutine): + # If the function doesn't have a proper Generator (or + # Awaitable) return type, anything is permissible. + return AnyType(TypeOfAny.from_error) + elif not isinstance(return_type, Instance): + # Same as above, but written as a separate branch so the typechecker can understand. + return AnyType(TypeOfAny.from_error) + elif return_type.type.fullname == "typing.Awaitable" and len(return_type.args) == 1: + # Awaitable: tr is args[0]. + return return_type.args[0] + elif ( + return_type.type.fullname in ("typing.Generator", "typing.AwaitableGenerator") + and len(return_type.args) >= 3 + ): + # AwaitableGenerator, Generator: tr is args[2]. + return return_type.args[2] + else: + # We have a supertype of Generator (Iterator, Iterable, object) + # Treat `Iterator[X]` as a shorthand for `Generator[X, Any, None]`. 
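+            # For illustration: in a generator annotated `-> Iterator[int]`, tr is
+            # None, so a bare `return` is fine there but `return 1` is an error.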
+ return NoneType() + + def visit_func_def(self, defn: FuncDef) -> None: + if not self.recurse_into_functions: + return + with self.tscope.function_scope(defn): + self._visit_func_def(defn) + + def _visit_func_def(self, defn: FuncDef) -> None: + """Type check a function definition.""" + self.check_func_item(defn, name=defn.name) + if defn.info: + if not defn.is_overload and not defn.is_decorated: + # If the definition is the implementation for an + # overload, the legality of the override has already + # been typechecked, and decorated methods will be + # checked when the decorator is. + found_method_base_classes = self.check_method_override(defn) + self.check_explicit_override_decorator(defn, found_method_base_classes) + self.check_inplace_operator_method(defn) + if defn.original_def: + # Override previous definition. + new_type = self.function_type(defn) + self.check_func_def_override(defn, new_type) + + def check_func_item( + self, + defn: FuncItem, + type_override: CallableType | None = None, + name: str | None = None, + allow_empty: bool = False, + ) -> None: + """Type check a function. + + If type_override is provided, use it as the function type. + """ + self.dynamic_funcs.append(defn.is_dynamic() and not type_override) + + enclosing_node_deferred = self.current_node_deferred + with self.enter_partial_types(is_function=True): + typ = self.function_type(defn) + if type_override: + typ = type_override.copy_modified(line=typ.line, column=typ.column) + if isinstance(typ, CallableType): + with self.enter_attribute_inference_context(): + self.check_func_def(defn, typ, name, allow_empty) + else: + raise RuntimeError("Not supported") + + self.dynamic_funcs.pop() + self.current_node_deferred = enclosing_node_deferred + + if name == "__exit__": + self.check__exit__return_type(defn) + # TODO: the following logic should move to the dataclasses plugin + # https://github.com/python/mypy/issues/15515 + if name == "__post_init__": + if dataclasses_plugin.is_processed_dataclass(defn.info): + dataclasses_plugin.check_post_init(self, defn, defn.info) + + def check_func_def_override(self, defn: FuncDef, new_type: FunctionLike) -> None: + assert defn.original_def is not None + if isinstance(defn.original_def, FuncDef): + # Function definition overrides function definition. + old_type = self.function_type(defn.original_def) + if not is_same_type(new_type, old_type): + self.msg.incompatible_conditional_function_def(defn, old_type, new_type) + else: + # Function definition overrides a variable initialized via assignment or a + # decorated function. + orig_type = defn.original_def.type + if orig_type is None: + # If other branch is unreachable, we don't type check it and so we might + # not have a type for the original definition + return + if isinstance(orig_type, PartialType): + if orig_type.type is None: + # Ah this is a partial type. Give it the type of the function. + orig_def = defn.original_def + if isinstance(orig_def, Decorator): + var = orig_def.var + else: + var = orig_def + partial_types = self.find_partial_types(var) + if partial_types is not None: + var.type = new_type + del partial_types[var] + else: + # Trying to redefine something like partial empty list as function. 
+ self.fail(message_registry.INCOMPATIBLE_REDEFINITION, defn) + else: + name_expr = NameExpr(defn.name) + name_expr.node = defn.original_def + self.binder.assign_type(name_expr, new_type, orig_type) + self.check_subtype( + new_type, + orig_type, + defn, + message_registry.INCOMPATIBLE_REDEFINITION, + "redefinition with type", + "original type", + ) + + @contextmanager + def enter_attribute_inference_context(self) -> Iterator[None]: + old_types = self.inferred_attribute_types + self.inferred_attribute_types = {} + yield None + self.inferred_attribute_types = old_types + + def check_func_def( + self, defn: FuncItem, typ: CallableType, name: str | None, allow_empty: bool = False + ) -> None: + """Type check a function definition.""" + # Expand type variables with value restrictions to ordinary types. + self.check_typevar_defaults(typ.variables) + expanded = self.expand_typevars(defn, typ) + original_typ = typ + for item, typ in expanded: + old_binder = self.binder + self.binder = ConditionalTypeBinder(self.options) + with self.binder.top_frame_context(): + defn.expanded.append(item) + + # We may be checking a function definition or an anonymous + # function. In the first case, set up another reference with the + # precise type. + if isinstance(item, FuncDef): + fdef = item + # Check if __init__ has an invalid return type. + if ( + fdef.info + and fdef.name in ("__init__", "__init_subclass__") + and not isinstance( + get_proper_type(typ.ret_type), (NoneType, UninhabitedType) + ) + and not self.dynamic_funcs[-1] + ): + self.fail( + message_registry.MUST_HAVE_NONE_RETURN_TYPE.format(fdef.name), item + ) + + # Check validity of __new__ signature + if fdef.info and fdef.name == "__new__": + self.check___new___signature(fdef, typ) + + self.check_for_missing_annotations(fdef) + if self.options.disallow_any_unimported: + if fdef.type and isinstance(fdef.type, CallableType): + ret_type = fdef.type.ret_type + if has_any_from_unimported_type(ret_type): + self.msg.unimported_type_becomes_any("Return type", ret_type, fdef) + for idx, arg_type in enumerate(fdef.type.arg_types): + if has_any_from_unimported_type(arg_type): + prefix = f'Argument {idx + 1} to "{fdef.name}"' + self.msg.unimported_type_becomes_any(prefix, arg_type, fdef) + check_for_explicit_any( + fdef.type, self.options, self.is_typeshed_stub, self.msg, context=fdef + ) + + if name: # Special method names + if ( + defn.info + and self.is_reverse_op_method(name) + and defn not in self.overload_impl_stack + ): + self.check_reverse_op_method(item, typ, name, defn) + elif name in ("__getattr__", "__getattribute__"): + self.check_getattr_method(typ, defn, name) + elif name == "__setattr__": + self.check_setattr_method(typ, defn) + + # Refuse contravariant return type variable + if isinstance(typ.ret_type, TypeVarType): + if typ.ret_type.variance == CONTRAVARIANT: + self.fail( + message_registry.RETURN_TYPE_CANNOT_BE_CONTRAVARIANT, typ.ret_type + ) + self.check_unbound_return_typevar(typ) + elif ( + isinstance(original_typ.ret_type, TypeVarType) and original_typ.ret_type.values + ): + # Since type vars with values are expanded, the return type is changed + # to a raw value. This is a hack to get it back. + self.check_unbound_return_typevar(original_typ) + + # Check that Generator functions have the appropriate return type. 
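+                # Illustrative example (hypothetical function): a `def` containing
+                # `yield` but annotated `-> int` is rejected here, while annotations
+                # such as `-> Iterator[int]`, `-> Iterable[int]` or
+                # `-> Generator[int, None, None]` are accepted (and
+                # `-> AsyncGenerator[int, None]` for async generators).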
+ if defn.is_generator: + if defn.is_async_generator: + if not self.is_async_generator_return_type(typ.ret_type): + self.fail( + message_registry.INVALID_RETURN_TYPE_FOR_ASYNC_GENERATOR, typ + ) + else: + if not self.is_generator_return_type(typ.ret_type, defn.is_coroutine): + self.fail(message_registry.INVALID_RETURN_TYPE_FOR_GENERATOR, typ) + + # Fix the type if decorated with `@types.coroutine` or `@asyncio.coroutine`. + if defn.is_awaitable_coroutine: + # Update the return type to AwaitableGenerator. + # (This doesn't exist in typing.py, only in typing.pyi.) + t = typ.ret_type + c = defn.is_coroutine + ty = self.get_generator_yield_type(t, c) + tc = self.get_generator_receive_type(t, c) + if c: + tr = self.get_coroutine_return_type(t) + else: + tr = self.get_generator_return_type(t, c) + ret_type = self.named_generic_type( + "typing.AwaitableGenerator", [ty, tc, tr, t] + ) + typ = typ.copy_modified(ret_type=ret_type) + defn.type = typ + + # Push return type. + self.return_types.append(typ.ret_type) + + with self.scope.push_function(defn): + # We temporary push the definition to get the self type as + # visible from *inside* of this function/method. + ref_type: Type | None = self.scope.active_self_type() + + if typ.type_is: + arg_index = 0 + # For methods and classmethods, we want the second parameter + if ref_type is not None and defn.has_self_or_cls_argument: + arg_index = 1 + if arg_index < len(typ.arg_types) and not is_subtype( + typ.type_is, typ.arg_types[arg_index] + ): + self.fail( + message_registry.NARROWED_TYPE_NOT_SUBTYPE.format( + format_type(typ.type_is, self.options), + format_type(typ.arg_types[arg_index], self.options), + ), + item, + ) + + # Store argument types. + found_self = False + if isinstance(defn, FuncDef) and not defn.is_decorated: + found_self = self.require_correct_self_argument(typ, defn) + for i in range(len(typ.arg_types)): + arg_type = typ.arg_types[i] + if isinstance(arg_type, TypeVarType): + # Refuse covariant parameter type variables + # TODO: check recursively for inner type variables + if ( + arg_type.variance == COVARIANT + and defn.name not in ("__init__", "__new__", "__post_init__") + and not is_private(defn.name) # private methods are not inherited + and (i != 0 or not found_self) + ): + ctx: Context = arg_type + if ctx.line < 0: + ctx = typ + self.fail(message_registry.FUNCTION_PARAMETER_CANNOT_BE_COVARIANT, ctx) + # Need to store arguments again for the expanded item. + store_argument_type(item, i, typ, self.named_generic_type) + + # Type check initialization expressions. + body_is_trivial = is_trivial_body(defn.body) + self.check_default_args(item, body_is_trivial) + + # Type check body in a new scope. + with self.binder.top_frame_context(): + # Copy some type narrowings from an outer function when it seems safe enough + # (i.e. we can't find an assignment that might change the type of the + # variable afterwards). + new_frame: Frame | None = None + for frame in old_binder.frames: + for key, narrowed_type in frame.types.items(): + key_var = extract_var_from_literal_hash(key) + if key_var is not None and not self.is_var_redefined_in_outer_context( + key_var, defn.line + ): + # It seems safe to propagate the type narrowing to a nested scope. + if new_frame is None: + new_frame = self.binder.push_frame() + new_frame.types[key] = narrowed_type + self.binder.declarations[key] = old_binder.declarations[key] + + if self.options.allow_redefinition_new and not self.is_stub: + # Add formal argument types to the binder. 
+ for arg in defn.arguments: + # TODO: Add these directly using a fast path (possibly "put") + v = arg.variable + if v.type is not None: + n = NameExpr(v.name) + n.node = v + self.binder.assign_type(n, v.type, v.type) + + with self.scope.push_function(defn): + # We suppress reachability warnings for empty generator functions + # (return; yield) which have a "yield" that's unreachable by definition + # since it's only there to promote the function into a generator function. + # + # We also suppress reachability warnings when we use TypeVars with value + # restrictions: we only want to report a warning if a certain statement is + # marked as being suppressed in *all* of the expansions, but we currently + # have no good way of doing this. + # + # TODO: Find a way of working around this limitation + if _is_empty_generator_function(item) or len(expanded) >= 2: + self.binder.suppress_unreachable_warnings() + # When checking a third-party library, we can skip function body, + # if during semantic analysis we found that there are no attributes + # defined via self here. + if ( + not ( + self.options.ignore_errors + or self.msg.errors.file in self.msg.errors.ignored_files + ) + or self.options.preserve_asts + or not isinstance(defn, FuncDef) + or defn.has_self_attr_def + ): + self.accept(item.body) + unreachable = self.binder.is_unreachable() + if new_frame is not None: + self.binder.pop_frame(True, 0) + + if not unreachable: + if defn.is_generator or is_named_instance( + self.return_types[-1], "typing.AwaitableGenerator" + ): + return_type = self.get_generator_return_type( + self.return_types[-1], defn.is_coroutine + ) + elif defn.is_coroutine: + return_type = self.get_coroutine_return_type(self.return_types[-1]) + else: + return_type = self.return_types[-1] + return_type = get_proper_type(return_type) + + allow_empty = allow_empty or self.options.allow_empty_bodies + + show_error = ( + not body_is_trivial + or + # Allow empty bodies for abstract methods, overloads, in tests and stubs. + ( + not allow_empty + and not ( + isinstance(defn, FuncDef) and defn.abstract_status != NOT_ABSTRACT + ) + and not self.is_stub + ) + ) + + # Ignore plugin generated methods, these usually don't need any bodies. + if defn.info is not FUNC_NO_INFO and ( + defn.name not in defn.info.names or defn.info.names[defn.name].plugin_generated + ): + show_error = False + + # Ignore also definitions that appear in `if TYPE_CHECKING: ...` blocks. + # These can't be called at runtime anyway (similar to plugin-generated). + if isinstance(defn, FuncDef) and defn.is_mypy_only: + show_error = False + + # We want to minimize the fallout from checking empty bodies + # that was absent in many mypy versions. + if body_is_trivial and is_subtype(NoneType(), return_type): + show_error = False + + may_be_abstract = ( + body_is_trivial + and defn.info is not FUNC_NO_INFO + and defn.info.metaclass_type is not None + and defn.info.metaclass_type.type.has_base("abc.ABCMeta") + ) + + if self.options.warn_no_return: + if ( + not self.current_node_deferred + and not isinstance(return_type, (NoneType, AnyType)) + and show_error + ): + # Control flow fell off the end of a function that was + # declared to return a non-None type. 
+ if isinstance(return_type, UninhabitedType): + # This is a NoReturn function + msg = message_registry.INVALID_IMPLICIT_RETURN + else: + msg = message_registry.MISSING_RETURN_STATEMENT + if body_is_trivial: + msg = msg._replace(code=codes.EMPTY_BODY) + self.fail(msg, defn) + if may_be_abstract: + self.note(message_registry.EMPTY_BODY_ABSTRACT, defn) + elif show_error: + msg = message_registry.INCOMPATIBLE_RETURN_VALUE_TYPE + if body_is_trivial: + msg = msg._replace(code=codes.EMPTY_BODY) + # similar to code in check_return_stmt + if ( + not self.check_subtype( + subtype_label="implicitly returns", + subtype=NoneType(), + supertype_label="expected", + supertype=return_type, + context=defn, + msg=msg, + ) + and may_be_abstract + ): + self.note(message_registry.EMPTY_BODY_ABSTRACT, defn) + + self.return_types.pop() + + self.binder = old_binder + + def require_correct_self_argument(self, func: Type, defn: FuncDef) -> bool: + func = get_proper_type(func) + if not isinstance(func, CallableType): + return False + + # Do not report errors for untyped methods in classes nested in untyped funcs. + if not ( + self.options.check_untyped_defs + or len(self.dynamic_funcs) < 2 + or not self.dynamic_funcs[-2] + or not defn.is_dynamic() + ): + return bool(func.arg_types) + + with self.scope.push_function(defn): + # We temporary push the definition to get the self type as + # visible from *inside* of this function/method. + ref_type: Type | None = self.scope.active_self_type() + if ref_type is None: + return False + + if not defn.has_self_or_cls_argument or ( + func.arg_kinds and func.arg_kinds[0] in [nodes.ARG_STAR, nodes.ARG_STAR2] + ): + return False + + if not func.arg_types: + self.fail( + 'Method must have at least one argument. Did you forget the "self" argument?', defn + ) + return False + + arg_type = func.arg_types[0] + if defn.is_class or defn.name == "__new__": + ref_type = mypy.types.TypeType.make_normalized(ref_type) + if is_same_type(arg_type, ref_type): + return True + + # This level of erasure matches the one in checkmember.check_self_arg(), + # better keep these two checks consistent. + erased = get_proper_type(erase_typevars(erase_to_bound(arg_type))) + if not is_subtype(ref_type, erased, ignore_type_params=True): + if ( + isinstance(erased, Instance) + and erased.type.is_protocol + or isinstance(erased, TypeType) + and isinstance(erased.item, Instance) + and erased.item.type.is_protocol + ): + # We allow the explicit self-type to be not a supertype of + # the current class if it is a protocol. For such cases + # the consistency check will be performed at call sites. + msg = None + elif func.arg_names[0] in {"self", "cls"}: + msg = message_registry.ERASED_SELF_TYPE_NOT_SUPERTYPE.format( + erased.str_with_options(self.options), ref_type.str_with_options(self.options) + ) + else: + msg = message_registry.MISSING_OR_INVALID_SELF_TYPE + if msg: + self.fail(msg, defn) + return True + + def is_var_redefined_in_outer_context(self, v: Var, after_line: int) -> bool: + """Can the variable be assigned to at module top level or outer function? + + Note that this doesn't do a full CFG analysis but uses a line number based + heuristic that isn't correct in some (rare) cases. + """ + if v.is_final: + # Final vars are definitely never reassigned. 
+ return False + + outers = self.tscope.outer_functions() + if not outers: + # Top-level function -- outer context is top level, and we can't reason about + # globals + return True + for outer in outers: + if isinstance(outer, FuncDef): + if find_last_var_assignment_line(outer.body, v) >= after_line: + return True + return False + + def check_unbound_return_typevar(self, typ: CallableType) -> None: + """Fails when the return typevar is not defined in arguments.""" + if isinstance(typ.ret_type, TypeVarType) and typ.ret_type in typ.variables: + arg_type_visitor = CollectArgTypeVarTypes() + for argtype in typ.arg_types: + argtype.accept(arg_type_visitor) + + if typ.ret_type not in arg_type_visitor.arg_types: + self.fail(message_registry.UNBOUND_TYPEVAR, typ.ret_type, code=TYPE_VAR) + upper_bound = get_proper_type(typ.ret_type.upper_bound) + if not ( + isinstance(upper_bound, Instance) + and upper_bound.type.fullname == "builtins.object" + ): + self.note( + "Consider using the upper bound " + f"{format_type(typ.ret_type.upper_bound, self.options)} instead", + context=typ.ret_type, + ) + + def check_default_args(self, item: FuncItem, body_is_trivial: bool) -> None: + for arg in item.arguments: + if arg.initializer is None: + continue + if body_is_trivial and isinstance(arg.initializer, EllipsisExpr): + continue + name = arg.variable.name + msg = "Incompatible default for " + if name.startswith("__tuple_arg_"): + msg += f"tuple argument {name[12:]}" + else: + msg += f'argument "{name}"' + if ( + not self.options.implicit_optional + and isinstance(arg.initializer, NameExpr) + and arg.initializer.fullname == "builtins.None" + ): + notes = [ + "PEP 484 prohibits implicit Optional. " + "Accordingly, mypy has changed its default to no_implicit_optional=True", + "Use https://github.com/hauntsaninja/no_implicit_optional to automatically " + "upgrade your codebase", + ] + else: + notes = None + self.check_simple_assignment( + arg.variable.type, + arg.initializer, + context=arg.initializer, + msg=ErrorMessage(msg, code=codes.ASSIGNMENT), + lvalue_name="argument", + rvalue_name="default", + notes=notes, + ) + + def is_forward_op_method(self, method_name: str) -> bool: + return method_name in operators.reverse_op_methods + + def is_reverse_op_method(self, method_name: str) -> bool: + return method_name in operators.reverse_op_method_set + + def check_for_missing_annotations(self, fdef: FuncItem) -> None: + # Check for functions with unspecified/not fully specified types. 
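+        # Illustrative examples (hypothetical functions): with --disallow-untyped-defs,
+        # `def f(x): ...` is flagged as missing type annotations; with
+        # --disallow-incomplete-defs, `def f(x, y: int) -> None: ...` is flagged
+        # because only part of the signature is annotated.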
+ def is_unannotated_any(t: Type) -> bool: + if not isinstance(t, ProperType): + return False + return isinstance(t, AnyType) and t.type_of_any == TypeOfAny.unannotated + + has_explicit_annotation = isinstance(fdef.type, CallableType) and any( + not is_unannotated_any(t) for t in fdef.type.arg_types + [fdef.type.ret_type] + ) + + show_untyped = not self.is_typeshed_stub or self.options.warn_incomplete_stub + check_incomplete_defs = self.options.disallow_incomplete_defs and has_explicit_annotation + if show_untyped and (self.options.disallow_untyped_defs or check_incomplete_defs): + if fdef.type is None and self.options.disallow_untyped_defs: + if not fdef.arguments or ( + len(fdef.arguments) == 1 + and (fdef.arg_names[0] == "self" or fdef.arg_names[0] == "cls") + ): + self.fail(message_registry.RETURN_TYPE_EXPECTED, fdef) + if not has_return_statement(fdef) and not fdef.is_generator: + self.note( + 'Use "-> None" if function does not return a value', + fdef, + code=codes.NO_UNTYPED_DEF, + ) + else: + self.fail(message_registry.FUNCTION_TYPE_EXPECTED, fdef) + elif isinstance(fdef.type, CallableType): + ret_type = get_proper_type(fdef.type.ret_type) + if is_unannotated_any(ret_type): + self.fail(message_registry.RETURN_TYPE_EXPECTED, fdef) + elif fdef.is_generator: + if is_unannotated_any( + self.get_generator_return_type(ret_type, fdef.is_coroutine) + ): + self.fail(message_registry.RETURN_TYPE_EXPECTED, fdef) + elif fdef.is_coroutine and isinstance(ret_type, Instance): + if is_unannotated_any(self.get_coroutine_return_type(ret_type)): + self.fail(message_registry.RETURN_TYPE_EXPECTED, fdef) + if any(is_unannotated_any(t) for t in fdef.type.arg_types): + self.fail(message_registry.ARGUMENT_TYPE_EXPECTED, fdef) + + def check___new___signature(self, fdef: FuncDef, typ: CallableType) -> None: + self_type = fill_typevars_with_any(fdef.info) + bound_type = bind_self(typ, self_type, is_classmethod=True) + # Check that __new__ (after binding cls) returns an instance + # type (or any). + if fdef.info.is_metaclass(): + # This is a metaclass, so it must return a new unrelated type. + self.check_subtype( + bound_type.ret_type, + self.type_type(), + fdef, + message_registry.INVALID_NEW_TYPE, + "returns", + "but must return a subtype of", + ) + elif not isinstance( + get_proper_type(bound_type.ret_type), + (AnyType, Instance, TupleType, UninhabitedType, LiteralType), + ): + self.fail( + message_registry.NON_INSTANCE_NEW_TYPE.format( + format_type(bound_type.ret_type, self.options) + ), + fdef, + ) + else: + # And that it returns a subtype of the class + self.check_subtype( + bound_type.ret_type, + self_type, + fdef, + message_registry.INVALID_NEW_TYPE, + "returns", + "but must return a subtype of", + ) + + def check_reverse_op_method( + self, defn: FuncItem, reverse_type: CallableType, reverse_name: str, context: Context + ) -> None: + """Check a reverse operator method such as __radd__.""" + # Decides whether it's worth calling check_overlapping_op_methods(). + + # This used to check for some very obscure scenario. It now + # just decides whether it's worth calling + # check_overlapping_op_methods(). 
+
+        assert defn.info
+
+        # First check for a valid signature
+        method_type = CallableType(
+            [AnyType(TypeOfAny.special_form), AnyType(TypeOfAny.special_form)],
+            [nodes.ARG_POS, nodes.ARG_POS],
+            [None, None],
+            AnyType(TypeOfAny.special_form),
+            self.named_type("builtins.function"),
+        )
+        if not is_subtype(reverse_type, method_type):
+            self.msg.invalid_signature(reverse_type, context)
+            return
+
+        if reverse_name in ("__eq__", "__ne__"):
+            # These are defined for all objects => can't cause trouble.
+            return
+
+        # With 'Any' or 'object' return type we are happy, since any possible
+        # return value is valid.
+        ret_type = get_proper_type(reverse_type.ret_type)
+        if isinstance(ret_type, AnyType):
+            return
+        if isinstance(ret_type, Instance):
+            if ret_type.type.fullname == "builtins.object":
+                return
+        if reverse_type.arg_kinds[0] == ARG_STAR:
+            reverse_type = reverse_type.copy_modified(
+                arg_types=[reverse_type.arg_types[0]] * 2,
+                arg_kinds=[ARG_POS] * 2,
+                arg_names=[reverse_type.arg_names[0], "_"],
+            )
+        assert len(reverse_type.arg_types) >= 2
+
+        forward_name = operators.normal_from_reverse_op[reverse_name]
+        forward_inst = get_proper_type(reverse_type.arg_types[1])
+        if isinstance(forward_inst, TypeVarType):
+            forward_inst = get_proper_type(forward_inst.upper_bound)
+        elif isinstance(forward_inst, TupleType):
+            forward_inst = tuple_fallback(forward_inst)
+        elif isinstance(forward_inst, (FunctionLike, TypedDictType, LiteralType)):
+            forward_inst = forward_inst.fallback
+        if isinstance(forward_inst, TypeType):
+            item = forward_inst.item
+            if isinstance(item, Instance):
+                opt_meta = item.type.metaclass_type
+                if opt_meta is not None:
+                    forward_inst = opt_meta
+
+        def has_readable_member(typ: UnionType | Instance, name: str) -> bool:
+            # TODO: Deal with attributes of TupleType etc.
+            if isinstance(typ, Instance):
+                return typ.type.has_readable_member(name)
+            return all(
+                (isinstance(x, UnionType) and has_readable_member(x, name))
+                or (isinstance(x, Instance) and x.type.has_readable_member(name))
+                for x in get_proper_types(typ.relevant_items())
+            )
+
+        if not (
+            isinstance(forward_inst, (Instance, UnionType))
+            and has_readable_member(forward_inst, forward_name)
+        ):
+            return
+        forward_base = reverse_type.arg_types[1]
+        forward_type = self.expr_checker.analyze_external_member_access(
+            forward_name, forward_base, context=defn
+        )
+        self.check_overlapping_op_methods(
+            reverse_type,
+            reverse_name,
+            defn.info,
+            forward_type,
+            forward_name,
+            forward_base,
+            context=defn,
+        )
+
+    def check_overlapping_op_methods(
+        self,
+        reverse_type: CallableType,
+        reverse_name: str,
+        reverse_class: TypeInfo,
+        forward_type: Type,
+        forward_name: str,
+        forward_base: Type,
+        context: Context,
+    ) -> None:
+        """Check for overlapping method and reverse method signatures.
+
+        This function assumes that:
+
+        - The reverse method has valid argument count and kinds.
+        - If the reverse operator method accepts some argument of type
+          X, the forward operator method also belongs to class X.
+
+        For example, if we have the reverse operator `A.__radd__(B)`, then the
+        corresponding forward operator must have the type `B.__add__(...)`.
+        """
+
+        # Note: Suppose we have two operator methods "A.__rOP__(B) -> R1" and
+        # "B.__OP__(C) -> R2". We check if these two methods are unsafely overlapping
+        # by using the following algorithm:
+        #
+        # 1. Rewrite "B.__OP__(C) -> R2" to "temp1(B, C) -> R2"
+        #
+        # 2. Rewrite "A.__rOP__(B) -> R1" to "temp2(B, A) -> R1"
+        #
+        # 3.
Treat temp1 and temp2 as if they were both variants in the same + # overloaded function. (This mirrors how the Python runtime calls + # operator methods: we first try __OP__, then __rOP__.) + # + # If the first signature is unsafely overlapping with the second, + # report an error. + # + # 4. However, if temp1 shadows temp2 (e.g. the __rOP__ method can never + # be called), do NOT report an error. + # + # This behavior deviates from how we handle overloads -- many of the + # modules in typeshed seem to define __OP__ methods that shadow the + # corresponding __rOP__ method. + # + # Note: we do not attempt to handle unsafe overlaps related to multiple + # inheritance. (This is consistent with how we handle overloads: we also + # do not try checking unsafe overlaps due to multiple inheritance there.) + + for forward_item in flatten_nested_unions([forward_type]): + forward_item = get_proper_type(forward_item) + if isinstance(forward_item, CallableType): + if self.is_unsafe_overlapping_op(forward_item, forward_base, reverse_type): + self.msg.operator_method_signatures_overlap( + reverse_class, reverse_name, forward_base, forward_name, context + ) + elif isinstance(forward_item, Overloaded): + for item in forward_item.items: + if self.is_unsafe_overlapping_op(item, forward_base, reverse_type): + self.msg.operator_method_signatures_overlap( + reverse_class, reverse_name, forward_base, forward_name, context + ) + elif not isinstance(forward_item, AnyType): + self.msg.forward_operator_not_callable(forward_name, context) + + def is_unsafe_overlapping_op( + self, forward_item: CallableType, forward_base: Type, reverse_type: CallableType + ) -> bool: + # TODO: check argument kinds? + if len(forward_item.arg_types) < 1: + # Not a valid operator method -- can't succeed anyway. + return False + + # Erase the type if necessary to make sure we don't have a single + # TypeVar in forward_tweaked. (Having a function signature containing + # just a single TypeVar can lead to unpredictable behavior.) + forward_base_erased = forward_base + if isinstance(forward_base, TypeVarType): + forward_base_erased = erase_to_bound(forward_base) + + # Construct normalized function signatures corresponding to the + # operator methods. The first argument is the left operand and the + # second operand is the right argument -- we switch the order of + # the arguments of the reverse method. + + # TODO: this manipulation is dangerous if callables are generic. + # Shuffling arguments between callables can create meaningless types. 
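+        # Illustrative example (hypothetical types): for B.__OP__(self: B, other: C) -> R2
+        # and A.__rOP__(self: A, other: B) -> R1, the normalized signatures built below are
+        #
+        #     forward_tweaked: (B, C) -> R2
+        #     reverse_tweaked: (B, A) -> R1
+        #
+        # i.e. both are phrased in terms of a B-typed left operand, so they can be
+        # compared like two variants of one overload.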
+ forward_tweaked = forward_item.copy_modified( + arg_types=[forward_base_erased, forward_item.arg_types[0]], + arg_kinds=[nodes.ARG_POS] * 2, + arg_names=[None] * 2, + ) + reverse_tweaked = reverse_type.copy_modified( + arg_types=[reverse_type.arg_types[1], reverse_type.arg_types[0]], + arg_kinds=[nodes.ARG_POS] * 2, + arg_names=[None] * 2, + ) + + reverse_base_erased = reverse_type.arg_types[0] + if isinstance(reverse_base_erased, TypeVarType): + reverse_base_erased = erase_to_bound(reverse_base_erased) + + if is_same_type(reverse_base_erased, forward_base_erased): + return False + elif is_subtype(reverse_base_erased, forward_base_erased): + first = reverse_tweaked + second = forward_tweaked + else: + first = forward_tweaked + second = reverse_tweaked + + current_class = self.scope.active_class() + type_vars = current_class.defn.type_vars if current_class else [] + return is_unsafe_overlapping_overload_signatures( + first, second, type_vars, partial_only=False + ) + + def check_inplace_operator_method(self, defn: FuncBase) -> None: + """Check an inplace operator method such as __iadd__. + + They cannot arbitrarily overlap with __add__. + """ + method = defn.name + if method not in operators.inplace_operator_methods: + return + typ = bind_self(self.function_type(defn)) + cls = defn.info + other_method = "__" + method[3:] + if cls.has_readable_member(other_method): + instance = fill_typevars(cls) + typ2 = get_proper_type( + self.expr_checker.analyze_external_member_access(other_method, instance, defn) + ) + fail = False + if isinstance(typ2, FunctionLike): + if not is_more_general_arg_prefix(typ, typ2): + fail = True + else: + # TODO overloads + fail = True + if fail: + self.msg.signatures_incompatible(method, other_method, defn) + + def check_getattr_method(self, typ: Type, context: Context, name: str) -> None: + if len(self.scope.stack) == 1: + # module scope + if name == "__getattribute__": + self.fail(message_registry.MODULE_LEVEL_GETATTRIBUTE, context) + return + # __getattr__ is fine at the module level as of Python 3.7 (PEP 562). We could + # show an error for Python < 3.7, but that would be annoying in code that supports + # both 3.7 and older versions. 
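+            # For illustration, the expected module-level shape is:
+            #
+            #     def __getattr__(name: str) -> Any: ...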
+ method_type = CallableType( + [self.named_type("builtins.str")], + [nodes.ARG_POS], + [None], + AnyType(TypeOfAny.special_form), + self.named_type("builtins.function"), + ) + elif self.scope.active_class(): + method_type = CallableType( + [AnyType(TypeOfAny.special_form), self.named_type("builtins.str")], + [nodes.ARG_POS, nodes.ARG_POS], + [None, None], + AnyType(TypeOfAny.special_form), + self.named_type("builtins.function"), + ) + else: + return + if not is_subtype(typ, method_type): + self.msg.invalid_signature_for_special_method(typ, context, name) + + def check_setattr_method(self, typ: Type, context: Context) -> None: + if not self.scope.active_class(): + return + method_type = CallableType( + [ + AnyType(TypeOfAny.special_form), + self.named_type("builtins.str"), + AnyType(TypeOfAny.special_form), + ], + [nodes.ARG_POS, nodes.ARG_POS, nodes.ARG_POS], + [None, None, None], + NoneType(), + self.named_type("builtins.function"), + ) + if not is_subtype(typ, method_type): + self.msg.invalid_signature_for_special_method(typ, context, "__setattr__") + + def check_slots_definition(self, typ: Type, context: Context) -> None: + """Check the type of __slots__.""" + str_type = self.named_type("builtins.str") + expected_type = UnionType( + [str_type, self.named_generic_type("typing.Iterable", [str_type])] + ) + self.check_subtype( + typ, + expected_type, + context, + message_registry.INVALID_TYPE_FOR_SLOTS, + "actual type", + "expected type", + code=codes.ASSIGNMENT, + ) + + def check_match_args(self, var: Var, typ: Type, context: Context) -> None: + """Check that __match_args__ contains literal strings""" + if not self.scope.active_class(): + return + typ = get_proper_type(typ) + if not isinstance(typ, TupleType) or not all( + is_string_literal(item) for item in typ.items + ): + self.msg.note( + "__match_args__ must be a tuple containing string literals for checking " + "of match statements to work", + context, + code=codes.LITERAL_REQ, + ) + + def expand_typevars( + self, defn: FuncItem, typ: CallableType + ) -> list[tuple[FuncItem, CallableType]]: + # TODO use generator + subst: list[list[tuple[TypeVarId, Type]]] = [] + tvars = list(typ.variables) or [] + if defn.info: + # Class type variables + tvars += defn.info.defn.type_vars or [] + for tvar in tvars: + if isinstance(tvar, TypeVarType) and tvar.values: + subst.append([(tvar.id, value) for value in tvar.values]) + # Make a copy of the function to check for each combination of + # value restricted type variables. (Except when running mypyc, + # where we need one canonical version of the function.) + if subst and not (self.options.mypyc or self.options.inspections): + result: list[tuple[FuncItem, CallableType]] = [] + for substitutions in itertools.product(*subst): + mapping = dict(substitutions) + result.append((expand_func(defn, mapping), expand_type(typ, mapping))) + return result + else: + return [(defn, typ)] + + def check_explicit_override_decorator( + self, + defn: FuncDef | OverloadedFuncDef, + found_method_base_classes: list[TypeInfo] | None, + context: Context | None = None, + ) -> None: + plugin_generated = False + if defn.info and (node := defn.info.get(defn.name)) and node.plugin_generated: + # Do not report issues for plugin generated nodes, + # they can't realistically use `@override` for their methods. 
+ plugin_generated = True + + if ( + not plugin_generated + and found_method_base_classes + and not defn.is_explicit_override + and defn.name not in ("__init__", "__new__") + and not is_private(defn.name) + ): + self.msg.explicit_override_decorator_missing( + defn.name, found_method_base_classes[0].fullname, context or defn + ) + + def check_method_override( + self, defn: FuncDef | OverloadedFuncDef | Decorator + ) -> list[TypeInfo] | None: + """Check if function definition is compatible with base classes. + + This may defer the method if a signature is not available in at least one base class. + Return ``None`` if that happens. + + Return a list of base classes which contain an attribute with the method name. + """ + if self.options.ignore_errors or self.msg.errors.file in self.msg.errors.ignored_files: + # Method override checks may be expensive, so skip them in third-party libraries. + return None + # Check against definitions in base classes. + check_override_compatibility = ( + defn.name not in ("__init__", "__new__", "__init_subclass__", "__post_init__") + and (self.options.check_untyped_defs or not defn.is_dynamic()) + and ( + # don't check override for synthesized __replace__ methods from dataclasses + defn.name != "__replace__" + or defn.info.metadata.get("dataclass_tag") is None + ) + ) + found_method_base_classes: list[TypeInfo] = [] + for base in defn.info.mro[1:]: + result = self.check_method_or_accessor_override_for_base( + defn, base, check_override_compatibility + ) + if result is None: + # Node was deferred, we will have another attempt later. + return None + if result: + found_method_base_classes.append(base) + return found_method_base_classes + + def check_method_or_accessor_override_for_base( + self, + defn: FuncDef | OverloadedFuncDef | Decorator, + base: TypeInfo, + check_override_compatibility: bool, + ) -> bool | None: + """Check if method definition is compatible with a base class. + + Return ``None`` if the node was deferred because one of the corresponding + superclass nodes is not ready. + + Return ``True`` if an attribute with the method name was found in the base class. + """ + found_base_method = False + if base: + name = defn.name + base_attr = base.names.get(name) + if base_attr: + # First, check if we override a final (always an error, even with Any types). + if is_final_node(base_attr.node) and not is_private(name): + self.msg.cant_override_final(name, base.name, defn) + # Second, final can't override anything writeable independently of types. + if defn.is_final: + self.check_if_final_var_override_writable(name, base_attr.node, defn) + found_base_method = True + if check_override_compatibility: + # Check compatibility of the override signature + # (__init__, __new__, __init_subclass__ are special). + if self.check_method_override_for_base_with_name(defn, name, base): + return None + if name in operators.inplace_operator_methods: + # Figure out the name of the corresponding operator method. + method = "__" + name[3:] + # An inplace operator method such as __iadd__ might not be + # always introduced safely if a base class defined __add__. + # TODO can't come up with an example where this is + # necessary; now it's "just in case" + if self.check_method_override_for_base_with_name(defn, method, base): + return None + return found_base_method + + def check_setter_type_override(self, defn: OverloadedFuncDef, base: TypeInfo) -> None: + """Check override of a setter type of a mutable attribute. 
+ + Currently, this should be only called when either base node or the current node + is a custom settable property (i.e. where setter type is different from getter type). + Note that this check is contravariant. + """ + typ, _ = self.node_type_from_base(defn.name, defn.info, defn, setter_type=True) + original_type, _ = self.node_type_from_base(defn.name, base, defn, setter_type=True) + # The caller should handle deferrals. + assert typ is not None and original_type is not None + + if not is_subtype(original_type, typ): + self.msg.incompatible_setter_override(defn.setter, typ, original_type, base) + + def check_method_override_for_base_with_name( + self, defn: FuncDef | OverloadedFuncDef | Decorator, name: str, base: TypeInfo + ) -> bool: + """Check if overriding an attribute `name` of `base` with `defn` is valid. + + Return True if the supertype node was not analysed yet, and `defn` was deferred. + """ + base_attr = base.names.get(name) + if not base_attr: + return False + # The name of the method is defined in the base class. + + # Point errors at the 'def' line (important for backward compatibility + # of type ignores). + if not isinstance(defn, Decorator): + context = defn + else: + context = defn.func + + # Construct the type of the overriding method. + if isinstance(defn, (FuncDef, OverloadedFuncDef)): + override_class_or_static = defn.is_class or defn.is_static + else: + override_class_or_static = defn.func.is_class or defn.func.is_static + typ, _ = self.node_type_from_base(defn.name, defn.info, defn) + if typ is None: + # This may only happen if we're checking `x-redefinition` member + # and `x` itself is for some reason gone. Normally the node should + # be reachable from the containing class by its name. + # The redefinition is never removed, use this as a sanity check to verify + # the reasoning above. + assert f"{defn.name}-redefinition" in defn.info.names + return False + + original_node = base_attr.node + # `original_type` can be partial if (e.g.) it is originally an + # instance variable from an `__init__` block that becomes deferred. + supertype_ready = True + original_type, _ = self.node_type_from_base(name, base, defn) + if original_type is None: + supertype_ready = False + if self.pass_num < self.last_pass: + # If there are passes left, defer this node until next pass, + # otherwise try reconstructing the method type from available information. + # For consistency, defer an enclosing top-level function (if any). + top_level = self.scope.top_level_function() + if isinstance(top_level, FuncDef): + self.defer_node(top_level, self.scope.enclosing_class(top_level)) + else: + # Specify enclosing class explicitly, as we check type override before + # entering e.g. decorators or overloads. + self.defer_node(defn, defn.info) + return True + elif isinstance(original_node, (FuncDef, OverloadedFuncDef)): + original_type = self.function_type(original_node) + elif isinstance(original_node, Decorator): + original_type = self.function_type(original_node.func) + elif isinstance(original_node, Var): + # Super type can define method as an attribute. + # See https://github.com/python/mypy/issues/10134 + + # We also check that sometimes `original_node.type` is None. + # This is the case when we use something like `__hash__ = None`. 
+ if original_node.type is not None: + original_type = get_proper_type(original_node.type) + else: + original_type = NoneType() + else: + # Will always fail to typecheck below, since we know the node is a method + original_type = NoneType() + + always_allow_covariant = False + if is_settable_property(defn) and ( + is_settable_property(original_node) or isinstance(original_node, Var) + ): + if is_custom_settable_property(defn) or (is_custom_settable_property(original_node)): + # Unlike with getter, where we try to construct some fallback type in case of + # deferral during last_pass, we can't make meaningful setter checks if the + # supertype is not known precisely. + if supertype_ready: + always_allow_covariant = True + self.check_setter_type_override(defn, base) + + if isinstance(original_node, (FuncDef, OverloadedFuncDef)): + original_class_or_static = original_node.is_class or original_node.is_static + elif isinstance(original_node, Decorator): + fdef = original_node.func + original_class_or_static = fdef.is_class or fdef.is_static + else: + original_class_or_static = False # a variable can't be class or static + + typ = get_proper_type(typ) + original_type = get_proper_type(original_type) + + if ( + is_property(defn) + and isinstance(original_node, Var) + and not original_node.is_final + and (not original_node.is_property or original_node.is_settable_property) + and isinstance(defn, Decorator) + ): + # We only give an error where no other similar errors will be given. + if not isinstance(original_type, AnyType): + self.msg.fail( + "Cannot override writeable attribute with read-only property", + # Give an error on function line to match old behaviour. + defn.func, + code=codes.OVERRIDE, + ) + + if isinstance(original_type, AnyType) or isinstance(typ, AnyType): + pass + elif isinstance(original_type, FunctionLike) and isinstance(typ, FunctionLike): + # Check that the types are compatible. + ok = self.check_override( + typ, + original_type, + defn.name, + name, + base.name if base.module_name == self.tree.fullname else base.fullname, + original_class_or_static, + override_class_or_static, + context, + ) + # Check if this override is covariant. + if ( + ok + and original_node + and codes.MUTABLE_OVERRIDE in self.options.enabled_error_codes + and self.is_writable_attribute(original_node) + and not always_allow_covariant + and not is_subtype(original_type, typ, ignore_pos_arg_names=True) + ): + base_str, override_str = format_type_distinctly( + original_type, typ, options=self.options + ) + msg = message_registry.COVARIANT_OVERRIDE_OF_MUTABLE_ATTRIBUTE.with_additional_msg( + f' (base class "{base.name}" defined the type as {base_str},' + f" override has type {override_str})" + ) + self.fail(msg, context) + elif isinstance(original_type, UnionType) and any( + is_subtype(typ, orig_typ, ignore_pos_arg_names=True) + for orig_typ in original_type.items + ): + # This method is a subtype of at least one union variant. + if ( + original_node + and codes.MUTABLE_OVERRIDE in self.options.enabled_error_codes + and self.is_writable_attribute(original_node) + and not always_allow_covariant + ): + # Covariant override of mutable attribute. 
+ base_str, override_str = format_type_distinctly(
+ original_type, typ, options=self.options
+ )
+ msg = message_registry.COVARIANT_OVERRIDE_OF_MUTABLE_ATTRIBUTE.with_additional_msg(
+ f' (base class "{base.name}" defined the type as {base_str},'
+ f" override has type {override_str})"
+ )
+ self.fail(msg, context)
+ elif is_equivalent(original_type, typ):
+ # Assume invariance for a non-callable attribute here. Note
+ # that this doesn't affect read-only properties which can have
+ # covariant overrides.
+ pass
+ elif (
+ original_node
+ and (not self.is_writable_attribute(original_node) or always_allow_covariant)
+ and is_subtype(typ, original_type)
+ ):
+ # If the attribute is read-only, allow covariance
+ pass
+ else:
+ self.msg.signature_incompatible_with_supertype(
+ defn.name, name, base.name, context, original=original_type, override=typ
+ )
+ return False
+
+ def get_op_other_domain(self, tp: FunctionLike) -> Type | None:
+ if isinstance(tp, CallableType):
+ if tp.arg_kinds and tp.arg_kinds[0] == ARG_POS:
+ # For generic methods, domain comparison is tricky; as a first
+ # approximation, erase all remaining type variables.
+ return erase_typevars(tp.arg_types[0], {v.id for v in tp.variables})
+ return None
+ elif isinstance(tp, Overloaded):
+ raw_items = [self.get_op_other_domain(it) for it in tp.items]
+ items = [it for it in raw_items if it]
+ if items:
+ return make_simplified_union(items)
+ return None
+ else:
+ assert False, "Need to check all FunctionLike subtypes here"
+
+ def check_override(
+ self,
+ override: FunctionLike,
+ original: FunctionLike,
+ name: str,
+ name_in_super: str,
+ supertype: str,
+ original_class_or_static: bool,
+ override_class_or_static: bool,
+ node: Context,
+ ) -> bool:
+ """Check a method override with given signatures.
+
+ Arguments:
+ override: The signature of the overriding method.
+ original: The signature of the original supertype method.
+ name: The name of the overriding method.
+ Used primarily for generating error messages.
+ name_in_super: The name of the overridden method in the superclass.
+ Used for generating error messages only.
+ supertype: The name of the supertype.
+ original_class_or_static: Indicates whether the original method (from the superclass)
+ is either a class method or a static method.
+ override_class_or_static: Indicates whether the overriding method (from the subclass)
+ is either a class method or a static method.
+ node: Context node.
+ """
+ # Use boolean variable to clarify code.
+ fail = False
+ op_method_wider_note = False
+ if not is_subtype(override, original, ignore_pos_arg_names=True):
+ fail = True
+ elif isinstance(override, Overloaded) and self.is_forward_op_method(name):
+ # Operator method overrides cannot extend the domain, as
+ # this could be unsafe with reverse operator methods.
+ original_domain = self.get_op_other_domain(original) + override_domain = self.get_op_other_domain(override) + if ( + original_domain + and override_domain + and not is_subtype(override_domain, original_domain) + ): + fail = True + op_method_wider_note = True + if isinstance(override, FunctionLike): + if original_class_or_static and not override_class_or_static: + fail = True + elif isinstance(original, CallableType) and isinstance(override, CallableType): + if original.type_guard is not None and override.type_guard is None: + fail = True + if original.type_is is not None and override.type_is is None: + fail = True + + if is_private(name): + fail = False + + if fail: + emitted_msg = False + + offset_arguments = isinstance(override, CallableType) and override.unpack_kwargs + # Normalize signatures, so we get better diagnostics. + if isinstance(override, (CallableType, Overloaded)): + override = override.with_unpacked_kwargs() + if isinstance(original, (CallableType, Overloaded)): + original = original.with_unpacked_kwargs() + + if ( + isinstance(override, CallableType) + and isinstance(original, CallableType) + and len(override.arg_types) == len(original.arg_types) + and override.min_args == original.min_args + ): + # Give more detailed messages for the common case of both + # signatures having the same number of arguments and no + # overloads. + + # override might have its own generic function type + # variables. If an argument or return type of override + # does not have the correct subtyping relationship + # with the original type even after these variables + # are erased, then it is definitely an incompatibility. + + override_ids = override.type_var_ids() + type_name = None + definition = get_func_def(override) + if isinstance(definition, FuncDef): + type_name = definition.info.name + + def erase_override(t: Type) -> Type: + return erase_typevars(t, ids_to_erase=override_ids) + + for i, (sub_kind, super_kind) in enumerate( + zip(override.arg_kinds, original.arg_kinds) + ): + if sub_kind.is_positional() and super_kind.is_positional(): + override_arg_type = override.arg_types[i] + original_arg_type = original.arg_types[i] + elif sub_kind.is_named() and super_kind.is_named() and not offset_arguments: + arg_name = override.arg_names[i] + if arg_name in original.arg_names: + override_arg_type = override.arg_types[i] + original_i = original.arg_names.index(arg_name) + original_arg_type = original.arg_types[original_i] + else: + continue + else: + continue + if not is_subtype(original_arg_type, erase_override(override_arg_type)): + context: Context = node + if ( + isinstance(node, FuncDef) + and not node.is_property + and ( + not node.is_decorated # fast path + # allow trivial decorators like @classmethod and @override + or not (sym := node.info.get(node.name)) + or not isinstance(sym.node, Decorator) + or not sym.node.decorators + ) + ): + # If there's any decorator, we can no longer map arguments 1:1 reliably. 
+ arg_node = node.arguments[i + override.bound()] + if arg_node.line != -1: + context = arg_node + self.msg.argument_incompatible_with_supertype( + i + 1, + name, + type_name, + name_in_super, + original_arg_type, + supertype, + context, + secondary_context=node, + ) + emitted_msg = True + + if not is_subtype(erase_override(override.ret_type), original.ret_type): + self.msg.return_type_incompatible_with_supertype( + name, name_in_super, supertype, original.ret_type, override.ret_type, node + ) + emitted_msg = True + elif isinstance(override, Overloaded) and isinstance(original, Overloaded): + # Give a more detailed message in the case where the user is trying to + # override an overload, and the subclass's overload is plausible, except + # that the order of the variants are wrong. + # + # For example, if the parent defines the overload f(int) -> int and f(str) -> str + # (in that order), and if the child swaps the two and does f(str) -> str and + # f(int) -> int + order = [] + for child_variant in override.items: + for i, parent_variant in enumerate(original.items): + if is_subtype(child_variant, parent_variant): + order.append(i) + break + + if len(order) == len(original.items) and order != sorted(order): + self.msg.overload_signature_incompatible_with_supertype( + name, name_in_super, supertype, node + ) + emitted_msg = True + + if not emitted_msg: + # Fall back to generic incompatibility message. + self.msg.signature_incompatible_with_supertype( + name, name_in_super, supertype, node, original=original, override=override + ) + if op_method_wider_note: + self.note( + "Overloaded operator methods can't have wider argument types in overrides", + node, + code=codes.OVERRIDE, + ) + return not fail + + def check__exit__return_type(self, defn: FuncItem) -> None: + """Generate error if the return type of __exit__ is problematic. + + If __exit__ always returns False but the return type is declared + as bool, mypy thinks that a with statement may "swallow" + exceptions even though this is not the case, resulting in + invalid reachability inference. + """ + if not defn.type or not isinstance(defn.type, CallableType): + return + + ret_type = get_proper_type(defn.type.ret_type) + if not has_bool_item(ret_type): + return + + returns = all_return_statements(defn) + if not returns: + return + + if all( + isinstance(ret.expr, NameExpr) and ret.expr.fullname == "builtins.False" + for ret in returns + ): + self.msg.incorrect__exit__return(defn) + + def visit_class_def(self, defn: ClassDef) -> None: + """Type check a class definition.""" + typ = defn.info + for base in typ.mro[1:]: + if base.is_final: + self.fail(message_registry.CANNOT_INHERIT_FROM_FINAL.format(base.name), defn) + if not can_have_shared_disjoint_base(typ.bases): + self.fail(message_registry.INCOMPATIBLE_DISJOINT_BASES.format(typ.name), defn) + with ( + self.tscope.class_scope(defn.info), + self.enter_partial_types(is_class=True), + self.enter_class(defn.info), + ): + old_binder = self.binder + self.binder = ConditionalTypeBinder(self.options) + with self.binder.top_frame_context(): + with self.scope.push_class(defn.info): + self.accept(defn.defs) + self.binder = old_binder + if not (defn.info.typeddict_type or defn.info.tuple_type or defn.info.is_enum): + # If it is not a normal class (not a special form) check class keywords. 
+ self.check_init_subclass(defn) + if not defn.has_incompatible_baseclass: + # Otherwise we've already found errors; more errors are not useful + self.check_multiple_inheritance(typ) + self.check_metaclass_compatibility(typ) + self.check_final_deletable(typ) + + if defn.decorators: + sig: Type = type_object_type(defn.info, self.named_type) + # Decorators are applied in reverse order. + for decorator in reversed(defn.decorators): + if isinstance(decorator, CallExpr) and isinstance( + decorator.analyzed, PromoteExpr + ): + # _promote is a special type checking related construct. + continue + + dec = self.expr_checker.accept(decorator) + temp = self.temp_node(sig, context=decorator) + fullname = None + if isinstance(decorator, RefExpr): + fullname = decorator.fullname or None + + # TODO: Figure out how to have clearer error messages. + # (e.g. "class decorator must be a function that accepts a type." + old_allow_abstract_call = self.allow_abstract_call + self.allow_abstract_call = True + sig, _ = self.expr_checker.check_call( + dec, [temp], [nodes.ARG_POS], defn, callable_name=fullname + ) + self.allow_abstract_call = old_allow_abstract_call + # TODO: Apply the sig to the actual TypeInfo so we can handle decorators + # that completely swap out the type. (e.g. Callable[[Type[A]], Type[B]]) + if typ.defn.type_vars and typ.defn.type_args is None: + for base_inst in typ.bases: + for base_tvar, base_decl_tvar in zip( + base_inst.args, base_inst.type.defn.type_vars + ): + if ( + isinstance(base_tvar, TypeVarType) + and base_tvar.variance != INVARIANT + and isinstance(base_decl_tvar, TypeVarType) + and base_decl_tvar.variance != base_tvar.variance + ): + self.fail( + f'Variance of TypeVar "{base_tvar.name}" incompatible ' + "with variance in parent type", + context=defn, + code=codes.TYPE_VAR, + ) + if typ.defn.type_vars: + self.check_typevar_defaults(typ.defn.type_vars) + + if typ.is_protocol and typ.defn.type_vars: + self.check_protocol_variance(defn) + if not defn.has_incompatible_baseclass and defn.info.is_enum: + self.check_enum(defn) + infer_class_variances(defn.info) + + @contextmanager + def enter_class(self, type: TypeInfo) -> Iterator[None]: + original_type = self.type + self.type = type + try: + yield + finally: + self.type = original_type + + def check_final_deletable(self, typ: TypeInfo) -> None: + # These checks are only for mypyc. Only perform some checks that are easier + # to implement here than in mypyc. + for attr in typ.deletable_attributes: + node = typ.names.get(attr) + if node and isinstance(node.node, Var) and node.node.is_final: + self.fail(message_registry.CANNOT_MAKE_DELETABLE_FINAL, node.node) + + def check_init_subclass(self, defn: ClassDef) -> None: + """Check that keywords in a class definition are valid arguments for __init_subclass__(). + + In this example: + 1 class Base: + 2 def __init_subclass__(cls, thing: int): + 3 pass + 4 class Child(Base, thing=5): + 5 def __init_subclass__(cls): + 6 pass + 7 Child() + + Base.__init_subclass__(thing=5) is called at line 4. This is what we simulate here. + Child.__init_subclass__ is never called. + """ + if defn.info.metaclass_type and defn.info.metaclass_type.type.fullname not in ( + "builtins.type", + "abc.ABCMeta", + ): + # We can't safely check situations when both __init_subclass__ and a custom + # metaclass are present. + return + # At runtime, only Base.__init_subclass__ will be called, so + # we skip the current class itself. 
+ for base in defn.info.mro[1:]: + if "__init_subclass__" not in base.names: + continue + name_expr = NameExpr(defn.name) + name_expr.node = base + callee = MemberExpr(name_expr, "__init_subclass__") + args = list(defn.keywords.values()) + arg_names: list[str | None] = list(defn.keywords.keys()) + # 'metaclass' keyword is consumed by the rest of the type machinery, + # and is never passed to __init_subclass__ implementations + if "metaclass" in arg_names: + idx = arg_names.index("metaclass") + arg_names.pop(idx) + args.pop(idx) + arg_kinds = [ARG_NAMED] * len(args) + call_expr = CallExpr(callee, args, arg_kinds, arg_names) + call_expr.line = defn.line + call_expr.column = defn.column + call_expr.end_line = defn.end_line + self.expr_checker.accept(call_expr, allow_none_return=True, always_allow_any=True) + # We are only interested in the first Base having __init_subclass__, + # all other bases have already been checked. + break + + def check_typevar_defaults(self, tvars: Sequence[TypeVarLikeType]) -> None: + for tv in tvars: + if not (isinstance(tv, TypeVarType) and tv.has_default()): + continue + if not is_subtype(tv.default, tv.upper_bound): + self.fail("TypeVar default must be a subtype of the bound type", tv) + if tv.values and not any(is_same_type(tv.default, value) for value in tv.values): + self.fail("TypeVar default must be one of the constraint types", tv) + + def check_enum(self, defn: ClassDef) -> None: + assert defn.info.is_enum + if defn.info.fullname not in ENUM_BASES and "__members__" in defn.info.names: + sym = defn.info.names["__members__"] + if isinstance(sym.node, Var) and sym.node.has_explicit_value: + # `__members__` will always be overwritten by `Enum` and is considered + # read-only so we disallow assigning a value to it + self.fail(message_registry.ENUM_MEMBERS_ATTR_WILL_BE_OVERRIDDEN, sym.node) + for base in defn.info.mro[1:-1]: # we don't need self and `object` + if base.is_enum and base.fullname not in ENUM_BASES: + self.check_final_enum(defn, base) + + if self.is_stub and self.tree.fullname not in {"enum", "_typeshed"}: + if not defn.info.enum_members: + self.fail( + f'Detected enum "{defn.info.fullname}" in a type stub with zero members. ' + "There is a chance this is due to a recent change in the semantics of " + "enum membership. If so, use `member = value` to mark an enum member, " + "instead of `member: type`", + defn, + ) + self.note( + "See https://typing.readthedocs.io/en/latest/spec/enums.html#defining-members", + defn, + ) + + self.check_enum_bases(defn) + self.check_enum_new(defn) + + def check_final_enum(self, defn: ClassDef, base: TypeInfo) -> None: + if base.enum_members: + self.fail(f'Cannot extend enum with existing members: "{base.name}"', defn) + + def is_final_enum_value(self, sym: SymbolTableNode) -> bool: + if isinstance(sym.node, (FuncBase, Decorator)): + return False # A method is fine + if not isinstance(sym.node, Var): + return True # Can be a class or anything else + + # Now, only `Var` is left, we need to check: + # 1. Private name like in `__prop = 1` + # 2. Dunder name like `__hash__ = some_hasher` + # 3. Sunder name like `_order_ = 'a, b, c'` + # 4. If it is a method / descriptor like in `method = classmethod(func)` + if ( + is_private(sym.node.name) + or is_dunder(sym.node.name) + or is_sunder(sym.node.name) + # TODO: make sure that `x = @class/staticmethod(func)` + # and `x = property(prop)` both work correctly. + # Now they are incorrectly counted as enum members. 
+ or isinstance(get_proper_type(sym.node.type), FunctionLike) + ): + return False + + return self.is_stub or sym.node.has_explicit_value + + def check_enum_bases(self, defn: ClassDef) -> None: + """ + Non-enum mixins cannot appear after enum bases; this is disallowed at runtime: + + class Foo: ... + class Bar(enum.Enum, Foo): ... + + But any number of enum mixins can appear in a class definition + (even if multiple enum bases define __new__). So this is fine: + + class Foo(enum.Enum): + def __new__(cls, val): ... + class Bar(enum.Enum): + def __new__(cls, val): ... + class Baz(int, Foo, Bar, enum.Flag): ... + """ + enum_base: Instance | None = None + for base in defn.info.bases: + if enum_base is None and base.type.is_enum: + enum_base = base + continue + elif enum_base is not None and not base.type.is_enum: + self.fail( + f'No non-enum mixin classes are allowed after "{enum_base.str_with_options(self.options)}"', + defn, + ) + break + + def check_enum_new(self, defn: ClassDef) -> None: + def has_new_method(info: TypeInfo) -> bool: + new_method = info.get("__new__") + return bool( + new_method + and new_method.node + and new_method.node.fullname != "builtins.object.__new__" + ) + + has_new = False + for base in defn.info.bases: + candidate = False + + if base.type.is_enum: + # If we have an `Enum`, then we need to check all its bases. + candidate = any(not b.is_enum and has_new_method(b) for b in base.type.mro[1:-1]) + else: + candidate = has_new_method(base.type) + + if candidate and has_new: + self.fail( + "Only a single data type mixin is allowed for Enum subtypes, " + 'found extra "{}"'.format(base.str_with_options(self.options)), + defn, + ) + elif candidate: + has_new = True + + def check_protocol_variance(self, defn: ClassDef) -> None: + """Check that protocol definition is compatible with declared + variances of type variables. + + Note that we also prohibit declaring protocol classes as invariant + if they are actually covariant/contravariant, since this may break + transitivity of subtyping, see PEP 544. + """ + if defn.type_args is not None: + # Using new-style syntax (PEP 695), so variance will be inferred + return + info = defn.info + object_type = Instance(info.mro[-1], []) + tvars = info.defn.type_vars + if self._variance_dummy_type is None: + _, dummy_info = self.make_fake_typeinfo("", "Dummy", "Dummy", []) + self._variance_dummy_type = Instance(dummy_info, []) + dummy = self._variance_dummy_type + for i, tvar in enumerate(tvars): + if not isinstance(tvar, TypeVarType): + # Variance of TypeVarTuple and ParamSpec is underspecified by PEPs. + continue + up_args: list[Type] = [ + object_type if i == j else dummy.copy_modified() for j, _ in enumerate(tvars) + ] + down_args: list[Type] = [ + UninhabitedType() if i == j else dummy.copy_modified() for j, _ in enumerate(tvars) + ] + up, down = Instance(info, up_args), Instance(info, down_args) + # TODO: add advanced variance checks for recursive protocols + if is_subtype(down, up, ignore_declared_variance=True): + expected = COVARIANT + elif is_subtype(up, down, ignore_declared_variance=True): + expected = CONTRAVARIANT + else: + expected = INVARIANT + if expected != tvar.variance: + self.msg.bad_proto_variance(tvar.variance, tvar.name, expected, defn) + + def check_multiple_inheritance(self, typ: TypeInfo) -> None: + """Check for multiple inheritance related errors.""" + if len(typ.bases) <= 1: + # No multiple inheritance. + return + # Verify that inherited attributes are compatible. 
+ mro = typ.mro[1:] + all_names = {name for base in mro for name in base.names} + for name in sorted(all_names - typ.names.keys()): + # Sort for reproducible message order. + # Attributes defined in both the type and base are skipped. + # Normal checks for attribute compatibility should catch any problems elsewhere. + if is_private(name): + continue + # Compare the first base defining a name with the rest. + # Remaining bases may not be pairwise compatible as the first base provides + # the used definition. + i, base = next((i, base) for i, base in enumerate(mro) if name in base.names) + for base2 in mro[i + 1 :]: + if name in base2.names and base2 not in base.mro: + self.check_compatibility(name, base, base2, typ) + + def check_compatibility( + self, name: str, base1: TypeInfo, base2: TypeInfo, ctx: TypeInfo + ) -> None: + """Check if attribute name in base1 is compatible with base2 in multiple inheritance. + + Assume base1 comes before base2 in the MRO, and that base1 and base2 don't have + a direct subclass relationship (i.e., the compatibility requirement only derives from + multiple inheritance). + + This check verifies that a definition taken from base1 (and mapped to the current + class ctx), is type compatible with the definition taken from base2 (also mapped), so + that unsafe subclassing like this can be detected: + class A(Generic[T]): + def foo(self, x: T) -> None: ... + + class B: + def foo(self, x: str) -> None: ... + + class C(B, A[int]): ... # this is unsafe because... + + x: A[int] = C() + x.foo # ...runtime type is (str) -> None, while static type is (int) -> None + """ + if name in ("__init__", "__new__", "__init_subclass__"): + # __init__ and friends can be incompatible -- it's a special case. + return + first = base1.names[name] + second = base2.names[name] + # Specify current_class explicitly as this function is called after leaving the class. + first_type, _ = self.node_type_from_base(name, base1, ctx, current_class=ctx) + second_type, _ = self.node_type_from_base(name, base2, ctx, current_class=ctx) + + # TODO: use more principled logic to decide is_subtype() vs is_equivalent(). + # We should rely on mutability of superclass node, not on types being Callable. + # (in particular handle settable properties with setter type different from getter). 
+ + p_first_type = get_proper_type(first_type) + p_second_type = get_proper_type(second_type) + if isinstance(p_first_type, FunctionLike) and isinstance(p_second_type, FunctionLike): + if p_first_type.is_type_obj() and p_second_type.is_type_obj(): + # For class objects only check the subtype relationship of the classes, + # since we allow incompatible overrides of '__init__'/'__new__' + ok = is_subtype( + left=fill_typevars_with_any(p_first_type.type_object()), + right=fill_typevars_with_any(p_second_type.type_object()), + ) + else: + assert first_type and second_type + ok = is_subtype(first_type, second_type, ignore_pos_arg_names=True) + elif first_type and second_type: + if second.node is not None and not self.is_writable_attribute(second.node): + ok = is_subtype(first_type, second_type) + else: + ok = is_equivalent(first_type, second_type) + if ok: + if ( + first.node + and second.node + and self.is_writable_attribute(second.node) + and is_property(first.node) + and isinstance(first.node, Decorator) + and not isinstance(p_second_type, AnyType) + ): + self.msg.fail( + f'Cannot override writeable attribute "{name}" in base "{base2.name}"' + f' with read-only property in base "{base1.name}"', + ctx, + code=codes.OVERRIDE, + ) + else: + if first_type is None: + self.msg.cannot_determine_type_in_base(name, base1.name, ctx) + if second_type is None: + self.msg.cannot_determine_type_in_base(name, base2.name, ctx) + ok = True + # Final attributes can never be overridden, but can override + # non-final read-only attributes. + if is_final_node(second.node) and not is_private(name): + self.msg.cant_override_final(name, base2.name, ctx) + if is_final_node(first.node): + self.check_if_final_var_override_writable(name, second.node, ctx) + # Some attributes like __slots__ and __deletable__ are special, and the type can + # vary across class hierarchy. + if isinstance(second.node, Var) and second.node.allow_incompatible_override: + ok = True + if not ok: + self.msg.base_class_definitions_incompatible(name, base1, base2, ctx) + + def check_metaclass_compatibility(self, typ: TypeInfo) -> None: + """Ensures that metaclasses of all parent types are compatible.""" + if ( + typ.is_metaclass() + or typ.is_protocol + or typ.is_named_tuple + or typ.is_enum + or typ.typeddict_type is not None + ): + return # Reasonable exceptions from this check + + if typ.metaclass_type is None and any( + base.type.metaclass_type is not None for base in typ.bases + ): + self.fail( + "Metaclass conflict: the metaclass of a derived class must be " + "a (non-strict) subclass of the metaclasses of all its bases", + typ, + code=codes.METACLASS, + ) + explanation = typ.explain_metaclass_conflict() + if explanation: + self.note(explanation, typ, code=codes.METACLASS) + + def visit_import_from(self, node: ImportFrom) -> None: + for name, _ in node.names: + if (sym := self.globals.get(name)) is not None: + self.warn_deprecated(sym.node, node) + self.check_import(node) + + def visit_import_all(self, node: ImportAll) -> None: + self.check_import(node) + + def visit_import(self, node: Import) -> None: + self.check_import(node) + + def check_import(self, node: ImportBase) -> None: + for assign in node.assignments: + lvalue = assign.lvalues[0] + lvalue_type, _, __ = self.check_lvalue(lvalue) + if lvalue_type is None: + # TODO: This is broken. 
+ lvalue_type = AnyType(TypeOfAny.special_form) + assert isinstance(assign.rvalue, NameExpr) + message = message_registry.INCOMPATIBLE_IMPORT_OF.format(assign.rvalue.name) + self.check_simple_assignment( + lvalue_type, + assign.rvalue, + node, + msg=message, + lvalue_name="local name", + rvalue_name="imported name", + ) + + # + # Statements + # + + def visit_block(self, b: Block) -> None: + if b.is_unreachable: + # This block was marked as being unreachable during semantic analysis. + # It turns out any blocks marked in this way are *intentionally* marked + # as unreachable -- so we don't display an error. + self.binder.unreachable() + return + for s in b.body: + if self.binder.is_unreachable(): + if not self.should_report_unreachable_issues(): + break + if not self.is_noop_for_reachability(s): + self.msg.unreachable_statement(s) + break + else: + self.accept(s) + # Clear expression cache after each statement to avoid unlimited growth. + self.expr_checker.expr_cache.clear() + + def should_report_unreachable_issues(self) -> bool: + return ( + self.in_checked_function() + and self.options.warn_unreachable + and not self.current_node_deferred + and not self.binder.is_unreachable_warning_suppressed() + ) + + def is_noop_for_reachability(self, s: Statement) -> bool: + """Returns 'true' if the given statement either throws an error of some kind + or is a no-op. + + We use this function while handling the '--warn-unreachable' flag. When + that flag is present, we normally report an error on any unreachable statement. + But if that statement is just something like a 'pass' or a just-in-case 'assert False', + reporting an error would be annoying. + """ + if isinstance(s, AssertStmt) and is_false_literal(s.expr): + return True + elif isinstance(s, ReturnStmt) and is_literal_not_implemented(s.expr): + return True + elif isinstance(s, (RaiseStmt, PassStmt)): + return True + elif isinstance(s, ExpressionStmt): + if isinstance(s.expr, EllipsisExpr): + return True + elif isinstance(s.expr, CallExpr): + with self.expr_checker.msg.filter_errors(filter_revealed_type=True): + typ = get_proper_type( + self.expr_checker.accept( + s.expr, allow_none_return=True, always_allow_any=True + ) + ) + + if isinstance(typ, UninhabitedType): + return True + return False + + def visit_assignment_stmt(self, s: AssignmentStmt) -> None: + """Type check an assignment statement. + + Handle all kinds of assignment statements (simple, indexed, multiple). + """ + + # Avoid type checking type aliases in stubs to avoid false + # positives about modern type syntax available in stubs such + # as X | Y. + if not (s.is_alias_def and self.is_stub): + with self.enter_final_context(s.is_final_def): + self.check_assignment(s.lvalues[-1], s.rvalue, s.type is None, s.new_syntax) + + if s.is_alias_def: + self.check_type_alias_rvalue(s) + + if ( + s.type is not None + and self.options.disallow_any_unimported + and has_any_from_unimported_type(s.type) + ): + if isinstance(s.lvalues[-1], TupleExpr): + # This is a multiple assignment. Instead of figuring out which type is problematic, + # give a generic error message. + self.msg.unimported_type_becomes_any( + "A type on this line", AnyType(TypeOfAny.special_form), s + ) + else: + self.msg.unimported_type_becomes_any("Type of variable", s.type, s) + check_for_explicit_any(s.type, self.options, self.is_typeshed_stub, self.msg, context=s) + + if len(s.lvalues) > 1: + # Chained assignment (e.g. x = y = ...). + # Make sure that rvalue type will not be reinferred. 
+ if not self.has_type(s.rvalue): + self.expr_checker.accept(s.rvalue) + rvalue = self.temp_node(self.lookup_type(s.rvalue), s) + for lv in s.lvalues[:-1]: + with self.enter_final_context(s.is_final_def): + self.check_assignment(lv, rvalue, s.type is None) + + self.check_final(s) + if ( + s.is_final_def + and s.type + and not has_no_typevars(s.type) + and self.scope.active_class() is not None + ): + self.fail(message_registry.DEPENDENT_FINAL_IN_CLASS_BODY, s) + + if s.unanalyzed_type and not self.in_checked_function(): + self.msg.annotation_in_unchecked_function(context=s) + + def check_type_alias_rvalue(self, s: AssignmentStmt) -> None: + with self.msg.filter_errors(): + alias_type = self.expr_checker.accept(s.rvalue) + self.store_type(s.lvalues[-1], alias_type) + + def check_assignment( + self, + lvalue: Lvalue, + rvalue: Expression, + infer_lvalue_type: bool = True, + new_syntax: bool = False, + ) -> None: + """Type check a single assignment: lvalue = rvalue.""" + if isinstance(lvalue, (TupleExpr, ListExpr)): + self.check_assignment_to_multiple_lvalues( + lvalue.items, rvalue, rvalue, infer_lvalue_type + ) + else: + self.try_infer_partial_generic_type_from_assignment(lvalue, rvalue, "=") + lvalue_type, index_lvalue, inferred = self.check_lvalue(lvalue, rvalue) + # If we're assigning to __getattr__ or similar methods, check that the signature is + # valid. + if isinstance(lvalue, NameExpr) and lvalue.node: + name = lvalue.node.name + if name in ("__setattr__", "__getattribute__", "__getattr__"): + # If an explicit type is given, use that. + if lvalue_type: + signature = lvalue_type + else: + signature = self.expr_checker.accept(rvalue) + if signature: + if name == "__setattr__": + self.check_setattr_method(signature, lvalue) + else: + self.check_getattr_method(signature, lvalue, name) + + if name == "__slots__" and self.scope.active_class() is not None: + typ = lvalue_type or self.expr_checker.accept(rvalue) + self.check_slots_definition(typ, lvalue) + if name == "__match_args__" and inferred is not None: + typ = self.expr_checker.accept(rvalue) + self.check_match_args(inferred, typ, lvalue) + if name == "__post_init__": + active_class = self.scope.active_class() + if active_class and dataclasses_plugin.is_processed_dataclass(active_class): + self.fail(message_registry.DATACLASS_POST_INIT_MUST_BE_A_FUNCTION, rvalue) + + if isinstance(lvalue, MemberExpr) and lvalue.name == "__match_args__": + self.fail(message_registry.CANNOT_MODIFY_MATCH_ARGS, lvalue) + + if lvalue_type: + if isinstance(lvalue_type, PartialType) and lvalue_type.type is None: + # Try to infer a proper type for a variable with a partial None type. + rvalue_type = self.expr_checker.accept(rvalue) + if isinstance(get_proper_type(rvalue_type), NoneType): + # This doesn't actually provide any additional information -- multiple + # None initializers preserve the partial None type. + return + + var = lvalue_type.var + if is_valid_inferred_type( + rvalue_type, self.options, is_lvalue_final=var.is_final + ): + partial_types = self.find_partial_types(var) + if partial_types is not None: + if not self.current_node_deferred: + # Partial type can't be final, so strip any literal values. + rvalue_type = remove_instance_last_known_values(rvalue_type) + inferred_type = make_simplified_union([rvalue_type, NoneType()]) + self.set_inferred_type(var, lvalue, inferred_type) + else: + var.type = None + del partial_types[var] + lvalue_type = var.type + else: + # Try to infer a partial type. 
+ if not self.infer_partial_type(var, lvalue, rvalue_type): + # If that also failed, give up and let the caller know that we + # cannot read their mind. The definition site will be reported later. + # Calling .put() directly because the newly inferred type is + # not a subtype of None - we are not looking for narrowing + fallback = self.inference_error_fallback_type(rvalue_type) + self.binder.put(lvalue, fallback) + # Same as self.set_inference_error_fallback_type but inlined + # to avoid computing fallback twice. + # We are replacing partial now, so the variable type + # should remain optional. + self.set_inferred_type(var, lvalue, make_optional_type(fallback)) + elif ( + is_literal_none(rvalue) + and isinstance(lvalue, NameExpr) + and isinstance(lvalue.node, Var) + and lvalue.node.is_initialized_in_class + and not new_syntax + ): + # Allow None's to be assigned to class variables with non-Optional types. + rvalue_type = lvalue_type + elif ( + isinstance(lvalue, MemberExpr) and lvalue.kind is None + ): # Ignore member access to modules + instance_type = self.expr_checker.accept(lvalue.expr) + rvalue_type, lvalue_type, infer_lvalue_type = self.check_member_assignment( + lvalue, instance_type, lvalue_type, rvalue, context=rvalue + ) + else: + # Hacky special case for assigning a literal None + # to a variable defined in a previous if + # branch. When we detect this, we'll go back and + # make the type optional. This is somewhat + # unpleasant, and a generalization of this would + # be an improvement! + if ( + not self.options.allow_redefinition_new + and is_literal_none(rvalue) + and isinstance(lvalue, NameExpr) + and lvalue.kind == LDEF + and isinstance(lvalue.node, Var) + and lvalue.node.type + and lvalue.node in self.var_decl_frames + and not isinstance(get_proper_type(lvalue_type), AnyType) + ): + decl_frame_map = self.var_decl_frames[lvalue.node] + # Check if the nearest common ancestor frame for the definition site + # and the current site is the enclosing frame of an if/elif/else block. + has_if_ancestor = False + for frame in reversed(self.binder.frames): + if frame.id in decl_frame_map: + has_if_ancestor = frame.conditional_frame + break + if has_if_ancestor: + lvalue_type = make_optional_type(lvalue_type) + self.set_inferred_type(lvalue.node, lvalue, lvalue_type) + + rvalue_type, lvalue_type = self.check_simple_assignment( + lvalue_type, rvalue, context=rvalue, inferred=inferred, lvalue=lvalue + ) + # The above call may update inferred variable type. Prevent further + # inference. + inferred = None + + # Special case: only non-abstract non-protocol classes can be assigned to + # variables with explicit type Type[A], where A is protocol or abstract. + p_rvalue_type = get_proper_type(rvalue_type) + p_lvalue_type = get_proper_type(lvalue_type) + if ( + isinstance(p_rvalue_type, FunctionLike) + and p_rvalue_type.is_type_obj() + and ( + p_rvalue_type.type_object().is_abstract + or p_rvalue_type.type_object().is_protocol + ) + and isinstance(p_lvalue_type, TypeType) + and isinstance(p_lvalue_type.item, Instance) + and ( + p_lvalue_type.item.type.is_abstract or p_lvalue_type.item.type.is_protocol + ) + ): + self.msg.concrete_only_assign(p_lvalue_type, rvalue) + return + if rvalue_type and infer_lvalue_type and not isinstance(lvalue_type, PartialType): + # Don't use type binder for definitions of special forms, like named tuples. 
+ if not (isinstance(lvalue, NameExpr) and lvalue.is_special_form): + self.binder.assign_type(lvalue, rvalue_type, lvalue_type) + if ( + isinstance(lvalue, NameExpr) + and isinstance(lvalue.node, Var) + and lvalue.node.is_inferred + and lvalue.node.is_index_var + and lvalue_type is not None + ): + lvalue.node.type = remove_instance_last_known_values(lvalue_type) + elif ( + self.options.allow_redefinition_new + and lvalue_type is not None + and not isinstance(lvalue_type, PartialType) + ): + # TODO: Can we use put() here? + self.binder.assign_type(lvalue, lvalue_type, lvalue_type) + + elif index_lvalue: + self.check_indexed_assignment(index_lvalue, rvalue, lvalue) + + if inferred: + type_context = self.get_variable_type_context(inferred, rvalue) + rvalue_type = self.expr_checker.accept(rvalue, type_context=type_context) + if not ( + inferred.is_final + or inferred.is_index_var + or (isinstance(lvalue, NameExpr) and lvalue.name == "__match_args__") + ): + rvalue_type = remove_instance_last_known_values(rvalue_type) + self.infer_variable_type(inferred, lvalue, rvalue_type, rvalue) + self.check_assignment_to_slots(lvalue) + if isinstance(lvalue, RefExpr) and not ( + isinstance(lvalue, NameExpr) and lvalue.name == "__match_args__" + ): + # We check override here at the end after storing the inferred type, since + # override check will try to access the current attribute via symbol tables + # (like a regular attribute access). + self.check_compatibility_all_supers(lvalue, rvalue) + + # (type, operator) tuples for augmented assignments supported with partial types + partial_type_augmented_ops: Final = {("builtins.list", "+"), ("builtins.set", "|")} + + def get_variable_type_context(self, inferred: Var, rvalue: Expression) -> Type | None: + type_contexts = [] + if inferred.info: + for base in inferred.info.mro[1:]: + if inferred.name not in base.names: + continue + # For inference within class body, get supertype attribute as it would look on + # a class object for lambdas overriding methods, etc. + base_node = base.names[inferred.name].node + base_type, _ = self.node_type_from_base( + inferred.name, + base, + inferred, + is_class=is_method(base_node) + or isinstance(base_node, Var) + and not is_instance_var(base_node), + ) + if ( + base_type + and not (isinstance(base_node, Var) and base_node.invalid_partial_type) + and not isinstance(base_type, PartialType) + ): + type_contexts.append(base_type) + # Use most derived supertype as type context if available. + if not type_contexts: + if inferred.name == "__slots__" and self.scope.active_class() is not None: + str_type = self.named_type("builtins.str") + return self.named_generic_type("typing.Iterable", [str_type]) + if inferred.name == "__all__" and self.scope.is_top_level(): + str_type = self.named_type("builtins.str") + return self.named_generic_type("typing.Sequence", [str_type]) + return None + candidate = type_contexts[0] + for other in type_contexts: + if is_proper_subtype(other, candidate): + candidate = other + elif not is_subtype(candidate, other): + # Multiple incompatible candidates, cannot use any of them as context. + return None + return candidate + + def try_infer_partial_generic_type_from_assignment( + self, lvalue: Lvalue, rvalue: Expression, op: str + ) -> None: + """Try to infer a precise type for partial generic type from assignment. + + 'op' is '=' for normal assignment and a binary operator ('+', ...) for + augmented assignment. 
+ + Example where this happens: + + x = [] + if foo(): + x = [1] # Infer List[int] as type of 'x' + """ + var = None + if ( + isinstance(lvalue, NameExpr) + and isinstance(lvalue.node, Var) + and isinstance(lvalue.node.type, PartialType) + ): + var = lvalue.node + elif isinstance(lvalue, MemberExpr): + var = self.expr_checker.get_partial_self_var(lvalue) + if var is not None: + typ = var.type + assert isinstance(typ, PartialType) + if typ.type is None: + return + # Return if this is an unsupported augmented assignment. + if op != "=" and (typ.type.fullname, op) not in self.partial_type_augmented_ops: + return + # TODO: some logic here duplicates the None partial type counterpart + # inlined in check_assignment(), see #8043. + partial_types = self.find_partial_types(var) + if partial_types is None: + return + rvalue_type = self.expr_checker.accept(rvalue) + rvalue_type = get_proper_type(rvalue_type) + if isinstance(rvalue_type, Instance): + if rvalue_type.type == typ.type and is_valid_inferred_type( + rvalue_type, self.options + ): + var.type = rvalue_type + del partial_types[var] + elif isinstance(rvalue_type, AnyType): + var.type = fill_typevars_with_any(typ.type) + del partial_types[var] + + def check_compatibility_all_supers(self, lvalue: RefExpr, rvalue: Expression) -> None: + lvalue_node = lvalue.node + # Check if we are a class variable with at least one base class + if ( + isinstance(lvalue_node, Var) + # If we have explicit annotation, there is no point in checking the override + # for each assignment, so we check only for the first one. + # TODO: for some reason annotated attributes on self are stored as inferred vars. + and ( + lvalue_node.line == lvalue.line + or lvalue_node.is_inferred + and not lvalue_node.explicit_self_type + ) + and lvalue.kind in (MDEF, None) # None for Vars defined via self + and len(lvalue_node.info.bases) > 0 + ): + for base in lvalue_node.info.mro[1:]: + tnode = base.names.get(lvalue_node.name) + if tnode is not None: + if not self.check_compatibility_classvar_super(lvalue_node, base, tnode.node): + # Show only one error per variable + break + + if not self.check_compatibility_final_super(lvalue_node, base, tnode.node): + # Show only one error per variable + break + + direct_bases = lvalue_node.info.direct_base_classes() + last_immediate_base = direct_bases[-1] if direct_bases else None + + # The historical behavior for inferred vars was to compare rvalue type against + # the type declared in a superclass. To preserve this behavior, we temporarily + # store the rvalue type on the variable. + actual_lvalue_type = None + if lvalue_node.is_inferred and not lvalue_node.explicit_self_type: + # Don't use partial types as context, similar to regular code path. + ctx = lvalue_node.type if not isinstance(lvalue_node.type, PartialType) else None + rvalue_type = self.expr_checker.accept(rvalue, ctx) + actual_lvalue_type = lvalue_node.type + lvalue_node.type = rvalue_type + lvalue_type, _ = self.node_type_from_base(lvalue_node.name, lvalue_node.info, lvalue) + if lvalue_node.is_inferred and not lvalue_node.explicit_self_type: + lvalue_node.type = actual_lvalue_type + + if not lvalue_type: + return + + for base in lvalue_node.info.mro[1:]: + # The type of "__slots__" and some other attributes usually doesn't need to + # be compatible with a base class. We'll still check the type of "__slots__" + # against "object" as an exception. 
+ if lvalue_node.allow_incompatible_override and not ( + lvalue_node.name == "__slots__" and base.fullname == "builtins.object" + ): + continue + + if is_private(lvalue_node.name): + continue + + base_type, base_node = self.node_type_from_base(lvalue_node.name, base, lvalue) + # TODO: if the r.h.s. is a descriptor, we should check setter override as well. + custom_setter = is_custom_settable_property(base_node) + if isinstance(base_type, PartialType): + base_type = None + + if base_type: + assert base_node is not None + if not self.check_compatibility_super( + lvalue_type, + rvalue, + base, + base_type, + base_node, + always_allow_covariant=custom_setter, + ): + # Only show one error per variable; even if other + # base classes are also incompatible + return + if lvalue_type and custom_setter: + base_type, _ = self.node_type_from_base( + lvalue_node.name, base, lvalue, setter_type=True + ) + # Setter type for a custom property must be ready if + # the getter type is ready. + assert base_type is not None + if not is_subtype(base_type, lvalue_type): + self.msg.incompatible_setter_override( + lvalue, lvalue_type, base_type, base + ) + return + if base is last_immediate_base: + # At this point, the attribute was found to be compatible with all + # immediate parents. + break + + def check_compatibility_super( + self, + compare_type: Type, + rvalue: Expression, + base: TypeInfo, + base_type: Type, + base_node: Node, + always_allow_covariant: bool, + ) -> bool: + # TODO: check __set__() type override for custom descriptors. + # TODO: for descriptors check also class object access override. + ok = self.check_subtype( + compare_type, + base_type, + rvalue, + message_registry.INCOMPATIBLE_TYPES_IN_ASSIGNMENT, + "expression has type", + f'base class "{base.name}" defined the type as', + ) + if ( + ok + and codes.MUTABLE_OVERRIDE in self.options.enabled_error_codes + and self.is_writable_attribute(base_node) + and not always_allow_covariant + ): + ok = self.check_subtype( + base_type, + compare_type, + rvalue, + message_registry.COVARIANT_OVERRIDE_OF_MUTABLE_ATTRIBUTE, + f'base class "{base.name}" defined the type as', + "expression has type", + ) + return ok + + def node_type_from_base( + self, + name: str, + base: TypeInfo, + context: Context, + *, + setter_type: bool = False, + is_class: bool = False, + current_class: TypeInfo | None = None, + ) -> tuple[Type | None, SymbolNode | None]: + """Find a type for a name in base class. + + Return the type found and the corresponding node defining the name or None + for both if the name is not defined in base or the node type is not known (yet). + The type returned is already properly mapped/bound to the subclass. + If setter_type is True, return setter types for settable properties (otherwise the + getter type is returned). + """ + base_node = base.names.get(name) + + # TODO: defer current node if the superclass node is not ready. 
+ if ( + not base_node + or isinstance(base_node.node, (Var, Decorator)) + and not base_node.type + or isinstance(base_node.type, PartialType) + and base_node.type.type is not None + ): + return None, None + + if current_class is None: + self_type = self.scope.current_self_type() + else: + self_type = fill_typevars(current_class) + assert self_type is not None, "Internal error: base lookup outside class" + if isinstance(self_type, TupleType): + instance = tuple_fallback(self_type) + else: + instance = self_type + + mx = MemberContext( + is_lvalue=setter_type, + is_super=False, + is_operator=mypy.checkexpr.is_operator_method(name), + original_type=self_type, + context=context, + chk=self, + suppress_errors=True, + ) + # TODO: we should not filter "cannot determine type" errors here. + with self.msg.filter_errors(filter_deprecated=True): + if is_class: + fallback = instance.type.metaclass_type or mx.named_type("builtins.type") + base_type = analyze_class_attribute_access( + instance, name, mx, mcs_fallback=fallback, override_info=base + ) + else: + base_type = analyze_instance_member_access(name, instance, mx, base) + return base_type, base_node.node + + def check_compatibility_classvar_super( + self, node: Var, base: TypeInfo, base_node: Node | None + ) -> bool: + if not isinstance(base_node, Var): + return True + if node.is_classvar and not base_node.is_classvar: + self.fail(message_registry.CANNOT_OVERRIDE_INSTANCE_VAR.format(base.name), node) + return False + elif not node.is_classvar and base_node.is_classvar: + self.fail(message_registry.CANNOT_OVERRIDE_CLASS_VAR.format(base.name), node) + return False + return True + + def check_compatibility_final_super( + self, node: Var, base: TypeInfo, base_node: Node | None + ) -> bool: + """Check if an assignment overrides a final attribute in a base class. + + This only checks situations where either a node in base class is not a variable + but a final method, or where override is explicitly declared as final. + In these cases we give a more detailed error message. In addition, we check that + a final variable doesn't override writeable attribute, which is not safe. + + Other situations are checked in `check_final()`. + """ + if not isinstance(base_node, (Var, FuncBase, Decorator)): + return True + if is_private(node.name): + return True + if base_node.is_final and (node.is_final or not isinstance(base_node, Var)): + # Give this error only for explicit override attempt with `Final`, or + # if we are overriding a final method with variable. + # Other override attempts will be flagged as assignment to constant + # in `check_final()`. + self.msg.cant_override_final(node.name, base.name, node) + return False + if node.is_final: + if base.fullname in ENUM_BASES or node.name in ENUM_SPECIAL_PROPS: + return True + self.check_if_final_var_override_writable(node.name, base_node, node) + return True + + def check_if_final_var_override_writable( + self, name: str, base_node: Node | None, ctx: Context + ) -> None: + """Check that a final variable doesn't override writeable attribute. + + This is done to prevent situations like this: + class C: + attr = 1 + class D(C): + attr: Final = 2 + + x: C = D() + x.attr = 3 # Oops! 
+ """ + writable = True + if base_node: + writable = self.is_writable_attribute(base_node) + if writable: + self.msg.final_cant_override_writable(name, ctx) + + def get_final_context(self) -> bool: + """Check whether we a currently checking a final declaration.""" + return self._is_final_def + + @contextmanager + def enter_final_context(self, is_final_def: bool) -> Iterator[None]: + """Store whether the current checked assignment is a final declaration.""" + old_ctx = self._is_final_def + self._is_final_def = is_final_def + try: + yield + finally: + self._is_final_def = old_ctx + + def check_final(self, s: AssignmentStmt | OperatorAssignmentStmt | AssignmentExpr) -> None: + """Check if this assignment does not assign to a final attribute. + + This function performs the check only for name assignments at module + and class scope. The assignments to `obj.attr` and `Cls.attr` are checked + in checkmember.py. + """ + if isinstance(s, AssignmentStmt): + lvs = self.flatten_lvalues(s.lvalues) + elif isinstance(s, AssignmentExpr): + lvs = [s.target] + else: + lvs = [s.lvalue] + is_final_decl = s.is_final_def if isinstance(s, AssignmentStmt) else False + if is_final_decl and (active_class := self.scope.active_class()): + lv = lvs[0] + assert isinstance(lv, RefExpr) + if lv.node is not None: + assert isinstance(lv.node, Var) + if ( + lv.node.final_unset_in_class + and not lv.node.final_set_in_init + and not self.is_stub # It is OK to skip initializer in stub files. + and + # Avoid extra error messages, if there is no type in Final[...], + # then we already reported the error about missing r.h.s. + isinstance(s, AssignmentStmt) + and s.type is not None + # Avoid extra error message for NamedTuples, + # they were reported during semanal + and not active_class.is_named_tuple + ): + self.msg.final_without_value(s) + for lv in lvs: + if isinstance(lv, RefExpr) and isinstance(lv.node, Var): + name = lv.node.name + cls = self.scope.active_class() + if cls is not None: + # These additional checks exist to give more error messages + # even if the final attribute was overridden with a new symbol + # (which is itself an error)... + for base in cls.mro[1:]: + sym = base.names.get(name) + # We only give this error if base node is variable, + # overriding final method will be caught in + # `check_compatibility_final_super()`. + if sym and isinstance(sym.node, Var): + if sym.node.is_final and not is_final_decl: + self.msg.cant_assign_to_final(name, sym.node.info is None, s) + # ...but only once + break + if lv.node.is_final and not is_final_decl: + self.msg.cant_assign_to_final(name, lv.node.info is None, s) + + def check_assignment_to_slots(self, lvalue: Lvalue) -> None: + if not isinstance(lvalue, MemberExpr): + return + + inst = get_proper_type(self.expr_checker.accept(lvalue.expr)) + if isinstance(inst, TypeVarType) and inst.id.is_self(): + # Unwrap self type + inst = get_proper_type(inst.upper_bound) + if not isinstance(inst, Instance): + return + if inst.type.slots is None: + return # Slots do not exist, we can allow any assignment + if lvalue.name in inst.type.slots: + return # We are assigning to an existing slot + for base_info in inst.type.mro[:-1]: + if base_info.names.get("__setattr__") is not None: + # When type has `__setattr__` defined, + # we can assign any dynamic value. + # We exclude object, because it always has `__setattr__`. 
+ return + + definition = inst.type.get(lvalue.name) + if definition is None: + # We don't want to duplicate + # `"SomeType" has no attribute "some_attr"` + # error twice. + return + if self.is_assignable_slot(lvalue, definition.type): + return + + self.fail( + message_registry.NAME_NOT_IN_SLOTS.format(lvalue.name, inst.type.fullname), lvalue + ) + + def is_assignable_slot(self, lvalue: Lvalue, typ: Type | None) -> bool: + if getattr(lvalue, "node", None): + return False # This is a definition + + typ = get_proper_type(typ) + if typ is None or isinstance(typ, AnyType): + return True # Any can be literally anything, like `@property` + if isinstance(typ, Instance): + # When working with instances, we need to know if they contain + # `__set__` special method. Like `@property` does. + # This makes assigning to properties possible, + # even without extra slot spec. + return typ.type.get("__set__") is not None + if isinstance(typ, FunctionLike): + return True # Can be a property, or some other magic + if isinstance(typ, UnionType): + return all(self.is_assignable_slot(lvalue, u) for u in typ.items) + return False + + def flatten_rvalues(self, rvalues: list[Expression]) -> list[Expression]: + """Flatten expression list by expanding those * items that have tuple type. + + For each regular type item in the tuple type use a TempNode(), for an Unpack + item use a corresponding StarExpr(TempNode()). + """ + new_rvalues = [] + for rv in rvalues: + if not isinstance(rv, StarExpr): + new_rvalues.append(rv) + continue + typ = get_proper_type(self.expr_checker.accept(rv.expr)) + if not isinstance(typ, TupleType): + new_rvalues.append(rv) + continue + for t in typ.items: + if not isinstance(t, UnpackType): + new_rvalues.append(TempNode(t)) + else: + unpacked = get_proper_type(t.type) + if isinstance(unpacked, TypeVarTupleType): + fallback = unpacked.upper_bound + else: + assert ( + isinstance(unpacked, Instance) + and unpacked.type.fullname == "builtins.tuple" + ) + fallback = unpacked + new_rvalues.append(StarExpr(TempNode(fallback))) + return new_rvalues + + def check_assignment_to_multiple_lvalues( + self, + lvalues: list[Lvalue], + rvalue: Expression, + context: Context, + infer_lvalue_type: bool = True, + ) -> None: + if isinstance(rvalue, (TupleExpr, ListExpr)): + # Recursively go into Tuple or List expression rhs instead of + # using the type of rhs, because this allows more fine-grained + # control in cases like: a, b = [int, str] where rhs would get + # type List[object] + rvalues: list[Expression] = [] + iterable_type: Type | None = None + last_idx: int | None = None + for idx_rval, rval in enumerate(self.flatten_rvalues(rvalue.items)): + if isinstance(rval, StarExpr): + typs = get_proper_type(self.expr_checker.accept(rval.expr)) + if self.type_is_iterable(typs) and isinstance(typs, Instance): + if iterable_type is not None and iterable_type != self.iterable_item_type( + typs, rvalue + ): + self.fail(message_registry.CONTIGUOUS_ITERABLE_EXPECTED, context) + else: + if last_idx is None or last_idx + 1 == idx_rval: + rvalues.append(rval) + last_idx = idx_rval + iterable_type = self.iterable_item_type(typs, rvalue) + else: + self.fail(message_registry.CONTIGUOUS_ITERABLE_EXPECTED, context) + else: + self.fail(message_registry.ITERABLE_TYPE_EXPECTED.format(typs), context) + else: + rvalues.append(rval) + iterable_start: int | None = None + iterable_end: int | None = None + for i, rval in enumerate(rvalues): + if isinstance(rval, StarExpr): + typs = get_proper_type(self.expr_checker.accept(rval.expr)) 
+ if self.type_is_iterable(typs) and isinstance(typs, Instance): + if iterable_start is None: + iterable_start = i + iterable_end = i + if ( + iterable_start is not None + and iterable_end is not None + and iterable_type is not None + ): + iterable_num = iterable_end - iterable_start + 1 + rvalue_needed = len(lvalues) - (len(rvalues) - iterable_num) + if rvalue_needed > 0: + rvalues = ( + rvalues[0:iterable_start] + + [TempNode(iterable_type, context=rval) for _ in range(rvalue_needed)] + + rvalues[iterable_end + 1 :] + ) + + if self.check_rvalue_count_in_assignment(lvalues, len(rvalues), context): + star_index = next( + (i for i, lv in enumerate(lvalues) if isinstance(lv, StarExpr)), len(lvalues) + ) + + left_lvs = lvalues[:star_index] + star_lv = ( + cast(StarExpr, lvalues[star_index]) if star_index != len(lvalues) else None + ) + right_lvs = lvalues[star_index + 1 :] + + left_rvs, star_rvs, right_rvs = self.split_around_star( + rvalues, star_index, len(lvalues) + ) + + lr_pairs = list(zip(left_lvs, left_rvs)) + if star_lv: + rv_list = ListExpr(star_rvs) + rv_list.set_line(rvalue) + lr_pairs.append((star_lv.expr, rv_list)) + lr_pairs.extend(zip(right_lvs, right_rvs)) + + for lv, rv in lr_pairs: + self.check_assignment(lv, rv, infer_lvalue_type) + else: + self.check_multi_assignment(lvalues, rvalue, context, infer_lvalue_type) + + def check_rvalue_count_in_assignment( + self, + lvalues: list[Lvalue], + rvalue_count: int, + context: Context, + rvalue_unpack: int | None = None, + ) -> bool: + if rvalue_unpack is not None: + if not any(isinstance(e, StarExpr) for e in lvalues): + self.fail("Variadic tuple unpacking requires a star target", context) + return False + if len(lvalues) > rvalue_count: + self.fail(message_registry.TOO_MANY_TARGETS_FOR_VARIADIC_UNPACK, context) + return False + left_star_index = next(i for i, lv in enumerate(lvalues) if isinstance(lv, StarExpr)) + left_prefix = left_star_index + left_suffix = len(lvalues) - left_star_index - 1 + right_prefix = rvalue_unpack + right_suffix = rvalue_count - rvalue_unpack - 1 + if left_suffix > right_suffix or left_prefix > right_prefix: + # Case of asymmetric unpack like: + # rv: tuple[int, *Ts, int, int] + # x, y, *xs, z = rv + # it is technically valid, but is tricky to reason about. + # TODO: support this (at least if the r.h.s. unpack is a homogeneous tuple). + self.fail(message_registry.TOO_MANY_TARGETS_FOR_VARIADIC_UNPACK, context) + return True + if any(isinstance(lvalue, StarExpr) for lvalue in lvalues): + if len(lvalues) - 1 > rvalue_count: + self.msg.wrong_number_values_to_unpack(rvalue_count, len(lvalues) - 1, context) + return False + elif rvalue_count != len(lvalues): + self.msg.wrong_number_values_to_unpack(rvalue_count, len(lvalues), context) + return False + return True + + def check_multi_assignment( + self, + lvalues: list[Lvalue], + rvalue: Expression, + context: Context, + infer_lvalue_type: bool = True, + rv_type: Type | None = None, + undefined_rvalue: bool = False, + ) -> None: + """Check the assignment of one rvalue to a number of lvalues.""" + + # Infer the type of an ordinary rvalue expression. + # TODO: maybe elsewhere; redundant. + rvalue_type = get_proper_type(rv_type or self.expr_checker.accept(rvalue)) + + if isinstance(rvalue_type, TypeVarLikeType): + rvalue_type = get_proper_type(rvalue_type.upper_bound) + + if isinstance(rvalue_type, UnionType): + # If this is an Optional type in non-strict Optional code, unwrap it. 
+ relevant_items = rvalue_type.relevant_items() + if len(relevant_items) == 1: + rvalue_type = get_proper_type(relevant_items[0]) + + if ( + isinstance(rvalue_type, TupleType) + and find_unpack_in_list(rvalue_type.items) is not None + ): + # Normalize for consistent handling with "old-style" homogeneous tuples. + rvalue_type = expand_type(rvalue_type, {}) + + if isinstance(rvalue_type, AnyType): + for lv in lvalues: + if isinstance(lv, StarExpr): + lv = lv.expr + temp_node = self.temp_node( + AnyType(TypeOfAny.from_another_any, source_any=rvalue_type), context + ) + self.check_assignment(lv, temp_node, infer_lvalue_type) + elif isinstance(rvalue_type, TupleType): + self.check_multi_assignment_from_tuple( + lvalues, rvalue, rvalue_type, context, undefined_rvalue, infer_lvalue_type + ) + elif isinstance(rvalue_type, UnionType): + self.check_multi_assignment_from_union( + lvalues, rvalue, rvalue_type, context, infer_lvalue_type + ) + elif isinstance(rvalue_type, Instance) and rvalue_type.type.fullname == "builtins.str": + self.msg.unpacking_strings_disallowed(context) + else: + self.check_multi_assignment_from_iterable( + lvalues, rvalue_type, context, infer_lvalue_type + ) + + def check_multi_assignment_from_union( + self, + lvalues: list[Expression], + rvalue: Expression, + rvalue_type: UnionType, + context: Context, + infer_lvalue_type: bool, + ) -> None: + """Check assignment to multiple lvalue targets when rvalue type is a Union[...]. + For example: + + t: Union[Tuple[int, int], Tuple[str, str]] + x, y = t + reveal_type(x) # Union[int, str] + + The idea in this case is to process the assignment for every item of the union. + Important note: the types are collected in two places, 'union_types' contains + inferred types for first assignments, 'assignments' contains the narrowed types + for binder. + """ + self.no_partial_types = True + transposed: tuple[list[Type], ...] = tuple([] for _ in self.flatten_lvalues(lvalues)) + # Notify binder that we want to defer bindings and instead collect types. + with self.binder.accumulate_type_assignments() as assignments: + for item in rvalue_type.items: + # Type check the assignment separately for each union item and collect + # the inferred lvalue types for each union item. + self.check_multi_assignment( + lvalues, + rvalue, + context, + infer_lvalue_type=infer_lvalue_type, + rv_type=item, + undefined_rvalue=True, + ) + for t, lv in zip(transposed, self.flatten_lvalues(lvalues)): + # We can access _type_maps directly since temporary type maps are + # only created within expressions. + t.append(self._type_maps[-1].pop(lv, AnyType(TypeOfAny.special_form))) + union_types = tuple(make_simplified_union(col) for col in transposed) + for expr, items in assignments.items(): + # Bind a union of types collected in 'assignments' to every expression. + if isinstance(expr, StarExpr): + expr = expr.expr + + # TODO: See comment in binder.py, ConditionalTypeBinder.assign_type + # It's unclear why the 'declared_type' param is sometimes 'None' + clean_items: list[tuple[Type, Type]] = [] + for type, declared_type in items: + assert declared_type is not None + clean_items.append((type, declared_type)) + + types, declared_types = zip(*clean_items) + self.binder.assign_type( + expr, + make_simplified_union(list(types)), + make_simplified_union(list(declared_types)), + ) + for union, lv in zip(union_types, self.flatten_lvalues(lvalues)): + # Properly store the inferred types. 
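+            # Illustrative, following the docstring example above: for
+            #     t: Union[Tuple[int, int], Tuple[str, str]]
+            #     x, y = t
+            # each column of 'transposed' collects [int, str], so both x and y
+            # are stored with the simplified union int | str.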
+ _1, _2, inferred = self.check_lvalue(lv) + if inferred: + self.set_inferred_type(inferred, lv, union) + else: + self.store_type(lv, union) + self.no_partial_types = False + + def flatten_lvalues(self, lvalues: list[Expression]) -> list[Expression]: + res: list[Expression] = [] + for lv in lvalues: + if isinstance(lv, (TupleExpr, ListExpr)): + res.extend(self.flatten_lvalues(lv.items)) + if isinstance(lv, StarExpr): + # Unwrap StarExpr, since it is unwrapped by other helpers. + lv = lv.expr + res.append(lv) + return res + + def check_multi_assignment_from_tuple( + self, + lvalues: list[Lvalue], + rvalue: Expression, + rvalue_type: TupleType, + context: Context, + undefined_rvalue: bool, + infer_lvalue_type: bool = True, + ) -> None: + rvalue_unpack = find_unpack_in_list(rvalue_type.items) + if self.check_rvalue_count_in_assignment( + lvalues, len(rvalue_type.items), context, rvalue_unpack=rvalue_unpack + ): + star_index = next( + (i for i, lv in enumerate(lvalues) if isinstance(lv, StarExpr)), len(lvalues) + ) + + left_lvs = lvalues[:star_index] + star_lv = cast(StarExpr, lvalues[star_index]) if star_index != len(lvalues) else None + right_lvs = lvalues[star_index + 1 :] + + if not undefined_rvalue: + # Infer rvalue again, now in the correct type context. + lvalue_type = self.lvalue_type_for_inference(lvalues, rvalue_type) + reinferred_rvalue_type = get_proper_type( + self.expr_checker.accept(rvalue, lvalue_type) + ) + + if isinstance(reinferred_rvalue_type, TypeVarLikeType): + reinferred_rvalue_type = get_proper_type(reinferred_rvalue_type.upper_bound) + if isinstance(reinferred_rvalue_type, UnionType): + # If this is an Optional type in non-strict Optional code, unwrap it. + relevant_items = reinferred_rvalue_type.relevant_items() + if len(relevant_items) == 1: + reinferred_rvalue_type = get_proper_type(relevant_items[0]) + if isinstance(reinferred_rvalue_type, UnionType): + self.check_multi_assignment_from_union( + lvalues, rvalue, reinferred_rvalue_type, context, infer_lvalue_type + ) + return + if isinstance(reinferred_rvalue_type, AnyType): + # We can get Any if the current node is + # deferred. Doing more inference in deferred nodes + # is hard, so give up for now. We can also get + # here if reinferring types above changes the + # inferred return type for an overloaded function + # to be ambiguous. 
+ return + assert isinstance(reinferred_rvalue_type, TupleType) + rvalue_type = reinferred_rvalue_type + + left_rv_types, star_rv_types, right_rv_types = self.split_around_star( + rvalue_type.items, star_index, len(lvalues) + ) + + for lv, rv_type in zip(left_lvs, left_rv_types): + self.check_assignment(lv, self.temp_node(rv_type, context), infer_lvalue_type) + if star_lv: + list_expr = ListExpr( + [ + ( + self.temp_node(rv_type, context) + if not isinstance(rv_type, UnpackType) + else StarExpr(self.temp_node(rv_type.type, context)) + ) + for rv_type in star_rv_types + ] + ) + list_expr.set_line(context) + self.check_assignment(star_lv.expr, list_expr, infer_lvalue_type) + for lv, rv_type in zip(right_lvs, right_rv_types): + self.check_assignment(lv, self.temp_node(rv_type, context), infer_lvalue_type) + else: + # Store meaningful Any types for lvalues, errors are already given + # by check_rvalue_count_in_assignment() + if infer_lvalue_type: + for lv in lvalues: + if ( + isinstance(lv, NameExpr) + and isinstance(lv.node, Var) + and lv.node.type is None + ): + lv.node.type = AnyType(TypeOfAny.from_error) + elif isinstance(lv, StarExpr): + if ( + isinstance(lv.expr, NameExpr) + and isinstance(lv.expr.node, Var) + and lv.expr.node.type is None + ): + lv.expr.node.type = self.named_generic_type( + "builtins.list", [AnyType(TypeOfAny.from_error)] + ) + + def lvalue_type_for_inference(self, lvalues: list[Lvalue], rvalue_type: TupleType) -> Type: + star_index = next( + (i for i, lv in enumerate(lvalues) if isinstance(lv, StarExpr)), len(lvalues) + ) + left_lvs = lvalues[:star_index] + star_lv = cast(StarExpr, lvalues[star_index]) if star_index != len(lvalues) else None + right_lvs = lvalues[star_index + 1 :] + left_rv_types, star_rv_types, right_rv_types = self.split_around_star( + rvalue_type.items, star_index, len(lvalues) + ) + + type_parameters: list[Type] = [] + + def append_types_for_inference(lvs: list[Expression], rv_types: list[Type]) -> None: + for lv, rv_type in zip(lvs, rv_types): + sub_lvalue_type, index_expr, inferred = self.check_lvalue(lv) + if sub_lvalue_type and not isinstance(sub_lvalue_type, PartialType): + type_parameters.append(sub_lvalue_type) + else: # index lvalue + # TODO Figure out more precise type context, probably + # based on the type signature of the _set method. + type_parameters.append(rv_type) + + append_types_for_inference(left_lvs, left_rv_types) + + if star_lv: + sub_lvalue_type, index_expr, inferred = self.check_lvalue(star_lv.expr) + if sub_lvalue_type and not isinstance(sub_lvalue_type, PartialType): + type_parameters.extend([sub_lvalue_type] * len(star_rv_types)) + else: # index lvalue + # TODO Figure out more precise type context, probably + # based on the type signature of the _set method. 
+ type_parameters.extend(star_rv_types) + + append_types_for_inference(right_lvs, right_rv_types) + + return TupleType(type_parameters, self.named_type("builtins.tuple")) + + def split_around_star( + self, items: list[T], star_index: int, length: int + ) -> tuple[list[T], list[T], list[T]]: + """Splits a list of items in three to match another list of length 'length' + that contains a starred expression at 'star_index' in the following way: + + star_index = 2, length = 5 (i.e., [a,b,*,c,d]), items = [1,2,3,4,5,6,7] + returns in: ([1,2], [3,4,5], [6,7]) + """ + nr_right_of_star = length - star_index - 1 + right_index = -nr_right_of_star if nr_right_of_star != 0 else len(items) + left = items[:star_index] + star = items[star_index:right_index] + right = items[right_index:] + return left, star, right + + def type_is_iterable(self, type: Type) -> bool: + type = get_proper_type(type) + if isinstance(type, FunctionLike) and type.is_type_obj(): + type = type.fallback + return is_subtype( + type, self.named_generic_type("typing.Iterable", [AnyType(TypeOfAny.special_form)]) + ) + + def check_multi_assignment_from_iterable( + self, + lvalues: list[Lvalue], + rvalue_type: Type, + context: Context, + infer_lvalue_type: bool = True, + ) -> None: + rvalue_type = get_proper_type(rvalue_type) + if self.type_is_iterable(rvalue_type) and isinstance( + rvalue_type, (Instance, CallableType, TypeType, Overloaded) + ): + item_type = self.iterable_item_type(rvalue_type, context) + for lv in lvalues: + if isinstance(lv, StarExpr): + items_type = self.named_generic_type("builtins.list", [item_type]) + self.check_assignment( + lv.expr, self.temp_node(items_type, context), infer_lvalue_type + ) + else: + self.check_assignment( + lv, self.temp_node(item_type, context), infer_lvalue_type + ) + else: + self.msg.type_not_iterable(rvalue_type, context) + + def check_lvalue( + self, lvalue: Lvalue, rvalue: Expression | None = None + ) -> tuple[Type | None, IndexExpr | None, Var | None]: + lvalue_type = None + index_lvalue = None + inferred = None + + if self.is_definition(lvalue) and ( + not isinstance(lvalue, NameExpr) or isinstance(lvalue.node, Var) + ): + if isinstance(lvalue, NameExpr): + assert isinstance(lvalue.node, Var) + inferred = lvalue.node + else: + assert isinstance(lvalue, MemberExpr) + self.expr_checker.accept(lvalue.expr) + inferred = lvalue.def_var + elif isinstance(lvalue, IndexExpr): + index_lvalue = lvalue + elif isinstance(lvalue, MemberExpr): + lvalue_type = self.expr_checker.analyze_ordinary_member_access(lvalue, True, rvalue) + self.store_type(lvalue, lvalue_type) + elif isinstance(lvalue, NameExpr): + lvalue_type = self.expr_checker.analyze_ref_expr(lvalue, lvalue=True) + if ( + self.options.allow_redefinition_new + and isinstance(lvalue.node, Var) + and lvalue.node.is_inferred + ): + inferred = lvalue.node + self.store_type(lvalue, lvalue_type) + elif isinstance(lvalue, (TupleExpr, ListExpr)): + types = [ + self.check_lvalue(sub_expr)[0] or + # This type will be used as a context for further inference of rvalue, + # we put Uninhabited if there is no information available from lvalue. 
+                UninhabitedType()
+                for sub_expr in lvalue.items
+            ]
+            lvalue_type = TupleType(types, self.named_type("builtins.tuple"))
+        elif isinstance(lvalue, StarExpr):
+            lvalue_type, _, _ = self.check_lvalue(lvalue.expr)
+        else:
+            lvalue_type = self.expr_checker.accept(lvalue)
+
+        return lvalue_type, index_lvalue, inferred
+
+    def is_definition(self, s: Lvalue) -> bool:
+        if isinstance(s, NameExpr):
+            if s.is_inferred_def:
+                return True
+            # If the node type is not defined, this must be the first assignment
+            # that we process => this is a definition, even though the semantic
+            # analyzer did not recognize this as such. This can arise in code
+            # that uses isinstance checks, if type checking of the primary
+            # definition is skipped due to an always False type check.
+            node = s.node
+            if isinstance(node, Var):
+                return node.type is None
+        elif isinstance(s, MemberExpr):
+            return s.is_inferred_def
+        return False
+
+    def infer_variable_type(
+        self, name: Var, lvalue: Lvalue, init_type: Type, context: Context
+    ) -> None:
+        """Infer the type of initialized variables from initializer type."""
+        if isinstance(init_type, DeletedType):
+            self.msg.deleted_as_rvalue(init_type, context)
+        elif (
+            not is_valid_inferred_type(
+                init_type,
+                self.options,
+                is_lvalue_final=name.is_final,
+                is_lvalue_member=isinstance(lvalue, MemberExpr),
+            )
+            and not self.no_partial_types
+        ):
+            # We cannot use the type of the initialization expression for full type
+            # inference (it's not specific enough), but we might be able to give
+            # a partial type which will be made more specific later. A partial type
+            # gets generated in assignment like 'x = []' where item type is not known.
+            if name.name != "_" and not self.infer_partial_type(name, lvalue, init_type):
+                self.msg.need_annotation_for_var(name, context, self.options)
+                self.set_inference_error_fallback_type(name, lvalue, init_type)
+        elif (
+            isinstance(lvalue, MemberExpr)
+            and self.inferred_attribute_types is not None
+            and lvalue.def_var
+            and lvalue.def_var in self.inferred_attribute_types
+            and not is_same_type(self.inferred_attribute_types[lvalue.def_var], init_type)
+        ):
+            # Multiple, inconsistent types inferred for an attribute.
+            self.msg.need_annotation_for_var(name, context, self.options)
+            name.type = AnyType(TypeOfAny.from_error)
+        else:
+            # Infer type of the target.
+
+            # Make the type more general (strip away function names etc.).
+            init_type = strip_type(init_type)
+
+            self.set_inferred_type(name, lvalue, init_type)
+            if self.options.allow_redefinition_new:
+                self.binder.assign_type(lvalue, init_type, init_type)
+
+    def infer_partial_type(self, name: Var, lvalue: Lvalue, init_type: Type) -> bool:
+        init_type = get_proper_type(init_type)
+        if isinstance(init_type, NoneType) and (
+            isinstance(lvalue, MemberExpr) or not self.options.allow_redefinition_new
+        ):
+            # When using --allow-redefinition-new, None types aren't special
+            # when inferring simple variable types.
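+            # Illustrative example of what a partial None type enables (not in
+            # the original source): `x = None` followed later by `x = 1` lets
+            # the variable be inferred as `int | None` instead of requiring an
+            # explicit annotation.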
+ partial_type = PartialType(None, name) + elif isinstance(init_type, Instance): + fullname = init_type.type.fullname + is_ref = isinstance(lvalue, RefExpr) + if ( + is_ref + and ( + fullname == "builtins.list" + or fullname == "builtins.set" + or fullname == "builtins.dict" + or fullname == "collections.OrderedDict" + ) + and all( + isinstance(t, (NoneType, UninhabitedType)) + for t in get_proper_types(init_type.args) + ) + ): + partial_type = PartialType(init_type.type, name) + elif is_ref and fullname == "collections.defaultdict": + arg0 = get_proper_type(init_type.args[0]) + arg1 = get_proper_type(init_type.args[1]) + if isinstance( + arg0, (NoneType, UninhabitedType) + ) and self.is_valid_defaultdict_partial_value_type(arg1): + arg1 = erase_type(arg1) + assert isinstance(arg1, Instance) + partial_type = PartialType(init_type.type, name, arg1) + else: + return False + else: + return False + else: + return False + self.set_inferred_type(name, lvalue, partial_type) + self.partial_types[-1].map[name] = lvalue + return True + + def is_valid_defaultdict_partial_value_type(self, t: ProperType) -> bool: + """Check if t can be used as the basis for a partial defaultdict value type. + + Examples: + + * t is 'int' --> True + * t is 'list[Never]' --> True + * t is 'dict[...]' --> False (only generic types with a single type + argument supported) + """ + if not isinstance(t, Instance): + return False + if len(t.args) == 0: + return True + if len(t.args) == 1: + arg = get_proper_type(t.args[0]) + if self.options.old_type_inference: + # Allow leaked TypeVars for legacy inference logic. + allowed = isinstance(arg, (UninhabitedType, NoneType, TypeVarType)) + else: + allowed = isinstance(arg, (UninhabitedType, NoneType)) + if allowed: + return True + return False + + def set_inferred_type(self, var: Var, lvalue: Lvalue, type: Type) -> None: + """Store inferred variable type. + + Store the type to both the variable node and the expression node that + refers to the variable (lvalue). If var is None, do nothing. + """ + if var and not self.current_node_deferred: + var.type = type + var.is_inferred = True + var.is_ready = True + if var not in self.var_decl_frames: + # Used for the hack to improve optional type inference in conditionals + self.var_decl_frames[var] = {frame.id for frame in self.binder.frames} + if isinstance(lvalue, MemberExpr) and self.inferred_attribute_types is not None: + # Store inferred attribute type so that we can check consistency afterwards. + if lvalue.def_var is not None: + self.inferred_attribute_types[lvalue.def_var] = type + self.store_type(lvalue, type) + p_type = get_proper_type(type) + definition = None + if isinstance(p_type, CallableType): + definition = p_type.definition + elif isinstance(p_type, Overloaded): + # Randomly select first item, if items are different, there will + # be an error during semantic analysis. + definition = p_type.items[0].definition + if definition: + if is_node_static(definition): + var.is_staticmethod = True + elif is_classmethod_node(definition): + var.is_classmethod = True + elif is_property(definition): + var.is_property = True + if isinstance(p_type, Overloaded): + # TODO: in theory we can have a property with a deleter only. + var.is_settable_property = True + assert isinstance(definition, Decorator), definition + var.setter_type = definition.var.setter_type + + def set_inference_error_fallback_type(self, var: Var, lvalue: Lvalue, type: Type) -> None: + """Store best known type for variable if type inference failed. 
+ + If a program ignores error on type inference error, the variable should get some + inferred type so that it can used later on in the program. Example: + + x = [] # type: ignore + x.append(1) # Should be ok! + + We implement this here by giving x a valid type (replacing inferred Never with Any). + """ + fallback = self.inference_error_fallback_type(type) + self.set_inferred_type(var, lvalue, fallback) + + def inference_error_fallback_type(self, type: Type) -> Type: + fallback = type.accept(SetNothingToAny()) + # Type variables may leak from inference, see https://github.com/python/mypy/issues/5738, + # we therefore need to erase them. + return erase_typevars(fallback) + + def simple_rvalue(self, rvalue: Expression) -> bool: + """Returns True for expressions for which inferred type should not depend on context. + + Note that this function can still return False for some expressions where inferred type + does not depend on context. It only exists for performance optimizations. + """ + if isinstance(rvalue, (IntExpr, StrExpr, BytesExpr, FloatExpr, RefExpr)): + return True + if isinstance(rvalue, CallExpr): + if isinstance(rvalue.callee, RefExpr) and isinstance( + rvalue.callee.node, SYMBOL_FUNCBASE_TYPES + ): + typ = rvalue.callee.node.type + if isinstance(typ, CallableType): + return not typ.variables + elif isinstance(typ, Overloaded): + return not any(item.variables for item in typ.items) + return False + + def check_simple_assignment( + self, + lvalue_type: Type | None, + rvalue: Expression, + context: Context, + msg: ErrorMessage = message_registry.INCOMPATIBLE_TYPES_IN_ASSIGNMENT, + lvalue_name: str = "variable", + rvalue_name: str = "expression", + *, + notes: list[str] | None = None, + lvalue: Expression | None = None, + inferred: Var | None = None, + ) -> tuple[Type, Type | None]: + if self.is_stub and isinstance(rvalue, EllipsisExpr): + # '...' is always a valid initializer in a stub. + return AnyType(TypeOfAny.special_form), lvalue_type + else: + always_allow_any = lvalue_type is not None and not isinstance( + get_proper_type(lvalue_type), AnyType + ) + if inferred is None or is_typeddict_type_context(lvalue_type): + type_context = lvalue_type + else: + type_context = None + rvalue_type = self.expr_checker.accept( + rvalue, type_context=type_context, always_allow_any=always_allow_any + ) + if ( + lvalue_type is not None + and type_context is None + and not is_valid_inferred_type(rvalue_type, self.options) + ): + # Inference in an empty type context didn't produce a valid type, so + # try using lvalue type as context instead. + rvalue_type = self.expr_checker.accept( + rvalue, type_context=lvalue_type, always_allow_any=always_allow_any + ) + if not is_valid_inferred_type(rvalue_type, self.options) and inferred is not None: + self.msg.need_annotation_for_var(inferred, context, self.options) + rvalue_type = rvalue_type.accept(SetNothingToAny()) + + if ( + isinstance(lvalue, NameExpr) + and inferred is not None + and inferred.type is not None + and not inferred.is_final + ): + new_inferred = remove_instance_last_known_values(rvalue_type) + if not is_same_type(inferred.type, new_inferred): + # Should we widen the inferred type or the lvalue? Variables defined + # at module level or class bodies can't be widened in functions, or + # in another module. 
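+                    # Illustrative sketch (assumes the --allow-redefinition-new
+                    # code path): given
+                    #     x = 1
+                    #     x = "a"
+                    # in the same scope, x is widened below to int | str rather
+                    # than reporting an incompatible assignment.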
+ if not self.refers_to_different_scope(lvalue): + lvalue_type = make_simplified_union([inferred.type, new_inferred]) + if not is_same_type(lvalue_type, inferred.type) and not isinstance( + inferred.type, PartialType + ): + # Widen the type to the union of original and new type. + self.widened_vars.append(inferred.name) + self.set_inferred_type(inferred, lvalue, lvalue_type) + self.binder.put(lvalue, rvalue_type) + # TODO: A bit hacky, maybe add a binder method that does put and + # updates declaration? + lit = literal_hash(lvalue) + if lit is not None: + self.binder.declarations[lit] = lvalue_type + if ( + isinstance(get_proper_type(lvalue_type), UnionType) + # Skip literal types, as they have special logic (for better errors). + and not is_literal_type_like(rvalue_type) + and not self.simple_rvalue(rvalue) + ): + # Try re-inferring r.h.s. in empty context, and use that if it + # results in a narrower type. We don't do this always because this + # may cause some perf impact, plus we want to partially preserve + # the old behavior. This helps with various practical examples, see + # e.g. testOptionalTypeNarrowedByGenericCall. + with self.msg.filter_errors() as local_errors, self.local_type_map as type_map: + alt_rvalue_type = self.expr_checker.accept( + rvalue, None, always_allow_any=always_allow_any + ) + if ( + not local_errors.has_new_errors() + # Skip Any type, since it is special cased in binder. + and not isinstance(get_proper_type(alt_rvalue_type), AnyType) + and is_valid_inferred_type(alt_rvalue_type, self.options) + and is_proper_subtype(alt_rvalue_type, rvalue_type) + ): + rvalue_type = alt_rvalue_type + self.store_types(type_map) + if isinstance(rvalue_type, DeletedType): + self.msg.deleted_as_rvalue(rvalue_type, context) + if isinstance(lvalue_type, DeletedType): + self.msg.deleted_as_lvalue(lvalue_type, context) + elif lvalue_type: + self.check_subtype( + # Preserve original aliases for error messages when possible. + rvalue_type, + lvalue_type, + context, + msg, + f"{rvalue_name} has type", + f"{lvalue_name} has type", + notes=notes, + ) + return rvalue_type, lvalue_type + + def refers_to_different_scope(self, name: NameExpr) -> bool: + if name.kind == LDEF: + # TODO: Consider reference to outer function as a different scope? + return False + elif self.scope.top_level_function() is not None: + # A non-local reference from within a function must refer to a different scope + return True + elif name.kind == GDEF and name.fullname.rpartition(".")[0] != self.tree.fullname: + # Reference to global definition from another module + return True + return False + + def check_member_assignment( + self, + lvalue: MemberExpr, + instance_type: Type, + set_lvalue_type: Type, + rvalue: Expression, + context: Context, + ) -> tuple[Type, Type, bool]: + """Type member assignment. + + This defers to check_simple_assignment, unless the member expression + is a descriptor, in which case this checks descriptor semantics as well. + + Return the inferred rvalue_type, inferred lvalue_type, and whether to use the binder + for this assignment. 
+ """ + instance_type = get_proper_type(instance_type) + # Descriptors don't participate in class-attribute access + if (isinstance(instance_type, FunctionLike) and instance_type.is_type_obj()) or isinstance( + instance_type, TypeType + ): + rvalue_type, _ = self.check_simple_assignment(set_lvalue_type, rvalue, context) + return rvalue_type, set_lvalue_type, True + + with self.msg.filter_errors(filter_deprecated=True): + get_lvalue_type = self.expr_checker.analyze_ordinary_member_access( + lvalue, is_lvalue=False + ) + + # Special case: if the rvalue_type is a subtype of '__get__' type, and + # '__get__' type is narrower than '__set__', then we invoke the binder to narrow type + # by this assignment. Technically, this is not safe, but in practice this is + # what a user expects. + rvalue_type, _ = self.check_simple_assignment(set_lvalue_type, rvalue, context) + rvalue_type = rvalue_type if is_subtype(rvalue_type, get_lvalue_type) else get_lvalue_type + return rvalue_type, set_lvalue_type, is_subtype(get_lvalue_type, set_lvalue_type) + + def check_indexed_assignment( + self, lvalue: IndexExpr, rvalue: Expression, context: Context + ) -> None: + """Type check indexed assignment base[index] = rvalue. + + The lvalue argument is the base[index] expression. + """ + self.try_infer_partial_type_from_indexed_assignment(lvalue, rvalue) + basetype = get_proper_type(self.expr_checker.accept(lvalue.base)) + method_type = self.expr_checker.analyze_external_member_access( + "__setitem__", basetype, lvalue + ) + + lvalue.method_type = method_type + res_type, _ = self.expr_checker.check_method_call( + "__setitem__", + basetype, + method_type, + [lvalue.index, rvalue], + [nodes.ARG_POS, nodes.ARG_POS], + context, + ) + res_type = get_proper_type(res_type) + if isinstance(res_type, UninhabitedType) and not res_type.ambiguous: + self.binder.unreachable() + + def replace_partial_type( + self, var: Var, new_type: Type, partial_types: dict[Var, Context] + ) -> None: + """Replace the partial type of var with a non-partial type.""" + var.type = new_type + # Updating a partial type should invalidate expression caches. + self.binder.version += 1 + del partial_types[var] + if self.options.allow_redefinition_new: + # When using --allow-redefinition-new, binder tracks all types of + # simple variables. + n = NameExpr(var.name) + n.node = var + self.binder.assign_type(n, new_type, new_type) + + def try_infer_partial_type_from_indexed_assignment( + self, lvalue: IndexExpr, rvalue: Expression + ) -> None: + # TODO: Should we share some of this with try_infer_partial_type? + var = None + if isinstance(lvalue.base, RefExpr) and isinstance(lvalue.base.node, Var): + var = lvalue.base.node + elif isinstance(lvalue.base, MemberExpr): + var = self.expr_checker.get_partial_self_var(lvalue.base) + if isinstance(var, Var): + if isinstance(var.type, PartialType): + type_type = var.type.type + if type_type is None: + return # The partial type is None. + partial_types = self.find_partial_types(var) + if partial_types is None: + return + typename = type_type.fullname + if ( + typename == "builtins.dict" + or typename == "collections.OrderedDict" + or typename == "collections.defaultdict" + ): + # TODO: Don't infer things twice. 
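+                    # Illustrative example of the pattern handled here (not in
+                    # the original source):
+                    #     d = {}        # 'd' gets a partial dict type
+                    #     d["a"] = 1    # completes it to dict[str, int]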
+ key_type = self.expr_checker.accept(lvalue.index) + value_type = self.expr_checker.accept(rvalue) + if ( + is_valid_inferred_type(key_type, self.options) + and is_valid_inferred_type(value_type, self.options) + and not self.current_node_deferred + and not ( + typename == "collections.defaultdict" + and var.type.value_type is not None + and not is_equivalent(value_type, var.type.value_type) + ) + ): + new_type = self.named_generic_type(typename, [key_type, value_type]) + self.replace_partial_type(var, new_type, partial_types) + + def type_requires_usage(self, typ: Type) -> tuple[str, ErrorCode] | None: + """Some types require usage in all cases. The classic example is + an unused coroutine. + + In the case that it does require usage, returns a note to attach + to the error message. + """ + proper_type = get_proper_type(typ) + if isinstance(proper_type, Instance): + # We use different error codes for generic awaitable vs coroutine. + # Coroutines are on by default, whereas generic awaitables are not. + if proper_type.type.fullname == "typing.Coroutine": + return ("Are you missing an await?", UNUSED_COROUTINE) + if proper_type.type.get("__await__") is not None: + return ("Are you missing an await?", UNUSED_AWAITABLE) + return None + + def visit_expression_stmt(self, s: ExpressionStmt) -> None: + expr_type = self.expr_checker.accept(s.expr, allow_none_return=True, always_allow_any=True) + error_note_and_code = self.type_requires_usage(expr_type) + if error_note_and_code: + error_note, code = error_note_and_code + self.fail( + message_registry.TYPE_MUST_BE_USED.format(format_type(expr_type, self.options)), + s, + code=code, + ) + self.note(error_note, s, code=code) + + def visit_return_stmt(self, s: ReturnStmt) -> None: + """Type check a return statement.""" + self.check_return_stmt(s) + self.binder.unreachable() + + def infer_context_dependent( + self, expr: Expression, type_ctx: Type, allow_none_func_call: bool + ) -> ProperType: + """Infer type of an expression with fallback to empty type context.""" + with self.msg.filter_errors( + filter_errors=True, filter_deprecated=True, save_filtered_errors=True + ) as msg: + with self.local_type_map as type_map: + typ = get_proper_type( + self.expr_checker.accept( + expr, type_ctx, allow_none_return=allow_none_func_call + ) + ) + if not msg.has_new_errors(): + self.store_types(type_map) + return typ + + # If there are errors with the original type context, try re-inferring in empty context. + original_messages = msg.filtered_errors() + original_type_map = type_map + with self.msg.filter_errors( + filter_errors=True, filter_deprecated=True, save_filtered_errors=True + ) as msg: + with self.local_type_map as type_map: + alt_typ = get_proper_type( + self.expr_checker.accept(expr, None, allow_none_return=allow_none_func_call) + ) + if not msg.has_new_errors() and is_subtype(alt_typ, type_ctx): + self.store_types(type_map) + return alt_typ + + # If empty fallback didn't work, use results from the original type context. 
+ self.msg.add_errors(original_messages) + self.store_types(original_type_map) + return typ + + def check_return_stmt(self, s: ReturnStmt) -> None: + defn = self.scope.current_function() + if defn is not None: + if defn.is_generator: + return_type = self.get_generator_return_type( + self.return_types[-1], defn.is_coroutine + ) + elif defn.is_coroutine: + return_type = self.get_coroutine_return_type(self.return_types[-1]) + else: + return_type = self.return_types[-1] + return_type = get_proper_type(return_type) + + is_lambda = isinstance(defn, LambdaExpr) + if isinstance(return_type, UninhabitedType): + # Avoid extra error messages for failed inference in lambdas + if not is_lambda and not return_type.ambiguous: + self.fail(message_registry.NO_RETURN_EXPECTED, s) + return + + if s.expr: + declared_none_return = isinstance(return_type, NoneType) + declared_any_return = isinstance(return_type, AnyType) + + # This controls whether or not we allow a function call that + # returns None as the expression of this return statement. + # E.g. `return f()` for some `f` that returns None. We allow + # this only if we're in a lambda or in a function that returns + # `None` or `Any`. + allow_none_func_call = is_lambda or declared_none_return or declared_any_return + + # Return with a value. + if ( + isinstance(s.expr, (CallExpr, ListExpr, TupleExpr, DictExpr, SetExpr, OpExpr)) + or isinstance(s.expr, AwaitExpr) + and isinstance(s.expr.expr, CallExpr) + ): + # For expressions that (strongly) depend on type context (i.e. those that + # are handled like a function call), we allow fallback to empty type context + # in case of errors, this improves user experience in some cases, + # see e.g. testReturnFallbackInference. + typ = self.infer_context_dependent(s.expr, return_type, allow_none_func_call) + else: + typ = get_proper_type( + self.expr_checker.accept( + s.expr, return_type, allow_none_return=allow_none_func_call + ) + ) + # Treat NotImplemented as having type Any, consistent with its + # definition in typeshed prior to python/typeshed#4222. + if isinstance(typ, Instance) and typ.type.fullname in NOT_IMPLEMENTED_TYPE_NAMES: + typ = AnyType(TypeOfAny.special_form) + + if defn.is_async_generator: + self.fail(message_registry.RETURN_IN_ASYNC_GENERATOR, s) + return + # Returning a value of type Any is always fine. + if isinstance(typ, AnyType): + # (Unless you asked to be warned in that case, and the + # function is not declared to return Any) + if ( + self.options.warn_return_any + and not self.current_node_deferred + and not is_proper_subtype(AnyType(TypeOfAny.special_form), return_type) + and not ( + defn.name in BINARY_MAGIC_METHODS + and is_literal_not_implemented(s.expr) + ) + and not ( + isinstance(return_type, Instance) + and return_type.type.fullname == "builtins.object" + ) + and not is_lambda + ): + self.msg.incorrectly_returning_any(return_type, s) + return + + # Disallow return expressions in functions declared to return + # None, subject to two exceptions below. + if declared_none_return: + # Lambdas are allowed to have None returns. + # Functions returning a value of type None are allowed to have a None return. 
+ if is_lambda or isinstance(typ, NoneType): + return + self.fail(message_registry.NO_RETURN_VALUE_EXPECTED, s) + else: + self.check_subtype( + subtype_label="got", + subtype=typ, + supertype_label="expected", + supertype=return_type, + context=s.expr, + outer_context=s, + msg=message_registry.INCOMPATIBLE_RETURN_VALUE_TYPE, + ) + else: + # Empty returns are valid in Generators with Any typed returns, but not in + # coroutines. + if ( + defn.is_generator + and not defn.is_coroutine + and isinstance(return_type, AnyType) + ): + return + + if isinstance(return_type, (NoneType, AnyType)): + return + + if self.in_checked_function(): + self.fail(message_registry.RETURN_VALUE_EXPECTED, s) + + def visit_if_stmt(self, s: IfStmt) -> None: + """Type check an if statement.""" + # This frame records the knowledge from previous if/elif clauses not being taken. + # Fall-through to the original frame is handled explicitly in each block. + with self.binder.frame_context(can_skip=False, conditional_frame=True, fall_through=0): + for e, b in zip(s.expr, s.body): + t = get_proper_type(self.expr_checker.accept(e)) + + if isinstance(t, DeletedType): + self.msg.deleted_as_rvalue(t, s) + + if_map, else_map = self.find_isinstance_check(e) + + # XXX Issue a warning if condition is always False? + with self.binder.frame_context(can_skip=True, fall_through=2): + self.push_type_map(if_map, from_assignment=False) + self.accept(b) + + # XXX Issue a warning if condition is always True? + self.push_type_map(else_map, from_assignment=False) + + with self.binder.frame_context(can_skip=False, fall_through=2): + if s.else_body: + self.accept(s.else_body) + + def visit_while_stmt(self, s: WhileStmt) -> None: + """Type check a while statement.""" + if_stmt = IfStmt([s.expr], [s.body], None) + if_stmt.set_line(s) + self.accept_loop(if_stmt, s.else_body, exit_condition=s.expr) + + def visit_operator_assignment_stmt(self, s: OperatorAssignmentStmt) -> None: + """Type check an operator assignment statement, e.g. x += 1.""" + self.try_infer_partial_generic_type_from_assignment(s.lvalue, s.rvalue, s.op) + if isinstance(s.lvalue, MemberExpr): + # Special case, some additional errors may be given for + # assignments to read-only or final attributes. 
+            lvalue_type = self.expr_checker.visit_member_expr(s.lvalue, True)
+        else:
+            lvalue_type = self.expr_checker.accept(s.lvalue)
+        inplace, method = infer_operator_assignment_method(lvalue_type, s.op)
+        if inplace:
+            # There is __ifoo__, treat as x = x.__ifoo__(y)
+            rvalue_type, _ = self.expr_checker.check_op(method, lvalue_type, s.rvalue, s)
+            if not is_subtype(rvalue_type, lvalue_type):
+                self.msg.incompatible_operator_assignment(s.op, s)
+        else:
+            # There is no __ifoo__, treat as x = x <op> y
+            expr = OpExpr(s.op, s.lvalue, s.rvalue)
+            expr.set_line(s)
+            self.check_assignment(
+                lvalue=s.lvalue, rvalue=expr, infer_lvalue_type=True, new_syntax=False
+            )
+        self.check_final(s)
+
+    def visit_assert_stmt(self, s: AssertStmt) -> None:
+        self.expr_checker.accept(s.expr)
+
+        if isinstance(s.expr, TupleExpr) and len(s.expr.items) > 0:
+            self.fail(message_registry.MALFORMED_ASSERT, s)
+
+        # If this is asserting some isinstance check, bind that type in the following code
+        true_map, else_map = self.find_isinstance_check(s.expr)
+        if s.msg is not None:
+            self.expr_checker.analyze_cond_branch(
+                else_map, s.msg, None, suppress_unreachable_errors=False
+            )
+        self.push_type_map(true_map)
+
+    def visit_raise_stmt(self, s: RaiseStmt) -> None:
+        """Type check a raise statement."""
+        if s.expr:
+            self.type_check_raise(s.expr, s)
+        if s.from_expr:
+            self.type_check_raise(s.from_expr, s, optional=True)
+        self.binder.unreachable()
+
+    def type_check_raise(self, e: Expression, s: RaiseStmt, optional: bool = False) -> None:
+        typ = get_proper_type(self.expr_checker.accept(e))
+        if isinstance(typ, DeletedType):
+            self.msg.deleted_as_rvalue(typ, e)
+            return
+
+        exc_type = self.named_type("builtins.BaseException")
+        expected_type_items = [exc_type, TypeType(exc_type)]
+        if optional:
+            # This is used for `x` part in a case like `raise e from x`,
+            # where we allow `raise e from None`.
+            expected_type_items.append(NoneType())
+
+        self.check_subtype(
+            typ, UnionType.make_union(expected_type_items), s, message_registry.INVALID_EXCEPTION
+        )
+
+        if isinstance(typ, FunctionLike):
+            # https://github.com/python/mypy/issues/11089
+            self.expr_checker.check_call(typ, [], [], e)
+
+        if (isinstance(typ, Instance) and typ.type.fullname in NOT_IMPLEMENTED_TYPE_NAMES) or (
+            isinstance(e, CallExpr)
+            and isinstance(e.callee, RefExpr)
+            and e.callee.fullname == "builtins.NotImplemented"
+        ):
+            self.fail(
+                message_registry.INVALID_EXCEPTION.with_additional_msg(
+                    '; did you mean "NotImplementedError"?'
+                ),
+                s,
+            )
+
+    def visit_try_stmt(self, s: TryStmt) -> None:
+        """Type check a try statement."""
+
+        iter_errors = None
+
+        # Our enclosing frame will get the result if the try/except falls through.
+        # This one gets all possible states after the try block exited abnormally
+        # (by exception, return, break, etc.)
+        with self.binder.frame_context(can_skip=False, fall_through=0):
+            # Not only might the body of the try statement exit
+            # abnormally, but so might an exception handler or else
+            # clause. The finally clause runs in *all* cases, so we
+            # need an outer try frame to catch all intermediate states
+            # in case an exception is raised during an except or else
+            # clause. As an optimization, only create the outer try
+            # frame when there actually is a finally clause.
+ self.visit_try_without_finally(s, try_frame=bool(s.finally_body)) + if s.finally_body: + # First we check finally_body is type safe on all abnormal exit paths + iter_errors = IterationDependentErrors() + with IterationErrorWatcher(self.msg.errors, iter_errors): + self.accept(s.finally_body) + + if s.finally_body: + # Then we try again for the more restricted set of options + # that can fall through. (Why do we need to check the + # finally clause twice? Depending on whether the finally + # clause was reached by the try clause falling off the end + # or exiting abnormally, after completing the finally clause + # either flow will continue to after the entire try statement + # or the exception/return/etc. will be processed and control + # flow will escape. We need to check that the finally clause + # type checks in both contexts, but only the resulting types + # from the latter context affect the type state in the code + # that follows the try statement.) + assert iter_errors is not None + if not self.binder.is_unreachable(): + with IterationErrorWatcher(self.msg.errors, iter_errors): + self.accept(s.finally_body) + self.msg.iteration_dependent_errors(iter_errors) + + def visit_try_without_finally(self, s: TryStmt, try_frame: bool) -> None: + """Type check a try statement, ignoring the finally block. + + On entry, the top frame should receive all flow that exits the + try block abnormally (i.e., such that the else block does not + execute), and its parent should receive all flow that exits + the try block normally. + """ + # This frame will run the else block if the try fell through. + # In that case, control flow continues to the parent of what + # was the top frame on entry. + with self.binder.frame_context(can_skip=False, fall_through=2, try_frame=try_frame): + # This frame receives exit via exception, and runs exception handlers + with self.binder.frame_context(can_skip=False, conditional_frame=True, fall_through=2): + # Finally, the body of the try statement + with self.binder.frame_context(can_skip=False, fall_through=2, try_frame=True): + self.accept(s.body) + for i in range(len(s.handlers)): + with self.binder.frame_context(can_skip=True, fall_through=4): + typ = s.types[i] + if typ: + t = self.check_except_handler_test(typ, s.is_star) + var = s.vars[i] + if var: + # To support local variables, we make this a definition line, + # causing assignment to set the variable's type. + var.is_inferred_def = True + self.check_assignment(var, self.temp_node(t, var)) + self.accept(s.handlers[i]) + var = s.vars[i] + if var: + # Exception variables are deleted. + # Unfortunately, this doesn't let us detect usage before the + # try/except block. + source = var.name + if isinstance(var.node, Var): + new_type = DeletedType(source=source) + var.node.type = new_type + if self.options.allow_redefinition_new: + # TODO: Should we use put() here? 
+ self.binder.assign_type(var, new_type, new_type) + if not self.options.allow_redefinition_new: + self.binder.cleanse(var) + if s.else_body: + self.accept(s.else_body) + + def check_except_handler_test(self, n: Expression, is_star: bool) -> Type: + """Type check an exception handler test clause.""" + typ = self.expr_checker.accept(n) + + all_types: list[Type] = [] + test_types = self.get_types_from_except_handler(typ, n) + + for ttype in get_proper_types(test_types): + if isinstance(ttype, AnyType): + all_types.append(ttype) + continue + + if isinstance(ttype, FunctionLike): + item = ttype.items[0] + if not item.is_type_obj(): + self.fail(message_registry.INVALID_EXCEPTION_TYPE, n) + return self.default_exception_type(is_star) + exc_type = erase_typevars(item.ret_type) + elif isinstance(ttype, TypeType): + exc_type = ttype.item + else: + self.fail(message_registry.INVALID_EXCEPTION_TYPE, n) + return self.default_exception_type(is_star) + + if not is_subtype(exc_type, self.named_type("builtins.BaseException")): + self.fail(message_registry.INVALID_EXCEPTION_TYPE, n) + return self.default_exception_type(is_star) + + all_types.append(exc_type) + + if is_star: + new_all_types: list[Type] = [] + for typ in all_types: + if is_proper_subtype(typ, self.named_type("builtins.BaseExceptionGroup")): + self.fail(message_registry.INVALID_EXCEPTION_GROUP, n) + new_all_types.append(AnyType(TypeOfAny.from_error)) + else: + new_all_types.append(typ) + return self.wrap_exception_group(new_all_types) + return make_simplified_union(all_types) + + def default_exception_type(self, is_star: bool) -> Type: + """Exception type to return in case of a previous type error.""" + any_type = AnyType(TypeOfAny.from_error) + if is_star: + return self.named_generic_type("builtins.ExceptionGroup", [any_type]) + return any_type + + def wrap_exception_group(self, types: Sequence[Type]) -> Type: + """Transform except* variable type into an appropriate exception group.""" + arg = make_simplified_union(types) + if is_subtype(arg, self.named_type("builtins.Exception")): + base = "builtins.ExceptionGroup" + else: + base = "builtins.BaseExceptionGroup" + return self.named_generic_type(base, [arg]) + + def get_types_from_except_handler(self, typ: Type, n: Expression) -> list[Type]: + """Helper for check_except_handler_test to retrieve handler types.""" + typ = get_proper_type(typ) + if isinstance(typ, TupleType): + return typ.items + elif isinstance(typ, UnionType): + return [ + union_typ + for item in typ.relevant_items() + for union_typ in self.get_types_from_except_handler(item, n) + ] + elif is_named_instance(typ, "builtins.tuple"): + # variadic tuple + return [typ.args[0]] + else: + return [typ] + + def visit_for_stmt(self, s: ForStmt) -> None: + """Type check a for statement.""" + if s.is_async: + iterator_type, item_type = self.analyze_async_iterable_item_type(s.expr) + else: + iterator_type, item_type = self.analyze_iterable_item_type(s.expr) + s.inferred_item_type = item_type + s.inferred_iterator_type = iterator_type + + self.accept_loop( + s.body, + s.else_body, + on_enter_body=lambda: self.analyze_index_variables( + s.index, item_type, s.index_type is None, s + ), + ) + + def analyze_async_iterable_item_type(self, expr: Expression) -> tuple[Type, Type]: + """Analyse async iterable expression and return iterator and iterator item types.""" + echk = self.expr_checker + iterable = echk.accept(expr) + iterator = echk.check_method_call_by_name("__aiter__", iterable, [], [], expr)[0] + awaitable = 
echk.check_method_call_by_name("__anext__", iterator, [], [], expr)[0] + item_type = echk.check_awaitable_expr( + awaitable, expr, message_registry.INCOMPATIBLE_TYPES_IN_ASYNC_FOR + ) + return iterator, item_type + + def analyze_iterable_item_type(self, expr: Expression) -> tuple[Type, Type]: + """Analyse iterable expression and return iterator and iterator item types.""" + iterator, iterable = self.analyze_iterable_item_type_without_expression( + self.expr_checker.accept(expr), context=expr + ) + int_type = self.analyze_range_native_int_type(expr) + if int_type: + return iterator, int_type + return iterator, iterable + + def analyze_iterable_item_type_without_expression( + self, type: Type, context: Context + ) -> tuple[Type, Type]: + """Analyse iterable type and return iterator and iterator item types.""" + echk = self.expr_checker + iterable: Type + iterable = get_proper_type(type) + iterator = echk.check_method_call_by_name("__iter__", iterable, [], [], context)[0] + + if ( + isinstance(iterable, TupleType) + and iterable.partial_fallback.type.fullname == "builtins.tuple" + ): + return iterator, tuple_fallback(iterable).args[0] + else: + # Non-tuple iterable. + iterable = echk.check_method_call_by_name("__next__", iterator, [], [], context)[0] + return iterator, iterable + + def analyze_range_native_int_type(self, expr: Expression) -> Type | None: + """Try to infer native int item type from arguments to range(...). + + For example, return i64 if the expression is "range(0, i64(n))". + + Return None if unsuccessful. + """ + if ( + isinstance(expr, CallExpr) + and isinstance(expr.callee, RefExpr) + and expr.callee.fullname == "builtins.range" + and 1 <= len(expr.args) <= 3 + and all(kind == ARG_POS for kind in expr.arg_kinds) + ): + native_int: Type | None = None + ok = True + for arg in expr.args: + argt = get_proper_type(self.lookup_type(arg)) + if isinstance(argt, Instance) and argt.type.fullname in MYPYC_NATIVE_INT_NAMES: + if native_int is None: + native_int = argt + elif argt != native_int: + ok = False + if ok and native_int: + return native_int + return None + + def analyze_container_item_type(self, typ: Type) -> Type | None: + """Check if a type is a nominal container of a union of such. + + Return the corresponding container item type. 
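+
+        For example (illustrative): 'list[int]' yields 'int', and
+        'Union[list[int], set[str]]' yields 'int | str'.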
+ """ + typ = get_proper_type(typ) + if isinstance(typ, UnionType): + types: list[Type] = [] + for item in typ.items: + c_type = self.analyze_container_item_type(item) + if c_type: + types.append(c_type) + return UnionType.make_union(types) + if isinstance(typ, Instance) and typ.type.has_base("typing.Container"): + supertype = self.named_type("typing.Container").type + super_instance = map_instance_to_supertype(typ, supertype) + assert len(super_instance.args) == 1 + return super_instance.args[0] + if isinstance(typ, TupleType): + return self.analyze_container_item_type(tuple_fallback(typ)) + return None + + def analyze_index_variables( + self, index: Expression, item_type: Type, infer_lvalue_type: bool, context: Context + ) -> None: + """Type check or infer for loop or list comprehension index vars.""" + self.check_assignment(index, self.temp_node(item_type, context), infer_lvalue_type) + + def visit_del_stmt(self, s: DelStmt) -> None: + if isinstance(s.expr, IndexExpr): + e = s.expr + m = MemberExpr(e.base, "__delitem__") + m.line = s.line + m.column = s.column + c = CallExpr(m, [e.index], [nodes.ARG_POS], [None]) + c.line = s.line + c.column = s.column + self.expr_checker.accept(c, allow_none_return=True) + else: + s.expr.accept(self.expr_checker) + for elt in flatten(s.expr): + if isinstance(elt, NameExpr): + self.binder.assign_type( + elt, DeletedType(source=elt.name), get_declaration(elt) + ) + + def visit_decorator(self, e: Decorator) -> None: + for d in e.decorators: + if isinstance(d, RefExpr): + if d.fullname == "typing.no_type_check": + e.var.type = AnyType(TypeOfAny.special_form) + e.var.is_ready = True + return + self.visit_decorator_inner(e) + + def visit_decorator_inner( + self, e: Decorator, allow_empty: bool = False, skip_first_item: bool = False + ) -> None: + if self.recurse_into_functions: + with self.tscope.function_scope(e.func): + self.check_func_item(e.func, name=e.func.name, allow_empty=allow_empty) + + # Process decorators from the inside out to determine decorated signature, which + # may be different from the declared signature. + sig: Type = self.function_type(e.func) + non_trivial_decorator = False + # For settable properties skip the first decorator (that is @foo.setter). + for d in reversed(e.decorators[1:] if skip_first_item else e.decorators): + if refers_to_fullname(d, "abc.abstractmethod"): + # This is a hack to avoid spurious errors because of incomplete type + # of @abstractmethod in the test fixtures. 
+ continue + if refers_to_fullname(d, OVERLOAD_NAMES): + if not allow_empty: + self.fail(message_registry.MULTIPLE_OVERLOADS_REQUIRED, e) + continue + non_trivial_decorator = True + dec = self.expr_checker.accept(d) + temp = self.temp_node(sig, context=d) + fullname = None + if isinstance(d, RefExpr): + fullname = d.fullname or None + # if this is an expression like @b.a where b is an object, get the type of b, + # so we can pass it the method hook in the plugins + object_type: Type | None = None + if fullname is None and isinstance(d, MemberExpr) and self.has_type(d.expr): + object_type = self.lookup_type(d.expr) + fullname = self.expr_checker.method_fullname(object_type, d.name) + self.check_for_untyped_decorator(e.func, dec, d) + sig, t2 = self.expr_checker.check_call( + dec, [temp], [nodes.ARG_POS], e, callable_name=fullname, object_type=object_type + ) + if non_trivial_decorator: + self.check_untyped_after_decorator(sig, e.func) + self.require_correct_self_argument(sig, e.func) + sig = set_callable_name(sig, e.func) + if isinstance(sig, CallableType): + sig.definition = e + e.var.type = sig + e.var.is_ready = True + if e.func.is_property: + if isinstance(sig, CallableType): + if len([k for k in sig.arg_kinds if k.is_required()]) > 1: + self.msg.fail("Too many arguments for property", e) + self.check_incompatible_property_override(e) + # For overloaded functions/properties we already checked override for overload as a whole. + if allow_empty or skip_first_item: + return + if e.func.info and not e.is_overload: + found_method_base_classes = self.check_method_override(e) + if ( + e.func.is_explicit_override + and not found_method_base_classes + and found_method_base_classes is not None + # If the class has Any fallback, we can't be certain that a method + # is really missing - it might come from unfollowed import. + and not e.func.info.fallback_to_any + ): + self.msg.no_overridable_method(e.func.name, e.func) + self.check_explicit_override_decorator(e.func, found_method_base_classes) + + if e.func.info and e.func.name in ("__init__", "__new__"): + if e.type and not isinstance(get_proper_type(e.type), (FunctionLike, AnyType)): + self.fail(message_registry.BAD_CONSTRUCTOR_TYPE, e) + + if e.func.original_def and isinstance(sig, FunctionLike): + # Function definition overrides function definition. 
+ self.check_func_def_override(e.func, sig) + + def check_for_untyped_decorator( + self, func: FuncDef, dec_type: Type, dec_expr: Expression + ) -> None: + if ( + self.options.disallow_untyped_decorators + and is_typed_callable(func.type) + and is_untyped_decorator(dec_type) + and not self.current_node_deferred + ): + self.msg.typed_function_untyped_decorator(func.name, dec_expr) + + def check_incompatible_property_override(self, e: Decorator) -> None: + if not e.var.is_settable_property and e.func.info: + name = e.func.name + for base in e.func.info.mro[1:]: + base_attr = base.names.get(name) + if not base_attr: + continue + if ( + isinstance(base_attr.node, OverloadedFuncDef) + and base_attr.node.is_property + and cast(Decorator, base_attr.node.items[0]).var.is_settable_property + ): + self.fail(message_registry.READ_ONLY_PROPERTY_OVERRIDES_READ_WRITE, e) + + def visit_with_stmt(self, s: WithStmt) -> None: + exceptions_maybe_suppressed = False + for expr, target in zip(s.expr, s.target): + if s.is_async: + exit_ret_type = self.check_async_with_item(expr, target, s.unanalyzed_type is None) + else: + exit_ret_type = self.check_with_item(expr, target, s.unanalyzed_type is None) + + # Based on the return type, determine if this context manager 'swallows' + # exceptions or not. We determine this using a heuristic based on the + # return type of the __exit__ method -- see the discussion in + # https://github.com/python/mypy/issues/7214 and the section about context managers + # in https://github.com/python/typeshed/blob/main/CONTRIBUTING.md#conventions + # for more details. + + exit_ret_type = get_proper_type(exit_ret_type) + if is_literal_type(exit_ret_type, "builtins.bool", False): + continue + + if is_literal_type(exit_ret_type, "builtins.bool", True) or ( + isinstance(exit_ret_type, Instance) + and exit_ret_type.type.fullname == "builtins.bool" + and state.strict_optional + ): + # Note: if strict-optional is disabled, this bool instance + # could actually be an Optional[bool]. + exceptions_maybe_suppressed = True + + if exceptions_maybe_suppressed: + # Treat this 'with' block in the same way we'd treat a 'try: BODY; except: pass' + # block. This means control flow can continue after the 'with' even if the 'with' + # block immediately returns. 
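+            # Illustrative example (assumes the usual typeshed signature of
+            # contextlib.suppress.__exit__, which returns bool):
+            #     with contextlib.suppress(Exception):
+            #         raise ValueError()
+            #     print("still reachable")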
+ with self.binder.frame_context(can_skip=True, try_frame=True): + self.accept(s.body) + else: + self.accept(s.body) + + def check_untyped_after_decorator(self, typ: Type, func: FuncDef) -> None: + if not self.options.disallow_any_decorated or self.is_stub or self.current_node_deferred: + return + + if mypy.checkexpr.has_any_type(typ): + self.msg.untyped_decorated_function(typ, func) + + def check_async_with_item( + self, expr: Expression, target: Expression | None, infer_lvalue_type: bool + ) -> Type: + echk = self.expr_checker + ctx = echk.accept(expr) + obj = echk.check_method_call_by_name("__aenter__", ctx, [], [], expr)[0] + obj = echk.check_awaitable_expr( + obj, expr, message_registry.INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AENTER + ) + if target: + self.check_assignment(target, self.temp_node(obj, expr), infer_lvalue_type) + arg = self.temp_node(AnyType(TypeOfAny.special_form), expr) + res, _ = echk.check_method_call_by_name( + "__aexit__", ctx, [arg] * 3, [nodes.ARG_POS] * 3, expr + ) + return echk.check_awaitable_expr( + res, expr, message_registry.INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AEXIT + ) + + def check_with_item( + self, expr: Expression, target: Expression | None, infer_lvalue_type: bool + ) -> Type: + echk = self.expr_checker + ctx = echk.accept(expr) + obj = echk.check_method_call_by_name("__enter__", ctx, [], [], expr)[0] + if target: + self.check_assignment(target, self.temp_node(obj, expr), infer_lvalue_type) + arg = self.temp_node(AnyType(TypeOfAny.special_form), expr) + res, _ = echk.check_method_call_by_name( + "__exit__", ctx, [arg] * 3, [nodes.ARG_POS] * 3, expr + ) + return res + + def visit_break_stmt(self, s: BreakStmt) -> None: + self.binder.handle_break() + + def visit_continue_stmt(self, s: ContinueStmt) -> None: + self.binder.handle_continue() + return + + def visit_match_stmt(self, s: MatchStmt) -> None: + # In sync with similar actions elsewhere, narrow the target if + # we are matching an AssignmentExpr + unwrapped_subject = collapse_walrus(s.subject) + named_subject = self._make_named_statement_for_match(s, unwrapped_subject) + with self.binder.frame_context(can_skip=False, fall_through=0): + subject_type = get_proper_type(self.expr_checker.accept(s.subject)) + + if isinstance(subject_type, DeletedType): + self.msg.deleted_as_rvalue(subject_type, s) + + # We infer types of patterns twice. The first pass is used + # to infer the types of capture variables. The type of a + # capture variable may depend on multiple patterns (it + # will be a union of all capture types). This pass ignores + # guard expressions. + pattern_types = [self.pattern_checker.accept(p, subject_type) for p in s.patterns] + type_maps: list[TypeMap] = [t.captures for t in pattern_types] + inferred_types = self.infer_variable_types_from_type_maps(type_maps) + + # The second pass narrows down the types and type checks bodies. 
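+            # Rough illustration (hypothetical code) of the two passes: in
+            #     match subj:          # subj: int | str
+            #         case int() as x: ...
+            #         case str() as x: ...
+            # the first pass infers the capture 'x' as the union 'int | str', while the
+            # second pass narrows 'x' per case and type checks each body.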
+ unmatched_types: TypeMap = None + for p, g, b in zip(s.patterns, s.guards, s.bodies): + current_subject_type = self.expr_checker.narrow_type_from_binder( + named_subject, subject_type + ) + pattern_type = self.pattern_checker.accept(p, current_subject_type) + with self.binder.frame_context(can_skip=True, fall_through=2): + if b.is_unreachable or isinstance( + get_proper_type(pattern_type.type), UninhabitedType + ): + self.push_type_map(None, from_assignment=False) + else_map: TypeMap = {} + else: + pattern_map, else_map = conditional_types_to_typemaps( + named_subject, pattern_type.type, pattern_type.rest_type + ) + # Maybe the subject type can be inferred from constraints on + # its attribute/item? + if pattern_map and named_subject in pattern_map: + pattern_map[unwrapped_subject] = pattern_map[named_subject] + if else_map and named_subject in else_map: + else_map[unwrapped_subject] = else_map[named_subject] + pattern_map = self.propagate_up_typemap_info(pattern_map) + else_map = self.propagate_up_typemap_info(else_map) + self.remove_capture_conflicts(pattern_type.captures, inferred_types) + self.push_type_map(pattern_map, from_assignment=False) + if pattern_map: + for expr, typ in pattern_map.items(): + self.push_type_map( + self._get_recursive_sub_patterns_map(expr, typ), + from_assignment=False, + ) + self.push_type_map(pattern_type.captures, from_assignment=False) + if g is not None: + with self.binder.frame_context(can_skip=False, fall_through=3): + gt = get_proper_type(self.expr_checker.accept(g)) + + if isinstance(gt, DeletedType): + self.msg.deleted_as_rvalue(gt, s) + + guard_map, guard_else_map = self.find_isinstance_check(g) + else_map = or_conditional_maps(else_map, guard_else_map) + + # If the guard narrowed the subject, copy the narrowed types over + if isinstance(p, AsPattern): + case_target = p.pattern or p.name + if isinstance(case_target, NameExpr): + for type_map in (guard_map, else_map): + if not type_map: + continue + for expr in list(type_map): + if not ( + isinstance(expr, NameExpr) + and expr.fullname == case_target.fullname + ): + continue + type_map[named_subject] = type_map[expr] + + self.push_type_map(guard_map, from_assignment=False) + self.accept(b) + else: + self.accept(b) + self.push_type_map(else_map, from_assignment=False) + unmatched_types = else_map + + if unmatched_types is not None and not self.current_node_deferred: + for typ in unmatched_types.values(): + self.msg.match_statement_inexhaustive_match(typ, s) + + # This is needed due to a quirk in frame_context. Without it types will stay narrowed + # after the match. + with self.binder.frame_context(can_skip=False, fall_through=2): + pass + + def _make_named_statement_for_match(self, s: MatchStmt, subject: Expression) -> Expression: + """Construct a fake NameExpr for inference if a match clause is complex.""" + if self.binder.can_put_directly(subject): + # Already named - we should infer type of it as given + return subject + elif s.subject_dummy is not None: + return s.subject_dummy + else: + # Create a dummy subject expression to handle cases where a match statement's subject + # is not a literal value. This lets us correctly narrow types and check exhaustivity + # This is hack! 
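+            # Rough illustration (hypothetical code): in
+            #     match compute():
+            #         case []: ...
+            #         case [_, *_]: ...
+            # the subject is a call rather than a name, so narrowing and exhaustiveness
+            # checks run against a synthetic NameExpr bound to the call's result.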
+ name = self.new_unique_dummy_name("match") + v = Var(name) + named_subject = NameExpr(name) + named_subject.node = v + s.subject_dummy = named_subject + return named_subject + + def _get_recursive_sub_patterns_map( + self, expr: Expression, typ: Type + ) -> dict[Expression, Type]: + sub_patterns_map: dict[Expression, Type] = {} + typ_ = get_proper_type(typ) + if isinstance(expr, TupleExpr) and isinstance(typ_, TupleType): + # When matching a tuple expression with a sequence pattern, narrow individual tuple items + assert len(expr.items) == len(typ_.items) + for item_expr, item_typ in zip(expr.items, typ_.items): + sub_patterns_map[item_expr] = item_typ + sub_patterns_map.update(self._get_recursive_sub_patterns_map(item_expr, item_typ)) + + return sub_patterns_map + + def infer_variable_types_from_type_maps( + self, type_maps: list[TypeMap] + ) -> dict[SymbolNode, Type]: + # Type maps may contain variables inherited from previous code which are not + # necessary `Var`s (e.g. a function defined earlier with the same name). + all_captures: dict[SymbolNode, list[tuple[NameExpr, Type]]] = defaultdict(list) + for tm in type_maps: + if tm is not None: + for expr, typ in tm.items(): + if isinstance(expr, NameExpr): + node = expr.node + assert node is not None + all_captures[node].append((expr, typ)) + + inferred_types: dict[SymbolNode, Type] = {} + for var, captures in all_captures.items(): + already_exists = False + types: list[Type] = [] + for expr, typ in captures: + types.append(typ) + + previous_type, _, _ = self.check_lvalue(expr) + if previous_type is not None: + already_exists = True + if isinstance(expr.node, Var) and expr.node.is_final: + self.msg.cant_assign_to_final(expr.name, False, expr) + if self.check_subtype( + typ, + previous_type, + expr, + msg=message_registry.INCOMPATIBLE_TYPES_IN_CAPTURE, + subtype_label="pattern captures type", + supertype_label="variable has type", + ): + inferred_types[var] = previous_type + + if not already_exists: + new_type = UnionType.make_union(types) + # Infer the union type at the first occurrence + first_occurrence, _ = captures[0] + # If it didn't exist before ``match``, it's a Var. + assert isinstance(var, Var) + inferred_types[var] = new_type + self.infer_variable_type(var, first_occurrence, new_type, first_occurrence) + return inferred_types + + def remove_capture_conflicts( + self, type_map: TypeMap, inferred_types: dict[SymbolNode, Type] + ) -> None: + if type_map: + for expr, typ in list(type_map.items()): + if isinstance(expr, NameExpr): + node = expr.node + if node not in inferred_types or not is_subtype(typ, inferred_types[node]): + del type_map[expr] + + def visit_type_alias_stmt(self, o: TypeAliasStmt) -> None: + if o.alias_node: + self.check_typevar_defaults(o.alias_node.alias_tvars) + + with self.msg.filter_errors(): + self.expr_checker.accept(o.value) + + def make_fake_typeinfo( + self, + curr_module_fullname: str, + class_gen_name: str, + class_short_name: str, + bases: list[Instance], + ) -> tuple[ClassDef, TypeInfo]: + # Build the fake ClassDef and TypeInfo together. + # The ClassDef is full of lies and doesn't actually contain a body. + # Use format_bare to generate a nice name for error messages. + # We skip fully filling out a handful of TypeInfo fields because they + # should be irrelevant for a generated type like this: + # is_protocol, protocol_members, is_abstract + cdef = ClassDef(class_short_name, Block([])) + cdef.fullname = curr_module_fullname + "." 
+ class_gen_name
+        info = TypeInfo(SymbolTable(), cdef, curr_module_fullname)
+        cdef.info = info
+        info.bases = bases
+        calculate_mro(info)
+        info.metaclass_type = info.calculate_metaclass_type()
+        return cdef, info
+
+    def intersect_instances(
+        self, instances: tuple[Instance, Instance], errors: list[tuple[str, str]]
+    ) -> Instance | None:
+        """Try creating an ad-hoc intersection of the given instances.
+
+        Note that this function does *not* try and create a full-fledged
+        intersection type. Instead, it returns an instance of a new ad-hoc
+        subclass of the given instances.
+
+        This is mainly useful when you need a way of representing some
+        theoretical subclass of the instances the user may be trying to use:
+        the generated intersection can serve as a placeholder.
+
+        This function will create a fresh subclass the first time you call it.
+        So this means calling `self.intersect_instances([inst_1, inst_2], ctx)`
+        twice will return the same subclass of inst_1 and inst_2.
+
+        Returns None if creating the subclass is impossible (e.g. due to
+        MRO errors or incompatible signatures). If we do successfully create
+        a subclass, its TypeInfo will automatically be added to the global scope.
+        """
+        curr_module = self.scope.stack[0]
+        assert isinstance(curr_module, MypyFile)
+
+        # First, retry narrowing while allowing promotions (they are disabled by default
+        # for isinstance() checks, etc). This way we will still type-check branches like
+        # x: complex = 1
+        # if isinstance(x, int):
+        #     ...
+        left, right = instances
+        if is_proper_subtype(left, right, ignore_promotions=False):
+            return left
+        if is_proper_subtype(right, left, ignore_promotions=False):
+            return right
+
+        def _get_base_classes(instances_: tuple[Instance, Instance]) -> list[Instance]:
+            base_classes_ = []
+            for inst in instances_:
+                if inst.type.is_intersection:
+                    expanded = inst.type.bases
+                else:
+                    expanded = [inst]
+
+                for expanded_inst in expanded:
+                    base_classes_.append(expanded_inst)
+            return base_classes_
+
+        def _make_fake_typeinfo_and_full_name(
+            base_classes_: list[Instance], curr_module_: MypyFile, options: Options
+        ) -> tuple[TypeInfo, str]:
+            names = [format_type_bare(x, options=options, verbosity=2) for x in base_classes_]
+            name = f"<subclass of {pretty_seq(names, 'and')}>"
+            if (symbol := curr_module_.names.get(name)) is not None:
+                assert isinstance(symbol.node, TypeInfo)
+                return symbol.node, name
+            cdef, info_ = self.make_fake_typeinfo(curr_module_.fullname, name, name, base_classes_)
+            return info_, name
+
+        base_classes = _get_base_classes(instances)
+        # We use the pretty_names_list for error messages but not for the real name that goes
+        # into the symbol table because it is not specific enough.
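+        # Rough illustration (hypothetical classes) of where this intersection is used:
+        #     class A: ...
+        #     class B: ...
+        #     def f(x: A) -> None:
+        #         if isinstance(x, B):
+        #             reveal_type(x)   # roughly: <subclass of "A" and "B">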
+        pretty_names_list = pretty_seq(
+            format_type_distinctly(*base_classes, options=self.options, bare=True), "and"
+        )
+
+        if not can_have_shared_disjoint_base(base_classes):
+            errors.append((pretty_names_list, "have distinct disjoint bases"))
+            return None
+
+        new_errors = []
+        for base in base_classes:
+            if base.type.is_final:
+                new_errors.append((pretty_names_list, f'"{base.type.name}" is final'))
+        if new_errors:
+            errors.extend(new_errors)
+            return None
+
+        try:
+            info, full_name = _make_fake_typeinfo_and_full_name(
+                base_classes, curr_module, self.options
+            )
+            with self.msg.filter_errors() as local_errors:
+                self.check_multiple_inheritance(info)
+            if local_errors.has_new_errors():
+                # "class A(B, C)" unsafe, now check "class A(C, B)":
+                base_classes = _get_base_classes(instances[::-1])
+                info, full_name = _make_fake_typeinfo_and_full_name(
+                    base_classes, curr_module, self.options
+                )
+                with self.msg.filter_errors() as local_errors:
+                    self.check_multiple_inheritance(info)
+            info.is_intersection = True
+        except MroError:
+            errors.append((pretty_names_list, "would have inconsistent method resolution order"))
+            return None
+        if local_errors.has_new_errors():
+            errors.append((pretty_names_list, "would have incompatible method signatures"))
+            return None
+
+        curr_module.names[full_name] = SymbolTableNode(GDEF, info)
+        return Instance(info, [], extra_attrs=instances[0].extra_attrs or instances[1].extra_attrs)
+
+    def intersect_instance_callable(self, typ: Instance, callable_type: CallableType) -> Instance:
+        """Creates a fake type that represents the intersection of an Instance and a CallableType.
+
+        It operates by creating a bare-minimum dummy TypeInfo that
+        subclasses type and adds a __call__ method matching callable_type.
+        """
+
+        # In order for this to work in incremental mode, the type we generate needs to
+        # have a valid fullname and a corresponding entry in a symbol table. We generate
+        # a unique name inside the symbol table of the current module.
+        cur_module = self.scope.stack[0]
+        assert isinstance(cur_module, MypyFile)
+        gen_name = gen_unique_name(f"<callable subtype of {typ.type.name}>", cur_module.names)
+
+        # Synthesize a fake TypeInfo
+        short_name = format_type_bare(typ, self.options)
+        cdef, info = self.make_fake_typeinfo(cur_module.fullname, gen_name, short_name, [typ])
+
+        # Build up a fake FuncDef so we can populate the symbol table.
+        func_def = FuncDef("__call__", [], Block([]), callable_type)
+        func_def._fullname = cdef.fullname + ".__call__"
+        func_def.info = info
+        info.names["__call__"] = SymbolTableNode(MDEF, func_def)
+
+        cur_module.names[gen_name] = SymbolTableNode(GDEF, info)
+
+        return Instance(info, [], extra_attrs=typ.extra_attrs)
+
+    def make_fake_callable(self, typ: Instance) -> Instance:
+        """Produce a new type that makes type Callable with a generic callable type."""
+
+        fallback = self.named_type("builtins.function")
+        callable_type = CallableType(
+            [AnyType(TypeOfAny.explicit), AnyType(TypeOfAny.explicit)],
+            [nodes.ARG_STAR, nodes.ARG_STAR2],
+            [None, None],
+            ret_type=AnyType(TypeOfAny.explicit),
+            fallback=fallback,
+            is_ellipsis_args=True,
+        )
+
+        return self.intersect_instance_callable(typ, callable_type)
+
+    def partition_by_callable(
+        self, typ: Type, unsound_partition: bool
+    ) -> tuple[list[Type], list[Type]]:
+        """Partitions a type into callable subtypes and uncallable subtypes.
+ + Thus, given: + `callables, uncallables = partition_by_callable(type)` + + If we assert `callable(type)` then `type` has type Union[*callables], and + If we assert `not callable(type)` then `type` has type Union[*uncallables] + + If unsound_partition is set, assume that anything that is not + clearly callable is in fact not callable. Otherwise we generate a + new subtype that *is* callable. + + Guaranteed to not return [], []. + """ + typ = get_proper_type(typ) + + if isinstance(typ, (FunctionLike, TypeType)): + return [typ], [] + + if isinstance(typ, AnyType): + return [typ], [typ] + + if isinstance(typ, NoneType): + return [], [typ] + + if isinstance(typ, UnionType): + callables = [] + uncallables = [] + for subtype in typ.items: + # Use unsound_partition when handling unions in order to + # allow the expected type discrimination. + subcallables, subuncallables = self.partition_by_callable( + subtype, unsound_partition=True + ) + callables.extend(subcallables) + uncallables.extend(subuncallables) + return callables, uncallables + + if isinstance(typ, TypeVarType): + # We could do better probably? + # Refine the type variable's bound as our type in the case that + # callable() is true. This unfortunately loses the information that + # the type is a type variable in that branch. + # This matches what is done for isinstance, but it may be possible to + # do better. + # If it is possible for the false branch to execute, return the original + # type to avoid losing type information. + callables, uncallables = self.partition_by_callable( + erase_to_union_or_bound(typ), unsound_partition + ) + uncallables = [typ] if uncallables else [] + return callables, uncallables + + # A TupleType is callable if its fallback is, but needs special handling + # when we dummy up a new type. + ityp = typ + if isinstance(typ, TupleType): + ityp = tuple_fallback(typ) + + if isinstance(ityp, Instance): + method = ityp.type.get_method("__call__") + if method and method.type: + callables, uncallables = self.partition_by_callable( + method.type, unsound_partition=False + ) + if callables and not uncallables: + # Only consider the type callable if its __call__ method is + # definitely callable. + return [typ], [] + + if not unsound_partition: + fake = self.make_fake_callable(ityp) + if isinstance(typ, TupleType): + fake.type.tuple_type = TupleType(typ.items, fake) + return [fake.type.tuple_type], [typ] + return [fake], [typ] + + if unsound_partition: + return [], [typ] + else: + # We don't know how properly make the type callable. + return [typ], [typ] + + def conditional_callable_type_map( + self, expr: Expression, current_type: Type | None + ) -> tuple[TypeMap, TypeMap]: + """Takes in an expression and the current type of the expression. + + Returns a 2-tuple: The first element is a map from the expression to + the restricted type if it were callable. The second element is a + map from the expression to the type it would hold if it weren't + callable. 
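+
+        For example, given `x: int | Callable[[], int]`, the first map (roughly)
+        narrows `x` to `Callable[[], int]` under `if callable(x):` and the second
+        narrows it to `int` in the `else` branch.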
+ """ + if not current_type: + return {}, {} + + if isinstance(get_proper_type(current_type), AnyType): + return {}, {} + + callables, uncallables = self.partition_by_callable(current_type, unsound_partition=False) + + if callables and uncallables: + callable_map = {expr: UnionType.make_union(callables)} if callables else None + uncallable_map = {expr: UnionType.make_union(uncallables)} if uncallables else None + return callable_map, uncallable_map + + elif callables: + return {}, None + + return None, {} + + def conditional_types_for_iterable( + self, item_type: Type, iterable_type: Type + ) -> tuple[Type | None, Type | None]: + """ + Narrows the type of `iterable_type` based on the type of `item_type`. + For now, we only support narrowing unions of TypedDicts based on left operand being literal string(s). + """ + if_types: list[Type] = [] + else_types: list[Type] = [] + + iterable_type = get_proper_type(iterable_type) + if isinstance(iterable_type, UnionType): + possible_iterable_types = get_proper_types(iterable_type.relevant_items()) + else: + possible_iterable_types = [iterable_type] + + item_str_literals = try_getting_str_literals_from_type(item_type) + + for possible_iterable_type in possible_iterable_types: + if item_str_literals and isinstance(possible_iterable_type, TypedDictType): + for key in item_str_literals: + if key in possible_iterable_type.required_keys: + if_types.append(possible_iterable_type) + elif ( + key in possible_iterable_type.items or not possible_iterable_type.is_final + ): + if_types.append(possible_iterable_type) + else_types.append(possible_iterable_type) + else: + else_types.append(possible_iterable_type) + else: + if_types.append(possible_iterable_type) + else_types.append(possible_iterable_type) + + return ( + UnionType.make_union(if_types) if if_types else None, + UnionType.make_union(else_types) if else_types else None, + ) + + def _is_truthy_type(self, t: ProperType) -> bool: + return ( + ( + isinstance(t, Instance) + and bool(t.type) + and not t.type.has_readable_member("__bool__") + and not t.type.has_readable_member("__len__") + and t.type.fullname != "builtins.object" + ) + or isinstance(t, FunctionLike) + or ( + isinstance(t, UnionType) + and all(self._is_truthy_type(t) for t in get_proper_types(t.items)) + ) + ) + + def check_for_truthy_type(self, t: Type, expr: Expression) -> None: + """ + Check if a type can have a truthy value. 
+ + Used in checks like:: + + if x: # <--- + + not x # <--- + """ + if not state.strict_optional: + return # if everything can be None, all bets are off + + t = get_proper_type(t) + if not self._is_truthy_type(t): + return + + def format_expr_type() -> str: + typ = format_type(t, self.options) + if isinstance(expr, MemberExpr): + return f'Member "{expr.name}" has type {typ}' + elif isinstance(expr, RefExpr) and expr.fullname: + return f'"{expr.fullname}" has type {typ}' + elif isinstance(expr, CallExpr): + if isinstance(expr.callee, MemberExpr): + return f'"{expr.callee.name}" returns {typ}' + elif isinstance(expr.callee, RefExpr) and expr.callee.fullname: + return f'"{expr.callee.fullname}" returns {typ}' + return f"Call returns {typ}" + else: + return f"Expression has type {typ}" + + def get_expr_name() -> str: + if isinstance(expr, (NameExpr, MemberExpr)): + return f'"{expr.name}"' + else: + # return type if expr has no name + return format_type(t, self.options) + + if isinstance(t, FunctionLike): + self.fail(message_registry.FUNCTION_ALWAYS_TRUE.format(get_expr_name()), expr) + elif isinstance(t, UnionType): + self.fail(message_registry.TYPE_ALWAYS_TRUE_UNIONTYPE.format(format_expr_type()), expr) + elif isinstance(t, Instance) and t.type.fullname == "typing.Iterable": + _, info = self.make_fake_typeinfo("typing", "Collection", "Collection", []) + self.fail( + message_registry.ITERABLE_ALWAYS_TRUE.format( + format_expr_type(), format_type(Instance(info, t.args), self.options) + ), + expr, + ) + else: + self.fail(message_registry.TYPE_ALWAYS_TRUE.format(format_expr_type()), expr) + + def find_type_equals_check( + self, node: ComparisonExpr, expr_indices: list[int] + ) -> tuple[TypeMap, TypeMap]: + """Narrow types based on any checks of the type ``type(x) == T`` + + Args: + node: The node that might contain the comparison + expr_indices: The list of indices of expressions in ``node`` that are being + compared + """ + + def is_type_call(expr: CallExpr) -> bool: + """Is expr a call to type with one argument?""" + return refers_to_fullname(expr.callee, "builtins.type") and len(expr.args) == 1 + + # exprs that are being passed into type + exprs_in_type_calls: list[Expression] = [] + # type that is being compared to type(expr) + type_being_compared: list[TypeRange] | None = None + # whether the type being compared to is final + is_final = False + + for index in expr_indices: + expr = node.operands[index] + + if isinstance(expr, CallExpr) and is_type_call(expr): + exprs_in_type_calls.append(expr.args[0]) + else: + current_type = self.get_isinstance_type(expr) + if current_type is None: + continue + if type_being_compared is not None: + # It doesn't really make sense to have several types being + # compared to the output of type (like type(x) == int == str) + # because whether that's true is solely dependent on what the + # types being compared are, so we don't try to narrow types any + # further because we can't really get any information about the + # type of x from that check + return {}, {} + else: + if isinstance(expr, RefExpr) and isinstance(expr.node, TypeInfo): + is_final = expr.node.is_final + type_being_compared = current_type + + if not exprs_in_type_calls: + return {}, {} + + if_maps: list[TypeMap] = [] + else_maps: list[TypeMap] = [] + for expr in exprs_in_type_calls: + current_if_type, current_else_type = self.conditional_types_with_intersection( + self.lookup_type(expr), type_being_compared, expr + ) + current_if_map, current_else_map = conditional_types_to_typemaps( + expr, 
current_if_type, current_else_type + ) + if_maps.append(current_if_map) + else_maps.append(current_else_map) + + def combine_maps(list_maps: list[TypeMap]) -> TypeMap: + """Combine all typemaps in list_maps into one typemap""" + if all(m is None for m in list_maps): + return None + result_map = {} + for d in list_maps: + if d is not None: + result_map.update(d) + return result_map + + if_map = combine_maps(if_maps) + # type(x) == T is only true when x has the same type as T, meaning + # that it can be false if x is an instance of a subclass of T. That means + # we can't do any narrowing in the else case unless T is final, in which + # case T can't be subclassed + if is_final: + else_map = combine_maps(else_maps) + else: + else_map = {} + return if_map, else_map + + def find_isinstance_check( + self, node: Expression, *, in_boolean_context: bool = True + ) -> tuple[TypeMap, TypeMap]: + """Find any isinstance checks (within a chain of ands). Includes + implicit and explicit checks for None and calls to callable. + Also includes TypeGuard and TypeIs functions. + + Return value is a map of variables to their types if the condition + is true and a map of variables to their types if the condition is false. + + If either of the values in the tuple is None, then that particular + branch can never occur. + + If `in_boolean_context=True` is passed, it means that we handle + a walrus expression. We treat rhs values + in expressions like `(a := A())` specially: + for example, some errors are suppressed. + + May return {}, {}. + Can return None, None in situations involving NoReturn. + """ + if_map, else_map = self.find_isinstance_check_helper( + node, in_boolean_context=in_boolean_context + ) + new_if_map = self.propagate_up_typemap_info(if_map) + new_else_map = self.propagate_up_typemap_info(else_map) + return new_if_map, new_else_map + + def find_isinstance_check_helper( + self, node: Expression, *, in_boolean_context: bool = True + ) -> tuple[TypeMap, TypeMap]: + if is_true_literal(node): + return {}, None + if is_false_literal(node): + return None, {} + + if isinstance(node, CallExpr) and len(node.args) != 0: + expr = collapse_walrus(node.args[0]) + if refers_to_fullname(node.callee, "builtins.isinstance"): + if len(node.args) != 2: # the error will be reported elsewhere + return {}, {} + if literal(expr) == LITERAL_TYPE: + return conditional_types_to_typemaps( + expr, + *self.conditional_types_with_intersection( + self.lookup_type(expr), self.get_isinstance_type(node.args[1]), expr + ), + ) + elif refers_to_fullname(node.callee, "builtins.issubclass"): + if len(node.args) != 2: # the error will be reported elsewhere + return {}, {} + if literal(expr) == LITERAL_TYPE: + return self.infer_issubclass_maps(node, expr) + elif refers_to_fullname(node.callee, "builtins.callable"): + if len(node.args) != 1: # the error will be reported elsewhere + return {}, {} + if literal(expr) == LITERAL_TYPE: + vartype = self.lookup_type(expr) + return self.conditional_callable_type_map(expr, vartype) + elif refers_to_fullname(node.callee, "builtins.hasattr"): + if len(node.args) != 2: # the error will be reported elsewhere + return {}, {} + attr = try_getting_str_literals(node.args[1], self.lookup_type(node.args[1])) + if literal(expr) == LITERAL_TYPE and attr and len(attr) == 1: + return self.hasattr_type_maps(expr, self.lookup_type(expr), attr[0]) + else: + type_is, type_guard = None, None + called_type = self.lookup_type_or_none(node.callee) + if called_type is not None: + called_type = 
get_proper_type(called_type) + # TODO: there are some more cases in check_call() to handle. + # If the callee is an instance, try to extract TypeGuard/TypeIs from its __call__ method. + if isinstance(called_type, Instance): + call = find_member("__call__", called_type, called_type, is_operator=True) + if call is not None: + called_type = get_proper_type(call) + if isinstance(called_type, CallableType): + type_is, type_guard = called_type.type_is, called_type.type_guard + + # If the callee is a RefExpr, extract TypeGuard/TypeIs directly. + if isinstance(node.callee, RefExpr): + type_is, type_guard = node.callee.type_is, node.callee.type_guard + if type_guard is not None or type_is is not None: + # TODO: Follow *args, **kwargs + if node.arg_kinds[0] != nodes.ARG_POS: + # *assuming* the overloaded function is correct, there's a couple cases: + # 1) The first argument has different names, but is pos-only. We don't + # care about this case, the argument must be passed positionally. + # 2) The first argument allows keyword reference, therefore must be the + # same between overloads. + if isinstance(called_type, (CallableType, Overloaded)): + name = called_type.items[0].arg_names[0] + if name in node.arg_names: + idx = node.arg_names.index(name) + # we want the idx-th variable to be narrowed + expr = collapse_walrus(node.args[idx]) + else: + kind = "guard" if type_guard is not None else "narrower" + self.fail( + message_registry.TYPE_GUARD_POS_ARG_REQUIRED.format(kind), node + ) + return {}, {} + if literal(expr) == LITERAL_TYPE: + # Note: we wrap the target type, so that we can special case later. + # Namely, for isinstance() we use a normal meet, while TypeGuard is + # considered "always right" (i.e. even if the types are not overlapping). + # Also note that a care must be taken to unwrap this back at read places + # where we use this to narrow down declared type. + if type_guard is not None: + return {expr: TypeGuardedType(type_guard)}, {} + else: + assert type_is is not None + return conditional_types_to_typemaps( + expr, + *self.conditional_types_with_intersection( + self.lookup_type(expr), + [TypeRange(type_is, is_upper_bound=False)], + expr, + consider_runtime_isinstance=False, + ), + ) + elif isinstance(node, ComparisonExpr): + return self.comparison_type_narrowing_helper(node) + elif isinstance(node, AssignmentExpr): + if_map: dict[Expression, Type] | None + else_map: dict[Expression, Type] | None + if_map = {} + else_map = {} + + if_assignment_map, else_assignment_map = self.find_isinstance_check(node.target) + + if if_assignment_map is not None: + if_map.update(if_assignment_map) + if else_assignment_map is not None: + else_map.update(else_assignment_map) + + if_condition_map, else_condition_map = self.find_isinstance_check( + node.value, in_boolean_context=False + ) + + if if_condition_map is not None: + if_map.update(if_condition_map) + if else_condition_map is not None: + else_map.update(else_condition_map) + + return ( + (None if if_assignment_map is None or if_condition_map is None else if_map), + (None if else_assignment_map is None or else_condition_map is None else else_map), + ) + elif isinstance(node, OpExpr) and node.op == "and": + left_if_vars, left_else_vars = self.find_isinstance_check(node.left) + right_if_vars, right_else_vars = self.find_isinstance_check(node.right) + + # (e1 and e2) is true if both e1 and e2 are true, + # and false if at least one of e1 and e2 is false. 
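+            # Rough illustration (hypothetical code): for
+            #     if isinstance(x, int) and isinstance(y, str): ...
+            # the if-map narrows both 'x' and 'y'; the else-map is the or-combination of
+            # the two negative maps, which typically keeps no narrowing at all.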
+ return ( + and_conditional_maps(left_if_vars, right_if_vars), + # Note that if left else type is Any, we can't add any additional + # types to it, since the right maps were computed assuming + # the left is True, which may be not the case in the else branch. + or_conditional_maps(left_else_vars, right_else_vars, coalesce_any=True), + ) + elif isinstance(node, OpExpr) and node.op == "or": + left_if_vars, left_else_vars = self.find_isinstance_check(node.left) + right_if_vars, right_else_vars = self.find_isinstance_check(node.right) + + # (e1 or e2) is true if at least one of e1 or e2 is true, + # and false if both e1 and e2 are false. + return ( + or_conditional_maps(left_if_vars, right_if_vars), + and_conditional_maps(left_else_vars, right_else_vars), + ) + elif isinstance(node, UnaryExpr) and node.op == "not": + left, right = self.find_isinstance_check(node.expr) + return right, left + elif ( + literal(node) == LITERAL_TYPE + and self.has_type(node) + and self.can_be_narrowed_with_len(self.lookup_type(node)) + # Only translate `if x` to `if len(x) > 0` when possible. + and not custom_special_method(self.lookup_type(node), "__bool__") + and self.options.strict_optional + ): + # Combine a `len(x) > 0` check with the default logic below. + yes_type, no_type = self.narrow_with_len(self.lookup_type(node), ">", 0) + if yes_type is not None: + yes_type = true_only(yes_type) + else: + yes_type = UninhabitedType() + if no_type is not None: + no_type = false_only(no_type) + else: + no_type = UninhabitedType() + if_map = {node: yes_type} if not isinstance(yes_type, UninhabitedType) else None + else_map = {node: no_type} if not isinstance(no_type, UninhabitedType) else None + return if_map, else_map + + # Restrict the type of the variable to True-ish/False-ish in the if and else branches + # respectively + original_vartype = self.lookup_type(node) + if in_boolean_context: + # We don't check `:=` values in expressions like `(a := A())`, + # because they produce two error messages. + self.check_for_truthy_type(original_vartype, node) + vartype = try_expanding_sum_type_to_union(original_vartype, "builtins.bool") + + if_type = true_only(vartype) + else_type = false_only(vartype) + if_map = {node: if_type} if not isinstance(if_type, UninhabitedType) else None + else_map = {node: else_type} if not isinstance(else_type, UninhabitedType) else None + return if_map, else_map + + def comparison_type_narrowing_helper(self, node: ComparisonExpr) -> tuple[TypeMap, TypeMap]: + """Infer type narrowing from a comparison expression.""" + # Step 1: Obtain the types of each operand and whether or not we can + # narrow their types. (For example, we shouldn't try narrowing the + # types of literal string or enum expressions). + + operands = [collapse_walrus(x) for x in node.operands] + operand_types = [] + narrowable_operand_index_to_hash = {} + for i, expr in enumerate(operands): + if not self.has_type(expr): + return {}, {} + expr_type = self.lookup_type(expr) + operand_types.append(expr_type) + + if ( + literal(expr) == LITERAL_TYPE + and not is_literal_none(expr) + and not self.is_literal_enum(expr) + ): + h = literal_hash(expr) + if h is not None: + narrowable_operand_index_to_hash[i] = h + + # Step 2: Group operands chained by either the 'is' or '==' operands + # together. For all other operands, we keep them in groups of size 2. 
+ # So the expression: + # + # x0 == x1 == x2 < x3 < x4 is x5 is x6 is not x7 is not x8 + # + # ...is converted into the simplified operator list: + # + # [("==", [0, 1, 2]), ("<", [2, 3]), ("<", [3, 4]), + # ("is", [4, 5, 6]), ("is not", [6, 7]), ("is not", [7, 8])] + # + # We group identity/equality expressions so we can propagate information + # we discover about one operand across the entire chain. We don't bother + # handling 'is not' and '!=' chains in a special way: those are very rare + # in practice. + + simplified_operator_list = group_comparison_operands( + node.pairwise(), narrowable_operand_index_to_hash, {"==", "is"} + ) + + # Step 3: Analyze each group and infer more precise type maps for each + # assignable operand, if possible. We combine these type maps together + # in the final step. + + partial_type_maps = [] + for operator, expr_indices in simplified_operator_list: + if operator in {"is", "is not", "==", "!="}: + if_map, else_map = self.equality_type_narrowing_helper( + node, + operator, + operands, + operand_types, + expr_indices, + narrowable_operand_index_to_hash, + ) + elif operator in {"in", "not in"}: + assert len(expr_indices) == 2 + left_index, right_index = expr_indices + item_type = operand_types[left_index] + iterable_type = operand_types[right_index] + + if_map, else_map = {}, {} + + if left_index in narrowable_operand_index_to_hash: + # We only try and narrow away 'None' for now + if is_overlapping_none(item_type): + collection_item_type = get_proper_type(builtin_item_type(iterable_type)) + if ( + collection_item_type is not None + and not is_overlapping_none(collection_item_type) + and not ( + isinstance(collection_item_type, Instance) + and collection_item_type.type.fullname == "builtins.object" + ) + and is_overlapping_erased_types(item_type, collection_item_type) + ): + if_map[operands[left_index]] = remove_optional(item_type) + + if right_index in narrowable_operand_index_to_hash: + if_type, else_type = self.conditional_types_for_iterable( + item_type, iterable_type + ) + expr = operands[right_index] + if if_type is None: + if_map = None + else: + if_map[expr] = if_type + if else_type is None: + else_map = None + else: + else_map[expr] = else_type + + else: + if_map = {} + else_map = {} + + if operator in {"is not", "!=", "not in"}: + if_map, else_map = else_map, if_map + + partial_type_maps.append((if_map, else_map)) + + # If we have found non-trivial restrictions from the regular comparisons, + # then return soon. Otherwise try to infer restrictions involving `len(x)`. + # TODO: support regular and len() narrowing in the same chain. + if any(m != ({}, {}) for m in partial_type_maps): + return reduce_conditional_maps(partial_type_maps) + else: + # Use meet for `and` maps to get correct results for chained checks + # like `if 1 < len(x) < 4: ...` + return reduce_conditional_maps(self.find_tuple_len_narrowing(node), use_meet=True) + + def equality_type_narrowing_helper( + self, + node: ComparisonExpr, + operator: str, + operands: list[Expression], + operand_types: list[Type], + expr_indices: list[int], + narrowable_operand_index_to_hash: dict[int, tuple[Key, ...]], + ) -> tuple[TypeMap, TypeMap]: + """Calculate type maps for '==', '!=', 'is' or 'is not' expression.""" + # is_valid_target: + # Controls which types we're allowed to narrow exprs to. Note that + # we cannot use 'is_literal_type_like' in both cases since doing + # 'x = 10000 + 1; x is 10001' is not always True in all Python + # implementations. 
+ # + # coerce_only_in_literal_context: + # If true, coerce types into literal types only if one or more of + # the provided exprs contains an explicit Literal type. This could + # technically be set to any arbitrary value, but it seems being liberal + # with narrowing when using 'is' and conservative when using '==' seems + # to break the least amount of real-world code. + # + # should_narrow_by_identity: + # Set to 'false' only if the user defines custom __eq__ or __ne__ methods + # that could cause identity-based narrowing to produce invalid results. + if operator in {"is", "is not"}: + is_valid_target: Callable[[Type], bool] = is_singleton_type + coerce_only_in_literal_context = False + should_narrow_by_identity = True + else: + + def is_exactly_literal_type(t: Type) -> bool: + return isinstance(get_proper_type(t), LiteralType) + + def has_no_custom_eq_checks(t: Type) -> bool: + return not custom_special_method( + t, "__eq__", check_all=False + ) and not custom_special_method(t, "__ne__", check_all=False) + + is_valid_target = is_exactly_literal_type + coerce_only_in_literal_context = True + + expr_types = [operand_types[i] for i in expr_indices] + should_narrow_by_identity = all( + map(has_no_custom_eq_checks, expr_types) + ) and not is_ambiguous_mix_of_enums(expr_types) + + if_map: TypeMap = {} + else_map: TypeMap = {} + if should_narrow_by_identity: + if_map, else_map = self.refine_identity_comparison_expression( + operands, + operand_types, + expr_indices, + narrowable_operand_index_to_hash.keys(), + is_valid_target, + coerce_only_in_literal_context, + ) + + if if_map == {} and else_map == {}: + if_map, else_map = self.refine_away_none_in_comparison( + operands, operand_types, expr_indices, narrowable_operand_index_to_hash.keys() + ) + + # If we haven't been able to narrow types yet, we might be dealing with a + # explicit type(x) == some_type check + if if_map == {} and else_map == {}: + if_map, else_map = self.find_type_equals_check(node, expr_indices) + return if_map, else_map + + def propagate_up_typemap_info(self, new_types: TypeMap) -> TypeMap: + """Attempts refining parent expressions of any MemberExpr or IndexExprs in new_types. + + Specifically, this function accepts two mappings of expression to original types: + the original mapping (existing_types), and a new mapping (new_types) intended to + update the original. + + This function iterates through new_types and attempts to use the information to try + refining any parent types that happen to be unions. + + For example, suppose there are two types "A = Tuple[int, int]" and "B = Tuple[str, str]". + Next, suppose that 'new_types' specifies the expression 'foo[0]' has a refined type + of 'int' and that 'foo' was previously deduced to be of type Union[A, B]. + + Then, this function will observe that since A[0] is an int and B[0] is not, the type of + 'foo' can be further refined from Union[A, B] into just B. + + We perform this kind of "parent narrowing" for member lookup expressions and indexing + expressions into tuples, namedtuples, and typeddicts. We repeat this narrowing + recursively if the parent is also a "lookup expression". So for example, if we have + the expression "foo['bar'].baz[0]", we'd potentially end up refining types for the + expressions "foo", "foo['bar']", and "foo['bar'].baz". + + We return the newly refined map. This map is guaranteed to be a superset of 'new_types'. 
+ """ + if new_types is None: + return None + output_map = {} + for expr, expr_type in new_types.items(): + # The original inferred type should always be present in the output map, of course + output_map[expr] = expr_type + + # Next, try using this information to refine the parent types, if applicable. + new_mapping = self.refine_parent_types(expr, expr_type) + for parent_expr, proposed_parent_type in new_mapping.items(): + # We don't try inferring anything if we've already inferred something for + # the parent expression. + # TODO: Consider picking the narrower type instead of always discarding this? + if parent_expr in new_types: + continue + output_map[parent_expr] = proposed_parent_type + return output_map + + def refine_parent_types(self, expr: Expression, expr_type: Type) -> Mapping[Expression, Type]: + """Checks if the given expr is a 'lookup operation' into a union and iteratively refines + the parent types based on the 'expr_type'. + + For example, if 'expr' is an expression like 'a.b.c.d', we'll potentially return refined + types for expressions 'a', 'a.b', and 'a.b.c'. + + For more details about what a 'lookup operation' is and how we use the expr_type to refine + the parent types of lookup_expr, see the docstring in 'propagate_up_typemap_info'. + """ + output: dict[Expression, Type] = {} + + # Note: parent_expr and parent_type are progressively refined as we crawl up the + # parent lookup chain. + while True: + # First, check if this expression is one that's attempting to + # "lookup" some key in the parent type. If so, save the parent type + # and create function that will try replaying the same lookup + # operation against arbitrary types. + if isinstance(expr, MemberExpr): + parent_expr = self._propagate_walrus_assignments(expr.expr, output) + parent_type = self.lookup_type_or_none(parent_expr) + member_name = expr.name + + def replay_lookup(new_parent_type: ProperType) -> Type | None: + with self.msg.filter_errors() as w: + member_type = analyze_member_access( + name=member_name, + typ=new_parent_type, + context=parent_expr, + is_lvalue=False, + is_super=False, + is_operator=False, + original_type=new_parent_type, + chk=self, + in_literal_context=False, + ) + if w.has_new_errors(): + return None + else: + return member_type + + elif isinstance(expr, IndexExpr): + parent_expr = self._propagate_walrus_assignments(expr.base, output) + parent_type = self.lookup_type_or_none(parent_expr) + + self._propagate_walrus_assignments(expr.index, output) + index_type = self.lookup_type_or_none(expr.index) + if index_type is None: + return output + + str_literals = try_getting_str_literals_from_type(index_type) + if str_literals is not None: + # Refactoring these two indexing replay functions is surprisingly + # tricky -- see https://github.com/python/mypy/pull/7917, which + # was blocked by https://github.com/mypyc/mypyc/issues/586 + def replay_lookup(new_parent_type: ProperType) -> Type | None: + if not isinstance(new_parent_type, TypedDictType): + return None + try: + assert str_literals is not None + member_types = [new_parent_type.items[key] for key in str_literals] + except KeyError: + return None + return make_simplified_union(member_types) + + else: + int_literals = try_getting_int_literals_from_type(index_type) + if int_literals is not None: + + def replay_lookup(new_parent_type: ProperType) -> Type | None: + if not isinstance(new_parent_type, TupleType): + return None + try: + assert int_literals is not None + member_types = [new_parent_type.items[key] for key in int_literals] + 
except IndexError: + return None + return make_simplified_union(member_types) + + else: + return output + else: + return output + + # If we somehow didn't previously derive the parent type, abort completely + # with what we have so far: something went wrong at an earlier stage. + if parent_type is None: + return output + + # We currently only try refining the parent type if it's a Union. + # If not, there's no point in trying to refine any further parents + # since we have no further information we can use to refine the lookup + # chain, so we end early as an optimization. + parent_type = get_proper_type(parent_type) + if not isinstance(parent_type, UnionType): + return output + + # Take each element in the parent union and replay the original lookup procedure + # to figure out which parents are compatible. + new_parent_types = [] + for item in flatten_nested_unions(parent_type.items): + member_type = replay_lookup(get_proper_type(item)) + if member_type is None: + # We were unable to obtain the member type. So, we give up on refining this + # parent type entirely and abort. + return output + + if is_overlapping_types(member_type, expr_type): + new_parent_types.append(item) + + # If none of the parent types overlap (if we derived an empty union), something + # went wrong. We should never hit this case, but deriving the uninhabited type or + # reporting an error both seem unhelpful. So we abort. + if not new_parent_types: + return output + + expr = parent_expr + expr_type = output[parent_expr] = make_simplified_union(new_parent_types) + + def _propagate_walrus_assignments( + self, expr: Expression, type_map: dict[Expression, Type] + ) -> Expression: + """Add assignments from walrus expressions to inferred types. + + Only considers nested assignment exprs, does not recurse into other types. + This may be added later if necessary by implementing a dedicated visitor. + """ + if isinstance(expr, AssignmentExpr): + if isinstance(expr.value, AssignmentExpr): + self._propagate_walrus_assignments(expr.value, type_map) + assigned_type = self.lookup_type_or_none(expr.value) + parent_expr = collapse_walrus(expr) + if assigned_type is not None: + type_map[parent_expr] = assigned_type + return parent_expr + return expr + + def refine_identity_comparison_expression( + self, + operands: list[Expression], + operand_types: list[Type], + chain_indices: list[int], + narrowable_operand_indices: AbstractSet[int], + is_valid_target: Callable[[ProperType], bool], + coerce_only_in_literal_context: bool, + ) -> tuple[TypeMap, TypeMap]: + """Produce conditional type maps refining expressions by an identity/equality comparison. + + The 'operands' and 'operand_types' lists should be the full list of operands used + in the overall comparison expression. The 'chain_indices' list is the list of indices + actually used within this identity comparison chain. + + So if we have the expression: + + a <= b is c is d <= e + + ...then 'operands' and 'operand_types' would be lists of length 5 and 'chain_indices' + would be the list [1, 2, 3]. + + The 'narrowable_operand_indices' parameter is the set of all indices we are allowed + to refine the types of: that is, all operands that will potentially be a part of + the output TypeMaps. + + Although this function could theoretically try setting the types of the operands + in the chains to the meet, doing that causes too many issues in real-world code. 
+ Instead, we use 'is_valid_target' to identify which of the given chain types + we could plausibly use as the refined type for the expressions in the chain. + + Similarly, 'coerce_only_in_literal_context' controls whether we should try coercing + expressions in the chain to a Literal type. Performing this coercion is sometimes + too aggressive of a narrowing, depending on context. + """ + should_coerce = True + if coerce_only_in_literal_context: + + def should_coerce_inner(typ: Type) -> bool: + typ = get_proper_type(typ) + return is_literal_type_like(typ) or ( + isinstance(typ, Instance) and typ.type.is_enum + ) + + should_coerce = any(should_coerce_inner(operand_types[i]) for i in chain_indices) + + target: Type | None = None + possible_target_indices = [] + for i in chain_indices: + expr_type = operand_types[i] + if should_coerce: + expr_type = coerce_to_literal(expr_type) + if not is_valid_target(get_proper_type(expr_type)): + continue + if target and not is_same_type(target, expr_type): + # We have multiple disjoint target types. So the 'if' branch + # must be unreachable. + return None, {} + target = expr_type + possible_target_indices.append(i) + + # There's nothing we can currently infer if none of the operands are valid targets, + # so we end early and infer nothing. + if target is None: + return {}, {} + + # If possible, use an unassignable expression as the target. + # We skip refining the type of the target below, so ideally we'd + # want to pick an expression we were going to skip anyways. + singleton_index = -1 + for i in possible_target_indices: + if i not in narrowable_operand_indices: + singleton_index = i + + # But if none of the possible singletons are unassignable ones, we give up + # and arbitrarily pick the last item, mostly because other parts of the + # type narrowing logic bias towards picking the rightmost item and it'd be + # nice to stay consistent. + # + # That said, it shouldn't matter which index we pick. For example, suppose we + # have this if statement, where 'x' and 'y' both have singleton types: + # + # if x is y: + # reveal_type(x) + # reveal_type(y) + # else: + # reveal_type(x) + # reveal_type(y) + # + # At this point, 'x' and 'y' *must* have the same singleton type: we would have + # ended early in the first for-loop in this function if they weren't. + # + # So, we should always get the same result in the 'if' case no matter which + # index we pick. And while we do end up getting different results in the 'else' + # case depending on the index (e.g. if we pick 'y', then its type stays the same + # while 'x' is narrowed to ''), this distinction is also moot: mypy + # currently will just mark the whole branch as unreachable if either operand is + # narrowed to . + if singleton_index == -1: + singleton_index = possible_target_indices[-1] + + sum_type_name = None + target = get_proper_type(target) + if isinstance(target, LiteralType) and ( + target.is_enum_literal() or isinstance(target.value, bool) + ): + sum_type_name = target.fallback.type.fullname + + target_type = [TypeRange(target, is_upper_bound=False)] + + partial_type_maps = [] + for i in chain_indices: + # If we try refining a type against itself, conditional_type_map + # will end up assuming that the 'else' branch is unreachable. This is + # typically not what we want: generally the user will intend for the + # target type to be some fixed 'sentinel' value and will want to refine + # the other exprs against this one instead. 
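+            # Rough illustration (hypothetical code): in 'if x is None:' the None literal
+            # is the unassignable target and is skipped here, so only 'x' gets refined --
+            # to None in the if-branch and to its non-None items in the else-branch.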
+ if i == singleton_index: + continue + + # Naturally, we can't refine operands which are not permitted to be refined. + if i not in narrowable_operand_indices: + continue + + expr = operands[i] + expr_type = coerce_to_literal(operand_types[i]) + + if sum_type_name is not None: + expr_type = try_expanding_sum_type_to_union(expr_type, sum_type_name) + + # We intentionally use 'conditional_types' directly here instead of + # 'self.conditional_types_with_intersection': we only compute ad-hoc + # intersections when working with pure instances. + types = conditional_types(expr_type, target_type) + partial_type_maps.append(conditional_types_to_typemaps(expr, *types)) + + return reduce_conditional_maps(partial_type_maps) + + def refine_away_none_in_comparison( + self, + operands: list[Expression], + operand_types: list[Type], + chain_indices: list[int], + narrowable_operand_indices: AbstractSet[int], + ) -> tuple[TypeMap, TypeMap]: + """Produces conditional type maps refining away None in an identity/equality chain. + + For more details about what the different arguments mean, see the + docstring of 'refine_identity_comparison_expression' up above. + """ + + non_optional_types = [] + for i in chain_indices: + typ = operand_types[i] + if not is_overlapping_none(typ): + non_optional_types.append(typ) + + if_map, else_map = {}, {} + + if not non_optional_types or (len(non_optional_types) != len(chain_indices)): + + # Narrow e.g. `Optional[A] == "x"` or `Optional[A] is "x"` to `A` (which may be + # convenient but is strictly not type-safe): + for i in narrowable_operand_indices: + expr_type = operand_types[i] + if not is_overlapping_none(expr_type): + continue + if any(is_overlapping_erased_types(expr_type, t) for t in non_optional_types): + if_map[operands[i]] = remove_optional(expr_type) + + # Narrow e.g. `Optional[A] != None` to `A` (which is stricter than the above step and + # so type-safe but less convenient, because e.g. `Optional[A] == None` still results + # in `Optional[A]`): + if any(isinstance(get_proper_type(ot), NoneType) for ot in operand_types): + for i in narrowable_operand_indices: + expr_type = operand_types[i] + if is_overlapping_none(expr_type): + else_map[operands[i]] = remove_optional(expr_type) + + return if_map, else_map + + def is_len_of_tuple(self, expr: Expression) -> bool: + """Is this expression a `len(x)` call where x is a tuple or union of tuples?""" + if not isinstance(expr, CallExpr): + return False + if not refers_to_fullname(expr.callee, "builtins.len"): + return False + if len(expr.args) != 1: + return False + expr = expr.args[0] + if literal(expr) != LITERAL_TYPE: + return False + if not self.has_type(expr): + return False + return self.can_be_narrowed_with_len(self.lookup_type(expr)) + + def can_be_narrowed_with_len(self, typ: Type) -> bool: + """Is this a type that can benefit from length check type restrictions? + + Currently supported types are TupleTypes, Instances of builtins.tuple, and + unions involving such types. + """ + if custom_special_method(typ, "__len__"): + # If user overrides builtin behavior, we can't do anything. + return False + p_typ = get_proper_type(typ) + # Note: we are conservative about tuple subclasses, because some code may rely on + # the fact that tuple_type of fallback TypeInfo matches the original TupleType. 
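+        # Rough illustration (hypothetical code) of a type this enables narrowing for:
+        #     x: tuple[int, int] | tuple[str, str, str]
+        #     if len(x) == 2:
+        #         reveal_type(x)   # roughly: tuple[int, int]
+        #     else:
+        #         reveal_type(x)   # roughly: tuple[str, str, str]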
+ if isinstance(p_typ, TupleType): + if any(isinstance(t, UnpackType) for t in p_typ.items): + return p_typ.partial_fallback.type.fullname == "builtins.tuple" + return True + if isinstance(p_typ, Instance): + return p_typ.type.has_base("builtins.tuple") + if isinstance(p_typ, UnionType): + return any(self.can_be_narrowed_with_len(t) for t in p_typ.items) + return False + + def literal_int_expr(self, expr: Expression) -> int | None: + """Is this expression an int literal, or a reference to an int constant? + + If yes, return the corresponding int value, otherwise return None. + """ + if not self.has_type(expr): + return None + expr_type = self.lookup_type(expr) + expr_type = coerce_to_literal(expr_type) + proper_type = get_proper_type(expr_type) + if not isinstance(proper_type, LiteralType): + return None + if not isinstance(proper_type.value, int): + return None + return proper_type.value + + def find_tuple_len_narrowing(self, node: ComparisonExpr) -> list[tuple[TypeMap, TypeMap]]: + """Top-level logic to find type restrictions from a length check on tuples. + + We try to detect `if` checks like the following: + x: tuple[int, int] | tuple[int, int, int] + y: tuple[int, int] | tuple[int, int, int] + if len(x) == len(y) == 2: + a, b = x # OK + c, d = y # OK + + z: tuple[int, ...] + if 1 < len(z) < 4: + x = z # OK + and report corresponding type restrictions to the binder. + """ + # First step: group consecutive `is` and `==` comparisons together. + # This is essentially a simplified version of group_comparison_operands(), + # tuned to the len()-like checks. Note that we don't propagate indirect + # restrictions like e.g. `len(x) > foo() > 1` yet, since it is tricky. + # TODO: propagate indirect len() comparison restrictions. + chained = [] + last_group = set() + for op, left, right in node.pairwise(): + if isinstance(left, AssignmentExpr): + left = left.value + if isinstance(right, AssignmentExpr): + right = right.value + if op in ("is", "=="): + last_group.add(left) + last_group.add(right) + else: + if last_group: + chained.append(("==", list(last_group))) + last_group = set() + if op in {"is not", "!=", "<", "<=", ">", ">="}: + chained.append((op, [left, right])) + if last_group: + chained.append(("==", list(last_group))) + + # Second step: infer type restrictions from each group found above. + type_maps = [] + for op, items in chained: + # TODO: support unions of literal types as len() comparison targets. + if not any(self.literal_int_expr(it) is not None for it in items): + continue + if not any(self.is_len_of_tuple(it) for it in items): + continue + + # At this step we know there is at least one len(x) and one literal in the group. + if op in ("is", "=="): + literal_values = set() + tuples = [] + for it in items: + lit = self.literal_int_expr(it) + if lit is not None: + literal_values.add(lit) + continue + if self.is_len_of_tuple(it): + assert isinstance(it, CallExpr) + tuples.append(it.args[0]) + if len(literal_values) > 1: + # More than one different literal value found, like 1 == len(x) == 2, + # so the corresponding branch is unreachable. + return [(None, {})] + size = literal_values.pop() + if size > MAX_PRECISE_TUPLE_SIZE: + # Avoid creating huge tuples from checks like if len(x) == 300. 
+ continue + for tpl in tuples: + yes_type, no_type = self.narrow_with_len(self.lookup_type(tpl), op, size) + yes_map = None if yes_type is None else {tpl: yes_type} + no_map = None if no_type is None else {tpl: no_type} + type_maps.append((yes_map, no_map)) + else: + left, right = items + if self.is_len_of_tuple(right): + # Normalize `1 < len(x)` and similar as `len(x) > 1`. + left, right = right, left + op = flip_ops.get(op, op) + r_size = self.literal_int_expr(right) + assert r_size is not None + if r_size > MAX_PRECISE_TUPLE_SIZE: + # Avoid creating huge unions from checks like if len(x) > 300. + continue + assert isinstance(left, CallExpr) + yes_type, no_type = self.narrow_with_len( + self.lookup_type(left.args[0]), op, r_size + ) + yes_map = None if yes_type is None else {left.args[0]: yes_type} + no_map = None if no_type is None else {left.args[0]: no_type} + type_maps.append((yes_map, no_map)) + return type_maps + + def narrow_with_len(self, typ: Type, op: str, size: int) -> tuple[Type | None, Type | None]: + """Dispatch tuple type narrowing logic depending on the kind of type we got.""" + typ = get_proper_type(typ) + if isinstance(typ, TupleType): + return self.refine_tuple_type_with_len(typ, op, size) + elif isinstance(typ, Instance): + return self.refine_instance_type_with_len(typ, op, size) + elif isinstance(typ, UnionType): + yes_types = [] + no_types = [] + other_types = [] + for t in typ.items: + if not self.can_be_narrowed_with_len(t): + other_types.append(t) + continue + yt, nt = self.narrow_with_len(t, op, size) + if yt is not None: + yes_types.append(yt) + if nt is not None: + no_types.append(nt) + yes_types += other_types + no_types += other_types + if yes_types: + yes_type = make_simplified_union(yes_types) + else: + yes_type = None + if no_types: + no_type = make_simplified_union(no_types) + else: + no_type = None + return yes_type, no_type + else: + assert False, "Unsupported type for len narrowing" + + def refine_tuple_type_with_len( + self, typ: TupleType, op: str, size: int + ) -> tuple[Type | None, Type | None]: + """Narrow a TupleType using length restrictions.""" + unpack_index = find_unpack_in_list(typ.items) + if unpack_index is None: + # For fixed length tuple situation is trivial, it is either reachable or not, + # depending on the current length, expected length, and the comparison op. + method = int_op_to_method[op] + if method(typ.length(), size): + return typ, None + return None, typ + unpack = typ.items[unpack_index] + assert isinstance(unpack, UnpackType) + unpacked = get_proper_type(unpack.type) + if isinstance(unpacked, TypeVarTupleType): + # For tuples involving TypeVarTuple unpack we can't do much except + # inferring reachability, and recording the restrictions on TypeVarTuple + # for further "manual" use elsewhere. + min_len = typ.length() - 1 + unpacked.min_len + if op in ("==", "is"): + if min_len <= size: + return typ, typ + return None, typ + elif op in ("<", "<="): + if op == "<=": + size += 1 + if min_len < size: + prefix = typ.items[:unpack_index] + suffix = typ.items[unpack_index + 1 :] + # TODO: also record max_len to avoid false negatives? + unpack = UnpackType(unpacked.copy_modified(min_len=size - typ.length() + 1)) + return typ, typ.copy_modified(items=prefix + [unpack] + suffix) + return None, typ + else: + yes_type, no_type = self.refine_tuple_type_with_len(typ, neg_ops[op], size) + return no_type, yes_type + # Homogeneous variadic item is the case where we are most flexible. 
Essentially, + # we adjust the variadic item by "eating away" from it to satisfy the restriction. + assert isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple" + min_len = typ.length() - 1 + arg = unpacked.args[0] + prefix = typ.items[:unpack_index] + suffix = typ.items[unpack_index + 1 :] + if op in ("==", "is"): + if min_len <= size: + # TODO: return fixed union + prefixed variadic tuple for no_type? + return typ.copy_modified(items=prefix + [arg] * (size - min_len) + suffix), typ + return None, typ + elif op in ("<", "<="): + if op == "<=": + size += 1 + if min_len < size: + # Note: there is some ambiguity w.r.t. to where to put the additional + # items: before or after the unpack. However, such types are equivalent, + # so we always put them before for consistency. + no_type = typ.copy_modified( + items=prefix + [arg] * (size - min_len) + [unpack] + suffix + ) + yes_items = [] + for n in range(size - min_len): + yes_items.append(typ.copy_modified(items=prefix + [arg] * n + suffix)) + return UnionType.make_union(yes_items, typ.line, typ.column), no_type + return None, typ + else: + yes_type, no_type = self.refine_tuple_type_with_len(typ, neg_ops[op], size) + return no_type, yes_type + + def refine_instance_type_with_len( + self, typ: Instance, op: str, size: int + ) -> tuple[Type | None, Type | None]: + """Narrow a homogeneous tuple using length restrictions.""" + base = map_instance_to_supertype(typ, self.lookup_typeinfo("builtins.tuple")) + arg = base.args[0] + # Again, we are conservative about subclasses until we gain more confidence. + allow_precise = ( + PRECISE_TUPLE_TYPES in self.options.enable_incomplete_feature + ) and typ.type.fullname == "builtins.tuple" + if op in ("==", "is"): + # TODO: return fixed union + prefixed variadic tuple for no_type? + return TupleType(items=[arg] * size, fallback=typ), typ + elif op in ("<", "<="): + if op == "<=": + size += 1 + if allow_precise: + unpack = UnpackType(self.named_generic_type("builtins.tuple", [arg])) + no_type: Type | None = TupleType(items=[arg] * size + [unpack], fallback=typ) + else: + no_type = typ + if allow_precise: + items = [] + for n in range(size): + items.append(TupleType([arg] * n, fallback=typ)) + yes_type: Type | None = UnionType.make_union(items, typ.line, typ.column) + else: + yes_type = typ + return yes_type, no_type + else: + yes_type, no_type = self.refine_instance_type_with_len(typ, neg_ops[op], size) + return no_type, yes_type + + # + # Helpers + # + @overload + def check_subtype( + self, + subtype: Type, + supertype: Type, + context: Context, + msg: str, + subtype_label: str | None = None, + supertype_label: str | None = None, + *, + notes: list[str] | None = None, + code: ErrorCode | None = None, + outer_context: Context | None = None, + ) -> bool: ... + + @overload + def check_subtype( + self, + subtype: Type, + supertype: Type, + context: Context, + msg: ErrorMessage, + subtype_label: str | None = None, + supertype_label: str | None = None, + *, + notes: list[str] | None = None, + outer_context: Context | None = None, + ) -> bool: ... 
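+    # The two overloads differ only in the message type: a plain str may be paired with
+    # an explicit error `code`, while an ErrorMessage already carries its own code, so
+    # the implementation below wraps str messages into ErrorMessage(msg, code=code).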
+ + def check_subtype( + self, + subtype: Type, + supertype: Type, + context: Context, + msg: str | ErrorMessage, + subtype_label: str | None = None, + supertype_label: str | None = None, + *, + notes: list[str] | None = None, + code: ErrorCode | None = None, + outer_context: Context | None = None, + ) -> bool: + """Generate an error if the subtype is not compatible with supertype.""" + if is_subtype(subtype, supertype, options=self.options): + return True + + if isinstance(msg, str): + msg = ErrorMessage(msg, code=code) + + if self.msg.prefer_simple_messages(): + self.fail(msg, context) # Fast path -- skip all fancy logic + return False + + orig_subtype = subtype + subtype = get_proper_type(subtype) + orig_supertype = supertype + supertype = get_proper_type(supertype) + if self.msg.try_report_long_tuple_assignment_error( + subtype, supertype, context, msg, subtype_label, supertype_label + ): + return False + extra_info: list[str] = [] + note_msg = "" + notes = notes or [] + if subtype_label is not None or supertype_label is not None: + subtype_str, supertype_str = format_type_distinctly( + orig_subtype, orig_supertype, options=self.options + ) + if subtype_label is not None: + extra_info.append(subtype_label + " " + subtype_str) + if supertype_label is not None: + extra_info.append(supertype_label + " " + supertype_str) + note_msg = make_inferred_type_note( + outer_context or context, subtype, supertype, supertype_str + ) + if isinstance(subtype, Instance) and isinstance(supertype, Instance): + notes = append_invariance_notes(notes, subtype, supertype) + if isinstance(subtype, UnionType) and isinstance(supertype, UnionType): + notes = append_union_note(notes, subtype, supertype, self.options) + if extra_info: + msg = msg.with_additional_msg(" (" + ", ".join(extra_info) + ")") + + error = self.fail(msg, context) + for note in notes: + self.msg.note(note, context, code=msg.code) + if note_msg: + self.note(note_msg, context, code=msg.code) + self.msg.maybe_note_concatenate_pos_args(subtype, supertype, context, code=msg.code) + if ( + isinstance(supertype, Instance) + and supertype.type.is_protocol + and isinstance(subtype, (CallableType, Instance, TupleType, TypedDictType, TypeType)) + ): + self.msg.report_protocol_problems(subtype, supertype, context, parent_error=error) + if isinstance(supertype, CallableType) and isinstance(subtype, Instance): + call = find_member("__call__", subtype, subtype, is_operator=True) + if call: + self.msg.note_call(subtype, call, context, code=msg.code) + if isinstance(subtype, (CallableType, Overloaded)) and isinstance(supertype, Instance): + if supertype.type.is_protocol and "__call__" in supertype.type.protocol_members: + call = find_member("__call__", supertype, subtype, is_operator=True) + assert call is not None + if not is_subtype(subtype, call, options=self.options): + self.msg.note_call(supertype, call, context, code=msg.code) + self.check_possible_missing_await(subtype, supertype, context, code=msg.code) + return False + + def get_precise_awaitable_type(self, typ: Type, local_errors: ErrorWatcher) -> Type | None: + """If type implements Awaitable[X] with non-Any X, return X. + + In all other cases return None. This method must be called in context + of local_errors. + """ + if isinstance(get_proper_type(typ), PartialType): + # Partial types are special, ignore them here. 
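+            # (A PartialType means inference for this expression is still incomplete.)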
+ return None + try: + aw_type = self.expr_checker.check_awaitable_expr( + typ, Context(), "", ignore_binder=True + ) + except KeyError: + # This is a hack to speed up tests by not including Awaitable in all typing stubs. + return None + if local_errors.has_new_errors(): + return None + if isinstance(get_proper_type(aw_type), (AnyType, UnboundType)): + return None + return aw_type + + @contextmanager + def checking_await_set(self) -> Iterator[None]: + self.checking_missing_await = True + try: + yield + finally: + self.checking_missing_await = False + + def check_possible_missing_await( + self, subtype: Type, supertype: Type, context: Context, code: ErrorCode | None + ) -> None: + """Check if the given type becomes a subtype when awaited.""" + if self.checking_missing_await: + # Avoid infinite recursion. + return + with self.checking_await_set(), self.msg.filter_errors() as local_errors: + aw_type = self.get_precise_awaitable_type(subtype, local_errors) + if aw_type is None: + return + if not self.check_subtype( + aw_type, supertype, context, msg=message_registry.INCOMPATIBLE_TYPES + ): + return + self.msg.possible_missing_await(context, code) + + def named_type(self, name: str) -> Instance: + """Return an instance type with given name and implicit Any type args. + + For example, named_type('builtins.object') produces the 'object' type. + """ + if name == "builtins.str": + if instance_cache.str_type is None: + instance_cache.str_type = self._named_type(name) + return instance_cache.str_type + if name == "builtins.function": + if instance_cache.function_type is None: + instance_cache.function_type = self._named_type(name) + return instance_cache.function_type + if name == "builtins.int": + if instance_cache.int_type is None: + instance_cache.int_type = self._named_type(name) + return instance_cache.int_type + if name == "builtins.bool": + if instance_cache.bool_type is None: + instance_cache.bool_type = self._named_type(name) + return instance_cache.bool_type + if name == "builtins.object": + if instance_cache.object_type is None: + instance_cache.object_type = self._named_type(name) + return instance_cache.object_type + return self._named_type(name) + + def _named_type(self, name: str) -> Instance: + # Assume that the name refers to a type. + sym = self.lookup_qualified(name) + node = sym.node + if isinstance(node, TypeAlias): + assert isinstance(node.target, Instance) # type: ignore[misc] + node = node.target.type + assert isinstance(node, TypeInfo), node + any_type = AnyType(TypeOfAny.from_omitted_generics) + return Instance(node, [any_type] * len(node.defn.type_vars)) + + def named_generic_type(self, name: str, args: list[Type]) -> Instance: + """Return an instance with the given name and type arguments. + + Assume that the number of arguments is correct. Assume that + the name refers to a compatible generic type. + """ + info = self.lookup_typeinfo(name) + args = [remove_instance_last_known_values(arg) for arg in args] + # TODO: assert len(args) == len(info.defn.type_vars) + return Instance(info, args) + + def lookup_typeinfo(self, fullname: str) -> TypeInfo: + # Assume that the name refers to a class. 
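+        # If it does not, the isinstance assertion below fails loudly instead of
+        # silently returning a wrong node.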
+ sym = self.lookup_qualified(fullname) + node = sym.node + assert isinstance(node, TypeInfo), node + return node + + def type_type(self) -> Instance: + """Return instance type 'type'.""" + return self.named_type("builtins.type") + + def str_type(self) -> Instance: + """Return instance type 'str'.""" + return self.named_type("builtins.str") + + def store_type(self, node: Expression, typ: Type) -> None: + """Store the type of a node in the type map.""" + self._type_maps[-1][node] = typ + + def has_type(self, node: Expression) -> bool: + return any(node in m for m in reversed(self._type_maps)) + + def lookup_type_or_none(self, node: Expression) -> Type | None: + for m in reversed(self._type_maps): + if node in m: + return m[node] + return None + + def lookup_type(self, node: Expression) -> Type: + for m in reversed(self._type_maps): + t = m.get(node) + if t is not None: + return t + raise KeyError(node) + + def store_types(self, d: dict[Expression, Type]) -> None: + self._type_maps[-1].update(d) + + def in_checked_function(self) -> bool: + """Should we type-check the current function? + + - Yes if --check-untyped-defs is set. + - Yes outside functions. + - Yes in annotated functions. + - No otherwise. + """ + return ( + self.options.check_untyped_defs or not self.dynamic_funcs or not self.dynamic_funcs[-1] + ) + + def lookup(self, name: str) -> SymbolTableNode: + """Look up a definition from the symbol table with the given name.""" + if name in self.globals: + return self.globals[name] + else: + b = self.globals.get("__builtins__", None) + if b: + assert isinstance(b.node, MypyFile) + table = b.node.names + if name in table: + return table[name] + raise KeyError(f"Failed lookup: {name}") + + def lookup_qualified(self, name: str) -> SymbolTableNode: + if "." not in name: + return self.lookup(name) + else: + parts = name.split(".") + n = self.modules[parts[0]] + for i in range(1, len(parts) - 1): + sym = n.names.get(parts[i]) + assert sym is not None, "Internal error: attempted lookup of unknown name" + assert isinstance(sym.node, MypyFile) + n = sym.node + last = parts[-1] + if last in n.names: + return n.names[last] + elif len(parts) == 2 and parts[0] in ("builtins", "typing"): + fullname = ".".join(parts) + if fullname in SUGGESTED_TEST_FIXTURES: + suggestion = ", e.g. add '[{} fixtures/{}]' to your test".format( + parts[0], SUGGESTED_TEST_FIXTURES[fullname] + ) + else: + suggestion = "" + raise KeyError( + "Could not find builtin symbol '{}' (If you are running a " + "test case, use a fixture that " + "defines this symbol{})".format(last, suggestion) + ) + else: + msg = "Failed qualified lookup: '{}' (fullname = '{}')." + raise KeyError(msg.format(last, name)) + + @contextmanager + def enter_partial_types( + self, *, is_function: bool = False, is_class: bool = False + ) -> Iterator[None]: + """Enter a new scope for collecting partial types. + + Also report errors for (some) variables which still have partial + types, i.e. we couldn't infer a complete type. + """ + is_local = (self.partial_types and self.partial_types[-1].is_local) or is_function + self.partial_types.append(PartialTypeScope({}, is_function, is_local)) + yield + + # Don't complain about not being able to infer partials if it is + # at the toplevel (with allow_untyped_globals) or if it is in an + # untyped function being checked with check_untyped_defs. 
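+        # When 'permissive' is true below, we neither force a bare None partial type
+        # to NoneType nor emit a "need type annotation" error for it.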
+ permissive = (self.options.allow_untyped_globals and not is_local) or ( + self.options.check_untyped_defs and self.dynamic_funcs and self.dynamic_funcs[-1] + ) + + partial_types, _, _ = self.partial_types.pop() + if not self.current_node_deferred: + for var, context in partial_types.items(): + # If we require local partial types, there are a few exceptions where + # we fall back to inferring just "None" as the type from a None initializer: + # + # 1. If all happens within a single function this is acceptable, since only + # the topmost function is a separate target in fine-grained incremental mode. + # We primarily want to avoid "splitting" partial types across targets. + # + # 2. A None initializer in the class body if the attribute is defined in a base + # class is fine, since the attribute is already defined and it's currently okay + # to vary the type of an attribute covariantly. The None type will still be + # checked for compatibility with base classes elsewhere. Without this exception + # mypy could require an annotation for an attribute that already has been + # declared in a base class, which would be bad. + allow_none = ( + not self.options.local_partial_types + or is_function + or (is_class and self.is_defined_in_base_class(var)) + ) + if ( + allow_none + and isinstance(var.type, PartialType) + and var.type.type is None + and not permissive + ): + var.type = NoneType() + else: + if var not in self.partial_reported and not permissive: + self.msg.need_annotation_for_var(var, context, self.options) + self.partial_reported.add(var) + if var.type: + fixed = fixup_partial_type(var.type) + var.invalid_partial_type = fixed != var.type + var.type = fixed + + def handle_partial_var_type( + self, typ: PartialType, is_lvalue: bool, node: Var, context: Context + ) -> Type: + """Handle a reference to a partial type through a var. + + (Used by checkexpr and checkmember.) + """ + in_scope, is_local, partial_types = self.find_partial_types_in_all_scopes(node) + if typ.type is None and in_scope: + # 'None' partial type. It has a well-defined type. In an lvalue context + # we want to preserve the knowledge of it being a partial type. + if not is_lvalue: + return NoneType() + else: + return typ + else: + if partial_types is not None and not self.current_node_deferred: + if in_scope: + context = partial_types[node] + if is_local or not self.options.allow_untyped_globals: + self.msg.need_annotation_for_var(node, context, self.options) + self.partial_reported.add(node) + else: + # Defer the node -- we might get a better type in the outer scope + self.handle_cannot_determine_type(node.name, context) + return fixup_partial_type(typ) + + def is_defined_in_base_class(self, var: Var) -> bool: + if not var.info: + return False + return var.info.fallback_to_any or any( + base.get(var.name) is not None for base in var.info.mro[1:] + ) + + def find_partial_types(self, var: Var) -> dict[Var, Context] | None: + """Look for an active partial type scope containing variable. + + A scope is active if assignments in the current context can refine a partial + type originally defined in the scope. This is affected by the local_partial_types + configuration option. + """ + in_scope, _, partial_types = self.find_partial_types_in_all_scopes(var) + if in_scope: + return partial_types + return None + + def find_partial_types_in_all_scopes( + self, var: Var + ) -> tuple[bool, bool, dict[Var, Context] | None]: + """Look for partial type scope containing variable. 
+ + Return tuple (is the scope active, is the scope a local scope, scope). + """ + for scope in reversed(self.partial_types): + if var in scope.map: + # All scopes within the outermost function are active. Scopes out of + # the outermost function are inactive to allow local reasoning (important + # for fine-grained incremental mode). + disallow_other_scopes = self.options.local_partial_types + + if isinstance(var.type, PartialType) and var.type.type is not None and var.info: + # This is an ugly hack to make partial generic self attributes behave + # as if --local-partial-types is always on (because it used to be like this). + disallow_other_scopes = True + + scope_active = ( + not disallow_other_scopes or scope.is_local == self.partial_types[-1].is_local + ) + return scope_active, scope.is_local, scope.map + return False, False, None + + def temp_node(self, t: Type, context: Context | None = None) -> TempNode: + """Create a temporary node with the given, fixed type.""" + return TempNode(t, context=context) + + def fail( + self, msg: str | ErrorMessage, context: Context, *, code: ErrorCode | None = None + ) -> ErrorInfo: + """Produce an error message.""" + if isinstance(msg, ErrorMessage): + return self.msg.fail(msg.value, context, code=msg.code) + return self.msg.fail(msg, context, code=code) + + def note( + self, + msg: str | ErrorMessage, + context: Context, + offset: int = 0, + *, + code: ErrorCode | None = None, + ) -> None: + """Produce a note.""" + if isinstance(msg, ErrorMessage): + self.msg.note(msg.value, context, code=msg.code) + return + self.msg.note(msg, context, offset=offset, code=code) + + def iterable_item_type( + self, it: Instance | CallableType | TypeType | Overloaded, context: Context + ) -> Type: + if isinstance(it, Instance): + iterable = map_instance_to_supertype(it, self.lookup_typeinfo("typing.Iterable")) + item_type = iterable.args[0] + if not isinstance(get_proper_type(item_type), AnyType): + # This relies on 'map_instance_to_supertype' returning 'Iterable[Any]' + # in case there is no explicit base class. + return item_type + # Try also structural typing. + return self.analyze_iterable_item_type_without_expression(it, context)[1] + + def function_type(self, func: FuncBase) -> FunctionLike: + return function_type(func, self.named_type("builtins.function")) + + def push_type_map(self, type_map: TypeMap, *, from_assignment: bool = True) -> None: + if type_map is None: + self.binder.unreachable() + else: + for expr, type in type_map.items(): + self.binder.put(expr, type, from_assignment=from_assignment) + + def infer_issubclass_maps(self, node: CallExpr, expr: Expression) -> tuple[TypeMap, TypeMap]: + """Infer type restrictions for an expression in issubclass call.""" + vartype = self.lookup_type(expr) + type = self.get_isinstance_type(node.args[1]) + if isinstance(vartype, TypeVarType): + vartype = vartype.upper_bound + vartype = get_proper_type(vartype) + if isinstance(vartype, UnionType): + union_list = [] + for t in get_proper_types(vartype.items): + if isinstance(t, TypeType): + union_list.append(t.item) + else: + # This is an error that should be reported earlier + # if we reach here, we refuse to do any type inference. + return {}, {} + vartype = UnionType(union_list) + elif isinstance(vartype, TypeType): + vartype = vartype.item + elif isinstance(vartype, Instance) and vartype.type.is_metaclass(): + vartype = self.named_type("builtins.object") + else: + # Any other object whose type we don't know precisely + # for example, Any or a custom metaclass. 
+ return {}, {} # unknown type + yes_type, no_type = self.conditional_types_with_intersection(vartype, type, expr) + yes_map, no_map = conditional_types_to_typemaps(expr, yes_type, no_type) + yes_map, no_map = map(convert_to_typetype, (yes_map, no_map)) + return yes_map, no_map + + @overload + def conditional_types_with_intersection( + self, + expr_type: Type, + type_ranges: list[TypeRange] | None, + ctx: Context, + default: None = None, + *, + consider_runtime_isinstance: bool = True, + ) -> tuple[Type | None, Type | None]: ... + + @overload + def conditional_types_with_intersection( + self, + expr_type: Type, + type_ranges: list[TypeRange] | None, + ctx: Context, + default: Type, + *, + consider_runtime_isinstance: bool = True, + ) -> tuple[Type, Type]: ... + + def conditional_types_with_intersection( + self, + expr_type: Type, + type_ranges: list[TypeRange] | None, + ctx: Context, + default: Type | None = None, + *, + consider_runtime_isinstance: bool = True, + ) -> tuple[Type | None, Type | None]: + initial_types = conditional_types( + expr_type, + type_ranges, + default, + consider_runtime_isinstance=consider_runtime_isinstance, + ) + # For some reason, doing "yes_map, no_map = conditional_types_to_typemaps(...)" + # doesn't work: mypyc will decide that 'yes_map' is of type None if we try. + yes_type: Type | None = initial_types[0] + no_type: Type | None = initial_types[1] + + if not isinstance(get_proper_type(yes_type), UninhabitedType) or type_ranges is None: + return yes_type, no_type + + # If conditional_types was unable to successfully narrow the expr_type + # using the type_ranges and concluded if-branch is unreachable, we try + # computing it again using a different algorithm that tries to generate + # an ad-hoc intersection between the expr_type and the type_ranges. + proper_type = get_proper_type(expr_type) + if isinstance(proper_type, UnionType): + possible_expr_types = get_proper_types(proper_type.relevant_items()) + else: + possible_expr_types = [proper_type] + + possible_target_types = [] + for tr in type_ranges: + item = get_proper_type(tr.item) + if isinstance(item, (Instance, NoneType)): + possible_target_types.append(item) + if not possible_target_types: + return yes_type, no_type + + out = [] + errors: list[tuple[str, str]] = [] + for v in possible_expr_types: + if not isinstance(v, Instance): + return yes_type, no_type + for t in possible_target_types: + if isinstance(t, NoneType): + errors.append((f'"{v.type.name}" and "NoneType"', '"NoneType" is final')) + continue + intersection = self.intersect_instances((v, t), errors) + if intersection is None: + continue + out.append(intersection) + if not out: + # Only report errors if no element in the union worked. + if self.should_report_unreachable_issues(): + for types, reason in errors: + self.msg.impossible_intersection(types, reason, ctx) + return UninhabitedType(), expr_type + new_yes_type = make_simplified_union(out) + return new_yes_type, expr_type + + def is_writable_attribute(self, node: Node) -> bool: + """Check if an attribute is writable""" + if isinstance(node, Var): + if node.is_property and not node.is_settable_property: + return False + return True + elif isinstance(node, OverloadedFuncDef) and node.is_property: + first_item = node.items[0] + assert isinstance(first_item, Decorator) + return first_item.var.is_settable_property + return False + + def get_isinstance_type(self, expr: Expression) -> list[TypeRange] | None: + """Get the type(s) resulting from an isinstance check. 
+ + Returns an empty list for isinstance(x, ()). + """ + if isinstance(expr, OpExpr) and expr.op == "|": + left = self.get_isinstance_type(expr.left) + if left is None and is_literal_none(expr.left): + left = [TypeRange(NoneType(), is_upper_bound=False)] + right = self.get_isinstance_type(expr.right) + if right is None and is_literal_none(expr.right): + right = [TypeRange(NoneType(), is_upper_bound=False)] + if left is None or right is None: + return None + return left + right + all_types = get_proper_types(flatten_types(self.lookup_type(expr))) + types: list[TypeRange] = [] + for typ in all_types: + if isinstance(typ, FunctionLike) and typ.is_type_obj(): + # If a type is generic, `isinstance` can only narrow its variables to Any. + any_parameterized = fill_typevars_with_any(typ.type_object()) + # Tuples may have unattended type variables among their items + if isinstance(any_parameterized, TupleType): + erased_type = erase_typevars(any_parameterized) + else: + erased_type = any_parameterized + types.append(TypeRange(erased_type, is_upper_bound=False)) + elif isinstance(typ, TypeType): + # Type[A] means "any type that is a subtype of A" rather than "precisely type A" + # we indicate this by setting is_upper_bound flag + is_upper_bound = True + if isinstance(typ.item, NoneType): + # except for Type[None], because "'NoneType' is not an acceptable base type" + is_upper_bound = False + types.append(TypeRange(typ.item, is_upper_bound=is_upper_bound)) + elif isinstance(typ, Instance) and typ.type.fullname == "builtins.type": + object_type = Instance(typ.type.mro[-1], []) + types.append(TypeRange(object_type, is_upper_bound=True)) + elif isinstance(typ, Instance) and typ.type.fullname == "types.UnionType" and typ.args: + types.append(TypeRange(UnionType(typ.args), is_upper_bound=False)) + elif isinstance(typ, AnyType): + types.append(TypeRange(typ, is_upper_bound=False)) + else: # we didn't see an actual type, but rather a variable with unknown value + return None + return types + + def is_literal_enum(self, n: Expression) -> bool: + """Returns true if this expression (with the given type context) is an Enum literal. + + For example, if we had an enum: + + class Foo(Enum): + A = 1 + B = 2 + + ...and if the expression 'Foo' referred to that enum within the current type context, + then the expression 'Foo.A' would be a literal enum. However, if we did 'a = Foo.A', + then the variable 'a' would *not* be a literal enum. + + We occasionally special-case expressions like 'Foo.A' and treat them as a single primitive + unit for the same reasons we sometimes treat 'True', 'False', or 'None' as a single + primitive unit. 
+ """ + if not isinstance(n, MemberExpr) or not isinstance(n.expr, NameExpr): + return False + + parent_type = self.lookup_type_or_none(n.expr) + member_type = self.lookup_type_or_none(n) + if member_type is None or parent_type is None: + return False + + parent_type = get_proper_type(parent_type) + member_type = get_proper_type(coerce_to_literal(member_type)) + if not isinstance(parent_type, FunctionLike) or not isinstance(member_type, LiteralType): + return False + + if not parent_type.is_type_obj(): + return False + + return ( + member_type.is_enum_literal() + and member_type.fallback.type == parent_type.type_object() + ) + + def add_any_attribute_to_type(self, typ: Type, name: str) -> Type: + """Inject an extra attribute with Any type using fallbacks.""" + orig_typ = typ + typ = get_proper_type(typ) + any_type = AnyType(TypeOfAny.unannotated) + if isinstance(typ, Instance): + result = typ.copy_with_extra_attr(name, any_type) + # For instances, we erase the possible module name, so that restrictions + # become anonymous types.ModuleType instances, allowing hasattr() to + # have effect on modules. + assert result.extra_attrs is not None + result.extra_attrs.mod_name = None + return result + if isinstance(typ, TupleType): + fallback = typ.partial_fallback.copy_with_extra_attr(name, any_type) + return typ.copy_modified(fallback=fallback) + if isinstance(typ, CallableType): + fallback = typ.fallback.copy_with_extra_attr(name, any_type) + return typ.copy_modified(fallback=fallback) + if isinstance(typ, TypeType) and isinstance(typ.item, Instance): + return TypeType.make_normalized( + self.add_any_attribute_to_type(typ.item, name), is_type_form=typ.is_type_form + ) + if isinstance(typ, TypeVarType): + return typ.copy_modified( + upper_bound=self.add_any_attribute_to_type(typ.upper_bound, name), + values=[self.add_any_attribute_to_type(v, name) for v in typ.values], + ) + if isinstance(typ, UnionType): + with_attr, without_attr = self.partition_union_by_attr(typ, name) + return make_simplified_union( + with_attr + [self.add_any_attribute_to_type(typ, name) for typ in without_attr] + ) + return orig_typ + + def hasattr_type_maps( + self, expr: Expression, source_type: Type, name: str + ) -> tuple[TypeMap, TypeMap]: + """Simple support for hasattr() checks. + + Essentially the logic is following: + * In the if branch, keep types that already has a valid attribute as is, + for other inject an attribute with `Any` type. + * In the else branch, remove types that already have a valid attribute, + while keeping the rest. 
+ """ + if self.has_valid_attribute(source_type, name): + return {expr: source_type}, {} + + source_type = get_proper_type(source_type) + if isinstance(source_type, UnionType): + _, without_attr = self.partition_union_by_attr(source_type, name) + yes_map = {expr: self.add_any_attribute_to_type(source_type, name)} + return yes_map, {expr: make_simplified_union(without_attr)} + + type_with_attr = self.add_any_attribute_to_type(source_type, name) + if type_with_attr != source_type: + return {expr: type_with_attr}, {} + return {}, {} + + def partition_union_by_attr( + self, source_type: UnionType, name: str + ) -> tuple[list[Type], list[Type]]: + with_attr = [] + without_attr = [] + for item in source_type.items: + if self.has_valid_attribute(item, name): + with_attr.append(item) + else: + without_attr.append(item) + return with_attr, without_attr + + def has_valid_attribute(self, typ: Type, name: str) -> bool: + p_typ = get_proper_type(typ) + if isinstance(p_typ, AnyType): + return False + if isinstance(p_typ, Instance) and p_typ.extra_attrs and p_typ.extra_attrs.mod_name: + # Presence of module_symbol_table means this check will skip ModuleType.__getattr__ + module_symbol_table = p_typ.type.names + else: + module_symbol_table = None + with self.msg.filter_errors() as watcher: + analyze_member_access( + name, + typ, + TempNode(AnyType(TypeOfAny.special_form)), + is_lvalue=False, + is_super=False, + is_operator=False, + original_type=typ, + chk=self, + # This is not a real attribute lookup so don't mess with deferring nodes. + no_deferral=True, + module_symbol_table=module_symbol_table, + ) + return not watcher.has_new_errors() + + def get_expression_type(self, node: Expression, type_context: Type | None = None) -> Type: + return self.expr_checker.accept(node, type_context=type_context) + + def is_defined_in_stub(self, typ: Instance, /) -> bool: + return self.modules[typ.type.module_name].is_stub + + def check_deprecated(self, node: Node | None, context: Context) -> None: + """Warn if deprecated and not directly imported with a `from` statement.""" + if isinstance(node, Decorator): + node = node.func + if isinstance(node, (FuncDef, OverloadedFuncDef, TypeInfo)) and ( + node.deprecated is not None + ): + for imp in self.tree.imports: + if isinstance(imp, ImportFrom) and any(node.name == n[0] for n in imp.names): + break + else: + self.warn_deprecated(node, context) + + def warn_deprecated(self, node: Node | None, context: Context) -> None: + """Warn if deprecated.""" + if isinstance(node, Decorator): + node = node.func + if ( + isinstance(node, (FuncDef, OverloadedFuncDef, TypeInfo)) + and (deprecated := node.deprecated) is not None + and not self.is_typeshed_stub + and not any( + node.fullname == p or node.fullname.startswith(f"{p}.") + for p in self.options.deprecated_calls_exclude + ) + ): + warn = self.msg.note if self.options.report_deprecated_as_note else self.msg.fail + warn(deprecated, context, code=codes.DEPRECATED) + + def new_unique_dummy_name(self, namespace: str) -> str: + """Generate a name that is guaranteed to be unique for this TypeChecker instance.""" + name = f"dummy-{namespace}-{self._unique_id}" + self._unique_id += 1 + return name + + # leafs + + def visit_pass_stmt(self, o: PassStmt, /) -> None: + return None + + def visit_nonlocal_decl(self, o: NonlocalDecl, /) -> None: + return None + + def visit_global_decl(self, o: GlobalDecl, /) -> None: + return None + + +class TypeCheckerAsSemanticAnalyzer(SemanticAnalyzerCoreInterface): + """ + Adapts TypeChecker to the 
SemanticAnalyzerCoreInterface, + allowing most type expressions to be parsed during the TypeChecker pass. + + See ExpressionChecker.try_parse_as_type_expression() to understand how this + class is used. + """ + + _chk: TypeChecker + _names: dict[str, SymbolTableNode] + did_fail: bool + + def __init__(self, chk: TypeChecker, names: dict[str, SymbolTableNode]) -> None: + self._chk = chk + self._names = names + self.did_fail = False + + def lookup_qualified( + self, name: str, ctx: Context, suppress_errors: bool = False + ) -> SymbolTableNode | None: + sym = self._names.get(name) + # All names being looked up should have been previously gathered, + # even if the related SymbolTableNode does not refer to a valid SymbolNode + assert sym is not None, name + return sym + + def lookup_fully_qualified(self, fullname: str, /) -> SymbolTableNode: + ret = self.lookup_fully_qualified_or_none(fullname) + assert ret is not None, fullname + return ret + + def lookup_fully_qualified_or_none(self, fullname: str, /) -> SymbolTableNode | None: + try: + return self._chk.lookup_qualified(fullname) + except KeyError: + return None + + def fail( + self, + msg: str, + ctx: Context, + serious: bool = False, + *, + blocker: bool = False, + code: ErrorCode | None = None, + ) -> None: + self.did_fail = True + + def note(self, msg: str, ctx: Context, *, code: ErrorCode | None = None) -> None: + pass + + def incomplete_feature_enabled(self, feature: str, ctx: Context) -> bool: + if feature not in self._chk.options.enable_incomplete_feature: + self.fail("__ignored__", ctx) + return False + return True + + def record_incomplete_ref(self) -> None: + pass + + def defer(self, debug_context: Context | None = None, force_progress: bool = False) -> None: + pass + + def is_incomplete_namespace(self, fullname: str) -> bool: + return False + + @property + def final_iteration(self) -> bool: + return True + + def is_future_flag_set(self, flag: str) -> bool: + return self._chk.tree.is_future_flag_set(flag) + + @property + def is_stub_file(self) -> bool: + return self._chk.tree.is_stub + + def is_func_scope(self) -> bool: + # Return arbitrary value. + # + # This method is currently only used to decide whether to pair + # a fail() message with a note() message or not. Both of those + # message types are ignored. + return False + + @property + def type(self) -> TypeInfo | None: + return self._chk.type + + +class CollectArgTypeVarTypes(TypeTraverserVisitor): + """Collects the non-nested argument types in a set.""" + + def __init__(self) -> None: + self.arg_types: set[TypeVarType] = set() + + def visit_type_var(self, t: TypeVarType) -> None: + self.arg_types.add(t) + + +@overload +def conditional_types( + current_type: Type, + proposed_type_ranges: list[TypeRange] | None, + default: None = None, + *, + consider_runtime_isinstance: bool = True, +) -> tuple[Type | None, Type | None]: ... + + +@overload +def conditional_types( + current_type: Type, + proposed_type_ranges: list[TypeRange] | None, + default: Type, + *, + consider_runtime_isinstance: bool = True, +) -> tuple[Type, Type]: ... + + +def conditional_types( + current_type: Type, + proposed_type_ranges: list[TypeRange] | None, + default: Type | None = None, + *, + consider_runtime_isinstance: bool = True, +) -> tuple[Type | None, Type | None]: + """Takes in the current type and a proposed type of an expression. + + Returns a 2-tuple: + The first element is the proposed type, if the expression can be the proposed type. 
+ (or default, if default is set and the expression is a subtype of the proposed type). + The second element is the type it would hold if it was not the proposed type, if any. + (or default, if default is set and the expression is not a subtype of the proposed type). + + UninhabitedType means unreachable. + None means no new information can be inferred. + """ + if proposed_type_ranges is None: + # An isinstance check, but we don't understand the type + return current_type, default + + if not proposed_type_ranges: + # This is the case for `if isinstance(x, ())` which always returns False. + return UninhabitedType(), default + + if len(proposed_type_ranges) == 1: + # expand e.g. bool -> Literal[True] | Literal[False] + target = proposed_type_ranges[0].item + target = get_proper_type(target) + if isinstance(target, LiteralType) and ( + target.is_enum_literal() or isinstance(target.value, bool) + ): + enum_name = target.fallback.type.fullname + current_type = try_expanding_sum_type_to_union(current_type, enum_name) + + proper_type = get_proper_type(current_type) + # factorize over union types: isinstance(A|B, C) -> yes = A_yes | B_yes + if isinstance(proper_type, UnionType): + result: list[tuple[Type | None, Type | None]] = [ + conditional_types( + union_item, + proposed_type_ranges, + default=union_item, + consider_runtime_isinstance=consider_runtime_isinstance, + ) + for union_item in get_proper_types(proper_type.items) + ] + # separate list of tuples into two lists + yes_types, no_types = zip(*result) + proposed_type = make_simplified_union([t for t in yes_types if t is not None]) + else: + proposed_items = [type_range.item for type_range in proposed_type_ranges] + proposed_type = make_simplified_union(proposed_items) + + if isinstance(proper_type, AnyType): + return proposed_type, current_type + elif isinstance(proposed_type, AnyType): + # We don't really know much about the proposed type, so we shouldn't + # attempt to narrow anything. 
Instead, we broaden the expr to Any to + # avoid false positives + return proposed_type, default + elif not any(type_range.is_upper_bound for type_range in proposed_type_ranges) and ( + # concrete subtypes + is_proper_subtype(current_type, proposed_type, ignore_promotions=True) + # structural subtypes + or ( + ( + isinstance(proposed_type, CallableType) + or (isinstance(proposed_type, Instance) and proposed_type.type.is_protocol) + ) + and is_subtype(current_type, proposed_type, ignore_promotions=True) + ) + ): + # Expression is always of one of the types in proposed_type_ranges + return default, UninhabitedType() + elif not is_overlapping_types(current_type, proposed_type, ignore_promotions=True): + # Expression is never of any type in proposed_type_ranges + return UninhabitedType(), default + else: + # we can only restrict when the type is precise, not bounded + proposed_precise_type = UnionType.make_union( + [ + type_range.item + for type_range in proposed_type_ranges + if not type_range.is_upper_bound + ] + ) + remaining_type = restrict_subtype_away( + current_type, + proposed_precise_type, + consider_runtime_isinstance=consider_runtime_isinstance, + ) + return proposed_type, remaining_type + + +def conditional_types_to_typemaps( + expr: Expression, yes_type: Type | None, no_type: Type | None +) -> tuple[TypeMap, TypeMap]: + expr = collapse_walrus(expr) + maps: list[TypeMap] = [] + for typ in (yes_type, no_type): + proper_type = get_proper_type(typ) + if isinstance(proper_type, UninhabitedType): + maps.append(None) + elif proper_type is None: + maps.append({}) + else: + assert typ is not None + maps.append({expr: typ}) + + return cast(tuple[TypeMap, TypeMap], tuple(maps)) + + +def gen_unique_name(base: str, table: SymbolTable) -> str: + """Generate a name that does not appear in table by appending numbers to base.""" + if base not in table: + return base + i = 1 + while base + str(i) in table: + i += 1 + return base + str(i) + + +def is_true_literal(n: Expression) -> bool: + """Returns true if this expression is the 'True' literal/keyword.""" + return refers_to_fullname(n, "builtins.True") or isinstance(n, IntExpr) and n.value != 0 + + +def is_false_literal(n: Expression) -> bool: + """Returns true if this expression is the 'False' literal/keyword.""" + return refers_to_fullname(n, "builtins.False") or isinstance(n, IntExpr) and n.value == 0 + + +def is_literal_none(n: Expression) -> bool: + """Returns true if this expression is the 'None' literal/keyword.""" + return isinstance(n, NameExpr) and n.fullname == "builtins.None" + + +def is_literal_not_implemented(n: Expression | None) -> bool: + return isinstance(n, NameExpr) and n.fullname == "builtins.NotImplemented" + + +def _is_empty_generator_function(func: FuncItem) -> bool: + """ + Checks whether a function's body is 'return; yield' (the yield being added only + to promote the function into a generator function). + """ + body = func.body.body + return ( + len(body) == 2 + and isinstance(ret_stmt := body[0], ReturnStmt) + and (ret_stmt.expr is None or is_literal_none(ret_stmt.expr)) + and isinstance(expr_stmt := body[1], ExpressionStmt) + and isinstance(yield_expr := expr_stmt.expr, YieldExpr) + and (yield_expr.expr is None or is_literal_none(yield_expr.expr)) + ) + + +def builtin_item_type(tp: Type) -> Type | None: + """Get the item type of a builtin container. + + If 'tp' is not one of the built containers (these includes NamedTuple and TypedDict) + or if the container is not parameterized (like List or List[Any]) + return None. 
This function is used to narrow optional types in situations like this: + + x: Optional[int] + if x in (1, 2, 3): + x + 42 # OK + + Note: this is only OK for built-in containers, where we know the behavior + of __contains__. + """ + tp = get_proper_type(tp) + + if isinstance(tp, Instance): + if tp.type.fullname in [ + "builtins.list", + "builtins.tuple", + "builtins.dict", + "builtins.set", + "builtins.frozenset", + "_collections_abc.dict_keys", + "typing.KeysView", + ]: + if not tp.args: + # TODO: fix tuple in lib-stub/builtins.pyi (it should be generic). + return None + if not isinstance(get_proper_type(tp.args[0]), AnyType): + return tp.args[0] + elif isinstance(tp, TupleType): + normalized_items = [] + for it in tp.items: + # This use case is probably rare, but not handling unpacks here can cause crashes. + if isinstance(it, UnpackType): + unpacked = get_proper_type(it.type) + if isinstance(unpacked, TypeVarTupleType): + unpacked = get_proper_type(unpacked.upper_bound) + assert ( + isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple" + ) + normalized_items.append(unpacked.args[0]) + else: + normalized_items.append(it) + if all(not isinstance(it, AnyType) for it in get_proper_types(normalized_items)): + return make_simplified_union(normalized_items) # this type is not externally visible + elif isinstance(tp, TypedDictType): + # TypedDict always has non-optional string keys. Find the key type from the Mapping + # base class. + for base in tp.fallback.type.mro: + if base.fullname == "typing.Mapping": + return map_instance_to_supertype(tp.fallback, base).args[0] + assert False, "No Mapping base class found for TypedDict fallback" + return None + + +def and_conditional_maps(m1: TypeMap, m2: TypeMap, use_meet: bool = False) -> TypeMap: + """Calculate what information we can learn from the truth of (e1 and e2) + in terms of the information that we can learn from the truth of e1 and + the truth of e2. + """ + + if m1 is None or m2 is None: + # One of the conditions can never be true. + return None + # Both conditions can be true; combine the information. Anything + # we learn from either conditions' truth is valid. If the same + # expression's type is refined by both conditions, we somewhat + # arbitrarily give precedence to m2 unless m1 value is Any. + # In the future, we could use an intersection type or meet_types(). + result = m2.copy() + m2_keys = {literal_hash(n2) for n2 in m2} + for n1 in m1: + if literal_hash(n1) not in m2_keys or isinstance(get_proper_type(m1[n1]), AnyType): + result[n1] = m1[n1] + if use_meet: + # For now, meet common keys only if specifically requested. + # This is currently used for tuple types narrowing, where having + # a precise result is important. + for n1 in m1: + for n2 in m2: + if literal_hash(n1) == literal_hash(n2): + result[n1] = meet_types(m1[n1], m2[n2]) + return result + + +def or_conditional_maps(m1: TypeMap, m2: TypeMap, coalesce_any: bool = False) -> TypeMap: + """Calculate what information we can learn from the truth of (e1 or e2) + in terms of the information that we can learn from the truth of e1 and + the truth of e2. If coalesce_any is True, consider Any a supertype when + joining restrictions. + """ + + if m1 is None: + return m2 + if m2 is None: + return m1 + # Both conditions can be true. Combine information about + # expressions whose type is refined by both conditions. (We do not + # learn anything about expressions whose type is refined by only + # one condition.) 
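+    # For an expression refined by both maps we normally take the union of the two
+    # refinements; when coalesce_any is set and the first map refined it to Any,
+    # that Any is kept as is.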
+ result: dict[Expression, Type] = {} + for n1 in m1: + for n2 in m2: + if literal_hash(n1) == literal_hash(n2): + if coalesce_any and isinstance(get_proper_type(m1[n1]), AnyType): + result[n1] = m1[n1] + else: + result[n1] = make_simplified_union([m1[n1], m2[n2]]) + return result + + +def reduce_conditional_maps( + type_maps: list[tuple[TypeMap, TypeMap]], use_meet: bool = False +) -> tuple[TypeMap, TypeMap]: + """Reduces a list containing pairs of if/else TypeMaps into a single pair. + + We "and" together all of the if TypeMaps and "or" together the else TypeMaps. So + for example, if we had the input: + + [ + ({x: TypeIfX, shared: TypeIfShared1}, {x: TypeElseX, shared: TypeElseShared1}), + ({y: TypeIfY, shared: TypeIfShared2}, {y: TypeElseY, shared: TypeElseShared2}), + ] + + ...we'd return the output: + + ( + {x: TypeIfX, y: TypeIfY, shared: PseudoIntersection[TypeIfShared1, TypeIfShared2]}, + {shared: Union[TypeElseShared1, TypeElseShared2]}, + ) + + ...where "PseudoIntersection[X, Y] == Y" because mypy actually doesn't understand intersections + yet, so we settle for just arbitrarily picking the right expr's type. + + We only retain the shared expression in the 'else' case because we don't actually know + whether x was refined or y was refined -- only just that one of the two was refined. + """ + if len(type_maps) == 0: + return {}, {} + elif len(type_maps) == 1: + return type_maps[0] + else: + final_if_map, final_else_map = type_maps[0] + for if_map, else_map in type_maps[1:]: + final_if_map = and_conditional_maps(final_if_map, if_map, use_meet=use_meet) + final_else_map = or_conditional_maps(final_else_map, else_map) + + return final_if_map, final_else_map + + +def convert_to_typetype(type_map: TypeMap) -> TypeMap: + converted_type_map: dict[Expression, Type] = {} + if type_map is None: + return None + for expr, typ in type_map.items(): + t = typ + if isinstance(t, TypeVarType): + t = t.upper_bound + # TODO: should we only allow unions of instances as per PEP 484? 
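+        # For now unions, instances and None are accepted; any other refined type
+        # aborts the conversion and returns an empty (no-op) type map.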
+ if not isinstance(get_proper_type(t), (UnionType, Instance, NoneType)): + # unknown type; error was likely reported earlier + return {} + converted_type_map[expr] = TypeType.make_normalized(typ) + return converted_type_map + + +def flatten(t: Expression) -> list[Expression]: + """Flatten a nested sequence of tuples/lists into one list of nodes.""" + if isinstance(t, (TupleExpr, ListExpr)): + return [b for a in t.items for b in flatten(a)] + elif isinstance(t, StarExpr): + return flatten(t.expr) + else: + return [t] + + +def flatten_types(t: Type) -> list[Type]: + """Flatten a nested sequence of tuples into one list of nodes.""" + t = get_proper_type(t) + if isinstance(t, TupleType): + return [b for a in t.items for b in flatten_types(a)] + elif is_named_instance(t, "builtins.tuple"): + return [t.args[0]] + else: + return [t] + + +def expand_func(defn: FuncItem, map: dict[TypeVarId, Type]) -> FuncItem: + visitor = TypeTransformVisitor(map) + ret = visitor.node(defn) + assert isinstance(ret, FuncItem) + return ret + + +class TypeTransformVisitor(TransformVisitor): + def __init__(self, map: dict[TypeVarId, Type]) -> None: + super().__init__() + self.map = map + + def type(self, type: Type) -> Type: + return expand_type(type, self.map) + + +def are_argument_counts_overlapping(t: CallableType, s: CallableType) -> bool: + """Can a single call match both t and s, based just on positional argument counts?""" + min_args = max(t.min_args, s.min_args) + max_args = min(t.max_possible_positional_args(), s.max_possible_positional_args()) + return min_args <= max_args + + +def expand_callable_variants(c: CallableType) -> list[CallableType]: + """Expand a generic callable using all combinations of type variables' values/bounds.""" + for tv in c.variables: + # We need to expand self-type before other variables, because this is the only + # type variable that can have other type variables in the upper bound. + if tv.id.is_self(): + c = expand_type(c, {tv.id: tv.upper_bound}).copy_modified( + variables=[v for v in c.variables if not v.id.is_self()] + ) + break + + if not c.is_generic(): + # Fast path. + return [c] + + tvar_values = [] + for tvar in c.variables: + if isinstance(tvar, TypeVarType) and tvar.values: + tvar_values.append(tvar.values) + else: + tvar_values.append([tvar.upper_bound]) + + variants = [] + for combination in itertools.product(*tvar_values): + tvar_map = {tv.id: subst for (tv, subst) in zip(c.variables, combination)} + variants.append(expand_type(c, tvar_map).copy_modified(variables=[])) + return variants + + +def is_unsafe_overlapping_overload_signatures( + signature: CallableType, + other: CallableType, + class_type_vars: list[TypeVarLikeType], + partial_only: bool = True, +) -> bool: + """Check if two overloaded signatures are unsafely overlapping or partially overlapping. + + We consider two functions 's' and 't' to be unsafely overlapping if three + conditions hold: + + 1. s's parameters are partially overlapping with t's. i.e. there are calls that are + valid for both signatures. + 2. for these common calls, some of t's parameters types are wider that s's. + 3. s's return type is NOT a subset of t's. + + Note that we use subset rather than subtype relationship in these checks because: + * Overload selection happens at runtime, not statically. + * This results in more lenient behavior. + This can cause false negatives (e.g. if overloaded function returns an externally + visible attribute with invariant type), but such situations are rare. 
In general, + overloads in Python are generally unsafe, so we intentionally try to avoid giving + non-actionable errors (see more details in comments below). + + Assumes that 'signature' appears earlier in the list of overload + alternatives then 'other' and that their argument counts are overlapping. + """ + # Try detaching callables from the containing class so that all TypeVars + # are treated as being free, i.e. the signature is as seen from inside the class, + # where "self" is not yet bound to anything. + signature = detach_callable(signature, class_type_vars) + other = detach_callable(other, class_type_vars) + + # Note: We repeat this check twice in both directions compensate for slight + # asymmetries in 'is_callable_compatible'. + + for sig_variant in expand_callable_variants(signature): + for other_variant in expand_callable_variants(other): + # Using only expanded callables may cause false negatives, we can add + # more variants (e.g. using inference between callables) in the future. + if is_subset_no_promote(sig_variant.ret_type, other_variant.ret_type): + continue + if not ( + is_callable_compatible( + sig_variant, + other_variant, + is_compat=is_overlapping_types_for_overload, + check_args_covariantly=False, + is_proper_subtype=False, + is_compat_return=lambda l, r: not is_subset_no_promote(l, r), + allow_partial_overlap=True, + ) + or is_callable_compatible( + other_variant, + sig_variant, + is_compat=is_overlapping_types_for_overload, + check_args_covariantly=True, + is_proper_subtype=False, + is_compat_return=lambda l, r: not is_subset_no_promote(r, l), + allow_partial_overlap=True, + ) + ): + continue + # Using the same `allow_partial_overlap` flag as before, can cause false + # negatives in case where star argument is used in a catch-all fallback overload. + # But again, practicality beats purity here. + if not partial_only or not is_callable_compatible( + other_variant, + sig_variant, + is_compat=is_subset_no_promote, + check_args_covariantly=True, + is_proper_subtype=False, + ignore_return=True, + allow_partial_overlap=True, + ): + return True + return False + + +def detach_callable(typ: CallableType, class_type_vars: list[TypeVarLikeType]) -> CallableType: + """Ensures that the callable's type variables are 'detached' and independent of the context. + + A callable normally keeps track of the type variables it uses within its 'variables' field. + However, if the callable is from a method and that method is using a class type variable, + the callable will not keep track of that type variable since it belongs to the class. + """ + if not class_type_vars: + # Fast path, nothing to update. + return typ + return typ.copy_modified(variables=list(typ.variables) + class_type_vars) + + +def overload_can_never_match(signature: CallableType, other: CallableType) -> bool: + """Check if the 'other' method can never be matched due to 'signature'. + + This can happen if signature's parameters are all strictly broader then + other's parameters. + + Assumes that both signatures have overlapping argument counts. + """ + # The extra erasure is needed to prevent spurious errors + # in situations where an `Any` overload is used as a fallback + # for an overload with type variables. The spurious error appears + # because the type variables turn into `Any` during unification in + # the below subtype check and (surprisingly?) `is_proper_subtype(Any, Any)` + # returns `True`. + # TODO: find a cleaner solution instead of this ad-hoc erasure. 
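+    # erase_def_to_union_or_bound replaces each type variable with the union of its
+    # values (or with its upper bound) before the compatibility check below.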
+    exp_signature = expand_type(
+        signature, {tvar.id: erase_def_to_union_or_bound(tvar) for tvar in signature.variables}
+    )
+    return is_callable_compatible(
+        exp_signature, other, is_compat=is_more_precise, is_proper_subtype=True, ignore_return=True
+    )
+
+
+def is_more_general_arg_prefix(t: FunctionLike, s: FunctionLike) -> bool:
+    """Does t have wider arguments than s?"""
+    # TODO: should an overload with additional items be allowed to be more
+    # general than one with fewer items (or just one item)?
+    if isinstance(t, CallableType):
+        if isinstance(s, CallableType):
+            return is_callable_compatible(
+                t, s, is_compat=is_proper_subtype, is_proper_subtype=True, ignore_return=True
+            )
+    elif isinstance(t, FunctionLike):
+        if isinstance(s, FunctionLike):
+            if len(t.items) == len(s.items):
+                return all(
+                    is_same_arg_prefix(items, itemt) for items, itemt in zip(t.items, s.items)
+                )
+    return False
+
+
+def is_same_arg_prefix(t: CallableType, s: CallableType) -> bool:
+    return is_callable_compatible(
+        t,
+        s,
+        is_compat=is_same_type,
+        is_proper_subtype=True,
+        ignore_return=True,
+        check_args_covariantly=True,
+        ignore_pos_arg_names=True,
+    )
+
+
+def infer_operator_assignment_method(typ: Type, operator: str) -> tuple[bool, str]:
+    """Determine if operator assignment on given value type is in-place, and the method name.
+
+    For example, if operator is '+', return (True, '__iadd__') or (False, '__add__')
+    depending on which method is supported by the type.
+    """
+    typ = get_proper_type(typ)
+    method = operators.op_methods[operator]
+    existing_method = None
+    if isinstance(typ, Instance):
+        existing_method = _find_inplace_method(typ, method, operator)
+    elif isinstance(typ, TypedDictType):
+        existing_method = _find_inplace_method(typ.fallback, method, operator)
+
+    if existing_method is not None:
+        return True, existing_method
+    return False, method
+
+
+def _find_inplace_method(inst: Instance, method: str, operator: str) -> str | None:
+    if operator in operators.ops_with_inplace_method:
+        inplace_method = "__i" + method[2:]
+        if inst.type.has_readable_member(inplace_method):
+            return inplace_method
+    return None
+
+
+def is_valid_inferred_type(
+    typ: Type, options: Options, is_lvalue_final: bool = False, is_lvalue_member: bool = False
+) -> bool:
+    """Is an inferred type valid, needing no further refinement?
+
+    Examples of invalid types include the None type (when we are not assigning
+    None to a final lvalue) or a list type whose item type is still undetermined
+    (uninhabited), e.g. one inferred from an empty list literal.
+
+    When not doing strict Optional checking, all types containing None are
+    invalid. When doing strict Optional checking, only None and types that are
+    incompletely defined (i.e. contain UninhabitedType) are invalid.
+    """
+    proper_type = get_proper_type(typ)
+    if isinstance(proper_type, NoneType):
+        # If the lvalue is final, we may immediately infer NoneType when the
+        # initializer is None.
+        #
+        # If not, we want to defer making this decision. The final inferred
+        # type could either be NoneType or an Optional type, depending on
+        # the context. This resolution happens in leave_partial_types when
+        # we pop a partial types scope.
+        return is_lvalue_final or (not is_lvalue_member and options.allow_redefinition_new)
+    elif isinstance(proper_type, UninhabitedType):
+        return False
+    return not typ.accept(InvalidInferredTypes())
+
+
+class InvalidInferredTypes(BoolTypeQuery):
+    """Find type components that are not valid for an inferred type.
+
+    These include the erased type and any uninhabited types resulting from failed
+    (ambiguous) type inference.
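+
+    For example (illustrative): for a bare assignment
+
+        x = []
+
+    the item type of the inferred list type is an ambiguous UninhabitedType, so the
+    inference result is not yet considered valid and the checker typically keeps a
+    partial type for 'x' until more information becomes available.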
+    """
+
+    def __init__(self) -> None:
+        super().__init__(ANY_STRATEGY)
+
+    def visit_uninhabited_type(self, t: UninhabitedType) -> bool:
+        return t.ambiguous
+
+    def visit_erased_type(self, t: ErasedType) -> bool:
+        # This can happen inside a lambda.
+        return True
+
+    def visit_type_var(self, t: TypeVarType) -> bool:
+        # This is needed to prevent leaking into partial types during
+        # multi-step type inference.
+        return t.id.is_meta_var()
+
+    def visit_tuple_type(self, t: TupleType, /) -> bool:
+        # Exclude fallback to avoid bogus "need type annotation" errors
+        return self.query_types(t.items)
+
+
+class SetNothingToAny(TypeTranslator):
+    """Replace all ambiguous Uninhabited types with Any (to avoid spurious extra errors)."""
+
+    def visit_uninhabited_type(self, t: UninhabitedType) -> Type:
+        if t.ambiguous:
+            return AnyType(TypeOfAny.from_error)
+        return t
+
+    def visit_type_alias_type(self, t: TypeAliasType) -> Type:
+        # Target of the alias cannot be an ambiguous UninhabitedType, so we just
+        # replace the arguments.
+        return t.copy_modified(args=[a.accept(self) for a in t.args])
+
+
+def is_classmethod_node(node: SymbolNode | None) -> bool | None:
+    """Find out if a node describes a classmethod."""
+    if isinstance(node, Decorator):
+        node = node.func
+    if isinstance(node, FuncDef):
+        return node.is_class
+    if isinstance(node, Var):
+        return node.is_classmethod
+    return None
+
+
+def is_node_static(node: SymbolNode | None) -> bool | None:
+    """Find out if a node describes a static method."""
+    if isinstance(node, Decorator):
+        node = node.func
+    if isinstance(node, FuncDef):
+        return node.is_static
+    if isinstance(node, Var):
+        return node.is_staticmethod
+    return None
+
+
+TKey = TypeVar("TKey")
+TValue = TypeVar("TValue")
+
+
+class DisjointDict(Generic[TKey, TValue]):
+    """A variation of the union-find algorithm/data structure where instead of keeping
+    track of just disjoint sets, we keep track of disjoint dicts -- that is, multiple
+    Set[Key] -> Set[Value] mappings, where each mapping's keys are guaranteed to be disjoint.
+
+    This data structure is currently used exclusively by 'group_comparison_operands' below
+    to merge chains of '==' and 'is' comparisons when two or more chains use the same expression
+    in best-case O(n), where n is the number of operands.
+
+    Specifically, the `add_mapping()` and `items()` functions will take on average
+    O(k + v) and O(n) respectively, where k and v are the number of keys and values we're adding
+    for a given chain. Note that k <= n and v <= n.
+
+    We hit these average/best-case scenarios for most user code: e.g. when the user has just
+    a single chain like 'a == b == c == d == ...' or multiple disjoint chains like
+    'a==b < c==d < e==f < ...'. (Note that a naive iterative merging would be O(n^2) for
+    the latter case).
+
+    In comparison, this data structure will make 'group_comparison_operands' have a worst-case
+    runtime of O(n*log(n)): 'add_mapping()' and 'items()' are worst-case O(k*log(n) + v) and
+    O(k*log(n)) respectively. This happens only in the rare case where the user keeps repeatedly
+    making disjoint mappings before merging them in a way that persistently dodges the path
+    compression optimization in '_lookup_root_id', which would end up constructing a single
+    tree of height log_2(n). This makes root lookups no longer amortized constant time when we
+    finally call 'items()'.
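+
+    A small usage sketch (illustrative only; the keys and values are arbitrary):
+
+        d: DisjointDict[str, int] = DisjointDict()
+        d.add_mapping({"a", "b"}, {1})
+        d.add_mapping({"c"}, {2})
+        d.add_mapping({"b", "c"}, {3})  # merges the two previous mappings
+        d.items()  # -> [({"a", "b", "c"}, {1, 2, 3})]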
+ """ + + def __init__(self) -> None: + # Each key maps to a unique ID + self._key_to_id: dict[TKey, int] = {} + + # Each id points to the parent id, forming a forest of upwards-pointing trees. If the + # current id already is the root, it points to itself. We gradually flatten these trees + # as we perform root lookups: eventually all nodes point directly to its root. + self._id_to_parent_id: dict[int, int] = {} + + # Each root id in turn maps to the set of values. + self._root_id_to_values: dict[int, set[TValue]] = {} + + def add_mapping(self, keys: set[TKey], values: set[TValue]) -> None: + """Adds a 'Set[TKey] -> Set[TValue]' mapping. If there already exists a mapping + containing one or more of the given keys, we merge the input mapping with the old one. + + Note that the given set of keys must be non-empty -- otherwise, nothing happens. + """ + if not keys: + return + + subtree_roots = [self._lookup_or_make_root_id(key) for key in keys] + new_root = subtree_roots[0] + + root_values = self._root_id_to_values[new_root] + root_values.update(values) + for subtree_root in subtree_roots[1:]: + if subtree_root == new_root or subtree_root not in self._root_id_to_values: + continue + self._id_to_parent_id[subtree_root] = new_root + root_values.update(self._root_id_to_values.pop(subtree_root)) + + def items(self) -> list[tuple[set[TKey], set[TValue]]]: + """Returns all disjoint mappings in key-value pairs.""" + root_id_to_keys: dict[int, set[TKey]] = {} + for key in self._key_to_id: + root_id = self._lookup_root_id(key) + if root_id not in root_id_to_keys: + root_id_to_keys[root_id] = set() + root_id_to_keys[root_id].add(key) + + output = [] + for root_id, keys in root_id_to_keys.items(): + output.append((keys, self._root_id_to_values[root_id])) + + return output + + def _lookup_or_make_root_id(self, key: TKey) -> int: + if key in self._key_to_id: + return self._lookup_root_id(key) + else: + new_id = len(self._key_to_id) + self._key_to_id[key] = new_id + self._id_to_parent_id[new_id] = new_id + self._root_id_to_values[new_id] = set() + return new_id + + def _lookup_root_id(self, key: TKey) -> int: + i = self._key_to_id[key] + while i != self._id_to_parent_id[i]: + # Optimization: make keys directly point to their grandparents to speed up + # future traversals. This prevents degenerate trees of height n from forming. + new_parent = self._id_to_parent_id[self._id_to_parent_id[i]] + self._id_to_parent_id[i] = new_parent + i = new_parent + return i + + +def group_comparison_operands( + pairwise_comparisons: Iterable[tuple[str, Expression, Expression]], + operand_to_literal_hash: Mapping[int, Key], + operators_to_group: set[str], +) -> list[tuple[str, list[int]]]: + """Group a series of comparison operands together chained by any operand + in the 'operators_to_group' set. All other pairwise operands are kept in + groups of size 2. + + For example, suppose we have the input comparison expression: + + x0 == x1 == x2 < x3 < x4 is x5 is x6 is not x7 is not x8 + + If we get these expressions in a pairwise way (e.g. 
by calling ComparisonExpr's + 'pairwise()' method), we get the following as input: + + [('==', x0, x1), ('==', x1, x2), ('<', x2, x3), ('<', x3, x4), + ('is', x4, x5), ('is', x5, x6), ('is not', x6, x7), ('is not', x7, x8)] + + If `operators_to_group` is the set {'==', 'is'}, this function will produce + the following "simplified operator list": + + [("==", [0, 1, 2]), ("<", [2, 3]), ("<", [3, 4]), + ("is", [4, 5, 6]), ("is not", [6, 7]), ("is not", [7, 8])] + + Note that (a) we yield *indices* to the operands rather then the operand + expressions themselves and that (b) operands used in a consecutive chain + of '==' or 'is' are grouped together. + + If two of these chains happen to contain operands with the same underlying + literal hash (e.g. are assignable and correspond to the same expression), + we combine those chains together. For example, if we had: + + same == x < y == same + + ...and if 'operand_to_literal_hash' contained the same values for the indices + 0 and 3, we'd produce the following output: + + [("==", [0, 1, 2, 3]), ("<", [1, 2])] + + But if the 'operand_to_literal_hash' did *not* contain an entry, we'd instead + default to returning: + + [("==", [0, 1]), ("<", [1, 2]), ("==", [2, 3])] + + This function is currently only used to assist with type-narrowing refinements + and is extracted out to a helper function so we can unit test it. + """ + groups: dict[str, DisjointDict[Key, int]] = {op: DisjointDict() for op in operators_to_group} + + simplified_operator_list: list[tuple[str, list[int]]] = [] + last_operator: str | None = None + current_indices: set[int] = set() + current_hashes: set[Key] = set() + for i, (operator, left_expr, right_expr) in enumerate(pairwise_comparisons): + if last_operator is None: + last_operator = operator + + if current_indices and (operator != last_operator or operator not in operators_to_group): + # If some of the operands in the chain are assignable, defer adding it: we might + # end up needing to merge it with other chains that appear later. + if not current_hashes: + simplified_operator_list.append((last_operator, sorted(current_indices))) + else: + groups[last_operator].add_mapping(current_hashes, current_indices) + last_operator = operator + current_indices = set() + current_hashes = set() + + # Note: 'i' corresponds to the left operand index, so 'i + 1' is the + # right operand. + current_indices.add(i) + current_indices.add(i + 1) + + # We only ever want to combine operands/combine chains for these operators + if operator in operators_to_group: + left_hash = operand_to_literal_hash.get(i) + if left_hash is not None: + current_hashes.add(left_hash) + right_hash = operand_to_literal_hash.get(i + 1) + if right_hash is not None: + current_hashes.add(right_hash) + + if last_operator is not None: + if not current_hashes: + simplified_operator_list.append((last_operator, sorted(current_indices))) + else: + groups[last_operator].add_mapping(current_hashes, current_indices) + + # Now that we know which chains happen to contain the same underlying expressions + # and can be merged together, add in this info back to the output. 
+ for operator, disjoint_dict in groups.items(): + for keys, indices in disjoint_dict.items(): + simplified_operator_list.append((operator, sorted(indices))) + + # For stability, reorder list by the first operand index to appear + simplified_operator_list.sort(key=lambda item: item[1][0]) + return simplified_operator_list + + +def is_typed_callable(c: Type | None) -> bool: + c = get_proper_type(c) + if not c or not isinstance(c, CallableType): + return False + return not all( + isinstance(t, AnyType) and t.type_of_any == TypeOfAny.unannotated + for t in get_proper_types(c.arg_types + [c.ret_type]) + ) + + +def is_untyped_decorator(typ: Type | None) -> bool: + typ = get_proper_type(typ) + if not typ: + return True + elif isinstance(typ, CallableType): + return not is_typed_callable(typ) + elif isinstance(typ, Instance): + method = typ.type.get_method("__call__") + if method: + if isinstance(method, Decorator): + return is_untyped_decorator(method.func.type) or is_untyped_decorator( + method.var.type + ) + + if isinstance(method.type, Overloaded): + return any(is_untyped_decorator(item) for item in method.type.items) + else: + return not is_typed_callable(method.type) + else: + return False + elif isinstance(typ, Overloaded): + return any(is_untyped_decorator(item) for item in typ.items) + return True + + +def is_static(func: FuncBase | Decorator) -> bool: + if isinstance(func, Decorator): + return is_static(func.func) + elif isinstance(func, FuncBase): + return func.is_static + assert False, f"Unexpected func type: {type(func)}" + + +def is_property(defn: SymbolNode) -> bool: + if isinstance(defn, FuncDef): + return defn.is_property + if isinstance(defn, Decorator): + return defn.func.is_property + if isinstance(defn, OverloadedFuncDef): + if defn.items and isinstance(defn.items[0], Decorator): + return defn.items[0].func.is_property + return False + + +def is_settable_property(defn: SymbolNode | None) -> TypeGuard[OverloadedFuncDef]: + if isinstance(defn, OverloadedFuncDef): + if defn.items and isinstance(defn.items[0], Decorator): + return defn.items[0].func.is_property + return False + + +def is_custom_settable_property(defn: SymbolNode | None) -> bool: + """Check if a node is a settable property with a non-trivial setter type. + + By non-trivial here we mean that it is known (i.e. definition was already type + checked), it is not Any, and it is different from the property getter type. + """ + if defn is None: + return False + if not is_settable_property(defn): + return False + first_item = defn.items[0] + assert isinstance(first_item, Decorator) + if not first_item.var.is_settable_property: + return False + var = first_item.var + if var.type is None or var.setter_type is None or isinstance(var.type, PartialType): + # The caller should defer in case of partial types or not ready variables. 
+ return False + setter_type = var.setter_type.arg_types[1] + if isinstance(get_proper_type(setter_type), AnyType): + return False + return not is_same_type(get_property_type(get_proper_type(var.type)), setter_type) + + +def get_property_type(t: ProperType) -> ProperType: + if isinstance(t, CallableType): + return get_proper_type(t.ret_type) + if isinstance(t, Overloaded): + return get_proper_type(t.items[0].ret_type) + return t + + +def is_subset_no_promote(left: Type, right: Type) -> bool: + return is_subtype(left, right, ignore_promotions=True, always_covariant=True) + + +def is_overlapping_types_for_overload(left: Type, right: Type) -> bool: + # Note that among other effects 'overlap_for_overloads' flag will effectively + # ignore possible overlap between type variables and None. This is technically + # unsafe, but unsafety is tiny and this prevents some common use cases like: + # @overload + # def foo(x: None) -> None: .. + # @overload + # def foo(x: T) -> Foo[T]: ... + return is_overlapping_types( + left, + right, + ignore_promotions=True, + prohibit_none_typevar_overlap=True, + overlap_for_overloads=True, + ) + + +def is_private(node_name: str) -> bool: + """Check if node is private to class definition.""" + return node_name.startswith("__") and not node_name.endswith("__") + + +def is_string_literal(typ: Type) -> bool: + strs = try_getting_str_literals_from_type(typ) + return strs is not None and len(strs) == 1 + + +def has_bool_item(typ: ProperType) -> bool: + """Return True if type is 'bool' or a union with a 'bool' item.""" + if is_named_instance(typ, "builtins.bool"): + return True + if isinstance(typ, UnionType): + return any(is_named_instance(item, "builtins.bool") for item in typ.items) + return False + + +def collapse_walrus(e: Expression) -> Expression: + """If an expression is an AssignmentExpr, pull out the assignment target. + + We don't make any attempt to pull out all the targets in code like `x := (y := z)`. + We could support narrowing those if that sort of code turns out to be common. + """ + if isinstance(e, AssignmentExpr): + return e.target + return e + + +def find_last_var_assignment_line(n: Node, v: Var) -> int: + """Find the highest line number of a potential assignment to variable within node. + + This supports local and global variables. + + Return -1 if no assignment was found. 
+ """ + visitor = VarAssignVisitor(v) + n.accept(visitor) + return visitor.last_line + + +class VarAssignVisitor(TraverserVisitor): + def __init__(self, v: Var) -> None: + self.last_line = -1 + self.lvalue = False + self.var_node = v + + def visit_assignment_stmt(self, s: AssignmentStmt) -> None: + self.lvalue = True + for lv in s.lvalues: + lv.accept(self) + self.lvalue = False + + def visit_name_expr(self, e: NameExpr) -> None: + if self.lvalue and e.node is self.var_node: + self.last_line = max(self.last_line, e.line) + + def visit_member_expr(self, e: MemberExpr) -> None: + old_lvalue = self.lvalue + self.lvalue = False + super().visit_member_expr(e) + self.lvalue = old_lvalue + + def visit_index_expr(self, e: IndexExpr) -> None: + old_lvalue = self.lvalue + self.lvalue = False + super().visit_index_expr(e) + self.lvalue = old_lvalue + + def visit_with_stmt(self, s: WithStmt) -> None: + self.lvalue = True + for lv in s.target: + if lv is not None: + lv.accept(self) + self.lvalue = False + s.body.accept(self) + + def visit_for_stmt(self, s: ForStmt) -> None: + self.lvalue = True + s.index.accept(self) + self.lvalue = False + s.body.accept(self) + if s.else_body: + s.else_body.accept(self) + + def visit_assignment_expr(self, e: AssignmentExpr) -> None: + self.lvalue = True + e.target.accept(self) + self.lvalue = False + e.value.accept(self) + + def visit_as_pattern(self, p: AsPattern) -> None: + if p.pattern is not None: + p.pattern.accept(self) + if p.name is not None: + self.lvalue = True + p.name.accept(self) + self.lvalue = False + + def visit_starred_pattern(self, p: StarredPattern) -> None: + if p.capture is not None: + self.lvalue = True + p.capture.accept(self) + self.lvalue = False + + +def is_ambiguous_mix_of_enums(types: list[Type]) -> bool: + """Do types have IntEnum/StrEnum types that are potentially overlapping with other types? + + If True, we shouldn't attempt type narrowing based on enum values, as it gets + too ambiguous. + + For example, return True if there's an 'int' type together with an IntEnum literal. + However, IntEnum together with a literal of the same IntEnum type is not ambiguous. + """ + # We need these things for this to be ambiguous: + # (1) an IntEnum or StrEnum type + # (2) either a different IntEnum/StrEnum type or a non-enum type ("") + # + # It would be slightly more correct to calculate this separately for IntEnum and + # StrEnum related types, as an IntEnum can't be confused with a StrEnum. + return len(_ambiguous_enum_variants(types)) > 1 + + +def _ambiguous_enum_variants(types: list[Type]) -> set[str]: + result = set() + for t in types: + t = get_proper_type(t) + if isinstance(t, UnionType): + result.update(_ambiguous_enum_variants(t.items)) + elif isinstance(t, Instance): + if t.last_known_value: + result.update(_ambiguous_enum_variants([t.last_known_value])) + elif t.type.is_enum and any( + base.fullname in ("enum.IntEnum", "enum.StrEnum") for base in t.type.mro + ): + result.add(t.type.fullname) + elif not t.type.is_enum: + # These might compare equal to IntEnum/StrEnum types (e.g. 
Decimal), so + # let's be conservative + result.add("") + elif isinstance(t, LiteralType): + result.update(_ambiguous_enum_variants([t.fallback])) + elif isinstance(t, NoneType): + pass + else: + result.add("") + return result + + +def is_typeddict_type_context(lvalue_type: Type | None) -> bool: + if lvalue_type is None: + return False + lvalue_proper = get_proper_type(lvalue_type) + return isinstance(lvalue_proper, TypedDictType) + + +def is_method(node: SymbolNode | None) -> bool: + if isinstance(node, OverloadedFuncDef): + return not node.is_property + if isinstance(node, Decorator): + return not node.var.is_property + return isinstance(node, FuncDef) diff --git a/.venv/lib/python3.12/site-packages/mypy/checker_shared.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/checker_shared.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..b023516 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/checker_shared.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/checker_shared.py b/.venv/lib/python3.12/site-packages/mypy/checker_shared.py new file mode 100644 index 0000000..0014d2c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/checker_shared.py @@ -0,0 +1,354 @@ +"""Shared definitions used by different parts of type checker.""" + +from __future__ import annotations + +from abc import abstractmethod +from collections.abc import Iterator, Sequence +from contextlib import contextmanager +from typing import NamedTuple, overload + +from mypy_extensions import trait + +from mypy.errorcodes import ErrorCode +from mypy.errors import ErrorWatcher +from mypy.message_registry import ErrorMessage +from mypy.nodes import ( + ArgKind, + Context, + Expression, + FuncItem, + LambdaExpr, + MypyFile, + Node, + RefExpr, + SymbolNode, + TypeInfo, + Var, +) +from mypy.plugin import CheckerPluginInterface, Plugin +from mypy.types import ( + CallableType, + Instance, + LiteralValue, + Overloaded, + PartialType, + TupleType, + Type, + TypedDictType, + TypeType, +) +from mypy.typevars import fill_typevars + + +# An object that represents either a precise type or a type with an upper bound; +# it is important for correct type inference with isinstance. 
+class TypeRange(NamedTuple): + item: Type + is_upper_bound: bool # False => precise type + + +@trait +class ExpressionCheckerSharedApi: + @abstractmethod + def accept( + self, + node: Expression, + type_context: Type | None = None, + allow_none_return: bool = False, + always_allow_any: bool = False, + is_callee: bool = False, + ) -> Type: + raise NotImplementedError + + @abstractmethod + def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type: + raise NotImplementedError + + @abstractmethod + def check_call( + self, + callee: Type, + args: list[Expression], + arg_kinds: list[ArgKind], + context: Context, + arg_names: Sequence[str | None] | None = None, + callable_node: Expression | None = None, + callable_name: str | None = None, + object_type: Type | None = None, + original_type: Type | None = None, + ) -> tuple[Type, Type]: + raise NotImplementedError + + @abstractmethod + def transform_callee_type( + self, + callable_name: str | None, + callee: Type, + args: list[Expression], + arg_kinds: list[ArgKind], + context: Context, + arg_names: Sequence[str | None] | None = None, + object_type: Type | None = None, + ) -> Type: + raise NotImplementedError + + @abstractmethod + def method_fullname(self, object_type: Type, method_name: str) -> str | None: + raise NotImplementedError + + @abstractmethod + def check_method_call_by_name( + self, + method: str, + base_type: Type, + args: list[Expression], + arg_kinds: list[ArgKind], + context: Context, + original_type: Type | None = None, + ) -> tuple[Type, Type]: + raise NotImplementedError + + @abstractmethod + def visit_typeddict_index_expr( + self, td_type: TypedDictType, index: Expression, setitem: bool = False + ) -> tuple[Type, set[str]]: + raise NotImplementedError + + @abstractmethod + def infer_literal_expr_type(self, value: LiteralValue, fallback_name: str) -> Type: + raise NotImplementedError + + @abstractmethod + def analyze_static_reference( + self, + node: SymbolNode, + ctx: Context, + is_lvalue: bool, + *, + include_modules: bool = True, + suppress_errors: bool = False, + ) -> Type: + raise NotImplementedError + + +@trait +class TypeCheckerSharedApi(CheckerPluginInterface): + plugin: Plugin + module_refs: set[str] + scope: CheckerScope + checking_missing_await: bool + allow_constructor_cache: bool + + @property + @abstractmethod + def expr_checker(self) -> ExpressionCheckerSharedApi: + raise NotImplementedError + + @abstractmethod + def named_type(self, name: str) -> Instance: + raise NotImplementedError + + @abstractmethod + def lookup_typeinfo(self, fullname: str) -> TypeInfo: + raise NotImplementedError + + @abstractmethod + def lookup_type(self, node: Expression) -> Type: + raise NotImplementedError + + @abstractmethod + def handle_cannot_determine_type(self, name: str, context: Context) -> None: + raise NotImplementedError + + @abstractmethod + def handle_partial_var_type( + self, typ: PartialType, is_lvalue: bool, node: Var, context: Context + ) -> Type: + raise NotImplementedError + + @overload + @abstractmethod + def check_subtype( + self, + subtype: Type, + supertype: Type, + context: Context, + msg: str, + subtype_label: str | None = None, + supertype_label: str | None = None, + *, + notes: list[str] | None = None, + code: ErrorCode | None = None, + outer_context: Context | None = None, + ) -> bool: ... 
+ + @overload + @abstractmethod + def check_subtype( + self, + subtype: Type, + supertype: Type, + context: Context, + msg: ErrorMessage, + subtype_label: str | None = None, + supertype_label: str | None = None, + *, + notes: list[str] | None = None, + outer_context: Context | None = None, + ) -> bool: ... + + # Unfortunately, mypyc doesn't support abstract overloads yet. + @abstractmethod + def check_subtype( + self, + subtype: Type, + supertype: Type, + context: Context, + msg: str | ErrorMessage, + subtype_label: str | None = None, + supertype_label: str | None = None, + *, + notes: list[str] | None = None, + code: ErrorCode | None = None, + outer_context: Context | None = None, + ) -> bool: + raise NotImplementedError + + @abstractmethod + def get_final_context(self) -> bool: + raise NotImplementedError + + @overload + @abstractmethod + def conditional_types_with_intersection( + self, + expr_type: Type, + type_ranges: list[TypeRange] | None, + ctx: Context, + default: None = None, + ) -> tuple[Type | None, Type | None]: ... + + @overload + @abstractmethod + def conditional_types_with_intersection( + self, expr_type: Type, type_ranges: list[TypeRange] | None, ctx: Context, default: Type + ) -> tuple[Type, Type]: ... + + # Unfortunately, mypyc doesn't support abstract overloads yet. + @abstractmethod + def conditional_types_with_intersection( + self, + expr_type: Type, + type_ranges: list[TypeRange] | None, + ctx: Context, + default: Type | None = None, + ) -> tuple[Type | None, Type | None]: + raise NotImplementedError + + @abstractmethod + def check_deprecated(self, node: Node | None, context: Context) -> None: + raise NotImplementedError + + @abstractmethod + def warn_deprecated(self, node: Node | None, context: Context) -> None: + raise NotImplementedError + + @abstractmethod + def type_is_iterable(self, type: Type) -> bool: + raise NotImplementedError + + @abstractmethod + def iterable_item_type( + self, it: Instance | CallableType | TypeType | Overloaded, context: Context + ) -> Type: + raise NotImplementedError + + @abstractmethod + @contextmanager + def checking_await_set(self) -> Iterator[None]: + raise NotImplementedError + + @abstractmethod + def get_precise_awaitable_type(self, typ: Type, local_errors: ErrorWatcher) -> Type | None: + raise NotImplementedError + + @abstractmethod + def add_any_attribute_to_type(self, typ: Type, name: str) -> Type: + raise NotImplementedError + + @abstractmethod + def is_defined_in_stub(self, typ: Instance, /) -> bool: + raise NotImplementedError + + +class CheckerScope: + # We keep two stacks combined, to maintain the relative order + stack: list[TypeInfo | FuncItem | MypyFile] + + def __init__(self, module: MypyFile) -> None: + self.stack = [module] + + def current_function(self) -> FuncItem | None: + for e in reversed(self.stack): + if isinstance(e, FuncItem): + return e + return None + + def top_level_function(self) -> FuncItem | None: + """Return top-level non-lambda function.""" + for e in self.stack: + if isinstance(e, FuncItem) and not isinstance(e, LambdaExpr): + return e + return None + + def active_class(self) -> TypeInfo | None: + if isinstance(self.stack[-1], TypeInfo): + return self.stack[-1] + return None + + def enclosing_class(self, func: FuncItem | None = None) -> TypeInfo | None: + """Is there a class *directly* enclosing this function?""" + func = func or self.current_function() + assert func, "This method must be called from inside a function" + index = self.stack.index(func) + assert index, "CheckerScope stack must always 
start with a module" + enclosing = self.stack[index - 1] + if isinstance(enclosing, TypeInfo): + return enclosing + return None + + def active_self_type(self) -> Instance | TupleType | None: + """An instance or tuple type representing the current class. + + This returns None unless we are in class body or in a method. + In particular, inside a function nested in method this returns None. + """ + info = self.active_class() + if not info and self.current_function(): + info = self.enclosing_class() + if info: + return fill_typevars(info) + return None + + def current_self_type(self) -> Instance | TupleType | None: + """Same as active_self_type() but handle functions nested in methods.""" + for item in reversed(self.stack): + if isinstance(item, TypeInfo): + return fill_typevars(item) + return None + + def is_top_level(self) -> bool: + """Is current scope top-level (no classes or functions)?""" + return len(self.stack) == 1 + + @contextmanager + def push_function(self, item: FuncItem) -> Iterator[None]: + self.stack.append(item) + yield + self.stack.pop() + + @contextmanager + def push_class(self, info: TypeInfo) -> Iterator[None]: + self.stack.append(info) + yield + self.stack.pop() diff --git a/.venv/lib/python3.12/site-packages/mypy/checker_state.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/checker_state.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..1d8a46f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/checker_state.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/checker_state.py b/.venv/lib/python3.12/site-packages/mypy/checker_state.py new file mode 100644 index 0000000..9b988ad --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/checker_state.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +from collections.abc import Iterator +from contextlib import contextmanager +from typing import Final + +from mypy.checker_shared import TypeCheckerSharedApi + +# This is global mutable state. Don't add anything here unless there's a very +# good reason. + + +class TypeCheckerState: + # Wrap this in a class since it's faster that using a module-level attribute. + + def __init__(self, type_checker: TypeCheckerSharedApi | None) -> None: + # Value varies by file being processed + self.type_checker = type_checker + + @contextmanager + def set(self, value: TypeCheckerSharedApi) -> Iterator[None]: + saved = self.type_checker + self.type_checker = value + try: + yield + finally: + self.type_checker = saved + + +checker_state: Final = TypeCheckerState(type_checker=None) diff --git a/.venv/lib/python3.12/site-packages/mypy/checkexpr.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/checkexpr.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..7330ab4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/checkexpr.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/checkexpr.py b/.venv/lib/python3.12/site-packages/mypy/checkexpr.py new file mode 100644 index 0000000..03ebc50 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/checkexpr.py @@ -0,0 +1,6895 @@ +"""Expression type checker. 
This file is conceptually part of TypeChecker.""" + +from __future__ import annotations + +import enum +import itertools +import time +from collections import defaultdict +from collections.abc import Iterable, Iterator, Sequence +from contextlib import contextmanager, nullcontext +from typing import Callable, ClassVar, Final, Optional, cast, overload +from typing_extensions import TypeAlias as _TypeAlias, assert_never + +import mypy.checker +import mypy.errorcodes as codes +from mypy import applytype, erasetype, join, message_registry, nodes, operators, types +from mypy.argmap import ArgTypeExpander, map_actuals_to_formals, map_formals_to_actuals +from mypy.checker_shared import ExpressionCheckerSharedApi +from mypy.checkmember import analyze_member_access, has_operator +from mypy.checkstrformat import StringFormatterChecker +from mypy.constant_fold import constant_fold_expr +from mypy.erasetype import erase_type, remove_instance_last_known_values, replace_meta_vars +from mypy.errors import ErrorInfo, ErrorWatcher, report_internal_error +from mypy.expandtype import ( + expand_type, + expand_type_by_instance, + freshen_all_functions_type_vars, + freshen_function_type_vars, +) +from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type +from mypy.infer import ArgumentInferContext, infer_function_type_arguments, infer_type_arguments +from mypy.literals import literal +from mypy.maptype import map_instance_to_supertype +from mypy.meet import is_overlapping_types, narrow_declared_type +from mypy.message_registry import ErrorMessage +from mypy.messages import MessageBuilder, format_type +from mypy.nodes import ( + ARG_NAMED, + ARG_POS, + ARG_STAR, + ARG_STAR2, + IMPLICITLY_ABSTRACT, + LAMBDA_NAME, + LITERAL_TYPE, + REVEAL_LOCALS, + REVEAL_TYPE, + UNBOUND_IMPORTED, + ArgKind, + AssertTypeExpr, + AssignmentExpr, + AwaitExpr, + BytesExpr, + CallExpr, + CastExpr, + ComparisonExpr, + ComplexExpr, + ConditionalExpr, + Context, + Decorator, + DictExpr, + DictionaryComprehension, + EllipsisExpr, + EnumCallExpr, + Expression, + FloatExpr, + FuncDef, + GeneratorExpr, + IndexExpr, + IntExpr, + LambdaExpr, + ListComprehension, + ListExpr, + MaybeTypeExpression, + MemberExpr, + MypyFile, + NamedTupleExpr, + NameExpr, + NewTypeExpr, + NotParsed, + OpExpr, + OverloadedFuncDef, + ParamSpecExpr, + PlaceholderNode, + PromoteExpr, + RefExpr, + RevealExpr, + SetComprehension, + SetExpr, + SliceExpr, + StarExpr, + StrExpr, + SuperExpr, + SymbolNode, + SymbolTableNode, + TempNode, + TupleExpr, + TypeAlias, + TypeAliasExpr, + TypeApplication, + TypedDictExpr, + TypeFormExpr, + TypeInfo, + TypeVarExpr, + TypeVarLikeExpr, + TypeVarTupleExpr, + UnaryExpr, + Var, + YieldExpr, + YieldFromExpr, + get_member_expr_fullname, +) +from mypy.options import PRECISE_TUPLE_TYPES +from mypy.plugin import ( + FunctionContext, + FunctionSigContext, + MethodContext, + MethodSigContext, + Plugin, +) +from mypy.semanal_enum import ENUM_BASES +from mypy.state import state +from mypy.subtypes import ( + find_member, + is_equivalent, + is_same_type, + is_subtype, + non_method_protocol_members, +) +from mypy.traverser import ( + all_name_and_member_expressions, + has_await_expression, + has_str_expression, +) +from mypy.tvar_scope import TypeVarLikeScope +from mypy.typeanal import ( + TypeAnalyser, + check_for_explicit_any, + fix_instance, + has_any_from_unimported_type, + instantiate_type_alias, + make_optional_type, + set_any_tvars, + validate_instance, +) +from mypy.typeops import ( + callable_type, + custom_special_method, 
+ erase_to_union_or_bound, + false_only, + fixup_partial_type, + freeze_all_type_vars, + function_type, + get_all_type_vars, + get_type_vars, + is_literal_type_like, + make_simplified_union, + simple_literal_type, + true_only, + try_expanding_sum_type_to_union, + try_getting_str_literals, + tuple_fallback, + type_object_type, +) +from mypy.types import ( + LITERAL_TYPE_NAMES, + TUPLE_LIKE_INSTANCE_NAMES, + AnyType, + CallableType, + DeletedType, + ErasedType, + ExtraAttrs, + FunctionLike, + Instance, + LiteralType, + LiteralValue, + NoneType, + Overloaded, + Parameters, + ParamSpecFlavor, + ParamSpecType, + PartialType, + ProperType, + TupleType, + Type, + TypeAliasType, + TypedDictType, + TypeOfAny, + TypeType, + TypeVarId, + TypeVarLikeType, + TypeVarTupleType, + TypeVarType, + UnboundType, + UninhabitedType, + UnionType, + UnpackType, + find_unpack_in_list, + flatten_nested_tuples, + flatten_nested_unions, + get_proper_type, + get_proper_types, + has_recursive_types, + has_type_vars, + is_named_instance, + split_with_prefix_and_suffix, +) +from mypy.types_utils import ( + is_generic_instance, + is_overlapping_none, + is_self_type_like, + remove_optional, +) +from mypy.typestate import type_state +from mypy.typevars import fill_typevars +from mypy.visitor import ExpressionVisitor + +# Type of callback user for checking individual function arguments. See +# check_args() below for details. +ArgChecker: _TypeAlias = Callable[ + [Type, Type, ArgKind, Type, int, int, CallableType, Optional[Type], Context, Context], None +] + +# Maximum nesting level for math union in overloads, setting this to large values +# may cause performance issues. The reason is that although union math algorithm we use +# nicely captures most corner cases, its worst case complexity is exponential, +# see https://github.com/python/mypy/pull/5255#discussion_r196896335 for discussion. +MAX_UNIONS: Final = 5 + + +# Types considered safe for comparisons with --strict-equality due to known behaviour of __eq__. +# NOTE: All these types are subtypes of AbstractSet. +OVERLAPPING_TYPES_ALLOWLIST: Final = [ + "builtins.set", + "builtins.frozenset", + "typing.KeysView", + "typing.ItemsView", + "builtins._dict_keys", + "builtins._dict_items", + "_collections_abc.dict_keys", + "_collections_abc.dict_items", +] +OVERLAPPING_BYTES_ALLOWLIST: Final = { + "builtins.bytes", + "builtins.bytearray", + "builtins.memoryview", +} + + +class TooManyUnions(Exception): + """Indicates that we need to stop splitting unions in an attempt + to match an overload in order to save performance. + """ + + +def allow_fast_container_literal(t: Type) -> bool: + if isinstance(t, TypeAliasType) and t.is_recursive: + return False + t = get_proper_type(t) + return isinstance(t, Instance) or ( + isinstance(t, TupleType) and all(allow_fast_container_literal(it) for it in t.items) + ) + + +class Finished(Exception): + """Raised if we can terminate overload argument check early (no match).""" + + +@enum.unique +class UseReverse(enum.Enum): + """Used in `visit_op_expr` to enable or disable reverse method checks.""" + + DEFAULT = 0 + ALWAYS = 1 + NEVER = 2 + + +USE_REVERSE_DEFAULT: Final = UseReverse.DEFAULT +USE_REVERSE_ALWAYS: Final = UseReverse.ALWAYS +USE_REVERSE_NEVER: Final = UseReverse.NEVER + + +class ExpressionChecker(ExpressionVisitor[Type], ExpressionCheckerSharedApi): + """Expression type checker. + + This class works closely together with checker.TypeChecker. + """ + + # Some services are provided by a TypeChecker instance. 
+ chk: mypy.checker.TypeChecker + # This is shared with TypeChecker, but stored also here for convenience. + msg: MessageBuilder + # Type context for type inference + type_context: list[Type | None] + + # cache resolved types in some cases + resolved_type: dict[Expression, ProperType] + + strfrm_checker: StringFormatterChecker + plugin: Plugin + + _arg_infer_context_cache: ArgumentInferContext | None + + def __init__( + self, + chk: mypy.checker.TypeChecker, + msg: MessageBuilder, + plugin: Plugin, + per_line_checking_time_ns: dict[int, int], + ) -> None: + """Construct an expression type checker.""" + self.chk = chk + self.msg = msg + self.plugin = plugin + self.per_line_checking_time_ns = per_line_checking_time_ns + self.collect_line_checking_stats = chk.options.line_checking_stats is not None + # Are we already visiting some expression? This is used to avoid double counting + # time for nested expressions. + self.in_expression = False + self.type_context = [None] + + # Temporary overrides for expression types. This is currently + # used by the union math in overloads. + # TODO: refactor this to use a pattern similar to one in + # multiassign_from_union, or maybe even combine the two? + self.type_overrides: dict[Expression, Type] = {} + self.strfrm_checker = StringFormatterChecker(self.chk, self.msg) + + self.resolved_type = {} + + # Callee in a call expression is in some sense both runtime context and + # type context, because we support things like C[int](...). Store information + # on whether current expression is a callee, to give better error messages + # related to type context. + self.is_callee = False + type_state.infer_polymorphic = not self.chk.options.old_type_inference + + self._arg_infer_context_cache = None + self.expr_cache: dict[ + tuple[Expression, Type | None], + tuple[int, Type, list[ErrorInfo], dict[Expression, Type]], + ] = {} + self.in_lambda_expr = False + + self._literal_true: Instance | None = None + self._literal_false: Instance | None = None + + def reset(self) -> None: + self.resolved_type = {} + self.expr_cache.clear() + + def visit_name_expr(self, e: NameExpr) -> Type: + """Type check a name expression. + + It can be of any kind: local, member or global. + """ + result = self.analyze_ref_expr(e) + narrowed = self.narrow_type_from_binder(e, result) + self.chk.check_deprecated(e.node, e) + return narrowed + + def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type: + result: Type | None = None + node = e.node + + if isinstance(e, NameExpr) and e.is_special_form: + # A special form definition, nothing to check here. + return AnyType(TypeOfAny.special_form) + + if isinstance(node, Var): + # Variable reference. + result = self.analyze_var_ref(node, e) + if isinstance(result, PartialType): + result = self.chk.handle_partial_var_type(result, lvalue, node, e) + elif isinstance(node, Decorator): + result = self.analyze_var_ref(node.var, e) + elif isinstance(node, OverloadedFuncDef): + if node.type is None: + if self.chk.in_checked_function() and node.items: + self.chk.handle_cannot_determine_type(node.name, e) + result = AnyType(TypeOfAny.from_error) + else: + result = node.type + elif isinstance(node, (FuncDef, TypeInfo, TypeAlias, MypyFile, TypeVarLikeExpr)): + result = self.analyze_static_reference(node, e, e.is_alias_rvalue or lvalue) + else: + if isinstance(node, PlaceholderNode): + assert False, f"PlaceholderNode {node.fullname!r} leaked to checker" + # Unknown reference; use any type implicitly to avoid + # generating extra type errors. 
+ result = AnyType(TypeOfAny.from_error) + if isinstance(node, TypeInfo): + if isinstance(result, CallableType) and isinstance( # type: ignore[misc] + result.ret_type, Instance + ): + # We need to set correct line and column + # TODO: always do this in type_object_type by passing the original context + result.ret_type.line = e.line + result.ret_type.column = e.column + if is_type_type_context(self.type_context[-1]): + # This is the type in a type[] expression, so substitute type + # variables with Any. + result = erasetype.erase_typevars(result) + assert result is not None + return result + + def analyze_static_reference( + self, + node: SymbolNode, + ctx: Context, + is_lvalue: bool, + *, + include_modules: bool = True, + suppress_errors: bool = False, + ) -> Type: + """ + This is the version of analyze_ref_expr() that doesn't do any deferrals. + + This function can be used by member access to "static" attributes. For example, + when accessing module attributes in protocol checks, or accessing attributes of + special kinds (like TypeAlias, TypeInfo, etc.) on an instance or class object. + # TODO: merge with analyze_ref_expr() when we are confident about performance. + """ + if isinstance(node, (Var, Decorator, OverloadedFuncDef)): + return node.type or AnyType(TypeOfAny.special_form) + elif isinstance(node, FuncDef): + return function_type(node, self.named_type("builtins.function")) + elif isinstance(node, TypeInfo): + # Reference to a type object. + if node.typeddict_type: + # We special-case TypedDict, because they don't define any constructor. + return self.typeddict_callable(node) + elif node.fullname == "types.NoneType": + # We special case NoneType, because its stub definition is not related to None. + return TypeType(NoneType()) + else: + return type_object_type(node, self.named_type) + elif isinstance(node, TypeAlias): + # Something that refers to a type alias appears in runtime context. + # Note that we suppress bogus errors for alias redefinitions, + # they are already reported in semanal.py. + with self.msg.filter_errors() if suppress_errors else nullcontext(): + return self.alias_type_in_runtime_context( + node, ctx=ctx, alias_definition=is_lvalue + ) + elif isinstance(node, TypeVarExpr): + return self.named_type("typing.TypeVar") + elif isinstance(node, (ParamSpecExpr, TypeVarTupleExpr)): + return self.object_type() + elif isinstance(node, MypyFile): + # Reference to a module object. + return self.module_type(node) if include_modules else AnyType(TypeOfAny.special_form) + return AnyType(TypeOfAny.from_error) + + def analyze_var_ref(self, var: Var, context: Context) -> Type: + if var.type: + var_type = get_proper_type(var.type) + if isinstance(var_type, Instance): + if var.fullname == "typing.Any": + # The typeshed type is 'object'; give a more useful type in runtime context + return self.named_type("typing._SpecialForm") + if self.is_literal_context() and var_type.last_known_value is not None: + return var_type.last_known_value + if var.name in {"True", "False"}: + return self.infer_literal_expr_type(var.name == "True", "builtins.bool") + return var.type + else: + if not var.is_ready and self.chk.in_checked_function(): + self.chk.handle_cannot_determine_type(var.name, context) + # Implicit 'Any' type. + return AnyType(TypeOfAny.special_form) + + def module_type(self, node: MypyFile) -> Instance: + try: + result = self.named_type("types.ModuleType") + except KeyError: + # In test cases might 'types' may not be available. 
+ # Fall back to a dummy 'object' type instead to + # avoid a crash. + # Make a copy so that we don't set extra_attrs (below) on a shared instance. + result = self.named_type("builtins.object").copy_modified() + module_attrs: dict[str, Type] = {} + immutable = set() + for name, n in node.names.items(): + if not n.module_public: + continue + if isinstance(n.node, Var) and n.node.is_final: + immutable.add(name) + if n.node is None: + module_attrs[name] = AnyType(TypeOfAny.from_error) + else: + # TODO: what to do about nested module references? + # They are non-trivial because there may be import cycles. + module_attrs[name] = self.analyze_static_reference( + n.node, n.node, False, include_modules=False, suppress_errors=True + ) + result.extra_attrs = ExtraAttrs(module_attrs, immutable, node.fullname) + return result + + def visit_call_expr(self, e: CallExpr, allow_none_return: bool = False) -> Type: + """Type check a call expression.""" + if e.analyzed: + if isinstance(e.analyzed, NamedTupleExpr) and not e.analyzed.is_typed: + # Type check the arguments, but ignore the results. This relies + # on the typeshed stubs to type check the arguments. + self.visit_call_expr_inner(e) + # It's really a special form that only looks like a call. + return self.accept(e.analyzed, self.type_context[-1]) + return self.visit_call_expr_inner(e, allow_none_return=allow_none_return) + + def refers_to_typeddict(self, base: Expression) -> bool: + if not isinstance(base, RefExpr): + return False + if isinstance(base.node, TypeInfo) and base.node.typeddict_type is not None: + # Direct reference. + return True + return isinstance(base.node, TypeAlias) and isinstance( + get_proper_type(base.node.target), TypedDictType + ) + + def visit_call_expr_inner(self, e: CallExpr, allow_none_return: bool = False) -> Type: + if ( + self.refers_to_typeddict(e.callee) + or isinstance(e.callee, IndexExpr) + and self.refers_to_typeddict(e.callee.base) + ): + typeddict_callable = get_proper_type(self.accept(e.callee, is_callee=True)) + if isinstance(typeddict_callable, CallableType): + typeddict_type = get_proper_type(typeddict_callable.ret_type) + assert isinstance(typeddict_type, TypedDictType) + return self.check_typeddict_call( + typeddict_type, e.arg_kinds, e.arg_names, e.args, e, typeddict_callable + ) + if ( + isinstance(e.callee, NameExpr) + and e.callee.name in ("isinstance", "issubclass") + and len(e.args) == 2 + ): + for typ in mypy.checker.flatten(e.args[1]): + node = None + if isinstance(typ, NameExpr): + try: + node = self.chk.lookup_qualified(typ.name) + except KeyError: + # Undefined names should already be reported in semantic analysis. 
+ pass + if is_expr_literal_type(typ): + self.msg.cannot_use_function_with_type(e.callee.name, "Literal", e) + continue + if node and isinstance(node.node, TypeAlias): + target = get_proper_type(node.node.target) + if isinstance(target, AnyType): + self.msg.cannot_use_function_with_type(e.callee.name, "Any", e) + continue + if isinstance(target, NoneType): + continue + if ( + isinstance(typ, IndexExpr) + and isinstance(typ.analyzed, (TypeApplication, TypeAliasExpr)) + ) or ( + isinstance(typ, NameExpr) + and node + and isinstance(node.node, TypeAlias) + and not node.node.no_args + and not ( + isinstance(union_target := get_proper_type(node.node.target), UnionType) + and ( + union_target.uses_pep604_syntax + or self.chk.options.python_version >= (3, 10) + ) + ) + ): + self.msg.type_arguments_not_allowed(e) + if isinstance(typ, RefExpr) and isinstance(typ.node, TypeInfo): + if typ.node.typeddict_type: + self.msg.cannot_use_function_with_type(e.callee.name, "TypedDict", e) + elif typ.node.is_newtype: + self.msg.cannot_use_function_with_type(e.callee.name, "NewType", e) + self.try_infer_partial_type(e) + type_context = None + if isinstance(e.callee, LambdaExpr): + formal_to_actual = map_actuals_to_formals( + e.arg_kinds, + e.arg_names, + e.callee.arg_kinds, + e.callee.arg_names, + lambda i: self.accept(e.args[i]), + ) + + arg_types = [ + join.join_type_list([self.accept(e.args[j]) for j in formal_to_actual[i]]) + for i in range(len(e.callee.arg_kinds)) + ] + type_context = CallableType( + arg_types, + e.callee.arg_kinds, + e.callee.arg_names, + ret_type=self.object_type(), + fallback=self.named_type("builtins.function"), + ) + callee_type = get_proper_type( + self.accept(e.callee, type_context, always_allow_any=True, is_callee=True) + ) + + # Figure out the full name of the callee for plugin lookup. + object_type = None + member = None + fullname = None + if isinstance(e.callee, RefExpr): + # There are two special cases where plugins might act: + # * A "static" reference/alias to a class or function; + # get_function_hook() will be invoked for these. + fullname = e.callee.fullname or None + if isinstance(e.callee.node, TypeAlias): + target = get_proper_type(e.callee.node.target) + if isinstance(target, Instance): + fullname = target.type.fullname + # * Call to a method on object that has a full name (see + # method_fullname() for details on supported objects); + # get_method_hook() and get_method_signature_hook() will + # be invoked for these. 
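+            #   (For example, a call like `x.append(1)` with `x: list[int]` resolves to
+            #   the method fullname "builtins.list.append".)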
+ if ( + not fullname + and isinstance(e.callee, MemberExpr) + and self.chk.has_type(e.callee.expr) + ): + member = e.callee.name + object_type = self.chk.lookup_type(e.callee.expr) + + if ( + self.chk.options.disallow_untyped_calls + and self.chk.in_checked_function() + and isinstance(callee_type, CallableType) + and callee_type.implicit + and callee_type.name != LAMBDA_NAME + ): + if fullname is None and member is not None: + assert object_type is not None + fullname = self.method_fullname(object_type, member) + if not fullname or not any( + fullname == p or fullname.startswith(f"{p}.") + for p in self.chk.options.untyped_calls_exclude + ): + self.msg.untyped_function_call(callee_type, e) + + ret_type = self.check_call_expr_with_callee_type( + callee_type, e, fullname, object_type, member + ) + if isinstance(e.callee, RefExpr) and len(e.args) == 2: + if e.callee.fullname in ("builtins.isinstance", "builtins.issubclass"): + self.check_runtime_protocol_test(e) + if e.callee.fullname == "builtins.issubclass": + self.check_protocol_issubclass(e) + if isinstance(e.callee, MemberExpr) and e.callee.name == "format": + self.check_str_format_call(e) + ret_type = get_proper_type(ret_type) + if isinstance(ret_type, UnionType): + ret_type = make_simplified_union(ret_type.items) + if isinstance(ret_type, UninhabitedType) and not ret_type.ambiguous: + self.chk.binder.unreachable() + # Warn on calls to functions that always return None. The check + # of ret_type is both a common-case optimization and prevents reporting + # the error in dynamic functions (where it will be Any). + if ( + not allow_none_return + and isinstance(ret_type, NoneType) + and self.always_returns_none(e.callee) + ): + self.chk.msg.does_not_return_value(callee_type, e) + return AnyType(TypeOfAny.from_error) + return ret_type + + def check_str_format_call(self, e: CallExpr) -> None: + """More precise type checking for str.format() calls on literals and folded constants.""" + assert isinstance(e.callee, MemberExpr) + format_value = None + folded_callee_expr = constant_fold_expr(e.callee.expr, "") + if isinstance(folded_callee_expr, str): + format_value = folded_callee_expr + elif self.chk.has_type(e.callee.expr): + typ = get_proper_type(self.chk.lookup_type(e.callee.expr)) + if ( + isinstance(typ, Instance) + and typ.type.is_enum + and isinstance(typ.last_known_value, LiteralType) + and isinstance(typ.last_known_value.value, str) + ): + value_type = typ.type.names[typ.last_known_value.value].type + if isinstance(value_type, Type): + typ = get_proper_type(value_type) + base_typ = try_getting_literal(typ) + if isinstance(base_typ, LiteralType) and isinstance(base_typ.value, str): + format_value = base_typ.value + if format_value is not None: + self.strfrm_checker.check_str_format_call(e, format_value) + + def method_fullname(self, object_type: Type, method_name: str) -> str | None: + """Convert a method name to a fully qualified name, based on the type of the object that + it is invoked on. Return `None` if the name of `object_type` cannot be determined. + """ + object_type = get_proper_type(object_type) + + if isinstance(object_type, CallableType) and object_type.is_type_obj(): + # For class method calls, object_type is a callable representing the class object. + # We "unwrap" it to a regular type, as the class/instance method difference doesn't + # affect the fully qualified name. 
+ object_type = get_proper_type(object_type.ret_type) + elif isinstance(object_type, TypeType): + object_type = object_type.item + + type_name = None + if isinstance(object_type, Instance): + type_name = object_type.type.fullname + elif isinstance(object_type, (TypedDictType, LiteralType)): + info = object_type.fallback.type.get_containing_type_info(method_name) + type_name = info.fullname if info is not None else None + elif isinstance(object_type, TupleType): + type_name = tuple_fallback(object_type).type.fullname + + if type_name: + return f"{type_name}.{method_name}" + else: + return None + + def always_returns_none(self, node: Expression) -> bool: + """Check if `node` refers to something explicitly annotated as only returning None.""" + if isinstance(node, RefExpr): + if self.defn_returns_none(node.node): + return True + if isinstance(node, MemberExpr) and node.node is None: # instance or class attribute + typ = get_proper_type(self.chk.lookup_type(node.expr)) + if isinstance(typ, Instance): + info = typ.type + elif isinstance(typ, CallableType) and typ.is_type_obj(): + ret_type = get_proper_type(typ.ret_type) + if isinstance(ret_type, Instance): + info = ret_type.type + else: + return False + else: + return False + sym = info.get(node.name) + if sym and self.defn_returns_none(sym.node): + return True + return False + + def defn_returns_none(self, defn: SymbolNode | None) -> bool: + """Check if `defn` can _only_ return None.""" + if isinstance(defn, FuncDef): + return isinstance(defn.type, CallableType) and isinstance( + get_proper_type(defn.type.ret_type), NoneType + ) + if isinstance(defn, OverloadedFuncDef): + return all(self.defn_returns_none(item) for item in defn.items) + if isinstance(defn, Var): + typ = get_proper_type(defn.type) + if ( + not defn.is_inferred + and isinstance(typ, CallableType) + and isinstance(get_proper_type(typ.ret_type), NoneType) + ): + return True + if isinstance(typ, Instance): + sym = typ.type.get("__call__") + if sym and self.defn_returns_none(sym.node): + return True + return False + + def check_runtime_protocol_test(self, e: CallExpr) -> None: + for expr in mypy.checker.flatten(e.args[1]): + tp = get_proper_type(self.chk.lookup_type(expr)) + if ( + isinstance(tp, FunctionLike) + and tp.is_type_obj() + and tp.type_object().is_protocol + and not tp.type_object().runtime_protocol + ): + self.chk.fail(message_registry.RUNTIME_PROTOCOL_EXPECTED, e) + + def check_protocol_issubclass(self, e: CallExpr) -> None: + for expr in mypy.checker.flatten(e.args[1]): + tp = get_proper_type(self.chk.lookup_type(expr)) + if isinstance(tp, FunctionLike) and tp.is_type_obj() and tp.type_object().is_protocol: + attr_members = non_method_protocol_members(tp.type_object()) + if attr_members: + self.chk.msg.report_non_method_protocol(tp.type_object(), attr_members, e) + + def check_typeddict_call( + self, + callee: TypedDictType, + arg_kinds: list[ArgKind], + arg_names: Sequence[str | None], + args: list[Expression], + context: Context, + orig_callee: Type | None, + ) -> Type: + if args and all(ak in (ARG_NAMED, ARG_STAR2) for ak in arg_kinds): + # ex: Point(x=42, y=1337, **extras) + # This is a bit ugly, but this is a price for supporting all possible syntax + # variants for TypedDict constructors. 
+ kwargs = zip([StrExpr(n) if n is not None else None for n in arg_names], args) + result = self.validate_typeddict_kwargs(kwargs=kwargs, callee=callee) + if result is not None: + validated_kwargs, always_present_keys = result + return self.check_typeddict_call_with_kwargs( + callee, validated_kwargs, context, orig_callee, always_present_keys + ) + return AnyType(TypeOfAny.from_error) + + if len(args) == 1 and arg_kinds[0] == ARG_POS: + unique_arg = args[0] + if isinstance(unique_arg, DictExpr): + # ex: Point({'x': 42, 'y': 1337, **extras}) + return self.check_typeddict_call_with_dict( + callee, unique_arg.items, context, orig_callee + ) + if isinstance(unique_arg, CallExpr) and isinstance(unique_arg.analyzed, DictExpr): + # ex: Point(dict(x=42, y=1337, **extras)) + return self.check_typeddict_call_with_dict( + callee, unique_arg.analyzed.items, context, orig_callee + ) + + if not args: + # ex: EmptyDict() + return self.check_typeddict_call_with_kwargs(callee, {}, context, orig_callee, set()) + + self.chk.fail(message_registry.INVALID_TYPEDDICT_ARGS, context) + return AnyType(TypeOfAny.from_error) + + def validate_typeddict_kwargs( + self, kwargs: Iterable[tuple[Expression | None, Expression]], callee: TypedDictType + ) -> tuple[dict[str, list[Expression]], set[str]] | None: + # All (actual or mapped from ** unpacks) expressions that can match given key. + result = defaultdict(list) + # Keys that are guaranteed to be present no matter what (e.g. for all items of a union) + always_present_keys = set() + # Indicates latest encountered ** unpack among items. + last_star_found = None + + for item_name_expr, item_arg in kwargs: + if item_name_expr: + key_type = self.accept(item_name_expr) + values = try_getting_str_literals(item_name_expr, key_type) + literal_value = None + if values and len(values) == 1: + literal_value = values[0] + if literal_value is None: + key_context = item_name_expr or item_arg + self.chk.fail( + message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, + key_context, + code=codes.LITERAL_REQ, + ) + return None + else: + # A directly present key unconditionally shadows all previously found + # values from ** items. + # TODO: for duplicate keys, type-check all values. + result[literal_value] = [item_arg] + always_present_keys.add(literal_value) + else: + last_star_found = item_arg + if not self.validate_star_typeddict_item( + item_arg, callee, result, always_present_keys + ): + return None + if self.chk.options.extra_checks and last_star_found is not None: + absent_keys = [] + for key in callee.items: + if key not in callee.required_keys and key not in result: + absent_keys.append(key) + if absent_keys: + # Having an optional key not explicitly declared by a ** unpacked + # TypedDict is unsafe, it may be an (incompatible) subtype at runtime. + # TODO: catch the cases where a declared key is overridden by a subsequent + # ** item without it (and not again overridden with complete ** item). + self.msg.non_required_keys_absent_with_star(absent_keys, last_star_found) + return result, always_present_keys + + def validate_star_typeddict_item( + self, + item_arg: Expression, + callee: TypedDictType, + result: dict[str, list[Expression]], + always_present_keys: set[str], + ) -> bool: + """Update keys/expressions from a ** expression in TypedDict constructor. + + Note `result` and `always_present_keys` are updated in place. Return true if the + expression `item_arg` may valid in `callee` TypedDict context. 
+ """ + inferred = get_proper_type(self.accept(item_arg, type_context=callee)) + possible_tds = [] + if isinstance(inferred, TypedDictType): + possible_tds = [inferred] + elif isinstance(inferred, UnionType): + for item in get_proper_types(inferred.relevant_items()): + if isinstance(item, TypedDictType): + possible_tds.append(item) + elif not self.valid_unpack_fallback_item(item): + self.msg.unsupported_target_for_star_typeddict(item, item_arg) + return False + elif not self.valid_unpack_fallback_item(inferred): + self.msg.unsupported_target_for_star_typeddict(inferred, item_arg) + return False + all_keys: set[str] = set() + for td in possible_tds: + all_keys |= td.items.keys() + for key in all_keys: + arg = TempNode( + UnionType.make_union([td.items[key] for td in possible_tds if key in td.items]) + ) + arg.set_line(item_arg) + if all(key in td.required_keys for td in possible_tds): + always_present_keys.add(key) + # Always present keys override previously found values. This is done + # to support use cases like `Config({**defaults, **overrides})`, where + # some `overrides` types are narrower that types in `defaults`, and + # former are too wide for `Config`. + if result[key]: + first = result[key][0] + if not isinstance(first, TempNode): + # We must always preserve any non-synthetic values, so that + # we will accept them even if they are shadowed. + result[key] = [first, arg] + else: + result[key] = [arg] + else: + result[key] = [arg] + else: + # If this key is not required at least in some item of a union + # it may not shadow previous item, so we need to type check both. + result[key].append(arg) + return True + + def valid_unpack_fallback_item(self, typ: ProperType) -> bool: + if isinstance(typ, AnyType): + return True + if not isinstance(typ, Instance) or not typ.type.has_base("typing.Mapping"): + return False + mapped = map_instance_to_supertype(typ, self.chk.lookup_typeinfo("typing.Mapping")) + return all(isinstance(a, AnyType) for a in get_proper_types(mapped.args)) + + def match_typeddict_call_with_dict( + self, + callee: TypedDictType, + kwargs: list[tuple[Expression | None, Expression]], + context: Context, + ) -> bool: + result = self.validate_typeddict_kwargs(kwargs=kwargs, callee=callee) + if result is not None: + validated_kwargs, _ = result + return callee.required_keys <= set(validated_kwargs.keys()) <= set(callee.items.keys()) + else: + return False + + def check_typeddict_call_with_dict( + self, + callee: TypedDictType, + kwargs: list[tuple[Expression | None, Expression]], + context: Context, + orig_callee: Type | None, + ) -> Type: + result = self.validate_typeddict_kwargs(kwargs=kwargs, callee=callee) + if result is not None: + validated_kwargs, always_present_keys = result + return self.check_typeddict_call_with_kwargs( + callee, + kwargs=validated_kwargs, + context=context, + orig_callee=orig_callee, + always_present_keys=always_present_keys, + ) + else: + return AnyType(TypeOfAny.from_error) + + def typeddict_callable(self, info: TypeInfo) -> CallableType: + """Construct a reasonable type for a TypedDict type in runtime context. + + If it appears as a callee, it will be special-cased anyway, e.g. it is + also allowed to accept a single positional argument if it is a dict literal. + + Note it is not safe to move this to type_object_type() since it will crash + on plugin-generated TypedDicts, that may not have the special_alias. 
+ """ + assert info.special_alias is not None + target = info.special_alias.target + assert isinstance(target, ProperType) and isinstance(target, TypedDictType) + return self.typeddict_callable_from_context(target, info.defn.type_vars) + + def typeddict_callable_from_context( + self, callee: TypedDictType, variables: Sequence[TypeVarLikeType] | None = None + ) -> CallableType: + return CallableType( + list(callee.items.values()), + [ + ArgKind.ARG_NAMED if name in callee.required_keys else ArgKind.ARG_NAMED_OPT + for name in callee.items + ], + list(callee.items.keys()), + callee, + self.named_type("builtins.type"), + variables=variables, + is_bound=True, + ) + + def check_typeddict_call_with_kwargs( + self, + callee: TypedDictType, + kwargs: dict[str, list[Expression]], + context: Context, + orig_callee: Type | None, + always_present_keys: set[str], + ) -> Type: + actual_keys = kwargs.keys() + if callee.to_be_mutated: + assigned_readonly_keys = actual_keys & callee.readonly_keys + if assigned_readonly_keys: + self.msg.readonly_keys_mutated(assigned_readonly_keys, context=context) + if not ( + callee.required_keys <= always_present_keys and actual_keys <= callee.items.keys() + ): + if not (actual_keys <= callee.items.keys()): + self.msg.unexpected_typeddict_keys( + callee, + expected_keys=[ + key + for key in callee.items.keys() + if key in callee.required_keys or key in actual_keys + ], + actual_keys=list(actual_keys), + context=context, + ) + if not (callee.required_keys <= always_present_keys): + self.msg.unexpected_typeddict_keys( + callee, + expected_keys=[ + key for key in callee.items.keys() if key in callee.required_keys + ], + actual_keys=[ + key for key in always_present_keys if key in callee.required_keys + ], + context=context, + ) + if callee.required_keys > actual_keys: + # found_set is a sub-set of the required_keys + # This means we're missing some keys and as such, we can't + # properly type the object + return AnyType(TypeOfAny.from_error) + + orig_callee = get_proper_type(orig_callee) + if isinstance(orig_callee, CallableType): + infer_callee = orig_callee + else: + # Try reconstructing from type context. + if callee.fallback.type.special_alias is not None: + infer_callee = self.typeddict_callable(callee.fallback.type) + else: + # Likely a TypedDict type generated by a plugin. + infer_callee = self.typeddict_callable_from_context(callee) + + # We don't show any errors, just infer types in a generic TypedDict type, + # a custom error message will be given below, if there are errors. + with self.msg.filter_errors(), self.chk.local_type_map: + orig_ret_type, _ = self.check_callable_call( + infer_callee, + # We use first expression for each key to infer type variables of a generic + # TypedDict. This is a bit arbitrary, but in most cases will work better than + # trying to infer a union or a join. + [args[0] for args in kwargs.values()], + [ArgKind.ARG_NAMED] * len(kwargs), + context, + list(kwargs.keys()), + None, + None, + None, + ) + + ret_type = get_proper_type(orig_ret_type) + if not isinstance(ret_type, TypedDictType): + # If something went really wrong, type-check call with original type, + # this may give a better error message. 
+ ret_type = callee + + for item_name, item_expected_type in ret_type.items.items(): + if item_name in kwargs: + item_values = kwargs[item_name] + for item_value in item_values: + self.chk.check_simple_assignment( + lvalue_type=item_expected_type, + rvalue=item_value, + context=item_value, + msg=ErrorMessage( + message_registry.INCOMPATIBLE_TYPES.value, code=codes.TYPEDDICT_ITEM + ), + lvalue_name=f'TypedDict item "{item_name}"', + rvalue_name="expression", + ) + + return orig_ret_type + + def get_partial_self_var(self, expr: MemberExpr) -> Var | None: + """Get variable node for a partial self attribute. + + If the expression is not a self attribute, or attribute is not variable, + or variable is not partial, return None. + """ + if not ( + isinstance(expr.expr, NameExpr) + and isinstance(expr.expr.node, Var) + and expr.expr.node.is_self + ): + # Not a self.attr expression. + return None + info = self.chk.scope.enclosing_class() + if not info or expr.name not in info.names: + # Don't mess with partial types in superclasses. + return None + sym = info.names[expr.name] + if isinstance(sym.node, Var) and isinstance(sym.node.type, PartialType): + return sym.node + return None + + # Types and methods that can be used to infer partial types. + item_args: ClassVar[dict[str, list[str]]] = { + "builtins.list": ["append"], + "builtins.set": ["add", "discard"], + } + container_args: ClassVar[dict[str, dict[str, list[str]]]] = { + "builtins.list": {"extend": ["builtins.list"]}, + "builtins.dict": {"update": ["builtins.dict"]}, + "collections.OrderedDict": {"update": ["builtins.dict"]}, + "builtins.set": {"update": ["builtins.set", "builtins.list"]}, + } + + def try_infer_partial_type(self, e: CallExpr) -> None: + """Try to make partial type precise from a call.""" + if not isinstance(e.callee, MemberExpr): + return + callee = e.callee + if isinstance(callee.expr, RefExpr): + # Call a method with a RefExpr callee, such as 'x.method(...)'. + ret = self.get_partial_var(callee.expr) + if ret is None: + return + var, partial_types = ret + typ = self.try_infer_partial_value_type_from_call(e, callee.name, var) + # Var may be deleted from partial_types in try_infer_partial_value_type_from_call + if typ is not None and var in partial_types: + self.chk.replace_partial_type(var, typ, partial_types) + elif isinstance(callee.expr, IndexExpr) and isinstance(callee.expr.base, RefExpr): + # Call 'x[y].method(...)'; may infer type of 'x' if it's a partial defaultdict. + if callee.expr.analyzed is not None: + return # A special form + base = callee.expr.base + index = callee.expr.index + ret = self.get_partial_var(base) + if ret is None: + return + var, partial_types = ret + partial_type = get_partial_instance_type(var.type) + if partial_type is None or partial_type.value_type is None: + return + value_type = self.try_infer_partial_value_type_from_call(e, callee.name, var) + if value_type is not None: + # Infer key type. + key_type = self.accept(index) + if mypy.checker.is_valid_inferred_type(key_type, self.chk.options): + # Store inferred partial type. 
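+                    # ex: for `d = defaultdict(list)` followed by `d["key"].append(1)`,
+                    # this completes the partial type of `d` to defaultdict[str, list[int]].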
+ assert partial_type.type is not None + typename = partial_type.type.fullname + new_type = self.chk.named_generic_type(typename, [key_type, value_type]) + self.chk.replace_partial_type(var, new_type, partial_types) + + def get_partial_var(self, ref: RefExpr) -> tuple[Var, dict[Var, Context]] | None: + var = ref.node + if var is None and isinstance(ref, MemberExpr): + var = self.get_partial_self_var(ref) + if not isinstance(var, Var): + return None + partial_types = self.chk.find_partial_types(var) + if partial_types is None: + return None + return var, partial_types + + def try_infer_partial_value_type_from_call( + self, e: CallExpr, methodname: str, var: Var + ) -> Instance | None: + """Try to make partial type precise from a call such as 'x.append(y)'.""" + if self.chk.current_node_deferred: + return None + partial_type = get_partial_instance_type(var.type) + if partial_type is None: + return None + if partial_type.value_type: + typename = partial_type.value_type.type.fullname + else: + assert partial_type.type is not None + typename = partial_type.type.fullname + # Sometimes we can infer a full type for a partial List, Dict or Set type. + # TODO: Don't infer argument expression twice. + if ( + typename in self.item_args + and methodname in self.item_args[typename] + and e.arg_kinds == [ARG_POS] + ): + item_type = self.accept(e.args[0]) + if mypy.checker.is_valid_inferred_type(item_type, self.chk.options): + return self.chk.named_generic_type(typename, [item_type]) + elif ( + typename in self.container_args + and methodname in self.container_args[typename] + and e.arg_kinds == [ARG_POS] + ): + arg_type = get_proper_type(self.accept(e.args[0])) + if isinstance(arg_type, Instance): + arg_typename = arg_type.type.fullname + if arg_typename in self.container_args[typename][methodname]: + if all( + mypy.checker.is_valid_inferred_type(item_type, self.chk.options) + for item_type in arg_type.args + ): + return self.chk.named_generic_type(typename, list(arg_type.args)) + elif isinstance(arg_type, AnyType): + return self.chk.named_type(typename) + + return None + + def apply_function_plugin( + self, + callee: CallableType, + arg_kinds: list[ArgKind], + arg_types: list[Type], + arg_names: Sequence[str | None] | None, + formal_to_actual: list[list[int]], + args: list[Expression], + fullname: str, + object_type: Type | None, + context: Context, + ) -> Type: + """Use special case logic to infer the return type of a specific named function/method. + + Caller must ensure that a plugin hook exists. There are two different cases: + + - If object_type is None, the caller must ensure that a function hook exists + for fullname. + - If object_type is not None, the caller must ensure that a method hook exists + for fullname. + + Return the inferred return type. 
+ """ + num_formals = len(callee.arg_types) + formal_arg_types: list[list[Type]] = [[] for _ in range(num_formals)] + formal_arg_exprs: list[list[Expression]] = [[] for _ in range(num_formals)] + formal_arg_names: list[list[str | None]] = [[] for _ in range(num_formals)] + formal_arg_kinds: list[list[ArgKind]] = [[] for _ in range(num_formals)] + for formal, actuals in enumerate(formal_to_actual): + for actual in actuals: + formal_arg_types[formal].append(arg_types[actual]) + formal_arg_exprs[formal].append(args[actual]) + if arg_names: + formal_arg_names[formal].append(arg_names[actual]) + else: + formal_arg_names[formal].append(None) + formal_arg_kinds[formal].append(arg_kinds[actual]) + + if object_type is None: + # Apply function plugin + callback = self.plugin.get_function_hook(fullname) + assert callback is not None # Assume that caller ensures this + return callback( + FunctionContext( + arg_types=formal_arg_types, + arg_kinds=formal_arg_kinds, + callee_arg_names=callee.arg_names, + arg_names=formal_arg_names, + default_return_type=callee.ret_type, + args=formal_arg_exprs, + context=context, + api=self.chk, + ) + ) + else: + # Apply method plugin + method_callback = self.plugin.get_method_hook(fullname) + assert method_callback is not None # Assume that caller ensures this + object_type = get_proper_type(object_type) + return method_callback( + MethodContext( + type=object_type, + arg_types=formal_arg_types, + arg_kinds=formal_arg_kinds, + callee_arg_names=callee.arg_names, + arg_names=formal_arg_names, + default_return_type=callee.ret_type, + args=formal_arg_exprs, + context=context, + api=self.chk, + ) + ) + + def apply_signature_hook( + self, + callee: FunctionLike, + args: list[Expression], + arg_kinds: list[ArgKind], + arg_names: Sequence[str | None] | None, + hook: Callable[[list[list[Expression]], CallableType], FunctionLike], + ) -> FunctionLike: + """Helper to apply a signature hook for either a function or method""" + if isinstance(callee, CallableType): + num_formals = len(callee.arg_kinds) + formal_to_actual = map_actuals_to_formals( + arg_kinds, + arg_names, + callee.arg_kinds, + callee.arg_names, + lambda i: self.accept(args[i]), + ) + formal_arg_exprs: list[list[Expression]] = [[] for _ in range(num_formals)] + for formal, actuals in enumerate(formal_to_actual): + for actual in actuals: + formal_arg_exprs[formal].append(args[actual]) + return hook(formal_arg_exprs, callee) + else: + assert isinstance(callee, Overloaded) + items = [] + for item in callee.items: + adjusted = self.apply_signature_hook(item, args, arg_kinds, arg_names, hook) + assert isinstance(adjusted, CallableType) + items.append(adjusted) + return Overloaded(items) + + def apply_function_signature_hook( + self, + callee: FunctionLike, + args: list[Expression], + arg_kinds: list[ArgKind], + context: Context, + arg_names: Sequence[str | None] | None, + signature_hook: Callable[[FunctionSigContext], FunctionLike], + ) -> FunctionLike: + """Apply a plugin hook that may infer a more precise signature for a function.""" + return self.apply_signature_hook( + callee, + args, + arg_kinds, + arg_names, + (lambda args, sig: signature_hook(FunctionSigContext(args, sig, context, self.chk))), + ) + + def apply_method_signature_hook( + self, + callee: FunctionLike, + args: list[Expression], + arg_kinds: list[ArgKind], + context: Context, + arg_names: Sequence[str | None] | None, + object_type: Type, + signature_hook: Callable[[MethodSigContext], FunctionLike], + ) -> FunctionLike: + """Apply a plugin hook that may 
infer a more precise signature for a method.""" + pobject_type = get_proper_type(object_type) + return self.apply_signature_hook( + callee, + args, + arg_kinds, + arg_names, + ( + lambda args, sig: signature_hook( + MethodSigContext(pobject_type, args, sig, context, self.chk) + ) + ), + ) + + def transform_callee_type( + self, + callable_name: str | None, + callee: Type, + args: list[Expression], + arg_kinds: list[ArgKind], + context: Context, + arg_names: Sequence[str | None] | None = None, + object_type: Type | None = None, + ) -> Type: + """Attempt to determine a more accurate signature for a method call. + + This is done by looking up and applying a method signature hook (if one exists for the + given method name). + + If no matching method signature hook is found, callee is returned unmodified. The same + happens if the arguments refer to a non-method callable (this is allowed so that the code + calling transform_callee_type needs to perform fewer boilerplate checks). + + Note: this method is *not* called automatically as part of check_call, because in some + cases check_call is called multiple times while checking a single call (for example when + dealing with overloads). Instead, this method needs to be called explicitly + (if appropriate) before the signature is passed to check_call. + """ + callee = get_proper_type(callee) + if callable_name is not None and isinstance(callee, FunctionLike): + if object_type is not None: + method_sig_hook = self.plugin.get_method_signature_hook(callable_name) + if method_sig_hook: + return self.apply_method_signature_hook( + callee, args, arg_kinds, context, arg_names, object_type, method_sig_hook + ) + else: + function_sig_hook = self.plugin.get_function_signature_hook(callable_name) + if function_sig_hook: + return self.apply_function_signature_hook( + callee, args, arg_kinds, context, arg_names, function_sig_hook + ) + + return callee + + def is_generic_decorator_overload_call( + self, callee_type: CallableType, args: list[Expression] + ) -> Overloaded | None: + """Check if this looks like an application of a generic function to overload argument.""" + assert callee_type.variables + if len(callee_type.arg_types) != 1 or len(args) != 1: + # TODO: can we handle more general cases? + return None + if not isinstance(get_proper_type(callee_type.arg_types[0]), CallableType): + return None + if not isinstance(get_proper_type(callee_type.ret_type), CallableType): + return None + with self.chk.local_type_map: + with self.msg.filter_errors(): + arg_type = get_proper_type(self.accept(args[0], type_context=None)) + if isinstance(arg_type, Overloaded): + return arg_type + return None + + def handle_decorator_overload_call( + self, callee_type: CallableType, overloaded: Overloaded, ctx: Context + ) -> tuple[Type, Type] | None: + """Type-check application of a generic callable to an overload. + + We check call on each individual overload item, and then combine results into a new + overload. This function should be only used if callee_type takes and returns a Callable. + """ + result = [] + inferred_args = [] + for item in overloaded.items: + arg = TempNode(typ=item) + with self.msg.filter_errors() as err: + item_result, inferred_arg = self.check_call(callee_type, [arg], [ARG_POS], ctx) + if err.has_new_errors(): + # This overload doesn't match. 
+ continue + p_item_result = get_proper_type(item_result) + if not isinstance(p_item_result, CallableType): + continue + p_inferred_arg = get_proper_type(inferred_arg) + if not isinstance(p_inferred_arg, CallableType): + continue + inferred_args.append(p_inferred_arg) + result.append(p_item_result) + if not result or not inferred_args: + # None of the overload matched (or overload was initially malformed). + return None + return Overloaded(result), Overloaded(inferred_args) + + def check_call_expr_with_callee_type( + self, + callee_type: Type, + e: CallExpr, + callable_name: str | None, + object_type: Type | None, + member: str | None = None, + ) -> Type: + """Type check call expression. + + The callee_type should be used as the type of callee expression. In particular, + in case of a union type this can be a particular item of the union, so that we can + apply plugin hooks to each item. + + The 'member', 'callable_name' and 'object_type' are only used to call plugin hooks. + If 'callable_name' is None but 'member' is not None (member call), try constructing + 'callable_name' using 'object_type' (the base type on which the method is called), + for example 'typing.Mapping.get'. + """ + if callable_name is None and member is not None: + assert object_type is not None + callable_name = self.method_fullname(object_type, member) + object_type = get_proper_type(object_type) + if callable_name: + # Try to refine the call signature using plugin hooks before checking the call. + callee_type = self.transform_callee_type( + callable_name, callee_type, e.args, e.arg_kinds, e, e.arg_names, object_type + ) + # Unions are special-cased to allow plugins to act on each item in the union. + elif member is not None and isinstance(object_type, UnionType): + return self.check_union_call_expr(e, object_type, member) + ret_type, callee_type = self.check_call( + callee_type, + e.args, + e.arg_kinds, + e, + e.arg_names, + callable_node=e.callee, + callable_name=callable_name, + object_type=object_type, + ) + proper_callee = get_proper_type(callee_type) + if isinstance(e.callee, RefExpr) and isinstance(proper_callee, CallableType): + # Cache it for find_isinstance_check() + if proper_callee.type_guard is not None: + e.callee.type_guard = proper_callee.type_guard + if proper_callee.type_is is not None: + e.callee.type_is = proper_callee.type_is + return ret_type + + def check_union_call_expr(self, e: CallExpr, object_type: UnionType, member: str) -> Type: + """Type check calling a member expression where the base type is a union.""" + res: list[Type] = [] + for typ in flatten_nested_unions(object_type.relevant_items()): + # Member access errors are already reported when visiting the member expression. 
+ with self.msg.filter_errors(): + item = analyze_member_access( + member, + typ, + e, + is_lvalue=False, + is_super=False, + is_operator=False, + original_type=object_type, + chk=self.chk, + in_literal_context=self.is_literal_context(), + self_type=typ, + ) + narrowed = self.narrow_type_from_binder(e.callee, item, skip_non_overlapping=True) + if narrowed is None: + continue + callable_name = self.method_fullname(typ, member) + item_object_type = typ if callable_name else None + res.append( + self.check_call_expr_with_callee_type(narrowed, e, callable_name, item_object_type) + ) + return make_simplified_union(res) + + def check_call( + self, + callee: Type, + args: list[Expression], + arg_kinds: list[ArgKind], + context: Context, + arg_names: Sequence[str | None] | None = None, + callable_node: Expression | None = None, + callable_name: str | None = None, + object_type: Type | None = None, + original_type: Type | None = None, + ) -> tuple[Type, Type]: + """Type check a call. + + Also infer type arguments if the callee is a generic function. + + Return (result type, inferred callee type). + + Arguments: + callee: type of the called value + args: actual argument expressions + arg_kinds: contains nodes.ARG_* constant for each argument in args + describing whether the argument is positional, *arg, etc. + context: current expression context, used for inference. + arg_names: names of arguments (optional) + callable_node: associate the inferred callable type to this node, + if specified + callable_name: Fully-qualified name of the function/method to call, + or None if unavailable (examples: 'builtins.open', 'typing.Mapping.get') + object_type: If callable_name refers to a method, the type of the object + on which the method is being called + """ + callee = get_proper_type(callee) + + if isinstance(callee, CallableType): + if callee.variables: + overloaded = self.is_generic_decorator_overload_call(callee, args) + if overloaded is not None: + # Special casing for inline application of generic callables to overloads. + # Supporting general case would be tricky, but this should cover 95% of cases. + overloaded_result = self.handle_decorator_overload_call( + callee, overloaded, context + ) + if overloaded_result is not None: + return overloaded_result + + return self.check_callable_call( + callee, + args, + arg_kinds, + context, + arg_names, + callable_node, + callable_name, + object_type, + ) + elif isinstance(callee, Overloaded): + return self.check_overload_call( + callee, args, arg_kinds, arg_names, callable_name, object_type, context + ) + elif isinstance(callee, AnyType) or not self.chk.in_checked_function(): + return self.check_any_type_call(args, callee) + elif isinstance(callee, UnionType): + return self.check_union_call(callee, args, arg_kinds, arg_names, context) + elif isinstance(callee, Instance): + call_function = analyze_member_access( + "__call__", + callee, + context, + is_lvalue=False, + is_super=False, + is_operator=True, + original_type=original_type or callee, + chk=self.chk, + in_literal_context=self.is_literal_context(), + ) + callable_name = callee.type.fullname + ".__call__" + # Apply method signature hook, if one exists + call_function = self.transform_callee_type( + callable_name, call_function, args, arg_kinds, context, arg_names, callee + ) + result = self.check_call( + call_function, + args, + arg_kinds, + context, + arg_names, + callable_node, + callable_name, + callee, + ) + if callable_node: + # check_call() stored "call_function" as the type, which is incorrect. 
+ # Override the type. + self.chk.store_type(callable_node, callee) + return result + elif isinstance(callee, TypeVarType): + return self.check_call( + callee.upper_bound, args, arg_kinds, context, arg_names, callable_node + ) + elif isinstance(callee, TypeType): + item = self.analyze_type_type_callee(callee.item, context) + return self.check_call(item, args, arg_kinds, context, arg_names, callable_node) + elif isinstance(callee, TupleType): + return self.check_call( + tuple_fallback(callee), + args, + arg_kinds, + context, + arg_names, + callable_node, + callable_name, + object_type, + original_type=callee, + ) + elif isinstance(callee, UninhabitedType): + ret = UninhabitedType() + ret.ambiguous = callee.ambiguous + return callee, ret + else: + return self.msg.not_callable(callee, context), AnyType(TypeOfAny.from_error) + + def check_callable_call( + self, + callee: CallableType, + args: list[Expression], + arg_kinds: list[ArgKind], + context: Context, + arg_names: Sequence[str | None] | None, + callable_node: Expression | None, + callable_name: str | None, + object_type: Type | None, + ) -> tuple[Type, Type]: + """Type check a call that targets a callable value. + + See the docstring of check_call for more information. + """ + # Always unpack **kwargs before checking a call. + callee = callee.with_unpacked_kwargs().with_normalized_var_args() + if callable_name is None and callee.name: + callable_name = callee.name + ret_type = get_proper_type(callee.ret_type) + if callee.is_type_obj() and isinstance(ret_type, Instance): + callable_name = ret_type.type.fullname + if isinstance(callable_node, RefExpr) and callable_node.fullname in ENUM_BASES: + # An Enum() call that failed SemanticAnalyzerPass2.check_enum_call(). + return callee.ret_type, callee + + if ( + callee.is_type_obj() + and callee.type_object().is_protocol + # Exception for Type[...] + and not callee.from_type_type + ): + self.chk.fail( + message_registry.CANNOT_INSTANTIATE_PROTOCOL.format(callee.type_object().name), + context, + ) + elif ( + callee.is_type_obj() + and callee.type_object().is_abstract + # Exception for Type[...] + and not callee.from_type_type + and not callee.type_object().fallback_to_any + ): + type = callee.type_object() + # Determine whether the implicitly abstract attributes are functions with + # None-compatible return types. + abstract_attributes: dict[str, bool] = {} + for attr_name, abstract_status in type.abstract_attributes: + if abstract_status == IMPLICITLY_ABSTRACT: + abstract_attributes[attr_name] = self.can_return_none(type, attr_name) + else: + abstract_attributes[attr_name] = False + self.msg.cannot_instantiate_abstract_class( + callee.type_object().name, abstract_attributes, context + ) + + var_arg = callee.var_arg() + if var_arg and isinstance(var_arg.typ, UnpackType): + # It is hard to support multiple variadic unpacks (except for old-style *args: int), + # fail gracefully to avoid crashes later. 
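+            # ex: a call like `f(*xs, *ys)` where both unpacked arguments have
+            # variadic (non-fixed-length) tuple types is rejected below rather
+            # than crashing during inference.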
+ seen_unpack = False + for arg, arg_kind in zip(args, arg_kinds): + if arg_kind != ARG_STAR: + continue + arg_type = get_proper_type(self.accept(arg)) + if not isinstance(arg_type, TupleType) or any( + isinstance(t, UnpackType) for t in arg_type.items + ): + if seen_unpack: + self.msg.fail( + "Passing multiple variadic unpacks in a call is not supported", + context, + code=codes.CALL_ARG, + ) + return AnyType(TypeOfAny.from_error), callee + seen_unpack = True + + # This is tricky: return type may contain its own type variables, like in + # def [S] (S) -> def [T] (T) -> tuple[S, T], so we need to update their ids + # to avoid possible id clashes if this call itself appears in a generic + # function body. + ret_type = get_proper_type(callee.ret_type) + if isinstance(ret_type, CallableType) and ret_type.variables: + fresh_ret_type = freshen_all_functions_type_vars(callee.ret_type) + freeze_all_type_vars(fresh_ret_type) + callee = callee.copy_modified(ret_type=fresh_ret_type) + + if callee.is_generic(): + callee = freshen_function_type_vars(callee) + callee = self.infer_function_type_arguments_using_context(callee, context) + + formal_to_actual = map_actuals_to_formals( + arg_kinds, + arg_names, + callee.arg_kinds, + callee.arg_names, + lambda i: self.accept(args[i]), + ) + + if callee.is_generic(): + need_refresh = any( + isinstance(v, (ParamSpecType, TypeVarTupleType)) for v in callee.variables + ) + callee = self.infer_function_type_arguments( + callee, args, arg_kinds, arg_names, formal_to_actual, need_refresh, context + ) + if need_refresh: + # Argument kinds etc. may have changed due to + # ParamSpec or TypeVarTuple variables being replaced with an arbitrary + # number of arguments; recalculate actual-to-formal map + formal_to_actual = map_actuals_to_formals( + arg_kinds, + arg_names, + callee.arg_kinds, + callee.arg_names, + lambda i: self.accept(args[i]), + ) + + param_spec = callee.param_spec() + if ( + param_spec is not None + and arg_kinds == [ARG_STAR, ARG_STAR2] + and len(formal_to_actual) == 2 + ): + arg1 = self.accept(args[0]) + arg2 = self.accept(args[1]) + if ( + isinstance(arg1, ParamSpecType) + and isinstance(arg2, ParamSpecType) + and arg1.flavor == ParamSpecFlavor.ARGS + and arg2.flavor == ParamSpecFlavor.KWARGS + and arg1.id == arg2.id == param_spec.id + ): + return callee.ret_type, callee + + arg_types = self.infer_arg_types_in_context(callee, args, arg_kinds, formal_to_actual) + + self.check_argument_count( + callee, + arg_types, + arg_kinds, + arg_names, + formal_to_actual, + context, + object_type, + callable_name, + ) + + self.check_argument_types( + arg_types, arg_kinds, args, callee, formal_to_actual, context, object_type=object_type + ) + + if ( + callee.is_type_obj() + and (len(arg_types) == 1) + and is_equivalent(callee.ret_type, self.named_type("builtins.type")) + ): + callee = callee.copy_modified(ret_type=TypeType.make_normalized(arg_types[0])) + + if callable_node: + # Store the inferred callable type. 
+ self.chk.store_type(callable_node, callee) + + if callable_name and ( + (object_type is None and self.plugin.get_function_hook(callable_name)) + or (object_type is not None and self.plugin.get_method_hook(callable_name)) + ): + new_ret_type = self.apply_function_plugin( + callee, + arg_kinds, + arg_types, + arg_names, + formal_to_actual, + args, + callable_name, + object_type, + context, + ) + callee = callee.copy_modified(ret_type=new_ret_type) + return callee.ret_type, callee + + def can_return_none(self, type: TypeInfo, attr_name: str) -> bool: + """Is the given attribute a method with a None-compatible return type? + + Overloads are only checked if there is an implementation. + """ + if not state.strict_optional: + # If strict-optional is not set, is_subtype(NoneType(), T) is always True. + # So, we cannot do anything useful here in that case. + return False + for base in type.mro: + symnode = base.names.get(attr_name) + if symnode is None: + continue + node = symnode.node + if isinstance(node, OverloadedFuncDef): + node = node.impl + if isinstance(node, Decorator): + node = node.func + if isinstance(node, FuncDef): + if node.type is not None: + assert isinstance(node.type, CallableType) + return is_subtype(NoneType(), node.type.ret_type) + return False + + def analyze_type_type_callee(self, item: ProperType, context: Context) -> Type: + """Analyze the callee X in X(...) where X is Type[item]. + + Return a Y that we can pass to check_call(Y, ...). + """ + if isinstance(item, AnyType): + return AnyType(TypeOfAny.from_another_any, source_any=item) + if isinstance(item, Instance): + res = type_object_type(item.type, self.named_type) + if isinstance(res, CallableType): + res = res.copy_modified(from_type_type=True) + expanded = expand_type_by_instance(res, item) + if isinstance(expanded, CallableType): + # Callee of the form Type[...] should never be generic, only + # proper class objects can be. + expanded = expanded.copy_modified(variables=[]) + return expanded + if isinstance(item, UnionType): + return UnionType( + [ + self.analyze_type_type_callee(get_proper_type(tp), context) + for tp in item.relevant_items() + ], + item.line, + ) + if isinstance(item, TypeVarType): + # Pretend we're calling the typevar's upper bound, + # i.e. its constructor (a poor approximation for reality, + # but better than AnyType...), but replace the return type + # with typevar. + callee = self.analyze_type_type_callee(get_proper_type(item.upper_bound), context) + callee = get_proper_type(callee) + if isinstance(callee, CallableType): + callee = callee.copy_modified(ret_type=item) + elif isinstance(callee, Overloaded): + callee = Overloaded([c.copy_modified(ret_type=item) for c in callee.items]) + return callee + # We support Type of namedtuples but not of tuples in general + if isinstance(item, TupleType) and tuple_fallback(item).type.fullname != "builtins.tuple": + return self.analyze_type_type_callee(tuple_fallback(item), context) + if isinstance(item, TypedDictType): + return self.typeddict_callable_from_context(item) + + self.msg.unsupported_type_type(item, context) + return AnyType(TypeOfAny.from_error) + + def infer_arg_types_in_empty_context(self, args: list[Expression]) -> list[Type]: + """Infer argument expression types in an empty context. + + In short, we basically recurse on each argument without considering + in what context the argument was called. 
+ """ + res: list[Type] = [] + + for arg in args: + arg_type = self.accept(arg) + if has_erased_component(arg_type): + res.append(NoneType()) + else: + res.append(arg_type) + return res + + def infer_more_unions_for_recursive_type(self, type_context: Type) -> bool: + """Adjust type inference of unions if type context has a recursive type. + + Return the old state. The caller must assign it to type_state.infer_unions + afterwards. + + This is a hack to better support inference for recursive types. + + Note: This is performance-sensitive and must not be a context manager + until mypyc supports them better. + """ + old = type_state.infer_unions + if has_recursive_types(type_context): + type_state.infer_unions = True + return old + + def infer_arg_types_in_context( + self, + callee: CallableType, + args: list[Expression], + arg_kinds: list[ArgKind], + formal_to_actual: list[list[int]], + ) -> list[Type]: + """Infer argument expression types using a callable type as context. + + For example, if callee argument 2 has type List[int], infer the + argument expression with List[int] type context. + + Returns the inferred types of *actual arguments*. + """ + res: list[Type | None] = [None] * len(args) + + for i, actuals in enumerate(formal_to_actual): + for ai in actuals: + if not arg_kinds[ai].is_star(): + arg_type = callee.arg_types[i] + # When the outer context for a function call is known to be recursive, + # we solve type constraints inferred from arguments using unions instead + # of joins. This is a bit arbitrary, but in practice it works for most + # cases. A cleaner alternative would be to switch to single bin type + # inference, but this is a lot of work. + old = self.infer_more_unions_for_recursive_type(arg_type) + res[ai] = self.accept(args[ai], arg_type) + # We need to manually restore union inference state, ugh. + type_state.infer_unions = old + + # Fill in the rest of the argument types. + for i, t in enumerate(res): + if not t: + res[i] = self.accept(args[i]) + assert all(tp is not None for tp in res) + return cast(list[Type], res) + + def infer_function_type_arguments_using_context( + self, callable: CallableType, error_context: Context + ) -> CallableType: + """Unify callable return type to type context to infer type vars. + + For example, if the return type is set[t] where 't' is a type variable + of callable, and if the context is set[int], return callable modified + by substituting 't' with 'int'. + """ + ctx = self.type_context[-1] + if not ctx: + return callable + # The return type may have references to type metavariables that + # we are inferring right now. We must consider them as indeterminate + # and they are not potential results; thus we replace them with the + # special ErasedType type. On the other hand, class type variables are + # valid results. + erased_ctx = replace_meta_vars(ctx, ErasedType()) + ret_type = callable.ret_type + if is_overlapping_none(ret_type) and is_overlapping_none(ctx): + # If both the context and the return type are optional, unwrap the optional, + # since in 99% cases this is what a user expects. In other words, we replace + # Optional[T] <: Optional[int] + # with + # T <: int + # while the former would infer T <: Optional[int]. + ret_type = remove_optional(ret_type) + erased_ctx = remove_optional(erased_ctx) + # + # TODO: Instead of this hack and the one below, we need to use outer and + # inner contexts at the same time. This is however not easy because of two + # reasons: + # * We need to support constraints like [1 <: 2, 2 <: X], i.e. 
with variables + # on both sides. (This is not too hard.) + # * We need to update all the inference "infrastructure", so that all + # variables in an expression are inferred at the same time. + # (And this is hard, also we need to be careful with lambdas that require + # two passes.) + proper_ret = get_proper_type(ret_type) + if ( + isinstance(proper_ret, TypeVarType) + or isinstance(proper_ret, UnionType) + and all(isinstance(get_proper_type(u), TypeVarType) for u in proper_ret.items) + ): + # Another special case: the return type is a type variable. If it's unrestricted, + # we could infer a too general type for the type variable if we use context, + # and this could result in confusing and spurious type errors elsewhere. + # + # So we give up and just use function arguments for type inference, with just two + # exceptions: + # + # 1. If the context is a generic instance type, actually use it as context, as + # this *seems* to usually be the reasonable thing to do. + # + # See also github issues #462 and #360. + # + # 2. If the context is some literal type, we want to "propagate" that information + # down so that we infer a more precise type for literal expressions. For example, + # the expression `3` normally has an inferred type of `builtins.int`: but if it's + # in a literal context like below, we want it to infer `Literal[3]` instead. + # + # def expects_literal(x: Literal[3]) -> None: pass + # def identity(x: T) -> T: return x + # + # expects_literal(identity(3)) # Should type-check + # TODO: we may want to add similar exception if all arguments are lambdas, since + # in this case external context is almost everything we have. + if not is_generic_instance(ctx) and not is_literal_type_like(ctx): + return callable.copy_modified() + args = infer_type_arguments( + callable.variables, ret_type, erased_ctx, skip_unsatisfied=True + ) + # Only substitute non-Uninhabited and non-erased types. + new_args: list[Type | None] = [] + for arg in args: + if has_uninhabited_component(arg) or has_erased_component(arg): + new_args.append(None) + else: + new_args.append(arg) + # Don't show errors after we have only used the outer context for inference. + # We will use argument context to infer more variables. + return self.apply_generic_arguments( + callable, new_args, error_context, skip_unsatisfied=True + ) + + def infer_function_type_arguments( + self, + callee_type: CallableType, + args: list[Expression], + arg_kinds: list[ArgKind], + arg_names: Sequence[str | None] | None, + formal_to_actual: list[list[int]], + need_refresh: bool, + context: Context, + ) -> CallableType: + """Infer the type arguments for a generic callee type. + + Infer based on the types of arguments. + + Return a derived callable type that has the arguments applied. + """ + if self.chk.in_checked_function(): + # Disable type errors during type inference. There may be errors + # due to partial available context information at this time, but + # these errors can be safely ignored as the arguments will be + # inferred again later. 
+ with self.msg.filter_errors(): + arg_types = self.infer_arg_types_in_context( + callee_type, args, arg_kinds, formal_to_actual + ) + + arg_pass_nums = self.get_arg_infer_passes( + callee_type, args, arg_types, formal_to_actual, len(args) + ) + + pass1_args: list[Type | None] = [] + for i, arg in enumerate(arg_types): + if arg_pass_nums[i] > 1: + pass1_args.append(None) + else: + pass1_args.append(arg) + + inferred_args, _ = infer_function_type_arguments( + callee_type, + pass1_args, + arg_kinds, + arg_names, + formal_to_actual, + context=self.argument_infer_context(), + strict=self.chk.in_checked_function(), + ) + + if 2 in arg_pass_nums: + # Second pass of type inference. + (callee_type, inferred_args) = self.infer_function_type_arguments_pass2( + callee_type, + args, + arg_kinds, + arg_names, + formal_to_actual, + inferred_args, + need_refresh, + context, + ) + + if ( + callee_type.special_sig == "dict" + and len(inferred_args) == 2 + and (ARG_NAMED in arg_kinds or ARG_STAR2 in arg_kinds) + ): + # HACK: Infer str key type for dict(...) with keyword args. The type system + # can't represent this so we special case it, as this is a pretty common + # thing. This doesn't quite work with all possible subclasses of dict + # if they shuffle type variables around, as we assume that there is a 1-1 + # correspondence with dict type variables. This is a marginal issue and + # a little tricky to fix so it's left unfixed for now. + first_arg = get_proper_type(inferred_args[0]) + if isinstance(first_arg, (NoneType, UninhabitedType)): + inferred_args[0] = self.named_type("builtins.str") + elif not first_arg or not is_subtype(self.named_type("builtins.str"), first_arg): + self.chk.fail(message_registry.KEYWORD_ARGUMENT_REQUIRES_STR_KEY_TYPE, context) + + if not self.chk.options.old_type_inference and any( + a is None + or isinstance(get_proper_type(a), UninhabitedType) + or set(get_type_vars(a)) & set(callee_type.variables) + for a in inferred_args + ): + if need_refresh: + # Technically we need to refresh formal_to_actual after *each* inference pass, + # since each pass can expand ParamSpec or TypeVarTuple. Although such situations + # are very rare, not doing this can cause crashes. + formal_to_actual = map_actuals_to_formals( + arg_kinds, + arg_names, + callee_type.arg_kinds, + callee_type.arg_names, + lambda a: self.accept(args[a]), + ) + # If the regular two-phase inference didn't work, try inferring type + # variables while allowing for polymorphic solutions, i.e. for solutions + # potentially involving free variables. + # TODO: support the similar inference for return type context. + poly_inferred_args, free_vars = infer_function_type_arguments( + callee_type, + arg_types, + arg_kinds, + arg_names, + formal_to_actual, + context=self.argument_infer_context(), + strict=self.chk.in_checked_function(), + allow_polymorphic=True, + ) + poly_callee_type = self.apply_generic_arguments( + callee_type, poly_inferred_args, context + ) + # Try applying inferred polymorphic type if possible, e.g. Callable[[T], T] can + # be interpreted as def [T] (T) -> T, but dict[T, T] cannot be expressed. + applied = applytype.apply_poly(poly_callee_type, free_vars) + if applied is not None and all( + a is not None and not isinstance(get_proper_type(a), UninhabitedType) + for a in poly_inferred_args + ): + freeze_all_type_vars(applied) + return applied + # If it didn't work, erase free variables as uninhabited, to avoid confusing errors. 
+ unknown = UninhabitedType() + unknown.ambiguous = True + inferred_args = [ + ( + expand_type( + a, {v.id: unknown for v in list(callee_type.variables) + free_vars} + ) + if a is not None + else None + ) + for a in poly_inferred_args + ] + else: + # In dynamically typed functions use implicit 'Any' types for + # type variables. + inferred_args = [AnyType(TypeOfAny.unannotated)] * len(callee_type.variables) + return self.apply_inferred_arguments(callee_type, inferred_args, context) + + def infer_function_type_arguments_pass2( + self, + callee_type: CallableType, + args: list[Expression], + arg_kinds: list[ArgKind], + arg_names: Sequence[str | None] | None, + formal_to_actual: list[list[int]], + old_inferred_args: Sequence[Type | None], + need_refresh: bool, + context: Context, + ) -> tuple[CallableType, list[Type | None]]: + """Perform second pass of generic function type argument inference. + + The second pass is needed for arguments with types such as Callable[[T], S], + where both T and S are type variables, when the actual argument is a + lambda with inferred types. The idea is to infer the type variable T + in the first pass (based on the types of other arguments). This lets + us infer the argument and return type of the lambda expression and + thus also the type variable S in this second pass. + + Return (the callee with type vars applied, inferred actual arg types). + """ + # None or erased types in inferred types mean that there was not enough + # information to infer the argument. Replace them with None values so + # that they are not applied yet below. + inferred_args = list(old_inferred_args) + for i, arg in enumerate(get_proper_types(inferred_args)): + if isinstance(arg, (NoneType, UninhabitedType)) or has_erased_component(arg): + inferred_args[i] = None + callee_type = self.apply_generic_arguments(callee_type, inferred_args, context) + + if not callee_type.is_generic(): + # Fast path, second pass can't give new information. + return callee_type, [] + + if need_refresh: + formal_to_actual = map_actuals_to_formals( + arg_kinds, + arg_names, + callee_type.arg_kinds, + callee_type.arg_names, + lambda a: self.accept(args[a]), + ) + + # Same as during first pass, disable type errors (we still have partial context). + with self.msg.filter_errors(): + arg_types = self.infer_arg_types_in_context( + callee_type, args, arg_kinds, formal_to_actual + ) + + inferred_args, _ = infer_function_type_arguments( + callee_type, + arg_types, + arg_kinds, + arg_names, + formal_to_actual, + context=self.argument_infer_context(), + ) + + return callee_type, inferred_args + + def argument_infer_context(self) -> ArgumentInferContext: + if self._arg_infer_context_cache is None: + self._arg_infer_context_cache = ArgumentInferContext( + self.chk.named_type("typing.Mapping"), self.chk.named_type("typing.Iterable") + ) + return self._arg_infer_context_cache + + def get_arg_infer_passes( + self, + callee: CallableType, + args: list[Expression], + arg_types: list[Type], + formal_to_actual: list[list[int]], + num_actuals: int, + ) -> list[int]: + """Return pass numbers for args for two-pass argument type inference. + + For each actual, the pass number is either 1 (first pass) or 2 (second + pass). + + Two-pass argument type inference primarily lets us infer types of + lambdas more effectively. 
+ """ + res = [1] * num_actuals + for i, arg in enumerate(callee.arg_types): + skip_param_spec = False + p_formal = get_proper_type(callee.arg_types[i]) + if isinstance(p_formal, CallableType) and p_formal.param_spec(): + for j in formal_to_actual[i]: + p_actual = get_proper_type(arg_types[j]) + # This is an exception from the usual logic where we put generic Callable + # arguments in the second pass. If we have a non-generic actual, it is + # likely to infer good constraints, for example if we have: + # def run(Callable[P, None], *args: P.args, **kwargs: P.kwargs) -> None: ... + # def test(x: int, y: int) -> int: ... + # run(test, 1, 2) + # we will use `test` for inference, since it will allow to infer also + # argument *names* for P <: [x: int, y: int]. + if isinstance(p_actual, Instance): + call_method = find_member("__call__", p_actual, p_actual, is_operator=True) + if call_method is not None: + p_actual = get_proper_type(call_method) + if ( + isinstance(p_actual, CallableType) + and not p_actual.variables + and not isinstance(args[j], LambdaExpr) + ): + skip_param_spec = True + break + if not skip_param_spec and arg.accept(ArgInferSecondPassQuery()): + for j in formal_to_actual[i]: + res[j] = 2 + return res + + def apply_inferred_arguments( + self, callee_type: CallableType, inferred_args: Sequence[Type | None], context: Context + ) -> CallableType: + """Apply inferred values of type arguments to a generic function. + + Inferred_args contains the values of function type arguments. + """ + # Report error if some of the variables could not be solved. In that + # case assume that all variables have type Any to avoid extra + # bogus error messages. + for inferred_type, tv in zip(inferred_args, callee_type.variables): + if not inferred_type or has_erased_component(inferred_type): + # Could not infer a non-trivial type for a type variable. + self.msg.could_not_infer_type_arguments(callee_type, tv, context) + inferred_args = [AnyType(TypeOfAny.from_error)] * len(inferred_args) + # Apply the inferred types to the function type. In this case the + # return type must be CallableType, since we give the right number of type + # arguments. + return self.apply_generic_arguments(callee_type, inferred_args, context) + + def check_argument_count( + self, + callee: CallableType, + actual_types: list[Type], + actual_kinds: list[ArgKind], + actual_names: Sequence[str | None] | None, + formal_to_actual: list[list[int]], + context: Context | None, + object_type: Type | None = None, + callable_name: str | None = None, + ) -> bool: + """Check that there is a value for all required arguments to a function. + + Also check that there are no duplicate values for arguments. Report found errors + using 'messages' if it's not None. If 'messages' is given, 'context' must also be given. + + Return False if there were any errors. Otherwise return True + """ + if context is None: + # Avoid "is None" checks + context = TempNode(AnyType(TypeOfAny.special_form)) + + # TODO(jukka): We could return as soon as we find an error if messages is None. + + # Collect dict of all actual arguments matched to formal arguments, with occurrence count + all_actuals: dict[int, int] = {} + for actuals in formal_to_actual: + for a in actuals: + all_actuals[a] = all_actuals.get(a, 0) + 1 + + ok, is_unexpected_arg_error = self.check_for_extra_actual_arguments( + callee, actual_types, actual_kinds, actual_names, all_actuals, context + ) + + # Check for too many or few values for formals. 
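+        # ex: a required positional formal with no mapped actual produces a
+        # "too few arguments" error, while a missing keyword-only formal produces
+        # a "missing named argument" error (see the branches below).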
+ for i, kind in enumerate(callee.arg_kinds): + mapped_args = formal_to_actual[i] + if kind.is_required() and not mapped_args and not is_unexpected_arg_error: + # No actual for a mandatory formal + if kind.is_positional(): + self.msg.too_few_arguments(callee, context, actual_names) + if object_type and callable_name and "." in callable_name: + self.missing_classvar_callable_note(object_type, callable_name, context) + else: + argname = callee.arg_names[i] or "?" + self.msg.missing_named_argument(callee, context, argname) + ok = False + elif not kind.is_star() and is_duplicate_mapping( + mapped_args, actual_types, actual_kinds + ): + if self.chk.in_checked_function() or isinstance( + get_proper_type(actual_types[mapped_args[0]]), TupleType + ): + self.msg.duplicate_argument_value(callee, i, context) + ok = False + elif ( + kind.is_named() + and mapped_args + and actual_kinds[mapped_args[0]] not in [nodes.ARG_NAMED, nodes.ARG_STAR2] + ): + # Positional argument when expecting a keyword argument. + self.msg.too_many_positional_arguments(callee, context) + ok = False + elif callee.param_spec() is not None: + if not mapped_args and callee.special_sig != "partial": + self.msg.too_few_arguments(callee, context, actual_names) + ok = False + elif len(mapped_args) > 1: + paramspec_entries = sum( + isinstance(get_proper_type(actual_types[k]), ParamSpecType) + for k in mapped_args + ) + if actual_kinds[mapped_args[0]] == nodes.ARG_STAR and paramspec_entries > 1: + self.msg.fail("ParamSpec.args should only be passed once", context) + ok = False + if actual_kinds[mapped_args[0]] == nodes.ARG_STAR2 and paramspec_entries > 1: + self.msg.fail("ParamSpec.kwargs should only be passed once", context) + ok = False + return ok + + def check_for_extra_actual_arguments( + self, + callee: CallableType, + actual_types: list[Type], + actual_kinds: list[ArgKind], + actual_names: Sequence[str | None] | None, + all_actuals: dict[int, int], + context: Context, + ) -> tuple[bool, bool]: + """Check for extra actual arguments. + + Return tuple (was everything ok, + was there an extra keyword argument error [used to avoid duplicate errors]). + """ + + is_unexpected_arg_error = False # Keep track of errors to avoid duplicate errors + ok = True # False if we've found any error + + for i, kind in enumerate(actual_kinds): + if ( + i not in all_actuals + and + # We accept the other iterables than tuple (including Any) + # as star arguments because they could be empty, resulting no arguments. + (kind != nodes.ARG_STAR or is_non_empty_tuple(actual_types[i])) + and + # Accept all types for double-starred arguments, because they could be empty + # dictionaries and we can't tell it from their types + kind != nodes.ARG_STAR2 + ): + # Extra actual: not matched by a formal argument. + ok = False + if kind != nodes.ARG_NAMED: + self.msg.too_many_arguments(callee, context) + else: + assert actual_names, "Internal error: named kinds without names given" + act_name = actual_names[i] + assert act_name is not None + act_type = actual_types[i] + self.msg.unexpected_keyword_argument(callee, act_name, act_type, context) + is_unexpected_arg_error = True + elif ( + kind == nodes.ARG_STAR and nodes.ARG_STAR not in callee.arg_kinds + ) or kind == nodes.ARG_STAR2: + actual_type = get_proper_type(actual_types[i]) + if isinstance(actual_type, (TupleType, TypedDictType)): + if all_actuals.get(i, 0) < len(actual_type.items): + # Too many tuple/dict items as some did not match. 
+ if kind != nodes.ARG_STAR2 or not isinstance(actual_type, TypedDictType): + self.msg.too_many_arguments(callee, context) + else: + self.msg.too_many_arguments_from_typed_dict( + callee, actual_type, context + ) + is_unexpected_arg_error = True + ok = False + # *args/**kwargs can be applied even if the function takes a fixed + # number of positional arguments. This may succeed at runtime. + + return ok, is_unexpected_arg_error + + def missing_classvar_callable_note( + self, object_type: Type, callable_name: str, context: Context + ) -> None: + if isinstance(object_type, ProperType) and isinstance(object_type, Instance): + _, var_name = callable_name.rsplit(".", maxsplit=1) + node = object_type.type.get(var_name) + if node is not None and isinstance(node.node, Var): + if not node.node.is_inferred and not node.node.is_classvar: + self.msg.note( + f'"{var_name}" is considered instance variable,' + " to make it class variable use ClassVar[...]", + context, + ) + + def check_argument_types( + self, + arg_types: list[Type], + arg_kinds: list[ArgKind], + args: list[Expression], + callee: CallableType, + formal_to_actual: list[list[int]], + context: Context, + check_arg: ArgChecker | None = None, + object_type: Type | None = None, + ) -> None: + """Check argument types against a callable type. + + Report errors if the argument types are not compatible. + + The check_call docstring describes some of the arguments. + """ + check_arg = check_arg or self.check_arg + # Keep track of consumed tuple *arg items. + mapper = ArgTypeExpander(self.argument_infer_context()) + + for arg_type, arg_kind in zip(arg_types, arg_kinds): + arg_type = get_proper_type(arg_type) + if arg_kind == nodes.ARG_STAR and not self.is_valid_var_arg(arg_type): + self.msg.invalid_var_arg(arg_type, context) + if arg_kind == nodes.ARG_STAR2 and not self.is_valid_keyword_var_arg(arg_type): + is_mapping = is_subtype( + arg_type, self.chk.named_type("_typeshed.SupportsKeysAndGetItem") + ) + self.msg.invalid_keyword_var_arg(arg_type, is_mapping, context) + + for i, actuals in enumerate(formal_to_actual): + orig_callee_arg_type = get_proper_type(callee.arg_types[i]) + + # Checking the case that we have more than one item but the first argument + # is an unpack, so this would be something like: + # [Tuple[Unpack[Ts]], int] + # + # In this case we have to check everything together, we do this by re-unifying + # the suffices to the tuple, e.g. 
a single actual like + # Tuple[Unpack[Ts], int] + expanded_tuple = False + actual_kinds = [arg_kinds[a] for a in actuals] + if len(actuals) > 1: + p_actual_type = get_proper_type(arg_types[actuals[0]]) + if ( + isinstance(p_actual_type, TupleType) + and len(p_actual_type.items) == 1 + and isinstance(p_actual_type.items[0], UnpackType) + and actual_kinds == [nodes.ARG_STAR] + [nodes.ARG_POS] * (len(actuals) - 1) + ): + actual_types = [p_actual_type.items[0]] + [arg_types[a] for a in actuals[1:]] + if isinstance(orig_callee_arg_type, UnpackType): + p_callee_type = get_proper_type(orig_callee_arg_type.type) + if isinstance(p_callee_type, TupleType): + assert p_callee_type.items + callee_arg_types = p_callee_type.items + callee_arg_kinds = [nodes.ARG_STAR] + [nodes.ARG_POS] * ( + len(p_callee_type.items) - 1 + ) + expanded_tuple = True + + if not expanded_tuple: + actual_types = [arg_types[a] for a in actuals] + if isinstance(orig_callee_arg_type, UnpackType): + unpacked_type = get_proper_type(orig_callee_arg_type.type) + if isinstance(unpacked_type, TupleType): + inner_unpack_index = find_unpack_in_list(unpacked_type.items) + if inner_unpack_index is None: + callee_arg_types = unpacked_type.items + callee_arg_kinds = [ARG_POS] * len(actuals) + else: + inner_unpack = unpacked_type.items[inner_unpack_index] + assert isinstance(inner_unpack, UnpackType) + inner_unpacked_type = get_proper_type(inner_unpack.type) + if isinstance(inner_unpacked_type, TypeVarTupleType): + # This branch mimics the expanded_tuple case above but for + # the case where caller passed a single * unpacked tuple argument. + callee_arg_types = unpacked_type.items + callee_arg_kinds = [ + ARG_POS if i != inner_unpack_index else ARG_STAR + for i in range(len(unpacked_type.items)) + ] + else: + # We assume heterogeneous tuples are desugared earlier. + assert isinstance(inner_unpacked_type, Instance) + assert inner_unpacked_type.type.fullname == "builtins.tuple" + callee_arg_types = ( + unpacked_type.items[:inner_unpack_index] + + [inner_unpacked_type.args[0]] + * (len(actuals) - len(unpacked_type.items) + 1) + + unpacked_type.items[inner_unpack_index + 1 :] + ) + callee_arg_kinds = [ARG_POS] * len(actuals) + elif isinstance(unpacked_type, TypeVarTupleType): + callee_arg_types = [orig_callee_arg_type] + callee_arg_kinds = [ARG_STAR] + else: + assert isinstance(unpacked_type, Instance) + assert unpacked_type.type.fullname == "builtins.tuple" + callee_arg_types = [unpacked_type.args[0]] * len(actuals) + callee_arg_kinds = [ARG_POS] * len(actuals) + else: + callee_arg_types = [orig_callee_arg_type] * len(actuals) + callee_arg_kinds = [callee.arg_kinds[i]] * len(actuals) + + assert len(actual_types) == len(actuals) == len(actual_kinds) + + if len(callee_arg_types) != len(actual_types): + if len(actual_types) > len(callee_arg_types): + self.chk.msg.too_many_arguments(callee, context) + else: + self.chk.msg.too_few_arguments(callee, context, None) + continue + + assert len(callee_arg_types) == len(actual_types) + assert len(callee_arg_types) == len(callee_arg_kinds) + for actual, actual_type, actual_kind, callee_arg_type, callee_arg_kind in zip( + actuals, actual_types, actual_kinds, callee_arg_types, callee_arg_kinds + ): + # Check that a *arg is valid as varargs. 
+ expanded_actual = mapper.expand_actual_type( + actual_type, + actual_kind, + callee.arg_names[i], + callee_arg_kind, + allow_unpack=isinstance(callee_arg_type, UnpackType), + ) + check_arg( + expanded_actual, + actual_type, + actual_kind, + callee_arg_type, + actual + 1, + i + 1, + callee, + object_type, + args[actual], + context, + ) + + def check_arg( + self, + caller_type: Type, + original_caller_type: Type, + caller_kind: ArgKind, + callee_type: Type, + n: int, + m: int, + callee: CallableType, + object_type: Type | None, + context: Context, + outer_context: Context, + ) -> None: + """Check the type of a single argument in a call.""" + caller_type = get_proper_type(caller_type) + original_caller_type = get_proper_type(original_caller_type) + callee_type = get_proper_type(callee_type) + + if isinstance(caller_type, DeletedType): + self.msg.deleted_as_rvalue(caller_type, context) + # Only non-abstract non-protocol class can be given where Type[...] is expected... + elif self.has_abstract_type_part(caller_type, callee_type): + self.msg.concrete_only_call(callee_type, context) + elif not is_subtype(caller_type, callee_type, options=self.chk.options): + error = self.msg.incompatible_argument( + n, + m, + callee, + original_caller_type, + caller_kind, + object_type=object_type, + context=context, + outer_context=outer_context, + ) + if not caller_kind.is_star(): + # For *args and **kwargs this note would be incorrect - we're comparing + # iterable/mapping type with union of relevant arg types. + self.msg.incompatible_argument_note( + original_caller_type, callee_type, context, parent_error=error + ) + if not self.msg.prefer_simple_messages(): + self.chk.check_possible_missing_await( + caller_type, callee_type, context, error.code + ) + + def check_overload_call( + self, + callee: Overloaded, + args: list[Expression], + arg_kinds: list[ArgKind], + arg_names: Sequence[str | None] | None, + callable_name: str | None, + object_type: Type | None, + context: Context, + ) -> tuple[Type, Type]: + """Checks a call to an overloaded function.""" + # Normalize unpacked kwargs before checking the call. + callee = callee.with_unpacked_kwargs() + arg_types = self.infer_arg_types_in_empty_context(args) + # Step 1: Filter call targets to remove ones where the argument counts don't match + plausible_targets = self.plausible_overload_call_targets( + arg_types, arg_kinds, arg_names, callee + ) + + # Step 2: If the arguments contain a union, we try performing union math first, + # instead of picking the first matching overload. + # This is because picking the first overload often ends up being too greedy: + # for example, when we have a fallback alternative that accepts an unrestricted + # typevar. See https://github.com/python/mypy/issues/4063 for related discussion. + erased_targets: list[CallableType] | None = None + inferred_types: list[Type] | None = None + unioned_result: tuple[Type, Type] | None = None + + # Determine whether we need to encourage union math. This should be generally safe, + # as union math infers better results in the vast majority of cases, but it is very + # computationally intensive. + none_type_var_overlap = self.possible_none_type_var_overlap(arg_types, plausible_targets) + union_interrupted = False # did we try all union combinations? 
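+        # For example (illustrative, hypothetical code):
+        #     @overload
+        #     def f(x: int) -> int: ...
+        #     @overload
+        #     def f(x: str) -> str: ...
+        #     x: int | str
+        #     f(x)
+        # Union math checks f separately with int and with str and unions the
+        # results to int | str, instead of greedily committing to a single
+        # overload item.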
+ if any(self.real_union(arg) for arg in arg_types): + try: + with self.msg.filter_errors(): + unioned_return = self.union_overload_result( + plausible_targets, + args, + arg_types, + arg_kinds, + arg_names, + callable_name, + object_type, + none_type_var_overlap, + context, + ) + except TooManyUnions: + union_interrupted = True + else: + # Record if we succeeded. Next we need to see if maybe normal procedure + # gives a narrower type. + if unioned_return: + returns = [u[0] for u in unioned_return] + inferred_types = [u[1] for u in unioned_return] + # Note that we use `combine_function_signatures` instead of just returning + # a union of inferred callables because for example a call + # Union[int -> int, str -> str](Union[int, str]) is invalid and + # we don't want to introduce internal inconsistencies. + unioned_result = ( + make_simplified_union(returns, context.line, context.column), + self.combine_function_signatures(get_proper_types(inferred_types)), + ) + + # Step 3: We try checking each branch one-by-one. + inferred_result = self.infer_overload_return_type( + plausible_targets, + args, + arg_types, + arg_kinds, + arg_names, + callable_name, + object_type, + context, + ) + # If any of checks succeed, perform deprecation tests and stop early. + if inferred_result is not None and unioned_result is not None: + # Both unioned and direct checks succeeded, choose the more precise type. + if ( + is_subtype(inferred_result[0], unioned_result[0]) + and not isinstance(get_proper_type(inferred_result[0]), AnyType) + and not none_type_var_overlap + ): + unioned_result = None + else: + inferred_result = None + if unioned_result is not None: + if inferred_types is not None: + for inferred_type in inferred_types: + if isinstance(c := get_proper_type(inferred_type), CallableType): + self.chk.warn_deprecated(c.definition, context) + return unioned_result + if inferred_result is not None: + if isinstance(c := get_proper_type(inferred_result[1]), CallableType): + self.chk.warn_deprecated(c.definition, context) + return inferred_result + + # Step 4: Failure. At this point, we know there is no match. We fall back to trying + # to find a somewhat plausible overload target using the erased types + # so we can produce a nice error message. + # + # For example, suppose the user passes a value of type 'List[str]' into an + # overload with signatures f(x: int) -> int and f(x: List[int]) -> List[int]. + # + # Neither alternative matches, but we can guess the user probably wants the + # second one. + erased_targets = self.overload_erased_call_targets( + plausible_targets, arg_types, arg_kinds, arg_names, args, context + ) + + # Step 5: We try and infer a second-best alternative if possible. If not, fall back + # to using 'Any'. + if len(erased_targets) > 0: + # Pick the first plausible erased target as the fallback + # TODO: Adjust the error message here to make it clear there was no match. + # In order to do this, we need to find a clean way of associating + # a note with whatever error message 'self.check_call' will generate. + # In particular, the note's line and column numbers need to be the same + # as the error's. 
+ target: Type = erased_targets[0] + else: + # There was no plausible match: give up + target = AnyType(TypeOfAny.from_error) + if not is_operator_method(callable_name): + code = None + else: + code = codes.OPERATOR + self.msg.no_variant_matches_arguments(callee, arg_types, context, code=code) + + result = self.check_call( + target, + args, + arg_kinds, + context, + arg_names, + callable_name=callable_name, + object_type=object_type, + ) + # Do not show the extra error if the union math was forced. + if union_interrupted and not none_type_var_overlap: + self.chk.fail(message_registry.TOO_MANY_UNION_COMBINATIONS, context) + return result + + def plausible_overload_call_targets( + self, + arg_types: list[Type], + arg_kinds: list[ArgKind], + arg_names: Sequence[str | None] | None, + overload: Overloaded, + ) -> list[CallableType]: + """Returns all overload call targets that having matching argument counts. + + If the given args contains a star-arg (*arg or **kwarg argument, except for + ParamSpec), this method will ensure all star-arg overloads appear at the start + of the list, instead of their usual location. + + The only exception is if the starred argument is something like a Tuple or a + NamedTuple, which has a definitive "shape". If so, we don't move the corresponding + alternative to the front since we can infer a more precise match using the original + order.""" + + def has_shape(typ: Type) -> bool: + typ = get_proper_type(typ) + return isinstance(typ, (TupleType, TypedDictType)) or ( + isinstance(typ, Instance) and typ.type.is_named_tuple + ) + + matches: list[CallableType] = [] + star_matches: list[CallableType] = [] + + args_have_var_arg = False + args_have_kw_arg = False + for kind, typ in zip(arg_kinds, arg_types): + if kind == ARG_STAR and not has_shape(typ): + args_have_var_arg = True + if kind == ARG_STAR2 and not has_shape(typ): + args_have_kw_arg = True + + for typ in overload.items: + formal_to_actual = map_actuals_to_formals( + arg_kinds, arg_names, typ.arg_kinds, typ.arg_names, lambda i: arg_types[i] + ) + with self.msg.filter_errors(): + if typ.param_spec() is not None: + # ParamSpec can be expanded in a lot of different ways. We may try + # to expand it here instead, but picking an impossible overload + # is safe: it will be filtered out later. + # Unlike other var-args signatures, ParamSpec produces essentially + # a fixed signature, so there's no need to push them to the top. + matches.append(typ) + elif self.check_argument_count( + typ, arg_types, arg_kinds, arg_names, formal_to_actual, None + ): + if args_have_var_arg and typ.is_var_arg: + star_matches.append(typ) + elif args_have_kw_arg and typ.is_kw_arg: + star_matches.append(typ) + else: + matches.append(typ) + + return star_matches + matches + + def infer_overload_return_type( + self, + plausible_targets: list[CallableType], + args: list[Expression], + arg_types: list[Type], + arg_kinds: list[ArgKind], + arg_names: Sequence[str | None] | None, + callable_name: str | None, + object_type: Type | None, + context: Context, + ) -> tuple[Type, Type] | None: + """Attempts to find the first matching callable from the given list. + + If a match is found, returns a tuple containing the result type and the inferred + callee type. (This tuple is meant to be eventually returned by check_call.) + If multiple targets match due to ambiguous Any parameters, returns (AnyType, AnyType). + If no targets match, returns None. + + Assumes all of the given targets have argument counts compatible with the caller. 
+ """ + + matches: list[CallableType] = [] + return_types: list[Type] = [] + inferred_types: list[Type] = [] + args_contain_any = any(map(has_any_type, arg_types)) + type_maps: list[dict[Expression, Type]] = [] + + for typ in plausible_targets: + assert self.msg is self.chk.msg + with self.msg.filter_errors() as w: + with self.chk.local_type_map as m: + ret_type, infer_type = self.check_call( + callee=typ, + args=args, + arg_kinds=arg_kinds, + arg_names=arg_names, + context=context, + callable_name=callable_name, + object_type=object_type, + ) + is_match = not w.has_new_errors() + if is_match: + # Return early if possible; otherwise record info, so we can + # check for ambiguity due to 'Any' below. + if not args_contain_any: + self.chk.store_types(m) + return ret_type, infer_type + p_infer_type = get_proper_type(infer_type) + if isinstance(p_infer_type, CallableType): + # Prefer inferred types if possible, this will avoid false triggers for + # Any-ambiguity caused by arguments with Any passed to generic overloads. + matches.append(p_infer_type) + else: + matches.append(typ) + return_types.append(ret_type) + inferred_types.append(infer_type) + type_maps.append(m) + + if not matches: + return None + elif any_causes_overload_ambiguity(matches, return_types, arg_types, arg_kinds, arg_names): + # An argument of type or containing the type 'Any' caused ambiguity. + # We try returning a precise type if we can. If not, we give up and just return 'Any'. + if all_same_types(return_types): + self.chk.store_types(type_maps[0]) + return return_types[0], inferred_types[0] + elif all_same_types([erase_type(typ) for typ in return_types]): + self.chk.store_types(type_maps[0]) + return erase_type(return_types[0]), erase_type(inferred_types[0]) + else: + return self.check_call( + callee=AnyType(TypeOfAny.special_form), + args=args, + arg_kinds=arg_kinds, + arg_names=arg_names, + context=context, + callable_name=callable_name, + object_type=object_type, + ) + else: + # Success! No ambiguity; return the first match. + self.chk.store_types(type_maps[0]) + return return_types[0], inferred_types[0] + + def overload_erased_call_targets( + self, + plausible_targets: list[CallableType], + arg_types: list[Type], + arg_kinds: list[ArgKind], + arg_names: Sequence[str | None] | None, + args: list[Expression], + context: Context, + ) -> list[CallableType]: + """Returns a list of all targets that match the caller after erasing types. + + Assumes all of the given targets have argument counts compatible with the caller. + """ + matches: list[CallableType] = [] + for typ in plausible_targets: + if self.erased_signature_similarity( + arg_types, arg_kinds, arg_names, args, typ, context + ): + matches.append(typ) + return matches + + def possible_none_type_var_overlap( + self, arg_types: list[Type], plausible_targets: list[CallableType] + ) -> bool: + """Heuristic to determine whether we need to try forcing union math. + + This is needed to avoid greedy type variable match in situations like this: + @overload + def foo(x: None) -> None: ... + @overload + def foo(x: T) -> list[T]: ... + + x: int | None + foo(x) + we want this call to infer list[int] | None, not list[int | None]. 
+        """
+        if not plausible_targets or not arg_types:
+            return False
+        has_optional_arg = False
+        for arg_type in get_proper_types(arg_types):
+            if not isinstance(arg_type, UnionType):
+                continue
+            for item in get_proper_types(arg_type.items):
+                if isinstance(item, NoneType):
+                    has_optional_arg = True
+                    break
+        if not has_optional_arg:
+            return False
+
+        min_prefix = min(len(c.arg_types) for c in plausible_targets)
+        for i in range(min_prefix):
+            if any(
+                isinstance(get_proper_type(c.arg_types[i]), NoneType) for c in plausible_targets
+            ) and any(
+                isinstance(get_proper_type(c.arg_types[i]), TypeVarType) for c in plausible_targets
+            ):
+                return True
+        return False
+
+    def union_overload_result(
+        self,
+        plausible_targets: list[CallableType],
+        args: list[Expression],
+        arg_types: list[Type],
+        arg_kinds: list[ArgKind],
+        arg_names: Sequence[str | None] | None,
+        callable_name: str | None,
+        object_type: Type | None,
+        none_type_var_overlap: bool,
+        context: Context,
+        level: int = 0,
+    ) -> list[tuple[Type, Type]] | None:
+        """Accepts a list of overload signatures and attempts to match calls by destructuring
+        the first union.
+
+        Return a list of (<return type>, <inferred variant type>) pairs if the call succeeds for
+        every item of the destructured union. Returns None if there is no match.
+        """
+        # Step 1: If we are already too deep, then stop immediately. Otherwise mypy might
+        # hang for long time because of a weird overload call. The caller will get
+        # the exception and generate an appropriate note message, if needed.
+        if level >= MAX_UNIONS:
+            raise TooManyUnions
+
+        # Step 2: Find position of the first union in arguments. Return the normal inferred
+        # type if no more unions left.
+        for idx, typ in enumerate(arg_types):
+            if self.real_union(typ):
+                break
+        else:
+            # No unions in args, just fall back to normal inference
+            with self.type_overrides_set(args, arg_types):
+                res = self.infer_overload_return_type(
+                    plausible_targets,
+                    args,
+                    arg_types,
+                    arg_kinds,
+                    arg_names,
+                    callable_name,
+                    object_type,
+                    context,
+                )
+            if res is not None:
+                return [res]
+            return None
+
+        # Step 3: Try a direct match before splitting to avoid unnecessary union splits
+        # and save performance.
+        if not none_type_var_overlap:
+            with self.type_overrides_set(args, arg_types):
+                direct = self.infer_overload_return_type(
+                    plausible_targets,
+                    args,
+                    arg_types,
+                    arg_kinds,
+                    arg_names,
+                    callable_name,
+                    object_type,
+                    context,
+                )
+            if direct is not None and not isinstance(
+                get_proper_type(direct[0]), (UnionType, AnyType)
+            ):
+                # We only return non-unions soon, to avoid greedy match.
+                return [direct]
+
+        # Step 4: Split the first remaining union type in arguments into items and
+        # try to match each item individually (recursive).
+        first_union = get_proper_type(arg_types[idx])
+        assert isinstance(first_union, UnionType)
+        res_items = []
+        for item in first_union.relevant_items():
+            new_arg_types = arg_types.copy()
+            new_arg_types[idx] = item
+            sub_result = self.union_overload_result(
+                plausible_targets,
+                args,
+                new_arg_types,
+                arg_kinds,
+                arg_names,
+                callable_name,
+                object_type,
+                none_type_var_overlap,
+                context,
+                level + 1,
+            )
+            if sub_result is not None:
+                res_items.extend(sub_result)
+            else:
+                # Some item doesn't match, return soon.
+                return None
+
+        # Step 5: If splitting succeeded, then filter out duplicate items before returning.
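+        # (Illustrative note: two different union items can produce the same
+        # (return type, inferred callee) pair, so only the first occurrence of
+        # each pair is kept below.)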
+ seen: set[tuple[Type, Type]] = set() + result = [] + for pair in res_items: + if pair not in seen: + seen.add(pair) + result.append(pair) + return result + + def real_union(self, typ: Type) -> bool: + typ = get_proper_type(typ) + return isinstance(typ, UnionType) and len(typ.relevant_items()) > 1 + + @contextmanager + def type_overrides_set( + self, exprs: Sequence[Expression], overrides: Sequence[Type] + ) -> Iterator[None]: + """Set _temporary_ type overrides for given expressions.""" + assert len(exprs) == len(overrides) + for expr, typ in zip(exprs, overrides): + self.type_overrides[expr] = typ + try: + yield + finally: + for expr in exprs: + del self.type_overrides[expr] + + def combine_function_signatures(self, types: list[ProperType]) -> AnyType | CallableType: + """Accepts a list of function signatures and attempts to combine them together into a + new CallableType consisting of the union of all of the given arguments and return types. + + If there is at least one non-callable type, return Any (this can happen if there is + an ambiguity because of Any in arguments). + """ + assert types, "Trying to merge no callables" + if not all(isinstance(c, CallableType) for c in types): + return AnyType(TypeOfAny.special_form) + callables = cast("list[CallableType]", types) + if len(callables) == 1: + return callables[0] + + # Note: we are assuming here that if a user uses some TypeVar 'T' in + # two different functions, they meant for that TypeVar to mean the + # same thing. + # + # This function will make sure that all instances of that TypeVar 'T' + # refer to the same underlying TypeVarType objects to simplify the union-ing + # logic below. + # + # (If the user did *not* mean for 'T' to be consistently bound to the + # same type in their overloads, well, their code is probably too + # confusing and ought to be re-written anyways.) + callables, variables = merge_typevars_in_callables_by_name(callables) + + new_args: list[list[Type]] = [[] for _ in range(len(callables[0].arg_types))] + new_kinds = list(callables[0].arg_kinds) + new_returns: list[Type] = [] + + too_complex = False + for target in callables: + # We fall back to Callable[..., Union[]] if the functions do not have + # the exact same signature. The only exception is if one arg is optional and + # the other is positional: in that case, we continue unioning (and expect a + # positional arg). + # TODO: Enhance the merging logic to handle a wider variety of signatures. 
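+            # (Illustrative: merging (x: int) -> int with (x: str) -> str gives
+            # (x: int | str) -> int | str; if the arities differ, the fallback
+            # below degrades to (*Any, **Any) returning the union of the
+            # individual return types.)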
+ if len(new_kinds) != len(target.arg_kinds): + too_complex = True + break + for i, (new_kind, target_kind) in enumerate(zip(new_kinds, target.arg_kinds)): + if new_kind == target_kind: + continue + elif new_kind.is_positional() and target_kind.is_positional(): + new_kinds[i] = ARG_POS + else: + too_complex = True + break + + if too_complex: + break # outer loop + + for i, arg in enumerate(target.arg_types): + new_args[i].append(arg) + new_returns.append(target.ret_type) + + union_return = make_simplified_union(new_returns) + if too_complex: + any = AnyType(TypeOfAny.special_form) + return callables[0].copy_modified( + arg_types=[any, any], + arg_kinds=[ARG_STAR, ARG_STAR2], + arg_names=[None, None], + ret_type=union_return, + variables=variables, + implicit=True, + ) + + final_args = [] + for args_list in new_args: + new_type = make_simplified_union(args_list) + final_args.append(new_type) + + return callables[0].copy_modified( + arg_types=final_args, + arg_kinds=new_kinds, + ret_type=union_return, + variables=variables, + implicit=True, + ) + + def erased_signature_similarity( + self, + arg_types: list[Type], + arg_kinds: list[ArgKind], + arg_names: Sequence[str | None] | None, + args: list[Expression], + callee: CallableType, + context: Context, + ) -> bool: + """Determine whether arguments could match the signature at runtime, after + erasing types.""" + formal_to_actual = map_actuals_to_formals( + arg_kinds, arg_names, callee.arg_kinds, callee.arg_names, lambda i: arg_types[i] + ) + + with self.msg.filter_errors(): + if not self.check_argument_count( + callee, arg_types, arg_kinds, arg_names, formal_to_actual, None + ): + # Too few or many arguments -> no match. + return False + + def check_arg( + caller_type: Type, + original_ccaller_type: Type, + caller_kind: ArgKind, + callee_type: Type, + n: int, + m: int, + callee: CallableType, + object_type: Type | None, + context: Context, + outer_context: Context, + ) -> None: + if not arg_approximate_similarity(caller_type, callee_type): + # No match -- exit early since none of the remaining work can change + # the result. 
+ raise Finished + + try: + self.check_argument_types( + arg_types, + arg_kinds, + args, + callee, + formal_to_actual, + context=context, + check_arg=check_arg, + ) + return True + except Finished: + return False + + def apply_generic_arguments( + self, + callable: CallableType, + types: Sequence[Type | None], + context: Context, + skip_unsatisfied: bool = False, + ) -> CallableType: + """Simple wrapper around mypy.applytype.apply_generic_arguments.""" + return applytype.apply_generic_arguments( + callable, + types, + self.msg.incompatible_typevar_value, + context, + skip_unsatisfied=skip_unsatisfied, + ) + + def check_any_type_call(self, args: list[Expression], callee: Type) -> tuple[Type, Type]: + self.infer_arg_types_in_empty_context(args) + callee = get_proper_type(callee) + if isinstance(callee, AnyType): + return ( + AnyType(TypeOfAny.from_another_any, source_any=callee), + AnyType(TypeOfAny.from_another_any, source_any=callee), + ) + else: + return AnyType(TypeOfAny.special_form), AnyType(TypeOfAny.special_form) + + def check_union_call( + self, + callee: UnionType, + args: list[Expression], + arg_kinds: list[ArgKind], + arg_names: Sequence[str | None] | None, + context: Context, + ) -> tuple[Type, Type]: + with self.msg.disable_type_names(): + results = [ + self.check_call(subtype, args, arg_kinds, context, arg_names) + for subtype in callee.relevant_items() + ] + + return (make_simplified_union([res[0] for res in results]), callee) + + def visit_member_expr(self, e: MemberExpr, is_lvalue: bool = False) -> Type: + """Visit member expression (of form e.id).""" + result = self.analyze_ordinary_member_access(e, is_lvalue) + narrowed = self.narrow_type_from_binder(e, result) + self.chk.warn_deprecated(e.node, e) + return narrowed + + def analyze_ordinary_member_access( + self, e: MemberExpr, is_lvalue: bool, rvalue: Expression | None = None + ) -> Type: + """Analyse member expression or member lvalue. + + An rvalue can be provided optionally to infer better setter type when is_lvalue is True. + """ + if e.kind is not None: + # This is a reference to a module attribute. + return self.analyze_ref_expr(e) + else: + # This is a reference to a non-module attribute. + original_type = self.accept(e.expr, is_callee=self.is_callee) + base = e.expr + module_symbol_table = None + + if isinstance(base, RefExpr) and isinstance(base.node, MypyFile): + module_symbol_table = base.node.names + if isinstance(base, RefExpr) and isinstance(base.node, Var): + # This is needed to special case self-types, so we don't need to track + # these flags separately in checkmember.py. + is_self = base.node.is_self or base.node.is_cls + else: + is_self = False + + member_type = analyze_member_access( + e.name, + original_type, + e, + is_lvalue=is_lvalue, + is_super=False, + is_operator=False, + original_type=original_type, + chk=self.chk, + in_literal_context=self.is_literal_context(), + module_symbol_table=module_symbol_table, + is_self=is_self, + rvalue=rvalue, + ) + + return member_type + + def analyze_external_member_access( + self, member: str, base_type: Type, context: Context + ) -> Type: + """Analyse member access that is external, i.e. it cannot + refer to private definitions. Return the result type. 
+ """ + # TODO remove; no private definitions in mypy + return analyze_member_access( + member, + base_type, + context, + is_lvalue=False, + is_super=False, + is_operator=False, + original_type=base_type, + chk=self.chk, + in_literal_context=self.is_literal_context(), + ) + + def is_literal_context(self) -> bool: + return is_literal_type_like(self.type_context[-1]) + + def infer_literal_expr_type(self, value: LiteralValue, fallback_name: str) -> Type: + """Analyzes the given literal expression and determines if we should be + inferring an Instance type, a Literal[...] type, or an Instance that + remembers the original literal. We... + + 1. ...Infer a normal Instance in most circumstances. + + 2. ...Infer a Literal[...] if we're in a literal context. For example, if we + were analyzing the "3" in "foo(3)" where "foo" has a signature of + "def foo(Literal[3]) -> None", we'd want to infer that the "3" has a + type of Literal[3] instead of Instance. + + 3. ...Infer an Instance that remembers the original Literal if we're declaring + a Final variable with an inferred type -- for example, "bar" in "bar: Final = 3" + would be assigned an Instance that remembers it originated from a '3'. See + the comments in Instance's constructor for more details. + """ + typ = self.named_type(fallback_name) + if self.is_literal_context(): + return LiteralType(value=value, fallback=typ) + else: + if value is True: + if self._literal_true is None: + self._literal_true = typ.copy_modified( + last_known_value=LiteralType(value=value, fallback=typ) + ) + return self._literal_true + if value is False: + if self._literal_false is None: + self._literal_false = typ.copy_modified( + last_known_value=LiteralType(value=value, fallback=typ) + ) + return self._literal_false + return typ.copy_modified(last_known_value=LiteralType(value=value, fallback=typ)) + + def concat_tuples(self, left: TupleType, right: TupleType) -> TupleType: + """Concatenate two fixed length tuples.""" + assert not (find_unpack_in_list(left.items) and find_unpack_in_list(right.items)) + return TupleType( + items=left.items + right.items, fallback=self.named_type("builtins.tuple") + ) + + def visit_int_expr(self, e: IntExpr) -> Type: + """Type check an integer literal (trivial).""" + return self.infer_literal_expr_type(e.value, "builtins.int") + + def visit_str_expr(self, e: StrExpr) -> Type: + """Type check a string literal (trivial).""" + return self.infer_literal_expr_type(e.value, "builtins.str") + + def visit_bytes_expr(self, e: BytesExpr) -> Type: + """Type check a bytes literal (trivial).""" + return self.infer_literal_expr_type(e.value, "builtins.bytes") + + def visit_float_expr(self, e: FloatExpr) -> Type: + """Type check a float literal (trivial).""" + return self.named_type("builtins.float") + + def visit_complex_expr(self, e: ComplexExpr) -> Type: + """Type check a complex literal.""" + return self.named_type("builtins.complex") + + def visit_ellipsis(self, e: EllipsisExpr) -> Type: + """Type check '...'.""" + return self.named_type("builtins.ellipsis") + + def visit_op_expr(self, e: OpExpr) -> Type: + """Type check a binary operator expression.""" + if e.analyzed: + # It's actually a type expression X | Y. + return self.accept(e.analyzed) + if e.op == "and" or e.op == "or": + return self.check_boolean_op(e) + if e.op == "*" and isinstance(e.left, ListExpr): + # Expressions of form [...] * e get special type inference. 
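+            # (Illustrative: with a type context of list[int | None], an
+            # expression like [None] * n can be inferred as list[int | None]
+            # rather than list[None].)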
+ return self.check_list_multiply(e) + if e.op == "%": + if isinstance(e.left, BytesExpr): + return self.strfrm_checker.check_str_interpolation(e.left, e.right) + if isinstance(e.left, StrExpr): + return self.strfrm_checker.check_str_interpolation(e.left, e.right) + left_type = self.accept(e.left) + + proper_left_type = get_proper_type(left_type) + if isinstance(proper_left_type, TupleType) and e.op == "+": + left_add_method = proper_left_type.partial_fallback.type.get("__add__") + if left_add_method and left_add_method.fullname == "builtins.tuple.__add__": + proper_right_type = get_proper_type(self.accept(e.right)) + if isinstance(proper_right_type, TupleType): + right_radd_method = proper_right_type.partial_fallback.type.get("__radd__") + if right_radd_method is None: + # One cannot have two variadic items in the same tuple. + if ( + find_unpack_in_list(proper_left_type.items) is None + or find_unpack_in_list(proper_right_type.items) is None + ): + return self.concat_tuples(proper_left_type, proper_right_type) + elif ( + PRECISE_TUPLE_TYPES in self.chk.options.enable_incomplete_feature + and isinstance(proper_right_type, Instance) + and self.chk.type_is_iterable(proper_right_type) + ): + # Handle tuple[X, Y] + tuple[Z, ...] = tuple[X, Y, *tuple[Z, ...]]. + right_radd_method = proper_right_type.type.get("__radd__") + if ( + right_radd_method is None + and proper_left_type.partial_fallback.type.fullname == "builtins.tuple" + and find_unpack_in_list(proper_left_type.items) is None + ): + item_type = self.chk.iterable_item_type(proper_right_type, e) + mapped = self.chk.named_generic_type("builtins.tuple", [item_type]) + return proper_left_type.copy_modified( + items=proper_left_type.items + [UnpackType(mapped)] + ) + + use_reverse: UseReverse = USE_REVERSE_DEFAULT + if e.op == "|": + if is_named_instance(proper_left_type, "builtins.dict"): + # This is a special case for `dict | TypedDict`. + # 1. Find `dict | TypedDict` case + # 2. Switch `dict.__or__` to `TypedDict.__ror__` (the same from both runtime and typing perspective) + proper_right_type = get_proper_type(self.accept(e.right)) + if isinstance(proper_right_type, TypedDictType): + use_reverse = USE_REVERSE_ALWAYS + if isinstance(proper_left_type, TypedDictType): + # This is the reverse case: `TypedDict | dict`, + # simply do not allow the reverse checking: + # do not call `__dict__.__ror__`. + proper_right_type = get_proper_type(self.accept(e.right)) + if is_named_instance(proper_right_type, "builtins.dict"): + use_reverse = USE_REVERSE_NEVER + + if PRECISE_TUPLE_TYPES in self.chk.options.enable_incomplete_feature: + # Handle tuple[X, ...] + tuple[Y, Z] = tuple[*tuple[X, ...], Y, Z]. 
+ if ( + e.op == "+" + and isinstance(proper_left_type, Instance) + and proper_left_type.type.fullname == "builtins.tuple" + ): + proper_right_type = get_proper_type(self.accept(e.right)) + if ( + isinstance(proper_right_type, TupleType) + and proper_right_type.partial_fallback.type.fullname == "builtins.tuple" + and find_unpack_in_list(proper_right_type.items) is None + ): + return proper_right_type.copy_modified( + items=[UnpackType(proper_left_type)] + proper_right_type.items + ) + + if e.op in operators.op_methods: + method = operators.op_methods[e.op] + if use_reverse is UseReverse.DEFAULT or use_reverse is UseReverse.NEVER: + result, method_type = self.check_op( + method, + base_type=left_type, + arg=e.right, + context=e, + allow_reverse=use_reverse is UseReverse.DEFAULT, + ) + elif use_reverse is UseReverse.ALWAYS: + result, method_type = self.check_op( + # The reverse operator here gives better error messages: + operators.reverse_op_methods[method], + base_type=self.accept(e.right), + arg=e.left, + context=e, + allow_reverse=False, + ) + else: + assert_never(use_reverse) + e.method_type = method_type + return result + else: + raise RuntimeError(f"Unknown operator {e.op}") + + def visit_comparison_expr(self, e: ComparisonExpr) -> Type: + """Type check a comparison expression. + + Comparison expressions are type checked consecutive-pair-wise + That is, 'a < b > c == d' is check as 'a < b and b > c and c == d' + """ + result: Type | None = None + sub_result: Type + + # Check each consecutive operand pair and their operator + for left, right, operator in zip(e.operands, e.operands[1:], e.operators): + left_type = self.accept(left) + + if operator == "in" or operator == "not in": + # This case covers both iterables and containers, which have different meanings. + # For a container, the in operator calls the __contains__ method. + # For an iterable, the in operator iterates over the iterable, and compares each item one-by-one. + # We allow `in` for a union of containers and iterables as long as at least one of them matches the + # type of the left operand, as the operation will simply return False if the union's container/iterator + # type doesn't match the left operand. + + # If the right operand has partial type, look it up without triggering + # a "Need type annotation ..." message, as it would be noise. + right_type = self.find_partial_type_ref_fast_path(right) + if right_type is None: + right_type = self.accept(right) # Validate the right operand + + right_type = get_proper_type(right_type) + item_types: Sequence[Type] = [right_type] + if isinstance(right_type, UnionType): + item_types = list(right_type.relevant_items()) + + sub_result = self.bool_type() + + container_types: list[Type] = [] + iterable_types: list[Type] = [] + failed_out = False + encountered_partial_type = False + + for item_type in item_types: + # Keep track of whether we get type check errors (these won't be reported, they + # are just to verify whether something is valid typing wise). + with self.msg.filter_errors(save_filtered_errors=True) as container_errors: + _, method_type = self.check_method_call_by_name( + method="__contains__", + base_type=item_type, + args=[left], + arg_kinds=[ARG_POS], + context=e, + original_type=right_type, + ) + # Container item type for strict type overlap checks. Note: we need to only + # check for nominal type, because a usual "Unsupported operands for in" + # will be reported for types incompatible with __contains__(). + # See testCustomContainsCheckStrictEquality for an example. 
+ cont_type = self.chk.analyze_container_item_type(item_type) + + if isinstance(item_type, PartialType): + # We don't really know if this is an error or not, so just shut up. + encountered_partial_type = True + pass + elif ( + container_errors.has_new_errors() + and + # is_valid_var_arg is True for any Iterable + self.is_valid_var_arg(item_type) + ): + # it's not a container, but it is an iterable + with self.msg.filter_errors(save_filtered_errors=True) as iterable_errors: + _, itertype = self.chk.analyze_iterable_item_type_without_expression( + item_type, e + ) + if iterable_errors.has_new_errors(): + self.msg.add_errors(iterable_errors.filtered_errors()) + failed_out = True + else: + method_type = CallableType( + [left_type], + [nodes.ARG_POS], + [None], + self.bool_type(), + self.named_type("builtins.function"), + ) + e.method_types.append(method_type) + iterable_types.append(itertype) + elif not container_errors.has_new_errors() and cont_type: + container_types.append(cont_type) + e.method_types.append(method_type) + else: + self.msg.add_errors(container_errors.filtered_errors()) + failed_out = True + + if not encountered_partial_type and not failed_out: + iterable_type = UnionType.make_union(iterable_types) + if not is_subtype(left_type, iterable_type): + if not container_types: + self.msg.unsupported_operand_types("in", left_type, right_type, e) + else: + container_type = UnionType.make_union(container_types) + if self.dangerous_comparison( + left_type, + container_type, + original_container=right_type, + prefer_literal=False, + ): + self.msg.dangerous_comparison( + left_type, container_type, "container", e + ) + + elif operator in operators.op_methods: + method = operators.op_methods[operator] + + with ErrorWatcher(self.msg.errors) as w: + sub_result, method_type = self.check_op( + method, left_type, right, e, allow_reverse=True + ) + e.method_types.append(method_type) + + # Only show dangerous overlap if there are no other errors. See + # testCustomEqCheckStrictEquality for an example. + if not w.has_new_errors() and operator in ("==", "!="): + right_type = self.accept(right) + if self.dangerous_comparison(left_type, right_type): + # Show the most specific literal types possible + left_type = try_getting_literal(left_type) + right_type = try_getting_literal(right_type) + self.msg.dangerous_comparison(left_type, right_type, "equality", e) + + elif operator == "is" or operator == "is not": + right_type = self.accept(right) # validate the right operand + sub_result = self.bool_type() + if self.dangerous_comparison(left_type, right_type, identity_check=True): + # Show the most specific literal types possible + left_type = try_getting_literal(left_type) + right_type = try_getting_literal(right_type) + self.msg.dangerous_comparison(left_type, right_type, "identity", e) + e.method_types.append(None) + else: + raise RuntimeError(f"Unknown comparison operator {operator}") + + # Determine type of boolean-and of result and sub_result + if result is None: + result = sub_result + else: + result = join.join_types(result, sub_result) + + assert result is not None + return result + + def find_partial_type_ref_fast_path(self, expr: Expression) -> Type | None: + """If expression has a partial generic type, return it without additional checks. + + In particular, this does not generate an error about a missing annotation. + + Otherwise, return None. 
+ """ + if not isinstance(expr, RefExpr): + return None + if isinstance(expr.node, Var): + result = self.analyze_var_ref(expr.node, expr) + if isinstance(result, PartialType) and result.type is not None: + self.chk.store_type(expr, fixup_partial_type(result)) + return result + return None + + def dangerous_comparison( + self, + left: Type, + right: Type, + *, + original_container: Type | None = None, + seen_types: set[tuple[Type, Type]] | None = None, + prefer_literal: bool = True, + identity_check: bool = False, + ) -> bool: + """Check for dangerous non-overlapping comparisons like 42 == 'no'. + + The original_container is the original container type for 'in' checks + (and None for equality checks). + + Rules: + * X and None are overlapping even in strict-optional mode. This is to allow + 'assert x is not None' for x defined as 'x = None # type: str' in class body + (otherwise mypy itself would have couple dozen errors because of this). + * Optional[X] and Optional[Y] are non-overlapping if X and Y are + non-overlapping, although technically None is overlap, it is most + likely an error. + * Any overlaps with everything, i.e. always safe. + * Special case: b'abc' in b'cde' is safe. + """ + if not self.chk.options.strict_equality: + return False + + if seen_types is None: + seen_types = set() + if (left, right) in seen_types: + return False + seen_types.add((left, right)) + + left, right = get_proper_types((left, right)) + + # We suppress the error for equality and container checks if there is a custom __eq__() + # method on either side. User defined (or even standard library) classes can define this + # to return True for comparisons between non-overlapping types. + if ( + custom_special_method(left, "__eq__") or custom_special_method(right, "__eq__") + ) and not identity_check: + return False + + if prefer_literal: + # Also flag non-overlapping literals in situations like: + # x: Literal['a', 'b'] + # if x == 'c': + # ... + left = try_getting_literal(left) + right = try_getting_literal(right) + + if self.chk.binder.is_unreachable_warning_suppressed(): + # We are inside a function that contains type variables with value restrictions in + # its signature. In this case we just suppress all strict-equality checks to avoid + # false positives for code like: + # + # T = TypeVar('T', str, int) + # def f(x: T) -> T: + # if x == 0: + # ... + # return x + # + # TODO: find a way of disabling the check only for types resulted from the expansion. + return False + if self.chk.options.strict_equality_for_none: + if isinstance(left, NoneType) and isinstance(right, NoneType): + return False + elif isinstance(left, NoneType) or isinstance(right, NoneType): + return False + if isinstance(left, UnionType) and isinstance(right, UnionType): + left = remove_optional(left) + right = remove_optional(right) + left, right = get_proper_types((left, right)) + if ( + original_container + and has_bytes_component(original_container) + and has_bytes_component(left) + ): + # We need to special case bytes and bytearray, because 97 in b'abc', b'a' in b'abc', + # b'a' in bytearray(b'abc') etc. all return True (and we want to show the error only + # if the check can _never_ be True). + return False + if isinstance(left, Instance) and isinstance(right, Instance): + # Special case some builtin implementations of AbstractSet. 
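+            # (Illustrative: under --strict-equality, {1} == frozenset({"a"}) maps
+            # both operands to AbstractSet and compares the item types int and
+            # str, so the non-overlapping comparison is still flagged.)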
+ left_name = left.type.fullname + right_name = right.type.fullname + if ( + left_name in OVERLAPPING_TYPES_ALLOWLIST + and right_name in OVERLAPPING_TYPES_ALLOWLIST + ): + abstract_set = self.chk.lookup_typeinfo("typing.AbstractSet") + left = map_instance_to_supertype(left, abstract_set) + right = map_instance_to_supertype(right, abstract_set) + return self.dangerous_comparison( + left.args[0], right.args[0], seen_types=seen_types + ) + elif left.type.has_base("typing.Mapping") and right.type.has_base("typing.Mapping"): + # Similar to above: Mapping ignores the classes, it just compares items. + abstract_map = self.chk.lookup_typeinfo("typing.Mapping") + left = map_instance_to_supertype(left, abstract_map) + right = map_instance_to_supertype(right, abstract_map) + return self.dangerous_comparison( + left.args[0], right.args[0], seen_types=seen_types + ) or self.dangerous_comparison(left.args[1], right.args[1], seen_types=seen_types) + elif left_name in ("builtins.list", "builtins.tuple") and right_name == left_name: + return self.dangerous_comparison( + left.args[0], right.args[0], seen_types=seen_types + ) + elif left_name in OVERLAPPING_BYTES_ALLOWLIST and right_name in ( + OVERLAPPING_BYTES_ALLOWLIST + ): + return False + if isinstance(left, LiteralType) and isinstance(right, LiteralType): + if isinstance(left.value, bool) and isinstance(right.value, bool): + # Comparing different booleans is not dangerous. + return False + if isinstance(left, LiteralType) and isinstance(right, Instance): + # bytes/bytearray comparisons are supported + if left.fallback.type.fullname == "builtins.bytes" and right.type.has_base( + "builtins.bytearray" + ): + return False + if isinstance(right, LiteralType) and isinstance(left, Instance): + # bytes/bytearray comparisons are supported + if right.fallback.type.fullname == "builtins.bytes" and left.type.has_base( + "builtins.bytearray" + ): + return False + return not is_overlapping_types(left, right, ignore_promotions=False) + + def check_method_call_by_name( + self, + method: str, + base_type: Type, + args: list[Expression], + arg_kinds: list[ArgKind], + context: Context, + original_type: Type | None = None, + self_type: Type | None = None, + ) -> tuple[Type, Type]: + """Type check a call to a named method on an object. + + Return tuple (result type, inferred method type). The 'original_type' + is used for error messages. The self_type is to bind self in methods + (see analyze_member_access for more details). + """ + original_type = original_type or base_type + self_type = self_type or base_type + # Unions are special-cased to allow plugins to act on each element of the union. + base_type = get_proper_type(base_type) + if isinstance(base_type, UnionType): + return self.check_union_method_call_by_name( + method, base_type, args, arg_kinds, context, original_type + ) + + method_type = analyze_member_access( + method, + base_type, + context, + is_lvalue=False, + is_super=False, + is_operator=True, + original_type=original_type, + self_type=self_type, + chk=self.chk, + in_literal_context=self.is_literal_context(), + ) + return self.check_method_call(method, base_type, method_type, args, arg_kinds, context) + + def check_union_method_call_by_name( + self, + method: str, + base_type: UnionType, + args: list[Expression], + arg_kinds: list[ArgKind], + context: Context, + original_type: Type | None = None, + ) -> tuple[Type, Type]: + """Type check a call to a named method on an object with union type. 
+ + This essentially checks the call using check_method_call_by_name() for each + union item and unions the result. We do this to allow plugins to act on + individual union items. + """ + res: list[Type] = [] + meth_res: list[Type] = [] + for typ in base_type.relevant_items(): + # Format error messages consistently with + # mypy.checkmember.analyze_union_member_access(). + with self.msg.disable_type_names(): + item, meth_item = self.check_method_call_by_name( + method, typ, args, arg_kinds, context, original_type + ) + res.append(item) + meth_res.append(meth_item) + return make_simplified_union(res), make_simplified_union(meth_res) + + def check_method_call( + self, + method_name: str, + base_type: Type, + method_type: Type, + args: list[Expression], + arg_kinds: list[ArgKind], + context: Context, + ) -> tuple[Type, Type]: + """Type check a call to a method with the given name and type on an object. + + Return tuple (result type, inferred method type). + """ + callable_name = self.method_fullname(base_type, method_name) + object_type = base_type if callable_name is not None else None + + # Try to refine the method signature using plugin hooks before checking the call. + method_type = self.transform_callee_type( + callable_name, method_type, args, arg_kinds, context, object_type=object_type + ) + + return self.check_call( + method_type, + args, + arg_kinds, + context, + callable_name=callable_name, + object_type=base_type, + ) + + def check_op_reversible( + self, + op_name: str, + left_type: Type, + left_expr: Expression, + right_type: Type, + right_expr: Expression, + context: Context, + ) -> tuple[Type, Type]: + def lookup_operator(op_name: str, base_type: Type) -> Type | None: + """Looks up the given operator and returns the corresponding type, + if it exists.""" + + # This check is an important performance optimization. + if not has_operator(base_type, op_name, self.named_type): + return None + + with self.msg.filter_errors() as w: + member = analyze_member_access( + name=op_name, + typ=base_type, + is_lvalue=False, + is_super=False, + is_operator=True, + original_type=base_type, + context=context, + chk=self.chk, + in_literal_context=self.is_literal_context(), + ) + return None if w.has_new_errors() else member + + def lookup_definer(typ: Instance, attr_name: str) -> str | None: + """Returns the name of the class that contains the actual definition of attr_name. + + So if class A defines foo and class B subclasses A, running + 'get_class_defined_in(B, "foo")` would return the full name of A. + + However, if B were to override and redefine foo, that method call would + return the full name of B instead. + + If the attr name is not present in the given class or its MRO, returns None. + """ + for cls in typ.type.mro: + if cls.names.get(attr_name): + return cls.fullname + return None + + left_type = get_proper_type(left_type) + right_type = get_proper_type(right_type) + + # If either the LHS or the RHS are Any, we can't really concluding anything + # about the operation since the Any type may or may not define an + # __op__ or __rop__ method. So, we punt and return Any instead. + + if isinstance(left_type, AnyType): + any_type = AnyType(TypeOfAny.from_another_any, source_any=left_type) + return any_type, any_type + if isinstance(right_type, AnyType): + any_type = AnyType(TypeOfAny.from_another_any, source_any=right_type) + return any_type, any_type + + # STEP 1: + # We start by getting the __op__ and __rop__ methods, if they exist. 
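+        # (Illustrative: for "a + b" this looks up type(a).__add__ and
+        # type(b).__radd__; which of the two is attempted first is decided in
+        # step 2 below.)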
+ + rev_op_name = operators.reverse_op_methods[op_name] + + left_op = lookup_operator(op_name, left_type) + right_op = lookup_operator(rev_op_name, right_type) + + # STEP 2a: + # We figure out in which order Python will call the operator methods. As it + # turns out, it's not as simple as just trying to call __op__ first and + # __rop__ second. + # + # We store the determined order inside the 'variants_raw' variable, + # which records tuples containing the method, base type, and the argument. + + if op_name in operators.op_methods_that_shortcut and is_same_type(left_type, right_type): + # When we do "A() + A()", for example, Python will only call the __add__ method, + # never the __radd__ method. + # + # This is the case even if the __add__ method is completely missing and the __radd__ + # method is defined. + + variants_raw = [(op_name, left_op, left_type, right_expr)] + elif ( + is_subtype(right_type, left_type) + and isinstance(left_type, Instance) + and isinstance(right_type, Instance) + and not ( + left_type.type.alt_promote is not None + and left_type.type.alt_promote.type is right_type.type + ) + and lookup_definer(left_type, op_name) != lookup_definer(right_type, rev_op_name) + ): + # When we do "A() + B()" where B is a subclass of A, we'll actually try calling + # B's __radd__ method first, but ONLY if B explicitly defines or overrides the + # __radd__ method. + # + # This mechanism lets subclasses "refine" the expected outcome of the operation, even + # if they're located on the RHS. + # + # As a special case, the alt_promote check makes sure that we don't use the + # __radd__ method of int if the LHS is a native int type. + + variants_raw = [ + (rev_op_name, right_op, right_type, left_expr), + (op_name, left_op, left_type, right_expr), + ] + else: + # In all other cases, we do the usual thing and call __add__ first and + # __radd__ second when doing "A() + B()". + + variants_raw = [ + (op_name, left_op, left_type, right_expr), + (rev_op_name, right_op, right_type, left_expr), + ] + + # STEP 3: + # We now filter out all non-existent operators. The 'variants' list contains + # all operator methods that are actually present, in the order that Python + # attempts to invoke them. + + variants = [(na, op, obj, arg) for (na, op, obj, arg) in variants_raw if op is not None] + + # STEP 4: + # We now try invoking each one. If an operation succeeds, end early and return + # the corresponding result. Otherwise, return the result and errors associated + # with the first entry. + + errors = [] + results = [] + for name, method, obj, arg in variants: + with self.msg.filter_errors(save_filtered_errors=True) as local_errors: + result = self.check_method_call(name, obj, method, [arg], [ARG_POS], context) + if local_errors.has_new_errors(): + errors.append(local_errors.filtered_errors()) + results.append(result) + else: + return result + + # We finish invoking above operators and no early return happens. Therefore, + # we check if either the LHS or the RHS is Instance and fallbacks to Any, + # if so, we also return Any + if (isinstance(left_type, Instance) and left_type.type.fallback_to_any) or ( + isinstance(right_type, Instance) and right_type.type.fallback_to_any + ): + any_type = AnyType(TypeOfAny.special_form) + return any_type, any_type + + # STEP 4b: + # Sometimes, the variants list is empty. In that case, we fall-back to attempting to + # call the __op__ method (even though it's missing). 
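+        # (Illustrative: for "object() + object()" neither __add__ nor __radd__
+        # exists, so we still "call" the forward method below purely to produce
+        # the usual unsupported-operand error message.)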
+ + if not variants: + with self.msg.filter_errors(save_filtered_errors=True) as local_errors: + result = self.check_method_call_by_name( + op_name, left_type, [right_expr], [ARG_POS], context + ) + + if local_errors.has_new_errors(): + errors.append(local_errors.filtered_errors()) + results.append(result) + else: + # Although we should not need this case anymore, we keep it just in case, as + # otherwise we will get a crash if we introduce inconsistency in checkmember.py + return result + + self.msg.add_errors(errors[0]) + if len(results) == 1: + return results[0] + else: + error_any = AnyType(TypeOfAny.from_error) + result = error_any, error_any + return result + + def check_op( + self, + method: str, + base_type: Type, + arg: Expression, + context: Context, + allow_reverse: bool = False, + ) -> tuple[Type, Type]: + """Type check a binary operation which maps to a method call. + + Return tuple (result type, inferred operator method type). + """ + + if allow_reverse: + left_variants = [base_type] + base_type = get_proper_type(base_type) + if isinstance(base_type, UnionType): + left_variants = list(flatten_nested_unions(base_type.relevant_items())) + right_type = self.accept(arg) + + # Step 1: We first try leaving the right arguments alone and destructure + # just the left ones. (Mypy can sometimes perform some more precise inference + # if we leave the right operands a union -- see testOperatorWithEmptyListAndSum.) + all_results = [] + all_inferred = [] + + with self.msg.filter_errors() as local_errors: + for left_possible_type in left_variants: + result, inferred = self.check_op_reversible( + op_name=method, + left_type=left_possible_type, + left_expr=TempNode(left_possible_type, context=context), + right_type=right_type, + right_expr=arg, + context=context, + ) + all_results.append(result) + all_inferred.append(inferred) + + if not local_errors.has_new_errors(): + results_final = make_simplified_union(all_results) + inferred_final = make_simplified_union(all_inferred) + return results_final, inferred_final + + # Step 2: If that fails, we try again but also destructure the right argument. + # This is also necessary to make certain edge cases work -- see + # testOperatorDoubleUnionInterwovenUnionAdd, for example. + + # Note: We want to pass in the original 'arg' for 'left_expr' and 'right_expr' + # whenever possible so that plugins and similar things can introspect on the original + # node if possible. + # + # We don't do the same for the base expression because it could lead to weird + # type inference errors -- e.g. see 'testOperatorDoubleUnionSum'. + # TODO: Can we use `type_overrides_set()` here? 
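+            # Editor's note (illustrative only): with hypothetical operands
+            #     x: Union[int, str]
+            #     y: Union[int, str]
+            # "x + y" reaches this step, and each of the four (left item, right
+            # item) pairs is checked separately; the per-pair results are then
+            # unioned, and any errors may carry the "both operands are unions"
+            # note emitted further below.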
+ right_variants = [(right_type, arg)] + right_type = get_proper_type(right_type) + if isinstance(right_type, UnionType): + right_variants = [ + (item, TempNode(item, context=context)) + for item in flatten_nested_unions(right_type.relevant_items()) + ] + + all_results = [] + all_inferred = [] + + with self.msg.filter_errors(save_filtered_errors=True) as local_errors: + for left_possible_type in left_variants: + for right_possible_type, right_expr in right_variants: + result, inferred = self.check_op_reversible( + op_name=method, + left_type=left_possible_type, + left_expr=TempNode(left_possible_type, context=context), + right_type=right_possible_type, + right_expr=right_expr, + context=context, + ) + all_results.append(result) + all_inferred.append(inferred) + + if local_errors.has_new_errors(): + self.msg.add_errors(local_errors.filtered_errors()) + # Point any notes to the same location as an existing message. + err = local_errors.filtered_errors()[-1] + recent_context = TempNode(NoneType()) + recent_context.line = err.line + recent_context.column = err.column + if len(left_variants) >= 2 and len(right_variants) >= 2: + self.msg.warn_both_operands_are_from_unions(recent_context) + elif len(left_variants) >= 2: + self.msg.warn_operand_was_from_union("Left", base_type, context=recent_context) + elif len(right_variants) >= 2: + self.msg.warn_operand_was_from_union( + "Right", right_type, context=recent_context + ) + + # See the comment in 'check_overload_call' for more details on why + # we call 'combine_function_signature' instead of just unioning the inferred + # callable types. + results_final = make_simplified_union(all_results) + inferred_final = self.combine_function_signatures(get_proper_types(all_inferred)) + return results_final, inferred_final + else: + return self.check_method_call_by_name( + method=method, + base_type=base_type, + args=[arg], + arg_kinds=[ARG_POS], + context=context, + ) + + def check_boolean_op(self, e: OpExpr) -> Type: + """Type check a boolean operation ('and' or 'or').""" + + # A boolean operation can evaluate to either of the operands. + + # We use the current type context to guide the type inference of + # the left operand. We also use the left operand type to guide the type + # inference of the right operand so that expressions such as + # '[1] or []' are inferred correctly. + ctx = self.type_context[-1] + left_type = self.accept(e.left, ctx) + expanded_left_type = try_expanding_sum_type_to_union(left_type, "builtins.bool") + + assert e.op in ("and", "or") # Checked by visit_op_expr + + if e.right_always: + left_map: mypy.checker.TypeMap = None + right_map: mypy.checker.TypeMap = {} + elif e.right_unreachable: + left_map, right_map = {}, None + elif e.op == "and": + right_map, left_map = self.chk.find_isinstance_check(e.left) + elif e.op == "or": + left_map, right_map = self.chk.find_isinstance_check(e.left) + + # If left_map is None then we know mypy considers the left expression + # to be redundant. 
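+        # Editor's note (illustrative only): for a hypothetical x of type
+        # Literal[True], "x and f()" yields left_map == None (x can never be
+        # falsy), so the left operand may be flagged as redundant below (when
+        # the redundant-expr error code is enabled) and the type of the whole
+        # expression is simply the type of f().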
+ if ( + codes.REDUNDANT_EXPR in self.chk.options.enabled_error_codes + and left_map is None + # don't report an error if it's intentional + and not e.right_always + ): + self.msg.redundant_left_operand(e.op, e.left) + + if ( + self.chk.should_report_unreachable_issues() + and right_map is None + # don't report an error if it's intentional + and not e.right_unreachable + ): + self.msg.unreachable_right_operand(e.op, e.right) + + right_type = self.analyze_cond_branch( + right_map, e.right, self._combined_context(expanded_left_type) + ) + + if left_map is None and right_map is None: + return UninhabitedType() + + if right_map is None: + # The boolean expression is statically known to be the left value + assert left_map is not None + return left_type + if left_map is None: + # The boolean expression is statically known to be the right value + assert right_map is not None + return right_type + + if e.op == "and": + restricted_left_type = false_only(expanded_left_type) + result_is_left = not expanded_left_type.can_be_true + elif e.op == "or": + restricted_left_type = true_only(expanded_left_type) + result_is_left = not expanded_left_type.can_be_false + + if isinstance(restricted_left_type, UninhabitedType): + # The left operand can never be the result + return right_type + elif result_is_left: + # The left operand is always the result + return left_type + else: + return make_simplified_union([restricted_left_type, right_type]) + + def check_list_multiply(self, e: OpExpr) -> Type: + """Type check an expression of form '[...] * e'. + + Type inference is special-cased for this common construct. + """ + right_type = self.accept(e.right) + if is_subtype(right_type, self.named_type("builtins.int")): + # Special case: [...] * . Use the type context of the + # OpExpr, since the multiplication does not affect the type. + left_type = self.accept(e.left, type_context=self.type_context[-1]) + else: + left_type = self.accept(e.left) + result, method_type = self.check_op("__mul__", left_type, e.right, e) + e.method_type = method_type + return result + + def visit_assignment_expr(self, e: AssignmentExpr) -> Type: + value = self.accept(e.value) + self.chk.check_assignment(e.target, e.value) + self.chk.check_final(e) + if not has_uninhabited_component(value): + # TODO: can we get rid of this extra store_type()? + # Usually, check_assignment() already stores the lvalue type correctly. + self.chk.store_type(e.target, value) + self.find_partial_type_ref_fast_path(e.target) + return value + + def visit_unary_expr(self, e: UnaryExpr) -> Type: + """Type check an unary operation ('not', '-', '+' or '~').""" + operand_type = self.accept(e.expr) + op = e.op + if op == "not": + result: Type = self.bool_type() + self.chk.check_for_truthy_type(operand_type, e.expr) + else: + method = operators.unary_op_methods[op] + result, method_type = self.check_method_call_by_name(method, operand_type, [], [], e) + e.method_type = method_type + return result + + def visit_index_expr(self, e: IndexExpr) -> Type: + """Type check an index expression (base[index]). + + It may also represent type application. + """ + result = self.visit_index_expr_helper(e) + result = self.narrow_type_from_binder(e, result) + p_result = get_proper_type(result) + if ( + self.is_literal_context() + and isinstance(p_result, Instance) + and p_result.last_known_value is not None + ): + result = p_result.last_known_value + return result + + def visit_index_expr_helper(self, e: IndexExpr) -> Type: + if e.analyzed: + # It's actually a type application. 
+ return self.accept(e.analyzed) + left_type = self.accept(e.base) + return self.visit_index_with_type(left_type, e) + + def visit_index_with_type( + self, + left_type: Type, + e: IndexExpr, + original_type: ProperType | None = None, + self_type: Type | None = None, + ) -> Type: + """Analyze type of an index expression for a given type of base expression. + + The 'original_type' is used for error messages (currently used for union types). The + 'self_type' is to bind self in methods (see analyze_member_access for more details). + """ + index = e.index + self_type = self_type or left_type + left_type = get_proper_type(left_type) + + # Visit the index, just to make sure we have a type for it available + self.accept(index) + + if isinstance(left_type, TupleType) and any( + isinstance(it, UnpackType) for it in left_type.items + ): + # Normalize variadic tuples for consistency. + left_type = expand_type(left_type, {}) + + if isinstance(left_type, UnionType): + original_type = original_type or left_type + # Don't combine literal types, since we may need them for type narrowing. + return make_simplified_union( + [ + self.visit_index_with_type(typ, e, original_type) + for typ in left_type.relevant_items() + ], + contract_literals=False, + ) + elif isinstance(left_type, TupleType) and self.chk.in_checked_function(): + # Special case for tuples. They return a more specific type when + # indexed by an integer literal. + if isinstance(index, SliceExpr): + return self.visit_tuple_slice_helper(left_type, index) + + ns = self.try_getting_int_literals(index) + if ns is not None: + out = [] + for n in ns: + item = self.visit_tuple_index_helper(left_type, n) + if item is not None: + out.append(item) + else: + self.chk.fail(message_registry.TUPLE_INDEX_OUT_OF_RANGE, e) + if any(isinstance(t, UnpackType) for t in left_type.items): + min_len = self.min_tuple_length(left_type) + self.chk.note(f"Variadic tuple can have length {min_len}", e) + return AnyType(TypeOfAny.from_error) + return make_simplified_union(out) + else: + return self.nonliteral_tuple_index_helper(left_type, index) + elif isinstance(left_type, TypedDictType): + return self.visit_typeddict_index_expr(left_type, e.index)[0] + elif isinstance(left_type, FunctionLike) and left_type.is_type_obj(): + if left_type.type_object().is_enum: + return self.visit_enum_index_expr(left_type.type_object(), e.index, e) + elif ( + left_type.type_object().type_vars + or left_type.type_object().fullname == "builtins.type" + ): + return self.named_type("types.GenericAlias") + + if isinstance(left_type, TypeVarType): + return self.visit_index_with_type( + left_type.values_or_bound(), e, original_type, left_type + ) + elif isinstance(left_type, Instance) and left_type.type.fullname == "typing._SpecialForm": + # Allow special forms to be indexed and used to create union types + return self.named_type("typing._SpecialForm") + else: + result, method_type = self.check_method_call_by_name( + "__getitem__", + left_type, + [e.index], + [ARG_POS], + e, + original_type=original_type, + self_type=self_type, + ) + e.method_type = method_type + return result + + def min_tuple_length(self, left: TupleType) -> int: + unpack_index = find_unpack_in_list(left.items) + if unpack_index is None: + return left.length() + unpack = left.items[unpack_index] + assert isinstance(unpack, UnpackType) + if isinstance(unpack.type, TypeVarTupleType): + return left.length() - 1 + unpack.type.min_len + return left.length() - 1 + + def visit_tuple_index_helper(self, left: TupleType, n: int) -> Type | 
None: + unpack_index = find_unpack_in_list(left.items) + if unpack_index is None: + if n < 0: + n += len(left.items) + if 0 <= n < len(left.items): + return left.items[n] + return None + unpack = left.items[unpack_index] + assert isinstance(unpack, UnpackType) + unpacked = get_proper_type(unpack.type) + if isinstance(unpacked, TypeVarTupleType): + # Usually we say that TypeVarTuple can't be split, be in case of + # indexing it seems benign to just return the upper bound item, similar + # to what we do when indexing a regular TypeVar. + bound = get_proper_type(unpacked.upper_bound) + assert isinstance(bound, Instance) + assert bound.type.fullname == "builtins.tuple" + middle = bound.args[0] + else: + assert isinstance(unpacked, Instance) + assert unpacked.type.fullname == "builtins.tuple" + middle = unpacked.args[0] + + extra_items = self.min_tuple_length(left) - left.length() + 1 + if n >= 0: + if n >= self.min_tuple_length(left): + # For tuple[int, *tuple[str, ...], int] we allow either index 0 or 1, + # since variadic item may have zero items. + return None + if n < unpack_index: + return left.items[n] + return UnionType.make_union( + [middle] + + left.items[unpack_index + 1 : max(n - extra_items + 2, unpack_index + 1)], + left.line, + left.column, + ) + n += self.min_tuple_length(left) + if n < 0: + # Similar to above, we only allow -1, and -2 for tuple[int, *tuple[str, ...], int] + return None + if n >= unpack_index + extra_items: + return left.items[n - extra_items + 1] + return UnionType.make_union( + left.items[min(n, unpack_index) : unpack_index] + [middle], left.line, left.column + ) + + def visit_tuple_slice_helper(self, left_type: TupleType, slic: SliceExpr) -> Type: + begin: Sequence[int | None] = [None] + end: Sequence[int | None] = [None] + stride: Sequence[int | None] = [None] + + if slic.begin_index: + begin_raw = self.try_getting_int_literals(slic.begin_index) + if begin_raw is None: + return self.nonliteral_tuple_index_helper(left_type, slic) + begin = begin_raw + + if slic.end_index: + end_raw = self.try_getting_int_literals(slic.end_index) + if end_raw is None: + return self.nonliteral_tuple_index_helper(left_type, slic) + end = end_raw + + if slic.stride: + stride_raw = self.try_getting_int_literals(slic.stride) + if stride_raw is None: + return self.nonliteral_tuple_index_helper(left_type, slic) + stride = stride_raw + + items: list[Type] = [] + for b, e, s in itertools.product(begin, end, stride): + item = left_type.slice(b, e, s, fallback=self.named_type("builtins.tuple")) + if item is None: + self.chk.fail(message_registry.AMBIGUOUS_SLICE_OF_VARIADIC_TUPLE, slic) + return AnyType(TypeOfAny.from_error) + items.append(item) + return make_simplified_union(items) + + def try_getting_int_literals(self, index: Expression) -> list[int] | None: + """If the given expression or type corresponds to an int literal + or a union of int literals, returns a list of the underlying ints. + Otherwise, returns None. + + Specifically, this function is guaranteed to return a list with + one or more ints if one the following is true: + + 1. 'expr' is a IntExpr or a UnaryExpr backed by an IntExpr + 2. 'typ' is a LiteralType containing an int + 3. 
'typ' is a UnionType containing only LiteralType of ints + """ + if isinstance(index, IntExpr): + return [index.value] + elif isinstance(index, UnaryExpr): + if index.op == "-": + operand = index.expr + if isinstance(operand, IntExpr): + return [-1 * operand.value] + if index.op == "+": + operand = index.expr + if isinstance(operand, IntExpr): + return [operand.value] + typ = get_proper_type(self.accept(index)) + if isinstance(typ, Instance) and typ.last_known_value is not None: + typ = typ.last_known_value + if isinstance(typ, LiteralType) and isinstance(typ.value, int): + return [typ.value] + if isinstance(typ, UnionType): + out = [] + for item in get_proper_types(typ.items): + if isinstance(item, LiteralType) and isinstance(item.value, int): + out.append(item.value) + else: + return None + return out + return None + + def nonliteral_tuple_index_helper(self, left_type: TupleType, index: Expression) -> Type: + self.check_method_call_by_name("__getitem__", left_type, [index], [ARG_POS], context=index) + # We could return the return type from above, but unions are often better than the join + union = self.union_tuple_fallback_item(left_type) + if isinstance(index, SliceExpr): + return self.chk.named_generic_type("builtins.tuple", [union]) + return union + + def union_tuple_fallback_item(self, left_type: TupleType) -> Type: + # TODO: this duplicates logic in typeops.tuple_fallback(). + items = [] + for item in left_type.items: + if isinstance(item, UnpackType): + unpacked_type = get_proper_type(item.type) + if isinstance(unpacked_type, TypeVarTupleType): + unpacked_type = get_proper_type(unpacked_type.upper_bound) + if ( + isinstance(unpacked_type, Instance) + and unpacked_type.type.fullname == "builtins.tuple" + ): + items.append(unpacked_type.args[0]) + else: + raise NotImplementedError + else: + items.append(item) + return make_simplified_union(items) + + def visit_typeddict_index_expr( + self, td_type: TypedDictType, index: Expression, setitem: bool = False + ) -> tuple[Type, set[str]]: + if isinstance(index, StrExpr): + key_names = [index.value] + else: + typ = get_proper_type(self.accept(index)) + if isinstance(typ, UnionType): + key_types: list[Type] = list(typ.items) + else: + key_types = [typ] + + key_names = [] + for key_type in get_proper_types(key_types): + if isinstance(key_type, Instance) and key_type.last_known_value is not None: + key_type = key_type.last_known_value + + if ( + isinstance(key_type, LiteralType) + and isinstance(key_type.value, str) + and key_type.fallback.type.fullname != "builtins.bytes" + ): + key_names.append(key_type.value) + else: + self.msg.typeddict_key_must_be_string_literal(td_type, index) + return AnyType(TypeOfAny.from_error), set() + + value_types = [] + for key_name in key_names: + value_type = td_type.items.get(key_name) + if value_type is None: + self.msg.typeddict_key_not_found(td_type, key_name, index, setitem) + return AnyType(TypeOfAny.from_error), set() + else: + value_types.append(value_type) + return make_simplified_union(value_types), set(key_names) + + def visit_enum_index_expr( + self, enum_type: TypeInfo, index: Expression, context: Context + ) -> Type: + string_type: Type = self.named_type("builtins.str") + self.chk.check_subtype( + self.accept(index), + string_type, + context, + "Enum index should be a string", + "actual index type", + ) + return Instance(enum_type, []) + + def visit_cast_expr(self, expr: CastExpr) -> Type: + """Type check a cast expression.""" + source_type = self.accept( + expr.expr, + 
type_context=AnyType(TypeOfAny.special_form), + allow_none_return=True, + always_allow_any=True, + ) + target_type = expr.type + options = self.chk.options + if ( + options.warn_redundant_casts + and not is_same_type(target_type, AnyType(TypeOfAny.special_form)) + and is_same_type(source_type, target_type) + ): + self.msg.redundant_cast(target_type, expr) + if options.disallow_any_unimported and has_any_from_unimported_type(target_type): + self.msg.unimported_type_becomes_any("Target type of cast", target_type, expr) + check_for_explicit_any( + target_type, self.chk.options, self.chk.is_typeshed_stub, self.msg, context=expr + ) + return target_type + + def visit_type_form_expr(self, expr: TypeFormExpr) -> Type: + typ = expr.type + return TypeType.make_normalized(typ, line=typ.line, column=typ.column, is_type_form=True) + + def visit_assert_type_expr(self, expr: AssertTypeExpr) -> Type: + source_type = self.accept( + expr.expr, + type_context=self.type_context[-1], + allow_none_return=True, + always_allow_any=True, + ) + if self.chk.current_node_deferred: + return source_type + + target_type = expr.type + proper_source_type = get_proper_type(source_type) + if ( + isinstance(proper_source_type, mypy.types.Instance) + and proper_source_type.last_known_value is not None + ): + source_type = proper_source_type.last_known_value + if not is_same_type(source_type, target_type): + if not self.chk.in_checked_function(): + self.msg.note( + '"assert_type" expects everything to be "Any" in unchecked functions', + expr.expr, + ) + self.msg.assert_type_fail(source_type, target_type, expr) + return source_type + + def visit_reveal_expr(self, expr: RevealExpr) -> Type: + """Type check a reveal_type expression.""" + if expr.kind == REVEAL_TYPE: + assert expr.expr is not None + revealed_type = self.accept( + expr.expr, type_context=self.type_context[-1], allow_none_return=True + ) + if not self.chk.current_node_deferred: + self.msg.reveal_type(revealed_type, expr.expr) + if not self.chk.in_checked_function(): + self.msg.note( + "'reveal_type' always outputs 'Any' in unchecked functions", expr.expr + ) + self.check_reveal_imported(expr) + return revealed_type + else: + # REVEAL_LOCALS + if not self.chk.current_node_deferred: + # the RevealExpr contains a local_nodes attribute, + # calculated at semantic analysis time. Use it to pull out the + # corresponding subset of variables in self.chk.type_map + names_to_types = ( + {var_node.name: var_node.type for var_node in expr.local_nodes} + if expr.local_nodes is not None + else {} + ) + + self.msg.reveal_locals(names_to_types, expr) + self.check_reveal_imported(expr) + return NoneType() + + def check_reveal_imported(self, expr: RevealExpr) -> None: + if codes.UNIMPORTED_REVEAL not in self.chk.options.enabled_error_codes: + return + + name = "" + if expr.kind == REVEAL_LOCALS: + name = "reveal_locals" + elif expr.kind == REVEAL_TYPE and not expr.is_imported: + name = "reveal_type" + else: + return + + self.chk.fail(f'Name "{name}" is not defined', expr, code=codes.UNIMPORTED_REVEAL) + if name == "reveal_type": + module = ( + "typing" if self.chk.options.python_version >= (3, 11) else "typing_extensions" + ) + hint = ( + 'Did you forget to import it from "{module}"?' + ' (Suggestion: "from {module} import {name}")' + ).format(module=module, name=name) + self.chk.note(hint, expr, code=codes.UNIMPORTED_REVEAL) + + def visit_type_application(self, tapp: TypeApplication) -> Type: + """Type check a type application (expr[type, ...]). 
+ + There are two different options here, depending on whether expr refers + to a type alias or directly to a generic class. In the first case we need + to use a dedicated function typeanal.instantiate_type_alias(). This + is due to slight differences in how type arguments are applied and checked. + """ + if isinstance(tapp.expr, RefExpr) and isinstance(tapp.expr.node, TypeAlias): + if tapp.expr.node.python_3_12_type_alias: + return self.type_alias_type_type() + # Subscription of a (generic) alias in runtime context, expand the alias. + item = instantiate_type_alias( + tapp.expr.node, + tapp.types, + self.chk.fail, + tapp.expr.node.no_args, + tapp, + self.chk.options, + ) + item = get_proper_type(item) + if isinstance(item, Instance): + tp = type_object_type(item.type, self.named_type) + return self.apply_type_arguments_to_callable(tp, item.args, tapp) + elif isinstance(item, TupleType) and item.partial_fallback.type.is_named_tuple: + tp = type_object_type(item.partial_fallback.type, self.named_type) + return self.apply_type_arguments_to_callable(tp, item.partial_fallback.args, tapp) + elif isinstance(item, TypedDictType): + return self.typeddict_callable_from_context(item) + else: + self.chk.fail(message_registry.ONLY_CLASS_APPLICATION, tapp) + return AnyType(TypeOfAny.from_error) + # Type application of a normal generic class in runtime context. + # This is typically used as `x = G[int]()`. + tp = get_proper_type(self.accept(tapp.expr)) + if isinstance(tp, (CallableType, Overloaded)): + if not tp.is_type_obj(): + self.chk.fail(message_registry.ONLY_CLASS_APPLICATION, tapp) + return self.apply_type_arguments_to_callable(tp, tapp.types, tapp) + if isinstance(tp, AnyType): + return AnyType(TypeOfAny.from_another_any, source_any=tp) + return AnyType(TypeOfAny.special_form) + + def visit_type_alias_expr(self, alias: TypeAliasExpr) -> Type: + """Right hand side of a type alias definition. + + It has the same type as if the alias itself was used in a runtime context. + For example, here: + + A = reveal_type(List[T]) + reveal_type(A) + + both `reveal_type` instances will reveal the same type `def (...) -> builtins.list[Any]`. + Note that type variables are implicitly substituted with `Any`. + """ + return self.alias_type_in_runtime_context(alias.node, ctx=alias, alias_definition=True) + + def alias_type_in_runtime_context( + self, alias: TypeAlias, *, ctx: Context, alias_definition: bool = False + ) -> Type: + """Get type of a type alias (could be generic) in a runtime expression. + + Note that this function can be called only if the alias appears _not_ + as a target of type application, which is treated separately in the + visit_type_application method. Some examples where this method is called are + casts and instantiation: + + class LongName(Generic[T]): ... + A = LongName[int] + + x = A() + y = cast(A, ...) + """ + if alias.python_3_12_type_alias: + return self.type_alias_type_type() + if isinstance(alias.target, Instance) and alias.target.invalid: # type: ignore[misc] + # An invalid alias, error already has been reported + return AnyType(TypeOfAny.from_error) + # If this is a generic alias, we set all variables to `Any`. + # For example: + # A = List[Tuple[T, T]] + # x = A() <- same as List[Tuple[Any, Any]], see PEP 484. 
+ disallow_any = self.chk.options.disallow_any_generics and self.is_callee + item = get_proper_type( + set_any_tvars( + alias, + [], + ctx.line, + ctx.column, + self.chk.options, + disallow_any=disallow_any, + fail=self.msg.fail, + ) + ) + if isinstance(item, Instance): + # Normally we get a callable type (or overloaded) with .is_type_obj() true + # representing the class's constructor + tp = type_object_type(item.type, self.named_type) + if alias.no_args: + return tp + return self.apply_type_arguments_to_callable(tp, item.args, ctx) + elif ( + isinstance(item, TupleType) + and + # Tuple[str, int]() fails at runtime, only named tuples and subclasses work. + tuple_fallback(item).type.fullname != "builtins.tuple" + ): + return type_object_type(tuple_fallback(item).type, self.named_type) + elif isinstance(item, TypedDictType): + return self.typeddict_callable_from_context(item) + elif isinstance(item, NoneType): + return TypeType(item, line=item.line, column=item.column) + elif isinstance(item, AnyType): + return AnyType(TypeOfAny.from_another_any, source_any=item) + elif ( + isinstance(item, UnionType) + and item.uses_pep604_syntax + and self.chk.options.python_version >= (3, 10) + ): + return self.chk.named_generic_type("types.UnionType", item.items) + else: + if alias_definition: + return AnyType(TypeOfAny.special_form) + # The _SpecialForm type can be used in some runtime contexts (e.g. it may have __or__). + return self.named_type("typing._SpecialForm") + + def split_for_callable( + self, t: CallableType, args: Sequence[Type], ctx: Context + ) -> list[Type]: + """Handle directly applying type arguments to a variadic Callable. + + This is needed in situations where e.g. variadic class object appears in + runtime context. For example: + class C(Generic[T, Unpack[Ts]]): ... + x = C[int, str]() + + We simply group the arguments that need to go into Ts variable into a TupleType, + similar to how it is done in other places using split_with_prefix_and_suffix(). + """ + if t.is_type_obj(): + # Type arguments must map to class type variables, ignoring constructor vars. + vars = t.type_object().defn.type_vars + else: + vars = list(t.variables) + args = flatten_nested_tuples(args) + + # TODO: this logic is duplicated with semanal_typeargs. + for tv, arg in zip(t.variables, args): + if isinstance(tv, ParamSpecType): + if not isinstance( + get_proper_type(arg), (Parameters, ParamSpecType, AnyType, UnboundType) + ): + self.chk.fail( + "Can only replace ParamSpec with a parameter types list or" + f" another ParamSpec, got {format_type(arg, self.chk.options)}", + ctx, + ) + return [AnyType(TypeOfAny.from_error)] * len(vars) + + if not vars or not any(isinstance(v, TypeVarTupleType) for v in vars): + return list(args) + # TODO: in future we may want to support type application to variadic functions. + assert t.is_type_obj() + info = t.type_object() + # We reuse the logic from semanal phase to reduce code duplication. + fake = Instance(info, args, line=ctx.line, column=ctx.column) + # This code can be only called either from checking a type application, or from + # checking a type alias (after the caller handles no_args aliases), so we know it + # was initially an IndexExpr, and we allow empty tuple type arguments. 
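+        # Editor's note (illustrative only): e.g. for a hypothetical
+        #     class C(Generic[Unpack[Ts]]): ...
+        # an expression such as C[()] arrives here as an IndexExpr with an empty
+        # tuple of type arguments, hence empty_tuple_index=True in the call below.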
+ if not validate_instance(fake, self.chk.fail, empty_tuple_index=True): + fix_instance( + fake, self.chk.fail, self.chk.note, disallow_any=False, options=self.chk.options + ) + args = list(fake.args) + + prefix = next(i for (i, v) in enumerate(vars) if isinstance(v, TypeVarTupleType)) + suffix = len(vars) - prefix - 1 + tvt = vars[prefix] + assert isinstance(tvt, TypeVarTupleType) + start, middle, end = split_with_prefix_and_suffix(tuple(args), prefix, suffix) + return list(start) + [TupleType(list(middle), tvt.tuple_fallback)] + list(end) + + def apply_type_arguments_to_callable( + self, tp: Type, args: Sequence[Type], ctx: Context + ) -> Type: + """Apply type arguments to a generic callable type coming from a type object. + + This will first perform type arguments count checks, report the + error as needed, and return the correct kind of Any. As a special + case this returns Any for non-callable types, because if type object type + is not callable, then an error should be already reported. + """ + tp = get_proper_type(tp) + + if isinstance(tp, CallableType): + if tp.is_type_obj(): + # If we have a class object in runtime context, then the available type + # variables are those of the class, we don't include additional variables + # of the constructor. So that with + # class C(Generic[T]): + # def __init__(self, f: Callable[[S], T], x: S) -> None + # C[int] is valid + # C[int, str] is invalid (although C as a callable has 2 type variables) + # Note: various logic below and in applytype.py relies on the fact that + # class type variables appear *before* constructor variables. + type_vars = tp.type_object().defn.type_vars + else: + type_vars = list(tp.variables) + min_arg_count = sum(not v.has_default() for v in type_vars) + has_type_var_tuple = any(isinstance(v, TypeVarTupleType) for v in type_vars) + if ( + len(args) < min_arg_count or len(args) > len(type_vars) + ) and not has_type_var_tuple: + if tp.is_type_obj() and tp.type_object().fullname == "builtins.tuple": + # e.g. expression tuple[X, Y] + # - want the type of the expression i.e. a function with that as its return type + # - tp is type of tuple (note it won't have params as we are only called + # with generic callable type) + # - tuple[X, Y]() takes a single arg that is a tuple containing an X and a Y + return CallableType( + [TupleType(list(args), self.chk.named_type("tuple"))], + [ARG_POS], + [None], + TupleType(list(args), self.chk.named_type("tuple")), + tp.fallback, + name="tuple", + definition=tp.definition, + is_bound=tp.is_bound, + ) + self.msg.incompatible_type_application( + min_arg_count, len(type_vars), len(args), ctx + ) + return AnyType(TypeOfAny.from_error) + return self.apply_generic_arguments(tp, self.split_for_callable(tp, args, ctx), ctx) + if isinstance(tp, Overloaded): + for it in tp.items: + if tp.is_type_obj(): + # Same as above. 
+ type_vars = tp.type_object().defn.type_vars + else: + type_vars = list(it.variables) + min_arg_count = sum(not v.has_default() for v in type_vars) + has_type_var_tuple = any(isinstance(v, TypeVarTupleType) for v in type_vars) + if ( + len(args) < min_arg_count or len(args) > len(type_vars) + ) and not has_type_var_tuple: + self.msg.incompatible_type_application( + min_arg_count, len(type_vars), len(args), ctx + ) + return AnyType(TypeOfAny.from_error) + return Overloaded( + [ + self.apply_generic_arguments(it, self.split_for_callable(it, args, ctx), ctx) + for it in tp.items + ] + ) + return AnyType(TypeOfAny.special_form) + + def visit_list_expr(self, e: ListExpr) -> Type: + """Type check a list expression [...].""" + return self.check_lst_expr(e, "builtins.list", "") + + def visit_set_expr(self, e: SetExpr) -> Type: + return self.check_lst_expr(e, "builtins.set", "") + + def fast_container_type( + self, e: ListExpr | SetExpr | TupleExpr, container_fullname: str + ) -> Type | None: + """ + Fast path to determine the type of a list or set literal, + based on the list of entries. This mostly impacts large + module-level constant definitions. + + Limitations: + + - no active type context + - at least one item + - no star expressions + - not after deferral + - either exactly one distinct type inside, + or the joined type of all entries is an Instance or Tuple type, + """ + ctx = self.type_context[-1] + if ctx or not e.items: + return None + if self.chk.current_node_deferred: + # Guarantees that all items will be Any, we'll reject it anyway. + return None + rt = self.resolved_type.get(e, None) + if rt is not None: + return rt if isinstance(rt, Instance) else None + values: list[Type] = [] + # Preserve join order while avoiding O(n) lookups at every iteration + values_set: set[Type] = set() + for item in e.items: + if isinstance(item, StarExpr): + # fallback to slow path + self.resolved_type[e] = NoneType() + return None + + typ = self.accept(item) + if typ not in values_set: + values.append(typ) + values_set.add(typ) + + vt = self._first_or_join_fast_item(values) + if vt is None: + self.resolved_type[e] = NoneType() + return None + ct = self.chk.named_generic_type(container_fullname, [vt]) + if not self.in_lambda_expr: + # We cannot cache results in lambdas - their bodies can be accepted in + # error-suppressing watchers too early + self.resolved_type[e] = ct + return ct + + def _first_or_join_fast_item(self, items: list[Type]) -> Type | None: + if len(items) == 1 and not self.chk.current_node_deferred: + return items[0] + typ = join.join_type_list(items) + if not allow_fast_container_literal(typ): + # TODO: This is overly strict, many other types can be joined safely here. + # However, our join implementation isn't bug-free, and some joins may produce + # undesired `Any`s or even more surprising results. + return None + return typ + + def check_lst_expr(self, e: ListExpr | SetExpr | TupleExpr, fullname: str, tag: str) -> Type: + # fast path + t = self.fast_container_type(e, fullname) + if t: + return t + + # Translate into type checking a generic function call. + # Used for list and set expressions, as well as for tuples + # containing star expressions that don't refer to a + # Tuple. (Note: "lst" stands for list-set-tuple. 
:-) + tv = TypeVarType( + "T", + "T", + id=TypeVarId(-1, namespace=""), + values=[], + upper_bound=self.object_type(), + default=AnyType(TypeOfAny.from_omitted_generics), + ) + constructor = CallableType( + [tv], + [nodes.ARG_STAR], + [None], + self.chk.named_generic_type(fullname, [tv]), + self.named_type("builtins.function"), + name=tag, + variables=[tv], + ) + out = self.check_call( + constructor, + [(i.expr if isinstance(i, StarExpr) else i) for i in e.items], + [(nodes.ARG_STAR if isinstance(i, StarExpr) else nodes.ARG_POS) for i in e.items], + e, + )[0] + return remove_instance_last_known_values(out) + + def tuple_context_matches(self, expr: TupleExpr, ctx: TupleType) -> bool: + ctx_unpack_index = find_unpack_in_list(ctx.items) + if ctx_unpack_index is None: + # For fixed tuples accept everything that can possibly match, even if this + # requires all star items to be empty. + return len([e for e in expr.items if not isinstance(e, StarExpr)]) <= len(ctx.items) + # For variadic context, the only easy case is when structure matches exactly. + # TODO: try using tuple type context in more cases. + if len([e for e in expr.items if isinstance(e, StarExpr)]) != 1: + return False + expr_star_index = next(i for i, lv in enumerate(expr.items) if isinstance(lv, StarExpr)) + return len(expr.items) == len(ctx.items) and ctx_unpack_index == expr_star_index + + def visit_tuple_expr(self, e: TupleExpr) -> Type: + """Type check a tuple expression.""" + # Try to determine type context for type inference. + type_context = get_proper_type(self.type_context[-1]) + type_context_items = None + if isinstance(type_context, UnionType): + tuples_in_context = [ + t + for t in get_proper_types(type_context.items) + if (isinstance(t, TupleType) and self.tuple_context_matches(e, t)) + or is_named_instance(t, TUPLE_LIKE_INSTANCE_NAMES) + ] + if len(tuples_in_context) == 1: + type_context = tuples_in_context[0] + else: + # There are either no relevant tuples in the Union, or there is + # more than one. Either way, we can't decide on a context. + pass + + if isinstance(type_context, TupleType) and self.tuple_context_matches(e, type_context): + type_context_items = type_context.items + elif type_context and is_named_instance(type_context, TUPLE_LIKE_INSTANCE_NAMES): + assert isinstance(type_context, Instance) + if type_context.args: + type_context_items = [type_context.args[0]] * len(e.items) + # NOTE: it's possible for the context to have a different + # number of items than e. In that case we use those context + # items that match a position in e, and we'll worry about type + # mismatches later. + + unpack_in_context = False + if type_context_items is not None: + unpack_in_context = find_unpack_in_list(type_context_items) is not None + seen_unpack_in_items = False + allow_precise_tuples = ( + unpack_in_context or PRECISE_TUPLE_TYPES in self.chk.options.enable_incomplete_feature + ) + + # Infer item types. Give up if there's a star expression + # that's not a Tuple. + items: list[Type] = [] + j = 0 # Index into type_context_items; irrelevant if type_context_items is none + for i in range(len(e.items)): + item = e.items[i] + if isinstance(item, StarExpr): + # Special handling for star expressions. + # TODO: If there's a context, and item.expr is a + # TupleExpr, flatten it, so we can benefit from the + # context? Counterargument: Why would anyone write + # (1, *(2, 3)) instead of (1, 2, 3) except in a test? + if unpack_in_context: + # Note: this logic depends on full structure match in tuple_context_matches(). 
+ assert type_context_items + ctx_item = type_context_items[j] + assert isinstance(ctx_item, UnpackType) + ctx = ctx_item.type + else: + ctx = None + tt = self.accept(item.expr, ctx) + tt = get_proper_type(tt) + if isinstance(tt, TupleType): + if find_unpack_in_list(tt.items) is not None: + if seen_unpack_in_items: + # Multiple unpack items are not allowed in tuples, + # fall back to instance type. + return self.check_lst_expr(e, "builtins.tuple", "") + else: + seen_unpack_in_items = True + items.extend(tt.items) + # Note: this logic depends on full structure match in tuple_context_matches(). + if unpack_in_context: + j += 1 + else: + # If there is an unpack in expressions, but not in context, this will + # result in an error later, just do something predictable here. + j += len(tt.items) + else: + if allow_precise_tuples and not seen_unpack_in_items: + # Handle (x, *y, z), where y is e.g. tuple[Y, ...]. + if isinstance(tt, Instance) and self.chk.type_is_iterable(tt): + item_type = self.chk.iterable_item_type(tt, e) + mapped = self.chk.named_generic_type("builtins.tuple", [item_type]) + items.append(UnpackType(mapped)) + seen_unpack_in_items = True + continue + # A star expression that's not a Tuple. + # Treat the whole thing as a variable-length tuple. + return self.check_lst_expr(e, "builtins.tuple", "") + else: + if not type_context_items or j >= len(type_context_items): + tt = self.accept(item) + else: + tt = self.accept(item, type_context_items[j]) + j += 1 + items.append(tt) + # This is a partial fallback item type. A precise type will be calculated on demand. + fallback_item = AnyType(TypeOfAny.special_form) + result: ProperType = TupleType( + items, self.chk.named_generic_type("builtins.tuple", [fallback_item]) + ) + if seen_unpack_in_items: + # Return already normalized tuple type just in case. + result = expand_type(result, {}) + return result + + def fast_dict_type(self, e: DictExpr) -> Type | None: + """ + Fast path to determine the type of a dict literal, + based on the list of entries. This mostly impacts large + module-level constant definitions. + + Limitations: + + - no active type context + - at least one item + - only supported star expressions are other dict instances + - either exactly one distinct type (keys and values separately) inside, + or the joined type of all entries is an Instance or Tuple type + """ + ctx = self.type_context[-1] + if ctx or not e.items: + return None + + if self.chk.current_node_deferred: + # Guarantees that all items will be Any, we'll reject it anyway. 
+ return None + + rt = self.resolved_type.get(e, None) + if rt is not None: + return rt if isinstance(rt, Instance) else None + + keys: list[Type] = [] + values: list[Type] = [] + # Preserve join order while avoiding O(n) lookups at every iteration + keys_set: set[Type] = set() + values_set: set[Type] = set() + stargs: tuple[Type, Type] | None = None + for key, value in e.items: + if key is None: + st = get_proper_type(self.accept(value)) + if ( + isinstance(st, Instance) + and st.type.fullname == "builtins.dict" + and len(st.args) == 2 + ): + stargs = (st.args[0], st.args[1]) + else: + self.resolved_type[e] = NoneType() + return None + else: + key_t = self.accept(key) + if key_t not in keys_set: + keys.append(key_t) + keys_set.add(key_t) + value_t = self.accept(value) + if value_t not in values_set: + values.append(value_t) + values_set.add(value_t) + + kt = self._first_or_join_fast_item(keys) + if kt is None: + self.resolved_type[e] = NoneType() + return None + + vt = self._first_or_join_fast_item(values) + if vt is None: + self.resolved_type[e] = NoneType() + return None + + if stargs and (stargs[0] != kt or stargs[1] != vt): + self.resolved_type[e] = NoneType() + return None + dt = self.chk.named_generic_type("builtins.dict", [kt, vt]) + if not self.in_lambda_expr: + # We cannot cache results in lambdas - their bodies can be accepted in + # error-suppressing watchers too early + self.resolved_type[e] = dt + return dt + + def check_typeddict_literal_in_context( + self, e: DictExpr, typeddict_context: TypedDictType + ) -> Type: + orig_ret_type = self.check_typeddict_call_with_dict( + callee=typeddict_context, kwargs=e.items, context=e, orig_callee=None + ) + ret_type = get_proper_type(orig_ret_type) + if isinstance(ret_type, TypedDictType): + return ret_type.copy_modified() + return typeddict_context.copy_modified() + + def visit_dict_expr(self, e: DictExpr) -> Type: + """Type check a dict expression. + + Translate it into a call to dict(), with provisions for **expr. + """ + # if the dict literal doesn't match TypedDict, check_typeddict_call_with_dict reports + # an error, but returns the TypedDict type that matches the literal it found + # that would cause a second error when that TypedDict type is returned upstream + # to avoid the second error, we always return TypedDict type that was requested + typeddict_contexts, exhaustive = self.find_typeddict_context(self.type_context[-1], e) + if typeddict_contexts: + if len(typeddict_contexts) == 1 and exhaustive: + return self.check_typeddict_literal_in_context(e, typeddict_contexts[0]) + # Multiple items union, check if at least one of them matches cleanly. + for typeddict_context in typeddict_contexts: + with self.msg.filter_errors() as err, self.chk.local_type_map as tmap: + ret_type = self.check_typeddict_literal_in_context(e, typeddict_context) + if err.has_new_errors(): + continue + self.chk.store_types(tmap) + return ret_type + # No item matched without an error, so we can't unambiguously choose the item. + if exhaustive: + self.msg.typeddict_context_ambiguous(typeddict_contexts, e) + + # fast path attempt + dt = self.fast_dict_type(e) + if dt: + return dt + + # Define type variables (used in constructors below). 
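+        # Editor's note (illustrative only): a hypothetical literal such as
+        #     {"a": 1, **extra}
+        # is checked roughly as a call to the synthetic constructor built below,
+        # with ("a", 1) passed as a Tuple[KT, VT] argument and 'extra' checked
+        # against _typeshed.SupportsKeysAndGetItem[KT, VT].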
+ kt = TypeVarType( + "KT", + "KT", + id=TypeVarId(-1, namespace=""), + values=[], + upper_bound=self.object_type(), + default=AnyType(TypeOfAny.from_omitted_generics), + ) + vt = TypeVarType( + "VT", + "VT", + id=TypeVarId(-2, namespace=""), + values=[], + upper_bound=self.object_type(), + default=AnyType(TypeOfAny.from_omitted_generics), + ) + + # Collect function arguments, watching out for **expr. + args: list[Expression] = [] + expected_types: list[Type] = [] + for key, value in e.items: + if key is None: + args.append(value) + expected_types.append( + self.chk.named_generic_type("_typeshed.SupportsKeysAndGetItem", [kt, vt]) + ) + else: + tup = TupleExpr([key, value]) + if key.line >= 0: + tup.line = key.line + tup.column = key.column + else: + tup.line = value.line + tup.column = value.column + tup.end_line = value.end_line + tup.end_column = value.end_column + args.append(tup) + expected_types.append(TupleType([kt, vt], self.named_type("builtins.tuple"))) + + # The callable type represents a function like this (except we adjust for **expr): + # def (*v: Tuple[kt, vt]) -> Dict[kt, vt]: ... + constructor = CallableType( + expected_types, + [nodes.ARG_POS] * len(expected_types), + [None] * len(expected_types), + self.chk.named_generic_type("builtins.dict", [kt, vt]), + self.named_type("builtins.function"), + name="", + variables=[kt, vt], + ) + return self.check_call(constructor, args, [nodes.ARG_POS] * len(args), e)[0] + + def find_typeddict_context( + self, context: Type | None, dict_expr: DictExpr + ) -> tuple[list[TypedDictType], bool]: + """Extract `TypedDict` members of the enclosing context. + + Returns: + a 2-tuple, (found_candidates, is_exhaustive) + """ + context = get_proper_type(context) + if isinstance(context, TypedDictType): + return [context], True + elif isinstance(context, UnionType): + items = [] + exhaustive = True + for item in context.items: + item_contexts, item_exhaustive = self.find_typeddict_context(item, dict_expr) + for item_context in item_contexts: + if self.match_typeddict_call_with_dict( + item_context, dict_expr.items, dict_expr + ): + items.append(item_context) + exhaustive = exhaustive and item_exhaustive + return items, exhaustive + # No TypedDict type in context. + return [], False + + def visit_lambda_expr(self, e: LambdaExpr) -> Type: + """Type check lambda expression.""" + old_in_lambda = self.in_lambda_expr + self.in_lambda_expr = True + self.chk.check_default_args(e, body_is_trivial=False) + inferred_type, type_override = self.infer_lambda_type_using_context(e) + if not inferred_type: + self.chk.return_types.append(AnyType(TypeOfAny.special_form)) + # Type check everything in the body except for the final return + # statement (it can contain tuple unpacking before return). + with ( + self.chk.binder.frame_context(can_skip=True, fall_through=0), + self.chk.scope.push_function(e), + ): + # Lambdas can have more than one element in body, + # when we add "fictional" AssignmentStatement nodes, like in: + # `lambda (a, b): a` + for stmt in e.body.body[:-1]: + stmt.accept(self.chk) + # Only type check the return expression, not the return statement. + # There's no useful type context. + ret_type = self.accept(e.expr(), allow_none_return=True) + fallback = self.named_type("builtins.function") + self.chk.return_types.pop() + self.in_lambda_expr = old_in_lambda + return callable_type(e, fallback, ret_type) + else: + # Type context available. 
+ self.chk.return_types.append(inferred_type.ret_type) + with self.chk.tscope.function_scope(e): + self.chk.check_func_item(e, type_override=type_override) + if not self.chk.has_type(e.expr()): + # TODO: return expression must be accepted before exiting function scope. + with self.chk.binder.frame_context(can_skip=True, fall_through=0): + self.accept(e.expr(), allow_none_return=True) + ret_type = self.chk.lookup_type(e.expr()) + self.chk.return_types.pop() + self.in_lambda_expr = old_in_lambda + return replace_callable_return_type(inferred_type, ret_type) + + def infer_lambda_type_using_context( + self, e: LambdaExpr + ) -> tuple[CallableType | None, CallableType | None]: + """Try to infer lambda expression type using context. + + Return None if could not infer type. + The second item in the return type is the type_override parameter for check_func_item. + """ + # TODO also accept 'Any' context + ctx = get_proper_type(self.type_context[-1]) + + if isinstance(ctx, UnionType): + callables = [ + t for t in get_proper_types(ctx.relevant_items()) if isinstance(t, CallableType) + ] + if len(callables) == 1: + ctx = callables[0] + + if not ctx or not isinstance(ctx, CallableType): + return None, None + + # The context may have function type variables in it. We replace them + # since these are the type variables we are ultimately trying to infer; + # they must be considered as indeterminate. We use ErasedType since it + # does not affect type inference results (it is for purposes like this + # only). + if not self.chk.options.old_type_inference: + # With new type inference we can preserve argument types even if they + # are generic, since new inference algorithm can handle constraints + # like S <: T (we still erase return type since it's ultimately unknown). + extra_vars = [] + for arg in ctx.arg_types: + meta_vars = [tv for tv in get_all_type_vars(arg) if tv.id.is_meta_var()] + extra_vars.extend([tv for tv in meta_vars if tv not in extra_vars]) + callable_ctx = ctx.copy_modified( + ret_type=replace_meta_vars(ctx.ret_type, ErasedType()), + variables=list(ctx.variables) + extra_vars, + ) + else: + erased_ctx = replace_meta_vars(ctx, ErasedType()) + assert isinstance(erased_ctx, ProperType) and isinstance(erased_ctx, CallableType) + callable_ctx = erased_ctx + + # The callable_ctx may have a fallback of builtins.type if the context + # is a constructor -- but this fallback doesn't make sense for lambdas. + callable_ctx = callable_ctx.copy_modified(fallback=self.named_type("builtins.function")) + + if callable_ctx.type_guard is not None or callable_ctx.type_is is not None: + # Lambda's return type cannot be treated as a `TypeGuard`, + # because it is implicit. And `TypeGuard`s must be explicit. + # See https://github.com/python/mypy/issues/9927 + return None, None + + arg_kinds = [arg.kind for arg in e.arguments] + + if callable_ctx.is_ellipsis_args or ctx.param_spec() is not None: + # Fill in Any arguments to match the arguments of the lambda. + callable_ctx = callable_ctx.copy_modified( + is_ellipsis_args=False, + arg_types=[AnyType(TypeOfAny.special_form)] * len(arg_kinds), + arg_kinds=arg_kinds, + arg_names=e.arg_names.copy(), + ) + + if ARG_STAR in arg_kinds or ARG_STAR2 in arg_kinds: + # TODO treat this case appropriately + return callable_ctx, None + + if callable_ctx.arg_kinds != arg_kinds: + # Incompatible context; cannot use it to infer types. 
+ self.chk.fail(message_registry.CANNOT_INFER_LAMBDA_TYPE, e) + return None, None + + # Type of lambda must have correct argument names, to prevent false + # negatives when lambdas appear in `ParamSpec` context. + return callable_ctx.copy_modified(arg_names=e.arg_names), callable_ctx + + def visit_super_expr(self, e: SuperExpr) -> Type: + """Type check a super expression (non-lvalue).""" + + # We have an expression like super(T, var).member + + # First compute the types of T and var + types = self._super_arg_types(e) + if isinstance(types, tuple): + type_type, instance_type = types + else: + return types + + # Now get the MRO + type_info = type_info_from_type(type_type) + if type_info is None: + self.chk.fail(message_registry.UNSUPPORTED_ARG_1_FOR_SUPER, e) + return AnyType(TypeOfAny.from_error) + + instance_info = type_info_from_type(instance_type) + if instance_info is None: + self.chk.fail(message_registry.UNSUPPORTED_ARG_2_FOR_SUPER, e) + return AnyType(TypeOfAny.from_error) + + mro = instance_info.mro + + # The base is the first MRO entry *after* type_info that has a member + # with the right name + index = None + if type_info in mro: + index = mro.index(type_info) + else: + method = self.chk.scope.current_function() + # Mypy explicitly allows supertype upper bounds (and no upper bound at all) + # for annotating self-types. However, if such an annotation is used for + # checking super() we will still get an error. So to be consistent, we also + # allow such imprecise annotations for use with super(), where we fall back + # to the current class MRO instead. This works only from inside a method. + if method is not None and is_self_type_like( + instance_type, is_classmethod=method.is_class + ): + if e.info and type_info in e.info.mro: + mro = e.info.mro + index = mro.index(type_info) + if index is None: + if ( + instance_info.is_protocol + and instance_info != type_info + and not type_info.is_protocol + ): + # A special case for mixins, in this case super() should point + # directly to the host protocol, this is not safe, since the real MRO + # is not known yet for mixin, but this feature is more like an escape hatch. + index = -1 + else: + self.chk.fail(message_registry.SUPER_ARG_2_NOT_INSTANCE_OF_ARG_1, e) + return AnyType(TypeOfAny.from_error) + + if len(mro) == index + 1: + self.chk.fail(message_registry.TARGET_CLASS_HAS_NO_BASE_CLASS, e) + return AnyType(TypeOfAny.from_error) + + for base in mro[index + 1 :]: + if e.name in base.names or base == mro[-1]: + if e.info and e.info.fallback_to_any and base == mro[-1]: + # There's an undefined base class, and we're at the end of the + # chain. That's not an error. + return AnyType(TypeOfAny.special_form) + + return analyze_member_access( + name=e.name, + typ=instance_type, + is_lvalue=False, + is_super=True, + is_operator=False, + original_type=instance_type, + override_info=base, + context=e, + chk=self.chk, + in_literal_context=self.is_literal_context(), + ) + + assert False, "unreachable" + + def _super_arg_types(self, e: SuperExpr) -> Type | tuple[Type, Type]: + """ + Computes the types of the type and instance expressions in super(T, instance), or the + implicit ones for zero-argument super() expressions. Returns a single type for the whole + super expression when possible (for errors, anys), otherwise the pair of computed types. + """ + + if not self.chk.in_checked_function(): + return AnyType(TypeOfAny.unannotated) + elif len(e.call.args) == 0: + if not e.info: + # This has already been reported by the semantic analyzer. 
+ return AnyType(TypeOfAny.from_error) + elif self.chk.scope.active_class(): + self.chk.fail(message_registry.SUPER_OUTSIDE_OF_METHOD_NOT_SUPPORTED, e) + return AnyType(TypeOfAny.from_error) + + # Zero-argument super() is like super(, ) + current_type = fill_typevars(e.info) + type_type: ProperType = TypeType(current_type) + + # Use the type of the self argument, in case it was annotated + method = self.chk.scope.current_function() + assert method is not None + if method.arguments: + instance_type: Type = method.arguments[0].variable.type or current_type + else: + self.chk.fail(message_registry.SUPER_ENCLOSING_POSITIONAL_ARGS_REQUIRED, e) + return AnyType(TypeOfAny.from_error) + elif ARG_STAR in e.call.arg_kinds: + self.chk.fail(message_registry.SUPER_VARARGS_NOT_SUPPORTED, e) + return AnyType(TypeOfAny.from_error) + elif set(e.call.arg_kinds) != {ARG_POS}: + self.chk.fail(message_registry.SUPER_POSITIONAL_ARGS_REQUIRED, e) + return AnyType(TypeOfAny.from_error) + elif len(e.call.args) == 1: + self.chk.fail(message_registry.SUPER_WITH_SINGLE_ARG_NOT_SUPPORTED, e) + return AnyType(TypeOfAny.from_error) + elif len(e.call.args) == 2: + type_type = get_proper_type(self.accept(e.call.args[0])) + instance_type = self.accept(e.call.args[1]) + else: + self.chk.fail(message_registry.TOO_MANY_ARGS_FOR_SUPER, e) + return AnyType(TypeOfAny.from_error) + + # Imprecisely assume that the type is the current class + if isinstance(type_type, AnyType): + if e.info: + type_type = TypeType(fill_typevars(e.info)) + else: + return AnyType(TypeOfAny.from_another_any, source_any=type_type) + elif isinstance(type_type, TypeType): + type_item = type_type.item + if isinstance(type_item, AnyType): + if e.info: + type_type = TypeType(fill_typevars(e.info)) + else: + return AnyType(TypeOfAny.from_another_any, source_any=type_item) + + if not isinstance(type_type, TypeType) and not ( + isinstance(type_type, FunctionLike) and type_type.is_type_obj() + ): + self.msg.first_argument_for_super_must_be_type(type_type, e) + return AnyType(TypeOfAny.from_error) + + # Imprecisely assume that the instance is of the current class + instance_type = get_proper_type(instance_type) + if isinstance(instance_type, AnyType): + if e.info: + instance_type = fill_typevars(e.info) + else: + return AnyType(TypeOfAny.from_another_any, source_any=instance_type) + elif isinstance(instance_type, TypeType): + instance_item = instance_type.item + if isinstance(instance_item, AnyType): + if e.info: + instance_type = TypeType(fill_typevars(e.info)) + else: + return AnyType(TypeOfAny.from_another_any, source_any=instance_item) + + return type_type, instance_type + + def visit_slice_expr(self, e: SliceExpr) -> Type: + try: + supports_index = self.chk.named_type("typing_extensions.SupportsIndex") + except KeyError: + supports_index = self.chk.named_type("builtins.int") # thanks, fixture life + expected = make_optional_type(supports_index) + type_args = [] + for index in [e.begin_index, e.end_index, e.stride]: + if index: + t = self.accept(index) + self.chk.check_subtype(t, expected, index, message_registry.INVALID_SLICE_INDEX) + type_args.append(t) + else: + type_args.append(NoneType()) + return self.chk.named_generic_type("builtins.slice", type_args) + + def visit_list_comprehension(self, e: ListComprehension) -> Type: + return self.check_generator_or_comprehension( + e.generator, "builtins.list", "" + ) + + def visit_set_comprehension(self, e: SetComprehension) -> Type: + return self.check_generator_or_comprehension( + e.generator, "builtins.set", "" 
+        )
+
+    def visit_generator_expr(self, e: GeneratorExpr) -> Type:
+        # If any of the comprehensions use async for, or await is used anywhere but
+        # in the leftmost sequence, the expression will return an async generator object.
+        if (
+            any(e.is_async)
+            or has_await_expression(e.left_expr)
+            or any(has_await_expression(sequence) for sequence in e.sequences[1:])
+            or any(has_await_expression(cond) for condlist in e.condlists for cond in condlist)
+        ):
+            typ = "typing.AsyncGenerator"
+            # received type is always None in async generator expressions
+            additional_args: list[Type] = [NoneType()]
+        else:
+            typ = "typing.Generator"
+            # received type and returned type are None
+            additional_args = [NoneType(), NoneType()]
+        return self.check_generator_or_comprehension(
+            e, typ, "<generator>", additional_args=additional_args
+        )
+
+    def check_generator_or_comprehension(
+        self,
+        gen: GeneratorExpr,
+        type_name: str,
+        id_for_messages: str,
+        additional_args: list[Type] | None = None,
+    ) -> Type:
+        """Type check a generator expression or a list comprehension."""
+        additional_args = additional_args or []
+        with self.chk.binder.frame_context(can_skip=True, fall_through=0):
+            self.check_for_comp(gen)
+
+            # Infer the type of the comprehension by using a synthetic generic
+            # callable type.
+            tv = TypeVarType(
+                "T",
+                "T",
+                id=TypeVarId(-1, namespace=""),
+                values=[],
+                upper_bound=self.object_type(),
+                default=AnyType(TypeOfAny.from_omitted_generics),
+            )
+            tv_list: list[Type] = [tv]
+            constructor = CallableType(
+                tv_list,
+                [nodes.ARG_POS],
+                [None],
+                self.chk.named_generic_type(type_name, tv_list + additional_args),
+                self.chk.named_type("builtins.function"),
+                name=id_for_messages,
+                variables=[tv],
+            )
+            return self.check_call(constructor, [gen.left_expr], [nodes.ARG_POS], gen)[0]
+
+    def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> Type:
+        """Type check a dictionary comprehension."""
+        with self.chk.binder.frame_context(can_skip=True, fall_through=0):
+            self.check_for_comp(e)
+
+            # Infer the type of the dictionary comprehension by using a synthetic generic
+            # callable type.
+            ktdef = TypeVarType(
+                "KT",
+                "KT",
+                id=TypeVarId(-1, namespace=""),
+                values=[],
+                upper_bound=self.object_type(),
+                default=AnyType(TypeOfAny.from_omitted_generics),
+            )
+            vtdef = TypeVarType(
+                "VT",
+                "VT",
+                id=TypeVarId(-2, namespace=""),
+                values=[],
+                upper_bound=self.object_type(),
+                default=AnyType(TypeOfAny.from_omitted_generics),
+            )
+            constructor = CallableType(
+                [ktdef, vtdef],
+                [nodes.ARG_POS, nodes.ARG_POS],
+                [None, None],
+                self.chk.named_generic_type("builtins.dict", [ktdef, vtdef]),
+                self.chk.named_type("builtins.function"),
+                name="<dictionary-comprehension>",
+                variables=[ktdef, vtdef],
+            )
+            return self.check_call(
+                constructor, [e.key, e.value], [nodes.ARG_POS, nodes.ARG_POS], e
+            )[0]
+
+    def check_for_comp(self, e: GeneratorExpr | DictionaryComprehension) -> None:
+        """Check the for_comp part of comprehensions. That is the part from 'for':
+        ... for x in y if z
+
+        Note: This adds the type information derived from the condlists to the current binder.
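+
+        For example, in a comprehension such as
+
+            [x.bit_length() for x in values if isinstance(x, int)]
+
+        the isinstance() condition narrows 'x' to int before the element expression
+        is checked, just as an equivalent 'if' statement would.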
+ """ + for index, sequence, conditions, is_async in zip( + e.indices, e.sequences, e.condlists, e.is_async + ): + if is_async: + _, sequence_type = self.chk.analyze_async_iterable_item_type(sequence) + else: + _, sequence_type = self.chk.analyze_iterable_item_type(sequence) + if ( + isinstance(get_proper_type(sequence_type), UninhabitedType) + and isinstance(index, NameExpr) + and index.name == "_" + ): + # To preserve backward compatibility, avoid inferring Never for "_" + sequence_type = AnyType(TypeOfAny.special_form) + + self.chk.analyze_index_variables(index, sequence_type, True, e) + for condition in conditions: + self.accept(condition) + + # values are only part of the comprehension when all conditions are true + true_map, false_map = self.chk.find_isinstance_check(condition) + + if true_map: + self.chk.push_type_map(true_map) + + if codes.REDUNDANT_EXPR in self.chk.options.enabled_error_codes: + if true_map is None: + self.msg.redundant_condition_in_comprehension(False, condition) + elif false_map is None: + self.msg.redundant_condition_in_comprehension(True, condition) + + def visit_conditional_expr(self, e: ConditionalExpr, allow_none_return: bool = False) -> Type: + self.accept(e.cond) + ctx = self.type_context[-1] + + # Gain type information from isinstance if it is there + # but only for the current expression + if_map, else_map = self.chk.find_isinstance_check(e.cond) + if codes.REDUNDANT_EXPR in self.chk.options.enabled_error_codes: + if if_map is None: + self.msg.redundant_condition_in_if(False, e.cond) + elif else_map is None: + self.msg.redundant_condition_in_if(True, e.cond) + + if_type = self.analyze_cond_branch( + if_map, e.if_expr, context=ctx, allow_none_return=allow_none_return + ) + + # we want to keep the narrowest value of if_type for union'ing the branches + # however, it would be silly to pass a literal as a type context. Pass the + # underlying fallback type instead. + if_type_fallback = simple_literal_type(get_proper_type(if_type)) or if_type + + # Analyze the right branch using full type context and store the type + full_context_else_type = self.analyze_cond_branch( + else_map, e.else_expr, context=ctx, allow_none_return=allow_none_return + ) + + if not mypy.checker.is_valid_inferred_type(if_type, self.chk.options): + # Analyze the right branch disregarding the left branch. + else_type = full_context_else_type + # we want to keep the narrowest value of else_type for union'ing the branches + # however, it would be silly to pass a literal as a type context. Pass the + # underlying fallback type instead. + else_type_fallback = simple_literal_type(get_proper_type(else_type)) or else_type + + # If it would make a difference, re-analyze the left + # branch using the right branch's type as context. + if ctx is None or not is_equivalent(else_type_fallback, ctx): + # TODO: If it's possible that the previous analysis of + # the left branch produced errors that are avoided + # using this context, suppress those errors. + if_type = self.analyze_cond_branch( + if_map, + e.if_expr, + context=else_type_fallback, + allow_none_return=allow_none_return, + ) + + elif if_type_fallback == ctx: + # There is no point re-running the analysis if if_type is equal to ctx. + # That would be an exact duplicate of the work we just did. + # This optimization is particularly important to avoid exponential blowup with nested + # if/else expressions: https://github.com/python/mypy/issues/9591 + # TODO: would checking for is_proper_subtype also work and cover more cases? 
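+            # A sketch of the overall flow of this visitor (illustrative only): for
+            # user code like
+            #
+            #     def f(x: int | str) -> int:
+            #         return x if isinstance(x, int) else 0
+            #
+            # find_isinstance_check() narrows x to int in the if-branch, the else-branch
+            # is typed int, and the two branch types are unioned below into int.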
+ else_type = full_context_else_type + else: + # Analyze the right branch in the context of the left + # branch's type. + else_type = self.analyze_cond_branch( + else_map, + e.else_expr, + context=if_type_fallback, + allow_none_return=allow_none_return, + ) + + # In most cases using if_type as a context for right branch gives better inferred types. + # This is however not the case for literal types, so use the full context instead. + if is_literal_type_like(full_context_else_type) and not is_literal_type_like(else_type): + else_type = full_context_else_type + + res: Type = make_simplified_union([if_type, else_type]) + if has_uninhabited_component(res) and not isinstance( + get_proper_type(self.type_context[-1]), UnionType + ): + # In rare cases with empty collections join may give a better result. + alternative = join.join_types(if_type, else_type) + p_alt = get_proper_type(alternative) + if not isinstance(p_alt, Instance) or p_alt.type.fullname != "builtins.object": + res = alternative + return res + + def analyze_cond_branch( + self, + map: dict[Expression, Type] | None, + node: Expression, + context: Type | None, + allow_none_return: bool = False, + suppress_unreachable_errors: bool = True, + ) -> Type: + with self.chk.binder.frame_context(can_skip=True, fall_through=0): + if map is None: + # We still need to type check node, in case we want to + # process it for isinstance checks later. Since the branch was + # determined to be unreachable, any errors should be suppressed. + with self.msg.filter_errors(filter_errors=suppress_unreachable_errors): + self.accept(node, type_context=context, allow_none_return=allow_none_return) + return UninhabitedType() + self.chk.push_type_map(map) + return self.accept(node, type_context=context, allow_none_return=allow_none_return) + + def _combined_context(self, ty: Type | None) -> Type | None: + ctx_items = [] + if ty is not None: + if has_any_type(ty): + # HACK: Any should be contagious, `dict[str, Any] or ` should still + # infer Any in x. + return ty + ctx_items.append(ty) + if self.type_context and self.type_context[-1] is not None: + ctx_items.append(self.type_context[-1]) + if ctx_items: + return make_simplified_union(ctx_items) + return None + + # + # Helpers + # + + def accept( + self, + node: Expression, + type_context: Type | None = None, + allow_none_return: bool = False, + always_allow_any: bool = False, + is_callee: bool = False, + ) -> Type: + """Type check a node in the given type context. If allow_none_return + is True and this expression is a call, allow it to return None. This + applies only to this expression and not any subexpressions. + """ + if node in self.type_overrides: + # This branch is very fast, there is no point timing it. + return self.type_overrides[node] + # We don't use context manager here to get most precise data (and avoid overhead). 
+ record_time = False + if self.collect_line_checking_stats and not self.in_expression: + t0 = time.perf_counter_ns() + self.in_expression = True + record_time = True + self.type_context.append(type_context) + old_is_callee = self.is_callee + self.is_callee = is_callee + try: + p_type_context = get_proper_type(type_context) + if allow_none_return and isinstance(node, CallExpr): + typ = self.visit_call_expr(node, allow_none_return=True) + elif allow_none_return and isinstance(node, YieldFromExpr): + typ = self.visit_yield_from_expr(node, allow_none_return=True) + elif allow_none_return and isinstance(node, ConditionalExpr): + typ = self.visit_conditional_expr(node, allow_none_return=True) + elif allow_none_return and isinstance(node, AwaitExpr): + typ = self.visit_await_expr(node, allow_none_return=True) + + elif ( + isinstance(p_type_context, TypeType) + and p_type_context.is_type_form + and (node_as_type := self.try_parse_as_type_expression(node)) is not None + ): + typ = TypeType.make_normalized( + node_as_type, + line=node_as_type.line, + column=node_as_type.column, + is_type_form=True, + ) # r-value type, when interpreted as a type expression + elif ( + isinstance(p_type_context, UnionType) + and any( + isinstance(p_item := get_proper_type(item), TypeType) and p_item.is_type_form + for item in p_type_context.items + ) + and (node_as_type := self.try_parse_as_type_expression(node)) is not None + ): + typ1 = TypeType.make_normalized( + node_as_type, + line=node_as_type.line, + column=node_as_type.column, + is_type_form=True, + ) + if is_subtype(typ1, p_type_context): + typ = typ1 # r-value type, when interpreted as a type expression + else: + typ2 = node.accept(self) + typ = typ2 # r-value type, when interpreted as a value expression + # Deeply nested generic calls can deteriorate performance dramatically. + # Although in most cases caching makes little difference, in worst case + # it avoids exponential complexity. + # We cannot use cache inside lambdas, because they skip immediate type + # context, and use enclosing one, see infer_lambda_type_using_context(). + # TODO: consider using cache for more expression kinds. 
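+            # A sketch of what the allow_none_return dispatch above permits
+            # (hypothetical user code):
+            #
+            #     def f() -> None: ...
+            #     x = f()          # flagged: the call does not return a value
+            #     def g() -> None:
+            #         return f()   # allowed, the return position opts in
+            #
+            # only positions that pass allow_none_return=True accept the None-returning call.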
+ elif ( + isinstance(node, (CallExpr, ListExpr, TupleExpr, DictExpr, OpExpr)) + and not (self.in_lambda_expr or self.chk.current_node_deferred) + and not self.chk.options.disable_expression_cache + ): + if (node, type_context) in self.expr_cache: + binder_version, typ, messages, type_map = self.expr_cache[(node, type_context)] + if binder_version == self.chk.binder.version: + self.chk.store_types(type_map) + self.msg.add_errors(messages) + else: + typ = self.accept_maybe_cache(node, type_context=type_context) + else: + typ = self.accept_maybe_cache(node, type_context=type_context) + else: + typ = node.accept(self) # r-value type, when interpreted as a value expression + except Exception as err: + report_internal_error( + err, self.chk.errors.file, node.line, self.chk.errors, self.chk.options + ) + self.is_callee = old_is_callee + self.type_context.pop() + assert typ is not None + self.chk.store_type(node, typ) + + if ( + self.chk.options.disallow_any_expr + and not always_allow_any + and not self.chk.is_stub + and self.chk.in_checked_function() + and has_any_type(typ) + and not self.chk.current_node_deferred + ): + self.msg.disallowed_any_type(typ, node) + + if not self.chk.in_checked_function() or self.chk.current_node_deferred: + result: Type = AnyType(TypeOfAny.unannotated) + else: + result = typ + if record_time: + self.per_line_checking_time_ns[node.line] += time.perf_counter_ns() - t0 + self.in_expression = False + return result + + def accept_maybe_cache(self, node: Expression, type_context: Type | None = None) -> Type: + binder_version = self.chk.binder.version + with self.msg.filter_errors(filter_errors=True, save_filtered_errors=True) as msg: + with self.chk.local_type_map as type_map: + typ = node.accept(self) + messages = msg.filtered_errors() + if binder_version == self.chk.binder.version and not self.chk.current_node_deferred: + self.expr_cache[(node, type_context)] = (binder_version, typ, messages, type_map) + self.chk.store_types(type_map) + self.msg.add_errors(messages) + return typ + + def named_type(self, name: str) -> Instance: + """Return an instance type with type given by the name and no type + arguments. Alias for TypeChecker.named_type. + """ + return self.chk.named_type(name) + + def type_alias_type_type(self) -> Instance: + """Returns a `typing.TypeAliasType` or `typing_extensions.TypeAliasType`.""" + if self.chk.options.python_version >= (3, 12): + return self.named_type("typing.TypeAliasType") + return self.named_type("typing_extensions.TypeAliasType") + + def is_valid_var_arg(self, typ: Type) -> bool: + """Is a type valid as a *args argument?""" + typ = get_proper_type(typ) + return isinstance(typ, (TupleType, AnyType, ParamSpecType, UnpackType)) or is_subtype( + typ, self.chk.named_generic_type("typing.Iterable", [AnyType(TypeOfAny.special_form)]) + ) + + def is_valid_keyword_var_arg(self, typ: Type) -> bool: + """Is a type valid as a **kwargs argument?""" + return ( + is_subtype( + typ, + self.chk.named_generic_type( + "_typeshed.SupportsKeysAndGetItem", + [self.named_type("builtins.str"), AnyType(TypeOfAny.special_form)], + ), + ) + or is_subtype( + typ, + self.chk.named_generic_type( + "_typeshed.SupportsKeysAndGetItem", [UninhabitedType(), UninhabitedType()] + ), + ) + or isinstance(typ, ParamSpecType) + ) + + def not_ready_callback(self, name: str, context: Context) -> None: + """Called when we can't infer the type of a variable because it's not ready yet. + + Either defer type checking of the enclosing function to the next + pass or report an error. 
+ """ + self.chk.handle_cannot_determine_type(name, context) + + def visit_yield_expr(self, e: YieldExpr) -> Type: + return_type = self.chk.return_types[-1] + expected_item_type = self.chk.get_generator_yield_type(return_type, False) + if e.expr is None: + if ( + not isinstance(get_proper_type(expected_item_type), (NoneType, AnyType)) + and self.chk.in_checked_function() + ): + self.chk.fail(message_registry.YIELD_VALUE_EXPECTED, e) + else: + actual_item_type = self.accept(e.expr, expected_item_type) + self.chk.check_subtype( + actual_item_type, + expected_item_type, + e, + message_registry.INCOMPATIBLE_TYPES_IN_YIELD, + "actual type", + "expected type", + ) + return self.chk.get_generator_receive_type(return_type, False) + + def visit_await_expr(self, e: AwaitExpr, allow_none_return: bool = False) -> Type: + expected_type = self.type_context[-1] + if expected_type is not None: + expected_type = self.chk.named_generic_type("typing.Awaitable", [expected_type]) + actual_type = get_proper_type(self.accept(e.expr, expected_type)) + if isinstance(actual_type, AnyType): + return AnyType(TypeOfAny.from_another_any, source_any=actual_type) + ret = self.check_awaitable_expr( + actual_type, e, message_registry.INCOMPATIBLE_TYPES_IN_AWAIT + ) + if not allow_none_return and isinstance(get_proper_type(ret), NoneType): + self.chk.msg.does_not_return_value(None, e) + return ret + + def check_awaitable_expr( + self, t: Type, ctx: Context, msg: str | ErrorMessage, ignore_binder: bool = False + ) -> Type: + """Check the argument to `await` and extract the type of value. + + Also used by `async for` and `async with`. + """ + if not self.chk.check_subtype( + t, self.named_type("typing.Awaitable"), ctx, msg, "actual type", "expected type" + ): + return AnyType(TypeOfAny.special_form) + else: + generator = self.check_method_call_by_name("__await__", t, [], [], ctx)[0] + ret_type = self.chk.get_generator_return_type(generator, False) + ret_type = get_proper_type(ret_type) + if ( + not ignore_binder + and isinstance(ret_type, UninhabitedType) + and not ret_type.ambiguous + ): + self.chk.binder.unreachable() + return ret_type + + def visit_yield_from_expr(self, e: YieldFromExpr, allow_none_return: bool = False) -> Type: + # NOTE: Whether `yield from` accepts an `async def` decorated + # with `@types.coroutine` (or `@asyncio.coroutine`) depends on + # whether the generator containing the `yield from` is itself + # thus decorated. But it accepts a generator regardless of + # how it's decorated. + return_type = self.chk.return_types[-1] + # TODO: What should the context for the sub-expression be? + # If the containing function has type Generator[X, Y, ...], + # the context should be Generator[X, Y, T], where T is the + # context of the 'yield from' itself (but it isn't known). + subexpr_type = get_proper_type(self.accept(e.expr)) + + # Check that the expr is an instance of Iterable and get the type of the iterator produced + # by __iter__. 
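+        # A small example of the await plumbing above (hypothetical user code):
+        #
+        #     async def g() -> int: ...
+        #     async def f() -> None:
+        #         x = await g()    # x is inferred as int
+        #
+        # check_awaitable_expr() checks g()'s type against typing.Awaitable and pulls
+        # the awaited value type out of its __await__() generator.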
+ if isinstance(subexpr_type, AnyType): + iter_type: Type = AnyType(TypeOfAny.from_another_any, source_any=subexpr_type) + elif self.chk.type_is_iterable(subexpr_type): + if is_async_def(subexpr_type) and not has_coroutine_decorator(return_type): + self.chk.msg.yield_from_invalid_operand_type(subexpr_type, e) + + any_type = AnyType(TypeOfAny.special_form) + generic_generator_type = self.chk.named_generic_type( + "typing.Generator", [any_type, any_type, any_type] + ) + generic_generator_type.set_line(e) + iter_type, _ = self.check_method_call_by_name( + "__iter__", subexpr_type, [], [], context=generic_generator_type + ) + else: + if not (is_async_def(subexpr_type) and has_coroutine_decorator(return_type)): + self.chk.msg.yield_from_invalid_operand_type(subexpr_type, e) + iter_type = AnyType(TypeOfAny.from_error) + else: + iter_type = self.check_awaitable_expr( + subexpr_type, e, message_registry.INCOMPATIBLE_TYPES_IN_YIELD_FROM + ) + + # Check that the iterator's item type matches the type yielded by the Generator function + # containing this `yield from` expression. + expected_item_type = self.chk.get_generator_yield_type(return_type, False) + actual_item_type = self.chk.get_generator_yield_type(iter_type, False) + + self.chk.check_subtype( + actual_item_type, + expected_item_type, + e, + message_registry.INCOMPATIBLE_TYPES_IN_YIELD_FROM, + "actual type", + "expected type", + ) + + # Determine the type of the entire yield from expression. + iter_type = get_proper_type(iter_type) + expr_type = self.chk.get_generator_return_type(iter_type, is_coroutine=False) + + if not allow_none_return and isinstance(get_proper_type(expr_type), NoneType): + self.chk.msg.does_not_return_value(None, e) + return expr_type + + def visit_temp_node(self, e: TempNode) -> Type: + return e.type + + def visit_type_var_expr(self, e: TypeVarExpr) -> Type: + p_default = get_proper_type(e.default) + if not ( + isinstance(p_default, AnyType) + and p_default.type_of_any == TypeOfAny.from_omitted_generics + ): + if not is_subtype(p_default, e.upper_bound): + self.chk.fail("TypeVar default must be a subtype of the bound type", e) + if e.values and not any(is_same_type(p_default, value) for value in e.values): + self.chk.fail("TypeVar default must be one of the constraint types", e) + return AnyType(TypeOfAny.special_form) + + def visit_paramspec_expr(self, e: ParamSpecExpr) -> Type: + return AnyType(TypeOfAny.special_form) + + def visit_type_var_tuple_expr(self, e: TypeVarTupleExpr) -> Type: + return AnyType(TypeOfAny.special_form) + + def visit_newtype_expr(self, e: NewTypeExpr) -> Type: + return AnyType(TypeOfAny.special_form) + + def visit_namedtuple_expr(self, e: NamedTupleExpr) -> Type: + tuple_type = e.info.tuple_type + if tuple_type: + if self.chk.options.disallow_any_unimported and has_any_from_unimported_type( + tuple_type + ): + self.msg.unimported_type_becomes_any("NamedTuple type", tuple_type, e) + check_for_explicit_any( + tuple_type, self.chk.options, self.chk.is_typeshed_stub, self.msg, context=e + ) + return AnyType(TypeOfAny.special_form) + + def visit_enum_call_expr(self, e: EnumCallExpr) -> Type: + for name, value in zip(e.items, e.values): + if value is not None: + typ = self.accept(value) + if not isinstance(get_proper_type(typ), AnyType): + var = e.info.names[name].node + if isinstance(var, Var): + # Inline TypeChecker.set_inferred_type(), + # without the lvalue. (This doesn't really do + # much, since the value attribute is defined + # to have type Any in the typeshed stub.) 
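+                        # Illustrative example: in the functional form
+                        #
+                        #     Color = Enum("Color", [("RED", 1), ("BLUE", 2)])
+                        #
+                        # the value expressions 1 and 2 are type checked here and
+                        # their types recorded on the member Vars.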
+ var.type = typ + var.is_inferred = True + return AnyType(TypeOfAny.special_form) + + def visit_typeddict_expr(self, e: TypedDictExpr) -> Type: + return AnyType(TypeOfAny.special_form) + + def visit__promote_expr(self, e: PromoteExpr) -> Type: + return e.type + + def visit_star_expr(self, e: StarExpr) -> Type: + # TODO: should this ever be called (see e.g. mypyc visitor)? + return self.accept(e.expr) + + def object_type(self) -> Instance: + """Return instance type 'object'.""" + return self.named_type("builtins.object") + + def bool_type(self) -> Instance: + """Return instance type 'bool'.""" + return self.named_type("builtins.bool") + + @overload + def narrow_type_from_binder(self, expr: Expression, known_type: Type) -> Type: ... + + @overload + def narrow_type_from_binder( + self, expr: Expression, known_type: Type, skip_non_overlapping: bool + ) -> Type | None: ... + + def narrow_type_from_binder( + self, expr: Expression, known_type: Type, skip_non_overlapping: bool = False + ) -> Type | None: + """Narrow down a known type of expression using information in conditional type binder. + + If 'skip_non_overlapping' is True, return None if the type and restriction are + non-overlapping. + """ + if literal(expr) >= LITERAL_TYPE: + restriction = self.chk.binder.get(expr) + # If the current node is deferred, some variables may get Any types that they + # otherwise wouldn't have. We don't want to narrow down these since it may + # produce invalid inferred Optional[Any] types, at least. + if restriction and not ( + isinstance(get_proper_type(known_type), AnyType) and self.chk.current_node_deferred + ): + # Note: this call should match the one in narrow_declared_type(). + if skip_non_overlapping and not is_overlapping_types( + known_type, restriction, prohibit_none_typevar_overlap=True + ): + return None + narrowed = narrow_declared_type(known_type, restriction) + if isinstance(get_proper_type(narrowed), UninhabitedType): + # If we hit this case, it means that we can't reliably mark the code as + # unreachable, but the resulting type can't be expressed in type system. + # Falling back to restriction is more intuitive in most cases. + return restriction + return narrowed + return known_type + + def has_abstract_type_part(self, caller_type: ProperType, callee_type: ProperType) -> bool: + # TODO: support other possible types here + if isinstance(caller_type, TupleType) and isinstance(callee_type, TupleType): + return any( + self.has_abstract_type(get_proper_type(caller), get_proper_type(callee)) + for caller, callee in zip(caller_type.items, callee_type.items) + ) + return self.has_abstract_type(caller_type, callee_type) + + def has_abstract_type(self, caller_type: ProperType, callee_type: ProperType) -> bool: + return ( + isinstance(caller_type, FunctionLike) + and isinstance(callee_type, TypeType) + and caller_type.is_type_obj() + and (caller_type.type_object().is_abstract or caller_type.type_object().is_protocol) + and isinstance(callee_type.item, Instance) + and (callee_type.item.type.is_abstract or callee_type.item.type.is_protocol) + and not self.chk.allow_abstract_call + ) + + def try_parse_as_type_expression(self, maybe_type_expr: Expression) -> Type | None: + """Try to parse a value Expression as a type expression. + If success then return the type that it spells. + If fails then return None. + + A value expression that is parsable as a type expression may be used + where a TypeForm is expected to represent the spelled type. 
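+
+        For example (a sketch of the intended TypeForm behaviour), given
+
+            t: TypeForm[int | None] = int | None
+
+        the r-value is re-parsed here as the type it spells rather than as an
+        ordinary value expression.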
+ + Unlike SemanticAnalyzer.try_parse_as_type_expression() + (used in the earlier SemanticAnalyzer pass), this function can only + recognize type expressions which contain no string annotations.""" + if not isinstance(maybe_type_expr, MaybeTypeExpression): + return None + + # Check whether has already been parsed as a type expression + # by SemanticAnalyzer.try_parse_as_type_expression(), + # perhaps containing a string annotation + if ( + isinstance(maybe_type_expr, (StrExpr, IndexExpr, OpExpr)) + and maybe_type_expr.as_type != NotParsed.VALUE + ): + return maybe_type_expr.as_type + + # If is potentially a type expression containing a string annotation, + # don't try to parse it because there isn't enough information + # available to the TypeChecker pass to resolve string annotations + if has_str_expression(maybe_type_expr): + self.chk.fail( + "TypeForm containing a string annotation cannot be recognized here. " + "Surround with TypeForm(...) to recognize.", + maybe_type_expr, + code=codes.MAYBE_UNRECOGNIZED_STR_TYPEFORM, + ) + return None + + # Collect symbols targeted by NameExprs and MemberExprs, + # to be looked up by TypeAnalyser when binding the + # UnboundTypes corresponding to those expressions. + (name_exprs, member_exprs) = all_name_and_member_expressions(maybe_type_expr) + sym_for_name = {e.name: SymbolTableNode(UNBOUND_IMPORTED, e.node) for e in name_exprs} | { + e_name: SymbolTableNode(UNBOUND_IMPORTED, e.node) + for e in member_exprs + if (e_name := get_member_expr_fullname(e)) is not None + } + + chk_sem = mypy.checker.TypeCheckerAsSemanticAnalyzer(self.chk, sym_for_name) + tpan = TypeAnalyser( + chk_sem, + # NOTE: Will never need to lookup type vars in this scope because + # SemanticAnalyzer.try_parse_as_type_expression() will have + # already recognized any type var referenced in a NameExpr. + # String annotations (which may also reference type vars) + # can't be resolved in the TypeChecker pass anyway. 
+ TypeVarLikeScope(), # empty scope + self.plugin, + self.chk.options, + self.chk.tree, + self.chk.is_typeshed_stub, + ) + + try: + typ1 = expr_to_unanalyzed_type( + maybe_type_expr, self.chk.options, self.chk.is_typeshed_stub + ) + typ2 = typ1.accept(tpan) + if chk_sem.did_fail: + return None + return typ2 + except TypeTranslationError: + return None + + +def has_any_type(t: Type, ignore_in_type_obj: bool = False) -> bool: + """Whether t contains an Any type""" + return t.accept(HasAnyType(ignore_in_type_obj)) + + +class HasAnyType(types.BoolTypeQuery): + def __init__(self, ignore_in_type_obj: bool) -> None: + super().__init__(types.ANY_STRATEGY) + self.ignore_in_type_obj = ignore_in_type_obj + + def visit_any(self, t: AnyType) -> bool: + return t.type_of_any != TypeOfAny.special_form # special forms are not real Any types + + def visit_callable_type(self, t: CallableType) -> bool: + if self.ignore_in_type_obj and t.is_type_obj(): + return False + return super().visit_callable_type(t) + + def visit_type_var(self, t: TypeVarType) -> bool: + default = [t.default] if t.has_default() else [] + return self.query_types([t.upper_bound, *default] + t.values) + + def visit_param_spec(self, t: ParamSpecType) -> bool: + default = [t.default] if t.has_default() else [] + return self.query_types([t.upper_bound, *default, t.prefix]) + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> bool: + default = [t.default] if t.has_default() else [] + return self.query_types([t.upper_bound, *default]) + + +def has_coroutine_decorator(t: Type) -> bool: + """Whether t came from a function decorated with `@coroutine`.""" + t = get_proper_type(t) + return isinstance(t, Instance) and t.type.fullname == "typing.AwaitableGenerator" + + +def is_async_def(t: Type) -> bool: + """Whether t came from a function defined using `async def`.""" + # In check_func_def(), when we see a function decorated with + # `@typing.coroutine` or `@async.coroutine`, we change the + # return type to typing.AwaitableGenerator[...], so that its + # type is compatible with either Generator or Awaitable. + # But for the check here we need to know whether the original + # function (before decoration) was an `async def`. The + # AwaitableGenerator type conveniently preserves the original + # type as its 4th parameter (3rd when using 0-origin indexing + # :-), so that we can recover that information here. + # (We really need to see whether the original, undecorated + # function was an `async def`, which is orthogonal to its + # decorations.) + t = get_proper_type(t) + if ( + isinstance(t, Instance) + and t.type.fullname == "typing.AwaitableGenerator" + and len(t.args) >= 4 + ): + t = get_proper_type(t.args[3]) + return isinstance(t, Instance) and t.type.fullname == "typing.Coroutine" + + +def is_non_empty_tuple(t: Type) -> bool: + t = get_proper_type(t) + return isinstance(t, TupleType) and bool(t.items) + + +def is_duplicate_mapping( + mapping: list[int], actual_types: list[Type], actual_kinds: list[ArgKind] +) -> bool: + return ( + len(mapping) > 1 + # Multiple actuals can map to the same formal if they both come from + # varargs (*args and **kwargs); in this case at runtime it is possible + # that here are no duplicates. We need to allow this, as the convention + # f(..., *args, **kwargs) is common enough. 
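+        # For example, in (hypothetical user code)
+        #
+        #     def f(a: int = 0) -> None: ...
+        #     f(*args, **kwargs)
+        #
+        # both starred actuals map to the formal `a`, but only one of them can supply
+        # it at runtime, so this is not reported as a duplicate.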
+ and not ( + len(mapping) == 2 + and actual_kinds[mapping[0]] == nodes.ARG_STAR + and actual_kinds[mapping[1]] == nodes.ARG_STAR2 + ) + # Multiple actuals can map to the same formal if there are multiple + # **kwargs which cannot be mapped with certainty (non-TypedDict + # **kwargs). + and not all( + actual_kinds[m] == nodes.ARG_STAR2 + and not isinstance(get_proper_type(actual_types[m]), TypedDictType) + for m in mapping + ) + ) + + +def replace_callable_return_type(c: CallableType, new_ret_type: Type) -> CallableType: + """Return a copy of a callable type with a different return type.""" + return c.copy_modified(ret_type=new_ret_type) + + +class ArgInferSecondPassQuery(types.BoolTypeQuery): + """Query whether an argument type should be inferred in the second pass. + + The result is True if the type has a type variable in a callable return + type anywhere. For example, the result for Callable[[], T] is True if t is + a type variable. + """ + + def __init__(self) -> None: + super().__init__(types.ANY_STRATEGY) + + def visit_callable_type(self, t: CallableType) -> bool: + # TODO: we need to check only for type variables of original callable. + return self.query_types(t.arg_types) or has_type_vars(t) + + +def has_erased_component(t: Type | None) -> bool: + return t is not None and t.accept(HasErasedComponentsQuery()) + + +class HasErasedComponentsQuery(types.BoolTypeQuery): + """Visitor for querying whether a type has an erased component.""" + + def __init__(self) -> None: + super().__init__(types.ANY_STRATEGY) + + def visit_erased_type(self, t: ErasedType) -> bool: + return True + + +def has_uninhabited_component(t: Type | None) -> bool: + return t is not None and t.accept(HasUninhabitedComponentsQuery()) + + +class HasUninhabitedComponentsQuery(types.BoolTypeQuery): + """Visitor for querying whether a type has an UninhabitedType component.""" + + def __init__(self) -> None: + super().__init__(types.ANY_STRATEGY) + + def visit_uninhabited_type(self, t: UninhabitedType) -> bool: + return True + + +def arg_approximate_similarity(actual: Type, formal: Type) -> bool: + """Return if caller argument (actual) is roughly compatible with signature arg (formal). + + This function is deliberately loose and will report two types are similar + as long as their "shapes" are plausibly the same. + + This is useful when we're doing error reporting: for example, if we're trying + to select an overload alternative and there's no exact match, we can use + this function to help us identify which alternative the user might have + *meant* to match. + """ + actual = get_proper_type(actual) + formal = get_proper_type(formal) + + # Erase typevars: we'll consider them all to have the same "shape". 
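+    # A rough illustration: an actual argument of type list[str] is considered
+    # "similar" to a formal of type list[int] (same shape), while str is not, which
+    # helps error reporting point at the overload alternative the caller likely meant.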
+ if isinstance(actual, TypeVarType): + actual = erase_to_union_or_bound(actual) + if isinstance(formal, TypeVarType): + formal = erase_to_union_or_bound(formal) + + # Callable or Type[...]-ish types + def is_typetype_like(typ: ProperType) -> bool: + return ( + isinstance(typ, TypeType) + or (isinstance(typ, FunctionLike) and typ.is_type_obj()) + or (isinstance(typ, Instance) and typ.type.fullname == "builtins.type") + ) + + if isinstance(formal, CallableType): + if isinstance(actual, (CallableType, Overloaded, TypeType)): + return True + if is_typetype_like(actual) and is_typetype_like(formal): + return True + + # Unions + if isinstance(actual, UnionType): + return any(arg_approximate_similarity(item, formal) for item in actual.relevant_items()) + if isinstance(formal, UnionType): + return any(arg_approximate_similarity(actual, item) for item in formal.relevant_items()) + + # TypedDicts + if isinstance(actual, TypedDictType): + if isinstance(formal, TypedDictType): + return True + return arg_approximate_similarity(actual.fallback, formal) + + # Instances + # For instances, we mostly defer to the existing is_subtype check. + if isinstance(formal, Instance): + if isinstance(actual, CallableType): + actual = actual.fallback + if isinstance(actual, Overloaded): + actual = actual.items[0].fallback + if isinstance(actual, TupleType): + actual = tuple_fallback(actual) + if isinstance(actual, Instance) and formal.type in actual.type.mro: + # Try performing a quick check as an optimization + return True + + # Fall back to a standard subtype check for the remaining kinds of type. + return is_subtype(erasetype.erase_type(actual), erasetype.erase_type(formal)) + + +def any_causes_overload_ambiguity( + items: list[CallableType], + return_types: list[Type], + arg_types: list[Type], + arg_kinds: list[ArgKind], + arg_names: Sequence[str | None] | None, +) -> bool: + """May an argument containing 'Any' cause ambiguous result type on call to overloaded function? + + Note that this sometimes returns True even if there is no ambiguity, since a correct + implementation would be complex (and the call would be imprecisely typed due to Any + types anyway). + + Args: + items: Overload items matching the actual arguments + arg_types: Actual argument types + arg_kinds: Actual argument kinds + arg_names: Actual argument names + """ + if all_same_types(return_types): + return False + + actual_to_formal = [ + map_formals_to_actuals( + arg_kinds, arg_names, item.arg_kinds, item.arg_names, lambda i: arg_types[i] + ) + for item in items + ] + + for arg_idx, arg_type in enumerate(arg_types): + # We ignore Anys in type object callables as ambiguity + # creators, since that can lead to falsely claiming ambiguity + # for overloads between Type and Callable. + if has_any_type(arg_type, ignore_in_type_obj=True): + matching_formals_unfiltered = [ + (item_idx, lookup[arg_idx]) + for item_idx, lookup in enumerate(actual_to_formal) + if lookup[arg_idx] + ] + + matching_returns = [] + matching_formals = [] + for item_idx, formals in matching_formals_unfiltered: + matched_callable = items[item_idx] + matching_returns.append(matched_callable.ret_type) + + # Note: if an actual maps to multiple formals of differing types within + # a single callable, then we know at least one of those formals must be + # a different type then the formal(s) in some other callable. + # So it's safe to just append everything to the same list. 
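+                # For example, with overload items `def f(x: int) -> int` and
+                # `def f(x: str) -> str`, a call f(a) where a has type Any matches
+                # both items with differing return types, so the call is treated as
+                # ambiguous and its result type falls back to Any.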
+ for formal in formals: + matching_formals.append(matched_callable.arg_types[formal]) + if not all_same_types(matching_formals) and not all_same_types(matching_returns): + # Any maps to multiple different types, and the return types of these items differ. + return True + return False + + +def all_same_types(types: list[Type]) -> bool: + if not types: + return True + return all(is_same_type(t, types[0]) for t in types[1:]) + + +def merge_typevars_in_callables_by_name( + callables: Sequence[CallableType], +) -> tuple[list[CallableType], list[TypeVarType]]: + """Takes all the typevars present in the callables and 'combines' the ones with the same name. + + For example, suppose we have two callables with signatures "f(x: T, y: S) -> T" and + "f(x: List[Tuple[T, S]]) -> Tuple[T, S]". Both callables use typevars named "T" and + "S", but we treat them as distinct, unrelated typevars. (E.g. they could both have + distinct ids.) + + If we pass in both callables into this function, it returns a list containing two + new callables that are identical in signature, but use the same underlying TypeVarType + for T and S. + + This is useful if we want to take the output lists and "merge" them into one callable + in some way -- for example, when unioning together overloads. + + Returns both the new list of callables and a list of all distinct TypeVarType objects used. + """ + output: list[CallableType] = [] + unique_typevars: dict[str, TypeVarType] = {} + variables: list[TypeVarType] = [] + + for target in callables: + if target.is_generic(): + target = freshen_function_type_vars(target) + + rename = {} # Dict[TypeVarId, TypeVar] + for tv in target.variables: + name = tv.fullname + if name not in unique_typevars: + # TODO: support ParamSpecType and TypeVarTuple. + if isinstance(tv, (ParamSpecType, TypeVarTupleType)): + continue + assert isinstance(tv, TypeVarType) + unique_typevars[name] = tv + variables.append(tv) + rename[tv.id] = unique_typevars[name] + + target = expand_type(target, rename) + output.append(target) + + return output, variables + + +def try_getting_literal(typ: Type) -> ProperType: + """If possible, get a more precise literal type for a given type.""" + typ = get_proper_type(typ) + if isinstance(typ, Instance) and typ.last_known_value is not None: + return typ.last_known_value + return typ + + +def is_expr_literal_type(node: Expression) -> bool: + """Returns 'true' if the given node is a Literal""" + if isinstance(node, IndexExpr): + base = node.base + return isinstance(base, RefExpr) and base.fullname in LITERAL_TYPE_NAMES + if isinstance(node, NameExpr): + underlying = node.node + return isinstance(underlying, TypeAlias) and isinstance( + get_proper_type(underlying.target), LiteralType + ) + return False + + +def has_bytes_component(typ: Type) -> bool: + """Is this one of builtin byte types, or a union that contains it?""" + typ = get_proper_type(typ) + byte_types = {"builtins.bytes", "builtins.bytearray"} + if isinstance(typ, UnionType): + return any(has_bytes_component(t) for t in typ.items) + if isinstance(typ, Instance) and typ.type.fullname in byte_types: + return True + return False + + +def type_info_from_type(typ: Type) -> TypeInfo | None: + """Gets the TypeInfo for a type, indirecting through things like type variables and tuples.""" + typ = get_proper_type(typ) + if isinstance(typ, FunctionLike) and typ.is_type_obj(): + return typ.type_object() + if isinstance(typ, TypeType): + typ = typ.item + if isinstance(typ, TypeVarType): + typ = get_proper_type(typ.upper_bound) + if 
isinstance(typ, TupleType): + typ = tuple_fallback(typ) + if isinstance(typ, Instance): + return typ.type + + # A complicated type. Too tricky, give up. + # TODO: Do something more clever here. + return None + + +def is_operator_method(fullname: str | None) -> bool: + if not fullname: + return False + short_name = fullname.split(".")[-1] + return ( + short_name in operators.op_methods.values() + or short_name in operators.reverse_op_methods.values() + or short_name in operators.unary_op_methods.values() + ) + + +def get_partial_instance_type(t: Type | None) -> PartialType | None: + if t is None or not isinstance(t, PartialType) or t.type is None: + return None + return t + + +def is_type_type_context(context: Type | None) -> bool: + context = get_proper_type(context) + if isinstance(context, TypeType): + return True + if isinstance(context, UnionType): + return any(is_type_type_context(item) for item in context.items) + return False diff --git a/.venv/lib/python3.12/site-packages/mypy/checkmember.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/checkmember.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..89dbcb6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/checkmember.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/checkmember.py b/.venv/lib/python3.12/site-packages/mypy/checkmember.py new file mode 100644 index 0000000..719b48b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/checkmember.py @@ -0,0 +1,1576 @@ +"""Type checking of attribute access""" + +from __future__ import annotations + +from collections.abc import Sequence +from typing import Callable, TypeVar, cast + +from mypy import message_registry, state +from mypy.checker_shared import TypeCheckerSharedApi +from mypy.erasetype import erase_typevars +from mypy.expandtype import ( + expand_self_type, + expand_type_by_instance, + freshen_all_functions_type_vars, +) +from mypy.maptype import map_instance_to_supertype +from mypy.meet import is_overlapping_types +from mypy.messages import MessageBuilder +from mypy.nodes import ( + ARG_POS, + ARG_STAR, + ARG_STAR2, + EXCLUDED_ENUM_ATTRIBUTES, + SYMBOL_FUNCBASE_TYPES, + Context, + Decorator, + Expression, + FuncBase, + FuncDef, + IndexExpr, + MypyFile, + NameExpr, + OverloadedFuncDef, + SymbolTable, + TempNode, + TypeAlias, + TypeInfo, + TypeVarLikeExpr, + Var, + is_final_node, +) +from mypy.plugin import AttributeContext +from mypy.subtypes import is_subtype +from mypy.typeops import ( + bind_self, + erase_to_bound, + freeze_all_type_vars, + function_type, + get_all_type_vars, + make_simplified_union, + supported_self_type, + tuple_fallback, +) +from mypy.types import ( + AnyType, + CallableType, + DeletedType, + FunctionLike, + Instance, + LiteralType, + NoneType, + Overloaded, + ParamSpecType, + PartialType, + ProperType, + TupleType, + Type, + TypedDictType, + TypeOfAny, + TypeType, + TypeVarLikeType, + TypeVarTupleType, + TypeVarType, + UninhabitedType, + UnionType, + get_proper_type, +) + + +class MemberContext: + """Information and objects needed to type check attribute access. + + Look at the docstring of analyze_member_access for more information. 
+ """ + + def __init__( + self, + *, + is_lvalue: bool, + is_super: bool, + is_operator: bool, + original_type: Type, + context: Context, + chk: TypeCheckerSharedApi, + self_type: Type | None = None, + module_symbol_table: SymbolTable | None = None, + no_deferral: bool = False, + is_self: bool = False, + rvalue: Expression | None = None, + suppress_errors: bool = False, + preserve_type_var_ids: bool = False, + ) -> None: + self.is_lvalue = is_lvalue + self.is_super = is_super + self.is_operator = is_operator + self.original_type = original_type + self.self_type = self_type or original_type + self.context = context # Error context + self.chk = chk + self.msg = chk.msg + self.module_symbol_table = module_symbol_table + self.no_deferral = no_deferral + self.is_self = is_self + if rvalue is not None: + assert is_lvalue + self.rvalue = rvalue + self.suppress_errors = suppress_errors + # This attribute is only used to preserve old protocol member access logic. + # It is needed to avoid infinite recursion in cases involving self-referential + # generic methods, see find_member() for details. Do not use for other purposes! + self.preserve_type_var_ids = preserve_type_var_ids + + def named_type(self, name: str) -> Instance: + return self.chk.named_type(name) + + def not_ready_callback(self, name: str, context: Context) -> None: + self.chk.handle_cannot_determine_type(name, context) + + def fail(self, msg: str) -> None: + if not self.suppress_errors: + self.msg.fail(msg, self.context) + + def copy_modified( + self, + *, + self_type: Type | None = None, + is_lvalue: bool | None = None, + original_type: Type | None = None, + ) -> MemberContext: + mx = MemberContext( + is_lvalue=self.is_lvalue, + is_super=self.is_super, + is_operator=self.is_operator, + original_type=self.original_type, + context=self.context, + chk=self.chk, + self_type=self.self_type, + module_symbol_table=self.module_symbol_table, + no_deferral=self.no_deferral, + rvalue=self.rvalue, + suppress_errors=self.suppress_errors, + preserve_type_var_ids=self.preserve_type_var_ids, + ) + if self_type is not None: + mx.self_type = self_type + if is_lvalue is not None: + mx.is_lvalue = is_lvalue + if original_type is not None: + mx.original_type = original_type + return mx + + +def analyze_member_access( + name: str, + typ: Type, + context: Context, + *, + is_lvalue: bool, + is_super: bool, + is_operator: bool, + original_type: Type, + chk: TypeCheckerSharedApi, + override_info: TypeInfo | None = None, + in_literal_context: bool = False, + self_type: Type | None = None, + module_symbol_table: SymbolTable | None = None, + no_deferral: bool = False, + is_self: bool = False, + rvalue: Expression | None = None, + suppress_errors: bool = False, +) -> Type: + """Return the type of attribute 'name' of 'typ'. + + The actual implementation is in '_analyze_member_access' and this docstring + also applies to it. + + This is a general operation that supports various different variations: + + 1. lvalue or non-lvalue access (setter or getter access) + 2. supertype access when using super() (is_super == True and + 'override_info' should refer to the supertype) + + 'original_type' is the most precise inferred or declared type of the base object + that we have available. When looking for an attribute of 'typ', we may perform + recursive calls targeting the fallback type, and 'typ' may become some supertype + of 'original_type'. 'original_type' is always preserved as the 'typ' type used in + the initial, non-recursive call. 
The 'self_type' is a component of 'original_type' + to which generic self should be bound (a narrower type that has a fallback to instance). + Currently, this is used only for union types. + + 'module_symbol_table' is passed to this function if 'typ' is actually a module, + and we want to keep track of the available attributes of the module (since they + are not available via the type object directly) + + 'rvalue' can be provided optionally to infer better setter type when is_lvalue is True, + most notably this helps for descriptors with overloaded __set__() method. + + 'suppress_errors' will skip any logic that is only needed to generate error messages. + Note that this more of a performance optimization, one should not rely on this to not + show any messages, as some may be show e.g. by callbacks called here, + use msg.filter_errors(), if needed. + """ + mx = MemberContext( + is_lvalue=is_lvalue, + is_super=is_super, + is_operator=is_operator, + original_type=original_type, + context=context, + chk=chk, + self_type=self_type, + module_symbol_table=module_symbol_table, + no_deferral=no_deferral, + is_self=is_self, + rvalue=rvalue, + suppress_errors=suppress_errors, + ) + result = _analyze_member_access(name, typ, mx, override_info) + possible_literal = get_proper_type(result) + if ( + in_literal_context + and isinstance(possible_literal, Instance) + and possible_literal.last_known_value is not None + ): + return possible_literal.last_known_value + else: + return result + + +def _analyze_member_access( + name: str, typ: Type, mx: MemberContext, override_info: TypeInfo | None = None +) -> Type: + typ = get_proper_type(typ) + if isinstance(typ, Instance): + return analyze_instance_member_access(name, typ, mx, override_info) + elif isinstance(typ, AnyType): + # The base object has dynamic type. + return AnyType(TypeOfAny.from_another_any, source_any=typ) + elif isinstance(typ, UnionType): + return analyze_union_member_access(name, typ, mx) + elif isinstance(typ, FunctionLike) and typ.is_type_obj(): + return analyze_type_callable_member_access(name, typ, mx) + elif isinstance(typ, TypeType): + return analyze_type_type_member_access(name, typ, mx, override_info) + elif isinstance(typ, TupleType): + # Actually look up from the fallback instance type. + return _analyze_member_access(name, tuple_fallback(typ), mx, override_info) + elif isinstance(typ, (LiteralType, FunctionLike)): + # Actually look up from the fallback instance type. 
+ return _analyze_member_access(name, typ.fallback, mx, override_info) + elif isinstance(typ, TypedDictType): + return analyze_typeddict_access(name, typ, mx, override_info) + elif isinstance(typ, NoneType): + return analyze_none_member_access(name, typ, mx) + elif isinstance(typ, TypeVarLikeType): + if isinstance(typ, TypeVarType) and typ.values: + return _analyze_member_access( + name, make_simplified_union(typ.values), mx, override_info + ) + return _analyze_member_access(name, typ.upper_bound, mx, override_info) + elif isinstance(typ, DeletedType): + if not mx.suppress_errors: + mx.msg.deleted_as_rvalue(typ, mx.context) + return AnyType(TypeOfAny.from_error) + elif isinstance(typ, UninhabitedType): + attr_type = UninhabitedType() + attr_type.ambiguous = typ.ambiguous + return attr_type + return report_missing_attribute(mx.original_type, typ, name, mx) + + +def may_be_awaitable_attribute( + name: str, typ: Type, mx: MemberContext, override_info: TypeInfo | None = None +) -> bool: + """Check if the given type has the attribute when awaited.""" + if mx.chk.checking_missing_await: + # Avoid infinite recursion. + return False + with mx.chk.checking_await_set(), mx.msg.filter_errors() as local_errors: + aw_type = mx.chk.get_precise_awaitable_type(typ, local_errors) + if aw_type is None: + return False + _ = _analyze_member_access( + name, aw_type, mx.copy_modified(self_type=aw_type), override_info + ) + return not local_errors.has_new_errors() + + +def report_missing_attribute( + original_type: Type, + typ: Type, + name: str, + mx: MemberContext, + override_info: TypeInfo | None = None, +) -> Type: + if mx.suppress_errors: + return AnyType(TypeOfAny.from_error) + error_code = mx.msg.has_no_attr(original_type, typ, name, mx.context, mx.module_symbol_table) + if not mx.msg.prefer_simple_messages(): + if may_be_awaitable_attribute(name, typ, mx, override_info): + mx.msg.possible_missing_await(mx.context, error_code) + return AnyType(TypeOfAny.from_error) + + +# The several functions that follow implement analyze_member_access for various +# types and aren't documented individually. + + +def analyze_instance_member_access( + name: str, typ: Instance, mx: MemberContext, override_info: TypeInfo | None +) -> Type: + info = typ.type + if override_info: + info = override_info + + method = info.get_method(name) + + if name == "__init__" and not mx.is_super and not info.is_final: + if not method or not method.is_final: + # Accessing __init__ in statically typed code would compromise + # type safety unless used via super() or the method/class is final. + mx.fail(message_registry.CANNOT_ACCESS_INIT) + return AnyType(TypeOfAny.from_error) + + # The base object has an instance type. + + if ( + state.find_occurrences + and info.name == state.find_occurrences[0] + and name == state.find_occurrences[1] + and not mx.suppress_errors + ): + mx.msg.note("Occurrence of '{}.{}'".format(*state.find_occurrences), mx.context) + + # Look up the member. First look up the method dictionary. 
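+    # As an example of the __init__ restriction above (hypothetical user code):
+    #
+    #     C().__init__()        # rejected: accessing "__init__" directly is unsafe
+    #     super().__init__()    # fine inside a subclass method
+    #
+    # since a subclass may override __init__ with an incompatible signature.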
+ if method and not isinstance(method, Decorator): + if mx.is_super and not mx.suppress_errors: + validate_super_call(method, mx) + + if method.is_property: + assert isinstance(method, OverloadedFuncDef) + getter = method.items[0] + assert isinstance(getter, Decorator) + if mx.is_lvalue and getter.var.is_settable_property: + mx.chk.warn_deprecated(method.setter, mx.context) + return analyze_var(name, getter.var, typ, mx) + + if mx.is_lvalue and not mx.suppress_errors: + mx.msg.cant_assign_to_method(mx.context) + if not isinstance(method, OverloadedFuncDef): + signature = function_type(method, mx.named_type("builtins.function")) + else: + if method.type is None: + # Overloads may be not ready if they are decorated. Handle this in same + # manner as we would handle a regular decorated function: defer if possible. + if not mx.no_deferral and method.items: + mx.not_ready_callback(method.name, mx.context) + return AnyType(TypeOfAny.special_form) + assert isinstance(method.type, Overloaded) + signature = method.type + if not mx.preserve_type_var_ids: + signature = freshen_all_functions_type_vars(signature) + if not method.is_static: + if isinstance(method, (FuncDef, OverloadedFuncDef)) and method.is_trivial_self: + signature = bind_self_fast(signature, mx.self_type) + else: + signature = check_self_arg( + signature, mx.self_type, method.is_class, mx.context, name, mx.msg + ) + signature = bind_self(signature, mx.self_type, is_classmethod=method.is_class) + typ = map_instance_to_supertype(typ, method.info) + member_type = expand_type_by_instance(signature, typ) + freeze_all_type_vars(member_type) + return member_type + else: + # Not a method. + return analyze_member_var_access(name, typ, info, mx) + + +def validate_super_call(node: FuncBase, mx: MemberContext) -> None: + unsafe_super = False + if isinstance(node, FuncDef) and node.is_trivial_body: + unsafe_super = True + elif isinstance(node, OverloadedFuncDef): + if node.impl: + impl = node.impl if isinstance(node.impl, FuncDef) else node.impl.func + unsafe_super = impl.is_trivial_body + elif not node.is_property and node.items: + assert isinstance(node.items[0], Decorator) + unsafe_super = node.items[0].func.is_trivial_body + if unsafe_super: + mx.msg.unsafe_super(node.name, node.info.name, mx.context) + + +def analyze_type_callable_member_access(name: str, typ: FunctionLike, mx: MemberContext) -> Type: + # Class attribute. + # TODO super? + ret_type = typ.items[0].ret_type + assert isinstance(ret_type, ProperType) + if isinstance(ret_type, TupleType): + ret_type = tuple_fallback(ret_type) + if isinstance(ret_type, TypedDictType): + ret_type = ret_type.fallback + if isinstance(ret_type, LiteralType): + ret_type = ret_type.fallback + if isinstance(ret_type, Instance): + if not mx.is_operator: + # When Python sees an operator (eg `3 == 4`), it automatically translates that + # into something like `int.__eq__(3, 4)` instead of `(3).__eq__(4)` as an + # optimization. + # + # While it normally it doesn't matter which of the two versions are used, it + # does cause inconsistencies when working with classes. For example, translating + # `int == int` to `int.__eq__(int)` would not work since `int.__eq__` is meant to + # compare two int _instances_. What we really want is `type(int).__eq__`, which + # is meant to compare two types or classes. + # + # This check makes sure that when we encounter an operator, we skip looking up + # the corresponding method in the current instance to avoid this edge case. 
+ # See https://github.com/python/mypy/pull/1787 for more info. + # TODO: do not rely on same type variables being present in all constructor overloads. + result = analyze_class_attribute_access( + ret_type, name, mx, original_vars=typ.items[0].variables, mcs_fallback=typ.fallback + ) + if result: + return result + # Look up from the 'type' type. + return _analyze_member_access(name, typ.fallback, mx) + else: + assert False, f"Unexpected type {ret_type!r}" + + +def analyze_type_type_member_access( + name: str, typ: TypeType, mx: MemberContext, override_info: TypeInfo | None +) -> Type: + # Similar to analyze_type_callable_attribute_access. + item = None + fallback = mx.named_type("builtins.type") + if isinstance(typ.item, Instance): + item = typ.item + elif isinstance(typ.item, AnyType): + with mx.msg.filter_errors(): + return _analyze_member_access(name, fallback, mx, override_info) + elif isinstance(typ.item, TypeVarType): + upper_bound = get_proper_type(typ.item.upper_bound) + if isinstance(upper_bound, Instance): + item = upper_bound + elif isinstance(upper_bound, UnionType): + return _analyze_member_access( + name, + TypeType.make_normalized(upper_bound, line=typ.line, column=typ.column), + mx, + override_info, + ) + elif isinstance(upper_bound, TupleType): + item = tuple_fallback(upper_bound) + elif isinstance(upper_bound, AnyType): + with mx.msg.filter_errors(): + return _analyze_member_access(name, fallback, mx, override_info) + elif isinstance(typ.item, TupleType): + item = tuple_fallback(typ.item) + elif isinstance(typ.item, FunctionLike) and typ.item.is_type_obj(): + item = typ.item.fallback + elif isinstance(typ.item, TypeType): + # Access member on metaclass object via Type[Type[C]] + if isinstance(typ.item.item, Instance): + item = typ.item.item.type.metaclass_type + ignore_messages = False + + if item is not None: + fallback = item.type.metaclass_type or fallback + + if item and not mx.is_operator: + # See comment above for why operators are skipped + result = analyze_class_attribute_access( + item, name, mx, mcs_fallback=fallback, override_info=override_info + ) + if result: + if not (isinstance(get_proper_type(result), AnyType) and item.type.fallback_to_any): + return result + else: + # We don't want errors on metaclass lookup for classes with Any fallback + ignore_messages = True + + with mx.msg.filter_errors(filter_errors=ignore_messages): + return _analyze_member_access(name, fallback, mx, override_info) + + +def analyze_union_member_access(name: str, typ: UnionType, mx: MemberContext) -> Type: + with mx.msg.disable_type_names(): + results = [] + for subtype in typ.relevant_items(): + # Self types should be bound to every individual item of a union. + item_mx = mx.copy_modified(self_type=subtype) + results.append(_analyze_member_access(name, subtype, item_mx)) + return make_simplified_union(results) + + +def analyze_none_member_access(name: str, typ: NoneType, mx: MemberContext) -> Type: + if name == "__bool__": + literal_false = LiteralType(False, fallback=mx.named_type("builtins.bool")) + return CallableType( + arg_types=[], + arg_kinds=[], + arg_names=[], + ret_type=literal_false, + fallback=mx.named_type("builtins.function"), + ) + else: + return _analyze_member_access(name, mx.named_type("builtins.object"), mx) + + +def analyze_member_var_access( + name: str, itype: Instance, info: TypeInfo, mx: MemberContext +) -> Type: + """Analyse attribute access that does not target a method. + + This is logically part of analyze_member_access and the arguments are similar. 
+ + original_type is the type of E in the expression E.var + """ + # It was not a method. Try looking up a variable. + node = info.get(name) + v = node.node if node else None + + mx.chk.warn_deprecated(v, mx.context) + + vv = v + is_trivial_self = False + if isinstance(vv, Decorator): + # The associated Var node of a decorator contains the type. + v = vv.var + is_trivial_self = vv.func.is_trivial_self and not vv.decorators + if mx.is_super and not mx.suppress_errors: + validate_super_call(vv.func, mx) + if isinstance(v, FuncDef): + assert False, "Did not expect a function" + if isinstance(v, MypyFile): + # Special case: accessing module on instances is allowed, but will not + # be recorded by semantic analyzer. + mx.chk.module_refs.add(v.fullname) + + if isinstance(vv, (TypeInfo, TypeAlias, MypyFile, TypeVarLikeExpr)): + # If the associated variable is a TypeInfo synthesize a Var node for + # the purposes of type checking. This enables us to type check things + # like accessing class attributes on an inner class. Similar we allow + # using qualified type aliases in runtime context. For example: + # class C: + # A = List[int] + # x = C.A() <- this is OK + typ = mx.chk.expr_checker.analyze_static_reference(vv, mx.context, mx.is_lvalue) + v = Var(name, type=typ) + v.info = info + + if isinstance(v, Var): + implicit = info[name].implicit + + # An assignment to final attribute is always an error, + # independently of types. + if mx.is_lvalue and not mx.chk.get_final_context(): + check_final_member(name, info, mx.msg, mx.context) + + return analyze_var(name, v, itype, mx, implicit=implicit, is_trivial_self=is_trivial_self) + elif ( + not v + and name not in ["__getattr__", "__setattr__", "__getattribute__"] + and not mx.is_operator + and mx.module_symbol_table is None + ): + # Above we skip ModuleType.__getattr__ etc. if we have a + # module symbol table, since the symbol table allows precise + # checking. + if not mx.is_lvalue: + for method_name in ("__getattribute__", "__getattr__"): + method = info.get_method(method_name) + + # __getattribute__ is defined on builtins.object and returns Any, so without + # the guard this search will always find object.__getattribute__ and conclude + # that the attribute exists + if method and method.info.fullname != "builtins.object": + bound_method = analyze_decorator_or_funcbase_access( + defn=method, itype=itype, name=method_name, mx=mx + ) + typ = map_instance_to_supertype(itype, method.info) + getattr_type = get_proper_type(expand_type_by_instance(bound_method, typ)) + if isinstance(getattr_type, CallableType): + result = getattr_type.ret_type + else: + result = getattr_type + + # Call the attribute hook before returning. 
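+ # (Illustrative: a plugin attribute hook registered for a hypothetical
+ # fullname such as "mypkg.Proxy.session" may replace the type that
+ # Proxy.__getattr__ would otherwise yield for that attribute.)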
+ fullname = f"{method.info.fullname}.{name}" + hook = mx.chk.plugin.get_attribute_hook(fullname) + if hook: + result = hook( + AttributeContext( + get_proper_type(mx.original_type), + result, + mx.is_lvalue, + mx.context, + mx.chk, + ) + ) + return result + else: + setattr_meth = info.get_method("__setattr__") + if setattr_meth and setattr_meth.info.fullname != "builtins.object": + bound_type = analyze_decorator_or_funcbase_access( + defn=setattr_meth, + itype=itype, + name="__setattr__", + mx=mx.copy_modified(is_lvalue=False), + ) + typ = map_instance_to_supertype(itype, setattr_meth.info) + setattr_type = get_proper_type(expand_type_by_instance(bound_type, typ)) + if isinstance(setattr_type, CallableType) and len(setattr_type.arg_types) > 0: + return setattr_type.arg_types[-1] + + if itype.type.fallback_to_any: + return AnyType(TypeOfAny.special_form) + + # Could not find the member. + if itype.extra_attrs and name in itype.extra_attrs.attrs: + # For modules use direct symbol table lookup. + if not itype.extra_attrs.mod_name: + return itype.extra_attrs.attrs[name] + + if mx.is_super and not mx.suppress_errors: + mx.msg.undefined_in_superclass(name, mx.context) + return AnyType(TypeOfAny.from_error) + else: + ret = report_missing_attribute(mx.original_type, itype, name, mx) + # Avoid paying double jeopardy if we can't find the member due to --no-implicit-reexport + if ( + mx.module_symbol_table is not None + and name in mx.module_symbol_table + and not mx.module_symbol_table[name].module_public + ): + v = mx.module_symbol_table[name].node + e = NameExpr(name) + e.set_line(mx.context) + e.node = v + return mx.chk.expr_checker.analyze_ref_expr(e, lvalue=mx.is_lvalue) + return ret + + +def check_final_member(name: str, info: TypeInfo, msg: MessageBuilder, ctx: Context) -> None: + """Give an error if the name being assigned was declared as final.""" + for base in info.mro: + sym = base.names.get(name) + if sym and is_final_node(sym.node): + msg.cant_assign_to_final(name, attr_assign=True, ctx=ctx) + + +def analyze_descriptor_access(descriptor_type: Type, mx: MemberContext) -> Type: + """Type check descriptor access. + + Arguments: + descriptor_type: The type of the descriptor attribute being accessed + (the type of ``f`` in ``a.f`` when ``f`` is a descriptor). + mx: The current member access context. + Return: + The return type of the appropriate ``__get__/__set__`` overload for the descriptor. + """ + instance_type = get_proper_type(mx.self_type) + orig_descriptor_type = descriptor_type + descriptor_type = get_proper_type(descriptor_type) + + if isinstance(descriptor_type, UnionType): + # Map the access over union types + return make_simplified_union( + [analyze_descriptor_access(typ, mx) for typ in descriptor_type.items] + ) + elif not isinstance(descriptor_type, Instance): + return orig_descriptor_type + + if not mx.is_lvalue and not descriptor_type.type.has_readable_member("__get__"): + return orig_descriptor_type + + # We do this check first to accommodate for descriptors with only __set__ method. + # If there is no __set__, we type-check that the assigned value matches + # the return type of __get__. This doesn't match the python semantics, + # (which allow you to override the descriptor with any value), but preserves + # the type of accessing the attribute (even after the override). 
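+ # Illustrative sketch (hypothetical classes):
+ #
+ #     class Converting:
+ #         def __get__(self, obj, owner) -> int: ...
+ #         def __set__(self, obj, value: str) -> None: ...
+ #
+ #     class A:
+ #         f = Converting()
+ #
+ # For `a.f = x` the assigned value `x` is checked against the __set__ value
+ # type (str), while a plain read `a.f` still evaluates to int via __get__.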
+ if mx.is_lvalue and descriptor_type.type.has_readable_member("__set__"): + return analyze_descriptor_assign(descriptor_type, mx) + + if mx.is_lvalue and not descriptor_type.type.has_readable_member("__get__"): + # This turned out to be not a descriptor after all. + return orig_descriptor_type + + dunder_get = descriptor_type.type.get_method("__get__") + if dunder_get is None: + mx.fail( + message_registry.DESCRIPTOR_GET_NOT_CALLABLE.format( + descriptor_type.str_with_options(mx.msg.options) + ) + ) + return AnyType(TypeOfAny.from_error) + + bound_method = analyze_decorator_or_funcbase_access( + defn=dunder_get, + itype=descriptor_type, + name="__get__", + mx=mx.copy_modified(self_type=descriptor_type), + ) + + typ = map_instance_to_supertype(descriptor_type, dunder_get.info) + dunder_get_type = expand_type_by_instance(bound_method, typ) + + if isinstance(instance_type, FunctionLike) and instance_type.is_type_obj(): + owner_type = instance_type.items[0].ret_type + instance_type = NoneType() + elif isinstance(instance_type, TypeType): + owner_type = instance_type.item + instance_type = NoneType() + else: + owner_type = instance_type + + callable_name = mx.chk.expr_checker.method_fullname(descriptor_type, "__get__") + dunder_get_type = mx.chk.expr_checker.transform_callee_type( + callable_name, + dunder_get_type, + [ + TempNode(instance_type, context=mx.context), + TempNode(TypeType.make_normalized(owner_type), context=mx.context), + ], + [ARG_POS, ARG_POS], + mx.context, + object_type=descriptor_type, + ) + + _, inferred_dunder_get_type = mx.chk.expr_checker.check_call( + dunder_get_type, + [ + TempNode(instance_type, context=mx.context), + TempNode(TypeType.make_normalized(owner_type), context=mx.context), + ], + [ARG_POS, ARG_POS], + mx.context, + object_type=descriptor_type, + callable_name=callable_name, + ) + + # Search for possible deprecations: + mx.chk.warn_deprecated(dunder_get, mx.context) + + inferred_dunder_get_type = get_proper_type(inferred_dunder_get_type) + if isinstance(inferred_dunder_get_type, AnyType): + # check_call failed, and will have reported an error + return inferred_dunder_get_type + + if not isinstance(inferred_dunder_get_type, CallableType): + mx.fail( + message_registry.DESCRIPTOR_GET_NOT_CALLABLE.format( + descriptor_type.str_with_options(mx.msg.options) + ) + ) + return AnyType(TypeOfAny.from_error) + + return inferred_dunder_get_type.ret_type + + +def analyze_descriptor_assign(descriptor_type: Instance, mx: MemberContext) -> Type: + instance_type = get_proper_type(mx.self_type) + dunder_set = descriptor_type.type.get_method("__set__") + if dunder_set is None: + mx.fail( + message_registry.DESCRIPTOR_SET_NOT_CALLABLE.format( + descriptor_type.str_with_options(mx.msg.options) + ).value + ) + return AnyType(TypeOfAny.from_error) + + bound_method = analyze_decorator_or_funcbase_access( + defn=dunder_set, + itype=descriptor_type, + name="__set__", + mx=mx.copy_modified(is_lvalue=False, self_type=descriptor_type), + ) + typ = map_instance_to_supertype(descriptor_type, dunder_set.info) + dunder_set_type = expand_type_by_instance(bound_method, typ) + + callable_name = mx.chk.expr_checker.method_fullname(descriptor_type, "__set__") + rvalue = mx.rvalue or TempNode(AnyType(TypeOfAny.special_form), context=mx.context) + dunder_set_type = mx.chk.expr_checker.transform_callee_type( + callable_name, + dunder_set_type, + [TempNode(instance_type, context=mx.context), rvalue], + [ARG_POS, ARG_POS], + mx.context, + object_type=descriptor_type, + ) + + # For non-overloaded 
setters, the result should be type-checked like a regular assignment. + # Hence, we first only try to infer the type by using the rvalue as type context. + type_context = rvalue + with mx.msg.filter_errors(): + _, inferred_dunder_set_type = mx.chk.expr_checker.check_call( + dunder_set_type, + [TempNode(instance_type, context=mx.context), type_context], + [ARG_POS, ARG_POS], + mx.context, + object_type=descriptor_type, + callable_name=callable_name, + ) + + # And now we in fact type check the call, to show errors related to wrong arguments + # count, etc., replacing the type context for non-overloaded setters only. + inferred_dunder_set_type = get_proper_type(inferred_dunder_set_type) + if isinstance(inferred_dunder_set_type, CallableType): + type_context = TempNode(AnyType(TypeOfAny.special_form), context=mx.context) + mx.chk.expr_checker.check_call( + dunder_set_type, + [TempNode(instance_type, context=mx.context), type_context], + [ARG_POS, ARG_POS], + mx.context, + object_type=descriptor_type, + callable_name=callable_name, + ) + + # Search for possible deprecations: + mx.chk.warn_deprecated(dunder_set, mx.context) + + # In the following cases, a message already will have been recorded in check_call. + if (not isinstance(inferred_dunder_set_type, CallableType)) or ( + len(inferred_dunder_set_type.arg_types) < 2 + ): + return AnyType(TypeOfAny.from_error) + return inferred_dunder_set_type.arg_types[1] + + +def is_instance_var(var: Var) -> bool: + """Return if var is an instance variable according to PEP 526.""" + return ( + # check the type_info node is the var (not a decorated function, etc.) + var.name in var.info.names + and var.info.names[var.name].node is var + and not var.is_classvar + # variables without annotations are treated as classvar + and not var.is_inferred + ) + + +def analyze_var( + name: str, + var: Var, + itype: Instance, + mx: MemberContext, + *, + implicit: bool = False, + is_trivial_self: bool = False, +) -> Type: + """Analyze access to an attribute via a Var node. + + This is conceptually part of analyze_member_access and the arguments are similar. + itype is the instance type in which attribute should be looked up + original_type is the type of E in the expression E.var + if implicit is True, the original Var was created as an assignment to self + if is_trivial_self is True, we can use fast path for bind_self(). + """ + # Found a member variable. + original_itype = itype + itype = map_instance_to_supertype(itype, var.info) + if var.is_settable_property and mx.is_lvalue: + typ: Type | None = var.setter_type + if typ is None and var.is_ready: + # Existing synthetic properties may not set setter type. Fall back to getter. + typ = var.type + else: + typ = var.type + if typ: + if isinstance(typ, PartialType): + return mx.chk.handle_partial_var_type(typ, mx.is_lvalue, var, mx.context) + if mx.is_lvalue and not mx.suppress_errors: + if var.is_property and not var.is_settable_property: + mx.msg.read_only_property(name, itype.type, mx.context) + if var.is_classvar: + mx.msg.cant_assign_to_classvar(name, mx.context) + # This is the most common case for variables, so start with this. + result = expand_without_binding(typ, var, itype, original_itype, mx) + + # A non-None value indicates that we should actually bind self for this variable. 
+ call_type: ProperType | None = None + if var.is_initialized_in_class and (not is_instance_var(var) or mx.is_operator): + typ = get_proper_type(typ) + if isinstance(typ, FunctionLike) and not typ.is_type_obj(): + call_type = typ + elif var.is_property: + deco_mx = mx.copy_modified(original_type=typ, self_type=typ, is_lvalue=False) + call_type = get_proper_type(_analyze_member_access("__call__", typ, deco_mx)) + else: + call_type = typ + + # Bound variables with callable types are treated like methods + # (these are usually method aliases like __rmul__ = __mul__). + if isinstance(call_type, FunctionLike) and not call_type.is_type_obj(): + if mx.is_lvalue and not var.is_property and not mx.suppress_errors: + mx.msg.cant_assign_to_method(mx.context) + + # Bind the self type for each callable component (when needed). + if call_type and not var.is_staticmethod: + bound_items = [] + for ct in call_type.items if isinstance(call_type, UnionType) else [call_type]: + p_ct = get_proper_type(ct) + if isinstance(p_ct, FunctionLike) and (not p_ct.bound() or var.is_property): + item = expand_and_bind_callable(p_ct, var, itype, name, mx, is_trivial_self) + else: + item = expand_without_binding(ct, var, itype, original_itype, mx) + bound_items.append(item) + result = UnionType.make_union(bound_items) + else: + if not var.is_ready and not mx.no_deferral: + mx.not_ready_callback(var.name, mx.context) + # Implicit 'Any' type. + result = AnyType(TypeOfAny.special_form) + fullname = f"{var.info.fullname}.{name}" + hook = mx.chk.plugin.get_attribute_hook(fullname) + + if var.info.is_enum and not mx.is_lvalue: + if name in var.info.enum_members and name not in {"name", "value"}: + enum_literal = LiteralType(name, fallback=itype) + result = itype.copy_modified(last_known_value=enum_literal) + elif ( + isinstance(p_result := get_proper_type(result), Instance) + and p_result.type.fullname == "enum.nonmember" + and p_result.args + ): + # Unwrap nonmember similar to class-level access + result = p_result.args[0] + if result and not (implicit or var.info.is_protocol and is_instance_var(var)): + result = analyze_descriptor_access(result, mx) + if hook: + result = hook( + AttributeContext( + get_proper_type(mx.original_type), result, mx.is_lvalue, mx.context, mx.chk + ) + ) + return result + + +def expand_without_binding( + typ: Type, var: Var, itype: Instance, original_itype: Instance, mx: MemberContext +) -> Type: + if not mx.preserve_type_var_ids: + typ = freshen_all_functions_type_vars(typ) + typ = expand_self_type_if_needed(typ, mx, var, original_itype) + expanded = expand_type_by_instance(typ, itype) + freeze_all_type_vars(expanded) + return expanded + + +def expand_and_bind_callable( + functype: FunctionLike, + var: Var, + itype: Instance, + name: str, + mx: MemberContext, + is_trivial_self: bool, +) -> Type: + if not mx.preserve_type_var_ids: + functype = freshen_all_functions_type_vars(functype) + typ = get_proper_type(expand_self_type(var, functype, mx.self_type)) + assert isinstance(typ, FunctionLike) + if is_trivial_self: + typ = bind_self_fast(typ, mx.self_type) + else: + typ = check_self_arg(typ, mx.self_type, var.is_classmethod, mx.context, name, mx.msg) + typ = bind_self(typ, mx.self_type, var.is_classmethod) + expanded = expand_type_by_instance(typ, itype) + freeze_all_type_vars(expanded) + if not var.is_property: + return expanded + if isinstance(expanded, Overloaded): + # Legacy way to store settable properties is with overloads. 
Also in case it is + # an actual overloaded property, selecting first item that passed check_self_arg() + # is a good approximation, long-term we should use check_call() inference below. + if not expanded.items: + # A broken overload, error should be already reported. + return AnyType(TypeOfAny.from_error) + expanded = expanded.items[0] + assert isinstance(expanded, CallableType), expanded + if var.is_settable_property and mx.is_lvalue and var.setter_type is not None: + if expanded.variables: + type_ctx = mx.rvalue or TempNode(AnyType(TypeOfAny.special_form), context=mx.context) + _, inferred_expanded = mx.chk.expr_checker.check_call( + expanded, [type_ctx], [ARG_POS], mx.context + ) + expanded = get_proper_type(inferred_expanded) + assert isinstance(expanded, CallableType) + if not expanded.arg_types: + # This can happen when accessing invalid property from its own body, + # error will be reported elsewhere. + return AnyType(TypeOfAny.from_error) + return expanded.arg_types[0] + else: + return expanded.ret_type + + +def expand_self_type_if_needed( + t: Type, mx: MemberContext, var: Var, itype: Instance, is_class: bool = False +) -> Type: + """Expand special Self type in a backwards compatible manner. + + This should ensure that mixing old-style and new-style self-types work + seamlessly. Also, re-bind new style self-types in subclasses if needed. + """ + original = get_proper_type(mx.self_type) + if not (mx.is_self or mx.is_super): + repl = mx.self_type + if is_class: + if isinstance(original, TypeType): + repl = original.item + elif isinstance(original, CallableType): + # Problematic access errors should have been already reported. + repl = erase_typevars(original.ret_type) + else: + repl = itype + return expand_self_type(var, t, repl) + elif supported_self_type( + # Support compatibility with plain old style T -> T and Type[T] -> T only. + get_proper_type(mx.self_type), + allow_instances=False, + allow_callable=False, + ): + repl = mx.self_type + if is_class and isinstance(original, TypeType): + repl = original.item + return expand_self_type(var, t, repl) + elif ( + mx.is_self + and itype.type != var.info + # If an attribute with Self-type was defined in a supertype, we need to + # rebind the Self type variable to Self type variable of current class... + and itype.type.self_type is not None + # ...unless `self` has an explicit non-trivial annotation. + and itype == mx.chk.scope.active_self_type() + ): + return expand_self_type(var, t, itype.type.self_type) + else: + return t + + +def check_self_arg( + functype: FunctionLike, + dispatched_arg_type: Type, + is_classmethod: bool, + context: Context, + name: str, + msg: MessageBuilder, +) -> FunctionLike: + """Check that an instance has a valid type for a method with annotated 'self'. + + For example if the method is defined as: + class A: + def f(self: S) -> T: ... + then for 'x.f' we check that type(x) <: S. If the method is overloaded, we select + only overloads items that satisfy this requirement. If there are no matching + overloads, an error is generated. + """ + items = functype.items + if not items: + return functype + new_items = [] + if is_classmethod: + dispatched_arg_type = TypeType.make_normalized(dispatched_arg_type) + p_dispatched_arg_type = get_proper_type(dispatched_arg_type) + + for item in items: + if not item.arg_types or item.arg_kinds[0] not in (ARG_POS, ARG_STAR): + # No positional first (self) argument (*args is okay). 
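+ # Illustrative example: a class-level attribute annotated as
+ #     f: Callable[[], None]
+ # leaves no positional slot to bind the instance to, so `x.f` ends up here.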
+ msg.no_formal_self(name, item, context) + # This is pretty bad, so just return the original signature if + # there is at least one such error. + return functype + selfarg = get_proper_type(item.arg_types[0]) + if isinstance(selfarg, Instance) and isinstance(p_dispatched_arg_type, Instance): + if selfarg.type is p_dispatched_arg_type.type and selfarg.args: + if not is_overlapping_types(p_dispatched_arg_type, selfarg): + # This special casing is needed since `actual <: erased(template)` + # logic below doesn't always work, and a more correct approach may + # be tricky. + continue + new_items.append(item) + + if new_items: + items = new_items + new_items = [] + + for item in items: + selfarg = get_proper_type(item.arg_types[0]) + # This matches similar special-casing in bind_self(), see more details there. + self_callable = name == "__call__" and isinstance(selfarg, CallableType) + if self_callable or is_subtype( + dispatched_arg_type, + # This level of erasure matches the one in checker.check_func_def(), + # better keep these two checks consistent. + erase_typevars(erase_to_bound(selfarg)), + # This is to work around the fact that erased ParamSpec and TypeVarTuple + # callables are not always compatible with non-erased ones both ways. + always_covariant=any( + not isinstance(tv, TypeVarType) for tv in get_all_type_vars(selfarg) + ), + ignore_pos_arg_names=True, + ): + new_items.append(item) + elif isinstance(selfarg, ParamSpecType): + # TODO: This is not always right. What's the most reasonable thing to do here? + new_items.append(item) + elif isinstance(selfarg, TypeVarTupleType): + raise NotImplementedError + if not new_items: + # Choose first item for the message (it may be not very helpful for overloads). + msg.incompatible_self_argument( + name, dispatched_arg_type, items[0], is_classmethod, context + ) + return functype + if len(new_items) == 1: + return new_items[0] + return Overloaded(new_items) + + +def analyze_class_attribute_access( + itype: Instance, + name: str, + mx: MemberContext, + *, + mcs_fallback: Instance, + override_info: TypeInfo | None = None, + original_vars: Sequence[TypeVarLikeType] | None = None, +) -> Type | None: + """Analyze access to an attribute on a class object. + + itype is the return type of the class object callable, original_type is the type + of E in the expression E.var, original_vars are type variables of the class callable + (for generic classes). + """ + info = itype.type + if override_info: + info = override_info + + fullname = f"{info.fullname}.{name}" + hook = mx.chk.plugin.get_class_attribute_hook(fullname) + + node = info.get(name) + if not node: + if itype.extra_attrs and name in itype.extra_attrs.attrs: + # For modules use direct symbol table lookup. + if not itype.extra_attrs.mod_name: + return itype.extra_attrs.attrs[name] + if info.fallback_to_any or info.meta_fallback_to_any: + return apply_class_attr_hook(mx, hook, AnyType(TypeOfAny.special_form)) + return None + + if ( + isinstance(node.node, Var) + and not node.node.is_classvar + and not hook + and mcs_fallback.type.get(name) + ): + # If the same attribute is declared on the metaclass and the class but with different types, + # and the attribute on the class is not a ClassVar, + # the type of the attribute on the metaclass should take priority + # over the type of the attribute on the class, + # when the attribute is being accessed from the class object itself. 
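+ # Illustrative example (hypothetical classes):
+ #
+ #     class Meta(type):
+ #         x: str
+ #     class C(metaclass=Meta):
+ #         x: int
+ #
+ # `C.x` is then reported as str (from the metaclass), while `C().x` is int.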
+ # + # Return `None` here to signify that the name should be looked up + # on the class object itself rather than the instance. + return None + + mx.chk.warn_deprecated(node.node, mx.context) + + is_decorated = isinstance(node.node, Decorator) + is_method = is_decorated or isinstance(node.node, FuncBase) + if mx.is_lvalue and not mx.suppress_errors: + if is_method: + mx.msg.cant_assign_to_method(mx.context) + if isinstance(node.node, TypeInfo): + mx.fail(message_registry.CANNOT_ASSIGN_TO_TYPE) + + # Refuse class attribute access if slot defined + if info.slots and name in info.slots: + mx.fail(message_registry.CLASS_VAR_CONFLICTS_SLOTS.format(name)) + + # If a final attribute was declared on `self` in `__init__`, then it + # can't be accessed on the class object. + if node.implicit and isinstance(node.node, Var) and node.node.is_final: + mx.fail(message_registry.CANNOT_ACCESS_FINAL_INSTANCE_ATTR.format(node.node.name)) + + # An assignment to final attribute on class object is also always an error, + # independently of types. + if mx.is_lvalue and not mx.chk.get_final_context(): + check_final_member(name, info, mx.msg, mx.context) + + if info.is_enum and not (mx.is_lvalue or is_decorated or is_method): + enum_class_attribute_type = analyze_enum_class_attribute_access(itype, name, mx) + if enum_class_attribute_type: + return apply_class_attr_hook(mx, hook, enum_class_attribute_type) + + t = node.type + if t: + if isinstance(t, PartialType): + symnode = node.node + assert isinstance(symnode, Var) + return apply_class_attr_hook( + mx, hook, mx.chk.handle_partial_var_type(t, mx.is_lvalue, symnode, mx.context) + ) + + # Find the class where method/variable was defined. + if isinstance(node.node, Decorator): + super_info: TypeInfo | None = node.node.var.info + elif isinstance(node.node, (Var, SYMBOL_FUNCBASE_TYPES)): + super_info = node.node.info + else: + super_info = None + + # Map the type to how it would look as a defining class. For example: + # class C(Generic[T]): ... + # class D(C[Tuple[T, S]]): ... + # D[int, str].method() + # Here itype is D[int, str], isuper is C[Tuple[int, str]]. + if not super_info: + isuper = None + else: + isuper = map_instance_to_supertype(itype, super_info) + + if isinstance(node.node, Var): + assert isuper is not None + object_type = get_proper_type(mx.self_type) + # Check if original variable type has type variables. For example: + # class C(Generic[T]): + # x: T + # C.x # Error, ambiguous access + # C[int].x # Also an error, since C[int] is same as C at runtime + # Exception is Self type wrapped in ClassVar, that is safe. + prohibit_self = not node.node.is_classvar + def_vars = set(node.node.info.defn.type_vars) + if prohibit_self and node.node.info.self_type: + def_vars.add(node.node.info.self_type) + # Exception: access on Type[...], including first argument of class methods is OK. + prohibit_generic = not isinstance(object_type, TypeType) or node.implicit + if prohibit_generic and def_vars & set(get_all_type_vars(t)): + if node.node.is_classvar: + message = message_registry.GENERIC_CLASS_VAR_ACCESS + else: + message = message_registry.GENERIC_INSTANCE_VAR_CLASS_ACCESS + mx.fail(message) + t = expand_self_type_if_needed(t, mx, node.node, itype, is_class=True) + t = expand_type_by_instance(t, isuper) + # Erase non-mapped variables, but keep mapped ones, even if there is an error. 
+ # In the above example this means that we infer following types: + # C.x -> Any + # C[int].x -> int + if prohibit_generic: + erase_vars = set(itype.type.defn.type_vars) + if prohibit_self and itype.type.self_type: + erase_vars.add(itype.type.self_type) + t = erase_typevars(t, {tv.id for tv in erase_vars}) + + is_classmethod = ( + (is_decorated and cast(Decorator, node.node).func.is_class) + or (isinstance(node.node, SYMBOL_FUNCBASE_TYPES) and node.node.is_class) + or isinstance(node.node, Var) + and node.node.is_classmethod + ) + t = get_proper_type(t) + is_trivial_self = False + if isinstance(node.node, Decorator): + # Use fast path if there are trivial decorators like @classmethod or @property + is_trivial_self = node.node.func.is_trivial_self and not node.node.decorators + elif isinstance(node.node, (FuncDef, OverloadedFuncDef)): + is_trivial_self = node.node.is_trivial_self + if ( + isinstance(t, FunctionLike) + and is_classmethod + and not is_trivial_self + and not t.bound() + ): + t = check_self_arg(t, mx.self_type, False, mx.context, name, mx.msg) + t = add_class_tvars( + t, + isuper, + is_classmethod, + mx, + original_vars=original_vars, + is_trivial_self=is_trivial_self, + ) + if is_decorated: + t = expand_self_type_if_needed( + t, mx, cast(Decorator, node.node).var, itype, is_class=is_classmethod + ) + + result = t + # __set__ is not called on class objects. + if not mx.is_lvalue: + result = analyze_descriptor_access(result, mx) + + return apply_class_attr_hook(mx, hook, result) + elif isinstance(node.node, Var): + mx.not_ready_callback(name, mx.context) + return AnyType(TypeOfAny.special_form) + + if isinstance(node.node, (TypeInfo, TypeAlias, MypyFile, TypeVarLikeExpr)): + # TODO: should we apply class plugin here (similar to instance access)? + return mx.chk.expr_checker.analyze_static_reference(node.node, mx.context, mx.is_lvalue) + + if is_decorated: + assert isinstance(node.node, Decorator) + if node.node.type: + return apply_class_attr_hook(mx, hook, node.node.type) + else: + mx.not_ready_callback(name, mx.context) + return AnyType(TypeOfAny.from_error) + else: + assert isinstance(node.node, SYMBOL_FUNCBASE_TYPES) + typ = function_type(node.node, mx.named_type("builtins.function")) + # Note: if we are accessing class method on class object, the cls argument is bound. + # Annotated and/or explicit class methods go through other code paths above, for + # unannotated implicit class methods we do this here. + if node.node.is_class: + typ = bind_self_fast(typ) + return apply_class_attr_hook(mx, hook, typ) + + +def apply_class_attr_hook( + mx: MemberContext, hook: Callable[[AttributeContext], Type] | None, result: Type +) -> Type | None: + if hook: + result = hook( + AttributeContext( + get_proper_type(mx.original_type), result, mx.is_lvalue, mx.context, mx.chk + ) + ) + return result + + +def analyze_enum_class_attribute_access( + itype: Instance, name: str, mx: MemberContext +) -> Type | None: + # Skip these since Enum will remove it + if name in EXCLUDED_ENUM_ATTRIBUTES: + return report_missing_attribute(mx.original_type, itype, name, mx) + # Dunders and private names are not Enum members + if name.startswith("__") and name.replace("_", "") != "": + return None + + node = itype.type.get(name) + if node and node.type: + proper = get_proper_type(node.type) + # Support `A = nonmember(1)` function call and decorator. 
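+ # Illustrative example:
+ #
+ #     class Color(Enum):
+ #         RED = 1
+ #         OTHER = nonmember(2)
+ #
+ # Color.OTHER is a plain int attribute rather than an enum member, so the
+ # nonmember[...] wrapper is unwrapped to its argument below.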
+ if ( + isinstance(proper, Instance) + and proper.type.fullname == "enum.nonmember" + and proper.args + ): + return proper.args[0] + + enum_literal = LiteralType(name, fallback=itype) + return itype.copy_modified(last_known_value=enum_literal) + + +def analyze_typeddict_access( + name: str, typ: TypedDictType, mx: MemberContext, override_info: TypeInfo | None +) -> Type: + if name == "__setitem__": + if isinstance(mx.context, IndexExpr): + # Since we can get this during `a['key'] = ...` + # it is safe to assume that the context is `IndexExpr`. + item_type, key_names = mx.chk.expr_checker.visit_typeddict_index_expr( + typ, mx.context.index, setitem=True + ) + assigned_readonly_keys = typ.readonly_keys & key_names + if assigned_readonly_keys and not mx.suppress_errors: + mx.msg.readonly_keys_mutated(assigned_readonly_keys, context=mx.context) + else: + # It can also be `a.__setitem__(...)` direct call. + # In this case `item_type` can be `Any`, + # because we don't have args available yet. + # TODO: check in `default` plugin that `__setitem__` is correct. + item_type = AnyType(TypeOfAny.implementation_artifact) + return CallableType( + arg_types=[mx.chk.named_type("builtins.str"), item_type], + arg_kinds=[ARG_POS, ARG_POS], + arg_names=[None, None], + ret_type=NoneType(), + fallback=mx.chk.named_type("builtins.function"), + name=name, + ) + elif name == "__delitem__": + return CallableType( + arg_types=[mx.chk.named_type("builtins.str")], + arg_kinds=[ARG_POS], + arg_names=[None], + ret_type=NoneType(), + fallback=mx.chk.named_type("builtins.function"), + name=name, + ) + return _analyze_member_access(name, typ.fallback, mx, override_info) + + +def add_class_tvars( + t: ProperType, + isuper: Instance | None, + is_classmethod: bool, + mx: MemberContext, + original_vars: Sequence[TypeVarLikeType] | None = None, + is_trivial_self: bool = False, +) -> Type: + """Instantiate type variables during analyze_class_attribute_access, + e.g T and Q in the following: + + class A(Generic[T]): + @classmethod + def foo(cls: Type[Q]) -> Tuple[T, Q]: ... + + class B(A[str]): pass + B.foo() + + Args: + t: Declared type of the method (or property) + isuper: Current instance mapped to the superclass where method was defined, this + is usually done by map_instance_to_supertype() + is_classmethod: True if this method is decorated with @classmethod + original_vars: Type variables of the class callable on which the method was accessed + is_trivial_self: if True, we can use fast path for bind_self(). + Returns: + Expanded method type with added type variables (when needed). + """ + # TODO: verify consistency between Q and T + + # We add class type variables if the class method is accessed on class object + # without applied type arguments, this matches the behavior of __init__(). + # For example (continuing the example in docstring): + # A # The type of callable is def [T] () -> A[T], _not_ def () -> A[Any] + # A[int] # The type of callable is def () -> A[int] + # and + # A.foo # The type is generic def [T] () -> Tuple[T, A[T]] + # A[int].foo # The type is non-generic def () -> Tuple[int, A[int]] + # + # This behaviour is useful for defining alternative constructors for generic classes. + # To achieve such behaviour, we add the class type variables that are still free + # (i.e. appear in the return type of the class object on which the method was accessed). 
+ if isinstance(t, CallableType): + tvars = original_vars if original_vars is not None else [] + if not mx.preserve_type_var_ids: + t = freshen_all_functions_type_vars(t) + if is_classmethod and not t.is_bound: + if is_trivial_self: + t = bind_self_fast(t, mx.self_type) + else: + t = bind_self(t, mx.self_type, is_classmethod=True) + if isuper is not None: + t = expand_type_by_instance(t, isuper) + freeze_all_type_vars(t) + return t.copy_modified(variables=list(tvars) + list(t.variables)) + elif isinstance(t, Overloaded): + return Overloaded( + [ + cast( + CallableType, + add_class_tvars(item, isuper, is_classmethod, mx, original_vars=original_vars), + ) + for item in t.items + ] + ) + if isuper is not None: + t = expand_type_by_instance(t, isuper) + return t + + +def analyze_decorator_or_funcbase_access( + defn: Decorator | FuncBase, itype: Instance, name: str, mx: MemberContext +) -> Type: + """Analyzes the type behind method access. + + The function itself can possibly be decorated. + See: https://github.com/python/mypy/issues/10409 + """ + if isinstance(defn, Decorator): + return analyze_var(name, defn.var, itype, mx) + typ = function_type(defn, mx.chk.named_type("builtins.function")) + if isinstance(defn, (FuncDef, OverloadedFuncDef)) and defn.is_trivial_self: + return bind_self_fast(typ, mx.self_type) + typ = check_self_arg(typ, mx.self_type, defn.is_class, mx.context, name, mx.msg) + return bind_self(typ, original_type=mx.self_type, is_classmethod=defn.is_class) + + +F = TypeVar("F", bound=FunctionLike) + + +def bind_self_fast(method: F, original_type: Type | None = None) -> F: + """Return a copy of `method`, with the type of its first parameter (usually + self or cls) bound to original_type. + + This is a faster version of mypy.typeops.bind_self() that can be used for methods + with trivial self/cls annotations. + """ + if isinstance(method, Overloaded): + items = [bind_self_fast(c, original_type) for c in method.items] + return cast(F, Overloaded(items)) + assert isinstance(method, CallableType) + if not method.arg_types: + # Invalid method, return something. + return method + if method.arg_kinds[0] in (ARG_STAR, ARG_STAR2): + # See typeops.py for details. + return method + return method.copy_modified( + arg_types=method.arg_types[1:], + arg_kinds=method.arg_kinds[1:], + arg_names=method.arg_names[1:], + is_bound=True, + ) + + +def has_operator(typ: Type, op_method: str, named_type: Callable[[str], Instance]) -> bool: + """Does type have operator with the given name? + + Note: this follows the rules for operator access, in particular: + * __getattr__ is not considered + * for class objects we only look in metaclass + * instance level attributes (i.e. extra_attrs) are not considered + """ + # This is much faster than analyze_member_access, and so using + # it first as a filter is important for performance. This is mostly relevant + # in situations where we can't expect that method is likely present, + # e.g. for __OP__ vs __rOP__. 
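+ # Illustrative example: when checking `x + y`, the checker may probe type(y)
+ # for __radd__ even though most types do not define it; the cheap
+ # has_readable_member() filter below avoids a full member analysis there.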
+ typ = get_proper_type(typ) + + if isinstance(typ, TypeVarLikeType): + typ = typ.values_or_bound() + if isinstance(typ, AnyType): + return True + if isinstance(typ, UnionType): + return all(has_operator(x, op_method, named_type) for x in typ.relevant_items()) + if isinstance(typ, FunctionLike) and typ.is_type_obj(): + return typ.fallback.type.has_readable_member(op_method) + if isinstance(typ, TypeType): + # Type[Union[X, ...]] is always normalized to Union[Type[X], ...], + # so we don't need to care about unions here, but we need to care about + # Type[T], where upper bound of T is a union. + item = typ.item + if isinstance(item, TypeVarType): + item = item.values_or_bound() + if isinstance(item, UnionType): + return all(meta_has_operator(x, op_method, named_type) for x in item.relevant_items()) + return meta_has_operator(item, op_method, named_type) + return instance_fallback(typ, named_type).type.has_readable_member(op_method) + + +def instance_fallback(typ: ProperType, named_type: Callable[[str], Instance]) -> Instance: + if isinstance(typ, Instance): + return typ + if isinstance(typ, TupleType): + return tuple_fallback(typ) + if isinstance(typ, (LiteralType, TypedDictType)): + return typ.fallback + return named_type("builtins.object") + + +def meta_has_operator(item: Type, op_method: str, named_type: Callable[[str], Instance]) -> bool: + item = get_proper_type(item) + if isinstance(item, AnyType): + return True + item = instance_fallback(item, named_type) + meta = item.type.metaclass_type or named_type("builtins.type") + return meta.type.has_readable_member(op_method) diff --git a/.venv/lib/python3.12/site-packages/mypy/checkpattern.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/checkpattern.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..80a4f09 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/checkpattern.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/checkpattern.py b/.venv/lib/python3.12/site-packages/mypy/checkpattern.py new file mode 100644 index 0000000..cafc694 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/checkpattern.py @@ -0,0 +1,829 @@ +"""Pattern checker. 
This file is conceptually part of TypeChecker.""" + +from __future__ import annotations + +from collections import defaultdict +from typing import Final, NamedTuple + +from mypy import message_registry +from mypy.checker_shared import TypeCheckerSharedApi, TypeRange +from mypy.checkmember import analyze_member_access +from mypy.expandtype import expand_type_by_instance +from mypy.join import join_types +from mypy.literals import literal_hash +from mypy.maptype import map_instance_to_supertype +from mypy.meet import narrow_declared_type +from mypy.messages import MessageBuilder +from mypy.nodes import ARG_POS, Context, Expression, NameExpr, TypeAlias, Var +from mypy.options import Options +from mypy.patterns import ( + AsPattern, + ClassPattern, + MappingPattern, + OrPattern, + Pattern, + SequencePattern, + SingletonPattern, + StarredPattern, + ValuePattern, +) +from mypy.plugin import Plugin +from mypy.subtypes import is_subtype +from mypy.typeops import ( + coerce_to_literal, + make_simplified_union, + try_getting_str_literals_from_type, + tuple_fallback, +) +from mypy.types import ( + AnyType, + FunctionLike, + Instance, + LiteralType, + NoneType, + ProperType, + TupleType, + Type, + TypedDictType, + TypeOfAny, + TypeType, + TypeVarTupleType, + TypeVarType, + UninhabitedType, + UnionType, + UnpackType, + callable_with_ellipsis, + find_unpack_in_list, + get_proper_type, + split_with_prefix_and_suffix, +) +from mypy.typevars import fill_typevars, fill_typevars_with_any +from mypy.visitor import PatternVisitor + +self_match_type_names: Final = [ + "builtins.bool", + "builtins.bytearray", + "builtins.bytes", + "builtins.dict", + "builtins.float", + "builtins.frozenset", + "builtins.int", + "builtins.list", + "builtins.set", + "builtins.str", + "builtins.tuple", +] + +non_sequence_match_type_names: Final = ["builtins.str", "builtins.bytes", "builtins.bytearray"] + + +# For every Pattern a PatternType can be calculated. This requires recursively calculating +# the PatternTypes of the sub-patterns first. +# Using the data in the PatternType the match subject and captured names can be narrowed/inferred. +class PatternType(NamedTuple): + type: Type # The type the match subject can be narrowed to + rest_type: Type # The remaining type if the pattern didn't match + captures: dict[Expression, Type] # The variables captured by the pattern + + +class PatternChecker(PatternVisitor[PatternType]): + """Pattern checker. + + This class checks if a pattern can match a type, what the type can be narrowed to, and what + type capture patterns should be inferred as. + """ + + # Some services are provided by a TypeChecker instance. + chk: TypeCheckerSharedApi + # This is shared with TypeChecker, but stored also here for convenience. + msg: MessageBuilder + # Currently unused + plugin: Plugin + # The expression being matched against the pattern + subject: Expression + + subject_type: Type + # Type of the subject to check the (sub)pattern against + type_context: list[Type] + # Types that match against self instead of their __match_args__ if used as a class pattern + # Filled in from self_match_type_names + self_match_types: list[Type] + # Types that are sequences, but don't match sequence patterns. 
Filled in from + # non_sequence_match_type_names + non_sequence_match_types: list[Type] + + options: Options + + def __init__( + self, chk: TypeCheckerSharedApi, msg: MessageBuilder, plugin: Plugin, options: Options + ) -> None: + self.chk = chk + self.msg = msg + self.plugin = plugin + + self.type_context = [] + self.self_match_types = self.generate_types_from_names(self_match_type_names) + self.non_sequence_match_types = self.generate_types_from_names( + non_sequence_match_type_names + ) + self.options = options + + def accept(self, o: Pattern, type_context: Type) -> PatternType: + self.type_context.append(type_context) + result = o.accept(self) + self.type_context.pop() + + return result + + def visit_as_pattern(self, o: AsPattern) -> PatternType: + current_type = self.type_context[-1] + if o.pattern is not None: + pattern_type = self.accept(o.pattern, current_type) + typ, rest_type, type_map = pattern_type + else: + typ, rest_type, type_map = current_type, UninhabitedType(), {} + + if not is_uninhabited(typ) and o.name is not None: + typ, _ = self.chk.conditional_types_with_intersection( + current_type, [get_type_range(typ)], o, default=current_type + ) + if not is_uninhabited(typ): + type_map[o.name] = typ + + return PatternType(typ, rest_type, type_map) + + def visit_or_pattern(self, o: OrPattern) -> PatternType: + current_type = self.type_context[-1] + + # + # Check all the subpatterns + # + pattern_types = [] + for pattern in o.patterns: + pattern_type = self.accept(pattern, current_type) + pattern_types.append(pattern_type) + if not is_uninhabited(pattern_type.type): + current_type = pattern_type.rest_type + + # + # Collect the final type + # + types = [] + for pattern_type in pattern_types: + if not is_uninhabited(pattern_type.type): + types.append(pattern_type.type) + + # + # Check the capture types + # + capture_types: dict[Var, list[tuple[Expression, Type]]] = defaultdict(list) + # Collect captures from the first subpattern + for expr, typ in pattern_types[0].captures.items(): + node = get_var(expr) + capture_types[node].append((expr, typ)) + + # Check if other subpatterns capture the same names + for i, pattern_type in enumerate(pattern_types[1:]): + vars = {get_var(expr) for expr, _ in pattern_type.captures.items()} + if capture_types.keys() != vars: + self.msg.fail(message_registry.OR_PATTERN_ALTERNATIVE_NAMES, o.patterns[i]) + for expr, typ in pattern_type.captures.items(): + node = get_var(expr) + capture_types[node].append((expr, typ)) + + captures: dict[Expression, Type] = {} + for capture_list in capture_types.values(): + typ = UninhabitedType() + for _, other in capture_list: + typ = make_simplified_union([typ, other]) + + captures[capture_list[0][0]] = typ + + union_type = make_simplified_union(types) + return PatternType(union_type, current_type, captures) + + def visit_value_pattern(self, o: ValuePattern) -> PatternType: + current_type = self.type_context[-1] + typ = self.chk.expr_checker.accept(o.expr) + typ = coerce_to_literal(typ) + narrowed_type, rest_type = self.chk.conditional_types_with_intersection( + current_type, [get_type_range(typ)], o, default=get_proper_type(typ) + ) + if not isinstance(get_proper_type(narrowed_type), (LiteralType, UninhabitedType)): + return PatternType(narrowed_type, UnionType.make_union([narrowed_type, rest_type]), {}) + return PatternType(narrowed_type, rest_type, {}) + + def visit_singleton_pattern(self, o: SingletonPattern) -> PatternType: + current_type = self.type_context[-1] + value: bool | None = o.value + if 
isinstance(value, bool): + typ = self.chk.expr_checker.infer_literal_expr_type(value, "builtins.bool") + elif value is None: + typ = NoneType() + else: + assert False + + narrowed_type, rest_type = self.chk.conditional_types_with_intersection( + current_type, [get_type_range(typ)], o, default=current_type + ) + return PatternType(narrowed_type, rest_type, {}) + + def visit_sequence_pattern(self, o: SequencePattern) -> PatternType: + # + # check for existence of a starred pattern + # + current_type = get_proper_type(self.type_context[-1]) + if not self.can_match_sequence(current_type): + return self.early_non_match() + star_positions = [i for i, p in enumerate(o.patterns) if isinstance(p, StarredPattern)] + star_position: int | None = None + if len(star_positions) == 1: + star_position = star_positions[0] + elif len(star_positions) >= 2: + assert False, "Parser should prevent multiple starred patterns" + required_patterns = len(o.patterns) + if star_position is not None: + required_patterns -= 1 + + # + # get inner types of original type + # + unpack_index = None + if isinstance(current_type, TupleType): + inner_types = current_type.items + unpack_index = find_unpack_in_list(inner_types) + if unpack_index is None: + size_diff = len(inner_types) - required_patterns + if size_diff < 0: + return self.early_non_match() + elif size_diff > 0 and star_position is None: + return self.early_non_match() + else: + normalized_inner_types = [] + for it in inner_types: + # Unfortunately, it is not possible to "split" the TypeVarTuple + # into individual items, so we just use its upper bound for the whole + # analysis instead. + if isinstance(it, UnpackType) and isinstance(it.type, TypeVarTupleType): + it = UnpackType(it.type.upper_bound) + normalized_inner_types.append(it) + inner_types = normalized_inner_types + current_type = current_type.copy_modified(items=normalized_inner_types) + if len(inner_types) - 1 > required_patterns and star_position is None: + return self.early_non_match() + else: + inner_type = self.get_sequence_type(current_type, o) + if inner_type is None: + inner_type = self.chk.named_type("builtins.object") + inner_types = [inner_type] * len(o.patterns) + + # + # match inner patterns + # + contracted_new_inner_types: list[Type] = [] + contracted_rest_inner_types: list[Type] = [] + captures: dict[Expression, Type] = {} + + contracted_inner_types = self.contract_starred_pattern_types( + inner_types, star_position, required_patterns + ) + for p, t in zip(o.patterns, contracted_inner_types): + pattern_type = self.accept(p, t) + typ, rest, type_map = pattern_type + contracted_new_inner_types.append(typ) + contracted_rest_inner_types.append(rest) + self.update_type_map(captures, type_map) + + new_inner_types = self.expand_starred_pattern_types( + contracted_new_inner_types, star_position, len(inner_types), unpack_index is not None + ) + rest_inner_types = self.expand_starred_pattern_types( + contracted_rest_inner_types, star_position, len(inner_types), unpack_index is not None + ) + + # + # Calculate new type + # + new_type: Type + rest_type: Type = current_type + if isinstance(current_type, TupleType) and unpack_index is None: + narrowed_inner_types = [] + inner_rest_types = [] + for inner_type, new_inner_type in zip(inner_types, new_inner_types): + (narrowed_inner_type, inner_rest_type) = ( + self.chk.conditional_types_with_intersection( + inner_type, [get_type_range(new_inner_type)], o, default=inner_type + ) + ) + narrowed_inner_types.append(narrowed_inner_type) + 
inner_rest_types.append(inner_rest_type) + if all(not is_uninhabited(typ) for typ in narrowed_inner_types): + new_type = TupleType(narrowed_inner_types, current_type.partial_fallback) + else: + new_type = UninhabitedType() + + if all(is_uninhabited(typ) for typ in inner_rest_types): + # All subpatterns always match, so we can apply negative narrowing + rest_type = TupleType(rest_inner_types, current_type.partial_fallback) + elif sum(not is_uninhabited(typ) for typ in inner_rest_types) == 1: + # Exactly one subpattern may conditionally match, the rest always match. + # We can apply negative narrowing to this one position. + rest_type = TupleType( + [ + curr if is_uninhabited(rest) else rest + for curr, rest in zip(inner_types, inner_rest_types) + ], + current_type.partial_fallback, + ) + elif isinstance(current_type, TupleType): + # For variadic tuples it is too tricky to match individual items like for fixed + # tuples, so we instead try to narrow the entire type. + # TODO: use more precise narrowing when possible (e.g. for identical shapes). + new_tuple_type = TupleType(new_inner_types, current_type.partial_fallback) + new_type, rest_type = self.chk.conditional_types_with_intersection( + new_tuple_type, [get_type_range(current_type)], o, default=new_tuple_type + ) + else: + new_inner_type = UninhabitedType() + for typ in new_inner_types: + new_inner_type = join_types(new_inner_type, typ) + if isinstance(current_type, TypeVarType): + new_bound = self.narrow_sequence_child(current_type.upper_bound, new_inner_type, o) + new_type = current_type.copy_modified(upper_bound=new_bound) + else: + new_type = self.narrow_sequence_child(current_type, new_inner_type, o) + return PatternType(new_type, rest_type, captures) + + def get_sequence_type(self, t: Type, context: Context) -> Type | None: + t = get_proper_type(t) + if isinstance(t, AnyType): + return AnyType(TypeOfAny.from_another_any, t) + if isinstance(t, UnionType): + items = [self.get_sequence_type(item, context) for item in t.items] + not_none_items = [item for item in items if item is not None] + if not_none_items: + return make_simplified_union(not_none_items) + else: + return None + + if self.chk.type_is_iterable(t) and isinstance(t, (Instance, TupleType)): + if isinstance(t, TupleType): + t = tuple_fallback(t) + return self.chk.iterable_item_type(t, context) + else: + return None + + def contract_starred_pattern_types( + self, types: list[Type], star_pos: int | None, num_patterns: int + ) -> list[Type]: + """ + Contracts a list of types in a sequence pattern depending on the position of a starred + capture pattern. + + For example if the sequence pattern [a, *b, c] is matched against types [bool, int, str, + bytes] the contracted types are [bool, Union[int, str], bytes]. + + If star_pos in None the types are returned unchanged. + """ + unpack_index = find_unpack_in_list(types) + if unpack_index is not None: + # Variadic tuples require "re-shaping" to match the requested pattern. + unpack = types[unpack_index] + assert isinstance(unpack, UnpackType) + unpacked = get_proper_type(unpack.type) + # This should be guaranteed by the normalization in the caller. 
+ assert isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple" + if star_pos is None: + missing = num_patterns - len(types) + 1 + new_types = types[:unpack_index] + new_types += [unpacked.args[0]] * missing + new_types += types[unpack_index + 1 :] + return new_types + prefix, middle, suffix = split_with_prefix_and_suffix( + tuple([UnpackType(unpacked) if isinstance(t, UnpackType) else t for t in types]), + star_pos, + num_patterns - star_pos, + ) + new_middle = [] + for m in middle: + # The existing code expects the star item type, rather than the type of + # the whole tuple "slice". + if isinstance(m, UnpackType): + new_middle.append(unpacked.args[0]) + else: + new_middle.append(m) + return list(prefix) + [make_simplified_union(new_middle)] + list(suffix) + else: + if star_pos is None: + return types + new_types = types[:star_pos] + star_length = len(types) - num_patterns + new_types.append(make_simplified_union(types[star_pos : star_pos + star_length])) + new_types += types[star_pos + star_length :] + return new_types + + def expand_starred_pattern_types( + self, types: list[Type], star_pos: int | None, num_types: int, original_unpack: bool + ) -> list[Type]: + """Undoes the contraction done by contract_starred_pattern_types. + + For example if the sequence pattern is [a, *b, c] and types [bool, int, str] are extended + to length 4 the result is [bool, int, int, str]. + """ + if star_pos is None: + return types + if original_unpack: + # In the case where original tuple type has an unpack item, it is not practical + # to coerce pattern type back to the original shape (and may not even be possible), + # so we only restore the type of the star item. + res = [] + for i, t in enumerate(types): + if i != star_pos: + res.append(t) + else: + res.append(UnpackType(self.chk.named_generic_type("builtins.tuple", [t]))) + return res + new_types = types[:star_pos] + star_length = num_types - len(types) + 1 + new_types += [types[star_pos]] * star_length + new_types += types[star_pos + 1 :] + + return new_types + + def narrow_sequence_child(self, outer_type: Type, inner_type: Type, ctx: Context) -> Type: + new_type = self.construct_sequence_child(outer_type, inner_type) + if is_subtype(new_type, outer_type): + new_type, _ = self.chk.conditional_types_with_intersection( + outer_type, [get_type_range(new_type)], ctx, default=outer_type + ) + else: + new_type = outer_type + return new_type + + def visit_starred_pattern(self, o: StarredPattern) -> PatternType: + captures: dict[Expression, Type] = {} + if o.capture is not None: + list_type = self.chk.named_generic_type("builtins.list", [self.type_context[-1]]) + captures[o.capture] = list_type + return PatternType(self.type_context[-1], UninhabitedType(), captures) + + def visit_mapping_pattern(self, o: MappingPattern) -> PatternType: + current_type = get_proper_type(self.type_context[-1]) + can_match = True + captures: dict[Expression, Type] = {} + for key, value in zip(o.keys, o.values): + inner_type = self.get_mapping_item_type(o, current_type, key) + if inner_type is None: + can_match = False + inner_type = self.chk.named_type("builtins.object") + pattern_type = self.accept(value, inner_type) + if is_uninhabited(pattern_type.type): + can_match = False + else: + self.update_type_map(captures, pattern_type.captures) + + if o.rest is not None: + mapping = self.chk.named_type("typing.Mapping") + if is_subtype(current_type, mapping) and isinstance(current_type, Instance): + mapping_inst = map_instance_to_supertype(current_type, 
mapping.type) + dict_typeinfo = self.chk.lookup_typeinfo("builtins.dict") + rest_type = Instance(dict_typeinfo, mapping_inst.args) + else: + object_type = self.chk.named_type("builtins.object") + rest_type = self.chk.named_generic_type( + "builtins.dict", [object_type, object_type] + ) + + captures[o.rest] = rest_type + + if can_match: + # We can't narrow the type here, as Mapping key is invariant. + new_type = self.type_context[-1] + else: + new_type = UninhabitedType() + return PatternType(new_type, current_type, captures) + + def get_mapping_item_type( + self, pattern: MappingPattern, mapping_type: Type, key: Expression + ) -> Type | None: + mapping_type = get_proper_type(mapping_type) + if isinstance(mapping_type, TypedDictType): + with self.msg.filter_errors() as local_errors: + result: Type | None = self.chk.expr_checker.visit_typeddict_index_expr( + mapping_type, key + )[0] + has_local_errors = local_errors.has_new_errors() + # If we can't determine the type statically fall back to treating it as a normal + # mapping + if has_local_errors: + with self.msg.filter_errors() as local_errors: + result = self.get_simple_mapping_item_type(pattern, mapping_type, key) + + if local_errors.has_new_errors(): + result = None + else: + with self.msg.filter_errors(): + result = self.get_simple_mapping_item_type(pattern, mapping_type, key) + return result + + def get_simple_mapping_item_type( + self, pattern: MappingPattern, mapping_type: Type, key: Expression + ) -> Type: + result, _ = self.chk.expr_checker.check_method_call_by_name( + "__getitem__", mapping_type, [key], [ARG_POS], pattern + ) + return result + + def visit_class_pattern(self, o: ClassPattern) -> PatternType: + current_type = get_proper_type(self.type_context[-1]) + + # + # Check class type + # + type_info = o.class_ref.node + typ = self.chk.expr_checker.accept(o.class_ref) + p_typ = get_proper_type(typ) + if isinstance(type_info, TypeAlias) and not type_info.no_args: + self.msg.fail(message_registry.CLASS_PATTERN_GENERIC_TYPE_ALIAS, o) + return self.early_non_match() + elif isinstance(p_typ, FunctionLike) and p_typ.is_type_obj(): + typ = fill_typevars_with_any(p_typ.type_object()) + elif ( + isinstance(type_info, Var) + and type_info.type is not None + and type_info.fullname == "typing.Callable" + ): + # Create a `Callable[..., Any]` + fallback = self.chk.named_type("builtins.function") + any_type = AnyType(TypeOfAny.unannotated) + typ = callable_with_ellipsis(any_type, ret_type=any_type, fallback=fallback) + elif isinstance(p_typ, TypeType) and isinstance(p_typ.item, NoneType): + typ = p_typ.item + elif not isinstance(p_typ, AnyType): + self.msg.fail( + message_registry.CLASS_PATTERN_TYPE_REQUIRED.format( + typ.str_with_options(self.options) + ), + o, + ) + return self.early_non_match() + + new_type, rest_type = self.chk.conditional_types_with_intersection( + current_type, [get_type_range(typ)], o, default=current_type + ) + if is_uninhabited(new_type): + return self.early_non_match() + # TODO: Do I need this? 
+ narrowed_type = narrow_declared_type(current_type, new_type) + + # + # Convert positional to keyword patterns + # + keyword_pairs: list[tuple[str | None, Pattern]] = [] + match_arg_set: set[str] = set() + + captures: dict[Expression, Type] = {} + + if len(o.positionals) != 0: + if self.should_self_match(typ): + if len(o.positionals) > 1: + self.msg.fail(message_registry.CLASS_PATTERN_TOO_MANY_POSITIONAL_ARGS, o) + pattern_type = self.accept(o.positionals[0], narrowed_type) + if not is_uninhabited(pattern_type.type): + return PatternType( + pattern_type.type, + join_types(rest_type, pattern_type.rest_type), + pattern_type.captures, + ) + captures = pattern_type.captures + else: + with self.msg.filter_errors() as local_errors: + match_args_type = analyze_member_access( + "__match_args__", + typ, + o, + is_lvalue=False, + is_super=False, + is_operator=False, + original_type=typ, + chk=self.chk, + ) + has_local_errors = local_errors.has_new_errors() + if has_local_errors: + self.msg.fail( + message_registry.MISSING_MATCH_ARGS.format( + typ.str_with_options(self.options) + ), + o, + ) + return self.early_non_match() + + proper_match_args_type = get_proper_type(match_args_type) + if isinstance(proper_match_args_type, TupleType): + match_arg_names = get_match_arg_names(proper_match_args_type) + + if len(o.positionals) > len(match_arg_names): + self.msg.fail(message_registry.CLASS_PATTERN_TOO_MANY_POSITIONAL_ARGS, o) + return self.early_non_match() + else: + match_arg_names = [None] * len(o.positionals) + + for arg_name, pos in zip(match_arg_names, o.positionals): + keyword_pairs.append((arg_name, pos)) + if arg_name is not None: + match_arg_set.add(arg_name) + + # + # Check for duplicate patterns + # + keyword_arg_set = set() + has_duplicates = False + for key, value in zip(o.keyword_keys, o.keyword_values): + keyword_pairs.append((key, value)) + if key in match_arg_set: + self.msg.fail( + message_registry.CLASS_PATTERN_KEYWORD_MATCHES_POSITIONAL.format(key), value + ) + has_duplicates = True + elif key in keyword_arg_set: + self.msg.fail( + message_registry.CLASS_PATTERN_DUPLICATE_KEYWORD_PATTERN.format(key), value + ) + has_duplicates = True + keyword_arg_set.add(key) + + if has_duplicates: + return self.early_non_match() + + # + # Check keyword patterns + # + can_match = True + for keyword, pattern in keyword_pairs: + key_type: Type | None = None + with self.msg.filter_errors() as local_errors: + if keyword is not None: + key_type = analyze_member_access( + keyword, + narrowed_type, + pattern, + is_lvalue=False, + is_super=False, + is_operator=False, + original_type=new_type, + chk=self.chk, + ) + else: + key_type = AnyType(TypeOfAny.from_error) + has_local_errors = local_errors.has_new_errors() + if has_local_errors or key_type is None: + key_type = AnyType(TypeOfAny.from_error) + if not (type_info and type_info.fullname == "builtins.object"): + self.msg.fail( + message_registry.CLASS_PATTERN_UNKNOWN_KEYWORD.format( + typ.str_with_options(self.options), keyword + ), + pattern, + ) + elif keyword is not None: + new_type = self.chk.add_any_attribute_to_type(new_type, keyword) + + inner_type, inner_rest_type, inner_captures = self.accept(pattern, key_type) + if is_uninhabited(inner_type): + can_match = False + else: + self.update_type_map(captures, inner_captures) + if not is_uninhabited(inner_rest_type): + rest_type = current_type + + if not can_match: + new_type = UninhabitedType() + return PatternType(new_type, rest_type, captures) + + def should_self_match(self, typ: Type) -> bool: + typ = 
get_proper_type(typ) + if isinstance(typ, TupleType): + typ = typ.partial_fallback + if isinstance(typ, AnyType): + return False + if isinstance(typ, Instance) and typ.type.get("__match_args__") is not None: + # Named tuples and other subtypes of builtins that define __match_args__ + # should not self match. + return False + for other in self.self_match_types: + if is_subtype(typ, other): + return True + return False + + def can_match_sequence(self, typ: ProperType) -> bool: + if isinstance(typ, AnyType): + return True + if isinstance(typ, UnionType): + return any(self.can_match_sequence(get_proper_type(item)) for item in typ.items) + for other in self.non_sequence_match_types: + # We have to ignore promotions, as memoryview should match, but bytes, + # which it can be promoted to, shouldn't + if is_subtype(typ, other, ignore_promotions=True): + return False + sequence = self.chk.named_type("typing.Sequence") + # If the static type is more general than sequence the actual type could still match + return is_subtype(typ, sequence) or is_subtype(sequence, typ) + + def generate_types_from_names(self, type_names: list[str]) -> list[Type]: + types: list[Type] = [] + for name in type_names: + try: + types.append(self.chk.named_type(name)) + except KeyError as e: + # Some built in types are not defined in all test cases + if not name.startswith("builtins."): + raise e + return types + + def update_type_map( + self, original_type_map: dict[Expression, Type], extra_type_map: dict[Expression, Type] + ) -> None: + # Calculating this would not be needed if TypeMap directly used literal hashes instead of + # expressions, as suggested in the TODO above it's definition + already_captured = {literal_hash(expr) for expr in original_type_map} + for expr, typ in extra_type_map.items(): + if literal_hash(expr) in already_captured: + node = get_var(expr) + self.msg.fail( + message_registry.MULTIPLE_ASSIGNMENTS_IN_PATTERN.format(node.name), expr + ) + else: + original_type_map[expr] = typ + + def construct_sequence_child(self, outer_type: Type, inner_type: Type) -> Type: + """ + If outer_type is a child class of typing.Sequence returns a new instance of + outer_type, that is a Sequence of inner_type. If outer_type is not a child class of + typing.Sequence just returns a Sequence of inner_type + + For example: + construct_sequence_child(List[int], str) = List[str] + + TODO: this doesn't make sense. For example if one has class S(Sequence[int], Generic[T]) + or class T(Sequence[Tuple[T, T]]), there is no way any of those can map to Sequence[str]. 
+ """ + proper_type = get_proper_type(outer_type) + if isinstance(proper_type, AnyType): + return outer_type + if isinstance(proper_type, UnionType): + types = [ + self.construct_sequence_child(item, inner_type) + for item in proper_type.items + if self.can_match_sequence(get_proper_type(item)) + ] + return make_simplified_union(types) + sequence = self.chk.named_generic_type("typing.Sequence", [inner_type]) + if is_subtype(outer_type, self.chk.named_type("typing.Sequence")): + if isinstance(proper_type, TupleType): + proper_type = tuple_fallback(proper_type) + assert isinstance(proper_type, Instance) + empty_type = fill_typevars(proper_type.type) + partial_type = expand_type_by_instance(empty_type, sequence) + return expand_type_by_instance(partial_type, proper_type) + else: + return sequence + + def early_non_match(self) -> PatternType: + return PatternType(UninhabitedType(), self.type_context[-1], {}) + + +def get_match_arg_names(typ: TupleType) -> list[str | None]: + args: list[str | None] = [] + for item in typ.items: + values = try_getting_str_literals_from_type(item) + if values is None or len(values) != 1: + args.append(None) + else: + args.append(values[0]) + return args + + +def get_var(expr: Expression) -> Var: + """ + Warning: this in only true for expressions captured by a match statement. + Don't call it from anywhere else + """ + assert isinstance(expr, NameExpr), expr + node = expr.node + assert isinstance(node, Var), node + return node + + +def get_type_range(typ: Type) -> TypeRange: + typ = get_proper_type(typ) + if ( + isinstance(typ, Instance) + and typ.last_known_value + and isinstance(typ.last_known_value.value, bool) + ): + typ = typ.last_known_value + return TypeRange(typ, is_upper_bound=False) + + +def is_uninhabited(typ: Type) -> bool: + return isinstance(get_proper_type(typ), UninhabitedType) diff --git a/.venv/lib/python3.12/site-packages/mypy/checkstrformat.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/checkstrformat.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..e61bbbf Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/checkstrformat.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/checkstrformat.py b/.venv/lib/python3.12/site-packages/mypy/checkstrformat.py new file mode 100644 index 0000000..45075bd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/checkstrformat.py @@ -0,0 +1,1099 @@ +""" +Format expression type checker. + +This file is conceptually part of ExpressionChecker and TypeChecker. Main functionality +is located in StringFormatterChecker.check_str_format_call() for '{}'.format(), and in +StringFormatterChecker.check_str_interpolation() for printf-style % interpolation. + +Note that although at runtime format strings are parsed using custom parsers, +here we use a regexp-based approach. This way we 99% match runtime behaviour while keeping +implementation simple. 
+""" + +from __future__ import annotations + +import re +from re import Match, Pattern +from typing import Callable, Final, Union, cast +from typing_extensions import TypeAlias as _TypeAlias + +import mypy.errorcodes as codes +from mypy import message_registry +from mypy.checker_shared import TypeCheckerSharedApi +from mypy.errors import Errors +from mypy.maptype import map_instance_to_supertype +from mypy.messages import MessageBuilder +from mypy.nodes import ( + ARG_NAMED, + ARG_POS, + ARG_STAR, + ARG_STAR2, + BytesExpr, + CallExpr, + Context, + DictExpr, + Expression, + ExpressionStmt, + IndexExpr, + IntExpr, + MemberExpr, + MypyFile, + NameExpr, + Node, + StarExpr, + StrExpr, + TempNode, + TupleExpr, +) +from mypy.parse import parse +from mypy.subtypes import is_subtype +from mypy.typeops import custom_special_method +from mypy.types import ( + AnyType, + Instance, + LiteralType, + TupleType, + Type, + TypeOfAny, + TypeVarTupleType, + TypeVarType, + UnionType, + UnpackType, + find_unpack_in_list, + get_proper_type, + get_proper_types, +) + +FormatStringExpr: _TypeAlias = Union[StrExpr, BytesExpr] +Checkers: _TypeAlias = tuple[Callable[[Expression], None], Callable[[Type], bool]] +MatchMap: _TypeAlias = dict[tuple[int, int], Match[str]] # span -> match + + +def compile_format_re() -> Pattern[str]: + """Construct regexp to match format conversion specifiers in % interpolation. + + See https://docs.python.org/3/library/stdtypes.html#printf-style-string-formatting + The regexp is intentionally a bit wider to report better errors. + """ + key_re = r"(\((?P[^)]*)\))?" # (optional) parenthesised sequence of characters. + flags_re = r"(?P[#0\-+ ]*)" # (optional) sequence of flags. + width_re = r"(?P[1-9][0-9]*|\*)?" # (optional) minimum field width (* or numbers). + precision_re = r"(?:\.(?P\*|[0-9]+)?)?" # (optional) . followed by * of numbers. + length_mod_re = r"[hlL]?" # (optional) length modifier (unused). + type_re = r"(?P.)?" # conversion type. + format_re = "%" + key_re + flags_re + width_re + precision_re + length_mod_re + type_re + return re.compile(format_re) + + +def compile_new_format_re(custom_spec: bool) -> Pattern[str]: + """Construct regexps to match format conversion specifiers in str.format() calls. + + See After https://docs.python.org/3/library/string.html#formatspec for + specifications. The regexps are intentionally wider, to report better errors, + instead of just not matching. + """ + + # Field (optional) is an integer/identifier possibly followed by several .attr and [index]. + field = r"(?P(?P[^.[!:]*)([^:!]+)?)" + + # Conversion (optional) is ! followed by one of letters for forced repr(), str(), or ascii(). + conversion = r"(?P![^:])?" + + # Format specification (optional) follows its own mini-language: + if not custom_spec: + # Fill and align is valid for all builtin types. + fill_align = r"(?P.?[<>=^])?" + # Number formatting options are only valid for int, float, complex, and Decimal, + # except if only width is given (it is valid for all types). + # This contains sign, flags (sign, # and/or 0), width, grouping (_ or ,) and precision. + num_spec = r"(?P[+\- ]?#?0?)(?P\d+)?[_,]?(?P\.\d+)?" + # The last element is type. + conv_type = r"(?P.)?" # only some are supported, but we want to give a better error + format_spec = r"(?P:" + fill_align + num_spec + conv_type + r")?" + else: + # Custom types can define their own form_spec using __format__(). + format_spec = r"(?P:.*)?" 
+ + return re.compile(field + conversion + format_spec) + + +FORMAT_RE: Final = compile_format_re() +FORMAT_RE_NEW: Final = compile_new_format_re(False) +FORMAT_RE_NEW_CUSTOM: Final = compile_new_format_re(True) +DUMMY_FIELD_NAME: Final = "__dummy_name__" + +# Types that require either int or float. +NUMERIC_TYPES_OLD: Final = {"d", "i", "o", "u", "x", "X", "e", "E", "f", "F", "g", "G"} +NUMERIC_TYPES_NEW: Final = {"b", "d", "o", "e", "E", "f", "F", "g", "G", "n", "x", "X", "%"} + +# These types accept _only_ int. +REQUIRE_INT_OLD: Final = {"o", "x", "X"} +REQUIRE_INT_NEW: Final = {"b", "d", "o", "x", "X"} + +# These types fall back to SupportsFloat with % (other fall back to SupportsInt) +FLOAT_TYPES: Final = {"e", "E", "f", "F", "g", "G"} + + +class ConversionSpecifier: + def __init__( + self, match: Match[str], start_pos: int = -1, non_standard_format_spec: bool = False + ) -> None: + self.whole_seq = match.group() + self.start_pos = start_pos + + m_dict = match.groupdict() + self.key = m_dict.get("key") + + # Replace unmatched optional groups with empty matches (for convenience). + self.conv_type = m_dict.get("type", "") + self.flags = m_dict.get("flags", "") + self.width = m_dict.get("width", "") + self.precision = m_dict.get("precision", "") + + # Used only for str.format() calls (it may be custom for types with __format__()). + self.format_spec = m_dict.get("format_spec") + self.non_standard_format_spec = non_standard_format_spec + # Used only for str.format() calls. + self.conversion = m_dict.get("conversion") + # Full formatted expression (i.e. key plus following attributes and/or indexes). + # Used only for str.format() calls. + self.field = m_dict.get("field") + + def has_key(self) -> bool: + return self.key is not None + + def has_star(self) -> bool: + return self.width == "*" or self.precision == "*" + + +def parse_conversion_specifiers(format_str: str) -> list[ConversionSpecifier]: + """Parse c-printf-style format string into list of conversion specifiers.""" + specifiers: list[ConversionSpecifier] = [] + for m in re.finditer(FORMAT_RE, format_str): + specifiers.append(ConversionSpecifier(m, start_pos=m.start())) + return specifiers + + +def parse_format_value( + format_value: str, ctx: Context, msg: MessageBuilder, nested: bool = False +) -> list[ConversionSpecifier] | None: + """Parse format string into list of conversion specifiers. + + The specifiers may be nested (two levels maximum), in this case they are ordered as + '{0:{1}}, {2:{3}{4}}'. Return None in case of an error. + """ + top_targets = find_non_escaped_targets(format_value, ctx, msg) + if top_targets is None: + return None + + result: list[ConversionSpecifier] = [] + for target, start_pos in top_targets: + match = FORMAT_RE_NEW.fullmatch(target) + if match: + conv_spec = ConversionSpecifier(match, start_pos=start_pos) + else: + custom_match = FORMAT_RE_NEW_CUSTOM.fullmatch(target) + if custom_match: + conv_spec = ConversionSpecifier( + custom_match, start_pos=start_pos, non_standard_format_spec=True + ) + else: + msg.fail( + "Invalid conversion specifier in format string", + ctx, + code=codes.STRING_FORMATTING, + ) + return None + + if conv_spec.key and ("{" in conv_spec.key or "}" in conv_spec.key): + msg.fail("Conversion value must not contain { or }", ctx, code=codes.STRING_FORMATTING) + return None + result.append(conv_spec) + + # Parse nested conversions that are allowed in format specifier. 
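+        # (e.g. the inner "{1}" of "{0:{1}}"; anything nested more than two levels
+        # deep is rejected by the recursive call below).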
+ if ( + conv_spec.format_spec + and conv_spec.non_standard_format_spec + and ("{" in conv_spec.format_spec or "}" in conv_spec.format_spec) + ): + if nested: + msg.fail( + "Formatting nesting must be at most two levels deep", + ctx, + code=codes.STRING_FORMATTING, + ) + return None + sub_conv_specs = parse_format_value(conv_spec.format_spec, ctx, msg, nested=True) + if sub_conv_specs is None: + return None + result.extend(sub_conv_specs) + return result + + +def find_non_escaped_targets( + format_value: str, ctx: Context, msg: MessageBuilder +) -> list[tuple[str, int]] | None: + """Return list of raw (un-parsed) format specifiers in format string. + + Format specifiers don't include enclosing braces. We don't use regexp for + this because they don't work well with nested/repeated patterns + (both greedy and non-greedy), and these are heavily used internally for + representation of f-strings. + + Return None in case of an error. + """ + result = [] + next_spec = "" + pos = 0 + nesting = 0 + while pos < len(format_value): + c = format_value[pos] + if not nesting: + # Skip any paired '{{' and '}}', enter nesting on '{', report error on '}'. + if c == "{": + if pos < len(format_value) - 1 and format_value[pos + 1] == "{": + pos += 1 + else: + nesting = 1 + if c == "}": + if pos < len(format_value) - 1 and format_value[pos + 1] == "}": + pos += 1 + else: + msg.fail( + "Invalid conversion specifier in format string: unexpected }", + ctx, + code=codes.STRING_FORMATTING, + ) + return None + else: + # Adjust nesting level, then either continue adding chars or move on. + if c == "{": + nesting += 1 + if c == "}": + nesting -= 1 + if nesting: + next_spec += c + else: + result.append((next_spec, pos - len(next_spec))) + next_spec = "" + pos += 1 + if nesting: + msg.fail( + "Invalid conversion specifier in format string: unmatched {", + ctx, + code=codes.STRING_FORMATTING, + ) + return None + return result + + +class StringFormatterChecker: + """String interpolation/formatter type checker. + + This class works closely together with checker.ExpressionChecker. + """ + + # Some services are provided by a TypeChecker instance. + chk: TypeCheckerSharedApi + # This is shared with TypeChecker, but stored also here for convenience. + msg: MessageBuilder + + def __init__(self, chk: TypeCheckerSharedApi, msg: MessageBuilder) -> None: + """Construct an expression type checker.""" + self.chk = chk + self.msg = msg + + def check_str_format_call(self, call: CallExpr, format_value: str) -> None: + """Perform more precise checks for str.format() calls when possible. + + Currently the checks are performed for: + * Actual string literals + * Literal types with string values + * Final names with string values + + The checks that we currently perform: + * Check generic validity (e.g. unmatched { or }, and {} in invalid positions) + * Check consistency of specifiers' auto-numbering + * Verify that replacements can be found for all conversion specifiers, + and all arguments were used + * Non-standard format specs are only allowed for types with custom __format__ + * Type check replacements with accessors applied (if any). 
+ * Verify that specifier type is known and matches replacement type + * Perform special checks for some specifier types: + - 'c' requires a single character string + - 's' must not accept bytes + - non-empty flags are only allowed for numeric types + """ + conv_specs = parse_format_value(format_value, call, self.msg) + if conv_specs is None: + return + if not self.auto_generate_keys(conv_specs, call): + return + self.check_specs_in_format_call(call, conv_specs, format_value) + + def check_specs_in_format_call( + self, call: CallExpr, specs: list[ConversionSpecifier], format_value: str + ) -> None: + """Perform pairwise checks for conversion specifiers vs their replacements. + + The core logic for format checking is implemented in this method. + """ + assert all(s.key for s in specs), "Keys must be auto-generated first!" + replacements = self.find_replacements_in_call(call, [cast(str, s.key) for s in specs]) + assert len(replacements) == len(specs) + for spec, repl in zip(specs, replacements): + repl = self.apply_field_accessors(spec, repl, ctx=call) + actual_type = repl.type if isinstance(repl, TempNode) else self.chk.lookup_type(repl) + assert actual_type is not None + + # Special case custom formatting. + if ( + spec.format_spec + and spec.non_standard_format_spec + and + # Exclude "dynamic" specifiers (i.e. containing nested formatting). + not ("{" in spec.format_spec or "}" in spec.format_spec) + ): + if ( + not custom_special_method(actual_type, "__format__", check_all=True) + or spec.conversion + ): + # TODO: add support for some custom specs like datetime? + self.msg.fail( + f'Unrecognized format specification "{spec.format_spec[1:]}"', + call, + code=codes.STRING_FORMATTING, + ) + continue + # Adjust expected and actual types. + if not spec.conv_type: + expected_type: Type | None = AnyType(TypeOfAny.special_form) + else: + assert isinstance(call.callee, MemberExpr) + if isinstance(call.callee.expr, StrExpr): + format_str = call.callee.expr + else: + format_str = StrExpr(format_value) + expected_type = self.conversion_type( + spec.conv_type, call, format_str, format_call=True + ) + if spec.conversion is not None: + # If the explicit conversion is given, then explicit conversion is called _first_. + if spec.conversion[1] not in "rsa": + self.msg.fail( + ( + f'Invalid conversion type "{spec.conversion[1]}", ' + f'must be one of "r", "s" or "a"' + ), + call, + code=codes.STRING_FORMATTING, + ) + actual_type = self.named_type("builtins.str") + + # Perform the checks for given types. + if expected_type is None: + continue + + a_type = get_proper_type(actual_type) + actual_items = ( + get_proper_types(a_type.items) if isinstance(a_type, UnionType) else [a_type] + ) + for a_type in actual_items: + if custom_special_method(a_type, "__format__"): + continue + self.check_placeholder_type(a_type, expected_type, call) + self.perform_special_format_checks(spec, call, repl, a_type, expected_type) + + def perform_special_format_checks( + self, + spec: ConversionSpecifier, + call: CallExpr, + repl: Expression, + actual_type: Type, + expected_type: Type, + ) -> None: + # TODO: try refactoring to combine this logic with % formatting. 
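+        # Rough sketch of calls the branches below would flag (hypothetical user code):
+        #   "{:c}".format("ab")   -> 'c' wants an int or a single character
+        #   "{}".format(b"abc")   -> renders as "b'abc'", not "abc"
+        #   "{:+}".format("x")    -> numeric flags on a non-numeric replacement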
+ if spec.conv_type == "c": + if isinstance(repl, (StrExpr, BytesExpr)) and len(repl.value) != 1: + self.msg.requires_int_or_char(call, format_call=True) + c_typ = get_proper_type(self.chk.lookup_type(repl)) + if isinstance(c_typ, Instance) and c_typ.last_known_value: + c_typ = c_typ.last_known_value + if isinstance(c_typ, LiteralType) and isinstance(c_typ.value, str): + if len(c_typ.value) != 1: + self.msg.requires_int_or_char(call, format_call=True) + if (not spec.conv_type or spec.conv_type == "s") and not spec.conversion: + if has_type_component(actual_type, "builtins.bytes") and not custom_special_method( + actual_type, "__str__" + ): + self.msg.fail( + 'If x = b\'abc\' then f"{x}" or "{}".format(x) produces "b\'abc\'", ' + 'not "abc". If this is desired behavior, use f"{x!r}" or "{!r}".format(x). ' + "Otherwise, decode the bytes", + call, + code=codes.STR_BYTES_PY3, + ) + if spec.flags: + numeric_types = UnionType( + [self.named_type("builtins.int"), self.named_type("builtins.float")] + ) + if ( + spec.conv_type + and spec.conv_type not in NUMERIC_TYPES_NEW + or not spec.conv_type + and not is_subtype(actual_type, numeric_types) + and not custom_special_method(actual_type, "__format__") + ): + self.msg.fail( + "Numeric flags are only allowed for numeric types", + call, + code=codes.STRING_FORMATTING, + ) + + def find_replacements_in_call(self, call: CallExpr, keys: list[str]) -> list[Expression]: + """Find replacement expression for every specifier in str.format() call. + + In case of an error use TempNode(AnyType). + """ + result: list[Expression] = [] + used: set[Expression] = set() + for key in keys: + if key.isdecimal(): + expr = self.get_expr_by_position(int(key), call) + if not expr: + self.msg.fail( + f"Cannot find replacement for positional format specifier {key}", + call, + code=codes.STRING_FORMATTING, + ) + expr = TempNode(AnyType(TypeOfAny.from_error)) + else: + expr = self.get_expr_by_name(key, call) + if not expr: + self.msg.fail( + f'Cannot find replacement for named format specifier "{key}"', + call, + code=codes.STRING_FORMATTING, + ) + expr = TempNode(AnyType(TypeOfAny.from_error)) + result.append(expr) + if not isinstance(expr, TempNode): + used.add(expr) + # Strictly speaking not using all replacements is not a type error, but most likely + # a typo in user code, so we show an error like we do for % formatting. + total_explicit = len([kind for kind in call.arg_kinds if kind in (ARG_POS, ARG_NAMED)]) + if len(used) < total_explicit: + self.msg.too_many_string_formatting_arguments(call) + return result + + def get_expr_by_position(self, pos: int, call: CallExpr) -> Expression | None: + """Get positional replacement expression from '{0}, {1}'.format(x, y, ...) call. + + If the type is from *args, return TempNode(). Return None in case of + an error. + """ + pos_args = [arg for arg, kind in zip(call.args, call.arg_kinds) if kind == ARG_POS] + if pos < len(pos_args): + return pos_args[pos] + star_args = [arg for arg, kind in zip(call.args, call.arg_kinds) if kind == ARG_STAR] + if not star_args: + return None + + # Fall back to *args when present in call. + star_arg = star_args[0] + varargs_type = get_proper_type(self.chk.lookup_type(star_arg)) + if not isinstance(varargs_type, Instance) or not varargs_type.type.has_base( + "typing.Sequence" + ): + # Error should be already reported. 
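+            # Fall back to Any for the *args replacement instead of reporting twice.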
+ return TempNode(AnyType(TypeOfAny.special_form)) + iter_info = self.chk.named_generic_type( + "typing.Sequence", [AnyType(TypeOfAny.special_form)] + ).type + return TempNode(map_instance_to_supertype(varargs_type, iter_info).args[0]) + + def get_expr_by_name(self, key: str, call: CallExpr) -> Expression | None: + """Get named replacement expression from '{name}'.format(name=...) call. + + If the type is from **kwargs, return TempNode(). Return None in case of + an error. + """ + named_args = [ + arg + for arg, kind, name in zip(call.args, call.arg_kinds, call.arg_names) + if kind == ARG_NAMED and name == key + ] + if named_args: + return named_args[0] + star_args_2 = [arg for arg, kind in zip(call.args, call.arg_kinds) if kind == ARG_STAR2] + if not star_args_2: + return None + star_arg_2 = star_args_2[0] + kwargs_type = get_proper_type(self.chk.lookup_type(star_arg_2)) + if not isinstance(kwargs_type, Instance) or not kwargs_type.type.has_base( + "typing.Mapping" + ): + # Error should be already reported. + return TempNode(AnyType(TypeOfAny.special_form)) + any_type = AnyType(TypeOfAny.special_form) + mapping_info = self.chk.named_generic_type("typing.Mapping", [any_type, any_type]).type + return TempNode(map_instance_to_supertype(kwargs_type, mapping_info).args[1]) + + def auto_generate_keys(self, all_specs: list[ConversionSpecifier], ctx: Context) -> bool: + """Translate '{} {name} {}' to '{0} {name} {1}'. + + Return True if generation was successful, otherwise report an error and return false. + """ + some_defined = any(s.key and s.key.isdecimal() for s in all_specs) + all_defined = all(bool(s.key) for s in all_specs) + if some_defined and not all_defined: + self.msg.fail( + "Cannot combine automatic field numbering and manual field specification", + ctx, + code=codes.STRING_FORMATTING, + ) + return False + if all_defined: + return True + next_index = 0 + for spec in all_specs: + if not spec.key: + str_index = str(next_index) + spec.key = str_index + # Update also the full field (i.e. turn {.x} into {0.x}). + if not spec.field: + spec.field = str_index + else: + spec.field = str_index + spec.field + next_index += 1 + return True + + def apply_field_accessors( + self, spec: ConversionSpecifier, repl: Expression, ctx: Context + ) -> Expression: + """Transform and validate expr in '{.attr[item]}'.format(expr) into expr.attr['item']. + + If validation fails, return TempNode(AnyType). + """ + assert spec.key, "Keys must be auto-generated first!" + if spec.field == spec.key: + return repl + assert spec.field + + temp_errors = Errors(self.chk.options) + dummy = DUMMY_FIELD_NAME + spec.field[len(spec.key) :] + temp_ast: Node = parse( + dummy, fnam="", module=None, options=self.chk.options, errors=temp_errors + ) + if temp_errors.is_errors(): + self.msg.fail( + f'Syntax error in format specifier "{spec.field}"', + ctx, + code=codes.STRING_FORMATTING, + ) + return TempNode(AnyType(TypeOfAny.from_error)) + + # These asserts are guaranteed by the original regexp. + assert isinstance(temp_ast, MypyFile) + temp_ast = temp_ast.defs[0] + assert isinstance(temp_ast, ExpressionStmt) + temp_ast = temp_ast.expr + if not self.validate_and_transform_accessors(temp_ast, repl, spec, ctx=ctx): + return TempNode(AnyType(TypeOfAny.from_error)) + + # Check if there are any other errors (like missing members). + # TODO: fix column to point to actual start of the format specifier _within_ string. 
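+        # Anchor the rewritten accessor at the call site so errors point at the format call.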
+ temp_ast.line = ctx.line + temp_ast.column = ctx.column + self.chk.expr_checker.accept(temp_ast) + return temp_ast + + def validate_and_transform_accessors( + self, + temp_ast: Expression, + original_repl: Expression, + spec: ConversionSpecifier, + ctx: Context, + ) -> bool: + """Validate and transform (in-place) format field accessors. + + On error, report it and return False. The transformations include replacing the dummy + variable with actual replacement expression and translating any name expressions in an + index into strings, so that this will work: + + class User(TypedDict): + name: str + id: int + u: User + '{[id]:d} -> {[name]}'.format(u) + """ + if not isinstance(temp_ast, (MemberExpr, IndexExpr)): + self.msg.fail( + "Only index and member expressions are allowed in" + ' format field accessors; got "{}"'.format(spec.field), + ctx, + code=codes.STRING_FORMATTING, + ) + return False + if isinstance(temp_ast, MemberExpr): + node = temp_ast.expr + else: + node = temp_ast.base + if not isinstance(temp_ast.index, (NameExpr, IntExpr)): + assert spec.key, "Call this method only after auto-generating keys!" + assert spec.field + self.msg.fail( + 'Invalid index expression in format field accessor "{}"'.format( + spec.field[len(spec.key) :] + ), + ctx, + code=codes.STRING_FORMATTING, + ) + return False + if isinstance(temp_ast.index, NameExpr): + temp_ast.index = StrExpr(temp_ast.index.name) + if isinstance(node, NameExpr) and node.name == DUMMY_FIELD_NAME: + # Replace it with the actual replacement expression. + assert isinstance(temp_ast, (IndexExpr, MemberExpr)) # XXX: this is redundant + if isinstance(temp_ast, IndexExpr): + temp_ast.base = original_repl + else: + temp_ast.expr = original_repl + return True + node.line = ctx.line + node.column = ctx.column + return self.validate_and_transform_accessors( + node, original_repl=original_repl, spec=spec, ctx=ctx + ) + + # TODO: In Python 3, the bytes formatting has a more restricted set of options + # compared to string formatting. + def check_str_interpolation(self, expr: FormatStringExpr, replacements: Expression) -> Type: + """Check the types of the 'replacements' in a string interpolation + expression: str % replacements. 
+ """ + self.chk.expr_checker.accept(expr) + specifiers = parse_conversion_specifiers(expr.value) + has_mapping_keys = self.analyze_conversion_specifiers(specifiers, expr) + if has_mapping_keys is None: + pass # Error was reported + elif has_mapping_keys: + self.check_mapping_str_interpolation(specifiers, replacements, expr) + else: + self.check_simple_str_interpolation(specifiers, replacements, expr) + + if isinstance(expr, BytesExpr): + return self.named_type("builtins.bytes") + elif isinstance(expr, StrExpr): + return self.named_type("builtins.str") + else: + assert False + + def analyze_conversion_specifiers( + self, specifiers: list[ConversionSpecifier], context: Context + ) -> bool | None: + has_star = any(specifier.has_star() for specifier in specifiers) + has_key = any(specifier.has_key() for specifier in specifiers) + all_have_keys = all( + specifier.has_key() or specifier.conv_type == "%" for specifier in specifiers + ) + + if has_key and has_star: + self.msg.string_interpolation_with_star_and_key(context) + return None + if has_key and not all_have_keys: + self.msg.string_interpolation_mixing_key_and_non_keys(context) + return None + return has_key + + def check_simple_str_interpolation( + self, + specifiers: list[ConversionSpecifier], + replacements: Expression, + expr: FormatStringExpr, + ) -> None: + """Check % string interpolation with positional specifiers '%s, %d' % ('yes, 42').""" + checkers = self.build_replacement_checkers(specifiers, replacements, expr) + if checkers is None: + return + + rhs_type = get_proper_type(self.accept(replacements)) + rep_types: list[Type] = [] + if isinstance(rhs_type, TupleType): + rep_types = rhs_type.items + unpack_index = find_unpack_in_list(rep_types) + if unpack_index is not None: + # TODO: we should probably warn about potentially short tuple. + # However, without special-casing for tuple(f(i) for in other_tuple) + # this causes false positive on mypy self-check in report.py. + extras = max(0, len(checkers) - len(rep_types) + 1) + unpacked = rep_types[unpack_index] + assert isinstance(unpacked, UnpackType) + unpacked = get_proper_type(unpacked.type) + if isinstance(unpacked, TypeVarTupleType): + unpacked = get_proper_type(unpacked.upper_bound) + assert ( + isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple" + ) + unpack_items = [unpacked.args[0]] * extras + rep_types = rep_types[:unpack_index] + unpack_items + rep_types[unpack_index + 1 :] + elif isinstance(rhs_type, AnyType): + return + elif isinstance(rhs_type, Instance) and rhs_type.type.fullname == "builtins.tuple": + # Assume that an arbitrary-length tuple has the right number of items. + rep_types = [rhs_type.args[0]] * len(checkers) + elif isinstance(rhs_type, UnionType): + for typ in rhs_type.relevant_items(): + temp_node = TempNode(typ) + temp_node.line = replacements.line + self.check_simple_str_interpolation(specifiers, temp_node, expr) + return + else: + rep_types = [rhs_type] + + if len(checkers) > len(rep_types): + # Only check the fix-length Tuple type. Other Iterable types would skip. 
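+            # (e.g. "%s %s" % some_list is not reported as too few arguments, since the
+            # list length is unknown statically).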
+ if is_subtype(rhs_type, self.chk.named_type("typing.Iterable")) and not isinstance( + rhs_type, TupleType + ): + return + else: + self.msg.too_few_string_formatting_arguments(replacements) + elif len(checkers) < len(rep_types): + self.msg.too_many_string_formatting_arguments(replacements) + else: + if len(checkers) == 1: + check_node, check_type = checkers[0] + if isinstance(rhs_type, TupleType) and len(rhs_type.items) == 1: + check_type(rhs_type.items[0]) + else: + check_node(replacements) + elif isinstance(replacements, TupleExpr) and not any( + isinstance(item, StarExpr) for item in replacements.items + ): + for checks, rep_node in zip(checkers, replacements.items): + check_node, check_type = checks + check_node(rep_node) + else: + for checks, rep_type in zip(checkers, rep_types): + check_node, check_type = checks + check_type(rep_type) + + def check_mapping_str_interpolation( + self, + specifiers: list[ConversionSpecifier], + replacements: Expression, + expr: FormatStringExpr, + ) -> None: + """Check % string interpolation with names specifiers '%(name)s' % {'name': 'John'}.""" + if isinstance(replacements, DictExpr) and all( + isinstance(k, (StrExpr, BytesExpr)) for k, v in replacements.items + ): + mapping: dict[str, Type] = {} + for k, v in replacements.items: + if isinstance(expr, BytesExpr): + # Special case: for bytes formatting keys must be bytes. + if not isinstance(k, BytesExpr): + self.msg.fail( + "Dictionary keys in bytes formatting must be bytes, not strings", + expr, + code=codes.STRING_FORMATTING, + ) + key_str = cast(FormatStringExpr, k).value + mapping[key_str] = self.accept(v) + + for specifier in specifiers: + if specifier.conv_type == "%": + # %% is allowed in mappings, no checking is required + continue + assert specifier.key is not None + if specifier.key not in mapping: + self.msg.key_not_in_mapping(specifier.key, replacements) + return + rep_type = mapping[specifier.key] + assert specifier.conv_type is not None + expected_type = self.conversion_type(specifier.conv_type, replacements, expr) + if expected_type is None: + return + self.chk.check_subtype( + rep_type, + expected_type, + replacements, + message_registry.INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION, + "expression has type", + f"placeholder with key '{specifier.key}' has type", + code=codes.STRING_FORMATTING, + ) + if specifier.conv_type == "s": + self.check_s_special_cases(expr, rep_type, expr) + else: + rep_type = self.accept(replacements) + dict_type = self.build_dict_type(expr) + self.chk.check_subtype( + rep_type, + dict_type, + replacements, + message_registry.FORMAT_REQUIRES_MAPPING, + "expression has type", + "expected type for mapping is", + code=codes.STRING_FORMATTING, + ) + + def build_dict_type(self, expr: FormatStringExpr) -> Type: + """Build expected mapping type for right operand in % formatting.""" + any_type = AnyType(TypeOfAny.special_form) + if isinstance(expr, BytesExpr): + bytes_type = self.chk.named_generic_type("builtins.bytes", []) + return self.chk.named_generic_type( + "_typeshed.SupportsKeysAndGetItem", [bytes_type, any_type] + ) + elif isinstance(expr, StrExpr): + str_type = self.chk.named_generic_type("builtins.str", []) + return self.chk.named_generic_type( + "_typeshed.SupportsKeysAndGetItem", [str_type, any_type] + ) + else: + assert False, "Unreachable" + + def build_replacement_checkers( + self, specifiers: list[ConversionSpecifier], context: Context, expr: FormatStringExpr + ) -> list[Checkers] | None: + checkers: list[Checkers] = [] + for specifier in specifiers: + 
checker = self.replacement_checkers(specifier, context, expr) + if checker is None: + return None + checkers.extend(checker) + return checkers + + def replacement_checkers( + self, specifier: ConversionSpecifier, context: Context, expr: FormatStringExpr + ) -> list[Checkers] | None: + """Returns a list of tuples of two functions that check whether a replacement is + of the right type for the specifier. The first function takes a node and checks + its type in the right type context. The second function just checks a type. + """ + checkers: list[Checkers] = [] + + if specifier.width == "*": + checkers.append(self.checkers_for_star(context)) + if specifier.precision == "*": + checkers.append(self.checkers_for_star(context)) + + if specifier.conv_type == "c": + c = self.checkers_for_c_type(specifier.conv_type, context, expr) + if c is None: + return None + checkers.append(c) + elif specifier.conv_type is not None and specifier.conv_type != "%": + c = self.checkers_for_regular_type(specifier.conv_type, context, expr) + if c is None: + return None + checkers.append(c) + return checkers + + def checkers_for_star(self, context: Context) -> Checkers: + """Returns a tuple of check functions that check whether, respectively, + a node or a type is compatible with a star in a conversion specifier. + """ + expected = self.named_type("builtins.int") + + def check_type(type: Type) -> bool: + expected = self.named_type("builtins.int") + return self.chk.check_subtype( + type, expected, context, "* wants int", code=codes.STRING_FORMATTING + ) + + def check_expr(expr: Expression) -> None: + type = self.accept(expr, expected) + check_type(type) + + return check_expr, check_type + + def check_placeholder_type(self, typ: Type, expected_type: Type, context: Context) -> bool: + return self.chk.check_subtype( + typ, + expected_type, + context, + message_registry.INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION, + "expression has type", + "placeholder has type", + code=codes.STRING_FORMATTING, + ) + + def checkers_for_regular_type( + self, conv_type: str, context: Context, expr: FormatStringExpr + ) -> Checkers | None: + """Returns a tuple of check functions that check whether, respectively, + a node or a type is compatible with 'type'. Return None in case of an error. + """ + expected_type = self.conversion_type(conv_type, context, expr) + if expected_type is None: + return None + + def check_type(typ: Type) -> bool: + assert expected_type is not None + ret = self.check_placeholder_type(typ, expected_type, context) + if ret and conv_type == "s": + ret = self.check_s_special_cases(expr, typ, context) + return ret + + def check_expr(expr: Expression) -> None: + type = self.accept(expr, expected_type) + check_type(type) + + return check_expr, check_type + + def check_s_special_cases(self, expr: FormatStringExpr, typ: Type, context: Context) -> bool: + """Additional special cases for %s in bytes vs string context.""" + if isinstance(expr, StrExpr): + # Couple special cases for string formatting. + if has_type_component(typ, "builtins.bytes"): + self.msg.fail( + 'If x = b\'abc\' then "%s" % x produces "b\'abc\'", not "abc". ' + 'If this is desired behavior use "%r" % x. Otherwise, decode the bytes', + context, + code=codes.STR_BYTES_PY3, + ) + return False + if isinstance(expr, BytesExpr): + # A special case for bytes formatting: b'%s' actually requires bytes on Python 3. 
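+            # (at runtime b"%s" % "text" raises TypeError, so a str replacement is rejected).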
+ if has_type_component(typ, "builtins.str"): + self.msg.fail( + "On Python 3 b'%s' requires bytes, not string", + context, + code=codes.STRING_FORMATTING, + ) + return False + return True + + def checkers_for_c_type( + self, type: str, context: Context, format_expr: FormatStringExpr + ) -> Checkers | None: + """Returns a tuple of check functions that check whether, respectively, + a node or a type is compatible with 'type' that is a character type. + """ + expected_type = self.conversion_type(type, context, format_expr) + if expected_type is None: + return None + + def check_type(type: Type) -> bool: + assert expected_type is not None + if isinstance(format_expr, BytesExpr): + err_msg = '"%c" requires an integer in range(256) or a single byte' + else: + err_msg = '"%c" requires int or char' + return self.chk.check_subtype( + type, + expected_type, + context, + err_msg, + "expression has type", + code=codes.STRING_FORMATTING, + ) + + def check_expr(expr: Expression) -> None: + """int, or str with length 1""" + type = self.accept(expr, expected_type) + # We need further check with expr to make sure that + # it has exact one char or one single byte. + if check_type(type): + # Python 3 doesn't support b'%c' % str + if ( + isinstance(format_expr, BytesExpr) + and isinstance(expr, BytesExpr) + and len(expr.value) != 1 + ): + self.msg.requires_int_or_single_byte(context) + elif isinstance(expr, (StrExpr, BytesExpr)) and len(expr.value) != 1: + self.msg.requires_int_or_char(context) + + return check_expr, check_type + + def conversion_type( + self, p: str, context: Context, expr: FormatStringExpr, format_call: bool = False + ) -> Type | None: + """Return the type that is accepted for a string interpolation conversion specifier type. + + Note that both Python's float (e.g. %f) and integer (e.g. %d) + specifier types accept both float and integers. + + The 'format_call' argument indicates whether this type came from % interpolation or from + a str.format() call, the meaning of few formatting types are different. + """ + NUMERIC_TYPES = NUMERIC_TYPES_NEW if format_call else NUMERIC_TYPES_OLD + INT_TYPES = REQUIRE_INT_NEW if format_call else REQUIRE_INT_OLD + if p == "b" and not format_call: + if not isinstance(expr, BytesExpr): + self.msg.fail( + 'Format character "b" is only supported on bytes patterns', + context, + code=codes.STRING_FORMATTING, + ) + return None + return self.named_type("builtins.bytes") + elif p == "a": + # TODO: return type object? + return AnyType(TypeOfAny.special_form) + elif p in ["s", "r"]: + return AnyType(TypeOfAny.special_form) + elif p in NUMERIC_TYPES: + if p in INT_TYPES: + numeric_types = [self.named_type("builtins.int")] + else: + numeric_types = [ + self.named_type("builtins.int"), + self.named_type("builtins.float"), + ] + if not format_call: + if p in FLOAT_TYPES: + numeric_types.append(self.named_type("typing.SupportsFloat")) + else: + numeric_types.append(self.named_type("typing.SupportsInt")) + return UnionType.make_union(numeric_types) + elif p in ["c"]: + if isinstance(expr, BytesExpr): + return UnionType( + [self.named_type("builtins.int"), self.named_type("builtins.bytes")] + ) + else: + return UnionType( + [self.named_type("builtins.int"), self.named_type("builtins.str")] + ) + else: + self.msg.unsupported_placeholder(p, context) + return None + + # + # Helpers + # + + def named_type(self, name: str) -> Instance: + """Return an instance type with type given by the name and no type + arguments. Alias for TypeChecker.named_type. 
+ """ + return self.chk.named_type(name) + + def accept(self, expr: Expression, context: Type | None = None) -> Type: + """Type check a node. Alias for TypeChecker.accept.""" + return self.chk.expr_checker.accept(expr, context) + + +def has_type_component(typ: Type, fullname: str) -> bool: + """Is this a specific instance type, or a union that contains it? + + We use this ad-hoc function instead of a proper visitor or subtype check + because some str vs bytes errors are strictly speaking not runtime errors, + but rather highly counter-intuitive behavior. This is similar to what is used for + --strict-equality. + """ + typ = get_proper_type(typ) + if isinstance(typ, Instance): + return typ.type.has_base(fullname) + elif isinstance(typ, TypeVarType): + return has_type_component(typ.upper_bound, fullname) or any( + has_type_component(v, fullname) for v in typ.values + ) + elif isinstance(typ, UnionType): + return any(has_type_component(t, fullname) for t in typ.relevant_items()) + return False diff --git a/.venv/lib/python3.12/site-packages/mypy/config_parser.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/config_parser.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..299448e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/config_parser.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/config_parser.py b/.venv/lib/python3.12/site-packages/mypy/config_parser.py new file mode 100644 index 0000000..2bfd2a1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/config_parser.py @@ -0,0 +1,729 @@ +from __future__ import annotations + +import argparse +import configparser +import glob as fileglob +import os +import re +import sys +from io import StringIO + +if sys.version_info >= (3, 11): + import tomllib +else: + import tomli as tomllib + +from collections.abc import Mapping, MutableMapping, Sequence +from typing import Any, Callable, Final, TextIO, Union +from typing_extensions import Never, TypeAlias + +from mypy import defaults +from mypy.options import PER_MODULE_OPTIONS, Options + +_CONFIG_VALUE_TYPES: TypeAlias = Union[ + str, bool, int, float, dict[str, str], list[str], tuple[int, int] +] +_INI_PARSER_CALLABLE: TypeAlias = Callable[[Any], _CONFIG_VALUE_TYPES] + + +class VersionTypeError(argparse.ArgumentTypeError): + """Provide a fallback value if the Python version is unsupported.""" + + def __init__(self, *args: Any, fallback: tuple[int, int]) -> None: + self.fallback = fallback + super().__init__(*args) + + +def parse_version(v: str | float) -> tuple[int, int]: + m = re.match(r"\A(\d)\.(\d+)\Z", str(v)) + if not m: + raise argparse.ArgumentTypeError(f"Invalid python version '{v}' (expected format: 'x.y')") + major, minor = int(m.group(1)), int(m.group(2)) + if major == 2 and minor == 7: + pass # Error raised elsewhere + elif major == 3: + if minor < defaults.PYTHON3_VERSION_MIN[1]: + msg = "Python 3.{} is not supported (must be {}.{} or higher)".format( + minor, *defaults.PYTHON3_VERSION_MIN + ) + + if isinstance(v, float): + msg += ". You may need to put quotes around your Python version" + + raise VersionTypeError(msg, fallback=defaults.PYTHON3_VERSION_MIN) + else: + raise argparse.ArgumentTypeError( + f"Python major version '{major}' out of range (must be 3)" + ) + return major, minor + + +def try_split(v: str | Sequence[str] | object, split_regex: str = ",") -> list[str]: + """Split and trim a str or sequence (eg: list) of str into a list of str. 
+ If an element of the input is not str, a type error will be raised.""" + + def complain(x: object, additional_info: str = "") -> Never: + raise argparse.ArgumentTypeError( + f"Expected a list or a stringified version thereof, but got: '{x}', of type {type(x).__name__}.{additional_info}" + ) + + if isinstance(v, str): + items = [p.strip() for p in re.split(split_regex, v)] + if items and items[-1] == "": + items.pop(-1) + return items + elif isinstance(v, Sequence): + return [ + ( + p.strip() + if isinstance(p, str) + else complain(p, additional_info=" (As an element of the list.)") + ) + for p in v + ] + else: + complain(v) + + +def validate_package_allow_list(allow_list: list[str]) -> list[str]: + for p in allow_list: + msg = f"Invalid allow list entry: {p}" + if "*" in p: + raise argparse.ArgumentTypeError( + f"{msg} (entries are already prefixes so must not contain *)" + ) + if "\\" in p or "/" in p: + raise argparse.ArgumentTypeError( + f"{msg} (entries must be packages like foo.bar not directories or files)" + ) + return allow_list + + +def expand_path(path: str) -> str: + """Expand the user home directory and any environment variables contained within + the provided path. + """ + + return os.path.expandvars(os.path.expanduser(path)) + + +def str_or_array_as_list(v: str | Sequence[str]) -> list[str]: + if isinstance(v, str): + return [v.strip()] if v.strip() else [] + return [p.strip() for p in v if p.strip()] + + +def split_and_match_files_list(paths: Sequence[str]) -> list[str]: + """Take a list of files/directories (with support for globbing through the glob library). + + Where a path/glob matches no file, we still include the raw path in the resulting list. + + Returns a list of file paths + """ + expanded_paths = [] + + for path in paths: + path = expand_path(path.strip()) + globbed_files = fileglob.glob(path, recursive=True) + if globbed_files: + expanded_paths.extend(globbed_files) + else: + expanded_paths.append(path) + + return expanded_paths + + +def split_and_match_files(paths: str) -> list[str]: + """Take a string representing a list of files/directories (with support for globbing + through the glob library). + + Where a path/glob matches no file, we still include the raw path in the resulting list. + + Returns a list of file paths + """ + + return split_and_match_files_list(split_commas(paths)) + + +def check_follow_imports(choice: str) -> str: + choices = ["normal", "silent", "skip", "error"] + if choice not in choices: + raise argparse.ArgumentTypeError( + "invalid choice '{}' (choose from {})".format( + choice, ", ".join(f"'{x}'" for x in choices) + ) + ) + return choice + + +def check_junit_format(choice: str) -> str: + choices = ["global", "per_file"] + if choice not in choices: + raise argparse.ArgumentTypeError( + "invalid choice '{}' (choose from {})".format( + choice, ", ".join(f"'{x}'" for x in choices) + ) + ) + return choice + + +def split_commas(value: str) -> list[str]: + # Uses a bit smarter technique to allow last trailing comma + # and to remove last `""` item from the split. + items = value.split(",") + if items and items[-1] == "": + items.pop(-1) + return items + + +# For most options, the type of the default value set in options.py is +# sufficient, and we don't have to do anything here. This table +# exists to specify types for values initialized to None or container +# types. 
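+# For example (illustrative values): "python_version = 3.12" runs through parse_version
+# and becomes (3, 12), while "plugins = a.py, b.py" is split on commas into
+# ["a.py", "b.py"].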
+ini_config_types: Final[dict[str, _INI_PARSER_CALLABLE]] = { + "python_version": parse_version, + "custom_typing_module": str, + "custom_typeshed_dir": expand_path, + "mypy_path": lambda s: [expand_path(p.strip()) for p in re.split("[,:]", s)], + "files": split_and_match_files, + "quickstart_file": expand_path, + "junit_xml": expand_path, + "junit_format": check_junit_format, + "follow_imports": check_follow_imports, + "no_site_packages": bool, + "plugins": lambda s: [p.strip() for p in split_commas(s)], + "always_true": lambda s: [p.strip() for p in split_commas(s)], + "always_false": lambda s: [p.strip() for p in split_commas(s)], + "untyped_calls_exclude": lambda s: validate_package_allow_list( + [p.strip() for p in split_commas(s)] + ), + "enable_incomplete_feature": lambda s: [p.strip() for p in split_commas(s)], + "disable_error_code": lambda s: [p.strip() for p in split_commas(s)], + "enable_error_code": lambda s: [p.strip() for p in split_commas(s)], + "package_root": lambda s: [p.strip() for p in split_commas(s)], + "cache_dir": expand_path, + "python_executable": expand_path, + "strict": bool, + "exclude": lambda s: [s.strip()], + "packages": try_split, + "modules": try_split, +} + +# Reuse the ini_config_types and overwrite the diff +toml_config_types: Final[dict[str, _INI_PARSER_CALLABLE]] = ini_config_types.copy() +toml_config_types.update( + { + "python_version": parse_version, + "mypy_path": lambda s: [expand_path(p) for p in try_split(s, "[,:]")], + "files": lambda s: split_and_match_files_list(try_split(s)), + "junit_format": lambda s: check_junit_format(str(s)), + "follow_imports": lambda s: check_follow_imports(str(s)), + "plugins": try_split, + "always_true": try_split, + "always_false": try_split, + "untyped_calls_exclude": lambda s: validate_package_allow_list(try_split(s)), + "enable_incomplete_feature": try_split, + "disable_error_code": lambda s: try_split(s), + "enable_error_code": lambda s: try_split(s), + "package_root": try_split, + "exclude": str_or_array_as_list, + "packages": try_split, + "modules": try_split, + } +) + + +def _parse_individual_file( + config_file: str, stderr: TextIO | None = None +) -> tuple[MutableMapping[str, Any], dict[str, _INI_PARSER_CALLABLE], str] | None: + + if not os.path.exists(config_file): + return None + + parser: MutableMapping[str, Any] + try: + if is_toml(config_file): + with open(config_file, "rb") as f: + toml_data = tomllib.load(f) + # Filter down to just mypy relevant toml keys + toml_data = toml_data.get("tool", {}) + if "mypy" not in toml_data: + return None + toml_data = {"mypy": toml_data["mypy"]} + parser = destructure_overrides(toml_data) + config_types = toml_config_types + else: + parser = configparser.RawConfigParser() + parser.read(config_file) + config_types = ini_config_types + + except (tomllib.TOMLDecodeError, configparser.Error, ConfigTOMLValueError) as err: + print(f"{config_file}: {err}", file=stderr) + return None + + if os.path.basename(config_file) in defaults.SHARED_CONFIG_NAMES and "mypy" not in parser: + return None + + return parser, config_types, config_file + + +def _find_config_file( + stderr: TextIO | None = None, +) -> tuple[MutableMapping[str, Any], dict[str, _INI_PARSER_CALLABLE], str] | None: + + current_dir = os.path.abspath(os.getcwd()) + + while True: + for name in defaults.CONFIG_NAMES + defaults.SHARED_CONFIG_NAMES: + config_file = os.path.relpath(os.path.join(current_dir, name)) + ret = _parse_individual_file(config_file, stderr) + if ret is None: + continue + return ret + + if any( 
+ os.path.exists(os.path.join(current_dir, cvs_root)) for cvs_root in (".git", ".hg") + ): + break + parent_dir = os.path.dirname(current_dir) + if parent_dir == current_dir: + break + current_dir = parent_dir + + for config_file in defaults.USER_CONFIG_FILES: + ret = _parse_individual_file(config_file, stderr) + if ret is None: + continue + return ret + + return None + + +def parse_config_file( + options: Options, + set_strict_flags: Callable[[], None], + filename: str | None, + stdout: TextIO | None = None, + stderr: TextIO | None = None, +) -> None: + """Parse a config file into an Options object. + + Errors are written to stderr but are not fatal. + + If filename is None, fall back to default config files. + """ + stdout = stdout or sys.stdout + stderr = stderr or sys.stderr + + ret = ( + _parse_individual_file(filename, stderr) + if filename is not None + else _find_config_file(stderr) + ) + if ret is None: + return + parser, config_types, file_read = ret + + options.config_file = file_read + os.environ["MYPY_CONFIG_FILE_DIR"] = os.path.dirname(os.path.abspath(file_read)) + + if "mypy" not in parser: + if filename or os.path.basename(file_read) not in defaults.SHARED_CONFIG_NAMES: + print(f"{file_read}: No [mypy] section in config file", file=stderr) + else: + section = parser["mypy"] + prefix = f"{file_read}: [mypy]: " + updates, report_dirs = parse_section( + prefix, options, set_strict_flags, section, config_types, stderr + ) + for k, v in updates.items(): + setattr(options, k, v) + options.report_dirs.update(report_dirs) + + for name, section in parser.items(): + if name.startswith("mypy-"): + prefix = get_prefix(file_read, name) + updates, report_dirs = parse_section( + prefix, options, set_strict_flags, section, config_types, stderr + ) + if report_dirs: + print( + prefix, + "Per-module sections should not specify reports ({})".format( + ", ".join(s + "_report" for s in sorted(report_dirs)) + ), + file=stderr, + ) + if set(updates) - PER_MODULE_OPTIONS: + print( + prefix, + "Per-module sections should only specify per-module flags ({})".format( + ", ".join(sorted(set(updates) - PER_MODULE_OPTIONS)) + ), + file=stderr, + ) + updates = {k: v for k, v in updates.items() if k in PER_MODULE_OPTIONS} + + globs = name[5:] + for glob in globs.split(","): + # For backwards compatibility, replace (back)slashes with dots. + glob = glob.replace(os.sep, ".") + if os.altsep: + glob = glob.replace(os.altsep, ".") + + if any(c in glob for c in "?[]!") or any( + "*" in x and x != "*" for x in glob.split(".") + ): + print( + prefix, + "Patterns must be fully-qualified module names, optionally " + "with '*' in some components (e.g spam.*.eggs.*)", + file=stderr, + ) + else: + options.per_module_options[glob] = updates + + +def get_prefix(file_read: str, name: str) -> str: + if is_toml(file_read): + module_name_str = 'module = "%s"' % "-".join(name.split("-")[1:]) + else: + module_name_str = name + + return f"{file_read}: [{module_name_str}]:" + + +def is_toml(filename: str) -> bool: + return filename.lower().endswith(".toml") + + +def destructure_overrides(toml_data: dict[str, Any]) -> dict[str, Any]: + """Take the new [[tool.mypy.overrides]] section array in the pyproject.toml file, + and convert it back to a flatter structure that the existing config_parser can handle. + + E.g. 
the following pyproject.toml file: + + [[tool.mypy.overrides]] + module = [ + "a.b", + "b.*" + ] + disallow_untyped_defs = true + + [[tool.mypy.overrides]] + module = 'c' + disallow_untyped_defs = false + + Would map to the following config dict that it would have gotten from parsing an equivalent + ini file: + + { + "mypy-a.b": { + disallow_untyped_defs = true, + }, + "mypy-b.*": { + disallow_untyped_defs = true, + }, + "mypy-c": { + disallow_untyped_defs: false, + }, + } + """ + if "overrides" not in toml_data["mypy"]: + return toml_data + + if not isinstance(toml_data["mypy"]["overrides"], list): + raise ConfigTOMLValueError( + "tool.mypy.overrides sections must be an array. Please make " + "sure you are using double brackets like so: [[tool.mypy.overrides]]" + ) + + result = toml_data.copy() + for override in result["mypy"]["overrides"]: + if "module" not in override: + raise ConfigTOMLValueError( + "toml config file contains a [[tool.mypy.overrides]] " + "section, but no module to override was specified." + ) + + if isinstance(override["module"], str): + modules = [override["module"]] + elif isinstance(override["module"], list): + modules = override["module"] + else: + raise ConfigTOMLValueError( + "toml config file contains a [[tool.mypy.overrides]] " + "section with a module value that is not a string or a list of " + "strings" + ) + + for module in modules: + module_overrides = override.copy() + del module_overrides["module"] + old_config_name = f"mypy-{module}" + if old_config_name not in result: + result[old_config_name] = module_overrides + else: + for new_key, new_value in module_overrides.items(): + if ( + new_key in result[old_config_name] + and result[old_config_name][new_key] != new_value + ): + raise ConfigTOMLValueError( + "toml config file contains " + "[[tool.mypy.overrides]] sections with conflicting " + f"values. Module '{module}' has two different values for '{new_key}'" + ) + result[old_config_name][new_key] = new_value + + del result["mypy"]["overrides"] + return result + + +def parse_section( + prefix: str, + template: Options, + set_strict_flags: Callable[[], None], + section: Mapping[str, Any], + config_types: dict[str, Any], + stderr: TextIO = sys.stderr, +) -> tuple[dict[str, object], dict[str, str]]: + """Parse one section of a config file. + + Returns a dict of option values encountered, and a dict of report directories. 
+ """ + results: dict[str, object] = {} + report_dirs: dict[str, str] = {} + + # Because these fields exist on Options, without proactive checking, we would accept them + # and crash later + invalid_options = { + "enabled_error_codes": "enable_error_code", + "disabled_error_codes": "disable_error_code", + } + + for key in section: + invert = False + options_key = key + if key in config_types: + ct = config_types[key] + elif key in invalid_options: + print( + f"{prefix}Unrecognized option: {key} = {section[key]}" + f" (did you mean {invalid_options[key]}?)", + file=stderr, + ) + continue + else: + dv = getattr(template, key, None) + if dv is None: + if key.endswith("_report"): + report_type = key[:-7].replace("_", "-") + if report_type in defaults.REPORTER_NAMES: + report_dirs[report_type] = str(section[key]) + else: + print(f"{prefix}Unrecognized report type: {key}", file=stderr) + continue + if key.startswith("x_"): + pass # Don't complain about `x_blah` flags + elif key.startswith("no_") and hasattr(template, key[3:]): + options_key = key[3:] + invert = True + elif key.startswith("allow") and hasattr(template, "dis" + key): + options_key = "dis" + key + invert = True + elif key.startswith("disallow") and hasattr(template, key[3:]): + options_key = key[3:] + invert = True + elif key.startswith("show_") and hasattr(template, "hide_" + key[5:]): + options_key = "hide_" + key[5:] + invert = True + elif key == "strict": + pass # Special handling below + else: + print(f"{prefix}Unrecognized option: {key} = {section[key]}", file=stderr) + if invert: + dv = getattr(template, options_key, None) + else: + continue + ct = type(dv) + v: Any = None + try: + if ct is bool: + if isinstance(section, dict): + v = convert_to_boolean(section.get(key)) + else: + v = section.getboolean(key) # type: ignore[attr-defined] # Until better stub + if invert: + v = not v + elif callable(ct): + if invert: + print(f"{prefix}Can not invert non-boolean key {options_key}", file=stderr) + continue + try: + v = ct(section.get(key)) + except VersionTypeError as err_version: + print(f"{prefix}{key}: {err_version}", file=stderr) + v = err_version.fallback + except argparse.ArgumentTypeError as err: + print(f"{prefix}{key}: {err}", file=stderr) + continue + else: + print(f"{prefix}Don't know what type {key} should have", file=stderr) + continue + except ValueError as err: + print(f"{prefix}{key}: {err}", file=stderr) + continue + if key == "strict": + if v: + set_strict_flags() + continue + results[options_key] = v + + # These two flags act as per-module overrides, so store the empty defaults. + if "disable_error_code" not in results: + results["disable_error_code"] = [] + if "enable_error_code" not in results: + results["enable_error_code"] = [] + + return results, report_dirs + + +def convert_to_boolean(value: Any | None) -> bool: + """Return a boolean value translating from other types if necessary.""" + if isinstance(value, bool): + return value + if not isinstance(value, str): + value = str(value) + if value.lower() not in configparser.RawConfigParser.BOOLEAN_STATES: + raise ValueError(f"Not a boolean: {value}") + return configparser.RawConfigParser.BOOLEAN_STATES[value.lower()] + + +def split_directive(s: str) -> tuple[list[str], list[str]]: + """Split s on commas, except during quoted sections. 
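+
+    For example, '1, "2,3", 4' yields the parts ['1', '2,3', '4'] and no errors.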
+ + Returns the parts and a list of error messages.""" + parts = [] + cur: list[str] = [] + errors = [] + i = 0 + while i < len(s): + if s[i] == ",": + parts.append("".join(cur).strip()) + cur = [] + elif s[i] == '"': + i += 1 + while i < len(s) and s[i] != '"': + cur.append(s[i]) + i += 1 + if i == len(s): + errors.append("Unterminated quote in configuration comment") + cur.clear() + else: + cur.append(s[i]) + i += 1 + if cur: + parts.append("".join(cur).strip()) + + return parts, errors + + +def mypy_comments_to_config_map(line: str, template: Options) -> tuple[dict[str, str], list[str]]: + """Rewrite the mypy comment syntax into ini file syntax.""" + options = {} + entries, errors = split_directive(line) + for entry in entries: + if "=" not in entry: + name = entry + value = None + else: + name, value = (x.strip() for x in entry.split("=", 1)) + + name = name.replace("-", "_") + if value is None: + value = "True" + options[name] = value + + return options, errors + + +def parse_mypy_comments( + args: list[tuple[int, str]], template: Options +) -> tuple[dict[str, object], list[tuple[int, str]]]: + """Parse a collection of inline mypy: configuration comments. + + Returns a dictionary of options to be applied and a list of error messages + generated. + """ + errors: list[tuple[int, str]] = [] + sections: dict[str, object] = {"enable_error_code": [], "disable_error_code": []} + + for lineno, line in args: + # In order to easily match the behavior for bools, we abuse configparser. + # Oddly, the only way to get the SectionProxy object with the getboolean + # method is to create a config parser. + parser = configparser.RawConfigParser() + options, parse_errors = mypy_comments_to_config_map(line, template) + if "python_version" in options: + errors.append((lineno, "python_version not supported in inline configuration")) + del options["python_version"] + + parser["dummy"] = options + errors.extend((lineno, x) for x in parse_errors) + + stderr = StringIO() + strict_found = False + + def set_strict_flags() -> None: + nonlocal strict_found + strict_found = True + + new_sections, reports = parse_section( + "", template, set_strict_flags, parser["dummy"], ini_config_types, stderr=stderr + ) + errors.extend((lineno, x) for x in stderr.getvalue().strip().split("\n") if x) + if reports: + errors.append((lineno, "Reports not supported in inline configuration")) + if strict_found: + errors.append( + ( + lineno, + 'Setting "strict" not supported in inline configuration: specify it in ' + "a configuration file instead, or set individual inline flags " + '(see "mypy -h" for the list of flags enabled in strict mode)', + ) + ) + # Because this is currently special-cased + # (the new_sections for an inline config *always* includes 'disable_error_code' and + # 'enable_error_code' fields, usually empty, which overwrite the old ones), + # we have to manipulate them specially. + # This could use a refactor, but so could the whole subsystem. 
+ if ( + "enable_error_code" in new_sections + and isinstance(neec := new_sections["enable_error_code"], list) + and isinstance(eec := sections.get("enable_error_code", []), list) + ): + new_sections["enable_error_code"] = sorted(set(neec + eec)) + if ( + "disable_error_code" in new_sections + and isinstance(ndec := new_sections["disable_error_code"], list) + and isinstance(dec := sections.get("disable_error_code", []), list) + ): + new_sections["disable_error_code"] = sorted(set(ndec + dec)) + sections.update(new_sections) + return sections, errors + + +def get_config_module_names(filename: str | None, modules: list[str]) -> str: + if not filename or not modules: + return "" + + if not is_toml(filename): + return ", ".join(f"[mypy-{module}]" for module in modules) + + return "module = ['%s']" % ("', '".join(sorted(modules))) + + +class ConfigTOMLValueError(ValueError): + pass diff --git a/.venv/lib/python3.12/site-packages/mypy/constant_fold.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/constant_fold.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..287e8e8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/constant_fold.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/constant_fold.py b/.venv/lib/python3.12/site-packages/mypy/constant_fold.py new file mode 100644 index 0000000..4582b2a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/constant_fold.py @@ -0,0 +1,187 @@ +"""Constant folding of expressions. + +For example, 3 + 5 can be constant folded into 8. +""" + +from __future__ import annotations + +from typing import Final, Union + +from mypy.nodes import ( + ComplexExpr, + Expression, + FloatExpr, + IntExpr, + NameExpr, + OpExpr, + StrExpr, + UnaryExpr, + Var, +) + +# All possible result types of constant folding +ConstantValue = Union[int, bool, float, complex, str] +CONST_TYPES: Final = (int, bool, float, complex, str) + + +def constant_fold_expr(expr: Expression, cur_mod_id: str) -> ConstantValue | None: + """Return the constant value of an expression for supported operations. + + Among other things, support int arithmetic and string + concatenation. For example, the expression 3 + 5 has the constant + value 8. + + Also bind simple references to final constants defined in the + current module (cur_mod_id). Binding to references is best effort + -- we don't bind references to other modules. Mypyc trusts these + to be correct in compiled modules, so that it can replace a + constant expression (or a reference to one) with the statically + computed value. We don't want to infer constant values based on + stubs, in particular, as these might not match the implementation + (due to version skew, for example). + + Return None if unsuccessful. 
+ """ + if isinstance(expr, IntExpr): + return expr.value + if isinstance(expr, StrExpr): + return expr.value + if isinstance(expr, FloatExpr): + return expr.value + if isinstance(expr, ComplexExpr): + return expr.value + elif isinstance(expr, NameExpr): + if expr.name == "True": + return True + elif expr.name == "False": + return False + node = expr.node + if ( + isinstance(node, Var) + and node.is_final + and node.fullname.rsplit(".", 1)[0] == cur_mod_id + ): + value = node.final_value + if isinstance(value, (CONST_TYPES)): + return value + elif isinstance(expr, OpExpr): + left = constant_fold_expr(expr.left, cur_mod_id) + right = constant_fold_expr(expr.right, cur_mod_id) + if left is not None and right is not None: + return constant_fold_binary_op(expr.op, left, right) + elif isinstance(expr, UnaryExpr): + value = constant_fold_expr(expr.expr, cur_mod_id) + if value is not None: + return constant_fold_unary_op(expr.op, value) + return None + + +def constant_fold_binary_op( + op: str, left: ConstantValue, right: ConstantValue +) -> ConstantValue | None: + if isinstance(left, int) and isinstance(right, int): + return constant_fold_binary_int_op(op, left, right) + + # Float and mixed int/float arithmetic. + if isinstance(left, float) and isinstance(right, float): + return constant_fold_binary_float_op(op, left, right) + elif isinstance(left, float) and isinstance(right, int): + return constant_fold_binary_float_op(op, left, right) + elif isinstance(left, int) and isinstance(right, float): + return constant_fold_binary_float_op(op, left, right) + + # String concatenation and multiplication. + if op == "+" and isinstance(left, str) and isinstance(right, str): + return left + right + elif op == "*" and isinstance(left, str) and isinstance(right, int): + return left * right + elif op == "*" and isinstance(left, int) and isinstance(right, str): + return left * right + + # Complex construction. 
+ if op == "+" and isinstance(left, (int, float)) and isinstance(right, complex): + return left + right + elif op == "+" and isinstance(left, complex) and isinstance(right, (int, float)): + return left + right + elif op == "-" and isinstance(left, (int, float)) and isinstance(right, complex): + return left - right + elif op == "-" and isinstance(left, complex) and isinstance(right, (int, float)): + return left - right + + return None + + +def constant_fold_binary_int_op(op: str, left: int, right: int) -> int | float | None: + if op == "+": + return left + right + if op == "-": + return left - right + elif op == "*": + return left * right + elif op == "/": + if right != 0: + return left / right + elif op == "//": + if right != 0: + return left // right + elif op == "%": + if right != 0: + return left % right + elif op == "&": + return left & right + elif op == "|": + return left | right + elif op == "^": + return left ^ right + elif op == "<<": + if right >= 0: + return left << right + elif op == ">>": + if right >= 0: + return left >> right + elif op == "**": + if right >= 0: + ret = left**right + assert isinstance(ret, int) + return ret + return None + + +def constant_fold_binary_float_op(op: str, left: int | float, right: int | float) -> float | None: + assert not (isinstance(left, int) and isinstance(right, int)), (op, left, right) + if op == "+": + return left + right + elif op == "-": + return left - right + elif op == "*": + return left * right + elif op == "/": + if right != 0: + return left / right + elif op == "//": + if right != 0: + return left // right + elif op == "%": + if right != 0: + return left % right + elif op == "**": + if (left < 0 and isinstance(right, int)) or left > 0: + try: + ret = left**right + except OverflowError: + return None + else: + assert isinstance(ret, float), ret + return ret + + return None + + +def constant_fold_unary_op(op: str, value: ConstantValue) -> int | float | None: + if op == "-" and isinstance(value, (int, float)): + return -value + elif op == "~" and isinstance(value, int): + return ~value + elif op == "+" and isinstance(value, (int, float)): + return value + return None diff --git a/.venv/lib/python3.12/site-packages/mypy/constraints.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/constraints.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..30cfe11 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/constraints.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/constraints.py b/.venv/lib/python3.12/site-packages/mypy/constraints.py new file mode 100644 index 0000000..96c0c7c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/constraints.py @@ -0,0 +1,1713 @@ +"""Type inference constraints.""" + +from __future__ import annotations + +from collections.abc import Iterable, Sequence +from typing import TYPE_CHECKING, Final, cast +from typing_extensions import TypeGuard + +import mypy.subtypes +import mypy.typeops +from mypy.argmap import ArgTypeExpander +from mypy.erasetype import erase_typevars +from mypy.maptype import map_instance_to_supertype +from mypy.nodes import ( + ARG_OPT, + ARG_POS, + ARG_STAR, + ARG_STAR2, + CONTRAVARIANT, + COVARIANT, + ArgKind, + TypeInfo, +) +from mypy.type_visitor import ALL_STRATEGY, BoolTypeQuery +from mypy.types import ( + TUPLE_LIKE_INSTANCE_NAMES, + AnyType, + CallableType, + DeletedType, + ErasedType, + Instance, + LiteralType, + NoneType, + NormalizedCallableType, + Overloaded, + Parameters, 
+ ParamSpecType, + PartialType, + ProperType, + TupleType, + Type, + TypeAliasType, + TypedDictType, + TypeOfAny, + TypeType, + TypeVarId, + TypeVarLikeType, + TypeVarTupleType, + TypeVarType, + TypeVisitor, + UnboundType, + UninhabitedType, + UnionType, + UnpackType, + find_unpack_in_list, + flatten_nested_tuples, + get_proper_type, + has_recursive_types, + has_type_vars, + is_named_instance, + split_with_prefix_and_suffix, +) +from mypy.types_utils import is_union_with_any +from mypy.typestate import type_state + +if TYPE_CHECKING: + from mypy.infer import ArgumentInferContext + +SUBTYPE_OF: Final = 0 +SUPERTYPE_OF: Final = 1 + + +class Constraint: + """A representation of a type constraint. + + It can be either T <: type or T :> type (T is a type variable). + """ + + type_var: TypeVarId + op = 0 # SUBTYPE_OF or SUPERTYPE_OF + target: Type + + def __init__(self, type_var: TypeVarLikeType, op: int, target: Type) -> None: + self.type_var = type_var.id + self.op = op + # TODO: should we add "assert not isinstance(target, UnpackType)"? + # UnpackType is a synthetic type, and is never valid as a constraint target. + self.target = target + self.origin_type_var = type_var + # These are additional type variables that should be solved for together with type_var. + # TODO: A cleaner solution may be to modify the return type of infer_constraints() + # to include these instead, but this is a rather big refactoring. + self.extra_tvars: list[TypeVarLikeType] = [] + + def __repr__(self) -> str: + op_str = "<:" + if self.op == SUPERTYPE_OF: + op_str = ":>" + return f"{self.type_var} {op_str} {self.target}" + + def __hash__(self) -> int: + return hash((self.type_var, self.op, self.target)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Constraint): + return False + return (self.type_var, self.op, self.target) == (other.type_var, other.op, other.target) + + +def infer_constraints_for_callable( + callee: CallableType, + arg_types: Sequence[Type | None], + arg_kinds: list[ArgKind], + arg_names: Sequence[str | None] | None, + formal_to_actual: list[list[int]], + context: ArgumentInferContext, +) -> list[Constraint]: + """Infer type variable constraints for a callable and actual arguments. + + Return a list of constraints. + """ + constraints: list[Constraint] = [] + mapper = ArgTypeExpander(context) + + param_spec = callee.param_spec() + param_spec_arg_types = [] + param_spec_arg_names = [] + param_spec_arg_kinds = [] + + incomplete_star_mapping = False + for i, actuals in enumerate(formal_to_actual): # TODO: isn't this `enumerate(arg_types)`? + for actual in actuals: + if actual is None and callee.arg_kinds[i] in (ARG_STAR, ARG_STAR2): # type: ignore[unreachable] + # We can't use arguments to infer ParamSpec constraint, if only some + # are present in the current inference pass. + incomplete_star_mapping = True # type: ignore[unreachable] + break + + for i, actuals in enumerate(formal_to_actual): + if isinstance(callee.arg_types[i], UnpackType): + unpack_type = callee.arg_types[i] + assert isinstance(unpack_type, UnpackType) + + # In this case we are binding all the actuals to *args, + # and we want a constraint that the typevar tuple being unpacked + # is equal to a type list of all the actuals. 
+ actual_types = [] + + unpacked_type = get_proper_type(unpack_type.type) + if isinstance(unpacked_type, TypeVarTupleType): + tuple_instance = unpacked_type.tuple_fallback + elif isinstance(unpacked_type, TupleType): + tuple_instance = unpacked_type.partial_fallback + else: + assert False, "mypy bug: unhandled constraint inference case" + + for actual in actuals: + actual_arg_type = arg_types[actual] + if actual_arg_type is None: + continue + + expanded_actual = mapper.expand_actual_type( + actual_arg_type, + arg_kinds[actual], + callee.arg_names[i], + callee.arg_kinds[i], + allow_unpack=True, + ) + + if arg_kinds[actual] != ARG_STAR or isinstance( + get_proper_type(actual_arg_type), TupleType + ): + actual_types.append(expanded_actual) + else: + # If we are expanding an iterable inside * actual, append a homogeneous item instead + actual_types.append( + UnpackType(tuple_instance.copy_modified(args=[expanded_actual])) + ) + + if isinstance(unpacked_type, TypeVarTupleType): + constraints.append( + Constraint( + unpacked_type, + SUPERTYPE_OF, + TupleType(actual_types, unpacked_type.tuple_fallback), + ) + ) + elif isinstance(unpacked_type, TupleType): + # Prefixes get converted to positional args, so technically the only case we + # should have here is like Tuple[Unpack[Ts], Y1, Y2, Y3]. If this turns out + # not to hold we can always handle the prefixes too. + inner_unpack = unpacked_type.items[0] + assert isinstance(inner_unpack, UnpackType) + inner_unpacked_type = get_proper_type(inner_unpack.type) + suffix_len = len(unpacked_type.items) - 1 + if isinstance(inner_unpacked_type, TypeVarTupleType): + # Variadic item can be either *Ts... + constraints.append( + Constraint( + inner_unpacked_type, + SUPERTYPE_OF, + TupleType( + actual_types[:-suffix_len], inner_unpacked_type.tuple_fallback + ), + ) + ) + else: + # ...or it can be a homogeneous tuple. + assert ( + isinstance(inner_unpacked_type, Instance) + and inner_unpacked_type.type.fullname == "builtins.tuple" + ) + for at in actual_types[:-suffix_len]: + constraints.extend( + infer_constraints(inner_unpacked_type.args[0], at, SUPERTYPE_OF) + ) + # Now handle the suffix (if any). + if suffix_len: + for tt, at in zip(unpacked_type.items[1:], actual_types[-suffix_len:]): + constraints.extend(infer_constraints(tt, at, SUPERTYPE_OF)) + else: + assert False, "mypy bug: unhandled constraint inference case" + else: + for actual in actuals: + actual_arg_type = arg_types[actual] + if actual_arg_type is None: + continue + + if param_spec and callee.arg_kinds[i] in (ARG_STAR, ARG_STAR2): + # If actual arguments are mapped to ParamSpec type, we can't infer individual + # constraints, instead store them and infer single constraint at the end. + # It is impossible to map actual kind to formal kind, so use some heuristic. + # This inference is used as a fallback, so relying on heuristic should be OK. 
+ if not incomplete_star_mapping: + param_spec_arg_types.append( + mapper.expand_actual_type( + actual_arg_type, arg_kinds[actual], None, arg_kinds[actual] + ) + ) + actual_kind = arg_kinds[actual] + param_spec_arg_kinds.append( + ARG_POS if actual_kind not in (ARG_STAR, ARG_STAR2) else actual_kind + ) + param_spec_arg_names.append(arg_names[actual] if arg_names else None) + else: + actual_type = mapper.expand_actual_type( + actual_arg_type, + arg_kinds[actual], + callee.arg_names[i], + callee.arg_kinds[i], + ) + c = infer_constraints(callee.arg_types[i], actual_type, SUPERTYPE_OF) + constraints.extend(c) + if ( + param_spec + and not any(c.type_var == param_spec.id for c in constraints) + and not incomplete_star_mapping + ): + # Use ParamSpec constraint from arguments only if there are no other constraints, + # since as explained above it is quite ad-hoc. + constraints.append( + Constraint( + param_spec, + SUPERTYPE_OF, + Parameters( + arg_types=param_spec_arg_types, + arg_kinds=param_spec_arg_kinds, + arg_names=param_spec_arg_names, + imprecise_arg_kinds=True, + ), + ) + ) + if any(isinstance(v, ParamSpecType) for v in callee.variables): + # As a perf optimization filter imprecise constraints only when we can have them. + constraints = filter_imprecise_kinds(constraints) + return constraints + + +def infer_constraints( + template: Type, actual: Type, direction: int, skip_neg_op: bool = False +) -> list[Constraint]: + """Infer type constraints. + + Match a template type, which may contain type variable references, + recursively against a type which does not contain (the same) type + variable references. The result is a list of type constrains of + form 'T is a supertype/subtype of x', where T is a type variable + present in the template and x is a type without reference to type + variables present in the template. + + Assume T and S are type variables. Now the following results can be + calculated (read as '(template, actual) --> result'): + + (T, X) --> T :> X + (X[T], X[Y]) --> T <: Y and T :> Y + ((T, T), (X, Y)) --> T :> X and T :> Y + ((T, S), (X, Y)) --> T :> X and S :> Y + (X[T], Any) --> T <: Any and T :> Any + + The constraints are represented as Constraint objects. If skip_neg_op == True, + then skip adding reverse (polymorphic) constraints (since this is already a call + to infer such constraints). + """ + if any( + get_proper_type(template) == get_proper_type(t) + and get_proper_type(actual) == get_proper_type(a) + for (t, a) in reversed(type_state.inferring) + ): + return [] + if has_recursive_types(template) or isinstance(get_proper_type(template), Instance): + # This case requires special care because it may cause infinite recursion. + # Note that we include Instances because the may be recursive as str(Sequence[str]). + if not has_type_vars(template): + # Return early on an empty branch. + return [] + type_state.inferring.append((template, actual)) + res = _infer_constraints(template, actual, direction, skip_neg_op) + type_state.inferring.pop() + return res + return _infer_constraints(template, actual, direction, skip_neg_op) + + +def _infer_constraints( + template: Type, actual: Type, direction: int, skip_neg_op: bool +) -> list[Constraint]: + orig_template = template + template = get_proper_type(template) + actual = get_proper_type(actual) + + # Type inference shouldn't be affected by whether union types have been simplified. + # We however keep any ErasedType items, so that the caller will see it when using + # checkexpr.has_erased_component(). 
+ if isinstance(template, UnionType): + template = mypy.typeops.make_simplified_union(template.items, keep_erased=True) + if isinstance(actual, UnionType): + actual = mypy.typeops.make_simplified_union(actual.items, keep_erased=True) + + # Ignore Any types from the type suggestion engine to avoid them + # causing us to infer Any in situations where a better job could + # be done otherwise. (This can produce false positives but that + # doesn't really matter because it is all heuristic anyway.) + if isinstance(actual, AnyType) and actual.type_of_any == TypeOfAny.suggestion_engine: + return [] + + # type[A | B] is always represented as type[A] | type[B] internally. + # This makes our constraint solver choke on type[T] <: type[A] | type[B], + # solving T as generic meet(A, B) which is often `object`. Force unwrap such unions + # if both sides are type[...] or unions thereof. See `testTypeVarType` test + type_type_unwrapped = False + if _is_type_type(template) and _is_type_type(actual): + type_type_unwrapped = True + template = _unwrap_type_type(template) + actual = _unwrap_type_type(actual) + + # If the template is simply a type variable, emit a Constraint directly. + # We need to handle this case before handling Unions for two reasons: + # 1. "T <: Union[U1, U2]" is not equivalent to "T <: U1 or T <: U2", + # because T can itself be a union (notably, Union[U1, U2] itself). + # 2. "T :> Union[U1, U2]" is logically equivalent to "T :> U1 and + # T :> U2", but they are not equivalent to the constraint solver, + # which never introduces new Union types (it uses join() instead). + if isinstance(template, TypeVarType): + return [Constraint(template, direction, actual)] + + if ( + isinstance(actual, TypeVarType) + and not actual.id.is_meta_var() + and direction == SUPERTYPE_OF + ): + # Unless template is also a type variable (or a union that contains one), using the upper + # bound for inference will usually give better result for actual that is a type variable. + if not isinstance(template, UnionType) or not any( + isinstance(t, TypeVarType) for t in template.items + ): + actual = get_proper_type(actual.upper_bound) + + # Now handle the case of either template or actual being a Union. + # For a Union to be a subtype of another type, every item of the Union + # must be a subtype of that type, so concatenate the constraints. + if direction == SUBTYPE_OF and isinstance(template, UnionType): + res = [] + for t_item in template.items: + res.extend(infer_constraints(t_item, actual, direction)) + return res + if direction == SUPERTYPE_OF and isinstance(actual, UnionType): + res = [] + for a_item in actual.items: + # `orig_template` has to be preserved intact in case it's recursive. + # If we unwrapped ``type[...]`` previously, wrap the item back again, + # as ``type[...]`` can't be removed from `orig_template`. + if type_type_unwrapped: + a_item = TypeType.make_normalized(a_item) + res.extend(infer_constraints(orig_template, a_item, direction)) + return res + + # Now the potential subtype is known not to be a Union or a type + # variable that we are solving for. In that case, for a Union to + # be a supertype of the potential subtype, some item of the Union + # must be a supertype of it. + if direction == SUBTYPE_OF and isinstance(actual, UnionType): + # If some of items is not a complete type, disregard that. + items = simplify_away_incomplete_types(actual.items) + # We infer constraints eagerly -- try to find constraints for a type + # variable if possible. 
This seems to help with some real-world + # use cases. + return any_constraints( + [infer_constraints_if_possible(template, a_item, direction) for a_item in items], + eager=True, + ) + if direction == SUPERTYPE_OF and isinstance(template, UnionType): + # When the template is a union, we are okay with leaving some + # type variables indeterminate. This helps with some special + # cases, though this isn't very principled. + result = any_constraints( + [ + infer_constraints_if_possible(t_item, actual, direction) + for t_item in template.items + ], + eager=isinstance(actual, AnyType), + ) + if result: + return result + elif has_recursive_types(template) and not has_recursive_types(actual): + return handle_recursive_union(template, actual, direction) + return [] + + # Remaining cases are handled by ConstraintBuilderVisitor. + return template.accept(ConstraintBuilderVisitor(actual, direction, skip_neg_op)) + + +def _is_type_type(tp: ProperType) -> TypeGuard[TypeType | UnionType]: + """Is ``tp`` a ``type[...]`` or a union thereof? + + ``Type[A | B]`` is internally represented as ``type[A] | type[B]``, and this + troubles the solver sometimes. + """ + return ( + isinstance(tp, TypeType) + or isinstance(tp, UnionType) + and all(isinstance(get_proper_type(o), TypeType) for o in tp.items) + ) + + +def _unwrap_type_type(tp: TypeType | UnionType) -> ProperType: + """Extract the inner type from ``type[...]`` expression or a union thereof.""" + if isinstance(tp, TypeType): + return tp.item + return UnionType.make_union([cast(TypeType, get_proper_type(o)).item for o in tp.items]) + + +def infer_constraints_if_possible( + template: Type, actual: Type, direction: int +) -> list[Constraint] | None: + """Like infer_constraints, but return None if the input relation is + known to be unsatisfiable, for example if template=List[T] and actual=int. + (In this case infer_constraints would return [], just like it would for + an automatically satisfied relation like template=List[T] and actual=object.) + """ + if direction == SUBTYPE_OF and not mypy.subtypes.is_subtype(erase_typevars(template), actual): + return None + if direction == SUPERTYPE_OF and not mypy.subtypes.is_subtype( + actual, erase_typevars(template) + ): + return None + if ( + direction == SUPERTYPE_OF + and isinstance(template, TypeVarType) + and not mypy.subtypes.is_subtype(actual, erase_typevars(template.upper_bound)) + ): + # This is not caught by the above branch because of the erase_typevars() call, + # that would return 'Any' for a type variable. + return None + return infer_constraints(template, actual, direction) + + +def select_trivial(options: Sequence[list[Constraint] | None]) -> list[list[Constraint]]: + """Select only those lists where each item is a constraint against Any.""" + res = [] + for option in options: + if option is None: + continue + if all(isinstance(get_proper_type(c.target), AnyType) for c in option): + res.append(option) + return res + + +def merge_with_any(constraint: Constraint) -> Constraint: + """Transform a constraint target into a union with given Any type.""" + target = constraint.target + if is_union_with_any(target): + # Do not produce redundant unions. + return constraint + # TODO: if we will support multiple sources Any, use this here instead. 
+ any_type = AnyType(TypeOfAny.implementation_artifact) + return Constraint( + constraint.origin_type_var, + constraint.op, + UnionType.make_union([target, any_type], target.line, target.column), + ) + + +def handle_recursive_union(template: UnionType, actual: Type, direction: int) -> list[Constraint]: + # This is a hack to special-case things like Union[T, Inst[T]] in recursive types. Although + # it is quite arbitrary, it is a relatively common pattern, so we should handle it well. + # This function may be called when inferring against such union resulted in different + # constraints for each item. Normally we give up in such case, but here we instead split + # the union in two parts, and try inferring sequentially. + non_type_var_items = [t for t in template.items if not isinstance(t, TypeVarType)] + type_var_items = [t for t in template.items if isinstance(t, TypeVarType)] + return infer_constraints( + UnionType.make_union(non_type_var_items), actual, direction + ) or infer_constraints(UnionType.make_union(type_var_items), actual, direction) + + +def any_constraints(options: list[list[Constraint] | None], *, eager: bool) -> list[Constraint]: + """Deduce what we can from a collection of constraint lists. + + It's a given that at least one of the lists must be satisfied. A + None element in the list of options represents an unsatisfiable + constraint and is ignored. Ignore empty constraint lists if eager + is true -- they are always trivially satisfiable. + """ + if eager: + valid_options = [option for option in options if option] + else: + valid_options = [option for option in options if option is not None] + + if not valid_options: + return [] + + if len(valid_options) == 1: + return valid_options[0] + + if all(is_same_constraints(valid_options[0], c) for c in valid_options[1:]): + # Multiple sets of constraints that are all the same. Just pick any one of them. + return valid_options[0] + + if all(is_similar_constraints(valid_options[0], c) for c in valid_options[1:]): + # All options have same structure. In this case we can merge-in trivial + # options (i.e. those that only have Any) and try again. + # TODO: More generally, if a given (variable, direction) pair appears in + # every option, combine the bounds with meet/join always, not just for Any. + trivial_options = select_trivial(valid_options) + if trivial_options and len(trivial_options) < len(valid_options): + merged_options = [] + for option in valid_options: + if option in trivial_options: + continue + merged_options.append([merge_with_any(c) for c in option]) + return any_constraints(list(merged_options), eager=eager) + + # If normal logic didn't work, try excluding trivially unsatisfiable constraint (due to + # upper bounds) from each option, and comparing them again. + filtered_options = [filter_satisfiable(o) for o in options] + if filtered_options != options: + return any_constraints(filtered_options, eager=eager) + + # Try harder: if that didn't work, try to strip typevars that aren't meta vars. + # Note this is what we would always do, but unfortunately some callers may not + # set the meta var status correctly (for historical reasons), so we use this as + # a fallback only. + filtered_options = [exclude_non_meta_vars(o) for o in options] + if filtered_options != options: + return any_constraints(filtered_options, eager=eager) + + # Otherwise, there are either no valid options or multiple, inconsistent valid + # options. Give up and deduce nothing. 
+ return [] + + +def filter_satisfiable(option: list[Constraint] | None) -> list[Constraint] | None: + """Keep only constraints that can possibly be satisfied. + + Currently, we filter out constraints where target is not a subtype of the upper bound. + Since those can be never satisfied. We may add more cases in future if it improves type + inference. + """ + if not option: + return option + + satisfiable = [] + for c in option: + if isinstance(c.origin_type_var, TypeVarType) and c.origin_type_var.values: + if any( + mypy.subtypes.is_subtype(c.target, value) for value in c.origin_type_var.values + ): + satisfiable.append(c) + elif mypy.subtypes.is_subtype(c.target, c.origin_type_var.upper_bound): + satisfiable.append(c) + if not satisfiable: + return None + return satisfiable + + +def exclude_non_meta_vars(option: list[Constraint] | None) -> list[Constraint] | None: + # If we had an empty list, keep it intact + if not option: + return option + # However, if none of the options actually references meta vars, better remove + # this constraint entirely. + return [c for c in option if c.type_var.is_meta_var()] or None + + +def is_same_constraints(x: list[Constraint], y: list[Constraint]) -> bool: + for c1 in x: + if not any(is_same_constraint(c1, c2) for c2 in y): + return False + for c1 in y: + if not any(is_same_constraint(c1, c2) for c2 in x): + return False + return True + + +def is_same_constraint(c1: Constraint, c2: Constraint) -> bool: + # Ignore direction when comparing constraints against Any. + skip_op_check = isinstance(get_proper_type(c1.target), AnyType) and isinstance( + get_proper_type(c2.target), AnyType + ) + return ( + c1.type_var == c2.type_var + and (c1.op == c2.op or skip_op_check) + and mypy.subtypes.is_same_type(c1.target, c2.target) + ) + + +def is_similar_constraints(x: list[Constraint], y: list[Constraint]) -> bool: + """Check that two lists of constraints have similar structure. + + This means that each list has same type variable plus direction pairs (i.e we + ignore the target). Except for constraints where target is Any type, there + we ignore direction as well. + """ + return _is_similar_constraints(x, y) and _is_similar_constraints(y, x) + + +def _is_similar_constraints(x: list[Constraint], y: list[Constraint]) -> bool: + """Check that every constraint in the first list has a similar one in the second. + + See docstring above for definition of similarity. + """ + for c1 in x: + has_similar = False + for c2 in y: + # Ignore direction when either constraint is against Any. + skip_op_check = isinstance(get_proper_type(c1.target), AnyType) or isinstance( + get_proper_type(c2.target), AnyType + ) + if c1.type_var == c2.type_var and (c1.op == c2.op or skip_op_check): + has_similar = True + break + if not has_similar: + return False + return True + + +def simplify_away_incomplete_types(types: Iterable[Type]) -> list[Type]: + complete = [typ for typ in types if is_complete_type(typ)] + if complete: + return complete + else: + return list(types) + + +def is_complete_type(typ: Type) -> bool: + """Is a type complete? + + A complete doesn't have uninhabited type components or (when not in strict + optional mode) None components. 
+ """ + return typ.accept(CompleteTypeVisitor()) + + +class CompleteTypeVisitor(BoolTypeQuery): + def __init__(self) -> None: + super().__init__(ALL_STRATEGY) + + def visit_uninhabited_type(self, t: UninhabitedType) -> bool: + return False + + +class ConstraintBuilderVisitor(TypeVisitor[list[Constraint]]): + """Visitor class for inferring type constraints.""" + + # The type that is compared against a template + # TODO: The value may be None. Is that actually correct? + actual: ProperType + + def __init__(self, actual: ProperType, direction: int, skip_neg_op: bool) -> None: + # Direction must be SUBTYPE_OF or SUPERTYPE_OF. + self.actual = actual + self.direction = direction + # Whether to skip polymorphic inference (involves inference in opposite direction) + # this is used to prevent infinite recursion when both template and actual are + # generic callables. + self.skip_neg_op = skip_neg_op + + # Trivial leaf types + + def visit_unbound_type(self, template: UnboundType) -> list[Constraint]: + return [] + + def visit_any(self, template: AnyType) -> list[Constraint]: + return [] + + def visit_none_type(self, template: NoneType) -> list[Constraint]: + return [] + + def visit_uninhabited_type(self, template: UninhabitedType) -> list[Constraint]: + return [] + + def visit_erased_type(self, template: ErasedType) -> list[Constraint]: + return [] + + def visit_deleted_type(self, template: DeletedType) -> list[Constraint]: + return [] + + def visit_literal_type(self, template: LiteralType) -> list[Constraint]: + return [] + + # Errors + + def visit_partial_type(self, template: PartialType) -> list[Constraint]: + # We can't do anything useful with a partial type here. + assert False, "Internal error" + + # Non-trivial leaf type + + def visit_type_var(self, template: TypeVarType) -> list[Constraint]: + assert False, ( + "Unexpected TypeVarType in ConstraintBuilderVisitor" + " (should have been handled in infer_constraints)" + ) + + def visit_param_spec(self, template: ParamSpecType) -> list[Constraint]: + # Can't infer ParamSpecs from component values (only via Callable[P, T]). + return [] + + def visit_type_var_tuple(self, template: TypeVarTupleType) -> list[Constraint]: + raise NotImplementedError + + def visit_unpack_type(self, template: UnpackType) -> list[Constraint]: + raise RuntimeError("Mypy bug: unpack should be handled at a higher level.") + + def visit_parameters(self, template: Parameters) -> list[Constraint]: + # Constraining Any against C[P] turns into infer_against_any([P], Any) + if isinstance(self.actual, AnyType): + return self.infer_against_any(template.arg_types, self.actual) + if type_state.infer_polymorphic and isinstance(self.actual, Parameters): + # For polymorphic inference we need to be able to infer secondary constraints + # in situations like [x: T] <: P <: [x: int]. + return infer_callable_arguments_constraints(template, self.actual, self.direction) + if type_state.infer_polymorphic and isinstance(self.actual, ParamSpecType): + # Similar for [x: T] <: Q <: Concatenate[int, P]. + return infer_callable_arguments_constraints( + template, self.actual.prefix, self.direction + ) + # There also may be unpatched types after a user error, simply ignore them. 
+ return [] + + # Non-leaf types + + def visit_instance(self, template: Instance) -> list[Constraint]: + original_actual = actual = self.actual + res: list[Constraint] = [] + if isinstance(actual, (CallableType, Overloaded)) and template.type.is_protocol: + if "__call__" in template.type.protocol_members: + # Special case: a generic callback protocol + if not any(template == t for t in template.type.inferring): + template.type.inferring.append(template) + call = mypy.subtypes.find_member( + "__call__", template, actual, is_operator=True + ) + assert call is not None + if ( + self.direction == SUPERTYPE_OF + and mypy.subtypes.is_subtype(actual, erase_typevars(call)) + or self.direction == SUBTYPE_OF + and mypy.subtypes.is_subtype(erase_typevars(call), actual) + ): + res.extend(infer_constraints(call, actual, self.direction)) + template.type.inferring.pop() + if isinstance(actual, CallableType) and actual.fallback is not None: + if ( + actual.is_type_obj() + and template.type.is_protocol + and self.direction == SUPERTYPE_OF + ): + ret_type = get_proper_type(actual.ret_type) + if isinstance(ret_type, TupleType): + ret_type = mypy.typeops.tuple_fallback(ret_type) + if isinstance(ret_type, Instance): + res.extend( + self.infer_constraints_from_protocol_members( + ret_type, template, ret_type, template, class_obj=True + ) + ) + actual = actual.fallback + if isinstance(actual, TypeType) and template.type.is_protocol: + if self.direction == SUPERTYPE_OF: + a_item = actual.item + if isinstance(a_item, Instance): + res.extend( + self.infer_constraints_from_protocol_members( + a_item, template, a_item, template, class_obj=True + ) + ) + # Infer constraints for Type[T] via metaclass of T when it makes sense. + if isinstance(a_item, TypeVarType): + a_item = get_proper_type(a_item.upper_bound) + if isinstance(a_item, Instance) and a_item.type.metaclass_type: + res.extend( + self.infer_constraints_from_protocol_members( + a_item.type.metaclass_type, template, actual, template + ) + ) + + if isinstance(actual, Overloaded) and actual.fallback is not None: + actual = actual.fallback + if isinstance(actual, TypedDictType): + actual = actual.as_anonymous().fallback + if isinstance(actual, LiteralType): + actual = actual.fallback + if isinstance(actual, Instance): + instance = actual + erased = erase_typevars(template) + assert isinstance(erased, Instance) # type: ignore[misc] + # We always try nominal inference if possible, + # it is much faster than the structural one. + if self.direction == SUBTYPE_OF and template.type.has_base(instance.type.fullname): + mapped = map_instance_to_supertype(template, instance.type) + tvars = mapped.type.defn.type_vars + + if instance.type.has_type_var_tuple_type: + # Variadic types need special handling to map each type argument to + # the correct corresponding type variable. 
+ assert instance.type.type_var_tuple_prefix is not None + assert instance.type.type_var_tuple_suffix is not None + prefix_len = instance.type.type_var_tuple_prefix + suffix_len = instance.type.type_var_tuple_suffix + tvt = instance.type.defn.type_vars[prefix_len] + assert isinstance(tvt, TypeVarTupleType) + fallback = tvt.tuple_fallback + i_prefix, i_middle, i_suffix = split_with_prefix_and_suffix( + instance.args, prefix_len, suffix_len + ) + m_prefix, m_middle, m_suffix = split_with_prefix_and_suffix( + mapped.args, prefix_len, suffix_len + ) + instance_args = i_prefix + (TupleType(list(i_middle), fallback),) + i_suffix + mapped_args = m_prefix + (TupleType(list(m_middle), fallback),) + m_suffix + else: + mapped_args = mapped.args + instance_args = instance.args + + # N.B: We use zip instead of indexing because the lengths might have + # mismatches during daemon reprocessing. + for tvar, mapped_arg, instance_arg in zip(tvars, mapped_args, instance_args): + if isinstance(tvar, TypeVarType): + # The constraints for generic type parameters depend on variance. + # Include constraints from both directions if invariant. + if tvar.variance != CONTRAVARIANT: + res.extend(infer_constraints(mapped_arg, instance_arg, self.direction)) + if tvar.variance != COVARIANT: + res.extend( + infer_constraints(mapped_arg, instance_arg, neg_op(self.direction)) + ) + elif isinstance(tvar, ParamSpecType) and isinstance(mapped_arg, ParamSpecType): + prefix = mapped_arg.prefix + if isinstance(instance_arg, Parameters): + # No such thing as variance for ParamSpecs, consider them invariant + # TODO: constraints between prefixes using + # infer_callable_arguments_constraints() + suffix: Type = instance_arg.copy_modified( + instance_arg.arg_types[len(prefix.arg_types) :], + instance_arg.arg_kinds[len(prefix.arg_kinds) :], + instance_arg.arg_names[len(prefix.arg_names) :], + ) + res.append(Constraint(mapped_arg, SUBTYPE_OF, suffix)) + res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix)) + elif isinstance(instance_arg, ParamSpecType): + suffix = instance_arg.copy_modified( + prefix=Parameters( + instance_arg.prefix.arg_types[len(prefix.arg_types) :], + instance_arg.prefix.arg_kinds[len(prefix.arg_kinds) :], + instance_arg.prefix.arg_names[len(prefix.arg_names) :], + ) + ) + res.append(Constraint(mapped_arg, SUBTYPE_OF, suffix)) + res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix)) + elif isinstance(tvar, TypeVarTupleType): + # Handle variadic type variables covariantly for consistency. + res.extend(infer_constraints(mapped_arg, instance_arg, self.direction)) + + return res + elif self.direction == SUPERTYPE_OF and instance.type.has_base(template.type.fullname): + mapped = map_instance_to_supertype(instance, template.type) + tvars = template.type.defn.type_vars + if template.type.has_type_var_tuple_type: + # Variadic types need special handling to map each type argument to + # the correct corresponding type variable. 
+ assert template.type.type_var_tuple_prefix is not None + assert template.type.type_var_tuple_suffix is not None + prefix_len = template.type.type_var_tuple_prefix + suffix_len = template.type.type_var_tuple_suffix + tvt = template.type.defn.type_vars[prefix_len] + assert isinstance(tvt, TypeVarTupleType) + fallback = tvt.tuple_fallback + t_prefix, t_middle, t_suffix = split_with_prefix_and_suffix( + template.args, prefix_len, suffix_len + ) + m_prefix, m_middle, m_suffix = split_with_prefix_and_suffix( + mapped.args, prefix_len, suffix_len + ) + template_args = t_prefix + (TupleType(list(t_middle), fallback),) + t_suffix + mapped_args = m_prefix + (TupleType(list(m_middle), fallback),) + m_suffix + else: + mapped_args = mapped.args + template_args = template.args + # N.B: We use zip instead of indexing because the lengths might have + # mismatches during daemon reprocessing. + for tvar, mapped_arg, template_arg in zip(tvars, mapped_args, template_args): + if isinstance(tvar, TypeVarType): + # The constraints for generic type parameters depend on variance. + # Include constraints from both directions if invariant. + if tvar.variance != CONTRAVARIANT: + res.extend(infer_constraints(template_arg, mapped_arg, self.direction)) + if tvar.variance != COVARIANT: + res.extend( + infer_constraints(template_arg, mapped_arg, neg_op(self.direction)) + ) + elif isinstance(tvar, ParamSpecType) and isinstance( + template_arg, ParamSpecType + ): + prefix = template_arg.prefix + if isinstance(mapped_arg, Parameters): + # No such thing as variance for ParamSpecs, consider them invariant + # TODO: constraints between prefixes using + # infer_callable_arguments_constraints() + suffix = mapped_arg.copy_modified( + mapped_arg.arg_types[len(prefix.arg_types) :], + mapped_arg.arg_kinds[len(prefix.arg_kinds) :], + mapped_arg.arg_names[len(prefix.arg_names) :], + ) + res.append(Constraint(template_arg, SUBTYPE_OF, suffix)) + res.append(Constraint(template_arg, SUPERTYPE_OF, suffix)) + elif isinstance(mapped_arg, ParamSpecType): + suffix = mapped_arg.copy_modified( + prefix=Parameters( + mapped_arg.prefix.arg_types[len(prefix.arg_types) :], + mapped_arg.prefix.arg_kinds[len(prefix.arg_kinds) :], + mapped_arg.prefix.arg_names[len(prefix.arg_names) :], + ) + ) + res.append(Constraint(template_arg, SUBTYPE_OF, suffix)) + res.append(Constraint(template_arg, SUPERTYPE_OF, suffix)) + elif isinstance(tvar, TypeVarTupleType): + # Consider variadic type variables to be invariant. + res.extend(infer_constraints(template_arg, mapped_arg, SUBTYPE_OF)) + res.extend(infer_constraints(template_arg, mapped_arg, SUPERTYPE_OF)) + return res + if ( + template.type.is_protocol + and self.direction == SUPERTYPE_OF + and + # We avoid infinite recursion for structural subtypes by checking + # whether this type already appeared in the inference chain. + # This is a conservative way to break the inference cycles. + # It never produces any "false" constraints but gives up soon + # on purely structural inference cycles, see #3829. + # Note that we use is_protocol_implementation instead of is_subtype + # because some type may be considered a subtype of a protocol + # due to _promote, but still not implement the protocol. 
+ not any(template == t for t in reversed(template.type.inferring)) + and mypy.subtypes.is_protocol_implementation(instance, erased, skip=["__call__"]) + ): + template.type.inferring.append(template) + res.extend( + self.infer_constraints_from_protocol_members( + instance, template, original_actual, template + ) + ) + template.type.inferring.pop() + return res + elif ( + instance.type.is_protocol + and self.direction == SUBTYPE_OF + and + # We avoid infinite recursion for structural subtypes also here. + not any(instance == i for i in reversed(instance.type.inferring)) + and mypy.subtypes.is_protocol_implementation(erased, instance, skip=["__call__"]) + ): + instance.type.inferring.append(instance) + res.extend( + self.infer_constraints_from_protocol_members( + instance, template, template, instance + ) + ) + instance.type.inferring.pop() + return res + if res: + return res + + if isinstance(actual, AnyType): + return self.infer_against_any(template.args, actual) + if ( + isinstance(actual, TupleType) + and is_named_instance(template, TUPLE_LIKE_INSTANCE_NAMES) + and self.direction == SUPERTYPE_OF + ): + for item in actual.items: + if isinstance(item, UnpackType): + unpacked = get_proper_type(item.type) + if isinstance(unpacked, TypeVarTupleType): + # Cannot infer anything for T from [T, ...] <: *Ts + continue + assert ( + isinstance(unpacked, Instance) + and unpacked.type.fullname == "builtins.tuple" + ) + item = unpacked.args[0] + cb = infer_constraints(template.args[0], item, SUPERTYPE_OF) + res.extend(cb) + return res + elif isinstance(actual, TupleType) and self.direction == SUPERTYPE_OF: + return infer_constraints(template, mypy.typeops.tuple_fallback(actual), self.direction) + elif isinstance(actual, TypeVarType): + if not actual.values and not actual.id.is_meta_var(): + return infer_constraints(template, actual.upper_bound, self.direction) + return [] + elif isinstance(actual, ParamSpecType): + return infer_constraints(template, actual.upper_bound, self.direction) + elif isinstance(actual, TypeVarTupleType): + raise NotImplementedError + else: + return [] + + def infer_constraints_from_protocol_members( + self, + instance: Instance, + template: Instance, + subtype: Type, + protocol: Instance, + class_obj: bool = False, + ) -> list[Constraint]: + """Infer constraints for situations where either 'template' or 'instance' is a protocol. + + The 'protocol' is the one of two that is an instance of protocol type, 'subtype' + is the type used to bind self during inference. Currently, we just infer constrains for + every protocol member type (both ways for settable members). + """ + res = [] + for member in protocol.type.protocol_members: + inst = mypy.subtypes.find_member(member, instance, subtype, class_obj=class_obj) + temp = mypy.subtypes.find_member(member, template, subtype) + if inst is None or temp is None: + if member == "__call__": + continue + return [] # See #11020 + # The above is safe since at this point we know that 'instance' is a subtype + # of (erased) 'template', therefore it defines all protocol members + if class_obj: + # For class objects we must only infer constraints if possible, otherwise it + # can lead to confusion between class and instance, for example StrEnum is + # Iterable[str] for an instance, but Iterable[StrEnum] for a class object. + if not mypy.subtypes.is_subtype( + inst, erase_typevars(temp), ignore_pos_arg_names=True + ): + continue + # This exception matches the one in typeops.py, see PR #14121 for context. 
+ if member == "__call__" and instance.type.is_metaclass(precise=True): + continue + res.extend(infer_constraints(temp, inst, self.direction)) + if mypy.subtypes.IS_SETTABLE in mypy.subtypes.get_member_flags(member, protocol): + # Settable members are invariant, add opposite constraints + res.extend(infer_constraints(temp, inst, neg_op(self.direction))) + return res + + def visit_callable_type(self, template: CallableType) -> list[Constraint]: + # Normalize callables before matching against each other. + # Note that non-normalized callables can be created in annotations + # using e.g. callback protocols. + # TODO: check that callables match? Ideally we should not infer constraints + # callables that can never be subtypes of one another in given direction. + template = template.with_unpacked_kwargs().with_normalized_var_args() + extra_tvars = False + if isinstance(self.actual, CallableType): + res: list[Constraint] = [] + cactual = self.actual.with_unpacked_kwargs().with_normalized_var_args() + param_spec = template.param_spec() + + template_ret_type, cactual_ret_type = template.ret_type, cactual.ret_type + if template.type_guard is not None and cactual.type_guard is not None: + template_ret_type = template.type_guard + cactual_ret_type = cactual.type_guard + + if template.type_is is not None and cactual.type_is is not None: + template_ret_type = template.type_is + cactual_ret_type = cactual.type_is + + res.extend(infer_constraints(template_ret_type, cactual_ret_type, self.direction)) + + if param_spec is None: + # TODO: Erase template variables if it is generic? + if ( + type_state.infer_polymorphic + and cactual.variables + and not self.skip_neg_op + # Technically, the correct inferred type for application of e.g. + # Callable[..., T] -> Callable[..., T] (with literal ellipsis), to a generic + # like U -> U, should be Callable[..., Any], but if U is a self-type, we can + # allow it to leak, to be later bound to self. A bunch of existing code + # depends on this old behaviour. + and not ( + any(tv.id.is_self() for tv in cactual.variables) + and template.is_ellipsis_args + ) + ): + # If the actual callable is generic, infer constraints in the opposite + # direction, and indicate to the solver there are extra type variables + # to solve for (see more details in mypy/solve.py). + res.extend( + infer_constraints( + cactual, template, neg_op(self.direction), skip_neg_op=True + ) + ) + extra_tvars = True + + # We can't infer constraints from arguments if the template is Callable[..., T] + # (with literal '...'). + if not template.is_ellipsis_args: + unpack_present = find_unpack_in_list(template.arg_types) + # When both ParamSpec and TypeVarTuple are present, things become messy + # quickly. For now, we only allow ParamSpec to "capture" TypeVarTuple, + # but not vice versa. + # TODO: infer more from prefixes when possible. + if unpack_present is not None and not cactual.param_spec(): + # We need to re-normalize args to the form they appear in tuples, + # for callables we always pack the suffix inside another tuple. + unpack = template.arg_types[unpack_present] + assert isinstance(unpack, UnpackType) + tuple_type = get_tuple_fallback_from_unpack(unpack) + template_types = repack_callable_args(template, tuple_type) + actual_types = repack_callable_args(cactual, tuple_type) + # Now we can use the same general helper as for tuple types. 
+ unpack_constraints = build_constraints_for_simple_unpack( + template_types, actual_types, neg_op(self.direction) + ) + res.extend(unpack_constraints) + else: + # TODO: do we need some special-casing when unpack is present in actual + # callable but not in template callable? + res.extend( + infer_callable_arguments_constraints(template, cactual, self.direction) + ) + else: + prefix = param_spec.prefix + prefix_len = len(prefix.arg_types) + cactual_ps = cactual.param_spec() + + if type_state.infer_polymorphic and cactual.variables and not self.skip_neg_op: + # Similar logic to the branch above. + res.extend( + infer_constraints( + cactual, template, neg_op(self.direction), skip_neg_op=True + ) + ) + extra_tvars = True + + # Compare prefixes as well + cactual_prefix = cactual.copy_modified( + arg_types=cactual.arg_types[:prefix_len], + arg_kinds=cactual.arg_kinds[:prefix_len], + arg_names=cactual.arg_names[:prefix_len], + ) + res.extend( + infer_callable_arguments_constraints(prefix, cactual_prefix, self.direction) + ) + + param_spec_target: Type | None = None + if not cactual_ps: + max_prefix_len = len([k for k in cactual.arg_kinds if k in (ARG_POS, ARG_OPT)]) + prefix_len = min(prefix_len, max_prefix_len) + param_spec_target = Parameters( + arg_types=cactual.arg_types[prefix_len:], + arg_kinds=cactual.arg_kinds[prefix_len:], + arg_names=cactual.arg_names[prefix_len:], + variables=cactual.variables if not type_state.infer_polymorphic else [], + imprecise_arg_kinds=cactual.imprecise_arg_kinds, + ) + else: + if len(param_spec.prefix.arg_types) <= len(cactual_ps.prefix.arg_types): + param_spec_target = cactual_ps.copy_modified( + prefix=Parameters( + arg_types=cactual_ps.prefix.arg_types[prefix_len:], + arg_kinds=cactual_ps.prefix.arg_kinds[prefix_len:], + arg_names=cactual_ps.prefix.arg_names[prefix_len:], + imprecise_arg_kinds=cactual_ps.prefix.imprecise_arg_kinds, + ) + ) + if param_spec_target is not None: + res.append(Constraint(param_spec, self.direction, param_spec_target)) + if extra_tvars: + for c in res: + c.extra_tvars += cactual.variables + return res + elif isinstance(self.actual, AnyType): + param_spec = template.param_spec() + any_type = AnyType(TypeOfAny.from_another_any, source_any=self.actual) + if param_spec is None: + # FIX what if generic + res = self.infer_against_any(template.arg_types, self.actual) + else: + res = [ + Constraint( + param_spec, + SUBTYPE_OF, + Parameters([any_type, any_type], [ARG_STAR, ARG_STAR2], [None, None]), + ) + ] + res.extend(infer_constraints(template.ret_type, any_type, self.direction)) + return res + elif isinstance(self.actual, Overloaded): + return self.infer_against_overloaded(self.actual, template) + elif isinstance(self.actual, TypeType): + return infer_constraints(template.ret_type, self.actual.item, self.direction) + elif isinstance(self.actual, Instance): + # Instances with __call__ method defined are considered structural + # subtypes of Callable with a compatible signature. + call = mypy.subtypes.find_member( + "__call__", self.actual, self.actual, is_operator=True + ) + if call: + return infer_constraints(template, call, self.direction) + else: + return [] + else: + return [] + + def infer_against_overloaded( + self, overloaded: Overloaded, template: CallableType + ) -> list[Constraint]: + # Create constraints by matching an overloaded type against a template. + # This is tricky to do in general. We cheat by only matching against + # the first overload item that is callable compatible. 
This + # seems to work somewhat well, but we should really use a more + # reliable technique. + item = find_matching_overload_item(overloaded, template) + return infer_constraints(template, item, self.direction) + + def visit_tuple_type(self, template: TupleType) -> list[Constraint]: + actual = self.actual + unpack_index = find_unpack_in_list(template.items) + is_varlength_tuple = ( + isinstance(actual, Instance) and actual.type.fullname == "builtins.tuple" + ) + + if isinstance(actual, TupleType) or is_varlength_tuple: + res: list[Constraint] = [] + if unpack_index is not None: + if is_varlength_tuple: + # Variadic tuple can be only a supertype of a tuple type, but even if + # direction is opposite, inferring something may give better error messages. + unpack_type = template.items[unpack_index] + assert isinstance(unpack_type, UnpackType) + unpacked_type = get_proper_type(unpack_type.type) + if isinstance(unpacked_type, TypeVarTupleType): + res = [ + Constraint(type_var=unpacked_type, op=self.direction, target=actual) + ] + else: + assert ( + isinstance(unpacked_type, Instance) + and unpacked_type.type.fullname == "builtins.tuple" + ) + res = infer_constraints(unpacked_type, actual, self.direction) + assert isinstance(actual, Instance) # ensured by is_varlength_tuple == True + for i, ti in enumerate(template.items): + if i == unpack_index: + # This one we just handled above. + continue + # For Tuple[T, *Ts, S] <: tuple[X, ...] infer also T <: X and S <: X. + res.extend(infer_constraints(ti, actual.args[0], self.direction)) + return res + else: + assert isinstance(actual, TupleType) + unpack_constraints = build_constraints_for_simple_unpack( + template.items, actual.items, self.direction + ) + actual_items: tuple[Type, ...] = () + template_items: tuple[Type, ...] = () + res.extend(unpack_constraints) + elif isinstance(actual, TupleType): + a_unpack_index = find_unpack_in_list(actual.items) + if a_unpack_index is not None: + # The case where template tuple doesn't have an unpack, but actual tuple + # has an unpack. We can infer something if actual unpack is a variadic tuple. + # Tuple[T, S, U] <: tuple[X, *tuple[Y, ...], Z] => T <: X, S <: Y, U <: Z. + a_unpack = actual.items[a_unpack_index] + assert isinstance(a_unpack, UnpackType) + a_unpacked = get_proper_type(a_unpack.type) + if len(actual.items) + 1 <= len(template.items): + a_prefix_len = a_unpack_index + a_suffix_len = len(actual.items) - a_unpack_index - 1 + t_prefix, t_middle, t_suffix = split_with_prefix_and_suffix( + tuple(template.items), a_prefix_len, a_suffix_len + ) + actual_items = tuple(actual.items[:a_prefix_len]) + if a_suffix_len: + actual_items += tuple(actual.items[-a_suffix_len:]) + template_items = t_prefix + t_suffix + if isinstance(a_unpacked, Instance): + assert a_unpacked.type.fullname == "builtins.tuple" + for tm in t_middle: + res.extend( + infer_constraints(tm, a_unpacked.args[0], self.direction) + ) + else: + actual_items = () + template_items = () + else: + actual_items = tuple(actual.items) + template_items = tuple(template.items) + else: + return res + + # Cases above will return if actual wasn't a TupleType. + assert isinstance(actual, TupleType) + if len(actual_items) == len(template_items): + if ( + actual.partial_fallback.type.is_named_tuple + and template.partial_fallback.type.is_named_tuple + ): + # For named tuples using just the fallbacks usually gives better results. 
+ return res + infer_constraints( + template.partial_fallback, actual.partial_fallback, self.direction + ) + for i in range(len(template_items)): + res.extend( + infer_constraints(template_items[i], actual_items[i], self.direction) + ) + res.extend( + infer_constraints( + template.partial_fallback, actual.partial_fallback, self.direction + ) + ) + return res + elif isinstance(actual, AnyType): + return self.infer_against_any(template.items, actual) + else: + return [] + + def visit_typeddict_type(self, template: TypedDictType) -> list[Constraint]: + actual = self.actual + if isinstance(actual, TypedDictType): + res: list[Constraint] = [] + # NOTE: Non-matching keys are ignored. Compatibility is checked + # elsewhere so this shouldn't be unsafe. + for item_name, template_item_type, actual_item_type in template.zip(actual): + res.extend(infer_constraints(template_item_type, actual_item_type, self.direction)) + return res + elif isinstance(actual, AnyType): + return self.infer_against_any(template.items.values(), actual) + else: + return [] + + def visit_union_type(self, template: UnionType) -> list[Constraint]: + assert False, ( + "Unexpected UnionType in ConstraintBuilderVisitor" + " (should have been handled in infer_constraints)" + ) + + def visit_type_alias_type(self, template: TypeAliasType) -> list[Constraint]: + assert False, f"This should be never called, got {template}" + + def infer_against_any(self, types: Iterable[Type], any_type: AnyType) -> list[Constraint]: + res: list[Constraint] = [] + # Some items may be things like `*Tuple[*Ts, T]` for example from callable types with + # suffix after *arg, so flatten them. + for t in flatten_nested_tuples(types): + if isinstance(t, UnpackType): + if isinstance(t.type, TypeVarTupleType): + res.append(Constraint(t.type, self.direction, any_type)) + else: + unpacked = get_proper_type(t.type) + assert isinstance(unpacked, Instance) + res.extend(infer_constraints(unpacked, any_type, self.direction)) + else: + # Note that we ignore variance and simply always use the + # original direction. This is because for Any targets direction is + # irrelevant in most cases, see e.g. is_same_constraint(). 
+ res.extend(infer_constraints(t, any_type, self.direction)) + return res + + def visit_overloaded(self, template: Overloaded) -> list[Constraint]: + if isinstance(self.actual, CallableType): + items = find_matching_overload_items(template, self.actual) + else: + items = template.items + res: list[Constraint] = [] + for t in items: + res.extend(infer_constraints(t, self.actual, self.direction)) + return res + + def visit_type_type(self, template: TypeType) -> list[Constraint]: + if isinstance(self.actual, CallableType): + return infer_constraints(template.item, self.actual.ret_type, self.direction) + elif isinstance(self.actual, Overloaded): + return infer_constraints(template.item, self.actual.items[0].ret_type, self.direction) + elif isinstance(self.actual, TypeType): + return infer_constraints(template.item, self.actual.item, self.direction) + elif isinstance(self.actual, AnyType): + return infer_constraints(template.item, self.actual, self.direction) + else: + return [] + + +def neg_op(op: int) -> int: + """Map SubtypeOf to SupertypeOf and vice versa.""" + + if op == SUBTYPE_OF: + return SUPERTYPE_OF + elif op == SUPERTYPE_OF: + return SUBTYPE_OF + else: + raise ValueError(f"Invalid operator {op}") + + +def find_matching_overload_item(overloaded: Overloaded, template: CallableType) -> CallableType: + """Disambiguate overload item against a template.""" + items = overloaded.items + for item in items: + # Return type may be indeterminate in the template, so ignore it when performing a + # subtype check. + if mypy.subtypes.is_callable_compatible( + item, + template, + is_compat=mypy.subtypes.is_subtype, + is_proper_subtype=False, + ignore_return=True, + ): + return item + # Fall back to the first item if we can't find a match. This is totally arbitrary -- + # maybe we should just bail out at this point. + return items[0] + + +def find_matching_overload_items( + overloaded: Overloaded, template: CallableType +) -> list[CallableType]: + """Like find_matching_overload_item, but return all matches, not just the first.""" + items = overloaded.items + res = [] + for item in items: + # Return type may be indeterminate in the template, so ignore it when performing a + # subtype check. + if mypy.subtypes.is_callable_compatible( + item, + template, + is_compat=mypy.subtypes.is_subtype, + is_proper_subtype=False, + ignore_return=True, + ): + res.append(item) + if not res: + # Falling back to all items if we can't find a match is pretty arbitrary, but + # it maintains backward compatibility. + res = items.copy() + return res + + +def get_tuple_fallback_from_unpack(unpack: UnpackType) -> TypeInfo: + """Get builtins.tuple type from available types to construct homogeneous tuples.""" + tp = get_proper_type(unpack.type) + if isinstance(tp, Instance) and tp.type.fullname == "builtins.tuple": + return tp.type + if isinstance(tp, TypeVarTupleType): + return tp.tuple_fallback.type + if isinstance(tp, TupleType): + for base in tp.partial_fallback.type.mro: + if base.fullname == "builtins.tuple": + return base + assert False, "Invalid unpack type" + + +def repack_callable_args(callable: CallableType, tuple_type: TypeInfo) -> list[Type]: + """Present callable with star unpack in a normalized form. + + Since positional arguments cannot follow star argument, they are packed in a suffix, + while prefix is represented as individual positional args. We want to put all in a single + list with unpack in the middle, and prefix/suffix on the sides (as they would appear + in e.g. a TupleType). 
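+
+    For example (informally): a normalized callable with arguments
+    (x: int, y: str, *args: *tuple[*Ts, bytes]) is repacked as the single list
+    [int, str, *Ts, bytes], while a plain *args: X is first re-normalized to
+    *args: *tuple[X, ...] so it still contributes exactly one unpack item.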
+ """ + if ARG_STAR not in callable.arg_kinds: + return callable.arg_types + star_index = callable.arg_kinds.index(ARG_STAR) + arg_types = callable.arg_types[:star_index] + star_type = callable.arg_types[star_index] + suffix_types = [] + if not isinstance(star_type, UnpackType): + # Re-normalize *args: X -> *args: *tuple[X, ...] + star_type = UnpackType(Instance(tuple_type, [star_type])) + else: + tp = get_proper_type(star_type.type) + if isinstance(tp, TupleType): + assert isinstance(tp.items[0], UnpackType) + star_type = tp.items[0] + suffix_types = tp.items[1:] + return arg_types + [star_type] + suffix_types + + +def build_constraints_for_simple_unpack( + template_args: list[Type], actual_args: list[Type], direction: int +) -> list[Constraint]: + """Infer constraints between two lists of types with variadic items. + + This function is only supposed to be called when a variadic item is present in templates. + If there is no variadic item the actuals, we simply use split_with_prefix_and_suffix() + and infer prefix <: prefix, suffix <: suffix, variadic <: middle. If there is a variadic + item in the actuals we need to be more careful, only common prefix/suffix can generate + constraints, also we can only infer constraints for variadic template item, if template + prefix/suffix are shorter that actual ones, otherwise there may be partial overlap + between variadic items, for example if template prefix is longer: + + templates: T1, T2, Ts, Ts, Ts, ... + actuals: A1, As, As, As, ... + + Note: this function can only be called for builtin variadic constructors: Tuple and Callable. + For instances, you should first find correct type argument mapping. + """ + template_unpack = find_unpack_in_list(template_args) + assert template_unpack is not None + template_prefix = template_unpack + template_suffix = len(template_args) - template_prefix - 1 + + t_unpack = None + res = [] + + actual_unpack = find_unpack_in_list(actual_args) + if actual_unpack is None: + t_unpack = template_args[template_unpack] + if template_prefix + template_suffix > len(actual_args): + # These can't be subtypes of each-other, return fast. + assert isinstance(t_unpack, UnpackType) + if isinstance(t_unpack.type, TypeVarTupleType): + # Set TypeVarTuple to empty to improve error messages. + return [ + Constraint( + t_unpack.type, direction, TupleType([], t_unpack.type.tuple_fallback) + ) + ] + else: + return [] + common_prefix = template_prefix + common_suffix = template_suffix + else: + actual_prefix = actual_unpack + actual_suffix = len(actual_args) - actual_prefix - 1 + common_prefix = min(template_prefix, actual_prefix) + common_suffix = min(template_suffix, actual_suffix) + if actual_prefix >= template_prefix and actual_suffix >= template_suffix: + # This is the only case where we can guarantee there will be no partial overlap + # (note however partial overlap is OK for variadic tuples, it is handled below). + t_unpack = template_args[template_unpack] + + # Handle constraints from prefixes/suffixes first. + start, middle, end = split_with_prefix_and_suffix( + tuple(actual_args), common_prefix, common_suffix + ) + for t, a in zip(template_args[:common_prefix], start): + res.extend(infer_constraints(t, a, direction)) + if common_suffix: + for t, a in zip(template_args[-common_suffix:], end): + res.extend(infer_constraints(t, a, direction)) + + if t_unpack is not None: + # Add constraint(s) for variadic item when possible. 
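+        # (Informally: for a template (T1, *Ts) matched against an actual
+        # (int, str, str), the prefix handling above pairs T1 with int, and the
+        # code below constrains Ts against the remaining middle items packed
+        # into a tuple, here (str, str).)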
+ assert isinstance(t_unpack, UnpackType) + tp = get_proper_type(t_unpack.type) + if isinstance(tp, Instance) and tp.type.fullname == "builtins.tuple": + # Homogeneous case *tuple[T, ...] <: [X, Y, Z, ...]. + for a in middle: + # TODO: should we use union instead of join here? + if not isinstance(a, UnpackType): + res.extend(infer_constraints(tp.args[0], a, direction)) + else: + a_tp = get_proper_type(a.type) + # This is the case *tuple[T, ...] <: *tuple[A, ...]. + if isinstance(a_tp, Instance) and a_tp.type.fullname == "builtins.tuple": + res.extend(infer_constraints(tp.args[0], a_tp.args[0], direction)) + elif isinstance(tp, TypeVarTupleType): + res.append(Constraint(tp, direction, TupleType(list(middle), tp.tuple_fallback))) + elif actual_unpack is not None: + # A special case for a variadic tuple unpack, we simply infer T <: X from + # Tuple[..., *tuple[T, ...], ...] <: Tuple[..., *tuple[X, ...], ...]. + actual_unpack_type = actual_args[actual_unpack] + assert isinstance(actual_unpack_type, UnpackType) + a_unpacked = get_proper_type(actual_unpack_type.type) + if isinstance(a_unpacked, Instance) and a_unpacked.type.fullname == "builtins.tuple": + t_unpack = template_args[template_unpack] + assert isinstance(t_unpack, UnpackType) + tp = get_proper_type(t_unpack.type) + if isinstance(tp, Instance) and tp.type.fullname == "builtins.tuple": + res.extend(infer_constraints(tp.args[0], a_unpacked.args[0], direction)) + return res + + +def infer_directed_arg_constraints(left: Type, right: Type, direction: int) -> list[Constraint]: + """Infer constraints between two arguments using direction between original callables.""" + if isinstance(left, (ParamSpecType, UnpackType)) or isinstance( + right, (ParamSpecType, UnpackType) + ): + # This avoids bogus constraints like T <: P.args + # TODO: can we infer something useful for *T vs P? + return [] + if direction == SUBTYPE_OF: + # We invert direction to account for argument contravariance. + return infer_constraints(left, right, neg_op(direction)) + else: + return infer_constraints(right, left, neg_op(direction)) + + +def infer_callable_arguments_constraints( + template: NormalizedCallableType | Parameters, + actual: NormalizedCallableType | Parameters, + direction: int, +) -> list[Constraint]: + """Infer constraints between argument types of two callables. + + This function essentially extracts four steps from are_parameters_compatible() in + subtypes.py that involve subtype checks between argument types. We keep the argument + matching logic, but ignore various strictness flags present there, and checks that + do not involve subtyping. Then in place of every subtype check we put an infer_constraints() + call for the same types. + """ + res = [] + if direction == SUBTYPE_OF: + left, right = template, actual + else: + left, right = actual, template + left_star = left.var_arg() + left_star2 = left.kw_arg() + right_star = right.var_arg() + right_star2 = right.kw_arg() + + # Numbering of steps below matches the one in are_parameters_compatible() for convenience. + # Phase 1a: compare star vs star arguments. + if left_star is not None and right_star is not None: + res.extend(infer_directed_arg_constraints(left_star.typ, right_star.typ, direction)) + if left_star2 is not None and right_star2 is not None: + res.extend(infer_directed_arg_constraints(left_star2.typ, right_star2.typ, direction)) + + # Phase 1b: compare left args with corresponding non-star right arguments. 
+ for right_arg in right.formal_arguments(): + left_arg = mypy.typeops.callable_corresponding_argument(left, right_arg) + if left_arg is None: + continue + res.extend(infer_directed_arg_constraints(left_arg.typ, right_arg.typ, direction)) + + # Phase 1c: compare left args with right *args. + if right_star is not None: + right_by_position = right.try_synthesizing_arg_from_vararg(None) + assert right_by_position is not None + i = right_star.pos + assert i is not None + while i < len(left.arg_kinds) and left.arg_kinds[i].is_positional(): + left_by_position = left.argument_by_position(i) + assert left_by_position is not None + res.extend( + infer_directed_arg_constraints( + left_by_position.typ, right_by_position.typ, direction + ) + ) + i += 1 + + # Phase 1d: compare left args with right **kwargs. + if right_star2 is not None: + right_names = {name for name in right.arg_names if name is not None} + left_only_names = set() + for name, kind in zip(left.arg_names, left.arg_kinds): + if name is None or kind.is_star() or name in right_names: + continue + left_only_names.add(name) + + right_by_name = right.try_synthesizing_arg_from_kwarg(None) + assert right_by_name is not None + for name in left_only_names: + left_by_name = left.argument_by_name(name) + assert left_by_name is not None + res.extend( + infer_directed_arg_constraints(left_by_name.typ, right_by_name.typ, direction) + ) + return res + + +def filter_imprecise_kinds(cs: list[Constraint]) -> list[Constraint]: + """For each ParamSpec remove all imprecise constraints, if at least one precise available.""" + have_precise = set() + for c in cs: + if not isinstance(c.origin_type_var, ParamSpecType): + continue + if ( + isinstance(c.target, ParamSpecType) + or isinstance(c.target, Parameters) + and not c.target.imprecise_arg_kinds + ): + have_precise.add(c.type_var) + new_cs = [] + for c in cs: + if not isinstance(c.origin_type_var, ParamSpecType) or c.type_var not in have_precise: + new_cs.append(c) + if not isinstance(c.target, Parameters) or not c.target.imprecise_arg_kinds: + new_cs.append(c) + return new_cs diff --git a/.venv/lib/python3.12/site-packages/mypy/copytype.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/copytype.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..f2c92f4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/copytype.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/copytype.py b/.venv/lib/python3.12/site-packages/mypy/copytype.py new file mode 100644 index 0000000..a890431 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/copytype.py @@ -0,0 +1,135 @@ +from __future__ import annotations + +from typing import Any, cast + +from mypy.types import ( + AnyType, + CallableType, + DeletedType, + ErasedType, + Instance, + LiteralType, + NoneType, + Overloaded, + Parameters, + ParamSpecType, + PartialType, + ProperType, + TupleType, + TypeAliasType, + TypedDictType, + TypeType, + TypeVarTupleType, + TypeVarType, + UnboundType, + UninhabitedType, + UnionType, + UnpackType, +) + +# type_visitor needs to be imported after types +from mypy.type_visitor import TypeVisitor # ruff: isort: skip + + +def copy_type(t: ProperType) -> ProperType: + """Create a shallow copy of a type. + + This can be used to mutate the copy with truthiness information. + + Classes compiled with mypyc don't support copy.copy(), so we need + a custom implementation. 
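+
+    Rough usage sketch: callers typically do dup = copy_type(t) and then set
+    dup.can_be_true / dup.can_be_false on the copy, leaving the original type
+    object untouched.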
+ """ + return t.accept(TypeShallowCopier()) + + +class TypeShallowCopier(TypeVisitor[ProperType]): + def visit_unbound_type(self, t: UnboundType) -> ProperType: + return t + + def visit_any(self, t: AnyType) -> ProperType: + return self.copy_common(t, AnyType(t.type_of_any, t.source_any, t.missing_import_name)) + + def visit_none_type(self, t: NoneType) -> ProperType: + return self.copy_common(t, NoneType()) + + def visit_uninhabited_type(self, t: UninhabitedType) -> ProperType: + dup = UninhabitedType() + dup.ambiguous = t.ambiguous + return self.copy_common(t, dup) + + def visit_erased_type(self, t: ErasedType) -> ProperType: + return self.copy_common(t, ErasedType()) + + def visit_deleted_type(self, t: DeletedType) -> ProperType: + return self.copy_common(t, DeletedType(t.source)) + + def visit_instance(self, t: Instance) -> ProperType: + dup = Instance(t.type, t.args, last_known_value=t.last_known_value) + dup.invalid = t.invalid + return self.copy_common(t, dup) + + def visit_type_var(self, t: TypeVarType) -> ProperType: + return self.copy_common(t, t.copy_modified()) + + def visit_param_spec(self, t: ParamSpecType) -> ProperType: + dup = ParamSpecType( + t.name, t.fullname, t.id, t.flavor, t.upper_bound, t.default, prefix=t.prefix + ) + return self.copy_common(t, dup) + + def visit_parameters(self, t: Parameters) -> ProperType: + dup = Parameters( + t.arg_types, + t.arg_kinds, + t.arg_names, + variables=t.variables, + is_ellipsis_args=t.is_ellipsis_args, + ) + return self.copy_common(t, dup) + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> ProperType: + dup = TypeVarTupleType( + t.name, t.fullname, t.id, t.upper_bound, t.tuple_fallback, t.default + ) + return self.copy_common(t, dup) + + def visit_unpack_type(self, t: UnpackType) -> ProperType: + dup = UnpackType(t.type) + return self.copy_common(t, dup) + + def visit_partial_type(self, t: PartialType) -> ProperType: + return self.copy_common(t, PartialType(t.type, t.var, t.value_type)) + + def visit_callable_type(self, t: CallableType) -> ProperType: + return self.copy_common(t, t.copy_modified()) + + def visit_tuple_type(self, t: TupleType) -> ProperType: + return self.copy_common(t, TupleType(t.items, t.partial_fallback, implicit=t.implicit)) + + def visit_typeddict_type(self, t: TypedDictType) -> ProperType: + return self.copy_common( + t, TypedDictType(t.items, t.required_keys, t.readonly_keys, t.fallback) + ) + + def visit_literal_type(self, t: LiteralType) -> ProperType: + return self.copy_common(t, LiteralType(value=t.value, fallback=t.fallback)) + + def visit_union_type(self, t: UnionType) -> ProperType: + return self.copy_common(t, UnionType(t.items)) + + def visit_overloaded(self, t: Overloaded) -> ProperType: + return self.copy_common(t, Overloaded(items=t.items)) + + def visit_type_type(self, t: TypeType) -> ProperType: + # Use cast since the type annotations in TypeType are imprecise. 
+ return self.copy_common(t, TypeType(cast(Any, t.item), is_type_form=t.is_type_form)) + + def visit_type_alias_type(self, t: TypeAliasType) -> ProperType: + assert False, "only ProperTypes supported" + + def copy_common(self, t: ProperType, t2: ProperType) -> ProperType: + t2.line = t.line + t2.column = t.column + t2.can_be_false = t.can_be_false + t2.can_be_true = t.can_be_true + return t2 diff --git a/.venv/lib/python3.12/site-packages/mypy/defaults.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/defaults.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..765c15d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/defaults.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/defaults.py b/.venv/lib/python3.12/site-packages/mypy/defaults.py new file mode 100644 index 0000000..58a74a4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/defaults.py @@ -0,0 +1,44 @@ +from __future__ import annotations + +import os +from typing import Final + +# Earliest fully supported Python 3.x version. Used as the default Python +# version in tests. Mypy wheels should be built starting with this version, +# and CI tests should be run on this version (and later versions). +PYTHON3_VERSION: Final = (3, 9) + +# Earliest Python 3.x version supported via --python-version 3.x. To run +# mypy, at least version PYTHON3_VERSION is needed. +PYTHON3_VERSION_MIN: Final = (3, 9) # Keep in sync with typeshed's python support + +CACHE_DIR: Final = ".mypy_cache" + +CONFIG_NAMES: Final = ["mypy.ini", ".mypy.ini"] +SHARED_CONFIG_NAMES: Final = ["pyproject.toml", "setup.cfg"] + +USER_CONFIG_FILES: list[str] = ["~/.config/mypy/config", "~/.mypy.ini"] +if os.environ.get("XDG_CONFIG_HOME"): + USER_CONFIG_FILES.insert(0, os.path.join(os.environ["XDG_CONFIG_HOME"], "mypy/config")) +USER_CONFIG_FILES = [os.path.expanduser(f) for f in USER_CONFIG_FILES] + +# This must include all reporters defined in mypy.report. This is defined here +# to make reporter names available without importing mypy.report -- this speeds +# up startup. +REPORTER_NAMES: Final = [ + "linecount", + "any-exprs", + "linecoverage", + "memory-xml", + "cobertura-xml", + "xml", + "xslt-html", + "xslt-txt", + "html", + "txt", + "lineprecision", +] + +# Threshold after which we sometimes filter out most errors to avoid very +# verbose output. The default is to show all errors. 
+MANY_ERRORS_THRESHOLD: Final = -1 diff --git a/.venv/lib/python3.12/site-packages/mypy/dmypy/__init__.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/dmypy/__init__.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..3d4f606 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/dmypy/__init__.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/dmypy/__init__.py b/.venv/lib/python3.12/site-packages/mypy/dmypy/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypy/dmypy/__main__.py b/.venv/lib/python3.12/site-packages/mypy/dmypy/__main__.py new file mode 100644 index 0000000..5441b9f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/dmypy/__main__.py @@ -0,0 +1,6 @@ +from __future__ import annotations + +from mypy.dmypy.client import console_entry + +if __name__ == "__main__": + console_entry() diff --git a/.venv/lib/python3.12/site-packages/mypy/dmypy/client.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/dmypy/client.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..92b0a5f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/dmypy/client.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/dmypy/client.py b/.venv/lib/python3.12/site-packages/mypy/dmypy/client.py new file mode 100644 index 0000000..3db47f8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/dmypy/client.py @@ -0,0 +1,759 @@ +"""Client for mypy daemon mode. + +This manages a daemon process which keeps useful state in memory +rather than having to read it back from disk on each run. +""" + +from __future__ import annotations + +import argparse +import base64 +import json +import os +import pickle +import sys +import time +import traceback +from collections.abc import Mapping +from typing import Any, Callable, NoReturn + +from mypy.dmypy_os import alive, kill +from mypy.dmypy_util import DEFAULT_STATUS_FILE, receive, send +from mypy.ipc import IPCClient, IPCException +from mypy.main import RECURSION_LIMIT +from mypy.util import check_python_version, get_terminal_width, should_force_color +from mypy.version import __version__ + +# Argument parser. Subparsers are tied to action functions by the +# @action(subparse) decorator. 
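+#
+# Roughly, the pattern is:
+#
+#     @action(check_parser)
+#     def do_check(args: argparse.Namespace) -> None: ...
+#
+# i.e. the decorator stores the function on the subparser's defaults as
+# `action`, and main() then simply calls args.action(args).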
+ + +class AugmentedHelpFormatter(argparse.RawDescriptionHelpFormatter): + def __init__(self, prog: str, **kwargs: Any) -> None: + super().__init__(prog=prog, max_help_position=30, **kwargs) + + +parser = argparse.ArgumentParser( + prog="dmypy", description="Client for mypy daemon mode", fromfile_prefix_chars="@" +) +parser.set_defaults(action=None) +parser.add_argument( + "--status-file", default=DEFAULT_STATUS_FILE, help="status file to retrieve daemon details" +) +parser.add_argument( + "-V", + "--version", + action="version", + version="%(prog)s " + __version__, + help="Show program's version number and exit", +) +subparsers = parser.add_subparsers() + +start_parser = p = subparsers.add_parser("start", help="Start daemon") +p.add_argument("--log-file", metavar="FILE", type=str, help="Direct daemon stdout/stderr to FILE") +p.add_argument( + "--timeout", metavar="TIMEOUT", type=int, help="Server shutdown timeout (in seconds)" +) +p.add_argument( + "flags", metavar="FLAG", nargs="*", type=str, help="Regular mypy flags (precede with --)" +) + +restart_parser = p = subparsers.add_parser( + "restart", help="Restart daemon (stop or kill followed by start)" +) +p.add_argument("--log-file", metavar="FILE", type=str, help="Direct daemon stdout/stderr to FILE") +p.add_argument( + "--timeout", metavar="TIMEOUT", type=int, help="Server shutdown timeout (in seconds)" +) +p.add_argument( + "flags", metavar="FLAG", nargs="*", type=str, help="Regular mypy flags (precede with --)" +) + +status_parser = p = subparsers.add_parser("status", help="Show daemon status") +p.add_argument("-v", "--verbose", action="store_true", help="Print detailed status") +p.add_argument("--fswatcher-dump-file", help="Collect information about the current file state") + +stop_parser = p = subparsers.add_parser("stop", help="Stop daemon (asks it politely to go away)") + +kill_parser = p = subparsers.add_parser("kill", help="Kill daemon (kills the process)") + +check_parser = p = subparsers.add_parser( + "check", formatter_class=AugmentedHelpFormatter, help="Check some files (requires daemon)" +) +p.add_argument("-v", "--verbose", action="store_true", help="Print detailed status") +p.add_argument("-q", "--quiet", action="store_true", help=argparse.SUPPRESS) # Deprecated +p.add_argument("--junit-xml", help="Write junit.xml to the given file") +p.add_argument("--perf-stats-file", help="write performance information to the given file") +p.add_argument("files", metavar="FILE", nargs="+", help="File (or directory) to check") +p.add_argument( + "--export-types", + action="store_true", + help="Store types of all expressions in a shared location (useful for inspections)", +) + +run_parser = p = subparsers.add_parser( + "run", + formatter_class=AugmentedHelpFormatter, + help="Check some files, [re]starting daemon if necessary", +) +p.add_argument("-v", "--verbose", action="store_true", help="Print detailed status") +p.add_argument("--junit-xml", help="Write junit.xml to the given file") +p.add_argument("--perf-stats-file", help="write performance information to the given file") +p.add_argument( + "--timeout", metavar="TIMEOUT", type=int, help="Server shutdown timeout (in seconds)" +) +p.add_argument("--log-file", metavar="FILE", type=str, help="Direct daemon stdout/stderr to FILE") +p.add_argument( + "--export-types", + action="store_true", + help="Store types of all expressions in a shared location (useful for inspections)", +) +p.add_argument( + "flags", + metavar="ARG", + nargs="*", + type=str, + help="Regular mypy flags and files 
(precede with --)", +) + +recheck_parser = p = subparsers.add_parser( + "recheck", + formatter_class=AugmentedHelpFormatter, + help="Re-check the previous list of files, with optional modifications (requires daemon)", +) +p.add_argument("-v", "--verbose", action="store_true", help="Print detailed status") +p.add_argument("-q", "--quiet", action="store_true", help=argparse.SUPPRESS) # Deprecated +p.add_argument("--junit-xml", help="Write junit.xml to the given file") +p.add_argument("--perf-stats-file", help="write performance information to the given file") +p.add_argument( + "--export-types", + action="store_true", + help="Store types of all expressions in a shared location (useful for inspections)", +) +p.add_argument( + "--update", + metavar="FILE", + nargs="*", + help="Files in the run to add or check again (default: all from previous run)", +) +p.add_argument("--remove", metavar="FILE", nargs="*", help="Files to remove from the run") + +suggest_parser = p = subparsers.add_parser( + "suggest", help="Suggest a signature or show call sites for a specific function" +) +p.add_argument( + "function", + metavar="FUNCTION", + type=str, + help="Function specified as '[package.]module.[class.]function'", +) +p.add_argument( + "--json", + action="store_true", + help="Produce json that pyannotate can use to apply a suggestion", +) +p.add_argument( + "--no-errors", action="store_true", help="Only produce suggestions that cause no errors" +) +p.add_argument( + "--no-any", action="store_true", help="Only produce suggestions that don't contain Any" +) +p.add_argument( + "--flex-any", + type=float, + help="Allow anys in types if they go above a certain score (scores are from 0-1)", +) +p.add_argument( + "--callsites", action="store_true", help="Find callsites instead of suggesting a type" +) +p.add_argument( + "--use-fixme", + metavar="NAME", + type=str, + help="A dummy name to use instead of Any for types that can't be inferred", +) +p.add_argument( + "--max-guesses", + type=int, + help="Set the maximum number of types to try for a function (default 64)", +) + +inspect_parser = p = subparsers.add_parser( + "inspect", help="Locate and statically inspect expression(s)" +) +p.add_argument( + "location", + metavar="LOCATION", + type=str, + help="Location specified as path/to/file.py:line:column[:end_line:end_column]." + " If position is given (i.e. only line and column), this will return all" + " enclosing expressions", +) +p.add_argument( + "--show", + metavar="INSPECTION", + type=str, + default="type", + choices=["type", "attrs", "definition"], + help="What kind of inspection to run", +) +p.add_argument( + "--verbose", + "-v", + action="count", + default=0, + help="Increase verbosity of the type string representation (can be repeated)", +) +p.add_argument( + "--limit", + metavar="NUM", + type=int, + default=0, + help="Return at most NUM innermost expressions (if position is given); 0 means no limit", +) +p.add_argument( + "--include-span", + action="store_true", + help="Prepend each inspection result with the span of corresponding expression" + ' (e.g. 1:2:3:4:"int")', +) +p.add_argument( + "--include-kind", + action="store_true", + help="Prepend each inspection result with the kind of corresponding expression" + ' (e.g. 
NameExpr:"int")', +) +p.add_argument( + "--include-object-attrs", + action="store_true", + help='Include attributes of "object" in "attrs" inspection', +) +p.add_argument( + "--union-attrs", + action="store_true", + help="Include attributes valid for some of possible expression types" + " (by default an intersection is returned)", +) +p.add_argument( + "--force-reload", + action="store_true", + help="Re-parse and re-type-check file before inspection (may be slow)", +) + +hang_parser = p = subparsers.add_parser("hang", help="Hang for 100 seconds") + +daemon_parser = p = subparsers.add_parser("daemon", help="Run daemon in foreground") +p.add_argument( + "--timeout", metavar="TIMEOUT", type=int, help="Server shutdown timeout (in seconds)" +) +p.add_argument("--log-file", metavar="FILE", type=str, help="Direct daemon stdout/stderr to FILE") +p.add_argument( + "flags", metavar="FLAG", nargs="*", type=str, help="Regular mypy flags (precede with --)" +) +p.add_argument("--options-data", help=argparse.SUPPRESS) +help_parser = p = subparsers.add_parser("help") + +del p + + +class BadStatus(Exception): + """Exception raised when there is something wrong with the status file. + + For example: + - No status file found + - Status file malformed + - Process whose pid is in the status file does not exist + """ + + +def main(argv: list[str]) -> None: + """The code is top-down.""" + check_python_version("dmypy") + + # set recursion limit consistent with mypy/main.py + sys.setrecursionlimit(RECURSION_LIMIT) + + args = parser.parse_args(argv) + if not args.action: + parser.print_usage() + else: + try: + args.action(args) + except BadStatus as err: + fail(err.args[0]) + except Exception: + # We do this explicitly to avoid exceptions percolating up + # through mypy.api invocations + traceback.print_exc() + sys.exit(2) + + +def fail(msg: str) -> NoReturn: + print(msg, file=sys.stderr) + sys.exit(2) + + +ActionFunction = Callable[[argparse.Namespace], None] + + +def action(subparser: argparse.ArgumentParser) -> Callable[[ActionFunction], ActionFunction]: + """Decorator to tie an action function to a subparser.""" + + def register(func: ActionFunction) -> ActionFunction: + subparser.set_defaults(action=func) + return func + + return register + + +# Action functions (run in client from command line). + + +@action(start_parser) +def do_start(args: argparse.Namespace) -> None: + """Start daemon (it must not already be running). + + This is where mypy flags are set from the command line. + + Setting flags is a bit awkward; you have to use e.g.: + + dmypy start -- --strict + + since we don't want to duplicate mypy's huge list of flags. + """ + try: + get_status(args.status_file) + except BadStatus: + # Bad or missing status file or dead process; good to start. + pass + else: + fail("Daemon is still alive") + start_server(args) + + +@action(restart_parser) +def do_restart(args: argparse.Namespace) -> None: + """Restart daemon (it may or may not be running; but not hanging). + + We first try to stop it politely if it's running. This also sets + mypy flags from the command line (see do_start()). + """ + restart_server(args) + + +def restart_server(args: argparse.Namespace, allow_sources: bool = False) -> None: + """Restart daemon (it may or may not be running; but not hanging).""" + try: + do_stop(args) + except BadStatus: + # Bad or missing status file or dead process; good to start. 
+ pass + start_server(args, allow_sources) + + +def start_server(args: argparse.Namespace, allow_sources: bool = False) -> None: + """Start the server from command arguments and wait for it.""" + # Lazy import so this import doesn't slow down other commands. + from mypy.dmypy_server import daemonize, process_start_options + + start_options = process_start_options(args.flags, allow_sources) + if daemonize(start_options, args.status_file, timeout=args.timeout, log_file=args.log_file): + sys.exit(2) + wait_for_server(args.status_file) + + +def wait_for_server(status_file: str, timeout: float = 5.0) -> None: + """Wait until the server is up. + + Exit if it doesn't happen within the timeout. + """ + endtime = time.time() + timeout + while time.time() < endtime: + try: + data = read_status(status_file) + except BadStatus: + # If the file isn't there yet, retry later. + time.sleep(0.1) + continue + # If the file's content is bogus or the process is dead, fail. + check_status(data) + print("Daemon started") + return + fail("Timed out waiting for daemon to start") + + +@action(run_parser) +def do_run(args: argparse.Namespace) -> None: + """Do a check, starting (or restarting) the daemon as necessary + + Restarts the daemon if the running daemon reports that it is + required (due to a configuration change, for example). + + Setting flags is a bit awkward; you have to use e.g.: + + dmypy run -- --strict a.py b.py ... + + since we don't want to duplicate mypy's huge list of flags. + (The -- is only necessary if flags are specified.) + """ + if not is_running(args.status_file): + # Bad or missing status file or dead process; good to start. + start_server(args, allow_sources=True) + t0 = time.time() + response = request( + args.status_file, + "run", + version=__version__, + args=args.flags, + export_types=args.export_types, + ) + # If the daemon signals that a restart is necessary, do it + if "restart" in response: + print(f"Restarting: {response['restart']}") + restart_server(args, allow_sources=True) + response = request( + args.status_file, + "run", + version=__version__, + args=args.flags, + export_types=args.export_types, + ) + + t1 = time.time() + response["roundtrip_time"] = t1 - t0 + check_output(response, args.verbose, args.junit_xml, args.perf_stats_file) + + +@action(status_parser) +def do_status(args: argparse.Namespace) -> None: + """Print daemon status. + + This verifies that it is responsive to requests. + """ + status = read_status(args.status_file) + if args.verbose: + show_stats(status) + # Both check_status() and request() may raise BadStatus, + # which will be handled by main(). + check_status(status) + response = request( + args.status_file, "status", fswatcher_dump_file=args.fswatcher_dump_file, timeout=5 + ) + if args.verbose or "error" in response: + show_stats(response) + if "error" in response: + fail(f"Daemon may be busy processing; if this persists, consider {sys.argv[0]} kill") + print("Daemon is up and running") + + +@action(stop_parser) +def do_stop(args: argparse.Namespace) -> None: + """Stop daemon via a 'stop' request.""" + # May raise BadStatus, which will be handled by main(). 
+ response = request(args.status_file, "stop", timeout=5) + if "error" in response: + show_stats(response) + fail(f"Daemon may be busy processing; if this persists, consider {sys.argv[0]} kill") + else: + print("Daemon stopped") + + +@action(kill_parser) +def do_kill(args: argparse.Namespace) -> None: + """Kill daemon process with SIGKILL.""" + pid, _ = get_status(args.status_file) + try: + kill(pid) + except OSError as err: + fail(str(err)) + else: + print("Daemon killed") + + +@action(check_parser) +def do_check(args: argparse.Namespace) -> None: + """Ask the daemon to check a list of files.""" + t0 = time.time() + response = request(args.status_file, "check", files=args.files, export_types=args.export_types) + t1 = time.time() + response["roundtrip_time"] = t1 - t0 + check_output(response, args.verbose, args.junit_xml, args.perf_stats_file) + + +@action(recheck_parser) +def do_recheck(args: argparse.Namespace) -> None: + """Ask the daemon to recheck the previous list of files, with optional modifications. + + If at least one of --remove or --update is given, the server will + update the list of files to check accordingly and assume that any other files + are unchanged. If none of these flags are given, the server will call stat() + on each file last checked to determine its status. + + Files given in --update ought to exist. Files given in --remove need not exist; + if they don't they will be ignored. + The lists may be empty but oughtn't contain duplicates or overlap. + + NOTE: The list of files is lost when the daemon is restarted. + """ + t0 = time.time() + if args.remove is not None or args.update is not None: + response = request( + args.status_file, + "recheck", + export_types=args.export_types, + remove=args.remove, + update=args.update, + ) + else: + response = request(args.status_file, "recheck", export_types=args.export_types) + t1 = time.time() + response["roundtrip_time"] = t1 - t0 + check_output(response, args.verbose, args.junit_xml, args.perf_stats_file) + + +@action(suggest_parser) +def do_suggest(args: argparse.Namespace) -> None: + """Ask the daemon for a suggested signature. + + This just prints whatever the daemon reports as output. + For now it may be closer to a list of call sites. + """ + response = request( + args.status_file, + "suggest", + function=args.function, + json=args.json, + callsites=args.callsites, + no_errors=args.no_errors, + no_any=args.no_any, + flex_any=args.flex_any, + use_fixme=args.use_fixme, + max_guesses=args.max_guesses, + ) + check_output(response, verbose=False, junit_xml=None, perf_stats_file=None) + + +@action(inspect_parser) +def do_inspect(args: argparse.Namespace) -> None: + """Ask daemon to print the type of an expression.""" + response = request( + args.status_file, + "inspect", + show=args.show, + location=args.location, + verbosity=args.verbose, + limit=args.limit, + include_span=args.include_span, + include_kind=args.include_kind, + include_object_attrs=args.include_object_attrs, + union_attrs=args.union_attrs, + force_reload=args.force_reload, + ) + check_output(response, verbose=False, junit_xml=None, perf_stats_file=None) + + +def check_output( + response: dict[str, Any], verbose: bool, junit_xml: str | None, perf_stats_file: str | None +) -> None: + """Print the output from a check or recheck command. + + Call sys.exit() unless the status code is zero. 
+ """ + if os.name == "nt": + # Enable ANSI color codes for Windows cmd using this strange workaround + # ( see https://github.com/python/cpython/issues/74261 ) + os.system("") + if "error" in response: + fail(response["error"]) + try: + out, err, status_code = response["out"], response["err"], response["status"] + except KeyError: + fail(f"Response: {str(response)}") + sys.stdout.write(out) + sys.stdout.flush() + sys.stderr.write(err) + sys.stderr.flush() + if verbose: + show_stats(response) + if junit_xml: + # Lazy import so this import doesn't slow things down when not writing junit + from mypy.util import write_junit_xml + + messages = (out + err).splitlines() + write_junit_xml( + response["roundtrip_time"], + bool(err), + {None: messages} if messages else {}, + junit_xml, + response["python_version"], + response["platform"], + ) + if perf_stats_file: + telemetry = response.get("stats", {}) + with open(perf_stats_file, "w") as f: + json.dump(telemetry, f) + + if status_code: + sys.exit(status_code) + + +def show_stats(response: Mapping[str, object]) -> None: + for key, value in sorted(response.items()): + if key in ("out", "err", "stdout", "stderr"): + # Special case text output to display just 40 characters of text + value = repr(value)[1:-1] + if len(value) > 50: + value = f"{value[:40]} ... {len(value)-40} more characters" + print("%-24s: %s" % (key, value)) + continue + print("%-24s: %10s" % (key, "%.3f" % value if isinstance(value, float) else value)) + + +@action(hang_parser) +def do_hang(args: argparse.Namespace) -> None: + """Hang for 100 seconds, as a debug hack.""" + print(request(args.status_file, "hang", timeout=1)) + + +@action(daemon_parser) +def do_daemon(args: argparse.Namespace) -> None: + """Serve requests in the foreground.""" + # Lazy import so this import doesn't slow down other commands. + from mypy.dmypy_server import Server, process_start_options + + if args.log_file: + sys.stdout = sys.stderr = open(args.log_file, "a", buffering=1) + fd = sys.stdout.fileno() + os.dup2(fd, 2) + os.dup2(fd, 1) + + if args.options_data: + from mypy.options import Options + + options_dict = pickle.loads(base64.b64decode(args.options_data)) + options_obj = Options() + options = options_obj.apply_changes(options_dict) + else: + options = process_start_options(args.flags, allow_sources=False) + + Server(options, args.status_file, timeout=args.timeout).serve() + + +@action(help_parser) +def do_help(args: argparse.Namespace) -> None: + """Print full help (same as dmypy --help).""" + parser.print_help() + + +# Client-side infrastructure. + + +def request( + status_file: str, command: str, *, timeout: int | None = None, **kwds: object +) -> dict[str, Any]: + """Send a request to the daemon. + + Return the JSON dict with the response. + + Raise BadStatus if there is something wrong with the status file + or if the process whose pid is in the status file has died. + + Return {'error': } if an IPC operation or receive() + raised OSError. This covers cases such as connection refused or + closed prematurely as well as invalid JSON received. + """ + response: dict[str, str] = {} + args = dict(kwds) + args["command"] = command + # Tell the server whether this request was initiated from a human-facing terminal, + # so that it can format the type checking output accordingly. 
+ args["is_tty"] = sys.stdout.isatty() or should_force_color() + args["terminal_width"] = get_terminal_width() + _, name = get_status(status_file) + try: + with IPCClient(name, timeout) as client: + send(client, args) + + final = False + while not final: + response = receive(client) + final = bool(response.pop("final", False)) + # Display debugging output written to stdout/stderr in the server process for convenience. + # This should not be confused with "out" and "err" fields in the response. + # Those fields hold the output of the "check" command, and are handled in check_output(). + stdout = response.pop("stdout", None) + if stdout: + sys.stdout.write(stdout) + stderr = response.pop("stderr", None) + if stderr: + sys.stderr.write(stderr) + except (OSError, IPCException) as err: + return {"error": str(err)} + # TODO: Other errors, e.g. ValueError, UnicodeError + + return response + + +def get_status(status_file: str) -> tuple[int, str]: + """Read status file and check if the process is alive. + + Return (pid, connection_name) on success. + + Raise BadStatus if something's wrong. + """ + data = read_status(status_file) + return check_status(data) + + +def check_status(data: dict[str, Any]) -> tuple[int, str]: + """Check if the process is alive. + + Return (pid, connection_name) on success. + + Raise BadStatus if something's wrong. + """ + if "pid" not in data: + raise BadStatus("Invalid status file (no pid field)") + pid = data["pid"] + if not isinstance(pid, int): + raise BadStatus("pid field is not an int") + if not alive(pid): + raise BadStatus("Daemon has died") + if "connection_name" not in data: + raise BadStatus("Invalid status file (no connection_name field)") + connection_name = data["connection_name"] + if not isinstance(connection_name, str): + raise BadStatus("connection_name field is not a string") + return pid, connection_name + + +def read_status(status_file: str) -> dict[str, object]: + """Read status file. + + Raise BadStatus if the status file doesn't exist or contains + invalid JSON or the JSON is not a dict. + """ + if not os.path.isfile(status_file): + raise BadStatus("No status file found") + with open(status_file) as f: + try: + data = json.load(f) + except Exception as e: + raise BadStatus("Malformed status file (not JSON)") from e + if not isinstance(data, dict): + raise BadStatus("Invalid status file (not a dict)") + return data + + +def is_running(status_file: str) -> bool: + """Check if the server is running cleanly""" + try: + get_status(status_file) + except BadStatus: + return False + return True + + +# Run main(). 
+def console_entry() -> None: + main(sys.argv[1:]) diff --git a/.venv/lib/python3.12/site-packages/mypy/dmypy_os.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/dmypy_os.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..2cf075a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/dmypy_os.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/dmypy_os.py b/.venv/lib/python3.12/site-packages/mypy/dmypy_os.py new file mode 100644 index 0000000..63c3e4c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/dmypy_os.py @@ -0,0 +1,42 @@ +from __future__ import annotations + +import sys +from typing import Any, Callable + +if sys.platform == "win32": + import ctypes + import subprocess + from ctypes.wintypes import DWORD, HANDLE + + PROCESS_QUERY_LIMITED_INFORMATION = ctypes.c_ulong(0x1000) + + kernel32 = ctypes.windll.kernel32 + OpenProcess: Callable[[DWORD, int, int], HANDLE] = kernel32.OpenProcess + GetExitCodeProcess: Callable[[HANDLE, Any], int] = kernel32.GetExitCodeProcess +else: + import os + import signal + + +def alive(pid: int) -> bool: + """Is the process alive?""" + if sys.platform == "win32": + # why can't anything be easy... + status = DWORD() + handle = OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION, 0, pid) + GetExitCodeProcess(handle, ctypes.byref(status)) + return status.value == 259 # STILL_ACTIVE + else: + try: + os.kill(pid, 0) + except OSError: + return False + return True + + +def kill(pid: int) -> None: + """Kill the process.""" + if sys.platform == "win32": + subprocess.check_output(f"taskkill /pid {pid} /f /t") + else: + os.kill(pid, signal.SIGKILL) diff --git a/.venv/lib/python3.12/site-packages/mypy/dmypy_server.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/dmypy_server.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..1a8f83c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/dmypy_server.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/dmypy_server.py b/.venv/lib/python3.12/site-packages/mypy/dmypy_server.py new file mode 100644 index 0000000..33e9e07 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/dmypy_server.py @@ -0,0 +1,1126 @@ +"""Server for mypy daemon mode. + +This implements a daemon process which keeps useful state in memory +to enable fine-grained incremental reprocessing of changes. 
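+
+Roughly, a client (mypy.dmypy.client) sends a JSON request such as
+{"command": "check", "files": [...]} over the IPC connection, and the server
+replies with a JSON dict whose "out", "err" and "status" fields carry the
+type checking results.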
+""" + +from __future__ import annotations + +import argparse +import base64 +import io +import json +import os +import pickle +import subprocess +import sys +import time +import traceback +from collections.abc import Sequence, Set as AbstractSet +from contextlib import redirect_stderr, redirect_stdout +from typing import Any, Callable, Final +from typing_extensions import TypeAlias as _TypeAlias + +import mypy.build +import mypy.errors +import mypy.main +from mypy.dmypy_util import WriteToConn, receive, send +from mypy.find_sources import InvalidSourceList, create_source_list +from mypy.fscache import FileSystemCache +from mypy.fswatcher import FileData, FileSystemWatcher +from mypy.inspections import InspectionEngine +from mypy.ipc import IPCServer +from mypy.modulefinder import BuildSource, FindModuleCache, SearchPaths, compute_search_paths +from mypy.options import Options +from mypy.server.update import FineGrainedBuildManager, refresh_suppressed_submodules +from mypy.suggestions import SuggestionEngine, SuggestionFailure +from mypy.typestate import reset_global_state +from mypy.util import FancyFormatter, count_stats +from mypy.version import __version__ + +MEM_PROFILE: Final = False # If True, dump memory profile after initialization + +if sys.platform == "win32": + from subprocess import STARTUPINFO + + def daemonize( + options: Options, status_file: str, timeout: int | None = None, log_file: str | None = None + ) -> int: + """Create the daemon process via "dmypy daemon" and pass options via command line + + When creating the daemon grandchild, we create it in a new console, which is + started hidden. We cannot use DETACHED_PROCESS since it will cause console windows + to pop up when starting. See + https://github.com/python/cpython/pull/4150#issuecomment-340215696 + for more on why we can't have nice things. + + It also pickles the options to be unpickled by mypy. + """ + command = [sys.executable, "-m", "mypy.dmypy", "--status-file", status_file, "daemon"] + pickled_options = pickle.dumps(options.snapshot()) + command.append(f'--options-data="{base64.b64encode(pickled_options).decode()}"') + if timeout: + command.append(f"--timeout={timeout}") + if log_file: + command.append(f"--log-file={log_file}") + info = STARTUPINFO() + info.dwFlags = 0x1 # STARTF_USESHOWWINDOW aka use wShowWindow's value + info.wShowWindow = 0 # SW_HIDE aka make the window invisible + try: + subprocess.Popen(command, creationflags=0x10, startupinfo=info) # CREATE_NEW_CONSOLE + return 0 + except subprocess.CalledProcessError as e: + return e.returncode + +else: + + def _daemonize_cb(func: Callable[[], None], log_file: str | None = None) -> int: + """Arrange to call func() in a grandchild of the current process. + + Return 0 for success, exit status for failure, negative if + subprocess killed by signal. + """ + # See https://stackoverflow.com/questions/473620/how-do-you-create-a-daemon-in-python + sys.stdout.flush() + sys.stderr.flush() + pid = os.fork() + if pid: + # Parent process: wait for child in case things go bad there. + npid, sts = os.waitpid(pid, 0) + sig = sts & 0xFF + if sig: + print("Child killed by signal", sig) + return -sig + sts = sts >> 8 + if sts: + print("Child exit status", sts) + return sts + # Child process: do a bunch of UNIX stuff and then fork a grandchild. 
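+        # (Classic double-fork daemonization: setsid() detaches the controlling
+        # terminal, stdio is pointed at /dev/null, and the second fork plus the
+        # immediate _exit(0) below leaves the grandchild reparented to init with
+        # no way to reacquire a terminal; the grandchild may then redirect
+        # stdout/stderr to a log file before running the server.)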
+ try: + os.setsid() # Detach controlling terminal + os.umask(0o27) + devnull = os.open("/dev/null", os.O_RDWR) + os.dup2(devnull, 0) + os.dup2(devnull, 1) + os.dup2(devnull, 2) + os.close(devnull) + pid = os.fork() + if pid: + # Child is done, exit to parent. + os._exit(0) + # Grandchild: run the server. + if log_file: + sys.stdout = sys.stderr = open(log_file, "a", buffering=1) + fd = sys.stdout.fileno() + os.dup2(fd, 2) + os.dup2(fd, 1) + func() + finally: + # Make sure we never get back into the caller. + os._exit(1) + + def daemonize( + options: Options, status_file: str, timeout: int | None = None, log_file: str | None = None + ) -> int: + """Run the mypy daemon in a grandchild of the current process + + Return 0 for success, exit status for failure, negative if + subprocess killed by signal. + """ + return _daemonize_cb(Server(options, status_file, timeout).serve, log_file) + + +# Server code. + +CONNECTION_NAME: Final = "dmypy" + + +def process_start_options(flags: list[str], allow_sources: bool) -> Options: + _, options = mypy.main.process_options( + ["-i"] + flags, require_targets=False, server_options=True + ) + if options.report_dirs: + print("dmypy: Ignoring report generation settings. Start/restart cannot generate reports.") + if options.junit_xml: + print( + "dmypy: Ignoring report generation settings. " + "Start/restart does not support --junit-xml. Pass it to check/recheck instead" + ) + options.junit_xml = None + if not options.incremental: + sys.exit("dmypy: start/restart should not disable incremental mode") + if options.follow_imports not in ("skip", "error", "normal"): + sys.exit("dmypy: follow-imports=silent not supported") + return options + + +def ignore_suppressed_imports(module: str) -> bool: + """Can we skip looking for newly unsuppressed imports to module?""" + # Various submodules of 'encodings' can be suppressed, since it + # uses module-level '__getattr__'. Skip them since there are many + # of them, and following imports to them is kind of pointless. + return module.startswith("encodings.") + + +ModulePathPair: _TypeAlias = tuple[str, str] +ModulePathPairs: _TypeAlias = list[ModulePathPair] +ChangesAndRemovals: _TypeAlias = tuple[ModulePathPairs, ModulePathPairs] + + +class Server: + # NOTE: the instance is constructed in the parent process but + # serve() is called in the grandchild (by daemonize()). + + def __init__(self, options: Options, status_file: str, timeout: int | None = None) -> None: + """Initialize the server with the desired mypy flags.""" + self.options = options + # Snapshot the options info before we muck with it, to detect changes + self.options_snapshot = options.snapshot() + self.timeout = timeout + self.fine_grained_manager: FineGrainedBuildManager | None = None + + if os.path.isfile(status_file): + os.unlink(status_file) + + self.fscache = FileSystemCache() + + options.raise_exceptions = True + options.incremental = True + options.fine_grained_incremental = True + options.show_traceback = True + if options.use_fine_grained_cache: + # Using fine_grained_cache implies generating and caring + # about the fine grained cache + options.cache_fine_grained = True + else: + options.cache_dir = os.devnull + # Fine-grained incremental doesn't support general partial types + # (details in https://github.com/python/mypy/issues/4492) + options.local_partial_types = True + self.status_file = status_file + + # Since the object is created in the parent process we can check + # the output terminal options here. 
+ self.formatter = FancyFormatter(sys.stdout, sys.stderr, options.hide_error_codes) + + def _response_metadata(self) -> dict[str, str]: + py_version = f"{self.options.python_version[0]}_{self.options.python_version[1]}" + return {"platform": self.options.platform, "python_version": py_version} + + def serve(self) -> None: + """Serve requests, synchronously (no thread or fork).""" + + command = None + server = IPCServer(CONNECTION_NAME, self.timeout) + orig_stdout = sys.stdout + orig_stderr = sys.stderr + + try: + with open(self.status_file, "w") as f: + json.dump({"pid": os.getpid(), "connection_name": server.connection_name}, f) + f.write("\n") # I like my JSON with a trailing newline + while True: + with server: + data = receive(server) + sys.stdout = WriteToConn(server, "stdout", sys.stdout.isatty()) + sys.stderr = WriteToConn(server, "stderr", sys.stderr.isatty()) + resp: dict[str, Any] = {} + if "command" not in data: + resp = {"error": "No command found in request"} + else: + command = data["command"] + if not isinstance(command, str): + resp = {"error": "Command is not a string"} + else: + command = data.pop("command") + try: + resp = self.run_command(command, data) + except Exception: + # If we are crashing, report the crash to the client + tb = traceback.format_exception(*sys.exc_info()) + resp = {"error": "Daemon crashed!\n" + "".join(tb)} + resp.update(self._response_metadata()) + resp["final"] = True + send(server, resp) + raise + resp["final"] = True + try: + resp.update(self._response_metadata()) + send(server, resp) + except OSError: + pass # Maybe the client hung up + if command == "stop": + reset_global_state() + sys.exit(0) + finally: + # Revert stdout/stderr so we can see any errors. + sys.stdout = orig_stdout + sys.stderr = orig_stderr + + # If the final command is something other than a clean + # stop, remove the status file. (We can't just + # simplify the logic and always remove the file, since + # that could cause us to remove a future server's + # status file.) + if command != "stop": + os.unlink(self.status_file) + try: + server.cleanup() # try to remove the socket dir on Linux + except OSError: + pass + exc_info = sys.exc_info() + if exc_info[0] and exc_info[0] is not SystemExit: + traceback.print_exception(*exc_info) + + def run_command(self, command: str, data: dict[str, object]) -> dict[str, object]: + """Run a specific command from the registry.""" + key = "cmd_" + command + method = getattr(self.__class__, key, None) + if method is None: + return {"error": f"Unrecognized command '{command}'"} + else: + if command not in {"check", "recheck", "run"}: + # Only the above commands use some error formatting. + del data["is_tty"] + del data["terminal_width"] + ret = method(self, **data) + assert isinstance(ret, dict) + return ret + + # Command functions (run in the server via RPC). + + def cmd_status(self, fswatcher_dump_file: str | None = None) -> dict[str, object]: + """Return daemon status.""" + res: dict[str, object] = {} + res.update(get_meminfo()) + if fswatcher_dump_file: + data = self.fswatcher.dump_file_data() if hasattr(self, "fswatcher") else {} + # Using .dumps and then writing was noticeably faster than using dump + s = json.dumps(data) + with open(fswatcher_dump_file, "w") as f: + f.write(s) + return res + + def cmd_stop(self) -> dict[str, object]: + """Stop daemon.""" + # We need to remove the status file *before* we complete the + # RPC. 
Otherwise a race condition exists where a subsequent + # command can see a status file from a dying server and think + # it is a live one. + os.unlink(self.status_file) + return {} + + def cmd_run( + self, + version: str, + args: Sequence[str], + export_types: bool, + is_tty: bool, + terminal_width: int, + ) -> dict[str, object]: + """Check a list of files, triggering a restart if needed.""" + stderr = io.StringIO() + stdout = io.StringIO() + try: + # Process options can exit on improper arguments, so we need to catch that and + # capture stderr so the client can report it + with redirect_stderr(stderr): + with redirect_stdout(stdout): + sources, options = mypy.main.process_options( + ["-i"] + list(args), + require_targets=True, + server_options=True, + fscache=self.fscache, + program="mypy-daemon", + header=argparse.SUPPRESS, + ) + # Signal that we need to restart if the options have changed + if not options.compare_stable(self.options_snapshot): + return {"restart": "configuration changed"} + if __version__ != version: + return {"restart": "mypy version changed"} + if self.fine_grained_manager: + manager = self.fine_grained_manager.manager + start_plugins_snapshot = manager.plugins_snapshot + _, current_plugins_snapshot = mypy.build.load_plugins( + options, manager.errors, sys.stdout, extra_plugins=() + ) + if current_plugins_snapshot != start_plugins_snapshot: + return {"restart": "plugins changed"} + except InvalidSourceList as err: + return {"out": "", "err": str(err), "status": 2} + except SystemExit as e: + return {"out": stdout.getvalue(), "err": stderr.getvalue(), "status": e.code} + return self.check(sources, export_types, is_tty, terminal_width) + + def cmd_check( + self, files: Sequence[str], export_types: bool, is_tty: bool, terminal_width: int + ) -> dict[str, object]: + """Check a list of files.""" + try: + sources = create_source_list(files, self.options, self.fscache) + except InvalidSourceList as err: + return {"out": "", "err": str(err), "status": 2} + return self.check(sources, export_types, is_tty, terminal_width) + + def cmd_recheck( + self, + is_tty: bool, + terminal_width: int, + export_types: bool, + remove: list[str] | None = None, + update: list[str] | None = None, + ) -> dict[str, object]: + """Check the same list of files we checked most recently. + + If remove/update is given, they modify the previous list; + if all are None, stat() is called for each file in the previous list. + """ + t0 = time.time() + if not self.fine_grained_manager: + return {"error": "Command 'recheck' is only valid after a 'check' command"} + sources = self.previous_sources + if remove: + removals = set(remove) + sources = [s for s in sources if s.path and s.path not in removals] + if update: + # Sort list of file updates by extension, so *.pyi files are first. + update.sort(key=lambda f: os.path.splitext(f)[1], reverse=True) + + known = {s.path for s in sources if s.path} + added = [p for p in update if p not in known] + try: + added_sources = create_source_list(added, self.options, self.fscache) + except InvalidSourceList as err: + return {"out": "", "err": str(err), "status": 2} + sources = sources + added_sources # Make a copy! 
+ t1 = time.time() + manager = self.fine_grained_manager.manager + manager.log(f"fine-grained increment: cmd_recheck: {t1 - t0:.3f}s") + old_export_types = self.options.export_types + self.options.export_types = self.options.export_types or export_types + if not self.following_imports(): + messages = self.fine_grained_increment( + sources, remove, update, explicit_export_types=export_types + ) + else: + assert remove is None and update is None + messages = self.fine_grained_increment_follow_imports( + sources, explicit_export_types=export_types + ) + res = self.increment_output(messages, sources, is_tty, terminal_width) + self.flush_caches() + self.update_stats(res) + self.options.export_types = old_export_types + return res + + def check( + self, sources: list[BuildSource], export_types: bool, is_tty: bool, terminal_width: int + ) -> dict[str, Any]: + """Check using fine-grained incremental mode. + + If is_tty is True format the output nicely with colors and summary line + (unless disabled in self.options). Also pass the terminal_width to formatter. + """ + old_export_types = self.options.export_types + self.options.export_types = self.options.export_types or export_types + if not self.fine_grained_manager: + res = self.initialize_fine_grained(sources, is_tty, terminal_width) + else: + if not self.following_imports(): + messages = self.fine_grained_increment(sources, explicit_export_types=export_types) + else: + messages = self.fine_grained_increment_follow_imports( + sources, explicit_export_types=export_types + ) + res = self.increment_output(messages, sources, is_tty, terminal_width) + self.flush_caches() + self.update_stats(res) + self.options.export_types = old_export_types + return res + + def flush_caches(self) -> None: + self.fscache.flush() + if self.fine_grained_manager: + self.fine_grained_manager.flush_cache() + + def update_stats(self, res: dict[str, Any]) -> None: + if self.fine_grained_manager: + manager = self.fine_grained_manager.manager + manager.dump_stats() + res["stats"] = manager.stats + manager.stats = {} + + def following_imports(self) -> bool: + """Are we following imports?""" + # TODO: What about silent? + return self.options.follow_imports == "normal" + + def initialize_fine_grained( + self, sources: list[BuildSource], is_tty: bool, terminal_width: int + ) -> dict[str, Any]: + self.fswatcher = FileSystemWatcher(self.fscache) + t0 = time.time() + self.update_sources(sources) + t1 = time.time() + try: + result = mypy.build.build(sources=sources, options=self.options, fscache=self.fscache) + except mypy.errors.CompileError as e: + output = "".join(s + "\n" for s in e.messages) + if e.use_stdout: + out, err = output, "" + else: + out, err = "", output + return {"out": out, "err": err, "status": 2} + messages = result.errors + self.fine_grained_manager = FineGrainedBuildManager(result) + + original_sources_len = len(sources) + if self.following_imports(): + sources = find_all_sources_in_build(self.fine_grained_manager.graph, sources) + self.update_sources(sources) + + self.previous_sources = sources + + # If we are using the fine-grained cache, build hasn't actually done + # the typechecking on the updated files yet. + # Run a fine-grained update starting from the cached data + if result.used_cache: + t2 = time.time() + # Pull times and hashes out of the saved_cache and stick them into + # the fswatcher, so we pick up the changes. 
+ for state in self.fine_grained_manager.graph.values(): + meta = state.meta + if meta is None: + continue + assert state.path is not None + self.fswatcher.set_file_data( + state.path, + FileData(st_mtime=float(meta.mtime), st_size=meta.size, hash=meta.hash), + ) + + changed, removed = self.find_changed(sources) + changed += self.find_added_suppressed( + self.fine_grained_manager.graph, + set(), + self.fine_grained_manager.manager.search_paths, + ) + + # Find anything that has had its dependency list change + for state in self.fine_grained_manager.graph.values(): + if not state.is_fresh(): + assert state.path is not None + changed.append((state.id, state.path)) + + t3 = time.time() + # Run an update + messages = self.fine_grained_manager.update(changed, removed) + + if self.following_imports(): + # We need to do another update to any new files found by following imports. + messages = self.fine_grained_increment_follow_imports(sources) + + t4 = time.time() + self.fine_grained_manager.manager.add_stats( + update_sources_time=t1 - t0, + build_time=t2 - t1, + find_changes_time=t3 - t2, + fg_update_time=t4 - t3, + files_changed=len(removed) + len(changed), + ) + + else: + # Stores the initial state of sources as a side effect. + self.fswatcher.find_changed() + + if MEM_PROFILE: + from mypy.memprofile import print_memory_profile + + print_memory_profile(run_gc=False) + + __, n_notes, __ = count_stats(messages) + status = 1 if messages and n_notes < len(messages) else 0 + # We use explicit sources length to match the logic in non-incremental mode. + messages = self.pretty_messages(messages, original_sources_len, is_tty, terminal_width) + return {"out": "".join(s + "\n" for s in messages), "err": "", "status": status} + + def fine_grained_increment( + self, + sources: list[BuildSource], + remove: list[str] | None = None, + update: list[str] | None = None, + explicit_export_types: bool = False, + ) -> list[str]: + """Perform a fine-grained type checking increment. + + If remove and update are None, determine changed paths by using + fswatcher. Otherwise, assume that only these files have changes. + + Args: + sources: sources passed on the command line + remove: paths of files that have been removed + update: paths of files that have been changed or created + explicit_export_types: --export-type was passed in a check command + (as opposite to being set in dmypy start) + """ + assert self.fine_grained_manager is not None + manager = self.fine_grained_manager.manager + + t0 = time.time() + if remove is None and update is None: + # Use the fswatcher to determine which files were changed + # (updated or added) or removed. + self.update_sources(sources) + changed, removed = self.find_changed(sources) + else: + # Use the remove/update lists to update fswatcher. + # This avoids calling stat() for unchanged files. + changed, removed = self.update_changed(sources, remove or [], update or []) + if explicit_export_types: + # If --export-types is given, we need to force full re-checking of all + # explicitly passed files, since we need to visit each expression. 
+ add_all_sources_to_changed(sources, changed) + changed += self.find_added_suppressed( + self.fine_grained_manager.graph, set(), manager.search_paths + ) + manager.search_paths = compute_search_paths(sources, manager.options, manager.data_dir) + t1 = time.time() + manager.log(f"fine-grained increment: find_changed: {t1 - t0:.3f}s") + messages = self.fine_grained_manager.update(changed, removed) + t2 = time.time() + manager.log(f"fine-grained increment: update: {t2 - t1:.3f}s") + manager.add_stats( + find_changes_time=t1 - t0, + fg_update_time=t2 - t1, + files_changed=len(removed) + len(changed), + ) + + self.previous_sources = sources + return messages + + def fine_grained_increment_follow_imports( + self, sources: list[BuildSource], explicit_export_types: bool = False + ) -> list[str]: + """Like fine_grained_increment, but follow imports.""" + t0 = time.time() + + # TODO: Support file events + + assert self.fine_grained_manager is not None + fine_grained_manager = self.fine_grained_manager + graph = fine_grained_manager.graph + manager = fine_grained_manager.manager + + orig_modules = list(graph.keys()) + + self.update_sources(sources) + changed_paths = self.fswatcher.find_changed() + manager.search_paths = compute_search_paths(sources, manager.options, manager.data_dir) + + t1 = time.time() + manager.log(f"fine-grained increment: find_changed: {t1 - t0:.3f}s") + + # Track all modules encountered so far. New entries for all dependencies + # are added below by other module finding methods below. All dependencies + # in graph but not in `seen` are considered deleted at the end of this method. + seen = {source.module for source in sources} + + # Find changed modules reachable from roots (or in roots) already in graph. + changed, new_files = self.find_reachable_changed_modules( + sources, graph, seen, changed_paths + ) + # Same as in fine_grained_increment(). + self.add_explicitly_new(sources, changed) + if explicit_export_types: + # Same as in fine_grained_increment(). + add_all_sources_to_changed(sources, changed) + sources.extend(new_files) + + # Process changes directly reachable from roots. + messages = fine_grained_manager.update(changed, [], followed=True) + + # Follow deps from changed modules (still within graph). + worklist = changed.copy() + while worklist: + module = worklist.pop() + if module[0] not in graph: + continue + sources2 = self.direct_imports(module, graph) + # Filter anything already seen before. This prevents + # infinite looping if there are any self edges. (Self + # edges are maybe a bug, but...) + sources2 = [source for source in sources2 if source.module not in seen] + changed, new_files = self.find_reachable_changed_modules( + sources2, graph, seen, changed_paths + ) + self.update_sources(new_files) + messages = fine_grained_manager.update(changed, [], followed=True) + worklist.extend(changed) + + t2 = time.time() + + def refresh_file(module: str, path: str) -> list[str]: + return fine_grained_manager.update([(module, path)], [], followed=True) + + for module_id, state in list(graph.items()): + new_messages = refresh_suppressed_submodules( + module_id, state.path, fine_grained_manager.deps, graph, self.fscache, refresh_file + ) + if new_messages is not None: + messages = new_messages + + t3 = time.time() + + # There may be new files that became available, currently treated as + # suppressed imports. Process them. 
+ while True: + new_unsuppressed = self.find_added_suppressed(graph, seen, manager.search_paths) + if not new_unsuppressed: + break + new_files = [BuildSource(mod[1], mod[0], followed=True) for mod in new_unsuppressed] + sources.extend(new_files) + self.update_sources(new_files) + messages = fine_grained_manager.update(new_unsuppressed, [], followed=True) + + for module_id, path in new_unsuppressed: + new_messages = refresh_suppressed_submodules( + module_id, path, fine_grained_manager.deps, graph, self.fscache, refresh_file + ) + if new_messages is not None: + messages = new_messages + + t4 = time.time() + + # Find all original modules in graph that were not reached -- they are deleted. + to_delete = [] + for module_id in orig_modules: + if module_id not in graph: + continue + if module_id not in seen: + module_path = graph[module_id].path + assert module_path is not None + to_delete.append((module_id, module_path)) + if to_delete: + messages = fine_grained_manager.update([], to_delete) + + fix_module_deps(graph) + + self.previous_sources = find_all_sources_in_build(graph) + self.update_sources(self.previous_sources) + + # Store current file state as side effect + self.fswatcher.find_changed() + + t5 = time.time() + + manager.log(f"fine-grained increment: update: {t5 - t1:.3f}s") + manager.add_stats( + find_changes_time=t1 - t0, + fg_update_time=t2 - t1, + refresh_suppressed_time=t3 - t2, + find_added_suppressed_time=t4 - t3, + cleanup_time=t5 - t4, + ) + + return messages + + def find_reachable_changed_modules( + self, + roots: list[BuildSource], + graph: mypy.build.Graph, + seen: set[str], + changed_paths: AbstractSet[str], + ) -> tuple[list[tuple[str, str]], list[BuildSource]]: + """Follow imports within graph from given sources until hitting changed modules. + + If we find a changed module, we can't continue following imports as the imports + may have changed. + + Args: + roots: modules where to start search from + graph: module graph to use for the search + seen: modules we've seen before that won't be visited (mutated here!!). + Needed to accumulate all modules encountered during update and remove + everything that no longer exists. + changed_paths: which paths have changed (stop search here and return any found) + + Return (encountered reachable changed modules, + unchanged files not in sources_set traversed). + """ + changed = [] + new_files = [] + worklist = roots.copy() + seen.update(source.module for source in worklist) + while worklist: + nxt = worklist.pop() + if nxt.module not in seen: + seen.add(nxt.module) + new_files.append(nxt) + if nxt.path in changed_paths: + assert nxt.path is not None # TODO + changed.append((nxt.module, nxt.path)) + elif nxt.module in graph: + state = graph[nxt.module] + ancestors = state.ancestors or [] + for dep in state.dependencies + ancestors: + if dep not in seen: + seen.add(dep) + worklist.append(BuildSource(graph[dep].path, graph[dep].id, followed=True)) + return changed, new_files + + def direct_imports( + self, module: tuple[str, str], graph: mypy.build.Graph + ) -> list[BuildSource]: + """Return the direct imports of module not included in seen.""" + state = graph[module[0]] + return [BuildSource(graph[dep].path, dep, followed=True) for dep in state.dependencies] + + def find_added_suppressed( + self, graph: mypy.build.Graph, seen: set[str], search_paths: SearchPaths + ) -> list[tuple[str, str]]: + """Find suppressed modules that have been added (and not included in seen). 
+ + Args: + seen: reachable modules we've seen before (mutated here!!). + Needed to accumulate all modules encountered during update and remove + everything that no longer exists. + + Return suppressed, added modules. + """ + all_suppressed = set() + for state in graph.values(): + all_suppressed |= state.suppressed_set + + # Filter out things that shouldn't actually be considered suppressed. + # + # TODO: Figure out why these are treated as suppressed + all_suppressed = { + module + for module in all_suppressed + if module not in graph and not ignore_suppressed_imports(module) + } + + # Optimization: skip top-level packages that are obviously not + # there, to avoid calling the relatively slow find_module() + # below too many times. + packages = {module.split(".", 1)[0] for module in all_suppressed} + packages = filter_out_missing_top_level_packages(packages, search_paths, self.fscache) + + # TODO: Namespace packages + + finder = FindModuleCache(search_paths, self.fscache, self.options) + + found = [] + + for module in all_suppressed: + top_level_pkg = module.split(".", 1)[0] + if top_level_pkg not in packages: + # Fast path: non-existent top-level package + continue + result = finder.find_module(module, fast_path=True) + if isinstance(result, str) and module not in seen: + # When not following imports, we only follow imports to .pyi files. + if not self.following_imports() and not result.endswith(".pyi"): + continue + found.append((module, result)) + seen.add(module) + + return found + + def increment_output( + self, messages: list[str], sources: list[BuildSource], is_tty: bool, terminal_width: int + ) -> dict[str, Any]: + status = 1 if messages else 0 + messages = self.pretty_messages(messages, len(sources), is_tty, terminal_width) + return {"out": "".join(s + "\n" for s in messages), "err": "", "status": status} + + def pretty_messages( + self, + messages: list[str], + n_sources: int, + is_tty: bool = False, + terminal_width: int | None = None, + ) -> list[str]: + use_color = self.options.color_output and is_tty + fit_width = self.options.pretty and is_tty + if fit_width: + messages = self.formatter.fit_in_terminal( + messages, fixed_terminal_width=terminal_width + ) + if self.options.error_summary: + summary: str | None = None + n_errors, n_notes, n_files = count_stats(messages) + if n_errors: + summary = self.formatter.format_error( + n_errors, n_files, n_sources, use_color=use_color + ) + elif not messages or n_notes == len(messages): + summary = self.formatter.format_success(n_sources, use_color) + if summary: + # Create new list to avoid appending multiple summaries on successive runs. + messages = messages + [summary] + if use_color: + messages = [self.formatter.colorize(m) for m in messages] + return messages + + def update_sources(self, sources: list[BuildSource]) -> None: + paths = [source.path for source in sources if source.path is not None] + if self.following_imports(): + # Filter out directories (used for namespace packages). 
+ paths = [path for path in paths if self.fscache.isfile(path)] + self.fswatcher.add_watched_paths(paths) + + def update_changed( + self, sources: list[BuildSource], remove: list[str], update: list[str] + ) -> ChangesAndRemovals: + changed_paths = self.fswatcher.update_changed(remove, update) + return self._find_changed(sources, changed_paths) + + def find_changed(self, sources: list[BuildSource]) -> ChangesAndRemovals: + changed_paths = self.fswatcher.find_changed() + return self._find_changed(sources, changed_paths) + + def _find_changed( + self, sources: list[BuildSource], changed_paths: AbstractSet[str] + ) -> ChangesAndRemovals: + # Find anything that has been added or modified + changed = [ + (source.module, source.path) + for source in sources + if source.path and source.path in changed_paths + ] + + # Now find anything that has been removed from the build + modules = {source.module for source in sources} + omitted = [source for source in self.previous_sources if source.module not in modules] + removed = [] + for source in omitted: + path = source.path + assert path + removed.append((source.module, path)) + + self.add_explicitly_new(sources, changed) + + # Find anything that has had its module path change because of added or removed __init__s + last = {s.path: s.module for s in self.previous_sources} + for s in sources: + assert s.path + if s.path in last and last[s.path] != s.module: + # Mark it as removed from its old name and changed at its new name + removed.append((last[s.path], s.path)) + changed.append((s.module, s.path)) + + return changed, removed + + def add_explicitly_new( + self, sources: list[BuildSource], changed: list[tuple[str, str]] + ) -> None: + # Always add modules that were (re-)added, since they may be detected as not changed by + # fswatcher (if they were actually not changed), but they may still need to be checked + # in case they had errors before they were deleted from sources on previous runs. 
+ previous_modules = {source.module for source in self.previous_sources} + changed_set = set(changed) + changed.extend( + [ + (source.module, source.path) + for source in sources + if source.path + and source.module not in previous_modules + and (source.module, source.path) not in changed_set + ] + ) + + def cmd_inspect( + self, + show: str, + location: str, + verbosity: int = 0, + limit: int = 0, + include_span: bool = False, + include_kind: bool = False, + include_object_attrs: bool = False, + union_attrs: bool = False, + force_reload: bool = False, + ) -> dict[str, object]: + """Locate and inspect expression(s).""" + if not self.fine_grained_manager: + return { + "error": 'Command "inspect" is only valid after a "check" command' + " (that produces no parse errors)" + } + engine = InspectionEngine( + self.fine_grained_manager, + verbosity=verbosity, + limit=limit, + include_span=include_span, + include_kind=include_kind, + include_object_attrs=include_object_attrs, + union_attrs=union_attrs, + force_reload=force_reload, + ) + old_inspections = self.options.inspections + self.options.inspections = True + try: + if show == "type": + result = engine.get_type(location) + elif show == "attrs": + result = engine.get_attrs(location) + elif show == "definition": + result = engine.get_definition(location) + else: + assert False, "Unknown inspection kind" + finally: + self.options.inspections = old_inspections + if "out" in result: + assert isinstance(result["out"], str) + result["out"] += "\n" + return result + + def cmd_suggest(self, function: str, callsites: bool, **kwargs: Any) -> dict[str, object]: + """Suggest a signature for a function.""" + if not self.fine_grained_manager: + return { + "error": "Command 'suggest' is only valid after a 'check' command" + " (that produces no parse errors)" + } + engine = SuggestionEngine(self.fine_grained_manager, **kwargs) + try: + if callsites: + out = engine.suggest_callsites(function) + else: + out = engine.suggest(function) + except SuggestionFailure as err: + return {"error": str(err)} + else: + if not out: + out = "No suggestions\n" + elif not out.endswith("\n"): + out += "\n" + return {"out": out, "err": "", "status": 0} + finally: + self.flush_caches() + + def cmd_hang(self) -> dict[str, object]: + """Hang for 100 seconds, as a debug hack.""" + time.sleep(100) + return {} + + +# Misc utilities. + + +MiB: Final = 2**20 + + +def get_meminfo() -> dict[str, Any]: + res: dict[str, Any] = {} + try: + import psutil + except ImportError: + res["memory_psutil_missing"] = ( + "psutil not found, run pip install mypy[dmypy] " + "to install the needed components for dmypy" + ) + else: + process = psutil.Process() + meminfo = process.memory_info() + res["memory_rss_mib"] = meminfo.rss / MiB + res["memory_vms_mib"] = meminfo.vms / MiB + if sys.platform == "win32": + res["memory_maxrss_mib"] = meminfo.peak_wset / MiB + else: + # See https://stackoverflow.com/questions/938733/total-memory-used-by-python-process + import resource # Since it doesn't exist on Windows. 
+ + rusage = resource.getrusage(resource.RUSAGE_SELF) + if sys.platform == "darwin": + factor = 1 + else: + factor = 1024 # Linux + res["memory_maxrss_mib"] = rusage.ru_maxrss * factor / MiB + return res + + +def find_all_sources_in_build( + graph: mypy.build.Graph, extra: Sequence[BuildSource] = () +) -> list[BuildSource]: + result = list(extra) + seen = {source.module for source in result} + for module, state in graph.items(): + if module not in seen: + result.append(BuildSource(state.path, module)) + return result + + +def add_all_sources_to_changed(sources: list[BuildSource], changed: list[tuple[str, str]]) -> None: + """Add all (explicit) sources to the list changed files in place. + + Use this when re-processing of unchanged files is needed (e.g. for + the purpose of exporting types for inspections). + """ + changed_set = set(changed) + changed.extend( + [ + (bs.module, bs.path) + for bs in sources + if bs.path and (bs.module, bs.path) not in changed_set + ] + ) + + +def fix_module_deps(graph: mypy.build.Graph) -> None: + """After an incremental update, update module dependencies to reflect the new state. + + This can make some suppressed dependencies non-suppressed, and vice versa (if modules + have been added to or removed from the build). + """ + for state in graph.values(): + new_suppressed = [] + new_dependencies = [] + for dep in state.dependencies + state.suppressed: + if dep in graph: + new_dependencies.append(dep) + else: + new_suppressed.append(dep) + state.dependencies = new_dependencies + state.dependencies_set = set(new_dependencies) + state.suppressed = new_suppressed + state.suppressed_set = set(new_suppressed) + + +def filter_out_missing_top_level_packages( + packages: set[str], search_paths: SearchPaths, fscache: FileSystemCache +) -> set[str]: + """Quickly filter out obviously missing top-level packages. + + Return packages with entries that can't be found removed. + + This is approximate: some packages that aren't actually valid may be + included. However, all potentially valid packages must be returned. + """ + # Start with a empty set and add all potential top-level packages. + found = set() + paths = ( + search_paths.python_path + + search_paths.mypy_path + + search_paths.package_path + + search_paths.typeshed_path + ) + for p in paths: + try: + entries = fscache.listdir(p) + except Exception: + entries = [] + for entry in entries: + # The code is hand-optimized for mypyc since this may be somewhat + # performance-critical. + if entry.endswith(".py"): + entry = entry[:-3] + elif entry.endswith(".pyi"): + entry = entry[:-4] + elif entry.endswith("-stubs"): + # Possible PEP 561 stub package + entry = entry[:-6] + if entry in packages: + found.add(entry) + return found diff --git a/.venv/lib/python3.12/site-packages/mypy/dmypy_util.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/dmypy_util.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..8d26700 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/dmypy_util.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/dmypy_util.py b/.venv/lib/python3.12/site-packages/mypy/dmypy_util.py new file mode 100644 index 0000000..eeb918b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/dmypy_util.py @@ -0,0 +1,117 @@ +"""Shared code between dmypy.py and dmypy_server.py. + +This should be pretty lightweight and not depend on other mypy code (other than ipc). 
+""" + +from __future__ import annotations + +import io +import json +from collections.abc import Iterable, Iterator +from types import TracebackType +from typing import Any, Final, TextIO + +from mypy.ipc import IPCBase + +DEFAULT_STATUS_FILE: Final = ".dmypy.json" + + +def receive(connection: IPCBase) -> Any: + """Receive single JSON data frame from a connection. + + Raise OSError if the data received is not valid JSON or if it is + not a dict. + """ + bdata = connection.read() + if not bdata: + raise OSError("No data received") + try: + data = json.loads(bdata) + except Exception as e: + raise OSError("Data received is not valid JSON") from e + if not isinstance(data, dict): + raise OSError(f"Data received is not a dict ({type(data)})") + return data + + +def send(connection: IPCBase, data: Any) -> None: + """Send data to a connection encoded and framed. + + The data must be JSON-serializable. We assume that a single send call is a + single frame to be sent on the connect. + """ + connection.write(json.dumps(data)) + + +class WriteToConn(TextIO): + """Helper class to write to a connection instead of standard output.""" + + def __init__(self, server: IPCBase, output_key: str, isatty: bool) -> None: + self.server = server + self.output_key = output_key + self._isatty = isatty + + def __enter__(self) -> TextIO: + return self + + def __exit__( + self, + t: type[BaseException] | None, + value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + pass + + def __iter__(self) -> Iterator[str]: + raise io.UnsupportedOperation + + def __next__(self) -> str: + raise io.UnsupportedOperation + + def close(self) -> None: + pass + + def fileno(self) -> int: + raise OSError + + def flush(self) -> None: + pass + + def isatty(self) -> bool: + return self._isatty + + def read(self, n: int = 0) -> str: + raise io.UnsupportedOperation + + def readable(self) -> bool: + return False + + def readline(self, limit: int = 0) -> str: + raise io.UnsupportedOperation + + def readlines(self, hint: int = 0) -> list[str]: + raise io.UnsupportedOperation + + def seek(self, offset: int, whence: int = 0) -> int: + raise io.UnsupportedOperation + + def seekable(self) -> bool: + return False + + def tell(self) -> int: + raise io.UnsupportedOperation + + def truncate(self, size: int | None = 0) -> int: + raise io.UnsupportedOperation + + def write(self, output: str) -> int: + resp: dict[str, Any] = {self.output_key: output} + send(self.server, resp) + return len(output) + + def writable(self) -> bool: + return True + + def writelines(self, lines: Iterable[str]) -> None: + for s in lines: + self.write(s) diff --git a/.venv/lib/python3.12/site-packages/mypy/erasetype.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/erasetype.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..5ef0390 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/erasetype.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/erasetype.py b/.venv/lib/python3.12/site-packages/mypy/erasetype.py new file mode 100644 index 0000000..500d8fd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/erasetype.py @@ -0,0 +1,287 @@ +from __future__ import annotations + +from collections.abc import Container +from typing import Callable, cast + +from mypy.nodes import ARG_STAR, ARG_STAR2 +from mypy.types import ( + AnyType, + CallableType, + DeletedType, + ErasedType, + Instance, + LiteralType, + NoneType, + Overloaded, + Parameters, + ParamSpecType, 
+ PartialType, + ProperType, + TupleType, + Type, + TypeAliasType, + TypedDictType, + TypeOfAny, + TypeTranslator, + TypeType, + TypeVarId, + TypeVarTupleType, + TypeVarType, + TypeVisitor, + UnboundType, + UninhabitedType, + UnionType, + UnpackType, + get_proper_type, + get_proper_types, +) +from mypy.typevartuples import erased_vars + + +def erase_type(typ: Type) -> ProperType: + """Erase any type variables from a type. + + Also replace tuple types with the corresponding concrete types. + + Examples: + A -> A + B[X] -> B[Any] + Tuple[A, B] -> tuple + Callable[[A1, A2, ...], R] -> Callable[..., Any] + Type[X] -> Type[Any] + """ + typ = get_proper_type(typ) + return typ.accept(EraseTypeVisitor()) + + +class EraseTypeVisitor(TypeVisitor[ProperType]): + def visit_unbound_type(self, t: UnboundType) -> ProperType: + # TODO: replace with an assert after UnboundType can't leak from semantic analysis. + return AnyType(TypeOfAny.from_error) + + def visit_any(self, t: AnyType) -> ProperType: + return t + + def visit_none_type(self, t: NoneType) -> ProperType: + return t + + def visit_uninhabited_type(self, t: UninhabitedType) -> ProperType: + return t + + def visit_erased_type(self, t: ErasedType) -> ProperType: + return t + + def visit_partial_type(self, t: PartialType) -> ProperType: + # Should not get here. + raise RuntimeError("Cannot erase partial types") + + def visit_deleted_type(self, t: DeletedType) -> ProperType: + return t + + def visit_instance(self, t: Instance) -> ProperType: + args = erased_vars(t.type.defn.type_vars, TypeOfAny.special_form) + return Instance(t.type, args, t.line) + + def visit_type_var(self, t: TypeVarType) -> ProperType: + return AnyType(TypeOfAny.special_form) + + def visit_param_spec(self, t: ParamSpecType) -> ProperType: + return AnyType(TypeOfAny.special_form) + + def visit_parameters(self, t: Parameters) -> ProperType: + raise RuntimeError("Parameters should have been bound to a class") + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> ProperType: + # Likely, we can never get here because of aggressive erasure of types that + # can contain this, but better still return a valid replacement. + return t.tuple_fallback.copy_modified(args=[AnyType(TypeOfAny.special_form)]) + + def visit_unpack_type(self, t: UnpackType) -> ProperType: + return AnyType(TypeOfAny.special_form) + + def visit_callable_type(self, t: CallableType) -> ProperType: + # We must preserve the fallback type for overload resolution to work. + any_type = AnyType(TypeOfAny.special_form) + return CallableType( + arg_types=[any_type, any_type], + arg_kinds=[ARG_STAR, ARG_STAR2], + arg_names=[None, None], + ret_type=any_type, + fallback=t.fallback, + is_ellipsis_args=True, + implicit=True, + ) + + def visit_overloaded(self, t: Overloaded) -> ProperType: + return t.fallback.accept(self) + + def visit_tuple_type(self, t: TupleType) -> ProperType: + return t.partial_fallback.accept(self) + + def visit_typeddict_type(self, t: TypedDictType) -> ProperType: + return t.fallback.accept(self) + + def visit_literal_type(self, t: LiteralType) -> ProperType: + # The fallback for literal types should always be either + # something like int or str, or an enum class -- types that + # don't contain any TypeVars. So there's no need to visit it. 
+ return t + + def visit_union_type(self, t: UnionType) -> ProperType: + erased_items = [erase_type(item) for item in t.items] + from mypy.typeops import make_simplified_union + + return make_simplified_union(erased_items) + + def visit_type_type(self, t: TypeType) -> ProperType: + return TypeType.make_normalized( + t.item.accept(self), line=t.line, is_type_form=t.is_type_form + ) + + def visit_type_alias_type(self, t: TypeAliasType) -> ProperType: + raise RuntimeError("Type aliases should be expanded before accepting this visitor") + + +def erase_typevars(t: Type, ids_to_erase: Container[TypeVarId] | None = None) -> Type: + """Replace all type variables in a type with any, + or just the ones in the provided collection. + """ + + if ids_to_erase is None: + return t.accept(TypeVarEraser(None, AnyType(TypeOfAny.special_form))) + + def erase_id(id: TypeVarId) -> bool: + return id in ids_to_erase + + return t.accept(TypeVarEraser(erase_id, AnyType(TypeOfAny.special_form))) + + +def erase_meta_id(id: TypeVarId) -> bool: + return id.is_meta_var() + + +def replace_meta_vars(t: Type, target_type: Type) -> Type: + """Replace unification variables in a type with the target type.""" + return t.accept(TypeVarEraser(erase_meta_id, target_type)) + + +class TypeVarEraser(TypeTranslator): + """Implementation of type erasure""" + + def __init__(self, erase_id: Callable[[TypeVarId], bool] | None, replacement: Type) -> None: + super().__init__() + self.erase_id = erase_id + self.replacement = replacement + + def visit_type_var(self, t: TypeVarType) -> Type: + if self.erase_id is None or self.erase_id(t.id): + return self.replacement + return t + + # TODO: below two methods duplicate some logic with expand_type(). + # In fact, we may want to refactor this whole visitor to use expand_type(). + def visit_instance(self, t: Instance) -> Type: + result = super().visit_instance(t) + assert isinstance(result, ProperType) and isinstance(result, Instance) + if t.type.fullname == "builtins.tuple": + # Normalize Tuple[*Tuple[X, ...], ...] -> Tuple[X, ...] + arg = result.args[0] + if isinstance(arg, UnpackType): + unpacked = get_proper_type(arg.type) + if isinstance(unpacked, Instance): + assert unpacked.type.fullname == "builtins.tuple" + return unpacked + return result + + def visit_tuple_type(self, t: TupleType) -> Type: + result = super().visit_tuple_type(t) + assert isinstance(result, ProperType) and isinstance(result, TupleType) + if len(result.items) == 1: + # Normalize Tuple[*Tuple[X, ...]] -> Tuple[X, ...] + item = result.items[0] + if isinstance(item, UnpackType): + unpacked = get_proper_type(item.type) + if isinstance(unpacked, Instance): + assert unpacked.type.fullname == "builtins.tuple" + if result.partial_fallback.type.fullname != "builtins.tuple": + # If it is a subtype (like named tuple) we need to preserve it, + # this essentially mimics the logic in tuple_fallback(). + return result.partial_fallback.accept(self) + return unpacked + return result + + def visit_callable_type(self, t: CallableType) -> Type: + result = super().visit_callable_type(t) + assert isinstance(result, ProperType) and isinstance(result, CallableType) + # Usually this is done in semanal_typeargs.py, but erasure can create + # a non-normal callable from normal one. 
+ result.normalize_trivial_unpack() + return result + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type: + if self.erase_id is None or self.erase_id(t.id): + return t.tuple_fallback.copy_modified(args=[self.replacement]) + return t + + def visit_param_spec(self, t: ParamSpecType) -> Type: + # TODO: we should probably preserve prefix here. + if self.erase_id is None or self.erase_id(t.id): + return self.replacement + return t + + def visit_type_alias_type(self, t: TypeAliasType) -> Type: + # Type alias target can't contain bound type variables (not bound by the type + # alias itself), so it is safe to just erase the arguments. + return t.copy_modified(args=[a.accept(self) for a in t.args]) + + +def remove_instance_last_known_values(t: Type) -> Type: + return t.accept(LastKnownValueEraser()) + + +class LastKnownValueEraser(TypeTranslator): + """Removes the Literal[...] type that may be associated with any + Instance types.""" + + def visit_instance(self, t: Instance) -> Type: + if not t.last_known_value and not t.args: + return t + return t.copy_modified(args=[a.accept(self) for a in t.args], last_known_value=None) + + def visit_type_alias_type(self, t: TypeAliasType) -> Type: + # Type aliases can't contain literal values, because they are + # always constructed as explicit types. + return t + + def visit_union_type(self, t: UnionType) -> Type: + new = cast(UnionType, super().visit_union_type(t)) + # Erasure can result in many duplicate items; merge them. + # Call make_simplified_union only on lists of instance types + # that all have the same fullname, to avoid simplifying too + # much. + instances = [item for item in new.items if isinstance(get_proper_type(item), Instance)] + # Avoid merge in simple cases such as optional types. + if len(instances) > 1: + instances_by_name: dict[str, list[Instance]] = {} + p_new_items = get_proper_types(new.items) + for p_item in p_new_items: + if isinstance(p_item, Instance) and not p_item.args: + instances_by_name.setdefault(p_item.type.fullname, []).append(p_item) + merged: list[Type] = [] + for item in new.items: + orig_item = item + item = get_proper_type(item) + if isinstance(item, Instance) and not item.args: + types = instances_by_name.get(item.type.fullname) + if types is not None: + if len(types) == 1: + merged.append(item) + else: + from mypy.typeops import make_simplified_union + + merged.append(make_simplified_union(types)) + del instances_by_name[item.type.fullname] + else: + merged.append(orig_item) + return UnionType.make_union(merged) + return new diff --git a/.venv/lib/python3.12/site-packages/mypy/error_formatter.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/error_formatter.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..8f9bcb6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/error_formatter.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/error_formatter.py b/.venv/lib/python3.12/site-packages/mypy/error_formatter.py new file mode 100644 index 0000000..ffc6b67 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/error_formatter.py @@ -0,0 +1,37 @@ +"""Defines the different custom formats in which mypy can output.""" + +import json +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from mypy.errors import MypyError + + +class ErrorFormatter(ABC): + """Base class to define how errors are formatted before being printed.""" + + @abstractmethod + def report_error(self, 
error: "MypyError") -> str: + raise NotImplementedError + + +class JSONFormatter(ErrorFormatter): + """Formatter for basic JSON output format.""" + + def report_error(self, error: "MypyError") -> str: + """Prints out the errors as simple, static JSON lines.""" + return json.dumps( + { + "file": error.file_path, + "line": error.line, + "column": error.column, + "message": error.message, + "hint": None if len(error.hints) == 0 else "\n".join(error.hints), + "code": None if error.errorcode is None else error.errorcode.code, + "severity": error.severity, + } + ) + + +OUTPUT_CHOICES = {"json": JSONFormatter()} diff --git a/.venv/lib/python3.12/site-packages/mypy/errorcodes.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/errorcodes.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..5a00b2a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/errorcodes.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/errorcodes.py b/.venv/lib/python3.12/site-packages/mypy/errorcodes.py new file mode 100644 index 0000000..785b616 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/errorcodes.py @@ -0,0 +1,332 @@ +"""Classification of possible errors mypy can detect. + +These can be used for filtering specific errors. +""" + +from __future__ import annotations + +from collections import defaultdict +from typing import Final + +from mypy_extensions import mypyc_attr + +error_codes: dict[str, ErrorCode] = {} +sub_code_map: dict[str, set[str]] = defaultdict(set) + + +@mypyc_attr(allow_interpreted_subclasses=True) +class ErrorCode: + def __init__( + self, + code: str, + description: str, + category: str, + default_enabled: bool = True, + sub_code_of: ErrorCode | None = None, + ) -> None: + self.code = code + self.description = description + self.category = category + self.default_enabled = default_enabled + self.sub_code_of = sub_code_of + if sub_code_of is not None: + assert sub_code_of.sub_code_of is None, "Nested subcategories are not supported" + sub_code_map[sub_code_of.code].add(code) + error_codes[code] = self + + def __str__(self) -> str: + return f"" + + def __repr__(self) -> str: + """This doesn't fulfill the goals of repr but it's better than the default view.""" + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, ErrorCode): + return False + return self.code == other.code + + def __hash__(self) -> int: + return hash((self.code,)) + + +ATTR_DEFINED: Final = ErrorCode("attr-defined", "Check that attribute exists", "General") +NAME_DEFINED: Final = ErrorCode("name-defined", "Check that name is defined", "General") +CALL_ARG: Final = ErrorCode( + "call-arg", "Check number, names and kinds of arguments in calls", "General" +) +ARG_TYPE: Final = ErrorCode("arg-type", "Check argument types in calls", "General") +CALL_OVERLOAD: Final = ErrorCode( + "call-overload", "Check that an overload variant matches arguments", "General" +) +VALID_TYPE: Final = ErrorCode("valid-type", "Check that type (annotation) is valid", "General") +VAR_ANNOTATED: Final = ErrorCode( + "var-annotated", "Require variable annotation if type can't be inferred", "General" +) +OVERRIDE: Final = ErrorCode( + "override", "Check that method override is compatible with base class", "General" +) +RETURN: Final = ErrorCode("return", "Check that function always returns a value", "General") +RETURN_VALUE: Final = ErrorCode( + "return-value", "Check that return value is compatible with signature", "General" +) 
+ASSIGNMENT: Final = ErrorCode( + "assignment", "Check that assigned value is compatible with target", "General" +) +METHOD_ASSIGN: Final = ErrorCode( + "method-assign", + "Check that assignment target is not a method", + "General", + sub_code_of=ASSIGNMENT, +) +TYPE_ARG: Final = ErrorCode("type-arg", "Check that generic type arguments are present", "General") +TYPE_VAR: Final = ErrorCode("type-var", "Check that type variable values are valid", "General") +UNION_ATTR: Final = ErrorCode( + "union-attr", "Check that attribute exists in each item of a union", "General" +) +INDEX: Final = ErrorCode("index", "Check indexing operations", "General") +OPERATOR: Final = ErrorCode("operator", "Check that operator is valid for operands", "General") +LIST_ITEM: Final = ErrorCode( + "list-item", "Check list items in a list expression [item, ...]", "General" +) +DICT_ITEM: Final = ErrorCode( + "dict-item", "Check dict items in a dict expression {key: value, ...}", "General" +) +TYPEDDICT_ITEM: Final = ErrorCode( + "typeddict-item", "Check items when constructing TypedDict", "General" +) +TYPEDDICT_UNKNOWN_KEY: Final = ErrorCode( + "typeddict-unknown-key", + "Check unknown keys when constructing TypedDict", + "General", + sub_code_of=TYPEDDICT_ITEM, +) +HAS_TYPE: Final = ErrorCode( + "has-type", "Check that type of reference can be determined", "General" +) +IMPORT: Final = ErrorCode( + "import", "Require that imported module can be found or has stubs", "General" +) +IMPORT_NOT_FOUND: Final = ErrorCode( + "import-not-found", "Require that imported module can be found", "General", sub_code_of=IMPORT +) +IMPORT_UNTYPED: Final = ErrorCode( + "import-untyped", "Require that imported module has stubs", "General", sub_code_of=IMPORT +) +NO_REDEF: Final = ErrorCode("no-redef", "Check that each name is defined once", "General") +FUNC_RETURNS_VALUE: Final = ErrorCode( + "func-returns-value", "Check that called function returns a value in value context", "General" +) +ABSTRACT: Final = ErrorCode( + "abstract", "Prevent instantiation of classes with abstract attributes", "General" +) +TYPE_ABSTRACT: Final = ErrorCode( + "type-abstract", "Require only concrete classes where Type[...] is expected", "General" +) +VALID_NEWTYPE: Final = ErrorCode( + "valid-newtype", "Check that argument 2 to NewType is valid", "General" +) +STRING_FORMATTING: Final = ErrorCode( + "str-format", "Check that string formatting/interpolation is type-safe", "General" +) +STR_BYTES_PY3: Final = ErrorCode( + "str-bytes-safe", "Warn about implicit coercions related to bytes and string types", "General" +) +EXIT_RETURN: Final = ErrorCode( + "exit-return", "Warn about too general return type for '__exit__'", "General" +) +LITERAL_REQ: Final = ErrorCode("literal-required", "Check that value is a literal", "General") +UNUSED_COROUTINE: Final = ErrorCode( + "unused-coroutine", "Ensure that all coroutines are used", "General" +) +EMPTY_BODY: Final = ErrorCode( + "empty-body", + "A dedicated error code to opt out return errors for empty/trivial bodies", + "General", +) +SAFE_SUPER: Final = ErrorCode( + "safe-super", "Warn about calls to abstract methods with empty/trivial bodies", "General" +) +TOP_LEVEL_AWAIT: Final = ErrorCode( + "top-level-await", "Warn about top level await expressions", "General" +) +AWAIT_NOT_ASYNC: Final = ErrorCode( + "await-not-async", 'Warn about "await" outside coroutine ("async def")', "General" +) +# These error codes aren't enabled by default. 
+NO_UNTYPED_DEF: Final = ErrorCode( + "no-untyped-def", "Check that every function has an annotation", "General" +) +NO_UNTYPED_CALL: Final = ErrorCode( + "no-untyped-call", + "Disallow calling functions without type annotations from annotated functions", + "General", +) +REDUNDANT_CAST: Final = ErrorCode( + "redundant-cast", "Check that cast changes type of expression", "General" +) +ASSERT_TYPE: Final = ErrorCode("assert-type", "Check that assert_type() call succeeds", "General") +COMPARISON_OVERLAP: Final = ErrorCode( + "comparison-overlap", "Check that types in comparisons and 'in' expressions overlap", "General" +) +NO_ANY_UNIMPORTED: Final = ErrorCode( + "no-any-unimported", 'Reject "Any" types from unfollowed imports', "General" +) +NO_ANY_RETURN: Final = ErrorCode( + "no-any-return", + 'Reject returning value with "Any" type if return type is not "Any"', + "General", +) +UNREACHABLE: Final = ErrorCode( + "unreachable", "Warn about unreachable statements or expressions", "General" +) +ANNOTATION_UNCHECKED: Final = ErrorCode( + "annotation-unchecked", "Notify about type annotations in unchecked functions", "General" +) +TYPEDDICT_READONLY_MUTATED: Final = ErrorCode( + "typeddict-readonly-mutated", "TypedDict's ReadOnly key is mutated", "General" +) +POSSIBLY_UNDEFINED: Final = ErrorCode( + "possibly-undefined", + "Warn about variables that are defined only in some execution paths", + "General", + default_enabled=False, +) +REDUNDANT_EXPR: Final = ErrorCode( + "redundant-expr", "Warn about redundant expressions", "General", default_enabled=False +) +TRUTHY_BOOL: Final = ErrorCode( + "truthy-bool", + "Warn about expressions that could always evaluate to true in boolean contexts", + "General", + default_enabled=False, +) +TRUTHY_FUNCTION: Final = ErrorCode( + "truthy-function", + "Warn about function that always evaluate to true in boolean contexts", + "General", +) +TRUTHY_ITERABLE: Final = ErrorCode( + "truthy-iterable", + "Warn about Iterable expressions that could always evaluate to true in boolean contexts", + "General", + default_enabled=False, +) +NAME_MATCH: Final = ErrorCode( + "name-match", "Check that type definition has consistent naming", "General" +) +NO_OVERLOAD_IMPL: Final = ErrorCode( + "no-overload-impl", + "Check that overloaded functions outside stub files have an implementation", + "General", +) +IGNORE_WITHOUT_CODE: Final = ErrorCode( + "ignore-without-code", + "Warn about '# type: ignore' comments which do not have error codes", + "General", + default_enabled=False, +) +UNUSED_AWAITABLE: Final = ErrorCode( + "unused-awaitable", + "Ensure that all awaitable values are used", + "General", + default_enabled=False, +) +REDUNDANT_SELF_TYPE: Final = ErrorCode( + "redundant-self", + "Warn about redundant Self type annotations on method first argument", + "General", + default_enabled=False, +) +USED_BEFORE_DEF: Final = ErrorCode( + "used-before-def", "Warn about variables that are used before they are defined", "General" +) +UNUSED_IGNORE: Final = ErrorCode( + "unused-ignore", "Ensure that all type ignores are used", "General", default_enabled=False +) +EXPLICIT_OVERRIDE_REQUIRED: Final = ErrorCode( + "explicit-override", + "Require @override decorator if method is overriding a base class method", + "General", + default_enabled=False, +) +UNIMPORTED_REVEAL: Final = ErrorCode( + "unimported-reveal", + "Require explicit import from typing or typing_extensions for reveal_type", + "General", + default_enabled=False, +) +MUTABLE_OVERRIDE: Final = ErrorCode( + 
"mutable-override", + "Reject covariant overrides for mutable attributes", + "General", + default_enabled=False, +) +EXHAUSTIVE_MATCH: Final = ErrorCode( + "exhaustive-match", + "Reject match statements that are not exhaustive", + "General", + default_enabled=False, +) +METACLASS: Final = ErrorCode("metaclass", "Ensure that metaclass is valid", "General") +MAYBE_UNRECOGNIZED_STR_TYPEFORM: Final = ErrorCode( + "maybe-unrecognized-str-typeform", + "Error when a string is used where a TypeForm is expected but a string annotation cannot be recognized", + "General", +) + +# Syntax errors are often blocking. +SYNTAX: Final = ErrorCode("syntax", "Report syntax errors", "General") + +# This is an internal marker code for a whole-file ignore. It is not intended to +# be user-visible. +FILE: Final = ErrorCode("file", "Internal marker for a whole file being ignored", "General") +del error_codes[FILE.code] + +# This is a catch-all for remaining uncategorized errors. +MISC: Final = ErrorCode("misc", "Miscellaneous other checks", "General") + +OVERLOAD_CANNOT_MATCH: Final = ErrorCode( + "overload-cannot-match", + "Warn if an @overload signature can never be matched", + "General", + sub_code_of=MISC, +) + +OVERLOAD_OVERLAP: Final = ErrorCode( + "overload-overlap", + "Warn if multiple @overload variants overlap in unsafe ways", + "General", + sub_code_of=MISC, +) + +PROPERTY_DECORATOR: Final = ErrorCode( + "prop-decorator", + "Decorators on top of @property are not supported", + "General", + sub_code_of=MISC, +) + +UNTYPED_DECORATOR: Final = ErrorCode( + "untyped-decorator", "Error if an untyped decorator makes a typed function untyped", "General" +) + +NARROWED_TYPE_NOT_SUBTYPE: Final = ErrorCode( + "narrowed-type-not-subtype", + "Warn if a TypeIs function's narrowed type is not a subtype of the original type", + "General", +) + +EXPLICIT_ANY: Final = ErrorCode( + "explicit-any", "Warn about explicit Any type annotations", "General" +) + +DEPRECATED: Final = ErrorCode( + "deprecated", + "Warn when importing or using deprecated (overloaded) functions, methods or classes", + "General", + default_enabled=False, +) + +# This copy will not include any error codes defined later in the plugins. 
+mypy_error_codes = error_codes.copy() diff --git a/.venv/lib/python3.12/site-packages/mypy/errors.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/errors.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..679cd36 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/errors.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/errors.py b/.venv/lib/python3.12/site-packages/mypy/errors.py new file mode 100644 index 0000000..ce5c6cc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/errors.py @@ -0,0 +1,1411 @@ +from __future__ import annotations + +import os.path +import sys +import traceback +from collections import defaultdict +from collections.abc import Iterable, Iterator +from itertools import chain +from typing import Callable, Final, NoReturn, Optional, TextIO, TypeVar +from typing_extensions import Literal, Self, TypeAlias as _TypeAlias + +from mypy import errorcodes as codes +from mypy.error_formatter import ErrorFormatter +from mypy.errorcodes import IMPORT, IMPORT_NOT_FOUND, IMPORT_UNTYPED, ErrorCode, mypy_error_codes +from mypy.nodes import Context +from mypy.options import Options +from mypy.scope import Scope +from mypy.types import Type +from mypy.util import DEFAULT_SOURCE_OFFSET, is_typeshed_file +from mypy.version import __version__ as mypy_version + +T = TypeVar("T") + +# Show error codes for some note-level messages (these usually appear alone +# and not as a comment for a previous error-level message). +SHOW_NOTE_CODES: Final = {codes.ANNOTATION_UNCHECKED, codes.DEPRECATED} + +# Do not add notes with links to error code docs to errors with these codes. +# We can tweak this set as we get more experience about what is helpful and what is not. +HIDE_LINK_CODES: Final = { + # This is a generic error code, so it has no useful docs + codes.MISC, + # These are trivial and have some custom notes (e.g. for list being invariant) + codes.ASSIGNMENT, + codes.ARG_TYPE, + codes.RETURN_VALUE, + # Undefined name/attribute errors are self-explanatory + codes.ATTR_DEFINED, + codes.NAME_DEFINED, + # Overrides have a custom link to docs + codes.OVERRIDE, +} + +BASE_RTD_URL: Final = "https://mypy.rtfd.io/en/stable/_refs.html#code" + +# Keep track of the original error code when the error code of a message is changed. +# This is used to give notes about out-of-date "type: ignore" comments. +original_error_codes: Final = {codes.LITERAL_REQ: codes.MISC, codes.TYPE_ABSTRACT: codes.MISC} + + +class ErrorInfo: + """Representation of a single error message.""" + + # Description of a sequence of imports that refer to the source file + # related to this error. Each item is a (path, line number) tuple. + import_ctx: list[tuple[str, int]] + + # The path to source file that was the source of this error. + file = "" + + # The fully-qualified id of the source module for this error. + module: str | None = None + + # The name of the type in which this error is located at. + type: str | None = "" # Unqualified, may be None + + # The name of the function or member in which this error is located at. + function_or_member: str | None = "" # Unqualified, may be None + + # The line number related to this error within file. + line = 0 # -1 if unknown + + # The column number related to this error with file. + column = 0 # -1 if unknown + + # The end line number related to this error within file. + end_line = 0 # -1 if unknown + + # The end column number related to this error with file. 
+ end_column = 0 # -1 if unknown + + # Either 'error' or 'note' + severity = "" + + # The error message. + message = "" + + # The error code. + code: ErrorCode | None = None + + # If True, we should halt build after the file that generated this error. + blocker = False + + # Only report this particular messages once per program. + only_once = False + + # Actual origin of the error message as tuple (path, line number, end line number) + # If end line number is unknown, use line number. + origin: tuple[str, Iterable[int]] + + # Fine-grained incremental target where this was reported + target: str | None = None + + # If True, don't show this message in output, but still record the error (needed + # by mypy daemon) + hidden = False + + # For notes, specifies (optionally) the error this note is attached to. This is used to + # simplify error code matching and de-duplication logic for complex multi-line notes. + parent_error: ErrorInfo | None = None + + def __init__( + self, + import_ctx: list[tuple[str, int]], + *, + file: str, + module: str | None, + typ: str | None, + function_or_member: str | None, + line: int, + column: int, + end_line: int, + end_column: int, + severity: str, + message: str, + code: ErrorCode | None, + blocker: bool, + only_once: bool, + origin: tuple[str, Iterable[int]] | None = None, + target: str | None = None, + priority: int = 0, + parent_error: ErrorInfo | None = None, + ) -> None: + self.import_ctx = import_ctx + self.file = file + self.module = module + self.type = typ + self.function_or_member = function_or_member + self.line = line + self.column = column + self.end_line = end_line + self.end_column = end_column + self.severity = severity + self.message = message + self.code = code + self.blocker = blocker + self.only_once = only_once + self.origin = origin or (file, [line]) + self.target = target + self.priority = priority + if parent_error is not None: + assert severity == "note", "Only notes can specify parent errors" + self.parent_error = parent_error + + +# Type used internally to represent errors: +# (path, line, column, end_line, end_column, severity, message, code) +ErrorTuple: _TypeAlias = tuple[Optional[str], int, int, int, int, str, str, Optional[ErrorCode]] + + +class ErrorWatcher: + """Context manager that can be used to keep track of new errors recorded + around a given operation. + + Errors maintain a stack of such watchers. The handler is called starting + at the top of the stack, and is propagated down the stack unless filtered + out by one of the ErrorWatcher instances. 
+ """ + + # public attribute for the special treatment of `reveal_type` by + # `MessageBuilder.reveal_type`: + filter_revealed_type: bool + + def __init__( + self, + errors: Errors, + *, + filter_errors: bool | Callable[[str, ErrorInfo], bool] = False, + save_filtered_errors: bool = False, + filter_deprecated: bool = False, + filter_revealed_type: bool = False, + ) -> None: + self.errors = errors + self._has_new_errors = False + self._filter = filter_errors + self._filter_deprecated = filter_deprecated + self.filter_revealed_type = filter_revealed_type + self._filtered: list[ErrorInfo] | None = [] if save_filtered_errors else None + + def __enter__(self) -> Self: + self.errors._watchers.append(self) + return self + + def __exit__(self, exc_type: object, exc_val: object, exc_tb: object) -> Literal[False]: + last = self.errors._watchers.pop() + assert last == self + return False + + def on_error(self, file: str, info: ErrorInfo) -> bool: + """Handler called when a new error is recorded. + + The default implementation just sets the has_new_errors flag + + Return True to filter out the error, preventing it from being seen by other + ErrorWatcher further down the stack and from being recorded by Errors + """ + if info.code == codes.DEPRECATED: + # Deprecated is not a type error, so it is handled on opt-in basis here. + if not self._filter_deprecated: + return False + + self._has_new_errors = True + if isinstance(self._filter, bool): + should_filter = self._filter + elif callable(self._filter): + should_filter = self._filter(file, info) + else: + raise AssertionError(f"invalid error filter: {type(self._filter)}") + if should_filter and self._filtered is not None: + self._filtered.append(info) + + return should_filter + + def has_new_errors(self) -> bool: + return self._has_new_errors + + def filtered_errors(self) -> list[ErrorInfo]: + assert self._filtered is not None + return self._filtered + + +class IterationDependentErrors: + """An `IterationDependentErrors` instance serves to collect the `unreachable`, + `redundant-expr`, and `redundant-casts` errors, as well as the revealed types, + handled by the individual `IterationErrorWatcher` instances sequentially applied to + the same code section.""" + + # One set of `unreachable`, `redundant-expr`, and `redundant-casts` errors per + # iteration step. Meaning of the tuple items: ErrorCode, message, line, column, + # end_line, end_column. + uselessness_errors: list[set[tuple[ErrorCode, str, int, int, int, int]]] + + # One set of unreachable line numbers per iteration step. Not only the lines where + # the error report occurs but really all unreachable lines. + unreachable_lines: list[set[int]] + + # One list of revealed types for each `reveal_type` statement. Each created list + # can grow during the iteration. 
Meaning of the tuple items: line, column, + # end_line, end_column: + revealed_types: dict[tuple[int, int, int | None, int | None], list[Type]] + + def __init__(self) -> None: + self.uselessness_errors = [] + self.unreachable_lines = [] + self.revealed_types = defaultdict(list) + + def yield_uselessness_error_infos(self) -> Iterator[tuple[str, Context, ErrorCode]]: + """Report only those `unreachable`, `redundant-expr`, and `redundant-casts` + errors that could not be ruled out in any iteration step.""" + + persistent_uselessness_errors = set() + for candidate in set(chain(*self.uselessness_errors)): + if all( + (candidate in errors) or (candidate[2] in lines) + for errors, lines in zip(self.uselessness_errors, self.unreachable_lines) + ): + persistent_uselessness_errors.add(candidate) + for error_info in persistent_uselessness_errors: + context = Context(line=error_info[2], column=error_info[3]) + context.end_line = error_info[4] + context.end_column = error_info[5] + yield error_info[1], context, error_info[0] + + def yield_revealed_type_infos(self) -> Iterator[tuple[list[Type], Context]]: + """Yield all types revealed in at least one iteration step.""" + + for note_info, types in self.revealed_types.items(): + context = Context(line=note_info[0], column=note_info[1]) + context.end_line = note_info[2] + context.end_column = note_info[3] + yield types, context + + +class IterationErrorWatcher(ErrorWatcher): + """Error watcher that filters and separately collects `unreachable` errors, + `redundant-expr` and `redundant-casts` errors, and revealed types when analysing + code sections iteratively to help avoid making too-hasty reports.""" + + iteration_dependent_errors: IterationDependentErrors + + def __init__( + self, + errors: Errors, + iteration_dependent_errors: IterationDependentErrors, + *, + filter_errors: bool | Callable[[str, ErrorInfo], bool] = False, + save_filtered_errors: bool = False, + filter_deprecated: bool = False, + ) -> None: + super().__init__( + errors, + filter_errors=filter_errors, + save_filtered_errors=save_filtered_errors, + filter_deprecated=filter_deprecated, + ) + self.iteration_dependent_errors = iteration_dependent_errors + iteration_dependent_errors.uselessness_errors.append(set()) + iteration_dependent_errors.unreachable_lines.append(set()) + + def on_error(self, file: str, info: ErrorInfo) -> bool: + """Filter out the "iteration-dependent" errors and notes and store their + information to handle them after iteration is completed.""" + + iter_errors = self.iteration_dependent_errors + + if info.code in (codes.UNREACHABLE, codes.REDUNDANT_EXPR, codes.REDUNDANT_CAST): + iter_errors.uselessness_errors[-1].add( + (info.code, info.message, info.line, info.column, info.end_line, info.end_column) + ) + if info.code == codes.UNREACHABLE: + iter_errors.unreachable_lines[-1].update(range(info.line, info.end_line + 1)) + return True + + return super().on_error(file, info) + + +class Errors: + """Container for compile errors. + + This class generates and keeps tracks of compile errors and the + current error context (nested imports). + """ + + # Map from files to generated error messages. Is an OrderedDict so + # that it can be used to order messages based on the order the + # files were processed. 
+ error_info_map: dict[str, list[ErrorInfo]] + + # optimization for legacy codebases with many files with errors + has_blockers: set[str] + + # Files that we have reported the errors for + flushed_files: set[str] + + # Current error context: nested import context/stack, as a list of (path, line) pairs. + import_ctx: list[tuple[str, int]] + + # Path name prefix that is removed from all paths, if set. + ignore_prefix: str | None = None + + # Path to current file. + file: str = "" + + # Ignore some errors on these lines of each file + # (path -> line -> error-codes) + ignored_lines: dict[str, dict[int, list[str]]] + + # Lines that were skipped during semantic analysis e.g. due to ALWAYS_FALSE, MYPY_FALSE, + # or platform/version checks. Those lines would not be type-checked. + skipped_lines: dict[str, set[int]] + + # Lines on which an error was actually ignored. + used_ignored_lines: dict[str, dict[int, list[str]]] + + # Files where all errors should be ignored. + ignored_files: set[str] + + # Collection of reported only_once messages. + only_once_messages: set[str] + + # Set to True to show "In function "foo":" messages. + show_error_context: bool = False + + # Set to True to show column numbers in error messages. + show_column_numbers: bool = False + + # Set to True to show end line and end column in error messages. + # This implies `show_column_numbers`. + show_error_end: bool = False + + # Set to True to show absolute file paths in error messages. + show_absolute_path: bool = False + + # State for keeping track of the current fine-grained incremental mode target. + # (See mypy.server.update for more about targets.) + # Current module id. + target_module: str | None = None + scope: Scope | None = None + + # Have we seen an import-related error so far? If yes, we filter out other messages + # in some cases to avoid reporting huge numbers of errors. + seen_import_error = False + + _watchers: list[ErrorWatcher] + + def __init__( + self, + options: Options, + *, + read_source: Callable[[str], list[str] | None] | None = None, + hide_error_codes: bool | None = None, + ) -> None: + self.options = options + self.hide_error_codes = ( + hide_error_codes if hide_error_codes is not None else options.hide_error_codes + ) + # We use fscache to read source code when showing snippets. + self.read_source = read_source + self.initialize() + + def initialize(self) -> None: + self.error_info_map = {} + self.flushed_files = set() + self.import_ctx = [] + self.function_or_member = [None] + self.ignored_lines = {} + self.skipped_lines = {} + self.used_ignored_lines = defaultdict(lambda: defaultdict(list)) + self.ignored_files = set() + self.only_once_messages = set() + self.has_blockers = set() + self.scope = None + self.target_module = None + self.seen_import_error = False + self._watchers = [] + + def reset(self) -> None: + self.initialize() + + def set_ignore_prefix(self, prefix: str) -> None: + """Set path prefix that will be removed from all paths.""" + prefix = os.path.normpath(prefix) + # Add separator to the end, if not given. 
+ if os.path.basename(prefix) != "": + prefix += os.sep + self.ignore_prefix = prefix + + def simplify_path(self, file: str) -> str: + if self.options.show_absolute_path: + return os.path.abspath(file) + else: + file = os.path.normpath(file) + return remove_path_prefix(file, self.ignore_prefix) + + def set_file( + self, file: str, module: str | None, options: Options, scope: Scope | None = None + ) -> None: + """Set the path and module id of the current file.""" + # The path will be simplified later, in render_messages. That way + # * 'file' is always a key that uniquely identifies a source file + # that mypy read (simplified paths might not be unique); and + # * we only have to simplify in one place, while still supporting + # reporting errors for files other than the one currently being + # processed. + self.file = file + self.target_module = module + self.scope = scope + self.options = options + + def set_file_ignored_lines( + self, file: str, ignored_lines: dict[int, list[str]], ignore_all: bool = False + ) -> None: + self.ignored_lines[file] = ignored_lines + if ignore_all: + self.ignored_files.add(file) + + def set_skipped_lines(self, file: str, skipped_lines: set[int]) -> None: + self.skipped_lines[file] = skipped_lines + + def current_target(self) -> str | None: + """Retrieves the current target from the associated scope. + + If there is no associated scope, use the target module.""" + if self.scope is not None: + return self.scope.current_target() + return self.target_module + + def current_module(self) -> str | None: + return self.target_module + + def import_context(self) -> list[tuple[str, int]]: + """Return a copy of the import context.""" + return self.import_ctx.copy() + + def set_import_context(self, ctx: list[tuple[str, int]]) -> None: + """Replace the entire import context with a new value.""" + self.import_ctx = ctx.copy() + + def report( + self, + line: int, + column: int | None, + message: str, + code: ErrorCode | None = None, + *, + blocker: bool = False, + severity: str = "error", + file: str | None = None, + only_once: bool = False, + origin_span: Iterable[int] | None = None, + offset: int = 0, + end_line: int | None = None, + end_column: int | None = None, + parent_error: ErrorInfo | None = None, + ) -> ErrorInfo: + """Report message at the given line using the current error context. + + Args: + line: line number of error + column: column number of error + message: message to report + code: error code (defaults to 'misc'; not shown for notes) + blocker: if True, don't continue analysis after this error + severity: 'error' or 'note' + file: if non-None, override current file as context + only_once: if True, only report this exact message once per build + origin_span: if non-None, override current context as origin + (type: ignores have effect here) + end_line: if non-None, override current context as end + parent_error: an error this note is attached to (for notes only). 
+ """ + if self.scope: + type = self.scope.current_type_name() + if self.scope.ignored > 0: + type = None # Omit type context if nested function + function = self.scope.current_function_name() + else: + type = None + function = None + + if column is None: + column = -1 + if end_column is None: + if column == -1: + end_column = -1 + else: + end_column = column + 1 + + if file is None: + file = self.file + if offset: + message = " " * offset + message + + if origin_span is None: + origin_span = [line] + + if end_line is None: + end_line = line + + code = code or (parent_error.code if parent_error else None) + code = code or (codes.MISC if not blocker else None) + + info = ErrorInfo( + import_ctx=self.import_context(), + file=file, + module=self.current_module(), + typ=type, + function_or_member=function, + line=line, + column=column, + end_line=end_line, + end_column=end_column, + severity=severity, + message=message, + code=code, + blocker=blocker, + only_once=only_once, + origin=(self.file, origin_span), + target=self.current_target(), + parent_error=parent_error, + ) + self.add_error_info(info) + return info + + def _add_error_info(self, file: str, info: ErrorInfo) -> None: + assert file not in self.flushed_files + # process the stack of ErrorWatchers before modifying any internal state + # in case we need to filter out the error entirely + if self._filter_error(file, info): + return + if file not in self.error_info_map: + self.error_info_map[file] = [] + self.error_info_map[file].append(info) + if info.blocker: + self.has_blockers.add(file) + if info.code in (IMPORT, IMPORT_UNTYPED, IMPORT_NOT_FOUND): + self.seen_import_error = True + + def get_watchers(self) -> Iterator[ErrorWatcher]: + """Yield the `ErrorWatcher` stack from top to bottom.""" + i = len(self._watchers) + while i > 0: + i -= 1 + yield self._watchers[i] + + def _filter_error(self, file: str, info: ErrorInfo) -> bool: + """ + process ErrorWatcher stack from top to bottom, + stopping early if error needs to be filtered out + """ + return any(w.on_error(file, info) for w in self.get_watchers()) + + def add_error_info(self, info: ErrorInfo) -> None: + file, lines = info.origin + # process the stack of ErrorWatchers before modifying any internal state + # in case we need to filter out the error entirely + # NB: we need to do this both here and in _add_error_info, otherwise we + # might incorrectly update the sets of ignored or only_once messages + if self._filter_error(file, info): + return + if not info.blocker: # Blockers cannot be ignored + if file in self.ignored_lines: + # Check each line in this context for "type: ignore" comments. + # line == end_line for most nodes, so we only loop once. + for scope_line in lines: + if self.is_ignored_error(scope_line, info, self.ignored_lines[file]): + err_code = info.code or codes.MISC + if not self.is_error_code_enabled(err_code): + # Error code is disabled - don't mark the current + # "type: ignore" comment as used. + return + # Annotation requests us to ignore all errors on this line. 
+ self.used_ignored_lines[file][scope_line].append(err_code.code) + return + if file in self.ignored_files: + return + if info.only_once: + if info.message in self.only_once_messages: + return + self.only_once_messages.add(info.message) + if ( + self.seen_import_error + and info.code not in (IMPORT, IMPORT_UNTYPED, IMPORT_NOT_FOUND) + and self.has_many_errors() + ): + # Missing stubs can easily cause thousands of errors about + # Any types, especially when upgrading to mypy 0.900, + # which no longer bundles third-party library stubs. Avoid + # showing too many errors to make it easier to see + # import-related errors. + info.hidden = True + self.report_hidden_errors(info) + self._add_error_info(file, info) + ignored_codes = self.ignored_lines.get(file, {}).get(info.line, []) + if ignored_codes and info.code: + # Something is ignored on the line, but not this error, so maybe the error + # code is incorrect. + msg = f'Error code "{info.code.code}" not covered by "type: ignore" comment' + if info.code in original_error_codes: + # If there seems to be a "type: ignore" with a stale error + # code, report a more specific note. + old_code = original_error_codes[info.code].code + if old_code in ignored_codes: + msg = ( + f'Error code changed to {info.code.code}; "type: ignore" comment ' + + "may be out of date" + ) + note = ErrorInfo( + import_ctx=info.import_ctx, + file=info.file, + module=info.module, + typ=info.type, + function_or_member=info.function_or_member, + line=info.line, + column=info.column, + end_line=info.end_line, + end_column=info.end_column, + severity="note", + message=msg, + code=None, + blocker=False, + only_once=False, + ) + self._add_error_info(file, note) + if ( + self.options.show_error_code_links + and not self.options.hide_error_codes + and info.code is not None + and info.code not in HIDE_LINK_CODES + and info.code.code in mypy_error_codes + ): + message = f"See {BASE_RTD_URL}-{info.code.code} for more info" + if message in self.only_once_messages: + return + self.only_once_messages.add(message) + info = ErrorInfo( + import_ctx=info.import_ctx, + file=info.file, + module=info.module, + typ=info.type, + function_or_member=info.function_or_member, + line=info.line, + column=info.column, + end_line=info.end_line, + end_column=info.end_column, + severity="note", + message=message, + code=info.code, + blocker=False, + only_once=True, + priority=20, + ) + self._add_error_info(file, info) + + def has_many_errors(self) -> bool: + if self.options.many_errors_threshold < 0: + return False + if len(self.error_info_map) >= self.options.many_errors_threshold: + return True + if ( + sum(len(errors) for errors in self.error_info_map.values()) + >= self.options.many_errors_threshold + ): + return True + return False + + def report_hidden_errors(self, info: ErrorInfo) -> None: + message = ( + "(Skipping most remaining errors due to unresolved imports or missing stubs; " + + "fix these first)" + ) + if message in self.only_once_messages: + return + self.only_once_messages.add(message) + new_info = ErrorInfo( + import_ctx=info.import_ctx, + file=info.file, + module=info.module, + typ=None, + function_or_member=None, + line=info.line, + column=info.column, + end_line=info.end_line, + end_column=info.end_column, + severity="note", + message=message, + code=None, + blocker=False, + only_once=True, + origin=info.origin, + target=info.target, + ) + self._add_error_info(info.origin[0], new_info) + + def is_ignored_error(self, line: int, info: ErrorInfo, ignores: dict[int, list[str]]) -> bool: 
+ if info.blocker: + # Blocking errors can never be ignored + return False + if info.code and not self.is_error_code_enabled(info.code): + return True + if line not in ignores: + return False + if not ignores[line]: + # Empty list means that we ignore all errors + return True + if info.code and self.is_error_code_enabled(info.code): + return ( + info.code.code in ignores[line] + or info.code.sub_code_of is not None + and info.code.sub_code_of.code in ignores[line] + ) + return False + + def is_error_code_enabled(self, error_code: ErrorCode) -> bool: + if self.options: + current_mod_disabled = self.options.disabled_error_codes + current_mod_enabled = self.options.enabled_error_codes + else: + current_mod_disabled = set() + current_mod_enabled = set() + + if error_code in current_mod_disabled: + return False + elif error_code in current_mod_enabled: + return True + elif error_code.sub_code_of is not None and error_code.sub_code_of in current_mod_disabled: + return False + else: + return error_code.default_enabled + + def clear_errors_in_targets(self, path: str, targets: set[str]) -> None: + """Remove errors in specific fine-grained targets within a file.""" + if path in self.error_info_map: + new_errors = [] + has_blocker = False + for info in self.error_info_map[path]: + if info.target not in targets: + new_errors.append(info) + has_blocker |= info.blocker + elif info.only_once: + self.only_once_messages.remove(info.message) + self.error_info_map[path] = new_errors + if not has_blocker and path in self.has_blockers: + self.has_blockers.remove(path) + + def generate_unused_ignore_errors(self, file: str) -> None: + if ( + is_typeshed_file(self.options.abs_custom_typeshed_dir if self.options else None, file) + or file in self.ignored_files + ): + return + ignored_lines = self.ignored_lines[file] + used_ignored_lines = self.used_ignored_lines[file] + for line, ignored_codes in ignored_lines.items(): + if line in self.skipped_lines[file]: + continue + if codes.UNUSED_IGNORE.code in ignored_codes: + continue + used_ignored_codes = set(used_ignored_lines[line]) + unused_ignored_codes = [c for c in ignored_codes if c not in used_ignored_codes] + # `ignore` is used + if not ignored_codes and used_ignored_codes: + continue + # All codes appearing in `ignore[...]` are used + if ignored_codes and not unused_ignored_codes: + continue + # Display detail only when `ignore[...]` specifies more than one error code + unused_codes_message = "" + if len(ignored_codes) > 1 and unused_ignored_codes: + unused_codes_message = f"[{', '.join(unused_ignored_codes)}]" + message = f'Unused "type: ignore{unused_codes_message}" comment' + for unused in unused_ignored_codes: + narrower = set(used_ignored_codes) & codes.sub_code_map[unused] + if narrower: + message += f", use narrower [{', '.join(narrower)}] instead of [{unused}] code" + # Don't use report since add_error_info will ignore the error! 
+ info = ErrorInfo( + import_ctx=self.import_context(), + file=file, + module=self.current_module(), + typ=None, + function_or_member=None, + line=line, + column=-1, + end_line=line, + end_column=-1, + severity="error", + message=message, + code=codes.UNUSED_IGNORE, + blocker=False, + only_once=False, + origin=(self.file, [line]), + target=self.target_module, + ) + self._add_error_info(file, info) + + def generate_ignore_without_code_errors( + self, file: str, is_warning_unused_ignores: bool + ) -> None: + if ( + is_typeshed_file(self.options.abs_custom_typeshed_dir if self.options else None, file) + or file in self.ignored_files + ): + return + + used_ignored_lines = self.used_ignored_lines[file] + + # If the whole file is ignored, ignore it. + if used_ignored_lines: + _, used_codes = min(used_ignored_lines.items()) + if codes.FILE.code in used_codes: + return + + for line, ignored_codes in self.ignored_lines[file].items(): + if ignored_codes: + continue + + # If the ignore is itself unused and that would be warned about, let + # that error stand alone + if is_warning_unused_ignores and not used_ignored_lines[line]: + continue + + codes_hint = "" + ignored_codes = sorted(set(used_ignored_lines[line])) + if ignored_codes: + codes_hint = f' (consider "type: ignore[{", ".join(ignored_codes)}]" instead)' + + message = f'"type: ignore" comment without error code{codes_hint}' + # Don't use report since add_error_info will ignore the error! + info = ErrorInfo( + import_ctx=self.import_context(), + file=file, + module=self.current_module(), + typ=None, + function_or_member=None, + line=line, + column=-1, + end_line=line, + end_column=-1, + severity="error", + message=message, + code=codes.IGNORE_WITHOUT_CODE, + blocker=False, + only_once=False, + origin=(self.file, [line]), + target=self.target_module, + ) + self._add_error_info(file, info) + + def num_messages(self) -> int: + """Return the number of generated messages.""" + return sum(len(x) for x in self.error_info_map.values()) + + def is_errors(self) -> bool: + """Are there any generated messages?""" + return bool(self.error_info_map) + + def is_blockers(self) -> bool: + """Are the any errors that are blockers?""" + return bool(self.has_blockers) + + def blocker_module(self) -> str | None: + """Return the module with a blocking error, or None if not possible.""" + for path in self.has_blockers: + for err in self.error_info_map[path]: + if err.blocker: + return err.module + return None + + def is_errors_for_file(self, file: str) -> bool: + """Are there any errors for the given file?""" + return file in self.error_info_map and file not in self.ignored_files + + def prefer_simple_messages(self) -> bool: + """Should we generate simple/fast error messages? + + Return True if errors are not shown to user, i.e. errors are ignored + or they are collected for internal use only. + + If True, we should prefer to generate a simple message quickly. + All normal errors should still be reported. + """ + if self.file in self.ignored_files: + # Errors ignored, so no point generating fancy messages + return True + if self._watchers: + _watcher = self._watchers[-1] + if _watcher._filter is True and _watcher._filtered is None: + # Errors are filtered + return True + return False + + def raise_error(self, use_stdout: bool = True) -> NoReturn: + """Raise a CompileError with the generated messages. + + Render the messages suitable for displaying. 
+ """ + # self.new_messages() will format all messages that haven't already + # been returned from a file_messages() call. + raise CompileError( + self.new_messages(), use_stdout=use_stdout, module_with_blocker=self.blocker_module() + ) + + def format_messages_default( + self, error_tuples: list[ErrorTuple], source_lines: list[str] | None + ) -> list[str]: + """Return a string list that represents the error messages. + + Use a form suitable for displaying to the user. If self.pretty + is True also append a relevant trimmed source code line (only for + severity 'error'). + """ + a: list[str] = [] + for file, line, column, end_line, end_column, severity, message, code in error_tuples: + s = "" + if file is not None: + if self.options.show_column_numbers and line >= 0 and column >= 0: + srcloc = f"{file}:{line}:{1 + column}" + if self.options.show_error_end and end_line >= 0 and end_column >= 0: + srcloc += f":{end_line}:{end_column}" + elif line >= 0: + srcloc = f"{file}:{line}" + else: + srcloc = file + s = f"{srcloc}: {severity}: {message}" + else: + s = message + if ( + not self.hide_error_codes + and code + and (severity != "note" or code in SHOW_NOTE_CODES) + ): + # If note has an error code, it is related to a previous error. Avoid + # displaying duplicate error codes. + s = f"{s} [{code.code}]" + a.append(s) + if self.options.pretty: + # Add source code fragment and a location marker. + if severity == "error" and source_lines and line > 0: + source_line = source_lines[line - 1] + source_line_expanded = source_line.expandtabs() + if column < 0: + # Something went wrong, take first non-empty column. + column = len(source_line) - len(source_line.lstrip()) + + # Shifts column after tab expansion + column = len(source_line[:column].expandtabs()) + end_column = len(source_line[:end_column].expandtabs()) + + # Note, currently coloring uses the offset to detect source snippets, + # so these offsets should not be arbitrary. + a.append(" " * DEFAULT_SOURCE_OFFSET + source_line_expanded) + marker = "^" + if end_line == line and end_column > column: + marker = f'^{"~" * (end_column - column - 1)}' + elif end_line != line: + # just highlight the first line instead + marker = f'^{"~" * (len(source_line_expanded) - column - 1)}' + a.append(" " * (DEFAULT_SOURCE_OFFSET + column) + marker) + return a + + def file_messages(self, path: str) -> list[ErrorTuple]: + """Return an error tuple list of new error messages from a given file.""" + if path not in self.error_info_map: + return [] + + error_info = self.error_info_map[path] + error_info = [info for info in error_info if not info.hidden] + error_info = self.remove_duplicates(self.sort_messages(error_info)) + return self.render_messages(error_info) + + def format_messages( + self, path: str, error_tuples: list[ErrorTuple], formatter: ErrorFormatter | None = None + ) -> list[str]: + """Return a string list of new error messages from a given file. + + Use a form suitable for displaying to the user. + """ + self.flushed_files.add(path) + if formatter is not None: + errors = create_errors(error_tuples) + return [formatter.report_error(err) for err in errors] + + source_lines = None + if self.options.pretty and self.read_source: + # Find shadow file mapping and read source lines if a shadow file exists for the given path. 
+ # If shadow file mapping is not found, read source lines + mapped_path = self.find_shadow_file_mapping(path) + if mapped_path: + source_lines = self.read_source(mapped_path) + else: + source_lines = self.read_source(path) + return self.format_messages_default(error_tuples, source_lines) + + def find_shadow_file_mapping(self, path: str) -> str | None: + """Return the shadow file path for a given source file path or None.""" + if self.options.shadow_file is None: + return None + + for i in self.options.shadow_file: + if i[0] == path: + return i[1] + return None + + def new_messages(self) -> list[str]: + """Return a string list of new error messages. + + Use a form suitable for displaying to the user. + Errors from different files are ordered based on the order in which + they first generated an error. + """ + msgs = [] + for path in self.error_info_map.keys(): + if path not in self.flushed_files: + error_tuples = self.file_messages(path) + msgs.extend(self.format_messages(path, error_tuples)) + return msgs + + def targets(self) -> set[str]: + """Return a set of all targets that contain errors.""" + # TODO: Make sure that either target is always defined or that not being defined + # is okay for fine-grained incremental checking. + return { + info.target for errs in self.error_info_map.values() for info in errs if info.target + } + + def render_messages(self, errors: list[ErrorInfo]) -> list[ErrorTuple]: + """Translate the messages into a sequence of tuples. + + Each tuple is of form (path, line, col, severity, message, code). + The rendered sequence includes information about error contexts. + The path item may be None. If the line item is negative, the + line number is not defined for the tuple. + """ + result: list[ErrorTuple] = [] + prev_import_context: list[tuple[str, int]] = [] + prev_function_or_member: str | None = None + prev_type: str | None = None + + for e in errors: + # Report module import context, if different from previous message. + if not self.options.show_error_context: + pass + elif e.import_ctx != prev_import_context: + last = len(e.import_ctx) - 1 + i = last + while i >= 0: + path, line = e.import_ctx[i] + fmt = "{}:{}: note: In module imported here" + if i < last: + fmt = "{}:{}: note: ... from here" + if i > 0: + fmt += "," + else: + fmt += ":" + # Remove prefix to ignore from path (if present) to + # simplify path. + path = remove_path_prefix(path, self.ignore_prefix) + result.append((None, -1, -1, -1, -1, "note", fmt.format(path, line), None)) + i -= 1 + + file = self.simplify_path(e.file) + + # Report context within a source file. 
+ if not self.options.show_error_context: + pass + elif e.function_or_member != prev_function_or_member or e.type != prev_type: + if e.function_or_member is None: + if e.type is None: + result.append((file, -1, -1, -1, -1, "note", "At top level:", None)) + else: + result.append( + (file, -1, -1, -1, -1, "note", f'In class "{e.type}":', None) + ) + else: + if e.type is None: + result.append( + ( + file, + -1, + -1, + -1, + -1, + "note", + f'In function "{e.function_or_member}":', + None, + ) + ) + else: + result.append( + ( + file, + -1, + -1, + -1, + -1, + "note", + 'In member "{}" of class "{}":'.format( + e.function_or_member, e.type + ), + None, + ) + ) + elif e.type != prev_type: + if e.type is None: + result.append((file, -1, -1, -1, -1, "note", "At top level:", None)) + else: + result.append((file, -1, -1, -1, -1, "note", f'In class "{e.type}":', None)) + + result.append( + (file, e.line, e.column, e.end_line, e.end_column, e.severity, e.message, e.code) + ) + + prev_import_context = e.import_ctx + prev_function_or_member = e.function_or_member + prev_type = e.type + + return result + + def sort_messages(self, errors: list[ErrorInfo]) -> list[ErrorInfo]: + """Sort an array of error messages locally by line number. + + I.e., sort a run of consecutive messages with the same + context by line number, but otherwise retain the general + ordering of the messages. + """ + result: list[ErrorInfo] = [] + i = 0 + while i < len(errors): + i0 = i + # Find neighbouring errors with the same context and file. + while ( + i + 1 < len(errors) + and errors[i + 1].import_ctx == errors[i].import_ctx + and errors[i + 1].file == errors[i].file + ): + i += 1 + i += 1 + + # Sort the errors specific to a file according to line number and column. + a = sorted(errors[i0:i], key=lambda x: (x.line, x.column)) + a = self.sort_within_context(a) + result.extend(a) + return result + + def sort_within_context(self, errors: list[ErrorInfo]) -> list[ErrorInfo]: + """For the same location decide which messages to show first/last. + + Currently, we only compare within the same error code, to decide the + order of various additional notes. + """ + result = [] + i = 0 + while i < len(errors): + i0 = i + # Find neighbouring errors with the same position and error code. + while ( + i + 1 < len(errors) + and errors[i + 1].line == errors[i].line + and errors[i + 1].column == errors[i].column + and errors[i + 1].end_line == errors[i].end_line + and errors[i + 1].end_column == errors[i].end_column + and errors[i + 1].code == errors[i].code + ): + i += 1 + i += 1 + + # Sort the messages specific to a given error by priority. + a = sorted(errors[i0:i], key=lambda x: x.priority) + result.extend(a) + return result + + def remove_duplicates(self, errors: list[ErrorInfo]) -> list[ErrorInfo]: + filtered_errors = [] + seen_by_line: defaultdict[int, set[tuple[str, str]]] = defaultdict(set) + removed = set() + for err in errors: + if err.parent_error is not None: + # Notes with specified parent are removed together with error below. + filtered_errors.append(err) + elif (err.severity, err.message) not in seen_by_line[err.line]: + filtered_errors.append(err) + seen_by_line[err.line].add((err.severity, err.message)) + else: + removed.add(err) + return [ + err + for err in filtered_errors + if err.parent_error is None or err.parent_error not in removed + ] + + +class CompileError(Exception): + """Exception raised when there is a compile error. + + It can be a parse, semantic analysis, type check or other + compilation-related error. 
+ + CompileErrors raised from an errors object carry all of the + messages that have not been reported out by error streaming. + This is patched up by build.build to contain either all error + messages (if errors were streamed) or none (if they were not). + + """ + + messages: list[str] + use_stdout = False + # Can be set in case there was a module with a blocking error + module_with_blocker: str | None = None + + def __init__( + self, messages: list[str], use_stdout: bool = False, module_with_blocker: str | None = None + ) -> None: + super().__init__("\n".join(messages)) + self.messages = messages + self.use_stdout = use_stdout + self.module_with_blocker = module_with_blocker + + +def remove_path_prefix(path: str, prefix: str | None) -> str: + """If path starts with prefix, return copy of path with the prefix removed. + Otherwise, return path. If path is None, return None. + """ + if prefix is not None and path.startswith(prefix): + return path[len(prefix) :] + else: + return path + + +def report_internal_error( + err: Exception, + file: str | None, + line: int, + errors: Errors, + options: Options, + stdout: TextIO | None = None, + stderr: TextIO | None = None, +) -> NoReturn: + """Report internal error and exit. + + This optionally starts pdb or shows a traceback. + """ + stdout = stdout or sys.stdout + stderr = stderr or sys.stderr + # Dump out errors so far, they often provide a clue. + # But catch unexpected errors rendering them. + try: + for msg in errors.new_messages(): + print(msg) + except Exception as e: + print("Failed to dump errors:", repr(e), file=stderr) + + # Compute file:line prefix for official-looking error messages. + if file: + if line: + prefix = f"{file}:{line}: " + else: + prefix = f"{file}: " + else: + prefix = "" + + # Print "INTERNAL ERROR" message. + print( + f"{prefix}error: INTERNAL ERROR --", + "Please try using mypy master on GitHub:\n" + "https://mypy.readthedocs.io/en/stable/common_issues.html" + "#using-a-development-mypy-build", + file=stderr, + ) + if options.show_traceback: + print("Please report a bug at https://github.com/python/mypy/issues", file=stderr) + else: + print( + "If this issue continues with mypy master, " + "please report a bug at https://github.com/python/mypy/issues", + file=stderr, + ) + print(f"version: {mypy_version}", file=stderr) + + # If requested, drop into pdb. This overrides show_tb. + if options.pdb: + print("Dropping into pdb", file=stderr) + import pdb + + pdb.post_mortem(sys.exc_info()[2]) + + # If requested, print traceback, else print note explaining how to get one. + if options.raise_exceptions: + raise err + if not options.show_traceback: + if not options.pdb: + print( + "{}: note: please use --show-traceback to print a traceback " + "when reporting a bug".format(prefix), + file=stderr, + ) + else: + tb = traceback.extract_stack()[:-2] + tb2 = traceback.extract_tb(sys.exc_info()[2]) + print("Traceback (most recent call last):") + for s in traceback.format_list(tb + tb2): + print(s.rstrip("\n")) + print(f"{type(err).__name__}: {err}", file=stdout) + print(f"{prefix}: note: use --pdb to drop into pdb", file=stderr) + + # Exit. The caller has nothing more to say. + # We use exit code 2 to signal that this is no ordinary error. 
+ raise SystemExit(2) + + +class MypyError: + def __init__( + self, + file_path: str, + line: int, + column: int, + message: str, + errorcode: ErrorCode | None, + severity: Literal["error", "note"], + ) -> None: + self.file_path = file_path + self.line = line + self.column = column + self.message = message + self.errorcode = errorcode + self.severity = severity + self.hints: list[str] = [] + + +# (file_path, line, column) +_ErrorLocation = tuple[str, int, int] + + +def create_errors(error_tuples: list[ErrorTuple]) -> list[MypyError]: + errors: list[MypyError] = [] + latest_error_at_location: dict[_ErrorLocation, MypyError] = {} + + for error_tuple in error_tuples: + file_path, line, column, _, _, severity, message, errorcode = error_tuple + if file_path is None: + continue + + assert severity in ("error", "note") + if severity == "note": + error_location = (file_path, line, column) + error = latest_error_at_location.get(error_location) + if error is None: + # This is purely a note, with no error correlated to it + error = MypyError(file_path, line, column, message, errorcode, severity="note") + errors.append(error) + continue + + error.hints.append(message) + + else: + error = MypyError(file_path, line, column, message, errorcode, severity="error") + errors.append(error) + error_location = (file_path, line, column) + latest_error_at_location[error_location] = error + + return errors diff --git a/.venv/lib/python3.12/site-packages/mypy/evalexpr.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/evalexpr.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..ef904a4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/evalexpr.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/evalexpr.py b/.venv/lib/python3.12/site-packages/mypy/evalexpr.py new file mode 100644 index 0000000..218d50e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/evalexpr.py @@ -0,0 +1,208 @@ +""" + +Evaluate an expression. + +Used by stubtest; in a separate file because things break if we don't +put it in a mypyc-compiled file. + +""" + +import ast +from typing import Final + +import mypy.nodes +from mypy.visitor import ExpressionVisitor + +UNKNOWN = object() + + +class _NodeEvaluator(ExpressionVisitor[object]): + def visit_int_expr(self, o: mypy.nodes.IntExpr) -> int: + return o.value + + def visit_str_expr(self, o: mypy.nodes.StrExpr) -> str: + return o.value + + def visit_bytes_expr(self, o: mypy.nodes.BytesExpr) -> object: + # The value of a BytesExpr is a string created from the repr() + # of the bytes object. Get the original bytes back. + try: + return ast.literal_eval(f"b'{o.value}'") + except SyntaxError: + return ast.literal_eval(f'b"{o.value}"') + + def visit_float_expr(self, o: mypy.nodes.FloatExpr) -> float: + return o.value + + def visit_complex_expr(self, o: mypy.nodes.ComplexExpr) -> object: + return o.value + + def visit_ellipsis(self, o: mypy.nodes.EllipsisExpr) -> object: + return Ellipsis + + def visit_star_expr(self, o: mypy.nodes.StarExpr) -> object: + return UNKNOWN + + def visit_name_expr(self, o: mypy.nodes.NameExpr) -> object: + if o.name == "True": + return True + elif o.name == "False": + return False + elif o.name == "None": + return None + # TODO: Handle more names by figuring out a way to hook into the + # symbol table. 
+ return UNKNOWN + + def visit_member_expr(self, o: mypy.nodes.MemberExpr) -> object: + return UNKNOWN + + def visit_yield_from_expr(self, o: mypy.nodes.YieldFromExpr) -> object: + return UNKNOWN + + def visit_yield_expr(self, o: mypy.nodes.YieldExpr) -> object: + return UNKNOWN + + def visit_call_expr(self, o: mypy.nodes.CallExpr) -> object: + return UNKNOWN + + def visit_op_expr(self, o: mypy.nodes.OpExpr) -> object: + return UNKNOWN + + def visit_comparison_expr(self, o: mypy.nodes.ComparisonExpr) -> object: + return UNKNOWN + + def visit_cast_expr(self, o: mypy.nodes.CastExpr) -> object: + return o.expr.accept(self) + + def visit_type_form_expr(self, o: mypy.nodes.TypeFormExpr) -> object: + return UNKNOWN + + def visit_assert_type_expr(self, o: mypy.nodes.AssertTypeExpr) -> object: + return o.expr.accept(self) + + def visit_reveal_expr(self, o: mypy.nodes.RevealExpr) -> object: + return UNKNOWN + + def visit_super_expr(self, o: mypy.nodes.SuperExpr) -> object: + return UNKNOWN + + def visit_unary_expr(self, o: mypy.nodes.UnaryExpr) -> object: + operand = o.expr.accept(self) + if operand is UNKNOWN: + return UNKNOWN + if o.op == "-": + if isinstance(operand, (int, float, complex)): + return -operand + elif o.op == "+": + if isinstance(operand, (int, float, complex)): + return +operand + elif o.op == "~": + if isinstance(operand, int): + return ~operand + elif o.op == "not": + if isinstance(operand, (bool, int, float, str, bytes)): + return not operand + return UNKNOWN + + def visit_assignment_expr(self, o: mypy.nodes.AssignmentExpr) -> object: + return o.value.accept(self) + + def visit_list_expr(self, o: mypy.nodes.ListExpr) -> object: + items = [item.accept(self) for item in o.items] + if all(item is not UNKNOWN for item in items): + return items + return UNKNOWN + + def visit_dict_expr(self, o: mypy.nodes.DictExpr) -> object: + items = [ + (UNKNOWN if key is None else key.accept(self), value.accept(self)) + for key, value in o.items + ] + if all(key is not UNKNOWN and value is not None for key, value in items): + return dict(items) + return UNKNOWN + + def visit_tuple_expr(self, o: mypy.nodes.TupleExpr) -> object: + items = [item.accept(self) for item in o.items] + if all(item is not UNKNOWN for item in items): + return tuple(items) + return UNKNOWN + + def visit_set_expr(self, o: mypy.nodes.SetExpr) -> object: + items = [item.accept(self) for item in o.items] + if all(item is not UNKNOWN for item in items): + return set(items) + return UNKNOWN + + def visit_index_expr(self, o: mypy.nodes.IndexExpr) -> object: + return UNKNOWN + + def visit_type_application(self, o: mypy.nodes.TypeApplication) -> object: + return UNKNOWN + + def visit_lambda_expr(self, o: mypy.nodes.LambdaExpr) -> object: + return UNKNOWN + + def visit_list_comprehension(self, o: mypy.nodes.ListComprehension) -> object: + return UNKNOWN + + def visit_set_comprehension(self, o: mypy.nodes.SetComprehension) -> object: + return UNKNOWN + + def visit_dictionary_comprehension(self, o: mypy.nodes.DictionaryComprehension) -> object: + return UNKNOWN + + def visit_generator_expr(self, o: mypy.nodes.GeneratorExpr) -> object: + return UNKNOWN + + def visit_slice_expr(self, o: mypy.nodes.SliceExpr) -> object: + return UNKNOWN + + def visit_conditional_expr(self, o: mypy.nodes.ConditionalExpr) -> object: + return UNKNOWN + + def visit_type_var_expr(self, o: mypy.nodes.TypeVarExpr) -> object: + return UNKNOWN + + def visit_paramspec_expr(self, o: mypy.nodes.ParamSpecExpr) -> object: + return UNKNOWN + + def 
visit_type_var_tuple_expr(self, o: mypy.nodes.TypeVarTupleExpr) -> object: + return UNKNOWN + + def visit_type_alias_expr(self, o: mypy.nodes.TypeAliasExpr) -> object: + return UNKNOWN + + def visit_namedtuple_expr(self, o: mypy.nodes.NamedTupleExpr) -> object: + return UNKNOWN + + def visit_enum_call_expr(self, o: mypy.nodes.EnumCallExpr) -> object: + return UNKNOWN + + def visit_typeddict_expr(self, o: mypy.nodes.TypedDictExpr) -> object: + return UNKNOWN + + def visit_newtype_expr(self, o: mypy.nodes.NewTypeExpr) -> object: + return UNKNOWN + + def visit__promote_expr(self, o: mypy.nodes.PromoteExpr) -> object: + return UNKNOWN + + def visit_await_expr(self, o: mypy.nodes.AwaitExpr) -> object: + return UNKNOWN + + def visit_temp_node(self, o: mypy.nodes.TempNode) -> object: + return UNKNOWN + + +_evaluator: Final = _NodeEvaluator() + + +def evaluate_expression(expr: mypy.nodes.Expression) -> object: + """Evaluate an expression at runtime. + + Return the result of the expression, or UNKNOWN if the expression cannot be + evaluated. + """ + return expr.accept(_evaluator) diff --git a/.venv/lib/python3.12/site-packages/mypy/expandtype.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/expandtype.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..8b7fd70 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/expandtype.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/expandtype.py b/.venv/lib/python3.12/site-packages/mypy/expandtype.py new file mode 100644 index 0000000..891ea4d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/expandtype.py @@ -0,0 +1,584 @@ +from __future__ import annotations + +from collections.abc import Iterable, Mapping +from typing import Final, TypeVar, cast, overload + +from mypy.nodes import ARG_STAR, FakeInfo, Var +from mypy.state import state +from mypy.types import ( + ANY_STRATEGY, + AnyType, + BoolTypeQuery, + CallableType, + DeletedType, + ErasedType, + FunctionLike, + Instance, + LiteralType, + NoneType, + Overloaded, + Parameters, + ParamSpecFlavor, + ParamSpecType, + PartialType, + ProperType, + TrivialSyntheticTypeTranslator, + TupleType, + Type, + TypeAliasType, + TypedDictType, + TypeOfAny, + TypeType, + TypeVarId, + TypeVarLikeType, + TypeVarTupleType, + TypeVarType, + UnboundType, + UninhabitedType, + UnionType, + UnpackType, + flatten_nested_unions, + get_proper_type, + split_with_prefix_and_suffix, +) +from mypy.typevartuples import split_with_instance + +# Solving the import cycle: +import mypy.type_visitor # ruff: isort: skip + +# WARNING: these functions should never (directly or indirectly) depend on +# is_subtype(), meet_types(), join_types() etc. +# TODO: add a static dependency test for this. + + +@overload +def expand_type(typ: CallableType, env: Mapping[TypeVarId, Type]) -> CallableType: ... + + +@overload +def expand_type(typ: ProperType, env: Mapping[TypeVarId, Type]) -> ProperType: ... + + +@overload +def expand_type(typ: Type, env: Mapping[TypeVarId, Type]) -> Type: ... + + +def expand_type(typ: Type, env: Mapping[TypeVarId, Type]) -> Type: + """Substitute any type variable references in a type given by a type + environment. + """ + return typ.accept(ExpandTypeVisitor(env)) + + +@overload +def expand_type_by_instance(typ: CallableType, instance: Instance) -> CallableType: ... + + +@overload +def expand_type_by_instance(typ: ProperType, instance: Instance) -> ProperType: ... 
+ + +@overload +def expand_type_by_instance(typ: Type, instance: Instance) -> Type: ... + + +def expand_type_by_instance(typ: Type, instance: Instance) -> Type: + """Substitute type variables in type using values from an Instance. + Type variables are considered to be bound by the class declaration.""" + if not instance.args and not instance.type.has_type_var_tuple_type: + return typ + else: + variables: dict[TypeVarId, Type] = {} + if instance.type.has_type_var_tuple_type: + assert instance.type.type_var_tuple_prefix is not None + assert instance.type.type_var_tuple_suffix is not None + + args_prefix, args_middle, args_suffix = split_with_instance(instance) + tvars_prefix, tvars_middle, tvars_suffix = split_with_prefix_and_suffix( + tuple(instance.type.defn.type_vars), + instance.type.type_var_tuple_prefix, + instance.type.type_var_tuple_suffix, + ) + tvar = tvars_middle[0] + assert isinstance(tvar, TypeVarTupleType) + variables = {tvar.id: TupleType(list(args_middle), tvar.tuple_fallback)} + instance_args = args_prefix + args_suffix + tvars = tvars_prefix + tvars_suffix + else: + tvars = tuple(instance.type.defn.type_vars) + instance_args = instance.args + + for binder, arg in zip(tvars, instance_args): + assert isinstance(binder, TypeVarLikeType) + variables[binder.id] = arg + + return expand_type(typ, variables) + + +F = TypeVar("F", bound=FunctionLike) + + +def freshen_function_type_vars(callee: F) -> F: + """Substitute fresh type variables for generic function type variables.""" + if isinstance(callee, CallableType): + if not callee.is_generic(): + return callee + tvs = [] + tvmap: dict[TypeVarId, Type] = {} + for v in callee.variables: + tv = v.new_unification_variable(v) + tvs.append(tv) + tvmap[v.id] = tv + fresh = expand_type(callee, tvmap).copy_modified(variables=tvs) + return cast(F, fresh) + else: + assert isinstance(callee, Overloaded) + fresh_overload = Overloaded([freshen_function_type_vars(item) for item in callee.items]) + return cast(F, fresh_overload) + + +class HasGenericCallable(BoolTypeQuery): + def __init__(self) -> None: + super().__init__(ANY_STRATEGY) + + def visit_callable_type(self, t: CallableType) -> bool: + return t.is_generic() or super().visit_callable_type(t) + + +# Share a singleton since this is performance sensitive +has_generic_callable: Final = HasGenericCallable() + + +T = TypeVar("T", bound=Type) + + +def freshen_all_functions_type_vars(t: T) -> T: + result: Type + has_generic_callable.reset() + if not t.accept(has_generic_callable): + return t # Fast path to avoid expensive freshening + else: + result = t.accept(FreshenCallableVisitor()) + assert isinstance(result, type(t)) + return result + + +class FreshenCallableVisitor(mypy.type_visitor.TypeTranslator): + def visit_callable_type(self, t: CallableType) -> Type: + result = super().visit_callable_type(t) + assert isinstance(result, ProperType) and isinstance(result, CallableType) + return freshen_function_type_vars(result) + + def visit_type_alias_type(self, t: TypeAliasType) -> Type: + # Same as for ExpandTypeVisitor + return t.copy_modified(args=[arg.accept(self) for arg in t.args]) + + +class ExpandTypeVisitor(TrivialSyntheticTypeTranslator): + """Visitor that substitutes type variables with values.""" + + variables: Mapping[TypeVarId, Type] # TypeVar id -> TypeVar value + + def __init__(self, variables: Mapping[TypeVarId, Type]) -> None: + super().__init__() + self.variables = variables + self.recursive_tvar_guard: dict[TypeVarId, Type | None] | None = None + + def visit_unbound_type(self, t: 
UnboundType) -> Type: + return t + + def visit_any(self, t: AnyType) -> Type: + return t + + def visit_none_type(self, t: NoneType) -> Type: + return t + + def visit_uninhabited_type(self, t: UninhabitedType) -> Type: + return t + + def visit_deleted_type(self, t: DeletedType) -> Type: + return t + + def visit_erased_type(self, t: ErasedType) -> Type: + # This may happen during type inference if some function argument + # type is a generic callable, and its erased form will appear in inferred + # constraints, then solver may check subtyping between them, which will trigger + # unify_generic_callables(), this is why we can get here. Another example is + # when inferring type of lambda in generic context, the lambda body contains + # a generic method in generic class. + return t + + def visit_instance(self, t: Instance) -> Type: + if len(t.args) == 0: + return t + + args = self.expand_type_tuple_with_unpack(t.args) + + if isinstance(t.type, FakeInfo): + # The type checker expands function definitions and bodies + # if they depend on constrained type variables but the body + # might contain a tuple type comment (e.g., # type: (int, float)), + # in which case 't.type' is not yet available. + # + # See: https://github.com/python/mypy/issues/16649 + return t.copy_modified(args=args) + + if t.type.fullname == "builtins.tuple": + # Normalize Tuple[*Tuple[X, ...], ...] -> Tuple[X, ...] + arg = args[0] + if isinstance(arg, UnpackType): + unpacked = get_proper_type(arg.type) + if isinstance(unpacked, Instance): + # TODO: this and similar asserts below may be unsafe because get_proper_type() + # may be called during semantic analysis before all invalid types are removed. + assert unpacked.type.fullname == "builtins.tuple" + args = list(unpacked.args) + return t.copy_modified(args=args) + + def visit_type_var(self, t: TypeVarType) -> Type: + # Normally upper bounds can't contain other type variables, the only exception is + # special type variable Self`0 <: C[T, S], where C is the class where Self is used. + if t.id.is_self(): + t = t.copy_modified(upper_bound=t.upper_bound.accept(self)) + repl = self.variables.get(t.id, t) + if isinstance(repl, ProperType) and isinstance(repl, Instance): + # TODO: do we really need to do this? + # If I try to remove this special-casing ~40 tests fail on reveal_type(). + return repl.copy_modified(last_known_value=None) + if isinstance(repl, TypeVarType) and repl.has_default(): + if self.recursive_tvar_guard is None: + self.recursive_tvar_guard = {} + if (tvar_id := repl.id) in self.recursive_tvar_guard: + return self.recursive_tvar_guard[tvar_id] or repl + self.recursive_tvar_guard[tvar_id] = None + repl.default = repl.default.accept(self) + expanded = repl.accept(self) # Note: `expanded is repl` may be true. + repl = repl if isinstance(expanded, TypeVarType) else expanded + self.recursive_tvar_guard[tvar_id] = repl + return repl + + def visit_param_spec(self, t: ParamSpecType) -> Type: + # Set prefix to something empty, so we don't duplicate it below. 
+ repl = self.variables.get(t.id, t.copy_modified(prefix=Parameters([], [], []))) + if isinstance(repl, ParamSpecType): + return repl.copy_modified( + flavor=t.flavor, + prefix=t.prefix.copy_modified( + arg_types=self.expand_types(t.prefix.arg_types) + repl.prefix.arg_types, + arg_kinds=t.prefix.arg_kinds + repl.prefix.arg_kinds, + arg_names=t.prefix.arg_names + repl.prefix.arg_names, + ), + ) + elif isinstance(repl, Parameters): + assert t.flavor == ParamSpecFlavor.BARE + return Parameters( + self.expand_types(t.prefix.arg_types) + repl.arg_types, + t.prefix.arg_kinds + repl.arg_kinds, + t.prefix.arg_names + repl.arg_names, + variables=[*t.prefix.variables, *repl.variables], + imprecise_arg_kinds=repl.imprecise_arg_kinds, + ) + else: + # We could encode Any as trivial parameters etc., but it would be too verbose. + # TODO: assert this is a trivial type, like Any, Never, or object. + return repl + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type: + # Sometimes solver may need to expand a type variable with (a copy of) itself + # (usually together with other TypeVars, but it is hard to filter out TypeVarTuples). + repl = self.variables.get(t.id, t) + if isinstance(repl, TypeVarTupleType): + return repl + elif isinstance(repl, ProperType) and isinstance(repl, (AnyType, UninhabitedType)): + # Some failed inference scenarios will try to set all type variables to Never. + # Instead of being picky and require all the callers to wrap them, + # do this here instead. + # Note: most cases when this happens are handled in expand unpack below, but + # in rare cases (e.g. ParamSpec containing Unpack star args) it may be skipped. + return t.tuple_fallback.copy_modified(args=[repl]) + raise NotImplementedError + + def visit_unpack_type(self, t: UnpackType) -> Type: + # It is impossible to reasonably implement visit_unpack_type, because + # unpacking inherently expands to something more like a list of types. + # + # Relevant sections that can call unpack should call expand_unpack() + # instead. + # However, if the item is a variadic tuple, we can simply carry it over. + # In particular, if we expand A[*tuple[T, ...]] with substitutions {T: str}, + # it is hard to assert this without getting proper type. Another important + # example is non-normalized types when called from semanal.py. + return UnpackType(t.type.accept(self)) + + def expand_unpack(self, t: UnpackType) -> list[Type]: + assert isinstance(t.type, TypeVarTupleType) + repl = get_proper_type(self.variables.get(t.type.id, t.type)) + if isinstance(repl, UnpackType): + repl = get_proper_type(repl.type) + if isinstance(repl, TupleType): + return repl.items + elif ( + isinstance(repl, Instance) + and repl.type.fullname == "builtins.tuple" + or isinstance(repl, TypeVarTupleType) + ): + return [UnpackType(typ=repl)] + elif isinstance(repl, (AnyType, UninhabitedType)): + # Replace *Ts = Any with *Ts = *tuple[Any, ...] and same for Never. + # These types may appear here as a result of user error or failed inference. 
+ return [UnpackType(t.type.tuple_fallback.copy_modified(args=[repl]))] + else: + raise RuntimeError(f"Invalid type replacement to expand: {repl}") + + def visit_parameters(self, t: Parameters) -> Type: + return t.copy_modified(arg_types=self.expand_types(t.arg_types)) + + def interpolate_args_for_unpack(self, t: CallableType, var_arg: UnpackType) -> list[Type]: + star_index = t.arg_kinds.index(ARG_STAR) + prefix = self.expand_types(t.arg_types[:star_index]) + suffix = self.expand_types(t.arg_types[star_index + 1 :]) + + var_arg_type = get_proper_type(var_arg.type) + new_unpack: Type + if isinstance(var_arg_type, TupleType): + # We have something like Unpack[Tuple[Unpack[Ts], X1, X2]] + expanded_tuple = var_arg_type.accept(self) + assert isinstance(expanded_tuple, ProperType) and isinstance(expanded_tuple, TupleType) + expanded_items = expanded_tuple.items + fallback = var_arg_type.partial_fallback + new_unpack = UnpackType(TupleType(expanded_items, fallback)) + elif isinstance(var_arg_type, TypeVarTupleType): + # We have plain Unpack[Ts] + fallback = var_arg_type.tuple_fallback + expanded_items = self.expand_unpack(var_arg) + new_unpack = UnpackType(TupleType(expanded_items, fallback)) + # Since get_proper_type() may be called in semanal.py before callable + # normalization happens, we need to also handle non-normal cases here. + elif isinstance(var_arg_type, Instance): + # we have something like Unpack[Tuple[Any, ...]] + new_unpack = UnpackType(var_arg.type.accept(self)) + else: + # We have invalid type in Unpack. This can happen when expanding aliases + # to Callable[[*Invalid], Ret] + new_unpack = AnyType(TypeOfAny.from_error, line=var_arg.line, column=var_arg.column) + return prefix + [new_unpack] + suffix + + def visit_callable_type(self, t: CallableType) -> CallableType: + param_spec = t.param_spec() + if param_spec is not None: + repl = self.variables.get(param_spec.id) + # If a ParamSpec in a callable type is substituted with a + # callable type, we can't use normal substitution logic, + # since ParamSpec is actually split into two components + # *P.args and **P.kwargs in the original type. Instead, we + # must expand both of them with all the argument types, + # kinds and names in the replacement. The return type in + # the replacement is ignored. + if isinstance(repl, Parameters): + # We need to expand both the types in the prefix and the ParamSpec itself + expanded = t.copy_modified( + arg_types=self.expand_types(t.arg_types[:-2]) + repl.arg_types, + arg_kinds=t.arg_kinds[:-2] + repl.arg_kinds, + arg_names=t.arg_names[:-2] + repl.arg_names, + ret_type=t.ret_type.accept(self), + type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None), + type_is=(t.type_is.accept(self) if t.type_is is not None else None), + imprecise_arg_kinds=(t.imprecise_arg_kinds or repl.imprecise_arg_kinds), + variables=[*repl.variables, *t.variables], + ) + var_arg = expanded.var_arg() + if var_arg is not None and isinstance(var_arg.typ, UnpackType): + # Sometimes we get new unpacks after expanding ParamSpec. + expanded.normalize_trivial_unpack() + return expanded + elif isinstance(repl, ParamSpecType): + # We're substituting one ParamSpec for another; this can mean that the prefix + # changes, e.g. substitute Concatenate[int, P] in place of Q. 
+ prefix = repl.prefix + clean_repl = repl.copy_modified(prefix=Parameters([], [], [])) + return t.copy_modified( + arg_types=self.expand_types(t.arg_types[:-2]) + + prefix.arg_types + + [ + clean_repl.with_flavor(ParamSpecFlavor.ARGS), + clean_repl.with_flavor(ParamSpecFlavor.KWARGS), + ], + arg_kinds=t.arg_kinds[:-2] + prefix.arg_kinds + t.arg_kinds[-2:], + arg_names=t.arg_names[:-2] + prefix.arg_names + t.arg_names[-2:], + ret_type=t.ret_type.accept(self), + from_concatenate=t.from_concatenate or bool(repl.prefix.arg_types), + imprecise_arg_kinds=(t.imprecise_arg_kinds or prefix.imprecise_arg_kinds), + ) + + var_arg = t.var_arg() + needs_normalization = False + if var_arg is not None and isinstance(var_arg.typ, UnpackType): + needs_normalization = True + arg_types = self.interpolate_args_for_unpack(t, var_arg.typ) + else: + arg_types = self.expand_types(t.arg_types) + expanded = t.copy_modified( + arg_types=arg_types, + ret_type=t.ret_type.accept(self), + type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None), + type_is=(t.type_is.accept(self) if t.type_is is not None else None), + ) + if needs_normalization: + return expanded.with_normalized_var_args() + return expanded + + def visit_overloaded(self, t: Overloaded) -> Type: + items: list[CallableType] = [] + for item in t.items: + new_item = item.accept(self) + assert isinstance(new_item, ProperType) + assert isinstance(new_item, CallableType) + items.append(new_item) + return Overloaded(items) + + def expand_type_list_with_unpack(self, typs: list[Type]) -> list[Type]: + """Expands a list of types that has an unpack.""" + items: list[Type] = [] + for item in typs: + if isinstance(item, UnpackType) and isinstance(item.type, TypeVarTupleType): + items.extend(self.expand_unpack(item)) + else: + items.append(item.accept(self)) + return items + + def expand_type_tuple_with_unpack(self, typs: tuple[Type, ...]) -> list[Type]: + """Expands a tuple of types that has an unpack.""" + # Micro-optimization: Specialized variant of expand_type_list_with_unpack + items: list[Type] = [] + for item in typs: + if isinstance(item, UnpackType) and isinstance(item.type, TypeVarTupleType): + items.extend(self.expand_unpack(item)) + else: + items.append(item.accept(self)) + return items + + def visit_tuple_type(self, t: TupleType) -> Type: + items = self.expand_type_list_with_unpack(t.items) + if len(items) == 1: + # Normalize Tuple[*Tuple[X, ...]] -> Tuple[X, ...] + item = items[0] + if isinstance(item, UnpackType): + unpacked = get_proper_type(item.type) + if isinstance(unpacked, Instance): + assert unpacked.type.fullname == "builtins.tuple" + if t.partial_fallback.type.fullname != "builtins.tuple": + # If it is a subtype (like named tuple) we need to preserve it, + # this essentially mimics the logic in tuple_fallback(). 
+ return t.partial_fallback.accept(self) + return unpacked + fallback = t.partial_fallback.accept(self) + assert isinstance(fallback, ProperType) and isinstance(fallback, Instance) + return t.copy_modified(items=items, fallback=fallback) + + def visit_typeddict_type(self, t: TypedDictType) -> Type: + if cached := self.get_cached(t): + return cached + fallback = t.fallback.accept(self) + assert isinstance(fallback, ProperType) and isinstance(fallback, Instance) + result = t.copy_modified(item_types=self.expand_types(t.items.values()), fallback=fallback) + self.set_cached(t, result) + return result + + def visit_literal_type(self, t: LiteralType) -> Type: + # TODO: Verify this implementation is correct + return t + + def visit_union_type(self, t: UnionType) -> Type: + # Use cache to avoid O(n**2) or worse expansion of types during translation + # (only for large unions, since caching adds overhead) + use_cache = len(t.items) > 3 + if use_cache and (cached := self.get_cached(t)): + return cached + + expanded = self.expand_types(t.items) + # After substituting for type variables in t.items, some resulting types + # might be subtypes of others, however calling make_simplified_union() + # can cause recursion, so we just remove strict duplicates. + simplified = UnionType.make_union( + remove_trivial(flatten_nested_unions(expanded)), t.line, t.column + ) + # This call to get_proper_type() is unfortunate but is required to preserve + # the invariant that ProperType will stay ProperType after applying expand_type(), + # otherwise a single item union of a type alias will break it. Note this should not + # cause infinite recursion since pathological aliases like A = Union[A, B] are + # banned at the semantic analysis level. + result = get_proper_type(simplified) + + if use_cache: + self.set_cached(t, result) + return result + + def visit_partial_type(self, t: PartialType) -> Type: + return t + + def visit_type_type(self, t: TypeType) -> Type: + # TODO: Verify that the new item type is valid (instance or + # union of instances or Any). Sadly we can't report errors + # here yet. + item = t.item.accept(self) + return TypeType.make_normalized(item, is_type_form=t.is_type_form) + + def visit_type_alias_type(self, t: TypeAliasType) -> Type: + # Target of the type alias cannot contain type variables (not bound by the type + # alias itself), so we just expand the arguments. + if len(t.args) == 0: + return t + args = self.expand_type_list_with_unpack(t.args) + # TODO: normalize if target is Tuple, and args are [*tuple[X, ...]]? + return t.copy_modified(args=args) + + def expand_types(self, types: Iterable[Type]) -> list[Type]: + a: list[Type] = [] + for t in types: + a.append(t.accept(self)) + return a + + +@overload +def expand_self_type(var: Var, typ: ProperType, replacement: ProperType) -> ProperType: ... + + +@overload +def expand_self_type(var: Var, typ: Type, replacement: Type) -> Type: ... + + +def expand_self_type(var: Var, typ: Type, replacement: Type) -> Type: + """Expand appearances of Self type in a variable type.""" + if var.info.self_type is not None and not var.is_property: + return expand_type(typ, {var.info.self_type.id: replacement}) + return typ + + +def remove_trivial(types: Iterable[Type]) -> list[Type]: + """Make trivial simplifications on a list of types without calling is_subtype(). 
+ + This makes following simplifications: + * Remove bottom types (taking into account strict optional setting) + * Remove everything else if there is an `object` + * Remove strict duplicate types + """ + removed_none = False + new_types = [] + all_types = set() + for t in types: + p_t = get_proper_type(t) + if isinstance(p_t, UninhabitedType): + continue + if isinstance(p_t, NoneType) and not state.strict_optional: + removed_none = True + continue + if isinstance(p_t, Instance) and p_t.type.fullname == "builtins.object": + return [p_t] + if p_t not in all_types: + new_types.append(t) + all_types.add(p_t) + if new_types: + return new_types + if removed_none: + return [NoneType()] + return [UninhabitedType()] diff --git a/.venv/lib/python3.12/site-packages/mypy/exportjson.py b/.venv/lib/python3.12/site-packages/mypy/exportjson.py new file mode 100644 index 0000000..dfc1cf5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/exportjson.py @@ -0,0 +1,578 @@ +"""Tool to convert mypy cache file to a JSON format (print to stdout). + +Usage: + python -m mypy.exportjson .mypy_cache/.../my_module.data.ff + +The idea is to make caches introspectable once we've switched to a binary +cache format and removed support for the older JSON cache format. + +This is primarily to support existing use cases that need to inspect +cache files, and to support debugging mypy caching issues. This means that +this doesn't necessarily need to be kept 1:1 up to date with changes in the +binary cache format (to simplify maintenance -- we don't want this to slow +down mypy development). +""" + +import argparse +import json +import sys +from typing import Any, Union +from typing_extensions import TypeAlias as _TypeAlias + +from librt.internal import ReadBuffer + +from mypy.nodes import ( + FUNCBASE_FLAGS, + FUNCDEF_FLAGS, + VAR_FLAGS, + ClassDef, + DataclassTransformSpec, + Decorator, + FuncDef, + MypyFile, + OverloadedFuncDef, + OverloadPart, + ParamSpecExpr, + SymbolNode, + SymbolTable, + SymbolTableNode, + TypeAlias, + TypeInfo, + TypeVarExpr, + TypeVarTupleExpr, + Var, + get_flags, + node_kinds, +) +from mypy.types import ( + NOT_READY, + AnyType, + CallableType, + ExtraAttrs, + Instance, + LiteralType, + NoneType, + Overloaded, + Parameters, + ParamSpecType, + TupleType, + Type, + TypeAliasType, + TypedDictType, + TypeType, + TypeVarTupleType, + TypeVarType, + UnboundType, + UninhabitedType, + UnionType, + UnpackType, + get_proper_type, +) + +Json: _TypeAlias = Union[dict[str, Any], str] + + +class Config: + def __init__(self, *, implicit_names: bool = True) -> None: + self.implicit_names = implicit_names + + +def convert_binary_cache_to_json(data: bytes, *, implicit_names: bool = True) -> Json: + tree = MypyFile.read(ReadBuffer(data)) + return convert_mypy_file_to_json(tree, Config(implicit_names=implicit_names)) + + +def convert_mypy_file_to_json(self: MypyFile, cfg: Config) -> Json: + return { + ".class": "MypyFile", + "_fullname": self._fullname, + "names": convert_symbol_table(self.names, cfg), + "is_stub": self.is_stub, + "path": self.path, + "is_partial_stub_package": self.is_partial_stub_package, + "future_import_flags": sorted(self.future_import_flags), + } + + +def convert_symbol_table(self: SymbolTable, cfg: Config) -> Json: + data: dict[str, Any] = {".class": "SymbolTable"} + for key, value in self.items(): + # Skip __builtins__: it's a reference to the builtins + # module that gets added to every module by + # SemanticAnalyzerPass2.visit_file(), but it shouldn't be + # accessed by users of the 
module. + if key == "__builtins__" or value.no_serialize: + continue + if not cfg.implicit_names and key in { + "__spec__", + "__package__", + "__file__", + "__doc__", + "__annotations__", + "__name__", + }: + continue + data[key] = convert_symbol_table_node(value, cfg) + return data + + +def convert_symbol_table_node(self: SymbolTableNode, cfg: Config) -> Json: + data: dict[str, Any] = {".class": "SymbolTableNode", "kind": node_kinds[self.kind]} + if self.module_hidden: + data["module_hidden"] = True + if not self.module_public: + data["module_public"] = False + if self.implicit: + data["implicit"] = True + if self.plugin_generated: + data["plugin_generated"] = True + if self.cross_ref: + data["cross_ref"] = self.cross_ref + elif self.node is not None: + data["node"] = convert_symbol_node(self.node, cfg) + return data + + +def convert_symbol_node(self: SymbolNode, cfg: Config) -> Json: + if isinstance(self, FuncDef): + return convert_func_def(self) + elif isinstance(self, OverloadedFuncDef): + return convert_overloaded_func_def(self) + elif isinstance(self, Decorator): + return convert_decorator(self) + elif isinstance(self, Var): + return convert_var(self) + elif isinstance(self, TypeInfo): + return convert_type_info(self, cfg) + elif isinstance(self, TypeAlias): + return convert_type_alias(self) + elif isinstance(self, TypeVarExpr): + return convert_type_var_expr(self) + elif isinstance(self, ParamSpecExpr): + return convert_param_spec_expr(self) + elif isinstance(self, TypeVarTupleExpr): + return convert_type_var_tuple_expr(self) + return {"ERROR": f"{type(self)!r} unrecognized"} + + +def convert_func_def(self: FuncDef) -> Json: + return { + ".class": "FuncDef", + "name": self._name, + "fullname": self._fullname, + "arg_names": self.arg_names, + "arg_kinds": [int(x.value) for x in self.arg_kinds], + "type": None if self.type is None else convert_type(self.type), + "flags": get_flags(self, FUNCDEF_FLAGS), + "abstract_status": self.abstract_status, + # TODO: Do we need expanded, original_def? 
+ "dataclass_transform_spec": ( + None + if self.dataclass_transform_spec is None + else convert_dataclass_transform_spec(self.dataclass_transform_spec) + ), + "deprecated": self.deprecated, + "original_first_arg": self.original_first_arg, + } + + +def convert_dataclass_transform_spec(self: DataclassTransformSpec) -> Json: + return { + "eq_default": self.eq_default, + "order_default": self.order_default, + "kw_only_default": self.kw_only_default, + "frozen_default": self.frozen_default, + "field_specifiers": list(self.field_specifiers), + } + + +def convert_overloaded_func_def(self: OverloadedFuncDef) -> Json: + return { + ".class": "OverloadedFuncDef", + "items": [convert_overload_part(i) for i in self.items], + "type": None if self.type is None else convert_type(self.type), + "fullname": self._fullname, + "impl": None if self.impl is None else convert_overload_part(self.impl), + "flags": get_flags(self, FUNCBASE_FLAGS), + "deprecated": self.deprecated, + "setter_index": self.setter_index, + } + + +def convert_overload_part(self: OverloadPart) -> Json: + if isinstance(self, FuncDef): + return convert_func_def(self) + else: + return convert_decorator(self) + + +def convert_decorator(self: Decorator) -> Json: + return { + ".class": "Decorator", + "func": convert_func_def(self.func), + "var": convert_var(self.var), + "is_overload": self.is_overload, + } + + +def convert_var(self: Var) -> Json: + data: dict[str, Any] = { + ".class": "Var", + "name": self._name, + "fullname": self._fullname, + "type": None if self.type is None else convert_type(self.type), + "setter_type": None if self.setter_type is None else convert_type(self.setter_type), + "flags": get_flags(self, VAR_FLAGS), + } + if self.final_value is not None: + data["final_value"] = self.final_value + return data + + +def convert_type_info(self: TypeInfo, cfg: Config) -> Json: + data = { + ".class": "TypeInfo", + "module_name": self.module_name, + "fullname": self.fullname, + "names": convert_symbol_table(self.names, cfg), + "defn": convert_class_def(self.defn), + "abstract_attributes": self.abstract_attributes, + "type_vars": self.type_vars, + "has_param_spec_type": self.has_param_spec_type, + "bases": [convert_type(b) for b in self.bases], + "mro": self._mro_refs, + "_promote": [convert_type(p) for p in self._promote], + "alt_promote": None if self.alt_promote is None else convert_type(self.alt_promote), + "declared_metaclass": ( + None if self.declared_metaclass is None else convert_type(self.declared_metaclass) + ), + "metaclass_type": ( + None if self.metaclass_type is None else convert_type(self.metaclass_type) + ), + "tuple_type": None if self.tuple_type is None else convert_type(self.tuple_type), + "typeddict_type": ( + None if self.typeddict_type is None else convert_typeddict_type(self.typeddict_type) + ), + "flags": get_flags(self, TypeInfo.FLAGS), + "metadata": self.metadata, + "slots": sorted(self.slots) if self.slots is not None else None, + "deletable_attributes": self.deletable_attributes, + "self_type": convert_type(self.self_type) if self.self_type is not None else None, + "dataclass_transform_spec": ( + convert_dataclass_transform_spec(self.dataclass_transform_spec) + if self.dataclass_transform_spec is not None + else None + ), + "deprecated": self.deprecated, + } + return data + + +def convert_class_def(self: ClassDef) -> Json: + return { + ".class": "ClassDef", + "name": self.name, + "fullname": self.fullname, + "type_vars": [convert_type(v) for v in self.type_vars], + } + + +def convert_type_alias(self: 
TypeAlias) -> Json: + data: Json = { + ".class": "TypeAlias", + "fullname": self._fullname, + "module": self.module, + "target": convert_type(self.target), + "alias_tvars": [convert_type(v) for v in self.alias_tvars], + "no_args": self.no_args, + "normalized": self.normalized, + "python_3_12_type_alias": self.python_3_12_type_alias, + } + return data + + +def convert_type_var_expr(self: TypeVarExpr) -> Json: + return { + ".class": "TypeVarExpr", + "name": self._name, + "fullname": self._fullname, + "values": [convert_type(t) for t in self.values], + "upper_bound": convert_type(self.upper_bound), + "default": convert_type(self.default), + "variance": self.variance, + } + + +def convert_param_spec_expr(self: ParamSpecExpr) -> Json: + return { + ".class": "ParamSpecExpr", + "name": self._name, + "fullname": self._fullname, + "upper_bound": convert_type(self.upper_bound), + "default": convert_type(self.default), + "variance": self.variance, + } + + +def convert_type_var_tuple_expr(self: TypeVarTupleExpr) -> Json: + return { + ".class": "TypeVarTupleExpr", + "name": self._name, + "fullname": self._fullname, + "upper_bound": convert_type(self.upper_bound), + "tuple_fallback": convert_type(self.tuple_fallback), + "default": convert_type(self.default), + "variance": self.variance, + } + + +def convert_type(typ: Type) -> Json: + if type(typ) is TypeAliasType: + return convert_type_alias_type(typ) + typ = get_proper_type(typ) + if isinstance(typ, Instance): + return convert_instance(typ) + elif isinstance(typ, AnyType): + return convert_any_type(typ) + elif isinstance(typ, NoneType): + return convert_none_type(typ) + elif isinstance(typ, UnionType): + return convert_union_type(typ) + elif isinstance(typ, TupleType): + return convert_tuple_type(typ) + elif isinstance(typ, CallableType): + return convert_callable_type(typ) + elif isinstance(typ, Overloaded): + return convert_overloaded(typ) + elif isinstance(typ, LiteralType): + return convert_literal_type(typ) + elif isinstance(typ, TypeVarType): + return convert_type_var_type(typ) + elif isinstance(typ, TypeType): + return convert_type_type(typ) + elif isinstance(typ, UninhabitedType): + return convert_uninhabited_type(typ) + elif isinstance(typ, UnpackType): + return convert_unpack_type(typ) + elif isinstance(typ, ParamSpecType): + return convert_param_spec_type(typ) + elif isinstance(typ, TypeVarTupleType): + return convert_type_var_tuple_type(typ) + elif isinstance(typ, Parameters): + return convert_parameters(typ) + elif isinstance(typ, TypedDictType): + return convert_typeddict_type(typ) + elif isinstance(typ, UnboundType): + return convert_unbound_type(typ) + return {"ERROR": f"{type(typ)!r} unrecognized"} + + +def convert_instance(self: Instance) -> Json: + ready = self.type is not NOT_READY + if not self.args and not self.last_known_value and not self.extra_attrs: + if ready: + return self.type.fullname + elif self.type_ref: + return self.type_ref + + data: dict[str, Any] = { + ".class": "Instance", + "type_ref": self.type.fullname if ready else self.type_ref, + "args": [convert_type(arg) for arg in self.args], + } + if self.last_known_value is not None: + data["last_known_value"] = convert_type(self.last_known_value) + data["extra_attrs"] = convert_extra_attrs(self.extra_attrs) if self.extra_attrs else None + return data + + +def convert_extra_attrs(self: ExtraAttrs) -> Json: + return { + ".class": "ExtraAttrs", + "attrs": {k: convert_type(v) for k, v in self.attrs.items()}, + "immutable": sorted(self.immutable), + "mod_name": 
self.mod_name, + } + + +def convert_type_alias_type(self: TypeAliasType) -> Json: + data: Json = { + ".class": "TypeAliasType", + "type_ref": self.type_ref, + "args": [convert_type(arg) for arg in self.args], + } + return data + + +def convert_any_type(self: AnyType) -> Json: + return { + ".class": "AnyType", + "type_of_any": self.type_of_any, + "source_any": convert_type(self.source_any) if self.source_any is not None else None, + "missing_import_name": self.missing_import_name, + } + + +def convert_none_type(self: NoneType) -> Json: + return {".class": "NoneType"} + + +def convert_union_type(self: UnionType) -> Json: + return { + ".class": "UnionType", + "items": [convert_type(t) for t in self.items], + "uses_pep604_syntax": self.uses_pep604_syntax, + } + + +def convert_tuple_type(self: TupleType) -> Json: + return { + ".class": "TupleType", + "items": [convert_type(t) for t in self.items], + "partial_fallback": convert_type(self.partial_fallback), + "implicit": self.implicit, + } + + +def convert_literal_type(self: LiteralType) -> Json: + return {".class": "LiteralType", "value": self.value, "fallback": convert_type(self.fallback)} + + +def convert_type_var_type(self: TypeVarType) -> Json: + assert not self.id.is_meta_var() + return { + ".class": "TypeVarType", + "name": self.name, + "fullname": self.fullname, + "id": self.id.raw_id, + "namespace": self.id.namespace, + "values": [convert_type(v) for v in self.values], + "upper_bound": convert_type(self.upper_bound), + "default": convert_type(self.default), + "variance": self.variance, + } + + +def convert_callable_type(self: CallableType) -> Json: + return { + ".class": "CallableType", + "arg_types": [convert_type(t) for t in self.arg_types], + "arg_kinds": [int(x.value) for x in self.arg_kinds], + "arg_names": self.arg_names, + "ret_type": convert_type(self.ret_type), + "fallback": convert_type(self.fallback), + "name": self.name, + # We don't serialize the definition (only used for error messages). 
+ "variables": [convert_type(v) for v in self.variables], + "is_ellipsis_args": self.is_ellipsis_args, + "implicit": self.implicit, + "is_bound": self.is_bound, + "type_guard": convert_type(self.type_guard) if self.type_guard is not None else None, + "type_is": convert_type(self.type_is) if self.type_is is not None else None, + "from_concatenate": self.from_concatenate, + "imprecise_arg_kinds": self.imprecise_arg_kinds, + "unpack_kwargs": self.unpack_kwargs, + } + + +def convert_overloaded(self: Overloaded) -> Json: + return {".class": "Overloaded", "items": [convert_type(t) for t in self.items]} + + +def convert_type_type(self: TypeType) -> Json: + return {".class": "TypeType", "item": convert_type(self.item)} + + +def convert_uninhabited_type(self: UninhabitedType) -> Json: + return {".class": "UninhabitedType"} + + +def convert_unpack_type(self: UnpackType) -> Json: + return {".class": "UnpackType", "type": convert_type(self.type)} + + +def convert_param_spec_type(self: ParamSpecType) -> Json: + assert not self.id.is_meta_var() + return { + ".class": "ParamSpecType", + "name": self.name, + "fullname": self.fullname, + "id": self.id.raw_id, + "namespace": self.id.namespace, + "flavor": self.flavor, + "upper_bound": convert_type(self.upper_bound), + "default": convert_type(self.default), + "prefix": convert_type(self.prefix), + } + + +def convert_type_var_tuple_type(self: TypeVarTupleType) -> Json: + assert not self.id.is_meta_var() + return { + ".class": "TypeVarTupleType", + "name": self.name, + "fullname": self.fullname, + "id": self.id.raw_id, + "namespace": self.id.namespace, + "upper_bound": convert_type(self.upper_bound), + "tuple_fallback": convert_type(self.tuple_fallback), + "default": convert_type(self.default), + "min_len": self.min_len, + } + + +def convert_parameters(self: Parameters) -> Json: + return { + ".class": "Parameters", + "arg_types": [convert_type(t) for t in self.arg_types], + "arg_kinds": [int(x.value) for x in self.arg_kinds], + "arg_names": self.arg_names, + "variables": [convert_type(tv) for tv in self.variables], + "imprecise_arg_kinds": self.imprecise_arg_kinds, + } + + +def convert_typeddict_type(self: TypedDictType) -> Json: + return { + ".class": "TypedDictType", + "items": [[n, convert_type(t)] for (n, t) in self.items.items()], + "required_keys": sorted(self.required_keys), + "readonly_keys": sorted(self.readonly_keys), + "fallback": convert_type(self.fallback), + } + + +def convert_unbound_type(self: UnboundType) -> Json: + return { + ".class": "UnboundType", + "name": self.name, + "args": [convert_type(a) for a in self.args], + "expr": self.original_str_expr, + "expr_fallback": self.original_str_fallback, + } + + +def main() -> None: + parser = argparse.ArgumentParser( + description="Convert binary cache files to JSON. " + "Create files in the same directory with extra .json extension." 
+ ) + parser.add_argument( + "path", nargs="+", help="mypy cache data file to convert (.data.ff extension)" + ) + args = parser.parse_args() + fnams: list[str] = args.path + for fnam in fnams: + if not fnam.endswith(".data.ff"): + sys.exit(f"error: Expected .data.ff extension, but got {fnam}") + with open(fnam, "rb") as f: + data = f.read() + json_data = convert_binary_cache_to_json(data) + new_fnam = fnam + ".json" + with open(new_fnam, "w") as f: + json.dump(json_data, f) + print(f"{fnam} -> {new_fnam}") + + +if __name__ == "__main__": + main() diff --git a/.venv/lib/python3.12/site-packages/mypy/exprtotype.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/exprtotype.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..2d08b0b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/exprtotype.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/exprtotype.py b/.venv/lib/python3.12/site-packages/mypy/exprtotype.py new file mode 100644 index 0000000..6fd43c0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/exprtotype.py @@ -0,0 +1,289 @@ +"""Translate an Expression to a Type value.""" + +from __future__ import annotations + +from typing import Callable + +from mypy.fastparse import parse_type_string +from mypy.nodes import ( + MISSING_FALLBACK, + BytesExpr, + CallExpr, + ComplexExpr, + Context, + DictExpr, + EllipsisExpr, + Expression, + FloatExpr, + IndexExpr, + IntExpr, + ListExpr, + MemberExpr, + NameExpr, + OpExpr, + RefExpr, + StarExpr, + StrExpr, + SymbolTableNode, + TupleExpr, + UnaryExpr, + get_member_expr_fullname, +) +from mypy.options import Options +from mypy.types import ( + ANNOTATED_TYPE_NAMES, + AnyType, + CallableArgument, + EllipsisType, + Instance, + ProperType, + RawExpressionType, + Type, + TypedDictType, + TypeList, + TypeOfAny, + UnboundType, + UnionType, + UnpackType, +) + + +class TypeTranslationError(Exception): + """Exception raised when an expression is not valid as a type.""" + + +def _extract_argument_name(expr: Expression) -> str | None: + if isinstance(expr, NameExpr) and expr.name == "None": + return None + elif isinstance(expr, StrExpr): + return expr.value + else: + raise TypeTranslationError() + + +def expr_to_unanalyzed_type( + expr: Expression, + options: Options, + allow_new_syntax: bool = False, + _parent: Expression | None = None, + allow_unpack: bool = False, + lookup_qualified: Callable[[str, Context], SymbolTableNode | None] | None = None, +) -> ProperType: + """Translate an expression to the corresponding type. + + The result is not semantically analyzed. It can be UnboundType or TypeList. + Raise TypeTranslationError if the expression cannot represent a type. + + If lookup_qualified is not provided, the expression is expected to be semantically + analyzed. + + If allow_new_syntax is True, allow all type syntax independent of the target + Python version (used in stubs). + + # TODO: a lot of code here is duplicated in fastparse.py, refactor this. + """ + # The `parent` parameter is used in recursive calls to provide context for + # understanding whether an CallableArgument is ok. 
+ name: str | None = None + if isinstance(expr, NameExpr): + name = expr.name + if name == "True": + return RawExpressionType(True, "builtins.bool", line=expr.line, column=expr.column) + elif name == "False": + return RawExpressionType(False, "builtins.bool", line=expr.line, column=expr.column) + else: + return UnboundType(name, line=expr.line, column=expr.column) + elif isinstance(expr, MemberExpr): + fullname = get_member_expr_fullname(expr) + if fullname: + return UnboundType(fullname, line=expr.line, column=expr.column) + else: + raise TypeTranslationError() + elif isinstance(expr, IndexExpr): + base = expr_to_unanalyzed_type( + expr.base, options, allow_new_syntax, expr, lookup_qualified=lookup_qualified + ) + if isinstance(base, UnboundType): + if base.args: + raise TypeTranslationError() + if isinstance(expr.index, TupleExpr): + args = expr.index.items + else: + args = [expr.index] + + if isinstance(expr.base, RefExpr): + # Check if the type is Annotated[...]. For this we need the fullname, + # which must be looked up if the expression hasn't been semantically analyzed. + base_fullname = None + if lookup_qualified is not None: + sym = lookup_qualified(base.name, expr) + if sym and sym.node: + base_fullname = sym.node.fullname + else: + base_fullname = expr.base.fullname + + if base_fullname is not None and base_fullname in ANNOTATED_TYPE_NAMES: + # TODO: this is not the optimal solution as we are basically getting rid + # of the Annotation definition and only returning the type information, + # losing all the annotations. + return expr_to_unanalyzed_type( + args[0], options, allow_new_syntax, expr, lookup_qualified=lookup_qualified + ) + base.args = tuple( + expr_to_unanalyzed_type( + arg, + options, + allow_new_syntax, + expr, + allow_unpack=True, + lookup_qualified=lookup_qualified, + ) + for arg in args + ) + if not base.args: + base.empty_tuple_index = True + return base + else: + raise TypeTranslationError() + elif ( + isinstance(expr, OpExpr) + and expr.op == "|" + and ((options.python_version >= (3, 10)) or allow_new_syntax) + ): + return UnionType( + [ + expr_to_unanalyzed_type( + expr.left, options, allow_new_syntax, lookup_qualified=lookup_qualified + ), + expr_to_unanalyzed_type( + expr.right, options, allow_new_syntax, lookup_qualified=lookup_qualified + ), + ], + uses_pep604_syntax=True, + ) + elif isinstance(expr, CallExpr) and isinstance(_parent, ListExpr): + c = expr.callee + names = [] + # Go through the dotted member expr chain to get the full arg + # constructor name to look up + while True: + if isinstance(c, NameExpr): + names.append(c.name) + break + elif isinstance(c, MemberExpr): + names.append(c.name) + c = c.expr + else: + raise TypeTranslationError() + arg_const = ".".join(reversed(names)) + + # Go through the constructor args to get its name and type. 
+ name = None + default_type = AnyType(TypeOfAny.unannotated) + typ: Type = default_type + for i, arg in enumerate(expr.args): + if expr.arg_names[i] is not None: + if expr.arg_names[i] == "name": + if name is not None: + # Two names + raise TypeTranslationError() + name = _extract_argument_name(arg) + continue + elif expr.arg_names[i] == "type": + if typ is not default_type: + # Two types + raise TypeTranslationError() + typ = expr_to_unanalyzed_type( + arg, options, allow_new_syntax, expr, lookup_qualified=lookup_qualified + ) + continue + else: + raise TypeTranslationError() + elif i == 0: + typ = expr_to_unanalyzed_type( + arg, options, allow_new_syntax, expr, lookup_qualified=lookup_qualified + ) + elif i == 1: + name = _extract_argument_name(arg) + else: + raise TypeTranslationError() + return CallableArgument(typ, name, arg_const, expr.line, expr.column) + elif isinstance(expr, ListExpr): + return TypeList( + [ + expr_to_unanalyzed_type( + t, + options, + allow_new_syntax, + expr, + allow_unpack=True, + lookup_qualified=lookup_qualified, + ) + for t in expr.items + ], + line=expr.line, + column=expr.column, + ) + elif isinstance(expr, StrExpr): + return parse_type_string(expr.value, "builtins.str", expr.line, expr.column) + elif isinstance(expr, BytesExpr): + return parse_type_string(expr.value, "builtins.bytes", expr.line, expr.column) + elif isinstance(expr, UnaryExpr): + typ = expr_to_unanalyzed_type( + expr.expr, options, allow_new_syntax, lookup_qualified=lookup_qualified + ) + if isinstance(typ, RawExpressionType): + if isinstance(typ.literal_value, int): + if expr.op == "-": + typ.literal_value *= -1 + return typ + elif expr.op == "+": + return typ + raise TypeTranslationError() + elif isinstance(expr, IntExpr): + return RawExpressionType(expr.value, "builtins.int", line=expr.line, column=expr.column) + elif isinstance(expr, FloatExpr): + # Floats are not valid parameters for RawExpressionType , so we just + # pass in 'None' for now. We'll report the appropriate error at a later stage. + return RawExpressionType(None, "builtins.float", line=expr.line, column=expr.column) + elif isinstance(expr, ComplexExpr): + # Same thing as above with complex numbers. 
+ return RawExpressionType(None, "builtins.complex", line=expr.line, column=expr.column) + elif isinstance(expr, EllipsisExpr): + return EllipsisType(expr.line) + elif allow_unpack and isinstance(expr, StarExpr): + return UnpackType( + expr_to_unanalyzed_type( + expr.expr, options, allow_new_syntax, lookup_qualified=lookup_qualified + ), + from_star_syntax=True, + ) + elif isinstance(expr, DictExpr): + if not expr.items: + raise TypeTranslationError() + items: dict[str, Type] = {} + extra_items_from = [] + for item_name, value in expr.items: + if not isinstance(item_name, StrExpr): + if item_name is None: + extra_items_from.append( + expr_to_unanalyzed_type( + value, + options, + allow_new_syntax, + expr, + lookup_qualified=lookup_qualified, + ) + ) + continue + raise TypeTranslationError() + items[item_name.value] = expr_to_unanalyzed_type( + value, options, allow_new_syntax, expr, lookup_qualified=lookup_qualified + ) + result = TypedDictType( + items, set(), set(), Instance(MISSING_FALLBACK, ()), expr.line, expr.column + ) + result.extra_items_from = extra_items_from + return result + else: + raise TypeTranslationError() diff --git a/.venv/lib/python3.12/site-packages/mypy/fastparse.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/fastparse.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..5c5ba02 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/fastparse.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/fastparse.py b/.venv/lib/python3.12/site-packages/mypy/fastparse.py new file mode 100644 index 0000000..0e7b418 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/fastparse.py @@ -0,0 +1,2279 @@ +from __future__ import annotations + +import re +import sys +import warnings +from collections.abc import Sequence +from typing import Any, Callable, Final, Literal, Optional, TypeVar, Union, cast, overload + +from mypy import defaults, errorcodes as codes, message_registry +from mypy.errors import Errors +from mypy.message_registry import ErrorMessage +from mypy.nodes import ( + ARG_NAMED, + ARG_NAMED_OPT, + ARG_OPT, + ARG_POS, + ARG_STAR, + ARG_STAR2, + MISSING_FALLBACK, + PARAM_SPEC_KIND, + TYPE_VAR_KIND, + TYPE_VAR_TUPLE_KIND, + ArgKind, + Argument, + AssertStmt, + AssignmentExpr, + AssignmentStmt, + AwaitExpr, + Block, + BreakStmt, + BytesExpr, + CallExpr, + ClassDef, + ComparisonExpr, + ComplexExpr, + ConditionalExpr, + ContinueStmt, + Decorator, + DelStmt, + DictExpr, + DictionaryComprehension, + EllipsisExpr, + Expression, + ExpressionStmt, + FloatExpr, + ForStmt, + FuncDef, + GeneratorExpr, + GlobalDecl, + IfStmt, + Import, + ImportAll, + ImportBase, + ImportFrom, + IndexExpr, + IntExpr, + LambdaExpr, + ListComprehension, + ListExpr, + MatchStmt, + MemberExpr, + MypyFile, + NameExpr, + Node, + NonlocalDecl, + OperatorAssignmentStmt, + OpExpr, + OverloadedFuncDef, + OverloadPart, + PassStmt, + RaiseStmt, + RefExpr, + ReturnStmt, + SetComprehension, + SetExpr, + SliceExpr, + StarExpr, + Statement, + StrExpr, + SuperExpr, + TempNode, + TryStmt, + TupleExpr, + TypeAliasStmt, + TypeParam, + UnaryExpr, + Var, + WhileStmt, + WithStmt, + YieldExpr, + YieldFromExpr, + check_arg_names, +) +from mypy.options import Options +from mypy.patterns import ( + AsPattern, + ClassPattern, + MappingPattern, + OrPattern, + SequencePattern, + SingletonPattern, + StarredPattern, + ValuePattern, +) +from mypy.reachability import infer_reachability_of_if_statement, mark_block_unreachable +from 
mypy.sharedparse import argument_elide_name, special_function_elide_names +from mypy.traverser import TraverserVisitor +from mypy.types import ( + AnyType, + CallableArgument, + CallableType, + EllipsisType, + Instance, + ProperType, + RawExpressionType, + TupleType, + Type, + TypedDictType, + TypeList, + TypeOfAny, + UnboundType, + UnionType, + UnpackType, +) +from mypy.util import bytes_to_human_readable_repr, unnamed_function + +# pull this into a final variable to make mypyc be quiet about the +# the default argument warning +PY_MINOR_VERSION: Final = sys.version_info[1] + +import ast as ast3 +from ast import AST, Attribute, Call, FunctionType, Name, Starred, UAdd, UnaryOp, USub + + +def ast3_parse( + source: str | bytes, filename: str, mode: str, feature_version: int = PY_MINOR_VERSION +) -> AST: + # Ignore warnings that look like: + # :1: SyntaxWarning: invalid escape sequence '\.' + # because `source` could be anything, including literals like r'(re\.match)' + with warnings.catch_warnings(): + warnings.simplefilter("ignore", SyntaxWarning) + return ast3.parse( + source, + filename, + mode, + type_comments=True, # This works the magic + feature_version=feature_version, + ) + + +if sys.version_info >= (3, 10): + Match = ast3.Match + MatchValue = ast3.MatchValue + MatchSingleton = ast3.MatchSingleton + MatchSequence = ast3.MatchSequence + MatchStar = ast3.MatchStar + MatchMapping = ast3.MatchMapping + MatchClass = ast3.MatchClass + MatchAs = ast3.MatchAs + MatchOr = ast3.MatchOr + AstNode = Union[ast3.expr, ast3.stmt, ast3.pattern, ast3.ExceptHandler] +else: + Match = Any + MatchValue = Any + MatchSingleton = Any + MatchSequence = Any + MatchStar = Any + MatchMapping = Any + MatchClass = Any + MatchAs = Any + MatchOr = Any + AstNode = Union[ast3.expr, ast3.stmt, ast3.ExceptHandler] + +if sys.version_info >= (3, 11): + TryStar = ast3.TryStar +else: + TryStar = Any + +if sys.version_info >= (3, 12): + ast_TypeAlias = ast3.TypeAlias + ast_ParamSpec = ast3.ParamSpec + ast_TypeVar = ast3.TypeVar + ast_TypeVarTuple = ast3.TypeVarTuple +else: + ast_TypeAlias = Any + ast_ParamSpec = Any + ast_TypeVar = Any + ast_TypeVarTuple = Any + +if sys.version_info >= (3, 14): + ast_TemplateStr = ast3.TemplateStr + ast_Interpolation = ast3.Interpolation +else: + ast_TemplateStr = Any + ast_Interpolation = Any + +N = TypeVar("N", bound=Node) + +# There is no way to create reasonable fallbacks at this stage, +# they must be patched later. +_dummy_fallback: Final = Instance(MISSING_FALLBACK, [], -1) + +TYPE_IGNORE_PATTERN: Final = re.compile(r"[^#]*#\s*type:\s*ignore\s*(.*)") + + +def parse( + source: str | bytes, + fnam: str, + module: str | None, + errors: Errors, + options: Options | None = None, +) -> MypyFile: + """Parse a source file, without doing any semantic analysis. + + Return the parse tree. If errors is not provided, raise ParseError + on failure. Otherwise, use the errors object to report parse errors. + """ + ignore_errors = (options is not None and options.ignore_errors) or ( + fnam in errors.ignored_files + ) + # If errors are ignored, we can drop many function bodies to speed up type checking. 
+ strip_function_bodies = ignore_errors and (options is None or not options.preserve_asts) + + if options is None: + options = Options() + errors.set_file(fnam, module, options=options) + is_stub_file = fnam.endswith(".pyi") + if is_stub_file: + feature_version = defaults.PYTHON3_VERSION[1] + if options.python_version[0] == 3 and options.python_version[1] > feature_version: + feature_version = options.python_version[1] + else: + assert options.python_version[0] >= 3 + feature_version = options.python_version[1] + try: + # Disable + # - deprecation warnings for 'invalid escape sequence' (Python 3.11 and below) + # - syntax warnings for 'invalid escape sequence' (3.12+) and 'return in finally' (3.14+) + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + warnings.filterwarnings("ignore", category=SyntaxWarning) + ast = ast3_parse(source, fnam, "exec", feature_version=feature_version) + + tree = ASTConverter( + options=options, + is_stub=is_stub_file, + errors=errors, + strip_function_bodies=strip_function_bodies, + path=fnam, + ).visit(ast) + + except RecursionError as e: + # For very complex expressions it is possible to hit recursion limit + # before reaching a leaf node. + # Should reject at top level instead at bottom, since bottom would already + # be at the threshold of the recursion limit, and may fail again later. + # E.G. x1+x2+x3+...+xn -> BinOp(left=BinOp(left=BinOp(left=... + try: + # But to prove that is the cause of this particular recursion error, + # try to walk the tree using builtin visitor + ast3.NodeVisitor().visit(ast) + except RecursionError: + errors.report( + -1, -1, "Source expression too complex to parse", blocker=False, code=codes.MISC + ) + + tree = MypyFile([], [], False, {}) + + else: + # re-raise original recursion error if it *can* be unparsed, + # maybe this is some other issue that shouldn't be silenced/misdirected + raise e + + except SyntaxError as e: + message = e.msg + if feature_version > sys.version_info.minor and message.startswith("invalid syntax"): + python_version_str = f"{options.python_version[0]}.{options.python_version[1]}" + message += f"; you likely need to run mypy using Python {python_version_str} or newer" + errors.report( + e.lineno if e.lineno is not None else -1, + e.offset, + re.sub( + r"^(\s*\w)", lambda m: m.group(1).upper(), message + ), # Standardizing error message + blocker=True, + code=codes.SYNTAX, + ) + tree = MypyFile([], [], False, {}) + + assert isinstance(tree, MypyFile) + return tree + + +def parse_type_ignore_tag(tag: str | None) -> list[str] | None: + """Parse optional "[code, ...]" tag after "# type: ignore". + + Return: + * [] if no tag was found (ignore all errors) + * list of ignored error codes if a tag was found + * None if the tag was invalid. + """ + if not tag or tag.strip() == "" or tag.strip().startswith("#"): + # No tag -- ignore all errors. + return [] + m = re.match(r"\s*\[([^]#]*)\]\s*(#.*)?$", tag) + if m is None: + # Invalid "# type: ignore" comment. + return None + return [code.strip() for code in m.group(1).split(",")] + + +def parse_type_comment( + type_comment: str, line: int, column: int, errors: Errors | None +) -> tuple[list[str] | None, ProperType | None]: + """Parse type portion of a type comment (+ optional type ignore). + + Return (ignore info, parsed type). 
+ """ + try: + typ = ast3_parse(type_comment, "", "eval") + except SyntaxError: + if errors is not None: + stripped_type = type_comment.split("#", 2)[0].strip() + err_msg = message_registry.TYPE_COMMENT_SYNTAX_ERROR_VALUE.format(stripped_type) + errors.report(line, column, err_msg.value, blocker=True, code=err_msg.code) + return None, None + else: + raise + else: + extra_ignore = TYPE_IGNORE_PATTERN.match(type_comment) + if extra_ignore: + tag: str | None = extra_ignore.group(1) + ignored: list[str] | None = parse_type_ignore_tag(tag) + if ignored is None: + if errors is not None: + errors.report( + line, column, message_registry.INVALID_TYPE_IGNORE.value, code=codes.SYNTAX + ) + else: + raise SyntaxError + else: + ignored = None + assert isinstance(typ, ast3.Expression) + converted = TypeConverter( + errors, line=line, override_column=column, is_evaluated=False + ).visit(typ.body) + return ignored, converted + + +def parse_type_string( + expr_string: str, expr_fallback_name: str, line: int, column: int +) -> ProperType: + """Parses a type that was originally present inside of an explicit string. + + For example, suppose we have the type `Foo["blah"]`. We should parse the + string expression "blah" using this function. + """ + try: + _, node = parse_type_comment(f"({expr_string})", line=line, column=column, errors=None) + if isinstance(node, (UnboundType, UnionType)) and node.original_str_expr is None: + node.original_str_expr = expr_string + node.original_str_fallback = expr_fallback_name + return node + else: + return RawExpressionType(expr_string, expr_fallback_name, line, column) + except (SyntaxError, ValueError): + # Note: the parser will raise a `ValueError` instead of a SyntaxError if + # the string happens to contain things like \x00. + return RawExpressionType(expr_string, expr_fallback_name, line, column) + + +def is_no_type_check_decorator(expr: ast3.expr) -> bool: + if isinstance(expr, Name): + return expr.id == "no_type_check" + elif isinstance(expr, Attribute): + if isinstance(expr.value, Name): + return expr.value.id == "typing" and expr.attr == "no_type_check" + return False + + +def find_disallowed_expression_in_annotation_scope(expr: ast3.expr | None) -> ast3.expr | None: + if expr is None: + return None + for node in ast3.walk(expr): + if isinstance(node, (ast3.Yield, ast3.YieldFrom, ast3.NamedExpr, ast3.Await)): + return node + return None + + +class ASTConverter: + def __init__( + self, + options: Options, + is_stub: bool, + errors: Errors, + *, + strip_function_bodies: bool, + path: str, + ) -> None: + # 'C' for class, 'D' for function signature, 'F' for function, 'L' for lambda + self.class_and_function_stack: list[Literal["C", "D", "F", "L"]] = [] + self.imports: list[ImportBase] = [] + + self.options = options + self.is_stub = is_stub + self.errors = errors + self.strip_function_bodies = strip_function_bodies + self.path = path + + self.type_ignores: dict[int, list[str]] = {} + + # Cache of visit_X methods keyed by type of visited object + self.visitor_cache: dict[type, Callable[[AST | None], Any]] = {} + + def note(self, msg: str, line: int, column: int) -> None: + self.errors.report(line, column, msg, severity="note", code=codes.SYNTAX) + + def fail(self, msg: ErrorMessage, line: int, column: int, blocker: bool) -> None: + if blocker or not self.options.ignore_errors: + # Make sure self.errors reflects any type ignores that we have parsed + self.errors.set_file_ignored_lines( + self.path, self.type_ignores, self.options.ignore_errors + ) + 
self.errors.report(line, column, msg.value, blocker=blocker, code=msg.code) + + def fail_merge_overload(self, node: IfStmt) -> None: + self.fail( + message_registry.FAILED_TO_MERGE_OVERLOADS, + line=node.line, + column=node.column, + blocker=False, + ) + + def visit(self, node: AST | None) -> Any: + if node is None: + return None + typeobj = type(node) + visitor = self.visitor_cache.get(typeobj) + if visitor is None: + method = "visit_" + node.__class__.__name__ + visitor = getattr(self, method) + self.visitor_cache[typeobj] = visitor + + return visitor(node) + + def set_line(self, node: N, n: AstNode) -> N: + node.line = n.lineno + node.column = n.col_offset + node.end_line = getattr(n, "end_lineno", None) + node.end_column = getattr(n, "end_col_offset", None) + + return node + + def translate_opt_expr_list(self, l: Sequence[AST | None]) -> list[Expression | None]: + res: list[Expression | None] = [] + for e in l: + exp = self.visit(e) + res.append(exp) + return res + + def translate_expr_list(self, l: Sequence[AST]) -> list[Expression]: + return cast(list[Expression], self.translate_opt_expr_list(l)) + + def get_lineno(self, node: ast3.expr | ast3.stmt) -> int: + if ( + isinstance(node, (ast3.AsyncFunctionDef, ast3.ClassDef, ast3.FunctionDef)) + and node.decorator_list + ): + return node.decorator_list[0].lineno + return node.lineno + + def translate_stmt_list( + self, + stmts: Sequence[ast3.stmt], + *, + ismodule: bool = False, + can_strip: bool = False, + is_coroutine: bool = False, + ) -> list[Statement]: + # A "# type: ignore" comment before the first statement of a module + # ignores the whole module: + if ( + ismodule + and stmts + and self.type_ignores + and min(self.type_ignores) < self.get_lineno(stmts[0]) + ): + ignores = self.type_ignores[min(self.type_ignores)] + if ignores: + joined_ignores = ", ".join(ignores) + self.fail( + message_registry.TYPE_IGNORE_WITH_ERRCODE_ON_MODULE.format(joined_ignores), + line=min(self.type_ignores), + column=0, + blocker=False, + ) + self.errors.used_ignored_lines[self.errors.file][min(self.type_ignores)].append( + codes.FILE.code + ) + block = Block(self.fix_function_overloads(self.translate_stmt_list(stmts))) + self.set_block_lines(block, stmts) + mark_block_unreachable(block) + return [block] + + stack = self.class_and_function_stack + # Fast case for stripping function bodies + if ( + can_strip + and self.strip_function_bodies + and len(stack) == 1 + and stack[0] == "F" + and not is_coroutine + ): + return [] + + res: list[Statement] = [] + for stmt in stmts: + node = self.visit(stmt) + res.append(node) + + # Slow case for stripping function bodies + if can_strip and self.strip_function_bodies: + if stack[-2:] == ["C", "F"]: + if is_possible_trivial_body(res): + can_strip = False + else: + # We only strip method bodies if they don't assign to an attribute, as + # this may define an attribute which has an externally visible effect. + visitor = FindAttributeAssign() + for s in res: + s.accept(visitor) + if visitor.found: + can_strip = False + break + + if can_strip and stack[-1] == "F" and is_coroutine: + # Yields inside an async function affect the return type and should not + # be stripped. 
+ yield_visitor = FindYield() + for s in res: + s.accept(yield_visitor) + if yield_visitor.found: + can_strip = False + break + + if can_strip: + return [] + return res + + def translate_type_comment( + self, n: ast3.stmt | ast3.arg, type_comment: str | None + ) -> ProperType | None: + if type_comment is None: + return None + else: + lineno = n.lineno + extra_ignore, typ = parse_type_comment(type_comment, lineno, n.col_offset, self.errors) + if extra_ignore is not None: + self.type_ignores[lineno] = extra_ignore + return typ + + op_map: Final[dict[type[AST], str]] = { + ast3.Add: "+", + ast3.Sub: "-", + ast3.Mult: "*", + ast3.MatMult: "@", + ast3.Div: "/", + ast3.Mod: "%", + ast3.Pow: "**", + ast3.LShift: "<<", + ast3.RShift: ">>", + ast3.BitOr: "|", + ast3.BitXor: "^", + ast3.BitAnd: "&", + ast3.FloorDiv: "//", + } + + def from_operator(self, op: ast3.operator) -> str: + op_name = ASTConverter.op_map.get(type(op)) + if op_name is None: + raise RuntimeError("Unknown operator " + str(type(op))) + else: + return op_name + + comp_op_map: Final[dict[type[AST], str]] = { + ast3.Gt: ">", + ast3.Lt: "<", + ast3.Eq: "==", + ast3.GtE: ">=", + ast3.LtE: "<=", + ast3.NotEq: "!=", + ast3.Is: "is", + ast3.IsNot: "is not", + ast3.In: "in", + ast3.NotIn: "not in", # codespell:ignore notin + } + + def from_comp_operator(self, op: ast3.cmpop) -> str: + op_name = ASTConverter.comp_op_map.get(type(op)) + if op_name is None: + raise RuntimeError("Unknown comparison operator " + str(type(op))) + else: + return op_name + + def set_block_lines(self, b: Block, stmts: Sequence[ast3.stmt]) -> None: + first, last = stmts[0], stmts[-1] + b.line = first.lineno + b.column = first.col_offset + b.end_line = getattr(last, "end_lineno", None) + b.end_column = getattr(last, "end_col_offset", None) + if not b.body: + return + new_first = b.body[0] + if isinstance(new_first, (Decorator, OverloadedFuncDef)): + # Decorated function lines are different between Python versions. + # copy the normalization we do for them to block first lines. 
+ b.line = new_first.line + b.column = new_first.column + + def as_block(self, stmts: list[ast3.stmt]) -> Block | None: + b = None + if stmts: + b = Block(self.fix_function_overloads(self.translate_stmt_list(stmts))) + self.set_block_lines(b, stmts) + return b + + def as_required_block( + self, stmts: list[ast3.stmt], *, can_strip: bool = False, is_coroutine: bool = False + ) -> Block: + assert stmts # must be non-empty + b = Block( + self.fix_function_overloads( + self.translate_stmt_list(stmts, can_strip=can_strip, is_coroutine=is_coroutine) + ) + ) + self.set_block_lines(b, stmts) + return b + + def fix_function_overloads(self, stmts: list[Statement]) -> list[Statement]: + ret: list[Statement] = [] + current_overload: list[OverloadPart] = [] + current_overload_name: str | None = None + last_unconditional_func_def: str | None = None + last_if_stmt: IfStmt | None = None + last_if_overload: Decorator | FuncDef | OverloadedFuncDef | None = None + last_if_stmt_overload_name: str | None = None + last_if_unknown_truth_value: IfStmt | None = None + skipped_if_stmts: list[IfStmt] = [] + for stmt in stmts: + if_overload_name: str | None = None + if_block_with_overload: Block | None = None + if_unknown_truth_value: IfStmt | None = None + if isinstance(stmt, IfStmt): + # Check IfStmt block to determine if function overloads can be merged + if_overload_name = self._check_ifstmt_for_overloads(stmt, current_overload_name) + if if_overload_name is not None: + (if_block_with_overload, if_unknown_truth_value) = ( + self._get_executable_if_block_with_overloads(stmt) + ) + + if ( + current_overload_name is not None + and isinstance(stmt, (Decorator, FuncDef)) + and stmt.name == current_overload_name + ): + if last_if_stmt is not None: + skipped_if_stmts.append(last_if_stmt) + if last_if_overload is not None: + # Last stmt was an IfStmt with same overload name + # Add overloads to current_overload + if isinstance(last_if_overload, OverloadedFuncDef): + current_overload.extend(last_if_overload.items) + else: + current_overload.append(last_if_overload) + last_if_stmt, last_if_overload = None, None + if last_if_unknown_truth_value: + self.fail_merge_overload(last_if_unknown_truth_value) + last_if_unknown_truth_value = None + current_overload.append(stmt) + if isinstance(stmt, FuncDef): + # This is, strictly speaking, wrong: there might be a decorated + # implementation. However, it only affects the error message we show: + # ideally it's "already defined", but "implementation must come last" + # is also reasonable. + # TODO: can we get rid of this completely and just always emit + # "implementation must come last" instead? + last_unconditional_func_def = stmt.name + elif ( + current_overload_name is not None + and isinstance(stmt, IfStmt) + and if_overload_name == current_overload_name + and last_unconditional_func_def != current_overload_name + ): + # IfStmt only contains stmts relevant to current_overload. + # Check if stmts are reachable and add them to current_overload, + # otherwise skip IfStmt to allow subsequent overload + # or function definitions. 
+ skipped_if_stmts.append(stmt) + if if_block_with_overload is None: + if if_unknown_truth_value is not None: + self.fail_merge_overload(if_unknown_truth_value) + continue + if last_if_overload is not None: + # Last stmt was an IfStmt with same overload name + # Add overloads to current_overload + if isinstance(last_if_overload, OverloadedFuncDef): + current_overload.extend(last_if_overload.items) + else: + current_overload.append(last_if_overload) + last_if_stmt, last_if_overload = None, None + if isinstance(if_block_with_overload.body[-1], OverloadedFuncDef): + skipped_if_stmts.extend(cast(list[IfStmt], if_block_with_overload.body[:-1])) + current_overload.extend(if_block_with_overload.body[-1].items) + else: + current_overload.append( + cast(Union[Decorator, FuncDef], if_block_with_overload.body[0]) + ) + else: + if last_if_stmt is not None: + ret.append(last_if_stmt) + last_if_stmt_overload_name = current_overload_name + last_if_stmt, last_if_overload = None, None + last_if_unknown_truth_value = None + + if current_overload and current_overload_name == last_if_stmt_overload_name: + # Remove last stmt (IfStmt) from ret if the overload names matched + # Only happens if no executable block had been found in IfStmt + popped = ret.pop() + assert isinstance(popped, IfStmt) + skipped_if_stmts.append(popped) + if current_overload and skipped_if_stmts: + # Add bare IfStmt (without overloads) to ret + # Required for mypy to be able to still check conditions + for if_stmt in skipped_if_stmts: + self._strip_contents_from_if_stmt(if_stmt) + ret.append(if_stmt) + skipped_if_stmts = [] + if len(current_overload) == 1: + ret.append(current_overload[0]) + elif len(current_overload) > 1: + ret.append(OverloadedFuncDef(current_overload)) + + # If we have multiple decorated functions named "_" next to each, we want to treat + # them as a series of regular FuncDefs instead of one OverloadedFuncDef because + # most of mypy/mypyc assumes that all the functions in an OverloadedFuncDef are + # related, but multiple underscore functions next to each other aren't necessarily + # related + last_unconditional_func_def = None + if isinstance(stmt, Decorator) and not unnamed_function(stmt.name): + current_overload = [stmt] + current_overload_name = stmt.name + elif isinstance(stmt, IfStmt) and if_overload_name is not None: + current_overload = [] + current_overload_name = if_overload_name + last_if_stmt = stmt + last_if_stmt_overload_name = None + if if_block_with_overload is not None: + skipped_if_stmts.extend( + cast(list[IfStmt], if_block_with_overload.body[:-1]) + ) + last_if_overload = cast( + Union[Decorator, FuncDef, OverloadedFuncDef], + if_block_with_overload.body[-1], + ) + last_if_unknown_truth_value = if_unknown_truth_value + else: + current_overload = [] + current_overload_name = None + ret.append(stmt) + + if current_overload and skipped_if_stmts: + # Add bare IfStmt (without overloads) to ret + # Required for mypy to be able to still check conditions + for if_stmt in skipped_if_stmts: + self._strip_contents_from_if_stmt(if_stmt) + ret.append(if_stmt) + if len(current_overload) == 1: + ret.append(current_overload[0]) + elif len(current_overload) > 1: + ret.append(OverloadedFuncDef(current_overload)) + elif last_if_overload is not None: + ret.append(last_if_overload) + elif last_if_stmt is not None: + ret.append(last_if_stmt) + return ret + + def _check_ifstmt_for_overloads( + self, stmt: IfStmt, current_overload_name: str | None = None + ) -> str | None: + """Check if IfStmt contains only overloads 
with the same name. + Return overload_name if found, None otherwise. + """ + # Check that block only contains a single Decorator, FuncDef, or OverloadedFuncDef. + # Multiple overloads have already been merged as OverloadedFuncDef. + if not ( + len(stmt.body[0].body) == 1 + and ( + isinstance(stmt.body[0].body[0], (Decorator, OverloadedFuncDef)) + or current_overload_name is not None + and isinstance(stmt.body[0].body[0], FuncDef) + ) + or len(stmt.body[0].body) > 1 + and isinstance(stmt.body[0].body[-1], OverloadedFuncDef) + and all(self._is_stripped_if_stmt(if_stmt) for if_stmt in stmt.body[0].body[:-1]) + ): + return None + + overload_name = cast( + Union[Decorator, FuncDef, OverloadedFuncDef], stmt.body[0].body[-1] + ).name + if stmt.else_body is None: + return overload_name + + if len(stmt.else_body.body) == 1: + # For elif: else_body contains an IfStmt itself -> do a recursive check. + if ( + isinstance(stmt.else_body.body[0], (Decorator, FuncDef, OverloadedFuncDef)) + and stmt.else_body.body[0].name == overload_name + ): + return overload_name + if ( + isinstance(stmt.else_body.body[0], IfStmt) + and self._check_ifstmt_for_overloads(stmt.else_body.body[0], current_overload_name) + == overload_name + ): + return overload_name + + return None + + def _get_executable_if_block_with_overloads( + self, stmt: IfStmt + ) -> tuple[Block | None, IfStmt | None]: + """Return block from IfStmt that will get executed. + + Return + 0 -> A block if sure that alternative blocks are unreachable. + 1 -> An IfStmt if the reachability of it can't be inferred, + i.e. the truth value is unknown. + """ + infer_reachability_of_if_statement(stmt, self.options) + if stmt.else_body is None and stmt.body[0].is_unreachable is True: + # always False condition with no else + return None, None + if ( + stmt.else_body is None + or stmt.body[0].is_unreachable is False + and stmt.else_body.is_unreachable is False + ): + # The truth value is unknown, thus not conclusive + return None, stmt + if stmt.else_body.is_unreachable is True: + # else_body will be set unreachable if condition is always True + return stmt.body[0], None + if stmt.body[0].is_unreachable is True: + # body will be set unreachable if condition is always False + # else_body can contain an IfStmt itself (for elif) -> do a recursive check + if isinstance(stmt.else_body.body[0], IfStmt): + return self._get_executable_if_block_with_overloads(stmt.else_body.body[0]) + return stmt.else_body, None + return None, stmt + + def _strip_contents_from_if_stmt(self, stmt: IfStmt) -> None: + """Remove contents from IfStmt. + + Needed to still be able to check the conditions after the contents + have been merged with the surrounding function overloads. + """ + if len(stmt.body) == 1: + stmt.body[0].body = [] + if stmt.else_body and len(stmt.else_body.body) == 1: + if isinstance(stmt.else_body.body[0], IfStmt): + self._strip_contents_from_if_stmt(stmt.else_body.body[0]) + else: + stmt.else_body.body = [] + + def _is_stripped_if_stmt(self, stmt: Statement) -> bool: + """Check stmt to make sure it is a stripped IfStmt. 
+ + See also: _strip_contents_from_if_stmt + """ + if not isinstance(stmt, IfStmt): + return False + + if not (len(stmt.body) == 1 and len(stmt.body[0].body) == 0): + # Body not empty + return False + + if not stmt.else_body or len(stmt.else_body.body) == 0: + # No or empty else_body + return True + + # For elif, IfStmt are stored recursively in else_body + return self._is_stripped_if_stmt(stmt.else_body.body[0]) + + def translate_module_id(self, id: str) -> str: + """Return the actual, internal module id for a source text id.""" + if id == self.options.custom_typing_module: + return "typing" + return id + + def visit_Module(self, mod: ast3.Module) -> MypyFile: + self.type_ignores = {} + for ti in mod.type_ignores: + parsed = parse_type_ignore_tag(ti.tag) + if parsed is not None: + self.type_ignores[ti.lineno] = parsed + else: + self.fail(message_registry.INVALID_TYPE_IGNORE, ti.lineno, -1, blocker=False) + + body = self.fix_function_overloads(self.translate_stmt_list(mod.body, ismodule=True)) + + ret = MypyFile(body, self.imports, False, ignored_lines=self.type_ignores) + ret.is_stub = self.is_stub + ret.path = self.path + return ret + + # --- stmt --- + # FunctionDef(identifier name, arguments args, + # stmt* body, expr* decorator_list, expr? returns, string? type_comment) + # arguments = (arg* args, arg? vararg, arg* kwonlyargs, expr* kw_defaults, + # arg? kwarg, expr* defaults) + def visit_FunctionDef(self, n: ast3.FunctionDef) -> FuncDef | Decorator: + return self.do_func_def(n) + + # AsyncFunctionDef(identifier name, arguments args, + # stmt* body, expr* decorator_list, expr? returns, string? type_comment) + def visit_AsyncFunctionDef(self, n: ast3.AsyncFunctionDef) -> FuncDef | Decorator: + return self.do_func_def(n, is_coroutine=True) + + def do_func_def( + self, n: ast3.FunctionDef | ast3.AsyncFunctionDef, is_coroutine: bool = False + ) -> FuncDef | Decorator: + """Helper shared between visit_FunctionDef and visit_AsyncFunctionDef.""" + self.class_and_function_stack.append("D") + no_type_check = bool( + n.decorator_list and any(is_no_type_check_decorator(d) for d in n.decorator_list) + ) + + lineno = n.lineno + args = self.transform_args(n.args, lineno, no_type_check=no_type_check) + if special_function_elide_names(n.name): + for arg in args: + arg.pos_only = True + + arg_kinds = [arg.kind for arg in args] + arg_names = [None if arg.pos_only else arg.variable.name for arg in args] + # Type parameters, if using new syntax for generics (PEP 695) + explicit_type_params: list[TypeParam] | None = None + + arg_types: list[Type | None] = [] + if no_type_check: + arg_types = [None] * len(args) + return_type = None + elif n.type_comment is not None: + try: + func_type_ast = ast3_parse(n.type_comment, "", "func_type") + assert isinstance(func_type_ast, FunctionType) + # for ellipsis arg + if ( + len(func_type_ast.argtypes) == 1 + and isinstance(func_type_ast.argtypes[0], ast3.Constant) + and func_type_ast.argtypes[0].value is Ellipsis + ): + if n.returns: + # PEP 484 disallows both type annotations and type comments + self.fail( + message_registry.DUPLICATE_TYPE_SIGNATURES, + lineno, + n.col_offset, + blocker=False, + ) + arg_types = [ + ( + a.type_annotation + if a.type_annotation is not None + else AnyType(TypeOfAny.unannotated) + ) + for a in args + ] + else: + # PEP 484 disallows both type annotations and type comments + if n.returns or any(a.type_annotation is not None for a in args): + self.fail( + message_registry.DUPLICATE_TYPE_SIGNATURES, + lineno, + n.col_offset, + blocker=False, 
+ ) + translated_args: list[Type] = TypeConverter( + self.errors, line=lineno, override_column=n.col_offset + ).translate_expr_list(func_type_ast.argtypes) + # Use a cast to work around `list` invariance + arg_types = cast(list[Optional[Type]], translated_args) + return_type = TypeConverter(self.errors, line=lineno).visit(func_type_ast.returns) + + # add implicit self type + in_method_scope = self.class_and_function_stack[-2:] == ["C", "D"] + if in_method_scope and len(arg_types) < len(args): + arg_types.insert(0, AnyType(TypeOfAny.special_form)) + except SyntaxError: + stripped_type = n.type_comment.split("#", 2)[0].strip() + err_msg = message_registry.TYPE_COMMENT_SYNTAX_ERROR_VALUE.format(stripped_type) + self.fail(err_msg, lineno, n.col_offset, blocker=False) + if n.type_comment and n.type_comment[0] not in ["(", "#"]: + self.note( + "Suggestion: wrap argument types in parentheses", lineno, n.col_offset + ) + arg_types = [AnyType(TypeOfAny.from_error)] * len(args) + return_type = AnyType(TypeOfAny.from_error) + else: + if sys.version_info >= (3, 12) and n.type_params: + explicit_type_params = self.translate_type_params(n.type_params) + + arg_types = [a.type_annotation for a in args] + return_type = TypeConverter( + self.errors, line=n.returns.lineno if n.returns else lineno + ).visit(n.returns) + + for arg, arg_type in zip(args, arg_types): + self.set_type_optional(arg_type, arg.initializer) + + func_type = None + if any(arg_types) or return_type: + if len(arg_types) != 1 and any(isinstance(t, EllipsisType) for t in arg_types): + self.fail( + message_registry.ELLIPSIS_WITH_OTHER_TYPEARGS, + lineno, + n.col_offset, + blocker=False, + ) + elif len(arg_types) > len(arg_kinds): + self.fail( + message_registry.TYPE_SIGNATURE_TOO_MANY_ARGS, + lineno, + n.col_offset, + blocker=False, + ) + elif len(arg_types) < len(arg_kinds): + self.fail( + message_registry.TYPE_SIGNATURE_TOO_FEW_ARGS, + lineno, + n.col_offset, + blocker=False, + ) + else: + func_type = CallableType( + [a if a is not None else AnyType(TypeOfAny.unannotated) for a in arg_types], + arg_kinds, + arg_names, + return_type if return_type is not None else AnyType(TypeOfAny.unannotated), + _dummy_fallback, + ) + + # End position is always the same. 
+ end_line = getattr(n, "end_lineno", None) + end_column = getattr(n, "end_col_offset", None) + + self.class_and_function_stack.pop() + self.class_and_function_stack.append("F") + body = self.as_required_block(n.body, can_strip=True, is_coroutine=is_coroutine) + func_def = FuncDef(n.name, args, body, func_type, explicit_type_params) + if isinstance(func_def.type, CallableType): + # semanal.py does some in-place modifications we want to avoid + func_def.unanalyzed_type = func_def.type.copy_modified() + if is_coroutine: + func_def.is_coroutine = True + if func_type is not None: + func_type.definition = func_def + func_type.set_line(lineno) + + if n.decorator_list: + var = Var(func_def.name) + var.is_ready = False + var.set_line(lineno) + + func_def.is_decorated = True + self.set_line(func_def, n) + + deco = Decorator(func_def, self.translate_expr_list(n.decorator_list), var) + first = n.decorator_list[0] + deco.set_line(first.lineno, first.col_offset, end_line, end_column) + retval: FuncDef | Decorator = deco + else: + self.set_line(func_def, n) + retval = func_def + if self.options.include_docstrings: + func_def.docstring = ast3.get_docstring(n, clean=False) + self.class_and_function_stack.pop() + return retval + + def set_type_optional(self, type: Type | None, initializer: Expression | None) -> None: + if not self.options.implicit_optional: + return + # Indicate that type should be wrapped in an Optional if arg is initialized to None. + optional = isinstance(initializer, NameExpr) and initializer.name == "None" + if isinstance(type, UnboundType): + type.optional = optional + + def transform_args( + self, args: ast3.arguments, line: int, no_type_check: bool = False + ) -> list[Argument]: + new_args = [] + names: list[ast3.arg] = [] + posonlyargs = getattr(args, "posonlyargs", cast(list[ast3.arg], [])) + args_args = posonlyargs + args.args + args_defaults = args.defaults + num_no_defaults = len(args_args) - len(args_defaults) + # positional arguments without defaults + for i, a in enumerate(args_args[:num_no_defaults]): + pos_only = i < len(posonlyargs) + new_args.append(self.make_argument(a, None, ARG_POS, no_type_check, pos_only)) + names.append(a) + + # positional arguments with defaults + for i, (a, d) in enumerate(zip(args_args[num_no_defaults:], args_defaults)): + pos_only = num_no_defaults + i < len(posonlyargs) + new_args.append(self.make_argument(a, d, ARG_OPT, no_type_check, pos_only)) + names.append(a) + + # *arg + if args.vararg is not None: + new_args.append(self.make_argument(args.vararg, None, ARG_STAR, no_type_check)) + names.append(args.vararg) + + # keyword-only arguments with defaults + for a, kd in zip(args.kwonlyargs, args.kw_defaults): + new_args.append( + self.make_argument( + a, kd, ARG_NAMED if kd is None else ARG_NAMED_OPT, no_type_check + ) + ) + names.append(a) + + # **kwarg + if args.kwarg is not None: + new_args.append(self.make_argument(args.kwarg, None, ARG_STAR2, no_type_check)) + names.append(args.kwarg) + + check_arg_names([arg.variable.name for arg in new_args], names, self.fail_arg) + + return new_args + + def make_argument( + self, + arg: ast3.arg, + default: ast3.expr | None, + kind: ArgKind, + no_type_check: bool, + pos_only: bool = False, + ) -> Argument: + if no_type_check: + arg_type = None + else: + annotation = arg.annotation + type_comment = arg.type_comment + if annotation is not None and type_comment is not None: + self.fail( + message_registry.DUPLICATE_TYPE_SIGNATURES, + arg.lineno, + arg.col_offset, + blocker=False, + ) + arg_type = None + 
if annotation is not None: + arg_type = TypeConverter(self.errors, line=arg.lineno).visit(annotation) + else: + arg_type = self.translate_type_comment(arg, type_comment) + if argument_elide_name(arg.arg): + pos_only = True + + var = Var(arg.arg, arg_type) + var.is_inferred = False + argument = Argument(var, arg_type, self.visit(default), kind, pos_only) + argument.set_line( + arg.lineno, + arg.col_offset, + getattr(arg, "end_lineno", None), + getattr(arg, "end_col_offset", None), + ) + return argument + + def fail_arg(self, msg: str, arg: ast3.arg) -> None: + self.fail(ErrorMessage(msg), arg.lineno, arg.col_offset, blocker=True) + + # ClassDef(identifier name, + # expr* bases, + # keyword* keywords, + # stmt* body, + # expr* decorator_list) + def visit_ClassDef(self, n: ast3.ClassDef) -> ClassDef: + self.class_and_function_stack.append("C") + keywords = [(kw.arg, self.visit(kw.value)) for kw in n.keywords if kw.arg] + + # Type parameters, if using new syntax for generics (PEP 695) + explicit_type_params: list[TypeParam] | None = None + + if sys.version_info >= (3, 12) and n.type_params: + explicit_type_params = self.translate_type_params(n.type_params) + + cdef = ClassDef( + n.name, + self.as_required_block(n.body), + None, + self.translate_expr_list(n.bases), + metaclass=dict(keywords).get("metaclass"), + keywords=keywords, + type_args=explicit_type_params, + ) + cdef.decorators = self.translate_expr_list(n.decorator_list) + self.set_line(cdef, n) + + if self.options.include_docstrings: + cdef.docstring = ast3.get_docstring(n, clean=False) + cdef.column = n.col_offset + cdef.end_line = getattr(n, "end_lineno", None) + cdef.end_column = getattr(n, "end_col_offset", None) + self.class_and_function_stack.pop() + return cdef + + def validate_type_param(self, type_param: ast_TypeVar) -> None: + incorrect_expr = find_disallowed_expression_in_annotation_scope(type_param.bound) + if incorrect_expr is None: + return + if isinstance(incorrect_expr, (ast3.Yield, ast3.YieldFrom)): + self.fail( + message_registry.TYPE_VAR_YIELD_EXPRESSION_IN_BOUND, + type_param.lineno, + type_param.col_offset, + blocker=True, + ) + if isinstance(incorrect_expr, ast3.NamedExpr): + self.fail( + message_registry.TYPE_VAR_NAMED_EXPRESSION_IN_BOUND, + type_param.lineno, + type_param.col_offset, + blocker=True, + ) + if isinstance(incorrect_expr, ast3.Await): + self.fail( + message_registry.TYPE_VAR_AWAIT_EXPRESSION_IN_BOUND, + type_param.lineno, + type_param.col_offset, + blocker=True, + ) + + def translate_type_params(self, type_params: list[Any]) -> list[TypeParam]: + explicit_type_params = [] + for p in type_params: + bound: Type | None = None + values: list[Type] = [] + default: Type | None = None + if sys.version_info >= (3, 13): + default = TypeConverter(self.errors, line=p.lineno).visit(p.default_value) + if isinstance(p, ast_ParamSpec): # type: ignore[misc] + explicit_type_params.append(TypeParam(p.name, PARAM_SPEC_KIND, None, [], default)) + elif isinstance(p, ast_TypeVarTuple): # type: ignore[misc] + explicit_type_params.append( + TypeParam(p.name, TYPE_VAR_TUPLE_KIND, None, [], default) + ) + else: + if isinstance(p.bound, ast3.Tuple): + if len(p.bound.elts) < 2: + self.fail( + message_registry.TYPE_VAR_TOO_FEW_CONSTRAINED_TYPES, + p.lineno, + p.col_offset, + blocker=False, + ) + else: + conv = TypeConverter(self.errors, line=p.lineno) + values = [conv.visit(t) for t in p.bound.elts] + elif p.bound is not None: + self.validate_type_param(p) + bound = TypeConverter(self.errors, line=p.lineno).visit(p.bound) + 
explicit_type_params.append( + TypeParam(p.name, TYPE_VAR_KIND, bound, values, default) + ) + return explicit_type_params + + # Return(expr? value) + def visit_Return(self, n: ast3.Return) -> ReturnStmt: + node = ReturnStmt(self.visit(n.value)) + return self.set_line(node, n) + + # Delete(expr* targets) + def visit_Delete(self, n: ast3.Delete) -> DelStmt: + if len(n.targets) > 1: + tup = TupleExpr(self.translate_expr_list(n.targets)) + tup.set_line(n.lineno) + node = DelStmt(tup) + else: + node = DelStmt(self.visit(n.targets[0])) + return self.set_line(node, n) + + # Assign(expr* targets, expr? value, string? type_comment, expr? annotation) + def visit_Assign(self, n: ast3.Assign) -> AssignmentStmt: + lvalues = self.translate_expr_list(n.targets) + rvalue = self.visit(n.value) + typ = self.translate_type_comment(n, n.type_comment) + s = AssignmentStmt(lvalues, rvalue, type=typ, new_syntax=False) + return self.set_line(s, n) + + # AnnAssign(expr target, expr annotation, expr? value, int simple) + def visit_AnnAssign(self, n: ast3.AnnAssign) -> AssignmentStmt: + line = n.lineno + if n.value is None: # always allow 'x: int' + rvalue: Expression = TempNode(AnyType(TypeOfAny.special_form), no_rhs=True) + self.set_line(rvalue, n) + else: + rvalue = self.visit(n.value) + typ = TypeConverter(self.errors, line=line).visit(n.annotation) + assert typ is not None + typ.column = n.annotation.col_offset + s = AssignmentStmt([self.visit(n.target)], rvalue, type=typ, new_syntax=True) + return self.set_line(s, n) + + # AugAssign(expr target, operator op, expr value) + def visit_AugAssign(self, n: ast3.AugAssign) -> OperatorAssignmentStmt: + s = OperatorAssignmentStmt( + self.from_operator(n.op), self.visit(n.target), self.visit(n.value) + ) + return self.set_line(s, n) + + # For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) + def visit_For(self, n: ast3.For) -> ForStmt: + target_type = self.translate_type_comment(n, n.type_comment) + node = ForStmt( + self.visit(n.target), + self.visit(n.iter), + self.as_required_block(n.body), + self.as_block(n.orelse), + target_type, + ) + return self.set_line(node, n) + + # AsyncFor(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) + def visit_AsyncFor(self, n: ast3.AsyncFor) -> ForStmt: + target_type = self.translate_type_comment(n, n.type_comment) + node = ForStmt( + self.visit(n.target), + self.visit(n.iter), + self.as_required_block(n.body), + self.as_block(n.orelse), + target_type, + ) + node.is_async = True + return self.set_line(node, n) + + # While(expr test, stmt* body, stmt* orelse) + def visit_While(self, n: ast3.While) -> WhileStmt: + node = WhileStmt( + self.visit(n.test), self.as_required_block(n.body), self.as_block(n.orelse) + ) + return self.set_line(node, n) + + # If(expr test, stmt* body, stmt* orelse) + def visit_If(self, n: ast3.If) -> IfStmt: + node = IfStmt( + [self.visit(n.test)], [self.as_required_block(n.body)], self.as_block(n.orelse) + ) + return self.set_line(node, n) + + # With(withitem* items, stmt* body, string? type_comment) + def visit_With(self, n: ast3.With) -> WithStmt: + target_type = self.translate_type_comment(n, n.type_comment) + node = WithStmt( + [self.visit(i.context_expr) for i in n.items], + [self.visit(i.optional_vars) for i in n.items], + self.as_required_block(n.body), + target_type, + ) + return self.set_line(node, n) + + # AsyncWith(withitem* items, stmt* body, string? 
type_comment) + def visit_AsyncWith(self, n: ast3.AsyncWith) -> WithStmt: + target_type = self.translate_type_comment(n, n.type_comment) + s = WithStmt( + [self.visit(i.context_expr) for i in n.items], + [self.visit(i.optional_vars) for i in n.items], + self.as_required_block(n.body), + target_type, + ) + s.is_async = True + return self.set_line(s, n) + + # Raise(expr? exc, expr? cause) + def visit_Raise(self, n: ast3.Raise) -> RaiseStmt: + node = RaiseStmt(self.visit(n.exc), self.visit(n.cause)) + return self.set_line(node, n) + + # Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) + def visit_Try(self, n: ast3.Try) -> TryStmt: + vs = [ + self.set_line(NameExpr(h.name), h) if h.name is not None else None for h in n.handlers + ] + types = [self.visit(h.type) for h in n.handlers] + handlers = [self.as_required_block(h.body) for h in n.handlers] + + node = TryStmt( + self.as_required_block(n.body), + vs, + types, + handlers, + self.as_block(n.orelse), + self.as_block(n.finalbody), + ) + return self.set_line(node, n) + + def visit_TryStar(self, n: TryStar) -> TryStmt: + vs = [ + self.set_line(NameExpr(h.name), h) if h.name is not None else None for h in n.handlers + ] + types = [self.visit(h.type) for h in n.handlers] + handlers = [self.as_required_block(h.body) for h in n.handlers] + + node = TryStmt( + self.as_required_block(n.body), + vs, + types, + handlers, + self.as_block(n.orelse), + self.as_block(n.finalbody), + ) + node.is_star = True + return self.set_line(node, n) + + # Assert(expr test, expr? msg) + def visit_Assert(self, n: ast3.Assert) -> AssertStmt: + node = AssertStmt(self.visit(n.test), self.visit(n.msg)) + return self.set_line(node, n) + + # Import(alias* names) + def visit_Import(self, n: ast3.Import) -> Import: + names: list[tuple[str, str | None]] = [] + for alias in n.names: + name = self.translate_module_id(alias.name) + asname = alias.asname + if asname is None and name != alias.name: + # if the module name has been translated (and it's not already + # an explicit import-as), make it an implicit import-as the + # original name + asname = alias.name + names.append((name, asname)) + i = Import(names) + self.imports.append(i) + return self.set_line(i, n) + + # ImportFrom(identifier? module, alias* names, int? 
level) + def visit_ImportFrom(self, n: ast3.ImportFrom) -> ImportBase: + assert n.level is not None + if len(n.names) == 1 and n.names[0].name == "*": + mod = n.module if n.module is not None else "" + i: ImportBase = ImportAll(mod, n.level) + else: + i = ImportFrom( + self.translate_module_id(n.module) if n.module is not None else "", + n.level, + [(a.name, a.asname) for a in n.names], + ) + self.imports.append(i) + return self.set_line(i, n) + + # Global(identifier* names) + def visit_Global(self, n: ast3.Global) -> GlobalDecl: + g = GlobalDecl(n.names) + return self.set_line(g, n) + + # Nonlocal(identifier* names) + def visit_Nonlocal(self, n: ast3.Nonlocal) -> NonlocalDecl: + d = NonlocalDecl(n.names) + return self.set_line(d, n) + + # Expr(expr value) + def visit_Expr(self, n: ast3.Expr) -> ExpressionStmt: + value = self.visit(n.value) + node = ExpressionStmt(value) + return self.set_line(node, n) + + # Pass + def visit_Pass(self, n: ast3.Pass) -> PassStmt: + s = PassStmt() + return self.set_line(s, n) + + # Break + def visit_Break(self, n: ast3.Break) -> BreakStmt: + s = BreakStmt() + return self.set_line(s, n) + + # Continue + def visit_Continue(self, n: ast3.Continue) -> ContinueStmt: + s = ContinueStmt() + return self.set_line(s, n) + + # --- expr --- + + def visit_NamedExpr(self, n: ast3.NamedExpr) -> AssignmentExpr: + s = AssignmentExpr(self.visit(n.target), self.visit(n.value)) + return self.set_line(s, n) + + # BoolOp(boolop op, expr* values) + def visit_BoolOp(self, n: ast3.BoolOp) -> OpExpr: + # mypy translates (1 and 2 and 3) as (1 and (2 and 3)) + assert len(n.values) >= 2 + op_node = n.op + if isinstance(op_node, ast3.And): + op = "and" + elif isinstance(op_node, ast3.Or): + op = "or" + else: + raise RuntimeError("unknown BoolOp " + str(type(n))) + + # potentially inefficient! 
+ return self.group(op, self.translate_expr_list(n.values), n) + + def group(self, op: str, vals: list[Expression], n: ast3.expr) -> OpExpr: + if len(vals) == 2: + e = OpExpr(op, vals[0], vals[1]) + else: + e = OpExpr(op, vals[0], self.group(op, vals[1:], n)) + return self.set_line(e, n) + + # BinOp(expr left, operator op, expr right) + def visit_BinOp(self, n: ast3.BinOp) -> OpExpr: + op = self.from_operator(n.op) + + if op is None: + raise RuntimeError("cannot translate BinOp " + str(type(n.op))) + + e = OpExpr(op, self.visit(n.left), self.visit(n.right)) + return self.set_line(e, n) + + # UnaryOp(unaryop op, expr operand) + def visit_UnaryOp(self, n: ast3.UnaryOp) -> UnaryExpr: + op = None + if isinstance(n.op, ast3.Invert): + op = "~" + elif isinstance(n.op, ast3.Not): + op = "not" + elif isinstance(n.op, ast3.UAdd): + op = "+" + elif isinstance(n.op, ast3.USub): + op = "-" + + if op is None: + raise RuntimeError("cannot translate UnaryOp " + str(type(n.op))) + + e = UnaryExpr(op, self.visit(n.operand)) + return self.set_line(e, n) + + # Lambda(arguments args, expr body) + def visit_Lambda(self, n: ast3.Lambda) -> LambdaExpr: + body = ast3.Return(n.body) + body.lineno = n.body.lineno + body.col_offset = n.body.col_offset + + self.class_and_function_stack.append("L") + e = LambdaExpr(self.transform_args(n.args, n.lineno), self.as_required_block([body])) + self.class_and_function_stack.pop() + e.set_line(n.lineno, n.col_offset) # Overrides set_line -- can't use self.set_line + return e + + # IfExp(expr test, expr body, expr orelse) + def visit_IfExp(self, n: ast3.IfExp) -> ConditionalExpr: + e = ConditionalExpr(self.visit(n.test), self.visit(n.body), self.visit(n.orelse)) + return self.set_line(e, n) + + # Dict(expr* keys, expr* values) + def visit_Dict(self, n: ast3.Dict) -> DictExpr: + e = DictExpr( + list(zip(self.translate_opt_expr_list(n.keys), self.translate_expr_list(n.values))) + ) + return self.set_line(e, n) + + # Set(expr* elts) + def visit_Set(self, n: ast3.Set) -> SetExpr: + e = SetExpr(self.translate_expr_list(n.elts)) + return self.set_line(e, n) + + # ListComp(expr elt, comprehension* generators) + def visit_ListComp(self, n: ast3.ListComp) -> ListComprehension: + e = ListComprehension(self.visit_GeneratorExp(cast(ast3.GeneratorExp, n))) + return self.set_line(e, n) + + # SetComp(expr elt, comprehension* generators) + def visit_SetComp(self, n: ast3.SetComp) -> SetComprehension: + e = SetComprehension(self.visit_GeneratorExp(cast(ast3.GeneratorExp, n))) + return self.set_line(e, n) + + # DictComp(expr key, expr value, comprehension* generators) + def visit_DictComp(self, n: ast3.DictComp) -> DictionaryComprehension: + targets = [self.visit(c.target) for c in n.generators] + iters = [self.visit(c.iter) for c in n.generators] + ifs_list = [self.translate_expr_list(c.ifs) for c in n.generators] + is_async = [bool(c.is_async) for c in n.generators] + e = DictionaryComprehension( + self.visit(n.key), self.visit(n.value), targets, iters, ifs_list, is_async + ) + return self.set_line(e, n) + + # GeneratorExp(expr elt, comprehension* generators) + def visit_GeneratorExp(self, n: ast3.GeneratorExp) -> GeneratorExpr: + targets = [self.visit(c.target) for c in n.generators] + iters = [self.visit(c.iter) for c in n.generators] + ifs_list = [self.translate_expr_list(c.ifs) for c in n.generators] + is_async = [bool(c.is_async) for c in n.generators] + e = GeneratorExpr(self.visit(n.elt), targets, iters, ifs_list, is_async) + return self.set_line(e, n) + + # Await(expr value) + def 
visit_Await(self, n: ast3.Await) -> AwaitExpr: + v = self.visit(n.value) + e = AwaitExpr(v) + return self.set_line(e, n) + + # Yield(expr? value) + def visit_Yield(self, n: ast3.Yield) -> YieldExpr: + e = YieldExpr(self.visit(n.value)) + return self.set_line(e, n) + + # YieldFrom(expr value) + def visit_YieldFrom(self, n: ast3.YieldFrom) -> YieldFromExpr: + e = YieldFromExpr(self.visit(n.value)) + return self.set_line(e, n) + + # Compare(expr left, cmpop* ops, expr* comparators) + def visit_Compare(self, n: ast3.Compare) -> ComparisonExpr: + operators = [self.from_comp_operator(o) for o in n.ops] + operands = self.translate_expr_list([n.left] + n.comparators) + e = ComparisonExpr(operators, operands) + return self.set_line(e, n) + + # Call(expr func, expr* args, keyword* keywords) + # keyword = (identifier? arg, expr value) + def visit_Call(self, n: Call) -> CallExpr: + args = n.args + keywords = n.keywords + keyword_names = [k.arg for k in keywords] + arg_types = self.translate_expr_list( + [a.value if isinstance(a, Starred) else a for a in args] + [k.value for k in keywords] + ) + arg_kinds = [ARG_STAR if type(a) is Starred else ARG_POS for a in args] + [ + ARG_STAR2 if arg is None else ARG_NAMED for arg in keyword_names + ] + e = CallExpr( + self.visit(n.func), + arg_types, + arg_kinds, + cast("list[Optional[str]]", [None] * len(args)) + keyword_names, + ) + return self.set_line(e, n) + + # Constant(object value) + def visit_Constant(self, n: ast3.Constant) -> Any: + val = n.value + e: Any = None + if val is None: + e = NameExpr("None") + elif isinstance(val, str): + e = StrExpr(val) + elif isinstance(val, bytes): + e = BytesExpr(bytes_to_human_readable_repr(val)) + elif isinstance(val, bool): # Must check before int! + e = NameExpr(str(val)) + elif isinstance(val, int): + e = IntExpr(val) + elif isinstance(val, float): + e = FloatExpr(val) + elif isinstance(val, complex): + e = ComplexExpr(val) + elif val is Ellipsis: + e = EllipsisExpr() + else: + raise RuntimeError("Constant not implemented for " + str(type(val))) + return self.set_line(e, n) + + # JoinedStr(expr* values) + def visit_JoinedStr(self, n: ast3.JoinedStr) -> Expression: + # Each of n.values is a str or FormattedValue; we just concatenate + # them all using ''.join. + empty_string = StrExpr("") + empty_string.set_line(n.lineno, n.col_offset) + strs_to_join = ListExpr(self.translate_expr_list(n.values)) + strs_to_join.set_line(empty_string) + # Don't make unnecessary join call if there is only one str to join + if len(strs_to_join.items) == 1: + return self.set_line(strs_to_join.items[0], n) + elif len(strs_to_join.items) > 1: + last = strs_to_join.items[-1] + if isinstance(last, StrExpr) and last.value == "": + # 3.12 can add an empty literal at the end. Delete it for consistency + # between Python versions. + del strs_to_join.items[-1:] + join_method = MemberExpr(empty_string, "join") + join_method.set_line(empty_string) + result_expression = CallExpr(join_method, [strs_to_join], [ARG_POS], [None]) + return self.set_line(result_expression, n) + + # FormattedValue(expr value) + def visit_FormattedValue(self, n: ast3.FormattedValue) -> Expression: + # A FormattedValue is a component of a JoinedStr, or it can exist + # on its own. We translate them to individual '{}'.format(value) + # calls. Format specifier and conversion information is passed along + # to allow mypyc to support f-strings with format specifiers and conversions. 
+ val_exp = self.visit(n.value) + val_exp.set_line(n.lineno, n.col_offset) + conv_str = "" if n.conversion < 0 else "!" + chr(n.conversion) + format_string = StrExpr("{" + conv_str + ":{}}") + format_spec_exp = self.visit(n.format_spec) if n.format_spec is not None else StrExpr("") + format_string.set_line(n.lineno, n.col_offset) + format_method = MemberExpr(format_string, "format") + format_method.set_line(format_string) + result_expression = CallExpr( + format_method, [val_exp, format_spec_exp], [ARG_POS, ARG_POS], [None, None] + ) + return self.set_line(result_expression, n) + + # TemplateStr(expr* values) + def visit_TemplateStr(self, n: ast_TemplateStr) -> Expression: + self.fail( + ErrorMessage("PEP 750 template strings are not yet supported"), + n.lineno, + n.col_offset, + blocker=False, + ) + e = TempNode(AnyType(TypeOfAny.from_error)) + return self.set_line(e, n) + + # Interpolation(expr value, constant str, int conversion, expr? format_spec) + def visit_Interpolation(self, n: ast_Interpolation) -> Expression: + assert False, "Unreachable" + + # Attribute(expr value, identifier attr, expr_context ctx) + def visit_Attribute(self, n: Attribute) -> MemberExpr | SuperExpr: + value = n.value + member_expr = MemberExpr(self.visit(value), n.attr) + obj = member_expr.expr + if ( + isinstance(obj, CallExpr) + and isinstance(obj.callee, NameExpr) + and obj.callee.name == "super" + ): + e: MemberExpr | SuperExpr = SuperExpr(member_expr.name, obj) + else: + e = member_expr + return self.set_line(e, n) + + # Subscript(expr value, slice slice, expr_context ctx) + def visit_Subscript(self, n: ast3.Subscript) -> IndexExpr: + e = IndexExpr(self.visit(n.value), self.visit(n.slice)) + return self.set_line(e, n) + + # Starred(expr value, expr_context ctx) + def visit_Starred(self, n: Starred) -> StarExpr: + e = StarExpr(self.visit(n.value)) + return self.set_line(e, n) + + # Name(identifier id, expr_context ctx) + def visit_Name(self, n: Name) -> NameExpr: + e = NameExpr(n.id) + return self.set_line(e, n) + + # List(expr* elts, expr_context ctx) + def visit_List(self, n: ast3.List) -> ListExpr | TupleExpr: + expr_list: list[Expression] = [self.visit(e) for e in n.elts] + if isinstance(n.ctx, ast3.Store): + # [x, y] = z and (x, y) = z means exactly the same thing + e: ListExpr | TupleExpr = TupleExpr(expr_list) + else: + e = ListExpr(expr_list) + return self.set_line(e, n) + + # Tuple(expr* elts, expr_context ctx) + def visit_Tuple(self, n: ast3.Tuple) -> TupleExpr: + e = TupleExpr(self.translate_expr_list(n.elts)) + return self.set_line(e, n) + + # Slice(expr? lower, expr? upper, expr? 
step) + def visit_Slice(self, n: ast3.Slice) -> SliceExpr: + e = SliceExpr(self.visit(n.lower), self.visit(n.upper), self.visit(n.step)) + return self.set_line(e, n) + + # Match(expr subject, match_case* cases) # python 3.10 and later + def visit_Match(self, n: Match) -> MatchStmt: + node = MatchStmt( + self.visit(n.subject), + [self.visit(c.pattern) for c in n.cases], + [self.visit(c.guard) for c in n.cases], + [self.as_required_block(c.body) for c in n.cases], + ) + return self.set_line(node, n) + + def visit_MatchValue(self, n: MatchValue) -> ValuePattern: + node = ValuePattern(self.visit(n.value)) + return self.set_line(node, n) + + def visit_MatchSingleton(self, n: MatchSingleton) -> SingletonPattern: + node = SingletonPattern(n.value) + return self.set_line(node, n) + + def visit_MatchSequence(self, n: MatchSequence) -> SequencePattern: + patterns = [self.visit(p) for p in n.patterns] + stars = [p for p in patterns if isinstance(p, StarredPattern)] + assert len(stars) < 2 + + node = SequencePattern(patterns) + return self.set_line(node, n) + + def visit_MatchStar(self, n: MatchStar) -> StarredPattern: + if n.name is None: + node = StarredPattern(None) + else: + name = self.set_line(NameExpr(n.name), n) + node = StarredPattern(name) + + return self.set_line(node, n) + + def visit_MatchMapping(self, n: MatchMapping) -> MappingPattern: + keys = [self.visit(k) for k in n.keys] + values = [self.visit(v) for v in n.patterns] + + if n.rest is None: + rest = None + else: + rest = NameExpr(n.rest) + + node = MappingPattern(keys, values, rest) + return self.set_line(node, n) + + def visit_MatchClass(self, n: MatchClass) -> ClassPattern: + class_ref = self.visit(n.cls) + assert isinstance(class_ref, RefExpr) + positionals = [self.visit(p) for p in n.patterns] + keyword_keys = n.kwd_attrs + keyword_values = [self.visit(p) for p in n.kwd_patterns] + + node = ClassPattern(class_ref, positionals, keyword_keys, keyword_values) + return self.set_line(node, n) + + # MatchAs(expr pattern, identifier name) + def visit_MatchAs(self, n: MatchAs) -> AsPattern: + if n.name is None: + name = None + else: + name = NameExpr(n.name) + name = self.set_line(name, n) + node = AsPattern(self.visit(n.pattern), name) + return self.set_line(node, n) + + # MatchOr(expr* pattern) + def visit_MatchOr(self, n: MatchOr) -> OrPattern: + node = OrPattern([self.visit(pattern) for pattern in n.patterns]) + return self.set_line(node, n) + + def validate_type_alias(self, n: ast_TypeAlias) -> None: + incorrect_expr = find_disallowed_expression_in_annotation_scope(n.value) + if incorrect_expr is None: + return + if isinstance(incorrect_expr, (ast3.Yield, ast3.YieldFrom)): + self.fail( + message_registry.TYPE_ALIAS_WITH_YIELD_EXPRESSION, + n.lineno, + n.col_offset, + blocker=True, + ) + if isinstance(incorrect_expr, ast3.NamedExpr): + self.fail( + message_registry.TYPE_ALIAS_WITH_NAMED_EXPRESSION, + n.lineno, + n.col_offset, + blocker=True, + ) + if isinstance(incorrect_expr, ast3.Await): + self.fail( + message_registry.TYPE_ALIAS_WITH_AWAIT_EXPRESSION, + n.lineno, + n.col_offset, + blocker=True, + ) + + # TypeAlias(identifier name, type_param* type_params, expr value) + def visit_TypeAlias(self, n: ast_TypeAlias) -> TypeAliasStmt | AssignmentStmt: + node: TypeAliasStmt | AssignmentStmt + type_params = self.translate_type_params(n.type_params) + self.validate_type_alias(n) + value = self.visit(n.value) + # Since the value is evaluated lazily, wrap the value inside a lambda. + # This helps mypyc. 
+ ret = ReturnStmt(value) + self.set_line(ret, n.value) + value_func = LambdaExpr(body=Block([ret])) + self.set_line(value_func, n.value) + node = TypeAliasStmt(self.visit_Name(n.name), type_params, value_func) + return self.set_line(node, n) + + +class TypeConverter: + def __init__( + self, + errors: Errors | None, + line: int = -1, + override_column: int = -1, + is_evaluated: bool = True, + ) -> None: + self.errors = errors + self.line = line + self.override_column = override_column + self.node_stack: list[AST] = [] + self.is_evaluated = is_evaluated + + def convert_column(self, column: int) -> int: + """Apply column override if defined; otherwise return column. + + Column numbers are sometimes incorrect in the AST and the column + override can be used to work around that. + """ + if self.override_column < 0: + return column + else: + return self.override_column + + def invalid_type(self, node: AST, note: str | None = None) -> RawExpressionType: + """Constructs a type representing some expression that normally forms an invalid type. + For example, if we see a type hint that says "3 + 4", we would transform that + expression into a RawExpressionType. + + The semantic analysis layer will report an "Invalid type" error when it + encounters this type, along with the given note if one is provided. + + See RawExpressionType's docstring for more details on how it's used. + """ + return RawExpressionType( + None, "typing.Any", line=self.line, column=getattr(node, "col_offset", -1), note=note + ) + + @overload + def visit(self, node: ast3.expr) -> ProperType: ... + + @overload + def visit(self, node: AST | None) -> ProperType | None: ... + + def visit(self, node: AST | None) -> ProperType | None: + """Modified visit -- keep track of the stack of nodes""" + if node is None: + return None + self.node_stack.append(node) + try: + method = "visit_" + node.__class__.__name__ + visitor = getattr(self, method, None) + if visitor is not None: + typ = visitor(node) + assert isinstance(typ, ProperType) + return typ + else: + return self.invalid_type(node) + finally: + self.node_stack.pop() + + def parent(self) -> AST | None: + """Return the AST node above the one we are processing""" + if len(self.node_stack) < 2: + return None + return self.node_stack[-2] + + def fail(self, msg: ErrorMessage, line: int, column: int) -> None: + if self.errors: + self.errors.report(line, column, msg.value, blocker=True, code=msg.code) + + def note(self, msg: str, line: int, column: int) -> None: + if self.errors: + self.errors.report(line, column, msg, severity="note", code=codes.SYNTAX) + + def translate_expr_list(self, l: Sequence[ast3.expr]) -> list[Type]: + return [self.visit(e) for e in l] + + def visit_Call(self, e: Call) -> Type: + # Parse the arg constructor + f = e.func + constructor = stringify_name(f) + + if not isinstance(self.parent(), ast3.List): + note = None + if constructor: + note = "Suggestion: use {0}[...] 
instead of {0}(...)".format(constructor) + return self.invalid_type(e, note=note) + if not constructor: + self.fail(message_registry.ARG_CONSTRUCTOR_NAME_EXPECTED, e.lineno, e.col_offset) + + name: str | None = None + default_type = AnyType(TypeOfAny.special_form) + typ: Type = default_type + for i, arg in enumerate(e.args): + if i == 0: + converted = self.visit(arg) + assert converted is not None + typ = converted + elif i == 1: + name = self._extract_argument_name(arg) + else: + self.fail(message_registry.ARG_CONSTRUCTOR_TOO_MANY_ARGS, f.lineno, f.col_offset) + for k in e.keywords: + value = k.value + if k.arg == "name": + if name is not None: + self.fail( + message_registry.MULTIPLE_VALUES_FOR_NAME_KWARG.format(constructor), + f.lineno, + f.col_offset, + ) + name = self._extract_argument_name(value) + elif k.arg == "type": + if typ is not default_type: + self.fail( + message_registry.MULTIPLE_VALUES_FOR_TYPE_KWARG.format(constructor), + f.lineno, + f.col_offset, + ) + converted = self.visit(value) + assert converted is not None + typ = converted + else: + self.fail( + message_registry.ARG_CONSTRUCTOR_UNEXPECTED_ARG.format(k.arg), + value.lineno, + value.col_offset, + ) + return CallableArgument(typ, name, constructor, e.lineno, e.col_offset) + + def translate_argument_list(self, l: Sequence[ast3.expr]) -> TypeList: + return TypeList([self.visit(e) for e in l], line=self.line) + + def _extract_argument_name(self, n: ast3.expr) -> str | None: + if isinstance(n, ast3.Constant) and isinstance(n.value, str): + return n.value.strip() + elif isinstance(n, ast3.Constant) and n.value is None: + return None + self.fail( + message_registry.ARG_NAME_EXPECTED_STRING_LITERAL.format(type(n).__name__), + self.line, + 0, + ) + return None + + def visit_Name(self, n: Name) -> Type: + return UnboundType(n.id, line=self.line, column=self.convert_column(n.col_offset)) + + def visit_BinOp(self, n: ast3.BinOp) -> Type: + if not isinstance(n.op, ast3.BitOr): + return self.invalid_type(n) + + left = self.visit(n.left) + right = self.visit(n.right) + return UnionType( + [left, right], + line=self.line, + column=self.convert_column(n.col_offset), + is_evaluated=self.is_evaluated, + uses_pep604_syntax=True, + ) + + def visit_Constant(self, n: ast3.Constant) -> Type: + val = n.value + if val is None: + # None is a type. + return UnboundType("None", line=self.line) + if isinstance(val, str): + # Parse forward reference. + return parse_type_string(val, "builtins.str", self.line, n.col_offset) + if val is Ellipsis: + # '...' is valid in some types. + return EllipsisType(line=self.line) + if isinstance(val, bool): + # Special case for True/False. + return RawExpressionType(val, "builtins.bool", line=self.line) + if isinstance(val, (int, float, complex)): + return self.numeric_type(val, n) + if isinstance(val, bytes): + contents = bytes_to_human_readable_repr(val) + return RawExpressionType(contents, "builtins.bytes", self.line, column=n.col_offset) + # Everything else is invalid. + + # UnaryOp(op, operand) + def visit_UnaryOp(self, n: UnaryOp) -> Type: + # We support specifically Literal[-4], Literal[+4], and nothing else. + # For example, Literal[~6] or Literal[not False] is not supported. + typ = self.visit(n.operand) + if ( + isinstance(typ, RawExpressionType) + # Use type() because we do not want to allow bools. 
+ and type(typ.literal_value) is int + ): + if isinstance(n.op, USub): + typ.literal_value *= -1 + return typ + if isinstance(n.op, UAdd): + return typ + return self.invalid_type(n) + + def numeric_type(self, value: object, n: AST) -> Type: + # The node's field has the type complex, but complex isn't *really* + # a parent of int and float, and this causes isinstance below + # to think that the complex branch is always picked. Avoid + # this by throwing away the type. + if isinstance(value, int): + numeric_value: int | None = value + type_name = "builtins.int" + else: + # Other kinds of numbers (floats, complex) are not valid parameters for + # RawExpressionType so we just pass in 'None' for now. We'll report the + # appropriate error at a later stage. + numeric_value = None + type_name = f"builtins.{type(value).__name__}" + return RawExpressionType( + numeric_value, type_name, line=self.line, column=getattr(n, "col_offset", -1) + ) + + def visit_Slice(self, n: ast3.Slice) -> Type: + return self.invalid_type(n, note="did you mean to use ',' instead of ':' ?") + + # Subscript(expr value, expr slice, expr_context ctx) + def visit_Subscript(self, n: ast3.Subscript) -> Type: + empty_tuple_index = False + if isinstance(n.slice, ast3.Tuple): + params = self.translate_expr_list(n.slice.elts) + if len(n.slice.elts) == 0: + empty_tuple_index = True + else: + params = [self.visit(n.slice)] + + value = self.visit(n.value) + if isinstance(value, UnboundType) and not value.args: + result = UnboundType( + value.name, + params, + line=self.line, + column=value.column, + empty_tuple_index=empty_tuple_index, + ) + result.end_column = getattr(n, "end_col_offset", None) + result.end_line = getattr(n, "end_lineno", None) + return result + else: + return self.invalid_type(n) + + def visit_Tuple(self, n: ast3.Tuple) -> Type: + return TupleType( + self.translate_expr_list(n.elts), + _dummy_fallback, + implicit=True, + line=self.line, + column=self.convert_column(n.col_offset), + ) + + def visit_Dict(self, n: ast3.Dict) -> Type: + if not n.keys: + return self.invalid_type(n) + items: dict[str, Type] = {} + extra_items_from = [] + for item_name, value in zip(n.keys, n.values): + if not isinstance(item_name, ast3.Constant) or not isinstance(item_name.value, str): + if item_name is None: + extra_items_from.append(self.visit(value)) + continue + return self.invalid_type(n) + items[item_name.value] = self.visit(value) + result = TypedDictType(items, set(), set(), _dummy_fallback, n.lineno, n.col_offset) + result.extra_items_from = extra_items_from + return result + + # Attribute(expr value, identifier attr, expr_context ctx) + def visit_Attribute(self, n: Attribute) -> Type: + before_dot = self.visit(n.value) + + if isinstance(before_dot, UnboundType) and not before_dot.args: + return UnboundType(f"{before_dot.name}.{n.attr}", line=self.line, column=n.col_offset) + else: + return self.invalid_type(n) + + # Used for Callable[[X *Ys, Z], R] etc. + def visit_Starred(self, n: ast3.Starred) -> Type: + return UnpackType(self.visit(n.value), from_star_syntax=True) + + # List(expr* elts, expr_context ctx) + def visit_List(self, n: ast3.List) -> Type: + assert isinstance(n.ctx, ast3.Load) + result = self.translate_argument_list(n.elts) + return result + + +def stringify_name(n: AST) -> str | None: + if isinstance(n, Name): + return n.id + elif isinstance(n, Attribute): + sv = stringify_name(n.value) + if sv is not None: + return f"{sv}.{n.attr}" + return None # Can't do it. 
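+
+# Editorial aside, not part of upstream mypy (illustration only): stringify_name
+# flattens a dotted reference into a plain string and returns None for anything it
+# cannot express that way. Using the ast3 alias from this module:
+#
+#     stringify_name(ast3.parse("a.b.c", mode="eval").body)   # -> "a.b.c"
+#     stringify_name(ast3.parse("a().b", mode="eval").body)   # -> None (value is a Call,
+#                                                             #    not a Name/Attribute)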
+ + +class FindAttributeAssign(TraverserVisitor): + """Check if an AST contains attribute assignments (e.g. self.x = 0).""" + + def __init__(self) -> None: + self.lvalue = False + self.found = False + + def visit_assignment_stmt(self, s: AssignmentStmt) -> None: + self.lvalue = True + for lv in s.lvalues: + lv.accept(self) + self.lvalue = False + + def visit_with_stmt(self, s: WithStmt) -> None: + self.lvalue = True + for lv in s.target: + if lv is not None: + lv.accept(self) + self.lvalue = False + s.body.accept(self) + + def visit_for_stmt(self, s: ForStmt) -> None: + self.lvalue = True + s.index.accept(self) + self.lvalue = False + s.body.accept(self) + if s.else_body: + s.else_body.accept(self) + + def visit_expression_stmt(self, s: ExpressionStmt) -> None: + # No need to look inside these + pass + + def visit_call_expr(self, e: CallExpr) -> None: + # No need to look inside these + pass + + def visit_index_expr(self, e: IndexExpr) -> None: + # No need to look inside these + pass + + def visit_member_expr(self, e: MemberExpr) -> None: + if self.lvalue and isinstance(e.expr, NameExpr): + self.found = True + + +class FindYield(TraverserVisitor): + """Check if an AST contains yields or yield froms.""" # codespell:ignore froms + + def __init__(self) -> None: + self.found = False + + def visit_yield_expr(self, e: YieldExpr) -> None: + self.found = True + + def visit_yield_from_expr(self, e: YieldFromExpr) -> None: + self.found = True + + +def is_possible_trivial_body(s: list[Statement]) -> bool: + """Could the statements form a "trivial" function body, such as 'pass'? + + This mimics mypy.semanal.is_trivial_body, but this runs before + semantic analysis so some checks must be conservative. + """ + l = len(s) + if l == 0: + return False + i = 0 + if isinstance(s[0], ExpressionStmt) and isinstance(s[0].expr, StrExpr): + # Skip docstring + i += 1 + if i == l: + return True + if l > i + 1: + return False + stmt = s[i] + return isinstance(stmt, (PassStmt, RaiseStmt)) or ( + isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, EllipsisExpr) + ) diff --git a/.venv/lib/python3.12/site-packages/mypy/find_sources.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/find_sources.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..9263647 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/find_sources.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/find_sources.py b/.venv/lib/python3.12/site-packages/mypy/find_sources.py new file mode 100644 index 0000000..ececbf9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/find_sources.py @@ -0,0 +1,254 @@ +"""Routines for finding the sources that mypy will check""" + +from __future__ import annotations + +import functools +import os +from collections.abc import Sequence +from typing import Final + +from mypy.fscache import FileSystemCache +from mypy.modulefinder import ( + PYTHON_EXTENSIONS, + BuildSource, + matches_exclude, + matches_gitignore, + mypy_path, +) +from mypy.options import Options + +PY_EXTENSIONS: Final = tuple(PYTHON_EXTENSIONS) + + +class InvalidSourceList(Exception): + """Exception indicating a problem in the list of sources given to mypy.""" + + +def create_source_list( + paths: Sequence[str], + options: Options, + fscache: FileSystemCache | None = None, + allow_empty_dir: bool = False, +) -> list[BuildSource]: + """From a list of source files/directories, makes a list of BuildSources. + + Raises InvalidSourceList on errors. 
+ """ + fscache = fscache or FileSystemCache() + finder = SourceFinder(fscache, options) + + sources = [] + for path in paths: + path = os.path.normpath(path) + if path.endswith(PY_EXTENSIONS): + # Can raise InvalidSourceList if a directory doesn't have a valid module name. + name, base_dir = finder.crawl_up(path) + sources.append(BuildSource(path, name, None, base_dir)) + elif fscache.isdir(path): + sub_sources = finder.find_sources_in_dir(path) + if not sub_sources and not allow_empty_dir: + raise InvalidSourceList(f"There are no .py[i] files in directory '{path}'") + sources.extend(sub_sources) + else: + mod = os.path.basename(path) if options.scripts_are_modules else None + sources.append(BuildSource(path, mod, None)) + return sources + + +def keyfunc(name: str) -> tuple[bool, int, str]: + """Determines sort order for directory listing. + + The desirable properties are: + 1) foo < foo.pyi < foo.py + 2) __init__.py[i] < foo + """ + base, suffix = os.path.splitext(name) + for i, ext in enumerate(PY_EXTENSIONS): + if suffix == ext: + return (base != "__init__", i, base) + return (base != "__init__", -1, name) + + +def normalise_package_base(root: str) -> str: + if not root: + root = os.curdir + root = os.path.abspath(root) + if root.endswith(os.sep): + root = root[:-1] + return root + + +def get_explicit_package_bases(options: Options) -> list[str] | None: + """Returns explicit package bases to use if the option is enabled, or None if disabled. + + We currently use MYPYPATH and the current directory as the package bases. In the future, + when --namespace-packages is the default could also use the values passed with the + --package-root flag, see #9632. + + Values returned are normalised so we can use simple string comparisons in + SourceFinder.is_explicit_package_base + """ + if not options.explicit_package_bases: + return None + roots = mypy_path() + options.mypy_path + [os.getcwd()] + return [normalise_package_base(root) for root in roots] + + +class SourceFinder: + def __init__(self, fscache: FileSystemCache, options: Options) -> None: + self.fscache = fscache + self.explicit_package_bases = get_explicit_package_bases(options) + self.namespace_packages = options.namespace_packages + self.exclude = options.exclude + self.exclude_gitignore = options.exclude_gitignore + self.verbosity = options.verbosity + + def is_explicit_package_base(self, path: str) -> bool: + assert self.explicit_package_bases + return normalise_package_base(path) in self.explicit_package_bases + + def find_sources_in_dir(self, path: str) -> list[BuildSource]: + sources = [] + + seen: set[str] = set() + names = sorted(self.fscache.listdir(path), key=keyfunc) + for name in names: + # Skip certain names altogether + if name in ("__pycache__", "site-packages", "node_modules") or name.startswith("."): + continue + subpath = os.path.join(path, name) + + if matches_exclude(subpath, self.exclude, self.fscache, self.verbosity >= 2): + continue + if self.exclude_gitignore and matches_gitignore( + subpath, self.fscache, self.verbosity >= 2 + ): + continue + + if self.fscache.isdir(subpath): + sub_sources = self.find_sources_in_dir(subpath) + if sub_sources: + seen.add(name) + sources.extend(sub_sources) + else: + stem, suffix = os.path.splitext(name) + if stem not in seen and suffix in PY_EXTENSIONS: + seen.add(stem) + module, base_dir = self.crawl_up(subpath) + sources.append(BuildSource(subpath, module, None, base_dir)) + + return sources + + def crawl_up(self, path: str) -> tuple[str, str]: + """Given a .py[i] filename, 
return module and base directory. + + For example, given "xxx/yyy/foo/bar.py", we might return something like: + ("foo.bar", "xxx/yyy") + + If namespace packages is off, we crawl upwards until we find a directory without + an __init__.py + + If namespace packages is on, we crawl upwards until the nearest explicit base directory. + Failing that, we return one past the highest directory containing an __init__.py + + We won't crawl past directories with invalid package names. + The base directory returned is an absolute path. + """ + path = os.path.abspath(path) + parent, filename = os.path.split(path) + + module_name = strip_py(filename) or filename + + parent_module, base_dir = self.crawl_up_dir(parent) + if module_name == "__init__": + return parent_module, base_dir + + # Note that module_name might not actually be a valid identifier, but that's okay + # Ignoring this possibility sidesteps some search path confusion + module = module_join(parent_module, module_name) + return module, base_dir + + def crawl_up_dir(self, dir: str) -> tuple[str, str]: + return self._crawl_up_helper(dir) or ("", dir) + + @functools.lru_cache # noqa: B019 + def _crawl_up_helper(self, dir: str) -> tuple[str, str] | None: + """Given a directory, maybe returns module and base directory. + + We return a non-None value if we were able to find something clearly intended as a base + directory (as adjudicated by being an explicit base directory or by containing a package + with __init__.py). + + This distinction is necessary for namespace packages, so that we know when to treat + ourselves as a subpackage. + """ + # stop crawling if we're an explicit base directory + if self.explicit_package_bases is not None and self.is_explicit_package_base(dir): + return "", dir + + parent, name = os.path.split(dir) + name = name.removesuffix("-stubs") # PEP-561 stub-only directory + + # recurse if there's an __init__.py + init_file = self.get_init_file(dir) + if init_file is not None: + if not name.isidentifier(): + # in most cases the directory name is invalid, we'll just stop crawling upwards + # but if there's an __init__.py in the directory, something is messed up + raise InvalidSourceList(f"{name} is not a valid Python package name") + # we're definitely a package, so we always return a non-None value + mod_prefix, base_dir = self.crawl_up_dir(parent) + return module_join(mod_prefix, name), base_dir + + # stop crawling if we're out of path components or our name is an invalid identifier + if not name or not parent or not name.isidentifier(): + return None + + # stop crawling if namespace packages is off (since we don't have an __init__.py) + if not self.namespace_packages: + return None + + # at this point: namespace packages is on, we don't have an __init__.py and we're not an + # explicit base directory + result = self._crawl_up_helper(parent) + if result is None: + # we're not an explicit base directory and we don't have an __init__.py + # and none of our parents are either, so return + return None + # one of our parents was an explicit base directory or had an __init__.py, so we're + # definitely a subpackage! chain our name to the module. + mod_prefix, base_dir = result + return module_join(mod_prefix, name), base_dir + + def get_init_file(self, dir: str) -> str | None: + """Check whether a directory contains a file named __init__.py[i]. + + If so, return the file's name (with dir prefixed). If not, return None. + + This prefers .pyi over .py (because of the ordering of PY_EXTENSIONS). 
+ """ + for ext in PY_EXTENSIONS: + f = os.path.join(dir, "__init__" + ext) + if self.fscache.isfile(f): + return f + if ext == ".py" and self.fscache.init_under_package_root(f): + return f + return None + + +def module_join(parent: str, child: str) -> str: + """Join module ids, accounting for a possibly empty parent.""" + if parent: + return parent + "." + child + return child + + +def strip_py(arg: str) -> str | None: + """Strip a trailing .py or .pyi suffix. + + Return None if no such suffix is found. + """ + for ext in PY_EXTENSIONS: + if arg.endswith(ext): + return arg[: -len(ext)] + return None diff --git a/.venv/lib/python3.12/site-packages/mypy/fixup.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/fixup.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..c4e487d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/fixup.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/fixup.py b/.venv/lib/python3.12/site-packages/mypy/fixup.py new file mode 100644 index 0000000..d0205f6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/fixup.py @@ -0,0 +1,444 @@ +"""Fix up various things after deserialization.""" + +from __future__ import annotations + +from typing import Any, Final + +from mypy.lookup import lookup_fully_qualified +from mypy.nodes import ( + Block, + ClassDef, + Decorator, + FuncDef, + MypyFile, + OverloadedFuncDef, + ParamSpecExpr, + SymbolTable, + TypeAlias, + TypeInfo, + TypeVarExpr, + TypeVarTupleExpr, + Var, +) +from mypy.types import ( + NOT_READY, + AnyType, + CallableType, + Instance, + LiteralType, + Overloaded, + Parameters, + ParamSpecType, + ProperType, + TupleType, + TypeAliasType, + TypedDictType, + TypeOfAny, + TypeType, + TypeVarTupleType, + TypeVarType, + TypeVisitor, + UnboundType, + UnionType, + UnpackType, +) +from mypy.visitor import NodeVisitor + + +# N.B: we do a allow_missing fixup when fixing up a fine-grained +# incremental cache load (since there may be cross-refs into deleted +# modules) +def fixup_module(tree: MypyFile, modules: dict[str, MypyFile], allow_missing: bool) -> None: + node_fixer = NodeFixer(modules, allow_missing) + node_fixer.visit_symbol_table(tree.names, tree.fullname) + + +# TODO: Fix up .info when deserializing, i.e. much earlier. +class NodeFixer(NodeVisitor[None]): + current_info: TypeInfo | None = None + + def __init__(self, modules: dict[str, MypyFile], allow_missing: bool) -> None: + self.modules = modules + self.allow_missing = allow_missing + self.type_fixer = TypeFixer(self.modules, allow_missing) + + # NOTE: This method isn't (yet) part of the NodeVisitor API. 
+ def visit_type_info(self, info: TypeInfo) -> None: + save_info = self.current_info + try: + self.current_info = info + if info.defn: + info.defn.accept(self) + if info.names: + self.visit_symbol_table(info.names, info.fullname) + if info.bases: + for base in info.bases: + base.accept(self.type_fixer) + if info._promote: + for p in info._promote: + p.accept(self.type_fixer) + if info.tuple_type: + info.tuple_type.accept(self.type_fixer) + info.update_tuple_type(info.tuple_type) + if info.special_alias: + info.special_alias.alias_tvars = list(info.defn.type_vars) + for i, t in enumerate(info.defn.type_vars): + if isinstance(t, TypeVarTupleType): + info.special_alias.tvar_tuple_index = i + if info.typeddict_type: + info.typeddict_type.accept(self.type_fixer) + info.update_typeddict_type(info.typeddict_type) + if info.special_alias: + info.special_alias.alias_tvars = list(info.defn.type_vars) + for i, t in enumerate(info.defn.type_vars): + if isinstance(t, TypeVarTupleType): + info.special_alias.tvar_tuple_index = i + if info.declared_metaclass: + info.declared_metaclass.accept(self.type_fixer) + if info.metaclass_type: + info.metaclass_type.accept(self.type_fixer) + if info.self_type: + info.self_type.accept(self.type_fixer) + if info.alt_promote: + info.alt_promote.accept(self.type_fixer) + instance = Instance(info, []) + # Hack: We may also need to add a backwards promotion (from int to native int), + # since it might not be serialized. + if instance not in info.alt_promote.type._promote: + info.alt_promote.type._promote.append(instance) + if info._mro_refs: + info.mro = [ + lookup_fully_qualified_typeinfo( + self.modules, name, allow_missing=self.allow_missing + ) + for name in info._mro_refs + ] + info._mro_refs = None + finally: + self.current_info = save_info + + # NOTE: This method *definitely* isn't part of the NodeVisitor API. + def visit_symbol_table(self, symtab: SymbolTable, table_fullname: str) -> None: + # Copy the items because we may mutate symtab. + for key in list(symtab): + value = symtab[key] + cross_ref = value.cross_ref + if cross_ref is not None: # Fix up cross-reference. + value.cross_ref = None + if cross_ref in self.modules: + value.node = self.modules[cross_ref] + else: + stnode = lookup_fully_qualified( + cross_ref, self.modules, raise_on_missing=not self.allow_missing + ) + if stnode is not None: + if stnode is value: + # The node seems to refer to itself, which can mean that + # the target is a deleted submodule of the current module, + # and thus lookup falls back to the symbol table of the parent + # package. Here's how this may happen: + # + # pkg/__init__.py: + # from pkg import sub + # + # Now if pkg.sub is deleted, the pkg.sub symbol table entry + # appears to refer to itself. Replace the entry with a + # placeholder to avoid a crash. We can't delete the entry, + # as it would stop dependency propagation. + value.node = Var(key + "@deleted") + else: + assert stnode.node is not None, (table_fullname + "." + key, cross_ref) + value.node = stnode.node + elif not self.allow_missing: + assert False, f"Could not find cross-ref {cross_ref}" + else: + # We have a missing crossref in allow missing mode, need to put something + value.node = missing_info(self.modules) + else: + if isinstance(value.node, TypeInfo): + # TypeInfo has no accept(). TODO: Add it? 
+ self.visit_type_info(value.node) + elif value.node is not None: + value.node.accept(self) + else: + assert False, f"Unexpected empty node {key!r}: {value}" + + def visit_func_def(self, func: FuncDef) -> None: + if self.current_info is not None: + func.info = self.current_info + if func.type is not None: + func.type.accept(self.type_fixer) + if isinstance(func.type, CallableType): + func.type.definition = func + + def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None: + if self.current_info is not None: + o.info = self.current_info + if o.type: + o.type.accept(self.type_fixer) + for item in o.items: + item.accept(self) + if o.impl: + o.impl.accept(self) + if isinstance(o.type, Overloaded): + # For error messages we link the original definition for each item. + for typ, item in zip(o.type.items, o.items): + typ.definition = item + + def visit_decorator(self, d: Decorator) -> None: + if self.current_info is not None: + d.var.info = self.current_info + if d.func: + d.func.accept(self) + if d.var: + d.var.accept(self) + for node in d.decorators: + node.accept(self) + typ = d.var.type + if isinstance(typ, ProperType) and isinstance(typ, CallableType): + typ.definition = d.func + + def visit_class_def(self, c: ClassDef) -> None: + for v in c.type_vars: + v.accept(self.type_fixer) + + def visit_type_var_expr(self, tv: TypeVarExpr) -> None: + for value in tv.values: + value.accept(self.type_fixer) + tv.upper_bound.accept(self.type_fixer) + tv.default.accept(self.type_fixer) + + def visit_paramspec_expr(self, p: ParamSpecExpr) -> None: + p.upper_bound.accept(self.type_fixer) + p.default.accept(self.type_fixer) + + def visit_type_var_tuple_expr(self, tv: TypeVarTupleExpr) -> None: + tv.upper_bound.accept(self.type_fixer) + tv.tuple_fallback.accept(self.type_fixer) + tv.default.accept(self.type_fixer) + + def visit_var(self, v: Var) -> None: + if self.current_info is not None: + v.info = self.current_info + if v.type is not None: + v.type.accept(self.type_fixer) + if v.setter_type is not None: + v.setter_type.accept(self.type_fixer) + + def visit_type_alias(self, a: TypeAlias) -> None: + a.target.accept(self.type_fixer) + for v in a.alias_tvars: + v.accept(self.type_fixer) + + +class TypeFixer(TypeVisitor[None]): + def __init__(self, modules: dict[str, MypyFile], allow_missing: bool) -> None: + self.modules = modules + self.allow_missing = allow_missing + + def visit_instance(self, inst: Instance) -> None: + # TODO: Combine Instances that are exactly the same? + type_ref = inst.type_ref + if type_ref is None: + return # We've already been here. + inst.type_ref = None + inst.type = lookup_fully_qualified_typeinfo( + self.modules, type_ref, allow_missing=self.allow_missing + ) + # TODO: Is this needed or redundant? + # Also fix up the bases, just in case. + for base in inst.type.bases: + if base.type is NOT_READY: + base.accept(self) + for a in inst.args: + a.accept(self) + if inst.last_known_value is not None: + inst.last_known_value.accept(self) + if inst.extra_attrs: + for v in inst.extra_attrs.attrs.values(): + v.accept(self) + + def visit_type_alias_type(self, t: TypeAliasType) -> None: + type_ref = t.type_ref + if type_ref is None: + return # We've already been here. + t.type_ref = None + t.alias = lookup_fully_qualified_alias( + self.modules, type_ref, allow_missing=self.allow_missing + ) + for a in t.args: + a.accept(self) + + def visit_any(self, o: Any) -> None: + pass # Nothing to descend into. 
+ + def visit_callable_type(self, ct: CallableType) -> None: + if ct.fallback: + ct.fallback.accept(self) + for argt in ct.arg_types: + # argt may be None, e.g. for __self in NamedTuple constructors. + if argt is not None: + argt.accept(self) + if ct.ret_type is not None: + ct.ret_type.accept(self) + for v in ct.variables: + v.accept(self) + if ct.type_guard is not None: + ct.type_guard.accept(self) + if ct.type_is is not None: + ct.type_is.accept(self) + + def visit_overloaded(self, t: Overloaded) -> None: + for ct in t.items: + ct.accept(self) + + def visit_erased_type(self, o: Any) -> None: + # This type should exist only temporarily during type inference + raise RuntimeError("Shouldn't get here", o) + + def visit_deleted_type(self, o: Any) -> None: + pass # Nothing to descend into. + + def visit_none_type(self, o: Any) -> None: + pass # Nothing to descend into. + + def visit_uninhabited_type(self, o: Any) -> None: + pass # Nothing to descend into. + + def visit_partial_type(self, o: Any) -> None: + raise RuntimeError("Shouldn't get here", o) + + def visit_tuple_type(self, tt: TupleType) -> None: + if tt.items: + for it in tt.items: + it.accept(self) + if tt.partial_fallback is not None: + tt.partial_fallback.accept(self) + + def visit_typeddict_type(self, tdt: TypedDictType) -> None: + if tdt.items: + for it in tdt.items.values(): + it.accept(self) + if tdt.fallback is not None: + if tdt.fallback.type_ref is not None: + if ( + lookup_fully_qualified( + tdt.fallback.type_ref, + self.modules, + raise_on_missing=not self.allow_missing, + ) + is None + ): + # We reject fake TypeInfos for TypedDict fallbacks because + # the latter are used in type checking and must be valid. + tdt.fallback.type_ref = "typing._TypedDict" + tdt.fallback.accept(self) + + def visit_literal_type(self, lt: LiteralType) -> None: + lt.fallback.accept(self) + + def visit_type_var(self, tvt: TypeVarType) -> None: + if tvt.values: + for vt in tvt.values: + vt.accept(self) + tvt.upper_bound.accept(self) + tvt.default.accept(self) + + def visit_param_spec(self, p: ParamSpecType) -> None: + p.upper_bound.accept(self) + p.default.accept(self) + p.prefix.accept(self) + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> None: + t.tuple_fallback.accept(self) + t.upper_bound.accept(self) + t.default.accept(self) + + def visit_unpack_type(self, u: UnpackType) -> None: + u.type.accept(self) + + def visit_parameters(self, p: Parameters) -> None: + for argt in p.arg_types: + if argt is not None: + argt.accept(self) + for var in p.variables: + var.accept(self) + + def visit_unbound_type(self, o: UnboundType) -> None: + for a in o.args: + a.accept(self) + + def visit_union_type(self, ut: UnionType) -> None: + if ut.items: + for it in ut.items: + it.accept(self) + + def visit_type_type(self, t: TypeType) -> None: + t.item.accept(self) + + +def lookup_fully_qualified_typeinfo( + modules: dict[str, MypyFile], name: str, *, allow_missing: bool +) -> TypeInfo: + stnode = lookup_fully_qualified(name, modules, raise_on_missing=not allow_missing) + node = stnode.node if stnode else None + if isinstance(node, TypeInfo): + return node + else: + # Looks like a missing TypeInfo during an initial daemon load, put something there + assert ( + allow_missing + ), "Should never get here in normal mode, got {}:{} instead of TypeInfo".format( + type(node).__name__, node.fullname if node else "" + ) + return missing_info(modules) + + +def lookup_fully_qualified_alias( + modules: dict[str, MypyFile], name: str, *, allow_missing: bool +) -> 
TypeAlias: + stnode = lookup_fully_qualified(name, modules, raise_on_missing=not allow_missing) + node = stnode.node if stnode else None + if isinstance(node, TypeAlias): + return node + elif isinstance(node, TypeInfo): + if node.special_alias: + # Already fixed up. + return node.special_alias + if node.tuple_type: + alias = TypeAlias.from_tuple_type(node) + elif node.typeddict_type: + alias = TypeAlias.from_typeddict_type(node) + else: + assert allow_missing + return missing_alias() + node.special_alias = alias + return alias + else: + # Looks like a missing TypeAlias during an initial daemon load, put something there + assert ( + allow_missing + ), "Should never get here in normal mode, got {}:{} instead of TypeAlias".format( + type(node).__name__, node.fullname if node else "" + ) + return missing_alias() + + +_SUGGESTION: Final = "" + + +def missing_info(modules: dict[str, MypyFile]) -> TypeInfo: + suggestion = _SUGGESTION.format("info") + dummy_def = ClassDef(suggestion, Block([])) + dummy_def.fullname = suggestion + + info = TypeInfo(SymbolTable(), dummy_def, "") + obj_type = lookup_fully_qualified_typeinfo(modules, "builtins.object", allow_missing=False) + info.bases = [Instance(obj_type, [])] + info.mro = [info, obj_type] + return info + + +def missing_alias() -> TypeAlias: + suggestion = _SUGGESTION.format("alias") + return TypeAlias(AnyType(TypeOfAny.special_form), suggestion, "", line=-1, column=-1) diff --git a/.venv/lib/python3.12/site-packages/mypy/freetree.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/freetree.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..3c45fa6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/freetree.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/freetree.py b/.venv/lib/python3.12/site-packages/mypy/freetree.py new file mode 100644 index 0000000..75b89e2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/freetree.py @@ -0,0 +1,23 @@ +"""Generic node traverser visitor""" + +from __future__ import annotations + +from mypy.nodes import Block, MypyFile +from mypy.traverser import TraverserVisitor + + +class TreeFreer(TraverserVisitor): + def visit_block(self, block: Block) -> None: + super().visit_block(block) + block.body.clear() + + +def free_tree(tree: MypyFile) -> None: + """Free all the ASTs associated with a module. + + This needs to be done recursively, since symbol tables contain + references to definitions, so those won't be freed but we want their + contents to be. + """ + tree.accept(TreeFreer()) + tree.defs.clear() diff --git a/.venv/lib/python3.12/site-packages/mypy/fscache.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/fscache.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..7197676 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/fscache.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/fscache.py b/.venv/lib/python3.12/site-packages/mypy/fscache.py new file mode 100644 index 0000000..2403701 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/fscache.py @@ -0,0 +1,307 @@ +"""Interface for accessing the file system with automatic caching. + +The idea is to cache the results of any file system state reads during +a single transaction. This has two main benefits: + +* This avoids redundant syscalls, as we won't perform the same OS + operations multiple times. 
+ +* This makes it easier to reason about concurrent FS updates, as different + operations targeting the same paths can't report different state during + a transaction. + +Note that this only deals with reading state, not writing. + +Properties maintained by the API: + +* The contents of the file are always from the same or later time compared + to the reported mtime of the file, even if mtime is queried after reading + a file. + +* Repeating an operation produces the same result as the first one during + a transaction. + +* Call flush() to start a new transaction (flush the caches). + +The API is a bit limited. It's easy to add new cached operations, however. +You should perform all file system reads through the API to actually take +advantage of the benefits. +""" + +from __future__ import annotations + +import os +import stat + +from mypy_extensions import mypyc_attr + +from mypy.util import hash_digest + + +@mypyc_attr(allow_interpreted_subclasses=True) # for tests +class FileSystemCache: + def __init__(self) -> None: + # The package root is not flushed with the caches. + # It is set by set_package_root() below. + self.package_root: list[str] = [] + self.flush() + + def set_package_root(self, package_root: list[str]) -> None: + self.package_root = package_root + + def flush(self) -> None: + """Start another transaction and empty all caches.""" + self.stat_or_none_cache: dict[str, os.stat_result | None] = {} + + self.listdir_cache: dict[str, list[str]] = {} + self.listdir_error_cache: dict[str, OSError] = {} + self.isfile_case_cache: dict[str, bool] = {} + self.exists_case_cache: dict[str, bool] = {} + self.read_cache: dict[str, bytes] = {} + self.read_error_cache: dict[str, Exception] = {} + self.hash_cache: dict[str, str] = {} + self.fake_package_cache: set[str] = set() + + def stat_or_none(self, path: str) -> os.stat_result | None: + if path in self.stat_or_none_cache: + return self.stat_or_none_cache[path] + + st = None + try: + st = os.stat(path) + except OSError: + if self.init_under_package_root(path): + try: + st = self._fake_init(path) + except OSError: + pass + + self.stat_or_none_cache[path] = st + return st + + def init_under_package_root(self, path: str) -> bool: + """Is this path an __init__.py under a package root? + + This is used to detect packages that don't contain __init__.py + files, which is needed to support Bazel. The function should + only be called for non-existing files. + + It will return True if it refers to a __init__.py file that + Bazel would create, so that at runtime Python would think the + directory containing it is a package. For this to work you + must pass one or more package roots using the --package-root + flag. + + As an exceptional case, any directory that is a package root + itself will not be considered to contain a __init__.py file. + This is different from the rules Bazel itself applies, but is + necessary for mypy to properly distinguish packages from other + directories. + + See https://docs.bazel.build/versions/master/be/python.html, + where this behavior is described under legacy_create_init. 
+ """ + if not self.package_root: + return False + dirname, basename = os.path.split(path) + if basename != "__init__.py": + return False + if not os.path.basename(dirname).isidentifier(): + # Can't put an __init__.py in a place that's not an identifier + return False + + st = self.stat_or_none(dirname) + if st is None: + return False + else: + if not stat.S_ISDIR(st.st_mode): + return False + ok = False + + # skip if on a different drive + current_drive, _ = os.path.splitdrive(os.getcwd()) + drive, _ = os.path.splitdrive(path) + if drive != current_drive: + return False + if os.path.isabs(path): + path = os.path.relpath(path) + path = os.path.normpath(path) + for root in self.package_root: + if path.startswith(root): + if path == root + basename: + # A package root itself is never a package. + ok = False + break + else: + ok = True + return ok + + def _fake_init(self, path: str) -> os.stat_result: + """Prime the cache with a fake __init__.py file. + + This makes code that looks for path believe an empty file by + that name exists. Should only be called after + init_under_package_root() returns True. + """ + dirname, basename = os.path.split(path) + assert basename == "__init__.py", path + assert not os.path.exists(path), path # Not cached! + dirname = os.path.normpath(dirname) + st = os.stat(dirname) # May raise OSError + # Get stat result as a list so we can modify it. + seq: list[float] = list(st) + seq[stat.ST_MODE] = stat.S_IFREG | 0o444 + seq[stat.ST_INO] = 1 + seq[stat.ST_NLINK] = 1 + seq[stat.ST_SIZE] = 0 + st = os.stat_result(seq) + # Make listdir() and read() also pretend this file exists. + self.fake_package_cache.add(dirname) + return st + + def listdir(self, path: str) -> list[str]: + path = os.path.normpath(path) + if path in self.listdir_cache: + res = self.listdir_cache[path] + # Check the fake cache. + if path in self.fake_package_cache and "__init__.py" not in res: + res.append("__init__.py") # Updates the result as well as the cache + return res + if path in self.listdir_error_cache: + raise copy_os_error(self.listdir_error_cache[path]) + try: + results = os.listdir(path) + except OSError as err: + # Like above, take a copy to reduce memory use. + self.listdir_error_cache[path] = copy_os_error(err) + raise err + self.listdir_cache[path] = results + # Check the fake cache. + if path in self.fake_package_cache and "__init__.py" not in results: + results.append("__init__.py") + return results + + def isfile(self, path: str) -> bool: + st = self.stat_or_none(path) + if st is None: + return False + return stat.S_ISREG(st.st_mode) + + def isfile_case(self, path: str, prefix: str) -> bool: + """Return whether path exists and is a file. + + On case-insensitive filesystems (like Mac or Windows) this returns + False if the case of path's last component does not exactly match + the case found in the filesystem. + + We check also the case of other path components up to prefix. + For example, if path is 'user-stubs/pack/mod.pyi' and prefix is 'user-stubs', + we check that the case of 'pack' and 'mod.py' matches exactly, 'user-stubs' will be + case insensitive on case insensitive filesystems. + + The caller must ensure that prefix is a valid file system prefix of path. 
+ """ + if not self.isfile(path): + # Fast path + return False + if path in self.isfile_case_cache: + return self.isfile_case_cache[path] + head, tail = os.path.split(path) + if not tail: + self.isfile_case_cache[path] = False + return False + try: + names = self.listdir(head) + # This allows one to check file name case sensitively in + # case-insensitive filesystems. + res = tail in names + except OSError: + res = False + if res: + # Also recursively check the other path components in case sensitive way. + res = self.exists_case(head, prefix) + self.isfile_case_cache[path] = res + return res + + def exists_case(self, path: str, prefix: str) -> bool: + """Return whether path exists - checking path components in case sensitive + fashion, up to prefix. + """ + if path in self.exists_case_cache: + return self.exists_case_cache[path] + head, tail = os.path.split(path) + if not head.startswith(prefix) or not tail: + # Only perform the check for paths under prefix. + self.exists_case_cache[path] = True + return True + try: + names = self.listdir(head) + # This allows one to check file name case sensitively in + # case-insensitive filesystems. + res = tail in names + except OSError: + res = False + if res: + # Also recursively check other path components. + res = self.exists_case(head, prefix) + self.exists_case_cache[path] = res + return res + + def isdir(self, path: str) -> bool: + st = self.stat_or_none(path) + if st is None: + return False + return stat.S_ISDIR(st.st_mode) + + def exists(self, path: str) -> bool: + st = self.stat_or_none(path) + return st is not None + + def read(self, path: str) -> bytes: + if path in self.read_cache: + return self.read_cache[path] + if path in self.read_error_cache: + raise self.read_error_cache[path] + + # Need to stat first so that the contents of file are from no + # earlier instant than the mtime reported by self.stat(). + self.stat_or_none(path) + + dirname, basename = os.path.split(path) + dirname = os.path.normpath(dirname) + # Check the fake cache. 
+ if basename == "__init__.py" and dirname in self.fake_package_cache: + data = b"" + else: + try: + with open(path, "rb") as f: + data = f.read() + except OSError as err: + self.read_error_cache[path] = err + raise + + self.read_cache[path] = data + self.hash_cache[path] = hash_digest(data) + return data + + def hash_digest(self, path: str) -> str: + if path not in self.hash_cache: + self.read(path) + return self.hash_cache[path] + + def samefile(self, f1: str, f2: str) -> bool: + s1 = self.stat_or_none(f1) + s2 = self.stat_or_none(f2) + if s1 is None or s2 is None: + return False + return os.path.samestat(s1, s2) + + +def copy_os_error(e: OSError) -> OSError: + new = OSError(*e.args) + new.errno = e.errno + new.strerror = e.strerror + new.filename = e.filename + if e.filename2: + new.filename2 = e.filename2 + return new diff --git a/.venv/lib/python3.12/site-packages/mypy/fswatcher.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/fswatcher.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..475f422 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/fswatcher.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/fswatcher.py b/.venv/lib/python3.12/site-packages/mypy/fswatcher.py new file mode 100644 index 0000000..d5873f3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/fswatcher.py @@ -0,0 +1,106 @@ +"""Watch parts of the file system for changes.""" + +from __future__ import annotations + +import os +from collections.abc import Iterable, Set as AbstractSet +from typing import NamedTuple + +from mypy.fscache import FileSystemCache + + +class FileData(NamedTuple): + st_mtime: float + st_size: int + hash: str + + +class FileSystemWatcher: + """Watcher for file system changes among specific paths. + + All file system access is performed using FileSystemCache. We + detect changed files by stat()ing them all and comparing hashes + of potentially changed files. If a file has both size and mtime + unmodified, the file is assumed to be unchanged. + + An important goal of this class is to make it easier to eventually + use file system events to detect file changes. + + Note: This class doesn't flush the file system cache. If you don't + manually flush it, changes won't be seen. + """ + + # TODO: Watching directories? + # TODO: Handle non-files + + def __init__(self, fs: FileSystemCache) -> None: + self.fs = fs + self._paths: set[str] = set() + self._file_data: dict[str, FileData | None] = {} + + def dump_file_data(self) -> dict[str, tuple[float, int, str]]: + return {k: v for k, v in self._file_data.items() if v is not None} + + def set_file_data(self, path: str, data: FileData) -> None: + self._file_data[path] = data + + def add_watched_paths(self, paths: Iterable[str]) -> None: + for path in paths: + if path not in self._paths: + # By storing None this path will get reported as changed by + # find_changed if it exists. 
+ self._file_data[path] = None + self._paths |= set(paths) + + def remove_watched_paths(self, paths: Iterable[str]) -> None: + for path in paths: + if path in self._file_data: + del self._file_data[path] + self._paths -= set(paths) + + def _update(self, path: str, st: os.stat_result) -> None: + hash_digest = self.fs.hash_digest(path) + self._file_data[path] = FileData(st.st_mtime, st.st_size, hash_digest) + + def _find_changed(self, paths: Iterable[str]) -> AbstractSet[str]: + changed = set() + for path in paths: + old = self._file_data[path] + st = self.fs.stat_or_none(path) + if st is None: + if old is not None: + # File was deleted. + changed.add(path) + self._file_data[path] = None + else: + if old is None: + # File is new. + changed.add(path) + self._update(path, st) + # Round mtimes down, to match the mtimes we write to meta files + elif st.st_size != old.st_size or int(st.st_mtime) != int(old.st_mtime): + # Only look for changes if size or mtime has changed as an + # optimization, since calculating hash is expensive. + new_hash = self.fs.hash_digest(path) + self._update(path, st) + if st.st_size != old.st_size or new_hash != old.hash: + # Changed file. + changed.add(path) + return changed + + def find_changed(self) -> AbstractSet[str]: + """Return paths that have changes since the last call, in the watched set.""" + return self._find_changed(self._paths) + + def update_changed(self, remove: list[str], update: list[str]) -> AbstractSet[str]: + """Alternative to find_changed() given explicit changes. + + This only calls self.fs.stat() on added or updated files, not + on all files. It believes all other files are unchanged! + + Implies add_watched_paths() for add and update, and + remove_watched_paths() for remove. + """ + self.remove_watched_paths(remove) + self.add_watched_paths(update) + return self._find_changed(update) diff --git a/.venv/lib/python3.12/site-packages/mypy/gclogger.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/gclogger.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..fda22b7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/gclogger.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/gclogger.py b/.venv/lib/python3.12/site-packages/mypy/gclogger.py new file mode 100644 index 0000000..bc908bd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/gclogger.py @@ -0,0 +1,48 @@ +from __future__ import annotations + +import gc +import time +from collections.abc import Mapping + + +class GcLogger: + """Context manager to log GC stats and overall time.""" + + def __enter__(self) -> GcLogger: + self.gc_start_time: float | None = None + self.gc_time = 0.0 + self.gc_calls = 0 + self.gc_collected = 0 + self.gc_uncollectable = 0 + gc.callbacks.append(self.gc_callback) + self.start_time = time.time() + return self + + def gc_callback(self, phase: str, info: Mapping[str, int]) -> None: + if phase == "start": + assert self.gc_start_time is None, "Start phase out of sequence" + self.gc_start_time = time.time() + elif phase == "stop": + assert self.gc_start_time is not None, "Stop phase out of sequence" + self.gc_calls += 1 + self.gc_time += time.time() - self.gc_start_time + self.gc_start_time = None + self.gc_collected += info["collected"] + self.gc_uncollectable += info["uncollectable"] + else: + assert False, f"Unrecognized gc phase ({phase!r})" + + def __exit__(self, *args: object) -> None: + while self.gc_callback in gc.callbacks: + 
gc.callbacks.remove(self.gc_callback) + + def get_stats(self) -> Mapping[str, float]: + end_time = time.time() + result = { + "gc_time": self.gc_time, + "gc_calls": self.gc_calls, + "gc_collected": self.gc_collected, + "gc_uncollectable": self.gc_uncollectable, + "build_time": end_time - self.start_time, + } + return result diff --git a/.venv/lib/python3.12/site-packages/mypy/git.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/git.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..8f543b3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/git.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/git.py b/.venv/lib/python3.12/site-packages/mypy/git.py new file mode 100644 index 0000000..1c63bf6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/git.py @@ -0,0 +1,34 @@ +"""Git utilities.""" + +# Used also from setup.py, so don't pull in anything additional here (like mypy or typing): +from __future__ import annotations + +import os +import subprocess + + +def is_git_repo(dir: str) -> bool: + """Is the given directory version-controlled with git?""" + return os.path.exists(os.path.join(dir, ".git")) + + +def have_git() -> bool: + """Can we run the git executable?""" + try: + subprocess.check_output(["git", "--help"]) + return True + except subprocess.CalledProcessError: + return False + except OSError: + return False + + +def git_revision(dir: str) -> bytes: + """Get the SHA-1 of the HEAD of a git repository.""" + return subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=dir).strip() + + +def is_dirty(dir: str) -> bool: + """Check whether a git repository has uncommitted changes.""" + output = subprocess.check_output(["git", "status", "-uno", "--porcelain"], cwd=dir) + return output.strip() != b"" diff --git a/.venv/lib/python3.12/site-packages/mypy/graph_utils.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/graph_utils.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..d768466 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/graph_utils.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/graph_utils.py b/.venv/lib/python3.12/site-packages/mypy/graph_utils.py new file mode 100644 index 0000000..154efce --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/graph_utils.py @@ -0,0 +1,117 @@ +"""Helpers for manipulations with graphs.""" + +from __future__ import annotations + +from collections.abc import Iterable, Iterator, Set as AbstractSet +from typing import TypeVar + +T = TypeVar("T") + + +def strongly_connected_components( + vertices: AbstractSet[T], edges: dict[T, list[T]] +) -> Iterator[set[T]]: + """Compute Strongly Connected Components of a directed graph. + + Args: + vertices: the labels for the vertices + edges: for each vertex, gives the target vertices of its outgoing edges + + Returns: + An iterator yielding strongly connected components, each + represented as a set of vertices. Each input vertex will occur + exactly once; vertices not part of a SCC are returned as + singleton sets. + + From https://code.activestate.com/recipes/578507/. 
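
    Example (an illustrative sketch: 1 and 2 form a cycle, 3 is reachable
    from them):

        >>> edges = {1: [2], 2: [1, 3], 3: []}
        >>> list(strongly_connected_components({1, 2, 3}, edges))
        [{3}, {1, 2}]

    {3} is yielded before {1, 2} because a component is emitted only after
    everything reachable from it has been emitted.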
+ """ + identified: set[T] = set() + stack: list[T] = [] + index: dict[T, int] = {} + boundaries: list[int] = [] + + def dfs(v: T) -> Iterator[set[T]]: + index[v] = len(stack) + stack.append(v) + boundaries.append(index[v]) + + for w in edges[v]: + if w not in index: + yield from dfs(w) + elif w not in identified: + while index[w] < boundaries[-1]: + boundaries.pop() + + if boundaries[-1] == index[v]: + boundaries.pop() + scc = set(stack[index[v] :]) + del stack[index[v] :] + identified.update(scc) + yield scc + + for v in vertices: + if v not in index: + yield from dfs(v) + + +def prepare_sccs( + sccs: list[set[T]], edges: dict[T, list[T]] +) -> dict[AbstractSet[T], set[AbstractSet[T]]]: + """Use original edges to organize SCCs in a graph by dependencies between them.""" + sccsmap = {} + for scc in sccs: + scc_frozen = frozenset(scc) + for v in scc: + sccsmap[v] = scc_frozen + data: dict[AbstractSet[T], set[AbstractSet[T]]] = {} + for scc in sccs: + deps: set[AbstractSet[T]] = set() + for v in scc: + deps.update(sccsmap[x] for x in edges[v]) + data[frozenset(scc)] = deps + return data + + +def topsort(data: dict[T, set[T]]) -> Iterable[set[T]]: + """Topological sort. + + Args: + data: A map from vertices to all vertices that it has an edge + connecting it to. NOTE: This data structure + is modified in place -- for normalization purposes, + self-dependencies are removed and entries representing + orphans are added. + + Returns: + An iterator yielding sets of vertices that have an equivalent + ordering. + + Example: + Suppose the input has the following structure: + + {A: {B, C}, B: {D}, C: {D}} + + This is normalized to: + + {A: {B, C}, B: {D}, C: {D}, D: {}} + + The algorithm will yield the following values: + + {D} + {B, C} + {A} + + From https://code.activestate.com/recipes/577413/. + """ + # TODO: Use a faster algorithm? + for k, v in data.items(): + v.discard(k) # Ignore self dependencies. 
+ for item in set.union(*data.values()) - set(data.keys()): + data[item] = set() + while True: + ready = {item for item, dep in data.items() if not dep} + if not ready: + break + yield ready + data = {item: (dep - ready) for item, dep in data.items() if item not in ready} + assert not data, f"A cyclic dependency exists amongst {data!r}" diff --git a/.venv/lib/python3.12/site-packages/mypy/indirection.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/indirection.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..f874870 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/indirection.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/indirection.py b/.venv/lib/python3.12/site-packages/mypy/indirection.py new file mode 100644 index 0000000..95023e3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/indirection.py @@ -0,0 +1,163 @@ +from __future__ import annotations + +from collections.abc import Iterable + +import mypy.types as types +from mypy.types import TypeVisitor + + +class TypeIndirectionVisitor(TypeVisitor[None]): + """Returns all module references within a particular type.""" + + def __init__(self) -> None: + # Module references are collected here + self.modules: set[str] = set() + # User to avoid infinite recursion with recursive types + self.seen_types: set[types.TypeAliasType | types.Instance] = set() + + def find_modules(self, typs: Iterable[types.Type]) -> set[str]: + self.modules = set() + self.seen_types = set() + for typ in typs: + self._visit(typ) + return self.modules + + def _visit(self, typ: types.Type) -> None: + # Note: instances are needed for `class str(Sequence[str]): ...` + if ( + isinstance(typ, types.TypeAliasType) + or isinstance(typ, types.ProperType) + and isinstance(typ, types.Instance) + ): + # Avoid infinite recursion for recursive types. + if typ in self.seen_types: + return + self.seen_types.add(typ) + typ.accept(self) + + def _visit_type_tuple(self, typs: tuple[types.Type, ...]) -> None: + # Micro-optimization: Specialized version of _visit for lists + for typ in typs: + if ( + isinstance(typ, types.TypeAliasType) + or isinstance(typ, types.ProperType) + and isinstance(typ, types.Instance) + ): + # Avoid infinite recursion for recursive types. + if typ in self.seen_types: + continue + self.seen_types.add(typ) + typ.accept(self) + + def _visit_type_list(self, typs: list[types.Type]) -> None: + # Micro-optimization: Specialized version of _visit for tuples + for typ in typs: + if ( + isinstance(typ, types.TypeAliasType) + or isinstance(typ, types.ProperType) + and isinstance(typ, types.Instance) + ): + # Avoid infinite recursion for recursive types. 
+ if typ in self.seen_types: + continue + self.seen_types.add(typ) + typ.accept(self) + + def visit_unbound_type(self, t: types.UnboundType) -> None: + self._visit_type_tuple(t.args) + + def visit_any(self, t: types.AnyType) -> None: + pass + + def visit_none_type(self, t: types.NoneType) -> None: + pass + + def visit_uninhabited_type(self, t: types.UninhabitedType) -> None: + pass + + def visit_erased_type(self, t: types.ErasedType) -> None: + pass + + def visit_deleted_type(self, t: types.DeletedType) -> None: + pass + + def visit_type_var(self, t: types.TypeVarType) -> None: + self._visit_type_list(t.values) + self._visit(t.upper_bound) + self._visit(t.default) + + def visit_param_spec(self, t: types.ParamSpecType) -> None: + self._visit(t.upper_bound) + self._visit(t.default) + self._visit(t.prefix) + + def visit_type_var_tuple(self, t: types.TypeVarTupleType) -> None: + self._visit(t.upper_bound) + self._visit(t.default) + + def visit_unpack_type(self, t: types.UnpackType) -> None: + t.type.accept(self) + + def visit_parameters(self, t: types.Parameters) -> None: + self._visit_type_list(t.arg_types) + + def visit_instance(self, t: types.Instance) -> None: + # Instance is named, record its definition and continue digging into + # components that constitute semantic meaning of this type: bases, metaclass, + # tuple type, and typeddict type. + # Note: we cannot simply record the MRO, in case an intermediate base contains + # a reference to type alias, this affects meaning of map_instance_to_supertype(), + # see e.g. testDoubleReexportGenericUpdated. + self._visit_type_tuple(t.args) + if t.type: + # Important optimization: instead of simply recording the definition and + # recursing into bases, record the MRO and only traverse generic bases. + for s in t.type.mro: + self.modules.add(s.module_name) + for base in s.bases: + if base.args: + self._visit_type_tuple(base.args) + if t.type.metaclass_type: + self._visit(t.type.metaclass_type) + if t.type.typeddict_type: + self._visit(t.type.typeddict_type) + if t.type.tuple_type: + self._visit(t.type.tuple_type) + + def visit_callable_type(self, t: types.CallableType) -> None: + self._visit_type_list(t.arg_types) + self._visit(t.ret_type) + self._visit_type_tuple(t.variables) + + def visit_overloaded(self, t: types.Overloaded) -> None: + for item in t.items: + self._visit(item) + self._visit(t.fallback) + + def visit_tuple_type(self, t: types.TupleType) -> None: + self._visit_type_list(t.items) + self._visit(t.partial_fallback) + + def visit_typeddict_type(self, t: types.TypedDictType) -> None: + self._visit_type_list(list(t.items.values())) + self._visit(t.fallback) + + def visit_literal_type(self, t: types.LiteralType) -> None: + self._visit(t.fallback) + + def visit_union_type(self, t: types.UnionType) -> None: + self._visit_type_list(t.items) + + def visit_partial_type(self, t: types.PartialType) -> None: + pass + + def visit_type_type(self, t: types.TypeType) -> None: + self._visit(t.item) + + def visit_type_alias_type(self, t: types.TypeAliasType) -> None: + # Type alias is named, record its definition and continue digging into + # components that constitute semantic meaning of this type: target and args. 
+ if t.alias: + self.modules.add(t.alias.module) + self._visit(t.alias.target) + self._visit_type_list(t.args) diff --git a/.venv/lib/python3.12/site-packages/mypy/infer.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/infer.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..4b020f4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/infer.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/infer.py b/.venv/lib/python3.12/site-packages/mypy/infer.py new file mode 100644 index 0000000..cdc4379 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/infer.py @@ -0,0 +1,76 @@ +"""Utilities for type argument inference.""" + +from __future__ import annotations + +from collections.abc import Sequence +from typing import NamedTuple + +from mypy.constraints import ( + SUBTYPE_OF, + SUPERTYPE_OF, + infer_constraints, + infer_constraints_for_callable, +) +from mypy.nodes import ArgKind +from mypy.solve import solve_constraints +from mypy.types import CallableType, Instance, Type, TypeVarLikeType + + +class ArgumentInferContext(NamedTuple): + """Type argument inference context. + + We need this because we pass around ``Mapping`` and ``Iterable`` types. + These types are only known by ``TypeChecker`` itself. + It is required for ``*`` and ``**`` argument inference. + + https://github.com/python/mypy/issues/11144 + """ + + mapping_type: Instance + iterable_type: Instance + + +def infer_function_type_arguments( + callee_type: CallableType, + arg_types: Sequence[Type | None], + arg_kinds: list[ArgKind], + arg_names: Sequence[str | None] | None, + formal_to_actual: list[list[int]], + context: ArgumentInferContext, + strict: bool = True, + allow_polymorphic: bool = False, +) -> tuple[list[Type | None], list[TypeVarLikeType]]: + """Infer the type arguments of a generic function. + + Return an array of lower bound types for the type variables -1 (at + index 0), -2 (at index 1), etc. A lower bound is None if a value + could not be inferred. + + Arguments: + callee_type: the target generic function + arg_types: argument types at the call site (each optional; if None, + we are not considering this argument in the current pass) + arg_kinds: nodes.ARG_* values for arg_types + formal_to_actual: mapping from formal to actual variable indices + """ + # Infer constraints. + constraints = infer_constraints_for_callable( + callee_type, arg_types, arg_kinds, arg_names, formal_to_actual, context + ) + + # Solve constraints. + type_vars = callee_type.variables + return solve_constraints(type_vars, constraints, strict, allow_polymorphic) + + +def infer_type_arguments( + type_vars: Sequence[TypeVarLikeType], + template: Type, + actual: Type, + is_supertype: bool = False, + skip_unsatisfied: bool = False, +) -> list[Type | None]: + # Like infer_function_type_arguments, but only match a single type + # against a generic type. 
+ constraints = infer_constraints(template, actual, SUPERTYPE_OF if is_supertype else SUBTYPE_OF) + return solve_constraints(type_vars, constraints, skip_unsatisfied=skip_unsatisfied)[0] diff --git a/.venv/lib/python3.12/site-packages/mypy/inspections.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/inspections.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..0de8134 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/inspections.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/inspections.py b/.venv/lib/python3.12/site-packages/mypy/inspections.py new file mode 100644 index 0000000..ac48fac --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/inspections.py @@ -0,0 +1,626 @@ +from __future__ import annotations + +import os +from collections import defaultdict +from functools import cmp_to_key +from typing import Callable + +from mypy.build import State +from mypy.messages import format_type +from mypy.modulefinder import PYTHON_EXTENSIONS +from mypy.nodes import ( + LDEF, + Decorator, + Expression, + FuncBase, + MemberExpr, + MypyFile, + Node, + OverloadedFuncDef, + RefExpr, + SymbolNode, + TypeInfo, + Var, +) +from mypy.server.update import FineGrainedBuildManager +from mypy.traverser import ExtendedTraverserVisitor +from mypy.typeops import tuple_fallback +from mypy.types import ( + FunctionLike, + Instance, + LiteralType, + ProperType, + TupleType, + TypedDictType, + TypeVarType, + UnionType, + get_proper_type, +) +from mypy.typevars import fill_typevars_with_any + + +def node_starts_after(o: Node, line: int, column: int) -> bool: + return o.line > line or o.line == line and o.column > column + + +def node_ends_before(o: Node, line: int, column: int) -> bool: + # Unfortunately, end positions for some statements are a mess, + # e.g. overloaded functions, so we return False when we don't know. 
+ if o.end_line is not None and o.end_column is not None: + if o.end_line < line or o.end_line == line and o.end_column < column: + return True + return False + + +def expr_span(expr: Expression) -> str: + """Format expression span as in mypy error messages.""" + return f"{expr.line}:{expr.column + 1}:{expr.end_line}:{expr.end_column}" + + +def get_instance_fallback(typ: ProperType) -> list[Instance]: + """Returns the Instance fallback for this type if one exists or None.""" + if isinstance(typ, Instance): + return [typ] + elif isinstance(typ, TupleType): + return [tuple_fallback(typ)] + elif isinstance(typ, TypedDictType): + return [typ.fallback] + elif isinstance(typ, FunctionLike): + return [typ.fallback] + elif isinstance(typ, LiteralType): + return [typ.fallback] + elif isinstance(typ, TypeVarType): + if typ.values: + res = [] + for t in typ.values: + res.extend(get_instance_fallback(get_proper_type(t))) + return res + return get_instance_fallback(get_proper_type(typ.upper_bound)) + elif isinstance(typ, UnionType): + res = [] + for t in typ.items: + res.extend(get_instance_fallback(get_proper_type(t))) + return res + return [] + + +def find_node(name: str, info: TypeInfo) -> Var | FuncBase | None: + """Find the node defining member 'name' in given TypeInfo.""" + # TODO: this code shares some logic with checkmember.py + method = info.get_method(name) + if method: + if isinstance(method, Decorator): + return method.var + if method.is_property: + assert isinstance(method, OverloadedFuncDef) + dec = method.items[0] + assert isinstance(dec, Decorator) + return dec.var + return method + else: + # don't have such method, maybe variable? + node = info.get(name) + v = node.node if node else None + if isinstance(v, Var): + return v + return None + + +def find_module_by_fullname(fullname: str, modules: dict[str, State]) -> State | None: + """Find module by a node fullname. + + This logic mimics the one we use in fixup, so should be good enough. + """ + head = fullname + # Special case: a module symbol is considered to be defined in itself, not in enclosing + # package, since this is what users want when clicking go to definition on a module. + if head in modules: + return modules[head] + while True: + if "." 
not in head: + return None + head, tail = head.rsplit(".", maxsplit=1) + mod = modules.get(head) + if mod is not None: + return mod + + +class SearchVisitor(ExtendedTraverserVisitor): + """Visitor looking for an expression whose span matches given one exactly.""" + + def __init__(self, line: int, column: int, end_line: int, end_column: int) -> None: + self.line = line + self.column = column + self.end_line = end_line + self.end_column = end_column + self.result: Expression | None = None + + def visit(self, o: Node) -> bool: + if node_starts_after(o, self.line, self.column): + return False + if node_ends_before(o, self.end_line, self.end_column): + return False + if ( + o.line == self.line + and o.end_line == self.end_line + and o.column == self.column + and o.end_column == self.end_column + ): + if isinstance(o, Expression): + self.result = o + return self.result is None + + +def find_by_location( + tree: MypyFile, line: int, column: int, end_line: int, end_column: int +) -> Expression | None: + """Find an expression matching given span, or None if not found.""" + if end_line < line: + raise ValueError('"end_line" must not be before "line"') + if end_line == line and end_column <= column: + raise ValueError('"end_column" must be after "column"') + visitor = SearchVisitor(line, column, end_line, end_column) + tree.accept(visitor) + return visitor.result + + +class SearchAllVisitor(ExtendedTraverserVisitor): + """Visitor looking for all expressions whose spans enclose given position.""" + + def __init__(self, line: int, column: int) -> None: + self.line = line + self.column = column + self.result: list[Expression] = [] + + def visit(self, o: Node) -> bool: + if node_starts_after(o, self.line, self.column): + return False + if node_ends_before(o, self.line, self.column): + return False + if isinstance(o, Expression): + self.result.append(o) + return True + + +def find_all_by_location(tree: MypyFile, line: int, column: int) -> list[Expression]: + """Find all expressions enclosing given position starting from innermost.""" + visitor = SearchAllVisitor(line, column) + tree.accept(visitor) + return list(reversed(visitor.result)) + + +class InspectionEngine: + """Engine for locating and statically inspecting expressions.""" + + def __init__( + self, + fg_manager: FineGrainedBuildManager, + *, + verbosity: int = 0, + limit: int = 0, + include_span: bool = False, + include_kind: bool = False, + include_object_attrs: bool = False, + union_attrs: bool = False, + force_reload: bool = False, + ) -> None: + self.fg_manager = fg_manager + self.verbosity = verbosity + self.limit = limit + self.include_span = include_span + self.include_kind = include_kind + self.include_object_attrs = include_object_attrs + self.union_attrs = union_attrs + self.force_reload = force_reload + # Module for which inspection was requested. + self.module: State | None = None + + def reload_module(self, state: State) -> None: + """Reload given module while temporary exporting types.""" + old = self.fg_manager.manager.options.export_types + self.fg_manager.manager.options.export_types = True + try: + self.fg_manager.flush_cache() + assert state.path is not None + self.fg_manager.update([(state.id, state.path)], []) + finally: + self.fg_manager.manager.options.export_types = old + + def expr_type(self, expression: Expression) -> tuple[str, bool]: + """Format type for an expression using current options. + + If type is known, second item returned is True. 
If type is not known, an error + message is returned instead, and second item returned is False. + """ + expr_type = self.fg_manager.manager.all_types.get(expression) + if expr_type is None: + return self.missing_type(expression), False + + type_str = format_type( + expr_type, self.fg_manager.manager.options, verbosity=self.verbosity + ) + return self.add_prefixes(type_str, expression), True + + def object_type(self) -> Instance: + builtins = self.fg_manager.graph["builtins"].tree + assert builtins is not None + object_node = builtins.names["object"].node + assert isinstance(object_node, TypeInfo) + return Instance(object_node, []) + + def collect_attrs(self, instances: list[Instance]) -> dict[TypeInfo, list[str]]: + """Collect attributes from all union/typevar variants.""" + + def item_attrs(attr_dict: dict[TypeInfo, list[str]]) -> set[str]: + attrs = set() + for base in attr_dict: + attrs |= set(attr_dict[base]) + return attrs + + def cmp_types(x: TypeInfo, y: TypeInfo) -> int: + if x in y.mro: + return 1 + if y in x.mro: + return -1 + return 0 + + # First gather all attributes for every union variant. + assert instances + all_attrs = [] + for instance in instances: + attrs = {} + mro = instance.type.mro + if not self.include_object_attrs: + mro = mro[:-1] + for base in mro: + attrs[base] = sorted(base.names) + all_attrs.append(attrs) + + # Find attributes valid for all variants in a union or type variable. + intersection = item_attrs(all_attrs[0]) + for item in all_attrs[1:]: + intersection &= item_attrs(item) + + # Combine attributes from all variants into a single dict while + # also removing invalid attributes (unless using --union-attrs). + combined_attrs = defaultdict(list) + for item in all_attrs: + for base in item: + if base in combined_attrs: + continue + for name in item[base]: + if self.union_attrs or name in intersection: + combined_attrs[base].append(name) + + # Sort bases by MRO, unrelated will appear in the order they appeared as union variants. + sorted_bases = sorted(combined_attrs.keys(), key=cmp_to_key(cmp_types)) + result = {} + for base in sorted_bases: + if not combined_attrs[base]: + # Skip bases where everytihng was filtered out. + continue + result[base] = combined_attrs[base] + return result + + def _fill_from_dict( + self, attrs_strs: list[str], attrs_dict: dict[TypeInfo, list[str]] + ) -> None: + for base in attrs_dict: + cls_name = base.name if self.verbosity < 1 else base.fullname + attrs = [f'"{attr}"' for attr in attrs_dict[base]] + attrs_strs.append(f'"{cls_name}": [{", ".join(attrs)}]') + + def expr_attrs(self, expression: Expression) -> tuple[str, bool]: + """Format attributes that are valid for a given expression. + + If expression type is not an Instance, try using fallback. Attributes are + returned as a JSON (ordered by MRO) that maps base class name to list of + attributes. Attributes may appear in multiple bases if overridden (we simply + follow usual mypy logic for creating new Vars etc). + """ + expr_type = self.fg_manager.manager.all_types.get(expression) + if expr_type is None: + return self.missing_type(expression), False + + expr_type = get_proper_type(expr_type) + instances = get_instance_fallback(expr_type) + if not instances: + # Everything is an object in Python. + instances = [self.object_type()] + + attrs_dict = self.collect_attrs(instances) + + # Special case: modules have names apart from those from ModuleType. 
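+        # For example (illustrative): for a reference to the module "os", the result
+        # gains an extra '"<os>"' pseudo-base listing the names defined in the module
+        # itself, in addition to the attributes inherited from types.ModuleType.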
+ if isinstance(expression, RefExpr) and isinstance(expression.node, MypyFile): + node = expression.node + names = sorted(node.names) + if "__builtins__" in names: + # This is just to make tests stable. No one will really need this name. + names.remove("__builtins__") + mod_dict = {f'"<{node.fullname}>"': [f'"{name}"' for name in names]} + else: + mod_dict = {} + + # Special case: for class callables, prepend with the class attributes. + # TODO: also handle cases when such callable appears in a union. + if isinstance(expr_type, FunctionLike) and expr_type.is_type_obj(): + template = fill_typevars_with_any(expr_type.type_object()) + class_dict = self.collect_attrs(get_instance_fallback(template)) + else: + class_dict = {} + + # We don't use JSON dump to be sure keys order is always preserved. + base_attrs = [] + if mod_dict: + for mod in mod_dict: + base_attrs.append(f'{mod}: [{", ".join(mod_dict[mod])}]') + self._fill_from_dict(base_attrs, class_dict) + self._fill_from_dict(base_attrs, attrs_dict) + return self.add_prefixes(f'{{{", ".join(base_attrs)}}}', expression), True + + def format_node(self, module: State, node: FuncBase | SymbolNode) -> str: + return f"{module.path}:{node.line}:{node.column + 1}:{node.name}" + + def collect_nodes(self, expression: RefExpr) -> list[FuncBase | SymbolNode]: + """Collect nodes that can be referred to by an expression. + + Note: it can be more than one for example in case of a union attribute. + """ + node: FuncBase | SymbolNode | None = expression.node + nodes: list[FuncBase | SymbolNode] + if node is None: + # Tricky case: instance attribute + if isinstance(expression, MemberExpr) and expression.kind is None: + base_type = self.fg_manager.manager.all_types.get(expression.expr) + if base_type is None: + return [] + + # Now we use the base type to figure out where the attribute is defined. + base_type = get_proper_type(base_type) + instances = get_instance_fallback(base_type) + nodes = [] + for instance in instances: + node = find_node(expression.name, instance.type) + if node: + nodes.append(node) + if not nodes: + # Try checking class namespace if attribute is on a class object. + if isinstance(base_type, FunctionLike) and base_type.is_type_obj(): + instances = get_instance_fallback( + fill_typevars_with_any(base_type.type_object()) + ) + for instance in instances: + node = find_node(expression.name, instance.type) + if node: + nodes.append(node) + else: + # Still no luck, give up. + return [] + else: + return [] + else: + # Easy case: a module-level definition + nodes = [node] + return nodes + + def modules_for_nodes( + self, nodes: list[FuncBase | SymbolNode], expression: RefExpr + ) -> tuple[dict[FuncBase | SymbolNode, State], bool]: + """Gather modules where given nodes where defined. + + Also check if they need to be refreshed (cached nodes may have + lines/columns missing). + """ + modules = {} + reload_needed = False + for node in nodes: + module = find_module_by_fullname(node.fullname, self.fg_manager.graph) + if not module: + if expression.kind == LDEF and self.module: + module = self.module + else: + continue + modules[node] = module + if not module.tree or module.tree.is_cache_skeleton or self.force_reload: + reload_needed |= not module.tree or module.tree.is_cache_skeleton + self.reload_module(module) + return modules, reload_needed + + def expression_def(self, expression: Expression) -> tuple[str, bool]: + """Find and format definition location for an expression. 
+ + If it is not a RefExpr, it is effectively skipped by returning an + empty result. + """ + if not isinstance(expression, RefExpr): + # If there are no suitable matches at all, we return error later. + return "", True + + nodes = self.collect_nodes(expression) + + if not nodes: + return self.missing_node(expression), False + + modules, reload_needed = self.modules_for_nodes(nodes, expression) + if reload_needed: + # TODO: line/column are not stored in cache for vast majority of symbol nodes. + # Adding them will make thing faster, but will have visible memory impact. + nodes = self.collect_nodes(expression) + modules, reload_needed = self.modules_for_nodes(nodes, expression) + assert not reload_needed + + result = [] + for node in modules: + result.append(self.format_node(modules[node], node)) + + if not result: + return self.missing_node(expression), False + + return self.add_prefixes(", ".join(result), expression), True + + def missing_type(self, expression: Expression) -> str: + alt_suggestion = "" + if not self.force_reload: + alt_suggestion = " or try --force-reload" + return ( + f'No known type available for "{type(expression).__name__}"' + f" (maybe unreachable{alt_suggestion})" + ) + + def missing_node(self, expression: Expression) -> str: + return ( + f'Cannot find definition for "{type(expression).__name__}" at {expr_span(expression)}' + ) + + def add_prefixes(self, result: str, expression: Expression) -> str: + prefixes = [] + if self.include_kind: + prefixes.append(f"{type(expression).__name__}") + if self.include_span: + prefixes.append(expr_span(expression)) + if prefixes: + prefix = ":".join(prefixes) + " -> " + else: + prefix = "" + return prefix + result + + def run_inspection_by_exact_location( + self, + tree: MypyFile, + line: int, + column: int, + end_line: int, + end_column: int, + method: Callable[[Expression], tuple[str, bool]], + ) -> dict[str, object]: + """Get type of an expression matching a span. + + Type or error is returned as a standard daemon response dict. + """ + try: + expression = find_by_location(tree, line, column - 1, end_line, end_column) + except ValueError as err: + return {"error": str(err)} + + if expression is None: + span = f"{line}:{column}:{end_line}:{end_column}" + return {"out": f"Can't find expression at span {span}", "err": "", "status": 1} + + inspection_str, success = method(expression) + return {"out": inspection_str, "err": "", "status": 0 if success else 1} + + def run_inspection_by_position( + self, + tree: MypyFile, + line: int, + column: int, + method: Callable[[Expression], tuple[str, bool]], + ) -> dict[str, object]: + """Get types of all expressions enclosing a position. + + Types and/or errors are returned as a standard daemon response dict. + """ + expressions = find_all_by_location(tree, line, column - 1) + if not expressions: + position = f"{line}:{column}" + return { + "out": f"Can't find any expressions at position {position}", + "err": "", + "status": 1, + } + + inspection_strs = [] + status = 0 + for expression in expressions: + inspection_str, success = method(expression) + if not success: + status = 1 + if inspection_str: + inspection_strs.append(inspection_str) + if self.limit: + inspection_strs = inspection_strs[: self.limit] + return {"out": "\n".join(inspection_strs), "err": "", "status": status} + + def find_module(self, file: str) -> tuple[State | None, dict[str, object]]: + """Find module by path, or return a suitable error message. + + Note we don't use exceptions to simplify handling 1 vs 2 statuses. 
+ """ + if not any(file.endswith(ext) for ext in PYTHON_EXTENSIONS): + return None, {"error": "Source file is not a Python file"} + + # We are using a bit slower but robust way to find a module by path, + # to be sure that namespace packages are handled properly. + abs_path = os.path.abspath(file) + state = next((s for s in self.fg_manager.graph.values() if s.abspath == abs_path), None) + self.module = state + return ( + state, + {"out": f"Unknown module: {file}", "err": "", "status": 1} if state is None else {}, + ) + + def run_inspection( + self, location: str, method: Callable[[Expression], tuple[str, bool]] + ) -> dict[str, object]: + """Top-level logic to inspect expression(s) at a location. + + This can be reused by various simple inspections. + """ + try: + file, pos = parse_location(location) + except ValueError as err: + return {"error": str(err)} + + state, err_dict = self.find_module(file) + if state is None: + assert err_dict + return err_dict + + # Force reloading to load from cache, account for any edits, etc. + if not state.tree or state.tree.is_cache_skeleton or self.force_reload: + self.reload_module(state) + assert state.tree is not None + + if len(pos) == 4: + # Full span, return an exact match only. + line, column, end_line, end_column = pos + return self.run_inspection_by_exact_location( + state.tree, line, column, end_line, end_column, method + ) + assert len(pos) == 2 + # Inexact location, return all expressions. + line, column = pos + return self.run_inspection_by_position(state.tree, line, column, method) + + def get_type(self, location: str) -> dict[str, object]: + """Get types of expression(s) at a location.""" + return self.run_inspection(location, self.expr_type) + + def get_attrs(self, location: str) -> dict[str, object]: + """Get attributes of expression(s) at a location.""" + return self.run_inspection(location, self.expr_attrs) + + def get_definition(self, location: str) -> dict[str, object]: + """Get symbol definitions of expression(s) at a location.""" + result = self.run_inspection(location, self.expression_def) + if "out" in result and not result["out"]: + # None of the expressions found turns out to be a RefExpr. + _, location = location.split(":", maxsplit=1) + result["out"] = f"No name or member expressions at {location}" + result["status"] = 1 + return result + + +def parse_location(location: str) -> tuple[str, list[int]]: + if location.count(":") < 2: + raise ValueError("Format should be file:line:column[:end_line:end_column]") + parts = location.rsplit(":", maxsplit=2) + start, *rest = parts + # Note: we must allow drive prefix like `C:` on Windows. 
+ if start.count(":") < 2: + return start, [int(p) for p in rest] + parts = start.rsplit(":", maxsplit=2) + start, *start_rest = parts + if start.count(":") < 2: + return start, [int(p) for p in start_rest + rest] + raise ValueError("Format should be file:line:column[:end_line:end_column]") diff --git a/.venv/lib/python3.12/site-packages/mypy/ipc.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/ipc.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..13a5c4e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/ipc.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/ipc.py b/.venv/lib/python3.12/site-packages/mypy/ipc.py new file mode 100644 index 0000000..b2046a4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/ipc.py @@ -0,0 +1,313 @@ +"""Cross platform abstractions for inter-process communication + +On Unix, this uses AF_UNIX sockets. +On Windows, this uses NamedPipes. +""" + +from __future__ import annotations + +import base64 +import codecs +import os +import shutil +import sys +import tempfile +from types import TracebackType +from typing import Callable, Final + +if sys.platform == "win32": + # This may be private, but it is needed for IPC on Windows, and is basically stable + import _winapi + import ctypes + + _IPCHandle = int + + kernel32 = ctypes.windll.kernel32 + DisconnectNamedPipe: Callable[[_IPCHandle], int] = kernel32.DisconnectNamedPipe + FlushFileBuffers: Callable[[_IPCHandle], int] = kernel32.FlushFileBuffers +else: + import socket + + _IPCHandle = socket.socket + + +class IPCException(Exception): + """Exception for IPC issues.""" + + +class IPCBase: + """Base class for communication between the dmypy client and server. + + This contains logic shared between the client and server, such as reading + and writing. + We want to be able to send multiple "messages" over a single connection and + to be able to separate the messages. We do this by encoding the messages + in an alphabet that does not contain spaces, then adding a space for + separation. The last framed message is also followed by a space. + """ + + connection: _IPCHandle + + def __init__(self, name: str, timeout: float | None) -> None: + self.name = name + self.timeout = timeout + self.buffer = bytearray() + + def frame_from_buffer(self) -> bytearray | None: + """Return a full frame from the bytes we have in the buffer.""" + space_pos = self.buffer.find(b" ") + if space_pos == -1: + return None + # We have a full frame + bdata = self.buffer[:space_pos] + self.buffer = self.buffer[space_pos + 1 :] + return bdata + + def read(self, size: int = 100000) -> str: + """Read bytes from an IPC connection until we have a full frame.""" + bdata: bytearray | None = bytearray() + if sys.platform == "win32": + while True: + # Check if we already have a message in the buffer before + # receiving any more data from the socket. + bdata = self.frame_from_buffer() + if bdata is not None: + break + + # Receive more data into the buffer. 
+ ov, err = _winapi.ReadFile(self.connection, size, overlapped=True) + try: + if err == _winapi.ERROR_IO_PENDING: + timeout = int(self.timeout * 1000) if self.timeout else _winapi.INFINITE + res = _winapi.WaitForSingleObject(ov.event, timeout) + if res != _winapi.WAIT_OBJECT_0: + raise IPCException(f"Bad result from I/O wait: {res}") + except BaseException: + ov.cancel() + raise + _, err = ov.GetOverlappedResult(True) + more = ov.getbuffer() + if more: + self.buffer.extend(more) + bdata = self.frame_from_buffer() + if bdata is not None: + break + if err == 0: + # we are done! + break + elif err == _winapi.ERROR_MORE_DATA: + # read again + continue + elif err == _winapi.ERROR_OPERATION_ABORTED: + raise IPCException("ReadFile operation aborted.") + else: + while True: + # Check if we already have a message in the buffer before + # receiving any more data from the socket. + bdata = self.frame_from_buffer() + if bdata is not None: + break + + # Receive more data into the buffer. + more = self.connection.recv(size) + if not more: + # Connection closed + break + self.buffer.extend(more) + + if not bdata: + # Socket was empty and we didn't get any frame. + # This should only happen if the socket was closed. + return "" + return codecs.decode(bdata, "base64").decode("utf8") + + def write(self, data: str) -> None: + """Write to an IPC connection.""" + + # Frame the data by urlencoding it and separating by space. + encoded_data = codecs.encode(data.encode("utf8"), "base64") + b" " + + if sys.platform == "win32": + try: + ov, err = _winapi.WriteFile(self.connection, encoded_data, overlapped=True) + try: + if err == _winapi.ERROR_IO_PENDING: + timeout = int(self.timeout * 1000) if self.timeout else _winapi.INFINITE + res = _winapi.WaitForSingleObject(ov.event, timeout) + if res != _winapi.WAIT_OBJECT_0: + raise IPCException(f"Bad result from I/O wait: {res}") + elif err != 0: + raise IPCException(f"Failed writing to pipe with error: {err}") + except BaseException: + ov.cancel() + raise + bytes_written, err = ov.GetOverlappedResult(True) + assert err == 0, err + assert bytes_written == len(encoded_data) + except OSError as e: + raise IPCException(f"Failed to write with error: {e.winerror}") from e + else: + self.connection.sendall(encoded_data) + + def close(self) -> None: + if sys.platform == "win32": + if self.connection != _winapi.NULL: + _winapi.CloseHandle(self.connection) + else: + self.connection.close() + + +class IPCClient(IPCBase): + """The client side of an IPC connection.""" + + def __init__(self, name: str, timeout: float | None) -> None: + super().__init__(name, timeout) + if sys.platform == "win32": + timeout = int(self.timeout * 1000) if self.timeout else _winapi.NMPWAIT_WAIT_FOREVER + try: + _winapi.WaitNamedPipe(self.name, timeout) + except FileNotFoundError as e: + raise IPCException(f"The NamedPipe at {self.name} was not found.") from e + except OSError as e: + if e.winerror == _winapi.ERROR_SEM_TIMEOUT: + raise IPCException("Timed out waiting for connection.") from e + else: + raise + try: + self.connection = _winapi.CreateFile( + self.name, + _winapi.GENERIC_READ | _winapi.GENERIC_WRITE, + 0, + _winapi.NULL, + _winapi.OPEN_EXISTING, + _winapi.FILE_FLAG_OVERLAPPED, + _winapi.NULL, + ) + except OSError as e: + if e.winerror == _winapi.ERROR_PIPE_BUSY: + raise IPCException("The connection is busy.") from e + else: + raise + _winapi.SetNamedPipeHandleState( + self.connection, _winapi.PIPE_READMODE_MESSAGE, None, None + ) + else: + self.connection = socket.socket(socket.AF_UNIX) + 
self.connection.settimeout(timeout) + self.connection.connect(name) + + def __enter__(self) -> IPCClient: + return self + + def __exit__( + self, + exc_ty: type[BaseException] | None = None, + exc_val: BaseException | None = None, + exc_tb: TracebackType | None = None, + ) -> None: + self.close() + + +class IPCServer(IPCBase): + BUFFER_SIZE: Final = 2**16 + + def __init__(self, name: str, timeout: float | None = None) -> None: + if sys.platform == "win32": + name = r"\\.\pipe\{}-{}.pipe".format( + name, base64.urlsafe_b64encode(os.urandom(6)).decode() + ) + else: + name = f"{name}.sock" + super().__init__(name, timeout) + if sys.platform == "win32": + self.connection = _winapi.CreateNamedPipe( + self.name, + _winapi.PIPE_ACCESS_DUPLEX + | _winapi.FILE_FLAG_FIRST_PIPE_INSTANCE + | _winapi.FILE_FLAG_OVERLAPPED, + _winapi.PIPE_READMODE_MESSAGE + | _winapi.PIPE_TYPE_MESSAGE + | _winapi.PIPE_WAIT + | 0x8, # PIPE_REJECT_REMOTE_CLIENTS + 1, # one instance + self.BUFFER_SIZE, + self.BUFFER_SIZE, + _winapi.NMPWAIT_WAIT_FOREVER, + 0, # Use default security descriptor + ) + if self.connection == -1: # INVALID_HANDLE_VALUE + err = _winapi.GetLastError() + raise IPCException(f"Invalid handle to pipe: {err}") + else: + self.sock_directory = tempfile.mkdtemp() + sockfile = os.path.join(self.sock_directory, self.name) + self.sock = socket.socket(socket.AF_UNIX) + self.sock.bind(sockfile) + self.sock.listen(1) + if timeout is not None: + self.sock.settimeout(timeout) + + def __enter__(self) -> IPCServer: + if sys.platform == "win32": + # NOTE: It is theoretically possible that this will hang forever if the + # client never connects, though this can be "solved" by killing the server + try: + ov = _winapi.ConnectNamedPipe(self.connection, overlapped=True) + except OSError as e: + # Don't raise if the client already exists, or the client already connected + if e.winerror not in (_winapi.ERROR_PIPE_CONNECTED, _winapi.ERROR_NO_DATA): + raise + else: + try: + timeout = int(self.timeout * 1000) if self.timeout else _winapi.INFINITE + res = _winapi.WaitForSingleObject(ov.event, timeout) + assert res == _winapi.WAIT_OBJECT_0 + except BaseException: + ov.cancel() + _winapi.CloseHandle(self.connection) + raise + _, err = ov.GetOverlappedResult(True) + assert err == 0 + else: + try: + self.connection, _ = self.sock.accept() + except socket.timeout as e: + raise IPCException("The socket timed out") from e + return self + + def __exit__( + self, + exc_ty: type[BaseException] | None = None, + exc_val: BaseException | None = None, + exc_tb: TracebackType | None = None, + ) -> None: + if sys.platform == "win32": + try: + # Wait for the client to finish reading the last write before disconnecting + if not FlushFileBuffers(self.connection): + raise IPCException( + "Failed to flush NamedPipe buffer, maybe the client hung up?" 
+ ) + finally: + DisconnectNamedPipe(self.connection) + else: + self.close() + + def cleanup(self) -> None: + if sys.platform == "win32": + self.close() + else: + shutil.rmtree(self.sock_directory) + + @property + def connection_name(self) -> str: + if sys.platform == "win32": + return self.name + elif sys.platform == "gnu0": + # GNU/Hurd returns empty string from getsockname() + # for AF_UNIX sockets + return os.path.join(self.sock_directory, self.name) + else: + name = self.sock.getsockname() + assert isinstance(name, str) + return name diff --git a/.venv/lib/python3.12/site-packages/mypy/join.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/join.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..c7ee6c8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/join.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/join.py b/.venv/lib/python3.12/site-packages/mypy/join.py new file mode 100644 index 0000000..a074fa5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/join.py @@ -0,0 +1,906 @@ +"""Calculation of the least upper bound types (joins).""" + +from __future__ import annotations + +from collections.abc import Sequence +from typing import overload + +import mypy.typeops +from mypy.expandtype import expand_type +from mypy.maptype import map_instance_to_supertype +from mypy.nodes import CONTRAVARIANT, COVARIANT, INVARIANT, VARIANCE_NOT_READY, TypeInfo +from mypy.state import state +from mypy.subtypes import ( + SubtypeContext, + find_member, + is_equivalent, + is_proper_subtype, + is_protocol_implementation, + is_subtype, +) +from mypy.types import ( + AnyType, + CallableType, + DeletedType, + ErasedType, + FunctionLike, + Instance, + LiteralType, + NoneType, + Overloaded, + Parameters, + ParamSpecType, + PartialType, + ProperType, + TupleType, + Type, + TypeAliasType, + TypedDictType, + TypeOfAny, + TypeType, + TypeVarId, + TypeVarLikeType, + TypeVarTupleType, + TypeVarType, + TypeVisitor, + UnboundType, + UninhabitedType, + UnionType, + UnpackType, + find_unpack_in_list, + get_proper_type, + get_proper_types, + split_with_prefix_and_suffix, +) + + +class InstanceJoiner: + def __init__(self) -> None: + self.seen_instances: list[tuple[Instance, Instance]] = [] + + def join_instances(self, t: Instance, s: Instance) -> ProperType: + if (t, s) in self.seen_instances or (s, t) in self.seen_instances: + return object_from_instance(t) + + self.seen_instances.append((t, s)) + + # Calculate the join of two instance types + if t.type == s.type: + # Simplest case: join two types with the same base type (but + # potentially different arguments). + + # Combine type arguments. + args: list[Type] = [] + # N.B: We use zip instead of indexing because the lengths might have + # mismatches during daemon reprocessing. + if t.type.has_type_var_tuple_type: + # We handle joins of variadic instances by simply creating correct mapping + # for type arguments and compute the individual joins same as for regular + # instances. All the heavy lifting is done in the join of tuple types. 
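+                # For example (illustrative): for a class C[T1, *Ts, T2], the arguments
+                # of C[int, str, bytes, bool] are regrouped as (int, tuple[str, bytes], bool)
+                # so that the per-argument joins below line up positionally.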
+ assert s.type.type_var_tuple_prefix is not None + assert s.type.type_var_tuple_suffix is not None + prefix = s.type.type_var_tuple_prefix + suffix = s.type.type_var_tuple_suffix + tvt = s.type.defn.type_vars[prefix] + assert isinstance(tvt, TypeVarTupleType) + fallback = tvt.tuple_fallback + s_prefix, s_middle, s_suffix = split_with_prefix_and_suffix(s.args, prefix, suffix) + t_prefix, t_middle, t_suffix = split_with_prefix_and_suffix(t.args, prefix, suffix) + s_args = s_prefix + (TupleType(list(s_middle), fallback),) + s_suffix + t_args = t_prefix + (TupleType(list(t_middle), fallback),) + t_suffix + else: + t_args = t.args + s_args = s.args + for ta, sa, type_var in zip(t_args, s_args, t.type.defn.type_vars): + ta_proper = get_proper_type(ta) + sa_proper = get_proper_type(sa) + new_type: Type | None = None + if isinstance(ta_proper, AnyType): + new_type = AnyType(TypeOfAny.from_another_any, ta_proper) + elif isinstance(sa_proper, AnyType): + new_type = AnyType(TypeOfAny.from_another_any, sa_proper) + elif isinstance(type_var, TypeVarType): + if type_var.variance in (COVARIANT, VARIANCE_NOT_READY): + new_type = join_types(ta, sa, self) + if len(type_var.values) != 0 and new_type not in type_var.values: + self.seen_instances.pop() + return object_from_instance(t) + if not is_subtype(new_type, type_var.upper_bound): + self.seen_instances.pop() + return object_from_instance(t) + # TODO: contravariant case should use meet but pass seen instances as + # an argument to keep track of recursive checks. + elif type_var.variance in (INVARIANT, CONTRAVARIANT): + if isinstance(ta_proper, UninhabitedType) and ta_proper.ambiguous: + new_type = sa + elif isinstance(sa_proper, UninhabitedType) and sa_proper.ambiguous: + new_type = ta + elif not is_equivalent(ta, sa): + self.seen_instances.pop() + return object_from_instance(t) + else: + # If the types are different but equivalent, then an Any is involved + # so using a join in the contravariant case is also OK. + new_type = join_types(ta, sa, self) + elif isinstance(type_var, TypeVarTupleType): + new_type = get_proper_type(join_types(ta, sa, self)) + # Put the joined arguments back into instance in the normal form: + # a) Tuple[X, Y, Z] -> [X, Y, Z] + # b) tuple[X, ...] -> [*tuple[X, ...]] + if isinstance(new_type, Instance): + assert new_type.type.fullname == "builtins.tuple" + new_type = UnpackType(new_type) + else: + assert isinstance(new_type, TupleType) + args.extend(new_type.items) + continue + else: + # ParamSpec type variables behave the same, independent of variance + if not is_equivalent(ta, sa): + return get_proper_type(type_var.upper_bound) + new_type = join_types(ta, sa, self) + assert new_type is not None + args.append(new_type) + result: ProperType = Instance(t.type, args) + elif t.type.bases and is_proper_subtype( + t, s, subtype_context=SubtypeContext(ignore_type_params=True) + ): + result = self.join_instances_via_supertype(t, s) + else: + # Now t is not a subtype of s, and t != s. Now s could be a subtype + # of t; alternatively, we need to find a common supertype. This works + # in of the both cases. + result = self.join_instances_via_supertype(s, t) + + self.seen_instances.pop() + return result + + def join_instances_via_supertype(self, t: Instance, s: Instance) -> ProperType: + # Give preference to joins via duck typing relationship, so that + # join(int, float) == float, for example. 
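+        # (_promote holds duck-type promotions declared for a class; for instance,
+        # builtins.int is promotable to float, which is what makes the example above work.)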
+ for p in t.type._promote: + if is_subtype(p, s): + return join_types(p, s, self) + for p in s.type._promote: + if is_subtype(p, t): + return join_types(t, p, self) + + # Compute the "best" supertype of t when joined with s. + # The definition of "best" may evolve; for now it is the one with + # the longest MRO. Ties are broken by using the earlier base. + + # Go over both sets of bases in case there's an explicit Protocol base. This is important + # to ensure commutativity of join (although in cases where both classes have relevant + # Protocol bases this maybe might still not be commutative) + base_types: dict[TypeInfo, None] = {} # dict to deduplicate but preserve order + for base in t.type.bases: + base_types[base.type] = None + for base in s.type.bases: + if base.type.is_protocol and is_subtype(t, base): + base_types[base.type] = None + + best: ProperType | None = None + for base_type in base_types: + mapped = map_instance_to_supertype(t, base_type) + res = self.join_instances(mapped, s) + if best is None or is_better(res, best): + best = res + assert best is not None + for promote in t.type._promote: + if isinstance(promote, Instance): + res = self.join_instances(promote, s) + if is_better(res, best): + best = res + return best + + +def trivial_join(s: Type, t: Type) -> Type: + """Return one of types (expanded) if it is a supertype of other, otherwise top type.""" + if is_subtype(s, t): + return t + elif is_subtype(t, s): + return s + else: + return object_or_any_from_type(get_proper_type(t)) + + +@overload +def join_types( + s: ProperType, t: ProperType, instance_joiner: InstanceJoiner | None = None +) -> ProperType: ... + + +@overload +def join_types(s: Type, t: Type, instance_joiner: InstanceJoiner | None = None) -> Type: ... + + +def join_types(s: Type, t: Type, instance_joiner: InstanceJoiner | None = None) -> Type: + """Return the least upper bound of s and t. + + For example, the join of 'int' and 'object' is 'object'. + """ + if mypy.typeops.is_recursive_pair(s, t): + # This case can trigger an infinite recursion, general support for this will be + # tricky so we use a trivial join (like for protocols). + return trivial_join(s, t) + s = get_proper_type(s) + t = get_proper_type(t) + + if (s.can_be_true, s.can_be_false) != (t.can_be_true, t.can_be_false): + # if types are restricted in different ways, use the more general versions + s = mypy.typeops.true_or_false(s) + t = mypy.typeops.true_or_false(t) + + if isinstance(s, UnionType) and not isinstance(t, UnionType): + s, t = t, s + + if isinstance(s, AnyType): + return s + + if isinstance(s, ErasedType): + return t + + if isinstance(s, NoneType) and not isinstance(t, NoneType): + s, t = t, s + + if isinstance(s, UninhabitedType) and not isinstance(t, UninhabitedType): + s, t = t, s + + # Meets/joins require callable type normalization. + s, t = normalize_callables(s, t) + + # Use a visitor to handle non-trivial cases. + return t.accept(TypeJoinVisitor(s, instance_joiner)) + + +class TypeJoinVisitor(TypeVisitor[ProperType]): + """Implementation of the least upper bound algorithm. + + Attributes: + s: The other (left) type operand. 
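+
+    Note that join_types(s, t) dispatches on t via t.accept(), so each visit_*()
+    method below computes the join of self.s with the visited kind of type.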
+ """ + + def __init__(self, s: ProperType, instance_joiner: InstanceJoiner | None = None) -> None: + self.s = s + self.instance_joiner = instance_joiner + + def visit_unbound_type(self, t: UnboundType) -> ProperType: + return AnyType(TypeOfAny.special_form) + + def visit_union_type(self, t: UnionType) -> ProperType: + if is_proper_subtype(self.s, t): + return t + else: + return mypy.typeops.make_simplified_union([self.s, t]) + + def visit_any(self, t: AnyType) -> ProperType: + return t + + def visit_none_type(self, t: NoneType) -> ProperType: + if state.strict_optional: + if isinstance(self.s, (NoneType, UninhabitedType)): + return t + elif isinstance(self.s, (UnboundType, AnyType)): + return AnyType(TypeOfAny.special_form) + else: + return mypy.typeops.make_simplified_union([self.s, t]) + else: + return self.s + + def visit_uninhabited_type(self, t: UninhabitedType) -> ProperType: + return self.s + + def visit_deleted_type(self, t: DeletedType) -> ProperType: + return self.s + + def visit_erased_type(self, t: ErasedType) -> ProperType: + return self.s + + def visit_type_var(self, t: TypeVarType) -> ProperType: + if isinstance(self.s, TypeVarType): + if self.s.id == t.id: + if self.s.upper_bound == t.upper_bound: + return self.s + return self.s.copy_modified( + upper_bound=join_types(self.s.upper_bound, t.upper_bound) + ) + # Fix non-commutative joins + return get_proper_type(join_types(self.s.upper_bound, t.upper_bound)) + else: + return self.default(self.s) + + def visit_param_spec(self, t: ParamSpecType) -> ProperType: + if self.s == t: + return t + return self.default(self.s) + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> ProperType: + if self.s == t: + return t + if isinstance(self.s, Instance) and is_subtype(t.upper_bound, self.s): + # TODO: should we do this more generally and for all TypeVarLikeTypes? + return self.s + return self.default(self.s) + + def visit_unpack_type(self, t: UnpackType) -> UnpackType: + raise NotImplementedError + + def visit_parameters(self, t: Parameters) -> ProperType: + if isinstance(self.s, Parameters): + if not is_similar_params(t, self.s): + # TODO: it would be prudent to return [*object, **object] instead of Any. + return self.default(self.s) + from mypy.meet import meet_types + + return t.copy_modified( + arg_types=[ + meet_types(s_a, t_a) for s_a, t_a in zip(self.s.arg_types, t.arg_types) + ], + arg_names=combine_arg_names(self.s, t), + ) + else: + return self.default(self.s) + + def visit_instance(self, t: Instance) -> ProperType: + if isinstance(self.s, Instance): + if self.instance_joiner is None: + self.instance_joiner = InstanceJoiner() + nominal = self.instance_joiner.join_instances(t, self.s) + structural: Instance | None = None + if t.type.is_protocol and is_protocol_implementation(self.s, t): + structural = t + elif self.s.type.is_protocol and is_protocol_implementation(t, self.s): + structural = self.s + # Structural join is preferred in the case where we have found both + # structural and nominal and they have same MRO length (see two comments + # in join_instances_via_supertype). Otherwise, just return the nominal join. 
+ if not structural or is_better(nominal, structural): + return nominal + return structural + elif isinstance(self.s, FunctionLike): + if t.type.is_protocol: + call = unpack_callback_protocol(t) + if call: + return join_types(call, self.s) + return join_types(t, self.s.fallback) + elif isinstance(self.s, TypeType): + return join_types(t, self.s) + elif isinstance(self.s, TypedDictType): + return join_types(t, self.s) + elif isinstance(self.s, TupleType): + return join_types(t, self.s) + elif isinstance(self.s, LiteralType): + return join_types(t, self.s) + elif isinstance(self.s, TypeVarTupleType) and is_subtype(self.s.upper_bound, t): + return t + else: + return self.default(self.s) + + def visit_callable_type(self, t: CallableType) -> ProperType: + if isinstance(self.s, CallableType) and is_similar_callables(t, self.s): + if is_equivalent(t, self.s): + return combine_similar_callables(t, self.s) + result = join_similar_callables(t, self.s) + # We set the from_type_type flag to suppress error when a collection of + # concrete class objects gets inferred as their common abstract superclass. + if not ( + (t.is_type_obj() and t.type_object().is_abstract) + or (self.s.is_type_obj() and self.s.type_object().is_abstract) + ): + result.from_type_type = True + if any( + isinstance(tp, (NoneType, UninhabitedType)) + for tp in get_proper_types(result.arg_types) + ): + # We don't want to return unusable Callable, attempt fallback instead. + return join_types(t.fallback, self.s) + return result + elif isinstance(self.s, Overloaded): + # Switch the order of arguments to that we'll get to visit_overloaded. + return join_types(t, self.s) + elif isinstance(self.s, Instance) and self.s.type.is_protocol: + call = unpack_callback_protocol(self.s) + if call: + return join_types(t, call) + return join_types(t.fallback, self.s) + + def visit_overloaded(self, t: Overloaded) -> ProperType: + # This is more complex than most other cases. Here are some + # examples that illustrate how this works. + # + # First let's define a concise notation: + # - Cn are callable types (for n in 1, 2, ...) + # - Ov(C1, C2, ...) is an overloaded type with items C1, C2, ... + # - Callable[[T, ...], S] is written as [T, ...] -> S. + # + # We want some basic properties to hold (assume Cn are all + # unrelated via Any-similarity): + # + # join(Ov(C1, C2), C1) == C1 + # join(Ov(C1, C2), Ov(C1, C2)) == Ov(C1, C2) + # join(Ov(C1, C2), Ov(C1, C3)) == C1 + # join(Ov(C2, C2), C3) == join of fallback types + # + # The presence of Any types makes things more interesting. The join is the + # most general type we can get with respect to Any: + # + # join(Ov([int] -> int, [str] -> str), [Any] -> str) == Any -> str + # + # We could use a simplification step that removes redundancies, but that's not + # implemented right now. Consider this example, where we get a redundancy: + # + # join(Ov([int, Any] -> Any, [str, Any] -> Any), [Any, int] -> Any) == + # Ov([Any, int] -> Any, [Any, int] -> Any) + # + # TODO: Consider more cases of callable subtyping. + result: list[CallableType] = [] + s = self.s + if isinstance(s, FunctionLike): + # The interesting case where both types are function types. + for t_item in t.items: + for s_item in s.items: + if is_similar_callables(t_item, s_item): + if is_equivalent(t_item, s_item): + result.append(combine_similar_callables(t_item, s_item)) + elif is_subtype(t_item, s_item): + result.append(s_item) + if result: + # TODO: Simplify redundancies from the result. 
+ if len(result) == 1: + return result[0] + else: + return Overloaded(result) + return join_types(t.fallback, s.fallback) + elif isinstance(s, Instance) and s.type.is_protocol: + call = unpack_callback_protocol(s) + if call: + return join_types(t, call) + return join_types(t.fallback, s) + + def join_tuples(self, s: TupleType, t: TupleType) -> list[Type] | None: + """Join two tuple types while handling variadic entries. + + This is surprisingly tricky, and we don't handle some tricky corner cases. + Most of the trickiness comes from the variadic tuple items like *tuple[X, ...] + since they can have arbitrary partial overlaps (while *Ts can't be split). + """ + s_unpack_index = find_unpack_in_list(s.items) + t_unpack_index = find_unpack_in_list(t.items) + if s_unpack_index is None and t_unpack_index is None: + if s.length() == t.length(): + items: list[Type] = [] + for i in range(t.length()): + items.append(join_types(t.items[i], s.items[i])) + return items + return None + if s_unpack_index is not None and t_unpack_index is not None: + # The most complex case: both tuples have an unpack item. + s_unpack = s.items[s_unpack_index] + assert isinstance(s_unpack, UnpackType) + s_unpacked = get_proper_type(s_unpack.type) + t_unpack = t.items[t_unpack_index] + assert isinstance(t_unpack, UnpackType) + t_unpacked = get_proper_type(t_unpack.type) + if s.length() == t.length() and s_unpack_index == t_unpack_index: + # We can handle a case where arity is perfectly aligned, e.g. + # join(Tuple[X1, *tuple[Y1, ...], Z1], Tuple[X2, *tuple[Y2, ...], Z2]). + # We can essentially perform the join elementwise. + prefix_len = t_unpack_index + suffix_len = t.length() - t_unpack_index - 1 + items = [] + for si, ti in zip(s.items[:prefix_len], t.items[:prefix_len]): + items.append(join_types(si, ti)) + joined = join_types(s_unpacked, t_unpacked) + if isinstance(joined, TypeVarTupleType): + items.append(UnpackType(joined)) + elif isinstance(joined, Instance) and joined.type.fullname == "builtins.tuple": + items.append(UnpackType(joined)) + else: + if isinstance(t_unpacked, Instance): + assert t_unpacked.type.fullname == "builtins.tuple" + tuple_instance = t_unpacked + else: + assert isinstance(t_unpacked, TypeVarTupleType) + tuple_instance = t_unpacked.tuple_fallback + items.append( + UnpackType( + tuple_instance.copy_modified( + args=[object_from_instance(tuple_instance)] + ) + ) + ) + if suffix_len: + for si, ti in zip(s.items[-suffix_len:], t.items[-suffix_len:]): + items.append(join_types(si, ti)) + return items + if s.length() == 1 or t.length() == 1: + # Another case we can handle is when one of tuple is purely variadic + # (i.e. a non-normalized form of tuple[X, ...]), in this case the join + # will be again purely variadic. + if not (isinstance(s_unpacked, Instance) and isinstance(t_unpacked, Instance)): + return None + assert s_unpacked.type.fullname == "builtins.tuple" + assert t_unpacked.type.fullname == "builtins.tuple" + mid_joined = join_types(s_unpacked.args[0], t_unpacked.args[0]) + t_other = [a for i, a in enumerate(t.items) if i != t_unpack_index] + s_other = [a for i, a in enumerate(s.items) if i != s_unpack_index] + other_joined = join_type_list(s_other + t_other) + mid_joined = join_types(mid_joined, other_joined) + return [UnpackType(s_unpacked.copy_modified(args=[mid_joined]))] + # TODO: are there other case we can handle (e.g. both prefix/suffix are shorter)? 
+ return None + if s_unpack_index is not None: + variadic = s + unpack_index = s_unpack_index + fixed = t + else: + assert t_unpack_index is not None + variadic = t + unpack_index = t_unpack_index + fixed = s + # Case where one tuple has variadic item and the other one doesn't. The join will + # be variadic, since fixed tuple is a subtype of variadic, but not vice versa. + unpack = variadic.items[unpack_index] + assert isinstance(unpack, UnpackType) + unpacked = get_proper_type(unpack.type) + if not isinstance(unpacked, Instance): + return None + if fixed.length() < variadic.length() - 1: + # There are no non-trivial types that are supertype of both. + return None + prefix_len = unpack_index + suffix_len = variadic.length() - prefix_len - 1 + prefix, middle, suffix = split_with_prefix_and_suffix( + tuple(fixed.items), prefix_len, suffix_len + ) + items = [] + for fi, vi in zip(prefix, variadic.items[:prefix_len]): + items.append(join_types(fi, vi)) + mid_joined = join_type_list(list(middle)) + mid_joined = join_types(mid_joined, unpacked.args[0]) + items.append(UnpackType(unpacked.copy_modified(args=[mid_joined]))) + if suffix_len: + for fi, vi in zip(suffix, variadic.items[-suffix_len:]): + items.append(join_types(fi, vi)) + return items + + def visit_tuple_type(self, t: TupleType) -> ProperType: + # When given two fixed-length tuples: + # * If they have the same length, join their subtypes item-wise: + # Tuple[int, bool] + Tuple[bool, bool] becomes Tuple[int, bool] + # * If lengths do not match, return a variadic tuple: + # Tuple[bool, int] + Tuple[bool] becomes Tuple[int, ...] + # + # Otherwise, `t` is a fixed-length tuple but `self.s` is NOT: + # * Joining with a variadic tuple returns variadic tuple: + # Tuple[int, bool] + Tuple[bool, ...] becomes Tuple[int, ...] + # * Joining with any Sequence also returns a Sequence: + # Tuple[int, bool] + List[bool] becomes Sequence[int] + if isinstance(self.s, TupleType): + if self.instance_joiner is None: + self.instance_joiner = InstanceJoiner() + fallback = self.instance_joiner.join_instances( + mypy.typeops.tuple_fallback(self.s), mypy.typeops.tuple_fallback(t) + ) + assert isinstance(fallback, Instance) + items = self.join_tuples(self.s, t) + if items is not None: + if len(items) == 1 and isinstance(item := items[0], UnpackType): + if isinstance(unpacked := get_proper_type(item.type), Instance): + # Avoid double-wrapping tuple[*tuple[X, ...]] + return unpacked + return TupleType(items, fallback) + else: + # TODO: should this be a default fallback behaviour like for meet? + if is_proper_subtype(self.s, t): + return t + if is_proper_subtype(t, self.s): + return self.s + return fallback + else: + return join_types(self.s, mypy.typeops.tuple_fallback(t)) + + def visit_typeddict_type(self, t: TypedDictType) -> ProperType: + if isinstance(self.s, TypedDictType): + items = { + item_name: s_item_type + for (item_name, s_item_type, t_item_type) in self.s.zip(t) + if ( + is_equivalent(s_item_type, t_item_type) + and (item_name in t.required_keys) == (item_name in self.s.required_keys) + ) + } + fallback = self.s.create_anonymous_fallback() + all_keys = set(items.keys()) + # We need to filter by items.keys() since some required keys present in both t and + # self.s might be missing from the join if the types are incompatible. 
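+            # For example (illustrative): joining the TypedDicts
+            #   A = {"x": int, "y": str} (both required) and B = {"x": int} ("x" required)
+            # keeps only {"x": int} with "x" required, since "y" is missing from B and
+            # required-ness of the shared keys must agree.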
+            required_keys = all_keys & t.required_keys & self.s.required_keys
+            # If one type has a key as readonly, we mark it as readonly for both:
+            readonly_keys = (t.readonly_keys | self.s.readonly_keys) & all_keys
+            return TypedDictType(items, required_keys, readonly_keys, fallback)
+        elif isinstance(self.s, Instance):
+            return join_types(self.s, t.fallback)
+        else:
+            return self.default(self.s)
+
+    def visit_literal_type(self, t: LiteralType) -> ProperType:
+        if isinstance(self.s, LiteralType):
+            if t == self.s:
+                return t
+            if self.s.fallback.type.is_enum and t.fallback.type.is_enum:
+                return mypy.typeops.make_simplified_union([self.s, t])
+            return join_types(self.s.fallback, t.fallback)
+        elif isinstance(self.s, Instance) and self.s.last_known_value == t:
+            return t
+        else:
+            return join_types(self.s, t.fallback)
+
+    def visit_partial_type(self, t: PartialType) -> ProperType:
+        # We only have partial information so we can't decide the join result. We should
+        # never get here.
+        assert False, "Internal error"
+
+    def visit_type_type(self, t: TypeType) -> ProperType:
+        if isinstance(self.s, TypeType):
+            return TypeType.make_normalized(
+                join_types(t.item, self.s.item),
+                line=t.line,
+                is_type_form=self.s.is_type_form or t.is_type_form,
+            )
+        elif isinstance(self.s, Instance) and self.s.type.fullname == "builtins.type":
+            return self.s
+        else:
+            return self.default(self.s)
+
+    def visit_type_alias_type(self, t: TypeAliasType) -> ProperType:
+        assert False, f"This should be never called, got {t}"
+
+    def default(self, typ: Type) -> ProperType:
+        typ = get_proper_type(typ)
+        if isinstance(typ, Instance):
+            return object_from_instance(typ)
+        elif isinstance(typ, TypeType):
+            return self.default(typ.item)
+        elif isinstance(typ, UnboundType):
+            return AnyType(TypeOfAny.special_form)
+        elif isinstance(typ, TupleType):
+            return self.default(mypy.typeops.tuple_fallback(typ))
+        elif isinstance(typ, TypedDictType):
+            return self.default(typ.fallback)
+        elif isinstance(typ, FunctionLike):
+            return self.default(typ.fallback)
+        elif isinstance(typ, TypeVarType):
+            return self.default(typ.upper_bound)
+        elif isinstance(typ, ParamSpecType):
+            return self.default(typ.upper_bound)
+        else:
+            return AnyType(TypeOfAny.special_form)
+
+
+def is_better(t: Type, s: Type) -> bool:
+    # Given two possible results from join_instances_via_supertype(),
+    # indicate whether t is the better one.
+    t = get_proper_type(t)
+    s = get_proper_type(s)
+
+    if isinstance(t, Instance):
+        if not isinstance(s, Instance):
+            return True
+        if t.type.is_protocol != s.type.is_protocol:
+            if t.type.fullname != "builtins.object" and s.type.fullname != "builtins.object":
+                # mro of protocol is not really relevant
+                return not t.type.is_protocol
+        # Use len(mro) as a proxy for the better choice.
+        if len(t.type.mro) > len(s.type.mro):
+            return True
+    return False
+
+
+def normalize_callables(s: ProperType, t: ProperType) -> tuple[ProperType, ProperType]:
+    if isinstance(s, (CallableType, Overloaded)):
+        s = s.with_unpacked_kwargs()
+    if isinstance(t, (CallableType, Overloaded)):
+        t = t.with_unpacked_kwargs()
+    return s, t
+
+
+def is_similar_callables(t: CallableType, s: CallableType) -> bool:
+    """Return True if t and s have identical numbers of
+    arguments, default arguments and varargs.
+    """
+    return (
+        len(t.arg_types) == len(s.arg_types)
+        and t.min_args == s.min_args
+        and t.is_var_arg == s.is_var_arg
+    )
+
+
+def is_similar_params(t: Parameters, s: Parameters) -> bool:
+    # This matches the logic in is_similar_callables() above.
+ return ( + len(t.arg_types) == len(s.arg_types) + and t.min_args == s.min_args + and (t.var_arg() is not None) == (s.var_arg() is not None) + ) + + +def update_callable_ids(c: CallableType, ids: list[TypeVarId]) -> CallableType: + tv_map = {} + tvs = [] + for tv, new_id in zip(c.variables, ids): + new_tv = tv.copy_modified(id=new_id) + tvs.append(new_tv) + tv_map[tv.id] = new_tv + return expand_type(c, tv_map).copy_modified(variables=tvs) + + +def match_generic_callables(t: CallableType, s: CallableType) -> tuple[CallableType, CallableType]: + # The case where we combine/join/meet similar callables, situation where both are generic + # requires special care. A more principled solution may involve unify_generic_callable(), + # but it would have two problems: + # * This adds risk of infinite recursion: e.g. join -> unification -> solver -> join + # * Using unification is an incorrect thing for meets, as it "widens" the types + # Finally, this effectively falls back to an old behaviour before namespaces were added to + # type variables, and it worked relatively well. + max_len = max(len(t.variables), len(s.variables)) + min_len = min(len(t.variables), len(s.variables)) + if min_len == 0: + return t, s + new_ids = [TypeVarId.new(meta_level=0) for _ in range(max_len)] + # Note: this relies on variables being in order they appear in function definition. + return update_callable_ids(t, new_ids), update_callable_ids(s, new_ids) + + +def join_similar_callables(t: CallableType, s: CallableType) -> CallableType: + t, s = match_generic_callables(t, s) + arg_types: list[Type] = [] + for i in range(len(t.arg_types)): + arg_types.append(safe_meet(t.arg_types[i], s.arg_types[i])) + # TODO in combine_similar_callables also applies here (names and kinds; user metaclasses) + # The fallback type can be either 'function', 'type', or some user-provided metaclass. + # The result should always use 'function' as a fallback if either operands are using it. + if t.fallback.type.fullname == "builtins.function": + fallback = t.fallback + else: + fallback = s.fallback + return t.copy_modified( + arg_types=arg_types, + arg_names=combine_arg_names(t, s), + ret_type=join_types(t.ret_type, s.ret_type), + fallback=fallback, + name=None, + ) + + +def safe_join(t: Type, s: Type) -> Type: + # This is a temporary solution to prevent crashes in combine_similar_callables() etc., + # until relevant TODOs on handling arg_kinds will be addressed there. + if not isinstance(t, UnpackType) and not isinstance(s, UnpackType): + return join_types(t, s) + if isinstance(t, UnpackType) and isinstance(s, UnpackType): + return UnpackType(join_types(t.type, s.type)) + return object_or_any_from_type(get_proper_type(t)) + + +def safe_meet(t: Type, s: Type) -> Type: + # Similar to above but for meet_types(). 
+ from mypy.meet import meet_types + + if not isinstance(t, UnpackType) and not isinstance(s, UnpackType): + return meet_types(t, s) + if isinstance(t, UnpackType) and isinstance(s, UnpackType): + unpacked = get_proper_type(t.type) + if isinstance(unpacked, TypeVarTupleType): + fallback_type = unpacked.tuple_fallback.type + elif isinstance(unpacked, TupleType): + fallback_type = unpacked.partial_fallback.type + else: + assert isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple" + fallback_type = unpacked.type + res = meet_types(t.type, s.type) + if isinstance(res, UninhabitedType): + res = Instance(fallback_type, [res]) + return UnpackType(res) + return UninhabitedType() + + +def combine_similar_callables(t: CallableType, s: CallableType) -> CallableType: + t, s = match_generic_callables(t, s) + arg_types: list[Type] = [] + for i in range(len(t.arg_types)): + arg_types.append(safe_join(t.arg_types[i], s.arg_types[i])) + # TODO kinds and argument names + # TODO what should happen if one fallback is 'type' and the other is a user-provided metaclass? + # The fallback type can be either 'function', 'type', or some user-provided metaclass. + # The result should always use 'function' as a fallback if either operands are using it. + if t.fallback.type.fullname == "builtins.function": + fallback = t.fallback + else: + fallback = s.fallback + return t.copy_modified( + arg_types=arg_types, + arg_names=combine_arg_names(t, s), + ret_type=join_types(t.ret_type, s.ret_type), + fallback=fallback, + name=None, + ) + + +def combine_arg_names( + t: CallableType | Parameters, s: CallableType | Parameters +) -> list[str | None]: + """Produces a list of argument names compatible with both callables. + + For example, suppose 't' and 's' have the following signatures: + + - t: (a: int, b: str, X: str) -> None + - s: (a: int, b: str, Y: str) -> None + + This function would return ["a", "b", None]. This information + is then used above to compute the join of t and s, which results + in a signature of (a: int, b: str, str) -> None. + + Note that the third argument's name is omitted and 't' and 's' + are both valid subtypes of this inferred signature. + + Precondition: is_similar_types(t, s) is true. + """ + num_args = len(t.arg_types) + new_names = [] + for i in range(num_args): + t_name = t.arg_names[i] + s_name = s.arg_names[i] + if t_name == s_name or t.arg_kinds[i].is_named() or s.arg_kinds[i].is_named(): + new_names.append(t_name) + else: + new_names.append(None) + return new_names + + +def object_from_instance(instance: Instance) -> Instance: + """Construct the type 'builtins.object' from an instance type.""" + # Use the fact that 'object' is always the last class in the mro. + res = Instance(instance.type.mro[-1], []) + return res + + +def object_or_any_from_type(typ: ProperType) -> ProperType: + # Similar to object_from_instance() but tries hard for all types. + # TODO: find a better way to get object, or make this more reliable. 
+ if isinstance(typ, Instance): + return object_from_instance(typ) + elif isinstance(typ, (CallableType, TypedDictType, LiteralType)): + return object_from_instance(typ.fallback) + elif isinstance(typ, TupleType): + return object_from_instance(typ.partial_fallback) + elif isinstance(typ, TypeType): + return object_or_any_from_type(typ.item) + elif isinstance(typ, TypeVarLikeType) and isinstance(typ.upper_bound, ProperType): + return object_or_any_from_type(typ.upper_bound) + elif isinstance(typ, UnionType): + for item in typ.items: + if isinstance(item, ProperType): + candidate = object_or_any_from_type(item) + if isinstance(candidate, Instance): + return candidate + elif isinstance(typ, UnpackType): + object_or_any_from_type(get_proper_type(typ.type)) + return AnyType(TypeOfAny.implementation_artifact) + + +def join_type_list(types: Sequence[Type]) -> Type: + if not types: + # This is a little arbitrary but reasonable. Any empty tuple should be compatible + # with all variable length tuples, and this makes it possible. + return UninhabitedType() + joined = types[0] + for t in types[1:]: + joined = join_types(joined, t) + return joined + + +def unpack_callback_protocol(t: Instance) -> ProperType | None: + assert t.type.is_protocol + if t.type.protocol_members == ["__call__"]: + return get_proper_type(find_member("__call__", t, t, is_operator=True)) + return None diff --git a/.venv/lib/python3.12/site-packages/mypy/literals.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/literals.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..ff1e4b8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/literals.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/literals.py b/.venv/lib/python3.12/site-packages/mypy/literals.py new file mode 100644 index 0000000..fd17e04 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/literals.py @@ -0,0 +1,324 @@ +from __future__ import annotations + +from collections.abc import Iterable +from typing import Any, Final, Optional +from typing_extensions import TypeAlias as _TypeAlias + +from mypy.nodes import ( + LITERAL_NO, + LITERAL_TYPE, + LITERAL_YES, + AssertTypeExpr, + AssignmentExpr, + AwaitExpr, + BytesExpr, + CallExpr, + CastExpr, + ComparisonExpr, + ComplexExpr, + ConditionalExpr, + DictExpr, + DictionaryComprehension, + EllipsisExpr, + EnumCallExpr, + Expression, + FloatExpr, + GeneratorExpr, + IndexExpr, + IntExpr, + LambdaExpr, + ListComprehension, + ListExpr, + MemberExpr, + NamedTupleExpr, + NameExpr, + NewTypeExpr, + OpExpr, + ParamSpecExpr, + PromoteExpr, + RevealExpr, + SetComprehension, + SetExpr, + SliceExpr, + StarExpr, + StrExpr, + SuperExpr, + TempNode, + TupleExpr, + TypeAliasExpr, + TypeApplication, + TypedDictExpr, + TypeFormExpr, + TypeVarExpr, + TypeVarTupleExpr, + UnaryExpr, + Var, + YieldExpr, + YieldFromExpr, +) +from mypy.visitor import ExpressionVisitor + +# [Note Literals and literal_hash] +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Mypy uses the term "literal" to refer to any expression built out of +# the following: +# +# * Plain literal expressions, like `1` (integer, float, string, etc.) +# +# * Compound literal expressions, like `(lit1, lit2)` (list, dict, +# set, or tuple) +# +# * Operator expressions, like `lit1 + lit2` +# +# * Variable references, like `x` +# +# * Member references, like `lit.m` +# +# * Index expressions, like `lit[0]` +# +# A typical "literal" looks like `x[(i,j+1)].m`. 
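+#
+# By contrast, expressions such as calls (`f(x)`), comprehensions and lambdas are
+# not literals: literal_hash() returns None for them, so the binder cannot use
+# them as narrowing keys.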
+# +# An expression that is a literal has a `literal_hash`, with the +# following properties. +# +# * `literal_hash` is a Key: a tuple containing basic data types and +# possibly other Keys. So it can be used as a key in a dictionary +# that will be compared by value (as opposed to the Node itself, +# which is compared by identity). +# +# * Two expressions have equal `literal_hash`es if and only if they +# are syntactically equal expressions. (NB: Actually, we also +# identify as equal expressions like `3` and `3.0`; is this a good +# idea?) +# +# * The elements of `literal_hash` that are tuples are exactly the +# subexpressions of the original expression (e.g. the base and index +# of an index expression, or the operands of an operator expression). + + +Key: _TypeAlias = tuple[Any, ...] + + +def literal_hash(e: Expression) -> Key | None: + """Generate a hashable, (mostly) opaque key for expressions supported by the binder. + + These allow using expressions as dictionary keys based on structural/value + matching (instead of based on expression identity). + + Return None if the expression type is not supported (it cannot be narrowed). + + See the comment above for more information. + + NOTE: This is not directly related to literal types. + """ + return e.accept(_hasher) + + +def literal(e: Expression) -> int: + """Return the literal kind for an expression.""" + + if isinstance(e, ComparisonExpr): + return min(literal(o) for o in e.operands) + + elif isinstance(e, OpExpr): + return min(literal(e.left), literal(e.right)) + + elif isinstance(e, (MemberExpr, UnaryExpr, StarExpr)): + return literal(e.expr) + + elif isinstance(e, AssignmentExpr): + return literal(e.target) + + elif isinstance(e, IndexExpr): + if literal(e.index) == LITERAL_YES: + return literal(e.base) + else: + return LITERAL_NO + + elif isinstance(e, NameExpr): + if isinstance(e.node, Var) and e.node.is_final and e.node.final_value is not None: + return LITERAL_YES + return LITERAL_TYPE + + if isinstance(e, (IntExpr, FloatExpr, ComplexExpr, StrExpr, BytesExpr)): + return LITERAL_YES + + if literal_hash(e): + return LITERAL_YES + + return LITERAL_NO + + +def subkeys(key: Key) -> Iterable[Key]: + return [elt for elt in key if isinstance(elt, tuple)] + + +def extract_var_from_literal_hash(key: Key) -> Var | None: + """If key refers to a Var node, return it. + + Return None otherwise. + """ + if len(key) == 2 and key[0] == "Var" and isinstance(key[1], Var): + return key[1] + return None + + +class _Hasher(ExpressionVisitor[Optional[Key]]): + def visit_int_expr(self, e: IntExpr) -> Key: + return ("Literal", e.value) + + def visit_str_expr(self, e: StrExpr) -> Key: + return ("Literal", e.value) + + def visit_bytes_expr(self, e: BytesExpr) -> Key: + return ("Literal", e.value) + + def visit_float_expr(self, e: FloatExpr) -> Key: + return ("Literal", e.value) + + def visit_complex_expr(self, e: ComplexExpr) -> Key: + return ("Literal", e.value) + + def visit_star_expr(self, e: StarExpr) -> Key: + return ("Star", literal_hash(e.expr)) + + def visit_name_expr(self, e: NameExpr) -> Key: + if isinstance(e.node, Var) and e.node.is_final and e.node.final_value is not None: + return ("Literal", e.node.final_value) + # N.B: We use the node itself as the key, and not the name, + # because using the name causes issues when there is shadowing + # (for example, in list comprehensions). 
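+        # Editor's illustration (not upstream text): with `v` being the Var node bound to
+        # a plain name `x`, the key returned below is ("Var", v); composite expressions
+        # nest such keys, e.g. roughly:
+        #   x.y   -> ("Member", ("Var", v), "y")
+        #   x[0]  -> ("Index", ("Var", v), ("Literal", 0))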
+ return ("Var", e.node) + + def visit_member_expr(self, e: MemberExpr) -> Key: + return ("Member", literal_hash(e.expr), e.name) + + def visit_op_expr(self, e: OpExpr) -> Key: + return ("Binary", e.op, literal_hash(e.left), literal_hash(e.right)) + + def visit_comparison_expr(self, e: ComparisonExpr) -> Key: + rest: tuple[str | Key | None, ...] = tuple(e.operators) + rest += tuple(literal_hash(o) for o in e.operands) + return ("Comparison",) + rest + + def visit_unary_expr(self, e: UnaryExpr) -> Key: + return ("Unary", e.op, literal_hash(e.expr)) + + def seq_expr(self, e: ListExpr | TupleExpr | SetExpr, name: str) -> Key | None: + if all(literal(x) == LITERAL_YES for x in e.items): + rest: tuple[Key | None, ...] = tuple(literal_hash(x) for x in e.items) + return (name,) + rest + return None + + def visit_list_expr(self, e: ListExpr) -> Key | None: + return self.seq_expr(e, "List") + + def visit_dict_expr(self, e: DictExpr) -> Key | None: + if all(a and literal(a) == literal(b) == LITERAL_YES for a, b in e.items): + rest: tuple[Key | None, ...] = tuple( + (literal_hash(a) if a else None, literal_hash(b)) for a, b in e.items + ) + return ("Dict",) + rest + return None + + def visit_tuple_expr(self, e: TupleExpr) -> Key | None: + return self.seq_expr(e, "Tuple") + + def visit_set_expr(self, e: SetExpr) -> Key | None: + return self.seq_expr(e, "Set") + + def visit_index_expr(self, e: IndexExpr) -> Key | None: + if literal(e.index) == LITERAL_YES: + return ("Index", literal_hash(e.base), literal_hash(e.index)) + return None + + def visit_assignment_expr(self, e: AssignmentExpr) -> Key | None: + return literal_hash(e.target) + + def visit_call_expr(self, e: CallExpr) -> None: + return None + + def visit_slice_expr(self, e: SliceExpr) -> None: + return None + + def visit_cast_expr(self, e: CastExpr) -> None: + return None + + def visit_type_form_expr(self, e: TypeFormExpr) -> None: + return None + + def visit_assert_type_expr(self, e: AssertTypeExpr) -> None: + return None + + def visit_conditional_expr(self, e: ConditionalExpr) -> None: + return None + + def visit_ellipsis(self, e: EllipsisExpr) -> None: + return None + + def visit_yield_from_expr(self, e: YieldFromExpr) -> None: + return None + + def visit_yield_expr(self, e: YieldExpr) -> None: + return None + + def visit_reveal_expr(self, e: RevealExpr) -> None: + return None + + def visit_super_expr(self, e: SuperExpr) -> None: + return None + + def visit_type_application(self, e: TypeApplication) -> None: + return None + + def visit_lambda_expr(self, e: LambdaExpr) -> None: + return None + + def visit_list_comprehension(self, e: ListComprehension) -> None: + return None + + def visit_set_comprehension(self, e: SetComprehension) -> None: + return None + + def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> None: + return None + + def visit_generator_expr(self, e: GeneratorExpr) -> None: + return None + + def visit_type_var_expr(self, e: TypeVarExpr) -> None: + return None + + def visit_paramspec_expr(self, e: ParamSpecExpr) -> None: + return None + + def visit_type_var_tuple_expr(self, e: TypeVarTupleExpr) -> None: + return None + + def visit_type_alias_expr(self, e: TypeAliasExpr) -> None: + return None + + def visit_namedtuple_expr(self, e: NamedTupleExpr) -> None: + return None + + def visit_enum_call_expr(self, e: EnumCallExpr) -> None: + return None + + def visit_typeddict_expr(self, e: TypedDictExpr) -> None: + return None + + def visit_newtype_expr(self, e: NewTypeExpr) -> None: + return None + + def 
visit__promote_expr(self, e: PromoteExpr) -> None: + return None + + def visit_await_expr(self, e: AwaitExpr) -> None: + return None + + def visit_temp_node(self, e: TempNode) -> None: + return None + + +_hasher: Final = _Hasher() diff --git a/.venv/lib/python3.12/site-packages/mypy/lookup.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/lookup.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..94926f2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/lookup.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/lookup.py b/.venv/lib/python3.12/site-packages/mypy/lookup.py new file mode 100644 index 0000000..640481f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/lookup.py @@ -0,0 +1,65 @@ +""" +This is a module for various lookup functions: +functions that will find a semantic node by its name. +""" + +from __future__ import annotations + +from mypy.nodes import MypyFile, SymbolTableNode, TypeInfo + +# TODO: gradually move existing lookup functions to this module. + + +def lookup_fully_qualified( + name: str, modules: dict[str, MypyFile], *, raise_on_missing: bool = False +) -> SymbolTableNode | None: + """Find a symbol using it fully qualified name. + + The algorithm has two steps: first we try splitting the name on '.' to find + the module, then iteratively look for each next chunk after a '.' (e.g. for + nested classes). + + This function should *not* be used to find a module. Those should be looked + in the modules dictionary. + """ + # 1. Exclude the names of ad hoc instance intersections from step 2. + i = name.find(" os.stat_result: + try: + st = orig_stat(path) + except OSError as err: + print(f"stat({path!r}) -> {err}") + raise + else: + print( + "stat(%r) -> (st_mode=%o, st_mtime=%d, st_size=%d)" + % (path, st.st_mode, st.st_mtime, st.st_size) + ) + return st + + +def main( + *, + args: list[str] | None = None, + stdout: TextIO = sys.stdout, + stderr: TextIO = sys.stderr, + clean_exit: bool = False, +) -> None: + """Main entry point to the type checker. + + Args: + args: Custom command-line arguments. If not given, sys.argv[1:] will + be used. + clean_exit: Don't hard kill the process on exit. This allows catching + SystemExit. + """ + util.check_python_version("mypy") + t0 = time.time() + # To log stat() calls: os.stat = stat_proxy + sys.setrecursionlimit(RECURSION_LIMIT) + if args is None: + args = sys.argv[1:] + + # Write an escape sequence instead of raising an exception on encoding errors. + if isinstance(stdout, TextIOWrapper) and stdout.errors == "strict": + stdout.reconfigure(errors="backslashreplace") + + fscache = FileSystemCache() + sources, options = process_options(args, stdout=stdout, stderr=stderr, fscache=fscache) + if clean_exit: + options.fast_exit = False + + formatter = util.FancyFormatter( + stdout, stderr, options.hide_error_codes, hide_success=bool(options.output) + ) + + if options.allow_redefinition_new and not options.local_partial_types: + fail( + "error: --local-partial-types must be enabled if using --allow-redefinition-new", + stderr, + options, + ) + + if options.install_types and (stdout is not sys.stdout or stderr is not sys.stderr): + # Since --install-types performs user input, we want regular stdout and stderr. 
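+        # Editor's note (descriptive, grounded in fail() defined further below): each of
+        # these option-validation branches writes its message to stderr, emits a JUnit
+        # file when --junit-xml was requested, and exits the process with status 2.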
+ fail("error: --install-types not supported in this mode of running mypy", stderr, options) + + if options.non_interactive and not options.install_types: + fail("error: --non-interactive is only supported with --install-types", stderr, options) + + if options.install_types and not options.incremental: + fail( + "error: --install-types not supported with incremental mode disabled", stderr, options + ) + + if options.install_types and options.python_executable is None: + fail( + "error: --install-types not supported without python executable or site packages", + stderr, + options, + ) + + if options.install_types and not sources: + install_types(formatter, options, non_interactive=options.non_interactive) + return + + res, messages, blockers = run_build(sources, options, fscache, t0, stdout, stderr) + + if options.non_interactive: + missing_pkgs = read_types_packages_to_install(options.cache_dir, after_run=True) + if missing_pkgs: + # Install missing type packages and rerun build. + install_types(formatter, options, after_run=True, non_interactive=True) + fscache.flush() + print() + res, messages, blockers = run_build(sources, options, fscache, t0, stdout, stderr) + show_messages(messages, stderr, formatter, options) + + if MEM_PROFILE: + from mypy.memprofile import print_memory_profile + + print_memory_profile() + + code = 0 + n_errors, n_notes, n_files = util.count_stats(messages) + if messages and n_notes < len(messages): + code = 2 if blockers else 1 + if options.error_summary: + if n_errors: + summary = formatter.format_error( + n_errors, n_files, len(sources), blockers=blockers, use_color=options.color_output + ) + stdout.write(summary + "\n") + # Only notes should also output success + elif not messages or n_notes == len(messages): + stdout.write(formatter.format_success(len(sources), options.color_output) + "\n") + stdout.flush() + + if options.install_types and not options.non_interactive: + result = install_types(formatter, options, after_run=True, non_interactive=False) + if result: + print() + print("note: Run mypy again for up-to-date results with installed types") + code = 2 + + if options.fast_exit: + # Exit without freeing objects -- it's faster. + # + # NOTE: We don't flush all open files on exit (or run other destructors)! + util.hard_exit(code) + elif code: + sys.exit(code) + + # HACK: keep res alive so that mypyc won't free it before the hard_exit + list([res]) # noqa: C410 + + +def run_build( + sources: list[BuildSource], + options: Options, + fscache: FileSystemCache, + t0: float, + stdout: TextIO, + stderr: TextIO, +) -> tuple[build.BuildResult | None, list[str], bool]: + formatter = util.FancyFormatter( + stdout, stderr, options.hide_error_codes, hide_success=bool(options.output) + ) + + messages = [] + messages_by_file = defaultdict(list) + + def flush_errors(filename: str | None, new_messages: list[str], serious: bool) -> None: + if options.pretty: + new_messages = formatter.fit_in_terminal(new_messages) + messages.extend(new_messages) + if new_messages: + messages_by_file[filename].extend(new_messages) + if options.non_interactive: + # Collect messages and possibly show them later. + return + f = stderr if serious else stdout + show_messages(new_messages, f, formatter, options) + + serious = False + blockers = False + res = None + try: + # Keep a dummy reference (res) for memory profiling afterwards, as otherwise + # the result could be freed. 
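+        # Editor's note (descriptive, not upstream text): build.build() reports problems
+        # through the flush_errors callback defined above, so by the time it returns (or
+        # raises CompileError) `messages` and `messages_by_file` already hold the
+        # formatted diagnostics that the caller prints and summarizes.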
+ res = build.build(sources, options, None, flush_errors, fscache, stdout, stderr) + except CompileError as e: + blockers = True + if not e.use_stdout: + serious = True + if ( + options.warn_unused_configs + and options.unused_configs + and not options.incremental + and not options.non_interactive + ): + print( + "Warning: unused section(s) in {}: {}".format( + options.config_file, + get_config_module_names( + options.config_file, + [ + glob + for glob in options.per_module_options.keys() + if glob in options.unused_configs + ], + ), + ), + file=stderr, + ) + maybe_write_junit_xml(time.time() - t0, serious, messages, messages_by_file, options) + return res, messages, blockers + + +def show_messages( + messages: list[str], f: TextIO, formatter: util.FancyFormatter, options: Options +) -> None: + for msg in messages: + if options.color_output: + msg = formatter.colorize(msg) + f.write(msg + "\n") + f.flush() + + +# Make the help output a little less jarring. +class AugmentedHelpFormatter(argparse.RawDescriptionHelpFormatter): + def __init__(self, prog: str, **kwargs: Any) -> None: + super().__init__(prog=prog, max_help_position=28, **kwargs) + + def _fill_text(self, text: str, width: int, indent: str) -> str: + if "\n" in text: + # Assume we want to manually format the text + return super()._fill_text(text, width, indent) + # Format the text like argparse, but overflow rather than + # breaking long words (like URLs) + text = self._whitespace_matcher.sub(" ", text).strip() + import textwrap + + return textwrap.fill( + text, + width, + initial_indent=indent, + subsequent_indent=indent, + break_on_hyphens=False, + break_long_words=False, + ) + + +# Define pairs of flag prefixes with inverse meaning. +flag_prefix_pairs: Final = [("allow", "disallow"), ("show", "hide")] +flag_prefix_map: Final[dict[str, str]] = {} +for a, b in flag_prefix_pairs: + flag_prefix_map[a] = b + flag_prefix_map[b] = a + + +def invert_flag_name(flag: str) -> str: + split = flag[2:].split("-", 1) + if len(split) == 2: + prefix, rest = split + if prefix in flag_prefix_map: + return f"--{flag_prefix_map[prefix]}-{rest}" + elif prefix == "no": + return f"--{rest}" + + return f"--no-{flag[2:]}" + + +class PythonExecutableInferenceError(Exception): + """Represents a failure to infer the version or executable while searching.""" + + +def python_executable_prefix(v: str) -> list[str]: + if sys.platform == "win32": + # on Windows, all Python executables are named `python`. To handle this, there + # is the `py` launcher, which can be passed a version e.g. `py -3.8`, and it will + # execute an installed Python 3.8 interpreter. 
See also: + # https://docs.python.org/3/using/windows.html#python-launcher-for-windows + return ["py", f"-{v}"] + else: + return [f"python{v}"] + + +def _python_executable_from_version(python_version: tuple[int, int]) -> str: + if sys.version_info[:2] == python_version: + return sys.executable + str_ver = ".".join(map(str, python_version)) + try: + sys_exe = ( + subprocess.check_output( + python_executable_prefix(str_ver) + ["-c", "import sys; print(sys.executable)"], + stderr=subprocess.STDOUT, + ) + .decode() + .strip() + ) + return sys_exe + except (subprocess.CalledProcessError, FileNotFoundError) as e: + raise PythonExecutableInferenceError( + "failed to find a Python executable matching version {}," + " perhaps try --python-executable, or --no-site-packages?".format(python_version) + ) from e + + +def infer_python_executable(options: Options, special_opts: argparse.Namespace) -> None: + """Infer the Python executable from the given version. + + This function mutates options based on special_opts to infer the correct Python executable + to use. + """ + # TODO: (ethanhs) Look at folding these checks and the site packages subprocess calls into + # one subprocess call for speed. + + # Use the command line specified executable, or fall back to one set in the + # config file. If an executable is not specified, infer it from the version + # (unless no_executable is set) + python_executable = special_opts.python_executable or options.python_executable + + if python_executable is None: + if not special_opts.no_executable and not options.no_site_packages: + python_executable = _python_executable_from_version(options.python_version) + options.python_executable = python_executable + + +HEADER: Final = """%(prog)s [-h] [-v] [-V] [more options; see below] + [-m MODULE] [-p PACKAGE] [-c PROGRAM_TEXT] [files ...]""" + + +DESCRIPTION: Final = """ +Mypy is a program that will type check your Python code. + +Pass in any files or folders you want to type check. Mypy will +recursively traverse any provided folders to find .py files: + + $ mypy my_program.py my_src_folder + +For more information on getting started, see: + +- https://mypy.readthedocs.io/en/stable/getting_started.html + +For more details on both running mypy and using the flags below, see: + +- https://mypy.readthedocs.io/en/stable/running_mypy.html +- https://mypy.readthedocs.io/en/stable/command_line.html + +You can also use a config file to configure mypy instead of using +command line flags. For more details, see: + +- https://mypy.readthedocs.io/en/stable/config_file.html +""" + +FOOTER: Final = """Environment variables: + Define MYPYPATH for additional module search path entries. + Define MYPY_CACHE_DIR to override configuration cache_dir path.""" + + +class CapturableArgumentParser(argparse.ArgumentParser): + """Override ArgumentParser methods that use sys.stdout/sys.stderr directly. + + This is needed because hijacking sys.std* is not thread-safe, + yet output must be captured to properly support mypy.api.run. 
+ """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + self.stdout = kwargs.pop("stdout", sys.stdout) + self.stderr = kwargs.pop("stderr", sys.stderr) + super().__init__(*args, **kwargs) + + # ===================== + # Help-printing methods + # ===================== + def print_usage(self, file: SupportsWrite[str] | None = None) -> None: + if file is None: + file = self.stdout + self._print_message(self.format_usage(), file) + + def print_help(self, file: SupportsWrite[str] | None = None) -> None: + if file is None: + file = self.stdout + self._print_message(self.format_help(), file) + + def _print_message(self, message: str, file: SupportsWrite[str] | None = None) -> None: + if message: + if file is None: + file = self.stderr + file.write(message) + + # =============== + # Exiting methods + # =============== + def exit(self, status: int = 0, message: str | None = None) -> NoReturn: + if message: + self._print_message(message, self.stderr) + sys.exit(status) + + def error(self, message: str) -> NoReturn: + """error(message: string) + + Prints a usage message incorporating the message to stderr and + exits. + + If you override this in a subclass, it should not return -- it + should either exit or raise an exception. + """ + self.print_usage(self.stderr) + args = {"prog": self.prog, "message": message} + self.exit(2, gettext("%(prog)s: error: %(message)s\n") % args) + + +class CapturableVersionAction(argparse.Action): + """Supplement CapturableArgumentParser to handle --version. + + This is nearly identical to argparse._VersionAction except, + like CapturableArgumentParser, it allows output to be captured. + + Another notable difference is that version is mandatory. + This allows removing a line in __call__ that falls back to parser.version + (which does not appear to exist). + """ + + def __init__( + self, + option_strings: Sequence[str], + version: str, + dest: str = argparse.SUPPRESS, + default: str = argparse.SUPPRESS, + help: str = "show program's version number and exit", + stdout: IO[str] | None = None, + ) -> None: + super().__init__( + option_strings=option_strings, dest=dest, default=default, nargs=0, help=help + ) + self.version = version + self.stdout = stdout or sys.stdout + + def __call__( + self, + parser: argparse.ArgumentParser, + namespace: argparse.Namespace, + values: str | Sequence[Any] | None, + option_string: str | None = None, + ) -> NoReturn: + formatter = parser._get_formatter() + formatter.add_text(self.version) + parser._print_message(formatter.format_help(), self.stdout) + parser.exit() + + +def define_options( + program: str = "mypy", + header: str = HEADER, + stdout: TextIO = sys.stdout, + stderr: TextIO = sys.stderr, + server_options: bool = False, +) -> tuple[CapturableArgumentParser, list[str], list[tuple[str, bool]]]: + """Define the options in the parser (by calling a bunch of methods that express/build our desired command-line flags). 
+ Returns a tuple of: + a parser object, that can parse command line arguments to mypy (expected consumer: main's process_options), + a list of what flags are strict (expected consumer: docs' html_builder's _add_strict_list), + strict_flag_assignments (expected consumer: main's process_options).""" + parser = CapturableArgumentParser( + prog=program, + usage=header, + description=DESCRIPTION, + epilog=FOOTER, + fromfile_prefix_chars="@", + formatter_class=AugmentedHelpFormatter, + add_help=False, + stdout=stdout, + stderr=stderr, + ) + + strict_flag_names: list[str] = [] + strict_flag_assignments: list[tuple[str, bool]] = [] + + def add_invertible_flag( + flag: str, + *, + inverse: str | None = None, + default: bool, + dest: str | None = None, + help: str, + strict_flag: bool = False, + group: argparse._ActionsContainer | None = None, + ) -> None: + if inverse is None: + inverse = invert_flag_name(flag) + if group is None: + group = parser + + if help is not argparse.SUPPRESS: + help += f" (inverse: {inverse})" + + arg = group.add_argument( + flag, action="store_false" if default else "store_true", dest=dest, help=help + ) + dest = arg.dest + group.add_argument( + inverse, + action="store_true" if default else "store_false", + dest=dest, + help=argparse.SUPPRESS, + ) + if strict_flag: + assert dest is not None + strict_flag_names.append(flag) + strict_flag_assignments.append((dest, not default)) + + # Unless otherwise specified, arguments will be parsed directly onto an + # Options object. Options that require further processing should have + # their `dest` prefixed with `special-opts:`, which will cause them to be + # parsed into the separate special_opts namespace object. + + # Our style guide for formatting the output of running `mypy --help`: + # Flags: + # 1. The flag help text should start with a capital letter but never end with a period. + # 2. Keep the flag help text brief -- ideally just a single sentence. + # 3. All flags must be a part of a group, unless the flag is deprecated or suppressed. + # 4. Avoid adding new flags to the "miscellaneous" groups -- instead add them to an + # existing group or, if applicable, create a new group. Feel free to move existing + # flags to a new group: just be sure to also update the documentation to match. + # + # Groups: + # 1. The group title and description should start with a capital letter. + # 2. The first sentence of a group description should be written in the bare infinitive. + # Tip: try substituting the group title and description into the following sentence: + # > {group_title}: these flags will {group_description} + # Feel free to add subsequent sentences that add additional details. + # 3. If you cannot think of a meaningful description for a new group, omit it entirely. + # (E.g. see the "miscellaneous" sections). + # 4. The text of the group description should end with a period, optionally followed + # by a documentation reference (URL). + # 5. If you want to include a documentation reference, place it at the end of the + # description. Feel free to open with a brief reference ("See also:", "For more + # information:", etc.), followed by a space, then the entire URL including + # "https://" scheme identifier and fragment ("#some-target-heading"), if any. + # Do not end with a period (or any other characters not part of the URL). + # URLs longer than the available terminal width will overflow without being + # broken apart. This facilitates both URL detection, and manual copy-pasting. 
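+    # Editor's note (illustrative, not upstream text): add_invertible_flag() above
+    # registers each boolean option together with a hidden inverse derived by
+    # invert_flag_name(), e.g. roughly:
+    #   --warn-unused-configs    <->  --no-warn-unused-configs
+    #   --disallow-untyped-defs  <->  --allow-untyped-defs
+    #   --no-implicit-reexport   <->  --implicit-reexport
+    # and, when strict_flag=True, it also records (dest, value) in
+    # strict_flag_assignments so that --strict can apply the whole bundle at once.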
+ + general_group = parser.add_argument_group(title="Optional arguments") + general_group.add_argument( + "-h", "--help", action="help", help="Show this help message and exit" + ) + general_group.add_argument( + "-v", "--verbose", action="count", dest="verbosity", help="More verbose messages" + ) + + compilation_status = "no" if __file__.endswith(".py") else "yes" + general_group.add_argument( + "-V", + "--version", + action=CapturableVersionAction, + version="%(prog)s " + __version__ + f" (compiled: {compilation_status})", + help="Show program's version number and exit", + stdout=stdout, + ) + + general_group.add_argument( + "-O", + "--output", + metavar="FORMAT", + help="Set a custom output format", + choices=OUTPUT_CHOICES, + ) + + config_group = parser.add_argument_group( + title="Config file", + description="Use a config file instead of command line arguments. " + "This is useful if you are using many flags or want " + "to set different options per each module.", + ) + config_group.add_argument( + "--config-file", + help=( + f"Configuration file, must have a [mypy] section " + f"(defaults to {', '.join(defaults.CONFIG_NAMES + defaults.SHARED_CONFIG_NAMES)})" + ), + ) + add_invertible_flag( + "--warn-unused-configs", + default=False, + strict_flag=True, + help="Warn about unused '[mypy-]' or '[[tool.mypy.overrides]]' config sections", + group=config_group, + ) + + imports_group = parser.add_argument_group( + title="Import discovery", description="Configure how imports are discovered and followed." + ) + add_invertible_flag( + "--no-namespace-packages", + dest="namespace_packages", + default=True, + help="Disable support for namespace packages (PEP 420, __init__.py-less)", + group=imports_group, + ) + imports_group.add_argument( + "--ignore-missing-imports", + action="store_true", + help="Silently ignore imports of missing modules", + ) + imports_group.add_argument( + "--follow-untyped-imports", + action="store_true", + help="Typecheck modules without stubs or py.typed marker", + ) + imports_group.add_argument( + "--follow-imports", + choices=["normal", "silent", "skip", "error"], + default="normal", + help="How to treat imports (default normal)", + ) + imports_group.add_argument( + "--python-executable", + action="store", + metavar="EXECUTABLE", + help="Python executable used for finding PEP 561 compliant installed packages and stubs", + dest="special-opts:python_executable", + ) + imports_group.add_argument( + "--no-site-packages", + action="store_true", + dest="special-opts:no_executable", + help="Do not search for installed PEP 561 compliant packages", + ) + imports_group.add_argument( + "--no-silence-site-packages", + action="store_true", + help="Do not silence errors in PEP 561 compliant installed packages", + ) + + platform_group = parser.add_argument_group( + title="Platform configuration", + description="Type check code assuming it will be run under certain " + "runtime conditions. 
By default, mypy assumes your code " + "will be run using the same operating system and Python " + "version you are using to run mypy itself.", + ) + platform_group.add_argument( + "--python-version", + type=parse_version, + metavar="x.y", + help="Type check code assuming it will be running on Python x.y", + dest="special-opts:python_version", + ) + platform_group.add_argument( + "--platform", + action="store", + metavar="PLATFORM", + help="Type check special-cased code for the given OS platform (defaults to sys.platform)", + ) + platform_group.add_argument( + "--always-true", + metavar="NAME", + action="append", + default=[], + help="Additional variable to be considered True (may be repeated)", + ) + platform_group.add_argument( + "--always-false", + metavar="NAME", + action="append", + default=[], + help="Additional variable to be considered False (may be repeated)", + ) + + disallow_any_group = parser.add_argument_group( + title="Disallow dynamic typing", + description="Disallow the use of the dynamic 'Any' type under certain conditions.", + ) + disallow_any_group.add_argument( + "--disallow-any-expr", + default=False, + action="store_true", + help="Disallow all expressions that have type Any", + ) + disallow_any_group.add_argument( + "--disallow-any-decorated", + default=False, + action="store_true", + help="Disallow functions that have Any in their signature after decorator transformation", + ) + disallow_any_group.add_argument( + "--disallow-any-explicit", + default=False, + action="store_true", + help="Disallow explicit Any in type positions", + ) + add_invertible_flag( + "--disallow-any-generics", + default=False, + strict_flag=True, + help="Disallow usage of generic types that do not specify explicit type parameters", + group=disallow_any_group, + ) + add_invertible_flag( + "--disallow-any-unimported", + default=False, + help="Disallow Any types resulting from unfollowed imports", + group=disallow_any_group, + ) + add_invertible_flag( + "--disallow-subclassing-any", + default=False, + strict_flag=True, + help="Disallow subclassing values of type 'Any' when defining classes", + group=disallow_any_group, + ) + + untyped_group = parser.add_argument_group( + title="Untyped definitions and calls", + description="Configure how untyped definitions and calls are handled. 
" + "Note: by default, mypy ignores any untyped function definitions " + "and assumes any calls to such functions have a return " + "type of 'Any'.", + ) + add_invertible_flag( + "--disallow-untyped-calls", + default=False, + strict_flag=True, + help="Disallow calling functions without type annotations" + " from functions with type annotations", + group=untyped_group, + ) + untyped_group.add_argument( + "--untyped-calls-exclude", + metavar="MODULE", + action="append", + default=[], + help="Disable --disallow-untyped-calls for functions/methods coming" + " from specific package, module, or class", + ) + add_invertible_flag( + "--disallow-untyped-defs", + default=False, + strict_flag=True, + help="Disallow defining functions without type annotations" + " or with incomplete type annotations", + group=untyped_group, + ) + add_invertible_flag( + "--disallow-incomplete-defs", + default=False, + strict_flag=True, + help="Disallow defining functions with incomplete type annotations " + "(while still allowing entirely unannotated definitions)", + group=untyped_group, + ) + add_invertible_flag( + "--check-untyped-defs", + default=False, + strict_flag=True, + help="Type check the interior of functions without type annotations", + group=untyped_group, + ) + add_invertible_flag( + "--disallow-untyped-decorators", + default=False, + strict_flag=True, + help="Disallow decorating typed functions with untyped decorators", + group=untyped_group, + ) + + none_group = parser.add_argument_group( + title="None and Optional handling", + description="Adjust how values of type 'None' are handled. For more context on " + "how mypy handles values of type 'None', see: " + "https://mypy.readthedocs.io/en/stable/kinds_of_types.html#optional-types-and-the-none-type", + ) + add_invertible_flag( + "--implicit-optional", + default=False, + help="Assume arguments with default values of None are Optional", + group=none_group, + ) + none_group.add_argument("--strict-optional", action="store_true", help=argparse.SUPPRESS) + none_group.add_argument( + "--no-strict-optional", + action="store_false", + dest="strict_optional", + help="Disable strict Optional checks (inverse: --strict-optional)", + ) + + # This flag is deprecated, Mypy only supports Python 3.9+ + add_invertible_flag( + "--force-uppercase-builtins", default=False, help=argparse.SUPPRESS, group=none_group + ) + + add_invertible_flag( + "--force-union-syntax", default=False, help=argparse.SUPPRESS, group=none_group + ) + + lint_group = parser.add_argument_group( + title="Configuring warnings", + description="Detect code that is sound but redundant or problematic.", + ) + add_invertible_flag( + "--warn-redundant-casts", + default=False, + strict_flag=True, + help="Warn about casting an expression to its inferred type", + group=lint_group, + ) + add_invertible_flag( + "--warn-unused-ignores", + default=False, + strict_flag=True, + help="Warn about unneeded '# type: ignore' comments", + group=lint_group, + ) + add_invertible_flag( + "--no-warn-no-return", + dest="warn_no_return", + default=True, + help="Do not warn about functions that end without returning", + group=lint_group, + ) + add_invertible_flag( + "--warn-return-any", + default=False, + strict_flag=True, + help="Warn about returning values of type Any from non-Any typed functions", + group=lint_group, + ) + add_invertible_flag( + "--warn-unreachable", + default=False, + strict_flag=False, + help="Warn about statements or expressions inferred to be unreachable", + group=lint_group, + ) + add_invertible_flag( + 
"--report-deprecated-as-note", + default=False, + strict_flag=False, + help="Report importing or using deprecated features as notes instead of errors", + group=lint_group, + ) + lint_group.add_argument( + "--deprecated-calls-exclude", + metavar="MODULE", + action="append", + default=[], + help="Disable deprecated warnings for functions/methods coming" + " from specific package, module, or class", + ) + + # Note: this group is intentionally added here even though we don't add + # --strict to this group near the end. + # + # That way, this group will appear after the various strictness groups + # but before the remaining flags. + # We add `--strict` near the end so we don't accidentally miss any strictness + # flags that are added after this group. + strictness_group = parser.add_argument_group(title="Miscellaneous strictness flags") + + add_invertible_flag( + "--allow-untyped-globals", + default=False, + strict_flag=False, + help="Suppress toplevel errors caused by missing annotations", + group=strictness_group, + ) + + add_invertible_flag( + "--allow-redefinition", + default=False, + strict_flag=False, + help="Allow restricted, unconditional variable redefinition with a new type", + group=strictness_group, + ) + + add_invertible_flag( + "--allow-redefinition-new", + default=False, + strict_flag=False, + help="Allow more flexible variable redefinition semantics (experimental)", + group=strictness_group, + ) + + add_invertible_flag( + "--no-implicit-reexport", + default=True, + strict_flag=True, + dest="implicit_reexport", + help="Treat imports as private unless aliased", + group=strictness_group, + ) + + add_invertible_flag( + "--strict-equality", + default=False, + strict_flag=True, + help="Prohibit equality, identity, and container checks for non-overlapping types " + "(except `None`)", + group=strictness_group, + ) + + add_invertible_flag( + "--strict-equality-for-none", + default=False, + strict_flag=False, + help="Extend `--strict-equality` for `None` checks", + group=strictness_group, + ) + + add_invertible_flag( + "--strict-bytes", + default=False, + strict_flag=True, + help="Disable treating bytearray and memoryview as subtypes of bytes", + group=strictness_group, + ) + + add_invertible_flag( + "--extra-checks", + default=False, + strict_flag=True, + help="Enable additional checks that are technically correct but may be impractical " + "in real code. 
For example, this prohibits partial overlap in TypedDict updates, " + "and makes arguments prepended via Concatenate positional-only", + group=strictness_group, + ) + + strict_help = "Strict mode; enables the following flags: {}".format( + ", ".join(strict_flag_names) + ) + strictness_group.add_argument( + "--strict", action="store_true", dest="special-opts:strict", help=strict_help + ) + + strictness_group.add_argument( + "--disable-error-code", + metavar="NAME", + action="append", + default=[], + help="Disable a specific error code", + ) + strictness_group.add_argument( + "--enable-error-code", + metavar="NAME", + action="append", + default=[], + help="Enable a specific error code", + ) + + error_group = parser.add_argument_group( + title="Configuring error messages", + description="Adjust the amount of detail shown in error messages.", + ) + add_invertible_flag( + "--show-error-context", + default=False, + dest="show_error_context", + help='Precede errors with "note:" messages explaining context', + group=error_group, + ) + add_invertible_flag( + "--show-column-numbers", + default=False, + help="Show column numbers in error messages", + group=error_group, + ) + add_invertible_flag( + "--show-error-end", + default=False, + help="Show end line/end column numbers in error messages." + " This implies --show-column-numbers", + group=error_group, + ) + add_invertible_flag( + "--hide-error-codes", + default=False, + help="Hide error codes in error messages", + group=error_group, + ) + add_invertible_flag( + "--show-error-code-links", + default=False, + help="Show links to error code documentation", + group=error_group, + ) + add_invertible_flag( + "--pretty", + default=False, + help="Use visually nicer output in error messages:" + " Use soft word wrap, show source code snippets," + " and show error location markers", + group=error_group, + ) + add_invertible_flag( + "--no-color-output", + dest="color_output", + default=True, + help="Do not colorize error messages", + group=error_group, + ) + add_invertible_flag( + "--no-error-summary", + dest="error_summary", + default=True, + help="Do not show error stats summary", + group=error_group, + ) + add_invertible_flag( + "--show-absolute-path", + default=False, + help="Show absolute paths to files", + group=error_group, + ) + error_group.add_argument( + "--soft-error-limit", + default=defaults.MANY_ERRORS_THRESHOLD, + type=int, + dest="many_errors_threshold", + help=argparse.SUPPRESS, + ) + + incremental_group = parser.add_argument_group( + title="Incremental mode", + description="Adjust how mypy incrementally type checks and caches modules. " + "Mypy caches type information about modules into a cache to " + "let you speed up future invocations of mypy. 
Also see " + "mypy's daemon mode: " + "https://mypy.readthedocs.io/en/stable/mypy_daemon.html#mypy-daemon", + ) + incremental_group.add_argument( + "-i", "--incremental", action="store_true", help=argparse.SUPPRESS + ) + incremental_group.add_argument( + "--no-incremental", + action="store_false", + dest="incremental", + help="Disable module cache (inverse: --incremental)", + ) + incremental_group.add_argument( + "--cache-dir", + action="store", + metavar="DIR", + help="Store module cache info in the given folder in incremental mode " + "(defaults to '{}')".format(defaults.CACHE_DIR), + ) + add_invertible_flag( + "--sqlite-cache", + default=False, + help="Use a sqlite database to store the cache", + group=incremental_group, + ) + incremental_group.add_argument( + "--cache-fine-grained", + action="store_true", + help="Include fine-grained dependency information in the cache for the mypy daemon", + ) + incremental_group.add_argument( + "--fixed-format-cache", + action="store_true", + help="Use new fast and compact fixed format cache", + ) + incremental_group.add_argument( + "--skip-version-check", + action="store_true", + help="Allow using cache written by older mypy version", + ) + incremental_group.add_argument( + "--skip-cache-mtime-checks", + action="store_true", + help="Skip cache internal consistency checks based on mtime", + ) + + internals_group = parser.add_argument_group( + title="Advanced options", description="Debug and customize mypy internals." + ) + internals_group.add_argument("--pdb", action="store_true", help="Invoke pdb on fatal error") + internals_group.add_argument( + "--show-traceback", "--tb", action="store_true", help="Show traceback on fatal error" + ) + internals_group.add_argument( + "--raise-exceptions", action="store_true", help="Raise exception on fatal error" + ) + internals_group.add_argument( + "--custom-typing-module", + metavar="MODULE", + dest="custom_typing_module", + help="Use a custom typing module", + ) + internals_group.add_argument( + "--old-type-inference", action="store_true", help=argparse.SUPPRESS + ) + internals_group.add_argument( + "--disable-expression-cache", action="store_true", help=argparse.SUPPRESS + ) + parser.add_argument( + "--enable-incomplete-feature", + action="append", + metavar="{" + ",".join(sorted(INCOMPLETE_FEATURES)) + "}", + help="Enable support of incomplete/experimental features for early preview", + ) + internals_group.add_argument( + "--custom-typeshed-dir", metavar="DIR", help="Use the custom typeshed in DIR" + ) + add_invertible_flag( + "--warn-incomplete-stub", + default=False, + help="Warn if missing type annotation in typeshed, only relevant with" + " --disallow-untyped-defs or --disallow-incomplete-defs enabled", + group=internals_group, + ) + internals_group.add_argument( + "--shadow-file", + nargs=2, + metavar=("SOURCE_FILE", "SHADOW_FILE"), + dest="shadow_file", + action="append", + help="When encountering SOURCE_FILE, read and type check " + "the contents of SHADOW_FILE instead.", + ) + internals_group.add_argument("--fast-exit", action="store_true", help=argparse.SUPPRESS) + internals_group.add_argument( + "--no-fast-exit", action="store_false", dest="fast_exit", help=argparse.SUPPRESS + ) + # This flag is useful for mypy tests, where function bodies may be omitted. Plugin developers + # may want to use this as well in their tests. 
+ add_invertible_flag( + "--allow-empty-bodies", default=False, help=argparse.SUPPRESS, group=internals_group + ) + # This undocumented feature exports limited line-level dependency information. + internals_group.add_argument("--export-ref-info", action="store_true", help=argparse.SUPPRESS) + + report_group = parser.add_argument_group( + title="Report generation", description="Generate a report in the specified format." + ) + for report_type in sorted(defaults.REPORTER_NAMES): + if report_type not in {"memory-xml"}: + report_group.add_argument( + f"--{report_type.replace('_', '-')}-report", + metavar="DIR", + dest=f"special-opts:{report_type}_report", + ) + + # Undocumented mypyc feature: generate annotated HTML source file + report_group.add_argument( + "-a", dest="mypyc_annotation_file", type=str, default=None, help=argparse.SUPPRESS + ) + # Hidden mypyc feature: do not write any C files (keep existing ones and assume they exist). + # This can be useful when debugging mypyc bugs. + report_group.add_argument( + "--skip-c-gen", dest="mypyc_skip_c_generation", action="store_true", help=argparse.SUPPRESS + ) + + misc_group = parser.add_argument_group(title="Miscellaneous") + misc_group.add_argument("--quickstart-file", help=argparse.SUPPRESS) + misc_group.add_argument( + "--junit-xml", + metavar="JUNIT_XML_OUTPUT_FILE", + help="Write a JUnit XML test result document with type checking results to the given file", + ) + misc_group.add_argument( + "--junit-format", + choices=["global", "per_file"], + default="global", + help="If --junit-xml is set, specifies format. global (default): single test with all errors; per_file: one test entry per file with failures", + ) + misc_group.add_argument( + "--find-occurrences", + metavar="CLASS.MEMBER", + dest="special-opts:find_occurrences", + help="Print out all usages of a class member (experimental)", + ) + misc_group.add_argument( + "--scripts-are-modules", + action="store_true", + help="Script x becomes module x instead of __main__", + ) + + add_invertible_flag( + "--install-types", + default=False, + strict_flag=False, + help="Install detected missing library stub packages using pip", + group=misc_group, + ) + add_invertible_flag( + "--non-interactive", + default=False, + strict_flag=False, + help=( + "Install stubs without asking for confirmation and hide " + + "errors, with --install-types" + ), + group=misc_group, + inverse="--interactive", + ) + + if server_options: + misc_group.add_argument( + "--use-fine-grained-cache", + action="store_true", + help="Use the cache in fine-grained incremental mode", + ) + + # hidden options + parser.add_argument( + "--stats", action="store_true", dest="dump_type_stats", help=argparse.SUPPRESS + ) + parser.add_argument( + "--inferstats", action="store_true", dest="dump_inference_stats", help=argparse.SUPPRESS + ) + parser.add_argument("--dump-build-stats", action="store_true", help=argparse.SUPPRESS) + # Dump timing stats for each processed file into the given output file + parser.add_argument("--timing-stats", dest="timing_stats", help=argparse.SUPPRESS) + # Dump per line type checking timing stats for each processed file into the given + # output file. Only total time spent in each top level expression will be shown. + # Times are show in microseconds. 
+ parser.add_argument( + "--line-checking-stats", dest="line_checking_stats", help=argparse.SUPPRESS + ) + # --debug-cache will disable any cache-related compressions/optimizations, + # which will make the cache writing process output pretty-printed JSON (which + # is easier to debug). + parser.add_argument("--debug-cache", action="store_true", help=argparse.SUPPRESS) + # --dump-deps will dump all fine-grained dependencies to stdout + parser.add_argument("--dump-deps", action="store_true", help=argparse.SUPPRESS) + # --dump-graph will dump the contents of the graph of SCCs and exit. + parser.add_argument("--dump-graph", action="store_true", help=argparse.SUPPRESS) + # --semantic-analysis-only does exactly that. + parser.add_argument("--semantic-analysis-only", action="store_true", help=argparse.SUPPRESS) + # Some tests use this to tell mypy that we are running a test. + parser.add_argument("--test-env", action="store_true", help=argparse.SUPPRESS) + # --local-partial-types disallows partial types spanning module top level and a function + # (implicitly defined in fine-grained incremental mode) + add_invertible_flag("--local-partial-types", default=False, help=argparse.SUPPRESS) + # --logical-deps adds some more dependencies that are not semantically needed, but + # may be helpful to determine relative importance of classes and functions for overall + # type precision in a code base. It also _removes_ some deps, so this flag should be never + # used except for generating code stats. This also automatically enables --cache-fine-grained. + # NOTE: This is an experimental option that may be modified or removed at any time. + parser.add_argument("--logical-deps", action="store_true", help=argparse.SUPPRESS) + # --bazel changes some behaviors for use with Bazel (https://bazel.build). + parser.add_argument("--bazel", action="store_true", help=argparse.SUPPRESS) + # --package-root adds a directory below which directories are considered + # packages even without __init__.py. May be repeated. + parser.add_argument( + "--package-root", metavar="ROOT", action="append", default=[], help=argparse.SUPPRESS + ) + # --cache-map FILE ... gives a mapping from source files to cache files. + # Each triple of arguments is a source file, a cache meta file, and a cache data file. + # Modules not mentioned in the file will go through cache_dir. + # Must be followed by another flag or by '--' (and then only file args may follow). + parser.add_argument( + "--cache-map", nargs="+", dest="special-opts:cache_map", help=argparse.SUPPRESS + ) + # --debug-serialize will run tree.serialize() even if cache generation is disabled. + # Useful for mypy_primer to detect serialize errors earlier. + parser.add_argument("--debug-serialize", action="store_true", help=argparse.SUPPRESS) + + parser.add_argument( + "--disable-bytearray-promotion", action="store_true", help=argparse.SUPPRESS + ) + parser.add_argument( + "--disable-memoryview-promotion", action="store_true", help=argparse.SUPPRESS + ) + # This flag is deprecated, it has been moved to --extra-checks + parser.add_argument("--strict-concatenate", action="store_true", help=argparse.SUPPRESS) + + # options specifying code to check + code_group = parser.add_argument_group( + title="Running code", + description="Specify the code you want to type check. 
For more details, see " + "https://mypy.readthedocs.io/en/stable/running_mypy.html#running-mypy", + ) + add_invertible_flag( + "--explicit-package-bases", + default=False, + help="Use current directory and MYPYPATH to determine module names of files passed", + group=code_group, + ) + add_invertible_flag( + "--fast-module-lookup", default=False, help=argparse.SUPPRESS, group=code_group + ) + code_group.add_argument( + "--exclude", + action="append", + metavar="PATTERN", + default=[], + help=( + "Regular expression to match file names, directory names or paths which mypy should " + "ignore while recursively discovering files to check, e.g. --exclude '/setup\\.py$'. " + "May be specified more than once, eg. --exclude a --exclude b" + ), + ) + add_invertible_flag( + "--exclude-gitignore", + default=False, + help=( + "Use .gitignore file(s) to exclude files from checking " + "(in addition to any explicit --exclude if present)" + ), + group=code_group, + ) + code_group.add_argument( + "-m", + "--module", + action="append", + metavar="MODULE", + default=[], + dest="special-opts:modules", + help="Type-check module; can repeat for more modules", + ) + code_group.add_argument( + "-p", + "--package", + action="append", + metavar="PACKAGE", + default=[], + dest="special-opts:packages", + help="Type-check package recursively; can be repeated", + ) + code_group.add_argument( + "-c", + "--command", + action="append", + metavar="PROGRAM_TEXT", + dest="special-opts:command", + help="Type-check program passed in as string", + ) + code_group.add_argument( + metavar="files", + nargs="*", + dest="special-opts:files", + help="Type-check given files or directories", + ) + return parser, strict_flag_names, strict_flag_assignments + + +def process_options( + args: list[str], + stdout: TextIO | None = None, + stderr: TextIO | None = None, + require_targets: bool = True, + server_options: bool = False, + fscache: FileSystemCache | None = None, + program: str = "mypy", + header: str = HEADER, +) -> tuple[list[BuildSource], Options]: + """Parse command line arguments. + + If a FileSystemCache is passed in, and package_root options are given, + call fscache.set_package_root() to set the cache's package root. + + Returns a tuple of: a list of source files, an Options collected from flags. + """ + stdout = stdout if stdout is not None else sys.stdout + stderr = stderr if stderr is not None else sys.stderr + + parser, _, strict_flag_assignments = define_options( + program, header, stdout, stderr, server_options + ) + + # Parse arguments once into a dummy namespace so we can get the + # filename for the config file and know if the user requested all strict options. + dummy = argparse.Namespace() + parser.parse_args(args, dummy) + config_file = dummy.config_file + # Don't explicitly test if "config_file is not None" for this check. + # This lets `--config-file=` (an empty string) be used to disable all config files. + if config_file and not os.path.exists(config_file): + parser.error(f"Cannot find config file '{config_file}'") + + options = Options() + strict_option_set = False + + def set_strict_flags() -> None: + nonlocal strict_option_set + strict_option_set = True + for dest, value in strict_flag_assignments: + setattr(options, dest, value) + + # Parse config file first, so command line can override. + parse_config_file(options, set_strict_flags, config_file, stdout, stderr) + + # Set strict flags before parsing (if strict mode enabled), so other command + # line options can override. 
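+    # Editor's note (illustrative, not upstream text): the resulting precedence is,
+    # from lowest to highest, config file < --strict bundle < individual command-line
+    # flags; e.g. `mypy --strict --no-warn-unused-configs ...` ends up with
+    # warn_unused_configs disabled even though --strict alone would enable it.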
+ if getattr(dummy, "special-opts:strict"): + set_strict_flags() + + # Override cache_dir if provided in the environment + environ_cache_dir = os.getenv("MYPY_CACHE_DIR", "") + if environ_cache_dir.strip(): + options.cache_dir = environ_cache_dir + options.cache_dir = os.path.expanduser(options.cache_dir) + + # Parse command line for real, using a split namespace. + special_opts = argparse.Namespace() + parser.parse_args(args, SplitNamespace(options, special_opts, "special-opts:")) + + # The python_version is either the default, which can be overridden via a config file, + # or stored in special_opts and is passed via the command line. + options.python_version = special_opts.python_version or options.python_version + if options.python_version < (3,): + parser.error( + "Mypy no longer supports checking Python 2 code. " + "Consider pinning to mypy<0.980 if you need to check Python 2 code." + ) + try: + infer_python_executable(options, special_opts) + except PythonExecutableInferenceError as e: + parser.error(str(e)) + + if special_opts.no_executable or options.no_site_packages: + options.python_executable = None + + # Paths listed in the config file will be ignored if any paths, modules or packages + # are passed on the command line. + if not (special_opts.files or special_opts.packages or special_opts.modules): + if options.files: + special_opts.files = options.files + if options.packages: + special_opts.packages = options.packages + if options.modules: + special_opts.modules = options.modules + + # Check for invalid argument combinations. + if require_targets: + code_methods = sum( + bool(c) + for c in [ + special_opts.modules + special_opts.packages, + special_opts.command, + special_opts.files, + ] + ) + if code_methods == 0 and not options.install_types: + parser.error("Missing target module, package, files, or command.") + elif code_methods > 1: + parser.error("May only specify one of: module/package, files, or command.") + if options.explicit_package_bases and not options.namespace_packages: + parser.error( + "Can only use --explicit-package-bases with --namespace-packages, since otherwise " + "examining __init__.py's is sufficient to determine module names for files" + ) + + # Check for overlapping `--always-true` and `--always-false` flags. + overlap = set(options.always_true) & set(options.always_false) + if overlap: + parser.error( + "You can't make a variable always true and always false (%s)" + % ", ".join(sorted(overlap)) + ) + + validate_package_allow_list(options.untyped_calls_exclude) + validate_package_allow_list(options.deprecated_calls_exclude) + + options.process_incomplete_features(error_callback=parser.error, warning_callback=print) + + # Compute absolute path for custom typeshed (if present). + if options.custom_typeshed_dir is not None: + options.abs_custom_typeshed_dir = os.path.abspath(options.custom_typeshed_dir) + + # Set build flags. + if special_opts.find_occurrences: + _find_occurrences = tuple(special_opts.find_occurrences.split(".")) + if len(_find_occurrences) < 2: + parser.error("Can only find occurrences of class members.") + if len(_find_occurrences) != 2: + parser.error("Can only find occurrences of non-nested class members.") + state.find_occurrences = _find_occurrences + + # Set reports. + for flag, val in vars(special_opts).items(): + if flag.endswith("_report") and val is not None: + report_type = flag[:-7].replace("_", "-") + report_dir = val + options.report_dirs[report_type] = report_dir + + # Process --package-root. 
+ if options.package_root: + process_package_roots(fscache, parser, options) + + # Process --cache-map. + if special_opts.cache_map: + if options.sqlite_cache: + parser.error("--cache-map is incompatible with --sqlite-cache") + + process_cache_map(parser, special_opts, options) + + # Process --strict-bytes + options.process_strict_bytes() + + # An explicitly specified cache_fine_grained implies local_partial_types + # (because otherwise the cache is not compatible with dmypy) + if options.cache_fine_grained: + options.local_partial_types = True + + # Implicitly show column numbers if error location end is shown + if options.show_error_end: + options.show_column_numbers = True + + # Let logical_deps imply cache_fine_grained (otherwise the former is useless). + if options.logical_deps: + options.cache_fine_grained = True + + if options.strict_concatenate and not strict_option_set: + print("Warning: --strict-concatenate is deprecated; use --extra-checks instead") + + if options.force_uppercase_builtins: + print("Warning: --force-uppercase-builtins is deprecated; mypy only supports Python 3.9+") + + # Set target. + if special_opts.modules + special_opts.packages: + options.build_type = BuildType.MODULE + sys_path, _ = get_search_dirs(options.python_executable) + search_paths = SearchPaths( + (os.getcwd(),), tuple(mypy_path() + options.mypy_path), tuple(sys_path), () + ) + targets = [] + # TODO: use the same cache that the BuildManager will + cache = FindModuleCache(search_paths, fscache, options) + for p in special_opts.packages: + if os.sep in p or os.altsep and os.altsep in p: + fail(f"Package name '{p}' cannot have a slash in it.", stderr, options) + p_targets = cache.find_modules_recursive(p) + if not p_targets: + reason = cache.find_module(p) + if reason is ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS: + fail( + f"Package '{p}' cannot be type checked due to missing py.typed marker. See https://mypy.readthedocs.io/en/stable/installed_packages.html for more details", + stderr, + options, + ) + else: + fail(f"Can't find package '{p}'", stderr, options) + targets.extend(p_targets) + for m in special_opts.modules: + targets.append(BuildSource(None, m, None)) + elif special_opts.command: + options.build_type = BuildType.PROGRAM_TEXT + targets = [BuildSource(None, None, "\n".join(special_opts.command))] + else: + try: + targets = create_source_list(special_opts.files, options, fscache) + # Variable named e2 instead of e to work around mypyc bug #620 + # which causes issues when using the same variable to catch + # exceptions of different types. + except InvalidSourceList as e2: + fail(str(e2), stderr, options) + return targets, options + + +def process_package_roots( + fscache: FileSystemCache | None, parser: argparse.ArgumentParser, options: Options +) -> None: + """Validate and normalize package_root.""" + if fscache is None: + parser.error("--package-root does not work here (no fscache)") + assert fscache is not None # Since mypy doesn't know parser.error() raises. + # Do some stuff with drive letters to make Windows happy (esp. tests). 
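+    # Editor's illustration (not upstream text): e.g. "src" is normalized to "src" + os.sep,
+    # "." or "./" collapses to "" (a trivial but allowed root), while absolute paths and
+    # anything starting with "../" are rejected with a parser error.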
+ current_drive, _ = os.path.splitdrive(os.getcwd()) + dot = os.curdir + dotslash = os.curdir + os.sep + dotdotslash = os.pardir + os.sep + trivial_paths = {dot, dotslash} + package_root = [] + for root in options.package_root: + if os.path.isabs(root): + parser.error(f"Package root cannot be absolute: {root!r}") + drive, root = os.path.splitdrive(root) + if drive and drive != current_drive: + parser.error(f"Package root must be on current drive: {drive + root!r}") + # Empty package root is always okay. + if root: + root = os.path.relpath(root) # Normalize the heck out of it. + if not root.endswith(os.sep): + root = root + os.sep + if root.startswith(dotdotslash): + parser.error(f"Package root cannot be above current directory: {root!r}") + if root in trivial_paths: + root = "" + package_root.append(root) + options.package_root = package_root + # Pass the package root on the filesystem cache. + fscache.set_package_root(package_root) + + +def process_cache_map( + parser: argparse.ArgumentParser, special_opts: argparse.Namespace, options: Options +) -> None: + """Validate cache_map and copy into options.cache_map.""" + n = len(special_opts.cache_map) + if n % 3 != 0: + parser.error("--cache-map requires one or more triples (see source)") + for i in range(0, n, 3): + source, meta_file, data_file = special_opts.cache_map[i : i + 3] + if source in options.cache_map: + parser.error(f"Duplicate --cache-map source {source})") + if not source.endswith(".py") and not source.endswith(".pyi"): + parser.error(f"Invalid --cache-map source {source} (triple[0] must be *.py[i])") + if not meta_file.endswith(".meta.json"): + parser.error( + "Invalid --cache-map meta_file %s (triple[1] must be *.meta.json)" % meta_file + ) + if not data_file.endswith(".data.json"): + parser.error( + "Invalid --cache-map data_file %s (triple[2] must be *.data.json)" % data_file + ) + options.cache_map[source] = (meta_file, data_file) + + +def maybe_write_junit_xml( + td: float, + serious: bool, + all_messages: list[str], + messages_by_file: dict[str | None, list[str]], + options: Options, +) -> None: + if options.junit_xml: + py_version = f"{options.python_version[0]}_{options.python_version[1]}" + if options.junit_format == "global": + util.write_junit_xml( + td, + serious, + {None: all_messages} if all_messages else {}, + options.junit_xml, + py_version, + options.platform, + ) + else: + # per_file + util.write_junit_xml( + td, serious, messages_by_file, options.junit_xml, py_version, options.platform + ) + + +def fail(msg: str, stderr: TextIO, options: Options) -> NoReturn: + """Fail with a serious error.""" + stderr.write(f"{msg}\n") + maybe_write_junit_xml( + 0.0, serious=True, all_messages=[msg], messages_by_file={None: [msg]}, options=options + ) + sys.exit(2) + + +def read_types_packages_to_install(cache_dir: str, after_run: bool) -> list[str]: + if not os.path.isdir(cache_dir): + if not after_run: + sys.stderr.write( + "error: Can't determine which types to install with no files to check " + + "(and no cache from previous mypy run)\n" + ) + else: + sys.stderr.write( + "error: --install-types failed (an error blocked analysis of which types to install)\n" + ) + fnam = build.missing_stubs_file(cache_dir) + if not os.path.isfile(fnam): + # No missing stubs. 
+ return [] + with open(fnam) as f: + return [line.strip() for line in f] + + +def install_types( + formatter: util.FancyFormatter, + options: Options, + *, + after_run: bool = False, + non_interactive: bool = False, +) -> bool: + """Install stub packages using pip if some missing stubs were detected.""" + packages = read_types_packages_to_install(options.cache_dir, after_run) + if not packages: + # If there are no missing stubs, generate no output. + return False + if after_run and not non_interactive: + print() + print("Installing missing stub packages:") + assert options.python_executable, "Python executable required to install types" + cmd = [options.python_executable, "-m", "pip", "install"] + packages + print(formatter.style(" ".join(cmd), "none", bold=True)) + print() + if not non_interactive: + x = input("Install? [yN] ") + if not x.strip() or not x.lower().startswith("y"): + print(formatter.style("mypy: Skipping installation", "red", bold=True)) + sys.exit(2) + print() + subprocess.run(cmd) + return True diff --git a/.venv/lib/python3.12/site-packages/mypy/maptype.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/maptype.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..751f32b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/maptype.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/maptype.py b/.venv/lib/python3.12/site-packages/mypy/maptype.py new file mode 100644 index 0000000..59ecb2b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/maptype.py @@ -0,0 +1,106 @@ +from __future__ import annotations + +from mypy.expandtype import expand_type_by_instance +from mypy.nodes import TypeInfo +from mypy.types import AnyType, Instance, TupleType, TypeOfAny, has_type_vars + + +def map_instance_to_supertype(instance: Instance, superclass: TypeInfo) -> Instance: + """Produce a supertype of `instance` that is an Instance + of `superclass`, mapping type arguments up the chain of bases. + + If `superclass` is not a nominal superclass of `instance.type`, + then all type arguments are mapped to 'Any'. + """ + if instance.type == superclass: + # Fast path: `instance` already belongs to `superclass`. + return instance + + if superclass.fullname == "builtins.tuple" and instance.type.tuple_type: + if has_type_vars(instance.type.tuple_type): + # We special case mapping generic tuple types to tuple base, because for + # such tuples fallback can't be calculated before applying type arguments. + alias = instance.type.special_alias + assert alias is not None + if not alias._is_recursive: + # Unfortunately we can't support this for generic recursive tuples. + # If we skip this special casing we will fall back to tuple[Any, ...]. + tuple_type = expand_type_by_instance(instance.type.tuple_type, instance) + if isinstance(tuple_type, TupleType): + # Make the import here to avoid cyclic imports. + import mypy.typeops + + return mypy.typeops.tuple_fallback(tuple_type) + elif isinstance(tuple_type, Instance): + # This can happen after normalizing variadic tuples. + return tuple_type + + if not superclass.type_vars: + # Fast path: `superclass` has no type variables to map to. 
+ return Instance(superclass, []) + + return map_instance_to_supertypes(instance, superclass)[0] + + +def map_instance_to_supertypes(instance: Instance, supertype: TypeInfo) -> list[Instance]: + # FIX: Currently we should only have one supertype per interface, so no + # need to return an array + result: list[Instance] = [] + for path in class_derivation_paths(instance.type, supertype): + types = [instance] + for sup in path: + a: list[Instance] = [] + for t in types: + a.extend(map_instance_to_direct_supertypes(t, sup)) + types = a + result.extend(types) + if result: + return result + else: + # Nothing. Presumably due to an error. Construct a dummy using Any. + any_type = AnyType(TypeOfAny.from_error) + return [Instance(supertype, [any_type] * len(supertype.type_vars))] + + +def class_derivation_paths(typ: TypeInfo, supertype: TypeInfo) -> list[list[TypeInfo]]: + """Return an array of non-empty paths of direct base classes from + type to supertype. Return [] if no such path could be found. + + InterfaceImplementationPaths(A, B) == [[B]] if A inherits B + InterfaceImplementationPaths(A, C) == [[B, C]] if A inherits B and + B inherits C + """ + # FIX: Currently we might only ever have a single path, so this could be + # simplified + result: list[list[TypeInfo]] = [] + + for base in typ.bases: + btype = base.type + if btype == supertype: + result.append([btype]) + else: + # Try constructing a longer path via the base class. + for path in class_derivation_paths(btype, supertype): + result.append([btype] + path) + + return result + + +def map_instance_to_direct_supertypes(instance: Instance, supertype: TypeInfo) -> list[Instance]: + # FIX: There should only be one supertypes, always. + typ = instance.type + result: list[Instance] = [] + + for b in typ.bases: + if b.type == supertype: + t = expand_type_by_instance(b, instance) + assert isinstance(t, Instance) + result.append(t) + + if result: + return result + else: + # Relationship with the supertype not specified explicitly. Use dynamic + # type arguments implicitly. 
+ any_type = AnyType(TypeOfAny.unannotated) + return [Instance(supertype, [any_type] * len(supertype.type_vars))] diff --git a/.venv/lib/python3.12/site-packages/mypy/meet.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/meet.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..c98c458 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/meet.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/meet.py b/.venv/lib/python3.12/site-packages/mypy/meet.py new file mode 100644 index 0000000..42229f9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/meet.py @@ -0,0 +1,1281 @@ +from __future__ import annotations + +from typing import Callable + +from mypy import join +from mypy.erasetype import erase_type +from mypy.maptype import map_instance_to_supertype +from mypy.state import state +from mypy.subtypes import ( + are_parameters_compatible, + find_member, + is_callable_compatible, + is_equivalent, + is_proper_subtype, + is_same_type, + is_subtype, +) +from mypy.typeops import is_recursive_pair, make_simplified_union, tuple_fallback +from mypy.types import ( + MYPYC_NATIVE_INT_NAMES, + TUPLE_LIKE_INSTANCE_NAMES, + AnyType, + CallableType, + DeletedType, + ErasedType, + FunctionLike, + Instance, + LiteralType, + NoneType, + Overloaded, + Parameters, + ParamSpecType, + PartialType, + ProperType, + TupleType, + Type, + TypeAliasType, + TypedDictType, + TypeGuardedType, + TypeOfAny, + TypeType, + TypeVarLikeType, + TypeVarTupleType, + TypeVarType, + TypeVisitor, + UnboundType, + UninhabitedType, + UnionType, + UnpackType, + find_unpack_in_list, + get_proper_type, + get_proper_types, + has_type_vars, + is_named_instance, + split_with_prefix_and_suffix, +) + +# TODO Describe this module. + + +def trivial_meet(s: Type, t: Type) -> ProperType: + """Return one of types (expanded) if it is a subtype of other, otherwise bottom type.""" + if is_subtype(s, t): + return get_proper_type(s) + elif is_subtype(t, s): + return get_proper_type(t) + else: + if state.strict_optional: + return UninhabitedType() + else: + return NoneType() + + +def meet_types(s: Type, t: Type) -> ProperType: + """Return the greatest lower bound of two types.""" + if is_recursive_pair(s, t): + # This case can trigger an infinite recursion, general support for this will be + # tricky, so we use a trivial meet (like for protocols). + return trivial_meet(s, t) + s = get_proper_type(s) + t = get_proper_type(t) + + if isinstance(s, Instance) and isinstance(t, Instance) and s.type == t.type: + # Code in checker.py should merge any extra_items where possible, so we + # should have only compatible extra_items here. We check this before + # the below subtype check, so that extra_attrs will not get erased. + if (s.extra_attrs or t.extra_attrs) and is_same_type(s, t): + if s.extra_attrs and t.extra_attrs: + if len(s.extra_attrs.attrs) > len(t.extra_attrs.attrs): + # Return the one that has more precise information. + return s + return t + if s.extra_attrs: + return s + return t + + if not isinstance(s, UnboundType) and not isinstance(t, UnboundType): + if is_proper_subtype(s, t, ignore_promotions=True): + return s + if is_proper_subtype(t, s, ignore_promotions=True): + return t + + if isinstance(s, ErasedType): + return s + if isinstance(s, AnyType): + return t + if isinstance(s, UnionType) and not isinstance(t, UnionType): + s, t = t, s + + # Meets/joins require callable type normalization. 
+ s, t = join.normalize_callables(s, t) + + return t.accept(TypeMeetVisitor(s)) + + +def narrow_declared_type(declared: Type, narrowed: Type) -> Type: + """Return the declared type narrowed down to another type.""" + # TODO: check infinite recursion for aliases here. + if isinstance(narrowed, TypeGuardedType): + # A type guard forces the new type even if it doesn't overlap the old... + if is_proper_subtype(declared, narrowed.type_guard, ignore_promotions=True): + # ...unless it is a proper supertype of declared type. + return declared + return narrowed.type_guard + + original_declared = declared + original_narrowed = narrowed + declared = get_proper_type(declared) + narrowed = get_proper_type(narrowed) + + if declared == narrowed: + return original_declared + if isinstance(declared, UnionType): + declared_items = declared.relevant_items() + if isinstance(narrowed, UnionType): + narrowed_items = narrowed.relevant_items() + else: + narrowed_items = [narrowed] + return make_simplified_union( + [ + narrow_declared_type(d, n) + for d in declared_items + for n in narrowed_items + # This (ugly) special-casing is needed to support checking + # branches like this: + # x: Union[float, complex] + # if isinstance(x, int): + # ... + # And assignments like this: + # x: float | None + # y: int | None + # x = y + if ( + is_overlapping_types(d, n, ignore_promotions=True) + or is_subtype(n, d, ignore_promotions=False) + ) + ] + ) + if is_enum_overlapping_union(declared, narrowed): + # Quick check before reaching `is_overlapping_types`. If it's enum/literal overlap, + # avoid full expansion and make it faster. + assert isinstance(narrowed, UnionType) + return make_simplified_union( + [narrow_declared_type(declared, x) for x in narrowed.relevant_items()] + ) + elif ( + isinstance(declared, TypeVarType) + and not has_type_vars(original_narrowed) + and is_subtype(original_narrowed, declared.upper_bound) + ): + # We put this branch early to get T(bound=Union[A, B]) instead of + # Union[T(bound=A), T(bound=B)] that will be confusing for users. + return declared.copy_modified( + upper_bound=narrow_declared_type(declared.upper_bound, original_narrowed) + ) + elif not is_overlapping_types(declared, narrowed, prohibit_none_typevar_overlap=True): + if state.strict_optional: + return UninhabitedType() + else: + return NoneType() + elif isinstance(narrowed, UnionType): + return make_simplified_union( + [narrow_declared_type(declared, x) for x in narrowed.relevant_items()] + ) + elif isinstance(narrowed, AnyType): + return original_narrowed + elif isinstance(narrowed, TypeVarType) and is_subtype(narrowed.upper_bound, declared): + return narrowed + elif isinstance(declared, TypeType) and isinstance(narrowed, TypeType): + return TypeType.make_normalized( + narrow_declared_type(declared.item, narrowed.item), + is_type_form=declared.is_type_form and narrowed.is_type_form, + ) + elif ( + isinstance(declared, TypeType) + and isinstance(narrowed, Instance) + and narrowed.type.is_metaclass() + ): + if declared.is_type_form: + # The declared TypeForm[T] after narrowing must be a kind of + # type object at least as narrow as Type[T] + return narrow_declared_type( + TypeType.make_normalized( + declared.item, line=declared.line, column=declared.column, is_type_form=False + ), + original_narrowed, + ) + # We'd need intersection types, so give up. 
+ return original_declared + elif isinstance(declared, Instance): + if declared.type.alt_promote: + # Special case: low-level integer type can't be narrowed + return original_declared + if ( + isinstance(narrowed, Instance) + and narrowed.type.alt_promote + and narrowed.type.alt_promote.type is declared.type + ): + # Special case: 'int' can't be narrowed down to a native int type such as + # i64, since they have different runtime representations. + return original_declared + return meet_types(original_declared, original_narrowed) + elif isinstance(declared, (TupleType, TypeType, LiteralType)): + return meet_types(original_declared, original_narrowed) + elif isinstance(declared, TypedDictType) and isinstance(narrowed, Instance): + # Special case useful for selecting TypedDicts from unions using isinstance(x, dict). + if narrowed.type.fullname == "builtins.dict" and all( + isinstance(t, AnyType) for t in get_proper_types(narrowed.args) + ): + return original_declared + return meet_types(original_declared, original_narrowed) + return original_narrowed + + +def get_possible_variants(typ: Type) -> list[Type]: + """This function takes any "Union-like" type and returns a list of the available "options". + + Specifically, there are currently exactly three different types that can have + "variants" or are "union-like": + + - Unions + - TypeVars with value restrictions + - Overloads + + This function will return a list of each "option" present in those types. + + If this function receives any other type, we return a list containing just that + original type. (E.g. pretend the type was contained within a singleton union). + + The only current exceptions are regular TypeVars and ParamSpecs. For these "TypeVarLike"s, + we return a list containing that TypeVarLike's upper bound. + + This function is useful primarily when checking to see if two types are overlapping: + the algorithm to check if two unions are overlapping is fundamentally the same as + the algorithm for checking if two overloads are overlapping. + + Normalizing both kinds of types in the same way lets us reuse the same algorithm + for both. 
+ """ + typ = get_proper_type(typ) + + if isinstance(typ, TypeVarType): + if len(typ.values) > 0: + return typ.values + else: + return [typ.upper_bound] + elif isinstance(typ, ParamSpecType): + # Extract 'object' from the final mro item + upper_bound = get_proper_type(typ.upper_bound) + if isinstance(upper_bound, Instance): + return [Instance(upper_bound.type.mro[-1], [])] + return [AnyType(TypeOfAny.implementation_artifact)] + elif isinstance(typ, TypeVarTupleType): + return [typ.upper_bound] + elif isinstance(typ, UnionType): + return list(typ.items) + elif isinstance(typ, Overloaded): + # Note: doing 'return typ.items()' makes mypy + # infer a too-specific return type of List[CallableType] + return list(typ.items) + else: + return [typ] + + +def is_enum_overlapping_union(x: ProperType, y: ProperType) -> bool: + """Return True if x is an Enum, and y is an Union with at least one Literal from x""" + return ( + isinstance(x, Instance) + and x.type.is_enum + and isinstance(y, UnionType) + and any( + isinstance(p := get_proper_type(z), LiteralType) and x.type == p.fallback.type + for z in y.relevant_items() + ) + ) + + +def is_literal_in_union(x: ProperType, y: ProperType) -> bool: + """Return True if x is a Literal and y is an Union that includes x""" + return ( + isinstance(x, LiteralType) + and isinstance(y, UnionType) + and any(x == get_proper_type(z) for z in y.items) + ) + + +def is_object(t: ProperType) -> bool: + return isinstance(t, Instance) and t.type.fullname == "builtins.object" + + +def is_none_typevarlike_overlap(t1: ProperType, t2: ProperType) -> bool: + return isinstance(t1, NoneType) and isinstance(t2, TypeVarLikeType) + + +def is_none_object_overlap(t1: ProperType, t2: ProperType) -> bool: + return ( + isinstance(t1, NoneType) + and isinstance(t2, Instance) + and t2.type.fullname == "builtins.object" + ) + + +def are_related_types( + left: Type, right: Type, *, proper_subtype: bool, ignore_promotions: bool +) -> bool: + if proper_subtype: + return is_proper_subtype( + left, right, ignore_promotions=ignore_promotions + ) or is_proper_subtype(right, left, ignore_promotions=ignore_promotions) + else: + return is_subtype(left, right, ignore_promotions=ignore_promotions) or is_subtype( + right, left, ignore_promotions=ignore_promotions + ) + + +def is_overlapping_types( + left: Type, + right: Type, + ignore_promotions: bool = False, + prohibit_none_typevar_overlap: bool = False, + overlap_for_overloads: bool = False, + seen_types: set[tuple[Type, Type]] | None = None, +) -> bool: + """Can a value of type 'left' also be of type 'right' or vice-versa? + + If 'ignore_promotions' is True, we ignore promotions while checking for overlaps. + If 'prohibit_none_typevar_overlap' is True, we disallow None from overlapping with + TypeVars (in both strict-optional and non-strict-optional mode). + If 'overlap_for_overloads' is True, we check for overlaps more strictly (to avoid false + positives), for example: None only overlaps with explicitly optional types, Any + doesn't overlap with anything except object, we don't ignore positional argument names. + """ + if isinstance(left, TypeGuardedType) or isinstance(right, TypeGuardedType): + # A type guard forces the new type even if it doesn't overlap the old. + return True + + if seen_types is None: + seen_types = set() + elif (left, right) in seen_types: + return True + if is_recursive_pair(left, right): + seen_types.add((left, right)) + + left, right = get_proper_types((left, right)) + + # We should never encounter this type. 
+ if isinstance(left, PartialType) or isinstance(right, PartialType): + assert False, "Unexpectedly encountered partial type" + + # We should also never encounter these types, but it's possible a few + # have snuck through due to unrelated bugs. For now, we handle these + # in the same way we handle 'Any'. + # + # TODO: Replace these with an 'assert False' once we are more confident. + illegal_types = (UnboundType, ErasedType, DeletedType) + if isinstance(left, illegal_types) or isinstance(right, illegal_types): + return True + + # When running under non-strict optional mode, simplify away types of + # the form 'Union[A, B, C, None]' into just 'Union[A, B, C]'. + + if not state.strict_optional: + if isinstance(left, UnionType): + left = UnionType.make_union(left.relevant_items()) + if isinstance(right, UnionType): + right = UnionType.make_union(right.relevant_items()) + left, right = get_proper_types((left, right)) + + # 'Any' may or may not be overlapping with the other type + if isinstance(left, AnyType) or isinstance(right, AnyType): + return not overlap_for_overloads or is_object(left) or is_object(right) + + # We check for complete overlaps next as a general-purpose failsafe. + # If this check fails, we start checking to see if there exists a + # *partial* overlap between types. + # + # These checks will also handle the NoneType and UninhabitedType cases for us. + + # enums are sometimes expanded into an Union of Literals + # when that happens we want to make sure we treat the two as overlapping + # and crucially, we want to do that *fast* in case the enum is large + # so we do it before expanding variants below to avoid O(n**2) behavior + if ( + is_enum_overlapping_union(left, right) + or is_enum_overlapping_union(right, left) + or is_literal_in_union(left, right) + or is_literal_in_union(right, left) + ): + return True + + if overlap_for_overloads: + if is_none_object_overlap(left, right) or is_none_object_overlap(right, left): + return False + + if are_related_types( + left, right, proper_subtype=overlap_for_overloads, ignore_promotions=ignore_promotions + ): + return True + + # See the docstring for 'get_possible_variants' for more info on what the + # following lines are doing. + + left_possible = get_possible_variants(left) + right_possible = get_possible_variants(right) + + # Now move on to checking multi-variant types like Unions. We also perform + # the same logic if either type happens to be a TypeVar/ParamSpec/TypeVarTuple. + # + # Handling the TypeVarLikes now lets us simulate having them bind to the corresponding + # type -- if we deferred these checks, the "return-early" logic of the other + # checks will prevent us from detecting certain overlaps. + # + # If both types are singleton variants (and are not TypeVarLikes), we've hit the base case: + # we skip these checks to avoid infinitely recursing. + + if prohibit_none_typevar_overlap: + if is_none_typevarlike_overlap(left, right) or is_none_typevarlike_overlap(right, left): + return False + + def _is_overlapping_types(left: Type, right: Type) -> bool: + """Encode the kind of overlapping check to perform. + + This function mostly exists, so we don't have to repeat keyword arguments everywhere. 
+ """ + return is_overlapping_types( + left, + right, + ignore_promotions=ignore_promotions, + prohibit_none_typevar_overlap=prohibit_none_typevar_overlap, + overlap_for_overloads=overlap_for_overloads, + seen_types=seen_types.copy(), + ) + + if ( + len(left_possible) > 1 + or len(right_possible) > 1 + or isinstance(left, TypeVarLikeType) + or isinstance(right, TypeVarLikeType) + ): + for l in left_possible: + for r in right_possible: + if _is_overlapping_types(l, r): + return True + return False + + # Now that we've finished handling TypeVarLikes, we're free to end early + # if one one of the types is None and we're running in strict-optional mode. + # (None only overlaps with None in strict-optional mode). + # + # We must perform this check after the TypeVarLike checks because + # a TypeVar could be bound to None, for example. + + if state.strict_optional and isinstance(left, NoneType) != isinstance(right, NoneType): + return False + + # Next, we handle single-variant types that may be inherently partially overlapping: + # + # - TypedDicts + # - Tuples + # + # If we cannot identify a partial overlap and end early, we degrade these two types + # into their 'Instance' fallbacks. + + if isinstance(left, TypedDictType) and isinstance(right, TypedDictType): + return are_typed_dicts_overlapping(left, right, _is_overlapping_types) + elif typed_dict_mapping_pair(left, right): + # Overlaps between TypedDicts and Mappings require dedicated logic. + return typed_dict_mapping_overlap(left, right, overlapping=_is_overlapping_types) + elif isinstance(left, TypedDictType): + left = left.fallback + elif isinstance(right, TypedDictType): + right = right.fallback + + if is_tuple(left) and is_tuple(right): + return are_tuples_overlapping(left, right, _is_overlapping_types) + elif isinstance(left, TupleType): + left = tuple_fallback(left) + elif isinstance(right, TupleType): + right = tuple_fallback(right) + + # Next, we handle single-variant types that cannot be inherently partially overlapping, + # but do require custom logic to inspect. + # + # As before, we degrade into 'Instance' whenever possible. + + if isinstance(left, TypeType) and isinstance(right, TypeType): + return _is_overlapping_types(left.item, right.item) + + if isinstance(left, TypeType) or isinstance(right, TypeType): + + def _type_object_overlap(left: Type, right: Type) -> bool: + """Special cases for type object types overlaps.""" + # TODO: these checks are a bit in gray area, adjust if they cause problems. + left, right = get_proper_types((left, right)) + # 1. Type[C] vs Callable[..., C] overlap even if the latter is not class object. + if isinstance(left, TypeType) and isinstance(right, CallableType): + return _is_overlapping_types(left.item, right.ret_type) + # 2. Type[C] vs Meta, where Meta is a metaclass for C. + if isinstance(left, TypeType) and isinstance(right, Instance): + if isinstance(left.item, Instance): + left_meta = left.item.type.metaclass_type + if left_meta is not None: + return _is_overlapping_types(left_meta, right) + # builtins.type (default metaclass) overlaps with all metaclasses + return right.type.has_base("builtins.type") + elif isinstance(left.item, AnyType): + return right.type.has_base("builtins.type") + # 3. Callable[..., C] vs Meta is considered below, when we switch to fallbacks. 
+ return False + + return _type_object_overlap(left, right) or _type_object_overlap(right, left) + + if isinstance(left, Parameters) and isinstance(right, Parameters): + return are_parameters_compatible( + left, + right, + is_compat=_is_overlapping_types, + is_proper_subtype=False, + ignore_pos_arg_names=not overlap_for_overloads, + allow_partial_overlap=True, + ) + # A `Parameters` does not overlap with anything else, however + if isinstance(left, Parameters) or isinstance(right, Parameters): + return False + + if isinstance(left, CallableType) and isinstance(right, CallableType): + return is_callable_compatible( + left, + right, + is_compat=_is_overlapping_types, + is_proper_subtype=False, + ignore_pos_arg_names=not overlap_for_overloads, + allow_partial_overlap=True, + ) + + call = None + other = None + if isinstance(left, CallableType) and isinstance(right, Instance): + call = find_member("__call__", right, right, is_operator=True) + other = left + if isinstance(right, CallableType) and isinstance(left, Instance): + call = find_member("__call__", left, left, is_operator=True) + other = right + if isinstance(get_proper_type(call), FunctionLike): + assert call is not None and other is not None + return _is_overlapping_types(call, other) + + if isinstance(left, CallableType): + left = left.fallback + if isinstance(right, CallableType): + right = right.fallback + + if isinstance(left, LiteralType) and isinstance(right, LiteralType): + if left.value == right.value: + # If values are the same, we still need to check if fallbacks are overlapping, + # this is done below. + left = left.fallback + right = right.fallback + else: + return False + elif isinstance(left, LiteralType): + left = left.fallback + elif isinstance(right, LiteralType): + right = right.fallback + + # Finally, we handle the case where left and right are instances. + + if isinstance(left, Instance) and isinstance(right, Instance): + # First we need to handle promotions and structural compatibility for instances + # that came as fallbacks, so simply call is_subtype() to avoid code duplication. + if are_related_types( + left, right, proper_subtype=overlap_for_overloads, ignore_promotions=ignore_promotions + ): + return True + + if right.type.fullname == "builtins.int" and left.type.fullname in MYPYC_NATIVE_INT_NAMES: + return True + + # Two unrelated types cannot be partially overlapping: they're disjoint. + if left.type.has_base(right.type.fullname): + left = map_instance_to_supertype(left, right.type) + elif right.type.has_base(left.type.fullname): + right = map_instance_to_supertype(right, left.type) + else: + return False + + if right.type.has_type_var_tuple_type: + # Similar to subtyping, we delegate the heavy lifting to the tuple overlap. 
+            assert right.type.type_var_tuple_prefix is not None
+            assert right.type.type_var_tuple_suffix is not None
+            prefix = right.type.type_var_tuple_prefix
+            suffix = right.type.type_var_tuple_suffix
+            tvt = right.type.defn.type_vars[prefix]
+            assert isinstance(tvt, TypeVarTupleType)
+            fallback = tvt.tuple_fallback
+            left_prefix, left_middle, left_suffix = split_with_prefix_and_suffix(
+                left.args, prefix, suffix
+            )
+            right_prefix, right_middle, right_suffix = split_with_prefix_and_suffix(
+                right.args, prefix, suffix
+            )
+            left_args = left_prefix + (TupleType(list(left_middle), fallback),) + left_suffix
+            right_args = right_prefix + (TupleType(list(right_middle), fallback),) + right_suffix
+        else:
+            left_args = left.args
+            right_args = right.args
+        if len(left_args) == len(right_args):
+            # Note: we don't really care about variance here, since the overlapping check
+            # is symmetric and since we want to return 'True' even for partial overlaps.
+            #
+            # For example, suppose we have two types Wrapper[Parent] and Wrapper[Child].
+            # It doesn't matter whether Wrapper is covariant or contravariant since
+            # either way, one of the two types will overlap with the other.
+            #
+            # Similarly, if Wrapper was invariant, the two types could still be partially
+            # overlapping -- what if Wrapper[Parent] happened to contain only instances of
+            # specifically Child?
+            #
+            # Or, to use a more concrete example, List[Union[A, B]] and List[Union[B, C]]
+            # would be considered partially overlapping since it's possible for both lists
+            # to contain only instances of B at runtime.
+            if all(
+                _is_overlapping_types(left_arg, right_arg)
+                for left_arg, right_arg in zip(left_args, right_args)
+            ):
+                return True
+
+        return False
+
+    # We ought to have handled every case by now: we conclude the
+    # two types are not overlapping, either completely or partially.
+    #
+    # Note: it's unclear, however, whether returning False is the right thing
+    # to do when inferring reachability -- see https://github.com/python/mypy/issues/5529
+
+    assert type(left) != type(right), f"{type(left)} vs {type(right)}"
+    return False
+
+
+def is_overlapping_erased_types(
+    left: Type, right: Type, *, ignore_promotions: bool = False
+) -> bool:
+    """The same as 'is_overlapping_types', except the types are erased first."""
+    return is_overlapping_types(
+        erase_type(left),
+        erase_type(right),
+        ignore_promotions=ignore_promotions,
+        prohibit_none_typevar_overlap=True,
+    )
+
+
+def are_typed_dicts_overlapping(
+    left: TypedDictType, right: TypedDictType, is_overlapping: Callable[[Type, Type], bool]
+) -> bool:
+    """Returns 'true' if left and right are overlapping TypedDictTypes."""
+    # All required keys in left are present and overlapping with something in right
+    for key in left.required_keys:
+        if key not in right.items:
+            return False
+        if not is_overlapping(left.items[key], right.items[key]):
+            return False
+
+    # Repeat check in the other direction
+    for key in right.required_keys:
+        if key not in left.items:
+            return False
+        if not is_overlapping(left.items[key], right.items[key]):
+            return False
+
+    # The presence of any additional optional keys does not affect whether the two
+    # TypedDicts are partially overlapping: the dicts would be overlapping if the
+    # keys happened to be missing.
+ return True + + +def are_tuples_overlapping( + left: Type, right: Type, is_overlapping: Callable[[Type, Type], bool] +) -> bool: + """Returns true if left and right are overlapping tuples.""" + left, right = get_proper_types((left, right)) + left = adjust_tuple(left, right) or left + right = adjust_tuple(right, left) or right + assert isinstance(left, TupleType), f"Type {left} is not a tuple" + assert isinstance(right, TupleType), f"Type {right} is not a tuple" + + # This algorithm works well if only one tuple is variadic, if both are + # variadic we may get rare false negatives for overlapping prefix/suffix. + # Also, this ignores empty unpack case, but it is probably consistent with + # how we handle e.g. empty lists in overload overlaps. + # TODO: write a more robust algorithm for cases where both types are variadic. + left_unpack = find_unpack_in_list(left.items) + right_unpack = find_unpack_in_list(right.items) + if left_unpack is not None: + left = expand_tuple_if_possible(left, len(right.items)) + if right_unpack is not None: + right = expand_tuple_if_possible(right, len(left.items)) + + if len(left.items) != len(right.items): + return False + if not all(is_overlapping(l, r) for l, r in zip(left.items, right.items)): + return False + + # Check that the tuples aren't from e.g. different NamedTuples. + if is_named_instance(right.partial_fallback, "builtins.tuple") or is_named_instance( + left.partial_fallback, "builtins.tuple" + ): + return True + else: + return is_overlapping(left.partial_fallback, right.partial_fallback) + + +def expand_tuple_if_possible(tup: TupleType, target: int) -> TupleType: + if len(tup.items) > target + 1: + return tup + extra = target + 1 - len(tup.items) + new_items = [] + for it in tup.items: + if not isinstance(it, UnpackType): + new_items.append(it) + continue + unpacked = get_proper_type(it.type) + if isinstance(unpacked, TypeVarTupleType): + instance = unpacked.tuple_fallback + else: + # Nested non-variadic tuples should be normalized at this point. 
+ assert isinstance(unpacked, Instance) + instance = unpacked + assert instance.type.fullname == "builtins.tuple" + new_items.extend([instance.args[0]] * extra) + return tup.copy_modified(items=new_items) + + +def adjust_tuple(left: ProperType, r: ProperType) -> TupleType | None: + """Find out if `left` is a Tuple[A, ...], and adjust its length to `right`""" + if isinstance(left, Instance) and left.type.fullname == "builtins.tuple": + n = r.length() if isinstance(r, TupleType) else 1 + return TupleType([left.args[0]] * n, left) + return None + + +def is_tuple(typ: Type) -> bool: + typ = get_proper_type(typ) + return isinstance(typ, TupleType) or ( + isinstance(typ, Instance) and typ.type.fullname == "builtins.tuple" + ) + + +class TypeMeetVisitor(TypeVisitor[ProperType]): + def __init__(self, s: ProperType) -> None: + self.s = s + + def visit_unbound_type(self, t: UnboundType) -> ProperType: + if isinstance(self.s, NoneType): + if state.strict_optional: + return UninhabitedType() + else: + return self.s + elif isinstance(self.s, UninhabitedType): + return self.s + else: + return AnyType(TypeOfAny.special_form) + + def visit_any(self, t: AnyType) -> ProperType: + return self.s + + def visit_union_type(self, t: UnionType) -> ProperType: + if isinstance(self.s, UnionType): + meets: list[Type] = [] + for x in t.items: + for y in self.s.items: + meets.append(meet_types(x, y)) + else: + meets = [meet_types(x, self.s) for x in t.items] + return make_simplified_union(meets) + + def visit_none_type(self, t: NoneType) -> ProperType: + if state.strict_optional: + if isinstance(self.s, NoneType) or ( + isinstance(self.s, Instance) and self.s.type.fullname == "builtins.object" + ): + return t + else: + return UninhabitedType() + else: + return t + + def visit_uninhabited_type(self, t: UninhabitedType) -> ProperType: + return t + + def visit_deleted_type(self, t: DeletedType) -> ProperType: + if isinstance(self.s, NoneType): + if state.strict_optional: + return t + else: + return self.s + elif isinstance(self.s, UninhabitedType): + return self.s + else: + return t + + def visit_erased_type(self, t: ErasedType) -> ProperType: + return self.s + + def visit_type_var(self, t: TypeVarType) -> ProperType: + if isinstance(self.s, TypeVarType) and self.s.id == t.id: + if self.s.upper_bound == t.upper_bound: + return self.s + return self.s.copy_modified(upper_bound=self.meet(self.s.upper_bound, t.upper_bound)) + else: + return self.default(self.s) + + def visit_param_spec(self, t: ParamSpecType) -> ProperType: + if self.s == t: + return self.s + else: + return self.default(self.s) + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> ProperType: + if isinstance(self.s, TypeVarTupleType) and self.s.id == t.id: + return self.s if self.s.min_len > t.min_len else t + else: + return self.default(self.s) + + def visit_unpack_type(self, t: UnpackType) -> ProperType: + raise NotImplementedError + + def visit_parameters(self, t: Parameters) -> ProperType: + if isinstance(self.s, Parameters): + if len(t.arg_types) != len(self.s.arg_types): + return self.default(self.s) + from mypy.join import join_types + + return t.copy_modified( + arg_types=[join_types(s_a, t_a) for s_a, t_a in zip(self.s.arg_types, t.arg_types)] + ) + else: + return self.default(self.s) + + def visit_instance(self, t: Instance) -> ProperType: + if isinstance(self.s, Instance): + if t.type == self.s.type: + if is_subtype(t, self.s) or is_subtype(self.s, t): + # Combine type arguments. We could have used join below + # equivalently. 
+ args: list[Type] = [] + # N.B: We use zip instead of indexing because the lengths might have + # mismatches during daemon reprocessing. + if t.type.has_type_var_tuple_type: + # We handle meet of variadic instances by simply creating correct mapping + # for type arguments and compute the individual meets same as for regular + # instances. All the heavy lifting is done in the meet of tuple types. + s = self.s + assert s.type.type_var_tuple_prefix is not None + assert s.type.type_var_tuple_suffix is not None + prefix = s.type.type_var_tuple_prefix + suffix = s.type.type_var_tuple_suffix + tvt = s.type.defn.type_vars[prefix] + assert isinstance(tvt, TypeVarTupleType) + fallback = tvt.tuple_fallback + s_prefix, s_middle, s_suffix = split_with_prefix_and_suffix( + s.args, prefix, suffix + ) + t_prefix, t_middle, t_suffix = split_with_prefix_and_suffix( + t.args, prefix, suffix + ) + s_args = s_prefix + (TupleType(list(s_middle), fallback),) + s_suffix + t_args = t_prefix + (TupleType(list(t_middle), fallback),) + t_suffix + else: + t_args = t.args + s_args = self.s.args + for ta, sa, tv in zip(t_args, s_args, t.type.defn.type_vars): + meet = self.meet(ta, sa) + if isinstance(tv, TypeVarTupleType): + # Correctly unpack possible outcomes of meets of tuples: it can be + # either another tuple type or Never (normalized as *tuple[Never, ...]) + if isinstance(meet, TupleType): + args.extend(meet.items) + continue + else: + assert isinstance(meet, UninhabitedType) + meet = UnpackType(tv.tuple_fallback.copy_modified(args=[meet])) + args.append(meet) + return Instance(t.type, args) + else: + if state.strict_optional: + return UninhabitedType() + else: + return NoneType() + else: + alt_promote = t.type.alt_promote + if alt_promote and alt_promote.type is self.s.type: + return t + alt_promote = self.s.type.alt_promote + if alt_promote and alt_promote.type is t.type: + return self.s + if is_subtype(t, self.s): + return t + elif is_subtype(self.s, t): + # See also above comment. + return self.s + else: + if state.strict_optional: + return UninhabitedType() + else: + return NoneType() + elif isinstance(self.s, FunctionLike) and t.type.is_protocol: + call = join.unpack_callback_protocol(t) + if call: + return meet_types(call, self.s) + elif isinstance(self.s, FunctionLike) and self.s.is_type_obj() and t.type.is_metaclass(): + if is_subtype(self.s.fallback, t): + return self.s + return self.default(self.s) + elif isinstance(self.s, TypeType): + return meet_types(t, self.s) + elif isinstance(self.s, TupleType): + return meet_types(t, self.s) + elif isinstance(self.s, LiteralType): + return meet_types(t, self.s) + elif isinstance(self.s, TypedDictType): + return meet_types(t, self.s) + return self.default(self.s) + + def visit_callable_type(self, t: CallableType) -> ProperType: + if isinstance(self.s, CallableType) and join.is_similar_callables(t, self.s): + if is_equivalent(t, self.s): + return join.combine_similar_callables(t, self.s) + result = meet_similar_callables(t, self.s) + # We set the from_type_type flag to suppress error when a collection of + # concrete class objects gets inferred as their common abstract superclass. + if not ( + (t.is_type_obj() and t.type_object().is_abstract) + or (self.s.is_type_obj() and self.s.type_object().is_abstract) + ): + result.from_type_type = True + if isinstance(get_proper_type(result.ret_type), UninhabitedType): + # Return a plain None or instead of a weird function. 
+ return self.default(self.s) + return result + elif isinstance(self.s, TypeType) and t.is_type_obj() and not t.is_generic(): + # In this case we are able to potentially produce a better meet. + res = meet_types(self.s.item, t.ret_type) + if not isinstance(res, (NoneType, UninhabitedType)): + return TypeType.make_normalized(res) + return self.default(self.s) + elif isinstance(self.s, Instance) and self.s.type.is_protocol: + call = join.unpack_callback_protocol(self.s) + if call: + return meet_types(t, call) + return self.default(self.s) + + def visit_overloaded(self, t: Overloaded) -> ProperType: + # TODO: Implement a better algorithm that covers at least the same cases + # as TypeJoinVisitor.visit_overloaded(). + s = self.s + if isinstance(s, FunctionLike): + if s.items == t.items: + return Overloaded(t.items) + elif is_subtype(s, t): + return s + elif is_subtype(t, s): + return t + else: + return meet_types(t.fallback, s.fallback) + elif isinstance(self.s, Instance) and self.s.type.is_protocol: + call = join.unpack_callback_protocol(self.s) + if call: + return meet_types(t, call) + return meet_types(t.fallback, s) + + def meet_tuples(self, s: TupleType, t: TupleType) -> list[Type] | None: + """Meet two tuple types while handling variadic entries. + + This is surprisingly tricky, and we don't handle some tricky corner cases. + Most of the trickiness comes from the variadic tuple items like *tuple[X, ...] + since they can have arbitrary partial overlaps (while *Ts can't be split). This + function is roughly a mirror of join_tuples() w.r.t. to the fact that fixed + tuples are subtypes of variadic ones but not vice versa. + """ + s_unpack_index = find_unpack_in_list(s.items) + t_unpack_index = find_unpack_in_list(t.items) + if s_unpack_index is None and t_unpack_index is None: + if s.length() == t.length(): + items: list[Type] = [] + for i in range(t.length()): + items.append(self.meet(t.items[i], s.items[i])) + return items + return None + if s_unpack_index is not None and t_unpack_index is not None: + # The only simple case we can handle if both tuples are variadic + # is when their structure fully matches. Other cases are tricky because + # a variadic item is effectively a union of tuples of all length, thus + # potentially causing overlap between a suffix in `s` and a prefix + # in `t` (see how this is handled in is_subtype() for details). + # TODO: handle more cases (like when both prefix/suffix are shorter in s or t). 
+ if s.length() == t.length() and s_unpack_index == t_unpack_index: + unpack_index = s_unpack_index + s_unpack = s.items[unpack_index] + assert isinstance(s_unpack, UnpackType) + s_unpacked = get_proper_type(s_unpack.type) + t_unpack = t.items[unpack_index] + assert isinstance(t_unpack, UnpackType) + t_unpacked = get_proper_type(t_unpack.type) + if not (isinstance(s_unpacked, Instance) and isinstance(t_unpacked, Instance)): + return None + meet = self.meet(s_unpacked, t_unpacked) + if not isinstance(meet, Instance): + return None + m_prefix: list[Type] = [] + for si, ti in zip(s.items[:unpack_index], t.items[:unpack_index]): + m_prefix.append(meet_types(si, ti)) + m_suffix: list[Type] = [] + for si, ti in zip(s.items[unpack_index + 1 :], t.items[unpack_index + 1 :]): + m_suffix.append(meet_types(si, ti)) + return m_prefix + [UnpackType(meet)] + m_suffix + return None + if s_unpack_index is not None: + variadic = s + unpack_index = s_unpack_index + fixed = t + else: + assert t_unpack_index is not None + variadic = t + unpack_index = t_unpack_index + fixed = s + # If one tuple is variadic one, and the other one is fixed, the meet will be fixed. + unpack = variadic.items[unpack_index] + assert isinstance(unpack, UnpackType) + unpacked = get_proper_type(unpack.type) + if not isinstance(unpacked, Instance): + return None + if fixed.length() < variadic.length() - 1: + return None + prefix_len = unpack_index + suffix_len = variadic.length() - prefix_len - 1 + prefix, middle, suffix = split_with_prefix_and_suffix( + tuple(fixed.items), prefix_len, suffix_len + ) + items = [] + for fi, vi in zip(prefix, variadic.items[:prefix_len]): + items.append(self.meet(fi, vi)) + for mi in middle: + items.append(self.meet(mi, unpacked.args[0])) + if suffix_len: + for fi, vi in zip(suffix, variadic.items[-suffix_len:]): + items.append(self.meet(fi, vi)) + return items + + def visit_tuple_type(self, t: TupleType) -> ProperType: + if isinstance(self.s, TupleType): + items = self.meet_tuples(self.s, t) + if items is None: + return self.default(self.s) + # TODO: What if the fallbacks are different? + return TupleType(items, tuple_fallback(t)) + elif isinstance(self.s, Instance): + # meet(Tuple[t1, t2, <...>], Tuple[s, ...]) == Tuple[meet(t1, s), meet(t2, s), <...>]. + if self.s.type.fullname in TUPLE_LIKE_INSTANCE_NAMES and self.s.args: + return t.copy_modified(items=[meet_types(it, self.s.args[0]) for it in t.items]) + elif is_proper_subtype(t, self.s): + # A named tuple that inherits from a normal class + return t + elif self.s.type.has_type_var_tuple_type and is_subtype(t, self.s): + # This is a bit ad-hoc but more principled handling is tricky, and this + # special case is important for type narrowing in binder to work. 
+ return t + return self.default(self.s) + + def visit_typeddict_type(self, t: TypedDictType) -> ProperType: + if isinstance(self.s, TypedDictType): + for name, l, r in self.s.zip(t): + if not is_equivalent(l, r) or (name in t.required_keys) != ( + name in self.s.required_keys + ): + return self.default(self.s) + item_list: list[tuple[str, Type]] = [] + for item_name, s_item_type, t_item_type in self.s.zipall(t): + if s_item_type is not None: + item_list.append((item_name, s_item_type)) + else: + # at least one of s_item_type and t_item_type is not None + assert t_item_type is not None + item_list.append((item_name, t_item_type)) + items = dict(item_list) + fallback = self.s.create_anonymous_fallback() + required_keys = t.required_keys | self.s.required_keys + readonly_keys = t.readonly_keys | self.s.readonly_keys + return TypedDictType(items, required_keys, readonly_keys, fallback) + elif isinstance(self.s, Instance) and is_subtype(t, self.s): + return t + else: + return self.default(self.s) + + def visit_literal_type(self, t: LiteralType) -> ProperType: + if isinstance(self.s, LiteralType) and self.s == t: + return t + elif isinstance(self.s, Instance) and is_subtype(t.fallback, self.s): + return t + else: + return self.default(self.s) + + def visit_partial_type(self, t: PartialType) -> ProperType: + # We can't determine the meet of partial types. We should never get here. + assert False, "Internal error" + + def visit_type_type(self, t: TypeType) -> ProperType: + if isinstance(self.s, TypeType): + typ = self.meet(t.item, self.s.item) + if not isinstance(typ, NoneType): + typ = TypeType.make_normalized( + typ, line=t.line, is_type_form=self.s.is_type_form and t.is_type_form + ) + return typ + elif isinstance(self.s, Instance) and self.s.type.fullname == "builtins.type": + return t + elif isinstance(self.s, CallableType): + return self.meet(t, self.s) + else: + return self.default(self.s) + + def visit_type_alias_type(self, t: TypeAliasType) -> ProperType: + assert False, f"This should be never called, got {t}" + + def meet(self, s: Type, t: Type) -> ProperType: + return meet_types(s, t) + + def default(self, typ: Type) -> ProperType: + if isinstance(typ, UnboundType): + return AnyType(TypeOfAny.special_form) + else: + if state.strict_optional: + return UninhabitedType() + else: + return NoneType() + + +def meet_similar_callables(t: CallableType, s: CallableType) -> CallableType: + from mypy.join import match_generic_callables, safe_join + + t, s = match_generic_callables(t, s) + arg_types: list[Type] = [] + for i in range(len(t.arg_types)): + arg_types.append(safe_join(t.arg_types[i], s.arg_types[i])) + # TODO in combine_similar_callables also applies here (names and kinds) + # The fallback type can be either 'function' or 'type'. The result should have 'function' as + # fallback only if both operands have it as 'function'. + if t.fallback.type.fullname != "builtins.function": + fallback = t.fallback + else: + fallback = s.fallback + return t.copy_modified( + arg_types=arg_types, + ret_type=meet_types(t.ret_type, s.ret_type), + fallback=fallback, + name=None, + ) + + +def meet_type_list(types: list[Type]) -> Type: + if not types: + # This should probably be builtins.object but that is hard to get and + # it doesn't matter for any current users. 
+        return AnyType(TypeOfAny.implementation_artifact)
+    met = types[0]
+    for t in types[1:]:
+        met = meet_types(met, t)
+    return met
+
+
+def typed_dict_mapping_pair(left: Type, right: Type) -> bool:
+    """Is this a pair where one type is a TypedDict and another one is an instance of Mapping?
+
+    This case requires a precise/principled consideration because there are two use cases
+    that push the boundary the opposite ways: we need to avoid spurious overlaps to avoid
+    false positives for overloads, but we also need to avoid spuriously non-overlapping types
+    to avoid false positives with --strict-equality.
+    """
+    left, right = get_proper_types((left, right))
+    assert not isinstance(left, TypedDictType) or not isinstance(right, TypedDictType)
+
+    if isinstance(left, TypedDictType):
+        _, other = left, right
+    elif isinstance(right, TypedDictType):
+        _, other = right, left
+    else:
+        return False
+    return isinstance(other, Instance) and other.type.has_base("typing.Mapping")
+
+
+def typed_dict_mapping_overlap(
+    left: Type, right: Type, overlapping: Callable[[Type, Type], bool]
+) -> bool:
+    """Check if a TypedDict type is overlapping with a Mapping.
+
+    The basic logic here consists of two rules:
+
+    * A TypedDict with some required keys is overlapping with Mapping[str, <some type>]
+      if and only if every key type is overlapping with <some type>. For example:
+
+      - TypedDict(x=int, y=str) overlaps with Dict[str, Union[str, int]]
+      - TypedDict(x=int, y=str) doesn't overlap with Dict[str, int]
+
+      Note that any additional non-required keys can't change the above result.
+
+    * A TypedDict with no required keys overlaps with Mapping[str, <some type>] if and
+      only if at least one of the key types overlaps with <some type>. For example:
+
+      - TypedDict(x=str, y=str, total=False) overlaps with Dict[str, str]
+      - TypedDict(x=str, y=str, total=False) doesn't overlap with Dict[str, int]
+      - TypedDict(x=int, y=str, total=False) overlaps with Dict[str, str]
+
+    * A TypedDict with at least one ReadOnly[] key does not overlap
+      with Dict or MutableMapping, because they assume mutable data.
+
+    As usual, empty dictionaries lie in a gray area. In general, List[str] and List[str]
+    are considered non-overlapping even though the empty list belongs to both. However, List[int]
+    and List[Never] are considered overlapping.
+
+    So here we follow the same logic: a TypedDict with no required keys is considered
+    non-overlapping with Mapping[str, <nothing>], but is considered overlapping with
+    Mapping[Never, Never]. This way we avoid false positives for overloads, and also
+    avoid false positives for comparisons like SomeTypedDict == {} under --strict-equality.
+    """
+    left, right = get_proper_types((left, right))
+    assert not isinstance(left, TypedDictType) or not isinstance(right, TypedDictType)
+
+    if isinstance(left, TypedDictType):
+        assert isinstance(right, Instance)
+        typed, other = left, right
+    else:
+        assert isinstance(left, Instance)
+        assert isinstance(right, TypedDictType)
+        typed, other = right, left
+
+    mutable_mapping = next(
+        (base for base in other.type.mro if base.fullname == "typing.MutableMapping"), None
+    )
+    if mutable_mapping is not None and typed.readonly_keys:
+        return False
+
+    mapping = next(base for base in other.type.mro if base.fullname == "typing.Mapping")
+    other = map_instance_to_supertype(other, mapping)
+    key_type, value_type = get_proper_types(other.args)
+
+    # TODO: is there a cleaner way to get str_type here?
+ fallback = typed.as_anonymous().fallback + str_type = fallback.type.bases[0].args[0] # typing._TypedDict inherits Mapping[str, object] + + # Special case: a TypedDict with no required keys overlaps with an empty dict. + if isinstance(key_type, UninhabitedType) and isinstance(value_type, UninhabitedType): + return not typed.required_keys + + if typed.required_keys: + if not overlapping(key_type, str_type): + return False + return all(overlapping(typed.items[k], value_type) for k in typed.required_keys) + else: + if not overlapping(key_type, str_type): + return False + non_required = set(typed.items.keys()) - typed.required_keys + return any(overlapping(typed.items[k], value_type) for k in non_required) diff --git a/.venv/lib/python3.12/site-packages/mypy/memprofile.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/memprofile.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..f6b3502 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/memprofile.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/memprofile.py b/.venv/lib/python3.12/site-packages/mypy/memprofile.py new file mode 100644 index 0000000..4bab4ec --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/memprofile.py @@ -0,0 +1,122 @@ +"""Utility for dumping memory usage stats. + +This is tailored to mypy and knows (a little) about which list objects are +owned by particular AST nodes, etc. +""" + +from __future__ import annotations + +import gc +import sys +from collections import defaultdict +from collections.abc import Iterable +from typing import cast + +from mypy.nodes import FakeInfo, Node +from mypy.types import Type +from mypy.util import get_class_descriptors + + +def collect_memory_stats() -> tuple[dict[str, int], dict[str, int]]: + """Return stats about memory use. + + Return a tuple with these items: + - Dict from object kind to number of instances of that kind + - Dict from object kind to total bytes used by all instances of that kind + """ + objs = gc.get_objects() + find_recursive_objects(objs) + + inferred = {} + for obj in objs: + if type(obj) is FakeInfo: + # Processing these would cause a crash. + continue + n = type(obj).__name__ + if hasattr(obj, "__dict__"): + # Keep track of which class a particular __dict__ is associated with. + inferred[id(obj.__dict__)] = f"{n} (__dict__)" + if isinstance(obj, (Node, Type)): # type: ignore[misc] + if hasattr(obj, "__dict__"): + for x in obj.__dict__.values(): + if isinstance(x, list): + # Keep track of which node a list is associated with. + inferred[id(x)] = f"{n} (list)" + if isinstance(x, tuple): + # Keep track of which node a list is associated with. 
+ inferred[id(x)] = f"{n} (tuple)" + + for k in get_class_descriptors(type(obj)): + x = getattr(obj, k, None) + if isinstance(x, list): + inferred[id(x)] = f"{n} (list)" + if isinstance(x, tuple): + inferred[id(x)] = f"{n} (tuple)" + + freqs: dict[str, int] = {} + memuse: dict[str, int] = {} + for obj in objs: + if id(obj) in inferred: + name = inferred[id(obj)] + else: + name = type(obj).__name__ + freqs[name] = freqs.get(name, 0) + 1 + memuse[name] = memuse.get(name, 0) + sys.getsizeof(obj) + + return freqs, memuse + + +def print_memory_profile(run_gc: bool = True) -> None: + if not sys.platform.startswith("win"): + import resource + + system_memuse = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss + else: + system_memuse = -1 # TODO: Support this on Windows + if run_gc: + gc.collect() + freqs, memuse = collect_memory_stats() + print("%7s %7s %7s %s" % ("Freq", "Size(k)", "AvgSize", "Type")) + print("-------------------------------------------") + totalmem = 0 + i = 0 + for n, mem in sorted(memuse.items(), key=lambda x: -x[1]): + f = freqs[n] + if i < 50: + print("%7d %7d %7.0f %s" % (f, mem // 1024, mem / f, n)) + i += 1 + totalmem += mem + print() + print("Mem usage RSS ", system_memuse // 1024) + print("Total reachable ", totalmem // 1024) + + +def find_recursive_objects(objs: list[object]) -> None: + """Find additional objects referenced by objs and append them to objs. + + We use this since gc.get_objects() does not return objects without pointers + in them such as strings. + """ + seen = {id(o) for o in objs} + + def visit(o: object) -> None: + if id(o) not in seen: + objs.append(o) + seen.add(id(o)) + + for obj in objs.copy(): + if type(obj) is FakeInfo: + # Processing these would cause a crash. + continue + if type(obj) in (dict, defaultdict): + for key, val in cast(dict[object, object], obj).items(): + visit(key) + visit(val) + if type(obj) in (list, tuple, set): + for x in cast(Iterable[object], obj): + visit(x) + if hasattr(obj, "__slots__"): + for base in type.mro(type(obj)): + for slot in getattr(base, "__slots__", ()): + if hasattr(obj, slot): + visit(getattr(obj, slot)) diff --git a/.venv/lib/python3.12/site-packages/mypy/message_registry.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/message_registry.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..56780d7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/message_registry.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/message_registry.py b/.venv/lib/python3.12/site-packages/mypy/message_registry.py new file mode 100644 index 0000000..b0f9ed1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/message_registry.py @@ -0,0 +1,373 @@ +"""Message constants for generating error messages during type checking. + +Literal messages should be defined as constants in this module so they won't get out of sync +if used in more than one place, and so that they can be easily introspected. These messages are +ultimately consumed by messages.MessageBuilder.fail(). For more non-trivial message generation, +add a method to MessageBuilder and call this instead. 
+""" + +from __future__ import annotations + +from typing import Final, NamedTuple + +from mypy import errorcodes as codes +from mypy.errorcodes import ErrorCode + + +class ErrorMessage(NamedTuple): + value: str + code: ErrorCode | None = None + + def format(self, *args: object, **kwargs: object) -> ErrorMessage: + return ErrorMessage(self.value.format(*args, **kwargs), code=self.code) + + def with_additional_msg(self, info: str) -> ErrorMessage: + return ErrorMessage(self.value + info, code=self.code) + + +# Invalid types +INVALID_TYPE_RAW_ENUM_VALUE: Final = ErrorMessage( + "Invalid type: try using Literal[{}.{}] instead?", codes.VALID_TYPE +) + +# Type checker error message constants +NO_RETURN_VALUE_EXPECTED: Final = ErrorMessage("No return value expected", codes.RETURN_VALUE) +MISSING_RETURN_STATEMENT: Final = ErrorMessage("Missing return statement", codes.RETURN) +EMPTY_BODY_ABSTRACT: Final = ErrorMessage( + "If the method is meant to be abstract, use @abc.abstractmethod", codes.EMPTY_BODY +) +INVALID_IMPLICIT_RETURN: Final = ErrorMessage("Implicit return in function which does not return") +INCOMPATIBLE_RETURN_VALUE_TYPE: Final = ErrorMessage( + "Incompatible return value type", codes.RETURN_VALUE +) +RETURN_VALUE_EXPECTED: Final = ErrorMessage("Return value expected", codes.RETURN_VALUE) +NO_RETURN_EXPECTED: Final = ErrorMessage("Return statement in function which does not return") +INVALID_EXCEPTION: Final = ErrorMessage("Exception must be derived from BaseException") +INVALID_EXCEPTION_TYPE: Final = ErrorMessage( + "Exception type must be derived from BaseException (or be a tuple of exception classes)" +) +INVALID_EXCEPTION_GROUP: Final = ErrorMessage( + "Exception type in except* cannot derive from BaseExceptionGroup" +) +RETURN_IN_ASYNC_GENERATOR: Final = ErrorMessage( + '"return" with value in async generator is not allowed' +) +INVALID_RETURN_TYPE_FOR_GENERATOR: Final = ErrorMessage( + 'The return type of a generator function should be "Generator" or one of its supertypes' +) +INVALID_RETURN_TYPE_FOR_ASYNC_GENERATOR: Final = ErrorMessage( + 'The return type of an async generator function should be "AsyncGenerator" or one of its ' + "supertypes" +) +YIELD_VALUE_EXPECTED: Final = ErrorMessage("Yield value expected") +INCOMPATIBLE_TYPES: Final = ErrorMessage("Incompatible types") +INCOMPATIBLE_TYPES_IN_ASSIGNMENT: Final = ErrorMessage( + "Incompatible types in assignment", code=codes.ASSIGNMENT +) +COVARIANT_OVERRIDE_OF_MUTABLE_ATTRIBUTE: Final = ErrorMessage( + "Covariant override of a mutable attribute", code=codes.MUTABLE_OVERRIDE +) +INCOMPATIBLE_TYPES_IN_AWAIT: Final = ErrorMessage('Incompatible types in "await"') +INCOMPATIBLE_REDEFINITION: Final = ErrorMessage("Incompatible redefinition") +INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AENTER: Final = ( + 'Incompatible types in "async with" for "__aenter__"' +) +INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AEXIT: Final = ( + 'Incompatible types in "async with" for "__aexit__"' +) +INCOMPATIBLE_TYPES_IN_ASYNC_FOR: Final = 'Incompatible types in "async for"' +INVALID_TYPE_FOR_SLOTS: Final = 'Invalid type for "__slots__"' + +ASYNC_FOR_OUTSIDE_COROUTINE: Final = '"async for" outside async function' +ASYNC_WITH_OUTSIDE_COROUTINE: Final = '"async with" outside async function' + +INCOMPATIBLE_TYPES_IN_YIELD: Final = ErrorMessage('Incompatible types in "yield"') +INCOMPATIBLE_TYPES_IN_YIELD_FROM: Final = ErrorMessage('Incompatible types in "yield from"') +INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION: Final = "Incompatible types in string interpolation" 
+INCOMPATIBLE_TYPES_IN_CAPTURE: Final = ErrorMessage("Incompatible types in capture pattern") +MUST_HAVE_NONE_RETURN_TYPE: Final = ErrorMessage('The return type of "{}" must be None') +TUPLE_INDEX_OUT_OF_RANGE: Final = ErrorMessage("Tuple index out of range") +AMBIGUOUS_SLICE_OF_VARIADIC_TUPLE: Final = ErrorMessage("Ambiguous slice of a variadic tuple") +TOO_MANY_TARGETS_FOR_VARIADIC_UNPACK: Final = ErrorMessage( + "Too many assignment targets for variadic unpack" +) +INVALID_SLICE_INDEX: Final = ErrorMessage("Slice index must be an integer, SupportsIndex or None") +CANNOT_INFER_LAMBDA_TYPE: Final = ErrorMessage("Cannot infer type of lambda") +CANNOT_ACCESS_INIT: Final = ( + 'Accessing "__init__" on an instance is unsound, since instance.__init__ could be from' + " an incompatible subclass" +) +NON_INSTANCE_NEW_TYPE: Final = ErrorMessage('"__new__" must return a class instance (got {})') +INVALID_NEW_TYPE: Final = ErrorMessage('Incompatible return type for "__new__"') +BAD_CONSTRUCTOR_TYPE: Final = ErrorMessage("Unsupported decorated constructor type") +CANNOT_ASSIGN_TO_METHOD: Final = "Cannot assign to a method" +CANNOT_ASSIGN_TO_TYPE: Final = "Cannot assign to a type" +INCONSISTENT_ABSTRACT_OVERLOAD: Final = ErrorMessage( + "Overloaded method has both abstract and non-abstract variants" +) +MULTIPLE_OVERLOADS_REQUIRED: Final = ErrorMessage("Single overload definition, multiple required") +READ_ONLY_PROPERTY_OVERRIDES_READ_WRITE: Final = ErrorMessage( + "Read-only property cannot override read-write property" +) +FORMAT_REQUIRES_MAPPING: Final = "Format requires a mapping" +RETURN_TYPE_CANNOT_BE_CONTRAVARIANT: Final = ErrorMessage( + "Cannot use a contravariant type variable as return type" +) +FUNCTION_PARAMETER_CANNOT_BE_COVARIANT: Final = ErrorMessage( + "Cannot use a covariant type variable as a parameter" +) +INCOMPATIBLE_IMPORT_OF: Final = ErrorMessage('Incompatible import of "{}"', code=codes.ASSIGNMENT) +FUNCTION_TYPE_EXPECTED: Final = ErrorMessage( + "Function is missing a type annotation", codes.NO_UNTYPED_DEF +) +ONLY_CLASS_APPLICATION: Final = ErrorMessage( + "Type application is only supported for generic classes" +) +RETURN_TYPE_EXPECTED: Final = ErrorMessage( + "Function is missing a return type annotation", codes.NO_UNTYPED_DEF +) +ARGUMENT_TYPE_EXPECTED: Final = ErrorMessage( + "Function is missing a type annotation for one or more arguments", codes.NO_UNTYPED_DEF +) +KEYWORD_ARGUMENT_REQUIRES_STR_KEY_TYPE: Final = ErrorMessage( + 'Keyword argument only valid with "str" key type in call to "dict"' +) +ALL_MUST_BE_SEQ_STR: Final = ErrorMessage("Type of __all__ must be {}, not {}") +INVALID_TYPEDDICT_ARGS: Final = ErrorMessage( + "Expected keyword arguments, {...}, or dict(...) 
in TypedDict constructor" +) +TYPEDDICT_KEY_MUST_BE_STRING_LITERAL: Final = ErrorMessage( + "Expected TypedDict key to be string literal" +) +TYPEDDICT_OVERRIDE_MERGE: Final = 'Overwriting TypedDict field "{}" while merging' +MALFORMED_ASSERT: Final = ErrorMessage("Assertion is always true, perhaps remove parentheses?") +DUPLICATE_TYPE_SIGNATURES: Final = ErrorMessage("Function has duplicate type signatures") +DESCRIPTOR_SET_NOT_CALLABLE: Final = ErrorMessage("{}.__set__ is not callable") +DESCRIPTOR_GET_NOT_CALLABLE: Final = "{}.__get__ is not callable" +MODULE_LEVEL_GETATTRIBUTE: Final = ErrorMessage( + "__getattribute__ is not valid at the module level" +) +CLASS_VAR_CONFLICTS_SLOTS: Final = '"{}" in __slots__ conflicts with class variable access' +NAME_NOT_IN_SLOTS: Final = ErrorMessage( + 'Trying to assign name "{}" that is not in "__slots__" of type "{}"' +) +TYPE_ALWAYS_TRUE: Final = ErrorMessage( + "{} which does not implement __bool__ or __len__ " + "so it could always be true in boolean context", + code=codes.TRUTHY_BOOL, +) +TYPE_ALWAYS_TRUE_UNIONTYPE: Final = ErrorMessage( + "{} of which no members implement __bool__ or __len__ " + "so it could always be true in boolean context", + code=codes.TRUTHY_BOOL, +) +FUNCTION_ALWAYS_TRUE: Final = ErrorMessage( + "Function {} could always be true in boolean context", code=codes.TRUTHY_FUNCTION +) +ITERABLE_ALWAYS_TRUE: Final = ErrorMessage( + "{} which can always be true in boolean context. Consider using {} instead.", + code=codes.TRUTHY_ITERABLE, +) +NOT_CALLABLE: Final = "{} not callable" +TYPE_MUST_BE_USED: Final = "Value of type {} must be used" + +# Generic +GENERIC_INSTANCE_VAR_CLASS_ACCESS: Final = ( + "Access to generic instance variables via class is ambiguous" +) +GENERIC_CLASS_VAR_ACCESS: Final = "Access to generic class variables is ambiguous" +BARE_GENERIC: Final = "Missing type parameters for generic type {}" +IMPLICIT_GENERIC_ANY_BUILTIN: Final = ( + 'Implicit generic "Any". 
Use "{}" and specify generic parameters' +) +INVALID_UNPACK: Final = "{} cannot be unpacked (must be tuple or TypeVarTuple)" +INVALID_UNPACK_POSITION: Final = "Unpack is only valid in a variadic position" +INVALID_PARAM_SPEC_LOCATION: Final = "Invalid location for ParamSpec {}" +INVALID_PARAM_SPEC_LOCATION_NOTE: Final = ( + 'You can use ParamSpec as the first argument to Callable, e.g., "Callable[{}, int]"' +) + +# TypeVar +INCOMPATIBLE_TYPEVAR_VALUE: Final = 'Value of type variable "{}" of {} cannot be {}' +INVALID_TYPEVAR_AS_TYPEARG: Final = 'Type variable "{}" not valid as type argument value for "{}"' +INVALID_TYPEVAR_ARG_BOUND: Final = 'Type argument {} of "{}" must be a subtype of {}' +INVALID_TYPEVAR_ARG_VALUE: Final = 'Invalid type argument value for "{}"' +TYPEVAR_VARIANCE_DEF: Final = 'TypeVar "{}" may only be a literal bool' +TYPEVAR_ARG_MUST_BE_TYPE: Final = '{} "{}" must be a type' +TYPEVAR_UNEXPECTED_ARGUMENT: Final = 'Unexpected argument to "TypeVar()"' +UNBOUND_TYPEVAR: Final = ( + "A function returning TypeVar should receive at least one argument containing the same TypeVar" +) +TYPE_PARAMETERS_SHOULD_BE_DECLARED: Final = ( + "All type parameters should be declared ({} not declared)" +) + +# Super +TOO_MANY_ARGS_FOR_SUPER: Final = ErrorMessage('Too many arguments for "super"') +SUPER_WITH_SINGLE_ARG_NOT_SUPPORTED: Final = ErrorMessage( + '"super" with a single argument not supported' +) +UNSUPPORTED_ARG_1_FOR_SUPER: Final = ErrorMessage('Unsupported argument 1 for "super"') +UNSUPPORTED_ARG_2_FOR_SUPER: Final = ErrorMessage('Unsupported argument 2 for "super"') +SUPER_VARARGS_NOT_SUPPORTED: Final = ErrorMessage('Varargs not supported with "super"') +SUPER_POSITIONAL_ARGS_REQUIRED: Final = ErrorMessage('"super" only accepts positional arguments') +SUPER_ARG_2_NOT_INSTANCE_OF_ARG_1: Final = ErrorMessage( + 'Argument 2 for "super" not an instance of argument 1' +) +TARGET_CLASS_HAS_NO_BASE_CLASS: Final = ErrorMessage("Target class has no base class") +SUPER_OUTSIDE_OF_METHOD_NOT_SUPPORTED: Final = ErrorMessage( + '"super()" outside of a method is not supported' +) +SUPER_ENCLOSING_POSITIONAL_ARGS_REQUIRED: Final = ErrorMessage( + '"super()" requires one or two positional arguments in enclosing function' +) + +# Self-type +MISSING_OR_INVALID_SELF_TYPE: Final = ErrorMessage( + "Self argument missing for a non-static method (or an invalid type for self)" +) +ERASED_SELF_TYPE_NOT_SUPERTYPE: Final = ErrorMessage( + 'The erased type of self "{}" is not a supertype of its class "{}"' +) + +# Final +CANNOT_INHERIT_FROM_FINAL: Final = ErrorMessage('Cannot inherit from final class "{}"') +DEPENDENT_FINAL_IN_CLASS_BODY: Final = ErrorMessage( + "Final name declared in class body cannot depend on type variables" +) +CANNOT_ACCESS_FINAL_INSTANCE_ATTR: Final = ( + 'Cannot access final instance attribute "{}" on class object' +) +CANNOT_MAKE_DELETABLE_FINAL: Final = ErrorMessage("Deletable attribute cannot be final") + +# Disjoint bases +INCOMPATIBLE_DISJOINT_BASES: Final = ErrorMessage('Class "{}" has incompatible disjoint bases') + +# Enum +ENUM_MEMBERS_ATTR_WILL_BE_OVERRIDDEN: Final = ErrorMessage( + 'Assigned "__members__" will be overridden by "Enum" internally' +) + +# ClassVar +CANNOT_OVERRIDE_INSTANCE_VAR: Final = ErrorMessage( + 'Cannot override instance variable (previously declared on base class "{}") with class ' + "variable" +) +CANNOT_OVERRIDE_CLASS_VAR: Final = ErrorMessage( + 'Cannot override class variable (previously declared on base class "{}") with instance ' + "variable" 
+) +CLASS_VAR_WITH_GENERIC_SELF: Final = "ClassVar cannot contain Self type in generic classes" +CLASS_VAR_OUTSIDE_OF_CLASS: Final = "ClassVar can only be used for assignments in class body" + +# Protocol +RUNTIME_PROTOCOL_EXPECTED: Final = ErrorMessage( + "Only @runtime_checkable protocols can be used with instance and class checks" +) +CANNOT_INSTANTIATE_PROTOCOL: Final = ErrorMessage('Cannot instantiate protocol class "{}"') +TOO_MANY_UNION_COMBINATIONS: Final = ErrorMessage( + "Not all union combinations were tried because there are too many unions" +) + +CONTIGUOUS_ITERABLE_EXPECTED: Final = ErrorMessage("Contiguous iterable with same type expected") +ITERABLE_TYPE_EXPECTED: Final = ErrorMessage("Invalid type '{}' for *expr (iterable expected)") +TYPE_GUARD_POS_ARG_REQUIRED: Final = ErrorMessage("Type {} requires positional argument") + +# Match Statement +MISSING_MATCH_ARGS: Final = 'Class "{}" doesn\'t define "__match_args__"' +OR_PATTERN_ALTERNATIVE_NAMES: Final = "Alternative patterns bind different names" +CLASS_PATTERN_GENERIC_TYPE_ALIAS: Final = ( + "Class pattern class must not be a type alias with type parameters" +) +CLASS_PATTERN_TYPE_REQUIRED: Final = 'Expected type in class pattern; found "{}"' +CLASS_PATTERN_TOO_MANY_POSITIONAL_ARGS: Final = "Too many positional patterns for class pattern" +CLASS_PATTERN_KEYWORD_MATCHES_POSITIONAL: Final = ( + 'Keyword "{}" already matches a positional pattern' +) +CLASS_PATTERN_DUPLICATE_KEYWORD_PATTERN: Final = 'Duplicate keyword pattern "{}"' +CLASS_PATTERN_UNKNOWN_KEYWORD: Final = 'Class "{}" has no attribute "{}"' +CLASS_PATTERN_CLASS_OR_STATIC_METHOD: Final = "Cannot have both classmethod and staticmethod" +MULTIPLE_ASSIGNMENTS_IN_PATTERN: Final = 'Multiple assignments to name "{}" in pattern' +CANNOT_MODIFY_MATCH_ARGS: Final = 'Cannot assign to "__match_args__"' + +DATACLASS_FIELD_ALIAS_MUST_BE_LITERAL: Final = ( + '"alias" argument to dataclass field must be a string literal' +) +DATACLASS_POST_INIT_MUST_BE_A_FUNCTION: Final = '"__post_init__" method must be an instance method' + +# fastparse +FAILED_TO_MERGE_OVERLOADS: Final = ErrorMessage( + "Condition can't be inferred, unable to merge overloads" +) +TYPE_IGNORE_WITH_ERRCODE_ON_MODULE: Final = ErrorMessage( + "type ignore with error code is not supported for modules; " + 'use `# mypy: disable-error-code="{}"`', + codes.SYNTAX, +) +INVALID_TYPE_IGNORE: Final = ErrorMessage('Invalid "type: ignore" comment', codes.SYNTAX) +TYPE_COMMENT_SYNTAX_ERROR_VALUE: Final = ErrorMessage( + 'Syntax error in type comment "{}"', codes.SYNTAX +) +ELLIPSIS_WITH_OTHER_TYPEARGS: Final = ErrorMessage( + "Ellipses cannot accompany other argument types in function type signature", codes.SYNTAX +) +TYPE_SIGNATURE_TOO_MANY_ARGS: Final = ErrorMessage( + "Type signature has too many arguments", codes.SYNTAX +) +TYPE_SIGNATURE_TOO_FEW_ARGS: Final = ErrorMessage( + "Type signature has too few arguments", codes.SYNTAX +) +ARG_CONSTRUCTOR_NAME_EXPECTED: Final = ErrorMessage("Expected arg constructor name", codes.SYNTAX) +ARG_CONSTRUCTOR_TOO_MANY_ARGS: Final = ErrorMessage( + "Too many arguments for argument constructor", codes.SYNTAX +) +MULTIPLE_VALUES_FOR_NAME_KWARG: Final = ErrorMessage( + '"{}" gets multiple values for keyword argument "name"', codes.SYNTAX +) +MULTIPLE_VALUES_FOR_TYPE_KWARG: Final = ErrorMessage( + '"{}" gets multiple values for keyword argument "type"', codes.SYNTAX +) +ARG_CONSTRUCTOR_UNEXPECTED_ARG: Final = ErrorMessage( + 'Unexpected argument "{}" for argument constructor', 
codes.SYNTAX +) +ARG_NAME_EXPECTED_STRING_LITERAL: Final = ErrorMessage( + "Expected string literal for argument name, got {}", codes.SYNTAX +) +NARROWED_TYPE_NOT_SUBTYPE: Final = ErrorMessage( + "Narrowed type {} is not a subtype of input type {}", codes.NARROWED_TYPE_NOT_SUBTYPE +) +TYPE_VAR_TOO_FEW_CONSTRAINED_TYPES: Final = ErrorMessage( + "Type variable must have at least two constrained types", codes.MISC +) + +TYPE_VAR_YIELD_EXPRESSION_IN_BOUND: Final = ErrorMessage( + "Yield expression cannot be used as a type variable bound", codes.SYNTAX +) + +TYPE_VAR_NAMED_EXPRESSION_IN_BOUND: Final = ErrorMessage( + "Named expression cannot be used as a type variable bound", codes.SYNTAX +) + +TYPE_VAR_AWAIT_EXPRESSION_IN_BOUND: Final = ErrorMessage( + "Await expression cannot be used as a type variable bound", codes.SYNTAX +) + +TYPE_VAR_GENERIC_CONSTRAINT_TYPE: Final = ErrorMessage( + "TypeVar constraint type cannot be parametrized by type variables", codes.MISC +) + +TYPE_VAR_REDECLARED_IN_NESTED_CLASS: Final = ErrorMessage( + 'Type variable "{}" is bound by an outer class', codes.VALID_TYPE +) + +TYPE_ALIAS_WITH_YIELD_EXPRESSION: Final = ErrorMessage( + "Yield expression cannot be used within a type alias", codes.SYNTAX +) + +TYPE_ALIAS_WITH_NAMED_EXPRESSION: Final = ErrorMessage( + "Named expression cannot be used within a type alias", codes.SYNTAX +) + +TYPE_ALIAS_WITH_AWAIT_EXPRESSION: Final = ErrorMessage( + "Await expression cannot be used within a type alias", codes.SYNTAX +) diff --git a/.venv/lib/python3.12/site-packages/mypy/messages.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/messages.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..9c1f2f9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/messages.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/messages.py b/.venv/lib/python3.12/site-packages/mypy/messages.py new file mode 100644 index 0000000..9fdfb74 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/messages.py @@ -0,0 +1,3440 @@ +"""Facilities for generating error messages during type checking. + +Don't add any non-trivial message construction logic to the type +checker, as it can compromise clarity and make messages less +consistent. Add such logic to this module instead. Literal messages, including those +with format args, should be defined as constants in mypy.message_registry. + +Historically we tried to avoid all message string literals in the type +checker but we are moving away from this convention. 
+""" + +from __future__ import annotations + +import difflib +import itertools +import re +from collections.abc import Collection, Iterable, Iterator, Sequence +from contextlib import contextmanager +from textwrap import dedent +from typing import Any, Callable, Final, cast + +import mypy.typeops +from mypy import errorcodes as codes, message_registry +from mypy.erasetype import erase_type +from mypy.errorcodes import ErrorCode +from mypy.errors import ( + ErrorInfo, + Errors, + ErrorWatcher, + IterationDependentErrors, + IterationErrorWatcher, +) +from mypy.nodes import ( + ARG_NAMED, + ARG_NAMED_OPT, + ARG_OPT, + ARG_POS, + ARG_STAR, + ARG_STAR2, + CONTRAVARIANT, + COVARIANT, + SYMBOL_FUNCBASE_TYPES, + ArgKind, + CallExpr, + ClassDef, + Context, + Expression, + FuncDef, + IndexExpr, + MypyFile, + NameExpr, + ReturnStmt, + StrExpr, + SymbolNode, + SymbolTable, + TypeInfo, + Var, + get_func_def, + reverse_builtin_aliases, +) +from mypy.operators import op_methods, op_methods_to_symbols +from mypy.options import Options +from mypy.subtypes import ( + IS_CLASS_OR_STATIC, + IS_CLASSVAR, + IS_EXPLICIT_SETTER, + IS_SETTABLE, + IS_VAR, + find_member, + get_member_flags, + is_same_type, + is_subtype, +) +from mypy.typeops import separate_union_literals +from mypy.types import ( + AnyType, + CallableType, + DeletedType, + FunctionLike, + Instance, + LiteralType, + NoneType, + Overloaded, + Parameters, + ParamSpecType, + PartialType, + ProperType, + TupleType, + Type, + TypeAliasType, + TypedDictType, + TypeOfAny, + TypeStrVisitor, + TypeType, + TypeVarLikeType, + TypeVarTupleType, + TypeVarType, + UnboundType, + UninhabitedType, + UnionType, + UnpackType, + flatten_nested_unions, + get_proper_type, + get_proper_types, +) +from mypy.typetraverser import TypeTraverserVisitor +from mypy.util import plural_s, unmangle + +TYPES_FOR_UNIMPORTED_HINTS: Final = { + "typing.Any", + "typing.Callable", + "typing.Dict", + "typing.Iterable", + "typing.Iterator", + "typing.List", + "typing.Optional", + "typing.Set", + "typing.Tuple", + "typing.TypeVar", + "typing.Union", + "typing.cast", +} + + +ARG_CONSTRUCTOR_NAMES: Final = { + ARG_POS: "Arg", + ARG_OPT: "DefaultArg", + ARG_NAMED: "NamedArg", + ARG_NAMED_OPT: "DefaultNamedArg", + ARG_STAR: "VarArg", + ARG_STAR2: "KwArg", +} + + +# Map from the full name of a missing definition to the test fixture (under +# test-data/unit/fixtures/) that provides the definition. This is used for +# generating better error messages when running mypy tests only. +SUGGESTED_TEST_FIXTURES: Final = { + "builtins.set": "set.pyi", + "builtins.tuple": "tuple.pyi", + "builtins.bool": "bool.pyi", + "builtins.Exception": "exception.pyi", + "builtins.BaseException": "exception.pyi", + "builtins.isinstance": "isinstancelist.pyi", + "builtins.property": "property.pyi", + "builtins.classmethod": "classmethod.pyi", + "typing._SpecialForm": "typing-medium.pyi", +} + +UNSUPPORTED_NUMBERS_TYPES: Final = { + "numbers.Number", + "numbers.Complex", + "numbers.Real", + "numbers.Rational", + "numbers.Integral", +} + +MAX_TUPLE_ITEMS = 10 +MAX_UNION_ITEMS = 10 + + +class MessageBuilder: + """Helper class for reporting type checker error messages with parameters. + + The methods of this class need to be provided with the context within a + file; the errors member manages the wider context. + + IDEA: Support a 'verbose mode' that includes full information about types + in error messages and that may otherwise produce more detailed error + messages. + """ + + # Report errors using this instance. 
It knows about the current file and + # import context. + errors: Errors + + modules: dict[str, MypyFile] + + # Hack to deduplicate error messages from union types + _disable_type_names: list[bool] + + def __init__(self, errors: Errors, modules: dict[str, MypyFile]) -> None: + self.errors = errors + self.options = errors.options + self.modules = modules + self._disable_type_names = [] + + # + # Helpers + # + + def filter_errors( + self, + *, + filter_errors: bool | Callable[[str, ErrorInfo], bool] = True, + save_filtered_errors: bool = False, + filter_deprecated: bool = False, + filter_revealed_type: bool = False, + ) -> ErrorWatcher: + return ErrorWatcher( + self.errors, + filter_errors=filter_errors, + save_filtered_errors=save_filtered_errors, + filter_deprecated=filter_deprecated, + filter_revealed_type=filter_revealed_type, + ) + + def add_errors(self, errors: list[ErrorInfo]) -> None: + """Add errors in messages to this builder.""" + for info in errors: + self.errors.add_error_info(info) + + @contextmanager + def disable_type_names(self) -> Iterator[None]: + self._disable_type_names.append(True) + try: + yield + finally: + self._disable_type_names.pop() + + def are_type_names_disabled(self) -> bool: + return len(self._disable_type_names) > 0 and self._disable_type_names[-1] + + def prefer_simple_messages(self) -> bool: + """Should we generate simple/fast error messages? + + If errors aren't shown to the user, we don't want to waste cycles producing + complex error messages. + """ + return self.errors.prefer_simple_messages() + + def report( + self, + msg: str, + context: Context | None, + severity: str, + *, + code: ErrorCode | None = None, + file: str | None = None, + origin: Context | None = None, + offset: int = 0, + secondary_context: Context | None = None, + parent_error: ErrorInfo | None = None, + ) -> ErrorInfo: + """Report an error or note (unless disabled). + + Note that context controls where error is reported, while origin controls + where # type: ignore comments have effect. + """ + + def span_from_context(ctx: Context) -> Iterable[int]: + """This determines where a type: ignore for a given context has effect. + + Current logic is a bit tricky, to keep as much backwards compatibility as + possible. We may reconsider this to always be a single line (or otherwise + simplify it) when we drop Python 3.7. 
+ + TODO: address this in follow up PR + """ + if isinstance(ctx, (ClassDef, FuncDef)): + return range(ctx.line, ctx.line + 1) + elif not isinstance(ctx, Expression): + return [ctx.line] + else: + return range(ctx.line, (ctx.end_line or ctx.line) + 1) + + origin_span: Iterable[int] | None + if origin is not None: + origin_span = span_from_context(origin) + elif context is not None: + origin_span = span_from_context(context) + else: + origin_span = None + + if secondary_context is not None: + assert origin_span is not None + origin_span = itertools.chain(origin_span, span_from_context(secondary_context)) + + return self.errors.report( + context.line if context else -1, + context.column if context else -1, + msg, + severity=severity, + file=file, + offset=offset, + origin_span=origin_span, + end_line=context.end_line if context else -1, + end_column=context.end_column if context else -1, + code=code, + parent_error=parent_error, + ) + + def fail( + self, + msg: str, + context: Context | None, + *, + code: ErrorCode | None = None, + file: str | None = None, + secondary_context: Context | None = None, + ) -> ErrorInfo: + """Report an error message (unless disabled).""" + return self.report( + msg, context, "error", code=code, file=file, secondary_context=secondary_context + ) + + def note( + self, + msg: str, + context: Context, + file: str | None = None, + origin: Context | None = None, + offset: int = 0, + *, + code: ErrorCode | None = None, + secondary_context: Context | None = None, + parent_error: ErrorInfo | None = None, + ) -> None: + """Report a note (unless disabled).""" + self.report( + msg, + context, + "note", + file=file, + origin=origin, + offset=offset, + code=code, + secondary_context=secondary_context, + parent_error=parent_error, + ) + + def note_multiline( + self, + messages: str, + context: Context, + file: str | None = None, + offset: int = 0, + code: ErrorCode | None = None, + *, + secondary_context: Context | None = None, + ) -> None: + """Report as many notes as lines in the message (unless disabled).""" + for msg in messages.splitlines(): + self.report( + msg, + context, + "note", + file=file, + offset=offset, + code=code, + secondary_context=secondary_context, + ) + + # + # Specific operations + # + + # The following operations are for generating specific error messages. They + # get some information as arguments, and they build an error message based + # on them. + + def has_no_attr( + self, + original_type: Type, + typ: Type, + member: str, + context: Context, + module_symbol_table: SymbolTable | None = None, + ) -> ErrorCode | None: + """Report a missing or non-accessible member. + + original_type is the top-level type on which the error occurred. + typ is the actual type that is missing the member. These can be + different, e.g., in a union, original_type will be the union and typ + will be the specific item in the union that does not have the member + attribute. + + 'module_symbol_table' is passed to this function if the type for which we + are trying to get a member was originally a module. The SymbolTable allows + us to look up and suggests attributes of the module since they are not + directly available on original_type + + If member corresponds to an operator, use the corresponding operator + name in the messages. Return the error code that was produced, if any. 
+ """ + original_type = get_proper_type(original_type) + typ = get_proper_type(typ) + + if isinstance(original_type, Instance) and original_type.type.has_readable_member(member): + self.fail(f'Member "{member}" is not assignable', context) + return None + elif member == "__contains__": + self.fail( + f"Unsupported right operand type for in ({format_type(original_type, self.options)})", + context, + code=codes.OPERATOR, + ) + return codes.OPERATOR + elif member in op_methods.values(): + # Access to a binary operator member (e.g. _add). This case does + # not handle indexing operations. + for op, method in op_methods.items(): + if method == member: + self.unsupported_left_operand(op, original_type, context) + return codes.OPERATOR + elif member == "__neg__": + self.fail( + f"Unsupported operand type for unary - ({format_type(original_type, self.options)})", + context, + code=codes.OPERATOR, + ) + return codes.OPERATOR + elif member == "__pos__": + self.fail( + f"Unsupported operand type for unary + ({format_type(original_type, self.options)})", + context, + code=codes.OPERATOR, + ) + return codes.OPERATOR + elif member == "__invert__": + self.fail( + f"Unsupported operand type for ~ ({format_type(original_type, self.options)})", + context, + code=codes.OPERATOR, + ) + return codes.OPERATOR + elif member == "__getitem__": + # Indexed get. + # TODO: Fix this consistently in format_type + if isinstance(original_type, FunctionLike) and original_type.is_type_obj(): + self.fail( + "The type {} is not generic and not indexable".format( + format_type(original_type, self.options) + ), + context, + ) + return None + else: + self.fail( + f"Value of type {format_type(original_type, self.options)} is not indexable", + context, + code=codes.INDEX, + ) + return codes.INDEX + elif member == "__setitem__": + # Indexed set. + self.fail( + "Unsupported target for indexed assignment ({})".format( + format_type(original_type, self.options) + ), + context, + code=codes.INDEX, + ) + return codes.INDEX + elif member == "__call__": + if isinstance(original_type, Instance) and ( + original_type.type.fullname == "builtins.function" + ): + # "'function' not callable" is a confusing error message. + # Explain that the problem is that the type of the function is not known. + self.fail("Cannot call function of unknown type", context, code=codes.OPERATOR) + return codes.OPERATOR + else: + self.fail( + message_registry.NOT_CALLABLE.format(format_type(original_type, self.options)), + context, + code=codes.OPERATOR, + ) + return codes.OPERATOR + else: + # The non-special case: a missing ordinary attribute. + extra = "" + if member == "__iter__": + extra = " (not iterable)" + elif member == "__aiter__": + extra = " (not async iterable)" + if not self.are_type_names_disabled(): + failed = False + if isinstance(original_type, Instance) and original_type.type.names: + if ( + module_symbol_table is not None + and member in module_symbol_table + and not module_symbol_table[member].module_public + ): + self.fail( + f"{format_type(original_type, self.options, module_names=True)} does not " + f'explicitly export attribute "{member}"', + context, + code=codes.ATTR_DEFINED, + ) + failed = True + else: + alternatives = set(original_type.type.names.keys()) + if module_symbol_table is not None: + alternatives |= { + k for k, v in module_symbol_table.items() if v.module_public + } + # Rare but possible, see e.g. 
testNewAnalyzerCyclicDefinitionCrossModule + alternatives.discard(member) + + matches = [m for m in COMMON_MISTAKES.get(member, []) if m in alternatives] + matches.extend(best_matches(member, alternatives, n=3)) + if member == "__aiter__" and matches == ["__iter__"]: + matches = [] # Avoid misleading suggestion + if matches: + self.fail( + '{} has no attribute "{}"; maybe {}?{}'.format( + format_type(original_type, self.options), + member, + pretty_seq(matches, "or"), + extra, + ), + context, + code=codes.ATTR_DEFINED, + ) + failed = True + if not failed: + self.fail( + '{} has no attribute "{}"{}'.format( + format_type(original_type, self.options), member, extra + ), + context, + code=codes.ATTR_DEFINED, + ) + return codes.ATTR_DEFINED + elif isinstance(original_type, UnionType): + # The checker passes "object" in lieu of "None" for attribute + # checks, so we manually convert it back. + typ_format, orig_type_format = format_type_distinctly( + typ, original_type, options=self.options + ) + if typ_format == '"object"' and any( + type(item) == NoneType for item in original_type.items + ): + typ_format = '"None"' + self.fail( + 'Item {} of {} has no attribute "{}"{}'.format( + typ_format, orig_type_format, member, extra + ), + context, + code=codes.UNION_ATTR, + ) + return codes.UNION_ATTR + elif isinstance(original_type, TypeVarType): + bound = get_proper_type(original_type.upper_bound) + if isinstance(bound, UnionType): + typ_fmt, bound_fmt = format_type_distinctly(typ, bound, options=self.options) + original_type_fmt = format_type(original_type, self.options) + self.fail( + "Item {} of the upper bound {} of type variable {} has no " + 'attribute "{}"{}'.format( + typ_fmt, bound_fmt, original_type_fmt, member, extra + ), + context, + code=codes.UNION_ATTR, + ) + return codes.UNION_ATTR + else: + self.fail( + '{} has no attribute "{}"{}'.format( + format_type(original_type, self.options), member, extra + ), + context, + code=codes.ATTR_DEFINED, + ) + return codes.ATTR_DEFINED + return None + + def unsupported_operand_types( + self, + op: str, + left_type: Any, + right_type: Any, + context: Context, + *, + code: ErrorCode = codes.OPERATOR, + ) -> ErrorInfo: + """Report unsupported operand types for a binary operation. + + Types can be Type objects or strings. 
+ """ + left_str = "" + if isinstance(left_type, str): + left_str = left_type + else: + left_str = format_type(left_type, self.options) + + right_str = "" + if isinstance(right_type, str): + right_str = right_type + else: + right_str = format_type(right_type, self.options) + + if self.are_type_names_disabled(): + msg = f"Unsupported operand types for {op} (likely involving Union)" + else: + msg = f"Unsupported operand types for {op} ({left_str} and {right_str})" + return self.fail(msg, context, code=code) + + def unsupported_left_operand(self, op: str, typ: Type, context: Context) -> None: + if self.are_type_names_disabled(): + msg = f"Unsupported left operand type for {op} (some union)" + else: + msg = f"Unsupported left operand type for {op} ({format_type(typ, self.options)})" + self.fail(msg, context, code=codes.OPERATOR) + + def not_callable(self, typ: Type, context: Context) -> Type: + self.fail(message_registry.NOT_CALLABLE.format(format_type(typ, self.options)), context) + return AnyType(TypeOfAny.from_error) + + def untyped_function_call(self, callee: CallableType, context: Context) -> Type: + name = callable_name(callee) or "(unknown)" + self.fail( + f"Call to untyped function {name} in typed context", + context, + code=codes.NO_UNTYPED_CALL, + ) + return AnyType(TypeOfAny.from_error) + + def incompatible_argument( + self, + n: int, + m: int, + callee: CallableType, + arg_type: Type, + arg_kind: ArgKind, + object_type: Type | None, + context: Context, + outer_context: Context, + ) -> ErrorInfo: + """Report an error about an incompatible argument type. + + The argument type is arg_type, argument number is n and the + callee type is 'callee'. If the callee represents a method + that corresponds to an operator, use the corresponding + operator name in the messages. + + Return the error code that used for the argument (multiple error + codes are possible). + """ + arg_type = get_proper_type(arg_type) + + target = "" + callee_name = callable_name(callee) + if callee_name is not None: + name = callee_name + if object_type is not None: + base = format_type(object_type, self.options) + else: + base = extract_type(name) + + if name.startswith('"__getitem__" of'): + return self.invalid_index_type( + arg_type, callee.arg_types[n - 1], base, context, code=codes.INDEX + ) + elif name.startswith('"__setitem__" of'): + if n == 1: + return self.invalid_index_type( + arg_type, callee.arg_types[n - 1], base, context, code=codes.INDEX + ) + else: + arg_type_str, callee_type_str = format_type_distinctly( + arg_type, callee.arg_types[n - 1], options=self.options + ) + info = ( + f" (expression has type {arg_type_str}, target has type {callee_type_str})" + ) + error_msg = ( + message_registry.INCOMPATIBLE_TYPES_IN_ASSIGNMENT.with_additional_msg(info) + ) + return self.fail(error_msg.value, context, code=error_msg.code) + elif name.startswith('"__'): + for method, op in op_methods_to_symbols.items(): + for variant in method, "__r" + method[2:]: + # FIX: do not rely on textual formatting + if name.startswith(f'"{variant}" of'): + if op == "in" or variant != method: + # Reversed order of base/argument. 
+                                return self.unsupported_operand_types(
+                                    op, arg_type, base, context, code=codes.OPERATOR
+                                )
+                            else:
+                                return self.unsupported_operand_types(
+                                    op, base, arg_type, context, code=codes.OPERATOR
+                                )
+
+            target = f"to {name} "
+
+        msg = ""
+        code = codes.MISC
+        notes: list[str] = []
+        if callee_name == "<list>":
+            name = callee_name[1:-1]
+            n -= 1
+            actual_type_str, expected_type_str = format_type_distinctly(
+                arg_type, callee.arg_types[0], options=self.options
+            )
+            msg = "{} item {} has incompatible type {}; expected {}".format(
+                name.title(), n, actual_type_str, expected_type_str
+            )
+            code = codes.LIST_ITEM
+        elif callee_name == "<dict>" and isinstance(
+            get_proper_type(callee.arg_types[n - 1]), TupleType
+        ):
+            name = callee_name[1:-1]
+            n -= 1
+            key_type, value_type = cast(TupleType, arg_type).items
+            expected_key_type, expected_value_type = cast(TupleType, callee.arg_types[n]).items
+
+            # don't increase verbosity unless there is need to do so
+            if is_subtype(key_type, expected_key_type):
+                key_type_str = format_type(key_type, self.options)
+                expected_key_type_str = format_type(expected_key_type, self.options)
+            else:
+                key_type_str, expected_key_type_str = format_type_distinctly(
+                    key_type, expected_key_type, options=self.options
+                )
+            if is_subtype(value_type, expected_value_type):
+                value_type_str = format_type(value_type, self.options)
+                expected_value_type_str = format_type(expected_value_type, self.options)
+            else:
+                value_type_str, expected_value_type_str = format_type_distinctly(
+                    value_type, expected_value_type, options=self.options
+                )
+
+            msg = "{} entry {} has incompatible type {}: {}; expected {}: {}".format(
+                name.title(),
+                n,
+                key_type_str,
+                value_type_str,
+                expected_key_type_str,
+                expected_value_type_str,
+            )
+            code = codes.DICT_ITEM
+        elif callee_name == "<dict>":
+            value_type_str, expected_value_type_str = format_type_distinctly(
+                arg_type, callee.arg_types[n - 1], options=self.options
+            )
+            msg = "Unpacked dict entry {} has incompatible type {}; expected {}".format(
+                n - 1, value_type_str, expected_value_type_str
+            )
+            code = codes.DICT_ITEM
+        elif callee_name == "<list-comprehension>":
+            actual_type_str, expected_type_str = map(
+                strip_quotes,
+                format_type_distinctly(arg_type, callee.arg_types[0], options=self.options),
+            )
+            msg = "List comprehension has incompatible type List[{}]; expected List[{}]".format(
+                actual_type_str, expected_type_str
+            )
+        elif callee_name == "<set-comprehension>":
+            actual_type_str, expected_type_str = map(
+                strip_quotes,
+                format_type_distinctly(arg_type, callee.arg_types[0], options=self.options),
+            )
+            msg = "Set comprehension has incompatible type Set[{}]; expected Set[{}]".format(
+                actual_type_str, expected_type_str
+            )
+        elif callee_name == "<dictionary-comprehension>":
+            actual_type_str, expected_type_str = format_type_distinctly(
+                arg_type, callee.arg_types[n - 1], options=self.options
+            )
+            msg = (
+                "{} expression in dictionary comprehension has incompatible type {}; "
+                "expected type {}"
+            ).format("Key" if n == 1 else "Value", actual_type_str, expected_type_str)
+        elif callee_name == "<generator>":
+            actual_type_str, expected_type_str = format_type_distinctly(
+                arg_type, callee.arg_types[0], options=self.options
+            )
+            msg = "Generator has incompatible item type {}; expected {}".format(
+                actual_type_str, expected_type_str
+            )
+        else:
+            if self.prefer_simple_messages():
+                msg = "Argument has incompatible type"
+            else:
+                try:
+                    expected_type = callee.arg_types[m - 1]
+                except IndexError:  # Varargs callees
+                    expected_type = callee.arg_types[-1]
+                arg_type_str, expected_type_str =
format_type_distinctly( + arg_type, expected_type, bare=True, options=self.options + ) + if arg_kind == ARG_STAR: + arg_type_str = "*" + arg_type_str + elif arg_kind == ARG_STAR2: + arg_type_str = "**" + arg_type_str + + # For function calls with keyword arguments, display the argument name rather + # than the number. + arg_label = str(n) + if isinstance(outer_context, CallExpr) and len(outer_context.arg_names) >= n: + arg_name = outer_context.arg_names[n - 1] + if arg_name is not None: + arg_label = f'"{arg_name}"' + if ( + arg_kind == ARG_STAR2 + and isinstance(arg_type, TypedDictType) + and m <= len(callee.arg_names) + and callee.arg_names[m - 1] is not None + and callee.arg_kinds[m - 1] != ARG_STAR2 + ): + arg_name = callee.arg_names[m - 1] + assert arg_name is not None + arg_type_str, expected_type_str = format_type_distinctly( + arg_type.items[arg_name], expected_type, bare=True, options=self.options + ) + arg_label = f'"{arg_name}"' + if isinstance(outer_context, IndexExpr) and isinstance( + outer_context.index, StrExpr + ): + msg = 'Value of "{}" has incompatible type {}; expected {}'.format( + outer_context.index.value, + quote_type_string(arg_type_str), + quote_type_string(expected_type_str), + ) + else: + msg = "Argument {} {}has incompatible type {}; expected {}".format( + arg_label, + target, + quote_type_string(arg_type_str), + quote_type_string(expected_type_str), + ) + expected_type = get_proper_type(expected_type) + if isinstance(expected_type, UnionType): + expected_types = list(expected_type.items) + else: + expected_types = [expected_type] + for type in get_proper_types(expected_types): + if isinstance(arg_type, Instance) and isinstance(type, Instance): + notes = append_invariance_notes(notes, arg_type, type) + notes = append_numbers_notes(notes, arg_type, type) + object_type = get_proper_type(object_type) + if isinstance(object_type, TypedDictType): + code = codes.TYPEDDICT_ITEM + else: + code = codes.ARG_TYPE + error = self.fail(msg, context, code=code) + if notes: + for note_msg in notes: + self.note(note_msg, context, code=code) + return error + + def incompatible_argument_note( + self, + original_caller_type: ProperType, + callee_type: ProperType, + context: Context, + parent_error: ErrorInfo, + ) -> None: + if self.prefer_simple_messages(): + return + if isinstance( + original_caller_type, (Instance, TupleType, TypedDictType, TypeType, CallableType) + ): + if isinstance(callee_type, Instance) and callee_type.type.is_protocol: + self.report_protocol_problems( + original_caller_type, callee_type, context, parent_error=parent_error + ) + if isinstance(callee_type, UnionType): + for item in callee_type.items: + item = get_proper_type(item) + if isinstance(item, Instance) and item.type.is_protocol: + self.report_protocol_problems( + original_caller_type, item, context, parent_error=parent_error + ) + if isinstance(callee_type, CallableType) and isinstance(original_caller_type, Instance): + call = find_member( + "__call__", original_caller_type, original_caller_type, is_operator=True + ) + if call: + self.note_call(original_caller_type, call, context, code=parent_error.code) + if isinstance(callee_type, Instance) and callee_type.type.is_protocol: + call = find_member("__call__", callee_type, callee_type, is_operator=True) + if call: + self.note_call(callee_type, call, context, code=parent_error.code) + self.maybe_note_concatenate_pos_args( + original_caller_type, callee_type, context, parent_error.code + ) + + def maybe_note_concatenate_pos_args( + self, + 
original_caller_type: ProperType, + callee_type: ProperType, + context: Context, + code: ErrorCode | None = None, + ) -> None: + # pos-only vs positional can be confusing, with Concatenate + if ( + isinstance(callee_type, CallableType) + and isinstance(original_caller_type, CallableType) + and (original_caller_type.from_concatenate or callee_type.from_concatenate) + ): + names: list[str] = [] + for c, o in zip( + callee_type.formal_arguments(), original_caller_type.formal_arguments() + ): + if None in (c.pos, o.pos): + # non-positional + continue + if c.name != o.name and c.name is None and o.name is not None: + names.append(o.name) + + if names: + missing_arguments = '"' + '", "'.join(names) + '"' + self.note( + f'This is likely because "{original_caller_type.name}" has named arguments: ' + f"{missing_arguments}. Consider marking them positional-only", + context, + code=code, + ) + + def invalid_index_type( + self, + index_type: Type, + expected_type: Type, + base_str: str, + context: Context, + *, + code: ErrorCode, + ) -> ErrorInfo: + index_str, expected_str = format_type_distinctly( + index_type, expected_type, options=self.options + ) + return self.fail( + "Invalid index type {} for {}; expected type {}".format( + index_str, base_str, expected_str + ), + context, + code=code, + ) + + def readonly_keys_mutated(self, keys: set[str], context: Context) -> None: + if len(keys) == 1: + suffix = "is" + else: + suffix = "are" + self.fail( + "ReadOnly {} TypedDict {} mutated".format(format_key_list(sorted(keys)), suffix), + code=codes.TYPEDDICT_READONLY_MUTATED, + context=context, + ) + + def too_few_arguments( + self, callee: CallableType, context: Context, argument_names: Sequence[str | None] | None + ) -> None: + if self.prefer_simple_messages(): + msg = "Too few arguments" + elif argument_names is not None: + num_positional_args = sum(k is None for k in argument_names) + arguments_left = callee.arg_names[num_positional_args : callee.min_args] + diff = [k for k in arguments_left if k not in argument_names] + if len(diff) == 1: + msg = "Missing positional argument" + else: + msg = "Missing positional arguments" + callee_name = callable_name(callee) + if callee_name is not None and diff and all(d is not None for d in diff): + args = '", "'.join(cast(list[str], diff)) + msg += f' "{args}" in call to {callee_name}' + else: + msg = "Too few arguments" + for_function(callee) + + else: + msg = "Too few arguments" + for_function(callee) + self.fail(msg, context, code=codes.CALL_ARG) + + def missing_named_argument(self, callee: CallableType, context: Context, name: str) -> None: + msg = f'Missing named argument "{name}"' + for_function(callee) + self.fail(msg, context, code=codes.CALL_ARG) + + def too_many_arguments(self, callee: CallableType, context: Context) -> None: + if self.prefer_simple_messages(): + msg = "Too many arguments" + else: + msg = "Too many arguments" + for_function(callee) + self.fail(msg, context, code=codes.CALL_ARG) + self.maybe_note_about_special_args(callee, context) + + def too_many_arguments_from_typed_dict( + self, callee: CallableType, arg_type: TypedDictType, context: Context + ) -> None: + # Try to determine the name of the extra argument. 
+ for key in arg_type.items: + if key not in callee.arg_names: + msg = f'Extra argument "{key}" from **args' + for_function(callee) + break + else: + self.too_many_arguments(callee, context) + return + self.fail(msg, context) + + def too_many_positional_arguments(self, callee: CallableType, context: Context) -> None: + if self.prefer_simple_messages(): + msg = "Too many positional arguments" + else: + msg = "Too many positional arguments" + for_function(callee) + self.fail(msg, context) + self.maybe_note_about_special_args(callee, context) + + def maybe_note_about_special_args(self, callee: CallableType, context: Context) -> None: + if self.prefer_simple_messages(): + return + # https://github.com/python/mypy/issues/11309 + first_arg = get_first_arg(callee) + if first_arg and first_arg not in {"self", "cls", "mcs"}: + self.note( + "Looks like the first special argument in a method " + 'is not named "self", "cls", or "mcs", ' + "maybe it is missing?", + context, + ) + + def unexpected_keyword_argument_for_function( + self, for_func: str, name: str, context: Context, *, matches: list[str] | None = None + ) -> None: + msg = f'Unexpected keyword argument "{name}"' + for_func + if matches: + msg += f"; did you mean {pretty_seq(matches, 'or')}?" + self.fail(msg, context, code=codes.CALL_ARG) + + def unexpected_keyword_argument( + self, callee: CallableType, name: str, arg_type: Type, context: Context + ) -> None: + # Suggest intended keyword, look for type match else fallback on any match. + matching_type_args = [] + not_matching_type_args = [] + for i, kwarg_type in enumerate(callee.arg_types): + callee_arg_name = callee.arg_names[i] + if callee_arg_name is not None and callee.arg_kinds[i] != ARG_STAR: + if is_subtype(arg_type, kwarg_type): + matching_type_args.append(callee_arg_name) + else: + not_matching_type_args.append(callee_arg_name) + matches = best_matches(name, matching_type_args, n=3) + if not matches: + matches = best_matches(name, not_matching_type_args, n=3) + self.unexpected_keyword_argument_for_function( + for_function(callee), name, context, matches=matches + ) + module = find_defining_module(self.modules, callee) + if module: + assert callee.definition is not None + fname = callable_name(callee) + if not fname: # an alias to function with a different name + fname = "Called function" + self.note( + f"{fname} defined here", + callee.definition, + file=module.path, + origin=context, + code=codes.CALL_ARG, + ) + + def duplicate_argument_value(self, callee: CallableType, index: int, context: Context) -> None: + self.fail( + '{} gets multiple values for keyword argument "{}"'.format( + callable_name(callee) or "Function", callee.arg_names[index] + ), + context, + ) + + def does_not_return_value(self, callee_type: Type | None, context: Context) -> None: + """Report an error about use of an unusable type.""" + callee_type = get_proper_type(callee_type) + callee_name = callable_name(callee_type) if isinstance(callee_type, FunctionLike) else None + name = callee_name or "Function" + message = f"{name} does not return a value (it only ever returns None)" + self.fail(message, context, code=codes.FUNC_RETURNS_VALUE) + + def deleted_as_rvalue(self, typ: DeletedType, context: Context) -> None: + """Report an error about using an deleted type as an rvalue.""" + if typ.source is None: + s = "" + else: + s = f' "{typ.source}"' + self.fail(f"Trying to read deleted variable{s}", context) + + def deleted_as_lvalue(self, typ: DeletedType, context: Context) -> None: + """Report an error about using 
an deleted type as an lvalue. + + Currently, this only occurs when trying to assign to an + exception variable outside the local except: blocks. + """ + if typ.source is None: + s = "" + else: + s = f' "{typ.source}"' + self.fail(f"Assignment to variable{s} outside except: block", context) + + def no_variant_matches_arguments( + self, + overload: Overloaded, + arg_types: list[Type], + context: Context, + *, + code: ErrorCode | None = None, + ) -> None: + code = code or codes.CALL_OVERLOAD + name = callable_name(overload) + if name: + name_str = f" of {name}" + else: + name_str = "" + arg_types_str = ", ".join(format_type(arg, self.options) for arg in arg_types) + num_args = len(arg_types) + if num_args == 0: + self.fail( + f"All overload variants{name_str} require at least one argument", + context, + code=code, + ) + elif num_args == 1: + self.fail( + f"No overload variant{name_str} matches argument type {arg_types_str}", + context, + code=code, + ) + else: + self.fail( + f"No overload variant{name_str} matches argument types {arg_types_str}", + context, + code=code, + ) + + self.note(f"Possible overload variant{plural_s(len(overload.items))}:", context, code=code) + for item in overload.items: + self.note(pretty_callable(item, self.options), context, offset=4, code=code) + + def wrong_number_values_to_unpack( + self, provided: int, expected: int, context: Context + ) -> None: + if provided < expected: + if provided == 1: + self.fail(f"Need more than 1 value to unpack ({expected} expected)", context) + else: + self.fail( + f"Need more than {provided} values to unpack ({expected} expected)", context + ) + elif provided > expected: + self.fail( + f"Too many values to unpack ({expected} expected, {provided} provided)", context + ) + + def unpacking_strings_disallowed(self, context: Context) -> None: + self.fail("Unpacking a string is disallowed", context) + + def type_not_iterable(self, type: Type, context: Context) -> None: + self.fail(f"{format_type(type, self.options)} object is not iterable", context) + + def possible_missing_await(self, context: Context, code: ErrorCode | None) -> None: + self.note('Maybe you forgot to use "await"?', context, code=code) + + def incompatible_operator_assignment(self, op: str, context: Context) -> None: + self.fail(f"Result type of {op} incompatible in assignment", context) + + def overload_signature_incompatible_with_supertype( + self, name: str, name_in_super: str, supertype: str, context: Context + ) -> None: + target = self.override_target(name, name_in_super, supertype) + self.fail( + f'Signature of "{name}" incompatible with {target}', context, code=codes.OVERRIDE + ) + + note_template = 'Overload variants must be defined in the same order as they are in "{}"' + self.note(note_template.format(supertype), context, code=codes.OVERRIDE) + + def incompatible_setter_override( + self, defn: Context, typ: Type, original_type: Type, base: TypeInfo + ) -> None: + self.fail("Incompatible override of a setter type", defn, code=codes.OVERRIDE) + base_str, override_str = format_type_distinctly(original_type, typ, options=self.options) + self.note( + f' (base class "{base.name}" defined the type as {base_str},', + defn, + code=codes.OVERRIDE, + ) + self.note(f" override has type {override_str})", defn, code=codes.OVERRIDE) + if is_subtype(typ, original_type): + self.note(" Setter types should behave contravariantly", defn, code=codes.OVERRIDE) + + def signature_incompatible_with_supertype( + self, + name: str, + name_in_super: str, + supertype: str, + context: 
Context, + *, + original: ProperType, + override: ProperType, + ) -> None: + target = self.override_target(name, name_in_super, supertype) + error = self.fail( + f'Signature of "{name}" incompatible with {target}', context, code=codes.OVERRIDE + ) + + original_str, override_str = format_type_distinctly( + original, override, options=self.options, bare=True + ) + + INCLUDE_DECORATOR = True # Include @classmethod and @staticmethod decorators, if any + ALIGN_OFFSET = 1 # One space, to account for the difference between error and note + OFFSET = 4 # Four spaces, so that notes will look like this: + # error: Signature of "f" incompatible with supertype "A" + # note: Superclass: + # note: def f(self) -> str + # note: Subclass: + # note: def f(self, x: str) -> None + self.note("Superclass:", context, offset=ALIGN_OFFSET + OFFSET, parent_error=error) + if isinstance(original, (CallableType, Overloaded)): + self.pretty_callable_or_overload( + original, + context, + offset=ALIGN_OFFSET + 2 * OFFSET, + add_class_or_static_decorator=INCLUDE_DECORATOR, + parent_error=error, + ) + else: + self.note(original_str, context, offset=ALIGN_OFFSET + 2 * OFFSET, parent_error=error) + + self.note("Subclass:", context, offset=ALIGN_OFFSET + OFFSET, parent_error=error) + if isinstance(override, (CallableType, Overloaded)): + self.pretty_callable_or_overload( + override, + context, + offset=ALIGN_OFFSET + 2 * OFFSET, + add_class_or_static_decorator=INCLUDE_DECORATOR, + parent_error=error, + ) + else: + self.note(override_str, context, offset=ALIGN_OFFSET + 2 * OFFSET, parent_error=error) + + def pretty_callable_or_overload( + self, + tp: CallableType | Overloaded, + context: Context, + *, + parent_error: ErrorInfo, + offset: int = 0, + add_class_or_static_decorator: bool = False, + ) -> None: + if isinstance(tp, CallableType): + if add_class_or_static_decorator: + decorator = pretty_class_or_static_decorator(tp) + if decorator is not None: + self.note(decorator, context, offset=offset, parent_error=parent_error) + self.note( + pretty_callable(tp, self.options), + context, + offset=offset, + parent_error=parent_error, + ) + elif isinstance(tp, Overloaded): + self.pretty_overload( + tp, + context, + offset, + add_class_or_static_decorator=add_class_or_static_decorator, + parent_error=parent_error, + ) + + def argument_incompatible_with_supertype( + self, + arg_num: int, + name: str, + type_name: str | None, + name_in_supertype: str, + arg_type_in_supertype: Type, + supertype: str, + context: Context, + secondary_context: Context, + ) -> None: + target = self.override_target(name, name_in_supertype, supertype) + arg_type_in_supertype_f = format_type_bare(arg_type_in_supertype, self.options) + self.fail( + 'Argument {} of "{}" is incompatible with {}; ' + 'supertype defines the argument type as "{}"'.format( + arg_num, name, target, arg_type_in_supertype_f + ), + context, + code=codes.OVERRIDE, + secondary_context=secondary_context, + ) + if name != "__post_init__": + # `__post_init__` is special, it can be incompatible by design. + # So, this note is misleading. 
+            self.note(
+                "This violates the Liskov substitution principle",
+                context,
+                code=codes.OVERRIDE,
+                secondary_context=secondary_context,
+            )
+            self.note(
+                "See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides",
+                context,
+                code=codes.OVERRIDE,
+                secondary_context=secondary_context,
+            )
+
+        if name == "__eq__" and type_name:
+            multiline_msg = self.comparison_method_example_msg(class_name=type_name)
+            self.note_multiline(
+                multiline_msg, context, code=codes.OVERRIDE, secondary_context=secondary_context
+            )
+
+    def comparison_method_example_msg(self, class_name: str) -> str:
+        return dedent(
+            """\
+            It is recommended for "__eq__" to work with arbitrary objects, for example:
+                def __eq__(self, other: object) -> bool:
+                    if not isinstance(other, {class_name}):
+                        return NotImplemented
+                    return <logic to compare two {class_name} instances>
+            """.format(
+                class_name=class_name
+            )
+        )
+
+    def return_type_incompatible_with_supertype(
+        self,
+        name: str,
+        name_in_supertype: str,
+        supertype: str,
+        original: Type,
+        override: Type,
+        context: Context,
+    ) -> None:
+        target = self.override_target(name, name_in_supertype, supertype)
+        override_str, original_str = format_type_distinctly(
+            override, original, options=self.options
+        )
+        self.fail(
+            'Return type {} of "{}" incompatible with return type {} in {}'.format(
+                override_str, name, original_str, target
+            ),
+            context,
+            code=codes.OVERRIDE,
+        )
+
+        original = get_proper_type(original)
+        override = get_proper_type(override)
+        if (
+            isinstance(original, Instance)
+            and isinstance(override, Instance)
+            and override.type.fullname == "typing.AsyncIterator"
+            and original.type.fullname == "typing.Coroutine"
+            and len(original.args) == 3
+            and original.args[2] == override
+        ):
+            self.note(f'Consider declaring "{name}" in {target} without "async"', context)
+            self.note(
+                "See https://mypy.readthedocs.io/en/stable/more_types.html#asynchronous-iterators",
+                context,
+            )
+
+    def override_target(self, name: str, name_in_super: str, supertype: str) -> str:
+        target = f'supertype "{supertype}"'
+        if name_in_super != name:
+            target = f'"{name_in_super}" of {target}'
+        return target
+
+    def incompatible_type_application(
+        self, min_arg_count: int, max_arg_count: int, actual_arg_count: int, context: Context
+    ) -> None:
+        if max_arg_count == 0:
+            self.fail("Type application targets a non-generic function or class", context)
+            return
+
+        if min_arg_count == max_arg_count:
+            s = f"{max_arg_count} expected"
+        else:
+            s = f"expected between {min_arg_count} and {max_arg_count}"
+
+        if actual_arg_count > max_arg_count:
+            self.fail(f"Type application has too many types ({s})", context)
+        else:
+            self.fail(f"Type application has too few types ({s})", context)
+
+    def could_not_infer_type_arguments(
+        self, callee_type: CallableType, tv: TypeVarLikeType, context: Context
+    ) -> None:
+        callee_name = callable_name(callee_type)
+        if callee_name is not None:
+            self.fail(
+                f"Cannot infer value of type parameter {format_type(tv, self.options)} of {callee_name}",
+                context,
+            )
+            if callee_name == "<dict>":
+                # Invariance in key type causes more of these errors than we would want.
+                self.note(
+                    "Try assigning the literal to a variable annotated as dict[<key>, <value>]",
+                    context,
+                )
+        else:
+            self.fail("Cannot infer function type argument", context)
+
+    def invalid_var_arg(self, typ: Type, context: Context) -> None:
+        self.fail("Expected iterable as variadic argument", context)
+
+    def invalid_keyword_var_arg(self, typ: Type, is_mapping: bool, context: Context) -> None:
+        typ = get_proper_type(typ)
+        if isinstance(typ, Instance) and is_mapping:
+            self.fail("Keywords must be strings", context)
+        else:
+            self.fail(
+                f"Argument after ** must be a mapping, not {format_type(typ, self.options)}",
+                context,
+                code=codes.ARG_TYPE,
+            )
+
+    def undefined_in_superclass(self, member: str, context: Context) -> None:
+        self.fail(f'"{member}" undefined in superclass', context)
+
+    def variable_may_be_undefined(self, name: str, context: Context) -> None:
+        self.fail(f'Name "{name}" may be undefined', context, code=codes.POSSIBLY_UNDEFINED)
+
+    def var_used_before_def(self, name: str, context: Context) -> None:
+        self.fail(f'Name "{name}" is used before definition', context, code=codes.USED_BEFORE_DEF)
+
+    def first_argument_for_super_must_be_type(self, actual: Type, context: Context) -> None:
+        actual = get_proper_type(actual)
+        if isinstance(actual, Instance):
+            # Don't include type of instance, because it can look confusingly like a type
+            # object.
+            type_str = "a non-type instance"
+        else:
+            type_str = format_type(actual, self.options)
+        self.fail(
+            f'Argument 1 for "super" must be a type object; got {type_str}',
+            context,
+            code=codes.ARG_TYPE,
+        )
+
+    def unsafe_super(self, method: str, cls: str, ctx: Context) -> None:
+        self.fail(
+            f'Call to abstract method "{method}" of "{cls}" with trivial body via super() is unsafe',
+            ctx,
+            code=codes.SAFE_SUPER,
+        )
+
+    def too_few_string_formatting_arguments(self, context: Context) -> None:
+        self.fail("Not enough arguments for format string", context, code=codes.STRING_FORMATTING)
+
+    def too_many_string_formatting_arguments(self, context: Context) -> None:
+        self.fail(
+            "Not all arguments converted during string formatting",
+            context,
+            code=codes.STRING_FORMATTING,
+        )
+
+    def unsupported_placeholder(self, placeholder: str, context: Context) -> None:
+        self.fail(
+            f'Unsupported format character "{placeholder}"', context, code=codes.STRING_FORMATTING
+        )
+
+    def string_interpolation_with_star_and_key(self, context: Context) -> None:
+        self.fail(
+            "String interpolation contains both stars and mapping keys",
+            context,
+            code=codes.STRING_FORMATTING,
+        )
+
+    def requires_int_or_single_byte(self, context: Context, format_call: bool = False) -> None:
+        self.fail(
+            '"{}c" requires an integer in range(256) or a single byte'.format(
+                ":" if format_call else "%"
+            ),
+            context,
+            code=codes.STRING_FORMATTING,
+        )
+
+    def requires_int_or_char(self, context: Context, format_call: bool = False) -> None:
+        self.fail(
+            '"{}c" requires int or char'.format(":" if format_call else "%"),
+            context,
+            code=codes.STRING_FORMATTING,
+        )
+
+    def key_not_in_mapping(self, key: str, context: Context) -> None:
+        self.fail(f'Key "{key}" not found in mapping', context, code=codes.STRING_FORMATTING)
+
+    def string_interpolation_mixing_key_and_non_keys(self, context: Context) -> None:
+        self.fail(
+            "String interpolation mixes specifier with and without mapping keys",
+            context,
+            code=codes.STRING_FORMATTING,
+        )
+
+    def cannot_determine_type(self, name: str, context: Context) -> None:
+        self.fail(f'Cannot determine type of "{name}"', context,
code=codes.HAS_TYPE) + + def cannot_determine_type_in_base(self, name: str, base: str, context: Context) -> None: + self.fail(f'Cannot determine type of "{name}" in base class "{base}"', context) + + def no_formal_self(self, name: str, item: CallableType, context: Context) -> None: + type = format_type(item, self.options) + self.fail( + f'Attribute function "{name}" with type {type} does not accept self argument', context + ) + + def incompatible_self_argument( + self, name: str, arg: Type, sig: CallableType, is_classmethod: bool, context: Context + ) -> None: + kind = "class attribute function" if is_classmethod else "attribute function" + arg_type = format_type(arg, self.options) + sig_type = format_type(sig, self.options) + self.fail( + f'Invalid self argument {arg_type} to {kind} "{name}" with type {sig_type}', context + ) + + def incompatible_conditional_function_def( + self, defn: FuncDef, old_type: FunctionLike, new_type: FunctionLike + ) -> None: + error = self.fail("All conditional function variants must have identical signatures", defn) + if isinstance(old_type, (CallableType, Overloaded)) and isinstance( + new_type, (CallableType, Overloaded) + ): + self.note("Original:", defn) + self.pretty_callable_or_overload(old_type, defn, offset=4, parent_error=error) + self.note("Redefinition:", defn) + self.pretty_callable_or_overload(new_type, defn, offset=4, parent_error=error) + + def cannot_instantiate_abstract_class( + self, class_name: str, abstract_attributes: dict[str, bool], context: Context + ) -> None: + attrs = format_string_list([f'"{a}"' for a in abstract_attributes]) + self.fail( + f'Cannot instantiate abstract class "{class_name}" with abstract ' + f"attribute{plural_s(abstract_attributes)} {attrs}", + context, + code=codes.ABSTRACT, + ) + attrs_with_none = [ + f'"{a}"' + for a, implicit_and_can_return_none in abstract_attributes.items() + if implicit_and_can_return_none + ] + if not attrs_with_none: + return + if len(attrs_with_none) == 1: + note = ( + f"{attrs_with_none[0]} is implicitly abstract because it has an empty function " + "body. If it is not meant to be abstract, explicitly `return` or `return None`." + ) + else: + note = ( + "The following methods were marked implicitly abstract because they have empty " + f"function bodies: {format_string_list(attrs_with_none)}. " + "If they are not meant to be abstract, explicitly `return` or `return None`." 
+ ) + self.note(note, context, code=codes.ABSTRACT) + + def base_class_definitions_incompatible( + self, name: str, base1: TypeInfo, base2: TypeInfo, context: Context + ) -> None: + self.fail( + 'Definition of "{}" in base class "{}" is incompatible ' + 'with definition in base class "{}"'.format(name, base1.name, base2.name), + context, + ) + + def cant_assign_to_method(self, context: Context) -> None: + self.fail(message_registry.CANNOT_ASSIGN_TO_METHOD, context, code=codes.METHOD_ASSIGN) + + def cant_assign_to_classvar(self, name: str, context: Context) -> None: + self.fail(f'Cannot assign to class variable "{name}" via instance', context) + + def no_overridable_method(self, name: str, context: Context) -> None: + self.fail( + f'Method "{name}" is marked as an override, ' + "but no base method was found with this name", + context, + ) + + def explicit_override_decorator_missing( + self, name: str, base_name: str, context: Context + ) -> None: + self.fail( + f'Method "{name}" is not using @override ' + f'but is overriding a method in class "{base_name}"', + context, + code=codes.EXPLICIT_OVERRIDE_REQUIRED, + ) + + def final_cant_override_writable(self, name: str, ctx: Context) -> None: + self.fail(f'Cannot override writable attribute "{name}" with a final one', ctx) + + def cant_override_final(self, name: str, base_name: str, ctx: Context) -> None: + self.fail( + ( + f'Cannot override final attribute "{name}" ' + f'(previously declared in base class "{base_name}")' + ), + ctx, + ) + + def cant_assign_to_final(self, name: str, attr_assign: bool, ctx: Context) -> None: + """Warn about a prohibited assignment to a final attribute. + + Pass `attr_assign=True` if the assignment assigns to an attribute. + """ + kind = "attribute" if attr_assign else "name" + self.fail(f'Cannot assign to final {kind} "{unmangle(name)}"', ctx) + + def protocol_members_cant_be_final(self, ctx: Context) -> None: + self.fail("Protocol member cannot be final", ctx) + + def final_without_value(self, ctx: Context) -> None: + self.fail("Final name must be initialized with a value", ctx) + + def read_only_property(self, name: str, type: TypeInfo, context: Context) -> None: + self.fail(f'Property "{name}" defined in "{type.name}" is read-only', context) + + def incompatible_typevar_value( + self, callee: CallableType, typ: Type, typevar_name: str, context: Context + ) -> None: + self.fail( + message_registry.INCOMPATIBLE_TYPEVAR_VALUE.format( + typevar_name, callable_name(callee) or "function", format_type(typ, self.options) + ), + context, + code=codes.TYPE_VAR, + ) + + def dangerous_comparison(self, left: Type, right: Type, kind: str, ctx: Context) -> None: + left_str = "element" if kind == "container" else "left operand" + right_str = "container item" if kind == "container" else "right operand" + message = "Non-overlapping {} check ({} type: {}, {} type: {})" + left_typ, right_typ = format_type_distinctly(left, right, options=self.options) + self.fail( + message.format(kind, left_str, left_typ, right_str, right_typ), + ctx, + code=codes.COMPARISON_OVERLAP, + ) + + def overload_inconsistently_applies_decorator(self, decorator: str, context: Context) -> None: + self.fail( + f'Overload does not consistently use the "@{decorator}" ' + + "decorator on all function signatures.", + context, + ) + + def overloaded_signatures_overlap( + self, index1: int, index2: int, flip_note: bool, context: Context + ) -> None: + self.fail( + "Overloaded function signatures {} and {} overlap with " + "incompatible return 
types".format(index1, index2), + context, + code=codes.OVERLOAD_OVERLAP, + ) + if flip_note: + self.note( + "Flipping the order of overloads will fix this error", + context, + code=codes.OVERLOAD_OVERLAP, + ) + + def overloaded_signature_will_never_match( + self, index1: int, index2: int, context: Context + ) -> None: + self.fail( + "Overloaded function signature {index2} will never be matched: " + "signature {index1}'s parameter type(s) are the same or broader".format( + index1=index1, index2=index2 + ), + context, + code=codes.OVERLOAD_CANNOT_MATCH, + ) + + def overloaded_signatures_typevar_specific(self, index: int, context: Context) -> None: + self.fail( + f"Overloaded function implementation cannot satisfy signature {index} " + + "due to inconsistencies in how they use type variables", + context, + ) + + def overloaded_signatures_arg_specific(self, index: int, context: Context) -> None: + self.fail( + ( + f"Overloaded function implementation does not accept all possible arguments " + f"of signature {index}" + ), + context, + ) + + def overloaded_signatures_ret_specific(self, index: int, context: Context) -> None: + self.fail( + f"Overloaded function implementation cannot produce return type of signature {index}", + context, + ) + + def warn_both_operands_are_from_unions(self, context: Context) -> None: + self.note("Both left and right operands are unions", context, code=codes.OPERATOR) + + def warn_operand_was_from_union(self, side: str, original: Type, context: Context) -> None: + self.note( + f"{side} operand is of type {format_type(original, self.options)}", + context, + code=codes.OPERATOR, + ) + + def operator_method_signatures_overlap( + self, + reverse_class: TypeInfo, + reverse_method: str, + forward_class: Type, + forward_method: str, + context: Context, + ) -> None: + self.fail( + 'Signatures of "{}" of "{}" and "{}" of {} are unsafely overlapping'.format( + reverse_method, + reverse_class.name, + forward_method, + format_type(forward_class, self.options), + ), + context, + ) + + def forward_operator_not_callable(self, forward_method: str, context: Context) -> None: + self.fail(f'Forward operator "{forward_method}" is not callable', context) + + def signatures_incompatible(self, method: str, other_method: str, context: Context) -> None: + self.fail(f'Signatures of "{method}" and "{other_method}" are incompatible', context) + + def yield_from_invalid_operand_type(self, expr: Type, context: Context) -> Type: + text = ( + format_type(expr, self.options) + if format_type(expr, self.options) != "object" + else expr + ) + self.fail(f'"yield from" can\'t be applied to {text}', context) + return AnyType(TypeOfAny.from_error) + + def invalid_signature(self, func_type: Type, context: Context) -> None: + self.fail(f"Invalid signature {format_type(func_type, self.options)}", context) + + def invalid_signature_for_special_method( + self, func_type: Type, context: Context, method_name: str + ) -> None: + self.fail( + f'Invalid signature {format_type(func_type, self.options)} for "{method_name}"', + context, + ) + + def reveal_type(self, typ: Type, context: Context) -> None: + + # Search for an error watcher that modifies the "normal" behaviour (we do not + # rely on the normal `ErrorWatcher` filtering approach because we might need to + # collect the original types for a later unionised response): + for watcher in self.errors.get_watchers(): + # The `reveal_type` statement should be ignored: + if watcher.filter_revealed_type: + return + # The `reveal_type` statement might be visited 
iteratively due to being + # placed in a loop or so. Hence, we collect the respective types of + # individual iterations so that we can report them all in one step later: + if isinstance(watcher, IterationErrorWatcher): + watcher.iteration_dependent_errors.revealed_types[ + (context.line, context.column, context.end_line, context.end_column) + ].append(typ) + return + + # Nothing special here; just create the note: + visitor = TypeStrVisitor(options=self.options) + self.note(f'Revealed type is "{typ.accept(visitor)}"', context) + + def reveal_locals(self, type_map: dict[str, Type | None], context: Context) -> None: + # To ensure that the output is predictable on Python < 3.6, + # use an ordered dictionary sorted by variable name + sorted_locals = dict(sorted(type_map.items(), key=lambda t: t[0])) + if sorted_locals: + self.note("Revealed local types are:", context) + for k, v in sorted_locals.items(): + visitor = TypeStrVisitor(options=self.options) + self.note(f" {k}: {v.accept(visitor) if v is not None else None}", context) + else: + self.note("There are no locals to reveal", context) + + def unsupported_type_type(self, item: Type, context: Context) -> None: + self.fail( + f'Cannot instantiate type "type[{format_type_bare(item, self.options)}]"', context + ) + + def redundant_cast(self, typ: Type, context: Context) -> None: + self.fail( + f"Redundant cast to {format_type(typ, self.options)}", + context, + code=codes.REDUNDANT_CAST, + ) + + def assert_type_fail(self, source_type: Type, target_type: Type, context: Context) -> None: + (source, target) = format_type_distinctly(source_type, target_type, options=self.options) + self.fail(f"Expression is of type {source}, not {target}", context, code=codes.ASSERT_TYPE) + + def unimported_type_becomes_any(self, prefix: str, typ: Type, ctx: Context) -> None: + self.fail( + f"{prefix} becomes {format_type(typ, self.options)} due to an unfollowed import", + ctx, + code=codes.NO_ANY_UNIMPORTED, + ) + + def need_annotation_for_var( + self, node: SymbolNode, context: Context, options: Options | None = None + ) -> None: + hint = "" + # type to recommend the user adds + recommended_type = None + # Only gives hint if it's a variable declaration and the partial type is a builtin type + if options and isinstance(node, Var) and isinstance(node.type, PartialType): + type_dec = "" + if not node.type.type: + # partial None + if options.use_or_syntax(): + recommended_type = f"{type_dec} | None" + else: + recommended_type = f"Optional[{type_dec}]" + elif node.type.type.fullname in reverse_builtin_aliases: + # partial types other than partial None + name = node.type.type.fullname.partition(".")[2] + if name == "dict": + type_dec = f"{type_dec}, {type_dec}" + recommended_type = f"{name}[{type_dec}]" + if recommended_type is not None: + hint = f' (hint: "{node.name}: {recommended_type} = ...")' + + self.fail( + f'Need type annotation for "{unmangle(node.name)}"{hint}', + context, + code=codes.VAR_ANNOTATED, + ) + + def explicit_any(self, ctx: Context) -> None: + self.fail('Explicit "Any" is not allowed', ctx, code=codes.EXPLICIT_ANY) + + def unsupported_target_for_star_typeddict(self, typ: Type, ctx: Context) -> None: + self.fail( + "Unsupported type {} for ** expansion in TypedDict".format( + format_type(typ, self.options) + ), + ctx, + code=codes.TYPEDDICT_ITEM, + ) + + def non_required_keys_absent_with_star(self, keys: list[str], ctx: Context) -> None: + self.fail( + "Non-required {} not explicitly found in any ** item".format( + format_key_list(keys, 
short=True) + ), + ctx, + code=codes.TYPEDDICT_ITEM, + ) + + def unexpected_typeddict_keys( + self, + typ: TypedDictType, + expected_keys: list[str], + actual_keys: list[str], + context: Context, + ) -> None: + actual_set = set(actual_keys) + expected_set = set(expected_keys) + if not typ.is_anonymous(): + # Generate simpler messages for some common special cases. + # Use list comprehension instead of set operations to preserve order. + missing = [key for key in expected_keys if key not in actual_set] + if missing: + self.fail( + "Missing {} for TypedDict {}".format( + format_key_list(missing, short=True), format_type(typ, self.options) + ), + context, + code=codes.TYPEDDICT_ITEM, + ) + extra = [key for key in actual_keys if key not in expected_set] + if extra: + self.fail( + "Extra {} for TypedDict {}".format( + format_key_list(extra, short=True), format_type(typ, self.options) + ), + context, + code=codes.TYPEDDICT_UNKNOWN_KEY, + ) + if missing or extra: + # No need to check for further errors + return + found = format_key_list(actual_keys, short=True) + if not expected_keys: + self.fail(f"Unexpected TypedDict {found}", context) + return + expected = format_key_list(expected_keys) + if actual_keys and actual_set < expected_set: + found = f"only {found}" + self.fail(f"Expected {expected} but found {found}", context, code=codes.TYPEDDICT_ITEM) + + def typeddict_key_must_be_string_literal(self, typ: TypedDictType, context: Context) -> None: + self.fail( + "TypedDict key must be a string literal; expected one of {}".format( + format_item_name_list(typ.items.keys()) + ), + context, + code=codes.LITERAL_REQ, + ) + + def typeddict_key_not_found( + self, typ: TypedDictType, item_name: str, context: Context, setitem: bool = False + ) -> None: + """Handle error messages for TypedDicts that have unknown keys. + + Note, that we differentiate in between reading a value and setting a + value. + Setting a value on a TypedDict is an 'unknown-key' error, whereas + reading it is the more serious/general 'item' error. 
+ """ + if typ.is_anonymous(): + self.fail( + '"{}" is not a valid TypedDict key; expected one of {}'.format( + item_name, format_item_name_list(typ.items.keys()) + ), + context, + ) + else: + err_code = codes.TYPEDDICT_UNKNOWN_KEY if setitem else codes.TYPEDDICT_ITEM + self.fail( + f'TypedDict {format_type(typ, self.options)} has no key "{item_name}"', + context, + code=err_code, + ) + matches = best_matches(item_name, typ.items.keys(), n=3) + if matches: + self.note( + "Did you mean {}?".format(pretty_seq(matches, "or")), context, code=err_code + ) + + def typeddict_context_ambiguous(self, types: list[TypedDictType], context: Context) -> None: + formatted_types = ", ".join(list(format_type_distinctly(*types, options=self.options))) + self.fail( + f"Type of TypedDict is ambiguous, none of ({formatted_types}) matches cleanly", context + ) + + def typeddict_key_cannot_be_deleted( + self, typ: TypedDictType, item_name: str, context: Context + ) -> None: + if typ.is_anonymous(): + self.fail(f'TypedDict key "{item_name}" cannot be deleted', context) + else: + self.fail( + f'Key "{item_name}" of TypedDict {format_type(typ, self.options)} cannot be deleted', + context, + ) + + def typeddict_setdefault_arguments_inconsistent( + self, default: Type, expected: Type, context: Context + ) -> None: + msg = 'Argument 2 to "setdefault" of "TypedDict" has incompatible type {}; expected {}' + self.fail( + msg.format(format_type(default, self.options), format_type(expected, self.options)), + context, + code=codes.TYPEDDICT_ITEM, + ) + + def type_arguments_not_allowed(self, context: Context) -> None: + self.fail("Parameterized generics cannot be used with class or instance checks", context) + + def disallowed_any_type(self, typ: Type, context: Context) -> None: + typ = get_proper_type(typ) + if isinstance(typ, AnyType): + message = 'Expression has type "Any"' + else: + message = f'Expression type contains "Any" (has type {format_type(typ, self.options)})' + self.fail(message, context) + + def incorrectly_returning_any(self, typ: Type, context: Context) -> None: + message = ( + f"Returning Any from function declared to return {format_type(typ, self.options)}" + ) + self.fail(message, context, code=codes.NO_ANY_RETURN) + + def incorrect__exit__return(self, context: Context) -> None: + self.fail( + '"bool" is invalid as return type for "__exit__" that always returns False', + context, + code=codes.EXIT_RETURN, + ) + self.note( + 'Use "typing.Literal[False]" as the return type or change it to "None"', + context, + code=codes.EXIT_RETURN, + ) + self.note( + 'If return type of "__exit__" implies that it may return True, ' + "the context manager may swallow exceptions", + context, + code=codes.EXIT_RETURN, + ) + + def untyped_decorated_function(self, typ: Type, context: Context) -> None: + typ = get_proper_type(typ) + if isinstance(typ, AnyType): + self.fail("Function is untyped after decorator transformation", context) + else: + self.fail( + f'Type of decorated function contains type "Any" ({format_type(typ, self.options)})', + context, + ) + + def typed_function_untyped_decorator(self, func_name: str, context: Context) -> None: + self.fail( + f'Untyped decorator makes function "{func_name}" untyped', + context, + code=codes.UNTYPED_DECORATOR, + ) + + def bad_proto_variance( + self, actual: int, tvar_name: str, expected: int, context: Context + ) -> None: + msg = capitalize( + '{} type variable "{}" used in protocol where {} one is expected'.format( + variance_string(actual), tvar_name, variance_string(expected) 
+ ) + ) + self.fail(msg, context) + + def concrete_only_assign(self, typ: Type, context: Context) -> None: + self.fail( + f"Can only assign concrete classes to a variable of type {format_type(typ, self.options)}", + context, + code=codes.TYPE_ABSTRACT, + ) + + def concrete_only_call(self, typ: Type, context: Context) -> None: + self.fail( + f"Only concrete class can be given where {format_type(typ, self.options)} is expected", + context, + code=codes.TYPE_ABSTRACT, + ) + + def cannot_use_function_with_type( + self, method_name: str, type_name: str, context: Context + ) -> None: + self.fail(f"Cannot use {method_name}() with {type_name} type", context) + + def report_non_method_protocol( + self, tp: TypeInfo, members: list[str], context: Context + ) -> None: + self.fail( + "Only protocols that don't have non-method members can be used with issubclass()", + context, + ) + if len(members) < 3: + attrs = ", ".join(members) + self.note(f'Protocol "{tp.name}" has non-method member(s): {attrs}', context) + + def note_call( + self, subtype: Type, call: Type, context: Context, *, code: ErrorCode | None + ) -> None: + self.note( + '"{}.__call__" has type {}'.format( + format_type_bare(subtype, self.options), + format_type(call, self.options, verbosity=1), + ), + context, + code=code, + ) + + def unreachable_statement(self, context: Context) -> None: + self.fail("Statement is unreachable", context, code=codes.UNREACHABLE) + + def redundant_left_operand(self, op_name: str, context: Context) -> None: + """Indicates that the left operand of a boolean expression is redundant: + it does not change the truth value of the entire condition as a whole. + 'op_name' should either be the string "and" or the string "or". + """ + self.redundant_expr(f'Left operand of "{op_name}"', op_name == "and", context) + + def unreachable_right_operand(self, op_name: str, context: Context) -> None: + """Indicates that the right operand of a boolean expression is redundant: + it does not change the truth value of the entire condition as a whole. + 'op_name' should either be the string "and" or the string "or". 
+ """ + self.fail( + f'Right operand of "{op_name}" is never evaluated', context, code=codes.UNREACHABLE + ) + + def redundant_condition_in_comprehension(self, truthiness: bool, context: Context) -> None: + self.redundant_expr("If condition in comprehension", truthiness, context) + + def redundant_condition_in_if(self, truthiness: bool, context: Context) -> None: + self.redundant_expr("If condition", truthiness, context) + + def redundant_expr(self, description: str, truthiness: bool, context: Context) -> None: + self.fail( + f"{description} is always {str(truthiness).lower()}", + context, + code=codes.REDUNDANT_EXPR, + ) + + def impossible_intersection( + self, formatted_base_class_list: str, reason: str, context: Context + ) -> None: + template = "Subclass of {} cannot exist: {}" + self.fail( + template.format(formatted_base_class_list, reason), context, code=codes.UNREACHABLE + ) + + def tvar_without_default_type( + self, tvar_name: str, last_tvar_name_with_default: str, context: Context + ) -> None: + self.fail( + f'"{tvar_name}" cannot appear after "{last_tvar_name_with_default}" ' + "in type parameter list because it has no default type", + context, + ) + + def report_protocol_problems( + self, + subtype: Instance | TupleType | TypedDictType | TypeType | CallableType, + supertype: Instance, + context: Context, + *, + parent_error: ErrorInfo, + ) -> None: + """Report possible protocol conflicts between 'subtype' and 'supertype'. + + This includes missing members, incompatible types, and incompatible + attribute flags, such as settable vs read-only or class variable vs + instance variable. + """ + OFFSET = 4 # Four spaces, so that notes will look like this: + # note: 'Cls' is missing following 'Proto' members: + # note: method, attr + MAX_ITEMS = 2 # Maximum number of conflicts, missing members, and overloads shown + # List of special situations where we don't want to report additional problems + exclusions: dict[type, list[str]] = { + TypedDictType: ["typing.Mapping"], + TupleType: ["typing.Iterable", "typing.Sequence"], + } + if supertype.type.fullname in exclusions.get(type(subtype), []): + return + if any(isinstance(tp, UninhabitedType) for tp in get_proper_types(supertype.args)): + # We don't want to add notes for failed inference (e.g. Iterable[Never]). + # This will be only confusing a user even more. 
+ return + + class_obj = False + is_module = False + skip = [] + if isinstance(subtype, TupleType): + subtype = subtype.partial_fallback + elif isinstance(subtype, TypedDictType): + subtype = subtype.fallback + elif isinstance(subtype, TypeType): + if not isinstance(subtype.item, Instance): + return + class_obj = True + subtype = subtype.item + elif isinstance(subtype, CallableType): + if subtype.is_type_obj(): + ret_type = get_proper_type(subtype.ret_type) + if isinstance(ret_type, TupleType): + ret_type = ret_type.partial_fallback + if not isinstance(ret_type, Instance): + return + class_obj = True + subtype = ret_type + else: + subtype = subtype.fallback + skip = ["__call__"] + if subtype.extra_attrs and subtype.extra_attrs.mod_name: + is_module = True + + # Report missing members + missing = get_missing_protocol_members(subtype, supertype, skip=skip) + if ( + missing + and (len(missing) < len(supertype.type.protocol_members) or missing == ["__call__"]) + and len(missing) <= MAX_ITEMS + ): + if missing == ["__call__"] and class_obj: + self.note( + '"{}" has constructor incompatible with "__call__" of "{}"'.format( + subtype.type.name, supertype.type.name + ), + context, + parent_error=parent_error, + ) + else: + self.note( + '"{}" is missing following "{}" protocol member{}:'.format( + subtype.type.name, supertype.type.name, plural_s(missing) + ), + context, + parent_error=parent_error, + ) + self.note(", ".join(missing), context, offset=OFFSET, parent_error=parent_error) + elif len(missing) > MAX_ITEMS or len(missing) == len(supertype.type.protocol_members): + # This is an obviously wrong type: too many missing members + return + + # Report member type conflicts + conflict_types = get_conflict_protocol_types( + subtype, supertype, class_obj=class_obj, options=self.options + ) + if conflict_types and ( + not is_subtype(subtype, erase_type(supertype), options=self.options) + or not subtype.type.defn.type_vars + or not supertype.type.defn.type_vars + # Always show detailed message for ParamSpec + or subtype.type.has_param_spec_type + or supertype.type.has_param_spec_type + ): + type_name = format_type(subtype, self.options, module_names=True) + self.note( + f"Following member(s) of {type_name} have conflicts:", + context, + parent_error=parent_error, + ) + for name, got, exp, is_lvalue in conflict_types[:MAX_ITEMS]: + exp = get_proper_type(exp) + got = get_proper_type(got) + setter_suffix = " setter type" if is_lvalue else "" + if ( + not isinstance(exp, (CallableType, Overloaded)) + or not isinstance(got, (CallableType, Overloaded)) + # If expected type is a type object, it means it is a nested class. + # Showing constructor signature in errors would be confusing in this case, + # since we don't check the signature, only subclassing of type objects. 
+ or exp.is_type_obj() + ): + self.note( + "{}: expected{} {}, got {}".format( + name, + setter_suffix, + *format_type_distinctly(exp, got, options=self.options), + ), + context, + offset=OFFSET, + parent_error=parent_error, + ) + if is_lvalue and is_subtype(got, exp, options=self.options): + self.note( + "Setter types should behave contravariantly", + context, + offset=OFFSET, + parent_error=parent_error, + ) + else: + self.note( + "Expected{}:".format(setter_suffix), + context, + offset=OFFSET, + parent_error=parent_error, + ) + if isinstance(exp, CallableType): + self.note( + pretty_callable(exp, self.options, skip_self=class_obj or is_module), + context, + offset=2 * OFFSET, + parent_error=parent_error, + ) + else: + assert isinstance(exp, Overloaded) + self.pretty_overload( + exp, + context, + 2 * OFFSET, + parent_error=parent_error, + skip_self=class_obj or is_module, + ) + self.note("Got:", context, offset=OFFSET, parent_error=parent_error) + if isinstance(got, CallableType): + self.note( + pretty_callable(got, self.options, skip_self=class_obj or is_module), + context, + offset=2 * OFFSET, + parent_error=parent_error, + ) + else: + assert isinstance(got, Overloaded) + self.pretty_overload( + got, + context, + 2 * OFFSET, + parent_error=parent_error, + skip_self=class_obj or is_module, + ) + self.print_more(conflict_types, context, OFFSET, MAX_ITEMS, code=parent_error.code) + + # Report flag conflicts (i.e. settable vs read-only etc.) + conflict_flags = get_bad_protocol_flags(subtype, supertype, class_obj=class_obj) + for name, subflags, superflags in conflict_flags[:MAX_ITEMS]: + if not class_obj and IS_CLASSVAR in subflags and IS_CLASSVAR not in superflags: + self.note( + "Protocol member {}.{} expected instance variable, got class variable".format( + supertype.type.name, name + ), + context, + parent_error=parent_error, + ) + if not class_obj and IS_CLASSVAR in superflags and IS_CLASSVAR not in subflags: + self.note( + "Protocol member {}.{} expected class variable, got instance variable".format( + supertype.type.name, name + ), + context, + parent_error=parent_error, + ) + if IS_SETTABLE in superflags and IS_SETTABLE not in subflags: + self.note( + "Protocol member {}.{} expected settable variable," + " got read-only attribute".format(supertype.type.name, name), + context, + parent_error=parent_error, + ) + if IS_CLASS_OR_STATIC in superflags and IS_CLASS_OR_STATIC not in subflags: + self.note( + "Protocol member {}.{} expected class or static method".format( + supertype.type.name, name + ), + context, + parent_error=parent_error, + ) + if ( + class_obj + and IS_VAR in superflags + and (IS_VAR in subflags and IS_CLASSVAR not in subflags) + ): + self.note( + "Only class variables allowed for class object access on protocols," + ' {} is an instance variable of "{}"'.format(name, subtype.type.name), + context, + parent_error=parent_error, + ) + if class_obj and IS_CLASSVAR in superflags: + self.note( + "ClassVar protocol member {}.{} can never be matched by a class object".format( + supertype.type.name, name + ), + context, + parent_error=parent_error, + ) + self.print_more(conflict_flags, context, OFFSET, MAX_ITEMS, code=parent_error.code) + + def pretty_overload( + self, + tp: Overloaded, + context: Context, + offset: int, + *, + parent_error: ErrorInfo, + add_class_or_static_decorator: bool = False, + skip_self: bool = False, + ) -> None: + for item in tp.items: + self.note("@overload", context, offset=offset, parent_error=parent_error) + + if add_class_or_static_decorator: + 
decorator = pretty_class_or_static_decorator(item) + if decorator is not None: + self.note(decorator, context, offset=offset, parent_error=parent_error) + + self.note( + pretty_callable(item, self.options, skip_self=skip_self), + context, + offset=offset, + parent_error=parent_error, + ) + + def print_more( + self, + conflicts: Sequence[Any], + context: Context, + offset: int, + max_items: int, + *, + code: ErrorCode | None = None, + ) -> None: + if len(conflicts) > max_items: + self.note( + f"<{len(conflicts) - max_items} more conflict(s) not shown>", + context, + offset=offset, + code=code, + ) + + def try_report_long_tuple_assignment_error( + self, + subtype: ProperType, + supertype: ProperType, + context: Context, + msg: message_registry.ErrorMessage, + subtype_label: str | None = None, + supertype_label: str | None = None, + ) -> bool: + """Try to generate meaningful error message for very long tuple assignment + + Returns a bool: True when generating long tuple assignment error, + False when no such error reported + """ + if isinstance(subtype, TupleType): + if ( + len(subtype.items) > MAX_TUPLE_ITEMS + and isinstance(supertype, Instance) + and supertype.type.fullname == "builtins.tuple" + ): + lhs_type = supertype.args[0] + lhs_types = [lhs_type] * len(subtype.items) + self.generate_incompatible_tuple_error(lhs_types, subtype.items, context, msg) + return True + elif isinstance(supertype, TupleType) and ( + len(subtype.items) > MAX_TUPLE_ITEMS or len(supertype.items) > MAX_TUPLE_ITEMS + ): + if len(subtype.items) != len(supertype.items): + if supertype_label is not None and subtype_label is not None: + msg = msg.with_additional_msg( + " ({} {}, {} {})".format( + subtype_label, + self.format_long_tuple_type(subtype), + supertype_label, + self.format_long_tuple_type(supertype), + ) + ) + self.fail(msg.value, context, code=msg.code) + return True + self.generate_incompatible_tuple_error( + supertype.items, subtype.items, context, msg + ) + return True + return False + + def format_long_tuple_type(self, typ: TupleType) -> str: + """Format very long tuple type using an ellipsis notation""" + item_cnt = len(typ.items) + if item_cnt > MAX_TUPLE_ITEMS: + return '"tuple[{}, {}, ... 
<{} more items>]"'.format( + format_type_bare(typ.items[0], self.options), + format_type_bare(typ.items[1], self.options), + str(item_cnt - 2), + ) + else: + return format_type(typ, self.options) + + def generate_incompatible_tuple_error( + self, + lhs_types: list[Type], + rhs_types: list[Type], + context: Context, + msg: message_registry.ErrorMessage, + ) -> None: + """Generate error message for individual incompatible tuple pairs""" + error_cnt = 0 + notes: list[str] = [] + for i, (lhs_t, rhs_t) in enumerate(zip(lhs_types, rhs_types)): + if not is_subtype(rhs_t, lhs_t): + if error_cnt < 3: + notes.append( + "Expression tuple item {} has type {}; {} expected; ".format( + str(i), + format_type(rhs_t, self.options), + format_type(lhs_t, self.options), + ) + ) + error_cnt += 1 + + info = f" ({str(error_cnt)} tuple items are incompatible" + if error_cnt - 3 > 0: + info += f"; {str(error_cnt - 3)} items are omitted)" + else: + info += ")" + msg = msg.with_additional_msg(info) + self.fail(msg.value, context, code=msg.code) + for note in notes: + self.note(note, context, code=msg.code) + + def add_fixture_note(self, fullname: str, ctx: Context) -> None: + self.note(f'Maybe your test fixture does not define "{fullname}"?', ctx) + if fullname in SUGGESTED_TEST_FIXTURES: + self.note( + "Consider adding [builtins fixtures/{}] to your test description".format( + SUGGESTED_TEST_FIXTURES[fullname] + ), + ctx, + ) + + def annotation_in_unchecked_function(self, context: Context) -> None: + self.note( + "By default the bodies of untyped functions are not checked," + " consider using --check-untyped-defs", + context, + code=codes.ANNOTATION_UNCHECKED, + ) + + def type_parameters_should_be_declared(self, undeclared: list[str], context: Context) -> None: + names = ", ".join('"' + n + '"' for n in undeclared) + self.fail( + message_registry.TYPE_PARAMETERS_SHOULD_BE_DECLARED.format(names), + context, + code=codes.VALID_TYPE, + ) + + def match_statement_inexhaustive_match(self, typ: Type, context: Context) -> None: + type_str = format_type(typ, self.options) + msg = f"Match statement has unhandled case for values of type {type_str}" + self.fail(msg, context, code=codes.EXHAUSTIVE_MATCH) + self.note( + "If match statement is intended to be non-exhaustive, add `case _: pass`", + context, + code=codes.EXHAUSTIVE_MATCH, + ) + + def iteration_dependent_errors(self, iter_errors: IterationDependentErrors) -> None: + for error_info in iter_errors.yield_uselessness_error_infos(): + self.fail(*error_info[:2], code=error_info[2]) + for types, context in iter_errors.yield_revealed_type_infos(): + self.reveal_type(mypy.typeops.make_simplified_union(types), context) + + +def quote_type_string(type_string: str) -> str: + """Quotes a type representation for use in messages.""" + if ( + type_string in ["Module", "overloaded function", ""] + or type_string.startswith("Module ") + or type_string.endswith("?") + ): + # These messages are easier to read if these aren't quoted. + return type_string + return f'"{type_string}"' + + +def should_format_arg_as_type(arg_kind: ArgKind, arg_name: str | None, verbosity: int) -> bool: + """ + Determine whether a function argument should be formatted as its Type or with name. 
+ """ + return (arg_kind == ARG_POS and arg_name is None) or ( + verbosity == 0 and arg_kind.is_positional() + ) + + +def format_callable_args( + arg_types: list[Type], + arg_kinds: list[ArgKind], + arg_names: list[str | None], + format: Callable[[Type], str], + verbosity: int, +) -> str: + """Format a bunch of Callable arguments into a string""" + arg_strings = [] + for arg_name, arg_type, arg_kind in zip(arg_names, arg_types, arg_kinds): + if should_format_arg_as_type(arg_kind, arg_name, verbosity): + arg_strings.append(format(arg_type)) + else: + constructor = ARG_CONSTRUCTOR_NAMES[arg_kind] + if arg_kind.is_star() or arg_name is None: + arg_strings.append(f"{constructor}({format(arg_type)})") + else: + arg_strings.append(f"{constructor}({format(arg_type)}, {repr(arg_name)})") + + return ", ".join(arg_strings) + + +def format_type_inner( + typ: Type, + verbosity: int, + options: Options, + fullnames: set[str] | None, + module_names: bool = False, + use_pretty_callable: bool = True, +) -> str: + """ + Convert a type to a relatively short string suitable for error messages. + + Args: + typ: type to be formatted + verbosity: a coarse grained control on the verbosity of the type + options: Options object controlling formatting + fullnames: a set of names that should be printed in full + module_names: whether to show module names for module types + use_pretty_callable: use pretty_callable to format Callable types. + """ + + def format(typ: Type) -> str: + return format_type_inner(typ, verbosity, options, fullnames) + + def format_list(types: Sequence[Type]) -> str: + return ", ".join(format(typ) for typ in types) + + def format_union_items(types: Sequence[Type]) -> list[str]: + formatted = [format(typ) for typ in types if format(typ) != "None"] + if len(formatted) > MAX_UNION_ITEMS and verbosity == 0: + more = len(formatted) - MAX_UNION_ITEMS // 2 + formatted = formatted[: MAX_UNION_ITEMS // 2] + else: + more = 0 + if more: + formatted.append(f"<{more} more items>") + if any(format(typ) == "None" for typ in types): + formatted.append("None") + return formatted + + def format_union(types: Sequence[Type]) -> str: + return " | ".join(format_union_items(types)) + + def format_literal_value(typ: LiteralType) -> str: + if typ.is_enum_literal(): + underlying_type = format(typ.fallback) + return f"{underlying_type}.{typ.value}" + else: + return typ.value_repr() + + if isinstance(typ, TypeAliasType) and typ.is_recursive: + if typ.alias is None: + type_str = "" + else: + if verbosity >= 2 or (fullnames and typ.alias.fullname in fullnames): + type_str = typ.alias.fullname + else: + type_str = typ.alias.name + if typ.args: + type_str += f"[{format_list(typ.args)}]" + return type_str + + # TODO: always mention type alias names in errors. + typ = get_proper_type(typ) + + if isinstance(typ, Instance): + itype = typ + # Get the short name of the type. + if itype.type.fullname == "types.ModuleType": + # Make some common error messages simpler and tidier. + base_str = "Module" + if itype.extra_attrs and itype.extra_attrs.mod_name and module_names: + return f'{base_str} "{itype.extra_attrs.mod_name}"' + return base_str + if itype.type.fullname == "typing._SpecialForm": + # This is not a real type but used for some typing-related constructs. 
+ return "" + if verbosity >= 2 or (fullnames and itype.type.fullname in fullnames): + base_str = itype.type.fullname + else: + base_str = itype.type.name + if not itype.args: + if itype.type.has_type_var_tuple_type and len(itype.type.type_vars) == 1: + return base_str + "[()]" + # No type arguments, just return the type name + return base_str + elif itype.type.fullname == "builtins.tuple": + item_type_str = format(itype.args[0]) + return f"tuple[{item_type_str}, ...]" + else: + # There are type arguments. Convert the arguments to strings. + return f"{base_str}[{format_list(itype.args)}]" + elif isinstance(typ, UnpackType): + if options.use_star_unpack(): + return f"*{format(typ.type)}" + return f"Unpack[{format(typ.type)}]" + elif isinstance(typ, TypeVarType): + # This is similar to non-generic instance types. + fullname = scoped_type_var_name(typ) + if verbosity >= 2 or (fullnames and fullname in fullnames): + return fullname + return typ.name + elif isinstance(typ, TypeVarTupleType): + # This is similar to non-generic instance types. + fullname = scoped_type_var_name(typ) + if verbosity >= 2 or (fullnames and fullname in fullnames): + return fullname + return typ.name + elif isinstance(typ, ParamSpecType): + # Concatenate[..., P] + if typ.prefix.arg_types: + args = format_callable_args( + typ.prefix.arg_types, typ.prefix.arg_kinds, typ.prefix.arg_names, format, verbosity + ) + + return f"[{args}, **{typ.name_with_suffix()}]" + else: + # TODO: better disambiguate ParamSpec name clashes. + return typ.name_with_suffix() + elif isinstance(typ, TupleType): + # Prefer the name of the fallback class (if not tuple), as it's more informative. + if typ.partial_fallback.type.fullname != "builtins.tuple": + return format(typ.partial_fallback) + type_items = format_list(typ.items) or "()" + return f"tuple[{type_items}]" + elif isinstance(typ, TypedDictType): + # If the TypedDictType is named, return the name + if not typ.is_anonymous(): + return format(typ.fallback) + items = [] + for item_name, item_type in typ.items.items(): + modifier = "" + if item_name not in typ.required_keys: + modifier += "?" + if item_name in typ.readonly_keys: + modifier += "=" + items.append(f"{item_name!r}{modifier}: {format(item_type)}") + return f"TypedDict({{{', '.join(items)}}})" + elif isinstance(typ, LiteralType): + return f"Literal[{format_literal_value(typ)}]" + elif isinstance(typ, UnionType): + typ = get_proper_type(ignore_last_known_values(typ)) + if not isinstance(typ, UnionType): + return format(typ) + literal_items, union_items = separate_union_literals(typ) + + # Coalesce multiple Literal[] members. This also changes output order. + # If there's just one Literal item, retain the original ordering. 
+ if len(literal_items) > 1: + literal_str = "Literal[{}]".format( + ", ".join(format_literal_value(t) for t in literal_items) + ) + + if len(union_items) == 1 and isinstance(get_proper_type(union_items[0]), NoneType): + return ( + f"{literal_str} | None" + if options.use_or_syntax() + else f"Optional[{literal_str}]" + ) + elif union_items: + return ( + f"{literal_str} | {format_union(union_items)}" + if options.use_or_syntax() + else f"Union[{', '.join(format_union_items(union_items))}, {literal_str}]" + ) + else: + return literal_str + else: + # Only print Union as Optional if the Optional wouldn't have to contain another Union + print_as_optional = ( + len(typ.items) - sum(isinstance(get_proper_type(t), NoneType) for t in typ.items) + == 1 + ) + if print_as_optional: + rest = [t for t in typ.items if not isinstance(get_proper_type(t), NoneType)] + return ( + f"{format(rest[0])} | None" + if options.use_or_syntax() + else f"Optional[{format(rest[0])}]" + ) + else: + s = ( + format_union(typ.items) + if options.use_or_syntax() + else f"Union[{', '.join(format_union_items(typ.items))}]" + ) + return s + elif isinstance(typ, NoneType): + return "None" + elif isinstance(typ, AnyType): + return "Any" + elif isinstance(typ, DeletedType): + return "" + elif isinstance(typ, UninhabitedType): + return "Never" + elif isinstance(typ, TypeType): + if typ.is_type_form: + type_name = "TypeForm" + else: + type_name = "type" + return f"{type_name}[{format(typ.item)}]" + elif isinstance(typ, FunctionLike): + func = typ + if func.is_type_obj(): + # The type of a type object type can be derived from the + # return type (this always works). + return format(TypeType.make_normalized(func.items[0].ret_type)) + elif isinstance(func, CallableType): + if func.type_guard is not None: + return_type = f"TypeGuard[{format(func.type_guard)}]" + elif func.type_is is not None: + return_type = f"TypeIs[{format(func.type_is)}]" + else: + return_type = format(func.ret_type) + if func.is_ellipsis_args: + return f"Callable[..., {return_type}]" + param_spec = func.param_spec() + if param_spec is not None: + return f"Callable[{format(param_spec)}, {return_type}]" + + # Use pretty format (def-style) for complex signatures with named, optional, or star args. + # Use compact Callable[[...], ...] only for signatures with all simple positional args. + if use_pretty_callable: + if any( + not should_format_arg_as_type(kind, name, verbosity) + for kind, name in zip(func.arg_kinds, func.arg_names) + ): + return pretty_callable(func, options) + + args = format_callable_args( + func.arg_types, func.arg_kinds, func.arg_names, format, verbosity + ) + return f"Callable[[{args}], {return_type}]" + else: + # Use a simple representation for function types; proper + # function types may result in long and difficult-to-read + # error messages. + return "overloaded function" + elif isinstance(typ, UnboundType): + return typ.accept(TypeStrVisitor(options=options)) + elif isinstance(typ, Parameters): + args = format_callable_args(typ.arg_types, typ.arg_kinds, typ.arg_names, format, verbosity) + return f"[{args}]" + elif typ is None: + raise RuntimeError("Type is None") + else: + # Default case; we simply have to return something meaningful here. + return "object" + + +def collect_all_named_types(t: Type) -> list[Type]: + """Return all instances/aliases/type variables that `t` contains (including `t`). + + This is similar to collect_all_inner_types from typeanal but only + returns instances and will recurse into fallbacks. 
+ """ + visitor = CollectAllNamedTypesQuery() + t.accept(visitor) + return visitor.types + + +class CollectAllNamedTypesQuery(TypeTraverserVisitor): + def __init__(self) -> None: + self.types: list[Type] = [] + + def visit_instance(self, t: Instance) -> None: + self.types.append(t) + super().visit_instance(t) + + def visit_type_alias_type(self, t: TypeAliasType) -> None: + if t.alias and not t.is_recursive: + get_proper_type(t).accept(self) + else: + self.types.append(t) + super().visit_type_alias_type(t) + + def visit_type_var(self, t: TypeVarType) -> None: + self.types.append(t) + super().visit_type_var(t) + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> None: + self.types.append(t) + super().visit_type_var_tuple(t) + + def visit_param_spec(self, t: ParamSpecType) -> None: + self.types.append(t) + super().visit_param_spec(t) + + +def scoped_type_var_name(t: TypeVarLikeType) -> str: + if not t.id.namespace: + return t.name + # TODO: support rare cases when both TypeVar name and namespace suffix coincide. + *_, suffix = t.id.namespace.split(".") + return f"{t.name}@{suffix}" + + +def find_type_overlaps(*types: Type) -> set[str]: + """Return a set of fullnames that share a short name and appear in either type. + + This is used to ensure that distinct types with the same short name are printed + with their fullname. + """ + d: dict[str, set[str]] = {} + for type in types: + for t in collect_all_named_types(type): + if isinstance(t, ProperType) and isinstance(t, Instance): + d.setdefault(t.type.name, set()).add(t.type.fullname) + elif isinstance(t, TypeAliasType) and t.alias: + d.setdefault(t.alias.name, set()).add(t.alias.fullname) + else: + assert isinstance(t, TypeVarLikeType) + d.setdefault(t.name, set()).add(scoped_type_var_name(t)) + for shortname in d.keys(): + if f"typing.{shortname}" in TYPES_FOR_UNIMPORTED_HINTS: + d[shortname].add(f"typing.{shortname}") + + overlaps: set[str] = set() + for fullnames in d.values(): + if len(fullnames) > 1: + overlaps.update(fullnames) + return overlaps + + +def format_type( + typ: Type, options: Options, verbosity: int = 0, module_names: bool = False +) -> str: + """ + Convert a type to a relatively short string suitable for error messages. + + `verbosity` is a coarse-grained control on the verbosity of the type + + This function returns a string appropriate for unmodified use in error + messages; this means that it will be quoted in most cases. If + modification of the formatted string is required, callers should use + format_type_bare. + """ + return quote_type_string(format_type_bare(typ, options, verbosity, module_names)) + + +def format_type_bare( + typ: Type, options: Options, verbosity: int = 0, module_names: bool = False +) -> str: + """ + Convert a type to a relatively short string suitable for error messages. + + `verbosity` is a coarse-grained control on the verbosity of the type + `fullnames` specifies a set of names that should be printed in full + + This function will return an unquoted string. If a caller doesn't need to + perform post-processing on the string output, format_type should be used + instead. (The caller may want to use quote_type_string after + processing has happened, to maintain consistent quoting in messages.) + """ + return format_type_inner(typ, verbosity, options, find_type_overlaps(typ), module_names) + + +def format_type_distinctly(*types: Type, options: Options, bare: bool = False) -> tuple[str, ...]: + """Jointly format types to distinct strings. 
+ + Increase the verbosity of the type strings until they become distinct + while also requiring that distinct types with the same short name are + formatted distinctly. + + By default, the returned strings are created using format_type() and will be + quoted accordingly. If ``bare`` is True, the returned strings will not + be quoted; callers who need to do post-processing of the strings before + quoting them (such as prepending * or **) should use this. + """ + overlapping = find_type_overlaps(*types) + + def format_single(arg: Type) -> str: + return format_type_inner(arg, verbosity=0, options=options, fullnames=overlapping) + + min_verbosity = 0 + # Prevent emitting weird errors like: + # ... has incompatible type "Callable[[int], Child]"; expected "Callable[[int], Parent]" + if len(types) == 2: + left, right = types + left = get_proper_type(left) + right = get_proper_type(right) + # If the right type has named arguments, they may be the reason for incompatibility. + # This excludes cases when right is Callable[[Something], None] without named args, + # because that's usually the right thing to do. + if ( + isinstance(left, CallableType) + and isinstance(right, CallableType) + and any(right.arg_names) + and is_subtype(left, right, ignore_pos_arg_names=True) + ): + min_verbosity = 1 + + for verbosity in range(min_verbosity, 2): + strs = [ + format_type_inner(type, verbosity=verbosity, options=options, fullnames=overlapping) + for type in types + ] + if len(set(strs)) == len(strs): + break + if bare: + return tuple(strs) + else: + return tuple(quote_type_string(s) for s in strs) + + +def pretty_class_or_static_decorator(tp: CallableType) -> str | None: + """Return @classmethod or @staticmethod, if any, for the given callable type.""" + definition = get_func_def(tp) + if definition is not None and isinstance(definition, SYMBOL_FUNCBASE_TYPES): + if definition.is_class: + return "@classmethod" + if definition.is_static: + return "@staticmethod" + return None + + +def pretty_callable(tp: CallableType, options: Options, skip_self: bool = False) -> str: + """Return a nice easily-readable representation of a callable type. + For example: + def [T <: int] f(self, x: int, y: T) -> None + + If skip_self is True, print an actual callable type, as it would appear + when bound on an instance/class, rather than how it would appear in the + defining statement. + """ + s = "" + asterisk = False + slash = False + for i in range(len(tp.arg_types)): + if s: + s += ", " + if tp.arg_kinds[i].is_named() and not asterisk: + s += "*, " + asterisk = True + if tp.arg_kinds[i] == ARG_STAR: + s += "*" + asterisk = True + if tp.arg_kinds[i] == ARG_STAR2: + s += "**" + name = tp.arg_names[i] + if name: + s += name + ": " + type_str = format_type_bare(tp.arg_types[i], options) + if tp.arg_kinds[i] == ARG_STAR2 and tp.unpack_kwargs: + type_str = f"Unpack[{type_str}]" + s += type_str + if tp.arg_kinds[i].is_optional(): + s += " = ..." 
+ if ( + not slash + and tp.arg_kinds[i].is_positional() + and name is None + and ( + i == len(tp.arg_types) - 1 + or (tp.arg_names[i + 1] is not None or not tp.arg_kinds[i + 1].is_positional()) + ) + ): + s += ", /" + slash = True + + # If we got a "special arg" (i.e: self, cls, etc...), prepend it to the arg list + definition = get_func_def(tp) + if ( + isinstance(definition, FuncDef) + and hasattr(definition, "arguments") + and not tp.from_concatenate + ): + definition_arg_names = [arg.variable.name for arg in definition.arguments] + if ( + len(definition_arg_names) > len(tp.arg_names) + and definition_arg_names[0] + and not skip_self + ): + if s: + s = ", " + s + s = definition_arg_names[0] + s + s = f"{definition.name}({s})" + elif tp.name: + first_arg = get_first_arg(tp) + if first_arg: + if s: + s = ", " + s + s = first_arg + s + s = f"{tp.name.split()[0]}({s})" # skip "of Class" part + else: + s = f"({s})" + + s += " -> " + if tp.type_guard is not None: + s += f"TypeGuard[{format_type_bare(tp.type_guard, options)}]" + elif tp.type_is is not None: + s += f"TypeIs[{format_type_bare(tp.type_is, options)}]" + else: + s += format_type_bare(tp.ret_type, options) + + if tp.variables: + tvars = [] + for tvar in tp.variables: + if isinstance(tvar, TypeVarType): + upper_bound = get_proper_type(tvar.upper_bound) + if not ( + isinstance(upper_bound, Instance) + and upper_bound.type.fullname == "builtins.object" + ): + tvars.append(f"{tvar.name}: {format_type_bare(upper_bound, options)}") + elif tvar.values: + tvars.append( + "{}: ({})".format( + tvar.name, + ", ".join([format_type_bare(tp, options) for tp in tvar.values]), + ) + ) + else: + tvars.append(tvar.name) + else: + # For other TypeVarLikeTypes, just use the repr + tvars.append(repr(tvar)) + s = f"[{', '.join(tvars)}] {s}" + return f"def {s}" + + +def get_first_arg(tp: CallableType) -> str | None: + definition = get_func_def(tp) + if not isinstance(definition, FuncDef) or not definition.info or definition.is_static: + return None + return definition.original_first_arg + + +def variance_string(variance: int) -> str: + if variance == COVARIANT: + return "covariant" + elif variance == CONTRAVARIANT: + return "contravariant" + else: + return "invariant" + + +def get_missing_protocol_members(left: Instance, right: Instance, skip: list[str]) -> list[str]: + """Find all protocol members of 'right' that are not implemented + (i.e. completely missing) in 'left'. + """ + assert right.type.is_protocol + missing: list[str] = [] + for member in right.type.protocol_members: + if member in skip: + continue + if not find_member(member, left, left): + missing.append(member) + return missing + + +def get_conflict_protocol_types( + left: Instance, right: Instance, class_obj: bool = False, options: Options | None = None +) -> list[tuple[str, Type, Type, bool]]: + """Find members that are defined in 'left' but have incompatible types. + Return them as a list of ('member', 'got', 'expected', 'is_lvalue'). 
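+
+    Illustrative example (the member name is hypothetical): if the protocol on the
+    right declares "def read(self) -> bytes" while the class on the left implements
+    "def read(self) -> str", the result would contain an entry of the form
+    ("read", <left's read type>, <right's read type>, False).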
+ """ + assert right.type.is_protocol + conflicts: list[tuple[str, Type, Type, bool]] = [] + for member in right.type.protocol_members: + if member in ("__init__", "__new__"): + continue + supertype = find_member(member, right, left) + assert supertype is not None + subtype = mypy.typeops.get_protocol_member(left, member, class_obj) + if not subtype: + continue + is_compat = is_subtype(subtype, supertype, ignore_pos_arg_names=True, options=options) + if not is_compat: + conflicts.append((member, subtype, supertype, False)) + superflags = get_member_flags(member, right) + if IS_SETTABLE not in superflags: + continue + different_setter = False + if IS_EXPLICIT_SETTER in superflags: + set_supertype = find_member(member, right, left, is_lvalue=True) + if set_supertype and not is_same_type(set_supertype, supertype): + different_setter = True + supertype = set_supertype + if IS_EXPLICIT_SETTER in get_member_flags(member, left): + set_subtype = mypy.typeops.get_protocol_member(left, member, class_obj, is_lvalue=True) + if set_subtype and not is_same_type(set_subtype, subtype): + different_setter = True + subtype = set_subtype + if not is_compat and not different_setter: + # We already have this conflict listed, avoid duplicates. + continue + assert supertype is not None and subtype is not None + is_compat = is_subtype(supertype, subtype, options=options) + if not is_compat: + conflicts.append((member, subtype, supertype, different_setter)) + return conflicts + + +def get_bad_protocol_flags( + left: Instance, right: Instance, class_obj: bool = False +) -> list[tuple[str, set[int], set[int]]]: + """Return all incompatible attribute flags for members that are present in both + 'left' and 'right'. + """ + assert right.type.is_protocol + all_flags: list[tuple[str, set[int], set[int]]] = [] + for member in right.type.protocol_members: + if find_member(member, left, left, class_obj=class_obj): + all_flags.append( + ( + member, + get_member_flags(member, left, class_obj=class_obj), + get_member_flags(member, right), + ) + ) + bad_flags = [] + for name, subflags, superflags in all_flags: + if ( + IS_CLASSVAR in subflags + and IS_CLASSVAR not in superflags + and IS_SETTABLE in superflags + or IS_CLASSVAR in superflags + and IS_CLASSVAR not in subflags + or IS_SETTABLE in superflags + and IS_SETTABLE not in subflags + or IS_CLASS_OR_STATIC in superflags + and IS_CLASS_OR_STATIC not in subflags + or class_obj + and IS_VAR in superflags + and IS_CLASSVAR not in subflags + or class_obj + and IS_CLASSVAR in superflags + ): + bad_flags.append((name, subflags, superflags)) + return bad_flags + + +def capitalize(s: str) -> str: + """Capitalize the first character of a string.""" + if s == "": + return "" + else: + return s[0].upper() + s[1:] + + +def extract_type(name: str) -> str: + """If the argument is the name of a method (of form C.m), return + the type portion in quotes (e.g. "y"). Otherwise, return the string + unmodified. + """ + name = re.sub('^"[a-zA-Z0-9_]+" of ', "", name) + return name + + +def strip_quotes(s: str) -> str: + """Strip a double quote at the beginning and end of the string, if any.""" + s = re.sub('^"', "", s) + s = re.sub('"$', "", s) + return s + + +def format_string_list(lst: list[str]) -> str: + assert lst + if len(lst) == 1: + return lst[0] + elif len(lst) <= 5: + return f"{', '.join(lst[:-1])} and {lst[-1]}" + else: + return "%s, ... 
and %s (%i methods suppressed)" % ( + ", ".join(lst[:2]), + lst[-1], + len(lst) - 3, + ) + + +def format_item_name_list(s: Iterable[str]) -> str: + lst = list(s) + if len(lst) <= 5: + return "(" + ", ".join([f'"{name}"' for name in lst]) + ")" + else: + return "(" + ", ".join([f'"{name}"' for name in lst[:5]]) + ", ...)" + + +def callable_name(type: FunctionLike) -> str | None: + name = type.get_name() + if name is not None and name[0] != "<": + return f'"{name}"'.replace(" of ", '" of "') + return name + + +def for_function(callee: CallableType) -> str: + name = callable_name(callee) + if name is not None: + return f" for {name}" + return "" + + +def wrong_type_arg_count(low: int, high: int, act: str, name: str) -> str: + if low == high: + s = f"{low} type arguments" + if low == 0: + s = "no type arguments" + elif low == 1: + s = "1 type argument" + else: + s = f"between {low} and {high} type arguments" + if act == "0": + act = "none" + return f'"{name}" expects {s}, but {act} given' + + +def find_defining_module(modules: dict[str, MypyFile], typ: CallableType) -> MypyFile | None: + if not typ.definition: + return None + fullname = typ.definition.fullname + if "." in fullname: + for i in range(fullname.count(".")): + module_name = fullname.rsplit(".", i + 1)[0] + try: + return modules[module_name] + except KeyError: + pass + assert False, "Couldn't determine module from CallableType" + return None + + +# For hard-coding suggested missing member alternatives. +COMMON_MISTAKES: Final[dict[str, Sequence[str]]] = {"add": ("append", "extend")} + + +def _real_quick_ratio(a: str, b: str) -> float: + # this is an upper bound on difflib.SequenceMatcher.ratio + # similar to difflib.SequenceMatcher.real_quick_ratio, but faster since we don't instantiate + al = len(a) + bl = len(b) + return 2.0 * min(al, bl) / (al + bl) + + +def best_matches(current: str, options: Collection[str], n: int) -> list[str]: + if not current: + return [] + # narrow down options cheaply + options = [o for o in options if _real_quick_ratio(current, o) > 0.75] + if len(options) >= 50: + options = [o for o in options if abs(len(o) - len(current)) <= 1] + + ratios = {option: difflib.SequenceMatcher(a=current, b=option).ratio() for option in options} + options = [option for option, ratio in ratios.items() if ratio > 0.75] + return sorted(options, key=lambda v: (-ratios[v], v))[:n] + + +def pretty_seq(args: Sequence[str], conjunction: str) -> str: + quoted = ['"' + a + '"' for a in args] + if len(quoted) == 1: + return quoted[0] + if len(quoted) == 2: + return f"{quoted[0]} {conjunction} {quoted[1]}" + last_sep = ", " + conjunction + " " + return ", ".join(quoted[:-1]) + last_sep + quoted[-1] + + +def append_invariance_notes( + notes: list[str], arg_type: Instance, expected_type: Instance +) -> list[str]: + """Explain that the type is invariant and give notes for how to solve the issue.""" + invariant_type = "" + covariant_suggestion = "" + if ( + arg_type.type.fullname == "builtins.list" + and expected_type.type.fullname == "builtins.list" + and is_subtype(arg_type.args[0], expected_type.args[0]) + ): + invariant_type = "list" + covariant_suggestion = 'Consider using "Sequence" instead, which is covariant' + elif ( + arg_type.type.fullname == "builtins.dict" + and expected_type.type.fullname == "builtins.dict" + and is_same_type(arg_type.args[0], expected_type.args[0]) + and is_subtype(arg_type.args[1], expected_type.args[1]) + ): + invariant_type = "dict" + covariant_suggestion = ( + 'Consider using "Mapping" instead, which is 
covariant in the value type' + ) + if invariant_type and covariant_suggestion: + notes.append( + f'"{invariant_type}" is invariant -- see ' + + "https://mypy.readthedocs.io/en/stable/common_issues.html#variance" + ) + notes.append(covariant_suggestion) + return notes + + +def append_union_note( + notes: list[str], arg_type: UnionType, expected_type: UnionType, options: Options +) -> list[str]: + """Point to specific union item(s) that may cause failure in subtype check.""" + non_matching = [] + items = flatten_nested_unions(arg_type.items) + if len(items) < MAX_UNION_ITEMS: + return notes + for item in items: + if not is_subtype(item, expected_type): + non_matching.append(item) + if non_matching: + types = ", ".join([format_type(typ, options) for typ in non_matching]) + notes.append(f"Item{plural_s(non_matching)} in the first union not in the second: {types}") + return notes + + +def append_numbers_notes( + notes: list[str], arg_type: Instance, expected_type: Instance +) -> list[str]: + """Explain if an unsupported type from "numbers" is used in a subtype check.""" + if expected_type.type.fullname in UNSUPPORTED_NUMBERS_TYPES: + notes.append('Types from "numbers" aren\'t supported for static type checking') + notes.append("See https://peps.python.org/pep-0484/#the-numeric-tower") + notes.append("Consider using a protocol instead, such as typing.SupportsFloat") + return notes + + +def make_inferred_type_note( + context: Context, subtype: Type, supertype: Type, supertype_str: str +) -> str: + """Explain that the user may have forgotten to type a variable. + + The user does not expect an error if the inferred container type is the same as the return + type of a function and the argument type(s) are a subtype of the argument type(s) of the + return type. This note suggests that they add a type annotation with the return type instead + of relying on the inferred type. + """ + subtype = get_proper_type(subtype) + supertype = get_proper_type(supertype) + if ( + isinstance(subtype, Instance) + and isinstance(supertype, Instance) + and subtype.type.fullname == supertype.type.fullname + and subtype.args + and supertype.args + and isinstance(context, ReturnStmt) + and isinstance(context.expr, NameExpr) + and isinstance(context.expr.node, Var) + and context.expr.node.is_inferred + ): + for subtype_arg, supertype_arg in zip(subtype.args, supertype.args): + if not is_subtype(subtype_arg, supertype_arg): + return "" + var_name = context.expr.name + return 'Perhaps you need a type annotation for "{}"? Suggestion: {}'.format( + var_name, supertype_str + ) + return "" + + +def format_key_list(keys: list[str], *, short: bool = False) -> str: + formatted_keys = [f'"{key}"' for key in keys] + td = "" if short else "TypedDict " + if len(keys) == 0: + return f"no {td}keys" + elif len(keys) == 1: + return f"{td}key {formatted_keys[0]}" + else: + return f"{td}keys ({', '.join(formatted_keys)})" + + +def ignore_last_known_values(t: UnionType) -> Type: + """This will avoid types like str | str in error messages. + + last_known_values are kept during union simplification, but may cause + weird formatting for e.g. tuples of literals. 
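+
+    For example (illustrative): a union built from two "str" instances that differ
+    only in their last_known_value would otherwise be formatted as "str | str";
+    erasing the literal values lets the duplicates collapse into a single "str".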
+ """ + union_items: list[Type] = [] + seen_instances = set() + for item in t.items: + if isinstance(item, ProperType) and isinstance(item, Instance): + erased = item.copy_modified(last_known_value=None) + if erased in seen_instances: + continue + seen_instances.add(erased) + union_items.append(erased) + else: + union_items.append(item) + return UnionType.make_union(union_items, t.line, t.column) diff --git a/.venv/lib/python3.12/site-packages/mypy/metastore.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/metastore.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..63fc5a3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/metastore.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/metastore.py b/.venv/lib/python3.12/site-packages/mypy/metastore.py new file mode 100644 index 0000000..442c7dc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/metastore.py @@ -0,0 +1,219 @@ +"""Interfaces for accessing metadata. + +We provide two implementations. + * The "classic" file system implementation, which uses a directory + structure of files. + * A hokey sqlite backed implementation, which basically simulates + the file system in an effort to work around poor file system performance + on OS X. +""" + +from __future__ import annotations + +import binascii +import os +import time +from abc import abstractmethod +from collections.abc import Iterable +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + # We avoid importing sqlite3 unless we are using it so we can mostly work + # on semi-broken pythons that are missing it. + import sqlite3 + + +class MetadataStore: + """Generic interface for metadata storage.""" + + @abstractmethod + def getmtime(self, name: str) -> float: + """Read the mtime of a metadata entry. + + Raises FileNotFound if the entry does not exist. + """ + + @abstractmethod + def read(self, name: str) -> bytes: + """Read the contents of a metadata entry. + + Raises FileNotFound if the entry does not exist. + """ + + @abstractmethod + def write(self, name: str, data: bytes, mtime: float | None = None) -> bool: + """Write a metadata entry. + + If mtime is specified, set it as the mtime of the entry. Otherwise, + the current time is used. + + Returns True if the entry is successfully written, False otherwise. + """ + + @abstractmethod + def remove(self, name: str) -> None: + """Delete a metadata entry""" + + @abstractmethod + def commit(self) -> None: + """If the backing store requires a commit, do it. + + But N.B. that this is not *guaranteed* to do anything, and + there is no guarantee that changes are not made until it is + called. + """ + + @abstractmethod + def list_all(self) -> Iterable[str]: ... + + +def random_string() -> str: + return binascii.hexlify(os.urandom(8)).decode("ascii") + + +class FilesystemMetadataStore(MetadataStore): + def __init__(self, cache_dir_prefix: str) -> None: + # We check startswith instead of equality because the version + # will have already been appended by the time the cache dir is + # passed here. + if cache_dir_prefix.startswith(os.devnull): + self.cache_dir_prefix = None + else: + self.cache_dir_prefix = cache_dir_prefix + + def getmtime(self, name: str) -> float: + if not self.cache_dir_prefix: + raise FileNotFoundError() + + return int(os.path.getmtime(os.path.join(self.cache_dir_prefix, name))) + + def read(self, name: str) -> bytes: + assert os.path.normpath(name) != os.path.abspath(name), "Don't use absolute paths!" 
+ + if not self.cache_dir_prefix: + raise FileNotFoundError() + + with open(os.path.join(self.cache_dir_prefix, name), "rb") as f: + return f.read() + + def write(self, name: str, data: bytes, mtime: float | None = None) -> bool: + assert os.path.normpath(name) != os.path.abspath(name), "Don't use absolute paths!" + + if not self.cache_dir_prefix: + return False + + path = os.path.join(self.cache_dir_prefix, name) + tmp_filename = path + "." + random_string() + try: + os.makedirs(os.path.dirname(path), exist_ok=True) + with open(tmp_filename, "wb") as f: + f.write(data) + os.replace(tmp_filename, path) + if mtime is not None: + os.utime(path, times=(mtime, mtime)) + + except OSError: + return False + return True + + def remove(self, name: str) -> None: + if not self.cache_dir_prefix: + raise FileNotFoundError() + + os.remove(os.path.join(self.cache_dir_prefix, name)) + + def commit(self) -> None: + pass + + def list_all(self) -> Iterable[str]: + if not self.cache_dir_prefix: + return + + for dir, _, files in os.walk(self.cache_dir_prefix): + dir = os.path.relpath(dir, self.cache_dir_prefix) + for file in files: + yield os.path.normpath(os.path.join(dir, file)) + + +SCHEMA = """ +CREATE TABLE IF NOT EXISTS files2 ( + path TEXT UNIQUE NOT NULL, + mtime REAL, + data BLOB +); +CREATE INDEX IF NOT EXISTS path_idx on files2(path); +""" + + +def connect_db(db_file: str) -> sqlite3.Connection: + import sqlite3.dbapi2 + + db = sqlite3.dbapi2.connect(db_file) + db.executescript(SCHEMA) + return db + + +class SqliteMetadataStore(MetadataStore): + def __init__(self, cache_dir_prefix: str) -> None: + # We check startswith instead of equality because the version + # will have already been appended by the time the cache dir is + # passed here. + if cache_dir_prefix.startswith(os.devnull): + self.db = None + return + + os.makedirs(cache_dir_prefix, exist_ok=True) + self.db = connect_db(os.path.join(cache_dir_prefix, "cache.db")) + + def _query(self, name: str, field: str) -> Any: + # Raises FileNotFound for consistency with the file system version + if not self.db: + raise FileNotFoundError() + + cur = self.db.execute(f"SELECT {field} FROM files2 WHERE path = ?", (name,)) + results = cur.fetchall() + if not results: + raise FileNotFoundError() + assert len(results) == 1 + return results[0][0] + + def getmtime(self, name: str) -> float: + mtime = self._query(name, "mtime") + assert isinstance(mtime, float) + return mtime + + def read(self, name: str) -> bytes: + data = self._query(name, "data") + assert isinstance(data, bytes) + return data + + def write(self, name: str, data: bytes, mtime: float | None = None) -> bool: + import sqlite3 + + if not self.db: + return False + try: + if mtime is None: + mtime = time.time() + self.db.execute( + "INSERT OR REPLACE INTO files2(path, mtime, data) VALUES(?, ?, ?)", + (name, mtime, data), + ) + except sqlite3.OperationalError: + return False + return True + + def remove(self, name: str) -> None: + if not self.db: + raise FileNotFoundError() + + self.db.execute("DELETE FROM files2 WHERE path = ?", (name,)) + + def commit(self) -> None: + if self.db: + self.db.commit() + + def list_all(self) -> Iterable[str]: + if self.db: + for row in self.db.execute("SELECT path FROM files2"): + yield row[0] diff --git a/.venv/lib/python3.12/site-packages/mypy/mixedtraverser.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/mixedtraverser.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..b7916e0 Binary files /dev/null and 
b/.venv/lib/python3.12/site-packages/mypy/mixedtraverser.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/mixedtraverser.py b/.venv/lib/python3.12/site-packages/mypy/mixedtraverser.py new file mode 100644 index 0000000..39fba49 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/mixedtraverser.py @@ -0,0 +1,130 @@ +from __future__ import annotations + +from mypy.nodes import ( + AssertTypeExpr, + AssignmentStmt, + CastExpr, + ClassDef, + ForStmt, + FuncItem, + NamedTupleExpr, + NewTypeExpr, + PromoteExpr, + TypeAlias, + TypeAliasExpr, + TypeAliasStmt, + TypeApplication, + TypedDictExpr, + TypeFormExpr, + TypeVarExpr, + Var, + WithStmt, +) +from mypy.traverser import TraverserVisitor +from mypy.types import Type +from mypy.typetraverser import TypeTraverserVisitor + + +class MixedTraverserVisitor(TraverserVisitor, TypeTraverserVisitor): + """Recursive traversal of both Node and Type objects.""" + + def __init__(self) -> None: + self.in_type_alias_expr = False + + # Symbol nodes + + def visit_var(self, var: Var, /) -> None: + self.visit_optional_type(var.type) + + def visit_func(self, o: FuncItem, /) -> None: + super().visit_func(o) + self.visit_optional_type(o.type) + + def visit_class_def(self, o: ClassDef, /) -> None: + # TODO: Should we visit generated methods/variables as well, either here or in + # TraverserVisitor? + super().visit_class_def(o) + info = o.info + if info: + for base in info.bases: + base.accept(self) + if info.special_alias: + info.special_alias.accept(self) + + def visit_type_alias_expr(self, o: TypeAliasExpr, /) -> None: + super().visit_type_alias_expr(o) + o.node.accept(self) + + def visit_type_var_expr(self, o: TypeVarExpr, /) -> None: + super().visit_type_var_expr(o) + o.upper_bound.accept(self) + for value in o.values: + value.accept(self) + + def visit_typeddict_expr(self, o: TypedDictExpr, /) -> None: + super().visit_typeddict_expr(o) + self.visit_optional_type(o.info.typeddict_type) + + def visit_namedtuple_expr(self, o: NamedTupleExpr, /) -> None: + super().visit_namedtuple_expr(o) + assert o.info.tuple_type + o.info.tuple_type.accept(self) + + def visit__promote_expr(self, o: PromoteExpr, /) -> None: + super().visit__promote_expr(o) + o.type.accept(self) + + def visit_newtype_expr(self, o: NewTypeExpr, /) -> None: + super().visit_newtype_expr(o) + self.visit_optional_type(o.old_type) + + # Statements + + def visit_assignment_stmt(self, o: AssignmentStmt, /) -> None: + super().visit_assignment_stmt(o) + self.visit_optional_type(o.type) + + def visit_type_alias_stmt(self, o: TypeAliasStmt, /) -> None: + super().visit_type_alias_stmt(o) + if o.alias_node is not None: + o.alias_node.accept(self) + + def visit_type_alias(self, o: TypeAlias, /) -> None: + super().visit_type_alias(o) + self.in_type_alias_expr = True + o.target.accept(self) + self.in_type_alias_expr = False + + def visit_for_stmt(self, o: ForStmt, /) -> None: + super().visit_for_stmt(o) + self.visit_optional_type(o.index_type) + + def visit_with_stmt(self, o: WithStmt, /) -> None: + super().visit_with_stmt(o) + for typ in o.analyzed_types: + typ.accept(self) + + # Expressions + + def visit_cast_expr(self, o: CastExpr, /) -> None: + super().visit_cast_expr(o) + o.type.accept(self) + + def visit_type_form_expr(self, o: TypeFormExpr, /) -> None: + super().visit_type_form_expr(o) + o.type.accept(self) + + def visit_assert_type_expr(self, o: AssertTypeExpr, /) -> None: + super().visit_assert_type_expr(o) + o.type.accept(self) + + def 
visit_type_application(self, o: TypeApplication, /) -> None: + super().visit_type_application(o) + for t in o.types: + t.accept(self) + + # Helpers + + def visit_optional_type(self, t: Type | None, /) -> None: + if t: + t.accept(self) diff --git a/.venv/lib/python3.12/site-packages/mypy/modulefinder.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/modulefinder.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..5929ea4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/modulefinder.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/modulefinder.py b/.venv/lib/python3.12/site-packages/mypy/modulefinder.py new file mode 100644 index 0000000..5176b7e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/modulefinder.py @@ -0,0 +1,1003 @@ +"""Low-level infrastructure to find modules. + +This builds on fscache.py; find_sources.py builds on top of this. +""" + +from __future__ import annotations + +import ast +import collections +import functools +import os +import re +import subprocess +import sys +from enum import Enum, unique +from typing import Final, Optional, Union +from typing_extensions import TypeAlias as _TypeAlias + +from pathspec import PathSpec +from pathspec.patterns.gitwildmatch import GitWildMatchPatternError + +from mypy import pyinfo +from mypy.errors import CompileError +from mypy.fscache import FileSystemCache +from mypy.nodes import MypyFile +from mypy.options import Options +from mypy.stubinfo import stub_distribution_name +from mypy.util import os_path_join + + +# Paths to be searched in find_module(). +class SearchPaths: + def __init__( + self, + python_path: tuple[str, ...], + mypy_path: tuple[str, ...], + package_path: tuple[str, ...], + typeshed_path: tuple[str, ...], + ) -> None: + # where user code is found + self.python_path = tuple(map(os.path.abspath, python_path)) + # from $MYPYPATH or config variable + self.mypy_path = tuple(map(os.path.abspath, mypy_path)) + # from get_site_packages_dirs() + self.package_path = tuple(map(os.path.abspath, package_path)) + # paths in typeshed + self.typeshed_path = tuple(map(os.path.abspath, typeshed_path)) + + def asdict(self) -> dict[str, tuple[str, ...]]: + return { + "python_path": self.python_path, + "mypy_path": self.mypy_path, + "package_path": self.package_path, + "typeshed_path": self.typeshed_path, + } + + +# Package dirs are a two-tuple of path to search and whether to verify the module +OnePackageDir = tuple[str, bool] +PackageDirs = list[OnePackageDir] + +# Minimum and maximum Python versions for modules in stdlib as (major, minor) +StdlibVersions: _TypeAlias = dict[str, tuple[tuple[int, int], Optional[tuple[int, int]]]] + +PYTHON_EXTENSIONS: Final = [".pyi", ".py"] + + +# TODO: Consider adding more reasons here? +# E.g. if we deduce a module would likely be found if the user were +# to set the --namespace-packages flag. +@unique +class ModuleNotFoundReason(Enum): + # The module was not found: we found neither stubs nor a plausible code + # implementation (with or without a py.typed file). + NOT_FOUND = 0 + + # The implementation for this module plausibly exists (e.g. we + # found a matching folder or *.py file), but either the parent package + # did not contain a py.typed file or we were unable to find a + # corresponding *-stubs package. + FOUND_WITHOUT_TYPE_HINTS = 1 + + # The module was not found in the current working directory, but + # was able to be found in the parent directory. 
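+    # (A typical, illustrative case: mypy is invoked from inside src/pkg/ while the
+    # sources import "pkg.mod", so the module only resolves one directory up; see
+    # FindModuleCache._can_find_module_in_parent_dir below.)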
+ WRONG_WORKING_DIRECTORY = 2 + + # Stub PyPI package (typically types-pkgname) known to exist but not installed. + APPROVED_STUBS_NOT_INSTALLED = 3 + + def error_message_templates(self, daemon: bool) -> tuple[str, list[str]]: + doc_link = "See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports" + if self is ModuleNotFoundReason.NOT_FOUND: + msg = 'Cannot find implementation or library stub for module named "{module}"' + notes = [doc_link] + elif self is ModuleNotFoundReason.WRONG_WORKING_DIRECTORY: + msg = 'Cannot find implementation or library stub for module named "{module}"' + notes = [ + "You may be running mypy in a subpackage, mypy should be run on the package root" + ] + elif self is ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS: + msg = ( + 'Skipping analyzing "{module}": module is installed, but missing library stubs ' + "or py.typed marker" + ) + notes = [doc_link] + elif self is ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED: + msg = 'Library stubs not installed for "{module}"' + notes = ['Hint: "python3 -m pip install {stub_dist}"'] + if not daemon: + notes.append( + '(or run "mypy --install-types" to install all missing stub packages)' + ) + notes.append(doc_link) + else: + assert False + return msg, notes + + +# If we found the module, returns the path to the module as a str. +# Otherwise, returns the reason why the module wasn't found. +ModuleSearchResult = Union[str, ModuleNotFoundReason] + + +class BuildSource: + """A single source file.""" + + def __init__( + self, + path: str | None, + module: str | None, + text: str | None = None, + base_dir: str | None = None, + followed: bool = False, + ) -> None: + self.path = path # File where it's found (e.g. 'xxx/yyy/foo/bar.py') + self.module = module or "__main__" # Module name (e.g. 'foo.bar') + self.text = text # Source code, if initially supplied, else None + self.base_dir = base_dir # Directory where the package is rooted (e.g. 'xxx/yyy') + self.followed = followed # Was this found by following imports? + + def __repr__(self) -> str: + return ( + "BuildSource(path={!r}, module={!r}, has_text={}, base_dir={!r}, followed={})".format( + self.path, self.module, self.text is not None, self.base_dir, self.followed + ) + ) + + +class BuildSourceSet: + """Helper to efficiently test a file's membership in a set of build sources.""" + + def __init__(self, sources: list[BuildSource]) -> None: + self.source_text_present = False + self.source_modules: dict[str, str] = {} + self.source_paths: set[str] = set() + + for source in sources: + if source.text is not None: + self.source_text_present = True + if source.path: + self.source_paths.add(source.path) + if source.module: + self.source_modules[source.module] = source.path or "" + + def is_source(self, file: MypyFile) -> bool: + return ( + (file.path and file.path in self.source_paths) + or file._fullname in self.source_modules + or self.source_text_present + ) + + +class FindModuleCache: + """Module finder with integrated cache. + + Module locations and some intermediate results are cached internally + and can be cleared with the clear() method. + + All file system accesses are performed through a FileSystemCache, + which is not ever cleared by this class. If necessary it must be + cleared by client code. 
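+
+    A minimal usage sketch (the paths and module name below are illustrative only):
+
+        search_paths = SearchPaths(
+            python_path=("/repo/src",), mypy_path=(), package_path=(), typeshed_path=()
+        )
+        finder = FindModuleCache(search_paths, fscache=None, options=None)
+        result = finder.find_module("foo.bar")  # a file path, or a ModuleNotFoundReason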
+ """ + + def __init__( + self, + search_paths: SearchPaths, + fscache: FileSystemCache | None, + options: Options | None, + stdlib_py_versions: StdlibVersions | None = None, + source_set: BuildSourceSet | None = None, + ) -> None: + self.search_paths = search_paths + self.source_set = source_set + self.fscache = fscache or FileSystemCache() + # Cache for get_toplevel_possibilities: + # search_paths -> (toplevel_id -> list(package_dirs)) + self.initial_components: dict[tuple[str, ...], dict[str, list[str]]] = {} + # Cache find_module: id -> result + self.results: dict[str, ModuleSearchResult] = {} + self.ns_ancestors: dict[str, str] = {} + self.options = options + custom_typeshed_dir = None + if options: + custom_typeshed_dir = options.custom_typeshed_dir + self.stdlib_py_versions = stdlib_py_versions or load_stdlib_py_versions( + custom_typeshed_dir + ) + + def clear(self) -> None: + self.results.clear() + self.initial_components.clear() + self.ns_ancestors.clear() + + def find_module_via_source_set(self, id: str) -> ModuleSearchResult | None: + """Fast path to find modules by looking through the input sources + + This is only used when --fast-module-lookup is passed on the command line.""" + if not self.source_set: + return None + + p = self.source_set.source_modules.get(id, None) + if p and self.fscache.isfile(p): + # We need to make sure we still have __init__.py all the way up + # otherwise we might have false positives compared to slow path + # in case of deletion of init files, which is covered by some tests. + # TODO: are there some combination of flags in which this check should be skipped? + d = os.path.dirname(p) + for _ in range(id.count(".")): + if not any( + self.fscache.isfile(os_path_join(d, "__init__" + x)) for x in PYTHON_EXTENSIONS + ): + return None + d = os.path.dirname(d) + return p + + idx = id.rfind(".") + if idx != -1: + # When we're looking for foo.bar.baz and can't find a matching module + # in the source set, look up for a foo.bar module. + parent = self.find_module_via_source_set(id[:idx]) + if parent is None or not isinstance(parent, str): + return None + + basename, ext = os.path.splitext(parent) + if not any(parent.endswith("__init__" + x) for x in PYTHON_EXTENSIONS) and ( + ext in PYTHON_EXTENSIONS and not self.fscache.isdir(basename) + ): + # If we do find such a *module* (and crucially, we don't want a package, + # hence the filtering out of __init__ files, and checking for the presence + # of a folder with a matching name), then we can be pretty confident that + # 'baz' will either be a top-level variable in foo.bar, or will not exist. + # + # Either way, spelunking in other search paths for another 'foo.bar.baz' + # module should be avoided because: + # 1. in the unlikely event that one were found, it's highly likely that + # it would be unrelated to the source being typechecked and therefore + # more likely to lead to erroneous results + # 2. 
as described in _find_module, in some cases the search itself could + # potentially waste significant amounts of time + return ModuleNotFoundReason.NOT_FOUND + return None + + def find_lib_path_dirs(self, id: str, lib_path: tuple[str, ...]) -> PackageDirs: + """Find which elements of a lib_path have the directory a module needs to exist.""" + components = id.split(".") + dir_chain = os.sep.join(components[:-1]) # e.g., 'foo/bar' + + dirs = [] + for pathitem in self.get_toplevel_possibilities(lib_path, components[0]): + # e.g., '/usr/lib/python3.4/foo/bar' + if dir_chain: + dir = os_path_join(pathitem, dir_chain) + else: + dir = pathitem + if self.fscache.isdir(dir): + dirs.append((dir, True)) + return dirs + + def get_toplevel_possibilities(self, lib_path: tuple[str, ...], id: str) -> list[str]: + """Find which elements of lib_path could contain a particular top-level module. + + In practice, almost all modules can be routed to the correct entry in + lib_path by looking at just the first component of the module name. + + We take advantage of this by enumerating the contents of all of the + directories on the lib_path and building a map of which entries in + the lib_path could contain each potential top-level module that appears. + """ + + if lib_path in self.initial_components: + return self.initial_components[lib_path].get(id, []) + + # Enumerate all the files in the directories on lib_path and produce the map + components: dict[str, list[str]] = {} + for dir in lib_path: + try: + contents = self.fscache.listdir(dir) + except OSError: + contents = [] + # False positives are fine for correctness here, since we will check + # precisely later, so we only look at the root of every filename without + # any concern for the exact details. + for name in contents: + name = os.path.splitext(name)[0] + components.setdefault(name, []).append(dir) + + self.initial_components[lib_path] = components + return components.get(id, []) + + def find_module(self, id: str, *, fast_path: bool = False) -> ModuleSearchResult: + """Return the path of the module source file or why it wasn't found. + + If fast_path is True, prioritize performance over generating detailed + error descriptions. 
+ """ + if id not in self.results: + top_level = id.partition(".")[0] + use_typeshed = True + if id in self.stdlib_py_versions: + use_typeshed = self._typeshed_has_version(id) + elif top_level in self.stdlib_py_versions: + use_typeshed = self._typeshed_has_version(top_level) + result, should_cache = self._find_module(id, use_typeshed) + if should_cache: + if ( + not ( + fast_path or (self.options is not None and self.options.fast_module_lookup) + ) + and result is ModuleNotFoundReason.NOT_FOUND + and self._can_find_module_in_parent_dir(id) + ): + self.results[id] = ModuleNotFoundReason.WRONG_WORKING_DIRECTORY + else: + self.results[id] = result + return self.results[id] + else: + return result + return self.results[id] + + def _typeshed_has_version(self, module: str) -> bool: + if not self.options: + return True + version = typeshed_py_version(self.options) + min_version, max_version = self.stdlib_py_versions[module] + return version >= min_version and (max_version is None or version <= max_version) + + def _find_module_non_stub_helper( + self, id: str, pkg_dir: str + ) -> OnePackageDir | ModuleNotFoundReason: + plausible_match = False + dir_path = pkg_dir + components = id.split(".") + for index, component in enumerate(components): + dir_path = os_path_join(dir_path, component) + if self.fscache.isfile(os_path_join(dir_path, "py.typed")): + return os.path.join(pkg_dir, *components[:-1]), index == 0 + elif not plausible_match and ( + self.fscache.isdir(dir_path) or self.fscache.isfile(dir_path + ".py") + ): + plausible_match = True + # If this is not a directory then we can't traverse further into it + if not self.fscache.isdir(dir_path): + break + if plausible_match: + if self.options: + module_specific_options = self.options.clone_for_module(id) + if module_specific_options.follow_untyped_imports: + return os.path.join(pkg_dir, *components[:-1]), False + return ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS + else: + return ModuleNotFoundReason.NOT_FOUND + + def _update_ns_ancestors(self, components: list[str], match: tuple[str, bool]) -> None: + path, verify = match + for i in range(1, len(components)): + pkg_id = ".".join(components[:-i]) + if pkg_id not in self.ns_ancestors and self.fscache.isdir(path): + self.ns_ancestors[pkg_id] = path + path = os.path.dirname(path) + + def _can_find_module_in_parent_dir(self, id: str) -> bool: + """Test if a module can be found by checking the parent directories + of the current working directory. + """ + working_dir = os.getcwd() + parent_search = FindModuleCache( + SearchPaths((), (), (), ()), + self.fscache, + self.options, + stdlib_py_versions=self.stdlib_py_versions, + ) + while any(is_init_file(file) for file in os.listdir(working_dir)): + working_dir = os.path.dirname(working_dir) + parent_search.search_paths = SearchPaths((working_dir,), (), (), ()) + if not isinstance(parent_search._find_module(id, False)[0], ModuleNotFoundReason): + return True + return False + + def _find_module(self, id: str, use_typeshed: bool) -> tuple[ModuleSearchResult, bool]: + """Try to find a module in all available sources. + + Returns: + ``(result, can_be_cached)`` pair. + """ + fscache = self.fscache + + # Fast path for any modules in the current source set. 
+ # This is particularly important when there are a large number of search + # paths which share the first (few) component(s) due to the use of namespace + # packages, for instance: + # foo/ + # company/ + # __init__.py + # foo/ + # bar/ + # company/ + # __init__.py + # bar/ + # baz/ + # company/ + # __init__.py + # baz/ + # + # mypy gets [foo/company/foo, bar/company/bar, baz/company/baz, ...] as input + # and computes [foo, bar, baz, ...] as the module search path. + # + # This would result in O(n) search for every import of company.*, leading to + # O(n**2) behavior in load_graph as such imports are unsurprisingly present + # at least once, and usually many more times than that, in each and every file + # being parsed. + # + # Thankfully, such cases are efficiently handled by looking up the module path + # via BuildSourceSet. + p = ( + self.find_module_via_source_set(id) + if (self.options is not None and self.options.fast_module_lookup) + else None + ) + if p: + return p, True + + # If we're looking for a module like 'foo.bar.baz', it's likely that most of the + # many elements of lib_path don't even have a subdirectory 'foo/bar'. Discover + # that only once and cache it for when we look for modules like 'foo.bar.blah' + # that will require the same subdirectory. + components = id.split(".") + dir_chain = os.sep.join(components[:-1]) # e.g., 'foo/bar' + + # We have two sets of folders so that we collect *all* stubs folders and + # put them in the front of the search path + third_party_inline_dirs: PackageDirs = [] + third_party_stubs_dirs: PackageDirs = [] + found_possible_third_party_missing_type_hints = False + # Third-party stub/typed packages + candidate_package_dirs = { + package_dir[0] + for component in (components[0], components[0] + "-stubs") + for package_dir in self.find_lib_path_dirs(component, self.search_paths.package_path) + } + # Caching FOUND_WITHOUT_TYPE_HINTS is not always safe. That causes issues with + # typed subpackages in namespace packages. + can_cache_any_result = True + for pkg_dir in self.search_paths.package_path: + if pkg_dir not in candidate_package_dirs: + continue + stub_name = components[0] + "-stubs" + stub_dir = os_path_join(pkg_dir, stub_name) + if fscache.isdir(stub_dir): + stub_typed_file = os_path_join(stub_dir, "py.typed") + stub_components = [stub_name] + components[1:] + path = os.path.join(pkg_dir, *stub_components[:-1]) + if fscache.isdir(path): + if fscache.isfile(stub_typed_file): + # Stub packages can have a py.typed file, which must include + # 'partial\n' to make the package partial + # Partial here means that mypy should look at the runtime + # package if installed. 
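+                        # An illustrative layout for a hypothetical package "foo":
+                        #   <site-packages>/foo-stubs/py.typed   (contains "partial\n")
+                        #   <site-packages>/foo-stubs/bar.pyi
+                        #   <site-packages>/foo/bar.py
+                        # The stubs directory is searched first; the runtime "foo"
+                        # directory is also recorded (below) as an inline fallback for
+                        # modules the partial stubs do not cover.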
+ if fscache.read(stub_typed_file).decode().strip() == "partial": + runtime_path = os_path_join(pkg_dir, dir_chain) + third_party_inline_dirs.append((runtime_path, True)) + # if the package is partial, we don't verify the module, as + # the partial stub package may not have a __init__.pyi + third_party_stubs_dirs.append((path, False)) + else: + # handle the edge case where people put a py.typed file + # in a stub package, but it isn't partial + third_party_stubs_dirs.append((path, True)) + else: + third_party_stubs_dirs.append((path, True)) + non_stub_match = self._find_module_non_stub_helper(id, pkg_dir) + if isinstance(non_stub_match, ModuleNotFoundReason): + if non_stub_match is ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS: + found_possible_third_party_missing_type_hints = True + can_cache_any_result = False + else: + third_party_inline_dirs.append(non_stub_match) + self._update_ns_ancestors(components, non_stub_match) + + if self.options and self.options.use_builtins_fixtures: + # Everything should be in fixtures. + third_party_inline_dirs.clear() + third_party_stubs_dirs.clear() + found_possible_third_party_missing_type_hints = False + python_mypy_path = self.search_paths.mypy_path + self.search_paths.python_path + candidate_base_dirs = self.find_lib_path_dirs(id, python_mypy_path) + if use_typeshed: + # Search for stdlib stubs in typeshed before installed + # stubs to avoid picking up backports (dataclasses, for + # example) when the library is included in stdlib. + candidate_base_dirs += self.find_lib_path_dirs(id, self.search_paths.typeshed_path) + candidate_base_dirs += third_party_stubs_dirs + third_party_inline_dirs + + # If we're looking for a module like 'foo.bar.baz', then candidate_base_dirs now + # contains just the subdirectories 'foo/bar' that actually exist under the + # elements of lib_path. This is probably much shorter than lib_path itself. + # Now just look for 'baz.pyi', 'baz/__init__.py', etc., inside those directories. + seplast = os.sep + components[-1] # so e.g. '/baz' + sepinit = os.sep + "__init__" + near_misses = [] # Collect near misses for namespace mode (see below). + for base_dir, verify in candidate_base_dirs: + base_path = base_dir + seplast # so e.g. '/usr/lib/python3.4/foo/bar/baz' + has_init = False + dir_prefix = base_dir + for _ in range(len(components) - 1): + dir_prefix = os.path.dirname(dir_prefix) + + # Stubs-only packages always take precedence over py.typed packages + path_stubs = f"{base_path}-stubs{sepinit}.pyi" + if fscache.isfile_case(path_stubs, dir_prefix): + if verify and not verify_module(fscache, id, path_stubs, dir_prefix): + near_misses.append((path_stubs, dir_prefix)) + else: + return path_stubs, True + + # Prefer package over module, i.e. baz/__init__.py* over baz.py*. + for extension in PYTHON_EXTENSIONS: + path = base_path + sepinit + extension + if fscache.isfile_case(path, dir_prefix): + has_init = True + if verify and not verify_module(fscache, id, path, dir_prefix): + near_misses.append((path, dir_prefix)) + continue + return path, True + + # In namespace mode, register a potential namespace package + if self.options and self.options.namespace_packages: + if ( + not has_init + and fscache.exists_case(base_path, dir_prefix) + and not fscache.isfile_case(base_path, dir_prefix) + ): + near_misses.append((base_path, dir_prefix)) + + # No package, look for module. 
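+            # (For a hypothetical id "foo.bar.baz" this tries <base_dir>/baz.pyi
+            # first, then <base_dir>/baz.py.)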
+ for extension in PYTHON_EXTENSIONS: + path = base_path + extension + if fscache.isfile_case(path, dir_prefix): + if verify and not verify_module(fscache, id, path, dir_prefix): + near_misses.append((path, dir_prefix)) + continue + return path, True + + # In namespace mode, re-check those entries that had 'verify'. + # Assume search path entries xxx, yyy and zzz, and we're + # looking for foo.bar.baz. Suppose near_misses has: + # + # - xxx/foo/bar/baz.py + # - yyy/foo/bar/baz/__init__.py + # - zzz/foo/bar/baz.pyi + # + # If any of the foo directories has __init__.py[i], it wins. + # Else, we look for foo/bar/__init__.py[i], etc. If there are + # none, the first hit wins. Note that this does not take into + # account whether the lowest-level module is a file (baz.py), + # a package (baz/__init__.py), or a stub file (baz.pyi) -- for + # these the first one encountered along the search path wins. + # + # The helper function highest_init_level() returns an int that + # indicates the highest level at which a __init__.py[i] file + # is found; if no __init__ was found it returns 0, if we find + # only foo/bar/__init__.py it returns 1, and if we have + # foo/__init__.py it returns 2 (regardless of what's in + # foo/bar). It doesn't look higher than that. + if self.options and self.options.namespace_packages and near_misses: + levels = [ + highest_init_level(fscache, id, path, dir_prefix) + for path, dir_prefix in near_misses + ] + index = levels.index(max(levels)) + return near_misses[index][0], True + + # Finally, we may be asked to produce an ancestor for an + # installed package with a py.typed marker that is a + # subpackage of a namespace package. We only fess up to these + # if we would otherwise return "not found". + ancestor = self.ns_ancestors.get(id) + if ancestor is not None: + return ancestor, True + + approved_dist_name = stub_distribution_name(id) + if approved_dist_name: + if len(components) == 1: + return ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED, True + # If we're a missing submodule of an already installed approved stubs, we don't want to + # error with APPROVED_STUBS_NOT_INSTALLED, but rather want to return NOT_FOUND. + for i in range(1, len(components)): + parent_id = ".".join(components[:i]) + if stub_distribution_name(parent_id) == approved_dist_name: + break + else: + return ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED, True + if self.find_module(parent_id) is ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED: + return ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED, True + return ModuleNotFoundReason.NOT_FOUND, True + + if found_possible_third_party_missing_type_hints: + return ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS, can_cache_any_result + return ModuleNotFoundReason.NOT_FOUND, True + + def find_modules_recursive(self, module: str) -> list[BuildSource]: + module_path = self.find_module(module, fast_path=True) + if isinstance(module_path, ModuleNotFoundReason): + return [] + sources = [BuildSource(module_path, module, None)] + + package_path = None + if is_init_file(module_path): + package_path = os.path.dirname(module_path) + elif self.fscache.isdir(module_path): + package_path = module_path + if package_path is None: + return sources + + # This logic closely mirrors that in find_sources. One small but important difference is + # that we do not sort names with keyfunc. The recursive call to find_modules_recursive + # calls find_module, which will handle the preference between packages, pyi and py. 
+ # Another difference is it doesn't handle nested search paths / package roots. + + seen: set[str] = set() + names = sorted(self.fscache.listdir(package_path)) + for name in names: + # Skip certain names altogether + if name in ("__pycache__", "site-packages", "node_modules") or name.startswith("."): + continue + subpath = os_path_join(package_path, name) + + if self.options and matches_exclude( + subpath, self.options.exclude, self.fscache, self.options.verbosity >= 2 + ): + continue + if ( + self.options + and self.options.exclude_gitignore + and matches_gitignore(subpath, self.fscache, self.options.verbosity >= 2) + ): + continue + + if self.fscache.isdir(subpath): + # Only recurse into packages + if (self.options and self.options.namespace_packages) or ( + self.fscache.isfile(os_path_join(subpath, "__init__.py")) + or self.fscache.isfile(os_path_join(subpath, "__init__.pyi")) + ): + seen.add(name) + sources.extend(self.find_modules_recursive(module + "." + name)) + else: + stem, suffix = os.path.splitext(name) + if stem == "__init__": + continue + if stem not in seen and "." not in stem and suffix in PYTHON_EXTENSIONS: + # (If we sorted names by keyfunc) we could probably just make the BuildSource + # ourselves, but this ensures compatibility with find_module / the cache + seen.add(stem) + sources.extend(self.find_modules_recursive(module + "." + stem)) + return sources + + +def matches_exclude( + subpath: str, excludes: list[str], fscache: FileSystemCache, verbose: bool +) -> bool: + if not excludes: + return False + subpath_str = os.path.relpath(subpath).replace(os.sep, "/") + if fscache.isdir(subpath): + subpath_str += "/" + for exclude in excludes: + try: + if re.search(exclude, subpath_str): + if verbose: + print( + f"TRACE: Excluding {subpath_str} (matches pattern {exclude})", + file=sys.stderr, + ) + return True + except re.error as e: + print( + f"error: The exclude {exclude} is an invalid regular expression, because: {e}" + + ( + "\n(Hint: use / as a path separator, even if you're on Windows!)" + if "\\" in exclude + else "" + ) + + "\nFor more information on Python's flavor of regex, see:" + + " https://docs.python.org/3/library/re.html", + file=sys.stderr, + ) + sys.exit(2) + return False + + +def matches_gitignore(subpath: str, fscache: FileSystemCache, verbose: bool) -> bool: + dir, _ = os.path.split(subpath) + for gi_path, gi_spec in find_gitignores(dir): + relative_path = os.path.relpath(subpath, gi_path) + if fscache.isdir(relative_path): + relative_path = relative_path + "/" + if gi_spec.match_file(relative_path): + if verbose: + print( + f"TRACE: Excluding {relative_path} (matches .gitignore) in {gi_path}", + file=sys.stderr, + ) + return True + return False + + +@functools.lru_cache +def find_gitignores(dir: str) -> list[tuple[str, PathSpec]]: + parent_dir = os.path.dirname(dir) + if parent_dir == dir: + parent_gitignores = [] + else: + parent_gitignores = find_gitignores(parent_dir) + + gitignore = os.path.join(dir, ".gitignore") + if os.path.isfile(gitignore): + with open(gitignore) as f: + lines = f.readlines() + try: + return parent_gitignores + [(dir, PathSpec.from_lines("gitwildmatch", lines))] + except GitWildMatchPatternError: + print(f"error: could not parse {gitignore}", file=sys.stderr) + return parent_gitignores + return parent_gitignores + + +def is_init_file(path: str) -> bool: + return os.path.basename(path) in ("__init__.py", "__init__.pyi") + + +def verify_module(fscache: FileSystemCache, id: str, path: str, prefix: str) -> bool: + """Check that all 
packages containing id have a __init__ file.""" + if is_init_file(path): + path = os.path.dirname(path) + for i in range(id.count(".")): + path = os.path.dirname(path) + if not any( + fscache.isfile_case(os_path_join(path, f"__init__{extension}"), prefix) + for extension in PYTHON_EXTENSIONS + ): + return False + return True + + +def highest_init_level(fscache: FileSystemCache, id: str, path: str, prefix: str) -> int: + """Compute the highest level where an __init__ file is found.""" + if is_init_file(path): + path = os.path.dirname(path) + level = 0 + for i in range(id.count(".")): + path = os.path.dirname(path) + if any( + fscache.isfile_case(os_path_join(path, f"__init__{extension}"), prefix) + for extension in PYTHON_EXTENSIONS + ): + level = i + 1 + return level + + +def mypy_path() -> list[str]: + path_env = os.getenv("MYPYPATH") + if not path_env: + return [] + return path_env.split(os.pathsep) + + +def default_lib_path( + data_dir: str, pyversion: tuple[int, int], custom_typeshed_dir: str | None +) -> list[str]: + """Return default standard library search paths. Guaranteed to be normalised.""" + + data_dir = os.path.abspath(data_dir) + path: list[str] = [] + + if custom_typeshed_dir: + custom_typeshed_dir = os.path.abspath(custom_typeshed_dir) + typeshed_dir = os.path.join(custom_typeshed_dir, "stdlib") + mypy_extensions_dir = os.path.join(custom_typeshed_dir, "stubs", "mypy-extensions") + librt_dir = os.path.join(custom_typeshed_dir, "stubs", "librt") + versions_file = os.path.join(typeshed_dir, "VERSIONS") + if not os.path.isdir(typeshed_dir) or not os.path.isfile(versions_file): + print( + "error: --custom-typeshed-dir does not point to a valid typeshed ({})".format( + custom_typeshed_dir + ), + file=sys.stderr, + ) + sys.exit(2) + else: + auto = os.path.join(data_dir, "stubs-auto") + if os.path.isdir(auto): + data_dir = auto + typeshed_dir = os.path.join(data_dir, "typeshed", "stdlib") + mypy_extensions_dir = os.path.join(data_dir, "typeshed", "stubs", "mypy-extensions") + librt_dir = os.path.join(data_dir, "typeshed", "stubs", "librt") + path.append(typeshed_dir) + + # Get mypy-extensions and librt stubs from typeshed, since we treat them as + # "internal" libraries, similar to typing and typing-extensions. + path.append(mypy_extensions_dir) + path.append(librt_dir) + + # Add fallback path that can be used if we have a broken installation. + if sys.platform != "win32": + path.append("/usr/local/lib/mypy") + if not path: + print( + "Could not resolve typeshed subdirectories. Your mypy install is broken.\n" + "Python executable is located at {}.\nMypy located at {}".format( + sys.executable, data_dir + ), + file=sys.stderr, + ) + sys.exit(1) + return path + + +@functools.cache +def get_search_dirs(python_executable: str | None) -> tuple[list[str], list[str]]: + """Find package directories for given python. Guaranteed to return absolute paths. + + This runs a subprocess call, which generates a list of the directories in sys.path. + To avoid repeatedly calling a subprocess (which can be slow!) we + lru_cache the results. 
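+
+    The result is a pair of lists, e.g. (purely illustrative paths):
+    (["/usr/lib/python3.12", ...], ["/usr/lib/python3.12/site-packages"]) --
+    sys.path-style entries first, site-packages directories second.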
+ """ + + if python_executable is None: + return ([], []) + elif python_executable == sys.executable: + # Use running Python's package dirs + sys_path, site_packages = pyinfo.getsearchdirs() + else: + # Use subprocess to get the package directory of given Python + # executable + env = {**dict(os.environ), "PYTHONSAFEPATH": "1"} + try: + sys_path, site_packages = ast.literal_eval( + subprocess.check_output( + [python_executable, pyinfo.__file__, "getsearchdirs"], + env=env, + stderr=subprocess.PIPE, + ).decode() + ) + except subprocess.CalledProcessError as err: + print(err.stderr) + print(err.stdout) + raise + except OSError as err: + assert err.errno is not None + reason = os.strerror(err.errno) + raise CompileError( + [f"mypy: Invalid python executable '{python_executable}': {reason}"] + ) from err + return sys_path, site_packages + + +def compute_search_paths( + sources: list[BuildSource], options: Options, data_dir: str, alt_lib_path: str | None = None +) -> SearchPaths: + """Compute the search paths as specified in PEP 561. + + There are the following 4 members created: + - User code (from `sources`) + - MYPYPATH (set either via config or environment variable) + - installed package directories (which will later be split into stub-only and inline) + - typeshed + """ + # Determine the default module search path. + lib_path = collections.deque( + default_lib_path( + data_dir, options.python_version, custom_typeshed_dir=options.custom_typeshed_dir + ) + ) + + if options.use_builtins_fixtures: + # Use stub builtins (to speed up test cases and to make them easier to + # debug). This is a test-only feature, so assume our files are laid out + # as in the source tree. + # We also need to allow overriding where to look for it. Argh. + root_dir = os.getenv("MYPY_TEST_PREFIX", None) + if not root_dir: + root_dir = os.path.dirname(os.path.dirname(__file__)) + root_dir = os.path.abspath(root_dir) + lib_path.appendleft(os.path.join(root_dir, "test-data", "unit", "lib-stub")) + # alt_lib_path is used by some tests to bypass the normal lib_path mechanics. + # If we don't have one, grab directories of source files. + python_path: list[str] = [] + if not alt_lib_path: + for source in sources: + # Include directory of the program file in the module search path. + if source.base_dir: + dir = source.base_dir + if dir not in python_path: + python_path.append(dir) + + # Do this even if running as a file, for sanity (mainly because with + # multiple builds, there could be a mix of files/modules, so its easier + # to just define the semantics that we always add the current director + # to the lib_path + # TODO: Don't do this in some cases; for motivation see see + # https://github.com/python/mypy/issues/4195#issuecomment-341915031 + if options.bazel: + dir = "." + else: + dir = os.getcwd() + if dir not in lib_path: + python_path.insert(0, dir) + + # Start with a MYPYPATH environment variable at the front of the mypy_path, if defined. + mypypath = mypy_path() + + # Add a config-defined mypy path. + mypypath.extend(options.mypy_path) + + # If provided, insert the caller-supplied extra module path to the + # beginning (highest priority) of the search path. 
+ if alt_lib_path: + mypypath.insert(0, alt_lib_path) + + sys_path, site_packages = get_search_dirs(options.python_executable) + # We only use site packages for this check + for site in site_packages: + assert site not in lib_path + if ( + site in mypypath + or any(p.startswith(site + os.path.sep) for p in mypypath) + or (os.path.altsep and any(p.startswith(site + os.path.altsep) for p in mypypath)) + ): + print(f"{site} is in the MYPYPATH. Please remove it.", file=sys.stderr) + print( + "See https://mypy.readthedocs.io/en/stable/running_mypy.html" + "#how-mypy-handles-imports for more info", + file=sys.stderr, + ) + sys.exit(1) + + return SearchPaths( + python_path=tuple(reversed(python_path)), + mypy_path=tuple(mypypath), + package_path=tuple(sys_path + site_packages), + typeshed_path=tuple(lib_path), + ) + + +def load_stdlib_py_versions(custom_typeshed_dir: str | None) -> StdlibVersions: + """Return dict with minimum and maximum Python versions of stdlib modules. + + The contents look like + {..., 'secrets': ((3, 6), None), 'symbol': ((2, 7), (3, 9)), ...} + + None means there is no maximum version. + """ + typeshed_dir = custom_typeshed_dir or os_path_join(os.path.dirname(__file__), "typeshed") + stdlib_dir = os_path_join(typeshed_dir, "stdlib") + result = {} + + versions_path = os_path_join(stdlib_dir, "VERSIONS") + assert os.path.isfile(versions_path), (custom_typeshed_dir, versions_path, __file__) + with open(versions_path) as f: + for line in f: + line = line.split("#")[0].strip() + if line == "": + continue + module, version_range = line.split(":") + versions = version_range.split("-") + min_version = parse_version(versions[0]) + max_version = ( + parse_version(versions[1]) if len(versions) >= 2 and versions[1].strip() else None + ) + result[module] = min_version, max_version + return result + + +def parse_version(version: str) -> tuple[int, int]: + major, minor = version.strip().split(".") + return int(major), int(minor) + + +def typeshed_py_version(options: Options) -> tuple[int, int]: + """Return Python version used for checking whether module supports typeshed.""" + # Typeshed no longer covers Python 3.x versions before 3.9, so 3.9 is + # the earliest we can support. 
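# ---- Editorial aside (illustration only; not part of the vendored mypy file above/below). ----
# load_stdlib_py_versions() above reads typeshed's stdlib VERSIONS file, whose lines look
# like "module: min-max"; a missing max means the module still exists. parse_version()
# turns each bound into a (major, minor) tuple. Assuming the vendored mypy is importable:
from mypy.modulefinder import parse_version

assert parse_version("3.9") == (3, 9)
# "secrets: 3.6-"    ->  result["secrets"] == ((3, 6), None)
# "symbol: 2.7-3.9"  ->  result["symbol"] == ((2, 7), (3, 9))
# ---- End of editorial aside; the vendored diff continues below. ----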
+ return max(options.python_version, (3, 9)) diff --git a/.venv/lib/python3.12/site-packages/mypy/moduleinspect.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/moduleinspect.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..a71b2da Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/moduleinspect.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/moduleinspect.py b/.venv/lib/python3.12/site-packages/mypy/moduleinspect.py new file mode 100644 index 0000000..35db213 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/moduleinspect.py @@ -0,0 +1,184 @@ +"""Basic introspection of modules.""" + +from __future__ import annotations + +import importlib +import inspect +import os +import pkgutil +import queue +import sys +from multiprocessing import Queue, get_context +from types import ModuleType + + +class ModuleProperties: + # Note that all __init__ args must have default values + def __init__( + self, + name: str = "", + file: str | None = None, + path: list[str] | None = None, + all: list[str] | None = None, + is_c_module: bool = False, + subpackages: list[str] | None = None, + ) -> None: + self.name = name # __name__ attribute + self.file = file # __file__ attribute + self.path = path # __path__ attribute + self.all = all # __all__ attribute + self.is_c_module = is_c_module + self.subpackages = subpackages or [] + + +def is_c_module(module: ModuleType) -> bool: + if module.__dict__.get("__file__") is None: + # Could be a namespace package. These must be handled through + # introspection, since there is no source file. + return True + return os.path.splitext(module.__dict__["__file__"])[-1] in [".so", ".pyd", ".dll"] + + +def is_pyc_only(file: str | None) -> bool: + return bool(file and file.endswith(".pyc") and not os.path.exists(file[:-1])) + + +class InspectError(Exception): + pass + + +def get_package_properties(package_id: str) -> ModuleProperties: + """Use runtime introspection to get information about a module/package.""" + try: + package = importlib.import_module(package_id) + except BaseException as e: + raise InspectError(str(e)) from e + name = getattr(package, "__name__", package_id) + file = getattr(package, "__file__", None) + path: list[str] | None = getattr(package, "__path__", None) + if not isinstance(path, list): + path = None + pkg_all = getattr(package, "__all__", None) + if pkg_all is not None: + try: + pkg_all = list(pkg_all) + except Exception: + pkg_all = None + is_c = is_c_module(package) + + if path is None: + # Object has no path; this means it's either a module inside a package + # (and thus no sub-packages), or it could be a C extension package. + if is_c: + # This is a C extension module, now get the list of all sub-packages + # using the inspect module + subpackages = [ + package.__name__ + "." + name + for name, val in inspect.getmembers(package) + if inspect.ismodule(val) and val.__name__ == package.__name__ + "." + name + ] + else: + # It's a module inside a package. There's nothing else to walk/yield. 
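# ---- Editorial aside (illustration only; not part of the vendored mypy file above/below). ----
# What get_package_properties() above reports for an ordinary pure-Python stdlib package,
# assuming the vendored mypy is importable:
from mypy.moduleinspect import get_package_properties

props = get_package_properties("json")
assert props.name == "json" and props.is_c_module is False
assert "json.decoder" in props.subpackages  # discovered via pkgutil.walk_packages()
# ---- End of editorial aside; the vendored diff continues below. ----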
+ subpackages = [] + else: + all_packages = pkgutil.walk_packages( + path, prefix=package.__name__ + ".", onerror=lambda r: None + ) + subpackages = [qualified_name for importer, qualified_name, ispkg in all_packages] + return ModuleProperties( + name=name, file=file, path=path, all=pkg_all, is_c_module=is_c, subpackages=subpackages + ) + + +def worker(tasks: Queue[str], results: Queue[str | ModuleProperties], sys_path: list[str]) -> None: + """The main loop of a worker introspection process.""" + sys.path = sys_path + while True: + mod = tasks.get() + try: + prop = get_package_properties(mod) + except InspectError as e: + results.put(str(e)) + continue + results.put(prop) + + +class ModuleInspect: + """Perform runtime introspection of modules in a separate process. + + Reuse the process for multiple modules for efficiency. However, if there is an + error, retry using a fresh process to avoid cross-contamination of state between + modules. + + We use a separate process to isolate us from many side effects. For example, the + import of a module may kill the current process, and we want to recover from that. + + Always use in a with statement for proper clean-up: + + with ModuleInspect() as m: + p = m.get_package_properties('urllib.parse') + """ + + def __init__(self) -> None: + self._start() + + def _start(self) -> None: + if sys.platform == "linux": + ctx = get_context("forkserver") + else: + ctx = get_context("spawn") + self.tasks: Queue[str] = ctx.Queue() + self.results: Queue[ModuleProperties | str] = ctx.Queue() + self.proc = ctx.Process(target=worker, args=(self.tasks, self.results, sys.path)) + self.proc.start() + self.counter = 0 # Number of successful roundtrips + + def close(self) -> None: + """Free any resources used.""" + self.proc.terminate() + + def get_package_properties(self, package_id: str) -> ModuleProperties: + """Return some properties of a module/package using runtime introspection. + + Raise InspectError if the target couldn't be imported. + """ + self.tasks.put(package_id) + res = self._get_from_queue() + if res is None: + # The process died; recover and report error. + self._start() + raise InspectError(f"Process died when importing {package_id!r}") + if isinstance(res, str): + # Error importing module + if self.counter > 0: + # Also try with a fresh process. Maybe one of the previous imports has + # corrupted some global state. + self.close() + self._start() + return self.get_package_properties(package_id) + raise InspectError(res) + self.counter += 1 + return res + + def _get_from_queue(self) -> ModuleProperties | str | None: + """Get value from the queue. + + Return the value read from the queue, or None if the process unexpectedly died. 
+ """ + max_iter = 600 + n = 0 + while True: + if n == max_iter: + raise RuntimeError("Timeout waiting for subprocess") + try: + return self.results.get(timeout=0.05) + except queue.Empty: + if not self.proc.is_alive(): + return None + n += 1 + + def __enter__(self) -> ModuleInspect: + return self + + def __exit__(self, *args: object) -> None: + self.close() diff --git a/.venv/lib/python3.12/site-packages/mypy/mro.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/mro.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..f9e40c8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/mro.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/mro.py b/.venv/lib/python3.12/site-packages/mypy/mro.py new file mode 100644 index 0000000..f34f3fa --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/mro.py @@ -0,0 +1,62 @@ +from __future__ import annotations + +from typing import Callable + +from mypy.nodes import TypeInfo +from mypy.types import Instance +from mypy.typestate import type_state + + +def calculate_mro(info: TypeInfo, obj_type: Callable[[], Instance] | None = None) -> None: + """Calculate and set mro (method resolution order). + + Raise MroError if cannot determine mro. + """ + mro = linearize_hierarchy(info, obj_type) + assert mro, f"Could not produce a MRO at all for {info}" + info.mro = mro + # The property of falling back to Any is inherited. + info.fallback_to_any = any(baseinfo.fallback_to_any for baseinfo in info.mro) + type_state.reset_all_subtype_caches_for(info) + + +class MroError(Exception): + """Raised if a consistent mro cannot be determined for a class.""" + + +def linearize_hierarchy( + info: TypeInfo, obj_type: Callable[[], Instance] | None = None +) -> list[TypeInfo]: + # TODO describe + if info.mro: + return info.mro + bases = info.direct_base_classes() + if not bases and info.fullname != "builtins.object" and obj_type is not None: + # Probably an error, add a dummy `object` base class, + # otherwise MRO calculation may spuriously fail. + bases = [obj_type().type] + lin_bases = [] + for base in bases: + assert base is not None, f"Cannot linearize bases for {info.fullname} {bases}" + lin_bases.append(linearize_hierarchy(base, obj_type)) + lin_bases.append(bases) + return [info] + merge(lin_bases) + + +def merge(seqs: list[list[TypeInfo]]) -> list[TypeInfo]: + seqs = [s.copy() for s in seqs] + result: list[TypeInfo] = [] + while True: + seqs = [s for s in seqs if s] + if not seqs: + return result + for seq in seqs: + head = seq[0] + if not [s for s in seqs if head in s[1:]]: + break + else: + raise MroError() + result.append(head) + for s in seqs: + if s[0] is head: + del s[0] diff --git a/.venv/lib/python3.12/site-packages/mypy/nodes.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/nodes.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..9e156fe Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/nodes.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/nodes.py b/.venv/lib/python3.12/site-packages/mypy/nodes.py new file mode 100644 index 0000000..e7d7e84 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/nodes.py @@ -0,0 +1,5037 @@ +"""Abstract syntax tree node classes (i.e. 
parse tree).""" + +from __future__ import annotations + +import os +from abc import abstractmethod +from collections import defaultdict +from collections.abc import Iterator, Sequence +from enum import Enum, unique +from typing import TYPE_CHECKING, Any, Callable, Final, Optional, TypeVar, Union, cast +from typing_extensions import TypeAlias as _TypeAlias, TypeGuard + +from librt.internal import ( + read_float as read_float_bare, + read_int as read_int_bare, + read_str as read_str_bare, + write_int as write_int_bare, + write_str as write_str_bare, +) +from mypy_extensions import trait + +import mypy.strconv +from mypy.cache import ( + DICT_STR_GEN, + DT_SPEC, + END_TAG, + LIST_GEN, + LIST_STR, + LITERAL_COMPLEX, + LITERAL_NONE, + ReadBuffer, + Tag, + WriteBuffer, + read_bool, + read_int, + read_int_list, + read_int_opt, + read_json, + read_literal, + read_str, + read_str_list, + read_str_opt, + read_str_opt_list, + read_tag, + write_bool, + write_int, + write_int_list, + write_int_opt, + write_json, + write_literal, + write_str, + write_str_list, + write_str_opt, + write_str_opt_list, + write_tag, +) +from mypy.options import Options +from mypy.util import is_sunder, is_typeshed_file, short_type +from mypy.visitor import ExpressionVisitor, NodeVisitor, StatementVisitor + +if TYPE_CHECKING: + from mypy.patterns import Pattern + + +@unique +class NotParsed(Enum): + VALUE = "NotParsed" + + +class Context: + """Base type for objects that are valid as error message locations.""" + + __slots__ = ("line", "column", "end_line", "end_column") + + def __init__(self, line: int = -1, column: int = -1) -> None: + self.line = line + self.column = column + self.end_line: int | None = None + self.end_column: int | None = None + + def set_line( + self, + target: Context | int, + column: int | None = None, + end_line: int | None = None, + end_column: int | None = None, + ) -> None: + """If target is a node, pull line (and column) information + into this node. If column is specified, this will override any column + information coming from a node. + """ + if isinstance(target, int): + self.line = target + else: + self.line = target.line + self.column = target.column + self.end_line = target.end_line + self.end_column = target.end_column + + if column is not None: + self.column = column + + if end_line is not None: + self.end_line = end_line + + if end_column is not None: + self.end_column = end_column + + +if TYPE_CHECKING: + # break import cycle only needed for mypy + import mypy.types + + +T = TypeVar("T") + +JsonDict: _TypeAlias = dict[str, Any] + + +# Symbol table node kinds +# +# TODO rename to use more descriptive names + +LDEF: Final = 0 +GDEF: Final = 1 +MDEF: Final = 2 + +# Placeholder for a name imported via 'from ... import'. Second phase of +# semantic will replace this the actual imported reference. This is +# needed so that we can detect whether a name has been imported during +# XXX what? +UNBOUND_IMPORTED: Final = 3 + +# RevealExpr node kinds +REVEAL_TYPE: Final = 0 +REVEAL_LOCALS: Final = 1 + +# Kinds of 'literal' expressions. +# +# Use the function mypy.literals.literal to calculate these. +# +# TODO: Can we make these less confusing? +LITERAL_YES: Final = 2 # Value of expression known statically +LITERAL_TYPE: Final = 1 # Type of expression can be narrowed (e.g. 
variable reference)
+LITERAL_NO: Final = 0  # None of the above
+
+node_kinds: Final = {LDEF: "Ldef", GDEF: "Gdef", MDEF: "Mdef", UNBOUND_IMPORTED: "UnboundImported"}
+inverse_node_kinds: Final = {_kind: _name for _name, _kind in node_kinds.items()}
+
+
+implicit_module_attrs: Final = {
+    "__name__": "__builtins__.str",
+    "__doc__": None,  # depends on Python version, see semanal.py
+    "__path__": None,  # depends on if the module is a package
+    "__file__": "__builtins__.str",
+    "__package__": "__builtins__.str",
+    "__annotations__": None,  # dict[str, Any] bounded in add_implicit_module_attrs()
+    "__spec__": None,  # importlib.machinery.ModuleSpec bounded in add_implicit_module_attrs()
+}
+
+
+# These aliases exist because built-in class objects are not subscriptable.
+# For example `list[int]` fails at runtime. Instead List[int] should be used.
+type_aliases: Final = {
+    "typing.List": "builtins.list",
+    "typing.Dict": "builtins.dict",
+    "typing.Set": "builtins.set",
+    "typing.FrozenSet": "builtins.frozenset",
+    "typing.ChainMap": "collections.ChainMap",
+    "typing.Counter": "collections.Counter",
+    "typing.DefaultDict": "collections.defaultdict",
+    "typing.Deque": "collections.deque",
+    "typing.OrderedDict": "collections.OrderedDict",
+    # HACK: a lie in lieu of actual support for PEP 675
+    "typing.LiteralString": "builtins.str",
+}
+
+# This keeps track of the oldest supported Python version where the corresponding
+# alias source is available.
+type_aliases_source_versions: Final = {"typing.LiteralString": (3, 11)}
+
+# This keeps track of aliases in `typing_extensions`, which we treat specially.
+typing_extensions_aliases: Final = {
+    # See: https://github.com/python/mypy/issues/11528
+    "typing_extensions.OrderedDict": "collections.OrderedDict",
+    # HACK: a lie in lieu of actual support for PEP 675
+    "typing_extensions.LiteralString": "builtins.str",
+}
+
+reverse_builtin_aliases: Final = {
+    "builtins.list": "typing.List",
+    "builtins.dict": "typing.Dict",
+    "builtins.set": "typing.Set",
+    "builtins.frozenset": "typing.FrozenSet",
+}
+
+
+RUNTIME_PROTOCOL_DECOS: Final = (
+    "typing.runtime_checkable",
+    "typing_extensions.runtime",
+    "typing_extensions.runtime_checkable",
+)
+
+LAMBDA_NAME: Final = "<lambda>"
+
+
+class Node(Context):
+    """Common base class for all non-type parse tree nodes."""
+
+    __slots__ = ()
+
+    def __str__(self) -> str:
+        return self.accept(mypy.strconv.StrConv(options=Options()))
+
+    def str_with_options(self, options: Options) -> str:
+        a = self.accept(mypy.strconv.StrConv(options=options))
+        assert a
+        return a
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        raise RuntimeError("Not implemented", type(self))
+
+
+@trait
+class Statement(Node):
+    """A statement node."""
+
+    __slots__ = ()
+
+    def accept(self, visitor: StatementVisitor[T]) -> T:
+        raise RuntimeError("Not implemented", type(self))
+
+
+@trait
+class Expression(Node):
+    """An expression node."""
+
+    __slots__ = ()
+
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
+        raise RuntimeError("Not implemented", type(self))
+
+
+class FakeExpression(Expression):
+    """A dummy expression.
+
+    We need a dummy expression in one place, and can't instantiate Expression
+    because it is a trait and mypyc barfs.
+    """
+
+    __slots__ = ()
+
+
+# TODO:
+# Lvalue = Union['NameExpr', 'MemberExpr', 'IndexExpr', 'SuperExpr', 'StarExpr'
+#                'TupleExpr']; see #1783.
+Lvalue: _TypeAlias = Expression + + +@trait +class SymbolNode(Node): + """Nodes that can be stored in a symbol table.""" + + __slots__ = () + + @property + @abstractmethod + def name(self) -> str: + pass + + # Fully qualified name + @property + @abstractmethod + def fullname(self) -> str: + pass + + @abstractmethod + def serialize(self) -> JsonDict: + pass + + @classmethod + def deserialize(cls, data: JsonDict) -> SymbolNode: + classname = data[".class"] + method = deserialize_map.get(classname) + if method is not None: + return method(data) + raise NotImplementedError(f"unexpected .class {classname}") + + def write(self, data: WriteBuffer) -> None: + raise NotImplementedError(f"Cannot serialize {self.__class__.__name__} instance") + + @classmethod + def read(cls, data: ReadBuffer) -> SymbolNode: + raise NotImplementedError(f"Cannot deserialize {cls.__name__} instance") + + +# Items: fullname, related symbol table node, surrounding type (if any) +Definition: _TypeAlias = tuple[str, "SymbolTableNode", Optional["TypeInfo"]] + + +class MypyFile(SymbolNode): + """The abstract syntax tree of a single source file.""" + + __slots__ = ( + "_fullname", + "path", + "defs", + "alias_deps", + "module_refs", + "is_bom", + "names", + "imports", + "ignored_lines", + "skipped_lines", + "is_stub", + "is_cache_skeleton", + "is_partial_stub_package", + "plugin_deps", + "future_import_flags", + "_is_typeshed_file", + ) + + __match_args__ = ("name", "path", "defs") + + # Fully qualified module name + _fullname: str + # Path to the file (empty string if not known) + path: str + # Top-level definitions and statements + defs: list[Statement] + # Type alias dependencies as mapping from target to set of alias full names + alias_deps: defaultdict[str, set[str]] + # The set of all dependencies (suppressed or not) that this module accesses, either + # directly or indirectly. + module_refs: set[str] + # Is there a UTF-8 BOM at the start? + is_bom: bool + names: SymbolTable + # All import nodes within the file (also ones within functions etc.) + imports: list[ImportBase] + # Lines on which to ignore certain errors when checking. + # If the value is empty, ignore all errors; otherwise, the list contains all + # error codes to ignore. + ignored_lines: dict[int, list[str]] + # Lines that were skipped during semantic analysis e.g. due to ALWAYS_FALSE, MYPY_FALSE, + # or platform/version checks. Those lines would not be type-checked. + skipped_lines: set[int] + # Is this file represented by a stub file (.pyi)? + is_stub: bool + # Is this loaded from the cache and thus missing the actual body of the file? + is_cache_skeleton: bool + # Does this represent an __init__.pyi stub with a module __getattr__ + # (i.e. a partial stub package), for such packages we suppress any missing + # module errors in addition to missing attribute errors. + is_partial_stub_package: bool + # Plugin-created dependencies + plugin_deps: dict[str, set[str]] + # Future imports defined in this file. Populated during semantic analysis. 
+ future_import_flags: set[str] + _is_typeshed_file: bool | None + + def __init__( + self, + defs: list[Statement], + imports: list[ImportBase], + is_bom: bool = False, + ignored_lines: dict[int, list[str]] | None = None, + ) -> None: + super().__init__() + self.defs = defs + self.line = 1 # Dummy line number + self.column = 0 # Dummy column + self.imports = imports + self.is_bom = is_bom + self.alias_deps = defaultdict(set) + self.module_refs = set() + self.plugin_deps = {} + if ignored_lines: + self.ignored_lines = ignored_lines + else: + self.ignored_lines = {} + self.skipped_lines = set() + + self.path = "" + self.is_stub = False + self.is_cache_skeleton = False + self.is_partial_stub_package = False + self.future_import_flags = set() + self._is_typeshed_file = None + + def local_definitions(self) -> Iterator[Definition]: + """Return all definitions within the module (including nested). + + This doesn't include imported definitions. + """ + return local_definitions(self.names, self.fullname) + + @property + def name(self) -> str: + return "" if not self._fullname else self._fullname.split(".")[-1] + + @property + def fullname(self) -> str: + return self._fullname + + def accept(self, visitor: NodeVisitor[T]) -> T: + return visitor.visit_mypy_file(self) + + def is_package_init_file(self) -> bool: + return len(self.path) != 0 and os.path.basename(self.path).startswith("__init__.") + + def is_future_flag_set(self, flag: str) -> bool: + return flag in self.future_import_flags + + def is_typeshed_file(self, options: Options) -> bool: + # Cache result since this is called a lot + if self._is_typeshed_file is None: + self._is_typeshed_file = is_typeshed_file(options.abs_custom_typeshed_dir, self.path) + return self._is_typeshed_file + + def serialize(self) -> JsonDict: + return { + ".class": "MypyFile", + "_fullname": self._fullname, + "names": self.names.serialize(self._fullname), + "is_stub": self.is_stub, + "path": self.path, + "is_partial_stub_package": self.is_partial_stub_package, + "future_import_flags": sorted(self.future_import_flags), + } + + @classmethod + def deserialize(cls, data: JsonDict) -> MypyFile: + assert data[".class"] == "MypyFile", data + tree = MypyFile([], []) + tree._fullname = data["_fullname"] + tree.names = SymbolTable.deserialize(data["names"]) + tree.is_stub = data["is_stub"] + tree.path = data["path"] + tree.is_partial_stub_package = data["is_partial_stub_package"] + tree.is_cache_skeleton = True + tree.future_import_flags = set(data["future_import_flags"]) + return tree + + def write(self, data: WriteBuffer) -> None: + write_tag(data, MYPY_FILE) + write_str(data, self._fullname) + self.names.write(data, self._fullname) + write_bool(data, self.is_stub) + write_str(data, self.path) + write_bool(data, self.is_partial_stub_package) + write_str_list(data, sorted(self.future_import_flags)) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> MypyFile: + assert read_tag(data) == MYPY_FILE + tree = MypyFile([], []) + tree._fullname = read_str(data) + tree.names = SymbolTable.read(data) + tree.is_stub = read_bool(data) + tree.path = read_str(data) + tree.is_partial_stub_package = read_bool(data) + tree.future_import_flags = set(read_str_list(data)) + tree.is_cache_skeleton = True + assert read_tag(data) == END_TAG + return tree + + +class ImportBase(Statement): + """Base class for all import statements.""" + + __slots__ = ("is_unreachable", "is_top_level", "is_mypy_only", "assignments") + + is_unreachable: bool # Set by 
semanal.SemanticAnalyzerPass1 if inside `if False` etc. + is_top_level: bool # Ditto if outside any class or def + is_mypy_only: bool # Ditto if inside `if TYPE_CHECKING` or `if MYPY` + + # If an import replaces existing definitions, we construct dummy assignment + # statements that assign the imported names to the names in the current scope, + # for type checking purposes. Example: + # + # x = 1 + # from m import x <-- add assignment representing "x = m.x" + assignments: list[AssignmentStmt] + + def __init__(self) -> None: + super().__init__() + self.assignments = [] + self.is_unreachable = False + self.is_top_level = False + self.is_mypy_only = False + + +class Import(ImportBase): + """import m [as n]""" + + __slots__ = ("ids",) + + __match_args__ = ("ids",) + + ids: list[tuple[str, str | None]] # (module id, as id) + + def __init__(self, ids: list[tuple[str, str | None]]) -> None: + super().__init__() + self.ids = ids + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_import(self) + + +class ImportFrom(ImportBase): + """from m import x [as y], ...""" + + __slots__ = ("id", "names", "relative") + + __match_args__ = ("id", "names", "relative") + + id: str + relative: int + names: list[tuple[str, str | None]] # Tuples (name, as name) + + def __init__(self, id: str, relative: int, names: list[tuple[str, str | None]]) -> None: + super().__init__() + self.id = id + self.names = names + self.relative = relative + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_import_from(self) + + +class ImportAll(ImportBase): + """from m import *""" + + __slots__ = ("id", "relative") + + __match_args__ = ("id", "relative") + + id: str + relative: int + + def __init__(self, id: str, relative: int) -> None: + super().__init__() + self.id = id + self.relative = relative + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_import_all(self) + + +FUNCBASE_FLAGS: Final = ["is_property", "is_class", "is_static", "is_final"] + + +class FuncBase(Node): + """Abstract base class for function-like nodes. + + N.B: Although this has SymbolNode subclasses (FuncDef, + OverloadedFuncDef), avoid calling isinstance(..., FuncBase) on + something that is typed as SymbolNode. This is to work around + mypy bug #3603, in which mypy doesn't understand multiple + inheritance very well, and will assume that a SymbolNode + cannot be a FuncBase. + + Instead, test against SYMBOL_FUNCBASE_TYPES, which enumerates + SymbolNode subclasses that are also FuncBase subclasses. + """ + + __slots__ = ( + "type", + "unanalyzed_type", + "info", + "is_property", + "is_class", # Uses "@classmethod" (explicit or implicit) + "is_static", # Uses "@staticmethod" (explicit or implicit) + "is_final", # Uses "@final" + "is_explicit_override", # Uses "@override" + "is_type_check_only", # Uses "@type_check_only" + "_fullname", + ) + + def __init__(self) -> None: + super().__init__() + # Type signature. This is usually CallableType or Overloaded, but it can be + # something else for decorated functions. + self.type: mypy.types.ProperType | None = None + # Original, not semantically analyzed type (used for reprocessing) + self.unanalyzed_type: mypy.types.ProperType | None = None + # If method, reference to TypeInfo + self.info = FUNC_NO_INFO + self.is_property = False + self.is_class = False + # Is this a `@staticmethod` (explicit or implicit)? 
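# ---- Editorial aside (illustration only; not part of the vendored mypy file above/below). ----
# How the three import statement forms defined above are represented as AST nodes,
# assuming the vendored mypy is importable ("pkg", "name", "alias" are made-up identifiers):
from mypy.nodes import Import, ImportAll, ImportFrom

imp = Import(ids=[("os.path", "osp")])                    # import os.path as osp
imp_from = ImportFrom("pkg.mod", 2, [("name", "alias")])  # from ..pkg.mod import name as alias
imp_all = ImportAll("pkg", 0)                             # from pkg import *
assert imp_from.relative == 2 and imp_all.relative == 0
# ---- End of editorial aside; the vendored diff continues below. ----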
+ # Note: use has_self_or_cls_argument to check if there is `self` or `cls` argument + self.is_static = False + self.is_final = False + self.is_explicit_override = False + self.is_type_check_only = False + # Name with module prefix + self._fullname = "" + + @property + @abstractmethod + def name(self) -> str: + pass + + @property + def fullname(self) -> str: + return self._fullname + + @property + def has_self_or_cls_argument(self) -> bool: + """If used as a method, does it have an argument for method binding (`self`, `cls`)? + + This is true for `__new__` even though `__new__` does not undergo method binding, + because we still usually assume that `cls` corresponds to the enclosing class. + """ + return not self.is_static or self.name == "__new__" + + +OverloadPart: _TypeAlias = Union["FuncDef", "Decorator"] + + +class OverloadedFuncDef(FuncBase, SymbolNode, Statement): + """A logical node representing all the variants of a multi-declaration function. + + A multi-declaration function is often an @overload, but can also be a + @property with a setter and a/or a deleter. + + This node has no explicit representation in the source program. + Overloaded variants must be consecutive in the source file. + """ + + __slots__ = ( + "items", + "unanalyzed_items", + "impl", + "deprecated", + "setter_index", + "_is_trivial_self", + ) + + items: list[OverloadPart] + unanalyzed_items: list[OverloadPart] + impl: OverloadPart | None + deprecated: str | None + setter_index: int | None + + def __init__(self, items: list[OverloadPart]) -> None: + super().__init__() + self.items = items + self.unanalyzed_items = items.copy() + self.impl = None + self.deprecated = None + self.setter_index = None + self._is_trivial_self: bool | None = None + if items: + # TODO: figure out how to reliably set end position (we don't know the impl here). + self.set_line(items[0].line, items[0].column) + + @property + def name(self) -> str: + if self.items: + return self.items[0].name + else: + # This may happen for malformed overload + assert self.impl is not None + return self.impl.name + + @property + def is_trivial_self(self) -> bool: + """Check we can use bind_self() fast path for this overload. + + This will return False if at least one overload: + * Has an explicit self annotation, or Self in signature. + * Has a non-trivial decorator. + """ + if self._is_trivial_self is not None: + return self._is_trivial_self + for i, item in enumerate(self.items): + # Note: bare @property is removed in visit_decorator(). + trivial = 1 if i > 0 or not self.is_property else 0 + if isinstance(item, FuncDef): + if not item.is_trivial_self: + self._is_trivial_self = False + return False + elif len(item.decorators) > trivial or not item.func.is_trivial_self: + self._is_trivial_self = False + return False + self._is_trivial_self = True + return True + + @property + def setter(self) -> Decorator: + # Do some consistency checks first. 
+ first_item = self.items[0] + assert isinstance(first_item, Decorator) + assert first_item.var.is_settable_property + assert self.setter_index is not None + item = self.items[self.setter_index] + assert isinstance(item, Decorator) + return item + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_overloaded_func_def(self) + + def serialize(self) -> JsonDict: + return { + ".class": "OverloadedFuncDef", + "items": [i.serialize() for i in self.items], + "type": None if self.type is None else self.type.serialize(), + "fullname": self._fullname, + "impl": None if self.impl is None else self.impl.serialize(), + "flags": get_flags(self, FUNCBASE_FLAGS), + "deprecated": self.deprecated, + "setter_index": self.setter_index, + } + + @classmethod + def deserialize(cls, data: JsonDict) -> OverloadedFuncDef: + assert data[".class"] == "OverloadedFuncDef" + res = OverloadedFuncDef( + [cast(OverloadPart, SymbolNode.deserialize(d)) for d in data["items"]] + ) + if data.get("impl") is not None: + res.impl = cast(OverloadPart, SymbolNode.deserialize(data["impl"])) + # set line for empty overload items, as not set in __init__ + if len(res.items) > 0: + res.set_line(res.impl.line) + if data.get("type") is not None: + typ = mypy.types.deserialize_type(data["type"]) + assert isinstance(typ, mypy.types.ProperType) + res.type = typ + res._fullname = data["fullname"] + set_flags(res, data["flags"]) + res.deprecated = data["deprecated"] + res.setter_index = data["setter_index"] + # NOTE: res.info will be set in the fixup phase. + return res + + def write(self, data: WriteBuffer) -> None: + write_tag(data, OVERLOADED_FUNC_DEF) + write_tag(data, LIST_GEN) + write_int_bare(data, len(self.items)) + for item in self.items: + item.write(data) + mypy.types.write_type_opt(data, self.type) + write_str(data, self._fullname) + if self.impl is None: + write_tag(data, LITERAL_NONE) + else: + self.impl.write(data) + write_flags(data, self, FUNCBASE_FLAGS) + write_str_opt(data, self.deprecated) + write_int_opt(data, self.setter_index) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> OverloadedFuncDef: + assert read_tag(data) == LIST_GEN + res = OverloadedFuncDef([read_overload_part(data) for _ in range(read_int_bare(data))]) + typ = mypy.types.read_type_opt(data) + if typ is not None: + assert isinstance(typ, mypy.types.ProperType) + res.type = typ + res._fullname = read_str(data) + tag = read_tag(data) + if tag != LITERAL_NONE: + res.impl = read_overload_part(data, tag) + # set line for empty overload items, as not set in __init__ + if len(res.items) > 0: + res.set_line(res.impl.line) + read_flags(data, res, FUNCBASE_FLAGS) + res.deprecated = read_str_opt(data) + res.setter_index = read_int_opt(data) + # NOTE: res.info will be set in the fixup phase. 
+ assert read_tag(data) == END_TAG + return res + + def is_dynamic(self) -> bool: + return all(item.is_dynamic() for item in self.items) + + +class Argument(Node): + """A single argument in a FuncItem.""" + + __slots__ = ("variable", "type_annotation", "initializer", "kind", "pos_only") + + __match_args__ = ("variable", "type_annotation", "initializer", "kind", "pos_only") + + def __init__( + self, + variable: Var, + type_annotation: mypy.types.Type | None, + initializer: Expression | None, + kind: ArgKind, + pos_only: bool = False, + ) -> None: + super().__init__() + self.variable = variable + self.type_annotation = type_annotation + self.initializer = initializer + self.kind = kind # must be an ARG_* constant + self.pos_only = pos_only + + def set_line( + self, + target: Context | int, + column: int | None = None, + end_line: int | None = None, + end_column: int | None = None, + ) -> None: + super().set_line(target, column, end_line, end_column) + + if self.initializer and self.initializer.line < 0: + self.initializer.set_line(self.line, self.column, self.end_line, self.end_column) + + self.variable.set_line(self.line, self.column, self.end_line, self.end_column) + + +# These specify the kind of a TypeParam +TYPE_VAR_KIND: Final = 0 +PARAM_SPEC_KIND: Final = 1 +TYPE_VAR_TUPLE_KIND: Final = 2 + + +class TypeParam: + __slots__ = ("name", "kind", "upper_bound", "values", "default") + + def __init__( + self, + name: str, + kind: int, + upper_bound: mypy.types.Type | None, + values: list[mypy.types.Type], + default: mypy.types.Type | None, + ) -> None: + self.name = name + self.kind = kind + self.upper_bound = upper_bound + self.values = values + self.default = default + + +FUNCITEM_FLAGS: Final = FUNCBASE_FLAGS + [ + "is_overload", + "is_generator", + "is_coroutine", + "is_async_generator", + "is_awaitable_coroutine", +] + + +class FuncItem(FuncBase): + """Base class for nodes usable as overloaded function items.""" + + __slots__ = ( + "arguments", # Note that can be unset if deserialized (type is a lie!) + "arg_names", # Names of arguments + "arg_kinds", # Kinds of arguments + "min_args", # Minimum number of arguments + "max_pos", # Maximum number of positional arguments, -1 if no explicit + # limit (*args not included) + "type_args", # New-style type parameters (PEP 695) + "body", # Body of the function + "is_overload", # Is this an overload variant of function with more than + # one overload variant? + "is_generator", # Contains a yield statement? + "is_coroutine", # Defined using 'async def' syntax? + "is_async_generator", # Is an async def generator? + "is_awaitable_coroutine", # Decorated with '@{typing,asyncio}.coroutine'? 
+ "expanded", # Variants of function with type variables with values expanded + ) + + __deletable__ = ("arguments", "max_pos", "min_args") + + def __init__( + self, + arguments: list[Argument] | None = None, + body: Block | None = None, + typ: mypy.types.FunctionLike | None = None, + type_args: list[TypeParam] | None = None, + ) -> None: + super().__init__() + self.arguments = arguments or [] + self.arg_names = [None if arg.pos_only else arg.variable.name for arg in self.arguments] + self.arg_kinds: list[ArgKind] = [arg.kind for arg in self.arguments] + self.max_pos: int = self.arg_kinds.count(ARG_POS) + self.arg_kinds.count(ARG_OPT) + self.type_args: list[TypeParam] | None = type_args + self.body: Block = body or Block([]) + self.type = typ + self.unanalyzed_type = typ + self.is_overload: bool = False + self.is_generator: bool = False + self.is_coroutine: bool = False + self.is_async_generator: bool = False + self.is_awaitable_coroutine: bool = False + self.expanded: list[FuncItem] = [] + + self.min_args = 0 + for i in range(len(self.arguments)): + if self.arguments[i] is None and i < self.max_fixed_argc(): + self.min_args = i + 1 + + def max_fixed_argc(self) -> int: + return self.max_pos + + def is_dynamic(self) -> bool: + return self.type is None + + +FUNCDEF_FLAGS: Final = FUNCITEM_FLAGS + [ + "is_decorated", + "is_conditional", + "is_trivial_body", + "is_trivial_self", + "is_mypy_only", +] + +# Abstract status of a function +NOT_ABSTRACT: Final = 0 +# Explicitly abstract (with @abstractmethod or overload without implementation) +IS_ABSTRACT: Final = 1 +# Implicitly abstract: used for functions with trivial bodies defined in Protocols +IMPLICITLY_ABSTRACT: Final = 2 + + +class FuncDef(FuncItem, SymbolNode, Statement): + """Function definition. + + This is a non-lambda function defined using 'def'. + """ + + __slots__ = ( + "_name", + "is_decorated", + "is_conditional", + "abstract_status", + "original_def", + "is_trivial_body", + "is_trivial_self", + "has_self_attr_def", + "is_mypy_only", + # Present only when a function is decorated with @typing.dataclass_transform or similar + "dataclass_transform_spec", + "docstring", + "deprecated", + "original_first_arg", + ) + + __match_args__ = ("name", "arguments", "type", "body") + + # Note that all __init__ args must have default values + def __init__( + self, + name: str = "", # Function name + arguments: list[Argument] | None = None, + body: Block | None = None, + typ: mypy.types.FunctionLike | None = None, + type_args: list[TypeParam] | None = None, + ) -> None: + super().__init__(arguments, body, typ, type_args) + self._name = name + self.is_decorated = False + self.is_conditional = False # Defined conditionally (within block)? + self.abstract_status = NOT_ABSTRACT + # Is this an abstract method with trivial body? + # Such methods can't be called via super(). + self.is_trivial_body = False + # Original conditional definition + self.original_def: None | FuncDef | Var | Decorator = None + # Definitions that appear in if TYPE_CHECKING are marked with this flag. + self.is_mypy_only = False + self.dataclass_transform_spec: DataclassTransformSpec | None = None + self.docstring: str | None = None + self.deprecated: str | None = None + # This is used to simplify bind_self() logic in trivial cases (which are + # the majority). In cases where self is not annotated and there are no Self + # in the signature we can simply drop the first argument. + self.is_trivial_self = False + # Keep track of functions where self attributes are defined. 
+ self.has_self_attr_def = False + # This is needed because for positional-only arguments the name is set to None, + # but we sometimes still want to show it in error messages. + if arguments: + self.original_first_arg: str | None = arguments[0].variable.name + else: + self.original_first_arg = None + + @property + def name(self) -> str: + return self._name + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_func_def(self) + + def serialize(self) -> JsonDict: + # We're deliberating omitting arguments and storing only arg_names and + # arg_kinds for space-saving reasons (arguments is not used in later + # stages of mypy). + # TODO: After a FuncDef is deserialized, the only time we use `arg_names` + # and `arg_kinds` is when `type` is None and we need to infer a type. Can + # we store the inferred type ahead of time? + return { + ".class": "FuncDef", + "name": self._name, + "fullname": self._fullname, + "arg_names": self.arg_names, + "arg_kinds": [int(x.value) for x in self.arg_kinds], + "type": None if self.type is None else self.type.serialize(), + "flags": get_flags(self, FUNCDEF_FLAGS), + "abstract_status": self.abstract_status, + # TODO: Do we need expanded, original_def? + "dataclass_transform_spec": ( + None + if self.dataclass_transform_spec is None + else self.dataclass_transform_spec.serialize() + ), + "deprecated": self.deprecated, + "original_first_arg": self.original_first_arg, + } + + @classmethod + def deserialize(cls, data: JsonDict) -> FuncDef: + assert data[".class"] == "FuncDef" + body = Block([]) + ret = FuncDef( + data["name"], + [], + body, + ( + None + if data["type"] is None + else cast(mypy.types.FunctionLike, mypy.types.deserialize_type(data["type"])) + ), + ) + ret._fullname = data["fullname"] + set_flags(ret, data["flags"]) + # NOTE: ret.info is set in the fixup phase. + ret.arg_names = data["arg_names"] + ret.original_first_arg = data.get("original_first_arg") + ret.arg_kinds = [ARG_KINDS[x] for x in data["arg_kinds"]] + ret.abstract_status = data["abstract_status"] + ret.dataclass_transform_spec = ( + DataclassTransformSpec.deserialize(data["dataclass_transform_spec"]) + if data["dataclass_transform_spec"] is not None + else None + ) + ret.deprecated = data["deprecated"] + # Leave these uninitialized so that future uses will trigger an error + del ret.arguments + del ret.max_pos + del ret.min_args + return ret + + def write(self, data: WriteBuffer) -> None: + write_tag(data, FUNC_DEF) + write_str(data, self._name) + mypy.types.write_type_opt(data, self.type) + write_str(data, self._fullname) + write_flags(data, self, FUNCDEF_FLAGS) + write_str_opt_list(data, self.arg_names) + write_int_list(data, [int(ak.value) for ak in self.arg_kinds]) + write_int(data, self.abstract_status) + if self.dataclass_transform_spec is None: + write_tag(data, LITERAL_NONE) + else: + self.dataclass_transform_spec.write(data) + write_str_opt(data, self.deprecated) + write_str_opt(data, self.original_first_arg) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> FuncDef: + name = read_str(data) + typ: mypy.types.FunctionLike | None = None + tag = read_tag(data) + if tag != LITERAL_NONE: + typ = mypy.types.read_function_like(data, tag) + ret = FuncDef(name, [], Block([]), typ) + ret._fullname = read_str(data) + read_flags(data, ret, FUNCDEF_FLAGS) + # NOTE: ret.info is set in the fixup phase. 
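# ---- Editorial aside (illustration only; not part of the vendored mypy file above/below). ----
# FuncDef.serialize() above deliberately stores only arg_names/arg_kinds rather than the
# Argument objects, and deserialize() deletes `arguments` so that stale uses fail loudly.
# A small round-trip sketch, assuming the vendored mypy is importable ("pkg.mod.f" is a
# made-up fullname):
from mypy.nodes import FuncDef

fd = FuncDef("f")
fd._fullname = "pkg.mod.f"
data = fd.serialize()
assert data[".class"] == "FuncDef" and data["fullname"] == "pkg.mod.f"
fd2 = FuncDef.deserialize(data)
assert fd2.name == "f" and not hasattr(fd2, "arguments")
# ---- End of editorial aside; the vendored diff continues below. ----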
+ ret.arg_names = read_str_opt_list(data) + ret.arg_kinds = [ARG_KINDS[ak] for ak in read_int_list(data)] + ret.abstract_status = read_int(data) + tag = read_tag(data) + if tag != LITERAL_NONE: + assert tag == DT_SPEC + ret.dataclass_transform_spec = DataclassTransformSpec.read(data) + ret.deprecated = read_str_opt(data) + ret.original_first_arg = read_str_opt(data) + # Leave these uninitialized so that future uses will trigger an error + del ret.arguments + del ret.max_pos + del ret.min_args + assert read_tag(data) == END_TAG + return ret + + +# All types that are both SymbolNodes and FuncBases. See the FuncBase +# docstring for the rationale. +# See https://github.com/python/mypy/pull/13607#issuecomment-1236357236 +# TODO: we want to remove this at some point and just use `FuncBase` ideally. +SYMBOL_FUNCBASE_TYPES: Final = (OverloadedFuncDef, FuncDef) + + +class Decorator(SymbolNode, Statement): + """A decorated function. + + A single Decorator object can include any number of function decorators. + """ + + __slots__ = ("func", "decorators", "original_decorators", "var", "is_overload") + + __match_args__ = ("decorators", "var", "func") + + func: FuncDef # Decorated function + decorators: list[Expression] # Decorators (may be empty) + # Some decorators are removed by semanal, keep the original here. + original_decorators: list[Expression] + # TODO: This is mostly used for the type; consider replacing with a 'type' attribute + var: Var # Represents the decorated function obj + is_overload: bool + + def __init__(self, func: FuncDef, decorators: list[Expression], var: Var) -> None: + super().__init__() + self.func = func + self.decorators = decorators + self.original_decorators = decorators.copy() + self.var = var + self.is_overload = False + + @property + def name(self) -> str: + return self.func.name + + @property + def fullname(self) -> str: + return self.func.fullname + + @property + def is_final(self) -> bool: + return self.func.is_final + + @property + def info(self) -> TypeInfo: + return self.func.info + + @property + def type(self) -> mypy.types.Type | None: + return self.var.type + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_decorator(self) + + def serialize(self) -> JsonDict: + return { + ".class": "Decorator", + "func": self.func.serialize(), + "var": self.var.serialize(), + "is_overload": self.is_overload, + } + + @classmethod + def deserialize(cls, data: JsonDict) -> Decorator: + assert data[".class"] == "Decorator" + dec = Decorator(FuncDef.deserialize(data["func"]), [], Var.deserialize(data["var"])) + dec.is_overload = data["is_overload"] + return dec + + def write(self, data: WriteBuffer) -> None: + write_tag(data, DECORATOR) + self.func.write(data) + self.var.write(data) + write_bool(data, self.is_overload) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> Decorator: + assert read_tag(data) == FUNC_DEF + func = FuncDef.read(data) + assert read_tag(data) == VAR + var = Var.read(data) + dec = Decorator(func, [], var) + dec.is_overload = read_bool(data) + assert read_tag(data) == END_TAG + return dec + + def is_dynamic(self) -> bool: + return self.func.is_dynamic() + + +VAR_FLAGS: Final = [ + "is_self", + "is_cls", + "is_initialized_in_class", + "is_staticmethod", + "is_classmethod", + "is_property", + "is_settable_property", + "is_suppressed_import", + "is_classvar", + "is_abstract_var", + "is_final", + "is_index_var", + "final_unset_in_class", + "final_set_in_init", + "explicit_self_type", + "is_ready", + 
"is_inferred", + "invalid_partial_type", + "from_module_getattr", + "has_explicit_value", + "allow_incompatible_override", +] + + +class Var(SymbolNode): + """A variable. + + It can refer to global/local variable or a data attribute. + """ + + __slots__ = ( + "_name", + "_fullname", + "info", + "type", + "setter_type", + "final_value", + "is_self", + "is_cls", + "is_ready", + "is_inferred", + "is_initialized_in_class", + "is_staticmethod", + "is_classmethod", + "is_property", + "is_settable_property", + "is_classvar", + "is_abstract_var", + "is_final", + "is_index_var", + "final_unset_in_class", + "final_set_in_init", + "is_suppressed_import", + "explicit_self_type", + "from_module_getattr", + "has_explicit_value", + "allow_incompatible_override", + "invalid_partial_type", + ) + + __match_args__ = ("name", "type", "final_value") + + def __init__(self, name: str, type: mypy.types.Type | None = None) -> None: + super().__init__() + self._name = name # Name without module prefix + # TODO: Should be Optional[str] + self._fullname = "" # Name with module prefix + # TODO: Should be Optional[TypeInfo] + self.info = VAR_NO_INFO + self.type: mypy.types.Type | None = type # Declared or inferred type, or None + # The setter type for settable properties. + self.setter_type: mypy.types.CallableType | None = None + # Is this the first argument to an ordinary method (usually "self")? + self.is_self = False + # Is this the first argument to a classmethod (typically "cls")? + self.is_cls = False + self.is_ready = True # If inferred, is the inferred type available? + self.is_inferred = self.type is None + # Is this initialized explicitly to a non-None value in class body? + self.is_initialized_in_class = False + self.is_staticmethod = False + self.is_classmethod = False + self.is_property = False + self.is_settable_property = False + self.is_classvar = False + self.is_abstract_var = False + self.is_index_var = False + # Set to true when this variable refers to a module we were unable to + # parse for some reason (eg a silenced module) + self.is_suppressed_import = False + # Was this "variable" (rather a constant) defined as Final[...]? + self.is_final = False + # If constant value is a simple literal, + # store the literal value (unboxed) for the benefit of + # tools like mypyc. + self.final_value: int | float | complex | bool | str | None = None + # Where the value was set (only for class attributes) + self.final_unset_in_class = False + self.final_set_in_init = False + # This is True for a variable that was declared on self with an explicit type: + # class C: + # def __init__(self) -> None: + # self.x: int + # This case is important because this defines a new Var, even if there is one + # present in a superclass (without explicit type this doesn't create a new Var). + # See SemanticAnalyzer.analyze_member_lvalue() for details. + self.explicit_self_type = False + # If True, this is an implicit Var created due to module-level __getattr__. + self.from_module_getattr = False + # Var can be created with an explicit value `a = 1` or without one `a: int`, + # we need a way to tell which one is which. + self.has_explicit_value = False + # If True, subclasses can override this with an incompatible type. + self.allow_incompatible_override = False + # If True, this means we didn't manage to infer full type and fall back to + # something like list[Any]. We may decide to not use such types as context. 
+        self.invalid_partial_type = False
+
+    @property
+    def name(self) -> str:
+        return self._name
+
+    @property
+    def fullname(self) -> str:
+        return self._fullname
+
+    def __repr__(self) -> str:
+        name = self.fullname or self.name
+        return f"<Var {name!r} at {hex(id(self))}>"
+
+    def accept(self, visitor: NodeVisitor[T]) -> T:
+        return visitor.visit_var(self)
+
+    def serialize(self) -> JsonDict:
+        # TODO: Leave default values out?
+        # NOTE: Sometimes self.is_ready is False here, but we don't care.
+        data: JsonDict = {
+            ".class": "Var",
+            "name": self._name,
+            "fullname": self._fullname,
+            "type": None if self.type is None else self.type.serialize(),
+            "setter_type": None if self.setter_type is None else self.setter_type.serialize(),
+            "flags": get_flags(self, VAR_FLAGS),
+        }
+        if self.final_value is not None:
+            data["final_value"] = self.final_value
+        return data
+
+    @classmethod
+    def deserialize(cls, data: JsonDict) -> Var:
+        assert data[".class"] == "Var"
+        name = data["name"]
+        type = None if data["type"] is None else mypy.types.deserialize_type(data["type"])
+        setter_type = (
+            None
+            if data["setter_type"] is None
+            else mypy.types.deserialize_type(data["setter_type"])
+        )
+        v = Var(name, type)
+        assert (
+            setter_type is None
+            or isinstance(setter_type, mypy.types.ProperType)
+            and isinstance(setter_type, mypy.types.CallableType)
+        )
+        v.setter_type = setter_type
+        v.is_ready = False  # Override True default set in __init__
+        v._fullname = data["fullname"]
+        set_flags(v, data["flags"])
+        v.final_value = data.get("final_value")
+        return v
+
+    def write(self, data: WriteBuffer) -> None:
+        write_tag(data, VAR)
+        write_str(data, self._name)
+        mypy.types.write_type_opt(data, self.type)
+        mypy.types.write_type_opt(data, self.setter_type)
+        write_str(data, self._fullname)
+        write_flags(data, self, VAR_FLAGS)
+        write_literal(data, self.final_value)
+        write_tag(data, END_TAG)
+
+    @classmethod
+    def read(cls, data: ReadBuffer) -> Var:
+        name = read_str(data)
+        typ = mypy.types.read_type_opt(data)
+        v = Var(name, typ)
+        setter_type: mypy.types.CallableType | None = None
+        tag = read_tag(data)
+        if tag != LITERAL_NONE:
+            assert tag == mypy.types.CALLABLE_TYPE
+            setter_type = mypy.types.CallableType.read(data)
+        v.setter_type = setter_type
+        v.is_ready = False  # Override True default set in __init__
+        v._fullname = read_str(data)
+        read_flags(data, v, VAR_FLAGS)
+        tag = read_tag(data)
+        if tag == LITERAL_COMPLEX:
+            v.final_value = complex(read_float_bare(data), read_float_bare(data))
+        elif tag != LITERAL_NONE:
+            v.final_value = read_literal(data, tag)
+        assert read_tag(data) == END_TAG
+        return v
+
+
+class ClassDef(Statement):
+    """Class definition"""
+
+    __slots__ = (
+        "name",
+        "_fullname",
+        "defs",
+        "type_args",
+        "type_vars",
+        "base_type_exprs",
+        "removed_base_type_exprs",
+        "info",
+        "metaclass",
+        "decorators",
+        "keywords",
+        "analyzed",
+        "has_incompatible_baseclass",
+        "docstring",
+        "removed_statements",
+    )
+
+    __match_args__ = ("name", "defs")
+
+    name: str  # Name of the class without module prefix
+    _fullname: str  # Fully qualified name of the class
+    defs: Block
+    # New-style type parameters (PEP 695), unanalyzed
+    type_args: list[TypeParam] | None
+    # Semantically analyzed type parameters (all syntax variants)
+    type_vars: list[mypy.types.TypeVarLikeType]
+    # Base class expressions (not semantically analyzed -- can be arbitrary expressions)
+    base_type_exprs: list[Expression]
+    # Special base classes like Generic[...]
get moved here during semantic analysis + removed_base_type_exprs: list[Expression] + info: TypeInfo # Related TypeInfo + metaclass: Expression | None + decorators: list[Expression] + keywords: dict[str, Expression] + analyzed: Expression | None + has_incompatible_baseclass: bool + # Used by special forms like NamedTuple and TypedDict to store invalid statements + removed_statements: list[Statement] + + def __init__( + self, + name: str, + defs: Block, + type_vars: list[mypy.types.TypeVarLikeType] | None = None, + base_type_exprs: list[Expression] | None = None, + metaclass: Expression | None = None, + keywords: list[tuple[str, Expression]] | None = None, + type_args: list[TypeParam] | None = None, + ) -> None: + super().__init__() + self.name = name + self._fullname = "" + self.defs = defs + self.type_vars = type_vars or [] + self.type_args = type_args + self.base_type_exprs = base_type_exprs or [] + self.removed_base_type_exprs = [] + self.info = CLASSDEF_NO_INFO + self.metaclass = metaclass + self.decorators = [] + self.keywords = dict(keywords) if keywords else {} + self.analyzed = None + self.has_incompatible_baseclass = False + self.docstring: str | None = None + self.removed_statements = [] + + @property + def fullname(self) -> str: + return self._fullname + + @fullname.setter + def fullname(self, v: str) -> None: + self._fullname = v + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_class_def(self) + + def is_generic(self) -> bool: + return self.info.is_generic() + + def serialize(self) -> JsonDict: + # Not serialized: defs, base_type_exprs, metaclass, decorators, + # analyzed (for named tuples etc.) + return { + ".class": "ClassDef", + "name": self.name, + "fullname": self.fullname, + "type_vars": [v.serialize() for v in self.type_vars], + } + + @classmethod + def deserialize(cls, data: JsonDict) -> ClassDef: + assert data[".class"] == "ClassDef" + res = ClassDef( + data["name"], + Block([]), + # https://github.com/python/mypy/issues/12257 + [ + cast(mypy.types.TypeVarLikeType, mypy.types.deserialize_type(v)) + for v in data["type_vars"] + ], + ) + res.fullname = data["fullname"] + return res + + def write(self, data: WriteBuffer) -> None: + write_tag(data, CLASS_DEF) + write_str(data, self.name) + mypy.types.write_type_list(data, self.type_vars) + write_str(data, self.fullname) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> ClassDef: + res = ClassDef(read_str(data), Block([]), mypy.types.read_type_var_likes(data)) + res.fullname = read_str(data) + assert read_tag(data) == END_TAG + return res + + +class GlobalDecl(Statement): + """Declaration global x, y, ...""" + + __slots__ = ("names",) + + __match_args__ = ("names",) + + names: list[str] + + def __init__(self, names: list[str]) -> None: + super().__init__() + self.names = names + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_global_decl(self) + + +class NonlocalDecl(Statement): + """Declaration nonlocal x, y, ...""" + + __slots__ = ("names",) + + __match_args__ = ("names",) + + names: list[str] + + def __init__(self, names: list[str]) -> None: + super().__init__() + self.names = names + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_nonlocal_decl(self) + + +class Block(Statement): + __slots__ = ("body", "is_unreachable") + + __match_args__ = ("body", "is_unreachable") + + def __init__(self, body: list[Statement], *, is_unreachable: bool = False) -> None: + super().__init__() + self.body = body + # True 
if we can determine that this block is not executed during semantic + # analysis. For example, this applies to blocks that are protected by + # something like "if PY3:" when using Python 2. However, some code is + # only considered unreachable during type checking and this is not true + # in those cases. + self.is_unreachable = is_unreachable + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_block(self) + + +# Statements + + +class ExpressionStmt(Statement): + """An expression as a statement, such as print(s).""" + + __slots__ = ("expr",) + + __match_args__ = ("expr",) + + expr: Expression + + def __init__(self, expr: Expression) -> None: + super().__init__() + self.expr = expr + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_expression_stmt(self) + + +class AssignmentStmt(Statement): + """Assignment statement. + + The same node class is used for single assignment, multiple assignment + (e.g. x, y = z) and chained assignment (e.g. x = y = z), assignments + that define new names, and assignments with explicit types ("# type: t" + or "x: t [= ...]"). + + An lvalue can be NameExpr, TupleExpr, ListExpr, MemberExpr, or IndexExpr. + """ + + __slots__ = ( + "lvalues", + "rvalue", + "type", + "unanalyzed_type", + "new_syntax", + "is_alias_def", + "is_final_def", + "invalid_recursive_alias", + ) + + __match_args__ = ("lvalues", "rvalues", "type") + + lvalues: list[Lvalue] + # This is a TempNode if and only if no rvalue (x: t). + rvalue: Expression + # Declared type in a comment, may be None. + type: mypy.types.Type | None + # Original, not semantically analyzed type in annotation (used for reprocessing) + unanalyzed_type: mypy.types.Type | None + # This indicates usage of PEP 526 type annotation syntax in assignment. + new_syntax: bool + # Does this assignment define a type alias? + is_alias_def: bool + # Is this a final definition? + # Final attributes can't be re-assigned once set, and can't be overridden + # in a subclass. This flag is not set if an attempted declaration was found to + # be invalid during semantic analysis. It is still set to `True` if + # a final declaration overrides another final declaration (this is checked + # during type checking when MROs are known). + is_final_def: bool + # Stop further processing of this assignment, to prevent flipping back and forth + # during semantic analysis passes. + invalid_recursive_alias: bool + + def __init__( + self, + lvalues: list[Lvalue], + rvalue: Expression, + type: mypy.types.Type | None = None, + new_syntax: bool = False, + ) -> None: + super().__init__() + self.lvalues = lvalues + self.rvalue = rvalue + self.type = type + self.unanalyzed_type = type + self.new_syntax = new_syntax + self.is_alias_def = False + self.is_final_def = False + self.invalid_recursive_alias = False + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_assignment_stmt(self) + + +class OperatorAssignmentStmt(Statement): + """Operator assignment statement such as x += 1""" + + __slots__ = ("op", "lvalue", "rvalue") + + __match_args__ = ("lvalue", "op", "rvalue") + + op: str # TODO: Enum? 
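+ # For example, `x += 1` is stored with op == "+" (the trailing "=" is
+ # implied by the node type), `x` as the lvalue and `1` as the rvalue.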
+ lvalue: Lvalue + rvalue: Expression + + def __init__(self, op: str, lvalue: Lvalue, rvalue: Expression) -> None: + super().__init__() + self.op = op + self.lvalue = lvalue + self.rvalue = rvalue + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_operator_assignment_stmt(self) + + +class WhileStmt(Statement): + __slots__ = ("expr", "body", "else_body") + + __match_args__ = ("expr", "body", "else_body") + + expr: Expression + body: Block + else_body: Block | None + + def __init__(self, expr: Expression, body: Block, else_body: Block | None) -> None: + super().__init__() + self.expr = expr + self.body = body + self.else_body = else_body + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_while_stmt(self) + + +class ForStmt(Statement): + __slots__ = ( + "index", + "index_type", + "unanalyzed_index_type", + "inferred_item_type", + "inferred_iterator_type", + "expr", + "body", + "else_body", + "is_async", + ) + + __match_args__ = ("index", "index_type", "expr", "body", "else_body") + + # Index variables + index: Lvalue + # Type given by type comments for index, can be None + index_type: mypy.types.Type | None + # Original, not semantically analyzed type in annotation (used for reprocessing) + unanalyzed_index_type: mypy.types.Type | None + # Inferred iterable item type + inferred_item_type: mypy.types.Type | None + # Inferred iterator type + inferred_iterator_type: mypy.types.Type | None + # Expression to iterate + expr: Expression + body: Block + else_body: Block | None + is_async: bool # True if `async for ...` (PEP 492, Python 3.5) + + def __init__( + self, + index: Lvalue, + expr: Expression, + body: Block, + else_body: Block | None, + index_type: mypy.types.Type | None = None, + ) -> None: + super().__init__() + self.index = index + self.index_type = index_type + self.unanalyzed_index_type = index_type + self.inferred_item_type = None + self.inferred_iterator_type = None + self.expr = expr + self.body = body + self.else_body = else_body + self.is_async = False + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_for_stmt(self) + + +class ReturnStmt(Statement): + __slots__ = ("expr",) + + __match_args__ = ("expr",) + + expr: Expression | None + + def __init__(self, expr: Expression | None) -> None: + super().__init__() + self.expr = expr + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_return_stmt(self) + + +class AssertStmt(Statement): + __slots__ = ("expr", "msg") + + __match_args__ = ("expr", "msg") + + expr: Expression + msg: Expression | None + + def __init__(self, expr: Expression, msg: Expression | None = None) -> None: + super().__init__() + self.expr = expr + self.msg = msg + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_assert_stmt(self) + + +class DelStmt(Statement): + __slots__ = ("expr",) + + __match_args__ = ("expr",) + + expr: Lvalue + + def __init__(self, expr: Lvalue) -> None: + super().__init__() + self.expr = expr + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_del_stmt(self) + + +class BreakStmt(Statement): + __slots__ = () + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_break_stmt(self) + + +class ContinueStmt(Statement): + __slots__ = () + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_continue_stmt(self) + + +class PassStmt(Statement): + __slots__ = () + + def accept(self, visitor: StatementVisitor[T]) -> T: + return 
visitor.visit_pass_stmt(self) + + +class IfStmt(Statement): + __slots__ = ("expr", "body", "else_body") + + __match_args__ = ("expr", "body", "else_body") + + expr: list[Expression] + body: list[Block] + else_body: Block | None + + def __init__(self, expr: list[Expression], body: list[Block], else_body: Block | None) -> None: + super().__init__() + self.expr = expr + self.body = body + self.else_body = else_body + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_if_stmt(self) + + +class RaiseStmt(Statement): + __slots__ = ("expr", "from_expr") + + __match_args__ = ("expr", "from_expr") + + # Plain 'raise' is a valid statement. + expr: Expression | None + from_expr: Expression | None + + def __init__(self, expr: Expression | None, from_expr: Expression | None) -> None: + super().__init__() + self.expr = expr + self.from_expr = from_expr + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_raise_stmt(self) + + +class TryStmt(Statement): + __slots__ = ("body", "types", "vars", "handlers", "else_body", "finally_body", "is_star") + + __match_args__ = ("body", "types", "vars", "handlers", "else_body", "finally_body", "is_star") + + body: Block # Try body + # Plain 'except:' also possible + types: list[Expression | None] # Except type expressions + vars: list[NameExpr | None] # Except variable names + handlers: list[Block] # Except bodies + else_body: Block | None + finally_body: Block | None + # Whether this is try ... except* (added in Python 3.11) + is_star: bool + + def __init__( + self, + body: Block, + vars: list[NameExpr | None], + types: list[Expression | None], + handlers: list[Block], + else_body: Block | None, + finally_body: Block | None, + ) -> None: + super().__init__() + self.body = body + self.vars = vars + self.types = types + self.handlers = handlers + self.else_body = else_body + self.finally_body = finally_body + self.is_star = False + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_try_stmt(self) + + +class WithStmt(Statement): + __slots__ = ("expr", "target", "unanalyzed_type", "analyzed_types", "body", "is_async") + + __match_args__ = ("expr", "target", "body") + + expr: list[Expression] + target: list[Lvalue | None] + # Type given by type comments for target, can be None + unanalyzed_type: mypy.types.Type | None + # Semantically analyzed types from type comment (TypeList type expanded) + analyzed_types: list[mypy.types.Type] + body: Block + is_async: bool # True if `async with ...` (PEP 492, Python 3.5) + + def __init__( + self, + expr: list[Expression], + target: list[Lvalue | None], + body: Block, + target_type: mypy.types.Type | None = None, + ) -> None: + super().__init__() + self.expr = expr + self.target = target + self.unanalyzed_type = target_type + self.analyzed_types = [] + self.body = body + self.is_async = False + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_with_stmt(self) + + +class MatchStmt(Statement): + __slots__ = ("subject", "subject_dummy", "patterns", "guards", "bodies") + + __match_args__ = ("subject", "patterns", "guards", "bodies") + + subject: Expression + subject_dummy: NameExpr | None + patterns: list[Pattern] + guards: list[Expression | None] + bodies: list[Block] + + def __init__( + self, + subject: Expression, + patterns: list[Pattern], + guards: list[Expression | None], + bodies: list[Block], + ) -> None: + super().__init__() + assert len(patterns) == len(guards) == len(bodies) + self.subject = subject + self.subject_dummy = 
None + self.patterns = patterns + self.guards = guards + self.bodies = bodies + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_match_stmt(self) + + +class TypeAliasStmt(Statement): + __slots__ = ("name", "type_args", "value", "invalid_recursive_alias", "alias_node") + + __match_args__ = ("name", "type_args", "value") + + name: NameExpr + type_args: list[TypeParam] + value: LambdaExpr # Return value will get translated into a type + invalid_recursive_alias: bool + alias_node: TypeAlias | None + + def __init__(self, name: NameExpr, type_args: list[TypeParam], value: LambdaExpr) -> None: + super().__init__() + self.name = name + self.type_args = type_args + self.value = value + self.invalid_recursive_alias = False + self.alias_node = None + + def accept(self, visitor: StatementVisitor[T]) -> T: + return visitor.visit_type_alias_stmt(self) + + +# Expressions + + +class IntExpr(Expression): + """Integer literal""" + + __slots__ = ("value",) + + __match_args__ = ("value",) + + value: int # 0 by default + + def __init__(self, value: int) -> None: + super().__init__() + self.value = value + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_int_expr(self) + + +# How mypy uses StrExpr and BytesExpr: +# +# b'x' -> BytesExpr +# 'x', u'x' -> StrExpr + + +class StrExpr(Expression): + """String literal""" + + __slots__ = ("value", "as_type") + + __match_args__ = ("value",) + + value: str # '' by default + # If this value expression can also be parsed as a valid type expression, + # represents the type denoted by the type expression. + # None means "is not a type expression". + as_type: NotParsed | mypy.types.Type | None + + def __init__(self, value: str) -> None: + super().__init__() + self.value = value + self.as_type = NotParsed.VALUE + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_str_expr(self) + + +def is_StrExpr_list(seq: list[Expression]) -> TypeGuard[list[StrExpr]]: # noqa: N802 + return all(isinstance(item, StrExpr) for item in seq) + + +class BytesExpr(Expression): + """Bytes literal""" + + __slots__ = ("value",) + + __match_args__ = ("value",) + + # Note: we deliberately do NOT use bytes here because it ends up + # unnecessarily complicating a lot of the result logic. For example, + # we'd have to worry about converting the bytes into a format we can + # easily serialize/deserialize to and from JSON, would have to worry + # about turning the bytes into a human-readable representation in + # error messages... + # + # It's more convenient to just store the human-readable representation + # from the very start. 
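+ # Illustrative example: a literal such as b'ab\n' is stored here as the
+ # escaped source text (a str), not as a bytes object.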
+ value: str + + def __init__(self, value: str) -> None: + super().__init__() + self.value = value + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_bytes_expr(self) + + +class FloatExpr(Expression): + """Float literal""" + + __slots__ = ("value",) + + __match_args__ = ("value",) + + value: float # 0.0 by default + + def __init__(self, value: float) -> None: + super().__init__() + self.value = value + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_float_expr(self) + + +class ComplexExpr(Expression): + """Complex literal""" + + __slots__ = ("value",) + + __match_args__ = ("value",) + + value: complex + + def __init__(self, value: complex) -> None: + super().__init__() + self.value = value + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_complex_expr(self) + + +class EllipsisExpr(Expression): + """Ellipsis (...)""" + + __slots__ = () + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_ellipsis(self) + + +class StarExpr(Expression): + """Star expression""" + + __slots__ = ("expr", "valid") + + __match_args__ = ("expr", "valid") + + expr: Expression + valid: bool + + def __init__(self, expr: Expression) -> None: + super().__init__() + self.expr = expr + + # Whether this starred expression is used in a tuple/list and as lvalue + self.valid = False + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_star_expr(self) + + +class RefExpr(Expression): + """Abstract base class for name-like constructs""" + + __slots__ = ( + "kind", + "node", + "_fullname", + "is_new_def", + "is_inferred_def", + "is_alias_rvalue", + "type_guard", + "type_is", + ) + + def __init__(self) -> None: + super().__init__() + # LDEF/GDEF/MDEF/... (None if not available) + self.kind: int | None = None + # Var, FuncDef or TypeInfo that describes this + self.node: SymbolNode | None = None + # Fully qualified name (or name if not global) + self._fullname = "" + # Does this define a new name? + self.is_new_def = False + # Does this define a new name with inferred type? + # + # For members, after semantic analysis, this does not take base + # classes into consideration at all; the type checker deals with these. + self.is_inferred_def = False + # Is this expression appears as an rvalue of a valid type alias definition? + self.is_alias_rvalue = False + # Cache type guard from callable_type.type_guard + self.type_guard: mypy.types.Type | None = None + # And same for TypeIs + self.type_is: mypy.types.Type | None = None + + @property + def fullname(self) -> str: + return self._fullname + + @fullname.setter + def fullname(self, v: str) -> None: + self._fullname = v + + +class NameExpr(RefExpr): + """Name expression + + This refers to a local name, global name or a module. + """ + + __slots__ = ("name", "is_special_form") + + __match_args__ = ("name", "node") + + def __init__(self, name: str) -> None: + super().__init__() + self.name = name # Name referred to + # Is this a l.h.s. of a special form assignment like typed dict or type variable? 
+ self.is_special_form = False + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_name_expr(self) + + def serialize(self) -> JsonDict: + assert False, f"Serializing NameExpr: {self}" + + +class MemberExpr(RefExpr): + """Member access expression x.y""" + + __slots__ = ("expr", "name", "def_var") + + __match_args__ = ("expr", "name", "node") + + def __init__(self, expr: Expression, name: str) -> None: + super().__init__() + self.expr = expr + self.name = name + # The variable node related to a definition through 'self.x = '. + # The nodes of other kinds of member expressions are resolved during type checking. + self.def_var: Var | None = None + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_member_expr(self) + + +# Kinds of arguments +@unique +class ArgKind(Enum): + # Positional argument + ARG_POS = 0 + # Positional, optional argument (functions only, not calls) + ARG_OPT = 1 + # *arg argument + ARG_STAR = 2 + # Keyword argument x=y in call, or keyword-only function arg + ARG_NAMED = 3 + # **arg argument + ARG_STAR2 = 4 + # In an argument list, keyword-only and also optional + ARG_NAMED_OPT = 5 + + def is_positional(self, star: bool = False) -> bool: + return self == ARG_POS or self == ARG_OPT or (star and self == ARG_STAR) + + def is_named(self, star: bool = False) -> bool: + return self == ARG_NAMED or self == ARG_NAMED_OPT or (star and self == ARG_STAR2) + + def is_required(self) -> bool: + return self == ARG_POS or self == ARG_NAMED + + def is_optional(self) -> bool: + return self == ARG_OPT or self == ARG_NAMED_OPT + + def is_star(self) -> bool: + return self == ARG_STAR or self == ARG_STAR2 + + +ARG_POS: Final = ArgKind.ARG_POS +ARG_OPT: Final = ArgKind.ARG_OPT +ARG_STAR: Final = ArgKind.ARG_STAR +ARG_NAMED: Final = ArgKind.ARG_NAMED +ARG_STAR2: Final = ArgKind.ARG_STAR2 +ARG_NAMED_OPT: Final = ArgKind.ARG_NAMED_OPT + +ARG_KINDS: Final = (ARG_POS, ARG_OPT, ARG_STAR, ARG_NAMED, ARG_STAR2, ARG_NAMED_OPT) + + +class CallExpr(Expression): + """Call expression. + + This can also represent several special forms that are syntactically calls + such as cast(...) and None # type: .... + """ + + __slots__ = ("callee", "args", "arg_kinds", "arg_names", "analyzed") + + __match_args__ = ("callee", "args", "arg_kinds", "arg_names") + + def __init__( + self, + callee: Expression, + args: list[Expression], + arg_kinds: list[ArgKind], + arg_names: list[str | None], + analyzed: Expression | None = None, + ) -> None: + super().__init__() + if not arg_names: + arg_names = [None] * len(args) + + self.callee = callee + self.args = args + self.arg_kinds = arg_kinds # ARG_ constants + # Each name can be None if not a keyword argument. + self.arg_names: list[str | None] = arg_names + # If not None, the node that represents the meaning of the CallExpr. For + # cast(...) this is a CastExpr. 
+ self.analyzed = analyzed + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_call_expr(self) + + +class YieldFromExpr(Expression): + __slots__ = ("expr",) + + __match_args__ = ("expr",) + + expr: Expression + + def __init__(self, expr: Expression) -> None: + super().__init__() + self.expr = expr + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_yield_from_expr(self) + + +class YieldExpr(Expression): + __slots__ = ("expr",) + + __match_args__ = ("expr",) + + expr: Expression | None + + def __init__(self, expr: Expression | None) -> None: + super().__init__() + self.expr = expr + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_yield_expr(self) + + +class IndexExpr(Expression): + """Index expression x[y]. + + Also wraps type application such as List[int] as a special form. + """ + + __slots__ = ("base", "index", "method_type", "analyzed", "as_type") + + __match_args__ = ("base", "index") + + base: Expression + index: Expression + # Inferred __getitem__ method type + method_type: mypy.types.Type | None + # If not None, this is actually semantically a type application + # Class[type, ...] or a type alias initializer. + analyzed: TypeApplication | TypeAliasExpr | None + # If this value expression can also be parsed as a valid type expression, + # represents the type denoted by the type expression. + # None means "is not a type expression". + as_type: NotParsed | mypy.types.Type | None + + def __init__(self, base: Expression, index: Expression) -> None: + super().__init__() + self.base = base + self.index = index + self.method_type = None + self.analyzed = None + self.as_type = NotParsed.VALUE + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_index_expr(self) + + +class UnaryExpr(Expression): + """Unary operation""" + + __slots__ = ("op", "expr", "method_type") + + __match_args__ = ("op", "expr") + + op: str # TODO: Enum? + expr: Expression + # Inferred operator method type + method_type: mypy.types.Type | None + + def __init__(self, op: str, expr: Expression) -> None: + super().__init__() + self.op = op + self.expr = expr + self.method_type = None + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_unary_expr(self) + + +class AssignmentExpr(Expression): + """Assignment expressions in Python 3.8+, like "a := 2".""" + + __slots__ = ("target", "value") + + __match_args__ = ("target", "value") + + def __init__(self, target: NameExpr, value: Expression) -> None: + super().__init__() + self.target = target + self.value = value + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_assignment_expr(self) + + +class OpExpr(Expression): + """Binary operation. + + The dot (.), [] and comparison operators have more specific nodes. + """ + + __slots__ = ( + "op", + "left", + "right", + "method_type", + "right_always", + "right_unreachable", + "analyzed", + "as_type", + ) + + __match_args__ = ("left", "op", "right") + + op: str # TODO: Enum? + left: Expression + right: Expression + # Inferred type for the operator method type (when relevant). + method_type: mypy.types.Type | None + # Per static analysis only: Is the right side going to be evaluated every time? + right_always: bool + # Per static analysis only: Is the right side unreachable? 
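+ # Illustrative example: for `x and f()`, if the binder can prove `x` is always
+ # truthy, the right operand is evaluated every time; if it can prove `x` is
+ # always falsy, the right operand is never reached.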
+ right_unreachable: bool + # Used for expressions that represent a type "X | Y" in some contexts + analyzed: TypeAliasExpr | None + # If this value expression can also be parsed as a valid type expression, + # represents the type denoted by the type expression. + # None means "is not a type expression". + as_type: NotParsed | mypy.types.Type | None + + def __init__( + self, op: str, left: Expression, right: Expression, analyzed: TypeAliasExpr | None = None + ) -> None: + super().__init__() + self.op = op + self.left = left + self.right = right + self.method_type = None + self.right_always = False + self.right_unreachable = False + self.analyzed = analyzed + self.as_type = NotParsed.VALUE + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_op_expr(self) + + +# Expression subtypes that could represent the root of a valid type expression. +# +# May have an "as_type" attribute to hold the type for a type expression parsed +# during the SemanticAnalyzer pass. +MaybeTypeExpression = (IndexExpr, MemberExpr, NameExpr, OpExpr, StrExpr) + + +class ComparisonExpr(Expression): + """Comparison expression (e.g. a < b > c < d).""" + + __slots__ = ("operators", "operands", "method_types") + + __match_args__ = ("operands", "operators") + + operators: list[str] + operands: list[Expression] + # Inferred type for the operator methods (when relevant; None for 'is'). + method_types: list[mypy.types.Type | None] + + def __init__(self, operators: list[str], operands: list[Expression]) -> None: + super().__init__() + self.operators = operators + self.operands = operands + self.method_types = [] + + def pairwise(self) -> Iterator[tuple[str, Expression, Expression]]: + """If this comparison expr is "a < b is c == d", yields the sequence + ("<", a, b), ("is", b, c), ("==", c, d) + """ + for i, operator in enumerate(self.operators): + yield operator, self.operands[i], self.operands[i + 1] + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_comparison_expr(self) + + +class SliceExpr(Expression): + """Slice expression (e.g. 'x:y', 'x:', '::2' or ':'). + + This is only valid as index in index expressions. 
+ """ + + __slots__ = ("begin_index", "end_index", "stride") + + __match_args__ = ("begin_index", "end_index", "stride") + + begin_index: Expression | None + end_index: Expression | None + stride: Expression | None + + def __init__( + self, + begin_index: Expression | None, + end_index: Expression | None, + stride: Expression | None, + ) -> None: + super().__init__() + self.begin_index = begin_index + self.end_index = end_index + self.stride = stride + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_slice_expr(self) + + +class CastExpr(Expression): + """Cast expression cast(type, expr).""" + + __slots__ = ("expr", "type") + + __match_args__ = ("expr", "type") + + expr: Expression + type: mypy.types.Type + + def __init__(self, expr: Expression, typ: mypy.types.Type) -> None: + super().__init__() + self.expr = expr + self.type = typ + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_cast_expr(self) + + +class TypeFormExpr(Expression): + """TypeForm(type) expression.""" + + __slots__ = ("type",) + + __match_args__ = ("type",) + + type: mypy.types.Type + + def __init__(self, typ: mypy.types.Type) -> None: + super().__init__() + self.type = typ + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_type_form_expr(self) + + +class AssertTypeExpr(Expression): + """Represents a typing.assert_type(expr, type) call.""" + + __slots__ = ("expr", "type") + + __match_args__ = ("expr", "type") + + expr: Expression + type: mypy.types.Type + + def __init__(self, expr: Expression, typ: mypy.types.Type) -> None: + super().__init__() + self.expr = expr + self.type = typ + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_assert_type_expr(self) + + +class RevealExpr(Expression): + """Reveal type expression reveal_type(expr) or reveal_locals() expression.""" + + __slots__ = ("expr", "kind", "local_nodes", "is_imported") + + __match_args__ = ("expr", "kind", "local_nodes", "is_imported") + + expr: Expression | None + kind: int + local_nodes: list[Var] | None + + def __init__( + self, + kind: int, + expr: Expression | None = None, + local_nodes: list[Var] | None = None, + is_imported: bool = False, + ) -> None: + super().__init__() + self.expr = expr + self.kind = kind + self.local_nodes = local_nodes + self.is_imported = is_imported + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_reveal_expr(self) + + +class SuperExpr(Expression): + """Expression super().name""" + + __slots__ = ("name", "info", "call") + + __match_args__ = ("name", "call", "info") + + name: str + info: TypeInfo | None # Type that contains this super expression + call: CallExpr # The expression super(...) 
+ + def __init__(self, name: str, call: CallExpr) -> None: + super().__init__() + self.name = name + self.call = call + self.info = None + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_super_expr(self) + + +class LambdaExpr(FuncItem, Expression): + """Lambda expression""" + + __match_args__ = ("arguments", "arg_names", "arg_kinds", "body") + + @property + def name(self) -> str: + return LAMBDA_NAME + + def expr(self) -> Expression: + """Return the expression (the body) of the lambda.""" + ret = self.body.body[-1] + assert isinstance(ret, ReturnStmt) + expr = ret.expr + assert expr is not None # lambda can't have empty body + return expr + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_lambda_expr(self) + + def is_dynamic(self) -> bool: + return False + + +class ListExpr(Expression): + """List literal expression [...].""" + + __slots__ = ("items",) + + __match_args__ = ("items",) + + items: list[Expression] + + def __init__(self, items: list[Expression]) -> None: + super().__init__() + self.items = items + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_list_expr(self) + + +class DictExpr(Expression): + """Dictionary literal expression {key: value, ...}.""" + + __slots__ = ("items",) + + __match_args__ = ("items",) + + items: list[tuple[Expression | None, Expression]] + + def __init__(self, items: list[tuple[Expression | None, Expression]]) -> None: + super().__init__() + self.items = items + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_dict_expr(self) + + +class TupleExpr(Expression): + """Tuple literal expression (..., ...) + + Also lvalue sequences (..., ...) and [..., ...]""" + + __slots__ = ("items",) + + __match_args__ = ("items",) + + items: list[Expression] + + def __init__(self, items: list[Expression]) -> None: + super().__init__() + self.items = items + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_tuple_expr(self) + + +class SetExpr(Expression): + """Set literal expression {value, ...}.""" + + __slots__ = ("items",) + + __match_args__ = ("items",) + + items: list[Expression] + + def __init__(self, items: list[Expression]) -> None: + super().__init__() + self.items = items + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_set_expr(self) + + +class GeneratorExpr(Expression): + """Generator expression ... for ... in ... [ for ... in ... ] [ if ... ].""" + + __slots__ = ("left_expr", "sequences", "condlists", "is_async", "indices") + + __match_args__ = ("left_expr", "indices", "sequences", "condlists") + + left_expr: Expression + sequences: list[Expression] + condlists: list[list[Expression]] + is_async: list[bool] + indices: list[Lvalue] + + def __init__( + self, + left_expr: Expression, + indices: list[Lvalue], + sequences: list[Expression], + condlists: list[list[Expression]], + is_async: list[bool], + ) -> None: + super().__init__() + self.left_expr = left_expr + self.sequences = sequences + self.condlists = condlists + self.indices = indices + self.is_async = is_async + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_generator_expr(self) + + +class ListComprehension(Expression): + """List comprehension (e.g. 
[x + 1 for x in a])""" + + __slots__ = ("generator",) + + __match_args__ = ("generator",) + + generator: GeneratorExpr + + def __init__(self, generator: GeneratorExpr) -> None: + super().__init__() + self.generator = generator + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_list_comprehension(self) + + +class SetComprehension(Expression): + """Set comprehension (e.g. {x + 1 for x in a})""" + + __slots__ = ("generator",) + + __match_args__ = ("generator",) + + generator: GeneratorExpr + + def __init__(self, generator: GeneratorExpr) -> None: + super().__init__() + self.generator = generator + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_set_comprehension(self) + + +class DictionaryComprehension(Expression): + """Dictionary comprehension (e.g. {k: v for k, v in a}""" + + __slots__ = ("key", "value", "sequences", "condlists", "is_async", "indices") + + __match_args__ = ("key", "value", "indices", "sequences", "condlists") + + key: Expression + value: Expression + sequences: list[Expression] + condlists: list[list[Expression]] + is_async: list[bool] + indices: list[Lvalue] + + def __init__( + self, + key: Expression, + value: Expression, + indices: list[Lvalue], + sequences: list[Expression], + condlists: list[list[Expression]], + is_async: list[bool], + ) -> None: + super().__init__() + self.key = key + self.value = value + self.sequences = sequences + self.condlists = condlists + self.indices = indices + self.is_async = is_async + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_dictionary_comprehension(self) + + +class ConditionalExpr(Expression): + """Conditional expression (e.g. x if y else z)""" + + __slots__ = ("cond", "if_expr", "else_expr") + + __match_args__ = ("if_expr", "cond", "else_expr") + + cond: Expression + if_expr: Expression + else_expr: Expression + + def __init__(self, cond: Expression, if_expr: Expression, else_expr: Expression) -> None: + super().__init__() + self.cond = cond + self.if_expr = if_expr + self.else_expr = else_expr + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_conditional_expr(self) + + +class TypeApplication(Expression): + """Type application expr[type, ...]""" + + __slots__ = ("expr", "types") + + __match_args__ = ("expr", "types") + + expr: Expression + types: list[mypy.types.Type] + + def __init__(self, expr: Expression, types: list[mypy.types.Type]) -> None: + super().__init__() + self.expr = expr + self.types = types + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_type_application(self) + + +# Variance of a type variable. For example, T in the definition of +# List[T] is invariant, so List[int] is not a subtype of List[object], +# and also List[object] is not a subtype of List[int]. +# +# The T in Iterable[T] is covariant, so Iterable[int] is a subtype of +# Iterable[object], but not vice versa. +# +# If T is contravariant in Foo[T], Foo[object] is a subtype of +# Foo[int], but not vice versa. +INVARIANT: Final = 0 +COVARIANT: Final = 1 +CONTRAVARIANT: Final = 2 +VARIANCE_NOT_READY: Final = 3 # Variance hasn't been inferred (using Python 3.12 syntax) + + +class TypeVarLikeExpr(SymbolNode, Expression): + """Base class for TypeVarExpr, ParamSpecExpr and TypeVarTupleExpr. + + Note that they are constructed by the semantic analyzer. 
+ """ + + __slots__ = ("_name", "_fullname", "upper_bound", "default", "variance", "is_new_style") + + _name: str + _fullname: str + # Upper bound: only subtypes of upper_bound are valid as values. By default + # this is 'object', meaning no restriction. + upper_bound: mypy.types.Type + # Default: used to resolve the TypeVar if the default is not explicitly given. + # By default this is 'AnyType(TypeOfAny.from_omitted_generics)'. See PEP 696. + default: mypy.types.Type + # Variance of the type variable. Invariant is the default. + # TypeVar(..., covariant=True) defines a covariant type variable. + # TypeVar(..., contravariant=True) defines a contravariant type + # variable. + variance: int + + def __init__( + self, + name: str, + fullname: str, + upper_bound: mypy.types.Type, + default: mypy.types.Type, + variance: int = INVARIANT, + is_new_style: bool = False, + line: int = -1, + ) -> None: + super().__init__(line=line) + self._name = name + self._fullname = fullname + self.upper_bound = upper_bound + self.default = default + self.variance = variance + self.is_new_style = is_new_style + + @property + def name(self) -> str: + return self._name + + @property + def fullname(self) -> str: + return self._fullname + + +# All types that are both SymbolNodes and Expressions. +# Use when common children of them are needed. +SYMBOL_NODE_EXPRESSION_TYPES: Final = (TypeVarLikeExpr,) + + +class TypeVarExpr(TypeVarLikeExpr): + """Type variable expression TypeVar(...). + + This is also used to represent type variables in symbol tables. + + A type variable is not valid as a type unless bound in a TypeVarLikeScope. + That happens within: + + 1. a generic class that uses the type variable as a type argument or + 2. a generic function that refers to the type variable in its signature. + """ + + __slots__ = ("values",) + + __match_args__ = ("name", "values", "upper_bound", "default") + + # Value restriction: only types in the list are valid as values. If the + # list is empty, there is no restriction. 
+ values: list[mypy.types.Type] + + def __init__( + self, + name: str, + fullname: str, + values: list[mypy.types.Type], + upper_bound: mypy.types.Type, + default: mypy.types.Type, + variance: int = INVARIANT, + is_new_style: bool = False, + line: int = -1, + ) -> None: + super().__init__(name, fullname, upper_bound, default, variance, is_new_style, line=line) + self.values = values + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_type_var_expr(self) + + def serialize(self) -> JsonDict: + return { + ".class": "TypeVarExpr", + "name": self._name, + "fullname": self._fullname, + "values": [t.serialize() for t in self.values], + "upper_bound": self.upper_bound.serialize(), + "default": self.default.serialize(), + "variance": self.variance, + } + + @classmethod + def deserialize(cls, data: JsonDict) -> TypeVarExpr: + assert data[".class"] == "TypeVarExpr" + return TypeVarExpr( + data["name"], + data["fullname"], + [mypy.types.deserialize_type(v) for v in data["values"]], + mypy.types.deserialize_type(data["upper_bound"]), + mypy.types.deserialize_type(data["default"]), + data["variance"], + ) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, TYPE_VAR_EXPR) + write_str(data, self._name) + write_str(data, self._fullname) + mypy.types.write_type_list(data, self.values) + self.upper_bound.write(data) + self.default.write(data) + write_int(data, self.variance) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> TypeVarExpr: + ret = TypeVarExpr( + read_str(data), + read_str(data), + mypy.types.read_type_list(data), + mypy.types.read_type(data), + mypy.types.read_type(data), + read_int(data), + ) + assert read_tag(data) == END_TAG + return ret + + +class ParamSpecExpr(TypeVarLikeExpr): + __slots__ = () + + __match_args__ = ("name", "upper_bound", "default") + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_paramspec_expr(self) + + def serialize(self) -> JsonDict: + return { + ".class": "ParamSpecExpr", + "name": self._name, + "fullname": self._fullname, + "upper_bound": self.upper_bound.serialize(), + "default": self.default.serialize(), + "variance": self.variance, + } + + @classmethod + def deserialize(cls, data: JsonDict) -> ParamSpecExpr: + assert data[".class"] == "ParamSpecExpr" + return ParamSpecExpr( + data["name"], + data["fullname"], + mypy.types.deserialize_type(data["upper_bound"]), + mypy.types.deserialize_type(data["default"]), + data["variance"], + ) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, PARAM_SPEC_EXPR) + write_str(data, self._name) + write_str(data, self._fullname) + self.upper_bound.write(data) + self.default.write(data) + write_int(data, self.variance) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> ParamSpecExpr: + ret = ParamSpecExpr( + read_str(data), + read_str(data), + mypy.types.read_type(data), + mypy.types.read_type(data), + read_int(data), + ) + assert read_tag(data) == END_TAG + return ret + + +class TypeVarTupleExpr(TypeVarLikeExpr): + """Type variable tuple expression TypeVarTuple(...).""" + + __slots__ = "tuple_fallback" + + tuple_fallback: mypy.types.Instance + + __match_args__ = ("name", "upper_bound", "default") + + def __init__( + self, + name: str, + fullname: str, + upper_bound: mypy.types.Type, + tuple_fallback: mypy.types.Instance, + default: mypy.types.Type, + variance: int = INVARIANT, + is_new_style: bool = False, + line: int = -1, + ) -> None: + super().__init__(name, fullname, upper_bound, 
default, variance, is_new_style, line=line) + self.tuple_fallback = tuple_fallback + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_type_var_tuple_expr(self) + + def serialize(self) -> JsonDict: + return { + ".class": "TypeVarTupleExpr", + "name": self._name, + "fullname": self._fullname, + "upper_bound": self.upper_bound.serialize(), + "tuple_fallback": self.tuple_fallback.serialize(), + "default": self.default.serialize(), + "variance": self.variance, + } + + @classmethod + def deserialize(cls, data: JsonDict) -> TypeVarTupleExpr: + assert data[".class"] == "TypeVarTupleExpr" + return TypeVarTupleExpr( + data["name"], + data["fullname"], + mypy.types.deserialize_type(data["upper_bound"]), + mypy.types.Instance.deserialize(data["tuple_fallback"]), + mypy.types.deserialize_type(data["default"]), + data["variance"], + ) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, TYPE_VAR_TUPLE_EXPR) + self.tuple_fallback.write(data) + write_str(data, self._name) + write_str(data, self._fullname) + self.upper_bound.write(data) + self.default.write(data) + write_int(data, self.variance) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> TypeVarTupleExpr: + assert read_tag(data) == mypy.types.INSTANCE + fallback = mypy.types.Instance.read(data) + ret = TypeVarTupleExpr( + read_str(data), + read_str(data), + mypy.types.read_type(data), + fallback, + mypy.types.read_type(data), + read_int(data), + ) + assert read_tag(data) == END_TAG + return ret + + +class TypeAliasExpr(Expression): + """Type alias expression (rvalue).""" + + __slots__ = ("node",) + + __match_args__ = ("node",) + + node: TypeAlias + + def __init__(self, node: TypeAlias) -> None: + super().__init__() + self.node = node + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_type_alias_expr(self) + + +class NamedTupleExpr(Expression): + """Named tuple expression namedtuple(...) 
or NamedTuple(...).""" + + __slots__ = ("info", "is_typed") + + __match_args__ = ("info",) + + # The class representation of this named tuple (its tuple_type attribute contains + # the tuple item types) + info: TypeInfo + is_typed: bool # whether this class was created with typing(_extensions).NamedTuple + + def __init__(self, info: TypeInfo, is_typed: bool = False) -> None: + super().__init__() + self.info = info + self.is_typed = is_typed + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_namedtuple_expr(self) + + +class TypedDictExpr(Expression): + """Typed dict expression TypedDict(...).""" + + __slots__ = ("info",) + + __match_args__ = ("info",) + + # The class representation of this typed dict + info: TypeInfo + + def __init__(self, info: TypeInfo) -> None: + super().__init__() + self.info = info + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_typeddict_expr(self) + + +class EnumCallExpr(Expression): + """Named tuple expression Enum('name', 'val1 val2 ...').""" + + __slots__ = ("info", "items", "values") + + __match_args__ = ("info", "items", "values") + + # The class representation of this enumerated type + info: TypeInfo + # The item names (for debugging) + items: list[str] + values: list[Expression | None] + + def __init__(self, info: TypeInfo, items: list[str], values: list[Expression | None]) -> None: + super().__init__() + self.info = info + self.items = items + self.values = values + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_enum_call_expr(self) + + +class PromoteExpr(Expression): + """Ducktype class decorator expression _promote(...).""" + + __slots__ = ("type",) + + type: mypy.types.ProperType + + def __init__(self, type: mypy.types.ProperType) -> None: + super().__init__() + self.type = type + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit__promote_expr(self) + + +class NewTypeExpr(Expression): + """NewType expression NewType(...).""" + + __slots__ = ("name", "old_type", "info") + + __match_args__ = ("name", "old_type", "info") + + name: str + # The base type (the second argument to NewType) + old_type: mypy.types.Type | None + # The synthesized class representing the new type (inherits old_type) + info: TypeInfo | None + + def __init__( + self, name: str, old_type: mypy.types.Type | None, line: int, column: int + ) -> None: + super().__init__(line=line, column=column) + self.name = name + self.old_type = old_type + self.info = None + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_newtype_expr(self) + + +class AwaitExpr(Expression): + """Await expression (await ...).""" + + __slots__ = ("expr",) + + __match_args__ = ("expr",) + + expr: Expression + + def __init__(self, expr: Expression) -> None: + super().__init__() + self.expr = expr + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_await_expr(self) + + +# Constants + + +class TempNode(Expression): + """Temporary dummy node used during type checking. + + This node is not present in the original program; it is just an artifact + of the type checker implementation. It only represents an opaque node with + some fixed type. + """ + + __slots__ = ("type", "no_rhs") + + type: mypy.types.Type + # Is this TempNode used to indicate absence of a right hand side in an annotated assignment? + # (e.g. 
for 'x: int' the rvalue is TempNode(AnyType(TypeOfAny.special_form), no_rhs=True)) + no_rhs: bool + + def __init__( + self, typ: mypy.types.Type, no_rhs: bool = False, *, context: Context | None = None + ) -> None: + """Construct a dummy node; optionally borrow line/column from context object.""" + super().__init__() + self.type = typ + self.no_rhs = no_rhs + if context is not None: + self.line = context.line + self.column = context.column + + def __repr__(self) -> str: + return "TempNode:%d(%s)" % (self.line, str(self.type)) + + def accept(self, visitor: ExpressionVisitor[T]) -> T: + return visitor.visit_temp_node(self) + + +# Special attributes not collected as protocol members by Python 3.12 +# See typing._SPECIAL_NAMES +EXCLUDED_PROTOCOL_ATTRIBUTES: Final = frozenset( + { + "__abstractmethods__", + "__annotations__", + "__dict__", + "__doc__", + "__init__", + "__module__", + "__new__", + "__slots__", + "__subclasshook__", + "__weakref__", + "__class_getitem__", # Since Python 3.9 + } +) + +# Attributes that can optionally be defined in the body of a subclass of +# enum.Enum but are removed from the class __dict__ by EnumMeta. +EXCLUDED_ENUM_ATTRIBUTES: Final = frozenset({"_ignore_", "_order_", "__order__"}) + + +class TypeInfo(SymbolNode): + """The type structure of a single class. + + Each TypeInfo corresponds one-to-one to a ClassDef, which + represents the AST of the class. + + In type-theory terms, this is a "type constructor", and if the + class is generic then it will be a type constructor of higher kind. + Where the class is used in an actual type, it's in the form of an + Instance, which amounts to a type application of the tycon to + the appropriate number of arguments. + """ + + __slots__ = ( + "_fullname", + "module_name", + "defn", + "mro", + "_mro_refs", + "bad_mro", + "is_final", + "is_disjoint_base", + "declared_metaclass", + "metaclass_type", + "names", + "is_abstract", + "is_protocol", + "runtime_protocol", + "abstract_attributes", + "deletable_attributes", + "slots", + "assuming", + "assuming_proper", + "inferring", + "is_enum", + "fallback_to_any", + "meta_fallback_to_any", + "type_vars", + "has_param_spec_type", + "bases", + "_promote", + "tuple_type", + "special_alias", + "is_named_tuple", + "typeddict_type", + "is_newtype", + "is_intersection", + "metadata", + "alt_promote", + "has_type_var_tuple_type", + "type_var_tuple_prefix", + "type_var_tuple_suffix", + "self_type", + "dataclass_transform_spec", + "is_type_check_only", + "deprecated", + "type_object_type", + ) + + _fullname: str # Fully qualified name + # Fully qualified name for the module this type was defined in. This + # information is also in the fullname, but is harder to extract in the + # case of nested class definitions. + module_name: str + defn: ClassDef # Corresponding ClassDef + # Method Resolution Order: the order of looking up attributes. The first + # value always to refers to this class. + mro: list[TypeInfo] + # Used to stash the names of the mro classes temporarily between + # deserialization and fixup. See deserialize() for why. + _mro_refs: list[str] | None + bad_mro: bool # Could not construct full MRO + is_final: bool + is_disjoint_base: bool + + declared_metaclass: mypy.types.Instance | None + metaclass_type: mypy.types.Instance | None + + names: SymbolTable # Names defined directly in this type + is_abstract: bool # Does the class have any abstract attributes? + is_protocol: bool # Is this a protocol class? + runtime_protocol: bool # Does this protocol support isinstance checks? 
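+ # Illustrative example: a protocol class decorated with
+ # `@typing.runtime_checkable` has both is_protocol and runtime_protocol set.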
+ # List of names of abstract attributes together with their abstract status. + # The abstract status must be one of `NOT_ABSTRACT`, `IS_ABSTRACT`, `IMPLICITLY_ABSTRACT`. + abstract_attributes: list[tuple[str, int]] + deletable_attributes: list[str] # Used by mypyc only + # Does this type have concrete `__slots__` defined? + # If class does not have `__slots__` defined then it is `None`, + # if it has empty `__slots__` then it is an empty set. + slots: set[str] | None + + # The attributes 'assuming' and 'assuming_proper' represent structural subtype matrices. + # + # In languages with structural subtyping, one can keep a global subtype matrix like this: + # . A B C . + # A 1 0 0 + # B 1 1 1 + # C 1 0 1 + # . + # where 1 indicates that the type in corresponding row is a subtype of the type + # in corresponding column. This matrix typically starts filled with all 1's and + # a typechecker tries to "disprove" every subtyping relation using atomic (or nominal) types. + # However, we don't want to keep this huge global state. Instead, we keep the subtype + # information in the form of list of pairs (subtype, supertype) shared by all Instances + # with given supertype's TypeInfo. When we enter a subtype check we push a pair in this list + # thus assuming that we started with 1 in corresponding matrix element. Such algorithm allows + # to treat recursive and mutually recursive protocols and other kinds of complex situations. + # + # If concurrent/parallel type checking will be added in future, + # then there should be one matrix per thread/process to avoid false negatives + # during the type checking phase. + assuming: list[tuple[mypy.types.Instance, mypy.types.Instance]] + assuming_proper: list[tuple[mypy.types.Instance, mypy.types.Instance]] + # Ditto for temporary 'inferring' stack of recursive constraint inference. + # It contains Instances of protocol types that appeared as an argument to + # constraints.infer_constraints(). We need 'inferring' to avoid infinite recursion for + # recursive and mutually recursive protocols. + # + # We make 'assuming' and 'inferring' attributes here instead of passing they as kwargs, + # since this would require to pass them in many dozens of calls. In particular, + # there is a dependency infer_constraint -> is_subtype -> is_callable_subtype -> + # -> infer_constraints. + inferring: list[mypy.types.Instance] + # 'inferring' and 'assuming' can't be made sets, since we need to use + # is_same_type to correctly treat unions. + + # Classes inheriting from Enum shadow their true members with a __getattr__, so we + # have to treat them as a special case. + is_enum: bool + # If true, any unknown attributes should have type 'Any' instead + # of generating a type error. This would be true if there is a + # base class with type 'Any', but other use cases may be + # possible. This is similar to having __getattr__ that returns Any + # (and __setattr__), but without the __getattr__ method. + fallback_to_any: bool + + # Same as above but for cases where metaclass has type Any. This will suppress + # all attribute errors only for *class object* access. + meta_fallback_to_any: bool + + # Information related to type annotations. + + # Generic type variable names (full names) + type_vars: list[str] + + # Whether this class has a ParamSpec type variable + has_param_spec_type: bool + + # Direct base classes. + bases: list[mypy.types.Instance] + + # Another type which this type will be treated as a subtype of, + # even though it's not a subclass in Python. 
The non-standard + # `@_promote` decorator introduces this, and there are also + # several builtin examples, in particular `int` -> `float`. + _promote: list[mypy.types.ProperType] + + # This is used for promoting native integer types such as 'i64' to + # 'int'. (_promote is used for the other direction.) This only + # supports one-step promotions (e.g., i64 -> int, not + # i64 -> int -> float, and this isn't used to promote in joins. + # + # This results in some unintuitive results, such as that even + # though i64 is compatible with int and int is compatible with + # float, i64 is *not* compatible with float. + alt_promote: mypy.types.Instance | None + + # Representation of a Tuple[...] base class, if the class has any + # (e.g., for named tuples). If this is not None, the actual Type + # object used for this class is not an Instance but a TupleType; + # the corresponding Instance is set as the fallback type of the + # tuple type. + tuple_type: mypy.types.TupleType | None + + # Is this a named tuple type? + is_named_tuple: bool + + # If this class is defined by the TypedDict type constructor, + # then this is not None. + typeddict_type: mypy.types.TypedDictType | None + + # Is this a newtype type? + is_newtype: bool + + # Is this a synthesized intersection type? + is_intersection: bool + + # This is a dictionary that will be serialized and un-serialized as is. + # It is useful for plugins to add their data to save in the cache. + metadata: dict[str, JsonDict] + + # Store type alias representing this type (for named tuples and TypedDicts). + # Although definitions of these types are stored in symbol tables as TypeInfo, + # when a type analyzer will find them, it should construct a TupleType, or + # a TypedDict type. However, we can't use the plain types, since if the definition + # is recursive, this will create an actual recursive structure of types (i.e. as + # internal Python objects) causing infinite recursions everywhere during type checking. + # To overcome this, we create a TypeAlias node, that will point to these types. + # We store this node in the `special_alias` attribute, because it must be the same node + # in case we are doing multiple semantic analysis passes. + special_alias: TypeAlias | None + + # Shared type variable for typing.Self in this class (if used, otherwise None). + self_type: mypy.types.TypeVarType | None + + # Added if the corresponding class is directly decorated with `typing.dataclass_transform` + dataclass_transform_spec: DataclassTransformSpec | None + + # Is set to `True` when class is decorated with `@typing.type_check_only` + is_type_check_only: bool + + # The type's deprecation message (in case it is deprecated) + deprecated: str | None + + # Cached value of class constructor type, i.e. the type of class object when it + # appears in runtime context. 
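+ # Illustrative example: for a class `C` whose `__init__` takes a single `int`,
+ # this caches, roughly, a callable accepting an `int` and returning `C`.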
+ type_object_type: mypy.types.FunctionLike | None + + FLAGS: Final = [ + "is_abstract", + "is_enum", + "fallback_to_any", + "meta_fallback_to_any", + "is_named_tuple", + "is_newtype", + "is_protocol", + "runtime_protocol", + "is_final", + "is_disjoint_base", + "is_intersection", + ] + + def __init__(self, names: SymbolTable, defn: ClassDef, module_name: str) -> None: + """Initialize a TypeInfo.""" + super().__init__() + self._fullname = defn.fullname + self.names = names + self.defn = defn + self.module_name = module_name + self.type_vars = [] + self.has_param_spec_type = False + self.has_type_var_tuple_type = False + self.bases = [] + self.mro = [] + self._mro_refs = None + self.bad_mro = False + self.declared_metaclass = None + self.metaclass_type = None + self.is_abstract = False + self.abstract_attributes = [] + self.deletable_attributes = [] + self.slots = None + self.assuming = [] + self.assuming_proper = [] + self.inferring = [] + self.is_protocol = False + self.runtime_protocol = False + self.type_var_tuple_prefix: int | None = None + self.type_var_tuple_suffix: int | None = None + self.add_type_vars() + self.is_final = False + self.is_disjoint_base = False + self.is_enum = False + self.fallback_to_any = False + self.meta_fallback_to_any = False + self._promote = [] + self.alt_promote = None + self.tuple_type = None + self.special_alias = None + self.is_named_tuple = False + self.typeddict_type = None + self.is_newtype = False + self.is_intersection = False + self.metadata = {} + self.self_type = None + self.dataclass_transform_spec = None + self.is_type_check_only = False + self.deprecated = None + self.type_object_type = None + + def add_type_vars(self) -> None: + self.has_type_var_tuple_type = False + if self.defn.type_vars: + for i, vd in enumerate(self.defn.type_vars): + if isinstance(vd, mypy.types.ParamSpecType): + self.has_param_spec_type = True + if isinstance(vd, mypy.types.TypeVarTupleType): + assert not self.has_type_var_tuple_type + self.has_type_var_tuple_type = True + self.type_var_tuple_prefix = i + self.type_var_tuple_suffix = len(self.defn.type_vars) - i - 1 + self.type_vars.append(vd.name) + + @property + def name(self) -> str: + """Short name.""" + return self.defn.name + + @property + def fullname(self) -> str: + return self._fullname + + def is_generic(self) -> bool: + """Is the type generic (i.e. does it have type variables)?""" + return len(self.type_vars) > 0 + + def get(self, name: str) -> SymbolTableNode | None: + for cls in self.mro: + n = cls.names.get(name) + if n: + return n + return None + + def get_containing_type_info(self, name: str) -> TypeInfo | None: + for cls in self.mro: + if name in cls.names: + return cls + return None + + @property + def protocol_members(self) -> list[str]: + # Protocol members are names of all attributes/methods defined in a protocol + # and in all its supertypes (except for 'object'). + members: set[str] = set() + assert self.mro, "This property can be only accessed after MRO is (re-)calculated" + for base in self.mro[:-1]: # we skip "object" since everyone implements it + if base.is_protocol: + for name, node in base.names.items(): + if isinstance(node.node, (TypeAlias, TypeVarExpr, MypyFile)): + # These are auxiliary definitions (and type aliases are prohibited). + continue + if name in EXCLUDED_PROTOCOL_ATTRIBUTES: + continue + members.add(name) + return sorted(members) + + @property + def enum_members(self) -> list[str]: + # TODO: cache the results? 
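+ # Illustrative example: for `class Color(Enum): RED = 1; BLUE = 2` this returns
+ # ["RED", "BLUE"]; sunder/dunder names, `enum.nonmember()` values and names
+ # annotated without a value are excluded.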
+        members = []
+        for name, sym in self.names.items():
+            # Case 1:
+            #
+            # class MyEnum(Enum):
+            #     @member
+            #     def some(self): ...
+            if isinstance(sym.node, Decorator):
+                if any(
+                    dec.fullname == "enum.member"
+                    for dec in sym.node.decorators
+                    if isinstance(dec, RefExpr)
+                ):
+                    members.append(name)
+                continue
+            # Case 2:
+            #
+            # class MyEnum(Enum):
+            #     x = 1
+            #
+            # Case 3:
+            #
+            # class MyEnum(Enum):
+            #     class Other: ...
+            elif isinstance(sym.node, (Var, TypeInfo)):
+                if (
+                    # TODO: properly support ignored names from `_ignore_`
+                    name in EXCLUDED_ENUM_ATTRIBUTES
+                    or is_sunder(name)
+                    or name.startswith("__")  # dunder and private
+                ):
+                    continue  # name is excluded
+
+                if isinstance(sym.node, Var):
+                    if not sym.node.has_explicit_value:
+                        continue  # unannotated value not a member
+
+                    typ = mypy.types.get_proper_type(sym.node.type)
+                    if (
+                        isinstance(typ, mypy.types.FunctionLike) and not typ.is_type_obj()
+                    ) or (  # explicit `@member` is required
+                        isinstance(typ, mypy.types.Instance)
+                        and typ.type.fullname == "enum.nonmember"
+                    ):
+                        continue  # name is not a member
+
+                members.append(name)
+        return members
+
+    def __getitem__(self, name: str) -> SymbolTableNode:
+        n = self.get(name)
+        if n:
+            return n
+        else:
+            raise KeyError(name)
+
+    def __repr__(self) -> str:
+        return f"<TypeInfo {self.fullname}>"
+
+    def __bool__(self) -> bool:
+        # We defined this here instead of just overriding it in
+        # FakeInfo so that mypyc can generate a direct call instead of
+        # using the generic bool handling.
+        return not isinstance(self, FakeInfo)
+
+    def has_readable_member(self, name: str) -> bool:
+        return self.get(name) is not None
+
+    def get_method(self, name: str) -> FuncBase | Decorator | None:
+        for cls in self.mro:
+            if name in cls.names:
+                node = cls.names[name].node
+                if isinstance(node, SYMBOL_FUNCBASE_TYPES):
+                    return node
+                elif isinstance(node, Decorator):  # Two `if`s make `mypyc` happy
+                    return node
+                else:
+                    return None
+        return None
+
+    def calculate_metaclass_type(self) -> mypy.types.Instance | None:
+        declared = self.declared_metaclass
+        if declared is not None and not declared.type.has_base("builtins.type"):
+            return declared
+        if self._fullname == "builtins.type":
+            return mypy.types.Instance(self, [])
+
+        winner = declared
+        for super_class in self.mro[1:]:
+            super_meta = super_class.declared_metaclass
+            if super_meta is None or super_meta.type is None:
+                continue
+            if winner is None:
+                winner = super_meta
+                continue
+            if winner.type.has_base(super_meta.type.fullname):
+                continue
+            if super_meta.type.has_base(winner.type.fullname):
+                winner = super_meta
+                continue
+            # metaclass conflict
+            winner = None
+            break
+
+        return winner
+
+    def explain_metaclass_conflict(self) -> str | None:
+        # Compare to logic in calculate_metaclass_type
+        declared = self.declared_metaclass
+        if declared is not None and not declared.type.has_base("builtins.type"):
+            return None
+        if self._fullname == "builtins.type":
+            return None
+
+        winner = declared
+        if declared is None:
+            resolution_steps = []
+        else:
+            resolution_steps = [f'"{declared.type.fullname}" (metaclass of "{self.fullname}")']
+        for super_class in self.mro[1:]:
+            super_meta = super_class.declared_metaclass
+            if super_meta is None or super_meta.type is None:
+                continue
+            if winner is None:
+                winner = super_meta
+                resolution_steps.append(
+                    f'"{winner.type.fullname}" (metaclass of "{super_class.fullname}")'
+                )
+                continue
+            if winner.type.has_base(super_meta.type.fullname):
+                continue
+            if super_meta.type.has_base(winner.type.fullname):
+                winner = super_meta
+
resolution_steps.append( + f'"{winner.type.fullname}" (metaclass of "{super_class.fullname}")' + ) + continue + # metaclass conflict + conflict = f'"{super_meta.type.fullname}" (metaclass of "{super_class.fullname}")' + return f"{' > '.join(resolution_steps)} conflicts with {conflict}" + + return None + + def is_metaclass(self, *, precise: bool = False) -> bool: + return ( + self.has_base("builtins.type") + or self.fullname == "abc.ABCMeta" + or (self.fallback_to_any and not precise) + ) + + def has_base(self, fullname: str) -> bool: + """Return True if type has a base type with the specified name. + + This can be either via extension or via implementation. + """ + for cls in self.mro: + if cls.fullname == fullname: + return True + return False + + def direct_base_classes(self) -> list[TypeInfo]: + """Return a direct base classes. + + Omit base classes of other base classes. + """ + return [base.type for base in self.bases] + + def update_tuple_type(self, typ: mypy.types.TupleType) -> None: + """Update tuple_type and special_alias as needed.""" + self.tuple_type = typ + alias = TypeAlias.from_tuple_type(self) + if not self.special_alias: + self.special_alias = alias + else: + self.special_alias.target = alias.target + # Invalidate recursive status cache in case it was previously set. + self.special_alias._is_recursive = None + + def update_typeddict_type(self, typ: mypy.types.TypedDictType) -> None: + """Update typeddict_type and special_alias as needed.""" + self.typeddict_type = typ + alias = TypeAlias.from_typeddict_type(self) + if not self.special_alias: + self.special_alias = alias + else: + self.special_alias.target = alias.target + # Invalidate recursive status cache in case it was previously set. + self.special_alias._is_recursive = None + + def __str__(self) -> str: + """Return a string representation of the type. + + This includes the most important information about the type. + """ + options = Options() + return self.dump( + str_conv=mypy.strconv.StrConv(options=options), + type_str_conv=mypy.types.TypeStrVisitor(options=options), + ) + + def dump( + self, str_conv: mypy.strconv.StrConv, type_str_conv: mypy.types.TypeStrVisitor + ) -> str: + """Return a string dump of the contents of the TypeInfo.""" + + base: str = "" + + def type_str(typ: mypy.types.Type) -> str: + return typ.accept(type_str_conv) + + head = "TypeInfo" + str_conv.format_id(self) + if self.bases: + base = f"Bases({', '.join(type_str(base) for base in self.bases)})" + mro = "Mro({})".format( + ", ".join(item.fullname + str_conv.format_id(item) for item in self.mro) + ) + names = [] + for name in sorted(self.names): + description = name + str_conv.format_id(self.names[name].node) + node = self.names[name].node + if isinstance(node, Var) and node.type: + description += f" ({type_str(node.type)})" + names.append(description) + items = [f"Name({self.fullname})", base, mro, ("Names", names)] + if self.declared_metaclass: + items.append(f"DeclaredMetaclass({type_str(self.declared_metaclass)})") + if self.metaclass_type: + items.append(f"MetaclassType({type_str(self.metaclass_type)})") + return mypy.strconv.dump_tagged(items, head, str_conv=str_conv) + + def serialize(self) -> JsonDict: + # NOTE: This is where all ClassDefs originate, so there shouldn't be duplicates. 
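+
+        # Illustrative aside (not part of mypy): the "winner" loop in
+        # calculate_metaclass_type above applies the same rule CPython enforces at class
+        # creation time -- one candidate metaclass must be a (non-strict) subclass of the
+        # metaclasses of all bases, otherwise there is a conflict. A runnable reminder
+        # using hypothetical metaclasses MetaA/MetaB:
+        #
+        #     class MetaA(type): ...
+        #     class MetaB(type): ...
+        #
+        #     class A(metaclass=MetaA): ...
+        #     class B(metaclass=MetaB): ...
+        #
+        #     try:
+        #         class C(A, B): ...               # neither MetaA nor MetaB wins
+        #     except TypeError as exc:
+        #         assert "metaclass conflict" in str(exc)
+        #
+        # calculate_metaclass_type() returns None for the analogous TypeInfo setup, and
+        # explain_metaclass_conflict() reports which pair of metaclasses clashed.
+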
+ data = { + ".class": "TypeInfo", + "module_name": self.module_name, + "fullname": self.fullname, + "names": self.names.serialize(self.fullname), + "defn": self.defn.serialize(), + "abstract_attributes": self.abstract_attributes, + "type_vars": self.type_vars, + "has_param_spec_type": self.has_param_spec_type, + "bases": [b.serialize() for b in self.bases], + "mro": [c.fullname for c in self.mro], + "_promote": [p.serialize() for p in self._promote], + "alt_promote": None if self.alt_promote is None else self.alt_promote.serialize(), + "declared_metaclass": ( + None if self.declared_metaclass is None else self.declared_metaclass.serialize() + ), + "metaclass_type": ( + None if self.metaclass_type is None else self.metaclass_type.serialize() + ), + "tuple_type": None if self.tuple_type is None else self.tuple_type.serialize(), + "typeddict_type": ( + None if self.typeddict_type is None else self.typeddict_type.serialize() + ), + "flags": get_flags(self, TypeInfo.FLAGS), + "metadata": self.metadata, + "slots": sorted(self.slots) if self.slots is not None else None, + "deletable_attributes": self.deletable_attributes, + "self_type": self.self_type.serialize() if self.self_type is not None else None, + "dataclass_transform_spec": ( + self.dataclass_transform_spec.serialize() + if self.dataclass_transform_spec is not None + else None + ), + "deprecated": self.deprecated, + } + return data + + @classmethod + def deserialize(cls, data: JsonDict) -> TypeInfo: + names = SymbolTable.deserialize(data["names"]) + defn = ClassDef.deserialize(data["defn"]) + module_name = data["module_name"] + ti = TypeInfo(names, defn, module_name) + ti._fullname = data["fullname"] + ti.abstract_attributes = [(attr[0], attr[1]) for attr in data["abstract_attributes"]] + ti.type_vars = data["type_vars"] + ti.has_param_spec_type = data["has_param_spec_type"] + ti.bases = [mypy.types.Instance.deserialize(b) for b in data["bases"]] + _promote = [] + for p in data["_promote"]: + t = mypy.types.deserialize_type(p) + assert isinstance(t, mypy.types.ProperType) + _promote.append(t) + ti._promote = _promote + ti.alt_promote = ( + None + if data["alt_promote"] is None + else mypy.types.Instance.deserialize(data["alt_promote"]) + ) + ti.declared_metaclass = ( + None + if data["declared_metaclass"] is None + else mypy.types.Instance.deserialize(data["declared_metaclass"]) + ) + ti.metaclass_type = ( + None + if data["metaclass_type"] is None + else mypy.types.Instance.deserialize(data["metaclass_type"]) + ) + # NOTE: ti.mro will be set in the fixup phase based on these + # names. The reason we need to store the mro instead of just + # recomputing it from base classes has to do with a subtle + # point about fine-grained incremental: the cache files might + # not be loaded until after a class in the mro has changed its + # bases, which causes the mro to change. If we recomputed our + # mro, we would compute the *new* mro, which leaves us with no + # way to detect that the mro has changed! Thus we need to make + # sure to load the original mro so that once the class is + # rechecked, it can tell that the mro has changed. 
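+
+        # Illustrative aside (not part of mypy): serialize()/deserialize() above follow
+        # the usual round-trip contract -- serialize() emits a JSON-compatible dict tagged
+        # with ".class", and deserialize() rebuilds an equivalent object from it. A
+        # minimal self-contained sketch of that contract (hypothetical Point class):
+        #
+        #     from typing import Any
+        #
+        #     class Point:
+        #         def __init__(self, x: int, y: int) -> None:
+        #             self.x, self.y = x, y
+        #
+        #         def serialize(self) -> dict[str, Any]:
+        #             return {".class": "Point", "x": self.x, "y": self.y}
+        #
+        #         @classmethod
+        #         def deserialize(cls, data: dict[str, Any]) -> "Point":
+        #             assert data[".class"] == "Point"
+        #             return cls(data["x"], data["y"])
+        #
+        #     p = Point.deserialize(Point(1, 2).serialize())
+        #     assert (p.x, p.y) == (1, 2)
+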
+ ti._mro_refs = data["mro"] + ti.tuple_type = ( + None + if data["tuple_type"] is None + else mypy.types.TupleType.deserialize(data["tuple_type"]) + ) + ti.typeddict_type = ( + None + if data["typeddict_type"] is None + else mypy.types.TypedDictType.deserialize(data["typeddict_type"]) + ) + ti.metadata = data["metadata"] + ti.slots = set(data["slots"]) if data["slots"] is not None else None + ti.deletable_attributes = data["deletable_attributes"] + set_flags(ti, data["flags"]) + st = data["self_type"] + ti.self_type = mypy.types.TypeVarType.deserialize(st) if st is not None else None + if data.get("dataclass_transform_spec") is not None: + ti.dataclass_transform_spec = DataclassTransformSpec.deserialize( + data["dataclass_transform_spec"] + ) + ti.deprecated = data.get("deprecated") + return ti + + def write(self, data: WriteBuffer) -> None: + write_tag(data, TYPE_INFO) + self.names.write(data, self.fullname) + self.defn.write(data) + write_str(data, self.module_name) + write_str(data, self.fullname) + write_str_list(data, [a for a, _ in self.abstract_attributes]) + write_int_list(data, [s for _, s in self.abstract_attributes]) + write_str_list(data, self.type_vars) + write_bool(data, self.has_param_spec_type) + mypy.types.write_type_list(data, self.bases) + write_str_list(data, [c.fullname for c in self.mro]) + mypy.types.write_type_list(data, self._promote) + mypy.types.write_type_opt(data, self.alt_promote) + mypy.types.write_type_opt(data, self.declared_metaclass) + mypy.types.write_type_opt(data, self.metaclass_type) + mypy.types.write_type_opt(data, self.tuple_type) + mypy.types.write_type_opt(data, self.typeddict_type) + write_flags(data, self, TypeInfo.FLAGS) + write_json(data, self.metadata) + if self.slots is None: + write_tag(data, LITERAL_NONE) + else: + write_str_list(data, sorted(self.slots)) + write_str_list(data, self.deletable_attributes) + mypy.types.write_type_opt(data, self.self_type) + if self.dataclass_transform_spec is None: + write_tag(data, LITERAL_NONE) + else: + self.dataclass_transform_spec.write(data) + write_str_opt(data, self.deprecated) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> TypeInfo: + names = SymbolTable.read(data) + assert read_tag(data) == CLASS_DEF + defn = ClassDef.read(data) + module_name = read_str(data) + ti = TypeInfo(names, defn, module_name) + ti._fullname = read_str(data) + attrs = read_str_list(data) + statuses = read_int_list(data) + ti.abstract_attributes = list(zip(attrs, statuses)) + ti.type_vars = read_str_list(data) + ti.has_param_spec_type = read_bool(data) + ti.bases = [] + assert read_tag(data) == LIST_GEN + for _ in range(read_int_bare(data)): + assert read_tag(data) == mypy.types.INSTANCE + ti.bases.append(mypy.types.Instance.read(data)) + # NOTE: ti.mro will be set in the fixup phase based on these + # names. The reason we need to store the mro instead of just + # recomputing it from base classes has to do with a subtle + # point about fine-grained incremental: the cache files might + # not be loaded until after a class in the mro has changed its + # bases, which causes the mro to change. If we recomputed our + # mro, we would compute the *new* mro, which leaves us with no + # way to detect that the mro has changed! Thus, we need to make + # sure to load the original mro so that once the class is + # rechecked, it can tell that the mro has changed. 
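+
+        # Illustrative aside (not part of mypy): TypeInfo.FLAGS lists boolean attributes
+        # that are persisted compactly -- the JSON path keeps only the names of flags that
+        # are True (get_flags/set_flags, defined later in this module), while the binary
+        # path writes one bool per flag (write_flags/read_flags). A minimal sketch of the
+        # same round trip with a hypothetical Thing class:
+        #
+        #     class Thing:
+        #         is_final = False
+        #         is_enum = False
+        #
+        #     FLAGS = ["is_final", "is_enum"]
+        #
+        #     src = Thing()
+        #     src.is_enum = True
+        #     stored = [name for name in FLAGS if getattr(src, name)]   # cf. get_flags()
+        #     dst = Thing()
+        #     for name in stored:                                       # cf. set_flags()
+        #         setattr(dst, name, True)
+        #     assert dst.is_enum and not dst.is_final
+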
+ ti._mro_refs = read_str_list(data) + ti._promote = cast(list[mypy.types.ProperType], mypy.types.read_type_list(data)) + if (tag := read_tag(data)) != LITERAL_NONE: + assert tag == mypy.types.INSTANCE + ti.alt_promote = mypy.types.Instance.read(data) + if (tag := read_tag(data)) != LITERAL_NONE: + assert tag == mypy.types.INSTANCE + ti.declared_metaclass = mypy.types.Instance.read(data) + if (tag := read_tag(data)) != LITERAL_NONE: + assert tag == mypy.types.INSTANCE + ti.metaclass_type = mypy.types.Instance.read(data) + if (tag := read_tag(data)) != LITERAL_NONE: + assert tag == mypy.types.TUPLE_TYPE + ti.tuple_type = mypy.types.TupleType.read(data) + if (tag := read_tag(data)) != LITERAL_NONE: + assert tag == mypy.types.TYPED_DICT_TYPE + ti.typeddict_type = mypy.types.TypedDictType.read(data) + read_flags(data, ti, TypeInfo.FLAGS) + ti.metadata = read_json(data) + tag = read_tag(data) + if tag != LITERAL_NONE: + assert tag == LIST_STR + ti.slots = {read_str_bare(data) for _ in range(read_int_bare(data))} + ti.deletable_attributes = read_str_list(data) + if (tag := read_tag(data)) != LITERAL_NONE: + assert tag == mypy.types.TYPE_VAR_TYPE + ti.self_type = mypy.types.TypeVarType.read(data) + tag = read_tag(data) + if tag != LITERAL_NONE: + assert tag == DT_SPEC + ti.dataclass_transform_spec = DataclassTransformSpec.read(data) + ti.deprecated = read_str_opt(data) + assert read_tag(data) == END_TAG + return ti + + +class FakeInfo(TypeInfo): + __slots__ = ("msg",) + + # types.py defines a single instance of this class, called types.NOT_READY. + # This instance is used as a temporary placeholder in the process of de-serialization + # of 'Instance' types. The de-serialization happens in two steps: In the first step, + # Instance.type is set to NOT_READY. In the second step (in fixup.py) it is replaced by + # an actual TypeInfo. If you see the assertion error below, then most probably something + # went wrong during the second step and an 'Instance' that raised this error was not fixed. + # Note: + # 'None' is not used as a dummy value for two reasons: + # 1. This will require around 80-100 asserts to make 'mypy --strict-optional mypy' + # pass cleanly. + # 2. If NOT_READY value is accidentally used somewhere, it will be obvious where the value + # is from, whereas a 'None' value could come from anywhere. + # + # Additionally, this serves as a more general-purpose placeholder + # for missing TypeInfos in a number of places where the excuses + # for not being Optional are a little weaker. + # + # TypeInfo defines a __bool__ method that returns False for FakeInfo + # so that it can be conveniently tested against in the same way that it + # would be if things were properly optional. + def __init__(self, msg: str) -> None: + self.msg = msg + + def __getattribute__(self, attr: str) -> type: + # Handle __class__ so that isinstance still works... + if attr == "__class__": + return object.__getattribute__(self, attr) # type: ignore[no-any-return] + raise AssertionError(object.__getattribute__(self, "msg")) + + +VAR_NO_INFO: Final[TypeInfo] = FakeInfo("Var is lacking info") +CLASSDEF_NO_INFO: Final[TypeInfo] = FakeInfo("ClassDef is lacking info") +FUNC_NO_INFO: Final[TypeInfo] = FakeInfo("FuncBase for non-methods lack info") +MISSING_FALLBACK: Final = FakeInfo("fallback can't be filled out until semanal") + + +class TypeAlias(SymbolNode): + """ + A symbol node representing a type alias. + + Type alias is a static concept, in contrast to variables with types + like Type[...]. 
Namely: + * type aliases + - can be used in type context (annotations) + - cannot be re-assigned + * variables with type Type[...] + - cannot be used in type context + - but can be re-assigned + + An alias can be defined only by an assignment to a name (not any other lvalues). + + Such assignment defines an alias by default. To define a variable, + an explicit Type[...] annotation is required. As an exception, + at non-global scope non-subscripted rvalue creates a variable even without + an annotation. This exception exists to accommodate the common use case of + class-valued attributes. See SemanticAnalyzerPass2.check_and_set_up_type_alias + for details. + + Aliases can be generic. We use bound type variables for generic aliases, similar + to classes. Essentially, type aliases work as macros that expand textually. + The definition and expansion rules are following: + + 1. An alias targeting a generic class without explicit variables act as + the given class (this doesn't apply to TypedDict, Tuple and Callable, which + are not proper classes but special type constructors): + + A = List + AA = List[Any] + + x: A # same as List[Any] + x: A[int] # same as List[int] + + x: AA # same as List[Any] + x: AA[int] # Error! + + C = Callable # Same as Callable[..., Any] + T = Tuple # Same as Tuple[Any, ...] + + 2. An alias using explicit type variables in its rvalue expects + replacements (type arguments) for these variables. If missing, they + are treated as Any, like for other generics: + + B = List[Tuple[T, T]] + + x: B # same as List[Tuple[Any, Any]] + x: B[int] # same as List[Tuple[int, int]] + + def f(x: B[T]) -> T: ... # without T, Any would be used here + + 3. An alias can be defined using another aliases. In the definition + rvalue the Any substitution doesn't happen for top level unsubscripted + generic classes: + + A = List + B = A # here A is expanded to List, _not_ List[Any], + # to match the Python runtime behaviour + x: B[int] # same as List[int] + C = List[A] # this expands to List[List[Any]] + + AA = List[T] + D = AA # here AA expands to List[Any] + x: D[int] # Error! + + Note: the fact that we support aliases like `A = List` means that the target + type will be initially an instance type with wrong number of type arguments. + Such instances are all fixed either during or after main semantic analysis passes. + We therefore store the difference between `List` and `List[Any]` rvalues (targets) + using the `no_args` flag. + + Meaning of other fields: + + target: The target type. For generic aliases contains bound type variables + as nested types (currently TypeVar and ParamSpec are supported). + _fullname: Qualified name of this type alias. This is used in particular + to track fine-grained dependencies from aliases. + module: Module where the alias was defined. + alias_tvars: Type variables used to define this alias. + normalized: Used to distinguish between `A = List`, and `A = list`. Both + are internally stored using `builtins.list` (because `typing.List` is + itself an alias), while the second cannot be subscripted because of + Python runtime limitation. + line and column: Line and column on the original alias definition. 
+ eager: If True, immediately expand alias when referred to (useful for aliases + within functions that can't be looked up from the symbol table) + """ + + __slots__ = ( + "target", + "_fullname", + "module", + "alias_tvars", + "no_args", + "normalized", + "_is_recursive", + "eager", + "tvar_tuple_index", + "python_3_12_type_alias", + ) + + __match_args__ = ("name", "target", "alias_tvars", "no_args") + + def __init__( + self, + target: mypy.types.Type, + fullname: str, + module: str, + line: int, + column: int, + *, + alias_tvars: list[mypy.types.TypeVarLikeType] | None = None, + no_args: bool = False, + normalized: bool = False, + eager: bool = False, + python_3_12_type_alias: bool = False, + ) -> None: + self._fullname = fullname + self.module = module + self.target = target + if alias_tvars is None: + alias_tvars = [] + self.alias_tvars = alias_tvars + self.no_args = no_args + self.normalized = normalized + # This attribute is manipulated by TypeAliasType. If non-None, + # it is the cached value. + self._is_recursive: bool | None = None + self.eager = eager + self.python_3_12_type_alias = python_3_12_type_alias + self.tvar_tuple_index = None + for i, t in enumerate(alias_tvars): + if isinstance(t, mypy.types.TypeVarTupleType): + self.tvar_tuple_index = i + super().__init__(line, column) + + @classmethod + def from_tuple_type(cls, info: TypeInfo) -> TypeAlias: + """Generate an alias to the tuple type described by a given TypeInfo. + + NOTE: this doesn't set type alias type variables (for generic tuple types), + they must be set by the caller (when fully analyzed). + """ + assert info.tuple_type + # TODO: is it possible to refactor this to set the correct type vars here? + return TypeAlias( + info.tuple_type.copy_modified( + # Create an Instance similar to fill_typevars(). + fallback=mypy.types.Instance( + info, mypy.types.type_vars_as_args(info.defn.type_vars) + ) + ), + info.fullname, + info.module_name, + info.line, + info.column, + ) + + @classmethod + def from_typeddict_type(cls, info: TypeInfo) -> TypeAlias: + """Generate an alias to the TypedDict type described by a given TypeInfo. + + NOTE: this doesn't set type alias type variables (for generic TypedDicts), + they must be set by the caller (when fully analyzed). + """ + assert info.typeddict_type + # TODO: is it possible to refactor this to set the correct type vars here? + return TypeAlias( + info.typeddict_type.copy_modified( + # Create an Instance similar to fill_typevars(). 
+ fallback=mypy.types.Instance( + info, mypy.types.type_vars_as_args(info.defn.type_vars) + ) + ), + info.fullname, + info.module_name, + info.line, + info.column, + ) + + @property + def name(self) -> str: + return self._fullname.split(".")[-1] + + @property + def fullname(self) -> str: + return self._fullname + + @property + def has_param_spec_type(self) -> bool: + return any(isinstance(v, mypy.types.ParamSpecType) for v in self.alias_tvars) + + def accept(self, visitor: NodeVisitor[T]) -> T: + return visitor.visit_type_alias(self) + + def serialize(self) -> JsonDict: + data: JsonDict = { + ".class": "TypeAlias", + "fullname": self._fullname, + "module": self.module, + "target": self.target.serialize(), + "alias_tvars": [v.serialize() for v in self.alias_tvars], + "no_args": self.no_args, + "normalized": self.normalized, + "python_3_12_type_alias": self.python_3_12_type_alias, + } + return data + + @classmethod + def deserialize(cls, data: JsonDict) -> TypeAlias: + assert data[".class"] == "TypeAlias" + fullname = data["fullname"] + module = data["module"] + alias_tvars = [mypy.types.deserialize_type(v) for v in data["alias_tvars"]] + assert all(isinstance(t, mypy.types.TypeVarLikeType) for t in alias_tvars) + target = mypy.types.deserialize_type(data["target"]) + no_args = data["no_args"] + normalized = data["normalized"] + python_3_12_type_alias = data["python_3_12_type_alias"] + return cls( + target, + fullname, + module, + -1, + -1, + alias_tvars=cast(list[mypy.types.TypeVarLikeType], alias_tvars), + no_args=no_args, + normalized=normalized, + python_3_12_type_alias=python_3_12_type_alias, + ) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, TYPE_ALIAS) + write_str(data, self._fullname) + write_str(data, self.module) + self.target.write(data) + mypy.types.write_type_list(data, self.alias_tvars) + write_bool(data, self.no_args) + write_bool(data, self.normalized) + write_bool(data, self.python_3_12_type_alias) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> TypeAlias: + fullname = read_str(data) + module = read_str(data) + target = mypy.types.read_type(data) + alias_tvars = mypy.types.read_type_var_likes(data) + ret = TypeAlias( + target, + fullname, + module, + -1, + -1, + alias_tvars=alias_tvars, + no_args=read_bool(data), + normalized=read_bool(data), + python_3_12_type_alias=read_bool(data), + ) + assert read_tag(data) == END_TAG + return ret + + +class PlaceholderNode(SymbolNode): + """Temporary symbol node that will later become a real SymbolNode. + + These are only present during semantic analysis when using the new + semantic analyzer. These are created if some essential dependencies + of a definition are not yet complete. + + A typical use is for names imported from a module which is still + incomplete (within an import cycle): + + from m import f # Initially may create PlaceholderNode + + This is particularly important if the imported shadows a name from + an enclosing scope or builtins: + + from m import int # Placeholder avoids mixups with builtins.int + + Another case where this is useful is when there is another definition + or assignment: + + from m import f + def f() -> None: ... + + In the above example, the presence of PlaceholderNode allows us to + handle the second definition as a redefinition. + + They are also used to create PlaceholderType instances for types + that refer to incomplete types. Example: + + class C(Sequence[C]): ... 
+ + We create a PlaceholderNode (with becomes_typeinfo=True) for C so + that the type C in Sequence[C] can be bound. + + Attributes: + + fullname: Full name of the PlaceholderNode. + node: AST node that contains the definition that caused this to + be created. This is useful for tracking order of incomplete definitions + and for debugging. + becomes_typeinfo: If True, this refers something that could later + become a TypeInfo. It can't be used with type variables, in + particular, as this would cause issues with class type variable + detection. + + The long-term purpose of placeholder nodes/types is to evolve into + something that can support general recursive types. + """ + + __slots__ = ("_fullname", "node", "becomes_typeinfo") + + def __init__( + self, fullname: str, node: Node, line: int, *, becomes_typeinfo: bool = False + ) -> None: + self._fullname = fullname + self.node = node + self.becomes_typeinfo = becomes_typeinfo + self.line = line + + @property + def name(self) -> str: + return self._fullname.split(".")[-1] + + @property + def fullname(self) -> str: + return self._fullname + + def serialize(self) -> JsonDict: + assert False, "PlaceholderNode can't be serialized" + + def accept(self, visitor: NodeVisitor[T]) -> T: + return visitor.visit_placeholder_node(self) + + +class SymbolTableNode: + """Description of a name binding in a symbol table. + + These are only used as values in module (global), function (local) + and class symbol tables (see SymbolTable). The name that is bound is + the key in SymbolTable. + + Symbol tables don't contain direct references to AST nodes primarily + because there can be multiple symbol table references to a single + AST node (due to imports and aliases), and different references can + behave differently. This class describes the unique properties of + each reference. + + The most fundamental attribute is 'node', which is the AST node that + the name refers to. + + The kind is usually one of LDEF, GDEF or MDEF, depending on the scope + of the definition. These three kinds can usually be used + interchangeably and the difference between local, global and class + scopes is mostly descriptive, with no semantic significance. + However, some tools that consume mypy ASTs may care about these so + they should be correct. + + Attributes: + node: AST node of definition. Among others, this can be one of + FuncDef, Var, TypeInfo, TypeVarExpr or MypyFile -- or None + for cross_ref that hasn't been fixed up yet. + kind: Kind of node. Possible values: + - LDEF: local definition + - GDEF: global (module-level) definition + - MDEF: class member definition + - UNBOUND_IMPORTED: temporary kind for imported names (we + don't know the final kind yet) + module_public: If False, this name won't be imported via + 'from import *'. This has no effect on names within + classes. + module_hidden: If True, the name will be never exported (needed for + stub files) + cross_ref: For deserialized MypyFile nodes, the referenced module + name; for other nodes, optionally the name of the referenced object. + implicit: Was this defined by assignment to self attribute? + plugin_generated: Was this symbol generated by a plugin? + (And therefore needs to be removed in aststrip.) + no_serialize: Do not serialize this node if True. This is used to prevent + keys in the cache that refer to modules on which this file does not + depend. Currently this can happen if there is a module not in build + used e.g. 
like this: + import a.b.c # type: ignore + This will add a submodule symbol to parent module `a` symbol table, + but `a.b` is _not_ added as its dependency. Therefore, we should + not serialize these symbols as they may not be found during fixup + phase, instead they will be re-added during subsequent patch parents + phase. + TODO: Refactor build.py to make dependency tracking more transparent + and/or refactor look-up functions to not require parent patching. + + NOTE: No other attributes should be added to this class unless they + are shared by all node kinds. + """ + + __slots__ = ( + "kind", + "node", + "module_public", + "module_hidden", + "cross_ref", + "implicit", + "plugin_generated", + "no_serialize", + ) + + def __init__( + self, + kind: int, + node: SymbolNode | None, + module_public: bool = True, + implicit: bool = False, + module_hidden: bool = False, + *, + plugin_generated: bool = False, + no_serialize: bool = False, + ) -> None: + self.kind = kind + self.node = node + self.module_public = module_public + self.implicit = implicit + self.module_hidden = module_hidden + self.cross_ref: str | None = None + self.plugin_generated = plugin_generated + self.no_serialize = no_serialize + + @property + def fullname(self) -> str | None: + if self.node is not None: + return self.node.fullname + else: + return None + + @property + def type(self) -> mypy.types.Type | None: + node = self.node + if isinstance(node, (Var, SYMBOL_FUNCBASE_TYPES)) and node.type is not None: + return node.type + elif isinstance(node, Decorator): + return node.var.type + else: + return None + + def copy(self) -> SymbolTableNode: + new = SymbolTableNode( + self.kind, self.node, self.module_public, self.implicit, self.module_hidden + ) + new.cross_ref = self.cross_ref + return new + + def __str__(self) -> str: + s = f"{node_kinds[self.kind]}/{short_type(self.node)}" + if isinstance(self.node, SymbolNode): + s += f" ({self.node.fullname})" + # Include declared type of variables and functions. + if self.type is not None: + s += f" : {self.type}" + if self.cross_ref: + s += f" cross_ref:{self.cross_ref}" + return s + + def serialize(self, prefix: str, name: str) -> JsonDict: + """Serialize a SymbolTableNode. + + Args: + prefix: full name of the containing module or class; or None + name: name of this object relative to the containing object + """ + data: JsonDict = {".class": "SymbolTableNode", "kind": node_kinds[self.kind]} + if self.module_hidden: + data["module_hidden"] = True + if not self.module_public: + data["module_public"] = False + if self.implicit: + data["implicit"] = True + if self.plugin_generated: + data["plugin_generated"] = True + if isinstance(self.node, MypyFile): + data["cross_ref"] = self.node.fullname + else: + assert self.node is not None, f"{prefix}:{name}" + if prefix is not None: + fullname = self.node.fullname + if ( + "." in fullname + and fullname != prefix + "." + name + and not (isinstance(self.node, Var) and self.node.from_module_getattr) + ): + assert not isinstance( + self.node, PlaceholderNode + ), f"Definition of {fullname} is unexpectedly incomplete" + data["cross_ref"] = fullname + return data + data["node"] = self.node.serialize() + return data + + @classmethod + def deserialize(cls, data: JsonDict) -> SymbolTableNode: + assert data[".class"] == "SymbolTableNode" + kind = inverse_node_kinds[data["kind"]] + if "cross_ref" in data: + # This will be fixed up later. 
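+
+            # Illustrative aside (not part of mypy's own code): a SymbolTableNode is a
+            # thin wrapper recording how a single name is bound; fullname and type are
+            # delegated to the wrapped node. A sketch, assuming Var and GDEF as defined
+            # earlier in this module:
+            #
+            #     v = Var("answer")                  # no declared type yet
+            #     sym = SymbolTableNode(GDEF, v)     # GDEF: module-level (global) binding
+            #     assert sym.node is v
+            #     assert sym.type is None            # falls back to the Var's missing type
+            #     assert sym.module_public and not sym.module_hidden
+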
+ stnode = SymbolTableNode(kind, None) + stnode.cross_ref = data["cross_ref"] + else: + assert "node" in data, data + node = SymbolNode.deserialize(data["node"]) + stnode = SymbolTableNode(kind, node) + if "module_hidden" in data: + stnode.module_hidden = data["module_hidden"] + if "module_public" in data: + stnode.module_public = data["module_public"] + if "implicit" in data: + stnode.implicit = data["implicit"] + if "plugin_generated" in data: + stnode.plugin_generated = data["plugin_generated"] + return stnode + + def write(self, data: WriteBuffer, prefix: str, name: str) -> None: + write_tag(data, SYMBOL_TABLE_NODE) + write_int(data, self.kind) + write_bool(data, self.module_hidden) + write_bool(data, self.module_public) + write_bool(data, self.implicit) + write_bool(data, self.plugin_generated) + + cross_ref = None + if isinstance(self.node, MypyFile): + cross_ref = self.node.fullname + else: + assert self.node is not None, f"{prefix}:{name}" + if prefix is not None: + fullname = self.node.fullname + if ( + "." in fullname + and fullname != prefix + "." + name + and not (isinstance(self.node, Var) and self.node.from_module_getattr) + ): + assert not isinstance( + self.node, PlaceholderNode + ), f"Definition of {fullname} is unexpectedly incomplete" + cross_ref = fullname + + write_str_opt(data, cross_ref) + if cross_ref is None: + assert self.node is not None + self.node.write(data) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> SymbolTableNode: + assert read_tag(data) == SYMBOL_TABLE_NODE + sym = SymbolTableNode(read_int(data), None) + sym.module_hidden = read_bool(data) + sym.module_public = read_bool(data) + sym.implicit = read_bool(data) + sym.plugin_generated = read_bool(data) + cross_ref = read_str_opt(data) + if cross_ref is None: + sym.node = read_symbol(data) + else: + sym.cross_ref = cross_ref + assert read_tag(data) == END_TAG + return sym + + +class SymbolTable(dict[str, SymbolTableNode]): + """Static representation of a namespace dictionary. + + This is used for module, class and function namespaces. + """ + + __slots__ = () + + def __str__(self) -> str: + a: list[str] = [] + for key, value in self.items(): + # Filter out the implicit import of builtins. + if isinstance(value, SymbolTableNode): + if ( + value.fullname != "builtins" + and (value.fullname or "").split(".")[-1] not in implicit_module_attrs + ): + a.append(" " + str(key) + " : " + str(value)) + else: + # Used in debugging: + a.append(" ") # type: ignore[unreachable] + a = sorted(a) + a.insert(0, "SymbolTable(") + a[-1] += ")" + return "\n".join(a) + + def copy(self) -> SymbolTable: + return SymbolTable([(key, node.copy()) for key, node in self.items()]) + + def serialize(self, fullname: str) -> JsonDict: + data: JsonDict = {".class": "SymbolTable"} + for key, value in self.items(): + # Skip __builtins__: it's a reference to the builtins + # module that gets added to every module by + # SemanticAnalyzerPass2.visit_file(), but it shouldn't be + # accessed by users of the module. 
+ if key == "__builtins__" or value.no_serialize: + continue + data[key] = value.serialize(fullname, key) + return data + + @classmethod + def deserialize(cls, data: JsonDict) -> SymbolTable: + assert data[".class"] == "SymbolTable" + st = SymbolTable() + for key, value in data.items(): + if key != ".class": + st[key] = SymbolTableNode.deserialize(value) + return st + + def write(self, data: WriteBuffer, fullname: str) -> None: + size = 0 + for key, value in self.items(): + # Skip __builtins__: it's a reference to the builtins + # module that gets added to every module by + # SemanticAnalyzerPass2.visit_file(), but it shouldn't be + # accessed by users of the module. + if key == "__builtins__" or value.no_serialize: + continue + size += 1 + # We intentionally tag SymbolTable as a simple dictionary str -> SymbolTableNode. + write_tag(data, DICT_STR_GEN) + write_int_bare(data, size) + for key in sorted(self): + value = self[key] + if key == "__builtins__" or value.no_serialize: + continue + write_str_bare(data, key) + value.write(data, fullname, key) + + @classmethod + def read(cls, data: ReadBuffer) -> SymbolTable: + assert read_tag(data) == DICT_STR_GEN + size = read_int_bare(data) + return SymbolTable( + [(read_str_bare(data), SymbolTableNode.read(data)) for _ in range(size)] + ) + + +class DataclassTransformSpec: + """Specifies how a dataclass-like transform should be applied. The fields here are based on the + parameters accepted by `typing.dataclass_transform`.""" + + __slots__ = ( + "eq_default", + "order_default", + "kw_only_default", + "frozen_default", + "field_specifiers", + ) + + def __init__( + self, + *, + eq_default: bool | None = None, + order_default: bool | None = None, + kw_only_default: bool | None = None, + field_specifiers: tuple[str, ...] 
| None = None, + # Specified outside of PEP 681: + # frozen_default was added to CPythonin https://github.com/python/cpython/pull/99958 citing + # positive discussion in typing-sig + frozen_default: bool | None = None, + ) -> None: + self.eq_default = eq_default if eq_default is not None else True + self.order_default = order_default if order_default is not None else False + self.kw_only_default = kw_only_default if kw_only_default is not None else False + self.frozen_default = frozen_default if frozen_default is not None else False + self.field_specifiers = field_specifiers if field_specifiers is not None else () + + def serialize(self) -> JsonDict: + return { + "eq_default": self.eq_default, + "order_default": self.order_default, + "kw_only_default": self.kw_only_default, + "frozen_default": self.frozen_default, + "field_specifiers": list(self.field_specifiers), + } + + @classmethod + def deserialize(cls, data: JsonDict) -> DataclassTransformSpec: + return DataclassTransformSpec( + eq_default=data.get("eq_default"), + order_default=data.get("order_default"), + kw_only_default=data.get("kw_only_default"), + frozen_default=data.get("frozen_default"), + field_specifiers=tuple(data.get("field_specifiers", [])), + ) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, DT_SPEC) + write_bool(data, self.eq_default) + write_bool(data, self.order_default) + write_bool(data, self.kw_only_default) + write_bool(data, self.frozen_default) + write_str_list(data, self.field_specifiers) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> DataclassTransformSpec: + ret = DataclassTransformSpec( + eq_default=read_bool(data), + order_default=read_bool(data), + kw_only_default=read_bool(data), + frozen_default=read_bool(data), + field_specifiers=tuple(read_str_list(data)), + ) + assert read_tag(data) == END_TAG + return ret + + +def get_flags(node: Node, names: list[str]) -> list[str]: + return [name for name in names if getattr(node, name)] + + +def set_flags(node: Node, flags: list[str]) -> None: + for name in flags: + setattr(node, name, True) + + +def write_flags(data: WriteBuffer, node: SymbolNode, flags: list[str]) -> None: + for flag in flags: + write_bool(data, getattr(node, flag)) + + +def read_flags(data: ReadBuffer, node: SymbolNode, flags: list[str]) -> None: + for flag in flags: + if read_bool(data): + setattr(node, flag, True) + + +def get_member_expr_fullname(expr: MemberExpr) -> str | None: + """Return the qualified name representation of a member expression. + + Return a string of form foo.bar, foo.bar.baz, or similar, or None if the + argument cannot be represented in this form. 
+ """ + initial: str | None = None + if isinstance(expr.expr, NameExpr): + initial = expr.expr.name + elif isinstance(expr.expr, MemberExpr): + initial = get_member_expr_fullname(expr.expr) + if initial is None: + return None + return f"{initial}.{expr.name}" + + +deserialize_map: Final = { + key: obj.deserialize + for key, obj in globals().items() + if type(obj) is not FakeInfo + and isinstance(obj, type) + and issubclass(obj, SymbolNode) + and obj is not SymbolNode +} + + +def check_arg_kinds( + arg_kinds: list[ArgKind], nodes: list[T], fail: Callable[[str, T], None] +) -> None: + is_var_arg = False + is_kw_arg = False + seen_named = False + seen_opt = False + for kind, node in zip(arg_kinds, nodes): + if kind == ARG_POS: + if is_var_arg or is_kw_arg or seen_named or seen_opt: + fail( + "Required positional args may not appear after default, named or var args", + node, + ) + break + elif kind == ARG_OPT: + if is_var_arg or is_kw_arg or seen_named: + fail("Positional default args may not appear after named or var args", node) + break + seen_opt = True + elif kind == ARG_STAR: + if is_var_arg or is_kw_arg or seen_named: + fail("Var args may not appear after named or var args", node) + break + is_var_arg = True + elif kind == ARG_NAMED or kind == ARG_NAMED_OPT: + seen_named = True + if is_kw_arg: + fail("A **kwargs argument must be the last argument", node) + break + elif kind == ARG_STAR2: + if is_kw_arg: + fail("You may only have one **kwargs argument", node) + break + is_kw_arg = True + + +def check_arg_names( + names: Sequence[str | None], + nodes: list[T], + fail: Callable[[str, T], None], + description: str = "function definition", +) -> None: + seen_names: set[str | None] = set() + for name, node in zip(names, nodes): + if name is not None and name in seen_names: + fail(f'Duplicate argument "{name}" in {description}', node) + break + seen_names.add(name) + + +def is_class_var(expr: NameExpr) -> bool: + """Return whether the expression is ClassVar[...]""" + if isinstance(expr.node, Var): + return expr.node.is_classvar + return False + + +def is_final_node(node: SymbolNode | None) -> bool: + """Check whether `node` corresponds to a final attribute.""" + return isinstance(node, (Var, FuncDef, OverloadedFuncDef, Decorator)) and node.is_final + + +def get_func_def(typ: mypy.types.CallableType) -> SymbolNode | None: + definition = typ.definition + if isinstance(definition, Decorator): + definition = definition.func + return definition + + +def local_definitions( + names: SymbolTable, name_prefix: str, info: TypeInfo | None = None +) -> Iterator[Definition]: + """Iterate over local definitions (not imported) in a symbol table. + + Recursively iterate over class members and nested classes. + """ + # TODO: What should the name be? Or maybe remove it? + for name, symnode in names.items(): + shortname = name + if "-redef" in name: + # Restore original name from mangled name of multiply defined function + shortname = name.split("-redef")[0] + fullname = name_prefix + "." + shortname + node = symnode.node + if node and node.fullname == fullname: + yield fullname, symnode, info + if isinstance(node, TypeInfo): + yield from local_definitions(node.names, fullname, node) + + +# See docstring for mypy/cache.py for reserved tag ranges. 
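+
+# Illustrative aside (not part of mypy): check_arg_kinds/check_arg_names above validate
+# the shape of a signature rather than its types. A small sketch of how a caller might
+# exercise them (hypothetical fail() collector; the node list is arbitrary context):
+#
+#     errors: list[str] = []
+#
+#     def fail(msg: str, ctx: int) -> None:
+#         errors.append(msg)
+#
+#     # Roughly "def f(x=1, y): ..." -- a required positional arg after a default one:
+#     check_arg_kinds([ARG_OPT, ARG_POS], [0, 1], fail)
+#     assert errors == [
+#         "Required positional args may not appear after default, named or var args"
+#     ]
+#
+#     errors.clear()
+#     check_arg_names(["x", "x"], [0, 1], fail)
+#     assert errors == ['Duplicate argument "x" in function definition']
+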
+MYPY_FILE: Final[Tag] = 50 +OVERLOADED_FUNC_DEF: Final[Tag] = 51 +FUNC_DEF: Final[Tag] = 52 +DECORATOR: Final[Tag] = 53 +VAR: Final[Tag] = 54 +TYPE_VAR_EXPR: Final[Tag] = 55 +PARAM_SPEC_EXPR: Final[Tag] = 56 +TYPE_VAR_TUPLE_EXPR: Final[Tag] = 57 +TYPE_INFO: Final[Tag] = 58 +TYPE_ALIAS: Final[Tag] = 59 +CLASS_DEF: Final[Tag] = 60 +SYMBOL_TABLE_NODE: Final[Tag] = 61 + + +def read_symbol(data: ReadBuffer) -> SymbolNode: + tag = read_tag(data) + # The branches here are ordered manually by type "popularity". + if tag == VAR: + return Var.read(data) + if tag == FUNC_DEF: + return FuncDef.read(data) + if tag == DECORATOR: + return Decorator.read(data) + if tag == TYPE_INFO: + return TypeInfo.read(data) + if tag == OVERLOADED_FUNC_DEF: + return OverloadedFuncDef.read(data) + if tag == TYPE_VAR_EXPR: + return TypeVarExpr.read(data) + if tag == TYPE_ALIAS: + return TypeAlias.read(data) + if tag == PARAM_SPEC_EXPR: + return ParamSpecExpr.read(data) + if tag == TYPE_VAR_TUPLE_EXPR: + return TypeVarTupleExpr.read(data) + assert False, f"Unknown symbol tag {tag}" + + +def read_overload_part(data: ReadBuffer, tag: Tag | None = None) -> OverloadPart: + if tag is None: + tag = read_tag(data) + if tag == DECORATOR: + return Decorator.read(data) + if tag == FUNC_DEF: + return FuncDef.read(data) + assert False, f"Invalid tag for an OverloadPart {tag}" diff --git a/.venv/lib/python3.12/site-packages/mypy/operators.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/operators.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..766187a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/operators.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/operators.py b/.venv/lib/python3.12/site-packages/mypy/operators.py new file mode 100644 index 0000000..d1f050b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/operators.py @@ -0,0 +1,126 @@ +"""Information about Python operators""" + +from __future__ import annotations + +from typing import Final + +# Map from binary operator id to related method name (in Python 3). 
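+
+# Illustrative aside (not part of mypy): the tables below record, for each operator, the
+# dunder method it dispatches to and (further down) its reflected partner. A runnable
+# reminder of the underlying Python protocol, using a hypothetical Meters class:
+#
+#     class Meters:
+#         def __init__(self, n: int) -> None:
+#             self.n = n
+#
+#         def __add__(self, other: int) -> "Meters":    # "+" dispatches here first
+#             return Meters(self.n + other)
+#
+#         def __radd__(self, other: int) -> "Meters":   # reflected partner of __add__
+#             return Meters(other + self.n)
+#
+#     assert (Meters(2) + 3).n == 5    # left operand handles "+"
+#     assert (3 + Meters(2)).n == 5    # int gives up, so the reflected __radd__ runs
+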
+op_methods: Final = { + "+": "__add__", + "-": "__sub__", + "*": "__mul__", + "/": "__truediv__", + "%": "__mod__", + "divmod": "__divmod__", + "//": "__floordiv__", + "**": "__pow__", + "@": "__matmul__", + "&": "__and__", + "|": "__or__", + "^": "__xor__", + "<<": "__lshift__", + ">>": "__rshift__", + "==": "__eq__", + "!=": "__ne__", + "<": "__lt__", + ">=": "__ge__", + ">": "__gt__", + "<=": "__le__", + "in": "__contains__", +} + +op_methods_to_symbols: Final = {v: k for (k, v) in op_methods.items()} + +ops_falling_back_to_cmp: Final = {"__ne__", "__eq__", "__lt__", "__le__", "__gt__", "__ge__"} + + +ops_with_inplace_method: Final = { + "+", + "-", + "*", + "/", + "%", + "//", + "**", + "@", + "&", + "|", + "^", + "<<", + ">>", +} + +inplace_operator_methods: Final = {"__i" + op_methods[op][2:] for op in ops_with_inplace_method} + +reverse_op_methods: Final = { + "__add__": "__radd__", + "__sub__": "__rsub__", + "__mul__": "__rmul__", + "__truediv__": "__rtruediv__", + "__mod__": "__rmod__", + "__divmod__": "__rdivmod__", + "__floordiv__": "__rfloordiv__", + "__pow__": "__rpow__", + "__matmul__": "__rmatmul__", + "__and__": "__rand__", + "__or__": "__ror__", + "__xor__": "__rxor__", + "__lshift__": "__rlshift__", + "__rshift__": "__rrshift__", + "__eq__": "__eq__", + "__ne__": "__ne__", + "__lt__": "__gt__", + "__ge__": "__le__", + "__gt__": "__lt__", + "__le__": "__ge__", +} + +reverse_op_method_names: Final = set(reverse_op_methods.values()) + +# Suppose we have some class A. When we do A() + A(), Python will only check +# the output of A().__add__(A()) and skip calling the __radd__ method entirely. +# This shortcut is used only for the following methods: +op_methods_that_shortcut: Final = { + "__add__", + "__sub__", + "__mul__", + "__truediv__", + "__mod__", + "__divmod__", + "__floordiv__", + "__pow__", + "__matmul__", + "__and__", + "__or__", + "__xor__", + "__lshift__", + "__rshift__", +} + +normal_from_reverse_op: Final = {m: n for n, m in reverse_op_methods.items()} +reverse_op_method_set: Final = set(reverse_op_methods.values()) + +unary_op_methods: Final = {"-": "__neg__", "+": "__pos__", "~": "__invert__"} + +int_op_to_method: Final = { + "==": int.__eq__, + "is": int.__eq__, + "<": int.__lt__, + "<=": int.__le__, + "!=": int.__ne__, + "is not": int.__ne__, + ">": int.__gt__, + ">=": int.__ge__, +} + +flip_ops: Final = {"<": ">", "<=": ">=", ">": "<", ">=": "<="} +neg_ops: Final = { + "==": "!=", + "!=": "==", + "is": "is not", + "is not": "is", + "<": ">=", + "<=": ">", + ">": "<=", + ">=": "<", +} diff --git a/.venv/lib/python3.12/site-packages/mypy/options.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/options.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..a0df807 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/options.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/options.py b/.venv/lib/python3.12/site-packages/mypy/options.py new file mode 100644 index 0000000..39490c9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/options.py @@ -0,0 +1,631 @@ +from __future__ import annotations + +import pprint +import re +import sys +import sysconfig +import warnings +from re import Pattern +from typing import Any, Callable, Final + +from mypy import defaults +from mypy.errorcodes import ErrorCode, error_codes +from mypy.util import get_class_descriptors, replace_object_state + + +class BuildType: + STANDARD: Final = 0 + MODULE: Final = 1 + PROGRAM_TEXT: 
Final = 2 + + +PER_MODULE_OPTIONS: Final = { + # Please keep this list sorted + "allow_redefinition", + "allow_redefinition_new", + "allow_untyped_globals", + "always_false", + "always_true", + "check_untyped_defs", + "debug_cache", + "disable_error_code", + "disabled_error_codes", + "disallow_any_decorated", + "disallow_any_explicit", + "disallow_any_expr", + "disallow_any_generics", + "disallow_any_unimported", + "disallow_incomplete_defs", + "disallow_subclassing_any", + "disallow_untyped_calls", + "disallow_untyped_decorators", + "disallow_untyped_defs", + "enable_error_code", + "enabled_error_codes", + "extra_checks", + "follow_imports_for_stubs", + "follow_imports", + "follow_untyped_imports", + "ignore_errors", + "ignore_missing_imports", + "implicit_optional", + "implicit_reexport", + "local_partial_types", + "mypyc", + "strict_concatenate", + "strict_equality", + "strict_equality_for_none", + "strict_optional", + "warn_no_return", + "warn_return_any", + "warn_unreachable", + "warn_unused_ignores", +} + +OPTIONS_AFFECTING_CACHE: Final = ( + PER_MODULE_OPTIONS + | { + "platform", + "bazel", + "old_type_inference", + "plugins", + "disable_bytearray_promotion", + "disable_memoryview_promotion", + "strict_bytes", + "fixed_format_cache", + "untyped_calls_exclude", + } +) - {"debug_cache"} + +# Features that are currently (or were recently) incomplete/experimental +TYPE_VAR_TUPLE: Final = "TypeVarTuple" +UNPACK: Final = "Unpack" +PRECISE_TUPLE_TYPES: Final = "PreciseTupleTypes" +NEW_GENERIC_SYNTAX: Final = "NewGenericSyntax" +INLINE_TYPEDDICT: Final = "InlineTypedDict" +TYPE_FORM: Final = "TypeForm" +INCOMPLETE_FEATURES: Final = frozenset((PRECISE_TUPLE_TYPES, INLINE_TYPEDDICT, TYPE_FORM)) +COMPLETE_FEATURES: Final = frozenset((TYPE_VAR_TUPLE, UNPACK, NEW_GENERIC_SYNTAX)) + + +class Options: + """Options collected from flags.""" + + def __init__(self) -> None: + # Cache for clone_for_module() + self._per_module_cache: dict[str, Options] | None = None + + # -- build options -- + self.build_type = BuildType.STANDARD + self.python_version: tuple[int, int] = sys.version_info[:2] + # The executable used to search for PEP 561 packages. If this is None, + # then mypy does not search for PEP 561 packages. + self.python_executable: str | None = sys.executable + + # When cross compiling to emscripten, we need to rely on MACHDEP because + # sys.platform is the host build platform, not emscripten. + MACHDEP = sysconfig.get_config_var("MACHDEP") + if MACHDEP == "emscripten": + self.platform = MACHDEP + else: + self.platform = sys.platform + + self.custom_typing_module: str | None = None + self.custom_typeshed_dir: str | None = None + # The abspath() version of the above, we compute it once as an optimization. + self.abs_custom_typeshed_dir: str | None = None + self.mypy_path: list[str] = [] + self.report_dirs: dict[str, str] = {} + # Show errors in PEP 561 packages/site-packages modules + self.no_silence_site_packages = False + self.no_site_packages = False + self.ignore_missing_imports = False + # Is ignore_missing_imports set in a per-module section + self.ignore_missing_imports_per_module = False + # Typecheck modules without stubs or py.typed marker + self.follow_untyped_imports = False + self.follow_imports = "normal" # normal|silent|skip|error + # Whether to respect the follow_imports setting even for stub files. + # Intended to be used for disabling specific stubs. 
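+
+        # Illustrative aside (not part of mypy): from client code, import following is a
+        # plain string mode on the Options object, and it is one of the settings that can
+        # also be overridden per module. A minimal sketch:
+        #
+        #     opts = Options()
+        #     assert opts.follow_imports == "normal"         # default set above
+        #     opts.follow_imports = "skip"                    # normal|silent|skip|error
+        #     assert "follow_imports" in PER_MODULE_OPTIONS   # eligible for per-module use
+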
+ self.follow_imports_for_stubs = False + # PEP 420 namespace packages + # This allows definitions of packages without __init__.py and allows packages to span + # multiple directories. This flag affects both import discovery and the association of + # input files/modules/packages to the relevant file and fully qualified module name. + self.namespace_packages = True + # Use current directory and MYPYPATH to determine fully qualified module names of files + # passed by automatically considering their subdirectories as packages. This is only + # relevant if namespace packages are enabled, since otherwise examining __init__.py's is + # sufficient to determine module names for files. As a possible alternative, add a single + # top-level __init__.py to your packages. + self.explicit_package_bases = False + # File names, directory names or subpaths to avoid checking + self.exclude: list[str] = [] + self.exclude_gitignore: bool = False + + # disallow_any options + self.disallow_any_generics = False + self.disallow_any_unimported = False + self.disallow_any_expr = False + self.disallow_any_decorated = False + self.disallow_any_explicit = False + + # Disallow calling untyped functions from typed ones + self.disallow_untyped_calls = False + + # Always allow untyped calls for function coming from modules/packages + # in this list (each item effectively acts as a prefix match) + self.untyped_calls_exclude: list[str] = [] + + # Disallow defining untyped (or incompletely typed) functions + self.disallow_untyped_defs = False + + # Disallow defining incompletely typed functions + self.disallow_incomplete_defs = False + + # Type check unannotated functions + self.check_untyped_defs = False + + # Disallow decorating typed functions with untyped decorators + self.disallow_untyped_decorators = False + + # Disallow subclassing values of type 'Any' + self.disallow_subclassing_any = False + + # Also check typeshed for missing annotations + self.warn_incomplete_stub = False + + # Warn about casting an expression to its inferred type + self.warn_redundant_casts = False + + # Warn about falling off the end of a function returning non-None + self.warn_no_return = True + + # Warn about returning objects of type Any when the function is + # declared with a precise type + self.warn_return_any = False + + # Report importing or using deprecated features as errors instead of notes. + self.report_deprecated_as_note = False + + # Allow deprecated calls from function coming from modules/packages + # in this list (each item effectively acts as a prefix match) + self.deprecated_calls_exclude: list[str] = [] + + # Warn about unused '# type: ignore' comments + self.warn_unused_ignores = False + + # Warn about unused '[mypy-]' or '[[tool.mypy.overrides]]' config sections + self.warn_unused_configs = False + + # Files in which to ignore all non-fatal errors + self.ignore_errors = False + + # Apply strict None checking + self.strict_optional = True + + # Show "note: In function "foo":" messages. + self.show_error_context = False + + # Use nicer output (when possible). + self.color_output = True + self.error_summary = True + + # Assume arguments with default values of None are Optional + self.implicit_optional = False + + # Don't re-export names unless they are imported with `from ... 
as ...` + self.implicit_reexport = True + + # Suppress toplevel errors caused by missing annotations + self.allow_untyped_globals = False + + # Allow variable to be redefined with an arbitrary type in the same block + # and the same nesting level as the initialization + self.allow_redefinition = False + + # Allow flexible variable redefinition with an arbitrary type, in different + # blocks and and at different nesting levels + self.allow_redefinition_new = False + + # Prohibit equality, identity, and container checks for non-overlapping types. + # This makes 1 == '1', 1 in ['1'], and 1 is '1' errors. + self.strict_equality = False + + # Extend the logic of `strict_equality` to comparisons with `None`. + self.strict_equality_for_none = False + + # Disable treating bytearray and memoryview as subtypes of bytes + self.strict_bytes = False + + # Deprecated, use extra_checks instead. + self.strict_concatenate = False + + # Enable additional checks that are technically correct but impractical. + self.extra_checks = False + + # Report an error for any branches inferred to be unreachable as a result of + # type analysis. + self.warn_unreachable = False + + # Variable names considered True + self.always_true: list[str] = [] + + # Variable names considered False + self.always_false: list[str] = [] + + # Error codes to disable + self.disable_error_code: list[str] = [] + self.disabled_error_codes: set[ErrorCode] = set() + + # Error codes to enable + self.enable_error_code: list[str] = [] + self.enabled_error_codes: set[ErrorCode] = set() + + # Use script name instead of __main__ + self.scripts_are_modules = False + + # Config file name + self.config_file: str | None = None + + # A filename containing a JSON mapping from filenames to + # mtime/size/hash arrays, used to avoid having to recalculate + # source hashes as often. + self.quickstart_file: str | None = None + + # A comma-separated list of files/directories for mypy to type check; + # supports globbing + self.files: list[str] | None = None + + # A list of packages for mypy to type check + self.packages: list[str] | None = None + + # A list of modules for mypy to type check + self.modules: list[str] | None = None + + # Write junit.xml to given file + self.junit_xml: str | None = None + + self.junit_format: str = "global" # global|per_file + + # Caching and incremental checking options + self.incremental = True + self.cache_dir = defaults.CACHE_DIR + self.sqlite_cache = False + self.fixed_format_cache = False + self.debug_cache = False + self.skip_version_check = False + self.skip_cache_mtime_checks = False + self.fine_grained_incremental = False + # Include fine-grained dependencies in written cache files + self.cache_fine_grained = False + # Read cache files in fine-grained incremental mode (cache must include dependencies) + self.use_fine_grained_cache = False + + # Run tree.serialize() even if cache generation is disabled + self.debug_serialize = False + + # Tune certain behaviors when being used as a front-end to mypyc. Set per-module + # in modules being compiled. Not in the config file or command line. + self.mypyc = False + + # An internal flag to modify some type-checking logic while + # running inspections (e.g. don't expand function definitions). + # Not in the config file or command line. + self.inspections = False + + # Disable the memory optimization of freeing ASTs when + # possible. This isn't exposed as a command line option + # because it is intended for software integrating with + # mypy. (Like mypyc.) 
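+
+        # Illustrative aside (not part of mypy): strict_equality (set above) prohibits
+        # equality, identity and container checks between non-overlapping types. User code
+        # like the following would be flagged (reported under the comparison-overlap
+        # error code):
+        #
+        #     x: int = 1
+        #     if x == "1":            # int vs str never overlap
+        #         ...
+        #     if x in ["a", "b"]:     # int can never be in a list[str]
+        #         ...
+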
+ self.preserve_asts = False + + # If True, function and class docstrings will be extracted and retained. + # This isn't exposed as a command line option + # because it is intended for software integrating with + # mypy. (Like stubgen.) + self.include_docstrings = False + + # Paths of user plugins + self.plugins: list[str] = [] + + # Per-module options (raw) + self.per_module_options: dict[str, dict[str, object]] = {} + self._glob_options: list[tuple[str, Pattern[str]]] = [] + self.unused_configs: set[str] = set() + + # -- development options -- + self.verbosity = 0 # More verbose messages (for troubleshooting) + self.pdb = False + self.show_traceback = False + self.raise_exceptions = False + self.dump_type_stats = False + self.dump_inference_stats = False + self.dump_build_stats = False + self.enable_incomplete_feature: list[str] = [] + self.timing_stats: str | None = None + self.line_checking_stats: str | None = None + + # -- test options -- + # Stop after the semantic analysis phase + self.semantic_analysis_only = False + + # Use stub builtins fixtures to speed up tests + self.use_builtins_fixtures = False + + # This should only be set when running certain mypy tests. + # Use this sparingly to avoid tests diverging from non-test behavior. + self.test_env = False + + # -- experimental options -- + self.shadow_file: list[list[str]] | None = None + self.show_column_numbers: bool = False + self.show_error_end: bool = False + self.hide_error_codes = False + self.show_error_code_links = False + # Use soft word wrap and show trimmed source snippets with error location markers. + self.pretty = False + self.dump_graph = False + self.dump_deps = False + self.logical_deps = False + # If True, partial types can't span a module top level and a function + self.local_partial_types = False + # Some behaviors are changed when using Bazel (https://bazel.build). + self.bazel = False + # If True, export inferred types for all expressions as BuildResult.types + self.export_types = False + # List of package roots -- directories under these are packages even + # if they don't have __init__.py. + self.package_root: list[str] = [] + self.cache_map: dict[str, tuple[str, str]] = {} + # Don't properly free objects on exit, just kill the current process. + self.fast_exit = True + # fast path for finding modules from source set + self.fast_module_lookup = False + # Allow empty function bodies even if it is not safe, used for testing only. + self.allow_empty_bodies = False + # Used to transform source code before parsing if not None + # TODO: Make the type precise (AnyStr -> AnyStr) + self.transform_source: Callable[[Any], Any] | None = None + # Print full path to each file in the report. + self.show_absolute_path: bool = False + # Install missing stub packages if True + self.install_types = False + # Install missing stub packages in non-interactive mode (don't prompt for + # confirmation, and don't show any errors) + self.non_interactive = False + # When we encounter errors that may cause many additional errors, + # skip most errors after this many messages have been reported. + # -1 means unlimited. + self.many_errors_threshold = defaults.MANY_ERRORS_THRESHOLD + # Disable new type inference algorithm. + self.old_type_inference = False + # Disable expression cache (for debugging). + self.disable_expression_cache = False + # Export line-level, limited, fine-grained dependency information in cache data + # (undocumented feature). 
+ self.export_ref_info = False + + self.disable_bytearray_promotion = False + self.disable_memoryview_promotion = False + # Deprecated, Mypy only supports Python 3.9+ + self.force_uppercase_builtins = False + self.force_union_syntax = False + + # Sets custom output format + self.output: str | None = None + + # Output html file for mypyc -a + self.mypyc_annotation_file: str | None = None + # Skip writing C output files, but perform all other steps of a build (allows + # preserving manual tweaks to generated C file) + self.mypyc_skip_c_generation = False + + def use_lowercase_names(self) -> bool: + warnings.warn( + "options.use_lowercase_names() is deprecated and will be removed in a future version", + DeprecationWarning, + stacklevel=2, + ) + return True + + def use_or_syntax(self) -> bool: + if self.python_version >= (3, 10): + return not self.force_union_syntax + return False + + def use_star_unpack(self) -> bool: + return self.python_version >= (3, 11) + + def snapshot(self) -> dict[str, object]: + """Produce a comparable snapshot of this Option""" + # Under mypyc, we don't have a __dict__, so we need to do worse things. + d = dict(getattr(self, "__dict__", ())) + for k in get_class_descriptors(Options): + if hasattr(self, k): + d[k] = getattr(self, k) + # Remove private attributes from snapshot + d = {k: v for k, v in d.items() if not k.startswith("_")} + return d + + def __repr__(self) -> str: + return f"Options({pprint.pformat(self.snapshot())})" + + def process_error_codes(self, *, error_callback: Callable[[str], Any]) -> None: + # Process `--enable-error-code` and `--disable-error-code` flags + disabled_codes = set(self.disable_error_code) + enabled_codes = set(self.enable_error_code) + + valid_error_codes = set(error_codes.keys()) + + invalid_codes = (enabled_codes | disabled_codes) - valid_error_codes + if invalid_codes: + error_callback(f"Invalid error code(s): {', '.join(sorted(invalid_codes))}") + + self.disabled_error_codes |= {error_codes[code] for code in disabled_codes} + self.enabled_error_codes |= {error_codes[code] for code in enabled_codes} + + # Enabling an error code always overrides disabling + self.disabled_error_codes -= self.enabled_error_codes + + def process_incomplete_features( + self, *, error_callback: Callable[[str], Any], warning_callback: Callable[[str], Any] + ) -> None: + # Validate incomplete features. + for feature in self.enable_incomplete_feature: + if feature not in INCOMPLETE_FEATURES | COMPLETE_FEATURES: + error_callback(f"Unknown incomplete feature: {feature}") + if feature in COMPLETE_FEATURES: + warning_callback(f"Warning: {feature} is already enabled by default") + + def process_strict_bytes(self) -> None: + # Sync `--strict-bytes` and `--disable-{bytearray,memoryview}-promotion` + if self.strict_bytes: + # backwards compatibility + self.disable_bytearray_promotion = True + self.disable_memoryview_promotion = True + elif self.disable_bytearray_promotion and self.disable_memoryview_promotion: + # forwards compatibility + self.strict_bytes = True + + def apply_changes(self, changes: dict[str, object]) -> Options: + # Note: effects of this method *must* be idempotent. + new_options = Options() + # Under mypyc, we don't have a __dict__, so we need to do worse things. + replace_object_state(new_options, self, copy_dict=True) + for key, value in changes.items(): + setattr(new_options, key, value) + if changes.get("ignore_missing_imports"): + # This is the only option for which a per-module and a global + # option sometimes beheave differently. 
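+            # (Illustrative note: the flag set below records that a per-module section
+            # enabled ignore_missing_imports, so later logic can tell that apart from
+            # the global command-line/config flag.)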
+ new_options.ignore_missing_imports_per_module = True + + # These two act as overrides, so apply them when cloning. + # Similar to global codes enabling overrides disabling, so we start from latter. + new_options.disabled_error_codes = self.disabled_error_codes.copy() + new_options.enabled_error_codes = self.enabled_error_codes.copy() + for code_str in new_options.disable_error_code: + code = error_codes[code_str] + new_options.disabled_error_codes.add(code) + new_options.enabled_error_codes.discard(code) + for code_str in new_options.enable_error_code: + code = error_codes[code_str] + new_options.enabled_error_codes.add(code) + new_options.disabled_error_codes.discard(code) + return new_options + + def compare_stable(self, other_snapshot: dict[str, object]) -> bool: + """Compare options in a way that is stable for snapshot() -> apply_changes() roundtrip. + + This is needed because apply_changes() has non-trivial effects for some flags, so + Options().apply_changes(options.snapshot()) may result in a (slightly) different object. + """ + return ( + Options().apply_changes(self.snapshot()).snapshot() + == Options().apply_changes(other_snapshot).snapshot() + ) + + def build_per_module_cache(self) -> None: + self._per_module_cache = {} + + # Config precedence is as follows: + # 1. Concrete section names: foo.bar.baz + # 2. "Unstructured" glob patterns: foo.*.baz, in the order + # they appear in the file (last wins) + # 3. "Well-structured" wildcard patterns: foo.bar.*, in specificity order. + + # Since structured configs inherit from structured configs above them in the hierarchy, + # we need to process per-module configs in a careful order. + # We have to process foo.* before foo.bar.* before foo.bar, + # and we need to apply *.bar to foo.bar but not to foo.bar.*. + # To do this, process all well-structured glob configs before non-glob configs and + # exploit the fact that foo.* sorts earlier ASCIIbetically (unicodebetically?) + # than foo.bar.*. + # (A section being "processed last" results in its config "winning".) + # Unstructured glob configs are stored and are all checked for each module. + unstructured_glob_keys = [k for k in self.per_module_options.keys() if "*" in k[:-1]] + structured_keys = [k for k in self.per_module_options.keys() if "*" not in k[:-1]] + wildcards = sorted(k for k in structured_keys if k.endswith(".*")) + concrete = [k for k in structured_keys if not k.endswith(".*")] + + for glob in unstructured_glob_keys: + self._glob_options.append((glob, self.compile_glob(glob))) + + # We (for ease of implementation) treat unstructured glob + # sections as used if any real modules use them or if any + # concrete config sections use them. This means we need to + # track which get used while constructing. + self.unused_configs = set(unstructured_glob_keys) + + for key in wildcards + concrete: + # Find what the options for this key would be, just based + # on inheriting from parent configs. + options = self.clone_for_module(key) + # And then update it with its per-module options. + self._per_module_cache[key] = options.apply_changes(self.per_module_options[key]) + + # Add the more structured sections into unused configs, since + # they only count as used if actually used by a real module. + self.unused_configs.update(structured_keys) + + def clone_for_module(self, module: str) -> Options: + """Create an Options object that incorporates per-module options. 
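+
+        For example (illustrative): when looking up options for foo.bar.baz, a
+        section for foo.bar.baz itself is used if present; otherwise the closest
+        wildcard section among foo.bar.baz.*, foo.bar.* and foo.* is picked, and
+        any matching unstructured glob pattern (e.g. *.baz) is applied on top.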
+ + NOTE: Once this method is called all Options objects should be + considered read-only, else the caching might be incorrect. + """ + if self._per_module_cache is None: + self.build_per_module_cache() + assert self._per_module_cache is not None + + # If the module just directly has a config entry, use it. + if module in self._per_module_cache: + self.unused_configs.discard(module) + return self._per_module_cache[module] + + # If not, search for glob paths at all the parents. So if we are looking for + # options for foo.bar.baz, we search foo.bar.baz.*, foo.bar.*, foo.*, + # in that order, looking for an entry. + # This is technically quadratic in the length of the path, but module paths + # don't actually get all that long. + options = self + path = module.split(".") + for i in range(len(path), 0, -1): + key = ".".join(path[:i] + ["*"]) + if key in self._per_module_cache: + self.unused_configs.discard(key) + options = self._per_module_cache[key] + break + + # OK and *now* we need to look for unstructured glob matches. + # We only do this for concrete modules, not structured wildcards. + if not module.endswith(".*"): + for key, pattern in self._glob_options: + if pattern.match(module): + self.unused_configs.discard(key) + options = options.apply_changes(self.per_module_options[key]) + + # We could update the cache to directly point to modules once + # they have been looked up, but in testing this made things + # slower and not faster, so we don't bother. + + return options + + def compile_glob(self, s: str) -> Pattern[str]: + # Compile one of the glob patterns to a regex so that '.*' can + # match *zero or more* module sections. This means we compile + # '.*' into '(\..*)?'. + parts = s.split(".") + expr = re.escape(parts[0]) if parts[0] != "*" else ".*" + for part in parts[1:]: + expr += re.escape("." + part) if part != "*" else r"(\..*)?" + return re.compile(expr + "\\Z") + + def select_options_affecting_cache(self) -> dict[str, object]: + result: dict[str, object] = {} + for opt in OPTIONS_AFFECTING_CACHE: + val = getattr(self, opt) + if opt in ("disabled_error_codes", "enabled_error_codes"): + val = sorted([code.code for code in val]) + result[opt] = val + return result diff --git a/.venv/lib/python3.12/site-packages/mypy/parse.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/parse.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..d6a59a5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/parse.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/parse.py b/.venv/lib/python3.12/site-packages/mypy/parse.py new file mode 100644 index 0000000..ee61760 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/parse.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +from mypy.errors import Errors +from mypy.nodes import MypyFile +from mypy.options import Options + + +def parse( + source: str | bytes, + fnam: str, + module: str | None, + errors: Errors, + options: Options, + raise_on_error: bool = False, +) -> MypyFile: + """Parse a source file, without doing any semantic analysis. + + Return the parse tree. If errors is not provided, raise ParseError + on failure. Otherwise, use the errors object to report parse errors. + + The python_version (major, minor) option determines the Python syntax variant. 
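+
+    Illustrative call, using the parameter names from the signature above:
+
+        tree = parse(src, fnam="m.py", module="m", errors=errors, options=options)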
+ """ + if options.transform_source is not None: + source = options.transform_source(source) + import mypy.fastparse + + tree = mypy.fastparse.parse(source, fnam=fnam, module=module, errors=errors, options=options) + if raise_on_error and errors.is_errors(): + errors.raise_error() + return tree diff --git a/.venv/lib/python3.12/site-packages/mypy/partially_defined.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/partially_defined.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..8ddef5f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/partially_defined.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/partially_defined.py b/.venv/lib/python3.12/site-packages/mypy/partially_defined.py new file mode 100644 index 0000000..38154cf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/partially_defined.py @@ -0,0 +1,681 @@ +from __future__ import annotations + +from enum import Enum + +from mypy import checker, errorcodes +from mypy.messages import MessageBuilder +from mypy.nodes import ( + AssertStmt, + AssignmentExpr, + AssignmentStmt, + BreakStmt, + ClassDef, + Context, + ContinueStmt, + DictionaryComprehension, + Expression, + ExpressionStmt, + ForStmt, + FuncDef, + FuncItem, + GeneratorExpr, + GlobalDecl, + IfStmt, + Import, + ImportFrom, + LambdaExpr, + ListExpr, + Lvalue, + MatchStmt, + MypyFile, + NameExpr, + NonlocalDecl, + RaiseStmt, + ReturnStmt, + StarExpr, + SymbolTable, + TryStmt, + TupleExpr, + TypeAliasStmt, + WhileStmt, + WithStmt, + implicit_module_attrs, +) +from mypy.options import Options +from mypy.patterns import AsPattern, StarredPattern +from mypy.reachability import ALWAYS_TRUE, infer_pattern_value +from mypy.traverser import ExtendedTraverserVisitor +from mypy.types import Type, UninhabitedType, get_proper_type + + +class BranchState: + """BranchState contains information about variable definition at the end of a branching statement. + `if` and `match` are examples of branching statements. + + `may_be_defined` contains variables that were defined in only some branches. + `must_be_defined` contains variables that were defined in all branches. 
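+
+    For example (illustrative): after `if cond: x = 1; y = 2` / `else: y = 3`,
+    `y` ends up in `must_be_defined` while `x` ends up only in `may_be_defined`.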
+ """ + + def __init__( + self, + must_be_defined: set[str] | None = None, + may_be_defined: set[str] | None = None, + skipped: bool = False, + ) -> None: + if may_be_defined is None: + may_be_defined = set() + if must_be_defined is None: + must_be_defined = set() + + self.may_be_defined = set(may_be_defined) + self.must_be_defined = set(must_be_defined) + self.skipped = skipped + + def copy(self) -> BranchState: + return BranchState( + must_be_defined=set(self.must_be_defined), + may_be_defined=set(self.may_be_defined), + skipped=self.skipped, + ) + + +class BranchStatement: + def __init__(self, initial_state: BranchState | None = None) -> None: + if initial_state is None: + initial_state = BranchState() + self.initial_state = initial_state + self.branches: list[BranchState] = [ + BranchState( + must_be_defined=self.initial_state.must_be_defined, + may_be_defined=self.initial_state.may_be_defined, + ) + ] + + def copy(self) -> BranchStatement: + result = BranchStatement(self.initial_state) + result.branches = [b.copy() for b in self.branches] + return result + + def next_branch(self) -> None: + self.branches.append( + BranchState( + must_be_defined=self.initial_state.must_be_defined, + may_be_defined=self.initial_state.may_be_defined, + ) + ) + + def record_definition(self, name: str) -> None: + assert len(self.branches) > 0 + self.branches[-1].must_be_defined.add(name) + self.branches[-1].may_be_defined.discard(name) + + def delete_var(self, name: str) -> None: + assert len(self.branches) > 0 + self.branches[-1].must_be_defined.discard(name) + self.branches[-1].may_be_defined.discard(name) + + def record_nested_branch(self, state: BranchState) -> None: + assert len(self.branches) > 0 + current_branch = self.branches[-1] + if state.skipped: + current_branch.skipped = True + return + current_branch.must_be_defined.update(state.must_be_defined) + current_branch.may_be_defined.update(state.may_be_defined) + current_branch.may_be_defined.difference_update(current_branch.must_be_defined) + + def skip_branch(self) -> None: + assert len(self.branches) > 0 + self.branches[-1].skipped = True + + def is_possibly_undefined(self, name: str) -> bool: + assert len(self.branches) > 0 + return name in self.branches[-1].may_be_defined + + def is_undefined(self, name: str) -> bool: + assert len(self.branches) > 0 + branch = self.branches[-1] + return name not in branch.may_be_defined and name not in branch.must_be_defined + + def is_defined_in_a_branch(self, name: str) -> bool: + assert len(self.branches) > 0 + for b in self.branches: + if name in b.must_be_defined or name in b.may_be_defined: + return True + return False + + def done(self) -> BranchState: + # First, compute all vars, including skipped branches. We include skipped branches + # because our goal is to capture all variables that semantic analyzer would + # consider defined. + all_vars = set() + for b in self.branches: + all_vars.update(b.may_be_defined) + all_vars.update(b.must_be_defined) + # For the rest of the things, we only care about branches that weren't skipped. + non_skipped_branches = [b for b in self.branches if not b.skipped] + if non_skipped_branches: + must_be_defined = non_skipped_branches[0].must_be_defined + for b in non_skipped_branches[1:]: + must_be_defined.intersection_update(b.must_be_defined) + else: + must_be_defined = set() + # Everything that wasn't defined in all branches but was defined + # in at least one branch should be in `may_be_defined`! 
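+        # (Illustrative: all_vars = {"x", "y"}, must_be_defined = {"y"}
+        #  -> may_be_defined = {"x"}.)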
+ may_be_defined = all_vars.difference(must_be_defined) + return BranchState( + must_be_defined=must_be_defined, + may_be_defined=may_be_defined, + skipped=len(non_skipped_branches) == 0, + ) + + +class ScopeType(Enum): + Global = 1 + Class = 2 + Func = 3 + Generator = 4 + + +class Scope: + def __init__(self, stmts: list[BranchStatement], scope_type: ScopeType) -> None: + self.branch_stmts: list[BranchStatement] = stmts + self.scope_type = scope_type + self.undefined_refs: dict[str, set[NameExpr]] = {} + + def copy(self) -> Scope: + result = Scope([s.copy() for s in self.branch_stmts], self.scope_type) + result.undefined_refs = self.undefined_refs.copy() + return result + + def record_undefined_ref(self, o: NameExpr) -> None: + if o.name not in self.undefined_refs: + self.undefined_refs[o.name] = set() + self.undefined_refs[o.name].add(o) + + def pop_undefined_ref(self, name: str) -> set[NameExpr]: + return self.undefined_refs.pop(name, set()) + + +class DefinedVariableTracker: + """DefinedVariableTracker manages the state and scope for the UndefinedVariablesVisitor.""" + + def __init__(self) -> None: + # There's always at least one scope. Within each scope, there's at least one "global" BranchingStatement. + self.scopes: list[Scope] = [Scope([BranchStatement()], ScopeType.Global)] + # disable_branch_skip is used to disable skipping a branch due to a return/raise/etc. This is useful + # in things like try/except/finally statements. + self.disable_branch_skip = False + + def copy(self) -> DefinedVariableTracker: + result = DefinedVariableTracker() + result.scopes = [s.copy() for s in self.scopes] + result.disable_branch_skip = self.disable_branch_skip + return result + + def _scope(self) -> Scope: + assert len(self.scopes) > 0 + return self.scopes[-1] + + def enter_scope(self, scope_type: ScopeType) -> None: + assert len(self._scope().branch_stmts) > 0 + initial_state = None + if scope_type == ScopeType.Generator: + # Generators are special because they inherit the outer scope. + initial_state = self._scope().branch_stmts[-1].branches[-1] + self.scopes.append(Scope([BranchStatement(initial_state)], scope_type)) + + def exit_scope(self) -> None: + self.scopes.pop() + + def in_scope(self, scope_type: ScopeType) -> bool: + return self._scope().scope_type == scope_type + + def start_branch_statement(self) -> None: + assert len(self._scope().branch_stmts) > 0 + self._scope().branch_stmts.append( + BranchStatement(self._scope().branch_stmts[-1].branches[-1]) + ) + + def next_branch(self) -> None: + assert len(self._scope().branch_stmts) > 1 + self._scope().branch_stmts[-1].next_branch() + + def end_branch_statement(self) -> None: + assert len(self._scope().branch_stmts) > 1 + result = self._scope().branch_stmts.pop().done() + self._scope().branch_stmts[-1].record_nested_branch(result) + + def skip_branch(self) -> None: + # Only skip branch if we're outside of "root" branch statement. + if len(self._scope().branch_stmts) > 1 and not self.disable_branch_skip: + self._scope().branch_stmts[-1].skip_branch() + + def record_definition(self, name: str) -> None: + assert len(self.scopes) > 0 + assert len(self.scopes[-1].branch_stmts) > 0 + self._scope().branch_stmts[-1].record_definition(name) + + def delete_var(self, name: str) -> None: + assert len(self.scopes) > 0 + assert len(self.scopes[-1].branch_stmts) > 0 + self._scope().branch_stmts[-1].delete_var(name) + + def record_undefined_ref(self, o: NameExpr) -> None: + """Records an undefined reference. 
These can later be retrieved via `pop_undefined_ref`.""" + assert len(self.scopes) > 0 + self._scope().record_undefined_ref(o) + + def pop_undefined_ref(self, name: str) -> set[NameExpr]: + """If name has previously been reported as undefined, the NameExpr that was called will be returned.""" + assert len(self.scopes) > 0 + return self._scope().pop_undefined_ref(name) + + def is_possibly_undefined(self, name: str) -> bool: + assert len(self._scope().branch_stmts) > 0 + # A variable is undefined if it's in a set of `may_be_defined` but not in `must_be_defined`. + return self._scope().branch_stmts[-1].is_possibly_undefined(name) + + def is_defined_in_different_branch(self, name: str) -> bool: + """This will return true if a variable is defined in a branch that's not the current branch.""" + assert len(self._scope().branch_stmts) > 0 + stmt = self._scope().branch_stmts[-1] + if not stmt.is_undefined(name): + return False + for stmt in self._scope().branch_stmts: + if stmt.is_defined_in_a_branch(name): + return True + return False + + def is_undefined(self, name: str) -> bool: + assert len(self._scope().branch_stmts) > 0 + return self._scope().branch_stmts[-1].is_undefined(name) + + +class Loop: + def __init__(self) -> None: + self.has_break = False + + +class PossiblyUndefinedVariableVisitor(ExtendedTraverserVisitor): + """Detects the following cases: + - A variable that's defined only part of the time. + - If a variable is used before definition + + An example of a partial definition: + if foo(): + x = 1 + print(x) # Error: "x" may be undefined. + + Example of a used before definition: + x = y + y: int = 2 + + Note that this code does not detect variables not defined in any of the branches -- that is + handled by the semantic analyzer. + """ + + def __init__( + self, + msg: MessageBuilder, + type_map: dict[Expression, Type], + options: Options, + names: SymbolTable, + ) -> None: + self.msg = msg + self.type_map = type_map + self.options = options + self.builtins = SymbolTable() + builtins_mod = names.get("__builtins__", None) + if builtins_mod: + assert isinstance(builtins_mod.node, MypyFile) + self.builtins = builtins_mod.node.names + self.loops: list[Loop] = [] + self.try_depth = 0 + self.tracker = DefinedVariableTracker() + for name in implicit_module_attrs: + self.tracker.record_definition(name) + + def var_used_before_def(self, name: str, context: Context) -> None: + if self.msg.errors.is_error_code_enabled(errorcodes.USED_BEFORE_DEF): + self.msg.var_used_before_def(name, context) + + def variable_may_be_undefined(self, name: str, context: Context) -> None: + if self.msg.errors.is_error_code_enabled(errorcodes.POSSIBLY_UNDEFINED): + self.msg.variable_may_be_undefined(name, context) + + def process_definition(self, name: str) -> None: + # Was this name previously used? If yes, it's a used-before-definition error. + if not self.tracker.in_scope(ScopeType.Class): + refs = self.tracker.pop_undefined_ref(name) + for ref in refs: + if self.loops: + self.variable_may_be_undefined(name, ref) + else: + self.var_used_before_def(name, ref) + else: + # Errors in class scopes are caught by the semantic analyzer. 
+ pass + self.tracker.record_definition(name) + + def visit_global_decl(self, o: GlobalDecl) -> None: + for name in o.names: + self.process_definition(name) + super().visit_global_decl(o) + + def visit_nonlocal_decl(self, o: NonlocalDecl) -> None: + for name in o.names: + self.process_definition(name) + super().visit_nonlocal_decl(o) + + def process_lvalue(self, lvalue: Lvalue | None) -> None: + if isinstance(lvalue, NameExpr): + self.process_definition(lvalue.name) + elif isinstance(lvalue, StarExpr): + self.process_lvalue(lvalue.expr) + elif isinstance(lvalue, (ListExpr, TupleExpr)): + for item in lvalue.items: + self.process_lvalue(item) + + def visit_assignment_stmt(self, o: AssignmentStmt) -> None: + for lvalue in o.lvalues: + self.process_lvalue(lvalue) + super().visit_assignment_stmt(o) + + def visit_assignment_expr(self, o: AssignmentExpr) -> None: + o.value.accept(self) + self.process_lvalue(o.target) + + def visit_if_stmt(self, o: IfStmt) -> None: + for e in o.expr: + e.accept(self) + self.tracker.start_branch_statement() + for b in o.body: + if b.is_unreachable: + continue + b.accept(self) + self.tracker.next_branch() + if o.else_body: + if not o.else_body.is_unreachable: + o.else_body.accept(self) + else: + self.tracker.skip_branch() + self.tracker.end_branch_statement() + + def visit_match_stmt(self, o: MatchStmt) -> None: + o.subject.accept(self) + self.tracker.start_branch_statement() + for i in range(len(o.patterns)): + pattern = o.patterns[i] + pattern.accept(self) + guard = o.guards[i] + if guard is not None: + guard.accept(self) + if not o.bodies[i].is_unreachable: + o.bodies[i].accept(self) + else: + self.tracker.skip_branch() + is_catchall = infer_pattern_value(pattern) == ALWAYS_TRUE + if not is_catchall: + self.tracker.next_branch() + self.tracker.end_branch_statement() + + def visit_func_def(self, o: FuncDef) -> None: + self.process_definition(o.name) + super().visit_func_def(o) + + def visit_func(self, o: FuncItem) -> None: + if o.is_dynamic() and not self.options.check_untyped_defs: + return + + args = o.arguments or [] + # Process initializers (defaults) outside the function scope. + for arg in args: + if arg.initializer is not None: + arg.initializer.accept(self) + + self.tracker.enter_scope(ScopeType.Func) + for arg in args: + self.process_definition(arg.variable.name) + super().visit_var(arg.variable) + o.body.accept(self) + self.tracker.exit_scope() + + def visit_generator_expr(self, o: GeneratorExpr) -> None: + self.tracker.enter_scope(ScopeType.Generator) + for idx in o.indices: + self.process_lvalue(idx) + super().visit_generator_expr(o) + self.tracker.exit_scope() + + def visit_dictionary_comprehension(self, o: DictionaryComprehension) -> None: + self.tracker.enter_scope(ScopeType.Generator) + for idx in o.indices: + self.process_lvalue(idx) + super().visit_dictionary_comprehension(o) + self.tracker.exit_scope() + + def visit_for_stmt(self, o: ForStmt) -> None: + o.expr.accept(self) + self.process_lvalue(o.index) + o.index.accept(self) + self.tracker.start_branch_statement() + loop = Loop() + self.loops.append(loop) + o.body.accept(self) + self.tracker.next_branch() + self.tracker.end_branch_statement() + if o.else_body is not None: + # If the loop has a `break` inside, `else` is executed conditionally. + # If the loop doesn't have a `break` either the function will return or + # execute the `else`. 
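+            # (Illustrative: with `for i in xs: ... break` and `else: y = 0`, `y` is
+            # only conditionally defined after the loop, so the else body is wrapped
+            # in an extra branch below; without a `break` it is processed directly.)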
+ has_break = loop.has_break + if has_break: + self.tracker.start_branch_statement() + self.tracker.next_branch() + o.else_body.accept(self) + if has_break: + self.tracker.end_branch_statement() + self.loops.pop() + + def visit_return_stmt(self, o: ReturnStmt) -> None: + super().visit_return_stmt(o) + self.tracker.skip_branch() + + def visit_lambda_expr(self, o: LambdaExpr) -> None: + self.tracker.enter_scope(ScopeType.Func) + super().visit_lambda_expr(o) + self.tracker.exit_scope() + + def visit_assert_stmt(self, o: AssertStmt) -> None: + super().visit_assert_stmt(o) + if checker.is_false_literal(o.expr): + self.tracker.skip_branch() + + def visit_raise_stmt(self, o: RaiseStmt) -> None: + super().visit_raise_stmt(o) + self.tracker.skip_branch() + + def visit_continue_stmt(self, o: ContinueStmt) -> None: + super().visit_continue_stmt(o) + self.tracker.skip_branch() + + def visit_break_stmt(self, o: BreakStmt) -> None: + super().visit_break_stmt(o) + if self.loops: + self.loops[-1].has_break = True + self.tracker.skip_branch() + + def visit_expression_stmt(self, o: ExpressionStmt) -> None: + typ = self.type_map.get(o.expr) + if typ is None or isinstance(get_proper_type(typ), UninhabitedType): + self.tracker.skip_branch() + super().visit_expression_stmt(o) + + def visit_try_stmt(self, o: TryStmt) -> None: + """ + Note that finding undefined vars in `finally` requires different handling from + the rest of the code. In particular, we want to disallow skipping branches due to jump + statements in except/else clauses for finally but not for other cases. Imagine a case like: + def f() -> int: + try: + x = 1 + except: + # This jump statement needs to be handled differently depending on whether or + # not we're trying to process `finally` or not. + return 0 + finally: + # `x` may be undefined here. + pass + # `x` is always defined here. + return x + """ + self.try_depth += 1 + if o.finally_body is not None: + # In order to find undefined vars in `finally`, we need to + # process try/except with branch skipping disabled. However, for the rest of the code + # after finally, we need to process try/except with branch skipping enabled. + # Therefore, we need to process try/finally twice. + # Because processing is not idempotent, we should make a copy of the tracker. 
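+            # (In other words: process once with branch skipping disabled so the
+            # `finally` body is checked against the pessimistic state, restore the
+            # saved tracker, then process again normally for the code after the try.)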
+ old_tracker = self.tracker.copy() + self.tracker.disable_branch_skip = True + self.process_try_stmt(o) + self.tracker = old_tracker + self.process_try_stmt(o) + self.try_depth -= 1 + + def process_try_stmt(self, o: TryStmt) -> None: + """ + Processes try statement decomposing it into the following: + if ...: + body + else_body + elif ...: + except 1 + elif ...: + except 2 + else: + except n + finally + """ + self.tracker.start_branch_statement() + o.body.accept(self) + if o.else_body is not None: + o.else_body.accept(self) + if len(o.handlers) > 0: + assert len(o.handlers) == len(o.vars) == len(o.types) + for i in range(len(o.handlers)): + self.tracker.next_branch() + exc_type = o.types[i] + if exc_type is not None: + exc_type.accept(self) + var = o.vars[i] + if var is not None: + self.process_definition(var.name) + var.accept(self) + o.handlers[i].accept(self) + if var is not None: + self.tracker.delete_var(var.name) + self.tracker.end_branch_statement() + + if o.finally_body is not None: + o.finally_body.accept(self) + + def visit_while_stmt(self, o: WhileStmt) -> None: + o.expr.accept(self) + self.tracker.start_branch_statement() + loop = Loop() + self.loops.append(loop) + o.body.accept(self) + has_break = loop.has_break + if not checker.is_true_literal(o.expr): + # If this is a loop like `while True`, we can consider the body to be + # a single branch statement (we're guaranteed that the body is executed at least once). + # If not, call next_branch() to make all variables defined there conditional. + self.tracker.next_branch() + self.tracker.end_branch_statement() + if o.else_body is not None: + # If the loop has a `break` inside, `else` is executed conditionally. + # If the loop doesn't have a `break` either the function will return or + # execute the `else`. + if has_break: + self.tracker.start_branch_statement() + self.tracker.next_branch() + if o.else_body: + o.else_body.accept(self) + if has_break: + self.tracker.end_branch_statement() + self.loops.pop() + + def visit_as_pattern(self, o: AsPattern) -> None: + if o.name is not None: + self.process_lvalue(o.name) + super().visit_as_pattern(o) + + def visit_starred_pattern(self, o: StarredPattern) -> None: + if o.capture is not None: + self.process_lvalue(o.capture) + super().visit_starred_pattern(o) + + def visit_name_expr(self, o: NameExpr) -> None: + if o.name in self.builtins and self.tracker.in_scope(ScopeType.Global): + return + if self.tracker.is_possibly_undefined(o.name): + # A variable is only defined in some branches. + self.variable_may_be_undefined(o.name, o) + # We don't want to report the error on the same variable multiple times. + self.tracker.record_definition(o.name) + elif self.tracker.is_defined_in_different_branch(o.name): + # A variable is defined in one branch but used in a different branch. + if self.loops or self.try_depth > 0: + # If we're in a loop or in a try, we can't be sure that this variable + # is undefined. Report it as "may be undefined". + self.variable_may_be_undefined(o.name, o) + else: + self.var_used_before_def(o.name, o) + elif self.tracker.is_undefined(o.name): + # A variable is undefined. It could be due to two things: + # 1. A variable is just totally undefined + # 2. The variable is defined later in the code. + # Case (1) will be caught by semantic analyzer. Case (2) is a forward ref that should + # be caught by this visitor. Save the ref for later, so that if we see a definition, + # we know it's a used-before-definition scenario. 
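+            # (Illustrative: `print(x)` followed later by `x = 1` in the same scope is
+            # reported once the definition is reached and the saved reference is popped.)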
+ self.tracker.record_undefined_ref(o) + super().visit_name_expr(o) + + def visit_with_stmt(self, o: WithStmt) -> None: + for expr, idx in zip(o.expr, o.target): + expr.accept(self) + self.process_lvalue(idx) + o.body.accept(self) + + def visit_class_def(self, o: ClassDef) -> None: + self.process_definition(o.name) + self.tracker.enter_scope(ScopeType.Class) + super().visit_class_def(o) + self.tracker.exit_scope() + + def visit_import(self, o: Import) -> None: + for mod, alias in o.ids: + if alias is not None: + self.tracker.record_definition(alias) + else: + # When you do `import x.y`, only `x` becomes defined. + names = mod.split(".") + if names: + # `names` should always be nonempty, but we don't want mypy + # to crash on invalid code. + self.tracker.record_definition(names[0]) + super().visit_import(o) + + def visit_import_from(self, o: ImportFrom) -> None: + for mod, alias in o.names: + name = alias + if name is None: + name = mod + self.tracker.record_definition(name) + super().visit_import_from(o) + + def visit_type_alias_stmt(self, o: TypeAliasStmt) -> None: + # Type alias target may contain forward references + self.tracker.record_definition(o.name.name) diff --git a/.venv/lib/python3.12/site-packages/mypy/patterns.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/patterns.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..4c27af9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/patterns.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/patterns.py b/.venv/lib/python3.12/site-packages/mypy/patterns.py new file mode 100644 index 0000000..a01bf6a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/patterns.py @@ -0,0 +1,150 @@ +"""Classes for representing match statement patterns.""" + +from __future__ import annotations + +from typing import TypeVar + +from mypy_extensions import trait + +from mypy.nodes import Expression, NameExpr, Node, RefExpr +from mypy.visitor import PatternVisitor + +T = TypeVar("T") + + +@trait +class Pattern(Node): + """A pattern node.""" + + __slots__ = () + + def accept(self, visitor: PatternVisitor[T]) -> T: + raise RuntimeError("Not implemented", type(self)) + + +class AsPattern(Pattern): + """The pattern as """ + + # The python ast, and therefore also our ast merges capture, wildcard and as patterns into one + # for easier handling. + # If pattern is None this is a capture pattern. If name and pattern are both none this is a + # wildcard pattern. + # Only name being None should not happen but also won't break anything. 
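+    # (Illustrative: `case x:` is a capture pattern (pattern is None), `case _:` is a
+    # wildcard (both fields are None), and `case [1, 2] as y:` fills in both fields.)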
+ pattern: Pattern | None + name: NameExpr | None + + def __init__(self, pattern: Pattern | None, name: NameExpr | None) -> None: + super().__init__() + self.pattern = pattern + self.name = name + + def accept(self, visitor: PatternVisitor[T]) -> T: + return visitor.visit_as_pattern(self) + + +class OrPattern(Pattern): + """The pattern | | ...""" + + patterns: list[Pattern] + + def __init__(self, patterns: list[Pattern]) -> None: + super().__init__() + self.patterns = patterns + + def accept(self, visitor: PatternVisitor[T]) -> T: + return visitor.visit_or_pattern(self) + + +class ValuePattern(Pattern): + """The pattern x.y (or x.y.z, ...)""" + + expr: Expression + + def __init__(self, expr: Expression) -> None: + super().__init__() + self.expr = expr + + def accept(self, visitor: PatternVisitor[T]) -> T: + return visitor.visit_value_pattern(self) + + +class SingletonPattern(Pattern): + # This can be exactly True, False or None + value: bool | None + + def __init__(self, value: bool | None) -> None: + super().__init__() + self.value = value + + def accept(self, visitor: PatternVisitor[T]) -> T: + return visitor.visit_singleton_pattern(self) + + +class SequencePattern(Pattern): + """The pattern [, ...]""" + + patterns: list[Pattern] + + def __init__(self, patterns: list[Pattern]) -> None: + super().__init__() + self.patterns = patterns + + def accept(self, visitor: PatternVisitor[T]) -> T: + return visitor.visit_sequence_pattern(self) + + +class StarredPattern(Pattern): + # None corresponds to *_ in a list pattern. It will match multiple items but won't bind them to + # a name. + capture: NameExpr | None + + def __init__(self, capture: NameExpr | None) -> None: + super().__init__() + self.capture = capture + + def accept(self, visitor: PatternVisitor[T]) -> T: + return visitor.visit_starred_pattern(self) + + +class MappingPattern(Pattern): + keys: list[Expression] + values: list[Pattern] + rest: NameExpr | None + + def __init__( + self, keys: list[Expression], values: list[Pattern], rest: NameExpr | None + ) -> None: + super().__init__() + assert len(keys) == len(values) + self.keys = keys + self.values = values + self.rest = rest + + def accept(self, visitor: PatternVisitor[T]) -> T: + return visitor.visit_mapping_pattern(self) + + +class ClassPattern(Pattern): + """The pattern Cls(...)""" + + class_ref: RefExpr + positionals: list[Pattern] + keyword_keys: list[str] + keyword_values: list[Pattern] + + def __init__( + self, + class_ref: RefExpr, + positionals: list[Pattern], + keyword_keys: list[str], + keyword_values: list[Pattern], + ) -> None: + super().__init__() + assert len(keyword_keys) == len(keyword_values) + self.class_ref = class_ref + self.positionals = positionals + self.keyword_keys = keyword_keys + self.keyword_values = keyword_values + + def accept(self, visitor: PatternVisitor[T]) -> T: + return visitor.visit_class_pattern(self) diff --git a/.venv/lib/python3.12/site-packages/mypy/plugin.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/plugin.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..f8cbf77 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/plugin.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/plugin.py b/.venv/lib/python3.12/site-packages/mypy/plugin.py new file mode 100644 index 0000000..9019e3c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/plugin.py @@ -0,0 +1,927 @@ +"""Plugin system for extending mypy. 
+ +At large scale the plugin system works as following: + +* Plugins are collected from the corresponding mypy config file option + (either via paths to Python files, or installed Python modules) + and imported using importlib. + +* Every module should get an entry point function (called 'plugin' by default, + but may be overridden in the config file) that should accept a single string + argument that is a full mypy version (includes git commit hash for dev + versions) and return a subclass of mypy.plugins.Plugin. + +* All plugin class constructors should match the signature of mypy.plugin.Plugin + (i.e. should accept an mypy.options.Options object), and *must* call + super().__init__(). + +* At several steps during semantic analysis and type checking mypy calls + special `get_xxx` methods on user plugins with a single string argument that + is a fully qualified name (full name) of a relevant definition + (see mypy.plugin.Plugin method docstrings for details). + +* The plugins are called in the order they are passed in the config option. + Every plugin must decide whether to act on a given full name. The first + plugin that returns non-None object will be used. + +* The above decision should be made using the limited common API specified by + mypy.plugin.CommonPluginApi. + +* The callback returned by the plugin will be called with a larger context that + includes relevant current state (e.g. a default return type, or a default + attribute type) and a wider relevant API provider (e.g. + SemanticAnalyzerPluginInterface or CheckerPluginInterface). + +* The result of this is used for further processing. See various `XxxContext` + named tuples for details about which information is given to each hook. + +Plugin developers should ensure that their plugins work well in incremental and +daemon modes. In particular, plugins should not hold global state, and should +always call add_plugin_dependency() in plugin hooks called during semantic +analysis. See the method docstring for more details. + +There is no dedicated cache storage for plugins, but plugins can store +per-TypeInfo data in a special .metadata attribute that is serialized to the +mypy caches between incremental runs. To avoid collisions between plugins, they +are encouraged to store their state under a dedicated key coinciding with +plugin name in the metadata dictionary. Every value stored there must be +JSON-serializable. + +## Notes about the semantic analyzer + +Mypy 0.710 introduced a new semantic analyzer that changed how plugins are +expected to work in several notable ways (from mypy 0.730 the old semantic +analyzer is no longer available): + +1. The order of processing AST nodes in modules is different. The old semantic + analyzer processed modules in textual order, one module at a time. The new + semantic analyzer first processes the module top levels, including bodies of + any top-level classes and classes nested within classes. ("Top-level" here + means "not nested within a function/method".) Functions and methods are + processed only after module top levels have been finished. If there is an + import cycle, all module top levels in the cycle are processed before + processing any functions or methods. Each unit of processing (a module top + level or a function/method) is called a *target*. + + This also means that function signatures in the same module have not been + analyzed yet when analyzing the module top level. 
If you need access to + a function signature, you'll need to explicitly analyze the signature first + using `anal_type()`. + +2. Each target can be processed multiple times. This may happen if some forward + references are not ready yet, for example. This means that semantic analyzer + related plugin hooks can be called multiple times for the same full name. + These plugin methods must thus be idempotent. + +3. The `anal_type` API function returns None if some part of the type is not + available yet. If this happens, the current target being analyzed will be + *deferred*, which means that it will be processed again soon, in the hope + that additional dependencies will be available. This may happen if there are + forward references to types or inter-module references to types within an + import cycle. + + Note that if there is a circular definition, mypy may decide to stop + processing to avoid an infinite number of iterations. When this happens, + `anal_type` will generate an error and return an `AnyType` type object + during the final iteration (instead of None). + +4. There is a new API method `defer()`. This can be used to explicitly request + the current target to be reprocessed one more time. You don't need this + to call this if `anal_type` returns None, however. + +5. There is a new API property `final_iteration`, which is true once mypy + detected no progress during the previous iteration or if the maximum + semantic analysis iteration count has been reached. You must never + defer during the final iteration, as it will cause a crash. + +6. The `node` attribute of SymbolTableNode objects may contain a reference to + a PlaceholderNode object. This object means that this definition has not + been fully processed yet. If you encounter a PlaceholderNode, you should + defer unless it's the final iteration. If it's the final iteration, you + should generate an error message. It usually means that there's a cyclic + definition that cannot be resolved by mypy. PlaceholderNodes can only refer + to references inside an import cycle. If you are looking up things from + another module, such as the builtins, that is outside the current module or + import cycle, you can safely assume that you won't receive a placeholder. + +When testing your plugin, you should have a test case that forces a module top +level to be processed multiple times. The easiest way to do this is to include +a forward reference to a class in a top-level annotation. Example: + + c: C # Forward reference causes second analysis pass + class C: pass + +Note that a forward reference in a function signature won't trigger another +pass, since all functions are processed only after the top level has been fully +analyzed. 
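+
+A minimal end-to-end sketch (illustrative only; the target full name 'lib.make_id'
+is hypothetical and not part of mypy):
+
+    from typing import Callable, Optional
+
+    from mypy.plugin import FunctionContext, Plugin
+    from mypy.types import Type
+
+    class MyPlugin(Plugin):
+        def get_function_hook(
+            self, fullname: str
+        ) -> Optional[Callable[[FunctionContext], Type]]:
+            if fullname == "lib.make_id":
+                def hook(ctx: FunctionContext) -> Type:
+                    # A real hook would inspect ctx.arg_types / ctx.args here.
+                    return ctx.default_return_type
+                return hook
+            return None
+
+    def plugin(version: str) -> type[Plugin]:
+        return MyPlugin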
+""" + +from __future__ import annotations + +from abc import abstractmethod +from typing import TYPE_CHECKING, Any, Callable, NamedTuple, TypeVar + +from mypy_extensions import mypyc_attr, trait + +from mypy.errorcodes import ErrorCode +from mypy.errors import ErrorInfo +from mypy.lookup import lookup_fully_qualified +from mypy.message_registry import ErrorMessage +from mypy.nodes import ( + ArgKind, + CallExpr, + ClassDef, + Context, + Expression, + MypyFile, + SymbolTableNode, + TypeInfo, +) +from mypy.options import Options +from mypy.types import ( + CallableType, + FunctionLike, + Instance, + ProperType, + Type, + TypeList, + UnboundType, +) + +if TYPE_CHECKING: + from mypy.messages import MessageBuilder + from mypy.tvar_scope import TypeVarLikeScope + + +@trait +class TypeAnalyzerPluginInterface: + """Interface for accessing semantic analyzer functionality in plugins. + + Methods docstrings contain only basic info. Look for corresponding implementation + docstrings in typeanal.py for more details. + """ + + # An options object. Note: these are the cloned options for the current file. + # This might be different from Plugin.options (that contains default/global options) + # if there are per-file options in the config. This applies to all other interfaces + # in this file. + options: Options + + @abstractmethod + def fail(self, msg: str, ctx: Context, *, code: ErrorCode | None = None) -> None: + """Emit an error message at given location.""" + raise NotImplementedError + + @abstractmethod + def named_type(self, fullname: str, args: list[Type], /) -> Instance: + """Construct an instance of a builtin type with given name.""" + raise NotImplementedError + + @abstractmethod + def analyze_type(self, typ: Type, /) -> Type: + """Analyze an unbound type using the default mypy logic.""" + raise NotImplementedError + + @abstractmethod + def analyze_callable_args( + self, arglist: TypeList + ) -> tuple[list[Type], list[ArgKind], list[str | None]] | None: + """Find types, kinds, and names of arguments from extended callable syntax.""" + raise NotImplementedError + + +# A context for a hook that semantically analyzes an unbound type. +class AnalyzeTypeContext(NamedTuple): + type: UnboundType # Type to analyze + context: Context # Relevant location context (e.g. for error messages) + api: TypeAnalyzerPluginInterface + + +@mypyc_attr(allow_interpreted_subclasses=True) +class CommonPluginApi: + """ + A common plugin API (shared between semantic analysis and type checking phases) + that all plugin hooks get independently of the context. + """ + + # Global mypy options. + # Per-file options can be only accessed on various + # XxxPluginInterface classes. + options: Options + + @abstractmethod + def lookup_fully_qualified(self, fullname: str) -> SymbolTableNode | None: + """Lookup a symbol by its full name (including module). + + This lookup function available for all plugins. Return None if a name + is not found. This function doesn't support lookup from current scope. + Use SemanticAnalyzerPluginInterface.lookup_qualified() for this.""" + raise NotImplementedError + + +@trait +class CheckerPluginInterface: + """Interface for accessing type checker functionality in plugins. + + Methods docstrings contain only basic info. Look for corresponding implementation + docstrings in checker.py for more details. 
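+
+    Hooks receive an instance of this interface as `ctx.api`; for example
+    (illustrative), `ctx.api.named_generic_type("builtins.list", [t])` constructs
+    `list[t]` and `ctx.api.fail("some message", ctx.context)` reports an error.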
+ """ + + msg: MessageBuilder + options: Options + path: str + + # Type context for type inference + @property + @abstractmethod + def type_context(self) -> list[Type | None]: + """Return the type context of the plugin""" + raise NotImplementedError + + @abstractmethod + def fail( + self, msg: str | ErrorMessage, ctx: Context, /, *, code: ErrorCode | None = None + ) -> ErrorInfo | None: + """Emit an error message at given location.""" + raise NotImplementedError + + @abstractmethod + def named_generic_type(self, name: str, args: list[Type]) -> Instance: + """Construct an instance of a generic type with given type arguments.""" + raise NotImplementedError + + @abstractmethod + def get_expression_type(self, node: Expression, type_context: Type | None = None) -> Type: + """Checks the type of the given expression.""" + raise NotImplementedError + + +@trait +class SemanticAnalyzerPluginInterface: + """Interface for accessing semantic analyzer functionality in plugins. + + Methods docstrings contain only basic info. Look for corresponding implementation + docstrings in semanal.py for more details. + + # TODO: clean-up lookup functions. + """ + + modules: dict[str, MypyFile] + # Options for current file. + options: Options + cur_mod_id: str + msg: MessageBuilder + + @abstractmethod + def named_type(self, fullname: str, args: list[Type] | None = None) -> Instance: + """Construct an instance of a builtin type with given type arguments.""" + raise NotImplementedError + + @abstractmethod + def builtin_type(self, fully_qualified_name: str) -> Instance: + """Legacy function -- use named_type() instead.""" + # NOTE: Do not delete this since many plugins may still use it. + raise NotImplementedError + + @abstractmethod + def named_type_or_none(self, fullname: str, args: list[Type] | None = None) -> Instance | None: + """Construct an instance of a type with given type arguments. + + Return None if a type could not be constructed for the qualified + type name. This is possible when the qualified name includes a + module name and the module has not been imported. + """ + raise NotImplementedError + + @abstractmethod + def basic_new_typeinfo(self, name: str, basetype_or_fallback: Instance, line: int) -> TypeInfo: + raise NotImplementedError + + @abstractmethod + def parse_bool(self, expr: Expression) -> bool | None: + """Parse True/False literals.""" + raise NotImplementedError + + @abstractmethod + def parse_str_literal(self, expr: Expression) -> str | None: + """Parse string literals.""" + + @abstractmethod + def fail( + self, + msg: str, + ctx: Context, + serious: bool = False, + *, + blocker: bool = False, + code: ErrorCode | None = None, + ) -> None: + """Emit an error message at given location.""" + raise NotImplementedError + + @abstractmethod + def anal_type( + self, + typ: Type, + /, + *, + tvar_scope: TypeVarLikeScope | None = None, + allow_tuple_literal: bool = False, + allow_unbound_tvars: bool = False, + report_invalid_types: bool = True, + ) -> Type | None: + """Analyze an unbound type. + + Return None if some part of the type is not ready yet. In this + case the current target being analyzed will be deferred and + analyzed again. + """ + raise NotImplementedError + + @abstractmethod + def class_type(self, self_type: Type) -> Type: + """Generate type of first argument of class methods from type of self.""" + raise NotImplementedError + + @abstractmethod + def lookup_fully_qualified(self, fullname: str, /) -> SymbolTableNode: + """Lookup a symbol by its fully qualified name. 
+ + Raise an error if not found. + """ + raise NotImplementedError + + @abstractmethod + def lookup_fully_qualified_or_none(self, fullname: str, /) -> SymbolTableNode | None: + """Lookup a symbol by its fully qualified name. + + Return None if not found. + """ + raise NotImplementedError + + @abstractmethod + def lookup_qualified( + self, name: str, ctx: Context, suppress_errors: bool = False + ) -> SymbolTableNode | None: + """Lookup symbol using a name in current scope. + + This follows Python local->non-local->global->builtins rules. + """ + raise NotImplementedError + + @abstractmethod + def add_plugin_dependency(self, trigger: str, target: str | None = None) -> None: + """Specify semantic dependencies for generated methods/variables. + + If the symbol with full name given by trigger is found to be stale by mypy, + then the body of node with full name given by target will be re-checked. + By default, this is the node that is currently analyzed. + + For example, the dataclass plugin adds a generated __init__ method with + a signature that depends on types of attributes in ancestor classes. If any + attribute in an ancestor class gets stale (modified), we need to reprocess + the subclasses (and thus regenerate __init__ methods). + + This is used by fine-grained incremental mode (mypy daemon). See mypy/server/deps.py + for more details. + """ + raise NotImplementedError + + @abstractmethod + def add_symbol_table_node(self, name: str, symbol: SymbolTableNode) -> Any: + """Add node to global symbol table (or to nearest class if there is one).""" + raise NotImplementedError + + @abstractmethod + def qualified_name(self, name: str) -> str: + """Make qualified name using current module and enclosing class (if any).""" + raise NotImplementedError + + @abstractmethod + def defer(self) -> None: + """Call this to defer the processing of the current node. + + This will request an additional iteration of semantic analysis. + """ + raise NotImplementedError + + @property + @abstractmethod + def final_iteration(self) -> bool: + """Is this the final iteration of semantic analysis?""" + raise NotImplementedError + + @property + @abstractmethod + def is_stub_file(self) -> bool: + raise NotImplementedError + + @abstractmethod + def analyze_simple_literal_type(self, rvalue: Expression, is_final: bool) -> Type | None: + raise NotImplementedError + + +# A context for querying for configuration data about a module for +# cache invalidation purposes. +class ReportConfigContext(NamedTuple): + id: str # Module name + path: str # Module file path + is_check: bool # Is this invocation for checking whether the config matches + + +# A context for a function signature hook that infers a better signature for a +# function. Note that argument types aren't available yet. If you need them, +# you have to use a method hook instead. +class FunctionSigContext(NamedTuple): + args: list[list[Expression]] # Actual expressions for each formal argument + default_signature: CallableType # Original signature of the method + context: Context # Relevant location context (e.g. for error messages) + api: CheckerPluginInterface + + +# A context for a function hook that infers the return type of a function with +# a special signature. +# +# A no-op callback would just return the inferred return type, but a useful +# callback at least sometimes can infer a more precise type. 
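+# (Illustrative: a hook for a factory function could build a narrower Instance via
+# ctx.api.named_generic_type() and return that instead of ctx.default_return_type.)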
+class FunctionContext(NamedTuple): + arg_types: list[list[Type]] # List of actual caller types for each formal argument + arg_kinds: list[list[ArgKind]] # Ditto for argument kinds, see nodes.ARG_* constants + # Names of formal parameters from the callee definition, + # these will be sufficient in most cases. + callee_arg_names: list[str | None] + # Names of actual arguments in the call expression. For example, + # in a situation like this: + # def func(**kwargs) -> None: + # pass + # func(kw1=1, kw2=2) + # callee_arg_names will be ['kwargs'] and arg_names will be [['kw1', 'kw2']]. + arg_names: list[list[str | None]] + default_return_type: Type # Return type inferred from signature + args: list[list[Expression]] # Actual expressions for each formal argument + context: Context # Relevant location context (e.g. for error messages) + api: CheckerPluginInterface + + +# A context for a method signature hook that infers a better signature for a +# method. Note that argument types aren't available yet. If you need them, +# you have to use a method hook instead. +# TODO: document ProperType in the plugin changelog/update issue. +class MethodSigContext(NamedTuple): + type: ProperType # Base object type for method call + args: list[list[Expression]] # Actual expressions for each formal argument + default_signature: CallableType # Original signature of the method + context: Context # Relevant location context (e.g. for error messages) + api: CheckerPluginInterface + + +# A context for a method hook that infers the return type of a method with a +# special signature. +# +# This is very similar to FunctionContext (only differences are documented). +class MethodContext(NamedTuple): + type: ProperType # Base object type for method call + arg_types: list[list[Type]] # List of actual caller types for each formal argument + # see FunctionContext for details about names and kinds + arg_kinds: list[list[ArgKind]] + callee_arg_names: list[str | None] + arg_names: list[list[str | None]] + default_return_type: Type # Return type inferred by mypy + args: list[list[Expression]] # Lists of actual expressions for every formal argument + context: Context + api: CheckerPluginInterface + + +# A context for an attribute type hook that infers the type of an attribute. +class AttributeContext(NamedTuple): + type: ProperType # Type of object with attribute + default_attr_type: Type # Original attribute type + is_lvalue: bool # Whether the attribute is the target of an assignment + context: Context # Relevant location context (e.g. for error messages) + api: CheckerPluginInterface + + +# A context for a class hook that modifies the class definition. +class ClassDefContext(NamedTuple): + cls: ClassDef # The class definition + reason: Expression # The expression being applied (decorator, metaclass, base class) + api: SemanticAnalyzerPluginInterface + + +# A context for dynamic class definitions like +# Base = declarative_base() +class DynamicClassDefContext(NamedTuple): + call: CallExpr # The r.h.s. of dynamic class definition + name: str # The name this class is being assigned to + api: SemanticAnalyzerPluginInterface + + +@mypyc_attr(allow_interpreted_subclasses=True) +class Plugin(CommonPluginApi): + """Base class of all type checker plugins. + + This defines a no-op plugin. Subclasses can override some methods to + provide some actual functionality. + + All get_ methods are treated as pure functions (you should assume that + results might be cached). 
A plugin should return None from a get_ method + to give way to other plugins. + + Look at the comments of various *Context objects for additional information on + various hooks. + """ + + def __init__(self, options: Options) -> None: + self.options = options + self.python_version = options.python_version + # This can't be set in __init__ because it is executed too soon in build.py. + # Therefore, build.py *must* set it later before graph processing starts + # by calling set_modules(). + self._modules: dict[str, MypyFile] | None = None + + def set_modules(self, modules: dict[str, MypyFile]) -> None: + self._modules = modules + + def lookup_fully_qualified(self, fullname: str) -> SymbolTableNode | None: + assert self._modules is not None + return lookup_fully_qualified(fullname, self._modules) + + def report_config_data(self, ctx: ReportConfigContext) -> Any: + """Get representation of configuration data for a module. + + The data must be encodable as JSON and will be stored in the + cache metadata for the module. A mismatch between the cached + values and the returned will result in that module's cache + being invalidated and the module being rechecked. + + This can be called twice for each module, once after loading + the cache to check if it is valid and once while writing new + cache information. + + If is_check in the context is true, then the return of this + call will be checked against the cached version. Otherwise the + call is being made to determine what to put in the cache. This + can be used to allow consulting extra cache files in certain + complex situations. + + This can be used to incorporate external configuration information + that might require changes to typechecking. + """ + return None + + def get_additional_deps(self, file: MypyFile) -> list[tuple[int, str, int]]: + """Customize dependencies for a module. + + This hook allows adding in new dependencies for a module. It + is called after parsing a file but before analysis. This can + be useful if a library has dependencies that are dynamic based + on configuration information, for example. + + Returns a list of (priority, module name, line number) tuples. + + The line number can be -1 when there is not a known real line number. + + Priorities are defined in mypy.build (but maybe shouldn't be). + 10 is a good choice for priority. + """ + return [] + + def get_type_analyze_hook(self, fullname: str) -> Callable[[AnalyzeTypeContext], Type] | None: + """Customize behaviour of the type analyzer for given full names. + + This method is called during the semantic analysis pass whenever mypy sees an + unbound type. For example, while analysing this code: + + from lib import Special, Other + + var: Special + def func(x: Other[int]) -> None: + ... + + this method will be called with 'lib.Special', and then with 'lib.Other'. + The callback returned by plugin must return an analyzed type, + i.e. an instance of `mypy.types.Type`. + """ + return None + + def get_function_signature_hook( + self, fullname: str + ) -> Callable[[FunctionSigContext], FunctionLike] | None: + """Adjust the signature of a function. + + This method is called before type checking a function call. Plugin + may infer a better type for the function. + + from lib import Class, do_stuff + + do_stuff(42) + Class() + + This method will be called with 'lib.do_stuff' and then with 'lib.Class'. + """ + return None + + def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None: + """Adjust the return type of a function call. 
+ + This method is called after type checking a call. Plugin may adjust the return + type inferred by mypy, and/or emit some error messages. Note, this hook is also + called for class instantiation calls, so that in this example: + + from lib import Class, do_stuff + + do_stuff(42) + Class() + + This method will be called with 'lib.do_stuff' and then with 'lib.Class'. + """ + return None + + def get_method_signature_hook( + self, fullname: str + ) -> Callable[[MethodSigContext], FunctionLike] | None: + """Adjust the signature of a method. + + This method is called before type checking a method call. Plugin + may infer a better type for the method. The hook is also called for special + Python dunder methods except __init__ and __new__ (use get_function_hook to customize + class instantiation). This function is called with the method full name using + the class where it was _defined_. For example, in this code: + + from lib import Special + + class Base: + def method(self, arg: Any) -> Any: + ... + class Derived(Base): + ... + + var: Derived + var.method(42) + + x: Special + y = x[0] + + this method is called with '__main__.Base.method', and then with + 'lib.Special.__getitem__'. + """ + return None + + def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None: + """Adjust return type of a method call. + + This is the same as get_function_hook(), but is called with the + method full name (again, using the class where the method is defined). + """ + return None + + def get_attribute_hook(self, fullname: str) -> Callable[[AttributeContext], Type] | None: + """Adjust type of an instance attribute. + + This method is called with attribute full name using the class of the instance where + the attribute was defined (or Var.info.fullname for generated attributes). + + For classes without __getattr__ or __getattribute__, this hook is only called for + names of fields/properties (but not methods) that exist in the instance MRO. + + For classes that implement __getattr__ or __getattribute__, this hook is called + for all fields/properties, including nonexistent ones (but still not methods). + + For example: + + class Base: + x: Any + def __getattr__(self, attr: str) -> Any: ... + + class Derived(Base): + ... + + var: Derived + var.x + var.y + + get_attribute_hook is called with '__main__.Base.x' and '__main__.Base.y'. + However, if we had not implemented __getattr__ on Base, you would only get + the callback for 'var.x'; 'var.y' would produce an error without calling the hook. + """ + return None + + def get_class_attribute_hook(self, fullname: str) -> Callable[[AttributeContext], Type] | None: + """ + Adjust type of a class attribute. + + This method is called with attribute full name using the class where the attribute was + defined (or Var.info.fullname for generated attributes). + + For example: + + class Cls: + x: Any + + Cls.x + + get_class_attribute_hook is called with '__main__.Cls.x' as fullname. + """ + return None + + def get_class_decorator_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: + """Update class definition for given class decorators. + + The plugin can modify a TypeInfo _in place_ (for example add some generated + methods to the symbol table). This hook is called after the class body was + semantically analyzed, but *there may still be placeholders* (typically + caused by forward references). + + NOTE: Usually get_class_decorator_hook_2 is the better option, since it + guarantees that there are no placeholders. 
+ + The hook is called with full names of all class decorators. + + The hook can be called multiple times per class, so it must be + idempotent. + """ + return None + + def get_class_decorator_hook_2( + self, fullname: str + ) -> Callable[[ClassDefContext], bool] | None: + """Update class definition for given class decorators. + + Similar to get_class_decorator_hook, but this runs in a later pass when + placeholders have been resolved. + + The hook can return False if some base class hasn't been + processed yet using class hooks. It causes all class hooks + (that are run in this same pass) to be invoked another time for + the file(s) currently being processed. + + The hook can be called multiple times per class, so it must be + idempotent. + """ + return None + + def get_metaclass_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: + """Update class definition for given declared metaclasses. + + Same as get_class_decorator_hook() but for metaclasses. Note: + this hook will be only called for explicit metaclasses, not for + inherited ones. + + TODO: probably it should also be called on inherited metaclasses. + """ + return None + + def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: + """Update class definition for given base classes. + + Same as get_class_decorator_hook() but for base classes. Base classes + don't need to refer to TypeInfos, if a base class refers to a variable with + Any type, this hook will still be called. + """ + return None + + def get_customize_class_mro_hook( + self, fullname: str + ) -> Callable[[ClassDefContext], None] | None: + """Customize MRO for given classes. + + The plugin can modify the class MRO _in place_. This method is called + with the class full name before its body was semantically analyzed. + """ + return None + + def get_dynamic_class_hook( + self, fullname: str + ) -> Callable[[DynamicClassDefContext], None] | None: + """Semantically analyze a dynamic class definition. + + This plugin hook allows one to semantically analyze dynamic class definitions like: + + from lib import dynamic_class + + X = dynamic_class('X', []) + + For such definition, this hook will be called with 'lib.dynamic_class'. + The plugin should create the corresponding TypeInfo, and place it into a relevant + symbol table, e.g. using ctx.api.add_symbol_table_node(). + """ + return None + + +T = TypeVar("T") + + +class ChainedPlugin(Plugin): + """A plugin that represents a sequence of chained plugins. + + Each lookup method returns the hook for the first plugin that + reports a match. + + This class should not be subclassed -- use Plugin as the base class + for all plugins. + """ + + # TODO: Support caching of lookup results (through a LRU cache, for example). + + def __init__(self, options: Options, plugins: list[Plugin]) -> None: + """Initialize chained plugin. + + Assume that the child plugins aren't mutated (results may be cached). 
+ """ + super().__init__(options) + self._plugins = plugins + + def set_modules(self, modules: dict[str, MypyFile]) -> None: + for plugin in self._plugins: + plugin.set_modules(modules) + + def report_config_data(self, ctx: ReportConfigContext) -> Any: + config_data = [plugin.report_config_data(ctx) for plugin in self._plugins] + return config_data if any(x is not None for x in config_data) else None + + def get_additional_deps(self, file: MypyFile) -> list[tuple[int, str, int]]: + deps = [] + for plugin in self._plugins: + deps.extend(plugin.get_additional_deps(file)) + return deps + + def get_type_analyze_hook(self, fullname: str) -> Callable[[AnalyzeTypeContext], Type] | None: + # Micro-optimization: Inline iteration over plugins + for plugin in self._plugins: + hook = plugin.get_type_analyze_hook(fullname) + if hook is not None: + return hook + return None + + def get_function_signature_hook( + self, fullname: str + ) -> Callable[[FunctionSigContext], FunctionLike] | None: + # Micro-optimization: Inline iteration over plugins + for plugin in self._plugins: + hook = plugin.get_function_signature_hook(fullname) + if hook is not None: + return hook + return None + + def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None: + return self._find_hook(lambda plugin: plugin.get_function_hook(fullname)) + + def get_method_signature_hook( + self, fullname: str + ) -> Callable[[MethodSigContext], FunctionLike] | None: + # Micro-optimization: Inline iteration over plugins + for plugin in self._plugins: + hook = plugin.get_method_signature_hook(fullname) + if hook is not None: + return hook + return None + + def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None: + # Micro-optimization: Inline iteration over plugins + for plugin in self._plugins: + hook = plugin.get_method_hook(fullname) + if hook is not None: + return hook + return None + + def get_attribute_hook(self, fullname: str) -> Callable[[AttributeContext], Type] | None: + # Micro-optimization: Inline iteration over plugins + for plugin in self._plugins: + hook = plugin.get_attribute_hook(fullname) + if hook is not None: + return hook + return None + + def get_class_attribute_hook(self, fullname: str) -> Callable[[AttributeContext], Type] | None: + return self._find_hook(lambda plugin: plugin.get_class_attribute_hook(fullname)) + + def get_class_decorator_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: + return self._find_hook(lambda plugin: plugin.get_class_decorator_hook(fullname)) + + def get_class_decorator_hook_2( + self, fullname: str + ) -> Callable[[ClassDefContext], bool] | None: + return self._find_hook(lambda plugin: plugin.get_class_decorator_hook_2(fullname)) + + def get_metaclass_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: + return self._find_hook(lambda plugin: plugin.get_metaclass_hook(fullname)) + + def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: + return self._find_hook(lambda plugin: plugin.get_base_class_hook(fullname)) + + def get_customize_class_mro_hook( + self, fullname: str + ) -> Callable[[ClassDefContext], None] | None: + return self._find_hook(lambda plugin: plugin.get_customize_class_mro_hook(fullname)) + + def get_dynamic_class_hook( + self, fullname: str + ) -> Callable[[DynamicClassDefContext], None] | None: + return self._find_hook(lambda plugin: plugin.get_dynamic_class_hook(fullname)) + + def _find_hook(self, lookup: Callable[[Plugin], T]) -> T | None: + for 
plugin in self._plugins: + hook = lookup(plugin) + if hook is not None: + return hook + return None diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/__init__.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/plugins/__init__.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..d9dc61f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/plugins/__init__.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/__init__.py b/.venv/lib/python3.12/site-packages/mypy/plugins/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/attrs.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/plugins/attrs.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..69acc33 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/plugins/attrs.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/attrs.py b/.venv/lib/python3.12/site-packages/mypy/plugins/attrs.py new file mode 100644 index 0000000..47c6ad9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/plugins/attrs.py @@ -0,0 +1,1183 @@ +"""Plugin for supporting the attrs library (http://www.attrs.org)""" + +from __future__ import annotations + +from collections import defaultdict +from collections.abc import Iterable, Mapping +from functools import reduce +from typing import Final, Literal, cast + +import mypy.plugin # To avoid circular imports. +from mypy.applytype import apply_generic_arguments +from mypy.errorcodes import LITERAL_REQ +from mypy.expandtype import expand_type, expand_type_by_instance +from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type +from mypy.meet import meet_types +from mypy.messages import format_type_bare +from mypy.nodes import ( + ARG_NAMED, + ARG_NAMED_OPT, + ARG_OPT, + ARG_POS, + MDEF, + Argument, + AssignmentStmt, + CallExpr, + Context, + Decorator, + Expression, + FuncDef, + IndexExpr, + JsonDict, + LambdaExpr, + ListExpr, + MemberExpr, + NameExpr, + OverloadedFuncDef, + PlaceholderNode, + RefExpr, + SymbolTableNode, + TempNode, + TupleExpr, + TypeApplication, + TypeInfo, + TypeVarExpr, + Var, + is_class_var, +) +from mypy.plugin import SemanticAnalyzerPluginInterface +from mypy.plugins.common import ( + _get_argument, + _get_bool_argument, + _get_decorator_bool_argument, + add_attribute_to_class, + add_method_to_class, + deserialize_and_fixup_type, +) +from mypy.server.trigger import make_wildcard_trigger +from mypy.state import state +from mypy.typeops import ( + get_type_vars, + make_simplified_union, + map_type_from_supertype, + type_object_type, +) +from mypy.types import ( + AnyType, + CallableType, + FunctionLike, + Instance, + LiteralType, + NoneType, + Overloaded, + ProperType, + TupleType, + Type, + TypeOfAny, + TypeType, + TypeVarId, + TypeVarType, + UninhabitedType, + UnionType, + get_proper_type, +) +from mypy.typevars import fill_typevars +from mypy.util import unmangle + +# The names of the different functions that create classes or arguments. 
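+#
+# Illustrative user code that these names are matched against (not part of
+# this module):
+#
+#     import attr
+#
+#     @attr.s(auto_attribs=True)
+#     class Point:
+#         x: int
+#         y: int = 0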
+attr_class_makers: Final = {"attr.s", "attr.attrs", "attr.attributes"} +attr_dataclass_makers: Final = {"attr.dataclass"} +attr_frozen_makers: Final = {"attr.frozen", "attrs.frozen"} +attr_define_makers: Final = {"attr.define", "attr.mutable", "attrs.define", "attrs.mutable"} +attr_attrib_makers: Final = {"attr.ib", "attr.attrib", "attr.attr", "attr.field", "attrs.field"} +attr_optional_converters: Final = {"attr.converters.optional", "attrs.converters.optional"} + +SELF_TVAR_NAME: Final = "_AT" +MAGIC_ATTR_NAME: Final = "__attrs_attrs__" +MAGIC_ATTR_CLS_NAME_TEMPLATE: Final = "__{}_AttrsAttributes__" # The tuple subclass pattern. +ATTRS_INIT_NAME: Final = "__attrs_init__" + + +class Converter: + """Holds information about a `converter=` argument""" + + def __init__(self, init_type: Type | None = None, ret_type: Type | None = None) -> None: + self.init_type = init_type + self.ret_type = ret_type + + +class Attribute: + """The value of an attr.ib() call.""" + + def __init__( + self, + name: str, + alias: str | None, + info: TypeInfo, + has_default: bool, + init: bool, + kw_only: bool, + converter: Converter | None, + context: Context, + init_type: Type | None, + ) -> None: + self.name = name + self.alias = alias + self.info = info + self.has_default = has_default + self.init = init + self.kw_only = kw_only + self.converter = converter + self.context = context + self.init_type = init_type + + def argument(self, ctx: mypy.plugin.ClassDefContext) -> Argument: + """Return this attribute as an argument to __init__.""" + assert self.init + init_type: Type | None = None + if self.converter: + if self.converter.init_type: + init_type = self.converter.init_type + if init_type and self.init_type and self.converter.ret_type: + # The converter return type should be the same type as the attribute type. + # Copy type vars from attr type to converter. + converter_vars = get_type_vars(self.converter.ret_type) + init_vars = get_type_vars(self.init_type) + if converter_vars and len(converter_vars) == len(init_vars): + variables = { + binder.id: arg for binder, arg in zip(converter_vars, init_vars) + } + init_type = expand_type(init_type, variables) + else: + ctx.api.fail("Cannot determine __init__ type from converter", self.context) + init_type = AnyType(TypeOfAny.from_error) + else: # There is no converter, the init type is the normal type. + init_type = self.init_type or self.info[self.name].type + + unannotated = False + if init_type is None: + unannotated = True + # Convert type not set to Any. + init_type = AnyType(TypeOfAny.unannotated) + else: + proper_type = get_proper_type(init_type) + if isinstance(proper_type, AnyType): + if proper_type.type_of_any == TypeOfAny.unannotated: + unannotated = True + + if unannotated and ctx.api.options.disallow_untyped_defs: + # This is a compromise. If you don't have a type here then the + # __init__ will be untyped. But since the __init__ is added it's + # pointing at the decorator. So instead we also show the error in the + # assignment, which is where you would fix the issue. + node = self.info[self.name].node + assert node is not None + ctx.api.msg.need_annotation_for_var(node, self.context) + + if self.kw_only: + arg_kind = ARG_NAMED_OPT if self.has_default else ARG_NAMED + else: + arg_kind = ARG_OPT if self.has_default else ARG_POS + + # Attrs removes leading underscores when creating the __init__ arguments. 
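+        # Illustrative: an attribute declared as "_id" becomes the __init__
+        # parameter "id", unless an explicit alias was given.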
+ name = self.alias or self.name.lstrip("_") + return Argument(Var(name, init_type), init_type, None, arg_kind) + + def serialize(self) -> JsonDict: + """Serialize this object so it can be saved and restored.""" + return { + "name": self.name, + "alias": self.alias, + "has_default": self.has_default, + "init": self.init, + "kw_only": self.kw_only, + "has_converter": self.converter is not None, + "converter_init_type": ( + self.converter.init_type.serialize() + if self.converter and self.converter.init_type + else None + ), + "context_line": self.context.line, + "context_column": self.context.column, + "init_type": self.init_type.serialize() if self.init_type else None, + } + + @classmethod + def deserialize( + cls, info: TypeInfo, data: JsonDict, api: SemanticAnalyzerPluginInterface + ) -> Attribute: + """Return the Attribute that was serialized.""" + raw_init_type = data["init_type"] + init_type = deserialize_and_fixup_type(raw_init_type, api) if raw_init_type else None + raw_converter_init_type = data["converter_init_type"] + converter_init_type = ( + deserialize_and_fixup_type(raw_converter_init_type, api) + if raw_converter_init_type + else None + ) + + return Attribute( + data["name"], + data["alias"], + info, + data["has_default"], + data["init"], + data["kw_only"], + Converter(converter_init_type) if data["has_converter"] else None, + Context(line=data["context_line"], column=data["context_column"]), + init_type, + ) + + def expand_typevar_from_subtype(self, sub_type: TypeInfo) -> None: + """Expands type vars in the context of a subtype when an attribute is inherited + from a generic super type.""" + if self.init_type: + self.init_type = map_type_from_supertype(self.init_type, sub_type, self.info) + else: + self.init_type = None + + +def _determine_eq_order(ctx: mypy.plugin.ClassDefContext) -> bool: + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + value of order. + """ + cmp = _get_decorator_optional_bool_argument(ctx, "cmp") + eq = _get_decorator_optional_bool_argument(ctx, "eq") + order = _get_decorator_optional_bool_argument(ctx, "order") + + if cmp is not None and any((eq is not None, order is not None)): + ctx.api.fail('Don\'t mix "cmp" with "eq" and "order"', ctx.reason) + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + return cmp + + # If left None, equality is on and ordering mirrors equality. + if eq is None: + eq = True + + if order is None: + order = eq + + if eq is False and order is True: + ctx.api.fail("eq must be True if order is True", ctx.reason) + + return order + + +def _get_decorator_optional_bool_argument( + ctx: mypy.plugin.ClassDefContext, name: str, default: bool | None = None +) -> bool | None: + """Return the Optional[bool] argument for the decorator. + + This handles both @decorator(...) and @decorator. + """ + if isinstance(ctx.reason, CallExpr): + attr_value = _get_argument(ctx.reason, name) + if attr_value: + if isinstance(attr_value, NameExpr): + if attr_value.fullname == "builtins.True": + return True + if attr_value.fullname == "builtins.False": + return False + if attr_value.fullname == "builtins.None": + return None + ctx.api.fail( + f'"{name}" argument must be a True, False, or None literal', + ctx.reason, + code=LITERAL_REQ, + ) + return default + return default + else: + return default + + +def attr_tag_callback(ctx: mypy.plugin.ClassDefContext) -> None: + """Record that we have an attrs class in the main semantic analysis pass. 
+ + The later pass implemented by attr_class_maker_callback will use this + to detect attrs classes in base classes. + """ + # The value is ignored, only the existence matters. + ctx.cls.info.metadata["attrs_tag"] = {} + + +def attr_class_maker_callback( + ctx: mypy.plugin.ClassDefContext, + auto_attribs_default: bool | None = False, + frozen_default: bool = False, + slots_default: bool = False, +) -> bool: + """Add necessary dunder methods to classes decorated with attr.s. + + attrs is a package that lets you define classes without writing dull boilerplate code. + + At a quick glance, the decorator searches the class body for assignments of `attr.ib`s (or + annotated variables if auto_attribs=True), then depending on how the decorator is called, + it will add an __init__ or all the compare methods. + For frozen=True it will turn the attrs into properties. + + Hashability will be set according to https://www.attrs.org/en/stable/hashing.html. + + See https://www.attrs.org/en/stable/how-does-it-work.html for information on how attrs works. + + If this returns False, some required metadata was not ready yet, and we need another + pass. + """ + with state.strict_optional_set(ctx.api.options.strict_optional): + # This hook is called during semantic analysis, but it uses a bunch of + # type-checking ops, so it needs the strict optional set properly. + return attr_class_maker_callback_impl( + ctx, auto_attribs_default, frozen_default, slots_default + ) + + +def attr_class_maker_callback_impl( + ctx: mypy.plugin.ClassDefContext, + auto_attribs_default: bool | None, + frozen_default: bool, + slots_default: bool, +) -> bool: + info = ctx.cls.info + + init = _get_decorator_bool_argument(ctx, "init", True) + frozen = _get_frozen(ctx, frozen_default) + order = _determine_eq_order(ctx) + slots = _get_decorator_bool_argument(ctx, "slots", slots_default) + + auto_attribs = _get_decorator_optional_bool_argument(ctx, "auto_attribs", auto_attribs_default) + kw_only = _get_decorator_bool_argument(ctx, "kw_only", False) + match_args = _get_decorator_bool_argument(ctx, "match_args", True) + + for super_info in ctx.cls.info.mro[1:-1]: + if "attrs_tag" in super_info.metadata and "attrs" not in super_info.metadata: + # Super class is not ready yet. Request another pass. + return False + + attributes = _analyze_class(ctx, auto_attribs, kw_only) + + # Check if attribute types are ready. + for attr in attributes: + node = info.get(attr.name) + if node is None: + # This name is likely blocked by some semantic analysis error that + # should have been reported already. + _add_empty_metadata(info) + return True + + _add_attrs_magic_attribute(ctx, [(attr.name, info[attr.name].type) for attr in attributes]) + if slots: + _add_slots(ctx, attributes) + if match_args and ctx.api.options.python_version[:2] >= (3, 10): + # `.__match_args__` is only added for python3.10+, but the argument + # exists for earlier versions as well. + _add_match_args(ctx, attributes) + + # Save the attributes so that subclasses can reuse them. + ctx.cls.info.metadata["attrs"] = { + "attributes": [attr.serialize() for attr in attributes], + "frozen": frozen, + } + + adder = MethodAdder(ctx) + # If __init__ is not being generated, attrs still generates it as __attrs_init__ instead. + _add_init(ctx, attributes, adder, "__init__" if init else ATTRS_INIT_NAME) + + if order: + _add_order(ctx, adder) + if frozen: + _make_frozen(ctx, attributes) + # Frozen classes are hashable by default, even if inheriting from non-frozen ones. 
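+        # Illustrative (user code, not part of this module):
+        #
+        #     @attr.s(frozen=True)
+        #     class C:
+        #         x = attr.ib()
+        #
+        #     hash(C(1))  # works by default for frozen attrs classes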
+ hashable: bool | None = _get_decorator_bool_argument( + ctx, "hash", True + ) and _get_decorator_bool_argument(ctx, "unsafe_hash", True) + else: + hashable = _get_decorator_optional_bool_argument(ctx, "unsafe_hash") + if hashable is None: # unspecified + hashable = _get_decorator_optional_bool_argument(ctx, "hash") + + eq = _get_decorator_optional_bool_argument(ctx, "eq") + has_own_hash = "__hash__" in ctx.cls.info.names + + if has_own_hash or (hashable is None and eq is False): + pass # Do nothing. + elif hashable: + # We copy the `__hash__` signature from `object` to make them hashable. + ctx.cls.info.names["__hash__"] = ctx.cls.info.mro[-1].names["__hash__"] + else: + _remove_hashability(ctx) + + return True + + +def _get_frozen(ctx: mypy.plugin.ClassDefContext, frozen_default: bool) -> bool: + """Return whether this class is frozen.""" + if _get_decorator_bool_argument(ctx, "frozen", frozen_default): + return True + # Subclasses of frozen classes are frozen so check that. + for super_info in ctx.cls.info.mro[1:-1]: + if "attrs" in super_info.metadata and super_info.metadata["attrs"]["frozen"]: + return True + return False + + +def _analyze_class( + ctx: mypy.plugin.ClassDefContext, auto_attribs: bool | None, kw_only: bool +) -> list[Attribute]: + """Analyze the class body of an attr maker, its parents, and return the Attributes found. + + auto_attribs=True means we'll generate attributes from type annotations also. + auto_attribs=None means we'll detect which mode to use. + kw_only=True means that all attributes created here will be keyword only args in __init__. + """ + own_attrs: dict[str, Attribute] = {} + if auto_attribs is None: + auto_attribs = _detect_auto_attribs(ctx) + + # Walk the body looking for assignments and decorators. + for stmt in ctx.cls.defs.body: + if isinstance(stmt, AssignmentStmt): + for attr in _attributes_from_assignment(ctx, stmt, auto_attribs, kw_only): + # When attrs are defined twice in the same body we want to use the 2nd definition + # in the 2nd location. So remove it from the OrderedDict. + # Unless it's auto_attribs in which case we want the 2nd definition in the + # 1st location. + if not auto_attribs and attr.name in own_attrs: + del own_attrs[attr.name] + own_attrs[attr.name] = attr + elif isinstance(stmt, Decorator): + _cleanup_decorator(stmt, own_attrs) + + for attribute in own_attrs.values(): + # Even though these look like class level assignments we want them to look like + # instance level assignments. + if attribute.name in ctx.cls.info.names: + node = ctx.cls.info.names[attribute.name].node + if isinstance(node, PlaceholderNode): + # This node is not ready yet. + continue + assert isinstance(node, Var), node + node.is_initialized_in_class = False + + # Traverse the MRO and collect attributes from the parents. + taken_attr_names = set(own_attrs) + super_attrs = [] + for super_info in ctx.cls.info.mro[1:-1]: + if "attrs" in super_info.metadata: + # Each class depends on the set of attributes in its attrs ancestors. + ctx.api.add_plugin_dependency(make_wildcard_trigger(super_info.fullname)) + + for data in super_info.metadata["attrs"]["attributes"]: + # Only add an attribute if it hasn't been defined before. This + # allows for overwriting attribute definitions by subclassing. 
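+                # Illustrative: if a base class declares `x: int = attr.ib()` and a
+                # subclass re-declares `x: str = attr.ib()`, the subclass definition
+                # is the one used for the generated __init__.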
+ if data["name"] not in taken_attr_names: + a = Attribute.deserialize(super_info, data, ctx.api) + a.expand_typevar_from_subtype(ctx.cls.info) + super_attrs.append(a) + taken_attr_names.add(a.name) + attributes = super_attrs + list(own_attrs.values()) + + # Check the init args for correct default-ness. Note: This has to be done after all the + # attributes for all classes have been read, because subclasses can override parents. + last_default = False + + for i, attribute in enumerate(attributes): + if not attribute.init: + continue + + if attribute.kw_only: + # Keyword-only attributes don't care whether they are default or not. + continue + + # If the issue comes from merging different classes, report it + # at the class definition point. + context = attribute.context if i >= len(super_attrs) else ctx.cls + + if not attribute.has_default and last_default: + ctx.api.fail("Non-default attributes not allowed after default attributes.", context) + last_default |= attribute.has_default + + return attributes + + +def _add_empty_metadata(info: TypeInfo) -> None: + """Add empty metadata to mark that we've finished processing this class.""" + info.metadata["attrs"] = {"attributes": [], "frozen": False} + + +def _detect_auto_attribs(ctx: mypy.plugin.ClassDefContext) -> bool: + """Return whether auto_attribs should be enabled or disabled. + + It's disabled if there are any unannotated attribs() + """ + for stmt in ctx.cls.defs.body: + if isinstance(stmt, AssignmentStmt): + for lvalue in stmt.lvalues: + lvalues, rvalues = _parse_assignments(lvalue, stmt) + + if len(lvalues) != len(rvalues): + # This means we have some assignment that isn't 1 to 1. + # It can't be an attrib. + continue + + for lhs, rvalue in zip(lvalues, rvalues): + # Check if the right hand side is a call to an attribute maker. + if ( + isinstance(rvalue, CallExpr) + and isinstance(rvalue.callee, RefExpr) + and rvalue.callee.fullname in attr_attrib_makers + and not stmt.new_syntax + ): + # This means we have an attrib without an annotation and so + # we can't do auto_attribs=True + return False + return True + + +def _attributes_from_assignment( + ctx: mypy.plugin.ClassDefContext, stmt: AssignmentStmt, auto_attribs: bool, kw_only: bool +) -> Iterable[Attribute]: + """Return Attribute objects that are created by this assignment. + + The assignments can look like this: + x = attr.ib() + x = y = attr.ib() + x, y = attr.ib(), attr.ib() + or if auto_attribs is enabled also like this: + x: type + x: type = default_value + x: type = attr.ib(...) + """ + for lvalue in stmt.lvalues: + lvalues, rvalues = _parse_assignments(lvalue, stmt) + + if len(lvalues) != len(rvalues): + # This means we have some assignment that isn't 1 to 1. + # It can't be an attrib. + continue + + for lhs, rvalue in zip(lvalues, rvalues): + # Check if the right hand side is a call to an attribute maker. + if ( + isinstance(rvalue, CallExpr) + and isinstance(rvalue.callee, RefExpr) + and rvalue.callee.fullname in attr_attrib_makers + ): + attr = _attribute_from_attrib_maker(ctx, auto_attribs, kw_only, lhs, rvalue, stmt) + if attr: + yield attr + elif auto_attribs and stmt.type and stmt.new_syntax and not is_class_var(lhs): + yield _attribute_from_auto_attrib(ctx, kw_only, lhs, rvalue, stmt) + + +def _cleanup_decorator(stmt: Decorator, attr_map: dict[str, Attribute]) -> None: + """Handle decorators in class bodies. + + `x.default` will set a default value on x + `x.validator` and `x.default` will get removed to avoid throwing a type error. 
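+
+    Illustrative user code that this handles (not part of this module):
+
+        x = attr.ib()
+
+        @x.default
+        def _x_default(self):
+            return 0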
+ """ + remove_me = [] + for func_decorator in stmt.decorators: + if ( + isinstance(func_decorator, MemberExpr) + and isinstance(func_decorator.expr, NameExpr) + and func_decorator.expr.name in attr_map + ): + if func_decorator.name == "default": + attr_map[func_decorator.expr.name].has_default = True + + if func_decorator.name in ("default", "validator"): + # These are decorators on the attrib object that only exist during + # class creation time. In order to not trigger a type error later we + # just remove them. This might leave us with a Decorator with no + # decorators (Emperor's new clothes?) + # TODO: It would be nice to type-check these rather than remove them. + # default should be Callable[[], T] + # validator should be Callable[[Any, 'Attribute', T], Any] + # where T is the type of the attribute. + remove_me.append(func_decorator) + for dec in remove_me: + stmt.decorators.remove(dec) + + +def _attribute_from_auto_attrib( + ctx: mypy.plugin.ClassDefContext, + kw_only: bool, + lhs: NameExpr, + rvalue: Expression, + stmt: AssignmentStmt, +) -> Attribute: + """Return an Attribute for a new type assignment.""" + name = unmangle(lhs.name) + # `x: int` (without equal sign) assigns rvalue to TempNode(AnyType()) + has_rhs = not isinstance(rvalue, TempNode) + sym = ctx.cls.info.names.get(name) + init_type = sym.type if sym else None + return Attribute(name, None, ctx.cls.info, has_rhs, True, kw_only, None, stmt, init_type) + + +def _attribute_from_attrib_maker( + ctx: mypy.plugin.ClassDefContext, + auto_attribs: bool, + kw_only: bool, + lhs: NameExpr, + rvalue: CallExpr, + stmt: AssignmentStmt, +) -> Attribute | None: + """Return an Attribute from the assignment or None if you can't make one.""" + if auto_attribs and not stmt.new_syntax: + # auto_attribs requires an annotation on *every* attr.ib. + assert lhs.node is not None + ctx.api.msg.need_annotation_for_var(lhs.node, stmt) + return None + + if len(stmt.lvalues) > 1: + ctx.api.fail("Too many names for one attribute", stmt) + return None + + # This is the type that belongs in the __init__ method for this attrib. + init_type = stmt.type + + # Read all the arguments from the call. + init = _get_bool_argument(ctx, rvalue, "init", True) + # Note: If the class decorator says kw_only=True the attribute is ignored. + # See https://github.com/python-attrs/attrs/issues/481 for explanation. + kw_only |= _get_bool_argument(ctx, rvalue, "kw_only", False) + + # TODO: Check for attr.NOTHING + attr_has_default = bool(_get_argument(rvalue, "default")) + attr_has_factory = bool(_get_argument(rvalue, "factory")) + + if attr_has_default and attr_has_factory: + ctx.api.fail('Can\'t pass both "default" and "factory".', rvalue) + elif attr_has_factory: + attr_has_default = True + + # If the type isn't set through annotation but is passed through `type=` use that. + type_arg = _get_argument(rvalue, "type") + if type_arg and not init_type: + try: + un_type = expr_to_unanalyzed_type(type_arg, ctx.api.options, ctx.api.is_stub_file) + except TypeTranslationError: + ctx.api.fail("Invalid argument to type", type_arg) + else: + init_type = ctx.api.anal_type(un_type) + if init_type and isinstance(lhs.node, Var) and not lhs.node.type: + # If there is no annotation, add one. + lhs.node.type = init_type + lhs.is_inferred_def = False + + # Note: convert is deprecated but works the same as converter. 
+ converter = _get_argument(rvalue, "converter") + convert = _get_argument(rvalue, "convert") + if convert and converter: + ctx.api.fail('Can\'t pass both "convert" and "converter".', rvalue) + elif convert: + ctx.api.fail("convert is deprecated, use converter", rvalue) + converter = convert + converter_info = _parse_converter(ctx, converter) + + # Custom alias might be defined: + alias = None + alias_expr = _get_argument(rvalue, "alias") + if alias_expr: + alias = ctx.api.parse_str_literal(alias_expr) + if alias is None: + ctx.api.fail( + '"alias" argument to attrs field must be a string literal', + rvalue, + code=LITERAL_REQ, + ) + name = unmangle(lhs.name) + return Attribute( + name, alias, ctx.cls.info, attr_has_default, init, kw_only, converter_info, stmt, init_type + ) + + +def _parse_converter( + ctx: mypy.plugin.ClassDefContext, converter_expr: Expression | None +) -> Converter | None: + """Return the Converter object from an Expression.""" + # TODO: Support complex converters, e.g. lambdas, calls, etc. + if not converter_expr: + return None + converter_info = Converter() + if ( + isinstance(converter_expr, CallExpr) + and isinstance(converter_expr.callee, RefExpr) + and converter_expr.callee.fullname in attr_optional_converters + and converter_expr.args + and converter_expr.args[0] + ): + # Special handling for attr.converters.optional(type) + # We extract the type and add make the init_args Optional in Attribute.argument + converter_expr = converter_expr.args[0] + is_attr_converters_optional = True + else: + is_attr_converters_optional = False + + converter_type: Type | None = None + if isinstance(converter_expr, RefExpr) and converter_expr.node: + if isinstance(converter_expr.node, FuncDef): + if converter_expr.node.type and isinstance(converter_expr.node.type, FunctionLike): + converter_type = converter_expr.node.type + else: # The converter is an unannotated function. + converter_info.init_type = AnyType(TypeOfAny.unannotated) + return converter_info + elif isinstance(converter_expr.node, OverloadedFuncDef) and is_valid_overloaded_converter( + converter_expr.node + ): + converter_type = converter_expr.node.type + elif isinstance(converter_expr.node, TypeInfo): + converter_type = type_object_type(converter_expr.node, ctx.api.named_type) + elif ( + isinstance(converter_expr, IndexExpr) + and isinstance(converter_expr.analyzed, TypeApplication) + and isinstance(converter_expr.base, RefExpr) + and isinstance(converter_expr.base.node, TypeInfo) + ): + # The converter is a generic type. + converter_type = type_object_type(converter_expr.base.node, ctx.api.named_type) + if isinstance(converter_type, CallableType): + converter_type = apply_generic_arguments( + converter_type, + converter_expr.analyzed.types, + ctx.api.msg.incompatible_typevar_value, + converter_type, + ) + else: + converter_type = None + + if isinstance(converter_expr, LambdaExpr): + # TODO: should we send a fail if converter_expr.min_args > 1? + converter_info.init_type = AnyType(TypeOfAny.unannotated) + return converter_info + + if not converter_type: + # Signal that we have an unsupported converter. 
+ ctx.api.fail( + "Unsupported converter, only named functions, types and lambdas are currently " + "supported", + converter_expr, + ) + converter_info.init_type = AnyType(TypeOfAny.from_error) + return converter_info + + converter_type = get_proper_type(converter_type) + if isinstance(converter_type, CallableType) and converter_type.arg_types: + converter_info.init_type = converter_type.arg_types[0] + if not is_attr_converters_optional: + converter_info.ret_type = converter_type.ret_type + elif isinstance(converter_type, Overloaded): + types: list[Type] = [] + for item in converter_type.items: + # Walk the overloads looking for methods that can accept one argument. + num_arg_types = len(item.arg_types) + if not num_arg_types: + continue + if num_arg_types > 1 and any(kind == ARG_POS for kind in item.arg_kinds[1:]): + continue + types.append(item.arg_types[0]) + # Make a union of all the valid types. + if types: + converter_info.init_type = make_simplified_union(types) + + if is_attr_converters_optional and converter_info.init_type: + # If the converter was attr.converter.optional(type) then add None to + # the allowed init_type. + converter_info.init_type = UnionType.make_union([converter_info.init_type, NoneType()]) + + return converter_info + + +def is_valid_overloaded_converter(defn: OverloadedFuncDef) -> bool: + return all( + (not isinstance(item, Decorator) or isinstance(item.func.type, FunctionLike)) + for item in defn.items + ) + + +def _parse_assignments( + lvalue: Expression, stmt: AssignmentStmt +) -> tuple[list[NameExpr], list[Expression]]: + """Convert a possibly complex assignment expression into lists of lvalues and rvalues.""" + lvalues: list[NameExpr] = [] + rvalues: list[Expression] = [] + if isinstance(lvalue, (TupleExpr, ListExpr)): + if all(isinstance(item, NameExpr) for item in lvalue.items): + lvalues = cast(list[NameExpr], lvalue.items) + if isinstance(stmt.rvalue, (TupleExpr, ListExpr)): + rvalues = stmt.rvalue.items + elif isinstance(lvalue, NameExpr): + lvalues = [lvalue] + rvalues = [stmt.rvalue] + return lvalues, rvalues + + +def _add_order(ctx: mypy.plugin.ClassDefContext, adder: MethodAdder) -> None: + """Generate all the ordering methods for this class.""" + bool_type = ctx.api.named_type("builtins.bool") + object_type = ctx.api.named_type("builtins.object") + # Make the types be: + # AT = TypeVar('AT') + # def __lt__(self: AT, other: AT) -> bool + # This way comparisons with subclasses will work correctly. + fullname = f"{ctx.cls.info.fullname}.{SELF_TVAR_NAME}" + tvd = TypeVarType( + SELF_TVAR_NAME, + fullname, + # Namespace is patched per-method below. 
+ id=TypeVarId(-1, namespace=""), + values=[], + upper_bound=object_type, + default=AnyType(TypeOfAny.from_omitted_generics), + ) + self_tvar_expr = TypeVarExpr( + SELF_TVAR_NAME, fullname, [], object_type, AnyType(TypeOfAny.from_omitted_generics) + ) + ctx.cls.info.names[SELF_TVAR_NAME] = SymbolTableNode(MDEF, self_tvar_expr) + + for method in ["__lt__", "__le__", "__gt__", "__ge__"]: + namespace = f"{ctx.cls.info.fullname}.{method}" + tvd = tvd.copy_modified(id=TypeVarId(tvd.id.raw_id, namespace=namespace)) + args = [Argument(Var("other", tvd), tvd, None, ARG_POS)] + adder.add_method(method, args, bool_type, self_type=tvd, tvd=tvd) + + +def _make_frozen(ctx: mypy.plugin.ClassDefContext, attributes: list[Attribute]) -> None: + """Turn all the attributes into properties to simulate frozen classes.""" + for attribute in attributes: + if attribute.name in ctx.cls.info.names: + # This variable belongs to this class so we can modify it. + node = ctx.cls.info.names[attribute.name].node + if not isinstance(node, Var): + # The superclass attribute was overridden with a non-variable. + # No need to do anything here, override will be verified during + # type checking. + continue + node.is_property = True + else: + # This variable belongs to a super class so create new Var so we + # can modify it. + var = Var(attribute.name, attribute.init_type) + var.info = ctx.cls.info + var._fullname = f"{ctx.cls.info.fullname}.{var.name}" + ctx.cls.info.names[var.name] = SymbolTableNode(MDEF, var) + var.is_property = True + + +def _add_init( + ctx: mypy.plugin.ClassDefContext, + attributes: list[Attribute], + adder: MethodAdder, + method_name: Literal["__init__", "__attrs_init__"], +) -> None: + """Generate an __init__ method for the attributes and add it to the class.""" + # Convert attributes to arguments with kw_only arguments at the end of + # the argument list + pos_args = [] + kw_only_args = [] + sym_table = ctx.cls.info.names + for attribute in attributes: + if not attribute.init: + continue + if attribute.kw_only: + kw_only_args.append(attribute.argument(ctx)) + else: + pos_args.append(attribute.argument(ctx)) + + # If the attribute is Final, present in `__init__` and has + # no default, make sure it doesn't error later. + if not attribute.has_default and attribute.name in sym_table: + sym_node = sym_table[attribute.name].node + if isinstance(sym_node, Var) and sym_node.is_final: + sym_node.final_set_in_init = True + args = pos_args + kw_only_args + if all( + # We use getattr rather than instance checks because the variable.type + # might be wrapped into a Union or some other type, but even non-Any + # types reliably track the fact that the argument was not annotated. + getattr(arg.variable.type, "type_of_any", None) == TypeOfAny.unannotated + for arg in args + ): + # This workaround makes --disallow-incomplete-defs usable with attrs, + # but is definitely suboptimal as a long-term solution. + # See https://github.com/python/mypy/issues/5954 for discussion. 
+ for a in args: + a.variable.type = AnyType(TypeOfAny.implementation_artifact) + a.type_annotation = AnyType(TypeOfAny.implementation_artifact) + adder.add_method(method_name, args, NoneType()) + + +def _add_attrs_magic_attribute( + ctx: mypy.plugin.ClassDefContext, attrs: list[tuple[str, Type | None]] +) -> None: + any_type = AnyType(TypeOfAny.explicit) + attributes_types: list[Type] = [ + ctx.api.named_type_or_none("attr.Attribute", [attr_type or any_type]) or any_type + for _, attr_type in attrs + ] + fallback_type = ctx.api.named_type( + "builtins.tuple", [ctx.api.named_type_or_none("attr.Attribute", [any_type]) or any_type] + ) + + attr_name = MAGIC_ATTR_CLS_NAME_TEMPLATE.format(ctx.cls.fullname.replace(".", "_")) + ti = ctx.api.basic_new_typeinfo(attr_name, fallback_type, 0) + for (name, _), attr_type in zip(attrs, attributes_types): + var = Var(name, attr_type) + var._fullname = name + var.is_property = True + proper_type = get_proper_type(attr_type) + if isinstance(proper_type, Instance): + var.info = proper_type.type + ti.names[name] = SymbolTableNode(MDEF, var, plugin_generated=True) + attributes_type = Instance(ti, []) + + # We need to stash the type of the magic attribute so it can be + # loaded on cached runs. + ctx.cls.info.names[attr_name] = SymbolTableNode(MDEF, ti, plugin_generated=True) + + add_attribute_to_class( + ctx.api, + ctx.cls, + MAGIC_ATTR_NAME, + TupleType(attributes_types, fallback=attributes_type), + fullname=f"{ctx.cls.fullname}.{MAGIC_ATTR_NAME}", + override_allow_incompatible=True, + is_classvar=True, + ) + + +def _add_slots(ctx: mypy.plugin.ClassDefContext, attributes: list[Attribute]) -> None: + if any(p.slots is None for p in ctx.cls.info.mro[1:-1]): + # At least one type in mro (excluding `self` and `object`) + # does not have concrete `__slots__` defined. Ignoring. + return + + # Unlike `@dataclasses.dataclass`, `__slots__` is rewritten here. + ctx.cls.info.slots = {attr.name for attr in attributes} + + # Also, inject `__slots__` attribute to class namespace: + slots_type = TupleType( + [ctx.api.named_type("builtins.str") for _ in attributes], + fallback=ctx.api.named_type("builtins.tuple"), + ) + add_attribute_to_class(api=ctx.api, cls=ctx.cls, name="__slots__", typ=slots_type) + + +def _add_match_args(ctx: mypy.plugin.ClassDefContext, attributes: list[Attribute]) -> None: + if ( + "__match_args__" not in ctx.cls.info.names + or ctx.cls.info.names["__match_args__"].plugin_generated + ): + str_type = ctx.api.named_type("builtins.str") + match_args = TupleType( + [ + str_type.copy_modified(last_known_value=LiteralType(attr.name, fallback=str_type)) + for attr in attributes + if not attr.kw_only and attr.init + ], + fallback=ctx.api.named_type("builtins.tuple"), + ) + add_attribute_to_class(api=ctx.api, cls=ctx.cls, name="__match_args__", typ=match_args) + + +def _remove_hashability(ctx: mypy.plugin.ClassDefContext) -> None: + """Remove hashability from a class.""" + add_attribute_to_class( + ctx.api, ctx.cls, "__hash__", NoneType(), is_classvar=True, overwrite_existing=True + ) + + +class MethodAdder: + """Helper to add methods to a TypeInfo. + + ctx: The ClassDefCtx we are using on which we will add methods. + """ + + # TODO: Combine this with the code build_namedtuple_typeinfo to support both. 
+ + def __init__(self, ctx: mypy.plugin.ClassDefContext) -> None: + self.ctx = ctx + self.self_type = fill_typevars(ctx.cls.info) + + def add_method( + self, + method_name: str, + args: list[Argument], + ret_type: Type, + self_type: Type | None = None, + tvd: TypeVarType | None = None, + ) -> None: + """Add a method: def (self, ) -> ): ... to info. + + self_type: The type to use for the self argument or None to use the inferred self type. + tvd: If the method is generic these should be the type variables. + """ + self_type = self_type if self_type is not None else self.self_type + add_method_to_class( + self.ctx.api, self.ctx.cls, method_name, args, ret_type, self_type, tvd + ) + + +def _get_attrs_init_type(typ: Instance) -> CallableType | None: + """ + If `typ` refers to an attrs class, get the type of its initializer method. + """ + magic_attr = typ.type.get(MAGIC_ATTR_NAME) + if magic_attr is None or not magic_attr.plugin_generated: + return None + init_method = typ.type.get_method("__init__") or typ.type.get_method(ATTRS_INIT_NAME) + if not isinstance(init_method, FuncDef) or not isinstance(init_method.type, CallableType): + return None + return init_method.type + + +def _fail_not_attrs_class(ctx: mypy.plugin.FunctionSigContext, t: Type, parent_t: Type) -> None: + t_name = format_type_bare(t, ctx.api.options) + if parent_t is t: + msg = ( + f'Argument 1 to "evolve" has a variable type "{t_name}" not bound to an attrs class' + if isinstance(t, TypeVarType) + else f'Argument 1 to "evolve" has incompatible type "{t_name}"; expected an attrs class' + ) + else: + pt_name = format_type_bare(parent_t, ctx.api.options) + msg = ( + f'Argument 1 to "evolve" has type "{pt_name}" whose item "{t_name}" is not bound to an attrs class' + if isinstance(t, TypeVarType) + else f'Argument 1 to "evolve" has incompatible type "{pt_name}" whose item "{t_name}" is not an attrs class' + ) + + ctx.api.fail(msg, ctx.context) + + +def _get_expanded_attr_types( + ctx: mypy.plugin.FunctionSigContext, + typ: ProperType, + display_typ: ProperType, + parent_typ: ProperType, +) -> list[Mapping[str, Type]] | None: + """ + For a given type, determine what attrs classes it can be: for each class, return the field types. + For generic classes, the field types are expanded. + If the type contains Any or a non-attrs type, returns None; in the latter case, also reports an error. 
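+
+    Illustrative: for a union "A | B" of two attrs classes this returns one
+    field mapping per class; for "A | int" the non-attrs member is reported as
+    an error and None is returned.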
+ """ + if isinstance(typ, AnyType): + return None + elif isinstance(typ, UnionType): + ret: list[Mapping[str, Type]] | None = [] + for item in typ.relevant_items(): + item = get_proper_type(item) + item_types = _get_expanded_attr_types(ctx, item, item, parent_typ) + if ret is not None and item_types is not None: + ret += item_types + else: + ret = None # but keep iterating to emit all errors + return ret + elif isinstance(typ, TypeVarType): + return _get_expanded_attr_types( + ctx, get_proper_type(typ.upper_bound), display_typ, parent_typ + ) + elif isinstance(typ, Instance): + init_func = _get_attrs_init_type(typ) + if init_func is None: + _fail_not_attrs_class(ctx, display_typ, parent_typ) + return None + init_func = expand_type_by_instance(init_func, typ) + # [1:] to skip the self argument of AttrClass.__init__ + field_names = cast(list[str], init_func.arg_names[1:]) + field_types = init_func.arg_types[1:] + return [dict(zip(field_names, field_types))] + else: + _fail_not_attrs_class(ctx, display_typ, parent_typ) + return None + + +def _meet_fields(types: list[Mapping[str, Type]]) -> Mapping[str, Type]: + """ + "Meet" the fields of a list of attrs classes, i.e. for each field, its new type will be the lower bound. + """ + field_to_types = defaultdict(list) + for fields in types: + for name, typ in fields.items(): + field_to_types[name].append(typ) + + return { + name: ( + get_proper_type(reduce(meet_types, f_types)) + if len(f_types) == len(types) + else UninhabitedType() + ) + for name, f_types in field_to_types.items() + } + + +def evolve_function_sig_callback(ctx: mypy.plugin.FunctionSigContext) -> CallableType: + """ + Generate a signature for the 'attr.evolve' function that's specific to the call site + and dependent on the type of the first argument. + """ + if len(ctx.args) != 2: + # Ideally the name and context should be callee's, but we don't have it in FunctionSigContext. + ctx.api.fail(f'"{ctx.default_signature.name}" has unexpected type annotation', ctx.context) + return ctx.default_signature + + if len(ctx.args[0]) != 1: + return ctx.default_signature # leave it to the type checker to complain + + inst_arg = ctx.args[0][0] + inst_type = get_proper_type(ctx.api.get_expression_type(inst_arg)) + inst_type_str = format_type_bare(inst_type, ctx.api.options) + + attr_types = _get_expanded_attr_types(ctx, inst_type, inst_type, inst_type) + if attr_types is None: + return ctx.default_signature + fields = _meet_fields(attr_types) + + return CallableType( + arg_names=["inst", *fields.keys()], + arg_kinds=[ARG_POS] + [ARG_NAMED_OPT] * len(fields), + arg_types=[inst_type, *fields.values()], + ret_type=inst_type, + fallback=ctx.default_signature.fallback, + name=f"{ctx.default_signature.name} of {inst_type_str}", + ) + + +def fields_function_sig_callback(ctx: mypy.plugin.FunctionSigContext) -> CallableType: + """Provide the signature for `attrs.fields`.""" + if len(ctx.args) != 1 or len(ctx.args[0]) != 1: + return ctx.default_signature + + proper_type = get_proper_type(ctx.api.get_expression_type(ctx.args[0][0])) + + # fields(Any) -> Any, fields(type[Any]) -> Any + if ( + isinstance(proper_type, AnyType) + or isinstance(proper_type, TypeType) + and isinstance(proper_type.item, AnyType) + ): + return ctx.default_signature + + cls = None + arg_types = ctx.default_signature.arg_types + + if isinstance(proper_type, TypeVarType): + inner = get_proper_type(proper_type.upper_bound) + if isinstance(inner, Instance): + # We need to work arg_types to compensate for the attrs stubs. 
+ arg_types = [proper_type] + cls = inner.type + elif isinstance(proper_type, CallableType): + cls = proper_type.type_object() + + if cls is not None and MAGIC_ATTR_NAME in cls.names: + # This is a proper attrs class. + ret_type = cls.names[MAGIC_ATTR_NAME].type + assert ret_type is not None + return ctx.default_signature.copy_modified(arg_types=arg_types, ret_type=ret_type) + + return ctx.default_signature diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/common.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/plugins/common.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..0dc4318 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/plugins/common.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/common.py b/.venv/lib/python3.12/site-packages/mypy/plugins/common.py new file mode 100644 index 0000000..ed2a91d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/plugins/common.py @@ -0,0 +1,439 @@ +from __future__ import annotations + +from typing import NamedTuple + +from mypy.argmap import map_actuals_to_formals +from mypy.fixup import TypeFixer +from mypy.nodes import ( + ARG_POS, + MDEF, + SYMBOL_FUNCBASE_TYPES, + Argument, + Block, + CallExpr, + ClassDef, + Decorator, + Expression, + FuncDef, + JsonDict, + NameExpr, + Node, + OverloadedFuncDef, + PassStmt, + RefExpr, + SymbolTableNode, + TypeInfo, + Var, +) +from mypy.plugin import CheckerPluginInterface, ClassDefContext, SemanticAnalyzerPluginInterface +from mypy.semanal_shared import ( + ALLOW_INCOMPATIBLE_OVERRIDE, + parse_bool, + require_bool_literal_argument, + set_callable_name, +) +from mypy.typeops import try_getting_str_literals as try_getting_str_literals +from mypy.types import ( + AnyType, + CallableType, + Instance, + LiteralType, + NoneType, + Overloaded, + Type, + TypeOfAny, + TypeType, + TypeVarType, + deserialize_type, + get_proper_type, +) +from mypy.types_utils import is_overlapping_none +from mypy.typevars import fill_typevars +from mypy.util import get_unique_redefinition_name + + +def _get_decorator_bool_argument(ctx: ClassDefContext, name: str, default: bool) -> bool: + """Return the bool argument for the decorator. + + This handles both @decorator(...) and @decorator. + """ + if isinstance(ctx.reason, CallExpr): + return _get_bool_argument(ctx, ctx.reason, name, default) + else: + return default + + +def _get_bool_argument(ctx: ClassDefContext, expr: CallExpr, name: str, default: bool) -> bool: + """Return the boolean value for an argument to a call or the + default if it's not found. + """ + attr_value = _get_argument(expr, name) + if attr_value: + return require_bool_literal_argument(ctx.api, attr_value, name, default) + return default + + +def _get_argument(call: CallExpr, name: str) -> Expression | None: + """Return the expression for the specific argument.""" + # To do this we use the CallableType of the callee to find the FormalArgument, + # then walk the actual CallExpr looking for the appropriate argument. + # + # Note: I'm not hard-coding the index so that in the future we can support other + # attrib and class makers. 
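# For orientation, the two decorator shapes that _get_decorator_bool_argument() above
# has to handle, sketched with attrs (any recognised class maker behaves the same way;
# the class names are illustrative):

import attr


@attr.s                       # bare form: ctx.reason is not a CallExpr, so defaults apply
class Plain:
    x = attr.ib(default=0)


@attr.s(frozen=True)          # call form: frozen=True is read from the call's arguments
class Frozen:
    x = attr.ib(default=0)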
+ callee_type = _get_callee_type(call) + if not callee_type: + return None + + argument = callee_type.argument_by_name(name) + if not argument: + return None + assert argument.name + + for i, (attr_name, attr_value) in enumerate(zip(call.arg_names, call.args)): + if argument.pos is not None and not attr_name and i == argument.pos: + return attr_value + if attr_name == argument.name: + return attr_value + + return None + + +def find_shallow_matching_overload_item(overload: Overloaded, call: CallExpr) -> CallableType: + """Perform limited lookup of a matching overload item. + + Full overload resolution is only supported during type checking, but plugins + sometimes need to resolve overloads. This can be used in some such use cases. + + Resolve overloads based on these things only: + + * Match using argument kinds and names + * If formal argument has type None, only accept the "None" expression in the callee + * If formal argument has type Literal[True] or Literal[False], only accept the + relevant bool literal + + Return the first matching overload item, or the last one if nothing matches. + """ + for item in overload.items[:-1]: + ok = True + mapped = map_actuals_to_formals( + call.arg_kinds, + call.arg_names, + item.arg_kinds, + item.arg_names, + lambda i: AnyType(TypeOfAny.special_form), + ) + + # Look for extra actuals + matched_actuals = set() + for actuals in mapped: + matched_actuals.update(actuals) + if any(i not in matched_actuals for i in range(len(call.args))): + ok = False + + for arg_type, kind, actuals in zip(item.arg_types, item.arg_kinds, mapped): + if kind.is_required() and not actuals: + # Missing required argument + ok = False + break + elif actuals: + args = [call.args[i] for i in actuals] + arg_type = get_proper_type(arg_type) + arg_none = any(isinstance(arg, NameExpr) and arg.name == "None" for arg in args) + if isinstance(arg_type, NoneType): + if not arg_none: + ok = False + break + elif ( + arg_none + and not is_overlapping_none(arg_type) + and not ( + isinstance(arg_type, Instance) + and arg_type.type.fullname == "builtins.object" + ) + and not isinstance(arg_type, AnyType) + ): + ok = False + break + elif isinstance(arg_type, LiteralType) and isinstance(arg_type.value, bool): + if not any(parse_bool(arg) == arg_type.value for arg in args): + ok = False + break + if ok: + return item + return overload.items[-1] + + +def _get_callee_type(call: CallExpr) -> CallableType | None: + """Return the type of the callee, regardless of its syntactic form.""" + + callee_node: Node | None = call.callee + + if isinstance(callee_node, RefExpr): + callee_node = callee_node.node + + # Some decorators may be using typing.dataclass_transform, which is itself a decorator, so we + # need to unwrap them to get at the true callee + if isinstance(callee_node, Decorator): + callee_node = callee_node.func + + if isinstance(callee_node, (Var, SYMBOL_FUNCBASE_TYPES)) and callee_node.type: + callee_node_type = get_proper_type(callee_node.type) + if isinstance(callee_node_type, Overloaded): + return find_shallow_matching_overload_item(callee_node_type, call) + elif isinstance(callee_node_type, CallableType): + return callee_node_type + + return None + + +def add_method( + ctx: ClassDefContext, + name: str, + args: list[Argument], + return_type: Type, + self_type: Type | None = None, + tvar_def: TypeVarType | None = None, + is_classmethod: bool = False, + is_staticmethod: bool = False, +) -> None: + """ + Adds a new method to a class. + Deprecated, use add_method_to_class() instead. 
+ """ + add_method_to_class( + ctx.api, + ctx.cls, + name=name, + args=args, + return_type=return_type, + self_type=self_type, + tvar_def=tvar_def, + is_classmethod=is_classmethod, + is_staticmethod=is_staticmethod, + ) + + +class MethodSpec(NamedTuple): + """Represents a method signature to be added, except for `name`.""" + + args: list[Argument] + return_type: Type + self_type: Type | None = None + tvar_defs: list[TypeVarType] | None = None + + +def add_method_to_class( + api: SemanticAnalyzerPluginInterface | CheckerPluginInterface, + cls: ClassDef, + name: str, + # MethodSpec items kept for backward compatibility: + args: list[Argument], + return_type: Type, + self_type: Type | None = None, + tvar_def: list[TypeVarType] | TypeVarType | None = None, + is_classmethod: bool = False, + is_staticmethod: bool = False, +) -> FuncDef | Decorator: + """Adds a new method to a class definition.""" + _prepare_class_namespace(cls, name) + + if tvar_def is not None and not isinstance(tvar_def, list): + tvar_def = [tvar_def] + + func, sym = _add_method_by_spec( + api, + cls.info, + name, + MethodSpec(args=args, return_type=return_type, self_type=self_type, tvar_defs=tvar_def), + is_classmethod=is_classmethod, + is_staticmethod=is_staticmethod, + ) + cls.info.names[name] = sym + cls.info.defn.defs.body.append(func) + return func + + +def add_overloaded_method_to_class( + api: SemanticAnalyzerPluginInterface | CheckerPluginInterface, + cls: ClassDef, + name: str, + items: list[MethodSpec], + is_classmethod: bool = False, + is_staticmethod: bool = False, +) -> OverloadedFuncDef: + """Adds a new overloaded method to a class definition.""" + assert len(items) >= 2, "Overloads must contain at least two cases" + + # Save old definition, if it exists. + _prepare_class_namespace(cls, name) + + # Create function bodies for each passed method spec. + funcs: list[Decorator | FuncDef] = [] + for item in items: + func, _sym = _add_method_by_spec( + api, + cls.info, + name=name, + spec=item, + is_classmethod=is_classmethod, + is_staticmethod=is_staticmethod, + ) + if isinstance(func, FuncDef): + var = Var(func.name, func.type) + var.set_line(func.line) + func.is_decorated = True + + deco = Decorator(func, [], var) + else: + deco = func + deco.is_overload = True + funcs.append(deco) + + # Create the final OverloadedFuncDef node: + overload_def = OverloadedFuncDef(funcs) + overload_def.info = cls.info + overload_def.is_class = is_classmethod + overload_def.is_static = is_staticmethod + sym = SymbolTableNode(MDEF, overload_def) + sym.plugin_generated = True + + cls.info.names[name] = sym + cls.info.defn.defs.body.append(overload_def) + return overload_def + + +def _prepare_class_namespace(cls: ClassDef, name: str) -> None: + info = cls.info + assert info + + # First remove any previously generated methods with the same name + # to avoid clashes and problems in the semantic analyzer. + if name in info.names: + sym = info.names[name] + if sym.plugin_generated and isinstance(sym.node, FuncDef): + cls.defs.body.remove(sym.node) + + # NOTE: we would like the plugin generated node to dominate, but we still + # need to keep any existing definitions so they get semantically analyzed. + if name in info.names: + # Get a nice unique name instead. 
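# A sketch of how a third-party mypy plugin might use the add_method_to_class() helper
# defined above; "mylib.resettable" is a hypothetical decorator fullname used only for
# illustration, not a real API:

from typing import Callable, Optional

from mypy.plugin import ClassDefContext, Plugin
from mypy.plugins.common import add_method_to_class
from mypy.types import NoneType


def _add_reset_method(ctx: ClassDefContext) -> None:
    # Generate "def reset(self) -> None: ..." on every decorated class.
    add_method_to_class(ctx.api, ctx.cls, "reset", args=[], return_type=NoneType())


class ResettablePlugin(Plugin):
    def get_class_decorator_hook(
        self, fullname: str
    ) -> Optional[Callable[[ClassDefContext], None]]:
        if fullname == "mylib.resettable":  # hypothetical decorator
            return _add_reset_method
        return None


def plugin(version: str) -> type[Plugin]:
    return ResettablePlugin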
+ r_name = get_unique_redefinition_name(name, info.names) + info.names[r_name] = info.names[name] + + +def _add_method_by_spec( + api: SemanticAnalyzerPluginInterface | CheckerPluginInterface, + info: TypeInfo, + name: str, + spec: MethodSpec, + *, + is_classmethod: bool, + is_staticmethod: bool, +) -> tuple[FuncDef | Decorator, SymbolTableNode]: + args, return_type, self_type, tvar_defs = spec + + assert not ( + is_classmethod is True and is_staticmethod is True + ), "Can't add a new method that's both staticmethod and classmethod." + + if isinstance(api, SemanticAnalyzerPluginInterface): + function_type = api.named_type("builtins.function") + else: + function_type = api.named_generic_type("builtins.function", []) + + if is_classmethod: + self_type = self_type or TypeType(fill_typevars(info)) + first = [Argument(Var("_cls"), self_type, None, ARG_POS, True)] + elif is_staticmethod: + first = [] + else: + self_type = self_type or fill_typevars(info) + first = [Argument(Var("self"), self_type, None, ARG_POS)] + args = first + args + + arg_types, arg_names, arg_kinds = [], [], [] + for arg in args: + assert arg.type_annotation, "All arguments must be fully typed." + arg_types.append(arg.type_annotation) + arg_names.append(arg.variable.name) + arg_kinds.append(arg.kind) + + signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type) + if tvar_defs: + signature.variables = tuple(tvar_defs) + + func = FuncDef(name, args, Block([PassStmt()])) + func.info = info + func.type = set_callable_name(signature, func) + func.is_class = is_classmethod + func.is_static = is_staticmethod + func._fullname = info.fullname + "." + name + func.line = info.line + + # Add decorator for is_staticmethod. It's unnecessary for is_classmethod. + if is_staticmethod: + func.is_decorated = True + v = Var(name, func.type) + v.info = info + v._fullname = func._fullname + v.is_staticmethod = True + dec = Decorator(func, [], v) + dec.line = info.line + sym = SymbolTableNode(MDEF, dec) + sym.plugin_generated = True + return dec, sym + + sym = SymbolTableNode(MDEF, func) + sym.plugin_generated = True + return func, sym + + +def add_attribute_to_class( + api: SemanticAnalyzerPluginInterface, + cls: ClassDef, + name: str, + typ: Type, + final: bool = False, + no_serialize: bool = False, + override_allow_incompatible: bool = False, + fullname: str | None = None, + is_classvar: bool = False, + overwrite_existing: bool = False, +) -> Var: + """ + Adds a new attribute to a class definition. + This currently only generates the symbol table entry and no corresponding AssignmentStatement + """ + info = cls.info + + # NOTE: we would like the plugin generated node to dominate, but we still + # need to keep any existing definitions so they get semantically analyzed. + if name in info.names and not overwrite_existing: + # Get a nice unique name instead. + r_name = get_unique_redefinition_name(name, info.names) + info.names[r_name] = info.names[name] + + node = Var(name, typ) + node.info = info + node.is_final = final + node.is_classvar = is_classvar + if name in ALLOW_INCOMPATIBLE_OVERRIDE: + node.allow_incompatible_override = True + else: + node.allow_incompatible_override = override_allow_incompatible + + if fullname: + node._fullname = fullname + else: + node._fullname = info.fullname + "." 
+ name + + info.names[name] = SymbolTableNode( + MDEF, node, plugin_generated=True, no_serialize=no_serialize + ) + return node + + +def deserialize_and_fixup_type(data: str | JsonDict, api: SemanticAnalyzerPluginInterface) -> Type: + typ = deserialize_type(data) + typ.accept(TypeFixer(api.modules, allow_missing=False)) + return typ diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/constants.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/plugins/constants.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..3f0a8c0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/plugins/constants.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/constants.py b/.venv/lib/python3.12/site-packages/mypy/plugins/constants.py new file mode 100644 index 0000000..9a09e89 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/plugins/constants.py @@ -0,0 +1,20 @@ +"""Constant definitions for plugins kept here to help with import cycles.""" + +from typing import Final + +from mypy.semanal_enum import ENUM_BASES + +SINGLEDISPATCH_TYPE: Final = "functools._SingleDispatchCallable" +SINGLEDISPATCH_REGISTER_METHOD: Final = f"{SINGLEDISPATCH_TYPE}.register" +SINGLEDISPATCH_CALLABLE_CALL_METHOD: Final = f"{SINGLEDISPATCH_TYPE}.__call__" +SINGLEDISPATCH_REGISTER_RETURN_CLASS: Final = "_SingleDispatchRegisterCallable" +SINGLEDISPATCH_REGISTER_CALLABLE_CALL_METHOD: Final = ( + f"functools.{SINGLEDISPATCH_REGISTER_RETURN_CLASS}.__call__" +) + +ENUM_NAME_ACCESS: Final = {f"{prefix}.name" for prefix in ENUM_BASES} | { + f"{prefix}._name_" for prefix in ENUM_BASES +} +ENUM_VALUE_ACCESS: Final = {f"{prefix}.value" for prefix in ENUM_BASES} | { + f"{prefix}._value_" for prefix in ENUM_BASES +} diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/ctypes.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/plugins/ctypes.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..f2bc681 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/plugins/ctypes.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/ctypes.py b/.venv/lib/python3.12/site-packages/mypy/plugins/ctypes.py new file mode 100644 index 0000000..b6dbec1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/plugins/ctypes.py @@ -0,0 +1,245 @@ +"""Plugin to provide accurate types for some parts of the ctypes module.""" + +from __future__ import annotations + +# Fully qualified instead of "from mypy.plugin import ..." to avoid circular import problems. +import mypy.plugin +from mypy import nodes +from mypy.maptype import map_instance_to_supertype +from mypy.messages import format_type +from mypy.subtypes import is_subtype +from mypy.typeops import make_simplified_union +from mypy.types import ( + AnyType, + CallableType, + Instance, + NoneType, + ProperType, + Type, + TypeOfAny, + UnionType, + flatten_nested_unions, + get_proper_type, +) + + +def _find_simplecdata_base_arg( + tp: Instance, api: mypy.plugin.CheckerPluginInterface +) -> ProperType | None: + """Try to find a parametrized _SimpleCData in tp's bases and return its single type argument. + + None is returned if _SimpleCData appears nowhere in tp's (direct or indirect) bases. 
+ """ + if tp.type.has_base("_ctypes._SimpleCData"): + simplecdata_base = map_instance_to_supertype( + tp, + api.named_generic_type("_ctypes._SimpleCData", [AnyType(TypeOfAny.special_form)]).type, + ) + assert len(simplecdata_base.args) == 1, "_SimpleCData takes exactly one type argument" + return get_proper_type(simplecdata_base.args[0]) + return None + + +def _autoconvertible_to_cdata(tp: Type, api: mypy.plugin.CheckerPluginInterface) -> Type: + """Get a type that is compatible with all types that can be implicitly converted to the given + CData type. + + Examples: + * c_int -> Union[c_int, int] + * c_char_p -> Union[c_char_p, bytes, int, NoneType] + * MyStructure -> MyStructure + """ + allowed_types = [] + # If tp is a union, we allow all types that are convertible to at least one of the union + # items. This is not quite correct - strictly speaking, only types convertible to *all* of the + # union items should be allowed. This may be worth changing in the future, but the more + # correct algorithm could be too strict to be useful. + for t in flatten_nested_unions([tp]): + t = get_proper_type(t) + # Every type can be converted from itself (obviously). + allowed_types.append(t) + if isinstance(t, Instance): + unboxed = _find_simplecdata_base_arg(t, api) + if unboxed is not None: + # If _SimpleCData appears in tp's (direct or indirect) bases, its type argument + # specifies the type's "unboxed" version, which can always be converted back to + # the original "boxed" type. + allowed_types.append(unboxed) + + if t.type.has_base("ctypes._PointerLike"): + # Pointer-like _SimpleCData subclasses can also be converted from + # an int or None. + allowed_types.append(api.named_generic_type("builtins.int", [])) + allowed_types.append(NoneType()) + + return make_simplified_union(allowed_types) + + +def _autounboxed_cdata(tp: Type) -> ProperType: + """Get the auto-unboxed version of a CData type, if applicable. + + For *direct* _SimpleCData subclasses, the only type argument of _SimpleCData in the bases list + is returned. + For all other CData types, including indirect _SimpleCData subclasses, tp is returned as-is. + """ + tp = get_proper_type(tp) + + if isinstance(tp, UnionType): + return make_simplified_union([_autounboxed_cdata(t) for t in tp.items]) + elif isinstance(tp, Instance): + for base in tp.type.bases: + if base.type.fullname == "_ctypes._SimpleCData": + # If tp has _SimpleCData as a direct base class, + # the auto-unboxed type is the single type argument of the _SimpleCData type. + assert len(base.args) == 1 + return get_proper_type(base.args[0]) + # If tp is not a concrete type, or if there is no _SimpleCData in the bases, + # the type is not auto-unboxed. + return tp + + +def _get_array_element_type(tp: Type) -> ProperType | None: + """Get the element type of the Array type tp, or None if not specified.""" + tp = get_proper_type(tp) + if isinstance(tp, Instance): + assert tp.type.fullname == "_ctypes.Array" + if len(tp.args) == 1: + return get_proper_type(tp.args[0]) + return None + + +def array_constructor_callback(ctx: mypy.plugin.FunctionContext) -> Type: + """Callback to provide an accurate signature for the ctypes.Array constructor.""" + # Extract the element type from the constructor's return type, i. e. the type of the array + # being constructed. 
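# A small runtime sketch of the ctypes behaviour these callbacks model: array elements
# accept either the "boxed" c_int or a plain int (auto-convertible), and reads give back
# the auto-unboxed Python value. Names are illustrative.

import ctypes

IntArray3 = ctypes.c_int * 3
arr = IntArray3(1, 2, 3)           # constructor arguments must convert to c_int
arr[0] = 10                        # a plain int is accepted
arr[1] = ctypes.c_int(20)          # so is an explicit c_int
print(arr[0], arr[1])              # reads come back as int: 10 20

buf = ctypes.create_string_buffer(b"hi")   # an Array[c_char]
print(buf.value, buf.raw)                  # .value and .raw are bytes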
+ et = _get_array_element_type(ctx.default_return_type) + if et is not None: + allowed = _autoconvertible_to_cdata(et, ctx.api) + assert ( + len(ctx.arg_types) == 1 + ), "The stub of the ctypes.Array constructor should have a single vararg parameter" + for arg_num, (arg_kind, arg_type) in enumerate(zip(ctx.arg_kinds[0], ctx.arg_types[0]), 1): + if arg_kind == nodes.ARG_POS and not is_subtype(arg_type, allowed): + ctx.api.msg.fail( + "Array constructor argument {} of type {}" + " is not convertible to the array element type {}".format( + arg_num, + format_type(arg_type, ctx.api.options), + format_type(et, ctx.api.options), + ), + ctx.context, + ) + elif arg_kind == nodes.ARG_STAR: + ty = ctx.api.named_generic_type("typing.Iterable", [allowed]) + if not is_subtype(arg_type, ty): + it = ctx.api.named_generic_type("typing.Iterable", [et]) + ctx.api.msg.fail( + "Array constructor argument {} of type {}" + " is not convertible to the array element type {}".format( + arg_num, + format_type(arg_type, ctx.api.options), + format_type(it, ctx.api.options), + ), + ctx.context, + ) + + return ctx.default_return_type + + +def array_getitem_callback(ctx: mypy.plugin.MethodContext) -> Type: + """Callback to provide an accurate return type for ctypes.Array.__getitem__.""" + et = _get_array_element_type(ctx.type) + if et is not None: + unboxed = _autounboxed_cdata(et) + assert ( + len(ctx.arg_types) == 1 + ), "The stub of ctypes.Array.__getitem__ should have exactly one parameter" + assert ( + len(ctx.arg_types[0]) == 1 + ), "ctypes.Array.__getitem__'s parameter should not be variadic" + index_type = get_proper_type(ctx.arg_types[0][0]) + if isinstance(index_type, Instance): + if index_type.type.has_base("builtins.int"): + return unboxed + elif index_type.type.has_base("builtins.slice"): + return ctx.api.named_generic_type("builtins.list", [unboxed]) + return ctx.default_return_type + + +def array_setitem_callback(ctx: mypy.plugin.MethodSigContext) -> CallableType: + """Callback to provide an accurate signature for ctypes.Array.__setitem__.""" + et = _get_array_element_type(ctx.type) + if et is not None: + allowed = _autoconvertible_to_cdata(et, ctx.api) + assert len(ctx.default_signature.arg_types) == 2 + index_type = get_proper_type(ctx.default_signature.arg_types[0]) + if isinstance(index_type, Instance): + arg_type = None + if index_type.type.has_base("builtins.int"): + arg_type = allowed + elif index_type.type.has_base("builtins.slice"): + arg_type = ctx.api.named_generic_type("builtins.list", [allowed]) + if arg_type is not None: + # Note: arg_type can only be None if index_type is invalid, in which case we use + # the default signature and let mypy report an error about it. 
+ return ctx.default_signature.copy_modified( + arg_types=ctx.default_signature.arg_types[:1] + [arg_type] + ) + return ctx.default_signature + + +def array_iter_callback(ctx: mypy.plugin.MethodContext) -> Type: + """Callback to provide an accurate return type for ctypes.Array.__iter__.""" + et = _get_array_element_type(ctx.type) + if et is not None: + unboxed = _autounboxed_cdata(et) + return ctx.api.named_generic_type("typing.Iterator", [unboxed]) + return ctx.default_return_type + + +def array_value_callback(ctx: mypy.plugin.AttributeContext) -> Type: + """Callback to provide an accurate type for ctypes.Array.value.""" + et = _get_array_element_type(ctx.type) + if et is not None: + types: list[Type] = [] + for tp in flatten_nested_unions([et]): + tp = get_proper_type(tp) + if isinstance(tp, AnyType): + types.append(AnyType(TypeOfAny.from_another_any, source_any=tp)) + elif isinstance(tp, Instance) and tp.type.fullname == "ctypes.c_char": + types.append(ctx.api.named_generic_type("builtins.bytes", [])) + elif isinstance(tp, Instance) and tp.type.fullname == "ctypes.c_wchar": + types.append(ctx.api.named_generic_type("builtins.str", [])) + else: + ctx.api.msg.fail( + 'Array attribute "value" is only available' + ' with element type "c_char" or "c_wchar", not {}'.format( + format_type(et, ctx.api.options) + ), + ctx.context, + ) + return make_simplified_union(types) + return ctx.default_attr_type + + +def array_raw_callback(ctx: mypy.plugin.AttributeContext) -> Type: + """Callback to provide an accurate type for ctypes.Array.raw.""" + et = _get_array_element_type(ctx.type) + if et is not None: + types: list[Type] = [] + for tp in flatten_nested_unions([et]): + tp = get_proper_type(tp) + if ( + isinstance(tp, AnyType) + or isinstance(tp, Instance) + and tp.type.fullname == "ctypes.c_char" + ): + types.append(ctx.api.named_generic_type("builtins.bytes", [])) + else: + ctx.api.msg.fail( + 'Array attribute "raw" is only available' + ' with element type "c_char", not {}'.format(format_type(et, ctx.api.options)), + ctx.context, + ) + return make_simplified_union(types) + return ctx.default_attr_type diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/dataclasses.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/plugins/dataclasses.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..2b2e88d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/plugins/dataclasses.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/dataclasses.py b/.venv/lib/python3.12/site-packages/mypy/plugins/dataclasses.py new file mode 100644 index 0000000..e916ded --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/plugins/dataclasses.py @@ -0,0 +1,1133 @@ +"""Plugin that provides support for dataclasses.""" + +from __future__ import annotations + +from collections.abc import Iterator +from typing import TYPE_CHECKING, Final, Literal + +from mypy import errorcodes, message_registry +from mypy.expandtype import expand_type, expand_type_by_instance +from mypy.meet import meet_types +from mypy.messages import format_type_bare +from mypy.nodes import ( + ARG_NAMED, + ARG_NAMED_OPT, + ARG_OPT, + ARG_POS, + ARG_STAR, + ARG_STAR2, + MDEF, + Argument, + AssignmentStmt, + Block, + CallExpr, + ClassDef, + Context, + DataclassTransformSpec, + Decorator, + EllipsisExpr, + Expression, + FuncDef, + FuncItem, + IfStmt, + JsonDict, + NameExpr, + Node, + PlaceholderNode, + RefExpr, + Statement, + SymbolTableNode, + 
TempNode, + TypeAlias, + TypeInfo, + TypeVarExpr, + Var, +) +from mypy.plugin import ClassDefContext, FunctionSigContext, SemanticAnalyzerPluginInterface +from mypy.plugins.common import ( + _get_callee_type, + _get_decorator_bool_argument, + add_attribute_to_class, + add_method_to_class, + deserialize_and_fixup_type, +) +from mypy.semanal_shared import find_dataclass_transform_spec, require_bool_literal_argument +from mypy.server.trigger import make_wildcard_trigger +from mypy.state import state +from mypy.typeops import map_type_from_supertype, try_getting_literals_from_type +from mypy.types import ( + AnyType, + CallableType, + FunctionLike, + Instance, + LiteralType, + NoneType, + ProperType, + TupleType, + Type, + TypeOfAny, + TypeVarId, + TypeVarType, + UninhabitedType, + UnionType, + get_proper_type, +) +from mypy.typevars import fill_typevars + +if TYPE_CHECKING: + from mypy.checker import TypeChecker + +# The set of decorators that generate dataclasses. +dataclass_makers: Final = {"dataclass", "dataclasses.dataclass"} +# Default field specifiers for dataclasses +DATACLASS_FIELD_SPECIFIERS: Final = ("dataclasses.Field", "dataclasses.field") + + +SELF_TVAR_NAME: Final = "_DT" +_TRANSFORM_SPEC_FOR_DATACLASSES: Final = DataclassTransformSpec( + eq_default=True, + order_default=False, + kw_only_default=False, + frozen_default=False, + field_specifiers=DATACLASS_FIELD_SPECIFIERS, +) +_INTERNAL_REPLACE_SYM_NAME: Final = "__mypy-replace" +_INTERNAL_POST_INIT_SYM_NAME: Final = "__mypy-post_init" + + +class DataclassAttribute: + def __init__( + self, + name: str, + alias: str | None, + is_in_init: bool, + is_init_var: bool, + has_default: bool, + line: int, + column: int, + type: Type | None, + info: TypeInfo, + kw_only: bool, + is_neither_frozen_nor_nonfrozen: bool, + api: SemanticAnalyzerPluginInterface, + ) -> None: + self.name = name + self.alias = alias + self.is_in_init = is_in_init + self.is_init_var = is_init_var + self.has_default = has_default + self.line = line + self.column = column + self.type = type # Type as __init__ argument + self.info = info + self.kw_only = kw_only + self.is_neither_frozen_nor_nonfrozen = is_neither_frozen_nor_nonfrozen + self._api = api + + def to_argument( + self, current_info: TypeInfo, *, of: Literal["__init__", "replace", "__post_init__"] + ) -> Argument: + if of == "__init__": + arg_kind = ARG_POS + if self.kw_only and self.has_default: + arg_kind = ARG_NAMED_OPT + elif self.kw_only and not self.has_default: + arg_kind = ARG_NAMED + elif not self.kw_only and self.has_default: + arg_kind = ARG_OPT + elif of == "replace": + arg_kind = ARG_NAMED if self.is_init_var and not self.has_default else ARG_NAMED_OPT + elif of == "__post_init__": + # We always use `ARG_POS` without a default value, because it is practical. + # Consider this case: + # + # @dataclass + # class My: + # y: dataclasses.InitVar[str] = 'a' + # def __post_init__(self, y: str) -> None: ... + # + # We would be *required* to specify `y: str = ...` if default is added here. + # But, most people won't care about adding default values to `__post_init__`, + # because it is not designed to be called directly, and duplicating default values + # for the sake of type-checking is unpleasant. 
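# For reference, a runnable sketch of the InitVar / __post_init__ pattern that the
# "__post_init__" argument handling above models (class and field names are illustrative):

from dataclasses import InitVar, dataclass, field


@dataclass
class Rect:
    width: float
    height: float
    scale: InitVar[float] = 1.0
    area: float = field(init=False, default=0.0)

    def __post_init__(self, scale: float) -> None:
        # InitVar values are passed from __init__ to __post_init__ and are not stored.
        self.area = self.width * self.height * scale


r = Rect(2.0, 3.0, scale=2.0)
print(r.area)   # 12.0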
+ arg_kind = ARG_POS + return Argument( + variable=self.to_var(current_info), + type_annotation=self.expand_type(current_info), + initializer=EllipsisExpr() if self.has_default else None, # Only used by stubgen + kind=arg_kind, + ) + + def expand_type(self, current_info: TypeInfo) -> Type | None: + if self.type is not None and self.info.self_type is not None: + # In general, it is not safe to call `expand_type()` during semantic analysis, + # however this plugin is called very late, so all types should be fully ready. + # Also, it is tricky to avoid eager expansion of Self types here (e.g. because + # we serialize attributes). + with state.strict_optional_set(self._api.options.strict_optional): + return expand_type( + self.type, {self.info.self_type.id: fill_typevars(current_info)} + ) + return self.type + + def to_var(self, current_info: TypeInfo) -> Var: + return Var(self.alias or self.name, self.expand_type(current_info)) + + def serialize(self) -> JsonDict: + assert self.type + return { + "name": self.name, + "alias": self.alias, + "is_in_init": self.is_in_init, + "is_init_var": self.is_init_var, + "has_default": self.has_default, + "line": self.line, + "column": self.column, + "type": self.type.serialize(), + "kw_only": self.kw_only, + "is_neither_frozen_nor_nonfrozen": self.is_neither_frozen_nor_nonfrozen, + } + + @classmethod + def deserialize( + cls, info: TypeInfo, data: JsonDict, api: SemanticAnalyzerPluginInterface + ) -> DataclassAttribute: + data = data.copy() + typ = deserialize_and_fixup_type(data.pop("type"), api) + return cls(type=typ, info=info, **data, api=api) + + def expand_typevar_from_subtype(self, sub_type: TypeInfo) -> None: + """Expands type vars in the context of a subtype when an attribute is inherited + from a generic super type.""" + if self.type is not None: + with state.strict_optional_set(self._api.options.strict_optional): + self.type = map_type_from_supertype(self.type, sub_type, self.info) + + +class DataclassTransformer: + """Implement the behavior of @dataclass. + + Note that this may be executed multiple times on the same class, so + everything here must be idempotent. + + This runs after the main semantic analysis pass, so you can assume that + there are no placeholders. + """ + + def __init__( + self, + cls: ClassDef, + # Statement must also be accepted since class definition itself may be passed as the reason + # for subclass/metaclass-based uses of `typing.dataclass_transform` + reason: Expression | Statement, + spec: DataclassTransformSpec, + api: SemanticAnalyzerPluginInterface, + ) -> None: + self._cls = cls + self._reason = reason + self._spec = spec + self._api = api + + def transform(self) -> bool: + """Apply all the necessary transformations to the underlying + dataclass so as to ensure it is fully type checked according + to the rules in PEP 557. + """ + info = self._cls.info + attributes = self.collect_attributes() + if attributes is None: + # Some definitions are not ready. We need another pass. 
+ return False + for attr in attributes: + if attr.type is None: + return False + decorator_arguments = { + "init": self._get_bool_arg("init", True), + "eq": self._get_bool_arg("eq", self._spec.eq_default), + "order": self._get_bool_arg("order", self._spec.order_default), + "frozen": self._get_bool_arg("frozen", self._spec.frozen_default), + "slots": self._get_bool_arg("slots", False), + "match_args": self._get_bool_arg("match_args", True), + } + py_version = self._api.options.python_version + + # If there are no attributes, it may be that the semantic analyzer has not + # processed them yet. In order to work around this, we can simply skip generating + # __init__ if there are no attributes, because if the user truly did not define any, + # then the object default __init__ with an empty signature will be present anyway. + if ( + decorator_arguments["init"] + and ("__init__" not in info.names or info.names["__init__"].plugin_generated) + and attributes + ): + args = [ + attr.to_argument(info, of="__init__") + for attr in attributes + if attr.is_in_init and not self._is_kw_only_type(attr.type) + ] + + if info.fallback_to_any: + # Make positional args optional since we don't know their order. + # This will at least allow us to typecheck them if they are called + # as kwargs + for arg in args: + if arg.kind == ARG_POS: + arg.kind = ARG_OPT + + existing_args_names = {arg.variable.name for arg in args} + gen_args_name = "generated_args" + while gen_args_name in existing_args_names: + gen_args_name += "_" + gen_kwargs_name = "generated_kwargs" + while gen_kwargs_name in existing_args_names: + gen_kwargs_name += "_" + args = [ + Argument(Var(gen_args_name), AnyType(TypeOfAny.explicit), None, ARG_STAR), + *args, + Argument(Var(gen_kwargs_name), AnyType(TypeOfAny.explicit), None, ARG_STAR2), + ] + + add_method_to_class( + self._api, self._cls, "__init__", args=args, return_type=NoneType() + ) + + if ( + decorator_arguments["eq"] + and info.get("__eq__") is None + or decorator_arguments["order"] + ): + # Type variable for self types in generated methods. + obj_type = self._api.named_type("builtins.object") + self_tvar_expr = TypeVarExpr( + SELF_TVAR_NAME, + info.fullname + "." + SELF_TVAR_NAME, + [], + obj_type, + AnyType(TypeOfAny.from_omitted_generics), + ) + info.names[SELF_TVAR_NAME] = SymbolTableNode(MDEF, self_tvar_expr) + + # Add <, >, <=, >=, but only if the class has an eq method. + if decorator_arguments["order"]: + if not decorator_arguments["eq"]: + self._api.fail('"eq" must be True if "order" is True', self._reason) + + for method_name in ["__lt__", "__gt__", "__le__", "__ge__"]: + # Like for __eq__ and __ne__, we want "other" to match + # the self type. 
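# What the generated ordering methods amount to for users, sketched with the standard
# library; the plugin mirrors this when checking @dataclass(order=True). Names are
# illustrative.

from dataclasses import dataclass


@dataclass(order=True)
class Version:
    major: int
    minor: int


assert Version(1, 2) < Version(1, 10)   # __lt__/__gt__/__le__/__ge__ are generated
# Version(1, 2) < "1.10"                # comparing against another type is a type error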
+ obj_type = self._api.named_type("builtins.object") + order_tvar_def = TypeVarType( + SELF_TVAR_NAME, + f"{info.fullname}.{SELF_TVAR_NAME}", + id=TypeVarId(-1, namespace=f"{info.fullname}.{method_name}"), + values=[], + upper_bound=obj_type, + default=AnyType(TypeOfAny.from_omitted_generics), + ) + order_return_type = self._api.named_type("builtins.bool") + order_args = [ + Argument(Var("other", order_tvar_def), order_tvar_def, None, ARG_POS) + ] + + existing_method = info.get(method_name) + if existing_method is not None and not existing_method.plugin_generated: + assert existing_method.node + self._api.fail( + f'You may not have a custom "{method_name}" method when "order" is True', + existing_method.node, + ) + + add_method_to_class( + self._api, + self._cls, + method_name, + args=order_args, + return_type=order_return_type, + self_type=order_tvar_def, + tvar_def=order_tvar_def, + ) + + parent_decorator_arguments = [] + for parent in info.mro[1:-1]: + parent_args = parent.metadata.get("dataclass") + + # Ignore parent classes that directly specify a dataclass transform-decorated metaclass + # when searching for usage of the frozen parameter. PEP 681 states that a class that + # directly specifies such a metaclass must be treated as neither frozen nor non-frozen. + if parent_args and not _has_direct_dataclass_transform_metaclass(parent): + parent_decorator_arguments.append(parent_args) + + if decorator_arguments["frozen"]: + if any(not parent["frozen"] for parent in parent_decorator_arguments): + self._api.fail("Frozen dataclass cannot inherit from a non-frozen dataclass", info) + self._propertize_callables(attributes, settable=False) + self._freeze(attributes) + else: + if any(parent["frozen"] for parent in parent_decorator_arguments): + self._api.fail("Non-frozen dataclass cannot inherit from a frozen dataclass", info) + self._propertize_callables(attributes) + + if decorator_arguments["slots"]: + self.add_slots(info, attributes, correct_version=py_version >= (3, 10)) + + self.reset_init_only_vars(info, attributes) + + if ( + decorator_arguments["match_args"] + and ( + "__match_args__" not in info.names or info.names["__match_args__"].plugin_generated + ) + and py_version >= (3, 10) + ): + str_type = self._api.named_type("builtins.str") + literals: list[Type] = [ + LiteralType(attr.name, str_type) + for attr in attributes + if attr.is_in_init and not attr.kw_only + ] + match_args_type = TupleType(literals, self._api.named_type("builtins.tuple")) + add_attribute_to_class(self._api, self._cls, "__match_args__", match_args_type) + + self._add_dataclass_fields_magic_attribute() + self._add_internal_replace_method(attributes) + if self._api.options.python_version >= (3, 13): + self._add_dunder_replace(attributes) + + if "__post_init__" in info.names: + self._add_internal_post_init_method(attributes) + + info.metadata["dataclass"] = { + "attributes": [attr.serialize() for attr in attributes], + "frozen": decorator_arguments["frozen"], + } + + return True + + def _add_dunder_replace(self, attributes: list[DataclassAttribute]) -> None: + """Add a `__replace__` method to the class, which is used to replace attributes in the `copy` module.""" + args = [ + attr.to_argument(self._cls.info, of="replace") + for attr in attributes + if attr.is_in_init + ] + add_method_to_class( + self._api, + self._cls, + "__replace__", + args=args, + return_type=fill_typevars(self._cls.info), + ) + + def _add_internal_replace_method(self, attributes: list[DataclassAttribute]) -> None: + """ + Stashes the signature 
of 'dataclasses.replace(...)' for this specific dataclass + to be used later whenever 'dataclasses.replace' is called for this dataclass. + """ + add_method_to_class( + self._api, + self._cls, + _INTERNAL_REPLACE_SYM_NAME, + args=[attr.to_argument(self._cls.info, of="replace") for attr in attributes], + return_type=NoneType(), + is_staticmethod=True, + ) + + def _add_internal_post_init_method(self, attributes: list[DataclassAttribute]) -> None: + add_method_to_class( + self._api, + self._cls, + _INTERNAL_POST_INIT_SYM_NAME, + args=[ + attr.to_argument(self._cls.info, of="__post_init__") + for attr in attributes + if attr.is_init_var + ], + return_type=NoneType(), + ) + + def add_slots( + self, info: TypeInfo, attributes: list[DataclassAttribute], *, correct_version: bool + ) -> None: + if not correct_version: + # This means that version is lower than `3.10`, + # it is just a non-existent argument for `dataclass` function. + self._api.fail( + 'Keyword argument "slots" for "dataclass" is only valid in Python 3.10 and higher', + self._reason, + ) + return + + generated_slots = {attr.name for attr in attributes} + if (info.slots is not None and info.slots != generated_slots) or info.names.get( + "__slots__" + ): + # This means we have a slots conflict. + # Class explicitly specifies a different `__slots__` field. + # And `@dataclass(slots=True)` is used. + # In runtime this raises a type error. + self._api.fail( + '"{}" both defines "__slots__" and is used with "slots=True"'.format( + self._cls.name + ), + self._cls, + ) + return + + if any(p.slots is None for p in info.mro[1:-1]): + # At least one type in mro (excluding `self` and `object`) + # does not have concrete `__slots__` defined. Ignoring. + return + + info.slots = generated_slots + + # Now, insert `.__slots__` attribute to class namespace: + slots_type = TupleType( + [self._api.named_type("builtins.str") for _ in generated_slots], + self._api.named_type("builtins.tuple"), + ) + add_attribute_to_class(self._api, self._cls, "__slots__", slots_type) + + def reset_init_only_vars(self, info: TypeInfo, attributes: list[DataclassAttribute]) -> None: + """Remove init-only vars from the class and reset init var declarations.""" + for attr in attributes: + if attr.is_init_var: + if attr.name in info.names: + del info.names[attr.name] + else: + # Nodes of superclass InitVars not used in __init__ cannot be reached. + assert attr.is_init_var + for stmt in info.defn.defs.body: + if isinstance(stmt, AssignmentStmt) and stmt.unanalyzed_type: + lvalue = stmt.lvalues[0] + if isinstance(lvalue, NameExpr) and lvalue.name == attr.name: + # Reset node so that another semantic analysis pass will + # recreate a symbol node for this attribute. + lvalue.node = None + + def _get_assignment_statements_from_if_statement( + self, stmt: IfStmt + ) -> Iterator[AssignmentStmt]: + for body in stmt.body: + if not body.is_unreachable: + yield from self._get_assignment_statements_from_block(body) + if stmt.else_body is not None and not stmt.else_body.is_unreachable: + yield from self._get_assignment_statements_from_block(stmt.else_body) + + def _get_assignment_statements_from_block(self, block: Block) -> Iterator[AssignmentStmt]: + for stmt in block.body: + if isinstance(stmt, AssignmentStmt): + yield stmt + elif isinstance(stmt, IfStmt): + yield from self._get_assignment_statements_from_if_statement(stmt) + + def collect_attributes(self) -> list[DataclassAttribute] | None: + """Collect all attributes declared in the dataclass and its parents. 
+ + All assignments of the form + + a: SomeType + b: SomeOtherType = ... + + are collected. + + Return None if some dataclass base class hasn't been processed + yet and thus we'll need to ask for another pass. + """ + cls = self._cls + + # First, collect attributes belonging to any class in the MRO, ignoring duplicates. + # + # We iterate through the MRO in reverse because attrs defined in the parent must appear + # earlier in the attributes list than attrs defined in the child. See: + # https://docs.python.org/3/library/dataclasses.html#inheritance + # + # However, we also want attributes defined in the subtype to override ones defined + # in the parent. We can implement this via a dict without disrupting the attr order + # because dicts preserve insertion order in Python 3.7+. + found_attrs: dict[str, DataclassAttribute] = {} + for info in reversed(cls.info.mro[1:-1]): + if "dataclass_tag" in info.metadata and "dataclass" not in info.metadata: + # We haven't processed the base class yet. Need another pass. + return None + if "dataclass" not in info.metadata: + continue + + # Each class depends on the set of attributes in its dataclass ancestors. + self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname)) + + for data in info.metadata["dataclass"]["attributes"]: + name: str = data["name"] + + attr = DataclassAttribute.deserialize(info, data, self._api) + # TODO: We shouldn't be performing type operations during the main + # semantic analysis pass, since some TypeInfo attributes might + # still be in flux. This should be performed in a later phase. + attr.expand_typevar_from_subtype(cls.info) + found_attrs[name] = attr + + sym_node = cls.info.names.get(name) + if sym_node and sym_node.node and not isinstance(sym_node.node, Var): + self._api.fail( + "Dataclass attribute may only be overridden by another attribute", + sym_node.node, + ) + + # Second, collect attributes belonging to the current class. + current_attr_names: set[str] = set() + kw_only = self._get_bool_arg("kw_only", self._spec.kw_only_default) + for stmt in self._get_assignment_statements_from_block(cls.defs): + # Any assignment that doesn't use the new type declaration + # syntax can be ignored out of hand. + if not stmt.new_syntax: + continue + + # a: int, b: str = 1, 'foo' is not supported syntax so we + # don't have to worry about it. + lhs = stmt.lvalues[0] + if not isinstance(lhs, NameExpr): + continue + + sym = cls.info.names.get(lhs.name) + if sym is None: + # There was probably a semantic analysis error. + continue + + node = sym.node + assert not isinstance(node, PlaceholderNode) + + if isinstance(node, TypeAlias): + self._api.fail( + ("Type aliases inside dataclass definitions are not supported at runtime"), + node, + ) + # Skip processing this node. This doesn't match the runtime behaviour, + # but the only alternative would be to modify the SymbolTable, + # and it's a little hairy to do that in a plugin. + continue + if isinstance(node, Decorator): + # This might be a property / field name clash. + # We will issue an error later. + continue + + assert isinstance(node, Var), node + + # x: ClassVar[int] is ignored by dataclasses. + if node.is_classvar: + continue + + # x: InitVar[int] is turned into x: int and is removed from the class. 
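# A quick sketch of the collection rules implemented here: fields from base dataclasses
# come first, a subclass may redeclare a field (it keeps its original position), and
# ClassVar annotations are not treated as fields at all. Names are illustrative.

from dataclasses import dataclass, fields
from typing import ClassVar


@dataclass
class Base:
    a: int
    kind: ClassVar[str] = "base"   # ClassVar: not an __init__ parameter


@dataclass
class Child(Base):
    b: int = 0
    a: int = 1                     # overrides the inherited field, keeps its position


print([f.name for f in fields(Child)])   # ['a', 'b']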
+ is_init_var = False + node_type = get_proper_type(node.type) + if ( + isinstance(node_type, Instance) + and node_type.type.fullname == "dataclasses.InitVar" + ): + is_init_var = True + node.type = node_type.args[0] + + if self._is_kw_only_type(node_type): + kw_only = True + + has_field_call, field_args = self._collect_field_args(stmt.rvalue) + + is_in_init_param = field_args.get("init") + if is_in_init_param is None: + is_in_init = self._get_default_init_value_for_field_specifier(stmt.rvalue) + else: + is_in_init = bool(self._api.parse_bool(is_in_init_param)) + + has_default = False + # Ensure that something like x: int = field() is rejected + # after an attribute with a default. + if has_field_call: + has_default = ( + "default" in field_args + or "default_factory" in field_args + # alias for default_factory defined in PEP 681 + or "factory" in field_args + ) + + # All other assignments are already type checked. + elif not isinstance(stmt.rvalue, TempNode): + has_default = True + + if not has_default and self._spec is _TRANSFORM_SPEC_FOR_DATACLASSES: + # Make all non-default dataclass attributes implicit because they are de-facto + # set on self in the generated __init__(), not in the class body. On the other + # hand, we don't know how custom dataclass transforms initialize attributes, + # so we don't treat them as implicit. This is required to support descriptors + # (https://github.com/python/mypy/issues/14868). + sym.implicit = True + + is_kw_only = kw_only + # Use the kw_only field arg if it is provided. Otherwise use the + # kw_only value from the decorator parameter. + field_kw_only_param = field_args.get("kw_only") + if field_kw_only_param is not None: + value = self._api.parse_bool(field_kw_only_param) + if value is not None: + is_kw_only = value + else: + self._api.fail('"kw_only" argument must be a boolean literal', stmt.rvalue) + + if sym.type is None and node.is_final and node.is_inferred: + # This is a special case, assignment like x: Final = 42 is classified + # annotated above, but mypy strips the `Final` turning it into x = 42. + # We do not support inferred types in dataclasses, so we can try inferring + # type for simple literals, and otherwise require an explicit type + # argument for Final[...]. + typ = self._api.analyze_simple_literal_type(stmt.rvalue, is_final=True) + if typ: + node.type = typ + else: + self._api.fail( + "Need type argument for Final[...] 
with non-literal default in dataclass", + stmt, + ) + node.type = AnyType(TypeOfAny.from_error) + + alias = None + if "alias" in field_args: + alias = self._api.parse_str_literal(field_args["alias"]) + if alias is None: + self._api.fail( + message_registry.DATACLASS_FIELD_ALIAS_MUST_BE_LITERAL, + stmt.rvalue, + code=errorcodes.LITERAL_REQ, + ) + + current_attr_names.add(lhs.name) + with state.strict_optional_set(self._api.options.strict_optional): + init_type = self._infer_dataclass_attr_init_type(sym, lhs.name, stmt) + found_attrs[lhs.name] = DataclassAttribute( + name=lhs.name, + alias=alias, + is_in_init=is_in_init, + is_init_var=is_init_var, + has_default=has_default, + line=stmt.line, + column=stmt.column, + type=init_type, + info=cls.info, + kw_only=is_kw_only, + is_neither_frozen_nor_nonfrozen=_has_direct_dataclass_transform_metaclass( + cls.info + ), + api=self._api, + ) + + all_attrs = list(found_attrs.values()) + all_attrs.sort(key=lambda a: a.kw_only) + + # Third, ensure that arguments without a default don't follow + # arguments that have a default and that the KW_ONLY sentinel + # is only provided once. + found_default = False + found_kw_sentinel = False + for attr in all_attrs: + # If we find any attribute that is_in_init, not kw_only, and that + # doesn't have a default after one that does have one, + # then that's an error. + if found_default and attr.is_in_init and not attr.has_default and not attr.kw_only: + # If the issue comes from merging different classes, report it + # at the class definition point. + context: Context = cls + if attr.name in current_attr_names: + context = Context(line=attr.line, column=attr.column) + self._api.fail( + "Attributes without a default cannot follow attributes with one", context + ) + + found_default = found_default or (attr.has_default and attr.is_in_init) + if found_kw_sentinel and self._is_kw_only_type(attr.type): + context = cls + if attr.name in current_attr_names: + context = Context(line=attr.line, column=attr.column) + self._api.fail( + "There may not be more than one field with the KW_ONLY type", context + ) + found_kw_sentinel = found_kw_sentinel or self._is_kw_only_type(attr.type) + return all_attrs + + def _freeze(self, attributes: list[DataclassAttribute]) -> None: + """Converts all attributes to @property methods in order to + emulate frozen classes. + """ + info = self._cls.info + for attr in attributes: + # Classes that directly specify a dataclass_transform metaclass must be neither frozen + # non non-frozen per PEP681. Though it is surprising, this means that attributes from + # such a class must be writable even if the rest of the class hierarchy is frozen. This + # matches the behavior of Pyright (the reference implementation). + if attr.is_neither_frozen_nor_nonfrozen: + continue + + sym_node = info.names.get(attr.name) + if sym_node is not None: + var = sym_node.node + if isinstance(var, Var): + if var.is_final: + continue # do not turn `Final` attrs to `@property` + var.is_property = True + else: + var = attr.to_var(info) + var.info = info + var.is_property = True + var._fullname = info.fullname + "." + var.name + info.names[var.name] = SymbolTableNode(MDEF, var) + + def _propertize_callables( + self, attributes: list[DataclassAttribute], settable: bool = True + ) -> None: + """Converts all attributes with callable types to @property methods. + + This avoids the typechecker getting confused and thinking that + `my_dataclass_instance.callable_attr(foo)` is going to receive a + `self` argument (it is not). 
+ + """ + info = self._cls.info + for attr in attributes: + if isinstance(get_proper_type(attr.type), CallableType): + var = attr.to_var(info) + var.info = info + var.is_property = True + var.is_settable_property = settable + var._fullname = info.fullname + "." + var.name + info.names[var.name] = SymbolTableNode(MDEF, var) + + def _is_kw_only_type(self, node: Type | None) -> bool: + """Checks if the type of the node is the KW_ONLY sentinel value.""" + if node is None: + return False + node_type = get_proper_type(node) + if not isinstance(node_type, Instance): + return False + return node_type.type.fullname == "dataclasses.KW_ONLY" + + def _add_dataclass_fields_magic_attribute(self) -> None: + attr_name = "__dataclass_fields__" + any_type = AnyType(TypeOfAny.explicit) + # For `dataclasses`, use the type `dict[str, Field[Any]]` for accuracy. For dataclass + # transforms, it's inaccurate to use `Field` since a given transform may use a completely + # different type (or none); fall back to `Any` there. + # + # In either case, we're aiming to match the Typeshed stub for `is_dataclass`, which expects + # the instance to have a `__dataclass_fields__` attribute of type `dict[str, Field[Any]]`. + if self._spec is _TRANSFORM_SPEC_FOR_DATACLASSES: + field_type = self._api.named_type_or_none("dataclasses.Field", [any_type]) or any_type + else: + field_type = any_type + attr_type = self._api.named_type( + "builtins.dict", [self._api.named_type("builtins.str"), field_type] + ) + var = Var(name=attr_name, type=attr_type) + var.info = self._cls.info + var._fullname = self._cls.info.fullname + "." + attr_name + var.is_classvar = True + self._cls.info.names[attr_name] = SymbolTableNode( + kind=MDEF, node=var, plugin_generated=True + ) + + def _collect_field_args(self, expr: Expression) -> tuple[bool, dict[str, Expression]]: + """Returns a tuple where the first value represents whether or not + the expression is a call to dataclass.field and the second is a + dictionary of the keyword arguments that field() was called with. + """ + if ( + isinstance(expr, CallExpr) + and isinstance(expr.callee, RefExpr) + and expr.callee.fullname in self._spec.field_specifiers + ): + # field() only takes keyword arguments. + args = {} + for name, arg, kind in zip(expr.arg_names, expr.args, expr.arg_kinds): + if not kind.is_named(): + if kind.is_named(star=True): + # This means that `field` is used with `**` unpacking, + # the best we can do for now is not to fail. + # TODO: we can infer what's inside `**` and try to collect it. + message = 'Unpacking **kwargs in "field()" is not supported' + elif self._spec is not _TRANSFORM_SPEC_FOR_DATACLASSES: + # dataclasses.field can only be used with keyword args, but this + # restriction is only enforced for the *standardized* arguments to + # dataclass_transform field specifiers. If this is not a + # dataclasses.dataclass class, we can just skip positional args safely. 
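# The runtime contract being checked here, in short: dataclasses.field() accepts keyword
# arguments only, and every generated dataclass exposes __dataclass_fields__, which is
# what is_dataclass() inspects. A small sketch with illustrative names:

from dataclasses import dataclass, field, is_dataclass


@dataclass
class Config:
    host: str
    retries: int = field(default=3)   # field(3) would raise TypeError at runtime


assert is_dataclass(Config)
print(sorted(Config.__dataclass_fields__))   # ['host', 'retries']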
+ continue + else: + message = '"field()" does not accept positional arguments' + self._api.fail(message, expr) + return True, {} + assert name is not None + args[name] = arg + return True, args + return False, {} + + def _get_bool_arg(self, name: str, default: bool) -> bool: + # Expressions are always CallExprs (either directly or via a wrapper like Decorator), so + # we can use the helpers from common + if isinstance(self._reason, Expression): + return _get_decorator_bool_argument( + ClassDefContext(self._cls, self._reason, self._api), name, default + ) + + # Subclass/metaclass use of `typing.dataclass_transform` reads the parameters from the + # class's keyword arguments (ie `class Subclass(Parent, kwarg1=..., kwarg2=...)`) + expression = self._cls.keywords.get(name) + if expression is not None: + return require_bool_literal_argument(self._api, expression, name, default) + return default + + def _get_default_init_value_for_field_specifier(self, call: Expression) -> bool: + """ + Find a default value for the `init` parameter of the specifier being called. If the + specifier's type signature includes an `init` parameter with a type of `Literal[True]` or + `Literal[False]`, return the appropriate boolean value from the literal. Otherwise, + fall back to the standard default of `True`. + """ + if not isinstance(call, CallExpr): + return True + + specifier_type = _get_callee_type(call) + if specifier_type is None: + return True + + parameter = specifier_type.argument_by_name("init") + if parameter is None: + return True + + literals = try_getting_literals_from_type(parameter.typ, bool, "builtins.bool") + if literals is None or len(literals) != 1: + return True + + return literals[0] + + def _infer_dataclass_attr_init_type( + self, sym: SymbolTableNode, name: str, context: Context + ) -> Type | None: + """Infer __init__ argument type for an attribute. + + In particular, possibly use the signature of __set__. + """ + default = sym.type + if sym.implicit: + return default + t = get_proper_type(sym.type) + + # Perform a simple-minded inference from the signature of __set__, if present. + # We can't use mypy.checkmember here, since this plugin runs before type checking. + # We only support some basic scanerios here, which is hopefully sufficient for + # the vast majority of use cases. + if not isinstance(t, Instance): + return default + setter = t.type.get("__set__") + if setter: + if isinstance(setter.node, FuncDef): + super_info = t.type.get_containing_type_info("__set__") + assert super_info + if setter.type: + setter_type = get_proper_type( + map_type_from_supertype(setter.type, t.type, super_info) + ) + else: + return AnyType(TypeOfAny.unannotated) + if isinstance(setter_type, CallableType) and setter_type.arg_kinds == [ + ARG_POS, + ARG_POS, + ARG_POS, + ]: + return expand_type_by_instance(setter_type.arg_types[2], t) + else: + self._api.fail( + f'Unsupported signature for "__set__" in "{t.type.name}"', context + ) + else: + self._api.fail(f'Unsupported "__set__" in "{t.type.name}"', context) + + return default + + +def add_dataclass_tag(info: TypeInfo) -> None: + # The value is ignored, only the existence matters. + info.metadata["dataclass_tag"] = {} + + +def dataclass_tag_callback(ctx: ClassDefContext) -> None: + """Record that we have a dataclass in the main semantic analysis pass. + + The later pass implemented by DataclassTransformer will use this + to detect dataclasses in base classes. 
+ """ + add_dataclass_tag(ctx.cls.info) + + +def dataclass_class_maker_callback(ctx: ClassDefContext) -> bool: + """Hooks into the class typechecking process to add support for dataclasses.""" + if any(i.is_named_tuple for i in ctx.cls.info.mro): + ctx.api.fail("A NamedTuple cannot be a dataclass", ctx=ctx.cls.info) + return True + transformer = DataclassTransformer( + ctx.cls, ctx.reason, _get_transform_spec(ctx.reason), ctx.api + ) + return transformer.transform() + + +def _get_transform_spec(reason: Expression) -> DataclassTransformSpec: + """Find the relevant transform parameters from the decorator/parent class/metaclass that + triggered the dataclasses plugin. + + Although the resulting DataclassTransformSpec is based on the typing.dataclass_transform + function, we also use it for traditional dataclasses.dataclass classes as well for simplicity. + In those cases, we return a default spec rather than one based on a call to + `typing.dataclass_transform`. + """ + if _is_dataclasses_decorator(reason): + return _TRANSFORM_SPEC_FOR_DATACLASSES + + spec = find_dataclass_transform_spec(reason) + assert spec is not None, ( + "trying to find dataclass transform spec, but reason is neither dataclasses.dataclass nor " + "decorated with typing.dataclass_transform" + ) + return spec + + +def _is_dataclasses_decorator(node: Node) -> bool: + if isinstance(node, CallExpr): + node = node.callee + if isinstance(node, RefExpr): + return node.fullname in dataclass_makers + return False + + +def _has_direct_dataclass_transform_metaclass(info: TypeInfo) -> bool: + return ( + info.declared_metaclass is not None + and info.declared_metaclass.type.dataclass_transform_spec is not None + ) + + +def _get_expanded_dataclasses_fields( + ctx: FunctionSigContext, typ: ProperType, display_typ: ProperType, parent_typ: ProperType +) -> list[CallableType] | None: + """ + For a given type, determine what dataclasses it can be: for each class, return the field types. + For generic classes, the field types are expanded. + If the type contains Any or a non-dataclass, returns None; in the latter case, also reports an error. + """ + if isinstance(typ, UnionType): + ret: list[CallableType] | None = [] + for item in typ.relevant_items(): + item = get_proper_type(item) + item_types = _get_expanded_dataclasses_fields(ctx, item, item, parent_typ) + if ret is not None and item_types is not None: + ret += item_types + else: + ret = None # but keep iterating to emit all errors + return ret + elif isinstance(typ, TypeVarType): + return _get_expanded_dataclasses_fields( + ctx, get_proper_type(typ.upper_bound), display_typ, parent_typ + ) + elif isinstance(typ, Instance): + replace_sym = typ.type.get_method(_INTERNAL_REPLACE_SYM_NAME) + if replace_sym is None: + return None + replace_sig = replace_sym.type + assert isinstance(replace_sig, ProperType) + assert isinstance(replace_sig, CallableType) + return [expand_type_by_instance(replace_sig, typ)] + else: + return None + + +# TODO: we can potentially get the function signature hook to allow returning a union +# and leave this to the regular machinery of resolving a union of callables +# (https://github.com/python/mypy/issues/15457) +def _meet_replace_sigs(sigs: list[CallableType]) -> CallableType: + """ + Produces the lowest bound of the 'replace' signatures of multiple dataclasses. 
+ """ + args = { + name: (typ, kind) + for name, typ, kind in zip(sigs[0].arg_names, sigs[0].arg_types, sigs[0].arg_kinds) + } + + for sig in sigs[1:]: + sig_args = { + name: (typ, kind) + for name, typ, kind in zip(sig.arg_names, sig.arg_types, sig.arg_kinds) + } + for name in (*args.keys(), *sig_args.keys()): + sig_typ, sig_kind = args.get(name, (UninhabitedType(), ARG_NAMED_OPT)) + sig2_typ, sig2_kind = sig_args.get(name, (UninhabitedType(), ARG_NAMED_OPT)) + args[name] = ( + meet_types(sig_typ, sig2_typ), + ARG_NAMED_OPT if sig_kind == sig2_kind == ARG_NAMED_OPT else ARG_NAMED, + ) + + return sigs[0].copy_modified( + arg_names=list(args.keys()), + arg_types=[typ for typ, _ in args.values()], + arg_kinds=[kind for _, kind in args.values()], + ) + + +def replace_function_sig_callback(ctx: FunctionSigContext) -> CallableType: + """ + Returns a signature for the 'dataclasses.replace' function that's dependent on the type + of the first positional argument. + """ + if len(ctx.args) != 2: + # Ideally the name and context should be callee's, but we don't have it in FunctionSigContext. + ctx.api.fail(f'"{ctx.default_signature.name}" has unexpected type annotation', ctx.context) + return ctx.default_signature + + if len(ctx.args[0]) != 1: + return ctx.default_signature # leave it to the type checker to complain + + obj_arg = ctx.args[0][0] + obj_type = get_proper_type(ctx.api.get_expression_type(obj_arg)) + inst_type_str = format_type_bare(obj_type, ctx.api.options) + + replace_sigs = _get_expanded_dataclasses_fields(ctx, obj_type, obj_type, obj_type) + if replace_sigs is None: + return ctx.default_signature + replace_sig = _meet_replace_sigs(replace_sigs) + + return replace_sig.copy_modified( + arg_names=[None, *replace_sig.arg_names], + arg_kinds=[ARG_POS, *replace_sig.arg_kinds], + arg_types=[obj_type, *replace_sig.arg_types], + ret_type=obj_type, + fallback=ctx.default_signature.fallback, + name=f"{ctx.default_signature.name} of {inst_type_str}", + ) + + +def is_processed_dataclass(info: TypeInfo) -> bool: + return bool(info) and "dataclass" in info.metadata + + +def check_post_init(api: TypeChecker, defn: FuncItem, info: TypeInfo) -> None: + if defn.type is None: + return + assert isinstance(defn.type, FunctionLike) + + ideal_sig_method = info.get_method(_INTERNAL_POST_INIT_SYM_NAME) + assert ideal_sig_method is not None and ideal_sig_method.type is not None + ideal_sig = ideal_sig_method.type + assert isinstance(ideal_sig, ProperType) # we set it ourselves + assert isinstance(ideal_sig, CallableType) + ideal_sig = ideal_sig.copy_modified(name="__post_init__") + + api.check_override( + override=defn.type, + original=ideal_sig, + name="__post_init__", + name_in_super="__post_init__", + supertype="dataclass", + original_class_or_static=False, + override_class_or_static=False, + node=defn, + ) diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/default.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/plugins/default.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..4875487 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/plugins/default.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/default.py b/.venv/lib/python3.12/site-packages/mypy/plugins/default.py new file mode 100644 index 0000000..7a58307 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/plugins/default.py @@ -0,0 +1,596 @@ +from __future__ import annotations + +from functools import partial +from 
typing import Callable, Final + +import mypy.errorcodes as codes +from mypy import message_registry +from mypy.nodes import DictExpr, Expression, IntExpr, StrExpr, UnaryExpr +from mypy.plugin import ( + AttributeContext, + ClassDefContext, + FunctionContext, + FunctionSigContext, + MethodContext, + MethodSigContext, + Plugin, +) +from mypy.plugins.attrs import ( + attr_class_maker_callback, + attr_class_makers, + attr_dataclass_makers, + attr_define_makers, + attr_frozen_makers, + attr_tag_callback, + evolve_function_sig_callback, + fields_function_sig_callback, +) +from mypy.plugins.common import try_getting_str_literals +from mypy.plugins.constants import ( + ENUM_NAME_ACCESS, + ENUM_VALUE_ACCESS, + SINGLEDISPATCH_CALLABLE_CALL_METHOD, + SINGLEDISPATCH_REGISTER_CALLABLE_CALL_METHOD, + SINGLEDISPATCH_REGISTER_METHOD, +) +from mypy.plugins.ctypes import ( + array_constructor_callback, + array_getitem_callback, + array_iter_callback, + array_raw_callback, + array_setitem_callback, + array_value_callback, +) +from mypy.plugins.dataclasses import ( + dataclass_class_maker_callback, + dataclass_makers, + dataclass_tag_callback, + replace_function_sig_callback, +) +from mypy.plugins.enums import enum_member_callback, enum_name_callback, enum_value_callback +from mypy.plugins.functools import ( + functools_total_ordering_maker_callback, + functools_total_ordering_makers, + partial_call_callback, + partial_new_callback, +) +from mypy.plugins.singledispatch import ( + call_singledispatch_function_after_register_argument, + call_singledispatch_function_callback, + create_singledispatch_function_callback, + singledispatch_register_callback, +) +from mypy.subtypes import is_subtype +from mypy.typeops import is_literal_type_like, make_simplified_union +from mypy.types import ( + TPDICT_FB_NAMES, + AnyType, + CallableType, + FunctionLike, + Instance, + LiteralType, + NoneType, + TupleType, + Type, + TypedDictType, + TypeOfAny, + TypeVarType, + UnionType, + get_proper_type, + get_proper_types, +) + +TD_SETDEFAULT_NAMES: Final = {n + ".setdefault" for n in TPDICT_FB_NAMES} +TD_POP_NAMES: Final = {n + ".pop" for n in TPDICT_FB_NAMES} +TD_DELITEM_NAMES: Final = {n + ".__delitem__" for n in TPDICT_FB_NAMES} + +TD_UPDATE_METHOD_NAMES: Final = ( + {n + ".update" for n in TPDICT_FB_NAMES} + | {n + ".__or__" for n in TPDICT_FB_NAMES} + | {n + ".__ror__" for n in TPDICT_FB_NAMES} + | {n + ".__ior__" for n in TPDICT_FB_NAMES} +) + + +class DefaultPlugin(Plugin): + """Type checker plugin that is enabled by default.""" + + def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None: + if fullname == "_ctypes.Array": + return array_constructor_callback + elif fullname == "functools.singledispatch": + return create_singledispatch_function_callback + elif fullname == "functools.partial": + return partial_new_callback + elif fullname == "enum.member": + return enum_member_callback + return None + + def get_function_signature_hook( + self, fullname: str + ) -> Callable[[FunctionSigContext], FunctionLike] | None: + if fullname in ("attr.evolve", "attrs.evolve", "attr.assoc", "attrs.assoc"): + return evolve_function_sig_callback + elif fullname in ("attr.fields", "attrs.fields"): + return fields_function_sig_callback + elif fullname == "dataclasses.replace": + return replace_function_sig_callback + return None + + def get_method_signature_hook( + self, fullname: str + ) -> Callable[[MethodSigContext], FunctionLike] | None: + if fullname == "typing.Mapping.get": + return 
typed_dict_get_signature_callback + elif fullname in TD_SETDEFAULT_NAMES: + return typed_dict_setdefault_signature_callback + elif fullname in TD_POP_NAMES: + return typed_dict_pop_signature_callback + elif fullname == "_ctypes.Array.__setitem__": + return array_setitem_callback + elif fullname == SINGLEDISPATCH_CALLABLE_CALL_METHOD: + return call_singledispatch_function_callback + elif fullname in TD_UPDATE_METHOD_NAMES: + return typed_dict_update_signature_callback + return None + + def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None: + if fullname == "typing.Mapping.get": + return typed_dict_get_callback + elif fullname == "builtins.int.__pow__": + return int_pow_callback + elif fullname == "builtins.int.__neg__": + return int_neg_callback + elif fullname == "builtins.int.__pos__": + return int_pos_callback + elif fullname in ("builtins.tuple.__mul__", "builtins.tuple.__rmul__"): + return tuple_mul_callback + elif fullname in TD_SETDEFAULT_NAMES: + return typed_dict_setdefault_callback + elif fullname in TD_POP_NAMES: + return typed_dict_pop_callback + elif fullname in TD_DELITEM_NAMES: + return typed_dict_delitem_callback + elif fullname == "_ctypes.Array.__getitem__": + return array_getitem_callback + elif fullname == "_ctypes.Array.__iter__": + return array_iter_callback + elif fullname == SINGLEDISPATCH_REGISTER_METHOD: + return singledispatch_register_callback + elif fullname == SINGLEDISPATCH_REGISTER_CALLABLE_CALL_METHOD: + return call_singledispatch_function_after_register_argument + elif fullname == "functools.partial.__call__": + return partial_call_callback + return None + + def get_attribute_hook(self, fullname: str) -> Callable[[AttributeContext], Type] | None: + if fullname == "_ctypes.Array.value": + return array_value_callback + elif fullname == "_ctypes.Array.raw": + return array_raw_callback + elif fullname in ENUM_NAME_ACCESS: + return enum_name_callback + elif fullname in ENUM_VALUE_ACCESS: + return enum_value_callback + return None + + def get_class_decorator_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: + # These dataclass and attrs hooks run in the main semantic analysis pass + # and only tag known dataclasses/attrs classes, so that the second + # hooks (in get_class_decorator_hook_2) can detect dataclasses/attrs classes + # in the MRO. + if fullname in dataclass_makers: + return dataclass_tag_callback + if ( + fullname in attr_class_makers + or fullname in attr_dataclass_makers + or fullname in attr_frozen_makers + or fullname in attr_define_makers + ): + return attr_tag_callback + return None + + def get_class_decorator_hook_2( + self, fullname: str + ) -> Callable[[ClassDefContext], bool] | None: + if fullname in dataclass_makers: + return dataclass_class_maker_callback + elif fullname in functools_total_ordering_makers: + return functools_total_ordering_maker_callback + elif fullname in attr_class_makers: + return attr_class_maker_callback + elif fullname in attr_dataclass_makers: + return partial(attr_class_maker_callback, auto_attribs_default=True) + elif fullname in attr_frozen_makers: + return partial( + attr_class_maker_callback, auto_attribs_default=None, frozen_default=True + ) + elif fullname in attr_define_makers: + return partial( + attr_class_maker_callback, auto_attribs_default=None, slots_default=True + ) + return None + + +def typed_dict_get_signature_callback(ctx: MethodSigContext) -> CallableType: + """Try to infer a better signature type for TypedDict.get. 
+ + This is used to get better type context for the second argument that + depends on a TypedDict value type. + """ + signature = ctx.default_signature + if ( + isinstance(ctx.type, TypedDictType) + and len(ctx.args) == 2 + and len(ctx.args[0]) == 1 + and isinstance(ctx.args[0][0], StrExpr) + and len(signature.arg_types) == 2 + and len(signature.variables) == 1 + and len(ctx.args[1]) == 1 + ): + key = ctx.args[0][0].value + value_type = get_proper_type(ctx.type.items.get(key)) + ret_type = signature.ret_type + if value_type: + default_arg = ctx.args[1][0] + if ( + isinstance(value_type, TypedDictType) + and isinstance(default_arg, DictExpr) + and len(default_arg.items) == 0 + ): + # Caller has empty dict {} as default for typed dict. + value_type = value_type.copy_modified(required_keys=set()) + # Tweak the signature to include the value type as context. It's + # only needed for type inference since there's a union with a type + # variable that accepts everything. + tv = signature.variables[0] + assert isinstance(tv, TypeVarType) + return signature.copy_modified( + arg_types=[signature.arg_types[0], make_simplified_union([value_type, tv])], + ret_type=ret_type, + ) + return signature + + +def typed_dict_get_callback(ctx: MethodContext) -> Type: + """Infer a precise return type for TypedDict.get with literal first argument.""" + if ( + isinstance(ctx.type, TypedDictType) + and len(ctx.arg_types) >= 1 + and len(ctx.arg_types[0]) == 1 + ): + keys = try_getting_str_literals(ctx.args[0][0], ctx.arg_types[0][0]) + if keys is None: + return ctx.default_return_type + + default_type: Type + default_arg: Expression | None + if len(ctx.arg_types) <= 1 or not ctx.arg_types[1]: + default_arg = None + default_type = NoneType() + elif len(ctx.arg_types[1]) == 1 and len(ctx.args[1]) == 1: + default_arg = ctx.args[1][0] + default_type = ctx.arg_types[1][0] + else: + return ctx.default_return_type + + output_types: list[Type] = [] + for key in keys: + value_type: Type | None = ctx.type.items.get(key) + if value_type is None: + return ctx.default_return_type + + if key in ctx.type.required_keys: + output_types.append(value_type) + else: + # HACK to deal with get(key, {}) + if ( + isinstance(default_arg, DictExpr) + and len(default_arg.items) == 0 + and isinstance(vt := get_proper_type(value_type), TypedDictType) + ): + output_types.append(vt.copy_modified(required_keys=set())) + else: + output_types.append(value_type) + output_types.append(default_type) + + # for nicer reveal_type, put default at the end, if it is present + if default_type in output_types: + output_types = [t for t in output_types if t != default_type] + [default_type] + return make_simplified_union(output_types) + return ctx.default_return_type + + +def typed_dict_pop_signature_callback(ctx: MethodSigContext) -> CallableType: + """Try to infer a better signature type for TypedDict.pop. + + This is used to get better type context for the second argument that + depends on a TypedDict value type. + """ + signature = ctx.default_signature + str_type = ctx.api.named_generic_type("builtins.str", []) + if ( + isinstance(ctx.type, TypedDictType) + and len(ctx.args) == 2 + and len(ctx.args[0]) == 1 + and isinstance(ctx.args[0][0], StrExpr) + and len(signature.arg_types) == 2 + and len(signature.variables) == 1 + and len(ctx.args[1]) == 1 + ): + key = ctx.args[0][0].value + value_type = ctx.type.items.get(key) + if value_type: + # Tweak the signature to include the value type as context. 
It's + # only needed for type inference since there's a union with a type + # variable that accepts everything. + tv = signature.variables[0] + assert isinstance(tv, TypeVarType) + typ = make_simplified_union([value_type, tv]) + return signature.copy_modified(arg_types=[str_type, typ], ret_type=typ) + return signature.copy_modified(arg_types=[str_type, signature.arg_types[1]]) + + +def typed_dict_pop_callback(ctx: MethodContext) -> Type: + """Type check and infer a precise return type for TypedDict.pop.""" + if ( + isinstance(ctx.type, TypedDictType) + and len(ctx.arg_types) >= 1 + and len(ctx.arg_types[0]) == 1 + ): + key_expr = ctx.args[0][0] + keys = try_getting_str_literals(key_expr, ctx.arg_types[0][0]) + if keys is None: + ctx.api.fail( + message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, + key_expr, + code=codes.LITERAL_REQ, + ) + return AnyType(TypeOfAny.from_error) + + value_types = [] + for key in keys: + if key in ctx.type.required_keys or key in ctx.type.readonly_keys: + ctx.api.msg.typeddict_key_cannot_be_deleted(ctx.type, key, key_expr) + + value_type = ctx.type.items.get(key) + if value_type: + value_types.append(value_type) + else: + ctx.api.msg.typeddict_key_not_found(ctx.type, key, key_expr) + return AnyType(TypeOfAny.from_error) + + if len(ctx.args[1]) == 0: + return make_simplified_union(value_types) + elif len(ctx.arg_types) == 2 and len(ctx.arg_types[1]) == 1 and len(ctx.args[1]) == 1: + return make_simplified_union([*value_types, ctx.arg_types[1][0]]) + return ctx.default_return_type + + +def typed_dict_setdefault_signature_callback(ctx: MethodSigContext) -> CallableType: + """Try to infer a better signature type for TypedDict.setdefault. + + This is used to get better type context for the second argument that + depends on a TypedDict value type. + """ + signature = ctx.default_signature + str_type = ctx.api.named_generic_type("builtins.str", []) + if ( + isinstance(ctx.type, TypedDictType) + and len(ctx.args) == 2 + and len(ctx.args[0]) == 1 + and isinstance(ctx.args[0][0], StrExpr) + and len(signature.arg_types) == 2 + and len(ctx.args[1]) == 1 + ): + key = ctx.args[0][0].value + value_type = ctx.type.items.get(key) + if value_type: + return signature.copy_modified(arg_types=[str_type, value_type]) + return signature.copy_modified(arg_types=[str_type, signature.arg_types[1]]) + + +def typed_dict_setdefault_callback(ctx: MethodContext) -> Type: + """Type check TypedDict.setdefault and infer a precise return type.""" + if ( + isinstance(ctx.type, TypedDictType) + and len(ctx.arg_types) == 2 + and len(ctx.arg_types[0]) == 1 + and len(ctx.arg_types[1]) == 1 + ): + key_expr = ctx.args[0][0] + keys = try_getting_str_literals(key_expr, ctx.arg_types[0][0]) + if keys is None: + ctx.api.fail( + message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, + key_expr, + code=codes.LITERAL_REQ, + ) + return AnyType(TypeOfAny.from_error) + + assigned_readonly_keys = ctx.type.readonly_keys & set(keys) + if assigned_readonly_keys: + ctx.api.msg.readonly_keys_mutated(assigned_readonly_keys, context=key_expr) + + default_type = ctx.arg_types[1][0] + default_expr = ctx.args[1][0] + + value_types = [] + for key in keys: + value_type = ctx.type.items.get(key) + + if value_type is None: + ctx.api.msg.typeddict_key_not_found(ctx.type, key, key_expr) + return AnyType(TypeOfAny.from_error) + + # The signature_callback above can't always infer the right signature + # (e.g. 
when the expression is a variable that happens to be a Literal str) + # so we need to handle the check ourselves here and make sure the provided + # default can be assigned to all key-value pairs we're updating. + if not is_subtype(default_type, value_type): + ctx.api.msg.typeddict_setdefault_arguments_inconsistent( + default_type, value_type, default_expr + ) + return AnyType(TypeOfAny.from_error) + + value_types.append(value_type) + + return make_simplified_union(value_types) + return ctx.default_return_type + + +def typed_dict_delitem_callback(ctx: MethodContext) -> Type: + """Type check TypedDict.__delitem__.""" + if ( + isinstance(ctx.type, TypedDictType) + and len(ctx.arg_types) == 1 + and len(ctx.arg_types[0]) == 1 + ): + key_expr = ctx.args[0][0] + keys = try_getting_str_literals(key_expr, ctx.arg_types[0][0]) + if keys is None: + ctx.api.fail( + message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, + key_expr, + code=codes.LITERAL_REQ, + ) + return AnyType(TypeOfAny.from_error) + + for key in keys: + if key in ctx.type.required_keys or key in ctx.type.readonly_keys: + ctx.api.msg.typeddict_key_cannot_be_deleted(ctx.type, key, key_expr) + elif key not in ctx.type.items: + ctx.api.msg.typeddict_key_not_found(ctx.type, key, key_expr) + return ctx.default_return_type + + +_TP_DICT_MUTATING_METHODS: Final = frozenset({"update of TypedDict", "__ior__ of TypedDict"}) + + +def typed_dict_update_signature_callback(ctx: MethodSigContext) -> CallableType: + """Try to infer a better signature type for methods that update `TypedDict`. + + This includes: `TypedDict.update`, `TypedDict.__or__`, `TypedDict.__ror__`, + and `TypedDict.__ior__`. + """ + signature = ctx.default_signature + if isinstance(ctx.type, TypedDictType) and len(signature.arg_types) == 1: + arg_type = get_proper_type(signature.arg_types[0]) + if not isinstance(arg_type, TypedDictType): + return signature + arg_type = arg_type.as_anonymous() + arg_type = arg_type.copy_modified(required_keys=set()) + if ctx.args and ctx.args[0]: + if signature.name in _TP_DICT_MUTATING_METHODS: + # If we want to mutate this object in place, we need to set this flag, + # it will trigger an extra check in TypedDict's checker. + arg_type.to_be_mutated = True + with ctx.api.msg.filter_errors( + filter_errors=lambda name, info: info.code != codes.TYPEDDICT_READONLY_MUTATED, + save_filtered_errors=True, + ): + inferred = get_proper_type( + ctx.api.get_expression_type(ctx.args[0][0], type_context=arg_type) + ) + if arg_type.to_be_mutated: + arg_type.to_be_mutated = False # Done! 
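+ # Illustrative example (editor's sketch, not upstream code): for
+ #     class TD(TypedDict):
+ #         x: int
+ #         y: str
+ # a call `td.update({"x": 1})` builds an argument type below whose only
+ # required key is "x", so this partial update type-checks against the
+ # narrowed signature.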
+ possible_tds = [] + if isinstance(inferred, TypedDictType): + possible_tds = [inferred] + elif isinstance(inferred, UnionType): + possible_tds = [ + t + for t in get_proper_types(inferred.relevant_items()) + if isinstance(t, TypedDictType) + ] + items = [] + for td in possible_tds: + item = arg_type.copy_modified( + required_keys=(arg_type.required_keys | td.required_keys) + & arg_type.items.keys() + ) + if not ctx.api.options.extra_checks: + item = item.copy_modified(item_names=list(td.items)) + items.append(item) + if items: + arg_type = make_simplified_union(items) + return signature.copy_modified(arg_types=[arg_type]) + return signature + + +def int_pow_callback(ctx: MethodContext) -> Type: + """Infer a more precise return type for int.__pow__.""" + # int.__pow__ has an optional modulo argument, + # so we expect 2 argument positions + if len(ctx.arg_types) == 2 and len(ctx.arg_types[0]) == 1 and len(ctx.arg_types[1]) == 0: + arg = ctx.args[0][0] + if isinstance(arg, IntExpr): + exponent = arg.value + elif isinstance(arg, UnaryExpr) and arg.op == "-" and isinstance(arg.expr, IntExpr): + exponent = -arg.expr.value + else: + # Right operand not an int literal or a negated literal -- give up. + return ctx.default_return_type + if exponent >= 0: + return ctx.api.named_generic_type("builtins.int", []) + else: + return ctx.api.named_generic_type("builtins.float", []) + return ctx.default_return_type + + +def int_neg_callback(ctx: MethodContext, multiplier: int = -1) -> Type: + """Infer a more precise return type for int.__neg__ and int.__pos__. + + This is mainly used to infer the return type as LiteralType + if the original underlying object is a LiteralType object. + """ + if isinstance(ctx.type, Instance) and ctx.type.last_known_value is not None: + value = ctx.type.last_known_value.value + fallback = ctx.type.last_known_value.fallback + if isinstance(value, int): + if is_literal_type_like(ctx.api.type_context[-1]): + return LiteralType(value=multiplier * value, fallback=fallback) + else: + return ctx.type.copy_modified( + last_known_value=LiteralType( + value=multiplier * value, + fallback=fallback, + line=ctx.type.line, + column=ctx.type.column, + ) + ) + elif isinstance(ctx.type, LiteralType): + value = ctx.type.value + fallback = ctx.type.fallback + if isinstance(value, int): + return LiteralType(value=multiplier * value, fallback=fallback) + return ctx.default_return_type + + +def int_pos_callback(ctx: MethodContext) -> Type: + """Infer a more precise return type for int.__pos__. + + This is identical to __neg__, except the value is not inverted. + """ + return int_neg_callback(ctx, +1) + + +def tuple_mul_callback(ctx: MethodContext) -> Type: + """Infer a more precise return type for tuple.__mul__ and tuple.__rmul__. 
+ + This is used to return a specific sized tuple if multiplied by Literal int + """ + if not isinstance(ctx.type, TupleType): + return ctx.default_return_type + + arg_type = get_proper_type(ctx.arg_types[0][0]) + if isinstance(arg_type, Instance) and arg_type.last_known_value is not None: + value = arg_type.last_known_value.value + if isinstance(value, int): + return ctx.type.copy_modified(items=ctx.type.items * value) + elif isinstance(arg_type, LiteralType): + value = arg_type.value + if isinstance(value, int): + return ctx.type.copy_modified(items=ctx.type.items * value) + + return ctx.default_return_type diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/enums.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/plugins/enums.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..1fe875a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/plugins/enums.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/enums.py b/.venv/lib/python3.12/site-packages/mypy/plugins/enums.py new file mode 100644 index 0000000..0be2e08 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/plugins/enums.py @@ -0,0 +1,299 @@ +""" +This file contains a variety of plugins for refining how mypy infers types of +expressions involving Enums. + +Currently, this file focuses on providing better inference for expressions like +'SomeEnum.FOO.name' and 'SomeEnum.FOO.value'. Note that the type of both expressions +will vary depending on exactly which instance of SomeEnum we're looking at. + +Note that this file does *not* contain all special-cased logic related to enums: +we actually bake some of it directly in to the semantic analysis layer (see +semanal_enum.py). +""" + +from __future__ import annotations + +from collections.abc import Iterable, Sequence +from typing import TypeVar, cast + +import mypy.plugin # To avoid circular imports. +from mypy.checker_shared import TypeCheckerSharedApi +from mypy.nodes import TypeInfo, Var +from mypy.subtypes import is_equivalent +from mypy.typeops import fixup_partial_type, make_simplified_union +from mypy.types import ( + ELLIPSIS_TYPE_NAMES, + CallableType, + Instance, + LiteralType, + ProperType, + Type, + get_proper_type, + is_named_instance, +) + + +def enum_name_callback(ctx: mypy.plugin.AttributeContext) -> Type: + """This plugin refines the 'name' attribute in enums to act as if + they were declared to be final. + + For example, the expression 'MyEnum.FOO.name' normally is inferred + to be of type 'str'. + + This plugin will instead make the inferred type be a 'str' where the + last known value is 'Literal["FOO"]'. This means it would be legal to + use 'MyEnum.FOO.name' in contexts that expect a Literal type, just like + any other Final variable or attribute. + + This plugin assumes that the provided context is an attribute access + matching one of the strings found in 'ENUM_NAME_ACCESS'. + """ + enum_field_name = _extract_underlying_field_name(ctx.type) + if enum_field_name is None: + return ctx.default_attr_type + else: + str_type = ctx.api.named_generic_type("builtins.str", []) + literal_type = LiteralType(enum_field_name, fallback=str_type) + return str_type.copy_modified(last_known_value=literal_type) + + +_T = TypeVar("_T") + + +def _first(it: Iterable[_T]) -> _T | None: + """Return the first value from any iterable. + + Returns ``None`` if the iterable is empty. 
+ """ + for val in it: + return val + return None + + +def _infer_value_type_with_auto_fallback( + ctx: mypy.plugin.AttributeContext, proper_type: ProperType | None +) -> Type | None: + """Figure out the type of an enum value accounting for `auto()`. + + This method is a no-op for a `None` proper_type and also in the case where + the type is not "enum.auto" + """ + if proper_type is None: + return None + proper_type = get_proper_type(fixup_partial_type(proper_type)) + # Enums in stubs may have ... instead of actual values. If `_value_` is annotated + # (manually or inherited from IntEnum, for example), it is a more reasonable guess + # than literal ellipsis type. + if ( + _is_defined_in_stub(ctx) + and isinstance(proper_type, Instance) + and proper_type.type.fullname in ELLIPSIS_TYPE_NAMES + and isinstance(ctx.type, Instance) + ): + value_type = ctx.type.type.get("_value_") + if value_type is not None and isinstance(var := value_type.node, Var): + return var.type + return proper_type + if not (isinstance(proper_type, Instance) and proper_type.type.fullname == "enum.auto"): + if is_named_instance(proper_type, "enum.member") and proper_type.args: + return proper_type.args[0] + return proper_type + assert isinstance(ctx.type, Instance), "An incorrect ctx.type was passed." + info = ctx.type.type + # Find the first _generate_next_value_ on the mro. We need to know + # if it is `Enum` because `Enum` types say that the return-value of + # `_generate_next_value_` is `Any`. In reality the default `auto()` + # returns an `int` (presumably the `Any` in typeshed is to make it + # easier to subclass and change the returned type). + type_with_gnv = _first(ti for ti in info.mro if ti.names.get("_generate_next_value_")) + if type_with_gnv is None: + return ctx.default_attr_type + + stnode = type_with_gnv.names["_generate_next_value_"] + + # This should be a `CallableType` + node_type = get_proper_type(stnode.type) + if isinstance(node_type, CallableType): + if type_with_gnv.fullname == "enum.Enum": + int_type = ctx.api.named_generic_type("builtins.int", []) + return int_type + return get_proper_type(node_type.ret_type) + return ctx.default_attr_type + + +def _is_defined_in_stub(ctx: mypy.plugin.AttributeContext) -> bool: + assert isinstance(ctx.api, TypeCheckerSharedApi) + return isinstance(ctx.type, Instance) and ctx.api.is_defined_in_stub(ctx.type) + + +def _implements_new(info: TypeInfo) -> bool: + """Check whether __new__ comes from enum.Enum or was implemented in a + subclass. In the latter case, we must infer Any as long as mypy can't infer + the type of _value_ from assignments in __new__. 
+ """ + type_with_new = _first( + ti + for ti in info.mro + if ti.names.get("__new__") and not ti.fullname.startswith("builtins.") + ) + if type_with_new is None: + return False + return type_with_new.fullname not in ("enum.Enum", "enum.IntEnum", "enum.StrEnum") + + +def enum_member_callback(ctx: mypy.plugin.FunctionContext) -> Type: + """By default `member(1)` will be inferred as `member[int]`, + we want to improve the inference to be `Literal[1]` here.""" + if ctx.arg_types and ctx.arg_types[0]: + arg = get_proper_type(ctx.arg_types[0][0]) + proper_return = get_proper_type(ctx.default_return_type) + if ( + isinstance(arg, Instance) + and arg.last_known_value + and isinstance(proper_return, Instance) + and len(proper_return.args) == 1 + ): + return proper_return.copy_modified(args=[arg]) + return ctx.default_return_type + + +def enum_value_callback(ctx: mypy.plugin.AttributeContext) -> Type: + """This plugin refines the 'value' attribute in enums to refer to + the original underlying value. For example, suppose we have the + following: + + class SomeEnum: + FOO = A() + BAR = B() + + By default, mypy will infer that 'SomeEnum.FOO.value' and + 'SomeEnum.BAR.value' both are of type 'Any'. This plugin refines + this inference so that mypy understands the expressions are + actually of types 'A' and 'B' respectively. This better reflects + the actual runtime behavior. + + This plugin works simply by looking up the original value assigned + to the enum. For example, when this plugin sees 'SomeEnum.BAR.value', + it will look up whatever type 'BAR' had in the SomeEnum TypeInfo and + use that as the inferred type of the overall expression. + + This plugin assumes that the provided context is an attribute access + matching one of the strings found in 'ENUM_VALUE_ACCESS'. + """ + enum_field_name = _extract_underlying_field_name(ctx.type) + if enum_field_name is None: + # We do not know the enum field name (perhaps it was passed to a + # function and we only know that it _is_ a member). All is not lost + # however, if we can prove that the all of the enum members have the + # same value-type, then it doesn't matter which member was passed in. + # The value-type is still known. + if isinstance(ctx.type, Instance): + info = ctx.type.type + + # As long as mypy doesn't understand attribute creation in __new__, + # there is no way to predict the value type if the enum class has a + # custom implementation + if _implements_new(info): + return ctx.default_attr_type + + stnodes = (info.get(name) for name in info.names) + + # Enums _can_ have methods, instance attributes, and `nonmember`s. + # Omit methods and attributes created by assigning to self.* + # for our value inference. + node_types = ( + get_proper_type(n.type) if n else None + for n in stnodes + if n is None or not n.implicit + ) + proper_types = [ + _infer_value_type_with_auto_fallback(ctx, t) + for t in node_types + if t is None + or (not isinstance(t, CallableType) and not is_named_instance(t, "enum.nonmember")) + ] + underlying_type = _first(proper_types) + if underlying_type is None: + return ctx.default_attr_type + + # At first we try to predict future `value` type if all other items + # have the same type. For example, `int`. + # If this is the case, we simply return this type. 
+ # See https://github.com/python/mypy/pull/9443 + all_same_value_type = all( + proper_type is not None and proper_type == underlying_type + for proper_type in proper_types + ) + if all_same_value_type: + if underlying_type is not None: + return underlying_type + + # But, after we started treating all `Enum` values as `Final`, + # we start to infer types in + # `item = 1` as `Literal[1]`, not just `int`. + # So, for example types in this `Enum` will all be different: + # + # class Ordering(IntEnum): + # one = 1 + # two = 2 + # three = 3 + # + # We will infer three `Literal` types here. + # They are not the same, but they are equivalent. + # So, we unify them to make sure `.value` prediction still works. + # Result will be `Literal[1] | Literal[2] | Literal[3]` for this case. + all_equivalent_types = all( + proper_type is not None and is_equivalent(proper_type, underlying_type) + for proper_type in proper_types + ) + if all_equivalent_types: + return make_simplified_union(cast(Sequence[Type], proper_types)) + return ctx.default_attr_type + + assert isinstance(ctx.type, Instance) + info = ctx.type.type + + # As long as mypy doesn't understand attribute creation in __new__, + # there is no way to predict the value type if the enum class has a + # custom implementation + if _implements_new(info): + return ctx.default_attr_type + + stnode = info.get(enum_field_name) + if stnode is None: + return ctx.default_attr_type + + underlying_type = _infer_value_type_with_auto_fallback(ctx, get_proper_type(stnode.type)) + if underlying_type is None: + return ctx.default_attr_type + + return underlying_type + + +def _extract_underlying_field_name(typ: Type) -> str | None: + """If the given type corresponds to some Enum instance, returns the + original name of that enum. For example, if we receive in the type + corresponding to 'SomeEnum.FOO', we return the string "SomeEnum.Foo". + + This helper takes advantage of the fact that Enum instances are valid + to use inside Literal[...] types. An expression like 'SomeEnum.FOO' is + actually represented by an Instance type with a Literal enum fallback. + + We can examine this Literal fallback to retrieve the string. + """ + typ = get_proper_type(typ) + if not isinstance(typ, Instance): + return None + + if not typ.type.is_enum: + return None + + underlying_literal = typ.last_known_value + if underlying_literal is None: + return None + + # The checks above have verified this LiteralType is representing an enum value, + # which means the 'value' field is guaranteed to be the name of the enum field + # as a string. 
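+ # Illustrative example (editor's note, not upstream code): the expression
+ # `SomeEnum.FOO` is typed as an Instance of SomeEnum whose last_known_value
+ # is LiteralType("FOO", fallback=<SomeEnum instance>), so this helper
+ # returns the member name "FOO".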
+ assert isinstance(underlying_literal.value, str) + return underlying_literal.value diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/functools.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/plugins/functools.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..b4bfba8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/plugins/functools.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/functools.py b/.venv/lib/python3.12/site-packages/mypy/plugins/functools.py new file mode 100644 index 0000000..c8b370f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/plugins/functools.py @@ -0,0 +1,395 @@ +"""Plugin for supporting the functools standard library module.""" + +from __future__ import annotations + +from typing import Final, NamedTuple + +import mypy.checker +import mypy.plugin +import mypy.semanal +from mypy.argmap import map_actuals_to_formals +from mypy.erasetype import erase_typevars +from mypy.nodes import ( + ARG_POS, + ARG_STAR2, + SYMBOL_FUNCBASE_TYPES, + ArgKind, + Argument, + CallExpr, + NameExpr, + Var, +) +from mypy.plugins.common import add_method_to_class +from mypy.typeops import get_all_type_vars +from mypy.types import ( + AnyType, + CallableType, + Instance, + Overloaded, + ParamSpecFlavor, + ParamSpecType, + Type, + TypeOfAny, + TypeVarType, + UnboundType, + UnionType, + get_proper_type, +) + +functools_total_ordering_makers: Final = {"functools.total_ordering"} + +_ORDERING_METHODS: Final = {"__lt__", "__le__", "__gt__", "__ge__"} + +PARTIAL: Final = "functools.partial" + + +class _MethodInfo(NamedTuple): + is_static: bool + type: CallableType + + +def functools_total_ordering_maker_callback( + ctx: mypy.plugin.ClassDefContext, auto_attribs_default: bool = False +) -> bool: + """Add dunder methods to classes decorated with functools.total_ordering.""" + comparison_methods = _analyze_class(ctx) + if not comparison_methods: + ctx.api.fail( + 'No ordering operation defined when using "functools.total_ordering": < > <= >=', + ctx.reason, + ) + return True + + # prefer __lt__ to __le__ to __gt__ to __ge__ + root = max(comparison_methods, key=lambda k: (comparison_methods[k] is None, k)) + root_method = comparison_methods[root] + if not root_method: + # None of the defined comparison methods can be analysed + return True + + other_type = _find_other_type(root_method) + bool_type = ctx.api.named_type("builtins.bool") + ret_type: Type = bool_type + if root_method.type.ret_type != ctx.api.named_type("builtins.bool"): + proper_ret_type = get_proper_type(root_method.type.ret_type) + if not ( + isinstance(proper_ret_type, UnboundType) + and proper_ret_type.name.split(".")[-1] == "bool" + ): + ret_type = AnyType(TypeOfAny.implementation_artifact) + for additional_op in _ORDERING_METHODS: + # Either the method is not implemented + # or has an unknown signature that we can now extrapolate. 
+ if not comparison_methods.get(additional_op): + args = [Argument(Var("other", other_type), other_type, None, ARG_POS)] + add_method_to_class(ctx.api, ctx.cls, additional_op, args, ret_type) + + return True + + +def _find_other_type(method: _MethodInfo) -> Type: + """Find the type of the ``other`` argument in a comparison method.""" + first_arg_pos = 0 if method.is_static else 1 + cur_pos_arg = 0 + other_arg = None + for arg_kind, arg_type in zip(method.type.arg_kinds, method.type.arg_types): + if arg_kind.is_positional(): + if cur_pos_arg == first_arg_pos: + other_arg = arg_type + break + + cur_pos_arg += 1 + elif arg_kind != ARG_STAR2: + other_arg = arg_type + break + + if other_arg is None: + return AnyType(TypeOfAny.implementation_artifact) + + return other_arg + + +def _analyze_class(ctx: mypy.plugin.ClassDefContext) -> dict[str, _MethodInfo | None]: + """Analyze the class body, its parents, and return the comparison methods found.""" + # Traverse the MRO and collect ordering methods. + comparison_methods: dict[str, _MethodInfo | None] = {} + # Skip object because total_ordering does not use methods from object + for cls in ctx.cls.info.mro[:-1]: + for name in _ORDERING_METHODS: + if name in cls.names and name not in comparison_methods: + node = cls.names[name].node + if isinstance(node, SYMBOL_FUNCBASE_TYPES) and isinstance(node.type, CallableType): + comparison_methods[name] = _MethodInfo(node.is_static, node.type) + continue + + if isinstance(node, Var): + proper_type = get_proper_type(node.type) + if isinstance(proper_type, CallableType): + comparison_methods[name] = _MethodInfo(node.is_staticmethod, proper_type) + continue + + comparison_methods[name] = None + + return comparison_methods + + +def partial_new_callback(ctx: mypy.plugin.FunctionContext) -> Type: + """Infer a more precise return type for functools.partial""" + if not isinstance(ctx.api, mypy.checker.TypeChecker): # use internals + return ctx.default_return_type + if len(ctx.arg_types) != 3: # fn, *args, **kwargs + return ctx.default_return_type + if len(ctx.arg_types[0]) != 1: + return ctx.default_return_type + + if isinstance(get_proper_type(ctx.arg_types[0][0]), Overloaded): + # TODO: handle overloads, just fall back to whatever the non-plugin code does + return ctx.default_return_type + return handle_partial_with_callee(ctx, callee=ctx.arg_types[0][0]) + + +def handle_partial_with_callee(ctx: mypy.plugin.FunctionContext, callee: Type) -> Type: + if not isinstance(ctx.api, mypy.checker.TypeChecker): # use internals + return ctx.default_return_type + + if isinstance(callee_proper := get_proper_type(callee), UnionType): + return UnionType.make_union( + [handle_partial_with_callee(ctx, item) for item in callee_proper.items] + ) + + fn_type = ctx.api.extract_callable_type(callee, ctx=ctx.default_return_type) + if fn_type is None: + return ctx.default_return_type + + # We must normalize from the start to have coherent view together with TypeChecker. + fn_type = fn_type.with_unpacked_kwargs().with_normalized_var_args() + + last_context = ctx.api.type_context[-1] + if not fn_type.is_type_obj(): + # We wrap the return type to get use of a possible type context provided by caller. + # We cannot do this in case of class objects, since otherwise the plugin may get + # falsely triggered when evaluating the constructed call itself. 
+ ret_type: Type = ctx.api.named_generic_type(PARTIAL, [fn_type.ret_type]) + wrapped_return = True + else: + ret_type = fn_type.ret_type + # Instead, for class objects we ignore any type context to avoid spurious errors, + # since the type context will be partial[X] etc., not X. + ctx.api.type_context[-1] = None + wrapped_return = False + + # Flatten actual to formal mapping, since this is what check_call() expects. + actual_args = [] + actual_arg_kinds = [] + actual_arg_names = [] + actual_types = [] + seen_args = set() + for i, param in enumerate(ctx.args[1:], start=1): + for j, a in enumerate(param): + if a in seen_args: + # Same actual arg can map to multiple formals, but we need to include + # each one only once. + continue + # Here we rely on the fact that expressions are essentially immutable, so + # they can be compared by identity. + seen_args.add(a) + actual_args.append(a) + actual_arg_kinds.append(ctx.arg_kinds[i][j]) + actual_arg_names.append(ctx.arg_names[i][j]) + actual_types.append(ctx.arg_types[i][j]) + + formal_to_actual = map_actuals_to_formals( + actual_kinds=actual_arg_kinds, + actual_names=actual_arg_names, + formal_kinds=fn_type.arg_kinds, + formal_names=fn_type.arg_names, + actual_arg_type=lambda i: actual_types[i], + ) + + # We need to remove any type variables that appear only in formals that have + # no actuals, to avoid eagerly binding them in check_call() below. + can_infer_ids = set() + for i, arg_type in enumerate(fn_type.arg_types): + if not formal_to_actual[i]: + continue + can_infer_ids.update({tv.id for tv in get_all_type_vars(arg_type)}) + + # special_sig="partial" allows omission of args/kwargs typed with ParamSpec + defaulted = fn_type.copy_modified( + arg_kinds=[ + ( + ArgKind.ARG_OPT + if k == ArgKind.ARG_POS + else (ArgKind.ARG_NAMED_OPT if k == ArgKind.ARG_NAMED else k) + ) + for k in fn_type.arg_kinds + ], + ret_type=ret_type, + variables=[ + tv + for tv in fn_type.variables + # Keep TypeVarTuple/ParamSpec to avoid spurious errors on empty args. + if tv.id in can_infer_ids or not isinstance(tv, TypeVarType) + ], + special_sig="partial", + ) + if defaulted.line < 0: + # Make up a line number if we don't have one + defaulted.set_line(ctx.default_return_type) + + # Create a valid context for various ad-hoc inspections in check_call(). + call_expr = CallExpr( + callee=ctx.args[0][0], + args=actual_args, + arg_kinds=actual_arg_kinds, + arg_names=actual_arg_names, + analyzed=ctx.context.analyzed if isinstance(ctx.context, CallExpr) else None, + ) + call_expr.set_line(ctx.context) + + _, bound = ctx.api.expr_checker.check_call( + callee=defaulted, + args=actual_args, + arg_kinds=actual_arg_kinds, + arg_names=actual_arg_names, + context=call_expr, + ) + if not wrapped_return: + # Restore previously ignored context. + ctx.api.type_context[-1] = last_context + + bound = get_proper_type(bound) + if not isinstance(bound, CallableType): + return ctx.default_return_type + + if wrapped_return: + # Reverse the wrapping we did above. 
+ ret_type = get_proper_type(bound.ret_type) + if not isinstance(ret_type, Instance) or ret_type.type.fullname != PARTIAL: + return ctx.default_return_type + bound = bound.copy_modified(ret_type=ret_type.args[0]) + + partial_kinds = [] + partial_types = [] + partial_names = [] + # We need to fully apply any positional arguments (they cannot be respecified) + # However, keyword arguments can be respecified, so just give them a default + for i, actuals in enumerate(formal_to_actual): + if len(bound.arg_types) == len(fn_type.arg_types): + arg_type = bound.arg_types[i] + if not mypy.checker.is_valid_inferred_type(arg_type, ctx.api.options): + arg_type = fn_type.arg_types[i] # bit of a hack + else: + # TODO: I assume that bound and fn_type have the same arguments. It appears this isn't + # true when PEP 646 things are happening. See testFunctoolsPartialTypeVarTuple + arg_type = fn_type.arg_types[i] + + if not actuals or fn_type.arg_kinds[i] in (ArgKind.ARG_STAR, ArgKind.ARG_STAR2): + partial_kinds.append(fn_type.arg_kinds[i]) + partial_types.append(arg_type) + partial_names.append(fn_type.arg_names[i]) + else: + assert actuals + if any(actual_arg_kinds[j] in (ArgKind.ARG_POS, ArgKind.ARG_STAR) for j in actuals): + # Don't add params for arguments passed positionally + continue + # Add defaulted params for arguments passed via keyword + kind = actual_arg_kinds[actuals[0]] + if kind == ArgKind.ARG_NAMED or kind == ArgKind.ARG_STAR2: + kind = ArgKind.ARG_NAMED_OPT + partial_kinds.append(kind) + partial_types.append(arg_type) + partial_names.append(fn_type.arg_names[i]) + + ret_type = bound.ret_type + if not mypy.checker.is_valid_inferred_type(ret_type, ctx.api.options): + ret_type = fn_type.ret_type # same kind of hack as above + + partially_applied = fn_type.copy_modified( + arg_types=partial_types, + arg_kinds=partial_kinds, + arg_names=partial_names, + ret_type=ret_type, + special_sig="partial", + ) + + # Do not leak typevars from generic functions - they cannot be usable. 
+ # Keep them in the wrapped callable, but avoid `partial[SomeStrayTypeVar]` + erased_ret_type = erase_typevars(ret_type, [tv.id for tv in fn_type.variables]) + + ret = ctx.api.named_generic_type(PARTIAL, [erased_ret_type]) + ret = ret.copy_with_extra_attr("__mypy_partial", partially_applied) + if partially_applied.param_spec(): + assert ret.extra_attrs is not None # copy_with_extra_attr above ensures this + attrs = ret.extra_attrs.copy() + if ArgKind.ARG_STAR in actual_arg_kinds: + attrs.immutable.add("__mypy_partial_paramspec_args_bound") + if ArgKind.ARG_STAR2 in actual_arg_kinds: + attrs.immutable.add("__mypy_partial_paramspec_kwargs_bound") + ret.extra_attrs = attrs + return ret + + +def partial_call_callback(ctx: mypy.plugin.MethodContext) -> Type: + """Infer a more precise return type for functools.partial.__call__.""" + if ( + not isinstance(ctx.api, mypy.checker.TypeChecker) # use internals + or not isinstance(ctx.type, Instance) + or ctx.type.type.fullname != PARTIAL + or not ctx.type.extra_attrs + or "__mypy_partial" not in ctx.type.extra_attrs.attrs + ): + return ctx.default_return_type + + extra_attrs = ctx.type.extra_attrs + partial_type = get_proper_type(extra_attrs.attrs["__mypy_partial"]) + if len(ctx.arg_types) != 2: # *args, **kwargs + return ctx.default_return_type + + # See comments for similar actual to formal code above + actual_args = [] + actual_arg_kinds = [] + actual_arg_names = [] + seen_args = set() + for i, param in enumerate(ctx.args): + for j, a in enumerate(param): + if a in seen_args: + continue + seen_args.add(a) + actual_args.append(a) + actual_arg_kinds.append(ctx.arg_kinds[i][j]) + actual_arg_names.append(ctx.arg_names[i][j]) + + result, _ = ctx.api.expr_checker.check_call( + callee=partial_type, + args=actual_args, + arg_kinds=actual_arg_kinds, + arg_names=actual_arg_names, + context=ctx.context, + ) + if not isinstance(partial_type, CallableType) or partial_type.param_spec() is None: + return result + + args_bound = "__mypy_partial_paramspec_args_bound" in extra_attrs.immutable + kwargs_bound = "__mypy_partial_paramspec_kwargs_bound" in extra_attrs.immutable + + passed_paramspec_parts = [ + arg.node.type + for arg in actual_args + if isinstance(arg, NameExpr) + and isinstance(arg.node, Var) + and isinstance(arg.node.type, ParamSpecType) + ] + # ensure *args: P.args + args_passed = any(part.flavor == ParamSpecFlavor.ARGS for part in passed_paramspec_parts) + if not args_bound and not args_passed: + ctx.api.expr_checker.msg.too_few_arguments(partial_type, ctx.context, actual_arg_names) + elif args_bound and args_passed: + ctx.api.expr_checker.msg.too_many_arguments(partial_type, ctx.context) + + # ensure **kwargs: P.kwargs + kwargs_passed = any(part.flavor == ParamSpecFlavor.KWARGS for part in passed_paramspec_parts) + if not kwargs_bound and not kwargs_passed: + ctx.api.expr_checker.msg.too_few_arguments(partial_type, ctx.context, actual_arg_names) + + return result diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/proper_plugin.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/plugins/proper_plugin.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..34eab45 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/plugins/proper_plugin.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/proper_plugin.py b/.venv/lib/python3.12/site-packages/mypy/plugins/proper_plugin.py new file mode 100644 index 0000000..872903e --- /dev/null +++ 
b/.venv/lib/python3.12/site-packages/mypy/plugins/proper_plugin.py @@ -0,0 +1,178 @@ +""" +This plugin is helpful for mypy development itself. +By default, it is not enabled for mypy users. + +It also can be used by plugin developers as a part of their CI checks. + +It finds missing ``get_proper_type()`` call, which can lead to multiple errors. +""" + +from __future__ import annotations + +from typing import Callable + +from mypy.checker import TypeChecker +from mypy.nodes import TypeInfo +from mypy.plugin import FunctionContext, Plugin +from mypy.subtypes import is_proper_subtype +from mypy.types import ( + AnyType, + FunctionLike, + Instance, + NoneTyp, + ProperType, + TupleType, + Type, + UnionType, + get_proper_type, + get_proper_types, +) + + +class ProperTypePlugin(Plugin): + """ + A plugin to ensure that every type is expanded before doing any special-casing. + + This solves the problem that we have hundreds of call sites like: + + if isinstance(typ, UnionType): + ... # special-case union + + But after introducing a new type TypeAliasType (and removing immediate expansion) + all these became dangerous because typ may be e.g. an alias to union. + """ + + def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None: + if fullname == "builtins.isinstance": + return isinstance_proper_hook + if fullname == "mypy.types.get_proper_type": + return proper_type_hook + if fullname == "mypy.types.get_proper_types": + return proper_types_hook + return None + + +def isinstance_proper_hook(ctx: FunctionContext) -> Type: + if len(ctx.arg_types) != 2 or not ctx.arg_types[1]: + return ctx.default_return_type + + right = get_proper_type(ctx.arg_types[1][0]) + for arg in ctx.arg_types[0]: + if ( + is_improper_type(arg) or isinstance(get_proper_type(arg), AnyType) + ) and is_dangerous_target(right): + if is_special_target(right): + return ctx.default_return_type + ctx.api.fail( + "Never apply isinstance() to unexpanded types;" + " use mypy.types.get_proper_type() first", + ctx.context, + ) + ctx.api.note( # type: ignore[attr-defined] + "If you pass on the original type" + " after the check, always use its unexpanded version", + ctx.context, + ) + return ctx.default_return_type + + +def is_special_target(right: ProperType) -> bool: + """Whitelist some special cases for use in isinstance() with improper types.""" + if isinstance(right, FunctionLike) and right.is_type_obj(): + if right.type_object().fullname == "builtins.tuple": + # Used with Union[Type, Tuple[Type, ...]]. + return True + if right.type_object().fullname in ( + "mypy.types.Type", + "mypy.types.ProperType", + "mypy.types.TypeAliasType", + ): + # Special case: things like assert isinstance(typ, ProperType) are always OK. + return True + if right.type_object().fullname in ( + "mypy.types.UnboundType", + "mypy.types.TypeVarLikeType", + "mypy.types.TypeVarType", + "mypy.types.UnpackType", + "mypy.types.TypeVarTupleType", + "mypy.types.ParamSpecType", + "mypy.types.Parameters", + "mypy.types.RawExpressionType", + "mypy.types.EllipsisType", + "mypy.types.StarType", + "mypy.types.TypeList", + "mypy.types.CallableArgument", + "mypy.types.PartialType", + "mypy.types.ErasedType", + "mypy.types.DeletedType", + "mypy.types.RequiredType", + "mypy.types.ReadOnlyType", + "mypy.types.TypeGuardedType", + "mypy.types.PlaceholderType", + ): + # Special case: these are not valid targets for a type alias and thus safe. + # TODO: introduce a SyntheticType base to simplify this? 
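+ # Illustrative note (editor's addition, not upstream code): e.g.
+ #     assert isinstance(t, UnboundType)
+ # needs no preceding get_proper_type() call, because a TypeAliasType can
+ # never expand to one of these synthetic/internal type forms.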
+ return True + elif isinstance(right, TupleType): + return all(is_special_target(t) for t in get_proper_types(right.items)) + return False + + +def is_improper_type(typ: Type) -> bool: + """Is this a type that is not a subtype of ProperType?""" + typ = get_proper_type(typ) + if isinstance(typ, Instance): + info = typ.type + return info.has_base("mypy.types.Type") and not info.has_base("mypy.types.ProperType") + if isinstance(typ, UnionType): + return any(is_improper_type(t) for t in typ.items) + return False + + +def is_dangerous_target(typ: ProperType) -> bool: + """Is this a dangerous target (right argument) for an isinstance() check?""" + if isinstance(typ, TupleType): + return any(is_dangerous_target(get_proper_type(t)) for t in typ.items) + if isinstance(typ, FunctionLike) and typ.is_type_obj(): + return typ.type_object().has_base("mypy.types.Type") + return False + + +def proper_type_hook(ctx: FunctionContext) -> Type: + """Check if this get_proper_type() call is not redundant.""" + arg_types = ctx.arg_types[0] + if arg_types: + arg_type = get_proper_type(arg_types[0]) + proper_type = get_proper_type_instance(ctx) + if is_proper_subtype(arg_type, UnionType.make_union([NoneTyp(), proper_type])): + # Minimize amount of spurious errors from overload machinery. + # TODO: call the hook on the overload as a whole? + if isinstance(arg_type, (UnionType, Instance)): + ctx.api.fail("Redundant call to get_proper_type()", ctx.context) + return ctx.default_return_type + + +def proper_types_hook(ctx: FunctionContext) -> Type: + """Check if this get_proper_types() call is not redundant.""" + arg_types = ctx.arg_types[0] + if arg_types: + arg_type = arg_types[0] + proper_type = get_proper_type_instance(ctx) + item_type = UnionType.make_union([NoneTyp(), proper_type]) + ok_type = ctx.api.named_generic_type("typing.Iterable", [item_type]) + if is_proper_subtype(arg_type, ok_type): + ctx.api.fail("Redundant call to get_proper_types()", ctx.context) + return ctx.default_return_type + + +def get_proper_type_instance(ctx: FunctionContext) -> Instance: + checker = ctx.api + assert isinstance(checker, TypeChecker) + types = checker.modules["mypy.types"] + proper_type_info = types.names["ProperType"] + assert isinstance(proper_type_info.node, TypeInfo) + return Instance(proper_type_info.node, []) + + +def plugin(version: str) -> type[ProperTypePlugin]: + return ProperTypePlugin diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/singledispatch.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/plugins/singledispatch.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..f125f0b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/plugins/singledispatch.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/plugins/singledispatch.py b/.venv/lib/python3.12/site-packages/mypy/plugins/singledispatch.py new file mode 100644 index 0000000..eb2bbe1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/plugins/singledispatch.py @@ -0,0 +1,214 @@ +from __future__ import annotations + +from collections.abc import Sequence +from typing import NamedTuple, TypeVar, Union +from typing_extensions import TypeAlias as _TypeAlias + +from mypy.messages import format_type +from mypy.nodes import ARG_POS, Argument, Block, ClassDef, Context, SymbolTable, TypeInfo, Var +from mypy.options import Options +from mypy.plugin import CheckerPluginInterface, FunctionContext, MethodContext, MethodSigContext +from mypy.plugins.common 
import add_method_to_class +from mypy.plugins.constants import SINGLEDISPATCH_REGISTER_RETURN_CLASS +from mypy.subtypes import is_subtype +from mypy.types import ( + AnyType, + CallableType, + FunctionLike, + Instance, + NoneType, + Overloaded, + Type, + TypeOfAny, + get_proper_type, +) + + +class SingledispatchTypeVars(NamedTuple): + return_type: Type + fallback: CallableType + + +class RegisterCallableInfo(NamedTuple): + register_type: Type + singledispatch_obj: Instance + + +def get_singledispatch_info(typ: Instance) -> SingledispatchTypeVars | None: + if len(typ.args) == 2: + return SingledispatchTypeVars(*typ.args) # type: ignore[arg-type] + return None + + +T = TypeVar("T") + + +def get_first_arg(args: list[list[T]]) -> T | None: + """Get the element that corresponds to the first argument passed to the function""" + if args and args[0]: + return args[0][0] + return None + + +def make_fake_register_class_instance( + api: CheckerPluginInterface, type_args: Sequence[Type] +) -> Instance: + defn = ClassDef(SINGLEDISPATCH_REGISTER_RETURN_CLASS, Block([])) + defn.fullname = f"functools.{SINGLEDISPATCH_REGISTER_RETURN_CLASS}" + info = TypeInfo(SymbolTable(), defn, "functools") + obj_type = api.named_generic_type("builtins.object", []).type + info.bases = [Instance(obj_type, [])] + info.mro = [info, obj_type] + defn.info = info + + func_arg = Argument(Var("name"), AnyType(TypeOfAny.implementation_artifact), None, ARG_POS) + add_method_to_class(api, defn, "__call__", [func_arg], NoneType()) + + return Instance(info, type_args) + + +PluginContext: _TypeAlias = Union[FunctionContext, MethodContext] + + +def fail(ctx: PluginContext, msg: str, context: Context | None) -> None: + """Emit an error message. + + This tries to emit an error message at the location specified by `context`, falling back to the + location specified by `ctx.context`. 
This is helpful when the only context information about + where you want to put the error message may be None (like it is for `CallableType.definition`) + and falling back to the location of the calling function is fine.""" + # TODO: figure out if there is some more reliable way of getting context information, so this + # function isn't necessary + if context is not None: + err_context = context + else: + err_context = ctx.context + ctx.api.fail(msg, err_context) + + +def create_singledispatch_function_callback(ctx: FunctionContext) -> Type: + """Called for functools.singledispatch""" + func_type = get_proper_type(get_first_arg(ctx.arg_types)) + if isinstance(func_type, CallableType): + if len(func_type.arg_kinds) < 1: + fail( + ctx, "Singledispatch function requires at least one argument", func_type.definition + ) + return ctx.default_return_type + + elif not func_type.arg_kinds[0].is_positional(star=True): + fail( + ctx, + "First argument to singledispatch function must be a positional argument", + func_type.definition, + ) + return ctx.default_return_type + + # singledispatch returns an instance of functools._SingleDispatchCallable according to + # typeshed + singledispatch_obj = get_proper_type(ctx.default_return_type) + assert isinstance(singledispatch_obj, Instance) + singledispatch_obj.args += (func_type,) + + return ctx.default_return_type + + +def singledispatch_register_callback(ctx: MethodContext) -> Type: + """Called for functools._SingleDispatchCallable.register""" + assert isinstance(ctx.type, Instance) + # TODO: check that there's only one argument + first_arg_type = get_proper_type(get_first_arg(ctx.arg_types)) + if isinstance(first_arg_type, (CallableType, Overloaded)) and first_arg_type.is_type_obj(): + # HACK: We received a class as an argument to register. We need to be able + # to access the function that register is being applied to, and the typeshed definition + # of register has it return a generic Callable, so we create a new + # SingleDispatchRegisterCallable class, define a __call__ method, and then add a + # plugin hook for that. + + # is_subtype doesn't work when the right type is Overloaded, so we need the + # actual type + register_type = first_arg_type.items[0].ret_type + type_args = RegisterCallableInfo(register_type, ctx.type) + register_callable = make_fake_register_class_instance(ctx.api, type_args) + return register_callable + elif isinstance(first_arg_type, CallableType): + # TODO: do more checking for registered functions + register_function(ctx, ctx.type, first_arg_type, ctx.api.options) + # The typeshed stubs for register say that the function returned is Callable[..., T], even + # though the function returned is the same as the one passed in. 
We return the type of the + # function so that mypy can properly type check cases where the registered function is used + # directly (instead of through singledispatch) + return first_arg_type + + # fallback in case we don't recognize the arguments + return ctx.default_return_type + + +def register_function( + ctx: PluginContext, + singledispatch_obj: Instance, + func: Type, + options: Options, + register_arg: Type | None = None, +) -> None: + """Register a function""" + + func = get_proper_type(func) + if not isinstance(func, CallableType): + return + metadata = get_singledispatch_info(singledispatch_obj) + if metadata is None: + # if we never added the fallback to the type variables, we already reported an error, so + # just don't do anything here + return + dispatch_type = get_dispatch_type(func, register_arg) + if dispatch_type is None: + # TODO: report an error here that singledispatch requires at least one argument + # (might want to do the error reporting in get_dispatch_type) + return + fallback = metadata.fallback + + fallback_dispatch_type = fallback.arg_types[0] + if not is_subtype(dispatch_type, fallback_dispatch_type): + fail( + ctx, + "Dispatch type {} must be subtype of fallback function first argument {}".format( + format_type(dispatch_type, options), format_type(fallback_dispatch_type, options) + ), + func.definition, + ) + return + return + + +def get_dispatch_type(func: CallableType, register_arg: Type | None) -> Type | None: + if register_arg is not None: + return register_arg + if func.arg_types: + return func.arg_types[0] + return None + + +def call_singledispatch_function_after_register_argument(ctx: MethodContext) -> Type: + """Called on the function after passing a type to register""" + register_callable = ctx.type + if isinstance(register_callable, Instance): + type_args = RegisterCallableInfo(*register_callable.args) # type: ignore[arg-type] + func = get_first_arg(ctx.arg_types) + if func is not None: + register_function( + ctx, type_args.singledispatch_obj, func, ctx.api.options, type_args.register_type + ) + # see call to register_function in the callback for register + return func + return ctx.default_return_type + + +def call_singledispatch_function_callback(ctx: MethodSigContext) -> FunctionLike: + """Called for functools._SingleDispatchCallable.__call__""" + if not isinstance(ctx.type, Instance): + return ctx.default_signature + metadata = get_singledispatch_info(ctx.type) + if metadata is None: + return ctx.default_signature + return metadata.fallback diff --git a/.venv/lib/python3.12/site-packages/mypy/py.typed b/.venv/lib/python3.12/site-packages/mypy/py.typed new file mode 100644 index 0000000..9724ed5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. The mypy package uses inline types. diff --git a/.venv/lib/python3.12/site-packages/mypy/pyinfo.py b/.venv/lib/python3.12/site-packages/mypy/pyinfo.py new file mode 100644 index 0000000..98350f4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/pyinfo.py @@ -0,0 +1,79 @@ +from __future__ import annotations + +"""Utilities to find the site and prefix information of a Python executable. + +This file MUST remain compatible with all Python 3.9+ versions. Since we cannot make any +assumptions about the Python being executed, this module should not use *any* dependencies outside +of the standard library found in Python 3.9. This file is run each mypy run, so it should be kept +as fast as possible. 
+""" +import sys + +if __name__ == "__main__": + # HACK: We don't want to pick up mypy.types as the top-level types + # module. This could happen if this file is run as a script. + # This workaround fixes this for Python versions before 3.11. + if sys.version_info < (3, 11): + old_sys_path = sys.path + sys.path = sys.path[1:] + import types # noqa: F401 + + sys.path = old_sys_path + +import os +import site +import sysconfig + + +def getsitepackages() -> list[str]: + res = [] + if hasattr(site, "getsitepackages"): + res.extend(site.getsitepackages()) + + if hasattr(site, "getusersitepackages") and site.ENABLE_USER_SITE: + res.insert(0, site.getusersitepackages()) + else: + res = [sysconfig.get_paths()["purelib"]] + return res + + +def getsyspath() -> list[str]: + # Do not include things from the standard library + # because those should come from typeshed. + stdlib_zip = os.path.join( + sys.base_exec_prefix, + getattr(sys, "platlibdir", "lib"), + f"python{sys.version_info.major}{sys.version_info.minor}.zip", + ) + stdlib = sysconfig.get_path("stdlib") + stdlib_ext = os.path.join(stdlib, "lib-dynload") + excludes = {stdlib_zip, stdlib, stdlib_ext} + + # Drop the first entry of sys.path + # - If pyinfo.py is executed as a script (in a subprocess), this is the directory + # containing pyinfo.py + # - Otherwise, if mypy launched via console script, this is the directory of the script + # - Otherwise, if mypy launched via python -m mypy, this is the current directory + # In all these cases, it is desirable to drop the first entry + # Note that mypy adds the cwd to SearchPaths.python_path, so we still find things on the + # cwd consistently (the return value here sets SearchPaths.package_path) + + # Python 3.11 adds a "safe_path" flag wherein Python won't automatically prepend + # anything to sys.path. In this case, the first entry of sys.path is no longer special. 
+ offset = 0 if sys.version_info >= (3, 11) and sys.flags.safe_path else 1 + + abs_sys_path = (os.path.abspath(p) for p in sys.path[offset:]) + return [p for p in abs_sys_path if p not in excludes] + + +def getsearchdirs() -> tuple[list[str], list[str]]: + return (getsyspath(), getsitepackages()) + + +if __name__ == "__main__": + sys.stdout.reconfigure(encoding="utf-8") # type: ignore[union-attr] + if sys.argv[-1] == "getsearchdirs": + print(repr(getsearchdirs())) + else: + print("ERROR: incorrect argument to pyinfo.py.", file=sys.stderr) + sys.exit(1) diff --git a/.venv/lib/python3.12/site-packages/mypy/reachability.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/reachability.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..2dde6a9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/reachability.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/reachability.py b/.venv/lib/python3.12/site-packages/mypy/reachability.py new file mode 100644 index 0000000..132c269 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/reachability.py @@ -0,0 +1,373 @@ +"""Utilities related to determining the reachability of code (in semantic analysis).""" + +from __future__ import annotations + +from typing import Final, TypeVar + +from mypy.literals import literal +from mypy.nodes import ( + LITERAL_YES, + AssertStmt, + Block, + CallExpr, + ComparisonExpr, + Expression, + FuncDef, + IfStmt, + Import, + ImportAll, + ImportFrom, + IndexExpr, + IntExpr, + MatchStmt, + MemberExpr, + NameExpr, + OpExpr, + SliceExpr, + StrExpr, + TupleExpr, + UnaryExpr, +) +from mypy.options import Options +from mypy.patterns import AsPattern, OrPattern, Pattern +from mypy.traverser import TraverserVisitor + +# Inferred truth value of an expression. +ALWAYS_TRUE: Final = 1 +MYPY_TRUE: Final = 2 # True in mypy, False at runtime +ALWAYS_FALSE: Final = 3 +MYPY_FALSE: Final = 4 # False in mypy, True at runtime +TRUTH_VALUE_UNKNOWN: Final = 5 + +inverted_truth_mapping: Final = { + ALWAYS_TRUE: ALWAYS_FALSE, + ALWAYS_FALSE: ALWAYS_TRUE, + TRUTH_VALUE_UNKNOWN: TRUTH_VALUE_UNKNOWN, + MYPY_TRUE: MYPY_FALSE, + MYPY_FALSE: MYPY_TRUE, +} + +reverse_op: Final = {"==": "==", "!=": "!=", "<": ">", ">": "<", "<=": ">=", ">=": "<="} + + +def infer_reachability_of_if_statement(s: IfStmt, options: Options) -> None: + for i in range(len(s.expr)): + result = infer_condition_value(s.expr[i], options) + if result in (ALWAYS_FALSE, MYPY_FALSE): + # The condition is considered always false, so we skip the if/elif body. + mark_block_unreachable(s.body[i]) + elif result in (ALWAYS_TRUE, MYPY_TRUE): + # This condition is considered always true, so all of the remaining + # elif/else bodies should not be checked. + if result == MYPY_TRUE: + # This condition is false at runtime; this will affect + # import priorities. + mark_block_mypy_only(s.body[i]) + for body in s.body[i + 1 :]: + mark_block_unreachable(body) + + # Make sure else body always exists and is marked as + # unreachable so the type checker always knows that + # all control flow paths will flow through the if + # statement body. 
+            if not s.else_body:
+                s.else_body = Block([])
+            mark_block_unreachable(s.else_body)
+            break
+
+
+def infer_reachability_of_match_statement(s: MatchStmt, options: Options) -> None:
+    for i, guard in enumerate(s.guards):
+        pattern_value = infer_pattern_value(s.patterns[i])
+
+        if guard is not None:
+            guard_value = infer_condition_value(guard, options)
+        else:
+            guard_value = ALWAYS_TRUE
+
+        if pattern_value in (ALWAYS_FALSE, MYPY_FALSE) or guard_value in (
+            ALWAYS_FALSE,
+            MYPY_FALSE,
+        ):
+            # The case is considered always false, so we skip the case body.
+            mark_block_unreachable(s.bodies[i])
+        elif pattern_value in (ALWAYS_TRUE, MYPY_TRUE) and guard_value in (
+            ALWAYS_TRUE,
+            MYPY_TRUE,
+        ):
+            for body in s.bodies[i + 1 :]:
+                mark_block_unreachable(body)
+
+            if guard_value == MYPY_TRUE:
+                # This condition is false at runtime; this will affect
+                # import priorities.
+                mark_block_mypy_only(s.bodies[i])
+
+
+def assert_will_always_fail(s: AssertStmt, options: Options) -> bool:
+    return infer_condition_value(s.expr, options) in (ALWAYS_FALSE, MYPY_FALSE)
+
+
+def infer_condition_value(expr: Expression, options: Options) -> int:
+    """Infer whether the given condition is always true/false.
+
+    Return ALWAYS_TRUE if always true, ALWAYS_FALSE if always false,
+    MYPY_TRUE if true under mypy and false at runtime, MYPY_FALSE if
+    false under mypy and true at runtime, else TRUTH_VALUE_UNKNOWN.
+    """
+    if isinstance(expr, UnaryExpr) and expr.op == "not":
+        positive = infer_condition_value(expr.expr, options)
+        return inverted_truth_mapping[positive]
+
+    pyversion = options.python_version
+    name = ""
+
+    result = TRUTH_VALUE_UNKNOWN
+    if isinstance(expr, NameExpr):
+        name = expr.name
+    elif isinstance(expr, MemberExpr):
+        name = expr.name
+    elif isinstance(expr, OpExpr):
+        if expr.op not in ("or", "and"):
+            return TRUTH_VALUE_UNKNOWN
+
+        left = infer_condition_value(expr.left, options)
+        right = infer_condition_value(expr.right, options)
+        results = {left, right}
+        if expr.op == "or":
+            if ALWAYS_TRUE in results:
+                return ALWAYS_TRUE
+            elif MYPY_TRUE in results:
+                return MYPY_TRUE
+            elif left == right == MYPY_FALSE:
+                return MYPY_FALSE
+            elif results <= {ALWAYS_FALSE, MYPY_FALSE}:
+                return ALWAYS_FALSE
+        elif expr.op == "and":
+            if ALWAYS_FALSE in results:
+                return ALWAYS_FALSE
+            elif MYPY_FALSE in results:
+                return MYPY_FALSE
+            elif left == right == ALWAYS_TRUE:
+                return ALWAYS_TRUE
+            elif results <= {ALWAYS_TRUE, MYPY_TRUE}:
+                return MYPY_TRUE
+        return TRUTH_VALUE_UNKNOWN
+    else:
+        result = consider_sys_version_info(expr, pyversion)
+        if result == TRUTH_VALUE_UNKNOWN:
+            result = consider_sys_platform(expr, options.platform)
+    if result == TRUTH_VALUE_UNKNOWN:
+        if name == "PY2":
+            result = ALWAYS_FALSE
+        elif name == "PY3":
+            result = ALWAYS_TRUE
+        elif name == "MYPY" or name == "TYPE_CHECKING":
+            result = MYPY_TRUE
+        elif name in options.always_true:
+            result = ALWAYS_TRUE
+        elif name in options.always_false:
+            result = ALWAYS_FALSE
+    return result
+
+
+def infer_pattern_value(pattern: Pattern) -> int:
+    if isinstance(pattern, AsPattern) and pattern.pattern is None:
+        return ALWAYS_TRUE
+    elif isinstance(pattern, OrPattern) and any(
+        infer_pattern_value(p) == ALWAYS_TRUE for p in pattern.patterns
+    ):
+        return ALWAYS_TRUE
+    else:
+        return TRUTH_VALUE_UNKNOWN
+
+
+def consider_sys_version_info(expr: Expression, pyversion: tuple[int, ...]) -> int:
+    """Consider whether expr is a comparison involving sys.version_info.
+
+    Return ALWAYS_TRUE, ALWAYS_FALSE, or TRUTH_VALUE_UNKNOWN.
+ """ + # Cases supported: + # - sys.version_info[] + # - sys.version_info[:] + # - sys.version_info + # (in this case must be >, >=, <, <=, but cannot be ==, !=) + if not isinstance(expr, ComparisonExpr): + return TRUTH_VALUE_UNKNOWN + # Let's not yet support chained comparisons. + if len(expr.operators) > 1: + return TRUTH_VALUE_UNKNOWN + op = expr.operators[0] + if op not in ("==", "!=", "<=", ">=", "<", ">"): + return TRUTH_VALUE_UNKNOWN + + index = contains_sys_version_info(expr.operands[0]) + thing = contains_int_or_tuple_of_ints(expr.operands[1]) + if index is None or thing is None: + index = contains_sys_version_info(expr.operands[1]) + thing = contains_int_or_tuple_of_ints(expr.operands[0]) + op = reverse_op[op] + if isinstance(index, int) and isinstance(thing, int): + # sys.version_info[i] k + if 0 <= index <= 1: + return fixed_comparison(pyversion[index], op, thing) + else: + return TRUTH_VALUE_UNKNOWN + elif isinstance(index, tuple) and isinstance(thing, tuple): + lo, hi = index + if lo is None: + lo = 0 + if hi is None: + hi = 2 + if 0 <= lo < hi <= 2: + val = pyversion[lo:hi] + if len(val) == len(thing) or len(val) > len(thing) and op not in ("==", "!="): + return fixed_comparison(val, op, thing) + return TRUTH_VALUE_UNKNOWN + + +def consider_sys_platform(expr: Expression, platform: str) -> int: + """Consider whether expr is a comparison involving sys.platform. + + Return ALWAYS_TRUE, ALWAYS_FALSE, or TRUTH_VALUE_UNKNOWN. + """ + # Cases supported: + # - sys.platform == 'linux' + # - sys.platform != 'win32' + # - sys.platform.startswith('win') + if isinstance(expr, ComparisonExpr): + # Let's not yet support chained comparisons. + if len(expr.operators) > 1: + return TRUTH_VALUE_UNKNOWN + op = expr.operators[0] + if op not in ("==", "!="): + return TRUTH_VALUE_UNKNOWN + if not is_sys_attr(expr.operands[0], "platform"): + return TRUTH_VALUE_UNKNOWN + right = expr.operands[1] + if not isinstance(right, StrExpr): + return TRUTH_VALUE_UNKNOWN + return fixed_comparison(platform, op, right.value) + elif isinstance(expr, CallExpr): + if not isinstance(expr.callee, MemberExpr): + return TRUTH_VALUE_UNKNOWN + if len(expr.args) != 1 or not isinstance(expr.args[0], StrExpr): + return TRUTH_VALUE_UNKNOWN + if not is_sys_attr(expr.callee.expr, "platform"): + return TRUTH_VALUE_UNKNOWN + if expr.callee.name != "startswith": + return TRUTH_VALUE_UNKNOWN + if platform.startswith(expr.args[0].value): + return ALWAYS_TRUE + else: + return ALWAYS_FALSE + else: + return TRUTH_VALUE_UNKNOWN + + +Targ = TypeVar("Targ", int, str, tuple[int, ...]) + + +def fixed_comparison(left: Targ, op: str, right: Targ) -> int: + rmap = {False: ALWAYS_FALSE, True: ALWAYS_TRUE} + if op == "==": + return rmap[left == right] + if op == "!=": + return rmap[left != right] + if op == "<=": + return rmap[left <= right] + if op == ">=": + return rmap[left >= right] + if op == "<": + return rmap[left < right] + if op == ">": + return rmap[left > right] + return TRUTH_VALUE_UNKNOWN + + +def contains_int_or_tuple_of_ints(expr: Expression) -> None | int | tuple[int, ...]: + if isinstance(expr, IntExpr): + return expr.value + if isinstance(expr, TupleExpr): + if literal(expr) == LITERAL_YES: + thing = [] + for x in expr.items: + if not isinstance(x, IntExpr): + return None + thing.append(x.value) + return tuple(thing) + return None + + +def contains_sys_version_info(expr: Expression) -> None | int | tuple[int | None, int | None]: + if is_sys_attr(expr, "version_info"): + return (None, None) # Same as sys.version_info[:] + if 
isinstance(expr, IndexExpr) and is_sys_attr(expr.base, "version_info"): + index = expr.index + if isinstance(index, IntExpr): + return index.value + if isinstance(index, SliceExpr): + if index.stride is not None: + if not isinstance(index.stride, IntExpr) or index.stride.value != 1: + return None + begin = end = None + if index.begin_index is not None: + if not isinstance(index.begin_index, IntExpr): + return None + begin = index.begin_index.value + if index.end_index is not None: + if not isinstance(index.end_index, IntExpr): + return None + end = index.end_index.value + return (begin, end) + return None + + +def is_sys_attr(expr: Expression, name: str) -> bool: + # TODO: This currently doesn't work with code like this: + # - import sys as _sys + # - from sys import version_info + if isinstance(expr, MemberExpr) and expr.name == name: + if isinstance(expr.expr, NameExpr) and expr.expr.name == "sys": + # TODO: Guard against a local named sys, etc. + # (Though later passes will still do most checking.) + return True + return False + + +def mark_block_unreachable(block: Block) -> None: + block.is_unreachable = True + block.accept(MarkImportsUnreachableVisitor()) + + +class MarkImportsUnreachableVisitor(TraverserVisitor): + """Visitor that flags all imports nested within a node as unreachable.""" + + def visit_import(self, node: Import) -> None: + node.is_unreachable = True + + def visit_import_from(self, node: ImportFrom) -> None: + node.is_unreachable = True + + def visit_import_all(self, node: ImportAll) -> None: + node.is_unreachable = True + + +def mark_block_mypy_only(block: Block) -> None: + block.accept(MarkImportsMypyOnlyVisitor()) + + +class MarkImportsMypyOnlyVisitor(TraverserVisitor): + """Visitor that sets is_mypy_only (which affects priority).""" + + def visit_import(self, node: Import) -> None: + node.is_mypy_only = True + + def visit_import_from(self, node: ImportFrom) -> None: + node.is_mypy_only = True + + def visit_import_all(self, node: ImportAll) -> None: + node.is_mypy_only = True + + def visit_func_def(self, node: FuncDef) -> None: + node.is_mypy_only = True diff --git a/.venv/lib/python3.12/site-packages/mypy/refinfo.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/refinfo.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..8d5eae2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/refinfo.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/refinfo.py b/.venv/lib/python3.12/site-packages/mypy/refinfo.py new file mode 100644 index 0000000..a5b9283 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/refinfo.py @@ -0,0 +1,92 @@ +"""Find line-level reference information from a mypy AST (undocumented feature)""" + +from __future__ import annotations + +from mypy.nodes import ( + LDEF, + Expression, + FuncDef, + MemberExpr, + MypyFile, + NameExpr, + RefExpr, + SymbolNode, + TypeInfo, +) +from mypy.traverser import TraverserVisitor +from mypy.typeops import tuple_fallback +from mypy.types import ( + FunctionLike, + Instance, + TupleType, + Type, + TypeType, + TypeVarLikeType, + get_proper_type, +) + + +class RefInfoVisitor(TraverserVisitor): + def __init__(self, type_map: dict[Expression, Type]) -> None: + super().__init__() + self.type_map = type_map + self.data: list[dict[str, object]] = [] + + def visit_name_expr(self, expr: NameExpr) -> None: + super().visit_name_expr(expr) + self.record_ref_expr(expr) + + def visit_member_expr(self, expr: MemberExpr) -> None: + 
super().visit_member_expr(expr) + self.record_ref_expr(expr) + + def visit_func_def(self, func: FuncDef) -> None: + if func.expanded: + for item in func.expanded: + if isinstance(item, FuncDef): + super().visit_func_def(item) + else: + super().visit_func_def(func) + + def record_ref_expr(self, expr: RefExpr) -> None: + fullname = None + if expr.kind != LDEF and "." in expr.fullname: + fullname = expr.fullname + elif isinstance(expr, MemberExpr): + typ = self.type_map.get(expr.expr) + sym = None + if isinstance(expr.expr, RefExpr): + sym = expr.expr.node + if typ: + tfn = type_fullname(typ, sym) + if tfn: + fullname = f"{tfn}.{expr.name}" + if not fullname: + fullname = f"*.{expr.name}" + if fullname is not None: + self.data.append({"line": expr.line, "column": expr.column, "target": fullname}) + + +def type_fullname(typ: Type, node: SymbolNode | None = None) -> str | None: + typ = get_proper_type(typ) + if isinstance(typ, Instance): + return typ.type.fullname + elif isinstance(typ, TypeType): + return type_fullname(typ.item) + elif isinstance(typ, FunctionLike) and typ.is_type_obj(): + if isinstance(node, TypeInfo): + return node.fullname + return type_fullname(typ.fallback) + elif isinstance(typ, TupleType): + return type_fullname(tuple_fallback(typ)) + elif isinstance(typ, TypeVarLikeType): + return type_fullname(typ.upper_bound) + return None + + +def get_undocumented_ref_info_json( + tree: MypyFile, type_map: dict[Expression, Type] +) -> list[dict[str, object]]: + visitor = RefInfoVisitor(type_map) + tree.accept(visitor) + return visitor.data diff --git a/.venv/lib/python3.12/site-packages/mypy/renaming.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/renaming.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..39cb8fc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/renaming.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/renaming.py b/.venv/lib/python3.12/site-packages/mypy/renaming.py new file mode 100644 index 0000000..dff76b1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/renaming.py @@ -0,0 +1,583 @@ +from __future__ import annotations + +from collections.abc import Iterator +from contextlib import contextmanager +from typing import Final + +from mypy.nodes import ( + AssignmentStmt, + Block, + BreakStmt, + ClassDef, + ContinueStmt, + ForStmt, + FuncDef, + Import, + ImportAll, + ImportFrom, + IndexExpr, + ListExpr, + Lvalue, + MatchStmt, + MemberExpr, + MypyFile, + NameExpr, + StarExpr, + TryStmt, + TupleExpr, + WhileStmt, + WithStmt, +) +from mypy.patterns import AsPattern +from mypy.traverser import TraverserVisitor + +# Scope kinds +FILE: Final = 0 +FUNCTION: Final = 1 +CLASS: Final = 2 + + +class VariableRenameVisitor(TraverserVisitor): + """Rename variables to allow redefinition of variables. + + For example, consider this code: + + x = 0 + f(x) + + x = "a" + g(x) + + It will be transformed like this: + + x' = 0 + f(x') + + x = "a" + g(x) + + There will be two independent variables (x' and x) that will have separate + inferred types. The publicly exposed variant will get the non-suffixed name. + This is the last definition at module top level and the first definition + (argument) within a function. + + Renaming only happens for assignments within the same block. Renaming is + performed before semantic analysis, immediately after parsing. + + The implementation performs a rudimentary static analysis. The analysis is + overly conservative to keep things simple. 
+ """ + + def __init__(self) -> None: + # Counter for labeling new blocks + self.block_id = 0 + # Number of surrounding try statements that disallow variable redefinition + self.disallow_redef_depth = 0 + # Number of surrounding loop statements + self.loop_depth = 0 + # Map block id to loop depth. + self.block_loop_depth: dict[int, int] = {} + # Stack of block ids being processed. + self.blocks: list[int] = [] + # List of scopes; each scope maps short (unqualified) name to block id. + self.var_blocks: list[dict[str, int]] = [] + + # References to variables that we may need to rename. List of + # scopes; each scope is a mapping from name to list of collections + # of names that refer to the same logical variable. + self.refs: list[dict[str, list[list[NameExpr]]]] = [] + # Number of reads of the most recent definition of a variable (per scope) + self.num_reads: list[dict[str, int]] = [] + # Kinds of nested scopes (FILE, FUNCTION or CLASS) + self.scope_kinds: list[int] = [] + + def visit_mypy_file(self, file_node: MypyFile) -> None: + """Rename variables within a file. + + This is the main entry point to this class. + """ + self.clear() + with self.enter_scope(FILE), self.enter_block(): + for d in file_node.defs: + d.accept(self) + + def visit_func_def(self, fdef: FuncDef) -> None: + # Conservatively do not allow variable defined before a function to + # be redefined later, since function could refer to either definition. + self.reject_redefinition_of_vars_in_scope() + + with self.enter_scope(FUNCTION), self.enter_block(): + for arg in fdef.arguments: + name = arg.variable.name + # 'self' can't be redefined since it's special as it allows definition of + # attributes. 'cls' can't be used to define attributes so we can ignore it. + can_be_redefined = name != "self" # TODO: Proper check + self.record_assignment(arg.variable.name, can_be_redefined) + self.handle_arg(name) + + for stmt in fdef.body.body: + stmt.accept(self) + + def visit_class_def(self, cdef: ClassDef) -> None: + self.reject_redefinition_of_vars_in_scope() + with self.enter_scope(CLASS): + super().visit_class_def(cdef) + + def visit_block(self, block: Block) -> None: + with self.enter_block(): + super().visit_block(block) + + def visit_while_stmt(self, stmt: WhileStmt) -> None: + with self.enter_loop(): + super().visit_while_stmt(stmt) + + def visit_for_stmt(self, stmt: ForStmt) -> None: + stmt.expr.accept(self) + self.analyze_lvalue(stmt.index, True) + # Also analyze as non-lvalue so that every for loop index variable is assumed to be read. + stmt.index.accept(self) + with self.enter_loop(): + stmt.body.accept(self) + if stmt.else_body: + stmt.else_body.accept(self) + + def visit_break_stmt(self, stmt: BreakStmt) -> None: + self.reject_redefinition_of_vars_in_loop() + + def visit_continue_stmt(self, stmt: ContinueStmt) -> None: + self.reject_redefinition_of_vars_in_loop() + + def visit_try_stmt(self, stmt: TryStmt) -> None: + # Variables defined by a try statement get special treatment in the + # type checker which allows them to be always redefined, so no need to + # do renaming here. 
+ with self.enter_try(): + stmt.body.accept(self) + + for var, tp, handler in zip(stmt.vars, stmt.types, stmt.handlers): + with self.enter_block(): + # Handle except variable together with its body + if tp is not None: + tp.accept(self) + if var is not None: + self.handle_def(var) + for s in handler.body: + s.accept(self) + if stmt.else_body is not None: + stmt.else_body.accept(self) + if stmt.finally_body is not None: + stmt.finally_body.accept(self) + + def visit_with_stmt(self, stmt: WithStmt) -> None: + for expr in stmt.expr: + expr.accept(self) + for target in stmt.target: + if target is not None: + self.analyze_lvalue(target) + # We allow redefinitions in the body of a with statement for + # convenience. This is unsafe since with statements can affect control + # flow by catching exceptions, but this is rare except for + # assertRaises() and other similar functions, where the exception is + # raised by the last statement in the body, which usually isn't a + # problem. + stmt.body.accept(self) + + def visit_import(self, imp: Import) -> None: + for id, as_id in imp.ids: + self.record_assignment(as_id or id, False) + + def visit_import_from(self, imp: ImportFrom) -> None: + for id, as_id in imp.names: + self.record_assignment(as_id or id, False) + + def visit_assignment_stmt(self, s: AssignmentStmt) -> None: + s.rvalue.accept(self) + for lvalue in s.lvalues: + self.analyze_lvalue(lvalue) + + def visit_match_stmt(self, s: MatchStmt) -> None: + s.subject.accept(self) + for i in range(len(s.patterns)): + with self.enter_block(): + s.patterns[i].accept(self) + guard = s.guards[i] + if guard is not None: + guard.accept(self) + # We already entered a block, so visit this block's statements directly + for stmt in s.bodies[i].body: + stmt.accept(self) + + def visit_capture_pattern(self, p: AsPattern) -> None: + if p.name is not None: + self.analyze_lvalue(p.name) + + def analyze_lvalue(self, lvalue: Lvalue, is_nested: bool = False) -> None: + """Process assignment; in particular, keep track of (re)defined names. + + Args: + is_nested: True for non-outermost Lvalue in a multiple assignment such as + "x, y = ..." + """ + if isinstance(lvalue, NameExpr): + name = lvalue.name + is_new = self.record_assignment(name, True) + if is_new: + self.handle_def(lvalue) + else: + self.handle_refine(lvalue) + if is_nested: + # This allows these to be redefined freely even if never read. Multiple + # assignment like "x, _ _ = y" defines dummy variables that are never read. + self.handle_ref(lvalue) + elif isinstance(lvalue, (ListExpr, TupleExpr)): + for item in lvalue.items: + self.analyze_lvalue(item, is_nested=True) + elif isinstance(lvalue, MemberExpr): + lvalue.expr.accept(self) + elif isinstance(lvalue, IndexExpr): + lvalue.base.accept(self) + lvalue.index.accept(self) + elif isinstance(lvalue, StarExpr): + # Propagate is_nested since in a typical use case like "x, *rest = ..." 'rest' may + # be freely reused. 
+ self.analyze_lvalue(lvalue.expr, is_nested=is_nested) + + def visit_name_expr(self, expr: NameExpr) -> None: + self.handle_ref(expr) + + # Helpers for renaming references + + def handle_arg(self, name: str) -> None: + """Store function argument.""" + self.refs[-1][name] = [[]] + self.num_reads[-1][name] = 0 + + def handle_def(self, expr: NameExpr) -> None: + """Store new name definition.""" + name = expr.name + names = self.refs[-1].setdefault(name, []) + names.append([expr]) + self.num_reads[-1][name] = 0 + + def handle_refine(self, expr: NameExpr) -> None: + """Store assignment to an existing name (that replaces previous value, if any).""" + name = expr.name + if name in self.refs[-1]: + names = self.refs[-1][name] + if not names: + names.append([]) + names[-1].append(expr) + + def handle_ref(self, expr: NameExpr) -> None: + """Store reference to defined name.""" + name = expr.name + if name in self.refs[-1]: + names = self.refs[-1][name] + if not names: + names.append([]) + names[-1].append(expr) + num_reads = self.num_reads[-1] + num_reads[name] = num_reads.get(name, 0) + 1 + + def flush_refs(self) -> None: + """Rename all references within the current scope. + + This will be called at the end of a scope. + """ + is_func = self.scope_kinds[-1] == FUNCTION + for refs in self.refs[-1].values(): + if len(refs) == 1: + # Only one definition -- no renaming needed. + continue + if is_func: + # In a function, don't rename the first definition, as it + # may be an argument that must preserve the name. + to_rename = refs[1:] + else: + # At module top level, don't rename the final definition, + # as it will be publicly visible outside the module. + to_rename = refs[:-1] + for i, item in enumerate(to_rename): + rename_refs(item, i) + self.refs.pop() + + # Helpers for determining which assignments define new variables + + def clear(self) -> None: + self.blocks = [] + self.var_blocks = [] + + @contextmanager + def enter_block(self) -> Iterator[None]: + self.block_id += 1 + self.blocks.append(self.block_id) + self.block_loop_depth[self.block_id] = self.loop_depth + try: + yield + finally: + self.blocks.pop() + + @contextmanager + def enter_try(self) -> Iterator[None]: + self.disallow_redef_depth += 1 + try: + yield + finally: + self.disallow_redef_depth -= 1 + + @contextmanager + def enter_loop(self) -> Iterator[None]: + self.loop_depth += 1 + try: + yield + finally: + self.loop_depth -= 1 + + def current_block(self) -> int: + return self.blocks[-1] + + @contextmanager + def enter_scope(self, kind: int) -> Iterator[None]: + self.var_blocks.append({}) + self.refs.append({}) + self.num_reads.append({}) + self.scope_kinds.append(kind) + try: + yield + finally: + self.flush_refs() + self.var_blocks.pop() + self.num_reads.pop() + self.scope_kinds.pop() + + def is_nested(self) -> int: + return len(self.var_blocks) > 1 + + def reject_redefinition_of_vars_in_scope(self) -> None: + """Make it impossible to redefine defined variables in the current scope. + + This is used if we encounter a function definition that + can make it ambiguous which definition is live. Example: + + x = 0 + + def f() -> int: + return x + + x = '' # Error -- cannot redefine x across function definition + """ + var_blocks = self.var_blocks[-1] + for key in var_blocks: + var_blocks[key] = -1 + + def reject_redefinition_of_vars_in_loop(self) -> None: + """Reject redefinition of variables in the innermost loop. + + If there is an early exit from a loop, there may be ambiguity about which + value may escape the loop. 
Example where this matters: + + while f(): + x = 0 + if g(): + break + x = '' # Error -- not a redefinition + reveal_type(x) # int + + This method ensures that the second assignment to 'x' doesn't introduce a new + variable. + """ + var_blocks = self.var_blocks[-1] + for key, block in var_blocks.items(): + if self.block_loop_depth.get(block) == self.loop_depth: + var_blocks[key] = -1 + + def record_assignment(self, name: str, can_be_redefined: bool) -> bool: + """Record assignment to given name and return True if it defines a new variable. + + Args: + can_be_redefined: If True, allows assignment in the same block to redefine + this name (if this is a new definition) + """ + if self.num_reads[-1].get(name, -1) == 0: + # Only set, not read, so no reason to redefine + return False + if self.disallow_redef_depth > 0: + # Can't redefine within try/with a block. + can_be_redefined = False + block = self.current_block() + var_blocks = self.var_blocks[-1] + if name not in var_blocks: + # New definition in this scope. + if can_be_redefined: + # Store the block where this was defined to allow redefinition in + # the same block only. + var_blocks[name] = block + else: + # This doesn't support arbitrary redefinition. + var_blocks[name] = -1 + return True + elif var_blocks[name] == block: + # Redefinition -- defines a new variable with the same name. + return True + else: + # Assigns to an existing variable. + return False + + +class LimitedVariableRenameVisitor(TraverserVisitor): + """Perform some limited variable renaming in with statements. + + This allows reusing a variable in multiple with statements with + different types. For example, the two instances of 'x' can have + incompatible types: + + with C() as x: + f(x) + with D() as x: + g(x) + + The above code gets renamed conceptually into this (not valid Python!): + + with C() as x': + f(x') + with D() as x: + g(x) + + If there's a reference to a variable defined in 'with' outside the + statement, or if there's any trickiness around variable visibility + (e.g. function definitions), we give up and won't perform renaming. + + The main use case is to allow binding both readable and writable + binary files into the same variable. These have different types: + + with open(fnam, 'rb') as f: ... + with open(fnam, 'wb') as f: ... + """ + + def __init__(self) -> None: + # Short names of variables bound in with statements using "as" + # in a surrounding scope + self.bound_vars: list[str] = [] + # Stack of names that can't be safely renamed, per scope ('*' means that + # no names can be renamed) + self.skipped: list[set[str]] = [] + # References to variables that we may need to rename. Stack of + # scopes; each scope is a mapping from name to list of collections + # of names that refer to the same logical variable. + self.refs: list[dict[str, list[list[NameExpr]]]] = [] + + def visit_mypy_file(self, file_node: MypyFile) -> None: + """Rename variables within a file. + + This is the main entry point to this class. 
+ """ + with self.enter_scope(): + for d in file_node.defs: + d.accept(self) + + def visit_func_def(self, fdef: FuncDef) -> None: + self.reject_redefinition_of_vars_in_scope() + with self.enter_scope(): + for arg in fdef.arguments: + self.record_skipped(arg.variable.name) + super().visit_func_def(fdef) + + def visit_class_def(self, cdef: ClassDef) -> None: + self.reject_redefinition_of_vars_in_scope() + with self.enter_scope(): + super().visit_class_def(cdef) + + def visit_with_stmt(self, stmt: WithStmt) -> None: + for expr in stmt.expr: + expr.accept(self) + old_len = len(self.bound_vars) + for target in stmt.target: + if target is not None: + self.analyze_lvalue(target) + for target in stmt.target: + if target: + target.accept(self) + stmt.body.accept(self) + + while len(self.bound_vars) > old_len: + self.bound_vars.pop() + + def analyze_lvalue(self, lvalue: Lvalue) -> None: + if isinstance(lvalue, NameExpr): + name = lvalue.name + if name in self.bound_vars: + # Name bound in a surrounding with statement, so it can be renamed + self.visit_name_expr(lvalue) + else: + var_info = self.refs[-1] + if name not in var_info: + var_info[name] = [] + var_info[name].append([]) + self.bound_vars.append(name) + elif isinstance(lvalue, (ListExpr, TupleExpr)): + for item in lvalue.items: + self.analyze_lvalue(item) + elif isinstance(lvalue, MemberExpr): + lvalue.expr.accept(self) + elif isinstance(lvalue, IndexExpr): + lvalue.base.accept(self) + lvalue.index.accept(self) + elif isinstance(lvalue, StarExpr): + self.analyze_lvalue(lvalue.expr) + + def visit_import(self, imp: Import) -> None: + # We don't support renaming imports + for id, as_id in imp.ids: + self.record_skipped(as_id or id) + + def visit_import_from(self, imp: ImportFrom) -> None: + # We don't support renaming imports + for id, as_id in imp.names: + self.record_skipped(as_id or id) + + def visit_import_all(self, imp: ImportAll) -> None: + # Give up, since we don't know all imported names yet + self.reject_redefinition_of_vars_in_scope() + + def visit_name_expr(self, expr: NameExpr) -> None: + name = expr.name + if name in self.bound_vars: + # Record reference so that it can be renamed later + for scope in reversed(self.refs): + if name in scope: + scope[name][-1].append(expr) + else: + self.record_skipped(name) + + @contextmanager + def enter_scope(self) -> Iterator[None]: + self.skipped.append(set()) + self.refs.append({}) + yield None + self.flush_refs() + + def reject_redefinition_of_vars_in_scope(self) -> None: + self.record_skipped("*") + + def record_skipped(self, name: str) -> None: + self.skipped[-1].add(name) + + def flush_refs(self) -> None: + ref_dict = self.refs.pop() + skipped = self.skipped.pop() + if "*" not in skipped: + for name, refs in ref_dict.items(): + if len(refs) <= 1 or name in skipped: + continue + # At module top level we must not rename the final definition, + # as it may be publicly visible + to_rename = refs[:-1] + for i, item in enumerate(to_rename): + rename_refs(item, i) + + +def rename_refs(names: list[NameExpr], index: int) -> None: + name = names[0].name + new_name = name + "'" * (index + 1) + for expr in names: + expr.name = new_name diff --git a/.venv/lib/python3.12/site-packages/mypy/report.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/report.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..3d25992 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/report.cpython-312-x86_64-linux-gnu.so differ diff --git 
a/.venv/lib/python3.12/site-packages/mypy/report.py b/.venv/lib/python3.12/site-packages/mypy/report.py new file mode 100644 index 0000000..4a0b965 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/report.py @@ -0,0 +1,926 @@ +"""Classes for producing HTML reports about imprecision.""" + +from __future__ import annotations + +import collections +import itertools +import json +import os +import shutil +import sys +import time +import tokenize +from abc import ABCMeta, abstractmethod +from collections.abc import Iterator +from operator import attrgetter +from typing import Any, Callable, Final +from typing_extensions import TypeAlias as _TypeAlias +from urllib.request import pathname2url + +from mypy import stats +from mypy.defaults import REPORTER_NAMES +from mypy.nodes import Expression, FuncDef, MypyFile +from mypy.options import Options +from mypy.traverser import TraverserVisitor +from mypy.types import Type, TypeOfAny +from mypy.version import __version__ + +try: + from lxml import etree # type: ignore[import-untyped] + + LXML_INSTALLED = True +except ImportError: + LXML_INSTALLED = False + +type_of_any_name_map: Final[collections.OrderedDict[int, str]] = collections.OrderedDict( + [ + (TypeOfAny.unannotated, "Unannotated"), + (TypeOfAny.explicit, "Explicit"), + (TypeOfAny.from_unimported_type, "Unimported"), + (TypeOfAny.from_omitted_generics, "Omitted Generics"), + (TypeOfAny.from_error, "Error"), + (TypeOfAny.special_form, "Special Form"), + (TypeOfAny.implementation_artifact, "Implementation Artifact"), + ] +) + +ReporterClasses: _TypeAlias = dict[ + str, tuple[Callable[["Reports", str], "AbstractReporter"], bool] +] + +reporter_classes: Final[ReporterClasses] = {} + + +class Reports: + def __init__(self, data_dir: str, report_dirs: dict[str, str]) -> None: + self.data_dir = data_dir + self.reporters: list[AbstractReporter] = [] + self.named_reporters: dict[str, AbstractReporter] = {} + + for report_type, report_dir in sorted(report_dirs.items()): + self.add_report(report_type, report_dir) + + def add_report(self, report_type: str, report_dir: str) -> AbstractReporter: + try: + return self.named_reporters[report_type] + except KeyError: + pass + reporter_cls, needs_lxml = reporter_classes[report_type] + if needs_lxml and not LXML_INSTALLED: + print( + ( + "You must install the lxml package before you can run mypy" + " with `--{}-report`.\n" + "You can do this with `python3 -m pip install lxml`." 
+ ).format(report_type), + file=sys.stderr, + ) + raise ImportError + reporter = reporter_cls(self, report_dir) + self.reporters.append(reporter) + self.named_reporters[report_type] = reporter + return reporter + + def file( + self, + tree: MypyFile, + modules: dict[str, MypyFile], + type_map: dict[Expression, Type], + options: Options, + ) -> None: + for reporter in self.reporters: + reporter.on_file(tree, modules, type_map, options) + + def finish(self) -> None: + for reporter in self.reporters: + reporter.on_finish() + + +class AbstractReporter(metaclass=ABCMeta): + def __init__(self, reports: Reports, output_dir: str) -> None: + self.output_dir = output_dir + if output_dir != "": + os.makedirs(output_dir, exist_ok=True) + + @abstractmethod + def on_file( + self, + tree: MypyFile, + modules: dict[str, MypyFile], + type_map: dict[Expression, Type], + options: Options, + ) -> None: + pass + + @abstractmethod + def on_finish(self) -> None: + pass + + +def register_reporter( + report_name: str, + reporter: Callable[[Reports, str], AbstractReporter], + needs_lxml: bool = False, +) -> None: + reporter_classes[report_name] = (reporter, needs_lxml) + + +def alias_reporter(source_reporter: str, target_reporter: str) -> None: + reporter_classes[target_reporter] = reporter_classes[source_reporter] + + +def should_skip_path(path: str) -> bool: + if stats.is_special_module(path): + return True + if path.startswith(".."): + return True + if "stubs" in path.split("/") or "stubs" in path.split(os.sep): + return True + return False + + +def iterate_python_lines(path: str) -> Iterator[tuple[int, str]]: + """Return an iterator over (line number, line text) from a Python file.""" + if not os.path.isdir(path): # can happen with namespace packages + with tokenize.open(path) as input_file: + yield from enumerate(input_file, 1) + + +class FuncCounterVisitor(TraverserVisitor): + def __init__(self) -> None: + super().__init__() + self.counts = [0, 0] + + def visit_func_def(self, defn: FuncDef) -> None: + self.counts[defn.type is not None] += 1 + + +class LineCountReporter(AbstractReporter): + def __init__(self, reports: Reports, output_dir: str) -> None: + super().__init__(reports, output_dir) + self.counts: dict[str, tuple[int, int, int, int]] = {} + + def on_file( + self, + tree: MypyFile, + modules: dict[str, MypyFile], + type_map: dict[Expression, Type], + options: Options, + ) -> None: + # Count physical lines. This assumes the file's encoding is a + # superset of ASCII (or at least uses \n in its line endings). + if not os.path.isdir(tree.path): # can happen with namespace packages + with open(tree.path, "rb") as f: + physical_lines = len(f.readlines()) + else: + physical_lines = 0 + + func_counter = FuncCounterVisitor() + tree.accept(func_counter) + unannotated_funcs, annotated_funcs = func_counter.counts + total_funcs = annotated_funcs + unannotated_funcs + + # Don't count lines or functions as annotated if they have their errors ignored. 
+ if options.ignore_errors: + annotated_funcs = 0 + + imputed_annotated_lines = ( + physical_lines * annotated_funcs // total_funcs if total_funcs else physical_lines + ) + + self.counts[tree._fullname] = ( + imputed_annotated_lines, + physical_lines, + annotated_funcs, + total_funcs, + ) + + def on_finish(self) -> None: + counts: list[tuple[tuple[int, int, int, int], str]] = sorted( + ((c, p) for p, c in self.counts.items()), reverse=True + ) + total_counts = tuple(sum(c[i] for c, p in counts) for i in range(4)) + with open(os.path.join(self.output_dir, "linecount.txt"), "w") as f: + f.write("{:7} {:7} {:6} {:6} total\n".format(*total_counts)) + for c, p in counts: + f.write(f"{c[0]:7} {c[1]:7} {c[2]:6} {c[3]:6} {p}\n") + + +register_reporter("linecount", LineCountReporter) + + +class AnyExpressionsReporter(AbstractReporter): + """Report frequencies of different kinds of Any types.""" + + def __init__(self, reports: Reports, output_dir: str) -> None: + super().__init__(reports, output_dir) + self.counts: dict[str, tuple[int, int]] = {} + self.any_types_counter: dict[str, collections.Counter[int]] = {} + + def on_file( + self, + tree: MypyFile, + modules: dict[str, MypyFile], + type_map: dict[Expression, Type], + options: Options, + ) -> None: + visitor = stats.StatisticsVisitor( + inferred=True, + filename=tree.fullname, + modules=modules, + typemap=type_map, + all_nodes=True, + visit_untyped_defs=False, + ) + tree.accept(visitor) + self.any_types_counter[tree.fullname] = visitor.type_of_any_counter + num_unanalyzed_lines = list(visitor.line_map.values()).count(stats.TYPE_UNANALYZED) + # count each line of dead code as one expression of type "Any" + num_any = visitor.num_any_exprs + num_unanalyzed_lines + num_total = visitor.num_imprecise_exprs + visitor.num_precise_exprs + num_any + if num_total > 0: + self.counts[tree.fullname] = (num_any, num_total) + + def on_finish(self) -> None: + self._report_any_exprs() + self._report_types_of_anys() + + def _write_out_report( + self, filename: str, header: list[str], rows: list[list[str]], footer: list[str] + ) -> None: + row_len = len(header) + assert all(len(row) == row_len for row in rows + [header, footer]) + min_column_distance = 3 # minimum distance between numbers in two columns + widths = [-1] * row_len + for row in rows + [header, footer]: + for i, value in enumerate(row): + widths[i] = max(widths[i], len(value)) + for i, w in enumerate(widths): + # Do not add min_column_distance to the first column. 
+ if i > 0: + widths[i] = w + min_column_distance + with open(os.path.join(self.output_dir, filename), "w") as f: + header_str = ("{:>{}}" * len(widths)).format(*itertools.chain(*zip(header, widths))) + separator = "-" * len(header_str) + f.write(header_str + "\n") + f.write(separator + "\n") + for row_values in rows: + r = ("{:>{}}" * len(widths)).format(*itertools.chain(*zip(row_values, widths))) + f.write(r + "\n") + f.write(separator + "\n") + footer_str = ("{:>{}}" * len(widths)).format(*itertools.chain(*zip(footer, widths))) + f.write(footer_str + "\n") + + def _report_any_exprs(self) -> None: + total_any = sum(num_any for num_any, _ in self.counts.values()) + total_expr = sum(total for _, total in self.counts.values()) + total_coverage = 100.0 + if total_expr > 0: + total_coverage = (float(total_expr - total_any) / float(total_expr)) * 100 + + column_names = ["Name", "Anys", "Exprs", "Coverage"] + rows: list[list[str]] = [] + for filename in sorted(self.counts): + (num_any, num_total) = self.counts[filename] + coverage = (float(num_total - num_any) / float(num_total)) * 100 + coverage_str = f"{coverage:.2f}%" + rows.append([filename, str(num_any), str(num_total), coverage_str]) + rows.sort(key=lambda x: x[0]) + total_row = ["Total", str(total_any), str(total_expr), f"{total_coverage:.2f}%"] + self._write_out_report("any-exprs.txt", column_names, rows, total_row) + + def _report_types_of_anys(self) -> None: + total_counter: collections.Counter[int] = collections.Counter() + for counter in self.any_types_counter.values(): + for any_type, value in counter.items(): + total_counter[any_type] += value + file_column_name = "Name" + total_row_name = "Total" + column_names = [file_column_name] + list(type_of_any_name_map.values()) + rows: list[list[str]] = [] + for filename, counter in self.any_types_counter.items(): + rows.append([filename] + [str(counter[typ]) for typ in type_of_any_name_map]) + rows.sort(key=lambda x: x[0]) + total_row = [total_row_name] + [str(total_counter[typ]) for typ in type_of_any_name_map] + self._write_out_report("types-of-anys.txt", column_names, rows, total_row) + + +register_reporter("any-exprs", AnyExpressionsReporter) + + +class LineCoverageVisitor(TraverserVisitor): + def __init__(self, source: list[str]) -> None: + self.source = source + + # For each line of source, we maintain a pair of + # * the indentation level of the surrounding function + # (-1 if not inside a function), and + # * whether the surrounding function is typed. + # Initially, everything is covered at indentation level -1. + self.lines_covered = [(-1, True) for l in source] + + # The Python AST has position information for the starts of + # elements, but not for their ends. Fortunately the + # indentation-based syntax makes it pretty easy to find where a + # block ends without doing any real parsing. + + # TODO: Handle line continuations (explicit and implicit) and + # multi-line string literals. (But at least line continuations + # are normally more indented than their surrounding block anyways, + # by PEP 8.) + + def indentation_level(self, line_number: int) -> int | None: + """Return the indentation of a line of the source (specified by + zero-indexed line number). 
Returns None for blank lines or comments.""" + line = self.source[line_number] + indent = 0 + for char in list(line): + if char == " ": + indent += 1 + elif char == "\t": + indent = 8 * ((indent + 8) // 8) + elif char == "#": + # Line is a comment; ignore it + return None + elif char == "\n": + # Line is entirely whitespace; ignore it + return None + # TODO line continuation (\) + else: + # Found a non-whitespace character + return indent + # Line is entirely whitespace, and at end of file + # with no trailing newline; ignore it + return None + + def visit_func_def(self, defn: FuncDef) -> None: + start_line = defn.line - 1 + start_indent = None + # When a function is decorated, sometimes the start line will point to + # whitespace or comments between the decorator and the function, so + # we have to look for the start. + while start_line < len(self.source): + start_indent = self.indentation_level(start_line) + if start_indent is not None: + break + start_line += 1 + # If we can't find the function give up and don't annotate anything. + # Our line numbers are not reliable enough to be asserting on. + if start_indent is None: + return + + cur_line = start_line + 1 + end_line = cur_line + # After this loop, function body will be lines [start_line, end_line) + while cur_line < len(self.source): + cur_indent = self.indentation_level(cur_line) + if cur_indent is None: + # Consume the line, but don't mark it as belonging to the function yet. + cur_line += 1 + elif cur_indent > start_indent: + # A non-blank line that belongs to the function. + cur_line += 1 + end_line = cur_line + else: + # We reached a line outside the function definition. + break + + is_typed = defn.type is not None + for line in range(start_line, end_line): + old_indent, _ = self.lines_covered[line] + # If there was an old indent level for this line, and the new + # level isn't increasing the indentation, ignore it. + # This is to be defensive against funniness in our line numbers, + # which are not always reliable. + if old_indent <= start_indent: + self.lines_covered[line] = (start_indent, is_typed) + + # Visit the body, in case there are nested functions + super().visit_func_def(defn) + + +class LineCoverageReporter(AbstractReporter): + """Exact line coverage reporter. + + This reporter writes a JSON dictionary with one field 'lines' to + the file 'coverage.json' in the specified report directory. The + value of that field is a dictionary which associates to each + source file's absolute pathname the list of line numbers that + belong to typed functions in that file. 
+ """ + + def __init__(self, reports: Reports, output_dir: str) -> None: + super().__init__(reports, output_dir) + self.lines_covered: dict[str, list[int]] = {} + + def on_file( + self, + tree: MypyFile, + modules: dict[str, MypyFile], + type_map: dict[Expression, Type], + options: Options, + ) -> None: + if os.path.isdir(tree.path): # can happen with namespace packages + return + + with open(tree.path) as f: + tree_source = f.readlines() + + coverage_visitor = LineCoverageVisitor(tree_source) + tree.accept(coverage_visitor) + + covered_lines = [] + for line_number, (_, typed) in enumerate(coverage_visitor.lines_covered): + if typed: + covered_lines.append(line_number + 1) + + self.lines_covered[os.path.abspath(tree.path)] = covered_lines + + def on_finish(self) -> None: + with open(os.path.join(self.output_dir, "coverage.json"), "w") as f: + json.dump({"lines": self.lines_covered}, f) + + +register_reporter("linecoverage", LineCoverageReporter) + + +class FileInfo: + def __init__(self, name: str, module: str) -> None: + self.name = name + self.module = module + self.counts = [0] * len(stats.precision_names) + + def total(self) -> int: + return sum(self.counts) + + def attrib(self) -> dict[str, str]: + return {name: str(val) for name, val in sorted(zip(stats.precision_names, self.counts))} + + +class MemoryXmlReporter(AbstractReporter): + """Internal reporter that generates XML in memory. + + This is used by all other XML-based reporters to avoid duplication. + """ + + def __init__(self, reports: Reports, output_dir: str) -> None: + super().__init__(reports, output_dir) + + self.xslt_html_path = os.path.join(reports.data_dir, "xml", "mypy-html.xslt") + self.xslt_txt_path = os.path.join(reports.data_dir, "xml", "mypy-txt.xslt") + self.css_html_path = os.path.join(reports.data_dir, "xml", "mypy-html.css") + xsd_path = os.path.join(reports.data_dir, "xml", "mypy.xsd") + self.schema = etree.XMLSchema(etree.parse(xsd_path)) + self.last_xml: Any | None = None + self.files: list[FileInfo] = [] + + # XML doesn't like control characters, but they are sometimes + # legal in source code (e.g. comments, string literals). + # Tabs (#x09) are allowed in XML content. + control_fixer: Final = str.maketrans("".join(chr(i) for i in range(32) if i != 9), "?" * 31) + + def on_file( + self, + tree: MypyFile, + modules: dict[str, MypyFile], + type_map: dict[Expression, Type], + options: Options, + ) -> None: + self.last_xml = None + + try: + path = os.path.relpath(tree.path) + except ValueError: + return + + if should_skip_path(path) or os.path.isdir(path): + return # `path` can sometimes be a directory, see #11334 + + visitor = stats.StatisticsVisitor( + inferred=True, + filename=tree.fullname, + modules=modules, + typemap=type_map, + all_nodes=True, + ) + tree.accept(visitor) + + root = etree.Element("mypy-report-file", name=path, module=tree._fullname) + doc = etree.ElementTree(root) + file_info = FileInfo(path, tree._fullname) + + for lineno, line_text in iterate_python_lines(path): + status = visitor.line_map.get(lineno, stats.TYPE_EMPTY) + file_info.counts[status] += 1 + etree.SubElement( + root, + "line", + any_info=self._get_any_info_for_line(visitor, lineno), + content=line_text.rstrip("\n").translate(self.control_fixer), + number=str(lineno), + precision=stats.precision_names[status], + ) + # Assumes a layout similar to what XmlReporter uses. 
+ xslt_path = os.path.relpath("mypy-html.xslt", path) + transform_pi = etree.ProcessingInstruction( + "xml-stylesheet", f'type="text/xsl" href="{pathname2url(xslt_path)}"' + ) + root.addprevious(transform_pi) + self.schema.assertValid(doc) + + self.last_xml = doc + self.files.append(file_info) + + @staticmethod + def _get_any_info_for_line(visitor: stats.StatisticsVisitor, lineno: int) -> str: + if lineno in visitor.any_line_map: + result = "Any Types on this line: " + counter: collections.Counter[int] = collections.Counter() + for typ in visitor.any_line_map[lineno]: + counter[typ.type_of_any] += 1 + for any_type, occurrences in counter.items(): + result += f"\n{type_of_any_name_map[any_type]} (x{occurrences})" + return result + else: + return "No Anys on this line!" + + def on_finish(self) -> None: + self.last_xml = None + # index_path = os.path.join(self.output_dir, 'index.xml') + output_files = sorted(self.files, key=lambda x: x.module) + + root = etree.Element("mypy-report-index", name="index") + doc = etree.ElementTree(root) + + for file_info in output_files: + etree.SubElement( + root, + "file", + file_info.attrib(), + module=file_info.module, + name=pathname2url(file_info.name), + total=str(file_info.total()), + ) + xslt_path = os.path.relpath("mypy-html.xslt", ".") + transform_pi = etree.ProcessingInstruction( + "xml-stylesheet", f'type="text/xsl" href="{pathname2url(xslt_path)}"' + ) + root.addprevious(transform_pi) + self.schema.assertValid(doc) + + self.last_xml = doc + + +register_reporter("memory-xml", MemoryXmlReporter, needs_lxml=True) + + +def get_line_rate(covered_lines: int, total_lines: int) -> str: + if total_lines == 0: + return str(1.0) + else: + return f"{covered_lines / total_lines:.4f}" + + +class CoberturaPackage: + """Container for XML and statistics mapping python modules to Cobertura package.""" + + def __init__(self, name: str) -> None: + self.name = name + self.classes: dict[str, Any] = {} + self.packages: dict[str, CoberturaPackage] = {} + self.total_lines = 0 + self.covered_lines = 0 + + def as_xml(self) -> Any: + package_element = etree.Element("package", complexity="1.0", name=self.name) + package_element.attrib["branch-rate"] = "0" + package_element.attrib["line-rate"] = get_line_rate(self.covered_lines, self.total_lines) + classes_element = etree.SubElement(package_element, "classes") + for class_name in sorted(self.classes): + classes_element.append(self.classes[class_name]) + self.add_packages(package_element) + return package_element + + def add_packages(self, parent_element: Any) -> None: + if self.packages: + packages_element = etree.SubElement(parent_element, "packages") + for package in sorted(self.packages.values(), key=attrgetter("name")): + packages_element.append(package.as_xml()) + + +class CoberturaXmlReporter(AbstractReporter): + """Reporter for generating Cobertura compliant XML.""" + + def __init__(self, reports: Reports, output_dir: str) -> None: + super().__init__(reports, output_dir) + + self.root = etree.Element("coverage", timestamp=str(int(time.time())), version=__version__) + self.doc = etree.ElementTree(self.root) + self.root_package = CoberturaPackage(".") + + def on_file( + self, + tree: MypyFile, + modules: dict[str, MypyFile], + type_map: dict[Expression, Type], + options: Options, + ) -> None: + path = os.path.relpath(tree.path) + visitor = stats.StatisticsVisitor( + inferred=True, + filename=tree.fullname, + modules=modules, + typemap=type_map, + all_nodes=True, + ) + tree.accept(visitor) + + class_name = 
os.path.basename(path) + file_info = FileInfo(path, tree._fullname) + class_element = etree.Element("class", complexity="1.0", filename=path, name=class_name) + etree.SubElement(class_element, "methods") + lines_element = etree.SubElement(class_element, "lines") + + class_lines_covered = 0 + class_total_lines = 0 + for lineno, _ in iterate_python_lines(path): + status = visitor.line_map.get(lineno, stats.TYPE_EMPTY) + hits = 0 + branch = False + if status == stats.TYPE_EMPTY: + continue + class_total_lines += 1 + if status != stats.TYPE_ANY: + class_lines_covered += 1 + hits = 1 + if status == stats.TYPE_IMPRECISE: + branch = True + file_info.counts[status] += 1 + line_element = etree.SubElement( + lines_element, + "line", + branch=str(branch).lower(), + hits=str(hits), + number=str(lineno), + precision=stats.precision_names[status], + ) + if branch: + line_element.attrib["condition-coverage"] = "50% (1/2)" + class_element.attrib["branch-rate"] = "0" + class_element.attrib["line-rate"] = get_line_rate(class_lines_covered, class_total_lines) + # parent_module is set to whichever module contains this file. For most files, we want + # to simply strip the last element off of the module. But for __init__.py files, + # the module == the parent module. + parent_module = file_info.module.rsplit(".", 1)[0] + if file_info.name.endswith("__init__.py"): + parent_module = file_info.module + + if parent_module not in self.root_package.packages: + self.root_package.packages[parent_module] = CoberturaPackage(parent_module) + current_package = self.root_package.packages[parent_module] + packages_to_update = [self.root_package, current_package] + for package in packages_to_update: + package.total_lines += class_total_lines + package.covered_lines += class_lines_covered + current_package.classes[class_name] = class_element + + def on_finish(self) -> None: + self.root.attrib["line-rate"] = get_line_rate( + self.root_package.covered_lines, self.root_package.total_lines + ) + self.root.attrib["branch-rate"] = "0" + self.root.attrib["lines-covered"] = str(self.root_package.covered_lines) + self.root.attrib["lines-valid"] = str(self.root_package.total_lines) + sources = etree.SubElement(self.root, "sources") + source_element = etree.SubElement(sources, "source") + source_element.text = os.getcwd() + self.root_package.add_packages(self.root) + out_path = os.path.join(self.output_dir, "cobertura.xml") + self.doc.write(out_path, encoding="utf-8", pretty_print=True) + print("Generated Cobertura report:", os.path.abspath(out_path)) + + +register_reporter("cobertura-xml", CoberturaXmlReporter, needs_lxml=True) + + +class AbstractXmlReporter(AbstractReporter): + """Internal abstract class for reporters that work via XML.""" + + def __init__(self, reports: Reports, output_dir: str) -> None: + super().__init__(reports, output_dir) + + memory_reporter = reports.add_report("memory-xml", "") + assert isinstance(memory_reporter, MemoryXmlReporter) + # The dependency will be called first. + self.memory_xml = memory_reporter + + +class XmlReporter(AbstractXmlReporter): + """Public reporter that exports XML. + + The produced XML files contain a reference to the absolute path + of the html transform, so they will be locally viewable in a browser. + + However, there is a bug in Chrome and all other WebKit-based browsers + that makes it fail from file:// URLs but work on http:// URLs. 
+ """ + + def on_file( + self, + tree: MypyFile, + modules: dict[str, MypyFile], + type_map: dict[Expression, Type], + options: Options, + ) -> None: + last_xml = self.memory_xml.last_xml + if last_xml is None: + return + path = os.path.relpath(tree.path) + if path.startswith(".."): + return + out_path = os.path.join(self.output_dir, "xml", path + ".xml") + os.makedirs(os.path.dirname(out_path), exist_ok=True) + last_xml.write(out_path, encoding="utf-8") + + def on_finish(self) -> None: + last_xml = self.memory_xml.last_xml + assert last_xml is not None + out_path = os.path.join(self.output_dir, "index.xml") + out_xslt = os.path.join(self.output_dir, "mypy-html.xslt") + out_css = os.path.join(self.output_dir, "mypy-html.css") + last_xml.write(out_path, encoding="utf-8") + shutil.copyfile(self.memory_xml.xslt_html_path, out_xslt) + shutil.copyfile(self.memory_xml.css_html_path, out_css) + print("Generated XML report:", os.path.abspath(out_path)) + + +register_reporter("xml", XmlReporter, needs_lxml=True) + + +class XsltHtmlReporter(AbstractXmlReporter): + """Public reporter that exports HTML via XSLT. + + This is slightly different than running `xsltproc` on the .xml files, + because it passes a parameter to rewrite the links. + """ + + def __init__(self, reports: Reports, output_dir: str) -> None: + super().__init__(reports, output_dir) + + self.xslt_html = etree.XSLT(etree.parse(self.memory_xml.xslt_html_path)) + self.param_html = etree.XSLT.strparam("html") + + def on_file( + self, + tree: MypyFile, + modules: dict[str, MypyFile], + type_map: dict[Expression, Type], + options: Options, + ) -> None: + last_xml = self.memory_xml.last_xml + if last_xml is None: + return + path = os.path.relpath(tree.path) + if path.startswith(".."): + return + out_path = os.path.join(self.output_dir, "html", path + ".html") + os.makedirs(os.path.dirname(out_path), exist_ok=True) + transformed_html = bytes(self.xslt_html(last_xml, ext=self.param_html)) + with open(out_path, "wb") as out_file: + out_file.write(transformed_html) + + def on_finish(self) -> None: + last_xml = self.memory_xml.last_xml + assert last_xml is not None + out_path = os.path.join(self.output_dir, "index.html") + out_css = os.path.join(self.output_dir, "mypy-html.css") + transformed_html = bytes(self.xslt_html(last_xml, ext=self.param_html)) + with open(out_path, "wb") as out_file: + out_file.write(transformed_html) + shutil.copyfile(self.memory_xml.css_html_path, out_css) + print("Generated HTML report (via XSLT):", os.path.abspath(out_path)) + + +register_reporter("xslt-html", XsltHtmlReporter, needs_lxml=True) + + +class XsltTxtReporter(AbstractXmlReporter): + """Public reporter that exports TXT via XSLT. + + Currently this only does the summary, not the individual reports. 
+ """ + + def __init__(self, reports: Reports, output_dir: str) -> None: + super().__init__(reports, output_dir) + + self.xslt_txt = etree.XSLT(etree.parse(self.memory_xml.xslt_txt_path)) + + def on_file( + self, + tree: MypyFile, + modules: dict[str, MypyFile], + type_map: dict[Expression, Type], + options: Options, + ) -> None: + pass + + def on_finish(self) -> None: + last_xml = self.memory_xml.last_xml + assert last_xml is not None + out_path = os.path.join(self.output_dir, "index.txt") + transformed_txt = bytes(self.xslt_txt(last_xml)) + with open(out_path, "wb") as out_file: + out_file.write(transformed_txt) + print("Generated TXT report (via XSLT):", os.path.abspath(out_path)) + + +register_reporter("xslt-txt", XsltTxtReporter, needs_lxml=True) + +alias_reporter("xslt-html", "html") +alias_reporter("xslt-txt", "txt") + + +class LinePrecisionReporter(AbstractReporter): + """Report per-module line counts for typing precision. + + Each line is classified into one of these categories: + + * precise (fully type checked) + * imprecise (Any types in a type component, such as List[Any]) + * any (something with an Any type, implicit or explicit) + * empty (empty line, comment or docstring) + * unanalyzed (mypy considers line unreachable) + + The meaning of these categories varies slightly depending on + context. + """ + + def __init__(self, reports: Reports, output_dir: str) -> None: + super().__init__(reports, output_dir) + self.files: list[FileInfo] = [] + + def on_file( + self, + tree: MypyFile, + modules: dict[str, MypyFile], + type_map: dict[Expression, Type], + options: Options, + ) -> None: + try: + path = os.path.relpath(tree.path) + except ValueError: + return + + if should_skip_path(path): + return + + visitor = stats.StatisticsVisitor( + inferred=True, + filename=tree.fullname, + modules=modules, + typemap=type_map, + all_nodes=True, + ) + tree.accept(visitor) + + file_info = FileInfo(path, tree._fullname) + for lineno, _ in iterate_python_lines(path): + status = visitor.line_map.get(lineno, stats.TYPE_EMPTY) + file_info.counts[status] += 1 + + self.files.append(file_info) + + def on_finish(self) -> None: + if not self.files: + # Nothing to do. + return + output_files = sorted(self.files, key=lambda x: x.module) + report_file = os.path.join(self.output_dir, "lineprecision.txt") + width = max(4, max(len(info.module) for info in output_files)) + titles = ("Lines", "Precise", "Imprecise", "Any", "Empty", "Unanalyzed") + widths = (width,) + tuple(len(t) for t in titles) + fmt = "{:%d} {:%d} {:%d} {:%d} {:%d} {:%d} {:%d}\n" % widths + with open(report_file, "w") as f: + f.write(fmt.format("Name", *titles)) + f.write("-" * (width + 51) + "\n") + for file_info in output_files: + counts = file_info.counts + f.write( + fmt.format( + file_info.module.ljust(width), + file_info.total(), + counts[stats.TYPE_PRECISE], + counts[stats.TYPE_IMPRECISE], + counts[stats.TYPE_ANY], + counts[stats.TYPE_EMPTY], + counts[stats.TYPE_UNANALYZED], + ) + ) + + +register_reporter("lineprecision", LinePrecisionReporter) + + +# Reporter class names are defined twice to speed up mypy startup, as this +# module is slow to import. Ensure that the two definitions match. 
+assert set(reporter_classes) == set(REPORTER_NAMES) diff --git a/.venv/lib/python3.12/site-packages/mypy/scope.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/scope.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..e1d0137 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/scope.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/scope.py b/.venv/lib/python3.12/site-packages/mypy/scope.py new file mode 100644 index 0000000..766048c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/scope.py @@ -0,0 +1,126 @@ +"""Track current scope to easily calculate the corresponding fine-grained target. + +TODO: Use everywhere where we track targets, including in mypy.errors. +""" + +from __future__ import annotations + +from collections.abc import Iterator +from contextlib import contextmanager, nullcontext +from typing import Optional +from typing_extensions import TypeAlias as _TypeAlias + +from mypy.nodes import FuncBase, TypeInfo + +SavedScope: _TypeAlias = tuple[str, Optional[TypeInfo], Optional[FuncBase]] + + +class Scope: + """Track which target we are processing at any given time.""" + + def __init__(self) -> None: + self.module: str | None = None + self.classes: list[TypeInfo] = [] + self.function: FuncBase | None = None + self.functions: list[FuncBase] = [] + # Number of nested scopes ignored (that don't get their own separate targets) + self.ignored = 0 + + def current_module_id(self) -> str: + assert self.module + return self.module + + def current_target(self) -> str: + """Return the current target (non-class; for a class return enclosing module).""" + assert self.module + if self.function: + fullname = self.function.fullname + return fullname or "" + return self.module + + def current_full_target(self) -> str: + """Return the current target (may be a class).""" + assert self.module + if self.function: + return self.function.fullname + if self.classes: + return self.classes[-1].fullname + return self.module + + def current_type_name(self) -> str | None: + """Return the current type's short name if it exists""" + return self.classes[-1].name if self.classes else None + + def current_function_name(self) -> str | None: + """Return the current function's short name if it exists""" + return self.function.name if self.function else None + + @contextmanager + def module_scope(self, prefix: str) -> Iterator[None]: + self.module = prefix + self.classes = [] + self.function = None + self.ignored = 0 + yield + assert self.module + self.module = None + + @contextmanager + def function_scope(self, fdef: FuncBase) -> Iterator[None]: + self.functions.append(fdef) + if not self.function: + self.function = fdef + else: + # Nested functions are part of the topmost function target. + self.ignored += 1 + yield + self.functions.pop() + if self.ignored: + # Leave a scope that's included in the enclosing target. + self.ignored -= 1 + else: + assert self.function + self.function = None + + def outer_functions(self) -> list[FuncBase]: + return self.functions[:-1] + + def enter_class(self, info: TypeInfo) -> None: + """Enter a class target scope.""" + if not self.function: + self.classes.append(info) + else: + # Classes within functions are part of the enclosing function target. + self.ignored += 1 + + def leave_class(self) -> None: + """Leave a class target scope.""" + if self.ignored: + # Leave a scope that's included in the enclosing target. 
+ self.ignored -= 1 + else: + assert self.classes + # Leave the innermost class. + self.classes.pop() + + @contextmanager + def class_scope(self, info: TypeInfo) -> Iterator[None]: + self.enter_class(info) + yield + self.leave_class() + + def save(self) -> SavedScope: + """Produce a saved scope that can be entered with saved_scope()""" + assert self.module + # We only save the innermost class, which is sufficient since + # the rest are only needed for when classes are left. + cls = self.classes[-1] if self.classes else None + return self.module, cls, self.function + + @contextmanager + def saved_scope(self, saved: SavedScope) -> Iterator[None]: + module, info, function = saved + with self.module_scope(module): + with self.class_scope(info) if info else nullcontext(): + with self.function_scope(function) if function else nullcontext(): + yield diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/semanal.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..47745bc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/semanal.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal.py b/.venv/lib/python3.12/site-packages/mypy/semanal.py new file mode 100644 index 0000000..1035efb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/semanal.py @@ -0,0 +1,8277 @@ +"""The semantic analyzer. + +Bind names to definitions and do various other simple consistency +checks. Populate symbol tables. The semantic analyzer also detects +special forms which reuse generic syntax such as NamedTuple and +cast(). Multiple analysis iterations may be needed to analyze forward +references and import cycles. Each iteration "fills in" additional +bindings and references until everything has been bound. + +For example, consider this program: + + x = 1 + y = x + +Here semantic analysis would detect that the assignment 'x = 1' +defines a new variable, the type of which is to be inferred (in a +later pass; type inference or type checking is not part of semantic +analysis). Also, it would bind both references to 'x' to the same +module-level variable (Var) node. The second assignment would also +be analyzed, and the type of 'y' marked as being inferred. + +Semantic analysis of types is implemented in typeanal.py. + +See semanal_main.py for the top-level logic. + +Some important properties: + +* After semantic analysis is complete, no PlaceholderNode and + PlaceholderType instances should remain. During semantic analysis, + if we encounter one of these, the current target should be deferred. + +* A TypeInfo is only created once we know certain basic information about + a type, such as the MRO, existence of a Tuple base class (e.g., for named + tuples), and whether we have a TypedDict. We use a temporary + PlaceholderNode node in the symbol table if some such information is + missing. + +* For assignments, we only add a non-placeholder symbol table entry once + we know the sort of thing being defined (variable, NamedTuple, type alias, + etc.). + +* Every part of the analysis step must support multiple iterations over + the same AST nodes, and each iteration must be able to fill in arbitrary + things that were missing or incomplete in previous iterations. + +* Changes performed by the analysis need to be reversible, since mypy + daemon strips and reuses existing ASTs (to improve performance and/or + reduce memory use). 
+""" + +from __future__ import annotations + +import re +from collections.abc import Collection, Iterable, Iterator +from contextlib import contextmanager +from typing import Any, Callable, Final, TypeVar, cast +from typing_extensions import TypeAlias as _TypeAlias, TypeGuard, assert_never + +from mypy import errorcodes as codes, message_registry +from mypy.constant_fold import constant_fold_expr +from mypy.errorcodes import PROPERTY_DECORATOR, ErrorCode +from mypy.errors import Errors, report_internal_error +from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type +from mypy.message_registry import ErrorMessage +from mypy.messages import ( + SUGGESTED_TEST_FIXTURES, + TYPES_FOR_UNIMPORTED_HINTS, + MessageBuilder, + best_matches, + pretty_seq, +) +from mypy.mro import MroError, calculate_mro +from mypy.nodes import ( + ARG_NAMED, + ARG_POS, + ARG_STAR2, + CONTRAVARIANT, + COVARIANT, + GDEF, + IMPLICITLY_ABSTRACT, + INVARIANT, + IS_ABSTRACT, + LDEF, + MDEF, + NOT_ABSTRACT, + PARAM_SPEC_KIND, + REVEAL_LOCALS, + REVEAL_TYPE, + RUNTIME_PROTOCOL_DECOS, + SYMBOL_FUNCBASE_TYPES, + TYPE_VAR_KIND, + TYPE_VAR_TUPLE_KIND, + VARIANCE_NOT_READY, + ArgKind, + AssertStmt, + AssertTypeExpr, + AssignmentExpr, + AssignmentStmt, + AwaitExpr, + Block, + BreakStmt, + BytesExpr, + CallExpr, + CastExpr, + ClassDef, + ComparisonExpr, + ComplexExpr, + ConditionalExpr, + Context, + ContinueStmt, + DataclassTransformSpec, + Decorator, + DelStmt, + DictExpr, + DictionaryComprehension, + EllipsisExpr, + EnumCallExpr, + Expression, + ExpressionStmt, + FakeExpression, + FloatExpr, + ForStmt, + FuncBase, + FuncDef, + FuncItem, + GeneratorExpr, + GlobalDecl, + IfStmt, + Import, + ImportAll, + ImportBase, + ImportFrom, + IndexExpr, + IntExpr, + LambdaExpr, + ListComprehension, + ListExpr, + Lvalue, + MatchStmt, + MaybeTypeExpression, + MemberExpr, + MypyFile, + NamedTupleExpr, + NameExpr, + Node, + NonlocalDecl, + OperatorAssignmentStmt, + OpExpr, + OverloadedFuncDef, + OverloadPart, + ParamSpecExpr, + PassStmt, + PlaceholderNode, + PromoteExpr, + RaiseStmt, + RefExpr, + ReturnStmt, + RevealExpr, + SetComprehension, + SetExpr, + SliceExpr, + StarExpr, + Statement, + StrExpr, + SuperExpr, + SymbolNode, + SymbolTable, + SymbolTableNode, + TempNode, + TryStmt, + TupleExpr, + TypeAlias, + TypeAliasExpr, + TypeAliasStmt, + TypeApplication, + TypedDictExpr, + TypeFormExpr, + TypeInfo, + TypeParam, + TypeVarExpr, + TypeVarLikeExpr, + TypeVarTupleExpr, + UnaryExpr, + Var, + WhileStmt, + WithStmt, + YieldExpr, + YieldFromExpr, + get_member_expr_fullname, + implicit_module_attrs, + is_final_node, + type_aliases, + type_aliases_source_versions, + typing_extensions_aliases, +) +from mypy.options import TYPE_FORM, Options +from mypy.patterns import ( + AsPattern, + ClassPattern, + MappingPattern, + OrPattern, + SequencePattern, + SingletonPattern, + StarredPattern, + ValuePattern, +) +from mypy.plugin import ( + ClassDefContext, + DynamicClassDefContext, + Plugin, + SemanticAnalyzerPluginInterface, +) +from mypy.plugins import dataclasses as dataclasses_plugin +from mypy.reachability import ( + ALWAYS_FALSE, + ALWAYS_TRUE, + MYPY_FALSE, + MYPY_TRUE, + infer_condition_value, + infer_reachability_of_if_statement, + infer_reachability_of_match_statement, +) +from mypy.scope import Scope +from mypy.semanal_enum import EnumCallAnalyzer +from mypy.semanal_namedtuple import NamedTupleAnalyzer +from mypy.semanal_newtype import NewTypeAnalyzer +from mypy.semanal_shared import ( + ALLOW_INCOMPATIBLE_OVERRIDE, + 
PRIORITY_FALLBACKS, + SemanticAnalyzerInterface, + calculate_tuple_fallback, + find_dataclass_transform_spec, + has_placeholder, + parse_bool, + require_bool_literal_argument, + set_callable_name as set_callable_name, +) +from mypy.semanal_typeddict import TypedDictAnalyzer +from mypy.tvar_scope import TypeVarLikeScope +from mypy.typeanal import ( + SELF_TYPE_NAMES, + FindTypeVarVisitor, + TypeAnalyser, + TypeVarDefaultTranslator, + TypeVarLikeList, + analyze_type_alias, + check_for_explicit_any, + detect_diverging_alias, + find_self_type, + fix_instance, + has_any_from_unimported_type, + type_constructors, + validate_instance, +) +from mypy.typeops import function_type, get_type_vars, try_getting_str_literals_from_type +from mypy.types import ( + ASSERT_TYPE_NAMES, + DATACLASS_TRANSFORM_NAMES, + DEPRECATED_TYPE_NAMES, + DISJOINT_BASE_DECORATOR_NAMES, + FINAL_DECORATOR_NAMES, + FINAL_TYPE_NAMES, + IMPORTED_REVEAL_TYPE_NAMES, + NEVER_NAMES, + OVERLOAD_NAMES, + OVERRIDE_DECORATOR_NAMES, + PROTOCOL_NAMES, + REVEAL_TYPE_NAMES, + TPDICT_NAMES, + TYPE_ALIAS_NAMES, + TYPE_CHECK_ONLY_NAMES, + TYPE_NAMES, + TYPE_VAR_LIKE_NAMES, + TYPED_NAMEDTUPLE_NAMES, + UNPACK_TYPE_NAMES, + AnyType, + CallableType, + FunctionLike, + Instance, + LiteralType, + NoneType, + Overloaded, + Parameters, + ParamSpecType, + PlaceholderType, + ProperType, + TrivialSyntheticTypeTranslator, + TupleType, + Type, + TypeAliasType, + TypedDictType, + TypeOfAny, + TypeType, + TypeVarId, + TypeVarLikeType, + TypeVarTupleType, + TypeVarType, + UnboundType, + UnionType, + UnpackType, + flatten_nested_tuples, + get_proper_type, + get_proper_types, + has_type_vars, + is_named_instance, + remove_dups, + type_vars_as_args, +) +from mypy.types_utils import is_invalid_recursive_alias, store_argument_type +from mypy.typevars import fill_typevars +from mypy.util import correct_relative_import, is_dunder, module_prefix, unmangle, unnamed_function +from mypy.visitor import NodeVisitor + +T = TypeVar("T") + + +# Whether to print diagnostic information for failed full parses +# in SemanticAnalyzer.try_parse_as_type_expression(). +# +# See also: misc/analyze_typeform_stats.py +DEBUG_TYPE_EXPRESSION_FULL_PARSE_FAILURES: Final = False + + +FUTURE_IMPORTS: Final = { + "__future__.nested_scopes": "nested_scopes", + "__future__.generators": "generators", + "__future__.division": "division", + "__future__.absolute_import": "absolute_import", + "__future__.with_statement": "with_statement", + "__future__.print_function": "print_function", + "__future__.unicode_literals": "unicode_literals", + "__future__.barry_as_FLUFL": "barry_as_FLUFL", + "__future__.generator_stop": "generator_stop", + "__future__.annotations": "annotations", +} + + +# Special cased built-in classes that are needed for basic functionality and need to be +# available very early on. +CORE_BUILTIN_CLASSES: Final = ["object", "bool", "function"] + + +# Python has several different scope/namespace kinds with subtly different semantics. +SCOPE_GLOBAL: Final = 0 # Module top level +SCOPE_CLASS: Final = 1 # Class body +SCOPE_FUNC: Final = 2 # Function or lambda +SCOPE_COMPREHENSION: Final = 3 # Comprehension or generator expression +SCOPE_ANNOTATION: Final = 4 # Annotation scopes for type parameters and aliases (PEP 695) + + +# Used for tracking incomplete references +Tag: _TypeAlias = int + + +# Matches two words separated by whitespace, where each word lacks +# any symbols which have special meaning in a type expression. 
+# +# Any string literal matching this common pattern cannot be a valid +# type expression and can be ignored quickly when attempting to parse a +# string literal as a type expression. +_MULTIPLE_WORDS_NONTYPE_RE = re.compile(r'\s*[^\s.\'"|\[]+\s+[^\s.\'"|\[]') + +# Matches any valid Python identifier, including identifiers with Unicode characters. +# +# [^\d\W] = word character that is not a digit +# \w = word character +# \Z = match end of string; does not allow a trailing \n, unlike $ +_IDENTIFIER_RE = re.compile(r"^[^\d\W]\w*\Z", re.UNICODE) + + +class SemanticAnalyzer( + NodeVisitor[None], SemanticAnalyzerInterface, SemanticAnalyzerPluginInterface +): + """Semantically analyze parsed mypy files. + + The analyzer binds names and does various consistency checks for an + AST. Note that type checking is performed as a separate pass. + """ + + __deletable__ = ["patches", "options", "cur_mod_node"] + + # Module name space + modules: dict[str, MypyFile] + # Global name space for current module + globals: SymbolTable + # Names declared using "global" (separate set for each scope) + global_decls: list[set[str]] + # Names declared using "nonlocal" (separate set for each scope) + nonlocal_decls: list[set[str]] + # Local names of function scopes; None for non-function scopes. + locals: list[SymbolTable | None] + # Type of each scope (SCOPE_*, indexes match locals) + scope_stack: list[int] + # Nested block depths of scopes + block_depth: list[int] + # TypeInfo of directly enclosing class (or None) + _type: TypeInfo | None = None + # Stack of outer classes (the second tuple item contains tvars). + type_stack: list[TypeInfo | None] + # Type variables bound by the current scope, be it class or function + tvar_scope: TypeVarLikeScope + # Per-module options + options: Options + + # Stack of functions being analyzed + function_stack: list[FuncItem] + + # Set to True if semantic analysis defines a name, or replaces a + # placeholder definition. If some iteration makes no progress, + # there can be at most one additional final iteration (see below). + progress = False + deferred = False # Set to true if another analysis pass is needed + incomplete = False # Set to true if current module namespace is missing things + # Is this the final iteration of semantic analysis (where we report + # unbound names due to cyclic definitions and should not defer)? + _final_iteration = False + # These names couldn't be added to the symbol table due to incomplete deps. + # Note that missing names are per module, _not_ per namespace. This means that e.g. + # a missing name at global scope will block adding same name at a class scope. + # This should not affect correctness and is purely a performance issue, + # since it can cause unnecessary deferrals. These are represented as + # PlaceholderNodes in the symbol table. We use this to ensure that the first + # definition takes precedence even if it's incomplete. + # + # Note that a star import adds a special name '*' to the set, this blocks + # adding _any_ names in the current file. + missing_names: list[set[str]] + # Callbacks that will be called after semantic analysis to tweak things. + patches: list[tuple[int, Callable[[], None]]] + loop_depth: list[int] # Depth of breakable loops + cur_mod_id = "" # Current module id (or None) (phase 2) + _is_stub_file = False # Are we analyzing a stub file? + _is_typeshed_stub_file = False # Are we analyzing a typeshed stub file? 
+ imports: set[str] # Imported modules (during phase 2 analysis) + # Note: some imports (and therefore dependencies) might + # not be found in phase 1, for example due to * imports. + errors: Errors # Keeps track of generated errors + plugin: Plugin # Mypy plugin for special casing of library features + statement: Statement | None = None # Statement/definition being analyzed + + # Mapping from 'async def' function definitions to their return type wrapped as a + # 'Coroutine[Any, Any, T]'. Used to keep track of whether a function definition's + # return type has already been wrapped, by checking if the function definition's + # type is stored in this mapping and that it still matches. + wrapped_coro_return_types: dict[FuncDef, Type] = {} + + def __init__( + self, + modules: dict[str, MypyFile], + missing_modules: set[str], + incomplete_namespaces: set[str], + errors: Errors, + plugin: Plugin, + import_map: dict[str, set[str]], + ) -> None: + """Construct semantic analyzer. + + We reuse the same semantic analyzer instance across multiple modules. + + Args: + modules: Global modules dictionary + missing_modules: Modules that could not be imported encountered so far + incomplete_namespaces: Namespaces that are being populated during semantic analysis + (can contain modules and classes within the current SCC; mutated by the caller) + errors: Report analysis errors using this instance + """ + self.locals = [None] + self.scope_stack = [SCOPE_GLOBAL] + # Saved namespaces from previous iteration. Every top-level function/method body is + # analyzed in several iterations until all names are resolved. We need to save + # the local namespaces for the top level function and all nested functions between + # these iterations. See also semanal_main.process_top_level_function(). + self.saved_locals: dict[ + FuncItem | GeneratorExpr | DictionaryComprehension, SymbolTable + ] = {} + self.imports = set() + self._type = None + self.type_stack = [] + # Are the namespaces of classes being processed complete? + self.incomplete_type_stack: list[bool] = [] + self.tvar_scope = TypeVarLikeScope() + self.function_stack = [] + self.block_depth = [0] + self.loop_depth = [0] + self.errors = errors + self.modules = modules + self.import_map = import_map + self.msg = MessageBuilder(errors, modules) + self.missing_modules = missing_modules + self.missing_names = [set()] + # These namespaces are still in process of being populated. If we encounter a + # missing name in these namespaces, we need to defer the current analysis target, + # since it's possible that the name will be there once the namespace is complete. + self.incomplete_namespaces = incomplete_namespaces + self.all_exports: list[str] = [] + # Map from module id to list of explicitly exported names (i.e. names in __all__). + self.export_map: dict[str, list[str]] = {} + self.plugin = plugin + # If True, process function definitions. If False, don't. This is used + # for processing module top levels in fine-grained incremental mode. + self.recurse_into_functions = True + self.scope = Scope() + + # Trace line numbers for every file where deferral happened during analysis of + # current SCC or top-level function. + self.deferral_debug_context: list[tuple[str, int]] = [] + + # This is needed to properly support recursive type aliases. The problem is that + # Foo[Bar] could mean three things depending on context: a target for type alias, + # a normal index expression (including enum index), or a type application. 
+ # The latter is particularly problematic as it can falsely create incomplete + # refs while analysing rvalues of type aliases. To avoid this we first analyse + # rvalues while temporarily setting this to True. + self.basic_type_applications = False + + # Used to temporarily enable unbound type variables in some contexts. Namely, + # in base class expressions, and in right hand sides of type aliases. Do not add + # new uses of this, as this may cause leaking `UnboundType`s to type checking. + self.allow_unbound_tvars = False + + # Used to pass information about current overload index to visit_func_def(). + self.current_overload_item: int | None = None + + # Used to track whether currently inside an except* block. This helps + # to invoke errors when continue/break/return is used inside except* block. + self.inside_except_star_block: bool = False + # Used to track edge case when return is still inside except* if it enters a loop + self.return_stmt_inside_except_star_block: bool = False + + self._str_type: Instance | None = None + self._function_type: Instance | None = None + self._object_type: Instance | None = None + + # TypeForm profiling counters + self.type_expression_parse_count: int = 0 # Total try_parse_as_type_expression calls + self.type_expression_full_parse_success_count: int = 0 # Successful full parses + self.type_expression_full_parse_failure_count: int = 0 # Failed full parses + + # Imports of submodules transitively visible from given module. + # This is needed to support patterns like this + # [a.py] + # import b + # import foo + # foo.bar # <- this should work even if bar is not re-exported in foo + # [b.py] + # import foo.bar + self.transitive_submodule_imports: dict[str, set[str]] = {} + + # mypyc doesn't properly handle implementing an abstractproperty + # with a regular attribute so we make them properties + @property + def type(self) -> TypeInfo | None: + return self._type + + @property + def is_stub_file(self) -> bool: + return self._is_stub_file + + @property + def is_typeshed_stub_file(self) -> bool: + return self._is_typeshed_stub_file + + @property + def final_iteration(self) -> bool: + return self._final_iteration + + @contextmanager + def allow_unbound_tvars_set(self) -> Iterator[None]: + old = self.allow_unbound_tvars + self.allow_unbound_tvars = True + try: + yield + finally: + self.allow_unbound_tvars = old + + @contextmanager + def inside_except_star_block_set( + self, value: bool, entering_loop: bool = False + ) -> Iterator[None]: + old = self.inside_except_star_block + self.inside_except_star_block = value + + # Return statement would still be in except* scope if entering loops + if not entering_loop: + old_return_stmt_flag = self.return_stmt_inside_except_star_block + self.return_stmt_inside_except_star_block = value + + try: + yield + finally: + self.inside_except_star_block = old + if not entering_loop: + self.return_stmt_inside_except_star_block = old_return_stmt_flag + + # + # Preparing module (performed before semantic analysis) + # + + def prepare_file(self, file_node: MypyFile) -> None: + """Prepare a freshly parsed file for semantic analysis.""" + if "builtins" in self.modules: + file_node.names["__builtins__"] = SymbolTableNode(GDEF, self.modules["builtins"]) + if file_node.fullname == "builtins": + self.prepare_builtins_namespace(file_node) + if file_node.fullname == "typing": + self.prepare_typing_namespace(file_node, type_aliases) + if file_node.fullname == "typing_extensions": + self.prepare_typing_namespace(file_node, 
typing_extensions_aliases) + + def prepare_typing_namespace(self, file_node: MypyFile, aliases: dict[str, str]) -> None: + """Remove dummy alias definitions such as List = TypeAlias(object) from typing. + + They will be replaced with real aliases when corresponding targets are ready. + """ + + # This is all pretty unfortunate. typeshed now has a + # sys.version_info check for OrderedDict, and we shouldn't + # take it out, because it is correct and a typechecker should + # use that as a source of truth. But instead we rummage + # through IfStmts to remove the info first. (I tried to + # remove this whole machinery and ran into issues with the + # builtins/typing import cycle.) + def helper(defs: list[Statement]) -> None: + for stmt in defs.copy(): + if isinstance(stmt, IfStmt): + for body in stmt.body: + helper(body.body) + if stmt.else_body: + helper(stmt.else_body.body) + if ( + isinstance(stmt, AssignmentStmt) + and len(stmt.lvalues) == 1 + and isinstance(stmt.lvalues[0], NameExpr) + ): + # Assignment to a simple name, remove it if it is a dummy alias. + if f"{file_node.fullname}.{stmt.lvalues[0].name}" in aliases: + defs.remove(stmt) + + helper(file_node.defs) + + def prepare_builtins_namespace(self, file_node: MypyFile) -> None: + """Add certain special-cased definitions to the builtins module. + + Some definitions are too special or fundamental to be processed + normally from the AST. + """ + names = file_node.names + + # Add empty definition for core built-in classes, since they are required for basic + # operation. These will be completed later on. + for name in CORE_BUILTIN_CLASSES: + cdef = ClassDef(name, Block([])) # Dummy ClassDef, will be replaced later + info = TypeInfo(SymbolTable(), cdef, "builtins") + info._fullname = f"builtins.{name}" + names[name] = SymbolTableNode(GDEF, info) + + bool_info = names["bool"].node + assert isinstance(bool_info, TypeInfo) + bool_type = Instance(bool_info, []) + + special_var_types: list[tuple[str, Type]] = [ + ("None", NoneType()), + # reveal_type is a mypy-only function that gives an error with + # the type of its arg. 
+ ("reveal_type", AnyType(TypeOfAny.special_form)), + # reveal_locals is a mypy-only function that gives an error with the types of + # locals + ("reveal_locals", AnyType(TypeOfAny.special_form)), + ("True", bool_type), + ("False", bool_type), + ("__debug__", bool_type), + ] + + for name, typ in special_var_types: + v = Var(name, typ) + v._fullname = f"builtins.{name}" + file_node.names[name] = SymbolTableNode(GDEF, v) + + # + # Analyzing a target + # + + def refresh_partial( + self, + node: MypyFile | FuncDef | OverloadedFuncDef, + patches: list[tuple[int, Callable[[], None]]], + final_iteration: bool, + file_node: MypyFile, + options: Options, + active_type: TypeInfo | None = None, + ) -> None: + """Refresh a stale target in fine-grained incremental mode.""" + self.patches = patches + self.deferred = False + self.incomplete = False + self._final_iteration = final_iteration + self.missing_names[-1] = set() + + with self.file_context(file_node, options, active_type): + if isinstance(node, MypyFile): + self.refresh_top_level(node) + else: + self.recurse_into_functions = True + self.accept(node) + del self.patches + + def refresh_top_level(self, file_node: MypyFile) -> None: + """Reanalyze a stale module top-level in fine-grained incremental mode.""" + if self.options.allow_redefinition_new and not self.options.local_partial_types: + n = TempNode(AnyType(TypeOfAny.special_form)) + n.line = 1 + n.column = 0 + n.end_line = 1 + n.end_column = 0 + self.fail("--local-partial-types must be enabled if using --allow-redefinition-new", n) + self.recurse_into_functions = False + self.add_implicit_module_attrs(file_node) + for d in file_node.defs: + self.accept(d) + if file_node.fullname == "typing": + self.add_builtin_aliases(file_node) + if file_node.fullname == "typing_extensions": + self.add_typing_extension_aliases(file_node) + self.adjust_public_exports() + self.export_map[self.cur_mod_id] = self.all_exports + self.all_exports = [] + + def add_implicit_module_attrs(self, file_node: MypyFile) -> None: + """Manually add implicit definitions of module '__name__' etc.""" + str_type: Type | None = self.named_type_or_none("builtins.str") + if str_type is None: + str_type = UnboundType("builtins.str") + inst: Type | None + for name, t in implicit_module_attrs.items(): + if name == "__doc__": + typ: Type = str_type + elif name == "__path__": + if not file_node.is_package_init_file(): + continue + # Need to construct the type ourselves, to avoid issues with __builtins__.list + # not being subscriptable or typing.List not getting bound + inst = self.named_type_or_none("builtins.list", [str_type]) + if inst is None: + assert not self.final_iteration, "Cannot find builtins.list to add __path__" + self.defer() + return + typ = inst + elif name == "__annotations__": + inst = self.named_type_or_none( + "builtins.dict", [str_type, AnyType(TypeOfAny.special_form)] + ) + if inst is None: + assert ( + not self.final_iteration + ), "Cannot find builtins.dict to add __annotations__" + self.defer() + return + typ = inst + elif name == "__spec__": + if self.options.use_builtins_fixtures: + inst = self.named_type_or_none("builtins.object") + else: + inst = self.named_type_or_none("importlib.machinery.ModuleSpec") + if inst is None: + if ( + self.final_iteration + or self.options.clone_for_module("importlib.machinery").follow_imports + == "skip" + ): + # If we are not allowed to resolve imports from `importlib.machinery`, + # ModuleSpec will not be available at any iteration. + # Use the fallback earlier. 
+ # (see https://github.com/python/mypy/issues/18237) + inst = self.named_type_or_none("builtins.object") + assert inst is not None, "Cannot find builtins.object" + else: + self.defer() + return + if file_node.name == "__main__": + # https://docs.python.org/3/reference/import.html#main-spec + inst = UnionType.make_union([inst, NoneType()]) + typ = inst + else: + assert t is not None, f"type should be specified for {name}" + typ = UnboundType(t) + + existing = file_node.names.get(name) + if existing is not None and not isinstance(existing.node, PlaceholderNode): + # Already exists. + continue + + an_type = self.anal_type(typ) + if an_type: + var = Var(name, an_type) + var._fullname = self.qualified_name(name) + var.is_ready = True + self.add_symbol(name, var, dummy_context()) + else: + self.add_symbol( + name, + PlaceholderNode(self.qualified_name(name), file_node, -1), + dummy_context(), + ) + + def add_builtin_aliases(self, tree: MypyFile) -> None: + """Add builtin type aliases to typing module. + + For historical reasons, the aliases like `List = list` are not defined + in typeshed stubs for typing module. Instead we need to manually add the + corresponding nodes on the fly. We explicitly mark these aliases as normalized, + so that a user can write `typing.List[int]`. + """ + assert tree.fullname == "typing" + for alias, target_name in type_aliases.items(): + if ( + alias in type_aliases_source_versions + and type_aliases_source_versions[alias] > self.options.python_version + ): + # This alias is not available on this Python version. + continue + name = alias.split(".")[-1] + if name in tree.names and not isinstance(tree.names[name].node, PlaceholderNode): + continue + self.create_alias(tree, target_name, alias, name) + + def add_typing_extension_aliases(self, tree: MypyFile) -> None: + """Typing extensions module does contain some type aliases. + + We need to analyze them as such, because in typeshed + they are just defined as `_Alias()` call. + Which is not supported natively. + """ + assert tree.fullname == "typing_extensions" + + for alias, target_name in typing_extensions_aliases.items(): + name = alias.split(".")[-1] + if name in tree.names and isinstance(tree.names[name].node, TypeAlias): + continue # Do not reset TypeAliases on the second pass. + + # We need to remove any node that is there at the moment. It is invalid. + tree.names.pop(name, None) + + # Now, create a new alias. + self.create_alias(tree, target_name, alias, name) + + def create_alias(self, tree: MypyFile, target_name: str, alias: str, name: str) -> None: + tag = self.track_incomplete_refs() + n = self.lookup_fully_qualified_or_none(target_name) + if n: + if isinstance(n.node, PlaceholderNode): + self.mark_incomplete(name, tree) + else: + # Found built-in class target. Create alias. + target = self.named_type_or_none(target_name, []) + assert target is not None + # Transform List to List[Any], etc. + fix_instance( + target, self.fail, self.note, disallow_any=False, options=self.options + ) + alias_node = TypeAlias( + target, + alias, + tree.fullname, + line=-1, + column=-1, # there is no context + no_args=True, + normalized=True, + ) + self.add_symbol(name, alias_node, tree) + elif self.found_incomplete_ref(tag): + # Built-in class target may not ready yet -- defer. + self.mark_incomplete(name, tree) + else: + # Test fixtures may be missing some builtin classes, which is okay. + # Kill the placeholder if there is one. 
+ if name in tree.names: + assert isinstance(tree.names[name].node, PlaceholderNode) + del tree.names[name] + + def adjust_public_exports(self) -> None: + """Adjust the module visibility of globals due to __all__.""" + if "__all__" in self.globals: + for name, g in self.globals.items(): + # Being included in __all__ explicitly exports and makes public. + if name in self.all_exports: + g.module_public = True + g.module_hidden = False + # But when __all__ is defined, and a symbol is not included in it, + # it cannot be public. + else: + g.module_public = False + + @contextmanager + def file_context( + self, file_node: MypyFile, options: Options, active_type: TypeInfo | None = None + ) -> Iterator[None]: + """Configure analyzer for analyzing targets within a file/class. + + Args: + file_node: target file + options: options specific to the file + active_type: must be the surrounding class to analyze method targets + """ + scope = self.scope + self.options = options + self.errors.set_file(file_node.path, file_node.fullname, scope=scope, options=options) + self.cur_mod_node = file_node + self.cur_mod_id = file_node.fullname + with scope.module_scope(self.cur_mod_id): + self._is_stub_file = file_node.path.lower().endswith(".pyi") + self._is_typeshed_stub_file = file_node.is_typeshed_file(options) + self.globals = file_node.names + self.tvar_scope = TypeVarLikeScope() + + self.named_tuple_analyzer = NamedTupleAnalyzer(options, self, self.msg) + self.typed_dict_analyzer = TypedDictAnalyzer(options, self, self.msg) + self.enum_call_analyzer = EnumCallAnalyzer(options, self) + self.newtype_analyzer = NewTypeAnalyzer(options, self, self.msg) + + # Counter that keeps track of references to undefined things potentially caused by + # incomplete namespaces. + self.num_incomplete_refs = 0 + + if active_type: + enclosing_fullname = active_type.fullname.rsplit(".", 1)[0] + if "." in enclosing_fullname: + enclosing_node = self.lookup_fully_qualified_or_none(enclosing_fullname) + if enclosing_node and isinstance(enclosing_node.node, TypeInfo): + self._type = enclosing_node.node + self.push_type_args(active_type.defn.type_args, active_type.defn) + self.incomplete_type_stack.append(False) + scope.enter_class(active_type) + self.enter_class(active_type.defn.info) + for tvar in active_type.defn.type_vars: + self.tvar_scope.bind_existing(tvar) + + yield + + if active_type: + scope.leave_class() + self.leave_class() + self._type = None + self.incomplete_type_stack.pop() + self.pop_type_args(active_type.defn.type_args) + del self.options + + # + # Functions + # + + def visit_func_def(self, defn: FuncDef) -> None: + self.statement = defn + + # Visit default values because they may contain assignment expressions. + for arg in defn.arguments: + if arg.initializer: + arg.initializer.accept(self) + + defn.is_conditional = self.block_depth[-1] > 0 + + # Set full names even for those definitions that aren't added + # to a symbol table. For example, for overload items. + defn._fullname = self.qualified_name(defn.name) + + # We don't add module top-level functions to symbol tables + # when we analyze their bodies in the second phase on analysis, + # since they were added in the first phase. Nested functions + # get always added, since they aren't separate targets. 
+ if not self.recurse_into_functions or len(self.function_stack) > 0: + if not defn.is_decorated and not defn.is_overload: + self.add_function_to_symbol_table(defn) + + if not self.recurse_into_functions: + return + + with self.scope.function_scope(defn): + with self.inside_except_star_block_set(value=False): + self.analyze_func_def(defn) + + def function_fullname(self, fullname: str) -> str: + if self.current_overload_item is None: + return fullname + return f"{fullname}#{self.current_overload_item}" + + def analyze_func_def(self, defn: FuncDef) -> None: + if self.push_type_args(defn.type_args, defn) is None: + self.defer(defn) + return + + self.function_stack.append(defn) + + if defn.type: + assert isinstance(defn.type, CallableType) + has_self_type = self.update_function_type_variables(defn.type, defn) + else: + has_self_type = False + + self.function_stack.pop() + + if self.is_class_scope(): + # Method definition + assert self.type is not None + defn.info = self.type + if defn.type is not None and defn.name in ("__init__", "__init_subclass__"): + assert isinstance(defn.type, CallableType) + if isinstance(get_proper_type(defn.type.ret_type), AnyType): + defn.type = defn.type.copy_modified(ret_type=NoneType()) + self.prepare_method_signature(defn, self.type, has_self_type) + + # Analyze function signature + fullname = self.function_fullname(defn.fullname) + with self.tvar_scope_frame(self.tvar_scope.method_frame(fullname)): + if defn.type: + self.check_classvar_in_signature(defn.type) + assert isinstance(defn.type, CallableType) + # Signature must be analyzed in the surrounding scope so that + # class-level imported names and type variables are in scope. + analyzer = self.type_analyzer() + tag = self.track_incomplete_refs() + result = analyzer.visit_callable_type(defn.type, nested=False, namespace=fullname) + # Don't store not ready types (including placeholders). + if self.found_incomplete_ref(tag) or has_placeholder(result): + self.defer(defn) + self.pop_type_args(defn.type_args) + return + assert isinstance(result, ProperType) + if isinstance(result, CallableType): + # type guards need to have a positional argument, to spec + skip_self = self.is_class_scope() and not defn.is_static + if result.type_guard and ARG_POS not in result.arg_kinds[skip_self:]: + self.fail( + "TypeGuard functions must have a positional argument", + result, + code=codes.VALID_TYPE, + ) + # in this case, we just kind of just ... remove the type guard. + result = result.copy_modified(type_guard=None) + if result.type_is and ARG_POS not in result.arg_kinds[skip_self:]: + self.fail( + '"TypeIs" functions must have a positional argument', + result, + code=codes.VALID_TYPE, + ) + result = result.copy_modified(type_is=None) + + result = self.remove_unpack_kwargs(defn, result) + if has_self_type and self.type is not None: + info = self.type + if info.self_type is not None: + result.variables = (info.self_type,) + result.variables + defn.type = result + self.add_type_alias_deps(analyzer.aliases_used) + self.check_function_signature(defn) + if isinstance(defn, FuncDef): + assert isinstance(defn.type, CallableType) + defn.type = set_callable_name(defn.type, defn) + + self.analyze_arg_initializers(defn) + self.analyze_function_body(defn) + + if self.is_class_scope(): + assert self.type is not None + # Mark protocol methods with empty bodies as implicitly abstract. + # This makes explicit protocol subclassing type-safe. + if ( + self.type.is_protocol + and not self.is_stub_file # Bodies in stub files are always empty. 
+ and (not isinstance(self.scope.function, OverloadedFuncDef) or defn.is_property) + and defn.abstract_status != IS_ABSTRACT + and is_trivial_body(defn.body) + ): + defn.abstract_status = IMPLICITLY_ABSTRACT + if ( + is_trivial_body(defn.body) + and not self.is_stub_file + and defn.abstract_status != NOT_ABSTRACT + ): + defn.is_trivial_body = True + + if ( + defn.is_coroutine + and isinstance(defn.type, CallableType) + and self.wrapped_coro_return_types.get(defn) != defn.type + ): + if defn.is_async_generator: + # Async generator types are handled elsewhere + pass + else: + # A coroutine defined as `async def foo(...) -> T: ...` + # has external return type `Coroutine[Any, Any, T]`. + any_type = AnyType(TypeOfAny.special_form) + ret_type = self.named_type_or_none( + "typing.Coroutine", [any_type, any_type, defn.type.ret_type] + ) + assert ret_type is not None, "Internal error: typing.Coroutine not found" + defn.type = defn.type.copy_modified(ret_type=ret_type) + self.wrapped_coro_return_types[defn] = defn.type + + self.pop_type_args(defn.type_args) + + def remove_unpack_kwargs(self, defn: FuncDef, typ: CallableType) -> CallableType: + if not typ.arg_kinds or typ.arg_kinds[-1] is not ArgKind.ARG_STAR2: + return typ + last_type = typ.arg_types[-1] + if not isinstance(last_type, UnpackType): + return typ + p_last_type = get_proper_type(last_type.type) + if not isinstance(p_last_type, TypedDictType): + self.fail("Unpack item in ** argument must be a TypedDict", last_type) + new_arg_types = typ.arg_types[:-1] + [AnyType(TypeOfAny.from_error)] + return typ.copy_modified(arg_types=new_arg_types) + overlap = set(typ.arg_names) & set(p_last_type.items) + # It is OK for TypedDict to have a key named 'kwargs'. + overlap.discard(typ.arg_names[-1]) + if overlap: + overlapped = ", ".join([f'"{name}"' for name in sorted(filter(None, overlap))]) + self.fail(f"Overlap between argument names and ** TypedDict items: {overlapped}", defn) + new_arg_types = typ.arg_types[:-1] + [AnyType(TypeOfAny.from_error)] + return typ.copy_modified(arg_types=new_arg_types) + # OK, everything looks right now, mark the callable type as using unpack. + new_arg_types = typ.arg_types[:-1] + [p_last_type] + return typ.copy_modified(arg_types=new_arg_types, unpack_kwargs=True) + + def prepare_method_signature(self, func: FuncDef, info: TypeInfo, has_self_type: bool) -> None: + """Check basic signature validity and tweak annotation of self/cls argument.""" + # Only non-static methods are special, as well as __new__. 
+ functype = func.type + if func.name == "__new__": + func.is_static = True + if func.has_self_or_cls_argument: + if func.name in ["__init_subclass__", "__class_getitem__"]: + func.is_class = True + if func.arguments and isinstance(functype, CallableType): + self_type = get_proper_type(functype.arg_types[0]) + if isinstance(self_type, AnyType): + if has_self_type: + assert self.type is not None and self.type.self_type is not None + leading_type: Type = self.type.self_type + else: + func.is_trivial_self = True + leading_type = fill_typevars(info) + if func.is_class or func.name == "__new__": + leading_type = self.class_type(leading_type) + func.type = replace_implicit_first_type(functype, leading_type) + elif has_self_type and isinstance(func.unanalyzed_type, CallableType): + if not isinstance(get_proper_type(func.unanalyzed_type.arg_types[0]), AnyType): + if self.is_expected_self_type( + self_type, func.is_class or func.name == "__new__" + ): + # This error is off by default, since it is explicitly allowed + # by the PEP 673. + self.fail( + 'Redundant "Self" annotation for the first method argument', + func, + code=codes.REDUNDANT_SELF_TYPE, + ) + else: + self.fail( + "Method cannot have explicit self annotation and Self type", func + ) + elif has_self_type: + self.fail("Static methods cannot use Self type", func) + + def is_expected_self_type(self, typ: Type, is_classmethod: bool) -> bool: + """Does this (analyzed or not) type represent the expected Self type for a method?""" + assert self.type is not None + typ = get_proper_type(typ) + if is_classmethod: + if isinstance(typ, TypeType): + return self.is_expected_self_type(typ.item, is_classmethod=False) + if isinstance(typ, UnboundType): + sym = self.lookup_qualified(typ.name, typ, suppress_errors=True) + if sym is not None and sym.fullname in TYPE_NAMES and typ.args: + return self.is_expected_self_type(typ.args[0], is_classmethod=False) + return False + if isinstance(typ, TypeVarType): + return typ == self.type.self_type + if isinstance(typ, UnboundType): + sym = self.lookup_qualified(typ.name, typ, suppress_errors=True) + return sym is not None and sym.fullname in SELF_TYPE_NAMES + return False + + def set_original_def(self, previous: Node | None, new: FuncDef | Decorator) -> bool: + """If 'new' conditionally redefine 'previous', set 'previous' as original + + We reject straight redefinitions of functions, as they are usually + a programming error. For example: + + def f(): ... + def f(): ... # Error: 'f' redefined + """ + if isinstance(new, Decorator): + new = new.func + if ( + isinstance(previous, (FuncDef, Decorator)) + and unnamed_function(new.name) + and unnamed_function(previous.name) + ): + return True + if isinstance(previous, (FuncDef, Var, Decorator)) and new.is_conditional: + new.original_def = previous + return True + else: + return False + + def update_function_type_variables(self, fun_type: CallableType, defn: FuncItem) -> bool: + """Make any type variables in the signature of defn explicit. + + Update the signature of defn to contain type variable definitions + if defn is generic. Return True, if the signature contains typing.Self + type, or False otherwise. 
+ """ + fullname = self.function_fullname(defn.fullname) + with self.tvar_scope_frame(self.tvar_scope.method_frame(fullname)): + a = self.type_analyzer() + fun_type.variables, has_self_type = a.bind_function_type_variables(fun_type, defn) + if has_self_type and self.type is not None: + self.setup_self_type() + if defn.type_args: + bound_fullnames = {v.fullname for v in fun_type.variables} + declared_fullnames = {self.qualified_name(p.name) for p in defn.type_args} + extra = sorted(bound_fullnames - declared_fullnames) + if extra: + self.msg.type_parameters_should_be_declared( + [n.split(".")[-1] for n in extra], defn + ) + return has_self_type + + def setup_self_type(self) -> None: + """Setup a (shared) Self type variable for current class. + + We intentionally don't add it to the class symbol table, + so it can be accessed only by mypy and will not cause + clashes with user defined names. + """ + assert self.type is not None + info = self.type + if info.self_type is not None: + if has_placeholder(info.self_type.upper_bound): + # Similar to regular (user defined) type variables. + self.process_placeholder( + None, + "Self upper bound", + info, + force_progress=info.self_type.upper_bound != fill_typevars(info), + ) + else: + return + info.self_type = TypeVarType( + "Self", + f"{info.fullname}.Self", + id=TypeVarId(0), # 0 is a special value for self-types. + values=[], + upper_bound=fill_typevars(info), + default=AnyType(TypeOfAny.from_omitted_generics), + ) + + def visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None: + self.statement = defn + self.add_function_to_symbol_table(defn) + + if not self.recurse_into_functions: + return + + # NB: Since _visit_overloaded_func_def will call accept on the + # underlying FuncDefs, the function might get entered twice. + # This is fine, though, because only the outermost function is + # used to compute targets. + with self.scope.function_scope(defn): + self.analyze_overloaded_func_def(defn) + + @contextmanager + def overload_item_set(self, item: int | None) -> Iterator[None]: + self.current_overload_item = item + try: + yield + finally: + self.current_overload_item = None + + def analyze_overloaded_func_def(self, defn: OverloadedFuncDef) -> None: + # OverloadedFuncDef refers to any legitimate situation where you have + # more than one declaration for the same function in a row. This occurs + # with a @property with a setter or a deleter, and for a classic + # @overload. + + defn._fullname = self.qualified_name(defn.name) + # TODO: avoid modifying items. + defn.items = defn.unanalyzed_items.copy() + + first_item = defn.items[0] + first_item.is_overload = True + with self.overload_item_set(0): + first_item.accept(self) + + bare_setter_type = None + is_property = False + if isinstance(first_item, Decorator) and first_item.func.is_property: + is_property = True + # This is a property. + first_item.func.is_overload = True + bare_setter_type = self.analyze_property_with_multi_part_definition(defn) + typ = function_type(first_item.func, self.function_type()) + assert isinstance(typ, CallableType) + typ.definition = first_item + types = [typ] + else: + # This is a normal overload. Find the item signatures, the + # implementation (if outside a stub), and any missing @overload + # decorators. 
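+ # [Editor's note — illustrative sketch, not part of this module.]
+ # The "normal overload" case analyzed here corresponds to user code like
+ # the following (hypothetical names):
+ #
+ #     from typing import overload
+ #
+ #     @overload
+ #     def parse(x: int) -> int: ...
+ #     @overload
+ #     def parse(x: str) -> str: ...
+ #     def parse(x: int | str) -> int | str:  # implementation (omitted in stubs)
+ #         return x
+ #
+ # Outside a stub file the final undecorated item is taken as the
+ # implementation and removed from defn.items.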
+ types, impl, non_overload_indexes = self.analyze_overload_sigs_and_impl(defn) + defn.impl = impl + if non_overload_indexes: + self.handle_missing_overload_decorators( + defn, non_overload_indexes, some_overload_decorators=len(types) > 0 + ) + # If we found an implementation, remove it from the overload item list, + # as it's special. + if impl is not None: + assert impl is defn.items[-1] + defn.items = defn.items[:-1] + elif not non_overload_indexes: + self.handle_missing_overload_implementation(defn) + + if types and not any( + # If some overload items are decorated with other decorators, then + # the overload type will be determined during type checking. + # Note: bare @property is removed in visit_decorator(). + isinstance(it, Decorator) + and len(it.decorators) > (1 if i > 0 or not is_property else 0) + for i, it in enumerate(defn.items) + ): + # TODO: should we enforce decorated overloads consistency somehow? + # Some existing code uses both styles: + # * Put decorator only on implementation, use "effective" types in overloads + # * Put decorator everywhere, use "bare" types in overloads. + defn.type = Overloaded(types) + defn.type.line = defn.line + # In addition, we can set the getter/setter type for valid properties as some + # code paths may either use the above type, or var.type etc. of the first item. + if isinstance(first_item, Decorator) and bare_setter_type: + first_item.var.type = types[0] + first_item.var.setter_type = bare_setter_type + + if not defn.items: + # It was not a real overload after all, but function redefinition. We've + # visited the redefinition(s) already. + if not defn.impl: + # For really broken overloads with no items and no implementation we need to keep + # at least one item to hold basic information like function name. + defn.impl = defn.unanalyzed_items[-1] + return + + # We know this is an overload def. Infer properties and perform some checks. + self.process_deprecated_overload(defn) + self.process_final_in_overload(defn) + self.process_static_or_class_method_in_overload(defn) + self.process_overload_impl(defn) + + def process_deprecated_overload(self, defn: OverloadedFuncDef) -> None: + if defn.is_property: + return + + if isinstance(impl := defn.impl, Decorator) and ( + (deprecated := impl.func.deprecated) is not None + ): + defn.deprecated = deprecated + for item in defn.items: + if isinstance(item, Decorator): + item.func.deprecated = deprecated + + for item in defn.items: + deprecation = False + if isinstance(item, Decorator): + for d in item.decorators: + if deprecation and refers_to_fullname(d, OVERLOAD_NAMES): + self.msg.note("@overload should be placed before @deprecated", d) + elif (deprecated := self.get_deprecated(d)) is not None: + deprecation = True + if isinstance(typ := item.func.type, CallableType): + typestr = f" {typ} " + else: + typestr = " " + item.func.deprecated = ( + f"overload{typestr}of function {defn.fullname} is deprecated: " + f"{deprecated}" + ) + + @staticmethod + def get_deprecated(expression: Expression) -> str | None: + if ( + isinstance(expression, CallExpr) + and refers_to_fullname(expression.callee, DEPRECATED_TYPE_NAMES) + and (len(args := expression.args) >= 1) + and isinstance(deprecated := args[0], StrExpr) + ): + return deprecated.value + return None + + def process_overload_impl(self, defn: OverloadedFuncDef) -> None: + """Set flags for an overload implementation. + + Currently, this checks for a trivial body in protocols classes, + where it makes the method implicitly abstract. 
+ """ + if defn.impl is None: + return + impl = defn.impl if isinstance(defn.impl, FuncDef) else defn.impl.func + if is_trivial_body(impl.body) and self.is_class_scope() and not self.is_stub_file: + assert self.type is not None + if self.type.is_protocol: + impl.abstract_status = IMPLICITLY_ABSTRACT + if impl.abstract_status != NOT_ABSTRACT: + impl.is_trivial_body = True + + def analyze_overload_sigs_and_impl( + self, defn: OverloadedFuncDef + ) -> tuple[list[CallableType], OverloadPart | None, list[int]]: + """Find overload signatures, the implementation, and items with missing @overload. + + Assume that the first was already analyzed. As a side effect: + analyzes remaining items and updates 'is_overload' flags. + """ + types = [] + non_overload_indexes = [] + impl: OverloadPart | None = None + for i, item in enumerate(defn.items): + if i != 0: + # Assume that the first item was already visited + item.is_overload = True + with self.overload_item_set(i if i < len(defn.items) - 1 else None): + item.accept(self) + # TODO: support decorated overloaded functions properly + if isinstance(item, Decorator): + callable = function_type(item.func, self.function_type()) + assert isinstance(callable, CallableType) + callable.definition = item + if not any(refers_to_fullname(dec, OVERLOAD_NAMES) for dec in item.decorators): + if i == len(defn.items) - 1 and not self.is_stub_file: + # Last item outside a stub is impl + impl = item + else: + # Oops it wasn't an overload after all. A clear error + # will vary based on where in the list it is, record + # that. + non_overload_indexes.append(i) + else: + item.func.is_overload = True + types.append(callable) + if item.var.is_property: + self.fail("An overload can not be a property", item) + # If any item was decorated with `@override`, the whole overload + # becomes an explicit override. + defn.is_explicit_override |= item.func.is_explicit_override + elif isinstance(item, FuncDef): + if i == len(defn.items) - 1 and not self.is_stub_file: + impl = item + else: + non_overload_indexes.append(i) + return types, impl, non_overload_indexes + + def handle_missing_overload_decorators( + self, + defn: OverloadedFuncDef, + non_overload_indexes: list[int], + some_overload_decorators: bool, + ) -> None: + """Generate errors for overload items without @overload. + + Side effect: remote non-overload items. + """ + if some_overload_decorators: + # Some of them were overloads, but not all. + for idx in non_overload_indexes: + if self.is_stub_file: + self.fail( + "An implementation for an overloaded function " + "is not allowed in a stub file", + defn.items[idx], + ) + else: + self.fail( + "The implementation for an overloaded function must come last", + defn.items[idx], + ) + else: + for idx in non_overload_indexes[1:]: + self.name_already_defined(defn.name, defn.items[idx], defn.items[0]) + if defn.impl: + self.name_already_defined(defn.name, defn.impl, defn.items[0]) + # Remove the non-overloads + for idx in reversed(non_overload_indexes): + del defn.items[idx] + + def handle_missing_overload_implementation(self, defn: OverloadedFuncDef) -> None: + """Generate error about missing overload implementation (only if needed).""" + if not self.is_stub_file: + if self.type and self.type.is_protocol and not self.is_func_scope(): + # An overloaded protocol method doesn't need an implementation, + # but if it doesn't have one, then it is considered abstract. 
+ for item in defn.items: + if isinstance(item, Decorator): + item.func.abstract_status = IS_ABSTRACT + else: + item.abstract_status = IS_ABSTRACT + elif all( + isinstance(item, Decorator) and item.func.abstract_status == IS_ABSTRACT + for item in defn.items + ): + # Since there is no implementation, it can't be called via super(). + if defn.items: + assert isinstance(defn.items[0], Decorator) + defn.items[0].func.is_trivial_body = True + else: + self.fail( + "An overloaded function outside a stub file must have an implementation", + defn, + code=codes.NO_OVERLOAD_IMPL, + ) + + def process_final_in_overload(self, defn: OverloadedFuncDef) -> None: + """Detect the @final status of an overloaded function (and perform checks).""" + # If the implementation is marked as @final (or the first overload in + # stubs), then the whole overloaded definition if @final. + if any(item.is_final for item in defn.items): + # We anyway mark it as final because it was probably the intention. + defn.is_final = True + # Only show the error once per overload + bad_final = next(ov for ov in defn.items if ov.is_final) + if not self.is_stub_file: + self.fail("@final should be applied only to overload implementation", bad_final) + elif any(item.is_final for item in defn.items[1:]): + bad_final = next(ov for ov in defn.items[1:] if ov.is_final) + self.fail( + "In a stub file @final must be applied only to the first overload", bad_final + ) + if defn.impl is not None and defn.impl.is_final: + defn.is_final = True + + def process_static_or_class_method_in_overload(self, defn: OverloadedFuncDef) -> None: + class_status = [] + static_status = [] + for item in defn.items: + if isinstance(item, Decorator): + inner = item.func + elif isinstance(item, FuncDef): + inner = item + else: + assert False, f"The 'item' variable is an unexpected type: {type(item)}" + class_status.append(inner.is_class) + static_status.append(inner.is_static) + + if defn.impl is not None: + if isinstance(defn.impl, Decorator): + inner = defn.impl.func + elif isinstance(defn.impl, FuncDef): + inner = defn.impl + else: + assert False, f"Unexpected impl type: {type(defn.impl)}" + class_status.append(inner.is_class) + static_status.append(inner.is_static) + + if len(set(class_status)) != 1: + self.msg.overload_inconsistently_applies_decorator("classmethod", defn) + elif len(set(static_status)) != 1: + self.msg.overload_inconsistently_applies_decorator("staticmethod", defn) + else: + defn.is_class = class_status[0] + defn.is_static = static_status[0] + + def analyze_property_with_multi_part_definition( + self, defn: OverloadedFuncDef + ) -> CallableType | None: + """Analyze a property defined using multiple methods (e.g., using @x.setter). + + Assume that the first method (@property) has already been analyzed. + Return bare setter type (without any other decorators applied), this may be used + by the caller for performance optimizations. + """ + defn.is_property = True + items = defn.items + first_item = defn.items[0] + assert isinstance(first_item, Decorator) + deleted_items = [] + bare_setter_type = None + func_name = first_item.func.name + for i, item in enumerate(items[1:]): + if isinstance(item, Decorator): + item.func.accept(self) + if item.decorators: + first_node = item.decorators[0] + if self._is_valid_property_decorator(first_node, func_name): + # Get abstractness from the original definition. + item.func.abstract_status = first_item.func.abstract_status + if first_node.name == "setter": + # The first item represents the entire property. 
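+ # [Editor's note — illustrative sketch, not part of this module.]
+ # A multi-part property as handled by analyze_property_with_multi_part_definition()
+ # (hypothetical names):
+ #
+ #     class Point:
+ #         def __init__(self) -> None:
+ #             self._x = 0
+ #         @property
+ #         def x(self) -> int:
+ #             return self._x
+ #         @x.setter
+ #         def x(self, value: int) -> None:
+ #             self._x = value
+ #         @x.deleter
+ #         def x(self) -> None:
+ #             del self._x
+ #
+ # Only "@x.setter" and "@x.deleter" are accepted as the top decorator of the
+ # later items; the bare setter signature is returned separately to the caller.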
+ first_item.var.is_settable_property = True + setter_func_type = function_type(item.func, self.function_type()) + assert isinstance(setter_func_type, CallableType) + bare_setter_type = setter_func_type + defn.setter_index = i + 1 + for other_node in item.decorators[1:]: + other_node.accept(self) + else: + self.fail( + f'Only supported top decorators are "@{func_name}.setter" and "@{func_name}.deleter"', + first_node, + ) + else: + self.fail(f'Unexpected definition for property "{func_name}"', item) + deleted_items.append(i + 1) + for i in reversed(deleted_items): + del items[i] + + for item in items[1:]: + if isinstance(item, Decorator): + for d in item.decorators: + if (deprecated := self.get_deprecated(d)) is not None: + item.func.deprecated = ( + f"function {item.fullname} is deprecated: {deprecated}" + ) + return bare_setter_type + + def _is_valid_property_decorator( + self, deco: Expression, property_name: str + ) -> TypeGuard[MemberExpr]: + if not isinstance(deco, MemberExpr): + return False + if not isinstance(deco.expr, NameExpr) or deco.expr.name != property_name: + return False + if deco.name not in {"setter", "deleter"}: + # This intentionally excludes getter. While `@prop.getter` is valid at + # runtime, that would mean replacing the already processed getter type. + # Such usage is almost definitely a mistake (except for overrides in + # subclasses but we don't support them anyway) and might be a typo + # (only one letter away from `setter`), it's likely almost never used, + # so supporting it properly won't pay off. + return False + return True + + def add_function_to_symbol_table(self, func: FuncDef | OverloadedFuncDef) -> None: + if self.is_class_scope(): + assert self.type is not None + func.info = self.type + func._fullname = self.qualified_name(func.name) + self.add_symbol(func.name, func, func) + + def analyze_arg_initializers(self, defn: FuncItem) -> None: + fullname = self.function_fullname(defn.fullname) + with self.tvar_scope_frame(self.tvar_scope.method_frame(fullname)): + # Analyze default arguments + for arg in defn.arguments: + if arg.initializer: + arg.initializer.accept(self) + + def analyze_function_body(self, defn: FuncItem) -> None: + is_method = self.is_class_scope() + fullname = self.function_fullname(defn.fullname) + with self.tvar_scope_frame(self.tvar_scope.method_frame(fullname)): + # Bind the type variables again to visit the body. + if defn.type: + a = self.type_analyzer() + typ = defn.type + assert isinstance(typ, CallableType) + a.bind_function_type_variables(typ, defn) + for i in range(len(typ.arg_types)): + store_argument_type(defn, i, typ, self.named_type) + self.function_stack.append(defn) + with self.enter(defn): + for arg in defn.arguments: + self.add_local(arg.variable, defn) + + # The first argument of a non-static, non-class method is like 'self' + # (though the name could be different), having the enclosing class's + # instance type. 
+ if is_method and defn.has_self_or_cls_argument and defn.arguments: + if not defn.is_class: + defn.arguments[0].variable.is_self = True + else: + defn.arguments[0].variable.is_cls = True + + defn.body.accept(self) + self.function_stack.pop() + + def check_classvar_in_signature(self, typ: ProperType) -> None: + t: ProperType + if isinstance(typ, Overloaded): + for t in typ.items: + self.check_classvar_in_signature(t) + return + if not isinstance(typ, CallableType): + return + for t in get_proper_types(typ.arg_types) + [get_proper_type(typ.ret_type)]: + if self.is_classvar(t): + self.fail_invalid_classvar(t) + # Show only one error per signature + break + + def check_function_signature(self, fdef: FuncItem) -> None: + sig = fdef.type + assert isinstance(sig, CallableType) + if len(sig.arg_types) < len(fdef.arguments): + self.fail("Type signature has too few arguments", fdef) + # Add dummy Any arguments to prevent crashes later. + num_extra_anys = len(fdef.arguments) - len(sig.arg_types) + extra_anys = [AnyType(TypeOfAny.from_error)] * num_extra_anys + sig.arg_types.extend(extra_anys) + elif len(sig.arg_types) > len(fdef.arguments): + self.fail("Type signature has too many arguments", fdef, blocker=True) + + def visit_decorator(self, dec: Decorator) -> None: + self.statement = dec + # TODO: better don't modify them at all. + dec.decorators = dec.original_decorators.copy() + dec.func.is_conditional = self.block_depth[-1] > 0 + if not dec.is_overload: + self.add_symbol(dec.name, dec, dec) + dec.func._fullname = self.qualified_name(dec.name) + dec.var._fullname = self.qualified_name(dec.name) + for d in dec.decorators: + d.accept(self) + removed: list[int] = [] + no_type_check = False + could_be_decorated_property = False + for i, d in enumerate(dec.decorators): + # A bunch of decorators are special cased here. 
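+ # [Editor's note — illustrative sketch, not part of this module.]
+ # Typical decorators recognized by the special-casing loop below
+ # (hypothetical class):
+ #
+ #     from abc import ABC, abstractmethod
+ #
+ #     class Store(ABC):
+ #         @abstractmethod
+ #         def load(self) -> bytes: ...
+ #
+ #         @staticmethod
+ #         def version() -> int:
+ #             return 1
+ #
+ #         @classmethod
+ #         def create(cls) -> "Store": ...
+ #
+ # Recognized decorators are stripped from dec.decorators and recorded as flags
+ # on the function (abstract_status, is_static, is_class, is_property, ...).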
+ if refers_to_fullname(d, "abc.abstractmethod"): + removed.append(i) + dec.func.abstract_status = IS_ABSTRACT + self.check_decorated_function_is_method("abstractmethod", dec) + elif refers_to_fullname(d, ("asyncio.coroutines.coroutine", "types.coroutine")): + removed.append(i) + dec.func.is_awaitable_coroutine = True + elif refers_to_fullname(d, "builtins.staticmethod"): + removed.append(i) + dec.func.is_static = True + dec.var.is_staticmethod = True + self.check_decorated_function_is_method("staticmethod", dec) + elif refers_to_fullname(d, "builtins.classmethod"): + removed.append(i) + dec.func.is_class = True + dec.var.is_classmethod = True + self.check_decorated_function_is_method("classmethod", dec) + elif refers_to_fullname(d, OVERRIDE_DECORATOR_NAMES): + removed.append(i) + dec.func.is_explicit_override = True + self.check_decorated_function_is_method("override", dec) + elif refers_to_fullname( + d, + ( + "builtins.property", + "abc.abstractproperty", + "functools.cached_property", + "enum.property", + "types.DynamicClassAttribute", + ), + ): + removed.append(i) + dec.func.is_property = True + dec.var.is_property = True + if refers_to_fullname(d, "abc.abstractproperty"): + dec.func.abstract_status = IS_ABSTRACT + elif refers_to_fullname(d, "functools.cached_property"): + dec.var.is_settable_property = True + self.check_decorated_function_is_method("property", dec) + elif refers_to_fullname(d, "typing.no_type_check"): + dec.var.type = AnyType(TypeOfAny.special_form) + no_type_check = True + elif refers_to_fullname(d, FINAL_DECORATOR_NAMES): + if self.is_class_scope(): + assert self.type is not None, "No type set at class scope" + if self.type.is_protocol: + self.msg.protocol_members_cant_be_final(d) + else: + dec.func.is_final = True + dec.var.is_final = True + removed.append(i) + else: + self.fail("@final cannot be used with non-method functions", d) + elif refers_to_fullname(d, TYPE_CHECK_ONLY_NAMES): + # TODO: support `@overload` funcs. + dec.func.is_type_check_only = True + elif isinstance(d, CallExpr) and refers_to_fullname( + d.callee, DATACLASS_TRANSFORM_NAMES + ): + dec.func.dataclass_transform_spec = self.parse_dataclass_transform_spec(d) + elif (deprecated := self.get_deprecated(d)) is not None: + dec.func.deprecated = f"function {dec.fullname} is deprecated: {deprecated}" + elif not dec.var.is_property: + # We have seen a "non-trivial" decorator before seeing @property, if + # we will see a @property later, give an error, as we don't support this. 
+ could_be_decorated_property = True + for i in reversed(removed): + del dec.decorators[i] + if (not dec.is_overload or dec.var.is_property) and self.type: + dec.var.info = self.type + dec.var.is_initialized_in_class = True + if not no_type_check and self.recurse_into_functions: + dec.func.accept(self) + if could_be_decorated_property and dec.decorators and dec.var.is_property: + self.fail( + "Decorators on top of @property are not supported", dec, code=PROPERTY_DECORATOR + ) + if (dec.func.is_static or dec.func.is_class) and dec.var.is_property: + self.fail("Only instance methods can be decorated with @property", dec) + if dec.func.abstract_status == IS_ABSTRACT and dec.func.is_final: + self.fail(f"Method {dec.func.name} is both abstract and final", dec) + if dec.func.is_static and dec.func.is_class: + self.fail(message_registry.CLASS_PATTERN_CLASS_OR_STATIC_METHOD, dec) + + def check_decorated_function_is_method(self, decorator: str, context: Context) -> None: + if not self.type or self.is_func_scope(): + self.fail(f'"{decorator}" used with a non-method', context) + + # + # Classes + # + + def visit_class_def(self, defn: ClassDef) -> None: + self.statement = defn + self.incomplete_type_stack.append(not defn.info) + namespace = self.qualified_name(defn.name) + with self.tvar_scope_frame(self.tvar_scope.class_frame(namespace)): + if self.push_type_args(defn.type_args, defn) is None: + self.mark_incomplete(defn.name, defn) + return + + self.analyze_class(defn) + self.pop_type_args(defn.type_args) + self.incomplete_type_stack.pop() + + def push_type_args( + self, type_args: list[TypeParam] | None, context: Context + ) -> list[tuple[str, TypeVarLikeExpr]] | None: + if not type_args: + return [] + self.locals.append(SymbolTable()) + self.scope_stack.append(SCOPE_ANNOTATION) + tvs: list[tuple[str, TypeVarLikeExpr]] = [] + for p in type_args: + tv = self.analyze_type_param(p, context) + if tv is None: + return None + tvs.append((p.name, tv)) + + if self.is_defined_type_param(p.name): + self.fail(f'"{p.name}" already defined as a type parameter', context) + else: + assert self.add_symbol( + p.name, tv, context, no_progress=True, type_param=True + ), "Type parameter should not be discarded" + + return tvs + + def is_defined_type_param(self, name: str) -> bool: + for names in self.locals: + if names is None: + continue + if name in names: + node = names[name].node + if isinstance(node, TypeVarLikeExpr): + return True + return False + + def analyze_type_param( + self, type_param: TypeParam, context: Context + ) -> TypeVarLikeExpr | None: + fullname = self.qualified_name(type_param.name) + if type_param.upper_bound: + upper_bound = self.anal_type(type_param.upper_bound, allow_placeholder=True) + # TODO: we should validate the upper bound is valid for a given kind. + if upper_bound is None: + # This and below copies special-casing for old-style type variables, that + # is equally necessary for new-style classes to break a vicious circle. 
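+ # [Editor's note — illustrative sketch, not part of this module.]
+ # New-style (PEP 695, Python 3.12+) type parameters analyzed here, optionally
+ # with an upper bound (hypothetical names):
+ #
+ #     from collections.abc import Hashable
+ #
+ #     class Registry[T: Hashable]: ...
+ #
+ #     def first[T](items: list[T]) -> T:
+ #         return items[0]
+ #
+ # Each parameter becomes a TypeVarExpr, ParamSpecExpr, or TypeVarTupleExpr
+ # depending on its declared kind.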
+ upper_bound = PlaceholderType(None, [], context.line) + else: + if type_param.kind == TYPE_VAR_TUPLE_KIND: + upper_bound = self.named_type("builtins.tuple", [self.object_type()]) + else: + upper_bound = self.object_type() + if type_param.default: + default = self.anal_type( + type_param.default, + allow_placeholder=True, + allow_unbound_tvars=True, + report_invalid_types=False, + allow_param_spec_literals=type_param.kind == PARAM_SPEC_KIND, + allow_tuple_literal=type_param.kind == PARAM_SPEC_KIND, + allow_unpack=type_param.kind == TYPE_VAR_TUPLE_KIND, + ) + if default is None: + default = PlaceholderType(None, [], context.line) + elif type_param.kind == TYPE_VAR_KIND: + default = self.check_typevar_default(default, type_param.default) + elif type_param.kind == PARAM_SPEC_KIND: + default = self.check_paramspec_default(default, type_param.default) + elif type_param.kind == TYPE_VAR_TUPLE_KIND: + default = self.check_typevartuple_default(default, type_param.default) + else: + default = AnyType(TypeOfAny.from_omitted_generics) + if type_param.kind == TYPE_VAR_KIND: + values: list[Type] = [] + if type_param.values: + for value in type_param.values: + analyzed = self.anal_type(value, allow_placeholder=True) + if analyzed is None: + analyzed = PlaceholderType(None, [], context.line) + if has_type_vars(analyzed): + self.fail(message_registry.TYPE_VAR_GENERIC_CONSTRAINT_TYPE, context) + values.append(AnyType(TypeOfAny.from_error)) + else: + values.append(analyzed) + return TypeVarExpr( + name=type_param.name, + fullname=fullname, + values=values, + upper_bound=upper_bound, + default=default, + variance=VARIANCE_NOT_READY, + is_new_style=True, + line=context.line, + ) + elif type_param.kind == PARAM_SPEC_KIND: + return ParamSpecExpr( + name=type_param.name, + fullname=fullname, + upper_bound=upper_bound, + default=default, + is_new_style=True, + line=context.line, + ) + else: + assert type_param.kind == TYPE_VAR_TUPLE_KIND + tuple_fallback = self.named_type("builtins.tuple", [self.object_type()]) + return TypeVarTupleExpr( + name=type_param.name, + fullname=fullname, + upper_bound=upper_bound, + tuple_fallback=tuple_fallback, + default=default, + is_new_style=True, + line=context.line, + ) + + def pop_type_args(self, type_args: list[TypeParam] | None) -> None: + if not type_args: + return + self.locals.pop() + self.scope_stack.pop() + + def analyze_class(self, defn: ClassDef) -> None: + fullname = self.qualified_name(defn.name) + if not defn.info and not self.is_core_builtin_class(defn): + # Add placeholder so that self-references in base classes can be + # resolved. We don't want this to cause a deferral, since if there + # are no incomplete references, we'll replace this with a TypeInfo + # before returning. + placeholder = PlaceholderNode(fullname, defn, defn.line, becomes_typeinfo=True) + self.add_symbol(defn.name, placeholder, defn, can_defer=False) + + tag = self.track_incomplete_refs() + + # Restore base classes after previous iteration (things like Generic[T] might be removed). 
+ defn.base_type_exprs.extend(defn.removed_base_type_exprs) + defn.removed_base_type_exprs.clear() + + self.infer_metaclass_and_bases_from_compat_helpers(defn) + + bases = defn.base_type_exprs + bases, tvar_defs, is_protocol = self.clean_up_bases_and_infer_type_variables( + defn, bases, context=defn + ) + + self.check_type_alias_bases(bases) + + for tvd in tvar_defs: + if isinstance(tvd, TypeVarType) and any( + has_placeholder(t) for t in [tvd.upper_bound] + tvd.values + ): + # Some type variable bounds or values are not ready, we need + # to re-analyze this class. + self.defer() + if has_placeholder(tvd.default): + # Placeholder values in TypeVarLikeTypes may get substituted in. + # Defer current target until they are ready. + self.mark_incomplete(defn.name, defn) + return + + self.analyze_class_keywords(defn) + bases_result = self.analyze_base_classes(bases) + if bases_result is None or self.found_incomplete_ref(tag): + # Something was incomplete. Defer current target. + self.mark_incomplete(defn.name, defn) + return + + base_types, base_error = bases_result + if any(isinstance(base, PlaceholderType) for base, _ in base_types): + # We need to know the TypeInfo of each base to construct the MRO. Placeholder types + # are okay in nested positions, since they can't affect the MRO. + self.mark_incomplete(defn.name, defn) + return + + declared_metaclass, should_defer, any_meta = self.get_declared_metaclass( + defn.name, defn.metaclass + ) + if should_defer or self.found_incomplete_ref(tag): + # Metaclass was not ready. Defer current target. + self.mark_incomplete(defn.name, defn) + return + + if self.analyze_typeddict_classdef(defn): + if defn.info: + self.setup_type_vars(defn, tvar_defs) + self.setup_alias_type_vars(defn) + return + + if self.analyze_namedtuple_classdef(defn, tvar_defs): + return + + # Create TypeInfo for class now that base classes and the MRO can be calculated. + self.prepare_class_def(defn) + self.setup_type_vars(defn, tvar_defs) + if base_error: + defn.info.fallback_to_any = True + if any_meta: + defn.info.meta_fallback_to_any = True + + with self.scope.class_scope(defn.info): + self.configure_base_classes(defn, base_types) + defn.info.is_protocol = is_protocol + self.recalculate_metaclass(defn, declared_metaclass) + defn.info.runtime_protocol = False + + if defn.type_args: + # PEP 695 type parameters are not in scope in class decorators, so + # temporarily disable type parameter namespace. 
+ type_params_names = self.locals.pop() + self.scope_stack.pop() + for decorator in defn.decorators: + self.analyze_class_decorator(defn, decorator) + if defn.type_args: + self.locals.append(type_params_names) + self.scope_stack.append(SCOPE_ANNOTATION) + + self.analyze_class_body_common(defn) + + def check_type_alias_bases(self, bases: list[Expression]) -> None: + for base in bases: + if isinstance(base, IndexExpr): + base = base.base + if ( + isinstance(base, RefExpr) + and isinstance(base.node, TypeAlias) + and base.node.python_3_12_type_alias + ): + self.fail( + 'Type alias defined using "type" statement not valid as base class', base + ) + + def setup_type_vars(self, defn: ClassDef, tvar_defs: list[TypeVarLikeType]) -> None: + defn.type_vars = tvar_defs + defn.info.type_vars = [] + # we want to make sure any additional logic in add_type_vars gets run + defn.info.add_type_vars() + + def setup_alias_type_vars(self, defn: ClassDef) -> None: + assert defn.info.special_alias is not None + defn.info.special_alias.alias_tvars = list(defn.type_vars) + # It is a bit unfortunate that we need to inline some logic from TypeAlias constructor, + # but it is required, since type variables may change during semantic analyzer passes. + for i, t in enumerate(defn.type_vars): + if isinstance(t, TypeVarTupleType): + defn.info.special_alias.tvar_tuple_index = i + target = defn.info.special_alias.target + assert isinstance(target, ProperType) + if isinstance(target, TypedDictType): + target.fallback.args = type_vars_as_args(defn.type_vars) + elif isinstance(target, TupleType): + target.partial_fallback.args = type_vars_as_args(defn.type_vars) + else: + assert False, f"Unexpected special alias type: {type(target)}" + + def is_core_builtin_class(self, defn: ClassDef) -> bool: + return self.cur_mod_id == "builtins" and defn.name in CORE_BUILTIN_CLASSES + + def analyze_class_body_common(self, defn: ClassDef) -> None: + """Parts of class body analysis that are common to all kinds of class defs.""" + self.enter_class(defn.info) + if any(b.self_type is not None for b in defn.info.mro): + self.setup_self_type() + defn.defs.accept(self) + self.apply_class_plugin_hooks(defn) + self.leave_class() + + def analyze_typeddict_classdef(self, defn: ClassDef) -> bool: + if ( + defn.info + and defn.info.typeddict_type + and not has_placeholder(defn.info.typeddict_type) + ): + # This is a valid TypedDict, and it is fully analyzed. + return True + is_typeddict, info = self.typed_dict_analyzer.analyze_typeddict_classdef(defn) + if is_typeddict: + for decorator in defn.decorators: + decorator.accept(self) + if info is not None: + self.analyze_class_decorator_common(defn, info, decorator) + if info is None: + self.mark_incomplete(defn.name, defn) + else: + self.prepare_class_def(defn, info, custom_names=True) + return True + return False + + def analyze_namedtuple_classdef( + self, defn: ClassDef, tvar_defs: list[TypeVarLikeType] + ) -> bool: + """Check if this class can define a named tuple.""" + if ( + defn.info + and defn.info.is_named_tuple + and defn.info.tuple_type + and not has_placeholder(defn.info.tuple_type) + ): + # Don't reprocess everything. We just need to process methods defined + # in the named tuple class body. 
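+ # [Editor's note — illustrative sketch, not part of this module.]
+ # Class-based TypedDict and NamedTuple definitions routed through the helpers
+ # above (hypothetical names):
+ #
+ #     from typing import NamedTuple, TypedDict
+ #
+ #     class Movie(TypedDict):
+ #         title: str
+ #         year: int
+ #
+ #     class Point(NamedTuple):
+ #         x: int
+ #         y: int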
+ is_named_tuple = True + info: TypeInfo | None = defn.info + else: + is_named_tuple, info = self.named_tuple_analyzer.analyze_namedtuple_classdef( + defn, self.is_stub_file, self.is_func_scope() + ) + if is_named_tuple: + if info is None: + self.mark_incomplete(defn.name, defn) + else: + self.prepare_class_def(defn, info, custom_names=True) + self.setup_type_vars(defn, tvar_defs) + self.setup_alias_type_vars(defn) + with self.scope.class_scope(defn.info): + for deco in defn.decorators: + deco.accept(self) + self.analyze_class_decorator_common(defn, defn.info, deco) + with self.named_tuple_analyzer.save_namedtuple_body(info): + self.analyze_class_body_common(defn) + return True + return False + + def apply_class_plugin_hooks(self, defn: ClassDef) -> None: + """Apply a plugin hook that may infer a more precise definition for a class.""" + + for decorator in defn.decorators: + decorator_name = self.get_fullname_for_hook(decorator) + if decorator_name: + hook = self.plugin.get_class_decorator_hook(decorator_name) + # Special case: if the decorator is itself decorated with + # typing.dataclass_transform, apply the hook for the dataclasses plugin + # TODO: remove special casing here + if hook is None and find_dataclass_transform_spec(decorator): + hook = dataclasses_plugin.dataclass_tag_callback + if hook: + hook(ClassDefContext(defn, decorator, self)) + + if defn.metaclass: + metaclass_name = self.get_fullname_for_hook(defn.metaclass) + if metaclass_name: + hook = self.plugin.get_metaclass_hook(metaclass_name) + if hook: + hook(ClassDefContext(defn, defn.metaclass, self)) + + for base_expr in defn.base_type_exprs: + base_name = self.get_fullname_for_hook(base_expr) + if base_name: + hook = self.plugin.get_base_class_hook(base_name) + if hook: + hook(ClassDefContext(defn, base_expr, self)) + + # Check if the class definition itself triggers a dataclass transform (via a parent class/ + # metaclass) + spec = find_dataclass_transform_spec(defn) + if spec is not None: + dataclasses_plugin.add_dataclass_tag(defn.info) + + def get_fullname_for_hook(self, expr: Expression) -> str | None: + if isinstance(expr, CallExpr): + return self.get_fullname_for_hook(expr.callee) + elif isinstance(expr, IndexExpr): + return self.get_fullname_for_hook(expr.base) + elif isinstance(expr, RefExpr): + if expr.fullname: + return expr.fullname + # If we don't have a fullname look it up. This happens because base classes are + # analyzed in a different manner (see exprtotype.py) and therefore those AST + # nodes will not have full names. 
+ sym = self.lookup_type_node(expr) + if sym: + return sym.fullname + return None + + def analyze_class_keywords(self, defn: ClassDef) -> None: + for value in defn.keywords.values(): + value.accept(self) + + def enter_class(self, info: TypeInfo) -> None: + # Remember previous active class + self.type_stack.append(self.type) + self.locals.append(None) # Add class scope + self.scope_stack.append(SCOPE_CLASS) + self.block_depth.append(-1) # The class body increments this to 0 + self.loop_depth.append(0) + self._type = info + self.missing_names.append(set()) + + def leave_class(self) -> None: + """Restore analyzer state.""" + self.block_depth.pop() + self.loop_depth.pop() + self.locals.pop() + self.scope_stack.pop() + self._type = self.type_stack.pop() + self.missing_names.pop() + + def analyze_class_decorator(self, defn: ClassDef, decorator: Expression) -> None: + decorator.accept(self) + self.analyze_class_decorator_common(defn, defn.info, decorator) + if isinstance(decorator, RefExpr): + if decorator.fullname in RUNTIME_PROTOCOL_DECOS: + if defn.info.is_protocol: + defn.info.runtime_protocol = True + else: + self.fail("@runtime_checkable can only be used with protocol classes", defn) + elif isinstance(decorator, CallExpr) and refers_to_fullname( + decorator.callee, DATACLASS_TRANSFORM_NAMES + ): + defn.info.dataclass_transform_spec = self.parse_dataclass_transform_spec(decorator) + + def analyze_class_decorator_common( + self, defn: ClassDef, info: TypeInfo, decorator: Expression + ) -> None: + """Common method for applying class decorators. + + Called on regular classes, typeddicts, and namedtuples. + """ + if refers_to_fullname(decorator, FINAL_DECORATOR_NAMES): + info.is_final = True + elif refers_to_fullname(decorator, DISJOINT_BASE_DECORATOR_NAMES): + info.is_disjoint_base = True + elif refers_to_fullname(decorator, TYPE_CHECK_ONLY_NAMES): + info.is_type_check_only = True + elif (deprecated := self.get_deprecated(decorator)) is not None: + info.deprecated = f"class {defn.fullname} is deprecated: {deprecated}" + + def clean_up_bases_and_infer_type_variables( + self, defn: ClassDef, base_type_exprs: list[Expression], context: Context + ) -> tuple[list[Expression], list[TypeVarLikeType], bool]: + """Remove extra base classes such as Generic and infer type vars. + + For example, consider this class: + + class Foo(Bar, Generic[T]): ... + + Now we will remove Generic[T] from bases of Foo and infer that the + type variable 'T' is a type argument of Foo. + + Note that this is performed *before* semantic analysis. + + Returns (remaining base expressions, inferred type variables, is protocol). + """ + removed: list[int] = [] + declared_tvars: TypeVarLikeList = [] + is_protocol = False + if defn.type_args is not None: + for p in defn.type_args: + node = self.lookup(p.name, context) + assert node is not None + assert isinstance(node.node, TypeVarLikeExpr) + declared_tvars.append((p.name, node.node)) + + for i, base_expr in enumerate(base_type_exprs): + if isinstance(base_expr, StarExpr): + base_expr.valid = True + self.analyze_type_expr(base_expr) + + try: + base = self.expr_to_unanalyzed_type(base_expr) + except TypeTranslationError: + # This error will be caught later. + continue + result = self.analyze_class_typevar_declaration(base) + if result is not None: + tvars = result[0] + is_protocol |= result[1] + if declared_tvars: + if defn.type_args: + if is_protocol: + self.fail('No arguments expected for "Protocol" base class', context) + else: + self.fail("Generic[...] 
base class is redundant", context) + else: + self.fail( + "Only single Generic[...] or Protocol[...] can be in bases", context + ) + removed.append(i) + declared_tvars.extend(tvars) + if isinstance(base, UnboundType): + sym = self.lookup_qualified(base.name, base) + if sym is not None and sym.node is not None: + if sym.node.fullname in PROTOCOL_NAMES and i not in removed: + # also remove bare 'Protocol' bases + removed.append(i) + is_protocol = True + + all_tvars = self.get_all_bases_tvars(base_type_exprs, removed) + if declared_tvars: + if len(remove_dups(declared_tvars)) < len(declared_tvars) and not defn.type_args: + self.fail("Duplicate type variables in Generic[...] or Protocol[...]", context) + declared_tvars = remove_dups(declared_tvars) + if not set(all_tvars).issubset(set(declared_tvars)): + if defn.type_args: + undeclared = sorted(set(all_tvars) - set(declared_tvars)) + self.msg.type_parameters_should_be_declared( + [tv[0] for tv in undeclared], context + ) + else: + self.fail( + "If Generic[...] or Protocol[...] is present" + " it should list all type variables", + context, + ) + # In case of error, Generic tvars will go first + declared_tvars = remove_dups(declared_tvars + all_tvars) + else: + declared_tvars = all_tvars + for i in reversed(removed): + # We need to actually remove the base class expressions like Generic[T], + # mostly because otherwise they will create spurious dependencies in fine + # grained incremental mode. + defn.removed_base_type_exprs.append(defn.base_type_exprs[i]) + del base_type_exprs[i] + tvar_defs = self.tvar_defs_from_tvars(declared_tvars, context) + return base_type_exprs, tvar_defs, is_protocol + + def analyze_class_typevar_declaration(self, base: Type) -> tuple[TypeVarLikeList, bool] | None: + """Analyze type variables declared using Generic[...] or Protocol[...]. + + Args: + base: Non-analyzed base class + + Return None if the base class does not declare type variables. Otherwise, + return the type variables. 
+ """ + if not isinstance(base, UnboundType): + return None + unbound = base + sym = self.lookup_qualified(unbound.name, unbound) + if sym is None or sym.node is None: + return None + if ( + sym.node.fullname == "typing.Generic" + or sym.node.fullname in PROTOCOL_NAMES + and base.args + ): + is_proto = sym.node.fullname != "typing.Generic" + tvars: TypeVarLikeList = [] + have_type_var_tuple = False + for arg in unbound.args: + tag = self.track_incomplete_refs() + tvar = self.analyze_unbound_tvar(arg) + if tvar: + if isinstance(tvar[1], TypeVarTupleExpr): + if have_type_var_tuple: + self.fail("Can only use one type var tuple in a class def", base) + continue + have_type_var_tuple = True + tvars.append(tvar) + elif not self.found_incomplete_ref(tag): + self.fail("Free type variable expected in %s[...]" % sym.node.name, base) + return tvars, is_proto + return None + + def analyze_unbound_tvar(self, t: Type) -> tuple[str, TypeVarLikeExpr] | None: + if isinstance(t, UnpackType) and isinstance(t.type, UnboundType): + return self.analyze_unbound_tvar_impl(t.type, is_unpacked=True) + if isinstance(t, UnboundType): + sym = self.lookup_qualified(t.name, t) + if sym and sym.fullname in UNPACK_TYPE_NAMES: + inner_t = t.args[0] + if isinstance(inner_t, UnboundType): + return self.analyze_unbound_tvar_impl(inner_t, is_unpacked=True) + return None + return self.analyze_unbound_tvar_impl(t) + return None + + def analyze_unbound_tvar_impl( + self, t: UnboundType, is_unpacked: bool = False, is_typealias_param: bool = False + ) -> tuple[str, TypeVarLikeExpr] | None: + assert not is_unpacked or not is_typealias_param, "Mutually exclusive conditions" + sym = self.lookup_qualified(t.name, t) + if sym and isinstance(sym.node, PlaceholderNode): + self.record_incomplete_ref() + if not is_unpacked and sym and isinstance(sym.node, ParamSpecExpr): + if sym.fullname and not self.tvar_scope.allow_binding(sym.fullname): + # It's bound by our type variable scope + return None + return t.name, sym.node + if (is_unpacked or is_typealias_param) and sym and isinstance(sym.node, TypeVarTupleExpr): + if sym.fullname and not self.tvar_scope.allow_binding(sym.fullname): + # It's bound by our type variable scope + return None + return t.name, sym.node + if sym is None or not isinstance(sym.node, TypeVarExpr) or is_unpacked: + return None + elif sym.fullname and not self.tvar_scope.allow_binding(sym.fullname): + # It's bound by our type variable scope + return None + else: + assert isinstance(sym.node, TypeVarExpr) + return t.name, sym.node + + def find_type_var_likes(self, t: Type) -> TypeVarLikeList: + visitor = FindTypeVarVisitor(self, self.tvar_scope) + t.accept(visitor) + return visitor.type_var_likes + + def get_all_bases_tvars( + self, base_type_exprs: list[Expression], removed: list[int] + ) -> TypeVarLikeList: + """Return all type variable references in bases.""" + tvars: TypeVarLikeList = [] + for i, base_expr in enumerate(base_type_exprs): + if i not in removed: + try: + base = self.expr_to_unanalyzed_type(base_expr) + except TypeTranslationError: + # This error will be caught later. 
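+ # [Editor's note — illustrative sketch, not part of this module.]
+ # Old-style declaration of class type variables via Generic[...] or
+ # Protocol[...] bases, as detected above (hypothetical names):
+ #
+ #     from typing import Generic, Protocol, TypeVar
+ #
+ #     T = TypeVar("T")
+ #
+ #     class Box(Generic[T]):
+ #         value: T
+ #
+ #     class Reader(Protocol[T]):
+ #         def read(self) -> T: ...
+ #
+ # The Generic/Protocol base expression is removed from the base list and T is
+ # recorded as a type variable of the class.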
+ continue + base_tvars = self.find_type_var_likes(base) + tvars.extend(base_tvars) + return remove_dups(tvars) + + def tvar_defs_from_tvars( + self, tvars: TypeVarLikeList, context: Context + ) -> list[TypeVarLikeType]: + tvar_defs: list[TypeVarLikeType] = [] + last_tvar_name_with_default: str | None = None + for name, tvar_expr in tvars: + tvar_expr.default = tvar_expr.default.accept( + TypeVarDefaultTranslator(self, tvar_expr.name, context) + ) + # PEP-695 type variables that are redeclared in an inner scope are warned + # about elsewhere. + if not tvar_expr.is_new_style and not self.tvar_scope.allow_binding( + tvar_expr.fullname + ): + self.fail( + message_registry.TYPE_VAR_REDECLARED_IN_NESTED_CLASS.format(name), context + ) + tvar_def = self.tvar_scope.bind_new(name, tvar_expr) + if last_tvar_name_with_default is not None and not tvar_def.has_default(): + self.msg.tvar_without_default_type( + tvar_def.name, last_tvar_name_with_default, context + ) + tvar_def.default = AnyType(TypeOfAny.from_error) + elif tvar_def.has_default(): + last_tvar_name_with_default = tvar_def.name + tvar_defs.append(tvar_def) + return tvar_defs + + def get_and_bind_all_tvars(self, type_exprs: list[Expression]) -> list[TypeVarLikeType]: + """Return all type variable references in item type expressions. + + This is a helper for generic TypedDicts and NamedTuples. Essentially it is + a simplified version of the logic we use for ClassDef bases. We duplicate + some amount of code, because it is hard to refactor common pieces. + """ + tvars = [] + for base_expr in type_exprs: + try: + base = self.expr_to_unanalyzed_type(base_expr) + except TypeTranslationError: + # This error will be caught later. + continue + base_tvars = self.find_type_var_likes(base) + tvars.extend(base_tvars) + tvars = remove_dups(tvars) # Variables are defined in order of textual appearance. + tvar_defs = [] + for name, tvar_expr in tvars: + tvar_def = self.tvar_scope.bind_new(name, tvar_expr) + tvar_defs.append(tvar_def) + return tvar_defs + + def prepare_class_def( + self, defn: ClassDef, info: TypeInfo | None = None, custom_names: bool = False + ) -> None: + """Prepare for the analysis of a class definition. + + Create an empty TypeInfo and store it in a symbol table, or if the 'info' + argument is provided, store it instead (used for magic type definitions). + """ + if not defn.info: + defn.fullname = self.qualified_name(defn.name) + # TODO: Nested classes + info = info or self.make_empty_type_info(defn) + defn.info = info + info.defn = defn + if not custom_names: + # Some special classes (in particular NamedTuples) use custom fullname logic. + # Don't override it here (also see comment below, this needs cleanup). + if not self.is_func_scope(): + info._fullname = self.qualified_name(defn.name) + else: + info._fullname = info.name + local_name = defn.name + if "@" in local_name: + local_name = local_name.split("@")[0] + self.add_symbol(local_name, defn.info, defn) + if self.is_nested_within_func_scope(): + # We need to preserve local classes, let's store them + # in globals under mangled unique names + # + # TODO: Putting local classes into globals breaks assumptions in fine-grained + # incremental mode and we should avoid it. In general, this logic is too + # ad-hoc and needs to be removed/refactored. + if "@" not in defn.info._fullname: + global_name = defn.info.name + "@" + str(defn.line) + defn.info._fullname = self.cur_mod_id + "." + global_name + else: + # Preserve name from previous fine-grained incremental run. 
+ global_name = defn.info.name + defn.fullname = defn.info._fullname + if defn.info.is_named_tuple or defn.info.typeddict_type: + # Named tuples and Typed dicts nested within a class are stored + # in the class symbol table. + self.add_symbol_skip_local(global_name, defn.info) + else: + self.globals[global_name] = SymbolTableNode(GDEF, defn.info) + + def make_empty_type_info(self, defn: ClassDef) -> TypeInfo: + if ( + self.is_module_scope() + and self.cur_mod_id == "builtins" + and defn.name in CORE_BUILTIN_CLASSES + ): + # Special case core built-in classes. A TypeInfo was already + # created for it before semantic analysis, but with a dummy + # ClassDef. Patch the real ClassDef object. + info = self.globals[defn.name].node + assert isinstance(info, TypeInfo) + else: + info = TypeInfo(SymbolTable(), defn, self.cur_mod_id) + info.set_line(defn) + return info + + def get_name_repr_of_expr(self, expr: Expression) -> str | None: + """Try finding a short simplified textual representation of a base class expression.""" + if isinstance(expr, NameExpr): + return expr.name + if isinstance(expr, MemberExpr): + return get_member_expr_fullname(expr) + if isinstance(expr, IndexExpr): + return self.get_name_repr_of_expr(expr.base) + if isinstance(expr, CallExpr): + return self.get_name_repr_of_expr(expr.callee) + return None + + def analyze_base_classes( + self, base_type_exprs: list[Expression] + ) -> tuple[list[tuple[ProperType, Expression]], bool] | None: + """Analyze base class types. + + Return None if some definition was incomplete. Otherwise, return a tuple + with these items: + + * List of (analyzed type, original expression) tuples + * Boolean indicating whether one of the bases had a semantic analysis error + """ + is_error = False + bases = [] + for base_expr in base_type_exprs: + if ( + isinstance(base_expr, RefExpr) + and base_expr.fullname in TYPED_NAMEDTUPLE_NAMES + TPDICT_NAMES + ) or ( + isinstance(base_expr, CallExpr) + and isinstance(base_expr.callee, RefExpr) + and base_expr.callee.fullname in TPDICT_NAMES + ): + # Ignore magic bases for now. + # For example: + # class Foo(TypedDict): ... # RefExpr + # class Foo(NamedTuple): ... # RefExpr + # class Foo(TypedDict("Foo", {"a": int})): ... # CallExpr + continue + + try: + base = self.expr_to_analyzed_type( + base_expr, allow_placeholder=True, allow_type_any=True + ) + except TypeTranslationError: + name = self.get_name_repr_of_expr(base_expr) + if isinstance(base_expr, CallExpr): + msg = "Unsupported dynamic base class" + else: + msg = "Invalid base class" + if name: + msg += f' "{name}"' + self.fail(msg, base_expr) + is_error = True + continue + if base is None: + return None + base = get_proper_type(base) + bases.append((base, base_expr)) + return bases, is_error + + def configure_base_classes( + self, defn: ClassDef, bases: list[tuple[ProperType, Expression]] + ) -> None: + """Set up base classes. + + This computes several attributes on the corresponding TypeInfo defn.info + related to the base classes: defn.info.bases, defn.info.mro, and + miscellaneous others (at least tuple_type, fallback_to_any, and is_enum.) 
+ """ + base_types: list[Instance] = [] + info = defn.info + + for base, base_expr in bases: + if isinstance(base, TupleType): + actual_base = self.configure_tuple_base_class(defn, base) + base_types.append(actual_base) + elif isinstance(base, Instance): + if base.type.is_newtype: + self.fail('Cannot subclass "NewType"', defn) + base_types.append(base) + elif isinstance(base, AnyType): + if self.options.disallow_subclassing_any: + if isinstance(base_expr, (NameExpr, MemberExpr)): + msg = f'Class cannot subclass "{base_expr.name}" (has type "Any")' + else: + msg = 'Class cannot subclass value of type "Any"' + self.fail(msg, base_expr) + info.fallback_to_any = True + elif isinstance(base, TypedDictType): + base_types.append(base.fallback) + else: + msg = "Invalid base class" + name = self.get_name_repr_of_expr(base_expr) + if name: + msg += f' "{name}"' + self.fail(msg, base_expr) + info.fallback_to_any = True + if self.options.disallow_any_unimported and has_any_from_unimported_type(base): + if isinstance(base_expr, (NameExpr, MemberExpr)): + prefix = f"Base type {base_expr.name}" + else: + prefix = "Base type" + self.msg.unimported_type_becomes_any(prefix, base, base_expr) + check_for_explicit_any( + base, self.options, self.is_typeshed_stub_file, self.msg, context=base_expr + ) + + # Add 'object' as implicit base if there is no other base class. + if not base_types and defn.fullname != "builtins.object": + base_types.append(self.object_type()) + + info.bases = base_types + + # Calculate the MRO. + if not self.verify_base_classes(defn): + self.set_dummy_mro(defn.info) + return + if not self.verify_duplicate_base_classes(defn): + # We don't want to block the typechecking process, + # so, we just insert `Any` as the base class and show an error. + self.set_any_mro(defn.info) + self.calculate_class_mro(defn, self.object_type) + + def configure_tuple_base_class(self, defn: ClassDef, base: TupleType) -> Instance: + info = defn.info + + # There may be an existing valid tuple type from previous semanal iterations. + # Use equality to check if it is the case. + if info.tuple_type and info.tuple_type != base and not has_placeholder(info.tuple_type): + self.fail("Class has two incompatible bases derived from tuple", defn) + defn.has_incompatible_baseclass = True + if info.special_alias and has_placeholder(info.special_alias.target): + self.process_placeholder( + None, "tuple base", defn, force_progress=base != info.tuple_type + ) + info.update_tuple_type(base) + self.setup_alias_type_vars(defn) + + if base.partial_fallback.type.fullname == "builtins.tuple" and not has_placeholder(base): + # Fallback can only be safely calculated after semantic analysis, since base + # classes may be incomplete. Postpone the calculation. + self.schedule_patch(PRIORITY_FALLBACKS, lambda: calculate_tuple_fallback(base)) + + return base.partial_fallback + + def set_dummy_mro(self, info: TypeInfo) -> None: + # Give it an MRO consisting of just the class itself and object. + info.mro = [info, self.object_type().type] + info.bad_mro = True + + def set_any_mro(self, info: TypeInfo) -> None: + # Give it an MRO consisting direct `Any` subclass. + info.fallback_to_any = True + info.mro = [info, self.object_type().type] + + def calculate_class_mro( + self, defn: ClassDef, obj_type: Callable[[], Instance] | None = None + ) -> None: + """Calculate method resolution order for a class. + + `obj_type` exists just to fill in empty base class list in case of an error. 
+ """ + try: + calculate_mro(defn.info, obj_type) + except MroError: + self.fail( + f'Cannot determine consistent method resolution order (MRO) for "{defn.name}"', + defn, + ) + self.set_dummy_mro(defn.info) + # Allow plugins to alter the MRO to handle the fact that `def mro()` + # on metaclasses permits MRO rewriting. + if defn.fullname: + hook = self.plugin.get_customize_class_mro_hook(defn.fullname) + if hook: + hook(ClassDefContext(defn, FakeExpression(), self)) + + def infer_metaclass_and_bases_from_compat_helpers(self, defn: ClassDef) -> None: + """Lookup for special metaclass declarations, and update defn fields accordingly. + + * six.with_metaclass(M, B1, B2, ...) + * @six.add_metaclass(M) + * future.utils.with_metaclass(M, B1, B2, ...) + * past.utils.with_metaclass(M, B1, B2, ...) + """ + + # Look for six.with_metaclass(M, B1, B2, ...) + with_meta_expr: Expression | None = None + if len(defn.base_type_exprs) == 1: + base_expr = defn.base_type_exprs[0] + if isinstance(base_expr, CallExpr) and isinstance(base_expr.callee, RefExpr): + self.analyze_type_expr(base_expr) + if ( + base_expr.callee.fullname + in { + "six.with_metaclass", + "future.utils.with_metaclass", + "past.utils.with_metaclass", + } + and len(base_expr.args) >= 1 + and all(kind == ARG_POS for kind in base_expr.arg_kinds) + ): + with_meta_expr = base_expr.args[0] + defn.base_type_exprs = base_expr.args[1:] + + # Look for @six.add_metaclass(M) + add_meta_expr: Expression | None = None + for dec_expr in defn.decorators: + if isinstance(dec_expr, CallExpr) and isinstance(dec_expr.callee, RefExpr): + dec_expr.callee.accept(self) + if ( + dec_expr.callee.fullname == "six.add_metaclass" + and len(dec_expr.args) == 1 + and dec_expr.arg_kinds[0] == ARG_POS + ): + add_meta_expr = dec_expr.args[0] + break + + metas = {defn.metaclass, with_meta_expr, add_meta_expr} - {None} + if len(metas) == 0: + return + if len(metas) > 1: + self.fail("Multiple metaclass definitions", defn, code=codes.METACLASS) + return + defn.metaclass = metas.pop() + + def verify_base_classes(self, defn: ClassDef) -> bool: + info = defn.info + cycle = False + for base in info.bases: + baseinfo = base.type + if self.is_base_class(info, baseinfo): + self.fail("Cycle in inheritance hierarchy", defn) + cycle = True + return not cycle + + def verify_duplicate_base_classes(self, defn: ClassDef) -> bool: + dup = find_duplicate(defn.info.direct_base_classes()) + if dup: + self.fail(f'Duplicate base class "{dup.name}"', defn) + return not dup + + def is_base_class(self, t: TypeInfo, s: TypeInfo) -> bool: + """Determine if t is a base class of s (but do not use mro).""" + # Search the base class graph for t, starting from s. + worklist = [s] + visited = {s} + while worklist: + nxt = worklist.pop() + if nxt == t: + return True + for base in nxt.bases: + if base.type not in visited: + worklist.append(base.type) + visited.add(base.type) + return False + + def get_declared_metaclass( + self, name: str, metaclass_expr: Expression | None + ) -> tuple[Instance | None, bool, bool]: + """Get declared metaclass from metaclass expression. + + Returns a tuple of three values: + * A metaclass instance or None + * A boolean indicating whether we should defer + * A boolean indicating whether we should set metaclass Any fallback + (either for Any metaclass or invalid/dynamic metaclass). + + The two boolean flags can only be True if instance is None. 
+ """ + declared_metaclass = None + if metaclass_expr: + metaclass_name = None + if isinstance(metaclass_expr, NameExpr): + metaclass_name = metaclass_expr.name + elif isinstance(metaclass_expr, MemberExpr): + metaclass_name = get_member_expr_fullname(metaclass_expr) + if metaclass_name is None: + self.fail( + f'Dynamic metaclass not supported for "{name}"', + metaclass_expr, + code=codes.METACLASS, + ) + return None, False, True + sym = self.lookup_qualified(metaclass_name, metaclass_expr) + if sym is None: + # Probably a name error - it is already handled elsewhere + return None, False, True + if isinstance(sym.node, Var) and isinstance(get_proper_type(sym.node.type), AnyType): + if self.options.disallow_subclassing_any: + self.fail( + f'Class cannot use "{sym.node.name}" as a metaclass (has type "Any")', + metaclass_expr, + code=codes.METACLASS, + ) + return None, False, True + if isinstance(sym.node, PlaceholderNode): + return None, True, False # defer later in the caller + + # Support type aliases, like `_Meta: TypeAlias = type` + metaclass_info: Node | None = sym.node + if ( + isinstance(sym.node, TypeAlias) + and not sym.node.python_3_12_type_alias + and not sym.node.alias_tvars + ): + target = get_proper_type(sym.node.target) + if isinstance(target, Instance): + metaclass_info = target.type + + if not isinstance(metaclass_info, TypeInfo) or metaclass_info.tuple_type is not None: + self.fail( + f'Invalid metaclass "{metaclass_name}"', metaclass_expr, code=codes.METACLASS + ) + return None, False, False + if not metaclass_info.is_metaclass(): + self.fail( + 'Metaclasses not inheriting from "type" are not supported', + metaclass_expr, + code=codes.METACLASS, + ) + return None, False, False + inst = fill_typevars(metaclass_info) + assert isinstance(inst, Instance) + declared_metaclass = inst + return declared_metaclass, False, False + + def recalculate_metaclass(self, defn: ClassDef, declared_metaclass: Instance | None) -> None: + defn.info.declared_metaclass = declared_metaclass + defn.info.metaclass_type = defn.info.calculate_metaclass_type() + if any(info.is_protocol for info in defn.info.mro): + if ( + not defn.info.metaclass_type + or defn.info.metaclass_type.type.fullname == "builtins.type" + ): + # All protocols and their subclasses have ABCMeta metaclass by default. + # TODO: add a metaclass conflict check if there is another metaclass. + abc_meta = self.named_type_or_none("abc.ABCMeta", []) + if abc_meta is not None: # May be None in tests with incomplete lib-stub. + defn.info.metaclass_type = abc_meta + if defn.info.metaclass_type and defn.info.metaclass_type.type.has_base("enum.EnumMeta"): + defn.info.is_enum = True + if defn.type_vars: + self.fail("Enum class cannot be generic", defn) + + # + # Imports + # + + def visit_import(self, i: Import) -> None: + self.statement = i + for id, as_id in i.ids: + # Modules imported in a stub file without using 'import X as X' won't get exported + # When implicit re-exporting is disabled, we have the same behavior as stubs. 
+ use_implicit_reexport = not self.is_stub_file and self.options.implicit_reexport + if as_id is not None: + base_id = id + imported_id = as_id + module_public = use_implicit_reexport or id == as_id + else: + base_id = id.split(".")[0] + imported_id = base_id + module_public = use_implicit_reexport + + if base_id in self.modules: + node = self.modules[base_id] + if self.is_func_scope(): + kind = LDEF + elif self.type is not None: + kind = MDEF + else: + kind = GDEF + symbol = SymbolTableNode( + kind, node, module_public=module_public, module_hidden=not module_public + ) + self.add_imported_symbol( + imported_id, + symbol, + context=i, + module_public=module_public, + module_hidden=not module_public, + ) + else: + self.add_unknown_imported_symbol( + imported_id, + context=i, + target_name=base_id, + module_public=module_public, + module_hidden=not module_public, + ) + + def visit_import_from(self, imp: ImportFrom) -> None: + self.statement = imp + module_id = self.correct_relative_import(imp) + module = self.modules.get(module_id) + for id, as_id in imp.names: + fullname = module_id + "." + id + self.set_future_import_flags(fullname) + if module is None: + node = None + elif module_id == self.cur_mod_id and fullname in self.modules: + # Submodule takes precedence over definition in surround package, for + # compatibility with runtime semantics in typical use cases. This + # could more precisely model runtime semantics by taking into account + # the line number beyond which the local definition should take + # precedence, but doesn't seem to be important in most use cases. + node = SymbolTableNode(GDEF, self.modules[fullname]) + else: + if id == as_id == "__all__" and module_id in self.export_map: + self.all_exports[:] = self.export_map[module_id] + node = module.names.get(id) + + missing_submodule = False + imported_id = as_id or id + + # Modules imported in a stub file without using 'from Y import X as X' will + # not get exported. + # When implicit re-exporting is disabled, we have the same behavior as stubs. + use_implicit_reexport = not self.is_stub_file and self.options.implicit_reexport + module_public = use_implicit_reexport or (as_id is not None and id == as_id) + + # If the module does not contain a symbol with the name 'id', + # try checking if it's a module instead. + if not node: + mod = self.modules.get(fullname) + if mod is not None: + kind = self.current_symbol_kind() + node = SymbolTableNode(kind, mod) + elif fullname in self.missing_modules: + missing_submodule = True + # If it is still not resolved, check for a module level __getattr__ + if module and not node and "__getattr__" in module.names: + # We store the fullname of the original definition so that we can + # detect whether two imported names refer to the same thing. + fullname = module_id + "." + id + gvar = self.create_getattr_var(module.names["__getattr__"], imported_id, fullname) + if gvar: + self.add_symbol( + imported_id, + gvar, + imp, + module_public=module_public, + module_hidden=not module_public, + ) + continue + + if node: + self.process_imported_symbol( + node, module_id, id, imported_id, fullname, module_public, context=imp + ) + if node.module_hidden: + self.report_missing_module_attribute( + module_id, + id, + imported_id, + module_public=module_public, + module_hidden=not module_public, + context=imp, + add_unknown_imported_symbol=False, + ) + elif module and not missing_submodule: + # Target module exists but the imported name is missing or hidden. 
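+ # (Illustrative: e.g. `from mod import does_not_exist`, or a name that a stub
+ # does not explicitly re-export.)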
+ self.report_missing_module_attribute( + module_id, + id, + imported_id, + module_public=module_public, + module_hidden=not module_public, + context=imp, + ) + else: + # Import of a missing (sub)module. + self.add_unknown_imported_symbol( + imported_id, + imp, + target_name=fullname, + module_public=module_public, + module_hidden=not module_public, + ) + + def process_imported_symbol( + self, + node: SymbolTableNode, + module_id: str, + id: str, + imported_id: str, + fullname: str, + module_public: bool, + context: ImportBase, + ) -> None: + module_hidden = not module_public and ( + # `from package import submodule` should work regardless of whether package + # re-exports submodule, so we shouldn't hide it + not isinstance(node.node, MypyFile) + or fullname not in self.modules + # but given `from somewhere import random_unrelated_module` we should hide + # random_unrelated_module + or not fullname.startswith(self.cur_mod_id + ".") + ) + + if isinstance(node.node, PlaceholderNode): + if self.final_iteration: + self.report_missing_module_attribute( + module_id, + id, + imported_id, + module_public=module_public, + module_hidden=module_hidden, + context=context, + ) + return + else: + # This might become a type. + self.mark_incomplete( + imported_id, + node.node, + module_public=module_public, + module_hidden=module_hidden, + becomes_typeinfo=True, + ) + # NOTE: we take the original node even for final `Var`s. This is to support + # a common pattern when constants are re-exported (same applies to import *). + self.add_imported_symbol( + imported_id, node, context, module_public=module_public, module_hidden=module_hidden + ) + + def report_missing_module_attribute( + self, + import_id: str, + source_id: str, + imported_id: str, + module_public: bool, + module_hidden: bool, + context: Node, + add_unknown_imported_symbol: bool = True, + ) -> None: + # Missing attribute. + if self.is_incomplete_namespace(import_id): + # We don't know whether the name will be there, since the namespace + # is incomplete. Defer the current target. + self.mark_incomplete( + imported_id, context, module_public=module_public, module_hidden=module_hidden + ) + return + message = f'Module "{import_id}" has no attribute "{source_id}"' + # Suggest alternatives, if any match is found. + module = self.modules.get(import_id) + if module: + if source_id in module.names.keys() and not module.names[source_id].module_public: + message = ( + f'Module "{import_id}" does not explicitly export attribute "{source_id}"' + ) + elif not ( + self.options.ignore_errors or self.cur_mod_node.path in self.errors.ignored_files + ): + alternatives = set(module.names.keys()).difference({source_id}) + matches = best_matches(source_id, alternatives, n=3) + if matches: + suggestion = f"; maybe {pretty_seq(matches, 'or')}?" + message += f"{suggestion}" + self.fail(message, context, code=codes.ATTR_DEFINED) + if add_unknown_imported_symbol: + self.add_unknown_imported_symbol( + imported_id, + context, + target_name=None, + module_public=module_public, + module_hidden=not module_public, + ) + + if import_id == "typing": + # The user probably has a missing definition in a test fixture. Let's verify. + fullname = f"builtins.{source_id.lower()}" + if ( + self.lookup_fully_qualified_or_none(fullname) is None + and fullname in SUGGESTED_TEST_FIXTURES + ): + # Yes. Generate a helpful note. 
+ self.msg.add_fixture_note(fullname, context) + else: + typing_extensions = self.modules.get("typing_extensions") + if typing_extensions and source_id in typing_extensions.names: + self.msg.note( + f"Use `from typing_extensions import {source_id}` instead", + context, + code=codes.ATTR_DEFINED, + ) + self.msg.note( + "See https://mypy.readthedocs.io/en/stable/runtime_troubles.html#using-new-additions-to-the-typing-module", + context, + code=codes.ATTR_DEFINED, + ) + + def process_import_over_existing_name( + self, + imported_id: str, + existing_symbol: SymbolTableNode, + module_symbol: SymbolTableNode, + import_node: ImportBase, + ) -> bool: + if existing_symbol.node is module_symbol.node: + # We added this symbol on previous iteration. + return False + if existing_symbol.kind in (LDEF, GDEF, MDEF) and isinstance( + existing_symbol.node, (Var, FuncDef, TypeInfo, Decorator, TypeAlias) + ): + # This is a valid import over an existing definition in the file. Construct a dummy + # assignment that we'll use to type check the import. + lvalue = NameExpr(imported_id) + lvalue.kind = existing_symbol.kind + lvalue.node = existing_symbol.node + rvalue = NameExpr(imported_id) + rvalue.kind = module_symbol.kind + rvalue.node = module_symbol.node + if isinstance(rvalue.node, TypeAlias): + # Suppress bogus errors from the dummy assignment if rvalue is an alias. + # Otherwise mypy may complain that alias is invalid in runtime context. + rvalue.is_alias_rvalue = True + assignment = AssignmentStmt([lvalue], rvalue) + for node in assignment, lvalue, rvalue: + node.set_line(import_node) + import_node.assignments.append(assignment) + return True + return False + + def correct_relative_import(self, node: ImportFrom | ImportAll) -> str: + import_id, ok = correct_relative_import( + self.cur_mod_id, node.relative, node.id, self.cur_mod_node.is_package_init_file() + ) + if not ok: + self.fail("Relative import climbs too many namespaces", node) + return import_id + + def visit_import_all(self, i: ImportAll) -> None: + i_id = self.correct_relative_import(i) + if i_id in self.modules: + m = self.modules[i_id] + if self.is_incomplete_namespace(i_id): + # Any names could be missing from the current namespace if the target module + # namespace is incomplete. + self.mark_incomplete("*", i) + for name, node in m.names.items(): + if node.no_serialize: + # This is either internal or generated symbol, skip it to avoid problems + # like accidental name conflicts or invalid cross-references. + continue + fullname = i_id + "." + name + self.set_future_import_flags(fullname) + # if '__all__' exists, all nodes not included have had module_public set to + # False, and we can skip checking '_' because it's been explicitly included. + if node.module_public and (not name.startswith("_") or "__all__" in m.names): + if isinstance(node.node, MypyFile): + # Star import of submodule from a package, add it as a dependency. + self.imports.add(node.node.fullname) + # `from x import *` always reexports symbols + self.add_imported_symbol( + name, node, context=i, module_public=True, module_hidden=False + ) + + else: + # Don't add any dummy symbols for 'from x import *' if 'x' is unknown. 
+ pass + + # + # Assignment + # + + def visit_assignment_expr(self, s: AssignmentExpr) -> None: + s.value.accept(self) + if self.is_func_scope(): + if not self.check_valid_comprehension(s): + return + self.analyze_lvalue(s.target, escape_comprehensions=True, has_explicit_value=True) + + def check_valid_comprehension(self, s: AssignmentExpr) -> bool: + """Check that assignment expression is not nested within comprehension at class scope. + + class C: + [(j := i) for i in [1, 2, 3]] + is a syntax error that is not enforced by Python parser, but at later steps. + """ + for i, scope_type in enumerate(reversed(self.scope_stack)): + if scope_type != SCOPE_COMPREHENSION and i < len(self.locals) - 1: + if self.locals[-1 - i] is None: + self.fail( + "Assignment expression within a comprehension" + " cannot be used in a class body", + s, + code=codes.SYNTAX, + serious=True, + blocker=True, + ) + return False + break + return True + + def visit_assignment_stmt(self, s: AssignmentStmt) -> None: + self.statement = s + + # Special case assignment like X = X. + if self.analyze_identity_global_assignment(s): + return + + tag = self.track_incomplete_refs() + + # Here we have a chicken and egg problem: at this stage we can't call + # can_be_type_alias(), because we have not enough information about rvalue. + # But we can't use a full visit because it may emit extra incomplete refs (namely + # when analysing any type applications there) thus preventing the further analysis. + # To break the tie, we first analyse rvalue partially, if it can be a type alias. + if self.can_possibly_be_type_form(s): + old_basic_type_applications = self.basic_type_applications + self.basic_type_applications = True + with self.allow_unbound_tvars_set(): + s.rvalue.accept(self) + self.basic_type_applications = old_basic_type_applications + elif self.can_possibly_be_typevarlike_declaration(s): + # Allow unbound tvars inside TypeVarLike defaults to be evaluated later + with self.allow_unbound_tvars_set(): + s.rvalue.accept(self) + else: + s.rvalue.accept(self) + + if self.found_incomplete_ref(tag) or self.should_wait_rhs(s.rvalue): + # Initializer couldn't be fully analyzed. Defer the current node and give up. + # Make sure that if we skip the definition of some local names, they can't be + # added later in this scope, since an earlier definition should take precedence. + for expr in names_modified_by_assignment(s): + self.mark_incomplete(expr.name, expr) + return + if self.can_possibly_be_type_form(s): + # Now re-visit those rvalues that were we skipped type applications above. + # This should be safe as generally semantic analyzer is idempotent. + with self.allow_unbound_tvars_set(): + s.rvalue.accept(self) + + # The r.h.s. 
is now ready to be classified, first check if it is a special form: + special_form = False + # * type alias + if self.check_and_set_up_type_alias(s): + s.is_alias_def = True + special_form = True + elif isinstance(s.rvalue, CallExpr): + # * type variable definition + if self.process_typevar_declaration(s): + special_form = True + elif self.process_paramspec_declaration(s): + special_form = True + elif self.process_typevartuple_declaration(s): + special_form = True + # * type constructors + elif self.analyze_namedtuple_assign(s): + special_form = True + elif self.analyze_typeddict_assign(s): + special_form = True + elif self.newtype_analyzer.process_newtype_declaration(s): + special_form = True + elif self.analyze_enum_assign(s): + special_form = True + + if special_form: + self.record_special_form_lvalue(s) + return + # Clear the alias flag if assignment turns out not a special form after all. It + # may be set to True while there were still placeholders due to forward refs. + s.is_alias_def = False + + # OK, this is a regular assignment, perform the necessary analysis steps. + s.is_final_def = self.unwrap_final(s) + self.analyze_lvalues(s) + self.check_final_implicit_def(s) + self.store_final_status(s) + self.check_classvar(s) + self.process_type_annotation(s) + self.analyze_rvalue_as_type_form(s) + self.apply_dynamic_class_hook(s) + if not s.type: + self.process_module_assignment(s.lvalues, s.rvalue, s) + self.process__all__(s) + self.process__deletable__(s) + self.process__slots__(s) + + def analyze_identity_global_assignment(self, s: AssignmentStmt) -> bool: + """Special case 'X = X' in global scope. + + This allows supporting some important use cases. + + Return true if special casing was applied. + """ + if not isinstance(s.rvalue, NameExpr) or len(s.lvalues) != 1: + # Not of form 'X = X' + return False + lvalue = s.lvalues[0] + if not isinstance(lvalue, NameExpr) or s.rvalue.name != lvalue.name: + # Not of form 'X = X' + return False + if self.type is not None or self.is_func_scope(): + # Not in global scope + return False + # It's an assignment like 'X = X' in the global scope. + name = lvalue.name + sym = self.lookup(name, s) + if sym is None: + if self.final_iteration: + # Fall back to normal assignment analysis. + return False + else: + self.defer() + return True + else: + if sym.node is None: + # Something special -- fall back to normal assignment analysis. + return False + if name not in self.globals: + # The name is from builtins. Add an alias to the current module. + self.add_symbol(name, sym.node, s) + if not isinstance(sym.node, PlaceholderNode): + for node in s.rvalue, lvalue: + node.node = sym.node + node.kind = GDEF + node.fullname = sym.node.fullname + return True + + def should_wait_rhs(self, rv: Expression) -> bool: + """Can we already classify this r.h.s. of an assignment or should we wait? + + This returns True if we don't have enough information to decide whether + an assignment is just a normal variable definition or a special form. + Always return False if this is a final iteration. This will typically cause + the lvalue to be classified as a variable plus emit an error. + """ + if self.final_iteration: + # No chance, nothing has changed. 
+ return False + if isinstance(rv, NameExpr): + n = self.lookup(rv.name, rv) + if n and isinstance(n.node, PlaceholderNode) and not n.node.becomes_typeinfo: + return True + elif isinstance(rv, MemberExpr): + fname = get_member_expr_fullname(rv) + if fname: + n = self.lookup_qualified(fname, rv, suppress_errors=True) + if n and isinstance(n.node, PlaceholderNode) and not n.node.becomes_typeinfo: + return True + elif isinstance(rv, IndexExpr) and isinstance(rv.base, RefExpr): + return self.should_wait_rhs(rv.base) + elif isinstance(rv, CallExpr) and isinstance(rv.callee, RefExpr): + # This is only relevant for builtin SCC where things like 'TypeVar' + # may be not ready. + return self.should_wait_rhs(rv.callee) + return False + + def can_be_type_alias(self, rv: Expression, allow_none: bool = False) -> bool: + """Is this a valid r.h.s. for an alias definition? + + Note: this function should be only called for expressions where self.should_wait_rhs() + returns False. + """ + if isinstance(rv, RefExpr) and self.is_type_ref(rv, bare=True): + return True + if isinstance(rv, IndexExpr) and self.is_type_ref(rv.base, bare=False): + return True + if self.is_none_alias(rv): + return True + if allow_none and isinstance(rv, NameExpr) and rv.fullname == "builtins.None": + return True + if isinstance(rv, OpExpr) and rv.op == "|": + if self.is_stub_file: + return True + if self.can_be_type_alias(rv.left, allow_none=True) and self.can_be_type_alias( + rv.right, allow_none=True + ): + return True + return False + + def can_possibly_be_type_form(self, s: AssignmentStmt) -> bool: + """Like can_be_type_alias(), but simpler and doesn't require fully analyzed rvalue. + + Instead, use lvalues/annotations structure to figure out whether this can potentially be + a type alias definition, NamedTuple, or TypedDict. Another difference from above function + is that we are only interested IndexExpr, CallExpr and OpExpr rvalues, since only those + can be potentially recursive (things like `A = A` are never valid). + """ + if len(s.lvalues) > 1: + return False + if isinstance(s.rvalue, CallExpr) and isinstance(s.rvalue.callee, RefExpr): + ref = s.rvalue.callee.fullname + return ref in TPDICT_NAMES or ref in TYPED_NAMEDTUPLE_NAMES + if not isinstance(s.lvalues[0], NameExpr): + return False + if s.unanalyzed_type is not None and not self.is_pep_613(s): + return False + if not isinstance(s.rvalue, (IndexExpr, OpExpr)): + return False + # Something that looks like Foo = Bar[Baz, ...] + return True + + def can_possibly_be_typevarlike_declaration(self, s: AssignmentStmt) -> bool: + """Check if r.h.s. can be a TypeVarLike declaration.""" + if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], NameExpr): + return False + if not isinstance(s.rvalue, CallExpr) or not isinstance(s.rvalue.callee, NameExpr): + return False + ref = s.rvalue.callee + ref.accept(self) + return ref.fullname in TYPE_VAR_LIKE_NAMES + + def is_type_ref(self, rv: Expression, bare: bool = False) -> bool: + """Does this expression refer to a type? + + This includes: + * Special forms, like Any or Union + * Classes (except subscripted enums) + * Other type aliases + * PlaceholderNodes with becomes_typeinfo=True (these can be not ready class + definitions, and not ready aliases). + + If bare is True, this is not a base of an index expression, so some special + forms are not valid (like a bare Union). + + Note: This method should be only used in context of a type alias definition. 
+ This method can only return True for RefExprs, to check if C[int] is a valid + target for type alias call this method on expr.base (i.e. on C in C[int]). + See also can_be_type_alias(). + """ + if not isinstance(rv, RefExpr): + return False + if isinstance(rv.node, TypeVarLikeExpr): + self.fail(f'Type variable "{rv.fullname}" is invalid as target for type alias', rv) + return False + + if bare: + # These three are valid even if bare, for example + # A = Tuple is just equivalent to A = Tuple[Any, ...]. + valid_refs = {"typing.Any", "typing.Tuple", "typing.Callable"} + else: + valid_refs = type_constructors + + if isinstance(rv.node, TypeAlias) or rv.fullname in valid_refs: + return True + if isinstance(rv.node, TypeInfo): + if bare: + return True + # Assignment color = Color['RED'] defines a variable, not an alias. + return not rv.node.is_enum + if isinstance(rv.node, Var): + return rv.node.fullname in NEVER_NAMES + + if isinstance(rv, NameExpr): + n = self.lookup(rv.name, rv) + if n and isinstance(n.node, PlaceholderNode) and n.node.becomes_typeinfo: + return True + elif isinstance(rv, MemberExpr): + fname = get_member_expr_fullname(rv) + if fname: + # The r.h.s. for variable definitions may not be a type reference but just + # an instance attribute, so suppress the errors. + n = self.lookup_qualified(fname, rv, suppress_errors=True) + if n and isinstance(n.node, PlaceholderNode) and n.node.becomes_typeinfo: + return True + return False + + def is_none_alias(self, node: Expression) -> bool: + """Is this a r.h.s. for a None alias? + + We special case the assignments like Void = type(None), to allow using + Void in type annotations. + """ + if isinstance(node, CallExpr): + if ( + isinstance(node.callee, NameExpr) + and len(node.args) == 1 + and isinstance(node.args[0], NameExpr) + ): + call = self.lookup_qualified(node.callee.name, node.callee) + arg = self.lookup_qualified(node.args[0].name, node.args[0]) + if ( + call is not None + and call.node + and call.node.fullname == "builtins.type" + and arg is not None + and arg.node + and arg.node.fullname == "builtins.None" + ): + return True + return False + + def record_special_form_lvalue(self, s: AssignmentStmt) -> None: + """Record minimal necessary information about l.h.s. of a special form. + + This exists mostly for compatibility with the old semantic analyzer. + """ + lvalue = s.lvalues[0] + assert isinstance(lvalue, NameExpr) + lvalue.is_special_form = True + if self.current_symbol_kind() == GDEF: + lvalue.fullname = self.qualified_name(lvalue.name) + lvalue.kind = self.current_symbol_kind() + + def analyze_enum_assign(self, s: AssignmentStmt) -> bool: + """Check if s defines an Enum.""" + if isinstance(s.rvalue, CallExpr) and isinstance(s.rvalue.analyzed, EnumCallExpr): + # This is an analyzed enum definition. + # It is valid iff it can be stored correctly, failures were already reported. + return self._is_single_name_assignment(s) + return self.enum_call_analyzer.process_enum_call(s, self.is_func_scope()) + + def analyze_namedtuple_assign(self, s: AssignmentStmt) -> bool: + """Check if s defines a namedtuple.""" + if isinstance(s.rvalue, CallExpr) and isinstance(s.rvalue.analyzed, NamedTupleExpr): + if s.rvalue.analyzed.info.tuple_type and not has_placeholder( + s.rvalue.analyzed.info.tuple_type + ): + # This is an analyzed named tuple definition. + # It is valid iff it can be stored correctly, failures were already reported. 
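+ # (Illustrative: a call-form definition such as
+ # `Point = NamedTuple("Point", [("x", int)])` lands here once its rvalue was
+ # analyzed on an earlier iteration.)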
+ return self._is_single_name_assignment(s) + if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], (NameExpr, MemberExpr)): + return False + lvalue = s.lvalues[0] + if isinstance(lvalue, MemberExpr): + if isinstance(s.rvalue, CallExpr) and isinstance(s.rvalue.callee, RefExpr): + fullname = s.rvalue.callee.fullname + if fullname == "collections.namedtuple" or fullname in TYPED_NAMEDTUPLE_NAMES: + self.fail("NamedTuple type as an attribute is not supported", lvalue) + return False + name = lvalue.name + namespace = self.qualified_name(name) + with self.tvar_scope_frame(self.tvar_scope.class_frame(namespace)): + internal_name, info, tvar_defs = self.named_tuple_analyzer.check_namedtuple( + s.rvalue, name, self.is_func_scope() + ) + if internal_name is None: + return False + if internal_name != name: + self.fail( + 'First argument to namedtuple() should be "{}", not "{}"'.format( + name, internal_name + ), + s.rvalue, + code=codes.NAME_MATCH, + ) + return True + # Yes, it's a valid namedtuple, but defer if it is not ready. + if not info: + self.mark_incomplete(name, lvalue, becomes_typeinfo=True) + else: + self.setup_type_vars(info.defn, tvar_defs) + self.setup_alias_type_vars(info.defn) + return True + + def analyze_typeddict_assign(self, s: AssignmentStmt) -> bool: + """Check if s defines a typed dict.""" + if isinstance(s.rvalue, CallExpr) and isinstance(s.rvalue.analyzed, TypedDictExpr): + if s.rvalue.analyzed.info.typeddict_type and not has_placeholder( + s.rvalue.analyzed.info.typeddict_type + ): + # This is an analyzed typed dict definition. + # It is valid iff it can be stored correctly, failures were already reported. + return self._is_single_name_assignment(s) + if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], (NameExpr, MemberExpr)): + return False + lvalue = s.lvalues[0] + name = lvalue.name + namespace = self.qualified_name(name) + with self.tvar_scope_frame(self.tvar_scope.class_frame(namespace)): + is_typed_dict, info, tvar_defs = self.typed_dict_analyzer.check_typeddict( + s.rvalue, name, self.is_func_scope() + ) + if not is_typed_dict: + return False + if isinstance(lvalue, MemberExpr): + self.fail("TypedDict type as attribute is not supported", lvalue) + return False + # Yes, it's a valid typed dict, but defer if it is not ready. + if not info: + self.mark_incomplete(name, lvalue, becomes_typeinfo=True) + else: + defn = info.defn + self.setup_type_vars(defn, tvar_defs) + self.setup_alias_type_vars(defn) + return True + + def _is_single_name_assignment(self, s: AssignmentStmt) -> bool: + return len(s.lvalues) == 1 and isinstance(s.lvalues[0], NameExpr) + + def analyze_lvalues(self, s: AssignmentStmt) -> None: + # We cannot use s.type, because analyze_simple_literal_type() will set it. + explicit = s.unanalyzed_type is not None + if self.is_final_type(s.unanalyzed_type): + # We need to exclude bare Final. 
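+ # (Illustrative: `x: Final = 1` is a bare Final with no type argument and does
+ # not count as an explicit type, while `x: Final[int] = 1` does.)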
+ assert isinstance(s.unanalyzed_type, UnboundType) + if not s.unanalyzed_type.args: + explicit = False + + if s.rvalue: + if isinstance(s.rvalue, TempNode): + has_explicit_value = not s.rvalue.no_rhs + else: + has_explicit_value = True + else: + has_explicit_value = False + + for lval in s.lvalues: + self.analyze_lvalue( + lval, + explicit_type=explicit, + is_final=s.is_final_def, + has_explicit_value=has_explicit_value, + ) + + def analyze_rvalue_as_type_form(self, s: AssignmentStmt) -> None: + if TYPE_FORM in self.options.enable_incomplete_feature: + self.try_parse_as_type_expression(s.rvalue) + + def apply_dynamic_class_hook(self, s: AssignmentStmt) -> None: + if not isinstance(s.rvalue, CallExpr): + return + fname = "" + call = s.rvalue + while True: + if isinstance(call.callee, RefExpr): + fname = call.callee.fullname + # check if method call + if not fname and isinstance(call.callee, MemberExpr): + callee_expr = call.callee.expr + if isinstance(callee_expr, RefExpr) and callee_expr.fullname: + method_name = call.callee.name + fname = callee_expr.fullname + "." + method_name + elif ( + isinstance(callee_expr, IndexExpr) + and isinstance(callee_expr.base, RefExpr) + and isinstance(callee_expr.analyzed, TypeApplication) + ): + method_name = call.callee.name + fname = callee_expr.base.fullname + "." + method_name + elif isinstance(callee_expr, CallExpr): + # check if chain call + call = callee_expr + continue + break + if not fname: + return + hook = self.plugin.get_dynamic_class_hook(fname) + if not hook: + return + for lval in s.lvalues: + if not isinstance(lval, NameExpr): + continue + hook(DynamicClassDefContext(call, lval.name, self)) + + def unwrap_final(self, s: AssignmentStmt) -> bool: + """Strip Final[...] if present in an assignment. + + This is done to invoke type inference during type checking phase for this + assignment. Also, Final[...] doesn't affect type in any way -- it is rather an + access qualifier for given `Var`. + + Also perform various consistency checks. + + Returns True if Final[...] was present. + """ + if not s.unanalyzed_type or not self.is_final_type(s.unanalyzed_type): + return False + assert isinstance(s.unanalyzed_type, UnboundType) + if len(s.unanalyzed_type.args) > 1: + self.fail("Final[...] takes at most one type argument", s.unanalyzed_type) + invalid_bare_final = False + if not s.unanalyzed_type.args: + s.type = None + if ( + isinstance(s.rvalue, TempNode) + and s.rvalue.no_rhs + # Filter duplicate errors, we already reported this: + and not (self.type and self.type.is_named_tuple) + ): + invalid_bare_final = True + self.fail("Type in Final[...] can only be omitted if there is an initializer", s) + else: + s.type = s.unanalyzed_type.args[0] + + if ( + s.type is not None + and self.options.python_version < (3, 13) + and self.is_classvar(s.type) + ): + self.fail("Variable should not be annotated with both ClassVar and Final", s) + return False + + if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], RefExpr): + self.fail("Invalid final declaration", s) + return False + lval = s.lvalues[0] + assert isinstance(lval, RefExpr) + + # Reset inferred status if it was set due to simple literal rvalue on previous iteration. + # TODO: this is a best-effort quick fix, we should avoid the need to manually sync this, + # see https://github.com/python/mypy/issues/6458. 
+ if lval.is_new_def: + lval.is_inferred_def = s.type is None + + if self.loop_depth[-1] > 0: + self.fail("Cannot use Final inside a loop", s) + if self.type and self.type.is_protocol: + if self.is_class_scope(): + self.msg.protocol_members_cant_be_final(s) + if ( + isinstance(s.rvalue, TempNode) + and s.rvalue.no_rhs + and not self.is_stub_file + and not self.is_class_scope() + ): + if not invalid_bare_final: # Skip extra error messages. + self.msg.final_without_value(s) + return True + + def check_final_implicit_def(self, s: AssignmentStmt) -> None: + """Do basic checks for final declaration on self in __init__. + + Additional re-definition checks are performed by `analyze_lvalue`. + """ + if not s.is_final_def: + return + lval = s.lvalues[0] + assert isinstance(lval, RefExpr) + if isinstance(lval, MemberExpr): + if not self.is_self_member_ref(lval): + self.fail("Final can be only applied to a name or an attribute on self", s) + s.is_final_def = False + return + else: + assert self.function_stack + if self.function_stack[-1].name != "__init__": + self.fail("Can only declare a final attribute in class body or __init__", s) + s.is_final_def = False + return + + def store_final_status(self, s: AssignmentStmt) -> None: + """If this is a locally valid final declaration, set the corresponding flag on `Var`.""" + if s.is_final_def: + if len(s.lvalues) == 1 and isinstance(s.lvalues[0], RefExpr): + node = s.lvalues[0].node + if isinstance(node, Var): + node.is_final = True + if s.type: + node.final_value = constant_fold_expr(s.rvalue, self.cur_mod_id) + if self.is_class_scope() and ( + isinstance(s.rvalue, TempNode) and s.rvalue.no_rhs + ): + node.final_unset_in_class = True + else: + for lval in self.flatten_lvalues(s.lvalues): + # Special case: we are working with an `Enum`: + # + # class MyEnum(Enum): + # key = 'some value' + # + # Here `key` is implicitly final. In runtime, code like + # + # MyEnum.key = 'modified' + # + # will fail with `AttributeError: Cannot reassign members.` + # That's why we need to replicate this. + if ( + isinstance(lval, NameExpr) + and isinstance(self.type, TypeInfo) + and self.type.is_enum + ): + cur_node = self.type.names.get(lval.name, None) + if ( + cur_node + and isinstance(cur_node.node, Var) + and not (isinstance(s.rvalue, TempNode) and s.rvalue.no_rhs) + ): + # Double underscored members are writable on an `Enum`. + # (Except read-only `__members__` but that is handled in type checker) + cur_node.node.is_final = s.is_final_def = not is_dunder(cur_node.node.name) + + # Special case: deferred initialization of a final attribute in __init__. + # In this case we just pretend this is a valid final definition to suppress + # errors about assigning to final attribute. 
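+ # Illustrative example of the pattern handled below:
+ #     class C:
+ #         x: Final[int]              # value deliberately omitted in the class body
+ #         def __init__(self) -> None:
+ #             self.x = 1             # first assignment in __init__ is accepted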
+ if isinstance(lval, MemberExpr) and self.is_self_member_ref(lval): + assert self.type, "Self member outside a class" + cur_node = self.type.names.get(lval.name, None) + if cur_node and isinstance(cur_node.node, Var) and cur_node.node.is_final: + assert self.function_stack + current_function = self.function_stack[-1] + if ( + current_function.name == "__init__" + and cur_node.node.final_unset_in_class + and not cur_node.node.final_set_in_init + and not (isinstance(s.rvalue, TempNode) and s.rvalue.no_rhs) + ): + cur_node.node.final_set_in_init = True + s.is_final_def = True + + def flatten_lvalues(self, lvalues: list[Expression]) -> list[Expression]: + res: list[Expression] = [] + for lv in lvalues: + if isinstance(lv, (TupleExpr, ListExpr)): + res.extend(self.flatten_lvalues(lv.items)) + else: + res.append(lv) + return res + + def process_type_annotation(self, s: AssignmentStmt) -> None: + """Analyze type annotation or infer simple literal type.""" + if s.type: + lvalue = s.lvalues[-1] + allow_tuple_literal = isinstance(lvalue, TupleExpr) + analyzed = self.anal_type(s.type, allow_tuple_literal=allow_tuple_literal) + # Don't store not ready types (including placeholders). + if analyzed is None or has_placeholder(analyzed): + self.defer(s) + return + s.type = analyzed + if ( + self.type + and self.type.is_protocol + and isinstance(lvalue, NameExpr) + and isinstance(s.rvalue, TempNode) + and s.rvalue.no_rhs + ): + if isinstance(lvalue.node, Var): + lvalue.node.is_abstract_var = True + else: + if ( + self.type + and self.type.is_protocol + and self.is_annotated_protocol_member(s) + and not self.is_func_scope() + ): + self.fail("All protocol members must have explicitly declared types", s) + # Set the type if the rvalue is a simple literal (even if the above error occurred). + if len(s.lvalues) == 1 and isinstance(s.lvalues[0], RefExpr): + ref_expr = s.lvalues[0] + safe_literal_inference = True + if self.type and isinstance(ref_expr, NameExpr) and len(self.type.mro) > 1: + # Check if there is a definition in supertype. If yes, we can't safely + # decide here what to infer: int or Literal[42]. + safe_literal_inference = self.type.mro[1].get(ref_expr.name) is None + if safe_literal_inference and ref_expr.is_inferred_def: + s.type = self.analyze_simple_literal_type(s.rvalue, s.is_final_def) + if s.type: + # Store type into nodes. + for lvalue in s.lvalues: + self.store_declared_types(lvalue, s.type) + + def is_annotated_protocol_member(self, s: AssignmentStmt) -> bool: + """Check whether a protocol member is annotated. + + There are some exceptions that can be left unannotated, like ``__slots__``.""" + return any( + (isinstance(lv, NameExpr) and lv.name != "__slots__" and lv.is_inferred_def) + for lv in s.lvalues + ) + + def analyze_simple_literal_type(self, rvalue: Expression, is_final: bool) -> Type | None: + """Return builtins.int if rvalue is an int literal, etc. + + If this is a 'Final' context, we return "Literal[...]" instead. + """ + if self.function_stack: + # Skip inside a function; this is to avoid confusing + # the code that handles dead code due to isinstance() + # inside type variables with value restrictions (like + # AnyStr). 
+ return None + + value = constant_fold_expr(rvalue, self.cur_mod_id) + if value is None or isinstance(value, complex): + return None + + if isinstance(value, bool): + type_name = "builtins.bool" + elif isinstance(value, int): + type_name = "builtins.int" + elif isinstance(value, str): + type_name = "builtins.str" + elif isinstance(value, float): + type_name = "builtins.float" + + typ = self.named_type_or_none(type_name) + if typ and is_final: + return typ.copy_modified(last_known_value=LiteralType(value=value, fallback=typ)) + return typ + + def analyze_alias( + self, + name: str, + rvalue: Expression, + allow_placeholder: bool = False, + declared_type_vars: TypeVarLikeList | None = None, + all_declared_type_params_names: list[str] | None = None, + python_3_12_type_alias: bool = False, + ) -> tuple[Type | None, list[TypeVarLikeType], set[str], bool]: + """Check if 'rvalue' is a valid type allowed for aliasing (e.g. not a type variable). + + If yes, return the corresponding type, a list of type variables for generic aliases, + a set of names the alias depends on, and True if the original type has empty tuple index. + An example for the dependencies: + A = int + B = str + analyze_alias(dict[A, B])[2] == {'__main__.A', '__main__.B'} + """ + dynamic = bool(self.function_stack and self.function_stack[-1].is_dynamic()) + global_scope = not self.type and not self.function_stack + try: + typ = expr_to_unanalyzed_type( + rvalue, self.options, self.is_stub_file, lookup_qualified=self.lookup_qualified + ) + except TypeTranslationError: + self.fail( + "Invalid type alias: expression is not a valid type", rvalue, code=codes.VALID_TYPE + ) + return None, [], set(), False + + found_type_vars = self.find_type_var_likes(typ) + namespace = self.qualified_name(name) + alias_type_vars = found_type_vars if declared_type_vars is None else declared_type_vars + with self.tvar_scope_frame(self.tvar_scope.class_frame(namespace)): + tvar_defs = self.tvar_defs_from_tvars(alias_type_vars, typ) + + if python_3_12_type_alias: + with self.allow_unbound_tvars_set(): + rvalue.accept(self) + + analyzed, depends_on = analyze_type_alias( + typ, + self, + self.tvar_scope, + self.plugin, + self.options, + self.cur_mod_node, + self.is_typeshed_stub_file, + allow_placeholder=allow_placeholder, + in_dynamic_func=dynamic, + global_scope=global_scope, + allowed_alias_tvars=tvar_defs, + alias_type_params_names=all_declared_type_params_names, + python_3_12_type_alias=python_3_12_type_alias, + ) + + # There can be only one variadic variable at most, the error is reported elsewhere. + new_tvar_defs = [] + variadic = False + for td in tvar_defs: + if isinstance(td, TypeVarTupleType): + if variadic: + continue + variadic = True + new_tvar_defs.append(td) + + empty_tuple_index = typ.empty_tuple_index if isinstance(typ, UnboundType) else False + return analyzed, new_tvar_defs, depends_on, empty_tuple_index + + def is_pep_613(self, s: AssignmentStmt) -> bool: + if s.unanalyzed_type is not None and isinstance(s.unanalyzed_type, UnboundType): + lookup = self.lookup_qualified(s.unanalyzed_type.name, s, suppress_errors=True) + if lookup and lookup.fullname in TYPE_ALIAS_NAMES: + return True + return False + + def check_and_set_up_type_alias(self, s: AssignmentStmt) -> bool: + """Check if assignment creates a type alias and set it up as needed. + + Return True if it is a type alias (even if the target is not ready), + or False otherwise. 
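+
+ Illustrative examples: `Alias = list[int]` and `Alias: TypeAlias = list[int]` both
+ create aliases, while an explicitly annotated `x: type[int] = int` creates a
+ variable (see the rules checked in the body below).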
+ + Note: the resulting types for subscripted (including generic) aliases + are also stored in rvalue.analyzed. + """ + if s.invalid_recursive_alias: + return True + lvalue = s.lvalues[0] + if len(s.lvalues) > 1 or not isinstance(lvalue, NameExpr): + # First rule: Only simple assignments like Alias = ... create aliases. + return False + + pep_613 = self.is_pep_613(s) + if not pep_613 and s.unanalyzed_type is not None: + # Second rule: Explicit type (cls: Type[A] = A) always creates variable, not alias. + # unless using PEP 613 `cls: TypeAlias = A` + return False + + # It can be `A = TypeAliasType('A', ...)` call, in this case, + # we just take the second argument and analyze it: + type_params: TypeVarLikeList | None + all_type_params_names: list[str] | None + if self.check_type_alias_type_call(s.rvalue, name=lvalue.name): + rvalue = s.rvalue.args[1] + pep_695 = True + type_params, all_type_params_names = self.analyze_type_alias_type_params(s.rvalue) + else: + rvalue = s.rvalue + pep_695 = False + type_params = None + all_type_params_names = None + + if isinstance(rvalue, CallExpr) and rvalue.analyzed: + return False + + existing = self.current_symbol_table().get(lvalue.name) + # Third rule: type aliases can't be re-defined. For example: + # A: Type[float] = int + # A = float # OK, but this doesn't define an alias + # B = int + # B = float # Error! + # Don't create an alias in these cases: + if existing and ( + isinstance(existing.node, Var) # existing variable + or (isinstance(existing.node, TypeAlias) and not s.is_alias_def) # existing alias + or (isinstance(existing.node, PlaceholderNode) and existing.node.node.line < s.line) + ): # previous incomplete definition + # TODO: find a more robust way to track the order of definitions. + # Note: if is_alias_def=True, this is just a node from previous iteration. + if isinstance(existing.node, TypeAlias) and not s.is_alias_def: + self.fail( + 'Cannot assign multiple types to name "{}"' + ' without an explicit "type[...]" annotation'.format(lvalue.name), + lvalue, + ) + return False + + non_global_scope = self.type or self.is_func_scope() + if not pep_613 and not pep_695 and isinstance(rvalue, RefExpr) and non_global_scope: + # Fourth rule (special case): Non-subscripted right hand side creates a variable + # at class and function scopes. For example: + # + # class Model: + # ... + # class C: + # model = Model # this is automatically a variable with type 'Type[Model]' + # + # without this rule, this typical use case will require a lot of explicit + # annotations (see the second rule). + return False + if not pep_613 and not pep_695 and not self.can_be_type_alias(rvalue): + return False + + if existing and not isinstance(existing.node, (PlaceholderNode, TypeAlias)): + # Cannot redefine existing node as type alias. 
+ return False + + res: Type | None = None + if self.is_none_alias(rvalue): + res = NoneType() + alias_tvars: list[TypeVarLikeType] = [] + depends_on: set[str] = set() + empty_tuple_index = False + else: + tag = self.track_incomplete_refs() + res, alias_tvars, depends_on, empty_tuple_index = self.analyze_alias( + lvalue.name, + rvalue, + allow_placeholder=True, + declared_type_vars=type_params, + all_declared_type_params_names=all_type_params_names, + ) + if not res: + return False + if not self.is_func_scope(): + # Only marking incomplete for top-level placeholders makes recursive aliases like + # `A = Sequence[str | A]` valid here, similar to how we treat base classes in class + # definitions, allowing `class str(Sequence[str]): ...` + incomplete_target = isinstance(res, ProperType) and isinstance( + res, PlaceholderType + ) + else: + incomplete_target = has_placeholder(res) + if self.found_incomplete_ref(tag) or incomplete_target: + # Since we have got here, we know this must be a type alias (incomplete refs + # may appear in nested positions), therefore use becomes_typeinfo=True. + self.mark_incomplete(lvalue.name, rvalue, becomes_typeinfo=True) + return True + self.add_type_alias_deps(depends_on) + check_for_explicit_any(res, self.options, self.is_typeshed_stub_file, self.msg, context=s) + # When this type alias gets "inlined", the Any is not explicit anymore, + # so we need to replace it with non-explicit Anys. + res = make_any_non_explicit(res) + if self.options.disallow_any_unimported and has_any_from_unimported_type(res): + # Only show error message once, when the type is fully analyzed. + if not has_placeholder(res): + self.msg.unimported_type_becomes_any("Type alias target", res, s) + res = make_any_non_unimported(res) + # Note: with the new (lazy) type alias representation we only need to set no_args to True + # if the expected number of arguments is non-zero, so that aliases like `A = List` work + # but not aliases like `A = TypeAliasType("A", List)` as these need explicit type params. + # However, eagerly expanding aliases like Text = str is a nice performance optimization. + no_args = ( + isinstance(res, ProperType) + and isinstance(res, Instance) + and not res.args + and not empty_tuple_index + and not pep_695 + ) + if isinstance(res, ProperType) and isinstance(res, Instance): + if not validate_instance(res, self.fail, empty_tuple_index): + fix_instance(res, self.fail, self.note, disallow_any=False, options=self.options) + # Aliases defined within functions can't be accessed outside + # the function, since the symbol table will no longer + # exist. Work around by expanding them eagerly when used. + eager = self.is_func_scope() + alias_node = TypeAlias( + res, + self.qualified_name(lvalue.name), + self.cur_mod_id, + s.line, + s.column, + alias_tvars=alias_tvars, + no_args=no_args, + eager=eager, + python_3_12_type_alias=pep_695, + ) + if isinstance(s.rvalue, (IndexExpr, CallExpr, OpExpr)) and ( + not isinstance(rvalue, OpExpr) + or (self.options.python_version >= (3, 10) or self.is_stub_file) + ): + # Note: CallExpr is for "void = type(None)" and OpExpr is for "X | Y" union syntax. + if not isinstance(s.rvalue.analyzed, TypeAliasExpr): + # Any existing node will be updated in-place below. 
+ s.rvalue.analyzed = TypeAliasExpr(alias_node) + s.rvalue.analyzed.line = s.line + # we use the column from resulting target, to get better location for errors + s.rvalue.analyzed.column = res.column + elif isinstance(s.rvalue, RefExpr): + s.rvalue.is_alias_rvalue = True + + if existing: + # An alias gets updated. + updated = False + if isinstance(existing.node, TypeAlias): + if existing.node.target != res: + # Copy expansion to the existing alias, this matches how we update base classes + # for a TypeInfo _in place_ if there are nested placeholders. + existing.node.target = res + existing.node.alias_tvars = alias_tvars + existing.node.no_args = no_args + updated = True + # Invalidate recursive status cache in case it was previously set. + existing.node._is_recursive = None + else: + # Otherwise just replace existing placeholder with type alias. + existing.node = alias_node + updated = True + if updated: + if self.final_iteration: + self.cannot_resolve_name(lvalue.name, "name", s) + return True + else: + # We need to defer so that this change can get propagated to base classes. + self.defer(s, force_progress=True) + else: + self.add_symbol(lvalue.name, alias_node, s) + if isinstance(rvalue, RefExpr) and isinstance(rvalue.node, TypeAlias): + alias_node.normalized = rvalue.node.normalized + current_node = existing.node if existing else alias_node + assert isinstance(current_node, TypeAlias) + self.disable_invalid_recursive_aliases(s, current_node, s.rvalue) + if self.is_class_scope(): + assert self.type is not None + if self.type.is_protocol: + self.fail("Type aliases are prohibited in protocol bodies", s) + if not lvalue.name[0].isupper(): + self.note("Use variable annotation syntax to define protocol members", s) + return True + + def check_type_alias_type_call(self, rvalue: Expression, *, name: str) -> TypeGuard[CallExpr]: + if not isinstance(rvalue, CallExpr): + return False + + names = ["typing_extensions.TypeAliasType"] + if self.options.python_version >= (3, 12): + names.append("typing.TypeAliasType") + if not refers_to_fullname(rvalue.callee, tuple(names)): + return False + if not self.check_typevarlike_name(rvalue, name, rvalue): + return False + if rvalue.arg_kinds.count(ARG_POS) != 2: + return False + + return True + + def analyze_type_alias_type_params( + self, rvalue: CallExpr + ) -> tuple[TypeVarLikeList, list[str]]: + """Analyze type_params of TypeAliasType. + + Returns declared unbound type variable expressions and a list of all declared type + variable names for error reporting. 
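+
+ Illustrative example, assuming `T = TypeVar("T")`:
+
+ A = TypeAliasType("A", list[T], type_params=(T,))
+
+ returns `T` as a declared type variable and `["T"]` as the list of declared names.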
+ """ + if "type_params" in rvalue.arg_names: + type_params_arg = rvalue.args[rvalue.arg_names.index("type_params")] + if not isinstance(type_params_arg, TupleExpr): + self.fail( + "Tuple literal expected as the type_params argument to TypeAliasType", + type_params_arg, + ) + return [], [] + type_params = type_params_arg.items + else: + return [], [] + + declared_tvars: TypeVarLikeList = [] + all_declared_tvar_names: list[str] = [] # includes bound type variables + have_type_var_tuple = False + for tp_expr in type_params: + if isinstance(tp_expr, StarExpr): + tp_expr.valid = False + self.analyze_type_expr(tp_expr) + try: + base = self.expr_to_unanalyzed_type(tp_expr) + except TypeTranslationError: + continue + if not isinstance(base, UnboundType): + continue + + tag = self.track_incomplete_refs() + tvar = self.analyze_unbound_tvar_impl(base, is_typealias_param=True) + if tvar: + if isinstance(tvar[1], TypeVarTupleExpr): + if have_type_var_tuple: + self.fail( + "Can only use one TypeVarTuple in type_params argument to TypeAliasType", + base, + code=codes.TYPE_VAR, + ) + have_type_var_tuple = True + continue + have_type_var_tuple = True + elif not self.found_incomplete_ref(tag): + sym = self.lookup_qualified(base.name, base) + if sym and isinstance(sym.node, TypeVarLikeExpr): + all_declared_tvar_names.append(sym.node.name) # Error will be reported later + else: + self.fail( + "Free type variable expected in type_params argument to TypeAliasType", + base, + code=codes.TYPE_VAR, + ) + if sym and sym.fullname in UNPACK_TYPE_NAMES: + self.note( + "Don't Unpack type variables in type_params", base, code=codes.TYPE_VAR + ) + continue + if tvar in declared_tvars: + self.fail( + f'Duplicate type variable "{tvar[0]}" in type_params argument to TypeAliasType', + base, + code=codes.TYPE_VAR, + ) + continue + if tvar: + all_declared_tvar_names.append(tvar[0]) + declared_tvars.append(tvar) + return declared_tvars, all_declared_tvar_names + + def disable_invalid_recursive_aliases( + self, s: AssignmentStmt | TypeAliasStmt, current_node: TypeAlias, ctx: Context + ) -> None: + """Prohibit and fix recursive type aliases that are invalid/unsupported.""" + messages = [] + if ( + isinstance(current_node.target, TypeAliasType) + and current_node.target.alias is current_node + ): + # We want to have consistent error messages, but not calling name_not_defined(), + # since it will do a bunch of unrelated things we don't want here. + messages.append( + f'Cannot resolve name "{current_node.name}" (possible cyclic definition)' + ) + elif is_invalid_recursive_alias({current_node}, current_node.target): + target = ( + "tuple" if isinstance(get_proper_type(current_node.target), TupleType) else "union" + ) + messages.append(f"Invalid recursive alias: a {target} item of itself") + if detect_diverging_alias( + current_node, current_node.target, self.lookup_qualified, self.tvar_scope + ): + messages.append("Invalid recursive alias: type variable nesting on right hand side") + if messages: + current_node.target = AnyType(TypeOfAny.from_error) + s.invalid_recursive_alias = True + for msg in messages: + self.fail(msg, ctx) + + def analyze_lvalue( + self, + lval: Lvalue, + nested: bool = False, + explicit_type: bool = False, + is_final: bool = False, + escape_comprehensions: bool = False, + has_explicit_value: bool = False, + is_index_var: bool = False, + ) -> None: + """Analyze an lvalue or assignment target. 
+ + Args: + lval: The target lvalue + nested: If true, the lvalue is within a tuple or list lvalue expression + explicit_type: Assignment has type annotation + escape_comprehensions: If we are inside a comprehension, set the variable + in the enclosing scope instead. This implements + https://www.python.org/dev/peps/pep-0572/#scope-of-the-target + is_index_var: If lval is the index variable in a for loop + """ + if escape_comprehensions: + assert isinstance(lval, NameExpr), "assignment expression target must be NameExpr" + if isinstance(lval, NameExpr): + self.analyze_name_lvalue( + lval, + explicit_type, + is_final, + escape_comprehensions, + has_explicit_value=has_explicit_value, + is_index_var=is_index_var, + ) + elif isinstance(lval, MemberExpr): + self.analyze_member_lvalue(lval, explicit_type, is_final, has_explicit_value) + if explicit_type and not self.is_self_member_ref(lval): + self.fail("Type cannot be declared in assignment to non-self attribute", lval) + elif isinstance(lval, IndexExpr): + if explicit_type: + self.fail("Unexpected type declaration", lval) + lval.accept(self) + elif isinstance(lval, TupleExpr): + self.analyze_tuple_or_list_lvalue(lval, explicit_type) + elif isinstance(lval, StarExpr): + if nested: + self.analyze_lvalue(lval.expr, nested, explicit_type) + else: + self.fail("Starred assignment target must be in a list or tuple", lval) + else: + self.fail("Invalid assignment target", lval) + + def analyze_name_lvalue( + self, + lvalue: NameExpr, + explicit_type: bool, + is_final: bool, + escape_comprehensions: bool, + has_explicit_value: bool, + is_index_var: bool, + ) -> None: + """Analyze an lvalue that targets a name expression. + + Arguments are similar to "analyze_lvalue". + """ + if lvalue.node: + # This has been bound already in a previous iteration. + return + + name = lvalue.name + if self.is_alias_for_final_name(name): + if is_final: + self.fail("Cannot redefine an existing name as final", lvalue) + else: + self.msg.cant_assign_to_final(name, self.type is not None, lvalue) + + kind = self.current_symbol_kind() + names = self.current_symbol_table(escape_comprehensions=escape_comprehensions) + existing = names.get(name) + + outer = self.is_global_or_nonlocal(name) + if ( + kind == MDEF + and isinstance(self.type, TypeInfo) + and self.type.is_enum + and not name.startswith("__") + ): + # Special case: we need to be sure that `Enum` keys are unique. + if existing is not None and not isinstance(existing.node, PlaceholderNode): + self.fail( + 'Attempted to reuse member name "{}" in Enum definition "{}"'.format( + name, self.type.name + ), + lvalue, + ) + + if explicit_type and has_explicit_value: + self.fail("Enum members must be left unannotated", lvalue) + self.note( + "See https://typing.readthedocs.io/en/latest/spec/enums.html#defining-members", + lvalue, + ) + + if (not existing or isinstance(existing.node, PlaceholderNode)) and not outer: + # Define new variable. + var = self.make_name_lvalue_var( + lvalue, kind, not explicit_type, has_explicit_value, is_index_var + ) + added = self.add_symbol(name, var, lvalue, escape_comprehensions=escape_comprehensions) + # Only bind expression if we successfully added name to symbol table. 
+ if added: + lvalue.is_new_def = True + lvalue.is_inferred_def = True + lvalue.kind = kind + lvalue.node = var + if kind == GDEF: + lvalue.fullname = var._fullname + else: + lvalue.fullname = lvalue.name + if self.is_func_scope(): + if unmangle(name) == "_" and not self.options.allow_redefinition_new: + # Special case for assignment to local named '_': always infer 'Any'. + # This isn't needed with --allow-redefinition-new, since arbitrary + # types can be assigned to '_' anyway. + typ = AnyType(TypeOfAny.special_form) + self.store_declared_types(lvalue, typ) + if is_final and self.is_final_redefinition(kind, name): + self.fail("Cannot redefine an existing name as final", lvalue) + else: + self.make_name_lvalue_point_to_existing_def(lvalue, explicit_type, is_final) + + def is_final_redefinition(self, kind: int, name: str) -> bool: + if kind == GDEF: + return self.is_mangled_global(name) and not self.is_initial_mangled_global(name) + elif kind == MDEF and self.type: + return unmangle(name) + "'" in self.type.names + return False + + def is_alias_for_final_name(self, name: str) -> bool: + if self.is_func_scope(): + if not name.endswith("'"): + # Not a mangled name -- can't be an alias + return False + name = unmangle(name) + assert self.locals[-1] is not None, "No locals at function scope" + existing = self.locals[-1].get(name) + return existing is not None and is_final_node(existing.node) + elif self.type is not None: + orig_name = unmangle(name) + "'" + if name == orig_name: + return False + existing = self.type.names.get(orig_name) + return existing is not None and is_final_node(existing.node) + else: + orig_name = unmangle(name) + "'" + if name == orig_name: + return False + existing = self.globals.get(orig_name) + return existing is not None and is_final_node(existing.node) + + def make_name_lvalue_var( + self, + lvalue: NameExpr, + kind: int, + inferred: bool, + has_explicit_value: bool, + is_index_var: bool, + ) -> Var: + """Return a Var node for an lvalue that is a name expression.""" + name = lvalue.name + v = Var(name) + v.set_line(lvalue) + v.is_inferred = inferred + if kind == MDEF: + assert self.type is not None + v.info = self.type + v.is_initialized_in_class = True + v.allow_incompatible_override = name in ALLOW_INCOMPATIBLE_OVERRIDE + if kind != LDEF: + v._fullname = self.qualified_name(name) + else: + # fullname should never stay None + v._fullname = name + v.is_ready = False # Type not inferred yet + v.has_explicit_value = has_explicit_value + v.is_index_var = is_index_var + return v + + def make_name_lvalue_point_to_existing_def( + self, lval: NameExpr, explicit_type: bool, is_final: bool + ) -> None: + """Update an lvalue to point to existing definition in the same scope. + + Arguments are similar to "analyze_lvalue". + + Assume that an existing name exists. + """ + if is_final: + # Redefining an existing name with final is always an error. + self.fail("Cannot redefine an existing name as final", lval) + original_def = self.lookup(lval.name, lval, suppress_errors=True) + if original_def is None and self.type and not self.is_func_scope(): + # Workaround to allow "x, x = ..." in class body. + original_def = self.type.get(lval.name) + if explicit_type: + # Don't re-bind if there is a type annotation. + self.name_already_defined(lval.name, lval, original_def) + else: + # Bind to an existing name. 
+ if original_def: + self.bind_name_expr(lval, original_def) + else: + self.name_not_defined(lval.name, lval) + self.check_lvalue_validity(lval.node, lval) + + def analyze_tuple_or_list_lvalue(self, lval: TupleExpr, explicit_type: bool = False) -> None: + """Analyze an lvalue or assignment target that is a list or tuple.""" + items = lval.items + star_exprs = [item for item in items if isinstance(item, StarExpr)] + + if len(star_exprs) > 1: + self.fail("Two starred expressions in assignment", lval) + else: + if len(star_exprs) == 1: + star_exprs[0].valid = True + for i in items: + self.analyze_lvalue( + lval=i, + nested=True, + explicit_type=explicit_type, + # Lists and tuples always have explicit values defined: + # `a, b, c = value` + has_explicit_value=True, + ) + + def analyze_member_lvalue( + self, lval: MemberExpr, explicit_type: bool, is_final: bool, has_explicit_value: bool + ) -> None: + """Analyze lvalue that is a member expression. + + Arguments: + lval: The target lvalue + explicit_type: Assignment has type annotation + is_final: Is the target final + """ + if lval.node: + # This has been bound already in a previous iteration. + return + lval.accept(self) + if self.is_self_member_ref(lval): + assert self.type, "Self member outside a class" + cur_node = self.type.names.get(lval.name) + node = self.type.get(lval.name) + if cur_node and is_final: + # Overrides will be checked in type checker. + self.fail("Cannot redefine an existing name as final", lval) + # On first encounter with this definition, if this attribute was defined before + # with an inferred type and it's marked with an explicit type now, give an error. + if ( + not lval.node + and cur_node + and isinstance(cur_node.node, Var) + and cur_node.node.is_inferred + and explicit_type + ): + self.attribute_already_defined(lval.name, lval, cur_node) + if self.type.is_protocol and has_explicit_value and cur_node is not None: + # Make this variable non-abstract, it would be safer to do this only if we + # are inside __init__, but we do this always to preserve historical behaviour. + if isinstance(cur_node.node, Var): + cur_node.node.is_abstract_var = False + if ( + # If the attribute of self is not defined, create a new Var, ... + node is None + # ... or if it is defined as abstract in a *superclass*. + or (cur_node is None and isinstance(node.node, Var) and node.node.is_abstract_var) + # ... also an explicit declaration on self also creates a new Var. + # Note that `explicit_type` might have been erased for bare `Final`, + # so we also check if `is_final` is passed. + or (cur_node is None and (explicit_type or is_final)) + ): + if self.type.is_protocol and node is None: + self.fail("Protocol members cannot be defined via assignment to self", lval) + else: + # Implicit attribute definition in __init__. + lval.is_new_def = True + lval.is_inferred_def = True + v = Var(lval.name) + v.set_line(lval) + v._fullname = self.qualified_name(lval.name) + v.info = self.type + v.is_ready = False + v.explicit_self_type = explicit_type or is_final + lval.def_var = v + lval.node = v + # TODO: should we also set lval.kind = MDEF? 
+ self.type.names[lval.name] = SymbolTableNode(MDEF, v, implicit=True) + for func in self.scope.functions: + if isinstance(func, FuncDef): + func.has_self_attr_def = True + self.check_lvalue_validity(lval.node, lval) + + def is_self_member_ref(self, memberexpr: MemberExpr) -> bool: + """Does memberexpr to refer to an attribute of self?""" + if not isinstance(memberexpr.expr, NameExpr): + return False + node = memberexpr.expr.node + return isinstance(node, Var) and node.is_self + + def check_lvalue_validity(self, node: Expression | SymbolNode | None, ctx: Context) -> None: + if isinstance(node, TypeVarExpr): + self.fail("Invalid assignment target", ctx) + elif isinstance(node, TypeInfo): + self.fail(message_registry.CANNOT_ASSIGN_TO_TYPE, ctx) + + def store_declared_types(self, lvalue: Lvalue, typ: Type) -> None: + if isinstance(lvalue, RefExpr): + lvalue.is_inferred_def = False + if isinstance(lvalue.node, Var): + var = lvalue.node + var.type = typ + var.is_ready = True + typ = get_proper_type(typ) + if ( + var.is_final + and isinstance(typ, Instance) + and typ.last_known_value + and (not self.type or not self.type.is_enum) + ): + var.final_value = typ.last_known_value.value + # If node is not a variable, we'll catch it elsewhere. + elif isinstance(lvalue, TupleExpr): + typ = get_proper_type(typ) + if isinstance(typ, TupleType): + if len(lvalue.items) != len(typ.items): + self.fail("Incompatible number of tuple items", lvalue) + return + for item, itemtype in zip(lvalue.items, typ.items): + self.store_declared_types(item, itemtype) + else: + self.fail("Tuple type expected for multiple variables", lvalue) + elif isinstance(lvalue, StarExpr): + # Historical behavior for the old parser + self.store_declared_types(lvalue.expr, typ) + else: + # This has been flagged elsewhere as an error, so just ignore here. + pass + + def process_typevar_declaration(self, s: AssignmentStmt) -> bool: + """Check if s declares a TypeVar; it yes, store it in symbol table. + + Return True if this looks like a type variable declaration (but maybe + with errors), otherwise return False. + """ + call = self.get_typevarlike_declaration(s, ("typing.TypeVar", "typing_extensions.TypeVar")) + if not call: + return False + + name = self.extract_typevarlike_name(s, call) + if name is None: + return False + + # Constraining types + n_values = call.arg_kinds[1:].count(ARG_POS) + values = self.analyze_value_types(call.args[1 : 1 + n_values]) + + res = self.process_typevar_parameters( + call.args[1 + n_values :], + call.arg_names[1 + n_values :], + call.arg_kinds[1 + n_values :], + n_values, + s, + ) + if res is None: + return False + variance, upper_bound, default = res + + existing = self.current_symbol_table().get(name) + if existing and not ( + isinstance(existing.node, PlaceholderNode) + or + # Also give error for another type variable with the same name. 
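+            # For example, a second `T = TypeVar("T")` for an already defined `T` in the
+            # same scope is rejected below with 'Cannot redefine "T" as a type variable'.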
+ (isinstance(existing.node, TypeVarExpr) and existing.node is call.analyzed) + ): + self.fail(f'Cannot redefine "{name}" as a type variable', s) + return False + + if self.options.disallow_any_unimported: + for idx, constraint in enumerate(values, start=1): + if has_any_from_unimported_type(constraint): + prefix = f"Constraint {idx}" + self.msg.unimported_type_becomes_any(prefix, constraint, s) + + if has_any_from_unimported_type(upper_bound): + prefix = "Upper bound of type variable" + self.msg.unimported_type_becomes_any(prefix, upper_bound, s) + + for t in values + [upper_bound, default]: + check_for_explicit_any( + t, self.options, self.is_typeshed_stub_file, self.msg, context=s + ) + + # mypyc suppresses making copies of a function to check each + # possible type, so set the upper bound to Any to prevent that + # from causing errors. + if values and self.options.mypyc: + upper_bound = AnyType(TypeOfAny.implementation_artifact) + + # Yes, it's a valid type variable definition! Add it to the symbol table. + if not call.analyzed: + type_var = TypeVarExpr( + name, self.qualified_name(name), values, upper_bound, default, variance + ) + type_var.line = call.line + call.analyzed = type_var + updated = True + else: + assert isinstance(call.analyzed, TypeVarExpr) + updated = ( + values != call.analyzed.values + or upper_bound != call.analyzed.upper_bound + or default != call.analyzed.default + ) + call.analyzed.upper_bound = upper_bound + call.analyzed.values = values + call.analyzed.default = default + if any(has_placeholder(v) for v in values): + self.process_placeholder(None, "TypeVar values", s, force_progress=updated) + elif has_placeholder(upper_bound): + self.process_placeholder(None, "TypeVar upper bound", s, force_progress=updated) + elif has_placeholder(default): + self.process_placeholder(None, "TypeVar default", s, force_progress=updated) + + self.add_symbol(name, call.analyzed, s) + return True + + def check_typevar_default(self, default: Type, context: Context) -> Type: + typ = get_proper_type(default) + if isinstance(typ, AnyType) and typ.is_from_error: + self.fail( + message_registry.TYPEVAR_ARG_MUST_BE_TYPE.format("TypeVar", "default"), context + ) + return default + + def check_paramspec_default(self, default: Type, context: Context) -> Type: + typ = get_proper_type(default) + if isinstance(typ, Parameters): + for i, arg_type in enumerate(typ.arg_types): + arg_ptype = get_proper_type(arg_type) + if isinstance(arg_ptype, AnyType) and arg_ptype.is_from_error: + self.fail(f"Argument {i} of ParamSpec default must be a type", context) + elif ( + isinstance(typ, AnyType) + and typ.is_from_error + or not isinstance(typ, (AnyType, UnboundType)) + ): + self.fail( + "The default argument to ParamSpec must be a list expression, ellipsis, or a ParamSpec", + context, + ) + default = AnyType(TypeOfAny.from_error) + return default + + def check_typevartuple_default(self, default: Type, context: Context) -> Type: + typ = get_proper_type(default) + if not isinstance(typ, UnpackType): + self.fail("The default argument to TypeVarTuple must be an Unpacked tuple", context) + default = AnyType(TypeOfAny.from_error) + return default + + def check_typevarlike_name(self, call: CallExpr, name: str, context: Context) -> bool: + """Checks that the name of a TypeVar or ParamSpec matches its variable.""" + name = unmangle(name) + assert isinstance(call.callee, RefExpr) + typevarlike_type = ( + call.callee.name if isinstance(call.callee, NameExpr) else call.callee.fullname + ) + if len(call.args) < 1: + 
self.fail(f"Too few arguments for {typevarlike_type}()", context) + return False + if not isinstance(call.args[0], StrExpr) or call.arg_kinds[0] != ARG_POS: + self.fail(f"{typevarlike_type}() expects a string literal as first argument", context) + return False + elif call.args[0].value != name: + msg = 'String argument 1 "{}" to {}(...) does not match variable name "{}"' + self.fail(msg.format(call.args[0].value, typevarlike_type, name), context) + return False + return True + + def get_typevarlike_declaration( + self, s: AssignmentStmt, typevarlike_types: tuple[str, ...] + ) -> CallExpr | None: + """Returns the call expression if `s` is a declaration of `typevarlike_type` + (TypeVar or ParamSpec), or None otherwise. + """ + if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], NameExpr): + return None + if not isinstance(s.rvalue, CallExpr): + return None + call = s.rvalue + callee = call.callee + if not isinstance(callee, RefExpr): + return None + if callee.fullname not in typevarlike_types: + return None + return call + + def process_typevar_parameters( + self, + args: list[Expression], + names: list[str | None], + kinds: list[ArgKind], + num_values: int, + context: Context, + ) -> tuple[int, Type, Type] | None: + has_values = num_values > 0 + covariant = False + contravariant = False + upper_bound: Type = self.object_type() + default: Type = AnyType(TypeOfAny.from_omitted_generics) + for param_value, param_name, param_kind in zip(args, names, kinds): + if not param_kind.is_named(): + self.fail(message_registry.TYPEVAR_UNEXPECTED_ARGUMENT, context) + return None + if param_name == "covariant": + if isinstance(param_value, NameExpr) and param_value.name in ("True", "False"): + covariant = param_value.name == "True" + else: + self.fail(message_registry.TYPEVAR_VARIANCE_DEF.format("covariant"), context) + return None + elif param_name == "contravariant": + if isinstance(param_value, NameExpr) and param_value.name in ("True", "False"): + contravariant = param_value.name == "True" + else: + self.fail( + message_registry.TYPEVAR_VARIANCE_DEF.format("contravariant"), context + ) + return None + elif param_name == "bound": + if has_values: + self.fail("TypeVar cannot have both values and an upper bound", context) + return None + tv_arg = self.get_typevarlike_argument("TypeVar", param_name, param_value, context) + if tv_arg is None: + return None + upper_bound = tv_arg + elif param_name == "default": + tv_arg = self.get_typevarlike_argument( + "TypeVar", param_name, param_value, context, allow_unbound_tvars=True + ) + default = tv_arg or AnyType(TypeOfAny.from_error) + elif param_name == "values": + # Probably using obsolete syntax with values=(...). Explain the current syntax. + self.fail('TypeVar "values" argument not supported', context) + self.fail( + "Use TypeVar('T', t, ...) 
instead of TypeVar('T', values=(t, ...))", context + ) + return None + else: + self.fail( + f'{message_registry.TYPEVAR_UNEXPECTED_ARGUMENT}: "{param_name}"', context + ) + return None + + if covariant and contravariant: + self.fail("TypeVar cannot be both covariant and contravariant", context) + return None + elif num_values == 1: + self.fail(message_registry.TYPE_VAR_TOO_FEW_CONSTRAINED_TYPES, context) + return None + elif covariant: + variance = COVARIANT + elif contravariant: + variance = CONTRAVARIANT + else: + variance = INVARIANT + return variance, upper_bound, default + + def get_typevarlike_argument( + self, + typevarlike_name: str, + param_name: str, + param_value: Expression, + context: Context, + *, + allow_unbound_tvars: bool = False, + allow_param_spec_literals: bool = False, + allow_unpack: bool = False, + report_invalid_typevar_arg: bool = True, + ) -> ProperType | None: + try: + # We want to use our custom error message below, so we suppress + # the default error message for invalid types here. + analyzed = self.expr_to_analyzed_type( + param_value, + allow_placeholder=True, + report_invalid_types=False, + allow_unbound_tvars=allow_unbound_tvars, + allow_param_spec_literals=allow_param_spec_literals, + allow_unpack=allow_unpack, + ) + if analyzed is None: + # Type variables are special: we need to place them in the symbol table + # soon, even if upper bound is not ready yet. Otherwise, avoiding + # a "deadlock" in this common pattern would be tricky: + # T = TypeVar('T', bound=Custom[Any]) + # class Custom(Generic[T]): + # ... + analyzed = PlaceholderType(None, [], context.line) + typ = get_proper_type(analyzed) + if report_invalid_typevar_arg and isinstance(typ, AnyType) and typ.is_from_error: + self.fail( + message_registry.TYPEVAR_ARG_MUST_BE_TYPE.format(typevarlike_name, param_name), + param_value, + ) + # Note: we do not return 'None' here -- we want to continue + # using the AnyType. + return typ + except TypeTranslationError: + if report_invalid_typevar_arg: + self.fail( + message_registry.TYPEVAR_ARG_MUST_BE_TYPE.format(typevarlike_name, param_name), + param_value, + ) + return None + + def extract_typevarlike_name(self, s: AssignmentStmt, call: CallExpr) -> str | None: + if not call: + return None + + lvalue = s.lvalues[0] + assert isinstance(lvalue, NameExpr) + if s.type: + self.fail("Cannot declare the type of a TypeVar or similar construct", s) + return None + + if not self.check_typevarlike_name(call, lvalue.name, s): + return None + return lvalue.name + + def process_paramspec_declaration(self, s: AssignmentStmt) -> bool: + """Checks if s declares a ParamSpec; if yes, store it in symbol table. + + Return True if this looks like a ParamSpec (maybe with errors), otherwise return False. + + In the future, ParamSpec may accept bounds and variance arguments, in which + case more aggressive sharing of code with process_typevar_declaration should be pursued. 
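+
+        A typical declaration handled here is `P = ParamSpec("P")`, optionally with a
+        `default=...` keyword argument.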
+ """ + call = self.get_typevarlike_declaration( + s, ("typing_extensions.ParamSpec", "typing.ParamSpec") + ) + if not call: + return False + + name = self.extract_typevarlike_name(s, call) + if name is None: + return False + + n_values = call.arg_kinds[1:].count(ARG_POS) + if n_values != 0: + self.fail('Too many positional arguments for "ParamSpec"', s) + + default: Type = AnyType(TypeOfAny.from_omitted_generics) + for param_value, param_name in zip( + call.args[1 + n_values :], call.arg_names[1 + n_values :] + ): + if param_name == "default": + tv_arg = self.get_typevarlike_argument( + "ParamSpec", + param_name, + param_value, + s, + allow_unbound_tvars=True, + allow_param_spec_literals=True, + report_invalid_typevar_arg=False, + ) + default = tv_arg or AnyType(TypeOfAny.from_error) + default = self.check_paramspec_default(default, param_value) + else: + # ParamSpec is different from a regular TypeVar: + # arguments are not semantically valid. But, allowed in runtime. + # So, we need to warn users about possible invalid usage. + self.fail( + "The variance and bound arguments to ParamSpec do not have defined semantics yet", + s, + ) + + # PEP 612 reserves the right to define bound, covariant and contravariant arguments to + # ParamSpec in a later PEP. If and when that happens, we should do something + # on the lines of process_typevar_parameters + + if not call.analyzed: + paramspec_var = ParamSpecExpr( + name, self.qualified_name(name), self.object_type(), default, INVARIANT + ) + paramspec_var.line = call.line + call.analyzed = paramspec_var + updated = True + else: + assert isinstance(call.analyzed, ParamSpecExpr) + updated = default != call.analyzed.default + call.analyzed.default = default + if has_placeholder(default): + self.process_placeholder(None, "ParamSpec default", s, force_progress=updated) + + self.add_symbol(name, call.analyzed, s) + return True + + def process_typevartuple_declaration(self, s: AssignmentStmt) -> bool: + """Checks if s declares a TypeVarTuple; if yes, store it in symbol table. + + Return True if this looks like a TypeVarTuple (maybe with errors), otherwise return False. + """ + call = self.get_typevarlike_declaration( + s, ("typing_extensions.TypeVarTuple", "typing.TypeVarTuple") + ) + if not call: + return False + + n_values = call.arg_kinds[1:].count(ARG_POS) + if n_values != 0: + self.fail('Too many positional arguments for "TypeVarTuple"', s) + + default: Type = AnyType(TypeOfAny.from_omitted_generics) + for param_value, param_name in zip( + call.args[1 + n_values :], call.arg_names[1 + n_values :] + ): + if param_name == "default": + tv_arg = self.get_typevarlike_argument( + "TypeVarTuple", + param_name, + param_value, + s, + allow_unbound_tvars=True, + report_invalid_typevar_arg=False, + allow_unpack=True, + ) + default = tv_arg or AnyType(TypeOfAny.from_error) + default = self.check_typevartuple_default(default, param_value) + else: + self.fail(f'Unexpected keyword argument "{param_name}" for "TypeVarTuple"', s) + + name = self.extract_typevarlike_name(s, call) + if name is None: + return False + + # PEP 646 does not specify the behavior of variance, constraints, or bounds. + if not call.analyzed: + tuple_fallback = self.named_type("builtins.tuple", [self.object_type()]) + typevartuple_var = TypeVarTupleExpr( + name, + self.qualified_name(name), + # Upper bound for *Ts is *tuple[object, ...], it can never be object. 
+ tuple_fallback.copy_modified(), + tuple_fallback, + default, + INVARIANT, + ) + typevartuple_var.line = call.line + call.analyzed = typevartuple_var + updated = True + else: + assert isinstance(call.analyzed, TypeVarTupleExpr) + updated = default != call.analyzed.default + call.analyzed.default = default + if has_placeholder(default): + self.process_placeholder(None, "TypeVarTuple default", s, force_progress=updated) + + self.add_symbol(name, call.analyzed, s) + return True + + def basic_new_typeinfo(self, name: str, basetype_or_fallback: Instance, line: int) -> TypeInfo: + if self.is_func_scope() and not self.type and "@" not in name: + name += "@" + str(line) + class_def = ClassDef(name, Block([])) + if self.is_func_scope() and not self.type: + # Full names of generated classes should always be prefixed with the module names + # even if they are nested in a function, since these classes will be (de-)serialized. + # (Note that the caller should append @line to the name to avoid collisions.) + # TODO: clean this up, see #6422. + class_def.fullname = self.cur_mod_id + "." + self.qualified_name(name) + else: + class_def.fullname = self.qualified_name(name) + + info = TypeInfo(SymbolTable(), class_def, self.cur_mod_id) + class_def.info = info + mro = basetype_or_fallback.type.mro + if not mro: + # Probably an error, we should not crash so generate something meaningful. + mro = [basetype_or_fallback.type, self.object_type().type] + info.mro = [info] + mro + info.bases = [basetype_or_fallback] + return info + + def analyze_value_types(self, items: list[Expression]) -> list[Type]: + """Analyze types from values expressions in type variable definition.""" + result: list[Type] = [] + for node in items: + try: + analyzed = self.anal_type( + self.expr_to_unanalyzed_type(node), allow_placeholder=True + ) + if analyzed is None: + # Type variables are special: we need to place them in the symbol table + # soon, even if some value is not ready yet, see process_typevar_parameters() + # for an example. 
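+                    # (The "values" handled here are the constraints of a constrained TypeVar,
+                    # e.g. str and bytes in `AnyStr = TypeVar("AnyStr", str, bytes)`.)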
+ analyzed = PlaceholderType(None, [], node.line) + if has_type_vars(analyzed): + self.fail(message_registry.TYPE_VAR_GENERIC_CONSTRAINT_TYPE, node) + result.append(AnyType(TypeOfAny.from_error)) + else: + result.append(analyzed) + except TypeTranslationError: + self.fail("Type expected", node) + result.append(AnyType(TypeOfAny.from_error)) + return result + + def check_classvar(self, s: AssignmentStmt) -> None: + """Check if assignment defines a class variable.""" + lvalue = s.lvalues[0] + if len(s.lvalues) != 1 or not isinstance(lvalue, RefExpr): + return + if not s.type or not self.is_classvar(s.type): + return + assert isinstance(s.type, UnboundType) + if self.is_class_scope() and isinstance(lvalue, NameExpr): + node = lvalue.node + if isinstance(node, Var): + node.is_classvar = True + analyzed = self.anal_type(s.type) + assert self.type is not None + if ( + analyzed is not None + and self.type.self_type in get_type_vars(analyzed) + and self.type.defn.type_vars + ): + self.fail(message_registry.CLASS_VAR_WITH_GENERIC_SELF, s) + elif not isinstance(lvalue, MemberExpr) or self.is_self_member_ref(lvalue): + # In case of member access, report error only when assigning to self + # Other kinds of member assignments should be already reported + self.fail_invalid_classvar(lvalue) + if not s.type.args: + if isinstance(s.rvalue, TempNode) and s.rvalue.no_rhs: + if self.options.disallow_any_generics: + self.fail("ClassVar without type argument becomes Any", s, code=codes.TYPE_ARG) + return + s.type = None + + def is_classvar(self, typ: Type) -> bool: + if not isinstance(typ, UnboundType): + return False + sym = self.lookup_qualified(typ.name, typ) + if not sym or not sym.node: + return False + return sym.node.fullname == "typing.ClassVar" + + def is_final_type(self, typ: Type | None) -> bool: + if not isinstance(typ, UnboundType): + return False + sym = self.lookup_qualified(typ.name, typ) + if not sym or not sym.node: + return False + return sym.node.fullname in FINAL_TYPE_NAMES + + def fail_invalid_classvar(self, context: Context) -> None: + self.fail(message_registry.CLASS_VAR_OUTSIDE_OF_CLASS, context) + + def process_module_assignment( + self, lvals: list[Lvalue], rval: Expression, ctx: AssignmentStmt + ) -> None: + """Propagate module references across assignments. + + Recursively handles the simple form of iterable unpacking; doesn't + handle advanced unpacking with *rest, dictionary unpacking, etc. + + In an expression like x = y = z, z is the rval and lvals will be [x, + y]. + + """ + if isinstance(rval, (TupleExpr, ListExpr)) and all( + isinstance(v, TupleExpr) for v in lvals + ): + # rval and all lvals are either list or tuple, so we are dealing + # with unpacking assignment like `x, y = a, b`. Mypy didn't + # understand our all(isinstance(...)), so cast them as TupleExpr + # so mypy knows it is safe to access their .items attribute. + seq_lvals = cast(list[TupleExpr], lvals) + # given an assignment like: + # (x, y) = (m, n) = (a, b) + # we now have: + # seq_lvals = [(x, y), (m, n)] + # seq_rval = (a, b) + # We now zip this into: + # elementwise_assignments = [(a, x, m), (b, y, n)] + # where each elementwise assignment includes one element of rval and the + # corresponding element of each lval. Basically we unpack + # (x, y) = (m, n) = (a, b) + # into elementwise assignments + # x = m = a + # y = n = b + # and then we recursively call this method for each of those assignments. 
+ # If the rval and all lvals are not all of the same length, zip will just ignore + # extra elements, so no error will be raised here; mypy will later complain + # about the length mismatch in type-checking. + elementwise_assignments = zip(rval.items, *[v.items for v in seq_lvals]) + for rv, *lvs in elementwise_assignments: + self.process_module_assignment(lvs, rv, ctx) + elif isinstance(rval, RefExpr): + rnode = self.lookup_type_node(rval) + if rnode and isinstance(rnode.node, MypyFile): + for lval in lvals: + if not isinstance(lval, RefExpr): + continue + # respect explicitly annotated type + if isinstance(lval.node, Var) and lval.node.type is not None: + continue + + # We can handle these assignments to locals and to self + if isinstance(lval, NameExpr): + lnode = self.current_symbol_table().get(lval.name) + elif isinstance(lval, MemberExpr) and self.is_self_member_ref(lval): + assert self.type is not None + lnode = self.type.names.get(lval.name) + else: + continue + + if lnode: + if isinstance(lnode.node, MypyFile) and lnode.node is not rnode.node: + assert isinstance(lval, (NameExpr, MemberExpr)) + self.fail( + 'Cannot assign multiple modules to name "{}" ' + 'without explicit "types.ModuleType" annotation'.format(lval.name), + ctx, + ) + # never create module alias except on initial var definition + elif lval.is_inferred_def: + assert rnode.node is not None + lnode.node = rnode.node + + def process__all__(self, s: AssignmentStmt) -> None: + """Export names if argument is a __all__ assignment.""" + if ( + len(s.lvalues) == 1 + and isinstance(s.lvalues[0], NameExpr) + and s.lvalues[0].name == "__all__" + and s.lvalues[0].kind == GDEF + and isinstance(s.rvalue, (ListExpr, TupleExpr)) + ): + self.add_exports(s.rvalue.items) + + def process__deletable__(self, s: AssignmentStmt) -> None: + if not self.options.mypyc: + return + if ( + len(s.lvalues) == 1 + and isinstance(s.lvalues[0], NameExpr) + and s.lvalues[0].name == "__deletable__" + and s.lvalues[0].kind == MDEF + ): + rvalue = s.rvalue + if not isinstance(rvalue, (ListExpr, TupleExpr)): + self.fail('"__deletable__" must be initialized with a list or tuple expression', s) + return + items = rvalue.items + attrs = [] + for item in items: + if not isinstance(item, StrExpr): + self.fail('Invalid "__deletable__" item; string literal expected', item) + else: + attrs.append(item.value) + assert self.type + self.type.deletable_attributes = attrs + + def process__slots__(self, s: AssignmentStmt) -> None: + """ + Processing ``__slots__`` if defined in type. + + See: https://docs.python.org/3/reference/datamodel.html#slots + """ + # Later we can support `__slots__` defined as `__slots__ = other = ('a', 'b')` + if ( + isinstance(self.type, TypeInfo) + and len(s.lvalues) == 1 + and isinstance(s.lvalues[0], NameExpr) + and s.lvalues[0].name == "__slots__" + and s.lvalues[0].kind == MDEF + ): + # We understand `__slots__` defined as string, tuple, list, set, and dict: + if not isinstance(s.rvalue, (StrExpr, ListExpr, TupleExpr, SetExpr, DictExpr)): + # For example, `__slots__` can be defined as a variable, + # we don't support it for now. + return + + if any(p.slots is None for p in self.type.mro[1:-1]): + # At least one type in mro (excluding `self` and `object`) + # does not have concrete `__slots__` defined. Ignoring. 
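+                # For example, a base class whose `__slots__` is assigned from a variable has
+                # `slots is None` here, so slot checking is skipped for this class as well.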
+ return + + concrete_slots = True + rvalue: list[Expression] = [] + if isinstance(s.rvalue, StrExpr): + rvalue.append(s.rvalue) + elif isinstance(s.rvalue, (ListExpr, TupleExpr, SetExpr)): + rvalue.extend(s.rvalue.items) + else: + # We have a special treatment of `dict` with possible `{**kwargs}` usage. + # In this case we consider all `__slots__` to be non-concrete. + for key, _ in s.rvalue.items: + if concrete_slots and key is not None: + rvalue.append(key) + else: + concrete_slots = False + + slots = [] + for item in rvalue: + # Special case for `'__dict__'` value: + # when specified it will still allow any attribute assignment. + if isinstance(item, StrExpr) and item.value != "__dict__": + slots.append(item.value) + else: + concrete_slots = False + if not concrete_slots: + # Some slot items are dynamic, we don't want any false positives, + # so, we just pretend that this type does not have any slots at all. + return + + # We need to copy all slots from super types: + for super_type in self.type.mro[1:-1]: + assert super_type.slots is not None + slots.extend(super_type.slots) + self.type.slots = set(slots) + + # + # Misc statements + # + + def visit_block(self, b: Block) -> None: + if b.is_unreachable: + return + self.block_depth[-1] += 1 + for s in b.body: + self.accept(s) + self.block_depth[-1] -= 1 + + def visit_block_maybe(self, b: Block | None) -> None: + if b: + self.visit_block(b) + + def visit_expression_stmt(self, s: ExpressionStmt) -> None: + self.statement = s + s.expr.accept(self) + + def visit_return_stmt(self, s: ReturnStmt) -> None: + old = self.statement + self.statement = s + if not self.is_func_scope(): + self.fail('"return" outside function', s) + if self.return_stmt_inside_except_star_block: + self.fail('"return" not allowed in except* block', s, serious=True) + if s.expr: + s.expr.accept(self) + if TYPE_FORM in self.options.enable_incomplete_feature: + self.try_parse_as_type_expression(s.expr) + self.statement = old + + def visit_raise_stmt(self, s: RaiseStmt) -> None: + self.statement = s + if s.expr: + s.expr.accept(self) + if s.from_expr: + s.from_expr.accept(self) + + def visit_assert_stmt(self, s: AssertStmt) -> None: + self.statement = s + if s.expr: + s.expr.accept(self) + if s.msg: + s.msg.accept(self) + + def visit_operator_assignment_stmt(self, s: OperatorAssignmentStmt) -> None: + self.statement = s + s.lvalue.accept(self) + s.rvalue.accept(self) + if ( + isinstance(s.lvalue, NameExpr) + and s.lvalue.name == "__all__" + and s.lvalue.kind == GDEF + and isinstance(s.rvalue, (ListExpr, TupleExpr)) + ): + self.add_exports(s.rvalue.items) + + def visit_while_stmt(self, s: WhileStmt) -> None: + self.statement = s + s.expr.accept(self) + self.loop_depth[-1] += 1 + with self.inside_except_star_block_set(value=False, entering_loop=True): + s.body.accept(self) + self.loop_depth[-1] -= 1 + self.visit_block_maybe(s.else_body) + + def visit_for_stmt(self, s: ForStmt) -> None: + if s.is_async: + if not self.is_func_scope() or not self.function_stack[-1].is_coroutine: + self.fail(message_registry.ASYNC_FOR_OUTSIDE_COROUTINE, s, code=codes.SYNTAX) + + self.statement = s + s.expr.accept(self) + + # Bind index variables and check if they define new names. 
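+        # For example, in `for key, value in items:` both `key` and `value` are
+        # analyzed as (possibly new) index-variable lvalues here.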
+ self.analyze_lvalue(s.index, explicit_type=s.index_type is not None, is_index_var=True) + if s.index_type: + if self.is_classvar(s.index_type): + self.fail_invalid_classvar(s.index) + allow_tuple_literal = isinstance(s.index, TupleExpr) + analyzed = self.anal_type(s.index_type, allow_tuple_literal=allow_tuple_literal) + if analyzed is not None: + self.store_declared_types(s.index, analyzed) + s.index_type = analyzed + + self.loop_depth[-1] += 1 + with self.inside_except_star_block_set(value=False, entering_loop=True): + self.visit_block(s.body) + self.loop_depth[-1] -= 1 + self.visit_block_maybe(s.else_body) + + def visit_break_stmt(self, s: BreakStmt) -> None: + self.statement = s + if self.loop_depth[-1] == 0: + self.fail('"break" outside loop', s, serious=True, blocker=True) + if self.inside_except_star_block: + self.fail('"break" not allowed in except* block', s, serious=True) + + def visit_continue_stmt(self, s: ContinueStmt) -> None: + self.statement = s + if self.loop_depth[-1] == 0: + self.fail('"continue" outside loop', s, serious=True, blocker=True) + if self.inside_except_star_block: + self.fail('"continue" not allowed in except* block', s, serious=True) + + def visit_if_stmt(self, s: IfStmt) -> None: + self.statement = s + infer_reachability_of_if_statement(s, self.options) + for i in range(len(s.expr)): + s.expr[i].accept(self) + self.visit_block(s.body[i]) + self.visit_block_maybe(s.else_body) + + def visit_try_stmt(self, s: TryStmt) -> None: + self.statement = s + self.analyze_try_stmt(s, self) + + def analyze_try_stmt(self, s: TryStmt, visitor: NodeVisitor[None]) -> None: + s.body.accept(visitor) + for type, var, handler in zip(s.types, s.vars, s.handlers): + if type: + type.accept(visitor) + if var: + self.analyze_lvalue(var) + with self.inside_except_star_block_set(self.inside_except_star_block or s.is_star): + handler.accept(visitor) + if s.else_body: + s.else_body.accept(visitor) + if s.finally_body: + s.finally_body.accept(visitor) + + def visit_with_stmt(self, s: WithStmt) -> None: + self.statement = s + types: list[Type] = [] + + if s.is_async: + if not self.is_func_scope() or not self.function_stack[-1].is_coroutine: + self.fail(message_registry.ASYNC_WITH_OUTSIDE_COROUTINE, s, code=codes.SYNTAX) + + if s.unanalyzed_type: + assert isinstance(s.unanalyzed_type, ProperType) + actual_targets = [t for t in s.target if t is not None] + if len(actual_targets) == 0: + # We have a type for no targets + self.fail('Invalid type comment: "with" statement has no targets', s) + elif len(actual_targets) == 1: + # We have one target and one type + types = [s.unanalyzed_type] + elif isinstance(s.unanalyzed_type, TupleType): + # We have multiple targets and multiple types + if len(actual_targets) == len(s.unanalyzed_type.items): + types = s.unanalyzed_type.items.copy() + else: + # But it's the wrong number of items + self.fail('Incompatible number of types for "with" targets', s) + else: + # We have multiple targets and one type + self.fail('Multiple types expected for multiple "with" targets', s) + + new_types: list[Type] = [] + for e, n in zip(s.expr, s.target): + e.accept(self) + if n: + self.analyze_lvalue(n, explicit_type=s.unanalyzed_type is not None) + + # Since we have a target, pop the next type from types + if types: + t = types.pop(0) + if self.is_classvar(t): + self.fail_invalid_classvar(n) + allow_tuple_literal = isinstance(n, TupleExpr) + analyzed = self.anal_type(t, allow_tuple_literal=allow_tuple_literal) + if analyzed is not None: + # TODO: Deal with this better + 
new_types.append(analyzed) + self.store_declared_types(n, analyzed) + + s.analyzed_types = new_types + + self.visit_block(s.body) + + def visit_del_stmt(self, s: DelStmt) -> None: + self.statement = s + s.expr.accept(self) + if not self.is_valid_del_target(s.expr): + self.fail("Invalid delete target", s) + + def is_valid_del_target(self, s: Expression) -> bool: + if isinstance(s, (IndexExpr, NameExpr, MemberExpr)): + return True + elif isinstance(s, (TupleExpr, ListExpr)): + return all(self.is_valid_del_target(item) for item in s.items) + else: + return False + + def visit_global_decl(self, g: GlobalDecl) -> None: + self.statement = g + for name in g.names: + if name in self.nonlocal_decls[-1]: + self.fail(f'Name "{name}" is nonlocal and global', g) + self.global_decls[-1].add(name) + + def visit_nonlocal_decl(self, d: NonlocalDecl) -> None: + self.statement = d + if self.is_module_scope(): + self.fail("nonlocal declaration not allowed at module level", d) + else: + for name in d.names: + for table, scope_type in zip( + reversed(self.locals[:-1]), reversed(self.scope_stack[:-1]) + ): + if table is not None and name in table: + if scope_type == SCOPE_ANNOTATION: + self.fail( + f'nonlocal binding not allowed for type parameter "{name}"', d + ) + break + else: + self.fail(f'No binding for nonlocal "{name}" found', d) + + if self.locals[-1] is not None and name in self.locals[-1]: + self.fail( + 'Name "{}" is already defined in local ' + "scope before nonlocal declaration".format(name), + d, + ) + + if name in self.global_decls[-1]: + self.fail(f'Name "{name}" is nonlocal and global', d) + self.nonlocal_decls[-1].add(name) + + def visit_match_stmt(self, s: MatchStmt) -> None: + self.statement = s + infer_reachability_of_match_statement(s, self.options) + s.subject.accept(self) + for i in range(len(s.patterns)): + s.patterns[i].accept(self) + guard = s.guards[i] + if guard is not None: + guard.accept(self) + self.visit_block(s.bodies[i]) + + def visit_type_alias_stmt(self, s: TypeAliasStmt) -> None: + if s.invalid_recursive_alias: + return + self.statement = s + type_params = self.push_type_args(s.type_args, s) + if type_params is None: + self.defer(s) + return + all_type_params_names = [p.name for p in s.type_args] + + try: + existing = self.current_symbol_table().get(s.name.name) + if existing and not ( + isinstance(existing.node, TypeAlias) + or (isinstance(existing.node, PlaceholderNode) and existing.node.line == s.line) + ): + self.already_defined(s.name.name, s, existing, "Name") + return + + tag = self.track_incomplete_refs() + res, alias_tvars, depends_on, empty_tuple_index = self.analyze_alias( + s.name.name, + s.value.expr(), + allow_placeholder=True, + declared_type_vars=type_params, + all_declared_type_params_names=all_type_params_names, + python_3_12_type_alias=True, + ) + if not res: + res = AnyType(TypeOfAny.from_error) + + if not self.is_func_scope(): + # Only marking incomplete for top-level placeholders makes recursive aliases like + # `A = Sequence[str | A]` valid here, similar to how we treat base classes in class + # definitions, allowing `class str(Sequence[str]): ...` + incomplete_target = isinstance(res, ProperType) and isinstance( + res, PlaceholderType + ) + else: + incomplete_target = has_placeholder(res) + + if self.found_incomplete_ref(tag) or incomplete_target: + # Since we have got here, we know this must be a type alias (incomplete refs + # may appear in nested positions), therefore use becomes_typeinfo=True. 
+ self.mark_incomplete(s.name.name, s.value, becomes_typeinfo=True) + return + + # Now go through all new variables and temporary replace all tvars that still + # refer to some placeholders. We defer the whole alias and will revisit it again, + # as well as all its dependents. + for i, tv in enumerate(alias_tvars): + if has_placeholder(tv): + self.mark_incomplete(s.name.name, s.value, becomes_typeinfo=True) + alias_tvars[i] = self._trivial_typevarlike_like(tv) + + self.add_type_alias_deps(depends_on) + check_for_explicit_any( + res, self.options, self.is_typeshed_stub_file, self.msg, context=s + ) + # When this type alias gets "inlined", the Any is not explicit anymore, + # so we need to replace it with non-explicit Anys. + res = make_any_non_explicit(res) + if self.options.disallow_any_unimported and has_any_from_unimported_type(res): + self.msg.unimported_type_becomes_any("Type alias target", res, s) + res = make_any_non_unimported(res) + eager = self.is_func_scope() + if isinstance(res, ProperType) and isinstance(res, Instance): + fix_instance(res, self.fail, self.note, disallow_any=False, options=self.options) + alias_node = TypeAlias( + res, + self.qualified_name(s.name.name), + self.cur_mod_id, + s.line, + s.column, + alias_tvars=alias_tvars, + no_args=False, + eager=eager, + python_3_12_type_alias=True, + ) + s.alias_node = alias_node + + if ( + existing + and isinstance(existing.node, (PlaceholderNode, TypeAlias)) + and existing.node.line == s.line + ): + updated = False + if isinstance(existing.node, TypeAlias): + if ( + existing.node.target != res + or existing.node.alias_tvars != alias_node.alias_tvars + ): + # Copy expansion to the existing alias, this matches how we update base classes + # for a TypeInfo _in place_ if there are nested placeholders. + existing.node.target = res + existing.node.alias_tvars = alias_tvars + updated = True + # Invalidate recursive status cache in case it was previously set. + existing.node._is_recursive = None + else: + # Otherwise just replace existing placeholder with type alias. + existing.node = alias_node + updated = True + + if updated: + if self.final_iteration: + self.cannot_resolve_name(s.name.name, "name", s) + return + else: + # We need to defer so that this change can get propagated to base classes. 
+ self.defer(s, force_progress=True) + else: + self.add_symbol(s.name.name, alias_node, s) + + current_node = existing.node if existing else alias_node + assert isinstance(current_node, TypeAlias) + self.disable_invalid_recursive_aliases(s, current_node, s.value) + s.name.accept(self) + finally: + self.pop_type_args(s.type_args) + + def _trivial_typevarlike_like(self, tv: TypeVarLikeType) -> TypeVarLikeType: + object_type = self.named_type("builtins.object") + if isinstance(tv, TypeVarType): + return TypeVarType( + tv.name, + tv.fullname, + tv.id, + values=[], + upper_bound=object_type, + default=AnyType(TypeOfAny.from_omitted_generics), + variance=tv.variance, + line=tv.line, + column=tv.column, + ) + elif isinstance(tv, TypeVarTupleType): + tuple_type = self.named_type("builtins.tuple", [object_type]) + return TypeVarTupleType( + tv.name, + tv.fullname, + tv.id, + upper_bound=tuple_type, + tuple_fallback=tuple_type, + default=AnyType(TypeOfAny.from_omitted_generics), + line=tv.line, + column=tv.column, + ) + elif isinstance(tv, ParamSpecType): + return ParamSpecType( + tv.name, + tv.fullname, + tv.id, + flavor=tv.flavor, + upper_bound=object_type, + default=AnyType(TypeOfAny.from_omitted_generics), + line=tv.line, + column=tv.column, + ) + else: + assert False, f"Unknown TypeVarLike: {tv!r}" + + # + # Expressions + # + + def visit_name_expr(self, expr: NameExpr) -> None: + n = self.lookup(expr.name, expr) + if n: + self.bind_name_expr(expr, n) + + def bind_name_expr(self, expr: NameExpr, sym: SymbolTableNode) -> None: + """Bind name expression to a symbol table node.""" + if ( + isinstance(sym.node, TypeVarExpr) + and self.tvar_scope.get_binding(sym) + and not self.allow_unbound_tvars + ): + self.fail(f'"{expr.name}" is a type variable and only valid in type context', expr) + elif isinstance(sym.node, PlaceholderNode): + self.process_placeholder(expr.name, "name", expr) + else: + expr.kind = sym.kind + expr.node = sym.node + expr.fullname = sym.fullname or "" + + def visit_super_expr(self, expr: SuperExpr) -> None: + if not self.type and not expr.call.args: + self.fail('"super" used outside class', expr) + return + expr.info = self.type + for arg in expr.call.args: + arg.accept(self) + + def visit_tuple_expr(self, expr: TupleExpr) -> None: + for item in expr.items: + if isinstance(item, StarExpr): + item.valid = True + item.accept(self) + + def visit_list_expr(self, expr: ListExpr) -> None: + for item in expr.items: + if isinstance(item, StarExpr): + item.valid = True + item.accept(self) + + def visit_set_expr(self, expr: SetExpr) -> None: + for item in expr.items: + if isinstance(item, StarExpr): + item.valid = True + item.accept(self) + + def visit_dict_expr(self, expr: DictExpr) -> None: + for key, value in expr.items: + if key is not None: + key.accept(self) + value.accept(self) + + def visit_star_expr(self, expr: StarExpr) -> None: + if not expr.valid: + self.fail("can't use starred expression here", expr, blocker=True) + else: + expr.expr.accept(self) + + def visit_yield_from_expr(self, e: YieldFromExpr) -> None: + if not self.is_func_scope(): + self.fail('"yield from" outside function', e, serious=True, blocker=True) + elif self.scope_stack[-1] == SCOPE_COMPREHENSION: + self.fail( + '"yield from" inside comprehension or generator expression', + e, + serious=True, + blocker=True, + ) + elif self.function_stack[-1].is_coroutine: + self.fail('"yield from" in async function', e, serious=True, blocker=True) + else: + self.function_stack[-1].is_generator = True + if e.expr: + 
e.expr.accept(self) + + def visit_call_expr(self, expr: CallExpr) -> None: + """Analyze a call expression. + + Some call expressions are recognized as special forms, including + cast(...). + """ + expr.callee.accept(self) + if refers_to_fullname(expr.callee, "typing.cast"): + # Special form cast(...). + if not self.check_fixed_args(expr, 2, "cast"): + return + # Translate first argument to an unanalyzed type. + try: + target = self.expr_to_unanalyzed_type(expr.args[0]) + except TypeTranslationError: + self.fail("Cast target is not a type", expr) + return + # Piggyback CastExpr object to the CallExpr object; it takes + # precedence over the CallExpr semantics. + expr.analyzed = CastExpr(expr.args[1], target) + expr.analyzed.line = expr.line + expr.analyzed.column = expr.column + expr.analyzed.accept(self) + elif refers_to_fullname(expr.callee, ASSERT_TYPE_NAMES): + if not self.check_fixed_args(expr, 2, "assert_type"): + return + # Translate second argument to an unanalyzed type. + try: + target = self.expr_to_unanalyzed_type(expr.args[1]) + except TypeTranslationError: + self.fail("assert_type() type is not a type", expr) + return + expr.analyzed = AssertTypeExpr(expr.args[0], target) + expr.analyzed.line = expr.line + expr.analyzed.column = expr.column + expr.analyzed.accept(self) + elif refers_to_fullname(expr.callee, REVEAL_TYPE_NAMES): + if not self.check_fixed_args(expr, 1, "reveal_type"): + return + reveal_imported = False + reveal_type_node = self.lookup("reveal_type", expr, suppress_errors=True) + if ( + reveal_type_node + and isinstance(reveal_type_node.node, SYMBOL_FUNCBASE_TYPES) + and reveal_type_node.fullname in IMPORTED_REVEAL_TYPE_NAMES + ): + reveal_imported = True + expr.analyzed = RevealExpr( + kind=REVEAL_TYPE, expr=expr.args[0], is_imported=reveal_imported + ) + expr.analyzed.line = expr.line + expr.analyzed.column = expr.column + expr.analyzed.accept(self) + elif refers_to_fullname(expr.callee, "builtins.reveal_locals"): + # Store the local variable names into the RevealExpr for use in the + # type checking pass + local_nodes: list[Var] = [] + if self.is_module_scope(): + # try to determine just the variable declarations in module scope + # self.globals.values() contains SymbolTableNode's + # Each SymbolTableNode has an attribute node that is nodes.Var + # look for variable nodes that marked as is_inferred + # Each symboltable node has a Var node as .node + local_nodes = [ + n.node + for name, n in self.globals.items() + if getattr(n.node, "is_inferred", False) and isinstance(n.node, Var) + ] + elif self.is_class_scope(): + # type = None # type: Optional[TypeInfo] + if self.type is not None: + local_nodes = [ + st.node for st in self.type.names.values() if isinstance(st.node, Var) + ] + elif self.is_func_scope(): + # locals = None # type: List[Optional[SymbolTable]] + if self.locals is not None: + symbol_table = self.locals[-1] + if symbol_table is not None: + local_nodes = [ + st.node for st in symbol_table.values() if isinstance(st.node, Var) + ] + expr.analyzed = RevealExpr(kind=REVEAL_LOCALS, local_nodes=local_nodes) + expr.analyzed.line = expr.line + expr.analyzed.column = expr.column + expr.analyzed.accept(self) + elif refers_to_fullname(expr.callee, "typing.Any"): + # Special form Any(...) no longer supported. + self.fail("Any(...) is no longer supported. Use cast(Any, ...) instead", expr) + elif refers_to_fullname(expr.callee, "typing._promote"): + # Special form _promote(...). 
+ if not self.check_fixed_args(expr, 1, "_promote"): + return + # Translate first argument to an unanalyzed type. + try: + target = self.expr_to_unanalyzed_type(expr.args[0]) + except TypeTranslationError: + self.fail("Argument 1 to _promote is not a type", expr) + return + expr.analyzed = PromoteExpr(target) + expr.analyzed.line = expr.line + expr.analyzed.accept(self) + elif refers_to_fullname(expr.callee, "builtins.dict") and not ( + isinstance(expr.callee, RefExpr) + and isinstance(expr.callee.node, TypeAlias) + and not expr.callee.node.no_args + ): + expr.analyzed = self.translate_dict_call(expr) + elif refers_to_fullname(expr.callee, "builtins.divmod"): + if not self.check_fixed_args(expr, 2, "divmod"): + return + expr.analyzed = OpExpr("divmod", expr.args[0], expr.args[1]) + expr.analyzed.line = expr.line + expr.analyzed.accept(self) + elif refers_to_fullname( + expr.callee, ("typing.TypeAliasType", "typing_extensions.TypeAliasType") + ): + with self.allow_unbound_tvars_set(): + for a in expr.args: + a.accept(self) + elif refers_to_fullname(expr.callee, ("typing.TypeForm", "typing_extensions.TypeForm")): + # Special form TypeForm(...). + if not self.check_fixed_args(expr, 1, "TypeForm"): + return + # Translate first argument to an unanalyzed type. + try: + typ = self.expr_to_unanalyzed_type(expr.args[0]) + except TypeTranslationError: + self.fail("TypeForm argument is not a type", expr) + # Suppress future error: "" not callable + expr.analyzed = CastExpr(expr.args[0], AnyType(TypeOfAny.from_error)) + return + # Piggyback TypeFormExpr object to the CallExpr object; it takes + # precedence over the CallExpr semantics. + expr.analyzed = TypeFormExpr(typ) + expr.analyzed.line = expr.line + expr.analyzed.column = expr.column + expr.analyzed.accept(self) + else: + # Normal call expression. + calculate_type_forms = TYPE_FORM in self.options.enable_incomplete_feature + for a in expr.args: + a.accept(self) + if calculate_type_forms: + self.try_parse_as_type_expression(a) + + if ( + isinstance(expr.callee, MemberExpr) + and isinstance(expr.callee.expr, NameExpr) + and expr.callee.expr.name == "__all__" + and expr.callee.expr.kind == GDEF + and expr.callee.name in ("append", "extend", "remove") + ): + if expr.callee.name == "append" and expr.args: + self.add_exports(expr.args[0]) + elif ( + expr.callee.name == "extend" + and expr.args + and isinstance(expr.args[0], (ListExpr, TupleExpr)) + ): + self.add_exports(expr.args[0].items) + elif ( + expr.callee.name == "remove" + and expr.args + and isinstance(expr.args[0], StrExpr) + ): + self.all_exports = [n for n in self.all_exports if n != expr.args[0].value] + + def translate_dict_call(self, call: CallExpr) -> DictExpr | None: + """Translate 'dict(x=y, ...)' to {'x': y, ...} and 'dict()' to {}. + + For other variants of dict(...), return None. + """ + if not all(kind in (ARG_NAMED, ARG_STAR2) for kind in call.arg_kinds): + # Must still accept those args. + for a in call.args: + a.accept(self) + return None + expr = DictExpr( + [ + (StrExpr(key) if key is not None else None, value) + for key, value in zip(call.arg_names, call.args) + ] + ) + expr.set_line(call) + expr.accept(self) + return expr + + def check_fixed_args(self, expr: CallExpr, numargs: int, name: str) -> bool: + """Verify that expr has specified number of positional args. + + Return True if the arguments are valid. 
+ """ + s = "s" + if numargs == 1: + s = "" + if len(expr.args) != numargs: + self.fail('"%s" expects %d argument%s' % (name, numargs, s), expr) + return False + if expr.arg_kinds != [ARG_POS] * numargs: + self.fail(f'"{name}" must be called with {numargs} positional argument{s}', expr) + return False + return True + + def visit_member_expr(self, expr: MemberExpr) -> None: + base = expr.expr + base.accept(self) + if isinstance(base, RefExpr) and isinstance(base.node, MypyFile): + # Handle module attribute. + sym = self.get_module_symbol(base.node, expr.name) + if sym: + if isinstance(sym.node, PlaceholderNode): + self.process_placeholder(expr.name, "attribute", expr) + return + self.record_imported_symbol(sym) + expr.kind = sym.kind + expr.fullname = sym.fullname or "" + expr.node = sym.node + elif isinstance(base, RefExpr): + # This branch handles the case C.bar (or cls.bar or self.bar inside + # a classmethod/method), where C is a class and bar is a type + # definition or a module resulting from `import bar` (or a module + # assignment) inside class C. We look up bar in the class' TypeInfo + # namespace. This is done only when bar is a module or a type; + # other things (e.g. methods) are handled by other code in + # checkmember. + type_info = None + if isinstance(base.node, TypeInfo): + # C.bar where C is a class + type_info = base.node + elif isinstance(base.node, Var) and self.type and self.function_stack: + # check for self.bar or cls.bar in method/classmethod + func_def = self.function_stack[-1] + if not func_def.is_static and isinstance(func_def.type, CallableType): + formal_arg = func_def.type.argument_by_name(base.node.name) + if formal_arg and formal_arg.pos == 0: + type_info = self.type + elif isinstance(base.node, TypeAlias) and base.node.no_args: + assert isinstance(base.node.target, ProperType) + if isinstance(base.node.target, Instance): + type_info = base.node.target.type + + if type_info: + n = type_info.names.get(expr.name) + if n is not None and isinstance(n.node, (MypyFile, TypeInfo, TypeAlias)): + self.record_imported_symbol(n) + expr.kind = n.kind + expr.fullname = n.fullname or "" + expr.node = n.node + + def visit_op_expr(self, expr: OpExpr) -> None: + expr.left.accept(self) + + if expr.op in ("and", "or"): + inferred = infer_condition_value(expr.left, self.options) + if (inferred in (ALWAYS_FALSE, MYPY_FALSE) and expr.op == "and") or ( + inferred in (ALWAYS_TRUE, MYPY_TRUE) and expr.op == "or" + ): + expr.right_unreachable = True + return + elif (inferred in (ALWAYS_TRUE, MYPY_TRUE) and expr.op == "and") or ( + inferred in (ALWAYS_FALSE, MYPY_FALSE) and expr.op == "or" + ): + expr.right_always = True + + expr.right.accept(self) + + def visit_comparison_expr(self, expr: ComparisonExpr) -> None: + for operand in expr.operands: + operand.accept(self) + + def visit_unary_expr(self, expr: UnaryExpr) -> None: + expr.expr.accept(self) + + def visit_index_expr(self, expr: IndexExpr) -> None: + base = expr.base + base.accept(self) + if ( + isinstance(base, RefExpr) + and isinstance(base.node, TypeInfo) + and not base.node.is_generic() + ): + expr.index.accept(self) + elif ( + isinstance(base, RefExpr) and isinstance(base.node, TypeAlias) + ) or refers_to_class_or_function(base): + # We need to do full processing on every iteration, since some type + # arguments may contain placeholder types. 
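# A minimal sketch (Python 3.9+ syntax) of index expressions that reach
# analyze_type_application below: subscripting a class or a type alias with types
# is a type application, and it is re-analyzed on every iteration because the
# arguments may still contain placeholder types.
from typing import Optional

Matrix = list[list[float]]           # alias r.h.s., analyzed as a type application
Row = tuple[float, ...]              # for tuple, the trailing "..." item is dropped
cells = dict[str, Optional[int]]()   # direct application on a class object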
+ self.analyze_type_application(expr) + else: + expr.index.accept(self) + + def analyze_type_application(self, expr: IndexExpr) -> None: + """Analyze special form -- type application (either direct or via type aliasing).""" + types = self.analyze_type_application_args(expr) + if types is None: + return + base = expr.base + expr.analyzed = TypeApplication(base, types) + expr.analyzed.line = expr.line + expr.analyzed.column = expr.column + + def analyze_type_application_args(self, expr: IndexExpr) -> list[Type] | None: + """Analyze type arguments (index) in a type application. + + Return None if anything was incomplete. + """ + index = expr.index + tag = self.track_incomplete_refs() + self.analyze_type_expr(index) + if self.found_incomplete_ref(tag): + return None + if self.basic_type_applications: + # Postpone the rest until we have more information (for r.h.s. of an assignment) + return None + types: list[Type] = [] + if isinstance(index, TupleExpr): + items = index.items + is_tuple = isinstance(expr.base, RefExpr) and expr.base.fullname == "builtins.tuple" + if is_tuple and len(items) == 2 and isinstance(items[-1], EllipsisExpr): + items = items[:-1] + else: + items = [index] + + # TODO: this needs a clean-up. + # Probably always allow Parameters literals, and validate in semanal_typeargs.py + base = expr.base + if isinstance(base, RefExpr) and isinstance(base.node, TypeAlias): + allow_unpack = base.node.tvar_tuple_index is not None + alias = base.node + if any(isinstance(t, ParamSpecType) for t in alias.alias_tvars): + has_param_spec = True + num_args = len(alias.alias_tvars) + else: + has_param_spec = False + num_args = -1 + elif isinstance(base, RefExpr) and isinstance(base.node, TypeInfo): + allow_unpack = ( + base.node.has_type_var_tuple_type or base.node.fullname == "builtins.tuple" + ) + has_param_spec = base.node.has_param_spec_type + num_args = len(base.node.type_vars) + else: + allow_unpack = False + has_param_spec = False + num_args = -1 + + for item in items: + try: + typearg = self.expr_to_unanalyzed_type(item, allow_unpack=True) + except TypeTranslationError: + self.fail("Type expected within [...]", expr) + return None + analyzed = self.anal_type( + typearg, + # The type application may appear in base class expression, + # where type variables are not bound yet. Or when accepting + # r.h.s. of type alias before we figured out it is a type alias. 
+ allow_unbound_tvars=self.allow_unbound_tvars, + allow_placeholder=True, + allow_param_spec_literals=has_param_spec, + allow_unpack=allow_unpack, + ) + if analyzed is None: + return None + types.append(analyzed) + + if allow_unpack: + # need to flatten away harmless unpacks like Unpack[tuple[int]] + flattened_items = flatten_nested_tuples(types) + types = self.type_analyzer().check_unpacks_in_list(flattened_items) + if has_param_spec and num_args == 1 and types: + first_arg = get_proper_type(types[0]) + single_any = len(types) == 1 and isinstance(first_arg, AnyType) + if not (single_any or any(isinstance(t, (Parameters, ParamSpecType)) for t in types)): + types = [Parameters(types, [ARG_POS] * len(types), [None] * len(types))] + + return types + + def visit_slice_expr(self, expr: SliceExpr) -> None: + if expr.begin_index: + expr.begin_index.accept(self) + if expr.end_index: + expr.end_index.accept(self) + if expr.stride: + expr.stride.accept(self) + + def visit_cast_expr(self, expr: CastExpr) -> None: + expr.expr.accept(self) + analyzed = self.anal_type(expr.type) + if analyzed is not None: + expr.type = analyzed + + def visit_type_form_expr(self, expr: TypeFormExpr) -> None: + analyzed = self.anal_type(expr.type) + if analyzed is not None: + expr.type = analyzed + + def visit_assert_type_expr(self, expr: AssertTypeExpr) -> None: + expr.expr.accept(self) + analyzed = self.anal_type(expr.type) + if analyzed is not None: + expr.type = analyzed + + def visit_reveal_expr(self, expr: RevealExpr) -> None: + if expr.kind == REVEAL_TYPE: + if expr.expr is not None: + expr.expr.accept(self) + else: + # Reveal locals doesn't have an inner expression, there's no + # need to traverse inside it + pass + + def visit_type_application(self, expr: TypeApplication) -> None: + expr.expr.accept(self) + for i in range(len(expr.types)): + analyzed = self.anal_type(expr.types[i]) + if analyzed is not None: + expr.types[i] = analyzed + + def visit_list_comprehension(self, expr: ListComprehension) -> None: + if any(expr.generator.is_async): + if not self.is_func_scope() or not self.function_stack[-1].is_coroutine: + self.fail(message_registry.ASYNC_FOR_OUTSIDE_COROUTINE, expr, code=codes.SYNTAX) + + expr.generator.accept(self) + + def visit_set_comprehension(self, expr: SetComprehension) -> None: + if any(expr.generator.is_async): + if not self.is_func_scope() or not self.function_stack[-1].is_coroutine: + self.fail(message_registry.ASYNC_FOR_OUTSIDE_COROUTINE, expr, code=codes.SYNTAX) + + expr.generator.accept(self) + + def visit_dictionary_comprehension(self, expr: DictionaryComprehension) -> None: + if any(expr.is_async): + if not self.is_func_scope() or not self.function_stack[-1].is_coroutine: + self.fail(message_registry.ASYNC_FOR_OUTSIDE_COROUTINE, expr, code=codes.SYNTAX) + + with self.enter(expr): + self.analyze_comp_for(expr) + expr.key.accept(self) + expr.value.accept(self) + self.analyze_comp_for_2(expr) + + def visit_generator_expr(self, expr: GeneratorExpr) -> None: + with self.enter(expr): + self.analyze_comp_for(expr) + expr.left_expr.accept(self) + self.analyze_comp_for_2(expr) + + def analyze_comp_for(self, expr: GeneratorExpr | DictionaryComprehension) -> None: + """Analyses the 'comp_for' part of comprehensions (part 1). + + That is the part after 'for' in (x for x in l if p). This analyzes + variables and conditions which are analyzed in a local scope. 
+ """ + for i, (index, sequence, conditions) in enumerate( + zip(expr.indices, expr.sequences, expr.condlists) + ): + if i > 0: + sequence.accept(self) + # Bind index variables. + self.analyze_lvalue(index) + for cond in conditions: + cond.accept(self) + + def analyze_comp_for_2(self, expr: GeneratorExpr | DictionaryComprehension) -> None: + """Analyses the 'comp_for' part of comprehensions (part 2). + + That is the part after 'for' in (x for x in l if p). This analyzes + the 'l' part which is analyzed in the surrounding scope. + """ + expr.sequences[0].accept(self) + + def visit_lambda_expr(self, expr: LambdaExpr) -> None: + self.analyze_arg_initializers(expr) + with self.inside_except_star_block_set(False, entering_loop=False): + self.analyze_function_body(expr) + + def visit_conditional_expr(self, expr: ConditionalExpr) -> None: + expr.if_expr.accept(self) + expr.cond.accept(self) + expr.else_expr.accept(self) + + def visit__promote_expr(self, expr: PromoteExpr) -> None: + analyzed = self.anal_type(expr.type) + if analyzed is not None: + assert isinstance(analyzed, ProperType), "Cannot use type aliases for promotions" + expr.type = analyzed + + def visit_yield_expr(self, e: YieldExpr) -> None: + if not self.is_func_scope(): + self.fail('"yield" outside function', e, serious=True, blocker=True) + elif self.scope_stack[-1] == SCOPE_COMPREHENSION: + self.fail( + '"yield" inside comprehension or generator expression', + e, + serious=True, + blocker=True, + ) + elif self.function_stack[-1].is_coroutine: + self.function_stack[-1].is_generator = True + self.function_stack[-1].is_async_generator = True + else: + self.function_stack[-1].is_generator = True + if e.expr: + e.expr.accept(self) + + def visit_await_expr(self, expr: AwaitExpr) -> None: + if not self.is_func_scope() or not self.function_stack: + # We check both because is_function_scope() returns True inside comprehensions. + # This is not a blocker, because some environments (like ipython) + # support top level awaits. 
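# A minimal sketch of the scope split between analyze_comp_for and
# analyze_comp_for_2 above: the outermost iterable of a comprehension is analyzed
# (and at runtime evaluated) in the surrounding scope, while the remaining
# sequences and conditions belong to the comprehension's own scope.
class Table:
    rows = [1, 2, 3]
    doubled = [r * 2 for r in rows]   # ok: "rows" is the outermost iterable
    # paired = [(a, b) for a in rows for b in rows]
    #   NameError at runtime: the second "rows" is looked up in the
    #   comprehension scope, which does not see the class body.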
+ self.fail('"await" outside function', expr, serious=True, code=codes.TOP_LEVEL_AWAIT) + elif not self.function_stack[-1].is_coroutine: + self.fail( + '"await" outside coroutine ("async def")', + expr, + serious=True, + code=codes.AWAIT_NOT_ASYNC, + ) + expr.expr.accept(self) + + # + # Patterns + # + + def visit_as_pattern(self, p: AsPattern) -> None: + if p.pattern is not None: + p.pattern.accept(self) + if p.name is not None: + self.analyze_lvalue(p.name) + + def visit_or_pattern(self, p: OrPattern) -> None: + for pattern in p.patterns: + pattern.accept(self) + + def visit_value_pattern(self, p: ValuePattern) -> None: + p.expr.accept(self) + + def visit_sequence_pattern(self, p: SequencePattern) -> None: + for pattern in p.patterns: + pattern.accept(self) + + def visit_starred_pattern(self, p: StarredPattern) -> None: + if p.capture is not None: + self.analyze_lvalue(p.capture) + + def visit_mapping_pattern(self, p: MappingPattern) -> None: + for key in p.keys: + key.accept(self) + for value in p.values: + value.accept(self) + if p.rest is not None: + self.analyze_lvalue(p.rest) + + def visit_class_pattern(self, p: ClassPattern) -> None: + p.class_ref.accept(self) + for pos in p.positionals: + pos.accept(self) + for v in p.keyword_values: + v.accept(self) + + # + # Lookup functions + # + + def lookup( + self, name: str, ctx: Context, suppress_errors: bool = False + ) -> SymbolTableNode | None: + node = self._lookup(name, ctx, suppress_errors) + if node is not None: + # This call is unfortunate from performance point of view, but + # needed for rare cases like e.g. testIncrementalChangingAlias. + self.record_imported_symbol(node) + return node + + def record_imported_symbol(self, sym: SymbolTableNode) -> None: + """If the symbol was not defined in current module, add its module to module_refs.""" + if sym.kind == LDEF or sym.node is None: + return + node = sym.node + if isinstance(node, PlaceholderNode) or not node.fullname: + # This node is not ready yet. + return + if node.fullname.startswith(("builtins.", "typing.")): + # Skip dependencies on builtins/typing. + return + # Modules, classes, and type aliases store defining module directly. + if isinstance(node, MypyFile): + fullname = node.fullname + elif isinstance(node, TypeInfo): + fullname = node.module_name + elif isinstance(node, TypeAlias): + fullname = node.module + elif isinstance(node, (Var, FuncDef, OverloadedFuncDef, Decorator)): + # For functions/variables infer defining module from enclosing class. + info = node.var.info if isinstance(node, Decorator) else node.info + if info: + fullname = info.module_name + else: + # global function/variable + fullname = node.fullname.rsplit(".", maxsplit=1)[0] + else: + # Some nodes (currently only TypeVarLikeExpr subclasses) don't store + # module fullname explicitly, infer it from the node fullname iteratively. + # TODO: this is not 100% robust for type variables nested within a class + # with a name that matches name of a submodule. + fullname = node.fullname.rsplit(".", maxsplit=1)[0] + if fullname == self.cur_mod_id: + return + while "." in fullname and fullname not in self.modules: + fullname = fullname.rsplit(".", maxsplit=1)[0] + if fullname != self.cur_mod_id: + self.cur_mod_node.module_refs.add(fullname) + + def _lookup( + self, name: str, ctx: Context, suppress_errors: bool = False + ) -> SymbolTableNode | None: + """Look up an unqualified (no dots) name in all active namespaces. + + Note that the result may contain a PlaceholderNode. 
The caller may + want to defer in that case. + + Generate an error if the name is not defined unless suppress_errors + is true or the current namespace is incomplete. In the latter case + defer. + """ + implicit_name = False + # 1a. Name declared using 'global x' takes precedence + if name in self.global_decls[-1]: + if name in self.globals: + return self.globals[name] + if not suppress_errors: + self.name_not_defined(name, ctx) + return None + # 1b. Name declared using 'nonlocal x' takes precedence + if name in self.nonlocal_decls[-1]: + for table in reversed(self.locals[:-1]): + if table is not None and name in table: + return table[name] + if not suppress_errors: + self.name_not_defined(name, ctx) + return None + # 2a. Class attributes (if within class definition) + if self.type and not self.is_func_scope() and name in self.type.names: + node = self.type.names[name] + if not node.implicit: + if self.is_active_symbol_in_class_body(node.node): + return node + else: + # Defined through self.x assignment + implicit_name = True + implicit_node = node + # 2b. Class attributes __qualname__ and __module__ + if self.type and not self.is_func_scope() and name in {"__qualname__", "__module__"}: + return SymbolTableNode(MDEF, Var(name, self.str_type())) + # 3. Local (function) scopes + for table in reversed(self.locals): + if table is not None and name in table: + return table[name] + + # 4. Current file global scope + if name in self.globals: + return self.globals[name] + # 5. Builtins + b = self.globals.get("__builtins__", None) + if b: + assert isinstance(b.node, MypyFile) + table = b.node.names + if name in table: + if len(name) > 1 and name[0] == "_" and name[1] != "_": + if not suppress_errors: + self.name_not_defined(name, ctx) + return None + node = table[name] + return node + # Give up. + if not implicit_name and not suppress_errors: + self.name_not_defined(name, ctx) + else: + if implicit_name: + return implicit_node + return None + + def is_active_symbol_in_class_body(self, node: SymbolNode | None) -> bool: + """Can a symbol defined in class body accessed at current statement? + + Only allow access to class attributes textually after + the definition, so that it's possible to fall back to the + outer scope. Example: + + class X: ... + + class C: + X = X # Initializer refers to outer scope + + Nested classes are an exception, since we want to support + arbitrary forward references in type annotations. Also, we + allow forward references to type aliases to support recursive + types. + """ + # TODO: Forward reference to name imported in class body is not + # caught. + if self.statement is None: + # Assume it's fine -- don't have enough context to check + return True + if ( + node is None + or self.is_textually_before_statement(node) + or not self.is_defined_in_current_module(node.fullname) + ): + return True + if self.is_type_like(node): + # Allow forward references to classes/type aliases (see docstring), but + # a forward reference should never shadow an existing regular reference. 
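# A minimal sketch of the rule enforced here: ordinary class-body names must be
# defined textually earlier (otherwise the lookup falls back to the outer scope),
# while type-like symbols such as nested classes may be referenced forward from
# annotations.
limit = 10

class Config:
    limit = limit + 1                    # initializer sees the module-level "limit"

    def head(self) -> "Config.Entry":    # forward reference to a nested class: fine
        return Config.Entry()

    class Entry:
        pass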
+ if node.name not in self.globals: + return True + global_node = self.globals[node.name] + if not self.is_textually_before_class(global_node.node): + return True + return not self.is_type_like(global_node.node) + return False + + def is_type_like(self, node: SymbolNode | None) -> bool: + return isinstance(node, (TypeInfo, TypeAlias)) or ( + isinstance(node, PlaceholderNode) and node.becomes_typeinfo + ) + + def is_textually_before_statement(self, node: SymbolNode) -> bool: + """Check if a node is defined textually before the current statement + + Note that decorated functions' line number are the same as + the top decorator. + """ + assert self.statement + line_diff = self.statement.line - node.line + + # The first branch handles reference an overloaded function variant inside itself, + # this is a corner case where mypy technically deviates from runtime name resolution, + # but it is fine because we want an overloaded function to be treated as a single unit. + if self.is_overloaded_item(node, self.statement): + return False + elif isinstance(node, Decorator) and not node.is_overload: + return line_diff > len(node.original_decorators) + else: + return line_diff > 0 + + def is_textually_before_class(self, node: SymbolNode | None) -> bool: + """Similar to above, but check if a node is defined before current class.""" + assert self.type is not None + if node is None: + return False + return node.line < self.type.defn.line + + def is_overloaded_item(self, node: SymbolNode, statement: Statement) -> bool: + """Check whether the function belongs to the overloaded variants""" + if isinstance(node, OverloadedFuncDef) and isinstance(statement, FuncDef): + in_items = statement in { + item.func if isinstance(item, Decorator) else item for item in node.items + } + in_impl = node.impl is not None and ( + (isinstance(node.impl, Decorator) and statement is node.impl.func) + or statement is node.impl + ) + return in_items or in_impl + return False + + def is_defined_in_current_module(self, fullname: str | None) -> bool: + if not fullname: + return False + return module_prefix(self.modules, fullname) == self.cur_mod_id + + def lookup_qualified( + self, name: str, ctx: Context, suppress_errors: bool = False + ) -> SymbolTableNode | None: + """Lookup a qualified name in all activate namespaces. + + Note that the result may contain a PlaceholderNode. The caller may + want to defer in that case. + + Generate an error if the name is not defined unless suppress_errors + is true or the current namespace is incomplete. In the latter case + defer. + """ + if "." not in name: + # Simple case: look up a short name. + return self.lookup(name, ctx, suppress_errors=suppress_errors) + parts = name.split(".") + namespace = self.cur_mod_id + sym = self.lookup(parts[0], ctx, suppress_errors=suppress_errors) + if sym: + for i in range(1, len(parts)): + node = sym.node + part = parts[i] + if isinstance(node, TypeInfo): + nextsym = node.get(part) + elif isinstance(node, MypyFile): + nextsym = self.get_module_symbol(node, part) + namespace = node.fullname + elif isinstance(node, PlaceholderNode): + return sym + elif isinstance(node, TypeAlias) and node.no_args: + assert isinstance(node.target, ProperType) + if isinstance(node.target, Instance): + nextsym = node.target.type.get(part) + else: + nextsym = None + else: + if isinstance(node, Var): + typ = get_proper_type(node.type) + if isinstance(typ, AnyType): + # Allow access through Var with Any type without error. 
+ return self.implicit_symbol(sym, name, parts[i:], typ) + # This might be something like valid `P.args` or invalid `P.__bound__` access. + # Important note that `ParamSpecExpr` is also ignored in other places. + # See https://github.com/python/mypy/pull/13468 + if isinstance(node, ParamSpecExpr) and part in ("args", "kwargs"): + return None + # Lookup through invalid node, such as variable or function + nextsym = None + if not nextsym or nextsym.module_hidden: + if not suppress_errors: + self.name_not_defined(name, ctx, namespace=namespace) + return None + sym = nextsym + if sym is not None: + self.record_imported_symbol(sym) + return sym + + def lookup_type_node(self, expr: Expression) -> SymbolTableNode | None: + try: + t = self.expr_to_unanalyzed_type(expr) + except TypeTranslationError: + return None + if isinstance(t, UnboundType): + n = self.lookup_qualified(t.name, expr, suppress_errors=True) + return n + return None + + def get_module_symbol(self, node: MypyFile, name: str) -> SymbolTableNode | None: + """Look up a symbol from a module. + + Return None if no matching symbol could be bound. + """ + module = node.fullname + names = node.names + sym = names.get(name) + if not sym: + fullname = module + "." + name + if fullname in self.modules and self.is_visible_import(module, fullname): + sym = SymbolTableNode(GDEF, self.modules[fullname]) + elif self.is_incomplete_namespace(module): + self.record_incomplete_ref() + elif "__getattr__" in names: + gvar = self.create_getattr_var(names["__getattr__"], name, fullname) + if gvar: + sym = SymbolTableNode(GDEF, gvar) + elif self.is_missing_module(fullname): + # We use the fullname of the original definition so that we can + # detect whether two names refer to the same thing. + var_type = AnyType(TypeOfAny.from_unimported_type) + v = Var(name, type=var_type) + v._fullname = fullname + sym = SymbolTableNode(GDEF, v) + elif sym.module_hidden: + sym = None + return sym + + def is_visible_import(self, base_id: str, id: str) -> bool: + if id in self.import_map[self.cur_mod_id]: + # Fast path: module is imported locally. + return True + if base_id not in self.transitive_submodule_imports: + # This is a performance optimization for a common pattern. If one module + # in a codebase uses import numpy as np; np.foo.bar, then it is likely that + # other modules use similar pattern as well. So we pre-compute transitive + # dependencies for np, to avoid possible duplicate work in the future. + self.add_transitive_submodule_imports(base_id) + if self.cur_mod_id not in self.transitive_submodule_imports: + self.add_transitive_submodule_imports(self.cur_mod_id) + return id in self.transitive_submodule_imports[self.cur_mod_id] + + def add_transitive_submodule_imports(self, mod_id: str) -> None: + if mod_id not in self.import_map: + return + todo = self.import_map[mod_id] + seen = {mod_id} + result = {mod_id} + while todo: + dep = todo.pop() + if dep in seen: + continue + seen.add(dep) + if "." 
in dep: + result.add(dep) + if dep in self.transitive_submodule_imports: + result |= self.transitive_submodule_imports[dep] + continue + if dep in self.import_map: + todo |= self.import_map[dep] + self.transitive_submodule_imports[mod_id] = result + + def is_missing_module(self, module: str) -> bool: + return module in self.missing_modules + + def implicit_symbol( + self, sym: SymbolTableNode, name: str, parts: list[str], source_type: AnyType + ) -> SymbolTableNode: + """Create symbol for a qualified name reference through Any type.""" + if sym.node is None: + basename = None + else: + basename = sym.node.fullname + if basename is None: + fullname = name + else: + fullname = basename + "." + ".".join(parts) + var_type = AnyType(TypeOfAny.from_another_any, source_type) + var = Var(parts[-1], var_type) + var._fullname = fullname + return SymbolTableNode(GDEF, var) + + def create_getattr_var( + self, getattr_defn: SymbolTableNode, name: str, fullname: str + ) -> Var | None: + """Create a dummy variable using module-level __getattr__ return type. + + If not possible, return None. + + Note that multiple Var nodes can be created for a single name. We + can use the from_module_getattr and the fullname attributes to + check if two dummy Var nodes refer to the same thing. Reusing Var + nodes would require non-local mutable state, which we prefer to + avoid. + """ + if isinstance(getattr_defn.node, (FuncDef, Var)): + node_type = get_proper_type(getattr_defn.node.type) + if isinstance(node_type, CallableType): + typ = node_type.ret_type + else: + typ = AnyType(TypeOfAny.from_error) + v = Var(name, type=typ) + v._fullname = fullname + v.from_module_getattr = True + return v + return None + + def lookup_fully_qualified(self, fullname: str) -> SymbolTableNode: + ret = self.lookup_fully_qualified_or_none(fullname) + assert ret is not None, fullname + return ret + + def lookup_fully_qualified_or_none(self, fullname: str) -> SymbolTableNode | None: + """Lookup a fully qualified name that refers to a module-level definition. + + Don't assume that the name is defined. This happens in the global namespace -- + the local module namespace is ignored. This does not dereference indirect + refs. + + Note that this can't be used for names nested in class namespaces. + """ + # TODO: unify/clean-up/simplify lookup methods, see #4157. + module, name = fullname.rsplit(".", maxsplit=1) + + if module in self.modules: + # If the module exists, look up the name in the module. + # This is the common case. + filenode = self.modules[module] + result = filenode.names.get(name) + if result is None and self.is_incomplete_namespace(module): + # TODO: More explicit handling of incomplete refs? + self.record_incomplete_ref() + return result + else: + # Else, try to find the longest prefix of the module name that is in the modules dictionary. + splitted_modules = fullname.split(".") + names = [] + + while splitted_modules and ".".join(splitted_modules) not in self.modules: + names.append(splitted_modules.pop()) + + if not splitted_modules or not names: + # If no module or name is found, return None. + return None + + # Reverse the names list to get the correct order of names. + names.reverse() + + module = ".".join(splitted_modules) + filenode = self.modules[module] + result = filenode.names.get(names[0]) + + if result is None and self.is_incomplete_namespace(module): + # TODO: More explicit handling of incomplete refs? 
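# A minimal sketch of the pattern create_getattr_var above supports: a module-level
# __getattr__ (PEP 562).  A name that is missing from this module's symbol table is
# bound in the importer to a dummy Var typed with __getattr__'s return type.
def __getattr__(name: str) -> str:
    return name.upper()

# In an importing module (hypothetical):
#     import this_module
#     value = this_module.missing_name   # mypy infers str for "value"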
+ self.record_incomplete_ref() + + for part in names[1:]: + if result is not None and isinstance(result.node, TypeInfo): + result = result.node.names.get(part) + else: + return None + return result + + def object_type(self) -> Instance: + if self._object_type is None: + self._object_type = self.named_type("builtins.object") + return self._object_type + + def str_type(self) -> Instance: + if self._str_type is None: + self._str_type = self.named_type("builtins.str") + return self._str_type + + def function_type(self) -> Instance: + if self._function_type is None: + self._function_type = self.named_type("builtins.function") + return self._function_type + + def named_type(self, fullname: str, args: list[Type] | None = None) -> Instance: + sym = self.lookup_fully_qualified(fullname) + assert sym, "Internal error: attempted to construct unknown type" + node = sym.node + assert isinstance(node, TypeInfo), node + if args: + # TODO: assert len(args) == len(node.defn.type_vars) + return Instance(node, args) + return Instance(node, [AnyType(TypeOfAny.special_form)] * len(node.defn.type_vars)) + + def named_type_or_none(self, fullname: str, args: list[Type] | None = None) -> Instance | None: + sym = self.lookup_fully_qualified_or_none(fullname) + if not sym or isinstance(sym.node, PlaceholderNode): + return None + node = sym.node + if isinstance(node, TypeAlias): + assert isinstance(node.target, Instance) # type: ignore[misc] + node = node.target.type + assert isinstance(node, TypeInfo), node + if args is not None: + # TODO: assert len(args) == len(node.defn.type_vars) + return Instance(node, args) + return Instance(node, [AnyType(TypeOfAny.unannotated)] * len(node.defn.type_vars)) + + def builtin_type(self, fully_qualified_name: str) -> Instance: + """Legacy function -- use named_type() instead.""" + return self.named_type(fully_qualified_name) + + def lookup_current_scope(self, name: str) -> SymbolTableNode | None: + if self.locals[-1] is not None: + return self.locals[-1].get(name) + elif self.type is not None: + return self.type.names.get(name) + else: + return self.globals.get(name) + + # + # Adding symbols + # + + def add_symbol( + self, + name: str, + node: SymbolNode, + context: Context, + module_public: bool = True, + module_hidden: bool = False, + can_defer: bool = True, + escape_comprehensions: bool = False, + no_progress: bool = False, + type_param: bool = False, + ) -> bool: + """Add symbol to the currently active symbol table. + + Generally additions to symbol table should go through this method or + one of the methods below so that kinds, redefinitions, conditional + definitions, and skipped names are handled consistently. + + Return True if we actually added the symbol, or False if we refused to do so + (because something is not ready). + + If can_defer is True, defer current target if adding a placeholder. + """ + if self.is_func_scope(): + kind = LDEF + elif self.type is not None: + kind = MDEF + else: + kind = GDEF + symbol = SymbolTableNode( + kind, node, module_public=module_public, module_hidden=module_hidden + ) + return self.add_symbol_table_node( + name, symbol, context, can_defer, escape_comprehensions, no_progress, type_param + ) + + def add_symbol_skip_local(self, name: str, node: SymbolNode) -> None: + """Same as above, but skipping the local namespace. + + This doesn't check for previous definition and is only used + for serialization of method-level classes. 
+ + Classes defined within methods can be exposed through an + attribute type, but method-level symbol tables aren't serialized. + This method can be used to add such classes to an enclosing, + serialized symbol table. + """ + # TODO: currently this is only used by named tuples and typed dicts. + # Use this method also by normal classes, see issue #6422. + if self.type is not None: + names = self.type.names + kind = MDEF + else: + names = self.globals + kind = GDEF + symbol = SymbolTableNode(kind, node) + names[name] = symbol + + def add_symbol_table_node( + self, + name: str, + symbol: SymbolTableNode, + context: Context | None = None, + can_defer: bool = True, + escape_comprehensions: bool = False, + no_progress: bool = False, + type_param: bool = False, + ) -> bool: + """Add symbol table node to the currently active symbol table. + + Return True if we actually added the symbol, or False if we refused + to do so (because something is not ready or it was a no-op). + + Generate an error if there is an invalid redefinition. + + If context is None, unconditionally add node, since we can't report + an error. Note that this is used by plugins to forcibly replace nodes! + + TODO: Prevent plugins from replacing nodes, as it could cause problems? + + Args: + name: short name of symbol + symbol: Node to add + can_defer: if True, defer current target if adding a placeholder + context: error context (see above about None value) + """ + names = self.current_symbol_table( + escape_comprehensions=escape_comprehensions, type_param=type_param + ) + existing = names.get(name) + if isinstance(symbol.node, PlaceholderNode) and can_defer: + if context is not None: + self.process_placeholder(name, "name", context) + else: + # see note in docstring describing None contexts + self.defer() + + if ( + existing is not None + and context is not None + and not is_valid_replacement(existing, symbol) + ): + # There is an existing node, so this may be a redefinition. + # If the new node points to the same node as the old one, + # or if both old and new nodes are placeholders, we don't + # need to do anything. + old = existing.node + new = symbol.node + if isinstance(new, PlaceholderNode): + # We don't know whether this is okay. Let's wait until the next iteration. + return False + if not is_same_symbol(old, new): + if isinstance(new, (FuncDef, Decorator, OverloadedFuncDef, TypeInfo)): + self.add_redefinition(names, name, symbol) + if not (isinstance(new, (FuncDef, Decorator)) and self.set_original_def(old, new)): + self.name_already_defined(name, context, existing) + elif type_param or ( + name not in self.missing_names[-1] and "*" not in self.missing_names[-1] + ): + names[name] = symbol + if not no_progress: + self.progress = True + return True + return False + + def add_redefinition(self, names: SymbolTable, name: str, symbol: SymbolTableNode) -> None: + """Add a symbol table node that reflects a redefinition as a function or a class. + + Redefinitions need to be added to the symbol table so that they can be found + through AST traversal, but they have dummy names of form 'name-redefinition[N]', + where N ranges over 2, 3, ... (omitted for the first redefinition). + + Note: we always store redefinitions independently of whether they are valid or not + (so they will be semantically analyzed), the caller should give an error for invalid + redefinitions (such as e.g. variable redefined as a class). + """ + i = 1 + # Don't serialize redefined nodes. 
They are likely to have + # busted internal references which can cause problems with + # serialization and they can't have any external references to + # them. + symbol.no_serialize = True + while True: + if i == 1: + new_name = f"{name}-redefinition" + else: + new_name = f"{name}-redefinition{i}" + existing = names.get(new_name) + if existing is None: + names[new_name] = symbol + return + elif existing.node is symbol.node: + # Already there + return + i += 1 + + def add_local(self, node: Var | FuncDef | OverloadedFuncDef, context: Context) -> None: + """Add local variable or function.""" + assert self.is_func_scope() + name = node.name + node._fullname = name + self.add_symbol(name, node, context) + + def _get_node_for_class_scoped_import( + self, name: str, symbol_node: SymbolNode | None, context: Context + ) -> SymbolNode | None: + if symbol_node is None: + return None + # I promise this type checks; I'm just making mypyc issues go away. + # mypyc is absolutely convinced that `symbol_node` narrows to a Var in the following, + # when it can also be a FuncBase. Once fixed, `f` in the following can be removed. + # See also https://github.com/mypyc/mypyc/issues/892 + f: Callable[[object], Any] = lambda x: x + if isinstance(f(symbol_node), (Decorator, FuncBase, Var)): + # For imports in class scope, we construct a new node to represent the symbol and + # set its `info` attribute to `self.type`. + existing = self.current_symbol_table().get(name) + if ( + # The redefinition checks in `add_symbol_table_node` don't work for our + # constructed Var / FuncBase, so check for possible redefinitions here. + existing is not None + and isinstance(f(existing.node), (Decorator, FuncBase, Var)) + and ( + isinstance(f(existing.type), f(AnyType)) + or f(existing.type) == f(symbol_node).type + ) + ): + return existing.node + + # Construct the new node + if isinstance(f(symbol_node), (FuncBase, Decorator)): + # In theory we could construct a new node here as well, but in practice + # it doesn't work well, see #12197 + typ: Type | None = AnyType(TypeOfAny.from_error) + self.fail("Unsupported class scoped import", context) + else: + typ = f(symbol_node).type + symbol_node = Var(name, typ) + symbol_node._fullname = self.qualified_name(name) + assert self.type is not None # guaranteed by is_class_scope + symbol_node.info = self.type + symbol_node.line = context.line + symbol_node.column = context.column + return symbol_node + + def add_imported_symbol( + self, + name: str, + node: SymbolTableNode, + context: ImportBase, + module_public: bool, + module_hidden: bool, + ) -> None: + """Add an alias to an existing symbol through import.""" + assert not module_hidden or not module_public + + existing_symbol = self.lookup_current_scope(name) + if ( + existing_symbol + and not isinstance(existing_symbol.node, PlaceholderNode) + and not isinstance(node.node, PlaceholderNode) + ): + # Import can redefine a variable. They get special treatment. 
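# A minimal sketch of a redefinition that add_redefinition above keeps under a
# dummy name ("parse-redefinition"), so it is still analyzed while the caller
# reports the error.
def parse(text: str) -> int:
    return len(text)

class parse:    # error: Name "parse" already defined
    pass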
+ if self.process_import_over_existing_name(name, existing_symbol, node, context): + return + + symbol_node: SymbolNode | None = node.node + + if self.is_class_scope(): + symbol_node = self._get_node_for_class_scoped_import(name, symbol_node, context) + + symbol = SymbolTableNode( + node.kind, symbol_node, module_public=module_public, module_hidden=module_hidden + ) + self.add_symbol_table_node(name, symbol, context) + + def add_unknown_imported_symbol( + self, + name: str, + context: Context, + target_name: str | None, + module_public: bool, + module_hidden: bool, + ) -> None: + """Add symbol that we don't know what it points to because resolving an import failed. + + This can happen if a module is missing, or it is present, but doesn't have + the imported attribute. The `target_name` is the name of symbol in the namespace + it is imported from. For example, for 'from mod import x as y' the target_name is + 'mod.x'. This is currently used only to track logical dependencies. + """ + existing = self.current_symbol_table().get(name) + if existing and isinstance(existing.node, Var) and existing.node.is_suppressed_import: + # This missing import was already added -- nothing to do here. + return + var = Var(name) + if self.options.logical_deps and target_name is not None: + # This makes it possible to add logical fine-grained dependencies + # from a missing module. We can't use this by default, since in a + # few places we assume that the full name points to a real + # definition, but this name may point to nothing. + var._fullname = target_name + elif self.type: + var._fullname = self.type.fullname + "." + name + var.info = self.type + else: + var._fullname = self.qualified_name(name) + var.is_ready = True + any_type = AnyType(TypeOfAny.from_unimported_type, missing_import_name=var._fullname) + var.type = any_type + var.is_suppressed_import = True + self.add_symbol( + name, var, context, module_public=module_public, module_hidden=module_hidden + ) + + # + # Other helpers + # + + @contextmanager + def tvar_scope_frame(self, frame: TypeVarLikeScope) -> Iterator[None]: + old_scope = self.tvar_scope + self.tvar_scope = frame + yield + self.tvar_scope = old_scope + + def defer(self, debug_context: Context | None = None, force_progress: bool = False) -> None: + """Defer current analysis target to be analyzed again. + + This must be called if something in the current target is + incomplete or has a placeholder node. However, this must *not* + be called during the final analysis iteration! Instead, an error + should be generated. Often 'process_placeholder' is a good + way to either defer or generate an error. + + NOTE: Some methods, such as 'anal_type', 'mark_incomplete' and + 'record_incomplete_ref', call this implicitly, or when needed. + They are usually preferable to a direct defer() call. + """ + assert not self.final_iteration, "Must not defer during final iteration" + if force_progress: + # Usually, we report progress if we have replaced a placeholder node + # with an actual valid node. However, sometimes we need to update an + # existing node *in-place*. For example, this is used by type aliases + # in context of forward references and/or recursive aliases, and in + # similar situations (recursive named tuples etc). + self.progress = True + self.deferred = True + # Store debug info for this deferral. 
+ line = ( + debug_context.line if debug_context else self.statement.line if self.statement else -1 + ) + self.deferral_debug_context.append((self.cur_mod_id, line)) + + def track_incomplete_refs(self) -> Tag: + """Return tag that can be used for tracking references to incomplete names.""" + return self.num_incomplete_refs + + def found_incomplete_ref(self, tag: Tag) -> bool: + """Have we encountered an incomplete reference since starting tracking?""" + return self.num_incomplete_refs != tag + + def record_incomplete_ref(self) -> None: + """Record the encounter of an incomplete reference and defer current analysis target.""" + self.defer() + self.num_incomplete_refs += 1 + + def mark_incomplete( + self, + name: str, + node: Node, + becomes_typeinfo: bool = False, + module_public: bool = True, + module_hidden: bool = False, + ) -> None: + """Mark a definition as incomplete (and defer current analysis target). + + Also potentially mark the current namespace as incomplete. + + Args: + name: The name that we weren't able to define (or '*' if the name is unknown) + node: The node that refers to the name (definition or lvalue) + becomes_typeinfo: Pass this to PlaceholderNode (used by special forms like + named tuples that will create TypeInfos). + """ + self.defer(node) + if name == "*": + self.incomplete = True + elif not self.is_global_or_nonlocal(name): + fullname = self.qualified_name(name) + assert self.statement + placeholder = PlaceholderNode( + fullname, node, self.statement.line, becomes_typeinfo=becomes_typeinfo + ) + self.add_symbol( + name, + placeholder, + module_public=module_public, + module_hidden=module_hidden, + context=dummy_context(), + ) + self.missing_names[-1].add(name) + + def is_incomplete_namespace(self, fullname: str) -> bool: + """Is a module or class namespace potentially missing some definitions? + + If a name is missing from an incomplete namespace, we'll need to defer the + current analysis target. + """ + return fullname in self.incomplete_namespaces + + def process_placeholder( + self, name: str | None, kind: str, ctx: Context, force_progress: bool = False + ) -> None: + """Process a reference targeting placeholder node. + + If this is not a final iteration, defer current node, + otherwise report an error. + + The 'kind' argument indicates if this a name or attribute expression + (used for better error message). + """ + if self.final_iteration: + self.cannot_resolve_name(name, kind, ctx) + else: + self.defer(ctx, force_progress=force_progress) + + def cannot_resolve_name(self, name: str | None, kind: str, ctx: Context) -> None: + name_format = f' "{name}"' if name else "" + self.fail(f"Cannot resolve {kind}{name_format} (possible cyclic definition)", ctx) + if self.is_func_scope(): + self.note("Recursive types are not allowed at function scope", ctx) + + def qualified_name(self, name: str) -> str: + if self.type is not None: + return self.type._fullname + "." + name + elif self.is_func_scope(): + return name + else: + return self.cur_mod_id + "." 
+ name + + @contextmanager + def enter( + self, function: FuncItem | GeneratorExpr | DictionaryComprehension + ) -> Iterator[None]: + """Enter a function, generator or comprehension scope.""" + names = self.saved_locals.setdefault(function, SymbolTable()) + self.locals.append(names) + is_comprehension = isinstance(function, (GeneratorExpr, DictionaryComprehension)) + self.scope_stack.append(SCOPE_FUNC if not is_comprehension else SCOPE_COMPREHENSION) + self.global_decls.append(set()) + self.nonlocal_decls.append(set()) + # -1 since entering block will increment this to 0. + self.block_depth.append(-1) + self.loop_depth.append(0) + self.missing_names.append(set()) + try: + yield + finally: + self.locals.pop() + self.scope_stack.pop() + self.global_decls.pop() + self.nonlocal_decls.pop() + self.block_depth.pop() + self.loop_depth.pop() + self.missing_names.pop() + + def is_func_scope(self) -> bool: + scope_type = self.scope_stack[-1] + if scope_type == SCOPE_ANNOTATION: + scope_type = self.scope_stack[-2] + return scope_type in (SCOPE_FUNC, SCOPE_COMPREHENSION) + + def is_nested_within_func_scope(self) -> bool: + """Are we underneath a function scope, even if we are in a nested class also?""" + return any(s in (SCOPE_FUNC, SCOPE_COMPREHENSION) for s in self.scope_stack) + + def is_class_scope(self) -> bool: + return self.type is not None and not self.is_func_scope() + + def is_module_scope(self) -> bool: + return not (self.is_class_scope() or self.is_func_scope()) + + def current_symbol_kind(self) -> int: + if self.is_class_scope(): + kind = MDEF + elif self.is_func_scope(): + kind = LDEF + else: + kind = GDEF + return kind + + def current_symbol_table( + self, escape_comprehensions: bool = False, type_param: bool = False + ) -> SymbolTable: + if type_param and self.scope_stack[-1] == SCOPE_ANNOTATION: + n = self.locals[-1] + assert n is not None + return n + elif self.is_func_scope(): + if self.scope_stack[-1] == SCOPE_ANNOTATION: + n = self.locals[-2] + else: + n = self.locals[-1] + assert n is not None + if escape_comprehensions: + assert len(self.locals) == len(self.scope_stack) + # Retrieve the symbol table from the enclosing non-comprehension scope. + for i, scope_type in enumerate(reversed(self.scope_stack)): + if scope_type != SCOPE_COMPREHENSION: + if i == len(self.locals) - 1: # The last iteration. + # The caller of the comprehension is in the global space. 
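# A minimal sketch of why escape_comprehensions walks out to an enclosing scope
# here: an assignment expression inside a comprehension binds its target in the
# nearest enclosing function or module scope (PEP 572), not in the comprehension.
values = [2, 3, 5]
squares = [last := v * v for v in values]
print(last)   # 25 -- "last" escaped into module scope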
+ names = self.globals + else: + names_candidate = self.locals[-1 - i] + assert ( + names_candidate is not None + ), "Escaping comprehension from invalid scope" + names = names_candidate + break + else: + assert False, "Should have at least one non-comprehension scope" + else: + names = n + assert names is not None + elif self.type is not None: + names = self.type.names + else: + names = self.globals + return names + + def is_global_or_nonlocal(self, name: str) -> bool: + return self.is_func_scope() and ( + name in self.global_decls[-1] or name in self.nonlocal_decls[-1] + ) + + def add_exports(self, exp_or_exps: Iterable[Expression] | Expression) -> None: + exps = [exp_or_exps] if isinstance(exp_or_exps, Expression) else exp_or_exps + for exp in exps: + if isinstance(exp, StrExpr): + self.all_exports.append(exp.value) + + def name_not_defined(self, name: str, ctx: Context, namespace: str | None = None) -> None: + incomplete = self.is_incomplete_namespace(namespace or self.cur_mod_id) + if ( + namespace is None + and self.type + and not self.is_func_scope() + and self.incomplete_type_stack + and self.incomplete_type_stack[-1] + and not self.final_iteration + ): + # We are processing a class body for the first time, so it is incomplete. + incomplete = True + if incomplete: + # Target namespace is incomplete, so it's possible that the name will be defined + # later on. Defer current target. + self.record_incomplete_ref() + return + message = f'Name "{name}" is not defined' + self.fail(message, ctx, code=codes.NAME_DEFINED) + + if f"builtins.{name}" in SUGGESTED_TEST_FIXTURES: + # The user probably has a missing definition in a test fixture. Let's verify. + fullname = f"builtins.{name}" + if self.lookup_fully_qualified_or_none(fullname) is None: + # Yes. Generate a helpful note. + self.msg.add_fixture_note(fullname, ctx) + + modules_with_unimported_hints = { + name.split(".", 1)[0] for name in TYPES_FOR_UNIMPORTED_HINTS + } + lowercased = {name.lower(): name for name in TYPES_FOR_UNIMPORTED_HINTS} + for module in modules_with_unimported_hints: + fullname = f"{module}.{name}".lower() + if fullname not in lowercased: + continue + # User probably forgot to import these types. + hint = ( + 'Did you forget to import it from "{module}"?' + ' (Suggestion: "from {module} import {name}")' + ).format(module=module, name=lowercased[fullname].rsplit(".", 1)[-1]) + self.note(hint, ctx, code=codes.NAME_DEFINED) + + def already_defined( + self, name: str, ctx: Context, original_ctx: SymbolTableNode | SymbolNode | None, noun: str + ) -> None: + if isinstance(original_ctx, SymbolTableNode): + node: SymbolNode | None = original_ctx.node + elif isinstance(original_ctx, SymbolNode): + node = original_ctx + else: + node = None + + if isinstance(original_ctx, SymbolTableNode) and isinstance(original_ctx.node, MypyFile): + # Since this is an import, original_ctx.node points to the module definition. + # Therefore its line number is always 1, which is not useful for this + # error message. + extra_msg = " (by an import)" + elif node and node.line != -1 and self.is_local_name(node.fullname): + # TODO: Using previous symbol node may give wrong line. We should use + # the line number where the binding was established instead. 
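# A minimal sketch of the hint emitted by the TYPES_FOR_UNIMPORTED_HINTS branch
# above (the __future__ import keeps the sketch importable even though the
# annotation name is missing at runtime):
from __future__ import annotations

def total(items: Iterable[int]) -> int:   # error: Name "Iterable" is not defined
    # note: Did you forget to import it from "typing"?
    #       (Suggestion: "from typing import Iterable")
    return sum(items)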
+ extra_msg = f" on line {node.line}" + else: + extra_msg = " (possibly by an import)" + self.fail( + f'{noun} "{unmangle(name)}" already defined{extra_msg}', ctx, code=codes.NO_REDEF + ) + + def name_already_defined( + self, name: str, ctx: Context, original_ctx: SymbolTableNode | SymbolNode | None = None + ) -> None: + self.already_defined(name, ctx, original_ctx, noun="Name") + + def attribute_already_defined( + self, name: str, ctx: Context, original_ctx: SymbolTableNode | SymbolNode | None = None + ) -> None: + self.already_defined(name, ctx, original_ctx, noun="Attribute") + + def is_local_name(self, name: str) -> bool: + """Does name look like reference to a definition in the current module?""" + return self.is_defined_in_current_module(name) or "." not in name + + def in_checked_function(self) -> bool: + """Should we type-check the current function? + + - Yes if --check-untyped-defs is set. + - Yes outside functions. + - Yes in annotated functions. + - No otherwise. + """ + if self.options.check_untyped_defs or not self.function_stack: + return True + + current_index = len(self.function_stack) - 1 + while current_index >= 0: + current_func = self.function_stack[current_index] + if not isinstance(current_func, LambdaExpr): + return not current_func.is_dynamic() + + # Special case, `lambda` inherits the "checked" state from its parent. + # Because `lambda` itself cannot be annotated. + # `lambdas` can be deeply nested, so we try to find at least one other parent. + current_index -= 1 + + # This means that we only have a stack of `lambda` functions, + # no regular functions. + return True + + def fail( + self, + msg: str | ErrorMessage, + ctx: Context, + serious: bool = False, + *, + code: ErrorCode | None = None, + blocker: bool = False, + ) -> None: + if not serious and not self.in_checked_function(): + return + # In case it's a bug and we don't really have context + assert ctx is not None, msg + if isinstance(msg, ErrorMessage): + if code is None: + code = msg.code + msg = msg.value + self.errors.report( + ctx.line, + ctx.column, + msg, + blocker=blocker, + code=code, + end_line=ctx.end_line, + end_column=ctx.end_column, + ) + + def note(self, msg: str, ctx: Context, code: ErrorCode | None = None) -> None: + if not self.in_checked_function(): + return + self.errors.report(ctx.line, ctx.column, msg, severity="note", code=code) + + def incomplete_feature_enabled(self, feature: str, ctx: Context) -> bool: + if feature not in self.options.enable_incomplete_feature: + self.fail( + f'"{feature}" support is experimental,' + f" use --enable-incomplete-feature={feature} to enable", + ctx, + ) + return False + return True + + def accept(self, node: Node) -> None: + try: + node.accept(self) + except Exception as err: + report_internal_error(err, self.errors.file, node.line, self.errors, self.options) + + def expr_to_analyzed_type( + self, + expr: Expression, + report_invalid_types: bool = True, + allow_placeholder: bool = False, + allow_type_any: bool = False, + allow_unbound_tvars: bool = False, + allow_param_spec_literals: bool = False, + allow_unpack: bool = False, + ) -> Type | None: + if isinstance(expr, CallExpr): + # This is a legacy syntax intended mostly for Python 2, we keep it for + # backwards compatibility, but new features like generic named tuples + # and recursive named tuples will be not supported. 
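# A minimal sketch of the call-based NamedTuple syntax that check_namedtuple
# (called below) recognizes; generic named tuples need the class syntax instead.
from typing import NamedTuple

Point = NamedTuple("Point", [("x", int), ("y", int)])

def origin() -> Point:
    return Point(0, 0)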
+ expr.accept(self) + internal_name, info, tvar_defs = self.named_tuple_analyzer.check_namedtuple( + expr, None, self.is_func_scope() + ) + if tvar_defs: + self.fail("Generic named tuples are not supported for legacy class syntax", expr) + self.note("Use either Python 3 class syntax, or the assignment syntax", expr) + if internal_name is None: + # Some form of namedtuple is the only valid type that looks like a call + # expression. This isn't a valid type. + raise TypeTranslationError() + elif not info: + self.defer(expr) + return None + assert info.tuple_type, "NamedTuple without tuple type" + fallback = Instance(info, []) + return TupleType(info.tuple_type.items, fallback=fallback) + typ = self.expr_to_unanalyzed_type(expr) + return self.anal_type( + typ, + report_invalid_types=report_invalid_types, + allow_placeholder=allow_placeholder, + allow_type_any=allow_type_any, + allow_unbound_tvars=allow_unbound_tvars, + allow_param_spec_literals=allow_param_spec_literals, + allow_unpack=allow_unpack, + ) + + def analyze_type_expr(self, expr: Expression) -> None: + # There are certain expressions that mypy does not need to semantically analyze, + # since they analyzed solely as type. (For example, indexes in type alias definitions + # and base classes in class defs). External consumers of the mypy AST may need + # them semantically analyzed, however, if they need to treat it as an expression + # and not a type. (Which is to say, mypyc needs to do this.) Do the analysis + # in a fresh tvar scope in order to suppress any errors about using type variables. + with self.tvar_scope_frame(TypeVarLikeScope()), self.allow_unbound_tvars_set(): + expr.accept(self) + + def type_analyzer( + self, + *, + tvar_scope: TypeVarLikeScope | None = None, + allow_tuple_literal: bool = False, + allow_unbound_tvars: bool = False, + allow_placeholder: bool = False, + allow_typed_dict_special_forms: bool = False, + allow_final: bool = False, + allow_param_spec_literals: bool = False, + allow_unpack: bool = False, + report_invalid_types: bool = True, + prohibit_self_type: str | None = None, + prohibit_special_class_field_types: str | None = None, + allow_type_any: bool = False, + ) -> TypeAnalyser: + if tvar_scope is None: + tvar_scope = self.tvar_scope + tpan = TypeAnalyser( + self, + tvar_scope, + self.plugin, + self.options, + self.cur_mod_node, + self.is_typeshed_stub_file, + allow_unbound_tvars=allow_unbound_tvars, + allow_tuple_literal=allow_tuple_literal, + report_invalid_types=report_invalid_types, + allow_placeholder=allow_placeholder, + allow_typed_dict_special_forms=allow_typed_dict_special_forms, + allow_final=allow_final, + allow_param_spec_literals=allow_param_spec_literals, + allow_unpack=allow_unpack, + prohibit_self_type=prohibit_self_type, + prohibit_special_class_field_types=prohibit_special_class_field_types, + allow_type_any=allow_type_any, + ) + tpan.in_dynamic_func = bool(self.function_stack and self.function_stack[-1].is_dynamic()) + tpan.global_scope = not self.type and not self.function_stack + return tpan + + def expr_to_unanalyzed_type(self, node: Expression, allow_unpack: bool = False) -> ProperType: + return expr_to_unanalyzed_type( + node, self.options, self.is_stub_file, allow_unpack=allow_unpack + ) + + def anal_type( + self, + typ: Type, + *, + tvar_scope: TypeVarLikeScope | None = None, + allow_tuple_literal: bool = False, + allow_unbound_tvars: bool = False, + allow_placeholder: bool = False, + allow_typed_dict_special_forms: bool = False, + allow_final: bool = False, + 
allow_param_spec_literals: bool = False, + allow_unpack: bool = False, + report_invalid_types: bool = True, + prohibit_self_type: str | None = None, + prohibit_special_class_field_types: str | None = None, + allow_type_any: bool = False, + ) -> Type | None: + """Semantically analyze a type. + + Args: + typ: Type to analyze (if already analyzed, this is a no-op) + allow_placeholder: If True, may return PlaceholderType if + encountering an incomplete definition + + Return None only if some part of the type couldn't be bound *and* it + referred to an incomplete namespace or definition. In this case also + defer as needed. During a final iteration this won't return None; + instead report an error if the type can't be analyzed and return + AnyType. + + In case of other errors, report an error message and return AnyType. + + NOTE: The caller shouldn't defer even if this returns None or a + placeholder type. + """ + has_self_type = find_self_type( + typ, lambda name: self.lookup_qualified(name, typ, suppress_errors=True) + ) + if has_self_type and self.type and prohibit_self_type is None: + self.setup_self_type() + a = self.type_analyzer( + tvar_scope=tvar_scope, + allow_unbound_tvars=allow_unbound_tvars, + allow_tuple_literal=allow_tuple_literal, + allow_placeholder=allow_placeholder, + allow_typed_dict_special_forms=allow_typed_dict_special_forms, + allow_final=allow_final, + allow_param_spec_literals=allow_param_spec_literals, + allow_unpack=allow_unpack, + report_invalid_types=report_invalid_types, + prohibit_self_type=prohibit_self_type, + prohibit_special_class_field_types=prohibit_special_class_field_types, + allow_type_any=allow_type_any, + ) + tag = self.track_incomplete_refs() + typ = typ.accept(a) + if self.found_incomplete_ref(tag): + # Something could not be bound yet. + return None + self.add_type_alias_deps(a.aliases_used) + return typ + + def class_type(self, self_type: Type) -> Type: + return TypeType.make_normalized(self_type) + + def schedule_patch(self, priority: int, patch: Callable[[], None]) -> None: + self.patches.append((priority, patch)) + + def report_hang(self) -> None: + print("Deferral trace:") + for mod, line in self.deferral_debug_context: + print(f" {mod}:{line}") + self.errors.report( + -1, + -1, + "INTERNAL ERROR: maximum semantic analysis iteration count reached", + blocker=True, + ) + + def add_plugin_dependency(self, trigger: str, target: str | None = None) -> None: + """Add dependency from trigger to a target. + + If the target is not given explicitly, use the current target. + """ + if target is None: + target = self.scope.current_target() + self.cur_mod_node.plugin_deps.setdefault(trigger, set()).add(target) + + def add_type_alias_deps( + self, aliases_used: Collection[str], target: str | None = None + ) -> None: + """Add full names of type aliases on which the current node depends. + + This is used by fine-grained incremental mode to re-check the corresponding nodes. + If `target` is None, then the target node used will be the current scope. + """ + if not aliases_used: + return + if target is None: + target = self.scope.current_target() + self.cur_mod_node.alias_deps[target].update(aliases_used) + + def is_mangled_global(self, name: str) -> bool: + # A global is mangled if there exists at least one renamed variant. + return unmangle(name) + "'" in self.globals + + def is_initial_mangled_global(self, name: str) -> bool: + # If there are renamed definitions for a global, the first one has exactly one prime. 
+ return name == unmangle(name) + "'" + + def parse_bool(self, expr: Expression) -> bool | None: + # This wrapper is preserved for plugins. + return parse_bool(expr) + + def parse_str_literal(self, expr: Expression) -> str | None: + """Attempt to find the string literal value of the given expression. Returns `None` if no + literal value can be found.""" + if isinstance(expr, StrExpr): + return expr.value + if isinstance(expr, RefExpr) and isinstance(expr.node, Var) and expr.node.type is not None: + values = try_getting_str_literals_from_type(expr.node.type) + if values is not None and len(values) == 1: + return values[0] + return None + + def set_future_import_flags(self, module_name: str) -> None: + if module_name in FUTURE_IMPORTS: + self.modules[self.cur_mod_id].future_import_flags.add(FUTURE_IMPORTS[module_name]) + + def is_future_flag_set(self, flag: str) -> bool: + return self.modules[self.cur_mod_id].is_future_flag_set(flag) + + def parse_dataclass_transform_spec(self, call: CallExpr) -> DataclassTransformSpec: + """Build a DataclassTransformSpec from the arguments passed to the given call to + typing.dataclass_transform.""" + parameters = DataclassTransformSpec() + for name, value in zip(call.arg_names, call.args): + # Skip any positional args. Note that any such args are invalid, but we can rely on + # typeshed to enforce this and don't need an additional error here. + if name is None: + continue + + # field_specifiers is currently the only non-boolean argument; check for it first so + # so the rest of the block can fail through to handling booleans + if name == "field_specifiers": + parameters.field_specifiers = self.parse_dataclass_transform_field_specifiers( + value + ) + continue + + boolean = require_bool_literal_argument(self, value, name) + if boolean is None: + continue + + if name == "eq_default": + parameters.eq_default = boolean + elif name == "order_default": + parameters.order_default = boolean + elif name == "kw_only_default": + parameters.kw_only_default = boolean + elif name == "frozen_default": + parameters.frozen_default = boolean + else: + self.fail(f'Unrecognized dataclass_transform parameter "{name}"', call) + + return parameters + + def parse_dataclass_transform_field_specifiers(self, arg: Expression) -> tuple[str, ...]: + if not isinstance(arg, TupleExpr): + self.fail('"field_specifiers" argument must be a tuple literal', arg) + return () + + names = [] + for specifier in arg.items: + if not isinstance(specifier, RefExpr): + self.fail('"field_specifiers" must only contain identifiers', specifier) + return () + names.append(specifier.fullname) + return tuple(names) + + # leafs + def visit_int_expr(self, o: IntExpr, /) -> None: + return None + + def visit_str_expr(self, o: StrExpr, /) -> None: + return None + + def visit_bytes_expr(self, o: BytesExpr, /) -> None: + return None + + def visit_float_expr(self, o: FloatExpr, /) -> None: + return None + + def visit_complex_expr(self, o: ComplexExpr, /) -> None: + return None + + def visit_ellipsis(self, o: EllipsisExpr, /) -> None: + return None + + def visit_temp_node(self, o: TempNode, /) -> None: + return None + + def visit_pass_stmt(self, o: PassStmt, /) -> None: + return None + + def visit_singleton_pattern(self, o: SingletonPattern, /) -> None: + return None + + def try_parse_as_type_expression(self, maybe_type_expr: Expression) -> None: + """Try to parse a value Expression as a type expression. + If success then annotate the Expression with the type that it spells. 
+ If fails then emit no errors and take no further action. + + A value expression that is parsable as a type expression may be used + where a TypeForm is expected to represent the spelled type. + + Unlike ExpressionChecker.try_parse_as_type_expression() + (used in the later TypeChecker pass), this function can recognize + ALL kinds of type expressions, including type expressions containing + string annotations. + + If the provided Expression will be parsable later in + ExpressionChecker.try_parse_as_type_expression(), this function will + skip parsing the Expression to improve performance, because the later + function is called many fewer times (i.e. only lazily in a rare TypeForm + type context) than this function is called (i.e. eagerly for EVERY + expression in certain syntactic positions). + """ + # Count every call to this method for profiling + self.type_expression_parse_count += 1 + + # Bail ASAP if the Expression matches a common pattern that cannot possibly + # be a valid type expression, because this function is called very frequently + if not isinstance(maybe_type_expr, MaybeTypeExpression): + return + # Check types in order from most common to least common, for best performance + if isinstance(maybe_type_expr, (NameExpr, MemberExpr)): + # Defer parsing to the later TypeChecker pass, + # and only lazily in contexts where a TypeForm is expected + return + elif isinstance(maybe_type_expr, StrExpr): + str_value = maybe_type_expr.value # cache + # Filter out string literals with common patterns that could not + # possibly be in a type expression + if _MULTIPLE_WORDS_NONTYPE_RE.match(str_value): + # A common pattern in string literals containing a sentence. + # But cannot be a type expression. + maybe_type_expr.as_type = None + return + # Filter out string literals which look like an identifier but + # cannot be a type expression, for a few common reasons + if _IDENTIFIER_RE.fullmatch(str_value): + sym = self.lookup(str_value, UnboundType(str_value), suppress_errors=True) + if sym is None: + # Does not refer to anything in the local symbol table + maybe_type_expr.as_type = None + return + else: # sym is not None + node = sym.node # cache + if isinstance(node, PlaceholderNode) and not node.becomes_typeinfo: + # Either: + # 1. f'Cannot resolve name "{t.name}" (possible cyclic definition)' + # 2. Reference to an unknown placeholder node. + maybe_type_expr.as_type = None + return + unbound_tvar_or_paramspec = ( + isinstance(node, (TypeVarExpr, TypeVarTupleExpr, ParamSpecExpr)) + and self.tvar_scope.get_binding(sym) is None + ) + if unbound_tvar_or_paramspec: + # Either: + # 1. unbound_tvar: 'Type variable "{}" is unbound' [codes.VALID_TYPE] + # 2. unbound_paramspec: f'ParamSpec "{name}" is unbound' [codes.VALID_TYPE] + maybe_type_expr.as_type = None + return + else: # does not look like an identifier + if '"' in str_value or "'" in str_value: + # Only valid inside a Literal[...] type + if "[" not in str_value: + # Cannot be a Literal[...] type + maybe_type_expr.as_type = None + return + elif str_value == "": + # Empty string is not a valid type + maybe_type_expr.as_type = None + return + elif isinstance(maybe_type_expr, IndexExpr): + if isinstance(maybe_type_expr.base, NameExpr): + if isinstance( + maybe_type_expr.base.node, Var + ) and not self.var_is_typing_special_form(maybe_type_expr.base.node): + # Leftmost part of IndexExpr refers to a Var. Not a valid type. 
+ maybe_type_expr.as_type = None + return + elif isinstance(maybe_type_expr.base, MemberExpr): + next_leftmost = maybe_type_expr.base + while True: + leftmost = next_leftmost.expr + if not isinstance(leftmost, MemberExpr): + break + next_leftmost = leftmost + if isinstance(leftmost, NameExpr): + if isinstance(leftmost.node, Var) and not self.var_is_typing_special_form( + leftmost.node + ): + # Leftmost part of IndexExpr refers to a Var. Not a valid type. + maybe_type_expr.as_type = None + return + else: + # Leftmost part of IndexExpr is not a NameExpr. Not a valid type. + maybe_type_expr.as_type = None + return + else: + # IndexExpr base is neither a NameExpr nor MemberExpr. Not a valid type. + maybe_type_expr.as_type = None + return + elif isinstance(maybe_type_expr, OpExpr): + if maybe_type_expr.op != "|": + # Binary operators other than '|' never spell a valid type + maybe_type_expr.as_type = None + return + else: + assert_never(maybe_type_expr) + + with self.isolated_error_analysis(): + try: + t = self.expr_to_analyzed_type(maybe_type_expr) + if self.errors.is_errors(): + t = None + except TypeTranslationError: + # Not a type expression + t = None + + if DEBUG_TYPE_EXPRESSION_FULL_PARSE_FAILURES and t is None: + original_flushed_files = set(self.errors.flushed_files) # save + try: + errors = self.errors.new_messages() # capture + finally: + self.errors.flushed_files = original_flushed_files # restore + + print( + f"SA.try_parse_as_type_expression: Full parse failure: {maybe_type_expr}, errors={errors!r}" + ) + + # Count full parse attempts for profiling + if t is not None: + self.type_expression_full_parse_success_count += 1 + else: + self.type_expression_full_parse_failure_count += 1 + + maybe_type_expr.as_type = t + + @staticmethod + def var_is_typing_special_form(var: Var) -> bool: + return var.fullname.startswith("typing") and var.fullname in [ + "typing.Annotated", + "typing_extensions.Annotated", + "typing.Callable", + "typing.Literal", + "typing_extensions.Literal", + "typing.Optional", + "typing.TypeGuard", + "typing_extensions.TypeGuard", + "typing.TypeIs", + "typing_extensions.TypeIs", + "typing.Union", + ] + + @contextmanager + def isolated_error_analysis(self) -> Iterator[None]: + """ + Context manager for performing error analysis that should not + affect the main SemanticAnalyzer state. + + Upon entering this context, `self.errors` will start empty. + Within this context, you can analyze expressions for errors. + Upon exiting this context, the original `self.errors` will be restored, + and any errors collected during the analysis will be discarded. 
+ """ + # Save state + original_errors = self.errors + original_num_incomplete_refs = self.num_incomplete_refs + original_progress = self.progress + original_deferred = self.deferred + original_deferral_debug_context_len = len(self.deferral_debug_context) + + self.errors = Errors(Options()) + try: + yield + finally: + # Restore state + self.errors = original_errors + self.num_incomplete_refs = original_num_incomplete_refs + self.progress = original_progress + self.deferred = original_deferred + del self.deferral_debug_context[original_deferral_debug_context_len:] + + +def replace_implicit_first_type(sig: FunctionLike, new: Type) -> FunctionLike: + if isinstance(sig, CallableType): + if len(sig.arg_types) == 0: + return sig + return sig.copy_modified(arg_types=[new] + sig.arg_types[1:]) + elif isinstance(sig, Overloaded): + return Overloaded( + [cast(CallableType, replace_implicit_first_type(i, new)) for i in sig.items] + ) + else: + assert False + + +def refers_to_fullname(node: Expression, fullnames: str | tuple[str, ...]) -> bool: + """Is node a name or member expression with the given full name?""" + if not isinstance(fullnames, tuple): + fullnames = (fullnames,) + + if not isinstance(node, RefExpr): + return False + if node.fullname in fullnames: + return True + if isinstance(node.node, TypeAlias) and not node.node.python_3_12_type_alias: + return is_named_instance(node.node.target, fullnames) + return False + + +def refers_to_class_or_function(node: Expression) -> bool: + """Does semantically analyzed node refer to a class?""" + return isinstance(node, RefExpr) and isinstance( + node.node, (TypeInfo, FuncDef, OverloadedFuncDef) + ) + + +def find_duplicate(list: list[T]) -> T | None: + """If the list has duplicates, return one of the duplicates. + + Otherwise, return None. + """ + for i in range(1, len(list)): + if list[i] in list[:i]: + return list[i] + return None + + +def remove_imported_names_from_symtable(names: SymbolTable, module: str) -> None: + """Remove all imported names from the symbol table of a module.""" + removed: list[str] = [] + for name, node in names.items(): + if node.node is None: + continue + fullname = node.node.fullname + prefix = fullname[: fullname.rfind(".")] + if prefix != module: + removed.append(name) + for name in removed: + del names[name] + + +def make_any_non_explicit(t: Type) -> Type: + """Replace all Any types within in with Any that has attribute 'explicit' set to False""" + return t.accept(MakeAnyNonExplicit()) + + +class MakeAnyNonExplicit(TrivialSyntheticTypeTranslator): + def visit_any(self, t: AnyType) -> Type: + if t.type_of_any == TypeOfAny.explicit: + return t.copy_modified(TypeOfAny.special_form) + return t + + def visit_type_alias_type(self, t: TypeAliasType) -> Type: + return t.copy_modified(args=[a.accept(self) for a in t.args]) + + +def make_any_non_unimported(t: Type) -> Type: + """Replace all Any types that come from unimported types with special form Any.""" + return t.accept(MakeAnyNonUnimported()) + + +class MakeAnyNonUnimported(TrivialSyntheticTypeTranslator): + def visit_any(self, t: AnyType) -> Type: + if t.type_of_any == TypeOfAny.from_unimported_type: + return t.copy_modified(TypeOfAny.special_form, missing_import_name=None) + return t + + def visit_type_alias_type(self, t: TypeAliasType) -> Type: + return t.copy_modified(args=[a.accept(self) for a in t.args]) + + +def apply_semantic_analyzer_patches(patches: list[tuple[int, Callable[[], None]]]) -> None: + """Call patch callbacks in the right order. 
+ + This should happen after semantic analyzer pass 3. + """ + patches_by_priority = sorted(patches, key=lambda x: x[0]) + for priority, patch_func in patches_by_priority: + patch_func() + + +def names_modified_by_assignment(s: AssignmentStmt) -> list[NameExpr]: + """Return all unqualified (short) names assigned to in an assignment statement.""" + result: list[NameExpr] = [] + for lvalue in s.lvalues: + result += names_modified_in_lvalue(lvalue) + return result + + +def names_modified_in_lvalue(lvalue: Lvalue) -> list[NameExpr]: + """Return all NameExpr assignment targets in an Lvalue.""" + if isinstance(lvalue, NameExpr): + return [lvalue] + elif isinstance(lvalue, StarExpr): + return names_modified_in_lvalue(lvalue.expr) + elif isinstance(lvalue, (ListExpr, TupleExpr)): + result: list[NameExpr] = [] + for item in lvalue.items: + result += names_modified_in_lvalue(item) + return result + return [] + + +def is_same_var_from_getattr(n1: SymbolNode | None, n2: SymbolNode | None) -> bool: + """Do n1 and n2 refer to the same Var derived from module-level __getattr__?""" + return ( + isinstance(n1, Var) + and n1.from_module_getattr + and isinstance(n2, Var) + and n2.from_module_getattr + and n1.fullname == n2.fullname + ) + + +def dummy_context() -> Context: + return TempNode(AnyType(TypeOfAny.special_form)) + + +def is_valid_replacement(old: SymbolTableNode, new: SymbolTableNode) -> bool: + """Can symbol table node replace an existing one? + + These are the only valid cases: + + 1. Placeholder gets replaced with a non-placeholder + 2. Placeholder that isn't known to become type replaced with a + placeholder that can become a type + """ + if isinstance(old.node, PlaceholderNode): + if isinstance(new.node, PlaceholderNode): + return not old.node.becomes_typeinfo and new.node.becomes_typeinfo + else: + return True + return False + + +def is_same_symbol(a: SymbolNode | None, b: SymbolNode | None) -> bool: + return ( + a == b + or (isinstance(a, PlaceholderNode) and isinstance(b, PlaceholderNode)) + or is_same_var_from_getattr(a, b) + ) + + +def is_trivial_body(block: Block) -> bool: + """Returns 'true' if the given body is "trivial" -- if it contains just a "pass", + "..." (ellipsis), or "raise NotImplementedError()". A trivial body may also + start with a statement containing just a string (e.g. a docstring). + + Note: Functions that raise other kinds of exceptions do not count as + "trivial". We use this function to help us determine when it's ok to + relax certain checks on body, but functions that raise arbitrary exceptions + are more likely to do non-trivial work. For example: + + def halt(self, reason: str = ...) -> NoReturn: + raise MyCustomError("Fatal error: " + reason, self.line, self.context) + + A function that raises just NotImplementedError is much less likely to be + this complex. + + Note: If you update this, you may also need to update + mypy.fastparse.is_possible_trivial_body! + """ + body = block.body + if not body: + # Functions have empty bodies only if the body is stripped or the function is + # generated or deserialized. In these cases the body is unknown. + return False + + # Skip a docstring + if isinstance(body[0], ExpressionStmt) and isinstance(body[0].expr, StrExpr): + body = block.body[1:] + + if len(body) == 0: + # There's only a docstring (or no body at all). 
+ return True + elif len(body) > 1: + return False + + stmt = body[0] + + if isinstance(stmt, RaiseStmt): + expr = stmt.expr + if expr is None: + return False + if isinstance(expr, CallExpr): + expr = expr.callee + + return isinstance(expr, NameExpr) and expr.fullname == "builtins.NotImplementedError" + + return isinstance(stmt, PassStmt) or ( + isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, EllipsisExpr) + ) diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_classprop.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/semanal_classprop.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..634fdc3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/semanal_classprop.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_classprop.py b/.venv/lib/python3.12/site-packages/mypy/semanal_classprop.py new file mode 100644 index 0000000..c5ad341 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/semanal_classprop.py @@ -0,0 +1,188 @@ +"""Calculate some properties of classes. + +These happen after semantic analysis and before type checking. +""" + +from __future__ import annotations + +from typing import Final + +from mypy.errors import Errors +from mypy.nodes import ( + IMPLICITLY_ABSTRACT, + IS_ABSTRACT, + CallExpr, + Decorator, + FuncDef, + Node, + OverloadedFuncDef, + PromoteExpr, + SymbolTable, + TypeInfo, + Var, +) +from mypy.options import Options +from mypy.types import MYPYC_NATIVE_INT_NAMES, Instance, ProperType + +# Hard coded type promotions (shared between all Python versions). +# These add extra ad-hoc edges to the subtyping relation. For example, +# int is considered a subtype of float, even though there is no +# subclass relationship. +# Note that the bytearray -> bytes promotion is a little unsafe +# as some functions only accept bytes objects. Here convenience +# trumps safety. +TYPE_PROMOTIONS: Final = { + "builtins.int": "float", + "builtins.float": "complex", + "builtins.bytearray": "bytes", + "builtins.memoryview": "bytes", +} + + +def calculate_class_abstract_status(typ: TypeInfo, is_stub_file: bool, errors: Errors) -> None: + """Calculate abstract status of a class. + + Set is_abstract of the type to True if the type has an unimplemented + abstract attribute. Also compute a list of abstract attributes. + Report error is required ABCMeta metaclass is missing. + """ + typ.is_abstract = False + typ.abstract_attributes = [] + if typ.typeddict_type: + return # TypedDict can't be abstract + concrete: set[str] = set() + # List of abstract attributes together with their abstract status + abstract: list[tuple[str, int]] = [] + abstract_in_this_class: list[str] = [] + if typ.is_newtype: + # Special case: NewTypes are considered as always non-abstract, so they can be used as: + # Config = NewType('Config', Mapping[str, str]) + # default = Config({'cannot': 'modify'}) # OK + return + for base in typ.mro: + for name, symnode in base.names.items(): + node = symnode.node + if isinstance(node, OverloadedFuncDef): + # Unwrap an overloaded function definition. We can just + # check arbitrarily the first overload item. If the + # different items have a different abstract status, there + # should be an error reported elsewhere. 
+ if node.items: # can be empty for invalid overloads + func: Node | None = node.items[0] + else: + func = None + else: + func = node + if isinstance(func, Decorator): + func = func.func + if isinstance(func, FuncDef): + if ( + func.abstract_status in (IS_ABSTRACT, IMPLICITLY_ABSTRACT) + and name not in concrete + ): + typ.is_abstract = True + abstract.append((name, func.abstract_status)) + if base is typ: + abstract_in_this_class.append(name) + elif isinstance(node, Var): + if node.is_abstract_var and name not in concrete: + typ.is_abstract = True + abstract.append((name, IS_ABSTRACT)) + if base is typ: + abstract_in_this_class.append(name) + concrete.add(name) + # In stubs, abstract classes need to be explicitly marked because it is too + # easy to accidentally leave a concrete class abstract by forgetting to + # implement some methods. + typ.abstract_attributes = sorted(abstract) + if is_stub_file: + if typ.declared_metaclass and typ.declared_metaclass.type.has_base("abc.ABCMeta"): + return + if typ.is_protocol: + return + if abstract and not abstract_in_this_class: + + def report(message: str, severity: str) -> None: + errors.report(typ.line, typ.column, message, severity=severity) + + attrs = ", ".join(f'"{attr}"' for attr, _ in sorted(abstract)) + report(f"Class {typ.fullname} has abstract attributes {attrs}", "error") + report( + "If it is meant to be abstract, add 'abc.ABCMeta' as an explicit metaclass", "note" + ) + if typ.is_final and abstract: + attrs = ", ".join(f'"{attr}"' for attr, _ in sorted(abstract)) + errors.report( + typ.line, typ.column, f"Final class {typ.fullname} has abstract attributes {attrs}" + ) + + +def check_protocol_status(info: TypeInfo, errors: Errors) -> None: + """Check that all classes in MRO of a protocol are protocols""" + if info.is_protocol: + for type in info.bases: + if not type.type.is_protocol and type.type.fullname != "builtins.object": + errors.report( + info.line, + info.column, + "All bases of a protocol must be protocols", + severity="error", + ) + + +def calculate_class_vars(info: TypeInfo) -> None: + """Try to infer additional class variables. + + Subclass attribute assignments with no type annotation are assumed + to be classvar if overriding a declared classvar from the base + class. + + This must happen after the main semantic analysis pass, since + this depends on base class bodies having been fully analyzed. + """ + for name, sym in info.names.items(): + node = sym.node + if isinstance(node, Var) and node.info and node.is_inferred and not node.is_classvar: + for base in info.mro[1:]: + member = base.names.get(name) + if member is not None and isinstance(member.node, Var) and member.node.is_classvar: + node.is_classvar = True + + +def add_type_promotion( + info: TypeInfo, module_names: SymbolTable, options: Options, builtin_names: SymbolTable +) -> None: + """Setup extra, ad-hoc subtyping relationships between classes (promotion). + + This includes things like 'int' being compatible with 'float'. + """ + defn = info.defn + promote_targets: list[ProperType] = [] + for decorator in defn.decorators: + if isinstance(decorator, CallExpr): + analyzed = decorator.analyzed + if isinstance(analyzed, PromoteExpr): + # _promote class decorator (undocumented feature). 
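+                # Each _promote(T) decorator records T as an extra promotion target
+                # for this class; the collected targets are attached to info._promote below.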
+ promote_targets.append(analyzed.type) + if not promote_targets: + if defn.fullname in TYPE_PROMOTIONS: + target_sym = module_names.get(TYPE_PROMOTIONS[defn.fullname]) + if defn.fullname == "builtins.bytearray" and options.disable_bytearray_promotion: + target_sym = None + elif defn.fullname == "builtins.memoryview" and options.disable_memoryview_promotion: + target_sym = None + # With test stubs, the target may not exist. + if target_sym: + target_info = target_sym.node + assert isinstance(target_info, TypeInfo) + promote_targets.append(Instance(target_info, [])) + # Special case the promotions between 'int' and native integer types. + # These have promotions going both ways, such as from 'int' to 'i64' + # and 'i64' to 'int', for convenience. + if defn.fullname in MYPYC_NATIVE_INT_NAMES: + int_sym = builtin_names["int"] + assert isinstance(int_sym.node, TypeInfo) + int_sym.node._promote.append(Instance(defn.info, [])) + defn.info.alt_promote = Instance(int_sym.node, []) + if promote_targets: + defn.info._promote.extend(promote_targets) diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_enum.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/semanal_enum.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..370818e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/semanal_enum.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_enum.py b/.venv/lib/python3.12/site-packages/mypy/semanal_enum.py new file mode 100644 index 0000000..b1e267b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/semanal_enum.py @@ -0,0 +1,269 @@ +"""Semantic analysis of call-based Enum definitions. + +This is conceptually part of mypy.semanal (semantic analyzer pass 2). +""" + +from __future__ import annotations + +from typing import Final, cast + +from mypy.nodes import ( + ARG_NAMED, + ARG_POS, + EXCLUDED_ENUM_ATTRIBUTES, + MDEF, + AssignmentStmt, + CallExpr, + Context, + DictExpr, + EnumCallExpr, + Expression, + ListExpr, + MemberExpr, + NameExpr, + RefExpr, + StrExpr, + SymbolTableNode, + TupleExpr, + TypeInfo, + Var, + is_StrExpr_list, +) +from mypy.options import Options +from mypy.semanal_shared import SemanticAnalyzerInterface +from mypy.types import LiteralType, get_proper_type + +# Note: 'enum.EnumMeta' is deliberately excluded from this list. Classes that directly use +# enum.EnumMeta do not necessarily automatically have the 'name' and 'value' attributes. +ENUM_BASES: Final = frozenset( + ("enum.Enum", "enum.IntEnum", "enum.Flag", "enum.IntFlag", "enum.StrEnum") +) +ENUM_SPECIAL_PROPS: Final = frozenset( + ( + "name", + "value", + "_name_", + "_value_", + *EXCLUDED_ENUM_ATTRIBUTES, + # Also attributes from `object`: + "__module__", + "__annotations__", + "__doc__", + "__slots__", + "__dict__", + ) +) + + +class EnumCallAnalyzer: + def __init__(self, options: Options, api: SemanticAnalyzerInterface) -> None: + self.options = options + self.api = api + + def process_enum_call(self, s: AssignmentStmt, is_func_scope: bool) -> bool: + """Check if s defines an Enum; if yes, store the definition in symbol table. + + Return True if this looks like an Enum definition (but maybe with errors), + otherwise return False. 
+ """ + if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], (NameExpr, MemberExpr)): + return False + lvalue = s.lvalues[0] + name = lvalue.name + enum_call = self.check_enum_call(s.rvalue, name, is_func_scope) + if enum_call is None: + return False + if isinstance(lvalue, MemberExpr): + self.fail("Enum type as attribute is not supported", lvalue) + return False + # Yes, it's a valid Enum definition. Add it to the symbol table. + self.api.add_symbol(name, enum_call, s) + return True + + def check_enum_call( + self, node: Expression, var_name: str, is_func_scope: bool + ) -> TypeInfo | None: + """Check if a call defines an Enum. + + Example: + + A = enum.Enum('A', 'foo bar') + + is equivalent to: + + class A(enum.Enum): + foo = 1 + bar = 2 + """ + if not isinstance(node, CallExpr): + return None + call = node + callee = call.callee + if not isinstance(callee, RefExpr): + return None + fullname = callee.fullname + if fullname not in ENUM_BASES: + return None + + new_class_name, items, values, ok = self.parse_enum_call_args( + call, fullname.split(".")[-1] + ) + if not ok: + # Error. Construct dummy return value. + name = var_name + if is_func_scope: + name += "@" + str(call.line) + info = self.build_enum_call_typeinfo(name, [], fullname, node.line) + else: + if new_class_name != var_name: + msg = f'String argument 1 "{new_class_name}" to {fullname}(...) does not match variable name "{var_name}"' + self.fail(msg, call) + + name = cast(StrExpr, call.args[0]).value + if name != var_name or is_func_scope: + # Give it a unique name derived from the line number. + name += "@" + str(call.line) + info = self.build_enum_call_typeinfo(name, items, fullname, call.line) + # Store generated TypeInfo under both names, see semanal_namedtuple for more details. + if name != var_name or is_func_scope: + self.api.add_symbol_skip_local(name, info) + call.analyzed = EnumCallExpr(info, items, values) + call.analyzed.set_line(call) + info.line = node.line + return info + + def build_enum_call_typeinfo( + self, name: str, items: list[str], fullname: str, line: int + ) -> TypeInfo: + base = self.api.named_type_or_none(fullname) + assert base is not None + info = self.api.basic_new_typeinfo(name, base, line) + info.metaclass_type = info.calculate_metaclass_type() + info.is_enum = True + for item in items: + var = Var(item) + var.info = info + var.is_property = True + # When an enum is created by its functional form `Enum(name, values)` + # - if it is a string it is first split by commas/whitespace + # - if it is an iterable of single items each item is assigned a value starting at `start` + # - if it is an iterable of (name, value) then the given values will be used + # either way, each item should be treated as if it has an explicit value. + var.has_explicit_value = True + var._fullname = f"{info.fullname}.{item}" + info.names[item] = SymbolTableNode(MDEF, var) + return info + + def parse_enum_call_args( + self, call: CallExpr, class_name: str + ) -> tuple[str, list[str], list[Expression | None], bool]: + """Parse arguments of an Enum call. + + Return a tuple of fields, values, was there an error. 
+ """ + args = call.args + if not all(arg_kind in [ARG_POS, ARG_NAMED] for arg_kind in call.arg_kinds): + return self.fail_enum_call_arg(f"Unexpected arguments to {class_name}()", call) + if len(args) < 2: + return self.fail_enum_call_arg(f"Too few arguments for {class_name}()", call) + if len(args) > 6: + return self.fail_enum_call_arg(f"Too many arguments for {class_name}()", call) + valid_name = [None, "value", "names", "module", "qualname", "type", "start"] + for arg_name in call.arg_names: + if arg_name not in valid_name: + self.fail_enum_call_arg(f'Unexpected keyword argument "{arg_name}"', call) + value, names = None, None + for arg_name, arg in zip(call.arg_names, args): + if arg_name == "value": + value = arg + if arg_name == "names": + names = arg + if value is None: + value = args[0] + if names is None: + names = args[1] + if not isinstance(value, StrExpr): + return self.fail_enum_call_arg( + f"{class_name}() expects a string literal as the first argument", call + ) + new_class_name = value.value + + items = [] + values: list[Expression | None] = [] + if isinstance(names, StrExpr): + fields = names.value + for field in fields.replace(",", " ").split(): + items.append(field) + elif isinstance(names, (TupleExpr, ListExpr)): + seq_items = names.items + if is_StrExpr_list(seq_items): + items = [seq_item.value for seq_item in seq_items] + elif all( + isinstance(seq_item, (TupleExpr, ListExpr)) + and len(seq_item.items) == 2 + and isinstance(seq_item.items[0], StrExpr) + for seq_item in seq_items + ): + for seq_item in seq_items: + assert isinstance(seq_item, (TupleExpr, ListExpr)) + name, value = seq_item.items + assert isinstance(name, StrExpr) + items.append(name.value) + values.append(value) + else: + return self.fail_enum_call_arg( + "%s() with tuple or list expects strings or (name, value) pairs" % class_name, + call, + ) + elif isinstance(names, DictExpr): + for key, value in names.items: + if not isinstance(key, StrExpr): + return self.fail_enum_call_arg( + f"{class_name}() with dict literal requires string literals", call + ) + items.append(key.value) + values.append(value) + elif isinstance(args[1], RefExpr) and isinstance(args[1].node, Var): + proper_type = get_proper_type(args[1].node.type) + if ( + proper_type is not None + and isinstance(proper_type, LiteralType) + and isinstance(proper_type.value, str) + ): + fields = proper_type.value + for field in fields.replace(",", " ").split(): + items.append(field) + elif args[1].node.is_final and isinstance(args[1].node.final_value, str): + fields = args[1].node.final_value + for field in fields.replace(",", " ").split(): + items.append(field) + else: + return self.fail_enum_call_arg( + "Second argument of %s() must be string, tuple, list or dict literal for mypy to determine Enum members" + % class_name, + call, + ) + else: + # TODO: Allow dict(x=1, y=2) as a substitute for {'x': 1, 'y': 2}? 
+ return self.fail_enum_call_arg( + "Second argument of %s() must be string, tuple, list or dict literal for mypy to determine Enum members" + % class_name, + call, + ) + if not items: + return self.fail_enum_call_arg(f"{class_name}() needs at least one item", call) + if not values: + values = [None] * len(items) + assert len(items) == len(values) + return new_class_name, items, values, True + + def fail_enum_call_arg( + self, message: str, context: Context + ) -> tuple[str, list[str], list[Expression | None], bool]: + self.fail(message, context) + return "", [], [], False + + # Helpers + + def fail(self, msg: str, ctx: Context) -> None: + self.api.fail(msg, ctx) diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_infer.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/semanal_infer.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..96ea82f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/semanal_infer.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_infer.py b/.venv/lib/python3.12/site-packages/mypy/semanal_infer.py new file mode 100644 index 0000000..89a073c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/semanal_infer.py @@ -0,0 +1,131 @@ +"""Simple type inference for decorated functions during semantic analysis.""" + +from __future__ import annotations + +from mypy.nodes import ARG_POS, CallExpr, Decorator, Expression, FuncDef, RefExpr, Var +from mypy.semanal_shared import SemanticAnalyzerInterface +from mypy.typeops import function_type +from mypy.types import ( + AnyType, + CallableType, + ProperType, + Type, + TypeOfAny, + TypeVarType, + get_proper_type, +) +from mypy.typevars import has_no_typevars + + +def infer_decorator_signature_if_simple( + dec: Decorator, analyzer: SemanticAnalyzerInterface +) -> None: + """Try to infer the type of the decorated function. + + This lets us resolve additional references to decorated functions + during type checking. Otherwise the type might not be available + when we need it, since module top levels can't be deferred. + + This basically uses a simple special-purpose type inference + engine just for decorators. + """ + if dec.var.is_property: + # Decorators are expected to have a callable type (it's a little odd). + # TODO: this may result in wrong type if @property is applied to decorated method. + if dec.func.type is None: + dec.var.type = CallableType( + [AnyType(TypeOfAny.special_form)], + [ARG_POS], + [None], + AnyType(TypeOfAny.special_form), + analyzer.named_type("builtins.function"), + name=dec.var.name, + ) + elif isinstance(dec.func.type, CallableType): + dec.var.type = dec.func.type + return + decorator_preserves_type = True + for expr in dec.decorators: + preserve_type = False + if isinstance(expr, RefExpr) and isinstance(expr.node, FuncDef): + if expr.fullname == "typing.no_type_check": + return + if expr.node.type and is_identity_signature(expr.node.type): + preserve_type = True + if not preserve_type: + decorator_preserves_type = False + break + if decorator_preserves_type: + # No non-identity decorators left. We can trivially infer the type + # of the function here. + dec.var.type = function_type(dec.func, analyzer.named_type("builtins.function")) + if dec.decorators: + return_type = calculate_return_type(dec.decorators[0]) + if return_type and isinstance(return_type, AnyType): + # The outermost decorator will return Any so we know the type of the + # decorated function. 
+ dec.var.type = AnyType(TypeOfAny.from_another_any, source_any=return_type) + sig = find_fixed_callable_return(dec.decorators[0]) + if sig: + # The outermost decorator always returns the same kind of function, + # so we know that this is the type of the decorated function. + orig_sig = function_type(dec.func, analyzer.named_type("builtins.function")) + sig.name = orig_sig.items[0].name + dec.var.type = sig + + +def is_identity_signature(sig: Type) -> bool: + """Is type a callable of form T -> T (where T is a type variable)?""" + sig = get_proper_type(sig) + if isinstance(sig, CallableType) and sig.arg_kinds == [ARG_POS]: + if isinstance(sig.arg_types[0], TypeVarType) and isinstance(sig.ret_type, TypeVarType): + return sig.arg_types[0].id == sig.ret_type.id + return False + + +def calculate_return_type(expr: Expression) -> ProperType | None: + """Return the return type if we can calculate it. + + This only uses information available during semantic analysis so this + will sometimes return None because of insufficient information (as + type inference hasn't run yet). + """ + if isinstance(expr, RefExpr): + if isinstance(expr.node, FuncDef): + typ = expr.node.type + if typ is None: + # No signature -> default to Any. + return AnyType(TypeOfAny.unannotated) + # Explicit Any return? + if isinstance(typ, CallableType): + return get_proper_type(typ.ret_type) + return None + elif isinstance(expr.node, Var): + return get_proper_type(expr.node.type) + elif isinstance(expr, CallExpr): + return calculate_return_type(expr.callee) + return None + + +def find_fixed_callable_return(expr: Expression) -> CallableType | None: + """Return the return type, if expression refers to a callable that returns a callable. + + But only do this if the return type has no type variables. Return None otherwise. + This approximates things a lot as this is supposed to be called before type checking + when full type information is not available yet. + """ + if isinstance(expr, RefExpr): + if isinstance(expr.node, FuncDef): + typ = expr.node.type + if typ: + if isinstance(typ, CallableType) and has_no_typevars(typ.ret_type): + ret_type = get_proper_type(typ.ret_type) + if isinstance(ret_type, CallableType): + return ret_type + elif isinstance(expr, CallExpr): + t = find_fixed_callable_return(expr.callee) + if t: + ret_type = get_proper_type(t.ret_type) + if isinstance(ret_type, CallableType): + return ret_type + return None diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_main.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/semanal_main.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..cc14c27 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/semanal_main.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_main.py b/.venv/lib/python3.12/site-packages/mypy/semanal_main.py new file mode 100644 index 0000000..b2c43e6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/semanal_main.py @@ -0,0 +1,572 @@ +"""Top-level logic for the semantic analyzer. + +The semantic analyzer binds names, resolves imports, detects various +special constructs that don't have dedicated AST nodes after parse +(such as 'cast' which looks like a call), populates symbol tables, and +performs various simple consistency checks. + +Semantic analysis of each SCC (strongly connected component; import +cycle) is performed in one unit. 
Each module is analyzed as multiple +separate *targets*; the module top level is one target and each function +is a target. Nested functions are not separate targets, however. This is +mostly identical to targets used by mypy daemon (but classes aren't +targets in semantic analysis). + +We first analyze each module top level in an SCC. If we encounter some +names that we can't bind because the target of the name may not have +been processed yet, we *defer* the current target for further +processing. Deferred targets will be analyzed additional times until +everything can be bound, or we reach a maximum number of iterations. + +We keep track of a set of incomplete namespaces, i.e. namespaces that we +haven't finished populating yet. References to these namespaces cause a +deferral if they can't be satisfied. Initially every module in the SCC +will be incomplete. +""" + +from __future__ import annotations + +from collections.abc import Iterator +from contextlib import nullcontext +from itertools import groupby +from typing import TYPE_CHECKING, Callable, Final, Optional, Union +from typing_extensions import TypeAlias as _TypeAlias + +import mypy.build +import mypy.state +from mypy.checker import FineGrainedDeferredNode +from mypy.errors import Errors +from mypy.nodes import Decorator, FuncDef, MypyFile, OverloadedFuncDef, TypeInfo, Var +from mypy.options import Options +from mypy.plugin import ClassDefContext +from mypy.plugins import dataclasses as dataclasses_plugin +from mypy.semanal import ( + SemanticAnalyzer, + apply_semantic_analyzer_patches, + remove_imported_names_from_symtable, +) +from mypy.semanal_classprop import ( + add_type_promotion, + calculate_class_abstract_status, + calculate_class_vars, + check_protocol_status, +) +from mypy.semanal_infer import infer_decorator_signature_if_simple +from mypy.semanal_shared import find_dataclass_transform_spec +from mypy.semanal_typeargs import TypeArgumentAnalyzer +from mypy.server.aststrip import SavedAttributes +from mypy.util import is_typeshed_file + +if TYPE_CHECKING: + from mypy.build import Graph, State + + +Patches: _TypeAlias = list[tuple[int, Callable[[], None]]] + + +# If we perform this many iterations, raise an exception since we are likely stuck. +MAX_ITERATIONS: Final = 20 + + +# Number of passes over core modules before going on to the rest of the builtin SCC. +CORE_WARMUP: Final = 2 +core_modules: Final = [ + "typing", + "_collections_abc", + "builtins", + "abc", + "collections", + "collections.abc", +] + + +def semantic_analysis_for_scc(graph: Graph, scc: list[str], errors: Errors) -> None: + """Perform semantic analysis for all modules in a SCC (import cycle). + + Assume that reachability analysis has already been performed. + + The scc will be processed roughly in the order the modules are included + in the list. + """ + patches: Patches = [] + # Note that functions can't define new module-level attributes + # using 'global x', since module top levels are fully processed + # before functions. This limitation is unlikely to go away soon. + process_top_levels(graph, scc, patches) + process_functions(graph, scc, patches) + # We use patch callbacks to fix up things when we expect relatively few + # callbacks to be required. + apply_semantic_analyzer_patches(patches) + # Run class decorator hooks (they requite complete MROs and no placeholders). + apply_class_plugin_hooks(graph, scc, errors) + # This pass might need fallbacks calculated above and the results of hooks. 
+ check_type_arguments(graph, scc, errors) + calculate_class_properties(graph, scc, errors) + check_blockers(graph, scc) + # Clean-up builtins, so that TypeVar etc. are not accessible without importing. + if "builtins" in scc: + cleanup_builtin_scc(graph["builtins"]) + + # Report TypeForm profiling stats + if len(scc) >= 1: + # Get manager from any state in the SCC (they all share the same manager) + manager = graph[scc[0]].manager + analyzer = manager.semantic_analyzer + manager.add_stats( + type_expression_parse_count=analyzer.type_expression_parse_count, + type_expression_full_parse_success_count=analyzer.type_expression_full_parse_success_count, + type_expression_full_parse_failure_count=analyzer.type_expression_full_parse_failure_count, + ) + + +def cleanup_builtin_scc(state: State) -> None: + """Remove imported names from builtins namespace. + + This way names imported from typing in builtins.pyi aren't available + by default (without importing them). We can only do this after processing + the whole SCC is finished, when the imported names aren't needed for + processing builtins.pyi itself. + """ + assert state.tree is not None + remove_imported_names_from_symtable(state.tree.names, "builtins") + + +def semantic_analysis_for_targets( + state: State, nodes: list[FineGrainedDeferredNode], graph: Graph, saved_attrs: SavedAttributes +) -> None: + """Semantically analyze only selected nodes in a given module. + + This essentially mirrors the logic of semantic_analysis_for_scc() + except that we process only some targets. This is used in fine grained + incremental mode, when propagating an update. + + The saved_attrs are implicitly declared instance attributes (attributes + defined on self) removed by AST stripper that may need to be reintroduced + here. They must be added before any methods are analyzed. + """ + patches: Patches = [] + if any(isinstance(n.node, MypyFile) for n in nodes): + # Process module top level first (if needed). + process_top_levels(graph, [state.id], patches) + restore_saved_attrs(saved_attrs) + analyzer = state.manager.semantic_analyzer + for n in nodes: + if isinstance(n.node, MypyFile): + # Already done above. + continue + process_top_level_function( + analyzer, state, state.id, n.node.fullname, n.node, n.active_typeinfo, patches + ) + apply_semantic_analyzer_patches(patches) + apply_class_plugin_hooks(graph, [state.id], state.manager.errors) + check_type_arguments_in_targets(nodes, state, state.manager.errors) + calculate_class_properties(graph, [state.id], state.manager.errors) + + +def restore_saved_attrs(saved_attrs: SavedAttributes) -> None: + """Restore instance variables removed during AST strip that haven't been added yet.""" + for (cdef, name), sym in saved_attrs.items(): + info = cdef.info + existing = info.get(name) + defined_in_this_class = name in info.names + assert isinstance(sym.node, Var) + # This needs to mimic the logic in SemanticAnalyzer.analyze_member_lvalue() + # regarding the existing variable in class body or in a superclass: + # If the attribute of self is not defined in superclasses, create a new Var. + if ( + existing is None + or + # (An abstract Var is considered as not defined.) + (isinstance(existing.node, Var) and existing.node.is_abstract_var) + or + # Also an explicit declaration on self creates a new Var unless + # there is already one defined in the class body. 
+ sym.node.explicit_self_type + and not defined_in_this_class + ): + info.names[name] = sym + + +def process_top_levels(graph: Graph, scc: list[str], patches: Patches) -> None: + # Process top levels until everything has been bound. + + # Reverse order of the scc so the first modules in the original list will be + # be processed first. This helps with performance. + scc = list(reversed(scc)) # noqa: FURB187 intentional copy + + # Initialize ASTs and symbol tables. + for id in scc: + state = graph[id] + assert state.tree is not None + state.manager.semantic_analyzer.prepare_file(state.tree) + + # Initially all namespaces in the SCC are incomplete (well they are empty). + state.manager.incomplete_namespaces.update(scc) + + worklist = scc.copy() + # HACK: process core stuff first. This is mostly needed to support defining + # named tuples in builtin SCC. + if all(m in worklist for m in core_modules): + worklist += list(reversed(core_modules)) * CORE_WARMUP + final_iteration = False + iteration = 0 + analyzer = state.manager.semantic_analyzer + analyzer.deferral_debug_context.clear() + + while worklist: + iteration += 1 + if iteration > MAX_ITERATIONS: + # Just pick some module inside the current SCC for error context. + assert state.tree is not None + with analyzer.file_context(state.tree, state.options): + analyzer.report_hang() + break + if final_iteration: + # Give up. It's impossible to bind all names. + state.manager.incomplete_namespaces.clear() + all_deferred: list[str] = [] + any_progress = False + while worklist: + next_id = worklist.pop() + state = graph[next_id] + assert state.tree is not None + deferred, incomplete, progress = semantic_analyze_target( + next_id, next_id, state, state.tree, None, final_iteration, patches + ) + all_deferred += deferred + any_progress = any_progress or progress + if not incomplete: + state.manager.incomplete_namespaces.discard(next_id) + if final_iteration: + assert not all_deferred, "Must not defer during final iteration" + # Reverse to process the targets in the same order on every iteration. This avoids + # processing the same target twice in a row, which is inefficient. + worklist = list(reversed(all_deferred)) + final_iteration = not any_progress + + +def order_by_subclassing(targets: list[FullTargetInfo]) -> Iterator[FullTargetInfo]: + """Make sure that superclass methods are always processed before subclass methods. + + This algorithm is not very optimal, but it is simple and should work well for lists + that are already almost correctly ordered. + """ + + # First, group the targets by their TypeInfo (since targets are sorted by line, + # we know that each TypeInfo will appear as group key only once). + grouped = [(k, list(g)) for k, g in groupby(targets, key=lambda x: x[3])] + remaining_infos = {info for info, _ in grouped if info is not None} + + next_group = 0 + while grouped: + if next_group >= len(grouped): + # This should never happen, if there is an MRO cycle, it should be reported + # and fixed during top-level processing. + raise ValueError("Cannot order method targets by MRO") + next_info, group = grouped[next_group] + if next_info is None: + # Trivial case, not methods but functions, process them straight away. + yield from group + grouped.pop(next_group) + continue + if any(parent in remaining_infos for parent in next_info.mro[1:]): + # We cannot process this method group yet, try a next one. 
+ next_group += 1 + continue + yield from group + grouped.pop(next_group) + remaining_infos.discard(next_info) + # Each time after processing a method group we should retry from start, + # since there may be some groups that are not blocked on parents anymore. + next_group = 0 + + +def process_functions(graph: Graph, scc: list[str], patches: Patches) -> None: + # Process functions. + all_targets = [] + for module in scc: + tree = graph[module].tree + assert tree is not None + # In principle, functions can be processed in arbitrary order, + # but _methods_ must be processed in the order they are defined, + # because some features (most notably partial types) depend on + # order of definitions on self. + # + # There can be multiple generated methods per line. Use target + # name as the second sort key to get a repeatable sort order. + targets = sorted(get_all_leaf_targets(tree), key=lambda x: (x[1].line, x[0])) + all_targets.extend( + [(module, target, node, active_type) for target, node, active_type in targets] + ) + + for module, target, node, active_type in order_by_subclassing(all_targets): + analyzer = graph[module].manager.semantic_analyzer + assert isinstance(node, (FuncDef, OverloadedFuncDef, Decorator)), node + process_top_level_function( + analyzer, graph[module], module, target, node, active_type, patches + ) + + +def process_top_level_function( + analyzer: SemanticAnalyzer, + state: State, + module: str, + target: str, + node: FuncDef | OverloadedFuncDef | Decorator, + active_type: TypeInfo | None, + patches: Patches, +) -> None: + """Analyze single top-level function or method. + + Process the body of the function (including nested functions) again and again, + until all names have been resolved (or iteration limit reached). + """ + # We need one more iteration after incomplete is False (e.g. to report errors, if any). + final_iteration = False + incomplete = True + # Start in the incomplete state (no missing names will be reported on first pass). + # Note that we use module name, since functions don't create qualified names. + deferred = [module] + analyzer.deferral_debug_context.clear() + analyzer.incomplete_namespaces.add(module) + iteration = 0 + while deferred: + iteration += 1 + if iteration == MAX_ITERATIONS: + # Just pick some module inside the current SCC for error context. + assert state.tree is not None + with analyzer.file_context(state.tree, state.options): + analyzer.report_hang() + break + if not (deferred or incomplete) or final_iteration: + # OK, this is one last pass, now missing names will be reported. + analyzer.incomplete_namespaces.discard(module) + deferred, incomplete, progress = semantic_analyze_target( + target, module, state, node, active_type, final_iteration, patches + ) + if not incomplete: + state.manager.incomplete_namespaces.discard(module) + if final_iteration: + assert not deferred, "Must not defer during final iteration" + if not progress: + final_iteration = True + + analyzer.incomplete_namespaces.discard(module) + # After semantic analysis is done, discard local namespaces + # to avoid memory hoarding. + analyzer.saved_locals.clear() + + +TargetInfo: _TypeAlias = tuple[ + str, Union[MypyFile, FuncDef, OverloadedFuncDef, Decorator], Optional[TypeInfo] +] + +# Same as above but includes module as first item. 
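+# I.e. (module id, target fullname, node, active class TypeInfo or None).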
+FullTargetInfo: _TypeAlias = tuple[ + str, str, Union[MypyFile, FuncDef, OverloadedFuncDef, Decorator], Optional[TypeInfo] +] + + +def get_all_leaf_targets(file: MypyFile) -> list[TargetInfo]: + """Return all leaf targets in a symbol table (module-level and methods).""" + result: list[TargetInfo] = [] + for fullname, node, active_type in file.local_definitions(): + if isinstance(node.node, (FuncDef, OverloadedFuncDef, Decorator)): + result.append((fullname, node.node, active_type)) + return result + + +def semantic_analyze_target( + target: str, + module: str, + state: State, + node: MypyFile | FuncDef | OverloadedFuncDef | Decorator, + active_type: TypeInfo | None, + final_iteration: bool, + patches: Patches, +) -> tuple[list[str], bool, bool]: + """Semantically analyze a single target. + + Return tuple with these items: + - list of deferred targets + - was some definition incomplete (need to run another pass) + - were any new names defined (or placeholders replaced) + """ + state.manager.processed_targets.append((module, target)) + tree = state.tree + assert tree is not None + analyzer = state.manager.semantic_analyzer + # TODO: Move initialization to somewhere else + analyzer.global_decls = [set()] + analyzer.nonlocal_decls = [set()] + analyzer.globals = tree.names + analyzer.imports = set() + analyzer.progress = False + with state.wrap_context(check_blockers=False): + refresh_node = node + if isinstance(refresh_node, Decorator): + # Decorator expressions will be processed as part of the module top level. + refresh_node = refresh_node.func + analyzer.refresh_partial( + refresh_node, + patches, + final_iteration, + file_node=tree, + options=state.options, + active_type=active_type, + ) + if isinstance(node, Decorator): + infer_decorator_signature_if_simple(node, analyzer) + for dep in analyzer.imports: + state.add_dependency(dep) + priority = mypy.build.PRI_LOW + if priority <= state.priorities.get(dep, priority): + state.priorities[dep] = priority + + # Clear out some stale data to avoid memory leaks and astmerge + # validity check confusion + analyzer.statement = None + del analyzer.cur_mod_node + + if analyzer.deferred: + return [target], analyzer.incomplete, analyzer.progress + else: + return [], analyzer.incomplete, analyzer.progress + + +def check_type_arguments(graph: Graph, scc: list[str], errors: Errors) -> None: + for module in scc: + state = graph[module] + assert state.tree + analyzer = TypeArgumentAnalyzer( + errors, + state.options, + state.tree.is_typeshed_file(state.options), + state.manager.semantic_analyzer.named_type, + ) + with state.wrap_context(): + with mypy.state.state.strict_optional_set(state.options.strict_optional): + state.tree.accept(analyzer) + + +def check_type_arguments_in_targets( + targets: list[FineGrainedDeferredNode], state: State, errors: Errors +) -> None: + """Check type arguments against type variable bounds and restrictions. + + This mirrors the logic in check_type_arguments() except that we process only + some targets. This is used in fine grained incremental mode. 
+ """ + analyzer = TypeArgumentAnalyzer( + errors, + state.options, + is_typeshed_file(state.options.abs_custom_typeshed_dir, state.path or ""), + state.manager.semantic_analyzer.named_type, + ) + with state.wrap_context(): + with mypy.state.state.strict_optional_set(state.options.strict_optional): + for target in targets: + func: FuncDef | OverloadedFuncDef | None = None + if isinstance(target.node, (FuncDef, OverloadedFuncDef)): + func = target.node + saved = (state.id, target.active_typeinfo, func) # module, class, function + with errors.scope.saved_scope(saved) if errors.scope else nullcontext(): + analyzer.recurse_into_functions = func is not None + target.node.accept(analyzer) + + +def apply_class_plugin_hooks(graph: Graph, scc: list[str], errors: Errors) -> None: + """Apply class plugin hooks within a SCC. + + We run these after to the main semantic analysis so that the hooks + don't need to deal with incomplete definitions such as placeholder + types. + + Note that some hooks incorrectly run during the main semantic + analysis pass, for historical reasons. + """ + num_passes = 0 + incomplete = True + # If we encounter a base class that has not been processed, we'll run another + # pass. This should eventually reach a fixed point. + while incomplete: + assert num_passes < 10, "Internal error: too many class plugin hook passes" + num_passes += 1 + incomplete = False + for module in scc: + state = graph[module] + tree = state.tree + assert tree + for _, node, _ in tree.local_definitions(): + if isinstance(node.node, TypeInfo): + if not apply_hooks_to_class( + state.manager.semantic_analyzer, + module, + node.node, + state.options, + tree, + errors, + ): + incomplete = True + + +def apply_hooks_to_class( + self: SemanticAnalyzer, + module: str, + info: TypeInfo, + options: Options, + file_node: MypyFile, + errors: Errors, +) -> bool: + # TODO: Move more class-related hooks here? 
+ defn = info.defn + ok = True + for decorator in defn.decorators: + with self.file_context(file_node, options, info): + hook = None + + decorator_name = self.get_fullname_for_hook(decorator) + if decorator_name: + hook = self.plugin.get_class_decorator_hook_2(decorator_name) + # Special case: if the decorator is itself decorated with + # typing.dataclass_transform, apply the hook for the dataclasses plugin + # TODO: remove special casing here + if hook is None and find_dataclass_transform_spec(decorator): + hook = dataclasses_plugin.dataclass_class_maker_callback + + if hook: + ok = ok and hook(ClassDefContext(defn, decorator, self)) + + # Check if the class definition itself triggers a dataclass transform (via a parent class/ + # metaclass) + spec = find_dataclass_transform_spec(info) + if spec is not None: + with self.file_context(file_node, options, info): + # We can't use the normal hook because reason = defn, and ClassDefContext only accepts + # an Expression for reason + ok = ok and dataclasses_plugin.DataclassTransformer(defn, defn, spec, self).transform() + + return ok + + +def calculate_class_properties(graph: Graph, scc: list[str], errors: Errors) -> None: + builtins = graph["builtins"].tree + assert builtins + for module in scc: + state = graph[module] + tree = state.tree + assert tree + for _, node, _ in tree.local_definitions(): + if isinstance(node.node, TypeInfo): + with state.manager.semantic_analyzer.file_context(tree, state.options, node.node): + calculate_class_abstract_status(node.node, tree.is_stub, errors) + check_protocol_status(node.node, errors) + calculate_class_vars(node.node) + add_type_promotion( + node.node, tree.names, graph[module].options, builtins.names + ) + + +def check_blockers(graph: Graph, scc: list[str]) -> None: + for module in scc: + graph[module].check_blockers() diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_namedtuple.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/semanal_namedtuple.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..80a74a4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/semanal_namedtuple.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_namedtuple.py b/.venv/lib/python3.12/site-packages/mypy/semanal_namedtuple.py new file mode 100644 index 0000000..f27c89e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/semanal_namedtuple.py @@ -0,0 +1,726 @@ +"""Semantic analysis of named tuple definitions. + +This is conceptually part of mypy.semanal. 
+""" + +from __future__ import annotations + +import keyword +from collections.abc import Container, Iterator, Mapping +from contextlib import contextmanager +from typing import Final, cast + +from mypy.errorcodes import ARG_TYPE, ErrorCode +from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type +from mypy.messages import MessageBuilder +from mypy.nodes import ( + ARG_NAMED_OPT, + ARG_OPT, + ARG_POS, + MDEF, + Argument, + AssignmentStmt, + Block, + CallExpr, + ClassDef, + Context, + Decorator, + EllipsisExpr, + Expression, + ExpressionStmt, + FuncBase, + FuncDef, + ListExpr, + NamedTupleExpr, + NameExpr, + PassStmt, + PlaceholderNode, + RefExpr, + Statement, + StrExpr, + SymbolTable, + SymbolTableNode, + TempNode, + TupleExpr, + TypeInfo, + TypeVarExpr, + Var, + is_StrExpr_list, +) +from mypy.options import Options +from mypy.semanal_shared import ( + PRIORITY_FALLBACKS, + SemanticAnalyzerInterface, + calculate_tuple_fallback, + has_placeholder, + set_callable_name, +) +from mypy.types import ( + TYPED_NAMEDTUPLE_NAMES, + AnyType, + CallableType, + LiteralType, + TupleType, + Type, + TypeOfAny, + TypeType, + TypeVarId, + TypeVarLikeType, + TypeVarType, + UnboundType, + has_type_vars, +) +from mypy.util import get_unique_redefinition_name + +# Matches "_prohibited" in typing.py, but adds __annotations__, which works at runtime but can't +# easily be supported in a static checker. +NAMEDTUPLE_PROHIBITED_NAMES: Final = ( + "__new__", + "__init__", + "__slots__", + "__getnewargs__", + "_fields", + "_field_defaults", + "_field_types", + "_make", + "_replace", + "_asdict", + "_source", + "__annotations__", +) + +NAMEDTUP_CLASS_ERROR: Final = ( + 'Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]"' +) + +SELF_TVAR_NAME: Final = "_NT" + + +class NamedTupleAnalyzer: + def __init__( + self, options: Options, api: SemanticAnalyzerInterface, msg: MessageBuilder + ) -> None: + self.options = options + self.api = api + self.msg = msg + + def analyze_namedtuple_classdef( + self, defn: ClassDef, is_stub_file: bool, is_func_scope: bool + ) -> tuple[bool, TypeInfo | None]: + """Analyze if given class definition can be a named tuple definition. + + Return a tuple where first item indicates whether this can possibly be a named tuple, + and the second item is the corresponding TypeInfo (may be None if not ready and should be + deferred). + """ + for base_expr in defn.base_type_exprs: + if isinstance(base_expr, RefExpr): + self.api.accept(base_expr) + if base_expr.fullname in TYPED_NAMEDTUPLE_NAMES: + result = self.check_namedtuple_classdef(defn, is_stub_file) + if result is None: + # This is a valid named tuple, but some types are incomplete. + return True, None + items, types, default_items, statements = result + if is_func_scope and "@" not in defn.name: + defn.name += "@" + str(defn.line) + existing_info = None + if isinstance(defn.analyzed, NamedTupleExpr): + existing_info = defn.analyzed.info + info = self.build_namedtuple_typeinfo( + defn.name, items, types, default_items, defn.line, existing_info + ) + defn.analyzed = NamedTupleExpr(info, is_typed=True) + defn.analyzed.line = defn.line + defn.analyzed.column = defn.column + defn.defs.body = statements + # All done: this is a valid named tuple with all types known. + return True, info + # This can't be a valid named tuple. 
+ return False, None + + def check_namedtuple_classdef( + self, defn: ClassDef, is_stub_file: bool + ) -> tuple[list[str], list[Type], dict[str, Expression], list[Statement]] | None: + """Parse and validate fields in named tuple class definition. + + Return a four tuple: + * field names + * field types + * field default values + * valid statements + or None, if any of the types are not ready. + """ + if len(defn.base_type_exprs) > 1: + self.fail("NamedTuple should be a single base", defn) + items: list[str] = [] + types: list[Type] = [] + default_items: dict[str, Expression] = {} + statements: list[Statement] = [] + for stmt in defn.defs.body: + statements.append(stmt) + if not isinstance(stmt, AssignmentStmt): + # Still allow pass or ... (for empty namedtuples). + if isinstance(stmt, PassStmt) or ( + isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, EllipsisExpr) + ): + continue + # Also allow methods, including decorated ones. + if isinstance(stmt, (Decorator, FuncBase)): + continue + # And docstrings. + if isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, StrExpr): + continue + statements.pop() + defn.removed_statements.append(stmt) + self.fail(NAMEDTUP_CLASS_ERROR, stmt) + elif len(stmt.lvalues) > 1 or not isinstance(stmt.lvalues[0], NameExpr): + # An assignment, but an invalid one. + statements.pop() + defn.removed_statements.append(stmt) + self.fail(NAMEDTUP_CLASS_ERROR, stmt) + else: + # Append name and type in this case... + name = stmt.lvalues[0].name + items.append(name) + if stmt.type is None: + types.append(AnyType(TypeOfAny.unannotated)) + else: + # We never allow recursive types at function scope. Although it is + # possible to support this for named tuples, it is still tricky, and + # it would be inconsistent with type aliases. + analyzed = self.api.anal_type( + stmt.type, + allow_placeholder=not self.api.is_func_scope(), + prohibit_self_type="NamedTuple item type", + prohibit_special_class_field_types="NamedTuple", + ) + if analyzed is None: + # Something is incomplete. We need to defer this named tuple. + return None + types.append(analyzed) + # ...despite possible minor failures that allow further analysis. + if name.startswith("_"): + self.fail( + f"NamedTuple field name cannot start with an underscore: {name}", stmt + ) + if stmt.type is None or hasattr(stmt, "new_syntax") and not stmt.new_syntax: + self.fail(NAMEDTUP_CLASS_ERROR, stmt) + elif isinstance(stmt.rvalue, TempNode): + # x: int assigns rvalue to TempNode(AnyType()) + if default_items: + self.fail( + "Non-default NamedTuple fields cannot follow default fields", stmt + ) + else: + default_items[name] = stmt.rvalue + if defn.keywords: + for_function = ' for "__init_subclass__" of "NamedTuple"' + for key in defn.keywords: + self.msg.unexpected_keyword_argument_for_function(for_function, key, defn) + return items, types, default_items, statements + + def check_namedtuple( + self, node: Expression, var_name: str | None, is_func_scope: bool + ) -> tuple[str | None, TypeInfo | None, list[TypeVarLikeType]]: + """Check if a call defines a namedtuple. + + The optional var_name argument is the name of the variable to + which this is assigned, if any. + + Return a tuple of two items: + * Internal name of the named tuple (e.g. the name passed as an argument to namedtuple) + or None if it is not a valid named tuple + * Corresponding TypeInfo, or None if not ready. + + If the definition is invalid but looks like a namedtuple, + report errors but return (some) TypeInfo. 
+ """ + if not isinstance(node, CallExpr): + return None, None, [] + call = node + callee = call.callee + if not isinstance(callee, RefExpr): + return None, None, [] + fullname = callee.fullname + if fullname == "collections.namedtuple": + is_typed = False + elif fullname in TYPED_NAMEDTUPLE_NAMES: + is_typed = True + else: + return None, None, [] + result = self.parse_namedtuple_args(call, fullname) + if result: + items, types, defaults, typename, tvar_defs, ok = result + else: + # Error. Construct dummy return value. + if var_name: + name = var_name + if is_func_scope: + name += "@" + str(call.line) + else: + name = var_name = "namedtuple@" + str(call.line) + info = self.build_namedtuple_typeinfo(name, [], [], {}, node.line, None) + self.store_namedtuple_info(info, var_name, call, is_typed) + if name != var_name or is_func_scope: + # NOTE: we skip local namespaces since they are not serialized. + self.api.add_symbol_skip_local(name, info) + return var_name, info, [] + if not ok: + # This is a valid named tuple but some types are not ready. + return typename, None, [] + + # We use the variable name as the class name if it exists. If + # it doesn't, we use the name passed as an argument. We prefer + # the variable name because it should be unique inside a + # module, and so we don't need to disambiguate it with a line + # number. + if var_name: + name = var_name + else: + name = typename + + if var_name is None or is_func_scope: + # There are two special cases where need to give it a unique name derived + # from the line number: + # * This is a base class expression, since it often matches the class name: + # class NT(NamedTuple('NT', [...])): + # ... + # * This is a local (function or method level) named tuple, since + # two methods of a class can define a named tuple with the same name, + # and they will be stored in the same namespace (see below). + name += "@" + str(call.line) + if defaults: + default_items = { + arg_name: default for arg_name, default in zip(items[-len(defaults) :], defaults) + } + else: + default_items = {} + + existing_info = None + if isinstance(node.analyzed, NamedTupleExpr): + existing_info = node.analyzed.info + info = self.build_namedtuple_typeinfo( + name, items, types, default_items, node.line, existing_info + ) + + # If var_name is not None (i.e. this is not a base class expression), we always + # store the generated TypeInfo under var_name in the current scope, so that + # other definitions can use it. + if var_name: + self.store_namedtuple_info(info, var_name, call, is_typed) + else: + call.analyzed = NamedTupleExpr(info, is_typed=is_typed) + call.analyzed.set_line(call) + # There are three cases where we need to store the generated TypeInfo + # second time (for the purpose of serialization): + # * If there is a name mismatch like One = NamedTuple('Other', [...]) + # we also store the info under name 'Other@lineno', this is needed + # because classes are (de)serialized using their actual fullname, not + # the name of l.h.s. + # * If this is a method level named tuple. It can leak from the method + # via assignment to self attribute and therefore needs to be serialized + # (local namespaces are not serialized). + # * If it is a base class expression. It was not stored above, since + # there is no var_name (but it still needs to be serialized + # since it is in MRO of some class). + if name != var_name or is_func_scope: + # NOTE: we skip local namespaces since they are not serialized. 
+ self.api.add_symbol_skip_local(name, info) + return typename, info, tvar_defs + + def store_namedtuple_info( + self, info: TypeInfo, name: str, call: CallExpr, is_typed: bool + ) -> None: + self.api.add_symbol(name, info, call) + call.analyzed = NamedTupleExpr(info, is_typed=is_typed) + call.analyzed.set_line(call) + + def parse_namedtuple_args( + self, call: CallExpr, fullname: str + ) -> None | (tuple[list[str], list[Type], list[Expression], str, list[TypeVarLikeType], bool]): + """Parse a namedtuple() call into data needed to construct a type. + + Returns a 6-tuple: + - List of argument names + - List of argument types + - List of default values + - First argument of namedtuple + - All typevars found in the field definition + - Whether all types are ready. + + Return None if the definition didn't typecheck. + """ + type_name = "NamedTuple" if fullname in TYPED_NAMEDTUPLE_NAMES else "namedtuple" + # TODO: Share code with check_argument_count in checkexpr.py? + args = call.args + if len(args) < 2: + self.fail(f'Too few arguments for "{type_name}()"', call) + return None + defaults: list[Expression] = [] + rename = False + if len(args) > 2: + # Typed namedtuple doesn't support additional arguments. + if fullname in TYPED_NAMEDTUPLE_NAMES: + self.fail('Too many arguments for "NamedTuple()"', call) + return None + for i, arg_name in enumerate(call.arg_names[2:], 2): + if arg_name == "defaults": + arg = args[i] + # We don't care what the values are, as long as the argument is an iterable + # and we can count how many defaults there are. + if isinstance(arg, (ListExpr, TupleExpr)): + defaults = list(arg.items) + else: + self.fail( + "List or tuple literal expected as the defaults argument to " + "{}()".format(type_name), + arg, + ) + elif arg_name == "rename": + arg = args[i] + if isinstance(arg, NameExpr) and arg.name in ("True", "False"): + rename = arg.name == "True" + else: + self.fail( + f'Boolean literal expected as the "rename" argument to {type_name}()', + arg, + code=ARG_TYPE, + ) + if call.arg_kinds[:2] != [ARG_POS, ARG_POS]: + self.fail(f'Unexpected arguments to "{type_name}()"', call) + return None + if not isinstance(args[0], StrExpr): + self.fail(f'"{type_name}()" expects a string literal as the first argument', call) + return None + typename = args[0].value + types: list[Type] = [] + tvar_defs = [] + if not isinstance(args[1], (ListExpr, TupleExpr)): + if fullname == "collections.namedtuple" and isinstance(args[1], StrExpr): + str_expr = args[1] + items = str_expr.value.replace(",", " ").split() + else: + self.fail( + 'List or tuple literal expected as the second argument to "{}()"'.format( + type_name + ), + call, + ) + return None + else: + listexpr = args[1] + if fullname == "collections.namedtuple": + # The fields argument contains just names, with implicit Any types. + if not is_StrExpr_list(listexpr.items): + self.fail('String literal expected as "namedtuple()" item', call) + return None + items = [item.value for item in listexpr.items] + else: + type_exprs = [ + t.items[1] + for t in listexpr.items + if isinstance(t, TupleExpr) and len(t.items) == 2 + ] + tvar_defs = self.api.get_and_bind_all_tvars(type_exprs) + # The fields argument contains (name, type) tuples. + result = self.parse_namedtuple_fields_with_types(listexpr.items, call) + if result is None: + # One of the types is not ready, defer. 
+ return None + items, types, _, ok = result + if not ok: + return [], [], [], typename, [], False + if not types: + types = [AnyType(TypeOfAny.unannotated) for _ in items] + processed_items = [] + seen_names: set[str] = set() + for i, item in enumerate(items): + problem = self.check_namedtuple_field_name(item, seen_names) + if problem is None: + processed_items.append(item) + seen_names.add(item) + else: + if not rename: + self.fail(f'"{type_name}()" {problem}', call) + # Even if rename=False, we pretend that it is True. + # At runtime namedtuple creation would throw an error; + # applying the rename logic means we create a more sensible + # namedtuple. + new_name = f"_{i}" + processed_items.append(new_name) + seen_names.add(new_name) + + if len(defaults) > len(items): + self.fail(f'Too many defaults given in call to "{type_name}()"', call) + defaults = defaults[: len(items)] + return processed_items, types, defaults, typename, tvar_defs, True + + def parse_namedtuple_fields_with_types( + self, nodes: list[Expression], context: Context + ) -> tuple[list[str], list[Type], list[Expression], bool] | None: + """Parse typed named tuple fields. + + Return (names, types, defaults, whether types are all ready), or None if error occurred. + """ + items: list[str] = [] + types: list[Type] = [] + for item in nodes: + if isinstance(item, TupleExpr): + if len(item.items) != 2: + self.fail('Invalid "NamedTuple()" field definition', item) + return None + name, type_node = item.items + if isinstance(name, StrExpr): + items.append(name.value) + else: + self.fail('Invalid "NamedTuple()" field name', item) + return None + try: + type = expr_to_unanalyzed_type(type_node, self.options, self.api.is_stub_file) + except TypeTranslationError: + self.fail("Invalid field type", type_node) + return None + # We never allow recursive types at function scope. + analyzed = self.api.anal_type( + type, + allow_placeholder=not self.api.is_func_scope(), + prohibit_self_type="NamedTuple item type", + prohibit_special_class_field_types="NamedTuple", + ) + # Workaround #4987 and avoid introducing a bogus UnboundType + if isinstance(analyzed, UnboundType): + analyzed = AnyType(TypeOfAny.from_error) + # These should be all known, otherwise we would defer in visit_assignment_stmt(). + if analyzed is None: + return [], [], [], False + types.append(analyzed) + else: + self.fail('Tuple expected as "NamedTuple()" field', item) + return None + return items, types, [], True + + def build_namedtuple_typeinfo( + self, + name: str, + items: list[str], + types: list[Type], + default_items: Mapping[str, Expression], + line: int, + existing_info: TypeInfo | None, + ) -> TypeInfo: + strtype = self.api.named_type("builtins.str") + implicit_any = AnyType(TypeOfAny.special_form) + basetuple_type = self.api.named_type("builtins.tuple", [implicit_any]) + dictype = self.api.named_type("builtins.dict", [strtype, implicit_any]) + # Actual signature should return OrderedDict[str, Union[types]] + ordereddictype = self.api.named_type("builtins.dict", [strtype, implicit_any]) + fallback = self.api.named_type("builtins.tuple", [implicit_any]) + # Note: actual signature should accept an invariant version of Iterable[UnionType[types]]. + # but it can't be expressed. 'new' and 'len' should be callable types. 
+ iterable_type = self.api.named_type_or_none("typing.Iterable", [implicit_any]) + function_type = self.api.named_type("builtins.function") + + literals: list[Type] = [LiteralType(item, strtype) for item in items] + match_args_type = TupleType(literals, basetuple_type) + + info = existing_info or self.api.basic_new_typeinfo(name, fallback, line) + info.is_named_tuple = True + tuple_base = TupleType(types, fallback) + if info.special_alias and has_placeholder(info.special_alias.target): + self.api.process_placeholder( + None, "NamedTuple item", info, force_progress=tuple_base != info.tuple_type + ) + info.update_tuple_type(tuple_base) + info.line = line + # For use by mypyc. + info.metadata["namedtuple"] = {"fields": items.copy()} + + # We can't calculate the complete fallback type until after semantic + # analysis, since otherwise base classes might be incomplete. Postpone a + # callback function that patches the fallback. + if not has_placeholder(tuple_base) and not has_type_vars(tuple_base): + self.api.schedule_patch( + PRIORITY_FALLBACKS, lambda: calculate_tuple_fallback(tuple_base) + ) + + def add_field( + var: Var, is_initialized_in_class: bool = False, is_property: bool = False + ) -> None: + var.info = info + var.is_initialized_in_class = is_initialized_in_class + var.is_property = is_property + var._fullname = f"{info.fullname}.{var.name}" + info.names[var.name] = SymbolTableNode(MDEF, var) + + fields = [Var(item, typ) for item, typ in zip(items, types)] + for var in fields: + add_field(var, is_property=True) + # We can't share Vars between fields and method arguments, since they + # have different full names (the latter are normally used as local variables + # in functions, so their full names are set to short names when generated methods + # are analyzed). + vars = [Var(item, typ) for item, typ in zip(items, types)] + + tuple_of_strings = TupleType([strtype for _ in items], basetuple_type) + add_field(Var("_fields", tuple_of_strings), is_initialized_in_class=True) + add_field(Var("_field_types", dictype), is_initialized_in_class=True) + add_field(Var("_field_defaults", dictype), is_initialized_in_class=True) + add_field(Var("_source", strtype), is_initialized_in_class=True) + add_field(Var("__annotations__", ordereddictype), is_initialized_in_class=True) + add_field(Var("__doc__", strtype), is_initialized_in_class=True) + if self.options.python_version >= (3, 10): + add_field(Var("__match_args__", match_args_type), is_initialized_in_class=True) + + assert info.tuple_type is not None # Set by update_tuple_type() above. + shared_self_type = TypeVarType( + name=SELF_TVAR_NAME, + fullname=f"{info.fullname}.{SELF_TVAR_NAME}", + # Namespace is patched per-method below. 
+ id=self.api.tvar_scope.new_unique_func_id(), + values=[], + upper_bound=info.tuple_type, + default=AnyType(TypeOfAny.from_omitted_generics), + ) + + def add_method( + funcname: str, + ret: Type | None, # None means use (patched) self-type + args: list[Argument], + is_classmethod: bool = False, + is_new: bool = False, + ) -> None: + fullname = f"{info.fullname}.{funcname}" + self_type = shared_self_type.copy_modified( + id=TypeVarId(shared_self_type.id.raw_id, namespace=fullname) + ) + if ret is None: + ret = self_type + if is_classmethod or is_new: + first = [Argument(Var("_cls"), TypeType.make_normalized(self_type), None, ARG_POS)] + else: + first = [Argument(Var("_self"), self_type, None, ARG_POS)] + args = first + args + + types = [arg.type_annotation for arg in args] + items = [arg.variable.name for arg in args] + arg_kinds = [arg.kind for arg in args] + assert None not in types + signature = CallableType(cast(list[Type], types), arg_kinds, items, ret, function_type) + signature.variables = (self_type,) + func = FuncDef(funcname, args, Block([])) + func.info = info + func.is_class = is_classmethod + func.type = set_callable_name(signature, func) + func._fullname = fullname + func.line = line + if is_classmethod: + v = Var(funcname, func.type) + v.is_classmethod = True + v.info = info + v._fullname = func._fullname + func.is_decorated = True + dec = Decorator(func, [NameExpr("classmethod")], v) + dec.line = line + sym = SymbolTableNode(MDEF, dec) + else: + sym = SymbolTableNode(MDEF, func) + sym.plugin_generated = True + info.names[funcname] = sym + + add_method( + "_replace", + ret=None, + args=[Argument(var, var.type, EllipsisExpr(), ARG_NAMED_OPT) for var in vars], + ) + if self.options.python_version >= (3, 13): + add_method( + "__replace__", + ret=None, + args=[Argument(var, var.type, EllipsisExpr(), ARG_NAMED_OPT) for var in vars], + ) + + def make_init_arg(var: Var) -> Argument: + default = default_items.get(var.name, None) + kind = ARG_POS if default is None else ARG_OPT + return Argument(var, var.type, default, kind) + + add_method("__new__", ret=None, args=[make_init_arg(var) for var in vars], is_new=True) + add_method("_asdict", args=[], ret=ordereddictype) + add_method( + "_make", + ret=None, + is_classmethod=True, + args=[Argument(Var("iterable", iterable_type), iterable_type, None, ARG_POS)], + ) + + self_tvar_expr = TypeVarExpr( + SELF_TVAR_NAME, + info.fullname + "." + SELF_TVAR_NAME, + [], + info.tuple_type, + AnyType(TypeOfAny.from_omitted_generics), + ) + info.names[SELF_TVAR_NAME] = SymbolTableNode(MDEF, self_tvar_expr) + return info + + @contextmanager + def save_namedtuple_body(self, named_tuple_info: TypeInfo) -> Iterator[None]: + """Preserve the generated body of class-based named tuple and then restore it. + + Temporarily clear the names dict so we don't get errors about duplicate names + that were already set in build_namedtuple_typeinfo (we already added the tuple + field names while generating the TypeInfo, and actual duplicates are + already reported). + """ + nt_names = named_tuple_info.names + named_tuple_info.names = SymbolTable() + + yield + + # Make sure we didn't use illegal names, then reset the names in the typeinfo. 
+ for prohibited in NAMEDTUPLE_PROHIBITED_NAMES: + if prohibited in named_tuple_info.names: + if nt_names.get(prohibited) is named_tuple_info.names[prohibited]: + continue + ctx = named_tuple_info.names[prohibited].node + assert ctx is not None + self.fail(f'Cannot overwrite NamedTuple attribute "{prohibited}"', ctx) + + # Restore the names in the original symbol table. This ensures that the symbol + # table contains the field objects created by build_namedtuple_typeinfo. Exclude + # __doc__, which can legally be overwritten by the class. + for key, value in nt_names.items(): + if key in named_tuple_info.names: + if key == "__doc__": + continue + sym = named_tuple_info.names[key] + if isinstance(sym.node, (FuncBase, Decorator)) and not sym.plugin_generated: + # Keep user-defined methods as is. + continue + # Do not retain placeholders - we'll get back here if they cease to + # be placeholders later. If we keep placeholders alive, they may never + # be reached again, making it to cacheable symtable. + if not isinstance(sym.node, PlaceholderNode): + # Keep existing (user-provided) definitions under mangled names, so they + # get semantically analyzed. + r_key = get_unique_redefinition_name(key, named_tuple_info.names) + named_tuple_info.names[r_key] = sym + named_tuple_info.names[key] = value + + # Helpers + + def check_namedtuple_field_name(self, field: str, seen_names: Container[str]) -> str | None: + """Return None for valid fields, a string description for invalid ones.""" + if field in seen_names: + return f'has duplicate field name "{field}"' + elif not field.isidentifier(): + return f'field name "{field}" is not a valid identifier' + elif field.startswith("_"): + return f'field name "{field}" starts with an underscore' + elif keyword.iskeyword(field): + return f'field name "{field}" is a keyword' + return None + + def fail(self, msg: str, ctx: Context, code: ErrorCode | None = None) -> None: + self.api.fail(msg, ctx, code=code) diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_newtype.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/semanal_newtype.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..6e5a9bc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/semanal_newtype.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_newtype.py b/.venv/lib/python3.12/site-packages/mypy/semanal_newtype.py new file mode 100644 index 0000000..0c717b5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/semanal_newtype.py @@ -0,0 +1,273 @@ +"""Semantic analysis of NewType definitions. + +This is conceptually part of mypy.semanal (semantic analyzer pass 2). 
+""" + +from __future__ import annotations + +from mypy import errorcodes as codes +from mypy.errorcodes import ErrorCode +from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type +from mypy.messages import MessageBuilder, format_type +from mypy.nodes import ( + ARG_POS, + MDEF, + Argument, + AssignmentStmt, + Block, + CallExpr, + Context, + FuncDef, + NameExpr, + NewTypeExpr, + PlaceholderNode, + RefExpr, + StrExpr, + SymbolTableNode, + TypeInfo, + Var, +) +from mypy.options import Options +from mypy.semanal_shared import SemanticAnalyzerInterface, has_placeholder +from mypy.typeanal import check_for_explicit_any, has_any_from_unimported_type +from mypy.types import ( + AnyType, + CallableType, + Instance, + NoneType, + PlaceholderType, + TupleType, + Type, + TypeOfAny, + get_proper_type, +) + + +class NewTypeAnalyzer: + def __init__( + self, options: Options, api: SemanticAnalyzerInterface, msg: MessageBuilder + ) -> None: + self.options = options + self.api = api + self.msg = msg + + def process_newtype_declaration(self, s: AssignmentStmt) -> bool: + """Check if s declares a NewType; if yes, store it in symbol table. + + Return True if it's a NewType declaration. The current target may be + deferred as a side effect if the base type is not ready, even if + the return value is True. + + The logic in this function mostly copies the logic for visit_class_def() + with a single (non-Generic) base. + """ + var_name, call = self.analyze_newtype_declaration(s) + if var_name is None or call is None: + return False + name = var_name + # OK, now we know this is a NewType. But the base type may be not ready yet, + # add placeholder as we do for ClassDef. + + if self.api.is_func_scope(): + name += "@" + str(s.line) + fullname = self.api.qualified_name(name) + + if not call.analyzed or isinstance(call.analyzed, NewTypeExpr) and not call.analyzed.info: + # Start from labeling this as a future class, as we do for normal ClassDefs. + placeholder = PlaceholderNode(fullname, s, s.line, becomes_typeinfo=True) + self.api.add_symbol(var_name, placeholder, s, can_defer=False) + + old_type, should_defer = self.check_newtype_args(var_name, call, s) + old_type = get_proper_type(old_type) + if not isinstance(call.analyzed, NewTypeExpr): + call.analyzed = NewTypeExpr(var_name, old_type, line=call.line, column=call.column) + else: + call.analyzed.old_type = old_type + if old_type is None: + if should_defer: + # Base type is not ready. + self.api.defer() + return True + + # Create the corresponding class definition if the aliased type is subtypeable + assert isinstance(call.analyzed, NewTypeExpr) + if isinstance(old_type, TupleType): + newtype_class_info = self.build_newtype_typeinfo( + name, old_type, old_type.partial_fallback, s.line, call.analyzed.info + ) + newtype_class_info.update_tuple_type(old_type) + elif isinstance(old_type, Instance): + if old_type.type.is_protocol: + self.fail("NewType cannot be used with protocol classes", s) + newtype_class_info = self.build_newtype_typeinfo( + name, old_type, old_type, s.line, call.analyzed.info + ) + else: + if old_type is not None: + message = "Argument 2 to NewType(...) must be subclassable (got {})" + self.fail( + message.format(format_type(old_type, self.options)), + s, + code=codes.VALID_NEWTYPE, + ) + # Otherwise the error was already reported. 
+ old_type = AnyType(TypeOfAny.from_error) + object_type = self.api.named_type("builtins.object") + newtype_class_info = self.build_newtype_typeinfo( + name, old_type, object_type, s.line, call.analyzed.info + ) + newtype_class_info.fallback_to_any = True + + check_for_explicit_any( + old_type, self.options, self.api.is_typeshed_stub_file, self.msg, context=s + ) + + if self.options.disallow_any_unimported and has_any_from_unimported_type(old_type): + self.msg.unimported_type_becomes_any("Argument 2 to NewType(...)", old_type, s) + + # If so, add it to the symbol table. + assert isinstance(call.analyzed, NewTypeExpr) + # As we do for normal classes, create the TypeInfo only once, then just + # update base classes on next iterations (to get rid of placeholders there). + if not call.analyzed.info: + call.analyzed.info = newtype_class_info + else: + call.analyzed.info.bases = newtype_class_info.bases + self.api.add_symbol(var_name, call.analyzed.info, s) + if self.api.is_func_scope(): + self.api.add_symbol_skip_local(name, call.analyzed.info) + newtype_class_info.line = s.line + return True + + def analyze_newtype_declaration(self, s: AssignmentStmt) -> tuple[str | None, CallExpr | None]: + """Return the NewType call expression if `s` is a newtype declaration or None otherwise.""" + name, call = None, None + if ( + len(s.lvalues) == 1 + and isinstance(s.lvalues[0], NameExpr) + and isinstance(s.rvalue, CallExpr) + and isinstance(s.rvalue.callee, RefExpr) + and (s.rvalue.callee.fullname in ("typing.NewType", "typing_extensions.NewType")) + ): + name = s.lvalues[0].name + + if s.type: + self.fail("Cannot declare the type of a NewType declaration", s) + + names = self.api.current_symbol_table() + existing = names.get(name) + # Give a better error message than generic "Name already defined". + if ( + existing + and not isinstance(existing.node, PlaceholderNode) + and not s.rvalue.analyzed + ): + self.fail(f'Cannot redefine "{name}" as a NewType', s) + + # This dummy NewTypeExpr marks the call as sufficiently analyzed; it will be + # overwritten later with a fully complete NewTypeExpr if there are no other + # errors with the NewType() call. + call = s.rvalue + + return name, call + + def check_newtype_args( + self, name: str, call: CallExpr, context: Context + ) -> tuple[Type | None, bool]: + """Analyze base type in NewType call. + + Return a tuple (type, should defer). + """ + has_failed = False + args, arg_kinds = call.args, call.arg_kinds + if len(args) != 2 or arg_kinds[0] != ARG_POS or arg_kinds[1] != ARG_POS: + self.fail("NewType(...) expects exactly two positional arguments", context) + return None, False + + # Check first argument + if not isinstance(args[0], StrExpr): + self.fail("Argument 1 to NewType(...) must be a string literal", context) + has_failed = True + elif args[0].value != name: + msg = 'String argument 1 "{}" to NewType(...) does not match variable name "{}"' + self.fail(msg.format(args[0].value, name), context) + has_failed = True + + # Check second argument + msg = "Argument 2 to NewType(...) must be a valid type" + try: + unanalyzed_type = expr_to_unanalyzed_type(args[1], self.options, self.api.is_stub_file) + except TypeTranslationError: + self.fail(msg, context) + return None, False + + # We want to use our custom error message (see above), so we suppress + # the default error message for invalid types here. 
+ old_type = get_proper_type( + self.api.anal_type( + unanalyzed_type, + report_invalid_types=False, + allow_placeholder=not self.api.is_func_scope(), + ) + ) + should_defer = False + if isinstance(old_type, PlaceholderType): + old_type = None + if old_type is None: + should_defer = True + + # The caller of this function assumes that if we return a Type, it's always + # a valid one. So, we translate AnyTypes created from errors into None. + if isinstance(old_type, AnyType) and old_type.is_from_error: + self.fail(msg, context) + return None, False + + return None if has_failed else old_type, should_defer + + def build_newtype_typeinfo( + self, + name: str, + old_type: Type, + base_type: Instance, + line: int, + existing_info: TypeInfo | None, + ) -> TypeInfo: + info = existing_info or self.api.basic_new_typeinfo(name, base_type, line) + info.bases = [base_type] # Update in case there were nested placeholders. + info.is_newtype = True + + # Add __init__ method + args = [ + Argument(Var("self"), NoneType(), None, ARG_POS), + self.make_argument("item", old_type), + ] + signature = CallableType( + arg_types=[Instance(info, []), old_type], + arg_kinds=[arg.kind for arg in args], + arg_names=["self", "item"], + ret_type=NoneType(), + fallback=self.api.named_type("builtins.function"), + name=name, + ) + init_func = FuncDef("__init__", args, Block([]), typ=signature) + init_func.info = info + init_func._fullname = info.fullname + ".__init__" + if not existing_info: + updated = True + else: + previous_sym = info.names["__init__"].node + assert isinstance(previous_sym, FuncDef) + updated = old_type != previous_sym.arguments[1].variable.type + info.names["__init__"] = SymbolTableNode(MDEF, init_func) + + if has_placeholder(old_type): + self.api.process_placeholder(None, "NewType base", info, force_progress=updated) + return info + + # Helpers + + def make_argument(self, name: str, type: Type) -> Argument: + return Argument(Var(name), type, None, ARG_POS) + + def fail(self, msg: str, ctx: Context, *, code: ErrorCode | None = None) -> None: + self.api.fail(msg, ctx, code=code) diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_pass1.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/semanal_pass1.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..23bf6de Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/semanal_pass1.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_pass1.py b/.venv/lib/python3.12/site-packages/mypy/semanal_pass1.py new file mode 100644 index 0000000..266fd23 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/semanal_pass1.py @@ -0,0 +1,159 @@ +"""Block/import reachability analysis.""" + +from __future__ import annotations + +from mypy.nodes import ( + AssertStmt, + AssignmentStmt, + Block, + ClassDef, + ExpressionStmt, + ForStmt, + FuncDef, + IfStmt, + Import, + ImportAll, + ImportFrom, + MatchStmt, + MypyFile, + ReturnStmt, +) +from mypy.options import Options +from mypy.reachability import ( + assert_will_always_fail, + infer_reachability_of_if_statement, + infer_reachability_of_match_statement, +) +from mypy.traverser import TraverserVisitor + + +class SemanticAnalyzerPreAnalysis(TraverserVisitor): + """Analyze reachability of blocks and imports and other local things. + + This runs before semantic analysis, so names have not been bound. Imports are + also not resolved yet, so we can only access the current module. 
+ + This determines static reachability of blocks and imports due to version and + platform checks, among others. + + The main entry point is 'visit_file'. + + Reachability of imports needs to be determined very early in the build since + this affects which modules will ultimately be processed. + + Consider this example: + + import sys + + def do_stuff() -> None: + if sys.version_info >= (3, 10): + import xyz # Only available in Python 3.10+ + xyz.whatever() + ... + + The block containing 'import xyz' is unreachable in Python 3 mode. The import + shouldn't be processed in Python 3 mode, even if the module happens to exist. + """ + + def visit_file(self, file: MypyFile, fnam: str, mod_id: str, options: Options) -> None: + self.platform = options.platform + self.cur_mod_id = mod_id + self.cur_mod_node = file + self.options = options + self.is_global_scope = True + self.skipped_lines: set[int] = set() + + for i, defn in enumerate(file.defs): + defn.accept(self) + if isinstance(defn, AssertStmt) and assert_will_always_fail(defn, options): + # We've encountered an assert that's always false, + # e.g. assert sys.platform == 'lol'. Truncate the + # list of statements. This mutates file.defs too. + if i < len(file.defs) - 1: + next_def, last = file.defs[i + 1], file.defs[-1] + if last.end_line is not None: + # We are on a Python version recent enough to support end lines. + self.skipped_lines |= set(range(next_def.line, last.end_line + 1)) + file.imports = [ + i for i in file.imports if (i.line, i.column) <= (defn.line, defn.column) + ] + del file.defs[i + 1 :] + break + file.skipped_lines = self.skipped_lines + + def visit_func_def(self, node: FuncDef) -> None: + old_global_scope = self.is_global_scope + self.is_global_scope = False + super().visit_func_def(node) + self.is_global_scope = old_global_scope + file_node = self.cur_mod_node + if ( + self.is_global_scope + and file_node.is_stub + and node.name == "__getattr__" + and file_node.is_package_init_file() + ): + # __init__.pyi with __getattr__ means that any submodules are assumed + # to exist, even if there is no stub. Note that we can't verify that the + # return type is compatible, since we haven't bound types yet. + file_node.is_partial_stub_package = True + + def visit_class_def(self, node: ClassDef) -> None: + old_global_scope = self.is_global_scope + self.is_global_scope = False + super().visit_class_def(node) + self.is_global_scope = old_global_scope + + def visit_import_from(self, node: ImportFrom) -> None: + node.is_top_level = self.is_global_scope + super().visit_import_from(node) + + def visit_import_all(self, node: ImportAll) -> None: + node.is_top_level = self.is_global_scope + super().visit_import_all(node) + + def visit_import(self, node: Import) -> None: + node.is_top_level = self.is_global_scope + super().visit_import(node) + + def visit_if_stmt(self, s: IfStmt) -> None: + infer_reachability_of_if_statement(s, self.options) + for expr in s.expr: + expr.accept(self) + for node in s.body: + node.accept(self) + if s.else_body: + s.else_body.accept(self) + + def visit_block(self, b: Block) -> None: + if b.is_unreachable: + if b.end_line is not None: + # We are on a Python version recent enough to support end lines. 
+ self.skipped_lines |= set(range(b.line, b.end_line + 1)) + return + super().visit_block(b) + + def visit_match_stmt(self, s: MatchStmt) -> None: + infer_reachability_of_match_statement(s, self.options) + for guard in s.guards: + if guard is not None: + guard.accept(self) + for body in s.bodies: + body.accept(self) + + # The remaining methods are an optimization: don't visit nested expressions + # of common statements, since they can have no effect. + + def visit_assignment_stmt(self, s: AssignmentStmt) -> None: + pass + + def visit_expression_stmt(self, s: ExpressionStmt) -> None: + pass + + def visit_return_stmt(self, s: ReturnStmt) -> None: + pass + + def visit_for_stmt(self, s: ForStmt) -> None: + s.body.accept(self) + if s.else_body is not None: + s.else_body.accept(self) diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_shared.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/semanal_shared.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..3a54c25 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/semanal_shared.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_shared.py b/.venv/lib/python3.12/site-packages/mypy/semanal_shared.py new file mode 100644 index 0000000..c49b13d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/semanal_shared.py @@ -0,0 +1,494 @@ +"""Shared definitions used by different parts of semantic analysis.""" + +from __future__ import annotations + +from abc import abstractmethod +from typing import Callable, Final, Literal, Protocol, overload + +from mypy_extensions import trait + +from mypy.errorcodes import LITERAL_REQ, ErrorCode +from mypy.nodes import ( + CallExpr, + ClassDef, + Context, + DataclassTransformSpec, + Decorator, + Expression, + FuncDef, + NameExpr, + Node, + OverloadedFuncDef, + RefExpr, + SymbolNode, + SymbolTable, + SymbolTableNode, + TypeInfo, +) +from mypy.plugin import SemanticAnalyzerPluginInterface +from mypy.tvar_scope import TypeVarLikeScope +from mypy.type_visitor import ANY_STRATEGY, BoolTypeQuery +from mypy.typeops import make_simplified_union +from mypy.types import ( + TPDICT_FB_NAMES, + AnyType, + FunctionLike, + Instance, + Parameters, + ParamSpecFlavor, + ParamSpecType, + PlaceholderType, + ProperType, + TupleType, + Type, + TypeOfAny, + TypeVarId, + TypeVarLikeType, + TypeVarTupleType, + UnpackType, + flatten_nested_tuples, + get_proper_type, +) + +# Subclasses can override these Var attributes with incompatible types. This can also be +# set for individual attributes using 'allow_incompatible_override' of Var. +ALLOW_INCOMPATIBLE_OVERRIDE: Final = ("__slots__", "__deletable__", "__match_args__") + + +# Priorities for ordering of patches within the "patch" phase of semantic analysis +# (after the main pass): + +# Fix fallbacks (does subtype checks). +PRIORITY_FALLBACKS: Final = 1 + + +@trait +class SemanticAnalyzerCoreInterface: + """A core abstract interface to generic semantic analyzer functionality. + + This is implemented by both semantic analyzer passes 2 and 3. 
+ """ + + @abstractmethod + def lookup_qualified( + self, name: str, ctx: Context, suppress_errors: bool = False + ) -> SymbolTableNode | None: + raise NotImplementedError + + @abstractmethod + def lookup_fully_qualified(self, fullname: str, /) -> SymbolTableNode: + raise NotImplementedError + + @abstractmethod + def lookup_fully_qualified_or_none(self, fullname: str, /) -> SymbolTableNode | None: + raise NotImplementedError + + @abstractmethod + def fail( + self, + msg: str, + ctx: Context, + serious: bool = False, + *, + blocker: bool = False, + code: ErrorCode | None = None, + ) -> None: + raise NotImplementedError + + @abstractmethod + def note(self, msg: str, ctx: Context, *, code: ErrorCode | None = None) -> None: + raise NotImplementedError + + @abstractmethod + def incomplete_feature_enabled(self, feature: str, ctx: Context) -> bool: + raise NotImplementedError + + @abstractmethod + def record_incomplete_ref(self) -> None: + raise NotImplementedError + + @abstractmethod + def defer(self, debug_context: Context | None = None, force_progress: bool = False) -> None: + raise NotImplementedError + + @abstractmethod + def is_incomplete_namespace(self, fullname: str) -> bool: + """Is a module or class namespace potentially missing some definitions?""" + raise NotImplementedError + + @property + @abstractmethod + def final_iteration(self) -> bool: + """Is this the final iteration of semantic analysis?""" + raise NotImplementedError + + @abstractmethod + def is_future_flag_set(self, flag: str) -> bool: + """Is the specific __future__ feature imported""" + raise NotImplementedError + + @property + @abstractmethod + def is_stub_file(self) -> bool: + raise NotImplementedError + + @abstractmethod + def is_func_scope(self) -> bool: + raise NotImplementedError + + @property + @abstractmethod + def type(self) -> TypeInfo | None: + raise NotImplementedError + + +@trait +class SemanticAnalyzerInterface(SemanticAnalyzerCoreInterface): + """A limited abstract interface to some generic semantic analyzer pass 2 functionality. 
+ + We use this interface for various reasons: + + * Looser coupling + * Cleaner import graph + * Less need to pass around callback functions + """ + + tvar_scope: TypeVarLikeScope + + @abstractmethod + def lookup( + self, name: str, ctx: Context, suppress_errors: bool = False + ) -> SymbolTableNode | None: + raise NotImplementedError + + @abstractmethod + def named_type(self, fullname: str, args: list[Type] | None = None) -> Instance: + raise NotImplementedError + + @abstractmethod + def named_type_or_none(self, fullname: str, args: list[Type] | None = None) -> Instance | None: + raise NotImplementedError + + @abstractmethod + def accept(self, node: Node) -> None: + raise NotImplementedError + + @abstractmethod + def anal_type( + self, + typ: Type, + /, + *, + tvar_scope: TypeVarLikeScope | None = None, + allow_tuple_literal: bool = False, + allow_unbound_tvars: bool = False, + allow_typed_dict_special_forms: bool = False, + allow_placeholder: bool = False, + report_invalid_types: bool = True, + prohibit_self_type: str | None = None, + prohibit_special_class_field_types: str | None = None, + ) -> Type | None: + raise NotImplementedError + + @abstractmethod + def get_and_bind_all_tvars(self, type_exprs: list[Expression]) -> list[TypeVarLikeType]: + raise NotImplementedError + + @abstractmethod + def basic_new_typeinfo(self, name: str, basetype_or_fallback: Instance, line: int) -> TypeInfo: + raise NotImplementedError + + @abstractmethod + def schedule_patch(self, priority: int, patch: Callable[[], None]) -> None: + raise NotImplementedError + + @abstractmethod + def add_symbol_table_node(self, name: str, symbol: SymbolTableNode) -> bool: + """Add node to the current symbol table.""" + raise NotImplementedError + + @abstractmethod + def current_symbol_table(self) -> SymbolTable: + """Get currently active symbol table. + + May be module, class, or local namespace. + """ + raise NotImplementedError + + @abstractmethod + def add_symbol( + self, + name: str, + node: SymbolNode, + context: Context, + module_public: bool = True, + module_hidden: bool = False, + can_defer: bool = True, + ) -> bool: + """Add symbol to the current symbol table.""" + raise NotImplementedError + + @abstractmethod + def add_symbol_skip_local(self, name: str, node: SymbolNode) -> None: + """Add symbol to the current symbol table, skipping locals. + + This is used to store symbol nodes in a symbol table that + is going to be serialized (local namespaces are not serialized). + See implementation docstring for more details. + """ + raise NotImplementedError + + @abstractmethod + def parse_bool(self, expr: Expression) -> bool | None: + raise NotImplementedError + + @abstractmethod + def qualified_name(self, name: str) -> str: + raise NotImplementedError + + @property + @abstractmethod + def is_typeshed_stub_file(self) -> bool: + raise NotImplementedError + + @abstractmethod + def process_placeholder( + self, name: str | None, kind: str, ctx: Context, force_progress: bool = False + ) -> None: + raise NotImplementedError + + +def set_callable_name(sig: Type, fdef: FuncDef) -> ProperType: + sig = get_proper_type(sig) + if isinstance(sig, FunctionLike): + if fdef.info: + if fdef.info.fullname in TPDICT_FB_NAMES: + # Avoid exposing the internal _TypedDict name. 
+ class_name = "TypedDict" + else: + class_name = fdef.info.name + return sig.with_name(f"{fdef.name} of {class_name}") + else: + return sig.with_name(fdef.name) + else: + return sig + + +def calculate_tuple_fallback(typ: TupleType) -> None: + """Calculate a precise item type for the fallback of a tuple type. + + This must be called only after the main semantic analysis pass, since joins + aren't available before that. + + Note that there is an apparent chicken and egg problem with respect + to verifying type arguments against bounds. Verifying bounds might + require fallbacks, but we might use the bounds to calculate the + fallbacks. In practice this is not a problem, since the worst that + can happen is that we have invalid type argument values, and these + can happen in later stages as well (they will generate errors, but + we don't prevent their existence). + """ + fallback = typ.partial_fallback + assert fallback.type.fullname == "builtins.tuple" + items = [] + for item in flatten_nested_tuples(typ.items): + # TODO: this duplicates some logic in typeops.tuple_fallback(). + if isinstance(item, UnpackType): + unpacked_type = get_proper_type(item.type) + if isinstance(unpacked_type, TypeVarTupleType): + unpacked_type = get_proper_type(unpacked_type.upper_bound) + if ( + isinstance(unpacked_type, Instance) + and unpacked_type.type.fullname == "builtins.tuple" + ): + items.append(unpacked_type.args[0]) + else: + # This is called before semanal_typeargs.py fixes broken unpacks, + # where the error should also be generated. + items.append(AnyType(TypeOfAny.from_error)) + else: + items.append(item) + fallback.args = (make_simplified_union(items),) + + +class _NamedTypeCallback(Protocol): + def __call__(self, fullname: str, args: list[Type] | None = None) -> Instance: ... + + +def paramspec_args( + name: str, + fullname: str, + id: TypeVarId, + *, + named_type_func: _NamedTypeCallback, + line: int = -1, + column: int = -1, + prefix: Parameters | None = None, +) -> ParamSpecType: + return ParamSpecType( + name, + fullname, + id, + flavor=ParamSpecFlavor.ARGS, + upper_bound=named_type_func("builtins.tuple", [named_type_func("builtins.object")]), + default=AnyType(TypeOfAny.from_omitted_generics), + line=line, + column=column, + prefix=prefix, + ) + + +def paramspec_kwargs( + name: str, + fullname: str, + id: TypeVarId, + *, + named_type_func: _NamedTypeCallback, + line: int = -1, + column: int = -1, + prefix: Parameters | None = None, +) -> ParamSpecType: + return ParamSpecType( + name, + fullname, + id, + flavor=ParamSpecFlavor.KWARGS, + upper_bound=named_type_func( + "builtins.dict", [named_type_func("builtins.str"), named_type_func("builtins.object")] + ), + default=AnyType(TypeOfAny.from_omitted_generics), + line=line, + column=column, + prefix=prefix, + ) + + +class HasPlaceholders(BoolTypeQuery): + def __init__(self) -> None: + super().__init__(ANY_STRATEGY) + + def visit_placeholder_type(self, t: PlaceholderType) -> bool: + return True + + +def has_placeholder(typ: Type) -> bool: + """Check if a type contains any placeholder types (recursively).""" + return typ.accept(HasPlaceholders()) + + +def find_dataclass_transform_spec(node: Node | None) -> DataclassTransformSpec | None: + """ + Find the dataclass transform spec for the given node, if any exists. + + Per PEP 681 (https://peps.python.org/pep-0681/#the-dataclass-transform-decorator), dataclass + transforms can be specified in multiple ways, including decorator functions and + metaclasses/base classes. 
This function resolves the spec from any of these variants. + """ + + # The spec only lives on the function/class definition itself, so we need to unwrap down to that + # point + if isinstance(node, CallExpr): + # Like dataclasses.dataclass, transform-based decorators can be applied either with or + # without parameters; ie, both of these forms are accepted: + # + # @typing.dataclass_transform + # class Foo: ... + # @typing.dataclass_transform(eq=True, order=True, ...) + # class Bar: ... + # + # We need to unwrap the call for the second variant. + node = node.callee + + if isinstance(node, RefExpr): + node = node.node + + if isinstance(node, Decorator): + # typing.dataclass_transform usage must always result in a Decorator; it always uses the + # `@dataclass_transform(...)` syntax and never `@dataclass_transform` + node = node.func + + if isinstance(node, OverloadedFuncDef): + # The dataclass_transform decorator may be attached to any single overload, so we must + # search them all. + # Note that using more than one decorator is undefined behavior, so we can just take the + # first that we find. + for candidate in node.items: + spec = find_dataclass_transform_spec(candidate) + if spec is not None: + return spec + return find_dataclass_transform_spec(node.impl) + + # For functions, we can directly consult the AST field for the spec + if isinstance(node, FuncDef): + return node.dataclass_transform_spec + + if isinstance(node, ClassDef): + node = node.info + if isinstance(node, TypeInfo): + # Search all parent classes to see if any are decorated with `typing.dataclass_transform` + for base in node.mro[1:]: + if base.dataclass_transform_spec is not None: + return base.dataclass_transform_spec + + # Check if there is a metaclass that is decorated with `typing.dataclass_transform` + # + # Note that PEP 681 only discusses using a metaclass that is directly decorated with + # `typing.dataclass_transform`; subclasses thereof should be treated with dataclass + # semantics rather than as transforms: + # + # > If dataclass_transform is applied to a class, dataclass-like semantics will be assumed + # > for any class that directly or indirectly derives from the decorated class or uses the + # > decorated class as a metaclass. + # + # The wording doesn't make this entirely explicit, but Pyright (the reference + # implementation for this PEP) only handles directly-decorated metaclasses. + metaclass_type = node.metaclass_type + if metaclass_type is not None and metaclass_type.type.dataclass_transform_spec is not None: + return metaclass_type.type.dataclass_transform_spec + + return None + + +# Never returns `None` if a default is given +@overload +def require_bool_literal_argument( + api: SemanticAnalyzerInterface | SemanticAnalyzerPluginInterface, + expression: Expression, + name: str, + default: Literal[True, False], +) -> bool: ... + + +@overload +def require_bool_literal_argument( + api: SemanticAnalyzerInterface | SemanticAnalyzerPluginInterface, + expression: Expression, + name: str, + default: None = None, +) -> bool | None: ... 
+ + +def require_bool_literal_argument( + api: SemanticAnalyzerInterface | SemanticAnalyzerPluginInterface, + expression: Expression, + name: str, + default: bool | None = None, +) -> bool | None: + """Attempt to interpret an expression as a boolean literal, and fail analysis if we can't.""" + value = parse_bool(expression) + if value is None: + api.fail( + f'"{name}" argument must be a True or False literal', expression, code=LITERAL_REQ + ) + return default + + return value + + +def parse_bool(expr: Expression) -> bool | None: + if isinstance(expr, NameExpr): + if expr.fullname == "builtins.True": + return True + if expr.fullname == "builtins.False": + return False + return None diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_typeargs.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/semanal_typeargs.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..5e4df3a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/semanal_typeargs.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_typeargs.py b/.venv/lib/python3.12/site-packages/mypy/semanal_typeargs.py new file mode 100644 index 0000000..9d1ce1f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/semanal_typeargs.py @@ -0,0 +1,292 @@ +"""Verify properties of type arguments, like 'int' in C[int] being valid. + +This must happen after semantic analysis since there can be placeholder +types until the end of semantic analysis, and these break various type +operations, including subtype checks. +""" + +from __future__ import annotations + +from typing import Callable + +from mypy import errorcodes as codes, message_registry +from mypy.errorcodes import ErrorCode +from mypy.errors import Errors +from mypy.message_registry import INVALID_PARAM_SPEC_LOCATION, INVALID_PARAM_SPEC_LOCATION_NOTE +from mypy.messages import format_type +from mypy.mixedtraverser import MixedTraverserVisitor +from mypy.nodes import Block, ClassDef, Context, FakeInfo, FuncItem, MypyFile +from mypy.options import Options +from mypy.scope import Scope +from mypy.subtypes import is_same_type, is_subtype +from mypy.types import ( + AnyType, + CallableType, + Instance, + Parameters, + ParamSpecType, + TupleType, + Type, + TypeAliasType, + TypeOfAny, + TypeVarLikeType, + TypeVarTupleType, + TypeVarType, + UnboundType, + UnpackType, + flatten_nested_tuples, + get_proper_type, + get_proper_types, + split_with_prefix_and_suffix, +) +from mypy.typevartuples import erased_vars + + +class TypeArgumentAnalyzer(MixedTraverserVisitor): + def __init__( + self, + errors: Errors, + options: Options, + is_typeshed_file: bool, + named_type: Callable[[str, list[Type]], Instance], + ) -> None: + super().__init__() + self.errors = errors + self.options = options + self.is_typeshed_file = is_typeshed_file + self.named_type = named_type + self.scope = Scope() + # Should we also analyze function definitions, or only module top-levels? + self.recurse_into_functions = True + # Keep track of the type aliases already visited. This is needed to avoid + # infinite recursion on types like A = Union[int, List[A]]. 
+ self.seen_aliases: set[TypeAliasType] = set() + + def visit_mypy_file(self, o: MypyFile) -> None: + self.errors.set_file(o.path, o.fullname, scope=self.scope, options=self.options) + with self.scope.module_scope(o.fullname): + super().visit_mypy_file(o) + + def visit_func(self, defn: FuncItem) -> None: + if not self.recurse_into_functions: + return + with self.scope.function_scope(defn): + super().visit_func(defn) + + def visit_class_def(self, defn: ClassDef) -> None: + with self.scope.class_scope(defn.info): + super().visit_class_def(defn) + + def visit_block(self, o: Block) -> None: + if not o.is_unreachable: + super().visit_block(o) + + def visit_type_alias_type(self, t: TypeAliasType) -> None: + super().visit_type_alias_type(t) + if t.is_recursive: + if t in self.seen_aliases: + # Avoid infinite recursion on recursive type aliases. + return + self.seen_aliases.add(t) + assert t.alias is not None, f"Unfixed type alias {t.type_ref}" + is_error, is_invalid = self.validate_args( + t.alias.name, tuple(t.args), t.alias.alias_tvars, t + ) + if is_invalid: + # If there is an arity error (e.g. non-Parameters used for ParamSpec etc.), + # then it is safer to erase the arguments completely, to avoid crashes later. + # TODO: can we move this logic to typeanal.py? + t.args = erased_vars(t.alias.alias_tvars, TypeOfAny.from_error) + if not is_error: + # If there was already an error for the alias itself, there is no point in checking + # the expansion, most likely it will result in the same kind of error. + if t.args: + # Since we always allow unbounded type variables in alias definitions, we need + # to verify the arguments satisfy the upper bounds of the expansion as well. + get_proper_type(t).accept(self) + if t.is_recursive: + self.seen_aliases.discard(t) + + def visit_tuple_type(self, t: TupleType) -> None: + t.items = flatten_nested_tuples(t.items) + # We could also normalize Tuple[*tuple[X, ...]] -> tuple[X, ...] like in + # expand_type() but we can't do this here since it is not a translator visitor, + # and we need to return an Instance instead of TupleType. + super().visit_tuple_type(t) + + def visit_callable_type(self, t: CallableType) -> None: + super().visit_callable_type(t) + t.normalize_trivial_unpack() + + def visit_instance(self, t: Instance) -> None: + super().visit_instance(t) + # Type argument counts were checked in the main semantic analyzer pass. We assume + # that the counts are correct here. + info = t.type + if isinstance(info, FakeInfo): + return # https://github.com/python/mypy/issues/11079 + _, is_invalid = self.validate_args(info.name, t.args, info.defn.type_vars, t) + if is_invalid: + t.args = tuple(erased_vars(info.defn.type_vars, TypeOfAny.from_error)) + if t.type.fullname == "builtins.tuple" and len(t.args) == 1: + # Normalize Tuple[*Tuple[X, ...], ...] -> Tuple[X, ...] 
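+            # i.e. a homogeneous tuple wrapped in a single Unpack is flattened to the
+            # inner tuple's own arguments, so later checks see a plain tuple[X, ...].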
+ arg = t.args[0] + if isinstance(arg, UnpackType): + unpacked = get_proper_type(arg.type) + if isinstance(unpacked, Instance): + assert unpacked.type.fullname == "builtins.tuple" + t.args = unpacked.args + + def validate_args( + self, name: str, args: tuple[Type, ...], type_vars: list[TypeVarLikeType], ctx: Context + ) -> tuple[bool, bool]: + if any(isinstance(v, TypeVarTupleType) for v in type_vars): + prefix = next(i for (i, v) in enumerate(type_vars) if isinstance(v, TypeVarTupleType)) + tvt = type_vars[prefix] + assert isinstance(tvt, TypeVarTupleType) + start, middle, end = split_with_prefix_and_suffix( + tuple(args), prefix, len(type_vars) - prefix - 1 + ) + args = start + (TupleType(list(middle), tvt.tuple_fallback),) + end + + is_error = False + is_invalid = False + for arg, tvar in zip(args, type_vars): + context = ctx if arg.line < 0 else arg + if isinstance(tvar, TypeVarType): + if isinstance(arg, ParamSpecType): + is_invalid = True + self.fail( + INVALID_PARAM_SPEC_LOCATION.format(format_type(arg, self.options)), + context, + code=codes.VALID_TYPE, + ) + self.note( + INVALID_PARAM_SPEC_LOCATION_NOTE.format(arg.name), + context, + code=codes.VALID_TYPE, + ) + continue + if isinstance(arg, Parameters): + is_invalid = True + self.fail( + f"Cannot use {format_type(arg, self.options)} for regular type variable," + " only for ParamSpec", + context, + code=codes.VALID_TYPE, + ) + continue + if self.in_type_alias_expr and isinstance(arg, TypeVarType): + # Type aliases are allowed to use unconstrained type variables + # error will be checked at substitution point. + continue + if tvar.values: + if isinstance(arg, TypeVarType): + arg_values = arg.values + if not arg_values: + is_error = True + self.fail( + message_registry.INVALID_TYPEVAR_AS_TYPEARG.format(arg.name, name), + context, + code=codes.TYPE_VAR, + ) + continue + else: + arg_values = [arg] + if self.check_type_var_values( + name, arg_values, tvar.name, tvar.values, context + ): + is_error = True + # Check against upper bound. Since it's object the vast majority of the time, + # add fast path to avoid a potentially slow subtype check. + upper_bound = tvar.upper_bound + object_upper_bound = ( + type(upper_bound) is Instance + and upper_bound.type.fullname == "builtins.object" + ) + if not object_upper_bound and not is_subtype(arg, upper_bound): + is_error = True + self.fail( + message_registry.INVALID_TYPEVAR_ARG_BOUND.format( + format_type(arg, self.options), + name, + format_type(upper_bound, self.options), + ), + context, + code=codes.TYPE_VAR, + ) + elif isinstance(tvar, ParamSpecType): + if not isinstance( + get_proper_type(arg), (ParamSpecType, Parameters, AnyType, UnboundType) + ): + is_invalid = True + self.fail( + "Can only replace ParamSpec with a parameter types list or" + f" another ParamSpec, got {format_type(arg, self.options)}", + context, + code=codes.VALID_TYPE, + ) + if is_invalid: + is_error = True + return is_error, is_invalid + + def visit_unpack_type(self, typ: UnpackType) -> None: + super().visit_unpack_type(typ) + proper_type = get_proper_type(typ.type) + if isinstance(proper_type, TupleType): + return + if isinstance(proper_type, TypeVarTupleType): + return + # TODO: this should probably be .has_base("builtins.tuple"), also elsewhere. This is + # tricky however, since this needs map_instance_to_supertype() available in many places. 
+ if isinstance(proper_type, Instance) and proper_type.type.fullname == "builtins.tuple": + return + if not isinstance(proper_type, (UnboundType, AnyType)): + # Avoid extra errors if there were some errors already. Also interpret plain Any + # as tuple[Any, ...] (this is better for the code in type checker). + self.fail( + message_registry.INVALID_UNPACK.format(format_type(proper_type, self.options)), + typ.type, + code=codes.VALID_TYPE, + ) + typ.type = self.named_type("builtins.tuple", [AnyType(TypeOfAny.from_error)]) + + def check_type_var_values( + self, name: str, actuals: list[Type], arg_name: str, valids: list[Type], context: Context + ) -> bool: + if self.in_type_alias_expr: + # See testValidTypeAliasValues - we do not enforce typevar compatibility + # at the definition site. We check instantiation validity later. + return False + is_error = False + for actual in get_proper_types(actuals): + # We skip UnboundType here, since they may appear in defn.bases, + # the error will be caught when visiting info.bases, that have bound type + # variables. + if not isinstance(actual, (AnyType, UnboundType)) and not any( + is_same_type(actual, value) for value in valids + ): + is_error = True + if len(actuals) > 1 or not isinstance(actual, Instance): + self.fail( + message_registry.INVALID_TYPEVAR_ARG_VALUE.format(name), + context, + code=codes.TYPE_VAR, + ) + else: + class_name = f'"{name}"' + actual_type_name = f'"{actual.type.name}"' + self.fail( + message_registry.INCOMPATIBLE_TYPEVAR_VALUE.format( + arg_name, class_name, actual_type_name + ), + context, + code=codes.TYPE_VAR, + ) + return is_error + + def fail(self, msg: str, context: Context, *, code: ErrorCode | None = None) -> None: + self.errors.report(context.line, context.column, msg, code=code) + + def note(self, msg: str, context: Context, *, code: ErrorCode | None = None) -> None: + self.errors.report(context.line, context.column, msg, severity="note", code=code) diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_typeddict.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/semanal_typeddict.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..a2522ab Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/semanal_typeddict.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/semanal_typeddict.py b/.venv/lib/python3.12/site-packages/mypy/semanal_typeddict.py new file mode 100644 index 0000000..8bf073d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/semanal_typeddict.py @@ -0,0 +1,631 @@ +"""Semantic analysis of TypedDict definitions.""" + +from __future__ import annotations + +from collections.abc import Collection +from typing import Final + +from mypy import errorcodes as codes, message_registry +from mypy.errorcodes import ErrorCode +from mypy.expandtype import expand_type +from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type +from mypy.message_registry import TYPEDDICT_OVERRIDE_MERGE +from mypy.messages import MessageBuilder +from mypy.nodes import ( + ARG_NAMED, + ARG_POS, + AssignmentStmt, + CallExpr, + ClassDef, + Context, + DictExpr, + EllipsisExpr, + Expression, + ExpressionStmt, + IndexExpr, + NameExpr, + PassStmt, + RefExpr, + Statement, + StrExpr, + TempNode, + TupleExpr, + TypeAlias, + TypedDictExpr, + TypeInfo, +) +from mypy.options import Options +from mypy.semanal_shared import ( + SemanticAnalyzerInterface, + has_placeholder, + require_bool_literal_argument, +) +from mypy.state import 
state +from mypy.typeanal import check_for_explicit_any, has_any_from_unimported_type +from mypy.types import ( + TPDICT_NAMES, + AnyType, + ReadOnlyType, + RequiredType, + Type, + TypedDictType, + TypeOfAny, + TypeVarLikeType, + get_proper_type, +) + +TPDICT_CLASS_ERROR: Final = ( + 'Invalid statement in TypedDict definition; expected "field_name: field_type"' +) + + +class TypedDictAnalyzer: + def __init__( + self, options: Options, api: SemanticAnalyzerInterface, msg: MessageBuilder + ) -> None: + self.options = options + self.api = api + self.msg = msg + + def analyze_typeddict_classdef(self, defn: ClassDef) -> tuple[bool, TypeInfo | None]: + """Analyze a class that may define a TypedDict. + + Assume that base classes have been analyzed already. + + Note: Unlike normal classes, we won't create a TypeInfo until + the whole definition of the TypeDict (including the body and all + key names and types) is complete. This is mostly because we + store the corresponding TypedDictType in the TypeInfo. + + Return (is this a TypedDict, new TypeInfo). Specifics: + * If we couldn't finish due to incomplete reference anywhere in + the definition, return (True, None). + * If this is not a TypedDict, return (False, None). + """ + possible = False + for base_expr in defn.base_type_exprs: + if isinstance(base_expr, CallExpr): + base_expr = base_expr.callee + if isinstance(base_expr, IndexExpr): + base_expr = base_expr.base + if isinstance(base_expr, RefExpr): + self.api.accept(base_expr) + if base_expr.fullname in TPDICT_NAMES or self.is_typeddict(base_expr): + possible = True + if isinstance(base_expr.node, TypeInfo) and base_expr.node.is_final: + err = message_registry.CANNOT_INHERIT_FROM_FINAL + self.fail(err.format(base_expr.node.name).value, defn, code=err.code) + if not possible: + return False, None + existing_info = None + if isinstance(defn.analyzed, TypedDictExpr): + existing_info = defn.analyzed.info + + field_types: dict[str, Type] | None + if ( + len(defn.base_type_exprs) == 1 + and isinstance(defn.base_type_exprs[0], RefExpr) + and defn.base_type_exprs[0].fullname in TPDICT_NAMES + ): + # Building a new TypedDict + field_types, statements, required_keys, readonly_keys = ( + self.analyze_typeddict_classdef_fields(defn) + ) + if field_types is None: + return True, None # Defer + if self.api.is_func_scope() and "@" not in defn.name: + defn.name += "@" + str(defn.line) + info = self.build_typeddict_typeinfo( + defn.name, field_types, required_keys, readonly_keys, defn.line, existing_info + ) + defn.analyzed = TypedDictExpr(info) + defn.analyzed.line = defn.line + defn.analyzed.column = defn.column + defn.defs.body = statements + return True, info + + # Extending/merging existing TypedDicts + typeddict_bases: list[Expression] = [] + typeddict_bases_set = set() + for expr in defn.base_type_exprs: + ok, maybe_type_info, _ = self.check_typeddict(expr, None, False) + if ok and maybe_type_info is not None: + # expr is a CallExpr + info = maybe_type_info + typeddict_bases_set.add(info.fullname) + typeddict_bases.append(expr) + elif isinstance(expr, RefExpr) and expr.fullname in TPDICT_NAMES: + if "TypedDict" not in typeddict_bases_set: + typeddict_bases_set.add("TypedDict") + else: + self.fail('Duplicate base class "TypedDict"', defn) + elif ( + isinstance(expr, RefExpr) + and self.is_typeddict(expr) + or isinstance(expr, IndexExpr) + and self.is_typeddict(expr.base) + ): + info = self._parse_typeddict_base(expr, defn) + if info.fullname not in typeddict_bases_set: + 
typeddict_bases_set.add(info.fullname) + typeddict_bases.append(expr) + else: + self.fail(f'Duplicate base class "{info.name}"', defn) + else: + self.fail("All bases of a new TypedDict must be TypedDict types", defn) + + field_types = {} + required_keys = set() + readonly_keys = set() + # Iterate over bases in reverse order so that leftmost base class' keys take precedence + for base in reversed(typeddict_bases): + self.add_keys_and_types_from_base( + base, field_types, required_keys, readonly_keys, defn + ) + (new_field_types, new_statements, new_required_keys, new_readonly_keys) = ( + self.analyze_typeddict_classdef_fields(defn, oldfields=field_types) + ) + if new_field_types is None: + return True, None # Defer + field_types.update(new_field_types) + required_keys.update(new_required_keys) + readonly_keys.update(new_readonly_keys) + info = self.build_typeddict_typeinfo( + defn.name, field_types, required_keys, readonly_keys, defn.line, existing_info + ) + defn.analyzed = TypedDictExpr(info) + defn.analyzed.line = defn.line + defn.analyzed.column = defn.column + defn.defs.body = new_statements + return True, info + + def add_keys_and_types_from_base( + self, + base: Expression, + field_types: dict[str, Type], + required_keys: set[str], + readonly_keys: set[str], + ctx: Context, + ) -> None: + info = self._parse_typeddict_base(base, ctx) + base_args: list[Type] = [] + if isinstance(base, IndexExpr): + args = self.analyze_base_args(base, ctx) + if args is None: + return + base_args = args + + assert info.typeddict_type is not None + base_typed_dict = info.typeddict_type + base_items = base_typed_dict.items + valid_items = base_items.copy() + + # Always fix invalid bases to avoid crashes. + tvars = info.defn.type_vars + if len(base_args) != len(tvars): + any_kind = TypeOfAny.from_omitted_generics + if base_args: + self.fail(f'Invalid number of type arguments for "{info.name}"', ctx) + any_kind = TypeOfAny.from_error + base_args = [AnyType(any_kind) for _ in tvars] + + with state.strict_optional_set(self.options.strict_optional): + valid_items = self.map_items_to_base(valid_items, tvars, base_args) + for key in base_items: + if key in field_types: + self.fail(TYPEDDICT_OVERRIDE_MERGE.format(key), ctx) + + field_types.update(valid_items) + required_keys.update(base_typed_dict.required_keys) + readonly_keys.update(base_typed_dict.readonly_keys) + + def _parse_typeddict_base(self, base: Expression, ctx: Context) -> TypeInfo: + if isinstance(base, RefExpr): + if isinstance(base.node, TypeInfo): + return base.node + elif isinstance(base.node, TypeAlias): + # Only old TypeAlias / plain assignment, PEP695 `type` stmt + # cannot be used as a base class + target = get_proper_type(base.node.target) + assert isinstance(target, TypedDictType) + return target.fallback.type + else: + assert False + elif isinstance(base, IndexExpr): + assert isinstance(base.base, RefExpr) + return self._parse_typeddict_base(base.base, ctx) + else: + assert isinstance(base, CallExpr) + assert isinstance(base.analyzed, TypedDictExpr) + return base.analyzed.info + + def analyze_base_args(self, base: IndexExpr, ctx: Context) -> list[Type] | None: + """Analyze arguments of base type expressions as types. + + We need to do this, because normal base class processing happens after + the TypedDict special-casing (plus we get a custom error message). 
+ """ + base_args = [] + if isinstance(base.index, TupleExpr): + args = base.index.items + else: + args = [base.index] + + for arg_expr in args: + try: + type = expr_to_unanalyzed_type(arg_expr, self.options, self.api.is_stub_file) + except TypeTranslationError: + self.fail("Invalid TypedDict type argument", ctx) + return None + analyzed = self.api.anal_type( + type, + allow_typed_dict_special_forms=True, + allow_placeholder=not self.api.is_func_scope(), + ) + if analyzed is None: + return None + base_args.append(analyzed) + return base_args + + def map_items_to_base( + self, valid_items: dict[str, Type], tvars: list[TypeVarLikeType], base_args: list[Type] + ) -> dict[str, Type]: + """Map item types to how they would look in their base with type arguments applied. + + Note it is safe to use expand_type() during semantic analysis, because it should never + (indirectly) call is_subtype(). + """ + mapped_items = {} + for key in valid_items: + type_in_base = valid_items[key] + if not tvars: + mapped_items[key] = type_in_base + continue + # TODO: simple zip can't be used for variadic types. + mapped_items[key] = expand_type( + type_in_base, {t.id: a for (t, a) in zip(tvars, base_args)} + ) + return mapped_items + + def analyze_typeddict_classdef_fields( + self, defn: ClassDef, oldfields: Collection[str] | None = None + ) -> tuple[dict[str, Type] | None, list[Statement], set[str], set[str]]: + """Analyze fields defined in a TypedDict class definition. + + This doesn't consider inherited fields (if any). Also consider totality, + if given. + + Return tuple with these items: + * Dict of key -> type (or None if found an incomplete reference -> deferral) + * List of statements from defn.defs.body that are legally allowed to be a + part of a TypedDict definition + * Set of required keys + """ + fields: dict[str, Type] = {} + readonly_keys = set[str]() + required_keys = set[str]() + statements: list[Statement] = [] + + total: bool | None = True + for key in defn.keywords: + if key == "total": + total = require_bool_literal_argument( + self.api, defn.keywords["total"], "total", True + ) + continue + for_function = ' for "__init_subclass__" of "TypedDict"' + self.msg.unexpected_keyword_argument_for_function(for_function, key, defn) + + for stmt in defn.defs.body: + if not isinstance(stmt, AssignmentStmt): + # Still allow pass or ... (for empty TypedDict's) and docstrings + if isinstance(stmt, PassStmt) or ( + isinstance(stmt, ExpressionStmt) + and isinstance(stmt.expr, (EllipsisExpr, StrExpr)) + ): + statements.append(stmt) + else: + defn.removed_statements.append(stmt) + self.fail(TPDICT_CLASS_ERROR, stmt) + elif len(stmt.lvalues) > 1 or not isinstance(stmt.lvalues[0], NameExpr): + # An assignment, but an invalid one. + defn.removed_statements.append(stmt) + self.fail(TPDICT_CLASS_ERROR, stmt) + else: + name = stmt.lvalues[0].name + if name in (oldfields or []): + self.fail(f'Overwriting TypedDict field "{name}" while extending', stmt) + if name in fields: + self.fail(f'Duplicate TypedDict key "{name}"', stmt) + continue + # Append stmt, name, and type in this case... 
+ statements.append(stmt) + + field_type: Type + if stmt.unanalyzed_type is None: + field_type = AnyType(TypeOfAny.unannotated) + else: + analyzed = self.api.anal_type( + stmt.unanalyzed_type, + allow_typed_dict_special_forms=True, + allow_placeholder=not self.api.is_func_scope(), + prohibit_self_type="TypedDict item type", + prohibit_special_class_field_types="TypedDict", + ) + if analyzed is None: + return None, [], set(), set() # Need to defer + field_type = analyzed + if not has_placeholder(analyzed): + stmt.type = self.extract_meta_info(analyzed, stmt)[0] + + field_type, required, readonly = self.extract_meta_info(field_type) + fields[name] = field_type + + if (total or required is True) and required is not False: + required_keys.add(name) + if readonly: + readonly_keys.add(name) + + # ...despite possible minor failures that allow further analysis. + if stmt.type is None or hasattr(stmt, "new_syntax") and not stmt.new_syntax: + self.fail(TPDICT_CLASS_ERROR, stmt) + elif not isinstance(stmt.rvalue, TempNode): + # x: int assigns rvalue to TempNode(AnyType()) + self.fail("Right hand side values are not supported in TypedDict", stmt) + + return fields, statements, required_keys, readonly_keys + + def extract_meta_info( + self, typ: Type, context: Context | None = None + ) -> tuple[Type, bool | None, bool]: + """Unwrap all metadata types.""" + is_required = None # default, no modification + readonly = False # by default all is mutable + + seen_required = False + seen_readonly = False + while isinstance(typ, (RequiredType, ReadOnlyType)): + if isinstance(typ, RequiredType): + if context is not None and seen_required: + self.fail( + '"{}" type cannot be nested'.format( + "Required[]" if typ.required else "NotRequired[]" + ), + context, + code=codes.VALID_TYPE, + ) + is_required = typ.required + seen_required = True + typ = typ.item + if isinstance(typ, ReadOnlyType): + if context is not None and seen_readonly: + self.fail('"ReadOnly[]" type cannot be nested', context, code=codes.VALID_TYPE) + readonly = True + seen_readonly = True + typ = typ.item + return typ, is_required, readonly + + def check_typeddict( + self, node: Expression, var_name: str | None, is_func_scope: bool + ) -> tuple[bool, TypeInfo | None, list[TypeVarLikeType]]: + """Check if a call defines a TypedDict. + + The optional var_name argument is the name of the variable to + which this is assigned, if any. + + Return a pair (is it a typed dict, corresponding TypeInfo). + + If the definition is invalid but looks like a TypedDict, + report errors but return (some) TypeInfo. If some type is not ready, + return (True, None). + """ + if not isinstance(node, CallExpr): + return False, None, [] + call = node + callee = call.callee + if not isinstance(callee, RefExpr): + return False, None, [] + fullname = callee.fullname + if fullname not in TPDICT_NAMES: + return False, None, [] + res = self.parse_typeddict_args(call) + if res is None: + # This is a valid typed dict, but some type is not ready. + # The caller should defer this until next iteration. + return True, None, [] + name, items, types, total, tvar_defs, ok = res + if not ok: + # Error. Construct dummy return value. 
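+            # The dummy TypeInfo (no fields, nothing required) lets the rest of semantic
+            # analysis keep going without cascading errors from the malformed call.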
+ if var_name: + name = var_name + if is_func_scope: + name += "@" + str(call.line) + else: + name = var_name = "TypedDict@" + str(call.line) + info = self.build_typeddict_typeinfo(name, {}, set(), set(), call.line, None) + else: + if var_name is not None and name != var_name: + self.fail( + 'First argument "{}" to TypedDict() does not match variable name "{}"'.format( + name, var_name + ), + node, + code=codes.NAME_MATCH, + ) + if name != var_name or is_func_scope: + # Give it a unique name derived from the line number. + name += "@" + str(call.line) + required_keys = { + field + for (field, t) in zip(items, types) + if (total or (isinstance(t, RequiredType) and t.required)) + and not (isinstance(t, RequiredType) and not t.required) + } + readonly_keys = { + field for (field, t) in zip(items, types) if isinstance(t, ReadOnlyType) + } + types = [ # unwrap Required[T] or ReadOnly[T] to just T + t.item if isinstance(t, (RequiredType, ReadOnlyType)) else t for t in types + ] + + # Perform various validations after unwrapping. + for t in types: + check_for_explicit_any( + t, self.options, self.api.is_typeshed_stub_file, self.msg, context=call + ) + if self.options.disallow_any_unimported: + for t in types: + if has_any_from_unimported_type(t): + self.msg.unimported_type_becomes_any("Type of a TypedDict key", t, call) + + existing_info = None + if isinstance(node.analyzed, TypedDictExpr): + existing_info = node.analyzed.info + info = self.build_typeddict_typeinfo( + name, + dict(zip(items, types)), + required_keys, + readonly_keys, + call.line, + existing_info, + ) + info.line = node.line + # Store generated TypeInfo under both names, see semanal_namedtuple for more details. + if name != var_name or is_func_scope: + self.api.add_symbol_skip_local(name, info) + if var_name: + self.api.add_symbol(var_name, info, node) + call.analyzed = TypedDictExpr(info) + call.analyzed.set_line(call) + return True, info, tvar_defs + + def parse_typeddict_args( + self, call: CallExpr + ) -> tuple[str, list[str], list[Type], bool, list[TypeVarLikeType], bool] | None: + """Parse typed dict call expression. + + Return names, types, totality, was there an error during parsing. + If some type is not ready, return None. + """ + # TODO: Share code with check_argument_count in checkexpr.py? 
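+        # Shape of the functional form this parser accepts (hypothetical example, not from mypy):
+        #   Movie = TypedDict("Movie", {"title": str, "year": int}, total=False)
+        # args[0] must be the name string literal, args[1] the field dictionary literal,
+        # and the only supported keyword argument is "total".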
+ args = call.args + if len(args) < 2: + return self.fail_typeddict_arg("Too few arguments for TypedDict()", call) + if len(args) > 3: + return self.fail_typeddict_arg("Too many arguments for TypedDict()", call) + # TODO: Support keyword arguments + if call.arg_kinds not in ([ARG_POS, ARG_POS], [ARG_POS, ARG_POS, ARG_NAMED]): + return self.fail_typeddict_arg("Unexpected arguments to TypedDict()", call) + if len(args) == 3 and call.arg_names[2] != "total": + return self.fail_typeddict_arg( + f'Unexpected keyword argument "{call.arg_names[2]}" for "TypedDict"', call + ) + if not isinstance(args[0], StrExpr): + return self.fail_typeddict_arg( + "TypedDict() expects a string literal as the first argument", call + ) + if not isinstance(args[1], DictExpr): + return self.fail_typeddict_arg( + "TypedDict() expects a dictionary literal as the second argument", call + ) + total: bool | None = True + if len(args) == 3: + total = require_bool_literal_argument(self.api, call.args[2], "total") + if total is None: + return "", [], [], True, [], False + dictexpr = args[1] + tvar_defs = self.api.get_and_bind_all_tvars([t for k, t in dictexpr.items]) + res = self.parse_typeddict_fields_with_types(dictexpr.items) + if res is None: + # One of the types is not ready, defer. + return None + items, types, ok = res + assert total is not None + return args[0].value, items, types, total, tvar_defs, ok + + def parse_typeddict_fields_with_types( + self, dict_items: list[tuple[Expression | None, Expression]] + ) -> tuple[list[str], list[Type], bool] | None: + """Parse typed dict items passed as pairs (name expression, type expression). + + Return names, types, was there an error. If some type is not ready, return None. + """ + seen_keys = set() + items: list[str] = [] + types: list[Type] = [] + for field_name_expr, field_type_expr in dict_items: + if isinstance(field_name_expr, StrExpr): + key = field_name_expr.value + items.append(key) + if key in seen_keys: + self.fail(f'Duplicate TypedDict key "{key}"', field_name_expr) + seen_keys.add(key) + else: + name_context = field_name_expr or field_type_expr + self.fail_typeddict_arg("Invalid TypedDict() field name", name_context) + return [], [], False + try: + type = expr_to_unanalyzed_type( + field_type_expr, self.options, self.api.is_stub_file + ) + except TypeTranslationError: + self.fail_typeddict_arg("Use dict literal for nested TypedDict", field_type_expr) + return [], [], False + analyzed = self.api.anal_type( + type, + allow_typed_dict_special_forms=True, + allow_placeholder=not self.api.is_func_scope(), + prohibit_self_type="TypedDict item type", + prohibit_special_class_field_types="TypedDict", + ) + if analyzed is None: + return None + types.append(analyzed) + return items, types, True + + def fail_typeddict_arg( + self, message: str, context: Context + ) -> tuple[str, list[str], list[Type], bool, list[TypeVarLikeType], bool]: + self.fail(message, context) + return "", [], [], True, [], False + + def build_typeddict_typeinfo( + self, + name: str, + item_types: dict[str, Type], + required_keys: set[str], + readonly_keys: set[str], + line: int, + existing_info: TypeInfo | None, + ) -> TypeInfo: + # Prefer typing then typing_extensions if available. 
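+        # Whichever is found becomes the fallback Instance attached to the TypedDictType
+        # constructed below.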
+ fallback = ( + self.api.named_type_or_none("typing._TypedDict", []) + or self.api.named_type_or_none("typing_extensions._TypedDict", []) + or self.api.named_type_or_none("mypy_extensions._TypedDict", []) + ) + assert fallback is not None + info = existing_info or self.api.basic_new_typeinfo(name, fallback, line) + typeddict_type = TypedDictType(item_types, required_keys, readonly_keys, fallback) + if info.special_alias and has_placeholder(info.special_alias.target): + self.api.process_placeholder( + None, "TypedDict item", info, force_progress=typeddict_type != info.typeddict_type + ) + info.update_typeddict_type(typeddict_type) + return info + + # Helpers + + def is_typeddict(self, expr: Expression) -> bool: + return isinstance(expr, RefExpr) and ( + isinstance(expr.node, TypeInfo) + and expr.node.typeddict_type is not None + or isinstance(expr.node, TypeAlias) + and isinstance(get_proper_type(expr.node.target), TypedDictType) + ) + + def fail(self, msg: str, ctx: Context, *, code: ErrorCode | None = None) -> None: + self.api.fail(msg, ctx, code=code) + + def note(self, msg: str, ctx: Context) -> None: + self.api.note(msg, ctx) diff --git a/.venv/lib/python3.12/site-packages/mypy/server/__init__.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/server/__init__.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..7acf57a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/server/__init__.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/server/__init__.py b/.venv/lib/python3.12/site-packages/mypy/server/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypy/server/astdiff.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/server/astdiff.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..565d71c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/server/astdiff.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/server/astdiff.py b/.venv/lib/python3.12/site-packages/mypy/server/astdiff.py new file mode 100644 index 0000000..15d472b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/server/astdiff.py @@ -0,0 +1,550 @@ +"""Utilities for comparing two versions of a module symbol table. + +The goal is to find which AST nodes have externally visible changes, so +that we can fire triggers and re-process other parts of the program +that are stale because of the changes. + +Only look at detail at definitions at the current module -- don't +recurse into other modules. + +A summary of the module contents: + +* snapshot_symbol_table(...) creates an opaque snapshot description of a + module/class symbol table (recursing into nested class symbol tables). + +* compare_symbol_table_snapshots(...) compares two snapshots for the same + module id and returns fully qualified names of differences (which act as + triggers). + +To compare two versions of a module symbol table, take snapshots of both +versions and compare the snapshots. The use of snapshots makes it easy to +compare two versions of the *same* symbol table that is being mutated. + +Summary of how this works for certain kinds of differences: + +* If a symbol table node is deleted or added (only present in old/new version + of the symbol table), it is considered different, of course. 
+ +* If a symbol table node refers to a different sort of thing in the new version, + it is considered different (for example, if a class is replaced with a + function). + +* If the signature of a function has changed, it is considered different. + +* If the type of a variable changes, it is considered different. + +* If the MRO of a class changes, or a non-generic class is turned into a + generic class, the class is considered different (there are other such "big" + differences that cause a class to be considered changed). However, just changes + to attributes or methods don't generally constitute a difference at the + class level -- these are handled at attribute level (say, 'mod.Cls.method' + is different rather than 'mod.Cls' being different). + +* If an imported name targets a different name (say, 'from x import y' is + replaced with 'from z import y'), the name in the module is considered + different. If the target of an import continues to have the same name, + but it's specifics change, this doesn't mean that the imported name is + treated as changed. Say, there is 'from x import y' in 'm', and the + type of 'x.y' has changed. This doesn't mean that that 'm.y' is considered + changed. Instead, processing the difference in 'm' will be handled through + fine-grained dependencies. +""" + +from __future__ import annotations + +from collections.abc import Sequence +from typing import Union +from typing_extensions import TypeAlias as _TypeAlias + +from mypy.expandtype import expand_type +from mypy.nodes import ( + SYMBOL_FUNCBASE_TYPES, + UNBOUND_IMPORTED, + Decorator, + FuncDef, + FuncItem, + MypyFile, + OverloadedFuncDef, + ParamSpecExpr, + SymbolNode, + SymbolTable, + TypeAlias, + TypeInfo, + TypeVarExpr, + TypeVarTupleExpr, + Var, +) +from mypy.semanal_shared import find_dataclass_transform_spec +from mypy.state import state +from mypy.types import ( + AnyType, + CallableType, + DeletedType, + ErasedType, + Instance, + LiteralType, + NoneType, + Overloaded, + Parameters, + ParamSpecType, + PartialType, + TupleType, + Type, + TypeAliasType, + TypedDictType, + TypeType, + TypeVarId, + TypeVarLikeType, + TypeVarTupleType, + TypeVarType, + TypeVisitor, + UnboundType, + UninhabitedType, + UnionType, + UnpackType, +) +from mypy.util import get_prefix + +# Snapshot representation of a symbol table node or type. The representation is +# opaque -- the only supported operations are comparing for equality and +# hashing (latter for type snapshots only). Snapshots can contain primitive +# objects, nested tuples, lists and dictionaries and primitive objects (type +# snapshots are immutable). +# +# For example, the snapshot of the 'int' type is ('Instance', 'builtins.int', ()). + +# Type snapshots are strict, they must be hashable and ordered (e.g. for Unions). +Primitive: _TypeAlias = Union[str, float, int, bool] # float is for Literal[3.14] support. +SnapshotItem: _TypeAlias = tuple[Union[Primitive, "SnapshotItem"], ...] + +# Symbol snapshots can be more lenient. +SymbolSnapshot: _TypeAlias = tuple[object, ...] + + +def compare_symbol_table_snapshots( + name_prefix: str, snapshot1: dict[str, SymbolSnapshot], snapshot2: dict[str, SymbolSnapshot] +) -> set[str]: + """Return names that are different in two snapshots of a symbol table. + + Only shallow (intra-module) differences are considered. References to things defined + outside the module are compared based on the name of the target only. + + Recurse into class symbol tables (if the class is defined in the target module). 
+ + Return a set of fully-qualified names (e.g., 'mod.func' or 'mod.Class.method'). + """ + # Find names only defined only in one version. + names1 = {f"{name_prefix}.{name}" for name in snapshot1} + names2 = {f"{name_prefix}.{name}" for name in snapshot2} + triggers = names1 ^ names2 + + # Look for names defined in both versions that are different. + for name in set(snapshot1.keys()) & set(snapshot2.keys()): + item1 = snapshot1[name] + item2 = snapshot2[name] + kind1 = item1[0] + kind2 = item2[0] + item_name = f"{name_prefix}.{name}" + if kind1 != kind2: + # Different kind of node in two snapshots -> trivially different. + triggers.add(item_name) + elif kind1 == "TypeInfo": + if item1[:-1] != item2[:-1]: + # Record major difference (outside class symbol tables). + triggers.add(item_name) + # Look for differences in nested class symbol table entries. + assert isinstance(item1[-1], dict) + assert isinstance(item2[-1], dict) + triggers |= compare_symbol_table_snapshots(item_name, item1[-1], item2[-1]) + else: + # Shallow node (no interesting internal structure). Just use equality. + if snapshot1[name] != snapshot2[name]: + triggers.add(item_name) + + return triggers + + +def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> dict[str, SymbolSnapshot]: + """Create a snapshot description that represents the state of a symbol table. + + The snapshot has a representation based on nested tuples and dicts + that makes it easy and fast to find differences. + + Only "shallow" state is included in the snapshot -- references to + things defined in other modules are represented just by the names of + the targets. + """ + result: dict[str, SymbolSnapshot] = {} + for name, symbol in table.items(): + node = symbol.node + # TODO: cross_ref? + fullname = node.fullname if node else None + common = (fullname, symbol.kind, symbol.module_public) + if isinstance(node, MypyFile): + # This is a cross-reference to another module. + # If the reference is busted because the other module is missing, + # the node will be a "stale_info" TypeInfo produced by fixup, + # but that doesn't really matter to us here. + result[name] = ("Moduleref", common) + elif isinstance(node, TypeVarExpr): + result[name] = ( + "TypeVar", + node.variance, + [snapshot_type(value) for value in node.values], + snapshot_type(node.upper_bound), + snapshot_type(node.default), + ) + elif isinstance(node, TypeAlias): + result[name] = ( + "TypeAlias", + snapshot_types(node.alias_tvars), + node.normalized, + node.no_args, + snapshot_optional_type(node.target), + ) + elif isinstance(node, ParamSpecExpr): + result[name] = ( + "ParamSpec", + node.variance, + snapshot_type(node.upper_bound), + snapshot_type(node.default), + ) + elif isinstance(node, TypeVarTupleExpr): + result[name] = ( + "TypeVarTuple", + node.variance, + snapshot_type(node.upper_bound), + snapshot_type(node.default), + ) + else: + assert symbol.kind != UNBOUND_IMPORTED + if node and get_prefix(node.fullname) != name_prefix: + # This is a cross-reference to a node defined in another module. + # Include the node kind (FuncDef, Decorator, TypeInfo, ...), so that we will + # reprocess when a *new* node is created instead of merging an existing one. + result[name] = ("CrossRef", common, type(node).__name__) + else: + result[name] = snapshot_definition(node, common) + return result + + +def snapshot_definition(node: SymbolNode | None, common: SymbolSnapshot) -> SymbolSnapshot: + """Create a snapshot description of a symbol table node. 
+ + The representation is nested tuples and dicts. Only externally + visible attributes are included. + """ + if isinstance(node, SYMBOL_FUNCBASE_TYPES): + # TODO: info + if node.type: + signature: tuple[object, ...] = snapshot_type(node.type) + else: + signature = snapshot_untyped_signature(node) + impl: FuncDef | None = None + if isinstance(node, FuncDef): + impl = node + elif node.impl: + impl = node.impl.func if isinstance(node.impl, Decorator) else node.impl + setter_type = None + if isinstance(node, OverloadedFuncDef) and node.items: + first_item = node.items[0] + if isinstance(first_item, Decorator) and first_item.func.is_property: + setter_type = snapshot_optional_type(first_item.var.setter_type) + is_trivial_body = impl.is_trivial_body if impl else False + dataclass_transform_spec = find_dataclass_transform_spec(node) + + deprecated: str | list[str | None] | None = None + if isinstance(node, FuncDef): + deprecated = node.deprecated + elif isinstance(node, OverloadedFuncDef): + deprecated = [node.deprecated] + [ + i.func.deprecated for i in node.items if isinstance(i, Decorator) + ] + + return ( + "Func", + common, + node.is_property, + node.is_final, + node.is_class, + node.is_static, + signature, + is_trivial_body, + dataclass_transform_spec.serialize() if dataclass_transform_spec is not None else None, + deprecated, + setter_type, # multi-part properties are stored as OverloadedFuncDef + ) + elif isinstance(node, Var): + return ("Var", common, snapshot_optional_type(node.type), node.is_final) + elif isinstance(node, Decorator): + # Note that decorated methods are represented by Decorator instances in + # a symbol table since we need to preserve information about the + # decorated function (whether it's a class function, for + # example). Top-level decorated functions, however, are represented by + # the corresponding Var node, since that happens to provide enough + # context. + return ( + "Decorator", + node.is_overload, + snapshot_optional_type(node.var.type), + snapshot_definition(node.func, common), + ) + elif isinstance(node, TypeInfo): + dataclass_transform_spec = node.dataclass_transform_spec + if dataclass_transform_spec is None: + dataclass_transform_spec = find_dataclass_transform_spec(node) + + attrs = ( + node.is_abstract, + node.is_enum, + node.is_protocol, + node.fallback_to_any, + node.meta_fallback_to_any, + node.is_named_tuple, + node.is_newtype, + # We need this to e.g. trigger metaclass calculation in subclasses. + snapshot_optional_type(node.metaclass_type), + snapshot_optional_type(node.tuple_type), + snapshot_optional_type(node.typeddict_type), + [base.fullname for base in node.mro], + # Note that the structure of type variables is a part of the external interface, + # since creating instances might fail, for example: + # T = TypeVar('T', bound=int) + # class C(Generic[T]): + # ... + # x: C[str] <- this is invalid, and needs to be re-checked if `T` changes. + # An alternative would be to create both deps: <...> -> C, and <...> -> , + # but this currently seems a bit ad hoc. + tuple(snapshot_type(tdef) for tdef in node.defn.type_vars), + [snapshot_type(base) for base in node.bases], + [snapshot_type(p) for p in node._promote], + dataclass_transform_spec.serialize() if dataclass_transform_spec is not None else None, + node.deprecated, + ) + prefix = node.fullname + symbol_table = snapshot_symbol_table(prefix, node.names) + # Special dependency for abstract attribute handling. 
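+        # Storing the sorted abstract attribute names under the synthetic "(abstract)" key
+        # makes adding or removing an abstract method register as a change to that entry.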
+ symbol_table["(abstract)"] = ("Abstract", tuple(sorted(node.abstract_attributes))) + return ("TypeInfo", common, attrs, symbol_table) + else: + # Other node types are handled elsewhere. + assert False, type(node) + + +def snapshot_type(typ: Type) -> SnapshotItem: + """Create a snapshot representation of a type using nested tuples.""" + return typ.accept(SnapshotTypeVisitor()) + + +def snapshot_optional_type(typ: Type | None) -> SnapshotItem: + if typ: + return snapshot_type(typ) + else: + return ("",) + + +def snapshot_types(types: Sequence[Type]) -> SnapshotItem: + return tuple(snapshot_type(item) for item in types) + + +def snapshot_simple_type(typ: Type) -> SnapshotItem: + return (type(typ).__name__,) + + +def encode_optional_str(s: str | None) -> str: + if s is None: + return "" + else: + return s + + +class SnapshotTypeVisitor(TypeVisitor[SnapshotItem]): + """Creates a read-only, self-contained snapshot of a type object. + + Properties of a snapshot: + + - Contains (nested) tuples and other immutable primitive objects only. + - References to AST nodes are replaced with full names of targets. + - Has no references to mutable or non-primitive objects. + - Two snapshots represent the same object if and only if they are + equal. + - Results must be sortable. It's important that tuples have + consistent types and can't arbitrarily mix str and None values, + for example, since they can't be compared. + """ + + def visit_unbound_type(self, typ: UnboundType) -> SnapshotItem: + return ( + "UnboundType", + typ.name, + typ.optional, + typ.empty_tuple_index, + snapshot_types(typ.args), + ) + + def visit_any(self, typ: AnyType) -> SnapshotItem: + return snapshot_simple_type(typ) + + def visit_none_type(self, typ: NoneType) -> SnapshotItem: + return snapshot_simple_type(typ) + + def visit_uninhabited_type(self, typ: UninhabitedType) -> SnapshotItem: + return snapshot_simple_type(typ) + + def visit_erased_type(self, typ: ErasedType) -> SnapshotItem: + return snapshot_simple_type(typ) + + def visit_deleted_type(self, typ: DeletedType) -> SnapshotItem: + return snapshot_simple_type(typ) + + def visit_instance(self, typ: Instance) -> SnapshotItem: + extra_attrs: SnapshotItem + if typ.extra_attrs: + extra_attrs = ( + tuple(sorted((k, v.accept(self)) for k, v in typ.extra_attrs.attrs.items())), + tuple(typ.extra_attrs.immutable), + ) + else: + extra_attrs = () + return ( + "Instance", + encode_optional_str(typ.type.fullname), + snapshot_types(typ.args), + ("None",) if typ.last_known_value is None else snapshot_type(typ.last_known_value), + extra_attrs, + ) + + def visit_type_var(self, typ: TypeVarType) -> SnapshotItem: + return ( + "TypeVar", + typ.name, + typ.fullname, + typ.id.raw_id, + typ.id.meta_level, + snapshot_types(typ.values), + snapshot_type(typ.upper_bound), + snapshot_type(typ.default), + typ.variance, + ) + + def visit_param_spec(self, typ: ParamSpecType) -> SnapshotItem: + return ( + "ParamSpec", + typ.id.raw_id, + typ.id.meta_level, + typ.flavor, + snapshot_type(typ.upper_bound), + snapshot_type(typ.default), + snapshot_type(typ.prefix), + ) + + def visit_type_var_tuple(self, typ: TypeVarTupleType) -> SnapshotItem: + return ( + "TypeVarTupleType", + typ.id.raw_id, + typ.id.meta_level, + snapshot_type(typ.upper_bound), + snapshot_type(typ.default), + ) + + def visit_unpack_type(self, typ: UnpackType) -> SnapshotItem: + return ("UnpackType", snapshot_type(typ.type)) + + def visit_parameters(self, typ: Parameters) -> SnapshotItem: + return ( + "Parameters", + snapshot_types(typ.arg_types), 
+ tuple(encode_optional_str(name) for name in typ.arg_names), + tuple(k.value for k in typ.arg_kinds), + ) + + def visit_callable_type(self, typ: CallableType) -> SnapshotItem: + if typ.is_generic(): + typ = self.normalize_callable_variables(typ) + return ( + "CallableType", + snapshot_types(typ.arg_types), + snapshot_type(typ.ret_type), + tuple(encode_optional_str(name) for name in typ.arg_names), + tuple(k.value for k in typ.arg_kinds), + typ.is_type_obj(), + typ.is_ellipsis_args, + snapshot_types(typ.variables), + typ.is_bound, + ) + + def normalize_callable_variables(self, typ: CallableType) -> CallableType: + """Normalize all type variable ids to run from -1 to -len(variables).""" + tvs = [] + tvmap: dict[TypeVarId, Type] = {} + for i, v in enumerate(typ.variables): + tid = TypeVarId(-1 - i) + if isinstance(v, TypeVarType): + tv: TypeVarLikeType = v.copy_modified(id=tid) + elif isinstance(v, TypeVarTupleType): + tv = v.copy_modified(id=tid) + else: + assert isinstance(v, ParamSpecType) + tv = v.copy_modified(id=tid) + tvs.append(tv) + tvmap[v.id] = tv + with state.strict_optional_set(True): + return expand_type(typ, tvmap).copy_modified(variables=tvs) + + def visit_tuple_type(self, typ: TupleType) -> SnapshotItem: + return ("TupleType", snapshot_types(typ.items)) + + def visit_typeddict_type(self, typ: TypedDictType) -> SnapshotItem: + items = tuple((key, snapshot_type(item_type)) for key, item_type in typ.items.items()) + required = tuple(sorted(typ.required_keys)) + readonly = tuple(sorted(typ.readonly_keys)) + return ("TypedDictType", items, required, readonly) + + def visit_literal_type(self, typ: LiteralType) -> SnapshotItem: + return ("LiteralType", snapshot_type(typ.fallback), typ.value) + + def visit_union_type(self, typ: UnionType) -> SnapshotItem: + # Sort and remove duplicates so that we can use equality to test for + # equivalent union type snapshots. + items = {snapshot_type(item) for item in typ.items} + normalized = tuple(sorted(items)) + return ("UnionType", normalized) + + def visit_overloaded(self, typ: Overloaded) -> SnapshotItem: + return ("Overloaded", snapshot_types(typ.items)) + + def visit_partial_type(self, typ: PartialType) -> SnapshotItem: + # A partial type is not fully defined, so the result is indeterminate. We shouldn't + # get here. + raise RuntimeError + + def visit_type_type(self, typ: TypeType) -> SnapshotItem: + return ("TypeType", snapshot_type(typ.item), typ.is_type_form) + + def visit_type_alias_type(self, typ: TypeAliasType) -> SnapshotItem: + assert typ.alias is not None + return ("TypeAliasType", typ.alias.fullname, snapshot_types(typ.args)) + + +def snapshot_untyped_signature(func: OverloadedFuncDef | FuncItem) -> SymbolSnapshot: + """Create a snapshot of the signature of a function that has no explicit signature. + + If the arguments to a function without signature change, it must be + considered as different. We have this special casing since we don't store + the implicit signature anywhere, and we'd rather not construct new + Callable objects in this module (the idea is to only read properties of + the AST here). 
+ """ + if isinstance(func, FuncItem): + return (tuple(func.arg_names), tuple(func.arg_kinds)) + else: + result: list[SymbolSnapshot] = [] + for item in func.items: + if isinstance(item, Decorator): + if item.var.type: + result.append(snapshot_type(item.var.type)) + else: + result.append(("DecoratorWithoutType",)) + else: + result.append(snapshot_untyped_signature(item)) + return tuple(result) diff --git a/.venv/lib/python3.12/site-packages/mypy/server/astmerge.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/server/astmerge.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..b57c8b2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/server/astmerge.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/server/astmerge.py b/.venv/lib/python3.12/site-packages/mypy/server/astmerge.py new file mode 100644 index 0000000..56f2f93 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/server/astmerge.py @@ -0,0 +1,575 @@ +"""Merge a new version of a module AST and symbol table to older versions of those. + +When the source code of a module has a change in fine-grained incremental mode, +we build a new AST from the updated source. However, other parts of the program +may have direct references to parts of the old AST (namely, those nodes exposed +in the module symbol table). The merge operation changes the identities of new +AST nodes that have a correspondence in the old AST to the old ones so that +existing cross-references in other modules will continue to point to the correct +nodes. Also internal cross-references within the new AST are replaced. AST nodes +that aren't externally visible will get new, distinct object identities. This +applies to most expression and statement nodes, for example. + +We perform this merge operation so that we don't have to update all +external references (which would be slow and fragile) or always perform +translation when looking up references (which would be hard to retrofit). + +The AST merge operation is performed after semantic analysis. Semantic +analysis has to deal with potentially multiple aliases to certain AST +nodes (in particular, MypyFile nodes). Type checking assumes that we +don't have multiple variants of a single AST node visible to the type +checker. + +Discussion of some notable special cases: + +* If a node is replaced with a different kind of node (say, a function is + replaced with a class), we don't perform the merge. Fine-grained dependencies + will be used to rebind all references to the node. + +* If a function is replaced with another function with an identical signature, + call sites continue to point to the same object (by identity) and don't need + to be reprocessed. Similarly, if a class is replaced with a class that is + sufficiently similar (MRO preserved, etc.), class references don't need any + processing. A typical incremental update to a file only changes a few + externally visible things in a module, and this means that often only few + external references need any processing, even if the modified module is large. + +* A no-op update of a module should not require any processing outside the + module, since all relevant object identities are preserved. + +* The AST diff operation (mypy.server.astdiff) and the top-level fine-grained + incremental logic (mypy.server.update) handle the cases where the new AST has + differences from the old one that may need to be propagated to elsewhere in the + program. 
+ +See the main entry point merge_asts for more details. +""" + +from __future__ import annotations + +from typing import TypeVar, cast + +from mypy.nodes import ( + MDEF, + SYMBOL_NODE_EXPRESSION_TYPES, + AssertTypeExpr, + AssignmentStmt, + Block, + CallExpr, + CastExpr, + ClassDef, + EnumCallExpr, + FuncBase, + FuncDef, + LambdaExpr, + MemberExpr, + MypyFile, + NamedTupleExpr, + NameExpr, + NewTypeExpr, + OverloadedFuncDef, + RefExpr, + Statement, + SuperExpr, + SymbolNode, + SymbolTable, + TypeAlias, + TypedDictExpr, + TypeFormExpr, + TypeInfo, + Var, +) +from mypy.traverser import TraverserVisitor +from mypy.types import ( + AnyType, + CallableArgument, + CallableType, + DeletedType, + EllipsisType, + ErasedType, + Instance, + LiteralType, + NoneType, + Overloaded, + Parameters, + ParamSpecType, + PartialType, + PlaceholderType, + RawExpressionType, + SyntheticTypeVisitor, + TupleType, + Type, + TypeAliasType, + TypedDictType, + TypeList, + TypeType, + TypeVarTupleType, + TypeVarType, + UnboundType, + UninhabitedType, + UnionType, + UnpackType, +) +from mypy.typestate import type_state +from mypy.util import get_prefix, replace_object_state + + +def merge_asts( + old: MypyFile, old_symbols: SymbolTable, new: MypyFile, new_symbols: SymbolTable +) -> None: + """Merge a new version of a module AST to a previous version. + + The main idea is to preserve the identities of externally visible + nodes in the old AST (that have a corresponding node in the new AST). + All old node state (outside identity) will come from the new AST. + + When this returns, 'old' will refer to the merged AST, but 'new_symbols' + will be the new symbol table. 'new' and 'old_symbols' will no longer be + valid. + """ + assert new.fullname == old.fullname + # Find the mapping from new to old node identities for all nodes + # whose identities should be preserved. + replacement_map = replacement_map_from_symbol_table( + old_symbols, new_symbols, prefix=old.fullname + ) + # Also replace references to the new MypyFile node. + replacement_map[new] = old + # Perform replacements to everywhere within the new AST (not including symbol + # tables). + node = replace_nodes_in_ast(new, replacement_map) + assert node is old + # Also replace AST node references in the *new* symbol table (we'll + # continue to use the new symbol table since it has all the new definitions + # that have no correspondence in the old AST). + replace_nodes_in_symbol_table(new_symbols, replacement_map) + + +def replacement_map_from_symbol_table( + old: SymbolTable, new: SymbolTable, prefix: str +) -> dict[SymbolNode, SymbolNode]: + """Create a new-to-old object identity map by comparing two symbol table revisions. + + Both symbol tables must refer to revisions of the same module id. The symbol tables + are compared recursively (recursing into nested class symbol tables), but only within + the given module prefix. Don't recurse into other modules accessible through the symbol + table. 
+ """ + replacements: dict[SymbolNode, SymbolNode] = {} + for name, node in old.items(): + if name in new and ( + node.kind == MDEF or node.node and get_prefix(node.node.fullname) == prefix + ): + new_node = new[name] + if ( + type(new_node.node) == type(node.node) + and new_node.node + and node.node + and new_node.node.fullname == node.node.fullname + and new_node.kind == node.kind + ): + replacements[new_node.node] = node.node + if isinstance(node.node, TypeInfo) and isinstance(new_node.node, TypeInfo): + type_repl = replacement_map_from_symbol_table( + node.node.names, new_node.node.names, prefix + ) + replacements.update(type_repl) + if node.node.special_alias and new_node.node.special_alias: + replacements[new_node.node.special_alias] = node.node.special_alias + return replacements + + +def replace_nodes_in_ast( + node: SymbolNode, replacements: dict[SymbolNode, SymbolNode] +) -> SymbolNode: + """Replace all references to replacement map keys within an AST node, recursively. + + Also replace the *identity* of any nodes that have replacements. Return the + *replaced* version of the argument node (which may have a different identity, if + it's included in the replacement map). + """ + visitor = NodeReplaceVisitor(replacements) + node.accept(visitor) + return replacements.get(node, node) + + +SN = TypeVar("SN", bound=SymbolNode) + + +class NodeReplaceVisitor(TraverserVisitor): + """Transform some nodes to new identities in an AST. + + Only nodes that live in the symbol table may be + replaced, which simplifies the implementation some. Also + replace all references to the old identities. + """ + + def __init__(self, replacements: dict[SymbolNode, SymbolNode]) -> None: + self.replacements = replacements + + def visit_mypy_file(self, node: MypyFile) -> None: + node = self.fixup(node) + node.defs = self.replace_statements(node.defs) + super().visit_mypy_file(node) + + def visit_block(self, node: Block) -> None: + node.body = self.replace_statements(node.body) + super().visit_block(node) + + def visit_func_def(self, node: FuncDef) -> None: + node = self.fixup(node) + self.process_base_func(node) + super().visit_func_def(node) + + def visit_overloaded_func_def(self, node: OverloadedFuncDef) -> None: + self.process_base_func(node) + super().visit_overloaded_func_def(node) + + def visit_class_def(self, node: ClassDef) -> None: + # TODO additional things? 
+ node.info = self.fixup_and_reset_typeinfo(node.info) + node.defs.body = self.replace_statements(node.defs.body) + info = node.info + for tv in node.type_vars: + if isinstance(tv, TypeVarType): + self.process_type_var_def(tv) + if info: + if info.is_named_tuple: + self.process_synthetic_type_info(info) + else: + self.process_type_info(info) + super().visit_class_def(node) + + def process_base_func(self, node: FuncBase) -> None: + self.fixup_type(node.type) + node.info = self.fixup(node.info) + if node.unanalyzed_type: + # Unanalyzed types can have AST node references + self.fixup_type(node.unanalyzed_type) + + def process_type_var_def(self, tv: TypeVarType) -> None: + for value in tv.values: + self.fixup_type(value) + self.fixup_type(tv.upper_bound) + self.fixup_type(tv.default) + + def process_param_spec_def(self, tv: ParamSpecType) -> None: + self.fixup_type(tv.upper_bound) + self.fixup_type(tv.default) + + def process_type_var_tuple_def(self, tv: TypeVarTupleType) -> None: + self.fixup_type(tv.upper_bound) + self.fixup_type(tv.default) + + def visit_assignment_stmt(self, node: AssignmentStmt) -> None: + self.fixup_type(node.type) + super().visit_assignment_stmt(node) + + # Expressions + + def visit_name_expr(self, node: NameExpr) -> None: + self.visit_ref_expr(node) + + def visit_member_expr(self, node: MemberExpr) -> None: + if node.def_var: + node.def_var = self.fixup(node.def_var) + self.visit_ref_expr(node) + super().visit_member_expr(node) + + def visit_ref_expr(self, node: RefExpr) -> None: + if node.node is not None: + node.node = self.fixup(node.node) + if isinstance(node.node, Var): + # The Var node may be an orphan and won't otherwise be processed. + node.node.accept(self) + + def visit_namedtuple_expr(self, node: NamedTupleExpr) -> None: + super().visit_namedtuple_expr(node) + node.info = self.fixup_and_reset_typeinfo(node.info) + self.process_synthetic_type_info(node.info) + + def visit_cast_expr(self, node: CastExpr) -> None: + super().visit_cast_expr(node) + self.fixup_type(node.type) + + def visit_type_form_expr(self, node: TypeFormExpr) -> None: + super().visit_type_form_expr(node) + self.fixup_type(node.type) + + def visit_assert_type_expr(self, node: AssertTypeExpr) -> None: + super().visit_assert_type_expr(node) + self.fixup_type(node.type) + + def visit_super_expr(self, node: SuperExpr) -> None: + super().visit_super_expr(node) + if node.info is not None: + node.info = self.fixup(node.info) + + def visit_call_expr(self, node: CallExpr) -> None: + super().visit_call_expr(node) + if isinstance(node.analyzed, SYMBOL_NODE_EXPRESSION_TYPES): + node.analyzed = self.fixup(node.analyzed) + + def visit_newtype_expr(self, node: NewTypeExpr) -> None: + if node.info: + node.info = self.fixup_and_reset_typeinfo(node.info) + self.process_synthetic_type_info(node.info) + self.fixup_type(node.old_type) + super().visit_newtype_expr(node) + + def visit_lambda_expr(self, node: LambdaExpr) -> None: + node.info = self.fixup(node.info) + super().visit_lambda_expr(node) + + def visit_typeddict_expr(self, node: TypedDictExpr) -> None: + super().visit_typeddict_expr(node) + node.info = self.fixup_and_reset_typeinfo(node.info) + self.process_synthetic_type_info(node.info) + + def visit_enum_call_expr(self, node: EnumCallExpr) -> None: + node.info = self.fixup_and_reset_typeinfo(node.info) + self.process_synthetic_type_info(node.info) + super().visit_enum_call_expr(node) + + # Others + + def visit_var(self, node: Var) -> None: + node.info = self.fixup(node.info) + self.fixup_type(node.type) + 
self.fixup_type(node.setter_type) + super().visit_var(node) + + def visit_type_alias(self, node: TypeAlias) -> None: + self.fixup_type(node.target) + for v in node.alias_tvars: + self.fixup_type(v) + super().visit_type_alias(node) + + # Helpers + + def fixup(self, node: SN) -> SN: + if node in self.replacements: + new = self.replacements[node] + if isinstance(node, TypeInfo) and isinstance(new, TypeInfo): + # Special case: special_alias is not exposed in symbol tables, but may appear + # in external types (e.g. named tuples), so we need to update it manually. + replace_object_state(new.special_alias, node.special_alias) + replace_object_state(new, node, skip_slots=_get_ignored_slots(new)) + return cast(SN, new) + return node + + def fixup_and_reset_typeinfo(self, node: TypeInfo) -> TypeInfo: + """Fix-up type info and reset subtype caches. + + This needs to be called at least once per each merged TypeInfo, as otherwise we + may leak stale caches. + """ + if node in self.replacements: + # The subclass relationships may change, so reset all caches relevant to the + # old MRO. + new = self.replacements[node] + assert isinstance(new, TypeInfo) + type_state.reset_all_subtype_caches_for(new) + return self.fixup(node) + + def fixup_type(self, typ: Type | None) -> None: + if typ is not None: + typ.accept(TypeReplaceVisitor(self.replacements)) + + def process_type_info(self, info: TypeInfo | None) -> None: + if info is None: + return + self.fixup_type(info.declared_metaclass) + self.fixup_type(info.metaclass_type) + for target in info._promote: + self.fixup_type(target) + self.fixup_type(info.tuple_type) + self.fixup_type(info.typeddict_type) + if info.special_alias: + self.fixup_type(info.special_alias.target) + info.defn.info = self.fixup(info) + replace_nodes_in_symbol_table(info.names, self.replacements) + for i, item in enumerate(info.mro): + info.mro[i] = self.fixup(info.mro[i]) + for i, base in enumerate(info.bases): + self.fixup_type(info.bases[i]) + + def process_synthetic_type_info(self, info: TypeInfo) -> None: + # Synthetic types (types not created using a class statement) don't + # have bodies in the AST so we need to iterate over their symbol + # tables separately, unlike normal classes. + self.process_type_info(info) + for node in info.names.values(): + if node.node: + node.node.accept(self) + + def replace_statements(self, nodes: list[Statement]) -> list[Statement]: + result = [] + for node in nodes: + if isinstance(node, SymbolNode): + node = self.fixup(node) + result.append(node) + return result + + +class TypeReplaceVisitor(SyntheticTypeVisitor[None]): + """Similar to NodeReplaceVisitor, but for type objects. + + Note: this visitor may sometimes visit unanalyzed types + such as 'UnboundType' and 'RawExpressionType' For example, see + NodeReplaceVisitor.process_base_func. 
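    [Editor's note -- illustrative only: an annotation that has not been semantically
    analyzed yet, e.g.

        def f(x: "A") -> None: ...

    is still stored as an UnboundType("A") inside unanalyzed_type, so this visitor must
    accept such unanalyzed/synthetic forms, even though most of those visit methods have
    nothing to fix up.]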
+ """ + + def __init__(self, replacements: dict[SymbolNode, SymbolNode]) -> None: + self.replacements = replacements + + def visit_instance(self, typ: Instance) -> None: + typ.type = self.fixup(typ.type) + for arg in typ.args: + arg.accept(self) + if typ.last_known_value: + typ.last_known_value.accept(self) + + def visit_type_alias_type(self, typ: TypeAliasType) -> None: + assert typ.alias is not None + typ.alias = self.fixup(typ.alias) + for arg in typ.args: + arg.accept(self) + + def visit_any(self, typ: AnyType) -> None: + pass + + def visit_none_type(self, typ: NoneType) -> None: + pass + + def visit_callable_type(self, typ: CallableType) -> None: + for arg in typ.arg_types: + arg.accept(self) + typ.ret_type.accept(self) + if typ.definition: + # No need to fixup since this is just a cross-reference. + typ.definition = self.replacements.get(typ.definition, typ.definition) + # Fallback can be None for callable types that haven't been semantically analyzed. + if typ.fallback is not None: + typ.fallback.accept(self) + for tv in typ.variables: + if isinstance(tv, TypeVarType): + tv.upper_bound.accept(self) + for value in tv.values: + value.accept(self) + + def visit_overloaded(self, t: Overloaded) -> None: + for item in t.items: + item.accept(self) + # Fallback can be None for overloaded types that haven't been semantically analyzed. + if t.fallback is not None: + t.fallback.accept(self) + + def visit_erased_type(self, t: ErasedType) -> None: + # This type should exist only temporarily during type inference + raise RuntimeError("Cannot handle erased type") + + def visit_deleted_type(self, typ: DeletedType) -> None: + pass + + def visit_partial_type(self, typ: PartialType) -> None: + raise RuntimeError("Cannot handle partial type") + + def visit_tuple_type(self, typ: TupleType) -> None: + for item in typ.items: + item.accept(self) + # Fallback can be None for implicit tuple types that haven't been semantically analyzed. 
+ if typ.partial_fallback is not None: + typ.partial_fallback.accept(self) + + def visit_type_type(self, typ: TypeType) -> None: + typ.item.accept(self) + + def visit_type_var(self, typ: TypeVarType) -> None: + typ.upper_bound.accept(self) + typ.default.accept(self) + for value in typ.values: + value.accept(self) + + def visit_param_spec(self, typ: ParamSpecType) -> None: + typ.upper_bound.accept(self) + typ.default.accept(self) + typ.prefix.accept(self) + + def visit_type_var_tuple(self, typ: TypeVarTupleType) -> None: + typ.upper_bound.accept(self) + typ.default.accept(self) + + def visit_unpack_type(self, typ: UnpackType) -> None: + typ.type.accept(self) + + def visit_parameters(self, typ: Parameters) -> None: + for arg in typ.arg_types: + arg.accept(self) + + def visit_typeddict_type(self, typ: TypedDictType) -> None: + for value_type in typ.items.values(): + value_type.accept(self) + typ.fallback.accept(self) + + def visit_raw_expression_type(self, t: RawExpressionType) -> None: + pass + + def visit_literal_type(self, typ: LiteralType) -> None: + typ.fallback.accept(self) + + def visit_unbound_type(self, typ: UnboundType) -> None: + for arg in typ.args: + arg.accept(self) + + def visit_type_list(self, typ: TypeList) -> None: + for item in typ.items: + item.accept(self) + + def visit_callable_argument(self, typ: CallableArgument) -> None: + typ.typ.accept(self) + + def visit_ellipsis_type(self, typ: EllipsisType) -> None: + pass + + def visit_uninhabited_type(self, typ: UninhabitedType) -> None: + pass + + def visit_union_type(self, typ: UnionType) -> None: + for item in typ.items: + item.accept(self) + + def visit_placeholder_type(self, t: PlaceholderType) -> None: + for item in t.args: + item.accept(self) + + # Helpers + + def fixup(self, node: SN) -> SN: + if node in self.replacements: + new = self.replacements[node] + return cast(SN, new) + return node + + +def replace_nodes_in_symbol_table( + symbols: SymbolTable, replacements: dict[SymbolNode, SymbolNode] +) -> None: + for node in symbols.values(): + if node.node: + if node.node in replacements: + new = replacements[node.node] + old = node.node + replace_object_state(new, old, skip_slots=_get_ignored_slots(new)) + node.node = new + if isinstance(node.node, (Var, TypeAlias)): + # Handle them here just in case these aren't exposed through the AST. + node.node.accept(NodeReplaceVisitor(replacements)) + + +def _get_ignored_slots(node: SymbolNode) -> tuple[str, ...]: + if isinstance(node, OverloadedFuncDef): + return ("setter",) + if isinstance(node, TypeInfo): + return ("special_alias",) + return () diff --git a/.venv/lib/python3.12/site-packages/mypy/server/aststrip.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/server/aststrip.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..1bb98a8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/server/aststrip.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/server/aststrip.py b/.venv/lib/python3.12/site-packages/mypy/server/aststrip.py new file mode 100644 index 0000000..27c1c4a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/server/aststrip.py @@ -0,0 +1,281 @@ +"""Strip/reset AST in-place to match state after semantic analyzer pre-analysis. + +Fine-grained incremental mode reruns semantic analysis main pass +and type checking for *existing* AST nodes (targets) when changes are +propagated using fine-grained dependencies. 
AST nodes attributes are +sometimes changed during semantic analysis main pass, and running +semantic analysis again on those nodes would produce incorrect +results, since this pass isn't idempotent. This pass resets AST +nodes to reflect the state after semantic pre-analysis, so that we +can rerun semantic analysis. +(The above is in contrast to behavior with modules that have source code +changes, for which we re-parse the entire module and reconstruct a fresh +AST. No stripping is required in this case. Both modes of operation should +have the same outcome.) +Notes: +* This is currently pretty fragile, as we must carefully undo whatever + changes can be made in semantic analysis main pass, including changes + to symbol tables. +* We reuse existing AST nodes because it makes it relatively straightforward + to reprocess only a single target within a module efficiently. If there + was a way to parse a single target within a file, in time proportional to + the size of the target, we'd rather create fresh AST nodes than strip them. + (This is possible only in Python 3.8+) +* Currently we don't actually reset all changes, but only those known to affect + non-idempotent semantic analysis behavior. + TODO: It would be more principled and less fragile to reset everything + changed in semantic analysis main pass and later. +* Reprocessing may recreate AST nodes (such as Var nodes, and TypeInfo nodes + created with assignment statements) that will get different identities from + the original AST. Thus running an AST merge is necessary after stripping, + even though some identities are preserved. +""" + +from __future__ import annotations + +from collections.abc import Iterator +from contextlib import contextmanager, nullcontext +from typing_extensions import TypeAlias as _TypeAlias + +from mypy.nodes import ( + CLASSDEF_NO_INFO, + AssignmentStmt, + Block, + CallExpr, + ClassDef, + Decorator, + ForStmt, + FuncDef, + ImportAll, + ImportFrom, + IndexExpr, + ListExpr, + MemberExpr, + MypyFile, + NameExpr, + Node, + OpExpr, + OverloadedFuncDef, + RefExpr, + StarExpr, + SuperExpr, + SymbolTableNode, + TupleExpr, + TypeInfo, + Var, +) +from mypy.traverser import TraverserVisitor +from mypy.types import CallableType +from mypy.typestate import type_state + +SavedAttributes: _TypeAlias = dict[tuple[ClassDef, str], SymbolTableNode] + + +def strip_target( + node: MypyFile | FuncDef | OverloadedFuncDef, saved_attrs: SavedAttributes +) -> None: + """Reset a fine-grained incremental target to state before semantic analysis. + + All TypeInfos are killed. Therefore we need to preserve the variables + defined as attributes on self. This is done by patches (callbacks) + returned from this function that re-add these variables when called. + + Args: + node: node to strip + saved_attrs: collect attributes here that may need to be re-added to + classes afterwards if stripping a class body (this dict is mutated) + """ + visitor = NodeStripVisitor(saved_attrs) + if isinstance(node, MypyFile): + visitor.strip_file_top_level(node) + else: + node.accept(visitor) + + +class NodeStripVisitor(TraverserVisitor): + def __init__(self, saved_class_attrs: SavedAttributes) -> None: + # The current active class. + self.type: TypeInfo | None = None + # This is True at class scope, but not in methods. + self.is_class_body = False + # By default, process function definitions. If False, don't -- this is used for + # processing module top levels. 
+ self.recurse_into_functions = True + # These attributes were removed from top-level classes during strip and + # will be added afterwards (if no existing definition is found). These + # must be added back before semantically analyzing any methods. + self.saved_class_attrs = saved_class_attrs + + def strip_file_top_level(self, file_node: MypyFile) -> None: + """Strip a module top-level (don't recursive into functions).""" + self.recurse_into_functions = False + file_node.plugin_deps.clear() + file_node.accept(self) + for name in file_node.names.copy(): + # TODO: this is a hot fix, we should delete all names, + # see https://github.com/python/mypy/issues/6422. + if "@" not in name: + del file_node.names[name] + + def visit_block(self, b: Block) -> None: + if b.is_unreachable: + return + super().visit_block(b) + + def visit_class_def(self, node: ClassDef) -> None: + """Strip class body and type info, but don't strip methods.""" + # We need to save the implicitly defined instance variables, + # i.e. those defined as attributes on self. Otherwise, they would + # be lost if we only reprocess top-levels (this kills TypeInfos) + # but not the methods that defined those variables. + if not self.recurse_into_functions: + self.save_implicit_attributes(node) + # We need to delete any entries that were generated by plugins, + # since they will get regenerated. + to_delete = {v.node for v in node.info.names.values() if v.plugin_generated} + node.type_vars = [] + node.base_type_exprs.extend(node.removed_base_type_exprs) + node.removed_base_type_exprs = [] + node.defs.body = [ + s for s in node.defs.body if s not in to_delete # type: ignore[comparison-overlap] + ] + with self.enter_class(node.info): + super().visit_class_def(node) + node.defs.body.extend(node.removed_statements) + node.removed_statements = [] + type_state.reset_subtype_caches_for(node.info) + # Kill the TypeInfo, since there is none before semantic analysis. + node.info = CLASSDEF_NO_INFO + node.analyzed = None + + def save_implicit_attributes(self, node: ClassDef) -> None: + """Produce callbacks that re-add attributes defined on self.""" + for name, sym in node.info.names.items(): + if isinstance(sym.node, Var) and sym.implicit: + self.saved_class_attrs[node, name] = sym + + def visit_func_def(self, node: FuncDef) -> None: + if not self.recurse_into_functions: + return + node.expanded = [] + node.type = node.unanalyzed_type + if node.type: + # Type variable binder binds type variables before the type is analyzed, + # this causes unanalyzed_type to be modified in place. We needed to revert this + # in order to get the state exactly as it was before semantic analysis. + # See also #4814. + assert isinstance(node.type, CallableType) + node.type.variables = () + with self.enter_method(node.info) if node.info else nullcontext(): + super().visit_func_def(node) + + def visit_decorator(self, node: Decorator) -> None: + node.var.type = None + for expr in node.decorators: + expr.accept(self) + if self.recurse_into_functions: + node.func.accept(self) + else: + # Only touch the final status if we re-process + # the top level, since decorators are processed there. + node.var.is_final = False + node.func.is_final = False + + def visit_overloaded_func_def(self, node: OverloadedFuncDef) -> None: + if not self.recurse_into_functions: + return + # Revert change made during semantic analysis main pass. 
+ node.items = node.unanalyzed_items.copy() + node.impl = None + node.is_final = False + super().visit_overloaded_func_def(node) + + def visit_assignment_stmt(self, node: AssignmentStmt) -> None: + node.type = node.unanalyzed_type + node.is_final_def = False + node.is_alias_def = False + if self.type and not self.is_class_body: + for lvalue in node.lvalues: + # Revert assignments made via self attributes. + self.process_lvalue_in_method(lvalue) + super().visit_assignment_stmt(node) + + def visit_import_from(self, node: ImportFrom) -> None: + node.assignments = [] + + def visit_import_all(self, node: ImportAll) -> None: + node.assignments = [] + + def visit_for_stmt(self, node: ForStmt) -> None: + node.index_type = node.unanalyzed_index_type + node.inferred_item_type = None + node.inferred_iterator_type = None + super().visit_for_stmt(node) + + def visit_name_expr(self, node: NameExpr) -> None: + self.strip_ref_expr(node) + + def visit_member_expr(self, node: MemberExpr) -> None: + self.strip_ref_expr(node) + super().visit_member_expr(node) + + def visit_index_expr(self, node: IndexExpr) -> None: + node.analyzed = None # May have been an alias or type application. + super().visit_index_expr(node) + + def visit_op_expr(self, node: OpExpr) -> None: + node.analyzed = None # May have been an alias + super().visit_op_expr(node) + + def strip_ref_expr(self, node: RefExpr) -> None: + node.kind = None + node.node = None + node.fullname = "" + node.is_new_def = False + node.is_inferred_def = False + + def visit_call_expr(self, node: CallExpr) -> None: + node.analyzed = None + super().visit_call_expr(node) + + def visit_super_expr(self, node: SuperExpr) -> None: + node.info = None + super().visit_super_expr(node) + + def process_lvalue_in_method(self, lvalue: Node) -> None: + if isinstance(lvalue, MemberExpr): + if lvalue.is_new_def: + # Remove defined attribute from the class symbol table. If is_new_def is + # true for a MemberExpr, we know that it must be an assignment through + # self, since only those can define new attributes. 
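        # (Editor's note, illustrative only: e.g. in
        #     def __init__(self) -> None:
        #         self.x = 0
        # the MemberExpr for self.x has is_new_def set, so the 'x' entry is dropped from
        # the enclosing class's symbol table here and re-created when the method is
        # reanalyzed.)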
+ assert self.type is not None
+ if lvalue.name in self.type.names:
+ del self.type.names[lvalue.name]
+ key = (self.type.defn, lvalue.name)
+ if key in self.saved_class_attrs:
+ del self.saved_class_attrs[key]
+ elif isinstance(lvalue, (TupleExpr, ListExpr)):
+ for item in lvalue.items:
+ self.process_lvalue_in_method(item)
+ elif isinstance(lvalue, StarExpr):
+ self.process_lvalue_in_method(lvalue.expr)
+
+ @contextmanager
+ def enter_class(self, info: TypeInfo) -> Iterator[None]:
+ old_type = self.type
+ old_is_class_body = self.is_class_body
+ self.type = info
+ self.is_class_body = True
+ yield
+ self.type = old_type
+ self.is_class_body = old_is_class_body
+
+ @contextmanager
+ def enter_method(self, info: TypeInfo) -> Iterator[None]:
+ old_type = self.type
+ old_is_class_body = self.is_class_body
+ self.type = info
+ self.is_class_body = False
+ yield
+ self.type = old_type
+ self.is_class_body = old_is_class_body
diff --git a/.venv/lib/python3.12/site-packages/mypy/server/deps.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/server/deps.cpython-312-x86_64-linux-gnu.so
new file mode 100755
index 0000000..d6f55fe
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/server/deps.cpython-312-x86_64-linux-gnu.so differ
diff --git a/.venv/lib/python3.12/site-packages/mypy/server/deps.py b/.venv/lib/python3.12/site-packages/mypy/server/deps.py
new file mode 100644
index 0000000..ba62232
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/mypy/server/deps.py
@@ -0,0 +1,1136 @@
+"""Generate fine-grained dependencies for AST nodes, for use in the daemon mode.
+
+Dependencies are stored in a map from *triggers* to *sets of affected locations*.
+
+A trigger is a string that represents a program property that has changed, such
+as the signature of a specific function. Triggers are written as '<...>' (angle
+brackets). When a program property changes, we determine the relevant trigger(s)
+and all affected locations. The latter are stale and will have to be reprocessed.
+
+An affected location is a string that can refer to a *target* (a non-nested
+function or method, or a module top level), a class, or a trigger (for
+recursively triggering other triggers).
+
+Here's an example representation of a simple dependency map (in format
+"<trigger> -> locations"):
+
+ <m.A.g> -> m.f
+ <m.A> -> <m.f>, m.A, m.f
+
+Assuming 'A' is a class, this means that
+
+1) if a property of 'm.A.g', such as the signature, is changed, we need
+ to process target (function) 'm.f'
+
+2) if the MRO or other significant property of class 'm.A' changes, we
+ need to process target 'm.f', the entire class 'm.A', and locations
+ triggered by trigger '<m.A>' (this explanation is a bit simplified;
+ see below for more details).
+
+The triggers to fire are determined using mypy.server.astdiff.
+
+Examples of triggers:
+
+* '<mod.x>' represents a module attribute/function/class. If any externally
+ visible property of 'x' changes, this gets fired. For changes within
+ classes, only "big" changes cause the class to be triggered (such as a
+ change in MRO). Smaller changes, such as changes to some attributes, don't
+ trigger the entire class.
+* '<mod.Cls.x>' represents the type and kind of attribute/method 'x' of
+ class 'mod.Cls'. This can also refer to an attribute inherited from a
+ base class (relevant if it's accessed through a value of type 'Cls'
+ instead of the base class type).
+* '<package.mod>' represents the existence of module 'package.mod'.
This + gets triggered if 'package.mod' is created or deleted, or if it gets + changed into something other than a module. + +Examples of locations: + +* 'mod' is the top level of module 'mod' (doesn't include any function bodies, + but includes class bodies not nested within a function). +* 'mod.f' is function 'f' in module 'mod' (module-level variables aren't separate + locations but are included in the module top level). Functions also include + any nested functions and classes -- such nested definitions aren't separate + locations, for simplicity of implementation. +* 'mod.Cls.f' is method 'f' of 'mod.Cls'. Non-method attributes aren't locations. +* 'mod.Cls' represents each method in class 'mod.Cls' + the top-level of the + module 'mod'. (To simplify the implementation, there is no location that only + includes the body of a class without the entire surrounding module top level.) +* Trigger '<...>' as a location is an indirect way of referring to all + locations triggered by the trigger. These indirect locations keep the + dependency map smaller and easier to manage. + +Triggers can be triggered by program changes such as these: + +* Addition or deletion of an attribute (or module). +* Change of the kind of thing a name represents (such as a change from a function + to a class). +* Change of the static type of a name. + +Changes in the body of a function that aren't reflected in the signature don't +cause the function to be triggered. More generally, we trigger only on changes +that may affect type checking results outside the module that contains the +change. + +We don't generate dependencies from builtins and certain other stdlib modules, +since these change very rarely, and they would just increase the size of the +dependency map significantly without significant benefit. + +Test cases for this module live in 'test-data/unit/deps*.test'. 
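[Editor's note -- illustrative sketch, not part of mypy. A dependency map is a
plain dict from trigger strings to sets of target names, so maps from several
modules can be combined with merge_dependencies (defined near the end of this
module):

    from mypy.server.trigger import make_trigger

    deps: dict[str, set[str]] = {}
    deps.setdefault(make_trigger("m.A.g"), set()).add("m.f")
    merge_dependencies({make_trigger("m.A"): {"m.f", "m.A"}}, deps)
    # deps == {"<m.A.g>": {"m.f"}, "<m.A>": {"m.f", "m.A"}}
]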
+""" + +from __future__ import annotations + +from collections import defaultdict + +from mypy.nodes import ( + GDEF, + LDEF, + MDEF, + SYMBOL_FUNCBASE_TYPES, + AssertTypeExpr, + AssignmentStmt, + AwaitExpr, + Block, + CallExpr, + CastExpr, + ClassDef, + ComparisonExpr, + Decorator, + DelStmt, + DictionaryComprehension, + EnumCallExpr, + Expression, + ForStmt, + FuncBase, + FuncDef, + GeneratorExpr, + Import, + ImportAll, + ImportFrom, + IndexExpr, + MemberExpr, + MypyFile, + NamedTupleExpr, + NameExpr, + NewTypeExpr, + Node, + OperatorAssignmentStmt, + OpExpr, + OverloadedFuncDef, + RefExpr, + StarExpr, + SuperExpr, + TupleExpr, + TypeAliasExpr, + TypeApplication, + TypedDictExpr, + TypeFormExpr, + TypeInfo, + TypeVarExpr, + UnaryExpr, + Var, + WithStmt, + YieldFromExpr, +) +from mypy.operators import ( + op_methods, + ops_with_inplace_method, + reverse_op_methods, + unary_op_methods, +) +from mypy.options import Options +from mypy.scope import Scope +from mypy.server.trigger import make_trigger, make_wildcard_trigger +from mypy.traverser import TraverserVisitor +from mypy.typeops import bind_self +from mypy.types import ( + AnyType, + CallableType, + DeletedType, + ErasedType, + FunctionLike, + Instance, + LiteralType, + NoneType, + Overloaded, + Parameters, + ParamSpecType, + PartialType, + ProperType, + TupleType, + Type, + TypeAliasType, + TypedDictType, + TypeOfAny, + TypeType, + TypeVarTupleType, + TypeVarType, + TypeVisitor, + UnboundType, + UninhabitedType, + UnionType, + UnpackType, + get_proper_type, +) +from mypy.typestate import type_state +from mypy.util import correct_relative_import + + +def get_dependencies( + target: MypyFile, + type_map: dict[Expression, Type], + python_version: tuple[int, int], + options: Options, +) -> dict[str, set[str]]: + """Get all dependencies of a node, recursively.""" + visitor = DependencyVisitor(type_map, python_version, target.alias_deps, options) + target.accept(visitor) + return visitor.map + + +def get_dependencies_of_target( + module_id: str, + module_tree: MypyFile, + target: Node, + type_map: dict[Expression, Type], + python_version: tuple[int, int], +) -> dict[str, set[str]]: + """Get dependencies of a target -- don't recursive into nested targets.""" + # TODO: Add tests for this function. + visitor = DependencyVisitor(type_map, python_version, module_tree.alias_deps) + with visitor.scope.module_scope(module_id): + if isinstance(target, MypyFile): + # Only get dependencies of the top-level of the module. Don't recurse into + # functions. + for defn in target.defs: + # TODO: Recurse into top-level statements and class bodies but skip functions. + if not isinstance(defn, (ClassDef, Decorator, FuncDef, OverloadedFuncDef)): + defn.accept(visitor) + elif isinstance(target, FuncBase) and target.info: + # It's a method. + # TODO: Methods in nested classes. + with visitor.scope.class_scope(target.info): + target.accept(visitor) + else: + target.accept(visitor) + return visitor.map + + +class DependencyVisitor(TraverserVisitor): + def __init__( + self, + type_map: dict[Expression, Type], + python_version: tuple[int, int], + alias_deps: defaultdict[str, set[str]], + options: Options | None = None, + ) -> None: + self.scope = Scope() + self.type_map = type_map + # This attribute holds a mapping from target to names of type aliases + # it depends on. These need to be processed specially, since they may + # appear in expanded form in symbol tables, because of a get_proper_type() + # somewhere. 
For example, after: + # A = int + # x: A + # the module symbol table will just have a Var `x` with type `int`, + # and the dependency of `x` on `A` is lost. Therefore, the alias dependencies + # are preserved at alias expansion points in `semanal.py`, stored as an attribute + # on MypyFile, and then passed here. + # TODO: fine-grained is more susceptible to this partially because we are reckless + # about get_proper_type() in *this specific file*. + self.alias_deps = alias_deps + self.map: dict[str, set[str]] = {} + self.is_class = False + self.is_package_init_file = False + self.options = options + + def visit_mypy_file(self, o: MypyFile) -> None: + with self.scope.module_scope(o.fullname): + self.is_package_init_file = o.is_package_init_file() + self.add_type_alias_deps(self.scope.current_target()) + for trigger, targets in o.plugin_deps.items(): + self.map.setdefault(trigger, set()).update(targets) + super().visit_mypy_file(o) + + def visit_func_def(self, o: FuncDef) -> None: + with self.scope.function_scope(o): + target = self.scope.current_target() + if o.type: + if self.is_class and isinstance(o.type, FunctionLike): + signature: Type = bind_self(o.type) + else: + signature = o.type + for trigger in self.get_type_triggers(signature): + self.add_dependency(trigger) + self.add_dependency(trigger, target=make_trigger(target)) + if o.info: + for base in non_trivial_bases(o.info): + # Base class __init__/__new__ doesn't generate a logical + # dependency since the override can be incompatible. + if not self.use_logical_deps() or o.name not in ("__init__", "__new__"): + self.add_dependency(make_trigger(base.fullname + "." + o.name)) + self.add_type_alias_deps(self.scope.current_target()) + super().visit_func_def(o) + variants = set(o.expanded) - {o} + for ex in variants: + if isinstance(ex, FuncDef): + super().visit_func_def(ex) + + def visit_decorator(self, o: Decorator) -> None: + if not self.use_logical_deps(): + # We don't need to recheck outer scope for an overload, only overload itself. + # Also if any decorator is nested, it is not externally visible, so we don't need to + # generate dependency. + if not o.func.is_overload and self.scope.current_function_name() is None: + self.add_dependency(make_trigger(o.func.fullname)) + else: + # Add logical dependencies from decorators to the function. For example, + # if we have + # @dec + # def func(): ... + # then if `dec` is unannotated, then it will "spoil" `func` and consequently + # all call sites, making them all `Any`. + for d in o.decorators: + tname: str | None = None + if isinstance(d, RefExpr) and d.fullname: + tname = d.fullname + if isinstance(d, CallExpr) and isinstance(d.callee, RefExpr) and d.callee.fullname: + tname = d.callee.fullname + if tname is not None: + self.add_dependency(make_trigger(tname), make_trigger(o.func.fullname)) + super().visit_decorator(o) + + def visit_class_def(self, o: ClassDef) -> None: + with self.scope.class_scope(o.info): + target = self.scope.current_full_target() + self.add_dependency(make_trigger(target), target) + old_is_class = self.is_class + self.is_class = True + # Add dependencies to type variables of a generic class. 
+ for tv in o.type_vars: + self.add_dependency(make_trigger(tv.fullname), target) + self.process_type_info(o.info) + super().visit_class_def(o) + self.is_class = old_is_class + + def visit_newtype_expr(self, o: NewTypeExpr) -> None: + if o.info: + with self.scope.class_scope(o.info): + self.process_type_info(o.info) + + def process_type_info(self, info: TypeInfo) -> None: + target = self.scope.current_full_target() + for base in info.bases: + self.add_type_dependencies(base, target=target) + if info.tuple_type: + self.add_type_dependencies(info.tuple_type, target=make_trigger(target)) + if info.typeddict_type: + self.add_type_dependencies(info.typeddict_type, target=make_trigger(target)) + if info.declared_metaclass: + self.add_type_dependencies(info.declared_metaclass, target=make_trigger(target)) + if info.is_protocol: + for base_info in info.mro[:-1]: + # We add dependencies from whole MRO to cover explicit subprotocols. + # For example: + # + # class Super(Protocol): + # x: int + # class Sub(Super, Protocol): + # y: int + # + # In this example we add -> , to invalidate Sub if + # a new member is added to Super. + self.add_dependency( + make_wildcard_trigger(base_info.fullname), target=make_trigger(target) + ) + # More protocol dependencies are collected in type_state._snapshot_protocol_deps + # after a full run or update is finished. + + self.add_type_alias_deps(self.scope.current_target()) + for name, node in info.names.items(): + if isinstance(node.node, Var): + # Recheck Liskov if needed, self definitions are checked in the defining method + if node.node.is_initialized_in_class and has_user_bases(info): + self.add_dependency(make_trigger(info.fullname + "." + name)) + for base_info in non_trivial_bases(info): + # If the type of an attribute changes in a base class, we make references + # to the attribute in the subclass stale. + self.add_dependency( + make_trigger(base_info.fullname + "." + name), + target=make_trigger(info.fullname + "." + name), + ) + for base_info in non_trivial_bases(info): + for name, node in base_info.names.items(): + if self.use_logical_deps(): + # Skip logical dependency if an attribute is not overridden. For example, + # in case of: + # class Base: + # x = 1 + # y = 2 + # class Sub(Base): + # x = 3 + # we skip -> , because even if `y` is unannotated it + # doesn't affect precision of Liskov checking. + if name not in info.names: + continue + # __init__ and __new__ can be overridden with different signatures, so no + # logical dependency. + if name in ("__init__", "__new__"): + continue + self.add_dependency( + make_trigger(base_info.fullname + "." + name), + target=make_trigger(info.fullname + "." + name), + ) + if not self.use_logical_deps(): + # These dependencies are only useful for propagating changes -- + # they aren't logical dependencies since __init__ and __new__ can be + # overridden with a different signature. + self.add_dependency( + make_trigger(base_info.fullname + ".__init__"), + target=make_trigger(info.fullname + ".__init__"), + ) + self.add_dependency( + make_trigger(base_info.fullname + ".__new__"), + target=make_trigger(info.fullname + ".__new__"), + ) + # If the set of abstract attributes change, this may invalidate class + # instantiation, or change the generated error message, since Python checks + # class abstract status when creating an instance. 
+ self.add_dependency( + make_trigger(base_info.fullname + ".(abstract)"), + target=make_trigger(info.fullname + ".__init__"), + ) + # If the base class abstract attributes change, subclass abstract + # attributes need to be recalculated. + self.add_dependency(make_trigger(base_info.fullname + ".(abstract)")) + + def visit_import(self, o: Import) -> None: + for id, as_id in o.ids: + self.add_dependency(make_trigger(id), self.scope.current_target()) + + def visit_import_from(self, o: ImportFrom) -> None: + if self.use_logical_deps(): + # Just importing a name doesn't create a logical dependency. + return + module_id, _ = correct_relative_import( + self.scope.current_module_id(), o.relative, o.id, self.is_package_init_file + ) + self.add_dependency(make_trigger(module_id)) # needed if module is added/removed + for name, as_name in o.names: + self.add_dependency(make_trigger(module_id + "." + name)) + + def visit_import_all(self, o: ImportAll) -> None: + module_id, _ = correct_relative_import( + self.scope.current_module_id(), o.relative, o.id, self.is_package_init_file + ) + # The current target needs to be rechecked if anything "significant" changes in the + # target module namespace (as the imported definitions will need to be updated). + self.add_dependency(make_wildcard_trigger(module_id)) + + def visit_block(self, o: Block) -> None: + if not o.is_unreachable: + super().visit_block(o) + + def visit_assignment_stmt(self, o: AssignmentStmt) -> None: + rvalue = o.rvalue + if isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, TypeVarExpr): + analyzed = rvalue.analyzed + self.add_type_dependencies( + analyzed.upper_bound, target=make_trigger(analyzed.fullname) + ) + for val in analyzed.values: + self.add_type_dependencies(val, target=make_trigger(analyzed.fullname)) + # We need to re-analyze the definition if bound or value is deleted. + super().visit_call_expr(rvalue) + elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, NamedTupleExpr): + # Depend on types of named tuple items. 
+ info = rvalue.analyzed.info + prefix = f"{self.scope.current_full_target()}.{info.name}" + for name, symnode in info.names.items(): + if not name.startswith("_") and isinstance(symnode.node, Var): + typ = symnode.node.type + if typ: + self.add_type_dependencies(typ) + self.add_type_dependencies(typ, target=make_trigger(prefix)) + attr_target = make_trigger(f"{prefix}.{name}") + self.add_type_dependencies(typ, target=attr_target) + elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, TypedDictExpr): + # Depend on the underlying typeddict type + info = rvalue.analyzed.info + assert info.typeddict_type is not None + prefix = f"{self.scope.current_full_target()}.{info.name}" + self.add_type_dependencies(info.typeddict_type, target=make_trigger(prefix)) + elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, EnumCallExpr): + # Enum values are currently not checked, but for future we add the deps on them + for name, symnode in rvalue.analyzed.info.names.items(): + if isinstance(symnode.node, Var) and symnode.node.type: + self.add_type_dependencies(symnode.node.type) + elif o.is_alias_def: + assert len(o.lvalues) == 1 + lvalue = o.lvalues[0] + assert isinstance(lvalue, NameExpr) + typ = get_proper_type(self.type_map.get(lvalue)) + if isinstance(typ, FunctionLike) and typ.is_type_obj(): + class_name = typ.type_object().fullname + self.add_dependency(make_trigger(class_name + ".__init__")) + self.add_dependency(make_trigger(class_name + ".__new__")) + if isinstance(rvalue, IndexExpr) and isinstance(rvalue.analyzed, TypeAliasExpr): + self.add_type_dependencies(rvalue.analyzed.node.target) + elif typ: + self.add_type_dependencies(typ) + else: + # Normal assignment + super().visit_assignment_stmt(o) + for lvalue in o.lvalues: + self.process_lvalue(lvalue) + items = o.lvalues + [rvalue] + for i in range(len(items) - 1): + lvalue = items[i] + rvalue = items[i + 1] + if isinstance(lvalue, TupleExpr): + self.add_attribute_dependency_for_expr(rvalue, "__iter__") + if o.type: + self.add_type_dependencies(o.type) + if self.use_logical_deps() and o.unanalyzed_type is None: + # Special case: for definitions without an explicit type like this: + # x = func(...) + # we add a logical dependency -> , because if `func` is not annotated, + # then it will make all points of use of `x` unchecked. + if ( + isinstance(rvalue, CallExpr) + and isinstance(rvalue.callee, RefExpr) + and rvalue.callee.fullname + ): + fname: str | None = None + if isinstance(rvalue.callee.node, TypeInfo): + # use actual __init__ as a dependency source + init = rvalue.callee.node.get("__init__") + if init and isinstance(init.node, SYMBOL_FUNCBASE_TYPES): + fname = init.node.fullname + else: + fname = rvalue.callee.fullname + if not fname: + return + for lv in o.lvalues: + if isinstance(lv, RefExpr) and lv.fullname and lv.is_new_def: + if lv.kind == LDEF: + return # local definitions don't generate logical deps + self.add_dependency(make_trigger(fname), make_trigger(lv.fullname)) + + def process_lvalue(self, lvalue: Expression) -> None: + """Generate additional dependencies for an lvalue.""" + if isinstance(lvalue, IndexExpr): + self.add_operator_method_dependency(lvalue.base, "__setitem__") + elif isinstance(lvalue, NameExpr): + if lvalue.kind in (MDEF, GDEF): + # Assignment to an attribute in the class body, or direct assignment to a + # global variable. 
+ lvalue_type = self.get_non_partial_lvalue_type(lvalue) + type_triggers = self.get_type_triggers(lvalue_type) + attr_trigger = make_trigger(f"{self.scope.current_full_target()}.{lvalue.name}") + for type_trigger in type_triggers: + self.add_dependency(type_trigger, attr_trigger) + elif isinstance(lvalue, MemberExpr): + if self.is_self_member_ref(lvalue) and lvalue.is_new_def: + node = lvalue.node + if isinstance(node, Var): + info = node.info + if info and has_user_bases(info): + # Recheck Liskov for self definitions + self.add_dependency(make_trigger(info.fullname + "." + lvalue.name)) + if lvalue.kind is None: + # Reference to a non-module attribute + if lvalue.expr not in self.type_map: + # Unreachable assignment -> not checked so no dependencies to generate. + return + object_type = self.type_map[lvalue.expr] + lvalue_type = self.get_non_partial_lvalue_type(lvalue) + type_triggers = self.get_type_triggers(lvalue_type) + for attr_trigger in self.attribute_triggers(object_type, lvalue.name): + for type_trigger in type_triggers: + self.add_dependency(type_trigger, attr_trigger) + elif isinstance(lvalue, TupleExpr): + for item in lvalue.items: + self.process_lvalue(item) + elif isinstance(lvalue, StarExpr): + self.process_lvalue(lvalue.expr) + + def is_self_member_ref(self, memberexpr: MemberExpr) -> bool: + """Does memberexpr to refer to an attribute of self?""" + if not isinstance(memberexpr.expr, NameExpr): + return False + node = memberexpr.expr.node + return isinstance(node, Var) and node.is_self + + def get_non_partial_lvalue_type(self, lvalue: RefExpr) -> Type: + if lvalue not in self.type_map: + # Likely a block considered unreachable during type checking. + return UninhabitedType() + lvalue_type = get_proper_type(self.type_map[lvalue]) + if isinstance(lvalue_type, PartialType): + if isinstance(lvalue.node, Var): + if lvalue.node.type: + lvalue_type = get_proper_type(lvalue.node.type) + else: + lvalue_type = UninhabitedType() + else: + # Probably a secondary, non-definition assignment that doesn't + # result in a non-partial type. We won't be able to infer any + # dependencies from this so just return something. (The first, + # definition assignment with a partial type is handled + # differently, in the semantic analyzer.) + assert not lvalue.is_new_def + return UninhabitedType() + return lvalue_type + + def visit_operator_assignment_stmt(self, o: OperatorAssignmentStmt) -> None: + super().visit_operator_assignment_stmt(o) + self.process_lvalue(o.lvalue) + method = op_methods[o.op] + self.add_attribute_dependency_for_expr(o.lvalue, method) + if o.op in ops_with_inplace_method: + inplace_method = "__i" + method[2:] + self.add_attribute_dependency_for_expr(o.lvalue, inplace_method) + + def visit_for_stmt(self, o: ForStmt) -> None: + super().visit_for_stmt(o) + if not o.is_async: + # __getitem__ is only used if __iter__ is missing but for simplicity we + # just always depend on both. + self.add_attribute_dependency_for_expr(o.expr, "__iter__") + self.add_attribute_dependency_for_expr(o.expr, "__getitem__") + if o.inferred_iterator_type: + self.add_attribute_dependency(o.inferred_iterator_type, "__next__") + else: + self.add_attribute_dependency_for_expr(o.expr, "__aiter__") + if o.inferred_iterator_type: + self.add_attribute_dependency(o.inferred_iterator_type, "__anext__") + + self.process_lvalue(o.index) + if isinstance(o.index, TupleExpr): + # Process multiple assignment to index variables. + item_type = o.inferred_item_type + if item_type: + # This is similar to above. 
+ self.add_attribute_dependency(item_type, "__iter__") + self.add_attribute_dependency(item_type, "__getitem__") + if o.index_type: + self.add_type_dependencies(o.index_type) + + def visit_with_stmt(self, o: WithStmt) -> None: + super().visit_with_stmt(o) + for e in o.expr: + if not o.is_async: + self.add_attribute_dependency_for_expr(e, "__enter__") + self.add_attribute_dependency_for_expr(e, "__exit__") + else: + self.add_attribute_dependency_for_expr(e, "__aenter__") + self.add_attribute_dependency_for_expr(e, "__aexit__") + for typ in o.analyzed_types: + self.add_type_dependencies(typ) + + def visit_del_stmt(self, o: DelStmt) -> None: + super().visit_del_stmt(o) + if isinstance(o.expr, IndexExpr): + self.add_attribute_dependency_for_expr(o.expr.base, "__delitem__") + + # Expressions + + def process_global_ref_expr(self, o: RefExpr) -> None: + if o.fullname: + self.add_dependency(make_trigger(o.fullname)) + + # If this is a reference to a type, generate a dependency to its + # constructor. + # IDEA: Avoid generating spurious dependencies for except statements, + # class attribute references, etc., if performance is a problem. + typ = get_proper_type(self.type_map.get(o)) + if isinstance(typ, FunctionLike) and typ.is_type_obj(): + class_name = typ.type_object().fullname + self.add_dependency(make_trigger(class_name + ".__init__")) + self.add_dependency(make_trigger(class_name + ".__new__")) + + def visit_name_expr(self, o: NameExpr) -> None: + if o.kind == LDEF: + # We don't track dependencies to local variables, since they + # aren't externally visible. + return + if o.kind == MDEF: + # Direct reference to member is only possible in the scope that + # defined the name, so no dependency is required. + return + self.process_global_ref_expr(o) + + def visit_member_expr(self, e: MemberExpr) -> None: + if isinstance(e.expr, RefExpr) and isinstance(e.expr.node, TypeInfo): + # Special case class attribute so that we don't depend on "__init__". + self.add_dependency(make_trigger(e.expr.node.fullname)) + else: + super().visit_member_expr(e) + if e.kind is not None: + # Reference to a module attribute + self.process_global_ref_expr(e) + else: + # Reference to a non-module (or missing) attribute + if e.expr not in self.type_map: + # No type available -- this happens for unreachable code. Since it's unreachable, + # it wasn't type checked and we don't need to generate dependencies. + return + if isinstance(e.expr, RefExpr) and isinstance(e.expr.node, MypyFile): + # Special case: reference to a missing module attribute. + self.add_dependency(make_trigger(e.expr.node.fullname + "." + e.name)) + return + typ = get_proper_type(self.type_map[e.expr]) + self.add_attribute_dependency(typ, e.name) + if self.use_logical_deps() and isinstance(typ, AnyType): + name = self.get_unimported_fullname(e, typ) + if name is not None: + # Generate a logical dependency from an unimported + # definition (which comes from a missing module). + # Example: + # import missing # "missing" not in build + # + # def g() -> None: + # missing.f() # Generate dependency from "missing.f" + self.add_dependency(make_trigger(name)) + + def get_unimported_fullname(self, e: MemberExpr, typ: AnyType) -> str | None: + """If e refers to an unimported definition, infer the fullname of this. + + Return None if e doesn't refer to an unimported definition or if we can't + determine the name. + """ + suffix = "" + # Unwrap nested member expression to handle cases like "a.b.c.d" where + # "a.b" is a known reference to an unimported module. 
Find the base + # reference to an unimported module (such as "a.b") and the name suffix + # (such as "c.d") needed to build a full name. + while typ.type_of_any == TypeOfAny.from_another_any and isinstance(e.expr, MemberExpr): + suffix = "." + e.name + suffix + e = e.expr + if e.expr not in self.type_map: + return None + obj_type = get_proper_type(self.type_map[e.expr]) + if not isinstance(obj_type, AnyType): + # Can't find the base reference to the unimported module. + return None + typ = obj_type + if typ.type_of_any == TypeOfAny.from_unimported_type and typ.missing_import_name: + # Infer the full name of the unimported definition. + return typ.missing_import_name + "." + e.name + suffix + return None + + def visit_super_expr(self, e: SuperExpr) -> None: + # Arguments in "super(C, self)" won't generate useful logical deps. + if not self.use_logical_deps(): + super().visit_super_expr(e) + if e.info is not None: + name = e.name + for base in non_trivial_bases(e.info): + self.add_dependency(make_trigger(base.fullname + "." + name)) + if name in base.names: + # No need to depend on further base classes, since we found + # the target. This is safe since if the target gets + # deleted or modified, we'll trigger it. + break + + def visit_call_expr(self, e: CallExpr) -> None: + if isinstance(e.callee, RefExpr) and e.callee.fullname == "builtins.isinstance": + self.process_isinstance_call(e) + else: + super().visit_call_expr(e) + typ = self.type_map.get(e.callee) + if typ is not None: + typ = get_proper_type(typ) + if not isinstance(typ, FunctionLike): + self.add_attribute_dependency(typ, "__call__") + + def process_isinstance_call(self, e: CallExpr) -> None: + """Process "isinstance(...)" in a way to avoid some extra dependencies.""" + if len(e.args) == 2: + arg = e.args[1] + if ( + isinstance(arg, RefExpr) + and arg.kind == GDEF + and isinstance(arg.node, TypeInfo) + and arg.fullname + ): + # Special case to avoid redundant dependencies from "__init__". + self.add_dependency(make_trigger(arg.fullname)) + return + # In uncommon cases generate normal dependencies. These will include + # spurious dependencies, but the performance impact is small. 
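        # (Editor's note, illustrative only: for a call like isinstance(x, m.C), the
        # special case above records just the trigger '<m.C>' for the current target,
        # whereas an ordinary reference to m.C would also pull in '<m.C.__init__>' and
        # '<m.C.__new__>' via process_global_ref_expr.)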
+ super().visit_call_expr(e) + + def visit_cast_expr(self, e: CastExpr) -> None: + super().visit_cast_expr(e) + self.add_type_dependencies(e.type) + + def visit_type_form_expr(self, e: TypeFormExpr) -> None: + super().visit_type_form_expr(e) + self.add_type_dependencies(e.type) + + def visit_assert_type_expr(self, e: AssertTypeExpr) -> None: + super().visit_assert_type_expr(e) + self.add_type_dependencies(e.type) + + def visit_type_application(self, e: TypeApplication) -> None: + super().visit_type_application(e) + for typ in e.types: + self.add_type_dependencies(typ) + + def visit_index_expr(self, e: IndexExpr) -> None: + super().visit_index_expr(e) + self.add_operator_method_dependency(e.base, "__getitem__") + + def visit_unary_expr(self, e: UnaryExpr) -> None: + super().visit_unary_expr(e) + if e.op not in unary_op_methods: + return + method = unary_op_methods[e.op] + self.add_operator_method_dependency(e.expr, method) + + def visit_op_expr(self, e: OpExpr) -> None: + super().visit_op_expr(e) + self.process_binary_op(e.op, e.left, e.right) + + def visit_comparison_expr(self, e: ComparisonExpr) -> None: + super().visit_comparison_expr(e) + for i, op in enumerate(e.operators): + left = e.operands[i] + right = e.operands[i + 1] + self.process_binary_op(op, left, right) + + def process_binary_op(self, op: str, left: Expression, right: Expression) -> None: + method = op_methods.get(op) + if method: + if op == "in": + self.add_operator_method_dependency(right, method) + else: + self.add_operator_method_dependency(left, method) + rev_method = reverse_op_methods.get(method) + if rev_method: + self.add_operator_method_dependency(right, rev_method) + + def add_operator_method_dependency(self, e: Expression, method: str) -> None: + typ = get_proper_type(self.type_map.get(e)) + if typ is not None: + self.add_operator_method_dependency_for_type(typ, method) + + def add_operator_method_dependency_for_type(self, typ: ProperType, method: str) -> None: + # Note that operator methods can't be (non-metaclass) methods of type objects + # (that is, TypeType objects or Callables representing a type). + if isinstance(typ, TypeVarType): + typ = get_proper_type(typ.upper_bound) + if isinstance(typ, TupleType): + typ = typ.partial_fallback + if isinstance(typ, Instance): + trigger = make_trigger(typ.type.fullname + "." 
+ method)
+ self.add_dependency(trigger)
+ elif isinstance(typ, UnionType):
+ for item in typ.items:
+ self.add_operator_method_dependency_for_type(get_proper_type(item), method)
+ elif isinstance(typ, FunctionLike) and typ.is_type_obj():
+ self.add_operator_method_dependency_for_type(typ.fallback, method)
+ elif isinstance(typ, TypeType):
+ if isinstance(typ.item, Instance) and typ.item.type.metaclass_type is not None:
+ self.add_operator_method_dependency_for_type(typ.item.type.metaclass_type, method)
+
+ def visit_generator_expr(self, e: GeneratorExpr) -> None:
+ super().visit_generator_expr(e)
+ for seq in e.sequences:
+ self.add_iter_dependency(seq)
+
+ def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> None:
+ super().visit_dictionary_comprehension(e)
+ for seq in e.sequences:
+ self.add_iter_dependency(seq)
+
+ def visit_star_expr(self, e: StarExpr) -> None:
+ super().visit_star_expr(e)
+ self.add_iter_dependency(e.expr)
+
+ def visit_yield_from_expr(self, e: YieldFromExpr) -> None:
+ super().visit_yield_from_expr(e)
+ self.add_iter_dependency(e.expr)
+
+ def visit_await_expr(self, e: AwaitExpr) -> None:
+ super().visit_await_expr(e)
+ self.add_attribute_dependency_for_expr(e.expr, "__await__")
+
+ # Helpers
+
+ def add_type_alias_deps(self, target: str) -> None:
+ # Type aliases are special, because some of the dependencies are calculated
+ # in semanal.py, before they are expanded.
+ if target in self.alias_deps:
+ for alias in self.alias_deps[target]:
+ self.add_dependency(make_trigger(alias))
+
+ def add_dependency(self, trigger: str, target: str | None = None) -> None:
+ """Add dependency from trigger to a target.
+
+ If the target is not given explicitly, use the current target.
+ """
+ if trigger.startswith(
+ ("<builtins.", "<typing.", "<mypy_extensions.", "<enum.")
+ ):
+ # Don't track dependencies to certain library modules to keep the size of
+ # the dependency map manageable; these only change on mypy version updates,
+ # which require a full rebuild anyway.
+ return
+ if target is None:
+ target = self.scope.current_target()
+ self.map.setdefault(trigger, set()).add(target)
+
+ def add_type_dependencies(self, typ: Type, target: str | None = None) -> None:
+ """Add dependencies to all components of a type.
+
+ Args:
+ target: If not None, override the default (current) target of the
+ generated dependency.
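        [Editor's note -- illustrative only: for an annotation such as x: list[m.A],
        get_type_triggers decomposes the type into roughly

            ['<builtins.list>', '<m.A>']

        and each of those triggers becomes a dependency of the current target (or of
        the explicit target when one is given).]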
+ """ + for trigger in self.get_type_triggers(typ): + self.add_dependency(trigger, target) + + def add_attribute_dependency(self, typ: Type, name: str) -> None: + """Add dependencies for accessing a named attribute of a type.""" + targets = self.attribute_triggers(typ, name) + for target in targets: + self.add_dependency(target) + + def attribute_triggers(self, typ: Type, name: str) -> list[str]: + """Return all triggers associated with the attribute of a type.""" + typ = get_proper_type(typ) + if isinstance(typ, TypeVarType): + typ = get_proper_type(typ.upper_bound) + if isinstance(typ, TupleType): + typ = typ.partial_fallback + if isinstance(typ, Instance): + member = f"{typ.type.fullname}.{name}" + return [make_trigger(member)] + elif isinstance(typ, FunctionLike) and typ.is_type_obj(): + member = f"{typ.type_object().fullname}.{name}" + triggers = [make_trigger(member)] + triggers.extend(self.attribute_triggers(typ.fallback, name)) + return triggers + elif isinstance(typ, UnionType): + targets = [] + for item in typ.items: + targets.extend(self.attribute_triggers(item, name)) + return targets + elif isinstance(typ, TypeType): + triggers = self.attribute_triggers(typ.item, name) + if isinstance(typ.item, Instance) and typ.item.type.metaclass_type is not None: + triggers.append( + make_trigger(f"{typ.item.type.metaclass_type.type.fullname}.{name}") + ) + return triggers + else: + return [] + + def add_attribute_dependency_for_expr(self, e: Expression, name: str) -> None: + typ = self.type_map.get(e) + if typ is not None: + self.add_attribute_dependency(typ, name) + + def add_iter_dependency(self, node: Expression) -> None: + typ = self.type_map.get(node) + if typ: + self.add_attribute_dependency(typ, "__iter__") + + def use_logical_deps(self) -> bool: + return self.options is not None and self.options.logical_deps + + def get_type_triggers(self, typ: Type) -> list[str]: + return get_type_triggers(typ, self.use_logical_deps()) + + +def get_type_triggers( + typ: Type, use_logical_deps: bool, seen_aliases: set[TypeAliasType] | None = None +) -> list[str]: + """Return all triggers that correspond to a type becoming stale.""" + return typ.accept(TypeTriggersVisitor(use_logical_deps, seen_aliases)) + + +class TypeTriggersVisitor(TypeVisitor[list[str]]): + def __init__( + self, use_logical_deps: bool, seen_aliases: set[TypeAliasType] | None = None + ) -> None: + self.deps: list[str] = [] + self.seen_aliases: set[TypeAliasType] = seen_aliases or set() + self.use_logical_deps = use_logical_deps + + def get_type_triggers(self, typ: Type) -> list[str]: + return get_type_triggers(typ, self.use_logical_deps, self.seen_aliases) + + def visit_instance(self, typ: Instance) -> list[str]: + trigger = make_trigger(typ.type.fullname) + triggers = [trigger] + for arg in typ.args: + triggers.extend(self.get_type_triggers(arg)) + if typ.last_known_value: + triggers.extend(self.get_type_triggers(typ.last_known_value)) + if typ.extra_attrs and typ.extra_attrs.mod_name: + # Module as type effectively depends on all module attributes, use wildcard. 
+ triggers.append(make_wildcard_trigger(typ.extra_attrs.mod_name)) + return triggers + + def visit_type_alias_type(self, typ: TypeAliasType) -> list[str]: + if typ in self.seen_aliases: + return [] + self.seen_aliases.add(typ) + assert typ.alias is not None + trigger = make_trigger(typ.alias.fullname) + triggers = [trigger] + for arg in typ.args: + triggers.extend(self.get_type_triggers(arg)) + triggers.extend(self.get_type_triggers(typ.alias.target)) + return triggers + + def visit_any(self, typ: AnyType) -> list[str]: + if typ.missing_import_name is not None: + return [make_trigger(typ.missing_import_name)] + return [] + + def visit_none_type(self, typ: NoneType) -> list[str]: + return [] + + def visit_callable_type(self, typ: CallableType) -> list[str]: + triggers = [] + for arg in typ.arg_types: + triggers.extend(self.get_type_triggers(arg)) + triggers.extend(self.get_type_triggers(typ.ret_type)) + # fallback is a metaclass type for class objects, and is + # processed separately. + return triggers + + def visit_overloaded(self, typ: Overloaded) -> list[str]: + triggers = [] + for item in typ.items: + triggers.extend(self.get_type_triggers(item)) + return triggers + + def visit_erased_type(self, t: ErasedType) -> list[str]: + # This type should exist only temporarily during type inference + assert False, "Should not see an erased type here" + + def visit_deleted_type(self, typ: DeletedType) -> list[str]: + return [] + + def visit_partial_type(self, typ: PartialType) -> list[str]: + assert False, "Should not see a partial type here" + + def visit_tuple_type(self, typ: TupleType) -> list[str]: + triggers = [] + for item in typ.items: + triggers.extend(self.get_type_triggers(item)) + triggers.extend(self.get_type_triggers(typ.partial_fallback)) + return triggers + + def visit_type_type(self, typ: TypeType) -> list[str]: + triggers = self.get_type_triggers(typ.item) + if not self.use_logical_deps: + old_triggers = triggers.copy() + for trigger in old_triggers: + triggers.append(trigger.rstrip(">") + ".__init__>") + triggers.append(trigger.rstrip(">") + ".__new__>") + return triggers + + def visit_type_var(self, typ: TypeVarType) -> list[str]: + triggers = [] + if typ.fullname: + triggers.append(make_trigger(typ.fullname)) + triggers.extend(self.get_type_triggers(typ.upper_bound)) + triggers.extend(self.get_type_triggers(typ.default)) + for val in typ.values: + triggers.extend(self.get_type_triggers(val)) + return triggers + + def visit_param_spec(self, typ: ParamSpecType) -> list[str]: + triggers = [] + if typ.fullname: + triggers.append(make_trigger(typ.fullname)) + triggers.extend(self.get_type_triggers(typ.upper_bound)) + triggers.extend(self.get_type_triggers(typ.default)) + triggers.extend(self.get_type_triggers(typ.prefix)) + return triggers + + def visit_type_var_tuple(self, typ: TypeVarTupleType) -> list[str]: + triggers = [] + if typ.fullname: + triggers.append(make_trigger(typ.fullname)) + triggers.extend(self.get_type_triggers(typ.upper_bound)) + triggers.extend(self.get_type_triggers(typ.default)) + return triggers + + def visit_unpack_type(self, typ: UnpackType) -> list[str]: + return typ.type.accept(self) + + def visit_parameters(self, typ: Parameters) -> list[str]: + triggers = [] + for arg in typ.arg_types: + triggers.extend(self.get_type_triggers(arg)) + return triggers + + def visit_typeddict_type(self, typ: TypedDictType) -> list[str]: + triggers = [] + for item in typ.items.values(): + triggers.extend(self.get_type_triggers(item)) + 
triggers.extend(self.get_type_triggers(typ.fallback)) + return triggers + + def visit_literal_type(self, typ: LiteralType) -> list[str]: + return self.get_type_triggers(typ.fallback) + + def visit_unbound_type(self, typ: UnboundType) -> list[str]: + return [] + + def visit_uninhabited_type(self, typ: UninhabitedType) -> list[str]: + return [] + + def visit_union_type(self, typ: UnionType) -> list[str]: + triggers = [] + for item in typ.items: + triggers.extend(self.get_type_triggers(item)) + return triggers + + +def merge_dependencies(new_deps: dict[str, set[str]], deps: dict[str, set[str]]) -> None: + for trigger, targets in new_deps.items(): + deps.setdefault(trigger, set()).update(targets) + + +def non_trivial_bases(info: TypeInfo) -> list[TypeInfo]: + return [base for base in info.mro[1:] if base.fullname != "builtins.object"] + + +def has_user_bases(info: TypeInfo) -> bool: + return any(base.module_name not in ("builtins", "typing", "enum") for base in info.mro[1:]) + + +def dump_all_dependencies( + modules: dict[str, MypyFile], + type_map: dict[Expression, Type], + python_version: tuple[int, int], + options: Options, +) -> None: + """Generate dependencies for all interesting modules and print them to stdout.""" + all_deps: dict[str, set[str]] = {} + for id, node in modules.items(): + # Uncomment for debugging: + # print('processing', id) + if id in ("builtins", "typing") or "/typeshed/" in node.path: + continue + assert id == node.fullname + deps = get_dependencies(node, type_map, python_version, options) + for trigger, targets in deps.items(): + all_deps.setdefault(trigger, set()).update(targets) + type_state.add_all_protocol_deps(all_deps) + + for trigger, targets in sorted(all_deps.items(), key=lambda x: x[0]): + print(trigger) + for target in sorted(targets): + print(f" {target}") diff --git a/.venv/lib/python3.12/site-packages/mypy/server/mergecheck.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/server/mergecheck.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..d826ef5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/server/mergecheck.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/server/mergecheck.py b/.venv/lib/python3.12/site-packages/mypy/server/mergecheck.py new file mode 100644 index 0000000..11e0021 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/server/mergecheck.py @@ -0,0 +1,84 @@ +"""Check for duplicate AST nodes after merge.""" + +from __future__ import annotations + +from typing import Final + +from mypy.nodes import Decorator, FakeInfo, FuncDef, SymbolNode, Var +from mypy.server.objgraph import get_path, get_reachable_graph + +# If True, print more verbose output on failure. +DUMP_MISMATCH_NODES: Final = False + + +def check_consistency(o: object) -> None: + """Fail if there are two AST nodes with the same fullname reachable from 'o'. + + Raise AssertionError on failure and print some debugging output. + """ + seen, parents = get_reachable_graph(o) + reachable = list(seen.values()) + syms = [x for x in reachable if isinstance(x, SymbolNode)] + + m: dict[str, SymbolNode] = {} + for sym in syms: + if isinstance(sym, FakeInfo): + continue + + fn = sym.fullname + # Skip None and empty names, since they are ambiguous. + # TODO: Everything should have a proper full name? 
+ if not fn: + continue + + # Skip stuff that should be expected to have duplicate names + if isinstance(sym, (Var, Decorator)): + continue + if isinstance(sym, FuncDef) and sym.is_overload: + continue + + if fn not in m: + m[fn] = sym + continue + + # We have trouble and need to decide what to do about it. + sym1, sym2 = sym, m[fn] + + # If the type changed, then it shouldn't have been merged. + if type(sym1) is not type(sym2): + continue + + path1 = get_path(sym1, seen, parents) + path2 = get_path(sym2, seen, parents) + + if fn in m: + print(f"\nDuplicate {type(sym).__name__!r} nodes with fullname {fn!r} found:") + print("[1] %d: %s" % (id(sym1), path_to_str(path1))) + print("[2] %d: %s" % (id(sym2), path_to_str(path2))) + + if DUMP_MISMATCH_NODES and fn in m: + # Add verbose output with full AST node contents. + print("---") + print(id(sym1), sym1) + print("---") + print(id(sym2), sym2) + + assert sym.fullname not in m + + +def path_to_str(path: list[tuple[object, object]]) -> str: + result = "" + for attr, obj in path: + t = type(obj).__name__ + if t in ("dict", "tuple", "SymbolTable", "list"): + result += f"[{repr(attr)}]" + else: + if isinstance(obj, Var): + result += f".{attr}({t}:{obj.name})" + elif t in ("BuildManager", "FineGrainedBuildManager"): + # Omit class name for some classes that aren't part of a class + # hierarchy since there isn't much ambiguity. + result += f".{attr}" + else: + result += f".{attr}({t})" + return result diff --git a/.venv/lib/python3.12/site-packages/mypy/server/objgraph.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/server/objgraph.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..4af190f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/server/objgraph.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/server/objgraph.py b/.venv/lib/python3.12/site-packages/mypy/server/objgraph.py new file mode 100644 index 0000000..e5096d5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/server/objgraph.py @@ -0,0 +1,101 @@ +"""Find all objects reachable from a root object.""" + +from __future__ import annotations + +import types +import weakref +from collections.abc import Iterable, Iterator, Mapping +from typing import Final + +method_descriptor_type: Final = type(object.__dir__) +method_wrapper_type: Final = type(object().__ne__) +wrapper_descriptor_type: Final = type(object.__ne__) + +FUNCTION_TYPES: Final = ( + types.BuiltinFunctionType, + types.FunctionType, + types.MethodType, + method_descriptor_type, + wrapper_descriptor_type, + method_wrapper_type, +) + +ATTR_BLACKLIST: Final = {"__doc__", "__name__", "__class__", "__dict__"} + +# Instances of these types can't have references to other objects +ATOMIC_TYPE_BLACKLIST: Final = {bool, int, float, str, type(None), object} + +# Don't look at most attributes of these types +COLLECTION_TYPE_BLACKLIST: Final = {list, set, dict, tuple} + +# Don't return these objects +TYPE_BLACKLIST: Final = {weakref.ReferenceType} + + +def isproperty(o: object, attr: str) -> bool: + return isinstance(getattr(type(o), attr, None), property) + + +def get_edge_candidates(o: object) -> Iterator[tuple[object, object]]: + # use getattr because mypyc expects dict, not mappingproxy + if "__getattribute__" in getattr(type(o), "__dict__"): # noqa: B009 + return + if type(o) not in COLLECTION_TYPE_BLACKLIST: + for attr in dir(o): + try: + if attr not in ATTR_BLACKLIST and hasattr(o, attr) and not isproperty(o, attr): + e = 
getattr(o, attr) + if type(e) not in ATOMIC_TYPE_BLACKLIST: + yield attr, e + except AssertionError: + pass + if isinstance(o, Mapping): + yield from o.items() + elif isinstance(o, Iterable) and not isinstance(o, str): + for i, e in enumerate(o): + yield i, e + + +def get_edges(o: object) -> Iterator[tuple[object, object]]: + for s, e in get_edge_candidates(o): + if isinstance(e, FUNCTION_TYPES): + # We don't want to collect methods, but do want to collect values + # in closures and self pointers to other objects + + if hasattr(e, "__closure__"): + yield (s, "__closure__"), e.__closure__ + if hasattr(e, "__self__"): + se = e.__self__ + if se is not o and se is not type(o) and hasattr(s, "__self__"): + yield s.__self__, se + else: + if type(e) not in TYPE_BLACKLIST: + yield s, e + + +def get_reachable_graph(root: object) -> tuple[dict[int, object], dict[int, tuple[int, object]]]: + parents = {} + seen = {id(root): root} + worklist = [root] + while worklist: + o = worklist.pop() + for s, e in get_edges(o): + if id(e) in seen: + continue + parents[id(e)] = (id(o), s) + seen[id(e)] = e + worklist.append(e) + + return seen, parents + + +def get_path( + o: object, seen: dict[int, object], parents: dict[int, tuple[int, object]] +) -> list[tuple[object, object]]: + path = [] + while id(o) in parents: + pid, attr = parents[id(o)] + o = seen[pid] + path.append((attr, o)) + path.reverse() + return path diff --git a/.venv/lib/python3.12/site-packages/mypy/server/subexpr.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/server/subexpr.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..15a15de Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/server/subexpr.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/server/subexpr.py b/.venv/lib/python3.12/site-packages/mypy/server/subexpr.py new file mode 100644 index 0000000..013b936 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/server/subexpr.py @@ -0,0 +1,203 @@ +"""Find all subexpressions of an AST node.""" + +from __future__ import annotations + +from mypy.nodes import ( + AssertTypeExpr, + AssignmentExpr, + AwaitExpr, + CallExpr, + CastExpr, + ComparisonExpr, + ConditionalExpr, + DictExpr, + DictionaryComprehension, + Expression, + GeneratorExpr, + IndexExpr, + LambdaExpr, + ListComprehension, + ListExpr, + MemberExpr, + Node, + OpExpr, + RevealExpr, + SetComprehension, + SetExpr, + SliceExpr, + StarExpr, + TupleExpr, + TypeApplication, + TypeFormExpr, + UnaryExpr, + YieldExpr, + YieldFromExpr, +) +from mypy.traverser import TraverserVisitor + + +def get_subexpressions(node: Node) -> list[Expression]: + visitor = SubexpressionFinder() + node.accept(visitor) + return visitor.expressions + + +class SubexpressionFinder(TraverserVisitor): + def __init__(self) -> None: + self.expressions: list[Expression] = [] + + def visit_int_expr(self, o: Expression) -> None: + self.add(o) + + def visit_name_expr(self, o: Expression) -> None: + self.add(o) + + def visit_float_expr(self, o: Expression) -> None: + self.add(o) + + def visit_str_expr(self, o: Expression) -> None: + self.add(o) + + def visit_bytes_expr(self, o: Expression) -> None: + self.add(o) + + def visit_unicode_expr(self, o: Expression) -> None: + self.add(o) + + def visit_complex_expr(self, o: Expression) -> None: + self.add(o) + + def visit_ellipsis(self, o: Expression) -> None: + self.add(o) + + def visit_super_expr(self, o: Expression) -> None: + self.add(o) + + def 
visit_type_var_expr(self, o: Expression) -> None: + self.add(o) + + def visit_type_alias_expr(self, o: Expression) -> None: + self.add(o) + + def visit_namedtuple_expr(self, o: Expression) -> None: + self.add(o) + + def visit_typeddict_expr(self, o: Expression) -> None: + self.add(o) + + def visit__promote_expr(self, o: Expression) -> None: + self.add(o) + + def visit_newtype_expr(self, o: Expression) -> None: + self.add(o) + + def visit_member_expr(self, e: MemberExpr) -> None: + self.add(e) + super().visit_member_expr(e) + + def visit_yield_from_expr(self, e: YieldFromExpr) -> None: + self.add(e) + super().visit_yield_from_expr(e) + + def visit_yield_expr(self, e: YieldExpr) -> None: + self.add(e) + super().visit_yield_expr(e) + + def visit_call_expr(self, e: CallExpr) -> None: + self.add(e) + super().visit_call_expr(e) + + def visit_op_expr(self, e: OpExpr) -> None: + self.add(e) + super().visit_op_expr(e) + + def visit_comparison_expr(self, e: ComparisonExpr) -> None: + self.add(e) + super().visit_comparison_expr(e) + + def visit_slice_expr(self, e: SliceExpr) -> None: + self.add(e) + super().visit_slice_expr(e) + + def visit_cast_expr(self, e: CastExpr) -> None: + self.add(e) + super().visit_cast_expr(e) + + def visit_type_form_expr(self, e: TypeFormExpr) -> None: + self.add(e) + super().visit_type_form_expr(e) + + def visit_assert_type_expr(self, e: AssertTypeExpr) -> None: + self.add(e) + super().visit_assert_type_expr(e) + + def visit_reveal_expr(self, e: RevealExpr) -> None: + self.add(e) + super().visit_reveal_expr(e) + + def visit_assignment_expr(self, e: AssignmentExpr) -> None: + self.add(e) + super().visit_assignment_expr(e) + + def visit_unary_expr(self, e: UnaryExpr) -> None: + self.add(e) + super().visit_unary_expr(e) + + def visit_list_expr(self, e: ListExpr) -> None: + self.add(e) + super().visit_list_expr(e) + + def visit_tuple_expr(self, e: TupleExpr) -> None: + self.add(e) + super().visit_tuple_expr(e) + + def visit_dict_expr(self, e: DictExpr) -> None: + self.add(e) + super().visit_dict_expr(e) + + def visit_set_expr(self, e: SetExpr) -> None: + self.add(e) + super().visit_set_expr(e) + + def visit_index_expr(self, e: IndexExpr) -> None: + self.add(e) + super().visit_index_expr(e) + + def visit_generator_expr(self, e: GeneratorExpr) -> None: + self.add(e) + super().visit_generator_expr(e) + + def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> None: + self.add(e) + super().visit_dictionary_comprehension(e) + + def visit_list_comprehension(self, e: ListComprehension) -> None: + self.add(e) + super().visit_list_comprehension(e) + + def visit_set_comprehension(self, e: SetComprehension) -> None: + self.add(e) + super().visit_set_comprehension(e) + + def visit_conditional_expr(self, e: ConditionalExpr) -> None: + self.add(e) + super().visit_conditional_expr(e) + + def visit_type_application(self, e: TypeApplication) -> None: + self.add(e) + super().visit_type_application(e) + + def visit_lambda_expr(self, e: LambdaExpr) -> None: + self.add(e) + super().visit_lambda_expr(e) + + def visit_star_expr(self, e: StarExpr) -> None: + self.add(e) + super().visit_star_expr(e) + + def visit_await_expr(self, e: AwaitExpr) -> None: + self.add(e) + super().visit_await_expr(e) + + def add(self, e: Expression) -> None: + self.expressions.append(e) diff --git a/.venv/lib/python3.12/site-packages/mypy/server/target.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/server/target.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 
0000000..7e7a86c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/server/target.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/server/target.py b/.venv/lib/python3.12/site-packages/mypy/server/target.py new file mode 100644 index 0000000..c06eeeb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/server/target.py @@ -0,0 +1,11 @@ +from __future__ import annotations + + +def trigger_to_target(s: str) -> str: + assert s[0] == "<" + # Strip off the angle brackets + s = s[1:-1] + # If there is a [wildcard] or similar, strip that off too + if s[-1] == "]": + s = s.split("[")[0] + return s diff --git a/.venv/lib/python3.12/site-packages/mypy/server/trigger.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/server/trigger.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..474db12 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/server/trigger.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/server/trigger.py b/.venv/lib/python3.12/site-packages/mypy/server/trigger.py new file mode 100644 index 0000000..97b5f89 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/server/trigger.py @@ -0,0 +1,26 @@ +"""AST triggers that are used for fine-grained dependency handling.""" + +from __future__ import annotations + +from typing import Final + +# Used as a suffix for triggers to handle "from m import *" dependencies (see also +# make_wildcard_trigger) + +WILDCARD_TAG: Final = "[wildcard]" + + +def make_trigger(name: str) -> str: + return f"<{name}>" + + +def make_wildcard_trigger(module: str) -> str: + """Special trigger fired when any top-level name is changed in a module. + + Note that this is different from a module trigger, as module triggers are only + fired if the module is created, deleted, or replaced with a non-module, whereas + a wildcard trigger is triggered for namespace changes. + + This is used for "from m import *" dependencies. + """ + return f"<{module}{WILDCARD_TAG}>" diff --git a/.venv/lib/python3.12/site-packages/mypy/server/update.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/server/update.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..aa07289 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/server/update.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/server/update.py b/.venv/lib/python3.12/site-packages/mypy/server/update.py new file mode 100644 index 0000000..86ccb57 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/server/update.py @@ -0,0 +1,1345 @@ +"""Update build by processing changes using fine-grained dependencies. + +Use fine-grained dependencies to update targets in other modules that +may be affected by externally-visible changes in the changed modules. + +This forms the core of the fine-grained incremental daemon mode. This +module is not used at all by the 'classic' (non-daemon) incremental +mode. + +Here is some motivation for this mode: + +* By keeping program state in memory between incremental runs, we + only have to process changed modules, not their dependencies. The + classic incremental mode has to deserialize the symbol tables of + all dependencies of changed modules, which can be slow for large + programs. + +* Fine-grained dependencies allow processing only the relevant parts + of modules indirectly affected by a change. 
Say, if only one function + in a large module is affected by a change in another module, only this + function is processed. The classic incremental mode always processes + an entire file as a unit, which is typically much slower. + + * It's possible to independently process individual modules within an + import cycle (SCC). Small incremental changes can be fast independent + of the size of the related SCC. In classic incremental mode, any change + within a SCC requires the entire SCC to be processed, which can slow + things down considerably. + + Some terms: + + * A *target* is a function/method definition or the top level of a module. + We refer to targets using their fully qualified name (e.g. + 'mod.Cls.method'). Targets are the smallest units of processing during + fine-grained incremental checking. + + * A *trigger* represents the properties of a part of a program, and it + gets triggered/fired when these properties change. For example, + '<m.f>' refers to a module-level function. It gets triggered if + the signature of the function changes, or if the function is removed, + for example. + + Some program state is maintained across multiple build increments in + memory: + + * The full ASTs of all modules are stored in memory all the time (this + includes the type map). + + * A fine-grained dependency map is maintained, which maps triggers to + affected program locations (these can be targets, triggers, or + classes). The latter determine what other parts of a program need to + be processed again due to a fired trigger. + + Here's a summary of how a fine-grained incremental program update happens: + + * Determine which modules have changes in their source code since the + previous update. + + * Process changed modules one at a time. Perform a separate full update + for each changed module, but only report the errors after all modules + have been processed, since the intermediate states can generate bogus + errors due to only seeing a partial set of changes. + + * Each changed module is processed in full. We parse the module, and + run semantic analysis to create a new AST and symbol table for the + module. Reuse the existing ASTs and symbol tables of modules that + have no changes in their source code. At the end of this stage, we have + two ASTs and symbol tables for the changed module (the old and the new + versions). The latter AST has not yet been type checked. + + * Take a snapshot of the old symbol table. This is used later to determine + which properties of the module have changed and which triggers to fire. + + * Merge the old AST with the new AST, preserving the identities of + externally visible AST nodes for which we can find a corresponding node + in the new AST. (Look at mypy.server.astmerge for the details.) This + way all external references to AST nodes in the changed module will + continue to point to the right nodes (assuming they still have a valid + target). + + * Type check the new module. + + * Take another snapshot of the symbol table of the changed module. + Look at the differences between the old and new snapshots to determine + which parts of the changed modules have changed. The result is a set of + fired triggers. + + * Using the dependency map and the fired triggers, decide which other + targets have become stale and need to be reprocessed. + + * Create new fine-grained dependencies for the changed module.
We don't + garbage collect old dependencies, since extra dependencies are relatively + harmless (they take some memory and can theoretically slow things down + a bit by causing redundant work). This is implemented in + mypy.server.deps. + +* Strip the stale AST nodes that we found above. This returns them to a + state resembling the end of semantic analysis pass 1. We'll run semantic + analysis again on the existing AST nodes, and since semantic analysis + is not idempotent, we need to revert some changes made during semantic + analysis. This is implemented in mypy.server.aststrip. + +* Run semantic analyzer passes 2 and 3 on the stale AST nodes, and type + check them. We also need to do the symbol table snapshot comparison + dance to find any changes, and we need to merge ASTs to preserve AST node + identities. + +* If some triggers haven been fired, continue processing and repeat the + previous steps until no triggers are fired. + +This is module is tested using end-to-end fine-grained incremental mode +test cases (test-data/unit/fine-grained*.test). +""" + +from __future__ import annotations + +import os +import re +import sys +import time +from collections.abc import Sequence +from typing import Callable, Final, NamedTuple, Union +from typing_extensions import TypeAlias as _TypeAlias + +from mypy.build import ( + DEBUG_FINE_GRAINED, + FAKE_ROOT_MODULE, + BuildManager, + BuildResult, + Graph, + State, + load_graph, + process_fresh_modules, +) +from mypy.checker import FineGrainedDeferredNode +from mypy.errors import CompileError +from mypy.fscache import FileSystemCache +from mypy.modulefinder import BuildSource +from mypy.nodes import ( + Decorator, + FuncDef, + ImportFrom, + MypyFile, + OverloadedFuncDef, + SymbolNode, + SymbolTable, + TypeInfo, +) +from mypy.options import Options +from mypy.semanal_main import semantic_analysis_for_scc, semantic_analysis_for_targets +from mypy.server.astdiff import ( + SymbolSnapshot, + compare_symbol_table_snapshots, + snapshot_symbol_table, +) +from mypy.server.astmerge import merge_asts +from mypy.server.aststrip import SavedAttributes, strip_target +from mypy.server.deps import get_dependencies_of_target, merge_dependencies +from mypy.server.target import trigger_to_target +from mypy.server.trigger import WILDCARD_TAG, make_trigger +from mypy.typestate import type_state +from mypy.util import is_stdlib_file, module_prefix, split_target + +MAX_ITER: Final = 1000 + +# These are modules beyond stdlib that have some special meaning for mypy. +SENSITIVE_INTERNAL_MODULES = ("mypy_extensions", "typing_extensions") + + +class FineGrainedBuildManager: + def __init__(self, result: BuildResult) -> None: + """Initialize fine-grained build based on a batch build. + + Args: + result: Result from the initialized build. + The manager and graph will be taken over by this class. + manager: State of the build (mutated by this class) + graph: Additional state of the build (mutated by this class) + """ + manager = result.manager + self.manager = manager + self.graph = result.graph + self.previous_modules = get_module_to_path_map(self.graph) + self.deps = manager.fg_deps + # Merge in any root dependencies that may not have been loaded + merge_dependencies(manager.load_fine_grained_deps(FAKE_ROOT_MODULE), self.deps) + self.previous_targets_with_errors = manager.errors.targets() + self.previous_messages: list[str] = result.errors.copy() + # Module, if any, that had blocking errors in the last run as (id, path) tuple. 
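+ # (Editor's note, illustrative: a "blocking" error is one that prevents the
+ # module from being analyzed at all, e.g. a parse error; update() below
+ # retries such a module first on the next increment.)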
+ self.blocking_error: tuple[str, str] | None = None + # Module that we haven't processed yet but that are known to be stale. + self.stale: list[tuple[str, str]] = [] + # Disable the cache so that load_graph doesn't try going back to disk + # for the cache. + self.manager.cache_enabled = False + + # Some hints to the test suite about what is going on: + # Active triggers during the last update + self.triggered: list[str] = [] + # Modules passed to update during the last update + self.changed_modules: list[tuple[str, str]] = [] + # Modules processed during the last update + self.updated_modules: list[str] = [] + # Targets processed during last update (for testing only). + self.processed_targets: list[str] = [] + + def update( + self, + changed_modules: list[tuple[str, str]], + removed_modules: list[tuple[str, str]], + followed: bool = False, + ) -> list[str]: + """Update previous build result by processing changed modules. + + Also propagate changes to other modules as needed, but only process + those parts of other modules that are affected by the changes. Retain + the existing ASTs and symbol tables of unaffected modules. + + Reuses original BuildManager and Graph. + + Args: + changed_modules: Modules changed since the previous update/build; each is + a (module id, path) tuple. Includes modified and added modules. + Assume this is correct; it's not validated here. + removed_modules: Modules that have been deleted since the previous update + or removed from the build. + followed: If True, the modules were found through following imports + + Returns: + A list of errors. + """ + self.processed_targets.clear() + changed_modules = changed_modules + removed_modules + removed_set = {module for module, _ in removed_modules} + self.changed_modules = changed_modules + + if not changed_modules: + return self.previous_messages + + # Reset find_module's caches for the new build. + self.manager.find_module_cache.clear() + + self.triggered = [] + self.updated_modules = [] + changed_modules = dedupe_modules(changed_modules + self.stale) + initial_set = {id for id, _ in changed_modules} + self.manager.log_fine_grained( + "==== update %s ====" % ", ".join(repr(id) for id, _ in changed_modules) + ) + if self.previous_targets_with_errors and is_verbose(self.manager): + self.manager.log_fine_grained( + "previous targets with errors: %s" % sorted(self.previous_targets_with_errors) + ) + + blocking_error = None + if self.blocking_error: + # Handle blocking errors first. We'll exit as soon as we find a + # module that still has blocking errors. + self.manager.log_fine_grained(f"existing blocker: {self.blocking_error[0]}") + changed_modules = dedupe_modules([self.blocking_error] + changed_modules) + blocking_error = self.blocking_error[0] + self.blocking_error = None + + while True: + result = self.update_one( + changed_modules, initial_set, removed_set, blocking_error, followed + ) + changed_modules, (next_id, next_path), blocker_messages = result + + if blocker_messages is not None: + self.blocking_error = (next_id, next_path) + self.stale = changed_modules + messages = blocker_messages + break + + # It looks like we are done processing everything, so now + # reprocess all targets with errors. We are careful to + # support the possibility that reprocessing an errored module + # might trigger loading of a module, but I am not sure + # if this can really happen. + if not changed_modules: + # N.B: We just checked next_id, so manager.errors contains + # the errors from it. 
Thus we consider next_id up to date + # when propagating changes from the errored targets, + # which prevents us from reprocessing errors in it. + changed_modules = propagate_changes_using_dependencies( + self.manager, + self.graph, + self.deps, + set(), + {next_id}, + self.previous_targets_with_errors, + self.processed_targets, + ) + changed_modules = dedupe_modules(changed_modules) + if not changed_modules: + # Preserve state needed for the next update. + self.previous_targets_with_errors = self.manager.errors.targets() + messages = self.manager.errors.new_messages() + break + + messages = sort_messages_preserving_file_order(messages, self.previous_messages) + self.previous_messages = messages.copy() + return messages + + def trigger(self, target: str) -> list[str]: + """Trigger a specific target explicitly. + + This is intended for use by the suggestions engine. + """ + self.manager.errors.reset() + changed_modules = propagate_changes_using_dependencies( + self.manager, + self.graph, + self.deps, + set(), + set(), + self.previous_targets_with_errors | {target}, + [], + ) + # Preserve state needed for the next update. + self.previous_targets_with_errors = self.manager.errors.targets() + self.previous_messages = self.manager.errors.new_messages().copy() + return self.update(changed_modules, []) + + def flush_cache(self) -> None: + """Flush AST cache. + + This needs to be called after each increment, or file changes won't + be detected reliably. + """ + self.manager.ast_cache.clear() + + def update_one( + self, + changed_modules: list[tuple[str, str]], + initial_set: set[str], + removed_set: set[str], + blocking_error: str | None, + followed: bool, + ) -> tuple[list[tuple[str, str]], tuple[str, str], list[str] | None]: + """Process a module from the list of changed modules. + + Returns: + Tuple with these items: + + - Updated list of pending changed modules as (module id, path) tuples + - Module which was actually processed as (id, path) tuple + - If there was a blocking error, the error messages from it + """ + t0 = time.time() + next_id, next_path = changed_modules.pop(0) + + # If we have a module with a blocking error that is no longer + # in the import graph, we must skip it as otherwise we'll be + # stuck with the blocking error. + if ( + next_id == blocking_error + and next_id not in self.previous_modules + and next_id not in initial_set + ): + self.manager.log_fine_grained( + f"skip {next_id!r} (module with blocking error not in import graph)" + ) + return changed_modules, (next_id, next_path), None + + result = self.update_module(next_id, next_path, next_id in removed_set, followed) + remaining, (next_id, next_path), blocker_messages = result + changed_modules = [(id, path) for id, path in changed_modules if id != next_id] + changed_modules = dedupe_modules(remaining + changed_modules) + t1 = time.time() + + self.manager.log_fine_grained( + f"update once: {next_id} in {t1 - t0:.3f}s - {len(changed_modules)} left" + ) + + return changed_modules, (next_id, next_path), blocker_messages + + def update_module( + self, module: str, path: str, force_removed: bool, followed: bool + ) -> tuple[list[tuple[str, str]], tuple[str, str], list[str] | None]: + """Update a single modified module. + + If the module contains imports of previously unseen modules, only process one of + the new modules and return the remaining work to be done. 
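+ 
+ Editor's illustration (not upstream text): if the rebuilt module newly
+ imports two previously unseen modules, a single call may fully process
+ just one of them and hand the rest back in the returned list of remaining
+ modules for a later call.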
+ + Args: + module: Id of the module + path: File system path of the module + force_removed: If True, consider module removed from the build even if path + exists (used for removing an existing file from the build) + followed: Was this found via import following? + + Returns: + Tuple with these items: + + - Remaining modules to process as (module id, path) tuples + - Module which was actually processed as (id, path) tuple + - If there was a blocking error, the error messages from it + """ + self.manager.log_fine_grained(f"--- update single {module!r} ---") + self.updated_modules.append(module) + + # builtins and friends could potentially get triggered because + # of protocol stuff, but nothing good could possibly come from + # actually updating them. + if ( + is_stdlib_file(self.manager.options.abs_custom_typeshed_dir, path) + or module in SENSITIVE_INTERNAL_MODULES + ): + return [], (module, path), None + + manager = self.manager + previous_modules = self.previous_modules + graph = self.graph + + ensure_deps_loaded(module, self.deps, graph) + + # If this is an already existing module, make sure that we have + # its tree loaded so that we can snapshot it for comparison. + ensure_trees_loaded(manager, graph, [module]) + + t0 = time.time() + # Record symbol table snapshot of old version the changed module. + old_snapshots: dict[str, dict[str, SymbolSnapshot]] = {} + if module in manager.modules: + snapshot = snapshot_symbol_table(module, manager.modules[module].names) + old_snapshots[module] = snapshot + + manager.errors.reset() + self.processed_targets.append(module) + result = update_module_isolated( + module, path, manager, previous_modules, graph, force_removed, followed + ) + if isinstance(result, BlockedUpdate): + # Blocking error -- just give up + module, path, remaining, errors = result + self.previous_modules = get_module_to_path_map(graph) + return remaining, (module, path), errors + assert isinstance(result, NormalUpdate) # Work around #4124 + module, path, remaining, tree = result + + # TODO: What to do with stale dependencies? + t1 = time.time() + triggered = calculate_active_triggers(manager, old_snapshots, {module: tree}) + if is_verbose(self.manager): + filtered = [trigger for trigger in triggered if not trigger.endswith("__>")] + self.manager.log_fine_grained(f"triggered: {sorted(filtered)!r}") + self.triggered.extend(triggered | self.previous_targets_with_errors) + if module in graph: + graph[module].update_fine_grained_deps(self.deps) + graph[module].free_state() + remaining += propagate_changes_using_dependencies( + manager, + graph, + self.deps, + triggered, + {module}, + targets_with_errors=set(), + processed_targets=self.processed_targets, + ) + t2 = time.time() + manager.add_stats(update_isolated_time=t1 - t0, propagate_time=t2 - t1) + + # Preserve state needed for the next update. + self.previous_targets_with_errors.update(manager.errors.targets()) + self.previous_modules = get_module_to_path_map(graph) + + return remaining, (module, path), None + + +def find_unloaded_deps( + manager: BuildManager, graph: dict[str, State], initial: Sequence[str] +) -> list[str]: + """Find all the deps of the nodes in initial that haven't had their tree loaded. + + The key invariant here is that if a module is loaded, so are all + of their dependencies. This means that when we encounter a loaded + module, we don't need to explore its dependencies. 
(This + invariant is slightly violated when dependencies are added, which + can be handled by calling find_unloaded_deps directly on the new + dependencies.) + """ + worklist = list(initial) + seen: set[str] = set() + unloaded = [] + while worklist: + node = worklist.pop() + if node in seen or node not in graph: + continue + seen.add(node) + if node not in manager.modules: + ancestors = graph[node].ancestors or [] + worklist.extend(graph[node].dependencies + ancestors) + unloaded.append(node) + + return unloaded + + +def ensure_deps_loaded(module: str, deps: dict[str, set[str]], graph: dict[str, State]) -> None: + """Ensure that the dependencies on a module are loaded. + + Dependencies are loaded into the 'deps' dictionary. + + This also requires loading dependencies from any parent modules, + since dependencies will get stored with parent modules when a module + doesn't exist. + """ + if module in graph and graph[module].fine_grained_deps_loaded: + return + parts = module.split(".") + for i in range(len(parts)): + base = ".".join(parts[: i + 1]) + if base in graph and not graph[base].fine_grained_deps_loaded: + merge_dependencies(graph[base].load_fine_grained_deps(), deps) + graph[base].fine_grained_deps_loaded = True + + +def ensure_trees_loaded( + manager: BuildManager, graph: dict[str, State], initial: Sequence[str] +) -> None: + """Ensure that the modules in initial and their deps have loaded trees.""" + to_process = find_unloaded_deps(manager, graph, initial) + if to_process: + if is_verbose(manager): + manager.log_fine_grained( + "Calling process_fresh_modules on set of size {} ({})".format( + len(to_process), sorted(to_process) + ) + ) + process_fresh_modules(graph, to_process, manager) + + +# The result of update_module_isolated when no blockers, with these items: +# +# - Id of the changed module (can be different from the module argument) +# - Path of the changed module +# - New AST for the changed module (None if module was deleted) +# - Remaining changed modules that are not processed yet as (module id, path) +# tuples (non-empty if the original changed module imported other new +# modules) +class NormalUpdate(NamedTuple): + module: str + path: str + remaining: list[tuple[str, str]] + tree: MypyFile | None + + +# The result of update_module_isolated when there is a blocking error. Items +# are similar to NormalUpdate (but there are fewer). +class BlockedUpdate(NamedTuple): + module: str + path: str + remaining: list[tuple[str, str]] + messages: list[str] + + +UpdateResult: _TypeAlias = Union[NormalUpdate, BlockedUpdate] + + +def update_module_isolated( + module: str, + path: str, + manager: BuildManager, + previous_modules: dict[str, str], + graph: Graph, + force_removed: bool, + followed: bool, +) -> UpdateResult: + """Build a new version of one changed module only. + + Don't propagate changes to elsewhere in the program. Raise CompileError on + encountering a blocking error. + + Args: + module: Changed module (modified, created or deleted) + path: Path of the changed module + manager: Build manager + graph: Build graph + force_removed: If True, consider the module removed from the build even it the + file exists + + Returns a named tuple describing the result (see above for details). 
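+ 
+ Editor's sketch (not upstream text; names are the surrounding parameters
+ plus hypothetical values) of the expected call and result handling:
+ 
+     result = update_module_isolated(mod_id, path, manager, previous_modules,
+                                     graph, force_removed=False, followed=False)
+     if isinstance(result, BlockedUpdate):
+         messages = result.messages  # blocking error: give up on this module
+     else:
+         remaining, tree = result.remaining, result.tree  # NormalUpdate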
+ """ + if module not in graph: + manager.log_fine_grained(f"new module {module!r}") + + if not manager.fscache.isfile(path) or force_removed: + delete_module(module, path, graph, manager) + return NormalUpdate(module, path, [], None) + + sources = get_sources(manager.fscache, previous_modules, [(module, path)], followed) + + if module in manager.missing_modules: + manager.missing_modules.remove(module) + + orig_module = module + orig_state = graph.get(module) + orig_tree = manager.modules.get(module) + + def restore(ids: list[str]) -> None: + # For each of the modules in ids, restore that id's old + # manager.modules and graphs entries. (Except for the original + # module, this means deleting them.) + for id in ids: + if id == orig_module and orig_tree: + manager.modules[id] = orig_tree + elif id in manager.modules: + del manager.modules[id] + if id == orig_module and orig_state: + graph[id] = orig_state + elif id in graph: + del graph[id] + + new_modules: list[State] = [] + try: + if module in graph: + del graph[module] + load_graph(sources, manager, graph, new_modules) + except CompileError as err: + # Parse error somewhere in the program -- a blocker + assert err.module_with_blocker + restore([module] + [st.id for st in new_modules]) + return BlockedUpdate(err.module_with_blocker, path, [], err.messages) + + # Reparsing the file may have brought in dependencies that we + # didn't have before. Make sure that they are loaded to restore + # the invariant that a module having a loaded tree implies that + # its dependencies do as well. + ensure_trees_loaded(manager, graph, graph[module].dependencies) + + # Find any other modules brought in by imports. + changed_modules = [(st.id, st.xpath) for st in new_modules] + for m in new_modules: + manager.import_map[m.id] = set(m.dependencies + m.suppressed) + + # If there are multiple modules to process, only process one of them and return + # the remaining ones to the caller. + if len(changed_modules) > 1: + # As an optimization, look for a module that imports no other changed modules. + module, path = find_relative_leaf_module(changed_modules, graph) + changed_modules.remove((module, path)) + remaining_modules = changed_modules + # The remaining modules haven't been processed yet so drop them. + restore([id for id, _ in remaining_modules]) + manager.log_fine_grained(f"--> {module!r} (newly imported)") + else: + remaining_modules = [] + + state = graph[module] + + # Process the changed file. + state.parse_file() + assert state.tree is not None, "file must be at least parsed" + t0 = time.time() + try: + semantic_analysis_for_scc(graph, [state.id], manager.errors) + except CompileError as err: + # There was a blocking error, so module AST is incomplete. Restore old modules. + restore([module]) + return BlockedUpdate(module, path, remaining_modules, err.messages) + + # Merge old and new ASTs. + new_modules_dict: dict[str, MypyFile | None] = {module: state.tree} + replace_modules_with_new_variants(manager, graph, {orig_module: orig_tree}, new_modules_dict) + + t1 = time.time() + # Perform type checking. 
+ state.type_checker().reset() + state.type_check_first_pass() + state.type_check_second_pass() + state.detect_possibly_undefined_vars() + state.generate_unused_ignore_notes() + state.generate_ignore_without_code_notes() + t2 = time.time() + state.finish_passes() + t3 = time.time() + manager.add_stats(semanal_time=t1 - t0, typecheck_time=t2 - t1, finish_passes_time=t3 - t2) + + graph[module] = state + + return NormalUpdate(module, path, remaining_modules, state.tree) + + +def find_relative_leaf_module(modules: list[tuple[str, str]], graph: Graph) -> tuple[str, str]: + """Find a module in a list that directly imports no other module in the list. + + If no such module exists, return the lexicographically first module from the list. + Always return one of the items in the modules list. + + NOTE: If both 'abc' and 'typing' have changed, an effect of the above rule is that + we prefer 'abc', even if both are in the same SCC. This works around a false + positive in 'typing', at least in tests. + + Args: + modules: List of (module, path) tuples (non-empty) + graph: Program import graph that contains all modules in the module list + """ + assert modules + # Sort for repeatable results. + modules = sorted(modules) + module_set = {module for module, _ in modules} + for module, path in modules: + state = graph[module] + if len(set(state.dependencies) & module_set) == 0: + # Found it! + return module, path + # Could not find any. Just return the first module (by lexicographic order). + return modules[0] + + +def delete_module(module_id: str, path: str, graph: Graph, manager: BuildManager) -> None: + manager.log_fine_grained(f"delete module {module_id!r}") + # TODO: Remove deps for the module (this only affects memory use, not correctness) + if module_id in graph: + del graph[module_id] + if module_id in manager.modules: + del manager.modules[module_id] + components = module_id.split(".") + if len(components) > 1: + # Delete reference to module in parent module. + parent_id = ".".join(components[:-1]) + # If parent module is ignored, it won't be included in the modules dictionary. + if parent_id in manager.modules: + parent = manager.modules[parent_id] + if components[-1] in parent.names: + del parent.names[components[-1]] + # If the module is removed from the build but still exists, then + # we mark it as missing so that it will get picked up by import from still. + if manager.fscache.isfile(path): + manager.missing_modules.add(module_id) + + +def dedupe_modules(modules: list[tuple[str, str]]) -> list[tuple[str, str]]: + seen: set[str] = set() + result = [] + for id, path in modules: + if id not in seen: + seen.add(id) + result.append((id, path)) + return result + + +def get_module_to_path_map(graph: Graph) -> dict[str, str]: + return {module: node.xpath for module, node in graph.items()} + + +def get_sources( + fscache: FileSystemCache, + modules: dict[str, str], + changed_modules: list[tuple[str, str]], + followed: bool, +) -> list[BuildSource]: + sources = [] + for id, path in changed_modules: + if fscache.isfile(path): + sources.append(BuildSource(path, id, None, followed=followed)) + return sources + + +def calculate_active_triggers( + manager: BuildManager, + old_snapshots: dict[str, dict[str, SymbolSnapshot]], + new_modules: dict[str, MypyFile | None], +) -> set[str]: + """Determine activated triggers by comparing old and new symbol tables. + + For example, if only the signature of function m.f is different in the new + symbol table, return {''}. 
+ """ + names: set[str] = set() + for id in new_modules: + snapshot1 = old_snapshots.get(id) + if snapshot1 is None: + names.add(id) + snapshot1 = {} + new = new_modules[id] + if new is None: + snapshot2 = snapshot_symbol_table(id, SymbolTable()) + names.add(id) + else: + snapshot2 = snapshot_symbol_table(id, new.names) + diff = compare_symbol_table_snapshots(id, snapshot1, snapshot2) + package_nesting_level = id.count(".") + for item in diff.copy(): + if item.count(".") <= package_nesting_level + 1 and item.split(".")[-1] not in ( + "__builtins__", + "__file__", + "__name__", + "__package__", + "__doc__", + ): + # Activate catch-all wildcard trigger for top-level module changes (used for + # "from m import *"). This also gets triggered by changes to module-private + # entries, but as these unneeded dependencies only result in extra processing, + # it's a minor problem. + # + # TODO: Some __* names cause mistriggers. Fix the underlying issue instead of + # special casing them here. + diff.add(id + WILDCARD_TAG) + if item.count(".") > package_nesting_level + 1: + # These are for changes within classes, used by protocols. + diff.add(item.rsplit(".", 1)[0] + WILDCARD_TAG) + + names |= diff + return {make_trigger(name) for name in names} + + +def replace_modules_with_new_variants( + manager: BuildManager, + graph: dict[str, State], + old_modules: dict[str, MypyFile | None], + new_modules: dict[str, MypyFile | None], +) -> None: + """Replace modules with newly builds versions. + + Retain the identities of externally visible AST nodes in the + old ASTs so that references to the affected modules from other + modules will still be valid (unless something was deleted or + replaced with an incompatible definition, in which case there + will be dangling references that will be handled by + propagate_changes_using_dependencies). + """ + for id in new_modules: + preserved_module = old_modules.get(id) + new_module = new_modules[id] + if preserved_module and new_module is not None: + merge_asts(preserved_module, preserved_module.names, new_module, new_module.names) + manager.modules[id] = preserved_module + graph[id].tree = preserved_module + + +def propagate_changes_using_dependencies( + manager: BuildManager, + graph: dict[str, State], + deps: dict[str, set[str]], + triggered: set[str], + up_to_date_modules: set[str], + targets_with_errors: set[str], + processed_targets: list[str], +) -> list[tuple[str, str]]: + """Transitively rechecks targets based on triggers and the dependency map. + + Returns a list (module id, path) tuples representing modules that contain + a target that needs to be reprocessed but that has not been parsed yet. + + Processed targets should be appended to processed_targets (used in tests only, + to test the order of processing targets). + """ + + num_iter = 0 + remaining_modules: list[tuple[str, str]] = [] + + # Propagate changes until nothing visible has changed during the last + # iteration. + while triggered or targets_with_errors: + num_iter += 1 + if num_iter > MAX_ITER: + raise RuntimeError("Max number of iterations (%d) reached (endless loop?)" % MAX_ITER) + + todo, unloaded, stale_protos = find_targets_recursive( + manager, graph, triggered, deps, up_to_date_modules + ) + # TODO: we sort to make it deterministic, but this is *incredibly* ad hoc + remaining_modules.extend((id, graph[id].xpath) for id in sorted(unloaded)) + # Also process targets that used to have errors, as otherwise some + # errors might be lost. 
+ for target in targets_with_errors: + id = module_prefix(graph, target) + if id is not None and id not in up_to_date_modules: + if id not in todo: + todo[id] = set() + manager.log_fine_grained(f"process target with error: {target}") + more_nodes, _ = lookup_target(manager, target) + todo[id].update(more_nodes) + triggered = set() + # First invalidate subtype caches in all stale protocols. + # We need to do this to avoid false negatives if the protocol itself is + # unchanged, but was marked stale because its sub- (or super-) type changed. + for info in stale_protos: + type_state.reset_subtype_caches_for(info) + # Then fully reprocess all targets. + # TODO: Preserve order (set is not optimal) + for id, nodes in sorted(todo.items(), key=lambda x: x[0]): + assert id not in up_to_date_modules + triggered |= reprocess_nodes(manager, graph, id, nodes, deps, processed_targets) + # Changes elsewhere may require us to reprocess modules that were + # previously considered up to date. For example, there may be a + # dependency loop that loops back to an originally processed module. + up_to_date_modules = set() + targets_with_errors = set() + if is_verbose(manager): + manager.log_fine_grained(f"triggered: {list(triggered)!r}") + + return remaining_modules + + +def find_targets_recursive( + manager: BuildManager, + graph: Graph, + triggers: set[str], + deps: dict[str, set[str]], + up_to_date_modules: set[str], +) -> tuple[dict[str, set[FineGrainedDeferredNode]], set[str], set[TypeInfo]]: + """Find names of all targets that need to reprocessed, given some triggers. + + Returns: A tuple containing a: + * Dictionary from module id to a set of stale targets. + * A set of module ids for unparsed modules with stale targets. + """ + result: dict[str, set[FineGrainedDeferredNode]] = {} + worklist = triggers + processed: set[str] = set() + stale_protos: set[TypeInfo] = set() + unloaded_files: set[str] = set() + + # Find AST nodes corresponding to each target. + # + # TODO: Don't rely on a set, since the items are in an unpredictable order. + while worklist: + processed |= worklist + current = worklist + worklist = set() + for target in current: + if target.startswith("<"): + module_id = module_prefix(graph, trigger_to_target(target)) + if module_id: + ensure_deps_loaded(module_id, deps, graph) + + worklist |= deps.get(target, set()) - processed + else: + module_id = module_prefix(graph, target) + if module_id is None: + # Deleted module. + continue + if module_id in up_to_date_modules: + # Already processed. + continue + if ( + module_id not in manager.modules + or manager.modules[module_id].is_cache_skeleton + ): + # We haven't actually parsed and checked the module, so we don't have + # access to the actual nodes. + # Add it to the queue of files that need to be processed fully. + unloaded_files.add(module_id) + continue + + if module_id not in result: + result[module_id] = set() + manager.log_fine_grained(f"process: {target}") + deferred, stale_proto = lookup_target(manager, target) + if stale_proto: + stale_protos.add(stale_proto) + result[module_id].update(deferred) + + return result, unloaded_files, stale_protos + + +def reprocess_nodes( + manager: BuildManager, + graph: dict[str, State], + module_id: str, + nodeset: set[FineGrainedDeferredNode], + deps: dict[str, set[str]], + processed_targets: list[str], +) -> set[str]: + """Reprocess a set of nodes within a single module. + + Return fired triggers. 
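+ 
+ Editor's summary (not upstream text): the nodes are stripped back to their
+ post-pass-1 state, re-analyzed and re-type-checked, and the before/after
+ symbol table snapshots are compared; every name that changed is returned
+ as a freshly fired trigger for the next propagation round.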
+ """ + if module_id not in graph: + manager.log_fine_grained("%s not in graph (blocking errors or deleted?)" % module_id) + return set() + + file_node = manager.modules[module_id] + old_symbols = find_symbol_tables_recursive(file_node.fullname, file_node.names) + old_symbols = {name: names.copy() for name, names in old_symbols.items()} + old_symbols_snapshot = snapshot_symbol_table(file_node.fullname, file_node.names) + + def key(node: FineGrainedDeferredNode) -> int: + # Unlike modules which are sorted by name within SCC, + # nodes within the same module are sorted by line number, because + # this is how they are processed in normal mode. + return node.node.line + + nodes = sorted(nodeset, key=key) + + state = graph[module_id] + options = state.options + manager.errors.set_file_ignored_lines( + file_node.path, file_node.ignored_lines, options.ignore_errors or state.ignore_all + ) + manager.errors.set_skipped_lines(file_node.path, file_node.skipped_lines) + + targets = set() + for node in nodes: + target = target_from_node(module_id, node.node) + if target is not None: + targets.add(target) + manager.errors.clear_errors_in_targets(file_node.path, targets) + + # If one of the nodes is the module itself, emit any errors that + # happened before semantic analysis. + for target in targets: + if target == module_id: + for info in graph[module_id].early_errors: + manager.errors.add_error_info(info) + + # Strip semantic analysis information. + saved_attrs: SavedAttributes = {} + for deferred in nodes: + processed_targets.append(deferred.node.fullname) + strip_target(deferred.node, saved_attrs) + semantic_analysis_for_targets(graph[module_id], nodes, graph, saved_attrs) + # Merge symbol tables to preserve identities of AST nodes. The file node will remain + # the same, but other nodes may have been recreated with different identities, such as + # NamedTuples defined using assignment statements. + new_symbols = find_symbol_tables_recursive(file_node.fullname, file_node.names) + for name in old_symbols: + if name in new_symbols: + merge_asts(file_node, old_symbols[name], file_node, new_symbols[name]) + + # Type check. + checker = graph[module_id].type_checker() + checker.reset() + # We seem to need additional passes in fine-grained incremental mode. + checker.pass_num = 0 + checker.last_pass = 3 + # It is tricky to reliably invalidate constructor cache in fine-grained increments. + # See PR 19514 description for details. + more = checker.check_second_pass(nodes, allow_constructor_cache=False) + while more: + more = False + if graph[module_id].type_checker().check_second_pass(allow_constructor_cache=False): + more = True + + if manager.options.export_types: + manager.all_types.update(graph[module_id].type_map()) + + new_symbols_snapshot = snapshot_symbol_table(file_node.fullname, file_node.names) + # Check if any attribute types were changed and need to be propagated further. + changed = compare_symbol_table_snapshots( + file_node.fullname, old_symbols_snapshot, new_symbols_snapshot + ) + new_triggered = {make_trigger(name) for name in changed} + + # Dependencies may have changed. + update_deps(module_id, nodes, graph, deps, options) + + # Report missing imports. + graph[module_id].verify_dependencies() + + graph[module_id].free_state() + + return new_triggered + + +def find_symbol_tables_recursive(prefix: str, symbols: SymbolTable) -> dict[str, SymbolTable]: + """Find all nested symbol tables. 
+ + Args: + prefix: Full name prefix (used for return value keys and to filter result so that + cross references to other modules aren't included) + symbols: Root symbol table + + Returns a dictionary from full name to corresponding symbol table. + """ + result = {prefix: symbols} + for name, node in symbols.items(): + if isinstance(node.node, TypeInfo) and node.node.fullname.startswith(prefix + "."): + more = find_symbol_tables_recursive(prefix + "." + name, node.node.names) + result.update(more) + return result + + +def update_deps( + module_id: str, + nodes: list[FineGrainedDeferredNode], + graph: dict[str, State], + deps: dict[str, set[str]], + options: Options, +) -> None: + for deferred in nodes: + node = deferred.node + type_map = graph[module_id].type_map() + tree = graph[module_id].tree + assert tree is not None, "Tree must be processed at this stage" + new_deps = get_dependencies_of_target( + module_id, tree, node, type_map, options.python_version + ) + for trigger, targets in new_deps.items(): + deps.setdefault(trigger, set()).update(targets) + # Merge also the newly added protocol deps (if any). + type_state.update_protocol_deps(deps) + + +def lookup_target( + manager: BuildManager, target: str +) -> tuple[list[FineGrainedDeferredNode], TypeInfo | None]: + """Look up a target by fully-qualified name. + + The first item in the return tuple is a list of deferred nodes that + needs to be reprocessed. If the target represents a TypeInfo corresponding + to a protocol, return it as a second item in the return tuple, otherwise None. + """ + + def not_found() -> None: + manager.log_fine_grained(f"Can't find matching target for {target} (stale dependency?)") + + modules = manager.modules + items = split_target(modules, target) + if items is None: + not_found() # Stale dependency + return [], None + module, rest = items + if rest: + components = rest.split(".") + else: + components = [] + node: SymbolNode | None = modules[module] + file: MypyFile | None = None + active_class = None + for c in components: + if isinstance(node, TypeInfo): + active_class = node + if isinstance(node, MypyFile): + file = node + if not isinstance(node, (MypyFile, TypeInfo)) or c not in node.names: + not_found() # Stale dependency + return [], None + # Don't reprocess plugin generated targets. They should get + # stripped and regenerated when the containing target is + # reprocessed. + if node.names[c].plugin_generated: + return [], None + node = node.names[c].node + if isinstance(node, TypeInfo): + # A ClassDef target covers the body of the class and everything defined + # within it. To get the body we include the entire surrounding target, + # typically a module top-level, since we don't support processing class + # bodies as separate entities for simplicity. + assert file is not None + if node.fullname != target: + # This is a reference to a different TypeInfo, likely due to a stale dependency. + # Processing them would spell trouble -- for example, we could be refreshing + # a deserialized TypeInfo with missing attributes. + not_found() + return [], None + result = [FineGrainedDeferredNode(file, None)] + stale_info: TypeInfo | None = None + if node.is_protocol: + stale_info = node + for name, symnode in node.names.items(): + node = symnode.node + if isinstance(node, FuncDef): + method, _ = lookup_target(manager, target + "." + name) + result.extend(method) + return result, stale_info + if isinstance(node, Decorator): + # Decorator targets actually refer to the function definition only. 
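The lookup above first maps a dotted target such as "pkg.mod.Cls.method" back to its defining module by trying progressively shorter prefixes against the set of known modules, and then walks the remaining components through symbol tables. A minimal standalone sketch of that prefix step; the helper name and the plain set standing in for the module table are illustrative, not mypy's actual API:

def longest_module_prefix(modules: set[str], target: str) -> str | None:
    # Try progressively shorter dotted prefixes until one names a known module.
    parts = target.split(".")
    for i in range(len(parts), 0, -1):
        candidate = ".".join(parts[:i])
        if candidate in modules:
            return candidate
    return None

# The class-level target maps back to its defining module "pkg.mod".
assert longest_module_prefix({"pkg", "pkg.mod"}, "pkg.mod.Cls.method") == "pkg.mod"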
+ node = node.func + if not isinstance(node, (FuncDef, MypyFile, OverloadedFuncDef)): + # The target can't be refreshed. It's possible that the target was + # changed to another type and we have a stale dependency pointing to it. + not_found() + return [], None + if node.fullname != target: + # Stale reference points to something unexpected. We shouldn't process since the + # context will be wrong and it could be a partially initialized deserialized node. + not_found() + return [], None + return [FineGrainedDeferredNode(node, active_class)], None + + +def is_verbose(manager: BuildManager) -> bool: + return manager.options.verbosity >= 1 or DEBUG_FINE_GRAINED + + +def target_from_node(module: str, node: FuncDef | MypyFile | OverloadedFuncDef) -> str | None: + """Return the target name corresponding to a deferred node. + + Args: + module: Must be module id of the module that defines 'node' + + Returns the target name, or None if the node is not a valid target in the given + module (for example, if it's actually defined in another module). + """ + if isinstance(node, MypyFile): + if module != node.fullname: + # Actually a reference to another module -- likely a stale dependency. + return None + return module + else: # OverloadedFuncDef or FuncDef + if node.info: + return f"{node.info.fullname}.{node.name}" + else: + return f"{module}.{node.name}" + + +if sys.platform != "win32": + INIT_SUFFIXES: Final = ("/__init__.py", "/__init__.pyi") +else: + INIT_SUFFIXES: Final = ( + os.sep + "__init__.py", + os.sep + "__init__.pyi", + os.altsep + "__init__.py", + os.altsep + "__init__.pyi", + ) + + +def refresh_suppressed_submodules( + module: str, + path: str | None, + deps: dict[str, set[str]], + graph: Graph, + fscache: FileSystemCache, + refresh_file: Callable[[str, str], list[str]], +) -> list[str] | None: + """Look for submodules that are now suppressed in target package. + + If a submodule a.b gets added, we need to mark it as suppressed + in modules that contain "from a import b". Previously we assumed + that 'a.b' is not a module but a regular name. + + This is only relevant when following imports normally. + + Args: + module: target package in which to look for submodules + path: path of the module + refresh_file: function that reads the AST of a module (returns error messages) + + Return a list of errors from refresh_file() if it was called. If the + return value is None, we didn't call refresh_file(). + """ + messages = None + if path is None or not path.endswith(INIT_SUFFIXES): + # Only packages have submodules. + return None + # Find any submodules present in the directory. + pkgdir = os.path.dirname(path) + try: + entries = fscache.listdir(pkgdir) + except FileNotFoundError: + entries = [] + for fnam in entries: + if ( + not fnam.endswith((".py", ".pyi")) + or fnam.startswith("__init__.") + or fnam.count(".") != 1 + ): + continue + shortname = fnam.split(".")[0] + submodule = module + "." + shortname + trigger = make_trigger(submodule) + + # We may be missing the required fine-grained deps. + ensure_deps_loaded(module, deps, graph) + + if trigger in deps: + for dep in deps[trigger]: + # We can ignore <...> deps since a submodule can't trigger any. + state = graph.get(dep) + if not state: + # Maybe it's a non-top-level target. We only care about the module. + dep_module = module_prefix(graph, dep) + if dep_module is not None: + state = graph.get(dep_module) + if state: + # Is the file may missing an AST in case it's read from cache? + if state.tree is None: + # Create AST for the file. 
This may produce some new errors + # that we need to propagate. + assert state.path is not None + messages = refresh_file(state.id, state.path) + tree = state.tree + assert tree # Will be fine, due to refresh_file() above + for imp in tree.imports: + if isinstance(imp, ImportFrom): + if ( + imp.id == module + and any(name == shortname for name, _ in imp.names) + and submodule not in state.suppressed_set + ): + state.suppressed.append(submodule) + state.suppressed_set.add(submodule) + return messages + + +def extract_fnam_from_message(message: str) -> str | None: + m = re.match(r"([^:]+):[0-9]+: (error|note): ", message) + if m: + return m.group(1) + return None + + +def extract_possible_fnam_from_message(message: str) -> str: + # This may return non-path things if there is some random colon on the line + return message.split(":", 1)[0] + + +def sort_messages_preserving_file_order( + messages: list[str], prev_messages: list[str] +) -> list[str]: + """Sort messages so that the order of files is preserved. + + An update generates messages so that the files can be in a fairly + arbitrary order. Preserve the order of files to avoid messages + getting reshuffled continuously. If there are messages in + additional files, sort them towards the end. + """ + # Calculate file order from the previous messages + n = 0 + order = {} + for msg in prev_messages: + fnam = extract_fnam_from_message(msg) + if fnam and fnam not in order: + order[fnam] = n + n += 1 + + # Related messages must be sorted as a group of successive lines + groups = [] + i = 0 + while i < len(messages): + msg = messages[i] + maybe_fnam = extract_possible_fnam_from_message(msg) + group = [msg] + if maybe_fnam in order: + # This looks like a file name. Collect all lines related to this message. 
+ while ( + i + 1 < len(messages) + and extract_possible_fnam_from_message(messages[i + 1]) not in order + and extract_fnam_from_message(messages[i + 1]) is None + and not messages[i + 1].startswith("mypy: ") + ): + i += 1 + group.append(messages[i]) + groups.append((order.get(maybe_fnam, n), group)) + i += 1 + + groups = sorted(groups, key=lambda g: g[0]) + result = [] + for key, group in groups: + result.extend(group) + return result diff --git a/.venv/lib/python3.12/site-packages/mypy/sharedparse.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/sharedparse.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..d1479be Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/sharedparse.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/sharedparse.py b/.venv/lib/python3.12/site-packages/mypy/sharedparse.py new file mode 100644 index 0000000..71d1dee --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/sharedparse.py @@ -0,0 +1,114 @@ +from __future__ import annotations + +from typing import Final + +"""Shared logic between our three mypy parser files.""" + + +_NON_BINARY_MAGIC_METHODS: Final = { + "__abs__", + "__call__", + "__complex__", + "__contains__", + "__buffer__", + "__del__", + "__delattr__", + "__delitem__", + "__enter__", + "__exit__", + "__float__", + "__getattr__", + "__getattribute__", + "__getitem__", + "__hex__", + "__init__", + "__init_subclass__", + "__int__", + "__invert__", + "__iter__", + "__len__", + "__long__", + "__neg__", + "__new__", + "__oct__", + "__pos__", + "__release_buffer__", + "__repr__", + "__reversed__", + "__setattr__", + "__setitem__", + "__str__", +} + +MAGIC_METHODS_ALLOWING_KWARGS: Final = { + "__init__", + "__init_subclass__", + "__new__", + "__call__", + "__setattr__", +} + +BINARY_MAGIC_METHODS: Final = { + "__add__", + "__and__", + "__divmod__", + "__eq__", + "__floordiv__", + "__ge__", + "__gt__", + "__iadd__", + "__iand__", + "__idiv__", + "__ifloordiv__", + "__ilshift__", + "__imatmul__", + "__imod__", + "__imul__", + "__ior__", + "__ipow__", + "__irshift__", + "__isub__", + "__itruediv__", + "__ixor__", + "__le__", + "__lshift__", + "__lt__", + "__matmul__", + "__mod__", + "__mul__", + "__ne__", + "__or__", + "__pow__", + "__radd__", + "__rand__", + "__rdiv__", + "__rfloordiv__", + "__rlshift__", + "__rmatmul__", + "__rmod__", + "__rmul__", + "__ror__", + "__rpow__", + "__rrshift__", + "__rshift__", + "__rsub__", + "__rtruediv__", + "__rxor__", + "__sub__", + "__truediv__", + "__xor__", +} + +assert not (_NON_BINARY_MAGIC_METHODS & BINARY_MAGIC_METHODS) + +MAGIC_METHODS: Final = _NON_BINARY_MAGIC_METHODS | BINARY_MAGIC_METHODS + +MAGIC_METHODS_POS_ARGS_ONLY: Final = MAGIC_METHODS - MAGIC_METHODS_ALLOWING_KWARGS + + +def special_function_elide_names(name: str) -> bool: + return name in MAGIC_METHODS_POS_ARGS_ONLY + + +def argument_elide_name(name: str | None) -> bool: + return name is not None and name.startswith("__") and not name.endswith("__") diff --git a/.venv/lib/python3.12/site-packages/mypy/solve.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/solve.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..5738ad6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/solve.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/solve.py b/.venv/lib/python3.12/site-packages/mypy/solve.py new file mode 100644 index 0000000..fbbcac2 --- /dev/null +++ 
b/.venv/lib/python3.12/site-packages/mypy/solve.py @@ -0,0 +1,598 @@ +"""Type inference constraint solving""" + +from __future__ import annotations + +from collections import defaultdict +from collections.abc import Iterable, Sequence +from typing_extensions import TypeAlias as _TypeAlias + +from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints, neg_op +from mypy.expandtype import expand_type +from mypy.graph_utils import prepare_sccs, strongly_connected_components, topsort +from mypy.join import join_type_list +from mypy.meet import meet_type_list, meet_types +from mypy.subtypes import is_subtype +from mypy.typeops import get_all_type_vars +from mypy.types import ( + AnyType, + Instance, + NoneType, + ParamSpecType, + ProperType, + TupleType, + Type, + TypeOfAny, + TypeVarId, + TypeVarLikeType, + TypeVarTupleType, + TypeVarType, + UninhabitedType, + UnionType, + UnpackType, + get_proper_type, +) +from mypy.typestate import type_state + +Bounds: _TypeAlias = "dict[TypeVarId, set[Type]]" +Graph: _TypeAlias = "set[tuple[TypeVarId, TypeVarId]]" +Solutions: _TypeAlias = "dict[TypeVarId, Type | None]" + + +def solve_constraints( + original_vars: Sequence[TypeVarLikeType], + constraints: list[Constraint], + strict: bool = True, + allow_polymorphic: bool = False, + skip_unsatisfied: bool = False, +) -> tuple[list[Type | None], list[TypeVarLikeType]]: + """Solve type constraints. + + Return the best type(s) for type variables; each type can be None if the value of + the variable could not be solved. + + If a variable has no constraints, if strict=True then arbitrarily + pick UninhabitedType as the value of the type variable. If strict=False, pick AnyType. + If allow_polymorphic=True, then use the full algorithm that can potentially return + free type variables in solutions (these require special care when applying). Otherwise, + use a simplified algorithm that just solves each type variable individually if possible. + + The skip_unsatisfied flag matches the same one in applytype.apply_generic_arguments(). + """ + vars = [tv.id for tv in original_vars] + if not vars: + return [], [] + + originals = {tv.id: tv for tv in original_vars} + extra_vars: list[TypeVarId] = [] + # Get additional type variables from generic actuals. + for c in constraints: + extra_vars.extend([v.id for v in c.extra_tvars if v.id not in vars + extra_vars]) + originals.update({v.id: v for v in c.extra_tvars if v.id not in originals}) + + if allow_polymorphic: + # Constraints inferred from unions require special handling in polymorphic inference. + constraints = skip_reverse_union_constraints(constraints) + + # Collect a list of constraints for each type variable. + cmap: dict[TypeVarId, list[Constraint]] = {tv: [] for tv in vars + extra_vars} + for con in constraints: + if con.type_var in vars + extra_vars: + cmap[con.type_var].append(con) + + if allow_polymorphic: + if constraints: + solutions, free_vars = solve_with_dependent( + vars + extra_vars, constraints, vars, originals + ) + else: + solutions = {} + free_vars = [] + else: + solutions = {} + free_vars = [] + for tv, cs in cmap.items(): + if not cs: + continue + lowers = [c.target for c in cs if c.op == SUPERTYPE_OF] + uppers = [c.target for c in cs if c.op == SUBTYPE_OF] + solution = solve_one(lowers, uppers) + + # Do not leak type variables in non-polymorphic solutions. 
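In the simplified (non-polymorphic) branch of solve_constraints(), each type variable is solved on its own: targets of SUPERTYPE_OF constraints become lower bounds, targets of SUBTYPE_OF constraints become upper bounds, and solve_one() joins the lowers, meets the uppers, and keeps the joined lower bound if it fits under the upper one. A toy sketch of the same idea over ordinary Python classes, with issubclass() standing in for mypy's subtype check; all names below are illustrative only:

class A: ...
class B(A): ...
class C(B): ...

def toy_join(lowers: list[type]) -> type:
    # Walk the first type's MRO and return the first class every lower bound fits under.
    for candidate in lowers[0].__mro__:
        if all(issubclass(t, candidate) for t in lowers):
            return candidate
    return object

def toy_solve_one(lowers: list[type], uppers: list[type]) -> type | None:
    bottom = toy_join(lowers) if lowers else None
    # Toy "meet": assume the upper bounds form a chain and take the most derived one.
    top = max(uppers, key=lambda t: len(t.__mro__)) if uppers else None
    if bottom is None:
        return top
    if top is None:
        return bottom
    return bottom if issubclass(bottom, top) else None

# T :> B, T :> C, T <: A  =>  join(B, C) == B and B is a subclass of A, so T = B.
assert toy_solve_one([B, C], [A]) is B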
+ if solution is None or not get_vars( + solution, [tv for tv in extra_vars if tv not in vars] + ): + solutions[tv] = solution + + res: list[Type | None] = [] + for v in vars: + if v in solutions: + res.append(solutions[v]) + else: + # No constraints for type variable -- 'UninhabitedType' is the most specific type. + candidate: Type + if strict: + candidate = UninhabitedType() + candidate.ambiguous = True + else: + candidate = AnyType(TypeOfAny.special_form) + res.append(candidate) + + if not free_vars and not skip_unsatisfied: + # Most of the validation for solutions is done in applytype.py, but here we can + # quickly test solutions w.r.t. to upper bounds, and use the latter (if possible), + # if solutions are actually not valid (due to poor inference context). + res = pre_validate_solutions(res, original_vars, constraints) + + return res, free_vars + + +def solve_with_dependent( + vars: list[TypeVarId], + constraints: list[Constraint], + original_vars: list[TypeVarId], + originals: dict[TypeVarId, TypeVarLikeType], +) -> tuple[Solutions, list[TypeVarLikeType]]: + """Solve set of constraints that may depend on each other, like T <: List[S]. + + The whole algorithm consists of five steps: + * Propagate via linear constraints and use secondary constraints to get transitive closure + * Find dependencies between type variables, group them in SCCs, and sort topologically + * Check that all SCC are intrinsically linear, we can't solve (express) T <: List[T] + * Variables in leaf SCCs that don't have constant bounds are free (choose one per SCC) + * Solve constraints iteratively starting from leaves, updating bounds after each step. + """ + graph, lowers, uppers = transitive_closure(vars, constraints) + + dmap = compute_dependencies(vars, graph, lowers, uppers) + sccs = list(strongly_connected_components(set(vars), dmap)) + if not all(check_linear(scc, lowers, uppers) for scc in sccs): + return {}, [] + raw_batches = list(topsort(prepare_sccs(sccs, dmap))) + + free_vars = [] + free_solutions = {} + for scc in raw_batches[0]: + # If there are no bounds on this SCC, then the only meaningful solution we can + # express, is that each variable is equal to a new free variable. For example, + # if we have T <: S, S <: U, we deduce: T = S = U = . + if all(not lowers[tv] and not uppers[tv] for tv in scc): + best_free = choose_free([originals[tv] for tv in scc], original_vars) + if best_free: + # TODO: failing to choose may cause leaking type variables, + # we need to fail gracefully instead. + free_vars.append(best_free.id) + free_solutions[best_free.id] = best_free + + # Update lowers/uppers with free vars, so these can now be used + # as valid solutions. + for l, u in graph: + if l in free_vars: + lowers[u].add(free_solutions[l]) + if u in free_vars: + uppers[l].add(free_solutions[u]) + + # Flatten the SCCs that are independent, we can solve them together, + # since we don't need to update any targets in between. + batches = [] + for batch in raw_batches: + next_bc = [] + for scc in batch: + next_bc.extend(list(scc)) + batches.append(next_bc) + + solutions: dict[TypeVarId, Type | None] = {} + for flat_batch in batches: + res = solve_iteratively(flat_batch, graph, lowers, uppers) + solutions.update(res) + return solutions, [free_solutions[tv] for tv in free_vars] + + +def solve_iteratively( + batch: list[TypeVarId], graph: Graph, lowers: Bounds, uppers: Bounds +) -> Solutions: + """Solve transitive closure sequentially, updating upper/lower bounds after each step. 
+ + Transitive closure is represented as a linear graph plus lower/upper bounds for each + type variable, see transitive_closure() docstring for details. + + We solve for type variables that appear in `batch`. If a bound is not constant (i.e. it + looks like T :> F[S, ...]), we substitute solutions found so far in the target F[S, ...] + after solving the batch. + + Importantly, after solving each variable in a batch, we move it from linear graph to + upper/lower bounds, this way we can guarantee consistency of solutions (see comment below + for an example when this is important). + """ + solutions = {} + s_batch = set(batch) + while s_batch: + for tv in sorted(s_batch, key=lambda x: x.raw_id): + if lowers[tv] or uppers[tv]: + solvable_tv = tv + break + else: + break + # Solve each solvable type variable separately. + s_batch.remove(solvable_tv) + result = solve_one(lowers[solvable_tv], uppers[solvable_tv]) + solutions[solvable_tv] = result + if result is None: + # TODO: support backtracking lower/upper bound choices and order within SCCs. + # (will require switching this function from iterative to recursive). + continue + + # Update the (transitive) bounds from graph if there is a solution. + # This is needed to guarantee solutions will never contradict the initial + # constraints. For example, consider {T <: S, T <: A, S :> B} with A :> B. + # If we would not update the uppers/lowers from graph, we would infer T = A, S = B + # which is not correct. + for l, u in graph.copy(): + if l == u: + continue + if l == solvable_tv: + lowers[u].add(result) + graph.remove((l, u)) + if u == solvable_tv: + uppers[l].add(result) + graph.remove((l, u)) + + # We can update uppers/lowers only once after solving the whole SCC, + # since uppers/lowers can't depend on type variables in the SCC + # (and we would reject such SCC as non-linear and therefore not solvable). + subs = {tv: s for (tv, s) in solutions.items() if s is not None} + for tv in lowers: + lowers[tv] = {expand_type(lt, subs) for lt in lowers[tv]} + for tv in uppers: + uppers[tv] = {expand_type(ut, subs) for ut in uppers[tv]} + return solutions + + +def _join_sorted_key(t: Type) -> int: + t = get_proper_type(t) + if isinstance(t, UnionType): + return -2 + if isinstance(t, NoneType): + return -1 + return 0 + + +def solve_one(lowers: Iterable[Type], uppers: Iterable[Type]) -> Type | None: + """Solve constraints by finding by using meets of upper bounds, and joins of lower bounds.""" + + candidate: Type | None = None + + # Filter out previous results of failed inference, they will only spoil the current pass... + new_uppers = [] + for u in uppers: + pu = get_proper_type(u) + if not isinstance(pu, UninhabitedType) or not pu.ambiguous: + new_uppers.append(u) + uppers = new_uppers + + # ...unless this is the only information we have, then we just pass it on. + lowers = list(lowers) + if not uppers and not lowers: + candidate = UninhabitedType() + candidate.ambiguous = True + return candidate + + bottom: Type | None = None + top: Type | None = None + + # Process each bound separately, and calculate the lower and upper + # bounds based on constraints. Note that we assume that the constraint + # targets do not have constraint references. + if type_state.infer_unions and lowers: + # This deviates from the general mypy semantics because + # recursive types are union-heavy in 95% of cases. + # Retain `None` when no bottoms were provided to avoid bogus `Never` inference. 
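The _join_sorted_key() helper defined above buckets lower bounds so that unions are joined first, then None, then everything else, keeping the join result independent of the incidental order of the bounds. A tiny sketch of the same bucketing with typing objects standing in for mypy's internal types; the function below is illustrative only, not part of mypy:

from typing import Union, get_origin

def toy_sort_key(t: object) -> int:
    # Unions first (-2), then None (-1), then everything else (0), mirroring _join_sorted_key().
    if get_origin(t) is Union:
        return -2
    if t is type(None):
        return -1
    return 0

lowers = [int, type(None), Union[int, str]]
assert [toy_sort_key(t) for t in lowers] == [0, -1, -2]
assert sorted(lowers, key=toy_sort_key) == [Union[int, str], type(None), int]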
+ bottom = UnionType.make_union(lowers) + else: + # The order of lowers is non-deterministic. + # We attempt to sort lowers because joins are non-associative. For instance: + # join(join(int, str), int | str) == join(object, int | str) == object + # join(int, join(str, int | str)) == join(int, int | str) == int | str + # Note that joins in theory should be commutative, but in practice some bugs mean this is + # also a source of non-deterministic type checking results. + sorted_lowers = sorted(lowers, key=_join_sorted_key) + if sorted_lowers: + bottom = join_type_list(sorted_lowers) + + for target in uppers: + if top is None: + top = target + else: + top = meet_types(top, target) + + p_top = get_proper_type(top) + p_bottom = get_proper_type(bottom) + if isinstance(p_top, AnyType) or isinstance(p_bottom, AnyType): + source_any = top if isinstance(p_top, AnyType) else bottom + assert isinstance(source_any, ProperType) and isinstance(source_any, AnyType) + return AnyType(TypeOfAny.from_another_any, source_any=source_any) + elif bottom is None: + if top: + candidate = top + else: + # No constraints for type variable + return None + elif top is None: + candidate = bottom + elif is_subtype(bottom, top): + candidate = bottom + else: + candidate = None + return candidate + + +def choose_free( + scc: list[TypeVarLikeType], original_vars: list[TypeVarId] +) -> TypeVarLikeType | None: + """Choose the best solution for an SCC containing only type variables. + + This is needed to preserve e.g. the upper bound in a situation like this: + def dec(f: Callable[[T], S]) -> Callable[[T], S]: ... + + @dec + def test(x: U) -> U: ... + + where U <: A. + """ + + if len(scc) == 1: + # Fast path, choice is trivial. + return scc[0] + + common_upper_bound = meet_type_list([t.upper_bound for t in scc]) + common_upper_bound_p = get_proper_type(common_upper_bound) + # We include None for when strict-optional is disabled. + if isinstance(common_upper_bound_p, (UninhabitedType, NoneType)): + # This will cause to infer Never, which is better than a free TypeVar + # that has an upper bound Never. + return None + + values: list[Type] = [] + for tv in scc: + if isinstance(tv, TypeVarType) and tv.values: + if values: + # It is too tricky to support multiple TypeVars with values + # within the same SCC. + return None + values = tv.values.copy() + + if values and not is_trivial_bound(common_upper_bound_p): + # If there are both values and upper bound present, we give up, + # since type variables having both are not supported. + return None + + # For convenience with current type application machinery, we use a stable + # choice that prefers the original type variables (not polymorphic ones) in SCC. 
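The stable choice mentioned just above is implemented by the min(...) call that follows: original type variables sort before polymorphic ones, with the raw id as a tie-breaker. A tiny sketch of that ordering with plain ints standing in for TypeVarIds; the helper name is made up for illustration:

def pick_representative(scc: list[int], original_vars: list[int]) -> int:
    # Prefer ids of original type variables; break ties by the raw id, like choose_free().
    return min(scc, key=lambda tv: (tv not in original_vars, tv))

# 7 exists only as a polymorphic variable; 3 and 5 are original, so the smallest original id wins.
assert pick_representative([7, 5, 3], original_vars=[3, 5]) == 3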
+ best = min(scc, key=lambda x: (x.id not in original_vars, x.id.raw_id)) + if isinstance(best, TypeVarType): + return best.copy_modified(values=values, upper_bound=common_upper_bound) + if is_trivial_bound(common_upper_bound_p, allow_tuple=True): + # TODO: support more cases for ParamSpecs/TypeVarTuples + return best + return None + + +def is_trivial_bound(tp: ProperType, allow_tuple: bool = False) -> bool: + if isinstance(tp, Instance) and tp.type.fullname == "builtins.tuple": + return allow_tuple and is_trivial_bound(get_proper_type(tp.args[0])) + return isinstance(tp, Instance) and tp.type.fullname == "builtins.object" + + +def find_linear(c: Constraint) -> tuple[bool, TypeVarId | None]: + """Find out if this constraint represent a linear relationship, return target id if yes.""" + if isinstance(c.origin_type_var, TypeVarType): + if isinstance(c.target, TypeVarType): + return True, c.target.id + if isinstance(c.origin_type_var, ParamSpecType): + if isinstance(c.target, ParamSpecType) and not c.target.prefix.arg_types: + return True, c.target.id + if isinstance(c.origin_type_var, TypeVarTupleType): + target = get_proper_type(c.target) + if isinstance(target, TupleType) and len(target.items) == 1: + item = target.items[0] + if isinstance(item, UnpackType) and isinstance(item.type, TypeVarTupleType): + return True, item.type.id + return False, None + + +def transitive_closure( + tvars: list[TypeVarId], constraints: list[Constraint] +) -> tuple[Graph, Bounds, Bounds]: + """Find transitive closure for given constraints on type variables. + + Transitive closure gives maximal set of lower/upper bounds for each type variable, + such that we cannot deduce any further bounds by chaining other existing bounds. + + The transitive closure is represented by: + * A set of lower and upper bounds for each type variable, where only constant and + non-linear terms are included in the bounds. + * A graph of linear constraints between type variables (represented as a set of pairs) + Such separation simplifies reasoning, and allows an efficient and simple incremental + transitive closure algorithm that we use here. + + For example if we have initial constraints [T <: S, S <: U, U <: int], the transitive + closure is given by: + * {} <: T <: {int} + * {} <: S <: {int} + * {} <: U <: {int} + * {T <: S, S <: U, T <: U} + """ + uppers: Bounds = defaultdict(set) + lowers: Bounds = defaultdict(set) + graph: Graph = {(tv, tv) for tv in tvars} + + remaining = set(constraints) + while remaining: + c = remaining.pop() + # Note that ParamSpec constraint P <: Q may be considered linear only if Q has no prefix, + # for cases like P <: Concatenate[T, Q] we should consider this non-linear and put {P} and + # {T, Q} into separate SCCs. Similarly, Ts <: Tuple[*Us] considered linear, while + # Ts <: Tuple[*Us, U] is non-linear. 
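The docstring of transitive_closure() above describes the closure as a set of (lower, upper) pairs plus constant bounds per variable; the edge-insertion step that follows keeps that pair set transitively closed whenever a new linear constraint is added. A self-contained sketch of just the reachability update, with plain ints standing in for TypeVarIds; the helper name is made up:

Edge = tuple[int, int]

def add_edge(graph: set[Edge], tvars: set[int], lower: int, upper: int) -> None:
    # Mirror the "graph |= {...}" update below: everything that already reaches `lower`
    # also reaches everything reachable from `upper`.
    graph |= {
        (l, u)
        for l in tvars
        for u in tvars
        if (l, lower) in graph and (upper, u) in graph
    }

tvars = {1, 2, 3}
graph: set[Edge] = {(tv, tv) for tv in tvars}  # reflexive edges, as in transitive_closure()
add_edge(graph, tvars, 1, 2)  # T1 <: T2
add_edge(graph, tvars, 2, 3)  # T2 <: T3
assert (1, 3) in graph        # the closure also records T1 <: T3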
+ is_linear, target_id = find_linear(c) + if is_linear and target_id in tvars: + assert target_id is not None + if c.op == SUBTYPE_OF: + lower, upper = c.type_var, target_id + else: + lower, upper = target_id, c.type_var + if (lower, upper) in graph: + continue + graph |= { + (l, u) for l in tvars for u in tvars if (l, lower) in graph and (upper, u) in graph + } + for u in tvars: + if (upper, u) in graph: + lowers[u] |= lowers[lower] + for l in tvars: + if (l, lower) in graph: + uppers[l] |= uppers[upper] + for lt in lowers[lower]: + for ut in uppers[upper]: + add_secondary_constraints(remaining, lt, ut) + elif c.op == SUBTYPE_OF: + if c.target in uppers[c.type_var]: + continue + for l in tvars: + if (l, c.type_var) in graph: + uppers[l].add(c.target) + for lt in lowers[c.type_var]: + add_secondary_constraints(remaining, lt, c.target) + else: + assert c.op == SUPERTYPE_OF + if c.target in lowers[c.type_var]: + continue + for u in tvars: + if (c.type_var, u) in graph: + lowers[u].add(c.target) + for ut in uppers[c.type_var]: + add_secondary_constraints(remaining, c.target, ut) + return graph, lowers, uppers + + +def add_secondary_constraints(cs: set[Constraint], lower: Type, upper: Type) -> None: + """Add secondary constraints inferred between lower and upper (in place).""" + if isinstance(get_proper_type(upper), UnionType) and isinstance( + get_proper_type(lower), UnionType + ): + # When both types are unions, this can lead to inferring spurious constraints, + # for example Union[T, int] <: S <: Union[T, int] may infer T <: int. + # To avoid this, just skip them for now. + return + # TODO: what if secondary constraints result in inference against polymorphic actual? + cs.update(set(infer_constraints(lower, upper, SUBTYPE_OF))) + cs.update(set(infer_constraints(upper, lower, SUPERTYPE_OF))) + + +def compute_dependencies( + tvars: list[TypeVarId], graph: Graph, lowers: Bounds, uppers: Bounds +) -> dict[TypeVarId, list[TypeVarId]]: + """Compute dependencies between type variables induced by constraints. + + If we have a constraint like T <: List[S], we say that T depends on S, since + we will need to solve for S first before we can solve for T. + """ + res = {} + for tv in tvars: + deps = set() + for lt in lowers[tv]: + deps |= get_vars(lt, tvars) + for ut in uppers[tv]: + deps |= get_vars(ut, tvars) + for other in tvars: + if other == tv: + continue + if (tv, other) in graph or (other, tv) in graph: + deps.add(other) + res[tv] = list(deps) + return res + + +def check_linear(scc: set[TypeVarId], lowers: Bounds, uppers: Bounds) -> bool: + """Check there are only linear constraints between type variables in SCC. + + Linear are constraints like T <: S (while T <: F[S] are non-linear). + """ + for tv in scc: + if any(get_vars(lt, list(scc)) for lt in lowers[tv]): + return False + if any(get_vars(ut, list(scc)) for ut in uppers[tv]): + return False + return True + + +def skip_reverse_union_constraints(cs: list[Constraint]) -> list[Constraint]: + """Avoid ambiguities for constraints inferred from unions during polymorphic inference. + + Polymorphic inference implicitly relies on assumption that a reverse of a linear constraint + is a linear constraint. This is however not true in presence of union types, for example + T :> Union[S, int] vs S <: T. Trying to solve such constraints would be detected ambiguous + as (T, S) form a non-linear SCC. However, simply removing the linear part results in a valid + solution T = Union[S, int], S = . 
A similar scenario is when we get T <: Union[T, int], + such constraints carry no information, and will equally confuse linearity check. + + TODO: a cleaner solution may be to avoid inferring such constraints in first place, but + this would require passing around a flag through all infer_constraints() calls. + """ + reverse_union_cs = set() + for c in cs: + p_target = get_proper_type(c.target) + if isinstance(p_target, UnionType): + for item in p_target.items: + if isinstance(item, TypeVarType): + if item == c.origin_type_var and c.op == SUBTYPE_OF: + reverse_union_cs.add(c) + continue + # These two forms are semantically identical, but are different from + # the point of view of Constraint.__eq__(). + reverse_union_cs.add(Constraint(item, neg_op(c.op), c.origin_type_var)) + reverse_union_cs.add(Constraint(c.origin_type_var, c.op, item)) + return [c for c in cs if c not in reverse_union_cs] + + +def get_vars(target: Type, vars: list[TypeVarId]) -> set[TypeVarId]: + """Find type variables for which we are solving in a target type.""" + return {tv.id for tv in get_all_type_vars(target)} & set(vars) + + +def pre_validate_solutions( + solutions: list[Type | None], + original_vars: Sequence[TypeVarLikeType], + constraints: list[Constraint], +) -> list[Type | None]: + """Check is each solution satisfies the upper bound of the corresponding type variable. + + If it doesn't satisfy the bound, check if bound itself satisfies all constraints, and + if yes, use it instead as a fallback solution. + """ + new_solutions: list[Type | None] = [] + for t, s in zip(original_vars, solutions): + if is_callable_protocol(t.upper_bound): + # This is really ad-hoc, but a proper fix would be much more complex, + # and otherwise this may cause crash in a relatively common scenario. + new_solutions.append(s) + continue + if s is not None and not is_subtype(s, t.upper_bound): + bound_satisfies_all = True + for c in constraints: + if c.op == SUBTYPE_OF and not is_subtype(t.upper_bound, c.target): + bound_satisfies_all = False + break + if c.op == SUPERTYPE_OF and not is_subtype(c.target, t.upper_bound): + bound_satisfies_all = False + break + if bound_satisfies_all: + new_solutions.append(t.upper_bound) + continue + new_solutions.append(s) + return new_solutions + + +def is_callable_protocol(t: Type) -> bool: + proper_t = get_proper_type(t) + if isinstance(proper_t, Instance) and proper_t.type.is_protocol: + return "__call__" in proper_t.type.protocol_members + return False diff --git a/.venv/lib/python3.12/site-packages/mypy/split_namespace.py b/.venv/lib/python3.12/site-packages/mypy/split_namespace.py new file mode 100644 index 0000000..d1720cc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/split_namespace.py @@ -0,0 +1,35 @@ +"""Split namespace for argparse to allow separating options by prefix. + +We use this to direct some options to an Options object and some to a +regular namespace. 
+""" + +# In its own file largely because mypyc doesn't support its use of +# __getattr__/__setattr__ and has some issues with __dict__ + +from __future__ import annotations + +import argparse +from typing import Any + + +class SplitNamespace(argparse.Namespace): + def __init__(self, standard_namespace: object, alt_namespace: object, alt_prefix: str) -> None: + self.__dict__["_standard_namespace"] = standard_namespace + self.__dict__["_alt_namespace"] = alt_namespace + self.__dict__["_alt_prefix"] = alt_prefix + + def _get(self) -> tuple[Any, Any]: + return (self._standard_namespace, self._alt_namespace) + + def __setattr__(self, name: str, value: Any) -> None: + if name.startswith(self._alt_prefix): + setattr(self._alt_namespace, name[len(self._alt_prefix) :], value) + else: + setattr(self._standard_namespace, name, value) + + def __getattr__(self, name: str) -> Any: + if name.startswith(self._alt_prefix): + return getattr(self._alt_namespace, name[len(self._alt_prefix) :]) + else: + return getattr(self._standard_namespace, name) diff --git a/.venv/lib/python3.12/site-packages/mypy/state.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/state.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..d759fea Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/state.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/state.py b/.venv/lib/python3.12/site-packages/mypy/state.py new file mode 100644 index 0000000..a3055bf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/state.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +from collections.abc import Iterator +from contextlib import contextmanager +from typing import Final + +# These are global mutable state. Don't add anything here unless there's a very +# good reason. + + +class StrictOptionalState: + # Wrap this in a class since it's faster that using a module-level attribute. 
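The SplitNamespace class above routes argparse attributes to one of two destination namespaces based on a name prefix. A minimal usage sketch; the flag names and the SimpleNamespace destinations are made up for illustration (mypy's own CLI wires this up with its Options object):

import argparse
from types import SimpleNamespace

from mypy.split_namespace import SplitNamespace

# Attributes whose dest carries the "special:" prefix land on `alt`; everything else on `standard`.
standard = SimpleNamespace()
alt = SimpleNamespace()
ns = SplitNamespace(standard, alt, "special:")

parser = argparse.ArgumentParser()
parser.add_argument("--verbose", dest="verbose", action="store_true")
parser.add_argument("--cache-dir", dest="special:cache_dir", default=".cache")
parser.parse_args(["--verbose"], namespace=ns)

assert standard.verbose is True
assert alt.cache_dir == ".cache"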
+ + def __init__(self, strict_optional: bool) -> None: + # Value varies by file being processed + self.strict_optional = strict_optional + + @contextmanager + def strict_optional_set(self, value: bool) -> Iterator[None]: + saved = self.strict_optional + self.strict_optional = value + try: + yield + finally: + self.strict_optional = saved + + +state: Final = StrictOptionalState(strict_optional=True) +find_occurrences: tuple[str, str] | None = None diff --git a/.venv/lib/python3.12/site-packages/mypy/stats.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/stats.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..57db8cc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/stats.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/stats.py b/.venv/lib/python3.12/site-packages/mypy/stats.py new file mode 100644 index 0000000..e3499d2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/stats.py @@ -0,0 +1,494 @@ +"""Utilities for calculating and reporting statistics about types.""" + +from __future__ import annotations + +import os +from collections import Counter +from collections.abc import Iterator +from contextlib import contextmanager +from typing import Final + +from mypy import nodes +from mypy.argmap import map_formals_to_actuals +from mypy.nodes import ( + AssignmentExpr, + AssignmentStmt, + BreakStmt, + BytesExpr, + CallExpr, + ClassDef, + ComparisonExpr, + ComplexExpr, + ContinueStmt, + EllipsisExpr, + Expression, + ExpressionStmt, + FloatExpr, + FuncDef, + Import, + ImportAll, + ImportFrom, + IndexExpr, + IntExpr, + MemberExpr, + MypyFile, + NameExpr, + Node, + OpExpr, + PassStmt, + RefExpr, + StrExpr, + TypeApplication, + UnaryExpr, + YieldFromExpr, +) +from mypy.traverser import TraverserVisitor +from mypy.type_visitor import ANY_STRATEGY, BoolTypeQuery +from mypy.typeanal import collect_all_inner_types +from mypy.types import ( + AnyType, + CallableType, + FunctionLike, + Instance, + TupleType, + Type, + TypeOfAny, + TypeVarType, + get_proper_type, + get_proper_types, +) +from mypy.util import correct_relative_import + +TYPE_EMPTY: Final = 0 +TYPE_UNANALYZED: Final = 1 # type of non-typechecked code +TYPE_PRECISE: Final = 2 +TYPE_IMPRECISE: Final = 3 +TYPE_ANY: Final = 4 + +precision_names: Final = ["empty", "unanalyzed", "precise", "imprecise", "any"] + + +class StatisticsVisitor(TraverserVisitor): + def __init__( + self, + inferred: bool, + filename: str, + modules: dict[str, MypyFile], + typemap: dict[Expression, Type] | None = None, + all_nodes: bool = False, + visit_untyped_defs: bool = True, + ) -> None: + self.inferred = inferred + self.filename = filename + self.modules = modules + self.typemap = typemap + self.all_nodes = all_nodes + self.visit_untyped_defs = visit_untyped_defs + + self.num_precise_exprs = 0 + self.num_imprecise_exprs = 0 + self.num_any_exprs = 0 + + self.num_simple_types = 0 + self.num_generic_types = 0 + self.num_tuple_types = 0 + self.num_function_types = 0 + self.num_typevar_types = 0 + self.num_complex_types = 0 + self.num_any_types = 0 + + self.line = -1 + + self.line_map: dict[int, int] = {} + + self.type_of_any_counter: Counter[int] = Counter() + self.any_line_map: dict[int, list[AnyType]] = {} + + # For each scope (top level/function), whether the scope was type checked + # (annotated function). 
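The strict_optional_set() context manager above is the intended way to flip strict-optional handling for a single file and restore the previous value afterwards. A minimal usage sketch, assuming mypy is importable from this environment:

from mypy.state import state

# Temporarily disable strict-optional handling, then restore the previous setting.
assert state.strict_optional is True
with state.strict_optional_set(False):
    assert state.strict_optional is False
assert state.strict_optional is True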
+ # + # TODO: Handle --check-untyped-defs + self.checked_scopes = [True] + + self.output: list[str] = [] + + TraverserVisitor.__init__(self) + + def visit_mypy_file(self, o: MypyFile) -> None: + self.cur_mod_node = o + self.cur_mod_id = o.fullname + super().visit_mypy_file(o) + + def visit_import_from(self, imp: ImportFrom) -> None: + self.process_import(imp) + + def visit_import_all(self, imp: ImportAll) -> None: + self.process_import(imp) + + def process_import(self, imp: ImportFrom | ImportAll) -> None: + import_id, ok = correct_relative_import( + self.cur_mod_id, imp.relative, imp.id, self.cur_mod_node.is_package_init_file() + ) + if ok and import_id in self.modules: + kind = TYPE_PRECISE + else: + kind = TYPE_ANY + self.record_line(imp.line, kind) + + def visit_import(self, imp: Import) -> None: + if all(id in self.modules for id, _ in imp.ids): + kind = TYPE_PRECISE + else: + kind = TYPE_ANY + self.record_line(imp.line, kind) + + def visit_func_def(self, o: FuncDef) -> None: + with self.enter_scope(o): + self.line = o.line + if len(o.expanded) > 1 and o.expanded != [o] * len(o.expanded): + if o in o.expanded: + print( + "{}:{}: ERROR: cycle in function expansion; skipping".format( + self.filename, o.line + ) + ) + return + for defn in o.expanded: + assert isinstance(defn, FuncDef) + self.visit_func_def(defn) + else: + if o.type: + assert isinstance(o.type, CallableType) + sig = o.type + arg_types = sig.arg_types + if sig.arg_names and sig.arg_names[0] == "self" and not self.inferred: + arg_types = arg_types[1:] + for arg in arg_types: + self.type(arg) + self.type(sig.ret_type) + elif self.all_nodes: + self.record_line(self.line, TYPE_ANY) + if not o.is_dynamic() or self.visit_untyped_defs: + super().visit_func_def(o) + + @contextmanager + def enter_scope(self, o: FuncDef) -> Iterator[None]: + self.checked_scopes.append(o.type is not None and self.checked_scopes[-1]) + yield None + self.checked_scopes.pop() + + def is_checked_scope(self) -> bool: + return self.checked_scopes[-1] + + def visit_class_def(self, o: ClassDef) -> None: + self.record_line(o.line, TYPE_PRECISE) # TODO: Look at base classes + # Override this method because we don't want to analyze base_type_exprs (base_type_exprs + # are base classes in a class declaration). + # While base_type_exprs are technically expressions, type analyzer does not visit them and + # they are not in the typemap. + for d in o.decorators: + d.accept(self) + o.defs.accept(self) + + def visit_type_application(self, o: TypeApplication) -> None: + self.line = o.line + for t in o.types: + self.type(t) + super().visit_type_application(o) + + def visit_assignment_stmt(self, o: AssignmentStmt) -> None: + self.line = o.line + if isinstance(o.rvalue, nodes.CallExpr) and isinstance( + o.rvalue.analyzed, nodes.TypeVarExpr + ): + # Type variable definition -- not a real assignment. + return + if o.type: + # If there is an explicit type, don't visit the l.h.s. as an expression + # to avoid double-counting and mishandling special forms. 
+ self.type(o.type) + o.rvalue.accept(self) + return + elif self.inferred and not self.all_nodes: + # if self.all_nodes is set, lvalues will be visited later + for lvalue in o.lvalues: + if isinstance(lvalue, nodes.TupleExpr): + items = lvalue.items + else: + items = [lvalue] + for item in items: + if isinstance(item, RefExpr) and item.is_inferred_def: + if self.typemap is not None: + self.type(self.typemap.get(item)) + super().visit_assignment_stmt(o) + + def visit_expression_stmt(self, o: ExpressionStmt) -> None: + if isinstance(o.expr, (StrExpr, BytesExpr)): + # Docstring + self.record_line(o.line, TYPE_EMPTY) + else: + super().visit_expression_stmt(o) + + def visit_pass_stmt(self, o: PassStmt) -> None: + self.record_precise_if_checked_scope(o) + + def visit_break_stmt(self, o: BreakStmt) -> None: + self.record_precise_if_checked_scope(o) + + def visit_continue_stmt(self, o: ContinueStmt) -> None: + self.record_precise_if_checked_scope(o) + + def visit_name_expr(self, o: NameExpr) -> None: + if o.fullname in ("builtins.None", "builtins.True", "builtins.False", "builtins.Ellipsis"): + self.record_precise_if_checked_scope(o) + else: + self.process_node(o) + super().visit_name_expr(o) + + def visit_yield_from_expr(self, o: YieldFromExpr) -> None: + if o.expr: + o.expr.accept(self) + + def visit_call_expr(self, o: CallExpr) -> None: + self.process_node(o) + if o.analyzed: + o.analyzed.accept(self) + else: + o.callee.accept(self) + for a in o.args: + a.accept(self) + self.record_call_target_precision(o) + + def record_call_target_precision(self, o: CallExpr) -> None: + """Record precision of formal argument types used in a call.""" + if not self.typemap or o.callee not in self.typemap: + # Type not available. + return + callee_type = get_proper_type(self.typemap[o.callee]) + if isinstance(callee_type, CallableType): + self.record_callable_target_precision(o, callee_type) + else: + pass # TODO: Handle overloaded functions, etc. + + def record_callable_target_precision(self, o: CallExpr, callee: CallableType) -> None: + """Record imprecision caused by callee argument types. + + This only considers arguments passed in a call expression. Arguments + with default values that aren't provided in a call arguably don't + contribute to typing imprecision at the *call site* (but they + contribute at the function definition). 
+ """ + assert self.typemap + typemap = self.typemap + actual_to_formal = map_formals_to_actuals( + o.arg_kinds, + o.arg_names, + callee.arg_kinds, + callee.arg_names, + lambda n: typemap[o.args[n]], + ) + for formals in actual_to_formal: + for n in formals: + formal = get_proper_type(callee.arg_types[n]) + if isinstance(formal, AnyType): + self.record_line(o.line, TYPE_ANY) + elif is_imprecise(formal): + self.record_line(o.line, TYPE_IMPRECISE) + + def visit_member_expr(self, o: MemberExpr) -> None: + self.process_node(o) + super().visit_member_expr(o) + + def visit_op_expr(self, o: OpExpr) -> None: + self.process_node(o) + super().visit_op_expr(o) + + def visit_comparison_expr(self, o: ComparisonExpr) -> None: + self.process_node(o) + super().visit_comparison_expr(o) + + def visit_index_expr(self, o: IndexExpr) -> None: + self.process_node(o) + super().visit_index_expr(o) + + def visit_assignment_expr(self, o: AssignmentExpr) -> None: + self.process_node(o) + super().visit_assignment_expr(o) + + def visit_unary_expr(self, o: UnaryExpr) -> None: + self.process_node(o) + super().visit_unary_expr(o) + + def visit_str_expr(self, o: StrExpr) -> None: + self.record_precise_if_checked_scope(o) + + def visit_bytes_expr(self, o: BytesExpr) -> None: + self.record_precise_if_checked_scope(o) + + def visit_int_expr(self, o: IntExpr) -> None: + self.record_precise_if_checked_scope(o) + + def visit_float_expr(self, o: FloatExpr) -> None: + self.record_precise_if_checked_scope(o) + + def visit_complex_expr(self, o: ComplexExpr) -> None: + self.record_precise_if_checked_scope(o) + + def visit_ellipsis(self, o: EllipsisExpr) -> None: + self.record_precise_if_checked_scope(o) + + # Helpers + + def process_node(self, node: Expression) -> None: + if self.all_nodes: + if self.typemap is not None: + self.line = node.line + self.type(self.typemap.get(node)) + + def record_precise_if_checked_scope(self, node: Node) -> None: + if isinstance(node, Expression) and self.typemap and node not in self.typemap: + kind = TYPE_UNANALYZED + elif self.is_checked_scope(): + kind = TYPE_PRECISE + else: + kind = TYPE_ANY + self.record_line(node.line, kind) + + def type(self, t: Type | None) -> None: + t = get_proper_type(t) + + if not t: + # If an expression does not have a type, it is often due to dead code. + # Don't count these because there can be an unanalyzed value on a line with other + # analyzed expressions, which overwrite the TYPE_UNANALYZED. + self.record_line(self.line, TYPE_UNANALYZED) + return + + if isinstance(t, AnyType) and is_special_form_any(t): + # TODO: What if there is an error in special form definition? + self.record_line(self.line, TYPE_PRECISE) + return + + if isinstance(t, AnyType): + self.log(" !! Any type around line %d" % self.line) + self.num_any_exprs += 1 + self.record_line(self.line, TYPE_ANY) + elif (not self.all_nodes and is_imprecise(t)) or (self.all_nodes and is_imprecise2(t)): + self.log(" !! 
Imprecise type around line %d" % self.line) + self.num_imprecise_exprs += 1 + self.record_line(self.line, TYPE_IMPRECISE) + else: + self.num_precise_exprs += 1 + self.record_line(self.line, TYPE_PRECISE) + + for typ in get_proper_types(collect_all_inner_types(t)) + [t]: + if isinstance(typ, AnyType): + typ = get_original_any(typ) + if is_special_form_any(typ): + continue + self.type_of_any_counter[typ.type_of_any] += 1 + self.num_any_types += 1 + if self.line in self.any_line_map: + self.any_line_map[self.line].append(typ) + else: + self.any_line_map[self.line] = [typ] + elif isinstance(typ, Instance): + if typ.args: + if any(is_complex(arg) for arg in typ.args): + self.num_complex_types += 1 + else: + self.num_generic_types += 1 + else: + self.num_simple_types += 1 + elif isinstance(typ, FunctionLike): + self.num_function_types += 1 + elif isinstance(typ, TupleType): + if any(is_complex(item) for item in typ.items): + self.num_complex_types += 1 + else: + self.num_tuple_types += 1 + elif isinstance(typ, TypeVarType): + self.num_typevar_types += 1 + + def log(self, string: str) -> None: + self.output.append(string) + + def record_line(self, line: int, precision: int) -> None: + self.line_map[line] = max(precision, self.line_map.get(line, TYPE_EMPTY)) + + +def dump_type_stats( + tree: MypyFile, + path: str, + modules: dict[str, MypyFile], + inferred: bool = False, + typemap: dict[Expression, Type] | None = None, +) -> None: + if is_special_module(path): + return + print(path) + visitor = StatisticsVisitor(inferred, filename=tree.fullname, modules=modules, typemap=typemap) + tree.accept(visitor) + for line in visitor.output: + print(line) + print(" ** precision **") + print(" precise ", visitor.num_precise_exprs) + print(" imprecise", visitor.num_imprecise_exprs) + print(" any ", visitor.num_any_exprs) + print(" ** kinds **") + print(" simple ", visitor.num_simple_types) + print(" generic ", visitor.num_generic_types) + print(" function ", visitor.num_function_types) + print(" tuple ", visitor.num_tuple_types) + print(" TypeVar ", visitor.num_typevar_types) + print(" complex ", visitor.num_complex_types) + print(" any ", visitor.num_any_types) + + +def is_special_module(path: str) -> bool: + return os.path.basename(path) in ("abc.pyi", "typing.pyi", "builtins.pyi") + + +def is_imprecise(t: Type) -> bool: + return t.accept(HasAnyQuery()) + + +class HasAnyQuery(BoolTypeQuery): + def __init__(self) -> None: + super().__init__(ANY_STRATEGY) + + def visit_any(self, t: AnyType) -> bool: + return not is_special_form_any(t) + + +def is_imprecise2(t: Type) -> bool: + return t.accept(HasAnyQuery2()) + + +class HasAnyQuery2(HasAnyQuery): + def visit_callable_type(self, t: CallableType) -> bool: + # We don't want to flag references to functions with some Any + # argument types (etc.) since they generally don't mean trouble. 
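record_line() above keeps the maximum category seen on each line, so a line containing both a precise and an Any expression is counted as Any. A tiny standalone sketch of that aggregation; the constants mirror the ones defined at the top of stats.py, while the function and dict names are made up:

TYPE_EMPTY, TYPE_UNANALYZED, TYPE_PRECISE, TYPE_IMPRECISE, TYPE_ANY = range(5)

line_precision: dict[int, int] = {}

def record(line: int, precision: int) -> None:
    # Keep the maximum category per line, as StatisticsVisitor.record_line() does.
    line_precision[line] = max(precision, line_precision.get(line, TYPE_EMPTY))

record(10, TYPE_PRECISE)
record(10, TYPE_ANY)
assert line_precision[10] == TYPE_ANY  # one Any expression makes the whole line count as Any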
+ return False + + +def is_generic(t: Type) -> bool: + t = get_proper_type(t) + return isinstance(t, Instance) and bool(t.args) + + +def is_complex(t: Type) -> bool: + t = get_proper_type(t) + return is_generic(t) or isinstance(t, (FunctionLike, TupleType, TypeVarType)) + + +def is_special_form_any(t: AnyType) -> bool: + return get_original_any(t).type_of_any == TypeOfAny.special_form + + +def get_original_any(t: AnyType) -> AnyType: + if t.type_of_any == TypeOfAny.from_another_any: + assert t.source_any + assert t.source_any.type_of_any != TypeOfAny.from_another_any + t = t.source_any + return t diff --git a/.venv/lib/python3.12/site-packages/mypy/strconv.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/strconv.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..fbc6f6a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/strconv.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/strconv.py b/.venv/lib/python3.12/site-packages/mypy/strconv.py new file mode 100644 index 0000000..168a8bc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/strconv.py @@ -0,0 +1,683 @@ +"""Conversion of parse tree nodes to strings.""" + +from __future__ import annotations + +import os +import re +from collections.abc import Sequence +from typing import TYPE_CHECKING, Any + +import mypy.nodes +from mypy.options import Options +from mypy.util import IdMapper, short_type +from mypy.visitor import NodeVisitor + +if TYPE_CHECKING: + import mypy.patterns + import mypy.types + + +class StrConv(NodeVisitor[str]): + """Visitor for converting a node to a human-readable string. + + For example, an MypyFile node from program '1' is converted into + something like this: + + MypyFile:1( + fnam + ExpressionStmt:1( + IntExpr(1))) + """ + + __slots__ = ["options", "show_ids", "id_mapper"] + + def __init__(self, *, show_ids: bool = False, options: Options) -> None: + self.options = options + self.show_ids = show_ids + self.id_mapper: IdMapper | None = None + if show_ids: + self.id_mapper = IdMapper() + + def stringify_type(self, t: mypy.types.Type) -> str: + import mypy.types + + return t.accept(mypy.types.TypeStrVisitor(id_mapper=self.id_mapper, options=self.options)) + + def get_id(self, o: object) -> int | None: + if self.id_mapper: + return self.id_mapper.id(o) + return None + + def format_id(self, o: object) -> str: + if self.id_mapper: + return f"<{self.get_id(o)}>" + else: + return "" + + def dump(self, nodes: Sequence[object], obj: mypy.nodes.Context) -> str: + """Convert a list of items to a multiline pretty-printed string. + + The tag is produced from the type name of obj and its line + number. See mypy.util.dump_tagged for a description of the nodes + argument. + """ + tag = short_type(obj) + ":" + str(obj.line) + if self.show_ids: + assert self.id_mapper is not None + tag += f"<{self.get_id(obj)}>" + return dump_tagged(nodes, tag, self) + + def func_helper(self, o: mypy.nodes.FuncItem) -> list[object]: + """Return a list in a format suitable for dump() that represents the + arguments and the body of a function. The caller can then decorate the + array with information specific to methods, global functions or + anonymous functions. 
+ """ + args: list[mypy.nodes.Var | tuple[str, list[mypy.nodes.Node]]] = [] + extra: list[tuple[str, list[mypy.nodes.Var]]] = [] + for arg in o.arguments: + kind: mypy.nodes.ArgKind = arg.kind + if kind.is_required(): + args.append(arg.variable) + elif kind.is_optional(): + assert arg.initializer is not None + args.append(("default", [arg.variable, arg.initializer])) + elif kind == mypy.nodes.ARG_STAR: + extra.append(("VarArg", [arg.variable])) + elif kind == mypy.nodes.ARG_STAR2: + extra.append(("DictVarArg", [arg.variable])) + a: list[Any] = [] + if o.type_args: + for p in o.type_args: + a.append(self.type_param(p)) + if args: + a.append(("Args", args)) + if o.type: + a.append(o.type) + if o.is_generator: + a.append("Generator") + a.extend(extra) + a.append(o.body) + return a + + # Top-level structures + + def visit_mypy_file(self, o: mypy.nodes.MypyFile) -> str: + # Skip implicit definitions. + a: list[Any] = [o.defs] + if o.is_bom: + a.insert(0, "BOM") + # Omit path to special file with name "main". This is used to simplify + # test case descriptions; the file "main" is used by default in many + # test cases. + if o.path != "main": + # Insert path. Normalize directory separators to / to unify test + # case# output in all platforms. + a.insert(0, o.path.replace(os.getcwd() + os.sep, "").replace(os.sep, "/")) + if o.ignored_lines: + a.append("IgnoredLines(%s)" % ", ".join(str(line) for line in sorted(o.ignored_lines))) + return self.dump(a, o) + + def visit_import(self, o: mypy.nodes.Import) -> str: + a = [] + for id, as_id in o.ids: + if as_id is not None: + a.append(f"{id} : {as_id}") + else: + a.append(id) + return f"Import:{o.line}({', '.join(a)})" + + def visit_import_from(self, o: mypy.nodes.ImportFrom) -> str: + a = [] + for name, as_name in o.names: + if as_name is not None: + a.append(f"{name} : {as_name}") + else: + a.append(name) + return f"ImportFrom:{o.line}({'.' * o.relative + o.id}, [{', '.join(a)}])" + + def visit_import_all(self, o: mypy.nodes.ImportAll) -> str: + return f"ImportAll:{o.line}({'.' * o.relative + o.id})" + + # Definitions + + def visit_func_def(self, o: mypy.nodes.FuncDef) -> str: + a = self.func_helper(o) + a.insert(0, o.name) + arg_kinds = {arg.kind for arg in o.arguments} + if len(arg_kinds & {mypy.nodes.ARG_NAMED, mypy.nodes.ARG_NAMED_OPT}) > 0: + a.insert(1, f"MaxPos({o.max_pos})") + if o.abstract_status in (mypy.nodes.IS_ABSTRACT, mypy.nodes.IMPLICITLY_ABSTRACT): + a.insert(-1, "Abstract") + if o.is_static: + a.insert(-1, "Static") + if o.is_class: + a.insert(-1, "Class") + if o.is_property: + a.insert(-1, "Property") + return self.dump(a, o) + + def visit_overloaded_func_def(self, o: mypy.nodes.OverloadedFuncDef) -> str: + a: Any = o.items.copy() + if o.type: + a.insert(0, o.type) + if o.impl: + a.insert(0, o.impl) + if o.is_static: + a.insert(-1, "Static") + if o.is_class: + a.insert(-1, "Class") + return self.dump(a, o) + + def visit_class_def(self, o: mypy.nodes.ClassDef) -> str: + a = [o.name, o.defs.body] + # Display base types unless they are implicitly just builtins.object + # (in this case base_type_exprs is empty). 
+ if o.base_type_exprs: + if o.info and o.info.bases: + if len(o.info.bases) != 1 or o.info.bases[0].type.fullname != "builtins.object": + a.insert(1, ("BaseType", o.info.bases)) + else: + a.insert(1, ("BaseTypeExpr", o.base_type_exprs)) + if o.type_vars: + a.insert(1, ("TypeVars", o.type_vars)) + if o.metaclass: + a.insert(1, f"Metaclass({o.metaclass.accept(self)})") + if o.decorators: + a.insert(1, ("Decorators", o.decorators)) + if o.info and o.info._promote: + a.insert(1, f"Promote([{','.join(self.stringify_type(p) for p in o.info._promote)}])") + if o.info and o.info.tuple_type: + a.insert(1, ("TupleType", [o.info.tuple_type])) + if o.info and o.info.fallback_to_any: + a.insert(1, "FallbackToAny") + if o.type_args: + for p in reversed(o.type_args): + a.insert(1, self.type_param(p)) + return self.dump(a, o) + + def visit_var(self, o: mypy.nodes.Var) -> str: + lst = "" + # Add :nil line number tag if no line number is specified to remain + # compatible with old test case descriptions that assume this. + if o.line < 0: + lst = ":nil" + return "Var" + lst + "(" + o.name + ")" + + def visit_global_decl(self, o: mypy.nodes.GlobalDecl) -> str: + return self.dump([o.names], o) + + def visit_nonlocal_decl(self, o: mypy.nodes.NonlocalDecl) -> str: + return self.dump([o.names], o) + + def visit_decorator(self, o: mypy.nodes.Decorator) -> str: + return self.dump([o.var, o.decorators, o.func], o) + + def visit_type_alias(self, o: mypy.nodes.TypeAlias, /) -> str: + return self.dump([o.name, o.target, o.alias_tvars, o.no_args], o) + + def visit_placeholder_node(self, o: mypy.nodes.PlaceholderNode, /) -> str: + return self.dump([o.fullname], o) + + # Statements + + def visit_block(self, o: mypy.nodes.Block) -> str: + return self.dump(o.body, o) + + def visit_expression_stmt(self, o: mypy.nodes.ExpressionStmt) -> str: + return self.dump([o.expr], o) + + def visit_assignment_stmt(self, o: mypy.nodes.AssignmentStmt) -> str: + a: list[Any] = [] + if len(o.lvalues) > 1: + a = [("Lvalues", o.lvalues)] + else: + a = [o.lvalues[0]] + a.append(o.rvalue) + if o.type: + a.append(o.type) + return self.dump(a, o) + + def visit_operator_assignment_stmt(self, o: mypy.nodes.OperatorAssignmentStmt) -> str: + return self.dump([o.op, o.lvalue, o.rvalue], o) + + def visit_while_stmt(self, o: mypy.nodes.WhileStmt) -> str: + a: list[Any] = [o.expr, o.body] + if o.else_body: + a.append(("Else", o.else_body.body)) + return self.dump(a, o) + + def visit_for_stmt(self, o: mypy.nodes.ForStmt) -> str: + a: list[Any] = [] + if o.is_async: + a.append(("Async", "")) + a.append(o.index) + if o.index_type: + a.append(o.index_type) + a.extend([o.expr, o.body]) + if o.else_body: + a.append(("Else", o.else_body.body)) + return self.dump(a, o) + + def visit_return_stmt(self, o: mypy.nodes.ReturnStmt) -> str: + return self.dump([o.expr], o) + + def visit_if_stmt(self, o: mypy.nodes.IfStmt) -> str: + a: list[Any] = [] + for i in range(len(o.expr)): + a.append(("If", [o.expr[i]])) + a.append(("Then", o.body[i].body)) + + if not o.else_body: + return self.dump(a, o) + else: + return self.dump([a, ("Else", o.else_body.body)], o) + + def visit_break_stmt(self, o: mypy.nodes.BreakStmt) -> str: + return self.dump([], o) + + def visit_continue_stmt(self, o: mypy.nodes.ContinueStmt) -> str: + return self.dump([], o) + + def visit_pass_stmt(self, o: mypy.nodes.PassStmt) -> str: + return self.dump([], o) + + def visit_raise_stmt(self, o: mypy.nodes.RaiseStmt) -> str: + return self.dump([o.expr, o.from_expr], o) + + def visit_assert_stmt(self, o: 
mypy.nodes.AssertStmt) -> str: + if o.msg is not None: + return self.dump([o.expr, o.msg], o) + else: + return self.dump([o.expr], o) + + def visit_await_expr(self, o: mypy.nodes.AwaitExpr) -> str: + return self.dump([o.expr], o) + + def visit_del_stmt(self, o: mypy.nodes.DelStmt) -> str: + return self.dump([o.expr], o) + + def visit_try_stmt(self, o: mypy.nodes.TryStmt) -> str: + a: list[Any] = [o.body] + if o.is_star: + a.append("*") + + for i in range(len(o.vars)): + a.append(o.types[i]) + if o.vars[i]: + a.append(o.vars[i]) + a.append(o.handlers[i]) + + if o.else_body: + a.append(("Else", o.else_body.body)) + if o.finally_body: + a.append(("Finally", o.finally_body.body)) + + return self.dump(a, o) + + def visit_with_stmt(self, o: mypy.nodes.WithStmt) -> str: + a: list[Any] = [] + if o.is_async: + a.append(("Async", "")) + for i in range(len(o.expr)): + a.append(("Expr", [o.expr[i]])) + if o.target[i]: + a.append(("Target", [o.target[i]])) + if o.unanalyzed_type: + a.append(o.unanalyzed_type) + return self.dump(a + [o.body], o) + + def visit_match_stmt(self, o: mypy.nodes.MatchStmt) -> str: + a: list[Any] = [o.subject] + for i in range(len(o.patterns)): + a.append(("Pattern", [o.patterns[i]])) + if o.guards[i] is not None: + a.append(("Guard", [o.guards[i]])) + a.append(("Body", o.bodies[i].body)) + return self.dump(a, o) + + def visit_type_alias_stmt(self, o: mypy.nodes.TypeAliasStmt) -> str: + a: list[Any] = [o.name] + for p in o.type_args: + a.append(self.type_param(p)) + a.append(o.value) + return self.dump(a, o) + + def type_param(self, p: mypy.nodes.TypeParam) -> list[Any]: + a: list[Any] = [] + if p.kind == mypy.nodes.PARAM_SPEC_KIND: + prefix = "**" + elif p.kind == mypy.nodes.TYPE_VAR_TUPLE_KIND: + prefix = "*" + else: + prefix = "" + a.append(prefix + p.name) + if p.upper_bound: + a.append(p.upper_bound) + if p.values: + a.append(("Values", p.values)) + if p.default: + a.append(("Default", [p.default])) + return [("TypeParam", a)] + + # Expressions + + # Simple expressions + + def visit_int_expr(self, o: mypy.nodes.IntExpr) -> str: + return f"IntExpr({o.value})" + + def visit_str_expr(self, o: mypy.nodes.StrExpr) -> str: + return f"StrExpr({self.str_repr(o.value)})" + + def visit_bytes_expr(self, o: mypy.nodes.BytesExpr) -> str: + return f"BytesExpr({self.str_repr(o.value)})" + + def str_repr(self, s: str) -> str: + s = re.sub(r"\\u[0-9a-fA-F]{4}", lambda m: "\\" + m.group(0), s) + return re.sub("[^\\x20-\\x7e]", lambda m: r"\u%.4x" % ord(m.group(0)), s) + + def visit_float_expr(self, o: mypy.nodes.FloatExpr) -> str: + return f"FloatExpr({o.value})" + + def visit_complex_expr(self, o: mypy.nodes.ComplexExpr) -> str: + return f"ComplexExpr({o.value})" + + def visit_ellipsis(self, o: mypy.nodes.EllipsisExpr) -> str: + return "Ellipsis" + + def visit_star_expr(self, o: mypy.nodes.StarExpr) -> str: + return self.dump([o.expr], o) + + def visit_name_expr(self, o: mypy.nodes.NameExpr) -> str: + pretty = self.pretty_name( + o.name, o.kind, o.fullname, o.is_inferred_def or o.is_special_form, o.node + ) + if isinstance(o.node, mypy.nodes.Var) and o.node.is_final: + final_value = o.node.final_value + if final_value is not None: + pretty += f" = {o.node.final_value}" + return short_type(o) + "(" + pretty + ")" + + def pretty_name( + self, + name: str, + kind: int | None, + fullname: str | None, + is_inferred_def: bool, + target_node: mypy.nodes.Node | None = None, + ) -> str: + n = name + if is_inferred_def: + n += "*" + if target_node: + id = self.format_id(target_node) + else: + id = 
"" + if isinstance(target_node, mypy.nodes.MypyFile) and name == fullname: + n += id + elif kind == mypy.nodes.GDEF or (fullname != name and fullname): + # Append fully qualified name for global references. + n += f" [{fullname}{id}]" + elif kind == mypy.nodes.LDEF: + # Add tag to signify a local reference. + n += f" [l{id}]" + elif kind == mypy.nodes.MDEF: + # Add tag to signify a member reference. + n += f" [m{id}]" + else: + n += id + return n + + def visit_member_expr(self, o: mypy.nodes.MemberExpr) -> str: + pretty = self.pretty_name(o.name, o.kind, o.fullname, o.is_inferred_def, o.node) + return self.dump([o.expr, pretty], o) + + def visit_yield_expr(self, o: mypy.nodes.YieldExpr) -> str: + return self.dump([o.expr], o) + + def visit_yield_from_expr(self, o: mypy.nodes.YieldFromExpr) -> str: + if o.expr: + return self.dump([o.expr.accept(self)], o) + else: + return self.dump([], o) + + def visit_call_expr(self, o: mypy.nodes.CallExpr) -> str: + if o.analyzed: + return o.analyzed.accept(self) + args: list[mypy.nodes.Expression] = [] + extra: list[str | tuple[str, list[Any]]] = [] + for i, kind in enumerate(o.arg_kinds): + if kind in [mypy.nodes.ARG_POS, mypy.nodes.ARG_STAR]: + args.append(o.args[i]) + if kind == mypy.nodes.ARG_STAR: + extra.append("VarArg") + elif kind == mypy.nodes.ARG_NAMED: + extra.append(("KwArgs", [o.arg_names[i], o.args[i]])) + elif kind == mypy.nodes.ARG_STAR2: + extra.append(("DictVarArg", [o.args[i]])) + else: + raise RuntimeError(f"unknown kind {kind}") + a: list[Any] = [o.callee, ("Args", args)] + return self.dump(a + extra, o) + + def visit_op_expr(self, o: mypy.nodes.OpExpr) -> str: + if o.analyzed: + return o.analyzed.accept(self) + return self.dump([o.op, o.left, o.right], o) + + def visit_comparison_expr(self, o: mypy.nodes.ComparisonExpr) -> str: + return self.dump([o.operators, o.operands], o) + + def visit_cast_expr(self, o: mypy.nodes.CastExpr) -> str: + return self.dump([o.expr, o.type], o) + + def visit_type_form_expr(self, o: mypy.nodes.TypeFormExpr) -> str: + return self.dump([o.type], o) + + def visit_assert_type_expr(self, o: mypy.nodes.AssertTypeExpr) -> str: + return self.dump([o.expr, o.type], o) + + def visit_reveal_expr(self, o: mypy.nodes.RevealExpr) -> str: + if o.kind == mypy.nodes.REVEAL_TYPE: + return self.dump([o.expr], o) + else: + # REVEAL_LOCALS + return self.dump([o.local_nodes], o) + + def visit_assignment_expr(self, o: mypy.nodes.AssignmentExpr) -> str: + return self.dump([o.target, o.value], o) + + def visit_unary_expr(self, o: mypy.nodes.UnaryExpr) -> str: + return self.dump([o.op, o.expr], o) + + def visit_list_expr(self, o: mypy.nodes.ListExpr) -> str: + return self.dump(o.items, o) + + def visit_dict_expr(self, o: mypy.nodes.DictExpr) -> str: + return self.dump([[k, v] for k, v in o.items], o) + + def visit_set_expr(self, o: mypy.nodes.SetExpr) -> str: + return self.dump(o.items, o) + + def visit_tuple_expr(self, o: mypy.nodes.TupleExpr) -> str: + return self.dump(o.items, o) + + def visit_index_expr(self, o: mypy.nodes.IndexExpr) -> str: + if o.analyzed: + return o.analyzed.accept(self) + return self.dump([o.base, o.index], o) + + def visit_super_expr(self, o: mypy.nodes.SuperExpr) -> str: + return self.dump([o.name, o.call], o) + + def visit_type_application(self, o: mypy.nodes.TypeApplication) -> str: + return self.dump([o.expr, ("Types", o.types)], o) + + def visit_type_var_expr(self, o: mypy.nodes.TypeVarExpr) -> str: + import mypy.types + + a: list[Any] = [] + if o.variance == mypy.nodes.COVARIANT: + a += 
["Variance(COVARIANT)"] + if o.variance == mypy.nodes.CONTRAVARIANT: + a += ["Variance(CONTRAVARIANT)"] + if o.values: + a += [("Values", o.values)] + if not mypy.types.is_named_instance(o.upper_bound, "builtins.object"): + a += [f"UpperBound({self.stringify_type(o.upper_bound)})"] + return self.dump(a, o) + + def visit_paramspec_expr(self, o: mypy.nodes.ParamSpecExpr) -> str: + import mypy.types + + a: list[Any] = [] + if o.variance == mypy.nodes.COVARIANT: + a += ["Variance(COVARIANT)"] + if o.variance == mypy.nodes.CONTRAVARIANT: + a += ["Variance(CONTRAVARIANT)"] + if not mypy.types.is_named_instance(o.upper_bound, "builtins.object"): + a += [f"UpperBound({self.stringify_type(o.upper_bound)})"] + return self.dump(a, o) + + def visit_type_var_tuple_expr(self, o: mypy.nodes.TypeVarTupleExpr) -> str: + import mypy.types + + a: list[Any] = [] + if o.variance == mypy.nodes.COVARIANT: + a += ["Variance(COVARIANT)"] + if o.variance == mypy.nodes.CONTRAVARIANT: + a += ["Variance(CONTRAVARIANT)"] + if not mypy.types.is_named_instance(o.upper_bound, "builtins.object"): + a += [f"UpperBound({self.stringify_type(o.upper_bound)})"] + return self.dump(a, o) + + def visit_type_alias_expr(self, o: mypy.nodes.TypeAliasExpr) -> str: + return f"TypeAliasExpr({self.stringify_type(o.node.target)})" + + def visit_namedtuple_expr(self, o: mypy.nodes.NamedTupleExpr) -> str: + return f"NamedTupleExpr:{o.line}({o.info.name}, {self.stringify_type(o.info.tuple_type) if o.info.tuple_type is not None else None})" + + def visit_enum_call_expr(self, o: mypy.nodes.EnumCallExpr) -> str: + return f"EnumCallExpr:{o.line}({o.info.name}, {o.items})" + + def visit_typeddict_expr(self, o: mypy.nodes.TypedDictExpr) -> str: + return f"TypedDictExpr:{o.line}({o.info.name})" + + def visit__promote_expr(self, o: mypy.nodes.PromoteExpr) -> str: + return f"PromoteExpr:{o.line}({self.stringify_type(o.type)})" + + def visit_newtype_expr(self, o: mypy.nodes.NewTypeExpr) -> str: + return f"NewTypeExpr:{o.line}({o.name}, {self.dump([o.old_type], o)})" + + def visit_lambda_expr(self, o: mypy.nodes.LambdaExpr) -> str: + a = self.func_helper(o) + return self.dump(a, o) + + def visit_generator_expr(self, o: mypy.nodes.GeneratorExpr) -> str: + condlists = o.condlists if any(o.condlists) else None + return self.dump([o.left_expr, o.indices, o.sequences, condlists], o) + + def visit_list_comprehension(self, o: mypy.nodes.ListComprehension) -> str: + return self.dump([o.generator], o) + + def visit_set_comprehension(self, o: mypy.nodes.SetComprehension) -> str: + return self.dump([o.generator], o) + + def visit_dictionary_comprehension(self, o: mypy.nodes.DictionaryComprehension) -> str: + condlists = o.condlists if any(o.condlists) else None + return self.dump([o.key, o.value, o.indices, o.sequences, condlists], o) + + def visit_conditional_expr(self, o: mypy.nodes.ConditionalExpr) -> str: + return self.dump([("Condition", [o.cond]), o.if_expr, o.else_expr], o) + + def visit_slice_expr(self, o: mypy.nodes.SliceExpr) -> str: + a: list[Any] = [o.begin_index, o.end_index, o.stride] + if not a[0]: + a[0] = "" + if not a[1]: + a[1] = "" + return self.dump(a, o) + + def visit_temp_node(self, o: mypy.nodes.TempNode) -> str: + return self.dump([o.type], o) + + def visit_as_pattern(self, o: mypy.patterns.AsPattern) -> str: + return self.dump([o.pattern, o.name], o) + + def visit_or_pattern(self, o: mypy.patterns.OrPattern) -> str: + return self.dump(o.patterns, o) + + def visit_value_pattern(self, o: mypy.patterns.ValuePattern) -> str: + return 
self.dump([o.expr], o) + + def visit_singleton_pattern(self, o: mypy.patterns.SingletonPattern) -> str: + return self.dump([o.value], o) + + def visit_sequence_pattern(self, o: mypy.patterns.SequencePattern) -> str: + return self.dump(o.patterns, o) + + def visit_starred_pattern(self, o: mypy.patterns.StarredPattern) -> str: + return self.dump([o.capture], o) + + def visit_mapping_pattern(self, o: mypy.patterns.MappingPattern) -> str: + a: list[Any] = [] + for i in range(len(o.keys)): + a.append(("Key", [o.keys[i]])) + a.append(("Value", [o.values[i]])) + if o.rest is not None: + a.append(("Rest", [o.rest])) + return self.dump(a, o) + + def visit_class_pattern(self, o: mypy.patterns.ClassPattern) -> str: + a: list[Any] = [o.class_ref] + if len(o.positionals) > 0: + a.append(("Positionals", o.positionals)) + for i in range(len(o.keyword_keys)): + a.append(("Keyword", [o.keyword_keys[i], o.keyword_values[i]])) + + return self.dump(a, o) + + +def dump_tagged(nodes: Sequence[object], tag: str | None, str_conv: StrConv) -> str: + """Convert an array into a pretty-printed multiline string representation. + + The format is + tag( + item1.. + itemN) + Individual items are formatted like this: + - arrays are flattened + - pairs (str, array) are converted recursively, so that str is the tag + - other items are converted to strings and indented + """ + from mypy.types import Type, TypeStrVisitor + + a: list[str] = [] + if tag: + a.append(tag + "(") + for n in nodes: + if isinstance(n, list): + if n: + a.append(dump_tagged(n, None, str_conv)) + elif isinstance(n, tuple): + s = dump_tagged(n[1], n[0], str_conv) + a.append(indent(s, 2)) + elif isinstance(n, mypy.nodes.Node): + a.append(indent(n.accept(str_conv), 2)) + elif isinstance(n, Type): + a.append( + indent(n.accept(TypeStrVisitor(str_conv.id_mapper, options=str_conv.options)), 2) + ) + elif n is not None: + a.append(indent(str(n), 2)) + if tag: + a[-1] += ")" + return "\n".join(a) + + +def indent(s: str, n: int) -> str: + """Indent all the lines in s (separated by newlines) by n spaces.""" + s = " " * n + s + s = s.replace("\n", "\n" + " " * n) + return s diff --git a/.venv/lib/python3.12/site-packages/mypy/stubdoc.py b/.venv/lib/python3.12/site-packages/mypy/stubdoc.py new file mode 100644 index 0000000..89db6cb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/stubdoc.py @@ -0,0 +1,533 @@ +"""Parsing/inferring signatures from documentation. + +This module provides several functions to generate better stubs using +docstrings and Sphinx docs (.rst files). +""" + +from __future__ import annotations + +import contextlib +import io +import keyword +import re +import tokenize +from collections.abc import MutableMapping, MutableSequence, Sequence +from typing import Any, Final, NamedTuple +from typing_extensions import TypeAlias as _TypeAlias + +import mypy.util + +# Type alias for signatures strings in format ('func_name', '(arg, opt_arg=False)'). 
+Sig: _TypeAlias = tuple[str, str] + + +_TYPE_RE: Final = re.compile(r"^[a-zA-Z_][\w\[\], .\"\'|]*(\.[a-zA-Z_][\w\[\], ]*)*$") +_ARG_NAME_RE: Final = re.compile(r"\**[A-Za-z_][A-Za-z0-9_]*$") + + +def is_valid_type(s: str) -> bool: + """Try to determine whether a string might be a valid type annotation.""" + if s in ("True", "False", "retval"): + return False + if "," in s and "[" not in s: + return False + return _TYPE_RE.match(s) is not None + + +class ArgSig: + """Signature info for a single argument.""" + + def __init__( + self, + name: str, + type: str | None = None, + *, + default: bool = False, + default_value: str = "...", + ) -> None: + self.name = name + self.type = type + # Does this argument have a default value? + self.default = default + self.default_value = default_value + + def is_star_arg(self) -> bool: + return self.name.startswith("*") and not self.name.startswith("**") + + def is_star_kwarg(self) -> bool: + return self.name.startswith("**") + + def __repr__(self) -> str: + return "ArgSig(name={}, type={}, default={})".format( + repr(self.name), repr(self.type), repr(self.default) + ) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ArgSig): + return ( + self.name == other.name + and self.type == other.type + and self.default == other.default + and self.default_value == other.default_value + ) + return False + + +class FunctionSig(NamedTuple): + name: str + args: list[ArgSig] + ret_type: str | None + type_args: str = "" # TODO implement in stubgenc and remove the default + docstring: str | None = None + + def is_special_method(self) -> bool: + return bool( + self.name.startswith("__") + and self.name.endswith("__") + and self.args + and self.args[0].name in ("self", "cls") + ) + + def has_catchall_args(self) -> bool: + """Return if this signature has catchall args: (*args, **kwargs)""" + if self.args and self.args[0].name in ("self", "cls"): + args = self.args[1:] + else: + args = self.args + return ( + len(args) == 2 + and all(a.type in (None, "object", "Any", "typing.Any") for a in args) + and args[0].is_star_arg() + and args[1].is_star_kwarg() + ) + + def is_catchall_signature(self) -> bool: + """Return if this signature is the catchall identity: (*args, **kwargs) -> Any""" + return self.has_catchall_args() and self.ret_type in (None, "Any", "typing.Any") + + def format_sig( + self, + indent: str = "", + is_async: bool = False, + any_val: str | None = None, + docstring: str | None = None, + include_docstrings: bool = False, + ) -> str: + args: list[str] = [] + for arg in self.args: + arg_def = arg.name + + if arg_def in keyword.kwlist: + arg_def = "_" + arg_def + + if ( + arg.type is None + and any_val is not None + and arg.name not in ("self", "cls") + and not arg.name.startswith("*") + ): + arg_type: str | None = any_val + else: + arg_type = arg.type + if arg_type: + arg_def += ": " + arg_type + if arg.default: + arg_def += f" = {arg.default_value}" + + elif arg.default: + arg_def += f"={arg.default_value}" + + args.append(arg_def) + + retfield = "" + ret_type = self.ret_type if self.ret_type else any_val + if ret_type is not None: + retfield = " -> " + ret_type + + prefix = "async " if is_async else "" + sig = f"{indent}{prefix}def {self.name}{self.type_args}({', '.join(args)}){retfield}:" + # if this object has a docstring it's probably produced by a SignatureGenerator, so it + # takes precedence over the passed docstring, which acts as a fallback. 
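To see how ArgSig and FunctionSig compose, here is a small sketch; the function name and argument types are invented purely for illustration:

    from mypy.stubdoc import ArgSig, FunctionSig

    sig = FunctionSig(
        name="frobnicate",  # hypothetical name, used only in this example
        args=[ArgSig("x", "int"), ArgSig("y", "str", default=True)],
        ret_type="bool",
    )
    # format_sig() renders the stub line; defaulted args use the "..." placeholder.
    print(sig.format_sig())
    # def frobnicate(x: int, y: str = ...) -> bool: ...
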
+ doc = (self.docstring or docstring) if include_docstrings else None + if doc: + suffix = f"\n{indent} {mypy.util.quote_docstring(doc)}" + else: + suffix = " ..." + return f"{sig}{suffix}" + + +# States of the docstring parser. +STATE_INIT: Final = 1 +STATE_FUNCTION_NAME: Final = 2 +STATE_ARGUMENT_LIST: Final = 3 +STATE_ARGUMENT_TYPE: Final = 4 +STATE_ARGUMENT_DEFAULT: Final = 5 +STATE_RETURN_VALUE: Final = 6 +STATE_OPEN_BRACKET: Final = 7 # For generic types. + + +class DocStringParser: + """Parse function signatures in documentation.""" + + def __init__(self, function_name: str) -> None: + # Only search for signatures of function with this name. + self.function_name = function_name + self.state = [STATE_INIT] + self.accumulator = "" + self.arg_type: str | None = None + self.arg_name = "" + self.arg_default: str | None = None + self.ret_type = "Any" + self.found = False + self.args: list[ArgSig] = [] + self.pos_only: int | None = None + self.keyword_only: int | None = None + # Valid signatures found so far. + self.signatures: list[FunctionSig] = [] + + def add_token(self, token: tokenize.TokenInfo) -> None: + """Process next token from the token stream.""" + if ( + token.type == tokenize.NAME + and token.string == self.function_name + and self.state[-1] == STATE_INIT + ): + self.state.append(STATE_FUNCTION_NAME) + + elif ( + token.type == tokenize.OP + and token.string == "(" + and self.state[-1] == STATE_FUNCTION_NAME + ): + self.state.pop() + self.accumulator = "" + self.found = True + self.state.append(STATE_ARGUMENT_LIST) + + elif self.state[-1] == STATE_FUNCTION_NAME: + # Reset state, function name not followed by '('. + self.state.pop() + + elif ( + token.type == tokenize.OP + and token.string in ("[", "(", "{") + and self.state[-1] != STATE_INIT + ): + self.accumulator += token.string + self.state.append(STATE_OPEN_BRACKET) + + elif ( + token.type == tokenize.OP + and token.string in ("]", ")", "}") + and self.state[-1] == STATE_OPEN_BRACKET + ): + self.accumulator += token.string + self.state.pop() + + elif ( + token.type == tokenize.OP + and token.string == ":" + and self.state[-1] == STATE_ARGUMENT_LIST + ): + self.arg_name = self.accumulator + self.accumulator = "" + self.state.append(STATE_ARGUMENT_TYPE) + + elif ( + token.type == tokenize.OP + and token.string == "=" + and self.state[-1] in (STATE_ARGUMENT_LIST, STATE_ARGUMENT_TYPE) + ): + if self.state[-1] == STATE_ARGUMENT_TYPE: + self.arg_type = self.accumulator + self.state.pop() + else: + self.arg_name = self.accumulator + self.accumulator = "" + self.state.append(STATE_ARGUMENT_DEFAULT) + + elif ( + token.type == tokenize.OP + and token.string in (",", ")") + and self.state[-1] + in (STATE_ARGUMENT_LIST, STATE_ARGUMENT_DEFAULT, STATE_ARGUMENT_TYPE) + ): + if self.state[-1] == STATE_ARGUMENT_DEFAULT: + self.arg_default = self.accumulator + self.state.pop() + elif self.state[-1] == STATE_ARGUMENT_TYPE: + self.arg_type = self.accumulator + self.state.pop() + elif self.state[-1] == STATE_ARGUMENT_LIST: + if self.accumulator == "*": + if self.keyword_only is not None: + # Error condition: cannot have * twice + self.reset() + return + self.keyword_only = len(self.args) + self.accumulator = "" + else: + if self.accumulator.startswith("*"): + self.keyword_only = len(self.args) + 1 + self.arg_name = self.accumulator + if not ( + token.string == ")" and self.accumulator.strip() == "" + ) and not _ARG_NAME_RE.match(self.arg_name): + # Invalid argument name. 
+ self.reset() + return + + if token.string == ")": + if ( + self.state[-1] == STATE_ARGUMENT_LIST + and self.keyword_only is not None + and self.keyword_only == len(self.args) + and not self.arg_name + ): + # Error condition: * must be followed by arguments + self.reset() + return + self.state.pop() + + # arg_name is empty when there are no args. e.g. func() + if self.arg_name: + if self.arg_type and not is_valid_type(self.arg_type): + # wrong type, use Any + self.args.append( + ArgSig(name=self.arg_name, type=None, default=bool(self.arg_default)) + ) + else: + self.args.append( + ArgSig( + name=self.arg_name, type=self.arg_type, default=bool(self.arg_default) + ) + ) + self.arg_name = "" + self.arg_type = None + self.arg_default = None + self.accumulator = "" + elif ( + token.type == tokenize.OP + and token.string == "/" + and self.state[-1] == STATE_ARGUMENT_LIST + ): + if token.string == "/": + if self.pos_only is not None or self.keyword_only is not None or not self.args: + # Error cases: + # - / shows up more than once + # - / shows up after * + # - / shows up before any arguments + self.reset() + return + self.pos_only = len(self.args) + self.state.append(STATE_ARGUMENT_TYPE) + self.accumulator = "" + + elif token.type == tokenize.OP and token.string == "->" and self.state[-1] == STATE_INIT: + self.accumulator = "" + self.state.append(STATE_RETURN_VALUE) + + # ENDMAKER is necessary for python 3.4 and 3.5. + elif token.type in (tokenize.NEWLINE, tokenize.ENDMARKER) and self.state[-1] in ( + STATE_INIT, + STATE_RETURN_VALUE, + ): + if self.state[-1] == STATE_RETURN_VALUE: + if not is_valid_type(self.accumulator): + self.reset() + return + self.ret_type = self.accumulator + self.accumulator = "" + self.state.pop() + + if self.found: + self.signatures.append( + FunctionSig(name=self.function_name, args=self.args, ret_type=self.ret_type) + ) + self.found = False + self.args = [] + self.ret_type = "Any" + # Leave state as INIT. + else: + self.accumulator += token.string + + def reset(self) -> None: + self.state = [STATE_INIT] + self.args = [] + self.found = False + self.accumulator = "" + + def get_signatures(self) -> list[FunctionSig]: + """Return sorted copy of the list of signatures found so far.""" + + def has_arg(name: str, signature: FunctionSig) -> bool: + return any(x.name == name for x in signature.args) + + def args_kwargs(signature: FunctionSig) -> bool: + return has_arg("*args", signature) and has_arg("**kwargs", signature) + + # Move functions with (*args, **kwargs) in their signature to last place. + return sorted(self.signatures, key=lambda x: 1 if args_kwargs(x) else 0) + + +def infer_sig_from_docstring(docstr: str | None, name: str) -> list[FunctionSig] | None: + """Convert function signature to list of FunctionSig + + Look for function signatures of function in docstring. Signature is a string of + the format () -> or perhaps without + the return type. + + Returns empty list, when no signature is found, one signature in typical case, + multiple signatures, if docstring specifies multiple signatures for overload functions. + Return None if the docstring is empty. + + Arguments: + * docstr: docstring + * name: name of function for which signatures are to be found + """ + if not (isinstance(docstr, str) and docstr): + return None + + state = DocStringParser(name) + # Return all found signatures, even if there is a parse error after some are found. 
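A minimal usage sketch for this parser, driven through infer_sig_from_docstring; the docstring text is made up for the example:

    from mypy.stubdoc import infer_sig_from_docstring

    sigs = infer_sig_from_docstring("func(x: int, y=2) -> str", "func")
    assert sigs is not None and len(sigs) == 1
    print(sigs[0].ret_type)                # str
    print([a.name for a in sigs[0].args])  # ['x', 'y']
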
+ with contextlib.suppress(tokenize.TokenError): + try: + tokens = tokenize.tokenize(io.BytesIO(docstr.encode("utf-8")).readline) + for token in tokens: + state.add_token(token) + except IndentationError: + return None + sigs = state.get_signatures() + + def is_unique_args(sig: FunctionSig) -> bool: + """return true if function argument names are unique""" + return len(sig.args) == len({arg.name for arg in sig.args}) + + # Return only signatures that have unique argument names. Mypy fails on non-unique arg names. + return [sig for sig in sigs if is_unique_args(sig)] + + +def infer_arg_sig_from_anon_docstring(docstr: str) -> list[ArgSig]: + """Convert signature in form of "(self: TestClass, arg0: str='ada')" to List[TypedArgList].""" + ret = infer_sig_from_docstring("stub" + docstr, "stub") + if ret: + return ret[0].args + return [] + + +def infer_ret_type_sig_from_docstring(docstr: str, name: str) -> str | None: + """Convert signature in form of "func(self: TestClass, arg0) -> int" to their return type.""" + ret = infer_sig_from_docstring(docstr, name) + if ret: + return ret[0].ret_type + return None + + +def infer_ret_type_sig_from_anon_docstring(docstr: str) -> str | None: + """Convert signature in form of "(self: TestClass, arg0) -> int" to their return type.""" + lines = ["stub" + line.strip() for line in docstr.splitlines() if line.strip().startswith("(")] + return infer_ret_type_sig_from_docstring("".join(lines), "stub") + + +def parse_signature(sig: str) -> tuple[str, list[str], list[str]] | None: + """Split function signature into its name, positional an optional arguments. + + The expected format is "func_name(arg, opt_arg=False)". Return the name of function + and lists of positional and optional argument names. + """ + m = re.match(r"([.a-zA-Z0-9_]+)\(([^)]*)\)", sig) + if not m: + return None + name = m.group(1) + name = name.split(".")[-1] + arg_string = m.group(2) + if not arg_string.strip(): + # Simple case -- no arguments. + return name, [], [] + + args = [arg.strip() for arg in arg_string.split(",")] + positional = [] + optional = [] + i = 0 + while i < len(args): + # Accept optional arguments as in both formats: x=None and [x]. + if args[i].startswith("[") or "=" in args[i]: + break + positional.append(args[i].rstrip("[")) + i += 1 + if args[i - 1].endswith("["): + break + while i < len(args): + arg = args[i] + arg = arg.strip("[]") + arg = arg.split("=")[0] + optional.append(arg) + i += 1 + return name, positional, optional + + +def build_signature(positional: Sequence[str], optional: Sequence[str]) -> str: + """Build function signature from lists of positional and optional argument names.""" + args: MutableSequence[str] = [] + args.extend(positional) + for arg in optional: + if arg.startswith("*"): + args.append(arg) + else: + args.append(f"{arg}=...") + sig = f"({', '.join(args)})" + # Ad-hoc fixes. + sig = sig.replace("(self)", "") + return sig + + +def parse_all_signatures(lines: Sequence[str]) -> tuple[list[Sig], list[Sig]]: + """Parse all signatures in a given reST document. + + Return lists of found signatures for functions and classes. + """ + sigs = [] + class_sigs = [] + for line in lines: + line = line.strip() + m = re.match(r"\.\. 
*(function|method|class) *:: *[a-zA-Z_]", line) + if m: + sig = line.split("::")[1].strip() + parsed = parse_signature(sig) + if parsed: + name, fixed, optional = parsed + if m.group(1) != "class": + sigs.append((name, build_signature(fixed, optional))) + else: + class_sigs.append((name, build_signature(fixed, optional))) + + return sorted(sigs), sorted(class_sigs) + + +def find_unique_signatures(sigs: Sequence[Sig]) -> list[Sig]: + """Remove names with duplicate found signatures.""" + sig_map: MutableMapping[str, list[str]] = {} + for name, sig in sigs: + sig_map.setdefault(name, []).append(sig) + + result = [] + for name, name_sigs in sig_map.items(): + if len(set(name_sigs)) == 1: + result.append((name, name_sigs[0])) + return sorted(result) + + +def infer_prop_type_from_docstring(docstr: str | None) -> str | None: + """Check for Google/Numpy style docstring type annotation for a property. + + The docstring has the format ": ". + In the type string, we allow the following characters: + * dot: because sometimes classes are annotated using full path + * brackets: to allow type hints like List[int] + * comma/space: things like Tuple[int, int] + """ + if not docstr: + return None + test_str = r"^([a-zA-Z0-9_, \.\[\]]*): " + m = re.match(test_str, docstr) + return m.group(1) if m else None diff --git a/.venv/lib/python3.12/site-packages/mypy/stubgen.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/stubgen.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..6dbcd56 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/stubgen.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/stubgen.py b/.venv/lib/python3.12/site-packages/mypy/stubgen.py new file mode 100644 index 0000000..60fbd7f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/stubgen.py @@ -0,0 +1,2049 @@ +#!/usr/bin/env python3 +"""Generator of dynamically typed draft stubs for arbitrary modules. + +The logic of this script can be split in three steps: +* parsing options and finding sources: + - use runtime imports be default (to find also C modules) + - or use mypy's mechanisms, if importing is prohibited +* (optionally) semantically analysing the sources using mypy (as a single set) +* emitting the stubs text: + - for Python modules: from ASTs using ASTStubGenerator + - for C modules using runtime introspection and (optionally) Sphinx docs + +During first and third steps some problematic files can be skipped, but any +blocking error during second step will cause the whole program to stop. + +Basic usage: + + $ stubgen foo.py bar.py some_directory + => Generate out/foo.pyi, out/bar.pyi, and stubs for some_directory (recursively). + + $ stubgen -m urllib.parse + => Generate out/urllib/parse.pyi. + + $ stubgen -p urllib + => Generate stubs for whole urllib package (recursively). + +For C modules, you can get more precise function signatures by parsing .rst (Sphinx) +documentation for extra information. For this, use the --doc-dir option: + + $ stubgen --doc-dir /Python-3.4.2/Doc/library -m curses + +Note: The generated stubs should be verified manually. 
+ +TODO: + - maybe use .rst docs also for Python modules + - maybe export more imported names if there is no __all__ (this affects ssl.SSLError, for example) + - a quick and dirty heuristic would be to turn this on if a module has something like + 'from x import y as _y' + - we don't seem to always detect properties ('closed' in 'io', for example) +""" + +from __future__ import annotations + +import argparse +import keyword +import os +import os.path +import sys +import traceback +from collections.abc import Iterable, Iterator +from typing import Final + +import mypy.build +import mypy.mixedtraverser +import mypy.parse +import mypy.traverser +import mypy.util +import mypy.version +from mypy.build import build +from mypy.errors import CompileError, Errors +from mypy.find_sources import InvalidSourceList, create_source_list +from mypy.modulefinder import ( + BuildSource, + FindModuleCache, + ModuleNotFoundReason, + SearchPaths, + default_lib_path, +) +from mypy.moduleinspect import ModuleInspect, is_pyc_only +from mypy.nodes import ( + ARG_NAMED, + ARG_POS, + ARG_STAR, + ARG_STAR2, + IS_ABSTRACT, + NOT_ABSTRACT, + AssignmentStmt, + Block, + BytesExpr, + CallExpr, + CastExpr, + ClassDef, + ComparisonExpr, + ComplexExpr, + ConditionalExpr, + Decorator, + DictExpr, + DictionaryComprehension, + EllipsisExpr, + Expression, + ExpressionStmt, + FloatExpr, + FuncBase, + FuncDef, + GeneratorExpr, + IfStmt, + Import, + ImportAll, + ImportFrom, + IndexExpr, + IntExpr, + LambdaExpr, + ListComprehension, + ListExpr, + MemberExpr, + MypyFile, + NameExpr, + OpExpr, + OverloadedFuncDef, + SetComprehension, + SetExpr, + SliceExpr, + StarExpr, + Statement, + StrExpr, + TempNode, + TupleExpr, + TypeAliasStmt, + TypeInfo, + UnaryExpr, + Var, +) +from mypy.options import Options as MypyOptions +from mypy.plugins.dataclasses import DATACLASS_FIELD_SPECIFIERS +from mypy.semanal_shared import find_dataclass_transform_spec +from mypy.sharedparse import MAGIC_METHODS_POS_ARGS_ONLY +from mypy.stubdoc import ArgSig, FunctionSig +from mypy.stubgenc import InspectionStubGenerator, generate_stub_for_c_module +from mypy.stubutil import ( + TYPING_BUILTIN_REPLACEMENTS, + BaseStubGenerator, + CantImport, + ClassInfo, + FunctionContext, + common_dir_prefix, + fail_missing, + find_module_path_and_all_py3, + generate_guarded, + infer_method_arg_types, + infer_method_ret_type, + remove_misplaced_type_comments, + report_missing, + walk_packages, +) +from mypy.traverser import ( + all_yield_expressions, + has_return_statement, + has_yield_expression, + has_yield_from_expression, +) +from mypy.types import ( + DATACLASS_TRANSFORM_NAMES, + OVERLOAD_NAMES, + TPDICT_NAMES, + TYPE_VAR_LIKE_NAMES, + TYPED_NAMEDTUPLE_NAMES, + AnyType, + CallableType, + Instance, + TupleType, + Type, + UnboundType, + get_proper_type, +) +from mypy.visitor import NodeVisitor + +# Common ways of naming package containing vendored modules. +VENDOR_PACKAGES: Final = ["packages", "vendor", "vendored", "_vendor", "_vendored_packages"] + +# Avoid some file names that are unnecessary or likely to cause trouble (\n for end of path). +BLACKLIST: Final = [ + "/six.py\n", # Likely vendored six; too dynamic for us to handle + "/vendored/", # Vendored packages + "/vendor/", # Vendored packages + "/_vendor/", + "/_vendored_packages/", +] + +# These methods are expected to always return a non-trivial value. 
+METHODS_WITH_RETURN_VALUE: Final = { + "__ne__", + "__eq__", + "__lt__", + "__le__", + "__gt__", + "__ge__", + "__hash__", + "__iter__", +} + + +class Options: + """Represents stubgen options. + + This class is mutable to simplify testing. + """ + + def __init__( + self, + pyversion: tuple[int, int], + no_import: bool, + inspect: bool, + doc_dir: str, + search_path: list[str], + interpreter: str, + parse_only: bool, + ignore_errors: bool, + include_private: bool, + output_dir: str, + modules: list[str], + packages: list[str], + files: list[str], + verbose: bool, + quiet: bool, + export_less: bool, + include_docstrings: bool, + ) -> None: + # See parse_options for descriptions of the flags. + self.pyversion = pyversion + self.no_import = no_import + self.inspect = inspect + self.doc_dir = doc_dir + self.search_path = search_path + self.interpreter = interpreter + self.decointerpreter = interpreter + self.parse_only = parse_only + self.ignore_errors = ignore_errors + self.include_private = include_private + self.output_dir = output_dir + self.modules = modules + self.packages = packages + self.files = files + self.verbose = verbose + self.quiet = quiet + self.export_less = export_less + self.include_docstrings = include_docstrings + + +class StubSource: + """A single source for stub: can be a Python or C module. + + A simple extension of BuildSource that also carries the AST and + the value of __all__ detected at runtime. + """ + + def __init__( + self, module: str, path: str | None = None, runtime_all: list[str] | None = None + ) -> None: + self.source = BuildSource(path, module, None) + self.runtime_all = runtime_all + self.ast: MypyFile | None = None + + def __repr__(self) -> str: + return f"StubSource({self.source})" + + @property + def module(self) -> str: + return self.source.module + + @property + def path(self) -> str | None: + return self.source.path + + +# What was generated previously in the stub file. We keep track of these to generate +# nicely formatted output (add empty line between non-empty classes, for example). +EMPTY: Final = "EMPTY" +FUNC: Final = "FUNC" +CLASS: Final = "CLASS" +EMPTY_CLASS: Final = "EMPTY_CLASS" +VAR: Final = "VAR" +NOT_IN_ALL: Final = "NOT_IN_ALL" + +# Indicates that we failed to generate a reasonable output +# for a given node. These should be manually replaced by a user. + +ERROR_MARKER: Final = "" + + +class AliasPrinter(NodeVisitor[str]): + """Visitor used to collect type aliases _and_ type variable definitions. + + Visit r.h.s of the definition to get the string representation of type alias. 
+ """ + + def __init__(self, stubgen: ASTStubGenerator) -> None: + self.stubgen = stubgen + super().__init__() + + def visit_call_expr(self, node: CallExpr) -> str: + # Call expressions are not usually types, but we also treat `X = TypeVar(...)` as a + # type alias that has to be preserved (even if TypeVar is not the same as an alias) + callee = node.callee.accept(self) + args = [] + for name, arg, kind in zip(node.arg_names, node.args, node.arg_kinds): + if kind == ARG_POS: + args.append(arg.accept(self)) + elif kind == ARG_STAR: + args.append("*" + arg.accept(self)) + elif kind == ARG_STAR2: + args.append("**" + arg.accept(self)) + elif kind == ARG_NAMED: + args.append(f"{name}={arg.accept(self)}") + else: + raise ValueError(f"Unknown argument kind {kind} in call") + return f"{callee}({', '.join(args)})" + + def _visit_ref_expr(self, node: NameExpr | MemberExpr) -> str: + fullname = self.stubgen.get_fullname(node) + if fullname in TYPING_BUILTIN_REPLACEMENTS: + return self.stubgen.add_name(TYPING_BUILTIN_REPLACEMENTS[fullname], require=False) + qualname = get_qualified_name(node) + self.stubgen.import_tracker.require_name(qualname) + return qualname + + def visit_name_expr(self, node: NameExpr) -> str: + return self._visit_ref_expr(node) + + def visit_member_expr(self, o: MemberExpr) -> str: + return self._visit_ref_expr(o) + + def _visit_literal_node( + self, node: StrExpr | BytesExpr | IntExpr | FloatExpr | ComplexExpr + ) -> str: + return repr(node.value) + + def visit_str_expr(self, node: StrExpr) -> str: + return self._visit_literal_node(node) + + def visit_bytes_expr(self, node: BytesExpr) -> str: + return f"b{self._visit_literal_node(node)}" + + def visit_int_expr(self, node: IntExpr) -> str: + return self._visit_literal_node(node) + + def visit_float_expr(self, node: FloatExpr) -> str: + return self._visit_literal_node(node) + + def visit_complex_expr(self, node: ComplexExpr) -> str: + return self._visit_literal_node(node) + + def visit_index_expr(self, node: IndexExpr) -> str: + base_fullname = self.stubgen.get_fullname(node.base) + if base_fullname == "typing.Union": + if isinstance(node.index, TupleExpr): + return " | ".join([item.accept(self) for item in node.index.items]) + return node.index.accept(self) + if base_fullname == "typing.Optional": + if isinstance(node.index, TupleExpr): + return self.stubgen.add_name("_typeshed.Incomplete") + return f"{node.index.accept(self)} | None" + base = node.base.accept(self) + index = node.index.accept(self) + if len(index) > 2 and index.startswith("(") and index.endswith(")"): + index = index[1:-1].rstrip(",") + return f"{base}[{index}]" + + def visit_tuple_expr(self, node: TupleExpr) -> str: + suffix = "," if len(node.items) == 1 else "" + return f"({', '.join(n.accept(self) for n in node.items)}{suffix})" + + def visit_list_expr(self, node: ListExpr) -> str: + return f"[{', '.join(n.accept(self) for n in node.items)}]" + + def visit_set_expr(self, node: SetExpr) -> str: + return f"{{{', '.join(n.accept(self) for n in node.items)}}}" + + def visit_dict_expr(self, o: DictExpr) -> str: + dict_items = [] + for key, value in o.items: + # This is currently only used for TypedDict where all keys are strings. + assert isinstance(key, StrExpr) + dict_items.append(f"{key.accept(self)}: {value.accept(self)}") + return f"{{{', '.join(dict_items)}}}" + + def visit_ellipsis(self, node: EllipsisExpr) -> str: + return "..." 
+ + def visit_op_expr(self, o: OpExpr) -> str: + return f"{o.left.accept(self)} {o.op} {o.right.accept(self)}" + + def visit_unary_expr(self, o: UnaryExpr, /) -> str: + return f"{o.op}{o.expr.accept(self)}" + + def visit_slice_expr(self, o: SliceExpr, /) -> str: + blocks = [ + o.begin_index.accept(self) if o.begin_index is not None else "", + o.end_index.accept(self) if o.end_index is not None else "", + ] + if o.stride is not None: + blocks.append(o.stride.accept(self)) + return ":".join(blocks) + + def visit_star_expr(self, o: StarExpr) -> str: + return f"*{o.expr.accept(self)}" + + def visit_lambda_expr(self, o: LambdaExpr) -> str: + # TODO: Required for among other things dataclass.field default_factory + return self.stubgen.add_name("_typeshed.Incomplete") + + def _visit_unsupported_expr(self, o: object) -> str: + # Something we do not understand. + return self.stubgen.add_name("_typeshed.Incomplete") + + def visit_comparison_expr(self, o: ComparisonExpr) -> str: + return self._visit_unsupported_expr(o) + + def visit_cast_expr(self, o: CastExpr) -> str: + return self._visit_unsupported_expr(o) + + def visit_conditional_expr(self, o: ConditionalExpr) -> str: + return self._visit_unsupported_expr(o) + + def visit_list_comprehension(self, o: ListComprehension) -> str: + return self._visit_unsupported_expr(o) + + def visit_set_comprehension(self, o: SetComprehension) -> str: + return self._visit_unsupported_expr(o) + + def visit_dictionary_comprehension(self, o: DictionaryComprehension) -> str: + return self._visit_unsupported_expr(o) + + def visit_generator_expr(self, o: GeneratorExpr) -> str: + return self._visit_unsupported_expr(o) + + +def find_defined_names(file: MypyFile) -> set[str]: + finder = DefinitionFinder() + file.accept(finder) + return finder.names + + +def get_assigned_names(lvalues: Iterable[Expression]) -> Iterator[str]: + for lvalue in lvalues: + if isinstance(lvalue, NameExpr): + yield lvalue.name + elif isinstance(lvalue, TupleExpr): + yield from get_assigned_names(lvalue.items) + + +class DefinitionFinder(mypy.traverser.TraverserVisitor): + """Find names of things defined at the top level of a module.""" + + def __init__(self) -> None: + # Short names of things defined at the top level. + self.names: set[str] = set() + + def visit_class_def(self, o: ClassDef) -> None: + # Don't recurse into classes, as we only keep track of top-level definitions. + self.names.add(o.name) + + def visit_func_def(self, o: FuncDef) -> None: + # Don't recurse, as we only keep track of top-level definitions. + self.names.add(o.name) + + def visit_assignment_stmt(self, o: AssignmentStmt) -> None: + for name in get_assigned_names(o.lvalues): + self.names.add(name) + + def visit_type_alias_stmt(self, o: TypeAliasStmt) -> None: + self.names.add(o.name.name) + + +def find_referenced_names(file: MypyFile) -> set[str]: + finder = ReferenceFinder() + file.accept(finder) + return finder.refs + + +def is_none_expr(expr: Expression) -> bool: + return isinstance(expr, NameExpr) and expr.name == "None" + + +class ReferenceFinder(mypy.mixedtraverser.MixedTraverserVisitor): + """Find all name references (both local and global).""" + + # TODO: Filter out local variable and class attribute references + + def __init__(self) -> None: + # Short names of things defined at the top level. 
+ self.refs: set[str] = set() + + def visit_block(self, block: Block) -> None: + if not block.is_unreachable: + super().visit_block(block) + + def visit_name_expr(self, e: NameExpr) -> None: + self.refs.add(e.name) + + def visit_instance(self, t: Instance) -> None: + self.add_ref(t.type.name) + super().visit_instance(t) + + def visit_unbound_type(self, t: UnboundType) -> None: + if t.name: + self.add_ref(t.name) + + def visit_tuple_type(self, t: TupleType) -> None: + # Ignore fallback + for item in t.items: + item.accept(self) + + def visit_callable_type(self, t: CallableType) -> None: + # Ignore fallback + for arg in t.arg_types: + arg.accept(self) + t.ret_type.accept(self) + + def add_ref(self, fullname: str) -> None: + self.refs.add(fullname) + while "." in fullname: + fullname = fullname.rsplit(".", 1)[0] + self.refs.add(fullname) + + +class ASTStubGenerator(BaseStubGenerator, mypy.traverser.TraverserVisitor): + """Generate stub text from a mypy AST.""" + + def __init__( + self, + _all_: list[str] | None = None, + include_private: bool = False, + analyzed: bool = False, + export_less: bool = False, + include_docstrings: bool = False, + ) -> None: + super().__init__(_all_, include_private, export_less, include_docstrings) + self._decorators: list[str] = [] + # Stack of defined variables (per scope). + self._vars: list[list[str]] = [[]] + # What was generated previously in the stub file. + self._state = EMPTY + self._class_stack: list[ClassDef] = [] + # Was the tree semantically analysed before? + self.analyzed = analyzed + # Short names of methods defined in the body of the current class + self.method_names: set[str] = set() + self.processing_enum = False + self.processing_dataclass = False + self.dataclass_field_specifier: tuple[str, ...] = () + + @property + def _current_class(self) -> ClassDef | None: + return self._class_stack[-1] if self._class_stack else None + + def visit_mypy_file(self, o: MypyFile) -> None: + self.module_name = o.fullname # Current module being processed + self.path = o.path + self.set_defined_names(find_defined_names(o)) + self.referenced_names = find_referenced_names(o) + super().visit_mypy_file(o) + self.check_undefined_names() + + def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None: + """@property with setters and getters, @overload chain and some others.""" + overload_chain = False + for item in o.items: + if not isinstance(item, Decorator): + continue + if self.is_private_name(item.func.name, item.func.fullname): + continue + + self.process_decorator(item) + if not overload_chain: + self.visit_func_def(item.func) + if item.func.is_overload: + overload_chain = True + elif item.func.is_overload: + self.visit_func_def(item.func) + else: + # skip the overload implementation and clear the decorator we just processed + self.clear_decorators() + + def get_default_function_sig(self, func_def: FuncDef, ctx: FunctionContext) -> FunctionSig: + args = self._get_func_args(func_def, ctx) + retname = self._get_func_return(func_def, ctx) + type_args = self.format_type_args(func_def) + return FunctionSig(func_def.name, args, retname, type_args) + + def _get_func_args(self, o: FuncDef, ctx: FunctionContext) -> list[ArgSig]: + args: list[ArgSig] = [] + + # Ignore pos-only status of magic methods whose args names are elided by mypy at parse + actually_pos_only_args = o.name not in MAGIC_METHODS_POS_ARGS_ONLY + pos_only_marker_position = 0 # Where to insert "/", if any + for i, arg_ in enumerate(o.arguments): + var = arg_.variable + kind = arg_.kind + name = 
var.name + annotated_type = ( + o.unanalyzed_type.arg_types[i] + if isinstance(o.unanalyzed_type, CallableType) + else None + ) + # I think the name check is incorrect: there are libraries which + # name their 0th argument other than self/cls + is_self_arg = i == 0 and name == "self" + is_cls_arg = i == 0 and name == "cls" + typename: str | None = None + if annotated_type and not is_self_arg and not is_cls_arg: + # Luckily, an argument explicitly annotated with "Any" has + # type "UnboundType" and will not match. + if not isinstance(get_proper_type(annotated_type), AnyType): + typename = self.print_annotation(annotated_type) + + if actually_pos_only_args and arg_.pos_only: + pos_only_marker_position += 1 + + if kind.is_named() and not any(arg.name.startswith("*") for arg in args): + args.append(ArgSig("*")) + + default = "..." + if arg_.initializer: + if not typename: + typename = self.get_str_type_of_node(arg_.initializer, can_be_incomplete=False) + potential_default, valid = self.get_str_default_of_node(arg_.initializer) + if valid and len(potential_default) <= 200: + default = potential_default + elif kind == ARG_STAR: + name = f"*{name}" + elif kind == ARG_STAR2: + name = f"**{name}" + + args.append( + ArgSig(name, typename, default=bool(arg_.initializer), default_value=default) + ) + if pos_only_marker_position: + args.insert(pos_only_marker_position, ArgSig("/")) + + if ctx.class_info is not None and all( + arg.type is None and arg.default is False for arg in args + ): + new_args = infer_method_arg_types( + ctx.name, ctx.class_info.self_var, [arg.name for arg in args] + ) + + if ctx.name == "__exit__": + self.import_tracker.add_import("types") + self.import_tracker.require_name("types") + + if new_args is not None: + args = new_args + + return args + + def _get_func_return(self, o: FuncDef, ctx: FunctionContext) -> str | None: + if o.name != "__init__" and isinstance(o.unanalyzed_type, CallableType): + if isinstance(get_proper_type(o.unanalyzed_type.ret_type), AnyType): + # Luckily, a return type explicitly annotated with "Any" has + # type "UnboundType" and will enter the else branch. + return None # implicit Any + else: + return self.print_annotation(o.unanalyzed_type.ret_type) + if o.abstract_status == IS_ABSTRACT or o.name in METHODS_WITH_RETURN_VALUE: + # Always assume abstract methods return Any unless explicitly annotated. Also + # some dunder methods should not have a None return type. 
+ return None # implicit Any + retname = infer_method_ret_type(o.name) + if retname is not None: + return retname + if has_yield_expression(o) or has_yield_from_expression(o): + generator_name = self.add_name("collections.abc.Generator") + yield_name = "None" + send_name: str | None = None + return_name: str | None = None + if has_yield_from_expression(o): + yield_name = send_name = self.add_name("_typeshed.Incomplete") + else: + for expr, in_assignment in all_yield_expressions(o): + if expr.expr is not None and not is_none_expr(expr.expr): + yield_name = self.add_name("_typeshed.Incomplete") + if in_assignment: + send_name = self.add_name("_typeshed.Incomplete") + if has_return_statement(o): + return_name = self.add_name("_typeshed.Incomplete") + if return_name is not None: + if send_name is None: + send_name = "None" + return f"{generator_name}[{yield_name}, {send_name}, {return_name}]" + elif send_name is not None: + return f"{generator_name}[{yield_name}, {send_name}]" + else: + return f"{generator_name}[{yield_name}]" + if not has_return_statement(o) and o.abstract_status == NOT_ABSTRACT: + return "None" + return None + + def _get_func_docstring(self, node: FuncDef) -> str | None: + if not node.body.body: + return None + expr = node.body.body[0] + if isinstance(expr, ExpressionStmt) and isinstance(expr.expr, StrExpr): + return expr.expr.value + return None + + def visit_func_def(self, o: FuncDef) -> None: + is_dataclass_generated = ( + self.analyzed and self.processing_dataclass and o.info.names[o.name].plugin_generated + ) + if is_dataclass_generated: + # Skip methods generated by the @dataclass decorator + return + if ( + self.is_private_name(o.name, o.fullname) + or self.is_not_in_all(o.name) + or (self.is_recorded_name(o.name) and not o.is_overload) + ): + self.clear_decorators() + return + if self.is_top_level() and self._state not in (EMPTY, FUNC): + self.add("\n") + if not self.is_top_level(): + self_inits = find_self_initializers(o) + for init, value, annotation in self_inits: + if init in self.method_names: + # Can't have both an attribute and a method/property with the same name. + continue + init_code = self.get_init(init, value, annotation) + if init_code: + self.add(init_code) + + if self._class_stack: + if len(o.arguments): + self_var = o.arguments[0].variable.name + else: + self_var = "self" + class_info: ClassInfo | None = None + for class_def in self._class_stack: + class_info = ClassInfo(class_def.name, self_var, parent=class_info) + else: + class_info = None + + ctx = FunctionContext( + module_name=self.module_name, + name=o.name, + docstring=self._get_func_docstring(o), + is_abstract=o.abstract_status != NOT_ABSTRACT, + class_info=class_info, + ) + + self.record_name(o.name) + + default_sig = self.get_default_function_sig(o, ctx) + sigs = self.get_signatures(default_sig, self.sig_generators, ctx) + + for output in self.format_func_def( + sigs, is_coroutine=o.is_coroutine, decorators=self._decorators, docstring=ctx.docstring + ): + self.add(output + "\n") + + self.clear_decorators() + self._state = FUNC + + def visit_decorator(self, o: Decorator) -> None: + if self.is_private_name(o.func.name, o.func.fullname): + return + self.process_decorator(o) + self.visit_func_def(o.func) + + def process_decorator(self, o: Decorator) -> None: + """Process a series of decorators. + + Only preserve certain special decorators such as @abstractmethod. 
+ """ + o.func.is_overload = False + for decorator in o.original_decorators: + d = decorator + if isinstance(d, CallExpr): + d = d.callee + if not isinstance(d, (NameExpr, MemberExpr)): + continue + qualname = get_qualified_name(d) + fullname = self.get_fullname(d) + if fullname in ( + "builtins.property", + "builtins.staticmethod", + "builtins.classmethod", + "functools.cached_property", + ): + self.add_decorator(qualname, require_name=True) + elif fullname in ( + "asyncio.coroutine", + "asyncio.coroutines.coroutine", + "types.coroutine", + ): + o.func.is_awaitable_coroutine = True + self.add_decorator(qualname, require_name=True) + elif fullname == "abc.abstractmethod": + self.add_decorator(qualname, require_name=True) + o.func.abstract_status = IS_ABSTRACT + elif fullname in ( + "abc.abstractproperty", + "abc.abstractstaticmethod", + "abc.abstractclassmethod", + ): + abc_module = qualname.rpartition(".")[0] + if not abc_module: + self.import_tracker.add_import("abc") + builtin_decorator_replacement = fullname[len("abc.abstract") :] + self.add_decorator(builtin_decorator_replacement, require_name=False) + self.add_decorator(f"{abc_module or 'abc'}.abstractmethod", require_name=True) + o.func.abstract_status = IS_ABSTRACT + elif fullname in OVERLOAD_NAMES: + self.add_decorator(qualname, require_name=True) + o.func.is_overload = True + elif qualname.endswith((".setter", ".deleter")): + self.add_decorator(qualname, require_name=False) + elif fullname in DATACLASS_TRANSFORM_NAMES: + p = AliasPrinter(self) + self._decorators.append(f"@{decorator.accept(p)}") + elif isinstance(decorator, (NameExpr, MemberExpr)): + p = AliasPrinter(self) + self._decorators.append(f"@{decorator.accept(p)}") + + def get_fullname(self, expr: Expression) -> str: + """Return the expression's full name.""" + if ( + self.analyzed + and isinstance(expr, (NameExpr, MemberExpr)) + and expr.fullname + and not (isinstance(expr.node, Var) and expr.node.is_suppressed_import) + ): + return expr.fullname + name = get_qualified_name(expr) + return self.resolve_name(name) + + def visit_class_def(self, o: ClassDef) -> None: + self._class_stack.append(o) + self.method_names = find_method_names(o.defs.body) + sep: int | None = None + if self.is_top_level() and self._state != EMPTY: + sep = len(self._output) + self.add("\n") + decorators = self.get_class_decorators(o) + for d in decorators: + self.add(f"{self._indent}@{d}\n") + self.record_name(o.name) + base_types = self.get_base_types(o) + if base_types: + for base in base_types: + self.import_tracker.require_name(base) + if self.analyzed and o.info.is_enum: + self.processing_enum = True + if isinstance(o.metaclass, (NameExpr, MemberExpr)): + meta = o.metaclass.accept(AliasPrinter(self)) + base_types.append("metaclass=" + meta) + elif self.analyzed and o.info.is_abstract and not o.info.is_protocol: + base_types.append("metaclass=abc.ABCMeta") + self.import_tracker.add_import("abc") + self.import_tracker.require_name("abc") + bases = f"({', '.join(base_types)})" if base_types else "" + type_args = self.format_type_args(o) + self.add(f"{self._indent}class {o.name}{type_args}{bases}:\n") + self.indent() + if self._include_docstrings and o.docstring: + docstring = mypy.util.quote_docstring(o.docstring) + self.add(f"{self._indent}{docstring}\n") + n = len(self._output) + self._vars.append([]) + if self.analyzed and (spec := find_dataclass_transform_spec(o)): + self.processing_dataclass = True + self.dataclass_field_specifier = spec.field_specifiers + super().visit_class_def(o) + 
self.dedent() + self._vars.pop() + self._vars[-1].append(o.name) + if len(self._output) == n: + if self._state == EMPTY_CLASS and sep is not None: + self._output[sep] = "" + if not (self._include_docstrings and o.docstring): + self._output[-1] = self._output[-1][:-1] + " ...\n" + self._state = EMPTY_CLASS + else: + self._state = CLASS + self.method_names = set() + self.processing_dataclass = False + self.dataclass_field_specifier = () + self._class_stack.pop(-1) + self.processing_enum = False + + def get_base_types(self, cdef: ClassDef) -> list[str]: + """Get list of base classes for a class.""" + base_types: list[str] = [] + p = AliasPrinter(self) + for base in cdef.base_type_exprs + cdef.removed_base_type_exprs: + if isinstance(base, (NameExpr, MemberExpr)): + if self.get_fullname(base) != "builtins.object": + base_types.append(get_qualified_name(base)) + elif isinstance(base, IndexExpr): + base_types.append(base.accept(p)) + elif isinstance(base, CallExpr): + # namedtuple(typename, fields), NamedTuple(typename, fields) calls can + # be used as a base class. The first argument is a string literal that + # is usually the same as the class name. + # + # Note: + # A call-based named tuple as a base class cannot be safely converted to + # a class-based NamedTuple definition because class attributes defined + # in the body of the class inheriting from the named tuple call are not + # namedtuple fields at runtime. + if self.is_namedtuple(base): + nt_fields = self._get_namedtuple_fields(base) + assert isinstance(base.args[0], StrExpr) + typename = base.args[0].value + if nt_fields is None: + # Invalid namedtuple() call, cannot determine fields + base_types.append(self.add_name("_typeshed.Incomplete")) + continue + fields_str = ", ".join(f"({f!r}, {t})" for f, t in nt_fields) + namedtuple_name = self.add_name("typing.NamedTuple") + base_types.append(f"{namedtuple_name}({typename!r}, [{fields_str}])") + elif self.is_typed_namedtuple(base): + base_types.append(base.accept(p)) + else: + # At this point, we don't know what the base class is, so we + # just use Incomplete as the base class. + base_types.append(self.add_name("_typeshed.Incomplete")) + for name, value in cdef.keywords.items(): + if name == "metaclass": + continue # handled separately + processed_value = value.accept(p) or "..." # at least, don't crash + base_types.append(f"{name}={processed_value}") + return base_types + + def get_class_decorators(self, cdef: ClassDef) -> list[str]: + decorators: list[str] = [] + p = AliasPrinter(self) + for d in cdef.decorators: + if self.is_dataclass(d): + decorators.append(d.accept(p)) + self.import_tracker.require_name(get_qualified_name(d)) + self.processing_dataclass = True + if self.is_dataclass_transform(d): + decorators.append(d.accept(p)) + self.import_tracker.require_name(get_qualified_name(d)) + return decorators + + def is_dataclass(self, expr: Expression) -> bool: + if isinstance(expr, CallExpr): + expr = expr.callee + return self.get_fullname(expr) == "dataclasses.dataclass" + + def is_dataclass_transform(self, expr: Expression) -> bool: + if isinstance(expr, CallExpr): + expr = expr.callee + if self.get_fullname(expr) in DATACLASS_TRANSFORM_NAMES: + return True + if (spec := find_dataclass_transform_spec(expr)) is not None: + self.processing_dataclass = True + self.dataclass_field_specifier = spec.field_specifiers + return True + return False + + def visit_block(self, o: Block) -> None: + # Unreachable statements may be partially uninitialized and that may + # cause trouble. 
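+ # (For example, a branch ruled out by a sys.platform or version check is marked
+ # unreachable by semantic analysis; skipping it keeps names that were never
+ # actually bound out of the generated stub.)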
+ if not o.is_unreachable: + super().visit_block(o) + + def visit_assignment_stmt(self, o: AssignmentStmt) -> None: + foundl = [] + + for lvalue in o.lvalues: + if isinstance(lvalue, NameExpr) and isinstance(o.rvalue, CallExpr): + if self.is_namedtuple(o.rvalue) or self.is_typed_namedtuple(o.rvalue): + self.process_namedtuple(lvalue, o.rvalue) + foundl.append(False) # state is updated in process_namedtuple + continue + if self.is_typeddict(o.rvalue): + self.process_typeddict(lvalue, o.rvalue) + foundl.append(False) # state is updated in process_typeddict + continue + if ( + isinstance(lvalue, NameExpr) + and self.is_alias_expression(o.rvalue) + and not self.is_private_name(lvalue.name) + ): + is_explicit_type_alias = ( + o.unanalyzed_type and getattr(o.type, "name", None) == "TypeAlias" + ) + if is_explicit_type_alias: + self.process_typealias(lvalue, o.rvalue, is_explicit_type_alias=True) + continue + + if not o.unanalyzed_type: + self.process_typealias(lvalue, o.rvalue) + continue + + if isinstance(lvalue, (TupleExpr, ListExpr)): + items = lvalue.items + if isinstance(o.unanalyzed_type, TupleType): # type: ignore[misc] + annotations: Iterable[Type | None] = o.unanalyzed_type.items + else: + annotations = [None] * len(items) + else: + items = [lvalue] + annotations = [o.unanalyzed_type] + sep = False + found = False + for item, annotation in zip(items, annotations): + if isinstance(item, NameExpr): + init = self.get_init(item.name, o.rvalue, annotation) + if init: + found = True + if not sep and self.is_top_level() and self._state not in (EMPTY, VAR): + init = "\n" + init + sep = True + self.add(init) + self.record_name(item.name) + foundl.append(found) + + if all(foundl): + self._state = VAR + + def is_namedtuple(self, expr: CallExpr) -> bool: + return self.get_fullname(expr.callee) == "collections.namedtuple" + + def is_typed_namedtuple(self, expr: CallExpr) -> bool: + return self.get_fullname(expr.callee) in TYPED_NAMEDTUPLE_NAMES + + def _get_namedtuple_fields(self, call: CallExpr) -> list[tuple[str, str]] | None: + if self.is_namedtuple(call): + fields_arg = call.args[1] + if isinstance(fields_arg, StrExpr): + field_names = fields_arg.value.replace(",", " ").split() + elif isinstance(fields_arg, (ListExpr, TupleExpr)): + field_names = [] + for field in fields_arg.items: + if not isinstance(field, StrExpr): + return None + field_names.append(field.value) + else: + return None # Invalid namedtuple fields type + if field_names: + incomplete = self.add_name("_typeshed.Incomplete") + return [(field_name, incomplete) for field_name in field_names] + else: + return [] + + elif self.is_typed_namedtuple(call): + fields_arg = call.args[1] + if not isinstance(fields_arg, (ListExpr, TupleExpr)): + return None + fields: list[tuple[str, str]] = [] + p = AliasPrinter(self) + for field in fields_arg.items: + if not (isinstance(field, TupleExpr) and len(field.items) == 2): + return None + field_name, field_type = field.items + if not isinstance(field_name, StrExpr): + return None + fields.append((field_name.value, field_type.accept(p))) + return fields + else: + return None # Not a named tuple call + + def process_namedtuple(self, lvalue: NameExpr, rvalue: CallExpr) -> None: + if self._state == CLASS: + self.add("\n") + + if not isinstance(rvalue.args[0], StrExpr): + self.annotate_as_incomplete(lvalue) + return + + fields = self._get_namedtuple_fields(rvalue) + if fields is None: + self.annotate_as_incomplete(lvalue) + return + bases = self.add_name("typing.NamedTuple") + # TODO: Add support for 
generic NamedTuples. Requires `Generic` as base class. + class_def = f"{self._indent}class {lvalue.name}({bases}):" + if len(fields) == 0: + self.add(f"{class_def} ...\n") + self._state = EMPTY_CLASS + else: + if self._state not in (EMPTY, CLASS): + self.add("\n") + self.add(f"{class_def}\n") + for f_name, f_type in fields: + self.add(f"{self._indent} {f_name}: {f_type}\n") + self._state = CLASS + + def is_typeddict(self, expr: CallExpr) -> bool: + return self.get_fullname(expr.callee) in TPDICT_NAMES + + def process_typeddict(self, lvalue: NameExpr, rvalue: CallExpr) -> None: + if self._state == CLASS: + self.add("\n") + + if not isinstance(rvalue.args[0], StrExpr): + self.annotate_as_incomplete(lvalue) + return + + items: list[tuple[str, Expression]] = [] + total: Expression | None = None + if len(rvalue.args) > 1 and rvalue.arg_kinds[1] == ARG_POS: + if not isinstance(rvalue.args[1], DictExpr): + self.annotate_as_incomplete(lvalue) + return + for attr_name, attr_type in rvalue.args[1].items: + if not isinstance(attr_name, StrExpr): + self.annotate_as_incomplete(lvalue) + return + items.append((attr_name.value, attr_type)) + if len(rvalue.args) > 2: + if rvalue.arg_kinds[2] != ARG_NAMED or rvalue.arg_names[2] != "total": + self.annotate_as_incomplete(lvalue) + return + total = rvalue.args[2] + else: + for arg_name, arg in zip(rvalue.arg_names[1:], rvalue.args[1:]): + if not isinstance(arg_name, str): + self.annotate_as_incomplete(lvalue) + return + if arg_name == "total": + total = arg + else: + items.append((arg_name, arg)) + p = AliasPrinter(self) + if any(not key.isidentifier() or keyword.iskeyword(key) for key, _ in items): + # Keep the call syntax if there are non-identifier or reserved keyword keys. + self.add(f"{self._indent}{lvalue.name} = {rvalue.accept(p)}\n") + self._state = VAR + else: + bases = self.add_name("typing_extensions.TypedDict") + # TODO: Add support for generic TypedDicts. Requires `Generic` as base class. + if total is not None: + bases += f", total={total.accept(p)}" + class_def = f"{self._indent}class {lvalue.name}({bases}):" + if len(items) == 0: + self.add(f"{class_def} ...\n") + self._state = EMPTY_CLASS + else: + if self._state not in (EMPTY, CLASS): + self.add("\n") + self.add(f"{class_def}\n") + for key, key_type in items: + self.add(f"{self._indent} {key}: {key_type.accept(p)}\n") + self._state = CLASS + + def annotate_as_incomplete(self, lvalue: NameExpr) -> None: + incomplete = self.add_name("_typeshed.Incomplete") + self.add(f"{self._indent}{lvalue.name}: {incomplete}\n") + self._state = VAR + + def is_alias_expression(self, expr: Expression, top_level: bool = True) -> bool: + """Return True for things that look like target for an alias. + + Used to know if assignments look like type aliases, function alias, + or module alias. + """ + # Assignment of TypeVar(...) and other typevar-likes are passed through + if isinstance(expr, CallExpr) and self.get_fullname(expr.callee) in TYPE_VAR_LIKE_NAMES: + return True + elif isinstance(expr, EllipsisExpr): + return not top_level + elif isinstance(expr, NameExpr): + if expr.name in ("True", "False"): + return False + elif expr.name == "None": + return not top_level + else: + return not self.is_private_name(expr.name) + elif isinstance(expr, MemberExpr) and self.analyzed: + # Also add function and module aliases. 
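+ # (e.g. a module-level "run = cli.main" or "compat = pkg.compat" is kept as a
+ # plain assignment in the stub instead of being annotated as Incomplete.)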
+ return ( + top_level + and isinstance(expr.node, (FuncDef, Decorator, MypyFile)) + or isinstance(expr.node, TypeInfo) + ) and not self.is_private_member(expr.node.fullname) + elif isinstance(expr, IndexExpr) and ( + (isinstance(expr.base, NameExpr) and not self.is_private_name(expr.base.name)) + or ( # Also some known aliases that could be member expression + isinstance(expr.base, MemberExpr) + and not self.is_private_member(get_qualified_name(expr.base)) + and self.get_fullname(expr.base).startswith( + ("builtins.", "typing.", "typing_extensions.", "collections.abc.") + ) + ) + ): + if isinstance(expr.index, TupleExpr): + indices = expr.index.items + else: + indices = [expr.index] + if expr.base.name == "Callable" and len(indices) == 2: + args, ret = indices + if isinstance(args, EllipsisExpr): + indices = [ret] + elif isinstance(args, ListExpr): + indices = args.items + [ret] + else: + return False + return all(self.is_alias_expression(i, top_level=False) for i in indices) + elif isinstance(expr, OpExpr) and expr.op == "|": + return self.is_alias_expression( + expr.left, top_level=False + ) and self.is_alias_expression(expr.right, top_level=False) + else: + return False + + def process_typealias( + self, lvalue: NameExpr, rvalue: Expression, is_explicit_type_alias: bool = False + ) -> None: + p = AliasPrinter(self) + if is_explicit_type_alias: + self.import_tracker.require_name("TypeAlias") + self.add(f"{self._indent}{lvalue.name}: TypeAlias = {rvalue.accept(p)}\n") + else: + self.add(f"{self._indent}{lvalue.name} = {rvalue.accept(p)}\n") + self.record_name(lvalue.name) + self._vars[-1].append(lvalue.name) + + def visit_type_alias_stmt(self, o: TypeAliasStmt) -> None: + """Type aliases defined with the `type` keyword (PEP 695).""" + p = AliasPrinter(self) + name = o.name.name + rvalue = o.value.expr() + type_args = self.format_type_args(o) + self.add(f"{self._indent}type {name}{type_args} = {rvalue.accept(p)}\n") + self.record_name(name) + self._vars[-1].append(name) + + def visit_if_stmt(self, o: IfStmt) -> None: + # Ignore if __name__ == '__main__'. + expr = o.expr[0] + if ( + isinstance(expr, ComparisonExpr) + and isinstance(expr.operands[0], NameExpr) + and isinstance(expr.operands[1], StrExpr) + and expr.operands[0].name == "__name__" + and "__main__" in expr.operands[1].value + ): + return + super().visit_if_stmt(o) + + def visit_import_all(self, o: ImportAll) -> None: + self.add_import_line(f"from {'.' * o.relative}{o.id} import *\n") + + def visit_import_from(self, o: ImportFrom) -> None: + exported_names: set[str] = set() + import_names = [] + module, relative = translate_module_name(o.id, o.relative) + if self.module_name: + full_module, ok = mypy.util.correct_relative_import( + self.module_name, relative, module, self.path.endswith(".__init__.py") + ) + if not ok: + full_module = module + else: + full_module = module + if module == "__future__": + return # Not preserved + for name, as_name in o.names: + if name == "six": + # Vendored six -- translate into plain 'import six'. + self.visit_import(Import([("six", None)])) + continue + if self.should_reexport(name, full_module, as_name is not None): + self.import_tracker.reexport(name) + as_name = name + import_names.append((name, as_name)) + self.import_tracker.add_import_from("." 
* relative + module, import_names) + self._vars[-1].extend(alias or name for name, alias in import_names) + for name, alias in import_names: + self.record_name(alias or name) + + if self._all_: + # Include "import from"s that import names defined in __all__. + names = [ + name + for name, alias in o.names + if name in self._all_ and alias is None and name not in self.IGNORED_DUNDERS + ] + exported_names.update(names) + + def visit_import(self, o: Import) -> None: + for id, as_id in o.ids: + self.import_tracker.add_import(id, as_id) + if as_id is None: + target_name = id.split(".")[0] + else: + target_name = as_id + self._vars[-1].append(target_name) + self.record_name(target_name) + + def get_init( + self, lvalue: str, rvalue: Expression, annotation: Type | None = None + ) -> str | None: + """Return initializer for a variable. + + Return None if we've generated one already or if the variable is internal. + """ + if lvalue in self._vars[-1]: + # We've generated an initializer already for this variable. + return None + # TODO: Only do this at module top level. + if self.is_private_name(lvalue) or self.is_not_in_all(lvalue): + return None + self._vars[-1].append(lvalue) + if annotation is not None: + typename = self.print_annotation(annotation) + if ( + isinstance(annotation, UnboundType) + and not annotation.args + and annotation.name == "Final" + and self.import_tracker.module_for.get("Final") in self.TYPING_MODULE_NAMES + ): + # Final without type argument is invalid in stubs. + final_arg = self.get_str_type_of_node(rvalue) + typename += f"[{final_arg}]" + elif self.processing_enum: + initializer, _ = self.get_str_default_of_node(rvalue) + return f"{self._indent}{lvalue} = {initializer}\n" + elif self.processing_dataclass: + # attribute without annotation is not a dataclass field, don't add annotation. + return f"{self._indent}{lvalue} = ...\n" + else: + typename = self.get_str_type_of_node(rvalue) + initializer = self.get_assign_initializer(rvalue) + return f"{self._indent}{lvalue}: {typename}{initializer}\n" + + def get_assign_initializer(self, rvalue: Expression) -> str: + """Does this rvalue need some special initializer value?""" + if not self._current_class: + return "" + # Current rules + # 1. Return `...` if we are dealing with `NamedTuple` or `dataclass` field and + # it has an existing default value + if ( + self._current_class.info + and self._current_class.info.is_named_tuple + and not isinstance(rvalue, TempNode) + ): + return " = ..." + if self.processing_dataclass: + if isinstance(rvalue, CallExpr): + fullname = self.get_fullname(rvalue.callee) + if fullname in (self.dataclass_field_specifier or DATACLASS_FIELD_SPECIFIERS): + p = AliasPrinter(self) + return f" = {rvalue.accept(p)}" + if not (isinstance(rvalue, TempNode) and rvalue.no_rhs): + return " = ..." 
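+ # (So a dataclass field such as "x: int = field(default=0)" keeps its field(...)
+ # call in the stub, while any other default value collapses to "x: int = ...".)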
+ # TODO: support other possible cases, where initializer is important + + # By default, no initializer is required: + return "" + + def add_decorator(self, name: str, require_name: bool = False) -> None: + if require_name: + self.import_tracker.require_name(name) + self._decorators.append(f"@{name}") + + def clear_decorators(self) -> None: + self._decorators.clear() + + def is_private_member(self, fullname: str) -> bool: + parts = fullname.split(".") + return any(self.is_private_name(part) for part in parts) + + def get_str_type_of_node(self, rvalue: Expression, *, can_be_incomplete: bool = True) -> str: + rvalue = self.maybe_unwrap_unary_expr(rvalue) + + if isinstance(rvalue, IntExpr): + return "int" + if isinstance(rvalue, StrExpr): + return "str" + if isinstance(rvalue, BytesExpr): + return "bytes" + if isinstance(rvalue, FloatExpr): + return "float" + if isinstance(rvalue, ComplexExpr): # 1j + return "complex" + if isinstance(rvalue, OpExpr) and rvalue.op in ("-", "+"): # -1j + 1 + if isinstance(self.maybe_unwrap_unary_expr(rvalue.left), ComplexExpr) or isinstance( + self.maybe_unwrap_unary_expr(rvalue.right), ComplexExpr + ): + return "complex" + if isinstance(rvalue, NameExpr) and rvalue.name in ("True", "False"): + return "bool" + if can_be_incomplete: + return self.add_name("_typeshed.Incomplete") + else: + return "" + + def maybe_unwrap_unary_expr(self, expr: Expression) -> Expression: + """Unwrap (possibly nested) unary expressions. + + But, some unary expressions can change the type of expression. + While we want to preserve it. For example, `~True` is `int`. + So, we only allow a subset of unary expressions to be unwrapped. + """ + if not isinstance(expr, UnaryExpr): + return expr + + # First, try to unwrap `[+-]+ (int|float|complex)` expr: + math_ops = ("+", "-") + if expr.op in math_ops: + while isinstance(expr, UnaryExpr): + if expr.op not in math_ops or not isinstance( + expr.expr, (IntExpr, FloatExpr, ComplexExpr, UnaryExpr) + ): + break + expr = expr.expr + return expr + + # Next, try `not bool` expr: + if expr.op == "not": + while isinstance(expr, UnaryExpr): + if expr.op != "not" or not isinstance(expr.expr, (NameExpr, UnaryExpr)): + break + if isinstance(expr.expr, NameExpr) and expr.expr.name not in ("True", "False"): + break + expr = expr.expr + return expr + + # This is some other unary expr, we cannot do anything with it (yet?). + return expr + + def get_str_default_of_node(self, rvalue: Expression) -> tuple[str, bool]: + """Get a string representation of the default value of a node. + + Returns a 2-tuple of the default and whether or not it is valid. 
+ """ + if isinstance(rvalue, NameExpr): + if rvalue.name in ("None", "True", "False"): + return rvalue.name, True + elif isinstance(rvalue, (IntExpr, FloatExpr)): + return f"{rvalue.value}", True + elif isinstance(rvalue, UnaryExpr): + if isinstance(rvalue.expr, (IntExpr, FloatExpr)): + return f"{rvalue.op}{rvalue.expr.value}", True + elif isinstance(rvalue, StrExpr): + return repr(rvalue.value), True + elif isinstance(rvalue, BytesExpr): + return "b" + repr(rvalue.value).replace("\\\\", "\\"), True + elif isinstance(rvalue, TupleExpr): + items_defaults = [] + for e in rvalue.items: + e_default, valid = self.get_str_default_of_node(e) + if not valid: + break + items_defaults.append(e_default) + else: + closing = ",)" if len(items_defaults) == 1 else ")" + default = "(" + ", ".join(items_defaults) + closing + return default, True + elif isinstance(rvalue, ListExpr): + items_defaults = [] + for e in rvalue.items: + e_default, valid = self.get_str_default_of_node(e) + if not valid: + break + items_defaults.append(e_default) + else: + default = "[" + ", ".join(items_defaults) + "]" + return default, True + elif isinstance(rvalue, SetExpr): + items_defaults = [] + for e in rvalue.items: + e_default, valid = self.get_str_default_of_node(e) + if not valid: + break + items_defaults.append(e_default) + else: + if items_defaults: + default = "{" + ", ".join(items_defaults) + "}" + return default, True + elif isinstance(rvalue, DictExpr): + items_defaults = [] + for k, v in rvalue.items: + if k is None: + break + k_default, k_valid = self.get_str_default_of_node(k) + v_default, v_valid = self.get_str_default_of_node(v) + if not (k_valid and v_valid): + break + items_defaults.append(f"{k_default}: {v_default}") + else: + default = "{" + ", ".join(items_defaults) + "}" + return default, True + return "...", False + + def should_reexport(self, name: str, full_module: str, name_is_alias: bool) -> bool: + is_private = self.is_private_name(name, full_module + "." + name) + if ( + not name_is_alias + and name not in self.referenced_names + and (not self._all_ or name in self.IGNORED_DUNDERS) + and not is_private + and full_module not in ("abc", "asyncio") + self.TYPING_MODULE_NAMES + ): + # An imported name that is never referenced in the module is assumed to be + # exported, unless there is an explicit __all__. Note that we need to special + # case 'abc' since some references are deleted during semantic analysis. + return True + return super().should_reexport(name, full_module, name_is_alias) + + +def find_method_names(defs: list[Statement]) -> set[str]: + # TODO: Traverse into nested definitions + result = set() + for defn in defs: + if isinstance(defn, FuncDef): + result.add(defn.name) + elif isinstance(defn, Decorator): + result.add(defn.func.name) + elif isinstance(defn, OverloadedFuncDef): + for item in defn.items: + result.update(find_method_names([item])) + return result + + +class SelfTraverser(mypy.traverser.TraverserVisitor): + def __init__(self) -> None: + self.results: list[tuple[str, Expression, Type | None]] = [] + + def visit_assignment_stmt(self, o: AssignmentStmt) -> None: + lvalue = o.lvalues[0] + if ( + isinstance(lvalue, MemberExpr) + and isinstance(lvalue.expr, NameExpr) + and lvalue.expr.name == "self" + ): + self.results.append((lvalue.name, o.rvalue, o.unanalyzed_type)) + + +def find_self_initializers(fdef: FuncBase) -> list[tuple[str, Expression, Type | None]]: + """Find attribute initializers in a method. + + Return a list of pairs (attribute name, r.h.s. expression). 
+ """ + traverser = SelfTraverser() + fdef.accept(traverser) + return traverser.results + + +def get_qualified_name(o: Expression) -> str: + if isinstance(o, NameExpr): + return o.name + elif isinstance(o, MemberExpr): + return f"{get_qualified_name(o.expr)}.{o.name}" + else: + return ERROR_MARKER + + +def remove_blacklisted_modules(modules: list[StubSource]) -> list[StubSource]: + return [ + module for module in modules if module.path is None or not is_blacklisted_path(module.path) + ] + + +def split_pyc_from_py(modules: list[StubSource]) -> tuple[list[StubSource], list[StubSource]]: + py_modules = [] + pyc_modules = [] + for mod in modules: + if is_pyc_only(mod.path): + pyc_modules.append(mod) + else: + py_modules.append(mod) + return pyc_modules, py_modules + + +def is_blacklisted_path(path: str) -> bool: + return any(substr in (normalize_path_separators(path) + "\n") for substr in BLACKLIST) + + +def normalize_path_separators(path: str) -> str: + return path.replace("\\", "/") if sys.platform == "win32" else path + + +def collect_build_targets( + options: Options, mypy_opts: MypyOptions +) -> tuple[list[StubSource], list[StubSource], list[StubSource]]: + """Collect files for which we need to generate stubs. + + Return list of py modules, pyc modules, and C modules. + """ + if options.packages or options.modules: + if options.no_import: + py_modules = find_module_paths_using_search( + options.modules, options.packages, options.search_path, options.pyversion + ) + c_modules: list[StubSource] = [] + else: + # Using imports is the default, since we can also find C modules. + py_modules, c_modules = find_module_paths_using_imports( + options.modules, options.packages, options.verbose, options.quiet + ) + else: + # Use mypy native source collection for files and directories. + try: + source_list = create_source_list(options.files, mypy_opts) + except InvalidSourceList as e: + raise SystemExit(str(e)) from e + py_modules = [StubSource(m.module, m.path) for m in source_list] + c_modules = [] + + py_modules = remove_blacklisted_modules(py_modules) + pyc_mod, py_mod = split_pyc_from_py(py_modules) + return py_mod, pyc_mod, c_modules + + +def find_module_paths_using_imports( + modules: list[str], packages: list[str], verbose: bool, quiet: bool +) -> tuple[list[StubSource], list[StubSource]]: + """Find path and runtime value of __all__ (if possible) for modules and packages. + + This function uses runtime Python imports to get the information. 
+ """ + with ModuleInspect() as inspect: + py_modules: list[StubSource] = [] + c_modules: list[StubSource] = [] + found = list(walk_packages(inspect, packages, verbose)) + modules = modules + found + modules = [ + mod for mod in modules if not is_non_library_module(mod) + ] # We don't want to run any tests or scripts + for mod in modules: + try: + result = find_module_path_and_all_py3(inspect, mod, verbose) + except CantImport as e: + tb = traceback.format_exc() + if verbose: + sys.stderr.write(tb) + if not quiet: + report_missing(mod, e.message, tb) + continue + if not result: + c_modules.append(StubSource(mod)) + else: + path, runtime_all = result + py_modules.append(StubSource(mod, path, runtime_all)) + return py_modules, c_modules + + +def is_non_library_module(module: str) -> bool: + """Does module look like a test module or a script?""" + if module.endswith( + ( + ".tests", + ".test", + ".testing", + "_tests", + "_test_suite", + "test_util", + "test_utils", + "test_base", + ".__main__", + ".conftest", # Used by pytest + ".setup", # Typically an install script + ) + ): + return True + if module.split(".")[-1].startswith("test_"): + return True + if ( + ".tests." in module + or ".test." in module + or ".testing." in module + or ".SelfTest." in module + ): + return True + return False + + +def translate_module_name(module: str, relative: int) -> tuple[str, int]: + for pkg in VENDOR_PACKAGES: + for alt in "six.moves", "six": + substr = f"{pkg}.{alt}" + if module.endswith("." + substr) or (module == substr and relative): + return alt, 0 + if "." + substr + "." in module: + return alt + "." + module.partition("." + substr + ".")[2], 0 + return module, relative + + +def find_module_paths_using_search( + modules: list[str], packages: list[str], search_path: list[str], pyversion: tuple[int, int] +) -> list[StubSource]: + """Find sources for modules and packages requested. + + This function just looks for source files at the file system level. + This is used if user passes --no-import, and will not find C modules. + Exit if some of the modules or packages can't be found. 
+ """ + result: list[StubSource] = [] + typeshed_path = default_lib_path(mypy.build.default_data_dir(), pyversion, None) + search_paths = SearchPaths((".",) + tuple(search_path), (), (), tuple(typeshed_path)) + cache = FindModuleCache(search_paths, fscache=None, options=None) + for module in modules: + m_result = cache.find_module(module) + if isinstance(m_result, ModuleNotFoundReason): + fail_missing(module, m_result) + module_path = None + else: + module_path = m_result + result.append(StubSource(module, module_path)) + for package in packages: + p_result = cache.find_modules_recursive(package) + if p_result: + fail_missing(package, ModuleNotFoundReason.NOT_FOUND) + sources = [StubSource(m.module, m.path) for m in p_result] + result.extend(sources) + + result = [m for m in result if not is_non_library_module(m.module)] + + return result + + +def mypy_options(stubgen_options: Options) -> MypyOptions: + """Generate mypy options using the flag passed by user.""" + options = MypyOptions() + options.follow_imports = "skip" + options.incremental = False + options.ignore_errors = True + options.semantic_analysis_only = True + options.python_version = stubgen_options.pyversion + options.show_traceback = True + options.transform_source = remove_misplaced_type_comments + options.preserve_asts = True + options.include_docstrings = stubgen_options.include_docstrings + + # Override cache_dir if provided in the environment + environ_cache_dir = os.getenv("MYPY_CACHE_DIR", "") + if environ_cache_dir.strip(): + options.cache_dir = environ_cache_dir + options.cache_dir = os.path.expanduser(options.cache_dir) + + return options + + +def parse_source_file(mod: StubSource, mypy_options: MypyOptions) -> None: + """Parse a source file. + + On success, store AST in the corresponding attribute of the stub source. + If there are syntax errors, print them and exit. + """ + assert mod.path is not None, "Not found module was not skipped" + with open(mod.path, "rb") as f: + data = f.read() + source = mypy.util.decode_python_encoding(data) + errors = Errors(mypy_options) + mod.ast = mypy.parse.parse( + source, fnam=mod.path, module=mod.module, errors=errors, options=mypy_options + ) + mod.ast._fullname = mod.module + if errors.is_blockers(): + # Syntax error! + for m in errors.new_messages(): + sys.stderr.write(f"{m}\n") + sys.exit(1) + + +def generate_asts_for_modules( + py_modules: list[StubSource], parse_only: bool, mypy_options: MypyOptions, verbose: bool +) -> None: + """Use mypy to parse (and optionally analyze) source files.""" + if not py_modules: + return # Nothing to do here, but there may be C modules + if verbose: + print(f"Processing {len(py_modules)} files...") + if parse_only: + for mod in py_modules: + parse_source_file(mod, mypy_options) + return + # Perform full semantic analysis of the source set. + try: + res = build([module.source for module in py_modules], mypy_options) + except CompileError as e: + raise SystemExit(f"Critical error during semantic analysis: {e}") from e + + for mod in py_modules: + mod.ast = res.graph[mod.module].tree + # Use statically inferred __all__ if there is no runtime one. 
+ if mod.runtime_all is None: + mod.runtime_all = res.manager.semantic_analyzer.export_map[mod.module] + + +def generate_stub_for_py_module( + mod: StubSource, + target: str, + *, + parse_only: bool = False, + inspect: bool = False, + include_private: bool = False, + export_less: bool = False, + include_docstrings: bool = False, + doc_dir: str = "", + all_modules: list[str], +) -> None: + """Use analysed (or just parsed) AST to generate type stub for single file. + + If directory for target doesn't exist it will created. Existing stub + will be overwritten. + """ + if inspect: + ngen = InspectionStubGenerator( + module_name=mod.module, + known_modules=all_modules, + _all_=mod.runtime_all, + doc_dir=doc_dir, + include_private=include_private, + export_less=export_less, + include_docstrings=include_docstrings, + ) + ngen.generate_module() + output = ngen.output() + + else: + gen = ASTStubGenerator( + mod.runtime_all, + include_private=include_private, + analyzed=not parse_only, + export_less=export_less, + include_docstrings=include_docstrings, + ) + assert mod.ast is not None, "This function must be used only with analyzed modules" + mod.ast.accept(gen) + output = gen.output() + + # Write output to file. + subdir = os.path.dirname(target) + if subdir and not os.path.isdir(subdir): + os.makedirs(subdir) + with open(target, "w", encoding="utf-8") as file: + file.write(output) + + +def generate_stubs(options: Options) -> None: + """Main entry point for the program.""" + mypy_opts = mypy_options(options) + py_modules, pyc_modules, c_modules = collect_build_targets(options, mypy_opts) + all_modules = py_modules + pyc_modules + c_modules + all_module_names = sorted(m.module for m in all_modules) + # Use parsed sources to generate stubs for Python modules. + generate_asts_for_modules(py_modules, options.parse_only, mypy_opts, options.verbose) + files = [] + for mod in py_modules + pyc_modules: + assert mod.path is not None, "Not found module was not skipped" + target = mod.module.replace(".", "/") + if os.path.basename(mod.path) in ["__init__.py", "__init__.pyc"]: + target += "/__init__.pyi" + else: + target += ".pyi" + target = os.path.join(options.output_dir, target) + files.append(target) + with generate_guarded(mod.module, target, options.ignore_errors, options.verbose): + generate_stub_for_py_module( + mod, + target, + parse_only=options.parse_only, + inspect=options.inspect or mod in pyc_modules, + include_private=options.include_private, + export_less=options.export_less, + include_docstrings=options.include_docstrings, + doc_dir=options.doc_dir, + all_modules=all_module_names, + ) + + # Separately analyse C modules using different logic. 
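+ # (C extensions have no Python source to parse, so their stubs are built by
+ # importing the module and introspecting it at runtime via stubgenc.)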
+ for mod in c_modules: + if any(py_mod.module.startswith(mod.module + ".") for py_mod in all_modules): + target = mod.module.replace(".", "/") + "/__init__.pyi" + else: + target = mod.module.replace(".", "/") + ".pyi" + target = os.path.join(options.output_dir, target) + files.append(target) + with generate_guarded(mod.module, target, options.ignore_errors, options.verbose): + generate_stub_for_c_module( + mod.module, + target, + known_modules=all_module_names, + doc_dir=options.doc_dir, + include_private=options.include_private, + export_less=options.export_less, + include_docstrings=options.include_docstrings, + ) + num_modules = len(all_modules) + if not options.quiet and num_modules > 0: + print("Processed %d modules" % num_modules) + if len(files) == 1: + print(f"Generated {files[0]}") + else: + print(f"Generated files under {common_dir_prefix(files)}" + os.sep) + + +HEADER = """%(prog)s [-h] [more options, see -h] + [-m MODULE] [-p PACKAGE] [files ...]""" + +DESCRIPTION = """ +Generate draft stubs for modules. + +Stubs are generated in directory ./out, to avoid overriding files with +manual changes. This directory is assumed to exist. +""" + + +def parse_options(args: list[str]) -> Options: + parser = argparse.ArgumentParser( + prog="stubgen", usage=HEADER, description=DESCRIPTION, fromfile_prefix_chars="@" + ) + + parser.add_argument( + "--ignore-errors", + action="store_true", + help="ignore errors when trying to generate stubs for modules", + ) + parser.add_argument( + "--no-import", + action="store_true", + help="don't import the modules, just parse and analyze them " + "(doesn't work with C extension modules and might not " + "respect __all__)", + ) + parser.add_argument( + "--no-analysis", + "--parse-only", + dest="parse_only", + action="store_true", + help="don't perform semantic analysis of sources, just parse them " + "(only applies to Python modules, might affect quality of stubs. " + "Not compatible with --inspect-mode)", + ) + parser.add_argument( + "--inspect-mode", + dest="inspect", + action="store_true", + help="import and inspect modules instead of parsing source code." 
+ "This is the default behavior for c modules and pyc-only packages, but " + "it is also useful for pure python modules with dynamically generated members.", + ) + parser.add_argument( + "--include-private", + action="store_true", + help="generate stubs for objects and members considered private " + "(single leading underscore and no trailing underscores)", + ) + parser.add_argument( + "--export-less", + action="store_true", + help="don't implicitly export all names imported from other modules in the same package", + ) + parser.add_argument( + "--include-docstrings", + action="store_true", + help="include existing docstrings with the stubs", + ) + parser.add_argument("-v", "--verbose", action="store_true", help="show more verbose messages") + parser.add_argument("-q", "--quiet", action="store_true", help="show fewer messages") + parser.add_argument( + "--doc-dir", + metavar="PATH", + default="", + help="use .rst documentation in PATH (this may result in " + "better stubs in some cases; consider setting this to " + "DIR/Python-X.Y.Z/Doc/library)", + ) + parser.add_argument( + "--search-path", + metavar="PATH", + default="", + help="specify module search directories, separated by ':' " + "(currently only used if --no-import is given)", + ) + parser.add_argument( + "-o", + "--output", + metavar="PATH", + dest="output_dir", + default="out", + help="change the output directory [default: %(default)s]", + ) + parser.add_argument( + "-m", + "--module", + action="append", + metavar="MODULE", + dest="modules", + default=[], + help="generate stub for module; can repeat for more modules", + ) + parser.add_argument( + "-p", + "--package", + action="append", + metavar="PACKAGE", + dest="packages", + default=[], + help="generate stubs for package recursively; can be repeated", + ) + parser.add_argument( + metavar="files", + nargs="*", + dest="files", + help="generate stubs for given files or directories", + ) + parser.add_argument( + "--version", action="version", version="%(prog)s " + mypy.version.__version__ + ) + + ns = parser.parse_args(args) + + pyversion = sys.version_info[:2] + ns.interpreter = sys.executable + + if ns.modules + ns.packages and ns.files: + parser.error("May only specify one of: modules/packages or files.") + if ns.quiet and ns.verbose: + parser.error("Cannot specify both quiet and verbose messages") + if ns.inspect and ns.parse_only: + parser.error("Cannot specify both --parse-only/--no-analysis and --inspect-mode") + + # Create the output folder if it doesn't already exist. + os.makedirs(ns.output_dir, exist_ok=True) + + return Options( + pyversion=pyversion, + no_import=ns.no_import, + inspect=ns.inspect, + doc_dir=ns.doc_dir, + search_path=ns.search_path.split(":"), + interpreter=ns.interpreter, + ignore_errors=ns.ignore_errors, + parse_only=ns.parse_only, + include_private=ns.include_private, + output_dir=ns.output_dir, + modules=ns.modules, + packages=ns.packages, + files=ns.files, + verbose=ns.verbose, + quiet=ns.quiet, + export_less=ns.export_less, + include_docstrings=ns.include_docstrings, + ) + + +def main(args: list[str] | None = None) -> None: + mypy.util.check_python_version("stubgen") + # Make sure that the current directory is in sys.path so that + # stubgen can be run on packages in the current directory. + if not ("" in sys.path or "." 
in sys.path): + sys.path.insert(0, "") + + options = parse_options(sys.argv[1:] if args is None else args) + generate_stubs(options) + + +if __name__ == "__main__": + main() diff --git a/.venv/lib/python3.12/site-packages/mypy/stubgenc.py b/.venv/lib/python3.12/site-packages/mypy/stubgenc.py new file mode 100644 index 0000000..e0e0639 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/stubgenc.py @@ -0,0 +1,1046 @@ +#!/usr/bin/env python3 +"""Stub generator for C modules. + +The public interface is via the mypy.stubgen module. +""" + +from __future__ import annotations + +import enum +import glob +import importlib +import inspect +import keyword +import os.path +from collections.abc import Mapping +from types import FunctionType, ModuleType +from typing import Any, Callable + +from mypy.fastparse import parse_type_comment +from mypy.moduleinspect import is_c_module +from mypy.stubdoc import ( + ArgSig, + FunctionSig, + Sig, + find_unique_signatures, + infer_arg_sig_from_anon_docstring, + infer_prop_type_from_docstring, + infer_ret_type_sig_from_anon_docstring, + infer_ret_type_sig_from_docstring, + infer_sig_from_docstring, + parse_all_signatures, +) +from mypy.stubutil import ( + BaseStubGenerator, + ClassInfo, + FunctionContext, + SignatureGenerator, + infer_method_arg_types, + infer_method_ret_type, +) +from mypy.util import quote_docstring + + +class ExternalSignatureGenerator(SignatureGenerator): + def __init__( + self, func_sigs: dict[str, str] | None = None, class_sigs: dict[str, str] | None = None + ) -> None: + """ + Takes a mapping of function/method names to signatures and class name to + class signatures (usually corresponds to __init__). + """ + self.func_sigs = func_sigs or {} + self.class_sigs = class_sigs or {} + + @classmethod + def from_doc_dir(cls, doc_dir: str) -> ExternalSignatureGenerator: + """Instantiate from a directory of .rst files.""" + all_sigs: list[Sig] = [] + all_class_sigs: list[Sig] = [] + for path in glob.glob(f"{doc_dir}/*.rst"): + with open(path) as f: + loc_sigs, loc_class_sigs = parse_all_signatures(f.readlines()) + all_sigs += loc_sigs + all_class_sigs += loc_class_sigs + sigs = dict(find_unique_signatures(all_sigs)) + class_sigs = dict(find_unique_signatures(all_class_sigs)) + return ExternalSignatureGenerator(sigs, class_sigs) + + def get_function_sig( + self, default_sig: FunctionSig, ctx: FunctionContext + ) -> list[FunctionSig] | None: + # method: + if ( + ctx.class_info + and ctx.name in ("__new__", "__init__") + and ctx.name not in self.func_sigs + and ctx.class_info.name in self.class_sigs + ): + return [ + FunctionSig( + name=ctx.name, + args=infer_arg_sig_from_anon_docstring(self.class_sigs[ctx.class_info.name]), + ret_type=infer_method_ret_type(ctx.name), + ) + ] + + # function: + if ctx.name not in self.func_sigs: + return None + + inferred = [ + FunctionSig( + name=ctx.name, + args=infer_arg_sig_from_anon_docstring(self.func_sigs[ctx.name]), + ret_type=None, + ) + ] + if ctx.class_info: + return self.remove_self_type(inferred, ctx.class_info.self_var) + else: + return inferred + + def get_property_type(self, default_type: str | None, ctx: FunctionContext) -> str | None: + return None + + +class DocstringSignatureGenerator(SignatureGenerator): + def get_function_sig( + self, default_sig: FunctionSig, ctx: FunctionContext + ) -> list[FunctionSig] | None: + inferred = infer_sig_from_docstring(ctx.docstring, ctx.name) + if inferred: + assert ctx.docstring is not None + if is_pybind11_overloaded_function_docstring(ctx.docstring, 
ctx.name): + # Remove pybind11 umbrella (*args, **kwargs) for overloaded functions + del inferred[-1] + + if ctx.class_info: + if not inferred and ctx.name == "__init__": + # look for class-level constructor signatures of the form () + inferred = infer_sig_from_docstring(ctx.class_info.docstring, ctx.class_info.name) + if inferred: + inferred = [sig._replace(name="__init__") for sig in inferred] + return self.remove_self_type(inferred, ctx.class_info.self_var) + else: + return inferred + + def get_property_type(self, default_type: str | None, ctx: FunctionContext) -> str | None: + """Infer property type from docstring or docstring signature.""" + if ctx.docstring is not None: + inferred = infer_ret_type_sig_from_anon_docstring(ctx.docstring) + if inferred: + return inferred + inferred = infer_ret_type_sig_from_docstring(ctx.docstring, ctx.name) + if inferred: + return inferred + inferred = infer_prop_type_from_docstring(ctx.docstring) + return inferred + else: + return None + + +def is_pybind11_overloaded_function_docstring(docstring: str, name: str) -> bool: + return docstring.startswith(f"{name}(*args, **kwargs)\nOverloaded function.\n\n") + + +def generate_stub_for_c_module( + module_name: str, + target: str, + known_modules: list[str], + doc_dir: str = "", + *, + include_private: bool = False, + export_less: bool = False, + include_docstrings: bool = False, +) -> None: + """Generate stub for C module. + + Signature generators are called in order until a list of signatures is returned. The order + is: + - signatures inferred from .rst documentation (if given) + - simple runtime introspection (looking for docstrings and attributes + with simple builtin types) + - fallback based special method names or "(*args, **kwargs)" + + If directory for target doesn't exist it will be created. Existing stub + will be overwritten. + """ + subdir = os.path.dirname(target) + if subdir and not os.path.isdir(subdir): + os.makedirs(subdir) + + gen = InspectionStubGenerator( + module_name, + known_modules, + doc_dir, + include_private=include_private, + export_less=export_less, + include_docstrings=include_docstrings, + ) + gen.generate_module() + output = gen.output() + + with open(target, "w", encoding="utf-8") as file: + file.write(output) + + +class CFunctionStub: + """ + Class that mimics a C function in order to provide parseable docstrings. + """ + + def __init__(self, name: str, doc: str, is_abstract: bool = False) -> None: + self.__name__ = name + self.__doc__ = doc + self.__abstractmethod__ = is_abstract + + @classmethod + def _from_sig(cls, sig: FunctionSig, is_abstract: bool = False) -> CFunctionStub: + return CFunctionStub(sig.name, sig.format_sig()[:-4], is_abstract) + + @classmethod + def _from_sigs(cls, sigs: list[FunctionSig], is_abstract: bool = False) -> CFunctionStub: + return CFunctionStub( + sigs[0].name, "\n".join(sig.format_sig()[:-4] for sig in sigs), is_abstract + ) + + def __get__(self) -> None: # noqa: PLE0302 + """ + This exists to make this object look like a method descriptor and thus + return true for CStubGenerator.ismethod() + """ + pass + + +_Missing = enum.Enum("_Missing", "VALUE") + + +class InspectionStubGenerator(BaseStubGenerator): + """Stub generator that does not parse code. + + Generation is performed by inspecting the module's contents, and thus works + for highly dynamic modules, pyc files, and C modules (via the CStubGenerator + subclass). 
+ """ + + def __init__( + self, + module_name: str, + known_modules: list[str], + doc_dir: str = "", + _all_: list[str] | None = None, + include_private: bool = False, + export_less: bool = False, + include_docstrings: bool = False, + module: ModuleType | None = None, + ) -> None: + self.doc_dir = doc_dir + if module is None: + self.module = importlib.import_module(module_name) + else: + self.module = module + self.is_c_module = is_c_module(self.module) + self.known_modules = known_modules + self.resort_members = self.is_c_module + super().__init__(_all_, include_private, export_less, include_docstrings) + self.module_name = module_name + if self.is_c_module: + # Add additional implicit imports. + # C-extensions are given more latitude since they do not import the typing module. + self.known_imports.update( + { + "typing": [ + "Any", + "Callable", + "ClassVar", + "Dict", + "Iterable", + "Iterator", + "List", + "Literal", + "NamedTuple", + "Optional", + "Tuple", + "Union", + ] + } + ) + + def get_default_function_sig(self, func: object, ctx: FunctionContext) -> FunctionSig: + argspec = None + if not self.is_c_module: + # Get the full argument specification of the function + try: + argspec = inspect.getfullargspec(func) + except TypeError: + # some callables cannot be inspected, e.g. functools.partial + pass + if argspec is None: + if ctx.class_info is not None: + # method: + return FunctionSig( + name=ctx.name, + args=infer_c_method_args(ctx.name, ctx.class_info.self_var), + ret_type=infer_method_ret_type(ctx.name), + ) + else: + # function: + return FunctionSig( + name=ctx.name, + args=[ArgSig(name="*args"), ArgSig(name="**kwargs")], + ret_type=None, + ) + + # Extract the function arguments, defaults, and varargs + args = argspec.args + defaults = argspec.defaults + varargs = argspec.varargs + kwargs = argspec.varkw + annotations = argspec.annotations + kwonlyargs = argspec.kwonlyargs + kwonlydefaults = argspec.kwonlydefaults + + def get_annotation(key: str) -> str | None: + if key not in annotations: + return None + argtype = annotations[key] + if argtype is None: + return "None" + if not isinstance(argtype, str): + return self.get_type_fullname(argtype) + return argtype + + arglist: list[ArgSig] = [] + + # Add the arguments to the signature + def add_args( + args: list[str], get_default_value: Callable[[int, str], object | _Missing] + ) -> None: + for i, arg in enumerate(args): + # Check if the argument has a default value + default_value = get_default_value(i, arg) + if default_value is not _Missing.VALUE: + if arg in annotations: + argtype = get_annotation(arg) + else: + argtype = self.get_type_annotation(default_value) + if argtype == "None": + # None is not a useful annotation, but we can infer that the arg + # is optional + incomplete = self.add_name("_typeshed.Incomplete") + argtype = f"{incomplete} | None" + + arglist.append(ArgSig(arg, argtype, default=True)) + else: + arglist.append(ArgSig(arg, get_annotation(arg), default=False)) + + def get_pos_default(i: int, _arg: str) -> Any | _Missing: + if defaults and i >= len(args) - len(defaults): + return defaults[i - (len(args) - len(defaults))] + else: + return _Missing.VALUE + + add_args(args, get_pos_default) + + # Add *args if present + if varargs: + arglist.append(ArgSig(f"*{varargs}", get_annotation(varargs))) + # if we have keyword only args, then we need to add "*" + elif kwonlyargs: + arglist.append(ArgSig("*")) + + def get_kw_default(_i: int, arg: str) -> Any | _Missing: + if kwonlydefaults and arg in kwonlydefaults: + 
return kwonlydefaults[arg] + else: + return _Missing.VALUE + + add_args(kwonlyargs, get_kw_default) + + # Add **kwargs if present + if kwargs: + arglist.append(ArgSig(f"**{kwargs}", get_annotation(kwargs))) + + # add types for known special methods + if ctx.class_info is not None and all( + arg.type is None and arg.default is False for arg in arglist + ): + new_args = infer_method_arg_types( + ctx.name, ctx.class_info.self_var, [arg.name for arg in arglist if arg.name] + ) + if new_args is not None: + arglist = new_args + + ret_type = get_annotation("return") or infer_method_ret_type(ctx.name) + return FunctionSig(ctx.name, arglist, ret_type) + + def get_sig_generators(self) -> list[SignatureGenerator]: + if not self.is_c_module: + return [] + else: + sig_generators: list[SignatureGenerator] = [DocstringSignatureGenerator()] + if self.doc_dir: + # Collect info from docs (if given). Always check these first. + sig_generators.insert(0, ExternalSignatureGenerator.from_doc_dir(self.doc_dir)) + return sig_generators + + def strip_or_import(self, type_name: str) -> str: + """Strips unnecessary module names from typ. + + If typ represents a type that is inside module or is a type coming from builtins, remove + module declaration from it. Return stripped name of the type. + + Arguments: + typ: name of the type + """ + local_modules = ["builtins", self.module_name] + parsed_type = parse_type_comment(type_name, 0, 0, None)[1] + assert parsed_type is not None, type_name + return self.print_annotation(parsed_type, self.known_modules, local_modules) + + def get_obj_module(self, obj: object) -> str | None: + """Return module name of the object.""" + return getattr(obj, "__module__", None) + + def is_defined_in_module(self, obj: object) -> bool: + """Check if object is considered defined in the current module.""" + module = self.get_obj_module(obj) + return module is None or module == self.module_name + + def generate_module(self) -> None: + all_items = self.get_members(self.module) + if self.resort_members: + all_items = sorted(all_items, key=lambda x: x[0]) + items = [] + for name, obj in all_items: + if inspect.ismodule(obj) and obj.__name__ in self.known_modules: + module_name = obj.__name__ + if module_name.startswith(self.module_name + "."): + # from {.rel_name} import {mod_name} as {name} + pkg_name, mod_name = module_name.rsplit(".", 1) + rel_module = pkg_name[len(self.module_name) :] or "." 
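+ # (e.g. a submodule "pkg.sub.mod" exposed from package "pkg" is emitted as
+ # "from .sub import mod as name" in the generated stub.)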
+ self.import_tracker.add_import_from(rel_module, [(mod_name, name)]) + self.import_tracker.reexport(name) + else: + # import {module_name} as {name} + self.import_tracker.add_import(module_name, name) + self.import_tracker.reexport(name) + elif self.is_defined_in_module(obj) and not inspect.ismodule(obj): + # process this below + items.append((name, obj)) + else: + # from {obj_module} import {obj_name} + obj_module_name = self.get_obj_module(obj) + if obj_module_name: + self.import_tracker.add_import_from(obj_module_name, [(name, None)]) + if self.should_reexport(name, obj_module_name, name_is_alias=False): + self.import_tracker.reexport(name) + + self.set_defined_names({name for name, obj in all_items if not inspect.ismodule(obj)}) + + if self.resort_members: + functions: list[str] = [] + types: list[str] = [] + variables: list[str] = [] + else: + output: list[str] = [] + functions = types = variables = output + + for name, obj in items: + if self.is_function(obj): + self.generate_function_stub(name, obj, output=functions) + elif inspect.isclass(obj): + self.generate_class_stub(name, obj, output=types) + else: + self.generate_variable_stub(name, obj, output=variables) + + self._output = [] + + if self.resort_members: + for line in variables: + self._output.append(line + "\n") + for line in types: + if line.startswith("class") and self._output and self._output[-1]: + self._output.append("\n") + self._output.append(line + "\n") + if self._output and functions: + self._output.append("\n") + for line in functions: + self._output.append(line + "\n") + else: + for i, line in enumerate(output): + if ( + self._output + and line.startswith("class") + and ( + not self._output[-1].startswith("class") + or (len(output) > i + 1 and output[i + 1].startswith(" ")) + ) + ) or ( + self._output + and self._output[-1].startswith("def") + and not line.startswith("def") + ): + self._output.append("\n") + self._output.append(line + "\n") + self.check_undefined_names() + + def is_skipped_attribute(self, attr: str) -> bool: + return ( + attr + in ( + "__class__", + "__getattribute__", + "__str__", + "__repr__", + "__doc__", + "__dict__", + "__module__", + "__weakref__", + "__annotations__", + "__firstlineno__", + "__static_attributes__", + "__annotate__", + ) + or attr in self.IGNORED_DUNDERS + or is_pybind_skipped_attribute(attr) # For pickling + or keyword.iskeyword(attr) + ) + + def get_members(self, obj: object) -> list[tuple[str, Any]]: + obj_dict: Mapping[str, Any] = getattr(obj, "__dict__") # noqa: B009 + results = [] + for name in obj_dict: + if self.is_skipped_attribute(name): + continue + # Try to get the value via getattr + try: + value = getattr(obj, name) + except AttributeError: + continue + else: + results.append((name, value)) + return results + + def get_type_annotation(self, obj: object) -> str: + """ + Given an instance, return a string representation of its type that is valid + to use as a type annotation. 
+ """ + if obj is None or obj is type(None): + return "None" + elif inspect.isclass(obj): + return f"type[{self.get_type_fullname(obj)}]" + elif isinstance(obj, FunctionType): + return self.add_name("typing.Callable") + elif isinstance(obj, ModuleType): + return self.add_name("types.ModuleType", require=False) + else: + return self.get_type_fullname(type(obj)) + + def is_function(self, obj: object) -> bool: + if self.is_c_module: + return inspect.isbuiltin(obj) + else: + return inspect.isfunction(obj) + + def is_method(self, class_info: ClassInfo, name: str, obj: object) -> bool: + if self.is_c_module: + return inspect.ismethoddescriptor(obj) or type(obj) in ( + type(str.index), + type(str.__add__), + type(str.__new__), + ) + else: + # this is valid because it is only called on members of a class + return inspect.isfunction(obj) + + def is_classmethod(self, class_info: ClassInfo, name: str, obj: object) -> bool: + if self.is_c_module: + return inspect.isbuiltin(obj) or type(obj).__name__ in ( + "classmethod", + "classmethod_descriptor", + ) + else: + return inspect.ismethod(obj) + + def is_staticmethod(self, class_info: ClassInfo | None, name: str, obj: object) -> bool: + if class_info is None: + return False + elif self.is_c_module: + raw_lookup: Mapping[str, Any] = getattr(class_info.cls, "__dict__") # noqa: B009 + raw_value = raw_lookup.get(name, obj) + return isinstance(raw_value, staticmethod) + else: + return isinstance(inspect.getattr_static(class_info.cls, name), staticmethod) + + @staticmethod + def is_abstract_method(obj: object) -> bool: + return getattr(obj, "__abstractmethod__", False) + + @staticmethod + def is_property(class_info: ClassInfo, name: str, obj: object) -> bool: + return inspect.isdatadescriptor(obj) or hasattr(obj, "fget") + + @staticmethod + def is_property_readonly(prop: Any) -> bool: + return hasattr(prop, "fset") and prop.fset is None + + def is_static_property(self, obj: object) -> bool: + """For c-modules, whether the property behaves like an attribute""" + if self.is_c_module: + # StaticProperty is from boost-python + return type(obj).__name__ in ("pybind11_static_property", "StaticProperty") + else: + return False + + def process_inferred_sigs(self, inferred: list[FunctionSig]) -> None: + for i, sig in enumerate(inferred): + for arg in sig.args: + if arg.type is not None: + arg.type = self.strip_or_import(arg.type) + if sig.ret_type is not None: + inferred[i] = sig._replace(ret_type=self.strip_or_import(sig.ret_type)) + + def generate_function_stub( + self, name: str, obj: object, *, output: list[str], class_info: ClassInfo | None = None + ) -> None: + """Generate stub for a single function or method. + + The result (always a single line) will be appended to 'output'. + If necessary, any required names will be added to 'imports'. + The 'class_name' is used to find signature of __init__ or __new__ in + 'class_sigs'. 
+ """ + docstring: Any = getattr(obj, "__doc__", None) + if not isinstance(docstring, str): + docstring = None + + ctx = FunctionContext( + self.module_name, + name, + docstring=docstring, + is_abstract=self.is_abstract_method(obj), + class_info=class_info, + ) + if self.is_private_name(name, ctx.fullname) or self.is_not_in_all(name): + return + + self.record_name(ctx.name) + default_sig = self.get_default_function_sig(obj, ctx) + inferred = self.get_signatures(default_sig, self.sig_generators, ctx) + self.process_inferred_sigs(inferred) + + decorators = [] + if len(inferred) > 1: + decorators.append("@{}".format(self.add_name("typing.overload"))) + + if ctx.is_abstract: + decorators.append("@{}".format(self.add_name("abc.abstractmethod"))) + + if class_info is not None: + if self.is_staticmethod(class_info, name, obj): + decorators.append("@staticmethod") + else: + for sig in inferred: + if not sig.args or sig.args[0].name not in ("self", "cls"): + sig.args.insert(0, ArgSig(name=class_info.self_var)) + # a sig generator indicates @classmethod by specifying the cls arg. + if inferred[0].args and inferred[0].args[0].name == "cls": + decorators.append("@classmethod") + + docstring = self._indent_docstring(ctx.docstring) if ctx.docstring else None + output.extend(self.format_func_def(inferred, decorators=decorators, docstring=docstring)) + self._fix_iter(ctx, inferred, output) + + def _indent_docstring(self, docstring: str) -> str: + """Fix indentation of docstring extracted from pybind11 or other binding generators.""" + lines = docstring.splitlines(keepends=True) + indent = self._indent + " " + if len(lines) > 1: + if not all(line.startswith(indent) or not line.strip() for line in lines): + # if the docstring is not indented, then indent all but the first line + for i, line in enumerate(lines[1:]): + if line.strip(): + lines[i + 1] = indent + line + # if there's a trailing newline, add a final line to visually indent the quoted docstring + if lines[-1].endswith("\n"): + if len(lines) > 1: + lines.append(indent) + else: + lines[-1] = lines[-1][:-1] + return "".join(lines) + + def _fix_iter( + self, ctx: FunctionContext, inferred: list[FunctionSig], output: list[str] + ) -> None: + """Ensure that objects which implement old-style iteration via __getitem__ + are considered iterable. + """ + if ( + ctx.class_info + and ctx.class_info.cls is not None + and ctx.name == "__getitem__" + and "__iter__" not in ctx.class_info.cls.__dict__ + ): + item_type: str | None = None + for sig in inferred: + if sig.args and sig.args[-1].type == "int": + item_type = sig.ret_type + break + if item_type is None: + return + obj = CFunctionStub( + "__iter__", f"def __iter__(self) -> typing.Iterator[{item_type}]\n" + ) + self.generate_function_stub("__iter__", obj, output=output, class_info=ctx.class_info) + + def generate_property_stub( + self, + name: str, + raw_obj: object, + obj: object, + static_properties: list[str], + rw_properties: list[str], + ro_properties: list[str], + class_info: ClassInfo | None = None, + ) -> None: + """Generate property stub using introspection of 'obj'. + + Try to infer type from docstring, append resulting lines to 'output'. 
+ + raw_obj : object before evaluation of descriptor (if any) + obj : object after evaluation of descriptor + """ + + docstring = getattr(raw_obj, "__doc__", None) + fget = getattr(raw_obj, "fget", None) + if fget: + alt_docstr = getattr(fget, "__doc__", None) + if alt_docstr and docstring: + docstring += "\n" + alt_docstr + elif alt_docstr: + docstring = alt_docstr + + ctx = FunctionContext( + self.module_name, name, docstring=docstring, is_abstract=False, class_info=class_info + ) + + if self.is_private_name(name, ctx.fullname) or self.is_not_in_all(name): + return + + self.record_name(ctx.name) + static = self.is_static_property(raw_obj) + readonly = self.is_property_readonly(raw_obj) + if static: + ret_type: str | None = self.strip_or_import(self.get_type_annotation(obj)) + else: + default_sig = self.get_default_function_sig(raw_obj, ctx) + ret_type = default_sig.ret_type + + inferred_type = self.get_property_type(ret_type, self.sig_generators, ctx) + if inferred_type is not None: + inferred_type = self.strip_or_import(inferred_type) + + if static: + classvar = self.add_name("typing.ClassVar") + trailing_comment = " # read-only" if readonly else "" + if inferred_type is None: + inferred_type = self.add_name("_typeshed.Incomplete") + + static_properties.append( + f"{self._indent}{name}: {classvar}[{inferred_type}] = ...{trailing_comment}" + ) + else: # regular property + if readonly: + docstring = self._indent_docstring(ctx.docstring) if ctx.docstring else None + ro_properties.append(f"{self._indent}@property") + sig = FunctionSig(name, [ArgSig("self")], inferred_type, docstring=docstring) + ro_properties.append( + sig.format_sig( + indent=self._indent, include_docstrings=self._include_docstrings + ) + ) + else: + if inferred_type is None: + inferred_type = self.add_name("_typeshed.Incomplete") + + rw_properties.append(f"{self._indent}{name}: {inferred_type}") + + def get_type_fullname(self, typ: type) -> str: + """Given a type, return a string representation""" + if typ is Any: + return "Any" + typename = getattr(typ, "__qualname__", typ.__name__) + module_name = self.get_obj_module(typ) + if module_name is None: + # This should not normally happen, but some types may resist our + # introspection attempts too hard. See + # https://github.com/python/mypy/issues/19031 + return "_typeshed.Incomplete" + if module_name != "builtins": + typename = f"{module_name}.{typename}" + return typename + + def get_base_types(self, obj: type) -> list[str]: + all_bases = type.mro(obj) + if all_bases[-1] is object: + # TODO: Is this always object? + del all_bases[-1] + # remove pybind11_object. All classes generated by pybind11 have pybind11_object in their MRO, + # which only overrides a few functions in object type + if all_bases and all_bases[-1].__name__ == "pybind11_object": + del all_bases[-1] + # remove the class itself + all_bases = all_bases[1:] + # Remove base classes of other bases as redundant. + bases: list[type] = [] + for base in all_bases: + if not any(issubclass(b, base) for b in bases): + bases.append(base) + return [self.strip_or_import(self.get_type_fullname(base)) for base in bases] + + def generate_class_stub( + self, class_name: str, cls: type, output: list[str], parent_class: ClassInfo | None = None + ) -> None: + """Generate stub for a single class using runtime introspection. + + The result lines will be appended to 'output'. If necessary, any + required names will be added to 'imports'. 
+ """ + raw_lookup: Mapping[str, Any] = getattr(cls, "__dict__") # noqa: B009 + items = self.get_members(cls) + if self.resort_members: + items = sorted(items, key=lambda x: method_name_sort_key(x[0])) + names = {x[0] for x in items} + methods: list[str] = [] + types: list[str] = [] + static_properties: list[str] = [] + rw_properties: list[str] = [] + ro_properties: list[str] = [] + attrs: list[tuple[str, Any]] = [] + + self.record_name(class_name) + self.indent() + + class_info = ClassInfo( + class_name, "", getattr(cls, "__doc__", None), cls, parent=parent_class + ) + + for attr, value in items: + # use unevaluated descriptors when dealing with property inspection + raw_value = raw_lookup.get(attr, value) + if self.is_method(class_info, attr, value) or self.is_classmethod( + class_info, attr, value + ): + if attr == "__new__": + # TODO: We should support __new__. + if "__init__" in names: + # Avoid duplicate functions if both are present. + # But is there any case where .__new__() has a + # better signature than __init__() ? + continue + attr = "__init__" + # FIXME: make this nicer + if self.is_staticmethod(class_info, attr, value): + class_info.self_var = "" + elif self.is_classmethod(class_info, attr, value): + class_info.self_var = "cls" + else: + class_info.self_var = "self" + self.generate_function_stub(attr, value, output=methods, class_info=class_info) + elif self.is_property(class_info, attr, raw_value): + self.generate_property_stub( + attr, + raw_value, + value, + static_properties, + rw_properties, + ro_properties, + class_info, + ) + elif inspect.isclass(value) and self.is_defined_in_module(value): + self.generate_class_stub(attr, value, types, parent_class=class_info) + else: + attrs.append((attr, value)) + + for attr, value in attrs: + if attr == "__hash__" and value is None: + # special case for __hash__ + continue + prop_type_name = self.strip_or_import(self.get_type_annotation(value)) + classvar = self.add_name("typing.ClassVar") + static_properties.append(f"{self._indent}{attr}: {classvar}[{prop_type_name}] = ...") + + self.dedent() + + bases = self.get_base_types(cls) + if bases: + bases_str = "(%s)" % ", ".join(bases) + else: + bases_str = "" + + if class_info.docstring and self._include_docstrings: + doc = quote_docstring(self._indent_docstring(class_info.docstring)) + doc = f" {self._indent}{doc}" + docstring = doc.splitlines(keepends=False) + else: + docstring = [] + + if docstring or types or static_properties or rw_properties or methods or ro_properties: + output.append(f"{self._indent}class {class_name}{bases_str}:") + output.extend(docstring) + for line in types: + if ( + output + and output[-1] + and not output[-1].strip().startswith("class") + and line.strip().startswith("class") + ): + output.append("") + output.append(line) + output.extend(static_properties) + output.extend(rw_properties) + output.extend(methods) + output.extend(ro_properties) + else: + output.append(f"{self._indent}class {class_name}{bases_str}: ...") + + def generate_variable_stub(self, name: str, obj: object, output: list[str]) -> None: + """Generate stub for a single variable using runtime introspection. + + The result lines will be appended to 'output'. If necessary, any + required names will be added to 'imports'. 
+ """ + if self.is_private_name(name, f"{self.module_name}.{name}") or self.is_not_in_all(name): + return + self.record_name(name) + type_str = self.strip_or_import(self.get_type_annotation(obj)) + output.append(f"{name}: {type_str}") + + +def method_name_sort_key(name: str) -> tuple[int, str]: + """Sort methods in classes in a typical order. + + I.e.: constructor, normal methods, special methods. + """ + if name in ("__new__", "__init__"): + return 0, name + if name.startswith("__") and name.endswith("__"): + return 2, name + return 1, name + + +def is_pybind_skipped_attribute(attr: str) -> bool: + return attr.startswith("__pybind11_module_local_") + + +def infer_c_method_args( + name: str, self_var: str = "self", arg_names: list[str] | None = None +) -> list[ArgSig]: + args: list[ArgSig] | None = None + if name.startswith("__") and name.endswith("__"): + name = name[2:-2] + if name in ( + "hash", + "iter", + "next", + "sizeof", + "copy", + "deepcopy", + "reduce", + "getinitargs", + "int", + "float", + "trunc", + "complex", + "bool", + "abs", + "bytes", + "dir", + "len", + "reversed", + "round", + "index", + "enter", + ): + args = [] + elif name == "getitem": + args = [ArgSig(name="index")] + elif name == "setitem": + args = [ArgSig(name="index"), ArgSig(name="object")] + elif name in ("delattr", "getattr"): + args = [ArgSig(name="name")] + elif name == "setattr": + args = [ArgSig(name="name"), ArgSig(name="value")] + elif name == "getstate": + args = [] + elif name == "setstate": + args = [ArgSig(name="state")] + elif name in ("eq", "ne", "lt", "le", "gt", "ge"): + args = [ArgSig(name="other", type="object")] + elif name in ( + "add", + "radd", + "sub", + "rsub", + "mul", + "rmul", + "mod", + "rmod", + "floordiv", + "rfloordiv", + "truediv", + "rtruediv", + "divmod", + "rdivmod", + "pow", + "rpow", + "xor", + "rxor", + "or", + "ror", + "and", + "rand", + "lshift", + "rlshift", + "rshift", + "rrshift", + "contains", + "delitem", + "iadd", + "iand", + "ifloordiv", + "ilshift", + "imod", + "imul", + "ior", + "ipow", + "irshift", + "isub", + "itruediv", + "ixor", + ): + args = [ArgSig(name="other")] + elif name in ("neg", "pos", "invert"): + args = [] + elif name == "get": + args = [ArgSig(name="instance"), ArgSig(name="owner")] + elif name == "set": + args = [ArgSig(name="instance"), ArgSig(name="value")] + elif name == "reduce_ex": + args = [ArgSig(name="protocol")] + elif name == "exit": + args = [ + ArgSig(name="type", type="type[BaseException] | None"), + ArgSig(name="value", type="BaseException | None"), + ArgSig(name="traceback", type="types.TracebackType | None"), + ] + if args is None: + args = infer_method_arg_types(name, self_var, arg_names) + else: + args = [ArgSig(name=self_var)] + args + if args is None: + args = [ArgSig(name="*args"), ArgSig(name="**kwargs")] + return args diff --git a/.venv/lib/python3.12/site-packages/mypy/stubinfo.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/stubinfo.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..0143395 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/stubinfo.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/stubinfo.py b/.venv/lib/python3.12/site-packages/mypy/stubinfo.py new file mode 100644 index 0000000..42e53ba --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/stubinfo.py @@ -0,0 +1,318 @@ +from __future__ import annotations + + +def is_module_from_legacy_bundled_package(module: str) -> bool: + top_level = 
module.split(".", 1)[0] + return top_level in legacy_bundled_packages + + +def stub_distribution_name(module: str) -> str | None: + top_level = module.split(".", 1)[0] + + dist = legacy_bundled_packages.get(top_level) + if dist: + return dist + dist = non_bundled_packages_flat.get(top_level) + if dist: + return dist + + if top_level in non_bundled_packages_namespace: + namespace = non_bundled_packages_namespace[top_level] + components = module.split(".") + for i in range(len(components), 0, -1): + module = ".".join(components[:i]) + dist = namespace.get(module) + if dist: + return dist + + return None + + +# Stubs for these third-party packages used to be shipped with mypy. +# +# Map package name to PyPI stub distribution name. +legacy_bundled_packages: dict[str, str] = { + "aiofiles": "types-aiofiles", + "bleach": "types-bleach", + "cachetools": "types-cachetools", + "click_spinner": "types-click-spinner", + "croniter": "types-croniter", + "dateparser": "types-dateparser", + "dateutil": "types-python-dateutil", + "decorator": "types-decorator", + "deprecated": "types-Deprecated", + "docutils": "types-docutils", + "first": "types-first", + "markdown": "types-Markdown", + "mock": "types-mock", + "paramiko": "types-paramiko", + "polib": "types-polib", + "pycurl": "types-pycurl", + "pymysql": "types-PyMySQL", + "pyrfc3339": "types-pyRFC3339", + "pytz": "types-pytz", + "requests": "types-requests", + "retry": "types-retry", + "simplejson": "types-simplejson", + "singledispatch": "types-singledispatch", + "six": "types-six", + "tabulate": "types-tabulate", + "toml": "types-toml", + "ujson": "types-ujson", + "waitress": "types-waitress", + "yaml": "types-PyYAML", +} + +# Map package name to PyPI stub distribution name from typeshed. +# Stubs for these packages were never bundled with mypy. Don't +# include packages that have a release that includes PEP 561 type +# information. +# +# Note that these packages are omitted for now: +# pika: typeshed's stubs are on PyPI as types-pika-ts. +# types-pika already exists on PyPI, and is more complete in many ways, +# but is a non-typeshed stubs package. 
+non_bundled_packages_flat: dict[str, str] = { + "_cffi_backend": "types-cffi", + "_jsonnet": "types-jsonnet", + "_win32typing": "types-pywin32", + "antlr4": "types-antlr4-python3-runtime", + "assertpy": "types-assertpy", + "auth0": "types-auth0-python", + "authlib": "types-Authlib", + "aws_xray_sdk": "types-aws-xray-sdk", + "binaryornot": "types-binaryornot", + "boltons": "types-boltons", + "braintree": "types-braintree", + "bugbear": "types-flake8-bugbear", + "capturer": "types-capturer", + "cffi": "types-cffi", + "channels": "types-channels", + "chevron": "types-chevron", + "click_default_group": "types-click-default-group", + "click_log": "types-click-log", + "click_shell": "types-click-shell", + "click_web": "types-click-web", + "colorama": "types-colorama", + "commctrl": "types-pywin32", + "consolemenu": "types-console-menu", + "convertdate": "types-convertdate", + "cronlog": "types-python-crontab", + "crontab": "types-python-crontab", + "crontabs": "types-python-crontab", + "dateparser_data": "types-dateparser", + "dde": "types-pywin32", + "defusedxml": "types-defusedxml", + "dirhash": "types-dirhash", + "django_filters": "types-django-filter", + "docker": "types-docker", + "dockerfile_parse": "types-dockerfile-parse", + "editdistance": "types-editdistance", + "entrypoints": "types-entrypoints", + "exifread": "types-ExifRead", + "fanstatic": "types-fanstatic", + "farmhash": "types-pyfarmhash", + "flake8_builtins": "types-flake8-builtins", + "flake8_docstrings": "types-flake8-docstrings", + "flake8_rst_docstrings": "types-flake8-rst-docstrings", + "flake8_simplify": "types-flake8-simplify", + "flake8_typing_imports": "types-flake8-typing-imports", + "flake8": "types-flake8", + "flask_cors": "types-Flask-Cors", + "flask_migrate": "types-Flask-Migrate", + "flask_socketio": "types-Flask-SocketIO", + "fpdf": "types-fpdf2", + "gdb": "types-gdb", + "geopandas": "types-geopandas", + "gevent": "types-gevent", + "greenlet": "types-greenlet", + "grpc_channelz": "types-grpcio-channelz", + "grpc_health": "types-grpcio-health-checking", + "grpc_reflection": "types-grpcio-reflection", + "grpc_status": "types-grpcio-status", + "grpc": "types-grpcio", + "hdbcli": "types-hdbcli", + "hnswlib": "types-hnswlib", + "html5lib": "types-html5lib", + "httplib2": "types-httplib2", + "hvac": "types-hvac", + "ibm_db": "types-ibm-db", + "icalendar": "types-icalendar", + "import_export": "types-django-import-export", + "inifile": "types-inifile", + "isapi": "types-pywin32", + "jack": "types-JACK-Client", + "jenkins": "types-python-jenkins", + "Jetson": "types-Jetson.GPIO", + "jks": "types-pyjks", + "jmespath": "types-jmespath", + "jose": "types-python-jose", + "jsonschema": "types-jsonschema", + "jwcrypto": "types-jwcrypto", + "keyboard": "types-keyboard", + "ldap3": "types-ldap3", + "lunardate": "types-lunardate", + "lupa": "types-lupa", + "lzstring": "types-lzstring", + "m3u8": "types-m3u8", + "management": "types-django-import-export", + "mmapfile": "types-pywin32", + "mmsystem": "types-pywin32", + "mypy_extensions": "types-mypy-extensions", + "MySQLdb": "types-mysqlclient", + "nanoid": "types-nanoid", + "nanoleafapi": "types-nanoleafapi", + "netaddr": "types-netaddr", + "netifaces": "types-netifaces", + "networkx": "types-networkx", + "nmap": "types-python-nmap", + "ntsecuritycon": "types-pywin32", + "oauthlib": "types-oauthlib", + "objgraph": "types-objgraph", + "odbc": "types-pywin32", + "olefile": "types-olefile", + "openpyxl": "types-openpyxl", + "opentracing": "types-opentracing", + "parsimonious": 
"types-parsimonious", + "passlib": "types-passlib", + "passpy": "types-passpy", + "peewee": "types-peewee", + "pep8ext_naming": "types-pep8-naming", + "perfmon": "types-pywin32", + "pexpect": "types-pexpect", + "playhouse": "types-peewee", + "pony": "types-pony", + "portpicker": "types-portpicker", + "psutil": "types-psutil", + "psycopg2": "types-psycopg2", + "pyasn1": "types-pyasn1", + "pyaudio": "types-pyaudio", + "pyautogui": "types-PyAutoGUI", + "pycocotools": "types-pycocotools", + "pyflakes": "types-pyflakes", + "pygments": "types-Pygments", + "pyi_splash": "types-pyinstaller", + "PyInstaller": "types-pyinstaller", + "pyluach": "types-pyluach", + "pymeeus": "types-PyMeeus", + "pynput": "types-pynput", + "pyperclip": "types-pyperclip", + "pyscreeze": "types-PyScreeze", + "pysftp": "types-pysftp", + "pytest_lazyfixture": "types-pytest-lazy-fixture", + "python_http_client": "types-python-http-client", + "pythoncom": "types-pywin32", + "pythonwin": "types-pywin32", + "pywintypes": "types-pywin32", + "qrbill": "types-qrbill", + "qrcode": "types-qrcode", + "ratelimit": "types-ratelimit", + "regex": "types-regex", + "regutil": "types-pywin32", + "reportlab": "types-reportlab", + "requests_oauthlib": "types-requests-oauthlib", + "rfc3339_validator": "types-rfc3339-validator", + "RPi": "types-RPi.GPIO", + "s2clientprotocol": "types-s2clientprotocol", + "sass": "types-libsass", + "sassutils": "types-libsass", + "seaborn": "types-seaborn", + "send2trash": "types-Send2Trash", + "serial": "types-pyserial", + "servicemanager": "types-pywin32", + "setuptools": "types-setuptools", + "shapely": "types-shapely", + "slumber": "types-slumber", + "socks": "types-PySocks", + "sockshandler": "types-PySocks", + "sspicon": "types-pywin32", + "str2bool": "types-str2bool", + "tensorflow": "types-tensorflow", + "tgcrypto": "types-TgCrypto", + "timer": "types-pywin32", + "toposort": "types-toposort", + "tqdm": "types-tqdm", + "translationstring": "types-translationstring", + "ttkthemes": "types-ttkthemes", + "unidiff": "types-unidiff", + "untangle": "types-untangle", + "usersettings": "types-usersettings", + "uwsgi": "types-uWSGI", + "uwsgidecorators": "types-uWSGI", + "vobject": "types-vobject", + "watchpoints": "types-watchpoints", + "webob": "types-WebOb", + "whatthepatch": "types-whatthepatch", + "win2kras": "types-pywin32", + "win32": "types-pywin32", + "win32api": "types-pywin32", + "win32clipboard": "types-pywin32", + "win32com": "types-pywin32", + "win32comext": "types-pywin32", + "win32con": "types-pywin32", + "win32console": "types-pywin32", + "win32cred": "types-pywin32", + "win32crypt": "types-pywin32", + "win32cryptcon": "types-pywin32", + "win32event": "types-pywin32", + "win32evtlog": "types-pywin32", + "win32evtlogutil": "types-pywin32", + "win32file": "types-pywin32", + "win32gui_struct": "types-pywin32", + "win32gui": "types-pywin32", + "win32help": "types-pywin32", + "win32inet": "types-pywin32", + "win32inetcon": "types-pywin32", + "win32job": "types-pywin32", + "win32lz": "types-pywin32", + "win32net": "types-pywin32", + "win32netcon": "types-pywin32", + "win32pdh": "types-pywin32", + "win32pdhquery": "types-pywin32", + "win32pipe": "types-pywin32", + "win32print": "types-pywin32", + "win32process": "types-pywin32", + "win32profile": "types-pywin32", + "win32ras": "types-pywin32", + "win32security": "types-pywin32", + "win32service": "types-pywin32", + "win32serviceutil": "types-pywin32", + "win32timezone": "types-pywin32", + "win32trace": "types-pywin32", + "win32transaction": 
"types-pywin32", + "win32ts": "types-pywin32", + "win32ui": "types-pywin32", + "win32uiole": "types-pywin32", + "win32verstamp": "types-pywin32", + "win32wnet": "types-pywin32", + "winerror": "types-pywin32", + "winioctlcon": "types-pywin32", + "winnt": "types-pywin32", + "winperf": "types-pywin32", + "winxpgui": "types-pywin32", + "winxptheme": "types-pywin32", + "workalendar": "types-workalendar", + "wtforms": "types-WTForms", + "wurlitzer": "types-wurlitzer", + "xdg": "types-pyxdg", + "xdgenvpy": "types-xdgenvpy", + "Xlib": "types-python-xlib", + "xlrd": "types-xlrd", + "xmltodict": "types-xmltodict", + "yt_dlp": "types-yt-dlp", + "zstd": "types-zstd", + "zxcvbn": "types-zxcvbn", + # Stub packages that are not from typeshed + # Since these can be installed automatically via --install-types, we have a high trust bar + # for additions here + "pandas": "pandas-stubs", # https://github.com/pandas-dev/pandas-stubs + "lxml": "lxml-stubs", # https://github.com/lxml/lxml-stubs + "scipy": "scipy-stubs", # https://github.com/scipy/scipy-stubs +} + + +non_bundled_packages_namespace: dict[str, dict[str, str]] = { + "backports": {"backports.ssl_match_hostname": "types-backports.ssl_match_hostname"}, + "google": {"google.cloud.ndb": "types-google-cloud-ndb", "google.protobuf": "types-protobuf"}, + "paho": {"paho.mqtt": "types-paho-mqtt"}, +} diff --git a/.venv/lib/python3.12/site-packages/mypy/stubtest.py b/.venv/lib/python3.12/site-packages/mypy/stubtest.py new file mode 100644 index 0000000..ada56a2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/stubtest.py @@ -0,0 +1,2464 @@ +"""Tests for stubs. + +Verify that various things in stubs are consistent with how things behave at runtime. + +""" + +from __future__ import annotations + +import argparse +import collections.abc +import copy +import enum +import functools +import importlib +import importlib.machinery +import inspect +import os +import pkgutil +import re +import struct +import symtable +import sys +import traceback +import types +import typing +import typing_extensions +import warnings +from collections import defaultdict +from collections.abc import Iterator, Set as AbstractSet +from contextlib import redirect_stderr, redirect_stdout +from functools import singledispatch +from pathlib import Path +from typing import Any, Final, Generic, TypeVar, Union +from typing_extensions import get_origin, is_typeddict + +import mypy.build +import mypy.checkexpr +import mypy.checkmember +import mypy.erasetype +import mypy.modulefinder +import mypy.nodes +import mypy.state +import mypy.types +import mypy.version +from mypy import nodes +from mypy.config_parser import parse_config_file +from mypy.evalexpr import UNKNOWN, evaluate_expression +from mypy.options import Options +from mypy.util import FancyFormatter, bytes_to_human_readable_repr, is_dunder, plural_s + + +class Missing: + """Marker object for things that are missing (from a stub or the runtime).""" + + def __repr__(self) -> str: + return "MISSING" + + +MISSING: Final = Missing() + +T = TypeVar("T") +MaybeMissing: typing_extensions.TypeAlias = Union[T, Missing] + + +class Unrepresentable: + """Marker object for unrepresentable parameter defaults.""" + + def __repr__(self) -> str: + return "" + + +UNREPRESENTABLE: Final = Unrepresentable() + + +_formatter: Final = FancyFormatter(sys.stdout, sys.stderr, False) + + +def _style(message: str, **kwargs: Any) -> str: + """Wrapper around mypy.util for fancy formatting.""" + kwargs.setdefault("color", "none") + return 
_formatter.style(message, **kwargs) + + +def _truncate(message: str, length: int) -> str: + if len(message) > length: + return message[: length - 3] + "..." + return message + + +class StubtestFailure(Exception): + pass + + +class Error: + def __init__( + self, + object_path: list[str], + message: str, + stub_object: MaybeMissing[nodes.Node], + runtime_object: MaybeMissing[Any], + *, + stub_desc: str | None = None, + runtime_desc: str | None = None, + ) -> None: + """Represents an error found by stubtest. + + :param object_path: Location of the object with the error, + e.g. ``["module", "Class", "method"]`` + :param message: Error message + :param stub_object: The mypy node representing the stub + :param runtime_object: Actual object obtained from the runtime + :param stub_desc: Specialised description for the stub object, should you wish + :param runtime_desc: Specialised description for the runtime object, should you wish + + """ + self.object_path = object_path + self.object_desc = ".".join(object_path) + self.message = message + self.stub_object = stub_object + self.runtime_object = runtime_object + self.stub_desc = stub_desc or str(getattr(stub_object, "type", stub_object)) + + if runtime_desc is None: + runtime_sig = safe_inspect_signature(runtime_object) + if runtime_sig is None: + self.runtime_desc = _truncate(repr(runtime_object), 100) + else: + runtime_is_async = inspect.iscoroutinefunction(runtime_object) + description = describe_runtime_callable(runtime_sig, is_async=runtime_is_async) + self.runtime_desc = _truncate(description, 100) + else: + self.runtime_desc = runtime_desc + + def is_missing_stub(self) -> bool: + """Whether or not the error is for something missing from the stub.""" + return isinstance(self.stub_object, Missing) + + def is_positional_only_related(self) -> bool: + """Whether or not the error is for something being (or not being) positional-only.""" + # TODO: This is hacky, use error codes or something more resilient + return "should be positional" in self.message + + def is_disjoint_base_related(self) -> bool: + """Whether or not the error is related to @disjoint_base.""" + # TODO: This is hacky, use error codes or something more resilient + return "@disjoint_base" in self.message + + def get_description(self, concise: bool = False) -> str: + """Returns a description of the error. 
+ + :param concise: Whether to return a concise, one-line description + + """ + if concise: + return _style(self.object_desc, bold=True) + " " + self.message + + stub_line = None + stub_file = None + if not isinstance(self.stub_object, Missing): + stub_line = self.stub_object.line + stub_node = get_stub(self.object_path[0]) + if stub_node is not None: + stub_file = stub_node.path or None + + stub_loc_str = "" + if stub_file: + stub_loc_str += f" in file {Path(stub_file)}" + if stub_line: + stub_loc_str += f"{':' if stub_file else ' at line '}{stub_line}" + + runtime_line = None + runtime_file = None + if not isinstance(self.runtime_object, Missing): + try: + runtime_line = inspect.getsourcelines(self.runtime_object)[1] + except (OSError, TypeError, SyntaxError): + pass + try: + runtime_file = inspect.getsourcefile(self.runtime_object) + except TypeError: + pass + + runtime_loc_str = "" + if runtime_file: + runtime_loc_str += f" in file {Path(runtime_file)}" + if runtime_line: + runtime_loc_str += f"{':' if runtime_file else ' at line '}{runtime_line}" + + output = [ + _style("error: ", color="red", bold=True), + _style(self.object_desc, bold=True), + " ", + self.message, + "\n", + "Stub:", + _style(stub_loc_str, dim=True), + "\n", + _style(self.stub_desc + "\n", color="blue", dim=True), + "Runtime:", + _style(runtime_loc_str, dim=True), + "\n", + _style(self.runtime_desc + "\n", color="blue", dim=True), + ] + return "".join(output) + + +# ==================== +# Core logic +# ==================== + + +def silent_import_module(module_name: str) -> types.ModuleType: + with open(os.devnull, "w") as devnull: + with warnings.catch_warnings(), redirect_stdout(devnull), redirect_stderr(devnull): + warnings.simplefilter("ignore") + runtime = importlib.import_module(module_name) + # Also run the equivalent of `from module import *` + # This could have the additional effect of loading not-yet-loaded submodules + # mentioned in __all__ + __import__(module_name, fromlist=["*"]) + return runtime + + +def test_module(module_name: str) -> Iterator[Error]: + """Tests a given module's stub against introspecting it at runtime. + + Requires the stub to have been built already, accomplished by a call to ``build_stubs``. + + :param module_name: The module to test + + """ + stub = get_stub(module_name) + if stub is None: + if not is_probably_private(module_name.split(".")[-1]): + runtime_desc = repr(sys.modules[module_name]) if module_name in sys.modules else "N/A" + yield Error( + [module_name], "failed to find stubs", MISSING, None, runtime_desc=runtime_desc + ) + return + + try: + runtime = silent_import_module(module_name) + except KeyboardInterrupt: + raise + except BaseException as e: + note = "" + if isinstance(e, ModuleNotFoundError): + note = " Maybe install the runtime package or alter PYTHONPATH?" + yield Error( + [module_name], f"failed to import.{note} {type(e).__name__}: {e}", stub, MISSING + ) + return + + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + try: + yield from verify(stub, runtime, [module_name]) + except Exception as e: + bottom_frame = list(traceback.walk_tb(e.__traceback__))[-1][0] + bottom_module = bottom_frame.f_globals.get("__name__", "") + # Pass on any errors originating from stubtest or mypy + # These can occur expectedly, e.g. 
StubtestFailure + if bottom_module == "__main__" or bottom_module.split(".")[0] == "mypy": + raise + yield Error( + [module_name], + f"encountered unexpected error, {type(e).__name__}: {e}", + stub, + runtime, + stub_desc="N/A", + runtime_desc=( + "This is most likely the fault of something very dynamic in your library. " + "It's also possible this is a bug in stubtest.\nIf in doubt, please " + "open an issue at https://github.com/python/mypy\n\n" + + traceback.format_exc().strip() + ), + ) + + +@singledispatch +def verify( + stub: MaybeMissing[nodes.Node], runtime: MaybeMissing[Any], object_path: list[str] +) -> Iterator[Error]: + """Entry point for comparing a stub to a runtime object. + + We use single dispatch based on the type of ``stub``. + + :param stub: The mypy node representing a part of the stub + :param runtime: The runtime object corresponding to ``stub`` + + """ + yield Error(object_path, "is an unknown mypy node", stub, runtime) + + +def _verify_exported_names( + object_path: list[str], stub: nodes.MypyFile, runtime_all_as_set: set[str] +) -> Iterator[Error]: + # note that this includes the case the stub simply defines `__all__: list[str]` + assert "__all__" in stub.names + public_names_in_stub = {m for m, o in stub.names.items() if o.module_public} + names_in_stub_not_runtime = sorted(public_names_in_stub - runtime_all_as_set) + names_in_runtime_not_stub = sorted(runtime_all_as_set - public_names_in_stub) + if not (names_in_runtime_not_stub or names_in_stub_not_runtime): + return + yield Error( + object_path + ["__all__"], + ( + "names exported from the stub do not correspond to the names exported at runtime. " + "This is probably due to things being missing from the stub or an inaccurate `__all__` in the stub" + ), + # Pass in MISSING instead of the stub and runtime objects, as the line numbers aren't very + # relevant here, and it makes for a prettier error message + # This means this error will be ignored when using `--ignore-missing-stub`, which is + # desirable in at least the `names_in_runtime_not_stub` case + stub_object=MISSING, + runtime_object=MISSING, + stub_desc=(f"Names exported in the stub but not at runtime: {names_in_stub_not_runtime}"), + runtime_desc=( + f"Names exported at runtime but not in the stub: {names_in_runtime_not_stub}" + ), + ) + + +@functools.lru_cache +def _module_symbol_table(runtime: types.ModuleType) -> symtable.SymbolTable | None: + """Retrieve the symbol table for the module (or None on failure). 
+ + 1) Use inspect to retrieve the source code of the module + 2) Use symtable to parse the source (and use what symtable knows for its purposes) + """ + try: + source = inspect.getsource(runtime) + except (OSError, TypeError, SyntaxError): + return None + + try: + return symtable.symtable(source, runtime.__name__, "exec") + except SyntaxError: + return None + + +@verify.register(nodes.MypyFile) +def verify_mypyfile( + stub: nodes.MypyFile, runtime: MaybeMissing[types.ModuleType], object_path: list[str] +) -> Iterator[Error]: + if isinstance(runtime, Missing): + yield Error(object_path, "is not present at runtime", stub, runtime) + return + if not isinstance(runtime, types.ModuleType): + # Can possibly happen: + yield Error(object_path, "is not a module", stub, runtime) # type: ignore[unreachable] + return + + runtime_all_as_set: set[str] | None + + if hasattr(runtime, "__all__"): + runtime_all_as_set = set(runtime.__all__) + if "__all__" in stub.names: + # Only verify the contents of the stub's __all__ + # if the stub actually defines __all__ + yield from _verify_exported_names(object_path, stub, runtime_all_as_set) + else: + yield Error(object_path + ["__all__"], "is not present in stub", MISSING, runtime) + else: + runtime_all_as_set = None + + # Check things in the stub + to_check = { + m + for m, o in stub.names.items() + if not o.module_hidden and (not is_probably_private(m) or hasattr(runtime, m)) + } + + def _belongs_to_runtime(r: types.ModuleType, attr: str) -> bool: + """Heuristics to determine whether a name originates from another module.""" + obj = getattr(r, attr) + if isinstance(obj, types.ModuleType): + return False + + symbol_table = _module_symbol_table(r) + if symbol_table is not None: + try: + symbol = symbol_table.lookup(attr) + except KeyError: + pass + else: + if symbol.is_imported(): + # symtable says we got this from another module + return False + # But we can't just return True here, because symtable doesn't know about symbols + # that come from `from module import *` + if symbol.is_assigned(): + # symtable knows we assigned this symbol in the module + return True + + # The __module__ attribute is unreliable for anything except functions and classes, + # but it's our best guess at this point + try: + obj_mod = obj.__module__ + except Exception: + pass + else: + if isinstance(obj_mod, str): + return bool(obj_mod == r.__name__) + return True + + runtime_public_contents = ( + runtime_all_as_set + if runtime_all_as_set is not None + else { + m + for m in dir(runtime) + if not is_probably_private(m) + # Filter out objects that originate from other modules (best effort). Note that in the + # absence of __all__, we don't have a way to detect explicit / intentional re-exports + # at runtime + and _belongs_to_runtime(runtime, m) + } + ) + # Check all things declared in module's __all__, falling back to our best guess + to_check.update(runtime_public_contents) + to_check.difference_update(IGNORED_MODULE_DUNDERS) + + for entry in sorted(to_check): + stub_entry = stub.names[entry].node if entry in stub.names else MISSING + if isinstance(stub_entry, nodes.MypyFile): + # Don't recursively check exported modules, since that leads to infinite recursion + continue + assert stub_entry is not None + try: + runtime_entry = getattr(runtime, entry, MISSING) + except Exception: + # Catch all exceptions in case the runtime raises an unexpected exception + # from __getattr__ or similar. 
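+ # (A module-level __getattr__, as allowed by PEP 562, may raise arbitrary
+ # errors for names it does not recognise; such entries are skipped rather
+ # than reported.)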
+ continue + yield from verify(stub_entry, runtime_entry, object_path + [entry]) + + +def _verify_final( + stub: nodes.TypeInfo, runtime: type[Any], object_path: list[str] +) -> Iterator[Error]: + try: + + class SubClass(runtime): # type: ignore[misc] + pass + + except TypeError: + # Enum classes are implicitly @final + if not stub.is_final and not issubclass(runtime, enum.Enum): + yield Error( + object_path, + "cannot be subclassed at runtime, but isn't marked with @final in the stub", + stub, + runtime, + stub_desc=repr(stub), + ) + except Exception: + # The class probably wants its subclasses to do something special. + # Examples: ctypes.Array, ctypes._SimpleCData + pass + + # Runtime class might be annotated with `@final`: + try: + runtime_final = getattr(runtime, "__final__", False) + except Exception: + runtime_final = False + + if runtime_final and not stub.is_final: + yield Error( + object_path, + "has `__final__` attribute, but isn't marked with @final in the stub", + stub, + runtime, + stub_desc=repr(stub), + ) + + +SIZEOF_PYOBJECT = struct.calcsize("P") + + +def _shape_differs(t1: type[object], t2: type[object]) -> bool: + """Check whether two types differ in shape. + + Mirrors the shape_differs() function in typeobject.c in CPython.""" + if sys.version_info >= (3, 12): + return t1.__basicsize__ != t2.__basicsize__ or t1.__itemsize__ != t2.__itemsize__ + else: + # CPython had more complicated logic before 3.12: + # https://github.com/python/cpython/blob/f3c6f882cddc8dc30320d2e73edf019e201394fc/Objects/typeobject.c#L2224 + # We attempt to mirror it here well enough to support the most common cases. + if t1.__itemsize__ or t2.__itemsize__: + return t1.__basicsize__ != t2.__basicsize__ or t1.__itemsize__ != t2.__itemsize__ + t_size = t1.__basicsize__ + if not t2.__weakrefoffset__ and t1.__weakrefoffset__ + SIZEOF_PYOBJECT == t_size: + t_size -= SIZEOF_PYOBJECT + if not t2.__dictoffset__ and t1.__dictoffset__ + SIZEOF_PYOBJECT == t_size: + t_size -= SIZEOF_PYOBJECT + if not t2.__weakrefoffset__ and t2.__weakrefoffset__ == t_size: + t_size -= SIZEOF_PYOBJECT + return t_size != t2.__basicsize__ + + +def _is_disjoint_base(typ: type[object]) -> bool: + """Return whether a type is a disjoint base at runtime, mirroring CPython's logic in typeobject.c. + + See PEP 800.""" + if typ is object: + return True + base = typ.__base__ + assert base is not None, f"Type {typ} has no base" + return _shape_differs(typ, base) + + +def _verify_disjoint_base( + stub: nodes.TypeInfo, runtime: type[object], object_path: list[str] +) -> Iterator[Error]: + is_disjoint_runtime = _is_disjoint_base(runtime) + # Don't complain about missing @disjoint_base if there are __slots__, because + # in that case we can infer that it's a disjoint base. 
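+ # For example, an extension type whose instances carry extra C-level fields has a
+ # larger __basicsize__ than its base, so _shape_differs() above treats it as a
+ # disjoint base even without an explicit marker.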
+ if ( + is_disjoint_runtime + and not stub.is_disjoint_base + and not runtime.__dict__.get("__slots__") + and not stub.is_final + and not (stub.is_enum and stub.enum_members) + ): + yield Error( + object_path, + "is a disjoint base at runtime, but isn't marked with @disjoint_base in the stub", + stub, + runtime, + stub_desc=repr(stub), + ) + elif stub.is_disjoint_base: + if not is_disjoint_runtime: + yield Error( + object_path, + "is marked with @disjoint_base in the stub, but isn't a disjoint base at runtime", + stub, + runtime, + stub_desc=repr(stub), + ) + if runtime.__dict__.get("__slots__"): + yield Error( + object_path, + "is marked as @disjoint_base, but also has slots; add __slots__ instead", + stub, + runtime, + stub_desc=repr(stub), + ) + elif stub.is_final: + yield Error( + object_path, + "is marked as @disjoint_base, but also marked as @final; remove @disjoint_base", + stub, + runtime, + stub_desc=repr(stub), + ) + elif stub.is_enum and stub.enum_members: + yield Error( + object_path, + "is marked as @disjoint_base, but is an enum with members, which is implicitly final; " + "remove @disjoint_base", + stub, + runtime, + stub_desc=repr(stub), + ) + + +def _verify_metaclass( + stub: nodes.TypeInfo, runtime: type[Any], object_path: list[str], *, is_runtime_typeddict: bool +) -> Iterator[Error]: + # We exclude protocols, because of how complex their implementation is in different versions of + # python. Enums are also hard, as are runtime TypedDicts; ignoring. + # TODO: check that metaclasses are identical? + if not stub.is_protocol and not stub.is_enum and not is_runtime_typeddict: + runtime_metaclass = type(runtime) + if runtime_metaclass is not type and stub.metaclass_type is None: + # This means that runtime has a custom metaclass, but a stub does not. + yield Error( + object_path, + "is inconsistent, metaclass differs", + stub, + runtime, + stub_desc="N/A", + runtime_desc=f"{runtime_metaclass}", + ) + elif ( + runtime_metaclass is type + and stub.metaclass_type is not None + # We ignore extra `ABCMeta` metaclass on stubs, this might be typing hack. + # We also ignore `builtins.type` metaclass as an implementation detail in mypy. + and not mypy.types.is_named_instance( + stub.metaclass_type, ("abc.ABCMeta", "builtins.type") + ) + ): + # This means that our stub has a metaclass that is not present at runtime. + yield Error( + object_path, + "metaclass mismatch", + stub, + runtime, + stub_desc=f"{stub.metaclass_type.type.fullname}", + runtime_desc="N/A", + ) + + +@verify.register(nodes.TypeInfo) +def verify_typeinfo( + stub: nodes.TypeInfo, + runtime: MaybeMissing[type[Any]], + object_path: list[str], + *, + is_alias_target: bool = False, +) -> Iterator[Error]: + if stub.is_type_check_only and not is_alias_target: + # This type only exists in stubs, we only check that the runtime part + # is missing. Other checks are not required. + if not isinstance(runtime, Missing): + yield Error( + object_path, + 'is marked as "@type_check_only", but also exists at runtime', + stub, + runtime, + stub_desc=repr(stub), + ) + return + + if isinstance(runtime, Missing): + yield Error(object_path, "is not present at runtime", stub, runtime, stub_desc=repr(stub)) + return + if not isinstance(runtime, type): + # Yes, some runtime objects can be not types, no way to tell mypy about that. 
+ yield Error(object_path, "is not a type", stub, runtime, stub_desc=repr(stub)) # type: ignore[unreachable] + return + + yield from _verify_final(stub, runtime, object_path) + yield from _verify_disjoint_base(stub, runtime, object_path) + is_runtime_typeddict = stub.typeddict_type is not None and is_typeddict(runtime) + yield from _verify_metaclass( + stub, runtime, object_path, is_runtime_typeddict=is_runtime_typeddict + ) + + # Check everything already defined on the stub class itself (i.e. not inherited) + # + # Filter out non-identifier names, as these are (hopefully always?) whacky/fictional things + # (like __mypy-replace or __mypy-post_init, etc.) that don't exist at runtime, + # and exist purely for internal mypy reasons + to_check = {name for name in stub.names if name.isidentifier()} + # Check all public things on the runtime class + to_check.update( + m for m in vars(runtime) if not is_probably_private(m) and m not in IGNORABLE_CLASS_DUNDERS + ) + # Special-case the __init__ method for Protocols and the __new__ method for TypedDicts + # + # TODO: On Python <3.11, __init__ methods on Protocol classes + # are silently discarded and replaced. + # However, this is not the case on Python 3.11+. + # Ideally, we'd figure out a good way of validating Protocol __init__ methods on 3.11+. + if stub.is_protocol: + to_check.discard("__init__") + if is_runtime_typeddict: + to_check.discard("__new__") + + for entry in sorted(to_check): + mangled_entry = entry + if entry.startswith("__") and not entry.endswith("__"): + mangled_entry = f"_{stub.name.lstrip('_')}{entry}" + stub_to_verify = next((t.names[entry].node for t in stub.mro if entry in t.names), MISSING) + assert stub_to_verify is not None + try: + try: + runtime_attr = getattr(runtime, mangled_entry) + except AttributeError: + runtime_attr = inspect.getattr_static(runtime, mangled_entry, MISSING) + except Exception: + # Catch all exceptions in case the runtime raises an unexpected exception + # from __getattr__ or similar. + continue + + # If it came from the metaclass, consider the runtime_attr to be MISSING + # for a more accurate message + if ( + runtime_attr is not MISSING + and type(runtime) is not runtime + and getattr(runtime_attr, "__objclass__", None) is type(runtime) + ): + runtime_attr = MISSING + + # __setattr__ and __delattr__ on object are a special case, + # so if we only have these methods inherited from there, pretend that + # we don't have them. See python/typeshed#7385. + if ( + entry in ("__setattr__", "__delattr__") + and runtime_attr is not MISSING + and runtime is not object + and getattr(runtime_attr, "__objclass__", None) is object + ): + runtime_attr = MISSING + + # Do not error for an object missing from the stub + # If the runtime object is a types.WrapperDescriptorType object + # and has a non-special dunder name. + # The vast majority of these are false positives. + if not ( + isinstance(stub_to_verify, Missing) + and isinstance(runtime_attr, types.WrapperDescriptorType) + and is_dunder(mangled_entry, exclude_special=True) + ): + yield from verify(stub_to_verify, runtime_attr, object_path + [entry]) + + +def _static_lookup_runtime(object_path: list[str]) -> MaybeMissing[Any]: + static_runtime = importlib.import_module(object_path[0]) + for entry in object_path[1:]: + try: + static_runtime = inspect.getattr_static(static_runtime, entry) + except AttributeError: + # This can happen with mangled names, ignore for now. 
+ # TODO: pass more information about ancestors of nodes/objects to verify, so we don't + # have to do this hacky lookup. Would be useful in several places. + return MISSING + return static_runtime + + +def _verify_static_class_methods( + stub: nodes.FuncBase, runtime: Any, static_runtime: MaybeMissing[Any], object_path: list[str] +) -> Iterator[str]: + if stub.name in ("__new__", "__init_subclass__", "__class_getitem__"): + # Special cased by Python, so don't bother checking + return + if inspect.isbuiltin(runtime): + # The isinstance checks don't work reliably for builtins, e.g. datetime.datetime.now, so do + # something a little hacky that seems to work well + probably_class_method = isinstance(getattr(runtime, "__self__", None), type) + if probably_class_method and not stub.is_class: + yield "runtime is a classmethod but stub is not" + if not probably_class_method and stub.is_class: + yield "stub is a classmethod but runtime is not" + return + + if static_runtime is MISSING: + return + + if isinstance(static_runtime, classmethod) and not stub.is_class: + yield "runtime is a classmethod but stub is not" + if not isinstance(static_runtime, classmethod) and stub.is_class: + yield "stub is a classmethod but runtime is not" + if isinstance(static_runtime, staticmethod) and not stub.is_static: + yield "runtime is a staticmethod but stub is not" + if not isinstance(static_runtime, staticmethod) and stub.is_static: + yield "stub is a staticmethod but runtime is not" + + +def _verify_arg_name( + stub_arg: nodes.Argument, runtime_arg: inspect.Parameter, function_name: str +) -> Iterator[str]: + """Checks whether argument names match.""" + # Ignore exact names for most dunder methods + if is_dunder(function_name, exclude_special=True): + return + + if ( + stub_arg.variable.name == runtime_arg.name + or stub_arg.variable.name.removeprefix("__") == runtime_arg.name + ): + return + + nonspecific_names = {"object", "args"} + if runtime_arg.name in nonspecific_names: + return + + def names_approx_match(a: str, b: str) -> bool: + a = a.strip("_") + b = b.strip("_") + return a.startswith(b) or b.startswith(a) or len(a) == 1 or len(b) == 1 + + # Be more permissive about names matching for positional-only arguments + if runtime_arg.kind == inspect.Parameter.POSITIONAL_ONLY and names_approx_match( + stub_arg.variable.name, runtime_arg.name + ): + return + # This comes up with namedtuples, so ignore + if stub_arg.variable.name == "_self": + return + yield ( + f'stub parameter "{stub_arg.variable.name}" ' + f'differs from runtime parameter "{runtime_arg.name}"' + ) + + +def _verify_arg_default_value( + stub_arg: nodes.Argument, runtime_arg: inspect.Parameter +) -> Iterator[str]: + """Checks whether argument default values are compatible.""" + if runtime_arg.default is not inspect.Parameter.empty: + if stub_arg.kind.is_required(): + yield ( + f'runtime parameter "{runtime_arg.name}" ' + "has a default value but stub parameter does not" + ) + else: + type_context = stub_arg.variable.type + runtime_type = get_mypy_type_of_runtime_value( + runtime_arg.default, type_context=type_context + ) + + # Fallback to the type annotation type if var type is missing. The type annotation + # is an UnboundType, but I don't know enough to know what the pros and cons here are. + # UnboundTypes have ugly question marks following them, so default to var type. 
+ # Note we do this same fallback when constructing signatures in from_overloadedfuncdef + stub_type = stub_arg.variable.type or stub_arg.type_annotation + if isinstance(stub_type, mypy.types.TypeVarType): + stub_type = stub_type.upper_bound + if ( + runtime_type is not None + and stub_type is not None + # Avoid false positives for marker objects + and type(runtime_arg.default) is not object + # And ellipsis + and runtime_arg.default is not ... + and not is_subtype_helper(runtime_type, stub_type) + ): + yield ( + f'runtime parameter "{runtime_arg.name}" ' + f"has a default value of type {runtime_type}, " + f"which is incompatible with stub parameter type {stub_type}" + ) + if stub_arg.initializer is not None: + stub_default = evaluate_expression(stub_arg.initializer) + if ( + stub_default is not UNKNOWN + and stub_default is not ... + and runtime_arg.default is not UNREPRESENTABLE + ): + defaults_match = True + # We want the types to match exactly, e.g. in case the stub has + # True and the runtime has 1 (or vice versa). + if type(stub_default) is not type(runtime_arg.default): + defaults_match = False + else: + try: + defaults_match = bool(stub_default == runtime_arg.default) + except Exception: + # Exception can be raised in bool dunder method (e.g. numpy arrays) + # At this point, consider the default to be different, it is probably + # too complex to put in a stub anyway. + defaults_match = False + if not defaults_match: + yield ( + f'runtime parameter "{runtime_arg.name}" ' + f"has a default value of {runtime_arg.default!r}, " + f"which is different from stub parameter default {stub_default!r}" + ) + else: + if stub_arg.kind.is_optional(): + yield ( + f'stub parameter "{stub_arg.variable.name}" has a default value ' + f"but runtime parameter does not" + ) + + +def maybe_strip_cls(name: str, args: list[nodes.Argument]) -> list[nodes.Argument]: + if args and name in ("__init_subclass__", "__class_getitem__"): + # These are implicitly classmethods. If the stub chooses not to have @classmethod, we + # should remove the cls argument + if args[0].variable.name == "cls": + return args[1:] + return args + + +class Signature(Generic[T]): + def __init__(self) -> None: + self.pos: list[T] = [] + self.kwonly: dict[str, T] = {} + self.varpos: T | None = None + self.varkw: T | None = None + + def __str__(self) -> str: + def get_name(arg: Any) -> str: + if isinstance(arg, inspect.Parameter): + return arg.name + if isinstance(arg, nodes.Argument): + return arg.variable.name + raise AssertionError + + def get_type(arg: Any) -> str | None: + if isinstance(arg, inspect.Parameter): + return None + if isinstance(arg, nodes.Argument): + return str(arg.variable.type or arg.type_annotation) + raise AssertionError + + def has_default(arg: Any) -> bool: + if isinstance(arg, inspect.Parameter): + return arg.default is not inspect.Parameter.empty + if isinstance(arg, nodes.Argument): + return arg.kind.is_optional() + raise AssertionError + + def get_desc(arg: Any) -> str: + arg_type = get_type(arg) + return ( + get_name(arg) + + (f": {arg_type}" if arg_type else "") + + (" = ..." 
if has_default(arg) else "") + ) + + kw_only = sorted(self.kwonly.values(), key=lambda a: (has_default(a), get_name(a))) + ret = "def (" + ret += ", ".join( + [get_desc(arg) for arg in self.pos] + + (["*" + get_name(self.varpos)] if self.varpos else (["*"] if self.kwonly else [])) + + [get_desc(arg) for arg in kw_only] + + (["**" + get_name(self.varkw)] if self.varkw else []) + ) + ret += ")" + return ret + + @staticmethod + def from_funcitem(stub: nodes.FuncItem) -> Signature[nodes.Argument]: + stub_sig: Signature[nodes.Argument] = Signature() + stub_args = maybe_strip_cls(stub.name, stub.arguments) + for stub_arg in stub_args: + if stub_arg.kind.is_positional(): + stub_sig.pos.append(stub_arg) + elif stub_arg.kind.is_named(): + stub_sig.kwonly[stub_arg.variable.name] = stub_arg + elif stub_arg.kind == nodes.ARG_STAR: + stub_sig.varpos = stub_arg + elif stub_arg.kind == nodes.ARG_STAR2: + stub_sig.varkw = stub_arg + else: + raise AssertionError + return stub_sig + + @staticmethod + def from_inspect_signature(signature: inspect.Signature) -> Signature[inspect.Parameter]: + runtime_sig: Signature[inspect.Parameter] = Signature() + for runtime_arg in signature.parameters.values(): + if runtime_arg.kind in ( + inspect.Parameter.POSITIONAL_ONLY, + inspect.Parameter.POSITIONAL_OR_KEYWORD, + ): + runtime_sig.pos.append(runtime_arg) + elif runtime_arg.kind == inspect.Parameter.KEYWORD_ONLY: + runtime_sig.kwonly[runtime_arg.name] = runtime_arg + elif runtime_arg.kind == inspect.Parameter.VAR_POSITIONAL: + runtime_sig.varpos = runtime_arg + elif runtime_arg.kind == inspect.Parameter.VAR_KEYWORD: + runtime_sig.varkw = runtime_arg + else: + raise AssertionError + return runtime_sig + + @staticmethod + def from_overloadedfuncdef(stub: nodes.OverloadedFuncDef) -> Signature[nodes.Argument]: + """Returns a Signature from an OverloadedFuncDef. + + If life were simple, to verify_overloadedfuncdef, we'd just verify_funcitem for each of its + items. Unfortunately, life isn't simple and overloads are pretty deceitful. So instead, we + try and combine the overload's items into a single signature that is compatible with any + lies it might try to tell. + + """ + # For most dunder methods, just assume all args are positional-only + assume_positional_only = is_dunder(stub.name, exclude_special=True) + + is_arg_pos_only: defaultdict[str, set[bool]] = defaultdict(set) + for func in map(_resolve_funcitem_from_decorator, stub.items): + assert func is not None, f"Failed to resolve decorated overload of {stub.fullname!r}" + args = maybe_strip_cls(stub.name, func.arguments) + for index, arg in enumerate(args): + if ( + arg.variable.name.startswith("__") + or arg.pos_only + or assume_positional_only + or arg.variable.name.strip("_") == "self" + or (index == 0 and arg.variable.name.strip("_") == "cls") + ): + is_arg_pos_only[arg.variable.name].add(True) + else: + is_arg_pos_only[arg.variable.name].add(False) + + all_args: dict[str, list[tuple[nodes.Argument, int]]] = {} + for func in map(_resolve_funcitem_from_decorator, stub.items): + assert func is not None, f"Failed to resolve decorated overload of {stub.fullname!r}" + args = maybe_strip_cls(stub.name, func.arguments) + for index, arg in enumerate(args): + # For positional-only args, we allow overloads to have different names for the same + # argument. To accomplish this, we just make up a fake index-based name. + # We can only use the index-based name if the argument is always + # positional only. 
Sometimes overloads have an arg as positional-only + # in some but not all branches of the overload. + name = arg.variable.name + if is_arg_pos_only[name] == {True}: + name = f"__{index}" + + all_args.setdefault(name, []).append((arg, index)) + + def get_position(arg_name: str) -> int: + # We just need this to return the positional args in the correct order. + return max(index for _, index in all_args[arg_name]) + + def get_type(arg_name: str) -> mypy.types.ProperType: + with mypy.state.state.strict_optional_set(True): + all_types = [ + arg.variable.type or arg.type_annotation for arg, _ in all_args[arg_name] + ] + return mypy.typeops.make_simplified_union([t for t in all_types if t]) + + def get_kind(arg_name: str) -> nodes.ArgKind: + kinds = {arg.kind for arg, _ in all_args[arg_name]} + if nodes.ARG_STAR in kinds: + return nodes.ARG_STAR + if nodes.ARG_STAR2 in kinds: + return nodes.ARG_STAR2 + # The logic here is based on two tenets: + # 1) If an arg is ever optional (or unspecified), it is optional + # 2) If an arg is ever positional, it is positional + is_opt = ( + len(all_args[arg_name]) < len(stub.items) + or nodes.ARG_OPT in kinds + or nodes.ARG_NAMED_OPT in kinds + ) + is_pos = nodes.ARG_OPT in kinds or nodes.ARG_POS in kinds + if is_opt: + return nodes.ARG_OPT if is_pos else nodes.ARG_NAMED_OPT + return nodes.ARG_POS if is_pos else nodes.ARG_NAMED + + sig: Signature[nodes.Argument] = Signature() + for arg_name in sorted(all_args, key=get_position): + # example_arg_name gives us a real name (in case we had a fake index-based name) + example_arg_name = all_args[arg_name][0][0].variable.name + arg = nodes.Argument( + nodes.Var(example_arg_name, get_type(arg_name)), + type_annotation=None, + initializer=None, + kind=get_kind(arg_name), + pos_only=all(arg.pos_only for arg, _ in all_args[arg_name]), + ) + if arg.kind.is_positional(): + sig.pos.append(arg) + elif arg.kind.is_named(): + sig.kwonly[arg.variable.name] = arg + elif arg.kind == nodes.ARG_STAR: + sig.varpos = arg + elif arg.kind == nodes.ARG_STAR2: + sig.varkw = arg + else: + raise AssertionError + return sig + + +def _verify_signature( + stub: Signature[nodes.Argument], + runtime: Signature[inspect.Parameter], + function_name: str, + warn_runtime_is_object_init: bool = False, +) -> Iterator[str]: + # Check positional arguments match up + for stub_arg, runtime_arg in zip(stub.pos, runtime.pos): + yield from _verify_arg_name(stub_arg, runtime_arg, function_name) + yield from _verify_arg_default_value(stub_arg, runtime_arg) + if ( + runtime_arg.kind == inspect.Parameter.POSITIONAL_ONLY + and not stub_arg.pos_only + and not stub_arg.variable.name.startswith("__") + and stub_arg.variable.name.strip("_") != "self" + and stub_arg.variable.name.strip("_") != "cls" + and not is_dunder(function_name, exclude_special=True) # noisy for dunder methods + ): + yield ( + f'stub parameter "{stub_arg.variable.name}" should be positional-only ' + f'(add "/", e.g. 
"{runtime_arg.name}, /")' + ) + if ( + runtime_arg.kind != inspect.Parameter.POSITIONAL_ONLY + and (stub_arg.pos_only or stub_arg.variable.name.startswith("__")) + and not runtime_arg.name.startswith("__") + and stub_arg.variable.name.strip("_") != "self" + and stub_arg.variable.name.strip("_") != "cls" + and not is_dunder(function_name, exclude_special=True) # noisy for dunder methods + ): + yield ( + f'stub parameter "{stub_arg.variable.name}" should be positional or keyword ' + '(remove "/")' + ) + + # Check unmatched positional args + if len(stub.pos) > len(runtime.pos): + # There are cases where the stub exhaustively lists out the extra parameters the function + # would take through *args. Hence, a) if runtime accepts *args, we don't check whether the + # runtime has all of the stub's parameters, b) below, we don't enforce that the stub takes + # *args, since runtime logic may prevent arbitrary arguments from actually being accepted. + if runtime.varpos is None: + for stub_arg in stub.pos[len(runtime.pos) :]: + # If the variable is in runtime.kwonly, it's just mislabelled as not a + # keyword-only argument + if stub_arg.variable.name not in runtime.kwonly: + msg = f'runtime does not have parameter "{stub_arg.variable.name}"' + if runtime.varkw is not None: + msg += ". Maybe you forgot to make it keyword-only in the stub?" + elif warn_runtime_is_object_init: + msg += ". You may need to write stubs for __new__ instead of __init__." + yield msg + else: + yield f'stub parameter "{stub_arg.variable.name}" is not keyword-only' + if stub.varpos is not None: + yield f'runtime does not have *args parameter "{stub.varpos.variable.name}"' + elif len(stub.pos) < len(runtime.pos): + for runtime_arg in runtime.pos[len(stub.pos) :]: + if runtime_arg.name not in stub.kwonly: + if not _is_private_parameter(runtime_arg): + yield f'stub does not have parameter "{runtime_arg.name}"' + else: + yield f'runtime parameter "{runtime_arg.name}" is not keyword-only' + + # Checks involving *args + if len(stub.pos) <= len(runtime.pos) or runtime.varpos is None: + if stub.varpos is None and runtime.varpos is not None: + yield f'stub does not have *args parameter "{runtime.varpos.name}"' + if stub.varpos is not None and runtime.varpos is None: + yield f'runtime does not have *args parameter "{stub.varpos.variable.name}"' + + # Check keyword-only args + for arg in sorted(set(stub.kwonly) & set(runtime.kwonly)): + stub_arg, runtime_arg = stub.kwonly[arg], runtime.kwonly[arg] + yield from _verify_arg_name(stub_arg, runtime_arg, function_name) + yield from _verify_arg_default_value(stub_arg, runtime_arg) + + # Check unmatched keyword-only args + if runtime.varkw is None or not set(runtime.kwonly).issubset(set(stub.kwonly)): + # There are cases where the stub exhaustively lists out the extra parameters the function + # would take through **kwargs. Hence, a) if runtime accepts **kwargs (and the stub hasn't + # exhaustively listed out params), we don't check whether the runtime has all of the stub's + # parameters, b) below, we don't enforce that the stub takes **kwargs, since runtime logic + # may prevent arbitrary keyword arguments from actually being accepted. 
+ for arg in sorted(set(stub.kwonly) - set(runtime.kwonly)): + if arg in {runtime_arg.name for runtime_arg in runtime.pos}: + # Don't report this if we've reported it before + if arg not in {runtime_arg.name for runtime_arg in runtime.pos[len(stub.pos) :]}: + yield f'runtime parameter "{arg}" is not keyword-only' + else: + msg = f'runtime does not have parameter "{arg}"' + if warn_runtime_is_object_init: + msg += ". You may need to write stubs for __new__ instead of __init__." + yield msg + + for arg in sorted(set(runtime.kwonly) - set(stub.kwonly)): + if arg in {stub_arg.variable.name for stub_arg in stub.pos}: + # Don't report this if we've reported it before + if not ( + runtime.varpos is None + and arg in {stub_arg.variable.name for stub_arg in stub.pos[len(runtime.pos) :]} + ): + yield f'stub parameter "{arg}" is not keyword-only' + else: + if not _is_private_parameter(runtime.kwonly[arg]): + yield f'stub does not have parameter "{arg}"' + + # Checks involving **kwargs + if stub.varkw is None and runtime.varkw is not None: + # As mentioned above, don't enforce that the stub takes **kwargs. + # Also check against positional parameters, to avoid a nitpicky message when an argument + # isn't marked as keyword-only + stub_pos_names = {stub_arg.variable.name for stub_arg in stub.pos} + # Ideally we'd do a strict subset check, but in practice the errors from that aren't useful + if not set(runtime.kwonly).issubset(set(stub.kwonly) | stub_pos_names): + yield f'stub does not have **kwargs parameter "{runtime.varkw.name}"' + if stub.varkw is not None and runtime.varkw is None: + yield f'runtime does not have **kwargs parameter "{stub.varkw.variable.name}"' + + +def _is_private_parameter(arg: inspect.Parameter) -> bool: + return ( + arg.name.startswith("_") + and not arg.name.startswith("__") + and arg.default is not inspect.Parameter.empty + ) + + +@verify.register(nodes.FuncItem) +def verify_funcitem( + stub: nodes.FuncItem, runtime: MaybeMissing[Any], object_path: list[str] +) -> Iterator[Error]: + if isinstance(runtime, Missing): + yield Error(object_path, "is not present at runtime", stub, runtime) + return + + if not is_probably_a_function(runtime): + yield Error(object_path, "is not a function", stub, runtime) + if not callable(runtime): + return + + # Look the object up statically, to avoid binding by the descriptor protocol + static_runtime = _static_lookup_runtime(object_path) + + if isinstance(stub, nodes.FuncDef): + for error_text in _verify_abstract_status(stub, runtime): + yield Error(object_path, error_text, stub, runtime) + for error_text in _verify_final_method(stub, runtime, static_runtime): + yield Error(object_path, error_text, stub, runtime) + + for message in _verify_static_class_methods(stub, runtime, static_runtime, object_path): + yield Error(object_path, "is inconsistent, " + message, stub, runtime) + + signature = safe_inspect_signature(runtime) + runtime_is_coroutine = inspect.iscoroutinefunction(runtime) + + if signature: + stub_sig = Signature.from_funcitem(stub) + runtime_sig = Signature.from_inspect_signature(signature) + runtime_sig_desc = describe_runtime_callable(signature, is_async=runtime_is_coroutine) + stub_desc = str(stub_sig) + else: + runtime_sig_desc, stub_desc = None, None + + # Don't raise an error if the stub is a coroutine, but the runtime isn't. + # That results in false positives. 
+ # See https://github.com/python/typeshed/issues/7344 + if runtime_is_coroutine and not stub.is_coroutine: + yield Error( + object_path, + 'is an "async def" function at runtime, but not in the stub', + stub, + runtime, + stub_desc=stub_desc, + runtime_desc=runtime_sig_desc, + ) + + if not signature: + return + + for message in _verify_signature( + stub_sig, + runtime_sig, + function_name=stub.name, + warn_runtime_is_object_init=runtime is object.__init__, + ): + yield Error( + object_path, + "is inconsistent, " + message, + stub, + runtime, + runtime_desc=runtime_sig_desc, + ) + + +@verify.register(Missing) +def verify_missing( + stub: Missing, runtime: MaybeMissing[Any], object_path: list[str] +) -> Iterator[Error]: + if runtime is MISSING: + return + yield Error(object_path, "is not present in stub", stub, runtime) + + +@verify.register(nodes.Var) +def verify_var( + stub: nodes.Var, runtime: MaybeMissing[Any], object_path: list[str] +) -> Iterator[Error]: + if isinstance(runtime, Missing): + # Don't always yield an error here, because we often can't find instance variables + if len(object_path) <= 2: + yield Error(object_path, "is not present at runtime", stub, runtime) + return + + if ( + stub.is_initialized_in_class + and is_read_only_property(runtime) + and (stub.is_settable_property or not stub.is_property) + ): + yield Error(object_path, "is read-only at runtime but not in the stub", stub, runtime) + + runtime_type = get_mypy_type_of_runtime_value(runtime, type_context=stub.type) + note = "" + if ( + runtime_type is not None + and stub.type is not None + and not is_subtype_helper(runtime_type, stub.type) + ): + should_error = True + # Avoid errors when defining enums, since runtime_type is the enum itself, but we'd + # annotate it with the type of runtime.value + if isinstance(runtime, enum.Enum): + runtime_type = get_mypy_type_of_runtime_value(runtime.value) + if runtime_type is not None and is_subtype_helper(runtime_type, stub.type): + should_error = False + # We always allow setting the stub value to Ellipsis (...), but use + # _value_ type as a fallback if given. If a member is ... and _value_ + # type is given, all runtime types should be assignable to _value_. 
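+        # Illustrative example: for a stub enum member written as `RED = ...` on a class that
+        # also declares `_value_: int`, a runtime member whose value is 1 is accepted, while a
+        # runtime member whose value is "red" gets the "(incompatible '_value_')" note below.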
+ proper_type = mypy.types.get_proper_type(stub.type) + if ( + isinstance(proper_type, mypy.types.Instance) + and proper_type.type.fullname in mypy.types.ELLIPSIS_TYPE_NAMES + ): + value_t = stub.info.get("_value_") + if value_t is None or value_t.type is None or runtime_type is None: + should_error = False + elif is_subtype_helper(runtime_type, value_t.type): + should_error = False + else: + note = " (incompatible '_value_')" + + if should_error: + yield Error( + object_path, + f"variable differs from runtime type {runtime_type}{note}", + stub, + runtime, + ) + + +@verify.register(nodes.OverloadedFuncDef) +def verify_overloadedfuncdef( + stub: nodes.OverloadedFuncDef, runtime: MaybeMissing[Any], object_path: list[str] +) -> Iterator[Error]: + # TODO: support `@type_check_only` decorator + if isinstance(runtime, Missing): + yield Error(object_path, "is not present at runtime", stub, runtime) + return + + if stub.is_property: + # Any property with a setter is represented as an OverloadedFuncDef + if is_read_only_property(runtime): + yield Error(object_path, "is read-only at runtime but not in the stub", stub, runtime) + return + + if not is_probably_a_function(runtime): + yield Error(object_path, "is not a function", stub, runtime) + if not callable(runtime): + return + + # mypy doesn't allow overloads where one overload is abstract but another isn't, + # so it should be okay to just check whether the first overload is abstract or not. + # + # TODO: Mypy *does* allow properties where e.g. the getter is abstract but the setter is not; + # and any property with a setter is represented as an OverloadedFuncDef internally; + # not sure exactly what (if anything) we should do about that. + first_part = stub.items[0] + if isinstance(first_part, nodes.Decorator) and first_part.is_overload: + for msg in _verify_abstract_status(first_part.func, runtime): + yield Error(object_path, msg, stub, runtime) + + # Look the object up statically, to avoid binding by the descriptor protocol + static_runtime = _static_lookup_runtime(object_path) + + for message in _verify_static_class_methods(stub, runtime, static_runtime, object_path): + yield Error(object_path, "is inconsistent, " + message, stub, runtime) + + # TODO: Should call _verify_final_method here, + # but overloaded final methods in stubs cause a stubtest crash: see #14950 + + signature = safe_inspect_signature(runtime) + if not signature: + return + + stub_sig = Signature.from_overloadedfuncdef(stub) + runtime_sig = Signature.from_inspect_signature(signature) + + for message in _verify_signature( + stub_sig, + runtime_sig, + function_name=stub.name, + warn_runtime_is_object_init=runtime is object.__init__, + ): + # TODO: This is a little hacky, but the addition here is super useful + if "has a default value of type" in message: + message += ( + ". This is often caused by overloads failing to account for explicitly passing " + "in the default value." + ) + yield Error( + object_path, + "is inconsistent, " + message, + stub, + runtime, + stub_desc=(str(stub.type)) + f"\nInferred signature: {stub_sig}", + runtime_desc="def " + str(signature), + ) + + +@verify.register(nodes.TypeVarExpr) +def verify_typevarexpr( + stub: nodes.TypeVarExpr, runtime: MaybeMissing[Any], object_path: list[str] +) -> Iterator[Error]: + if isinstance(runtime, Missing): + # We seem to insert these typevars into NamedTuple stubs, but they + # don't exist at runtime. Just ignore! 
+ if stub.name == "_NT": + return + yield Error(object_path, "is not present at runtime", stub, runtime) + return + if not isinstance(runtime, TypeVar): + yield Error(object_path, "is not a TypeVar", stub, runtime) + return + + +@verify.register(nodes.ParamSpecExpr) +def verify_paramspecexpr( + stub: nodes.ParamSpecExpr, runtime: MaybeMissing[Any], object_path: list[str] +) -> Iterator[Error]: + if isinstance(runtime, Missing): + yield Error(object_path, "is not present at runtime", stub, runtime) + return + maybe_paramspec_types = ( + getattr(typing, "ParamSpec", None), + getattr(typing_extensions, "ParamSpec", None), + ) + paramspec_types = tuple(t for t in maybe_paramspec_types if t is not None) + if not paramspec_types or not isinstance(runtime, paramspec_types): + yield Error(object_path, "is not a ParamSpec", stub, runtime) + return + + +def _is_django_cached_property(runtime: Any) -> bool: # pragma: no cover + # This is a special case for + # https://docs.djangoproject.com/en/5.2/ref/utils/#django.utils.functional.cached_property + # This is needed in `django-stubs` project: + # https://github.com/typeddjango/django-stubs + if type(runtime).__name__ != "cached_property": + return False + try: + return bool(runtime.func) + except Exception: + return False + + +def _verify_readonly_property(stub: nodes.Decorator, runtime: Any) -> Iterator[str]: + assert stub.func.is_property + if isinstance(runtime, property): + yield from _verify_final_method(stub.func, runtime.fget, MISSING) + return + if isinstance(runtime, functools.cached_property): + yield from _verify_final_method(stub.func, runtime.func, MISSING) + return + if _is_django_cached_property(runtime): + yield from _verify_final_method(stub.func, runtime.func, MISSING) + return + if inspect.isdatadescriptor(runtime): + # It's enough like a property... + return + # Sometimes attributes pretend to be properties, for instance, to express that they + # are read only. So allowlist if runtime_type matches the return type of stub. + runtime_type = get_mypy_type_of_runtime_value(runtime) + func_type = ( + stub.func.type.ret_type if isinstance(stub.func.type, mypy.types.CallableType) else None + ) + if ( + runtime_type is not None + and func_type is not None + and is_subtype_helper(runtime_type, func_type) + ): + return + yield "is inconsistent, cannot reconcile @property on stub with runtime object" + + +def _verify_abstract_status(stub: nodes.FuncDef, runtime: Any) -> Iterator[str]: + stub_abstract = stub.abstract_status == nodes.IS_ABSTRACT + runtime_abstract = getattr(runtime, "__isabstractmethod__", False) + # The opposite can exist: some implementations omit `@abstractmethod` decorators + if runtime_abstract and not stub_abstract: + item_type = "property" if stub.is_property else "method" + yield f"is inconsistent, runtime {item_type} is abstract but stub is not" + + +def _verify_final_method( + stub: nodes.FuncDef, runtime: Any, static_runtime: MaybeMissing[Any] +) -> Iterator[str]: + if stub.is_final: + return + if getattr(runtime, "__final__", False) or ( + static_runtime is not MISSING and getattr(static_runtime, "__final__", False) + ): + yield "is decorated with @final at runtime, but not in the stub" + + +def _resolve_funcitem_from_decorator(dec: nodes.OverloadPart) -> nodes.FuncItem | None: + """Returns a FuncItem that corresponds to the output of the decorator. + + Returns None if we can't figure out what that would be. For convenience, this function also + accepts FuncItems. 
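+
+    For example (illustrative): a function decorated only with ``@staticmethod`` or
+    ``@overload`` resolves to the underlying function unchanged, ``@classmethod`` resolves to
+    a copy with the ``cls`` argument dropped, and any unrecognised decorator makes this
+    return None.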
+ """ + if isinstance(dec, nodes.FuncItem): + return dec + if dec.func.is_property: + return None + + def apply_decorator_to_funcitem( + decorator: nodes.Expression, func: nodes.FuncItem + ) -> nodes.FuncItem | None: + if ( + isinstance(decorator, nodes.CallExpr) + and isinstance(decorator.callee, nodes.RefExpr) + and decorator.callee.fullname in mypy.types.DEPRECATED_TYPE_NAMES + ): + return func + if not isinstance(decorator, nodes.RefExpr): + return None + if not decorator.fullname: + # Happens with namedtuple + return None + if ( + decorator.fullname in ("builtins.staticmethod", "abc.abstractmethod") + or decorator.fullname in mypy.types.OVERLOAD_NAMES + or decorator.fullname in mypy.types.OVERRIDE_DECORATOR_NAMES + or decorator.fullname in mypy.types.FINAL_DECORATOR_NAMES + ): + return func + if decorator.fullname == "builtins.classmethod": + if func.arguments[0].variable.name not in ("cls", "mcs", "metacls"): + raise StubtestFailure( + f"unexpected class parameter name {func.arguments[0].variable.name!r} " + f"in {dec.fullname}" + ) + # FuncItem is written so that copy.copy() actually works, even when compiled + ret = copy.copy(func) + # Remove the cls argument, since it's not present in inspect.signature of classmethods + ret.arguments = ret.arguments[1:] + return ret + # Just give up on any other decorators. After excluding properties, we don't run into + # anything else when running on typeshed's stdlib. + return None + + func: nodes.FuncItem = dec.func + for decorator in dec.original_decorators: + resulting_func = apply_decorator_to_funcitem(decorator, func) + if resulting_func is None: + return None + func = resulting_func + return func + + +@verify.register(nodes.Decorator) +def verify_decorator( + stub: nodes.Decorator, runtime: MaybeMissing[Any], object_path: list[str] +) -> Iterator[Error]: + if stub.func.is_type_check_only: + # This function only exists in stubs, we only check that the runtime part + # is missing. Other checks are not required. + if not isinstance(runtime, Missing): + yield Error( + object_path, + 'is marked as "@type_check_only", but also exists at runtime', + stub, + runtime, + stub_desc=repr(stub), + ) + return + + if isinstance(runtime, Missing): + yield Error(object_path, "is not present at runtime", stub, runtime) + return + if stub.func.is_property: + for message in _verify_readonly_property(stub, runtime): + yield Error(object_path, message, stub, runtime) + for message in _verify_abstract_status(stub.func, runtime): + yield Error(object_path, message, stub, runtime) + return + + func = _resolve_funcitem_from_decorator(stub) + if func is not None: + yield from verify(func, runtime, object_path) + + +@verify.register(nodes.TypeAlias) +def verify_typealias( + stub: nodes.TypeAlias, runtime: MaybeMissing[Any], object_path: list[str] +) -> Iterator[Error]: + stub_target = mypy.types.get_proper_type(stub.target) + stub_desc = f"Type alias for {stub_target}" + if isinstance(runtime, Missing): + yield Error(object_path, "is not present at runtime", stub, runtime, stub_desc=stub_desc) + return + runtime_origin = get_origin(runtime) or runtime + if isinstance(stub_target, mypy.types.Instance): + if not isinstance(runtime_origin, type): + yield Error( + object_path, + "is inconsistent, runtime is not a type", + stub, + runtime, + stub_desc=stub_desc, + ) + return + + stub_origin = stub_target.type + # Do our best to figure out the fullname of the runtime object... 
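+        # Illustrative example: a stub alias `MyQueue = queue.Queue[int]` is accepted when the
+        # runtime object's __module__ and __qualname__ resolve to "queue.Queue"; a single
+        # leading underscore on the runtime fullname is also tolerated by the regex below.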
+ runtime_name: object + try: + runtime_name = runtime_origin.__qualname__ + except AttributeError: + runtime_name = getattr(runtime_origin, "__name__", MISSING) + if isinstance(runtime_name, str): + runtime_module: object = getattr(runtime_origin, "__module__", MISSING) + if isinstance(runtime_module, str): + if runtime_module == "collections.abc" or ( + runtime_module == "re" and runtime_name in {"Match", "Pattern"} + ): + runtime_module = "typing" + runtime_fullname = f"{runtime_module}.{runtime_name}" + if re.fullmatch(rf"_?{re.escape(stub_origin.fullname)}", runtime_fullname): + # Okay, we're probably fine. + return + + # Okay, either we couldn't construct a fullname + # or the fullname of the stub didn't match the fullname of the runtime. + # Fallback to a full structural check of the runtime vis-a-vis the stub. + yield from verify_typeinfo(stub_origin, runtime_origin, object_path, is_alias_target=True) + return + if isinstance(stub_target, mypy.types.UnionType): + # complain if runtime is not a Union or UnionType + if runtime_origin is not Union and ( + not (sys.version_info >= (3, 10) and isinstance(runtime, types.UnionType)) + ): + yield Error(object_path, "is not a Union", stub, runtime, stub_desc=str(stub_target)) + # could check Union contents here... + return + if isinstance(stub_target, mypy.types.TupleType): + if tuple not in getattr(runtime_origin, "__mro__", ()): + yield Error( + object_path, "is not a subclass of tuple", stub, runtime, stub_desc=stub_desc + ) + # could check Tuple contents here... + return + if isinstance(stub_target, mypy.types.CallableType): + if runtime_origin is not collections.abc.Callable: + yield Error( + object_path, "is not a type alias for Callable", stub, runtime, stub_desc=stub_desc + ) + # could check Callable contents here... 
+ return + if isinstance(stub_target, mypy.types.AnyType): + return + yield Error(object_path, "is not a recognised type alias", stub, runtime, stub_desc=stub_desc) + + +# ==================== +# Helpers +# ==================== + + +IGNORED_MODULE_DUNDERS: Final = frozenset( + { + "__file__", + "__doc__", + "__name__", + "__builtins__", + "__package__", + "__cached__", + "__loader__", + "__spec__", + "__annotations__", + "__annotate__", + "__path__", # mypy adds __path__ to packages, but C packages don't have it + "__getattr__", # resulting behaviour might be typed explicitly + # Created by `warnings.warn`, does not make much sense to have in stubs: + "__warningregistry__", + # TODO: remove the following from this list + "__author__", + "__version__", + "__copyright__", + } +) + +IGNORABLE_CLASS_DUNDERS: Final = frozenset( + { + # Special attributes + "__dict__", + "__annotations__", + "__annotate__", + "__annotations_cache__", + "__annotate_func__", + "__text_signature__", + "__weakref__", + "__hash__", + "__getattr__", # resulting behaviour might be typed explicitly + "__setattr__", # defining this on a class can cause worse type checking + "__vectorcalloffset__", # undocumented implementation detail of the vectorcall protocol + "__firstlineno__", + "__static_attributes__", + "__classdictcell__", + # isinstance/issubclass hooks that type-checkers don't usually care about + "__instancecheck__", + "__subclasshook__", + "__subclasscheck__", + # python2 only magic methods: + "__cmp__", + "__nonzero__", + "__unicode__", + "__div__", + # cython methods + "__pyx_vtable__", + # Pickle methods + "__setstate__", + "__getstate__", + "__getnewargs__", + "__getinitargs__", + "__reduce_ex__", + "__reduce__", + "__slotnames__", # Cached names of slots added by `copyreg` module. + # ctypes weirdness + "__ctype_be__", + "__ctype_le__", + "__ctypes_from_outparam__", + # mypy limitations + "__abstractmethods__", # Classes with metaclass=ABCMeta inherit this attribute + "__new_member__", # If an enum defines __new__, the method is renamed as __new_member__ + "__dataclass_fields__", # Generated by dataclasses + "__dataclass_params__", # Generated by dataclasses + "__doc__", # mypy's semanal for namedtuples assumes this is str, not Optional[str] + # Added to all protocol classes on 3.12+ (or if using typing_extensions.Protocol) + "__protocol_attrs__", + "__callable_proto_members_only__", + "__non_callable_proto_members__", + # typing implementation details, consider removing some of these: + "__parameters__", + "__origin__", + "__args__", + "__orig_bases__", + "__final__", # Has a specialized check + # Consider removing __slots__? 
+        "__slots__",
+    }
+)
+
+
+def is_probably_private(name: str) -> bool:
+    return name.startswith("_") and not is_dunder(name)
+
+
+def is_probably_a_function(runtime: Any) -> bool:
+    return (
+        isinstance(
+            runtime,
+            (
+                types.FunctionType,
+                types.BuiltinFunctionType,
+                types.MethodType,
+                types.BuiltinMethodType,
+            ),
+        )
+        or (inspect.ismethoddescriptor(runtime) and callable(runtime))
+        or (isinstance(runtime, types.MethodWrapperType) and callable(runtime))
+    )
+
+
+def is_read_only_property(runtime: object) -> bool:
+    return isinstance(runtime, property) and runtime.fset is None
+
+
+def safe_inspect_signature(runtime: Any) -> inspect.Signature | None:
+    if (
+        hasattr(runtime, "__name__")
+        and runtime.__name__ == "__init__"
+        and hasattr(runtime, "__text_signature__")
+        and runtime.__text_signature__ == "($self, /, *args, **kwargs)"
+        and hasattr(runtime, "__objclass__")
+        and hasattr(runtime.__objclass__, "__text_signature__")
+        and runtime.__objclass__.__text_signature__ is not None
+    ):
+        # This is an __init__ method with the generic C-class signature.
+        # In this case, the underlying class often has a better signature,
+        # which we can convert into an __init__ signature by adding in the
+        # self parameter.
+        try:
+            s = inspect.signature(runtime.__objclass__)
+
+            parameter_kind: inspect._ParameterKind = inspect.Parameter.POSITIONAL_OR_KEYWORD
+            if s.parameters:
+                first_parameter = next(iter(s.parameters.values()))
+                if first_parameter.kind == inspect.Parameter.POSITIONAL_ONLY:
+                    parameter_kind = inspect.Parameter.POSITIONAL_ONLY
+            return s.replace(
+                parameters=[inspect.Parameter("self", parameter_kind), *s.parameters.values()]
+            )
+        except Exception:
+            pass
+
+    if (
+        hasattr(runtime, "__name__")
+        and runtime.__name__ == "__new__"
+        and hasattr(runtime, "__text_signature__")
+        and runtime.__text_signature__ == "($type, *args, **kwargs)"
+        and hasattr(runtime, "__self__")
+        and hasattr(runtime.__self__, "__text_signature__")
+        and runtime.__self__.__text_signature__ is not None
+    ):
+        # This is a __new__ method with the generic C-class signature.
+        # In this case, the underlying class often has a better signature,
+        # which we can convert into a __new__ signature by adding in the
+        # cls parameter.
+
+        # If the attached class has a valid __init__, skip recovering a
+        # signature for this __new__ method.
+        has_init = False
+        if (
+            hasattr(runtime.__self__, "__init__")
+            and hasattr(runtime.__self__.__init__, "__objclass__")
+            and runtime.__self__.__init__.__objclass__ is runtime.__self__
+        ):
+            has_init = True
+
+        if not has_init:
+            try:
+                s = inspect.signature(runtime.__self__)
+                parameter_kind = inspect.Parameter.POSITIONAL_OR_KEYWORD
+                if s.parameters:
+                    first_parameter = next(iter(s.parameters.values()))
+                    if first_parameter.kind == inspect.Parameter.POSITIONAL_ONLY:
+                        parameter_kind = inspect.Parameter.POSITIONAL_ONLY
+                return s.replace(
+                    parameters=[inspect.Parameter("cls", parameter_kind), *s.parameters.values()]
+                )
+            except Exception:
+                pass
+
+    try:
+        try:
+            return inspect.signature(runtime)
+        except ValueError:
+            if (
+                hasattr(runtime, "__text_signature__")
+                and "<unrepresentable>" in runtime.__text_signature__
+            ):
+                # Try to fix up the signature. Workaround for
+                # https://github.com/python/cpython/issues/87233
+                sig = runtime.__text_signature__.replace("<unrepresentable>", "...")
+                sig = inspect._signature_fromstr(inspect.Signature, runtime, sig)  # type: ignore[attr-defined]
+                assert isinstance(sig, inspect.Signature)
+                new_params = [
+                    (
+                        parameter.replace(default=UNREPRESENTABLE)
+                        if parameter.default is ...
+                        else parameter
+                    )
+                    for parameter in sig.parameters.values()
+                ]
+                return sig.replace(parameters=new_params)
+            else:
+                raise
+    except Exception:
+        # inspect.signature throws ValueError all the time
+        # catch RuntimeError because of https://bugs.python.org/issue39504
+        # catch TypeError because of https://github.com/python/typeshed/pull/5762
+        # catch AttributeError because of inspect.signature(_curses.window.border)
+        return None
+
+
+def describe_runtime_callable(signature: inspect.Signature, *, is_async: bool) -> str:
+    return f'{"async " if is_async else ""}def {signature}'
+
+
+def is_subtype_helper(left: mypy.types.Type, right: mypy.types.Type) -> bool:
+    """Checks whether ``left`` is a subtype of ``right``."""
+    left = mypy.types.get_proper_type(left)
+    right = mypy.types.get_proper_type(right)
+    if (
+        isinstance(left, mypy.types.LiteralType)
+        and isinstance(left.value, int)
+        and left.value in (0, 1)
+        and mypy.types.is_named_instance(right, "builtins.bool")
+    ):
+        # Pretend Literal[0, 1] is a subtype of bool to avoid unhelpful errors.
+        return True
+
+    if isinstance(right, mypy.types.TypedDictType) and mypy.types.is_named_instance(
+        left, "builtins.dict"
+    ):
+        # Special case checks against TypedDicts
+        return True
+
+    with mypy.state.state.strict_optional_set(True):
+        return mypy.subtypes.is_subtype(left, right)
+
+
+def get_mypy_node_for_name(module: str, type_name: str) -> mypy.nodes.SymbolNode | None:
+    stub = get_stub(module)
+    if stub is None:
+        return None
+    if type_name not in stub.names:
+        return None
+    return stub.names[type_name].node
+
+
+def get_mypy_type_of_runtime_value(
+    runtime: Any, type_context: mypy.types.Type | None = None
+) -> mypy.types.Type | None:
+    """Returns a mypy type object representing the type of ``runtime``.
+
+    Returns None if we can't find something that works.
+
+    """
+    if runtime is None:
+        return mypy.types.NoneType()
+    if isinstance(runtime, property):
+        # Give up on properties to avoid issues with things that are typed as attributes.
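+        # Illustrative example: for `class A: x = property(...)`, the object found via
+        # A.__dict__["x"] is a property instance; guessing a type for it here would conflict
+        # with stubs that simply declare `x: int`, so no type is inferred at all.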
+ return None + + def anytype() -> mypy.types.AnyType: + return mypy.types.AnyType(mypy.types.TypeOfAny.unannotated) + + if isinstance( + runtime, + (types.FunctionType, types.BuiltinFunctionType, types.MethodType, types.BuiltinMethodType), + ): + builtins = get_stub("builtins") + assert builtins is not None + type_info = builtins.names["function"].node + assert isinstance(type_info, nodes.TypeInfo) + fallback = mypy.types.Instance(type_info, [anytype()]) + signature = safe_inspect_signature(runtime) + if signature: + arg_types = [] + arg_kinds = [] + arg_names = [] + for arg in signature.parameters.values(): + arg_types.append(anytype()) + arg_names.append( + None if arg.kind == inspect.Parameter.POSITIONAL_ONLY else arg.name + ) + no_default = arg.default is inspect.Parameter.empty + if arg.kind == inspect.Parameter.POSITIONAL_ONLY: + arg_kinds.append(nodes.ARG_POS if no_default else nodes.ARG_OPT) + elif arg.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD: + arg_kinds.append(nodes.ARG_POS if no_default else nodes.ARG_OPT) + elif arg.kind == inspect.Parameter.KEYWORD_ONLY: + arg_kinds.append(nodes.ARG_NAMED if no_default else nodes.ARG_NAMED_OPT) + elif arg.kind == inspect.Parameter.VAR_POSITIONAL: + arg_kinds.append(nodes.ARG_STAR) + elif arg.kind == inspect.Parameter.VAR_KEYWORD: + arg_kinds.append(nodes.ARG_STAR2) + else: + raise AssertionError + else: + arg_types = [anytype(), anytype()] + arg_kinds = [nodes.ARG_STAR, nodes.ARG_STAR2] + arg_names = [None, None] + + return mypy.types.CallableType( + arg_types, + arg_kinds, + arg_names, + ret_type=anytype(), + fallback=fallback, + is_ellipsis_args=True, + ) + + skip_type_object_type = False + if type_context: + # Don't attempt to process the type object when context is generic + # This is related to issue #3737 + type_context = mypy.types.get_proper_type(type_context) + # Callable types with a generic return value + if isinstance(type_context, mypy.types.CallableType): + if isinstance(type_context.ret_type, mypy.types.TypeVarType): + skip_type_object_type = True + # Type[x] where x is generic + if isinstance(type_context, mypy.types.TypeType): + if isinstance(type_context.item, mypy.types.TypeVarType): + skip_type_object_type = True + + if isinstance(runtime, type) and not skip_type_object_type: + + def _named_type(name: str) -> mypy.types.Instance: + parts = name.rsplit(".", maxsplit=1) + node = get_mypy_node_for_name(parts[0], parts[1]) + assert isinstance(node, nodes.TypeInfo) + any_type = mypy.types.AnyType(mypy.types.TypeOfAny.special_form) + return mypy.types.Instance(node, [any_type] * len(node.defn.type_vars)) + + # Try and look up a stub for the runtime object itself + # The logic here is similar to ExpressionChecker.analyze_ref_expr + type_info = get_mypy_node_for_name(runtime.__module__, runtime.__name__) + if isinstance(type_info, nodes.TypeInfo): + result: mypy.types.Type | None = None + result = mypy.typeops.type_object_type(type_info, _named_type) + if mypy.checkexpr.is_type_type_context(type_context): + # This is the type in a type[] expression, so substitute type + # variables with Any. 
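+                # Illustrative: for a runtime class whose stub is `class C(Generic[T])`, this
+                # erases the unbound T so the result behaves like `type[C[Any]]` in this
+                # type[] context rather than leaking a type variable.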
+ result = mypy.erasetype.erase_typevars(result) + return result + + # Try and look up a stub for the runtime object's type + type_info = get_mypy_node_for_name(type(runtime).__module__, type(runtime).__name__) + if type_info is None: + return None + if isinstance(type_info, nodes.Var): + return type_info.type + if not isinstance(type_info, nodes.TypeInfo): + return None + + if isinstance(runtime, tuple): + # Special case tuples so we construct a valid mypy.types.TupleType + optional_items = [get_mypy_type_of_runtime_value(v) for v in runtime] + items = [(i if i is not None else anytype()) for i in optional_items] + fallback = mypy.types.Instance(type_info, [anytype()]) + return mypy.types.TupleType(items, fallback) + + fallback = mypy.types.Instance(type_info, [anytype() for _ in type_info.type_vars]) + + value: bool | int | str + if isinstance(runtime, enum.Enum) and isinstance(runtime.name, str): + value = runtime.name + elif isinstance(runtime, bytes): + value = bytes_to_human_readable_repr(runtime) + elif isinstance(runtime, (bool, int, str)): + value = runtime + else: + return fallback + + return mypy.types.LiteralType(value=value, fallback=fallback) + + +# ==================== +# Build and entrypoint +# ==================== + + +_all_stubs: dict[str, nodes.MypyFile] = {} + + +def build_stubs(modules: list[str], options: Options, find_submodules: bool = False) -> list[str]: + """Uses mypy to construct stub objects for the given modules. + + This sets global state that ``get_stub`` can access. + + Returns all modules we might want to check. If ``find_submodules`` is False, this is equal + to ``modules``. + + :param modules: List of modules to build stubs for. + :param options: Mypy options for finding and building stubs. + :param find_submodules: Whether to attempt to find submodules of the given modules as well. 
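+
+    Example (illustrative, hypothetical package name)::
+
+        options = Options()
+        modules_to_check = build_stubs(["mypackage"], options, find_submodules=True)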
+ + """ + data_dir = mypy.build.default_data_dir() + search_path = mypy.modulefinder.compute_search_paths([], options, data_dir) + find_module_cache = mypy.modulefinder.FindModuleCache( + search_path, fscache=None, options=options + ) + + all_modules = [] + sources = [] + for module in modules: + all_modules.append(module) + if not find_submodules: + module_path = find_module_cache.find_module(module) + if not isinstance(module_path, str): + # test_module will yield an error later when it can't find stubs + continue + sources.append(mypy.modulefinder.BuildSource(module_path, module, None)) + else: + found_sources = find_module_cache.find_modules_recursive(module) + sources.extend(found_sources) + # find submodules via mypy + all_modules.extend(s.module for s in found_sources if s.module not in all_modules) + # find submodules via pkgutil + try: + runtime = silent_import_module(module) + all_modules.extend( + m.name + for m in pkgutil.walk_packages(runtime.__path__, runtime.__name__ + ".") + if m.name not in all_modules + ) + except KeyboardInterrupt: + raise + except BaseException: + pass + + if sources: + try: + res = mypy.build.build(sources=sources, options=options) + except mypy.errors.CompileError as e: + raise StubtestFailure(f"failed mypy compile:\n{e}") from e + if res.errors: + raise StubtestFailure("mypy build errors:\n" + "\n".join(res.errors)) + + global _all_stubs + _all_stubs = res.files + + return all_modules + + +def get_stub(module: str) -> nodes.MypyFile | None: + """Returns a stub object for the given module, if we've built one.""" + return _all_stubs.get(module) + + +def get_typeshed_stdlib_modules( + custom_typeshed_dir: str | None, version_info: tuple[int, int] | None = None +) -> set[str]: + """Returns a list of stdlib modules in typeshed (for current Python version).""" + stdlib_py_versions = mypy.modulefinder.load_stdlib_py_versions(custom_typeshed_dir) + if version_info is None: + version_info = sys.version_info[0:2] + + def exists_in_version(module: str) -> bool: + assert version_info is not None + parts = module.split(".") + for i in range(len(parts), 0, -1): + current_module = ".".join(parts[:i]) + if current_module in stdlib_py_versions: + minver, maxver = stdlib_py_versions[current_module] + return version_info >= minver and (maxver is None or version_info <= maxver) + return False + + if custom_typeshed_dir: + typeshed_dir = Path(custom_typeshed_dir) + else: + typeshed_dir = Path(mypy.build.default_data_dir()) / "typeshed" + stdlib_dir = typeshed_dir / "stdlib" + + modules: set[str] = set() + for path in stdlib_dir.rglob("*.pyi"): + if path.stem == "__init__": + path = path.parent + module = ".".join(path.relative_to(stdlib_dir).parts[:-1] + (path.stem,)) + if exists_in_version(module): + modules.add(module) + return modules + + +def get_importable_stdlib_modules() -> set[str]: + """Return all importable stdlib modules at runtime.""" + all_stdlib_modules: AbstractSet[str] + if sys.version_info >= (3, 10): + all_stdlib_modules = sys.stdlib_module_names + else: + all_stdlib_modules = set(sys.builtin_module_names) + modules_by_finder: defaultdict[importlib.machinery.FileFinder, set[str]] = defaultdict(set) + for m in pkgutil.iter_modules(): + if isinstance(m.module_finder, importlib.machinery.FileFinder): + modules_by_finder[m.module_finder].add(m.name) + for finder, module_group in modules_by_finder.items(): + if ( + "site-packages" not in Path(finder.path).parts + # if "_queue" is present, it's most likely the module finder + # for stdlib extension modules; + 
# if "queue" is present, it's most likely the module finder + # for pure-Python stdlib modules. + # In either case, we'll want to add all the modules that the finder has to offer us. + # This is a bit hacky, but seems to work well in a cross-platform way. + and {"_queue", "queue"} & module_group + ): + all_stdlib_modules.update(module_group) + + importable_stdlib_modules: set[str] = set() + for module_name in all_stdlib_modules: + if module_name in ANNOYING_STDLIB_MODULES: + continue + + try: + runtime = silent_import_module(module_name) + except ImportError: + continue + else: + importable_stdlib_modules.add(module_name) + + try: + # some stdlib modules (e.g. `nt`) don't have __path__ set... + runtime_path = runtime.__path__ + runtime_name = runtime.__name__ + except AttributeError: + continue + + for submodule in pkgutil.walk_packages(runtime_path, runtime_name + "."): + submodule_name = submodule.name + + # There are many annoying *.__main__ stdlib modules, + # and including stubs for them isn't really that useful anyway: + # tkinter.__main__ opens a tkinter windows; unittest.__main__ raises SystemExit; etc. + # + # The idlelib.* submodules are similarly annoying in opening random tkinter windows, + # and we're unlikely to ever add stubs for idlelib in typeshed + # (see discussion in https://github.com/python/typeshed/pull/9193) + # + # test.* modules do weird things like raising exceptions in __del__ methods, + # leading to unraisable exceptions being logged to the terminal + # as a warning at the end of the stubtest run + if submodule_name.endswith(".__main__") or submodule_name.startswith( + ("idlelib.", "test.") + ): + continue + + try: + silent_import_module(submodule_name) + except KeyboardInterrupt: + raise + # importing multiprocessing.popen_forkserver on Windows raises AttributeError... + # some submodules also appear to raise SystemExit as well on some Python versions + # (not sure exactly which) + except BaseException: + continue + else: + importable_stdlib_modules.add(submodule_name) + + return importable_stdlib_modules + + +def get_allowlist_entries(allowlist_file: str) -> Iterator[str]: + def strip_comments(s: str) -> str: + try: + return s[: s.index("#")].strip() + except ValueError: + return s.strip() + + with open(allowlist_file) as f: + for line in f: + entry = strip_comments(line) + if entry: + yield entry + + +class _Arguments: + modules: list[str] + concise: bool + ignore_missing_stub: bool + ignore_positional_only: bool + ignore_disjoint_bases: bool + allowlist: list[str] + generate_allowlist: bool + ignore_unused_allowlist: bool + mypy_config_file: str | None + custom_typeshed_dir: str | None + check_typeshed: bool + version: str + show_traceback: bool + pdb: bool + + +# typeshed added a stub for __main__, but that causes stubtest to check itself +ANNOYING_STDLIB_MODULES: Final = frozenset({"antigravity", "this", "__main__", "_ios_support"}) + + +def test_stubs(args: _Arguments, use_builtins_fixtures: bool = False) -> int: + """This is stubtest! It's time to test the stubs!""" + # Load the allowlist. This is a series of strings corresponding to Error.object_desc + # Values in the dict will store whether we used the allowlist entry or not. + allowlist = { + entry: False + for allowlist_file in args.allowlist + for entry in get_allowlist_entries(allowlist_file) + } + allowlist_regexes = {entry: re.compile(entry) for entry in allowlist} + + # If we need to generate an allowlist, we store Error.object_desc for each error here. 
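+    # Illustrative allowlist file format (one entry per line, '#' starts a comment); entries
+    # are matched against Error.object_desc, first literally and then as a full regex:
+    #     mymodule.SomeClass.some_method
+    #     mymodule\.generated_.*
+    # Unused entries are reported at the end unless --ignore-unused-allowlist is passed.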
+ generated_allowlist = set() + + modules = args.modules + if args.check_typeshed: + if args.modules: + print( + _style("error:", color="red", bold=True), + "cannot pass both --check-typeshed and a list of modules", + ) + return 1 + typeshed_modules = get_typeshed_stdlib_modules(args.custom_typeshed_dir) + runtime_modules = get_importable_stdlib_modules() + modules = sorted((typeshed_modules | runtime_modules) - ANNOYING_STDLIB_MODULES) + + if not modules: + print(_style("error:", color="red", bold=True), "no modules to check") + return 1 + + options = Options() + options.incremental = False + options.custom_typeshed_dir = args.custom_typeshed_dir + if options.custom_typeshed_dir: + options.abs_custom_typeshed_dir = os.path.abspath(options.custom_typeshed_dir) + options.config_file = args.mypy_config_file + options.use_builtins_fixtures = use_builtins_fixtures + options.show_traceback = args.show_traceback + options.pdb = args.pdb + + if options.config_file: + + def set_strict_flags() -> None: # not needed yet + return + + parse_config_file(options, set_strict_flags, options.config_file, sys.stdout, sys.stderr) + + def error_callback(msg: str) -> typing.NoReturn: + print(_style("error:", color="red", bold=True), msg) + sys.exit(1) + + def warning_callback(msg: str) -> None: + print(_style("warning:", color="yellow", bold=True), msg) + + options.process_error_codes(error_callback=error_callback) + options.process_incomplete_features( + error_callback=error_callback, warning_callback=warning_callback + ) + options.process_strict_bytes() + + try: + modules = build_stubs(modules, options, find_submodules=not args.check_typeshed) + except StubtestFailure as stubtest_failure: + print( + _style("error:", color="red", bold=True), + f"not checking stubs due to {stubtest_failure}", + ) + return 1 + + exit_code = 0 + error_count = 0 + for module in modules: + for error in test_module(module): + # Filter errors + if args.ignore_missing_stub and error.is_missing_stub(): + continue + if args.ignore_positional_only and error.is_positional_only_related(): + continue + if args.ignore_disjoint_bases and error.is_disjoint_base_related(): + continue + if error.object_desc in allowlist: + allowlist[error.object_desc] = True + continue + is_allowlisted = False + for w in allowlist: + if allowlist_regexes[w].fullmatch(error.object_desc): + allowlist[w] = True + is_allowlisted = True + break + if is_allowlisted: + continue + + # We have errors, so change exit code, and output whatever necessary + exit_code = 1 + if args.generate_allowlist: + generated_allowlist.add(error.object_desc) + continue + safe_print(error.get_description(concise=args.concise)) + error_count += 1 + + # Print unused allowlist entries + if not args.ignore_unused_allowlist: + for w in allowlist: + # Don't consider an entry unused if it regex-matches the empty string + # This lets us allowlist errors that don't manifest at all on some systems + if not allowlist[w] and not allowlist_regexes[w].fullmatch(""): + exit_code = 1 + error_count += 1 + print(f"note: unused allowlist entry {w}") + + # Print the generated allowlist + if args.generate_allowlist: + for e in sorted(generated_allowlist): + print(e) + exit_code = 0 + elif not args.concise: + if error_count: + print( + _style( + f"Found {error_count} error{plural_s(error_count)}" + f" (checked {len(modules)} module{plural_s(modules)})", + color="red", + bold=True, + ) + ) + else: + print( + _style( + f"Success: no issues found in {len(modules)} module{plural_s(modules)}", + color="green", + 
bold=True, + ) + ) + + return exit_code + + +def safe_print(text: str) -> None: + """Print a text replacing chars not representable in stdout encoding.""" + # If `sys.stdout` encoding is not the same as out (usually UTF8) string, + # if may cause painful crashes. I don't want to reconfigure `sys.stdout` + # to do `errors = "replace"` as that sounds scary. + out_encoding = sys.stdout.encoding + if out_encoding is not None: + # Can be None if stdout is replaced (including our own tests). This should be + # safe to omit if the actual stream doesn't care about encoding. + text = text.encode(out_encoding, errors="replace").decode(out_encoding, errors="replace") + print(text) + + +def parse_options(args: list[str]) -> _Arguments: + parser = argparse.ArgumentParser( + description="Compares stubs to objects introspected from the runtime." + ) + parser.add_argument("modules", nargs="*", help="Modules to test") + parser.add_argument( + "--concise", + action="store_true", + help="Makes stubtest's output more concise, one line per error", + ) + parser.add_argument( + "--ignore-missing-stub", + action="store_true", + help="Ignore errors for stub missing things that are present at runtime", + ) + parser.add_argument( + "--ignore-positional-only", + action="store_true", + help="Ignore errors for whether an argument should or shouldn't be positional-only", + ) + # TODO: Remove once PEP 800 is accepted + parser.add_argument( + "--ignore-disjoint-bases", + action="store_true", + help="Disable checks for PEP 800 @disjoint_base classes", + ) + parser.add_argument( + "--allowlist", + "--whitelist", + action="append", + metavar="FILE", + default=[], + help=( + "Use file as an allowlist. Can be passed multiple times to combine multiple " + "allowlists. Allowlists can be created with --generate-allowlist. Allowlists " + "support regular expressions." 
+ ), + ) + parser.add_argument( + "--generate-allowlist", + "--generate-whitelist", + action="store_true", + help="Print an allowlist (to stdout) to be used with --allowlist", + ) + parser.add_argument( + "--ignore-unused-allowlist", + "--ignore-unused-whitelist", + action="store_true", + help="Ignore unused allowlist entries", + ) + parser.add_argument( + "--mypy-config-file", + metavar="FILE", + help=("Use specified mypy config file to determine mypy plugins and mypy path"), + ) + parser.add_argument( + "--custom-typeshed-dir", metavar="DIR", help="Use the custom typeshed in DIR" + ) + parser.add_argument( + "--check-typeshed", action="store_true", help="Check all stdlib modules in typeshed" + ) + parser.add_argument( + "--version", action="version", version="%(prog)s " + mypy.version.__version__ + ) + parser.add_argument("--pdb", action="store_true", help="Invoke pdb on fatal error") + parser.add_argument( + "--show-traceback", "--tb", action="store_true", help="Show traceback on fatal error" + ) + + return parser.parse_args(args, namespace=_Arguments()) + + +def main() -> int: + mypy.util.check_python_version("stubtest") + return test_stubs(parse_options(sys.argv[1:])) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/.venv/lib/python3.12/site-packages/mypy/stubutil.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/stubutil.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..ccafb9a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/stubutil.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/stubutil.py b/.venv/lib/python3.12/site-packages/mypy/stubutil.py new file mode 100644 index 0000000..a3c0f9b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/stubutil.py @@ -0,0 +1,895 @@ +"""Utilities for mypy.stubgen, mypy.stubgenc, and mypy.stubdoc modules.""" + +from __future__ import annotations + +import os.path +import re +import sys +import traceback +from abc import abstractmethod +from collections import defaultdict +from collections.abc import Iterable, Iterator, Mapping +from contextlib import contextmanager +from typing import Final, overload + +from mypy_extensions import mypyc_attr + +import mypy.options +from mypy.modulefinder import ModuleNotFoundReason +from mypy.moduleinspect import InspectError, ModuleInspect +from mypy.nodes import PARAM_SPEC_KIND, TYPE_VAR_TUPLE_KIND, ClassDef, FuncDef, TypeAliasStmt +from mypy.stubdoc import ArgSig, FunctionSig +from mypy.types import ( + AnyType, + NoneType, + Type, + TypeList, + TypeStrVisitor, + UnboundType, + UnionType, + UnpackType, +) + +# Modules that may fail when imported, or that may have side effects (fully qualified). +NOT_IMPORTABLE_MODULES = () + +# Typing constructs to be replaced by their builtin equivalents. 
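+# (Illustrative: an annotation written as ``typing.List[str]`` in the inspected source is
+# rewritten to ``builtins.list[str]`` via this table before being printed in a stub.)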
+TYPING_BUILTIN_REPLACEMENTS: Final = { + # From typing + "typing.Text": "builtins.str", + "typing.Tuple": "builtins.tuple", + "typing.List": "builtins.list", + "typing.Dict": "builtins.dict", + "typing.Set": "builtins.set", + "typing.FrozenSet": "builtins.frozenset", + "typing.Type": "builtins.type", + # From typing_extensions + "typing_extensions.Text": "builtins.str", + "typing_extensions.Tuple": "builtins.tuple", + "typing_extensions.List": "builtins.list", + "typing_extensions.Dict": "builtins.dict", + "typing_extensions.Set": "builtins.set", + "typing_extensions.FrozenSet": "builtins.frozenset", + "typing_extensions.Type": "builtins.type", +} + + +class CantImport(Exception): + def __init__(self, module: str, message: str) -> None: + self.module = module + self.message = message + + +def walk_packages( + inspect: ModuleInspect, packages: list[str], verbose: bool = False +) -> Iterator[str]: + """Iterates through all packages and sub-packages in the given list. + + This uses runtime imports (in another process) to find both Python and C modules. + For Python packages we simply pass the __path__ attribute to pkgutil.walk_packages() to + get the content of the package (all subpackages and modules). However, packages in C + extensions do not have this attribute, so we have to roll out our own logic: recursively + find all modules imported in the package that have matching names. + """ + for package_name in packages: + if package_name in NOT_IMPORTABLE_MODULES: + print(f"{package_name}: Skipped (blacklisted)") + continue + if verbose: + print(f"Trying to import {package_name!r} for runtime introspection") + try: + prop = inspect.get_package_properties(package_name) + except InspectError: + if verbose: + tb = traceback.format_exc() + sys.stderr.write(tb) + report_missing(package_name) + continue + yield prop.name + if prop.is_c_module: + # Recursively iterate through the subpackages + yield from walk_packages(inspect, prop.subpackages, verbose) + else: + yield from prop.subpackages + + +def find_module_path_using_sys_path(module: str, sys_path: list[str]) -> str | None: + relative_candidates = ( + module.replace(".", "/") + ".py", + os.path.join(module.replace(".", "/"), "__init__.py"), + ) + for base in sys_path: + for relative_path in relative_candidates: + path = os.path.join(base, relative_path) + if os.path.isfile(path): + return path + return None + + +def find_module_path_and_all_py3( + inspect: ModuleInspect, module: str, verbose: bool +) -> tuple[str | None, list[str] | None] | None: + """Find module and determine __all__ for a Python 3 module. + + Return None if the module is a C or pyc-only module. + Return (module_path, __all__) if it is a Python module. + Raise CantImport if import failed. + """ + if module in NOT_IMPORTABLE_MODULES: + raise CantImport(module, "") + + # TODO: Support custom interpreters. + if verbose: + print(f"Trying to import {module!r} for runtime introspection") + try: + mod = inspect.get_package_properties(module) + except InspectError as e: + # Fall back to finding the module using sys.path. + path = find_module_path_using_sys_path(module, sys.path) + if path is None: + raise CantImport(module, str(e)) from e + return path, None + if mod.is_c_module: + return None + return mod.file, mod.all + + +@contextmanager +def generate_guarded( + mod: str, target: str, ignore_errors: bool = True, verbose: bool = False +) -> Iterator[None]: + """Ignore or report errors during stub generation. + + Optionally report success. 
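+
+    Example (illustrative, hypothetical module and output path)::
+
+        with generate_guarded("mypkg.mod", "out/mypkg/mod.pyi", ignore_errors=True):
+            ...  # run the actual stub generation for the module here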
+ """ + if verbose: + print(f"Processing {mod}") + try: + yield + except Exception as e: + if not ignore_errors: + raise e + else: + # --ignore-errors was passed + print("Stub generation failed for", mod, file=sys.stderr) + else: + if verbose: + print(f"Created {target}") + + +def report_missing(mod: str, message: str | None = "", traceback: str = "") -> None: + if message: + message = " with error: " + message + print(f"{mod}: Failed to import, skipping{message}") + + +def fail_missing(mod: str, reason: ModuleNotFoundReason) -> None: + if reason is ModuleNotFoundReason.NOT_FOUND: + clarification = "(consider using --search-path)" + elif reason is ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS: + clarification = "(module likely exists, but is not PEP 561 compatible)" + else: + clarification = f"(unknown reason '{reason}')" + raise SystemExit(f"Can't find module '{mod}' {clarification}") + + +@overload +def remove_misplaced_type_comments(source: bytes) -> bytes: ... + + +@overload +def remove_misplaced_type_comments(source: str) -> str: ... + + +def remove_misplaced_type_comments(source: str | bytes) -> str | bytes: + """Remove comments from source that could be understood as misplaced type comments. + + Normal comments may look like misplaced type comments, and since they cause blocking + parse errors, we want to avoid them. + """ + if isinstance(source, bytes): + # This gives us a 1-1 character code mapping, so it's roundtrippable. + text = source.decode("latin1") + else: + text = source + + # Remove something that looks like a variable type comment but that's by itself + # on a line, as it will often generate a parse error (unless it's # type: ignore). + text = re.sub(r'^[ \t]*# +type: +["\'a-zA-Z_].*$', "", text, flags=re.MULTILINE) + + # Remove something that looks like a function type comment after docstring, + # which will result in a parse error. + text = re.sub(r'""" *\n[ \t\n]*# +type: +\(.*$', '"""\n', text, flags=re.MULTILINE) + text = re.sub(r"''' *\n[ \t\n]*# +type: +\(.*$", "'''\n", text, flags=re.MULTILINE) + + # Remove something that looks like a badly formed function type comment. + text = re.sub(r"^[ \t]*# +type: +\([^()]+(\)[ \t]*)?$", "", text, flags=re.MULTILINE) + + if isinstance(source, bytes): + return text.encode("latin1") + else: + return text + + +def common_dir_prefix(paths: list[str]) -> str: + if not paths: + return "." + cur = os.path.dirname(os.path.normpath(paths[0])) + for path in paths[1:]: + while True: + path = os.path.dirname(os.path.normpath(path)) + if (cur + os.sep).startswith(path + os.sep): + cur = path + break + return cur or "." + + +class AnnotationPrinter(TypeStrVisitor): + """Visitor used to print existing annotations in a file. + + The main difference from TypeStrVisitor is a better treatment of + unbound types. + + Notes: + * This visitor doesn't add imports necessary for annotations, this is done separately + by ImportTracker. + * It can print all kinds of types, but the generated strings may not be valid (notably + callable types) since it prints the same string that reveal_type() does. + * For Instance types it prints the fully qualified names. + """ + + # TODO: Generate valid string representation for callable types. + # TODO: Use short names for Instances. 
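+    # Illustrative example: an unanalysed annotation `typing.Optional[typing.List[int]]` is
+    # printed roughly as "list[int] | None" (see visit_unbound_type below), with the names it
+    # needs registered on the stub generator's import tracker rather than imported here.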
+ def __init__( + self, + stubgen: BaseStubGenerator, + known_modules: list[str] | None = None, + local_modules: list[str] | None = None, + ) -> None: + super().__init__(options=mypy.options.Options()) + self.stubgen = stubgen + self.known_modules = known_modules + self.local_modules = local_modules or ["builtins"] + + def visit_any(self, t: AnyType) -> str: + s = super().visit_any(t) + self.stubgen.import_tracker.require_name(s) + return s + + def visit_unbound_type(self, t: UnboundType) -> str: + s = t.name + fullname = self.stubgen.resolve_name(s) + if fullname == "typing.Union": + return " | ".join([item.accept(self) for item in t.args]) + if fullname == "typing.Optional": + if len(t.args) == 1: + return f"{t.args[0].accept(self)} | None" + return self.stubgen.add_name("_typeshed.Incomplete") + if fullname in TYPING_BUILTIN_REPLACEMENTS: + s = self.stubgen.add_name(TYPING_BUILTIN_REPLACEMENTS[fullname], require=True) + if self.known_modules is not None and "." in s: + # see if this object is from any of the modules that we're currently processing. + # reverse sort so that subpackages come before parents: e.g. "foo.bar" before "foo". + for module_name in self.local_modules + sorted(self.known_modules, reverse=True): + if s.startswith(module_name + "."): + if module_name in self.local_modules: + s = s[len(module_name) + 1 :] + arg_module = module_name + break + else: + arg_module = s[: s.rindex(".")] + if arg_module not in self.local_modules: + self.stubgen.import_tracker.add_import(arg_module, require=True) + elif s == "NoneType": + # when called without analysis all types are unbound, so this won't hit + # visit_none_type(). + s = "None" + else: + self.stubgen.import_tracker.require_name(s) + if t.args: + s += f"[{self.args_str(t.args)}]" + elif t.empty_tuple_index: + s += "[()]" + return s + + def visit_none_type(self, t: NoneType) -> str: + return "None" + + def visit_type_list(self, t: TypeList) -> str: + return f"[{self.list_str(t.items)}]" + + def visit_union_type(self, t: UnionType) -> str: + return " | ".join([item.accept(self) for item in t.items]) + + def visit_unpack_type(self, t: UnpackType) -> str: + if self.options.python_version >= (3, 11): + return f"*{t.type.accept(self)}" + return super().visit_unpack_type(t) + + def args_str(self, args: Iterable[Type]) -> str: + """Convert an array of arguments to strings and join the results with commas. 
+ + The main difference from list_str is the preservation of quotes for string + arguments + """ + types = ["builtins.bytes", "builtins.str"] + res = [] + for arg in args: + arg_str = arg.accept(self) + if isinstance(arg, UnboundType) and arg.original_str_fallback in types: + res.append(f"'{arg_str}'") + else: + res.append(arg_str) + return ", ".join(res) + + +class ClassInfo: + def __init__( + self, + name: str, + self_var: str, + docstring: str | None = None, + cls: type | None = None, + parent: ClassInfo | None = None, + ) -> None: + self.name = name + self.self_var = self_var + self.docstring = docstring + self.cls = cls + self.parent = parent + + +class FunctionContext: + def __init__( + self, + module_name: str, + name: str, + docstring: str | None = None, + is_abstract: bool = False, + class_info: ClassInfo | None = None, + ) -> None: + self.module_name = module_name + self.name = name + self.docstring = docstring + self.is_abstract = is_abstract + self.class_info = class_info + self._fullname: str | None = None + + @property + def fullname(self) -> str: + if self._fullname is None: + if self.class_info: + parents = [] + class_info: ClassInfo | None = self.class_info + while class_info is not None: + parents.append(class_info.name) + class_info = class_info.parent + namespace = ".".join(reversed(parents)) + self._fullname = f"{self.module_name}.{namespace}.{self.name}" + else: + self._fullname = f"{self.module_name}.{self.name}" + return self._fullname + + +def infer_method_ret_type(name: str) -> str | None: + """Infer return types for known special methods""" + if name.startswith("__") and name.endswith("__"): + name = name[2:-2] + if name in ("float", "bool", "bytes", "int", "complex", "str"): + return name + # Note: __eq__ and co may return arbitrary types, but bool is good enough for stubgen. 
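+        # (e.g. __eq__ may also return NotImplemented at runtime, but bool is the
+        # conventional stub annotation.)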
+ elif name in ("eq", "ne", "lt", "le", "gt", "ge", "contains"): + return "bool" + elif name in ("len", "length_hint", "index", "hash", "sizeof", "trunc", "floor", "ceil"): + return "int" + elif name in ("format", "repr"): + return "str" + elif name in ("init", "setitem", "del", "delitem"): + return "None" + return None + + +def infer_method_arg_types( + name: str, self_var: str = "self", arg_names: list[str] | None = None +) -> list[ArgSig] | None: + """Infer argument types for known special methods""" + args: list[ArgSig] | None = None + if name.startswith("__") and name.endswith("__"): + if arg_names and len(arg_names) >= 1 and arg_names[0] == "self": + arg_names = arg_names[1:] + + name = name[2:-2] + if name == "exit": + if arg_names is None: + arg_names = ["type", "value", "traceback"] + if len(arg_names) == 3: + arg_types = [ + "type[BaseException] | None", + "BaseException | None", + "types.TracebackType | None", + ] + args = [ + ArgSig(name=arg_name, type=arg_type) + for arg_name, arg_type in zip(arg_names, arg_types) + ] + if args is not None: + return [ArgSig(name=self_var)] + args + return None + + +@mypyc_attr(allow_interpreted_subclasses=True) +class SignatureGenerator: + """Abstract base class for extracting a list of FunctionSigs for each function.""" + + def remove_self_type( + self, inferred: list[FunctionSig] | None, self_var: str + ) -> list[FunctionSig] | None: + """Remove type annotation from self/cls argument""" + if inferred: + for signature in inferred: + if signature.args: + if signature.args[0].name == self_var: + signature.args[0].type = None + return inferred + + @abstractmethod + def get_function_sig( + self, default_sig: FunctionSig, ctx: FunctionContext + ) -> list[FunctionSig] | None: + """Return a list of signatures for the given function. + + If no signature can be found, return None. If all of the registered SignatureGenerators + for the stub generator return None, then the default_sig will be used. + """ + pass + + @abstractmethod + def get_property_type(self, default_type: str | None, ctx: FunctionContext) -> str | None: + """Return the type of the given property""" + pass + + +class ImportTracker: + """Record necessary imports during stub generation.""" + + def __init__(self) -> None: + # module_for['foo'] has the module name where 'foo' was imported from, or None if + # 'foo' is a module imported directly; + # direct_imports['foo'] is the module path used when the name 'foo' was added to the + # namespace. 
+ # reverse_alias['foo'] is the name that 'foo' had originally when imported with an + # alias; examples + # 'from pkg import mod' ==> module_for['mod'] == 'pkg' + # 'from pkg import mod as m' ==> module_for['m'] == 'pkg' + # ==> reverse_alias['m'] == 'mod' + # 'import pkg.mod as m' ==> module_for['m'] == None + # ==> reverse_alias['m'] == 'pkg.mod' + # 'import pkg.mod' ==> module_for['pkg'] == None + # ==> module_for['pkg.mod'] == None + # ==> direct_imports['pkg'] == 'pkg.mod' + # ==> direct_imports['pkg.mod'] == 'pkg.mod' + self.module_for: dict[str, str | None] = {} + self.direct_imports: dict[str, str] = {} + self.reverse_alias: dict[str, str] = {} + + # required_names is the set of names that are actually used in a type annotation + self.required_names: set[str] = set() + + # Names that should be reexported if they come from another module + self.reexports: set[str] = set() + + def add_import_from( + self, module: str, names: list[tuple[str, str | None]], require: bool = False + ) -> None: + for name, alias in names: + if alias: + # 'from {module} import {name} as {alias}' + self.module_for[alias] = module + self.reverse_alias[alias] = name + else: + # 'from {module} import {name}' + self.module_for[name] = module + self.reverse_alias.pop(name, None) + if require: + self.require_name(alias or name) + self.direct_imports.pop(alias or name, None) + + def add_import(self, module: str, alias: str | None = None, require: bool = False) -> None: + if alias: + # 'import {module} as {alias}' + assert "." not in alias # invalid syntax + self.module_for[alias] = None + self.reverse_alias[alias] = module + if require: + self.required_names.add(alias) + else: + # 'import {module}' + name = module + if require: + self.required_names.add(name) + # add module and its parent packages + while name: + self.module_for[name] = None + self.direct_imports[name] = module + self.reverse_alias.pop(name, None) + name = name.rpartition(".")[0] + + def require_name(self, name: str) -> None: + while name not in self.direct_imports and "." in name: + name = name.rsplit(".", 1)[0] + self.required_names.add(name) + + def reexport(self, name: str) -> None: + """Mark a given non qualified name as needed in __all__. + + This means that in case it comes from a module, it should be + imported with an alias even if the alias is the same as the name. + """ + self.require_name(name) + self.reexports.add(name) + + def import_lines(self) -> list[str]: + """The list of required import lines (as strings with python code). + + In order for a module be included in this output, an identifier must be both + 'required' via require_name() and 'imported' via add_import_from() + or add_import() + """ + result = [] + + # To summarize multiple names imported from a same module, we collect those + # in the `module_map` dictionary, mapping a module path to the list of names that should + # be imported from it. the names can also be alias in the form 'original as alias' + module_map: Mapping[str, list[str]] = defaultdict(list) + + for name in sorted( + self.required_names, + key=lambda n: (self.reverse_alias[n], n) if n in self.reverse_alias else (n, ""), + ): + # If we haven't seen this name in an import statement, ignore it + if name not in self.module_for: + continue + + m = self.module_for[name] + if m is not None: + # This name was found in a from ... import ... 
+ # Collect the name in the module_map + if name in self.reverse_alias: + name = f"{self.reverse_alias[name]} as {name}" + elif name in self.reexports: + name = f"{name} as {name}" + module_map[m].append(name) + else: + # This name was found in an import ... + # We can already generate the import line + if name in self.reverse_alias: + source = self.reverse_alias[name] + result.append(f"import {source} as {name}\n") + elif name in self.reexports: + assert "." not in name # Because reexports only has nonqualified names + result.append(f"import {name} as {name}\n") + else: + result.append(f"import {name}\n") + + # Now generate all the from ... import ... lines collected in module_map + for module, names in sorted(module_map.items()): + result.append(f"from {module} import {', '.join(sorted(names))}\n") + return result + + +@mypyc_attr(allow_interpreted_subclasses=True) +class BaseStubGenerator: + # These names should be omitted from generated stubs. + IGNORED_DUNDERS: Final = { + "__all__", + "__author__", + "__about__", + "__copyright__", + "__email__", + "__license__", + "__summary__", + "__title__", + "__uri__", + "__str__", + "__repr__", + "__getstate__", + "__setstate__", + "__slots__", + "__builtins__", + "__cached__", + "__file__", + "__name__", + "__package__", + "__path__", + "__spec__", + "__loader__", + } + TYPING_MODULE_NAMES: Final = ("typing", "typing_extensions") + # Special-cased names that are implicitly exported from the stub (from m import y as y). + EXTRA_EXPORTED: Final = { + "pyasn1_modules.rfc2437.univ", + "pyasn1_modules.rfc2459.char", + "pyasn1_modules.rfc2459.univ", + } + + def __init__( + self, + _all_: list[str] | None = None, + include_private: bool = False, + export_less: bool = False, + include_docstrings: bool = False, + ) -> None: + # Best known value of __all__. + self._all_ = _all_ + self._include_private = include_private + self._include_docstrings = include_docstrings + # Disable implicit exports of package-internal imports? + self.export_less = export_less + self._import_lines: list[str] = [] + self._output: list[str] = [] + # Current indent level (indent is hardcoded to 4 spaces). + self._indent = "" + self._toplevel_names: list[str] = [] + self.import_tracker = ImportTracker() + # Top-level members + self.defined_names: set[str] = set() + self.sig_generators = self.get_sig_generators() + # populated by visit_mypy_file + self.module_name: str = "" + # These are "soft" imports for objects which might appear in annotations but not have + # a corresponding import statement. + self.known_imports = { + "_typeshed": ["Incomplete"], + "typing": ["Any", "TypeVar", "NamedTuple", "TypedDict"], + "collections.abc": ["Generator"], + "typing_extensions": ["ParamSpec", "TypeVarTuple"], + } + + def get_sig_generators(self) -> list[SignatureGenerator]: + return [] + + def resolve_name(self, name: str) -> str: + """Return the full name resolving imports and import aliases.""" + if "." 
not in name: + real_module = self.import_tracker.module_for.get(name) + real_short = self.import_tracker.reverse_alias.get(name, name) + if real_module is None and real_short not in self.defined_names: + real_module = "builtins" # not imported and not defined, must be a builtin + else: + name_module, real_short = name.split(".", 1) + real_module = self.import_tracker.reverse_alias.get(name_module, name_module) + resolved_name = real_short if real_module is None else f"{real_module}.{real_short}" + return resolved_name + + def add_name(self, fullname: str, require: bool = True) -> str: + """Add a name to be imported and return the name reference. + + The import will be internal to the stub (i.e don't reexport). + """ + module, name = fullname.rsplit(".", 1) + alias = "_" + name if name in self.defined_names else None + while alias in self.defined_names: + alias = "_" + alias + if module != "builtins" or alias: # don't import from builtins unless needed + self.import_tracker.add_import_from(module, [(name, alias)], require=require) + return alias or name + + def add_import_line(self, line: str) -> None: + """Add a line of text to the import section, unless it's already there.""" + if line not in self._import_lines: + self._import_lines.append(line) + + def get_imports(self) -> str: + """Return the import statements for the stub.""" + imports = "" + if self._import_lines: + imports += "".join(self._import_lines) + imports += "".join(self.import_tracker.import_lines()) + return imports + + def output(self) -> str: + """Return the text for the stub.""" + pieces: list[str] = [] + if imports := self.get_imports(): + pieces.append(imports) + if dunder_all := self.get_dunder_all(): + pieces.append(dunder_all) + if self._output: + pieces.append("".join(self._output)) + return "\n".join(pieces) + + def get_dunder_all(self) -> str: + """Return the __all__ list for the stub.""" + if self._all_: + # Note we emit all names in the runtime __all__ here, even if they + # don't actually exist. If that happens, the runtime has a bug, and + # it's not obvious what the correct behavior should be. We choose + # to reflect the runtime __all__ as closely as possible. + return f"__all__ = {self._all_!r}\n" + return "" + + def add(self, string: str) -> None: + """Add text to generated stub.""" + self._output.append(string) + + def is_top_level(self) -> bool: + """Are we processing the top level of a file?""" + return self._indent == "" + + def indent(self) -> None: + """Add one level of indentation.""" + self._indent += " " + + def dedent(self) -> None: + """Remove one level of indentation.""" + self._indent = self._indent[:-4] + + def record_name(self, name: str) -> None: + """Mark a name as defined. + + This only does anything if at the top level of a module. + """ + if self.is_top_level(): + self._toplevel_names.append(name) + + def is_recorded_name(self, name: str) -> bool: + """Has this name been recorded previously?""" + return self.is_top_level() and name in self._toplevel_names + + def set_defined_names(self, defined_names: set[str]) -> None: + self.defined_names = defined_names + # Names in __all__ are required + for name in self._all_ or (): + self.import_tracker.reexport(name) + + for pkg, imports in self.known_imports.items(): + for t in imports: + # require=False means that the import won't be added unless require_name() is called + # for the object during generation. 
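+                # For example, "_typeshed.Incomplete" only results in a
+                # "from _typeshed import Incomplete" line in the output if some
+                # annotation actually uses the name Incomplete.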
+ self.add_name(f"{pkg}.{t}", require=False) + + def check_undefined_names(self) -> None: + undefined_names = [name for name in self._all_ or [] if name not in self._toplevel_names] + if undefined_names: + if self._output: + self.add("\n") + self.add("# Names in __all__ with no definition:\n") + for name in sorted(undefined_names): + self.add(f"# {name}\n") + + def get_signatures( + self, + default_signature: FunctionSig, + sig_generators: list[SignatureGenerator], + func_ctx: FunctionContext, + ) -> list[FunctionSig]: + for sig_gen in sig_generators: + inferred = sig_gen.get_function_sig(default_signature, func_ctx) + if inferred: + return inferred + + return [default_signature] + + def get_property_type( + self, + default_type: str | None, + sig_generators: list[SignatureGenerator], + func_ctx: FunctionContext, + ) -> str | None: + for sig_gen in sig_generators: + inferred = sig_gen.get_property_type(default_type, func_ctx) + if inferred: + return inferred + + return default_type + + def format_func_def( + self, + sigs: list[FunctionSig], + is_coroutine: bool = False, + decorators: list[str] | None = None, + docstring: str | None = None, + ) -> list[str]: + lines: list[str] = [] + if decorators is None: + decorators = [] + + for signature in sigs: + # dump decorators, just before "def ..." + for deco in decorators: + lines.append(f"{self._indent}{deco}") + + lines.append( + signature.format_sig( + indent=self._indent, + is_async=is_coroutine, + docstring=docstring, + include_docstrings=self._include_docstrings, + ) + ) + return lines + + def format_type_args(self, o: TypeAliasStmt | FuncDef | ClassDef) -> str: + if not o.type_args: + return "" + p = AnnotationPrinter(self) + type_args_list: list[str] = [] + for type_arg in o.type_args: + if type_arg.kind == PARAM_SPEC_KIND: + prefix = "**" + elif type_arg.kind == TYPE_VAR_TUPLE_KIND: + prefix = "*" + else: + prefix = "" + if type_arg.upper_bound: + bound_or_values = f": {type_arg.upper_bound.accept(p)}" + elif type_arg.values: + bound_or_values = f": ({', '.join(v.accept(p) for v in type_arg.values)})" + else: + bound_or_values = "" + if type_arg.default: + default = f" = {type_arg.default.accept(p)}" + else: + default = "" + type_args_list.append(f"{prefix}{type_arg.name}{bound_or_values}{default}") + return "[" + ", ".join(type_args_list) + "]" + + def print_annotation( + self, + t: Type, + known_modules: list[str] | None = None, + local_modules: list[str] | None = None, + ) -> str: + printer = AnnotationPrinter(self, known_modules, local_modules) + return t.accept(printer) + + def is_not_in_all(self, name: str) -> bool: + if self.is_private_name(name): + return False + if self._all_: + return self.is_top_level() and name not in self._all_ + return False + + def is_private_name(self, name: str, fullname: str | None = None) -> bool: + if "__mypy-" in name: + return True # Never include mypy generated symbols + if self._include_private: + return False + if fullname in self.EXTRA_EXPORTED: + return False + if name == "_": + return False + if not name.startswith("_"): + return False + if self._all_ and name in self._all_: + return False + if name.startswith("__") and name.endswith("__"): + return name in self.IGNORED_DUNDERS + return True + + def should_reexport(self, name: str, full_module: str, name_is_alias: bool) -> bool: + if ( + not name_is_alias + and self.module_name + and (self.module_name + "." + name) in self.EXTRA_EXPORTED + ): + # Special case certain names that should be exported, against our general rules. 
+ return True + if name_is_alias: + return False + if self.export_less: + return False + if not self.module_name: + return False + is_private = self.is_private_name(name, full_module + "." + name) + if is_private: + return False + top_level = full_module.split(".")[0] + self_top_level = self.module_name.split(".", 1)[0] + if top_level not in (self_top_level, "_" + self_top_level): + # Export imports from the same package, since we can't reliably tell whether they + # are part of the public API. + return False + if self._all_: + return name in self._all_ + return True diff --git a/.venv/lib/python3.12/site-packages/mypy/subtypes.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/subtypes.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..e11c662 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/subtypes.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/subtypes.py b/.venv/lib/python3.12/site-packages/mypy/subtypes.py new file mode 100644 index 0000000..c02ff06 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/subtypes.py @@ -0,0 +1,2314 @@ +from __future__ import annotations + +from collections.abc import Iterable, Iterator +from contextlib import contextmanager +from typing import Any, Callable, Final, TypeVar, cast +from typing_extensions import TypeAlias as _TypeAlias + +import mypy.applytype +import mypy.constraints +import mypy.typeops +from mypy.checker_state import checker_state +from mypy.erasetype import erase_type +from mypy.expandtype import ( + expand_self_type, + expand_type, + expand_type_by_instance, + freshen_function_type_vars, +) +from mypy.maptype import map_instance_to_supertype + +# Circular import; done in the function instead. 
+# import mypy.solve +from mypy.nodes import ( + ARG_STAR, + ARG_STAR2, + CONTRAVARIANT, + COVARIANT, + INVARIANT, + VARIANCE_NOT_READY, + Context, + Decorator, + FuncBase, + OverloadedFuncDef, + TypeInfo, + Var, +) +from mypy.options import Options +from mypy.state import state +from mypy.types import ( + MYPYC_NATIVE_INT_NAMES, + TUPLE_LIKE_INSTANCE_NAMES, + TYPED_NAMEDTUPLE_NAMES, + AnyType, + CallableType, + DeletedType, + ErasedType, + FormalArgument, + FunctionLike, + Instance, + LiteralType, + NoneType, + NormalizedCallableType, + Overloaded, + Parameters, + ParamSpecType, + PartialType, + ProperType, + TupleType, + Type, + TypeAliasType, + TypedDictType, + TypeOfAny, + TypeType, + TypeVarTupleType, + TypeVarType, + TypeVisitor, + UnboundType, + UninhabitedType, + UnionType, + UnpackType, + find_unpack_in_list, + flatten_nested_unions, + get_proper_type, + is_named_instance, + split_with_prefix_and_suffix, +) +from mypy.types_utils import flatten_types +from mypy.typestate import SubtypeKind, type_state +from mypy.typevars import fill_typevars, fill_typevars_with_any + +# Flags for detected protocol members +IS_SETTABLE: Final = 1 +IS_CLASSVAR: Final = 2 +IS_CLASS_OR_STATIC: Final = 3 +IS_VAR: Final = 4 +IS_EXPLICIT_SETTER: Final = 5 + +TypeParameterChecker: _TypeAlias = Callable[[Type, Type, int, bool, "SubtypeContext"], bool] + + +class SubtypeContext: + def __init__( + self, + *, + # Non-proper subtype flags + ignore_type_params: bool = False, + ignore_pos_arg_names: bool = False, + ignore_declared_variance: bool = False, + # Supported for both proper and non-proper + always_covariant: bool = False, + ignore_promotions: bool = False, + # Proper subtype flags + erase_instances: bool = False, + keep_erased_types: bool = False, + options: Options | None = None, + ) -> None: + self.ignore_type_params = ignore_type_params + self.ignore_pos_arg_names = ignore_pos_arg_names + self.ignore_declared_variance = ignore_declared_variance + self.always_covariant = always_covariant + self.ignore_promotions = ignore_promotions + self.erase_instances = erase_instances + self.keep_erased_types = keep_erased_types + self.options = options + + def check_context(self, proper_subtype: bool) -> None: + # Historically proper and non-proper subtypes were defined using different helpers + # and different visitors. Check if flag values are such that we definitely support. + if proper_subtype: + assert not self.ignore_pos_arg_names and not self.ignore_declared_variance + else: + assert not self.erase_instances and not self.keep_erased_types + + +def is_subtype( + left: Type, + right: Type, + *, + subtype_context: SubtypeContext | None = None, + ignore_type_params: bool = False, + ignore_pos_arg_names: bool = False, + ignore_declared_variance: bool = False, + always_covariant: bool = False, + ignore_promotions: bool = False, + options: Options | None = None, +) -> bool: + """Is 'left' subtype of 'right'? + + Also consider Any to be a subtype of any type, and vice versa. This + recursively applies to components of composite types (List[int] is subtype + of List[Any], for example). + + type_parameter_checker is used to check the type parameters (for example, + A with B in is_subtype(C[A], C[B]). The default checks for subtype relation + between the type arguments (e.g., A and B), taking the variance of the + type var into account. 
+ """ + if left == right: + return True + if subtype_context is None: + subtype_context = SubtypeContext( + ignore_type_params=ignore_type_params, + ignore_pos_arg_names=ignore_pos_arg_names, + ignore_declared_variance=ignore_declared_variance, + always_covariant=always_covariant, + ignore_promotions=ignore_promotions, + options=options, + ) + else: + assert ( + not ignore_type_params + and not ignore_pos_arg_names + and not ignore_declared_variance + and not always_covariant + and not ignore_promotions + and options is None + ), "Don't pass both context and individual flags" + if type_state.is_assumed_subtype(left, right): + return True + if mypy.typeops.is_recursive_pair(left, right): + # This case requires special care because it may cause infinite recursion. + # Our view on recursive types is known under a fancy name of iso-recursive mu-types. + # Roughly this means that a recursive type is defined as an alias where right hand side + # can refer to the type as a whole, for example: + # A = Union[int, Tuple[A, ...]] + # and an alias unrolled once represents the *same type*, in our case all these represent + # the same type: + # A + # Union[int, Tuple[A, ...]] + # Union[int, Tuple[Union[int, Tuple[A, ...]], ...]] + # The algorithm for subtyping is then essentially under the assumption that left <: right, + # check that get_proper_type(left) <: get_proper_type(right). On the example above, + # If we start with: + # A = Union[int, Tuple[A, ...]] + # B = Union[int, Tuple[B, ...]] + # When checking if A <: B we push pair (A, B) onto 'assuming' stack, then when after few + # steps we come back to initial call is_subtype(A, B) and immediately return True. + with pop_on_exit(type_state.get_assumptions(is_proper=False), left, right): + return _is_subtype(left, right, subtype_context, proper_subtype=False) + return _is_subtype(left, right, subtype_context, proper_subtype=False) + + +def is_proper_subtype( + left: Type, + right: Type, + *, + subtype_context: SubtypeContext | None = None, + ignore_promotions: bool = False, + erase_instances: bool = False, + keep_erased_types: bool = False, +) -> bool: + """Is left a proper subtype of right? + + For proper subtypes, there's no need to rely on compatibility due to + Any types. Every usable type is a proper subtype of itself. + + If erase_instances is True, erase left instance *after* mapping it to supertype + (this is useful for runtime isinstance() checks). If keep_erased_types is True, + do not consider ErasedType a subtype of all types (used by type inference against unions). + """ + if left == right: + return True + if subtype_context is None: + subtype_context = SubtypeContext( + ignore_promotions=ignore_promotions, + erase_instances=erase_instances, + keep_erased_types=keep_erased_types, + ) + else: + assert ( + not ignore_promotions and not erase_instances and not keep_erased_types + ), "Don't pass both context and individual flags" + if type_state.is_assumed_proper_subtype(left, right): + return True + if mypy.typeops.is_recursive_pair(left, right): + # Same as for non-proper subtype, see detailed comment there for explanation. 
+ with pop_on_exit(type_state.get_assumptions(is_proper=True), left, right): + return _is_subtype(left, right, subtype_context, proper_subtype=True) + return _is_subtype(left, right, subtype_context, proper_subtype=True) + + +def is_equivalent( + a: Type, + b: Type, + *, + ignore_type_params: bool = False, + ignore_pos_arg_names: bool = False, + options: Options | None = None, + subtype_context: SubtypeContext | None = None, +) -> bool: + return is_subtype( + a, + b, + ignore_type_params=ignore_type_params, + ignore_pos_arg_names=ignore_pos_arg_names, + options=options, + subtype_context=subtype_context, + ) and is_subtype( + b, + a, + ignore_type_params=ignore_type_params, + ignore_pos_arg_names=ignore_pos_arg_names, + options=options, + subtype_context=subtype_context, + ) + + +def is_same_type( + a: Type, b: Type, ignore_promotions: bool = True, subtype_context: SubtypeContext | None = None +) -> bool: + """Are these types proper subtypes of each other? + + This means types may have different representation (e.g. an alias, or + a non-simplified union) but are semantically exchangeable in all contexts. + """ + # First, use fast path for some common types. This is performance-critical. + if ( + type(a) is Instance + and type(b) is Instance + and a.type == b.type + and len(a.args) == len(b.args) + and a.last_known_value is b.last_known_value + ): + return all(is_same_type(x, y) for x, y in zip(a.args, b.args)) + elif isinstance(a, TypeVarType) and isinstance(b, TypeVarType) and a.id == b.id: + return True + + # Note that using ignore_promotions=True (default) makes types like int and int64 + # considered not the same type (which is the case at runtime). + # Also Union[bool, int] (if it wasn't simplified before) will be different + # from plain int, etc. + return is_proper_subtype( + a, b, ignore_promotions=ignore_promotions, subtype_context=subtype_context + ) and is_proper_subtype( + b, a, ignore_promotions=ignore_promotions, subtype_context=subtype_context + ) + + +# This is a common entry point for subtyping checks (both proper and non-proper). +# Never call this private function directly, use the public versions. +def _is_subtype( + left: Type, right: Type, subtype_context: SubtypeContext, proper_subtype: bool +) -> bool: + subtype_context.check_context(proper_subtype) + orig_right = right + orig_left = left + left = get_proper_type(left) + right = get_proper_type(right) + + # Note: Unpack type should not be a subtype of Any, since it may represent + # multiple types. This should always go through the visitor, to check arity. + if ( + not proper_subtype + and isinstance(right, (AnyType, UnboundType, ErasedType)) + and not isinstance(left, UnpackType) + ): + # TODO: should we consider all types proper subtypes of UnboundType and/or + # ErasedType as we do for non-proper subtyping. + return True + + if isinstance(right, UnionType) and not isinstance(left, UnionType): + # Normally, when 'left' is not itself a union, the only way + # 'left' can be a subtype of the union 'right' is if it is a + # subtype of one of the items making up the union. + if proper_subtype: + is_subtype_of_item = any( + is_proper_subtype(orig_left, item, subtype_context=subtype_context) + for item in right.items + ) + else: + is_subtype_of_item = any( + is_subtype(orig_left, item, subtype_context=subtype_context) + for item in right.items + ) + # Recombine rhs literal types, to make an enum type a subtype + # of a union of all enum items as literal types. 
Only do it if + # the previous check didn't succeed, since recombining can be + # expensive. + # `bool` is a special case, because `bool` is `Literal[True, False]`. + if ( + not is_subtype_of_item + and isinstance(left, Instance) + and (left.type.is_enum or left.type.fullname == "builtins.bool") + ): + right = UnionType( + mypy.typeops.try_contracting_literals_in_union(flatten_nested_unions(right.items)) + ) + if proper_subtype: + is_subtype_of_item = any( + is_proper_subtype(orig_left, item, subtype_context=subtype_context) + for item in right.items + ) + else: + is_subtype_of_item = any( + is_subtype(orig_left, item, subtype_context=subtype_context) + for item in right.items + ) + # However, if 'left' is a type variable T, T might also have + # an upper bound which is itself a union. This case will be + # handled below by the SubtypeVisitor. We have to check both + # possibilities, to handle both cases like T <: Union[T, U] + # and cases like T <: B where B is the upper bound of T and is + # a union. (See #2314.) + if not isinstance(left, TypeVarType): + return is_subtype_of_item + elif is_subtype_of_item: + return True + # otherwise, fall through + return left.accept(SubtypeVisitor(orig_right, subtype_context, proper_subtype)) + + +def check_type_parameter( + left: Type, right: Type, variance: int, proper_subtype: bool, subtype_context: SubtypeContext +) -> bool: + # It is safe to consider empty collection literals and similar as covariant, since + # such type can't be stored in a variable, see checker.is_valid_inferred_type(). + if variance == INVARIANT: + p_left = get_proper_type(left) + if isinstance(p_left, UninhabitedType) and p_left.ambiguous: + variance = COVARIANT + # If variance hasn't been inferred yet, we are lenient and default to + # covariance. This shouldn't happen often, but it's very difficult to + # avoid these cases altogether. + if variance == COVARIANT or variance == VARIANCE_NOT_READY: + if proper_subtype: + return is_proper_subtype(left, right, subtype_context=subtype_context) + else: + return is_subtype(left, right, subtype_context=subtype_context) + elif variance == CONTRAVARIANT: + if proper_subtype: + return is_proper_subtype(right, left, subtype_context=subtype_context) + else: + return is_subtype(right, left, subtype_context=subtype_context) + else: + if proper_subtype: + # We pass ignore_promotions=False because it is a default for subtype checks. + # The actual value will be taken from the subtype_context, and it is whatever + # the original caller passed. 
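+            # Invariant positions require the types to be interchangeable,
+            # i.e. subtypes of each other in both directions.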
+ return is_same_type( + left, right, ignore_promotions=False, subtype_context=subtype_context + ) + else: + return is_equivalent(left, right, subtype_context=subtype_context) + + +class SubtypeVisitor(TypeVisitor[bool]): + __slots__ = ( + "right", + "orig_right", + "proper_subtype", + "subtype_context", + "options", + "_subtype_kind", + ) + + def __init__(self, right: Type, subtype_context: SubtypeContext, proper_subtype: bool) -> None: + self.right = get_proper_type(right) + self.orig_right = right + self.proper_subtype = proper_subtype + self.subtype_context = subtype_context + self.options = subtype_context.options + self._subtype_kind = SubtypeVisitor.build_subtype_kind(subtype_context, proper_subtype) + + @staticmethod + def build_subtype_kind(subtype_context: SubtypeContext, proper_subtype: bool) -> SubtypeKind: + return ( + state.strict_optional, + proper_subtype, + subtype_context.ignore_type_params, + subtype_context.ignore_pos_arg_names, + subtype_context.ignore_declared_variance, + subtype_context.always_covariant, + subtype_context.ignore_promotions, + subtype_context.erase_instances, + subtype_context.keep_erased_types, + ) + + def _is_subtype(self, left: Type, right: Type) -> bool: + if self.proper_subtype: + return is_proper_subtype(left, right, subtype_context=self.subtype_context) + return is_subtype(left, right, subtype_context=self.subtype_context) + + def _all_subtypes(self, lefts: Iterable[Type], rights: Iterable[Type]) -> bool: + return all(self._is_subtype(li, ri) for (li, ri) in zip(lefts, rights)) + + # visit_x(left) means: is left (which is an instance of X) a subtype of right? + + def visit_unbound_type(self, left: UnboundType) -> bool: + # This can be called if there is a bad type annotation. The result probably + # doesn't matter much but by returning True we simplify these bad types away + # from unions, which could filter out some bogus messages. + return True + + def visit_any(self, left: AnyType) -> bool: + return isinstance(self.right, AnyType) if self.proper_subtype else True + + def visit_none_type(self, left: NoneType) -> bool: + if state.strict_optional: + if isinstance(self.right, NoneType) or is_named_instance( + self.right, "builtins.object" + ): + return True + if isinstance(self.right, Instance) and self.right.type.is_protocol: + members = self.right.type.protocol_members + # None is compatible with Hashable (and other similar protocols). This is + # slightly sloppy since we don't check the signature of "__hash__". + # None is also compatible with `SupportsStr` protocol. + return not members or all(member in ("__hash__", "__str__") for member in members) + return False + else: + return True + + def visit_uninhabited_type(self, left: UninhabitedType) -> bool: + return True + + def visit_erased_type(self, left: ErasedType) -> bool: + # This may be encountered during type inference. The result probably doesn't + # matter much. + # TODO: it actually does matter, figure out more principled logic about this. + return not self.subtype_context.keep_erased_types + + def visit_deleted_type(self, left: DeletedType) -> bool: + return True + + def visit_instance(self, left: Instance) -> bool: + if left.type.fallback_to_any and not self.proper_subtype: + # NOTE: `None` is a *non-subclassable* singleton, therefore no class + # can by a subtype of it, even with an `Any` fallback. + # This special case is needed to treat descriptors in classes with + # dynamic base classes correctly, see #5456. 
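+            # E.g. a class that inherits from an Any-typed (unresolved) base is
+            # treated here as compatible with everything except None.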
+ return not isinstance(self.right, NoneType) + right = self.right + if isinstance(right, TupleType) and right.partial_fallback.type.is_enum: + return self._is_subtype(left, mypy.typeops.tuple_fallback(right)) + if isinstance(right, TupleType): + if len(right.items) == 1: + # Non-normalized Tuple type (may be left after semantic analysis + # because semanal_typearg visitor is not a type translator). + item = right.items[0] + if isinstance(item, UnpackType): + unpacked = get_proper_type(item.type) + if isinstance(unpacked, Instance): + return self._is_subtype(left, unpacked) + if left.type.has_base(right.partial_fallback.type.fullname): + if not self.proper_subtype: + # Special cases to consider: + # * Plain tuple[Any, ...] instance is a subtype of all tuple types. + # * Foo[*tuple[Any, ...]] (normalized) instance is a subtype of all + # tuples with fallback to Foo (e.g. for variadic NamedTuples). + mapped = map_instance_to_supertype(left, right.partial_fallback.type) + if is_erased_instance(mapped): + if ( + mapped.type.fullname == "builtins.tuple" + or mapped.type.has_type_var_tuple_type + ): + return True + return False + if isinstance(right, TypeVarTupleType): + # tuple[Any, ...] is like Any in the world of tuples (see special case above). + if left.type.has_base("builtins.tuple"): + mapped = map_instance_to_supertype(left, right.tuple_fallback.type) + if isinstance(get_proper_type(mapped.args[0]), AnyType): + return not self.proper_subtype + if isinstance(right, Instance): + if type_state.is_cached_subtype_check(self._subtype_kind, left, right): + return True + if type_state.is_cached_negative_subtype_check(self._subtype_kind, left, right): + return False + if not self.subtype_context.ignore_promotions and not right.type.is_protocol: + for base in left.type.mro: + if base._promote and any( + self._is_subtype(p, self.right) for p in base._promote + ): + type_state.record_subtype_cache_entry(self._subtype_kind, left, right) + return True + # Special case: Low-level integer types are compatible with 'int'. We can't + # use promotions, since 'int' is already promoted to low-level integer types, + # and we can't have circular promotions. + if left.type.alt_promote and left.type.alt_promote.type is right.type: + return True + rname = right.type.fullname + # Always try a nominal check if possible, + # there might be errors that a user wants to silence *once*. + # NamedTuples are a special case, because `NamedTuple` is not listed + # in `TypeInfo.mro`, so when `(a: NamedTuple) -> None` is used, + # we need to check for `is_named_tuple` property + if ( + left.type.has_base(rname) + or rname == "builtins.object" + or ( + rname in TYPED_NAMEDTUPLE_NAMES + and any(l.is_named_tuple for l in left.type.mro) + ) + ) and not self.subtype_context.ignore_declared_variance: + # Map left type to corresponding right instances. + t = map_instance_to_supertype(left, right.type) + if self.subtype_context.erase_instances: + erased = erase_type(t) + assert isinstance(erased, Instance) + t = erased + nominal = True + if right.type.has_type_var_tuple_type: + # For variadic instances we simply find the correct type argument mappings, + # all the heavy lifting is done by the tuple subtyping. 
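+                    # E.g. for a class defined as Foo[T1, *Ts, T2] the prefix and
+                    # suffix are both 1; the "middle" arguments on each side are
+                    # packed into tuple types and compared via tuple subtyping.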
+ assert right.type.type_var_tuple_prefix is not None + assert right.type.type_var_tuple_suffix is not None + prefix = right.type.type_var_tuple_prefix + suffix = right.type.type_var_tuple_suffix + tvt = right.type.defn.type_vars[prefix] + assert isinstance(tvt, TypeVarTupleType) + fallback = tvt.tuple_fallback + left_prefix, left_middle, left_suffix = split_with_prefix_and_suffix( + t.args, prefix, suffix + ) + right_prefix, right_middle, right_suffix = split_with_prefix_and_suffix( + right.args, prefix, suffix + ) + left_args = ( + left_prefix + (TupleType(list(left_middle), fallback),) + left_suffix + ) + right_args = ( + right_prefix + (TupleType(list(right_middle), fallback),) + right_suffix + ) + if not self.proper_subtype and is_erased_instance(t): + return True + if len(left_args) != len(right_args): + return False + type_params = zip(left_args, right_args, right.type.defn.type_vars) + else: + type_params = zip(t.args, right.args, right.type.defn.type_vars) + if not self.subtype_context.ignore_type_params: + tried_infer = False + for lefta, righta, tvar in type_params: + if isinstance(tvar, TypeVarType): + if tvar.variance == VARIANCE_NOT_READY and not tried_infer: + infer_class_variances(right.type) + tried_infer = True + if ( + self.subtype_context.always_covariant + and tvar.variance == INVARIANT + ): + variance = COVARIANT + else: + variance = tvar.variance + if not check_type_parameter( + lefta, righta, variance, self.proper_subtype, self.subtype_context + ): + nominal = False + else: + # TODO: everywhere else ParamSpecs are handled as invariant. + if not check_type_parameter( + lefta, righta, COVARIANT, self.proper_subtype, self.subtype_context + ): + nominal = False + if nominal: + type_state.record_subtype_cache_entry(self._subtype_kind, left, right) + else: + type_state.record_negative_subtype_cache_entry(self._subtype_kind, left, right) + return nominal + if right.type.is_protocol and is_protocol_implementation( + left, right, proper_subtype=self.proper_subtype, options=self.options + ): + return True + # We record negative cache entry here, and not in the protocol check like we do for + # positive cache, to avoid accidentally adding a type that is not a structural + # subtype, but is a nominal subtype (involving type: ignore override). + type_state.record_negative_subtype_cache_entry(self._subtype_kind, left, right) + return False + if isinstance(right, TypeType): + item = right.item + if isinstance(item, TupleType): + item = mypy.typeops.tuple_fallback(item) + # TODO: this is a bit arbitrary, we should only skip Any-related cases. + if not self.proper_subtype: + if is_named_instance(left, "builtins.type"): + return self._is_subtype(TypeType(AnyType(TypeOfAny.special_form)), right) + if left.type.is_metaclass(): + if isinstance(item, AnyType): + return True + if isinstance(item, Instance): + return is_named_instance(item, "builtins.object") + if isinstance(right, LiteralType) and left.last_known_value is not None: + return self._is_subtype(left.last_known_value, right) + if isinstance(right, FunctionLike): + # Special case: Instance can be a subtype of Callable / Overloaded. + call = find_member("__call__", left, left, is_operator=True) + if call: + return self._is_subtype(call, right) + return False + else: + return False + + def visit_type_var(self, left: TypeVarType) -> bool: + right = self.right + if isinstance(right, TypeVarType) and left.id == right.id: + # Fast path for most common case. 
+ if left.upper_bound == right.upper_bound: + return True + # Corner case for self-types in classes generic in type vars + # with value restrictions. + if left.id.is_self(): + return True + return self._is_subtype(left.upper_bound, right.upper_bound) + if left.values and self._is_subtype(UnionType.make_union(left.values), right): + return True + return self._is_subtype(left.upper_bound, self.right) + + def visit_param_spec(self, left: ParamSpecType) -> bool: + right = self.right + if ( + isinstance(right, ParamSpecType) + and right.id == left.id + and right.flavor == left.flavor + ): + return self._is_subtype(left.prefix, right.prefix) + if isinstance(right, Parameters) and are_trivial_parameters(right): + return True + return self._is_subtype(left.upper_bound, self.right) + + def visit_type_var_tuple(self, left: TypeVarTupleType) -> bool: + right = self.right + if isinstance(right, TypeVarTupleType) and right.id == left.id: + return left.min_len >= right.min_len + return self._is_subtype(left.upper_bound, self.right) + + def visit_unpack_type(self, left: UnpackType) -> bool: + # TODO: Ideally we should not need this (since it is not a real type). + # Instead callers (upper level types) should handle it when it appears in type list. + if isinstance(self.right, UnpackType): + return self._is_subtype(left.type, self.right.type) + if isinstance(self.right, Instance) and self.right.type.fullname == "builtins.object": + return True + return False + + def visit_parameters(self, left: Parameters) -> bool: + if isinstance(self.right, Parameters): + return are_parameters_compatible( + left, + self.right, + is_compat=self._is_subtype, + # TODO: this should pass the current value, but then couple tests fail. + is_proper_subtype=False, + ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, + ) + elif isinstance(self.right, Instance): + return self.right.type.fullname == "builtins.object" + else: + return False + + def visit_callable_type(self, left: CallableType) -> bool: + right = self.right + if isinstance(right, CallableType): + if left.type_guard is not None and right.type_guard is not None: + if not self._is_subtype(left.type_guard, right.type_guard): + return False + elif left.type_is is not None and right.type_is is not None: + # For TypeIs we have to check both ways; it is unsafe to pass + # a TypeIs[Child] when a TypeIs[Parent] is expected, because + # if the narrower returns False, we assume that the narrowed value is + # *not* a Parent. + if not self._is_subtype(left.type_is, right.type_is) or not self._is_subtype( + right.type_is, left.type_is + ): + return False + elif right.type_guard is not None and left.type_guard is None: + # This means that one function has `TypeGuard` and other does not. + # They are not compatible. See https://github.com/python/mypy/issues/11307 + return False + elif right.type_is is not None and left.type_is is None: + # Similarly, if one function has `TypeIs` and the other does not, + # they are not compatible. 
+ return False + return is_callable_compatible( + left, + right, + is_compat=self._is_subtype, + is_proper_subtype=self.proper_subtype, + ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, + strict_concatenate=( + (self.options.extra_checks or self.options.strict_concatenate) + if self.options + else False + ), + ) + elif isinstance(right, Overloaded): + return all(self._is_subtype(left, item) for item in right.items) + elif isinstance(right, Instance): + if right.type.is_protocol and "__call__" in right.type.protocol_members: + # OK, a callable can implement a protocol with a `__call__` member. + call = find_member("__call__", right, right, is_operator=True) + assert call is not None + if self._is_subtype(left, call): + if len(right.type.protocol_members) == 1: + return True + if is_protocol_implementation(left.fallback, right, skip=["__call__"]): + return True + if right.type.is_protocol and left.is_type_obj(): + ret_type = get_proper_type(left.ret_type) + if isinstance(ret_type, TupleType): + ret_type = mypy.typeops.tuple_fallback(ret_type) + if isinstance(ret_type, Instance) and is_protocol_implementation( + ret_type, right, proper_subtype=self.proper_subtype, class_obj=True + ): + return True + return self._is_subtype(left.fallback, right) + elif isinstance(right, TypeType): + # This is unsound, we don't check the __init__ signature. + return left.is_type_obj() and self._is_subtype(left.ret_type, right.item) + else: + return False + + def visit_tuple_type(self, left: TupleType) -> bool: + right = self.right + if isinstance(right, Instance): + if is_named_instance(right, "typing.Sized"): + return True + elif is_named_instance(right, TUPLE_LIKE_INSTANCE_NAMES): + if right.args: + iter_type = right.args[0] + else: + if self.proper_subtype: + return False + iter_type = AnyType(TypeOfAny.special_form) + if is_named_instance(right, "builtins.tuple") and isinstance( + get_proper_type(iter_type), AnyType + ): + # TODO: We shouldn't need this special case. This is currently needed + # for isinstance(x, tuple), though it's unclear why. + return True + for li in left.items: + if isinstance(li, UnpackType): + unpack = get_proper_type(li.type) + if isinstance(unpack, TypeVarTupleType): + unpack = get_proper_type(unpack.upper_bound) + assert ( + isinstance(unpack, Instance) + and unpack.type.fullname == "builtins.tuple" + ) + li = unpack.args[0] + if not self._is_subtype(li, iter_type): + return False + return True + elif self._is_subtype(left.partial_fallback, right) and self._is_subtype( + mypy.typeops.tuple_fallback(left), right + ): + return True + return False + elif isinstance(right, TupleType): + # If right has a variadic unpack this needs special handling. If there is a TypeVarTuple + # unpack, item count must coincide. If the left has variadic unpack but right + # doesn't have one, we will fall through to False down the line. + if self.variadic_tuple_subtype(left, right): + return True + if len(left.items) != len(right.items): + return False + if any(not self._is_subtype(l, r) for l, r in zip(left.items, right.items)): + return False + if is_named_instance(right.partial_fallback, "builtins.tuple"): + # No need to verify fallback. This is useful since the calculated fallback + # may be inconsistent due to how we calculate joins between unions vs. + # non-unions. For example, join(int, str) == object, whereas + # join(Union[int, C], Union[str, C]) == Union[int, str, C]. + return True + if is_named_instance(left.partial_fallback, "builtins.tuple"): + # Again, no need to verify. 
At this point we know the right fallback + # is a subclass of tuple, so if left is plain tuple, it cannot be a subtype. + return False + # At this point we know both fallbacks are non-tuple. + return self._is_subtype(left.partial_fallback, right.partial_fallback) + else: + return False + + def variadic_tuple_subtype(self, left: TupleType, right: TupleType) -> bool: + """Check subtyping between two potentially variadic tuples. + + Most non-trivial cases here are due to variadic unpacks like *tuple[X, ...], + we handle such unpacks as infinite unions Tuple[()] | Tuple[X] | Tuple[X, X] | ... + + Note: the cases where right is fixed or has *Ts unpack should be handled + by the caller. + """ + right_unpack_index = find_unpack_in_list(right.items) + if right_unpack_index is None: + # This case should be handled by the caller. + return False + right_unpack = right.items[right_unpack_index] + assert isinstance(right_unpack, UnpackType) + right_unpacked = get_proper_type(right_unpack.type) + if not isinstance(right_unpacked, Instance): + # This case should be handled by the caller. + return False + assert right_unpacked.type.fullname == "builtins.tuple" + right_item = right_unpacked.args[0] + right_prefix = right_unpack_index + right_suffix = len(right.items) - right_prefix - 1 + left_unpack_index = find_unpack_in_list(left.items) + if left_unpack_index is None: + # Simple case: left is fixed, simply find correct mapping to the right + # (effectively selecting item with matching length from an infinite union). + if len(left.items) < right_prefix + right_suffix: + return False + prefix, middle, suffix = split_with_prefix_and_suffix( + tuple(left.items), right_prefix, right_suffix + ) + if not all( + self._is_subtype(li, ri) for li, ri in zip(prefix, right.items[:right_prefix]) + ): + return False + if right_suffix and not all( + self._is_subtype(li, ri) for li, ri in zip(suffix, right.items[-right_suffix:]) + ): + return False + return all(self._is_subtype(li, right_item) for li in middle) + else: + if len(left.items) < len(right.items): + # There are some items on the left that will never have a matching length + # on the right. + return False + left_prefix = left_unpack_index + left_suffix = len(left.items) - left_prefix - 1 + left_unpack = left.items[left_unpack_index] + assert isinstance(left_unpack, UnpackType) + left_unpacked = get_proper_type(left_unpack.type) + if not isinstance(left_unpacked, Instance): + # *Ts unpack can't be split, except if it is all mapped to Anys or objects. + if self.is_top_type(right_item): + right_prefix_types, middle, right_suffix_types = split_with_prefix_and_suffix( + tuple(right.items), left_prefix, left_suffix + ) + if not all( + self.is_top_type(ri) or isinstance(ri, UnpackType) for ri in middle + ): + return False + # Also check the tails match as well. + return self._all_subtypes( + left.items[:left_prefix], right_prefix_types + ) and self._all_subtypes(left.items[-left_suffix:], right_suffix_types) + return False + assert left_unpacked.type.fullname == "builtins.tuple" + left_item = left_unpacked.args[0] + + # The most tricky case with two variadic unpacks we handle similar to union + # subtyping: *each* item on the left, must be a subtype of *some* item on the right. + # For this we first check the "asymptotic case", i.e. that both unpacks a subtypes, + # and then check subtyping for all finite overlaps. 
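+            # Illustration: for left = tuple[*tuple[int, ...]] and
+            # right = tuple[int, *tuple[int, ...]], the loop below additionally
+            # checks the finite expansions tuple[()] and tuple[int] of the left
+            # unpack against right.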
+ if not self._is_subtype(left_item, right_item): + return False + max_overlap = max(0, right_prefix - left_prefix, right_suffix - left_suffix) + for overlap in range(max_overlap + 1): + repr_items = left.items[:left_prefix] + [left_item] * overlap + if left_suffix: + repr_items += left.items[-left_suffix:] + left_repr = left.copy_modified(items=repr_items) + if not self._is_subtype(left_repr, right): + return False + return True + + def is_top_type(self, typ: Type) -> bool: + if not self.proper_subtype and isinstance(get_proper_type(typ), AnyType): + return True + return is_named_instance(typ, "builtins.object") + + def visit_typeddict_type(self, left: TypedDictType) -> bool: + right = self.right + if isinstance(right, Instance): + return self._is_subtype(left.fallback, right) + elif isinstance(right, TypedDictType): + if left == right: + return True # Fast path + if not left.names_are_wider_than(right): + return False + for name, l, r in left.zip(right): + # TODO: should we pass on the full subtype_context here and below? + right_readonly = name in right.readonly_keys + if not right_readonly: + if self.proper_subtype: + check = is_same_type(l, r) + else: + check = is_equivalent( + l, + r, + ignore_type_params=self.subtype_context.ignore_type_params, + options=self.options, + ) + else: + # Read-only items behave covariantly + check = self._is_subtype(l, r) + if not check: + return False + # Non-required key is not compatible with a required key since + # indexing may fail unexpectedly if a required key is missing. + # Required key is not compatible with a non-read-only non-required + # key since the prior doesn't support 'del' but the latter should + # support it. + # Required key is compatible with a read-only non-required key. + required_differ = (name in left.required_keys) != (name in right.required_keys) + if not right_readonly and required_differ: + return False + # Readonly fields check: + # + # A = TypedDict('A', {'x': ReadOnly[int]}) + # B = TypedDict('B', {'x': int}) + # def reset_x(b: B) -> None: + # b['x'] = 0 + # + # So, `A` cannot be a subtype of `B`, while `B` can be a subtype of `A`, + # because you can use `B` everywhere you use `A`, but not the other way around. + if name in left.readonly_keys and name not in right.readonly_keys: + return False + # (NOTE: Fallbacks don't matter.) + return True + else: + return False + + def visit_literal_type(self, left: LiteralType) -> bool: + if isinstance(self.right, LiteralType): + return left == self.right + else: + return self._is_subtype(left.fallback, self.right) + + def visit_overloaded(self, left: Overloaded) -> bool: + right = self.right + if isinstance(right, Instance): + if right.type.is_protocol and "__call__" in right.type.protocol_members: + # same as for CallableType + call = find_member("__call__", right, right, is_operator=True) + assert call is not None + if self._is_subtype(left, call): + if len(right.type.protocol_members) == 1: + return True + if is_protocol_implementation(left.fallback, right, skip=["__call__"]): + return True + return self._is_subtype(left.fallback, right) + elif isinstance(right, CallableType): + for item in left.items: + if self._is_subtype(item, right): + return True + return False + elif isinstance(right, Overloaded): + if left == self.right: + # When it is the same overload, then the types are equal. + return True + + # Ensure each overload on the right side (the supertype) is accounted for. 
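+            # Every right-hand item must be matched by some left-hand item, and
+            # the matching left items must appear in the same relative order.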
+ previous_match_left_index = -1 + matched_overloads = set() + + for right_item in right.items: + found_match = False + + for left_index, left_item in enumerate(left.items): + subtype_match = self._is_subtype(left_item, right_item) + + # Order matters: we need to make sure that the index of + # this item is at least the index of the previous one. + if subtype_match and previous_match_left_index <= left_index: + previous_match_left_index = left_index + found_match = True + matched_overloads.add(left_index) + break + else: + # If this one overlaps with the supertype in any way, but it wasn't + # an exact match, then it's a potential error. + strict_concat = ( + (self.options.extra_checks or self.options.strict_concatenate) + if self.options + else False + ) + if left_index not in matched_overloads and ( + is_callable_compatible( + left_item, + right_item, + is_compat=self._is_subtype, + is_proper_subtype=self.proper_subtype, + ignore_return=True, + ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, + strict_concatenate=strict_concat, + ) + or is_callable_compatible( + right_item, + left_item, + is_compat=self._is_subtype, + is_proper_subtype=self.proper_subtype, + ignore_return=True, + ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, + strict_concatenate=strict_concat, + ) + ): + return False + + if not found_match: + return False + return True + elif isinstance(right, UnboundType): + return True + elif isinstance(right, TypeType): + # All the items must have the same type object status, so + # it's sufficient to query only (any) one of them. + # This is unsound, we don't check all the __init__ signatures. + return left.is_type_obj() and self._is_subtype(left.items[0], right) + else: + return False + + def visit_union_type(self, left: UnionType) -> bool: + if isinstance(self.right, Instance): + literal_types: set[Instance] = set() + # avoid redundant check for union of literals + for item in left.relevant_items(): + p_item = get_proper_type(item) + lit_type = mypy.typeops.simple_literal_type(p_item) + if lit_type is not None: + if lit_type in literal_types: + continue + literal_types.add(lit_type) + item = lit_type + if not self._is_subtype(item, self.orig_right): + return False + return True + + elif isinstance(self.right, UnionType): + # prune literals early to avoid nasty quadratic behavior which would otherwise arise when checking + # subtype relationships between slightly different narrowings of an Enum + # we achieve O(N+M) instead of O(N*M) + + fast_check: set[ProperType] = set() + + for item in flatten_types(self.right.relevant_items()): + p_item = get_proper_type(item) + fast_check.add(p_item) + if isinstance(p_item, Instance) and p_item.last_known_value is not None: + fast_check.add(p_item.last_known_value) + + for item in left.relevant_items(): + p_item = get_proper_type(item) + if p_item in fast_check: + continue + lit_type = mypy.typeops.simple_literal_type(p_item) + if lit_type in fast_check: + continue + if not self._is_subtype(item, self.orig_right): + return False + return True + + return all(self._is_subtype(item, self.orig_right) for item in left.items) + + def visit_partial_type(self, left: PartialType) -> bool: + # This is indeterminate as we don't really know the complete type yet. + if self.proper_subtype: + # TODO: What's the right thing to do here? + return False + if left.type is None: + # Special case, partial `None`. This might happen when defining + # class-level attributes with explicit `None`. + # We can still recover from this. 
+ # https://github.com/python/mypy/issues/11105 + return self.visit_none_type(NoneType()) + raise RuntimeError(f'Partial type "{left}" cannot be checked with "issubtype()"') + + def visit_type_type(self, left: TypeType) -> bool: + right = self.right + if left.is_type_form: + if isinstance(right, TypeType): + if not right.is_type_form: + return False + return self._is_subtype(left.item, right.item) + if isinstance(right, Instance): + if right.type.fullname == "builtins.object": + return True + return False + return False + else: # not left.is_type_form + if isinstance(right, TypeType): + return self._is_subtype(left.item, right.item) + if isinstance(right, Overloaded) and right.is_type_obj(): + # Same as in other direction: if it's a constructor callable, all + # items should belong to the same class' constructor, so it's enough + # to check one of them. + return self._is_subtype(left, right.items[0]) + if isinstance(right, CallableType): + if self.proper_subtype and not right.is_type_obj(): + # We can't accept `Type[X]` as a *proper* subtype of Callable[P, X] + # since this will break transitivity of subtyping. + return False + # This is unsound, we don't check the __init__ signature. + return self._is_subtype(left.item, right.ret_type) + + if isinstance(right, Instance): + if right.type.fullname in ["builtins.object", "builtins.type"]: + # TODO: Strictly speaking, the type builtins.type is considered equivalent to + # Type[Any]. However, this would break the is_proper_subtype check in + # conditional_types for cases like isinstance(x, type) when the type + # of x is Type[int]. It's unclear what's the right way to address this. + return True + item = left.item + if isinstance(item, TypeVarType): + item = get_proper_type(item.upper_bound) + if isinstance(item, Instance): + if right.type.is_protocol and is_protocol_implementation( + item, right, proper_subtype=self.proper_subtype, class_obj=True + ): + return True + metaclass = item.type.metaclass_type + return metaclass is not None and self._is_subtype(metaclass, right) + return False + + def visit_type_alias_type(self, left: TypeAliasType) -> bool: + assert False, f"This should be never called, got {left}" + + +T = TypeVar("T", bound=Type) + + +@contextmanager +def pop_on_exit(stack: list[tuple[T, T]], left: T, right: T) -> Iterator[None]: + stack.append((left, right)) + yield + stack.pop() + + +def is_protocol_implementation( + left: Instance, + right: Instance, + proper_subtype: bool = False, + class_obj: bool = False, + skip: list[str] | None = None, + options: Options | None = None, +) -> bool: + """Check whether 'left' implements the protocol 'right'. + + If 'proper_subtype' is True, then check for a proper subtype. + Treat recursive protocols by using the 'assuming' structural subtype matrix + (in sparse representation, i.e. as a list of pairs (subtype, supertype)), + see also comment in nodes.TypeInfo. When we enter a check for classes + (A, P), defined as following:: + + class P(Protocol): + def f(self) -> P: ... + class A: + def f(self) -> A: ... + + this results in A being a subtype of P without infinite recursion. + On every false result, we pop the assumption, thus avoiding an infinite recursion + as well. + """ + assert right.type.is_protocol + if skip is None: + skip = [] + # We need to record this check to generate protocol fine-grained dependencies. 
+ type_state.record_protocol_subtype_check(left.type, right.type) + # nominal subtyping currently ignores '__init__' and '__new__' signatures + members_not_to_check = {"__init__", "__new__"} + members_not_to_check.update(skip) + # Trivial check that circumvents the bug described in issue 9771: + if left.type.is_protocol: + members_right = set(right.type.protocol_members) - members_not_to_check + members_left = set(left.type.protocol_members) - members_not_to_check + if not members_right.issubset(members_left): + return False + assuming = right.type.assuming_proper if proper_subtype else right.type.assuming + for l, r in reversed(assuming): + if l == left and r == right: + return True + with pop_on_exit(assuming, left, right): + for member in right.type.protocol_members: + if member in members_not_to_check: + continue + ignore_names = member != "__call__" # __call__ can be passed kwargs + # The third argument below indicates to what self type is bound. + # We always bind self to the subtype. (Similarly to nominal types). + supertype = find_member(member, right, left) + assert supertype is not None + + subtype = mypy.typeops.get_protocol_member(left, member, class_obj) + # Useful for debugging: + # print(member, 'of', left, 'has type', subtype) + # print(member, 'of', right, 'has type', supertype) + if not subtype: + return False + if not proper_subtype: + # Nominal check currently ignores arg names + # NOTE: If we ever change this, be sure to also change the call to + # SubtypeVisitor.build_subtype_kind(...) down below. + is_compat = is_subtype( + subtype, supertype, ignore_pos_arg_names=ignore_names, options=options + ) + else: + is_compat = is_proper_subtype(subtype, supertype) + if not is_compat: + return False + if isinstance(get_proper_type(subtype), NoneType) and isinstance( + get_proper_type(supertype), CallableType + ): + # We want __hash__ = None idiom to work even without --strict-optional + return False + subflags = get_member_flags(member, left, class_obj=class_obj) + superflags = get_member_flags(member, right) + if IS_SETTABLE in superflags: + # Check opposite direction for settable attributes. + if IS_EXPLICIT_SETTER in superflags: + supertype = find_member(member, right, left, is_lvalue=True) + if IS_EXPLICIT_SETTER in subflags: + subtype = mypy.typeops.get_protocol_member( + left, member, class_obj, is_lvalue=True + ) + # At this point we know attribute is present on subtype, otherwise we + # would return False above. + assert supertype is not None and subtype is not None + if not is_subtype(supertype, subtype, options=options): + return False + if IS_SETTABLE in superflags and IS_SETTABLE not in subflags: + return False + if not class_obj: + if IS_SETTABLE not in superflags: + if IS_CLASSVAR in superflags and IS_CLASSVAR not in subflags: + return False + elif (IS_CLASSVAR in subflags) != (IS_CLASSVAR in superflags): + return False + else: + if IS_VAR in superflags and IS_CLASSVAR not in subflags: + # Only class variables are allowed for class object access. + return False + if IS_CLASSVAR in superflags: + # This can be never matched by a class object. 
+ return False + # This rule is copied from nominal check in checker.py + if IS_CLASS_OR_STATIC in superflags and IS_CLASS_OR_STATIC not in subflags: + return False + + if not proper_subtype: + # Nominal check currently ignores arg names, but __call__ is special for protocols + ignore_names = right.type.protocol_members != ["__call__"] + else: + ignore_names = False + subtype_kind = SubtypeVisitor.build_subtype_kind( + subtype_context=SubtypeContext(ignore_pos_arg_names=ignore_names), + proper_subtype=proper_subtype, + ) + type_state.record_subtype_cache_entry(subtype_kind, left, right) + return True + + +def find_member( + name: str, + itype: Instance, + subtype: Type, + *, + is_operator: bool = False, + class_obj: bool = False, + is_lvalue: bool = False, +) -> Type | None: + type_checker = checker_state.type_checker + if type_checker is None: + # Unfortunately, there are many scenarios where someone calls is_subtype() before + # type checking phase. In this case we fallback to old (incomplete) logic. + # TODO: reduce number of such cases (e.g. semanal_typeargs, post-semanal plugins). + return find_member_simple( + name, itype, subtype, is_operator=is_operator, class_obj=class_obj, is_lvalue=is_lvalue + ) + + # We don't use ATTR_DEFINED error code below (since missing attributes can cause various + # other error codes), instead we perform quick node lookup with all the fallbacks. + info = itype.type + sym = info.get(name) + node = sym.node if sym else None + if not node: + name_not_found = True + if ( + name not in ["__getattr__", "__setattr__", "__getattribute__"] + and not is_operator + and not class_obj + and itype.extra_attrs is None # skip ModuleType.__getattr__ + ): + for method_name in ("__getattribute__", "__getattr__"): + method = info.get_method(method_name) + if method and method.info.fullname != "builtins.object": + name_not_found = False + break + if name_not_found: + if info.fallback_to_any or class_obj and info.meta_fallback_to_any: + return AnyType(TypeOfAny.special_form) + if itype.extra_attrs and name in itype.extra_attrs.attrs: + return itype.extra_attrs.attrs[name] + return None + + from mypy.checkmember import ( + MemberContext, + analyze_class_attribute_access, + analyze_instance_member_access, + ) + + mx = MemberContext( + is_lvalue=is_lvalue, + is_super=False, + is_operator=is_operator, + original_type=TypeType.make_normalized(itype) if class_obj else itype, + self_type=TypeType.make_normalized(subtype) if class_obj else subtype, + context=Context(), # all errors are filtered, but this is a required argument + chk=type_checker, + suppress_errors=True, + # This is needed to avoid infinite recursion in situations involving protocols like + # class P(Protocol[T]): + # def combine(self, other: P[S]) -> P[Tuple[T, S]]: ... + # Normally we call freshen_all_functions_type_vars() during attribute access, + # to avoid type variable id collisions, but for protocols this means we can't + # use the assumption stack, that will grow indefinitely. + # TODO: find a cleaner solution that doesn't involve massive perf impact. 
+ preserve_type_var_ids=True, + ) + with type_checker.msg.filter_errors(filter_deprecated=True): + if class_obj: + fallback = itype.type.metaclass_type or mx.named_type("builtins.type") + return analyze_class_attribute_access(itype, name, mx, mcs_fallback=fallback) + else: + return analyze_instance_member_access(name, itype, mx, info) + + +def find_member_simple( + name: str, + itype: Instance, + subtype: Type, + *, + is_operator: bool = False, + class_obj: bool = False, + is_lvalue: bool = False, +) -> Type | None: + """Find the type of member by 'name' in 'itype's TypeInfo. + + Find the member type after applying type arguments from 'itype', and binding + 'self' to 'subtype'. Return None if member was not found. + """ + info = itype.type + method = info.get_method(name) + if method: + if isinstance(method, Decorator): + return find_node_type(method.var, itype, subtype, class_obj=class_obj) + if method.is_property: + assert isinstance(method, OverloadedFuncDef) + dec = method.items[0] + assert isinstance(dec, Decorator) + # Pass on is_lvalue flag as this may be a property with different setter type. + return find_node_type( + dec.var, itype, subtype, class_obj=class_obj, is_lvalue=is_lvalue + ) + return find_node_type(method, itype, subtype, class_obj=class_obj) + else: + # don't have such method, maybe variable or decorator? + node = info.get(name) + v = node.node if node else None + if isinstance(v, Var): + return find_node_type(v, itype, subtype, class_obj=class_obj) + if ( + not v + and name not in ["__getattr__", "__setattr__", "__getattribute__"] + and not is_operator + and not class_obj + and itype.extra_attrs is None # skip ModuleType.__getattr__ + ): + for method_name in ("__getattribute__", "__getattr__"): + # Normally, mypy assumes that instances that define __getattr__ have all + # attributes with the corresponding return type. If this will produce + # many false negatives, then this could be prohibited for + # structural subtyping. + method = info.get_method(method_name) + if method and method.info.fullname != "builtins.object": + if isinstance(method, Decorator): + getattr_type = get_proper_type(find_node_type(method.var, itype, subtype)) + else: + getattr_type = get_proper_type(find_node_type(method, itype, subtype)) + if isinstance(getattr_type, CallableType): + return getattr_type.ret_type + return getattr_type + if itype.type.fallback_to_any or class_obj and itype.type.meta_fallback_to_any: + return AnyType(TypeOfAny.special_form) + if isinstance(v, TypeInfo): + # PEP 544 doesn't specify anything about such use cases. So we just try + # to do something meaningful (at least we should not crash). + return TypeType(fill_typevars_with_any(v)) + if itype.extra_attrs and name in itype.extra_attrs.attrs: + return itype.extra_attrs.attrs[name] + return None + + +def get_member_flags(name: str, itype: Instance, class_obj: bool = False) -> set[int]: + """Detect whether a member 'name' is settable, whether it is an + instance or class variable, and whether it is class or static method. + + The flags are defined as following: + * IS_SETTABLE: whether this attribute can be set, not set for methods and + non-settable properties; + * IS_CLASSVAR: set if the variable is annotated as 'x: ClassVar[t]'; + * IS_CLASS_OR_STATIC: set for methods decorated with @classmethod or + with @staticmethod. 
+ """ + info = itype.type + method = info.get_method(name) + setattr_meth = info.get_method("__setattr__") + if method: + if isinstance(method, Decorator): + if method.var.is_staticmethod or method.var.is_classmethod: + return {IS_CLASS_OR_STATIC} + elif method.var.is_property: + return {IS_VAR} + elif method.is_property: # this could be settable property + assert isinstance(method, OverloadedFuncDef) + dec = method.items[0] + assert isinstance(dec, Decorator) + if dec.var.is_settable_property or setattr_meth: + flags = {IS_VAR, IS_SETTABLE} + if dec.var.setter_type is not None: + flags.add(IS_EXPLICIT_SETTER) + return flags + else: + return {IS_VAR} + return set() # Just a regular method + node = info.get(name) + if not node: + if setattr_meth: + return {IS_SETTABLE} + if itype.extra_attrs and name in itype.extra_attrs.attrs: + flags = set() + if name not in itype.extra_attrs.immutable: + flags.add(IS_SETTABLE) + return flags + return set() + v = node.node + # just a variable + if isinstance(v, Var): + if v.is_property: + return {IS_VAR} + flags = {IS_VAR} + if not v.is_final: + flags.add(IS_SETTABLE) + # TODO: define cleaner rules for class vs instance variables. + if v.is_classvar and not is_descriptor(v.type): + flags.add(IS_CLASSVAR) + if class_obj and v.is_inferred: + flags.add(IS_CLASSVAR) + return flags + return set() + + +def is_descriptor(typ: Type | None) -> bool: + typ = get_proper_type(typ) + if isinstance(typ, Instance): + return typ.type.get("__get__") is not None + if isinstance(typ, UnionType): + return all(is_descriptor(item) for item in typ.relevant_items()) + return False + + +def find_node_type( + node: Var | FuncBase, + itype: Instance, + subtype: Type, + class_obj: bool = False, + is_lvalue: bool = False, +) -> Type: + """Find type of a variable or method 'node' (maybe also a decorated method). + Apply type arguments from 'itype', and bind 'self' to 'subtype'. + """ + from mypy.typeops import bind_self + + if isinstance(node, FuncBase): + typ: Type | None = mypy.typeops.function_type( + node, fallback=Instance(itype.type.mro[-1], []) + ) + else: + # This part and the one below are simply copies of the logic from checkmember.py. + if node.is_settable_property and is_lvalue: + typ = node.setter_type + if typ is None and node.is_ready: + typ = node.type + else: + typ = node.type + if typ is not None: + typ = expand_self_type(node, typ, subtype) + p_typ = get_proper_type(typ) + if typ is None: + return AnyType(TypeOfAny.from_error) + # We don't need to bind 'self' for static methods, since there is no 'self'. + if isinstance(node, FuncBase) or ( + isinstance(p_typ, FunctionLike) + and node.is_initialized_in_class + and not node.is_staticmethod + ): + assert isinstance(p_typ, FunctionLike) + if class_obj and not ( + node.is_class if isinstance(node, FuncBase) else node.is_classmethod + ): + # Don't bind instance methods on class objects. 
+ signature = p_typ + else: + signature = bind_self( + p_typ, subtype, is_classmethod=isinstance(node, Var) and node.is_classmethod + ) + if node.is_property and not class_obj: + assert isinstance(signature, CallableType) + if ( + isinstance(node, Var) + and node.is_settable_property + and is_lvalue + and node.setter_type is not None + ): + typ = signature.arg_types[0] + else: + typ = signature.ret_type + else: + typ = signature + itype = map_instance_to_supertype(itype, node.info) + typ = expand_type_by_instance(typ, itype) + return typ + + +def non_method_protocol_members(tp: TypeInfo) -> list[str]: + """Find all non-callable members of a protocol.""" + + assert tp.is_protocol + result: list[str] = [] + anytype = AnyType(TypeOfAny.special_form) + instance = Instance(tp, [anytype] * len(tp.defn.type_vars)) + + for member in tp.protocol_members: + typ = get_proper_type(find_member(member, instance, instance)) + if not isinstance(typ, (Overloaded, CallableType)): + result.append(member) + return result + + +def is_callable_compatible( + left: CallableType, + right: CallableType, + *, + is_compat: Callable[[Type, Type], bool], + is_proper_subtype: bool, + is_compat_return: Callable[[Type, Type], bool] | None = None, + ignore_return: bool = False, + ignore_pos_arg_names: bool = False, + check_args_covariantly: bool = False, + allow_partial_overlap: bool = False, + strict_concatenate: bool = False, +) -> bool: + """Is the left compatible with the right, using the provided compatibility check? + + is_compat: + The check we want to run against the parameters. + + is_compat_return: + The check we want to run against the return type. + If None, use the 'is_compat' check. + + check_args_covariantly: + If true, check if the left's args is compatible with the right's + instead of the other way around (contravariantly). + + This function is mostly used to check if the left is a subtype of the right which + is why the default is to check the args contravariantly. However, it's occasionally + useful to check the args using some other check, so we leave the variance + configurable. + + For example, when checking the validity of overloads, it's useful to see if + the first overload alternative has more precise arguments than the second. + We would want to check the arguments covariantly in that case. + + Note! The following two function calls are NOT equivalent: + + is_callable_compatible(f, g, is_compat=is_subtype, check_args_covariantly=False) + is_callable_compatible(g, f, is_compat=is_subtype, check_args_covariantly=True) + + The two calls are similar in that they both check the function arguments in + the same direction: they both run `is_subtype(argument_from_g, argument_from_f)`. + + However, the two calls differ in which direction they check things like + keyword arguments. For example, suppose f and g are defined like so: + + def f(x: int, *y: int) -> int: ... + def g(x: int) -> int: ... + + In this case, the first call will succeed and the second will fail: f is a + valid stand-in for g but not vice-versa. + + allow_partial_overlap: + By default this function returns True if and only if *all* calls to left are + also calls to right (with respect to the provided 'is_compat' function). + + If this parameter is set to 'True', we return True if *there exists at least one* + call to left that's also a call to right. + + In other words, we perform an existential check instead of a universal one; + we require left to only overlap with right instead of being a subset. 
+ + For example, suppose we set 'is_compat' to some subtype check and compare following: + + f(x: float, y: str = "...", *args: bool) -> str + g(*args: int) -> str + + This function would normally return 'False': f is not a subtype of g. + However, we would return True if this parameter is set to 'True': the two + calls are compatible if the user runs "f_or_g(3)". In the context of that + specific call, the two functions effectively have signatures of: + + f2(float) -> str + g2(int) -> str + + Here, f2 is a valid subtype of g2 so we return True. + + Specifically, if this parameter is set this function will: + + - Ignore optional arguments on either the left or right that have no + corresponding match. + - No longer mandate optional arguments on either side are also optional + on the other. + - No longer mandate that if right has a *arg or **kwarg that left must also + have the same. + + Note: when this argument is set to True, this function becomes "symmetric" -- + the following calls are equivalent: + + is_callable_compatible(f, g, + is_compat=some_check, + check_args_covariantly=False, + allow_partial_overlap=True) + is_callable_compatible(g, f, + is_compat=some_check, + check_args_covariantly=True, + allow_partial_overlap=True) + + If the 'some_check' function is also symmetric, the two calls would be equivalent + whether or not we check the args covariantly. + """ + # Normalize both types before comparing them. + left = left.with_unpacked_kwargs().with_normalized_var_args() + right = right.with_unpacked_kwargs().with_normalized_var_args() + + if is_compat_return is None: + is_compat_return = is_compat + + # If either function is implicitly typed, ignore positional arg names too + if left.implicit or right.implicit: + ignore_pos_arg_names = True + + # Non-type cannot be a subtype of type. + if right.is_type_obj() and not left.is_type_obj() and not allow_partial_overlap: + return False + + # A callable L is a subtype of a generic callable R if L is a + # subtype of every type obtained from R by substituting types for + # the variables of R. We can check this by simply leaving the + # generic variables of R as type variables, effectively varying + # over all possible values. + + # It's okay even if these variables share ids with generic + # type variables of L, because generating and solving + # constraints for the variables of L to make L a subtype of R + # (below) treats type variables on the two sides as independent. + if left.variables: + # Apply generic type variables away in left via type inference. + unified = unify_generic_callable(left, right, ignore_return=ignore_return) + if unified is None: + return False + left = unified + + # Check return types. 
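+ # The return type is checked covariantly here; parameter types are then checked
+ # contravariantly by are_parameters_compatible unless check_args_covariantly
+ # flips the direction of is_compat.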
+ if not ignore_return and not is_compat_return(left.ret_type, right.ret_type): + return False + + if check_args_covariantly: + is_compat = flip_compat_check(is_compat) + + if not strict_concatenate and (left.from_concatenate or right.from_concatenate): + strict_concatenate_check = False + else: + strict_concatenate_check = True + + return are_parameters_compatible( + left, + right, + is_compat=is_compat, + is_proper_subtype=is_proper_subtype, + ignore_pos_arg_names=ignore_pos_arg_names, + allow_partial_overlap=allow_partial_overlap, + strict_concatenate_check=strict_concatenate_check, + ) + + +def are_trivial_parameters(param: Parameters | NormalizedCallableType) -> bool: + param_star = param.var_arg() + param_star2 = param.kw_arg() + return ( + param.arg_kinds == [ARG_STAR, ARG_STAR2] + and param_star is not None + and isinstance(get_proper_type(param_star.typ), AnyType) + and param_star2 is not None + and isinstance(get_proper_type(param_star2.typ), AnyType) + ) + + +def is_trivial_suffix(param: Parameters | NormalizedCallableType) -> bool: + param_star = param.var_arg() + param_star2 = param.kw_arg() + return ( + param.arg_kinds[-2:] == [ARG_STAR, ARG_STAR2] + and param_star is not None + and isinstance(get_proper_type(param_star.typ), AnyType) + and param_star2 is not None + and isinstance(get_proper_type(param_star2.typ), AnyType) + ) + + +def are_parameters_compatible( + left: Parameters | NormalizedCallableType, + right: Parameters | NormalizedCallableType, + *, + is_compat: Callable[[Type, Type], bool], + is_proper_subtype: bool, + ignore_pos_arg_names: bool = False, + allow_partial_overlap: bool = False, + strict_concatenate_check: bool = False, +) -> bool: + """Helper function for is_callable_compatible, used for Parameter compatibility""" + if right.is_ellipsis_args and not is_proper_subtype: + return True + + left_star = left.var_arg() + left_star2 = left.kw_arg() + right_star = right.var_arg() + right_star2 = right.kw_arg() + + # Treat "def _(*a: Any, **kw: Any) -> X" similarly to "Callable[..., X]" + if are_trivial_parameters(right) and not is_proper_subtype: + return True + trivial_suffix = is_trivial_suffix(right) and not is_proper_subtype + + trivial_vararg_suffix = False + if ( + right.arg_kinds[-1:] == [ARG_STAR] + and isinstance(get_proper_type(right.arg_types[-1]), AnyType) + and not is_proper_subtype + and all(k.is_positional(star=True) for k in left.arg_kinds) + ): + # Similar to how (*Any, **Any) is considered a supertype of all callables, we consider + # (*Any) a supertype of all callables with positional arguments. This is needed in + # particular because we often refuse to try type inference if actual type is not + # a subtype of erased template type. + trivial_vararg_suffix = True + + # Match up corresponding arguments and check them for compatibility. In + # every pair (argL, argR) of corresponding arguments from L and R, argL must + # be "more general" than argR if L is to be a subtype of R. + + # Arguments are corresponding if they either share a name, share a position, + # or both. If L's corresponding argument is ambiguous, L is not a subtype of R. + + # If left has one corresponding argument by name and another by position, + # consider them to be one "merged" argument (and not ambiguous) if they're + # both optional, they're name-only and position-only respectively, and they + # have the same type. This rule allows functions with (*args, **kwargs) to + # properly stand in for the full domain of formal arguments that they're + # used for in practice. 
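+ # For example, with right = def (x: int = 0) and left = def (*args: int, **kwargs: int),
+ # right's x corresponds to left's *args by position and to left's **kwargs by name;
+ # both synthesized arguments are optional, position-only/name-only respectively, and
+ # share the type int, so they count as one merged argument rather than an ambiguity.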
+ + # Every argument in R must have a corresponding argument in L, and every + # required argument in L must have a corresponding argument in R. + + # Phase 1: Confirm every argument in R has a corresponding argument in L. + + # Phase 1a: If left and right can both accept an infinite number of args, + # their types must be compatible. + # + # Furthermore, if we're checking for compatibility in all cases, + # we confirm that if R accepts an infinite number of arguments, + # L must accept the same. + def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | None) -> bool: + if right_arg is None: + return False + if left_arg is None: + return not allow_partial_overlap and not trivial_suffix + return not is_compat(right_arg.typ, left_arg.typ) + + if ( + _incompatible(left_star, right_star) + and not trivial_vararg_suffix + or _incompatible(left_star2, right_star2) + ): + return False + + # Phase 1b: Check non-star args: for every arg right can accept, left must + # also accept. The only exception is if we are allowing partial + # overlaps: in that case, we ignore optional args on the right. + for right_arg in right.formal_arguments(): + left_arg = mypy.typeops.callable_corresponding_argument(left, right_arg) + if left_arg is None: + if allow_partial_overlap and not right_arg.required: + continue + return False + if not are_args_compatible( + left_arg, + right_arg, + is_compat, + ignore_pos_arg_names=ignore_pos_arg_names, + allow_partial_overlap=allow_partial_overlap, + allow_imprecise_kinds=right.imprecise_arg_kinds, + ): + return False + + if trivial_suffix: + # For trivial right suffix we *only* check that every non-star right argument + # has a valid match on the left. + return True + + # Phase 1c: Check var args. Right has an infinite series of optional positional + # arguments. Get all further positional args of left, and make sure + # they're more general than the corresponding member in right. + # TODO: handle suffix in UnpackType (i.e. *args: *Tuple[Ts, X, Y]). + if right_star is not None and not trivial_vararg_suffix: + # Synthesize an anonymous formal argument for the right + right_by_position = right.try_synthesizing_arg_from_vararg(None) + assert right_by_position is not None + + i = right_star.pos + assert i is not None + while i < len(left.arg_kinds) and left.arg_kinds[i].is_positional(): + if allow_partial_overlap and left.arg_kinds[i].is_optional(): + break + + left_by_position = left.argument_by_position(i) + assert left_by_position is not None + + if not are_args_compatible( + left_by_position, + right_by_position, + is_compat, + ignore_pos_arg_names=ignore_pos_arg_names, + allow_partial_overlap=allow_partial_overlap, + ): + return False + i += 1 + + # Phase 1d: Check kw args. Right has an infinite series of optional named + # arguments. Get all further named args of left, and make sure + # they're more general than the corresponding member in right. 
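+ # That is, each named parameter that left adds beyond right's named parameters is
+ # checked against the type of right's **kwargs argument; such names are only
+ # collected for this check when the strict-concatenate rules apply.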
+ if right_star2 is not None: + right_names = {name for name in right.arg_names if name is not None} + left_only_names = set() + for name, kind in zip(left.arg_names, left.arg_kinds): + if ( + name is None + or kind.is_star() + or name in right_names + or not strict_concatenate_check + ): + continue + left_only_names.add(name) + + # Synthesize an anonymous formal argument for the right + right_by_name = right.try_synthesizing_arg_from_kwarg(None) + assert right_by_name is not None + + for name in left_only_names: + left_by_name = left.argument_by_name(name) + assert left_by_name is not None + + if allow_partial_overlap and not left_by_name.required: + continue + + if not are_args_compatible( + left_by_name, + right_by_name, + is_compat, + ignore_pos_arg_names=ignore_pos_arg_names, + allow_partial_overlap=allow_partial_overlap, + ): + return False + + # Phase 2: Left must not impose additional restrictions. + # (Every required argument in L must have a corresponding argument in R) + # Note: we already checked the *arg and **kwarg arguments in phase 1a. + for left_arg in left.formal_arguments(): + right_by_name = ( + right.argument_by_name(left_arg.name) if left_arg.name is not None else None + ) + + right_by_pos = ( + right.argument_by_position(left_arg.pos) if left_arg.pos is not None else None + ) + + # If the left hand argument corresponds to two right-hand arguments, + # neither of them can be required. + if ( + right_by_name is not None + and right_by_pos is not None + and right_by_name != right_by_pos + and (right_by_pos.required or right_by_name.required) + and strict_concatenate_check + and not right.imprecise_arg_kinds + ): + return False + + # All *required* left-hand arguments must have a corresponding + # right-hand argument. Optional args do not matter. + if left_arg.required and right_by_pos is None and right_by_name is None: + return False + + return True + + +def are_args_compatible( + left: FormalArgument, + right: FormalArgument, + is_compat: Callable[[Type, Type], bool], + *, + ignore_pos_arg_names: bool, + allow_partial_overlap: bool, + allow_imprecise_kinds: bool = False, +) -> bool: + if left.required and right.required: + # If both arguments are required allow_partial_overlap has no effect. + allow_partial_overlap = False + + def is_different( + left_item: object | None, right_item: object | None, allow_overlap: bool + ) -> bool: + """Checks if the left and right items are different. + + If the right item is unspecified (e.g. if the right callable doesn't care + about what name or position its arg has), we default to returning False. + + If we're allowing partial overlap, we also default to returning False + if the left callable also doesn't care.""" + if right_item is None: + return False + if allow_overlap and left_item is None: + return False + return left_item != right_item + + # If right has a specific name it wants this argument to be, left must + # have the same. + if is_different(left.name, right.name, allow_partial_overlap): + # But pay attention to whether we're ignoring positional arg names + if not ignore_pos_arg_names or right.pos is None: + return False + + # If right is at a specific position, left must have the same. + # TODO: partial overlap logic is flawed for positions. + # We disable it to avoid false positives at a cost of few false negatives. 
+ if is_different(left.pos, right.pos, allow_overlap=False) and not allow_imprecise_kinds: + return False + + # If right's argument is optional, left's must also be + # (unless we're relaxing the checks to allow potential + # rather than definite compatibility). + if not allow_partial_overlap and not right.required and left.required: + return False + + # If we're allowing partial overlaps and neither arg is required, + # the types don't actually need to be the same + if allow_partial_overlap and not left.required and not right.required: + return True + + # Left must have a more general type + return is_compat(right.typ, left.typ) + + +def flip_compat_check(is_compat: Callable[[Type, Type], bool]) -> Callable[[Type, Type], bool]: + def new_is_compat(left: Type, right: Type) -> bool: + return is_compat(right, left) + + return new_is_compat + + +def unify_generic_callable( + type: NormalizedCallableType, + target: NormalizedCallableType, + ignore_return: bool, + return_constraint_direction: int | None = None, +) -> NormalizedCallableType | None: + """Try to unify a generic callable type with another callable type. + + Return unified CallableType if successful; otherwise, return None. + """ + import mypy.solve + + if set(type.type_var_ids()) & {v.id for v in mypy.typeops.get_all_type_vars(target)}: + # Overload overlap check does nasty things like unifying in opposite direction. + # This can easily create type variable clashes, so we need to refresh. + type = freshen_function_type_vars(type) + + if return_constraint_direction is None: + return_constraint_direction = mypy.constraints.SUBTYPE_OF + + constraints: list[mypy.constraints.Constraint] = [] + # There is some special logic for inference in callables, so better use them + # as wholes instead of picking separate arguments. + cs = mypy.constraints.infer_constraints( + type.copy_modified(ret_type=UninhabitedType()), + target.copy_modified(ret_type=UninhabitedType()), + mypy.constraints.SUBTYPE_OF, + skip_neg_op=True, + ) + constraints.extend(cs) + if not ignore_return: + c = mypy.constraints.infer_constraints( + type.ret_type, target.ret_type, return_constraint_direction + ) + constraints.extend(c) + inferred_vars, _ = mypy.solve.solve_constraints( + type.variables, constraints, allow_polymorphic=True + ) + if None in inferred_vars: + return None + non_none_inferred_vars = cast(list[Type], inferred_vars) + had_errors = False + + def report(*args: Any) -> None: + nonlocal had_errors + had_errors = True + + # This function may be called by the solver, so we need to allow erased types here. + # We anyway allow checking subtyping between other types containing + # (probably also because solver needs subtyping). See also comment in + # ExpandTypeVisitor.visit_erased_type(). 
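+ # Apply the solved type arguments to the generic callable; if the application step
+ # reports a problem (for example, a value that does not satisfy a type variable's
+ # bound), had_errors is set and unification fails by returning None.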
+ applied = mypy.applytype.apply_generic_arguments( + type, non_none_inferred_vars, report, context=target + ) + if had_errors: + return None + return cast(NormalizedCallableType, applied) + + +def try_restrict_literal_union(t: UnionType, s: Type) -> list[Type] | None: + """Return the items of t, excluding any occurrence of s, if and only if + - t only contains simple literals + - s is a simple literal + + Otherwise, returns None + """ + ps = get_proper_type(s) + if not mypy.typeops.is_simple_literal(ps): + return None + + new_items: list[Type] = [] + for i in t.relevant_items(): + pi = get_proper_type(i) + if not mypy.typeops.is_simple_literal(pi): + return None + if pi != ps: + new_items.append(i) + return new_items + + +def restrict_subtype_away(t: Type, s: Type, *, consider_runtime_isinstance: bool = True) -> Type: + """Return t minus s for runtime type assertions. + + If we can't determine a precise result, return a supertype of the + ideal result (just t is a valid result). + + This is used for type inference of runtime type checks such as + isinstance(). Currently, this just removes elements of a union type. + """ + p_t = get_proper_type(t) + if isinstance(p_t, UnionType): + new_items = try_restrict_literal_union(p_t, s) + if new_items is None: + new_items = [ + restrict_subtype_away( + item, s, consider_runtime_isinstance=consider_runtime_isinstance + ) + for item in p_t.relevant_items() + ] + return UnionType.make_union( + [item for item in new_items if not isinstance(get_proper_type(item), UninhabitedType)] + ) + elif isinstance(p_t, TypeVarType): + return p_t.copy_modified(upper_bound=restrict_subtype_away(p_t.upper_bound, s)) + + if consider_runtime_isinstance: + if covers_at_runtime(t, s): + return UninhabitedType() + else: + return t + else: + if is_proper_subtype(t, s, ignore_promotions=True): + return UninhabitedType() + if is_proper_subtype(t, s, ignore_promotions=True, erase_instances=True): + return UninhabitedType() + return t + + +def covers_at_runtime(item: Type, supertype: Type) -> bool: + """Will isinstance(item, supertype) always return True at runtime?""" + item = get_proper_type(item) + supertype = get_proper_type(supertype) + + # Since runtime type checks will ignore type arguments, erase the types. + if not (isinstance(supertype, FunctionLike) and supertype.is_type_obj()): + supertype = erase_type(supertype) + if is_proper_subtype( + erase_type(item), supertype, ignore_promotions=True, erase_instances=True + ): + return True + if isinstance(supertype, Instance): + if supertype.type.is_protocol: + # TODO: Implement more robust support for runtime isinstance() checks, see issue #3827. + if is_proper_subtype(item, supertype, ignore_promotions=True): + return True + if isinstance(item, TypedDictType): + # Special case useful for selecting TypedDicts from unions using isinstance(x, dict). + if supertype.type.fullname == "builtins.dict": + return True + elif isinstance(item, TypeVarType): + if is_proper_subtype(item.upper_bound, supertype, ignore_promotions=True): + return True + elif isinstance(item, Instance) and supertype.type.fullname == "builtins.int": + # "int" covers all native int types + if item.type.fullname in MYPYC_NATIVE_INT_NAMES: + return True + # TODO: Add more special cases. + return False + + +def is_more_precise(left: Type, right: Type, *, ignore_promotions: bool = False) -> bool: + """Check if left is a more precise type than right. + + A left is a proper subtype of right, left is also more precise than + right. 
Also, if right is Any, left is more precise than right, for + any left. + """ + # TODO Should List[int] be more precise than List[Any]? + right = get_proper_type(right) + if isinstance(right, AnyType): + return True + return is_proper_subtype(left, right, ignore_promotions=ignore_promotions) + + +def all_non_object_members(info: TypeInfo) -> set[str]: + members = set(info.names) + for base in info.mro[1:-1]: + members.update(base.names) + return members + + +def infer_variance(info: TypeInfo, i: int) -> bool: + """Infer the variance of the ith type variable of a generic class. + + Return True if successful. This can fail if some inferred types aren't ready. + """ + object_type = Instance(info.mro[-1], []) + + for variance in COVARIANT, CONTRAVARIANT, INVARIANT: + tv = info.defn.type_vars[i] + assert isinstance(tv, TypeVarType) + if tv.variance != VARIANCE_NOT_READY: + continue + tv.variance = variance + co = True + contra = True + tvar = info.defn.type_vars[i] + self_type = fill_typevars(info) + for member in all_non_object_members(info): + # __mypy-replace is an implementation detail of the dataclass plugin + if member in ("__init__", "__new__", "__mypy-replace"): + continue + + if isinstance(self_type, TupleType): + self_type = mypy.typeops.tuple_fallback(self_type) + flags = get_member_flags(member, self_type) + settable = IS_SETTABLE in flags + + node = info[member].node + if isinstance(node, Var): + if node.type is None: + tv.variance = VARIANCE_NOT_READY + return False + if has_underscore_prefix(member): + # Special case to avoid false positives (and to pass conformance tests) + settable = False + + # TODO: handle settable properties with setter type different from getter. + typ = find_member(member, self_type, self_type) + if typ: + # It's okay for a method in a generic class with a contravariant type + # variable to return a generic instance of the class, if it doesn't involve + # variance (i.e. values of type variables are propagated). Our normal rules + # would disallow this. Replace such return types with 'Any' to allow this. + # + # This could probably be more lenient (e.g. allow self type be nested, don't + # require all type arguments to be identical to self_type), but this will + # hopefully cover the vast majority of such cases, including Self. 
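+ # The test below substitutes object for the type variable in each member type:
+ # covariance is ruled out unless the substituted type stays a supertype of the
+ # original, contravariance is ruled out unless it stays a subtype, and settable
+ # members also rule out covariance.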
+ typ = erase_return_self_types(typ, self_type) + + typ2 = expand_type(typ, {tvar.id: object_type}) + if not is_subtype(typ, typ2): + co = False + if not is_subtype(typ2, typ): + contra = False + if settable: + co = False + + # Infer variance from base classes, in case they have explicit variances + for base in info.bases: + base2 = expand_type(base, {tvar.id: object_type}) + if not is_subtype(base, base2): + co = False + if not is_subtype(base2, base): + contra = False + + if co: + v = COVARIANT + elif contra: + v = CONTRAVARIANT + else: + v = INVARIANT + if v == variance: + break + tv.variance = VARIANCE_NOT_READY + return True + + +def has_underscore_prefix(name: str) -> bool: + return name.startswith("_") and not (name.startswith("__") and name.endswith("__")) + + +def infer_class_variances(info: TypeInfo) -> bool: + if not info.defn.type_args: + return True + tvs = info.defn.type_vars + success = True + for i, tv in enumerate(tvs): + if isinstance(tv, TypeVarType) and tv.variance == VARIANCE_NOT_READY: + if not infer_variance(info, i): + success = False + return success + + +def erase_return_self_types(typ: Type, self_type: Instance) -> Type: + """If a typ is function-like and returns self_type, replace return type with Any.""" + proper_type = get_proper_type(typ) + if isinstance(proper_type, CallableType): + ret = get_proper_type(proper_type.ret_type) + if isinstance(ret, Instance) and ret == self_type: + return proper_type.copy_modified(ret_type=AnyType(TypeOfAny.implementation_artifact)) + elif isinstance(proper_type, Overloaded): + return Overloaded( + [ + cast(CallableType, erase_return_self_types(it, self_type)) + for it in proper_type.items + ] + ) + return typ + + +def is_erased_instance(t: Instance) -> bool: + """Is this an instance where all args are Any types?""" + if not t.args: + return False + for arg in t.args: + if isinstance(arg, UnpackType): + unpacked = get_proper_type(arg.type) + if not isinstance(unpacked, Instance): + return False + assert unpacked.type.fullname == "builtins.tuple" + if not isinstance(get_proper_type(unpacked.args[0]), AnyType): + return False + elif not isinstance(get_proper_type(arg), AnyType): + return False + return True diff --git a/.venv/lib/python3.12/site-packages/mypy/suggestions.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/suggestions.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..fe226af Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/suggestions.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/suggestions.py b/.venv/lib/python3.12/site-packages/mypy/suggestions.py new file mode 100644 index 0000000..756cf6a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/suggestions.py @@ -0,0 +1,1068 @@ +"""Mechanisms for inferring function types based on callsites. + +Currently works by collecting all argument types at callsites, +synthesizing a list of possible function types from that, trying them +all, and picking the one with the fewest errors that we think is the +"best". + +Can return JSON that pyannotate can use to apply the annotations to code. + +There are a bunch of TODOs here: + * Maybe want a way to surface the choices not selected?? + * We can generate an exponential number of type suggestions, and probably want + a way to not always need to check them all. + * Our heuristics for what types to try are primitive and not yet + supported by real practice. + * More! + +Other things: + * This is super brute force. 
Could we integrate with the typechecker + more to understand more about what is going on? + * Like something with tracking constraints/unification variables? + * No understanding of type variables at *all* +""" + +from __future__ import annotations + +import itertools +import json +import os +import sys +from collections.abc import Iterator +from contextlib import contextmanager +from typing import Callable, NamedTuple, TypedDict, TypeVar, cast + +from mypy.argmap import map_actuals_to_formals +from mypy.build import Graph, State +from mypy.checkexpr import has_any_type +from mypy.find_sources import InvalidSourceList, SourceFinder +from mypy.join import join_type_list +from mypy.meet import meet_type_list +from mypy.modulefinder import PYTHON_EXTENSIONS +from mypy.nodes import ( + ARG_STAR, + ARG_STAR2, + ArgKind, + CallExpr, + Decorator, + Expression, + FuncDef, + MypyFile, + RefExpr, + ReturnStmt, + SymbolNode, + SymbolTable, + TypeInfo, + Var, +) +from mypy.options import Options +from mypy.plugin import FunctionContext, MethodContext, Plugin +from mypy.server.update import FineGrainedBuildManager +from mypy.state import state +from mypy.traverser import TraverserVisitor +from mypy.typeops import bind_self, make_simplified_union +from mypy.types import ( + AnyType, + CallableType, + FunctionLike, + Instance, + NoneType, + ProperType, + TupleType, + Type, + TypeAliasType, + TypedDictType, + TypeOfAny, + TypeStrVisitor, + TypeTranslator, + TypeVarType, + UninhabitedType, + UnionType, + get_proper_type, +) +from mypy.types_utils import is_overlapping_none, remove_optional +from mypy.util import split_target + + +class PyAnnotateSignature(TypedDict): + return_type: str + arg_types: list[str] + + +class Callsite(NamedTuple): + path: str + line: int + arg_kinds: list[list[ArgKind]] + callee_arg_names: list[str | None] + arg_names: list[list[str | None]] + arg_types: list[list[Type]] + + +class SuggestionPlugin(Plugin): + """Plugin that records all calls to a given target.""" + + def __init__(self, target: str) -> None: + if target.endswith((".__new__", ".__init__")): + target = target.rsplit(".", 1)[0] + + self.target = target + # List of call sites found by dmypy suggest: + # (path, line, , , ) + self.mystery_hits: list[Callsite] = [] + + def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None: + if fullname == self.target: + return self.log + else: + return None + + def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None: + if fullname == self.target: + return self.log + else: + return None + + def log(self, ctx: FunctionContext | MethodContext) -> Type: + self.mystery_hits.append( + Callsite( + ctx.api.path, + ctx.context.line, + ctx.arg_kinds, + ctx.callee_arg_names, + ctx.arg_names, + ctx.arg_types, + ) + ) + return ctx.default_return_type + + +# NOTE: We could make this a bunch faster by implementing a StatementVisitor that skips +# traversing into expressions +class ReturnFinder(TraverserVisitor): + """Visitor for finding all types returned from a function.""" + + def __init__(self, typemap: dict[Expression, Type]) -> None: + self.typemap = typemap + self.return_types: list[Type] = [] + + def visit_return_stmt(self, o: ReturnStmt) -> None: + if o.expr is not None and o.expr in self.typemap: + self.return_types.append(self.typemap[o.expr]) + + def visit_func_def(self, o: FuncDef) -> None: + # Skip nested functions + pass + + +def get_return_types(typemap: dict[Expression, Type], func: FuncDef) -> list[Type]: + """Find all the 
types returned by return statements in func.""" + finder = ReturnFinder(typemap) + func.body.accept(finder) + return finder.return_types + + +class ArgUseFinder(TraverserVisitor): + """Visitor for finding all the types of arguments that each arg is passed to. + + This is extremely simple minded but might be effective anyways. + """ + + def __init__(self, func: FuncDef, typemap: dict[Expression, Type]) -> None: + self.typemap = typemap + self.arg_types: dict[SymbolNode, list[Type]] = {arg.variable: [] for arg in func.arguments} + + def visit_call_expr(self, o: CallExpr) -> None: + if not any(isinstance(e, RefExpr) and e.node in self.arg_types for e in o.args): + return + + typ = get_proper_type(self.typemap.get(o.callee)) + if not isinstance(typ, CallableType): + return + + formal_to_actual = map_actuals_to_formals( + o.arg_kinds, + o.arg_names, + typ.arg_kinds, + typ.arg_names, + lambda n: AnyType(TypeOfAny.special_form), + ) + + for i, args in enumerate(formal_to_actual): + for arg_idx in args: + arg = o.args[arg_idx] + if isinstance(arg, RefExpr) and arg.node in self.arg_types: + self.arg_types[arg.node].append(typ.arg_types[i]) + + +def get_arg_uses(typemap: dict[Expression, Type], func: FuncDef) -> list[list[Type]]: + """Find all the types of arguments that each arg is passed to. + + For example, given + def foo(x: int) -> None: ... + def bar(x: str) -> None: ... + def test(x, y): + foo(x) + bar(y) + + this will return [[int], [str]]. + """ + finder = ArgUseFinder(func, typemap) + func.body.accept(finder) + return [finder.arg_types[arg.variable] for arg in func.arguments] + + +class SuggestionFailure(Exception): + pass + + +def is_explicit_any(typ: AnyType) -> bool: + # Originally I wanted to count as explicit anything derived from an explicit any, but that + # seemed too strict in some testing. + # return (typ.type_of_any == TypeOfAny.explicit + # or (typ.source_any is not None and typ.source_any.type_of_any == TypeOfAny.explicit)) + # Important question: what should we do with source_any stuff? Does that count? + # And actually should explicit anys count at all?? Maybe not! 
+ return typ.type_of_any == TypeOfAny.explicit + + +def is_implicit_any(typ: Type) -> bool: + typ = get_proper_type(typ) + return isinstance(typ, AnyType) and not is_explicit_any(typ) + + +def _arg_accepts_function(typ: ProperType) -> bool: + return ( + # TypeVar / Callable + isinstance(typ, (TypeVarType, CallableType)) + or + # Protocol with __call__ + isinstance(typ, Instance) + and typ.type.is_protocol + and typ.type.get_method("__call__") is not None + ) + + +class SuggestionEngine: + """Engine for finding call sites and suggesting signatures.""" + + def __init__( + self, + fgmanager: FineGrainedBuildManager, + *, + json: bool, + no_errors: bool = False, + no_any: bool = False, + flex_any: float | None = None, + use_fixme: str | None = None, + max_guesses: int | None = None, + ) -> None: + self.fgmanager = fgmanager + self.manager = fgmanager.manager + self.plugin = self.manager.plugin + self.graph = fgmanager.graph + self.finder = SourceFinder(self.manager.fscache, self.manager.options) + + self.give_json = json + self.no_errors = no_errors + self.flex_any = flex_any + if no_any: + self.flex_any = 1.0 + + self.max_guesses = max_guesses or 64 + self.use_fixme = use_fixme + + def suggest(self, function: str) -> str: + """Suggest an inferred type for function.""" + mod, func_name, node = self.find_node(function) + + with self.restore_after(mod): + with self.with_export_types(): + suggestion = self.get_suggestion(mod, node) + + if self.give_json: + return self.json_suggestion(mod, func_name, node, suggestion) + else: + return self.format_signature(suggestion) + + def suggest_callsites(self, function: str) -> str: + """Find a list of call sites of function.""" + mod, _, node = self.find_node(function) + with self.restore_after(mod): + callsites, _ = self.get_callsites(node) + + return "\n".join( + dedup( + [ + f"{path}:{line}: {self.format_args(arg_kinds, arg_names, arg_types)}" + for path, line, arg_kinds, _, arg_names, arg_types in callsites + ] + ) + ) + + @contextmanager + def restore_after(self, module: str) -> Iterator[None]: + """Context manager that reloads a module after executing the body. + + This should undo any damage done to the module state while mucking around. + """ + try: + yield + finally: + self.reload(self.graph[module]) + + @contextmanager + def with_export_types(self) -> Iterator[None]: + """Context manager that enables the export_types flag in the body. + + This causes type information to be exported into the manager's all_types variable. + """ + old = self.manager.options.export_types + self.manager.options.export_types = True + try: + yield + finally: + self.manager.options.export_types = old + + def get_trivial_type(self, fdef: FuncDef) -> CallableType: + """Generate a trivial callable type from a func def, with all Anys""" + # The Anys are marked as being from the suggestion engine + # since they need some special treatment (specifically, + # constraint generation ignores them.) 
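+ # The trivial signature keeps the function's argument kinds and names but replaces
+ # every argument type and the return type with a suggestion-engine Any, using
+ # builtins.function as the fallback.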
+ return CallableType( + [AnyType(TypeOfAny.suggestion_engine) for _ in fdef.arg_kinds], + fdef.arg_kinds, + fdef.arg_names, + AnyType(TypeOfAny.suggestion_engine), + self.named_type("builtins.function"), + ) + + def get_starting_type(self, fdef: FuncDef) -> CallableType: + if isinstance(fdef.type, CallableType): + return make_suggestion_anys(fdef.type) + else: + return self.get_trivial_type(fdef) + + def get_args( + self, + is_method: bool, + base: CallableType, + defaults: list[Type | None], + callsites: list[Callsite], + uses: list[list[Type]], + ) -> list[list[Type]]: + """Produce a list of type suggestions for each argument type.""" + types: list[list[Type]] = [] + for i in range(len(base.arg_kinds)): + # Make self args Any but this will get overridden somewhere in the checker + if i == 0 and is_method: + types.append([AnyType(TypeOfAny.suggestion_engine)]) + continue + + all_arg_types = [] + for call in callsites: + for typ in call.arg_types[i - is_method]: + # Collect all the types except for implicit anys + if not is_implicit_any(typ): + all_arg_types.append(typ) + all_use_types = [] + for typ in uses[i]: + # Collect all the types except for implicit anys + if not is_implicit_any(typ): + all_use_types.append(typ) + # Add in any default argument types + default = defaults[i] + if default: + all_arg_types.append(default) + if all_use_types: + all_use_types.append(default) + + arg_types = [] + + if all_arg_types and all( + isinstance(get_proper_type(tp), NoneType) for tp in all_arg_types + ): + arg_types.append( + UnionType.make_union([all_arg_types[0], AnyType(TypeOfAny.explicit)]) + ) + elif all_arg_types: + arg_types.extend(generate_type_combinations(all_arg_types)) + else: + arg_types.append(AnyType(TypeOfAny.explicit)) + + if all_use_types: + # This is a meet because the type needs to be compatible with all the uses + arg_types.append(meet_type_list(all_use_types)) + + types.append(arg_types) + return types + + def get_default_arg_types(self, fdef: FuncDef) -> list[Type | None]: + return [ + self.manager.all_types[arg.initializer] if arg.initializer else None + for arg in fdef.arguments + ] + + def get_guesses( + self, + is_method: bool, + base: CallableType, + defaults: list[Type | None], + callsites: list[Callsite], + uses: list[list[Type]], + ) -> list[CallableType]: + """Compute a list of guesses for a function's type. + + This focuses just on the argument types, and doesn't change the provided return type. + """ + options = self.get_args(is_method, base, defaults, callsites, uses) + + # Take the first `max_guesses` guesses. + product = itertools.islice(itertools.product(*options), 0, self.max_guesses) + return [refine_callable(base, base.copy_modified(arg_types=list(x))) for x in product] + + def get_callsites(self, func: FuncDef) -> tuple[list[Callsite], list[str]]: + """Find all call sites of a function.""" + new_type = self.get_starting_type(func) + + collector_plugin = SuggestionPlugin(func.fullname) + + self.plugin._plugins.insert(0, collector_plugin) + try: + errors = self.try_type(func, new_type) + finally: + self.plugin._plugins.pop(0) + + return collector_plugin.mystery_hits, errors + + def filter_options( + self, guesses: list[CallableType], is_method: bool, ignore_return: bool + ) -> list[CallableType]: + """Apply any configured filters to the possible guesses. 
+ + Currently the only option is filtering based on Any prevalance.""" + return [ + t + for t in guesses + if self.flex_any is None + or any_score_callable(t, is_method, ignore_return) >= self.flex_any + ] + + def find_best(self, func: FuncDef, guesses: list[CallableType]) -> tuple[CallableType, int]: + """From a list of possible function types, find the best one. + + For best, we want the fewest errors, then the best "score" from score_callable. + """ + if not guesses: + raise SuggestionFailure("No guesses that match criteria!") + errors = {guess: self.try_type(func, guess) for guess in guesses} + best = min(guesses, key=lambda s: (count_errors(errors[s]), self.score_callable(s))) + return best, count_errors(errors[best]) + + def get_guesses_from_parent(self, node: FuncDef) -> list[CallableType]: + """Try to get a guess of a method type from a parent class.""" + if not node.info: + return [] + + for parent in node.info.mro[1:]: + pnode = parent.names.get(node.name) + if pnode and isinstance(pnode.node, (FuncDef, Decorator)): + typ = get_proper_type(pnode.node.type) + # FIXME: Doesn't work right with generic types + if isinstance(typ, CallableType) and len(typ.arg_types) == len(node.arguments): + # Return the first thing we find, since it probably doesn't make sense + # to grab things further up in the chain if an earlier parent has it. + return [typ] + + return [] + + def get_suggestion(self, mod: str, node: FuncDef) -> PyAnnotateSignature: + """Compute a suggestion for a function. + + Return the type and whether the first argument should be ignored. + """ + graph = self.graph + callsites, orig_errors = self.get_callsites(node) + uses = get_arg_uses(self.manager.all_types, node) + + if self.no_errors and orig_errors: + raise SuggestionFailure("Function does not typecheck.") + + is_method = bool(node.info) and node.has_self_or_cls_argument + + with state.strict_optional_set(graph[mod].options.strict_optional): + guesses = self.get_guesses( + is_method, + self.get_starting_type(node), + self.get_default_arg_types(node), + callsites, + uses, + ) + guesses += self.get_guesses_from_parent(node) + guesses = self.filter_options(guesses, is_method, ignore_return=True) + best, _ = self.find_best(node, guesses) + + # Now try to find the return type! + self.try_type(node, best) + returns = get_return_types(self.manager.all_types, node) + with state.strict_optional_set(graph[mod].options.strict_optional): + if returns: + ret_types = generate_type_combinations(returns) + else: + ret_types = [NoneType()] + + guesses = [best.copy_modified(ret_type=refine_type(best.ret_type, t)) for t in ret_types] + guesses = self.filter_options(guesses, is_method, ignore_return=False) + best, errors = self.find_best(node, guesses) + + if self.no_errors and errors: + raise SuggestionFailure("No annotation without errors") + + return self.pyannotate_signature(mod, is_method, best) + + def format_args( + self, + arg_kinds: list[list[ArgKind]], + arg_names: list[list[str | None]], + arg_types: list[list[Type]], + ) -> str: + args: list[str] = [] + for i in range(len(arg_types)): + for kind, name, typ in zip(arg_kinds[i], arg_names[i], arg_types[i]): + arg = self.format_type(None, typ) + if kind == ARG_STAR: + arg = "*" + arg + elif kind == ARG_STAR2: + arg = "**" + arg + elif kind.is_named(): + if name: + arg = f"{name}={arg}" + args.append(arg) + return f"({', '.join(args)})" + + def find_node(self, key: str) -> tuple[str, str, FuncDef]: + """From a target name, return module/target names and the func def. 
+ + The 'key' argument can be in one of two formats: + * As the function full name, e.g., package.module.Cls.method + * As the function location as file and line separated by column, + e.g., path/to/file.py:42 + """ + # TODO: Also return OverloadedFuncDef -- currently these are ignored. + node: SymbolNode | None = None + if ":" in key: + # A colon might be part of a drive name on Windows (like `C:/foo/bar`) + # and is also used as a delimiter between file path and lineno. + # If a colon is there for any of those reasons, it must be a file+line + # reference. + platform_key_count = 2 if sys.platform == "win32" else 1 + if key.count(":") > platform_key_count: + raise SuggestionFailure( + "Malformed location for function: {}. Must be either" + " package.module.Class.method or path/to/file.py:line".format(key) + ) + file, line = key.rsplit(":", 1) + if not line.isdigit(): + raise SuggestionFailure(f"Line number must be a number. Got {line}") + line_number = int(line) + modname, node = self.find_node_by_file_and_line(file, line_number) + tail = node.fullname[len(modname) + 1 :] # add one to account for '.' + else: + target = split_target(self.fgmanager.graph, key) + if not target: + raise SuggestionFailure(f"Cannot find module for {key}") + modname, tail = target + node = self.find_node_by_module_and_name(modname, tail) + + if isinstance(node, Decorator): + node = self.extract_from_decorator(node) + if not node: + raise SuggestionFailure(f"Object {key} is a decorator we can't handle") + + if not isinstance(node, FuncDef): + raise SuggestionFailure(f"Object {key} is not a function") + + return modname, tail, node + + def find_node_by_module_and_name(self, modname: str, tail: str) -> SymbolNode | None: + """Find symbol node by module id and qualified name. + + Raise SuggestionFailure if can't find one. + """ + tree = self.ensure_loaded(self.fgmanager.graph[modname]) + + # N.B. This is reimplemented from update's lookup_target + # basically just to produce better error messages. + + names: SymbolTable = tree.names + + # Look through any classes + components = tail.split(".") + for i, component in enumerate(components[:-1]): + if component not in names: + raise SuggestionFailure( + "Unknown class {}.{}".format(modname, ".".join(components[: i + 1])) + ) + node: SymbolNode | None = names[component].node + if not isinstance(node, TypeInfo): + raise SuggestionFailure( + "Object {}.{} is not a class".format(modname, ".".join(components[: i + 1])) + ) + names = node.names + + # Look for the actual function/method + funcname = components[-1] + if funcname not in names: + key = modname + "." + tail + raise SuggestionFailure( + "Unknown {} {}".format("method" if len(components) > 1 else "function", key) + ) + return names[funcname].node + + def find_node_by_file_and_line(self, file: str, line: int) -> tuple[str, SymbolNode]: + """Find symbol node by path to file and line number. + + Find the first function declared *before or on* the line number. + + Return module id and the node found. Raise SuggestionFailure if can't find one. + """ + if not any(file.endswith(ext) for ext in PYTHON_EXTENSIONS): + raise SuggestionFailure("Source file is not a Python file") + try: + modname, _ = self.finder.crawl_up(os.path.normpath(file)) + except InvalidSourceList as e: + raise SuggestionFailure("Invalid source file name: " + file) from e + if modname not in self.graph: + raise SuggestionFailure("Unknown module: " + modname) + # We must be sure about any edits in this file as this might affect the line numbers. 
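+ # Force a fresh load (rather than trusting a cache skeleton) so that a lookup
+ # such as "path/to/file.py:42" is resolved against the current on-disk lines.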
+ tree = self.ensure_loaded(self.fgmanager.graph[modname], force=True) + node: SymbolNode | None = None + closest_line: int | None = None + # TODO: Handle nested functions. + for _, sym, _ in tree.local_definitions(): + if isinstance(sym.node, (FuncDef, Decorator)): + sym_line = sym.node.line + # TODO: add support for OverloadedFuncDef. + else: + continue + + # We want the closest function above the specified line + if sym_line <= line and (closest_line is None or sym_line > closest_line): + closest_line = sym_line + node = sym.node + if not node: + raise SuggestionFailure(f"Cannot find a function at line {line}") + return modname, node + + def extract_from_decorator(self, node: Decorator) -> FuncDef | None: + for dec in node.decorators: + typ = None + if isinstance(dec, RefExpr) and isinstance(dec.node, (Var, FuncDef)): + typ = get_proper_type(dec.node.type) + elif ( + isinstance(dec, CallExpr) + and isinstance(dec.callee, RefExpr) + and isinstance(dec.callee.node, (Decorator, FuncDef, Var)) + and isinstance((call_tp := get_proper_type(dec.callee.node.type)), CallableType) + ): + typ = get_proper_type(call_tp.ret_type) + + if isinstance(typ, Instance): + call_method = typ.type.get_method("__call__") + if isinstance(call_method, FuncDef) and isinstance(call_method.type, FunctionLike): + typ = bind_self(call_method.type, None) + + if not isinstance(typ, FunctionLike): + return None + for ct in typ.items: + if not ( + len(ct.arg_types) == 1 + and _arg_accepts_function(get_proper_type(ct.arg_types[0])) + and ct.arg_types[0] == ct.ret_type + ): + return None + + return node.func + + def try_type(self, func: FuncDef, typ: ProperType) -> list[str]: + """Recheck a function while assuming it has type typ. + + Return all error messages. + """ + old = func.unanalyzed_type + # During reprocessing, unanalyzed_type gets copied to type (by aststrip). + # We set type to None to ensure that the type always changes during + # reprocessing. + func.type = None + func.unanalyzed_type = typ + try: + res = self.fgmanager.trigger(func.fullname) + # if res: + # print('===', typ) + # print('\n'.join(res)) + return res + finally: + func.unanalyzed_type = old + + def reload(self, state: State) -> list[str]: + """Recheck the module given by state.""" + assert state.path is not None + self.fgmanager.flush_cache() + return self.fgmanager.update([(state.id, state.path)], []) + + def ensure_loaded(self, state: State, force: bool = False) -> MypyFile: + """Make sure that the module represented by state is fully loaded.""" + if not state.tree or state.tree.is_cache_skeleton or force: + self.reload(state) + assert state.tree is not None + return state.tree + + def named_type(self, s: str) -> Instance: + return self.manager.semantic_analyzer.named_type(s) + + def json_suggestion( + self, mod: str, func_name: str, node: FuncDef, suggestion: PyAnnotateSignature + ) -> str: + """Produce a json blob for a suggestion suitable for application by pyannotate.""" + # pyannotate irritatingly drops class names for class and static methods + if node.is_class or node.is_static: + func_name = func_name.split(".", 1)[-1] + + # pyannotate works with either paths relative to where the + # module is rooted or with absolute paths. We produce absolute + # paths because it is simpler. 
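+ # The result is a one-element JSON list, roughly of the form:
+ #   [{"func_name": "f", "line": 3, "path": "/abs/pkg/mod.py", "samples": 0,
+ #     "signature": {"arg_types": ["int"], "return_type": "str"}}]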
+ path = os.path.abspath(self.graph[mod].xpath) + + obj = { + "signature": suggestion, + "line": node.line, + "path": path, + "func_name": func_name, + "samples": 0, + } + return json.dumps([obj], sort_keys=True) + + def pyannotate_signature( + self, cur_module: str | None, is_method: bool, typ: CallableType + ) -> PyAnnotateSignature: + """Format a callable type as a pyannotate dict""" + start = int(is_method) + return { + "arg_types": [self.format_type(cur_module, t) for t in typ.arg_types[start:]], + "return_type": self.format_type(cur_module, typ.ret_type), + } + + def format_signature(self, sig: PyAnnotateSignature) -> str: + """Format a callable type in a way suitable as an annotation... kind of""" + return f"({', '.join(sig['arg_types'])}) -> {sig['return_type']}" + + def format_type(self, cur_module: str | None, typ: Type) -> str: + if self.use_fixme and isinstance(get_proper_type(typ), AnyType): + return self.use_fixme + return typ.accept(TypeFormatter(cur_module, self.graph, self.manager.options)) + + def score_type(self, t: Type, arg_pos: bool) -> int: + """Generate a score for a type that we use to pick which type to use. + + Lower is better, prefer non-union/non-any types. Don't penalize optionals. + """ + t = get_proper_type(t) + if isinstance(t, AnyType): + return 20 + if arg_pos and isinstance(t, NoneType): + return 20 + if isinstance(t, UnionType): + if any(isinstance(get_proper_type(x), AnyType) for x in t.items): + return 20 + if any(has_any_type(x) for x in t.items): + return 15 + if not is_overlapping_none(t): + return 10 + if isinstance(t, CallableType) and (has_any_type(t) or is_tricky_callable(t)): + return 10 + return 0 + + def score_callable(self, t: CallableType) -> int: + return sum(self.score_type(x, arg_pos=True) for x in t.arg_types) + self.score_type( + t.ret_type, arg_pos=False + ) + + +def any_score_type(ut: Type, arg_pos: bool) -> float: + """Generate a very made up number representing the Anyness of a type. + + Higher is better, 1.0 is max + """ + t = get_proper_type(ut) + if isinstance(t, AnyType) and t.type_of_any != TypeOfAny.suggestion_engine: + return 0 + if isinstance(t, NoneType) and arg_pos: + return 0.5 + if isinstance(t, UnionType): + if any(isinstance(get_proper_type(x), AnyType) for x in t.items): + return 0.5 + if any(has_any_type(x) for x in t.items): + return 0.25 + if isinstance(t, CallableType) and is_tricky_callable(t): + return 0.5 + if has_any_type(t): + return 0.5 + + return 1.0 + + +def any_score_callable(t: CallableType, is_method: bool, ignore_return: bool) -> float: + # Ignore the first argument of methods + scores = [any_score_type(x, arg_pos=True) for x in t.arg_types[int(is_method) :]] + # Return type counts twice (since it spreads type information), unless it is + # None in which case it does not count at all. (Though it *does* still count + # if there are no arguments.) + if not isinstance(get_proper_type(t.ret_type), NoneType) or not scores: + ret = 1.0 if ignore_return else any_score_type(t.ret_type, arg_pos=False) + scores += [ret, ret] + + return sum(scores) / len(scores) + + +def is_tricky_callable(t: CallableType) -> bool: + """Is t a callable that we need to put a ... 
in for syntax reasons?""" + return t.is_ellipsis_args or any(k.is_star() or k.is_named() for k in t.arg_kinds) + + +class TypeFormatter(TypeStrVisitor): + """Visitor used to format types""" + + # TODO: Probably a lot + def __init__(self, module: str | None, graph: Graph, options: Options) -> None: + super().__init__(options=options) + self.module = module + self.graph = graph + + def visit_any(self, t: AnyType) -> str: + if t.missing_import_name: + return t.missing_import_name + else: + return "Any" + + def visit_instance(self, t: Instance) -> str: + s = t.type.fullname or t.type.name or None + if s is None: + return "" + + mod_obj = split_target(self.graph, s) + assert mod_obj + mod, obj = mod_obj + + # If a class is imported into the current module, rewrite the reference + # to point to the current module. This helps the annotation tool avoid + # inserting redundant imports when a type has been reexported. + if self.module: + parts = obj.split(".") # need to split the object part if it is a nested class + tree = self.graph[self.module].tree + if tree and parts[0] in tree.names and mod not in tree.names: + mod = self.module + + if (mod, obj) == ("builtins", "tuple"): + mod, obj = "typing", "Tuple[" + t.args[0].accept(self) + ", ...]" + elif t.args: + obj += f"[{self.list_str(t.args)}]" + + if mod_obj == ("builtins", "unicode"): + return "Text" + elif mod == "builtins": + return obj + else: + delim = "." if "." not in obj else ":" + return mod + delim + obj + + def visit_tuple_type(self, t: TupleType) -> str: + if t.partial_fallback and t.partial_fallback.type: + fallback_name = t.partial_fallback.type.fullname + if fallback_name != "builtins.tuple": + return t.partial_fallback.accept(self) + s = self.list_str(t.items) + return f"Tuple[{s}]" + + def visit_uninhabited_type(self, t: UninhabitedType) -> str: + return "Any" + + def visit_typeddict_type(self, t: TypedDictType) -> str: + return t.fallback.accept(self) + + def visit_union_type(self, t: UnionType) -> str: + if len(t.items) == 2 and is_overlapping_none(t): + s = remove_optional(t).accept(self) + return f"{s} | None" if self.options.use_or_syntax() else f"Optional[{s}]" + else: + return super().visit_union_type(t) + + def visit_callable_type(self, t: CallableType) -> str: + # TODO: use extended callables? + if is_tricky_callable(t): + arg_str = "..." + else: + # Note: for default arguments, we just assume that they + # are required. This isn't right, but neither is the + # other thing, and I suspect this will produce more better + # results than falling back to `...` + args = [typ.accept(self) for typ in t.arg_types] + arg_str = f"[{', '.join(args)}]" + + return f"Callable[{arg_str}, {t.ret_type.accept(self)}]" + + +TType = TypeVar("TType", bound=Type) + + +def make_suggestion_anys(t: TType) -> TType: + """Make all anys in the type as coming from the suggestion engine. + + This keeps those Anys from influencing constraint generation, + which allows us to do better when refining types. + """ + return cast(TType, t.accept(MakeSuggestionAny())) + + +class MakeSuggestionAny(TypeTranslator): + def visit_any(self, t: AnyType) -> Type: + if not t.missing_import_name: + return t.copy_modified(type_of_any=TypeOfAny.suggestion_engine) + else: + return t + + def visit_type_alias_type(self, t: TypeAliasType) -> Type: + return t.copy_modified(args=[a.accept(self) for a in t.args]) + + +def generate_type_combinations(types: list[Type]) -> list[Type]: + """Generate possible combinations of a list of types. 
+ + mypy essentially supports two different ways to do this: joining the types + and unioning the types. We try both. + """ + joined_type = join_type_list(types) + union_type = make_simplified_union(types) + if joined_type == union_type: + return [joined_type] + else: + return [joined_type, union_type] + + +def count_errors(msgs: list[str]) -> int: + return len([x for x in msgs if " error: " in x]) + + +def refine_type(ti: Type, si: Type) -> Type: + """Refine `ti` by replacing Anys in it with information taken from `si` + + This basically works by, when the types have the same structure, + traversing both of them in parallel and replacing Any on the left + with whatever the type on the right is. If the types don't have the + same structure (or aren't supported), the left type is chosen. + + For example: + refine(Any, T) = T, for all T + refine(float, int) = float + refine(List[Any], List[int]) = List[int] + refine(Dict[int, Any], Dict[Any, int]) = Dict[int, int] + refine(Tuple[int, Any], Tuple[Any, int]) = Tuple[int, int] + + refine(Callable[[Any], Any], Callable[[int], int]) = Callable[[int], int] + refine(Callable[..., int], Callable[[int, float], Any]) = Callable[[int, float], int] + + refine(Optional[Any], int) = Optional[int] + refine(Optional[Any], Optional[int]) = Optional[int] + refine(Optional[Any], Union[int, str]) = Optional[Union[int, str]] + refine(Optional[List[Any]], List[int]) = List[int] + + """ + t = get_proper_type(ti) + s = get_proper_type(si) + + if isinstance(t, AnyType): + # If s is also an Any, we return if it is a missing_import Any + return t if isinstance(s, AnyType) and t.missing_import_name else s + + if isinstance(t, Instance) and isinstance(s, Instance) and t.type == s.type: + return t.copy_modified(args=[refine_type(ta, sa) for ta, sa in zip(t.args, s.args)]) + + if ( + isinstance(t, TupleType) + and isinstance(s, TupleType) + and t.partial_fallback == s.partial_fallback + and len(t.items) == len(s.items) + ): + return t.copy_modified(items=[refine_type(ta, sa) for ta, sa in zip(t.items, s.items)]) + + if isinstance(t, CallableType) and isinstance(s, CallableType): + return refine_callable(t, s) + + if isinstance(t, UnionType): + return refine_union(t, s) + + # TODO: Refining of builtins.tuple, Type? + + return t + + +def refine_union(t: UnionType, s: ProperType) -> Type: + """Refine a union type based on another type. + + This is done by refining every component of the union against the + right hand side type (or every component of its union if it is + one). If an element of the union is successfully refined, we drop it + from the union in favor of the refined versions. + """ + # Don't try to do any union refining if the types are already the + # same. This prevents things like refining Optional[Any] against + # itself and producing None. + if t == s: + return t + + rhs_items = s.items if isinstance(s, UnionType) else [s] + + new_items = [] + for lhs in t.items: + refined = False + for rhs in rhs_items: + new = refine_type(lhs, rhs) + if new != lhs: + new_items.append(new) + refined = True + if not refined: + new_items.append(lhs) + + # Turn strict optional on when simplifying the union since we + # don't want to drop Nones. + with state.strict_optional_set(True): + return make_simplified_union(new_items) + + +def refine_callable(t: CallableType, s: CallableType) -> CallableType: + """Refine a callable based on another. + + See comments for refine_type. 
+ """ + if t.fallback != s.fallback: + return t + + if t.is_ellipsis_args and not is_tricky_callable(s): + return s.copy_modified(ret_type=refine_type(t.ret_type, s.ret_type)) + + if is_tricky_callable(t) or t.arg_kinds != s.arg_kinds: + return t + + return t.copy_modified( + arg_types=[refine_type(ta, sa) for ta, sa in zip(t.arg_types, s.arg_types)], + ret_type=refine_type(t.ret_type, s.ret_type), + ) + + +T = TypeVar("T") + + +def dedup(old: list[T]) -> list[T]: + new: list[T] = [] + for x in old: + if x not in new: + new.append(x) + return new diff --git a/.venv/lib/python3.12/site-packages/mypy/test/__init__.py b/.venv/lib/python3.12/site-packages/mypy/test/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypy/test/config.py b/.venv/lib/python3.12/site-packages/mypy/test/config.py new file mode 100644 index 0000000..2dc4208 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/config.py @@ -0,0 +1,31 @@ +from __future__ import annotations + +import os.path + +provided_prefix = os.getenv("MYPY_TEST_PREFIX", None) +if provided_prefix: + PREFIX = provided_prefix +else: + this_file_dir = os.path.dirname(os.path.realpath(__file__)) + PREFIX = os.path.dirname(os.path.dirname(this_file_dir)) + +# Location of test data files such as test case descriptions. +test_data_prefix = os.path.join(PREFIX, "test-data", "unit") +package_path = os.path.join(PREFIX, "test-data", "packages") + +# Temp directory used for the temp files created when running test cases. +# This is *within* the tempfile.TemporaryDirectory that is chroot'ed per testcase. +# It is also hard-coded in numerous places, so don't change it. +test_temp_dir = "tmp" + +# Mypyc tests may write intermediate files (e.g. generated C) here on failure +mypyc_output_dir = os.path.join(PREFIX, ".mypyc_test_output") + +# The PEP 561 tests do a bunch of pip installs which, even though they operate +# on distinct temporary virtual environments, run into race conditions on shared +# file-system state. To make this work reliably in parallel mode, we'll use a +# FileLock courtesy of the tox-dev/py-filelock package. +# Ref. https://github.com/python/mypy/issues/12615 +# Ref. mypy/test/testpep561.py +pip_lock = os.path.join(package_path, ".pip_lock") +pip_timeout = 60 diff --git a/.venv/lib/python3.12/site-packages/mypy/test/data.py b/.venv/lib/python3.12/site-packages/mypy/test/data.py new file mode 100644 index 0000000..5b0ad84 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/data.py @@ -0,0 +1,830 @@ +"""Utilities for processing .test files containing test case descriptions.""" + +from __future__ import annotations + +import os +import os.path +import posixpath +import re +import shutil +import sys +import tempfile +from abc import abstractmethod +from collections.abc import Iterator +from dataclasses import dataclass +from pathlib import Path +from re import Pattern +from typing import Any, Final, NamedTuple, NoReturn, Union +from typing_extensions import TypeAlias as _TypeAlias + +import pytest + +from mypy import defaults +from mypy.test.config import PREFIX, mypyc_output_dir, test_data_prefix, test_temp_dir + +root_dir = os.path.normpath(PREFIX) + +# Debuggers that we support for debugging mypyc run tests +# implementation of using each of these debuggers is in test_run.py +# TODO: support more debuggers +SUPPORTED_DEBUGGERS: Final = ["gdb", "lldb"] + + +# File modify/create operation: copy module contents from source_path. 
+class UpdateFile(NamedTuple): + module: str + content: str + target_path: str + + +# File delete operation: delete module file. +class DeleteFile(NamedTuple): + module: str + path: str + + +FileOperation: _TypeAlias = Union[UpdateFile, DeleteFile] + + +def _file_arg_to_module(filename: str) -> str: + filename, _ = os.path.splitext(filename) + parts = filename.split("/") # not os.sep since it comes from test data + if parts[-1] == "__init__": + parts.pop() + return ".".join(parts) + + +def parse_test_case(case: DataDrivenTestCase) -> None: + """Parse and prepare a single case from suite with test case descriptions. + + This method is part of the setup phase, just before the test case is run. + """ + test_items = parse_test_data(case.data, case.name) + base_path = case.suite.base_path + if case.suite.native_sep: + join = os.path.join + else: + join = posixpath.join + + out_section_missing = case.suite.required_out_section + + files: list[tuple[str, str]] = [] # path and contents + output_files: list[tuple[str, str | Pattern[str]]] = [] # output path and contents + output: list[str] = [] # Regular output errors + output2: dict[int, list[str]] = {} # Output errors for incremental, runs 2+ + deleted_paths: dict[int, set[str]] = {} # from run number of paths + stale_modules: dict[int, set[str]] = {} # from run number to module names + rechecked_modules: dict[int, set[str]] = {} # from run number module names + triggered: list[str] = [] # Active triggers (one line per incremental step) + targets: dict[int, list[str]] = {} # Fine-grained targets (per fine-grained update) + test_modules: list[str] = [] # Modules which are deemed "test" (vs "fixture") + + def _case_fail(msg: str) -> NoReturn: + pytest.fail(f"{case.file}:{case.line}: {msg}", pytrace=False) + + # Process the parsed items. Each item has a header of form [id args], + # optionally followed by lines of text. + item = first_item = test_items[0] + test_modules.append("__main__") + for item in test_items[1:]: + + def _item_fail(msg: str) -> NoReturn: + item_abs_line = case.line + item.line - 2 + pytest.fail(f"{case.file}:{item_abs_line}: {msg}", pytrace=False) + + if item.id in {"file", "fixture", "outfile", "outfile-re"}: + # Record an extra file needed for the test case. + assert item.arg is not None + contents = expand_variables("\n".join(item.data)) + path = join(base_path, item.arg) + if item.id != "fixture": + test_modules.append(_file_arg_to_module(item.arg)) + if item.id in {"file", "fixture"}: + files.append((path, contents)) + elif item.id == "outfile-re": + output_files.append((path, re.compile(contents.rstrip(), re.S))) + elif item.id == "outfile": + output_files.append((path, contents)) + elif item.id == "builtins": + # Use an alternative stub file for the builtins module. + assert item.arg is not None + mpath = join(os.path.dirname(case.file), item.arg) + with open(mpath, encoding="utf8") as f: + files.append((join(base_path, "builtins.pyi"), f.read())) + elif item.id == "typing": + # Use an alternative stub file for the typing module. + assert item.arg is not None + src_path = join(os.path.dirname(case.file), item.arg) + with open(src_path, encoding="utf8") as f: + files.append((join(base_path, "typing.pyi"), f.read())) + elif item.id == "_typeshed": + # Use an alternative stub file for the _typeshed module. 
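+ # e.g. a "[_typeshed my_typeshed.pyi]" section names a stub (relative to the
+ # .test file's directory) to use instead of the default _typeshed stub.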
+ assert item.arg is not None + src_path = join(os.path.dirname(case.file), item.arg) + with open(src_path, encoding="utf8") as f: + files.append((join(base_path, "_typeshed.pyi"), f.read())) + elif re.match(r"stale[0-9]*$", item.id): + passnum = 1 if item.id == "stale" else int(item.id[len("stale") :]) + assert passnum > 0 + modules = set() if item.arg is None else {t.strip() for t in item.arg.split(",")} + stale_modules[passnum] = modules + elif re.match(r"rechecked[0-9]*$", item.id): + passnum = 1 if item.id == "rechecked" else int(item.id[len("rechecked") :]) + assert passnum > 0 + modules = set() if item.arg is None else {t.strip() for t in item.arg.split(",")} + rechecked_modules[passnum] = modules + elif re.match(r"targets[0-9]*$", item.id): + passnum = 1 if item.id == "targets" else int(item.id[len("targets") :]) + assert passnum > 0 + reprocessed = [] if item.arg is None else [t.strip() for t in item.arg.split(",")] + targets[passnum] = reprocessed + elif item.id == "delete": + # File/directory to delete during a multi-step test case + assert item.arg is not None + m = re.match(r"(.*)\.([0-9]+)$", item.arg) + if m is None: + _item_fail(f"Invalid delete section {item.arg!r}") + num = int(m.group(2)) + if num < 2: + _item_fail(f"Can't delete during step {num}") + full = join(base_path, m.group(1)) + deleted_paths.setdefault(num, set()).add(full) + elif re.match(r"out[0-9]*$", item.id): + if item.arg is None: + args = [] + else: + args = item.arg.split(",") + + version_check = True + for arg in args: + if arg.startswith("version"): + compare_op = arg[7:9] + if compare_op not in {">=", "=="}: + _item_fail("Only >= and == version checks are currently supported") + version_str = arg[9:] + try: + version = tuple(int(x) for x in version_str.split(".")) + except ValueError: + _item_fail(f"{version_str!r} is not a valid python version") + if compare_op == ">=": + if version <= defaults.PYTHON3_VERSION: + _item_fail( + f"{arg} always true since minimum runtime version is {defaults.PYTHON3_VERSION}" + ) + version_check = sys.version_info >= version + elif compare_op == "==": + if version < defaults.PYTHON3_VERSION: + _item_fail( + f"{arg} always false since minimum runtime version is {defaults.PYTHON3_VERSION}" + ) + if not 1 < len(version) < 4: + _item_fail( + f'Only minor or patch version checks are currently supported with "==": {version_str!r}' + ) + version_check = sys.version_info[: len(version)] == version + if version_check: + tmp_output = [expand_variables(line) for line in item.data] + if os.path.sep == "\\" and case.normalize_output: + tmp_output = [fix_win_path(line) for line in tmp_output] + if item.id == "out" or item.id == "out1": + output = tmp_output + else: + passnum = int(item.id[len("out") :]) + assert passnum > 1 + output2[passnum] = tmp_output + out_section_missing = False + elif item.id == "triggered" and item.arg is None: + triggered = item.data + else: + section_str = item.id + (f" {item.arg}" if item.arg else "") + _item_fail(f"Invalid section header [{section_str}] in case {case.name!r}") + + if out_section_missing: + _case_fail(f"Required output section not found in case {case.name!r}") + + for passnum in stale_modules.keys(): + if passnum not in rechecked_modules: + # If the set of rechecked modules isn't specified, make it the same as the set + # of modules with a stale public interface. 
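+ # e.g. a "[stale2 pkg.mod]" section with no matching "[rechecked2 ...]"
+ # section implies rechecked2 == {"pkg.mod"}.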
+ rechecked_modules[passnum] = stale_modules[passnum] + if ( + passnum in stale_modules + and passnum in rechecked_modules + and not stale_modules[passnum].issubset(rechecked_modules[passnum]) + ): + _case_fail(f"Stale modules after pass {passnum} must be a subset of rechecked modules") + + output_inline_start = len(output) + input = first_item.data + expand_errors(input, output, "main") + for file_path, contents in files: + expand_errors(contents.split("\n"), output, file_path) + + seen_files = set() + for file, _ in files: + if file in seen_files: + _case_fail(f"Duplicated filename {file}. Did you include it multiple times?") + + seen_files.add(file) + + case.input = input + case.output = output + case.output_inline_start = output_inline_start + case.output2 = output2 + case.last_line = case.line + item.line + len(item.data) - 2 + case.files = files + case.output_files = output_files + case.expected_stale_modules = stale_modules + case.expected_rechecked_modules = rechecked_modules + case.deleted_paths = deleted_paths + case.triggered = triggered or [] + case.expected_fine_grained_targets = targets + case.test_modules = test_modules + + +class DataDrivenTestCase(pytest.Item): + """Holds parsed data-driven test cases, and handles directory setup and teardown.""" + + # Override parent member type + parent: DataFileCollector + + input: list[str] + output: list[str] # Output for the first pass + output_inline_start: int + output2: dict[int, list[str]] # Output for runs 2+, indexed by run number + + # full path of test suite + file = "" + line = 0 + + # (file path, file content) tuples + files: list[tuple[str, str]] + # Modules which is to be considered "test" rather than "fixture" + test_modules: list[str] + expected_stale_modules: dict[int, set[str]] + expected_rechecked_modules: dict[int, set[str]] + expected_fine_grained_targets: dict[int, list[str]] + + # Whether or not we should normalize the output to standardize things like + # forward vs backward slashes in file paths for Windows vs Linux. + normalize_output: bool + + # Extra attributes used by some tests. 
+ last_line: int + output_files: list[tuple[str, str | Pattern[str]]] # Path and contents for output files + deleted_paths: dict[int, set[str]] # Mapping run number -> paths + triggered: list[str] # Active triggers (one line per incremental step) + + def __init__( + self, + parent: DataFileCollector, + suite: DataSuite, + *, + file: str, + name: str, + writescache: bool, + only_when: str, + normalize_output: bool, + platform: str | None, + skip: bool, + xfail: bool, + data: str, + line: int, + ) -> None: + assert isinstance(parent, DataFileCollector) + super().__init__(name, parent) + self.suite = suite + self.file = file + self.writescache = writescache + self.only_when = only_when + self.normalize_output = normalize_output + if (platform == "windows" and sys.platform != "win32") or ( + platform == "posix" and sys.platform == "win32" + ): + skip = True + self.skip = skip + self.xfail = xfail + self.data = data + self.line = line + self.old_cwd: str | None = None + self.tmpdir: str | None = None + + def runtest(self) -> None: + if self.skip: + pytest.skip() + # TODO: add a better error message for when someone uses skip and xfail at the same time + elif self.xfail: + self.add_marker(pytest.mark.xfail) + parent = self.getparent(DataSuiteCollector) + assert parent is not None, "Should not happen" + suite = parent.obj() + suite.setup() + try: + suite.run_case(self) + except Exception: + # As a debugging aid, support copying the contents of the tmp directory somewhere + save_dir: str | None = self.config.getoption("--save-failures-to", None) + if save_dir: + assert self.tmpdir is not None + target_dir = os.path.join(save_dir, os.path.basename(self.tmpdir)) + print(f"Copying data from test {self.name} to {target_dir}") + if not os.path.isabs(target_dir): + assert self.old_cwd + target_dir = os.path.join(self.old_cwd, target_dir) + shutil.copytree(self.tmpdir, target_dir) + raise + + def setup(self) -> None: + parse_test_case(case=self) + self.old_cwd = os.getcwd() + self.tmpdir = tempfile.mkdtemp(prefix="mypy-test-") + os.chdir(self.tmpdir) + os.mkdir(test_temp_dir) + + # Precalculate steps for find_steps() + steps: dict[int, list[FileOperation]] = {} + + for path, content in self.files: + m = re.match(r".*\.([0-9]+)$", path) + if m: + # Skip writing subsequent incremental steps - rather + # store them as operations. 
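+ # e.g. a file listed as "tmp/mod.py.2" is not written now; it becomes
+ # UpdateFile("mod", <contents>, "tmp/mod.py") and is applied at step 2.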
+ num = int(m.group(1)) + assert num >= 2 + target_path = re.sub(r"\.[0-9]+$", "", path) + module = module_from_path(target_path) + operation = UpdateFile(module, content, target_path) + steps.setdefault(num, []).append(operation) + else: + # Write the first incremental steps + dir = os.path.dirname(path) + os.makedirs(dir, exist_ok=True) + with open(path, "w", encoding="utf8") as f: + f.write(content) + + for num, paths in self.deleted_paths.items(): + assert num >= 2 + for path in paths: + module = module_from_path(path) + steps.setdefault(num, []).append(DeleteFile(module, path)) + max_step = max(steps) if steps else 2 + self.steps = [steps.get(num, []) for num in range(2, max_step + 1)] + + def teardown(self) -> None: + if self.old_cwd is not None: + os.chdir(self.old_cwd) + if self.tmpdir is not None: + shutil.rmtree(self.tmpdir, ignore_errors=True) + self.old_cwd = None + self.tmpdir = None + + def reportinfo(self) -> tuple[str, int, str]: + return self.file, self.line, self.name + + def repr_failure( + self, excinfo: pytest.ExceptionInfo[BaseException], style: Any | None = None + ) -> str: + excrepr: object + if isinstance(excinfo.value, SystemExit): + # We assume that before doing exit() (which raises SystemExit) we've printed + # enough context about what happened so that a stack trace is not useful. + # In particular, uncaught exceptions during semantic analysis or type checking + # call exit() and they already print out a stack trace. + excrepr = excinfo.exconly() + elif isinstance(excinfo.value, pytest.fail.Exception) and not excinfo.value.pytrace: + excrepr = excinfo.exconly() + else: + excinfo.traceback = self.parent._traceback_filter(excinfo) + excrepr = excinfo.getrepr(style="short") + + return f"data: {self.file}:{self.line}:\n{excrepr}" + + def find_steps(self) -> list[list[FileOperation]]: + """Return a list of descriptions of file operations for each incremental step. + + The first list item corresponds to the first incremental step, the second for the + second step, etc. Each operation can either be a file modification/creation (UpdateFile) + or deletion (DeleteFile). + + Defaults to having two steps if there aern't any operations. + """ + return self.steps + + +def module_from_path(path: str) -> str: + path = re.sub(r"\.pyi?$", "", path) + # We can have a mix of Unix-style and Windows-style separators. + parts = re.split(r"[/\\]", path) + del parts[0] + module = ".".join(parts) + module = re.sub(r"\.__init__$", "", module) + return module + + +@dataclass +class TestItem: + """Parsed test caseitem. + + An item is of the form + [id arg] + .. data .. 
+ """ + + id: str + arg: str | None + # Processed, collapsed text data + data: list[str] + # Start line: 1-based, inclusive, relative to testcase + line: int + # End line: 1-based, exclusive, relative to testcase; not same as `line + len(test_item.data)` due to collapsing + end_line: int + + @property + def trimmed_newlines(self) -> int: # compensates for strip_list + return self.end_line - self.line - len(self.data) + + +def parse_test_data(raw_data: str, name: str) -> list[TestItem]: + """Parse a list of lines that represent a sequence of test items.""" + + lines = ["", "[case " + name + "]"] + raw_data.split("\n") + ret: list[TestItem] = [] + data: list[str] = [] + + id: str | None = None + arg: str | None = None + + i = 0 + i0 = 0 + while i < len(lines): + s = lines[i].strip() + + if lines[i].startswith("[") and s.endswith("]"): + if id: + data = collapse_line_continuation(data) + data = strip_list(data) + ret.append(TestItem(id, arg, data, i0 + 1, i)) + + i0 = i + id = s[1:-1] + arg = None + if " " in id: + arg = id[id.index(" ") + 1 :] + id = id[: id.index(" ")] + data = [] + elif lines[i].startswith("\\["): + data.append(lines[i][1:]) + elif not lines[i].startswith("--"): + data.append(lines[i]) + elif lines[i].startswith("----"): + data.append(lines[i][2:]) + i += 1 + + # Process the last item. + if id: + data = collapse_line_continuation(data) + data = strip_list(data) + ret.append(TestItem(id, arg, data, i0 + 1, i - 1)) + + return ret + + +def strip_list(l: list[str]) -> list[str]: + """Return a stripped copy of l. + + Strip whitespace at the end of all lines, and strip all empty + lines from the end of the array. + """ + + r: list[str] = [] + for s in l: + # Strip spaces at end of line + r.append(re.sub(r"\s+$", "", s)) + + while r and r[-1] == "": + r.pop() + + return r + + +def collapse_line_continuation(l: list[str]) -> list[str]: + r: list[str] = [] + cont = False + for s in l: + ss = re.sub(r"\\$", "", s) + if cont: + r[-1] += re.sub("^ +", "", ss) + else: + r.append(ss) + cont = s.endswith("\\") + return r + + +def expand_variables(s: str) -> str: + return s.replace("", root_dir) + + +def expand_errors(input: list[str], output: list[str], fnam: str) -> None: + """Transform comments such as '# E: message' or + '# E:3: message' in input. + + The result is lines like 'fnam:line: error: message'. + """ + + for i in range(len(input)): + # The first in the split things isn't a comment + for possible_err_comment in input[i].split(" # ")[1:]: + m = re.search( + r"^([ENW]):((?P\d+):)? (?P.*)$", possible_err_comment.strip() + ) + if m: + if m.group(1) == "E": + severity = "error" + elif m.group(1) == "N": + severity = "note" + elif m.group(1) == "W": + severity = "warning" + col = m.group("col") + message = m.group("message") + message = message.replace("\\#", "#") # adds back escaped # character + if col is None: + output.append(f"{fnam}:{i + 1}: {severity}: {message}") + else: + output.append(f"{fnam}:{i + 1}:{col}: {severity}: {message}") + + +def fix_win_path(line: str) -> str: + r"""Changes Windows paths to Linux paths in error messages. + + E.g. foo\bar.py -> foo/bar.py. + """ + line = line.replace(root_dir, root_dir.replace("\\", "/")) + m = re.match(r"^([\S/]+):(\d+:)?(\s+.*)", line) + if not m: + return line + else: + filename, lineno, message = m.groups() + return "{}:{}{}".format(filename.replace("\\", "/"), lineno or "", message) + + +def fix_cobertura_filename(line: str) -> str: + r"""Changes filename paths to Linux paths in Cobertura output files. + + E.g. 
filename="pkg\subpkg\a.py" -> filename="pkg/subpkg/a.py". + """ + m = re.search(r' None: + # Clean up directory where mypyc tests write intermediate files on failure + # to avoid any confusion between test runs + if os.path.isdir(mypyc_output_dir): + shutil.rmtree(mypyc_output_dir) + + +# This function name is special to pytest. See +# https://docs.pytest.org/en/latest/reference.html#initialization-hooks +def pytest_addoption(parser: Any) -> None: + group = parser.getgroup("mypy") + group.addoption( + "--update-data", + action="store_true", + default=False, + help="Update test data to reflect actual output (supported only for certain tests)", + ) + group.addoption( + "--save-failures-to", + default=None, + help="Copy the temp directories from failing tests to a target directory", + ) + group.addoption( + "--mypy-verbose", action="count", help="Set the verbose flag when creating mypy Options" + ) + group.addoption( + "--mypyc-showc", + action="store_true", + default=False, + help="Display C code on mypyc test failures", + ) + group.addoption( + "--mypyc-debug", + default=None, + dest="debugger", + choices=SUPPORTED_DEBUGGERS, + help="Run the first mypyc run test with the specified debugger", + ) + + +@pytest.hookimpl(tryfirst=True) +def pytest_cmdline_main(config: pytest.Config) -> None: + if config.getoption("--collectonly"): + return + # --update-data is not compatible with parallelized tests, disable parallelization + if config.getoption("--update-data"): + config.option.numprocesses = 0 + + +# This function name is special to pytest. See +# https://doc.pytest.org/en/latest/how-to/writing_plugins.html#collection-hooks +def pytest_pycollect_makeitem(collector: Any, name: str, obj: object) -> Any | None: + """Called by pytest on each object in modules configured in conftest.py files. + + collector is pytest.Collector, returns Optional[pytest.Class] + """ + if isinstance(obj, type): + # Only classes derived from DataSuite contain test cases, not the DataSuite class itself + if issubclass(obj, DataSuite) and obj is not DataSuite: + # Non-None result means this obj is a test case. + # The collect method of the returned DataSuiteCollector instance will be called later, + # with self.obj being obj. + return DataSuiteCollector.from_parent(parent=collector, name=name) + return None + + +_case_name_pattern = re.compile( + r"(?P[a-zA-Z_0-9]+)" + r"(?P-writescache)?" + r"(?P-only_when_cache|-only_when_nocache)?" + r"(?P-skip_path_normalization)?" + r"(-(?Pposix|windows))?" + r"(?P-skip)?" + r"(?P-xfail)?" +) + + +def split_test_cases( + parent: DataFileCollector, suite: DataSuite, file: str +) -> Iterator[DataDrivenTestCase]: + """Iterate over raw test cases in file, at collection time, ignoring sub items. + + The collection phase is slow, so any heavy processing should be deferred to after + uninteresting tests are filtered (when using -k PATTERN switch). 
+ """ + with open(file, encoding="utf-8") as f: + data = f.read() + cases = re.split(r"^\[case ([^]+)]+)\][ \t]*$\n", data, flags=re.DOTALL | re.MULTILINE) + cases_iter = iter(cases) + line_no = next(cases_iter).count("\n") + 1 + test_names = set() + for case_id in cases_iter: + data = next(cases_iter) + + m = _case_name_pattern.fullmatch(case_id) + if not m: + raise RuntimeError(f"Invalid testcase id {case_id!r}") + name = m.group("name") + if name in test_names: + raise RuntimeError( + 'Found a duplicate test name "{}" in {} on line {}'.format( + name, parent.name, line_no + ) + ) + yield DataDrivenTestCase.from_parent( + parent=parent, + suite=suite, + file=file, + name=add_test_name_suffix(name, suite.test_name_suffix), + writescache=bool(m.group("writescache")), + only_when=m.group("only_when"), + platform=m.group("platform"), + skip=bool(m.group("skip")), + xfail=bool(m.group("xfail")), + normalize_output=not m.group("skip_path_normalization"), + data=data, + line=line_no, + ) + line_no += data.count("\n") + 1 + + # Record existing tests to prevent duplicates: + test_names.update({name}) + + +class DataSuiteCollector(pytest.Class): + def collect(self) -> Iterator[DataFileCollector]: + """Called by pytest on each of the object returned from pytest_pycollect_makeitem""" + + # obj is the object for which pytest_pycollect_makeitem returned self. + suite: DataSuite = self.obj + + assert os.path.isdir( + suite.data_prefix + ), f"Test data prefix ({suite.data_prefix}) not set correctly" + + for data_file in suite.files: + yield DataFileCollector.from_parent(parent=self, name=data_file) + + +class DataFileFix(NamedTuple): + lineno: int # 1-offset, inclusive + end_lineno: int # 1-offset, exclusive + lines: list[str] + + +class DataFileCollector(pytest.Collector): + """Represents a single `.test` data driven test file. + + More context: https://github.com/python/mypy/issues/11662 + """ + + parent: DataSuiteCollector + + _fixes: list[DataFileFix] + + @classmethod # We have to fight with pytest here: + def from_parent( + cls, parent: DataSuiteCollector, *, name: str # type: ignore[override] + ) -> DataFileCollector: + collector = super().from_parent(parent, name=name) + assert isinstance(collector, DataFileCollector) + return collector + + def collect(self) -> Iterator[DataDrivenTestCase]: + yield from split_test_cases( + parent=self, + suite=self.parent.obj, + file=os.path.join(self.parent.obj.data_prefix, self.name), + ) + + def setup(self) -> None: + super().setup() + self._fixes = [] + + def teardown(self) -> None: + super().teardown() + self._apply_fixes() + + def enqueue_fix(self, fix: DataFileFix) -> None: + self._fixes.append(fix) + + def _apply_fixes(self) -> None: + if not self._fixes: + return + data_path = Path(self.parent.obj.data_prefix) / self.name + lines = data_path.read_text().split("\n") + # start from end to prevent line offsets from shifting as we update + for fix in sorted(self._fixes, reverse=True): + lines[fix.lineno - 1 : fix.end_lineno - 1] = fix.lines + data_path.write_text("\n".join(lines)) + + +def add_test_name_suffix(name: str, suffix: str) -> str: + # Find magic suffix of form "-foobar" (used for things like "-skip"). + m = re.search(r"-[-A-Za-z0-9]+$", name) + if m: + # Insert suite-specific test name suffix before the magic suffix + # which must be the last thing in the test case name since we + # are using endswith() checks. 
+ magic_suffix = m.group(0) + return name[: -len(magic_suffix)] + suffix + magic_suffix + else: + return name + suffix + + +def is_incremental(testcase: DataDrivenTestCase) -> bool: + return "incremental" in testcase.name.lower() or "incremental" in testcase.file + + +def has_stable_flags(testcase: DataDrivenTestCase) -> bool: + if any(re.match(r"# flags[2-9]:", line) for line in testcase.input): + return False + for filename, contents in testcase.files: + if os.path.basename(filename).startswith("mypy.ini."): + return False + return True + + +class DataSuite: + # option fields - class variables + files: list[str] + + base_path = test_temp_dir + + # Allow external users of the test code to override the data prefix + data_prefix = test_data_prefix + + required_out_section = False + + native_sep = False + + # Name suffix automatically added to each test case in the suite (can be + # used to distinguish test cases in suites that share data files) + test_name_suffix = "" + + def setup(self) -> None: + """Setup fixtures (ad-hoc)""" + + @abstractmethod + def run_case(self, testcase: DataDrivenTestCase) -> None: + raise NotImplementedError diff --git a/.venv/lib/python3.12/site-packages/mypy/test/helpers.py b/.venv/lib/python3.12/site-packages/mypy/test/helpers.py new file mode 100644 index 0000000..8ff6874 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/helpers.py @@ -0,0 +1,484 @@ +from __future__ import annotations + +import contextlib +import difflib +import os +import pathlib +import re +import shutil +import sys +import time +from collections.abc import Iterable, Iterator +from re import Pattern +from typing import IO, Any, Callable + +# Exporting Suite as alias to TestCase for backwards compatibility +# TODO: avoid aliasing - import and subclass TestCase directly +from unittest import TestCase + +Suite = TestCase # re-exporting + +import pytest + +import mypy.api as api +import mypy.version +from mypy import defaults +from mypy.main import process_options +from mypy.options import Options +from mypy.test.config import test_data_prefix, test_temp_dir +from mypy.test.data import DataDrivenTestCase, DeleteFile, UpdateFile, fix_cobertura_filename + +skip = pytest.mark.skip + +# AssertStringArraysEqual displays special line alignment helper messages if +# the first different line has at least this many characters, +MIN_LINE_LENGTH_FOR_ALIGNMENT = 5 + + +def run_mypy(args: list[str]) -> None: + __tracebackhide__ = True + # We must enable site packages even though they could cause problems, + # since stubs for typing_extensions live there. 
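+ # api.run returns (stdout report, stderr report, exit status); a nonzero
+ # status below dumps both reports and fails the surrounding test.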
+ outval, errval, status = api.run(args + ["--show-traceback", "--no-silence-site-packages"]) + if status != 0: + sys.stdout.write(outval) + sys.stderr.write(errval) + pytest.fail(reason="Sample check failed", pytrace=False) + + +def diff_ranges( + left: list[str], right: list[str] +) -> tuple[list[tuple[int, int]], list[tuple[int, int]]]: + seq = difflib.SequenceMatcher(None, left, right) + # note last triple is a dummy, so don't need to worry + blocks = seq.get_matching_blocks() + + i = 0 + j = 0 + left_ranges = [] + right_ranges = [] + for block in blocks: + # mismatched range + left_ranges.append((i, block.a)) + right_ranges.append((j, block.b)) + + i = block.a + block.size + j = block.b + block.size + + # matched range + left_ranges.append((block.a, i)) + right_ranges.append((block.b, j)) + return left_ranges, right_ranges + + +def render_diff_range( + ranges: list[tuple[int, int]], + content: list[str], + *, + colour: str | None = None, + output: IO[str] = sys.stderr, + indent: int = 2, +) -> None: + for i, line_range in enumerate(ranges): + is_matching = i % 2 == 1 + lines = content[line_range[0] : line_range[1]] + for j, line in enumerate(lines): + if ( + is_matching + # elide the middle of matching blocks + and j >= 3 + and j < len(lines) - 3 + ): + if j == 3: + output.write(" " * indent + "...\n") + continue + + if not is_matching and colour: + output.write(colour) + + output.write(" " * indent + line) + + if not is_matching: + if colour: + output.write("\033[0m") + output.write(" (diff)") + + output.write("\n") + + +def assert_string_arrays_equal( + expected: list[str], actual: list[str], msg: str, *, traceback: bool = False +) -> None: + """Assert that two string arrays are equal. + + Display any differences in a human-readable form. + """ + actual = clean_up(actual) + if expected != actual: + expected_ranges, actual_ranges = diff_ranges(expected, actual) + sys.stderr.write("Expected:\n") + red = "\033[31m" if sys.platform != "win32" else None + render_diff_range(expected_ranges, expected, colour=red) + sys.stderr.write("Actual:\n") + green = "\033[32m" if sys.platform != "win32" else None + render_diff_range(actual_ranges, actual, colour=green) + + sys.stderr.write("\n") + first_diff = next( + (i for i, (a, b) in enumerate(zip(expected, actual)) if a != b), + max(len(expected), len(actual)), + ) + if 0 <= first_diff < len(actual) and ( + len(expected[first_diff]) >= MIN_LINE_LENGTH_FOR_ALIGNMENT + or len(actual[first_diff]) >= MIN_LINE_LENGTH_FOR_ALIGNMENT + ): + # Display message that helps visualize the differences between two + # long lines. 
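+ # show_align_message prints the expected/actual pair as "E:"/"A:" lines
+ # with a caret under the first differing column, eliding long common prefixes.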
+ show_align_message(expected[first_diff], actual[first_diff]) + + sys.stderr.write( + "Update the test output using --update-data " + "(implies -n0; you can additionally use the -k selector to update only specific tests)\n" + ) + pytest.fail(msg, pytrace=traceback) + + +def assert_module_equivalence(name: str, expected: Iterable[str], actual: Iterable[str]) -> None: + expected_normalized = sorted(expected) + actual_normalized = sorted(set(actual).difference({"__main__"})) + assert_string_arrays_equal( + expected_normalized, + actual_normalized, + ('Actual modules ({}) do not match expected modules ({}) for "[{} ...]"').format( + ", ".join(actual_normalized), ", ".join(expected_normalized), name + ), + ) + + +def assert_target_equivalence(name: str, expected: list[str], actual: list[str]) -> None: + """Compare actual and expected targets (order sensitive).""" + assert_string_arrays_equal( + expected, + actual, + ('Actual targets ({}) do not match expected targets ({}) for "[{} ...]"').format( + ", ".join(actual), ", ".join(expected), name + ), + ) + + +def show_align_message(s1: str, s2: str) -> None: + """Align s1 and s2 so that the their first difference is highlighted. + + For example, if s1 is 'foobar' and s2 is 'fobar', display the + following lines: + + E: foobar + A: fobar + ^ + + If s1 and s2 are long, only display a fragment of the strings around the + first difference. If s1 is very short, do nothing. + """ + + # Seeing what went wrong is trivial even without alignment if the expected + # string is very short. In this case do nothing to simplify output. + if len(s1) < 4: + return + + maxw = 72 # Maximum number of characters shown + + sys.stderr.write("Alignment of first line difference:\n") + + trunc = False + while s1[:30] == s2[:30]: + s1 = s1[10:] + s2 = s2[10:] + trunc = True + + if trunc: + s1 = "..." + s1 + s2 = "..." + s2 + + max_len = max(len(s1), len(s2)) + extra = "" + if max_len > maxw: + extra = "..." + + # Write a chunk of both lines, aligned. + sys.stderr.write(f" E: {s1[:maxw]}{extra}\n") + sys.stderr.write(f" A: {s2[:maxw]}{extra}\n") + # Write an indicator character under the different columns. + sys.stderr.write(" ") + for j in range(min(maxw, max(len(s1), len(s2)))): + if s1[j : j + 1] != s2[j : j + 1]: + sys.stderr.write("^") # Difference + break + else: + sys.stderr.write(" ") # Equal + sys.stderr.write("\n") + + +def clean_up(a: list[str]) -> list[str]: + """Remove common directory prefix from all strings in a. + + This uses a naive string replace; it seems to work well enough. Also + remove trailing carriage returns. + """ + res = [] + pwd = os.getcwd() + driver = pwd + "/driver.py" + for s in a: + prefix = os.sep + ss = s + for p in prefix, prefix.replace(os.sep, "/"): + if p != "/" and p != "//" and p != "\\" and p != "\\\\": + ss = ss.replace(p, "") + # Replace memory address with zeros + if "at 0x" in ss: + ss = re.sub(r"(at 0x)\w+>", r"\g<1>000000000000>", ss) + # Ignore spaces at end of line. + ss = re.sub(" +$", "", ss) + # Remove pwd from driver.py's path + ss = ss.replace(driver, "driver.py") + res.append(re.sub("\\r$", "", ss)) + return res + + +@contextlib.contextmanager +def local_sys_path_set() -> Iterator[None]: + """Temporary insert current directory into sys.path. + + This can be used by test cases that do runtime imports, for example + by the stubgen tests. + """ + old_sys_path = sys.path.copy() + if not ("" in sys.path or "." 
in sys.path): + sys.path.insert(0, "") + try: + yield + finally: + sys.path = old_sys_path + + +def testfile_pyversion(path: str) -> tuple[int, int]: + if m := re.search(r"python3([0-9]+)\.test$", path): + # For older unsupported version like python38, + # default to that earliest supported version. + return max((3, int(m.group(1))), defaults.PYTHON3_VERSION_MIN) + else: + return defaults.PYTHON3_VERSION_MIN + + +def normalize_error_messages(messages: list[str]) -> list[str]: + """Translate an array of error messages to use / as path separator.""" + + a = [] + for m in messages: + a.append(m.replace(os.sep, "/")) + return a + + +def retry_on_error(func: Callable[[], Any], max_wait: float = 1.0) -> None: + """Retry callback with exponential backoff when it raises OSError. + + If the function still generates an error after max_wait seconds, propagate + the exception. + + This can be effective against random file system operation failures on + Windows. + """ + t0 = time.time() + wait_time = 0.01 + while True: + try: + func() + return + except OSError: + wait_time = min(wait_time * 2, t0 + max_wait - time.time()) + if wait_time <= 0.01: + # Done enough waiting, the error seems persistent. + raise + time.sleep(wait_time) + + +def good_repr(obj: object) -> str: + if isinstance(obj, str): + if obj.count("\n") > 1: + bits = ["'''\\"] + for line in obj.split("\n"): + # force repr to use ' not ", then cut it off + bits.append(repr('"' + line)[2:-1]) + bits[-1] += "'''" + return "\n".join(bits) + return repr(obj) + + +def assert_equal(a: object, b: object, fmt: str = "{} != {}") -> None: + __tracebackhide__ = True + if a != b: + raise AssertionError(fmt.format(good_repr(a), good_repr(b))) + + +def typename(t: type) -> str: + if "." in str(t): + return str(t).split(".")[-1].rstrip("'>") + else: + return str(t)[8:-2] + + +def assert_type(typ: type, value: object) -> None: + __tracebackhide__ = True + if type(value) != typ: + raise AssertionError(f"Invalid type {typename(type(value))}, expected {typename(typ)}") + + +def parse_options( + program_text: str, testcase: DataDrivenTestCase, incremental_step: int +) -> Options: + """Parse comments like '# flags: --foo' in a test case.""" + options = Options() + flags = re.search("# flags: (.*)$", program_text, flags=re.MULTILINE) + if incremental_step > 1: + flags2 = re.search(f"# flags{incremental_step}: (.*)$", program_text, flags=re.MULTILINE) + if flags2: + flags = flags2 + + if flags: + flag_list = flags.group(1).split() + flag_list.append("--no-site-packages") # the tests shouldn't need an installed Python + targets, options = process_options(flag_list, require_targets=False) + if targets: + # TODO: support specifying targets via the flags pragma + raise RuntimeError("Specifying targets via the flags pragma is not supported.") + if "--show-error-codes" not in flag_list: + options.hide_error_codes = True + else: + flag_list = [] + options = Options() + options.error_summary = False + options.hide_error_codes = True + options.force_union_syntax = True + + # Allow custom python version to override testfile_pyversion. 
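+ # e.g. a "# flags: --python-version=3.12" pragma in the test source takes
+ # precedence over the version implied by a "python312.test" file name.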
+ if all(flag.split("=")[0] != "--python-version" for flag in flag_list): + options.python_version = testfile_pyversion(testcase.file) + + if testcase.config.getoption("--mypy-verbose"): + options.verbosity = testcase.config.getoption("--mypy-verbose") + + return options + + +def split_lines(*streams: bytes) -> list[str]: + """Returns a single list of string lines from the byte streams in args.""" + return [s for stream in streams for s in stream.decode("utf8").splitlines()] + + +def write_and_fudge_mtime(content: str, target_path: str) -> None: + # In some systems, mtime has a resolution of 1 second which can + # cause annoying-to-debug issues when a file has the same size + # after a change. We manually set the mtime to circumvent this. + # Note that we increment the old file's mtime, which guarantees a + # different value, rather than incrementing the mtime after the + # copy, which could leave the mtime unchanged if the old file had + # a similarly fudged mtime. + new_time = None + if os.path.isfile(target_path): + new_time = os.stat(target_path).st_mtime + 1 + + dir = os.path.dirname(target_path) + os.makedirs(dir, exist_ok=True) + with open(target_path, "w", encoding="utf-8") as target: + target.write(content) + + if new_time: + os.utime(target_path, times=(new_time, new_time)) + + +def perform_file_operations(operations: list[UpdateFile | DeleteFile]) -> None: + for op in operations: + if isinstance(op, UpdateFile): + # Modify/create file + write_and_fudge_mtime(op.content, op.target_path) + else: + # Delete file/directory + if os.path.isdir(op.path): + # Sanity check to avoid unexpected deletions + assert op.path.startswith("tmp") + shutil.rmtree(op.path) + else: + # Use retries to work around potential flakiness on Windows (AppVeyor). + path = op.path + retry_on_error(lambda: os.remove(path)) + + +def check_test_output_files( + testcase: DataDrivenTestCase, step: int, strip_prefix: str = "" +) -> None: + for path, expected_content in testcase.output_files: + path = path.removeprefix(strip_prefix) + if not os.path.exists(path): + raise AssertionError( + "Expected file {} was not produced by test case{}".format( + path, " on step %d" % step if testcase.output2 else "" + ) + ) + with open(path, encoding="utf8") as output_file: + actual_output_content = output_file.read() + + if isinstance(expected_content, Pattern): + if expected_content.fullmatch(actual_output_content) is not None: + continue + raise AssertionError( + "Output file {} did not match its expected output pattern\n---\n{}\n---".format( + path, actual_output_content + ) + ) + + normalized_output = normalize_file_output( + actual_output_content.splitlines(), os.path.abspath(test_temp_dir) + ) + # We always normalize things like timestamp, but only handle operating-system + # specific things if requested. 
+        if testcase.normalize_output:
+            if testcase.suite.native_sep and os.path.sep == "\\":
+                normalized_output = [fix_cobertura_filename(line) for line in normalized_output]
+            normalized_output = normalize_error_messages(normalized_output)
+            if os.path.basename(testcase.file) == "reports.test":
+                normalized_output = normalize_report_meta(normalized_output)
+        assert_string_arrays_equal(
+            expected_content.splitlines(),
+            normalized_output,
+            "Output file {} did not match its expected output{}".format(
+                path, " on step %d" % step if testcase.output2 else ""
+            ),
+        )
+
+
+def normalize_file_output(content: list[str], current_abs_path: str) -> list[str]:
+    """Normalize file output for comparison."""
+    timestamp_regex = re.compile(r"\d{10}")
+    result = [x.replace(current_abs_path, "$PWD") for x in content]
+    version = mypy.version.__version__
+    result = [re.sub(r"\b" + re.escape(version) + r"\b", "$VERSION", x) for x in result]
+    # We generate a new mypy.version when building mypy wheels that
+    # lacks base_version, so handle that case.
+    base_version = getattr(mypy.version, "base_version", version)
+    result = [re.sub(r"\b" + re.escape(base_version) + r"\b", "$VERSION", x) for x in result]
+    result = [timestamp_regex.sub("$TIMESTAMP", x) for x in result]
+    return result
+
+
+def normalize_report_meta(content: list[str]) -> list[str]:
+    # libxml 2.15 and newer emits the "modern" version of this element.
+    # Normalize the old style to look the same.
+    html_meta = '<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />'
+    return ['<meta charset="utf-8"/>' if x == html_meta else x for x in content]
+
+
+def find_test_files(pattern: str, exclude: list[str] | None = None) -> list[str]:
+    return [
+        path.name
+        for path in (pathlib.Path(test_data_prefix).rglob(pattern))
+        if path.name not in (exclude or [])
+    ]
diff --git a/.venv/lib/python3.12/site-packages/mypy/test/meta/__init__.py b/.venv/lib/python3.12/site-packages/mypy/test/meta/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/.venv/lib/python3.12/site-packages/mypy/test/meta/_pytest.py b/.venv/lib/python3.12/site-packages/mypy/test/meta/_pytest.py
new file mode 100644
index 0000000..0caa6b8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/mypy/test/meta/_pytest.py
@@ -0,0 +1,72 @@
+import shlex
+import subprocess
+import sys
+import textwrap
+import uuid
+from collections.abc import Iterable
+from dataclasses import dataclass
+from pathlib import Path
+
+from mypy.test.config import test_data_prefix
+
+
+@dataclass
+class PytestResult:
+    input: str
+    input_updated: str  # any updates made by --update-data
+    stdout: str
+    stderr: str
+
+
+def dedent_docstring(s: str) -> str:
+    return textwrap.dedent(s).lstrip()
+
+
+def run_pytest_data_suite(
+    data_suite: str,
+    *,
+    data_file_prefix: str = "check",
+    pytest_node_prefix: str = "mypy/test/testcheck.py::TypeCheckSuite",
+    extra_args: Iterable[str],
+    max_attempts: int,
+) -> PytestResult:
+    """
+    Runs a suite of data test cases through pytest until either tests pass
+    or until a maximum number of attempts (needed for incremental tests).
+
+    :param data_suite: the actual "suite" i.e. 
the contents of a .test file + """ + p_test_data = Path(test_data_prefix) + p_root = p_test_data.parent.parent + p = p_test_data / f"{data_file_prefix}-meta-{uuid.uuid4()}.test" + assert not p.exists() + data_suite = dedent_docstring(data_suite) + try: + p.write_text(data_suite) + + test_nodeid = f"{pytest_node_prefix}::{p.name}" + extra_args = [sys.executable, "-m", "pytest", "-n", "0", "-s", *extra_args, test_nodeid] + cmd = shlex.join(extra_args) + for i in range(max_attempts - 1, -1, -1): + print(f">> {cmd}") + proc = subprocess.run(extra_args, capture_output=True, check=False, cwd=p_root) + if proc.returncode == 0: + break + prefix = "NESTED PYTEST STDOUT" + for line in proc.stdout.decode().splitlines(): + print(f"{prefix}: {line}") + prefix = " " * len(prefix) + prefix = "NESTED PYTEST STDERR" + for line in proc.stderr.decode().splitlines(): + print(f"{prefix}: {line}") + prefix = " " * len(prefix) + print(f"Exit code {proc.returncode} ({i} attempts remaining)") + + return PytestResult( + input=data_suite, + input_updated=p.read_text(), + stdout=proc.stdout.decode(), + stderr=proc.stderr.decode(), + ) + finally: + p.unlink() diff --git a/.venv/lib/python3.12/site-packages/mypy/test/meta/test_diff_helper.py b/.venv/lib/python3.12/site-packages/mypy/test/meta/test_diff_helper.py new file mode 100644 index 0000000..047751f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/meta/test_diff_helper.py @@ -0,0 +1,47 @@ +import io + +from mypy.test.helpers import Suite, diff_ranges, render_diff_range + + +class DiffHelperSuite(Suite): + def test_render_diff_range(self) -> None: + expected = ["hello", "world"] + actual = ["goodbye", "world"] + + expected_ranges, actual_ranges = diff_ranges(expected, actual) + + output = io.StringIO() + render_diff_range(expected_ranges, expected, output=output) + assert output.getvalue() == " hello (diff)\n world\n" + output = io.StringIO() + render_diff_range(actual_ranges, actual, output=output) + assert output.getvalue() == " goodbye (diff)\n world\n" + + expected = ["a", "b", "c", "d", "e", "f", "g", "h", "circle", "i", "j"] + actual = ["a", "b", "c", "d", "e", "f", "g", "h", "square", "i", "j"] + + expected_ranges, actual_ranges = diff_ranges(expected, actual) + + output = io.StringIO() + render_diff_range(expected_ranges, expected, output=output, indent=0) + assert output.getvalue() == "a\nb\nc\n...\nf\ng\nh\ncircle (diff)\ni\nj\n" + output = io.StringIO() + render_diff_range(actual_ranges, actual, output=output, indent=0) + assert output.getvalue() == "a\nb\nc\n...\nf\ng\nh\nsquare (diff)\ni\nj\n" + + def test_diff_ranges(self) -> None: + a = ["hello", "world"] + b = ["hello", "world"] + + assert diff_ranges(a, b) == ( + [(0, 0), (0, 2), (2, 2), (2, 2)], + [(0, 0), (0, 2), (2, 2), (2, 2)], + ) + + a = ["hello", "world"] + b = ["goodbye", "world"] + + assert diff_ranges(a, b) == ( + [(0, 1), (1, 2), (2, 2), (2, 2)], + [(0, 1), (1, 2), (2, 2), (2, 2)], + ) diff --git a/.venv/lib/python3.12/site-packages/mypy/test/meta/test_parse_data.py b/.venv/lib/python3.12/site-packages/mypy/test/meta/test_parse_data.py new file mode 100644 index 0000000..8c6fc16 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/meta/test_parse_data.py @@ -0,0 +1,73 @@ +""" +A "meta test" which tests the parsing of .test files. This is not meant to become exhaustive +but to ensure we maintain a basic level of ergonomics for mypy contributors. 
+""" + +from mypy.test.helpers import Suite +from mypy.test.meta._pytest import PytestResult, run_pytest_data_suite + + +def _run_pytest(data_suite: str) -> PytestResult: + return run_pytest_data_suite(data_suite, extra_args=[], max_attempts=1) + + +class ParseTestDataSuite(Suite): + def test_parse_invalid_case(self) -> None: + # Act + result = _run_pytest( + """ + [case abc] + s: str + [case foo-XFAIL] + s: str + """ + ) + + # Assert + assert "Invalid testcase id 'foo-XFAIL'" in result.stdout + + def test_parse_invalid_section(self) -> None: + # Act + result = _run_pytest( + """ + [case abc] + s: str + [unknownsection] + abc + """ + ) + + # Assert + expected_lineno = result.input.splitlines().index("[unknownsection]") + 1 + expected = ( + f".test:{expected_lineno}: Invalid section header [unknownsection] in case 'abc'" + ) + assert expected in result.stdout + + def test_bad_ge_version_check(self) -> None: + # Act + actual = _run_pytest( + """ + [case abc] + s: str + [out version>=3.9] + abc + """ + ) + + # Assert + assert "version>=3.9 always true since minimum runtime version is (3, 9)" in actual.stdout + + def test_bad_eq_version_check(self) -> None: + # Act + actual = _run_pytest( + """ + [case abc] + s: str + [out version==3.7] + abc + """ + ) + + # Assert + assert "version==3.7 always false since minimum runtime version is (3, 9)" in actual.stdout diff --git a/.venv/lib/python3.12/site-packages/mypy/test/meta/test_update_data.py b/.venv/lib/python3.12/site-packages/mypy/test/meta/test_update_data.py new file mode 100644 index 0000000..820fd35 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/meta/test_update_data.py @@ -0,0 +1,135 @@ +""" +A "meta test" which tests the `--update-data` feature for updating .test files. +Updating the expected output, especially when it's in the form of inline (comment) assertions, +can be brittle, which is why we're "meta-testing" here. +""" + +from mypy.test.helpers import Suite +from mypy.test.meta._pytest import PytestResult, dedent_docstring, run_pytest_data_suite + + +def _run_pytest_update_data(data_suite: str) -> PytestResult: + """ + Runs a suite of data test cases through 'pytest --update-data' until either tests pass + or until a maximum number of attempts (needed for incremental tests). + """ + return run_pytest_data_suite(data_suite, extra_args=["--update-data"], max_attempts=3) + + +class UpdateDataSuite(Suite): + def test_update_data(self) -> None: + # Note: We test multiple testcases rather than 'test case per test case' + # so we could also exercise rewriting multiple testcases at once. 
+ result = _run_pytest_update_data( + """ + [case testCorrect] + s: str = 42 # E: Incompatible types in assignment (expression has type "int", variable has type "str") + + [case testWrong] + s: str = 42 # E: wrong error + + [case testXfail-xfail] + s: str = 42 # E: wrong error + + [case testWrongMultiline] + s: str = 42 # E: foo \ + # N: bar + + [case testMissingMultiline] + s: str = 42; i: int = 'foo' + + [case testExtraneous] + s: str = 'foo' # E: wrong error + + [case testExtraneousMultiline] + s: str = 'foo' # E: foo \ + # E: bar + + [case testExtraneousMultilineNonError] + s: str = 'foo' # W: foo \ + # N: bar + + [case testOutCorrect] + s: str = 42 + [out] + main:1: error: Incompatible types in assignment (expression has type "int", variable has type "str") + + [case testOutWrong] + s: str = 42 + [out] + main:1: error: foobar + + [case testOutWrongIncremental] + s: str = 42 + [out] + main:1: error: foobar + [out2] + main:1: error: foobar + + [case testWrongMultipleFiles] + import a, b + s: str = 42 # E: foo + [file a.py] + s1: str = 42 # E: bar + [file b.py] + s2: str = 43 # E: baz + [builtins fixtures/list.pyi] + """ + ) + + # Assert + expected = dedent_docstring( + """ + [case testCorrect] + s: str = 42 # E: Incompatible types in assignment (expression has type "int", variable has type "str") + + [case testWrong] + s: str = 42 # E: Incompatible types in assignment (expression has type "int", variable has type "str") + + [case testXfail-xfail] + s: str = 42 # E: wrong error + + [case testWrongMultiline] + s: str = 42 # E: Incompatible types in assignment (expression has type "int", variable has type "str") + + [case testMissingMultiline] + s: str = 42; i: int = 'foo' # E: Incompatible types in assignment (expression has type "int", variable has type "str") \\ + # E: Incompatible types in assignment (expression has type "str", variable has type "int") + + [case testExtraneous] + s: str = 'foo' + + [case testExtraneousMultiline] + s: str = 'foo' + + [case testExtraneousMultilineNonError] + s: str = 'foo' + + [case testOutCorrect] + s: str = 42 + [out] + main:1: error: Incompatible types in assignment (expression has type "int", variable has type "str") + + [case testOutWrong] + s: str = 42 + [out] + main:1: error: Incompatible types in assignment (expression has type "int", variable has type "str") + + [case testOutWrongIncremental] + s: str = 42 + [out] + main:1: error: Incompatible types in assignment (expression has type "int", variable has type "str") + [out2] + main:1: error: Incompatible types in assignment (expression has type "int", variable has type "str") + + [case testWrongMultipleFiles] + import a, b + s: str = 42 # E: Incompatible types in assignment (expression has type "int", variable has type "str") + [file a.py] + s1: str = 42 # E: Incompatible types in assignment (expression has type "int", variable has type "str") + [file b.py] + s2: str = 43 # E: Incompatible types in assignment (expression has type "int", variable has type "str") + [builtins fixtures/list.pyi] + """ + ) + assert result.input_updated == expected diff --git a/.venv/lib/python3.12/site-packages/mypy/test/test_config_parser.py b/.venv/lib/python3.12/site-packages/mypy/test/test_config_parser.py new file mode 100644 index 0000000..5971437 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/test_config_parser.py @@ -0,0 +1,130 @@ +from __future__ import annotations + +import contextlib +import os +import tempfile +import unittest +from collections.abc import Iterator +from pathlib import 
Path + +from mypy.config_parser import _find_config_file +from mypy.defaults import CONFIG_NAMES, SHARED_CONFIG_NAMES + + +@contextlib.contextmanager +def chdir(target: Path) -> Iterator[None]: + # Replace with contextlib.chdir in Python 3.11 + dir = os.getcwd() + os.chdir(target) + try: + yield + finally: + os.chdir(dir) + + +def write_config(path: Path, content: str | None = None) -> None: + if path.suffix == ".toml": + if content is None: + content = "[tool.mypy]\nstrict = true" + path.write_text(content) + else: + if content is None: + content = "[mypy]\nstrict = True" + path.write_text(content) + + +class FindConfigFileSuite(unittest.TestCase): + + def test_no_config(self) -> None: + with tempfile.TemporaryDirectory() as _tmpdir: + tmpdir = Path(_tmpdir) + (tmpdir / ".git").touch() + with chdir(tmpdir): + result = _find_config_file() + assert result is None + + def test_parent_config_with_and_without_git(self) -> None: + for name in CONFIG_NAMES + SHARED_CONFIG_NAMES: + with tempfile.TemporaryDirectory() as _tmpdir: + tmpdir = Path(_tmpdir) + + config = tmpdir / name + write_config(config) + + child = tmpdir / "child" + child.mkdir() + + with chdir(child): + result = _find_config_file() + assert result is not None + assert Path(result[2]).resolve() == config.resolve() + + git = child / ".git" + git.touch() + + result = _find_config_file() + assert result is None + + git.unlink() + result = _find_config_file() + assert result is not None + hg = child / ".hg" + hg.touch() + + result = _find_config_file() + assert result is None + + def test_precedence(self) -> None: + with tempfile.TemporaryDirectory() as _tmpdir: + tmpdir = Path(_tmpdir) + + pyproject = tmpdir / "pyproject.toml" + setup_cfg = tmpdir / "setup.cfg" + mypy_ini = tmpdir / "mypy.ini" + dot_mypy = tmpdir / ".mypy.ini" + + child = tmpdir / "child" + child.mkdir() + + for cwd in [tmpdir, child]: + write_config(pyproject) + write_config(setup_cfg) + write_config(mypy_ini) + write_config(dot_mypy) + + with chdir(cwd): + result = _find_config_file() + assert result is not None + assert os.path.basename(result[2]) == "mypy.ini" + + mypy_ini.unlink() + result = _find_config_file() + assert result is not None + assert os.path.basename(result[2]) == ".mypy.ini" + + dot_mypy.unlink() + result = _find_config_file() + assert result is not None + assert os.path.basename(result[2]) == "pyproject.toml" + + pyproject.unlink() + result = _find_config_file() + assert result is not None + assert os.path.basename(result[2]) == "setup.cfg" + + def test_precedence_missing_section(self) -> None: + with tempfile.TemporaryDirectory() as _tmpdir: + tmpdir = Path(_tmpdir) + + child = tmpdir / "child" + child.mkdir() + + parent_mypy = tmpdir / "mypy.ini" + child_pyproject = child / "pyproject.toml" + write_config(parent_mypy) + write_config(child_pyproject, content="") + + with chdir(child): + result = _find_config_file() + assert result is not None + assert Path(result[2]).resolve() == parent_mypy.resolve() diff --git a/.venv/lib/python3.12/site-packages/mypy/test/test_find_sources.py b/.venv/lib/python3.12/site-packages/mypy/test/test_find_sources.py new file mode 100644 index 0000000..321f340 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/test_find_sources.py @@ -0,0 +1,376 @@ +from __future__ import annotations + +import os +import shutil +import tempfile +import unittest + +import pytest + +from mypy.find_sources import InvalidSourceList, SourceFinder, create_source_list +from mypy.fscache import FileSystemCache +from 
mypy.modulefinder import BuildSource +from mypy.options import Options + + +class FakeFSCache(FileSystemCache): + def __init__(self, files: set[str]) -> None: + self.files = {os.path.abspath(f) for f in files} + + def isfile(self, path: str) -> bool: + return path in self.files + + def isdir(self, path: str) -> bool: + if not path.endswith(os.sep): + path += os.sep + return any(f.startswith(path) for f in self.files) + + def listdir(self, path: str) -> list[str]: + if not path.endswith(os.sep): + path += os.sep + return list({f[len(path) :].split(os.sep)[0] for f in self.files if f.startswith(path)}) + + def init_under_package_root(self, path: str) -> bool: + return False + + +def normalise_path(path: str) -> str: + path = os.path.splitdrive(path)[1] + path = path.replace(os.sep, "/") + return path + + +def normalise_build_source_list(sources: list[BuildSource]) -> list[tuple[str, str | None]]: + return sorted( + (s.module, (normalise_path(s.base_dir) if s.base_dir is not None else None)) + for s in sources + ) + + +def crawl(finder: SourceFinder, f: str) -> tuple[str, str]: + module, base_dir = finder.crawl_up(f) + return module, normalise_path(base_dir) + + +def find_sources_in_dir(finder: SourceFinder, f: str) -> list[tuple[str, str | None]]: + return normalise_build_source_list(finder.find_sources_in_dir(os.path.abspath(f))) + + +def find_sources( + paths: list[str], options: Options, fscache: FileSystemCache +) -> list[tuple[str, str | None]]: + paths = [os.path.abspath(p) for p in paths] + return normalise_build_source_list(create_source_list(paths, options, fscache)) + + +class SourceFinderSuite(unittest.TestCase): + def setUp(self) -> None: + self.tempdir = tempfile.mkdtemp() + self.oldcwd = os.getcwd() + os.chdir(self.tempdir) + + def tearDown(self) -> None: + os.chdir(self.oldcwd) + shutil.rmtree(self.tempdir) + + def test_crawl_no_namespace(self) -> None: + options = Options() + options.namespace_packages = False + + finder = SourceFinder(FakeFSCache({"/setup.py"}), options) + assert crawl(finder, "/setup.py") == ("setup", "/") + + finder = SourceFinder(FakeFSCache({"/a/setup.py"}), options) + assert crawl(finder, "/a/setup.py") == ("setup", "/a") + + finder = SourceFinder(FakeFSCache({"/a/b/setup.py"}), options) + assert crawl(finder, "/a/b/setup.py") == ("setup", "/a/b") + + finder = SourceFinder(FakeFSCache({"/a/setup.py", "/a/__init__.py"}), options) + assert crawl(finder, "/a/setup.py") == ("a.setup", "/") + + finder = SourceFinder(FakeFSCache({"/a/invalid-name/setup.py", "/a/__init__.py"}), options) + assert crawl(finder, "/a/invalid-name/setup.py") == ("setup", "/a/invalid-name") + + finder = SourceFinder(FakeFSCache({"/a/b/setup.py", "/a/__init__.py"}), options) + assert crawl(finder, "/a/b/setup.py") == ("setup", "/a/b") + + finder = SourceFinder( + FakeFSCache({"/a/b/c/setup.py", "/a/__init__.py", "/a/b/c/__init__.py"}), options + ) + assert crawl(finder, "/a/b/c/setup.py") == ("c.setup", "/a/b") + + def test_crawl_namespace(self) -> None: + options = Options() + options.namespace_packages = True + + finder = SourceFinder(FakeFSCache({"/setup.py"}), options) + assert crawl(finder, "/setup.py") == ("setup", "/") + + finder = SourceFinder(FakeFSCache({"/a/setup.py"}), options) + assert crawl(finder, "/a/setup.py") == ("setup", "/a") + + finder = SourceFinder(FakeFSCache({"/a/b/setup.py"}), options) + assert crawl(finder, "/a/b/setup.py") == ("setup", "/a/b") + + finder = SourceFinder(FakeFSCache({"/a/setup.py", "/a/__init__.py"}), options) + assert crawl(finder, 
"/a/setup.py") == ("a.setup", "/") + + finder = SourceFinder(FakeFSCache({"/a/invalid-name/setup.py", "/a/__init__.py"}), options) + assert crawl(finder, "/a/invalid-name/setup.py") == ("setup", "/a/invalid-name") + + finder = SourceFinder(FakeFSCache({"/a/b/setup.py", "/a/__init__.py"}), options) + assert crawl(finder, "/a/b/setup.py") == ("a.b.setup", "/") + + finder = SourceFinder( + FakeFSCache({"/a/b/c/setup.py", "/a/__init__.py", "/a/b/c/__init__.py"}), options + ) + assert crawl(finder, "/a/b/c/setup.py") == ("a.b.c.setup", "/") + + def test_crawl_namespace_explicit_base(self) -> None: + options = Options() + options.namespace_packages = True + options.explicit_package_bases = True + + finder = SourceFinder(FakeFSCache({"/setup.py"}), options) + assert crawl(finder, "/setup.py") == ("setup", "/") + + finder = SourceFinder(FakeFSCache({"/a/setup.py"}), options) + assert crawl(finder, "/a/setup.py") == ("setup", "/a") + + finder = SourceFinder(FakeFSCache({"/a/b/setup.py"}), options) + assert crawl(finder, "/a/b/setup.py") == ("setup", "/a/b") + + finder = SourceFinder(FakeFSCache({"/a/setup.py", "/a/__init__.py"}), options) + assert crawl(finder, "/a/setup.py") == ("a.setup", "/") + + finder = SourceFinder(FakeFSCache({"/a/invalid-name/setup.py", "/a/__init__.py"}), options) + assert crawl(finder, "/a/invalid-name/setup.py") == ("setup", "/a/invalid-name") + + finder = SourceFinder(FakeFSCache({"/a/b/setup.py", "/a/__init__.py"}), options) + assert crawl(finder, "/a/b/setup.py") == ("a.b.setup", "/") + + finder = SourceFinder( + FakeFSCache({"/a/b/c/setup.py", "/a/__init__.py", "/a/b/c/__init__.py"}), options + ) + assert crawl(finder, "/a/b/c/setup.py") == ("a.b.c.setup", "/") + + # set mypy path, so we actually have some explicit base dirs + options.mypy_path = ["/a/b"] + + finder = SourceFinder(FakeFSCache({"/a/b/c/setup.py"}), options) + assert crawl(finder, "/a/b/c/setup.py") == ("c.setup", "/a/b") + + finder = SourceFinder( + FakeFSCache({"/a/b/c/setup.py", "/a/__init__.py", "/a/b/c/__init__.py"}), options + ) + assert crawl(finder, "/a/b/c/setup.py") == ("c.setup", "/a/b") + + options.mypy_path = ["/a/b", "/a/b/c"] + finder = SourceFinder(FakeFSCache({"/a/b/c/setup.py"}), options) + assert crawl(finder, "/a/b/c/setup.py") == ("setup", "/a/b/c") + + def test_crawl_namespace_multi_dir(self) -> None: + options = Options() + options.namespace_packages = True + options.explicit_package_bases = True + options.mypy_path = ["/a", "/b"] + + finder = SourceFinder(FakeFSCache({"/a/pkg/a.py", "/b/pkg/b.py"}), options) + assert crawl(finder, "/a/pkg/a.py") == ("pkg.a", "/a") + assert crawl(finder, "/b/pkg/b.py") == ("pkg.b", "/b") + + def test_find_sources_in_dir_no_namespace(self) -> None: + options = Options() + options.namespace_packages = False + + files = { + "/pkg/a1/b/c/d/e.py", + "/pkg/a1/b/f.py", + "/pkg/a2/__init__.py", + "/pkg/a2/b/c/d/e.py", + "/pkg/a2/b/f.py", + } + finder = SourceFinder(FakeFSCache(files), options) + assert find_sources_in_dir(finder, "/") == [ + ("a2", "/pkg"), + ("e", "/pkg/a1/b/c/d"), + ("e", "/pkg/a2/b/c/d"), + ("f", "/pkg/a1/b"), + ("f", "/pkg/a2/b"), + ] + + def test_find_sources_in_dir_namespace(self) -> None: + options = Options() + options.namespace_packages = True + + files = { + "/pkg/a1/b/c/d/e.py", + "/pkg/a1/b/f.py", + "/pkg/a2/__init__.py", + "/pkg/a2/b/c/d/e.py", + "/pkg/a2/b/f.py", + } + finder = SourceFinder(FakeFSCache(files), options) + assert find_sources_in_dir(finder, "/") == [ + ("a2", "/pkg"), + ("a2.b.c.d.e", "/pkg"), + ("a2.b.f", 
"/pkg"), + ("e", "/pkg/a1/b/c/d"), + ("f", "/pkg/a1/b"), + ] + + def test_find_sources_in_dir_namespace_explicit_base(self) -> None: + options = Options() + options.namespace_packages = True + options.explicit_package_bases = True + options.mypy_path = ["/"] + + files = { + "/pkg/a1/b/c/d/e.py", + "/pkg/a1/b/f.py", + "/pkg/a2/__init__.py", + "/pkg/a2/b/c/d/e.py", + "/pkg/a2/b/f.py", + } + finder = SourceFinder(FakeFSCache(files), options) + assert find_sources_in_dir(finder, "/") == [ + ("pkg.a1.b.c.d.e", "/"), + ("pkg.a1.b.f", "/"), + ("pkg.a2", "/"), + ("pkg.a2.b.c.d.e", "/"), + ("pkg.a2.b.f", "/"), + ] + + options.mypy_path = ["/pkg"] + finder = SourceFinder(FakeFSCache(files), options) + assert find_sources_in_dir(finder, "/") == [ + ("a1.b.c.d.e", "/pkg"), + ("a1.b.f", "/pkg"), + ("a2", "/pkg"), + ("a2.b.c.d.e", "/pkg"), + ("a2.b.f", "/pkg"), + ] + + def test_find_sources_in_dir_namespace_multi_dir(self) -> None: + options = Options() + options.namespace_packages = True + options.explicit_package_bases = True + options.mypy_path = ["/a", "/b"] + + finder = SourceFinder(FakeFSCache({"/a/pkg/a.py", "/b/pkg/b.py"}), options) + assert find_sources_in_dir(finder, "/") == [("pkg.a", "/a"), ("pkg.b", "/b")] + + def test_find_sources_exclude(self) -> None: + options = Options() + options.namespace_packages = True + + # default + for excluded_dir in ["site-packages", ".whatever", "node_modules", ".x/.z"]: + fscache = FakeFSCache({"/dir/a.py", f"/dir/venv/{excluded_dir}/b.py"}) + assert find_sources(["/"], options, fscache) == [("a", "/dir")] + with pytest.raises(InvalidSourceList): + find_sources(["/dir/venv/"], options, fscache) + assert find_sources([f"/dir/venv/{excluded_dir}"], options, fscache) == [ + ("b", f"/dir/venv/{excluded_dir}") + ] + assert find_sources([f"/dir/venv/{excluded_dir}/b.py"], options, fscache) == [ + ("b", f"/dir/venv/{excluded_dir}") + ] + + files = { + "/pkg/a1/b/c/d/e.py", + "/pkg/a1/b/f.py", + "/pkg/a2/__init__.py", + "/pkg/a2/b/c/d/e.py", + "/pkg/a2/b/f.py", + } + + # file name + options.exclude = [r"/f\.py$"] + fscache = FakeFSCache(files) + assert find_sources(["/"], options, fscache) == [ + ("a2", "/pkg"), + ("a2.b.c.d.e", "/pkg"), + ("e", "/pkg/a1/b/c/d"), + ] + assert find_sources(["/pkg/a1/b/f.py"], options, fscache) == [("f", "/pkg/a1/b")] + assert find_sources(["/pkg/a2/b/f.py"], options, fscache) == [("a2.b.f", "/pkg")] + + # directory name + options.exclude = ["/a1/"] + fscache = FakeFSCache(files) + assert find_sources(["/"], options, fscache) == [ + ("a2", "/pkg"), + ("a2.b.c.d.e", "/pkg"), + ("a2.b.f", "/pkg"), + ] + with pytest.raises(InvalidSourceList): + find_sources(["/pkg/a1"], options, fscache) + with pytest.raises(InvalidSourceList): + find_sources(["/pkg/a1/"], options, fscache) + with pytest.raises(InvalidSourceList): + find_sources(["/pkg/a1/b"], options, fscache) + + options.exclude = ["/a1/$"] + assert find_sources(["/pkg/a1"], options, fscache) == [ + ("e", "/pkg/a1/b/c/d"), + ("f", "/pkg/a1/b"), + ] + + # paths + options.exclude = ["/pkg/a1/"] + fscache = FakeFSCache(files) + assert find_sources(["/"], options, fscache) == [ + ("a2", "/pkg"), + ("a2.b.c.d.e", "/pkg"), + ("a2.b.f", "/pkg"), + ] + with pytest.raises(InvalidSourceList): + find_sources(["/pkg/a1"], options, fscache) + + # OR two patterns together + for orred in [["/(a1|a3)/"], ["a1", "a3"], ["a3", "a1"]]: + options.exclude = orred + fscache = FakeFSCache(files) + assert find_sources(["/"], options, fscache) == [ + ("a2", "/pkg"), + ("a2.b.c.d.e", "/pkg"), + ("a2.b.f", 
"/pkg"), + ] + + options.exclude = ["b/c/"] + fscache = FakeFSCache(files) + assert find_sources(["/"], options, fscache) == [ + ("a2", "/pkg"), + ("a2.b.f", "/pkg"), + ("f", "/pkg/a1/b"), + ] + + # nothing should be ignored as a result of this + big_exclude1 = [ + "/pkg/a/", + "/2", + "/1", + "/pk/", + "/kg", + "/g.py", + "/bc", + "/xxx/pkg/a2/b/f.py", + "xxx/pkg/a2/b/f.py", + ] + big_exclude2 = ["|".join(big_exclude1)] + for big_exclude in [big_exclude1, big_exclude2]: + options.exclude = big_exclude + fscache = FakeFSCache(files) + assert len(find_sources(["/"], options, fscache)) == len(files) + + files = { + "pkg/a1/b/c/d/e.py", + "pkg/a1/b/f.py", + "pkg/a2/__init__.py", + "pkg/a2/b/c/d/e.py", + "pkg/a2/b/f.py", + } + fscache = FakeFSCache(files) + assert len(find_sources(["."], options, fscache)) == len(files) diff --git a/.venv/lib/python3.12/site-packages/mypy/test/test_ref_info.py b/.venv/lib/python3.12/site-packages/mypy/test/test_ref_info.py new file mode 100644 index 0000000..05052e4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/test_ref_info.py @@ -0,0 +1,45 @@ +"""Test exporting line-level reference information (undocumented feature)""" + +from __future__ import annotations + +import json +import os +import sys + +from mypy import build +from mypy.modulefinder import BuildSource +from mypy.options import Options +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase, DataSuite +from mypy.test.helpers import assert_string_arrays_equal + + +class RefInfoSuite(DataSuite): + required_out_section = True + files = ["ref-info.test"] + + def run_case(self, testcase: DataDrivenTestCase) -> None: + options = Options() + options.use_builtins_fixtures = True + options.show_traceback = True + options.export_ref_info = True # This is the flag we are testing + + src = "\n".join(testcase.input) + result = build.build( + sources=[BuildSource("main", None, src)], options=options, alt_lib_path=test_temp_dir + ) + assert not result.errors + + major, minor = sys.version_info[:2] + ref_path = os.path.join(options.cache_dir, f"{major}.{minor}", "__main__.refs.json") + + with open(ref_path) as refs_file: + data = json.load(refs_file) + + a = [] + for item in data: + a.append(f"{item['line']}:{item['column']}:{item['target']}") + + assert_string_arrays_equal( + testcase.output, a, f"Invalid output ({testcase.file}, line {testcase.line})" + ) diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testapi.py b/.venv/lib/python3.12/site-packages/mypy/test/testapi.py new file mode 100644 index 0000000..95bd95e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testapi.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +import sys +from io import StringIO + +import mypy.api +from mypy.test.helpers import Suite + + +class APISuite(Suite): + def setUp(self) -> None: + self.sys_stdout = sys.stdout + self.sys_stderr = sys.stderr + sys.stdout = self.stdout = StringIO() + sys.stderr = self.stderr = StringIO() + + def tearDown(self) -> None: + sys.stdout = self.sys_stdout + sys.stderr = self.sys_stderr + assert self.stdout.getvalue() == "" + assert self.stderr.getvalue() == "" + + def test_capture_bad_opt(self) -> None: + """stderr should be captured when a bad option is passed.""" + _, stderr, _ = mypy.api.run(["--some-bad-option"]) + assert isinstance(stderr, str) + assert stderr != "" + + def test_capture_empty(self) -> None: + """stderr should be captured when a bad option is passed.""" + _, stderr, _ = mypy.api.run([]) + assert 
isinstance(stderr, str) + assert stderr != "" + + def test_capture_help(self) -> None: + """stdout should be captured when --help is passed.""" + stdout, _, _ = mypy.api.run(["--help"]) + assert isinstance(stdout, str) + assert stdout != "" + + def test_capture_version(self) -> None: + """stdout should be captured when --version is passed.""" + stdout, _, _ = mypy.api.run(["--version"]) + assert isinstance(stdout, str) + assert stdout != "" diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testargs.py b/.venv/lib/python3.12/site-packages/mypy/test/testargs.py new file mode 100644 index 0000000..7c13990 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testargs.py @@ -0,0 +1,77 @@ +"""Ensure the argparse parser and Options class are in sync. + +In particular, verify that the argparse defaults are the same as the Options +defaults, and that argparse doesn't assign any new members to the Options +object it creates. +""" + +from __future__ import annotations + +import argparse +import sys + +from mypy.main import infer_python_executable, process_options +from mypy.options import Options +from mypy.test.helpers import Suite, assert_equal + + +class ArgSuite(Suite): + def test_coherence(self) -> None: + options = Options() + _, parsed_options = process_options([], require_targets=False) + # FIX: test this too. Requires changing working dir to avoid finding 'setup.cfg' + options.config_file = parsed_options.config_file + assert_equal(options.snapshot(), parsed_options.snapshot()) + + def test_executable_inference(self) -> None: + """Test the --python-executable flag with --python-version""" + sys_ver_str = "{ver.major}.{ver.minor}".format(ver=sys.version_info) + + base = ["file.py"] # dummy file + + # test inference given one (infer the other) + matching_version = base + [f"--python-version={sys_ver_str}"] + _, options = process_options(matching_version) + assert options.python_version == sys.version_info[:2] + assert options.python_executable == sys.executable + + matching_version = base + [f"--python-executable={sys.executable}"] + _, options = process_options(matching_version) + assert options.python_version == sys.version_info[:2] + assert options.python_executable == sys.executable + + # test inference given both + matching_version = base + [ + f"--python-version={sys_ver_str}", + f"--python-executable={sys.executable}", + ] + _, options = process_options(matching_version) + assert options.python_version == sys.version_info[:2] + assert options.python_executable == sys.executable + + # test that --no-site-packages will disable executable inference + matching_version = base + [f"--python-version={sys_ver_str}", "--no-site-packages"] + _, options = process_options(matching_version) + assert options.python_version == sys.version_info[:2] + assert options.python_executable is None + + # Test setting python_version/executable from config file + special_opts = argparse.Namespace() + special_opts.python_executable = None + special_opts.python_version = None + special_opts.no_executable = None + + # first test inferring executable from version + options = Options() + options.python_executable = None + options.python_version = sys.version_info[:2] + infer_python_executable(options, special_opts) + assert options.python_version == sys.version_info[:2] + assert options.python_executable == sys.executable + + # then test inferring version from executable + options = Options() + options.python_executable = sys.executable + infer_python_executable(options, special_opts) + assert 
options.python_version == sys.version_info[:2] + assert options.python_executable == sys.executable diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testcheck.py b/.venv/lib/python3.12/site-packages/mypy/test/testcheck.py new file mode 100644 index 0000000..f2b7057 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testcheck.py @@ -0,0 +1,311 @@ +"""Type checker test cases""" + +from __future__ import annotations + +import os +import re +import sys +import tempfile +from pathlib import Path + +from mypy import build +from mypy.errors import CompileError +from mypy.modulefinder import BuildSource, FindModuleCache, SearchPaths +from mypy.test.config import test_data_prefix, test_temp_dir +from mypy.test.data import DataDrivenTestCase, DataSuite, FileOperation, module_from_path +from mypy.test.helpers import ( + assert_module_equivalence, + assert_string_arrays_equal, + assert_target_equivalence, + check_test_output_files, + find_test_files, + normalize_error_messages, + parse_options, + perform_file_operations, +) +from mypy.test.update_data import update_testcase_output + +try: + import lxml # type: ignore[import-untyped] +except ImportError: + lxml = None + + +import pytest + +# List of files that contain test case descriptions. +# Includes all check-* files with the .test extension in the test-data/unit directory +typecheck_files = find_test_files(pattern="check-*.test") + +# Tests that use Python version specific features: +if sys.version_info < (3, 10): + typecheck_files.remove("check-python310.test") +if sys.version_info < (3, 11): + typecheck_files.remove("check-python311.test") +if sys.version_info < (3, 12): + typecheck_files.remove("check-python312.test") +if sys.version_info < (3, 13): + typecheck_files.remove("check-python313.test") +if sys.version_info < (3, 14): + typecheck_files.remove("check-python314.test") + + +class TypeCheckSuite(DataSuite): + files = typecheck_files + + def run_case(self, testcase: DataDrivenTestCase) -> None: + if os.path.basename(testcase.file) == "check-modules-case.test": + with tempfile.NamedTemporaryFile(prefix="test", dir=".") as temp_file: + temp_path = Path(temp_file.name) + if not temp_path.with_name(temp_path.name.upper()).exists(): + pytest.skip("File system is not case‐insensitive") + if lxml is None and os.path.basename(testcase.file) == "check-reports.test": + pytest.skip("Cannot import lxml. Is it installed?") + incremental = ( + "incremental" in testcase.name.lower() + or "incremental" in testcase.file + or "serialize" in testcase.file + ) + if incremental: + # Incremental tests are run once with a cold cache, once with a warm cache. + # Expect success on first run, errors from testcase.output (if any) on second run. + num_steps = max([2] + list(testcase.output2.keys())) + # Check that there are no file changes beyond the last run (they would be ignored). 
+ for dn, dirs, files in os.walk(os.curdir): + for file in files: + m = re.search(r"\.([2-9])$", file) + if m and int(m.group(1)) > num_steps: + raise ValueError( + "Output file {} exists though test case only has {} runs".format( + file, num_steps + ) + ) + steps = testcase.find_steps() + for step in range(1, num_steps + 1): + idx = step - 2 + ops = steps[idx] if idx < len(steps) and idx >= 0 else [] + self.run_case_once(testcase, ops, step) + else: + self.run_case_once(testcase) + + def _sort_output_if_needed(self, testcase: DataDrivenTestCase, a: list[str]) -> None: + idx = testcase.output_inline_start + if not testcase.files or idx == len(testcase.output): + return + + def _filename(_msg: str) -> str: + return _msg.partition(":")[0] + + file_weights = {file: idx for idx, file in enumerate(_filename(msg) for msg in a)} + testcase.output[idx:] = sorted( + testcase.output[idx:], key=lambda msg: file_weights.get(_filename(msg), -1) + ) + + def run_case_once( + self, + testcase: DataDrivenTestCase, + operations: list[FileOperation] | None = None, + incremental_step: int = 0, + ) -> None: + if operations is None: + operations = [] + original_program_text = "\n".join(testcase.input) + module_data = self.parse_module(original_program_text, incremental_step) + + # Unload already loaded plugins, they may be updated. + for file, _ in testcase.files: + module = module_from_path(file) + if module.endswith("_plugin") and module in sys.modules: + del sys.modules[module] + if incremental_step == 0 or incremental_step == 1: + # In run 1, copy program text to program file. + for module_name, program_path, program_text in module_data: + if module_name == "__main__": + with open(program_path, "w", encoding="utf8") as f: + f.write(program_text) + break + elif incremental_step > 1: + # In runs 2+, copy *.[num] files to * files. + perform_file_operations(operations) + + # Parse options after moving files (in case mypy.ini is being moved). + options = parse_options(original_program_text, testcase, incremental_step) + options.use_builtins_fixtures = True + options.show_traceback = True + + # Enable some options automatically based on test file name. 
+ if "columns" in testcase.file: + options.show_column_numbers = True + if "errorcodes" in testcase.file: + options.hide_error_codes = False + if "abstract" not in testcase.file: + options.allow_empty_bodies = not testcase.name.endswith("_no_empty") + if "union-error" not in testcase.file and "Pep604" not in testcase.name: + options.force_union_syntax = True + + if incremental_step and options.incremental: + # Don't overwrite # flags: --no-incremental in incremental test cases + options.incremental = True + else: + options.incremental = False + # Don't waste time writing cache unless we are specifically looking for it + if not testcase.writescache: + options.cache_dir = os.devnull + + sources = [] + for module_name, program_path, program_text in module_data: + # Always set to none so we're forced to reread the module in incremental mode + sources.append( + BuildSource(program_path, module_name, None if incremental_step else program_text) + ) + + plugin_dir = os.path.join(test_data_prefix, "plugins") + sys.path.insert(0, plugin_dir) + + res = None + blocker = False + try: + res = build.build(sources=sources, options=options, alt_lib_path=test_temp_dir) + a = res.errors + except CompileError as e: + a = e.messages + blocker = True + finally: + assert sys.path[0] == plugin_dir + del sys.path[0] + + if testcase.normalize_output: + a = normalize_error_messages(a) + + # Make sure error messages match + if incremental_step < 2: + if incremental_step == 1: + msg = "Unexpected type checker output in incremental, run 1 ({}, line {})" + else: + assert incremental_step == 0 + msg = "Unexpected type checker output ({}, line {})" + self._sort_output_if_needed(testcase, a) + output = testcase.output + else: + msg = ( + f"Unexpected type checker output in incremental, run {incremental_step}" + + " ({}, line {})" + ) + output = testcase.output2.get(incremental_step, []) + + if output != a and testcase.config.getoption("--update-data", False): + update_testcase_output(testcase, a, incremental_step=incremental_step) + + assert_string_arrays_equal(output, a, msg.format(testcase.file, testcase.line)) + + if res: + if options.cache_dir != os.devnull: + self.verify_cache(module_data, res.manager, blocker, incremental_step) + + name = "targets" + if incremental_step: + name += str(incremental_step + 1) + expected = testcase.expected_fine_grained_targets.get(incremental_step + 1) + actual = [ + target + for module, target in res.manager.processed_targets + if module in testcase.test_modules + ] + if expected is not None: + assert_target_equivalence(name, expected, actual) + if incremental_step > 1: + suffix = "" if incremental_step == 2 else str(incremental_step - 1) + expected_rechecked = testcase.expected_rechecked_modules.get(incremental_step - 1) + if expected_rechecked is not None: + assert_module_equivalence( + "rechecked" + suffix, expected_rechecked, res.manager.rechecked_modules + ) + expected_stale = testcase.expected_stale_modules.get(incremental_step - 1) + if expected_stale is not None: + assert_module_equivalence( + "stale" + suffix, expected_stale, res.manager.stale_modules + ) + + if testcase.output_files: + check_test_output_files(testcase, incremental_step, strip_prefix="tmp/") + + def verify_cache( + self, + module_data: list[tuple[str, str, str]], + manager: build.BuildManager, + blocker: bool, + step: int, + ) -> None: + if not blocker: + # There should be valid cache metadata for each module except + # in case of a blocking error in themselves or one of their + # dependencies. 
+ modules = self.find_module_files(manager) + modules.update({module_name: path for module_name, path, text in module_data}) + missing_paths = self.find_missing_cache_files(modules, manager) + if missing_paths: + raise AssertionError(f"cache data missing for {missing_paths} on run {step}") + assert os.path.isfile(os.path.join(manager.options.cache_dir, ".gitignore")) + cachedir_tag = os.path.join(manager.options.cache_dir, "CACHEDIR.TAG") + assert os.path.isfile(cachedir_tag) + with open(cachedir_tag) as f: + assert f.read().startswith("Signature: 8a477f597d28d172789f06886806bc55") + + def find_module_files(self, manager: build.BuildManager) -> dict[str, str]: + return {id: module.path for id, module in manager.modules.items()} + + def find_missing_cache_files( + self, modules: dict[str, str], manager: build.BuildManager + ) -> set[str]: + ignore_errors = True + missing = {} + for id, path in modules.items(): + meta = build.find_cache_meta(id, path, manager) + if not build.validate_meta(meta, id, path, ignore_errors, manager): + missing[id] = path + return set(missing.values()) + + def parse_module( + self, program_text: str, incremental_step: int = 0 + ) -> list[tuple[str, str, str]]: + """Return the module and program names for a test case. + + Normally, the unit tests will parse the default ('__main__') + module and follow all the imports listed there. You can override + this behavior and instruct the tests to check multiple modules + by using a comment like this in the test case input: + + # cmd: mypy -m foo.bar foo.baz + + You can also use `# cmdN:` to have a different cmd for incremental + step N (2, 3, ...). + + Return a list of tuples (module name, file name, program text). + """ + m = re.search("# cmd: mypy -m ([a-zA-Z0-9_. ]+)$", program_text, flags=re.MULTILINE) + if incremental_step > 1: + alt_regex = f"# cmd{incremental_step}: mypy -m ([a-zA-Z0-9_. ]+)$" + alt_m = re.search(alt_regex, program_text, flags=re.MULTILINE) + if alt_m is not None: + # Optionally return a different command if in a later step + # of incremental mode, otherwise default to reusing the + # original cmd. + m = alt_m + + if m: + # The test case wants to use a non-default main + # module. Look up the module and give it as the thing to + # analyze. + module_names = m.group(1) + out = [] + search_paths = SearchPaths((test_temp_dir,), (), (), ()) + cache = FindModuleCache(search_paths, fscache=None, options=None) + for module_name in module_names.split(" "): + path = cache.find_module(module_name) + assert isinstance(path, str), f"Can't find ad hoc case file: {module_name}" + with open(path, encoding="utf8") as f: + program_text = f.read() + out.append((module_name, path, program_text)) + return out + else: + return [("__main__", "main", program_text)] diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testcmdline.py b/.venv/lib/python3.12/site-packages/mypy/test/testcmdline.py new file mode 100644 index 0000000..11d2290 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testcmdline.py @@ -0,0 +1,150 @@ +"""Test cases for the command line. + +To begin we test that "mypy [/]" always recurses down the +whole tree. 
+""" + +from __future__ import annotations + +import os +import re +import subprocess +import sys + +from mypy.test.config import PREFIX, test_temp_dir +from mypy.test.data import DataDrivenTestCase, DataSuite +from mypy.test.helpers import ( + assert_string_arrays_equal, + check_test_output_files, + normalize_error_messages, +) + +try: + import lxml # type: ignore[import-untyped] +except ImportError: + lxml = None + +import pytest + +# Path to Python 3 interpreter +python3_path = sys.executable + +# Files containing test case descriptions. +cmdline_files = ["cmdline.test", "cmdline.pyproject.test", "reports.test", "envvars.test"] + + +class PythonCmdlineSuite(DataSuite): + files = cmdline_files + native_sep = True + + def run_case(self, testcase: DataDrivenTestCase) -> None: + if lxml is None and os.path.basename(testcase.file) == "reports.test": + pytest.skip("Cannot import lxml. Is it installed?") + for step in [1] + sorted(testcase.output2): + test_python_cmdline(testcase, step) + + +def test_python_cmdline(testcase: DataDrivenTestCase, step: int) -> None: + assert testcase.old_cwd is not None, "test was not properly set up" + # Write the program to a file. + program = "_program.py" + program_path = os.path.join(test_temp_dir, program) + with open(program_path, "w", encoding="utf8") as file: + for s in testcase.input: + file.write(f"{s}\n") + args = parse_args(testcase.input[0]) + custom_cwd = parse_cwd(testcase.input[1]) if len(testcase.input) > 1 else None + args.append("--show-traceback") + if "--error-summary" not in args: + args.append("--no-error-summary") + if "--show-error-codes" not in args: + args.append("--hide-error-codes") + if "--disallow-empty-bodies" not in args: + args.append("--allow-empty-bodies") + if "--no-force-union-syntax" not in args: + args.append("--force-union-syntax") + # Type check the program. + fixed = [python3_path, "-m", "mypy"] + env = os.environ.copy() + env.pop("COLUMNS", None) + extra_path = os.path.join(os.path.abspath(test_temp_dir), "pypath") + env["PYTHONPATH"] = PREFIX + if os.path.isdir(extra_path): + env["PYTHONPATH"] += os.pathsep + extra_path + cwd = os.path.join(test_temp_dir, custom_cwd or "") + args = [arg.replace("$CWD", os.path.abspath(cwd)) for arg in args] + process = subprocess.Popen( + fixed + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env + ) + outb, errb = process.communicate() + result = process.returncode + # Split output into lines. + out = [s.rstrip("\n\r") for s in str(outb, "utf8").splitlines()] + err = [s.rstrip("\n\r") for s in str(errb, "utf8").splitlines()] + + if "PYCHARM_HOSTED" in os.environ: + for pos, line in enumerate(err): + if line.startswith("pydev debugger: "): + # Delete the attaching debugger message itself, plus the extra newline added. + del err[pos : pos + 2] + break + + # Remove temp file. + os.remove(program_path) + # Compare actual output to expected. + if testcase.output_files: + # Ignore stdout, but we insist on empty stderr and zero status. 
+ if err or result: + raise AssertionError( + "Expected zero status and empty stderr%s, got %d and\n%s" + % (" on step %d" % step if testcase.output2 else "", result, "\n".join(err + out)) + ) + check_test_output_files(testcase, step) + else: + if testcase.normalize_output: + out = normalize_error_messages(err + out) + obvious_result = 1 if out else 0 + if obvious_result != result: + out.append(f"== Return code: {result}") + expected_out = testcase.output if step == 1 else testcase.output2[step] + # Strip "tmp/" out of the test so that # E: works... + expected_out = [s.replace("tmp" + os.sep, "") for s in expected_out] + assert_string_arrays_equal( + expected_out, + out, + "Invalid output ({}, line {}){}".format( + testcase.file, testcase.line, " on step %d" % step if testcase.output2 else "" + ), + ) + + +def parse_args(line: str) -> list[str]: + """Parse the first line of the program for the command line. + + This should have the form + + # cmd: mypy + + For example: + + # cmd: mypy pkg/ + """ + m = re.match("# cmd: mypy (.*)$", line) + if not m: + return [] # No args; mypy will spit out an error. + return m.group(1).split() + + +def parse_cwd(line: str) -> str | None: + """Parse the second line of the program for the command line. + + This should have the form + + # cwd: + + For example: + + # cwd: main/subdir + """ + m = re.match("# cwd: (.*)$", line) + return m.group(1) if m else None diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testconstraints.py b/.venv/lib/python3.12/site-packages/mypy/test/testconstraints.py new file mode 100644 index 0000000..277694a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testconstraints.py @@ -0,0 +1,134 @@ +from __future__ import annotations + +from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints +from mypy.test.helpers import Suite +from mypy.test.typefixture import TypeFixture +from mypy.types import Instance, TupleType, UnpackType + + +class ConstraintsSuite(Suite): + def setUp(self) -> None: + self.fx = TypeFixture() + + def test_no_type_variables(self) -> None: + assert not infer_constraints(self.fx.o, self.fx.o, SUBTYPE_OF) + + def test_basic_type_variable(self) -> None: + fx = self.fx + for direction in [SUBTYPE_OF, SUPERTYPE_OF]: + assert infer_constraints(fx.gt, fx.ga, direction) == [ + Constraint(type_var=fx.t, op=direction, target=fx.a) + ] + + def test_basic_type_var_tuple_subtype(self) -> None: + fx = self.fx + assert infer_constraints( + Instance(fx.gvi, [UnpackType(fx.ts)]), Instance(fx.gvi, [fx.a, fx.b]), SUBTYPE_OF + ) == [ + Constraint(type_var=fx.ts, op=SUBTYPE_OF, target=TupleType([fx.a, fx.b], fx.std_tuple)) + ] + + def test_basic_type_var_tuple(self) -> None: + fx = self.fx + assert set( + infer_constraints( + Instance(fx.gvi, [UnpackType(fx.ts)]), Instance(fx.gvi, [fx.a, fx.b]), SUPERTYPE_OF + ) + ) == { + Constraint( + type_var=fx.ts, op=SUPERTYPE_OF, target=TupleType([fx.a, fx.b], fx.std_tuple) + ), + Constraint( + type_var=fx.ts, op=SUBTYPE_OF, target=TupleType([fx.a, fx.b], fx.std_tuple) + ), + } + + def test_type_var_tuple_with_prefix_and_suffix(self) -> None: + fx = self.fx + assert set( + infer_constraints( + Instance(fx.gv2i, [fx.t, UnpackType(fx.ts), fx.s]), + Instance(fx.gv2i, [fx.a, fx.b, fx.c, fx.d]), + SUPERTYPE_OF, + ) + ) == { + Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.a), + Constraint( + type_var=fx.ts, op=SUPERTYPE_OF, target=TupleType([fx.b, fx.c], fx.std_tuple) + ), + Constraint( + type_var=fx.ts, op=SUBTYPE_OF, target=TupleType([fx.b, 
fx.c], fx.std_tuple) + ), + Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.d), + } + + def test_unpack_homogeneous_tuple(self) -> None: + fx = self.fx + assert set( + infer_constraints( + Instance(fx.gvi, [UnpackType(Instance(fx.std_tuplei, [fx.t]))]), + Instance(fx.gvi, [fx.a, fx.b]), + SUPERTYPE_OF, + ) + ) == { + Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.a), + Constraint(type_var=fx.t, op=SUBTYPE_OF, target=fx.a), + Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.b), + Constraint(type_var=fx.t, op=SUBTYPE_OF, target=fx.b), + } + + def test_unpack_homogeneous_tuple_with_prefix_and_suffix(self) -> None: + fx = self.fx + assert set( + infer_constraints( + Instance(fx.gv2i, [fx.t, UnpackType(Instance(fx.std_tuplei, [fx.s])), fx.u]), + Instance(fx.gv2i, [fx.a, fx.b, fx.c, fx.d]), + SUPERTYPE_OF, + ) + ) == { + Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.a), + Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.b), + Constraint(type_var=fx.s, op=SUBTYPE_OF, target=fx.b), + Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.c), + Constraint(type_var=fx.s, op=SUBTYPE_OF, target=fx.c), + Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.d), + } + + def test_unpack_with_prefix_and_suffix(self) -> None: + fx = self.fx + assert set( + infer_constraints( + Instance(fx.gv2i, [fx.u, fx.t, fx.s, fx.u]), + Instance(fx.gv2i, [fx.a, fx.b, fx.c, fx.d]), + SUPERTYPE_OF, + ) + ) == { + Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.a), + Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.b), + Constraint(type_var=fx.t, op=SUBTYPE_OF, target=fx.b), + Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.c), + Constraint(type_var=fx.s, op=SUBTYPE_OF, target=fx.c), + Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.d), + } + + def test_unpack_tuple_length_non_match(self) -> None: + fx = self.fx + assert set( + infer_constraints( + Instance(fx.gv2i, [fx.u, fx.t, fx.s, fx.u]), + Instance(fx.gv2i, [fx.a, fx.b, fx.d]), + SUPERTYPE_OF, + ) + # We still get constraints on the prefix/suffix in this case. + ) == { + Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.a), + Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.d), + } + + def test_var_length_tuple_with_fixed_length_tuple(self) -> None: + fx = self.fx + assert not infer_constraints( + TupleType([fx.t, fx.s], fallback=Instance(fx.std_tuplei, [fx.o])), + Instance(fx.std_tuplei, [fx.a]), + SUPERTYPE_OF, + ) diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testdaemon.py b/.venv/lib/python3.12/site-packages/mypy/test/testdaemon.py new file mode 100644 index 0000000..7115e68 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testdaemon.py @@ -0,0 +1,132 @@ +"""End-to-end test cases for the daemon (dmypy). + +These are special because they run multiple shell commands. + +This also includes some unit tests. +""" + +from __future__ import annotations + +import os +import subprocess +import sys +import tempfile +import unittest + +from mypy.dmypy_server import filter_out_missing_top_level_packages +from mypy.fscache import FileSystemCache +from mypy.modulefinder import SearchPaths +from mypy.test.config import PREFIX, test_temp_dir +from mypy.test.data import DataDrivenTestCase, DataSuite +from mypy.test.helpers import assert_string_arrays_equal, normalize_error_messages + +# Files containing test cases descriptions. 
+daemon_files = ["daemon.test"] + + +class DaemonSuite(DataSuite): + files = daemon_files + + def run_case(self, testcase: DataDrivenTestCase) -> None: + try: + test_daemon(testcase) + finally: + # Kill the daemon if it's still running. + run_cmd("dmypy kill") + + +def test_daemon(testcase: DataDrivenTestCase) -> None: + assert testcase.old_cwd is not None, "test was not properly set up" + for i, step in enumerate(parse_script(testcase.input)): + cmd = step[0] + expected_lines = step[1:] + assert cmd.startswith("$") + cmd = cmd[1:].strip() + cmd = cmd.replace("{python}", sys.executable) + sts, output = run_cmd(cmd) + output_lines = output.splitlines() + output_lines = normalize_error_messages(output_lines) + if sts: + output_lines.append("== Return code: %d" % sts) + assert_string_arrays_equal( + expected_lines, + output_lines, + "Command %d (%s) did not give expected output" % (i + 1, cmd), + ) + + +def parse_script(input: list[str]) -> list[list[str]]: + """Parse testcase.input into steps. + + Each command starts with a line starting with '$'. + The first line (less '$') is sent to the shell. + The remaining lines are expected output. + """ + steps = [] + step: list[str] = [] + for line in input: + if line.startswith("$"): + if step: + assert step[0].startswith("$") + steps.append(step) + step = [] + step.append(line) + if step: + steps.append(step) + return steps + + +def run_cmd(input: str) -> tuple[int, str]: + if input[1:].startswith("mypy run --") and "--show-error-codes" not in input: + input += " --hide-error-codes" + if input.startswith("dmypy "): + input = sys.executable + " -m mypy." + input + if input.startswith("mypy "): + input = sys.executable + " -m" + input + env = os.environ.copy() + env["PYTHONPATH"] = PREFIX + try: + output = subprocess.check_output( + input, shell=True, stderr=subprocess.STDOUT, text=True, cwd=test_temp_dir, env=env + ) + return 0, output + except subprocess.CalledProcessError as err: + return err.returncode, err.output + + +class DaemonUtilitySuite(unittest.TestCase): + """Unit tests for helpers""" + + def test_filter_out_missing_top_level_packages(self) -> None: + with tempfile.TemporaryDirectory() as td: + self.make_file(td, "base/a/") + self.make_file(td, "base/b.py") + self.make_file(td, "base/c.pyi") + self.make_file(td, "base/missing.txt") + self.make_file(td, "typeshed/d.pyi") + self.make_file(td, "typeshed/@python2/e") # outdated + self.make_file(td, "pkg1/f-stubs") + self.make_file(td, "pkg2/g-python2-stubs") # outdated + self.make_file(td, "mpath/sub/long_name/") + + def makepath(p: str) -> str: + return os.path.join(td, p) + + search = SearchPaths( + python_path=(makepath("base"),), + mypy_path=(makepath("mpath/sub"),), + package_path=(makepath("pkg1"), makepath("pkg2")), + typeshed_path=(makepath("typeshed"),), + ) + fscache = FileSystemCache() + res = filter_out_missing_top_level_packages( + {"a", "b", "c", "d", "e", "f", "g", "long_name", "ff", "missing"}, search, fscache + ) + assert res == {"a", "b", "c", "d", "f", "long_name"} + + def make_file(self, base: str, path: str) -> None: + fullpath = os.path.join(base, path) + os.makedirs(os.path.dirname(fullpath), exist_ok=True) + if not path.endswith("/"): + with open(fullpath, "w") as f: + f.write("# test file") diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testdeps.py b/.venv/lib/python3.12/site-packages/mypy/test/testdeps.py new file mode 100644 index 0000000..7c845ea --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testdeps.py @@ -0,0 +1,82 @@ +"""Test 
cases for generating node-level dependencies (for fine-grained incremental checking)""" + +from __future__ import annotations + +import os +import sys +from collections import defaultdict + +import pytest + +from mypy import build +from mypy.errors import CompileError +from mypy.modulefinder import BuildSource +from mypy.nodes import Expression, MypyFile +from mypy.options import Options +from mypy.server.deps import get_dependencies +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase, DataSuite +from mypy.test.helpers import assert_string_arrays_equal, find_test_files, parse_options +from mypy.types import Type +from mypy.typestate import type_state + +# Only dependencies in these modules are dumped +dumped_modules = ["__main__", "pkg", "pkg.mod"] + + +class GetDependenciesSuite(DataSuite): + files = find_test_files(pattern="deps*.test") + + def run_case(self, testcase: DataDrivenTestCase) -> None: + src = "\n".join(testcase.input) + dump_all = "# __dump_all__" in src + options = parse_options(src, testcase, incremental_step=1) + if options.python_version > sys.version_info: + pytest.skip("Test case requires a newer Python version") + options.use_builtins_fixtures = True + options.show_traceback = True + options.cache_dir = os.devnull + options.export_types = True + options.preserve_asts = True + options.allow_empty_bodies = True + messages, files, type_map = self.build(src, options) + a = messages + if files is None or type_map is None: + if not a: + a = ["Unknown compile error (likely syntax error in test case or fixture)"] + else: + deps: defaultdict[str, set[str]] = defaultdict(set) + for module, file in files.items(): + if (module in dumped_modules or dump_all) and (module in testcase.test_modules): + new_deps = get_dependencies(file, type_map, options.python_version, options) + for source in new_deps: + deps[source].update(new_deps[source]) + + type_state.add_all_protocol_deps(deps) + + for source, targets in sorted(deps.items()): + if source.startswith(("<enum", "<typing", "<mypy")): + # Remove noise. + continue + line = f"{source} -> {', '.join(sorted(targets))}" + # Clean up output a bit + line = line.replace("__main__", "m") + a.append(line) + + assert_string_arrays_equal( + testcase.output, a, f"Invalid output ({testcase.file}, line {testcase.line})" + ) + + def build( + self, source: str, options: Options + ) -> tuple[list[str], dict[str, MypyFile] | None, dict[Expression, Type] | None]: + try: + result = build.build( + sources=[BuildSource("main", None, source)], + options=options, + alt_lib_path=test_temp_dir, + ) + except CompileError as e: + # TODO: Should perhaps not return None here.
+ return e.messages, None, None + return result.errors, result.files, result.types diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testdiff.py b/.venv/lib/python3.12/site-packages/mypy/test/testdiff.py new file mode 100644 index 0000000..0559b33 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testdiff.py @@ -0,0 +1,70 @@ +"""Test cases for AST diff (used for fine-grained incremental checking)""" + +from __future__ import annotations + +import os +import sys + +import pytest + +from mypy import build +from mypy.errors import CompileError +from mypy.modulefinder import BuildSource +from mypy.nodes import MypyFile +from mypy.options import Options +from mypy.server.astdiff import compare_symbol_table_snapshots, snapshot_symbol_table +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase, DataSuite +from mypy.test.helpers import assert_string_arrays_equal, parse_options + + +class ASTDiffSuite(DataSuite): + files = ["diff.test"] + + def run_case(self, testcase: DataDrivenTestCase) -> None: + first_src = "\n".join(testcase.input) + files_dict = dict(testcase.files) + second_src = files_dict["tmp/next.py"] + options = parse_options(first_src, testcase, 1) + if options.python_version > sys.version_info: + pytest.skip("Test case requires a newer Python version") + + messages1, files1 = self.build(first_src, options) + messages2, files2 = self.build(second_src, options) + + a = [] + if messages1: + a.extend(messages1) + if messages2: + a.append("== next ==") + a.extend(messages2) + + assert ( + files1 is not None and files2 is not None + ), "cases where CompileError occurred should not be run" + prefix = "__main__" + snapshot1 = snapshot_symbol_table(prefix, files1["__main__"].names) + snapshot2 = snapshot_symbol_table(prefix, files2["__main__"].names) + diff = compare_symbol_table_snapshots(prefix, snapshot1, snapshot2) + for trigger in sorted(diff): + a.append(trigger) + + assert_string_arrays_equal( + testcase.output, a, f"Invalid output ({testcase.file}, line {testcase.line})" + ) + + def build(self, source: str, options: Options) -> tuple[list[str], dict[str, MypyFile] | None]: + options.use_builtins_fixtures = True + options.show_traceback = True + options.cache_dir = os.devnull + options.allow_empty_bodies = True + try: + result = build.build( + sources=[BuildSource("main", None, source)], + options=options, + alt_lib_path=test_temp_dir, + ) + except CompileError as e: + # TODO: Is it okay to return None? + return e.messages, None + return result.errors, result.files diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testerrorstream.py b/.venv/lib/python3.12/site-packages/mypy/test/testerrorstream.py new file mode 100644 index 0000000..a54a349 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testerrorstream.py @@ -0,0 +1,46 @@ +"""Tests for mypy incremental error output.""" + +from __future__ import annotations + +from mypy import build +from mypy.errors import CompileError +from mypy.modulefinder import BuildSource +from mypy.options import Options +from mypy.test.data import DataDrivenTestCase, DataSuite +from mypy.test.helpers import assert_string_arrays_equal + + +class ErrorStreamSuite(DataSuite): + required_out_section = True + base_path = "." + files = ["errorstream.test"] + + def run_case(self, testcase: DataDrivenTestCase) -> None: + test_error_stream(testcase) + + +def test_error_stream(testcase: DataDrivenTestCase) -> None: + """Perform a single error streaming test case. 
+ + The argument contains the description of the test case. + """ + options = Options() + options.show_traceback = True + options.hide_error_codes = True + + logged_messages: list[str] = [] + + def flush_errors(filename: str | None, msgs: list[str], serious: bool) -> None: + if msgs: + logged_messages.append("==== Errors flushed ====") + logged_messages.extend(msgs) + + sources = [BuildSource("main", "__main__", "\n".join(testcase.input))] + try: + build.build(sources=sources, options=options, flush_errors=flush_errors) + except CompileError as e: + assert e.messages == [] + + assert_string_arrays_equal( + testcase.output, logged_messages, f"Invalid output ({testcase.file}, line {testcase.line})" + ) diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testexportjson.py b/.venv/lib/python3.12/site-packages/mypy/test/testexportjson.py new file mode 100644 index 0000000..13bd96d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testexportjson.py @@ -0,0 +1,70 @@ +"""Test cases for the mypy cache JSON export tool.""" + +from __future__ import annotations + +import json +import os +import re +import sys + +from mypy import build +from mypy.errors import CompileError +from mypy.exportjson import convert_binary_cache_to_json +from mypy.modulefinder import BuildSource +from mypy.options import Options +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase, DataSuite +from mypy.test.helpers import assert_string_arrays_equal + + +class TypeExportSuite(DataSuite): + required_out_section = True + files = ["exportjson.test"] + + def run_case(self, testcase: DataDrivenTestCase) -> None: + error = False + src = "\n".join(testcase.input) + try: + options = Options() + options.use_builtins_fixtures = True + options.show_traceback = True + options.allow_empty_bodies = True + options.fixed_format_cache = True + fnam = os.path.join(self.base_path, "main.py") + with open(fnam, "w") as f: + f.write(src) + result = build.build( + sources=[BuildSource(fnam, "main")], options=options, alt_lib_path=test_temp_dir + ) + a = result.errors + error = bool(a) + + major, minor = sys.version_info[:2] + cache_dir = os.path.join(".mypy_cache", f"{major}.{minor}") + + for module in result.files: + if module in ( + "builtins", + "typing", + "_typeshed", + "__future__", + "typing_extensions", + "sys", + ): + continue + fnam = os.path.join(cache_dir, f"{module}.data.ff") + with open(fnam, "rb") as f: + json_data = convert_binary_cache_to_json(f.read(), implicit_names=False) + for line in json.dumps(json_data, indent=4).splitlines(): + if '"path": ' in line: + # We source file path is unpredictable, so filter it out + line = re.sub(r'"[^"]+\.pyi?"', "...", line) + assert "ERROR" not in line, line + a.append(line) + except CompileError as e: + a = e.messages + error = True + if error or "\n".join(testcase.output).strip() != "": + assert_string_arrays_equal( + testcase.output, a, f"Invalid output ({testcase.file}, line {testcase.line})" + ) diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testfinegrained.py b/.venv/lib/python3.12/site-packages/mypy/test/testfinegrained.py new file mode 100644 index 0000000..b098c1f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testfinegrained.py @@ -0,0 +1,441 @@ +"""Test cases for fine-grained incremental checking. + +Each test cases runs a batch build followed by one or more fine-grained +incremental steps. We verify that each step produces the expected output. 
+ +See the comment at the top of test-data/unit/fine-grained.test for more +information. + +N.B.: Unlike most of the other test suites, testfinegrained does not +rely on an alt_lib_path for finding source files. This means that they +can test interactions with the lib_path that is built implicitly based +on specified sources. +""" + +from __future__ import annotations + +import os +import re +import sys +import unittest +from typing import Any + +import pytest + +from mypy import build +from mypy.config_parser import parse_config_file +from mypy.dmypy_server import Server +from mypy.dmypy_util import DEFAULT_STATUS_FILE +from mypy.errors import CompileError +from mypy.find_sources import create_source_list +from mypy.modulefinder import BuildSource +from mypy.options import Options +from mypy.server.mergecheck import check_consistency +from mypy.server.update import sort_messages_preserving_file_order +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase, DataSuite, DeleteFile, UpdateFile +from mypy.test.helpers import ( + assert_module_equivalence, + assert_string_arrays_equal, + assert_target_equivalence, + find_test_files, + parse_options, + perform_file_operations, +) + +# Set to True to perform (somewhat expensive) checks for duplicate AST nodes after merge +CHECK_CONSISTENCY = False + + +class FineGrainedSuite(DataSuite): + files = find_test_files( + pattern="fine-grained*.test", exclude=["fine-grained-cache-incremental.test"] + ) + + # Whether to use the fine-grained cache in the testing. This is overridden + # by a trivial subclass to produce a suite that uses the cache. + use_cache = False + + def should_skip(self, testcase: DataDrivenTestCase) -> bool: + # Decide whether to skip the test. This could have been structured + # as a filter() classmethod also, but we want the tests reported + # as skipped, not just elided. + if self.use_cache: + if testcase.only_when == "-only_when_nocache": + return True + # TODO: In caching mode we currently don't well support + # starting from cached states with errors in them. 
+ if testcase.output and testcase.output[0] != "==": + return True + else: + if testcase.only_when == "-only_when_cache": + return True + return False + + def run_case(self, testcase: DataDrivenTestCase) -> None: + if self.should_skip(testcase): + pytest.skip() + + main_src = "\n".join(testcase.input) + main_path = os.path.join(test_temp_dir, "main") + with open(main_path, "w", encoding="utf8") as f: + f.write(main_src) + + options = self.get_options(main_src, testcase, build_cache=False) + if options.python_version > sys.version_info: + pytest.skip("Test case requires a newer Python version") + + build_options = self.get_options(main_src, testcase, build_cache=True) + server = Server(options, DEFAULT_STATUS_FILE) + + num_regular_incremental_steps = self.get_build_steps(main_src) + step = 1 + sources = self.parse_sources(main_src, step, options) + if step <= num_regular_incremental_steps: + messages = self.build(build_options, sources) + else: + messages = self.run_check(server, sources) + + a = [] + if messages: + a.extend(normalize_messages(messages)) + + assert testcase.tmpdir is not None + a.extend(self.maybe_suggest(step, server, main_src, testcase.tmpdir)) + a.extend(self.maybe_inspect(step, server, main_src)) + + if server.fine_grained_manager: + if CHECK_CONSISTENCY: + check_consistency(server.fine_grained_manager) + + steps = testcase.find_steps() + all_triggered = [] + + for operations in steps: + step += 1 + output, triggered = self.perform_step( + operations, + server, + options, + build_options, + testcase, + main_src, + step, + num_regular_incremental_steps, + ) + a.append("==") + a.extend(output) + all_triggered.extend(triggered) + + # Normalize paths in test output (for Windows). + a = [line.replace("\\", "/") for line in a] + + assert_string_arrays_equal( + testcase.output, a, f"Invalid output ({testcase.file}, line {testcase.line})" + ) + + if testcase.triggered: + assert_string_arrays_equal( + testcase.triggered, + self.format_triggered(all_triggered), + f"Invalid active triggers ({testcase.file}, line {testcase.line})", + ) + + def get_options(self, source: str, testcase: DataDrivenTestCase, build_cache: bool) -> Options: + # This handles things like '# flags: --foo'. + options = parse_options(source, testcase, incremental_step=1) + options.incremental = True + options.use_builtins_fixtures = True + options.show_traceback = True + options.error_summary = False + options.fine_grained_incremental = not build_cache + options.use_fine_grained_cache = self.use_cache and not build_cache + options.cache_fine_grained = self.use_cache + options.local_partial_types = True + options.export_types = "inspect" in testcase.file + # Treat empty bodies safely for these test cases. 
+ options.allow_empty_bodies = not testcase.name.endswith("_no_empty") + if re.search("flags:.*--follow-imports", source) is None: + # Override the default for follow_imports + options.follow_imports = "error" + + for name, _ in testcase.files: + if "mypy.ini" in name or "pyproject.toml" in name: + parse_config_file(options, lambda: None, name) + break + + return options + + def run_check(self, server: Server, sources: list[BuildSource]) -> list[str]: + response = server.check(sources, export_types=False, is_tty=False, terminal_width=-1) + out = response["out"] or response["err"] + assert isinstance(out, str) + return out.splitlines() + + def build(self, options: Options, sources: list[BuildSource]) -> list[str]: + try: + result = build.build(sources=sources, options=options) + except CompileError as e: + return e.messages + return result.errors + + def format_triggered(self, triggered: list[list[str]]) -> list[str]: + result = [] + for n, triggers in enumerate(triggered): + filtered = [trigger for trigger in triggers if not trigger.endswith("__>")] + filtered = sorted(filtered) + result.append(("%d: %s" % (n + 2, ", ".join(filtered))).strip()) + return result + + def get_build_steps(self, program_text: str) -> int: + """Get the number of regular incremental steps to run, from the test source""" + if not self.use_cache: + return 0 + m = re.search("# num_build_steps: ([0-9]+)$", program_text, flags=re.MULTILINE) + if m is not None: + return int(m.group(1)) + return 1 + + def perform_step( + self, + operations: list[UpdateFile | DeleteFile], + server: Server, + options: Options, + build_options: Options, + testcase: DataDrivenTestCase, + main_src: str, + step: int, + num_regular_incremental_steps: int, + ) -> tuple[list[str], list[list[str]]]: + """Perform one fine-grained incremental build step (after some file updates/deletions). + + Return (mypy output, triggered targets). 
+ """ + perform_file_operations(operations) + sources = self.parse_sources(main_src, step, options) + + if step <= num_regular_incremental_steps: + new_messages = self.build(build_options, sources) + else: + new_messages = self.run_check(server, sources) + + updated: list[str] = [] + changed: list[str] = [] + targets: list[str] = [] + triggered = [] + if server.fine_grained_manager: + if CHECK_CONSISTENCY: + check_consistency(server.fine_grained_manager) + triggered.append(server.fine_grained_manager.triggered) + + updated = server.fine_grained_manager.updated_modules + changed = [mod for mod, file in server.fine_grained_manager.changed_modules] + targets = server.fine_grained_manager.processed_targets + + expected_stale = testcase.expected_stale_modules.get(step - 1) + if expected_stale is not None: + assert_module_equivalence("stale" + str(step - 1), expected_stale, changed) + + expected_rechecked = testcase.expected_rechecked_modules.get(step - 1) + if expected_rechecked is not None: + assert_module_equivalence("rechecked" + str(step - 1), expected_rechecked, updated) + + expected = testcase.expected_fine_grained_targets.get(step) + if expected: + assert_target_equivalence("targets" + str(step), expected, targets) + + new_messages = normalize_messages(new_messages) + + a = new_messages + assert testcase.tmpdir is not None + a.extend(self.maybe_suggest(step, server, main_src, testcase.tmpdir)) + a.extend(self.maybe_inspect(step, server, main_src)) + + return a, triggered + + def parse_sources( + self, program_text: str, incremental_step: int, options: Options + ) -> list[BuildSource]: + """Return target BuildSources for a test case. + + Normally, the unit tests will check all files included in the test + case. This differs from how testcheck works by default, as dmypy + doesn't currently support following imports. + + You can override this behavior and instruct the tests to check + multiple modules by using a comment like this in the test case + input: + + # cmd: main a.py + + You can also use `# cmdN:` to have a different cmd for incremental + step N (2, 3, ...). + + """ + m = re.search("# cmd: mypy ([a-zA-Z0-9_./ ]+)$", program_text, flags=re.MULTILINE) + regex = f"# cmd{incremental_step}: mypy ([a-zA-Z0-9_./ ]+)$" + alt_m = re.search(regex, program_text, flags=re.MULTILINE) + if alt_m is not None: + # Optionally return a different command if in a later step + # of incremental mode, otherwise default to reusing the + # original cmd. + m = alt_m + + if m: + # The test case wants to use a non-default set of files. 
+ paths = [os.path.join(test_temp_dir, path) for path in m.group(1).strip().split()] + return create_source_list(paths, options) + else: + base = BuildSource(os.path.join(test_temp_dir, "main"), "__main__", None) + # Use expand_dir instead of create_source_list to avoid complaints + # when there aren't any .py files in an increment + return [base] + create_source_list([test_temp_dir], options, allow_empty_dir=True) + + def maybe_suggest(self, step: int, server: Server, src: str, tmp_dir: str) -> list[str]: + output: list[str] = [] + targets = self.get_suggest(src, step) + for flags, target in targets: + json = "--json" in flags + callsites = "--callsites" in flags + no_any = "--no-any" in flags + no_errors = "--no-errors" in flags + m = re.match("--flex-any=([0-9.]+)", flags) + flex_any = float(m.group(1)) if m else None + m = re.match(r"--use-fixme=(\w+)", flags) + use_fixme = m.group(1) if m else None + m = re.match("--max-guesses=([0-9]+)", flags) + max_guesses = int(m.group(1)) if m else None + res: dict[str, Any] = server.cmd_suggest( + target.strip(), + json=json, + no_any=no_any, + no_errors=no_errors, + flex_any=flex_any, + use_fixme=use_fixme, + callsites=callsites, + max_guesses=max_guesses, + ) + val = res["error"] if "error" in res else res["out"] + res["err"] + if json: + # JSON contains already escaped \ on Windows, so requires a bit of care. + val = val.replace("\\\\", "\\") + val = val.replace(os.path.realpath(tmp_dir) + os.path.sep, "") + val = val.replace(os.path.abspath(tmp_dir) + os.path.sep, "") + output.extend(val.strip().split("\n")) + return normalize_messages(output) + + def maybe_inspect(self, step: int, server: Server, src: str) -> list[str]: + output: list[str] = [] + targets = self.get_inspect(src, step) + for flags, location in targets: + m = re.match(r"--show=(\w+)", flags) + show = m.group(1) if m else "type" + verbosity = 0 + if "-v" in flags: + verbosity = 1 + if "-vv" in flags: + verbosity = 2 + m = re.match(r"--limit=([0-9]+)", flags) + limit = int(m.group(1)) if m else 0 + include_span = "--include-span" in flags + include_kind = "--include-kind" in flags + include_object_attrs = "--include-object-attrs" in flags + union_attrs = "--union-attrs" in flags + force_reload = "--force-reload" in flags + res: dict[str, Any] = server.cmd_inspect( + show, + location, + verbosity=verbosity, + limit=limit, + include_span=include_span, + include_kind=include_kind, + include_object_attrs=include_object_attrs, + union_attrs=union_attrs, + force_reload=force_reload, + ) + val = res["error"] if "error" in res else res["out"] + res["err"] + output.extend(val.strip().split("\n")) + return output + + def get_suggest(self, program_text: str, incremental_step: int) -> list[tuple[str, str]]: + step_bit = "1?" if incremental_step == 1 else str(incremental_step) + regex = f"# suggest{step_bit}: (--[a-zA-Z0-9_\\-./=?^ ]+ )*([a-zA-Z0-9_.:/?^ ]+)$" + m = re.findall(regex, program_text, flags=re.MULTILINE) + return m + + def get_inspect(self, program_text: str, incremental_step: int) -> list[tuple[str, str]]: + step_bit = "1?" 
if incremental_step == 1 else str(incremental_step) + regex = f"# inspect{step_bit}: (--[a-zA-Z0-9_\\-=?^ ]+ )*([a-zA-Z0-9_.:/?^ ]+)$" + m = re.findall(regex, program_text, flags=re.MULTILINE) + return m + + +def normalize_messages(messages: list[str]) -> list[str]: + return [re.sub("^tmp" + re.escape(os.sep), "", message) for message in messages] + + +class TestMessageSorting(unittest.TestCase): + def test_simple_sorting(self) -> None: + msgs = ['x.py:1: error: "int" not callable', 'foo/y.py:123: note: "X" not defined'] + old_msgs = ['foo/y.py:12: note: "Y" not defined', 'x.py:8: error: "str" not callable'] + assert sort_messages_preserving_file_order(msgs, old_msgs) == list(reversed(msgs)) + assert sort_messages_preserving_file_order(list(reversed(msgs)), old_msgs) == list( + reversed(msgs) + ) + + def test_long_form_sorting(self) -> None: + # Multi-line errors should be sorted together and not split. + msg1 = [ + 'x.py:1: error: "int" not callable', + "and message continues (x: y)", + " 1()", + " ^~~", + ] + msg2 = [ + 'foo/y.py: In function "f":', + 'foo/y.py:123: note: "X" not defined', + "and again message continues", + ] + old_msgs = ['foo/y.py:12: note: "Y" not defined', 'x.py:8: error: "str" not callable'] + assert sort_messages_preserving_file_order(msg1 + msg2, old_msgs) == msg2 + msg1 + assert sort_messages_preserving_file_order(msg2 + msg1, old_msgs) == msg2 + msg1 + + def test_mypy_error_prefix(self) -> None: + # Some errors don't have a file and start with "mypy: ". These + # shouldn't be sorted together with file-specific errors. + msg1 = 'x.py:1: error: "int" not callable' + msg2 = 'foo/y:123: note: "X" not defined' + msg3 = "mypy: Error not associated with a file" + old_msgs = [ + "mypy: Something wrong", + 'foo/y:12: note: "Y" not defined', + 'x.py:8: error: "str" not callable', + ] + assert sort_messages_preserving_file_order([msg1, msg2, msg3], old_msgs) == [ + msg2, + msg1, + msg3, + ] + assert sort_messages_preserving_file_order([msg3, msg2, msg1], old_msgs) == [ + msg2, + msg1, + msg3, + ] + + def test_new_file_at_the_end(self) -> None: + msg1 = 'x.py:1: error: "int" not callable' + msg2 = 'foo/y.py:123: note: "X" not defined' + new1 = "ab.py:3: error: Problem: error" + new2 = "aaa:3: error: Bad" + old_msgs = ['foo/y.py:12: note: "Y" not defined', 'x.py:8: error: "str" not callable'] + assert sort_messages_preserving_file_order([msg1, msg2, new1], old_msgs) == [ + msg2, + msg1, + new1, + ] + assert sort_messages_preserving_file_order([new1, msg1, msg2, new2], old_msgs) == [ + msg2, + msg1, + new1, + new2, + ] diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testfinegrainedcache.py b/.venv/lib/python3.12/site-packages/mypy/test/testfinegrainedcache.py new file mode 100644 index 0000000..45523a1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testfinegrainedcache.py @@ -0,0 +1,18 @@ +"""Tests for fine-grained incremental checking using the cache. + +All of the real code for this lives in testfinegrained.py. +""" + +# We can't "import FineGrainedSuite from ..." because that will cause pytest +# to collect the non-caching tests when running this file. 
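# Rough illustration of the collection pitfall mentioned above (hypothetical snippet):
# pytest collects any suite class visible in a test module's namespace, so
#
#   from mypy.test.testfinegrained import FineGrainedSuite   # would also be collected here
#
# would run the non-cached suite a second time from this file, whereas
#
#   import mypy.test.testfinegrained                          # only the subclass is collected
#
# keeps just FineGrainedCacheSuite discoverable from this module.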
+from __future__ import annotations + +import mypy.test.testfinegrained + + +class FineGrainedCacheSuite(mypy.test.testfinegrained.FineGrainedSuite): + use_cache = True + test_name_suffix = "_cached" + files = mypy.test.testfinegrained.FineGrainedSuite.files + [ + "fine-grained-cache-incremental.test" + ] diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testformatter.py b/.venv/lib/python3.12/site-packages/mypy/test/testformatter.py new file mode 100644 index 0000000..9f8bb5d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testformatter.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +from unittest import TestCase, main + +from mypy.util import split_words, trim_source_line + + +class FancyErrorFormattingTestCases(TestCase): + def test_trim_source(self) -> None: + assert trim_source_line("0123456789abcdef", max_len=16, col=5, min_width=2) == ( + "0123456789abcdef", + 0, + ) + + # Locations near start. + assert trim_source_line("0123456789abcdef", max_len=7, col=0, min_width=2) == ( + "0123456...", + 0, + ) + assert trim_source_line("0123456789abcdef", max_len=7, col=4, min_width=2) == ( + "0123456...", + 0, + ) + + # Middle locations. + assert trim_source_line("0123456789abcdef", max_len=7, col=5, min_width=2) == ( + "...1234567...", + -2, + ) + assert trim_source_line("0123456789abcdef", max_len=7, col=6, min_width=2) == ( + "...2345678...", + -1, + ) + assert trim_source_line("0123456789abcdef", max_len=7, col=8, min_width=2) == ( + "...456789a...", + 1, + ) + + # Locations near the end. + assert trim_source_line("0123456789abcdef", max_len=7, col=11, min_width=2) == ( + "...789abcd...", + 4, + ) + assert trim_source_line("0123456789abcdef", max_len=7, col=13, min_width=2) == ( + "...9abcdef", + 6, + ) + assert trim_source_line("0123456789abcdef", max_len=7, col=15, min_width=2) == ( + "...9abcdef", + 6, + ) + + def test_split_words(self) -> None: + assert split_words("Simple message") == ["Simple", "message"] + assert split_words('Message with "Some[Long, Types]" in it') == [ + "Message", + "with", + '"Some[Long, Types]"', + "in", + "it", + ] + assert split_words('Message with "Some[Long, Types]" and [error-code]') == [ + "Message", + "with", + '"Some[Long, Types]"', + "and", + "[error-code]", + ] + assert split_words('"Type[Stands, First]" then words') == [ + '"Type[Stands, First]"', + "then", + "words", + ] + assert split_words('First words "Then[Stands, Type]"') == [ + "First", + "words", + '"Then[Stands, Type]"', + ] + assert split_words('"Type[Only, Here]"') == ['"Type[Only, Here]"'] + assert split_words("OneWord") == ["OneWord"] + assert split_words(" ") == ["", ""] + + +if __name__ == "__main__": + main() diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testfscache.py b/.venv/lib/python3.12/site-packages/mypy/test/testfscache.py new file mode 100644 index 0000000..529402d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testfscache.py @@ -0,0 +1,100 @@ +"""Unit tests for file system cache.""" + +from __future__ import annotations + +import os +import shutil +import tempfile +import unittest + +from mypy.fscache import FileSystemCache + + +class TestFileSystemCache(unittest.TestCase): + def setUp(self) -> None: + self.tempdir = tempfile.mkdtemp() + self.oldcwd = os.getcwd() + os.chdir(self.tempdir) + self.fscache = FileSystemCache() + + def tearDown(self) -> None: + os.chdir(self.oldcwd) + shutil.rmtree(self.tempdir) + + def test_isfile_case_1(self) -> None: + self.make_file("bar.py") + 
self.make_file("pkg/sub_package/__init__.py") + self.make_file("pkg/sub_package/foo.py") + # Run twice to test both cached and non-cached code paths. + for i in range(2): + assert self.isfile_case("bar.py") + assert self.isfile_case("pkg/sub_package/__init__.py") + assert self.isfile_case("pkg/sub_package/foo.py") + assert not self.isfile_case("non_existent.py") + assert not self.isfile_case("pkg/non_existent.py") + assert not self.isfile_case("pkg/") + assert not self.isfile_case("bar.py/") + for i in range(2): + assert not self.isfile_case("Bar.py") + assert not self.isfile_case("pkg/sub_package/__init__.PY") + assert not self.isfile_case("pkg/Sub_Package/foo.py") + assert not self.isfile_case("Pkg/sub_package/foo.py") + + def test_isfile_case_2(self) -> None: + self.make_file("bar.py") + self.make_file("pkg/sub_package/__init__.py") + self.make_file("pkg/sub_package/foo.py") + # Run twice to test both cached and non-cached code paths. + # This reverses the order of checks from test_isfile_case_1. + for i in range(2): + assert not self.isfile_case("Bar.py") + assert not self.isfile_case("pkg/sub_package/__init__.PY") + assert not self.isfile_case("pkg/Sub_Package/foo.py") + assert not self.isfile_case("Pkg/sub_package/foo.py") + for i in range(2): + assert self.isfile_case("bar.py") + assert self.isfile_case("pkg/sub_package/__init__.py") + assert self.isfile_case("pkg/sub_package/foo.py") + assert not self.isfile_case("non_existent.py") + assert not self.isfile_case("pkg/non_existent.py") + + def test_isfile_case_3(self) -> None: + self.make_file("bar.py") + self.make_file("pkg/sub_package/__init__.py") + self.make_file("pkg/sub_package/foo.py") + # Run twice to test both cached and non-cached code paths. + for i in range(2): + assert self.isfile_case("bar.py") + assert not self.isfile_case("non_existent.py") + assert not self.isfile_case("pkg/non_existent.py") + assert not self.isfile_case("Bar.py") + assert not self.isfile_case("pkg/sub_package/__init__.PY") + assert not self.isfile_case("pkg/Sub_Package/foo.py") + assert not self.isfile_case("Pkg/sub_package/foo.py") + assert self.isfile_case("pkg/sub_package/__init__.py") + assert self.isfile_case("pkg/sub_package/foo.py") + + def test_isfile_case_other_directory(self) -> None: + self.make_file("bar.py") + with tempfile.TemporaryDirectory() as other: + self.make_file("other_dir.py", base=other) + self.make_file("pkg/other_dir.py", base=other) + assert self.isfile_case(os.path.join(other, "other_dir.py")) + assert not self.isfile_case(os.path.join(other, "Other_Dir.py")) + assert not self.isfile_case(os.path.join(other, "bar.py")) + if os.path.exists(os.path.join(other, "PKG/other_dir.py")): + # We only check case for directories under our prefix, and since + # this path is not under the prefix, case difference is fine. 
+ assert self.isfile_case(os.path.join(other, "PKG/other_dir.py")) + + def make_file(self, path: str, base: str | None = None) -> None: + if base is None: + base = self.tempdir + fullpath = os.path.join(base, path) + os.makedirs(os.path.dirname(fullpath), exist_ok=True) + if not path.endswith("/"): + with open(fullpath, "w") as f: + f.write("# test file") + + def isfile_case(self, path: str) -> bool: + return self.fscache.isfile_case(os.path.join(self.tempdir, path), self.tempdir) diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testgraph.py b/.venv/lib/python3.12/site-packages/mypy/test/testgraph.py new file mode 100644 index 0000000..c87eb66 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testgraph.py @@ -0,0 +1,83 @@ +"""Test cases for graph processing code in build.py.""" + +from __future__ import annotations + +import sys +from collections.abc import Set as AbstractSet + +from mypy.build import BuildManager, BuildSourceSet, State, order_ascc, sorted_components +from mypy.errors import Errors +from mypy.fscache import FileSystemCache +from mypy.graph_utils import strongly_connected_components, topsort +from mypy.modulefinder import SearchPaths +from mypy.options import Options +from mypy.plugin import Plugin +from mypy.report import Reports +from mypy.test.helpers import Suite, assert_equal +from mypy.version import __version__ + + +class GraphSuite(Suite): + def test_topsort(self) -> None: + a = frozenset({"A"}) + b = frozenset({"B"}) + c = frozenset({"C"}) + d = frozenset({"D"}) + data: dict[AbstractSet[str], set[AbstractSet[str]]] = {a: {b, c}, b: {d}, c: {d}} + res = list(topsort(data)) + assert_equal(res, [{d}, {b, c}, {a}]) + + def test_scc(self) -> None: + vertices = {"A", "B", "C", "D"} + edges: dict[str, list[str]] = {"A": ["B", "C"], "B": ["C"], "C": ["B", "D"], "D": []} + sccs = {frozenset(x) for x in strongly_connected_components(vertices, edges)} + assert_equal(sccs, {frozenset({"A"}), frozenset({"B", "C"}), frozenset({"D"})}) + + def _make_manager(self) -> BuildManager: + options = Options() + options.use_builtins_fixtures = True + errors = Errors(options) + fscache = FileSystemCache() + search_paths = SearchPaths((), (), (), ()) + manager = BuildManager( + data_dir="", + search_paths=search_paths, + ignore_prefix="", + source_set=BuildSourceSet([]), + reports=Reports("", {}), + options=options, + version_id=__version__, + plugin=Plugin(options), + plugins_snapshot={}, + errors=errors, + flush_errors=lambda filename, msgs, serious: None, + fscache=fscache, + stdout=sys.stdout, + stderr=sys.stderr, + ) + return manager + + def test_sorted_components(self) -> None: + manager = self._make_manager() + graph = { + "a": State("a", None, "import b, c", manager), + "d": State("d", None, "pass", manager), + "b": State("b", None, "import c", manager), + "c": State("c", None, "import b, d", manager), + } + res = [scc.mod_ids for scc in sorted_components(graph)] + assert_equal(res, [{"d"}, {"c", "b"}, {"a"}]) + + def test_order_ascc(self) -> None: + manager = self._make_manager() + graph = { + "a": State("a", None, "import b, c", manager), + "d": State("d", None, "def f(): import a", manager), + "b": State("b", None, "import c", manager), + "c": State("c", None, "import b, d", manager), + } + res = [scc.mod_ids for scc in sorted_components(graph)] + assert_equal(res, [frozenset({"a", "d", "c", "b"})]) + ascc = res[0] + scc = order_ascc(graph, ascc) + assert_equal(scc, ["d", "c", "b", "a"]) diff --git 
a/.venv/lib/python3.12/site-packages/mypy/test/testinfer.py b/.venv/lib/python3.12/site-packages/mypy/test/testinfer.py new file mode 100644 index 0000000..9c18624 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testinfer.py @@ -0,0 +1,373 @@ +"""Test cases for type inference helper functions.""" + +from __future__ import annotations + +from mypy.argmap import map_actuals_to_formals +from mypy.checker import DisjointDict, group_comparison_operands +from mypy.literals import Key +from mypy.nodes import ARG_NAMED, ARG_OPT, ARG_POS, ARG_STAR, ARG_STAR2, ArgKind, NameExpr +from mypy.test.helpers import Suite, assert_equal +from mypy.test.typefixture import TypeFixture +from mypy.types import AnyType, TupleType, Type, TypeOfAny + + +class MapActualsToFormalsSuite(Suite): + """Test cases for argmap.map_actuals_to_formals.""" + + def test_basic(self) -> None: + self.assert_map([], [], []) + + def test_positional_only(self) -> None: + self.assert_map([ARG_POS], [ARG_POS], [[0]]) + self.assert_map([ARG_POS, ARG_POS], [ARG_POS, ARG_POS], [[0], [1]]) + + def test_optional(self) -> None: + self.assert_map([], [ARG_OPT], [[]]) + self.assert_map([ARG_POS], [ARG_OPT], [[0]]) + self.assert_map([ARG_POS], [ARG_OPT, ARG_OPT], [[0], []]) + + def test_callee_star(self) -> None: + self.assert_map([], [ARG_STAR], [[]]) + self.assert_map([ARG_POS], [ARG_STAR], [[0]]) + self.assert_map([ARG_POS, ARG_POS], [ARG_STAR], [[0, 1]]) + + def test_caller_star(self) -> None: + self.assert_map([ARG_STAR], [ARG_STAR], [[0]]) + self.assert_map([ARG_POS, ARG_STAR], [ARG_STAR], [[0, 1]]) + self.assert_map([ARG_STAR], [ARG_POS, ARG_STAR], [[0], [0]]) + self.assert_map([ARG_STAR], [ARG_OPT, ARG_STAR], [[0], [0]]) + + def test_too_many_caller_args(self) -> None: + self.assert_map([ARG_POS], [], []) + self.assert_map([ARG_STAR], [], []) + self.assert_map([ARG_STAR], [ARG_POS], [[0]]) + + def test_tuple_star(self) -> None: + any_type = AnyType(TypeOfAny.special_form) + self.assert_vararg_map([ARG_STAR], [ARG_POS], [[0]], self.make_tuple(any_type)) + self.assert_vararg_map( + [ARG_STAR], [ARG_POS, ARG_POS], [[0], [0]], self.make_tuple(any_type, any_type) + ) + self.assert_vararg_map( + [ARG_STAR], + [ARG_POS, ARG_OPT, ARG_OPT], + [[0], [0], []], + self.make_tuple(any_type, any_type), + ) + + def make_tuple(self, *args: Type) -> TupleType: + return TupleType(list(args), TypeFixture().std_tuple) + + def test_named_args(self) -> None: + self.assert_map(["x"], [(ARG_POS, "x")], [[0]]) + self.assert_map(["y", "x"], [(ARG_POS, "x"), (ARG_POS, "y")], [[1], [0]]) + + def test_some_named_args(self) -> None: + self.assert_map(["y"], [(ARG_OPT, "x"), (ARG_OPT, "y"), (ARG_OPT, "z")], [[], [0], []]) + + def test_missing_named_arg(self) -> None: + self.assert_map(["y"], [(ARG_OPT, "x")], [[]]) + + def test_duplicate_named_arg(self) -> None: + self.assert_map(["x", "x"], [(ARG_OPT, "x")], [[0, 1]]) + + def test_varargs_and_bare_asterisk(self) -> None: + self.assert_map([ARG_STAR], [ARG_STAR, (ARG_NAMED, "x")], [[0], []]) + self.assert_map([ARG_STAR, "x"], [ARG_STAR, (ARG_NAMED, "x")], [[0], [1]]) + + def test_keyword_varargs(self) -> None: + self.assert_map(["x"], [ARG_STAR2], [[0]]) + self.assert_map(["x", ARG_STAR2], [ARG_STAR2], [[0, 1]]) + self.assert_map(["x", ARG_STAR2], [(ARG_POS, "x"), ARG_STAR2], [[0], [1]]) + self.assert_map([ARG_POS, ARG_STAR2], [(ARG_POS, "x"), ARG_STAR2], [[0], [1]]) + + def test_both_kinds_of_varargs(self) -> None: + self.assert_map([ARG_STAR, ARG_STAR2], [(ARG_POS, "x"), (ARG_POS, "y")], [[0, 1], 
[0, 1]]) + + def test_special_cases(self) -> None: + self.assert_map([ARG_STAR], [ARG_STAR, ARG_STAR2], [[0], []]) + self.assert_map([ARG_STAR, ARG_STAR2], [ARG_STAR, ARG_STAR2], [[0], [1]]) + self.assert_map([ARG_STAR2], [(ARG_POS, "x"), ARG_STAR2], [[0], [0]]) + self.assert_map([ARG_STAR2], [ARG_STAR2], [[0]]) + + def assert_map( + self, + caller_kinds_: list[ArgKind | str], + callee_kinds_: list[ArgKind | tuple[ArgKind, str]], + expected: list[list[int]], + ) -> None: + caller_kinds, caller_names = expand_caller_kinds(caller_kinds_) + callee_kinds, callee_names = expand_callee_kinds(callee_kinds_) + result = map_actuals_to_formals( + caller_kinds, + caller_names, + callee_kinds, + callee_names, + lambda i: AnyType(TypeOfAny.special_form), + ) + assert_equal(result, expected) + + def assert_vararg_map( + self, + caller_kinds: list[ArgKind], + callee_kinds: list[ArgKind], + expected: list[list[int]], + vararg_type: Type, + ) -> None: + result = map_actuals_to_formals(caller_kinds, [], callee_kinds, [], lambda i: vararg_type) + assert_equal(result, expected) + + +def expand_caller_kinds( + kinds_or_names: list[ArgKind | str], +) -> tuple[list[ArgKind], list[str | None]]: + kinds = [] + names: list[str | None] = [] + for k in kinds_or_names: + if isinstance(k, str): + kinds.append(ARG_NAMED) + names.append(k) + else: + kinds.append(k) + names.append(None) + return kinds, names + + +def expand_callee_kinds( + kinds_and_names: list[ArgKind | tuple[ArgKind, str]], +) -> tuple[list[ArgKind], list[str | None]]: + kinds = [] + names: list[str | None] = [] + for v in kinds_and_names: + if isinstance(v, tuple): + kinds.append(v[0]) + names.append(v[1]) + else: + kinds.append(v) + names.append(None) + return kinds, names + + +class OperandDisjointDictSuite(Suite): + """Test cases for checker.DisjointDict, which is used for type inference with operands.""" + + def new(self) -> DisjointDict[int, str]: + return DisjointDict() + + def test_independent_maps(self) -> None: + d = self.new() + d.add_mapping({0, 1}, {"group1"}) + d.add_mapping({2, 3, 4}, {"group2"}) + d.add_mapping({5, 6, 7}, {"group3"}) + + self.assertEqual( + d.items(), [({0, 1}, {"group1"}), ({2, 3, 4}, {"group2"}), ({5, 6, 7}, {"group3"})] + ) + + def test_partial_merging(self) -> None: + d = self.new() + d.add_mapping({0, 1}, {"group1"}) + d.add_mapping({1, 2}, {"group2"}) + d.add_mapping({3, 4}, {"group3"}) + d.add_mapping({5, 0}, {"group4"}) + d.add_mapping({5, 6}, {"group5"}) + d.add_mapping({4, 7}, {"group6"}) + + self.assertEqual( + d.items(), + [ + ({0, 1, 2, 5, 6}, {"group1", "group2", "group4", "group5"}), + ({3, 4, 7}, {"group3", "group6"}), + ], + ) + + def test_full_merging(self) -> None: + d = self.new() + d.add_mapping({0, 1, 2}, {"a"}) + d.add_mapping({3, 4, 2}, {"b"}) + d.add_mapping({10, 11, 12}, {"c"}) + d.add_mapping({13, 14, 15}, {"d"}) + d.add_mapping({14, 10, 16}, {"e"}) + d.add_mapping({0, 10}, {"f"}) + + self.assertEqual( + d.items(), + [({0, 1, 2, 3, 4, 10, 11, 12, 13, 14, 15, 16}, {"a", "b", "c", "d", "e", "f"})], + ) + + def test_merge_with_multiple_overlaps(self) -> None: + d = self.new() + d.add_mapping({0, 1, 2}, {"a"}) + d.add_mapping({3, 4, 5}, {"b"}) + d.add_mapping({1, 2, 4, 5}, {"c"}) + d.add_mapping({6, 1, 2, 4, 5}, {"d"}) + d.add_mapping({6, 1, 2, 4, 5}, {"e"}) + + self.assertEqual(d.items(), [({0, 1, 2, 3, 4, 5, 6}, {"a", "b", "c", "d", "e"})]) + + +class OperandComparisonGroupingSuite(Suite): + """Test cases for checker.group_comparison_operands.""" + + def literal_keymap(self, assignable_operands: 
dict[int, NameExpr]) -> dict[int, Key]: + output: dict[int, Key] = {} + for index, expr in assignable_operands.items(): + output[index] = ("FakeExpr", expr.name) + return output + + def test_basic_cases(self) -> None: + # Note: the grouping function doesn't actually inspect the input exprs, so we + # just default to using NameExprs for simplicity. + x0 = NameExpr("x0") + x1 = NameExpr("x1") + x2 = NameExpr("x2") + x3 = NameExpr("x3") + x4 = NameExpr("x4") + + basic_input = [("==", x0, x1), ("==", x1, x2), ("<", x2, x3), ("==", x3, x4)] + + none_assignable = self.literal_keymap({}) + all_assignable = self.literal_keymap({0: x0, 1: x1, 2: x2, 3: x3, 4: x4}) + + for assignable in [none_assignable, all_assignable]: + self.assertEqual( + group_comparison_operands(basic_input, assignable, set()), + [("==", [0, 1]), ("==", [1, 2]), ("<", [2, 3]), ("==", [3, 4])], + ) + self.assertEqual( + group_comparison_operands(basic_input, assignable, {"=="}), + [("==", [0, 1, 2]), ("<", [2, 3]), ("==", [3, 4])], + ) + self.assertEqual( + group_comparison_operands(basic_input, assignable, {"<"}), + [("==", [0, 1]), ("==", [1, 2]), ("<", [2, 3]), ("==", [3, 4])], + ) + self.assertEqual( + group_comparison_operands(basic_input, assignable, {"==", "<"}), + [("==", [0, 1, 2]), ("<", [2, 3]), ("==", [3, 4])], + ) + + def test_multiple_groups(self) -> None: + x0 = NameExpr("x0") + x1 = NameExpr("x1") + x2 = NameExpr("x2") + x3 = NameExpr("x3") + x4 = NameExpr("x4") + x5 = NameExpr("x5") + + self.assertEqual( + group_comparison_operands( + [("==", x0, x1), ("==", x1, x2), ("is", x2, x3), ("is", x3, x4)], + self.literal_keymap({}), + {"==", "is"}, + ), + [("==", [0, 1, 2]), ("is", [2, 3, 4])], + ) + self.assertEqual( + group_comparison_operands( + [("==", x0, x1), ("==", x1, x2), ("==", x2, x3), ("==", x3, x4)], + self.literal_keymap({}), + {"==", "is"}, + ), + [("==", [0, 1, 2, 3, 4])], + ) + self.assertEqual( + group_comparison_operands( + [("is", x0, x1), ("==", x1, x2), ("==", x2, x3), ("==", x3, x4)], + self.literal_keymap({}), + {"==", "is"}, + ), + [("is", [0, 1]), ("==", [1, 2, 3, 4])], + ) + self.assertEqual( + group_comparison_operands( + [("is", x0, x1), ("is", x1, x2), ("<", x2, x3), ("==", x3, x4), ("==", x4, x5)], + self.literal_keymap({}), + {"==", "is"}, + ), + [("is", [0, 1, 2]), ("<", [2, 3]), ("==", [3, 4, 5])], + ) + + def test_multiple_groups_coalescing(self) -> None: + x0 = NameExpr("x0") + x1 = NameExpr("x1") + x2 = NameExpr("x2") + x3 = NameExpr("x3") + x4 = NameExpr("x4") + + nothing_combined = [("==", [0, 1, 2]), ("<", [2, 3]), ("==", [3, 4, 5])] + everything_combined = [("==", [0, 1, 2, 3, 4, 5]), ("<", [2, 3])] + + # Note: We do 'x4 == x0' at the very end! 
+ two_groups = [ + ("==", x0, x1), + ("==", x1, x2), + ("<", x2, x3), + ("==", x3, x4), + ("==", x4, x0), + ] + self.assertEqual( + group_comparison_operands( + two_groups, self.literal_keymap({0: x0, 1: x1, 2: x2, 3: x3, 4: x4, 5: x0}), {"=="} + ), + everything_combined, + "All vars are assignable, everything is combined", + ) + self.assertEqual( + group_comparison_operands( + two_groups, self.literal_keymap({1: x1, 2: x2, 3: x3, 4: x4}), {"=="} + ), + nothing_combined, + "x0 is unassignable, so no combining", + ) + self.assertEqual( + group_comparison_operands( + two_groups, self.literal_keymap({0: x0, 1: x1, 3: x3, 5: x0}), {"=="} + ), + everything_combined, + "Some vars are unassignable but x0 is, so we combine", + ) + self.assertEqual( + group_comparison_operands(two_groups, self.literal_keymap({0: x0, 5: x0}), {"=="}), + everything_combined, + "All vars are unassignable but x0 is, so we combine", + ) + + def test_multiple_groups_different_operators(self) -> None: + x0 = NameExpr("x0") + x1 = NameExpr("x1") + x2 = NameExpr("x2") + x3 = NameExpr("x3") + + groups = [("==", x0, x1), ("==", x1, x2), ("is", x2, x3), ("is", x3, x0)] + keymap = self.literal_keymap({0: x0, 1: x1, 2: x2, 3: x3, 4: x0}) + self.assertEqual( + group_comparison_operands(groups, keymap, {"==", "is"}), + [("==", [0, 1, 2]), ("is", [2, 3, 4])], + "Different operators can never be combined", + ) + + def test_single_pair(self) -> None: + x0 = NameExpr("x0") + x1 = NameExpr("x1") + + single_comparison = [("==", x0, x1)] + expected_output = [("==", [0, 1])] + + assignable_combinations: list[dict[int, NameExpr]] = [{}, {0: x0}, {1: x1}, {0: x0, 1: x1}] + to_group_by: list[set[str]] = [set(), {"=="}, {"is"}] + + for combo in assignable_combinations: + for operators in to_group_by: + keymap = self.literal_keymap(combo) + self.assertEqual( + group_comparison_operands(single_comparison, keymap, operators), + expected_output, + ) + + def test_empty_pair_list(self) -> None: + # This case should never occur in practice -- ComparisonExprs + # always contain at least one comparison. But in case it does... 
+ + self.assertEqual(group_comparison_operands([], {}, set()), []) + self.assertEqual(group_comparison_operands([], {}, {"=="}), []) diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testipc.py b/.venv/lib/python3.12/site-packages/mypy/test/testipc.py new file mode 100644 index 0000000..0224035 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testipc.py @@ -0,0 +1,119 @@ +from __future__ import annotations + +import sys +import time +from multiprocessing import Queue, get_context +from unittest import TestCase, main + +import pytest + +from mypy.ipc import IPCClient, IPCServer + +CONNECTION_NAME = "dmypy-test-ipc" + + +def server(msg: str, q: Queue[str]) -> None: + server = IPCServer(CONNECTION_NAME) + q.put(server.connection_name) + data = "" + while not data: + with server: + server.write(msg) + data = server.read() + server.cleanup() + + +def server_multi_message_echo(q: Queue[str]) -> None: + server = IPCServer(CONNECTION_NAME) + q.put(server.connection_name) + data = "" + with server: + while data != "quit": + data = server.read() + server.write(data) + server.cleanup() + + +class IPCTests(TestCase): + def setUp(self) -> None: + if sys.platform == "linux": + # The default "fork" start method is potentially unsafe + self.ctx = get_context("forkserver") + else: + self.ctx = get_context("spawn") + + def test_transaction_large(self) -> None: + queue: Queue[str] = self.ctx.Queue() + msg = "t" * 200000 # longer than the max read size of 100_000 + p = self.ctx.Process(target=server, args=(msg, queue), daemon=True) + p.start() + connection_name = queue.get() + with IPCClient(connection_name, timeout=1) as client: + assert client.read() == msg + client.write("test") + queue.close() + queue.join_thread() + p.join() + + def test_connect_twice(self) -> None: + queue: Queue[str] = self.ctx.Queue() + msg = "this is a test message" + p = self.ctx.Process(target=server, args=(msg, queue), daemon=True) + p.start() + connection_name = queue.get() + with IPCClient(connection_name, timeout=1) as client: + assert client.read() == msg + client.write("") # don't let the server hang up yet, we want to connect again. + + with IPCClient(connection_name, timeout=1) as client: + assert client.read() == msg + client.write("test") + queue.close() + queue.join_thread() + p.join() + assert p.exitcode == 0 + + def test_multiple_messages(self) -> None: + queue: Queue[str] = self.ctx.Queue() + p = self.ctx.Process(target=server_multi_message_echo, args=(queue,), daemon=True) + p.start() + connection_name = queue.get() + with IPCClient(connection_name, timeout=1) as client: + # "foo bar" with extra accents on letters. + # In UTF-8 encoding so we don't confuse editors opening this file. + fancy_text = b"f\xcc\xb6o\xcc\xb2\xf0\x9d\x91\x9c \xd0\xb2\xe2\xb7\xa1a\xcc\xb6r\xcc\x93\xcd\x98\xcd\x8c" + client.write(fancy_text.decode("utf-8")) + assert client.read() == fancy_text.decode("utf-8") + + client.write("Test with spaces") + client.write("Test write before reading previous") + time.sleep(0) # yield to the server to force reading of all messages by server. + assert client.read() == "Test with spaces" + assert client.read() == "Test write before reading previous" + + client.write("quit") + assert client.read() == "quit" + queue.close() + queue.join_thread() + p.join() + assert p.exitcode == 0 + + # Run test_connect_twice a lot, in the hopes of finding issues. + # This is really slow, so it is skipped, but can be enabled if + # needed to debug IPC issues. 
+ @pytest.mark.skip + def test_connect_alot(self) -> None: + t0 = time.time() + for i in range(1000): + try: + print(i, "start") + self.test_connect_twice() + finally: + t1 = time.time() + print(i, t1 - t0) + sys.stdout.flush() + t0 = t1 + + +if __name__ == "__main__": + main() diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testmerge.py b/.venv/lib/python3.12/site-packages/mypy/test/testmerge.py new file mode 100644 index 0000000..c2c75f6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testmerge.py @@ -0,0 +1,233 @@ +"""Test cases for AST merge (used for fine-grained incremental checking)""" + +from __future__ import annotations + +import os +import shutil + +from mypy import build +from mypy.build import BuildResult +from mypy.errors import CompileError +from mypy.modulefinder import BuildSource +from mypy.nodes import ( + UNBOUND_IMPORTED, + Expression, + MypyFile, + SymbolTable, + SymbolTableNode, + TypeInfo, + TypeVarExpr, + Var, +) +from mypy.options import Options +from mypy.server.subexpr import get_subexpressions +from mypy.server.update import FineGrainedBuildManager +from mypy.strconv import StrConv +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase, DataSuite +from mypy.test.helpers import assert_string_arrays_equal, normalize_error_messages, parse_options +from mypy.types import Type, TypeStrVisitor +from mypy.util import IdMapper, short_type + +# Which data structures to dump in a test case? +SYMTABLE = "SYMTABLE" +TYPEINFO = " TYPEINFO" +TYPES = "TYPES" +AST = "AST" + + +class ASTMergeSuite(DataSuite): + files = ["merge.test"] + + def setup(self) -> None: + super().setup() + self.str_conv = StrConv(show_ids=True, options=Options()) + assert self.str_conv.id_mapper is not None + self.id_mapper: IdMapper = self.str_conv.id_mapper + self.type_str_conv = TypeStrVisitor(self.id_mapper, options=Options()) + + def run_case(self, testcase: DataDrivenTestCase) -> None: + name = testcase.name + # We use the test case name to decide which data structures to dump. + # Dumping everything would result in very verbose test cases. + if name.endswith("_symtable"): + kind = SYMTABLE + elif name.endswith("_typeinfo"): + kind = TYPEINFO + elif name.endswith("_types"): + kind = TYPES + else: + kind = AST + + main_src = "\n".join(testcase.input) + result = self.build(main_src, testcase) + assert result is not None, "cases where CompileError occurred should not be run" + result.manager.fscache.flush() + fine_grained_manager = FineGrainedBuildManager(result) + + a = [] + if result.errors: + a.extend(result.errors) + + target_path = os.path.join(test_temp_dir, "target.py") + shutil.copy(os.path.join(test_temp_dir, "target.py.next"), target_path) + + a.extend(self.dump(fine_grained_manager, kind, testcase.test_modules)) + old_subexpr = get_subexpressions(result.manager.modules["target"]) + + a.append("==>") + + new_file, new_types = self.build_increment(fine_grained_manager, "target", target_path) + a.extend(self.dump(fine_grained_manager, kind, testcase.test_modules)) + + for expr in old_subexpr: + if isinstance(expr, TypeVarExpr): + # These are merged so we can't perform the check. + continue + # Verify that old AST nodes are removed from the expression type map. 
+ assert expr not in new_types + + if testcase.normalize_output: + a = normalize_error_messages(a) + + assert_string_arrays_equal( + testcase.output, a, f"Invalid output ({testcase.file}, line {testcase.line})" + ) + + def build(self, source: str, testcase: DataDrivenTestCase) -> BuildResult | None: + options = parse_options(source, testcase, incremental_step=1) + options.incremental = True + options.fine_grained_incremental = True + options.use_builtins_fixtures = True + options.export_types = True + options.show_traceback = True + options.allow_empty_bodies = True + main_path = os.path.join(test_temp_dir, "main") + + self.str_conv.options = options + self.type_str_conv.options = options + with open(main_path, "w", encoding="utf8") as f: + f.write(source) + try: + result = build.build( + sources=[BuildSource(main_path, None, None)], + options=options, + alt_lib_path=test_temp_dir, + ) + except CompileError: + # TODO: Is it okay to return None? + return None + return result + + def build_increment( + self, manager: FineGrainedBuildManager, module_id: str, path: str + ) -> tuple[MypyFile, dict[Expression, Type]]: + manager.flush_cache() + manager.update([(module_id, path)], []) + module = manager.manager.modules[module_id] + type_map = manager.graph[module_id].type_map() + return module, type_map + + def dump( + self, manager: FineGrainedBuildManager, kind: str, test_modules: list[str] + ) -> list[str]: + modules = { + name: file for name, file in manager.manager.modules.items() if name in test_modules + } + if kind == AST: + return self.dump_asts(modules) + elif kind == TYPEINFO: + return self.dump_typeinfos(modules) + elif kind == SYMTABLE: + return self.dump_symbol_tables(modules) + elif kind == TYPES: + return self.dump_types(modules, manager) + assert False, f"Invalid kind {kind}" + + def dump_asts(self, modules: dict[str, MypyFile]) -> list[str]: + a = [] + for m in sorted(modules): + s = modules[m].accept(self.str_conv) + a.extend(s.splitlines()) + return a + + def dump_symbol_tables(self, modules: dict[str, MypyFile]) -> list[str]: + a = [] + for id in sorted(modules): + a.extend(self.dump_symbol_table(id, modules[id].names)) + return a + + def dump_symbol_table(self, module_id: str, symtable: SymbolTable) -> list[str]: + a = [f"{module_id}:"] + for name in sorted(symtable): + if name.startswith("__"): + continue + a.append(f" {name}: {self.format_symbol_table_node(symtable[name])}") + return a + + def format_symbol_table_node(self, node: SymbolTableNode) -> str: + if node.node is None: + if node.kind == UNBOUND_IMPORTED: + return "UNBOUND_IMPORTED" + return "None" + s = f"{str(type(node.node).__name__)}<{self.id_mapper.id(node.node)}>" + if ( + isinstance(node.node, Var) + and node.node.type + and not node.node.fullname.startswith("typing.") + ): + typestr = self.format_type(node.node.type) + s += f"({typestr})" + return s + + def dump_typeinfos(self, modules: dict[str, MypyFile]) -> list[str]: + a = [] + for id in sorted(modules): + a.extend(self.dump_typeinfos_recursive(modules[id].names)) + return a + + def dump_typeinfos_recursive(self, names: SymbolTable) -> list[str]: + a = [] + for name, node in sorted(names.items(), key=lambda x: x[0]): + if isinstance(node.node, TypeInfo): + a.extend(self.dump_typeinfo(node.node)) + a.extend(self.dump_typeinfos_recursive(node.node.names)) + return a + + def dump_typeinfo(self, info: TypeInfo) -> list[str]: + if info.fullname == "enum.Enum": + # Avoid noise + return [] + s = info.dump(str_conv=self.str_conv, 
type_str_conv=self.type_str_conv) + return s.splitlines() + + def dump_types( + self, modules: dict[str, MypyFile], manager: FineGrainedBuildManager + ) -> list[str]: + a = [] + # To make the results repeatable, we try to generate unique and + # deterministic sort keys. + for module_id in sorted(modules): + all_types = manager.manager.all_types + # Compute a module type map from the global type map + tree = manager.graph[module_id].tree + assert tree is not None + type_map = { + node: all_types[node] for node in get_subexpressions(tree) if node in all_types + } + if type_map: + a.append(f"## {module_id}") + for expr in sorted( + type_map, + key=lambda n: ( + n.line, + short_type(n), + n.str_with_options(self.str_conv.options) + str(type_map[n]), + ), + ): + typ = type_map[expr] + a.append(f"{short_type(expr)}:{expr.line}: {self.format_type(typ)}") + return a + + def format_type(self, typ: Type) -> str: + return typ.accept(self.type_str_conv) diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testmodulefinder.py b/.venv/lib/python3.12/site-packages/mypy/test/testmodulefinder.py new file mode 100644 index 0000000..d4ee3af --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testmodulefinder.py @@ -0,0 +1,284 @@ +from __future__ import annotations + +import os + +from mypy.modulefinder import FindModuleCache, ModuleNotFoundReason, SearchPaths +from mypy.options import Options +from mypy.test.config import package_path +from mypy.test.helpers import Suite, assert_equal + +data_path = os.path.relpath(os.path.join(package_path, "modulefinder")) + + +class ModuleFinderSuite(Suite): + def setUp(self) -> None: + self.search_paths = SearchPaths( + python_path=(), + mypy_path=( + os.path.join(data_path, "nsx-pkg1"), + os.path.join(data_path, "nsx-pkg2"), + os.path.join(data_path, "nsx-pkg3"), + os.path.join(data_path, "nsy-pkg1"), + os.path.join(data_path, "nsy-pkg2"), + os.path.join(data_path, "pkg1"), + os.path.join(data_path, "pkg2"), + ), + package_path=(), + typeshed_path=(), + ) + options = Options() + options.namespace_packages = True + self.fmc_ns = FindModuleCache(self.search_paths, fscache=None, options=options) + + options = Options() + options.namespace_packages = False + self.fmc_nons = FindModuleCache(self.search_paths, fscache=None, options=options) + + def test__no_namespace_packages__nsx(self) -> None: + """ + If namespace_packages is False, we shouldn't find nsx + """ + found_module = self.fmc_nons.find_module("nsx") + assert_equal(ModuleNotFoundReason.NOT_FOUND, found_module) + + def test__no_namespace_packages__nsx_a(self) -> None: + """ + If namespace_packages is False, we shouldn't find nsx.a. + """ + found_module = self.fmc_nons.find_module("nsx.a") + assert_equal(ModuleNotFoundReason.NOT_FOUND, found_module) + + def test__no_namespace_packages__find_a_in_pkg1(self) -> None: + """ + Find find pkg1/a.py for "a" with namespace_packages False. + """ + found_module = self.fmc_nons.find_module("a") + expected = os.path.abspath(os.path.join(data_path, "pkg1", "a.py")) + assert_equal(expected, found_module) + + def test__no_namespace_packages__find_b_in_pkg2(self) -> None: + found_module = self.fmc_ns.find_module("b") + expected = os.path.abspath(os.path.join(data_path, "pkg2", "b", "__init__.py")) + assert_equal(expected, found_module) + + def test__find_nsx_as_namespace_pkg_in_pkg1(self) -> None: + """ + There's no __init__.py in any of the nsx dirs, return + the path to the first one found in mypypath. 
+ """ + found_module = self.fmc_ns.find_module("nsx") + expected = os.path.abspath(os.path.join(data_path, "nsx-pkg1", "nsx")) + assert_equal(expected, found_module) + + def test__find_nsx_a_init_in_pkg1(self) -> None: + """ + Find nsx-pkg1/nsx/a/__init__.py for "nsx.a" in namespace mode. + """ + found_module = self.fmc_ns.find_module("nsx.a") + expected = os.path.abspath(os.path.join(data_path, "nsx-pkg1", "nsx", "a", "__init__.py")) + assert_equal(expected, found_module) + + def test__find_nsx_b_init_in_pkg2(self) -> None: + """ + Find nsx-pkg2/nsx/b/__init__.py for "nsx.b" in namespace mode. + """ + found_module = self.fmc_ns.find_module("nsx.b") + expected = os.path.abspath(os.path.join(data_path, "nsx-pkg2", "nsx", "b", "__init__.py")) + assert_equal(expected, found_module) + + def test__find_nsx_c_c_in_pkg3(self) -> None: + """ + Find nsx-pkg3/nsx/c/c.py for "nsx.c.c" in namespace mode. + """ + found_module = self.fmc_ns.find_module("nsx.c.c") + expected = os.path.abspath(os.path.join(data_path, "nsx-pkg3", "nsx", "c", "c.py")) + assert_equal(expected, found_module) + + def test__find_nsy_a__init_pyi(self) -> None: + """ + Prefer nsy-pkg1/a/__init__.pyi file over __init__.py. + """ + found_module = self.fmc_ns.find_module("nsy.a") + expected = os.path.abspath(os.path.join(data_path, "nsy-pkg1", "nsy", "a", "__init__.pyi")) + assert_equal(expected, found_module) + + def test__find_nsy_b__init_py(self) -> None: + """ + There is a nsy-pkg2/nsy/b.pyi, but also a nsy-pkg2/nsy/b/__init__.py. + We expect to find the latter when looking up "nsy.b" as + a package is preferred over a module. + """ + found_module = self.fmc_ns.find_module("nsy.b") + expected = os.path.abspath(os.path.join(data_path, "nsy-pkg2", "nsy", "b", "__init__.py")) + assert_equal(expected, found_module) + + def test__find_nsy_c_pyi(self) -> None: + """ + There is a nsy-pkg2/nsy/c.pyi and nsy-pkg2/nsy/c.py + We expect to find the former when looking up "nsy.b" as + .pyi is preferred over .py. 
+ """ + found_module = self.fmc_ns.find_module("nsy.c") + expected = os.path.abspath(os.path.join(data_path, "nsy-pkg2", "nsy", "c.pyi")) + assert_equal(expected, found_module) + + def test__find_a_in_pkg1(self) -> None: + found_module = self.fmc_ns.find_module("a") + expected = os.path.abspath(os.path.join(data_path, "pkg1", "a.py")) + assert_equal(expected, found_module) + + def test__find_b_init_in_pkg2(self) -> None: + found_module = self.fmc_ns.find_module("b") + expected = os.path.abspath(os.path.join(data_path, "pkg2", "b", "__init__.py")) + assert_equal(expected, found_module) + + def test__find_d_nowhere(self) -> None: + found_module = self.fmc_ns.find_module("d") + assert_equal(ModuleNotFoundReason.NOT_FOUND, found_module) + + +class ModuleFinderSitePackagesSuite(Suite): + def setUp(self) -> None: + self.package_dir = os.path.relpath( + os.path.join(package_path, "modulefinder-site-packages") + ) + + package_paths = ( + os.path.join(self.package_dir, "baz"), + os.path.join(self.package_dir, "..", "not-a-directory"), + os.path.join(self.package_dir, "..", "modulefinder-src"), + self.package_dir, + ) + + self.search_paths = SearchPaths( + python_path=(), + mypy_path=(os.path.join(data_path, "pkg1"),), + package_path=tuple(package_paths), + typeshed_path=(), + ) + options = Options() + options.namespace_packages = True + self.fmc_ns = FindModuleCache(self.search_paths, fscache=None, options=options) + + options = Options() + options.namespace_packages = False + self.fmc_nons = FindModuleCache(self.search_paths, fscache=None, options=options) + + def path(self, *parts: str) -> str: + return os.path.abspath(os.path.join(self.package_dir, *parts)) + + def test__packages_with_ns(self) -> None: + cases = [ + # Namespace package with py.typed + ("ns_pkg_typed", self.path("ns_pkg_typed")), + ("ns_pkg_typed.a", self.path("ns_pkg_typed", "a.py")), + ("ns_pkg_typed.b", self.path("ns_pkg_typed", "b")), + ("ns_pkg_typed.b.c", self.path("ns_pkg_typed", "b", "c.py")), + ("ns_pkg_typed.a.a_var", ModuleNotFoundReason.NOT_FOUND), + # Namespace package without py.typed + ("ns_pkg_untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("ns_pkg_untyped.a", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("ns_pkg_untyped.b", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("ns_pkg_untyped.b.c", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("ns_pkg_untyped.a.a_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + # Namespace package without stub package + ("ns_pkg_w_stubs", self.path("ns_pkg_w_stubs")), + ("ns_pkg_w_stubs.typed", self.path("ns_pkg_w_stubs-stubs", "typed", "__init__.pyi")), + ( + "ns_pkg_w_stubs.typed_inline", + self.path("ns_pkg_w_stubs", "typed_inline", "__init__.py"), + ), + ("ns_pkg_w_stubs.untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + # Regular package with py.typed + ("pkg_typed", self.path("pkg_typed", "__init__.py")), + ("pkg_typed.a", self.path("pkg_typed", "a.py")), + ("pkg_typed.b", self.path("pkg_typed", "b", "__init__.py")), + ("pkg_typed.b.c", self.path("pkg_typed", "b", "c.py")), + ("pkg_typed.a.a_var", ModuleNotFoundReason.NOT_FOUND), + # Regular package with py.typed, bundled stubs, and external stubs-only package + ("pkg_typed_w_stubs", self.path("pkg_typed_w_stubs-stubs", "__init__.pyi")), + ("pkg_typed_w_stubs.spam", self.path("pkg_typed_w_stubs-stubs", "spam.pyi")), + # Regular package without py.typed + ("pkg_untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("pkg_untyped.a", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + 
("pkg_untyped.b", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("pkg_untyped.b.c", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("pkg_untyped.a.a_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + # Top-level Python file in site-packages + ("standalone", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("standalone.standalone_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + # Packages found by following .pth files + ("baz_pkg", self.path("baz", "baz_pkg", "__init__.py")), + ("ns_baz_pkg.a", self.path("baz", "ns_baz_pkg", "a.py")), + ("neighbor_pkg", self.path("..", "modulefinder-src", "neighbor_pkg", "__init__.py")), + ("ns_neighbor_pkg.a", self.path("..", "modulefinder-src", "ns_neighbor_pkg", "a.py")), + # Something that doesn't exist + ("does_not_exist", ModuleNotFoundReason.NOT_FOUND), + # A regular package with an installed set of stubs + ("foo.bar", self.path("foo-stubs", "bar.pyi")), + # A regular, non-site-packages module + ("a", os.path.abspath(os.path.join(data_path, "pkg1", "a.py"))), + ] + for module, expected in cases: + template = "Find(" + module + ") got {}; expected {}" + + actual = self.fmc_ns.find_module(module) + assert_equal(actual, expected, template) + + def test__packages_without_ns(self) -> None: + cases = [ + # Namespace package with py.typed + ("ns_pkg_typed", ModuleNotFoundReason.NOT_FOUND), + ("ns_pkg_typed.a", ModuleNotFoundReason.NOT_FOUND), + ("ns_pkg_typed.b", ModuleNotFoundReason.NOT_FOUND), + ("ns_pkg_typed.b.c", ModuleNotFoundReason.NOT_FOUND), + ("ns_pkg_typed.a.a_var", ModuleNotFoundReason.NOT_FOUND), + # Namespace package without py.typed + ("ns_pkg_untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("ns_pkg_untyped.a", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("ns_pkg_untyped.b", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("ns_pkg_untyped.b.c", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("ns_pkg_untyped.a.a_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + # Namespace package without stub package + ("ns_pkg_w_stubs", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("ns_pkg_w_stubs.typed", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ( + "ns_pkg_w_stubs.typed_inline", + self.path("ns_pkg_w_stubs", "typed_inline", "__init__.py"), + ), + ("ns_pkg_w_stubs.untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + # Regular package with py.typed + ("pkg_typed", self.path("pkg_typed", "__init__.py")), + ("pkg_typed.a", self.path("pkg_typed", "a.py")), + ("pkg_typed.b", self.path("pkg_typed", "b", "__init__.py")), + ("pkg_typed.b.c", self.path("pkg_typed", "b", "c.py")), + ("pkg_typed.a.a_var", ModuleNotFoundReason.NOT_FOUND), + # Regular package with py.typed, bundled stubs, and external stubs-only package + ("pkg_typed_w_stubs", self.path("pkg_typed_w_stubs-stubs", "__init__.pyi")), + ("pkg_typed_w_stubs.spam", self.path("pkg_typed_w_stubs-stubs", "spam.pyi")), + # Regular package without py.typed + ("pkg_untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("pkg_untyped.a", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("pkg_untyped.b", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("pkg_untyped.b.c", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("pkg_untyped.a.a_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + # Top-level Python file in site-packages + ("standalone", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("standalone.standalone_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + # Packages found by following .pth files + ("baz_pkg", 
self.path("baz", "baz_pkg", "__init__.py")), + ("ns_baz_pkg.a", ModuleNotFoundReason.NOT_FOUND), + ("neighbor_pkg", self.path("..", "modulefinder-src", "neighbor_pkg", "__init__.py")), + ("ns_neighbor_pkg.a", ModuleNotFoundReason.NOT_FOUND), + # Something that doesn't exist + ("does_not_exist", ModuleNotFoundReason.NOT_FOUND), + # A regular package with an installed set of stubs + ("foo.bar", self.path("foo-stubs", "bar.pyi")), + # A regular, non-site-packages module + ("a", os.path.abspath(os.path.join(data_path, "pkg1", "a.py"))), + ] + for module, expected in cases: + template = "Find(" + module + ") got {}; expected {}" + + actual = self.fmc_nons.find_module(module) + assert_equal(actual, expected, template) diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testmypyc.py b/.venv/lib/python3.12/site-packages/mypy/test/testmypyc.py new file mode 100644 index 0000000..e8436f4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testmypyc.py @@ -0,0 +1,14 @@ +"""A basic check to make sure that we are using a mypyc-compiled version when expected.""" + +from __future__ import annotations + +import os +from unittest import TestCase + +import mypy + + +class MypycTest(TestCase): + def test_using_mypyc(self) -> None: + if os.getenv("TEST_MYPYC", None) == "1": + assert not mypy.__file__.endswith(".py"), "Expected to find a mypyc-compiled version" diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testoutput.py b/.venv/lib/python3.12/site-packages/mypy/test/testoutput.py new file mode 100644 index 0000000..41f6881 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testoutput.py @@ -0,0 +1,58 @@ +"""Test cases for `--output=json`. + +These cannot be run by the usual unit test runner because of the backslashes in +the output, which get normalized to forward slashes by the test suite on Windows. +""" + +from __future__ import annotations + +import os +import os.path + +from mypy import api +from mypy.defaults import PYTHON3_VERSION +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase, DataSuite + + +class OutputJSONsuite(DataSuite): + files = ["outputjson.test"] + + def run_case(self, testcase: DataDrivenTestCase) -> None: + test_output_json(testcase) + + +def test_output_json(testcase: DataDrivenTestCase) -> None: + """Runs Mypy in a subprocess, and ensures that `--output=json` works as intended.""" + mypy_cmdline = ["--output=json"] + mypy_cmdline.append(f"--python-version={'.'.join(map(str, PYTHON3_VERSION))}") + + # Write the program to a file. + program_path = os.path.join(test_temp_dir, "main") + mypy_cmdline.append(program_path) + with open(program_path, "w", encoding="utf8") as file: + for s in testcase.input: + file.write(f"{s}\n") + + output = [] + # Type check the program. + out, err, returncode = api.run(mypy_cmdline) + # split lines, remove newlines, and remove directory of test case + for line in (out + err).rstrip("\n").splitlines(): + if line.startswith(test_temp_dir + os.sep): + output.append(line[len(test_temp_dir + os.sep) :].rstrip("\r\n")) + else: + output.append(line.rstrip("\r\n")) + + if returncode > 1: + output.append("!!! Mypy crashed !!!") + + # Remove temp file. 
+ os.remove(program_path) + + # JSON encodes every `\` character into `\\`, so we need to remove `\\` from windows paths + # and `/` from POSIX paths + json_os_separator = os.sep.replace("\\", "\\\\") + normalized_output = [line.replace(test_temp_dir + json_os_separator, "") for line in output] + + assert normalized_output == testcase.output diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testparse.py b/.venv/lib/python3.12/site-packages/mypy/test/testparse.py new file mode 100644 index 0000000..c8bcb5c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testparse.py @@ -0,0 +1,118 @@ +"""Tests for the mypy parser.""" + +from __future__ import annotations + +import sys + +from pytest import skip + +from mypy import defaults +from mypy.config_parser import parse_mypy_comments +from mypy.errors import CompileError, Errors +from mypy.options import Options +from mypy.parse import parse +from mypy.test.data import DataDrivenTestCase, DataSuite +from mypy.test.helpers import assert_string_arrays_equal, find_test_files, parse_options +from mypy.util import get_mypy_comments + + +class ParserSuite(DataSuite): + required_out_section = True + base_path = "." + files = find_test_files(pattern="parse*.test", exclude=["parse-errors.test"]) + + if sys.version_info < (3, 10): + files.remove("parse-python310.test") + if sys.version_info < (3, 12): + files.remove("parse-python312.test") + if sys.version_info < (3, 13): + files.remove("parse-python313.test") + if sys.version_info < (3, 14): + files.remove("parse-python314.test") + + def run_case(self, testcase: DataDrivenTestCase) -> None: + test_parser(testcase) + + +def test_parser(testcase: DataDrivenTestCase) -> None: + """Perform a single parser test case. + + The argument contains the description of the test case. + """ + options = Options() + options.hide_error_codes = True + + if testcase.file.endswith("python310.test"): + options.python_version = (3, 10) + elif testcase.file.endswith("python312.test"): + options.python_version = (3, 12) + elif testcase.file.endswith("python313.test"): + options.python_version = (3, 13) + elif testcase.file.endswith("python314.test"): + options.python_version = (3, 14) + else: + options.python_version = defaults.PYTHON3_VERSION + + source = "\n".join(testcase.input) + + # Apply mypy: comments to options. + comments = get_mypy_comments(source) + changes, _ = parse_mypy_comments(comments, options) + options = options.apply_changes(changes) + + try: + n = parse( + bytes(source, "ascii"), + fnam="main", + module="__main__", + errors=Errors(options), + options=options, + raise_on_error=True, + ) + a = n.str_with_options(options).split("\n") + except CompileError as e: + a = e.messages + assert_string_arrays_equal( + testcase.output, a, f"Invalid parser output ({testcase.file}, line {testcase.line})" + ) + + +# The file name shown in test case output. This is displayed in error +# messages, and must match the file name in the test case descriptions. +INPUT_FILE_NAME = "file" + + +class ParseErrorSuite(DataSuite): + required_out_section = True + base_path = "." + files = ["parse-errors.test"] + + def run_case(self, testcase: DataDrivenTestCase) -> None: + test_parse_error(testcase) + + +def test_parse_error(testcase: DataDrivenTestCase) -> None: + try: + options = parse_options("\n".join(testcase.input), testcase, 0) + if options.python_version != sys.version_info[:2]: + skip() + # Compile temporary file. The test file contains non-ASCII characters. 
+ parse( + bytes("\n".join(testcase.input), "utf-8"), + INPUT_FILE_NAME, + "__main__", + errors=Errors(options), + options=options, + raise_on_error=True, + ) + raise AssertionError("No errors reported") + except CompileError as e: + if e.module_with_blocker is not None: + assert e.module_with_blocker == "__main__" + # Verify that there was a compile error and that the error messages + # are equivalent. + assert_string_arrays_equal( + testcase.output, + e.messages, + f"Invalid compiler output ({testcase.file}, line {testcase.line})", + ) diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testpep561.py b/.venv/lib/python3.12/site-packages/mypy/test/testpep561.py new file mode 100644 index 0000000..0afb69b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testpep561.py @@ -0,0 +1,178 @@ +from __future__ import annotations + +import os +import re +import subprocess +import sys +import tempfile +from collections.abc import Iterator +from contextlib import contextmanager + +import filelock + +import mypy.api +from mypy.test.config import package_path, pip_lock, pip_timeout, test_temp_dir +from mypy.test.data import DataDrivenTestCase, DataSuite +from mypy.test.helpers import assert_string_arrays_equal, perform_file_operations + +# NOTE: options.use_builtins_fixtures should not be set in these +# tests, otherwise mypy will ignore installed third-party packages. + + +class PEP561Suite(DataSuite): + files = ["pep561.test"] + base_path = "." + + def run_case(self, testcase: DataDrivenTestCase) -> None: + test_pep561(testcase) + + +@contextmanager +def virtualenv(python_executable: str = sys.executable) -> Iterator[tuple[str, str]]: + """Context manager that creates a virtualenv in a temporary directory + + Returns the path to the created Python executable + """ + with tempfile.TemporaryDirectory() as venv_dir: + proc = subprocess.run( + [python_executable, "-m", "venv", venv_dir], cwd=os.getcwd(), capture_output=True + ) + if proc.returncode != 0: + err = proc.stdout.decode("utf-8") + proc.stderr.decode("utf-8") + raise Exception("Failed to create venv.\n" + err) + if sys.platform == "win32": + yield venv_dir, os.path.abspath(os.path.join(venv_dir, "Scripts", "python")) + else: + yield venv_dir, os.path.abspath(os.path.join(venv_dir, "bin", "python")) + + +def upgrade_pip(python_executable: str) -> None: + """Install pip>=21.3.1. Required for editable installs with PEP 660.""" + if ( + sys.version_info >= (3, 11) + or (3, 10, 3) <= sys.version_info < (3, 11) + or (3, 9, 11) <= sys.version_info < (3, 10) + ): + # Skip for more recent Python releases which come with pip>=21.3.1 + # out of the box - for performance reasons. 
+ return + + install_cmd = [python_executable, "-m", "pip", "install", "pip>=21.3.1"] + try: + with filelock.FileLock(pip_lock, timeout=pip_timeout): + proc = subprocess.run(install_cmd, capture_output=True, env=os.environ) + except filelock.Timeout as err: + raise Exception(f"Failed to acquire {pip_lock}") from err + if proc.returncode != 0: + raise Exception(proc.stdout.decode("utf-8") + proc.stderr.decode("utf-8")) + + +def install_package( + pkg: str, python_executable: str = sys.executable, editable: bool = False +) -> None: + """Install a package from test-data/packages/pkg/""" + working_dir = os.path.join(package_path, pkg) + with tempfile.TemporaryDirectory() as dir: + install_cmd = [python_executable, "-m", "pip", "install"] + if editable: + install_cmd.append("-e") + install_cmd.append(".") + + # Note that newer versions of pip (21.3+) don't + # follow this env variable, but this is for compatibility + env = {"PIP_BUILD": dir} + # Inherit environment for Windows + env.update(os.environ) + try: + with filelock.FileLock(pip_lock, timeout=pip_timeout): + proc = subprocess.run(install_cmd, cwd=working_dir, capture_output=True, env=env) + except filelock.Timeout as err: + raise Exception(f"Failed to acquire {pip_lock}") from err + if proc.returncode != 0: + raise Exception(proc.stdout.decode("utf-8") + proc.stderr.decode("utf-8")) + + +def test_pep561(testcase: DataDrivenTestCase) -> None: + """Test running mypy on files that depend on PEP 561 packages.""" + assert testcase.old_cwd is not None, "test was not properly set up" + python = sys.executable + + assert python is not None, "Should be impossible" + pkgs, pip_args = parse_pkgs(testcase.input[0]) + mypy_args = parse_mypy_args(testcase.input[1]) + editable = False + for arg in pip_args: + if arg == "editable": + editable = True + else: + raise ValueError(f"Unknown pip argument: {arg}") + assert pkgs, "No packages to install for PEP 561 test?" + with virtualenv(python) as venv: + venv_dir, python_executable = venv + if editable: + # Editable installs with PEP 660 require pip>=21.3 + upgrade_pip(python_executable) + for pkg in pkgs: + install_package(pkg, python_executable, editable) + + cmd_line = list(mypy_args) + has_program = not ("-p" in cmd_line or "--package" in cmd_line) + if has_program: + program = testcase.name + ".py" + with open(program, "w", encoding="utf-8") as f: + for s in testcase.input: + f.write(f"{s}\n") + cmd_line.append(program) + + cmd_line.extend(["--no-error-summary", "--hide-error-codes"]) + if python_executable != sys.executable: + cmd_line.append(f"--python-executable={python_executable}") + + steps = testcase.find_steps() + if steps != [[]]: + steps = [[]] + steps + + for i, operations in enumerate(steps): + perform_file_operations(operations) + + output = [] + # Type check the module + out, err, returncode = mypy.api.run(cmd_line) + + # split lines, remove newlines, and remove directory of test case + for line in (out + err).splitlines(): + if line.startswith(test_temp_dir + os.sep): + output.append(line[len(test_temp_dir + os.sep) :].rstrip("\r\n")) + else: + # Normalize paths so that the output is the same on Windows and Linux/macOS. + # Yes, this is naive: replace all slashes preceding first colon, if any. 
+ path, *rest = line.split(":", maxsplit=1) + if rest: + path = path.replace(os.sep, "/") + output.append(":".join([path, *rest]).rstrip("\r\n")) + iter_count = "" if i == 0 else f" on iteration {i + 1}" + expected = testcase.output if i == 0 else testcase.output2.get(i + 1, []) + + assert_string_arrays_equal( + expected, + output, + f"Invalid output ({testcase.file}, line {testcase.line}){iter_count}", + ) + + if has_program: + os.remove(program) + + +def parse_pkgs(comment: str) -> tuple[list[str], list[str]]: + if not comment.startswith("# pkgs:"): + return ([], []) + else: + pkgs_str, *args = comment[7:].split(";") + return ([pkg.strip() for pkg in pkgs_str.split(",")], [arg.strip() for arg in args]) + + +def parse_mypy_args(line: str) -> list[str]: + m = re.match("# flags: (.*)$", line) + if not m: + return [] # No args; mypy will spit out an error. + return m.group(1).split() diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testpythoneval.py b/.venv/lib/python3.12/site-packages/mypy/test/testpythoneval.py new file mode 100644 index 0000000..6d22aca --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testpythoneval.py @@ -0,0 +1,116 @@ +"""Test cases for running mypy programs using a Python interpreter. + +Each test case type checks a program then runs it using Python. The +output (stdout) of the program is compared to expected output. Type checking +uses full builtins and other stubs. + +Note: Currently Python interpreter paths are hard coded. + +Note: These test cases are *not* included in the main test suite, as including + this suite would slow down the main suite too much. +""" + +from __future__ import annotations + +import os +import os.path +import re +import subprocess +import sys +from tempfile import TemporaryDirectory + +from mypy import api +from mypy.defaults import PYTHON3_VERSION +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase, DataSuite +from mypy.test.helpers import assert_string_arrays_equal, split_lines + +# Path to Python 3 interpreter +python3_path = sys.executable +program_re = re.compile(r"\b_program.py\b") + + +class PythonEvaluationSuite(DataSuite): + files = ["pythoneval.test", "pythoneval-asyncio.test"] + cache_dir = TemporaryDirectory() + + def run_case(self, testcase: DataDrivenTestCase) -> None: + test_python_evaluation(testcase, os.path.join(self.cache_dir.name, ".mypy_cache")) + + +def test_python_evaluation(testcase: DataDrivenTestCase, cache_dir: str) -> None: + """Runs Mypy in a subprocess. + + If this passes without errors, executes the script again with a given Python + version. + """ + assert testcase.old_cwd is not None, "test was not properly set up" + # We must enable site packages to get access to installed stubs. 
+ mypy_cmdline = [ + "--show-traceback", + "--no-silence-site-packages", + "--no-error-summary", + "--hide-error-codes", + "--allow-empty-bodies", + "--test-env", # Speeds up some checks + ] + interpreter = python3_path + mypy_cmdline.append(f"--python-version={'.'.join(map(str, PYTHON3_VERSION))}") + + m = re.search("# flags: (.*)$", "\n".join(testcase.input), re.MULTILINE) + if m: + additional_flags = m.group(1).split() + for flag in additional_flags: + if flag.startswith("--python-version="): + targeted_python_version = flag.split("=")[1] + targeted_major, targeted_minor = targeted_python_version.split(".") + if (int(targeted_major), int(targeted_minor)) > ( + sys.version_info.major, + sys.version_info.minor, + ): + return + mypy_cmdline.extend(additional_flags) + + # Write the program to a file. + program = "_" + testcase.name + ".py" + program_path = os.path.join(test_temp_dir, program) + mypy_cmdline.append(program_path) + with open(program_path, "w", encoding="utf8") as file: + for s in testcase.input: + file.write(f"{s}\n") + mypy_cmdline.append(f"--cache-dir={cache_dir}") + output = [] + # Type check the program. + out, err, returncode = api.run(mypy_cmdline) + # split lines, remove newlines, and remove directory of test case + for line in (out + err).splitlines(): + if line.startswith(test_temp_dir + os.sep): + output.append(line[len(test_temp_dir + os.sep) :].rstrip("\r\n")) + else: + # Normalize paths so that the output is the same on Windows and Linux/macOS. + line = line.replace(test_temp_dir + os.sep, test_temp_dir + "/") + output.append(line.rstrip("\r\n")) + if returncode > 1 and not testcase.output: + # Either api.run() doesn't work well in case of a crash, or pytest interferes with it. + # Tweak output to prevent tests with empty expected output to pass in case of a crash. + output.append("!!! Mypy crashed !!!") + if returncode == 0 and not output: + # Execute the program. + proc = subprocess.run( + [interpreter, "-Wignore", program], cwd=test_temp_dir, capture_output=True + ) + output.extend(split_lines(proc.stdout, proc.stderr)) + # Remove temp file. + os.remove(program_path) + for i, line in enumerate(output): + if os.path.sep + "typeshed" + os.path.sep in line: + output[i] = line.split(os.path.sep)[-1] + assert_string_arrays_equal( + adapt_output(testcase), output, f"Invalid output ({testcase.file}, line {testcase.line})" + ) + + +def adapt_output(testcase: DataDrivenTestCase) -> list[str]: + """Translates the generic _program.py into the actual filename.""" + program = "_" + testcase.name + ".py" + return [program_re.sub(program, line) for line in testcase.output] diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testreports.py b/.venv/lib/python3.12/site-packages/mypy/test/testreports.py new file mode 100644 index 0000000..f638756 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testreports.py @@ -0,0 +1,55 @@ +"""Test cases for reports generated by mypy.""" + +from __future__ import annotations + +import textwrap + +from mypy.report import CoberturaPackage, get_line_rate +from mypy.test.helpers import Suite, assert_equal + +try: + import lxml # type: ignore[import-untyped] +except ImportError: + lxml = None + +import pytest + + +class CoberturaReportSuite(Suite): + @pytest.mark.skipif(lxml is None, reason="Cannot import lxml. Is it installed?") + def test_get_line_rate(self) -> None: + assert_equal("1.0", get_line_rate(0, 0)) + assert_equal("0.3333", get_line_rate(1, 3)) + + @pytest.mark.skipif(lxml is None, reason="Cannot import lxml. 
Is it installed?") + def test_as_xml(self) -> None: + import lxml.etree as etree # type: ignore[import-untyped] + + cobertura_package = CoberturaPackage("foobar") + cobertura_package.covered_lines = 21 + cobertura_package.total_lines = 42 + + child_package = CoberturaPackage("raz") + child_package.covered_lines = 10 + child_package.total_lines = 10 + child_package.classes["class"] = etree.Element("class") + + cobertura_package.packages["raz"] = child_package + + expected_output = textwrap.dedent( + """\ + + + + + + + + + + + """ + ).encode("ascii") + assert_equal( + expected_output, etree.tostring(cobertura_package.as_xml(), pretty_print=True) + ) diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testsemanal.py b/.venv/lib/python3.12/site-packages/mypy/test/testsemanal.py new file mode 100644 index 0000000..741c03f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testsemanal.py @@ -0,0 +1,207 @@ +"""Semantic analyzer test cases""" + +from __future__ import annotations + +import sys + +from mypy import build +from mypy.defaults import PYTHON3_VERSION +from mypy.errors import CompileError +from mypy.modulefinder import BuildSource +from mypy.nodes import TypeInfo +from mypy.options import Options +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase, DataSuite +from mypy.test.helpers import ( + assert_string_arrays_equal, + find_test_files, + normalize_error_messages, + parse_options, + testfile_pyversion, +) + +# Semantic analyzer test cases: dump parse tree + +# Semantic analysis test case description files. +semanal_files = find_test_files( + pattern="semanal-*.test", + exclude=[ + "semanal-errors-python310.test", + "semanal-errors.test", + "semanal-typeinfo.test", + "semanal-symtable.test", + ], +) + + +if sys.version_info < (3, 10): + semanal_files.remove("semanal-python310.test") + + +def get_semanal_options(program_text: str, testcase: DataDrivenTestCase) -> Options: + options = parse_options(program_text, testcase, 1) + options.use_builtins_fixtures = True + options.semantic_analysis_only = True + options.show_traceback = True + options.python_version = PYTHON3_VERSION + return options + + +class SemAnalSuite(DataSuite): + files = semanal_files + native_sep = True + + def run_case(self, testcase: DataDrivenTestCase) -> None: + test_semanal(testcase) + + +def test_semanal(testcase: DataDrivenTestCase) -> None: + """Perform a semantic analysis test case. + + The testcase argument contains a description of the test case + (inputs and output). + """ + + try: + src = "\n".join(testcase.input) + options = get_semanal_options(src, testcase) + options.python_version = testfile_pyversion(testcase.file) + result = build.build( + sources=[BuildSource("main", None, src)], options=options, alt_lib_path=test_temp_dir + ) + a = result.errors + if a: + raise CompileError(a) + # Include string representations of the source files in the actual + # output. 
+ for module in sorted(result.files.keys()): + if module in testcase.test_modules: + a += result.files[module].str_with_options(options).split("\n") + except CompileError as e: + a = e.messages + if testcase.normalize_output: + a = normalize_error_messages(a) + assert_string_arrays_equal( + testcase.output, + a, + f"Invalid semantic analyzer output ({testcase.file}, line {testcase.line})", + ) + + +# Semantic analyzer error test cases + + +class SemAnalErrorSuite(DataSuite): + files = ["semanal-errors.test"] + if sys.version_info >= (3, 10): + semanal_files.append("semanal-errors-python310.test") + + def run_case(self, testcase: DataDrivenTestCase) -> None: + test_semanal_error(testcase) + + +def test_semanal_error(testcase: DataDrivenTestCase) -> None: + """Perform a test case.""" + + try: + src = "\n".join(testcase.input) + res = build.build( + sources=[BuildSource("main", None, src)], + options=get_semanal_options(src, testcase), + alt_lib_path=test_temp_dir, + ) + a = res.errors + except CompileError as e: + # Verify that there was a compile error and that the error messages + # are equivalent. + a = e.messages + if testcase.normalize_output: + a = normalize_error_messages(a) + assert_string_arrays_equal( + testcase.output, a, f"Invalid compiler output ({testcase.file}, line {testcase.line})" + ) + + +# SymbolNode table export test cases + + +class SemAnalSymtableSuite(DataSuite): + required_out_section = True + files = ["semanal-symtable.test"] + + def run_case(self, testcase: DataDrivenTestCase) -> None: + """Perform a test case.""" + try: + # Build test case input. + src = "\n".join(testcase.input) + result = build.build( + sources=[BuildSource("main", None, src)], + options=get_semanal_options(src, testcase), + alt_lib_path=test_temp_dir, + ) + # The output is the symbol table converted into a string. + a = result.errors + if a: + raise CompileError(a) + for module in sorted(result.files.keys()): + if module in testcase.test_modules: + a.append(f"{module}:") + for s in str(result.files[module].names).split("\n"): + a.append(" " + s) + except CompileError as e: + a = e.messages + assert_string_arrays_equal( + testcase.output, + a, + f"Invalid semantic analyzer output ({testcase.file}, line {testcase.line})", + ) + + +# Type info export test cases +class SemAnalTypeInfoSuite(DataSuite): + required_out_section = True + files = ["semanal-typeinfo.test"] + + def run_case(self, testcase: DataDrivenTestCase) -> None: + """Perform a test case.""" + try: + # Build test case input. + src = "\n".join(testcase.input) + result = build.build( + sources=[BuildSource("main", None, src)], + options=get_semanal_options(src, testcase), + alt_lib_path=test_temp_dir, + ) + a = result.errors + if a: + raise CompileError(a) + + # Collect all TypeInfos in top-level modules. + typeinfos = TypeInfoMap() + for module, file in result.files.items(): + if module in testcase.test_modules: + for n in file.names.values(): + if isinstance(n.node, TypeInfo): + assert n.fullname + if any(n.fullname.startswith(m + ".") for m in testcase.test_modules): + typeinfos[n.fullname] = n.node + + # The output is the symbol table converted into a string. 
+ a = str(typeinfos).split("\n") + except CompileError as e: + a = e.messages + assert_string_arrays_equal( + testcase.output, + a, + f"Invalid semantic analyzer output ({testcase.file}, line {testcase.line})", + ) + + +class TypeInfoMap(dict[str, TypeInfo]): + def __str__(self) -> str: + a: list[str] = ["TypeInfoMap("] + for x, y in sorted(self.items()): + ti = ("\n" + " ").join(str(y).split("\n")) + a.append(f" {x} : {ti}") + a[-1] += ")" + return "\n".join(a) diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testsolve.py b/.venv/lib/python3.12/site-packages/mypy/test/testsolve.py new file mode 100644 index 0000000..d60b2cb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testsolve.py @@ -0,0 +1,287 @@ +"""Test cases for the constraint solver used in type inference.""" + +from __future__ import annotations + +from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint +from mypy.solve import Bounds, Graph, solve_constraints, transitive_closure +from mypy.test.helpers import Suite, assert_equal +from mypy.test.typefixture import TypeFixture +from mypy.types import Type, TypeVarId, TypeVarLikeType, TypeVarType + + +class SolveSuite(Suite): + def setUp(self) -> None: + self.fx = TypeFixture() + + def test_empty_input(self) -> None: + self.assert_solve([], [], []) + + def test_simple_supertype_constraints(self) -> None: + self.assert_solve([self.fx.t], [self.supc(self.fx.t, self.fx.a)], [self.fx.a]) + self.assert_solve( + [self.fx.t], + [self.supc(self.fx.t, self.fx.a), self.supc(self.fx.t, self.fx.b)], + [self.fx.a], + ) + + def test_simple_subtype_constraints(self) -> None: + self.assert_solve([self.fx.t], [self.subc(self.fx.t, self.fx.a)], [self.fx.a]) + self.assert_solve( + [self.fx.t], + [self.subc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.b)], + [self.fx.b], + ) + + def test_both_kinds_of_constraints(self) -> None: + self.assert_solve( + [self.fx.t], + [self.supc(self.fx.t, self.fx.b), self.subc(self.fx.t, self.fx.a)], + [self.fx.b], + ) + + def test_unsatisfiable_constraints(self) -> None: + # The constraints are impossible to satisfy. 
+ self.assert_solve( + [self.fx.t], [self.supc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.b)], [None] + ) + + def test_exactly_specified_result(self) -> None: + self.assert_solve( + [self.fx.t], + [self.supc(self.fx.t, self.fx.b), self.subc(self.fx.t, self.fx.b)], + [self.fx.b], + ) + + def test_multiple_variables(self) -> None: + self.assert_solve( + [self.fx.t, self.fx.s], + [ + self.supc(self.fx.t, self.fx.b), + self.supc(self.fx.s, self.fx.c), + self.subc(self.fx.t, self.fx.a), + ], + [self.fx.b, self.fx.c], + ) + + def test_no_constraints_for_var(self) -> None: + self.assert_solve([self.fx.t], [], [self.fx.a_uninhabited]) + self.assert_solve( + [self.fx.t, self.fx.s], [], [self.fx.a_uninhabited, self.fx.a_uninhabited] + ) + self.assert_solve( + [self.fx.t, self.fx.s], + [self.supc(self.fx.s, self.fx.a)], + [self.fx.a_uninhabited, self.fx.a], + ) + + def test_simple_constraints_with_dynamic_type(self) -> None: + self.assert_solve([self.fx.t], [self.supc(self.fx.t, self.fx.anyt)], [self.fx.anyt]) + self.assert_solve( + [self.fx.t], + [self.supc(self.fx.t, self.fx.anyt), self.supc(self.fx.t, self.fx.anyt)], + [self.fx.anyt], + ) + self.assert_solve( + [self.fx.t], + [self.supc(self.fx.t, self.fx.anyt), self.supc(self.fx.t, self.fx.a)], + [self.fx.anyt], + ) + + self.assert_solve([self.fx.t], [self.subc(self.fx.t, self.fx.anyt)], [self.fx.anyt]) + self.assert_solve( + [self.fx.t], + [self.subc(self.fx.t, self.fx.anyt), self.subc(self.fx.t, self.fx.anyt)], + [self.fx.anyt], + ) + # self.assert_solve([self.fx.t], + # [self.subc(self.fx.t, self.fx.anyt), + # self.subc(self.fx.t, self.fx.a)], + # [self.fx.anyt]) + # TODO: figure out what this should be after changes to meet(any, X) + + def test_both_normal_and_any_types_in_results(self) -> None: + # If one of the bounds is any, we promote the other bound to + # any as well, since otherwise the type range does not make sense. 
+ self.assert_solve( + [self.fx.t], + [self.supc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.anyt)], + [self.fx.anyt], + ) + + self.assert_solve( + [self.fx.t], + [self.supc(self.fx.t, self.fx.anyt), self.subc(self.fx.t, self.fx.a)], + [self.fx.anyt], + ) + + def test_poly_no_constraints(self) -> None: + self.assert_solve( + [self.fx.t, self.fx.u], + [], + [self.fx.a_uninhabited, self.fx.a_uninhabited], + allow_polymorphic=True, + ) + + def test_poly_trivial_free(self) -> None: + self.assert_solve( + [self.fx.t, self.fx.u], + [self.subc(self.fx.t, self.fx.a)], + [self.fx.a, self.fx.u], + [self.fx.u], + allow_polymorphic=True, + ) + + def test_poly_free_pair(self) -> None: + self.assert_solve( + [self.fx.t, self.fx.u], + [self.subc(self.fx.t, self.fx.u)], + [self.fx.t, self.fx.t], + [self.fx.t], + allow_polymorphic=True, + ) + + def test_poly_free_pair_with_bounds(self) -> None: + t_prime = self.fx.t.copy_modified(upper_bound=self.fx.b) + self.assert_solve( + [self.fx.t, self.fx.ub], + [self.subc(self.fx.t, self.fx.ub)], + [t_prime, t_prime], + [t_prime], + allow_polymorphic=True, + ) + + def test_poly_free_pair_with_bounds_uninhabited(self) -> None: + self.assert_solve( + [self.fx.ub, self.fx.uc], + [self.subc(self.fx.ub, self.fx.uc)], + [self.fx.a_uninhabited, self.fx.a_uninhabited], + [], + allow_polymorphic=True, + ) + + def test_poly_bounded_chain(self) -> None: + # B <: T <: U <: S <: A + self.assert_solve( + [self.fx.t, self.fx.u, self.fx.s], + [ + self.supc(self.fx.t, self.fx.b), + self.subc(self.fx.t, self.fx.u), + self.subc(self.fx.u, self.fx.s), + self.subc(self.fx.s, self.fx.a), + ], + [self.fx.b, self.fx.b, self.fx.b], + allow_polymorphic=True, + ) + + def test_poly_reverse_overlapping_chain(self) -> None: + # A :> T <: S :> B + self.assert_solve( + [self.fx.t, self.fx.s], + [ + self.subc(self.fx.t, self.fx.s), + self.subc(self.fx.t, self.fx.a), + self.supc(self.fx.s, self.fx.b), + ], + [self.fx.a, self.fx.a], + allow_polymorphic=True, + ) + + def test_poly_reverse_split_chain(self) -> None: + # B :> T <: S :> A + self.assert_solve( + [self.fx.t, self.fx.s], + [ + self.subc(self.fx.t, self.fx.s), + self.subc(self.fx.t, self.fx.b), + self.supc(self.fx.s, self.fx.a), + ], + [self.fx.b, self.fx.a], + allow_polymorphic=True, + ) + + def test_poly_unsolvable_chain(self) -> None: + # A <: T <: U <: S <: B + self.assert_solve( + [self.fx.t, self.fx.u, self.fx.s], + [ + self.supc(self.fx.t, self.fx.a), + self.subc(self.fx.t, self.fx.u), + self.subc(self.fx.u, self.fx.s), + self.subc(self.fx.s, self.fx.b), + ], + [None, None, None], + allow_polymorphic=True, + ) + + def test_simple_chain_closure(self) -> None: + self.assert_transitive_closure( + [self.fx.t.id, self.fx.s.id], + [ + self.supc(self.fx.t, self.fx.b), + self.subc(self.fx.t, self.fx.s), + self.subc(self.fx.s, self.fx.a), + ], + {(self.fx.t.id, self.fx.s.id)}, + {self.fx.t.id: {self.fx.b}, self.fx.s.id: {self.fx.b}}, + {self.fx.t.id: {self.fx.a}, self.fx.s.id: {self.fx.a}}, + ) + + def test_reverse_chain_closure(self) -> None: + self.assert_transitive_closure( + [self.fx.t.id, self.fx.s.id], + [ + self.subc(self.fx.t, self.fx.s), + self.subc(self.fx.t, self.fx.a), + self.supc(self.fx.s, self.fx.b), + ], + {(self.fx.t.id, self.fx.s.id)}, + {self.fx.t.id: set(), self.fx.s.id: {self.fx.b}}, + {self.fx.t.id: {self.fx.a}, self.fx.s.id: set()}, + ) + + def test_secondary_constraint_closure(self) -> None: + self.assert_transitive_closure( + [self.fx.t.id, self.fx.s.id], + [self.supc(self.fx.s, self.fx.gt), 
self.subc(self.fx.s, self.fx.ga)], + set(), + {self.fx.t.id: set(), self.fx.s.id: {self.fx.gt}}, + {self.fx.t.id: {self.fx.a}, self.fx.s.id: {self.fx.ga}}, + ) + + def assert_solve( + self, + vars: list[TypeVarLikeType], + constraints: list[Constraint], + results: list[None | Type], + free_vars: list[TypeVarLikeType] | None = None, + allow_polymorphic: bool = False, + ) -> None: + if free_vars is None: + free_vars = [] + actual, actual_free = solve_constraints( + vars, constraints, allow_polymorphic=allow_polymorphic + ) + assert_equal(actual, results) + assert_equal(actual_free, free_vars) + + def assert_transitive_closure( + self, + vars: list[TypeVarId], + constraints: list[Constraint], + graph: Graph, + lowers: Bounds, + uppers: Bounds, + ) -> None: + actual_graph, actual_lowers, actual_uppers = transitive_closure(vars, constraints) + # Add trivial elements. + for v in vars: + graph.add((v, v)) + assert_equal(actual_graph, graph) + assert_equal(dict(actual_lowers), lowers) + assert_equal(dict(actual_uppers), uppers) + + def supc(self, type_var: TypeVarType, bound: Type) -> Constraint: + return Constraint(type_var, SUPERTYPE_OF, bound) + + def subc(self, type_var: TypeVarType, bound: Type) -> Constraint: + return Constraint(type_var, SUBTYPE_OF, bound) diff --git a/.venv/lib/python3.12/site-packages/mypy/test/teststubgen.py b/.venv/lib/python3.12/site-packages/mypy/test/teststubgen.py new file mode 100644 index 0000000..43974cf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/teststubgen.py @@ -0,0 +1,1648 @@ +from __future__ import annotations + +import io +import os.path +import re +import shutil +import sys +import tempfile +import unittest +from types import ModuleType +from typing import Any + +import pytest + +from mypy.errors import CompileError +from mypy.moduleinspect import InspectError, ModuleInspect +from mypy.stubdoc import ( + ArgSig, + FunctionSig, + build_signature, + find_unique_signatures, + infer_arg_sig_from_anon_docstring, + infer_prop_type_from_docstring, + infer_sig_from_docstring, + is_valid_type, + parse_all_signatures, + parse_signature, +) +from mypy.stubgen import ( + Options, + collect_build_targets, + generate_stubs, + is_blacklisted_path, + is_non_library_module, + mypy_options, + parse_options, +) +from mypy.stubgenc import InspectionStubGenerator, infer_c_method_args +from mypy.stubutil import ( + ClassInfo, + FunctionContext, + common_dir_prefix, + infer_method_ret_type, + remove_misplaced_type_comments, + walk_packages, +) +from mypy.test.data import DataDrivenTestCase, DataSuite +from mypy.test.helpers import assert_equal, assert_string_arrays_equal, local_sys_path_set + + +class StubgenCmdLineSuite(unittest.TestCase): + """Test cases for processing command-line options and finding files.""" + + @unittest.skipIf(sys.platform == "win32", "clean up fails on Windows") + def test_files_found(self) -> None: + current = os.getcwd() + with tempfile.TemporaryDirectory() as tmp: + try: + os.chdir(tmp) + os.mkdir("subdir") + self.make_file("subdir", "a.py") + self.make_file("subdir", "b.py") + os.mkdir(os.path.join("subdir", "pack")) + self.make_file("subdir", "pack", "__init__.py") + opts = parse_options(["subdir"]) + py_mods, pyi_mods, c_mods = collect_build_targets(opts, mypy_options(opts)) + assert_equal(pyi_mods, []) + assert_equal(c_mods, []) + files = {mod.path for mod in py_mods} + assert_equal( + files, + { + os.path.join("subdir", "pack", "__init__.py"), + os.path.join("subdir", "a.py"), + os.path.join("subdir", "b.py"), + }, + ) + 
finally: + os.chdir(current) + + @unittest.skipIf(sys.platform == "win32", "clean up fails on Windows") + def test_packages_found(self) -> None: + current = os.getcwd() + with tempfile.TemporaryDirectory() as tmp: + try: + os.chdir(tmp) + os.mkdir("pack") + self.make_file("pack", "__init__.py", content="from . import a, b") + self.make_file("pack", "a.py") + self.make_file("pack", "b.py") + opts = parse_options(["-p", "pack"]) + py_mods, pyi_mods, c_mods = collect_build_targets(opts, mypy_options(opts)) + assert_equal(pyi_mods, []) + assert_equal(c_mods, []) + files = {os.path.relpath(mod.path or "FAIL") for mod in py_mods} + assert_equal( + files, + { + os.path.join("pack", "__init__.py"), + os.path.join("pack", "a.py"), + os.path.join("pack", "b.py"), + }, + ) + finally: + os.chdir(current) + + @unittest.skipIf(sys.platform == "win32", "clean up fails on Windows") + def test_module_not_found(self) -> None: + current = os.getcwd() + captured_output = io.StringIO() + sys.stdout = captured_output + with tempfile.TemporaryDirectory() as tmp: + try: + os.chdir(tmp) + self.make_file(tmp, "mymodule.py", content="import a") + opts = parse_options(["-m", "mymodule"]) + collect_build_targets(opts, mypy_options(opts)) + assert captured_output.getvalue() == "" + finally: + sys.stdout = sys.__stdout__ + os.chdir(current) + + def make_file(self, *path: str, content: str = "") -> None: + file = os.path.join(*path) + with open(file, "w") as f: + f.write(content) + + def run(self, result: Any | None = None) -> Any | None: + with local_sys_path_set(): + return super().run(result) + + +class StubgenCliParseSuite(unittest.TestCase): + def test_walk_packages(self) -> None: + with ModuleInspect() as m: + assert_equal(set(walk_packages(m, ["mypy.errors"])), {"mypy.errors"}) + + assert_equal( + set(walk_packages(m, ["mypy.errors", "mypy.stubgen"])), + {"mypy.errors", "mypy.stubgen"}, + ) + + all_mypy_packages = set(walk_packages(m, ["mypy"])) + self.assertTrue( + all_mypy_packages.issuperset( + {"mypy", "mypy.errors", "mypy.stubgen", "mypy.test", "mypy.test.helpers"} + ) + ) + + +class StubgenUtilSuite(unittest.TestCase): + """Unit tests for stubgen utility functions.""" + + def test_parse_signature(self) -> None: + self.assert_parse_signature("func()", ("func", [], [])) + + def test_parse_signature_with_args(self) -> None: + self.assert_parse_signature("func(arg)", ("func", ["arg"], [])) + self.assert_parse_signature("do(arg, arg2)", ("do", ["arg", "arg2"], [])) + + def test_parse_signature_with_optional_args(self) -> None: + self.assert_parse_signature("func([arg])", ("func", [], ["arg"])) + self.assert_parse_signature("func(arg[, arg2])", ("func", ["arg"], ["arg2"])) + self.assert_parse_signature("func([arg[, arg2]])", ("func", [], ["arg", "arg2"])) + + def test_parse_signature_with_default_arg(self) -> None: + self.assert_parse_signature("func(arg=None)", ("func", [], ["arg"])) + self.assert_parse_signature("func(arg, arg2=None)", ("func", ["arg"], ["arg2"])) + self.assert_parse_signature('func(arg=1, arg2="")', ("func", [], ["arg", "arg2"])) + + def test_parse_signature_with_qualified_function(self) -> None: + self.assert_parse_signature("ClassName.func(arg)", ("func", ["arg"], [])) + + def test_parse_signature_with_kw_only_arg(self) -> None: + self.assert_parse_signature( + "ClassName.func(arg, *, arg2=1)", ("func", ["arg", "*"], ["arg2"]) + ) + + def test_parse_signature_with_star_arg(self) -> None: + self.assert_parse_signature("ClassName.func(arg, *args)", ("func", ["arg", "*args"], [])) + + def 
test_parse_signature_with_star_star_arg(self) -> None: + self.assert_parse_signature("ClassName.func(arg, **args)", ("func", ["arg", "**args"], [])) + + def assert_parse_signature(self, sig: str, result: tuple[str, list[str], list[str]]) -> None: + assert_equal(parse_signature(sig), result) + + def test_build_signature(self) -> None: + assert_equal(build_signature([], []), "()") + assert_equal(build_signature(["arg"], []), "(arg)") + assert_equal(build_signature(["arg", "arg2"], []), "(arg, arg2)") + assert_equal(build_signature(["arg"], ["arg2"]), "(arg, arg2=...)") + assert_equal(build_signature(["arg"], ["arg2", "**x"]), "(arg, arg2=..., **x)") + + def test_parse_all_signatures(self) -> None: + assert_equal( + parse_all_signatures( + [ + "random text", + ".. function:: fn(arg", + ".. function:: fn()", + " .. method:: fn2(arg)", + ] + ), + ([("fn", "()"), ("fn2", "(arg)")], []), + ) + + def test_find_unique_signatures(self) -> None: + assert_equal( + find_unique_signatures( + [ + ("func", "()"), + ("func", "()"), + ("func2", "()"), + ("func2", "(arg)"), + ("func3", "(arg, arg2)"), + ] + ), + [("func", "()"), ("func3", "(arg, arg2)")], + ) + + def test_infer_sig_from_docstring(self) -> None: + assert_equal( + infer_sig_from_docstring("\nfunc(x) - y", "func"), + [FunctionSig(name="func", args=[ArgSig(name="x")], ret_type="Any")], + ) + assert_equal( + infer_sig_from_docstring("\nfunc(x)", "func"), + [FunctionSig(name="func", args=[ArgSig(name="x")], ret_type="Any")], + ) + + assert_equal( + infer_sig_from_docstring("\nfunc(x, Y_a=None)", "func"), + [ + FunctionSig( + name="func", + args=[ArgSig(name="x"), ArgSig(name="Y_a", default=True)], + ret_type="Any", + ) + ], + ) + + assert_equal( + infer_sig_from_docstring("\nfunc(x, Y_a=3)", "func"), + [ + FunctionSig( + name="func", + args=[ArgSig(name="x"), ArgSig(name="Y_a", default=True)], + ret_type="Any", + ) + ], + ) + + assert_equal( + infer_sig_from_docstring("\nfunc(x, Y_a=[1, 2, 3])", "func"), + [ + FunctionSig( + name="func", + args=[ArgSig(name="x"), ArgSig(name="Y_a", default=True)], + ret_type="Any", + ) + ], + ) + + assert_equal(infer_sig_from_docstring("\nafunc(x) - y", "func"), []) + assert_equal(infer_sig_from_docstring("\nfunc(x, y", "func"), []) + assert_equal( + infer_sig_from_docstring("\nfunc(x=z(y))", "func"), + [FunctionSig(name="func", args=[ArgSig(name="x", default=True)], ret_type="Any")], + ) + + assert_equal(infer_sig_from_docstring("\nfunc x", "func"), []) + # Try to infer signature from type annotation. 
+ assert_equal( + infer_sig_from_docstring("\nfunc(x: int)", "func"), + [FunctionSig(name="func", args=[ArgSig(name="x", type="int")], ret_type="Any")], + ) + assert_equal( + infer_sig_from_docstring("\nfunc(x: int=3)", "func"), + [ + FunctionSig( + name="func", args=[ArgSig(name="x", type="int", default=True)], ret_type="Any" + ) + ], + ) + + assert_equal( + infer_sig_from_docstring("\nfunc(x=3)", "func"), + [ + FunctionSig( + name="func", args=[ArgSig(name="x", type=None, default=True)], ret_type="Any" + ) + ], + ) + + assert_equal( + infer_sig_from_docstring("\nfunc() -> int", "func"), + [FunctionSig(name="func", args=[], ret_type="int")], + ) + + assert_equal( + infer_sig_from_docstring("\nfunc(x: int=3) -> int", "func"), + [ + FunctionSig( + name="func", args=[ArgSig(name="x", type="int", default=True)], ret_type="int" + ) + ], + ) + + assert_equal( + infer_sig_from_docstring("\nfunc(x: int=3) -> int \n", "func"), + [ + FunctionSig( + name="func", args=[ArgSig(name="x", type="int", default=True)], ret_type="int" + ) + ], + ) + + assert_equal( + infer_sig_from_docstring("\nfunc(x: Tuple[int, str]) -> str", "func"), + [ + FunctionSig( + name="func", args=[ArgSig(name="x", type="Tuple[int,str]")], ret_type="str" + ) + ], + ) + + assert_equal( + infer_sig_from_docstring( + "\nfunc(x: Tuple[int, Tuple[str, int], str], y: int) -> str", "func" + ), + [ + FunctionSig( + name="func", + args=[ + ArgSig(name="x", type="Tuple[int,Tuple[str,int],str]"), + ArgSig(name="y", type="int"), + ], + ret_type="str", + ) + ], + ) + + assert_equal( + infer_sig_from_docstring("\nfunc(x: foo.bar)", "func"), + [FunctionSig(name="func", args=[ArgSig(name="x", type="foo.bar")], ret_type="Any")], + ) + + assert_equal( + infer_sig_from_docstring("\nfunc(x: list=[1,2,[3,4]])", "func"), + [ + FunctionSig( + name="func", args=[ArgSig(name="x", type="list", default=True)], ret_type="Any" + ) + ], + ) + + assert_equal( + infer_sig_from_docstring('\nfunc(x: str="nasty[")', "func"), + [ + FunctionSig( + name="func", args=[ArgSig(name="x", type="str", default=True)], ret_type="Any" + ) + ], + ) + + assert_equal(infer_sig_from_docstring("\nfunc[(x: foo.bar, invalid]", "func"), []) + + assert_equal( + infer_sig_from_docstring("\nfunc(x: invalid::type)", "func"), + [FunctionSig(name="func", args=[ArgSig(name="x", type=None)], ret_type="Any")], + ) + + assert_equal( + infer_sig_from_docstring('\nfunc(x: str="")', "func"), + [ + FunctionSig( + name="func", args=[ArgSig(name="x", type="str", default=True)], ret_type="Any" + ) + ], + ) + + def test_infer_sig_from_docstring_duplicate_args(self) -> None: + assert_equal( + infer_sig_from_docstring("\nfunc(x, x) -> str\nfunc(x, y) -> int", "func"), + [FunctionSig(name="func", args=[ArgSig(name="x"), ArgSig(name="y")], ret_type="int")], + ) + + def test_infer_sig_from_docstring_bad_indentation(self) -> None: + assert_equal( + infer_sig_from_docstring( + """ + x + x + x + """, + "func", + ), + None, + ) + + def test_infer_sig_from_docstring_args_kwargs(self) -> None: + assert_equal( + infer_sig_from_docstring("func(*args, **kwargs) -> int", "func"), + [ + FunctionSig( + name="func", + args=[ArgSig(name="*args"), ArgSig(name="**kwargs")], + ret_type="int", + ) + ], + ) + + assert_equal( + infer_sig_from_docstring("func(*args) -> int", "func"), + [FunctionSig(name="func", args=[ArgSig(name="*args")], ret_type="int")], + ) + + assert_equal( + infer_sig_from_docstring("func(**kwargs) -> int", "func"), + [FunctionSig(name="func", args=[ArgSig(name="**kwargs")], ret_type="int")], + ) + + 
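+    # Note (added for readability): the next case documents malformed */** argument
+    # docstrings (duplicated *args, duplicated **kwargs, **kwargs before *args) that
+    # infer_sig_from_docstring does not yet reject; per its xfail reason it is expected
+    # to fail until that validation is implemented.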
@pytest.mark.xfail( + raises=AssertionError, reason="Arg and kwarg signature validation not implemented yet" + ) + def test_infer_sig_from_docstring_args_kwargs_errors(self) -> None: + # Double args + assert_equal(infer_sig_from_docstring("func(*args, *args2) -> int", "func"), []) + + # Double kwargs + assert_equal(infer_sig_from_docstring("func(**kw, **kw2) -> int", "func"), []) + + # args after kwargs + assert_equal(infer_sig_from_docstring("func(**kwargs, *args) -> int", "func"), []) + + def test_infer_sig_from_docstring_positional_only_arguments(self) -> None: + assert_equal( + infer_sig_from_docstring("func(self, /) -> str", "func"), + [FunctionSig(name="func", args=[ArgSig(name="self")], ret_type="str")], + ) + + assert_equal( + infer_sig_from_docstring("func(self, x, /) -> str", "func"), + [ + FunctionSig( + name="func", args=[ArgSig(name="self"), ArgSig(name="x")], ret_type="str" + ) + ], + ) + + assert_equal( + infer_sig_from_docstring("func(x, /, y) -> int", "func"), + [FunctionSig(name="func", args=[ArgSig(name="x"), ArgSig(name="y")], ret_type="int")], + ) + + assert_equal( + infer_sig_from_docstring("func(x, /, *args) -> str", "func"), + [ + FunctionSig( + name="func", args=[ArgSig(name="x"), ArgSig(name="*args")], ret_type="str" + ) + ], + ) + + assert_equal( + infer_sig_from_docstring("func(x, /, *, kwonly, **kwargs) -> str", "func"), + [ + FunctionSig( + name="func", + args=[ArgSig(name="x"), ArgSig(name="kwonly"), ArgSig(name="**kwargs")], + ret_type="str", + ) + ], + ) + + def test_infer_sig_from_docstring_keyword_only_arguments(self) -> None: + assert_equal( + infer_sig_from_docstring("func(*, x) -> str", "func"), + [FunctionSig(name="func", args=[ArgSig(name="x")], ret_type="str")], + ) + + assert_equal( + infer_sig_from_docstring("func(x, *, y) -> str", "func"), + [FunctionSig(name="func", args=[ArgSig(name="x"), ArgSig(name="y")], ret_type="str")], + ) + + assert_equal( + infer_sig_from_docstring("func(*, x, y) -> str", "func"), + [FunctionSig(name="func", args=[ArgSig(name="x"), ArgSig(name="y")], ret_type="str")], + ) + + assert_equal( + infer_sig_from_docstring("func(x, *, kwonly, **kwargs) -> str", "func"), + [ + FunctionSig( + name="func", + args=[ArgSig(name="x"), ArgSig(name="kwonly"), ArgSig("**kwargs")], + ret_type="str", + ) + ], + ) + + def test_infer_sig_from_docstring_pos_only_and_keyword_only_arguments(self) -> None: + assert_equal( + infer_sig_from_docstring("func(x, /, *, y) -> str", "func"), + [FunctionSig(name="func", args=[ArgSig(name="x"), ArgSig(name="y")], ret_type="str")], + ) + + assert_equal( + infer_sig_from_docstring("func(x, /, y, *, z) -> str", "func"), + [ + FunctionSig( + name="func", + args=[ArgSig(name="x"), ArgSig(name="y"), ArgSig(name="z")], + ret_type="str", + ) + ], + ) + + assert_equal( + infer_sig_from_docstring("func(x, /, y, *, z, **kwargs) -> str", "func"), + [ + FunctionSig( + name="func", + args=[ + ArgSig(name="x"), + ArgSig(name="y"), + ArgSig(name="z"), + ArgSig("**kwargs"), + ], + ret_type="str", + ) + ], + ) + + def test_infer_sig_from_docstring_pos_only_and_keyword_only_arguments_errors(self) -> None: + # / as first argument + assert_equal(infer_sig_from_docstring("func(/, x) -> str", "func"), []) + + # * as last argument + assert_equal(infer_sig_from_docstring("func(x, *) -> str", "func"), []) + + # / after * + assert_equal(infer_sig_from_docstring("func(x, *, /, y) -> str", "func"), []) + + # Two / + assert_equal(infer_sig_from_docstring("func(x, /, /, *, y) -> str", "func"), []) + + 
assert_equal(infer_sig_from_docstring("func(x, /, y, /, *, z) -> str", "func"), []) + + # Two * + assert_equal(infer_sig_from_docstring("func(x, /, *, *, y) -> str", "func"), []) + + assert_equal(infer_sig_from_docstring("func(x, /, *, y, *, z) -> str", "func"), []) + + # *args and * are not allowed + assert_equal(infer_sig_from_docstring("func(*args, *, kwonly) -> str", "func"), []) + + def test_infer_arg_sig_from_anon_docstring(self) -> None: + assert_equal( + infer_arg_sig_from_anon_docstring("(*args, **kwargs)"), + [ArgSig(name="*args"), ArgSig(name="**kwargs")], + ) + + assert_equal( + infer_arg_sig_from_anon_docstring( + "(x: Tuple[int, Tuple[str, int], str]=(1, ('a', 2), 'y'), y: int=4)" + ), + [ + ArgSig(name="x", type="Tuple[int,Tuple[str,int],str]", default=True), + ArgSig(name="y", type="int", default=True), + ], + ) + + def test_infer_prop_type_from_docstring(self) -> None: + assert_equal(infer_prop_type_from_docstring("str: A string."), "str") + assert_equal(infer_prop_type_from_docstring("Optional[int]: An int."), "Optional[int]") + assert_equal( + infer_prop_type_from_docstring("Tuple[int, int]: A tuple."), "Tuple[int, int]" + ) + assert_equal(infer_prop_type_from_docstring("\nstr: A string."), None) + + def test_infer_sig_from_docstring_square_brackets(self) -> None: + assert ( + infer_sig_from_docstring("fetch_row([maxrows, how]) -- Fetches stuff", "fetch_row") + == [] + ) + + def test_remove_misplaced_type_comments_1(self) -> None: + good = """ + \u1234 + def f(x): # type: (int) -> int + + def g(x): + # type: (int) -> int + + def h(): + + # type: () int + + x = 1 # type: int + """ + + assert_equal(remove_misplaced_type_comments(good), good) + + def test_remove_misplaced_type_comments_2(self) -> None: + bad = """ + def f(x): + # type: Callable[[int], int] + pass + + # type: "foo" + # type: 'bar' + x = 1 + # type: int + """ + bad_fixed = """ + def f(x): + + pass + + + + x = 1 + + """ + assert_equal(remove_misplaced_type_comments(bad), bad_fixed) + + def test_remove_misplaced_type_comments_3(self) -> None: + bad = ''' + def f(x): + """docstring""" + # type: (int) -> int + pass + + def g(x): + """docstring + """ + # type: (int) -> int + pass + ''' + bad_fixed = ''' + def f(x): + """docstring""" + + pass + + def g(x): + """docstring + """ + + pass + ''' + assert_equal(remove_misplaced_type_comments(bad), bad_fixed) + + def test_remove_misplaced_type_comments_4(self) -> None: + bad = """ + def f(x): + '''docstring''' + # type: (int) -> int + pass + + def g(x): + '''docstring + ''' + # type: (int) -> int + pass + """ + bad_fixed = """ + def f(x): + '''docstring''' + + pass + + def g(x): + '''docstring + ''' + + pass + """ + assert_equal(remove_misplaced_type_comments(bad), bad_fixed) + + def test_remove_misplaced_type_comments_5(self) -> None: + bad = """ + def f(x): + # type: (int, List[Any], + # float, bool) -> int + pass + + def g(x): + # type: (int, List[Any]) + pass + """ + bad_fixed = """ + def f(x): + + # float, bool) -> int + pass + + def g(x): + + pass + """ + assert_equal(remove_misplaced_type_comments(bad), bad_fixed) + + def test_remove_misplaced_type_comments_bytes(self) -> None: + original = b""" + \xbf + def f(x): # type: (int) -> int + + def g(x): + # type: (int) -> int + pass + + def h(): + # type: int + pass + + x = 1 # type: int + """ + + dest = b""" + \xbf + def f(x): # type: (int) -> int + + def g(x): + # type: (int) -> int + pass + + def h(): + + pass + + x = 1 # type: int + """ + + assert_equal(remove_misplaced_type_comments(original), dest) + + 
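+    # Note (added for readability): the two platform-specific cases below exercise
+    # common_dir_prefix(), which returns the deepest directory shared by the given stub
+    # paths ("." when there is none); they pin down how mixed forward and backward
+    # slashes are treated on POSIX versus Windows.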
@unittest.skipIf(sys.platform == "win32", "Tests building the paths common ancestor on *nix") + def test_common_dir_prefix_unix(self) -> None: + assert common_dir_prefix([]) == "." + assert common_dir_prefix(["x.pyi"]) == "." + assert common_dir_prefix(["./x.pyi"]) == "." + assert common_dir_prefix(["foo/bar/x.pyi"]) == "foo/bar" + assert common_dir_prefix(["foo/bar/x.pyi", "foo/bar/y.pyi"]) == "foo/bar" + assert common_dir_prefix(["foo/bar/x.pyi", "foo/y.pyi"]) == "foo" + assert common_dir_prefix(["foo/x.pyi", "foo/bar/y.pyi"]) == "foo" + assert common_dir_prefix(["foo/bar/zar/x.pyi", "foo/y.pyi"]) == "foo" + assert common_dir_prefix(["foo/x.pyi", "foo/bar/zar/y.pyi"]) == "foo" + assert common_dir_prefix(["foo/bar/zar/x.pyi", "foo/bar/y.pyi"]) == "foo/bar" + assert common_dir_prefix(["foo/bar/x.pyi", "foo/bar/zar/y.pyi"]) == "foo/bar" + assert common_dir_prefix([r"foo/bar\x.pyi"]) == "foo" + assert common_dir_prefix([r"foo\bar/x.pyi"]) == r"foo\bar" + + @unittest.skipIf( + sys.platform != "win32", "Tests building the paths common ancestor on Windows" + ) + def test_common_dir_prefix_win(self) -> None: + assert common_dir_prefix(["x.pyi"]) == "." + assert common_dir_prefix([r".\x.pyi"]) == "." + assert common_dir_prefix([r"foo\bar\x.pyi"]) == r"foo\bar" + assert common_dir_prefix([r"foo\bar\x.pyi", r"foo\bar\y.pyi"]) == r"foo\bar" + assert common_dir_prefix([r"foo\bar\x.pyi", r"foo\y.pyi"]) == "foo" + assert common_dir_prefix([r"foo\x.pyi", r"foo\bar\y.pyi"]) == "foo" + assert common_dir_prefix([r"foo\bar\zar\x.pyi", r"foo\y.pyi"]) == "foo" + assert common_dir_prefix([r"foo\x.pyi", r"foo\bar\zar\y.pyi"]) == "foo" + assert common_dir_prefix([r"foo\bar\zar\x.pyi", r"foo\bar\y.pyi"]) == r"foo\bar" + assert common_dir_prefix([r"foo\bar\x.pyi", r"foo\bar\zar\y.pyi"]) == r"foo\bar" + assert common_dir_prefix([r"foo/bar\x.pyi"]) == r"foo\bar" + assert common_dir_prefix([r"foo\bar/x.pyi"]) == r"foo\bar" + assert common_dir_prefix([r"foo/bar/x.pyi"]) == r"foo\bar" + + def test_function_context_nested_classes(self) -> None: + ctx = FunctionContext( + module_name="spangle", + name="foo", + class_info=ClassInfo( + name="Nested", self_var="self", parent=ClassInfo(name="Parent", self_var="self") + ), + ) + assert ctx.fullname == "spangle.Parent.Nested.foo" + + +class StubgenHelpersSuite(unittest.TestCase): + def test_is_blacklisted_path(self) -> None: + assert not is_blacklisted_path("foo/bar.py") + assert not is_blacklisted_path("foo.py") + assert not is_blacklisted_path("foo/xvendor/bar.py") + assert not is_blacklisted_path("foo/vendorx/bar.py") + assert is_blacklisted_path("foo/vendor/bar.py") + assert is_blacklisted_path("foo/vendored/bar.py") + assert is_blacklisted_path("foo/vendored/bar/thing.py") + assert is_blacklisted_path("foo/six.py") + + def test_is_non_library_module(self) -> None: + assert not is_non_library_module("foo") + assert not is_non_library_module("foo.bar") + + # The following could be test modules, but we are very conservative and + # don't treat them as such since they could plausibly be real modules. 
+ assert not is_non_library_module("foo.bartest") + assert not is_non_library_module("foo.bartests") + assert not is_non_library_module("foo.testbar") + + assert is_non_library_module("foo.test") + assert is_non_library_module("foo.test.foo") + assert is_non_library_module("foo.tests") + assert is_non_library_module("foo.tests.foo") + assert is_non_library_module("foo.testing.foo") + assert is_non_library_module("foo.SelfTest.foo") + + assert is_non_library_module("foo.test_bar") + assert is_non_library_module("foo.bar_tests") + assert is_non_library_module("foo.testing") + assert is_non_library_module("foo.conftest") + assert is_non_library_module("foo.bar_test_util") + assert is_non_library_module("foo.bar_test_utils") + assert is_non_library_module("foo.bar_test_base") + + assert is_non_library_module("foo.setup") + + assert is_non_library_module("foo.__main__") + + +class StubgenPythonSuite(DataSuite): + """Data-driven end-to-end test cases that generate stub files. + + You can use these magic test case name suffixes: + + *_semanal + Run semantic analysis (slow as this uses real stubs -- only use + when necessary) + *_import + Import module and perform runtime introspection (in the current + process!) + + You can use these magic comments: + + # flags: --some-stubgen-option ... + Specify custom stubgen options + + # modules: module1 module2 ... + Specify which modules to output (by default only 'main') + """ + + required_out_section = True + base_path = "." + files = ["stubgen.test"] + + @unittest.skipIf(sys.platform == "win32", "clean up fails on Windows") + def run_case(self, testcase: DataDrivenTestCase) -> None: + with local_sys_path_set(): + self.run_case_inner(testcase) + + def run_case_inner(self, testcase: DataDrivenTestCase) -> None: + extra = [] # Extra command-line args + mods = [] # Module names to process + source = "\n".join(testcase.input) + for file, content in testcase.files + [("./main.py", source)]: + # Strip ./ prefix and .py suffix. 
+ mod = file[2:-3].replace("/", ".") + if mod.endswith(".__init__"): + mod, _, _ = mod.rpartition(".") + mods.append(mod) + if "-p " not in source: + extra.extend(["-m", mod]) + with open(file, "w") as f: + f.write(content) + + options = self.parse_flags(source, extra) + if sys.version_info < options.pyversion: + pytest.skip() + modules = self.parse_modules(source) + out_dir = "out" + try: + try: + if testcase.name.endswith("_inspect"): + options.inspect = True + else: + if not testcase.name.endswith("_import"): + options.no_import = True + if not testcase.name.endswith("_semanal"): + options.parse_only = True + + generate_stubs(options) + a: list[str] = [] + for module in modules: + fnam = module_to_path(out_dir, module) + self.add_file(fnam, a, header=len(modules) > 1) + except CompileError as e: + a = e.messages + assert_string_arrays_equal( + testcase.output, a, f"Invalid output ({testcase.file}, line {testcase.line})" + ) + finally: + for mod in mods: + if mod in sys.modules: + del sys.modules[mod] + shutil.rmtree(out_dir) + + def parse_flags(self, program_text: str, extra: list[str]) -> Options: + flags = re.search("# flags: (.*)$", program_text, flags=re.MULTILINE) + pyversion = None + if flags: + flag_list = flags.group(1).split() + for i, flag in enumerate(flag_list): + if flag.startswith("--python-version="): + pyversion = flag.split("=", 1)[1] + del flag_list[i] + break + else: + flag_list = [] + options = parse_options(flag_list + extra) + if pyversion: + # A hack to allow testing old python versions with new language constructs + # This should be rarely used in general as stubgen output should not be version-specific + major, minor = pyversion.split(".", 1) + options.pyversion = (int(major), int(minor)) + if "--verbose" not in flag_list: + options.quiet = True + else: + options.verbose = True + return options + + def parse_modules(self, program_text: str) -> list[str]: + modules = re.search("# modules: (.*)$", program_text, flags=re.MULTILINE) + if modules: + return modules.group(1).split() + else: + return ["main"] + + def add_file(self, path: str, result: list[str], header: bool) -> None: + if not os.path.exists(path): + result.append("<%s was not generated>" % path.replace("\\", "/")) + return + if header: + result.append(f"# {path[4:]}") + with open(path, encoding="utf8") as file: + result.extend(file.read().splitlines()) + + +self_arg = ArgSig(name="self") + + +class TestBaseClass: + pass + + +class TestClass(TestBaseClass): + pass + + +class StubgencSuite(unittest.TestCase): + """Unit tests for stub generation from C modules using introspection. + + Note that these don't cover a lot! 
+ """ + + def test_infer_hash_sig(self) -> None: + assert_equal(infer_c_method_args("__hash__"), [self_arg]) + assert_equal(infer_method_ret_type("__hash__"), "int") + + def test_infer_getitem_sig(self) -> None: + assert_equal(infer_c_method_args("__getitem__"), [self_arg, ArgSig(name="index")]) + + def test_infer_setitem_sig(self) -> None: + assert_equal( + infer_c_method_args("__setitem__"), + [self_arg, ArgSig(name="index"), ArgSig(name="object")], + ) + assert_equal(infer_method_ret_type("__setitem__"), "None") + + def test_infer_eq_op_sig(self) -> None: + for op in ("eq", "ne", "lt", "le", "gt", "ge"): + assert_equal( + infer_c_method_args(f"__{op}__"), [self_arg, ArgSig(name="other", type="object")] + ) + + def test_infer_binary_op_sig(self) -> None: + for op in ("add", "radd", "sub", "rsub", "mul", "rmul"): + assert_equal(infer_c_method_args(f"__{op}__"), [self_arg, ArgSig(name="other")]) + + def test_infer_equality_op_sig(self) -> None: + for op in ("eq", "ne", "lt", "le", "gt", "ge", "contains"): + assert_equal(infer_method_ret_type(f"__{op}__"), "bool") + + def test_infer_unary_op_sig(self) -> None: + for op in ("neg", "pos"): + assert_equal(infer_c_method_args(f"__{op}__"), [self_arg]) + + def test_infer_cast_sig(self) -> None: + for op in ("float", "bool", "bytes", "int"): + assert_equal(infer_method_ret_type(f"__{op}__"), op) + + def test_generate_class_stub_no_crash_for_object(self) -> None: + output: list[str] = [] + mod = ModuleType("module", "") # any module is fine + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + + gen.generate_class_stub("alias", object, output) + assert_equal(gen.get_imports().splitlines(), []) + assert_equal(output[0], "class alias:") + + def test_generate_class_stub_variable_type_annotation(self) -> None: + # This class mimics the stubgen unit test 'testClassVariable' + class TestClassVariableCls: + x = 1 + + output: list[str] = [] + mod = ModuleType("module", "") # any module is fine + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_class_stub("C", TestClassVariableCls, output) + assert_equal(gen.get_imports().splitlines(), ["from typing import ClassVar"]) + assert_equal(output, ["class C:", " x: ClassVar[int] = ..."]) + + def test_generate_c_type_none_default(self) -> None: + class TestClass: + def test(self, arg0=1, arg1=None) -> None: # type: ignore[no-untyped-def] + pass + + output: list[str] = [] + mod = ModuleType(TestClass.__module__, "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.is_c_module = False + gen.generate_function_stub( + "test", + TestClass.test, + output=output, + class_info=ClassInfo( + self_var="self", + cls=TestClass, + name="TestClass", + docstring=getattr(TestClass, "__doc__", None), + ), + ) + assert_equal( + output, ["def test(self, arg0: int = ..., arg1: Incomplete | None = ...) 
-> None: ..."] + ) + + def test_non_c_generate_signature_with_kw_only_args(self) -> None: + class TestClass: + def test( + self, arg0: str, *, keyword_only: str, keyword_only_with_default: int = 7 + ) -> None: + pass + + output: list[str] = [] + mod = ModuleType(TestClass.__module__, "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.is_c_module = False + gen.generate_function_stub( + "test", + TestClass.test, + output=output, + class_info=ClassInfo( + self_var="self", + cls=TestClass, + name="TestClass", + docstring=getattr(TestClass, "__doc__", None), + ), + ) + assert_equal( + output, + [ + "def test(self, arg0: str, *, keyword_only: str, keyword_only_with_default: int = ...) -> None: ..." + ], + ) + + def test_generate_c_type_inheritance(self) -> None: + class TestClass(KeyError): + pass + + output: list[str] = [] + mod = ModuleType("module, ") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_class_stub("C", TestClass, output) + assert_equal(output, ["class C(KeyError): ..."]) + assert_equal(gen.get_imports().splitlines(), []) + + def test_generate_c_type_inheritance_same_module(self) -> None: + output: list[str] = [] + mod = ModuleType(TestBaseClass.__module__, "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_class_stub("C", TestClass, output) + assert_equal(output, ["class C(TestBaseClass): ..."]) + assert_equal(gen.get_imports().splitlines(), []) + + def test_generate_c_type_inheritance_other_module(self) -> None: + import argparse + + class TestClass(argparse.Action): + pass + + output: list[str] = [] + mod = ModuleType("module", "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_class_stub("C", TestClass, output) + assert_equal(output, ["class C(argparse.Action): ..."]) + assert_equal(gen.get_imports().splitlines(), ["import argparse"]) + + def test_generate_c_type_inheritance_builtin_type(self) -> None: + class TestClass(type): + pass + + output: list[str] = [] + mod = ModuleType("module", "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_class_stub("C", TestClass, output) + assert_equal(output, ["class C(type): ..."]) + assert_equal(gen.get_imports().splitlines(), []) + + def test_generate_c_type_with_docstring(self) -> None: + class TestClass: + def test(self, arg0: str) -> None: + """ + test(self: TestClass, arg0: int) + """ + + output: list[str] = [] + mod = ModuleType(TestClass.__module__, "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( + "test", + TestClass.test, + output=output, + class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), + ) + assert_equal(output, ["def test(self, arg0: int) -> Any: ..."]) + assert_equal(gen.get_imports().splitlines(), []) + + def test_generate_c_type_with_docstring_no_self_arg(self) -> None: + class TestClass: + def test(self, arg0: str) -> None: + """ + test(arg0: int) + """ + + output: list[str] = [] + mod = ModuleType(TestClass.__module__, "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( + "test", + TestClass.test, + output=output, + class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), + ) + assert_equal(output, ["def test(self, arg0: int) -> Any: ..."]) + 
assert_equal(gen.get_imports().splitlines(), []) + + def test_generate_c_type_classmethod(self) -> None: + class TestClass: + @classmethod + def test(cls, arg0: str) -> None: + pass + + output: list[str] = [] + mod = ModuleType(TestClass.__module__, "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( + "test", + TestClass.test, + output=output, + class_info=ClassInfo(self_var="cls", cls=TestClass, name="TestClass"), + ) + assert_equal(output, ["@classmethod", "def test(cls, *args, **kwargs): ..."]) + assert_equal(gen.get_imports().splitlines(), []) + + def test_generate_c_type_classmethod_with_overloads(self) -> None: + class TestClass: + @classmethod + def test(cls, arg0: str) -> None: + """ + test(cls, arg0: str) + test(cls, arg0: int) + """ + pass + + output: list[str] = [] + mod = ModuleType(TestClass.__module__, "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( + "test", + TestClass.test, + output=output, + class_info=ClassInfo(self_var="cls", cls=TestClass, name="TestClass"), + ) + assert_equal( + output, + [ + "@overload", + "@classmethod", + "def test(cls, arg0: str) -> Any: ...", + "@overload", + "@classmethod", + "def test(cls, arg0: int) -> Any: ...", + ], + ) + assert_equal(gen.get_imports().splitlines(), ["from typing import overload"]) + + def test_generate_c_type_with_docstring_empty_default(self) -> None: + class TestClass: + def test(self, arg0: str = "") -> None: + """ + test(self: TestClass, arg0: str = "") + """ + + output: list[str] = [] + mod = ModuleType(TestClass.__module__, "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( + "test", + TestClass.test, + output=output, + class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), + ) + assert_equal(output, ["def test(self, arg0: str = ...) -> Any: ..."]) + assert_equal(gen.get_imports().splitlines(), []) + + def test_generate_c_function_other_module_arg(self) -> None: + """Test that if argument references type from other module, module will be imported.""" + + # Provide different type in python spec than in docstring to make sure, that docstring + # information is used. + def test(arg0: str) -> None: + """ + test(arg0: argparse.Action) + """ + + output: list[str] = [] + mod = ModuleType(self.__module__, "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub("test", test, output=output) + assert_equal(output, ["def test(arg0: argparse.Action) -> Any: ..."]) + assert_equal(gen.get_imports().splitlines(), ["import argparse"]) + + def test_generate_c_function_same_module(self) -> None: + """Test that if annotation references type from same module but using full path, no module + will be imported, and type specification will be striped to local reference. + """ + + # Provide different type in python spec than in docstring to make sure, that docstring + # information is used. 
+ def test(arg0: str) -> None: + """ + test(arg0: argparse.Action) -> argparse.Action + """ + + output: list[str] = [] + mod = ModuleType("argparse", "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub("test", test, output=output) + assert_equal(output, ["def test(arg0: Action) -> Action: ..."]) + assert_equal(gen.get_imports().splitlines(), []) + + def test_generate_c_function_other_module(self) -> None: + """Test that if annotation references type from other module, module will be imported.""" + + def test(arg0: str) -> None: + """ + test(arg0: argparse.Action) -> argparse.Action + """ + + output: list[str] = [] + mod = ModuleType(self.__module__, "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub("test", test, output=output) + assert_equal(output, ["def test(arg0: argparse.Action) -> argparse.Action: ..."]) + assert_equal(gen.get_imports().splitlines(), ["import argparse"]) + + def test_generate_c_function_same_module_nested(self) -> None: + """Test that if annotation references type from same module but using full path, no module + will be imported, and type specification will be stripped to local reference. + """ + + # Provide different type in python spec than in docstring to make sure, that docstring + # information is used. + def test(arg0: str) -> None: + """ + test(arg0: list[argparse.Action]) -> list[argparse.Action] + """ + + output: list[str] = [] + mod = ModuleType("argparse", "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub("test", test, output=output) + assert_equal(output, ["def test(arg0: list[Action]) -> list[Action]: ..."]) + assert_equal(gen.get_imports().splitlines(), []) + + def test_generate_c_function_same_module_compound(self) -> None: + """Test that if annotation references type from same module but using full path, no module + will be imported, and type specification will be stripped to local reference. + """ + + # Provide different type in python spec than in docstring to make sure, that docstring + # information is used. + def test(arg0: str) -> None: + """ + test(arg0: Union[argparse.Action, NoneType]) -> Tuple[argparse.Action, NoneType] + """ + + output: list[str] = [] + mod = ModuleType("argparse", "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub("test", test, output=output) + assert_equal(output, ["def test(arg0: Union[Action, None]) -> Tuple[Action, None]: ..."]) + assert_equal(gen.get_imports().splitlines(), []) + + def test_generate_c_function_other_module_nested(self) -> None: + """Test that if annotation references type from other module, module will be imported, + and the import will be restricted to one of the known modules.""" + + def test(arg0: str) -> None: + """ + test(arg0: foo.bar.Action) -> other.Thing + """ + + output: list[str] = [] + mod = ModuleType(self.__module__, "") + gen = InspectionStubGenerator( + mod.__name__, known_modules=["foo", "foo.spangle", "bar"], module=mod + ) + gen.generate_function_stub("test", test, output=output) + assert_equal(output, ["def test(arg0: foo.bar.Action) -> other.Thing: ..."]) + assert_equal(gen.get_imports().splitlines(), ["import foo", "import other"]) + + def test_generate_c_function_no_crash_for_non_str_docstring(self) -> None: + def test(arg0: str) -> None: ... 
+ + test.__doc__ = property(lambda self: "test(arg0: str) -> None") # type: ignore[assignment] + + output: list[str] = [] + mod = ModuleType(self.__module__, "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub("test", test, output=output) + assert_equal(output, ["def test(*args, **kwargs): ..."]) + assert_equal(gen.get_imports().splitlines(), []) + + def test_generate_c_property_with_pybind11(self) -> None: + """Signatures included by PyBind11 inside property.fget are read.""" + + class TestClass: + def get_attribute(self) -> None: + """ + (self: TestClass) -> str + """ + + attribute = property(get_attribute, doc="") + + readwrite_properties: list[str] = [] + readonly_properties: list[str] = [] + mod = ModuleType("module", "") # any module is fine + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_property_stub( + "attribute", + TestClass.__dict__["attribute"], + TestClass.attribute, + [], + readwrite_properties, + readonly_properties, + ) + assert_equal(readwrite_properties, []) + assert_equal(readonly_properties, ["@property", "def attribute(self) -> str: ..."]) + + def test_generate_c_property_with_rw_property(self) -> None: + class TestClass: + def __init__(self) -> None: + self._attribute = 0 + + @property + def attribute(self) -> int: + return self._attribute + + @attribute.setter + def attribute(self, value: int) -> None: + self._attribute = value + + readwrite_properties: list[str] = [] + readonly_properties: list[str] = [] + mod = ModuleType("module", "") # any module is fine + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_property_stub( + "attribute", + TestClass.__dict__["attribute"], + TestClass.attribute, + [], + readwrite_properties, + readonly_properties, + ) + assert_equal(readwrite_properties, ["attribute: Incomplete"]) + assert_equal(readonly_properties, []) + + def test_generate_c_type_with_single_arg_generic(self) -> None: + class TestClass: + def test(self, arg0: str) -> None: + """ + test(self: TestClass, arg0: List[int]) + """ + + output: list[str] = [] + mod = ModuleType(TestClass.__module__, "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( + "test", + TestClass.test, + output=output, + class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), + ) + assert_equal(output, ["def test(self, arg0: List[int]) -> Any: ..."]) + assert_equal(gen.get_imports().splitlines(), []) + + def test_generate_c_type_with_double_arg_generic(self) -> None: + class TestClass: + def test(self, arg0: str) -> None: + """ + test(self: TestClass, arg0: Dict[str, int]) + """ + + output: list[str] = [] + mod = ModuleType(TestClass.__module__, "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( + "test", + TestClass.test, + output=output, + class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), + ) + assert_equal(output, ["def test(self, arg0: Dict[str, int]) -> Any: ..."]) + assert_equal(gen.get_imports().splitlines(), []) + + def test_generate_c_type_with_nested_generic(self) -> None: + class TestClass: + def test(self, arg0: str) -> None: + """ + test(self: TestClass, arg0: Dict[str, List[int]]) + """ + + output: list[str] = [] + mod = ModuleType(TestClass.__module__, "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], 
module=mod) + gen.generate_function_stub( + "test", + TestClass.test, + output=output, + class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), + ) + assert_equal(output, ["def test(self, arg0: Dict[str, List[int]]) -> Any: ..."]) + assert_equal(gen.get_imports().splitlines(), []) + + def test_generate_c_type_with_generic_using_other_module_first(self) -> None: + class TestClass: + def test(self, arg0: str) -> None: + """ + test(self: TestClass, arg0: Dict[argparse.Action, int]) + """ + + output: list[str] = [] + mod = ModuleType(TestClass.__module__, "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( + "test", + TestClass.test, + output=output, + class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), + ) + assert_equal(output, ["def test(self, arg0: Dict[argparse.Action, int]) -> Any: ..."]) + assert_equal(gen.get_imports().splitlines(), ["import argparse"]) + + def test_generate_c_type_with_generic_using_other_module_last(self) -> None: + class TestClass: + def test(self, arg0: str) -> None: + """ + test(self: TestClass, arg0: Dict[str, argparse.Action]) + """ + + output: list[str] = [] + mod = ModuleType(TestClass.__module__, "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( + "test", + TestClass.test, + output=output, + class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), + ) + assert_equal(output, ["def test(self, arg0: Dict[str, argparse.Action]) -> Any: ..."]) + assert_equal(gen.get_imports().splitlines(), ["import argparse"]) + + def test_generate_c_type_with_overload_pybind11(self) -> None: + class TestClass: + def __init__(self, arg0: str) -> None: + """ + __init__(*args, **kwargs) + Overloaded function. + + 1. __init__(self: TestClass, arg0: str) -> None + + 2. 
__init__(self: TestClass, arg0: str, arg1: str) -> None + """ + + output: list[str] = [] + mod = ModuleType(TestClass.__module__, "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( + "__init__", + TestClass.__init__, + output=output, + class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), + ) + assert_equal( + output, + [ + "@overload", + "def __init__(self, arg0: str) -> None: ...", + "@overload", + "def __init__(self, arg0: str, arg1: str) -> None: ...", + "@overload", + "def __init__(self, *args, **kwargs) -> Any: ...", + ], + ) + assert_equal(gen.get_imports().splitlines(), ["from typing import overload"]) + + def test_generate_c_type_with_overload_shiboken(self) -> None: + class TestClass: + """ + TestClass(self: TestClass, arg0: str) -> None + TestClass(self: TestClass, arg0: str, arg1: str) -> None + """ + + def __init__(self, arg0: str) -> None: + pass + + output: list[str] = [] + mod = ModuleType(TestClass.__module__, "") + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( + "__init__", + TestClass.__init__, + output=output, + class_info=ClassInfo( + self_var="self", + cls=TestClass, + name="TestClass", + docstring=getattr(TestClass, "__doc__", None), + ), + ) + assert_equal( + output, + [ + "@overload", + "def __init__(self, arg0: str) -> None: ...", + "@overload", + "def __init__(self, arg0: str, arg1: str) -> None: ...", + ], + ) + assert_equal(gen.get_imports().splitlines(), ["from typing import overload"]) + + +class ArgSigSuite(unittest.TestCase): + def test_repr(self) -> None: + assert_equal( + repr(ArgSig(name='asd"dsa')), "ArgSig(name='asd\"dsa', type=None, default=False)" + ) + assert_equal( + repr(ArgSig(name="asd'dsa")), 'ArgSig(name="asd\'dsa", type=None, default=False)' + ) + assert_equal(repr(ArgSig("func", "str")), "ArgSig(name='func', type='str', default=False)") + assert_equal( + repr(ArgSig("func", "str", default=True)), + "ArgSig(name='func', type='str', default=True)", + ) + + +class IsValidTypeSuite(unittest.TestCase): + def test_is_valid_type(self) -> None: + assert is_valid_type("int") + assert is_valid_type("str") + assert is_valid_type("Foo_Bar234") + assert is_valid_type("foo.bar") + assert is_valid_type("List[int]") + assert is_valid_type("Dict[str, int]") + assert is_valid_type("None") + assert is_valid_type("Literal[26]") + assert is_valid_type("Literal[0x1A]") + assert is_valid_type('Literal["hello world"]') + assert is_valid_type('Literal[b"hello world"]') + assert is_valid_type('Literal[u"hello world"]') + assert is_valid_type("Literal[True]") + assert is_valid_type("Literal[Color.RED]") + assert is_valid_type("Literal[None]") + assert is_valid_type("str | int") + assert is_valid_type("dict[str, int] | int") + assert is_valid_type("tuple[str, ...]") + assert is_valid_type( + 'Literal[26, 0x1A, "hello world", b"hello world", u"hello world", True, Color.RED, None]' + ) + assert not is_valid_type("foo-bar") + assert not is_valid_type("x->y") + assert not is_valid_type("True") + assert not is_valid_type("False") + assert not is_valid_type("x,y") + assert not is_valid_type("x, y") + + +class ModuleInspectSuite(unittest.TestCase): + def test_python_module(self) -> None: + with ModuleInspect() as m: + p = m.get_package_properties("inspect") + assert p is not None + assert p.name == "inspect" + assert p.file + assert p.path is None + assert p.is_c_module is False + assert p.subpackages == [] + + def 
test_python_package(self) -> None: + with ModuleInspect() as m: + p = m.get_package_properties("unittest") + assert p is not None + assert p.name == "unittest" + assert p.file + assert p.path + assert p.is_c_module is False + assert p.subpackages + assert all(sub.startswith("unittest.") for sub in p.subpackages) + + def test_c_module(self) -> None: + with ModuleInspect() as m: + p = m.get_package_properties("_socket") + assert p is not None + assert p.name == "_socket" + assert p.path is None + assert p.is_c_module is True + assert p.subpackages == [] + + def test_non_existent(self) -> None: + with ModuleInspect() as m: + with self.assertRaises(InspectError) as e: + m.get_package_properties("foobar-non-existent") + assert str(e.exception) == "No module named 'foobar-non-existent'" + + +def module_to_path(out_dir: str, module: str) -> str: + fnam = os.path.join(out_dir, f"{module.replace('.', '/')}.pyi") + if not os.path.exists(fnam): + alt_fnam = fnam.replace(".pyi", "/__init__.pyi") + if os.path.exists(alt_fnam): + return alt_fnam + return fnam diff --git a/.venv/lib/python3.12/site-packages/mypy/test/teststubinfo.py b/.venv/lib/python3.12/site-packages/mypy/test/teststubinfo.py new file mode 100644 index 0000000..ae34e78 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/teststubinfo.py @@ -0,0 +1,35 @@ +from __future__ import annotations + +import unittest + +from mypy.stubinfo import ( + is_module_from_legacy_bundled_package, + legacy_bundled_packages, + non_bundled_packages_flat, + stub_distribution_name, +) + + +class TestStubInfo(unittest.TestCase): + def test_is_legacy_bundled_packages(self) -> None: + assert not is_module_from_legacy_bundled_package("foobar_asdf") + assert not is_module_from_legacy_bundled_package("PIL") + assert is_module_from_legacy_bundled_package("pycurl") + assert is_module_from_legacy_bundled_package("dateparser") + + def test_stub_distribution_name(self) -> None: + assert stub_distribution_name("foobar_asdf") is None + assert stub_distribution_name("pycurl") == "types-pycurl" + assert stub_distribution_name("psutil") == "types-psutil" + assert stub_distribution_name("sassutils") == "types-libsass" + assert stub_distribution_name("google.cloud.ndb") == "types-google-cloud-ndb" + assert stub_distribution_name("google.cloud.ndb.submodule") == "types-google-cloud-ndb" + assert stub_distribution_name("google.cloud.unknown") is None + assert stub_distribution_name("google.protobuf") == "types-protobuf" + assert stub_distribution_name("google.protobuf.submodule") == "types-protobuf" + assert stub_distribution_name("google") is None + + def test_period_in_top_level(self) -> None: + for packages in (non_bundled_packages_flat, legacy_bundled_packages): + for top_level_module in packages: + assert "." 
not in top_level_module diff --git a/.venv/lib/python3.12/site-packages/mypy/test/teststubtest.py b/.venv/lib/python3.12/site-packages/mypy/test/teststubtest.py new file mode 100644 index 0000000..4bec5da --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/teststubtest.py @@ -0,0 +1,2965 @@ +from __future__ import annotations + +import contextlib +import inspect +import io +import os +import re +import sys +import tempfile +import textwrap +import unittest +from collections.abc import Iterator +from typing import Any, Callable + +from pytest import raises + +import mypy.stubtest +from mypy import build, nodes +from mypy.modulefinder import BuildSource +from mypy.options import Options +from mypy.stubtest import parse_options, test_stubs +from mypy.test.config import test_temp_dir +from mypy.test.data import root_dir + + +@contextlib.contextmanager +def use_tmp_dir(mod_name: str) -> Iterator[str]: + current = os.getcwd() + current_syspath = sys.path.copy() + with tempfile.TemporaryDirectory() as tmp: + try: + os.chdir(tmp) + if sys.path[0] != tmp: + sys.path.insert(0, tmp) + yield tmp + finally: + sys.path = current_syspath.copy() + if mod_name in sys.modules: + del sys.modules[mod_name] + + os.chdir(current) + + +TEST_MODULE_NAME = "test_module" + + +stubtest_typing_stub = """ +Any = object() + +class _SpecialForm: + def __getitem__(self, typeargs: Any) -> object: ... + +Callable: _SpecialForm = ... +Generic: _SpecialForm = ... +Protocol: _SpecialForm = ... +Union: _SpecialForm = ... +ClassVar: _SpecialForm = ... + +Final = 0 +Literal = 0 +TypedDict = 0 + +class TypeVar: + def __init__(self, name, covariant: bool = ..., contravariant: bool = ...) -> None: ... + +class ParamSpec: + def __init__(self, name: str) -> None: ... + +AnyStr = TypeVar("AnyStr", str, bytes) +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_K = TypeVar("_K") +_V = TypeVar("_V") +_S = TypeVar("_S", contravariant=True) +_R = TypeVar("_R", covariant=True) + +class Coroutine(Generic[_T_co, _S, _R]): ... +class Iterable(Generic[_T_co]): ... +class Iterator(Iterable[_T_co]): ... +class Mapping(Generic[_K, _V]): ... +class Match(Generic[AnyStr]): ... +class Sequence(Iterable[_T_co]): ... +class Tuple(Sequence[_T_co]): ... +class NamedTuple(tuple[Any, ...]): ... +class _TypedDict(Mapping[str, object]): + __required_keys__: ClassVar[frozenset[str]] + __optional_keys__: ClassVar[frozenset[str]] + __total__: ClassVar[bool] + __readonly_keys__: ClassVar[frozenset[str]] + __mutable_keys__: ClassVar[frozenset[str]] + __closed__: ClassVar[bool | None] + __extra_items__: ClassVar[Any] +def overload(func: _T) -> _T: ... +def type_check_only(func: _T) -> _T: ... +def final(func: _T) -> _T: ... +""" + +stubtest_builtins_stub = """ +from typing import Generic, Mapping, Sequence, TypeVar, overload + +T = TypeVar('T') +T_co = TypeVar('T_co', covariant=True) +KT = TypeVar('KT') +VT = TypeVar('VT') + +class object: + __module__: str + def __init__(self) -> None: pass + def __repr__(self) -> str: pass +class type: ... + +class tuple(Sequence[T_co], Generic[T_co]): + def __ge__(self, __other: tuple[T_co, ...]) -> bool: pass + +class dict(Mapping[KT, VT]): ... + +class frozenset(Generic[T]): ... + +class function: pass +class ellipsis: pass + +class int: ... +class float: ... +class bool(int): ... +class str: ... +class bytes: ... + +class list(Sequence[T]): ... + +def property(f: T) -> T: ... +def classmethod(f: T) -> T: ... +def staticmethod(f: T) -> T: ... 
+""" + +stubtest_enum_stub = """ +import sys +from typing import Any, TypeVar, Iterator + +_T = TypeVar('_T') + +class EnumMeta(type): + def __len__(self) -> int: pass + def __iter__(self: type[_T]) -> Iterator[_T]: pass + def __reversed__(self: type[_T]) -> Iterator[_T]: pass + def __getitem__(self: type[_T], name: str) -> _T: pass + +class Enum(metaclass=EnumMeta): + def __new__(cls: type[_T], value: object) -> _T: pass + def __repr__(self) -> str: pass + def __str__(self) -> str: pass + def __format__(self, format_spec: str) -> str: pass + def __hash__(self) -> Any: pass + def __reduce_ex__(self, proto: Any) -> Any: pass + name: str + value: Any + +class Flag(Enum): + def __or__(self: _T, other: _T) -> _T: pass + def __and__(self: _T, other: _T) -> _T: pass + def __xor__(self: _T, other: _T) -> _T: pass + def __invert__(self: _T) -> _T: pass + if sys.version_info >= (3, 11): + __ror__ = __or__ + __rand__ = __and__ + __rxor__ = __xor__ +""" + + +def build_helper(source: str) -> build.BuildResult: + return build.build( + sources=[BuildSource("main.pyi", None, textwrap.dedent(source))], + options=Options(), + alt_lib_path=test_temp_dir, + ) + + +def run_stubtest_with_stderr( + stub: str, + runtime: str, + options: list[str], + config_file: str | None = None, + output: io.StringIO | None = None, + outerr: io.StringIO | None = None, +) -> tuple[str, str]: + with use_tmp_dir(TEST_MODULE_NAME) as tmp_dir: + with open("builtins.pyi", "w") as f: + f.write(stubtest_builtins_stub) + with open("typing.pyi", "w") as f: + f.write(stubtest_typing_stub) + with open("enum.pyi", "w") as f: + f.write(stubtest_enum_stub) + with open(f"{TEST_MODULE_NAME}.pyi", "w") as f: + f.write(stub) + with open(f"{TEST_MODULE_NAME}.py", "w") as f: + f.write(runtime) + if config_file: + with open(f"{TEST_MODULE_NAME}_config.ini", "w") as f: + f.write(config_file) + options = options + ["--mypy-config-file", f"{TEST_MODULE_NAME}_config.ini"] + output = io.StringIO() if output is None else output + outerr = io.StringIO() if outerr is None else outerr + with contextlib.redirect_stdout(output), contextlib.redirect_stderr(outerr): + test_stubs(parse_options([TEST_MODULE_NAME] + options), use_builtins_fixtures=True) + filtered_output = remove_color_code( + output.getvalue() + # remove cwd as it's not available from outside + .replace(os.path.realpath(tmp_dir) + os.sep, "").replace(tmp_dir + os.sep, "") + ) + filtered_outerr = remove_color_code( + outerr.getvalue() + # remove cwd as it's not available from outside + .replace(os.path.realpath(tmp_dir) + os.sep, "").replace(tmp_dir + os.sep, "") + ) + return filtered_output, filtered_outerr + + +def run_stubtest( + stub: str, runtime: str, options: list[str], config_file: str | None = None +) -> str: + return run_stubtest_with_stderr(stub, runtime, options, config_file)[0] + + +class Case: + def __init__(self, stub: str, runtime: str, error: str | None) -> None: + self.stub = stub + self.runtime = runtime + self.error = error + + +def collect_cases(fn: Callable[..., Iterator[Case]]) -> Callable[..., None]: + """run_stubtest used to be slow, so we used this decorator to combine cases. + + If you're reading this and bored, feel free to refactor this and make it more like + other mypy tests. 
+ + """ + + def test(*args: Any, **kwargs: Any) -> None: + cases = list(fn(*args, **kwargs)) + expected_errors = set() + for c in cases: + if c.error is None: + continue + expected_error = c.error + if expected_error == "": + expected_error = TEST_MODULE_NAME + elif not expected_error.startswith(f"{TEST_MODULE_NAME}."): + expected_error = f"{TEST_MODULE_NAME}.{expected_error}" + assert expected_error not in expected_errors, ( + "collect_cases merges cases into a single stubtest invocation; we already " + "expect an error for {}".format(expected_error) + ) + expected_errors.add(expected_error) + output = run_stubtest( + stub="\n\n".join(textwrap.dedent(c.stub.lstrip("\n")) for c in cases), + runtime="\n\n".join(textwrap.dedent(c.runtime.lstrip("\n")) for c in cases), + options=["--generate-allowlist"], + ) + + actual_errors = set(output.splitlines()) + if actual_errors != expected_errors: + output = run_stubtest( + stub="\n\n".join(textwrap.dedent(c.stub.lstrip("\n")) for c in cases), + runtime="\n\n".join(textwrap.dedent(c.runtime.lstrip("\n")) for c in cases), + options=[], + ) + assert actual_errors == expected_errors, output + + return test + + +class StubtestUnit(unittest.TestCase): + @collect_cases + def test_basic_good(self) -> Iterator[Case]: + yield Case( + stub="def f(number: int, text: str) -> None: ...", + runtime="def f(number, text): pass", + error=None, + ) + yield Case( + stub=""" + class X: + def f(self, number: int, text: str) -> None: ... + """, + runtime=""" + class X: + def f(self, number, text): pass + """, + error=None, + ) + + @collect_cases + def test_types(self) -> Iterator[Case]: + yield Case( + stub="def mistyped_class() -> None: ...", + runtime="class mistyped_class: pass", + error="mistyped_class", + ) + yield Case( + stub="class mistyped_fn: ...", runtime="def mistyped_fn(): pass", error="mistyped_fn" + ) + yield Case( + stub=""" + class X: + def mistyped_var(self) -> int: ... + """, + runtime=""" + class X: + mistyped_var = 1 + """, + error="X.mistyped_var", + ) + + @collect_cases + def test_coroutines(self) -> Iterator[Case]: + yield Case(stub="def bar() -> int: ...", runtime="async def bar(): return 5", error="bar") + # Don't error for this one -- we get false positives otherwise + yield Case(stub="async def foo() -> int: ...", runtime="def foo(): return 5", error=None) + yield Case(stub="def baz() -> int: ...", runtime="def baz(): return 5", error=None) + yield Case( + stub="async def bingo() -> int: ...", runtime="async def bingo(): return 5", error=None + ) + + @collect_cases + def test_arg_name(self) -> Iterator[Case]: + yield Case( + stub="def bad(number: int, text: str) -> None: ...", + runtime="def bad(num, text) -> None: pass", + error="bad", + ) + yield Case( + stub="def good_posonly(__number: int, text: str) -> None: ...", + runtime="def good_posonly(num, /, text): pass", + error=None, + ) + yield Case( + stub="def bad_posonly(__number: int, text: str) -> None: ...", + runtime="def bad_posonly(flag, /, text): pass", + error="bad_posonly", + ) + yield Case( + stub=""" + class BadMethod: + def f(self, number: int, text: str) -> None: ... + """, + runtime=""" + class BadMethod: + def f(self, n, text): pass + """, + error="BadMethod.f", + ) + yield Case( + stub=""" + class GoodDunder: + def __exit__(self, t, v, tb) -> None: ... 
+ """, + runtime=""" + class GoodDunder: + def __exit__(self, exc_type, exc_val, exc_tb): pass + """, + error=None, + ) + yield Case( + stub="""def dunder_name(__x: int) -> None: ...""", + runtime="""def dunder_name(__x: int) -> None: ...""", + error=None, + ) + yield Case( + stub="""def dunder_name_posonly(__x: int, /) -> None: ...""", + runtime="""def dunder_name_posonly(__x: int) -> None: ...""", + error=None, + ) + yield Case( + stub="""def dunder_name_bad(x: int) -> None: ...""", + runtime="""def dunder_name_bad(__x: int) -> None: ...""", + error="dunder_name_bad", + ) + + @collect_cases + def test_arg_kind(self) -> Iterator[Case]: + yield Case( + stub="def runtime_kwonly(number: int, text: str) -> None: ...", + runtime="def runtime_kwonly(number, *, text): pass", + error="runtime_kwonly", + ) + yield Case( + stub="def stub_kwonly(number: int, *, text: str) -> None: ...", + runtime="def stub_kwonly(number, text): pass", + error="stub_kwonly", + ) + yield Case( + stub="def stub_posonly(__number: int, text: str) -> None: ...", + runtime="def stub_posonly(number, text): pass", + error="stub_posonly", + ) + yield Case( + stub="def good_posonly(__number: int, text: str) -> None: ...", + runtime="def good_posonly(number, /, text): pass", + error=None, + ) + yield Case( + stub="def runtime_posonly(number: int, text: str) -> None: ...", + runtime="def runtime_posonly(number, /, text): pass", + error="runtime_posonly", + ) + yield Case( + stub="def stub_posonly_570(number: int, /, text: str) -> None: ...", + runtime="def stub_posonly_570(number, text): pass", + error="stub_posonly_570", + ) + + @collect_cases + def test_private_parameters(self) -> Iterator[Case]: + # Private parameters can optionally be omitted. + yield Case( + stub="def priv_pos_arg_missing() -> None: ...", + runtime="def priv_pos_arg_missing(_p1=None): pass", + error=None, + ) + yield Case( + stub="def multi_priv_args() -> None: ...", + runtime="def multi_priv_args(_p='', _q=''): pass", + error=None, + ) + yield Case( + stub="def priv_kwarg_missing() -> None: ...", + runtime="def priv_kwarg_missing(*, _p2=''): pass", + error=None, + ) + # But if they are included, they must be correct. + yield Case( + stub="def priv_pos_arg_wrong(_p: int = ...) -> None: ...", + runtime="def priv_pos_arg_wrong(_p=None): pass", + error="priv_pos_arg_wrong", + ) + yield Case( + stub="def priv_kwarg_wrong(*, _p: int = ...) -> None: ...", + runtime="def priv_kwarg_wrong(*, _p=None): pass", + error="priv_kwarg_wrong", + ) + # Private parameters must have a default and start with exactly one + # underscore. + yield Case( + stub="def pos_arg_no_default() -> None: ...", + runtime="def pos_arg_no_default(_np): pass", + error="pos_arg_no_default", + ) + yield Case( + stub="def kwarg_no_default() -> None: ...", + runtime="def kwarg_no_default(*, _np): pass", + error="kwarg_no_default", + ) + yield Case( + stub="def double_underscore_pos_arg() -> None: ...", + runtime="def double_underscore_pos_arg(__np = None): pass", + error="double_underscore_pos_arg", + ) + yield Case( + stub="def double_underscore_kwarg() -> None: ...", + runtime="def double_underscore_kwarg(*, __np = None): pass", + error="double_underscore_kwarg", + ) + # But spot parameters that are accidentally not marked kw-only and + # vice-versa. + yield Case( + stub="def priv_arg_is_kwonly(_p=...) -> None: ...", + runtime="def priv_arg_is_kwonly(*, _p=''): pass", + error="priv_arg_is_kwonly", + ) + yield Case( + stub="def priv_arg_is_positional(*, _p=...) 
-> None: ...", + runtime="def priv_arg_is_positional(_p=''): pass", + error="priv_arg_is_positional", + ) + # Private parameters not at the end of the parameter list must be + # included so that users can pass the following arguments using + # positional syntax. + yield Case( + stub="def priv_args_not_at_end(*, q='') -> None: ...", + runtime="def priv_args_not_at_end(_p='', q=''): pass", + error="priv_args_not_at_end", + ) + + @collect_cases + def test_default_presence(self) -> Iterator[Case]: + yield Case( + stub="def f1(text: str = ...) -> None: ...", + runtime="def f1(text = 'asdf'): pass", + error=None, + ) + yield Case( + stub="def f2(text: str = ...) -> None: ...", runtime="def f2(text): pass", error="f2" + ) + yield Case( + stub="def f3(text: str) -> None: ...", + runtime="def f3(text = 'asdf'): pass", + error="f3", + ) + yield Case( + stub="def f4(text: str = ...) -> None: ...", + runtime="def f4(text = None): pass", + error="f4", + ) + yield Case( + stub="def f5(data: bytes = ...) -> None: ...", + runtime="def f5(data = 'asdf'): pass", + error="f5", + ) + yield Case( + stub=""" + from typing import TypeVar + _T = TypeVar("_T", bound=str) + def f6(text: _T = ...) -> None: ... + """, + runtime="def f6(text = None): pass", + error="f6", + ) + + @collect_cases + def test_default_value(self) -> Iterator[Case]: + yield Case( + stub="def f1(text: str = 'x') -> None: ...", + runtime="def f1(text = 'y'): pass", + error="f1", + ) + yield Case( + stub='def f2(text: bytes = b"x\'") -> None: ...', + runtime='def f2(text = b"x\'"): pass', + error=None, + ) + yield Case( + stub='def f3(text: bytes = b"y\'") -> None: ...', + runtime='def f3(text = b"x\'"): pass', + error="f3", + ) + yield Case( + stub="def f4(text: object = 1) -> None: ...", + runtime="def f4(text = 1.0): pass", + error="f4", + ) + yield Case( + stub="def f5(text: object = True) -> None: ...", + runtime="def f5(text = 1): pass", + error="f5", + ) + yield Case( + stub="def f6(text: object = True) -> None: ...", + runtime="def f6(text = True): pass", + error=None, + ) + yield Case( + stub="def f7(text: object = not True) -> None: ...", + runtime="def f7(text = False): pass", + error=None, + ) + yield Case( + stub="def f8(text: object = not True) -> None: ...", + runtime="def f8(text = True): pass", + error="f8", + ) + yield Case( + stub="def f9(text: object = {1: 2}) -> None: ...", + runtime="def f9(text = {1: 3}): pass", + error="f9", + ) + yield Case( + stub="def f10(text: object = [1, 2]) -> None: ...", + runtime="def f10(text = [1, 2]): pass", + error=None, + ) + + # Simulate "" + yield Case( + stub="def f11() -> None: ...", + runtime=""" + def f11(text=None) -> None: pass + f11.__text_signature__ = "(text=)" + """, + error="f11", + ) + + # Simulate numpy ndarray.__bool__ that raises an error + yield Case( + stub="def f12(x=1): ...", + runtime=""" + class _ndarray: + def __eq__(self, obj): return self + def __bool__(self): raise ValueError + def f12(x=_ndarray()) -> None: pass + """, + error="f12", + ) + + @collect_cases + def test_static_class_method(self) -> Iterator[Case]: + yield Case( + stub=""" + class Good: + @classmethod + def f(cls, number: int, text: str) -> None: ... + """, + runtime=""" + class Good: + @classmethod + def f(cls, number, text): pass + """, + error=None, + ) + yield Case( + stub=""" + class Bad1: + def f(cls, number: int, text: str) -> None: ... 
+ """, + runtime=""" + class Bad1: + @classmethod + def f(cls, number, text): pass + """, + error="Bad1.f", + ) + yield Case( + stub=""" + class Bad2: + @classmethod + def f(cls, number: int, text: str) -> None: ... + """, + runtime=""" + class Bad2: + @staticmethod + def f(self, number, text): pass + """, + error="Bad2.f", + ) + yield Case( + stub=""" + class Bad3: + @staticmethod + def f(cls, number: int, text: str) -> None: ... + """, + runtime=""" + class Bad3: + @classmethod + def f(self, number, text): pass + """, + error="Bad3.f", + ) + yield Case( + stub=""" + class GoodNew: + def __new__(cls, *args, **kwargs): ... + """, + runtime=""" + class GoodNew: + def __new__(cls, *args, **kwargs): pass + """, + error=None, + ) + + @collect_cases + def test_arg_mismatch(self) -> Iterator[Case]: + yield Case( + stub="def f1(a, *, b, c) -> None: ...", runtime="def f1(a, *, b, c): pass", error=None + ) + yield Case( + stub="def f2(a, *, b) -> None: ...", runtime="def f2(a, *, b, c): pass", error="f2" + ) + yield Case( + stub="def f3(a, *, b, c) -> None: ...", runtime="def f3(a, *, b): pass", error="f3" + ) + yield Case( + stub="def f4(a, *, b, c) -> None: ...", runtime="def f4(a, b, *, c): pass", error="f4" + ) + yield Case( + stub="def f5(a, b, *, c) -> None: ...", runtime="def f5(a, *, b, c): pass", error="f5" + ) + + @collect_cases + def test_varargs_varkwargs(self) -> Iterator[Case]: + yield Case( + stub="def f1(*args, **kwargs) -> None: ...", + runtime="def f1(*args, **kwargs): pass", + error=None, + ) + yield Case( + stub="def f2(*args, **kwargs) -> None: ...", + runtime="def f2(**kwargs): pass", + error="f2", + ) + yield Case( + stub="def g1(a, b, c, d) -> None: ...", runtime="def g1(a, *args): pass", error=None + ) + yield Case( + stub="def g2(a, b, c, d, *args) -> None: ...", runtime="def g2(a): pass", error="g2" + ) + yield Case( + stub="def g3(a, b, c, d, *args) -> None: ...", + runtime="def g3(a, *args): pass", + error=None, + ) + yield Case( + stub="def h1(a) -> None: ...", runtime="def h1(a, b, c, d, *args): pass", error="h1" + ) + yield Case( + stub="def h2(a, *args) -> None: ...", runtime="def h2(a, b, c, d): pass", error="h2" + ) + yield Case( + stub="def h3(a, *args) -> None: ...", + runtime="def h3(a, b, c, d, *args): pass", + error="h3", + ) + yield Case( + stub="def j1(a: int, *args) -> None: ...", runtime="def j1(a): pass", error="j1" + ) + yield Case( + stub="def j2(a: int) -> None: ...", runtime="def j2(a, *args): pass", error="j2" + ) + yield Case( + stub="def j3(a, b, c) -> None: ...", runtime="def j3(a, *args, c): pass", error="j3" + ) + yield Case(stub="def k1(a, **kwargs) -> None: ...", runtime="def k1(a): pass", error="k1") + yield Case( + # In theory an error, but led to worse results in practice + stub="def k2(a) -> None: ...", + runtime="def k2(a, **kwargs): pass", + error=None, + ) + yield Case( + stub="def k3(a, b) -> None: ...", runtime="def k3(a, **kwargs): pass", error="k3" + ) + yield Case( + stub="def k4(a, *, b) -> None: ...", runtime="def k4(a, **kwargs): pass", error=None + ) + yield Case( + stub="def k5(a, *, b) -> None: ...", + runtime="def k5(a, *, b, c, **kwargs): pass", + error="k5", + ) + yield Case( + stub="def k6(a, *, b, **kwargs) -> None: ...", + runtime="def k6(a, *, b, c, **kwargs): pass", + error="k6", + ) + + @collect_cases + def test_overload(self) -> Iterator[Case]: + yield Case( + stub=""" + from typing import overload + + @overload + def f1(a: int, *, c: int = ...) -> int: ... + @overload + def f1(a: int, b: int, c: int = ...) 
-> str: ... + """, + runtime="def f1(a, b = 0, c = 0): pass", + error=None, + ) + yield Case( + stub=""" + @overload + def f2(a: int, *, c: int = ...) -> int: ... + @overload + def f2(a: int, b: int, c: int = ...) -> str: ... + """, + runtime="def f2(a, b, c = 0): pass", + error="f2", + ) + yield Case( + stub=""" + @overload + def f3(a: int) -> int: ... + @overload + def f3(a: int, b: str) -> str: ... + """, + runtime="def f3(a, b = None): pass", + error="f3", + ) + yield Case( + stub=""" + @overload + def f4(a: int, *args, b: int, **kwargs) -> int: ... + @overload + def f4(a: str, *args, b: int, **kwargs) -> str: ... + """, + runtime="def f4(a, *args, b, **kwargs): pass", + error=None, + ) + yield Case( + stub=""" + @overload + def f5(__a: int) -> int: ... + @overload + def f5(__b: str) -> str: ... + """, + runtime="def f5(x, /): pass", + error=None, + ) + yield Case( + stub=""" + from typing import final + from typing_extensions import deprecated + class Foo: + @overload + @final + def f6(self, __a: int) -> int: ... + @overload + @deprecated("evil") + def f6(self, __b: str) -> str: ... + """, + runtime=""" + class Foo: + def f6(self, x, /): pass + """, + error=None, + ) + yield Case( + stub=""" + @overload + def f7(a: int, /) -> int: ... + @overload + def f7(b: str, /) -> str: ... + """, + runtime="def f7(x, /): pass", + error=None, + ) + yield Case( + stub=""" + @overload + def f8(a: int, c: int = 0, /) -> int: ... + @overload + def f8(b: str, d: int, /) -> str: ... + """, + runtime="def f8(x, y, /): pass", + error="f8", + ) + yield Case( + stub=""" + @overload + def f9(a: int, c: int = 0, /) -> int: ... + @overload + def f9(b: str, d: int, /) -> str: ... + """, + runtime="def f9(x, y=0, /): pass", + error=None, + ) + yield Case( + stub=""" + class Bar: + @overload + def f1(self) -> int: ... + @overload + def f1(self, a: int, /) -> int: ... + + @overload + def f2(self, a: int, /) -> int: ... + @overload + def f2(self, a: str, /) -> int: ... + """, + runtime=""" + class Bar: + def f1(self, *a) -> int: ... + def f2(self, *a) -> int: ... + """, + error=None, + ) + yield Case( + stub=""" + @overload + def f(a: int) -> int: ... + @overload + def f(a: int, b: str, /) -> str: ... + """, + runtime=""" + def f(a, *args): ... + """, + error=None, + ) + + @collect_cases + def test_property(self) -> Iterator[Case]: + yield Case( + stub=""" + class Good: + @property + def read_only_attr(self) -> int: ... + read_only_attr_alias = read_only_attr + """, + runtime=""" + class Good: + @property + def read_only_attr(self): return 1 + read_only_attr_alias = read_only_attr + """, + error=None, + ) + yield Case( + stub=""" + class Bad: + @property + def f(self) -> int: ... + """, + runtime=""" + class Bad: + def f(self) -> int: return 1 + """, + error="Bad.f", + ) + yield Case( + stub=""" + class GoodReadOnly: + @property + def f(self) -> int: ... + """, + runtime=""" + class GoodReadOnly: + f = 1 + """, + error=None, + ) + yield Case( + stub=""" + class BadReadOnly: + @property + def f(self) -> str: ... + """, + runtime=""" + class BadReadOnly: + f = 1 + """, + error="BadReadOnly.f", + ) + yield Case( + stub=""" + class Y: + @property + def read_only_attr(self) -> int: ... + @read_only_attr.setter + def read_only_attr(self, val: int) -> None: ... + """, + runtime=""" + class Y: + @property + def read_only_attr(self): return 5 + """, + error="Y.read_only_attr", + ) + yield Case( + stub=""" + class Z: + @property + def read_write_attr(self) -> int: ... 
+ @read_write_attr.setter + def read_write_attr(self, val: int) -> None: ... + read_write_attr_alias = read_write_attr + """, + runtime=""" + class Z: + @property + def read_write_attr(self): return self._val + @read_write_attr.setter + def read_write_attr(self, val): self._val = val + read_write_attr_alias = read_write_attr + """, + error=None, + ) + yield Case( + stub=""" + class FineAndDandy: + @property + def attr(self) -> int: ... + """, + runtime=""" + class _EvilDescriptor: + def __get__(self, instance, ownerclass=None): + if instance is None: + raise AttributeError('no') + return 42 + def __set__(self, instance, value): + raise AttributeError('no') + + class FineAndDandy: + attr = _EvilDescriptor() + """, + error=None, + ) + + @collect_cases + def test_cached_property(self) -> Iterator[Case]: + yield Case( + stub=""" + from functools import cached_property + class Good: + @cached_property + def read_only_attr(self) -> int: ... + @cached_property + def read_only_attr2(self) -> int: ... + """, + runtime=""" + import functools as ft + from functools import cached_property + class Good: + @cached_property + def read_only_attr(self): return 1 + @ft.cached_property + def read_only_attr2(self): return 1 + """, + error=None, + ) + yield Case( + stub=""" + from functools import cached_property + class Bad: + @cached_property + def f(self) -> int: ... + """, + runtime=""" + class Bad: + def f(self) -> int: return 1 + """, + error="Bad.f", + ) + yield Case( + stub=""" + from functools import cached_property + class GoodCachedAttr: + @cached_property + def f(self) -> int: ... + """, + runtime=""" + class GoodCachedAttr: + f = 1 + """, + error=None, + ) + yield Case( + stub=""" + from functools import cached_property + class BadCachedAttr: + @cached_property + def f(self) -> str: ... + """, + runtime=""" + class BadCachedAttr: + f = 1 + """, + error="BadCachedAttr.f", + ) + yield Case( + stub=""" + from functools import cached_property + from typing import final + class FinalGood: + @cached_property + @final + def attr(self) -> int: ... + """, + runtime=""" + from functools import cached_property + from typing import final + class FinalGood: + @cached_property + @final + def attr(self): + return 1 + """, + error=None, + ) + yield Case( + stub=""" + from functools import cached_property + class FinalBad: + @cached_property + def attr(self) -> int: ... 
+ """, + runtime=""" + from functools import cached_property + from typing_extensions import final + class FinalBad: + @cached_property + @final + def attr(self): + return 1 + """, + error="FinalBad.attr", + ) + + @collect_cases + def test_var(self) -> Iterator[Case]: + yield Case(stub="x1: int", runtime="x1 = 5", error=None) + yield Case(stub="x2: str", runtime="x2 = 5", error="x2") + yield Case("from typing import Tuple", "", None) # dummy case + yield Case( + stub=""" + x3: Tuple[int, int] + """, + runtime="x3 = (1, 3)", + error=None, + ) + yield Case( + stub=""" + x4: Tuple[int, int] + """, + runtime="x4 = (1, 3, 5)", + error="x4", + ) + yield Case(stub="x5: int", runtime="def x5(a, b): pass", error="x5") + yield Case( + stub="def foo(a: int, b: int) -> None: ...\nx6 = foo", + runtime="def foo(a, b): pass\ndef x6(c, d): pass", + error="x6", + ) + yield Case( + stub=""" + class X: + f: int + """, + runtime=""" + class X: + def __init__(self): + self.f = "asdf" + """, + error=None, + ) + yield Case( + stub=""" + class Y: + read_only_attr: int + """, + runtime=""" + class Y: + @property + def read_only_attr(self): return 5 + """, + error="Y.read_only_attr", + ) + yield Case( + stub=""" + class Z: + read_write_attr: int + """, + runtime=""" + class Z: + @property + def read_write_attr(self): return self._val + @read_write_attr.setter + def read_write_attr(self, val): self._val = val + """, + error=None, + ) + + @collect_cases + def test_type_alias(self) -> Iterator[Case]: + yield Case( + stub=""" + import collections.abc + import re + import typing + from typing import Callable, Dict, Generic, Iterable, List, Match, Tuple, TypeVar, Union + """, + runtime=""" + import collections.abc + import re + from typing import Callable, Dict, Generic, Iterable, List, Match, Tuple, TypeVar, Union + """, + error=None, + ) + yield Case( + stub=""" + class X: + def f(self) -> None: ... + Y = X + """, + runtime=""" + class X: + def f(self) -> None: ... + class Y: ... + """, + error="Y.f", + ) + yield Case(stub="A = Tuple[int, str]", runtime="A = (int, str)", error="A") + # Error if an alias isn't present at runtime... + yield Case(stub="B = str", runtime="", error="B") + # ... but only if the alias isn't private + yield Case(stub="_C = int", runtime="", error=None) + yield Case( + stub=""" + D = tuple[str, str] + E = Tuple[int, int, int] + F = Tuple[str, int] + """, + runtime=""" + D = Tuple[str, str] + E = Tuple[int, int, int] + F = List[str] + """, + error="F", + ) + yield Case( + stub=""" + G = str | int + H = Union[str, bool] + I = str | int + """, + runtime=""" + G = Union[str, int] + H = Union[str, bool] + I = str + """, + error="I", + ) + yield Case( + stub=""" + K = dict[str, str] + L = Dict[int, int] + KK = collections.abc.Iterable[str] + LL = typing.Iterable[str] + """, + runtime=""" + K = Dict[str, str] + L = Dict[int, int] + KK = Iterable[str] + LL = Iterable[str] + """, + error=None, + ) + yield Case( + stub=""" + _T = TypeVar("_T") + class _Spam(Generic[_T]): + def foo(self) -> None: ... 
+ IntFood = _Spam[int] + """, + runtime=""" + _T = TypeVar("_T") + class _Bacon(Generic[_T]): + def foo(self, arg): pass + IntFood = _Bacon[int] + """, + error="IntFood.foo", + ) + yield Case(stub="StrList = list[str]", runtime="StrList = ['foo', 'bar']", error="StrList") + yield Case( + stub=""" + N = typing.Callable[[str], bool] + O = collections.abc.Callable[[int], str] + P = typing.Callable[[str], bool] + """, + runtime=""" + N = Callable[[str], bool] + O = Callable[[int], str] + P = int + """, + error="P", + ) + yield Case( + stub=""" + class Foo: + class Bar: ... + BarAlias = Foo.Bar + """, + runtime=""" + class Foo: + class Bar: pass + BarAlias = Foo.Bar + """, + error=None, + ) + yield Case( + stub=""" + from io import StringIO + StringIOAlias = StringIO + """, + runtime=""" + from _io import StringIO + StringIOAlias = StringIO + """, + error=None, + ) + yield Case(stub="M = Match[str]", runtime="M = Match[str]", error=None) + yield Case( + stub=""" + class Baz: + def fizz(self) -> None: ... + BazAlias = Baz + """, + runtime=""" + class Baz: + def fizz(self): pass + BazAlias = Baz + Baz.__name__ = Baz.__qualname__ = Baz.__module__ = "New" + """, + error=None, + ) + yield Case( + stub=""" + class FooBar: + __module__: None # type: ignore + def fizz(self) -> None: ... + FooBarAlias = FooBar + """, + runtime=""" + class FooBar: + def fizz(self): pass + FooBarAlias = FooBar + FooBar.__module__ = None + """, + error=None, + ) + if sys.version_info >= (3, 10): + yield Case( + stub=""" + Q = Dict[str, str] + R = dict[int, int] + S = Tuple[int, int] + T = tuple[str, str] + U = int | str + V = Union[int, str] + W = typing.Callable[[str], bool] + Z = collections.abc.Callable[[str], bool] + QQ = typing.Iterable[str] + RR = collections.abc.Iterable[str] + MM = typing.Match[str] + MMM = re.Match[str] + """, + runtime=""" + Q = dict[str, str] + R = dict[int, int] + S = tuple[int, int] + T = tuple[str, str] + U = int | str + V = int | str + W = collections.abc.Callable[[str], bool] + Z = collections.abc.Callable[[str], bool] + QQ = collections.abc.Iterable[str] + RR = collections.abc.Iterable[str] + MM = re.Match[str] + MMM = re.Match[str] + """, + error=None, + ) + + @collect_cases + def test_enum(self) -> Iterator[Case]: + yield Case(stub="import enum", runtime="import enum", error=None) + yield Case( + stub=""" + class X(enum.Enum): + a = ... + b = "asdf" + c = "oops" + """, + runtime=""" + class X(enum.Enum): + a = 1 + b = "asdf" + c = 2 + """, + error="X.c", + ) + yield Case( + stub=""" + class Flags1(enum.Flag): + a = ... + b = 2 + def foo(x: Flags1 = ...) -> None: ... + """, + runtime=""" + class Flags1(enum.Flag): + a = 1 + b = 2 + def foo(x=Flags1.a|Flags1.b): pass + """, + error=None, + ) + yield Case( + stub=""" + class Flags2(enum.Flag): + a = ... + b = 2 + def bar(x: Flags2 | None = None) -> None: ... + """, + runtime=""" + class Flags2(enum.Flag): + a = 1 + b = 2 + def bar(x=Flags2.a|Flags2.b): pass + """, + error="bar", + ) + yield Case( + stub=""" + class Flags3(enum.Flag): + a = ... + b = 2 + def baz(x: Flags3 | None = ...) -> None: ... + """, + runtime=""" + class Flags3(enum.Flag): + a = 1 + b = 2 + def baz(x=Flags3(0)): pass + """, + error=None, + ) + yield Case( + runtime=""" + import enum + class SomeObject: ... + + class WeirdEnum(enum.Enum): + a = SomeObject() + b = SomeObject() + """, + stub=""" + import enum + class SomeObject: ... + class WeirdEnum(enum.Enum): + _value_: SomeObject + a = ... + b = ... 
+ """, + error=None, + ) + yield Case( + stub=""" + class Flags4(enum.Flag): + a = 1 + b = 2 + def spam(x: Flags4 | None = None) -> None: ... + """, + runtime=""" + class Flags4(enum.Flag): + a = 1 + b = 2 + def spam(x=Flags4(0)): pass + """, + error="spam", + ) + yield Case( + stub=""" + import sys + from typing import Final, Literal + class BytesEnum(bytes, enum.Enum): + a = b'foo' + + FOO: Literal[BytesEnum.a] + BAR: Final = BytesEnum.a + BAZ: BytesEnum + EGGS: bytes + """, + runtime=""" + class BytesEnum(bytes, enum.Enum): + a = b'foo' + FOO = BytesEnum.a + BAR = BytesEnum.a + BAZ = BytesEnum.a + EGGS = BytesEnum.a + """, + error=None, + ) + yield Case( + stub=""" + class HasSlotsAndNothingElse: + __slots__ = ("x",) + x: int + + class HasInheritedSlots(HasSlotsAndNothingElse): + pass + + class HasEmptySlots: + __slots__ = () + """, + runtime=""" + class HasSlotsAndNothingElse: + __slots__ = ("x",) + x: int + + class HasInheritedSlots(HasSlotsAndNothingElse): + pass + + class HasEmptySlots: + __slots__ = () + """, + error=None, + ) + yield Case( + stub=""" + class HasCompatibleValue(enum.Enum): + _value_: str + FOO = ... + """, + runtime=""" + class HasCompatibleValue(enum.Enum): + FOO = "foo" + """, + error=None, + ) + yield Case( + stub=""" + class HasIncompatibleValue(enum.Enum): + _value_: int + FOO = ... + """, + runtime=""" + class HasIncompatibleValue(enum.Enum): + FOO = "foo" + """, + error="HasIncompatibleValue.FOO", + ) + + @collect_cases + def test_decorator(self) -> Iterator[Case]: + yield Case( + stub=""" + from typing import Any, Callable + def decorator(f: Callable[[], int]) -> Callable[..., Any]: ... + @decorator + def f() -> Any: ... + """, + runtime=""" + def decorator(f): return f + @decorator + def f(): return 3 + """, + error=None, + ) + + @collect_cases + def test_all_at_runtime_not_stub(self) -> Iterator[Case]: + yield Case( + stub="Z: int", + runtime=""" + __all__ = [] + Z = 5""", + error="__all__", + ) + + @collect_cases + def test_all_in_stub_not_at_runtime(self) -> Iterator[Case]: + yield Case(stub="__all__ = ()", runtime="", error="__all__") + + @collect_cases + def test_all_in_stub_different_to_all_at_runtime(self) -> Iterator[Case]: + # We *should* emit an error with the module name itself + __all__, + # if the stub *does* define __all__, + # but the stub's __all__ is inconsistent with the runtime's __all__ + yield Case( + stub=""" + __all__ = ['foo'] + foo: str + """, + runtime=""" + __all__ = [] + foo = 'foo' + """, + error="__all__", + ) + + @collect_cases + def test_missing(self) -> Iterator[Case]: + yield Case(stub="x = 5", runtime="", error="x") + yield Case(stub="def f(): ...", runtime="", error="f") + yield Case(stub="class X: ...", runtime="", error="X") + yield Case( + stub=""" + from typing import overload + @overload + def h(x: int): ... + @overload + def h(x: str): ... 
+ """, + runtime="", + error="h", + ) + yield Case(stub="", runtime="__all__ = []", error="__all__") # dummy case + yield Case(stub="", runtime="__all__ += ['y']\ny = 5", error="y") + yield Case(stub="", runtime="__all__ += ['g']\ndef g(): pass", error="g") + # Here we should only check that runtime has B, since the stub explicitly re-exports it + yield Case( + stub="from mystery import A, B as B, C as D # type: ignore", runtime="", error="B" + ) + yield Case( + stub="class Y: ...", + runtime="__all__ += ['Y']\nclass Y:\n def __or__(self, other): return self|other", + error="Y.__or__", + ) + yield Case( + stub="class Z: ...", + runtime="__all__ += ['Z']\nclass Z:\n def __reduce__(self): return (Z,)", + error=None, + ) + # __call__ exists on type, so it appears to exist on the class. + # This checks that we identify it as missing at runtime anyway. + yield Case( + stub=""" + class ClassWithMetaclassOverride: + def __call__(*args, **kwds): ... + """, + runtime="class ClassWithMetaclassOverride: ...", + error="ClassWithMetaclassOverride.__call__", + ) + # Test that we ignore object.__setattr__ and object.__delattr__ inheritance + yield Case( + stub=""" + from typing import Any + class FakeSetattrClass: + def __setattr__(self, name: str, value: Any, /) -> None: ... + """, + runtime="class FakeSetattrClass: ...", + error="FakeSetattrClass.__setattr__", + ) + yield Case( + stub=""" + class FakeDelattrClass: + def __delattr__(self, name: str, /) -> None: ... + """, + runtime="class FakeDelattrClass: ...", + error="FakeDelattrClass.__delattr__", + ) + + @collect_cases + def test_missing_no_runtime_all(self) -> Iterator[Case]: + yield Case(stub="", runtime="import sys", error=None) + yield Case(stub="", runtime="def g(): ...", error="g") + yield Case(stub="", runtime="CONSTANT = 0", error="CONSTANT") + yield Case(stub="", runtime="import re; constant = re.compile('foo')", error="constant") + yield Case(stub="", runtime="from json.scanner import NUMBER_RE", error=None) + yield Case(stub="", runtime="from string import ascii_letters", error=None) + + @collect_cases + def test_missing_no_runtime_all_terrible(self) -> Iterator[Case]: + yield Case( + stub="", + runtime=""" +import sys +import types +import __future__ +_m = types.SimpleNamespace() +_m.annotations = __future__.annotations +sys.modules["_terrible_stubtest_test_module"] = _m + +from _terrible_stubtest_test_module import * +assert annotations +""", + error=None, + ) + + @collect_cases + def test_non_public_1(self) -> Iterator[Case]: + yield Case( + stub="__all__: list[str]", runtime="", error=f"{TEST_MODULE_NAME}.__all__" + ) # dummy case + yield Case(stub="_f: int", runtime="def _f(): ...", error="_f") + + @collect_cases + def test_non_public_2(self) -> Iterator[Case]: + yield Case(stub="__all__: list[str] = ['f']", runtime="__all__ = ['f']", error=None) + yield Case(stub="f: int", runtime="def f(): ...", error="f") + yield Case(stub="g: int", runtime="def g(): ...", error="g") + + @collect_cases + def test_dunders(self) -> Iterator[Case]: + yield Case( + stub="class A:\n def __init__(self, a: int, b: int) -> None: ...", + runtime="class A:\n def __init__(self, a, bx): pass", + error="A.__init__", + ) + yield Case( + stub="class B:\n def __call__(self, c: int, d: int) -> None: ...", + runtime="class B:\n def __call__(self, c, dx): pass", + error="B.__call__", + ) + yield Case( + stub=( + "class C:\n" + " def __init_subclass__(\n" + " cls, e: int = ..., **kwargs: int\n" + " ) -> None: ...\n" + ), + runtime="class C:\n def 
__init_subclass__(cls, e=1, **kwargs): pass", + error=None, + ) + yield Case( + stub="class D:\n def __class_getitem__(cls, type: type) -> type: ...", + runtime="class D:\n def __class_getitem__(cls, type): ...", + error=None, + ) + + @collect_cases + def test_not_subclassable(self) -> Iterator[Case]: + yield Case( + stub="class CanBeSubclassed: ...", runtime="class CanBeSubclassed: ...", error=None + ) + yield Case( + stub="class CannotBeSubclassed:\n def __init_subclass__(cls) -> None: ...", + runtime="class CannotBeSubclassed:\n def __init_subclass__(cls): raise TypeError", + error="CannotBeSubclassed", + ) + + @collect_cases + def test_disjoint_base(self) -> Iterator[Case]: + yield Case( + stub=""" + class A: pass + """, + runtime=""" + class A: pass + """, + error=None, + ) + yield Case( + stub=""" + from typing_extensions import disjoint_base + + @disjoint_base + class B: pass + """, + runtime=""" + class B: pass + """, + error="test_module.B", + ) + yield Case( + stub=""" + from typing_extensions import Self + + class mytakewhile: + def __new__(cls, predicate: object, iterable: object, /) -> Self: ... + def __iter__(self) -> Self: ... + def __next__(self) -> object: ... + """, + runtime=""" + from itertools import takewhile as mytakewhile + """, + # Should have @disjoint_base + error="test_module.mytakewhile", + ) + yield Case( + stub=""" + from typing_extensions import disjoint_base, Self + + @disjoint_base + class mycorrecttakewhile: + def __new__(cls, predicate: object, iterable: object, /) -> Self: ... + def __iter__(self) -> Self: ... + def __next__(self) -> object: ... + """, + runtime=""" + from itertools import takewhile as mycorrecttakewhile + """, + error=None, + ) + yield Case( + runtime=""" + class IsDisjointBaseBecauseItHasSlots: + __slots__ = ("a",) + a: int + """, + stub=""" + from typing_extensions import disjoint_base + + @disjoint_base + class IsDisjointBaseBecauseItHasSlots: + a: int + """, + error="test_module.IsDisjointBaseBecauseItHasSlots", + ) + yield Case( + runtime=""" + class IsFinalSoDisjointBaseIsRedundant: ... + """, + stub=""" + from typing_extensions import disjoint_base, final + + @final + @disjoint_base + class IsFinalSoDisjointBaseIsRedundant: ... + """, + error="test_module.IsFinalSoDisjointBaseIsRedundant", + ) + yield Case( + runtime=""" + import enum + + class IsEnumWithMembersSoDisjointBaseIsRedundant(enum.Enum): + A = 1 + B = 2 + """, + stub=""" + from typing_extensions import disjoint_base + import enum + + @disjoint_base + class IsEnumWithMembersSoDisjointBaseIsRedundant(enum.Enum): + A = 1 + B = 2 + """, + error="test_module.IsEnumWithMembersSoDisjointBaseIsRedundant", + ) + + @collect_cases + def test_has_runtime_final_decorator(self) -> Iterator[Case]: + yield Case( + stub="from typing_extensions import final", + runtime=""" + import functools + from typing_extensions import final + """, + error=None, + ) + yield Case( + stub=""" + @final + class A: ... + """, + runtime=""" + @final + class A: ... + """, + error=None, + ) + yield Case( # Runtime can miss `@final` decorator + stub=""" + @final + class B: ... + """, + runtime=""" + class B: ... + """, + error=None, + ) + yield Case( # Stub cannot miss `@final` decorator + stub=""" + class C: ... + """, + runtime=""" + @final + class C: ... + """, + error="C", + ) + yield Case( + stub=""" + class D: + @final + def foo(self) -> None: ... + @final + @staticmethod + def bar() -> None: ... + @staticmethod + @final + def bar2() -> None: ... + @final + @classmethod + def baz(cls) -> None: ... 
+ @classmethod + @final + def baz2(cls) -> None: ... + @property + @final + def eggs(self) -> int: ... + @final + @property + def eggs2(self) -> int: ... + @final + def ham(self, obj: int) -> int: ... + """, + runtime=""" + class D: + @final + def foo(self): pass + @final + @staticmethod + def bar(): pass + @staticmethod + @final + def bar2(): pass + @final + @classmethod + def baz(cls): pass + @classmethod + @final + def baz2(cls): pass + @property + @final + def eggs(self): return 42 + @final + @property + def eggs2(self): pass + @final + @functools.lru_cache() + def ham(self, obj): return obj * 2 + """, + error=None, + ) + # Stub methods are allowed to have @final even if the runtime doesn't... + yield Case( + stub=""" + class E: + @final + def foo(self) -> None: ... + @final + @staticmethod + def bar() -> None: ... + @staticmethod + @final + def bar2() -> None: ... + @final + @classmethod + def baz(cls) -> None: ... + @classmethod + @final + def baz2(cls) -> None: ... + @property + @final + def eggs(self) -> int: ... + @final + @property + def eggs2(self) -> int: ... + @final + def ham(self, obj: int) -> int: ... + """, + runtime=""" + class E: + def foo(self): pass + @staticmethod + def bar(): pass + @staticmethod + def bar2(): pass + @classmethod + def baz(cls): pass + @classmethod + def baz2(cls): pass + @property + def eggs(self): return 42 + @property + def eggs2(self): return 42 + @functools.lru_cache() + def ham(self, obj): return obj * 2 + """, + error=None, + ) + # ...But if the runtime has @final, the stub must have it as well + yield Case( + stub=""" + class F: + def foo(self) -> None: ... + """, + runtime=""" + class F: + @final + def foo(self): pass + """, + error="F.foo", + ) + yield Case( + stub=""" + class G: + @staticmethod + def foo() -> None: ... + """, + runtime=""" + class G: + @final + @staticmethod + def foo(): pass + """, + error="G.foo", + ) + yield Case( + stub=""" + class H: + @staticmethod + def foo() -> None: ... + """, + runtime=""" + class H: + @staticmethod + @final + def foo(): pass + """, + error="H.foo", + ) + yield Case( + stub=""" + class I: + @classmethod + def foo(cls) -> None: ... + """, + runtime=""" + class I: + @final + @classmethod + def foo(cls): pass + """, + error="I.foo", + ) + yield Case( + stub=""" + class J: + @classmethod + def foo(cls) -> None: ... + """, + runtime=""" + class J: + @classmethod + @final + def foo(cls): pass + """, + error="J.foo", + ) + yield Case( + stub=""" + class K: + @property + def foo(self) -> int: ... + """, + runtime=""" + class K: + @property + @final + def foo(self): return 42 + """, + error="K.foo", + ) + # This test wouldn't pass, + # because the runtime can't set __final__ on instances of builtins.property, + # so stubtest has non way of knowing that the runtime was decorated with @final: + # + # yield Case( + # stub=""" + # class K2: + # @property + # def foo(self) -> int: ... + # """, + # runtime=""" + # class K2: + # @final + # @property + # def foo(self): return 42 + # """, + # error="K2.foo", + # ) + yield Case( + stub=""" + class L: + def foo(self, obj: int) -> int: ... + """, + runtime=""" + class L: + @final + @functools.lru_cache() + def foo(self, obj): return obj * 2 + """, + error="L.foo", + ) + + @collect_cases + def test_name_mangling(self) -> Iterator[Case]: + yield Case( + stub=""" + class X: + def __mangle_good(self, text: str) -> None: ... + def __mangle_bad(self, number: int) -> None: ... 
+ """, + runtime=""" + class X: + def __mangle_good(self, text): pass + def __mangle_bad(self, text): pass + """, + error="X.__mangle_bad", + ) + yield Case( + stub=""" + class Klass: + class __Mangled1: + class __Mangled2: + def __mangle_good(self, text: str) -> None: ... + def __mangle_bad(self, number: int) -> None: ... + """, + runtime=""" + class Klass: + class __Mangled1: + class __Mangled2: + def __mangle_good(self, text): pass + def __mangle_bad(self, text): pass + """, + error="Klass.__Mangled1.__Mangled2.__mangle_bad", + ) + yield Case( + stub=""" + class __Dunder__: + def __mangle_good(self, text: str) -> None: ... + def __mangle_bad(self, number: int) -> None: ... + """, + runtime=""" + class __Dunder__: + def __mangle_good(self, text): pass + def __mangle_bad(self, text): pass + """, + error="__Dunder__.__mangle_bad", + ) + yield Case( + stub=""" + class _Private: + def __mangle_good(self, text: str) -> None: ... + def __mangle_bad(self, number: int) -> None: ... + """, + runtime=""" + class _Private: + def __mangle_good(self, text): pass + def __mangle_bad(self, text): pass + """, + error="_Private.__mangle_bad", + ) + + @collect_cases + def test_mro(self) -> Iterator[Case]: + yield Case( + stub=""" + class A: + def foo(self, x: int) -> None: ... + class B(A): + pass + class C(A): + pass + """, + runtime=""" + class A: + def foo(self, x: int) -> None: ... + class B(A): + def foo(self, x: int) -> None: ... + class C(A): + def foo(self, y: int) -> None: ... + """, + error="C.foo", + ) + yield Case( + stub=""" + class X: ... + """, + runtime=""" + class X: + def __init__(self, x): pass + """, + error="X.__init__", + ) + + @collect_cases + def test_good_literal(self) -> Iterator[Case]: + yield Case( + stub=r""" + from typing import Literal + + import enum + class Color(enum.Enum): + RED = ... 
+ + NUM: Literal[1] + CHAR: Literal['a'] + FLAG: Literal[True] + NON: Literal[None] + BYT1: Literal[b'abc'] + BYT2: Literal[b'\x90'] + ENUM: Literal[Color.RED] + """, + runtime=r""" + import enum + class Color(enum.Enum): + RED = 3 + + NUM = 1 + CHAR = 'a' + NON = None + FLAG = True + BYT1 = b"abc" + BYT2 = b'\x90' + ENUM = Color.RED + """, + error=None, + ) + + @collect_cases + def test_bad_literal(self) -> Iterator[Case]: + yield Case("from typing import Literal", "", None) # dummy case + yield Case( + stub="INT_FLOAT_MISMATCH: Literal[1]", + runtime="INT_FLOAT_MISMATCH = 1.0", + error="INT_FLOAT_MISMATCH", + ) + yield Case(stub="WRONG_INT: Literal[1]", runtime="WRONG_INT = 2", error="WRONG_INT") + yield Case(stub="WRONG_STR: Literal['a']", runtime="WRONG_STR = 'b'", error="WRONG_STR") + yield Case( + stub="BYTES_STR_MISMATCH: Literal[b'value']", + runtime="BYTES_STR_MISMATCH = 'value'", + error="BYTES_STR_MISMATCH", + ) + yield Case( + stub="STR_BYTES_MISMATCH: Literal['value']", + runtime="STR_BYTES_MISMATCH = b'value'", + error="STR_BYTES_MISMATCH", + ) + yield Case( + stub="WRONG_BYTES: Literal[b'abc']", + runtime="WRONG_BYTES = b'xyz'", + error="WRONG_BYTES", + ) + yield Case( + stub="WRONG_BOOL_1: Literal[True]", + runtime="WRONG_BOOL_1 = False", + error="WRONG_BOOL_1", + ) + yield Case( + stub="WRONG_BOOL_2: Literal[False]", + runtime="WRONG_BOOL_2 = True", + error="WRONG_BOOL_2", + ) + + @collect_cases + def test_special_subtype(self) -> Iterator[Case]: + yield Case( + stub=""" + b1: bool + b2: bool + b3: bool + """, + runtime=""" + b1 = 0 + b2 = 1 + b3 = 2 + """, + error="b3", + ) + yield Case( + stub=""" + from typing import TypedDict + + class _Options(TypedDict): + a: str + b: int + + opt1: _Options + opt2: _Options + opt3: _Options + """, + runtime=""" + opt1 = {"a": "3.", "b": 14} + opt2 = {"some": "stuff"} # false negative + opt3 = 0 + """, + error="opt3", + ) + + @collect_cases + def test_runtime_typing_objects(self) -> Iterator[Case]: + yield Case( + stub="from typing import Protocol, TypedDict", + runtime="from typing import Protocol, TypedDict", + error=None, + ) + yield Case( + stub=""" + class X(Protocol): + bar: int + def foo(self, x: int, y: bytes = ...) -> str: ... + """, + runtime=""" + class X(Protocol): + bar: int + def foo(self, x: int, y: bytes = ...) -> str: ... + """, + error=None, + ) + yield Case( + stub=""" + class Y(TypedDict): + a: int + """, + runtime=""" + class Y(TypedDict): + a: int + """, + error=None, + ) + + @collect_cases + def test_named_tuple(self) -> Iterator[Case]: + yield Case( + stub="from typing import NamedTuple", + runtime="from typing import NamedTuple", + error=None, + ) + yield Case( + stub=""" + class X1(NamedTuple): + bar: int + foo: str = ... + """, + runtime=""" + class X1(NamedTuple): + bar: int + foo: str = 'a' + """, + error=None, + ) + yield Case( + stub=""" + class X2(NamedTuple): + bar: int + foo: str + """, + runtime=""" + class X2(NamedTuple): + bar: int + foo: str = 'a' + """, + # `__new__` will miss a default value for a `foo` parameter, + # but we don't generate special errors for `foo` missing `...` part. + error="X2.__new__", + ) + + @collect_cases + def test_named_tuple_typing_and_collections(self) -> Iterator[Case]: + yield Case( + stub="from typing import NamedTuple", + runtime="from collections import namedtuple", + error=None, + ) + yield Case( + stub=""" + class X1(NamedTuple): + bar: int + foo: str = ... 
+ """, + runtime=""" + X1 = namedtuple('X1', ['bar', 'foo'], defaults=['a']) + """, + error=None, + ) + yield Case( + stub=""" + class X2(NamedTuple): + bar: int + foo: str + """, + runtime=""" + X2 = namedtuple('X1', ['bar', 'foo'], defaults=['a']) + """, + error="X2.__new__", + ) + + @collect_cases + def test_type_var(self) -> Iterator[Case]: + yield Case( + stub="from typing import TypeVar", runtime="from typing import TypeVar", error=None + ) + yield Case(stub="A = TypeVar('A')", runtime="A = TypeVar('A')", error=None) + yield Case(stub="B = TypeVar('B')", runtime="B = 5", error="B") + if sys.version_info >= (3, 10): + yield Case( + stub="from typing import ParamSpec", + runtime="from typing import ParamSpec", + error=None, + ) + yield Case(stub="C = ParamSpec('C')", runtime="C = ParamSpec('C')", error=None) + + @collect_cases + def test_metaclass_match(self) -> Iterator[Case]: + yield Case(stub="class Meta(type): ...", runtime="class Meta(type): ...", error=None) + yield Case(stub="class A0: ...", runtime="class A0: ...", error=None) + yield Case( + stub="class A1(metaclass=Meta): ...", + runtime="class A1(metaclass=Meta): ...", + error=None, + ) + yield Case(stub="class A2: ...", runtime="class A2(metaclass=Meta): ...", error="A2") + yield Case(stub="class A3(metaclass=Meta): ...", runtime="class A3: ...", error="A3") + + # Explicit `type` metaclass can always be added in any part: + yield Case( + stub="class T1(metaclass=type): ...", + runtime="class T1(metaclass=type): ...", + error=None, + ) + yield Case(stub="class T2: ...", runtime="class T2(metaclass=type): ...", error=None) + yield Case(stub="class T3(metaclass=type): ...", runtime="class T3: ...", error=None) + + # Explicit check that `_protected` names are also supported: + yield Case(stub="class _P1(type): ...", runtime="class _P1(type): ...", error=None) + yield Case(stub="class P2: ...", runtime="class P2(metaclass=_P1): ...", error="P2") + + # With inheritance: + yield Case( + stub=""" + class I1(metaclass=Meta): ... + class S1(I1): ... + """, + runtime=""" + class I1(metaclass=Meta): ... + class S1(I1): ... + """, + error=None, + ) + yield Case( + stub=""" + class I2(metaclass=Meta): ... + class S2: ... # missing inheritance + """, + runtime=""" + class I2(metaclass=Meta): ... + class S2(I2): ... + """, + error="S2", + ) + + @collect_cases + def test_metaclass_abcmeta(self) -> Iterator[Case]: + # Handling abstract metaclasses is special: + yield Case(stub="from abc import ABCMeta", runtime="from abc import ABCMeta", error=None) + yield Case( + stub="class A1(metaclass=ABCMeta): ...", + runtime="class A1(metaclass=ABCMeta): ...", + error=None, + ) + # Stubs cannot miss abstract metaclass: + yield Case(stub="class A2: ...", runtime="class A2(metaclass=ABCMeta): ...", error="A2") + # But, stubs can add extra abstract metaclass, this might be a typing hack: + yield Case(stub="class A3(metaclass=ABCMeta): ...", runtime="class A3: ...", error=None) + + @collect_cases + def test_abstract_methods(self) -> Iterator[Case]: + yield Case( + stub=""" + from abc import abstractmethod + from typing import overload + """, + runtime="from abc import abstractmethod", + error=None, + ) + yield Case( + stub=""" + class A1: + def some(self) -> None: ... + """, + runtime=""" + class A1: + @abstractmethod + def some(self) -> None: ... + """, + error="A1.some", + ) + yield Case( + stub=""" + class A2: + @abstractmethod + def some(self) -> None: ... + """, + runtime=""" + class A2: + @abstractmethod + def some(self) -> None: ... 
+ """, + error=None, + ) + yield Case( + stub=""" + class A3: + @overload + def some(self, other: int) -> str: ... + @overload + def some(self, other: str) -> int: ... + """, + runtime=""" + class A3: + @abstractmethod + def some(self, other) -> None: ... + """, + error="A3.some", + ) + yield Case( + stub=""" + class A4: + @overload + @abstractmethod + def some(self, other: int) -> str: ... + @overload + @abstractmethod + def some(self, other: str) -> int: ... + """, + runtime=""" + class A4: + @abstractmethod + def some(self, other) -> None: ... + """, + error=None, + ) + yield Case( + stub=""" + class A5: + @abstractmethod + @overload + def some(self, other: int) -> str: ... + @abstractmethod + @overload + def some(self, other: str) -> int: ... + """, + runtime=""" + class A5: + @abstractmethod + def some(self, other) -> None: ... + """, + error=None, + ) + # Runtime can miss `@abstractmethod`: + yield Case( + stub=""" + class A6: + @abstractmethod + def some(self) -> None: ... + """, + runtime=""" + class A6: + def some(self) -> None: ... + """, + error=None, + ) + + @collect_cases + def test_abstract_properties(self) -> Iterator[Case]: + # TODO: test abstract properties with setters + yield Case( + stub="from abc import abstractmethod", + runtime="from abc import abstractmethod", + error=None, + ) + # Ensure that `@property` also can be abstract: + yield Case( + stub=""" + class AP1: + @property + def some(self) -> int: ... + """, + runtime=""" + class AP1: + @property + @abstractmethod + def some(self) -> int: ... + """, + error="AP1.some", + ) + yield Case( + stub=""" + class AP1_2: + def some(self) -> int: ... # missing `@property` decorator + """, + runtime=""" + class AP1_2: + @property + @abstractmethod + def some(self) -> int: ... + """, + error="AP1_2.some", + ) + yield Case( + stub=""" + class AP2: + @property + @abstractmethod + def some(self) -> int: ... + """, + runtime=""" + class AP2: + @property + @abstractmethod + def some(self) -> int: ... + """, + error=None, + ) + # Runtime can miss `@abstractmethod`: + yield Case( + stub=""" + class AP3: + @property + @abstractmethod + def some(self) -> int: ... + """, + runtime=""" + class AP3: + @property + def some(self) -> int: ... + """, + error=None, + ) + + @collect_cases + def test_type_check_only(self) -> Iterator[Case]: + yield Case( + stub="from typing import type_check_only, overload", + runtime="from typing import overload", + error=None, + ) + # You can have public types that are only defined in stubs + # with `@type_check_only`: + yield Case( + stub=""" + @type_check_only + class A1: ... + """, + runtime="", + error=None, + ) + # Having `@type_check_only` on a type that exists at runtime is an error + yield Case( + stub=""" + @type_check_only + class A2: ... + """, + runtime="class A2: ...", + error="A2", + ) + # The same is true for NamedTuples and TypedDicts: + yield Case( + stub="from typing import NamedTuple, TypedDict", + runtime="from typing import NamedTuple, TypedDict", + error=None, + ) + yield Case( + stub=""" + @type_check_only + class NT1(NamedTuple): ... + """, + runtime="class NT1(NamedTuple): ...", + error="NT1", + ) + yield Case( + stub=""" + @type_check_only + class TD1(TypedDict): ... + """, + runtime="class TD1(TypedDict): ...", + error="TD1", + ) + # The same is true for functions: + yield Case( + stub=""" + @type_check_only + def func1() -> None: ... + """, + runtime="", + error=None, + ) + yield Case( + stub=""" + @type_check_only + def func2() -> None: ... 
+ """, + runtime="def func2() -> None: ...", + error="func2", + ) + # A type that exists at runtime is allowed to alias a type marked + # as '@type_check_only' in the stubs. + yield Case( + stub=""" + @type_check_only + class _X1: ... + X2 = _X1 + """, + runtime="class X2: ...", + error=None, + ) + + @collect_cases + def test_type_default_protocol(self) -> Iterator[Case]: + yield Case( + stub=""" + from typing import Protocol + + class _FormatterClass(Protocol): + def __call__(self, *, prog: str) -> HelpFormatter: ... + + class ArgumentParser: + def __init__(self, formatter_class: _FormatterClass = ...) -> None: ... + + class HelpFormatter: + def __init__(self, prog: str, indent_increment: int = 2) -> None: ... + """, + runtime=""" + class HelpFormatter: + def __init__(self, prog, indent_increment=2) -> None: ... + + class ArgumentParser: + def __init__(self, formatter_class=HelpFormatter): ... + """, + error=None, + ) + + +def remove_color_code(s: str) -> str: + return re.sub("\\x1b.*?m", "", s) # this works! + + +class StubtestMiscUnit(unittest.TestCase): + def test_output(self) -> None: + output = run_stubtest( + stub="def bad(number: int, text: str) -> None: ...", + runtime="def bad(num, text): pass", + options=[], + ) + expected = ( + f'error: {TEST_MODULE_NAME}.bad is inconsistent, stub parameter "number" differs ' + 'from runtime parameter "num"\n' + f"Stub: in file {TEST_MODULE_NAME}.pyi:1\n" + "def (number: builtins.int, text: builtins.str)\n" + f"Runtime: in file {TEST_MODULE_NAME}.py:1\ndef (num, text)\n\n" + "Found 1 error (checked 1 module)\n" + ) + assert output == expected + + output = run_stubtest( + stub="def bad(number: int, text: str) -> None: ...", + runtime="def bad(num, text): pass", + options=["--concise"], + ) + expected = ( + "{}.bad is inconsistent, " + 'stub parameter "number" differs from runtime parameter "num"\n'.format( + TEST_MODULE_NAME + ) + ) + assert output == expected + + def test_ignore_flags(self) -> None: + output = run_stubtest( + stub="", runtime="__all__ = ['f']\ndef f(): pass", options=["--ignore-missing-stub"] + ) + assert output == "Success: no issues found in 1 module\n" + + output = run_stubtest(stub="", runtime="def f(): pass", options=["--ignore-missing-stub"]) + assert output == "Success: no issues found in 1 module\n" + + output = run_stubtest( + stub="def f(__a): ...", runtime="def f(a): pass", options=["--ignore-positional-only"] + ) + assert output == "Success: no issues found in 1 module\n" + + def test_allowlist(self) -> None: + # Can't use this as a context because Windows + allowlist = tempfile.NamedTemporaryFile(mode="w+", delete=False) + try: + with allowlist: + allowlist.write(f"{TEST_MODULE_NAME}.bad # comment\n# comment") + + output = run_stubtest( + stub="def bad(number: int, text: str) -> None: ...", + runtime="def bad(asdf, text): pass", + options=["--allowlist", allowlist.name], + ) + assert output == "Success: no issues found in 1 module\n" + + # test unused entry detection + output = run_stubtest(stub="", runtime="", options=["--allowlist", allowlist.name]) + assert output == ( + f"note: unused allowlist entry {TEST_MODULE_NAME}.bad\n" + "Found 1 error (checked 1 module)\n" + ) + + output = run_stubtest( + stub="", + runtime="", + options=["--allowlist", allowlist.name, "--ignore-unused-allowlist"], + ) + assert output == "Success: no issues found in 1 module\n" + + # test regex matching + with open(allowlist.name, mode="w+") as f: + f.write(f"{TEST_MODULE_NAME}.b.*\n") + f.write("(unused_missing)?\n") + 
f.write("unused.*\n") + + output = run_stubtest( + stub=textwrap.dedent( + """ + def good() -> None: ... + def bad(number: int) -> None: ... + def also_bad(number: int) -> None: ... + """.lstrip( + "\n" + ) + ), + runtime=textwrap.dedent( + """ + def good(): pass + def bad(asdf): pass + def also_bad(asdf): pass + """.lstrip( + "\n" + ) + ), + options=["--allowlist", allowlist.name, "--generate-allowlist"], + ) + assert output == ( + f"note: unused allowlist entry unused.*\n{TEST_MODULE_NAME}.also_bad\n" + ) + finally: + os.unlink(allowlist.name) + + def test_mypy_build(self) -> None: + output = run_stubtest(stub="+", runtime="", options=[]) + assert output == ( + "error: not checking stubs due to failed mypy compile:\n{}.pyi:1: " + "error: Invalid syntax [syntax]\n".format(TEST_MODULE_NAME) + ) + + output = run_stubtest(stub="def f(): ...\ndef f(): ...", runtime="", options=[]) + assert output == ( + "error: not checking stubs due to mypy build errors:\n{}.pyi:2: " + 'error: Name "f" already defined on line 1 [no-redef]\n'.format(TEST_MODULE_NAME) + ) + + def test_missing_stubs(self) -> None: + output = io.StringIO() + with contextlib.redirect_stdout(output): + test_stubs(parse_options(["not_a_module"])) + assert remove_color_code(output.getvalue()) == ( + "error: not_a_module failed to find stubs\n" + "Stub:\nMISSING\nRuntime:\nN/A\n\n" + "Found 1 error (checked 1 module)\n" + ) + + def test_only_py(self) -> None: + # in this case, stubtest will check the py against itself + # this is useful to support packages with a mix of stubs and inline types + with use_tmp_dir(TEST_MODULE_NAME): + with open(f"{TEST_MODULE_NAME}.py", "w") as f: + f.write("a = 1") + output = io.StringIO() + with contextlib.redirect_stdout(output): + test_stubs(parse_options([TEST_MODULE_NAME])) + output_str = remove_color_code(output.getvalue()) + assert output_str == "Success: no issues found in 1 module\n" + + def test_get_typeshed_stdlib_modules(self) -> None: + stdlib = mypy.stubtest.get_typeshed_stdlib_modules(None, (3, 7)) + assert "builtins" in stdlib + assert "os" in stdlib + assert "os.path" in stdlib + assert "asyncio" in stdlib + assert "graphlib" not in stdlib + assert "formatter" in stdlib + assert "contextvars" in stdlib # 3.7+ + assert "importlib.metadata" not in stdlib + + stdlib = mypy.stubtest.get_typeshed_stdlib_modules(None, (3, 10)) + assert "graphlib" in stdlib + assert "formatter" not in stdlib + assert "importlib.metadata" in stdlib + + def test_signature(self) -> None: + def f(a: int, b: int, *, c: int, d: int = 0, **kwargs: Any) -> None: + pass + + assert ( + str(mypy.stubtest.Signature.from_inspect_signature(inspect.signature(f))) + == "def (a, b, *, c, d = ..., **kwargs)" + ) + + def test_builtin_signature_with_unrepresentable_default(self) -> None: + sig = mypy.stubtest.safe_inspect_signature(bytes.hex) + assert sig is not None + assert ( + str(mypy.stubtest.Signature.from_inspect_signature(sig)) + == "def (self, sep = ..., bytes_per_sep = ...)" + ) + + def test_overload_signature(self) -> None: + # The same argument as both positional-only and pos-or-kw in + # different overloads previously produced incorrect signatures + source = """ + from typing import overload + @overload + def myfunction(arg: int) -> None: ... + @overload + def myfunction(arg: str, /) -> None: ... 
+ """ + result = build_helper(source) + stub = result.files["__main__"].names["myfunction"].node + assert isinstance(stub, nodes.OverloadedFuncDef) + sig = mypy.stubtest.Signature.from_overloadedfuncdef(stub) + if sys.version_info >= (3, 10): + assert str(sig) == "def (arg: builtins.int | builtins.str)" + else: + assert str(sig) == "def (arg: Union[builtins.int, builtins.str])" + + def test_config_file(self) -> None: + runtime = "temp = 5\n" + stub = "from decimal import Decimal\ntemp: Decimal\n" + config_file = f"[mypy]\nplugins={root_dir}/test-data/unit/plugins/decimal_to_int.py\n" + output = run_stubtest(stub=stub, runtime=runtime, options=[]) + assert output == ( + f"error: {TEST_MODULE_NAME}.temp variable differs from runtime type Literal[5]\n" + f"Stub: in file {TEST_MODULE_NAME}.pyi:2\n_decimal.Decimal\nRuntime:\n5\n\n" + "Found 1 error (checked 1 module)\n" + ) + output = run_stubtest(stub=stub, runtime=runtime, options=[], config_file=config_file) + assert output == "Success: no issues found in 1 module\n" + + def test_config_file_error_codes(self) -> None: + runtime = "temp = 5\n" + stub = "temp = SOME_GLOBAL_CONST" + output = run_stubtest(stub=stub, runtime=runtime, options=[]) + assert output == ( + "error: not checking stubs due to mypy build errors:\n" + 'test_module.pyi:1: error: Name "SOME_GLOBAL_CONST" is not defined [name-defined]\n' + ) + + config_file = "[mypy]\ndisable_error_code = name-defined\n" + output = run_stubtest(stub=stub, runtime=runtime, options=[], config_file=config_file) + assert output == "Success: no issues found in 1 module\n" + + def test_config_file_error_codes_invalid(self) -> None: + runtime = "temp = 5\n" + stub = "temp: int\n" + config_file = "[mypy]\ndisable_error_code = not-a-valid-name\n" + output = io.StringIO() + outerr = io.StringIO() + with raises(SystemExit): + run_stubtest_with_stderr( + stub=stub, + runtime=runtime, + options=[], + config_file=config_file, + output=output, + outerr=outerr, + ) + + assert output.getvalue() == "error: Invalid error code(s): not-a-valid-name\n" + assert outerr.getvalue() == "" + + def test_config_file_wrong_incomplete_feature(self) -> None: + runtime = "x = 1\n" + stub = "x: int\n" + config_file = "[mypy]\nenable_incomplete_feature = Unpack\n" + output = run_stubtest(stub=stub, runtime=runtime, options=[], config_file=config_file) + assert output == ( + "warning: Warning: Unpack is already enabled by default\n" + "Success: no issues found in 1 module\n" + ) + + config_file = "[mypy]\nenable_incomplete_feature = not-a-valid-name\n" + with self.assertRaises(SystemExit): + run_stubtest(stub=stub, runtime=runtime, options=[], config_file=config_file) + + def test_no_modules(self) -> None: + output = io.StringIO() + with contextlib.redirect_stdout(output): + test_stubs(parse_options([])) + assert remove_color_code(output.getvalue()) == "error: no modules to check\n" + + def test_module_and_typeshed(self) -> None: + output = io.StringIO() + with contextlib.redirect_stdout(output): + test_stubs(parse_options(["--check-typeshed", "some_module"])) + assert remove_color_code(output.getvalue()) == ( + "error: cannot pass both --check-typeshed and a list of modules\n" + ) diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testsubtypes.py b/.venv/lib/python3.12/site-packages/mypy/test/testsubtypes.py new file mode 100644 index 0000000..b75c22b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testsubtypes.py @@ -0,0 +1,306 @@ +from __future__ import annotations + +from mypy.nodes import 
CONTRAVARIANT, COVARIANT, INVARIANT +from mypy.subtypes import is_subtype +from mypy.test.helpers import Suite +from mypy.test.typefixture import InterfaceTypeFixture, TypeFixture +from mypy.types import Instance, TupleType, Type, UninhabitedType, UnpackType + + +class SubtypingSuite(Suite): + def setUp(self) -> None: + self.fx = TypeFixture(INVARIANT) + self.fx_contra = TypeFixture(CONTRAVARIANT) + self.fx_co = TypeFixture(COVARIANT) + + def test_trivial_cases(self) -> None: + for simple in self.fx_co.a, self.fx_co.o, self.fx_co.b: + self.assert_subtype(simple, simple) + + def test_instance_subtyping(self) -> None: + self.assert_strict_subtype(self.fx.a, self.fx.o) + self.assert_strict_subtype(self.fx.b, self.fx.o) + self.assert_strict_subtype(self.fx.b, self.fx.a) + + self.assert_not_subtype(self.fx.a, self.fx.d) + self.assert_not_subtype(self.fx.b, self.fx.c) + + def test_simple_generic_instance_subtyping_invariant(self) -> None: + self.assert_subtype(self.fx.ga, self.fx.ga) + self.assert_subtype(self.fx.hab, self.fx.hab) + + self.assert_not_subtype(self.fx.ga, self.fx.g2a) + self.assert_not_subtype(self.fx.ga, self.fx.gb) + self.assert_not_subtype(self.fx.gb, self.fx.ga) + + def test_simple_generic_instance_subtyping_covariant(self) -> None: + self.assert_subtype(self.fx_co.ga, self.fx_co.ga) + self.assert_subtype(self.fx_co.hab, self.fx_co.hab) + + self.assert_not_subtype(self.fx_co.ga, self.fx_co.g2a) + self.assert_not_subtype(self.fx_co.ga, self.fx_co.gb) + self.assert_subtype(self.fx_co.gb, self.fx_co.ga) + + def test_simple_generic_instance_subtyping_contravariant(self) -> None: + self.assert_subtype(self.fx_contra.ga, self.fx_contra.ga) + self.assert_subtype(self.fx_contra.hab, self.fx_contra.hab) + + self.assert_not_subtype(self.fx_contra.ga, self.fx_contra.g2a) + self.assert_subtype(self.fx_contra.ga, self.fx_contra.gb) + self.assert_not_subtype(self.fx_contra.gb, self.fx_contra.ga) + + def test_generic_subtyping_with_inheritance_invariant(self) -> None: + self.assert_subtype(self.fx.gsab, self.fx.gb) + self.assert_not_subtype(self.fx.gsab, self.fx.ga) + self.assert_not_subtype(self.fx.gsaa, self.fx.gb) + + def test_generic_subtyping_with_inheritance_covariant(self) -> None: + self.assert_subtype(self.fx_co.gsab, self.fx_co.gb) + self.assert_subtype(self.fx_co.gsab, self.fx_co.ga) + self.assert_not_subtype(self.fx_co.gsaa, self.fx_co.gb) + + def test_generic_subtyping_with_inheritance_contravariant(self) -> None: + self.assert_subtype(self.fx_contra.gsab, self.fx_contra.gb) + self.assert_not_subtype(self.fx_contra.gsab, self.fx_contra.ga) + self.assert_subtype(self.fx_contra.gsaa, self.fx_contra.gb) + + def test_interface_subtyping(self) -> None: + self.assert_subtype(self.fx.e, self.fx.f) + self.assert_equivalent(self.fx.f, self.fx.f) + self.assert_not_subtype(self.fx.a, self.fx.f) + + def test_generic_interface_subtyping(self) -> None: + # TODO make this work + fx2 = InterfaceTypeFixture() + + self.assert_subtype(fx2.m1, fx2.gfa) + self.assert_not_subtype(fx2.m1, fx2.gfb) + + self.assert_equivalent(fx2.gfa, fx2.gfa) + + def test_basic_callable_subtyping(self) -> None: + self.assert_strict_subtype( + self.fx.callable(self.fx.o, self.fx.d), self.fx.callable(self.fx.a, self.fx.d) + ) + self.assert_strict_subtype( + self.fx.callable(self.fx.d, self.fx.b), self.fx.callable(self.fx.d, self.fx.a) + ) + + self.assert_strict_subtype( + self.fx.callable(self.fx.a, UninhabitedType()), self.fx.callable(self.fx.a, self.fx.a) + ) + + self.assert_unrelated( + self.fx.callable(self.fx.a, 
self.fx.a, self.fx.a), + self.fx.callable(self.fx.a, self.fx.a), + ) + + def test_default_arg_callable_subtyping(self) -> None: + self.assert_strict_subtype( + self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a), + self.fx.callable(self.fx.a, self.fx.d, self.fx.a), + ) + + self.assert_strict_subtype( + self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a), + self.fx.callable(self.fx.a, self.fx.a), + ) + + self.assert_strict_subtype( + self.fx.callable_default(0, self.fx.a, self.fx.d, self.fx.a), + self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a), + ) + + self.assert_unrelated( + self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a), + self.fx.callable(self.fx.d, self.fx.d, self.fx.a), + ) + + self.assert_unrelated( + self.fx.callable_default(0, self.fx.a, self.fx.d, self.fx.a), + self.fx.callable_default(1, self.fx.a, self.fx.a, self.fx.a), + ) + + self.assert_unrelated( + self.fx.callable_default(1, self.fx.a, self.fx.a), + self.fx.callable(self.fx.a, self.fx.a, self.fx.a), + ) + + def test_var_arg_callable_subtyping_1(self) -> None: + self.assert_strict_subtype( + self.fx.callable_var_arg(0, self.fx.a, self.fx.a), + self.fx.callable_var_arg(0, self.fx.b, self.fx.a), + ) + + def test_var_arg_callable_subtyping_2(self) -> None: + self.assert_strict_subtype( + self.fx.callable_var_arg(0, self.fx.a, self.fx.a), + self.fx.callable(self.fx.b, self.fx.a), + ) + + def test_var_arg_callable_subtyping_3(self) -> None: + self.assert_strict_subtype( + self.fx.callable_var_arg(0, self.fx.a, self.fx.a), self.fx.callable(self.fx.a) + ) + + def test_var_arg_callable_subtyping_4(self) -> None: + self.assert_strict_subtype( + self.fx.callable_var_arg(1, self.fx.a, self.fx.d, self.fx.a), + self.fx.callable(self.fx.b, self.fx.a), + ) + + def test_var_arg_callable_subtyping_5(self) -> None: + self.assert_strict_subtype( + self.fx.callable_var_arg(0, self.fx.a, self.fx.d, self.fx.a), + self.fx.callable(self.fx.b, self.fx.a), + ) + + def test_var_arg_callable_subtyping_6(self) -> None: + self.assert_strict_subtype( + self.fx.callable_var_arg(0, self.fx.a, self.fx.f, self.fx.d), + self.fx.callable_var_arg(0, self.fx.b, self.fx.e, self.fx.d), + ) + + def test_var_arg_callable_subtyping_7(self) -> None: + self.assert_not_subtype( + self.fx.callable_var_arg(0, self.fx.b, self.fx.d), + self.fx.callable(self.fx.a, self.fx.d), + ) + + def test_var_arg_callable_subtyping_8(self) -> None: + self.assert_not_subtype( + self.fx.callable_var_arg(0, self.fx.b, self.fx.d), + self.fx.callable_var_arg(0, self.fx.a, self.fx.a, self.fx.d), + ) + self.assert_subtype( + self.fx.callable_var_arg(0, self.fx.a, self.fx.d), + self.fx.callable_var_arg(0, self.fx.b, self.fx.b, self.fx.d), + ) + + def test_var_arg_callable_subtyping_9(self) -> None: + self.assert_not_subtype( + self.fx.callable_var_arg(0, self.fx.b, self.fx.b, self.fx.d), + self.fx.callable_var_arg(0, self.fx.a, self.fx.d), + ) + self.assert_subtype( + self.fx.callable_var_arg(0, self.fx.a, self.fx.a, self.fx.d), + self.fx.callable_var_arg(0, self.fx.b, self.fx.d), + ) + + def test_type_callable_subtyping(self) -> None: + self.assert_subtype(self.fx.callable_type(self.fx.d, self.fx.a), self.fx.type_type) + + self.assert_strict_subtype( + self.fx.callable_type(self.fx.d, self.fx.b), self.fx.callable(self.fx.d, self.fx.a) + ) + + self.assert_strict_subtype( + self.fx.callable_type(self.fx.a, self.fx.b), self.fx.callable(self.fx.a, self.fx.b) + ) + + def test_type_var_tuple(self) -> None: + self.assert_subtype(Instance(self.fx.gvi, []), 
Instance(self.fx.gvi, [])) + self.assert_subtype( + Instance(self.fx.gvi, [self.fx.a, self.fx.b]), + Instance(self.fx.gvi, [self.fx.a, self.fx.b]), + ) + self.assert_not_subtype( + Instance(self.fx.gvi, [self.fx.a, self.fx.b]), + Instance(self.fx.gvi, [self.fx.b, self.fx.a]), + ) + self.assert_not_subtype( + Instance(self.fx.gvi, [self.fx.a, self.fx.b]), Instance(self.fx.gvi, [self.fx.a]) + ) + + self.assert_subtype( + Instance(self.fx.gvi, [UnpackType(self.fx.ss)]), + Instance(self.fx.gvi, [UnpackType(self.fx.ss)]), + ) + self.assert_not_subtype( + Instance(self.fx.gvi, [UnpackType(self.fx.ss)]), + Instance(self.fx.gvi, [UnpackType(self.fx.us)]), + ) + + self.assert_not_subtype( + Instance(self.fx.gvi, [UnpackType(self.fx.ss)]), Instance(self.fx.gvi, []) + ) + self.assert_not_subtype( + Instance(self.fx.gvi, [UnpackType(self.fx.ss)]), Instance(self.fx.gvi, [self.fx.anyt]) + ) + + def test_type_var_tuple_with_prefix_suffix(self) -> None: + self.assert_subtype( + Instance(self.fx.gvi, [self.fx.a, UnpackType(self.fx.ss)]), + Instance(self.fx.gvi, [self.fx.a, UnpackType(self.fx.ss)]), + ) + self.assert_subtype( + Instance(self.fx.gvi, [self.fx.a, self.fx.b, UnpackType(self.fx.ss)]), + Instance(self.fx.gvi, [self.fx.a, self.fx.b, UnpackType(self.fx.ss)]), + ) + self.assert_not_subtype( + Instance(self.fx.gvi, [self.fx.a, UnpackType(self.fx.ss)]), + Instance(self.fx.gvi, [self.fx.b, UnpackType(self.fx.ss)]), + ) + self.assert_not_subtype( + Instance(self.fx.gvi, [self.fx.a, UnpackType(self.fx.ss)]), + Instance(self.fx.gvi, [self.fx.a, self.fx.b, UnpackType(self.fx.ss)]), + ) + + self.assert_subtype( + Instance(self.fx.gvi, [UnpackType(self.fx.ss), self.fx.a]), + Instance(self.fx.gvi, [UnpackType(self.fx.ss), self.fx.a]), + ) + self.assert_not_subtype( + Instance(self.fx.gvi, [UnpackType(self.fx.ss), self.fx.a]), + Instance(self.fx.gvi, [UnpackType(self.fx.ss), self.fx.b]), + ) + self.assert_not_subtype( + Instance(self.fx.gvi, [UnpackType(self.fx.ss), self.fx.a]), + Instance(self.fx.gvi, [UnpackType(self.fx.ss), self.fx.a, self.fx.b]), + ) + + self.assert_subtype( + Instance(self.fx.gvi, [self.fx.a, self.fx.b, UnpackType(self.fx.ss), self.fx.c]), + Instance(self.fx.gvi, [self.fx.a, self.fx.b, UnpackType(self.fx.ss), self.fx.c]), + ) + self.assert_not_subtype( + Instance(self.fx.gvi, [self.fx.a, self.fx.b, UnpackType(self.fx.ss), self.fx.c]), + Instance(self.fx.gvi, [self.fx.a, UnpackType(self.fx.ss), self.fx.b, self.fx.c]), + ) + + def test_type_var_tuple_unpacked_variable_length_tuple(self) -> None: + self.assert_subtype( + Instance(self.fx.gvi, [self.fx.a, self.fx.a]), + Instance(self.fx.gvi, [UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))]), + ) + + def test_fallback_not_subtype_of_tuple(self) -> None: + self.assert_not_subtype(self.fx.a, TupleType([self.fx.b], fallback=self.fx.a)) + + # IDEA: Maybe add these test cases (they are tested pretty well in type + # checker tests already): + # * more interface subtyping test cases + # * more generic interface subtyping test cases + # * type variables + # * tuple types + # * None type + # * any type + # * generic function types + + def assert_subtype(self, s: Type, t: Type) -> None: + assert is_subtype(s, t), f"{s} not subtype of {t}" + + def assert_not_subtype(self, s: Type, t: Type) -> None: + assert not is_subtype(s, t), f"{s} subtype of {t}" + + def assert_strict_subtype(self, s: Type, t: Type) -> None: + self.assert_subtype(s, t) + self.assert_not_subtype(t, s) + + def assert_equivalent(self, s: Type, t: Type) -> None: + 
self.assert_subtype(s, t) + self.assert_subtype(t, s) + + def assert_unrelated(self, s: Type, t: Type) -> None: + self.assert_not_subtype(s, t) + self.assert_not_subtype(t, s) diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testtransform.py b/.venv/lib/python3.12/site-packages/mypy/test/testtransform.py new file mode 100644 index 0000000..48a3eee --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testtransform.py @@ -0,0 +1,63 @@ +"""Identity AST transform test cases""" + +from __future__ import annotations + +from mypy import build +from mypy.errors import CompileError +from mypy.modulefinder import BuildSource +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase, DataSuite +from mypy.test.helpers import assert_string_arrays_equal, normalize_error_messages, parse_options +from mypy.test.visitors import TypeAssertTransformVisitor + + +class TransformSuite(DataSuite): + required_out_section = True + # Reuse semantic analysis test cases. + files = [ + "semanal-basic.test", + "semanal-expressions.test", + "semanal-classes.test", + "semanal-types.test", + "semanal-modules.test", + "semanal-statements.test", + "semanal-abstractclasses.test", + ] + native_sep = True + + def run_case(self, testcase: DataDrivenTestCase) -> None: + test_transform(testcase) + + +def test_transform(testcase: DataDrivenTestCase) -> None: + """Perform an identity transform test case.""" + + try: + src = "\n".join(testcase.input) + options = parse_options(src, testcase, 1) + options.use_builtins_fixtures = True + options.semantic_analysis_only = True + options.show_traceback = True + result = build.build( + sources=[BuildSource("main", None, src)], options=options, alt_lib_path=test_temp_dir + ) + a = result.errors + if a: + raise CompileError(a) + # Include string representations of the source files in the actual + # output. 
+ for module in sorted(result.files.keys()): + if module in testcase.test_modules: + t = TypeAssertTransformVisitor() + t.test_only = True + file = t.mypyfile(result.files[module]) + a += file.str_with_options(options).split("\n") + except CompileError as e: + a = e.messages + if testcase.normalize_output: + a = normalize_error_messages(a) + assert_string_arrays_equal( + testcase.output, + a, + f"Invalid semantic analyzer output ({testcase.file}, line {testcase.line})", + ) diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testtypegen.py b/.venv/lib/python3.12/site-packages/mypy/test/testtypegen.py new file mode 100644 index 0000000..42d831b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testtypegen.py @@ -0,0 +1,82 @@ +"""Test cases for the type checker: exporting inferred types""" + +from __future__ import annotations + +import re + +from mypy import build +from mypy.errors import CompileError +from mypy.modulefinder import BuildSource +from mypy.nodes import NameExpr, TempNode +from mypy.options import Options +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase, DataSuite +from mypy.test.helpers import assert_string_arrays_equal +from mypy.test.visitors import SkippedNodeSearcher, ignore_node +from mypy.util import short_type + + +class TypeExportSuite(DataSuite): + required_out_section = True + files = ["typexport-basic.test"] + + def run_case(self, testcase: DataDrivenTestCase) -> None: + try: + line = testcase.input[0] + mask = "" + if line.startswith("##"): + mask = "(" + line[2:].strip() + ")$" + + src = "\n".join(testcase.input) + options = Options() + options.strict_optional = False # TODO: Enable strict optional checking + options.use_builtins_fixtures = True + options.show_traceback = True + options.export_types = True + options.preserve_asts = True + options.allow_empty_bodies = True + result = build.build( + sources=[BuildSource("main", None, src)], + options=options, + alt_lib_path=test_temp_dir, + ) + a = result.errors + map = result.types + nodes = map.keys() + + # Ignore NameExpr nodes of variables with explicit (trivial) types + # to simplify output. + searcher = SkippedNodeSearcher() + for file in result.files.values(): + searcher.ignore_file = file.fullname not in testcase.test_modules + file.accept(searcher) + ignored = searcher.nodes + + # Filter nodes that should be included in the output. + keys = [] + for node in nodes: + if isinstance(node, TempNode): + continue + if node.line != -1 and map[node]: + if ignore_node(node) or node in ignored: + continue + if re.match(mask, short_type(node)) or ( + isinstance(node, NameExpr) and re.match(mask, node.name) + ): + # Include node in output. 
+ keys.append(node) + + for key in sorted( + keys, + key=lambda n: (n.line, short_type(n), str(n) + map[n].str_with_options(options)), + ): + ts = map[key].str_with_options(options).replace("*", "") # Remove erased tags + ts = ts.replace("__main__.", "") + a.append(f"{short_type(key)}({key.line}) : {ts}") + except CompileError as e: + a = e.messages + assert_string_arrays_equal( + testcase.output, + a, + f"Invalid type checker output ({testcase.file}, line {testcase.line})", + ) diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testtypes.py b/.venv/lib/python3.12/site-packages/mypy/test/testtypes.py new file mode 100644 index 0000000..f5f4c67 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testtypes.py @@ -0,0 +1,1611 @@ +"""Test cases for mypy types and type operations.""" + +from __future__ import annotations + +import re +from unittest import TestCase, skipUnless + +from mypy.erasetype import erase_type, remove_instance_last_known_values +from mypy.indirection import TypeIndirectionVisitor +from mypy.join import join_types +from mypy.meet import meet_types, narrow_declared_type +from mypy.nodes import ( + ARG_NAMED, + ARG_OPT, + ARG_POS, + ARG_STAR, + ARG_STAR2, + CONTRAVARIANT, + COVARIANT, + INVARIANT, + ArgKind, + CallExpr, + Expression, + NameExpr, +) +from mypy.plugins.common import find_shallow_matching_overload_item +from mypy.state import state +from mypy.subtypes import is_more_precise, is_proper_subtype, is_same_type, is_subtype +from mypy.test.helpers import Suite, assert_equal, assert_type, skip +from mypy.test.typefixture import InterfaceTypeFixture, TypeFixture +from mypy.typeops import false_only, make_simplified_union, true_only +from mypy.types import ( + AnyType, + CallableType, + Instance, + LiteralType, + NoneType, + Overloaded, + ProperType, + TupleType, + Type, + TypeOfAny, + TypeType, + TypeVarId, + TypeVarType, + UnboundType, + UninhabitedType, + UnionType, + UnpackType, + get_proper_type, + has_recursive_types, +) + +# Solving the import cycle: +import mypy.expandtype # ruff: isort: skip + + +class TypesSuite(Suite): + def setUp(self) -> None: + self.x = UnboundType("X") # Helpers + self.y = UnboundType("Y") + self.fx = TypeFixture() + self.function = self.fx.function + + def test_any(self) -> None: + assert_equal(str(AnyType(TypeOfAny.special_form)), "Any") + + def test_simple_unbound_type(self) -> None: + u = UnboundType("Foo") + assert_equal(str(u), "Foo?") + + def test_generic_unbound_type(self) -> None: + u = UnboundType("Foo", [UnboundType("T"), AnyType(TypeOfAny.special_form)]) + assert_equal(str(u), "Foo?[T?, Any]") + + def test_callable_type(self) -> None: + c = CallableType( + [self.x, self.y], + [ARG_POS, ARG_POS], + [None, None], + AnyType(TypeOfAny.special_form), + self.function, + ) + assert_equal(str(c), "def (X?, Y?) -> Any") + + c2 = CallableType([], [], [], NoneType(), self.fx.function) + assert_equal(str(c2), "def ()") + + def test_callable_type_with_default_args(self) -> None: + c = CallableType( + [self.x, self.y], + [ARG_POS, ARG_OPT], + [None, None], + AnyType(TypeOfAny.special_form), + self.function, + ) + assert_equal(str(c), "def (X?, Y? =) -> Any") + + c2 = CallableType( + [self.x, self.y], + [ARG_OPT, ARG_OPT], + [None, None], + AnyType(TypeOfAny.special_form), + self.function, + ) + assert_equal(str(c2), "def (X? =, Y? 
=) -> Any") + + def test_callable_type_with_var_args(self) -> None: + c = CallableType( + [self.x], [ARG_STAR], [None], AnyType(TypeOfAny.special_form), self.function + ) + assert_equal(str(c), "def (*X?) -> Any") + + c2 = CallableType( + [self.x, self.y], + [ARG_POS, ARG_STAR], + [None, None], + AnyType(TypeOfAny.special_form), + self.function, + ) + assert_equal(str(c2), "def (X?, *Y?) -> Any") + + c3 = CallableType( + [self.x, self.y], + [ARG_OPT, ARG_STAR], + [None, None], + AnyType(TypeOfAny.special_form), + self.function, + ) + assert_equal(str(c3), "def (X? =, *Y?) -> Any") + + def test_tuple_type_str(self) -> None: + t1 = TupleType([], self.fx.std_tuple) + assert_equal(str(t1), "tuple[()]") + t2 = TupleType([self.x], self.fx.std_tuple) + assert_equal(str(t2), "tuple[X?]") + t3 = TupleType([self.x, AnyType(TypeOfAny.special_form)], self.fx.std_tuple) + assert_equal(str(t3), "tuple[X?, Any]") + + def test_type_variable_binding(self) -> None: + assert_equal( + str( + TypeVarType( + "X", "X", TypeVarId(1), [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics) + ) + ), + "X`1", + ) + assert_equal( + str( + TypeVarType( + "X", + "X", + TypeVarId(1), + [self.x, self.y], + self.fx.o, + AnyType(TypeOfAny.from_omitted_generics), + ) + ), + "X`1", + ) + + def test_generic_function_type(self) -> None: + c = CallableType( + [self.x, self.y], + [ARG_POS, ARG_POS], + [None, None], + self.y, + self.function, + name=None, + variables=[ + TypeVarType( + "X", + "X", + TypeVarId(-1), + [], + self.fx.o, + AnyType(TypeOfAny.from_omitted_generics), + ) + ], + ) + assert_equal(str(c), "def [X] (X?, Y?) -> Y?") + + v = [ + TypeVarType( + "Y", "Y", TypeVarId(-1), [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics) + ), + TypeVarType( + "X", "X", TypeVarId(-2), [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics) + ), + ] + c2 = CallableType([], [], [], NoneType(), self.function, name=None, variables=v) + assert_equal(str(c2), "def [Y, X] ()") + + def test_type_alias_expand_once(self) -> None: + A, target = self.fx.def_alias_1(self.fx.a) + assert get_proper_type(A) == target + assert get_proper_type(target) == target + + A, target = self.fx.def_alias_2(self.fx.a) + assert get_proper_type(A) == target + assert get_proper_type(target) == target + + def test_recursive_nested_in_non_recursive(self) -> None: + A, _ = self.fx.def_alias_1(self.fx.a) + T = TypeVarType( + "T", "T", TypeVarId(-1), [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics) + ) + NA = self.fx.non_rec_alias(Instance(self.fx.gi, [T]), [T], [A]) + assert not NA.is_recursive + assert has_recursive_types(NA) + + def test_indirection_no_infinite_recursion(self) -> None: + A, _ = self.fx.def_alias_1(self.fx.a) + visitor = TypeIndirectionVisitor() + A.accept(visitor) + modules = visitor.modules + assert modules == {"__main__", "builtins"} + + A, _ = self.fx.def_alias_2(self.fx.a) + visitor = TypeIndirectionVisitor() + A.accept(visitor) + modules = visitor.modules + assert modules == {"__main__", "builtins"} + + +class TypeOpsSuite(Suite): + def setUp(self) -> None: + self.fx = TypeFixture(INVARIANT) + self.fx_co = TypeFixture(COVARIANT) + self.fx_contra = TypeFixture(CONTRAVARIANT) + + # expand_type + + def test_trivial_expand(self) -> None: + for t in ( + self.fx.a, + self.fx.o, + self.fx.t, + self.fx.nonet, + self.tuple(self.fx.a), + self.callable([], self.fx.a, self.fx.a), + self.fx.anyt, + ): + self.assert_expand(t, [], t) + self.assert_expand(t, [], t) + self.assert_expand(t, [], t) + + def test_trivial_expand_recursive(self) -> 
None: + A, _ = self.fx.def_alias_1(self.fx.a) + self.assert_expand(A, [], A) + A, _ = self.fx.def_alias_2(self.fx.a) + self.assert_expand(A, [], A) + + def test_expand_naked_type_var(self) -> None: + self.assert_expand(self.fx.t, [(self.fx.t.id, self.fx.a)], self.fx.a) + self.assert_expand(self.fx.t, [(self.fx.s.id, self.fx.a)], self.fx.t) + + def test_expand_basic_generic_types(self) -> None: + self.assert_expand(self.fx.gt, [(self.fx.t.id, self.fx.a)], self.fx.ga) + + # IDEA: Add test cases for + # tuple types + # callable types + # multiple arguments + + def assert_expand( + self, orig: Type, map_items: list[tuple[TypeVarId, Type]], result: Type + ) -> None: + lower_bounds = {} + + for id, t in map_items: + lower_bounds[id] = t + + exp = mypy.expandtype.expand_type(orig, lower_bounds) + # Remove erased tags (asterisks). + assert_equal(str(exp).replace("*", ""), str(result)) + + # erase_type + + def test_trivial_erase(self) -> None: + for t in (self.fx.a, self.fx.o, self.fx.nonet, self.fx.anyt): + self.assert_erase(t, t) + + def test_erase_with_type_variable(self) -> None: + self.assert_erase(self.fx.t, self.fx.anyt) + + def test_erase_with_generic_type(self) -> None: + self.assert_erase(self.fx.ga, self.fx.gdyn) + self.assert_erase(self.fx.hab, Instance(self.fx.hi, [self.fx.anyt, self.fx.anyt])) + + def test_erase_with_generic_type_recursive(self) -> None: + tuple_any = Instance(self.fx.std_tuplei, [AnyType(TypeOfAny.explicit)]) + A, _ = self.fx.def_alias_1(self.fx.a) + self.assert_erase(A, tuple_any) + A, _ = self.fx.def_alias_2(self.fx.a) + self.assert_erase(A, UnionType([self.fx.a, tuple_any])) + + def test_erase_with_tuple_type(self) -> None: + self.assert_erase(self.tuple(self.fx.a), self.fx.std_tuple) + + def test_erase_with_function_type(self) -> None: + self.assert_erase( + self.fx.callable(self.fx.a, self.fx.b), + CallableType( + arg_types=[self.fx.anyt, self.fx.anyt], + arg_kinds=[ARG_STAR, ARG_STAR2], + arg_names=[None, None], + ret_type=self.fx.anyt, + fallback=self.fx.function, + ), + ) + + def test_erase_with_type_object(self) -> None: + self.assert_erase( + self.fx.callable_type(self.fx.a, self.fx.b), + CallableType( + arg_types=[self.fx.anyt, self.fx.anyt], + arg_kinds=[ARG_STAR, ARG_STAR2], + arg_names=[None, None], + ret_type=self.fx.anyt, + fallback=self.fx.type_type, + ), + ) + + def test_erase_with_type_type(self) -> None: + self.assert_erase(self.fx.type_a, self.fx.type_a) + self.assert_erase(self.fx.type_t, self.fx.type_any) + + def assert_erase(self, orig: Type, result: Type) -> None: + assert_equal(str(erase_type(orig)), str(result)) + + # is_more_precise + + def test_is_more_precise(self) -> None: + fx = self.fx + assert is_more_precise(fx.b, fx.a) + assert is_more_precise(fx.b, fx.b) + assert is_more_precise(fx.b, fx.b) + assert is_more_precise(fx.b, fx.anyt) + assert is_more_precise(self.tuple(fx.b, fx.a), self.tuple(fx.b, fx.a)) + assert is_more_precise(self.tuple(fx.b, fx.b), self.tuple(fx.b, fx.a)) + + assert not is_more_precise(fx.a, fx.b) + assert not is_more_precise(fx.anyt, fx.b) + + # is_proper_subtype + + def test_is_proper_subtype(self) -> None: + fx = self.fx + + assert is_proper_subtype(fx.a, fx.a) + assert is_proper_subtype(fx.b, fx.a) + assert is_proper_subtype(fx.b, fx.o) + assert is_proper_subtype(fx.b, fx.o) + + assert not is_proper_subtype(fx.a, fx.b) + assert not is_proper_subtype(fx.o, fx.b) + + assert is_proper_subtype(fx.anyt, fx.anyt) + assert not is_proper_subtype(fx.a, fx.anyt) + assert not is_proper_subtype(fx.anyt, fx.a) + + assert 
is_proper_subtype(fx.ga, fx.ga) + assert is_proper_subtype(fx.gdyn, fx.gdyn) + assert not is_proper_subtype(fx.ga, fx.gdyn) + assert not is_proper_subtype(fx.gdyn, fx.ga) + + assert is_proper_subtype(fx.t, fx.t) + assert not is_proper_subtype(fx.t, fx.s) + + assert is_proper_subtype(fx.a, UnionType([fx.a, fx.b])) + assert is_proper_subtype(UnionType([fx.a, fx.b]), UnionType([fx.a, fx.b, fx.c])) + assert not is_proper_subtype(UnionType([fx.a, fx.b]), UnionType([fx.b, fx.c])) + + def test_is_proper_subtype_covariance(self) -> None: + fx_co = self.fx_co + + assert is_proper_subtype(fx_co.gsab, fx_co.gb) + assert is_proper_subtype(fx_co.gsab, fx_co.ga) + assert not is_proper_subtype(fx_co.gsaa, fx_co.gb) + assert is_proper_subtype(fx_co.gb, fx_co.ga) + assert not is_proper_subtype(fx_co.ga, fx_co.gb) + + def test_is_proper_subtype_contravariance(self) -> None: + fx_contra = self.fx_contra + + assert is_proper_subtype(fx_contra.gsab, fx_contra.gb) + assert not is_proper_subtype(fx_contra.gsab, fx_contra.ga) + assert is_proper_subtype(fx_contra.gsaa, fx_contra.gb) + assert not is_proper_subtype(fx_contra.gb, fx_contra.ga) + assert is_proper_subtype(fx_contra.ga, fx_contra.gb) + + def test_is_proper_subtype_invariance(self) -> None: + fx = self.fx + + assert is_proper_subtype(fx.gsab, fx.gb) + assert not is_proper_subtype(fx.gsab, fx.ga) + assert not is_proper_subtype(fx.gsaa, fx.gb) + assert not is_proper_subtype(fx.gb, fx.ga) + assert not is_proper_subtype(fx.ga, fx.gb) + + def test_is_proper_subtype_and_subtype_literal_types(self) -> None: + fx = self.fx + + lit1 = fx.lit1 + lit2 = fx.lit2 + lit3 = fx.lit3 + + assert is_proper_subtype(lit1, fx.a) + assert not is_proper_subtype(lit1, fx.d) + assert not is_proper_subtype(fx.a, lit1) + assert is_proper_subtype(fx.uninhabited, lit1) + assert not is_proper_subtype(lit1, fx.uninhabited) + assert is_proper_subtype(lit1, lit1) + assert not is_proper_subtype(lit1, lit2) + assert not is_proper_subtype(lit2, lit3) + + assert is_subtype(lit1, fx.a) + assert not is_subtype(lit1, fx.d) + assert not is_subtype(fx.a, lit1) + assert is_subtype(fx.uninhabited, lit1) + assert not is_subtype(lit1, fx.uninhabited) + assert is_subtype(lit1, lit1) + assert not is_subtype(lit1, lit2) + assert not is_subtype(lit2, lit3) + + assert not is_proper_subtype(lit1, fx.anyt) + assert not is_proper_subtype(fx.anyt, lit1) + + assert is_subtype(lit1, fx.anyt) + assert is_subtype(fx.anyt, lit1) + + def test_subtype_aliases(self) -> None: + A1, _ = self.fx.def_alias_1(self.fx.a) + AA1, _ = self.fx.def_alias_1(self.fx.a) + assert is_subtype(A1, AA1) + assert is_subtype(AA1, A1) + + A2, _ = self.fx.def_alias_2(self.fx.a) + AA2, _ = self.fx.def_alias_2(self.fx.a) + assert is_subtype(A2, AA2) + assert is_subtype(AA2, A2) + + B1, _ = self.fx.def_alias_1(self.fx.b) + B2, _ = self.fx.def_alias_2(self.fx.b) + assert is_subtype(B1, A1) + assert is_subtype(B2, A2) + assert not is_subtype(A1, B1) + assert not is_subtype(A2, B2) + + assert not is_subtype(A2, A1) + assert is_subtype(A1, A2) + + # can_be_true / can_be_false + + def test_empty_tuple_always_false(self) -> None: + tuple_type = self.tuple() + assert tuple_type.can_be_false + assert not tuple_type.can_be_true + + def test_nonempty_tuple_always_true(self) -> None: + tuple_type = self.tuple(AnyType(TypeOfAny.special_form), AnyType(TypeOfAny.special_form)) + assert tuple_type.can_be_true + assert not tuple_type.can_be_false + + def test_union_can_be_true_if_any_true(self) -> None: + union_type = UnionType([self.fx.a, self.tuple()]) + 
assert union_type.can_be_true + + def test_union_can_not_be_true_if_none_true(self) -> None: + union_type = UnionType([self.tuple(), self.tuple()]) + assert not union_type.can_be_true + + def test_union_can_be_false_if_any_false(self) -> None: + union_type = UnionType([self.fx.a, self.tuple()]) + assert union_type.can_be_false + + def test_union_can_not_be_false_if_none_false(self) -> None: + union_type = UnionType([self.tuple(self.fx.a), self.tuple(self.fx.d)]) + assert not union_type.can_be_false + + # true_only / false_only + + def test_true_only_of_false_type_is_uninhabited(self) -> None: + to = true_only(NoneType()) + assert_type(UninhabitedType, to) + + def test_true_only_of_true_type_is_idempotent(self) -> None: + always_true = self.tuple(AnyType(TypeOfAny.special_form)) + to = true_only(always_true) + assert always_true is to + + def test_true_only_of_instance(self) -> None: + to = true_only(self.fx.a) + assert_equal(str(to), "A") + assert to.can_be_true + assert not to.can_be_false + assert_type(Instance, to) + # The original class still can be false + assert self.fx.a.can_be_false + + def test_true_only_of_union(self) -> None: + tup_type = self.tuple(AnyType(TypeOfAny.special_form)) + # Union of something that is unknown, something that is always true, something + # that is always false + union_type = UnionType([self.fx.a, tup_type, self.tuple()]) + to = true_only(union_type) + assert isinstance(to, UnionType) + assert_equal(len(to.items), 2) + assert to.items[0].can_be_true + assert not to.items[0].can_be_false + assert to.items[1] is tup_type + + def test_false_only_of_true_type_is_uninhabited(self) -> None: + with state.strict_optional_set(True): + fo = false_only(self.tuple(AnyType(TypeOfAny.special_form))) + assert_type(UninhabitedType, fo) + + def test_false_only_tuple(self) -> None: + with state.strict_optional_set(False): + fo = false_only(self.tuple(self.fx.a)) + assert_equal(fo, NoneType()) + with state.strict_optional_set(True): + fo = false_only(self.tuple(self.fx.a)) + assert_equal(fo, UninhabitedType()) + + def test_false_only_of_false_type_is_idempotent(self) -> None: + always_false = NoneType() + fo = false_only(always_false) + assert always_false is fo + + def test_false_only_of_instance(self) -> None: + fo = false_only(self.fx.a) + assert_equal(str(fo), "A") + assert not fo.can_be_true + assert fo.can_be_false + assert_type(Instance, fo) + # The original class still can be true + assert self.fx.a.can_be_true + + def test_false_only_of_union(self) -> None: + with state.strict_optional_set(True): + tup_type = self.tuple() + # Union of something that is unknown, something that is always true, something + # that is always false + union_type = UnionType( + [self.fx.a, self.tuple(AnyType(TypeOfAny.special_form)), tup_type] + ) + assert_equal(len(union_type.items), 3) + fo = false_only(union_type) + assert isinstance(fo, UnionType) + assert_equal(len(fo.items), 2) + assert not fo.items[0].can_be_true + assert fo.items[0].can_be_false + assert fo.items[1] is tup_type + + def test_simplified_union(self) -> None: + fx = self.fx + + self.assert_simplified_union([fx.a, fx.a], fx.a) + self.assert_simplified_union([fx.a, fx.b], fx.a) + self.assert_simplified_union([fx.a, fx.d], UnionType([fx.a, fx.d])) + self.assert_simplified_union([fx.a, fx.uninhabited], fx.a) + self.assert_simplified_union([fx.ga, fx.gs2a], fx.ga) + self.assert_simplified_union([fx.ga, fx.gsab], UnionType([fx.ga, fx.gsab])) + self.assert_simplified_union([fx.ga, fx.gsba], fx.ga) + 
self.assert_simplified_union([fx.a, UnionType([fx.d])], UnionType([fx.a, fx.d])) + self.assert_simplified_union([fx.a, UnionType([fx.a])], fx.a) + self.assert_simplified_union( + [fx.b, UnionType([fx.c, UnionType([fx.d])])], UnionType([fx.b, fx.c, fx.d]) + ) + + def test_simplified_union_with_literals(self) -> None: + fx = self.fx + + self.assert_simplified_union([fx.lit1, fx.a], fx.a) + self.assert_simplified_union([fx.lit1, fx.lit2, fx.a], fx.a) + self.assert_simplified_union([fx.lit1, fx.lit1], fx.lit1) + self.assert_simplified_union([fx.lit1, fx.lit2], UnionType([fx.lit1, fx.lit2])) + self.assert_simplified_union([fx.lit1, fx.lit3], UnionType([fx.lit1, fx.lit3])) + self.assert_simplified_union([fx.lit1, fx.uninhabited], fx.lit1) + self.assert_simplified_union([fx.lit1_inst, fx.a], fx.a) + self.assert_simplified_union([fx.lit1_inst, fx.lit1_inst], fx.lit1_inst) + self.assert_simplified_union( + [fx.lit1_inst, fx.lit2_inst], UnionType([fx.lit1_inst, fx.lit2_inst]) + ) + self.assert_simplified_union( + [fx.lit1_inst, fx.lit3_inst], UnionType([fx.lit1_inst, fx.lit3_inst]) + ) + self.assert_simplified_union([fx.lit1_inst, fx.uninhabited], fx.lit1_inst) + self.assert_simplified_union([fx.lit1, fx.lit1_inst], fx.lit1) + self.assert_simplified_union([fx.lit1, fx.lit2_inst], UnionType([fx.lit1, fx.lit2_inst])) + self.assert_simplified_union([fx.lit1, fx.lit3_inst], UnionType([fx.lit1, fx.lit3_inst])) + + def test_simplified_union_with_str_literals(self) -> None: + fx = self.fx + + self.assert_simplified_union([fx.lit_str1, fx.lit_str2, fx.str_type], fx.str_type) + self.assert_simplified_union([fx.lit_str1, fx.lit_str1, fx.lit_str1], fx.lit_str1) + self.assert_simplified_union( + [fx.lit_str1, fx.lit_str2, fx.lit_str3], + UnionType([fx.lit_str1, fx.lit_str2, fx.lit_str3]), + ) + self.assert_simplified_union( + [fx.lit_str1, fx.lit_str2, fx.uninhabited], UnionType([fx.lit_str1, fx.lit_str2]) + ) + + def test_simplify_very_large_union(self) -> None: + fx = self.fx + literals = [] + for i in range(5000): + literals.append(LiteralType("v%d" % i, fx.str_type)) + # This shouldn't be very slow, even if the union is big. 
+ self.assert_simplified_union([*literals, fx.str_type], fx.str_type) + + def test_simplified_union_with_str_instance_literals(self) -> None: + fx = self.fx + + self.assert_simplified_union( + [fx.lit_str1_inst, fx.lit_str2_inst, fx.str_type], fx.str_type + ) + self.assert_simplified_union( + [fx.lit_str1_inst, fx.lit_str1_inst, fx.lit_str1_inst], fx.lit_str1_inst + ) + self.assert_simplified_union( + [fx.lit_str1_inst, fx.lit_str2_inst, fx.lit_str3_inst], + UnionType([fx.lit_str1_inst, fx.lit_str2_inst, fx.lit_str3_inst]), + ) + self.assert_simplified_union( + [fx.lit_str1_inst, fx.lit_str2_inst, fx.uninhabited], + UnionType([fx.lit_str1_inst, fx.lit_str2_inst]), + ) + + def test_simplified_union_with_mixed_str_literals(self) -> None: + fx = self.fx + + self.assert_simplified_union( + [fx.lit_str1, fx.lit_str2, fx.lit_str3_inst], + UnionType([fx.lit_str1, fx.lit_str2, fx.lit_str3_inst]), + ) + self.assert_simplified_union([fx.lit_str1, fx.lit_str1, fx.lit_str1_inst], fx.lit_str1) + + def assert_simplified_union(self, original: list[Type], union: Type) -> None: + assert_equal(make_simplified_union(original), union) + assert_equal(make_simplified_union(list(reversed(original))), union) + + # Helpers + + def tuple(self, *a: Type) -> TupleType: + return TupleType(list(a), self.fx.std_tuple) + + def callable(self, vars: list[str], *a: Type) -> CallableType: + """callable(args, a1, ..., an, r) constructs a callable with + argument types a1, ... an and return type r and type arguments + vars. + """ + tv: list[TypeVarType] = [] + n = -1 + for v in vars: + tv.append( + TypeVarType( + v, v, TypeVarId(n), [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics) + ) + ) + n -= 1 + return CallableType( + list(a[:-1]), + [ARG_POS] * (len(a) - 1), + [None] * (len(a) - 1), + a[-1], + self.fx.function, + name=None, + variables=tv, + ) + + +class JoinSuite(Suite): + def setUp(self) -> None: + self.fx = TypeFixture(INVARIANT) + self.fx_co = TypeFixture(COVARIANT) + self.fx_contra = TypeFixture(CONTRAVARIANT) + + def test_trivial_cases(self) -> None: + for simple in self.fx.a, self.fx.o, self.fx.b: + self.assert_join(simple, simple, simple) + + def test_class_subtyping(self) -> None: + self.assert_join(self.fx.a, self.fx.o, self.fx.o) + self.assert_join(self.fx.b, self.fx.o, self.fx.o) + self.assert_join(self.fx.a, self.fx.d, self.fx.o) + self.assert_join(self.fx.b, self.fx.c, self.fx.a) + self.assert_join(self.fx.b, self.fx.d, self.fx.o) + + def test_tuples(self) -> None: + self.assert_join(self.tuple(), self.tuple(), self.tuple()) + self.assert_join(self.tuple(self.fx.a), self.tuple(self.fx.a), self.tuple(self.fx.a)) + self.assert_join( + self.tuple(self.fx.b, self.fx.c), + self.tuple(self.fx.a, self.fx.d), + self.tuple(self.fx.a, self.fx.o), + ) + + self.assert_join( + self.tuple(self.fx.a, self.fx.a), self.fx.std_tuple, self.var_tuple(self.fx.anyt) + ) + self.assert_join( + self.tuple(self.fx.a), self.tuple(self.fx.a, self.fx.a), self.var_tuple(self.fx.a) + ) + self.assert_join( + self.tuple(self.fx.b), self.tuple(self.fx.a, self.fx.c), self.var_tuple(self.fx.a) + ) + self.assert_join(self.tuple(), self.tuple(self.fx.a), self.var_tuple(self.fx.a)) + + def test_var_tuples(self) -> None: + self.assert_join( + self.tuple(self.fx.a), self.var_tuple(self.fx.a), self.var_tuple(self.fx.a) + ) + self.assert_join( + self.var_tuple(self.fx.a), self.tuple(self.fx.a), self.var_tuple(self.fx.a) + ) + self.assert_join(self.var_tuple(self.fx.a), self.tuple(), self.var_tuple(self.fx.a)) + + def test_function_types(self) 
-> None: + self.assert_join( + self.callable(self.fx.a, self.fx.b), + self.callable(self.fx.a, self.fx.b), + self.callable(self.fx.a, self.fx.b), + ) + + self.assert_join( + self.callable(self.fx.a, self.fx.b), + self.callable(self.fx.b, self.fx.b), + self.callable(self.fx.b, self.fx.b), + ) + self.assert_join( + self.callable(self.fx.a, self.fx.b), + self.callable(self.fx.a, self.fx.a), + self.callable(self.fx.a, self.fx.a), + ) + self.assert_join(self.callable(self.fx.a, self.fx.b), self.fx.function, self.fx.function) + self.assert_join( + self.callable(self.fx.a, self.fx.b), + self.callable(self.fx.d, self.fx.b), + self.fx.function, + ) + + def test_type_vars(self) -> None: + self.assert_join(self.fx.t, self.fx.t, self.fx.t) + self.assert_join(self.fx.s, self.fx.s, self.fx.s) + self.assert_join(self.fx.t, self.fx.s, self.fx.o) + + def test_none(self) -> None: + with state.strict_optional_set(False): + # Any type t joined with None results in t. + for t in [ + NoneType(), + self.fx.a, + self.fx.o, + UnboundType("x"), + self.fx.t, + self.tuple(), + self.callable(self.fx.a, self.fx.b), + self.fx.anyt, + ]: + self.assert_join(t, NoneType(), t) + + def test_unbound_type(self) -> None: + self.assert_join(UnboundType("x"), UnboundType("x"), self.fx.anyt) + self.assert_join(UnboundType("x"), UnboundType("y"), self.fx.anyt) + + # Any type t joined with an unbound type results in dynamic. Unbound + # type means that there is an error somewhere in the program, so this + # does not affect type safety (whatever the result). + for t in [ + self.fx.a, + self.fx.o, + self.fx.ga, + self.fx.t, + self.tuple(), + self.callable(self.fx.a, self.fx.b), + ]: + self.assert_join(t, UnboundType("X"), self.fx.anyt) + + def test_any_type(self) -> None: + # Join against 'Any' type always results in 'Any'. + with state.strict_optional_set(False): + self.assert_join(NoneType(), self.fx.anyt, self.fx.anyt) + + for t in [ + self.fx.anyt, + self.fx.a, + self.fx.o, + NoneType(), + UnboundType("x"), + self.fx.t, + self.tuple(), + self.callable(self.fx.a, self.fx.b), + ]: + self.assert_join(t, self.fx.anyt, self.fx.anyt) + + def test_mixed_truth_restricted_type_simple(self) -> None: + # make_simplified_union against differently restricted truthiness types drops restrictions. + true_a = true_only(self.fx.a) + false_o = false_only(self.fx.o) + u = make_simplified_union([true_a, false_o]) + assert u.can_be_true + assert u.can_be_false + + def test_mixed_truth_restricted_type(self) -> None: + # join_types against differently restricted truthiness types drops restrictions. + true_any = true_only(AnyType(TypeOfAny.special_form)) + false_o = false_only(self.fx.o) + j = join_types(true_any, false_o) + assert j.can_be_true + assert j.can_be_false + + def test_other_mixed_types(self) -> None: + # In general, joining unrelated types produces object. 
+ for t1 in [self.fx.a, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: + for t2 in [self.fx.a, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: + if str(t1) != str(t2): + self.assert_join(t1, t2, self.fx.o) + + def test_simple_generics(self) -> None: + with state.strict_optional_set(False): + self.assert_join(self.fx.ga, self.fx.nonet, self.fx.ga) + with state.strict_optional_set(True): + self.assert_join(self.fx.ga, self.fx.nonet, UnionType([self.fx.ga, NoneType()])) + + self.assert_join(self.fx.ga, self.fx.anyt, self.fx.anyt) + + for t in [ + self.fx.a, + self.fx.o, + self.fx.t, + self.tuple(), + self.callable(self.fx.a, self.fx.b), + ]: + self.assert_join(t, self.fx.ga, self.fx.o) + + def test_generics_invariant(self) -> None: + self.assert_join(self.fx.ga, self.fx.ga, self.fx.ga) + self.assert_join(self.fx.ga, self.fx.gb, self.fx.o) + self.assert_join(self.fx.ga, self.fx.gd, self.fx.o) + self.assert_join(self.fx.ga, self.fx.g2a, self.fx.o) + + def test_generics_covariant(self) -> None: + self.assert_join(self.fx_co.ga, self.fx_co.ga, self.fx_co.ga) + self.assert_join(self.fx_co.ga, self.fx_co.gb, self.fx_co.ga) + self.assert_join(self.fx_co.ga, self.fx_co.gd, self.fx_co.go) + self.assert_join(self.fx_co.ga, self.fx_co.g2a, self.fx_co.o) + + def test_generics_contravariant(self) -> None: + self.assert_join(self.fx_contra.ga, self.fx_contra.ga, self.fx_contra.ga) + # TODO: this can be more precise than "object", see a comment in mypy/join.py + self.assert_join(self.fx_contra.ga, self.fx_contra.gb, self.fx_contra.o) + self.assert_join(self.fx_contra.ga, self.fx_contra.g2a, self.fx_contra.o) + + def test_generics_with_multiple_args(self) -> None: + self.assert_join(self.fx_co.hab, self.fx_co.hab, self.fx_co.hab) + self.assert_join(self.fx_co.hab, self.fx_co.hbb, self.fx_co.hab) + self.assert_join(self.fx_co.had, self.fx_co.haa, self.fx_co.hao) + + def test_generics_with_inheritance(self) -> None: + self.assert_join(self.fx_co.gsab, self.fx_co.gb, self.fx_co.gb) + self.assert_join(self.fx_co.gsba, self.fx_co.gb, self.fx_co.ga) + self.assert_join(self.fx_co.gsab, self.fx_co.gd, self.fx_co.go) + + def test_generics_with_inheritance_and_shared_supertype(self) -> None: + self.assert_join(self.fx_co.gsba, self.fx_co.gs2a, self.fx_co.ga) + self.assert_join(self.fx_co.gsab, self.fx_co.gs2a, self.fx_co.ga) + self.assert_join(self.fx_co.gsab, self.fx_co.gs2d, self.fx_co.go) + + def test_generic_types_and_any(self) -> None: + self.assert_join(self.fx.gdyn, self.fx.ga, self.fx.gdyn) + self.assert_join(self.fx_co.gdyn, self.fx_co.ga, self.fx_co.gdyn) + self.assert_join(self.fx_contra.gdyn, self.fx_contra.ga, self.fx_contra.gdyn) + + def test_callables_with_any(self) -> None: + self.assert_join( + self.callable(self.fx.a, self.fx.a, self.fx.anyt, self.fx.a), + self.callable(self.fx.a, self.fx.anyt, self.fx.a, self.fx.anyt), + self.callable(self.fx.a, self.fx.anyt, self.fx.anyt, self.fx.anyt), + ) + + def test_overloaded(self) -> None: + c = self.callable + + def ov(*items: CallableType) -> Overloaded: + return Overloaded(list(items)) + + fx = self.fx + func = fx.function + c1 = c(fx.a, fx.a) + c2 = c(fx.b, fx.b) + c3 = c(fx.c, fx.c) + self.assert_join(ov(c1, c2), c1, c1) + self.assert_join(ov(c1, c2), c2, c2) + self.assert_join(ov(c1, c2), ov(c1, c2), ov(c1, c2)) + self.assert_join(ov(c1, c2), ov(c1, c3), c1) + self.assert_join(ov(c2, c1), ov(c3, c1), c1) + self.assert_join(ov(c1, c2), c3, func) + + def test_overloaded_with_any(self) -> None: + c = self.callable + + def 
ov(*items: CallableType) -> Overloaded: + return Overloaded(list(items)) + + fx = self.fx + any = fx.anyt + self.assert_join(ov(c(fx.a, fx.a), c(fx.b, fx.b)), c(any, fx.b), c(any, fx.b)) + self.assert_join(ov(c(fx.a, fx.a), c(any, fx.b)), c(fx.b, fx.b), c(any, fx.b)) + + def test_join_interface_types(self) -> None: + self.assert_join(self.fx.f, self.fx.f, self.fx.f) + self.assert_join(self.fx.f, self.fx.f2, self.fx.o) + self.assert_join(self.fx.f, self.fx.f3, self.fx.f) + + def test_join_interface_and_class_types(self) -> None: + self.assert_join(self.fx.o, self.fx.f, self.fx.o) + self.assert_join(self.fx.a, self.fx.f, self.fx.o) + + self.assert_join(self.fx.e, self.fx.f, self.fx.f) + + @skip + def test_join_class_types_with_interface_result(self) -> None: + # Unique result + self.assert_join(self.fx.e, self.fx.e2, self.fx.f) + + # Ambiguous result + self.assert_join(self.fx.e2, self.fx.e3, self.fx.anyt) + + @skip + def test_generic_interfaces(self) -> None: + fx = InterfaceTypeFixture() + + self.assert_join(fx.gfa, fx.gfa, fx.gfa) + self.assert_join(fx.gfa, fx.gfb, fx.o) + + self.assert_join(fx.m1, fx.gfa, fx.gfa) + + self.assert_join(fx.m1, fx.gfb, fx.o) + + def test_simple_type_objects(self) -> None: + t1 = self.type_callable(self.fx.a, self.fx.a) + t2 = self.type_callable(self.fx.b, self.fx.b) + tr = self.type_callable(self.fx.b, self.fx.a) + + self.assert_join(t1, t1, t1) + j = join_types(t1, t1) + assert isinstance(j, CallableType) + assert j.is_type_obj() + + self.assert_join(t1, t2, tr) + self.assert_join(t1, self.fx.type_type, self.fx.type_type) + self.assert_join(self.fx.type_type, self.fx.type_type, self.fx.type_type) + + def test_type_type(self) -> None: + self.assert_join(self.fx.type_a, self.fx.type_b, self.fx.type_a) + self.assert_join(self.fx.type_b, self.fx.type_any, self.fx.type_any) + self.assert_join(self.fx.type_b, self.fx.type_type, self.fx.type_type) + self.assert_join(self.fx.type_b, self.fx.type_c, self.fx.type_a) + self.assert_join(self.fx.type_c, self.fx.type_d, TypeType.make_normalized(self.fx.o)) + self.assert_join(self.fx.type_type, self.fx.type_any, self.fx.type_type) + self.assert_join(self.fx.type_b, self.fx.anyt, self.fx.anyt) + + def test_literal_type(self) -> None: + a = self.fx.a + d = self.fx.d + lit1 = self.fx.lit1 + lit2 = self.fx.lit2 + lit3 = self.fx.lit3 + + self.assert_join(lit1, lit1, lit1) + self.assert_join(lit1, a, a) + self.assert_join(lit1, d, self.fx.o) + self.assert_join(lit1, lit2, a) + self.assert_join(lit1, lit3, self.fx.o) + self.assert_join(lit1, self.fx.anyt, self.fx.anyt) + self.assert_join(UnionType([lit1, lit2]), lit2, UnionType([lit1, lit2])) + self.assert_join(UnionType([lit1, lit2]), a, a) + self.assert_join(UnionType([lit1, lit3]), a, UnionType([a, lit3])) + self.assert_join(UnionType([d, lit3]), lit3, d) + self.assert_join(UnionType([d, lit3]), d, UnionType([d, lit3])) + self.assert_join(UnionType([a, lit1]), lit1, a) + self.assert_join(UnionType([a, lit1]), lit2, a) + self.assert_join(UnionType([lit1, lit2]), UnionType([lit1, lit2]), UnionType([lit1, lit2])) + + # The order in which we try joining two unions influences the + # ordering of the items in the final produced unions. So, we + # manually call 'assert_simple_join' and tune the output + # after swapping the arguments here. 
+ self.assert_simple_join( + UnionType([lit1, lit2]), UnionType([lit2, lit3]), UnionType([lit1, lit2, lit3]) + ) + self.assert_simple_join( + UnionType([lit2, lit3]), UnionType([lit1, lit2]), UnionType([lit2, lit3, lit1]) + ) + + def test_variadic_tuple_joins(self) -> None: + # These tests really test just the "arity", to be sure it is handled correctly. + self.assert_join( + self.tuple(self.fx.a, self.fx.a), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + Instance(self.fx.std_tuplei, [self.fx.a]), + ) + self.assert_join( + self.tuple(self.fx.a, self.fx.a), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a), + ) + self.assert_join( + self.tuple(self.fx.a, self.fx.a), + self.tuple(self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + self.tuple(self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + ) + self.assert_join( + self.tuple( + self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a + ), + self.tuple( + self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a + ), + self.tuple( + self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a + ), + ) + self.assert_join( + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + self.tuple( + self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a + ), + Instance(self.fx.std_tuplei, [self.fx.a]), + ) + self.assert_join( + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + Instance(self.fx.std_tuplei, [self.fx.a]), + ) + self.assert_join( + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a), + self.tuple( + self.fx.b, UnpackType(Instance(self.fx.std_tuplei, [self.fx.b])), self.fx.b + ), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a), + ) + + def test_join_type_type_type_var(self) -> None: + self.assert_join(self.fx.type_a, self.fx.t, self.fx.o) + self.assert_join(self.fx.t, self.fx.type_a, self.fx.o) + + def test_join_type_var_bounds(self) -> None: + tvar1 = TypeVarType( + "tvar1", + "tvar1", + TypeVarId(-100), + [], + self.fx.o, + AnyType(TypeOfAny.from_omitted_generics), + INVARIANT, + ) + any_type = AnyType(TypeOfAny.special_form) + tvar2 = TypeVarType( + "tvar2", + "tvar2", + TypeVarId(-101), + [], + upper_bound=UnionType( + [ + TupleType([any_type], self.fx.std_tuple), + TupleType([any_type, any_type], self.fx.std_tuple), + ] + ), + default=AnyType(TypeOfAny.from_omitted_generics), + variance=INVARIANT, + ) + + self.assert_join(tvar1, tvar2, self.fx.o) + self.assert_join(tvar2, tvar1, self.fx.o) + + # There are additional test cases in check-inference.test. + + # TODO: Function types + varargs and default args. 
+ + def assert_join(self, s: Type, t: Type, join: Type) -> None: + self.assert_simple_join(s, t, join) + self.assert_simple_join(t, s, join) + + def assert_simple_join(self, s: Type, t: Type, join: Type) -> None: + result = join_types(s, t) + actual = str(result) + expected = str(join) + assert_equal(actual, expected, f"join({s}, {t}) == {{}} ({{}} expected)") + assert is_subtype(s, result), f"{s} not subtype of {result}" + assert is_subtype(t, result), f"{t} not subtype of {result}" + + def tuple(self, *a: Type) -> TupleType: + return TupleType(list(a), self.fx.std_tuple) + + def var_tuple(self, t: Type) -> Instance: + """Construct a variable-length tuple type""" + return Instance(self.fx.std_tuplei, [t]) + + def callable(self, *a: Type) -> CallableType: + """callable(a1, ..., an, r) constructs a callable with argument types + a1, ... an and return type r. + """ + n = len(a) - 1 + return CallableType(list(a[:-1]), [ARG_POS] * n, [None] * n, a[-1], self.fx.function) + + def type_callable(self, *a: Type) -> CallableType: + """type_callable(a1, ..., an, r) constructs a callable with + argument types a1, ... an and return type r, and which + represents a type. + """ + n = len(a) - 1 + return CallableType(list(a[:-1]), [ARG_POS] * n, [None] * n, a[-1], self.fx.type_type) + + +class MeetSuite(Suite): + def setUp(self) -> None: + self.fx = TypeFixture() + + def test_trivial_cases(self) -> None: + for simple in self.fx.a, self.fx.o, self.fx.b: + self.assert_meet(simple, simple, simple) + + def test_class_subtyping(self) -> None: + self.assert_meet(self.fx.a, self.fx.o, self.fx.a) + self.assert_meet(self.fx.a, self.fx.b, self.fx.b) + self.assert_meet(self.fx.b, self.fx.o, self.fx.b) + self.assert_meet(self.fx.a, self.fx.d, UninhabitedType()) + self.assert_meet(self.fx.b, self.fx.c, UninhabitedType()) + + def test_tuples(self) -> None: + self.assert_meet(self.tuple(), self.tuple(), self.tuple()) + self.assert_meet(self.tuple(self.fx.a), self.tuple(self.fx.a), self.tuple(self.fx.a)) + self.assert_meet( + self.tuple(self.fx.b, self.fx.c), + self.tuple(self.fx.a, self.fx.d), + self.tuple(self.fx.b, UninhabitedType()), + ) + + self.assert_meet( + self.tuple(self.fx.a, self.fx.a), self.fx.std_tuple, self.tuple(self.fx.a, self.fx.a) + ) + self.assert_meet( + self.tuple(self.fx.a), self.tuple(self.fx.a, self.fx.a), UninhabitedType() + ) + + def test_function_types(self) -> None: + self.assert_meet( + self.callable(self.fx.a, self.fx.b), + self.callable(self.fx.a, self.fx.b), + self.callable(self.fx.a, self.fx.b), + ) + + self.assert_meet( + self.callable(self.fx.a, self.fx.b), + self.callable(self.fx.b, self.fx.b), + self.callable(self.fx.a, self.fx.b), + ) + self.assert_meet( + self.callable(self.fx.a, self.fx.b), + self.callable(self.fx.a, self.fx.a), + self.callable(self.fx.a, self.fx.b), + ) + + def test_type_vars(self) -> None: + self.assert_meet(self.fx.t, self.fx.t, self.fx.t) + self.assert_meet(self.fx.s, self.fx.s, self.fx.s) + self.assert_meet(self.fx.t, self.fx.s, UninhabitedType()) + + def test_none(self) -> None: + self.assert_meet(NoneType(), NoneType(), NoneType()) + + self.assert_meet(NoneType(), self.fx.anyt, NoneType()) + + # Any type t joined with None results in None, unless t is Any. 
+ with state.strict_optional_set(False): + for t in [ + self.fx.a, + self.fx.o, + UnboundType("x"), + self.fx.t, + self.tuple(), + self.callable(self.fx.a, self.fx.b), + ]: + self.assert_meet(t, NoneType(), NoneType()) + + with state.strict_optional_set(True): + self.assert_meet(self.fx.o, NoneType(), NoneType()) + for t in [ + self.fx.a, + UnboundType("x"), + self.fx.t, + self.tuple(), + self.callable(self.fx.a, self.fx.b), + ]: + self.assert_meet(t, NoneType(), UninhabitedType()) + + def test_unbound_type(self) -> None: + self.assert_meet(UnboundType("x"), UnboundType("x"), self.fx.anyt) + self.assert_meet(UnboundType("x"), UnboundType("y"), self.fx.anyt) + + self.assert_meet(UnboundType("x"), self.fx.anyt, UnboundType("x")) + + # The meet of any type t with an unbound type results in dynamic. + # Unbound type means that there is an error somewhere in the program, + # so this does not affect type safety. + for t in [ + self.fx.a, + self.fx.o, + self.fx.t, + self.tuple(), + self.callable(self.fx.a, self.fx.b), + ]: + self.assert_meet(t, UnboundType("X"), self.fx.anyt) + + def test_dynamic_type(self) -> None: + # Meet against dynamic type always results in dynamic. + for t in [ + self.fx.anyt, + self.fx.a, + self.fx.o, + NoneType(), + UnboundType("x"), + self.fx.t, + self.tuple(), + self.callable(self.fx.a, self.fx.b), + ]: + self.assert_meet(t, self.fx.anyt, t) + + def test_simple_generics(self) -> None: + self.assert_meet(self.fx.ga, self.fx.ga, self.fx.ga) + self.assert_meet(self.fx.ga, self.fx.o, self.fx.ga) + self.assert_meet(self.fx.ga, self.fx.gb, self.fx.gb) + self.assert_meet(self.fx.ga, self.fx.gd, UninhabitedType()) + self.assert_meet(self.fx.ga, self.fx.g2a, UninhabitedType()) + + self.assert_meet(self.fx.ga, self.fx.nonet, UninhabitedType()) + self.assert_meet(self.fx.ga, self.fx.anyt, self.fx.ga) + + for t in [self.fx.a, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: + self.assert_meet(t, self.fx.ga, UninhabitedType()) + + def test_generics_with_multiple_args(self) -> None: + self.assert_meet(self.fx.hab, self.fx.hab, self.fx.hab) + self.assert_meet(self.fx.hab, self.fx.haa, self.fx.hab) + self.assert_meet(self.fx.hab, self.fx.had, UninhabitedType()) + self.assert_meet(self.fx.hab, self.fx.hbb, self.fx.hbb) + + def test_generics_with_inheritance(self) -> None: + self.assert_meet(self.fx.gsab, self.fx.gb, self.fx.gsab) + self.assert_meet(self.fx.gsba, self.fx.gb, UninhabitedType()) + + def test_generics_with_inheritance_and_shared_supertype(self) -> None: + self.assert_meet(self.fx.gsba, self.fx.gs2a, UninhabitedType()) + self.assert_meet(self.fx.gsab, self.fx.gs2a, UninhabitedType()) + + def test_generic_types_and_dynamic(self) -> None: + self.assert_meet(self.fx.gdyn, self.fx.ga, self.fx.ga) + + def test_callables_with_dynamic(self) -> None: + self.assert_meet( + self.callable(self.fx.a, self.fx.a, self.fx.anyt, self.fx.a), + self.callable(self.fx.a, self.fx.anyt, self.fx.a, self.fx.anyt), + self.callable(self.fx.a, self.fx.anyt, self.fx.anyt, self.fx.anyt), + ) + + def test_meet_interface_types(self) -> None: + self.assert_meet(self.fx.f, self.fx.f, self.fx.f) + self.assert_meet(self.fx.f, self.fx.f2, UninhabitedType()) + self.assert_meet(self.fx.f, self.fx.f3, self.fx.f3) + + def test_meet_interface_and_class_types(self) -> None: + self.assert_meet(self.fx.o, self.fx.f, self.fx.f) + self.assert_meet(self.fx.a, self.fx.f, UninhabitedType()) + + self.assert_meet(self.fx.e, self.fx.f, self.fx.e) + + def test_meet_class_types_with_shared_interfaces(self) -> 
None: + # These have nothing special with respect to meets, unlike joins. These + # are for completeness only. + self.assert_meet(self.fx.e, self.fx.e2, UninhabitedType()) + self.assert_meet(self.fx.e2, self.fx.e3, UninhabitedType()) + + def test_meet_with_generic_interfaces(self) -> None: + fx = InterfaceTypeFixture() + self.assert_meet(fx.gfa, fx.m1, fx.m1) + self.assert_meet(fx.gfa, fx.gfa, fx.gfa) + self.assert_meet(fx.gfb, fx.m1, UninhabitedType()) + + def test_type_type(self) -> None: + self.assert_meet(self.fx.type_a, self.fx.type_b, self.fx.type_b) + self.assert_meet(self.fx.type_b, self.fx.type_any, self.fx.type_b) + self.assert_meet(self.fx.type_b, self.fx.type_type, self.fx.type_b) + self.assert_meet(self.fx.type_b, self.fx.type_c, self.fx.type_never) + self.assert_meet(self.fx.type_c, self.fx.type_d, self.fx.type_never) + self.assert_meet(self.fx.type_type, self.fx.type_any, self.fx.type_any) + self.assert_meet(self.fx.type_b, self.fx.anyt, self.fx.type_b) + + def test_literal_type(self) -> None: + a = self.fx.a + lit1 = self.fx.lit1 + lit2 = self.fx.lit2 + lit3 = self.fx.lit3 + + self.assert_meet(lit1, lit1, lit1) + self.assert_meet(lit1, a, lit1) + self.assert_meet_uninhabited(lit1, lit3) + self.assert_meet_uninhabited(lit1, lit2) + self.assert_meet(UnionType([lit1, lit2]), lit1, lit1) + self.assert_meet(UnionType([lit1, lit2]), UnionType([lit2, lit3]), lit2) + self.assert_meet(UnionType([lit1, lit2]), UnionType([lit1, lit2]), UnionType([lit1, lit2])) + self.assert_meet(lit1, self.fx.anyt, lit1) + self.assert_meet(lit1, self.fx.o, lit1) + + assert is_same_type(lit1, narrow_declared_type(lit1, a)) + assert is_same_type(lit2, narrow_declared_type(lit2, a)) + + # FIX generic interfaces + ranges + + def assert_meet_uninhabited(self, s: Type, t: Type) -> None: + with state.strict_optional_set(False): + self.assert_meet(s, t, self.fx.nonet) + with state.strict_optional_set(True): + self.assert_meet(s, t, self.fx.uninhabited) + + def test_variadic_tuple_meets(self) -> None: + # These tests really test just the "arity", to be sure it is handled correctly. 
+ self.assert_meet( + self.tuple(self.fx.a, self.fx.a), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + self.tuple(self.fx.a, self.fx.a), + ) + self.assert_meet( + self.tuple(self.fx.a, self.fx.a), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a), + self.tuple(self.fx.a, self.fx.a), + ) + self.assert_meet( + self.tuple(self.fx.a, self.fx.a), + self.tuple(self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + self.tuple(self.fx.a, self.fx.a), + ) + self.assert_meet( + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + ) + self.assert_meet( + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a), + self.tuple(self.fx.b, UnpackType(Instance(self.fx.std_tuplei, [self.fx.b]))), + self.tuple(self.fx.b, UnpackType(Instance(self.fx.std_tuplei, [self.fx.b]))), + ) + + def assert_meet(self, s: Type, t: Type, meet: Type) -> None: + self.assert_simple_meet(s, t, meet) + self.assert_simple_meet(t, s, meet) + + def assert_simple_meet(self, s: Type, t: Type, meet: Type) -> None: + result = meet_types(s, t) + actual = str(result) + expected = str(meet) + assert_equal(actual, expected, f"meet({s}, {t}) == {{}} ({{}} expected)") + assert is_subtype(result, s), f"{result} not subtype of {s}" + assert is_subtype(result, t), f"{result} not subtype of {t}" + + def tuple(self, *a: Type) -> TupleType: + return TupleType(list(a), self.fx.std_tuple) + + def callable(self, *a: Type) -> CallableType: + """callable(a1, ..., an, r) constructs a callable with argument types + a1, ... an and return type r. + """ + n = len(a) - 1 + return CallableType(list(a[:-1]), [ARG_POS] * n, [None] * n, a[-1], self.fx.function) + + +class SameTypeSuite(Suite): + def setUp(self) -> None: + self.fx = TypeFixture() + + def test_literal_type(self) -> None: + a = self.fx.a + b = self.fx.b # Reminder: b is a subclass of a + + lit1 = self.fx.lit1 + lit2 = self.fx.lit2 + lit3 = self.fx.lit3 + + self.assert_same(lit1, lit1) + self.assert_same(UnionType([lit1, lit2]), UnionType([lit1, lit2])) + self.assert_same(UnionType([lit1, lit2]), UnionType([lit2, lit1])) + self.assert_same(UnionType([a, b]), UnionType([b, a])) + self.assert_not_same(lit1, b) + self.assert_not_same(lit1, lit2) + self.assert_not_same(lit1, lit3) + + self.assert_not_same(lit1, self.fx.anyt) + self.assert_not_same(lit1, self.fx.nonet) + + def assert_same(self, s: Type, t: Type, strict: bool = True) -> None: + self.assert_simple_is_same(s, t, expected=True, strict=strict) + self.assert_simple_is_same(t, s, expected=True, strict=strict) + + def assert_not_same(self, s: Type, t: Type, strict: bool = True) -> None: + self.assert_simple_is_same(s, t, False, strict=strict) + self.assert_simple_is_same(t, s, False, strict=strict) + + def assert_simple_is_same(self, s: Type, t: Type, expected: bool, strict: bool) -> None: + actual = is_same_type(s, t) + assert_equal(actual, expected, f"is_same_type({s}, {t}) is {{}} ({{}} expected)") + + if strict: + actual2 = s == t + assert_equal(actual2, expected, f"({s} == {t}) is {{}} ({{}} expected)") + assert_equal( + hash(s) == hash(t), expected, f"(hash({s}) == hash({t}) is {{}} ({{}} expected)" + ) + + +class RemoveLastKnownValueSuite(Suite): + def setUp(self) -> None: + self.fx = TypeFixture() + + def test_optional(self) -> None: + t = UnionType.make_union([self.fx.a, self.fx.nonet]) + 
self.assert_union_result(t, [self.fx.a, self.fx.nonet]) + + def test_two_instances(self) -> None: + t = UnionType.make_union([self.fx.a, self.fx.b]) + self.assert_union_result(t, [self.fx.a, self.fx.b]) + + def test_multiple_same_instances(self) -> None: + t = UnionType.make_union([self.fx.a, self.fx.a]) + assert remove_instance_last_known_values(t) == self.fx.a + t = UnionType.make_union([self.fx.a, self.fx.a, self.fx.b]) + self.assert_union_result(t, [self.fx.a, self.fx.b]) + t = UnionType.make_union([self.fx.a, self.fx.nonet, self.fx.a, self.fx.b]) + self.assert_union_result(t, [self.fx.a, self.fx.nonet, self.fx.b]) + + def test_single_last_known_value(self) -> None: + t = UnionType.make_union([self.fx.lit1_inst, self.fx.nonet]) + self.assert_union_result(t, [self.fx.a, self.fx.nonet]) + + def test_last_known_values_with_merge(self) -> None: + t = UnionType.make_union([self.fx.lit1_inst, self.fx.lit2_inst, self.fx.lit4_inst]) + assert remove_instance_last_known_values(t) == self.fx.a + t = UnionType.make_union( + [self.fx.lit1_inst, self.fx.b, self.fx.lit2_inst, self.fx.lit4_inst] + ) + self.assert_union_result(t, [self.fx.a, self.fx.b]) + + def test_generics(self) -> None: + t = UnionType.make_union([self.fx.ga, self.fx.gb]) + self.assert_union_result(t, [self.fx.ga, self.fx.gb]) + + def assert_union_result(self, t: ProperType, expected: list[Type]) -> None: + t2 = remove_instance_last_known_values(t) + assert type(t2) is UnionType + assert t2.items == expected + + +class ShallowOverloadMatchingSuite(Suite): + def setUp(self) -> None: + self.fx = TypeFixture() + + def test_simple(self) -> None: + fx = self.fx + ov = self.make_overload([[("x", fx.anyt, ARG_NAMED)], [("y", fx.anyt, ARG_NAMED)]]) + # Match first only + self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "x")), 0) + # Match second only + self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "y")), 1) + # No match -- invalid keyword arg name + self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "z")), 1) + # No match -- missing arg + self.assert_find_shallow_matching_overload_item(ov, make_call(), 1) + # No match -- extra arg + self.assert_find_shallow_matching_overload_item( + ov, make_call(("foo", "x"), ("foo", "z")), 1 + ) + + def test_match_using_types(self) -> None: + fx = self.fx + ov = self.make_overload( + [ + [("x", fx.nonet, ARG_POS)], + [("x", fx.lit_false, ARG_POS)], + [("x", fx.lit_true, ARG_POS)], + [("x", fx.anyt, ARG_POS)], + ] + ) + self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 0) + self.assert_find_shallow_matching_overload_item(ov, make_call(("builtins.False", None)), 1) + self.assert_find_shallow_matching_overload_item(ov, make_call(("builtins.True", None)), 2) + self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", None)), 3) + + def test_none_special_cases(self) -> None: + fx = self.fx + ov = self.make_overload( + [[("x", fx.callable(fx.nonet), ARG_POS)], [("x", fx.nonet, ARG_POS)]] + ) + self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 1) + self.assert_find_shallow_matching_overload_item(ov, make_call(("func", None)), 0) + ov = self.make_overload([[("x", fx.str_type, ARG_POS)], [("x", fx.nonet, ARG_POS)]]) + self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 1) + self.assert_find_shallow_matching_overload_item(ov, make_call(("func", None)), 0) + ov = self.make_overload( + [[("x", UnionType([fx.str_type, fx.a]), ARG_POS)], [("x", fx.nonet, 
ARG_POS)]] + ) + self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 1) + self.assert_find_shallow_matching_overload_item(ov, make_call(("func", None)), 0) + ov = self.make_overload([[("x", fx.o, ARG_POS)], [("x", fx.nonet, ARG_POS)]]) + self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 0) + self.assert_find_shallow_matching_overload_item(ov, make_call(("func", None)), 0) + ov = self.make_overload( + [[("x", UnionType([fx.str_type, fx.nonet]), ARG_POS)], [("x", fx.nonet, ARG_POS)]] + ) + self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 0) + self.assert_find_shallow_matching_overload_item(ov, make_call(("func", None)), 0) + ov = self.make_overload([[("x", fx.anyt, ARG_POS)], [("x", fx.nonet, ARG_POS)]]) + self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 0) + self.assert_find_shallow_matching_overload_item(ov, make_call(("func", None)), 0) + + def test_optional_arg(self) -> None: + fx = self.fx + ov = self.make_overload( + [[("x", fx.anyt, ARG_NAMED)], [("y", fx.anyt, ARG_OPT)], [("z", fx.anyt, ARG_NAMED)]] + ) + self.assert_find_shallow_matching_overload_item(ov, make_call(), 1) + self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "x")), 0) + self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "y")), 1) + self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "z")), 2) + + def test_two_args(self) -> None: + fx = self.fx + ov = self.make_overload( + [ + [("x", fx.nonet, ARG_OPT), ("y", fx.anyt, ARG_OPT)], + [("x", fx.anyt, ARG_OPT), ("y", fx.anyt, ARG_OPT)], + ] + ) + self.assert_find_shallow_matching_overload_item(ov, make_call(), 0) + self.assert_find_shallow_matching_overload_item(ov, make_call(("None", "x")), 0) + self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "x")), 1) + self.assert_find_shallow_matching_overload_item( + ov, make_call(("foo", "y"), ("None", "x")), 0 + ) + self.assert_find_shallow_matching_overload_item( + ov, make_call(("foo", "y"), ("bar", "x")), 1 + ) + + def assert_find_shallow_matching_overload_item( + self, ov: Overloaded, call: CallExpr, expected_index: int + ) -> None: + c = find_shallow_matching_overload_item(ov, call) + assert c in ov.items + assert ov.items.index(c) == expected_index + + def make_overload(self, items: list[list[tuple[str, Type, ArgKind]]]) -> Overloaded: + result = [] + for item in items: + arg_types = [] + arg_names = [] + arg_kinds = [] + for name, typ, kind in item: + arg_names.append(name) + arg_types.append(typ) + arg_kinds.append(kind) + result.append( + CallableType( + arg_types, arg_kinds, arg_names, ret_type=NoneType(), fallback=self.fx.o + ) + ) + return Overloaded(result) + + +def make_call(*items: tuple[str, str | None]) -> CallExpr: + args: list[Expression] = [] + arg_names = [] + arg_kinds = [] + for arg, name in items: + shortname = arg.split(".")[-1] + n = NameExpr(shortname) + n.fullname = arg + args.append(n) + arg_names.append(name) + if name: + arg_kinds.append(ARG_NAMED) + else: + arg_kinds.append(ARG_POS) + return CallExpr(NameExpr("f"), args, arg_kinds, arg_names) + + +class TestExpandTypeLimitGetProperType(TestCase): + # WARNING: do not increase this number unless absolutely necessary, + # and you understand what you are doing. 
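+ # (Presumably the cap exists because get_proper_type() is comparatively
+ # expensive and mypy.expandtype sits on a hot path, so new call sites are
+ # meant to be avoided rather than the limit raised.)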
+ ALLOWED_GET_PROPER_TYPES = 7 + + @skipUnless(mypy.expandtype.__file__.endswith(".py"), "Skip for compiled mypy") + def test_count_get_proper_type(self) -> None: + with open(mypy.expandtype.__file__) as f: + code = f.read() + get_proper_type_count = len(re.findall(r"get_proper_type\(", code)) + get_proper_type_count -= len(re.findall(r"get_proper_type\(\)", code)) + assert get_proper_type_count == self.ALLOWED_GET_PROPER_TYPES diff --git a/.venv/lib/python3.12/site-packages/mypy/test/testutil.py b/.venv/lib/python3.12/site-packages/mypy/test/testutil.py new file mode 100644 index 0000000..a7c3f1c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/testutil.py @@ -0,0 +1,111 @@ +from __future__ import annotations + +import os +from unittest import TestCase, mock + +from mypy.inspections import parse_location +from mypy.util import _generate_junit_contents, get_terminal_width + + +class TestGetTerminalSize(TestCase): + def test_get_terminal_size_in_pty_defaults_to_80(self) -> None: + # when run using a pty, `os.get_terminal_size()` returns `0, 0` + ret = os.terminal_size((0, 0)) + mock_environ = os.environ.copy() + mock_environ.pop("COLUMNS", None) + with mock.patch.object(os, "get_terminal_size", return_value=ret): + with mock.patch.dict(os.environ, values=mock_environ, clear=True): + assert get_terminal_width() == 80 + + def test_parse_location_windows(self) -> None: + assert parse_location(r"C:\test.py:1:1") == (r"C:\test.py", [1, 1]) + assert parse_location(r"C:\test.py:1:1:1:1") == (r"C:\test.py", [1, 1, 1, 1]) + + +class TestWriteJunitXml(TestCase): + def test_junit_pass(self) -> None: + serious = False + messages_by_file: dict[str | None, list[str]] = {} + expected = """ + + + + +""" + result = _generate_junit_contents( + dt=1.23, + serious=serious, + messages_by_file=messages_by_file, + version="3.14", + platform="test-plat", + ) + assert result == expected + + def test_junit_fail_escape_xml_chars(self) -> None: + serious = False + messages_by_file: dict[str | None, list[str]] = { + "file1.py": ["Test failed", "another line < > &"] + } + expected = """ + + + Test failed +another line < > & + + +""" + result = _generate_junit_contents( + dt=1.23, + serious=serious, + messages_by_file=messages_by_file, + version="3.14", + platform="test-plat", + ) + assert result == expected + + def test_junit_fail_two_files(self) -> None: + serious = False + messages_by_file: dict[str | None, list[str]] = { + "file1.py": ["Test failed", "another line"], + "file2.py": ["Another failure", "line 2"], + } + expected = """ + + + Test failed +another line + + + Another failure +line 2 + + +""" + result = _generate_junit_contents( + dt=1.23, + serious=serious, + messages_by_file=messages_by_file, + version="3.14", + platform="test-plat", + ) + assert result == expected + + def test_serious_error(self) -> None: + serious = True + messages_by_file: dict[str | None, list[str]] = {None: ["Error line 1", "Error line 2"]} + expected = """ + + + Error line 1 +Error line 2 + + +""" + result = _generate_junit_contents( + dt=1.23, + serious=serious, + messages_by_file=messages_by_file, + version="3.14", + platform="test-plat", + ) + assert result == expected diff --git a/.venv/lib/python3.12/site-packages/mypy/test/typefixture.py b/.venv/lib/python3.12/site-packages/mypy/test/typefixture.py new file mode 100644 index 0000000..f70c8b9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/typefixture.py @@ -0,0 +1,419 @@ +"""Fixture used in type-related test cases. 
+ +It contains class TypeInfos and Type objects. +""" + +from __future__ import annotations + +from mypy.nodes import ( + ARG_OPT, + ARG_POS, + ARG_STAR, + COVARIANT, + MDEF, + Block, + ClassDef, + FuncDef, + SymbolTable, + SymbolTableNode, + TypeAlias, + TypeInfo, +) +from mypy.semanal_shared import set_callable_name +from mypy.types import ( + AnyType, + CallableType, + Instance, + LiteralType, + NoneType, + Type, + TypeAliasType, + TypeOfAny, + TypeType, + TypeVarId, + TypeVarLikeType, + TypeVarTupleType, + TypeVarType, + UninhabitedType, + UnionType, +) + + +class TypeFixture: + """Helper class that is used as a fixture in type-related unit tests. + + The members are initialized to contain various type-related values. + """ + + def __init__(self, variance: int = COVARIANT) -> None: + # The 'object' class + self.oi = self.make_type_info("builtins.object") # class object + self.o = Instance(self.oi, []) # object + + # Type variables (these are effectively global) + + def make_type_var( + name: str, id: int, values: list[Type], upper_bound: Type, variance: int + ) -> TypeVarType: + return TypeVarType( + name, + name, + TypeVarId(id), + values, + upper_bound, + AnyType(TypeOfAny.from_omitted_generics), + variance, + ) + + self.t = make_type_var("T", 1, [], self.o, variance) # T`1 (type variable) + self.tf = make_type_var("T", -1, [], self.o, variance) # T`-1 (type variable) + self.tf2 = make_type_var("T", -2, [], self.o, variance) # T`-2 (type variable) + self.s = make_type_var("S", 2, [], self.o, variance) # S`2 (type variable) + self.s1 = make_type_var("S", 1, [], self.o, variance) # S`1 (type variable) + self.sf = make_type_var("S", -2, [], self.o, variance) # S`-2 (type variable) + self.sf1 = make_type_var("S", -1, [], self.o, variance) # S`-1 (type variable) + self.u = make_type_var("U", 3, [], self.o, variance) # U`3 (type variable) + + # Simple types + self.anyt = AnyType(TypeOfAny.special_form) + self.nonet = NoneType() + self.uninhabited = UninhabitedType() + self.a_uninhabited = UninhabitedType() + self.a_uninhabited.ambiguous = True + + # Abstract class TypeInfos + + # class F + self.fi = self.make_type_info("F", is_abstract=True) + + # class F2 + self.f2i = self.make_type_info("F2", is_abstract=True) + + # class F3(F) + self.f3i = self.make_type_info("F3", is_abstract=True, mro=[self.fi]) + + # Class TypeInfos + self.std_tuplei = self.make_type_info( + "builtins.tuple", mro=[self.oi], typevars=["T"], variances=[COVARIANT] + ) # class tuple + self.type_typei = self.make_type_info("builtins.type") # class type + self.bool_type_info = self.make_type_info("builtins.bool") + self.str_type_info = self.make_type_info("builtins.str") + self.functioni = self.make_type_info("builtins.function") # function TODO + self.ai = self.make_type_info("A", mro=[self.oi]) # class A + self.bi = self.make_type_info("B", mro=[self.ai, self.oi]) # class B(A) + self.ci = self.make_type_info("C", mro=[self.ai, self.oi]) # class C(A) + self.di = self.make_type_info("D", mro=[self.oi]) # class D + # class E(F) + self.ei = self.make_type_info("E", mro=[self.fi, self.oi]) + # class E2(F2, F) + self.e2i = self.make_type_info("E2", mro=[self.f2i, self.fi, self.oi]) + # class E3(F, F2) + self.e3i = self.make_type_info("E3", mro=[self.fi, self.f2i, self.oi]) + + # Generic class TypeInfos + # G[T] + self.gi = self.make_type_info("G", mro=[self.oi], typevars=["T"], variances=[variance]) + # G2[T] + self.g2i = self.make_type_info("G2", mro=[self.oi], typevars=["T"], variances=[variance]) + # H[S, T] + self.hi = 
self.make_type_info( + "H", mro=[self.oi], typevars=["S", "T"], variances=[variance, variance] + ) + # GS[T, S] <: G[S] + self.gsi = self.make_type_info( + "GS", + mro=[self.gi, self.oi], + typevars=["T", "S"], + variances=[variance, variance], + bases=[Instance(self.gi, [self.s])], + ) + # GS2[S] <: G[S] + self.gs2i = self.make_type_info( + "GS2", + mro=[self.gi, self.oi], + typevars=["S"], + variances=[variance], + bases=[Instance(self.gi, [self.s1])], + ) + + # list[T] + self.std_listi = self.make_type_info( + "builtins.list", mro=[self.oi], typevars=["T"], variances=[variance] + ) + + # Instance types + self.std_tuple = Instance(self.std_tuplei, [self.anyt]) # tuple + self.type_type = Instance(self.type_typei, []) # type + self.function = Instance(self.functioni, []) # function TODO + self.str_type = Instance(self.str_type_info, []) + self.bool_type = Instance(self.bool_type_info, []) + self.a = Instance(self.ai, []) # A + self.b = Instance(self.bi, []) # B + self.c = Instance(self.ci, []) # C + self.d = Instance(self.di, []) # D + + self.e = Instance(self.ei, []) # E + self.e2 = Instance(self.e2i, []) # E2 + self.e3 = Instance(self.e3i, []) # E3 + + self.f = Instance(self.fi, []) # F + self.f2 = Instance(self.f2i, []) # F2 + self.f3 = Instance(self.f3i, []) # F3 + + # Generic instance types + self.ga = Instance(self.gi, [self.a]) # G[A] + self.gb = Instance(self.gi, [self.b]) # G[B] + self.gd = Instance(self.gi, [self.d]) # G[D] + self.go = Instance(self.gi, [self.o]) # G[object] + self.gt = Instance(self.gi, [self.t]) # G[T`1] + self.gtf = Instance(self.gi, [self.tf]) # G[T`-1] + self.gtf2 = Instance(self.gi, [self.tf2]) # G[T`-2] + self.gs = Instance(self.gi, [self.s]) # G[S] + self.gdyn = Instance(self.gi, [self.anyt]) # G[Any] + self.gn = Instance(self.gi, [NoneType()]) # G[None] + + self.g2a = Instance(self.g2i, [self.a]) # G2[A] + + self.gsaa = Instance(self.gsi, [self.a, self.a]) # GS[A, A] + self.gsab = Instance(self.gsi, [self.a, self.b]) # GS[A, B] + self.gsba = Instance(self.gsi, [self.b, self.a]) # GS[B, A] + + self.gs2a = Instance(self.gs2i, [self.a]) # GS2[A] + self.gs2b = Instance(self.gs2i, [self.b]) # GS2[B] + self.gs2d = Instance(self.gs2i, [self.d]) # GS2[D] + + self.hab = Instance(self.hi, [self.a, self.b]) # H[A, B] + self.haa = Instance(self.hi, [self.a, self.a]) # H[A, A] + self.hbb = Instance(self.hi, [self.b, self.b]) # H[B, B] + self.hts = Instance(self.hi, [self.t, self.s]) # H[T, S] + self.had = Instance(self.hi, [self.a, self.d]) # H[A, D] + self.hao = Instance(self.hi, [self.a, self.o]) # H[A, object] + + self.lsta = Instance(self.std_listi, [self.a]) # List[A] + self.lstb = Instance(self.std_listi, [self.b]) # List[B] + + self.lit1 = LiteralType(1, self.a) + self.lit2 = LiteralType(2, self.a) + self.lit3 = LiteralType("foo", self.d) + self.lit4 = LiteralType(4, self.a) + self.lit1_inst = Instance(self.ai, [], last_known_value=self.lit1) + self.lit2_inst = Instance(self.ai, [], last_known_value=self.lit2) + self.lit3_inst = Instance(self.di, [], last_known_value=self.lit3) + self.lit4_inst = Instance(self.ai, [], last_known_value=self.lit4) + + self.lit_str1 = LiteralType("x", self.str_type) + self.lit_str2 = LiteralType("y", self.str_type) + self.lit_str3 = LiteralType("z", self.str_type) + self.lit_str1_inst = Instance(self.str_type_info, [], last_known_value=self.lit_str1) + self.lit_str2_inst = Instance(self.str_type_info, [], last_known_value=self.lit_str2) + self.lit_str3_inst = Instance(self.str_type_info, [], last_known_value=self.lit_str3) + + 
self.lit_false = LiteralType(False, self.bool_type) + self.lit_true = LiteralType(True, self.bool_type) + + self.type_a = TypeType.make_normalized(self.a) + self.type_b = TypeType.make_normalized(self.b) + self.type_c = TypeType.make_normalized(self.c) + self.type_d = TypeType.make_normalized(self.d) + self.type_t = TypeType.make_normalized(self.t) + self.type_any = TypeType.make_normalized(self.anyt) + self.type_never = TypeType.make_normalized(UninhabitedType()) + + self._add_bool_dunder(self.bool_type_info) + self._add_bool_dunder(self.ai) + + # TypeVars with non-trivial bounds + self.ub = make_type_var("UB", 5, [], self.b, variance) # UB`5 (type variable) + self.uc = make_type_var("UC", 6, [], self.c, variance) # UC`6 (type variable) + + def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleType: + return TypeVarTupleType( + name, + name, + TypeVarId(id), + upper_bound, + self.std_tuple, + AnyType(TypeOfAny.from_omitted_generics), + ) + + obj_tuple = self.std_tuple.copy_modified(args=[self.o]) + self.ts = make_type_var_tuple("Ts", 1, obj_tuple) # Ts`1 (type var tuple) + self.ss = make_type_var_tuple("Ss", 2, obj_tuple) # Ss`2 (type var tuple) + self.us = make_type_var_tuple("Us", 3, obj_tuple) # Us`3 (type var tuple) + + self.gvi = self.make_type_info("GV", mro=[self.oi], typevars=["Ts"], typevar_tuple_index=0) + self.gv2i = self.make_type_info( + "GV2", mro=[self.oi], typevars=["T", "Ts", "S"], typevar_tuple_index=1 + ) + + def _add_bool_dunder(self, type_info: TypeInfo) -> None: + signature = CallableType([], [], [], Instance(self.bool_type_info, []), self.function) + bool_func = FuncDef("__bool__", [], Block([])) + bool_func.type = set_callable_name(signature, bool_func) + type_info.names[bool_func.name] = SymbolTableNode(MDEF, bool_func) + + # Helper methods + + def callable(self, *a: Type) -> CallableType: + """callable(a1, ..., an, r) constructs a callable with argument types + a1, ... an and return type r. + """ + return CallableType( + list(a[:-1]), [ARG_POS] * (len(a) - 1), [None] * (len(a) - 1), a[-1], self.function + ) + + def callable_type(self, *a: Type) -> CallableType: + """callable_type(a1, ..., an, r) constructs a callable with + argument types a1, ... an and return type r, and which + represents a type. + """ + return CallableType( + list(a[:-1]), [ARG_POS] * (len(a) - 1), [None] * (len(a) - 1), a[-1], self.type_type + ) + + def callable_default(self, min_args: int, *a: Type) -> CallableType: + """callable_default(min_args, a1, ..., an, r) constructs a + callable with argument types a1, ... an and return type r, + with min_args mandatory fixed arguments. + """ + n = len(a) - 1 + return CallableType( + list(a[:-1]), + [ARG_POS] * min_args + [ARG_OPT] * (n - min_args), + [None] * n, + a[-1], + self.function, + ) + + def callable_var_arg(self, min_args: int, *a: Type) -> CallableType: + """callable_var_arg(min_args, a1, ..., an, r) constructs a callable + with argument types a1, ... *an and return type r. 
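+ The last argument type becomes the *args (ARG_STAR) argument; the first
+ min_args arguments are mandatory positional arguments.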
+ """ + n = len(a) - 1 + return CallableType( + list(a[:-1]), + [ARG_POS] * min_args + [ARG_OPT] * (n - 1 - min_args) + [ARG_STAR], + [None] * n, + a[-1], + self.function, + ) + + def make_type_info( + self, + name: str, + module_name: str | None = None, + is_abstract: bool = False, + mro: list[TypeInfo] | None = None, + bases: list[Instance] | None = None, + typevars: list[str] | None = None, + typevar_tuple_index: int | None = None, + variances: list[int] | None = None, + ) -> TypeInfo: + """Make a TypeInfo suitable for use in unit tests.""" + + class_def = ClassDef(name, Block([]), None, []) + class_def.fullname = name + + if module_name is None: + if "." in name: + module_name = name.rsplit(".", 1)[0] + else: + module_name = "__main__" + + if typevars: + v: list[TypeVarLikeType] = [] + for id, n in enumerate(typevars, 1): + if typevar_tuple_index is not None and id - 1 == typevar_tuple_index: + v.append( + TypeVarTupleType( + n, + n, + TypeVarId(id), + self.std_tuple.copy_modified(args=[self.o]), + self.std_tuple.copy_modified(args=[self.o]), + AnyType(TypeOfAny.from_omitted_generics), + ) + ) + else: + if variances: + variance = variances[id - 1] + else: + variance = COVARIANT + v.append( + TypeVarType( + n, + n, + TypeVarId(id), + [], + self.o, + AnyType(TypeOfAny.from_omitted_generics), + variance=variance, + ) + ) + class_def.type_vars = v + + info = TypeInfo(SymbolTable(), class_def, module_name) + if mro is None: + mro = [] + if name != "builtins.object": + mro.append(self.oi) + info.mro = [info] + mro + if bases is None: + if mro: + # By default, assume that there is a single non-generic base. + bases = [Instance(mro[0], [])] + else: + bases = [] + info.bases = bases + + return info + + def def_alias_1(self, base: Instance) -> tuple[TypeAliasType, Type]: + A = TypeAliasType(None, []) + target = Instance( + self.std_tuplei, [UnionType([base, A])] + ) # A = Tuple[Union[base, A], ...] 
+ AN = TypeAlias(target, "__main__.A", "__main__", -1, -1) + A.alias = AN + return A, target + + def def_alias_2(self, base: Instance) -> tuple[TypeAliasType, Type]: + A = TypeAliasType(None, []) + target = UnionType( + [base, Instance(self.std_tuplei, [A])] + ) # A = Union[base, Tuple[A, ...]] + AN = TypeAlias(target, "__main__.A", "__main__", -1, -1) + A.alias = AN + return A, target + + def non_rec_alias( + self, + target: Type, + alias_tvars: list[TypeVarLikeType] | None = None, + args: list[Type] | None = None, + ) -> TypeAliasType: + AN = TypeAlias(target, "__main__.A", "__main__", -1, -1, alias_tvars=alias_tvars) + if args is None: + args = [] + return TypeAliasType(AN, args) + + +class InterfaceTypeFixture(TypeFixture): + """Extension of TypeFixture that contains additional generic + interface types.""" + + def __init__(self) -> None: + super().__init__() + # GF[T] + self.gfi = self.make_type_info("GF", typevars=["T"], is_abstract=True) + + # M1 <: GF[A] + self.m1i = self.make_type_info( + "M1", is_abstract=True, mro=[self.gfi, self.oi], bases=[Instance(self.gfi, [self.a])] + ) + + self.gfa = Instance(self.gfi, [self.a]) # GF[A] + self.gfb = Instance(self.gfi, [self.b]) # GF[B] + + self.m1 = Instance(self.m1i, []) # M1 diff --git a/.venv/lib/python3.12/site-packages/mypy/test/update_data.py b/.venv/lib/python3.12/site-packages/mypy/test/update_data.py new file mode 100644 index 0000000..84b6383 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/update_data.py @@ -0,0 +1,87 @@ +from __future__ import annotations + +import re +from collections import defaultdict +from collections.abc import Iterator + +from mypy.test.data import DataDrivenTestCase, DataFileCollector, DataFileFix, parse_test_data + + +def update_testcase_output( + testcase: DataDrivenTestCase, actual: list[str], *, incremental_step: int +) -> None: + if testcase.xfail: + return + collector = testcase.parent + assert isinstance(collector, DataFileCollector) + for fix in _iter_fixes(testcase, actual, incremental_step=incremental_step): + collector.enqueue_fix(fix) + + +def _iter_fixes( + testcase: DataDrivenTestCase, actual: list[str], *, incremental_step: int +) -> Iterator[DataFileFix]: + reports_by_line: dict[tuple[str, int], list[tuple[str, str]]] = defaultdict(list) + for error_line in actual: + comment_match = re.match( + r"^(?P[^:]+):(?P\d+): (?Perror|note|warning): (?P.+)$", + error_line, + ) + if comment_match: + filename = comment_match.group("filename") + lineno = int(comment_match.group("lineno")) + severity = comment_match.group("severity") + msg = comment_match.group("msg") + reports_by_line[filename, lineno].append((severity, msg)) + + test_items = parse_test_data(testcase.data, testcase.name) + + # If we have [out] and/or [outN], we update just those sections. 
+ if any(re.match(r"^out\d*$", test_item.id) for test_item in test_items): + for test_item in test_items: + if (incremental_step < 2 and test_item.id == "out") or ( + incremental_step >= 2 and test_item.id == f"out{incremental_step}" + ): + yield DataFileFix( + lineno=testcase.line + test_item.line - 1, + end_lineno=testcase.line + test_item.end_line - 1, + lines=actual + [""] * test_item.trimmed_newlines, + ) + + return + + # Update assertion comments within the sections + for test_item in test_items: + if test_item.id == "case": + source_lines = test_item.data + file_path = "main" + elif test_item.id == "file": + source_lines = test_item.data + file_path = f"tmp/{test_item.arg}" + else: + continue # other sections we don't touch + + fix_lines = [] + for lineno, source_line in enumerate(source_lines, start=1): + reports = reports_by_line.get((file_path, lineno)) + comment_match = re.search(r"(?P\s+)(?P# [EWN]: .+)$", source_line) + if comment_match: + source_line = source_line[: comment_match.start("indent")] # strip old comment + if reports: + indent = comment_match.group("indent") if comment_match else " " + # multiline comments are on the first line and then on subsequent lines empty lines + # with a continuation backslash + for j, (severity, msg) in enumerate(reports): + out_l = source_line if j == 0 else " " * len(source_line) + is_last = j == len(reports) - 1 + severity_char = severity[0].upper() + continuation = "" if is_last else " \\" + fix_lines.append(f"{out_l}{indent}# {severity_char}: {msg}{continuation}") + else: + fix_lines.append(source_line) + + yield DataFileFix( + lineno=testcase.line + test_item.line - 1, + end_lineno=testcase.line + test_item.end_line - 1, + lines=fix_lines + [""] * test_item.trimmed_newlines, + ) diff --git a/.venv/lib/python3.12/site-packages/mypy/test/visitors.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/test/visitors.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..54697d7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/test/visitors.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/test/visitors.py b/.venv/lib/python3.12/site-packages/mypy/test/visitors.py new file mode 100644 index 0000000..2b748ec --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/test/visitors.py @@ -0,0 +1,63 @@ +"""Visitor classes pulled out from different tests + +These are here because we don't currently support having interpreted +classes subtype compiled ones but pytest grabs the python file +even if the test was compiled. 
+ +""" + +from __future__ import annotations + +from mypy.nodes import AssignmentStmt, CallExpr, Expression, IntExpr, NameExpr, Node, TypeVarExpr +from mypy.traverser import TraverserVisitor +from mypy.treetransform import TransformVisitor +from mypy.types import Type + + +# from testtypegen +class SkippedNodeSearcher(TraverserVisitor): + def __init__(self) -> None: + self.nodes: set[Node] = set() + self.ignore_file = False + + def visit_assignment_stmt(self, s: AssignmentStmt) -> None: + if s.type or ignore_node(s.rvalue): + for lvalue in s.lvalues: + if isinstance(lvalue, NameExpr): + self.nodes.add(lvalue) + super().visit_assignment_stmt(s) + + def visit_name_expr(self, n: NameExpr) -> None: + if self.ignore_file: + self.nodes.add(n) + super().visit_name_expr(n) + + def visit_int_expr(self, n: IntExpr) -> None: + if self.ignore_file: + self.nodes.add(n) + super().visit_int_expr(n) + + +def ignore_node(node: Expression) -> bool: + """Return True if node is to be omitted from test case output.""" + + # We want to get rid of object() expressions in the typing module stub + # and also TypeVar(...) expressions. Since detecting whether a node comes + # from the typing module is not easy, we just to strip them all away. + if isinstance(node, TypeVarExpr): + return True + if isinstance(node, NameExpr) and node.fullname == "builtins.object": + return True + if isinstance(node, NameExpr) and node.fullname == "builtins.None": + return True + if isinstance(node, CallExpr) and (ignore_node(node.callee) or node.analyzed): + return True + + return False + + +# from testtransform +class TypeAssertTransformVisitor(TransformVisitor): + def type(self, type: Type) -> Type: + assert type is not None + return type diff --git a/.venv/lib/python3.12/site-packages/mypy/traverser.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/traverser.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..7f5a236 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/traverser.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/traverser.py b/.venv/lib/python3.12/site-packages/mypy/traverser.py new file mode 100644 index 0000000..baf234c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/traverser.py @@ -0,0 +1,1086 @@ +"""Generic node traverser visitor""" + +from __future__ import annotations + +from mypy_extensions import mypyc_attr, trait + +from mypy.nodes import ( + REVEAL_TYPE, + AssertStmt, + AssertTypeExpr, + AssignmentExpr, + AssignmentStmt, + AwaitExpr, + Block, + BreakStmt, + BytesExpr, + CallExpr, + CastExpr, + ClassDef, + ComparisonExpr, + ComplexExpr, + ConditionalExpr, + ContinueStmt, + Decorator, + DelStmt, + DictExpr, + DictionaryComprehension, + EllipsisExpr, + EnumCallExpr, + Expression, + ExpressionStmt, + FloatExpr, + ForStmt, + FuncBase, + FuncDef, + FuncItem, + GeneratorExpr, + GlobalDecl, + IfStmt, + Import, + ImportAll, + ImportFrom, + IndexExpr, + IntExpr, + LambdaExpr, + ListComprehension, + ListExpr, + MatchStmt, + MemberExpr, + MypyFile, + NamedTupleExpr, + NameExpr, + NewTypeExpr, + Node, + NonlocalDecl, + OperatorAssignmentStmt, + OpExpr, + OverloadedFuncDef, + ParamSpecExpr, + PassStmt, + PromoteExpr, + RaiseStmt, + ReturnStmt, + RevealExpr, + SetComprehension, + SetExpr, + SliceExpr, + StarExpr, + StrExpr, + SuperExpr, + TempNode, + TryStmt, + TupleExpr, + TypeAlias, + TypeAliasExpr, + TypeAliasStmt, + TypeApplication, + TypedDictExpr, + TypeFormExpr, + TypeVarExpr, + TypeVarTupleExpr, + 
UnaryExpr, + Var, + WhileStmt, + WithStmt, + YieldExpr, + YieldFromExpr, +) +from mypy.patterns import ( + AsPattern, + ClassPattern, + MappingPattern, + OrPattern, + SequencePattern, + SingletonPattern, + StarredPattern, + ValuePattern, +) +from mypy.visitor import NodeVisitor + + +@trait +@mypyc_attr(allow_interpreted_subclasses=True) +class TraverserVisitor(NodeVisitor[None]): + """A parse tree visitor that traverses the parse tree during visiting. + + It does not perform any actions outside the traversal. Subclasses + should override visit methods to perform actions during + traversal. Calling the superclass method allows reusing the + traversal implementation. + """ + + def __init__(self) -> None: + pass + + # Visit methods + + def visit_mypy_file(self, o: MypyFile, /) -> None: + for d in o.defs: + d.accept(self) + + def visit_block(self, block: Block, /) -> None: + for s in block.body: + s.accept(self) + + def visit_func(self, o: FuncItem, /) -> None: + if o.arguments is not None: + for arg in o.arguments: + init = arg.initializer + if init is not None: + init.accept(self) + + for arg in o.arguments: + self.visit_var(arg.variable) + + o.body.accept(self) + + def visit_func_def(self, o: FuncDef, /) -> None: + self.visit_func(o) + + def visit_overloaded_func_def(self, o: OverloadedFuncDef, /) -> None: + for item in o.items: + item.accept(self) + if o.impl: + o.impl.accept(self) + + def visit_class_def(self, o: ClassDef, /) -> None: + for d in o.decorators: + d.accept(self) + for base in o.base_type_exprs: + base.accept(self) + if o.metaclass: + o.metaclass.accept(self) + for v in o.keywords.values(): + v.accept(self) + o.defs.accept(self) + if o.analyzed: + o.analyzed.accept(self) + + def visit_decorator(self, o: Decorator, /) -> None: + o.func.accept(self) + o.var.accept(self) + for decorator in o.decorators: + decorator.accept(self) + + def visit_expression_stmt(self, o: ExpressionStmt, /) -> None: + o.expr.accept(self) + + def visit_assignment_stmt(self, o: AssignmentStmt, /) -> None: + o.rvalue.accept(self) + for l in o.lvalues: + l.accept(self) + + def visit_operator_assignment_stmt(self, o: OperatorAssignmentStmt, /) -> None: + o.rvalue.accept(self) + o.lvalue.accept(self) + + def visit_while_stmt(self, o: WhileStmt, /) -> None: + o.expr.accept(self) + o.body.accept(self) + if o.else_body: + o.else_body.accept(self) + + def visit_for_stmt(self, o: ForStmt, /) -> None: + o.index.accept(self) + o.expr.accept(self) + o.body.accept(self) + if o.else_body: + o.else_body.accept(self) + + def visit_return_stmt(self, o: ReturnStmt, /) -> None: + if o.expr is not None: + o.expr.accept(self) + + def visit_assert_stmt(self, o: AssertStmt, /) -> None: + if o.expr is not None: + o.expr.accept(self) + if o.msg is not None: + o.msg.accept(self) + + def visit_del_stmt(self, o: DelStmt, /) -> None: + if o.expr is not None: + o.expr.accept(self) + + def visit_if_stmt(self, o: IfStmt, /) -> None: + for e in o.expr: + e.accept(self) + for b in o.body: + b.accept(self) + if o.else_body: + o.else_body.accept(self) + + def visit_raise_stmt(self, o: RaiseStmt, /) -> None: + if o.expr is not None: + o.expr.accept(self) + if o.from_expr is not None: + o.from_expr.accept(self) + + def visit_try_stmt(self, o: TryStmt, /) -> None: + o.body.accept(self) + for i in range(len(o.types)): + tp = o.types[i] + if tp is not None: + tp.accept(self) + o.handlers[i].accept(self) + for v in o.vars: + if v is not None: + v.accept(self) + if o.else_body is not None: + o.else_body.accept(self) + if o.finally_body is not 
None: + o.finally_body.accept(self) + + def visit_with_stmt(self, o: WithStmt, /) -> None: + for i in range(len(o.expr)): + o.expr[i].accept(self) + targ = o.target[i] + if targ is not None: + targ.accept(self) + o.body.accept(self) + + def visit_match_stmt(self, o: MatchStmt, /) -> None: + o.subject.accept(self) + for i in range(len(o.patterns)): + o.patterns[i].accept(self) + guard = o.guards[i] + if guard is not None: + guard.accept(self) + o.bodies[i].accept(self) + + def visit_type_alias_stmt(self, o: TypeAliasStmt, /) -> None: + o.name.accept(self) + o.value.accept(self) + + def visit_member_expr(self, o: MemberExpr, /) -> None: + o.expr.accept(self) + + def visit_yield_from_expr(self, o: YieldFromExpr, /) -> None: + o.expr.accept(self) + + def visit_yield_expr(self, o: YieldExpr, /) -> None: + if o.expr: + o.expr.accept(self) + + def visit_call_expr(self, o: CallExpr, /) -> None: + o.callee.accept(self) + for a in o.args: + a.accept(self) + if o.analyzed: + o.analyzed.accept(self) + + def visit_op_expr(self, o: OpExpr, /) -> None: + o.left.accept(self) + o.right.accept(self) + if o.analyzed is not None: + o.analyzed.accept(self) + + def visit_comparison_expr(self, o: ComparisonExpr, /) -> None: + for operand in o.operands: + operand.accept(self) + + def visit_slice_expr(self, o: SliceExpr, /) -> None: + if o.begin_index is not None: + o.begin_index.accept(self) + if o.end_index is not None: + o.end_index.accept(self) + if o.stride is not None: + o.stride.accept(self) + + def visit_cast_expr(self, o: CastExpr, /) -> None: + o.expr.accept(self) + + def visit_type_form_expr(self, o: TypeFormExpr, /) -> None: + pass + + def visit_assert_type_expr(self, o: AssertTypeExpr, /) -> None: + o.expr.accept(self) + + def visit_reveal_expr(self, o: RevealExpr, /) -> None: + if o.kind == REVEAL_TYPE: + assert o.expr is not None + o.expr.accept(self) + else: + # RevealLocalsExpr doesn't have an inner expression + pass + + def visit_assignment_expr(self, o: AssignmentExpr, /) -> None: + o.target.accept(self) + o.value.accept(self) + + def visit_unary_expr(self, o: UnaryExpr, /) -> None: + o.expr.accept(self) + + def visit_list_expr(self, o: ListExpr, /) -> None: + for item in o.items: + item.accept(self) + + def visit_tuple_expr(self, o: TupleExpr, /) -> None: + for item in o.items: + item.accept(self) + + def visit_dict_expr(self, o: DictExpr, /) -> None: + for k, v in o.items: + if k is not None: + k.accept(self) + v.accept(self) + + def visit_set_expr(self, o: SetExpr, /) -> None: + for item in o.items: + item.accept(self) + + def visit_index_expr(self, o: IndexExpr, /) -> None: + o.base.accept(self) + o.index.accept(self) + if o.analyzed: + o.analyzed.accept(self) + + def visit_generator_expr(self, o: GeneratorExpr, /) -> None: + for index, sequence, conditions in zip(o.indices, o.sequences, o.condlists): + sequence.accept(self) + index.accept(self) + for cond in conditions: + cond.accept(self) + o.left_expr.accept(self) + + def visit_dictionary_comprehension(self, o: DictionaryComprehension, /) -> None: + for index, sequence, conditions in zip(o.indices, o.sequences, o.condlists): + sequence.accept(self) + index.accept(self) + for cond in conditions: + cond.accept(self) + o.key.accept(self) + o.value.accept(self) + + def visit_list_comprehension(self, o: ListComprehension, /) -> None: + o.generator.accept(self) + + def visit_set_comprehension(self, o: SetComprehension, /) -> None: + o.generator.accept(self) + + def visit_conditional_expr(self, o: ConditionalExpr, /) -> None: + 
o.cond.accept(self) + o.if_expr.accept(self) + o.else_expr.accept(self) + + def visit_type_application(self, o: TypeApplication, /) -> None: + o.expr.accept(self) + + def visit_lambda_expr(self, o: LambdaExpr, /) -> None: + self.visit_func(o) + + def visit_star_expr(self, o: StarExpr, /) -> None: + o.expr.accept(self) + + def visit_await_expr(self, o: AwaitExpr, /) -> None: + o.expr.accept(self) + + def visit_super_expr(self, o: SuperExpr, /) -> None: + o.call.accept(self) + + def visit_as_pattern(self, o: AsPattern, /) -> None: + if o.pattern is not None: + o.pattern.accept(self) + if o.name is not None: + o.name.accept(self) + + def visit_or_pattern(self, o: OrPattern, /) -> None: + for p in o.patterns: + p.accept(self) + + def visit_value_pattern(self, o: ValuePattern, /) -> None: + o.expr.accept(self) + + def visit_sequence_pattern(self, o: SequencePattern, /) -> None: + for p in o.patterns: + p.accept(self) + + def visit_starred_pattern(self, o: StarredPattern, /) -> None: + if o.capture is not None: + o.capture.accept(self) + + def visit_mapping_pattern(self, o: MappingPattern, /) -> None: + for key in o.keys: + key.accept(self) + for value in o.values: + value.accept(self) + if o.rest is not None: + o.rest.accept(self) + + def visit_class_pattern(self, o: ClassPattern, /) -> None: + o.class_ref.accept(self) + for p in o.positionals: + p.accept(self) + for v in o.keyword_values: + v.accept(self) + + def visit_import(self, o: Import, /) -> None: + for a in o.assignments: + a.accept(self) + + def visit_import_from(self, o: ImportFrom, /) -> None: + for a in o.assignments: + a.accept(self) + + # leaf nodes + def visit_name_expr(self, o: NameExpr, /) -> None: + return None + + def visit_str_expr(self, o: StrExpr, /) -> None: + return None + + def visit_int_expr(self, o: IntExpr, /) -> None: + return None + + def visit_float_expr(self, o: FloatExpr, /) -> None: + return None + + def visit_bytes_expr(self, o: BytesExpr, /) -> None: + return None + + def visit_ellipsis(self, o: EllipsisExpr, /) -> None: + return None + + def visit_var(self, o: Var, /) -> None: + return None + + def visit_continue_stmt(self, o: ContinueStmt, /) -> None: + return None + + def visit_pass_stmt(self, o: PassStmt, /) -> None: + return None + + def visit_break_stmt(self, o: BreakStmt, /) -> None: + return None + + def visit_temp_node(self, o: TempNode, /) -> None: + return None + + def visit_nonlocal_decl(self, o: NonlocalDecl, /) -> None: + return None + + def visit_global_decl(self, o: GlobalDecl, /) -> None: + return None + + def visit_import_all(self, o: ImportAll, /) -> None: + return None + + def visit_type_var_expr(self, o: TypeVarExpr, /) -> None: + return None + + def visit_paramspec_expr(self, o: ParamSpecExpr, /) -> None: + return None + + def visit_type_var_tuple_expr(self, o: TypeVarTupleExpr, /) -> None: + return None + + def visit_type_alias_expr(self, o: TypeAliasExpr, /) -> None: + return None + + def visit_type_alias(self, o: TypeAlias, /) -> None: + return None + + def visit_namedtuple_expr(self, o: NamedTupleExpr, /) -> None: + return None + + def visit_typeddict_expr(self, o: TypedDictExpr, /) -> None: + return None + + def visit_newtype_expr(self, o: NewTypeExpr, /) -> None: + return None + + def visit__promote_expr(self, o: PromoteExpr, /) -> None: + return None + + def visit_complex_expr(self, o: ComplexExpr, /) -> None: + return None + + def visit_enum_call_expr(self, o: EnumCallExpr, /) -> None: + return None + + def visit_singleton_pattern(self, o: SingletonPattern, /) -> None: + return 
None + + +class ExtendedTraverserVisitor(TraverserVisitor): + """This is a more flexible traverser. + + In addition to the base traverser it: + * has visit_ methods for leaf nodes + * has common method that is called for all nodes + * allows skipping recursing into a node + + Note that this traverser still doesn't visit some internal + mypy constructs like _promote expression and Var. + """ + + def visit(self, o: Node) -> bool: + # If returns True, will continue to nested nodes. + return True + + def visit_mypy_file(self, o: MypyFile, /) -> None: + if not self.visit(o): + return + super().visit_mypy_file(o) + + # Module structure + + def visit_import(self, o: Import, /) -> None: + if not self.visit(o): + return + super().visit_import(o) + + def visit_import_from(self, o: ImportFrom, /) -> None: + if not self.visit(o): + return + super().visit_import_from(o) + + def visit_import_all(self, o: ImportAll, /) -> None: + if not self.visit(o): + return + super().visit_import_all(o) + + # Definitions + + def visit_func_def(self, o: FuncDef, /) -> None: + if not self.visit(o): + return + super().visit_func_def(o) + + def visit_overloaded_func_def(self, o: OverloadedFuncDef, /) -> None: + if not self.visit(o): + return + super().visit_overloaded_func_def(o) + + def visit_class_def(self, o: ClassDef, /) -> None: + if not self.visit(o): + return + super().visit_class_def(o) + + def visit_global_decl(self, o: GlobalDecl, /) -> None: + if not self.visit(o): + return + super().visit_global_decl(o) + + def visit_nonlocal_decl(self, o: NonlocalDecl, /) -> None: + if not self.visit(o): + return + super().visit_nonlocal_decl(o) + + def visit_decorator(self, o: Decorator, /) -> None: + if not self.visit(o): + return + super().visit_decorator(o) + + def visit_type_alias(self, o: TypeAlias, /) -> None: + if not self.visit(o): + return + super().visit_type_alias(o) + + # Statements + + def visit_block(self, block: Block, /) -> None: + if not self.visit(block): + return + super().visit_block(block) + + def visit_expression_stmt(self, o: ExpressionStmt, /) -> None: + if not self.visit(o): + return + super().visit_expression_stmt(o) + + def visit_assignment_stmt(self, o: AssignmentStmt, /) -> None: + if not self.visit(o): + return + super().visit_assignment_stmt(o) + + def visit_operator_assignment_stmt(self, o: OperatorAssignmentStmt, /) -> None: + if not self.visit(o): + return + super().visit_operator_assignment_stmt(o) + + def visit_while_stmt(self, o: WhileStmt, /) -> None: + if not self.visit(o): + return + super().visit_while_stmt(o) + + def visit_for_stmt(self, o: ForStmt, /) -> None: + if not self.visit(o): + return + super().visit_for_stmt(o) + + def visit_return_stmt(self, o: ReturnStmt, /) -> None: + if not self.visit(o): + return + super().visit_return_stmt(o) + + def visit_assert_stmt(self, o: AssertStmt, /) -> None: + if not self.visit(o): + return + super().visit_assert_stmt(o) + + def visit_del_stmt(self, o: DelStmt, /) -> None: + if not self.visit(o): + return + super().visit_del_stmt(o) + + def visit_if_stmt(self, o: IfStmt, /) -> None: + if not self.visit(o): + return + super().visit_if_stmt(o) + + def visit_break_stmt(self, o: BreakStmt, /) -> None: + if not self.visit(o): + return + super().visit_break_stmt(o) + + def visit_continue_stmt(self, o: ContinueStmt, /) -> None: + if not self.visit(o): + return + super().visit_continue_stmt(o) + + def visit_pass_stmt(self, o: PassStmt, /) -> None: + if not self.visit(o): + return + super().visit_pass_stmt(o) + + def visit_raise_stmt(self, o: 
RaiseStmt, /) -> None: + if not self.visit(o): + return + super().visit_raise_stmt(o) + + def visit_try_stmt(self, o: TryStmt, /) -> None: + if not self.visit(o): + return + super().visit_try_stmt(o) + + def visit_with_stmt(self, o: WithStmt, /) -> None: + if not self.visit(o): + return + super().visit_with_stmt(o) + + def visit_match_stmt(self, o: MatchStmt, /) -> None: + if not self.visit(o): + return + super().visit_match_stmt(o) + + # Expressions (default no-op implementation) + + def visit_int_expr(self, o: IntExpr, /) -> None: + if not self.visit(o): + return + super().visit_int_expr(o) + + def visit_str_expr(self, o: StrExpr, /) -> None: + if not self.visit(o): + return + super().visit_str_expr(o) + + def visit_bytes_expr(self, o: BytesExpr, /) -> None: + if not self.visit(o): + return + super().visit_bytes_expr(o) + + def visit_float_expr(self, o: FloatExpr, /) -> None: + if not self.visit(o): + return + super().visit_float_expr(o) + + def visit_complex_expr(self, o: ComplexExpr, /) -> None: + if not self.visit(o): + return + super().visit_complex_expr(o) + + def visit_ellipsis(self, o: EllipsisExpr, /) -> None: + if not self.visit(o): + return + super().visit_ellipsis(o) + + def visit_star_expr(self, o: StarExpr, /) -> None: + if not self.visit(o): + return + super().visit_star_expr(o) + + def visit_name_expr(self, o: NameExpr, /) -> None: + if not self.visit(o): + return + super().visit_name_expr(o) + + def visit_member_expr(self, o: MemberExpr, /) -> None: + if not self.visit(o): + return + super().visit_member_expr(o) + + def visit_yield_from_expr(self, o: YieldFromExpr, /) -> None: + if not self.visit(o): + return + super().visit_yield_from_expr(o) + + def visit_yield_expr(self, o: YieldExpr, /) -> None: + if not self.visit(o): + return + super().visit_yield_expr(o) + + def visit_call_expr(self, o: CallExpr, /) -> None: + if not self.visit(o): + return + super().visit_call_expr(o) + + def visit_op_expr(self, o: OpExpr, /) -> None: + if not self.visit(o): + return + super().visit_op_expr(o) + + def visit_comparison_expr(self, o: ComparisonExpr, /) -> None: + if not self.visit(o): + return + super().visit_comparison_expr(o) + + def visit_cast_expr(self, o: CastExpr, /) -> None: + if not self.visit(o): + return + super().visit_cast_expr(o) + + def visit_type_form_expr(self, o: TypeFormExpr, /) -> None: + if not self.visit(o): + return + super().visit_type_form_expr(o) + + def visit_assert_type_expr(self, o: AssertTypeExpr, /) -> None: + if not self.visit(o): + return + super().visit_assert_type_expr(o) + + def visit_reveal_expr(self, o: RevealExpr, /) -> None: + if not self.visit(o): + return + super().visit_reveal_expr(o) + + def visit_super_expr(self, o: SuperExpr, /) -> None: + if not self.visit(o): + return + super().visit_super_expr(o) + + def visit_assignment_expr(self, o: AssignmentExpr, /) -> None: + if not self.visit(o): + return + super().visit_assignment_expr(o) + + def visit_unary_expr(self, o: UnaryExpr, /) -> None: + if not self.visit(o): + return + super().visit_unary_expr(o) + + def visit_list_expr(self, o: ListExpr, /) -> None: + if not self.visit(o): + return + super().visit_list_expr(o) + + def visit_dict_expr(self, o: DictExpr, /) -> None: + if not self.visit(o): + return + super().visit_dict_expr(o) + + def visit_tuple_expr(self, o: TupleExpr, /) -> None: + if not self.visit(o): + return + super().visit_tuple_expr(o) + + def visit_set_expr(self, o: SetExpr, /) -> None: + if not self.visit(o): + return + super().visit_set_expr(o) + + def visit_index_expr(self, 
o: IndexExpr, /) -> None: + if not self.visit(o): + return + super().visit_index_expr(o) + + def visit_type_application(self, o: TypeApplication, /) -> None: + if not self.visit(o): + return + super().visit_type_application(o) + + def visit_lambda_expr(self, o: LambdaExpr, /) -> None: + if not self.visit(o): + return + super().visit_lambda_expr(o) + + def visit_list_comprehension(self, o: ListComprehension, /) -> None: + if not self.visit(o): + return + super().visit_list_comprehension(o) + + def visit_set_comprehension(self, o: SetComprehension, /) -> None: + if not self.visit(o): + return + super().visit_set_comprehension(o) + + def visit_dictionary_comprehension(self, o: DictionaryComprehension, /) -> None: + if not self.visit(o): + return + super().visit_dictionary_comprehension(o) + + def visit_generator_expr(self, o: GeneratorExpr, /) -> None: + if not self.visit(o): + return + super().visit_generator_expr(o) + + def visit_slice_expr(self, o: SliceExpr, /) -> None: + if not self.visit(o): + return + super().visit_slice_expr(o) + + def visit_conditional_expr(self, o: ConditionalExpr, /) -> None: + if not self.visit(o): + return + super().visit_conditional_expr(o) + + def visit_type_var_expr(self, o: TypeVarExpr, /) -> None: + if not self.visit(o): + return + super().visit_type_var_expr(o) + + def visit_paramspec_expr(self, o: ParamSpecExpr, /) -> None: + if not self.visit(o): + return + super().visit_paramspec_expr(o) + + def visit_type_var_tuple_expr(self, o: TypeVarTupleExpr, /) -> None: + if not self.visit(o): + return + super().visit_type_var_tuple_expr(o) + + def visit_type_alias_expr(self, o: TypeAliasExpr, /) -> None: + if not self.visit(o): + return + super().visit_type_alias_expr(o) + + def visit_namedtuple_expr(self, o: NamedTupleExpr, /) -> None: + if not self.visit(o): + return + super().visit_namedtuple_expr(o) + + def visit_enum_call_expr(self, o: EnumCallExpr, /) -> None: + if not self.visit(o): + return + super().visit_enum_call_expr(o) + + def visit_typeddict_expr(self, o: TypedDictExpr, /) -> None: + if not self.visit(o): + return + super().visit_typeddict_expr(o) + + def visit_newtype_expr(self, o: NewTypeExpr, /) -> None: + if not self.visit(o): + return + super().visit_newtype_expr(o) + + def visit_await_expr(self, o: AwaitExpr, /) -> None: + if not self.visit(o): + return + super().visit_await_expr(o) + + # Patterns + + def visit_as_pattern(self, o: AsPattern, /) -> None: + if not self.visit(o): + return + super().visit_as_pattern(o) + + def visit_or_pattern(self, o: OrPattern, /) -> None: + if not self.visit(o): + return + super().visit_or_pattern(o) + + def visit_value_pattern(self, o: ValuePattern, /) -> None: + if not self.visit(o): + return + super().visit_value_pattern(o) + + def visit_singleton_pattern(self, o: SingletonPattern, /) -> None: + if not self.visit(o): + return + super().visit_singleton_pattern(o) + + def visit_sequence_pattern(self, o: SequencePattern, /) -> None: + if not self.visit(o): + return + super().visit_sequence_pattern(o) + + def visit_starred_pattern(self, o: StarredPattern, /) -> None: + if not self.visit(o): + return + super().visit_starred_pattern(o) + + def visit_mapping_pattern(self, o: MappingPattern, /) -> None: + if not self.visit(o): + return + super().visit_mapping_pattern(o) + + def visit_class_pattern(self, o: ClassPattern, /) -> None: + if not self.visit(o): + return + super().visit_class_pattern(o) + + +class ReturnSeeker(TraverserVisitor): + def __init__(self) -> None: + self.found = False + + def 
visit_return_stmt(self, o: ReturnStmt) -> None: + if o.expr is None or isinstance(o.expr, NameExpr) and o.expr.name == "None": + return + self.found = True + + +def has_return_statement(fdef: FuncBase) -> bool: + """Find if a function has a non-trivial return statement. + + Plain 'return' and 'return None' don't count. + """ + seeker = ReturnSeeker() + fdef.accept(seeker) + return seeker.found + + +class NameAndMemberCollector(TraverserVisitor): + def __init__(self) -> None: + super().__init__() + self.name_exprs: list[NameExpr] = [] + self.member_exprs: list[MemberExpr] = [] + + def visit_name_expr(self, o: NameExpr, /) -> None: + self.name_exprs.append(o) + super().visit_name_expr(o) + + def visit_member_expr(self, o: MemberExpr, /) -> None: + self.member_exprs.append(o) + super().visit_member_expr(o) + + +def all_name_and_member_expressions(node: Expression) -> tuple[list[NameExpr], list[MemberExpr]]: + v = NameAndMemberCollector() + node.accept(v) + return (v.name_exprs, v.member_exprs) + + +class StringSeeker(TraverserVisitor): + def __init__(self) -> None: + self.found = False + + def visit_str_expr(self, o: StrExpr, /) -> None: + self.found = True + + +def has_str_expression(node: Expression) -> bool: + v = StringSeeker() + node.accept(v) + return v.found + + +class FuncCollectorBase(TraverserVisitor): + def __init__(self) -> None: + self.inside_func = False + + def visit_func_def(self, defn: FuncDef) -> None: + if not self.inside_func: + self.inside_func = True + super().visit_func_def(defn) + self.inside_func = False + + +class YieldSeeker(FuncCollectorBase): + def __init__(self) -> None: + super().__init__() + self.found = False + + def visit_yield_expr(self, o: YieldExpr) -> None: + self.found = True + + +def has_yield_expression(fdef: FuncBase) -> bool: + seeker = YieldSeeker() + fdef.accept(seeker) + return seeker.found + + +class YieldFromSeeker(FuncCollectorBase): + def __init__(self) -> None: + super().__init__() + self.found = False + + def visit_yield_from_expr(self, o: YieldFromExpr) -> None: + self.found = True + + +def has_yield_from_expression(fdef: FuncBase) -> bool: + seeker = YieldFromSeeker() + fdef.accept(seeker) + return seeker.found + + +class AwaitSeeker(TraverserVisitor): + def __init__(self) -> None: + super().__init__() + self.found = False + + def visit_await_expr(self, o: AwaitExpr) -> None: + self.found = True + + +def has_await_expression(expr: Expression) -> bool: + seeker = AwaitSeeker() + expr.accept(seeker) + return seeker.found + + +class ReturnCollector(FuncCollectorBase): + def __init__(self) -> None: + super().__init__() + self.return_statements: list[ReturnStmt] = [] + + def visit_return_stmt(self, stmt: ReturnStmt) -> None: + self.return_statements.append(stmt) + + +def all_return_statements(node: Node) -> list[ReturnStmt]: + v = ReturnCollector() + node.accept(v) + return v.return_statements + + +class YieldCollector(FuncCollectorBase): + def __init__(self) -> None: + super().__init__() + self.in_assignment = False + self.yield_expressions: list[tuple[YieldExpr, bool]] = [] + + def visit_assignment_stmt(self, stmt: AssignmentStmt) -> None: + self.in_assignment = True + super().visit_assignment_stmt(stmt) + self.in_assignment = False + + def visit_yield_expr(self, expr: YieldExpr) -> None: + self.yield_expressions.append((expr, self.in_assignment)) + + +def all_yield_expressions(node: Node) -> list[tuple[YieldExpr, bool]]: + v = YieldCollector() + node.accept(v) + return v.yield_expressions + + +class YieldFromCollector(FuncCollectorBase): + 
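+ # Mirrors YieldCollector above, but records yield-from expressions instead.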
def __init__(self) -> None: + super().__init__() + self.in_assignment = False + self.yield_from_expressions: list[tuple[YieldFromExpr, bool]] = [] + + def visit_assignment_stmt(self, stmt: AssignmentStmt) -> None: + self.in_assignment = True + super().visit_assignment_stmt(stmt) + self.in_assignment = False + + def visit_yield_from_expr(self, expr: YieldFromExpr) -> None: + self.yield_from_expressions.append((expr, self.in_assignment)) diff --git a/.venv/lib/python3.12/site-packages/mypy/treetransform.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/treetransform.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..0724f94 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/treetransform.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/treetransform.py b/.venv/lib/python3.12/site-packages/mypy/treetransform.py new file mode 100644 index 0000000..f5af5fb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/treetransform.py @@ -0,0 +1,805 @@ +"""Base visitor that implements an identity AST transform. + +Subclass TransformVisitor to perform non-trivial transformations. +""" + +from __future__ import annotations + +from collections.abc import Iterable +from typing import Optional, cast + +from mypy.nodes import ( + GDEF, + REVEAL_TYPE, + Argument, + AssertStmt, + AssertTypeExpr, + AssignmentExpr, + AssignmentStmt, + AwaitExpr, + Block, + BreakStmt, + BytesExpr, + CallExpr, + CastExpr, + ClassDef, + ComparisonExpr, + ComplexExpr, + ConditionalExpr, + ContinueStmt, + Decorator, + DelStmt, + DictExpr, + DictionaryComprehension, + EllipsisExpr, + EnumCallExpr, + Expression, + ExpressionStmt, + FloatExpr, + ForStmt, + FuncDef, + FuncItem, + GeneratorExpr, + GlobalDecl, + IfStmt, + Import, + ImportAll, + ImportFrom, + IndexExpr, + IntExpr, + LambdaExpr, + ListComprehension, + ListExpr, + MatchStmt, + MemberExpr, + MypyFile, + NamedTupleExpr, + NameExpr, + NewTypeExpr, + Node, + NonlocalDecl, + OperatorAssignmentStmt, + OpExpr, + OverloadedFuncDef, + OverloadPart, + ParamSpecExpr, + PassStmt, + PromoteExpr, + RaiseStmt, + RefExpr, + ReturnStmt, + RevealExpr, + SetComprehension, + SetExpr, + SliceExpr, + StarExpr, + Statement, + StrExpr, + SuperExpr, + SymbolTable, + TempNode, + TryStmt, + TupleExpr, + TypeAliasExpr, + TypeApplication, + TypedDictExpr, + TypeFormExpr, + TypeVarExpr, + TypeVarTupleExpr, + UnaryExpr, + Var, + WhileStmt, + WithStmt, + YieldExpr, + YieldFromExpr, +) +from mypy.patterns import ( + AsPattern, + ClassPattern, + MappingPattern, + OrPattern, + Pattern, + SequencePattern, + SingletonPattern, + StarredPattern, + ValuePattern, +) +from mypy.traverser import TraverserVisitor +from mypy.types import FunctionLike, ProperType, Type +from mypy.util import replace_object_state +from mypy.visitor import NodeVisitor + + +class TransformVisitor(NodeVisitor[Node]): + """Transform a semantically analyzed AST (or subtree) to an identical copy. + + Use the node() method to transform an AST node. + + Subclass to perform a non-identity transform. + + Notes: + + * This can only be used to transform functions or classes, not top-level + statements, and/or modules as a whole. + * Do not duplicate TypeInfo nodes. This would generally not be desirable. + * Only update some name binding cross-references, but only those that + refer to Var, Decorator or FuncDef nodes, not those targeting ClassDef or + TypeInfo nodes. 
+ * Types are not transformed, but you can override type() to also perform + type transformation. + + TODO nested classes and functions have not been tested well enough + """ + + def __init__(self) -> None: + # To simplify testing, set this flag to True if you want to transform + # all statements in a file (this is prohibited in normal mode). + self.test_only = False + # There may be multiple references to a Var node. Keep track of + # Var translations using a dictionary. + self.var_map: dict[Var, Var] = {} + # These are uninitialized placeholder nodes used temporarily for nested + # functions while we are transforming a top-level function. This maps an + # untransformed node to a placeholder (which will later become the + # transformed node). + self.func_placeholder_map: dict[FuncDef, FuncDef] = {} + + def visit_mypy_file(self, node: MypyFile) -> MypyFile: + assert self.test_only, "This visitor should not be used for whole files." + # NOTE: The 'names' and 'imports' instance variables will be empty! + ignored_lines = {line: codes.copy() for line, codes in node.ignored_lines.items()} + new = MypyFile(self.statements(node.defs), [], node.is_bom, ignored_lines=ignored_lines) + new._fullname = node._fullname + new.path = node.path + new.names = SymbolTable() + return new + + def visit_import(self, node: Import) -> Import: + return Import(node.ids.copy()) + + def visit_import_from(self, node: ImportFrom) -> ImportFrom: + return ImportFrom(node.id, node.relative, node.names.copy()) + + def visit_import_all(self, node: ImportAll) -> ImportAll: + return ImportAll(node.id, node.relative) + + def copy_argument(self, argument: Argument) -> Argument: + arg = Argument( + self.visit_var(argument.variable), + argument.type_annotation, + argument.initializer, + argument.kind, + ) + + # Refresh lines of the inner things + arg.set_line(argument) + + return arg + + def visit_func_def(self, node: FuncDef) -> FuncDef: + # Note that a FuncDef must be transformed to a FuncDef. + + # These contortions are needed to handle the case of recursive + # references inside the function being transformed. + # Set up placeholder nodes for references within this function + # to other functions defined inside it. + # Don't create an entry for this function itself though, + # since we want self-references to point to the original + # function if this is the top-level node we are transforming. + init = FuncMapInitializer(self) + for stmt in node.body.body: + stmt.accept(init) + + new = FuncDef( + node.name, + [self.copy_argument(arg) for arg in node.arguments], + self.block(node.body), + cast(Optional[FunctionLike], self.optional_type(node.type)), + ) + + self.copy_function_attributes(new, node) + + new._fullname = node._fullname + new.is_decorated = node.is_decorated + new.is_conditional = node.is_conditional + new.abstract_status = node.abstract_status + new.is_static = node.is_static + new.is_class = node.is_class + new.is_property = node.is_property + new.is_final = node.is_final + new.original_def = node.original_def + + if node in self.func_placeholder_map: + # There is a placeholder definition for this function. Replace + # the attributes of the placeholder with those form the transformed + # function. We know that the classes will be identical (otherwise + # this wouldn't work). 
+ result = self.func_placeholder_map[node] + replace_object_state(result, new) + return result + else: + return new + + def visit_lambda_expr(self, node: LambdaExpr) -> LambdaExpr: + new = LambdaExpr( + [self.copy_argument(arg) for arg in node.arguments], + self.block(node.body), + cast(Optional[FunctionLike], self.optional_type(node.type)), + ) + self.copy_function_attributes(new, node) + return new + + def copy_function_attributes(self, new: FuncItem, original: FuncItem) -> None: + new.info = original.info + new.min_args = original.min_args + new.max_pos = original.max_pos + new.is_overload = original.is_overload + new.is_generator = original.is_generator + new.is_coroutine = original.is_coroutine + new.is_async_generator = original.is_async_generator + new.is_awaitable_coroutine = original.is_awaitable_coroutine + new.line = original.line + + def visit_overloaded_func_def(self, node: OverloadedFuncDef) -> OverloadedFuncDef: + items = [cast(OverloadPart, item.accept(self)) for item in node.items] + for newitem, olditem in zip(items, node.items): + newitem.line = olditem.line + new = OverloadedFuncDef(items) + new._fullname = node._fullname + new_type = self.optional_type(node.type) + assert isinstance(new_type, ProperType) + new.type = new_type + new.info = node.info + new.is_static = node.is_static + new.is_class = node.is_class + new.is_property = node.is_property + new.is_final = node.is_final + if node.impl: + new.impl = cast(OverloadPart, node.impl.accept(self)) + return new + + def visit_class_def(self, node: ClassDef) -> ClassDef: + new = ClassDef( + node.name, + self.block(node.defs), + node.type_vars, + self.expressions(node.base_type_exprs), + self.optional_expr(node.metaclass), + ) + new.fullname = node.fullname + new.info = node.info + new.decorators = [self.expr(decorator) for decorator in node.decorators] + return new + + def visit_global_decl(self, node: GlobalDecl) -> GlobalDecl: + return GlobalDecl(node.names.copy()) + + def visit_nonlocal_decl(self, node: NonlocalDecl) -> NonlocalDecl: + return NonlocalDecl(node.names.copy()) + + def visit_block(self, node: Block) -> Block: + return Block(self.statements(node.body), is_unreachable=node.is_unreachable) + + def visit_decorator(self, node: Decorator) -> Decorator: + # Note that a Decorator must be transformed to a Decorator. + func = self.visit_func_def(node.func) + func.line = node.func.line + new = Decorator(func, self.expressions(node.decorators), self.visit_var(node.var)) + new.is_overload = node.is_overload + return new + + def visit_var(self, node: Var) -> Var: + # Note that a Var must be transformed to a Var. 
+ if node in self.var_map: + return self.var_map[node] + new = Var(node.name, self.optional_type(node.type)) + new.line = node.line + new._fullname = node._fullname + new.info = node.info + new.is_self = node.is_self + new.is_ready = node.is_ready + new.is_initialized_in_class = node.is_initialized_in_class + new.is_staticmethod = node.is_staticmethod + new.is_classmethod = node.is_classmethod + new.is_property = node.is_property + new.is_final = node.is_final + new.final_value = node.final_value + new.final_unset_in_class = node.final_unset_in_class + new.final_set_in_init = node.final_set_in_init + new.set_line(node) + self.var_map[node] = new + return new + + def visit_expression_stmt(self, node: ExpressionStmt) -> ExpressionStmt: + return ExpressionStmt(self.expr(node.expr)) + + def visit_assignment_stmt(self, node: AssignmentStmt) -> AssignmentStmt: + return self.duplicate_assignment(node) + + def duplicate_assignment(self, node: AssignmentStmt) -> AssignmentStmt: + new = AssignmentStmt( + self.expressions(node.lvalues), + self.expr(node.rvalue), + self.optional_type(node.unanalyzed_type), + ) + new.line = node.line + new.is_final_def = node.is_final_def + new.type = self.optional_type(node.type) + return new + + def visit_operator_assignment_stmt( + self, node: OperatorAssignmentStmt + ) -> OperatorAssignmentStmt: + return OperatorAssignmentStmt(node.op, self.expr(node.lvalue), self.expr(node.rvalue)) + + def visit_while_stmt(self, node: WhileStmt) -> WhileStmt: + return WhileStmt( + self.expr(node.expr), self.block(node.body), self.optional_block(node.else_body) + ) + + def visit_for_stmt(self, node: ForStmt) -> ForStmt: + new = ForStmt( + self.expr(node.index), + self.expr(node.expr), + self.block(node.body), + self.optional_block(node.else_body), + self.optional_type(node.unanalyzed_index_type), + ) + new.is_async = node.is_async + new.index_type = self.optional_type(node.index_type) + return new + + def visit_return_stmt(self, node: ReturnStmt) -> ReturnStmt: + return ReturnStmt(self.optional_expr(node.expr)) + + def visit_assert_stmt(self, node: AssertStmt) -> AssertStmt: + return AssertStmt(self.expr(node.expr), self.optional_expr(node.msg)) + + def visit_del_stmt(self, node: DelStmt) -> DelStmt: + return DelStmt(self.expr(node.expr)) + + def visit_if_stmt(self, node: IfStmt) -> IfStmt: + return IfStmt( + self.expressions(node.expr), + self.blocks(node.body), + self.optional_block(node.else_body), + ) + + def visit_break_stmt(self, node: BreakStmt) -> BreakStmt: + return BreakStmt() + + def visit_continue_stmt(self, node: ContinueStmt) -> ContinueStmt: + return ContinueStmt() + + def visit_pass_stmt(self, node: PassStmt) -> PassStmt: + return PassStmt() + + def visit_raise_stmt(self, node: RaiseStmt) -> RaiseStmt: + return RaiseStmt(self.optional_expr(node.expr), self.optional_expr(node.from_expr)) + + def visit_try_stmt(self, node: TryStmt) -> TryStmt: + new = TryStmt( + self.block(node.body), + self.optional_names(node.vars), + self.optional_expressions(node.types), + self.blocks(node.handlers), + self.optional_block(node.else_body), + self.optional_block(node.finally_body), + ) + new.is_star = node.is_star + return new + + def visit_with_stmt(self, node: WithStmt) -> WithStmt: + new = WithStmt( + self.expressions(node.expr), + self.optional_expressions(node.target), + self.block(node.body), + self.optional_type(node.unanalyzed_type), + ) + new.is_async = node.is_async + new.analyzed_types = [self.type(typ) for typ in node.analyzed_types] + return new + + def 
visit_as_pattern(self, p: AsPattern) -> AsPattern: + return AsPattern( + pattern=self.pattern(p.pattern) if p.pattern is not None else None, + name=self.duplicate_name(p.name) if p.name is not None else None, + ) + + def visit_or_pattern(self, p: OrPattern) -> OrPattern: + return OrPattern([self.pattern(pat) for pat in p.patterns]) + + def visit_value_pattern(self, p: ValuePattern) -> ValuePattern: + return ValuePattern(self.expr(p.expr)) + + def visit_singleton_pattern(self, p: SingletonPattern) -> SingletonPattern: + return SingletonPattern(p.value) + + def visit_sequence_pattern(self, p: SequencePattern) -> SequencePattern: + return SequencePattern([self.pattern(pat) for pat in p.patterns]) + + def visit_starred_pattern(self, p: StarredPattern) -> StarredPattern: + return StarredPattern(self.duplicate_name(p.capture) if p.capture is not None else None) + + def visit_mapping_pattern(self, p: MappingPattern) -> MappingPattern: + return MappingPattern( + keys=[self.expr(expr) for expr in p.keys], + values=[self.pattern(pat) for pat in p.values], + rest=self.duplicate_name(p.rest) if p.rest is not None else None, + ) + + def visit_class_pattern(self, p: ClassPattern) -> ClassPattern: + class_ref = p.class_ref.accept(self) + assert isinstance(class_ref, RefExpr) + return ClassPattern( + class_ref=class_ref, + positionals=[self.pattern(pat) for pat in p.positionals], + keyword_keys=list(p.keyword_keys), + keyword_values=[self.pattern(pat) for pat in p.keyword_values], + ) + + def visit_match_stmt(self, o: MatchStmt) -> MatchStmt: + return MatchStmt( + subject=self.expr(o.subject), + patterns=[self.pattern(p) for p in o.patterns], + guards=self.optional_expressions(o.guards), + bodies=self.blocks(o.bodies), + ) + + def visit_star_expr(self, node: StarExpr) -> StarExpr: + return StarExpr(node.expr) + + def visit_int_expr(self, node: IntExpr) -> IntExpr: + return IntExpr(node.value) + + def visit_str_expr(self, node: StrExpr) -> StrExpr: + return StrExpr(node.value) + + def visit_bytes_expr(self, node: BytesExpr) -> BytesExpr: + return BytesExpr(node.value) + + def visit_float_expr(self, node: FloatExpr) -> FloatExpr: + return FloatExpr(node.value) + + def visit_complex_expr(self, node: ComplexExpr) -> ComplexExpr: + return ComplexExpr(node.value) + + def visit_ellipsis(self, node: EllipsisExpr) -> EllipsisExpr: + return EllipsisExpr() + + def visit_name_expr(self, node: NameExpr) -> NameExpr: + return self.duplicate_name(node) + + def duplicate_name(self, node: NameExpr) -> NameExpr: + # This method is used when the transform result must be a NameExpr. + # visit_name_expr() is used when there is no such restriction. + new = NameExpr(node.name) + self.copy_ref(new, node) + new.is_special_form = node.is_special_form + return new + + def visit_member_expr(self, node: MemberExpr) -> MemberExpr: + member = MemberExpr(self.expr(node.expr), node.name) + if node.def_var: + # This refers to an attribute and we don't transform attributes by default, + # just normal variables. + member.def_var = node.def_var + self.copy_ref(member, node) + return member + + def copy_ref(self, new: RefExpr, original: RefExpr) -> None: + new.kind = original.kind + new.fullname = original.fullname + target = original.node + if isinstance(target, Var): + # Do not transform references to global variables. See + # testGenericFunctionAliasExpand for an example where this is important. 
+ if original.kind != GDEF: + target = self.visit_var(target) + elif isinstance(target, Decorator): + target = self.visit_var(target.var) + elif isinstance(target, FuncDef): + # Use a placeholder node for the function if it exists. + target = self.func_placeholder_map.get(target, target) + new.node = target + new.is_new_def = original.is_new_def + new.is_inferred_def = original.is_inferred_def + + def visit_yield_from_expr(self, node: YieldFromExpr) -> YieldFromExpr: + return YieldFromExpr(self.expr(node.expr)) + + def visit_yield_expr(self, node: YieldExpr) -> YieldExpr: + return YieldExpr(self.optional_expr(node.expr)) + + def visit_await_expr(self, node: AwaitExpr) -> AwaitExpr: + return AwaitExpr(self.expr(node.expr)) + + def visit_call_expr(self, node: CallExpr) -> CallExpr: + return CallExpr( + self.expr(node.callee), + self.expressions(node.args), + node.arg_kinds.copy(), + node.arg_names.copy(), + self.optional_expr(node.analyzed), + ) + + def visit_op_expr(self, node: OpExpr) -> OpExpr: + new = OpExpr( + node.op, + self.expr(node.left), + self.expr(node.right), + cast(Optional[TypeAliasExpr], self.optional_expr(node.analyzed)), + ) + new.method_type = self.optional_type(node.method_type) + return new + + def visit_comparison_expr(self, node: ComparisonExpr) -> ComparisonExpr: + new = ComparisonExpr(node.operators, self.expressions(node.operands)) + new.method_types = [self.optional_type(t) for t in node.method_types] + return new + + def visit_cast_expr(self, node: CastExpr) -> CastExpr: + return CastExpr(self.expr(node.expr), self.type(node.type)) + + def visit_type_form_expr(self, node: TypeFormExpr) -> TypeFormExpr: + return TypeFormExpr(self.type(node.type)) + + def visit_assert_type_expr(self, node: AssertTypeExpr) -> AssertTypeExpr: + return AssertTypeExpr(self.expr(node.expr), self.type(node.type)) + + def visit_reveal_expr(self, node: RevealExpr) -> RevealExpr: + if node.kind == REVEAL_TYPE: + assert node.expr is not None + return RevealExpr(kind=REVEAL_TYPE, expr=self.expr(node.expr)) + else: + # Reveal locals expressions don't have any sub expressions + return node + + def visit_super_expr(self, node: SuperExpr) -> SuperExpr: + call = self.expr(node.call) + assert isinstance(call, CallExpr) + new = SuperExpr(node.name, call) + new.info = node.info + return new + + def visit_assignment_expr(self, node: AssignmentExpr) -> AssignmentExpr: + return AssignmentExpr(self.duplicate_name(node.target), self.expr(node.value)) + + def visit_unary_expr(self, node: UnaryExpr) -> UnaryExpr: + new = UnaryExpr(node.op, self.expr(node.expr)) + new.method_type = self.optional_type(node.method_type) + return new + + def visit_list_expr(self, node: ListExpr) -> ListExpr: + return ListExpr(self.expressions(node.items)) + + def visit_dict_expr(self, node: DictExpr) -> DictExpr: + return DictExpr( + [(self.expr(key) if key else None, self.expr(value)) for key, value in node.items] + ) + + def visit_tuple_expr(self, node: TupleExpr) -> TupleExpr: + return TupleExpr(self.expressions(node.items)) + + def visit_set_expr(self, node: SetExpr) -> SetExpr: + return SetExpr(self.expressions(node.items)) + + def visit_index_expr(self, node: IndexExpr) -> IndexExpr: + new = IndexExpr(self.expr(node.base), self.expr(node.index)) + if node.method_type: + new.method_type = self.type(node.method_type) + if node.analyzed: + if isinstance(node.analyzed, TypeApplication): + new.analyzed = self.visit_type_application(node.analyzed) + else: + new.analyzed = self.visit_type_alias_expr(node.analyzed) + 
new.analyzed.set_line(node.analyzed) + return new + + def visit_type_application(self, node: TypeApplication) -> TypeApplication: + return TypeApplication(self.expr(node.expr), self.types(node.types)) + + def visit_list_comprehension(self, node: ListComprehension) -> ListComprehension: + generator = self.duplicate_generator(node.generator) + generator.set_line(node.generator) + return ListComprehension(generator) + + def visit_set_comprehension(self, node: SetComprehension) -> SetComprehension: + generator = self.duplicate_generator(node.generator) + generator.set_line(node.generator) + return SetComprehension(generator) + + def visit_dictionary_comprehension( + self, node: DictionaryComprehension + ) -> DictionaryComprehension: + return DictionaryComprehension( + self.expr(node.key), + self.expr(node.value), + [self.expr(index) for index in node.indices], + [self.expr(s) for s in node.sequences], + [[self.expr(cond) for cond in conditions] for conditions in node.condlists], + node.is_async, + ) + + def visit_generator_expr(self, node: GeneratorExpr) -> GeneratorExpr: + return self.duplicate_generator(node) + + def duplicate_generator(self, node: GeneratorExpr) -> GeneratorExpr: + return GeneratorExpr( + self.expr(node.left_expr), + [self.expr(index) for index in node.indices], + [self.expr(s) for s in node.sequences], + [[self.expr(cond) for cond in conditions] for conditions in node.condlists], + node.is_async, + ) + + def visit_slice_expr(self, node: SliceExpr) -> SliceExpr: + return SliceExpr( + self.optional_expr(node.begin_index), + self.optional_expr(node.end_index), + self.optional_expr(node.stride), + ) + + def visit_conditional_expr(self, node: ConditionalExpr) -> ConditionalExpr: + return ConditionalExpr( + self.expr(node.cond), self.expr(node.if_expr), self.expr(node.else_expr) + ) + + def visit_type_var_expr(self, node: TypeVarExpr) -> TypeVarExpr: + return TypeVarExpr( + node.name, + node.fullname, + self.types(node.values), + self.type(node.upper_bound), + self.type(node.default), + variance=node.variance, + ) + + def visit_paramspec_expr(self, node: ParamSpecExpr) -> ParamSpecExpr: + return ParamSpecExpr( + node.name, + node.fullname, + self.type(node.upper_bound), + self.type(node.default), + variance=node.variance, + ) + + def visit_type_var_tuple_expr(self, node: TypeVarTupleExpr) -> TypeVarTupleExpr: + return TypeVarTupleExpr( + node.name, + node.fullname, + self.type(node.upper_bound), + node.tuple_fallback, + self.type(node.default), + variance=node.variance, + ) + + def visit_type_alias_expr(self, node: TypeAliasExpr) -> TypeAliasExpr: + return TypeAliasExpr(node.node) + + def visit_newtype_expr(self, node: NewTypeExpr) -> NewTypeExpr: + res = NewTypeExpr(node.name, node.old_type, line=node.line, column=node.column) + res.info = node.info + return res + + def visit_namedtuple_expr(self, node: NamedTupleExpr) -> NamedTupleExpr: + return NamedTupleExpr(node.info) + + def visit_enum_call_expr(self, node: EnumCallExpr) -> EnumCallExpr: + return EnumCallExpr(node.info, node.items, node.values) + + def visit_typeddict_expr(self, node: TypedDictExpr) -> Node: + return TypedDictExpr(node.info) + + def visit__promote_expr(self, node: PromoteExpr) -> PromoteExpr: + return PromoteExpr(node.type) + + def visit_temp_node(self, node: TempNode) -> TempNode: + return TempNode(self.type(node.type)) + + def node(self, node: Node) -> Node: + new = node.accept(self) + new.set_line(node) + return new + + def mypyfile(self, node: MypyFile) -> MypyFile: + new = node.accept(self) + assert 
isinstance(new, MypyFile) + new.set_line(node) + return new + + def expr(self, expr: Expression) -> Expression: + new = expr.accept(self) + assert isinstance(new, Expression) + new.set_line(expr) + return new + + def stmt(self, stmt: Statement) -> Statement: + new = stmt.accept(self) + assert isinstance(new, Statement) + new.set_line(stmt) + return new + + def pattern(self, pattern: Pattern) -> Pattern: + new = pattern.accept(self) + assert isinstance(new, Pattern) + new.set_line(pattern) + return new + + # Helpers + # + # All the node helpers also propagate line numbers. + + def optional_expr(self, expr: Expression | None) -> Expression | None: + if expr: + return self.expr(expr) + else: + return None + + def block(self, block: Block) -> Block: + new = self.visit_block(block) + new.line = block.line + return new + + def optional_block(self, block: Block | None) -> Block | None: + if block: + return self.block(block) + else: + return None + + def statements(self, statements: list[Statement]) -> list[Statement]: + return [self.stmt(stmt) for stmt in statements] + + def expressions(self, expressions: list[Expression]) -> list[Expression]: + return [self.expr(expr) for expr in expressions] + + def optional_expressions( + self, expressions: Iterable[Expression | None] + ) -> list[Expression | None]: + return [self.optional_expr(expr) for expr in expressions] + + def blocks(self, blocks: list[Block]) -> list[Block]: + return [self.block(block) for block in blocks] + + def names(self, names: list[NameExpr]) -> list[NameExpr]: + return [self.duplicate_name(name) for name in names] + + def optional_names(self, names: Iterable[NameExpr | None]) -> list[NameExpr | None]: + result: list[NameExpr | None] = [] + for name in names: + if name: + result.append(self.duplicate_name(name)) + else: + result.append(None) + return result + + def type(self, type: Type) -> Type: + # Override this method to transform types. + return type + + def optional_type(self, type: Type | None) -> Type | None: + if type: + return self.type(type) + else: + return None + + def types(self, types: list[Type]) -> list[Type]: + return [self.type(type) for type in types] + + +class FuncMapInitializer(TraverserVisitor): + """This traverser creates mappings from nested FuncDefs to placeholder FuncDefs. + + The placeholders will later be replaced with transformed nodes. + """ + + def __init__(self, transformer: TransformVisitor) -> None: + self.transformer = transformer + + def visit_func_def(self, node: FuncDef) -> None: + if node not in self.transformer.func_placeholder_map: + # Haven't seen this FuncDef before, so create a placeholder node. 
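For illustration only, here is a minimal sketch of how the TransformVisitor above is typically subclassed for a non-identity transform: override the visit_* methods you care about, then hand the root to node() (or expr()/stmt()), which copies the subtree and propagates line numbers. This is not part of the vendored mypy file; the class name RenameVisitor and the identifiers old_name/new_name are invented for the example.

    from mypy.nodes import FuncDef, NameExpr
    from mypy.treetransform import TransformVisitor

    class RenameVisitor(TransformVisitor):
        """Copy a subtree, renaming every NameExpr 'old_name' to 'new_name'."""

        def visit_name_expr(self, node: NameExpr) -> NameExpr:
            new = super().visit_name_expr(node)  # duplicates the node and its reference info
            if new.name == "old_name":
                new.name = "new_name"
            return new

    def rename(func: FuncDef) -> FuncDef:
        new = RenameVisitor().node(func)  # node() copies and keeps line numbers
        assert isinstance(new, FuncDef)
        return new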
+ self.transformer.func_placeholder_map[node] = FuncDef( + node.name, node.arguments, node.body, None + ) + super().visit_func_def(node) diff --git a/.venv/lib/python3.12/site-packages/mypy/tvar_scope.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/tvar_scope.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..95a060f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/tvar_scope.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/tvar_scope.py b/.venv/lib/python3.12/site-packages/mypy/tvar_scope.py new file mode 100644 index 0000000..fe97a83 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/tvar_scope.py @@ -0,0 +1,167 @@ +from __future__ import annotations + +from mypy.nodes import ( + ParamSpecExpr, + SymbolTableNode, + TypeVarExpr, + TypeVarLikeExpr, + TypeVarTupleExpr, +) +from mypy.types import ( + ParamSpecFlavor, + ParamSpecType, + TypeVarId, + TypeVarLikeType, + TypeVarTupleType, + TypeVarType, +) +from mypy.typetraverser import TypeTraverserVisitor + + +class TypeVarLikeNamespaceSetter(TypeTraverserVisitor): + """Set namespace for all TypeVarLikeTypes types.""" + + def __init__(self, namespace: str) -> None: + self.namespace = namespace + + def visit_type_var(self, t: TypeVarType) -> None: + t.id.namespace = self.namespace + super().visit_type_var(t) + + def visit_param_spec(self, t: ParamSpecType) -> None: + t.id.namespace = self.namespace + return super().visit_param_spec(t) + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> None: + t.id.namespace = self.namespace + super().visit_type_var_tuple(t) + + +class TypeVarLikeScope: + """Scope that holds bindings for type variables and parameter specifications. + + Node fullname -> TypeVarLikeType. + """ + + def __init__( + self, + parent: TypeVarLikeScope | None = None, + is_class_scope: bool = False, + prohibited: TypeVarLikeScope | None = None, + namespace: str = "", + ) -> None: + """Initializer for TypeVarLikeScope + + Parameters: + parent: the outer scope for this scope + is_class_scope: True if this represents a generic class + prohibited: Type variables that aren't strictly in scope exactly, + but can't be bound because they're part of an outer class's scope. + """ + self.scope: dict[str, TypeVarLikeType] = {} + self.parent = parent + self.func_id = 0 + self.class_id = 0 + self.is_class_scope = is_class_scope + self.prohibited = prohibited + self.namespace = namespace + if parent is not None: + self.func_id = parent.func_id + self.class_id = parent.class_id + + def get_function_scope(self) -> TypeVarLikeScope | None: + """Get the nearest parent that's a function scope, not a class scope""" + it: TypeVarLikeScope | None = self + while it is not None and it.is_class_scope: + it = it.parent + return it + + def allow_binding(self, fullname: str) -> bool: + if fullname in self.scope: + return False + elif self.parent and not self.parent.allow_binding(fullname): + return False + elif self.prohibited and not self.prohibited.allow_binding(fullname): + return False + return True + + def method_frame(self, namespace: str) -> TypeVarLikeScope: + """A new scope frame for binding a method""" + return TypeVarLikeScope(self, False, None, namespace=namespace) + + def class_frame(self, namespace: str) -> TypeVarLikeScope: + """A new scope frame for binding a class. 
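As a hedged sketch of how the TypeVarLikeScope defined here is meant to be used (not part of the vendored mypy sources; the names "T", "pkg.mod.T" and the namespace string are invented, and the TypeVarType keywords mirror the ones bind_new() passes below):

    from mypy.tvar_scope import TypeVarLikeScope
    from mypy.types import AnyType, TypeOfAny, TypeVarId, TypeVarType

    module_scope = TypeVarLikeScope()
    method_scope = module_scope.method_frame(namespace="pkg.mod.C.meth")

    # Nothing named "pkg.mod.T" is bound yet, so binding it is allowed.
    assert method_scope.allow_binding("pkg.mod.T")

    any_t = AnyType(TypeOfAny.special_form)
    tvar = TypeVarType(
        name="T",
        fullname="pkg.mod.T",
        id=TypeVarId(-1, namespace="pkg.mod.C.meth"),
        values=[],
        upper_bound=any_t,
        default=any_t,
    )
    method_scope.bind_existing(tvar)

    # Lookups walk the scope chain by fullname; rebinding is now disallowed.
    assert method_scope.get_binding("pkg.mod.T") is tvar
    assert not method_scope.allow_binding("pkg.mod.T")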
Prohibits *this* class's tvars""" + return TypeVarLikeScope(self.get_function_scope(), True, self, namespace=namespace) + + def new_unique_func_id(self) -> TypeVarId: + """Used by plugin-like code that needs to make synthetic generic functions.""" + self.func_id -= 1 + return TypeVarId(self.func_id) + + def bind_new(self, name: str, tvar_expr: TypeVarLikeExpr) -> TypeVarLikeType: + if self.is_class_scope: + self.class_id += 1 + i = self.class_id + else: + self.func_id -= 1 + i = self.func_id + namespace = self.namespace + tvar_expr.default.accept(TypeVarLikeNamespaceSetter(namespace)) + + if isinstance(tvar_expr, TypeVarExpr): + tvar_def: TypeVarLikeType = TypeVarType( + name=name, + fullname=tvar_expr.fullname, + id=TypeVarId(i, namespace=namespace), + values=tvar_expr.values, + upper_bound=tvar_expr.upper_bound, + default=tvar_expr.default, + variance=tvar_expr.variance, + line=tvar_expr.line, + column=tvar_expr.column, + ) + elif isinstance(tvar_expr, ParamSpecExpr): + tvar_def = ParamSpecType( + name=name, + fullname=tvar_expr.fullname, + id=TypeVarId(i, namespace=namespace), + flavor=ParamSpecFlavor.BARE, + upper_bound=tvar_expr.upper_bound, + default=tvar_expr.default, + line=tvar_expr.line, + column=tvar_expr.column, + ) + elif isinstance(tvar_expr, TypeVarTupleExpr): + tvar_def = TypeVarTupleType( + name=name, + fullname=tvar_expr.fullname, + id=TypeVarId(i, namespace=namespace), + upper_bound=tvar_expr.upper_bound, + tuple_fallback=tvar_expr.tuple_fallback, + default=tvar_expr.default, + line=tvar_expr.line, + column=tvar_expr.column, + ) + else: + assert False + self.scope[tvar_expr.fullname] = tvar_def + return tvar_def + + def bind_existing(self, tvar_def: TypeVarLikeType) -> None: + self.scope[tvar_def.fullname] = tvar_def + + def get_binding(self, item: str | SymbolTableNode) -> TypeVarLikeType | None: + fullname = item.fullname if isinstance(item, SymbolTableNode) else item + assert fullname + if fullname in self.scope: + return self.scope[fullname] + elif self.parent is not None: + return self.parent.get_binding(fullname) + else: + return None + + def __str__(self) -> str: + me = ", ".join(f"{k}: {v.name}`{v.id}" for k, v in self.scope.items()) + if self.parent is None: + return me + return f"{self.parent} <- {me}" diff --git a/.venv/lib/python3.12/site-packages/mypy/type_visitor.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/type_visitor.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..288af37 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/type_visitor.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/type_visitor.py b/.venv/lib/python3.12/site-packages/mypy/type_visitor.py new file mode 100644 index 0000000..1b38481 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/type_visitor.py @@ -0,0 +1,612 @@ +"""Type visitor classes. + +This module defines the type visitors that are intended to be +subclassed by other code. They have been separated out into their own +module to ease converting mypy to run under mypyc, since currently +mypyc-extension classes can extend interpreted classes but not the +other way around. Separating them out, then, allows us to compile +types before we can compile everything that uses a TypeVisitor. + +The visitors are all re-exported from mypy.types and that is how +other modules refer to them. 
+""" + +from __future__ import annotations + +from abc import abstractmethod +from collections.abc import Iterable, Sequence +from typing import Any, Final, Generic, TypeVar, cast + +from mypy_extensions import mypyc_attr, trait + +from mypy.types import ( + AnyType, + CallableArgument, + CallableType, + DeletedType, + EllipsisType, + ErasedType, + Instance, + LiteralType, + NoneType, + Overloaded, + Parameters, + ParamSpecType, + PartialType, + PlaceholderType, + RawExpressionType, + TupleType, + Type, + TypeAliasType, + TypedDictType, + TypeList, + TypeType, + TypeVarLikeType, + TypeVarTupleType, + TypeVarType, + UnboundType, + UninhabitedType, + UnionType, + UnpackType, + get_proper_type, +) + +T = TypeVar("T", covariant=True) + + +@trait +@mypyc_attr(allow_interpreted_subclasses=True) +class TypeVisitor(Generic[T]): + """Visitor class for types (Type subclasses). + + The parameter T is the return type of the visit methods. + """ + + @abstractmethod + def visit_unbound_type(self, t: UnboundType, /) -> T: + pass + + @abstractmethod + def visit_any(self, t: AnyType, /) -> T: + pass + + @abstractmethod + def visit_none_type(self, t: NoneType, /) -> T: + pass + + @abstractmethod + def visit_uninhabited_type(self, t: UninhabitedType, /) -> T: + pass + + @abstractmethod + def visit_erased_type(self, t: ErasedType, /) -> T: + pass + + @abstractmethod + def visit_deleted_type(self, t: DeletedType, /) -> T: + pass + + @abstractmethod + def visit_type_var(self, t: TypeVarType, /) -> T: + pass + + @abstractmethod + def visit_param_spec(self, t: ParamSpecType, /) -> T: + pass + + @abstractmethod + def visit_parameters(self, t: Parameters, /) -> T: + pass + + @abstractmethod + def visit_type_var_tuple(self, t: TypeVarTupleType, /) -> T: + pass + + @abstractmethod + def visit_instance(self, t: Instance, /) -> T: + pass + + @abstractmethod + def visit_callable_type(self, t: CallableType, /) -> T: + pass + + @abstractmethod + def visit_overloaded(self, t: Overloaded, /) -> T: + pass + + @abstractmethod + def visit_tuple_type(self, t: TupleType, /) -> T: + pass + + @abstractmethod + def visit_typeddict_type(self, t: TypedDictType, /) -> T: + pass + + @abstractmethod + def visit_literal_type(self, t: LiteralType, /) -> T: + pass + + @abstractmethod + def visit_union_type(self, t: UnionType, /) -> T: + pass + + @abstractmethod + def visit_partial_type(self, t: PartialType, /) -> T: + pass + + @abstractmethod + def visit_type_type(self, t: TypeType, /) -> T: + pass + + @abstractmethod + def visit_type_alias_type(self, t: TypeAliasType, /) -> T: + pass + + @abstractmethod + def visit_unpack_type(self, t: UnpackType, /) -> T: + pass + + +@trait +@mypyc_attr(allow_interpreted_subclasses=True) +class SyntheticTypeVisitor(TypeVisitor[T]): + """A TypeVisitor that also knows how to visit synthetic AST constructs. + + Not just real types. + """ + + @abstractmethod + def visit_type_list(self, t: TypeList, /) -> T: + pass + + @abstractmethod + def visit_callable_argument(self, t: CallableArgument, /) -> T: + pass + + @abstractmethod + def visit_ellipsis_type(self, t: EllipsisType, /) -> T: + pass + + @abstractmethod + def visit_raw_expression_type(self, t: RawExpressionType, /) -> T: + pass + + @abstractmethod + def visit_placeholder_type(self, t: PlaceholderType, /) -> T: + pass + + +@mypyc_attr(allow_interpreted_subclasses=True) +class TypeTranslator(TypeVisitor[Type]): + """Identity type transformation. + + Subclass this and override some methods to implement a non-trivial + transformation. 
+ + We cache the results of certain translations to avoid + massively expanding the sizes of types. + """ + + def __init__(self, cache: dict[Type, Type] | None = None) -> None: + # For deduplication of results + self.cache = cache + + def get_cached(self, t: Type) -> Type | None: + if self.cache is None: + return None + return self.cache.get(t) + + def set_cached(self, orig: Type, new: Type) -> None: + if self.cache is None: + # Minor optimization: construct lazily + self.cache = {} + self.cache[orig] = new + + def visit_unbound_type(self, t: UnboundType, /) -> Type: + return t + + def visit_any(self, t: AnyType, /) -> Type: + return t + + def visit_none_type(self, t: NoneType, /) -> Type: + return t + + def visit_uninhabited_type(self, t: UninhabitedType, /) -> Type: + return t + + def visit_erased_type(self, t: ErasedType, /) -> Type: + return t + + def visit_deleted_type(self, t: DeletedType, /) -> Type: + return t + + def visit_instance(self, t: Instance, /) -> Type: + last_known_value: LiteralType | None = None + if t.last_known_value is not None: + raw_last_known_value = t.last_known_value.accept(self) + assert isinstance(raw_last_known_value, LiteralType) # type: ignore[misc] + last_known_value = raw_last_known_value + return Instance( + typ=t.type, + args=self.translate_type_tuple(t.args), + line=t.line, + column=t.column, + last_known_value=last_known_value, + extra_attrs=t.extra_attrs, + ) + + def visit_type_var(self, t: TypeVarType, /) -> Type: + return t + + def visit_param_spec(self, t: ParamSpecType, /) -> Type: + return t + + def visit_parameters(self, t: Parameters, /) -> Type: + return t.copy_modified(arg_types=self.translate_type_list(t.arg_types)) + + def visit_type_var_tuple(self, t: TypeVarTupleType, /) -> Type: + return t + + def visit_partial_type(self, t: PartialType, /) -> Type: + return t + + def visit_unpack_type(self, t: UnpackType, /) -> Type: + return UnpackType(t.type.accept(self)) + + def visit_callable_type(self, t: CallableType, /) -> Type: + return t.copy_modified( + arg_types=self.translate_type_list(t.arg_types), + ret_type=t.ret_type.accept(self), + variables=self.translate_variables(t.variables), + ) + + def visit_tuple_type(self, t: TupleType, /) -> Type: + return TupleType( + self.translate_type_list(t.items), + # TODO: This appears to be unsafe. + cast(Any, t.partial_fallback.accept(self)), + t.line, + t.column, + ) + + def visit_typeddict_type(self, t: TypedDictType, /) -> Type: + # Use cache to avoid O(n**2) or worse expansion of types during translation + if cached := self.get_cached(t): + return cached + items = {item_name: item_type.accept(self) for (item_name, item_type) in t.items.items()} + result = TypedDictType( + items, + t.required_keys, + t.readonly_keys, + # TODO: This appears to be unsafe. 
+ cast(Any, t.fallback.accept(self)), + t.line, + t.column, + ) + self.set_cached(t, result) + return result + + def visit_literal_type(self, t: LiteralType, /) -> Type: + fallback = t.fallback.accept(self) + assert isinstance(fallback, Instance) # type: ignore[misc] + return LiteralType(value=t.value, fallback=fallback, line=t.line, column=t.column) + + def visit_union_type(self, t: UnionType, /) -> Type: + # Use cache to avoid O(n**2) or worse expansion of types during translation + # (only for large unions, since caching adds overhead) + use_cache = len(t.items) > 3 + if use_cache and (cached := self.get_cached(t)): + return cached + + result = UnionType( + self.translate_type_list(t.items), + t.line, + t.column, + uses_pep604_syntax=t.uses_pep604_syntax, + ) + if use_cache: + self.set_cached(t, result) + return result + + def translate_type_list(self, types: list[Type]) -> list[Type]: + return [t.accept(self) for t in types] + + def translate_type_tuple(self, types: tuple[Type, ...]) -> tuple[Type, ...]: + return tuple(t.accept(self) for t in types) + + def translate_variables( + self, variables: Sequence[TypeVarLikeType] + ) -> Sequence[TypeVarLikeType]: + return variables + + def visit_overloaded(self, t: Overloaded, /) -> Type: + items: list[CallableType] = [] + for item in t.items: + new = item.accept(self) + assert isinstance(new, CallableType) # type: ignore[misc] + items.append(new) + return Overloaded(items=items) + + def visit_type_type(self, t: TypeType, /) -> Type: + return TypeType.make_normalized( + t.item.accept(self), line=t.line, column=t.column, is_type_form=t.is_type_form + ) + + @abstractmethod + def visit_type_alias_type(self, t: TypeAliasType, /) -> Type: + # This method doesn't have a default implementation for type translators, + # because type aliases are special: some information is contained in the + # TypeAlias node, and we normally don't generate new nodes. Every subclass + # must implement this depending on its semantics. + pass + + +@mypyc_attr(allow_interpreted_subclasses=True) +class TypeQuery(SyntheticTypeVisitor[T]): + """Visitor for performing queries of types. + + strategy is used to combine results for a series of types, + common use cases involve a boolean query using `any` or `all`. + + Note: this visitor keeps an internal state (tracks type aliases to avoid + recursion), so it should *never* be reused for querying different types, + create a new visitor instance instead. + + # TODO: check that we don't have existing violations of this rule. + """ + + def __init__(self) -> None: + # Keep track of the type aliases already visited. This is needed to avoid + # infinite recursion on types like A = Union[int, List[A]]. + self.seen_aliases: set[TypeAliasType] | None = None + # By default, we eagerly expand type aliases, and query also types in the + # alias target. In most cases this is a desired behavior, but we may want + # to skip targets in some cases (e.g. when collecting type variables). 
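For illustration, a minimal sketch of subclassing the TypeTranslator defined above: override the cases you care about, and provide visit_type_alias_type, which the base class deliberately leaves abstract. The class name NoneToAny is invented; this is not part of the vendored mypy file.

    from mypy.type_visitor import TypeTranslator
    from mypy.types import AnyType, NoneType, Type, TypeAliasType, TypeOfAny

    class NoneToAny(TypeTranslator):
        """Sketch: replace every NoneType inside a type with Any."""

        def visit_none_type(self, t: NoneType, /) -> Type:
            return AnyType(TypeOfAny.special_form)

        def visit_type_alias_type(self, t: TypeAliasType, /) -> Type:
            # Abstract in the base class; this sketch keeps alias references
            # as-is instead of expanding or rebuilding them.
            return t

    # Usage: translated = some_type.accept(NoneToAny())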
+ self.skip_alias_target = False + + @abstractmethod + def strategy(self, items: list[T]) -> T: + raise NotImplementedError + + def visit_unbound_type(self, t: UnboundType, /) -> T: + return self.query_types(t.args) + + def visit_type_list(self, t: TypeList, /) -> T: + return self.query_types(t.items) + + def visit_callable_argument(self, t: CallableArgument, /) -> T: + return t.typ.accept(self) + + def visit_any(self, t: AnyType, /) -> T: + return self.strategy([]) + + def visit_uninhabited_type(self, t: UninhabitedType, /) -> T: + return self.strategy([]) + + def visit_none_type(self, t: NoneType, /) -> T: + return self.strategy([]) + + def visit_erased_type(self, t: ErasedType, /) -> T: + return self.strategy([]) + + def visit_deleted_type(self, t: DeletedType, /) -> T: + return self.strategy([]) + + def visit_type_var(self, t: TypeVarType, /) -> T: + return self.query_types([t.upper_bound, t.default] + t.values) + + def visit_param_spec(self, t: ParamSpecType, /) -> T: + return self.query_types([t.upper_bound, t.default, t.prefix]) + + def visit_type_var_tuple(self, t: TypeVarTupleType, /) -> T: + return self.query_types([t.upper_bound, t.default]) + + def visit_unpack_type(self, t: UnpackType, /) -> T: + return self.query_types([t.type]) + + def visit_parameters(self, t: Parameters, /) -> T: + return self.query_types(t.arg_types) + + def visit_partial_type(self, t: PartialType, /) -> T: + return self.strategy([]) + + def visit_instance(self, t: Instance, /) -> T: + return self.query_types(t.args) + + def visit_callable_type(self, t: CallableType, /) -> T: + # FIX generics + return self.query_types(t.arg_types + [t.ret_type]) + + def visit_tuple_type(self, t: TupleType, /) -> T: + return self.query_types([t.partial_fallback] + t.items) + + def visit_typeddict_type(self, t: TypedDictType, /) -> T: + return self.query_types(t.items.values()) + + def visit_raw_expression_type(self, t: RawExpressionType, /) -> T: + return self.strategy([]) + + def visit_literal_type(self, t: LiteralType, /) -> T: + return self.strategy([]) + + def visit_union_type(self, t: UnionType, /) -> T: + return self.query_types(t.items) + + def visit_overloaded(self, t: Overloaded, /) -> T: + return self.query_types(t.items) + + def visit_type_type(self, t: TypeType, /) -> T: + return t.item.accept(self) + + def visit_ellipsis_type(self, t: EllipsisType, /) -> T: + return self.strategy([]) + + def visit_placeholder_type(self, t: PlaceholderType, /) -> T: + return self.query_types(t.args) + + def visit_type_alias_type(self, t: TypeAliasType, /) -> T: + if self.skip_alias_target: + return self.query_types(t.args) + # Skip type aliases already visited types to avoid infinite recursion + # (also use this as a simple-minded cache). + if self.seen_aliases is None: + self.seen_aliases = set() + elif t in self.seen_aliases: + return self.strategy([]) + self.seen_aliases.add(t) + return get_proper_type(t).accept(self) + + def query_types(self, types: Iterable[Type]) -> T: + """Perform a query for a list of types using the strategy to combine the results.""" + return self.strategy([t.accept(self) for t in types]) + + +# Return True if at least one type component returns True +ANY_STRATEGY: Final = 0 +# Return True if no type component returns False +ALL_STRATEGY: Final = 1 + + +class BoolTypeQuery(SyntheticTypeVisitor[bool]): + """Visitor for performing recursive queries of types with a bool result. + + Use TypeQuery if you need non-bool results. + + 'strategy' is used to combine results for a series of types. 
It must + be ANY_STRATEGY or ALL_STRATEGY. + + Note: This visitor keeps an internal state (tracks type aliases to avoid + recursion), so it should *never* be reused for querying different types + unless you call reset() first. + """ + + def __init__(self, strategy: int) -> None: + self.strategy = strategy + if strategy == ANY_STRATEGY: + self.default = False + else: + assert strategy == ALL_STRATEGY + self.default = True + # Keep track of the type aliases already visited. This is needed to avoid + # infinite recursion on types like A = Union[int, List[A]]. An empty set is + # represented as None as a micro-optimization. + self.seen_aliases: set[TypeAliasType] | None = None + # By default, we eagerly expand type aliases, and query also types in the + # alias target. In most cases this is a desired behavior, but we may want + # to skip targets in some cases (e.g. when collecting type variables). + self.skip_alias_target = False + + def reset(self) -> None: + """Clear mutable state (but preserve strategy). + + This *must* be called if you want to reuse the visitor. + """ + self.seen_aliases = None + + def visit_unbound_type(self, t: UnboundType, /) -> bool: + return self.query_types(t.args) + + def visit_type_list(self, t: TypeList, /) -> bool: + return self.query_types(t.items) + + def visit_callable_argument(self, t: CallableArgument, /) -> bool: + return t.typ.accept(self) + + def visit_any(self, t: AnyType, /) -> bool: + return self.default + + def visit_uninhabited_type(self, t: UninhabitedType, /) -> bool: + return self.default + + def visit_none_type(self, t: NoneType, /) -> bool: + return self.default + + def visit_erased_type(self, t: ErasedType, /) -> bool: + return self.default + + def visit_deleted_type(self, t: DeletedType, /) -> bool: + return self.default + + def visit_type_var(self, t: TypeVarType, /) -> bool: + return self.query_types([t.upper_bound, t.default] + t.values) + + def visit_param_spec(self, t: ParamSpecType, /) -> bool: + return self.query_types([t.upper_bound, t.default, t.prefix]) + + def visit_type_var_tuple(self, t: TypeVarTupleType, /) -> bool: + return self.query_types([t.upper_bound, t.default]) + + def visit_unpack_type(self, t: UnpackType, /) -> bool: + return self.query_types([t.type]) + + def visit_parameters(self, t: Parameters, /) -> bool: + return self.query_types(t.arg_types) + + def visit_partial_type(self, t: PartialType, /) -> bool: + return self.default + + def visit_instance(self, t: Instance, /) -> bool: + return self.query_types(t.args) + + def visit_callable_type(self, t: CallableType, /) -> bool: + # FIX generics + # Avoid allocating any objects here as an optimization. 
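As a small, hedged sketch of how BoolTypeQuery is used (not part of the vendored mypy sources; the class name ContainsAny is invented): pick a strategy in __init__ and override the leaf cases whose answer differs from the default. With ANY_STRATEGY the child results are combined with any(), so a single overridden leaf returning True is enough.

    from mypy.type_visitor import ANY_STRATEGY, BoolTypeQuery
    from mypy.types import AnyType, NoneType, TypeOfAny, UnionType

    class ContainsAny(BoolTypeQuery):
        """Sketch: does this type mention Any anywhere?"""

        def __init__(self) -> None:
            super().__init__(ANY_STRATEGY)  # combine child results with any()

        def visit_any(self, t: AnyType, /) -> bool:
            return True

    typ = UnionType([NoneType(), AnyType(TypeOfAny.explicit)])
    assert typ.accept(ContainsAny())            # the union contains an Any
    assert not NoneType().accept(ContainsAny())  # plain None does not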
+ args = self.query_types(t.arg_types) + ret = t.ret_type.accept(self) + if self.strategy == ANY_STRATEGY: + return args or ret + else: + return args and ret + + def visit_tuple_type(self, t: TupleType, /) -> bool: + return self.query_types([t.partial_fallback] + t.items) + + def visit_typeddict_type(self, t: TypedDictType, /) -> bool: + return self.query_types(list(t.items.values())) + + def visit_raw_expression_type(self, t: RawExpressionType, /) -> bool: + return self.default + + def visit_literal_type(self, t: LiteralType, /) -> bool: + return self.default + + def visit_union_type(self, t: UnionType, /) -> bool: + return self.query_types(t.items) + + def visit_overloaded(self, t: Overloaded, /) -> bool: + return self.query_types(t.items) # type: ignore[arg-type] + + def visit_type_type(self, t: TypeType, /) -> bool: + return t.item.accept(self) + + def visit_ellipsis_type(self, t: EllipsisType, /) -> bool: + return self.default + + def visit_placeholder_type(self, t: PlaceholderType, /) -> bool: + return self.query_types(t.args) + + def visit_type_alias_type(self, t: TypeAliasType, /) -> bool: + if self.skip_alias_target: + return self.query_types(t.args) + # Skip type aliases already visited types to avoid infinite recursion + # (also use this as a simple-minded cache). + if self.seen_aliases is None: + self.seen_aliases = set() + elif t in self.seen_aliases: + return self.default + self.seen_aliases.add(t) + return get_proper_type(t).accept(self) + + def query_types(self, types: list[Type] | tuple[Type, ...]) -> bool: + """Perform a query for a sequence of types using the strategy to combine the results.""" + # Special-case for lists and tuples to allow mypyc to produce better code. + if isinstance(types, list): + if self.strategy == ANY_STRATEGY: + return any(t.accept(self) for t in types) + else: + return all(t.accept(self) for t in types) + else: + if self.strategy == ANY_STRATEGY: + return any(t.accept(self) for t in types) + else: + return all(t.accept(self) for t in types) diff --git a/.venv/lib/python3.12/site-packages/mypy/typeanal.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/typeanal.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..ccb3605 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/typeanal.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/typeanal.py b/.venv/lib/python3.12/site-packages/mypy/typeanal.py new file mode 100644 index 0000000..3e5f522 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeanal.py @@ -0,0 +1,2731 @@ +"""Semantic analysis of types""" + +from __future__ import annotations + +import itertools +from collections.abc import Iterable, Iterator, Sequence +from contextlib import contextmanager +from typing import Callable, Final, Protocol, TypeVar + +from mypy import errorcodes as codes, message_registry, nodes +from mypy.errorcodes import ErrorCode +from mypy.errors import ErrorInfo +from mypy.expandtype import expand_type +from mypy.message_registry import ( + INVALID_PARAM_SPEC_LOCATION, + INVALID_PARAM_SPEC_LOCATION_NOTE, + TYPEDDICT_OVERRIDE_MERGE, +) +from mypy.messages import ( + MessageBuilder, + format_type, + format_type_bare, + quote_type_string, + wrong_type_arg_count, +) +from mypy.nodes import ( + ARG_NAMED, + ARG_NAMED_OPT, + ARG_OPT, + ARG_POS, + ARG_STAR, + ARG_STAR2, + MISSING_FALLBACK, + SYMBOL_FUNCBASE_TYPES, + ArgKind, + Context, + Decorator, + ImportFrom, + MypyFile, + ParamSpecExpr, + PlaceholderNode, + 
SymbolTableNode, + TypeAlias, + TypeInfo, + TypeVarExpr, + TypeVarLikeExpr, + TypeVarTupleExpr, + Var, + check_arg_kinds, + check_arg_names, +) +from mypy.options import INLINE_TYPEDDICT, TYPE_FORM, Options +from mypy.plugin import AnalyzeTypeContext, Plugin, TypeAnalyzerPluginInterface +from mypy.semanal_shared import ( + SemanticAnalyzerCoreInterface, + SemanticAnalyzerInterface, + paramspec_args, + paramspec_kwargs, +) +from mypy.state import state +from mypy.tvar_scope import TypeVarLikeScope +from mypy.types import ( + ANNOTATED_TYPE_NAMES, + ANY_STRATEGY, + CONCATENATE_TYPE_NAMES, + FINAL_TYPE_NAMES, + LITERAL_TYPE_NAMES, + NEVER_NAMES, + TUPLE_NAMES, + TYPE_ALIAS_NAMES, + TYPE_NAMES, + UNPACK_TYPE_NAMES, + AnyType, + BoolTypeQuery, + CallableArgument, + CallableType, + DeletedType, + EllipsisType, + ErasedType, + Instance, + LiteralType, + NoneType, + Overloaded, + Parameters, + ParamSpecFlavor, + ParamSpecType, + PartialType, + PlaceholderType, + ProperType, + RawExpressionType, + ReadOnlyType, + RequiredType, + SyntheticTypeVisitor, + TrivialSyntheticTypeTranslator, + TupleType, + Type, + TypeAliasType, + TypedDictType, + TypeList, + TypeOfAny, + TypeQuery, + TypeType, + TypeVarId, + TypeVarLikeType, + TypeVarTupleType, + TypeVarType, + UnboundType, + UninhabitedType, + UnionType, + UnpackType, + callable_with_ellipsis, + find_unpack_in_list, + flatten_nested_tuples, + get_proper_type, + has_type_vars, +) +from mypy.types_utils import get_bad_type_type_item +from mypy.typevars import fill_typevars + +T = TypeVar("T") + +type_constructors: Final = { + "typing.Callable", + "typing.Optional", + "typing.Tuple", + "typing.Type", + "typing.Union", + *LITERAL_TYPE_NAMES, + *ANNOTATED_TYPE_NAMES, +} + +ARG_KINDS_BY_CONSTRUCTOR: Final = { + "mypy_extensions.Arg": ARG_POS, + "mypy_extensions.DefaultArg": ARG_OPT, + "mypy_extensions.NamedArg": ARG_NAMED, + "mypy_extensions.DefaultNamedArg": ARG_NAMED_OPT, + "mypy_extensions.VarArg": ARG_STAR, + "mypy_extensions.KwArg": ARG_STAR2, +} + +SELF_TYPE_NAMES: Final = {"typing.Self", "typing_extensions.Self"} + + +def analyze_type_alias( + type: Type, + api: SemanticAnalyzerCoreInterface, + tvar_scope: TypeVarLikeScope, + plugin: Plugin, + options: Options, + cur_mod_node: MypyFile, + is_typeshed_stub: bool, + allow_placeholder: bool = False, + in_dynamic_func: bool = False, + global_scope: bool = True, + allowed_alias_tvars: list[TypeVarLikeType] | None = None, + alias_type_params_names: list[str] | None = None, + python_3_12_type_alias: bool = False, +) -> tuple[Type, set[str]]: + """Analyze r.h.s. of a (potential) type alias definition. + + If `node` is valid as a type alias rvalue, return the resulting type and a set of + full names of type aliases it depends on (directly or indirectly). + 'node' must have been semantically analyzed. + """ + analyzer = TypeAnalyser( + api, + tvar_scope, + plugin, + options, + cur_mod_node, + is_typeshed_stub, + defining_alias=True, + allow_placeholder=allow_placeholder, + prohibit_self_type="type alias target", + allowed_alias_tvars=allowed_alias_tvars, + alias_type_params_names=alias_type_params_names, + python_3_12_type_alias=python_3_12_type_alias, + ) + analyzer.in_dynamic_func = in_dynamic_func + analyzer.global_scope = global_scope + res = analyzer.anal_type(type, nested=False) + return res, analyzer.aliases_used + + +class TypeAnalyser(SyntheticTypeVisitor[Type], TypeAnalyzerPluginInterface): + """Semantic analyzer for types. + + Converts unbound types into bound types. 
This is a no-op for already + bound types. + + If an incomplete reference is encountered, this does a defer. The + caller never needs to defer. + """ + + # Is this called from an untyped function definition? + in_dynamic_func: bool = False + # Is this called from global scope? + global_scope: bool = True + + def __init__( + self, + api: SemanticAnalyzerCoreInterface, + tvar_scope: TypeVarLikeScope, + plugin: Plugin, + options: Options, + cur_mod_node: MypyFile, + is_typeshed_stub: bool, + *, + defining_alias: bool = False, + python_3_12_type_alias: bool = False, + allow_tuple_literal: bool = False, + allow_unbound_tvars: bool = False, + allow_placeholder: bool = False, + allow_typed_dict_special_forms: bool = False, + allow_final: bool = True, + allow_param_spec_literals: bool = False, + allow_unpack: bool = False, + report_invalid_types: bool = True, + prohibit_self_type: str | None = None, + prohibit_special_class_field_types: str | None = None, + allowed_alias_tvars: list[TypeVarLikeType] | None = None, + allow_type_any: bool = False, + alias_type_params_names: list[str] | None = None, + ) -> None: + self.api = api + self.fail_func = api.fail + self.note_func = api.note + self.tvar_scope = tvar_scope + # Are we analysing a type alias definition rvalue? + self.defining_alias = defining_alias + self.python_3_12_type_alias = python_3_12_type_alias + self.allow_tuple_literal = allow_tuple_literal + # Positive if we are analyzing arguments of another (outer) type + self.nesting_level = 0 + # Should we allow new type syntax when targeting older Python versions + # like 'list[int]' or 'X | Y' (allowed in stubs and with `__future__` import)? + self.always_allow_new_syntax = self.api.is_stub_file or self.api.is_future_flag_set( + "annotations" + ) + # Should we accept unbound type variables? This is currently used for class bases, + # and alias right hand sides (before they are analyzed as type aliases). + self.allow_unbound_tvars = allow_unbound_tvars + if allowed_alias_tvars is None: + allowed_alias_tvars = [] + self.allowed_alias_tvars = allowed_alias_tvars + self.alias_type_params_names = alias_type_params_names + # If false, record incomplete ref if we generate PlaceholderType. + self.allow_placeholder = allow_placeholder + # Are we in a context where Required[] is allowed? + self.allow_typed_dict_special_forms = allow_typed_dict_special_forms + # Set True when we analyze ClassVar else False + self.allow_final = allow_final + # Are we in a context where ParamSpec literals are allowed? + self.allow_param_spec_literals = allow_param_spec_literals + # Are we in context where literal "..." specifically is allowed? + self.allow_ellipsis = False + # Should we report an error whenever we encounter a RawExpressionType outside + # of a Literal context: e.g. whenever we encounter an invalid type? Normally, + # we want to report an error, but the caller may want to do more specialized + # error handling. + self.report_invalid_types = report_invalid_types + self.plugin = plugin + self.options = options + self.cur_mod_node = cur_mod_node + self.is_typeshed_stub = is_typeshed_stub + # Names of type aliases encountered while analysing a type will be collected here. + self.aliases_used: set[str] = set() + self.prohibit_self_type = prohibit_self_type + # Set when we analyze TypedDicts or NamedTuples, since they are special: + self.prohibit_special_class_field_types = prohibit_special_class_field_types + # Allow variables typed as Type[Any] and type (useful for base classes). 
+ self.allow_type_any = allow_type_any + self.allow_type_var_tuple = False + self.allow_unpack = allow_unpack + + def lookup_qualified( + self, name: str, ctx: Context, suppress_errors: bool = False + ) -> SymbolTableNode | None: + return self.api.lookup_qualified(name, ctx, suppress_errors) + + def lookup_fully_qualified(self, fullname: str) -> SymbolTableNode: + return self.api.lookup_fully_qualified(fullname) + + def visit_unbound_type(self, t: UnboundType, defining_literal: bool = False) -> Type: + typ = self.visit_unbound_type_nonoptional(t, defining_literal) + if t.optional: + # We don't need to worry about double-wrapping Optionals or + # wrapping Anys: Union simplification will take care of that. + return make_optional_type(typ) + return typ + + def not_declared_in_type_params(self, tvar_name: str) -> bool: + return ( + self.alias_type_params_names is not None + and tvar_name not in self.alias_type_params_names + ) + + def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) -> Type: + sym = self.lookup_qualified(t.name, t) + param_spec_name = None + if t.name.endswith((".args", ".kwargs")): + param_spec_name = t.name.rsplit(".", 1)[0] + maybe_param_spec = self.lookup_qualified(param_spec_name, t) + if maybe_param_spec and isinstance(maybe_param_spec.node, ParamSpecExpr): + sym = maybe_param_spec + else: + param_spec_name = None + + if sym is not None: + node = sym.node + if isinstance(node, PlaceholderNode): + if node.becomes_typeinfo: + # Reference to placeholder type. + if self.api.final_iteration: + self.cannot_resolve_type(t) + return AnyType(TypeOfAny.from_error) + elif self.allow_placeholder: + self.api.defer() + else: + self.api.record_incomplete_ref() + # Always allow ParamSpec for placeholders, if they are actually not valid, + # they will be reported later, after we resolve placeholders. + return PlaceholderType( + node.fullname, + self.anal_array( + t.args, + allow_param_spec=True, + allow_param_spec_literals=True, + allow_unpack=True, + ), + t.line, + ) + else: + if self.api.final_iteration: + self.cannot_resolve_type(t) + return AnyType(TypeOfAny.from_error) + else: + # Reference to an unknown placeholder node. + self.api.record_incomplete_ref() + return AnyType(TypeOfAny.special_form) + if node is None: + self.fail(f"Internal error (node is None, kind={sym.kind})", t) + return AnyType(TypeOfAny.special_form) + fullname = node.fullname + hook = self.plugin.get_type_analyze_hook(fullname) + if hook is not None: + return hook(AnalyzeTypeContext(t, t, self)) + tvar_def = self.tvar_scope.get_binding(sym) + if tvar_def is not None: + # We need to cover special-case explained in get_typevarlike_argument() here, + # since otherwise the deferral will not be triggered if the type variable is + # used in a different module. Using isinstance() should be safe for this purpose. 
+ tvar_params = [tvar_def.upper_bound, tvar_def.default] + if isinstance(tvar_def, TypeVarType): + tvar_params += tvar_def.values + if any(isinstance(tp, PlaceholderType) for tp in tvar_params): + self.api.defer() + if isinstance(sym.node, ParamSpecExpr): + if tvar_def is None: + if self.allow_unbound_tvars: + return t + name = param_spec_name or t.name + if self.defining_alias and self.not_declared_in_type_params(t.name): + msg = f'ParamSpec "{name}" is not included in type_params' + else: + msg = f'ParamSpec "{name}" is unbound' + self.fail(msg, t, code=codes.VALID_TYPE) + return AnyType(TypeOfAny.from_error) + assert isinstance(tvar_def, ParamSpecType) + if len(t.args) > 0: + self.fail( + f'ParamSpec "{t.name}" used with arguments', t, code=codes.VALID_TYPE + ) + if param_spec_name is not None and not self.allow_param_spec_literals: + self.fail( + "ParamSpec components are not allowed here", t, code=codes.VALID_TYPE + ) + return AnyType(TypeOfAny.from_error) + # Change the line number + return ParamSpecType( + tvar_def.name, + tvar_def.fullname, + tvar_def.id, + tvar_def.flavor, + tvar_def.upper_bound, + tvar_def.default, + line=t.line, + column=t.column, + ) + if ( + isinstance(sym.node, TypeVarExpr) + and self.defining_alias + and not defining_literal + and (tvar_def is None or tvar_def not in self.allowed_alias_tvars) + ): + if self.not_declared_in_type_params(t.name): + if self.python_3_12_type_alias: + msg = message_registry.TYPE_PARAMETERS_SHOULD_BE_DECLARED.format( + f'"{t.name}"' + ) + else: + msg = f'Type variable "{t.name}" is not included in type_params' + else: + msg = f'Can\'t use bound type variable "{t.name}" to define generic alias' + self.fail(msg, t, code=codes.VALID_TYPE) + return AnyType(TypeOfAny.from_error) + if isinstance(sym.node, TypeVarExpr) and tvar_def is not None: + assert isinstance(tvar_def, TypeVarType) + if len(t.args) > 0: + self.fail( + f'Type variable "{t.name}" used with arguments', t, code=codes.VALID_TYPE + ) + # Change the line number + return tvar_def.copy_modified(line=t.line, column=t.column) + if isinstance(sym.node, TypeVarTupleExpr) and ( + tvar_def is not None + and self.defining_alias + and tvar_def not in self.allowed_alias_tvars + ): + if self.not_declared_in_type_params(t.name): + msg = f'Type variable "{t.name}" is not included in type_params' + else: + msg = f'Can\'t use bound type variable "{t.name}" to define generic alias' + self.fail(msg, t, code=codes.VALID_TYPE) + return AnyType(TypeOfAny.from_error) + if isinstance(sym.node, TypeVarTupleExpr): + if tvar_def is None: + if self.allow_unbound_tvars: + return t + if self.defining_alias and self.not_declared_in_type_params(t.name): + if self.python_3_12_type_alias: + msg = message_registry.TYPE_PARAMETERS_SHOULD_BE_DECLARED.format( + f'"{t.name}"' + ) + else: + msg = f'TypeVarTuple "{t.name}" is not included in type_params' + else: + msg = f'TypeVarTuple "{t.name}" is unbound' + self.fail(msg, t, code=codes.VALID_TYPE) + return AnyType(TypeOfAny.from_error) + assert isinstance(tvar_def, TypeVarTupleType) + if not self.allow_type_var_tuple: + self.fail( + f'TypeVarTuple "{t.name}" is only valid with an unpack', + t, + code=codes.VALID_TYPE, + ) + return AnyType(TypeOfAny.from_error) + if len(t.args) > 0: + self.fail( + f'Type variable "{t.name}" used with arguments', t, code=codes.VALID_TYPE + ) + + # Change the line number + return TypeVarTupleType( + tvar_def.name, + tvar_def.fullname, + tvar_def.id, + tvar_def.upper_bound, + sym.node.tuple_fallback, + tvar_def.default, + 
line=t.line, + column=t.column, + ) + special = self.try_analyze_special_unbound_type(t, fullname) + if special is not None: + return special + if isinstance(node, TypeAlias): + self.aliases_used.add(fullname) + an_args = self.anal_array( + t.args, + allow_param_spec=True, + allow_param_spec_literals=node.has_param_spec_type, + allow_unpack=True, # Fixed length unpacks can be used for non-variadic aliases. + ) + if node.has_param_spec_type and len(node.alias_tvars) == 1: + an_args = self.pack_paramspec_args(an_args) + + disallow_any = self.options.disallow_any_generics and not self.is_typeshed_stub + res = instantiate_type_alias( + node, + an_args, + self.fail, + node.no_args, + t, + self.options, + unexpanded_type=t, + disallow_any=disallow_any, + empty_tuple_index=t.empty_tuple_index, + ) + # The only case where instantiate_type_alias() can return an incorrect instance is + # when it is top-level instance, so no need to recurse. + if ( + isinstance(res, ProperType) + and isinstance(res, Instance) + and not (self.defining_alias and self.nesting_level == 0) + and not validate_instance(res, self.fail, t.empty_tuple_index) + ): + fix_instance( + res, + self.fail, + self.note, + disallow_any=disallow_any, + options=self.options, + use_generic_error=True, + unexpanded_type=t, + ) + if node.eager: + res = get_proper_type(res) + return res + elif isinstance(node, TypeInfo): + return self.analyze_type_with_type_info(node, t.args, t, t.empty_tuple_index) + elif node.fullname in TYPE_ALIAS_NAMES: + return AnyType(TypeOfAny.special_form) + # Concatenate is an operator, no need for a proper type + elif node.fullname in CONCATENATE_TYPE_NAMES: + # We check the return type further up the stack for valid use locations + return self.apply_concatenate_operator(t) + else: + return self.analyze_unbound_type_without_type_info(t, sym, defining_literal) + else: # sym is None + return AnyType(TypeOfAny.special_form) + + def pack_paramspec_args(self, an_args: Sequence[Type]) -> list[Type]: + # "Aesthetic" ParamSpec literals for single ParamSpec: C[int, str] -> C[[int, str]]. + # These do not support mypy_extensions VarArgs, etc. as they were already analyzed + # TODO: should these be re-analyzed to get rid of this inconsistency? + count = len(an_args) + if count == 0: + return [] + if count == 1 and isinstance(get_proper_type(an_args[0]), AnyType): + # Single Any is interpreted as ..., rather that a single argument with Any type. + # I didn't find this in the PEP, but it sounds reasonable. + return list(an_args) + if any(isinstance(a, (Parameters, ParamSpecType)) for a in an_args): + if len(an_args) > 1: + first_wrong = next( + arg for arg in an_args if isinstance(arg, (Parameters, ParamSpecType)) + ) + self.fail( + "Nested parameter specifications are not allowed", + first_wrong, + code=codes.VALID_TYPE, + ) + return [AnyType(TypeOfAny.from_error)] + return list(an_args) + first = an_args[0] + return [ + Parameters( + an_args, [ARG_POS] * count, [None] * count, line=first.line, column=first.column + ) + ] + + def cannot_resolve_type(self, t: UnboundType) -> None: + # TODO: Move error message generation to messages.py. We'd first + # need access to MessageBuilder here. Also move the similar + # message generation logic in semanal.py. 
+ self.api.fail(f'Cannot resolve name "{t.name}" (possible cyclic definition)', t) + if self.api.is_func_scope(): + self.note("Recursive types are not allowed at function scope", t) + + def apply_concatenate_operator(self, t: UnboundType) -> Type: + if len(t.args) == 0: + self.api.fail("Concatenate needs type arguments", t, code=codes.VALID_TYPE) + return AnyType(TypeOfAny.from_error) + + # Last argument has to be ParamSpec or Ellipsis. + ps = self.anal_type(t.args[-1], allow_param_spec=True, allow_ellipsis=True) + if not isinstance(ps, (ParamSpecType, Parameters)): + if isinstance(ps, UnboundType) and self.allow_unbound_tvars: + sym = self.lookup_qualified(ps.name, t) + if sym is not None and isinstance(sym.node, ParamSpecExpr): + return ps + self.api.fail( + "The last parameter to Concatenate needs to be a ParamSpec", + t, + code=codes.VALID_TYPE, + ) + return AnyType(TypeOfAny.from_error) + elif isinstance(ps, ParamSpecType) and ps.prefix.arg_types: + self.api.fail("Nested Concatenates are invalid", t, code=codes.VALID_TYPE) + + args = self.anal_array(t.args[:-1]) + pre = ps.prefix if isinstance(ps, ParamSpecType) else ps + + # mypy can't infer this :( + names: list[str | None] = [None] * len(args) + + pre = Parameters( + args + pre.arg_types, + [ARG_POS] * len(args) + pre.arg_kinds, + names + pre.arg_names, + line=t.line, + column=t.column, + ) + return ps.copy_modified(prefix=pre) if isinstance(ps, ParamSpecType) else pre + + def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Type | None: + """Bind special type that is recognized through magic name such as 'typing.Any'. + + Return the bound type if successful, and return None if the type is a normal type. + """ + if fullname == "builtins.None": + return NoneType() + elif fullname == "typing.Any": + return AnyType(TypeOfAny.explicit, line=t.line, column=t.column) + elif fullname in FINAL_TYPE_NAMES: + if self.prohibit_special_class_field_types: + self.fail( + f"Final[...] can't be used inside a {self.prohibit_special_class_field_types}", + t, + code=codes.VALID_TYPE, + ) + else: + if not self.allow_final: + self.fail( + "Final can be only used as an outermost qualifier in a variable annotation", + t, + code=codes.VALID_TYPE, + ) + return AnyType(TypeOfAny.from_error) + elif fullname in TUPLE_NAMES: + # Tuple is special because it is involved in builtin import cycle + # and may be not ready when used. + sym = self.api.lookup_fully_qualified_or_none("builtins.tuple") + if not sym or isinstance(sym.node, PlaceholderNode): + if self.api.is_incomplete_namespace("builtins"): + self.api.record_incomplete_ref() + else: + self.fail('Name "tuple" is not defined', t) + return AnyType(TypeOfAny.special_form) + if len(t.args) == 0 and not t.empty_tuple_index: + # Bare 'Tuple' is same as 'tuple' + any_type = self.get_omitted_any(t) + return self.named_type("builtins.tuple", [any_type], line=t.line, column=t.column) + if len(t.args) == 2 and isinstance(t.args[1], EllipsisType): + # Tuple[T, ...] (uniform, variable-length tuple) + instance = self.named_type("builtins.tuple", [self.anal_type(t.args[0])]) + instance.line = t.line + return instance + return self.tuple_type( + self.anal_array(t.args, allow_unpack=True), line=t.line, column=t.column + ) + elif fullname == "typing.Union": + items = self.anal_array(t.args) + return UnionType.make_union(items, line=t.line, column=t.column) + elif fullname == "typing.Optional": + if len(t.args) != 1: + self.fail( + "Optional[...] 
must have exactly one type argument", t, code=codes.VALID_TYPE + ) + return AnyType(TypeOfAny.from_error) + item = self.anal_type(t.args[0]) + return make_optional_type(item) + elif fullname == "typing.Callable": + return self.analyze_callable_type(t) + elif fullname in TYPE_NAMES: + if len(t.args) == 0: + if fullname == "typing.Type": + any_type = self.get_omitted_any(t) + return TypeType(any_type, line=t.line, column=t.column) + else: + # To prevent assignment of 'builtins.type' inferred as 'builtins.object' + # See https://github.com/python/mypy/issues/9476 for more information + return None + type_str = "Type[...]" if fullname == "typing.Type" else "type[...]" + if len(t.args) != 1: + self.fail( + f"{type_str} must have exactly one type argument", t, code=codes.VALID_TYPE + ) + item = self.anal_type(t.args[0]) + bad_item_name = get_bad_type_type_item(item) + if bad_item_name: + self.fail(f'{type_str} can\'t contain "{bad_item_name}"', t, code=codes.VALID_TYPE) + item = AnyType(TypeOfAny.from_error) + return TypeType.make_normalized(item, line=t.line, column=t.column) + elif fullname in ("typing_extensions.TypeForm", "typing.TypeForm"): + if TYPE_FORM not in self.options.enable_incomplete_feature: + self.fail( + "TypeForm is experimental," + " must be enabled with --enable-incomplete-feature=TypeForm", + t, + ) + if len(t.args) == 0: + any_type = self.get_omitted_any(t) + return TypeType(any_type, line=t.line, column=t.column, is_type_form=True) + if len(t.args) != 1: + type_str = "TypeForm[...]" + self.fail( + type_str + " must have exactly one type argument", t, code=codes.VALID_TYPE + ) + item = self.anal_type(t.args[0]) + return TypeType.make_normalized(item, line=t.line, column=t.column, is_type_form=True) + elif fullname == "typing.ClassVar": + if self.nesting_level > 0: + self.fail( + "Invalid type: ClassVar nested inside other type", t, code=codes.VALID_TYPE + ) + if self.prohibit_special_class_field_types: + self.fail( + f"ClassVar[...] can't be used inside a {self.prohibit_special_class_field_types}", + t, + code=codes.VALID_TYPE, + ) + if self.defining_alias: + self.fail( + "ClassVar[...] can't be used inside a type alias", t, code=codes.VALID_TYPE + ) + if len(t.args) == 0: + return AnyType(TypeOfAny.from_omitted_generics, line=t.line, column=t.column) + if len(t.args) != 1: + self.fail( + "ClassVar[...] must have at most one type argument", t, code=codes.VALID_TYPE + ) + return AnyType(TypeOfAny.from_error) + return self.anal_type(t.args[0], allow_final=self.options.python_version >= (3, 13)) + elif fullname in NEVER_NAMES: + return UninhabitedType() + elif fullname in LITERAL_TYPE_NAMES: + return self.analyze_literal_type(t) + elif fullname in ANNOTATED_TYPE_NAMES: + if len(t.args) < 2: + self.fail( + "Annotated[...] 
must have exactly one type argument" + " and at least one annotation", + t, + code=codes.VALID_TYPE, + ) + return AnyType(TypeOfAny.from_error) + return self.anal_type( + t.args[0], allow_typed_dict_special_forms=self.allow_typed_dict_special_forms + ) + elif fullname in ("typing_extensions.Required", "typing.Required"): + if not self.allow_typed_dict_special_forms: + self.fail( + "Required[] can be only used in a TypedDict definition", + t, + code=codes.VALID_TYPE, + ) + return AnyType(TypeOfAny.from_error) + if len(t.args) != 1: + self.fail( + "Required[] must have exactly one type argument", t, code=codes.VALID_TYPE + ) + return AnyType(TypeOfAny.from_error) + return RequiredType( + self.anal_type(t.args[0], allow_typed_dict_special_forms=True), required=True + ) + elif fullname in ("typing_extensions.NotRequired", "typing.NotRequired"): + if not self.allow_typed_dict_special_forms: + self.fail( + "NotRequired[] can be only used in a TypedDict definition", + t, + code=codes.VALID_TYPE, + ) + return AnyType(TypeOfAny.from_error) + if len(t.args) != 1: + self.fail( + "NotRequired[] must have exactly one type argument", t, code=codes.VALID_TYPE + ) + return AnyType(TypeOfAny.from_error) + return RequiredType( + self.anal_type(t.args[0], allow_typed_dict_special_forms=True), required=False + ) + elif fullname in ("typing_extensions.ReadOnly", "typing.ReadOnly"): + if not self.allow_typed_dict_special_forms: + self.fail( + "ReadOnly[] can be only used in a TypedDict definition", + t, + code=codes.VALID_TYPE, + ) + return AnyType(TypeOfAny.from_error) + if len(t.args) != 1: + self.fail( + '"ReadOnly[]" must have exactly one type argument', t, code=codes.VALID_TYPE + ) + return AnyType(TypeOfAny.from_error) + return ReadOnlyType(self.anal_type(t.args[0], allow_typed_dict_special_forms=True)) + elif ( + self.anal_type_guard_arg(t, fullname) is not None + or self.anal_type_is_arg(t, fullname) is not None + ): + # In most contexts, TypeGuard[...] acts as an alias for bool (ignoring its args) + return self.named_type("builtins.bool") + elif fullname in UNPACK_TYPE_NAMES: + if len(t.args) != 1: + self.fail("Unpack[...] requires exactly one type argument", t) + return AnyType(TypeOfAny.from_error) + if not self.allow_unpack: + self.fail(message_registry.INVALID_UNPACK_POSITION, t, code=codes.VALID_TYPE) + return AnyType(TypeOfAny.from_error) + self.allow_type_var_tuple = True + result = UnpackType(self.anal_type(t.args[0]), line=t.line, column=t.column) + self.allow_type_var_tuple = False + return result + elif fullname in SELF_TYPE_NAMES: + if t.args: + self.fail("Self type cannot have type arguments", t) + if self.prohibit_self_type is not None: + self.fail(f"Self type cannot be used in {self.prohibit_self_type}", t) + return AnyType(TypeOfAny.from_error) + if self.api.type is None: + self.fail("Self type is only allowed in annotations within class definition", t) + return AnyType(TypeOfAny.from_error) + if self.api.type.has_base("builtins.type"): + self.fail("Self type cannot be used in a metaclass", t) + if self.api.type.self_type is not None: + if self.api.type.is_final or self.api.type.is_enum and self.api.type.enum_members: + return fill_typevars(self.api.type) + return self.api.type.self_type.copy_modified(line=t.line, column=t.column) + # TODO: verify this is unreachable and replace with an assert? 
+ self.fail("Unexpected Self type", t) + return AnyType(TypeOfAny.from_error) + return None + + def get_omitted_any(self, typ: Type, fullname: str | None = None) -> AnyType: + disallow_any = not self.is_typeshed_stub and self.options.disallow_any_generics + return get_omitted_any(disallow_any, self.fail, self.note, typ, self.options, fullname) + + def check_and_warn_deprecated(self, info: TypeInfo, ctx: Context) -> None: + """Similar logic to `TypeChecker.check_deprecated` and `TypeChecker.warn_deprecated.""" + + if ( + (deprecated := info.deprecated) + and not self.is_typeshed_stub + and not (self.api.type and (self.api.type.fullname == info.fullname)) + and not any( + info.fullname == p or info.fullname.startswith(f"{p}.") + for p in self.options.deprecated_calls_exclude + ) + ): + for imp in self.cur_mod_node.imports: + if isinstance(imp, ImportFrom) and any(info.name == n[0] for n in imp.names): + break + else: + warn = self.note if self.options.report_deprecated_as_note else self.fail + warn(deprecated, ctx, code=codes.DEPRECATED) + + def analyze_type_with_type_info( + self, info: TypeInfo, args: Sequence[Type], ctx: Context, empty_tuple_index: bool + ) -> Type: + """Bind unbound type when were able to find target TypeInfo. + + This handles simple cases like 'int', 'modname.UserClass[str]', etc. + """ + + self.check_and_warn_deprecated(info, ctx) + + if len(args) > 0 and info.fullname == "builtins.tuple": + fallback = Instance(info, [AnyType(TypeOfAny.special_form)], ctx.line) + return TupleType(self.anal_array(args, allow_unpack=True), fallback, ctx.line) + + # Analyze arguments and (usually) construct Instance type. The + # number of type arguments and their values are + # checked only later, since we do not always know the + # valid count at this point. Thus we may construct an + # Instance with an invalid number of type arguments. + # + # We allow ParamSpec literals based on a heuristic: it will be + # checked later anyways but the error message may be worse. + instance = Instance( + info, + self.anal_array( + args, + allow_param_spec=True, + allow_param_spec_literals=info.has_param_spec_type, + allow_unpack=True, # Fixed length tuples can be used for non-variadic types. + ), + ctx.line, + ctx.column, + ) + instance.end_line = ctx.end_line + instance.end_column = ctx.end_column + if len(info.type_vars) == 1 and info.has_param_spec_type: + instance.args = tuple(self.pack_paramspec_args(instance.args)) + + # Check type argument count. + instance.args = tuple(flatten_nested_tuples(instance.args)) + if not (self.defining_alias and self.nesting_level == 0) and not validate_instance( + instance, self.fail, empty_tuple_index + ): + fix_instance( + instance, + self.fail, + self.note, + disallow_any=self.options.disallow_any_generics and not self.is_typeshed_stub, + options=self.options, + ) + + tup = info.tuple_type + if tup is not None: + # The class has a Tuple[...] base class so it will be + # represented as a tuple type. + if info.special_alias: + return instantiate_type_alias( + info.special_alias, + # TODO: should we allow NamedTuples generic in ParamSpec? + self.anal_array(args, allow_unpack=True), + self.fail, + False, + ctx, + self.options, + use_standard_error=True, + ) + return tup.copy_modified( + items=self.anal_array(tup.items, allow_unpack=True), fallback=instance + ) + td = info.typeddict_type + if td is not None: + # The class has a TypedDict[...] base class so it will be + # represented as a typeddict type. 
+ if info.special_alias: + return instantiate_type_alias( + info.special_alias, + # TODO: should we allow TypedDicts generic in ParamSpec? + self.anal_array(args, allow_unpack=True), + self.fail, + False, + ctx, + self.options, + use_standard_error=True, + ) + # Create a named TypedDictType + return td.copy_modified( + item_types=self.anal_array(list(td.items.values())), fallback=instance + ) + + if info.fullname == "types.NoneType": + self.fail( + "NoneType should not be used as a type, please use None instead", + ctx, + code=codes.VALID_TYPE, + ) + return NoneType(ctx.line, ctx.column) + + return instance + + def analyze_unbound_type_without_type_info( + self, t: UnboundType, sym: SymbolTableNode, defining_literal: bool + ) -> Type: + """Figure out what an unbound type that doesn't refer to a TypeInfo node means. + + This is something unusual. We try our best to find out what it is. + """ + name = sym.fullname + if name is None: + assert sym.node is not None + name = sym.node.name + # Option 1: + # Something with an Any type -- make it an alias for Any in a type + # context. This is slightly problematic as it allows using the type 'Any' + # as a base class -- however, this will fail soon at runtime so the problem + # is pretty minor. + if isinstance(sym.node, Var): + typ = get_proper_type(sym.node.type) + if isinstance(typ, AnyType): + return AnyType( + TypeOfAny.from_unimported_type, missing_import_name=typ.missing_import_name + ) + elif self.allow_type_any: + if isinstance(typ, Instance) and typ.type.fullname == "builtins.type": + return AnyType(TypeOfAny.special_form) + if isinstance(typ, TypeType) and isinstance(typ.item, AnyType): + return AnyType(TypeOfAny.from_another_any, source_any=typ.item) + # Option 2: + # Unbound type variable. Currently these may be still valid, + # for example when defining a generic type alias. + unbound_tvar = ( + isinstance(sym.node, (TypeVarExpr, TypeVarTupleExpr)) + and self.tvar_scope.get_binding(sym) is None + ) + if self.allow_unbound_tvars and unbound_tvar: + return t + + # Option 3: + # Enum value. Note: we only want to return a LiteralType when + # we're using this enum value specifically within context of + # a "Literal[...]" type. So, if `defining_literal` is not set, + # we bail out early with an error. + # + # If, in the distant future, we decide to permit things like + # `def foo(x: Color.RED) -> None: ...`, we can remove that + # check entirely. + if ( + isinstance(sym.node, Var) + and sym.node.info + and sym.node.info.is_enum + and not sym.node.name.startswith("__") + ): + value = sym.node.name + base_enum_short_name = sym.node.info.name + if not defining_literal: + msg = message_registry.INVALID_TYPE_RAW_ENUM_VALUE.format( + base_enum_short_name, value + ) + self.fail(msg.value, t, code=msg.code) + return AnyType(TypeOfAny.from_error) + return LiteralType( + value=value, + fallback=Instance(sym.node.info, [], line=t.line, column=t.column), + line=t.line, + column=t.column, + ) + + # None of the above options worked. We parse the args (if there are any) + # to make sure there are no remaining semanal-only types, then give up. + t = t.copy_modified(args=self.anal_array(t.args)) + # TODO: Move this message building logic to messages.py. 
+ notes: list[str] = [] + error_code = codes.VALID_TYPE + if isinstance(sym.node, Var): + notes.append( + "See https://mypy.readthedocs.io/en/" + "stable/common_issues.html#variables-vs-type-aliases" + ) + message = 'Variable "{}" is not valid as a type' + elif isinstance(sym.node, (SYMBOL_FUNCBASE_TYPES, Decorator)): + message = 'Function "{}" is not valid as a type' + if name == "builtins.any": + notes.append('Perhaps you meant "typing.Any" instead of "any"?') + elif name == "builtins.callable": + notes.append('Perhaps you meant "typing.Callable" instead of "callable"?') + else: + notes.append('Perhaps you need "Callable[...]" or a callback protocol?') + elif isinstance(sym.node, MypyFile): + message = 'Module "{}" is not valid as a type' + notes.append("Perhaps you meant to use a protocol matching the module structure?") + elif unbound_tvar: + assert isinstance(sym.node, TypeVarLikeExpr) + if sym.node.is_new_style: + # PEP 695 type parameters are never considered unbound -- they are undefined + # in contexts where they aren't valid, such as in argument default values. + message = 'Name "{}" is not defined' + name = name.split(".")[-1] + error_code = codes.NAME_DEFINED + else: + message = 'Type variable "{}" is unbound' + short = name.split(".")[-1] + notes.append( + f'(Hint: Use "Generic[{short}]" or "Protocol[{short}]" base class' + f' to bind "{short}" inside a class)' + ) + notes.append( + f'(Hint: Use "{short}" in function signature ' + f'to bind "{short}" inside a function)' + ) + else: + message = 'Cannot interpret reference "{}" as a type' + if not defining_literal: + # Literal check already gives a custom error. Avoid duplicating errors. + self.fail(message.format(name), t, code=error_code) + for note in notes: + self.note(note, t, code=error_code) + + # TODO: Would it be better to always return Any instead of UnboundType + # in case of an error? On one hand, UnboundType has a name so error messages + # are more detailed, on the other hand, some of them may be bogus, + # see https://github.com/python/mypy/issues/4987. + return t + + def visit_any(self, t: AnyType) -> Type: + return t + + def visit_none_type(self, t: NoneType) -> Type: + return t + + def visit_uninhabited_type(self, t: UninhabitedType) -> Type: + return t + + def visit_erased_type(self, t: ErasedType) -> Type: + # This type should exist only temporarily during type inference + assert False, "Internal error: Unexpected erased type" + + def visit_deleted_type(self, t: DeletedType) -> Type: + return t + + def visit_type_list(self, t: TypeList) -> Type: + # Parameters literal (Z[[int, str, Whatever]]) + if self.allow_param_spec_literals: + params = self.analyze_callable_args(t) + if params: + ts, kinds, names = params + # bind these types + return Parameters(self.anal_array(ts), kinds, names, line=t.line, column=t.column) + else: + return AnyType(TypeOfAny.from_error) + else: + self.fail( + 'Bracketed expression "[...]" is not valid as a type', t, code=codes.VALID_TYPE + ) + if len(t.items) == 1: + self.note('Did you mean "List[...]"?', t) + return AnyType(TypeOfAny.from_error) + + def visit_callable_argument(self, t: CallableArgument) -> Type: + self.fail("Invalid type", t, code=codes.VALID_TYPE) + return AnyType(TypeOfAny.from_error) + + def visit_instance(self, t: Instance) -> Type: + return t + + def visit_type_alias_type(self, t: TypeAliasType) -> Type: + # TODO: should we do something here? 
+ return t + + def visit_type_var(self, t: TypeVarType) -> Type: + return t + + def visit_param_spec(self, t: ParamSpecType) -> Type: + return t + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type: + return t + + def visit_unpack_type(self, t: UnpackType) -> Type: + if not self.allow_unpack: + self.fail(message_registry.INVALID_UNPACK_POSITION, t.type, code=codes.VALID_TYPE) + return AnyType(TypeOfAny.from_error) + self.allow_type_var_tuple = True + result = UnpackType(self.anal_type(t.type), from_star_syntax=t.from_star_syntax) + self.allow_type_var_tuple = False + return result + + def visit_parameters(self, t: Parameters) -> Type: + raise NotImplementedError("ParamSpec literals cannot have unbound TypeVars") + + def visit_callable_type( + self, t: CallableType, nested: bool = True, namespace: str = "" + ) -> Type: + # Every Callable can bind its own type variables, if they're not in the outer scope + # TODO: attach namespace for nested free type variables (these appear in return type only). + with self.tvar_scope_frame(namespace=namespace): + unpacked_kwargs = t.unpack_kwargs + if self.defining_alias: + variables = t.variables + else: + variables, _ = self.bind_function_type_variables(t, t) + type_guard = self.anal_type_guard(t.ret_type) if t.type_guard is None else t.type_guard + type_is = self.anal_type_is(t.ret_type) if t.type_is is None else t.type_is + + arg_kinds = t.arg_kinds + arg_types = [] + param_spec_with_args = param_spec_with_kwargs = None + param_spec_invalid = False + for kind, ut in zip(arg_kinds, t.arg_types): + if kind == ARG_STAR: + param_spec_with_args, at = self.anal_star_arg_type(ut, kind, nested=nested) + elif kind == ARG_STAR2: + param_spec_with_kwargs, at = self.anal_star_arg_type(ut, kind, nested=nested) + else: + if param_spec_with_args: + param_spec_invalid = True + self.fail( + "Arguments not allowed after ParamSpec.args", t, code=codes.VALID_TYPE + ) + at = self.anal_type(ut, nested=nested, allow_unpack=False) + arg_types.append(at) + + if nested and arg_types: + # If we've got a Callable[[Unpack[SomeTypedDict]], None], make sure + # Unpack is interpreted as `**` and not as `*`. + last = arg_types[-1] + if isinstance(last, UnpackType): + # TODO: it would be better to avoid this get_proper_type() call. + p_at = get_proper_type(last.type) + if isinstance(p_at, TypedDictType) and not last.from_star_syntax: + # Automatically detect Unpack[Foo] in Callable as backwards + # compatible syntax for **Foo, if Foo is a TypedDict. + arg_kinds[-1] = ARG_STAR2 + arg_types[-1] = p_at + unpacked_kwargs = True + arg_types = self.check_unpacks_in_list(arg_types) + + if not param_spec_invalid and param_spec_with_args != param_spec_with_kwargs: + # If already invalid, do not report more errors - definition has + # to be fixed anyway + name = param_spec_with_args or param_spec_with_kwargs + self.fail( + f'ParamSpec must have "*args" typed as "{name}.args" and "**kwargs" typed as "{name}.kwargs"', + t, + code=codes.VALID_TYPE, + ) + param_spec_invalid = True + + if param_spec_invalid: + if ARG_STAR in arg_kinds: + arg_types[arg_kinds.index(ARG_STAR)] = AnyType(TypeOfAny.from_error) + if ARG_STAR2 in arg_kinds: + arg_types[arg_kinds.index(ARG_STAR2)] = AnyType(TypeOfAny.from_error) + + # If there were multiple (invalid) unpacks, the arg types list will become shorter, + # we need to trim the kinds/names as well to avoid crashes. 
+ arg_kinds = t.arg_kinds[: len(arg_types)] + arg_names = t.arg_names[: len(arg_types)] + + ret = t.copy_modified( + arg_types=arg_types, + arg_kinds=arg_kinds, + arg_names=arg_names, + ret_type=self.anal_type(t.ret_type, nested=nested), + # If the fallback isn't filled in yet, + # its type will be the falsey FakeInfo + fallback=(t.fallback if t.fallback.type else self.named_type("builtins.function")), + variables=self.anal_var_defs(variables), + type_guard=type_guard, + type_is=type_is, + unpack_kwargs=unpacked_kwargs, + ) + return ret + + def anal_type_guard(self, t: Type) -> Type | None: + if isinstance(t, UnboundType): + sym = self.lookup_qualified(t.name, t) + if sym is not None and sym.node is not None: + return self.anal_type_guard_arg(t, sym.node.fullname) + # TODO: What if it's an Instance? Then use t.type.fullname? + return None + + def anal_type_guard_arg(self, t: UnboundType, fullname: str) -> Type | None: + if fullname in ("typing_extensions.TypeGuard", "typing.TypeGuard"): + if len(t.args) != 1: + self.fail( + "TypeGuard must have exactly one type argument", t, code=codes.VALID_TYPE + ) + return AnyType(TypeOfAny.from_error) + return self.anal_type(t.args[0]) + return None + + def anal_type_is(self, t: Type) -> Type | None: + if isinstance(t, UnboundType): + sym = self.lookup_qualified(t.name, t) + if sym is not None and sym.node is not None: + return self.anal_type_is_arg(t, sym.node.fullname) + # TODO: What if it's an Instance? Then use t.type.fullname? + return None + + def anal_type_is_arg(self, t: UnboundType, fullname: str) -> Type | None: + if fullname in ("typing_extensions.TypeIs", "typing.TypeIs"): + if len(t.args) != 1: + self.fail("TypeIs must have exactly one type argument", t, code=codes.VALID_TYPE) + return AnyType(TypeOfAny.from_error) + return self.anal_type(t.args[0]) + return None + + def anal_star_arg_type(self, t: Type, kind: ArgKind, nested: bool) -> tuple[str | None, Type]: + """Analyze signature argument type for *args and **kwargs argument.""" + if isinstance(t, UnboundType) and t.name and "." in t.name and not t.args: + components = t.name.split(".") + tvar_name = ".".join(components[:-1]) + sym = self.lookup_qualified(tvar_name, t) + if sym is not None and isinstance(sym.node, ParamSpecExpr): + tvar_def = self.tvar_scope.get_binding(sym) + if isinstance(tvar_def, ParamSpecType): + if kind == ARG_STAR: + make_paramspec = paramspec_args + if components[-1] != "args": + self.fail( + f'Use "{tvar_name}.args" for variadic "*" parameter', + t, + code=codes.VALID_TYPE, + ) + elif kind == ARG_STAR2: + make_paramspec = paramspec_kwargs + if components[-1] != "kwargs": + self.fail( + f'Use "{tvar_name}.kwargs" for variadic "**" parameter', + t, + code=codes.VALID_TYPE, + ) + else: + assert False, kind + return tvar_name, make_paramspec( + tvar_def.name, + tvar_def.fullname, + tvar_def.id, + named_type_func=self.named_type, + line=t.line, + column=t.column, + ) + return None, self.anal_type(t, nested=nested, allow_unpack=True) + + def visit_overloaded(self, t: Overloaded) -> Type: + # Overloaded types are manually constructed in semanal.py by analyzing the + # AST and combining together the Callable types this visitor converts. + # + # So if we're ever asked to reanalyze an Overloaded type, we know it's + # fine to just return it as-is. + return t + + def visit_tuple_type(self, t: TupleType) -> Type: + # Types such as (t1, t2, ...) only allowed in assignment statements. They'll + # generate errors elsewhere, and Tuple[t1, t2, ...] must be used instead. 
+ if t.implicit and not self.allow_tuple_literal: + self.fail("Syntax error in type annotation", t, code=codes.SYNTAX) + if len(t.items) == 0: + self.note( + "Suggestion: Use Tuple[()] instead of () for an empty tuple, or " + "None for a function without a return value", + t, + code=codes.SYNTAX, + ) + elif len(t.items) == 1: + self.note("Suggestion: Is there a spurious trailing comma?", t, code=codes.SYNTAX) + else: + self.note( + "Suggestion: Use Tuple[T1, ..., Tn] instead of (T1, ..., Tn)", + t, + code=codes.SYNTAX, + ) + return AnyType(TypeOfAny.from_error) + + any_type = AnyType(TypeOfAny.special_form) + # If the fallback isn't filled in yet, its type will be the falsey FakeInfo + fallback = ( + t.partial_fallback + if t.partial_fallback.type + else self.named_type("builtins.tuple", [any_type]) + ) + return TupleType(self.anal_array(t.items, allow_unpack=True), fallback, t.line) + + def visit_typeddict_type(self, t: TypedDictType) -> Type: + req_keys = set() + readonly_keys = set() + items = {} + for item_name, item_type in t.items.items(): + # TODO: rework + analyzed = self.anal_type(item_type, allow_typed_dict_special_forms=True) + if isinstance(analyzed, RequiredType): + if analyzed.required: + req_keys.add(item_name) + analyzed = analyzed.item + else: + # Keys are required by default. + req_keys.add(item_name) + if isinstance(analyzed, ReadOnlyType): + readonly_keys.add(item_name) + analyzed = analyzed.item + items[item_name] = analyzed + if t.fallback.type is MISSING_FALLBACK: # anonymous/inline TypedDict + if INLINE_TYPEDDICT not in self.options.enable_incomplete_feature: + self.fail( + "Inline TypedDict is experimental," + " must be enabled with --enable-incomplete-feature=InlineTypedDict", + t, + ) + required_keys = req_keys + fallback = self.named_type("typing._TypedDict") + for typ in t.extra_items_from: + analyzed = self.analyze_type(typ) + p_analyzed = get_proper_type(analyzed) + if not isinstance(p_analyzed, TypedDictType): + if not isinstance(p_analyzed, (AnyType, PlaceholderType)): + self.fail("Can only merge-in other TypedDict", t, code=codes.VALID_TYPE) + continue + for sub_item_name, sub_item_type in p_analyzed.items.items(): + if sub_item_name in items: + self.fail(TYPEDDICT_OVERRIDE_MERGE.format(sub_item_name), t) + continue + items[sub_item_name] = sub_item_type + if sub_item_name in p_analyzed.required_keys: + req_keys.add(sub_item_name) + if sub_item_name in p_analyzed.readonly_keys: + readonly_keys.add(sub_item_name) + else: + required_keys = t.required_keys + fallback = t.fallback + return TypedDictType(items, required_keys, readonly_keys, fallback, t.line, t.column) + + def visit_raw_expression_type(self, t: RawExpressionType) -> Type: + # We should never see a bare Literal. We synthesize these raw literals + # in the earlier stages of semantic analysis, but those + # "fake literals" should always be wrapped in an UnboundType + # corresponding to 'Literal'. + # + # Note: if at some point in the distant future, we decide to + # make signatures like "foo(x: 20) -> None" legal, we can change + # this method so it generates and returns an actual LiteralType + # instead. + + if self.report_invalid_types: + if t.base_type_name in ("builtins.int", "builtins.bool"): + # The only time it makes sense to use an int or bool is inside of + # a literal type. + msg = f"Invalid type: try using Literal[{repr(t.literal_value)}] instead?" + elif t.base_type_name in ("builtins.float", "builtins.complex"): + # We special-case warnings for floats and complex numbers. 
+ msg = f"Invalid type: {t.simple_name()} literals cannot be used as a type" + else: + # And in all other cases, we default to a generic error message. + # Note: the reason why we use a generic error message for strings + # but not ints or bools is because whenever we see an out-of-place + # string, it's unclear if the user meant to construct a literal type + # or just misspelled a regular type. So we avoid guessing. + msg = "Invalid type comment or annotation" + + self.fail(msg, t, code=codes.VALID_TYPE) + if t.note is not None: + self.note(t.note, t, code=codes.VALID_TYPE) + + return AnyType(TypeOfAny.from_error, line=t.line, column=t.column) + + def visit_literal_type(self, t: LiteralType) -> Type: + return t + + def visit_union_type(self, t: UnionType) -> Type: + if ( + t.uses_pep604_syntax is True + and t.is_evaluated is True + and not self.always_allow_new_syntax + and not self.options.python_version >= (3, 10) + ): + self.fail("X | Y syntax for unions requires Python 3.10", t, code=codes.SYNTAX) + return UnionType(self.anal_array(t.items), t.line, uses_pep604_syntax=t.uses_pep604_syntax) + + def visit_partial_type(self, t: PartialType) -> Type: + assert False, "Internal error: Unexpected partial type" + + def visit_ellipsis_type(self, t: EllipsisType) -> Type: + if self.allow_ellipsis or self.allow_param_spec_literals: + any_type = AnyType(TypeOfAny.explicit) + return Parameters( + [any_type, any_type], [ARG_STAR, ARG_STAR2], [None, None], is_ellipsis_args=True + ) + else: + self.fail('Unexpected "..."', t) + return AnyType(TypeOfAny.from_error) + + def visit_type_type(self, t: TypeType) -> Type: + return TypeType.make_normalized( + self.anal_type(t.item), line=t.line, is_type_form=t.is_type_form + ) + + def visit_placeholder_type(self, t: PlaceholderType) -> Type: + n = ( + None + # No dot in fullname indicates we are at function scope, and recursive + # types are not supported there anyway, so we just give up. + if not t.fullname or "." not in t.fullname + else self.api.lookup_fully_qualified(t.fullname) + ) + if not n or isinstance(n.node, PlaceholderNode): + self.api.defer() # Still incomplete + return t + else: + # TODO: Handle non-TypeInfo + assert isinstance(n.node, TypeInfo) + return self.analyze_type_with_type_info(n.node, t.args, t, False) + + def analyze_callable_args_for_paramspec( + self, callable_args: Type, ret_type: Type, fallback: Instance + ) -> CallableType | None: + """Construct a 'Callable[P, RET]', where P is ParamSpec, return None if we cannot.""" + if not isinstance(callable_args, UnboundType): + return None + sym = self.lookup_qualified(callable_args.name, callable_args) + if sym is None: + return None + tvar_def = self.tvar_scope.get_binding(sym) + if not isinstance(tvar_def, ParamSpecType): + if ( + tvar_def is None + and self.allow_unbound_tvars + and isinstance(sym.node, ParamSpecExpr) + ): + # We are analyzing this type in runtime context (e.g. as type application). + # If it is not valid as a type in this position an error will be given later. 
+ return callable_with_ellipsis( + AnyType(TypeOfAny.explicit), ret_type=ret_type, fallback=fallback + ) + return None + elif ( + self.defining_alias + and self.not_declared_in_type_params(tvar_def.name) + and tvar_def not in self.allowed_alias_tvars + ): + if self.python_3_12_type_alias: + msg = message_registry.TYPE_PARAMETERS_SHOULD_BE_DECLARED.format( + f'"{tvar_def.name}"' + ) + else: + msg = f'ParamSpec "{tvar_def.name}" is not included in type_params' + self.fail(msg, callable_args, code=codes.VALID_TYPE) + return callable_with_ellipsis( + AnyType(TypeOfAny.special_form), ret_type=ret_type, fallback=fallback + ) + + return CallableType( + [ + paramspec_args( + tvar_def.name, tvar_def.fullname, tvar_def.id, named_type_func=self.named_type + ), + paramspec_kwargs( + tvar_def.name, tvar_def.fullname, tvar_def.id, named_type_func=self.named_type + ), + ], + [nodes.ARG_STAR, nodes.ARG_STAR2], + [None, None], + ret_type=ret_type, + fallback=fallback, + ) + + def analyze_callable_args_for_concatenate( + self, callable_args: Type, ret_type: Type, fallback: Instance + ) -> CallableType | AnyType | None: + """Construct a 'Callable[C, RET]', where C is Concatenate[..., P], returning None if we + cannot. + """ + if not isinstance(callable_args, UnboundType): + return None + sym = self.lookup_qualified(callable_args.name, callable_args) + if sym is None: + return None + if sym.node is None: + return None + if sym.node.fullname not in CONCATENATE_TYPE_NAMES: + return None + + tvar_def = self.anal_type(callable_args, allow_param_spec=True) + if not isinstance(tvar_def, (ParamSpecType, Parameters)): + if self.allow_unbound_tvars and isinstance(tvar_def, UnboundType): + sym = self.lookup_qualified(tvar_def.name, callable_args) + if sym is not None and isinstance(sym.node, ParamSpecExpr): + # We are analyzing this type in runtime context (e.g. as type application). + # If it is not valid as a type in this position an error will be given later. + return callable_with_ellipsis( + AnyType(TypeOfAny.explicit), ret_type=ret_type, fallback=fallback + ) + # Error was already given, so prevent further errors. + return AnyType(TypeOfAny.from_error) + if isinstance(tvar_def, Parameters): + # This comes from Concatenate[int, ...] + return CallableType( + arg_types=tvar_def.arg_types, + arg_names=tvar_def.arg_names, + arg_kinds=tvar_def.arg_kinds, + ret_type=ret_type, + fallback=fallback, + from_concatenate=True, + ) + + # ick, CallableType should take ParamSpecType + prefix = tvar_def.prefix + # we don't set the prefix here as generic arguments will get updated at some point + # in the future. CallableType.param_spec() accounts for this. + return CallableType( + [ + *prefix.arg_types, + paramspec_args( + tvar_def.name, tvar_def.fullname, tvar_def.id, named_type_func=self.named_type + ), + paramspec_kwargs( + tvar_def.name, tvar_def.fullname, tvar_def.id, named_type_func=self.named_type + ), + ], + [*prefix.arg_kinds, nodes.ARG_STAR, nodes.ARG_STAR2], + [*prefix.arg_names, None, None], + ret_type=ret_type, + fallback=fallback, + from_concatenate=True, + ) + + def analyze_callable_type(self, t: UnboundType) -> Type: + fallback = self.named_type("builtins.function") + if len(t.args) == 0: + # Callable (bare). Treat as Callable[..., Any]. 
+            any_type = self.get_omitted_any(t)
+            ret = callable_with_ellipsis(any_type, any_type, fallback)
+        elif len(t.args) == 2:
+            callable_args = t.args[0]
+            ret_type = t.args[1]
+            if isinstance(callable_args, TypeList):
+                # Callable[[ARG, ...], RET] (ordinary callable type)
+                analyzed_args = self.analyze_callable_args(callable_args)
+                if analyzed_args is None:
+                    return AnyType(TypeOfAny.from_error)
+                args, kinds, names = analyzed_args
+                ret = CallableType(args, kinds, names, ret_type=ret_type, fallback=fallback)
+            elif isinstance(callable_args, EllipsisType):
+                # Callable[..., RET] (with literal ellipsis; accept arbitrary arguments)
+                ret = callable_with_ellipsis(
+                    AnyType(TypeOfAny.explicit), ret_type=ret_type, fallback=fallback
+                )
+            else:
+                # Callable[P, RET] (where P is ParamSpec)
+                with self.tvar_scope_frame(namespace=""):
+                    # Temporarily bind ParamSpecs to allow code like this:
+                    #     my_fun: Callable[Q, Foo[Q]]
+                    # We usually do this later in visit_callable_type(), but the analysis
+                    # below happens at very early stage.
+                    variables = []
+                    for name, tvar_expr in self.find_type_var_likes(callable_args):
+                        variables.append(self.tvar_scope.bind_new(name, tvar_expr))
+                    maybe_ret = self.analyze_callable_args_for_paramspec(
+                        callable_args, ret_type, fallback
+                    ) or self.analyze_callable_args_for_concatenate(
+                        callable_args, ret_type, fallback
+                    )
+                    if isinstance(maybe_ret, CallableType):
+                        maybe_ret = maybe_ret.copy_modified(variables=variables)
+                if maybe_ret is None:
+                    # Callable[?, RET] (where ? is something invalid)
+                    self.fail(
+                        "The first argument to Callable must be a "
+                        'list of types, parameter specification, or "..."',
+                        t,
+                        code=codes.VALID_TYPE,
+                    )
+                    self.note(
+                        "See https://mypy.readthedocs.io/en/stable/kinds_of_types.html#callable-types-and-lambdas",
+                        t,
+                    )
+                    return AnyType(TypeOfAny.from_error)
+                elif isinstance(maybe_ret, AnyType):
+                    return maybe_ret
+                ret = maybe_ret
+        else:
+            if self.options.disallow_any_generics:
+                self.fail('Please use "Callable[[<parameters>], <return type>]"', t)
+            else:
+                self.fail('Please use "Callable[[<parameters>], <return type>]" or "Callable"', t)
+            return AnyType(TypeOfAny.from_error)
+        assert isinstance(ret, CallableType)
+        return ret.accept(self)
+
+    def refers_to_full_names(self, arg: UnboundType, names: Sequence[str]) -> bool:
+        sym = self.lookup_qualified(arg.name, arg)
+        if sym is not None:
+            if sym.fullname in names:
+                return True
+        return False
+
+    def analyze_callable_args(
+        self, arglist: TypeList
+    ) -> tuple[list[Type], list[ArgKind], list[str | None]] | None:
+        args: list[Type] = []
+        kinds: list[ArgKind] = []
+        names: list[str | None] = []
+        seen_unpack = False
+        unpack_types: list[Type] = []
+        invalid_unpacks: list[Type] = []
+        second_unpack_last = False
+        for i, arg in enumerate(arglist.items):
+            if isinstance(arg, CallableArgument):
+                args.append(arg.typ)
+                names.append(arg.name)
+                if arg.constructor is None:
+                    return None
+                found = self.lookup_qualified(arg.constructor, arg)
+                if found is None:
+                    # Looking it up already put an error message in
+                    return None
+                elif found.fullname not in ARG_KINDS_BY_CONSTRUCTOR:
+                    self.fail(f'Invalid argument constructor "{found.fullname}"', arg)
+                    return None
+                else:
+                    assert found.fullname is not None
+                    kind = ARG_KINDS_BY_CONSTRUCTOR[found.fullname]
+                    kinds.append(kind)
+                    if arg.name is not None and kind.is_star():
+                        self.fail(f"{arg.constructor} arguments should not have names", arg)
+                        return None
+            elif (
+                isinstance(arg, UnboundType)
+                and self.refers_to_full_names(arg, UNPACK_TYPE_NAMES)
+                or isinstance(arg, UnpackType)
+            ):
+                if
seen_unpack: + # Multiple unpacks, preserve them, so we can give an error later. + if i == len(arglist.items) - 1 and not invalid_unpacks: + # Special case: if there are just two unpacks, and the second one appears + # as last type argument, it can be still valid, if the second unpacked type + # is a TypedDict. This should be checked by the caller. + second_unpack_last = True + invalid_unpacks.append(arg) + continue + seen_unpack = True + unpack_types.append(arg) + else: + if seen_unpack: + unpack_types.append(arg) + else: + args.append(arg) + kinds.append(ARG_POS) + names.append(None) + if seen_unpack: + if len(unpack_types) == 1: + args.append(unpack_types[0]) + else: + first = unpack_types[0] + if isinstance(first, UnpackType): + # UnpackType doesn't have its own line/column numbers, + # so use the unpacked type for error messages. + first = first.type + args.append( + UnpackType(self.tuple_type(unpack_types, line=first.line, column=first.column)) + ) + kinds.append(ARG_STAR) + names.append(None) + for arg in invalid_unpacks: + args.append(arg) + kinds.append(ARG_STAR2 if second_unpack_last else ARG_STAR) + names.append(None) + # Note that arglist below is only used for error context. + check_arg_names(names, [arglist] * len(args), self.fail, "Callable") + check_arg_kinds(kinds, [arglist] * len(args), self.fail) + return args, kinds, names + + def analyze_literal_type(self, t: UnboundType) -> Type: + if len(t.args) == 0: + self.fail("Literal[...] must have at least one parameter", t, code=codes.VALID_TYPE) + return AnyType(TypeOfAny.from_error) + + output: list[Type] = [] + for i, arg in enumerate(t.args): + analyzed_types = self.analyze_literal_param(i + 1, arg, t) + if analyzed_types is None: + return AnyType(TypeOfAny.from_error) + else: + output.extend(analyzed_types) + return UnionType.make_union(output, line=t.line) + + def analyze_literal_param(self, idx: int, arg: Type, ctx: Context) -> list[Type] | None: + # This UnboundType was originally defined as a string. + if ( + isinstance(arg, ProperType) + and isinstance(arg, (UnboundType, UnionType)) + and arg.original_str_expr is not None + ): + assert arg.original_str_fallback is not None + return [ + LiteralType( + value=arg.original_str_expr, + fallback=self.named_type(arg.original_str_fallback), + line=arg.line, + column=arg.column, + ) + ] + + # If arg is an UnboundType that was *not* originally defined as + # a string, try expanding it in case it's a type alias or something. + if isinstance(arg, UnboundType): + self.nesting_level += 1 + try: + arg = self.visit_unbound_type(arg, defining_literal=True) + finally: + self.nesting_level -= 1 + + # Literal[...] cannot contain Any. Give up and add an error message + # (if we haven't already). + arg = get_proper_type(arg) + if isinstance(arg, AnyType): + # Note: We can encounter Literals containing 'Any' under three circumstances: + # + # 1. If the user attempts use an explicit Any as a parameter + # 2. If the user is trying to use an enum value imported from a module with + # no type hints, giving it an implicit type of 'Any' + # 3. If there's some other underlying problem with the parameter. + # + # We report an error in only the first two cases. In the third case, we assume + # some other region of the code has already reported a more relevant error. + # + # TODO: Once we start adding support for enums, make sure we report a custom + # error for case 2 as well. 
+ if arg.type_of_any not in (TypeOfAny.from_error, TypeOfAny.special_form): + self.fail( + f'Parameter {idx} of Literal[...] cannot be of type "Any"', + ctx, + code=codes.VALID_TYPE, + ) + return None + elif isinstance(arg, RawExpressionType): + # A raw literal. Convert it directly into a literal if we can. + if arg.literal_value is None: + name = arg.simple_name() + if name in ("float", "complex"): + msg = f'Parameter {idx} of Literal[...] cannot be of type "{name}"' + else: + msg = "Invalid type: Literal[...] cannot contain arbitrary expressions" + self.fail(msg, ctx, code=codes.VALID_TYPE) + # Note: we deliberately ignore arg.note here: the extra info might normally be + # helpful, but it generally won't make sense in the context of a Literal[...]. + return None + + # Remap bytes and unicode into the appropriate type for the correct Python version + fallback = self.named_type(arg.base_type_name) + assert isinstance(fallback, Instance) + return [LiteralType(arg.literal_value, fallback, line=arg.line, column=arg.column)] + elif isinstance(arg, (NoneType, LiteralType)): + # Types that we can just add directly to the literal/potential union of literals. + return [arg] + elif isinstance(arg, Instance) and arg.last_known_value is not None: + # Types generated from declarations like "var: Final = 4". + return [arg.last_known_value] + elif isinstance(arg, UnionType): + out = [] + for union_arg in arg.items: + union_result = self.analyze_literal_param(idx, union_arg, ctx) + if union_result is None: + return None + out.extend(union_result) + return out + else: + self.fail(f"Parameter {idx} of Literal[...] is invalid", ctx, code=codes.VALID_TYPE) + return None + + def analyze_type(self, typ: Type) -> Type: + return typ.accept(self) + + def fail(self, msg: str, ctx: Context, *, code: ErrorCode | None = None) -> None: + self.fail_func(msg, ctx, code=code) + + def note(self, msg: str, ctx: Context, *, code: ErrorCode | None = None) -> None: + self.note_func(msg, ctx, code=code) + + @contextmanager + def tvar_scope_frame(self, namespace: str) -> Iterator[None]: + old_scope = self.tvar_scope + self.tvar_scope = self.tvar_scope.method_frame(namespace) + yield + self.tvar_scope = old_scope + + def find_type_var_likes(self, t: Type) -> TypeVarLikeList: + visitor = FindTypeVarVisitor(self.api, self.tvar_scope) + t.accept(visitor) + return visitor.type_var_likes + + def infer_type_variables( + self, type: CallableType + ) -> tuple[list[tuple[str, TypeVarLikeExpr]], bool]: + """Infer type variables from a callable. + + Return tuple with these items: + - list of unique type variables referred to in a callable + - whether there is a reference to the Self type + """ + visitor = FindTypeVarVisitor(self.api, self.tvar_scope) + for arg in type.arg_types: + arg.accept(visitor) + + # When finding type variables in the return type of a function, don't + # look inside Callable types. Type variables only appearing in + # functions in the return type belong to those functions, not the + # function we're currently analyzing. 
+ visitor.include_callables = False + type.ret_type.accept(visitor) + + return visitor.type_var_likes, visitor.has_self_type + + def bind_function_type_variables( + self, fun_type: CallableType, defn: Context + ) -> tuple[tuple[TypeVarLikeType, ...], bool]: + """Find the type variables of the function type and bind them in our tvar_scope""" + has_self_type = False + if fun_type.variables: + defs = [] + for var in fun_type.variables: + if self.api.type and self.api.type.self_type and var == self.api.type.self_type: + has_self_type = True + continue + var_node = self.lookup_qualified(var.name, defn) + assert var_node, "Binding for function type variable not found within function" + var_expr = var_node.node + assert isinstance(var_expr, TypeVarLikeExpr) + binding = self.tvar_scope.bind_new(var.name, var_expr) + defs.append(binding) + return tuple(defs), has_self_type + typevars, has_self_type = self.infer_type_variables(fun_type) + # Do not define a new type variable if already defined in scope. + typevars = [ + (name, tvar) for name, tvar in typevars if not self.is_defined_type_var(name, defn) + ] + defs = [] + for name, tvar in typevars: + if not self.tvar_scope.allow_binding(tvar.fullname): + err_msg = message_registry.TYPE_VAR_REDECLARED_IN_NESTED_CLASS.format(name) + self.fail(err_msg.value, defn, code=err_msg.code) + binding = self.tvar_scope.bind_new(name, tvar) + defs.append(binding) + + return tuple(defs), has_self_type + + def is_defined_type_var(self, tvar: str, context: Context) -> bool: + tvar_node = self.lookup_qualified(tvar, context) + if not tvar_node: + return False + return self.tvar_scope.get_binding(tvar_node) is not None + + def anal_array( + self, + a: Iterable[Type], + nested: bool = True, + *, + allow_param_spec: bool = False, + allow_param_spec_literals: bool = False, + allow_unpack: bool = False, + ) -> list[Type]: + old_allow_param_spec_literals = self.allow_param_spec_literals + self.allow_param_spec_literals = allow_param_spec_literals + res: list[Type] = [] + for t in a: + res.append( + self.anal_type( + t, nested, allow_param_spec=allow_param_spec, allow_unpack=allow_unpack + ) + ) + self.allow_param_spec_literals = old_allow_param_spec_literals + return self.check_unpacks_in_list(res) + + def anal_type( + self, + t: Type, + nested: bool = True, + *, + allow_param_spec: bool = False, + allow_unpack: bool = False, + allow_ellipsis: bool = False, + allow_typed_dict_special_forms: bool = False, + allow_final: bool = False, + ) -> Type: + if nested: + self.nesting_level += 1 + old_allow_typed_dict_special_forms = self.allow_typed_dict_special_forms + self.allow_typed_dict_special_forms = allow_typed_dict_special_forms + self.allow_final = allow_final + old_allow_ellipsis = self.allow_ellipsis + self.allow_ellipsis = allow_ellipsis + old_allow_unpack = self.allow_unpack + self.allow_unpack = allow_unpack + try: + analyzed = t.accept(self) + finally: + if nested: + self.nesting_level -= 1 + self.allow_typed_dict_special_forms = old_allow_typed_dict_special_forms + self.allow_ellipsis = old_allow_ellipsis + self.allow_unpack = old_allow_unpack + if ( + not allow_param_spec + and isinstance(analyzed, ParamSpecType) + and analyzed.flavor == ParamSpecFlavor.BARE + ): + if analyzed.prefix.arg_types: + self.fail("Invalid location for Concatenate", t, code=codes.VALID_TYPE) + self.note("You can use Concatenate as the first argument to Callable", t) + analyzed = AnyType(TypeOfAny.from_error) + else: + self.fail( + INVALID_PARAM_SPEC_LOCATION.format(format_type(analyzed, 
self.options)), + t, + code=codes.VALID_TYPE, + ) + self.note( + INVALID_PARAM_SPEC_LOCATION_NOTE.format(analyzed.name), + t, + code=codes.VALID_TYPE, + ) + analyzed = AnyType(TypeOfAny.from_error) + return analyzed + + def anal_var_def(self, var_def: TypeVarLikeType) -> TypeVarLikeType: + if isinstance(var_def, TypeVarType): + return TypeVarType( + name=var_def.name, + fullname=var_def.fullname, + id=var_def.id, + values=self.anal_array(var_def.values), + upper_bound=var_def.upper_bound.accept(self), + default=var_def.default.accept(self), + variance=var_def.variance, + line=var_def.line, + column=var_def.column, + ) + else: + return var_def + + def anal_var_defs(self, var_defs: Sequence[TypeVarLikeType]) -> list[TypeVarLikeType]: + return [self.anal_var_def(vd) for vd in var_defs] + + def named_type( + self, fullname: str, args: list[Type] | None = None, line: int = -1, column: int = -1 + ) -> Instance: + node = self.lookup_fully_qualified(fullname) + assert isinstance(node.node, TypeInfo) + any_type = AnyType(TypeOfAny.special_form) + if args is not None: + args = self.check_unpacks_in_list(args) + return Instance( + node.node, args or [any_type] * len(node.node.defn.type_vars), line=line, column=column + ) + + def check_unpacks_in_list(self, items: list[Type]) -> list[Type]: + new_items: list[Type] = [] + num_unpacks = 0 + final_unpack = None + for item in items: + # TODO: handle forward references here, they appear as Unpack[Any]. + if isinstance(item, UnpackType) and not isinstance( + get_proper_type(item.type), TupleType + ): + if not num_unpacks: + new_items.append(item) + num_unpacks += 1 + final_unpack = item + else: + new_items.append(item) + + if num_unpacks > 1: + assert final_unpack is not None + self.fail("More than one variadic Unpack in a type is not allowed", final_unpack.type) + return new_items + + def tuple_type(self, items: list[Type], line: int, column: int) -> TupleType: + any_type = AnyType(TypeOfAny.special_form) + return TupleType( + items, fallback=self.named_type("builtins.tuple", [any_type]), line=line, column=column + ) + + +TypeVarLikeList = list[tuple[str, TypeVarLikeExpr]] + + +class MsgCallback(Protocol): + def __call__( + self, __msg: str, __ctx: Context, *, code: ErrorCode | None = None + ) -> ErrorInfo | None: ... 
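+
+# Summary of the module-level helpers below (descriptive comment only): they
+# repair under-specified generic types outside the TypeAnalyser visitor.
+# get_omitted_any() decides whether an omitted type argument becomes a plain
+# Any or an error under disallow_any_generics; fix_instance() fills in missing
+# Instance type arguments from TypeVar defaults or Any; instantiate_type_alias()
+# expands a (possibly generic) TypeAlias with the given arguments and reports
+# arity errors; set_any_tvars() substitutes defaults/Any for alias type variables.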
+ + +def get_omitted_any( + disallow_any: bool, + fail: MsgCallback, + note: MsgCallback, + orig_type: Type, + options: Options, + fullname: str | None = None, + unexpanded_type: Type | None = None, +) -> AnyType: + if disallow_any: + typ = unexpanded_type or orig_type + type_str = typ.name if isinstance(typ, UnboundType) else format_type_bare(typ, options) + + fail( + message_registry.BARE_GENERIC.format(quote_type_string(type_str)), + typ, + code=codes.TYPE_ARG, + ) + + any_type = AnyType(TypeOfAny.from_error, line=typ.line, column=typ.column) + else: + any_type = AnyType( + TypeOfAny.from_omitted_generics, line=orig_type.line, column=orig_type.column + ) + return any_type + + +def fix_type_var_tuple_argument(t: Instance) -> None: + if t.type.has_type_var_tuple_type: + args = list(t.args) + assert t.type.type_var_tuple_prefix is not None + tvt = t.type.defn.type_vars[t.type.type_var_tuple_prefix] + assert isinstance(tvt, TypeVarTupleType) + args[t.type.type_var_tuple_prefix] = UnpackType( + Instance(tvt.tuple_fallback.type, [args[t.type.type_var_tuple_prefix]]) + ) + t.args = tuple(args) + + +def fix_instance( + t: Instance, + fail: MsgCallback, + note: MsgCallback, + disallow_any: bool, + options: Options, + use_generic_error: bool = False, + unexpanded_type: Type | None = None, +) -> None: + """Fix a malformed instance by replacing all type arguments with TypeVar default or Any. + + Also emit a suitable error if this is not due to implicit Any's. + """ + arg_count = len(t.args) + min_tv_count = sum(not tv.has_default() for tv in t.type.defn.type_vars) + max_tv_count = len(t.type.type_vars) + if arg_count < min_tv_count or arg_count > max_tv_count: + # Don't use existing args if arg_count doesn't match + if arg_count > max_tv_count: + # Already wrong arg count error, don't emit missing type parameters error as well. + disallow_any = False + t.args = () + arg_count = 0 + + args: list[Type] = [*(t.args[:max_tv_count])] + any_type: AnyType | None = None + env: dict[TypeVarId, Type] = {} + + for tv, arg in itertools.zip_longest(t.type.defn.type_vars, t.args, fillvalue=None): + if tv is None: + continue + if arg is None: + if tv.has_default(): + arg = tv.default + else: + if any_type is None: + fullname = None if use_generic_error else t.type.fullname + any_type = get_omitted_any( + disallow_any, fail, note, t, options, fullname, unexpanded_type + ) + arg = any_type + args.append(arg) + env[tv.id] = arg + t.args = tuple(args) + fix_type_var_tuple_argument(t) + if not t.type.has_type_var_tuple_type: + with state.strict_optional_set(options.strict_optional): + fixed = expand_type(t, env) + assert isinstance(fixed, Instance) + t.args = fixed.args + + +def instantiate_type_alias( + node: TypeAlias, + args: list[Type], + fail: MsgCallback, + no_args: bool, + ctx: Context, + options: Options, + *, + unexpanded_type: Type | None = None, + disallow_any: bool = False, + use_standard_error: bool = False, + empty_tuple_index: bool = False, +) -> Type: + """Create an instance of a (generic) type alias from alias node and type arguments. + + We are following the rules outlined in TypeAlias docstring. 
+ Here: + node: type alias node (definition) + args: type arguments (types to be substituted in place of type variables + when expanding the alias) + fail: error reporter callback + no_args: whether original definition used a bare generic `A = List` + ctx: context where expansion happens + unexpanded_type, disallow_any, use_standard_error: used to customize error messages + """ + # Type aliases are special, since they can be expanded during semantic analysis, + # so we need to normalize them as soon as possible. + # TODO: can this cause an infinite recursion? + args = flatten_nested_tuples(args) + if any(unknown_unpack(a) for a in args): + # This type is not ready to be validated, because of unknown total count. + # Note that we keep the kind of Any for consistency. + return set_any_tvars(node, [], ctx.line, ctx.column, options, special_form=True) + + max_tv_count = len(node.alias_tvars) + act_len = len(args) + if ( + max_tv_count > 0 + and act_len == 0 + and not (empty_tuple_index and node.tvar_tuple_index is not None) + ): + # Interpret bare Alias same as normal generic, i.e., Alias[Any, Any, ...] + return set_any_tvars( + node, + args, + ctx.line, + ctx.column, + options, + disallow_any=disallow_any, + fail=fail, + unexpanded_type=unexpanded_type, + ) + if max_tv_count == 0 and act_len == 0: + if no_args: + assert isinstance(node.target, Instance) # type: ignore[misc] + # Note: this is the only case where we use an eager expansion. See more info about + # no_args aliases like L = List in the docstring for TypeAlias class. + return Instance(node.target.type, [], line=ctx.line, column=ctx.column) + return TypeAliasType(node, [], line=ctx.line, column=ctx.column) + if ( + max_tv_count == 0 + and act_len > 0 + and isinstance(node.target, Instance) # type: ignore[misc] + and no_args + ): + tp = Instance(node.target.type, args) + tp.line = ctx.line + tp.column = ctx.column + tp.end_line = ctx.end_line + tp.end_column = ctx.end_column + return tp + if node.tvar_tuple_index is None: + if any(isinstance(a, UnpackType) for a in args): + # A variadic unpack in fixed size alias (fixed unpacks must be flattened by the caller) + fail(message_registry.INVALID_UNPACK_POSITION, ctx, code=codes.VALID_TYPE) + return set_any_tvars(node, [], ctx.line, ctx.column, options, from_error=True) + min_tv_count = sum(not tv.has_default() for tv in node.alias_tvars) + fill_typevars = act_len != max_tv_count + correct = min_tv_count <= act_len <= max_tv_count + else: + min_tv_count = sum( + not tv.has_default() and not isinstance(tv, TypeVarTupleType) + for tv in node.alias_tvars + ) + correct = act_len >= min_tv_count + for a in args: + if isinstance(a, UnpackType): + unpacked = get_proper_type(a.type) + if isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple": + # Variadic tuple is always correct. + correct = True + fill_typevars = not correct + if fill_typevars: + if not correct: + if use_standard_error: + # This is used if type alias is an internal representation of another type, + # for example a generic TypedDict or NamedTuple. 
+ msg = wrong_type_arg_count(max_tv_count, max_tv_count, str(act_len), node.name) + else: + if node.tvar_tuple_index is not None: + msg = ( + "Bad number of arguments for type alias," + f" expected at least {min_tv_count}, given {act_len}" + ) + elif min_tv_count != max_tv_count: + msg = ( + "Bad number of arguments for type alias," + f" expected between {min_tv_count} and {max_tv_count}, given {act_len}" + ) + else: + msg = ( + "Bad number of arguments for type alias," + f" expected {min_tv_count}, given {act_len}" + ) + fail(msg, ctx, code=codes.TYPE_ARG) + args = [] + return set_any_tvars(node, args, ctx.line, ctx.column, options, from_error=True) + elif node.tvar_tuple_index is not None: + # We also need to check if we are not performing a type variable tuple split. + unpack = find_unpack_in_list(args) + if unpack is not None: + unpack_arg = args[unpack] + assert isinstance(unpack_arg, UnpackType) + if isinstance(unpack_arg.type, TypeVarTupleType): + exp_prefix = node.tvar_tuple_index + act_prefix = unpack + exp_suffix = len(node.alias_tvars) - node.tvar_tuple_index - 1 + act_suffix = len(args) - unpack - 1 + if act_prefix < exp_prefix or act_suffix < exp_suffix: + fail("TypeVarTuple cannot be split", ctx, code=codes.TYPE_ARG) + return set_any_tvars(node, [], ctx.line, ctx.column, options, from_error=True) + # TODO: we need to check args validity w.r.t alias.alias_tvars. + # Otherwise invalid instantiations will be allowed in runtime context. + # Note: in type context, these will be still caught by semanal_typeargs. + typ = TypeAliasType(node, args, ctx.line, ctx.column) + assert typ.alias is not None + # HACK: Implement FlexibleAlias[T, typ] by expanding it to typ here. + if ( + isinstance(typ.alias.target, Instance) # type: ignore[misc] + and typ.alias.target.type.fullname == "mypy_extensions.FlexibleAlias" + ): + exp = get_proper_type(typ) + assert isinstance(exp, Instance) + return exp.args[-1] + return typ + + +def set_any_tvars( + node: TypeAlias, + args: list[Type], + newline: int, + newcolumn: int, + options: Options, + *, + from_error: bool = False, + disallow_any: bool = False, + special_form: bool = False, + fail: MsgCallback | None = None, + unexpanded_type: Type | None = None, +) -> TypeAliasType: + if from_error or disallow_any: + type_of_any = TypeOfAny.from_error + elif special_form: + type_of_any = TypeOfAny.special_form + else: + type_of_any = TypeOfAny.from_omitted_generics + any_type = AnyType(type_of_any, line=newline, column=newcolumn) + + env: dict[TypeVarId, Type] = {} + used_any_type = False + has_type_var_tuple_type = False + for tv, arg in itertools.zip_longest(node.alias_tvars, args, fillvalue=None): + if tv is None: + continue + if arg is None: + if tv.has_default(): + arg = tv.default + else: + arg = any_type + used_any_type = True + if isinstance(tv, TypeVarTupleType): + # TODO Handle TypeVarTuple defaults + has_type_var_tuple_type = True + arg = UnpackType(Instance(tv.tuple_fallback.type, [any_type])) + args.append(arg) + env[tv.id] = arg + t = TypeAliasType(node, args, newline, newcolumn) + if not has_type_var_tuple_type: + with state.strict_optional_set(options.strict_optional): + fixed = expand_type(t, env) + assert isinstance(fixed, TypeAliasType) + t.args = fixed.args + + if used_any_type and disallow_any and node.alias_tvars: + assert fail is not None + if unexpanded_type: + type_str = ( + unexpanded_type.name + if isinstance(unexpanded_type, UnboundType) + else format_type_bare(unexpanded_type, options) + ) + else: + type_str = node.name + + fail( 
+ message_registry.BARE_GENERIC.format(quote_type_string(type_str)), + Context(newline, newcolumn), + code=codes.TYPE_ARG, + ) + return t + + +class DivergingAliasDetector(TrivialSyntheticTypeTranslator): + """See docstring of detect_diverging_alias() for details.""" + + # TODO: this doesn't really need to be a translator, but we don't have a trivial visitor. + def __init__( + self, + seen_nodes: set[TypeAlias], + lookup: Callable[[str, Context], SymbolTableNode | None], + scope: TypeVarLikeScope, + ) -> None: + super().__init__() + self.seen_nodes = seen_nodes + self.lookup = lookup + self.scope = scope + self.diverging = False + + def visit_type_alias_type(self, t: TypeAliasType) -> Type: + assert t.alias is not None, f"Unfixed type alias {t.type_ref}" + if t.alias in self.seen_nodes: + for arg in t.args: + if not ( + isinstance(arg, TypeVarLikeType) + or isinstance(arg, UnpackType) + and isinstance(arg.type, TypeVarLikeType) + ) and has_type_vars(arg): + self.diverging = True + return t + # All clear for this expansion chain. + return t + new_nodes = self.seen_nodes | {t.alias} + visitor = DivergingAliasDetector(new_nodes, self.lookup, self.scope) + _ = get_proper_type(t).accept(visitor) + if visitor.diverging: + self.diverging = True + return t + + +def detect_diverging_alias( + node: TypeAlias, + target: Type, + lookup: Callable[[str, Context], SymbolTableNode | None], + scope: TypeVarLikeScope, +) -> bool: + """This detects type aliases that will diverge during type checking. + + For example F = Something[..., F[List[T]]]. At each expansion step this will produce + *new* type aliases: e.g. F[List[int]], F[List[List[int]]], etc. So we can't detect + recursion. It is a known problem in the literature, recursive aliases and generic types + don't always go well together. It looks like there is no known systematic solution yet. + + # TODO: should we handle such aliases using type_recursion counter and some large limit? + They may be handy in rare cases, e.g. to express a union of non-mixed nested lists: + Nested = Union[T, Nested[List[T]]] ~> Union[T, List[T], List[List[T]], ...] + """ + visitor = DivergingAliasDetector({node}, lookup, scope) + _ = target.accept(visitor) + return visitor.diverging + + +def check_for_explicit_any( + typ: Type | None, + options: Options, + is_typeshed_stub: bool, + msg: MessageBuilder, + context: Context, +) -> None: + if options.disallow_any_explicit and not is_typeshed_stub and typ and has_explicit_any(typ): + msg.explicit_any(context) + + +def has_explicit_any(t: Type) -> bool: + """ + Whether this type is or type it contains is an Any coming from explicit type annotation + """ + return t.accept(HasExplicitAny()) + + +class HasExplicitAny(BoolTypeQuery): + def __init__(self) -> None: + super().__init__(ANY_STRATEGY) + + def visit_any(self, t: AnyType) -> bool: + return t.type_of_any == TypeOfAny.explicit + + def visit_typeddict_type(self, t: TypedDictType) -> bool: + # typeddict is checked during TypedDict declaration, so don't typecheck it here. + return False + + +def has_any_from_unimported_type(t: Type) -> bool: + """Return true if this type is Any because an import was not followed. + + If type t is such Any type or has type arguments that contain such Any type + this function will return true. 
+ """ + return t.accept(HasAnyFromUnimportedType()) + + +class HasAnyFromUnimportedType(BoolTypeQuery): + def __init__(self) -> None: + super().__init__(ANY_STRATEGY) + + def visit_any(self, t: AnyType) -> bool: + return t.type_of_any == TypeOfAny.from_unimported_type + + def visit_typeddict_type(self, t: TypedDictType) -> bool: + # typeddict is checked during TypedDict declaration, so don't typecheck it here + return False + + +def collect_all_inner_types(t: Type) -> list[Type]: + """ + Return all types that `t` contains + """ + return t.accept(CollectAllInnerTypesQuery()) + + +class CollectAllInnerTypesQuery(TypeQuery[list[Type]]): + def query_types(self, types: Iterable[Type]) -> list[Type]: + return self.strategy([t.accept(self) for t in types]) + list(types) + + def strategy(self, items: Iterable[list[Type]]) -> list[Type]: + return list(itertools.chain.from_iterable(items)) + + +def make_optional_type(t: Type) -> Type: + """Return the type corresponding to Optional[t]. + + Note that we can't use normal union simplification, since this function + is called during semantic analysis and simplification only works during + type checking. + """ + if isinstance(t, ProperType) and isinstance(t, NoneType): + return t + elif isinstance(t, ProperType) and isinstance(t, UnionType): + # Eagerly expanding aliases is not safe during semantic analysis. + items = [item for item in t.items if not isinstance(get_proper_type(item), NoneType)] + return UnionType(items + [NoneType()], t.line, t.column) + else: + return UnionType([t, NoneType()], t.line, t.column) + + +def validate_instance(t: Instance, fail: MsgCallback, empty_tuple_index: bool) -> bool: + """Check if this is a well-formed instance with respect to argument count/positions.""" + # TODO: combine logic with instantiate_type_alias(). + if any(unknown_unpack(a) for a in t.args): + # This type is not ready to be validated, because of unknown total count. + # TODO: is it OK to fill with TypeOfAny.from_error instead of special form? + return False + if t.type.has_type_var_tuple_type: + min_tv_count = sum( + not tv.has_default() and not isinstance(tv, TypeVarTupleType) + for tv in t.type.defn.type_vars + ) + correct = len(t.args) >= min_tv_count + if any( + isinstance(a, UnpackType) and isinstance(get_proper_type(a.type), Instance) + for a in t.args + ): + correct = True + if not t.args: + if not (empty_tuple_index and len(t.type.type_vars) == 1): + # The Any arguments should be set by the caller. + if empty_tuple_index and min_tv_count: + fail( + f"At least {min_tv_count} type argument(s) expected, none given", + t, + code=codes.TYPE_ARG, + ) + return False + elif not correct: + fail( + f"Bad number of arguments, expected: at least {min_tv_count}, given: {len(t.args)}", + t, + code=codes.TYPE_ARG, + ) + return False + else: + # We also need to check if we are not performing a type variable tuple split. 
+ unpack = find_unpack_in_list(t.args) + if unpack is not None: + unpack_arg = t.args[unpack] + assert isinstance(unpack_arg, UnpackType) + if isinstance(unpack_arg.type, TypeVarTupleType): + assert t.type.type_var_tuple_prefix is not None + assert t.type.type_var_tuple_suffix is not None + exp_prefix = t.type.type_var_tuple_prefix + act_prefix = unpack + exp_suffix = t.type.type_var_tuple_suffix + act_suffix = len(t.args) - unpack - 1 + if act_prefix < exp_prefix or act_suffix < exp_suffix: + fail("TypeVarTuple cannot be split", t, code=codes.TYPE_ARG) + return False + elif any(isinstance(a, UnpackType) for a in t.args): + # A variadic unpack in fixed size instance (fixed unpacks must be flattened by the caller) + fail(message_registry.INVALID_UNPACK_POSITION, t, code=codes.VALID_TYPE) + t.args = () + return False + elif len(t.args) != len(t.type.type_vars): + # Invalid number of type parameters. + arg_count = len(t.args) + min_tv_count = sum(not tv.has_default() for tv in t.type.defn.type_vars) + max_tv_count = len(t.type.type_vars) + if arg_count and (arg_count < min_tv_count or arg_count > max_tv_count): + fail( + wrong_type_arg_count(min_tv_count, max_tv_count, str(arg_count), t.type.name), + t, + code=codes.TYPE_ARG, + ) + t.invalid = True + return False + return True + + +def find_self_type(typ: Type, lookup: Callable[[str], SymbolTableNode | None]) -> bool: + return typ.accept(HasSelfType(lookup)) + + +class HasSelfType(BoolTypeQuery): + def __init__(self, lookup: Callable[[str], SymbolTableNode | None]) -> None: + self.lookup = lookup + super().__init__(ANY_STRATEGY) + + def visit_unbound_type(self, t: UnboundType) -> bool: + sym = self.lookup(t.name) + if sym and sym.fullname in SELF_TYPE_NAMES: + return True + return super().visit_unbound_type(t) + + +def unknown_unpack(t: Type) -> bool: + """Check if a given type is an unpack of an unknown type. + + Unfortunately, there is no robust way to distinguish forward references from + genuine undefined names here. But this worked well so far, although it looks + quite fragile. 
+ """ + if isinstance(t, UnpackType): + unpacked = get_proper_type(t.type) + if isinstance(unpacked, AnyType) and unpacked.type_of_any == TypeOfAny.special_form: + return True + return False + + +class FindTypeVarVisitor(SyntheticTypeVisitor[None]): + """Type visitor that looks for type variable types and self types.""" + + def __init__(self, api: SemanticAnalyzerCoreInterface, scope: TypeVarLikeScope) -> None: + self.api = api + self.scope = scope + self.type_var_likes: list[tuple[str, TypeVarLikeExpr]] = [] + self.has_self_type = False + self.include_callables = True + + def _seems_like_callable(self, type: UnboundType) -> bool: + if not type.args: + return False + return isinstance(type.args[0], (EllipsisType, TypeList, ParamSpecType)) + + def visit_unbound_type(self, t: UnboundType) -> None: + name = t.name + node = self.api.lookup_qualified(name, t) + if node and node.fullname in SELF_TYPE_NAMES: + self.has_self_type = True + if ( + node + and isinstance(node.node, TypeVarLikeExpr) + and self.scope.get_binding(node) is None + ): + if (name, node.node) not in self.type_var_likes: + self.type_var_likes.append((name, node.node)) + elif not self.include_callables and self._seems_like_callable(t): + if find_self_type( + t, lambda name: self.api.lookup_qualified(name, t, suppress_errors=True) + ): + self.has_self_type = True + return + elif node and node.fullname in LITERAL_TYPE_NAMES: + return + elif node and node.fullname in ANNOTATED_TYPE_NAMES and t.args: + # Don't query the second argument to Annotated for TypeVars + self.process_types([t.args[0]]) + elif t.args: + self.process_types(t.args) + + def visit_type_list(self, t: TypeList) -> None: + self.process_types(t.items) + + def visit_callable_argument(self, t: CallableArgument) -> None: + t.typ.accept(self) + + def visit_any(self, t: AnyType) -> None: + pass + + def visit_uninhabited_type(self, t: UninhabitedType) -> None: + pass + + def visit_none_type(self, t: NoneType) -> None: + pass + + def visit_erased_type(self, t: ErasedType) -> None: + pass + + def visit_deleted_type(self, t: DeletedType) -> None: + pass + + def visit_type_var(self, t: TypeVarType) -> None: + self.process_types([t.upper_bound, t.default] + t.values) + + def visit_param_spec(self, t: ParamSpecType) -> None: + self.process_types([t.upper_bound, t.default, t.prefix]) + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> None: + self.process_types([t.upper_bound, t.default]) + + def visit_unpack_type(self, t: UnpackType) -> None: + self.process_types([t.type]) + + def visit_parameters(self, t: Parameters) -> None: + self.process_types(t.arg_types) + + def visit_partial_type(self, t: PartialType) -> None: + pass + + def visit_instance(self, t: Instance) -> None: + self.process_types(t.args) + + def visit_callable_type(self, t: CallableType) -> None: + # FIX generics + self.process_types(t.arg_types) + t.ret_type.accept(self) + + def visit_tuple_type(self, t: TupleType) -> None: + self.process_types(t.items) + + def visit_typeddict_type(self, t: TypedDictType) -> None: + self.process_types(list(t.items.values())) + + def visit_raw_expression_type(self, t: RawExpressionType) -> None: + pass + + def visit_literal_type(self, t: LiteralType) -> None: + pass + + def visit_union_type(self, t: UnionType) -> None: + self.process_types(t.items) + + def visit_overloaded(self, t: Overloaded) -> None: + for it in t.items: + it.accept(self) + + def visit_type_type(self, t: TypeType) -> None: + t.item.accept(self) + + def visit_ellipsis_type(self, t: EllipsisType) -> None: 
+ pass + + def visit_placeholder_type(self, t: PlaceholderType) -> None: + return self.process_types(t.args) + + def visit_type_alias_type(self, t: TypeAliasType) -> None: + self.process_types(t.args) + + def process_types(self, types: list[Type] | tuple[Type, ...]) -> None: + # Redundant type check helps mypyc. + if isinstance(types, list): + for t in types: + t.accept(self) + else: + for t in types: + t.accept(self) + + +class TypeVarDefaultTranslator(TrivialSyntheticTypeTranslator): + """Type translate visitor that replaces UnboundTypes with in-scope TypeVars.""" + + def __init__( + self, api: SemanticAnalyzerInterface, tvar_expr_name: str, context: Context + ) -> None: + super().__init__() + self.api = api + self.tvar_expr_name = tvar_expr_name + self.context = context + + def visit_unbound_type(self, t: UnboundType) -> Type: + sym = self.api.lookup_qualified(t.name, t, suppress_errors=True) + if sym is not None: + if type_var := self.api.tvar_scope.get_binding(sym): + return type_var + if isinstance(sym.node, TypeVarLikeExpr): + self.api.fail( + f'Type parameter "{self.tvar_expr_name}" has a default type ' + "that refers to one or more type variables that are out of scope", + self.context, + ) + return AnyType(TypeOfAny.from_error) + return super().visit_unbound_type(t) + + def visit_type_alias_type(self, t: TypeAliasType) -> Type: + # TypeAliasTypes are analyzed separately already, just return it + return t diff --git a/.venv/lib/python3.12/site-packages/mypy/typeops.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/typeops.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..d07bba1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/typeops.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/typeops.py b/.venv/lib/python3.12/site-packages/mypy/typeops.py new file mode 100644 index 0000000..f664674 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeops.py @@ -0,0 +1,1314 @@ +"""Miscellaneous type operations and helpers for use during type checking. + +NOTE: These must not be accessed from mypy.nodes or mypy.types to avoid import + cycles. These must not be called from the semantic analysis main pass + since these may assume that MROs are ready. 
+""" + +from __future__ import annotations + +import itertools +from collections.abc import Iterable, Sequence +from typing import Any, Callable, TypeVar, cast + +from mypy.checker_state import checker_state +from mypy.copytype import copy_type +from mypy.expandtype import expand_type, expand_type_by_instance +from mypy.maptype import map_instance_to_supertype +from mypy.nodes import ( + ARG_POS, + ARG_STAR, + ARG_STAR2, + SYMBOL_FUNCBASE_TYPES, + Decorator, + Expression, + FuncBase, + FuncDef, + FuncItem, + OverloadedFuncDef, + StrExpr, + SymbolNode, + TypeInfo, + Var, +) +from mypy.state import state +from mypy.types import ( + AnyType, + CallableType, + ExtraAttrs, + FormalArgument, + FunctionLike, + Instance, + LiteralType, + NoneType, + NormalizedCallableType, + Overloaded, + Parameters, + ParamSpecType, + PartialType, + ProperType, + TupleType, + Type, + TypeAliasType, + TypedDictType, + TypeOfAny, + TypeQuery, + TypeType, + TypeVarLikeType, + TypeVarTupleType, + TypeVarType, + UninhabitedType, + UnionType, + UnpackType, + flatten_nested_unions, + get_proper_type, + get_proper_types, + remove_dups, +) +from mypy.typetraverser import TypeTraverserVisitor +from mypy.typevars import fill_typevars + + +def is_recursive_pair(s: Type, t: Type) -> bool: + """Is this a pair of recursive types? + + There may be more cases, and we may be forced to use e.g. has_recursive_types() + here, but this function is called in very hot code, so we try to keep it simple + and return True only in cases we know may have problems. + """ + if isinstance(s, TypeAliasType) and s.is_recursive: + return ( + isinstance(get_proper_type(t), (Instance, UnionType)) + or isinstance(t, TypeAliasType) + and t.is_recursive + # Tuple types are special, they can cause an infinite recursion even if + # the other type is not recursive, because of the tuple fallback that is + # calculated "on the fly". + or isinstance(get_proper_type(s), TupleType) + ) + if isinstance(t, TypeAliasType) and t.is_recursive: + return ( + isinstance(get_proper_type(s), (Instance, UnionType)) + or isinstance(s, TypeAliasType) + and s.is_recursive + # Same as above. + or isinstance(get_proper_type(t), TupleType) + ) + return False + + +def tuple_fallback(typ: TupleType) -> Instance: + """Return fallback type for a tuple.""" + info = typ.partial_fallback.type + if info.fullname != "builtins.tuple": + return typ.partial_fallback + items = [] + for item in typ.items: + if isinstance(item, UnpackType): + unpacked_type = get_proper_type(item.type) + if isinstance(unpacked_type, TypeVarTupleType): + unpacked_type = get_proper_type(unpacked_type.upper_bound) + if ( + isinstance(unpacked_type, Instance) + and unpacked_type.type.fullname == "builtins.tuple" + ): + items.append(unpacked_type.args[0]) + else: + raise NotImplementedError + else: + items.append(item) + return Instance( + info, + # Note: flattening recursive unions is dangerous, since it can fool recursive + # types optimization in subtypes.py and go into infinite recursion. 
+ [make_simplified_union(items, handle_recursive=False)], + extra_attrs=typ.partial_fallback.extra_attrs, + ) + + +def get_self_type(func: CallableType, def_info: TypeInfo) -> Type | None: + default_self = fill_typevars(def_info) + if isinstance(get_proper_type(func.ret_type), UninhabitedType): + return func.ret_type + elif func.arg_types and func.arg_types[0] != default_self and func.arg_kinds[0] == ARG_POS: + return func.arg_types[0] + else: + return None + + +def type_object_type(info: TypeInfo, named_type: Callable[[str], Instance]) -> ProperType: + """Return the type of a type object. + + For a generic type G with type variables T and S the type is generally of form + + Callable[..., G[T, S]] + + where ... are argument types for the __init__/__new__ method (without the self + argument). Also, the fallback type will be 'type' instead of 'function'. + """ + allow_cache = ( + checker_state.type_checker is not None + and checker_state.type_checker.allow_constructor_cache + ) + + if info.type_object_type is not None: + if allow_cache: + return info.type_object_type + info.type_object_type = None + + # We take the type from whichever of __init__ and __new__ is first + # in the MRO, preferring __init__ if there is a tie. + init_method = info.get("__init__") + new_method = info.get("__new__") + if not init_method or not is_valid_constructor(init_method.node): + # Must be an invalid class definition. + return AnyType(TypeOfAny.from_error) + # There *should* always be a __new__ method except the test stubs + # lack it, so just copy init_method in that situation + new_method = new_method or init_method + if not is_valid_constructor(new_method.node): + # Must be an invalid class definition. + return AnyType(TypeOfAny.from_error) + + # The two is_valid_constructor() checks ensure this. + assert isinstance(new_method.node, (SYMBOL_FUNCBASE_TYPES, Decorator)) + assert isinstance(init_method.node, (SYMBOL_FUNCBASE_TYPES, Decorator)) + + init_index = info.mro.index(init_method.node.info) + new_index = info.mro.index(new_method.node.info) + + if info.metaclass_type is not None: + fallback = info.metaclass_type + elif checker_state.type_checker: + # Prefer direct call when it is available. It is faster, and, + # unfortunately, some callers provide bogus callback. + fallback = checker_state.type_checker.named_type("builtins.type") + else: + fallback = named_type("builtins.type") + + if init_index < new_index: + method: FuncBase | Decorator = init_method.node + is_new = False + elif init_index > new_index: + method = new_method.node + is_new = True + else: + if init_method.node.info.fullname == "builtins.object": + # Both are defined by object. But if we've got a bogus + # base class, we can't know for sure, so check for that. + if info.fallback_to_any: + # Construct a universal callable as the prototype. + any_type = AnyType(TypeOfAny.special_form) + sig = CallableType( + arg_types=[any_type, any_type], + arg_kinds=[ARG_STAR, ARG_STAR2], + arg_names=["_args", "_kwds"], + ret_type=any_type, + is_bound=True, + fallback=named_type("builtins.function"), + ) + result: FunctionLike = class_callable(sig, info, fallback, None, is_new=False) + if allow_cache and state.strict_optional: + info.type_object_type = result + return result + + # Otherwise prefer __init__ in a tie. It isn't clear that this + # is the right thing, but __new__ caused problems with + # typeshed (#5647). + method = init_method.node + is_new = False + # Construct callable type based on signature of __init__. 
Adjust + # return type and insert type arguments. + if isinstance(method, FuncBase): + if isinstance(method, OverloadedFuncDef) and not method.type: + # Do not cache if the type is not ready. Same logic for decorators is + # achieved in early return above because is_valid_constructor() is False. + allow_cache = False + t = function_type(method, fallback) + else: + assert isinstance(method.type, ProperType) + assert isinstance(method.type, FunctionLike) # is_valid_constructor() ensures this + t = method.type + result = type_object_type_from_function(t, info, method.info, fallback, is_new) + # Only write cached result is strict_optional=True, otherwise we may get + # inconsistent behaviour because of union simplification. + if allow_cache and state.strict_optional: + info.type_object_type = result + return result + + +def is_valid_constructor(n: SymbolNode | None) -> bool: + """Does this node represents a valid constructor method? + + This includes normal functions, overloaded functions, and decorators + that return a callable type. + """ + if isinstance(n, SYMBOL_FUNCBASE_TYPES): + return True + if isinstance(n, Decorator): + return isinstance(get_proper_type(n.type), FunctionLike) + return False + + +def type_object_type_from_function( + signature: FunctionLike, info: TypeInfo, def_info: TypeInfo, fallback: Instance, is_new: bool +) -> FunctionLike: + # We first need to record all non-trivial (explicit) self types in __init__, + # since they will not be available after we bind them. Note, we use explicit + # self-types only in the defining class, similar to __new__ (but not exactly the same, + # see comment in class_callable below). This is mostly useful for annotating library + # classes such as subprocess.Popen. + if not is_new and not info.is_newtype: + orig_self_types = [get_self_type(it, def_info) for it in signature.items] + else: + orig_self_types = [None] * len(signature.items) + + # The __init__ method might come from a generic superclass 'def_info' + # with type variables that do not map identically to the type variables of + # the class 'info' being constructed. For example: + # + # class A(Generic[T]): + # def __init__(self, x: T) -> None: ... + # class B(A[List[T]]): + # ... + # + # We need to map B's __init__ to the type (List[T]) -> None. + signature = bind_self( + signature, + original_type=fill_typevars(info), + is_classmethod=is_new, + # Explicit instance self annotations have special handling in class_callable(), + # we don't need to bind any type variables in them if they are generic. + ignore_instances=True, + ) + signature = cast(FunctionLike, map_type_from_supertype(signature, info, def_info)) + + special_sig: str | None = None + if def_info.fullname == "builtins.dict": + # Special signature! + special_sig = "dict" + + if isinstance(signature, CallableType): + return class_callable(signature, info, fallback, special_sig, is_new, orig_self_types[0]) + else: + # Overloaded __init__/__new__. 
+ assert isinstance(signature, Overloaded) + items: list[CallableType] = [] + for item, orig_self in zip(signature.items, orig_self_types): + items.append(class_callable(item, info, fallback, special_sig, is_new, orig_self)) + return Overloaded(items) + + +def class_callable( + init_type: CallableType, + info: TypeInfo, + type_type: Instance, + special_sig: str | None, + is_new: bool, + orig_self_type: Type | None = None, +) -> CallableType: + """Create a type object type based on the signature of __init__.""" + variables: list[TypeVarLikeType] = [] + variables.extend(info.defn.type_vars) + variables.extend(init_type.variables) + + from mypy.subtypes import is_subtype + + init_ret_type = get_proper_type(init_type.ret_type) + orig_self_type = get_proper_type(orig_self_type) + default_ret_type = fill_typevars(info) + explicit_type = init_ret_type if is_new else orig_self_type + if ( + isinstance(explicit_type, (Instance, TupleType, UninhabitedType, LiteralType)) + # We have to skip protocols, because it can be a subtype of a return type + # by accident. Like `Hashable` is a subtype of `object`. See #11799 + and isinstance(default_ret_type, Instance) + and not default_ret_type.type.is_protocol + # Only use the declared return type from __new__ or declared self in __init__ + # if it is actually returning a subtype of what we would return otherwise. + and is_subtype(explicit_type, default_ret_type, ignore_type_params=True) + ): + ret_type: Type = explicit_type + else: + ret_type = default_ret_type + + callable_type = init_type.copy_modified( + ret_type=ret_type, + fallback=type_type, + name=None, + variables=variables, + special_sig=special_sig, + ) + c = callable_type.with_name(info.name) + return c + + +def map_type_from_supertype(typ: Type, sub_info: TypeInfo, super_info: TypeInfo) -> Type: + """Map type variables in a type defined in a supertype context to be valid + in the subtype context. Assume that the result is unique; if more than + one type is possible, return one of the alternatives. + + For example, assume + + class D(Generic[S]): ... + class C(D[E[T]], Generic[T]): ... + + Now S in the context of D would be mapped to E[T] in the context of C. + """ + # Create the type of self in subtype, of form t[a1, ...]. + inst_type = fill_typevars(sub_info) + if isinstance(inst_type, TupleType): + inst_type = tuple_fallback(inst_type) + # Map the type of self to supertype. This gets us a description of the + # supertype type variables in terms of subtype variables, i.e. t[t1, ...] + # so that any type variables in tN are to be interpreted in subtype + # context. + inst_type = map_instance_to_supertype(inst_type, super_info) + # Finally expand the type variables in type with those in the previously + # constructed type. Note that both type and inst_type may have type + # variables, but in type they are interpreted in supertype context while + # in inst_type they are interpreted in subtype context. This works even if + # the names of type variables in supertype and subtype overlap. + return expand_type_by_instance(typ, inst_type) + + +def supported_self_type( + typ: ProperType, allow_callable: bool = True, allow_instances: bool = True +) -> bool: + """Is this a supported kind of explicit self-types? + + Currently, this means an X or Type[X], where X is an instance or + a type variable with an instance upper bound. 
+ """ + if isinstance(typ, TypeType): + return supported_self_type(typ.item) + if allow_callable and isinstance(typ, CallableType): + # Special case: allow class callable instead of Type[...] as cls annotation, + # as well as callable self for callback protocols. + return True + return isinstance(typ, TypeVarType) or ( + allow_instances and isinstance(typ, Instance) and typ != fill_typevars(typ.type) + ) + + +F = TypeVar("F", bound=FunctionLike) + + +def bind_self( + method: F, + original_type: Type | None = None, + is_classmethod: bool = False, + ignore_instances: bool = False, +) -> F: + """Return a copy of `method`, with the type of its first parameter (usually + self or cls) bound to original_type. + + If the type of `self` is a generic type (T, or Type[T] for classmethods), + instantiate every occurrence of type with original_type in the rest of the + signature and in the return type. + + original_type is the type of E in the expression E.copy(). It is None in + compatibility checks. In this case we treat it as the erasure of the + declared type of self. + + This way we can express "the type of self". For example: + + T = TypeVar('T', bound='A') + class A: + def copy(self: T) -> T: ... + + class B(A): pass + + b = B().copy() # type: B + + """ + if isinstance(method, Overloaded): + items = [ + bind_self(c, original_type, is_classmethod, ignore_instances) for c in method.items + ] + return cast(F, Overloaded(items)) + assert isinstance(method, CallableType) + func: CallableType = method + if not func.arg_types: + # Invalid method, return something. + return method + if func.arg_kinds[0] in (ARG_STAR, ARG_STAR2): + # The signature is of the form 'def foo(*args, ...)'. + # In this case we shouldn't drop the first arg, + # since func will be absorbed by the *args. + # TODO: infer bounds on the type of *args? + + # In the case of **kwargs we should probably emit an error, but + # for now we simply skip it, to avoid crashes down the line. + return method + self_param_type = get_proper_type(func.arg_types[0]) + + variables: Sequence[TypeVarLikeType] + # Having a def __call__(self: Callable[...], ...) can cause infinite recursion. Although + # this special-casing looks not very principled, there is nothing meaningful we can infer + # from such definition, since it is inherently indefinitely recursive. + allow_callable = func.name is None or not func.name.startswith("__call__ of") + if func.variables and supported_self_type( + self_param_type, allow_callable=allow_callable, allow_instances=not ignore_instances + ): + from mypy.infer import infer_type_arguments + + if original_type is None: + # TODO: type check method override (see #7861). + original_type = erase_to_bound(self_param_type) + original_type = get_proper_type(original_type) + + # Find which of method type variables appear in the type of "self". + self_ids = {tv.id for tv in get_all_type_vars(self_param_type)} + self_vars = [tv for tv in func.variables if tv.id in self_ids] + + # Solve for these type arguments using the actual class or instance type. + typeargs = infer_type_arguments( + self_vars, self_param_type, original_type, is_supertype=True + ) + if ( + is_classmethod + and any(isinstance(get_proper_type(t), UninhabitedType) for t in typeargs) + and isinstance(original_type, (Instance, TypeVarType, TupleType)) + ): + # In case we call a classmethod through an instance x, fallback to type(x). 
+ typeargs = infer_type_arguments( + self_vars, self_param_type, TypeType(original_type), is_supertype=True + ) + + # Update the method signature with the solutions found. + # Technically, some constraints might be unsolvable, make them Never. + to_apply = [t if t is not None else UninhabitedType() for t in typeargs] + func = expand_type(func, {tv.id: arg for tv, arg in zip(self_vars, to_apply)}) + variables = [v for v in func.variables if v not in self_vars] + else: + variables = func.variables + + res = func.copy_modified( + arg_types=func.arg_types[1:], + arg_kinds=func.arg_kinds[1:], + arg_names=func.arg_names[1:], + variables=variables, + is_bound=True, + ) + return cast(F, res) + + +def erase_to_bound(t: Type) -> Type: + # TODO: use value restrictions to produce a union? + t = get_proper_type(t) + if isinstance(t, TypeVarType): + return t.upper_bound + if isinstance(t, TypeType): + if isinstance(t.item, TypeVarType): + return TypeType.make_normalized(t.item.upper_bound, is_type_form=t.is_type_form) + return t + + +def callable_corresponding_argument( + typ: NormalizedCallableType | Parameters, model: FormalArgument +) -> FormalArgument | None: + """Return the argument of a function that corresponds to `model`""" + + by_name = typ.argument_by_name(model.name) + by_pos = typ.argument_by_position(model.pos) + if by_name is None and by_pos is None: + return None + if by_name is not None and by_pos is not None: + if by_name == by_pos: + return by_name + # If we're dealing with an optional pos-only and an optional + # name-only arg, merge them. This is the case for all functions + # taking both *args and **args, or a pair of functions like so: + + # def right(a: int = ...) -> None: ... + # def left(x: int = ..., /, *, a: int = ...) -> None: ... + from mypy.meet import meet_types + + if ( + not (by_name.required or by_pos.required) + and by_pos.name is None + and by_name.pos is None + # This is not principled, but prevents a crash. It's weird to have a FormalArgument + # that has an UnpackType. + and not isinstance(by_name.typ, UnpackType) + and not isinstance(by_pos.typ, UnpackType) + ): + return FormalArgument( + by_name.name, by_pos.pos, meet_types(by_name.typ, by_pos.typ), False + ) + return by_name + + return by_name if by_name is not None else by_pos + + +def simple_literal_type(t: ProperType | None) -> Instance | None: + """Extract the underlying fallback Instance type for a simple Literal""" + if isinstance(t, Instance) and t.last_known_value is not None: + t = t.last_known_value + if isinstance(t, LiteralType): + return t.fallback + return None + + +def is_simple_literal(t: ProperType) -> bool: + if isinstance(t, LiteralType): + return t.fallback.type.is_enum or t.fallback.type.fullname == "builtins.str" + if isinstance(t, Instance): + return t.last_known_value is not None and isinstance(t.last_known_value.value, str) + return False + + +def make_simplified_union( + items: Sequence[Type], + line: int = -1, + column: int = -1, + *, + keep_erased: bool = False, + contract_literals: bool = True, + handle_recursive: bool = True, +) -> ProperType: + """Build union type with redundant union items removed. + + If only a single item remains, this may return a non-union type. + + Examples: + + * [int, str] -> Union[int, str] + * [int, object] -> object + * [int, int] -> int + * [int, Any] -> Union[int, Any] (Any types are not simplified away!) 
+ * [Any, Any] -> Any + * [int, Union[bytes, str]] -> Union[int, bytes, str] + + Note: This must NOT be used during semantic analysis, since TypeInfos may not + be fully initialized. + + The keep_erased flag is used for type inference against union types + containing type variables. If set to True, keep all ErasedType items. + + The contract_literals flag indicates whether we need to contract literal types + back into a sum type. Set it to False when called by try_expanding_sum_type_ + to_union(). + """ + # Step 1: expand all nested unions + items = flatten_nested_unions(items, handle_recursive=handle_recursive) + + # Step 2: fast path for single item + if len(items) == 1: + return get_proper_type(items[0]) + + # Step 3: remove redundant unions + simplified_set: Sequence[Type] = _remove_redundant_union_items(items, keep_erased) + + # Step 4: If more than one literal exists in the union, try to simplify + if ( + contract_literals + and sum(isinstance(get_proper_type(item), LiteralType) for item in simplified_set) > 1 + ): + simplified_set = try_contracting_literals_in_union(simplified_set) + + result = get_proper_type(UnionType.make_union(simplified_set, line, column)) + + nitems = len(items) + if nitems > 1 and ( + nitems > 2 or not (type(items[0]) is NoneType or type(items[1]) is NoneType) + ): + # Step 5: At last, we erase any (inconsistent) extra attributes on instances. + + # Initialize with None instead of an empty set as a micro-optimization. The set + # is needed very rarely, so we try to avoid constructing it. + extra_attrs_set: set[ExtraAttrs] | None = None + for item in items: + instance = try_getting_instance_fallback(item) + if instance and instance.extra_attrs: + if extra_attrs_set is None: + extra_attrs_set = {instance.extra_attrs} + else: + extra_attrs_set.add(instance.extra_attrs) + + if extra_attrs_set is not None and len(extra_attrs_set) > 1: + fallback = try_getting_instance_fallback(result) + if fallback: + fallback.extra_attrs = None + + return result + + +def _remove_redundant_union_items(items: list[Type], keep_erased: bool) -> list[Type]: + from mypy.subtypes import is_proper_subtype + + # The first pass through this loop, we check if later items are subtypes of earlier items. + # The second pass through this loop, we check if earlier items are subtypes of later items + # (by reversing the remaining items) + for _direction in range(2): + new_items: list[Type] = [] + # seen is a map from a type to its index in new_items + seen: dict[ProperType, int] = {} + unduplicated_literal_fallbacks: set[Instance] | None = None + for ti in items: + proper_ti = get_proper_type(ti) + + # UninhabitedType is always redundant + if isinstance(proper_ti, UninhabitedType): + continue + + duplicate_index = -1 + # Quickly check if we've seen this type + if proper_ti in seen: + duplicate_index = seen[proper_ti] + elif ( + isinstance(proper_ti, LiteralType) + and unduplicated_literal_fallbacks is not None + and proper_ti.fallback in unduplicated_literal_fallbacks + ): + # This is an optimisation for unions with many LiteralType + # We've already checked for exact duplicates. This means that any super type of + # the LiteralType must be a super type of its fallback. 
If we've gone through + # the expensive loop below and found no super type for a previous LiteralType + # with the same fallback, we can skip doing that work again and just add the type + # to new_items + pass + else: + # If not, check if we've seen a supertype of this type + for j, tj in enumerate(new_items): + proper_tj = get_proper_type(tj) + # If tj is an Instance with a last_known_value, do not remove proper_ti + # (unless it's an instance with the same last_known_value) + if ( + isinstance(proper_tj, Instance) + and proper_tj.last_known_value is not None + and not ( + isinstance(proper_ti, Instance) + and proper_tj.last_known_value == proper_ti.last_known_value + ) + ): + continue + + if is_proper_subtype( + ti, tj, keep_erased_types=keep_erased, ignore_promotions=True + ): + duplicate_index = j + break + if duplicate_index != -1: + # If deleted subtypes had more general truthiness, use that + orig_item = new_items[duplicate_index] + if not orig_item.can_be_true and ti.can_be_true: + new_items[duplicate_index] = true_or_false(orig_item) + elif not orig_item.can_be_false and ti.can_be_false: + new_items[duplicate_index] = true_or_false(orig_item) + else: + # We have a non-duplicate item, add it to new_items + seen[proper_ti] = len(new_items) + new_items.append(ti) + if isinstance(proper_ti, LiteralType): + if unduplicated_literal_fallbacks is None: + unduplicated_literal_fallbacks = set() + unduplicated_literal_fallbacks.add(proper_ti.fallback) + + items = new_items + if len(items) <= 1: + break + items.reverse() + + return items + + +def _get_type_method_ret_type(t: ProperType, *, name: str) -> Type | None: + # For Enum literals the ret_type can change based on the Enum + # we need to check the type of the enum rather than the literal + if isinstance(t, LiteralType) and t.is_enum_literal(): + t = t.fallback + + if isinstance(t, Instance): + sym = t.type.get(name) + if sym: + sym_type = get_proper_type(sym.type) + if isinstance(sym_type, CallableType): + return sym_type.ret_type + + return None + + +def true_only(t: Type) -> ProperType: + """ + Restricted version of t with only True-ish values + """ + t = get_proper_type(t) + + if not t.can_be_true: + # All values of t are False-ish, so there are no true values in it + return UninhabitedType(line=t.line, column=t.column) + elif not t.can_be_false: + # All values of t are already True-ish, so true_only is idempotent in this case + return t + elif isinstance(t, UnionType): + # The true version of a union type is the union of the true versions of its components + new_items = [true_only(item) for item in t.items] + can_be_true_items = [item for item in new_items if item.can_be_true] + return make_simplified_union(can_be_true_items, line=t.line, column=t.column) + else: + ret_type = _get_type_method_ret_type(t, name="__bool__") or _get_type_method_ret_type( + t, name="__len__" + ) + + if ret_type and not ret_type.can_be_true: + return UninhabitedType(line=t.line, column=t.column) + + new_t = copy_type(t) + new_t.can_be_false = False + return new_t + + +def false_only(t: Type) -> ProperType: + """ + Restricted version of t with only False-ish values + """ + t = get_proper_type(t) + + if not t.can_be_false: + if state.strict_optional: + # All values of t are True-ish, so there are no false values in it + return UninhabitedType(line=t.line) + else: + # When strict optional checking is disabled, everything can be + # False-ish since anything can be None + return NoneType(line=t.line) + elif not t.can_be_true: + # All values of t are already 
False-ish, so false_only is idempotent in this case + return t + elif isinstance(t, UnionType): + # The false version of a union type is the union of the false versions of its components + new_items = [false_only(item) for item in t.items] + can_be_false_items = [item for item in new_items if item.can_be_false] + return make_simplified_union(can_be_false_items, line=t.line, column=t.column) + elif isinstance(t, Instance) and t.type.fullname in ("builtins.str", "builtins.bytes"): + return LiteralType("", fallback=t) + elif isinstance(t, Instance) and t.type.fullname == "builtins.int": + return LiteralType(0, fallback=t) + else: + ret_type = _get_type_method_ret_type(t, name="__bool__") or _get_type_method_ret_type( + t, name="__len__" + ) + + if ret_type: + if not ret_type.can_be_false: + return UninhabitedType(line=t.line) + elif isinstance(t, Instance): + if (t.type.is_final or t.type.is_enum) and state.strict_optional: + return UninhabitedType(line=t.line) + elif isinstance(t, LiteralType) and t.is_enum_literal() and state.strict_optional: + return UninhabitedType(line=t.line) + + new_t = copy_type(t) + new_t.can_be_true = False + return new_t + + +def true_or_false(t: Type) -> ProperType: + """ + Unrestricted version of t with both True-ish and False-ish values + """ + t = get_proper_type(t) + + if isinstance(t, UnionType): + new_items = [true_or_false(item) for item in t.items] + return make_simplified_union(new_items, line=t.line, column=t.column) + + new_t = copy_type(t) + new_t.can_be_true = new_t.can_be_true_default() + new_t.can_be_false = new_t.can_be_false_default() + return new_t + + +def erase_def_to_union_or_bound(tdef: TypeVarLikeType) -> Type: + # TODO(PEP612): fix for ParamSpecType + if isinstance(tdef, ParamSpecType): + return AnyType(TypeOfAny.from_error) + if isinstance(tdef, TypeVarType) and tdef.values: + return make_simplified_union(tdef.values) + else: + return tdef.upper_bound + + +def erase_to_union_or_bound(typ: TypeVarType) -> ProperType: + if typ.values: + return make_simplified_union(typ.values) + else: + return get_proper_type(typ.upper_bound) + + +def function_type(func: FuncBase, fallback: Instance) -> FunctionLike: + if func.type: + assert isinstance(func.type, FunctionLike) + return func.type + else: + # Implicit type signature with dynamic types. + if isinstance(func, FuncItem): + return callable_type(func, fallback) + else: + # Either a broken overload, or decorated overload type is not ready. + # TODO: make sure the caller defers if possible. + assert isinstance(func, OverloadedFuncDef) + any_type = AnyType(TypeOfAny.from_error) + dummy = CallableType( + [any_type, any_type], + [ARG_STAR, ARG_STAR2], + [None, None], + any_type, + fallback, + line=func.line, + is_ellipsis_args=True, + ) + # Return an Overloaded, because some callers may expect that + # an OverloadedFuncDef has an Overloaded type. 
+ return Overloaded([dummy]) + + +def callable_type( + fdef: FuncItem, fallback: Instance, ret_type: Type | None = None +) -> CallableType: + # TODO: somewhat unfortunate duplication with prepare_method_signature in semanal + if fdef.info and fdef.has_self_or_cls_argument and fdef.arg_names: + self_type: Type = fill_typevars(fdef.info) + if fdef.is_class or fdef.name == "__new__": + self_type = TypeType.make_normalized(self_type) + args = [self_type] + [AnyType(TypeOfAny.unannotated)] * (len(fdef.arg_names) - 1) + else: + args = [AnyType(TypeOfAny.unannotated)] * len(fdef.arg_names) + + return CallableType( + args, + fdef.arg_kinds, + fdef.arg_names, + ret_type or AnyType(TypeOfAny.unannotated), + fallback, + name=fdef.name, + line=fdef.line, + column=fdef.column, + implicit=True, + # We need this for better error messages, like missing `self` note: + definition=fdef if isinstance(fdef, FuncDef) else None, + ) + + +def try_getting_str_literals(expr: Expression, typ: Type) -> list[str] | None: + """If the given expression or type corresponds to a string literal + or a union of string literals, returns a list of the underlying strings. + Otherwise, returns None. + + Specifically, this function is guaranteed to return a list with + one or more strings if one of the following is true: + + 1. 'expr' is a StrExpr + 2. 'typ' is a LiteralType containing a string + 3. 'typ' is a UnionType containing only LiteralType of strings + """ + if isinstance(expr, StrExpr): + return [expr.value] + + # TODO: See if we can eliminate this function and call the below one directly + return try_getting_str_literals_from_type(typ) + + +def try_getting_str_literals_from_type(typ: Type) -> list[str] | None: + """If the given expression or type corresponds to a string Literal + or a union of string Literals, returns a list of the underlying strings. + Otherwise, returns None. + + For example, if we had the type 'Literal["foo", "bar"]' as input, this function + would return a list of strings ["foo", "bar"]. + """ + return try_getting_literals_from_type(typ, str, "builtins.str") + + +def try_getting_int_literals_from_type(typ: Type) -> list[int] | None: + """If the given expression or type corresponds to an int Literal + or a union of int Literals, returns a list of the underlying ints. + Otherwise, returns None. + + For example, if we had the type 'Literal[1, 2, 3]' as input, this function + would return a list of ints [1, 2, 3]. + """ + return try_getting_literals_from_type(typ, int, "builtins.int") + + +T = TypeVar("T") + + +def try_getting_literals_from_type( + typ: Type, target_literal_type: type[T], target_fullname: str +) -> list[T] | None: + """If the given expression or type corresponds to a Literal or + union of Literals where the underlying values correspond to the given + target type, returns a list of those underlying values. Otherwise, + returns None. 
+ """ + typ = get_proper_type(typ) + + if isinstance(typ, Instance) and typ.last_known_value is not None: + possible_literals: list[Type] = [typ.last_known_value] + elif isinstance(typ, UnionType): + possible_literals = list(typ.items) + else: + possible_literals = [typ] + + literals: list[T] = [] + for lit in get_proper_types(possible_literals): + if isinstance(lit, LiteralType) and lit.fallback.type.fullname == target_fullname: + val = lit.value + if isinstance(val, target_literal_type): + literals.append(val) + else: + return None + else: + return None + return literals + + +def is_literal_type_like(t: Type | None) -> bool: + """Returns 'true' if the given type context is potentially either a LiteralType, + a Union of LiteralType, or something similar. + """ + t = get_proper_type(t) + if t is None: + return False + elif isinstance(t, LiteralType): + return True + elif isinstance(t, UnionType): + return any(is_literal_type_like(item) for item in t.items) + elif isinstance(t, TypeVarType): + return is_literal_type_like(t.upper_bound) or any( + is_literal_type_like(item) for item in t.values + ) + else: + return False + + +def is_singleton_type(typ: Type) -> bool: + """Returns 'true' if this type is a "singleton type" -- if there exists + exactly only one runtime value associated with this type. + + That is, given two values 'a' and 'b' that have the same type 't', + 'is_singleton_type(t)' returns True if and only if the expression 'a is b' is + always true. + + Currently, this returns True when given NoneTypes, enum LiteralTypes, + enum types with a single value and ... (Ellipses). + + Note that other kinds of LiteralTypes cannot count as singleton types. For + example, suppose we do 'a = 100000 + 1' and 'b = 100001'. It is not guaranteed + that 'a is b' will always be true -- some implementations of Python will end up + constructing two distinct instances of 100001. + """ + typ = get_proper_type(typ) + return typ.is_singleton_type() + + +def try_expanding_sum_type_to_union(typ: Type, target_fullname: str) -> Type: + """Attempts to recursively expand any enum Instances with the given target_fullname + into a Union of all of its component LiteralTypes. + + For example, if we have: + + class Color(Enum): + RED = 1 + BLUE = 2 + YELLOW = 3 + + class Status(Enum): + SUCCESS = 1 + FAILURE = 2 + UNKNOWN = 3 + + ...and if we call `try_expanding_sum_type_to_union(Union[Color, Status], 'module.Color')`, + this function will return Literal[Color.RED, Color.BLUE, Color.YELLOW, Status]. + """ + typ = get_proper_type(typ) + + if isinstance(typ, UnionType): + # Non-empty enums cannot subclass each other so simply removing duplicates is enough. + items = [ + try_expanding_sum_type_to_union(item, target_fullname) + for item in remove_dups(flatten_nested_unions(typ.relevant_items())) + ] + return UnionType.make_union(items) + + if isinstance(typ, Instance) and typ.type.fullname == target_fullname: + if typ.type.fullname == "builtins.bool": + return UnionType([LiteralType(True, typ), LiteralType(False, typ)]) + + if typ.type.is_enum: + items = [LiteralType(name, typ) for name in typ.type.enum_members] + if not items: + return typ + return UnionType.make_union(items) + + return typ + + +def try_contracting_literals_in_union(types: Sequence[Type]) -> list[ProperType]: + """Contracts any literal types back into a sum type if possible. + + Requires a flattened union and does not descend into children. + + Will replace the first instance of the literal with the sum type and + remove all others. 
+ + If we call `try_contracting_union(Literal[Color.RED, Color.BLUE, Color.YELLOW])`, + this function will return Color. + + We also treat `Literal[True, False]` as `bool`. + """ + proper_types = [get_proper_type(typ) for typ in types] + sum_types: dict[str, tuple[set[Any], list[int]]] = {} + marked_for_deletion = set() + for idx, typ in enumerate(proper_types): + if isinstance(typ, LiteralType): + fullname = typ.fallback.type.fullname + if typ.fallback.type.is_enum or isinstance(typ.value, bool): + if fullname not in sum_types: + sum_types[fullname] = ( + ( + set(typ.fallback.type.enum_members) + if typ.fallback.type.is_enum + else {True, False} + ), + [], + ) + literals, indexes = sum_types[fullname] + literals.discard(typ.value) + indexes.append(idx) + if not literals: + first, *rest = indexes + proper_types[first] = typ.fallback + marked_for_deletion |= set(rest) + return list( + itertools.compress( + proper_types, [(i not in marked_for_deletion) for i in range(len(proper_types))] + ) + ) + + +def coerce_to_literal(typ: Type) -> Type: + """Recursively converts any Instances that have a last_known_value or are + instances of enum types with a single value into the corresponding LiteralType. + """ + original_type = typ + typ = get_proper_type(typ) + if isinstance(typ, UnionType): + new_items = [coerce_to_literal(item) for item in typ.items] + return UnionType.make_union(new_items) + elif isinstance(typ, Instance): + if typ.last_known_value: + return typ.last_known_value + elif typ.type.is_enum: + enum_values = typ.type.enum_members + if len(enum_values) == 1: + return LiteralType(value=enum_values[0], fallback=typ) + return original_type + + +def get_type_vars(tp: Type) -> list[TypeVarType]: + return cast("list[TypeVarType]", tp.accept(TypeVarExtractor())) + + +def get_all_type_vars(tp: Type) -> list[TypeVarLikeType]: + # TODO: should we always use this function instead of get_type_vars() above? + return tp.accept(TypeVarExtractor(include_all=True)) + + +class TypeVarExtractor(TypeQuery[list[TypeVarLikeType]]): + def __init__(self, include_all: bool = False) -> None: + super().__init__() + self.include_all = include_all + + def strategy(self, items: Iterable[list[TypeVarLikeType]]) -> list[TypeVarLikeType]: + out = [] + for item in items: + out.extend(item) + return out + + def visit_type_var(self, t: TypeVarType) -> list[TypeVarLikeType]: + return [t] + + def visit_param_spec(self, t: ParamSpecType) -> list[TypeVarLikeType]: + return [t] if self.include_all else [] + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> list[TypeVarLikeType]: + return [t] if self.include_all else [] + + +def freeze_all_type_vars(member_type: Type) -> None: + member_type.accept(FreezeTypeVarsVisitor()) + + +class FreezeTypeVarsVisitor(TypeTraverserVisitor): + def visit_callable_type(self, t: CallableType) -> None: + for v in t.variables: + v.id.meta_level = 0 + super().visit_callable_type(t) + + +def custom_special_method(typ: Type, name: str, check_all: bool = False) -> bool: + """Does this type have a custom special method such as __format__() or __eq__()? + + If check_all is True ensure all items of a union have a custom method, not just some. 
+ """ + typ = get_proper_type(typ) + if isinstance(typ, Instance): + method = typ.type.get(name) + if method and isinstance(method.node, (SYMBOL_FUNCBASE_TYPES, Decorator, Var)): + if method.node.info: + return not method.node.info.fullname.startswith(("builtins.", "typing.")) + return False + if isinstance(typ, UnionType): + if check_all: + return all(custom_special_method(t, name, check_all) for t in typ.items) + return any(custom_special_method(t, name) for t in typ.items) + if isinstance(typ, TupleType): + return custom_special_method(tuple_fallback(typ), name, check_all) + if isinstance(typ, FunctionLike) and typ.is_type_obj(): + # Look up __method__ on the metaclass for class objects. + return custom_special_method(typ.fallback, name, check_all) + if isinstance(typ, TypeType) and isinstance(typ.item, Instance): + if typ.item.type.metaclass_type: + # Look up __method__ on the metaclass for class objects. + return custom_special_method(typ.item.type.metaclass_type, name, check_all) + if isinstance(typ, AnyType): + # Avoid false positives in uncertain cases. + return True + # TODO: support other types (see ExpressionChecker.has_member())? + return False + + +def separate_union_literals(t: UnionType) -> tuple[Sequence[LiteralType], Sequence[Type]]: + """Separate literals from other members in a union type.""" + literal_items = [] + union_items = [] + + for item in t.items: + proper = get_proper_type(item) + if isinstance(proper, LiteralType): + literal_items.append(proper) + else: + union_items.append(item) + + return literal_items, union_items + + +def try_getting_instance_fallback(typ: Type) -> Instance | None: + """Returns the Instance fallback for this type if one exists or None.""" + typ = get_proper_type(typ) + if isinstance(typ, Instance): + return typ + elif isinstance(typ, LiteralType): + return typ.fallback + elif isinstance(typ, NoneType): + return None # Fast path for None, which is common + elif isinstance(typ, FunctionLike): + return typ.fallback + elif isinstance(typ, TupleType): + return typ.partial_fallback + elif isinstance(typ, TypedDictType): + return typ.fallback + elif isinstance(typ, TypeVarType): + return try_getting_instance_fallback(typ.upper_bound) + return None + + +def fixup_partial_type(typ: Type) -> Type: + """Convert a partial type that we couldn't resolve into something concrete. + + This means, for None we make it Optional[Any], and for anything else we + fill in all of the type arguments with Any. + """ + if not isinstance(typ, PartialType): + return typ + if typ.type is None: + return UnionType.make_union([AnyType(TypeOfAny.unannotated), NoneType()]) + else: + return Instance(typ.type, [AnyType(TypeOfAny.unannotated)] * len(typ.type.type_vars)) + + +def get_protocol_member( + left: Instance, member: str, class_obj: bool, is_lvalue: bool = False +) -> Type | None: + if member == "__call__" and class_obj: + # Special case: class objects always have __call__ that is just the constructor. + + # TODO: this is wrong, it creates callables that are not recognized as type objects. + # Long-term, we should probably get rid of this callback argument altogether. + def named_type(fullname: str) -> Instance: + return Instance(left.type.mro[-1], []) + + return type_object_type(left.type, named_type) + + if member == "__call__" and left.type.is_metaclass(precise=True): + # Special case: we want to avoid falling back to metaclass __call__ + # if constructor signature didn't match, this can cause many false negatives. 
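+        # Returning None makes the caller treat __call__ as missing here, rather than
+        # matching the protocol against the metaclass-provided __call__.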
+ return None + + from mypy.subtypes import find_member + + subtype = find_member(member, left, left, class_obj=class_obj, is_lvalue=is_lvalue) + if isinstance(subtype, PartialType): + subtype = ( + NoneType() + if subtype.type is None + else Instance( + subtype.type, [AnyType(TypeOfAny.unannotated)] * len(subtype.type.type_vars) + ) + ) + return subtype + + +def _is_disjoint_base(info: TypeInfo) -> bool: + # It either has the @disjoint_base decorator or defines nonempty __slots__. + if info.is_disjoint_base: + return True + if not info.slots: + return False + own_slots = { + slot + for slot in info.slots + if not any( + base_info.type.slots is not None and slot in base_info.type.slots + for base_info in info.bases + ) + } + return bool(own_slots) + + +def _get_disjoint_base_of(instance: Instance) -> TypeInfo | None: + """Returns the disjoint base of the given instance, if it exists.""" + if _is_disjoint_base(instance.type): + return instance.type + for base in instance.type.mro: + if _is_disjoint_base(base): + return base + return None + + +def can_have_shared_disjoint_base(instances: list[Instance]) -> bool: + """Returns whether the given instances can share a disjoint base. + + This means that a child class of these classes can exist at runtime. + """ + # Ignore None disjoint bases (which are `object`). + disjoint_bases = [ + base for instance in instances if (base := _get_disjoint_base_of(instance)) is not None + ] + if not disjoint_bases: + # All are `object`. + return True + + candidate = disjoint_bases[0] + for base in disjoint_bases[1:]: + if candidate.has_base(base.fullname): + continue + elif base.has_base(candidate.fullname): + candidate = base + else: + return False + return True diff --git a/.venv/lib/python3.12/site-packages/mypy/types.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/types.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..ce66c46 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/types.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/types.py b/.venv/lib/python3.12/site-packages/mypy/types.py new file mode 100644 index 0000000..09e4b74 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/types.py @@ -0,0 +1,4438 @@ +"""Classes for representing mypy types.""" + +from __future__ import annotations + +import sys +from abc import abstractmethod +from collections.abc import Iterable, Sequence +from typing import TYPE_CHECKING, Any, ClassVar, Final, NewType, TypeVar, Union, cast, overload +from typing_extensions import Self, TypeAlias as _TypeAlias, TypeGuard + +from librt.internal import ( + read_int as read_int_bare, + read_str as read_str_bare, + write_int as write_int_bare, + write_str as write_str_bare, +) + +import mypy.nodes +from mypy.bogus_type import Bogus +from mypy.cache import ( + DICT_STR_GEN, + END_TAG, + EXTRA_ATTRS, + LIST_GEN, + LITERAL_NONE, + ReadBuffer, + Tag, + WriteBuffer, + read_bool, + read_int, + read_int_list, + read_literal, + read_str, + read_str_list, + read_str_opt, + read_str_opt_list, + read_tag, + write_bool, + write_int, + write_int_list, + write_literal, + write_str, + write_str_list, + write_str_opt, + write_str_opt_list, + write_tag, +) +from mypy.nodes import ARG_KINDS, ARG_POS, ARG_STAR, ARG_STAR2, INVARIANT, ArgKind, SymbolNode +from mypy.options import Options +from mypy.state import state +from mypy.util import IdMapper + +T = TypeVar("T") + +JsonDict: _TypeAlias = dict[str, Any] + +# The set of all valid 
expressions that can currently be contained +# inside of a Literal[...]. +# +# Literals can contain bytes and enum-values: we special-case both of these +# and store the value as a string. We rely on the fallback type that's also +# stored with the Literal to determine how a string is being used. +# +# TODO: confirm that we're happy with representing enums (and the +# other types) in the manner described above. +# +# Note: if we change the set of types included below, we must also +# make sure to audit the following methods: +# +# 1. types.LiteralType's serialize and deserialize methods: this method +# needs to make sure it can convert the below types into JSON and back. +# +# 2. types.LiteralType's 'value_repr` method: this method is ultimately used +# by TypeStrVisitor's visit_literal_type to generate a reasonable +# repr-able output. +# +# 3. server.astdiff.SnapshotTypeVisitor's visit_literal_type_method: this +# method assumes that the following types supports equality checks and +# hashability. +# +# Note: Although "Literal[None]" is a valid type, we internally always convert +# such a type directly into "None". So, "None" is not a valid parameter of +# LiteralType and is omitted from this list. +# +# Note: Float values are only used internally. They are not accepted within +# Literal[...]. +LiteralValue: _TypeAlias = Union[int, str, bool, float] + + +# If we only import type_visitor in the middle of the file, mypy +# breaks, and if we do it at the top, it breaks at runtime because of +# import cycle issues, so we do it at the top while typechecking and +# then again in the middle at runtime. +# We should be able to remove this once we are switched to the new +# semantic analyzer! +if TYPE_CHECKING: + from mypy.type_visitor import ( + SyntheticTypeVisitor as SyntheticTypeVisitor, + TypeVisitor as TypeVisitor, + ) + +TUPLE_NAMES: Final = ("builtins.tuple", "typing.Tuple") +TYPE_NAMES: Final = ("builtins.type", "typing.Type") + +TYPE_VAR_LIKE_NAMES: Final = ( + "typing.TypeVar", + "typing_extensions.TypeVar", + "typing.ParamSpec", + "typing_extensions.ParamSpec", + "typing.TypeVarTuple", + "typing_extensions.TypeVarTuple", +) + +TYPED_NAMEDTUPLE_NAMES: Final = ("typing.NamedTuple", "typing_extensions.NamedTuple") + +# Supported names of TypedDict type constructors. +TPDICT_NAMES: Final = ( + "typing.TypedDict", + "typing_extensions.TypedDict", + "mypy_extensions.TypedDict", +) + +# Supported fallback instance type names for TypedDict types. +TPDICT_FB_NAMES: Final = ( + "typing._TypedDict", + "typing_extensions._TypedDict", + "mypy_extensions._TypedDict", +) + +# Supported names of Protocol base class. +PROTOCOL_NAMES: Final = ("typing.Protocol", "typing_extensions.Protocol") + +# Supported TypeAlias names. +TYPE_ALIAS_NAMES: Final = ("typing.TypeAlias", "typing_extensions.TypeAlias") + +# Supported Final type names. +FINAL_TYPE_NAMES: Final = ("typing.Final", "typing_extensions.Final") + +# Supported @final decorator names. +FINAL_DECORATOR_NAMES: Final = ("typing.final", "typing_extensions.final") + +# Supported @type_check_only names. +TYPE_CHECK_ONLY_NAMES: Final = ("typing.type_check_only", "typing_extensions.type_check_only") + +# Supported Literal type names. +LITERAL_TYPE_NAMES: Final = ("typing.Literal", "typing_extensions.Literal") + +# Supported Annotated type names. +ANNOTATED_TYPE_NAMES: Final = ("typing.Annotated", "typing_extensions.Annotated") + +# Supported Concatenate type names. 
+CONCATENATE_TYPE_NAMES: Final = ("typing.Concatenate", "typing_extensions.Concatenate") + +# Supported Unpack type names. +UNPACK_TYPE_NAMES: Final = ("typing.Unpack", "typing_extensions.Unpack") + +# Supported @deprecated decorator names +DEPRECATED_TYPE_NAMES: Final = ("warnings.deprecated", "typing_extensions.deprecated") + +# Supported @disjoint_base decorator names +DISJOINT_BASE_DECORATOR_NAMES: Final = ("typing.disjoint_base", "typing_extensions.disjoint_base") + +# We use this constant in various places when checking `tuple` subtyping: +TUPLE_LIKE_INSTANCE_NAMES: Final = ( + "builtins.tuple", + "typing.Iterable", + "typing.Container", + "typing.Sequence", + "typing.Reversible", +) + +IMPORTED_REVEAL_TYPE_NAMES: Final = ("typing.reveal_type", "typing_extensions.reveal_type") +REVEAL_TYPE_NAMES: Final = ("builtins.reveal_type", *IMPORTED_REVEAL_TYPE_NAMES) + +ASSERT_TYPE_NAMES: Final = ("typing.assert_type", "typing_extensions.assert_type") + +OVERLOAD_NAMES: Final = ("typing.overload", "typing_extensions.overload") + +NEVER_NAMES: Final = ( + "typing.NoReturn", + "typing_extensions.NoReturn", + "mypy_extensions.NoReturn", + "typing.Never", + "typing_extensions.Never", +) + +# Mypyc fixed-width native int types (compatible with builtins.int) +MYPYC_NATIVE_INT_NAMES: Final = ( + "mypy_extensions.i64", + "mypy_extensions.i32", + "mypy_extensions.i16", + "mypy_extensions.u8", +) + +DATACLASS_TRANSFORM_NAMES: Final = ( + "typing.dataclass_transform", + "typing_extensions.dataclass_transform", +) +# Supported @override decorator names. +OVERRIDE_DECORATOR_NAMES: Final = ("typing.override", "typing_extensions.override") + +ELLIPSIS_TYPE_NAMES: Final = ("builtins.ellipsis", "types.EllipsisType") + +NOT_IMPLEMENTED_TYPE_NAMES: Final = ("builtins._NotImplementedType", "types.NotImplementedType") + +# A placeholder used for Bogus[...] parameters +_dummy: Final[Any] = object() + +# A placeholder for int parameters +_dummy_int: Final = -999999 + + +class TypeOfAny: + """ + This class describes different types of Any. Each 'Any' can be of only one type at a time. + """ + + __slots__ = () + + # Was this Any type inferred without a type annotation? + unannotated: Final = 1 + # Does this Any come from an explicit type annotation? + explicit: Final = 2 + # Does this come from an unfollowed import? See --disallow-any-unimported option + from_unimported_type: Final = 3 + # Does this Any type come from omitted generics? + from_omitted_generics: Final = 4 + # Does this Any come from an error? + from_error: Final = 5 + # Is this a type that can't be represented in mypy's type system? For instance, type of + # call to NewType(...). Even though these types aren't real Anys, we treat them as such. + # Also used for variables named '_'. + special_form: Final = 6 + # Does this Any come from interaction with another Any? + from_another_any: Final = 7 + # Does this Any come from an implementation limitation/bug? + implementation_artifact: Final = 8 + # Does this Any come from use in the suggestion engine? This is + # used to ignore Anys inserted by the suggestion engine when + # generating constraints. 
+ suggestion_engine: Final = 9 + + +def deserialize_type(data: JsonDict | str) -> Type: + if isinstance(data, str): + return Instance.deserialize(data) + classname = data[".class"] + method = deserialize_map.get(classname) + if method is not None: + return method(data) + raise NotImplementedError(f"unexpected .class {classname}") + + +class Type(mypy.nodes.Context): + """Abstract base class for all types.""" + + __slots__ = ("_can_be_true", "_can_be_false") + # 'can_be_true' and 'can_be_false' mean whether the value of the + # expression can be true or false in a boolean context. They are useful + # when inferring the type of logic expressions like `x and y`. + # + # For example: + # * the literal `False` can't be true while `True` can. + # * a value with type `bool` can be true or false. + # * `None` can't be true + # * ... + + def __init__(self, line: int = -1, column: int = -1) -> None: + super().__init__(line, column) + # Value of these can be -1 (use the default, lazy init), 0 (false) or 1 (true) + self._can_be_true = -1 + self._can_be_false = -1 + + @property + def can_be_true(self) -> bool: + if self._can_be_true == -1: # Lazy init helps mypyc + self._can_be_true = self.can_be_true_default() + return bool(self._can_be_true) + + @can_be_true.setter + def can_be_true(self, v: bool) -> None: + self._can_be_true = v + + @property + def can_be_false(self) -> bool: + if self._can_be_false == -1: # Lazy init helps mypyc + self._can_be_false = self.can_be_false_default() + return bool(self._can_be_false) + + @can_be_false.setter + def can_be_false(self, v: bool) -> None: + self._can_be_false = v + + def can_be_true_default(self) -> bool: + return True + + def can_be_false_default(self) -> bool: + return True + + def accept(self, visitor: TypeVisitor[T]) -> T: + raise RuntimeError("Not implemented", type(self)) + + def __repr__(self) -> str: + return self.accept(TypeStrVisitor(options=Options())) + + def str_with_options(self, options: Options) -> str: + return self.accept(TypeStrVisitor(options=options)) + + def serialize(self) -> JsonDict | str: + raise NotImplementedError(f"Cannot serialize {self.__class__.__name__} instance") + + @classmethod + def deserialize(cls, data: JsonDict) -> Type: + raise NotImplementedError(f"Cannot deserialize {cls.__name__} instance") + + def write(self, data: WriteBuffer) -> None: + raise NotImplementedError(f"Cannot serialize {self.__class__.__name__} instance") + + @classmethod + def read(cls, data: ReadBuffer) -> Type: + raise NotImplementedError(f"Cannot deserialize {cls.__name__} instance") + + def is_singleton_type(self) -> bool: + return False + + +class TypeAliasType(Type): + """A type alias to another type. + + To support recursive type aliases we don't immediately expand a type alias + during semantic analysis, but create an instance of this type that records the target alias + definition node (mypy.nodes.TypeAlias) and type arguments (for generic aliases). + + This is very similar to how TypeInfo vs Instance interact, where a recursive class-based + structure like + class Node: + value: int + children: List[Node] + can be represented in a tree-like manner. + """ + + __slots__ = ("alias", "args", "type_ref") + + def __init__( + self, + alias: mypy.nodes.TypeAlias | None, + args: list[Type], + line: int = -1, + column: int = -1, + ) -> None: + super().__init__(line, column) + self.alias = alias + self.args = args + self.type_ref: str | None = None + + def _expand_once(self) -> Type: + """Expand to the target type exactly once. 
+ + This doesn't do full expansion, i.e. the result can contain another + (or even this same) type alias. Use this internal helper only when really needed, + its public wrapper mypy.types.get_proper_type() is preferred. + """ + assert self.alias is not None + if self.alias.no_args: + # We know that no_args=True aliases like L = List must have an instance + # as their target. + assert isinstance(self.alias.target, Instance) # type: ignore[misc] + return self.alias.target.copy_modified(args=self.args) + + # TODO: this logic duplicates the one in expand_type_by_instance(). + if self.alias.tvar_tuple_index is None: + mapping = {v.id: s for (v, s) in zip(self.alias.alias_tvars, self.args)} + else: + prefix = self.alias.tvar_tuple_index + suffix = len(self.alias.alias_tvars) - self.alias.tvar_tuple_index - 1 + start, middle, end = split_with_prefix_and_suffix(tuple(self.args), prefix, suffix) + tvar = self.alias.alias_tvars[prefix] + assert isinstance(tvar, TypeVarTupleType) + mapping = {tvar.id: TupleType(list(middle), tvar.tuple_fallback)} + for tvar, sub in zip( + self.alias.alias_tvars[:prefix] + self.alias.alias_tvars[prefix + 1 :], start + end + ): + mapping[tvar.id] = sub + + return self.alias.target.accept(InstantiateAliasVisitor(mapping)) + + @property + def is_recursive(self) -> bool: + """Whether this type alias is recursive. + + Note this doesn't check generic alias arguments, but only if this alias + *definition* is recursive. The property value thus can be cached on the + underlying TypeAlias node. If you want to include all nested types, use + has_recursive_types() function. + """ + assert self.alias is not None, "Unfixed type alias" + is_recursive = self.alias._is_recursive + if is_recursive is None: + is_recursive = self.alias in self.alias.target.accept(CollectAliasesVisitor()) + # We cache the value on the underlying TypeAlias node as an optimization, + # since the value is the same for all instances of the same alias. + self.alias._is_recursive = is_recursive + return is_recursive + + def can_be_true_default(self) -> bool: + if self.alias is not None: + return self.alias.target.can_be_true + return super().can_be_true_default() + + def can_be_false_default(self) -> bool: + if self.alias is not None: + return self.alias.target.can_be_false + return super().can_be_false_default() + + def copy_modified(self, *, args: list[Type] | None = None) -> TypeAliasType: + return TypeAliasType( + self.alias, args if args is not None else self.args.copy(), self.line, self.column + ) + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_type_alias_type(self) + + def __hash__(self) -> int: + return hash((self.alias, tuple(self.args))) + + def __eq__(self, other: object) -> bool: + # Note: never use this to determine subtype relationships, use is_subtype(). 
+ if not isinstance(other, TypeAliasType): + return NotImplemented + return self.alias == other.alias and self.args == other.args + + def serialize(self) -> JsonDict: + assert self.alias is not None + data: JsonDict = { + ".class": "TypeAliasType", + "type_ref": self.alias.fullname, + "args": [arg.serialize() for arg in self.args], + } + return data + + @classmethod + def deserialize(cls, data: JsonDict) -> TypeAliasType: + assert data[".class"] == "TypeAliasType" + args: list[Type] = [] + if "args" in data: + args_list = data["args"] + assert isinstance(args_list, list) + args = [deserialize_type(arg) for arg in args_list] + alias = TypeAliasType(None, args) + alias.type_ref = data["type_ref"] + return alias + + def write(self, data: WriteBuffer) -> None: + write_tag(data, TYPE_ALIAS_TYPE) + write_type_list(data, self.args) + assert self.alias is not None + write_str(data, self.alias.fullname) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> TypeAliasType: + alias = TypeAliasType(None, read_type_list(data)) + alias.type_ref = read_str(data) + assert read_tag(data) == END_TAG + return alias + + +class TypeGuardedType(Type): + """Only used by find_isinstance_check() etc.""" + + __slots__ = ("type_guard",) + + def __init__(self, type_guard: Type) -> None: + super().__init__(line=type_guard.line, column=type_guard.column) + self.type_guard = type_guard + + def __repr__(self) -> str: + return f"TypeGuard({self.type_guard})" + + # This may hide some real bugs, but it is convenient for various "synthetic" + # visitors, similar to RequiredType and ReadOnlyType below. + def accept(self, visitor: TypeVisitor[T]) -> T: + return self.type_guard.accept(visitor) + + +class RequiredType(Type): + """Required[T] or NotRequired[T]. Only usable at top-level of a TypedDict definition.""" + + def __init__(self, item: Type, *, required: bool) -> None: + super().__init__(line=item.line, column=item.column) + self.item = item + self.required = required + + def __repr__(self) -> str: + if self.required: + return f"Required[{self.item}]" + else: + return f"NotRequired[{self.item}]" + + def accept(self, visitor: TypeVisitor[T]) -> T: + return self.item.accept(visitor) + + +class ReadOnlyType(Type): + """ReadOnly[T] Only usable at top-level of a TypedDict definition.""" + + def __init__(self, item: Type) -> None: + super().__init__(line=item.line, column=item.column) + self.item = item + + def __repr__(self) -> str: + return f"ReadOnly[{self.item}]" + + def accept(self, visitor: TypeVisitor[T]) -> T: + return self.item.accept(visitor) + + +class ProperType(Type): + """Not a type alias. + + Every type except TypeAliasType must inherit from this type. + """ + + __slots__ = () + + +class TypeVarId: + # A type variable is uniquely identified by its raw id and meta level. + + # For plain variables (type parameters of generic classes and + # functions) raw ids are allocated by semantic analysis, using + # positive ids 1, 2, ... for generic class parameters and negative + # ids -1, ... for generic function type arguments. A special value 0 + # is reserved for Self type variable (autogenerated). This convention + # is only used to keep type variable ids distinct when allocating + # them; the type checker makes no distinction between class and + # function type variables. + + # Metavariables are allocated unique ids starting from 1. + raw_id: Final[int] + + # Level of the variable in type inference. Currently either 0 for + # declared types, or 1 for type inference metavariables. 
+ meta_level: int = 0 + + # Class variable used for allocating fresh ids for metavariables. + next_raw_id: ClassVar[int] = 1 + + # Fullname of class or function/method which declares this type + # variable (not the fullname of the TypeVar definition!), or '' + namespace: str + + def __init__(self, raw_id: int, meta_level: int = 0, *, namespace: str = "") -> None: + self.raw_id = raw_id + self.meta_level = meta_level + self.namespace = namespace + + @staticmethod + def new(meta_level: int) -> TypeVarId: + raw_id = TypeVarId.next_raw_id + TypeVarId.next_raw_id += 1 + return TypeVarId(raw_id, meta_level) + + def __repr__(self) -> str: + return self.raw_id.__repr__() + + def __eq__(self, other: object) -> bool: + # Although this call is not expensive (like UnionType or TypedDictType), + # most of the time we get the same object here, so add a fast path. + if self is other: + return True + return ( + isinstance(other, TypeVarId) + and self.raw_id == other.raw_id + and self.meta_level == other.meta_level + and self.namespace == other.namespace + ) + + def __ne__(self, other: object) -> bool: + return not (self == other) + + def __hash__(self) -> int: + return self.raw_id ^ (self.meta_level << 8) ^ hash(self.namespace) + + def is_meta_var(self) -> bool: + return self.meta_level > 0 + + def is_self(self) -> bool: + # This is a special value indicating typing.Self variable. + return self.raw_id == 0 + + +class TypeVarLikeType(ProperType): + __slots__ = ("name", "fullname", "id", "upper_bound", "default") + + name: str # Name (may be qualified) + fullname: str # Fully qualified name + id: TypeVarId + upper_bound: Type + default: Type + + def __init__( + self, + name: str, + fullname: str, + id: TypeVarId, + upper_bound: Type, + default: Type, + line: int = -1, + column: int = -1, + ) -> None: + super().__init__(line, column) + self.name = name + self.fullname = fullname + self.id = id + self.upper_bound = upper_bound + self.default = default + + def serialize(self) -> JsonDict: + raise NotImplementedError + + @classmethod + def deserialize(cls, data: JsonDict) -> TypeVarLikeType: + raise NotImplementedError + + def copy_modified(self, *, id: TypeVarId, **kwargs: Any) -> Self: + raise NotImplementedError + + @classmethod + def new_unification_variable(cls, old: Self) -> Self: + new_id = TypeVarId.new(meta_level=1) + return old.copy_modified(id=new_id) + + def has_default(self) -> bool: + t = get_proper_type(self.default) + return not (isinstance(t, AnyType) and t.type_of_any == TypeOfAny.from_omitted_generics) + + def values_or_bound(self) -> ProperType: + if isinstance(self, TypeVarType) and self.values: + return UnionType(self.values) + return get_proper_type(self.upper_bound) + + +class TypeVarType(TypeVarLikeType): + """Type that refers to a type variable.""" + + __slots__ = ("values", "variance") + + values: list[Type] # Value restriction, empty list if no restriction + variance: int + + def __init__( + self, + name: str, + fullname: str, + id: TypeVarId, + values: list[Type], + upper_bound: Type, + default: Type, + variance: int = INVARIANT, + line: int = -1, + column: int = -1, + ) -> None: + super().__init__(name, fullname, id, upper_bound, default, line, column) + assert values is not None, "No restrictions must be represented by empty list" + self.values = values + self.variance = variance + + def copy_modified( + self, + *, + values: Bogus[list[Type]] = _dummy, + upper_bound: Bogus[Type] = _dummy, + default: Bogus[Type] = _dummy, + id: Bogus[TypeVarId] = _dummy, + line: int = _dummy_int, + 
column: int = _dummy_int, + **kwargs: Any, + ) -> TypeVarType: + return TypeVarType( + name=self.name, + fullname=self.fullname, + id=self.id if id is _dummy else id, + values=self.values if values is _dummy else values, + upper_bound=self.upper_bound if upper_bound is _dummy else upper_bound, + default=self.default if default is _dummy else default, + variance=self.variance, + line=self.line if line == _dummy_int else line, + column=self.column if column == _dummy_int else column, + ) + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_type_var(self) + + def __hash__(self) -> int: + return hash((self.id, self.upper_bound, tuple(self.values))) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, TypeVarType): + return NotImplemented + return ( + self.id == other.id + and self.upper_bound == other.upper_bound + and self.values == other.values + ) + + def serialize(self) -> JsonDict: + assert not self.id.is_meta_var() + return { + ".class": "TypeVarType", + "name": self.name, + "fullname": self.fullname, + "id": self.id.raw_id, + "namespace": self.id.namespace, + "values": [v.serialize() for v in self.values], + "upper_bound": self.upper_bound.serialize(), + "default": self.default.serialize(), + "variance": self.variance, + } + + @classmethod + def deserialize(cls, data: JsonDict) -> TypeVarType: + assert data[".class"] == "TypeVarType" + return TypeVarType( + name=data["name"], + fullname=data["fullname"], + id=TypeVarId(data["id"], namespace=data["namespace"]), + values=[deserialize_type(v) for v in data["values"]], + upper_bound=deserialize_type(data["upper_bound"]), + default=deserialize_type(data["default"]), + variance=data["variance"], + ) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, TYPE_VAR_TYPE) + write_str(data, self.name) + write_str(data, self.fullname) + write_int(data, self.id.raw_id) + write_str(data, self.id.namespace) + write_type_list(data, self.values) + self.upper_bound.write(data) + self.default.write(data) + write_int(data, self.variance) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> TypeVarType: + ret = TypeVarType( + read_str(data), + read_str(data), + TypeVarId(read_int(data), namespace=read_str(data)), + read_type_list(data), + read_type(data), + read_type(data), + read_int(data), + ) + assert read_tag(data) == END_TAG + return ret + + +class ParamSpecFlavor: + # Simple ParamSpec reference such as "P" + BARE: Final = 0 + # P.args + ARGS: Final = 1 + # P.kwargs + KWARGS: Final = 2 + + +class ParamSpecType(TypeVarLikeType): + """Type that refers to a ParamSpec. + + A ParamSpec is a type variable that represents the parameter + types, names and kinds of a callable (i.e., the signature without + the return type). + + This can be one of these forms + * P (ParamSpecFlavor.BARE) + * P.args (ParamSpecFlavor.ARGS) + * P.kwargs (ParamSpecFLavor.KWARGS) + + The upper_bound is really used as a fallback type -- it's shared + with TypeVarType for simplicity. It can't be specified by the user + and the value is directly derived from the flavor (currently + always just 'object'). 
+ """ + + __slots__ = ("flavor", "prefix") + + flavor: int + prefix: Parameters + + def __init__( + self, + name: str, + fullname: str, + id: TypeVarId, + flavor: int, + upper_bound: Type, + default: Type, + *, + line: int = -1, + column: int = -1, + prefix: Parameters | None = None, + ) -> None: + super().__init__(name, fullname, id, upper_bound, default, line=line, column=column) + self.flavor = flavor + self.prefix = prefix or Parameters([], [], []) + + def with_flavor(self, flavor: int) -> ParamSpecType: + return ParamSpecType( + self.name, + self.fullname, + self.id, + flavor, + upper_bound=self.upper_bound, + default=self.default, + prefix=self.prefix, + ) + + def copy_modified( + self, + *, + id: Bogus[TypeVarId] = _dummy, + flavor: int = _dummy_int, + prefix: Bogus[Parameters] = _dummy, + default: Bogus[Type] = _dummy, + **kwargs: Any, + ) -> ParamSpecType: + return ParamSpecType( + self.name, + self.fullname, + id if id is not _dummy else self.id, + flavor if flavor != _dummy_int else self.flavor, + self.upper_bound, + default=default if default is not _dummy else self.default, + line=self.line, + column=self.column, + prefix=prefix if prefix is not _dummy else self.prefix, + ) + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_param_spec(self) + + def name_with_suffix(self) -> str: + n = self.name + if self.flavor == ParamSpecFlavor.ARGS: + return f"{n}.args" + elif self.flavor == ParamSpecFlavor.KWARGS: + return f"{n}.kwargs" + return n + + def __hash__(self) -> int: + return hash((self.id, self.flavor, self.prefix)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, ParamSpecType): + return NotImplemented + # Upper bound can be ignored, since it's determined by flavor. + return self.id == other.id and self.flavor == other.flavor and self.prefix == other.prefix + + def serialize(self) -> JsonDict: + assert not self.id.is_meta_var() + return { + ".class": "ParamSpecType", + "name": self.name, + "fullname": self.fullname, + "id": self.id.raw_id, + "namespace": self.id.namespace, + "flavor": self.flavor, + "upper_bound": self.upper_bound.serialize(), + "default": self.default.serialize(), + "prefix": self.prefix.serialize(), + } + + @classmethod + def deserialize(cls, data: JsonDict) -> ParamSpecType: + assert data[".class"] == "ParamSpecType" + return ParamSpecType( + data["name"], + data["fullname"], + TypeVarId(data["id"], namespace=data["namespace"]), + data["flavor"], + deserialize_type(data["upper_bound"]), + deserialize_type(data["default"]), + prefix=Parameters.deserialize(data["prefix"]), + ) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, PARAM_SPEC_TYPE) + self.prefix.write(data) + write_str(data, self.name) + write_str(data, self.fullname) + write_int(data, self.id.raw_id) + write_str(data, self.id.namespace) + write_int(data, self.flavor) + self.upper_bound.write(data) + self.default.write(data) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> ParamSpecType: + assert read_tag(data) == PARAMETERS + prefix = Parameters.read(data) + ret = ParamSpecType( + read_str(data), + read_str(data), + TypeVarId(read_int(data), namespace=read_str(data)), + read_int(data), + read_type(data), + read_type(data), + prefix=prefix, + ) + assert read_tag(data) == END_TAG + return ret + + +class TypeVarTupleType(TypeVarLikeType): + """Type that refers to a TypeVarTuple. + + See PEP646 for more information. 
+ """ + + __slots__ = ("tuple_fallback", "min_len") + + def __init__( + self, + name: str, + fullname: str, + id: TypeVarId, + upper_bound: Type, + tuple_fallback: Instance, + default: Type, + *, + line: int = -1, + column: int = -1, + min_len: int = 0, + ) -> None: + super().__init__(name, fullname, id, upper_bound, default, line=line, column=column) + self.tuple_fallback = tuple_fallback + # This value is not settable by a user. It is an internal-only thing to support + # len()-narrowing of variadic tuples. + self.min_len = min_len + + def serialize(self) -> JsonDict: + assert not self.id.is_meta_var() + return { + ".class": "TypeVarTupleType", + "name": self.name, + "fullname": self.fullname, + "id": self.id.raw_id, + "namespace": self.id.namespace, + "upper_bound": self.upper_bound.serialize(), + "tuple_fallback": self.tuple_fallback.serialize(), + "default": self.default.serialize(), + "min_len": self.min_len, + } + + @classmethod + def deserialize(cls, data: JsonDict) -> TypeVarTupleType: + assert data[".class"] == "TypeVarTupleType" + return TypeVarTupleType( + data["name"], + data["fullname"], + TypeVarId(data["id"], namespace=data["namespace"]), + deserialize_type(data["upper_bound"]), + Instance.deserialize(data["tuple_fallback"]), + deserialize_type(data["default"]), + min_len=data["min_len"], + ) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, TYPE_VAR_TUPLE_TYPE) + self.tuple_fallback.write(data) + write_str(data, self.name) + write_str(data, self.fullname) + write_int(data, self.id.raw_id) + write_str(data, self.id.namespace) + self.upper_bound.write(data) + self.default.write(data) + write_int(data, self.min_len) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> TypeVarTupleType: + assert read_tag(data) == INSTANCE + fallback = Instance.read(data) + ret = TypeVarTupleType( + read_str(data), + read_str(data), + TypeVarId(read_int(data), namespace=read_str(data)), + read_type(data), + fallback, + read_type(data), + min_len=read_int(data), + ) + assert read_tag(data) == END_TAG + return ret + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_type_var_tuple(self) + + def __hash__(self) -> int: + return hash((self.id, self.min_len)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, TypeVarTupleType): + return NotImplemented + return self.id == other.id and self.min_len == other.min_len + + def copy_modified( + self, + *, + id: Bogus[TypeVarId] = _dummy, + upper_bound: Bogus[Type] = _dummy, + default: Bogus[Type] = _dummy, + min_len: Bogus[int] = _dummy, + **kwargs: Any, + ) -> TypeVarTupleType: + return TypeVarTupleType( + self.name, + self.fullname, + self.id if id is _dummy else id, + self.upper_bound if upper_bound is _dummy else upper_bound, + self.tuple_fallback, + self.default if default is _dummy else default, + line=self.line, + column=self.column, + min_len=self.min_len if min_len is _dummy else min_len, + ) + + +class UnboundType(ProperType): + """Instance type that has not been bound during semantic analysis.""" + + __slots__ = ( + "name", + "args", + "optional", + "empty_tuple_index", + "original_str_expr", + "original_str_fallback", + ) + + def __init__( + self, + name: str, + args: Sequence[Type] | None = None, + line: int = -1, + column: int = -1, + optional: bool = False, + empty_tuple_index: bool = False, + original_str_expr: str | None = None, + original_str_fallback: str | None = None, + ) -> None: + super().__init__(line, column) + if not args: + args = [] + self.name 
= name + self.args = tuple(args) + # Should this type be wrapped in an Optional? + self.optional = optional + # Special case for X[()] + self.empty_tuple_index = empty_tuple_index + # If this UnboundType was originally defined as a str or bytes, keep track of + # the original contents of that string-like thing. This way, if this UnboundExpr + # ever shows up inside of a LiteralType, we can determine whether that + # Literal[...] is valid or not. E.g. Literal[foo] is most likely invalid + # (unless 'foo' is an alias for another literal or something) and + # Literal["foo"] most likely is. + # + # We keep track of the entire string instead of just using a boolean flag + # so we can distinguish between things like Literal["foo"] vs + # Literal[" foo "]. + # + # We also keep track of what the original base fallback type was supposed to be + # so we don't have to try and recompute it later + self.original_str_expr = original_str_expr + self.original_str_fallback = original_str_fallback + + def copy_modified(self, args: Bogus[Sequence[Type] | None] = _dummy) -> UnboundType: + if args is _dummy: + args = self.args + return UnboundType( + name=self.name, + args=args, + line=self.line, + column=self.column, + optional=self.optional, + empty_tuple_index=self.empty_tuple_index, + original_str_expr=self.original_str_expr, + original_str_fallback=self.original_str_fallback, + ) + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_unbound_type(self) + + def __hash__(self) -> int: + return hash((self.name, self.optional, tuple(self.args), self.original_str_expr)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, UnboundType): + return NotImplemented + return ( + self.name == other.name + and self.optional == other.optional + and self.args == other.args + and self.original_str_expr == other.original_str_expr + and self.original_str_fallback == other.original_str_fallback + ) + + def serialize(self) -> JsonDict: + return { + ".class": "UnboundType", + "name": self.name, + "args": [a.serialize() for a in self.args], + "expr": self.original_str_expr, + "expr_fallback": self.original_str_fallback, + } + + @classmethod + def deserialize(cls, data: JsonDict) -> UnboundType: + assert data[".class"] == "UnboundType" + return UnboundType( + data["name"], + [deserialize_type(a) for a in data["args"]], + original_str_expr=data["expr"], + original_str_fallback=data["expr_fallback"], + ) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, UNBOUND_TYPE) + write_str(data, self.name) + write_type_list(data, self.args) + write_str_opt(data, self.original_str_expr) + write_str_opt(data, self.original_str_fallback) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> UnboundType: + ret = UnboundType( + read_str(data), + read_type_list(data), + original_str_expr=read_str_opt(data), + original_str_fallback=read_str_opt(data), + ) + assert read_tag(data) == END_TAG + return ret + + +class CallableArgument(ProperType): + """Represents a Arg(type, 'name') inside a Callable's type list. + + Note that this is a synthetic type for helping parse ASTs, not a real type. 
+ """ + + __slots__ = ("typ", "name", "constructor") + + typ: Type + name: str | None + constructor: str | None + + def __init__( + self, + typ: Type, + name: str | None, + constructor: str | None, + line: int = -1, + column: int = -1, + ) -> None: + super().__init__(line, column) + self.typ = typ + self.name = name + self.constructor = constructor + + def accept(self, visitor: TypeVisitor[T]) -> T: + assert isinstance(visitor, SyntheticTypeVisitor) + ret: T = visitor.visit_callable_argument(self) + return ret + + def serialize(self) -> JsonDict: + assert False, "Synthetic types don't serialize" + + +class TypeList(ProperType): + """Information about argument types and names [...]. + + This is used for the arguments of a Callable type, i.e. for + [arg, ...] in Callable[[arg, ...], ret]. This is not a real type + but a syntactic AST construct. UnboundTypes can also have TypeList + types before they are processed into Callable types. + """ + + __slots__ = ("items",) + + items: list[Type] + + def __init__(self, items: list[Type], line: int = -1, column: int = -1) -> None: + super().__init__(line, column) + self.items = items + + def accept(self, visitor: TypeVisitor[T]) -> T: + assert isinstance(visitor, SyntheticTypeVisitor) + ret: T = visitor.visit_type_list(self) + return ret + + def serialize(self) -> JsonDict: + assert False, "Synthetic types don't serialize" + + def __hash__(self) -> int: + return hash(tuple(self.items)) + + def __eq__(self, other: object) -> bool: + return isinstance(other, TypeList) and self.items == other.items + + +class UnpackType(ProperType): + """Type operator Unpack from PEP646. Can be either with Unpack[] + or unpacking * syntax. + + The inner type should be either a TypeVarTuple, or a variable length tuple. + In an exceptional case of callable star argument it can be a fixed length tuple. + + Note: the above restrictions are only guaranteed by normalizations after semantic + analysis, if your code needs to handle UnpackType *during* semantic analysis, it is + wild west, technically anything can be present in the wrapped type. 
+ """ + + __slots__ = ["type", "from_star_syntax"] + + def __init__( + self, typ: Type, line: int = -1, column: int = -1, from_star_syntax: bool = False + ) -> None: + super().__init__(line, column) + self.type = typ + self.from_star_syntax = from_star_syntax + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_unpack_type(self) + + def serialize(self) -> JsonDict: + return {".class": "UnpackType", "type": self.type.serialize()} + + def write(self, data: WriteBuffer) -> None: + write_tag(data, UNPACK_TYPE) + self.type.write(data) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> UnpackType: + ret = UnpackType(read_type(data)) + assert read_tag(data) == END_TAG + return ret + + @classmethod + def deserialize(cls, data: JsonDict) -> UnpackType: + assert data[".class"] == "UnpackType" + typ = data["type"] + return UnpackType(deserialize_type(typ)) + + def __hash__(self) -> int: + return hash(self.type) + + def __eq__(self, other: object) -> bool: + return isinstance(other, UnpackType) and self.type == other.type + + +class AnyType(ProperType): + """The type 'Any'.""" + + __slots__ = ("type_of_any", "source_any", "missing_import_name") + + def __init__( + self, + type_of_any: int, + source_any: AnyType | None = None, + missing_import_name: str | None = None, + line: int = -1, + column: int = -1, + ) -> None: + super().__init__(line, column) + self.type_of_any = type_of_any + # If this Any was created as a result of interacting with another 'Any', record the source + # and use it in reports. + self.source_any = source_any + if source_any and source_any.source_any: + self.source_any = source_any.source_any + + if source_any is None: + self.missing_import_name = missing_import_name + else: + self.missing_import_name = source_any.missing_import_name + + # Only unimported type anys and anys from other anys should have an import name + assert missing_import_name is None or type_of_any in ( + TypeOfAny.from_unimported_type, + TypeOfAny.from_another_any, + ) + # Only Anys that come from another Any can have source_any. + assert type_of_any != TypeOfAny.from_another_any or source_any is not None + # We should not have chains of Anys. 
+ assert not self.source_any or self.source_any.type_of_any != TypeOfAny.from_another_any + + @property + def is_from_error(self) -> bool: + return self.type_of_any == TypeOfAny.from_error + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_any(self) + + def copy_modified( + self, + # Mark with Bogus because _dummy is just an object (with type Any) + type_of_any: int = _dummy_int, + original_any: Bogus[AnyType | None] = _dummy, + missing_import_name: Bogus[str | None] = _dummy, + ) -> AnyType: + if type_of_any == _dummy_int: + type_of_any = self.type_of_any + if original_any is _dummy: + original_any = self.source_any + if missing_import_name is _dummy: + missing_import_name = self.missing_import_name + return AnyType( + type_of_any=type_of_any, + source_any=original_any, + missing_import_name=missing_import_name, + line=self.line, + column=self.column, + ) + + def __hash__(self) -> int: + return hash(AnyType) + + def __eq__(self, other: object) -> bool: + return isinstance(other, AnyType) + + def serialize(self) -> JsonDict: + return { + ".class": "AnyType", + "type_of_any": self.type_of_any, + "source_any": self.source_any.serialize() if self.source_any is not None else None, + "missing_import_name": self.missing_import_name, + } + + @classmethod + def deserialize(cls, data: JsonDict) -> AnyType: + assert data[".class"] == "AnyType" + source = data["source_any"] + return AnyType( + data["type_of_any"], + AnyType.deserialize(source) if source is not None else None, + data["missing_import_name"], + ) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, ANY_TYPE) + write_type_opt(data, self.source_any) + write_int(data, self.type_of_any) + write_str_opt(data, self.missing_import_name) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> AnyType: + tag = read_tag(data) + if tag != LITERAL_NONE: + assert tag == ANY_TYPE + source_any = AnyType.read(data) + else: + source_any = None + ret = AnyType(read_int(data), source_any, read_str_opt(data)) + assert read_tag(data) == END_TAG + return ret + + +class UninhabitedType(ProperType): + """This type has no members. + + This type is the bottom type. + With strict Optional checking, it is the only common subtype between all + other types, which allows `meet` to be well defined. Without strict + Optional checking, NoneType fills this role. + + In general, for any type T: + join(UninhabitedType, T) = T + meet(UninhabitedType, T) = UninhabitedType + is_subtype(UninhabitedType, T) = True + """ + + __slots__ = ("ambiguous",) + + ambiguous: bool # Is this a result of inference for a variable without constraints? 
+ + def __init__(self, line: int = -1, column: int = -1) -> None: + super().__init__(line, column) + self.ambiguous = False + + def can_be_true_default(self) -> bool: + return False + + def can_be_false_default(self) -> bool: + return False + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_uninhabited_type(self) + + def __hash__(self) -> int: + return hash((UninhabitedType, self.ambiguous)) + + def __eq__(self, other: object) -> bool: + return isinstance(other, UninhabitedType) and other.ambiguous == self.ambiguous + + def serialize(self) -> JsonDict: + return {".class": "UninhabitedType"} + + @classmethod + def deserialize(cls, data: JsonDict) -> UninhabitedType: + assert data[".class"] == "UninhabitedType" + return UninhabitedType() + + def write(self, data: WriteBuffer) -> None: + write_tag(data, UNINHABITED_TYPE) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> UninhabitedType: + assert read_tag(data) == END_TAG + return UninhabitedType() + + +class NoneType(ProperType): + """The type of 'None'. + + This type can be written by users as 'None'. + """ + + __slots__ = () + + def __init__(self, line: int = -1, column: int = -1) -> None: + super().__init__(line, column) + + def can_be_true_default(self) -> bool: + return False + + def __hash__(self) -> int: + return hash(NoneType) + + def __eq__(self, other: object) -> bool: + return isinstance(other, NoneType) + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_none_type(self) + + def serialize(self) -> JsonDict: + return {".class": "NoneType"} + + @classmethod + def deserialize(cls, data: JsonDict) -> NoneType: + assert data[".class"] == "NoneType" + return NoneType() + + def write(self, data: WriteBuffer) -> None: + write_tag(data, NONE_TYPE) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> NoneType: + assert read_tag(data) == END_TAG + return NoneType() + + def is_singleton_type(self) -> bool: + return True + + +# NoneType used to be called NoneTyp so to avoid needlessly breaking +# external plugins we keep that alias here. +NoneTyp = NoneType + + +class ErasedType(ProperType): + """Placeholder for an erased type. + + This is used during type inference. This has the special property that + it is ignored during type inference. + """ + + __slots__ = () + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_erased_type(self) + + +class DeletedType(ProperType): + """Type of deleted variables. + + These can be used as lvalues but not rvalues. + """ + + __slots__ = ("source",) + + source: str | None # May be None; name that generated this value + + def __init__(self, source: str | None = None, line: int = -1, column: int = -1) -> None: + super().__init__(line, column) + self.source = source + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_deleted_type(self) + + def serialize(self) -> JsonDict: + return {".class": "DeletedType", "source": self.source} + + @classmethod + def deserialize(cls, data: JsonDict) -> DeletedType: + assert data[".class"] == "DeletedType" + return DeletedType(data["source"]) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, DELETED_TYPE) + write_str_opt(data, self.source) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> DeletedType: + ret = DeletedType(read_str_opt(data)) + assert read_tag(data) == END_TAG + return ret + + +# Fake TypeInfo to be used as a placeholder during Instance de-serialization. 
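+# The placeholder is replaced with the real TypeInfo by fixup.py after all modules load.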
+NOT_READY: Final = mypy.nodes.FakeInfo("De-serialization failure: TypeInfo not fixed") + + +class ExtraAttrs: + """Summary of module attributes and types. + + This is used for instances of types.ModuleType, because they can have different + attributes per instance, and for type narrowing with hasattr() checks. + """ + + def __init__( + self, + attrs: dict[str, Type], + immutable: set[str] | None = None, + mod_name: str | None = None, + ) -> None: + self.attrs = attrs + if immutable is None: + immutable = set() + self.immutable = immutable + self.mod_name = mod_name + + def __hash__(self) -> int: + return hash((tuple(self.attrs.items()), tuple(sorted(self.immutable)))) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, ExtraAttrs): + return NotImplemented + return self.attrs == other.attrs and self.immutable == other.immutable + + def copy(self) -> ExtraAttrs: + return ExtraAttrs(self.attrs.copy(), self.immutable.copy(), self.mod_name) + + def __repr__(self) -> str: + return f"ExtraAttrs({self.attrs!r}, {self.immutable!r}, {self.mod_name!r})" + + def serialize(self) -> JsonDict: + return { + ".class": "ExtraAttrs", + "attrs": {k: v.serialize() for k, v in self.attrs.items()}, + "immutable": sorted(self.immutable), + "mod_name": self.mod_name, + } + + @classmethod + def deserialize(cls, data: JsonDict) -> ExtraAttrs: + assert data[".class"] == "ExtraAttrs" + return ExtraAttrs( + {k: deserialize_type(v) for k, v in data["attrs"].items()}, + set(data["immutable"]), + data["mod_name"], + ) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, EXTRA_ATTRS) + write_type_map(data, self.attrs) + write_str_list(data, sorted(self.immutable)) + write_str_opt(data, self.mod_name) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> ExtraAttrs: + ret = ExtraAttrs(read_type_map(data), set(read_str_list(data)), read_str_opt(data)) + assert read_tag(data) == END_TAG + return ret + + +class Instance(ProperType): + """An instance type of form C[T1, ..., Tn]. + + The list of type variables may be empty. + + Several types have fallbacks to `Instance`, because in Python everything is an object + and this concept is impossible to express without intersection types. We therefore use + fallbacks for all "non-special" (like UninhabitedType, ErasedType etc) types. + """ + + __slots__ = ("type", "args", "invalid", "type_ref", "last_known_value", "_hash", "extra_attrs") + + def __init__( + self, + typ: mypy.nodes.TypeInfo, + args: Sequence[Type], + line: int = -1, + column: int = -1, + *, + last_known_value: LiteralType | None = None, + extra_attrs: ExtraAttrs | None = None, + ) -> None: + super().__init__(line, column) + self.type = typ + self.args = tuple(args) + self.type_ref: str | None = None + + # True if recovered after incorrect number of type arguments error + self.invalid = False + + # This field keeps track of the underlying Literal[...] value associated with + # this instance, if one is known. + # + # This field is set whenever possible within expressions, but is erased upon + # variable assignment (see erasetype.remove_instance_last_known_values) unless + # the variable is declared to be final. + # + # For example, consider the following program: + # + # a = 1 + # b: Final[int] = 2 + # c: Final = 3 + # print(a + b + c + 4) + # + # The 'Instance' objects associated with the expressions '1', '2', '3', and '4' will + # have last_known_values of type Literal[1], Literal[2], Literal[3], and Literal[4] + # respectively. 
However, the Instance object assigned to 'a' and 'b' will have their + # last_known_value erased: variable 'a' is mutable; variable 'b' was declared to be + # specifically an int. + # + # Or more broadly, this field lets this Instance "remember" its original declaration + # when applicable. We want this behavior because we want implicit Final declarations + # to act pretty much identically with constants: we should be able to replace any + # places where we use some Final variable with the original value and get the same + # type-checking behavior. For example, we want this program: + # + # def expects_literal(x: Literal[3]) -> None: pass + # var: Final = 3 + # expects_literal(var) + # + # ...to type-check in the exact same way as if we had written the program like this: + # + # def expects_literal(x: Literal[3]) -> None: pass + # expects_literal(3) + # + # In order to make this work (especially with literal types), we need var's type + # (an Instance) to remember the "original" value. + # + # Preserving this value within expressions is useful for similar reasons. + # + # Currently most of mypy will ignore this field and will continue to treat this type like + # a regular Instance. We end up using this field only when we are explicitly within a + # Literal context. + self.last_known_value = last_known_value + + # Cached hash value + self._hash = -1 + + # Additional attributes defined per instance of this type. For example modules + # have different attributes per instance of types.ModuleType. + self.extra_attrs = extra_attrs + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_instance(self) + + def __hash__(self) -> int: + if self._hash == -1: + self._hash = hash((self.type, self.args, self.last_known_value, self.extra_attrs)) + return self._hash + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Instance): + return NotImplemented + return ( + self.type == other.type + and self.args == other.args + and self.last_known_value == other.last_known_value + and self.extra_attrs == other.extra_attrs + ) + + def serialize(self) -> JsonDict | str: + assert self.type is not None + type_ref = self.type.fullname + if not self.args and not self.last_known_value and not self.extra_attrs: + return type_ref + data: JsonDict = { + ".class": "Instance", + "type_ref": type_ref, + "args": [arg.serialize() for arg in self.args], + } + if self.last_known_value is not None: + data["last_known_value"] = self.last_known_value.serialize() + data["extra_attrs"] = self.extra_attrs.serialize() if self.extra_attrs else None + return data + + @classmethod + def deserialize(cls, data: JsonDict | str) -> Instance: + if isinstance(data, str): + inst = Instance(NOT_READY, []) + inst.type_ref = data + return inst + assert data[".class"] == "Instance" + args: list[Type] = [] + if "args" in data: + args_list = data["args"] + assert isinstance(args_list, list) + args = [deserialize_type(arg) for arg in args_list] + inst = Instance(NOT_READY, args) + inst.type_ref = data["type_ref"] # Will be fixed up by fixup.py later. 
+ if "last_known_value" in data: + inst.last_known_value = LiteralType.deserialize(data["last_known_value"]) + if data.get("extra_attrs") is not None: + inst.extra_attrs = ExtraAttrs.deserialize(data["extra_attrs"]) + return inst + + def write(self, data: WriteBuffer) -> None: + write_tag(data, INSTANCE) + if not self.args and not self.last_known_value and not self.extra_attrs: + type_ref = self.type.fullname + if type_ref == "builtins.str": + write_tag(data, INSTANCE_STR) + elif type_ref == "builtins.function": + write_tag(data, INSTANCE_FUNCTION) + elif type_ref == "builtins.int": + write_tag(data, INSTANCE_INT) + elif type_ref == "builtins.bool": + write_tag(data, INSTANCE_BOOL) + elif type_ref == "builtins.object": + write_tag(data, INSTANCE_OBJECT) + else: + write_tag(data, INSTANCE_SIMPLE) + write_str_bare(data, type_ref) + return + write_tag(data, INSTANCE_GENERIC) + write_str(data, self.type.fullname) + write_type_list(data, self.args) + write_type_opt(data, self.last_known_value) + if self.extra_attrs is None: + write_tag(data, LITERAL_NONE) + else: + self.extra_attrs.write(data) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> Instance: + tag = read_tag(data) + # This is quite verbose, but this is very hot code, so we are not + # using dictionary lookups here. + if tag == INSTANCE_STR: + if instance_cache.str_type is None: + instance_cache.str_type = Instance(NOT_READY, []) + instance_cache.str_type.type_ref = "builtins.str" + return instance_cache.str_type + if tag == INSTANCE_FUNCTION: + if instance_cache.function_type is None: + instance_cache.function_type = Instance(NOT_READY, []) + instance_cache.function_type.type_ref = "builtins.function" + return instance_cache.function_type + if tag == INSTANCE_INT: + if instance_cache.int_type is None: + instance_cache.int_type = Instance(NOT_READY, []) + instance_cache.int_type.type_ref = "builtins.int" + return instance_cache.int_type + if tag == INSTANCE_BOOL: + if instance_cache.bool_type is None: + instance_cache.bool_type = Instance(NOT_READY, []) + instance_cache.bool_type.type_ref = "builtins.bool" + return instance_cache.bool_type + if tag == INSTANCE_OBJECT: + if instance_cache.object_type is None: + instance_cache.object_type = Instance(NOT_READY, []) + instance_cache.object_type.type_ref = "builtins.object" + return instance_cache.object_type + if tag == INSTANCE_SIMPLE: + inst = Instance(NOT_READY, []) + inst.type_ref = read_str_bare(data) + return inst + assert tag == INSTANCE_GENERIC + type_ref = read_str(data) + inst = Instance(NOT_READY, read_type_list(data)) + inst.type_ref = type_ref + tag = read_tag(data) + if tag != LITERAL_NONE: + assert tag == LITERAL_TYPE + inst.last_known_value = LiteralType.read(data) + tag = read_tag(data) + if tag != LITERAL_NONE: + assert tag == EXTRA_ATTRS + inst.extra_attrs = ExtraAttrs.read(data) + assert read_tag(data) == END_TAG + return inst + + def copy_modified( + self, + *, + args: Bogus[list[Type]] = _dummy, + last_known_value: Bogus[LiteralType | None] = _dummy, + ) -> Instance: + new = Instance( + typ=self.type, + args=args if args is not _dummy else self.args, + line=self.line, + column=self.column, + last_known_value=( + last_known_value if last_known_value is not _dummy else self.last_known_value + ), + extra_attrs=self.extra_attrs, + ) + new.can_be_true = self.can_be_true + new.can_be_false = self.can_be_false + return new + + def copy_with_extra_attr(self, name: str, typ: Type) -> Instance: + if self.extra_attrs: + existing_attrs = 
self.extra_attrs.copy() + else: + existing_attrs = ExtraAttrs({}, set(), None) + existing_attrs.attrs[name] = typ + new = self.copy_modified() + new.extra_attrs = existing_attrs + return new + + def is_singleton_type(self) -> bool: + # TODO: + # Also make this return True if the type corresponds to NotImplemented? + return ( + self.type.is_enum + and len(self.type.enum_members) == 1 + or self.type.fullname in ELLIPSIS_TYPE_NAMES + ) + + +class InstanceCache: + def __init__(self) -> None: + self.str_type: Instance | None = None + self.function_type: Instance | None = None + self.int_type: Instance | None = None + self.bool_type: Instance | None = None + self.object_type: Instance | None = None + + def reset(self) -> None: + self.str_type = None + self.function_type = None + self.int_type = None + self.bool_type = None + self.object_type = None + + +instance_cache: Final = InstanceCache() + + +class FunctionLike(ProperType): + """Abstract base class for function types.""" + + __slots__ = ("fallback",) + + fallback: Instance + + def __init__(self, line: int = -1, column: int = -1) -> None: + super().__init__(line, column) + self._can_be_false = False + + @abstractmethod + def is_type_obj(self) -> bool: + pass + + @abstractmethod + def type_object(self) -> mypy.nodes.TypeInfo: + pass + + @property + @abstractmethod + def items(self) -> list[CallableType]: + pass + + @abstractmethod + def with_name(self, name: str) -> FunctionLike: + pass + + @abstractmethod + def get_name(self) -> str | None: + pass + + def bound(self) -> bool: + return bool(self.items) and self.items[0].is_bound + + +class FormalArgument: + def __init__(self, name: str | None, pos: int | None, typ: Type, required: bool) -> None: + self.name = name + self.pos = pos + self.typ = typ + self.required = required + + def __eq__(self, other: object) -> bool: + if not isinstance(other, FormalArgument): + return NotImplemented + return ( + self.name == other.name + and self.pos == other.pos + and self.typ == other.typ + and self.required == other.required + ) + + def __hash__(self) -> int: + return hash((self.name, self.pos, self.typ, self.required)) + + +class Parameters(ProperType): + """Type that represents the parameters to a function. + + Used for ParamSpec analysis. Note that by convention we handle this + type as a Callable without return type, not as a "tuple with names", + so that it behaves contravariantly, in particular [x: int] <: [int]. + """ + + __slots__ = ( + "arg_types", + "arg_kinds", + "arg_names", + "min_args", + "is_ellipsis_args", + # TODO: variables don't really belong here, but they are used to allow hacky support + # for forall . 
Foo[[x: T], T] by capturing generic callable with ParamSpec, see #15909 + "variables", + "imprecise_arg_kinds", + ) + + def __init__( + self, + arg_types: Sequence[Type], + arg_kinds: list[ArgKind], + arg_names: Sequence[str | None], + *, + variables: Sequence[TypeVarLikeType] | None = None, + is_ellipsis_args: bool = False, + imprecise_arg_kinds: bool = False, + line: int = -1, + column: int = -1, + ) -> None: + super().__init__(line, column) + self.arg_types = list(arg_types) + self.arg_kinds = arg_kinds + self.arg_names = list(arg_names) + assert len(arg_types) == len(arg_kinds) == len(arg_names) + assert not any(isinstance(t, Parameters) for t in arg_types) + self.min_args = arg_kinds.count(ARG_POS) + self.is_ellipsis_args = is_ellipsis_args + self.variables = variables or [] + self.imprecise_arg_kinds = imprecise_arg_kinds + + def copy_modified( + self, + arg_types: Bogus[Sequence[Type]] = _dummy, + arg_kinds: Bogus[list[ArgKind]] = _dummy, + arg_names: Bogus[Sequence[str | None]] = _dummy, + *, + variables: Bogus[Sequence[TypeVarLikeType]] = _dummy, + is_ellipsis_args: Bogus[bool] = _dummy, + imprecise_arg_kinds: Bogus[bool] = _dummy, + ) -> Parameters: + return Parameters( + arg_types=arg_types if arg_types is not _dummy else self.arg_types, + arg_kinds=arg_kinds if arg_kinds is not _dummy else self.arg_kinds, + arg_names=arg_names if arg_names is not _dummy else self.arg_names, + is_ellipsis_args=( + is_ellipsis_args if is_ellipsis_args is not _dummy else self.is_ellipsis_args + ), + variables=variables if variables is not _dummy else self.variables, + imprecise_arg_kinds=( + imprecise_arg_kinds + if imprecise_arg_kinds is not _dummy + else self.imprecise_arg_kinds + ), + ) + + # TODO: here is a lot of code duplication with Callable type, fix this. + def var_arg(self) -> FormalArgument | None: + """The formal argument for *args.""" + for position, (type, kind) in enumerate(zip(self.arg_types, self.arg_kinds)): + if kind == ARG_STAR: + return FormalArgument(None, position, type, False) + return None + + def kw_arg(self) -> FormalArgument | None: + """The formal argument for **kwargs.""" + for position, (type, kind) in enumerate(zip(self.arg_types, self.arg_kinds)): + if kind == ARG_STAR2: + return FormalArgument(None, position, type, False) + return None + + def formal_arguments(self, include_star_args: bool = False) -> list[FormalArgument]: + """Yields the formal arguments corresponding to this callable, ignoring *arg and **kwargs. + + To handle *args and **kwargs, use the 'callable.var_args' and 'callable.kw_args' fields, + if they are not None. + + If you really want to include star args in the yielded output, set the + 'include_star_args' parameter to 'True'.""" + args = [] + done_with_positional = False + for i in range(len(self.arg_types)): + kind = self.arg_kinds[i] + if kind.is_named() or kind.is_star(): + done_with_positional = True + if not include_star_args and kind.is_star(): + continue + + required = kind.is_required() + pos = None if done_with_positional else i + arg = FormalArgument(self.arg_names[i], pos, self.arg_types[i], required) + args.append(arg) + return args + + def argument_by_name(self, name: str | None) -> FormalArgument | None: + if name is None: + return None + seen_star = False + for i, (arg_name, kind, typ) in enumerate( + zip(self.arg_names, self.arg_kinds, self.arg_types) + ): + # No more positional arguments after these. 
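+            # Once a star or keyword-only argument has been seen, a matching
+            # argument no longer has a usable position ('position = None' below).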
+ if kind.is_named() or kind.is_star(): + seen_star = True + if kind.is_star(): + continue + if arg_name == name: + position = None if seen_star else i + return FormalArgument(name, position, typ, kind.is_required()) + return self.try_synthesizing_arg_from_kwarg(name) + + def argument_by_position(self, position: int | None) -> FormalArgument | None: + if position is None: + return None + if position >= len(self.arg_names): + return self.try_synthesizing_arg_from_vararg(position) + name, kind, typ = ( + self.arg_names[position], + self.arg_kinds[position], + self.arg_types[position], + ) + if kind.is_positional(): + return FormalArgument(name, position, typ, kind == ARG_POS) + else: + return self.try_synthesizing_arg_from_vararg(position) + + def try_synthesizing_arg_from_kwarg(self, name: str | None) -> FormalArgument | None: + kw_arg = self.kw_arg() + if kw_arg is not None: + return FormalArgument(name, None, kw_arg.typ, False) + else: + return None + + def try_synthesizing_arg_from_vararg(self, position: int | None) -> FormalArgument | None: + var_arg = self.var_arg() + if var_arg is not None: + return FormalArgument(None, position, var_arg.typ, False) + else: + return None + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_parameters(self) + + def serialize(self) -> JsonDict: + return { + ".class": "Parameters", + "arg_types": [t.serialize() for t in self.arg_types], + "arg_kinds": [int(x.value) for x in self.arg_kinds], + "arg_names": self.arg_names, + "variables": [tv.serialize() for tv in self.variables], + "imprecise_arg_kinds": self.imprecise_arg_kinds, + } + + @classmethod + def deserialize(cls, data: JsonDict) -> Parameters: + assert data[".class"] == "Parameters" + return Parameters( + [deserialize_type(t) for t in data["arg_types"]], + # This is a micro-optimization until mypyc gets dedicated enum support. Otherwise, + # we would spend ~20% of types deserialization time in Enum.__call__(). + [ARG_KINDS[x] for x in data["arg_kinds"]], + data["arg_names"], + variables=[cast(TypeVarLikeType, deserialize_type(v)) for v in data["variables"]], + imprecise_arg_kinds=data["imprecise_arg_kinds"], + ) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, PARAMETERS) + write_type_list(data, self.arg_types) + write_int_list(data, [int(x.value) for x in self.arg_kinds]) + write_str_opt_list(data, self.arg_names) + write_type_list(data, self.variables) + write_bool(data, self.imprecise_arg_kinds) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> Parameters: + ret = Parameters( + read_type_list(data), + # This is a micro-optimization until mypyc gets dedicated enum support. Otherwise, + # we would spend ~20% of types deserialization time in Enum.__call__(). 
+ [ARG_KINDS[ak] for ak in read_int_list(data)], + read_str_opt_list(data), + variables=read_type_var_likes(data), + imprecise_arg_kinds=read_bool(data), + ) + assert read_tag(data) == END_TAG + return ret + + def __hash__(self) -> int: + return hash( + ( + self.is_ellipsis_args, + tuple(self.arg_types), + tuple(self.arg_names), + tuple(self.arg_kinds), + ) + ) + + def __eq__(self, other: object) -> bool: + if isinstance(other, Parameters): + return ( + self.arg_types == other.arg_types + and self.arg_names == other.arg_names + and self.arg_kinds == other.arg_kinds + and self.is_ellipsis_args == other.is_ellipsis_args + ) + else: + return NotImplemented + + +CT = TypeVar("CT", bound="CallableType") + + +class CallableType(FunctionLike): + """Type of a non-overloaded callable object (such as function).""" + + __slots__ = ( + "arg_types", # Types of function arguments + "arg_kinds", # ARG_ constants + "arg_names", # Argument names; None if not a keyword argument + "min_args", # Minimum number of arguments; derived from arg_kinds + "ret_type", # Return value type + "name", # Name (may be None; for error messages and plugins) + "definition", # For error messages. May be None. + "variables", # Type variables for a generic function + "is_ellipsis_args", # Is this Callable[..., t] (with literal '...')? + "implicit", # Was this type implicitly generated instead of explicitly + # specified by the user? + "special_sig", # Non-None for signatures that require special handling + # (currently only values are 'dict' for a signature similar to + # 'dict' and 'partial' for a `functools.partial` evaluation) + "from_type_type", # Was this callable generated by analyzing Type[...] + # instantiation? + "is_bound", # Is this a bound method? + "type_guard", # T, if -> TypeGuard[T] (ret_type is bool in this case). + "type_is", # T, if -> TypeIs[T] (ret_type is bool in this case). + "from_concatenate", # whether this callable is from a concatenate object + # (this is used for error messages) + "imprecise_arg_kinds", + "unpack_kwargs", # Was an Unpack[...] with **kwargs used to define this callable? + ) + + def __init__( + self, + # maybe this should be refactored to take a Parameters object + arg_types: Sequence[Type], + arg_kinds: list[ArgKind], + arg_names: Sequence[str | None], + ret_type: Type, + fallback: Instance, + name: str | None = None, + definition: SymbolNode | None = None, + variables: Sequence[TypeVarLikeType] | None = None, + line: int = -1, + column: int = -1, + is_ellipsis_args: bool = False, + implicit: bool = False, + special_sig: str | None = None, + from_type_type: bool = False, + is_bound: bool = False, + type_guard: Type | None = None, + type_is: Type | None = None, + from_concatenate: bool = False, + imprecise_arg_kinds: bool = False, + unpack_kwargs: bool = False, + ) -> None: + super().__init__(line, column) + assert len(arg_types) == len(arg_kinds) == len(arg_names) + self.arg_types = list(arg_types) + for t in self.arg_types: + if isinstance(t, ParamSpecType): + assert not t.prefix.arg_types + # TODO: should we assert that only ARG_STAR contain ParamSpecType? + # See testParamSpecJoin, that relies on passing e.g `P.args` as plain argument. 
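+        # The remaining attributes are stored as given; note that min_args counts
+        # only required positional (ARG_POS) arguments.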
+        self.arg_kinds = arg_kinds
+        self.arg_names = list(arg_names)
+        self.min_args = arg_kinds.count(ARG_POS)
+        self.ret_type = ret_type
+        self.fallback = fallback
+        assert not name or "<bound method" not in name
+        self.name = name
+        self.definition = definition
+        self.variables = variables or []
+        self.is_ellipsis_args = is_ellipsis_args
+        self.implicit = implicit
+        self.special_sig = special_sig
+        self.from_type_type = from_type_type
+        self.is_bound = is_bound
+        self.type_guard = type_guard
+        self.type_is = type_is
+        self.from_concatenate = from_concatenate
+        self.imprecise_arg_kinds = imprecise_arg_kinds
+        self.unpack_kwargs = unpack_kwargs
+
+    def copy_modified(
+        self: CT,
+        arg_types: Bogus[Sequence[Type]] = _dummy,
+        arg_kinds: Bogus[list[ArgKind]] = _dummy,
+        arg_names: Bogus[Sequence[str | None]] = _dummy,
+        ret_type: Bogus[Type] = _dummy,
+        fallback: Bogus[Instance] = _dummy,
+        name: Bogus[str | None] = _dummy,
+        definition: Bogus[SymbolNode] = _dummy,
+        variables: Bogus[Sequence[TypeVarLikeType]] = _dummy,
+        line: int = _dummy_int,
+        column: int = _dummy_int,
+        is_ellipsis_args: Bogus[bool] = _dummy,
+        implicit: Bogus[bool] = _dummy,
+        special_sig: Bogus[str | None] = _dummy,
+        from_type_type: Bogus[bool] = _dummy,
+        is_bound: Bogus[bool] = _dummy,
+        type_guard: Bogus[Type | None] = _dummy,
+        type_is: Bogus[Type | None] = _dummy,
+        from_concatenate: Bogus[bool] = _dummy,
+        imprecise_arg_kinds: Bogus[bool] = _dummy,
+        unpack_kwargs: Bogus[bool] = _dummy,
+    ) -> CT:
+        modified = CallableType(
+            arg_types=arg_types if arg_types is not _dummy else self.arg_types,
+            arg_kinds=arg_kinds if arg_kinds is not _dummy else self.arg_kinds,
+            arg_names=arg_names if arg_names is not _dummy else self.arg_names,
+            ret_type=ret_type if ret_type is not _dummy else self.ret_type,
+            fallback=fallback if fallback is not _dummy else self.fallback,
+            name=name if name is not _dummy else self.name,
+            definition=definition if definition is not _dummy else self.definition,
+            variables=variables if variables is not _dummy else self.variables,
+            line=line if line != _dummy_int else self.line,
+            column=column if column != _dummy_int else self.column,
+            is_ellipsis_args=(
+                is_ellipsis_args if is_ellipsis_args is not _dummy else self.is_ellipsis_args
+            ),
+            implicit=implicit if implicit is not _dummy else self.implicit,
+            special_sig=special_sig if special_sig is not _dummy else self.special_sig,
+            from_type_type=from_type_type if from_type_type is not _dummy else self.from_type_type,
+            is_bound=is_bound if is_bound is not _dummy else self.is_bound,
+            type_guard=type_guard if type_guard is not _dummy else self.type_guard,
+            type_is=type_is if type_is is not _dummy else self.type_is,
+            from_concatenate=(
+                from_concatenate if from_concatenate is not _dummy else self.from_concatenate
+            ),
+            imprecise_arg_kinds=(
+                imprecise_arg_kinds
+                if imprecise_arg_kinds is not _dummy
+                else self.imprecise_arg_kinds
+            ),
+            unpack_kwargs=unpack_kwargs if unpack_kwargs is not _dummy else self.unpack_kwargs,
+        )
+        # Optimization: Only NewTypes are supported as subtypes since
+        # the class is effectively final, so we can use a cast safely.
+ return cast(CT, modified) + + def var_arg(self) -> FormalArgument | None: + """The formal argument for *args.""" + for position, (type, kind) in enumerate(zip(self.arg_types, self.arg_kinds)): + if kind == ARG_STAR: + return FormalArgument(None, position, type, False) + return None + + def kw_arg(self) -> FormalArgument | None: + """The formal argument for **kwargs.""" + for position, (type, kind) in enumerate(zip(self.arg_types, self.arg_kinds)): + if kind == ARG_STAR2: + return FormalArgument(None, position, type, False) + return None + + @property + def is_var_arg(self) -> bool: + """Does this callable have a *args argument?""" + return ARG_STAR in self.arg_kinds + + @property + def is_kw_arg(self) -> bool: + """Does this callable have a **kwargs argument?""" + return ARG_STAR2 in self.arg_kinds + + def is_type_obj(self) -> bool: + return self.fallback.type.is_metaclass() and not isinstance( + get_proper_type(self.ret_type), UninhabitedType + ) + + def type_object(self) -> mypy.nodes.TypeInfo: + assert self.is_type_obj() + ret = get_proper_type(self.ret_type) + if isinstance(ret, TypeVarType): + ret = get_proper_type(ret.upper_bound) + if isinstance(ret, TupleType): + ret = ret.partial_fallback + if isinstance(ret, TypedDictType): + ret = ret.fallback + if isinstance(ret, LiteralType): + ret = ret.fallback + assert isinstance(ret, Instance) + return ret.type + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_callable_type(self) + + def with_name(self, name: str) -> CallableType: + """Return a copy of this type with the specified name.""" + return self.copy_modified(ret_type=self.ret_type, name=name) + + def get_name(self) -> str | None: + return self.name + + def max_possible_positional_args(self) -> int: + """Returns maximum number of positional arguments this method could possibly accept. + + This takes into account *arg and **kwargs but excludes keyword-only args.""" + if self.is_var_arg or self.is_kw_arg: + return sys.maxsize + return sum(kind.is_positional() for kind in self.arg_kinds) + + def formal_arguments(self, include_star_args: bool = False) -> list[FormalArgument]: + """Return a list of the formal arguments of this callable, ignoring *arg and **kwargs. + + To handle *args and **kwargs, use the 'callable.var_args' and 'callable.kw_args' fields, + if they are not None. + + If you really want to include star args in the yielded output, set the + 'include_star_args' parameter to 'True'.""" + args = [] + done_with_positional = False + for i in range(len(self.arg_types)): + kind = self.arg_kinds[i] + if kind.is_named() or kind.is_star(): + done_with_positional = True + if not include_star_args and kind.is_star(): + continue + + required = kind.is_required() + pos = None if done_with_positional else i + arg = FormalArgument(self.arg_names[i], pos, self.arg_types[i], required) + args.append(arg) + return args + + def argument_by_name(self, name: str | None) -> FormalArgument | None: + if name is None: + return None + seen_star = False + for i, (arg_name, kind, typ) in enumerate( + zip(self.arg_names, self.arg_kinds, self.arg_types) + ): + # No more positional arguments after these. 
+ if kind.is_named() or kind.is_star(): + seen_star = True + if kind.is_star(): + continue + if arg_name == name: + position = None if seen_star else i + return FormalArgument(name, position, typ, kind.is_required()) + return self.try_synthesizing_arg_from_kwarg(name) + + def argument_by_position(self, position: int | None) -> FormalArgument | None: + if position is None: + return None + if position >= len(self.arg_names): + return self.try_synthesizing_arg_from_vararg(position) + name, kind, typ = ( + self.arg_names[position], + self.arg_kinds[position], + self.arg_types[position], + ) + if kind.is_positional(): + return FormalArgument(name, position, typ, kind == ARG_POS) + else: + return self.try_synthesizing_arg_from_vararg(position) + + def try_synthesizing_arg_from_kwarg(self, name: str | None) -> FormalArgument | None: + kw_arg = self.kw_arg() + if kw_arg is not None: + return FormalArgument(name, None, kw_arg.typ, False) + else: + return None + + def try_synthesizing_arg_from_vararg(self, position: int | None) -> FormalArgument | None: + var_arg = self.var_arg() + if var_arg is not None: + return FormalArgument(None, position, var_arg.typ, False) + else: + return None + + @property + def items(self) -> list[CallableType]: + return [self] + + def is_generic(self) -> bool: + return bool(self.variables) + + def type_var_ids(self) -> list[TypeVarId]: + a: list[TypeVarId] = [] + for tv in self.variables: + a.append(tv.id) + return a + + def param_spec(self) -> ParamSpecType | None: + """Return ParamSpec if callable can be called with one. + + A Callable accepting ParamSpec P args (*args, **kwargs) must have the + two final parameters like this: *args: P.args, **kwargs: P.kwargs. + """ + if len(self.arg_types) < 2: + return None + if self.arg_kinds[-2] != ARG_STAR or self.arg_kinds[-1] != ARG_STAR2: + return None + arg_type = self.arg_types[-2] + if not isinstance(arg_type, ParamSpecType): + return None + + # Prepend prefix for def f(prefix..., *args: P.args, **kwargs: P.kwargs) -> ... + # TODO: confirm that all arg kinds are positional + prefix = Parameters(self.arg_types[:-2], self.arg_kinds[:-2], self.arg_names[:-2]) + return arg_type.copy_modified(flavor=ParamSpecFlavor.BARE, prefix=prefix) + + def normalize_trivial_unpack(self) -> None: + # Normalize trivial unpack in var args as *args: *tuple[X, ...] -> *args: X in place. 
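+        # For example, a variadic argument annotated as *args: *tuple[int, ...] is
+        # rewritten in place to the equivalent *args: int (int is just an
+        # illustrative element type standing in for X).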
+ if self.is_var_arg: + star_index = self.arg_kinds.index(ARG_STAR) + star_type = self.arg_types[star_index] + if isinstance(star_type, UnpackType): + p_type = get_proper_type(star_type.type) + if isinstance(p_type, Instance): + assert p_type.type.fullname == "builtins.tuple" + self.arg_types[star_index] = p_type.args[0] + + def with_unpacked_kwargs(self) -> NormalizedCallableType: + if not self.unpack_kwargs: + return cast(NormalizedCallableType, self) + last_type = get_proper_type(self.arg_types[-1]) + assert isinstance(last_type, TypedDictType) + extra_kinds = [ + ArgKind.ARG_NAMED if name in last_type.required_keys else ArgKind.ARG_NAMED_OPT + for name in last_type.items + ] + new_arg_kinds = self.arg_kinds[:-1] + extra_kinds + new_arg_names = self.arg_names[:-1] + list(last_type.items) + new_arg_types = self.arg_types[:-1] + list(last_type.items.values()) + return NormalizedCallableType( + self.copy_modified( + arg_kinds=new_arg_kinds, + arg_names=new_arg_names, + arg_types=new_arg_types, + unpack_kwargs=False, + ) + ) + + def with_normalized_var_args(self) -> Self: + var_arg = self.var_arg() + if not var_arg or not isinstance(var_arg.typ, UnpackType): + return self + unpacked = get_proper_type(var_arg.typ.type) + if not isinstance(unpacked, TupleType): + # Note that we don't normalize *args: *tuple[X, ...] -> *args: X, + # this should be done once in semanal_typeargs.py for user-defined types, + # and we ourselves rarely construct such type. + return self + unpack_index = find_unpack_in_list(unpacked.items) + if unpack_index == 0 and len(unpacked.items) > 1: + # Already normalized. + return self + + # Boilerplate: + var_arg_index = self.arg_kinds.index(ARG_STAR) + types_prefix = self.arg_types[:var_arg_index] + kinds_prefix = self.arg_kinds[:var_arg_index] + names_prefix = self.arg_names[:var_arg_index] + types_suffix = self.arg_types[var_arg_index + 1 :] + kinds_suffix = self.arg_kinds[var_arg_index + 1 :] + names_suffix = self.arg_names[var_arg_index + 1 :] + no_name: str | None = None # to silence mypy + + # Now we have something non-trivial to do. + if unpack_index is None: + # Plain *Tuple[X, Y, Z] -> replace with ARG_POS completely + types_middle = unpacked.items + kinds_middle = [ARG_POS] * len(unpacked.items) + names_middle = [no_name] * len(unpacked.items) + else: + # *Tuple[X, *Ts, Y, Z] or *Tuple[X, *tuple[T, ...], X, Z], here + # we replace the prefix by ARG_POS (this is how some places expect + # Callables to be represented) + nested_unpack = unpacked.items[unpack_index] + assert isinstance(nested_unpack, UnpackType) + nested_unpacked = get_proper_type(nested_unpack.type) + if unpack_index == len(unpacked.items) - 1: + # Normalize also single item tuples like + # *args: *Tuple[*tuple[X, ...]] -> *args: X + # *args: *Tuple[*Ts] -> *args: *Ts + # This may be not strictly necessary, but these are very verbose. + if isinstance(nested_unpacked, Instance): + assert nested_unpacked.type.fullname == "builtins.tuple" + new_unpack = nested_unpacked.args[0] + else: + if not isinstance(nested_unpacked, TypeVarTupleType): + # We found a non-normalized tuple type, this means this method + # is called during semantic analysis (e.g. from get_proper_type()) + # there is no point in normalizing callables at this stage. 
+ return self + new_unpack = nested_unpack + else: + new_unpack = UnpackType( + unpacked.copy_modified(items=unpacked.items[unpack_index:]) + ) + types_middle = unpacked.items[:unpack_index] + [new_unpack] + kinds_middle = [ARG_POS] * unpack_index + [ARG_STAR] + names_middle = [no_name] * unpack_index + [self.arg_names[var_arg_index]] + return self.copy_modified( + arg_types=types_prefix + types_middle + types_suffix, + arg_kinds=kinds_prefix + kinds_middle + kinds_suffix, + arg_names=names_prefix + names_middle + names_suffix, + ) + + def __hash__(self) -> int: + return hash( + ( + self.ret_type, + self.is_ellipsis_args, + self.name, + tuple(self.arg_types), + tuple(self.arg_names), + tuple(self.arg_kinds), + self.fallback, + ) + ) + + def __eq__(self, other: object) -> bool: + if isinstance(other, CallableType): + return ( + self.ret_type == other.ret_type + and self.arg_types == other.arg_types + and self.arg_names == other.arg_names + and self.arg_kinds == other.arg_kinds + and self.name == other.name + and self.is_ellipsis_args == other.is_ellipsis_args + and self.type_guard == other.type_guard + and self.type_is == other.type_is + and self.fallback == other.fallback + ) + else: + return NotImplemented + + def serialize(self) -> JsonDict: + # TODO: As an optimization, leave out everything related to + # generic functions for non-generic functions. + return { + ".class": "CallableType", + "arg_types": [t.serialize() for t in self.arg_types], + "arg_kinds": [int(x.value) for x in self.arg_kinds], + "arg_names": self.arg_names, + "ret_type": self.ret_type.serialize(), + "fallback": self.fallback.serialize(), + "name": self.name, + # We don't serialize the definition (only used for error messages). + "variables": [v.serialize() for v in self.variables], + "is_ellipsis_args": self.is_ellipsis_args, + "implicit": self.implicit, + "is_bound": self.is_bound, + "type_guard": self.type_guard.serialize() if self.type_guard is not None else None, + "type_is": (self.type_is.serialize() if self.type_is is not None else None), + "from_concatenate": self.from_concatenate, + "imprecise_arg_kinds": self.imprecise_arg_kinds, + "unpack_kwargs": self.unpack_kwargs, + } + + @classmethod + def deserialize(cls, data: JsonDict) -> CallableType: + assert data[".class"] == "CallableType" + # The .definition link is set in fixup.py. 
+ return CallableType( + [deserialize_type(t) for t in data["arg_types"]], + [ARG_KINDS[x] for x in data["arg_kinds"]], + data["arg_names"], + deserialize_type(data["ret_type"]), + Instance.deserialize(data["fallback"]), + name=data["name"], + variables=[cast(TypeVarLikeType, deserialize_type(v)) for v in data["variables"]], + is_ellipsis_args=data["is_ellipsis_args"], + implicit=data["implicit"], + is_bound=data["is_bound"], + type_guard=( + deserialize_type(data["type_guard"]) if data["type_guard"] is not None else None + ), + type_is=(deserialize_type(data["type_is"]) if data["type_is"] is not None else None), + from_concatenate=data["from_concatenate"], + imprecise_arg_kinds=data["imprecise_arg_kinds"], + unpack_kwargs=data["unpack_kwargs"], + ) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, CALLABLE_TYPE) + self.fallback.write(data) + write_type_list(data, self.arg_types) + write_int_list(data, [int(x.value) for x in self.arg_kinds]) + write_str_opt_list(data, self.arg_names) + self.ret_type.write(data) + write_str_opt(data, self.name) + write_type_list(data, self.variables) + write_bool(data, self.is_ellipsis_args) + write_bool(data, self.implicit) + write_bool(data, self.is_bound) + write_type_opt(data, self.type_guard) + write_type_opt(data, self.type_is) + write_bool(data, self.from_concatenate) + write_bool(data, self.imprecise_arg_kinds) + write_bool(data, self.unpack_kwargs) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> CallableType: + assert read_tag(data) == INSTANCE + fallback = Instance.read(data) + ret = CallableType( + read_type_list(data), + [ARG_KINDS[ak] for ak in read_int_list(data)], + read_str_opt_list(data), + read_type(data), + fallback, + name=read_str_opt(data), + variables=read_type_var_likes(data), + is_ellipsis_args=read_bool(data), + implicit=read_bool(data), + is_bound=read_bool(data), + type_guard=read_type_opt(data), + type_is=read_type_opt(data), + from_concatenate=read_bool(data), + imprecise_arg_kinds=read_bool(data), + unpack_kwargs=read_bool(data), + ) + assert read_tag(data) == END_TAG + return ret + + +# This is a little safety net to prevent reckless special-casing of callables +# that can potentially break Unpack[...] with **kwargs. +# TODO: use this in more places in checkexpr.py etc? +NormalizedCallableType = NewType("NormalizedCallableType", CallableType) + + +class Overloaded(FunctionLike): + """Overloaded function type T1, ... Tn, where each Ti is CallableType. + + The variant to call is chosen based on static argument + types. Overloaded function types can only be defined in stub + files, and thus there is no explicit runtime dispatch + implementation. + """ + + __slots__ = ("_items",) + + _items: list[CallableType] # Must not be empty + + def __init__(self, items: list[CallableType]) -> None: + super().__init__(items[0].line, items[0].column) + self._items = items + self.fallback = items[0].fallback + + @property + def items(self) -> list[CallableType]: + return self._items + + def name(self) -> str | None: + return self.get_name() + + def is_type_obj(self) -> bool: + # All the items must have the same type object status, so it's + # sufficient to query only (any) one of them. + return self._items[0].is_type_obj() + + def type_object(self) -> mypy.nodes.TypeInfo: + # All the items must have the same type object, so it's sufficient to + # query only (any) one of them. 
+ return self._items[0].type_object() + + def with_name(self, name: str) -> Overloaded: + ni: list[CallableType] = [] + for it in self._items: + ni.append(it.with_name(name)) + return Overloaded(ni) + + def get_name(self) -> str | None: + return self._items[0].name + + def with_unpacked_kwargs(self) -> Overloaded: + if any(i.unpack_kwargs for i in self.items): + return Overloaded([i.with_unpacked_kwargs() for i in self.items]) + return self + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_overloaded(self) + + def __hash__(self) -> int: + return hash(tuple(self.items)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Overloaded): + return NotImplemented + return self.items == other.items + + def serialize(self) -> JsonDict: + return {".class": "Overloaded", "items": [t.serialize() for t in self.items]} + + @classmethod + def deserialize(cls, data: JsonDict) -> Overloaded: + assert data[".class"] == "Overloaded" + return Overloaded([CallableType.deserialize(t) for t in data["items"]]) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, OVERLOADED) + write_type_list(data, self.items) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> Overloaded: + items = [] + assert read_tag(data) == LIST_GEN + for _ in range(read_int_bare(data)): + assert read_tag(data) == CALLABLE_TYPE + items.append(CallableType.read(data)) + assert read_tag(data) == END_TAG + return Overloaded(items) + + +class TupleType(ProperType): + """The tuple type Tuple[T1, ..., Tn] (at least one type argument). + + Instance variables: + items: Tuple item types + partial_fallback: The (imprecise) underlying instance type that is used + for non-tuple methods. This is generally builtins.tuple[Any, ...] for + regular tuples, but it's different for named tuples and classes with + a tuple base class. Use mypy.typeops.tuple_fallback to calculate the + precise fallback type derived from item types. + implicit: If True, derived from a tuple expression (t,....) instead of Tuple[t, ...] + """ + + __slots__ = ("items", "partial_fallback", "implicit") + + items: list[Type] + partial_fallback: Instance + implicit: bool + + def __init__( + self, + items: list[Type], + fallback: Instance, + line: int = -1, + column: int = -1, + implicit: bool = False, + ) -> None: + super().__init__(line, column) + self.partial_fallback = fallback + self.items = items + self.implicit = implicit + + def can_be_true_default(self) -> bool: + if self.can_be_any_bool(): + # Corner case: it is a `NamedTuple` with `__bool__` method defined. + # It can be anything: both `True` and `False`. + return True + return self.length() > 0 + + def can_be_false_default(self) -> bool: + if self.can_be_any_bool(): + # Corner case: it is a `NamedTuple` with `__bool__` method defined. + # It can be anything: both `True` and `False`. + return True + if self.length() == 0: + return True + if self.length() > 1: + return False + # Special case tuple[*Ts] may or may not be false. + item = self.items[0] + if not isinstance(item, UnpackType): + return False + if not isinstance(item.type, TypeVarTupleType): + # Non-normalized tuple[int, ...] can be false. 
+ return True + return item.type.min_len == 0 + + def can_be_any_bool(self) -> bool: + return bool( + self.partial_fallback.type + and self.partial_fallback.type.fullname != "builtins.tuple" + and self.partial_fallback.type.names.get("__bool__") + ) + + def length(self) -> int: + return len(self.items) + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_tuple_type(self) + + def __hash__(self) -> int: + return hash((tuple(self.items), self.partial_fallback)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, TupleType): + return NotImplemented + return self.items == other.items and self.partial_fallback == other.partial_fallback + + def serialize(self) -> JsonDict: + return { + ".class": "TupleType", + "items": [t.serialize() for t in self.items], + "partial_fallback": self.partial_fallback.serialize(), + "implicit": self.implicit, + } + + @classmethod + def deserialize(cls, data: JsonDict) -> TupleType: + assert data[".class"] == "TupleType" + return TupleType( + [deserialize_type(t) for t in data["items"]], + Instance.deserialize(data["partial_fallback"]), + implicit=data["implicit"], + ) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, TUPLE_TYPE) + self.partial_fallback.write(data) + write_type_list(data, self.items) + write_bool(data, self.implicit) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> TupleType: + assert read_tag(data) == INSTANCE + fallback = Instance.read(data) + ret = TupleType(read_type_list(data), fallback, implicit=read_bool(data)) + assert read_tag(data) == END_TAG + return ret + + def copy_modified( + self, *, fallback: Instance | None = None, items: list[Type] | None = None + ) -> TupleType: + if fallback is None: + fallback = self.partial_fallback + if items is None: + items = self.items + return TupleType(items, fallback, self.line, self.column) + + def slice( + self, begin: int | None, end: int | None, stride: int | None, *, fallback: Instance | None + ) -> TupleType | None: + if fallback is None: + fallback = self.partial_fallback + + if stride == 0: + return None + + if any(isinstance(t, UnpackType) for t in self.items): + total = len(self.items) + unpack_index = find_unpack_in_list(self.items) + assert unpack_index is not None + if begin is None and end is None: + # We special-case this to support reversing variadic tuples. + # General support for slicing is tricky, so we handle only simple cases. + if stride == -1: + slice_items = self.items[::-1] + elif stride is None or stride == 1: + slice_items = self.items + else: + return None + elif (begin is None or unpack_index >= begin >= 0) and ( + end is not None and unpack_index >= end >= 0 + ): + # Start and end are in the prefix, everything works in this case. + slice_items = self.items[begin:end:stride] + elif (begin is not None and unpack_index - total < begin < 0) and ( + end is None or unpack_index - total < end < 0 + ): + # Start and end are in the suffix, everything works in this case. + slice_items = self.items[begin:end:stride] + elif (begin is None or unpack_index >= begin >= 0) and ( + end is None or unpack_index - total < end < 0 + ): + # Start in the prefix, end in the suffix, we can support only trivial strides. 
+ if stride is None or stride == 1: + slice_items = self.items[begin:end:stride] + else: + return None + elif (begin is not None and unpack_index - total < begin < 0) and ( + end is not None and unpack_index >= end >= 0 + ): + # Start in the suffix, end in the prefix, we can support only trivial strides. + if stride is None or stride == -1: + slice_items = self.items[begin:end:stride] + else: + return None + else: + # TODO: there some additional cases we can support for homogeneous variadic + # items, we can "eat away" finite number of items. + return None + else: + slice_items = self.items[begin:end:stride] + return TupleType(slice_items, fallback, self.line, self.column, self.implicit) + + +class TypedDictType(ProperType): + """Type of TypedDict object {'k1': v1, ..., 'kn': vn}. + + A TypedDict object is a dictionary with specific string (literal) keys. Each + key has a value with a distinct type that depends on the key. TypedDict objects + are normal dict objects at runtime. + + A TypedDictType can be either named or anonymous. If it's anonymous, its + fallback will be typing_extensions._TypedDict (Instance). _TypedDict is a subclass + of Mapping[str, object] and defines all non-mapping dict methods that TypedDict + supports. Some dict methods are unsafe and not supported. _TypedDict isn't defined + at runtime. + + If a TypedDict is named, its fallback will be an Instance of the named type + (ex: "Point") whose TypeInfo has a typeddict_type that is anonymous. This + is similar to how named tuples work. + + TODO: The fallback structure is perhaps overly complicated. + """ + + __slots__ = ( + "items", + "required_keys", + "readonly_keys", + "fallback", + "extra_items_from", + "to_be_mutated", + ) + + items: dict[str, Type] # item_name -> item_type + required_keys: set[str] + readonly_keys: set[str] + fallback: Instance + + extra_items_from: list[ProperType] # only used during semantic analysis + to_be_mutated: bool # only used in a plugin for `.update`, `|=`, etc + + def __init__( + self, + items: dict[str, Type], + required_keys: set[str], + readonly_keys: set[str], + fallback: Instance, + line: int = -1, + column: int = -1, + ) -> None: + super().__init__(line, column) + self.items = items + self.required_keys = required_keys + self.readonly_keys = readonly_keys + self.fallback = fallback + self.can_be_true = len(self.items) > 0 + self.can_be_false = len(self.required_keys) == 0 + self.extra_items_from = [] + self.to_be_mutated = False + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_typeddict_type(self) + + def __hash__(self) -> int: + return hash( + ( + frozenset(self.items.items()), + self.fallback, + frozenset(self.required_keys), + frozenset(self.readonly_keys), + ) + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, TypedDictType): + return NotImplemented + if self is other: + return True + return ( + frozenset(self.items.keys()) == frozenset(other.items.keys()) + and all( + left_item_type == right_item_type + for (_, left_item_type, right_item_type) in self.zip(other) + ) + and self.fallback == other.fallback + and self.required_keys == other.required_keys + and self.readonly_keys == other.readonly_keys + ) + + def serialize(self) -> JsonDict: + return { + ".class": "TypedDictType", + "items": [[n, t.serialize()] for (n, t) in self.items.items()], + "required_keys": sorted(self.required_keys), + "readonly_keys": sorted(self.readonly_keys), + "fallback": self.fallback.serialize(), + } + + @classmethod + def deserialize(cls, data: 
JsonDict) -> TypedDictType: + assert data[".class"] == "TypedDictType" + return TypedDictType( + {n: deserialize_type(t) for (n, t) in data["items"]}, + set(data["required_keys"]), + set(data["readonly_keys"]), + Instance.deserialize(data["fallback"]), + ) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, TYPED_DICT_TYPE) + self.fallback.write(data) + write_type_map(data, self.items) + write_str_list(data, sorted(self.required_keys)) + write_str_list(data, sorted(self.readonly_keys)) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> TypedDictType: + assert read_tag(data) == INSTANCE + fallback = Instance.read(data) + ret = TypedDictType( + read_type_map(data), set(read_str_list(data)), set(read_str_list(data)), fallback + ) + assert read_tag(data) == END_TAG + return ret + + @property + def is_final(self) -> bool: + return self.fallback.type.is_final + + def is_anonymous(self) -> bool: + return self.fallback.type.fullname in TPDICT_FB_NAMES + + def as_anonymous(self) -> TypedDictType: + if self.is_anonymous(): + return self + assert self.fallback.type.typeddict_type is not None + return self.fallback.type.typeddict_type.as_anonymous() + + def copy_modified( + self, + *, + fallback: Instance | None = None, + item_types: list[Type] | None = None, + item_names: list[str] | None = None, + required_keys: set[str] | None = None, + readonly_keys: set[str] | None = None, + ) -> TypedDictType: + if fallback is None: + fallback = self.fallback + if item_types is None: + items = self.items + else: + items = dict(zip(self.items, item_types)) + if required_keys is None: + required_keys = self.required_keys + if readonly_keys is None: + readonly_keys = self.readonly_keys + if item_names is not None: + items = {k: v for (k, v) in items.items() if k in item_names} + required_keys &= set(item_names) + return TypedDictType(items, required_keys, readonly_keys, fallback, self.line, self.column) + + def create_anonymous_fallback(self) -> Instance: + anonymous = self.as_anonymous() + return anonymous.fallback + + def names_are_wider_than(self, other: TypedDictType) -> bool: + return len(other.items.keys() - self.items.keys()) == 0 + + def zip(self, right: TypedDictType) -> Iterable[tuple[str, Type, Type]]: + left = self + for item_name, left_item_type in left.items.items(): + right_item_type = right.items.get(item_name) + if right_item_type is not None: + yield (item_name, left_item_type, right_item_type) + + def zipall(self, right: TypedDictType) -> Iterable[tuple[str, Type | None, Type | None]]: + left = self + for item_name, left_item_type in left.items.items(): + right_item_type = right.items.get(item_name) + yield (item_name, left_item_type, right_item_type) + for item_name, right_item_type in right.items.items(): + if item_name in left.items: + continue + yield (item_name, None, right_item_type) + + +class RawExpressionType(ProperType): + """A synthetic type representing some arbitrary expression that does not cleanly + translate into a type. + + This synthetic type is only used at the beginning stages of semantic analysis + and should be completely removing during the process for mapping UnboundTypes to + actual types: we either turn it into a LiteralType or an AnyType. + + For example, suppose `Foo[1]` is initially represented as the following: + + UnboundType( + name='Foo', + args=[ + RawExpressionType(value=1, base_type_name='builtins.int'), + ], + ) + + As we perform semantic analysis, this type will transform into one of two + possible forms. 
+ + If 'Foo' was an alias for 'Literal' all along, this type is transformed into: + + LiteralType(value=1, fallback=int_instance_here) + + Alternatively, if 'Foo' is an unrelated class, we report an error and instead + produce something like this: + + Instance(type=typeinfo_for_foo, args=[AnyType(TypeOfAny.from_error)) + + If the "note" field is not None, the provided note will be reported alongside the + error at this point. + + Note: if "literal_value" is None, that means this object is representing some + expression that cannot possibly be a parameter of Literal[...]. For example, + "Foo[3j]" would be represented as: + + UnboundType( + name='Foo', + args=[ + RawExpressionType(value=None, base_type_name='builtins.complex'), + ], + ) + """ + + __slots__ = ("literal_value", "base_type_name", "note") + + def __init__( + self, + literal_value: LiteralValue | None, + base_type_name: str, + line: int = -1, + column: int = -1, + note: str | None = None, + ) -> None: + super().__init__(line, column) + self.literal_value = literal_value + self.base_type_name = base_type_name + self.note = note + + def simple_name(self) -> str: + return self.base_type_name.replace("builtins.", "") + + def accept(self, visitor: TypeVisitor[T]) -> T: + assert isinstance(visitor, SyntheticTypeVisitor) + ret: T = visitor.visit_raw_expression_type(self) + return ret + + def serialize(self) -> JsonDict: + assert False, "Synthetic types don't serialize" + + def __hash__(self) -> int: + return hash((self.literal_value, self.base_type_name)) + + def __eq__(self, other: object) -> bool: + if isinstance(other, RawExpressionType): + return ( + self.base_type_name == other.base_type_name + and self.literal_value == other.literal_value + ) + else: + return NotImplemented + + +class LiteralType(ProperType): + """The type of a Literal instance. Literal[Value] + + A Literal always consists of: + + 1. A native Python object corresponding to the contained inner value + 2. A fallback for this Literal. The fallback also corresponds to the + parent type this Literal subtypes. + + For example, 'Literal[42]' is represented as + 'LiteralType(value=42, fallback=instance_of_int)' + + As another example, `Literal[Color.RED]` (where Color is an enum) is + represented as `LiteralType(value="RED", fallback=instance_of_color)'. + """ + + __slots__ = ("value", "fallback", "_hash") + + def __init__( + self, value: LiteralValue, fallback: Instance, line: int = -1, column: int = -1 + ) -> None: + super().__init__(line, column) + self.value = value + self.fallback = fallback + self._hash = -1 # Cached hash value + + # NOTE: Enum types are always truthy by default, but this can be changed + # in subclasses, so we need to get the truthyness from the Enum + # type rather than base it on the value (which is a non-empty + # string for enums, so always truthy) + # TODO: We should consider moving this branch to the `can_be_true` + # `can_be_false` properties instead, so the truthyness only + # needs to be determined once per set of Enum literals. + # However, the same can be said for `TypeAliasType` in some + # cases and we only set the default based on the type it is + # aliasing. So if we decide to change this, we may want to + # change that as well. perf_compare output was inconclusive + # but slightly favored this version, probably because we have + # almost no test cases where we would redundantly compute + # `can_be_false`/`can_be_true`. 
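+    # For example, Literal[0] and Literal[""] can only be false, Literal["x"] can
+    # only be true, and an enum literal defers to the truthiness of its Enum fallback.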
+ def can_be_false_default(self) -> bool: + if self.fallback.type.is_enum: + return self.fallback.can_be_false + return not self.value + + def can_be_true_default(self) -> bool: + if self.fallback.type.is_enum: + return self.fallback.can_be_true + return bool(self.value) + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_literal_type(self) + + def __hash__(self) -> int: + if self._hash == -1: + self._hash = hash((self.value, self.fallback)) + return self._hash + + def __eq__(self, other: object) -> bool: + if isinstance(other, LiteralType): + return self.fallback == other.fallback and self.value == other.value + else: + return NotImplemented + + def is_enum_literal(self) -> bool: + return self.fallback.type.is_enum + + def value_repr(self) -> str: + """Returns the string representation of the underlying type. + + This function is almost equivalent to running `repr(self.value)`, + except it includes some additional logic to correctly handle cases + where the value is a string, byte string, a unicode string, or an enum. + """ + raw = repr(self.value) + fallback_name = self.fallback.type.fullname + + # If this is backed by an enum, + if self.is_enum_literal(): + return f"{fallback_name}.{self.value}" + + if fallback_name == "builtins.bytes": + # Note: 'builtins.bytes' only appears in Python 3, so we want to + # explicitly prefix with a "b" + return "b" + raw + else: + # 'builtins.str' could mean either depending on context, but either way + # we don't prefix: it's the "native" string. And of course, if value is + # some other type, we just return that string repr directly. + return raw + + def serialize(self) -> JsonDict | str: + return { + ".class": "LiteralType", + "value": self.value, + "fallback": self.fallback.serialize(), + } + + @classmethod + def deserialize(cls, data: JsonDict) -> LiteralType: + assert data[".class"] == "LiteralType" + return LiteralType(value=data["value"], fallback=Instance.deserialize(data["fallback"])) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, LITERAL_TYPE) + self.fallback.write(data) + write_literal(data, self.value) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> LiteralType: + assert read_tag(data) == INSTANCE + fallback = Instance.read(data) + tag = read_tag(data) + ret = LiteralType(read_literal(data, tag), fallback) + assert read_tag(data) == END_TAG + return ret + + def is_singleton_type(self) -> bool: + return self.is_enum_literal() or isinstance(self.value, bool) + + +class UnionType(ProperType): + """The union type Union[T1, ..., Tn] (at least one type argument).""" + + __slots__ = ( + "items", + "is_evaluated", + "uses_pep604_syntax", + "original_str_expr", + "original_str_fallback", + ) + + def __init__( + self, + items: Sequence[Type], + line: int = -1, + column: int = -1, + *, + is_evaluated: bool = True, + uses_pep604_syntax: bool = False, + ) -> None: + super().__init__(line, column) + # We must keep this false to avoid crashes during semantic analysis. + # TODO: maybe switch this to True during type-checking pass? + self.items = flatten_nested_unions(items, handle_type_alias_type=False) + # is_evaluated should be set to false for type comments and string literals + self.is_evaluated = is_evaluated + # uses_pep604_syntax is True if Union uses OR syntax (X | Y) + self.uses_pep604_syntax = uses_pep604_syntax + # The meaning of these two is the same as for UnboundType. 
A UnionType can be + # return by type parser from a string "A|B", and we need to be able to fall back + # to plain string, when such a string appears inside a Literal[...]. + self.original_str_expr: str | None = None + self.original_str_fallback: str | None = None + + def can_be_true_default(self) -> bool: + return any(item.can_be_true for item in self.items) + + def can_be_false_default(self) -> bool: + return any(item.can_be_false for item in self.items) + + def __hash__(self) -> int: + return hash(frozenset(self.items)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, UnionType): + return NotImplemented + if self is other: + return True + return frozenset(self.items) == frozenset(other.items) + + @overload + @staticmethod + def make_union( + items: Sequence[ProperType], line: int = -1, column: int = -1 + ) -> ProperType: ... + + @overload + @staticmethod + def make_union(items: Sequence[Type], line: int = -1, column: int = -1) -> Type: ... + + @staticmethod + def make_union(items: Sequence[Type], line: int = -1, column: int = -1) -> Type: + if len(items) > 1: + return UnionType(items, line, column) + elif len(items) == 1: + return items[0] + else: + return UninhabitedType() + + def length(self) -> int: + return len(self.items) + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_union_type(self) + + def relevant_items(self) -> list[Type]: + """Removes NoneTypes from Unions when strict Optional checking is off.""" + if state.strict_optional: + return self.items + else: + return [i for i in self.items if not isinstance(get_proper_type(i), NoneType)] + + def serialize(self) -> JsonDict: + return { + ".class": "UnionType", + "items": [t.serialize() for t in self.items], + "uses_pep604_syntax": self.uses_pep604_syntax, + } + + @classmethod + def deserialize(cls, data: JsonDict) -> UnionType: + assert data[".class"] == "UnionType" + return UnionType( + [deserialize_type(t) for t in data["items"]], + uses_pep604_syntax=data["uses_pep604_syntax"], + ) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, UNION_TYPE) + write_type_list(data, self.items) + write_bool(data, self.uses_pep604_syntax) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> UnionType: + ret = UnionType(read_type_list(data), uses_pep604_syntax=read_bool(data)) + assert read_tag(data) == END_TAG + return ret + + +class PartialType(ProperType): + """Type such as List[?] where type arguments are unknown, or partial None type. + + These are used for inferring types in multiphase initialization such as this: + + x = [] # x gets a partial type List[?], as item type is unknown + x.append(1) # partial type gets replaced with normal type List[int] + + Or with None: + + x = None # x gets a partial type None + if c: + x = 1 # Infer actual type int for x + """ + + __slots__ = ("type", "var", "value_type") + + # None for the 'None' partial type; otherwise a generic class + type: mypy.nodes.TypeInfo | None + var: mypy.nodes.Var + # For partial defaultdict[K, V], the type V (K is unknown). If V is generic, + # the type argument is Any and will be replaced later. 
+ value_type: Instance | None + + def __init__( + self, + type: mypy.nodes.TypeInfo | None, + var: mypy.nodes.Var, + value_type: Instance | None = None, + ) -> None: + super().__init__() + self.type = type + self.var = var + self.value_type = value_type + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_partial_type(self) + + +class EllipsisType(ProperType): + """The type ... (ellipsis). + + This is not a real type but a syntactic AST construct, used in Callable[..., T], for example. + + A semantically analyzed type will never have ellipsis types. + """ + + __slots__ = () + + def accept(self, visitor: TypeVisitor[T]) -> T: + assert isinstance(visitor, SyntheticTypeVisitor) + ret: T = visitor.visit_ellipsis_type(self) + return ret + + def serialize(self) -> JsonDict: + assert False, "Synthetic types don't serialize" + + +class TypeType(ProperType): + """For types like Type[User] or TypeForm[User | None]. + + Type[C] annotates variables that are class objects, constrained by + the type argument. See PEP 484 for more details. + + TypeForm[T] annotates variables that hold the result of evaluating + a type expression. See PEP 747 for more details. + + We may encounter expressions whose values are specific classes; + those are represented as callables (possibly overloaded) + corresponding to the class's constructor's signature and returning + an instance of that class. The difference with Type[C] is that + those callables always represent the exact class given as the + return type; Type[C] represents any class that's a subclass of C, + and C may also be a type variable or a union (or Any). + + Many questions around subtype relationships between Type[C1] and + def(...) -> C2 are answered by looking at the subtype + relationships between C1 and C2, since Type[] is considered + covariant. + + There's an unsolved problem with constructor signatures (also + unsolved in PEP 484): calling a variable whose type is Type[C] + assumes the constructor signature for C, even though a subclass of + C might completely change the constructor signature. For now we + just assume that users of Type[C] are careful not to do that (in + the future we might detect when they are violating that + assumption). + """ + + __slots__ = ("item", "is_type_form") + + # This can't be everything, but it can be a class reference, + # a generic class instance, a union, Any, a type variable... + item: ProperType + + # If True then this TypeType represents a TypeForm[T]. + # If False then this TypeType represents a Type[C]. + is_type_form: bool + + def __init__( + self, + item: Bogus[Instance | AnyType | TypeVarType | TupleType | NoneType | CallableType], + *, + line: int = -1, + column: int = -1, + is_type_form: bool = False, + ) -> None: + """To ensure Type[Union[A, B]] is always represented as Union[Type[A], Type[B]], item of + type UnionType must be handled through make_normalized static method. 
+ """ + super().__init__(line, column) + self.item = item + self.is_type_form = is_type_form + + @staticmethod + def make_normalized( + item: Type, *, line: int = -1, column: int = -1, is_type_form: bool = False + ) -> ProperType: + item = get_proper_type(item) + if is_type_form: + # Don't convert TypeForm[X | Y] to (TypeForm[X] | TypeForm[Y]) + pass + else: + if isinstance(item, UnionType): + return UnionType.make_union( + [TypeType.make_normalized(union_item) for union_item in item.items], + line=line, + column=column, + ) + return TypeType(item, line=line, column=column, is_type_form=is_type_form) # type: ignore[arg-type] + + def accept(self, visitor: TypeVisitor[T]) -> T: + return visitor.visit_type_type(self) + + def __hash__(self) -> int: + return hash(self.item) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, TypeType): + return NotImplemented + return self.item == other.item and self.is_type_form == other.is_type_form + + def serialize(self) -> JsonDict: + return { + ".class": "TypeType", + "item": self.item.serialize(), + "is_type_form": self.is_type_form, + } + + @classmethod + def deserialize(cls, data: JsonDict) -> Type: + assert data[".class"] == "TypeType" + return TypeType.make_normalized( + deserialize_type(data["item"]), is_type_form=data["is_type_form"] + ) + + def write(self, data: WriteBuffer) -> None: + write_tag(data, TYPE_TYPE) + self.item.write(data) + write_tag(data, END_TAG) + + @classmethod + def read(cls, data: ReadBuffer) -> Type: + ret = TypeType.make_normalized(read_type(data)) + assert read_tag(data) == END_TAG + return ret + + +class PlaceholderType(ProperType): + """Temporary, yet-unknown type during semantic analysis. + + This is needed when there's a reference to a type before the real symbol + table entry of the target type is available (specifically, we use a + temporary PlaceholderNode symbol node). Consider this example: + + class str(Sequence[str]): ... + + We use a PlaceholderType for the 'str' in 'Sequence[str]' since we can't create + a TypeInfo for 'str' until all base classes have been resolved. We'll soon + perform another analysis iteration which replaces the base class with a complete + type without any placeholders. After semantic analysis, no placeholder types must + exist. + """ + + __slots__ = ("fullname", "args") + + def __init__(self, fullname: str | None, args: list[Type], line: int) -> None: + super().__init__(line) + self.fullname = fullname # Must be a valid full name of an actual node (or None). + self.args = args + + def accept(self, visitor: TypeVisitor[T]) -> T: + assert isinstance(visitor, SyntheticTypeVisitor) + ret: T = visitor.visit_placeholder_type(self) + return ret + + def __hash__(self) -> int: + return hash((self.fullname, tuple(self.args))) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PlaceholderType): + return NotImplemented + return self.fullname == other.fullname and self.args == other.args + + def serialize(self) -> str: + # We should never get here since all placeholders should be replaced + # during semantic analysis. + assert False, f"Internal error: unresolved placeholder type {self.fullname}" + + +@overload +def get_proper_type(typ: None) -> None: ... + + +@overload +def get_proper_type(typ: Type) -> ProperType: ... + + +def get_proper_type(typ: Type | None) -> ProperType | None: + """Get the expansion of a type alias type. + + If the type is already a proper type, this is a no-op. Use this function + wherever a decision is made on a call like e.g. 
'if isinstance(typ, UnionType): ...', + because 'typ' in this case may be an alias to union. Note: if after making the decision + on the isinstance() call you pass on the original type (and not one of its components) + it is recommended to *always* pass on the unexpanded alias. + """ + if typ is None: + return None + # TODO: this is an ugly hack, remove. + if isinstance(typ, TypeGuardedType): + typ = typ.type_guard + while isinstance(typ, TypeAliasType): + typ = typ._expand_once() + # TODO: store the name of original type alias on this type, so we can show it in errors. + return cast(ProperType, typ) + + +@overload +def get_proper_types(types: list[Type] | tuple[Type, ...]) -> list[ProperType]: ... + + +@overload +def get_proper_types( + types: list[Type | None] | tuple[Type | None, ...], +) -> list[ProperType | None]: ... + + +def get_proper_types( + types: list[Type] | list[Type | None] | tuple[Type | None, ...], +) -> list[ProperType] | list[ProperType | None]: + if isinstance(types, list): + typelist = types + # Optimize for the common case so that we don't need to allocate anything + if not any(isinstance(t, (TypeAliasType, TypeGuardedType)) for t in typelist): + return cast("list[ProperType]", typelist) + return [get_proper_type(t) for t in typelist] + else: + return [get_proper_type(t) for t in types] + + +# We split off the type visitor base classes to another module +# to make it easier to gradually get modules working with mypyc. +# Import them here, after the types are defined. +# This is intended as a re-export also. +from mypy.type_visitor import ( + ALL_STRATEGY as ALL_STRATEGY, + ANY_STRATEGY as ANY_STRATEGY, + BoolTypeQuery as BoolTypeQuery, + SyntheticTypeVisitor as SyntheticTypeVisitor, + TypeQuery as TypeQuery, + TypeTranslator as TypeTranslator, + TypeVisitor as TypeVisitor, +) + + +class TypeStrVisitor(SyntheticTypeVisitor[str]): + """Visitor for pretty-printing types into strings. + + This is mostly for debugging/testing. + + Do not preserve original formatting. + + Notes: + - Represent unbound types as Foo? or Foo?[...]. + - Represent the NoneType type as None. + """ + + def __init__(self, id_mapper: IdMapper | None = None, *, options: Options) -> None: + self.id_mapper = id_mapper + self.options = options + self.dotted_aliases: set[TypeAliasType] | None = None + + def visit_unbound_type(self, t: UnboundType, /) -> str: + s = t.name + "?" + if t.args: + s += f"[{self.list_str(t.args)}]" + return s + + def visit_type_list(self, t: TypeList, /) -> str: + return f"" + + def visit_callable_argument(self, t: CallableArgument, /) -> str: + typ = t.typ.accept(self) + if t.name is None: + return f"{t.constructor}({typ})" + else: + return f"{t.constructor}({typ}, {t.name})" + + def visit_any(self, t: AnyType, /) -> str: + return "Any" + + def visit_none_type(self, t: NoneType, /) -> str: + return "None" + + def visit_uninhabited_type(self, t: UninhabitedType, /) -> str: + return "Never" + + def visit_erased_type(self, t: ErasedType, /) -> str: + return "" + + def visit_deleted_type(self, t: DeletedType, /) -> str: + if t.source is None: + return "" + else: + return f"" + + def visit_instance(self, t: Instance, /) -> str: + if t.last_known_value and not t.args: + # Instances with a literal fallback should never be generic. If they are, + # something went wrong so we fall back to showing the full Instance repr. + s = f"{t.last_known_value.accept(self)}?" 
+ else: + s = t.type.fullname or t.type.name or "" + + if t.args: + if t.type.fullname == "builtins.tuple": + assert len(t.args) == 1 + s += f"[{self.list_str(t.args)}, ...]" + else: + s += f"[{self.list_str(t.args)}]" + elif t.type.has_type_var_tuple_type and len(t.type.type_vars) == 1: + s += "[()]" + if self.id_mapper: + s += f"<{self.id_mapper.id(t.type)}>" + return s + + def visit_type_var(self, t: TypeVarType, /) -> str: + s = f"{t.name}`{t.id}" + if self.id_mapper and t.upper_bound: + s += f"(upper_bound={t.upper_bound.accept(self)})" + if t.has_default(): + s += f" = {t.default.accept(self)}" + return s + + def visit_param_spec(self, t: ParamSpecType, /) -> str: + # prefixes are displayed as Concatenate + s = "" + if t.prefix.arg_types: + s += f"[{self.list_str(t.prefix.arg_types)}, **" + s += f"{t.name_with_suffix()}`{t.id}" + if t.prefix.arg_types: + s += "]" + if t.has_default(): + s += f" = {t.default.accept(self)}" + return s + + def visit_parameters(self, t: Parameters, /) -> str: + # This is copied from visit_callable -- is there a way to decrease duplication? + if t.is_ellipsis_args: + return "..." + + s = "" + bare_asterisk = False + for i in range(len(t.arg_types)): + if s != "": + s += ", " + if t.arg_kinds[i].is_named() and not bare_asterisk: + s += "*, " + bare_asterisk = True + if t.arg_kinds[i] == ARG_STAR: + s += "*" + if t.arg_kinds[i] == ARG_STAR2: + s += "**" + name = t.arg_names[i] + if name: + s += f"{name}: " + r = t.arg_types[i].accept(self) + + s += r + + if t.arg_kinds[i].is_optional(): + s += " =" + + return f"[{s}]" + + def visit_type_var_tuple(self, t: TypeVarTupleType, /) -> str: + s = f"{t.name}`{t.id}" + if t.has_default(): + s += f" = {t.default.accept(self)}" + return s + + def visit_callable_type(self, t: CallableType, /) -> str: + param_spec = t.param_spec() + if param_spec is not None: + num_skip = 2 + else: + num_skip = 0 + + s = "" + asterisk = False + for i in range(len(t.arg_types) - num_skip): + if s != "": + s += ", " + if t.arg_kinds[i].is_named() and not asterisk: + s += "*, " + asterisk = True + if t.arg_kinds[i] == ARG_STAR: + s += "*" + asterisk = True + if t.arg_kinds[i] == ARG_STAR2: + s += "**" + name = t.arg_names[i] + if name: + s += name + ": " + type_str = t.arg_types[i].accept(self) + if t.arg_kinds[i] == ARG_STAR2 and t.unpack_kwargs: + type_str = f"Unpack[{type_str}]" + s += type_str + if t.arg_kinds[i].is_optional(): + s += " =" + + if param_spec is not None: + n = param_spec.name + if s: + s += ", " + s += f"*{n}.args, **{n}.kwargs" + if param_spec.has_default(): + s += f" = {param_spec.default.accept(self)}" + + s = f"({s})" + + if not isinstance(get_proper_type(t.ret_type), NoneType): + if t.type_guard is not None: + s += f" -> TypeGuard[{t.type_guard.accept(self)}]" + elif t.type_is is not None: + s += f" -> TypeIs[{t.type_is.accept(self)}]" + else: + s += f" -> {t.ret_type.accept(self)}" + + if t.variables: + vs = [] + for var in t.variables: + if isinstance(var, TypeVarType): + # We reimplement TypeVarType.__repr__ here in order to support id_mapper. 
+ if var.values: + vals = f"({', '.join(val.accept(self) for val in var.values)})" + vs.append(f"{var.name} in {vals}") + elif not is_named_instance(var.upper_bound, "builtins.object"): + vs.append( + f"{var.name} <: {var.upper_bound.accept(self)}{f' = {var.default.accept(self)}' if var.has_default() else ''}" + ) + else: + vs.append( + f"{var.name}{f' = {var.default.accept(self)}' if var.has_default() else ''}" + ) + else: + # For other TypeVarLikeTypes, use the name and default + vs.append( + f"{var.name}{f' = {var.default.accept(self)}' if var.has_default() else ''}" + ) + s = f"[{', '.join(vs)}] {s}" + + return f"def {s}" + + def visit_overloaded(self, t: Overloaded, /) -> str: + a = [] + for i in t.items: + a.append(i.accept(self)) + return f"Overload({', '.join(a)})" + + def visit_tuple_type(self, t: TupleType, /) -> str: + s = self.list_str(t.items) or "()" + if t.partial_fallback and t.partial_fallback.type: + fallback_name = t.partial_fallback.type.fullname + if fallback_name != "builtins.tuple": + return f"tuple[{s}, fallback={t.partial_fallback.accept(self)}]" + return f"tuple[{s}]" + + def visit_typeddict_type(self, t: TypedDictType, /) -> str: + def item_str(name: str, typ: str) -> str: + modifier = "" + if name not in t.required_keys: + modifier += "?" + if name in t.readonly_keys: + modifier += "=" + return f"{name!r}{modifier}: {typ}" + + s = ( + "{" + + ", ".join(item_str(name, typ.accept(self)) for name, typ in t.items.items()) + + "}" + ) + prefix = "" + if t.fallback and t.fallback.type: + if t.fallback.type.fullname not in TPDICT_FB_NAMES: + prefix = repr(t.fallback.type.fullname) + ", " + return f"TypedDict({prefix}{s})" + + def visit_raw_expression_type(self, t: RawExpressionType, /) -> str: + return repr(t.literal_value) + + def visit_literal_type(self, t: LiteralType, /) -> str: + return f"Literal[{t.value_repr()}]" + + def visit_union_type(self, t: UnionType, /) -> str: + use_or_syntax = self.options.use_or_syntax() + s = self.list_str(t.items, use_or_syntax=use_or_syntax) + return s if use_or_syntax else f"Union[{s}]" + + def visit_partial_type(self, t: PartialType, /) -> str: + if t.type is None: + return "" + else: + return "".format(t.type.name, ", ".join(["?"] * len(t.type.type_vars))) + + def visit_ellipsis_type(self, t: EllipsisType, /) -> str: + return "..." + + def visit_type_type(self, t: TypeType, /) -> str: + if t.is_type_form: + type_name = "TypeForm" + else: + type_name = "type" + return f"{type_name}[{t.item.accept(self)}]" + + def visit_placeholder_type(self, t: PlaceholderType, /) -> str: + return f"" + + def visit_type_alias_type(self, t: TypeAliasType, /) -> str: + if t.alias is None: + return "" + if not t.is_recursive: + return get_proper_type(t).accept(self) + if self.dotted_aliases is None: + self.dotted_aliases = set() + elif t in self.dotted_aliases: + return "..." + self.dotted_aliases.add(t) + type_str = get_proper_type(t).accept(self) + self.dotted_aliases.discard(t) + return type_str + + def visit_unpack_type(self, t: UnpackType, /) -> str: + return f"Unpack[{t.type.accept(self)}]" + + def list_str(self, a: Iterable[Type], *, use_or_syntax: bool = False) -> str: + """Convert items of an array to strings (pretty-print types) + and join the results with commas. 
+ """ + res = [] + for t in a: + res.append(t.accept(self)) + sep = ", " if not use_or_syntax else " | " + return sep.join(res) + + +class TrivialSyntheticTypeTranslator(TypeTranslator, SyntheticTypeVisitor[Type]): + """A base class for type translators that need to be run during semantic analysis.""" + + def visit_placeholder_type(self, t: PlaceholderType, /) -> Type: + return t + + def visit_callable_argument(self, t: CallableArgument, /) -> Type: + return t + + def visit_ellipsis_type(self, t: EllipsisType, /) -> Type: + return t + + def visit_raw_expression_type(self, t: RawExpressionType, /) -> Type: + return t + + def visit_type_list(self, t: TypeList, /) -> Type: + return t + + +class CollectAliasesVisitor(TypeQuery[list[mypy.nodes.TypeAlias]]): + def __init__(self) -> None: + super().__init__() + self.seen_alias_nodes: set[mypy.nodes.TypeAlias] = set() + + def strategy(self, items: list[list[mypy.nodes.TypeAlias]]) -> list[mypy.nodes.TypeAlias]: + out = [] + for item in items: + out.extend(item) + return out + + def visit_type_alias_type(self, t: TypeAliasType, /) -> list[mypy.nodes.TypeAlias]: + assert t.alias is not None + if t.alias not in self.seen_alias_nodes: + self.seen_alias_nodes.add(t.alias) + res = [t.alias] + t.alias.target.accept(self) + else: + res = [] + for arg in t.args: + res.extend(arg.accept(self)) + return res + + +def is_named_instance(t: Type, fullnames: str | tuple[str, ...]) -> TypeGuard[Instance]: + if not isinstance(fullnames, tuple): + fullnames = (fullnames,) + + t = get_proper_type(t) + return isinstance(t, Instance) and t.type.fullname in fullnames + + +class HasTypeVars(BoolTypeQuery): + """Visitor for querying whether a type has a type variable component.""" + + def __init__(self) -> None: + super().__init__(ANY_STRATEGY) + self.skip_alias_target = True + + def visit_type_var(self, t: TypeVarType) -> bool: + return True + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> bool: + return True + + def visit_param_spec(self, t: ParamSpecType) -> bool: + return True + + +def has_type_vars(typ: Type) -> bool: + """Check if a type contains any type variables (recursively).""" + return typ.accept(HasTypeVars()) + + +class HasRecursiveType(BoolTypeQuery): + def __init__(self) -> None: + super().__init__(ANY_STRATEGY) + + def visit_type_alias_type(self, t: TypeAliasType) -> bool: + return t.is_recursive or self.query_types(t.args) + + +# Use singleton since this is hot (note: call reset() before using) +_has_recursive_type: Final = HasRecursiveType() + + +def has_recursive_types(typ: Type) -> bool: + """Check if a type contains any recursive aliases (recursively).""" + _has_recursive_type.reset() + return typ.accept(_has_recursive_type) + + +def split_with_prefix_and_suffix( + types: tuple[Type, ...], prefix: int, suffix: int +) -> tuple[tuple[Type, ...], tuple[Type, ...], tuple[Type, ...]]: + if len(types) <= prefix + suffix: + types = extend_args_for_prefix_and_suffix(types, prefix, suffix) + if suffix: + return types[:prefix], types[prefix:-suffix], types[-suffix:] + else: + return types[:prefix], types[prefix:], () + + +def extend_args_for_prefix_and_suffix( + types: tuple[Type, ...], prefix: int, suffix: int +) -> tuple[Type, ...]: + """Extend list of types by eating out from variadic tuple to satisfy prefix and suffix.""" + idx = None + item = None + for i, t in enumerate(types): + if isinstance(t, UnpackType): + p_type = get_proper_type(t.type) + if isinstance(p_type, Instance) and p_type.type.fullname == "builtins.tuple": + item = 
p_type.args[0] + idx = i + break + + if idx is None: + return types + assert item is not None + if idx < prefix: + start = (item,) * (prefix - idx) + else: + start = () + if len(types) - idx - 1 < suffix: + end = (item,) * (suffix - len(types) + idx + 1) + else: + end = () + return types[:idx] + start + (types[idx],) + end + types[idx + 1 :] + + +def flatten_nested_unions( + types: Sequence[Type], *, handle_type_alias_type: bool = True, handle_recursive: bool = True +) -> list[Type]: + """Flatten nested unions in a type list.""" + if not isinstance(types, list): + typelist = list(types) + else: + typelist = cast("list[Type]", types) + + # Fast path: most of the time there is nothing to flatten + if not any(isinstance(t, (TypeAliasType, UnionType)) for t in typelist): # type: ignore[misc] + return typelist + + flat_items: list[Type] = [] + for t in typelist: + if handle_type_alias_type and isinstance(t, TypeAliasType): + if not handle_recursive and t.is_recursive: + tp: Type = t + else: + tp = get_proper_type(t) + else: + tp = t + if isinstance(tp, ProperType) and isinstance(tp, UnionType): + flat_items.extend( + flatten_nested_unions( + tp.items, + handle_type_alias_type=handle_type_alias_type, + handle_recursive=handle_recursive, + ) + ) + else: + # Must preserve original aliases when possible. + flat_items.append(t) + return flat_items + + +def find_unpack_in_list(items: Sequence[Type]) -> int | None: + unpack_index: int | None = None + for i, item in enumerate(items): + if isinstance(item, UnpackType): + # We cannot fail here, so we must check this in an earlier + # semanal phase. + # Funky code here avoids mypyc narrowing the type of unpack_index. + old_index = unpack_index + assert old_index is None + # Don't return so that we can also sanity check there is only one. + unpack_index = i + return unpack_index + + +def flatten_nested_tuples(types: Iterable[Type]) -> list[Type]: + """Recursively flatten TupleTypes nested with Unpack. 
+ + For example this will transform + Tuple[A, Unpack[Tuple[B, Unpack[Tuple[C, D]]]]] + into + Tuple[A, B, C, D] + """ + res = [] + for typ in types: + if not isinstance(typ, UnpackType): + res.append(typ) + continue + p_type = get_proper_type(typ.type) + if not isinstance(p_type, TupleType): + res.append(typ) + continue + if isinstance(typ.type, TypeAliasType): + items = [] + for item in p_type.items: + if ( + isinstance(item, ProperType) + and isinstance(item, Instance) + or isinstance(item, TypeAliasType) + ): + if len(item.args) == 0: + item = item.copy_modified() + item.set_line(typ) + items.append(item) + else: + items = p_type.items + res.extend(flatten_nested_tuples(items)) + return res + + +def is_literal_type(typ: ProperType, fallback_fullname: str, value: LiteralValue) -> bool: + """Check if this type is a LiteralType with the given fallback type and value.""" + if isinstance(typ, Instance) and typ.last_known_value: + typ = typ.last_known_value + return ( + isinstance(typ, LiteralType) + and typ.fallback.type.fullname == fallback_fullname + and typ.value == value + ) + + +names: Final = globals().copy() +names.pop("NOT_READY", None) +deserialize_map: Final = { + key: obj.deserialize + for key, obj in names.items() + if isinstance(obj, type) and issubclass(obj, Type) and obj is not Type +} + + +def callable_with_ellipsis(any_type: AnyType, ret_type: Type, fallback: Instance) -> CallableType: + """Construct type Callable[..., ret_type].""" + return CallableType( + [any_type, any_type], + [ARG_STAR, ARG_STAR2], + [None, None], + ret_type=ret_type, + fallback=fallback, + is_ellipsis_args=True, + ) + + +def remove_dups(types: list[T]) -> list[T]: + if len(types) <= 1: + return types + # Get unique elements in order of appearance + all_types: set[T] = set() + new_types: list[T] = [] + for t in types: + if t not in all_types: + new_types.append(t) + all_types.add(t) + return new_types + + +def type_vars_as_args(type_vars: Sequence[TypeVarLikeType]) -> tuple[Type, ...]: + """Represent type variables as they would appear in a type argument list.""" + args: list[Type] = [] + for tv in type_vars: + if isinstance(tv, TypeVarTupleType): + args.append(UnpackType(tv)) + else: + args.append(tv) + return tuple(args) + + +# See docstring for mypy/cache.py for reserved tag ranges. +# Instance-related tags. +INSTANCE: Final[Tag] = 80 +INSTANCE_SIMPLE: Final[Tag] = 81 +INSTANCE_GENERIC: Final[Tag] = 82 +INSTANCE_STR: Final[Tag] = 83 +INSTANCE_FUNCTION: Final[Tag] = 84 +INSTANCE_INT: Final[Tag] = 85 +INSTANCE_BOOL: Final[Tag] = 86 +INSTANCE_OBJECT: Final[Tag] = 87 + +# Other type tags. +TYPE_ALIAS_TYPE: Final[Tag] = 100 +TYPE_VAR_TYPE: Final[Tag] = 101 +PARAM_SPEC_TYPE: Final[Tag] = 102 +TYPE_VAR_TUPLE_TYPE: Final[Tag] = 103 +UNBOUND_TYPE: Final[Tag] = 104 +UNPACK_TYPE: Final[Tag] = 105 +ANY_TYPE: Final[Tag] = 106 +UNINHABITED_TYPE: Final[Tag] = 107 +NONE_TYPE: Final[Tag] = 108 +DELETED_TYPE: Final[Tag] = 109 +CALLABLE_TYPE: Final[Tag] = 110 +OVERLOADED: Final[Tag] = 111 +TUPLE_TYPE: Final[Tag] = 112 +TYPED_DICT_TYPE: Final[Tag] = 113 +LITERAL_TYPE: Final[Tag] = 114 +UNION_TYPE: Final[Tag] = 115 +TYPE_TYPE: Final[Tag] = 116 +PARAMETERS: Final[Tag] = 117 + + +def read_type(data: ReadBuffer, tag: Tag | None = None) -> Type: + if tag is None: + tag = read_tag(data) + # The branches here are ordered manually by type "popularity". 
+ if tag == INSTANCE: + return Instance.read(data) + if tag == ANY_TYPE: + return AnyType.read(data) + if tag == TYPE_VAR_TYPE: + return TypeVarType.read(data) + if tag == CALLABLE_TYPE: + return CallableType.read(data) + if tag == NONE_TYPE: + return NoneType.read(data) + if tag == UNION_TYPE: + return UnionType.read(data) + if tag == LITERAL_TYPE: + return LiteralType.read(data) + if tag == TYPE_ALIAS_TYPE: + return TypeAliasType.read(data) + if tag == TUPLE_TYPE: + return TupleType.read(data) + if tag == TYPED_DICT_TYPE: + return TypedDictType.read(data) + if tag == TYPE_TYPE: + return TypeType.read(data) + if tag == OVERLOADED: + return Overloaded.read(data) + if tag == PARAM_SPEC_TYPE: + return ParamSpecType.read(data) + if tag == TYPE_VAR_TUPLE_TYPE: + return TypeVarTupleType.read(data) + if tag == UNPACK_TYPE: + return UnpackType.read(data) + if tag == PARAMETERS: + return Parameters.read(data) + if tag == UNINHABITED_TYPE: + return UninhabitedType.read(data) + if tag == UNBOUND_TYPE: + return UnboundType.read(data) + if tag == DELETED_TYPE: + return DeletedType.read(data) + assert False, f"Unknown type tag {tag}" + + +def read_function_like(data: ReadBuffer, tag: Tag) -> FunctionLike: + if tag == CALLABLE_TYPE: + return CallableType.read(data) + if tag == OVERLOADED: + return Overloaded.read(data) + assert False, f"Invalid type tag for FunctionLike {tag}" + + +def read_type_var_likes(data: ReadBuffer) -> list[TypeVarLikeType]: + """Specialized version of read_type_list() for lists of type variables.""" + assert read_tag(data) == LIST_GEN + ret: list[TypeVarLikeType] = [] + for _ in range(read_int_bare(data)): + tag = read_tag(data) + if tag == TYPE_VAR_TYPE: + ret.append(TypeVarType.read(data)) + elif tag == PARAM_SPEC_TYPE: + ret.append(ParamSpecType.read(data)) + elif tag == TYPE_VAR_TUPLE_TYPE: + ret.append(TypeVarTupleType.read(data)) + else: + assert False, f"Invalid type tag for TypeVarLikeType {tag}" + return ret + + +def read_type_opt(data: ReadBuffer) -> Type | None: + tag = read_tag(data) + if tag == LITERAL_NONE: + return None + return read_type(data, tag) + + +def write_type_opt(data: WriteBuffer, value: Type | None) -> None: + if value is not None: + value.write(data) + else: + write_tag(data, LITERAL_NONE) + + +def read_type_list(data: ReadBuffer) -> list[Type]: + assert read_tag(data) == LIST_GEN + size = read_int_bare(data) + return [read_type(data) for _ in range(size)] + + +def write_type_list(data: WriteBuffer, value: Sequence[Type]) -> None: + write_tag(data, LIST_GEN) + write_int_bare(data, len(value)) + for item in value: + item.write(data) + + +def read_type_map(data: ReadBuffer) -> dict[str, Type]: + assert read_tag(data) == DICT_STR_GEN + size = read_int_bare(data) + return {read_str_bare(data): read_type(data) for _ in range(size)} + + +def write_type_map(data: WriteBuffer, value: dict[str, Type]) -> None: + write_tag(data, DICT_STR_GEN) + write_int_bare(data, len(value)) + for key in sorted(value): + write_str_bare(data, key) + value[key].write(data) + + +# This cyclic import is unfortunate, but to avoid it we would need to move away all uses +# of get_proper_type() from types.py. Majority of them have been removed, but few remaining +# are quite tricky to get rid of, but ultimately we want to do it at some point. 
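The read_*/write_* helpers above all follow one convention: a one-byte tag, a bare length prefix for collections, then the items, each dispatched on its own tag (with LITERAL_NONE standing in for a missing optional value). Below is a minimal, self-contained sketch of that layout; the tag values, the struct/BytesIO buffer handling, and the helper names are invented for illustration and are not mypy's actual cache classes.

import struct
from io import BytesIO

LIST_GEN = 1      # hypothetical tag: "a generic list follows"
STR_ITEM = 2      # hypothetical tag: "a string payload follows"

def write_tag(buf: BytesIO, tag: int) -> None:
    buf.write(struct.pack("<B", tag))

def read_tag(buf: BytesIO) -> int:
    return struct.unpack("<B", buf.read(1))[0]

def write_str_list(buf: BytesIO, items: list[str]) -> None:
    write_tag(buf, LIST_GEN)
    buf.write(struct.pack("<I", len(items)))          # bare length prefix
    for item in items:
        write_tag(buf, STR_ITEM)
        data = item.encode()
        buf.write(struct.pack("<I", len(data)))
        buf.write(data)

def read_str_list(buf: BytesIO) -> list[str]:
    assert read_tag(buf) == LIST_GEN
    (count,) = struct.unpack("<I", buf.read(4))
    out: list[str] = []
    for _ in range(count):
        assert read_tag(buf) == STR_ITEM              # dispatch on the tag, like read_type()
        (size,) = struct.unpack("<I", buf.read(4))
        out.append(buf.read(size).decode())
    return out

buf = BytesIO()
write_str_list(buf, ["builtins.int", "builtins.str"])
buf.seek(0)
assert read_str_list(buf) == ["builtins.int", "builtins.str"]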
+from mypy.expandtype import ExpandTypeVisitor + + +class InstantiateAliasVisitor(ExpandTypeVisitor): + def visit_union_type(self, t: UnionType) -> Type: + # Unlike regular expand_type(), we don't do any simplification for unions, + # not even removing strict duplicates. There are three reasons for this: + # * get_proper_type() is a very hot function, even slightest slow down will + # cause a perf regression + # * We want to preserve this historical behaviour, to avoid possible + # regressions + # * Simplifying unions may (indirectly) call get_proper_type(), causing + # infinite recursion. + return TypeTranslator.visit_union_type(self, t) diff --git a/.venv/lib/python3.12/site-packages/mypy/types_utils.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/types_utils.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..b335f11 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/types_utils.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/types_utils.py b/.venv/lib/python3.12/site-packages/mypy/types_utils.py new file mode 100644 index 0000000..124d024 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/types_utils.py @@ -0,0 +1,180 @@ +""" +This module is for (more basic) type operations that should not depend on is_subtype(), +meet_types(), join_types() etc. We don't want to keep them in mypy/types.py for two reasons: +* Reduce the size of that module. +* Reduce use of get_proper_type() in types.py to avoid cyclic imports + expand_type <-> types, if we move get_proper_type() to the former. +""" + +from __future__ import annotations + +from collections.abc import Iterable +from typing import Callable, cast + +from mypy.nodes import ARG_STAR, ARG_STAR2, FuncItem, TypeAlias +from mypy.types import ( + AnyType, + CallableType, + Instance, + LiteralType, + NoneType, + Overloaded, + ParamSpecType, + ProperType, + TupleType, + Type, + TypeAliasType, + TypeType, + TypeVarType, + UnionType, + UnpackType, + flatten_nested_unions, + get_proper_type, + get_proper_types, +) + + +def flatten_types(types: Iterable[Type]) -> Iterable[Type]: + for t in types: + tp = get_proper_type(t) + if isinstance(tp, UnionType): + yield from flatten_types(tp.items) + else: + yield t + + +def strip_type(typ: Type) -> Type: + """Make a copy of type without 'debugging info' (function name).""" + orig_typ = typ + typ = get_proper_type(typ) + if isinstance(typ, CallableType): + return typ.copy_modified(name=None) + elif isinstance(typ, Overloaded): + return Overloaded([cast(CallableType, strip_type(item)) for item in typ.items]) + else: + return orig_typ + + +def is_invalid_recursive_alias(seen_nodes: set[TypeAlias], target: Type) -> bool: + """Flag aliases like A = Union[int, A], T = tuple[int, *T] (and similar mutual aliases). + + Such aliases don't make much sense, and cause problems in later phases. 
+ """ + if isinstance(target, TypeAliasType): + if target.alias in seen_nodes: + return True + assert target.alias, f"Unfixed type alias {target.type_ref}" + return is_invalid_recursive_alias(seen_nodes | {target.alias}, get_proper_type(target)) + assert isinstance(target, ProperType) + if not isinstance(target, (UnionType, TupleType)): + return False + if isinstance(target, UnionType): + return any(is_invalid_recursive_alias(seen_nodes, item) for item in target.items) + for item in target.items: + if isinstance(item, UnpackType): + if is_invalid_recursive_alias(seen_nodes, item.type): + return True + return False + + +def get_bad_type_type_item(item: Type) -> str | None: + """Prohibit types like Type[Type[...]]. + + Such types are explicitly prohibited by PEP 484. Also, they cause problems + with recursive types like T = Type[T], because internal representation of + TypeType item is normalized (i.e. always a proper type). + + Also forbids `Type[Literal[...]]`, because typing spec does not allow it. + """ + # TODO: what else cannot be present in `type[...]`? + item = get_proper_type(item) + if isinstance(item, TypeType): + return "Type[...]" + if isinstance(item, LiteralType): + return "Literal[...]" + if isinstance(item, UnionType): + items = [ + bad_item + for typ in flatten_nested_unions(item.items) + if (bad_item := get_bad_type_type_item(typ)) is not None + ] + if not items: + return None + if len(items) == 1: + return items[0] + return f"Union[{', '.join(items)}]" + return None + + +def is_union_with_any(tp: Type) -> bool: + """Is this a union with Any or a plain Any type?""" + tp = get_proper_type(tp) + if isinstance(tp, AnyType): + return True + if not isinstance(tp, UnionType): + return False + return any(is_union_with_any(t) for t in get_proper_types(tp.items)) + + +def is_generic_instance(tp: Type) -> bool: + tp = get_proper_type(tp) + return isinstance(tp, Instance) and bool(tp.args) + + +def is_overlapping_none(t: Type) -> bool: + t = get_proper_type(t) + return isinstance(t, NoneType) or ( + isinstance(t, UnionType) and any(isinstance(get_proper_type(e), NoneType) for e in t.items) + ) + + +def remove_optional(typ: Type) -> Type: + typ = get_proper_type(typ) + if isinstance(typ, UnionType): + return UnionType.make_union( + [t for t in typ.items if not isinstance(get_proper_type(t), NoneType)] + ) + else: + return typ + + +def is_self_type_like(typ: Type, *, is_classmethod: bool) -> bool: + """Does this look like a self-type annotation?""" + typ = get_proper_type(typ) + if not is_classmethod: + return isinstance(typ, TypeVarType) + if not isinstance(typ, TypeType): + return False + return isinstance(typ.item, TypeVarType) + + +def store_argument_type( + defn: FuncItem, i: int, typ: CallableType, named_type: Callable[[str, list[Type]], Instance] +) -> None: + arg_type = typ.arg_types[i] + if typ.arg_kinds[i] == ARG_STAR: + if isinstance(arg_type, ParamSpecType): + pass + elif isinstance(arg_type, UnpackType): + unpacked_type = get_proper_type(arg_type.type) + if isinstance(unpacked_type, TupleType): + # Instead of using Tuple[Unpack[Tuple[...]]], just use Tuple[...] + arg_type = unpacked_type + elif ( + isinstance(unpacked_type, Instance) + and unpacked_type.type.fullname == "builtins.tuple" + ): + arg_type = unpacked_type + else: + # TODO: verify that we can only have a TypeVarTuple here. + arg_type = TupleType( + [arg_type], + fallback=named_type("builtins.tuple", [named_type("builtins.object", [])]), + ) + else: + # builtins.tuple[T] is typing.Tuple[T, ...] 
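store_argument_type() above is what turns a declared *args: int into tuple[int, ...] and **kwargs: str into dict[str, str] on the underlying argument variables. A small external check of that behaviour, assuming a local mypy installation (revealed-type formatting may vary slightly by version):

from mypy import api

source = (
    "def f(*args: int, **kwargs: str) -> None:\n"
    "    reveal_type(args)\n"
    "    reveal_type(kwargs)\n"
)
stdout, _, _ = api.run(["-c", source, "--no-error-summary"])
print(stdout)
# note: Revealed type is "builtins.tuple[builtins.int, ...]"
# note: Revealed type is "builtins.dict[builtins.str, builtins.str]"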
+ arg_type = named_type("builtins.tuple", [arg_type]) + elif typ.arg_kinds[i] == ARG_STAR2: + if not isinstance(arg_type, ParamSpecType) and not typ.unpack_kwargs: + arg_type = named_type("builtins.dict", [named_type("builtins.str", []), arg_type]) + defn.arguments[i].variable.type = arg_type diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/LICENSE b/.venv/lib/python3.12/site-packages/mypy/typeshed/LICENSE new file mode 100644 index 0000000..1326448 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/LICENSE @@ -0,0 +1,237 @@ +The "typeshed" project is licensed under the terms of the Apache license, as +reproduced below. + += = = = = + +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + += = = = = + +Parts of typeshed are licensed under different licenses (like the MIT +license), reproduced below. + += = = = = + +The MIT License + +Copyright (c) 2015 Jukka Lehtosalo and contributors + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + += = = = = diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/VERSIONS b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/VERSIONS new file mode 100644 index 0000000..6fcf016 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/VERSIONS @@ -0,0 +1,348 @@ +# The structure of this file is as follows: +# - Blank lines and comments starting with `#` are ignored. +# - Lines contain the name of a module, followed by a colon, +# a space, and a version range (for example: `symbol: 3.0-3.9`). +# +# Version ranges may be of the form "X.Y-A.B" or "X.Y-". The +# first form means that a module was introduced in version X.Y and last +# available in version A.B. The second form means that the module was +# introduced in version X.Y and is still available in the latest +# version of Python. +# +# If a submodule is not listed separately, it has the same lifetime as +# its parent module. +# +# Python versions before 3.0 are ignored, so any module that was already +# present in 3.0 will have "3.0" as its minimum version. 
Version ranges +# for unsupported versions of Python 3 are generally accurate but we do +# not guarantee their correctness. + +__future__: 3.0- +__main__: 3.0- +_ast: 3.0- +_asyncio: 3.0- +_bisect: 3.0- +_blake2: 3.6- +_bootlocale: 3.4-3.9 +_bz2: 3.3- +_codecs: 3.0- +_collections_abc: 3.3- +_compat_pickle: 3.1- +_compression: 3.5-3.13 +_contextvars: 3.7- +_csv: 3.0- +_ctypes: 3.0- +_curses: 3.0- +_curses_panel: 3.0- +_dbm: 3.0- +_decimal: 3.3- +_frozen_importlib: 3.0- +_frozen_importlib_external: 3.5- +_gdbm: 3.0- +_hashlib: 3.0- +_heapq: 3.0- +_imp: 3.0- +_interpchannels: 3.13- +_interpqueues: 3.13- +_interpreters: 3.13- +_io: 3.0- +_json: 3.0- +_locale: 3.0- +_lsprof: 3.0- +_lzma: 3.3- +_markupbase: 3.0- +_msi: 3.0-3.12 +_multibytecodec: 3.0- +_operator: 3.4- +_osx_support: 3.0- +_pickle: 3.0- +_posixsubprocess: 3.2- +_py_abc: 3.7- +_pydecimal: 3.5- +_queue: 3.7- +_random: 3.0- +_sitebuiltins: 3.4- +_socket: 3.0- # present in 3.0 at runtime, but not in typeshed +_sqlite3: 3.0- +_ssl: 3.0- +_stat: 3.4- +_struct: 3.0- +_thread: 3.0- +_threading_local: 3.0- +_tkinter: 3.0- +_tracemalloc: 3.4- +_typeshed: 3.0- # not present at runtime, only for type checking +_warnings: 3.0- +_weakref: 3.0- +_weakrefset: 3.0- +_winapi: 3.3- +_zstd: 3.14- +abc: 3.0- +aifc: 3.0-3.12 +annotationlib: 3.14- +antigravity: 3.0- +argparse: 3.0- +array: 3.0- +ast: 3.0- +asynchat: 3.0-3.11 +asyncio: 3.4- +asyncio.exceptions: 3.8- +asyncio.format_helpers: 3.7- +asyncio.graph: 3.14- +asyncio.mixins: 3.10- +asyncio.runners: 3.7- +asyncio.staggered: 3.8- +asyncio.taskgroups: 3.11- +asyncio.threads: 3.9- +asyncio.timeouts: 3.11- +asyncio.tools: 3.14- +asyncio.trsock: 3.8- +asyncore: 3.0-3.11 +atexit: 3.0- +audioop: 3.0-3.12 +base64: 3.0- +bdb: 3.0- +binascii: 3.0- +binhex: 3.0-3.10 +bisect: 3.0- +builtins: 3.0- +bz2: 3.0- +cProfile: 3.0- +calendar: 3.0- +cgi: 3.0-3.12 +cgitb: 3.0-3.12 +chunk: 3.0-3.12 +cmath: 3.0- +cmd: 3.0- +code: 3.0- +codecs: 3.0- +codeop: 3.0- +collections: 3.0- +collections.abc: 3.3- +colorsys: 3.0- +compileall: 3.0- +compression: 3.14- +concurrent: 3.2- +concurrent.futures.interpreter: 3.14- +concurrent.interpreters: 3.14- +configparser: 3.0- +contextlib: 3.0- +contextvars: 3.7- +copy: 3.0- +copyreg: 3.0- +crypt: 3.0-3.12 +csv: 3.0- +ctypes: 3.0- +curses: 3.0- +dataclasses: 3.7- +datetime: 3.0- +dbm: 3.0- +dbm.sqlite3: 3.13- +decimal: 3.0- +difflib: 3.0- +dis: 3.0- +distutils: 3.0-3.11 +distutils.command.bdist_msi: 3.0-3.10 +distutils.command.bdist_wininst: 3.0-3.9 +doctest: 3.0- +email: 3.0- +encodings: 3.0- +encodings.cp1125: 3.4- +encodings.cp273: 3.4- +encodings.cp858: 3.2- +encodings.koi8_t: 3.5- +encodings.kz1048: 3.5- +ensurepip: 3.0- +enum: 3.4- +errno: 3.0- +faulthandler: 3.3- +fcntl: 3.0- +filecmp: 3.0- +fileinput: 3.0- +fnmatch: 3.0- +formatter: 3.0-3.9 +fractions: 3.0- +ftplib: 3.0- +functools: 3.0- +gc: 3.0- +genericpath: 3.0- +getopt: 3.0- +getpass: 3.0- +gettext: 3.0- +glob: 3.0- +graphlib: 3.9- +grp: 3.0- +gzip: 3.0- +hashlib: 3.0- +heapq: 3.0- +hmac: 3.0- +html: 3.0- +http: 3.0- +imaplib: 3.0- +imghdr: 3.0-3.12 +imp: 3.0-3.11 +importlib: 3.0- +importlib._abc: 3.10- +importlib._bootstrap: 3.0- +importlib._bootstrap_external: 3.5- +importlib.metadata: 3.8- +importlib.metadata._meta: 3.10- +importlib.metadata.diagnose: 3.13- +importlib.readers: 3.10- +importlib.resources: 3.7- +importlib.resources._common: 3.11- +importlib.resources._functional: 3.13- +importlib.resources.abc: 3.11- +importlib.resources.readers: 3.11- +importlib.resources.simple: 3.11- +importlib.simple: 3.11- +inspect: 
3.0- +io: 3.0- +ipaddress: 3.3- +itertools: 3.0- +json: 3.0- +keyword: 3.0- +lib2to3: 3.0-3.12 +linecache: 3.0- +locale: 3.0- +logging: 3.0- +lzma: 3.3- +mailbox: 3.0- +mailcap: 3.0-3.12 +marshal: 3.0- +math: 3.0- +mimetypes: 3.0- +mmap: 3.0- +modulefinder: 3.0- +msilib: 3.0-3.12 +msvcrt: 3.0- +multiprocessing: 3.0- +multiprocessing.resource_tracker: 3.8- +multiprocessing.shared_memory: 3.8- +netrc: 3.0- +nis: 3.0-3.12 +nntplib: 3.0-3.12 +nt: 3.0- +ntpath: 3.0- +nturl2path: 3.0- +numbers: 3.0- +opcode: 3.0- +operator: 3.0- +optparse: 3.0- +os: 3.0- +ossaudiodev: 3.0-3.12 +parser: 3.0-3.9 +pathlib: 3.4- +pathlib.types: 3.14- +pdb: 3.0- +pickle: 3.0- +pickletools: 3.0- +pipes: 3.0-3.12 +pkgutil: 3.0- +platform: 3.0- +plistlib: 3.0- +poplib: 3.0- +posix: 3.0- +posixpath: 3.0- +pprint: 3.0- +profile: 3.0- +pstats: 3.0- +pty: 3.0- +pwd: 3.0- +py_compile: 3.0- +pyclbr: 3.0- +pydoc: 3.0- +pydoc_data: 3.0- +pyexpat: 3.0- +queue: 3.0- +quopri: 3.0- +random: 3.0- +re: 3.0- +readline: 3.0- +reprlib: 3.0- +resource: 3.0- +rlcompleter: 3.0- +runpy: 3.0- +sched: 3.0- +secrets: 3.6- +select: 3.0- +selectors: 3.4- +shelve: 3.0- +shlex: 3.0- +shutil: 3.0- +signal: 3.0- +site: 3.0- +smtpd: 3.0-3.11 +smtplib: 3.0- +sndhdr: 3.0-3.12 +socket: 3.0- +socketserver: 3.0- +spwd: 3.0-3.12 +sqlite3: 3.0- +sre_compile: 3.0- +sre_constants: 3.0- +sre_parse: 3.0- +ssl: 3.0- +stat: 3.0- +statistics: 3.4- +string: 3.0- +string.templatelib: 3.14- +stringprep: 3.0- +struct: 3.0- +subprocess: 3.0- +sunau: 3.0-3.12 +symbol: 3.0-3.9 +symtable: 3.0- +sys: 3.0- +sys._monitoring: 3.12- # Doesn't actually exist. See comments in the stub. +sysconfig: 3.0- +syslog: 3.0- +tabnanny: 3.0- +tarfile: 3.0- +telnetlib: 3.0-3.12 +tempfile: 3.0- +termios: 3.0- +textwrap: 3.0- +this: 3.0- +threading: 3.0- +time: 3.0- +timeit: 3.0- +tkinter: 3.0- +tkinter.tix: 3.0-3.12 +token: 3.0- +tokenize: 3.0- +tomllib: 3.11- +trace: 3.0- +traceback: 3.0- +tracemalloc: 3.4- +tty: 3.0- +turtle: 3.0- +types: 3.0- +typing: 3.5- +typing_extensions: 3.0- +unicodedata: 3.0- +unittest: 3.0- +unittest._log: 3.9- +unittest.async_case: 3.8- +urllib: 3.0- +uu: 3.0-3.12 +uuid: 3.0- +venv: 3.3- +warnings: 3.0- +wave: 3.0- +weakref: 3.0- +webbrowser: 3.0- +winreg: 3.0- +winsound: 3.0- +wsgiref: 3.0- +wsgiref.types: 3.11- +xdrlib: 3.0-3.12 +xml: 3.0- +xmlrpc: 3.0- +xxlimited: 3.2- +zipapp: 3.5- +zipfile: 3.0- +zipfile._path: 3.12- +zipimport: 3.0- +zlib: 3.0- +zoneinfo: 3.9- diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/__future__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/__future__.pyi new file mode 100644 index 0000000..a90cf1e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/__future__.pyi @@ -0,0 +1,36 @@ +from typing_extensions import TypeAlias + +_VersionInfo: TypeAlias = tuple[int, int, int, str, int] + +class _Feature: + def __init__(self, optionalRelease: _VersionInfo, mandatoryRelease: _VersionInfo | None, compiler_flag: int) -> None: ... + def getOptionalRelease(self) -> _VersionInfo: ... + def getMandatoryRelease(self) -> _VersionInfo | None: ... 
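For orientation, _Feature objects are ordinary runtime objects in the __future__ module, so the stubbed methods above can be exercised directly; a tiny standard-library-only sketch (printed values depend on the running Python version):

import __future__

feat = __future__.annotations
print(feat.getOptionalRelease())    # e.g. (3, 7, 0, 'beta', 1)
print(feat.getMandatoryRelease())   # a version tuple, or None if undecided
print(feat.compiler_flag)           # int flag usable with compile(..., flags=...)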
+ compiler_flag: int + +absolute_import: _Feature +division: _Feature +generators: _Feature +nested_scopes: _Feature +print_function: _Feature +unicode_literals: _Feature +with_statement: _Feature +barry_as_FLUFL: _Feature +generator_stop: _Feature +annotations: _Feature + +all_feature_names: list[str] # undocumented + +__all__ = [ + "all_feature_names", + "absolute_import", + "division", + "generators", + "nested_scopes", + "print_function", + "unicode_literals", + "with_statement", + "barry_as_FLUFL", + "generator_stop", + "annotations", +] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/__main__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/__main__.pyi new file mode 100644 index 0000000..5b0f74f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/__main__.pyi @@ -0,0 +1 @@ +def __getattr__(name: str): ... # incomplete module diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_ast.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_ast.pyi new file mode 100644 index 0000000..d8d5a18 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_ast.pyi @@ -0,0 +1,145 @@ +import sys +from ast import ( + AST as AST, + Add as Add, + And as And, + AnnAssign as AnnAssign, + Assert as Assert, + Assign as Assign, + AsyncFor as AsyncFor, + AsyncFunctionDef as AsyncFunctionDef, + AsyncWith as AsyncWith, + Attribute as Attribute, + AugAssign as AugAssign, + Await as Await, + BinOp as BinOp, + BitAnd as BitAnd, + BitOr as BitOr, + BitXor as BitXor, + BoolOp as BoolOp, + Break as Break, + Call as Call, + ClassDef as ClassDef, + Compare as Compare, + Constant as Constant, + Continue as Continue, + Del as Del, + Delete as Delete, + Dict as Dict, + DictComp as DictComp, + Div as Div, + Eq as Eq, + ExceptHandler as ExceptHandler, + Expr as Expr, + Expression as Expression, + FloorDiv as FloorDiv, + For as For, + FormattedValue as FormattedValue, + FunctionDef as FunctionDef, + FunctionType as FunctionType, + GeneratorExp as GeneratorExp, + Global as Global, + Gt as Gt, + GtE as GtE, + If as If, + IfExp as IfExp, + Import as Import, + ImportFrom as ImportFrom, + In as In, + Interactive as Interactive, + Invert as Invert, + Is as Is, + IsNot as IsNot, + JoinedStr as JoinedStr, + Lambda as Lambda, + List as List, + ListComp as ListComp, + Load as Load, + LShift as LShift, + Lt as Lt, + LtE as LtE, + MatMult as MatMult, + Mod as Mod, + Module as Module, + Mult as Mult, + Name as Name, + NamedExpr as NamedExpr, + Nonlocal as Nonlocal, + Not as Not, + NotEq as NotEq, + NotIn as NotIn, + Or as Or, + Pass as Pass, + Pow as Pow, + Raise as Raise, + Return as Return, + RShift as RShift, + Set as Set, + SetComp as SetComp, + Slice as Slice, + Starred as Starred, + Store as Store, + Sub as Sub, + Subscript as Subscript, + Try as Try, + Tuple as Tuple, + TypeIgnore as TypeIgnore, + UAdd as UAdd, + UnaryOp as UnaryOp, + USub as USub, + While as While, + With as With, + Yield as Yield, + YieldFrom as YieldFrom, + alias as alias, + arg as arg, + arguments as arguments, + boolop as boolop, + cmpop as cmpop, + comprehension as comprehension, + excepthandler as excepthandler, + expr as expr, + expr_context as expr_context, + keyword as keyword, + mod as mod, + operator as operator, + stmt as stmt, + type_ignore as type_ignore, + unaryop as unaryop, + withitem as withitem, +) +from typing import Final + +if sys.version_info >= (3, 12): + from ast import ( + ParamSpec as ParamSpec, + TypeAlias as TypeAlias, + TypeVar as 
TypeVar, + TypeVarTuple as TypeVarTuple, + type_param as type_param, + ) + +if sys.version_info >= (3, 11): + from ast import TryStar as TryStar + +if sys.version_info >= (3, 10): + from ast import ( + Match as Match, + MatchAs as MatchAs, + MatchClass as MatchClass, + MatchMapping as MatchMapping, + MatchOr as MatchOr, + MatchSequence as MatchSequence, + MatchSingleton as MatchSingleton, + MatchStar as MatchStar, + MatchValue as MatchValue, + match_case as match_case, + pattern as pattern, + ) + +PyCF_ALLOW_TOP_LEVEL_AWAIT: Final = 8192 +PyCF_ONLY_AST: Final = 1024 +PyCF_TYPE_COMMENTS: Final = 4096 + +if sys.version_info >= (3, 13): + PyCF_OPTIMIZED_AST: Final = 33792 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_asyncio.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_asyncio.pyi new file mode 100644 index 0000000..f43178e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_asyncio.pyi @@ -0,0 +1,112 @@ +import sys +from asyncio.events import AbstractEventLoop +from collections.abc import Awaitable, Callable, Coroutine, Generator, Iterable +from contextvars import Context +from types import FrameType, GenericAlias +from typing import Any, Literal, TextIO, TypeVar +from typing_extensions import Self, TypeAlias, disjoint_base + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_TaskYieldType: TypeAlias = Future[object] | None + +@disjoint_base +class Future(Awaitable[_T], Iterable[_T]): + _state: str + @property + def _exception(self) -> BaseException | None: ... + _blocking: bool + @property + def _log_traceback(self) -> bool: ... + @_log_traceback.setter + def _log_traceback(self, val: Literal[False]) -> None: ... + _asyncio_future_blocking: bool # is a part of duck-typing contract for `Future` + def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... + def __del__(self) -> None: ... + def get_loop(self) -> AbstractEventLoop: ... + @property + def _callbacks(self) -> list[tuple[Callable[[Self], Any], Context]]: ... + def add_done_callback(self, fn: Callable[[Self], object], /, *, context: Context | None = None) -> None: ... + def cancel(self, msg: Any | None = None) -> bool: ... + def cancelled(self) -> bool: ... + def done(self) -> bool: ... + def result(self) -> _T: ... + def exception(self) -> BaseException | None: ... + def remove_done_callback(self, fn: Callable[[Self], object], /) -> int: ... + def set_result(self, result: _T, /) -> None: ... + def set_exception(self, exception: type | BaseException, /) -> None: ... + def __iter__(self) -> Generator[Any, None, _T]: ... + def __await__(self) -> Generator[Any, None, _T]: ... + @property + def _loop(self) -> AbstractEventLoop: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +if sys.version_info >= (3, 12): + _TaskCompatibleCoro: TypeAlias = Coroutine[Any, Any, _T_co] +else: + _TaskCompatibleCoro: TypeAlias = Generator[_TaskYieldType, None, _T_co] | Coroutine[Any, Any, _T_co] + +# mypy and pyright complain that a subclass of an invariant class shouldn't be covariant. +# While this is true in general, here it's sort-of okay to have a covariant subclass, +# since the only reason why `asyncio.Future` is invariant is the `set_result()` method, +# and `asyncio.Task.set_result()` always raises. 
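The comment above can be checked at runtime with nothing but the standard library: asyncio.Task overrides set_result() to always raise, which is what makes treating Task as covariant in its result type tolerable in practice. A minimal sketch:

import asyncio

async def main() -> None:
    task = asyncio.create_task(asyncio.sleep(0))
    try:
        task.set_result(42)       # Task forbids this regardless of its state
    except RuntimeError as exc:
        print(exc)                # "Task does not support set_result operation"
    await task

asyncio.run(main())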
+@disjoint_base +class Task(Future[_T_co]): # type: ignore[type-var] # pyright: ignore[reportInvalidTypeArguments] + if sys.version_info >= (3, 12): + def __init__( + self, + coro: _TaskCompatibleCoro[_T_co], + *, + loop: AbstractEventLoop | None = None, + name: str | None = None, + context: Context | None = None, + eager_start: bool = False, + ) -> None: ... + elif sys.version_info >= (3, 11): + def __init__( + self, + coro: _TaskCompatibleCoro[_T_co], + *, + loop: AbstractEventLoop | None = None, + name: str | None = None, + context: Context | None = None, + ) -> None: ... + else: + def __init__( + self, coro: _TaskCompatibleCoro[_T_co], *, loop: AbstractEventLoop | None = None, name: str | None = None + ) -> None: ... + + if sys.version_info >= (3, 12): + def get_coro(self) -> _TaskCompatibleCoro[_T_co] | None: ... + else: + def get_coro(self) -> _TaskCompatibleCoro[_T_co]: ... + + def get_name(self) -> str: ... + def set_name(self, value: object, /) -> None: ... + if sys.version_info >= (3, 12): + def get_context(self) -> Context: ... + + def get_stack(self, *, limit: int | None = None) -> list[FrameType]: ... + def print_stack(self, *, limit: int | None = None, file: TextIO | None = None) -> None: ... + if sys.version_info >= (3, 11): + def cancelling(self) -> int: ... + def uncancel(self) -> int: ... + + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +def get_event_loop() -> AbstractEventLoop: ... +def get_running_loop() -> AbstractEventLoop: ... +def _set_running_loop(loop: AbstractEventLoop | None, /) -> None: ... +def _get_running_loop() -> AbstractEventLoop: ... +def _register_task(task: Task[Any]) -> None: ... +def _unregister_task(task: Task[Any]) -> None: ... +def _enter_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ... +def _leave_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ... + +if sys.version_info >= (3, 12): + def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: ... + +if sys.version_info >= (3, 14): + def future_discard_from_awaited_by(future: Future[Any], waiter: Future[Any], /) -> None: ... + def future_add_to_awaited_by(future: Future[Any], waiter: Future[Any], /) -> None: ... + def all_tasks(loop: AbstractEventLoop | None = None) -> set[Task[Any]]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_bisect.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_bisect.pyi new file mode 100644 index 0000000..58488e3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_bisect.pyi @@ -0,0 +1,84 @@ +import sys +from _typeshed import SupportsLenAndGetItem, SupportsRichComparisonT +from collections.abc import Callable, MutableSequence +from typing import TypeVar, overload + +_T = TypeVar("_T") + +if sys.version_info >= (3, 10): + @overload + def bisect_left( + a: SupportsLenAndGetItem[SupportsRichComparisonT], + x: SupportsRichComparisonT, + lo: int = 0, + hi: int | None = None, + *, + key: None = None, + ) -> int: ... + @overload + def bisect_left( + a: SupportsLenAndGetItem[_T], + x: SupportsRichComparisonT, + lo: int = 0, + hi: int | None = None, + *, + key: Callable[[_T], SupportsRichComparisonT], + ) -> int: ... + @overload + def bisect_right( + a: SupportsLenAndGetItem[SupportsRichComparisonT], + x: SupportsRichComparisonT, + lo: int = 0, + hi: int | None = None, + *, + key: None = None, + ) -> int: ... 
+ @overload + def bisect_right( + a: SupportsLenAndGetItem[_T], + x: SupportsRichComparisonT, + lo: int = 0, + hi: int | None = None, + *, + key: Callable[[_T], SupportsRichComparisonT], + ) -> int: ... + @overload + def insort_left( + a: MutableSequence[SupportsRichComparisonT], + x: SupportsRichComparisonT, + lo: int = 0, + hi: int | None = None, + *, + key: None = None, + ) -> None: ... + @overload + def insort_left( + a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT] + ) -> None: ... + @overload + def insort_right( + a: MutableSequence[SupportsRichComparisonT], + x: SupportsRichComparisonT, + lo: int = 0, + hi: int | None = None, + *, + key: None = None, + ) -> None: ... + @overload + def insort_right( + a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT] + ) -> None: ... + +else: + def bisect_left( + a: SupportsLenAndGetItem[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None + ) -> int: ... + def bisect_right( + a: SupportsLenAndGetItem[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None + ) -> int: ... + def insort_left( + a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None + ) -> None: ... + def insort_right( + a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None + ) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_blake2.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_blake2.pyi new file mode 100644 index 0000000..a6c3869 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_blake2.pyi @@ -0,0 +1,119 @@ +import sys +from _typeshed import ReadableBuffer +from typing import ClassVar, Final, final +from typing_extensions import Self + +BLAKE2B_MAX_DIGEST_SIZE: Final = 64 +BLAKE2B_MAX_KEY_SIZE: Final = 64 +BLAKE2B_PERSON_SIZE: Final = 16 +BLAKE2B_SALT_SIZE: Final = 16 +BLAKE2S_MAX_DIGEST_SIZE: Final = 32 +BLAKE2S_MAX_KEY_SIZE: Final = 32 +BLAKE2S_PERSON_SIZE: Final = 8 +BLAKE2S_SALT_SIZE: Final = 8 + +@final +class blake2b: + MAX_DIGEST_SIZE: ClassVar[int] = 64 + MAX_KEY_SIZE: ClassVar[int] = 64 + PERSON_SIZE: ClassVar[int] = 16 + SALT_SIZE: ClassVar[int] = 16 + block_size: int + digest_size: int + name: str + if sys.version_info >= (3, 13): + def __new__( + cls, + data: ReadableBuffer = b"", + *, + digest_size: int = 64, + key: ReadableBuffer = b"", + salt: ReadableBuffer = b"", + person: ReadableBuffer = b"", + fanout: int = 1, + depth: int = 1, + leaf_size: int = 0, + node_offset: int = 0, + node_depth: int = 0, + inner_size: int = 0, + last_node: bool = False, + usedforsecurity: bool = True, + string: ReadableBuffer | None = None, + ) -> Self: ... + else: + def __new__( + cls, + data: ReadableBuffer = b"", + /, + *, + digest_size: int = 64, + key: ReadableBuffer = b"", + salt: ReadableBuffer = b"", + person: ReadableBuffer = b"", + fanout: int = 1, + depth: int = 1, + leaf_size: int = 0, + node_offset: int = 0, + node_depth: int = 0, + inner_size: int = 0, + last_node: bool = False, + usedforsecurity: bool = True, + ) -> Self: ... + + def copy(self) -> Self: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def update(self, data: ReadableBuffer, /) -> None: ... 
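The blake2b class above is normally reached through hashlib on CPython builds that ship _blake2; a brief usage sketch of the keyed-hashing parameters from the __new__ signature (illustrative only, not part of the stub; the key and message are placeholders):

import hashlib

# Keyed hashing with a truncated digest, matching the digest_size/key
# parameters in the constructor above.
h = hashlib.blake2b(b"message", digest_size=32, key=b"secret-key")
h.update(b" more data")
print(h.hexdigest())

# copy() snapshots the running state, as the stub's copy() -> Self suggests.
assert h.copy().hexdigest() == h.hexdigest()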
+ +@final +class blake2s: + MAX_DIGEST_SIZE: ClassVar[int] = 32 + MAX_KEY_SIZE: ClassVar[int] = 32 + PERSON_SIZE: ClassVar[int] = 8 + SALT_SIZE: ClassVar[int] = 8 + block_size: int + digest_size: int + name: str + if sys.version_info >= (3, 13): + def __new__( + cls, + data: ReadableBuffer = b"", + *, + digest_size: int = 32, + key: ReadableBuffer = b"", + salt: ReadableBuffer = b"", + person: ReadableBuffer = b"", + fanout: int = 1, + depth: int = 1, + leaf_size: int = 0, + node_offset: int = 0, + node_depth: int = 0, + inner_size: int = 0, + last_node: bool = False, + usedforsecurity: bool = True, + string: ReadableBuffer | None = None, + ) -> Self: ... + else: + def __new__( + cls, + data: ReadableBuffer = b"", + /, + *, + digest_size: int = 32, + key: ReadableBuffer = b"", + salt: ReadableBuffer = b"", + person: ReadableBuffer = b"", + fanout: int = 1, + depth: int = 1, + leaf_size: int = 0, + node_offset: int = 0, + node_depth: int = 0, + inner_size: int = 0, + last_node: bool = False, + usedforsecurity: bool = True, + ) -> Self: ... + + def copy(self) -> Self: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def update(self, data: ReadableBuffer, /) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_bootlocale.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_bootlocale.pyi new file mode 100644 index 0000000..233d493 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_bootlocale.pyi @@ -0,0 +1 @@ +def getpreferredencoding(do_setlocale: bool = True) -> str: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_bz2.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_bz2.pyi new file mode 100644 index 0000000..fdad932 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_bz2.pyi @@ -0,0 +1,24 @@ +import sys +from _typeshed import ReadableBuffer +from typing import final +from typing_extensions import Self + +@final +class BZ2Compressor: + if sys.version_info >= (3, 12): + def __new__(cls, compresslevel: int = 9, /) -> Self: ... + else: + def __init__(self, compresslevel: int = 9, /) -> None: ... + + def compress(self, data: ReadableBuffer, /) -> bytes: ... + def flush(self) -> bytes: ... + +@final +class BZ2Decompressor: + def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: ... + @property + def eof(self) -> bool: ... + @property + def needs_input(self) -> bool: ... + @property + def unused_data(self) -> bytes: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_codecs.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_codecs.pyi new file mode 100644 index 0000000..89f97ed --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_codecs.pyi @@ -0,0 +1,122 @@ +import codecs +import sys +from _typeshed import ReadableBuffer +from collections.abc import Callable +from typing import Literal, final, overload, type_check_only +from typing_extensions import TypeAlias + +# This type is not exposed; it is defined in unicodeobject.c +# At runtime it calls itself builtins.EncodingMap +@final +@type_check_only +class _EncodingMap: + def size(self) -> int: ... + +_CharMap: TypeAlias = dict[int, int] | _EncodingMap +_Handler: TypeAlias = Callable[[UnicodeError], tuple[str | bytes, int]] +_SearchFunction: TypeAlias = Callable[[str], codecs.CodecInfo | None] + +def register(search_function: _SearchFunction, /) -> None: ... 
+ +if sys.version_info >= (3, 10): + def unregister(search_function: _SearchFunction, /) -> None: ... + +def register_error(errors: str, handler: _Handler, /) -> None: ... +def lookup_error(name: str, /) -> _Handler: ... + +# The type ignore on `encode` and `decode` is to avoid issues with overlapping overloads, for more details, see #300 +# https://docs.python.org/3/library/codecs.html#binary-transforms +_BytesToBytesEncoding: TypeAlias = Literal[ + "base64", + "base_64", + "base64_codec", + "bz2", + "bz2_codec", + "hex", + "hex_codec", + "quopri", + "quotedprintable", + "quoted_printable", + "quopri_codec", + "uu", + "uu_codec", + "zip", + "zlib", + "zlib_codec", +] +# https://docs.python.org/3/library/codecs.html#text-transforms +_StrToStrEncoding: TypeAlias = Literal["rot13", "rot_13"] + +@overload +def encode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ... +@overload +def encode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ... # type: ignore[overload-overlap] +@overload +def encode(obj: str, encoding: str = "utf-8", errors: str = "strict") -> bytes: ... +@overload +def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ... # type: ignore[overload-overlap] +@overload +def decode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ... + +# these are documented as text encodings but in practice they also accept str as input +@overload +def decode( + obj: str, + encoding: Literal["unicode_escape", "unicode-escape", "raw_unicode_escape", "raw-unicode-escape"], + errors: str = "strict", +) -> str: ... + +# hex is officially documented as a bytes to bytes encoding, but it appears to also work with str +@overload +def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = "strict") -> bytes: ... +@overload +def decode(obj: ReadableBuffer, encoding: str = "utf-8", errors: str = "strict") -> str: ... +def lookup(encoding: str, /) -> codecs.CodecInfo: ... +def charmap_build(map: str, /) -> _CharMap: ... +def ascii_decode(data: ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ... +def ascii_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... +def charmap_decode(data: ReadableBuffer, errors: str | None = None, mapping: _CharMap | None = None, /) -> tuple[str, int]: ... +def charmap_encode(str: str, errors: str | None = None, mapping: _CharMap | None = None, /) -> tuple[bytes, int]: ... +def escape_decode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ... +def escape_encode(data: bytes, errors: str | None = None, /) -> tuple[bytes, int]: ... +def latin_1_decode(data: ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ... +def latin_1_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... +def raw_unicode_escape_decode( + data: str | ReadableBuffer, errors: str | None = None, final: bool = True, / +) -> tuple[str, int]: ... +def raw_unicode_escape_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... +def readbuffer_encode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[bytes, int]: ... +def unicode_escape_decode(data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /) -> tuple[str, int]: ... +def unicode_escape_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... 
+def utf_16_be_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... +def utf_16_be_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... +def utf_16_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... +def utf_16_encode(str: str, errors: str | None = None, byteorder: int = 0, /) -> tuple[bytes, int]: ... +def utf_16_ex_decode( + data: ReadableBuffer, errors: str | None = None, byteorder: int = 0, final: bool = False, / +) -> tuple[str, int, int]: ... +def utf_16_le_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... +def utf_16_le_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... +def utf_32_be_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... +def utf_32_be_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... +def utf_32_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... +def utf_32_encode(str: str, errors: str | None = None, byteorder: int = 0, /) -> tuple[bytes, int]: ... +def utf_32_ex_decode( + data: ReadableBuffer, errors: str | None = None, byteorder: int = 0, final: bool = False, / +) -> tuple[str, int, int]: ... +def utf_32_le_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... +def utf_32_le_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... +def utf_7_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... +def utf_7_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... +def utf_8_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... +def utf_8_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... + +if sys.platform == "win32": + def mbcs_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... + def mbcs_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... + def code_page_decode( + codepage: int, data: ReadableBuffer, errors: str | None = None, final: bool = False, / + ) -> tuple[str, int]: ... + def code_page_encode(code_page: int, str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... + def oem_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... + def oem_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... 
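That closes _codecs.pyi. The comments in the stub above note that encode()/decode() cover three shapes of codec: str <-> bytes text encodings, bytes -> bytes transforms, and str -> str transforms. A small sketch through the public codecs wrapper shows the distinction (illustrative only, not part of the committed stubs):

import codecs

# Ordinary text codec: str in, bytes out (the default overload).
assert codecs.encode("héllo", "utf-8") == "héllo".encode("utf-8")

# bytes -> bytes transforms such as "base64" and "hex".
assert codecs.encode(b"abc", "base64") == b"YWJj\n"
assert codecs.decode(b"616263", "hex") == b"abc"

# str -> str transform.
assert codecs.encode("abc", "rot13") == "nop"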
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_collections_abc.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_collections_abc.pyi new file mode 100644 index 0000000..319577c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_collections_abc.pyi @@ -0,0 +1,105 @@ +import sys +from abc import abstractmethod +from types import MappingProxyType +from typing import ( # noqa: Y022,Y038,UP035,Y057 + AbstractSet as Set, + AsyncGenerator as AsyncGenerator, + AsyncIterable as AsyncIterable, + AsyncIterator as AsyncIterator, + Awaitable as Awaitable, + ByteString as ByteString, + Callable as Callable, + ClassVar, + Collection as Collection, + Container as Container, + Coroutine as Coroutine, + Generator as Generator, + Generic, + Hashable as Hashable, + ItemsView as ItemsView, + Iterable as Iterable, + Iterator as Iterator, + KeysView as KeysView, + Mapping as Mapping, + MappingView as MappingView, + MutableMapping as MutableMapping, + MutableSequence as MutableSequence, + MutableSet as MutableSet, + Protocol, + Reversible as Reversible, + Sequence as Sequence, + Sized as Sized, + TypeVar, + ValuesView as ValuesView, + final, + runtime_checkable, +) + +__all__ = [ + "Awaitable", + "Coroutine", + "AsyncIterable", + "AsyncIterator", + "AsyncGenerator", + "Hashable", + "Iterable", + "Iterator", + "Generator", + "Reversible", + "Sized", + "Container", + "Callable", + "Collection", + "Set", + "MutableSet", + "Mapping", + "MutableMapping", + "MappingView", + "KeysView", + "ItemsView", + "ValuesView", + "Sequence", + "MutableSequence", + "ByteString", +] +if sys.version_info >= (3, 12): + __all__ += ["Buffer"] + +_KT_co = TypeVar("_KT_co", covariant=True) # Key type covariant containers. +_VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers. + +@final +class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]): # undocumented + def __eq__(self, value: object, /) -> bool: ... + def __reversed__(self) -> Iterator[_KT_co]: ... + __hash__: ClassVar[None] # type: ignore[assignment] + if sys.version_info >= (3, 13): + def isdisjoint(self, other: Iterable[_KT_co], /) -> bool: ... + if sys.version_info >= (3, 10): + @property + def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... + +@final +class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]): # undocumented + def __reversed__(self) -> Iterator[_VT_co]: ... + if sys.version_info >= (3, 10): + @property + def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... + +@final +class dict_items(ItemsView[_KT_co, _VT_co]): # undocumented + def __eq__(self, value: object, /) -> bool: ... + def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... + __hash__: ClassVar[None] # type: ignore[assignment] + if sys.version_info >= (3, 13): + def isdisjoint(self, other: Iterable[tuple[_KT_co, _VT_co]], /) -> bool: ... + if sys.version_info >= (3, 10): + @property + def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... + +if sys.version_info >= (3, 12): + @runtime_checkable + class Buffer(Protocol): + __slots__ = () + @abstractmethod + def __buffer__(self, flags: int, /) -> memoryview: ... 
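That closes _collections_abc.pyi. The dict_keys/dict_values/dict_items classes it declares are the undocumented runtime types of the dict view objects; a brief sketch, assuming Python 3.10+ for the .mapping property (illustrative only, not part of the committed stubs):

d = {"a": 1, "b": 2}

keys = d.keys()
print(type(keys).__name__)     # dict_keys -- the class stubbed above
print(list(reversed(keys)))    # ['b', 'a']
print(keys.mapping)            # mappingproxy({'a': 1, 'b': 2}), 3.10+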
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_compat_pickle.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_compat_pickle.pyi new file mode 100644 index 0000000..32c0b54 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_compat_pickle.pyi @@ -0,0 +1,10 @@ +from typing import Final + +IMPORT_MAPPING: Final[dict[str, str]] +NAME_MAPPING: Final[dict[tuple[str, str], tuple[str, str]]] +PYTHON2_EXCEPTIONS: Final[tuple[str, ...]] +MULTIPROCESSING_EXCEPTIONS: Final[tuple[str, ...]] +REVERSE_IMPORT_MAPPING: Final[dict[str, str]] +REVERSE_NAME_MAPPING: Final[dict[tuple[str, str], tuple[str, str]]] +PYTHON3_OSERROR_EXCEPTIONS: Final[tuple[str, ...]] +PYTHON3_IMPORTERROR_EXCEPTIONS: Final[tuple[str, ...]] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_compression.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_compression.pyi new file mode 100644 index 0000000..6015bcb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_compression.pyi @@ -0,0 +1,39 @@ +# _compression is replaced by compression._common._streams on Python 3.14+ (PEP-784) + +from _typeshed import ReadableBuffer, WriteableBuffer +from collections.abc import Callable +from io import DEFAULT_BUFFER_SIZE, BufferedIOBase, RawIOBase +from typing import Any, Protocol, type_check_only + +BUFFER_SIZE = DEFAULT_BUFFER_SIZE + +@type_check_only +class _Reader(Protocol): + def read(self, n: int, /) -> bytes: ... + def seekable(self) -> bool: ... + def seek(self, n: int, /) -> Any: ... + +@type_check_only +class _Decompressor(Protocol): + def decompress(self, data: ReadableBuffer, /, max_length: int = ...) -> bytes: ... + @property + def unused_data(self) -> bytes: ... + @property + def eof(self) -> bool: ... + # `zlib._Decompress` does not have next property, but `DecompressReader` calls it: + # @property + # def needs_input(self) -> bool: ... + +class BaseStream(BufferedIOBase): ... + +class DecompressReader(RawIOBase): + def __init__( + self, + fp: _Reader, + decomp_factory: Callable[..., _Decompressor], + trailing_error: type[Exception] | tuple[type[Exception], ...] = (), + **decomp_args: Any, # These are passed to decomp_factory. + ) -> None: ... + def readinto(self, b: WriteableBuffer) -> int: ... + def read(self, size: int = -1) -> bytes: ... + def seek(self, offset: int, whence: int = 0) -> int: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_contextvars.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_contextvars.pyi new file mode 100644 index 0000000..0ddeca7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_contextvars.pyi @@ -0,0 +1,64 @@ +import sys +from collections.abc import Callable, Iterator, Mapping +from types import GenericAlias, TracebackType +from typing import Any, ClassVar, Generic, TypeVar, final, overload +from typing_extensions import ParamSpec, Self + +_T = TypeVar("_T") +_D = TypeVar("_D") +_P = ParamSpec("_P") + +@final +class ContextVar(Generic[_T]): + @overload + def __new__(cls, name: str) -> Self: ... + @overload + def __new__(cls, name: str, *, default: _T) -> Self: ... + def __hash__(self) -> int: ... + @property + def name(self) -> str: ... + @overload + def get(self) -> _T: ... + @overload + def get(self, default: _T, /) -> _T: ... + @overload + def get(self, default: _D, /) -> _D | _T: ... + def set(self, value: _T, /) -> Token[_T]: ... + def reset(self, token: Token[_T], /) -> None: ... 
+ def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +@final +class Token(Generic[_T]): + @property + def var(self) -> ContextVar[_T]: ... + @property + def old_value(self) -> Any: ... # returns either _T or MISSING, but that's hard to express + MISSING: ClassVar[object] + __hash__: ClassVar[None] # type: ignore[assignment] + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + if sys.version_info >= (3, 14): + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / + ) -> None: ... + +def copy_context() -> Context: ... + +# It doesn't make sense to make this generic, because for most Contexts each ContextVar will have +# a different value. +@final +class Context(Mapping[ContextVar[Any], Any]): + def __init__(self) -> None: ... + @overload + def get(self, key: ContextVar[_T], default: None = None, /) -> _T | None: ... + @overload + def get(self, key: ContextVar[_T], default: _T, /) -> _T: ... + @overload + def get(self, key: ContextVar[_T], default: _D, /) -> _T | _D: ... + def run(self, callable: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ... + def copy(self) -> Context: ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __getitem__(self, key: ContextVar[_T], /) -> _T: ... + def __iter__(self) -> Iterator[ContextVar[Any]]: ... + def __len__(self) -> int: ... + def __eq__(self, value: object, /) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_csv.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_csv.pyi new file mode 100644 index 0000000..ea90766 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_csv.pyi @@ -0,0 +1,139 @@ +import csv +import sys +from _typeshed import SupportsWrite +from collections.abc import Iterable +from typing import Any, Final, Literal, type_check_only +from typing_extensions import Self, TypeAlias, disjoint_base + +__version__: Final[str] + +QUOTE_ALL: Final = 1 +QUOTE_MINIMAL: Final = 0 +QUOTE_NONE: Final = 3 +QUOTE_NONNUMERIC: Final = 2 +if sys.version_info >= (3, 12): + QUOTE_STRINGS: Final = 4 + QUOTE_NOTNULL: Final = 5 + +if sys.version_info >= (3, 12): + _QuotingType: TypeAlias = Literal[0, 1, 2, 3, 4, 5] +else: + _QuotingType: TypeAlias = Literal[0, 1, 2, 3] + +class Error(Exception): ... + +_DialectLike: TypeAlias = str | Dialect | csv.Dialect | type[Dialect | csv.Dialect] + +@disjoint_base +class Dialect: + delimiter: str + quotechar: str | None + escapechar: str | None + doublequote: bool + skipinitialspace: bool + lineterminator: str + quoting: _QuotingType + strict: bool + def __new__( + cls, + dialect: _DialectLike | None = None, + delimiter: str = ",", + doublequote: bool = True, + escapechar: str | None = None, + lineterminator: str = "\r\n", + quotechar: str | None = '"', + quoting: _QuotingType = 0, + skipinitialspace: bool = False, + strict: bool = False, + ) -> Self: ... + +if sys.version_info >= (3, 10): + # This class calls itself _csv.reader. + @disjoint_base + class Reader: + @property + def dialect(self) -> Dialect: ... + line_num: int + def __iter__(self) -> Self: ... + def __next__(self) -> list[str]: ... + + # This class calls itself _csv.writer. + @disjoint_base + class Writer: + @property + def dialect(self) -> Dialect: ... + if sys.version_info >= (3, 13): + def writerow(self, row: Iterable[Any], /) -> Any: ... + def writerows(self, rows: Iterable[Iterable[Any]], /) -> None: ... 
+ else: + def writerow(self, row: Iterable[Any]) -> Any: ... + def writerows(self, rows: Iterable[Iterable[Any]]) -> None: ... + + # For the return types below. + # These aliases can be removed when typeshed drops support for 3.9. + _reader = Reader + _writer = Writer +else: + # This class is not exposed. It calls itself _csv.reader. + @type_check_only + class _reader: + @property + def dialect(self) -> Dialect: ... + line_num: int + def __iter__(self) -> Self: ... + def __next__(self) -> list[str]: ... + + # This class is not exposed. It calls itself _csv.writer. + @type_check_only + class _writer: + @property + def dialect(self) -> Dialect: ... + def writerow(self, row: Iterable[Any]) -> Any: ... + def writerows(self, rows: Iterable[Iterable[Any]]) -> None: ... + +def writer( + fileobj: SupportsWrite[str], + /, + dialect: _DialectLike = "excel", + *, + delimiter: str = ",", + quotechar: str | None = '"', + escapechar: str | None = None, + doublequote: bool = True, + skipinitialspace: bool = False, + lineterminator: str = "\r\n", + quoting: _QuotingType = 0, + strict: bool = False, +) -> _writer: ... +def reader( + iterable: Iterable[str], + /, + dialect: _DialectLike = "excel", + *, + delimiter: str = ",", + quotechar: str | None = '"', + escapechar: str | None = None, + doublequote: bool = True, + skipinitialspace: bool = False, + lineterminator: str = "\r\n", + quoting: _QuotingType = 0, + strict: bool = False, +) -> _reader: ... +def register_dialect( + name: str, + /, + dialect: type[Dialect | csv.Dialect] | str = "excel", + *, + delimiter: str = ",", + quotechar: str | None = '"', + escapechar: str | None = None, + doublequote: bool = True, + skipinitialspace: bool = False, + lineterminator: str = "\r\n", + quoting: _QuotingType = 0, + strict: bool = False, +) -> None: ... +def unregister_dialect(name: str) -> None: ... +def get_dialect(name: str) -> Dialect: ... +def list_dialects() -> list[str]: ... +def field_size_limit(new_limit: int = ...) -> int: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_ctypes.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_ctypes.pyi new file mode 100644 index 0000000..082a31f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_ctypes.pyi @@ -0,0 +1,337 @@ +import _typeshed +import sys +from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer +from abc import abstractmethod +from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence +from ctypes import CDLL, ArgumentError as ArgumentError, c_void_p +from types import GenericAlias +from typing import Any, ClassVar, Final, Generic, TypeVar, final, overload, type_check_only +from typing_extensions import Self, TypeAlias + +_T = TypeVar("_T") +_CT = TypeVar("_CT", bound=_CData) + +FUNCFLAG_CDECL: Final = 0x1 +FUNCFLAG_PYTHONAPI: Final = 0x4 +FUNCFLAG_USE_ERRNO: Final = 0x8 +FUNCFLAG_USE_LASTERROR: Final = 0x10 +RTLD_GLOBAL: Final[int] +RTLD_LOCAL: Final[int] + +if sys.version_info >= (3, 11): + CTYPES_MAX_ARGCOUNT: Final[int] + +if sys.version_info >= (3, 12): + SIZEOF_TIME_T: Final[int] + +if sys.platform == "win32": + # Description, Source, HelpFile, HelpContext, scode + _COMError_Details: TypeAlias = tuple[str | None, str | None, str | None, int | None, int | None] + + class COMError(Exception): + hresult: int + text: str | None + details: _COMError_Details + + def __init__(self, hresult: int, text: str | None, details: _COMError_Details) -> None: ... 
+ + def CopyComPointer(src: _PointerLike, dst: _PointerLike | _CArgObject) -> int: ... + + FUNCFLAG_HRESULT: Final = 0x2 + FUNCFLAG_STDCALL: Final = 0x0 + + def FormatError(code: int = ...) -> str: ... + def get_last_error() -> int: ... + def set_last_error(value: int) -> int: ... + def LoadLibrary(name: str, load_flags: int = 0, /) -> int: ... + def FreeLibrary(handle: int, /) -> None: ... + +else: + def dlclose(handle: int, /) -> None: ... + # The default for flag is RTLD_GLOBAL|RTLD_LOCAL, which is platform dependent. + def dlopen(name: StrOrBytesPath, flag: int = ..., /) -> int: ... + def dlsym(handle: int, name: str, /) -> int: ... + +if sys.version_info >= (3, 13): + # This class is not exposed. It calls itself _ctypes.CType_Type. + @type_check_only + class _CType_Type(type): + # By default mypy complains about the following two methods, because strictly speaking cls + # might not be a Type[_CT]. However this doesn't happen because this is only a + # metaclass for subclasses of _CData. + def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + + _CTypeBaseType = _CType_Type + +else: + _CTypeBaseType = type + +# This class is not exposed. +@type_check_only +class _CData: + _b_base_: int + _b_needsfree_: bool + _objects: Mapping[Any, int] | None + def __buffer__(self, flags: int, /) -> memoryview: ... + def __ctypes_from_outparam__(self, /) -> Self: ... + if sys.version_info >= (3, 14): + __pointer_type__: type + +# this is a union of all the subclasses of _CData, which is useful because of +# the methods that are present on each of those subclasses which are not present +# on _CData itself. +_CDataType: TypeAlias = _SimpleCData[Any] | _Pointer[Any] | CFuncPtr | Union | Structure | Array[Any] + +# This class is not exposed. It calls itself _ctypes.PyCSimpleType. +@type_check_only +class _PyCSimpleType(_CTypeBaseType): + def from_address(self: type[_typeshed.Self], value: int, /) -> _typeshed.Self: ... + def from_buffer(self: type[_typeshed.Self], obj: WriteableBuffer, offset: int = 0, /) -> _typeshed.Self: ... + def from_buffer_copy(self: type[_typeshed.Self], buffer: ReadableBuffer, offset: int = 0, /) -> _typeshed.Self: ... + def from_param(self: type[_typeshed.Self], value: Any, /) -> _typeshed.Self | _CArgObject: ... + def in_dll(self: type[_typeshed.Self], dll: CDLL, name: str, /) -> _typeshed.Self: ... + if sys.version_info < (3, 13): + # Inherited from CType_Type starting on 3.13 + def __mul__(self: type[_CT], value: int, /) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def __rmul__(self: type[_CT], value: int, /) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + +class _SimpleCData(_CData, Generic[_T], metaclass=_PyCSimpleType): + value: _T + # The TypeVar can be unsolved here, + # but we can't use overloads without creating many, many mypy false-positive errors + def __init__(self, value: _T = ...) -> None: ... # pyright: ignore[reportInvalidTypeVarUse] + def __ctypes_from_outparam__(self, /) -> _T: ... # type: ignore[override] + +@type_check_only +class _CanCastTo(_CData): ... + +@type_check_only +class _PointerLike(_CanCastTo): ... + +# This type is not exposed. It calls itself _ctypes.PyCPointerType. 
+@type_check_only +class _PyCPointerType(_CTypeBaseType): + def from_address(self: type[_typeshed.Self], value: int, /) -> _typeshed.Self: ... + def from_buffer(self: type[_typeshed.Self], obj: WriteableBuffer, offset: int = 0, /) -> _typeshed.Self: ... + def from_buffer_copy(self: type[_typeshed.Self], buffer: ReadableBuffer, offset: int = 0, /) -> _typeshed.Self: ... + def from_param(self: type[_typeshed.Self], value: Any, /) -> _typeshed.Self | _CArgObject: ... + def in_dll(self: type[_typeshed.Self], dll: CDLL, name: str, /) -> _typeshed.Self: ... + def set_type(self, type: _CTypeBaseType, /) -> None: ... + if sys.version_info < (3, 13): + # Inherited from CType_Type starting on 3.13 + def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + +class _Pointer(_PointerLike, _CData, Generic[_CT], metaclass=_PyCPointerType): + _type_: type[_CT] + contents: _CT + @overload + def __init__(self) -> None: ... + @overload + def __init__(self, arg: _CT) -> None: ... + @overload + def __getitem__(self, key: int, /) -> Any: ... + @overload + def __getitem__(self, key: slice, /) -> list[Any]: ... + def __setitem__(self, key: int, value: Any, /) -> None: ... + +if sys.version_info < (3, 14): + @overload + def POINTER(type: None, /) -> type[c_void_p]: ... + @overload + def POINTER(type: type[_CT], /) -> type[_Pointer[_CT]]: ... + def pointer(obj: _CT, /) -> _Pointer[_CT]: ... + +# This class is not exposed. It calls itself _ctypes.CArgObject. +@final +@type_check_only +class _CArgObject: ... + +if sys.version_info >= (3, 14): + def byref(obj: _CData | _CDataType, offset: int = 0, /) -> _CArgObject: ... + +else: + def byref(obj: _CData | _CDataType, offset: int = 0) -> _CArgObject: ... + +_ECT: TypeAlias = Callable[[_CData | _CDataType | None, CFuncPtr, tuple[_CData | _CDataType, ...]], _CDataType] +_PF: TypeAlias = tuple[int] | tuple[int, str | None] | tuple[int, str | None, Any] + +# This class is not exposed. It calls itself _ctypes.PyCFuncPtrType. +@type_check_only +class _PyCFuncPtrType(_CTypeBaseType): + def from_address(self: type[_typeshed.Self], value: int, /) -> _typeshed.Self: ... + def from_buffer(self: type[_typeshed.Self], obj: WriteableBuffer, offset: int = 0, /) -> _typeshed.Self: ... + def from_buffer_copy(self: type[_typeshed.Self], buffer: ReadableBuffer, offset: int = 0, /) -> _typeshed.Self: ... + def from_param(self: type[_typeshed.Self], value: Any, /) -> _typeshed.Self | _CArgObject: ... + def in_dll(self: type[_typeshed.Self], dll: CDLL, name: str, /) -> _typeshed.Self: ... + if sys.version_info < (3, 13): + # Inherited from CType_Type starting on 3.13 + def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + +class CFuncPtr(_PointerLike, _CData, metaclass=_PyCFuncPtrType): + restype: type[_CDataType] | Callable[[int], Any] | None + argtypes: Sequence[type[_CDataType]] + errcheck: _ECT + # Abstract attribute that must be defined on subclasses + _flags_: ClassVar[int] + @overload + def __new__(cls) -> Self: ... + @overload + def __new__(cls, address: int, /) -> Self: ... + @overload + def __new__(cls, callable: Callable[..., Any], /) -> Self: ... 
+ @overload + def __new__(cls, func_spec: tuple[str | int, CDLL], paramflags: tuple[_PF, ...] | None = ..., /) -> Self: ... + if sys.platform == "win32": + @overload + def __new__( + cls, vtbl_index: int, name: str, paramflags: tuple[_PF, ...] | None = ..., iid: _CData | _CDataType | None = ..., / + ) -> Self: ... + + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + +_GetT = TypeVar("_GetT") +_SetT = TypeVar("_SetT") + +# This class is not exposed. It calls itself _ctypes.CField. +@final +@type_check_only +class _CField(Generic[_CT, _GetT, _SetT]): + offset: int + size: int + if sys.version_info >= (3, 10): + @overload + def __get__(self, instance: None, owner: type[Any] | None = None, /) -> Self: ... + @overload + def __get__(self, instance: Any, owner: type[Any] | None = None, /) -> _GetT: ... + else: + @overload + def __get__(self, instance: None, owner: type[Any] | None, /) -> Self: ... + @overload + def __get__(self, instance: Any, owner: type[Any] | None, /) -> _GetT: ... + + def __set__(self, instance: Any, value: _SetT, /) -> None: ... + +# This class is not exposed. It calls itself _ctypes.UnionType. +@type_check_only +class _UnionType(_CTypeBaseType): + def from_address(self: type[_typeshed.Self], value: int, /) -> _typeshed.Self: ... + def from_buffer(self: type[_typeshed.Self], obj: WriteableBuffer, offset: int = 0, /) -> _typeshed.Self: ... + def from_buffer_copy(self: type[_typeshed.Self], buffer: ReadableBuffer, offset: int = 0, /) -> _typeshed.Self: ... + def from_param(self: type[_typeshed.Self], value: Any, /) -> _typeshed.Self | _CArgObject: ... + def in_dll(self: type[_typeshed.Self], dll: CDLL, name: str, /) -> _typeshed.Self: ... + # At runtime, various attributes are created on a Union subclass based + # on its _fields_. This method doesn't exist, but represents those + # dynamically created attributes. + def __getattr__(self, name: str) -> _CField[Any, Any, Any]: ... + if sys.version_info < (3, 13): + # Inherited from CType_Type starting on 3.13 + def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + +class Union(_CData, metaclass=_UnionType): + _fields_: ClassVar[Sequence[tuple[str, type[_CDataType]] | tuple[str, type[_CDataType], int]]] + _pack_: ClassVar[int] + _anonymous_: ClassVar[Sequence[str]] + if sys.version_info >= (3, 13): + _align_: ClassVar[int] + + def __init__(self, *args: Any, **kw: Any) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def __setattr__(self, name: str, value: Any) -> None: ... + +# This class is not exposed. It calls itself _ctypes.PyCStructType. +@type_check_only +class _PyCStructType(_CTypeBaseType): + def from_address(self: type[_typeshed.Self], value: int, /) -> _typeshed.Self: ... + def from_buffer(self: type[_typeshed.Self], obj: WriteableBuffer, offset: int = 0, /) -> _typeshed.Self: ... + def from_buffer_copy(self: type[_typeshed.Self], buffer: ReadableBuffer, offset: int = 0, /) -> _typeshed.Self: ... + def from_param(self: type[_typeshed.Self], value: Any, /) -> _typeshed.Self | _CArgObject: ... + def in_dll(self: type[_typeshed.Self], dll: CDLL, name: str, /) -> _typeshed.Self: ... + # At runtime, various attributes are created on a Structure subclass based + # on its _fields_. This method doesn't exist, but represents those + # dynamically created attributes. 
+ def __getattr__(self, name: str) -> _CField[Any, Any, Any]: ... + if sys.version_info < (3, 13): + # Inherited from CType_Type starting on 3.13 + def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + +class Structure(_CData, metaclass=_PyCStructType): + _fields_: ClassVar[Sequence[tuple[str, type[_CDataType]] | tuple[str, type[_CDataType], int]]] + _pack_: ClassVar[int] + _anonymous_: ClassVar[Sequence[str]] + if sys.version_info >= (3, 13): + _align_: ClassVar[int] + + def __init__(self, *args: Any, **kw: Any) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def __setattr__(self, name: str, value: Any) -> None: ... + +# This class is not exposed. It calls itself _ctypes.PyCArrayType. +@type_check_only +class _PyCArrayType(_CTypeBaseType): + def from_address(self: type[_typeshed.Self], value: int, /) -> _typeshed.Self: ... + def from_buffer(self: type[_typeshed.Self], obj: WriteableBuffer, offset: int = 0, /) -> _typeshed.Self: ... + def from_buffer_copy(self: type[_typeshed.Self], buffer: ReadableBuffer, offset: int = 0, /) -> _typeshed.Self: ... + def from_param(self: type[_typeshed.Self], value: Any, /) -> _typeshed.Self | _CArgObject: ... + def in_dll(self: type[_typeshed.Self], dll: CDLL, name: str, /) -> _typeshed.Self: ... + if sys.version_info < (3, 13): + # Inherited from CType_Type starting on 3.13 + def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + +class Array(_CData, Generic[_CT], metaclass=_PyCArrayType): + @property + @abstractmethod + def _length_(self) -> int: ... + @_length_.setter + def _length_(self, value: int) -> None: ... + @property + @abstractmethod + def _type_(self) -> type[_CT]: ... + @_type_.setter + def _type_(self, value: type[_CT]) -> None: ... + raw: bytes # Note: only available if _CT == c_char + value: Any # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise + # TODO: These methods cannot be annotated correctly at the moment. + # All of these "Any"s stand for the array's element type, but it's not possible to use _CT + # here, because of a special feature of ctypes. + # By default, when accessing an element of an Array[_CT], the returned object has type _CT. + # However, when _CT is a "simple type" like c_int, ctypes automatically "unboxes" the object + # and converts it to the corresponding Python primitive. For example, when accessing an element + # of an Array[c_int], a Python int object is returned, not a c_int. + # This behavior does *not* apply to subclasses of "simple types". + # If MyInt is a subclass of c_int, then accessing an element of an Array[MyInt] returns + # a MyInt, not an int. + # This special behavior is not easy to model in a stub, so for now all places where + # the array element type would belong are annotated with Any instead. + def __init__(self, *args: Any) -> None: ... + @overload + def __getitem__(self, key: int, /) -> Any: ... + @overload + def __getitem__(self, key: slice, /) -> list[Any]: ... + @overload + def __setitem__(self, key: int, value: Any, /) -> None: ... + @overload + def __setitem__(self, key: slice, value: Iterable[Any], /) -> None: ... 
+ def __iter__(self) -> Iterator[Any]: ... + # Can't inherit from Sized because the metaclass conflict between + # Sized and _CData prevents using _CDataMeta. + def __len__(self) -> int: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +def addressof(obj: _CData | _CDataType, /) -> int: ... +def alignment(obj_or_type: _CData | _CDataType | type[_CData | _CDataType], /) -> int: ... +def get_errno() -> int: ... +def resize(obj: _CData | _CDataType, size: int, /) -> None: ... +def set_errno(value: int, /) -> int: ... +def sizeof(obj_or_type: _CData | _CDataType | type[_CData | _CDataType], /) -> int: ... +def PyObj_FromPtr(address: int, /) -> Any: ... +def Py_DECREF(o: _T, /) -> _T: ... +def Py_INCREF(o: _T, /) -> _T: ... +def buffer_info(o: _CData | _CDataType | type[_CData | _CDataType], /) -> tuple[str, int, tuple[int, ...]]: ... +def call_cdeclfunction(address: int, arguments: tuple[Any, ...], /) -> Any: ... +def call_function(address: int, arguments: tuple[Any, ...], /) -> Any: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_curses.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_curses.pyi new file mode 100644 index 0000000..d4e4d48 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_curses.pyi @@ -0,0 +1,551 @@ +import sys +from _typeshed import ReadOnlyBuffer, SupportsRead, SupportsWrite +from curses import _ncurses_version +from typing import Any, Final, final, overload +from typing_extensions import TypeAlias + +# NOTE: This module is ordinarily only available on Unix, but the windows-curses +# package makes it available on Windows as well with the same contents. + +# Handled by PyCurses_ConvertToChtype in _cursesmodule.c. +_ChType: TypeAlias = str | bytes | int + +# ACS codes are only initialized after initscr is called +ACS_BBSS: Final[int] +ACS_BLOCK: Final[int] +ACS_BOARD: Final[int] +ACS_BSBS: Final[int] +ACS_BSSB: Final[int] +ACS_BSSS: Final[int] +ACS_BTEE: Final[int] +ACS_BULLET: Final[int] +ACS_CKBOARD: Final[int] +ACS_DARROW: Final[int] +ACS_DEGREE: Final[int] +ACS_DIAMOND: Final[int] +ACS_GEQUAL: Final[int] +ACS_HLINE: Final[int] +ACS_LANTERN: Final[int] +ACS_LARROW: Final[int] +ACS_LEQUAL: Final[int] +ACS_LLCORNER: Final[int] +ACS_LRCORNER: Final[int] +ACS_LTEE: Final[int] +ACS_NEQUAL: Final[int] +ACS_PI: Final[int] +ACS_PLMINUS: Final[int] +ACS_PLUS: Final[int] +ACS_RARROW: Final[int] +ACS_RTEE: Final[int] +ACS_S1: Final[int] +ACS_S3: Final[int] +ACS_S7: Final[int] +ACS_S9: Final[int] +ACS_SBBS: Final[int] +ACS_SBSB: Final[int] +ACS_SBSS: Final[int] +ACS_SSBB: Final[int] +ACS_SSBS: Final[int] +ACS_SSSB: Final[int] +ACS_SSSS: Final[int] +ACS_STERLING: Final[int] +ACS_TTEE: Final[int] +ACS_UARROW: Final[int] +ACS_ULCORNER: Final[int] +ACS_URCORNER: Final[int] +ACS_VLINE: Final[int] +ALL_MOUSE_EVENTS: Final[int] +A_ALTCHARSET: Final[int] +A_ATTRIBUTES: Final[int] +A_BLINK: Final[int] +A_BOLD: Final[int] +A_CHARTEXT: Final[int] +A_COLOR: Final[int] +A_DIM: Final[int] +A_HORIZONTAL: Final[int] +A_INVIS: Final[int] +A_ITALIC: Final[int] +A_LEFT: Final[int] +A_LOW: Final[int] +A_NORMAL: Final[int] +A_PROTECT: Final[int] +A_REVERSE: Final[int] +A_RIGHT: Final[int] +A_STANDOUT: Final[int] +A_TOP: Final[int] +A_UNDERLINE: Final[int] +A_VERTICAL: Final[int] +BUTTON1_CLICKED: Final[int] +BUTTON1_DOUBLE_CLICKED: Final[int] +BUTTON1_PRESSED: Final[int] +BUTTON1_RELEASED: Final[int] +BUTTON1_TRIPLE_CLICKED: Final[int] +BUTTON2_CLICKED: Final[int] +BUTTON2_DOUBLE_CLICKED: Final[int] +BUTTON2_PRESSED: 
Final[int] +BUTTON2_RELEASED: Final[int] +BUTTON2_TRIPLE_CLICKED: Final[int] +BUTTON3_CLICKED: Final[int] +BUTTON3_DOUBLE_CLICKED: Final[int] +BUTTON3_PRESSED: Final[int] +BUTTON3_RELEASED: Final[int] +BUTTON3_TRIPLE_CLICKED: Final[int] +BUTTON4_CLICKED: Final[int] +BUTTON4_DOUBLE_CLICKED: Final[int] +BUTTON4_PRESSED: Final[int] +BUTTON4_RELEASED: Final[int] +BUTTON4_TRIPLE_CLICKED: Final[int] +# Darwin ncurses doesn't provide BUTTON5_* constants prior to 3.12.10 and 3.13.3 +if sys.version_info >= (3, 10): + if sys.version_info >= (3, 12) or sys.platform != "darwin": + BUTTON5_PRESSED: Final[int] + BUTTON5_RELEASED: Final[int] + BUTTON5_CLICKED: Final[int] + BUTTON5_DOUBLE_CLICKED: Final[int] + BUTTON5_TRIPLE_CLICKED: Final[int] +BUTTON_ALT: Final[int] +BUTTON_CTRL: Final[int] +BUTTON_SHIFT: Final[int] +COLOR_BLACK: Final[int] +COLOR_BLUE: Final[int] +COLOR_CYAN: Final[int] +COLOR_GREEN: Final[int] +COLOR_MAGENTA: Final[int] +COLOR_RED: Final[int] +COLOR_WHITE: Final[int] +COLOR_YELLOW: Final[int] +ERR: Final[int] +KEY_A1: Final[int] +KEY_A3: Final[int] +KEY_B2: Final[int] +KEY_BACKSPACE: Final[int] +KEY_BEG: Final[int] +KEY_BREAK: Final[int] +KEY_BTAB: Final[int] +KEY_C1: Final[int] +KEY_C3: Final[int] +KEY_CANCEL: Final[int] +KEY_CATAB: Final[int] +KEY_CLEAR: Final[int] +KEY_CLOSE: Final[int] +KEY_COMMAND: Final[int] +KEY_COPY: Final[int] +KEY_CREATE: Final[int] +KEY_CTAB: Final[int] +KEY_DC: Final[int] +KEY_DL: Final[int] +KEY_DOWN: Final[int] +KEY_EIC: Final[int] +KEY_END: Final[int] +KEY_ENTER: Final[int] +KEY_EOL: Final[int] +KEY_EOS: Final[int] +KEY_EXIT: Final[int] +KEY_F0: Final[int] +KEY_F1: Final[int] +KEY_F10: Final[int] +KEY_F11: Final[int] +KEY_F12: Final[int] +KEY_F13: Final[int] +KEY_F14: Final[int] +KEY_F15: Final[int] +KEY_F16: Final[int] +KEY_F17: Final[int] +KEY_F18: Final[int] +KEY_F19: Final[int] +KEY_F2: Final[int] +KEY_F20: Final[int] +KEY_F21: Final[int] +KEY_F22: Final[int] +KEY_F23: Final[int] +KEY_F24: Final[int] +KEY_F25: Final[int] +KEY_F26: Final[int] +KEY_F27: Final[int] +KEY_F28: Final[int] +KEY_F29: Final[int] +KEY_F3: Final[int] +KEY_F30: Final[int] +KEY_F31: Final[int] +KEY_F32: Final[int] +KEY_F33: Final[int] +KEY_F34: Final[int] +KEY_F35: Final[int] +KEY_F36: Final[int] +KEY_F37: Final[int] +KEY_F38: Final[int] +KEY_F39: Final[int] +KEY_F4: Final[int] +KEY_F40: Final[int] +KEY_F41: Final[int] +KEY_F42: Final[int] +KEY_F43: Final[int] +KEY_F44: Final[int] +KEY_F45: Final[int] +KEY_F46: Final[int] +KEY_F47: Final[int] +KEY_F48: Final[int] +KEY_F49: Final[int] +KEY_F5: Final[int] +KEY_F50: Final[int] +KEY_F51: Final[int] +KEY_F52: Final[int] +KEY_F53: Final[int] +KEY_F54: Final[int] +KEY_F55: Final[int] +KEY_F56: Final[int] +KEY_F57: Final[int] +KEY_F58: Final[int] +KEY_F59: Final[int] +KEY_F6: Final[int] +KEY_F60: Final[int] +KEY_F61: Final[int] +KEY_F62: Final[int] +KEY_F63: Final[int] +KEY_F7: Final[int] +KEY_F8: Final[int] +KEY_F9: Final[int] +KEY_FIND: Final[int] +KEY_HELP: Final[int] +KEY_HOME: Final[int] +KEY_IC: Final[int] +KEY_IL: Final[int] +KEY_LEFT: Final[int] +KEY_LL: Final[int] +KEY_MARK: Final[int] +KEY_MAX: Final[int] +KEY_MESSAGE: Final[int] +KEY_MIN: Final[int] +KEY_MOUSE: Final[int] +KEY_MOVE: Final[int] +KEY_NEXT: Final[int] +KEY_NPAGE: Final[int] +KEY_OPEN: Final[int] +KEY_OPTIONS: Final[int] +KEY_PPAGE: Final[int] +KEY_PREVIOUS: Final[int] +KEY_PRINT: Final[int] +KEY_REDO: Final[int] +KEY_REFERENCE: Final[int] +KEY_REFRESH: Final[int] +KEY_REPLACE: Final[int] +KEY_RESET: Final[int] +KEY_RESIZE: Final[int] +KEY_RESTART: Final[int] 
+KEY_RESUME: Final[int] +KEY_RIGHT: Final[int] +KEY_SAVE: Final[int] +KEY_SBEG: Final[int] +KEY_SCANCEL: Final[int] +KEY_SCOMMAND: Final[int] +KEY_SCOPY: Final[int] +KEY_SCREATE: Final[int] +KEY_SDC: Final[int] +KEY_SDL: Final[int] +KEY_SELECT: Final[int] +KEY_SEND: Final[int] +KEY_SEOL: Final[int] +KEY_SEXIT: Final[int] +KEY_SF: Final[int] +KEY_SFIND: Final[int] +KEY_SHELP: Final[int] +KEY_SHOME: Final[int] +KEY_SIC: Final[int] +KEY_SLEFT: Final[int] +KEY_SMESSAGE: Final[int] +KEY_SMOVE: Final[int] +KEY_SNEXT: Final[int] +KEY_SOPTIONS: Final[int] +KEY_SPREVIOUS: Final[int] +KEY_SPRINT: Final[int] +KEY_SR: Final[int] +KEY_SREDO: Final[int] +KEY_SREPLACE: Final[int] +KEY_SRESET: Final[int] +KEY_SRIGHT: Final[int] +KEY_SRSUME: Final[int] +KEY_SSAVE: Final[int] +KEY_SSUSPEND: Final[int] +KEY_STAB: Final[int] +KEY_SUNDO: Final[int] +KEY_SUSPEND: Final[int] +KEY_UNDO: Final[int] +KEY_UP: Final[int] +OK: Final[int] +REPORT_MOUSE_POSITION: Final[int] +_C_API: Any +version: Final[bytes] + +def baudrate() -> int: ... +def beep() -> None: ... +def can_change_color() -> bool: ... +def cbreak(flag: bool = True, /) -> None: ... +def color_content(color_number: int, /) -> tuple[int, int, int]: ... +def color_pair(pair_number: int, /) -> int: ... +def curs_set(visibility: int, /) -> int: ... +def def_prog_mode() -> None: ... +def def_shell_mode() -> None: ... +def delay_output(ms: int, /) -> None: ... +def doupdate() -> None: ... +def echo(flag: bool = True, /) -> None: ... +def endwin() -> None: ... +def erasechar() -> bytes: ... +def filter() -> None: ... +def flash() -> None: ... +def flushinp() -> None: ... +def get_escdelay() -> int: ... +def get_tabsize() -> int: ... +def getmouse() -> tuple[int, int, int, int, int]: ... +def getsyx() -> tuple[int, int]: ... +def getwin(file: SupportsRead[bytes], /) -> window: ... +def halfdelay(tenths: int, /) -> None: ... +def has_colors() -> bool: ... + +if sys.version_info >= (3, 10): + def has_extended_color_support() -> bool: ... + +if sys.version_info >= (3, 14): + def assume_default_colors(fg: int, bg: int, /) -> None: ... + +def has_ic() -> bool: ... +def has_il() -> bool: ... +def has_key(key: int, /) -> bool: ... +def init_color(color_number: int, r: int, g: int, b: int, /) -> None: ... +def init_pair(pair_number: int, fg: int, bg: int, /) -> None: ... +def initscr() -> window: ... +def intrflush(flag: bool, /) -> None: ... +def is_term_resized(nlines: int, ncols: int, /) -> bool: ... +def isendwin() -> bool: ... +def keyname(key: int, /) -> bytes: ... +def killchar() -> bytes: ... +def longname() -> bytes: ... +def meta(yes: bool, /) -> None: ... +def mouseinterval(interval: int, /) -> None: ... +def mousemask(newmask: int, /) -> tuple[int, int]: ... +def napms(ms: int, /) -> int: ... +def newpad(nlines: int, ncols: int, /) -> window: ... +def newwin(nlines: int, ncols: int, begin_y: int = 0, begin_x: int = 0, /) -> window: ... +def nl(flag: bool = True, /) -> None: ... +def nocbreak() -> None: ... +def noecho() -> None: ... +def nonl() -> None: ... +def noqiflush() -> None: ... +def noraw() -> None: ... +def pair_content(pair_number: int, /) -> tuple[int, int]: ... +def pair_number(attr: int, /) -> int: ... +def putp(string: ReadOnlyBuffer, /) -> None: ... +def qiflush(flag: bool = True, /) -> None: ... +def raw(flag: bool = True, /) -> None: ... +def reset_prog_mode() -> None: ... +def reset_shell_mode() -> None: ... +def resetty() -> None: ... +def resize_term(nlines: int, ncols: int, /) -> None: ... 
+def resizeterm(nlines: int, ncols: int, /) -> None: ... +def savetty() -> None: ... +def set_escdelay(ms: int, /) -> None: ... +def set_tabsize(size: int, /) -> None: ... +def setsyx(y: int, x: int, /) -> None: ... +def setupterm(term: str | None = None, fd: int = -1) -> None: ... +def start_color() -> None: ... +def termattrs() -> int: ... +def termname() -> bytes: ... +def tigetflag(capname: str, /) -> int: ... +def tigetnum(capname: str, /) -> int: ... +def tigetstr(capname: str, /) -> bytes | None: ... +def tparm( + str: ReadOnlyBuffer, + i1: int = 0, + i2: int = 0, + i3: int = 0, + i4: int = 0, + i5: int = 0, + i6: int = 0, + i7: int = 0, + i8: int = 0, + i9: int = 0, + /, +) -> bytes: ... +def typeahead(fd: int, /) -> None: ... +def unctrl(ch: _ChType, /) -> bytes: ... +def unget_wch(ch: int | str, /) -> None: ... +def ungetch(ch: _ChType, /) -> None: ... +def ungetmouse(id: int, x: int, y: int, z: int, bstate: int, /) -> None: ... +def update_lines_cols() -> None: ... +def use_default_colors() -> None: ... +def use_env(flag: bool, /) -> None: ... + +class error(Exception): ... + +@final +class window: # undocumented + encoding: str + @overload + def addch(self, ch: _ChType, attr: int = ...) -> None: ... + @overload + def addch(self, y: int, x: int, ch: _ChType, attr: int = ...) -> None: ... + @overload + def addnstr(self, str: str, n: int, attr: int = ...) -> None: ... + @overload + def addnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ... + @overload + def addstr(self, str: str, attr: int = ...) -> None: ... + @overload + def addstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ... + def attroff(self, attr: int, /) -> None: ... + def attron(self, attr: int, /) -> None: ... + def attrset(self, attr: int, /) -> None: ... + def bkgd(self, ch: _ChType, attr: int = 0, /) -> None: ... + def bkgdset(self, ch: _ChType, attr: int = 0, /) -> None: ... + def border( + self, + ls: _ChType = ..., + rs: _ChType = ..., + ts: _ChType = ..., + bs: _ChType = ..., + tl: _ChType = ..., + tr: _ChType = ..., + bl: _ChType = ..., + br: _ChType = ..., + ) -> None: ... + @overload + def box(self) -> None: ... + @overload + def box(self, vertch: _ChType = 0, horch: _ChType = 0) -> None: ... + @overload + def chgat(self, attr: int) -> None: ... + @overload + def chgat(self, num: int, attr: int) -> None: ... + @overload + def chgat(self, y: int, x: int, attr: int) -> None: ... + @overload + def chgat(self, y: int, x: int, num: int, attr: int) -> None: ... + def clear(self) -> None: ... + def clearok(self, yes: int) -> None: ... + def clrtobot(self) -> None: ... + def clrtoeol(self) -> None: ... + def cursyncup(self) -> None: ... + @overload + def delch(self) -> None: ... + @overload + def delch(self, y: int, x: int) -> None: ... + def deleteln(self) -> None: ... + @overload + def derwin(self, begin_y: int, begin_x: int) -> window: ... + @overload + def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ... + def echochar(self, ch: _ChType, attr: int = 0, /) -> None: ... + def enclose(self, y: int, x: int, /) -> bool: ... + def erase(self) -> None: ... + def getbegyx(self) -> tuple[int, int]: ... + def getbkgd(self) -> tuple[int, int]: ... + @overload + def getch(self) -> int: ... + @overload + def getch(self, y: int, x: int) -> int: ... + @overload + def get_wch(self) -> int | str: ... + @overload + def get_wch(self, y: int, x: int) -> int | str: ... + @overload + def getkey(self) -> str: ... 
+ @overload + def getkey(self, y: int, x: int) -> str: ... + def getmaxyx(self) -> tuple[int, int]: ... + def getparyx(self) -> tuple[int, int]: ... + @overload + def getstr(self) -> bytes: ... + @overload + def getstr(self, n: int) -> bytes: ... + @overload + def getstr(self, y: int, x: int) -> bytes: ... + @overload + def getstr(self, y: int, x: int, n: int) -> bytes: ... + def getyx(self) -> tuple[int, int]: ... + @overload + def hline(self, ch: _ChType, n: int) -> None: ... + @overload + def hline(self, y: int, x: int, ch: _ChType, n: int) -> None: ... + def idcok(self, flag: bool) -> None: ... + def idlok(self, yes: bool) -> None: ... + def immedok(self, flag: bool) -> None: ... + @overload + def inch(self) -> int: ... + @overload + def inch(self, y: int, x: int) -> int: ... + @overload + def insch(self, ch: _ChType, attr: int = ...) -> None: ... + @overload + def insch(self, y: int, x: int, ch: _ChType, attr: int = ...) -> None: ... + def insdelln(self, nlines: int) -> None: ... + def insertln(self) -> None: ... + @overload + def insnstr(self, str: str, n: int, attr: int = ...) -> None: ... + @overload + def insnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ... + @overload + def insstr(self, str: str, attr: int = ...) -> None: ... + @overload + def insstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ... + @overload + def instr(self, n: int = 2047) -> bytes: ... + @overload + def instr(self, y: int, x: int, n: int = 2047) -> bytes: ... + def is_linetouched(self, line: int, /) -> bool: ... + def is_wintouched(self) -> bool: ... + def keypad(self, yes: bool, /) -> None: ... + def leaveok(self, yes: bool) -> None: ... + def move(self, new_y: int, new_x: int) -> None: ... + def mvderwin(self, y: int, x: int) -> None: ... + def mvwin(self, new_y: int, new_x: int) -> None: ... + def nodelay(self, yes: bool) -> None: ... + def notimeout(self, yes: bool) -> None: ... + @overload + def noutrefresh(self) -> None: ... + @overload + def noutrefresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ... + @overload + def overlay(self, destwin: window) -> None: ... + @overload + def overlay( + self, destwin: window, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int + ) -> None: ... + @overload + def overwrite(self, destwin: window) -> None: ... + @overload + def overwrite( + self, destwin: window, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int + ) -> None: ... + def putwin(self, file: SupportsWrite[bytes], /) -> None: ... + def redrawln(self, beg: int, num: int, /) -> None: ... + def redrawwin(self) -> None: ... + @overload + def refresh(self) -> None: ... + @overload + def refresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ... + def resize(self, nlines: int, ncols: int) -> None: ... + def scroll(self, lines: int = 1) -> None: ... + def scrollok(self, flag: bool) -> None: ... + def setscrreg(self, top: int, bottom: int, /) -> None: ... + def standend(self) -> None: ... + def standout(self) -> None: ... + @overload + def subpad(self, begin_y: int, begin_x: int) -> window: ... + @overload + def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ... + @overload + def subwin(self, begin_y: int, begin_x: int) -> window: ... + @overload + def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ... + def syncdown(self) -> None: ... 
+ def syncok(self, flag: bool) -> None: ... + def syncup(self) -> None: ... + def timeout(self, delay: int) -> None: ... + def touchline(self, start: int, count: int, changed: bool = True) -> None: ... + def touchwin(self) -> None: ... + def untouchwin(self) -> None: ... + @overload + def vline(self, ch: _ChType, n: int) -> None: ... + @overload + def vline(self, y: int, x: int, ch: _ChType, n: int) -> None: ... + +ncurses_version: _ncurses_version diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_curses_panel.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_curses_panel.pyi new file mode 100644 index 0000000..a552a15 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_curses_panel.pyi @@ -0,0 +1,27 @@ +from _curses import window +from typing import Final, final + +__version__: Final[str] +version: Final[str] + +class error(Exception): ... + +@final +class panel: + def above(self) -> panel: ... + def below(self) -> panel: ... + def bottom(self) -> None: ... + def hidden(self) -> bool: ... + def hide(self) -> None: ... + def move(self, y: int, x: int, /) -> None: ... + def replace(self, win: window, /) -> None: ... + def set_userptr(self, obj: object, /) -> None: ... + def show(self) -> None: ... + def top(self) -> None: ... + def userptr(self) -> object: ... + def window(self) -> window: ... + +def bottom_panel() -> panel: ... +def new_panel(win: window, /) -> panel: ... +def top_panel() -> panel: ... +def update_panels() -> panel: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_dbm.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_dbm.pyi new file mode 100644 index 0000000..222c3ff --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_dbm.pyi @@ -0,0 +1,44 @@ +import sys +from _typeshed import ReadOnlyBuffer, StrOrBytesPath +from types import TracebackType +from typing import Final, TypeVar, final, overload, type_check_only +from typing_extensions import Self, TypeAlias + +if sys.platform != "win32": + _T = TypeVar("_T") + _KeyType: TypeAlias = str | ReadOnlyBuffer + _ValueType: TypeAlias = str | ReadOnlyBuffer + + class error(OSError): ... + library: Final[str] + + # Actual typename dbm, not exposed by the implementation + @final + @type_check_only + class _dbm: + def close(self) -> None: ... + if sys.version_info >= (3, 13): + def clear(self) -> None: ... + + def __getitem__(self, item: _KeyType) -> bytes: ... + def __setitem__(self, key: _KeyType, value: _ValueType) -> None: ... + def __delitem__(self, key: _KeyType) -> None: ... + def __len__(self) -> int: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + @overload + def get(self, k: _KeyType, /) -> bytes | None: ... + @overload + def get(self, k: _KeyType, default: _T, /) -> bytes | _T: ... + def keys(self) -> list[bytes]: ... + def setdefault(self, k: _KeyType, default: _ValueType = b"", /) -> bytes: ... + # This isn't true, but the class can't be instantiated. See #13024 + __new__: None # type: ignore[assignment] + __init__: None # type: ignore[assignment] + + if sys.version_info >= (3, 11): + def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _dbm: ... + else: + def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _dbm: ... 
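The `_dbm` stub above describes the small mapping-like object returned by its `open()` function. Purely as an illustrative sketch (not part of the vendored stub, and POSIX-only, since the stub is gated on `sys.platform != "win32"`), the same interface is normally reached through the public `dbm.ndbm` wrapper; the filename below is a hypothetical example:

import dbm.ndbm  # public wrapper around the _dbm extension described by the stub above

# "c" opens the database read/write, creating it if it does not exist.
with dbm.ndbm.open("example_store", "c") as db:
    db["greeting"] = "hello"      # keys and values may be str or bytes, per the stub's _KeyType/_ValueType
    print(db.get("greeting"))     # b'hello' -- values always come back as bytes
    print(db.keys())              # [b'greeting']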
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_decimal.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_decimal.pyi new file mode 100644 index 0000000..3cfe894 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_decimal.pyi @@ -0,0 +1,72 @@ +import sys +from decimal import ( + Clamped as Clamped, + Context as Context, + ConversionSyntax as ConversionSyntax, + Decimal as Decimal, + DecimalException as DecimalException, + DecimalTuple as DecimalTuple, + DivisionByZero as DivisionByZero, + DivisionImpossible as DivisionImpossible, + DivisionUndefined as DivisionUndefined, + FloatOperation as FloatOperation, + Inexact as Inexact, + InvalidContext as InvalidContext, + InvalidOperation as InvalidOperation, + Overflow as Overflow, + Rounded as Rounded, + Subnormal as Subnormal, + Underflow as Underflow, + _ContextManager, +) +from typing import Final +from typing_extensions import TypeAlias + +_TrapType: TypeAlias = type[DecimalException] + +__version__: Final[str] +__libmpdec_version__: Final[str] + +ROUND_DOWN: Final = "ROUND_DOWN" +ROUND_HALF_UP: Final = "ROUND_HALF_UP" +ROUND_HALF_EVEN: Final = "ROUND_HALF_EVEN" +ROUND_CEILING: Final = "ROUND_CEILING" +ROUND_FLOOR: Final = "ROUND_FLOOR" +ROUND_UP: Final = "ROUND_UP" +ROUND_HALF_DOWN: Final = "ROUND_HALF_DOWN" +ROUND_05UP: Final = "ROUND_05UP" +HAVE_CONTEXTVAR: Final[bool] +HAVE_THREADS: Final[bool] +MAX_EMAX: Final[int] +MAX_PREC: Final[int] +MIN_EMIN: Final[int] +MIN_ETINY: Final[int] +if sys.version_info >= (3, 14): + IEEE_CONTEXT_MAX_BITS: Final[int] + +def setcontext(context: Context, /) -> None: ... +def getcontext() -> Context: ... + +if sys.version_info >= (3, 11): + def localcontext( + ctx: Context | None = None, + *, + prec: int | None = None, + rounding: str | None = None, + Emin: int | None = None, + Emax: int | None = None, + capitals: int | None = None, + clamp: int | None = None, + traps: dict[_TrapType, bool] | None = None, + flags: dict[_TrapType, bool] | None = None, + ) -> _ContextManager: ... + +else: + def localcontext(ctx: Context | None = None) -> _ContextManager: ... + +if sys.version_info >= (3, 14): + def IEEEContext(bits: int, /) -> Context: ... + +DefaultContext: Context +BasicContext: Context +ExtendedContext: Context diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_frozen_importlib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_frozen_importlib.pyi new file mode 100644 index 0000000..58db64a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_frozen_importlib.pyi @@ -0,0 +1,124 @@ +import importlib.abc +import importlib.machinery +import sys +import types +from _typeshed.importlib import LoaderProtocol +from collections.abc import Mapping, Sequence +from types import ModuleType +from typing import Any, ClassVar +from typing_extensions import deprecated + +# Signature of `builtins.__import__` should be kept identical to `importlib.__import__` +def __import__( + name: str, + globals: Mapping[str, object] | None = None, + locals: Mapping[str, object] | None = None, + fromlist: Sequence[str] | None = (), + level: int = 0, +) -> ModuleType: ... +def spec_from_loader( + name: str, loader: LoaderProtocol | None, *, origin: str | None = None, is_package: bool | None = None +) -> importlib.machinery.ModuleSpec | None: ... +def module_from_spec(spec: importlib.machinery.ModuleSpec) -> types.ModuleType: ... 
+def _init_module_attrs( + spec: importlib.machinery.ModuleSpec, module: types.ModuleType, *, override: bool = False +) -> types.ModuleType: ... + +class ModuleSpec: + def __init__( + self, + name: str, + loader: importlib.abc.Loader | None, + *, + origin: str | None = None, + loader_state: Any = None, + is_package: bool | None = None, + ) -> None: ... + name: str + loader: importlib.abc.Loader | None + origin: str | None + submodule_search_locations: list[str] | None + loader_state: Any + cached: str | None + @property + def parent(self) -> str | None: ... + has_location: bool + def __eq__(self, other: object) -> bool: ... + __hash__: ClassVar[None] # type: ignore[assignment] + +class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): + # MetaPathFinder + if sys.version_info < (3, 12): + @classmethod + @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `find_spec()` instead.") + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... + + @classmethod + def find_spec( + cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None + ) -> ModuleSpec | None: ... + # InspectLoader + @classmethod + def is_package(cls, fullname: str) -> bool: ... + @classmethod + def load_module(cls, fullname: str) -> types.ModuleType: ... + @classmethod + def get_code(cls, fullname: str) -> None: ... + @classmethod + def get_source(cls, fullname: str) -> None: ... + # Loader + if sys.version_info < (3, 12): + @staticmethod + @deprecated( + "Deprecated since Python 3.4; removed in Python 3.12. " + "The module spec is now used by the import machinery to generate a module repr." + ) + def module_repr(module: types.ModuleType) -> str: ... + if sys.version_info >= (3, 10): + @staticmethod + def create_module(spec: ModuleSpec) -> types.ModuleType | None: ... + @staticmethod + def exec_module(module: types.ModuleType) -> None: ... + else: + @classmethod + def create_module(cls, spec: ModuleSpec) -> types.ModuleType | None: ... + @classmethod + def exec_module(cls, module: types.ModuleType) -> None: ... + +class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): + # MetaPathFinder + if sys.version_info < (3, 12): + @classmethod + @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `find_spec()` instead.") + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... + + @classmethod + def find_spec( + cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None + ) -> ModuleSpec | None: ... + # InspectLoader + @classmethod + def is_package(cls, fullname: str) -> bool: ... + @classmethod + def load_module(cls, fullname: str) -> types.ModuleType: ... + @classmethod + def get_code(cls, fullname: str) -> None: ... + @classmethod + def get_source(cls, fullname: str) -> None: ... + # Loader + if sys.version_info < (3, 12): + @staticmethod + @deprecated( + "Deprecated since Python 3.4; removed in Python 3.12. " + "The module spec is now used by the import machinery to generate a module repr." + ) + def module_repr(m: types.ModuleType) -> str: ... + if sys.version_info >= (3, 10): + @staticmethod + def create_module(spec: ModuleSpec) -> types.ModuleType | None: ... + else: + @classmethod + def create_module(cls, spec: ModuleSpec) -> types.ModuleType | None: ... + + @staticmethod + def exec_module(module: types.ModuleType) -> None: ... 
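The `_frozen_importlib` stub above covers the bootstrap half of the import machinery: module specs, `module_from_spec`, and the builtin/frozen finders. As a hedged illustration only (not part of the vendored file), these helpers are normally used through their `importlib.util` re-exports; "json" below is just an arbitrary module name chosen for the example:

import importlib.util

# Minimal sketch: locate a spec for an existing module, build a fresh module
# object from it, and execute it -- the module_from_spec / exec_module flow
# described by the stub above.
spec = importlib.util.find_spec("json")
if spec is not None and spec.loader is not None:
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    print(module.dumps({"ok": True}))  # the freshly executed module is usable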
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_frozen_importlib_external.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_frozen_importlib_external.pyi new file mode 100644 index 0000000..4778be3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_frozen_importlib_external.pyi @@ -0,0 +1,200 @@ +import _ast +import _io +import importlib.abc +import importlib.machinery +import sys +import types +from _typeshed import ReadableBuffer, StrOrBytesPath, StrPath +from _typeshed.importlib import LoaderProtocol +from collections.abc import Callable, Iterable, Iterator, Mapping, MutableSequence, Sequence +from importlib.machinery import ModuleSpec +from importlib.metadata import DistributionFinder, PathDistribution +from typing import Any, Final, Literal +from typing_extensions import Self, deprecated + +if sys.version_info >= (3, 10): + import importlib.readers + +if sys.platform == "win32": + path_separators: Literal["\\/"] + path_sep: Literal["\\"] + path_sep_tuple: tuple[Literal["\\"], Literal["/"]] +else: + path_separators: Literal["/"] + path_sep: Literal["/"] + path_sep_tuple: tuple[Literal["/"]] + +MAGIC_NUMBER: Final[bytes] + +def cache_from_source(path: StrPath, debug_override: bool | None = None, *, optimization: Any | None = None) -> str: ... +def source_from_cache(path: StrPath) -> str: ... +def decode_source(source_bytes: ReadableBuffer) -> str: ... +def spec_from_file_location( + name: str, + location: StrOrBytesPath | None = None, + *, + loader: LoaderProtocol | None = None, + submodule_search_locations: list[str] | None = ..., +) -> importlib.machinery.ModuleSpec | None: ... +@deprecated( + "Deprecated since Python 3.6. Use site configuration instead. " + "Future versions of Python may not enable this finder by default." +) +class WindowsRegistryFinder(importlib.abc.MetaPathFinder): + if sys.version_info < (3, 12): + @classmethod + @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `find_spec()` instead.") + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... + + @classmethod + def find_spec( + cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None + ) -> ModuleSpec | None: ... + +class PathFinder(importlib.abc.MetaPathFinder): + if sys.version_info >= (3, 10): + @staticmethod + def invalidate_caches() -> None: ... + else: + @classmethod + def invalidate_caches(cls) -> None: ... + if sys.version_info >= (3, 10): + @staticmethod + def find_distributions(context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: ... + else: + @classmethod + def find_distributions(cls, context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: ... + + @classmethod + def find_spec( + cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None + ) -> ModuleSpec | None: ... + if sys.version_info < (3, 12): + @classmethod + @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `find_spec()` instead.") + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... 
+ +SOURCE_SUFFIXES: Final[list[str]] +DEBUG_BYTECODE_SUFFIXES: Final = [".pyc"] +OPTIMIZED_BYTECODE_SUFFIXES: Final = [".pyc"] +BYTECODE_SUFFIXES: Final = [".pyc"] +EXTENSION_SUFFIXES: Final[list[str]] + +class FileFinder(importlib.abc.PathEntryFinder): + path: str + def __init__(self, path: str, *loader_details: tuple[type[importlib.abc.Loader], list[str]]) -> None: ... + @classmethod + def path_hook( + cls, *loader_details: tuple[type[importlib.abc.Loader], list[str]] + ) -> Callable[[str], importlib.abc.PathEntryFinder]: ... + +class _LoaderBasics: + def is_package(self, fullname: str) -> bool: ... + def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: ... + def exec_module(self, module: types.ModuleType) -> None: ... + def load_module(self, fullname: str) -> types.ModuleType: ... + +class SourceLoader(_LoaderBasics): + def path_mtime(self, path: str) -> float: ... + def set_data(self, path: str, data: bytes) -> None: ... + def get_source(self, fullname: str) -> str | None: ... + def path_stats(self, path: str) -> Mapping[str, Any]: ... + def source_to_code( + self, data: ReadableBuffer | str | _ast.Module | _ast.Expression | _ast.Interactive, path: bytes | StrPath + ) -> types.CodeType: ... + def get_code(self, fullname: str) -> types.CodeType | None: ... + +class FileLoader: + name: str + path: str + def __init__(self, fullname: str, path: str) -> None: ... + def get_data(self, path: str) -> bytes: ... + def get_filename(self, fullname: str | None = None) -> str: ... + def load_module(self, fullname: str | None = None) -> types.ModuleType: ... + if sys.version_info >= (3, 10): + def get_resource_reader(self, name: str | None = None) -> importlib.readers.FileReader: ... + else: + def get_resource_reader(self, name: str | None = None) -> Self | None: ... + def open_resource(self, resource: str) -> _io.FileIO: ... + def resource_path(self, resource: str) -> str: ... + def is_resource(self, name: str) -> bool: ... + def contents(self) -> Iterator[str]: ... + +class SourceFileLoader(importlib.abc.FileLoader, FileLoader, importlib.abc.SourceLoader, SourceLoader): # type: ignore[misc] # incompatible method arguments in base classes + def set_data(self, path: str, data: ReadableBuffer, *, _mode: int = 0o666) -> None: ... + def path_stats(self, path: str) -> Mapping[str, Any]: ... + def source_to_code( # type: ignore[override] # incompatible with InspectLoader.source_to_code + self, + data: ReadableBuffer | str | _ast.Module | _ast.Expression | _ast.Interactive, + path: bytes | StrPath, + *, + _optimize: int = -1, + ) -> types.CodeType: ... + +class SourcelessFileLoader(importlib.abc.FileLoader, FileLoader, _LoaderBasics): + def get_code(self, fullname: str) -> types.CodeType | None: ... + def get_source(self, fullname: str) -> None: ... + +class ExtensionFileLoader(FileLoader, _LoaderBasics, importlib.abc.ExecutionLoader): + def __init__(self, name: str, path: str) -> None: ... + def get_filename(self, fullname: str | None = None) -> str: ... + def get_source(self, fullname: str) -> None: ... + def create_module(self, spec: ModuleSpec) -> types.ModuleType: ... + def exec_module(self, module: types.ModuleType) -> None: ... + def get_code(self, fullname: str) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + +if sys.version_info >= (3, 11): + class NamespaceLoader(importlib.abc.InspectLoader): + def __init__( + self, name: str, path: MutableSequence[str], path_finder: Callable[[str, tuple[str, ...]], ModuleSpec] + ) -> None: ... 
+ def is_package(self, fullname: str) -> Literal[True]: ... + def get_source(self, fullname: str) -> Literal[""]: ... + def get_code(self, fullname: str) -> types.CodeType: ... + def create_module(self, spec: ModuleSpec) -> None: ... + def exec_module(self, module: types.ModuleType) -> None: ... + @deprecated("Deprecated since Python 3.10; will be removed in Python 3.15. Use `exec_module()` instead.") + def load_module(self, fullname: str) -> types.ModuleType: ... + def get_resource_reader(self, module: types.ModuleType) -> importlib.readers.NamespaceReader: ... + if sys.version_info < (3, 12): + @staticmethod + @deprecated( + "Deprecated since Python 3.4; removed in Python 3.12. " + "The module spec is now used by the import machinery to generate a module repr." + ) + def module_repr(module: types.ModuleType) -> str: ... + + _NamespaceLoader = NamespaceLoader +else: + class _NamespaceLoader: + def __init__( + self, name: str, path: MutableSequence[str], path_finder: Callable[[str, tuple[str, ...]], ModuleSpec] + ) -> None: ... + def is_package(self, fullname: str) -> Literal[True]: ... + def get_source(self, fullname: str) -> Literal[""]: ... + def get_code(self, fullname: str) -> types.CodeType: ... + def create_module(self, spec: ModuleSpec) -> None: ... + def exec_module(self, module: types.ModuleType) -> None: ... + if sys.version_info >= (3, 10): + @deprecated("Deprecated since Python 3.10; will be removed in Python 3.15. Use `exec_module()` instead.") + def load_module(self, fullname: str) -> types.ModuleType: ... + @staticmethod + @deprecated( + "Deprecated since Python 3.4; removed in Python 3.12. " + "The module spec is now used by the import machinery to generate a module repr." + ) + def module_repr(module: types.ModuleType) -> str: ... + def get_resource_reader(self, module: types.ModuleType) -> importlib.readers.NamespaceReader: ... + else: + def load_module(self, fullname: str) -> types.ModuleType: ... + @classmethod + @deprecated( + "Deprecated since Python 3.4; removed in Python 3.12. " + "The module spec is now used by the import machinery to generate a module repr." + ) + def module_repr(cls, module: types.ModuleType) -> str: ... + +if sys.version_info >= (3, 13): + class AppleFrameworkLoader(ExtensionFileLoader, importlib.abc.ExecutionLoader): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_gdbm.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_gdbm.pyi new file mode 100644 index 0000000..2cb5fba --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_gdbm.pyi @@ -0,0 +1,48 @@ +import sys +from _typeshed import ReadOnlyBuffer, StrOrBytesPath +from types import TracebackType +from typing import TypeVar, overload, type_check_only +from typing_extensions import Self, TypeAlias + +if sys.platform != "win32": + _T = TypeVar("_T") + _KeyType: TypeAlias = str | ReadOnlyBuffer + _ValueType: TypeAlias = str | ReadOnlyBuffer + + open_flags: str + + class error(OSError): ... + # Actual typename gdbm, not exposed by the implementation + @type_check_only + class _gdbm: + def firstkey(self) -> bytes | None: ... + def nextkey(self, key: _KeyType) -> bytes | None: ... + def reorganize(self) -> None: ... + def sync(self) -> None: ... + def close(self) -> None: ... + if sys.version_info >= (3, 13): + def clear(self) -> None: ... + + def __getitem__(self, item: _KeyType) -> bytes: ... + def __setitem__(self, key: _KeyType, value: _ValueType) -> None: ... + def __delitem__(self, key: _KeyType) -> None: ... 
+ def __contains__(self, key: _KeyType) -> bool: ... + def __len__(self) -> int: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + @overload + def get(self, k: _KeyType) -> bytes | None: ... + @overload + def get(self, k: _KeyType, default: _T) -> bytes | _T: ... + def keys(self) -> list[bytes]: ... + def setdefault(self, k: _KeyType, default: _ValueType = ...) -> bytes: ... + # Don't exist at runtime + __new__: None # type: ignore[assignment] + __init__: None # type: ignore[assignment] + + if sys.version_info >= (3, 11): + def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _gdbm: ... + else: + def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _gdbm: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_hashlib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_hashlib.pyi new file mode 100644 index 0000000..03c1eef --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_hashlib.pyi @@ -0,0 +1,127 @@ +import sys +from _typeshed import ReadableBuffer +from collections.abc import Callable +from types import ModuleType +from typing import AnyStr, Protocol, final, overload, type_check_only +from typing_extensions import Self, TypeAlias, disjoint_base + +_DigestMod: TypeAlias = str | Callable[[], _HashObject] | ModuleType | None + +openssl_md_meth_names: frozenset[str] + +@type_check_only +class _HashObject(Protocol): + @property + def digest_size(self) -> int: ... + @property + def block_size(self) -> int: ... + @property + def name(self) -> str: ... + def copy(self) -> Self: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def update(self, obj: ReadableBuffer, /) -> None: ... + +@disjoint_base +class HASH: + @property + def digest_size(self) -> int: ... + @property + def block_size(self) -> int: ... + @property + def name(self) -> str: ... + def copy(self) -> Self: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def update(self, obj: ReadableBuffer, /) -> None: ... + +if sys.version_info >= (3, 10): + class UnsupportedDigestmodError(ValueError): ... + +class HASHXOF(HASH): + def digest(self, length: int) -> bytes: ... # type: ignore[override] + def hexdigest(self, length: int) -> str: ... # type: ignore[override] + +@final +class HMAC: + @property + def digest_size(self) -> int: ... + @property + def block_size(self) -> int: ... + @property + def name(self) -> str: ... + def copy(self) -> Self: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def update(self, msg: ReadableBuffer) -> None: ... + +@overload +def compare_digest(a: ReadableBuffer, b: ReadableBuffer, /) -> bool: ... +@overload +def compare_digest(a: AnyStr, b: AnyStr, /) -> bool: ... +def get_fips_mode() -> int: ... +def hmac_new(key: bytes | bytearray, msg: ReadableBuffer = b"", digestmod: _DigestMod = None) -> HMAC: ... + +if sys.version_info >= (3, 13): + def new( + name: str, data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASH: ... + def openssl_md5( + data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASH: ... + def openssl_sha1( + data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASH: ... 
+ def openssl_sha224( + data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASH: ... + def openssl_sha256( + data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASH: ... + def openssl_sha384( + data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASH: ... + def openssl_sha512( + data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASH: ... + def openssl_sha3_224( + data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASH: ... + def openssl_sha3_256( + data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASH: ... + def openssl_sha3_384( + data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASH: ... + def openssl_sha3_512( + data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASH: ... + def openssl_shake_128( + data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASHXOF: ... + def openssl_shake_256( + data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASHXOF: ... + +else: + def new(name: str, string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_md5(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_sha1(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_sha224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_sha256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_sha384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_sha512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_sha3_224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_sha3_256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_sha3_384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_sha3_512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_shake_128(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASHXOF: ... + def openssl_shake_256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASHXOF: ... + +def hmac_digest(key: bytes | bytearray, msg: ReadableBuffer, digest: str) -> bytes: ... +def pbkdf2_hmac( + hash_name: str, password: ReadableBuffer, salt: ReadableBuffer, iterations: int, dklen: int | None = None +) -> bytes: ... +def scrypt( + password: ReadableBuffer, *, salt: ReadableBuffer, n: int, r: int, p: int, maxmem: int = 0, dklen: int = 64 +) -> bytes: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_heapq.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_heapq.pyi new file mode 100644 index 0000000..4d7d6ab --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_heapq.pyi @@ -0,0 +1,18 @@ +import sys +from _typeshed import SupportsRichComparisonT as _T # All type variable use in this module requires comparability. 
+from typing import Final + +__about__: Final[str] + +def heapify(heap: list[_T], /) -> None: ... +def heappop(heap: list[_T], /) -> _T: ... +def heappush(heap: list[_T], item: _T, /) -> None: ... +def heappushpop(heap: list[_T], item: _T, /) -> _T: ... +def heapreplace(heap: list[_T], item: _T, /) -> _T: ... + +if sys.version_info >= (3, 14): + def heapify_max(heap: list[_T], /) -> None: ... + def heappop_max(heap: list[_T], /) -> _T: ... + def heappush_max(heap: list[_T], item: _T, /) -> None: ... + def heappushpop_max(heap: list[_T], item: _T, /) -> _T: ... + def heapreplace_max(heap: list[_T], item: _T, /) -> _T: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_imp.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_imp.pyi new file mode 100644 index 0000000..c12c26d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_imp.pyi @@ -0,0 +1,30 @@ +import sys +import types +from _typeshed import ReadableBuffer +from importlib.machinery import ModuleSpec +from typing import Any + +check_hash_based_pycs: str +if sys.version_info >= (3, 14): + pyc_magic_number_token: int + +def source_hash(key: int, source: ReadableBuffer) -> bytes: ... +def create_builtin(spec: ModuleSpec, /) -> types.ModuleType: ... +def create_dynamic(spec: ModuleSpec, file: Any = None, /) -> types.ModuleType: ... +def acquire_lock() -> None: ... +def exec_builtin(mod: types.ModuleType, /) -> int: ... +def exec_dynamic(mod: types.ModuleType, /) -> int: ... +def extension_suffixes() -> list[str]: ... +def init_frozen(name: str, /) -> types.ModuleType: ... +def is_builtin(name: str, /) -> int: ... +def is_frozen(name: str, /) -> bool: ... +def is_frozen_package(name: str, /) -> bool: ... +def lock_held() -> bool: ... +def release_lock() -> None: ... + +if sys.version_info >= (3, 11): + def find_frozen(name: str, /, *, withdata: bool = False) -> tuple[memoryview | None, bool, str | None] | None: ... + def get_frozen_object(name: str, data: ReadableBuffer | None = None, /) -> types.CodeType: ... + +else: + def get_frozen_object(name: str, /) -> types.CodeType: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_interpchannels.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_interpchannels.pyi new file mode 100644 index 0000000..a631a6f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_interpchannels.pyi @@ -0,0 +1,86 @@ +from _typeshed import structseq +from typing import Any, Final, Literal, SupportsIndex, final +from typing_extensions import Buffer, Self + +class ChannelError(RuntimeError): ... +class ChannelClosedError(ChannelError): ... +class ChannelEmptyError(ChannelError): ... +class ChannelNotEmptyError(ChannelError): ... +class ChannelNotFoundError(ChannelError): ... + +# Mark as final, since instantiating ChannelID is not supported. +@final +class ChannelID: + @property + def end(self) -> Literal["send", "recv", "both"]: ... + @property + def send(self) -> Self: ... + @property + def recv(self) -> Self: ... + def __eq__(self, other: object, /) -> bool: ... + def __ge__(self, other: ChannelID, /) -> bool: ... + def __gt__(self, other: ChannelID, /) -> bool: ... + def __hash__(self) -> int: ... + def __index__(self) -> int: ... + def __int__(self) -> int: ... + def __le__(self, other: ChannelID, /) -> bool: ... + def __lt__(self, other: ChannelID, /) -> bool: ... + def __ne__(self, other: object, /) -> bool: ... 
+ +@final +class ChannelInfo(structseq[int], tuple[bool, bool, bool, int, int, int, int, int]): + __match_args__: Final = ( + "open", + "closing", + "closed", + "count", + "num_interp_send", + "num_interp_send_released", + "num_interp_recv", + "num_interp_recv_released", + ) + @property + def open(self) -> bool: ... + @property + def closing(self) -> bool: ... + @property + def closed(self) -> bool: ... + @property + def count(self) -> int: ... # type: ignore[override] + @property + def num_interp_send(self) -> int: ... + @property + def num_interp_send_released(self) -> int: ... + @property + def num_interp_recv(self) -> int: ... + @property + def num_interp_recv_released(self) -> int: ... + @property + def num_interp_both(self) -> int: ... + @property + def num_interp_both_recv_released(self) -> int: ... + @property + def num_interp_both_send_released(self) -> int: ... + @property + def num_interp_both_released(self) -> int: ... + @property + def recv_associated(self) -> bool: ... + @property + def recv_released(self) -> bool: ... + @property + def send_associated(self) -> bool: ... + @property + def send_released(self) -> bool: ... + +def create(unboundop: Literal[1, 2, 3]) -> ChannelID: ... +def destroy(cid: SupportsIndex) -> None: ... +def list_all() -> list[ChannelID]: ... +def list_interpreters(cid: SupportsIndex, *, send: bool) -> list[int]: ... +def send(cid: SupportsIndex, obj: object, *, blocking: bool = True, timeout: float | None = None) -> None: ... +def send_buffer(cid: SupportsIndex, obj: Buffer, *, blocking: bool = True, timeout: float | None = None) -> None: ... +def recv(cid: SupportsIndex, default: object = ...) -> tuple[Any, Literal[1, 2, 3]]: ... +def close(cid: SupportsIndex, *, send: bool = False, recv: bool = False) -> None: ... +def get_count(cid: SupportsIndex) -> int: ... +def get_info(cid: SupportsIndex) -> ChannelInfo: ... +def get_channel_defaults(cid: SupportsIndex) -> Literal[1, 2, 3]: ... +def release(cid: SupportsIndex, *, send: bool = False, recv: bool = False, force: bool = False) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_interpqueues.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_interpqueues.pyi new file mode 100644 index 0000000..c9323b1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_interpqueues.pyi @@ -0,0 +1,19 @@ +from typing import Any, Literal, SupportsIndex +from typing_extensions import TypeAlias + +_UnboundOp: TypeAlias = Literal[1, 2, 3] + +class QueueError(RuntimeError): ... +class QueueNotFoundError(QueueError): ... + +def bind(qid: SupportsIndex) -> None: ... +def create(maxsize: SupportsIndex, fmt: SupportsIndex, unboundop: _UnboundOp) -> int: ... +def destroy(qid: SupportsIndex) -> None: ... +def get(qid: SupportsIndex) -> tuple[Any, int, _UnboundOp | None]: ... +def get_count(qid: SupportsIndex) -> int: ... +def get_maxsize(qid: SupportsIndex) -> int: ... +def get_queue_defaults(qid: SupportsIndex) -> tuple[int, _UnboundOp]: ... +def is_full(qid: SupportsIndex) -> bool: ... +def list_all() -> list[tuple[int, int, _UnboundOp]]: ... +def put(qid: SupportsIndex, obj: Any, fmt: SupportsIndex, unboundop: _UnboundOp) -> None: ... +def release(qid: SupportsIndex) -> None: ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_interpreters.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_interpreters.pyi new file mode 100644 index 0000000..8e097ef --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_interpreters.pyi @@ -0,0 +1,61 @@ +import types +from collections.abc import Callable +from typing import Any, Final, Literal, SupportsIndex, TypeVar, overload +from typing_extensions import TypeAlias, disjoint_base + +_R = TypeVar("_R") + +_Configs: TypeAlias = Literal["default", "isolated", "legacy", "empty", ""] +_SharedDict: TypeAlias = dict[str, Any] # many objects can be shared + +class InterpreterError(Exception): ... +class InterpreterNotFoundError(InterpreterError): ... +class NotShareableError(ValueError): ... + +@disjoint_base +class CrossInterpreterBufferView: + def __buffer__(self, flags: int, /) -> memoryview: ... + +def new_config(name: _Configs = "isolated", /, **overides: object) -> types.SimpleNamespace: ... +def create(config: types.SimpleNamespace | _Configs | None = "isolated", *, reqrefs: bool = False) -> int: ... +def destroy(id: SupportsIndex, *, restrict: bool = False) -> None: ... +def list_all(*, require_ready: bool = False) -> list[tuple[int, _Whence]]: ... +def get_current() -> tuple[int, _Whence]: ... +def get_main() -> tuple[int, _Whence]: ... +def is_running(id: SupportsIndex, *, restrict: bool = False) -> bool: ... +def get_config(id: SupportsIndex, *, restrict: bool = False) -> types.SimpleNamespace: ... +def whence(id: SupportsIndex) -> _Whence: ... +def exec( + id: SupportsIndex, code: str | types.CodeType | Callable[[], object], shared: _SharedDict = {}, *, restrict: bool = False +) -> None | types.SimpleNamespace: ... +def call( + id: SupportsIndex, + callable: Callable[..., _R], + args: tuple[Any, ...] = (), + kwargs: dict[str, Any] = {}, + *, + preserve_exc: bool = False, + restrict: bool = False, +) -> tuple[_R, types.SimpleNamespace]: ... +def run_string( + id: SupportsIndex, script: str | types.CodeType | Callable[[], object], shared: _SharedDict = {}, *, restrict: bool = False +) -> None: ... +def run_func( + id: SupportsIndex, func: types.CodeType | Callable[[], object], shared: _SharedDict = {}, *, restrict: bool = False +) -> None: ... +def set___main___attrs(id: SupportsIndex, updates: _SharedDict, *, restrict: bool = False) -> None: ... +def incref(id: SupportsIndex, *, implieslink: bool = False, restrict: bool = False) -> None: ... +def decref(id: SupportsIndex, *, restrict: bool = False) -> None: ... +def is_shareable(obj: object) -> bool: ... +@overload +def capture_exception(exc: BaseException) -> types.SimpleNamespace: ... +@overload +def capture_exception(exc: None = None) -> types.SimpleNamespace | None: ... 
+ +_Whence: TypeAlias = Literal[0, 1, 2, 3, 4, 5] +WHENCE_UNKNOWN: Final = 0 +WHENCE_RUNTIME: Final = 1 +WHENCE_LEGACY_CAPI: Final = 2 +WHENCE_CAPI: Final = 3 +WHENCE_XI: Final = 4 +WHENCE_STDLIB: Final = 5 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_io.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_io.pyi new file mode 100644 index 0000000..2d2a60e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_io.pyi @@ -0,0 +1,301 @@ +import builtins +import codecs +import sys +from _typeshed import FileDescriptorOrPath, MaybeNone, ReadableBuffer, WriteableBuffer +from collections.abc import Callable, Iterable, Iterator +from io import BufferedIOBase, RawIOBase, TextIOBase, UnsupportedOperation as UnsupportedOperation +from os import _Opener +from types import TracebackType +from typing import IO, Any, BinaryIO, Final, Generic, Literal, Protocol, TextIO, TypeVar, overload, type_check_only +from typing_extensions import Self, disjoint_base + +_T = TypeVar("_T") + +if sys.version_info >= (3, 14): + DEFAULT_BUFFER_SIZE: Final = 131072 +else: + DEFAULT_BUFFER_SIZE: Final = 8192 + +open = builtins.open + +def open_code(path: str) -> IO[bytes]: ... + +BlockingIOError = builtins.BlockingIOError + +if sys.version_info >= (3, 12): + @disjoint_base + class _IOBase: + def __iter__(self) -> Iterator[bytes]: ... + def __next__(self) -> bytes: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def close(self) -> None: ... + def fileno(self) -> int: ... + def flush(self) -> None: ... + def isatty(self) -> bool: ... + def readable(self) -> bool: ... + read: Callable[..., Any] + def readlines(self, hint: int = -1, /) -> list[bytes]: ... + def seek(self, offset: int, whence: int = 0, /) -> int: ... + def seekable(self) -> bool: ... + def tell(self) -> int: ... + def truncate(self, size: int | None = None, /) -> int: ... + def writable(self) -> bool: ... + write: Callable[..., Any] + def writelines(self, lines: Iterable[ReadableBuffer], /) -> None: ... + def readline(self, size: int | None = -1, /) -> bytes: ... + def __del__(self) -> None: ... + @property + def closed(self) -> bool: ... + def _checkClosed(self) -> None: ... # undocumented + +else: + class _IOBase: + def __iter__(self) -> Iterator[bytes]: ... + def __next__(self) -> bytes: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def close(self) -> None: ... + def fileno(self) -> int: ... + def flush(self) -> None: ... + def isatty(self) -> bool: ... + def readable(self) -> bool: ... + read: Callable[..., Any] + def readlines(self, hint: int = -1, /) -> list[bytes]: ... + def seek(self, offset: int, whence: int = 0, /) -> int: ... + def seekable(self) -> bool: ... + def tell(self) -> int: ... + def truncate(self, size: int | None = None, /) -> int: ... + def writable(self) -> bool: ... + write: Callable[..., Any] + def writelines(self, lines: Iterable[ReadableBuffer], /) -> None: ... + def readline(self, size: int | None = -1, /) -> bytes: ... + def __del__(self) -> None: ... + @property + def closed(self) -> bool: ... + def _checkClosed(self) -> None: ... # undocumented + +class _RawIOBase(_IOBase): + def readall(self) -> bytes: ... 
+ # The following methods can return None if the file is in non-blocking mode + # and no data is available. + def readinto(self, buffer: WriteableBuffer, /) -> int | MaybeNone: ... + def write(self, b: ReadableBuffer, /) -> int | MaybeNone: ... + def read(self, size: int = -1, /) -> bytes | MaybeNone: ... + +class _BufferedIOBase(_IOBase): + def detach(self) -> RawIOBase: ... + def readinto(self, buffer: WriteableBuffer, /) -> int: ... + def write(self, buffer: ReadableBuffer, /) -> int: ... + def readinto1(self, buffer: WriteableBuffer, /) -> int: ... + def read(self, size: int | None = -1, /) -> bytes: ... + def read1(self, size: int = -1, /) -> bytes: ... + +@disjoint_base +class FileIO(RawIOBase, _RawIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of writelines in the base classes + mode: str + # The type of "name" equals the argument passed in to the constructor, + # but that can make FileIO incompatible with other I/O types that assume + # "name" is a str. In the future, making FileIO generic might help. + name: Any + def __init__( + self, file: FileDescriptorOrPath, mode: str = "r", closefd: bool = True, opener: _Opener | None = None + ) -> None: ... + @property + def closefd(self) -> bool: ... + def seek(self, pos: int, whence: int = 0, /) -> int: ... + def read(self, size: int | None = -1, /) -> bytes | MaybeNone: ... + +@disjoint_base +class BytesIO(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of methods in the base classes + def __init__(self, initial_bytes: ReadableBuffer = b"") -> None: ... + # BytesIO does not contain a "name" field. This workaround is necessary + # to allow BytesIO sub-classes to add this field, as it is defined + # as a read-only property on IO[]. + name: Any + def getvalue(self) -> bytes: ... + def getbuffer(self) -> memoryview: ... + def read1(self, size: int | None = -1, /) -> bytes: ... + def readlines(self, size: int | None = None, /) -> list[bytes]: ... + def seek(self, pos: int, whence: int = 0, /) -> int: ... + +@type_check_only +class _BufferedReaderStream(Protocol): + def read(self, n: int = ..., /) -> bytes: ... + # Optional: def readall(self) -> bytes: ... + def readinto(self, b: memoryview, /) -> int | None: ... + def seek(self, pos: int, whence: int, /) -> int: ... + def tell(self) -> int: ... + def truncate(self, size: int, /) -> int: ... + def flush(self) -> object: ... + def close(self) -> object: ... + @property + def closed(self) -> bool: ... + def readable(self) -> bool: ... + def seekable(self) -> bool: ... + + # The following methods just pass through to the underlying stream. Since + # not all streams support them, they are marked as optional here, and will + # raise an AttributeError if called on a stream that does not support them. + + # @property + # def name(self) -> Any: ... # Type is inconsistent between the various I/O types. + # @property + # def mode(self) -> str: ... + # def fileno(self) -> int: ... + # def isatty(self) -> bool: ... + +_BufferedReaderStreamT = TypeVar("_BufferedReaderStreamT", bound=_BufferedReaderStream, default=_BufferedReaderStream) + +@disjoint_base +class BufferedReader(BufferedIOBase, _BufferedIOBase, BinaryIO, Generic[_BufferedReaderStreamT]): # type: ignore[misc] # incompatible definitions of methods in the base classes + raw: _BufferedReaderStreamT + if sys.version_info >= (3, 14): + def __init__(self, raw: _BufferedReaderStreamT, buffer_size: int = 131072) -> None: ... 
+ else: + def __init__(self, raw: _BufferedReaderStreamT, buffer_size: int = 8192) -> None: ... + + def peek(self, size: int = 0, /) -> bytes: ... + def seek(self, target: int, whence: int = 0, /) -> int: ... + def truncate(self, pos: int | None = None, /) -> int: ... + +@disjoint_base +class BufferedWriter(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of writelines in the base classes + raw: RawIOBase + if sys.version_info >= (3, 14): + def __init__(self, raw: RawIOBase, buffer_size: int = 131072) -> None: ... + else: + def __init__(self, raw: RawIOBase, buffer_size: int = 8192) -> None: ... + + def write(self, buffer: ReadableBuffer, /) -> int: ... + def seek(self, target: int, whence: int = 0, /) -> int: ... + def truncate(self, pos: int | None = None, /) -> int: ... + +@disjoint_base +class BufferedRandom(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of methods in the base classes + mode: str + name: Any + raw: RawIOBase + if sys.version_info >= (3, 14): + def __init__(self, raw: RawIOBase, buffer_size: int = 131072) -> None: ... + else: + def __init__(self, raw: RawIOBase, buffer_size: int = 8192) -> None: ... + + def seek(self, target: int, whence: int = 0, /) -> int: ... # stubtest needs this + def peek(self, size: int = 0, /) -> bytes: ... + def truncate(self, pos: int | None = None, /) -> int: ... + +@disjoint_base +class BufferedRWPair(BufferedIOBase, _BufferedIOBase, Generic[_BufferedReaderStreamT]): + if sys.version_info >= (3, 14): + def __init__(self, reader: _BufferedReaderStreamT, writer: RawIOBase, buffer_size: int = 131072, /) -> None: ... + else: + def __init__(self, reader: _BufferedReaderStreamT, writer: RawIOBase, buffer_size: int = 8192, /) -> None: ... + + def peek(self, size: int = 0, /) -> bytes: ... + +class _TextIOBase(_IOBase): + encoding: str + errors: str | None + newlines: str | tuple[str, ...] | None + def __iter__(self) -> Iterator[str]: ... # type: ignore[override] + def __next__(self) -> str: ... # type: ignore[override] + def detach(self) -> BinaryIO: ... + def write(self, s: str, /) -> int: ... + def writelines(self, lines: Iterable[str], /) -> None: ... # type: ignore[override] + def readline(self, size: int = -1, /) -> str: ... # type: ignore[override] + def readlines(self, hint: int = -1, /) -> list[str]: ... # type: ignore[override] + def read(self, size: int | None = -1, /) -> str: ... + +@type_check_only +class _WrappedBuffer(Protocol): + # "name" is wrapped by TextIOWrapper. Its type is inconsistent between + # the various I/O types. + @property + def name(self) -> Any: ... + @property + def closed(self) -> bool: ... + def read(self, size: int = ..., /) -> ReadableBuffer: ... + # Optional: def read1(self, size: int, /) -> ReadableBuffer: ... + def write(self, b: bytes, /) -> object: ... + def flush(self) -> object: ... + def close(self) -> object: ... + def seekable(self) -> bool: ... + def readable(self) -> bool: ... + def writable(self) -> bool: ... + def truncate(self, size: int, /) -> int: ... + def fileno(self) -> int: ... + def isatty(self) -> bool: ... + # Optional: Only needs to be present if seekable() returns True. + # def seek(self, offset: Literal[0], whence: Literal[2]) -> int: ... + # def tell(self) -> int: ... 
+ +_BufferT_co = TypeVar("_BufferT_co", bound=_WrappedBuffer, default=_WrappedBuffer, covariant=True) + +@disjoint_base +class TextIOWrapper(TextIOBase, _TextIOBase, TextIO, Generic[_BufferT_co]): # type: ignore[misc] # incompatible definitions of write in the base classes + def __init__( + self, + buffer: _BufferT_co, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + line_buffering: bool = False, + write_through: bool = False, + ) -> None: ... + # Equals the "buffer" argument passed in to the constructor. + @property + def buffer(self) -> _BufferT_co: ... # type: ignore[override] + @property + def line_buffering(self) -> bool: ... + @property + def write_through(self) -> bool: ... + def reconfigure( + self, + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + line_buffering: bool | None = None, + write_through: bool | None = None, + ) -> None: ... + def readline(self, size: int = -1, /) -> str: ... # type: ignore[override] + # Equals the "buffer" argument passed in to the constructor. + def detach(self) -> _BufferT_co: ... # type: ignore[override] + # TextIOWrapper's version of seek only supports a limited subset of + # operations. + def seek(self, cookie: int, whence: int = 0, /) -> int: ... + def truncate(self, pos: int | None = None, /) -> int: ... + +@disjoint_base +class StringIO(TextIOBase, _TextIOBase, TextIO): # type: ignore[misc] # incompatible definitions of write in the base classes + def __init__(self, initial_value: str | None = "", newline: str | None = "\n") -> None: ... + # StringIO does not contain a "name" field. This workaround is necessary + # to allow StringIO sub-classes to add this field, as it is defined + # as a read-only property on IO[]. + name: Any + def getvalue(self) -> str: ... + @property + def line_buffering(self) -> bool: ... + def seek(self, pos: int, whence: int = 0, /) -> int: ... + def truncate(self, pos: int | None = None, /) -> int: ... + +@disjoint_base +class IncrementalNewlineDecoder: + def __init__(self, decoder: codecs.IncrementalDecoder | None, translate: bool, errors: str = "strict") -> None: ... + def decode(self, input: ReadableBuffer | str, final: bool = False) -> str: ... + @property + def newlines(self) -> str | tuple[str, ...] | None: ... + def getstate(self) -> tuple[bytes, int]: ... + def reset(self) -> None: ... + def setstate(self, state: tuple[bytes, int], /) -> None: ... + +if sys.version_info >= (3, 10): + @overload + def text_encoding(encoding: None, stacklevel: int = 2, /) -> Literal["locale", "utf-8"]: ... + @overload + def text_encoding(encoding: _T, stacklevel: int = 2, /) -> _T: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_json.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_json.pyi new file mode 100644 index 0000000..4a77e5b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_json.pyi @@ -0,0 +1,51 @@ +from collections.abc import Callable +from typing import Any, final +from typing_extensions import Self + +@final +class make_encoder: + @property + def sort_keys(self) -> bool: ... + @property + def skipkeys(self) -> bool: ... + @property + def key_separator(self) -> str: ... + @property + def indent(self) -> str | None: ... + @property + def markers(self) -> dict[int, Any] | None: ... + @property + def default(self) -> Callable[[Any], Any]: ... + @property + def encoder(self) -> Callable[[str], str]: ... + @property + def item_separator(self) -> str: ... 
+ def __new__( + cls, + markers: dict[int, Any] | None, + default: Callable[[Any], Any], + encoder: Callable[[str], str], + indent: str | None, + key_separator: str, + item_separator: str, + sort_keys: bool, + skipkeys: bool, + allow_nan: bool, + ) -> Self: ... + def __call__(self, obj: object, _current_indent_level: int) -> Any: ... + +@final +class make_scanner: + object_hook: Any + object_pairs_hook: Any + parse_int: Any + parse_constant: Any + parse_float: Any + strict: bool + # TODO: 'context' needs the attrs above (ducktype), but not __call__. + def __new__(cls, context: make_scanner) -> Self: ... + def __call__(self, string: str, index: int) -> tuple[Any, int]: ... + +def encode_basestring(s: str, /) -> str: ... +def encode_basestring_ascii(s: str, /) -> str: ... +def scanstring(string: str, end: int, strict: bool = True) -> tuple[str, int]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_locale.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_locale.pyi new file mode 100644 index 0000000..ccce7a0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_locale.pyi @@ -0,0 +1,121 @@ +import sys +from _typeshed import StrPath +from typing import Final, Literal, TypedDict, type_check_only + +@type_check_only +class _LocaleConv(TypedDict): + decimal_point: str + grouping: list[int] + thousands_sep: str + int_curr_symbol: str + currency_symbol: str + p_cs_precedes: Literal[0, 1, 127] + n_cs_precedes: Literal[0, 1, 127] + p_sep_by_space: Literal[0, 1, 127] + n_sep_by_space: Literal[0, 1, 127] + mon_decimal_point: str + frac_digits: int + int_frac_digits: int + mon_thousands_sep: str + mon_grouping: list[int] + positive_sign: str + negative_sign: str + p_sign_posn: Literal[0, 1, 2, 3, 4, 127] + n_sign_posn: Literal[0, 1, 2, 3, 4, 127] + +LC_CTYPE: Final[int] +LC_COLLATE: Final[int] +LC_TIME: Final[int] +LC_MONETARY: Final[int] +LC_NUMERIC: Final[int] +LC_ALL: Final[int] +CHAR_MAX: Final = 127 + +def setlocale(category: int, locale: str | None = None, /) -> str: ... +def localeconv() -> _LocaleConv: ... + +if sys.version_info >= (3, 11): + def getencoding() -> str: ... + +def strcoll(os1: str, os2: str, /) -> int: ... +def strxfrm(string: str, /) -> str: ... 
+ +# native gettext functions +# https://docs.python.org/3/library/locale.html#access-to-message-catalogs +# https://github.com/python/cpython/blob/f4c03484da59049eb62a9bf7777b963e2267d187/Modules/_localemodule.c#L626 +if sys.platform != "win32": + LC_MESSAGES: int + + ABDAY_1: Final[int] + ABDAY_2: Final[int] + ABDAY_3: Final[int] + ABDAY_4: Final[int] + ABDAY_5: Final[int] + ABDAY_6: Final[int] + ABDAY_7: Final[int] + + ABMON_1: Final[int] + ABMON_2: Final[int] + ABMON_3: Final[int] + ABMON_4: Final[int] + ABMON_5: Final[int] + ABMON_6: Final[int] + ABMON_7: Final[int] + ABMON_8: Final[int] + ABMON_9: Final[int] + ABMON_10: Final[int] + ABMON_11: Final[int] + ABMON_12: Final[int] + + DAY_1: Final[int] + DAY_2: Final[int] + DAY_3: Final[int] + DAY_4: Final[int] + DAY_5: Final[int] + DAY_6: Final[int] + DAY_7: Final[int] + + ERA: Final[int] + ERA_D_T_FMT: Final[int] + ERA_D_FMT: Final[int] + ERA_T_FMT: Final[int] + + MON_1: Final[int] + MON_2: Final[int] + MON_3: Final[int] + MON_4: Final[int] + MON_5: Final[int] + MON_6: Final[int] + MON_7: Final[int] + MON_8: Final[int] + MON_9: Final[int] + MON_10: Final[int] + MON_11: Final[int] + MON_12: Final[int] + + CODESET: Final[int] + D_T_FMT: Final[int] + D_FMT: Final[int] + T_FMT: Final[int] + T_FMT_AMPM: Final[int] + AM_STR: Final[int] + PM_STR: Final[int] + + RADIXCHAR: Final[int] + THOUSEP: Final[int] + YESEXPR: Final[int] + NOEXPR: Final[int] + CRNCYSTR: Final[int] + ALT_DIGITS: Final[int] + + def nl_langinfo(key: int, /) -> str: ... + + # This is dependent on `libintl.h` which is a part of `gettext` + # system dependency. These functions might be missing. + # But, we always say that they are present. + def gettext(msg: str, /) -> str: ... + def dgettext(domain: str | None, msg: str, /) -> str: ... + def dcgettext(domain: str | None, msg: str, category: int, /) -> str: ... + def textdomain(domain: str | None, /) -> str: ... + def bindtextdomain(domain: str, dir: StrPath | None, /) -> str: ... + def bind_textdomain_codeset(domain: str, codeset: str | None, /) -> str | None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_lsprof.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_lsprof.pyi new file mode 100644 index 0000000..4f6d98b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_lsprof.pyi @@ -0,0 +1,37 @@ +import sys +from _typeshed import structseq +from collections.abc import Callable +from types import CodeType +from typing import Any, Final, final +from typing_extensions import disjoint_base + +@disjoint_base +class Profiler: + def __init__( + self, timer: Callable[[], float] | None = None, timeunit: float = 0.0, subcalls: bool = True, builtins: bool = True + ) -> None: ... + def getstats(self) -> list[profiler_entry]: ... + def enable(self, subcalls: bool = True, builtins: bool = True) -> None: ... + def disable(self) -> None: ... + def clear(self) -> None: ... 
+ +@final +class profiler_entry(structseq[Any], tuple[CodeType | str, int, int, float, float, list[profiler_subentry]]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("code", "callcount", "reccallcount", "totaltime", "inlinetime", "calls") + code: CodeType | str + callcount: int + reccallcount: int + totaltime: float + inlinetime: float + calls: list[profiler_subentry] + +@final +class profiler_subentry(structseq[Any], tuple[CodeType | str, int, int, float, float]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("code", "callcount", "reccallcount", "totaltime", "inlinetime") + code: CodeType | str + callcount: int + reccallcount: int + totaltime: float + inlinetime: float diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_lzma.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_lzma.pyi new file mode 100644 index 0000000..b38dce9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_lzma.pyi @@ -0,0 +1,71 @@ +import sys +from _typeshed import ReadableBuffer +from collections.abc import Mapping, Sequence +from typing import Any, Final, final +from typing_extensions import Self, TypeAlias + +_FilterChain: TypeAlias = Sequence[Mapping[str, Any]] + +FORMAT_AUTO: Final = 0 +FORMAT_XZ: Final = 1 +FORMAT_ALONE: Final = 2 +FORMAT_RAW: Final = 3 +CHECK_NONE: Final = 0 +CHECK_CRC32: Final = 1 +CHECK_CRC64: Final = 4 +CHECK_SHA256: Final = 10 +CHECK_ID_MAX: Final = 15 +CHECK_UNKNOWN: Final = 16 +FILTER_LZMA1: Final[int] # v big number +FILTER_LZMA2: Final = 33 +FILTER_DELTA: Final = 3 +FILTER_X86: Final = 4 +FILTER_IA64: Final = 6 +FILTER_ARM: Final = 7 +FILTER_ARMTHUMB: Final = 8 +FILTER_SPARC: Final = 9 +FILTER_POWERPC: Final = 5 +MF_HC3: Final = 3 +MF_HC4: Final = 4 +MF_BT2: Final = 18 +MF_BT3: Final = 19 +MF_BT4: Final = 20 +MODE_FAST: Final = 1 +MODE_NORMAL: Final = 2 +PRESET_DEFAULT: Final = 6 +PRESET_EXTREME: Final[int] # v big number + +@final +class LZMADecompressor: + if sys.version_info >= (3, 12): + def __new__(cls, format: int = 0, memlimit: int | None = None, filters: _FilterChain | None = None) -> Self: ... + else: + def __init__(self, format: int = 0, memlimit: int | None = None, filters: _FilterChain | None = None) -> None: ... + + def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: ... + @property + def check(self) -> int: ... + @property + def eof(self) -> bool: ... + @property + def unused_data(self) -> bytes: ... + @property + def needs_input(self) -> bool: ... + +@final +class LZMACompressor: + if sys.version_info >= (3, 12): + def __new__( + cls, format: int = 1, check: int = -1, preset: int | None = None, filters: _FilterChain | None = None + ) -> Self: ... + else: + def __init__( + self, format: int = 1, check: int = -1, preset: int | None = None, filters: _FilterChain | None = None + ) -> None: ... + + def compress(self, data: ReadableBuffer, /) -> bytes: ... + def flush(self) -> bytes: ... + +class LZMAError(Exception): ... + +def is_check_supported(check_id: int, /) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_markupbase.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_markupbase.pyi new file mode 100644 index 0000000..597bd09 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_markupbase.pyi @@ -0,0 +1,16 @@ +import sys +from typing import Any + +class ParserBase: + def reset(self) -> None: ... + def getpos(self) -> tuple[int, int]: ... + def unknown_decl(self, data: str) -> None: ... 
+ def parse_comment(self, i: int, report: bool = True) -> int: ... # undocumented + def parse_declaration(self, i: int) -> int: ... # undocumented + def parse_marked_section(self, i: int, report: bool = True) -> int: ... # undocumented + def updatepos(self, i: int, j: int) -> int: ... # undocumented + if sys.version_info < (3, 10): + # Removed from ParserBase: https://bugs.python.org/issue31844 + def error(self, message: str) -> Any: ... # undocumented + lineno: int # undocumented + offset: int # undocumented diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_msi.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_msi.pyi new file mode 100644 index 0000000..edceed5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_msi.pyi @@ -0,0 +1,97 @@ +import sys +from typing import Final, type_check_only + +if sys.platform == "win32": + class MSIError(Exception): ... + # Actual typename View, not exposed by the implementation + @type_check_only + class _View: + def Execute(self, params: _Record | None = ...) -> None: ... + def GetColumnInfo(self, kind: int) -> _Record: ... + def Fetch(self) -> _Record: ... + def Modify(self, mode: int, record: _Record) -> None: ... + def Close(self) -> None: ... + # Don't exist at runtime + __new__: None # type: ignore[assignment] + __init__: None # type: ignore[assignment] + + # Actual typename SummaryInformation, not exposed by the implementation + @type_check_only + class _SummaryInformation: + def GetProperty(self, field: int) -> int | bytes | None: ... + def GetPropertyCount(self) -> int: ... + def SetProperty(self, field: int, value: int | str) -> None: ... + def Persist(self) -> None: ... + # Don't exist at runtime + __new__: None # type: ignore[assignment] + __init__: None # type: ignore[assignment] + + # Actual typename Database, not exposed by the implementation + @type_check_only + class _Database: + def OpenView(self, sql: str) -> _View: ... + def Commit(self) -> None: ... + def GetSummaryInformation(self, updateCount: int) -> _SummaryInformation: ... + def Close(self) -> None: ... + # Don't exist at runtime + __new__: None # type: ignore[assignment] + __init__: None # type: ignore[assignment] + + # Actual typename Record, not exposed by the implementation + @type_check_only + class _Record: + def GetFieldCount(self) -> int: ... + def GetInteger(self, field: int) -> int: ... + def GetString(self, field: int) -> str: ... + def SetString(self, field: int, str: str) -> None: ... + def SetStream(self, field: int, stream: str) -> None: ... + def SetInteger(self, field: int, int: int) -> None: ... + def ClearData(self) -> None: ... + # Don't exist at runtime + __new__: None # type: ignore[assignment] + __init__: None # type: ignore[assignment] + + def UuidCreate() -> str: ... + def FCICreate(cabname: str, files: list[str], /) -> None: ... + def OpenDatabase(path: str, persist: int, /) -> _Database: ... + def CreateRecord(count: int, /) -> _Record: ... 
+ + MSICOLINFO_NAMES: Final[int] + MSICOLINFO_TYPES: Final[int] + MSIDBOPEN_CREATE: Final[int] + MSIDBOPEN_CREATEDIRECT: Final[int] + MSIDBOPEN_DIRECT: Final[int] + MSIDBOPEN_PATCHFILE: Final[int] + MSIDBOPEN_READONLY: Final[int] + MSIDBOPEN_TRANSACT: Final[int] + MSIMODIFY_ASSIGN: Final[int] + MSIMODIFY_DELETE: Final[int] + MSIMODIFY_INSERT: Final[int] + MSIMODIFY_INSERT_TEMPORARY: Final[int] + MSIMODIFY_MERGE: Final[int] + MSIMODIFY_REFRESH: Final[int] + MSIMODIFY_REPLACE: Final[int] + MSIMODIFY_SEEK: Final[int] + MSIMODIFY_UPDATE: Final[int] + MSIMODIFY_VALIDATE: Final[int] + MSIMODIFY_VALIDATE_DELETE: Final[int] + MSIMODIFY_VALIDATE_FIELD: Final[int] + MSIMODIFY_VALIDATE_NEW: Final[int] + + PID_APPNAME: Final[int] + PID_AUTHOR: Final[int] + PID_CHARCOUNT: Final[int] + PID_CODEPAGE: Final[int] + PID_COMMENTS: Final[int] + PID_CREATE_DTM: Final[int] + PID_KEYWORDS: Final[int] + PID_LASTAUTHOR: Final[int] + PID_LASTPRINTED: Final[int] + PID_LASTSAVE_DTM: Final[int] + PID_PAGECOUNT: Final[int] + PID_REVNUMBER: Final[int] + PID_SECURITY: Final[int] + PID_SUBJECT: Final[int] + PID_TEMPLATE: Final[int] + PID_TITLE: Final[int] + PID_WORDCOUNT: Final[int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_multibytecodec.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_multibytecodec.pyi new file mode 100644 index 0000000..abe58cb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_multibytecodec.pyi @@ -0,0 +1,49 @@ +from _typeshed import ReadableBuffer +from codecs import _ReadableStream, _WritableStream +from collections.abc import Iterable +from typing import final, type_check_only +from typing_extensions import disjoint_base + +# This class is not exposed. It calls itself _multibytecodec.MultibyteCodec. +@final +@type_check_only +class _MultibyteCodec: + def decode(self, input: ReadableBuffer, errors: str | None = None) -> str: ... + def encode(self, input: str, errors: str | None = None) -> bytes: ... + +@disjoint_base +class MultibyteIncrementalDecoder: + errors: str + def __init__(self, errors: str = "strict") -> None: ... + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + def getstate(self) -> tuple[bytes, int]: ... + def reset(self) -> None: ... + def setstate(self, state: tuple[bytes, int], /) -> None: ... + +@disjoint_base +class MultibyteIncrementalEncoder: + errors: str + def __init__(self, errors: str = "strict") -> None: ... + def encode(self, input: str, final: bool = False) -> bytes: ... + def getstate(self) -> int: ... + def reset(self) -> None: ... + def setstate(self, state: int, /) -> None: ... + +@disjoint_base +class MultibyteStreamReader: + errors: str + stream: _ReadableStream + def __init__(self, stream: _ReadableStream, errors: str = "strict") -> None: ... + def read(self, sizeobj: int | None = None, /) -> str: ... + def readline(self, sizeobj: int | None = None, /) -> str: ... + def readlines(self, sizehintobj: int | None = None, /) -> list[str]: ... + def reset(self) -> None: ... + +@disjoint_base +class MultibyteStreamWriter: + errors: str + stream: _WritableStream + def __init__(self, stream: _WritableStream, errors: str = "strict") -> None: ... + def reset(self) -> None: ... + def write(self, strobj: str, /) -> None: ... + def writelines(self, lines: Iterable[str], /) -> None: ... 
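The MultibyteIncrementalDecoder/MultibyteIncrementalEncoder stubs above describe the stateful machinery behind Python's CJK codecs. A minimal sketch of how that incremental API is exercised through the public codecs registry rather than by instantiating the _multibytecodec classes directly; the "euc-jp" encoding name, the sample string, and the strict error handler are only illustrative choices:

    import codecs

    # Incremental decoding: feed an EUC-JP byte sequence one byte at a time.
    # The decoder buffers incomplete multibyte sequences until final=True.
    decoder = codecs.getincrementaldecoder("euc-jp")(errors="strict")

    data = "こんにちは".encode("euc-jp")
    pieces = []
    for i, byte in enumerate(data):
        final = i == len(data) - 1
        pieces.append(decoder.decode(bytes([byte]), final))

    print("".join(pieces))  # -> こんにちは

    # getstate()/setstate() expose the buffered bytes plus an int flag,
    # matching the tuple[bytes, int] signature in the stub above.
    state = decoder.getstate()
    decoder.setstate(state)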
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_operator.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_operator.pyi new file mode 100644 index 0000000..cb1c1bc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_operator.pyi @@ -0,0 +1,122 @@ +import sys +from _typeshed import SupportsGetItem +from collections.abc import Callable, Container, Iterable, MutableMapping, MutableSequence, Sequence +from operator import attrgetter as attrgetter, itemgetter as itemgetter, methodcaller as methodcaller +from typing import Any, AnyStr, Protocol, SupportsAbs, SupportsIndex, TypeVar, overload, type_check_only +from typing_extensions import ParamSpec, TypeAlias, TypeIs + +_R = TypeVar("_R") +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_K = TypeVar("_K") +_V = TypeVar("_V") +_P = ParamSpec("_P") + +# The following protocols return "Any" instead of bool, since the comparison +# operators can be overloaded to return an arbitrary object. For example, +# the numpy.array comparison dunders return another numpy.array. + +@type_check_only +class _SupportsDunderLT(Protocol): + def __lt__(self, other: Any, /) -> Any: ... + +@type_check_only +class _SupportsDunderGT(Protocol): + def __gt__(self, other: Any, /) -> Any: ... + +@type_check_only +class _SupportsDunderLE(Protocol): + def __le__(self, other: Any, /) -> Any: ... + +@type_check_only +class _SupportsDunderGE(Protocol): + def __ge__(self, other: Any, /) -> Any: ... + +_SupportsComparison: TypeAlias = _SupportsDunderLE | _SupportsDunderGE | _SupportsDunderGT | _SupportsDunderLT + +@type_check_only +class _SupportsInversion(Protocol[_T_co]): + def __invert__(self) -> _T_co: ... + +@type_check_only +class _SupportsNeg(Protocol[_T_co]): + def __neg__(self) -> _T_co: ... + +@type_check_only +class _SupportsPos(Protocol[_T_co]): + def __pos__(self) -> _T_co: ... + +# All four comparison functions must have the same signature, or we get false-positive errors +def lt(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: ... +def le(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: ... +def eq(a: object, b: object, /) -> Any: ... +def ne(a: object, b: object, /) -> Any: ... +def ge(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: ... +def gt(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: ... +def not_(a: object, /) -> bool: ... +def truth(a: object, /) -> bool: ... +def is_(a: object, b: object, /) -> bool: ... +def is_not(a: object, b: object, /) -> bool: ... +def abs(a: SupportsAbs[_T], /) -> _T: ... +def add(a: Any, b: Any, /) -> Any: ... +def and_(a: Any, b: Any, /) -> Any: ... +def floordiv(a: Any, b: Any, /) -> Any: ... +def index(a: SupportsIndex, /) -> int: ... +def inv(a: _SupportsInversion[_T_co], /) -> _T_co: ... +def invert(a: _SupportsInversion[_T_co], /) -> _T_co: ... +def lshift(a: Any, b: Any, /) -> Any: ... +def mod(a: Any, b: Any, /) -> Any: ... +def mul(a: Any, b: Any, /) -> Any: ... +def matmul(a: Any, b: Any, /) -> Any: ... +def neg(a: _SupportsNeg[_T_co], /) -> _T_co: ... +def or_(a: Any, b: Any, /) -> Any: ... +def pos(a: _SupportsPos[_T_co], /) -> _T_co: ... +def pow(a: Any, b: Any, /) -> Any: ... +def rshift(a: Any, b: Any, /) -> Any: ... +def sub(a: Any, b: Any, /) -> Any: ... +def truediv(a: Any, b: Any, /) -> Any: ... +def xor(a: Any, b: Any, /) -> Any: ... +def concat(a: Sequence[_T], b: Sequence[_T], /) -> Sequence[_T]: ... +def contains(a: Container[object], b: object, /) -> bool: ... 
+def countOf(a: Iterable[object], b: object, /) -> int: ... +@overload +def delitem(a: MutableSequence[Any], b: SupportsIndex, /) -> None: ... +@overload +def delitem(a: MutableSequence[Any], b: slice, /) -> None: ... +@overload +def delitem(a: MutableMapping[_K, Any], b: _K, /) -> None: ... +@overload +def getitem(a: Sequence[_T], b: slice, /) -> Sequence[_T]: ... +@overload +def getitem(a: SupportsGetItem[_K, _V], b: _K, /) -> _V: ... +def indexOf(a: Iterable[_T], b: _T, /) -> int: ... +@overload +def setitem(a: MutableSequence[_T], b: SupportsIndex, c: _T, /) -> None: ... +@overload +def setitem(a: MutableSequence[_T], b: slice, c: Sequence[_T], /) -> None: ... +@overload +def setitem(a: MutableMapping[_K, _V], b: _K, c: _V, /) -> None: ... +def length_hint(obj: object, default: int = 0, /) -> int: ... +def iadd(a: Any, b: Any, /) -> Any: ... +def iand(a: Any, b: Any, /) -> Any: ... +def iconcat(a: Any, b: Any, /) -> Any: ... +def ifloordiv(a: Any, b: Any, /) -> Any: ... +def ilshift(a: Any, b: Any, /) -> Any: ... +def imod(a: Any, b: Any, /) -> Any: ... +def imul(a: Any, b: Any, /) -> Any: ... +def imatmul(a: Any, b: Any, /) -> Any: ... +def ior(a: Any, b: Any, /) -> Any: ... +def ipow(a: Any, b: Any, /) -> Any: ... +def irshift(a: Any, b: Any, /) -> Any: ... +def isub(a: Any, b: Any, /) -> Any: ... +def itruediv(a: Any, b: Any, /) -> Any: ... +def ixor(a: Any, b: Any, /) -> Any: ... + +if sys.version_info >= (3, 11): + def call(obj: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: ... + +def _compare_digest(a: AnyStr, b: AnyStr, /) -> bool: ... + +if sys.version_info >= (3, 14): + def is_none(a: object, /) -> TypeIs[None]: ... + def is_not_none(a: _T | None, /) -> TypeIs[_T]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_osx_support.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_osx_support.pyi new file mode 100644 index 0000000..fb00e69 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_osx_support.pyi @@ -0,0 +1,34 @@ +from collections.abc import Iterable, Sequence +from typing import Final, TypeVar + +_T = TypeVar("_T") +_K = TypeVar("_K") +_V = TypeVar("_V") + +__all__ = ["compiler_fixup", "customize_config_vars", "customize_compiler", "get_platform_osx"] + +_UNIVERSAL_CONFIG_VARS: Final[tuple[str, ...]] # undocumented +_COMPILER_CONFIG_VARS: Final[tuple[str, ...]] # undocumented +_INITPRE: Final[str] # undocumented + +def _find_executable(executable: str, path: str | None = None) -> str | None: ... # undocumented +def _read_output(commandstring: str, capture_stderr: bool = False) -> str | None: ... # undocumented +def _find_build_tool(toolname: str) -> str: ... # undocumented + +_SYSTEM_VERSION: Final[str | None] # undocumented + +def _get_system_version() -> str: ... # undocumented +def _remove_original_values(_config_vars: dict[str, str]) -> None: ... # undocumented +def _save_modified_value(_config_vars: dict[str, str], cv: str, newvalue: str) -> None: ... # undocumented +def _supports_universal_builds() -> bool: ... # undocumented +def _find_appropriate_compiler(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented +def _remove_universal_flags(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented +def _remove_unsupported_archs(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented +def _override_all_archs(_config_vars: dict[str, str]) -> dict[str, str]: ... 
# undocumented +def _check_for_unavailable_sdk(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented +def compiler_fixup(compiler_so: Iterable[str], cc_args: Sequence[str]) -> list[str]: ... +def customize_config_vars(_config_vars: dict[str, str]) -> dict[str, str]: ... +def customize_compiler(_config_vars: dict[str, str]) -> dict[str, str]: ... +def get_platform_osx( + _config_vars: dict[str, str], osname: _T, release: _K, machine: _V +) -> tuple[str | _T, str | _K, str | _V]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_pickle.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_pickle.pyi new file mode 100644 index 0000000..544f787 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_pickle.pyi @@ -0,0 +1,107 @@ +from _typeshed import ReadableBuffer, SupportsWrite +from collections.abc import Callable, Iterable, Iterator, Mapping +from pickle import PickleBuffer as PickleBuffer +from typing import Any, Protocol, type_check_only +from typing_extensions import TypeAlias, disjoint_base + +@type_check_only +class _ReadableFileobj(Protocol): + def read(self, n: int, /) -> bytes: ... + def readline(self) -> bytes: ... + +_BufferCallback: TypeAlias = Callable[[PickleBuffer], Any] | None + +_ReducedType: TypeAlias = ( + str + | tuple[Callable[..., Any], tuple[Any, ...]] + | tuple[Callable[..., Any], tuple[Any, ...], Any] + | tuple[Callable[..., Any], tuple[Any, ...], Any, Iterator[Any] | None] + | tuple[Callable[..., Any], tuple[Any, ...], Any, Iterator[Any] | None, Iterator[Any] | None] +) + +def dump( + obj: Any, + file: SupportsWrite[bytes], + protocol: int | None = None, + *, + fix_imports: bool = True, + buffer_callback: _BufferCallback = None, +) -> None: ... +def dumps( + obj: Any, protocol: int | None = None, *, fix_imports: bool = True, buffer_callback: _BufferCallback = None +) -> bytes: ... +def load( + file: _ReadableFileobj, + *, + fix_imports: bool = True, + encoding: str = "ASCII", + errors: str = "strict", + buffers: Iterable[Any] | None = (), +) -> Any: ... +def loads( + data: ReadableBuffer, + /, + *, + fix_imports: bool = True, + encoding: str = "ASCII", + errors: str = "strict", + buffers: Iterable[Any] | None = (), +) -> Any: ... + +class PickleError(Exception): ... +class PicklingError(PickleError): ... +class UnpicklingError(PickleError): ... + +@type_check_only +class PicklerMemoProxy: + def clear(self, /) -> None: ... + def copy(self, /) -> dict[int, tuple[int, Any]]: ... + +@disjoint_base +class Pickler: + fast: bool + dispatch_table: Mapping[type, Callable[[Any], _ReducedType]] + reducer_override: Callable[[Any], Any] + bin: bool # undocumented + def __init__( + self, + file: SupportsWrite[bytes], + protocol: int | None = None, + fix_imports: bool = True, + buffer_callback: _BufferCallback = None, + ) -> None: ... + @property + def memo(self) -> PicklerMemoProxy: ... + @memo.setter + def memo(self, value: PicklerMemoProxy | dict[int, tuple[int, Any]]) -> None: ... + def dump(self, obj: Any, /) -> None: ... + def clear_memo(self) -> None: ... + + # this method has no default implementation for Python < 3.13 + def persistent_id(self, obj: Any, /) -> Any: ... + +@type_check_only +class UnpicklerMemoProxy: + def clear(self, /) -> None: ... + def copy(self, /) -> dict[int, tuple[int, Any]]: ... 
+ +@disjoint_base +class Unpickler: + def __init__( + self, + file: _ReadableFileobj, + *, + fix_imports: bool = True, + encoding: str = "ASCII", + errors: str = "strict", + buffers: Iterable[Any] | None = (), + ) -> None: ... + @property + def memo(self) -> UnpicklerMemoProxy: ... + @memo.setter + def memo(self, value: UnpicklerMemoProxy | dict[int, tuple[int, Any]]) -> None: ... + def load(self) -> Any: ... + def find_class(self, module_name: str, global_name: str, /) -> Any: ... + + # this method has no default implementation for Python < 3.13 + def persistent_load(self, pid: Any, /) -> Any: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_posixsubprocess.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_posixsubprocess.pyi new file mode 100644 index 0000000..dd74e31 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_posixsubprocess.pyi @@ -0,0 +1,59 @@ +import sys +from _typeshed import StrOrBytesPath +from collections.abc import Callable, Sequence +from typing import SupportsIndex + +if sys.platform != "win32": + if sys.version_info >= (3, 14): + def fork_exec( + args: Sequence[StrOrBytesPath] | None, + executable_list: Sequence[bytes], + close_fds: bool, + pass_fds: tuple[int, ...], + cwd: str, + env: Sequence[bytes] | None, + p2cread: int, + p2cwrite: int, + c2pread: int, + c2pwrite: int, + errread: int, + errwrite: int, + errpipe_read: int, + errpipe_write: int, + restore_signals: int, + call_setsid: int, + pgid_to_set: int, + gid: SupportsIndex | None, + extra_groups: list[int] | None, + uid: SupportsIndex | None, + child_umask: int, + preexec_fn: Callable[[], None], + /, + ) -> int: ... + else: + def fork_exec( + args: Sequence[StrOrBytesPath] | None, + executable_list: Sequence[bytes], + close_fds: bool, + pass_fds: tuple[int, ...], + cwd: str, + env: Sequence[bytes] | None, + p2cread: int, + p2cwrite: int, + c2pread: int, + c2pwrite: int, + errread: int, + errwrite: int, + errpipe_read: int, + errpipe_write: int, + restore_signals: bool, + call_setsid: bool, + pgid_to_set: int, + gid: SupportsIndex | None, + extra_groups: list[int] | None, + uid: SupportsIndex | None, + child_umask: int, + preexec_fn: Callable[[], None], + allow_vfork: bool, + /, + ) -> int: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_py_abc.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_py_abc.pyi new file mode 100644 index 0000000..1260717 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_py_abc.pyi @@ -0,0 +1,14 @@ +import _typeshed +from typing import Any, NewType, TypeVar + +_T = TypeVar("_T") + +_CacheToken = NewType("_CacheToken", int) + +def get_cache_token() -> _CacheToken: ... + +class ABCMeta(type): + def __new__( + mcls: type[_typeshed.Self], name: str, bases: tuple[type[Any], ...], namespace: dict[str, Any], / + ) -> _typeshed.Self: ... + def register(cls, subclass: type[_T]) -> type[_T]: ... 
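The Pickler/Unpickler stubs above note that persistent_id() and persistent_load() have no default implementation before Python 3.13; in practice they are supplied by subclassing, per the documented pickle persistent-ID protocol. A minimal sketch under that assumption; the DatabaseRecord class and the records dict are made-up stand-ins for a real external store:

    import io
    import pickle

    # Hypothetical out-of-band store keyed by integer id.
    records = {1: {"name": "alice"}}

    class DatabaseRecord:
        def __init__(self, key: int) -> None:
            self.key = key

    class RefPickler(pickle.Pickler):
        def persistent_id(self, obj):
            # Emit a persistent ID instead of pickling the object's state.
            if isinstance(obj, DatabaseRecord):
                return ("DatabaseRecord", obj.key)
            return None  # everything else is pickled normally

    class RefUnpickler(pickle.Unpickler):
        def persistent_load(self, pid):
            # Resolve the persistent ID back into a live object.
            tag, key = pid
            assert tag == "DatabaseRecord"
            return records[key]

    buf = io.BytesIO()
    RefPickler(buf).dump({"record": DatabaseRecord(1)})
    buf.seek(0)
    print(RefUnpickler(buf).load())  # -> {'record': {'name': 'alice'}}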
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_pydecimal.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_pydecimal.pyi new file mode 100644 index 0000000..a6723f7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_pydecimal.pyi @@ -0,0 +1,47 @@ +# This is a slight lie, the implementations aren't exactly identical +# However, in all likelihood, the differences are inconsequential +import sys +from _decimal import * + +__all__ = [ + "Decimal", + "Context", + "DecimalTuple", + "DefaultContext", + "BasicContext", + "ExtendedContext", + "DecimalException", + "Clamped", + "InvalidOperation", + "DivisionByZero", + "Inexact", + "Rounded", + "Subnormal", + "Overflow", + "Underflow", + "FloatOperation", + "DivisionImpossible", + "InvalidContext", + "ConversionSyntax", + "DivisionUndefined", + "ROUND_DOWN", + "ROUND_HALF_UP", + "ROUND_HALF_EVEN", + "ROUND_CEILING", + "ROUND_FLOOR", + "ROUND_UP", + "ROUND_HALF_DOWN", + "ROUND_05UP", + "setcontext", + "getcontext", + "localcontext", + "MAX_PREC", + "MAX_EMAX", + "MIN_EMIN", + "MIN_ETINY", + "HAVE_THREADS", + "HAVE_CONTEXTVAR", +] + +if sys.version_info >= (3, 14): + __all__ += ["IEEEContext", "IEEE_CONTEXT_MAX_BITS"] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_queue.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_queue.pyi new file mode 100644 index 0000000..edd484a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_queue.pyi @@ -0,0 +1,18 @@ +from types import GenericAlias +from typing import Any, Generic, TypeVar +from typing_extensions import disjoint_base + +_T = TypeVar("_T") + +class Empty(Exception): ... + +@disjoint_base +class SimpleQueue(Generic[_T]): + def __init__(self) -> None: ... + def empty(self) -> bool: ... + def get(self, block: bool = True, timeout: float | None = None) -> _T: ... + def get_nowait(self) -> _T: ... + def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: ... + def put_nowait(self, item: _T) -> None: ... + def qsize(self) -> int: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_random.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_random.pyi new file mode 100644 index 0000000..ac00fdf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_random.pyi @@ -0,0 +1,18 @@ +import sys +from typing_extensions import Self, TypeAlias, disjoint_base + +# Actually Tuple[(int,) * 625] +_State: TypeAlias = tuple[int, ...] + +@disjoint_base +class Random: + if sys.version_info >= (3, 10): + def __init__(self, seed: object = ..., /) -> None: ... + else: + def __new__(self, seed: object = ..., /) -> Self: ... + + def seed(self, n: object = None, /) -> None: ... + def getstate(self) -> _State: ... + def setstate(self, state: _State, /) -> None: ... + def random(self) -> float: ... + def getrandbits(self, k: int, /) -> int: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_sitebuiltins.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_sitebuiltins.pyi new file mode 100644 index 0000000..eb6c811 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_sitebuiltins.pyi @@ -0,0 +1,17 @@ +import sys +from collections.abc import Iterable +from typing import ClassVar, Literal, NoReturn + +class Quitter: + name: str + eof: str + def __init__(self, name: str, eof: str) -> None: ... 
+ def __call__(self, code: sys._ExitCode = None) -> NoReturn: ... + +class _Printer: + MAXLINES: ClassVar[Literal[23]] + def __init__(self, name: str, data: str, files: Iterable[str] = (), dirs: Iterable[str] = ()) -> None: ... + def __call__(self) -> None: ... + +class _Helper: + def __call__(self, request: object = ...) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_socket.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_socket.pyi new file mode 100644 index 0000000..cdad886 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_socket.pyi @@ -0,0 +1,858 @@ +import sys +from _typeshed import ReadableBuffer, WriteableBuffer +from collections.abc import Iterable +from socket import error as error, gaierror as gaierror, herror as herror, timeout as timeout +from typing import Any, Final, SupportsIndex, overload +from typing_extensions import CapsuleType, TypeAlias, disjoint_base + +_CMSG: TypeAlias = tuple[int, int, bytes] +_CMSGArg: TypeAlias = tuple[int, int, ReadableBuffer] + +# Addresses can be either tuples of varying lengths (AF_INET, AF_INET6, +# AF_NETLINK, AF_TIPC) or strings/buffers (AF_UNIX). +# See getsockaddrarg() in socketmodule.c. +_Address: TypeAlias = tuple[Any, ...] | str | ReadableBuffer +_RetAddress: TypeAlias = Any + +# ===== Constants ===== +# This matches the order in the CPython documentation +# https://docs.python.org/3/library/socket.html#constants + +if sys.platform != "win32": + AF_UNIX: Final[int] + +AF_INET: Final[int] +AF_INET6: Final[int] + +AF_UNSPEC: Final[int] + +SOCK_STREAM: Final[int] +SOCK_DGRAM: Final[int] +SOCK_RAW: Final[int] +SOCK_RDM: Final[int] +SOCK_SEQPACKET: Final[int] + +if sys.platform == "linux": + # Availability: Linux >= 2.6.27 + SOCK_CLOEXEC: Final[int] + SOCK_NONBLOCK: Final[int] + +# -------------------- +# Many constants of these forms, documented in the Unix documentation on +# sockets and/or the IP protocol, are also defined in the socket module. 
+# SO_* +# socket.SOMAXCONN +# MSG_* +# SOL_* +# SCM_* +# IPPROTO_* +# IPPORT_* +# INADDR_* +# IP_* +# IPV6_* +# EAI_* +# AI_* +# NI_* +# TCP_* +# -------------------- + +SO_ACCEPTCONN: Final[int] +SO_BROADCAST: Final[int] +SO_DEBUG: Final[int] +SO_DONTROUTE: Final[int] +SO_ERROR: Final[int] +SO_KEEPALIVE: Final[int] +SO_LINGER: Final[int] +SO_OOBINLINE: Final[int] +SO_RCVBUF: Final[int] +SO_RCVLOWAT: Final[int] +SO_RCVTIMEO: Final[int] +SO_REUSEADDR: Final[int] +SO_SNDBUF: Final[int] +SO_SNDLOWAT: Final[int] +SO_SNDTIMEO: Final[int] +SO_TYPE: Final[int] +if sys.platform != "linux": + SO_USELOOPBACK: Final[int] +if sys.platform == "win32": + SO_EXCLUSIVEADDRUSE: Final[int] +if sys.platform != "win32": + SO_REUSEPORT: Final[int] + if sys.platform != "darwin" or sys.version_info >= (3, 13): + SO_BINDTODEVICE: Final[int] + +if sys.platform != "win32" and sys.platform != "darwin": + SO_DOMAIN: Final[int] + SO_MARK: Final[int] + SO_PASSCRED: Final[int] + SO_PASSSEC: Final[int] + SO_PEERCRED: Final[int] + SO_PEERSEC: Final[int] + SO_PRIORITY: Final[int] + SO_PROTOCOL: Final[int] +if sys.platform != "win32" and sys.platform != "darwin" and sys.platform != "linux": + SO_SETFIB: Final[int] +if sys.platform == "linux" and sys.version_info >= (3, 13): + SO_BINDTOIFINDEX: Final[int] + +SOMAXCONN: Final[int] + +MSG_CTRUNC: Final[int] +MSG_DONTROUTE: Final[int] +MSG_OOB: Final[int] +MSG_PEEK: Final[int] +MSG_TRUNC: Final[int] +MSG_WAITALL: Final[int] +if sys.platform != "win32": + MSG_DONTWAIT: Final[int] + MSG_EOR: Final[int] + MSG_NOSIGNAL: Final[int] # Sometimes this exists on darwin, sometimes not +if sys.platform != "darwin": + MSG_ERRQUEUE: Final[int] +if sys.platform == "win32": + MSG_BCAST: Final[int] + MSG_MCAST: Final[int] +if sys.platform != "win32" and sys.platform != "darwin": + MSG_CMSG_CLOEXEC: Final[int] + MSG_CONFIRM: Final[int] + MSG_FASTOPEN: Final[int] + MSG_MORE: Final[int] +if sys.platform != "win32" and sys.platform != "linux": + MSG_EOF: Final[int] +if sys.platform != "win32" and sys.platform != "linux" and sys.platform != "darwin": + MSG_NOTIFICATION: Final[int] + MSG_BTAG: Final[int] # Not FreeBSD either + MSG_ETAG: Final[int] # Not FreeBSD either + +SOL_IP: Final[int] +SOL_SOCKET: Final[int] +SOL_TCP: Final[int] +SOL_UDP: Final[int] +if sys.platform != "win32" and sys.platform != "darwin": + # Defined in socket.h for Linux, but these aren't always present for + # some reason. 
+ SOL_ATALK: Final[int] + SOL_AX25: Final[int] + SOL_HCI: Final[int] + SOL_IPX: Final[int] + SOL_NETROM: Final[int] + SOL_ROSE: Final[int] + +if sys.platform != "win32": + SCM_RIGHTS: Final[int] +if sys.platform != "win32" and sys.platform != "darwin": + SCM_CREDENTIALS: Final[int] +if sys.platform != "win32" and sys.platform != "linux": + SCM_CREDS: Final[int] + +IPPROTO_ICMP: Final[int] +IPPROTO_IP: Final[int] +IPPROTO_RAW: Final[int] +IPPROTO_TCP: Final[int] +IPPROTO_UDP: Final[int] +IPPROTO_AH: Final[int] +IPPROTO_DSTOPTS: Final[int] +IPPROTO_EGP: Final[int] +IPPROTO_ESP: Final[int] +IPPROTO_FRAGMENT: Final[int] +IPPROTO_HOPOPTS: Final[int] +IPPROTO_ICMPV6: Final[int] +IPPROTO_IDP: Final[int] +IPPROTO_IGMP: Final[int] +IPPROTO_IPV6: Final[int] +IPPROTO_NONE: Final[int] +IPPROTO_PIM: Final[int] +IPPROTO_PUP: Final[int] +IPPROTO_ROUTING: Final[int] +IPPROTO_SCTP: Final[int] +if sys.platform != "linux": + IPPROTO_GGP: Final[int] + IPPROTO_IPV4: Final[int] + IPPROTO_MAX: Final[int] + IPPROTO_ND: Final[int] +if sys.platform == "win32": + IPPROTO_CBT: Final[int] + IPPROTO_ICLFXBM: Final[int] + IPPROTO_IGP: Final[int] + IPPROTO_L2TP: Final[int] + IPPROTO_PGM: Final[int] + IPPROTO_RDP: Final[int] + IPPROTO_ST: Final[int] +if sys.platform != "win32": + IPPROTO_GRE: Final[int] + IPPROTO_IPIP: Final[int] + IPPROTO_RSVP: Final[int] + IPPROTO_TP: Final[int] +if sys.platform != "win32" and sys.platform != "linux": + IPPROTO_EON: Final[int] + IPPROTO_HELLO: Final[int] + IPPROTO_IPCOMP: Final[int] + IPPROTO_XTP: Final[int] +if sys.platform != "win32" and sys.platform != "darwin" and sys.platform != "linux": + IPPROTO_BIP: Final[int] # Not FreeBSD either + IPPROTO_MOBILE: Final[int] # Not FreeBSD either + IPPROTO_VRRP: Final[int] # Not FreeBSD either +if sys.platform == "linux": + # Availability: Linux >= 2.6.20, FreeBSD >= 10.1 + IPPROTO_UDPLITE: Final[int] +if sys.version_info >= (3, 10) and sys.platform == "linux": + IPPROTO_MPTCP: Final[int] + +IPPORT_RESERVED: Final[int] +IPPORT_USERRESERVED: Final[int] + +INADDR_ALLHOSTS_GROUP: Final[int] +INADDR_ANY: Final[int] +INADDR_BROADCAST: Final[int] +INADDR_LOOPBACK: Final[int] +INADDR_MAX_LOCAL_GROUP: Final[int] +INADDR_NONE: Final[int] +INADDR_UNSPEC_GROUP: Final[int] + +IP_ADD_MEMBERSHIP: Final[int] +IP_DROP_MEMBERSHIP: Final[int] +IP_HDRINCL: Final[int] +IP_MULTICAST_IF: Final[int] +IP_MULTICAST_LOOP: Final[int] +IP_MULTICAST_TTL: Final[int] +IP_OPTIONS: Final[int] +if sys.platform != "linux": + IP_RECVDSTADDR: Final[int] +if sys.version_info >= (3, 10): + IP_RECVTOS: Final[int] +IP_TOS: Final[int] +IP_TTL: Final[int] +if sys.platform != "win32": + IP_DEFAULT_MULTICAST_LOOP: Final[int] + IP_DEFAULT_MULTICAST_TTL: Final[int] + IP_MAX_MEMBERSHIPS: Final[int] + IP_RECVOPTS: Final[int] + IP_RECVRETOPTS: Final[int] + IP_RETOPTS: Final[int] +if sys.version_info >= (3, 13) and sys.platform == "linux": + CAN_RAW_ERR_FILTER: Final[int] +if sys.version_info >= (3, 14): + IP_RECVTTL: Final[int] + + if sys.platform == "win32" or sys.platform == "linux": + IPV6_RECVERR: Final[int] + IP_RECVERR: Final[int] + SO_ORIGINAL_DST: Final[int] + + if sys.platform == "win32": + SOL_RFCOMM: Final[int] + SO_BTH_ENCRYPT: Final[int] + SO_BTH_MTU: Final[int] + SO_BTH_MTU_MAX: Final[int] + SO_BTH_MTU_MIN: Final[int] + TCP_QUICKACK: Final[int] + + if sys.platform == "linux": + IP_FREEBIND: Final[int] + IP_RECVORIGDSTADDR: Final[int] + VMADDR_CID_LOCAL: Final[int] + +if sys.platform != "win32" and sys.platform != "darwin": + IP_TRANSPARENT: Final[int] +if sys.platform != "win32" 
and sys.platform != "darwin" and sys.version_info >= (3, 11): + IP_BIND_ADDRESS_NO_PORT: Final[int] +if sys.version_info >= (3, 12): + IP_ADD_SOURCE_MEMBERSHIP: Final[int] + IP_BLOCK_SOURCE: Final[int] + IP_DROP_SOURCE_MEMBERSHIP: Final[int] + IP_PKTINFO: Final[int] + IP_UNBLOCK_SOURCE: Final[int] + +IPV6_CHECKSUM: Final[int] +IPV6_JOIN_GROUP: Final[int] +IPV6_LEAVE_GROUP: Final[int] +IPV6_MULTICAST_HOPS: Final[int] +IPV6_MULTICAST_IF: Final[int] +IPV6_MULTICAST_LOOP: Final[int] +IPV6_RECVTCLASS: Final[int] +IPV6_TCLASS: Final[int] +IPV6_UNICAST_HOPS: Final[int] +IPV6_V6ONLY: Final[int] +IPV6_DONTFRAG: Final[int] +IPV6_HOPLIMIT: Final[int] +IPV6_HOPOPTS: Final[int] +IPV6_PKTINFO: Final[int] +IPV6_RECVRTHDR: Final[int] +IPV6_RTHDR: Final[int] +if sys.platform != "win32": + IPV6_RTHDR_TYPE_0: Final[int] + IPV6_DSTOPTS: Final[int] + IPV6_NEXTHOP: Final[int] + IPV6_PATHMTU: Final[int] + IPV6_RECVDSTOPTS: Final[int] + IPV6_RECVHOPLIMIT: Final[int] + IPV6_RECVHOPOPTS: Final[int] + IPV6_RECVPATHMTU: Final[int] + IPV6_RECVPKTINFO: Final[int] + IPV6_RTHDRDSTOPTS: Final[int] + +if sys.platform != "win32" and sys.platform != "linux": + IPV6_USE_MIN_MTU: Final[int] + +EAI_AGAIN: Final[int] +EAI_BADFLAGS: Final[int] +EAI_FAIL: Final[int] +EAI_FAMILY: Final[int] +EAI_MEMORY: Final[int] +EAI_NODATA: Final[int] +EAI_NONAME: Final[int] +EAI_SERVICE: Final[int] +EAI_SOCKTYPE: Final[int] +if sys.platform != "win32": + EAI_ADDRFAMILY: Final[int] + EAI_OVERFLOW: Final[int] + EAI_SYSTEM: Final[int] +if sys.platform != "win32" and sys.platform != "linux": + EAI_BADHINTS: Final[int] + EAI_MAX: Final[int] + EAI_PROTOCOL: Final[int] + +AI_ADDRCONFIG: Final[int] +AI_ALL: Final[int] +AI_CANONNAME: Final[int] +AI_NUMERICHOST: Final[int] +AI_NUMERICSERV: Final[int] +AI_PASSIVE: Final[int] +AI_V4MAPPED: Final[int] +if sys.platform != "win32" and sys.platform != "linux": + AI_DEFAULT: Final[int] + AI_MASK: Final[int] + AI_V4MAPPED_CFG: Final[int] + +NI_DGRAM: Final[int] +NI_MAXHOST: Final[int] +NI_MAXSERV: Final[int] +NI_NAMEREQD: Final[int] +NI_NOFQDN: Final[int] +NI_NUMERICHOST: Final[int] +NI_NUMERICSERV: Final[int] +if sys.platform == "linux" and sys.version_info >= (3, 13): + NI_IDN: Final[int] + +TCP_FASTOPEN: Final[int] +TCP_KEEPCNT: Final[int] +TCP_KEEPINTVL: Final[int] +TCP_MAXSEG: Final[int] +TCP_NODELAY: Final[int] +if sys.platform != "win32": + TCP_NOTSENT_LOWAT: Final[int] +if sys.platform != "darwin": + TCP_KEEPIDLE: Final[int] +if sys.version_info >= (3, 10) and sys.platform == "darwin": + TCP_KEEPALIVE: Final[int] +if sys.version_info >= (3, 11) and sys.platform == "darwin": + TCP_CONNECTION_INFO: Final[int] + +if sys.platform != "win32" and sys.platform != "darwin": + TCP_CONGESTION: Final[int] + TCP_CORK: Final[int] + TCP_DEFER_ACCEPT: Final[int] + TCP_INFO: Final[int] + TCP_LINGER2: Final[int] + TCP_QUICKACK: Final[int] + TCP_SYNCNT: Final[int] + TCP_USER_TIMEOUT: Final[int] + TCP_WINDOW_CLAMP: Final[int] +if sys.platform == "linux" and sys.version_info >= (3, 12): + TCP_CC_INFO: Final[int] + TCP_FASTOPEN_CONNECT: Final[int] + TCP_FASTOPEN_KEY: Final[int] + TCP_FASTOPEN_NO_COOKIE: Final[int] + TCP_INQ: Final[int] + TCP_MD5SIG: Final[int] + TCP_MD5SIG_EXT: Final[int] + TCP_QUEUE_SEQ: Final[int] + TCP_REPAIR: Final[int] + TCP_REPAIR_OPTIONS: Final[int] + TCP_REPAIR_QUEUE: Final[int] + TCP_REPAIR_WINDOW: Final[int] + TCP_SAVED_SYN: Final[int] + TCP_SAVE_SYN: Final[int] + TCP_THIN_DUPACK: Final[int] + TCP_THIN_LINEAR_TIMEOUTS: Final[int] + TCP_TIMESTAMP: Final[int] + TCP_TX_DELAY: Final[int] + TCP_ULP: 
Final[int] + TCP_ZEROCOPY_RECEIVE: Final[int] + +# -------------------- +# Specifically documented constants +# -------------------- + +if sys.platform == "linux": + # Availability: Linux >= 2.6.25, NetBSD >= 8 + AF_CAN: Final[int] + PF_CAN: Final[int] + SOL_CAN_BASE: Final[int] + SOL_CAN_RAW: Final[int] + CAN_EFF_FLAG: Final[int] + CAN_EFF_MASK: Final[int] + CAN_ERR_FLAG: Final[int] + CAN_ERR_MASK: Final[int] + CAN_RAW: Final[int] + CAN_RAW_FILTER: Final[int] + CAN_RAW_LOOPBACK: Final[int] + CAN_RAW_RECV_OWN_MSGS: Final[int] + CAN_RTR_FLAG: Final[int] + CAN_SFF_MASK: Final[int] + if sys.version_info < (3, 11): + CAN_RAW_ERR_FILTER: Final[int] + +if sys.platform == "linux": + # Availability: Linux >= 2.6.25 + CAN_BCM: Final[int] + CAN_BCM_TX_SETUP: Final[int] + CAN_BCM_TX_DELETE: Final[int] + CAN_BCM_TX_READ: Final[int] + CAN_BCM_TX_SEND: Final[int] + CAN_BCM_RX_SETUP: Final[int] + CAN_BCM_RX_DELETE: Final[int] + CAN_BCM_RX_READ: Final[int] + CAN_BCM_TX_STATUS: Final[int] + CAN_BCM_TX_EXPIRED: Final[int] + CAN_BCM_RX_STATUS: Final[int] + CAN_BCM_RX_TIMEOUT: Final[int] + CAN_BCM_RX_CHANGED: Final[int] + CAN_BCM_SETTIMER: Final[int] + CAN_BCM_STARTTIMER: Final[int] + CAN_BCM_TX_COUNTEVT: Final[int] + CAN_BCM_TX_ANNOUNCE: Final[int] + CAN_BCM_TX_CP_CAN_ID: Final[int] + CAN_BCM_RX_FILTER_ID: Final[int] + CAN_BCM_RX_CHECK_DLC: Final[int] + CAN_BCM_RX_NO_AUTOTIMER: Final[int] + CAN_BCM_RX_ANNOUNCE_RESUME: Final[int] + CAN_BCM_TX_RESET_MULTI_IDX: Final[int] + CAN_BCM_RX_RTR_FRAME: Final[int] + CAN_BCM_CAN_FD_FRAME: Final[int] + +if sys.platform == "linux": + # Availability: Linux >= 3.6 + CAN_RAW_FD_FRAMES: Final[int] + # Availability: Linux >= 4.1 + CAN_RAW_JOIN_FILTERS: Final[int] + # Availability: Linux >= 2.6.25 + CAN_ISOTP: Final[int] + # Availability: Linux >= 5.4 + CAN_J1939: Final[int] + + J1939_MAX_UNICAST_ADDR: Final[int] + J1939_IDLE_ADDR: Final[int] + J1939_NO_ADDR: Final[int] + J1939_NO_NAME: Final[int] + J1939_PGN_REQUEST: Final[int] + J1939_PGN_ADDRESS_CLAIMED: Final[int] + J1939_PGN_ADDRESS_COMMANDED: Final[int] + J1939_PGN_PDU1_MAX: Final[int] + J1939_PGN_MAX: Final[int] + J1939_NO_PGN: Final[int] + + SO_J1939_FILTER: Final[int] + SO_J1939_PROMISC: Final[int] + SO_J1939_SEND_PRIO: Final[int] + SO_J1939_ERRQUEUE: Final[int] + + SCM_J1939_DEST_ADDR: Final[int] + SCM_J1939_DEST_NAME: Final[int] + SCM_J1939_PRIO: Final[int] + SCM_J1939_ERRQUEUE: Final[int] + + J1939_NLA_PAD: Final[int] + J1939_NLA_BYTES_ACKED: Final[int] + J1939_EE_INFO_NONE: Final[int] + J1939_EE_INFO_TX_ABORT: Final[int] + J1939_FILTER_MAX: Final[int] + +if sys.version_info >= (3, 12) and sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darwin": + # Availability: FreeBSD >= 14.0 + AF_DIVERT: Final[int] + PF_DIVERT: Final[int] + +if sys.platform == "linux": + # Availability: Linux >= 2.2 + AF_PACKET: Final[int] + PF_PACKET: Final[int] + PACKET_BROADCAST: Final[int] + PACKET_FASTROUTE: Final[int] + PACKET_HOST: Final[int] + PACKET_LOOPBACK: Final[int] + PACKET_MULTICAST: Final[int] + PACKET_OTHERHOST: Final[int] + PACKET_OUTGOING: Final[int] + +if sys.version_info >= (3, 12) and sys.platform == "linux": + ETH_P_ALL: Final[int] + +if sys.platform == "linux": + # Availability: Linux >= 2.6.30 + AF_RDS: Final[int] + PF_RDS: Final[int] + SOL_RDS: Final[int] + # These are present in include/linux/rds.h but don't always show up + # here. 
+ RDS_CANCEL_SENT_TO: Final[int] + RDS_CMSG_RDMA_ARGS: Final[int] + RDS_CMSG_RDMA_DEST: Final[int] + RDS_CMSG_RDMA_MAP: Final[int] + RDS_CMSG_RDMA_STATUS: Final[int] + RDS_CONG_MONITOR: Final[int] + RDS_FREE_MR: Final[int] + RDS_GET_MR: Final[int] + RDS_GET_MR_FOR_DEST: Final[int] + RDS_RDMA_DONTWAIT: Final[int] + RDS_RDMA_FENCE: Final[int] + RDS_RDMA_INVALIDATE: Final[int] + RDS_RDMA_NOTIFY_ME: Final[int] + RDS_RDMA_READWRITE: Final[int] + RDS_RDMA_SILENT: Final[int] + RDS_RDMA_USE_ONCE: Final[int] + RDS_RECVERR: Final[int] + + # This is supported by CPython but doesn't seem to be a real thing. + # The closest existing constant in rds.h is RDS_CMSG_CONG_UPDATE + # RDS_CMSG_RDMA_UPDATE: Final[int] + +if sys.platform == "win32": + SIO_RCVALL: Final[int] + SIO_KEEPALIVE_VALS: Final[int] + SIO_LOOPBACK_FAST_PATH: Final[int] + RCVALL_MAX: Final[int] + RCVALL_OFF: Final[int] + RCVALL_ON: Final[int] + RCVALL_SOCKETLEVELONLY: Final[int] + +if sys.platform == "linux": + AF_TIPC: Final[int] + SOL_TIPC: Final[int] + TIPC_ADDR_ID: Final[int] + TIPC_ADDR_NAME: Final[int] + TIPC_ADDR_NAMESEQ: Final[int] + TIPC_CFG_SRV: Final[int] + TIPC_CLUSTER_SCOPE: Final[int] + TIPC_CONN_TIMEOUT: Final[int] + TIPC_CRITICAL_IMPORTANCE: Final[int] + TIPC_DEST_DROPPABLE: Final[int] + TIPC_HIGH_IMPORTANCE: Final[int] + TIPC_IMPORTANCE: Final[int] + TIPC_LOW_IMPORTANCE: Final[int] + TIPC_MEDIUM_IMPORTANCE: Final[int] + TIPC_NODE_SCOPE: Final[int] + TIPC_PUBLISHED: Final[int] + TIPC_SRC_DROPPABLE: Final[int] + TIPC_SUBSCR_TIMEOUT: Final[int] + TIPC_SUB_CANCEL: Final[int] + TIPC_SUB_PORTS: Final[int] + TIPC_SUB_SERVICE: Final[int] + TIPC_TOP_SRV: Final[int] + TIPC_WAIT_FOREVER: Final[int] + TIPC_WITHDRAWN: Final[int] + TIPC_ZONE_SCOPE: Final[int] + +if sys.platform == "linux": + # Availability: Linux >= 2.6.38 + AF_ALG: Final[int] + SOL_ALG: Final[int] + ALG_OP_DECRYPT: Final[int] + ALG_OP_ENCRYPT: Final[int] + ALG_OP_SIGN: Final[int] + ALG_OP_VERIFY: Final[int] + ALG_SET_AEAD_ASSOCLEN: Final[int] + ALG_SET_AEAD_AUTHSIZE: Final[int] + ALG_SET_IV: Final[int] + ALG_SET_KEY: Final[int] + ALG_SET_OP: Final[int] + ALG_SET_PUBKEY: Final[int] + +if sys.platform == "linux": + # Availability: Linux >= 4.8 (or maybe 3.9, CPython docs are confusing) + AF_VSOCK: Final[int] + IOCTL_VM_SOCKETS_GET_LOCAL_CID: Final = 0x7B9 + VMADDR_CID_ANY: Final = 0xFFFFFFFF + VMADDR_CID_HOST: Final = 2 + VMADDR_PORT_ANY: Final = 0xFFFFFFFF + SO_VM_SOCKETS_BUFFER_MAX_SIZE: Final = 2 + SO_VM_SOCKETS_BUFFER_SIZE: Final = 0 + SO_VM_SOCKETS_BUFFER_MIN_SIZE: Final = 1 + VM_SOCKETS_INVALID_VERSION: Final = 0xFFFFFFFF # undocumented + +# Documented as only available on BSD, macOS, but empirically sometimes +# available on Windows +if sys.platform != "linux": + AF_LINK: Final[int] + +has_ipv6: bool + +if sys.platform != "darwin" and sys.platform != "linux": + BDADDR_ANY: Final = "00:00:00:00:00:00" + BDADDR_LOCAL: Final = "00:00:00:FF:FF:FF" + +if sys.platform != "win32" and sys.platform != "darwin" and sys.platform != "linux": + HCI_FILTER: Final[int] # not in NetBSD or DragonFlyBSD + HCI_TIME_STAMP: Final[int] # not in FreeBSD, NetBSD, or DragonFlyBSD + HCI_DATA_DIR: Final[int] # not in FreeBSD, NetBSD, or DragonFlyBSD + +if sys.platform == "linux": + AF_QIPCRTR: Final[int] # Availability: Linux >= 4.7 + +if sys.version_info >= (3, 11) and sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darwin": + # FreeBSD + SCM_CREDS2: Final[int] + LOCAL_CREDS: Final[int] + LOCAL_CREDS_PERSISTENT: Final[int] + +if sys.version_info >= (3, 11) and 
sys.platform == "linux": + SO_INCOMING_CPU: Final[int] # Availability: Linux >= 3.9 + +if sys.version_info >= (3, 12) and sys.platform == "win32": + # Availability: Windows + AF_HYPERV: Final[int] + HV_PROTOCOL_RAW: Final[int] + HVSOCKET_CONNECT_TIMEOUT: Final[int] + HVSOCKET_CONNECT_TIMEOUT_MAX: Final[int] + HVSOCKET_CONNECTED_SUSPEND: Final[int] + HVSOCKET_ADDRESS_FLAG_PASSTHRU: Final[int] + HV_GUID_ZERO: Final = "00000000-0000-0000-0000-000000000000" + HV_GUID_WILDCARD: Final = "00000000-0000-0000-0000-000000000000" + HV_GUID_BROADCAST: Final = "FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF" + HV_GUID_CHILDREN: Final = "90DB8B89-0D35-4F79-8CE9-49EA0AC8B7CD" + HV_GUID_LOOPBACK: Final = "E0E16197-DD56-4A10-9195-5EE7A155A838" + HV_GUID_PARENT: Final = "A42E7CDA-D03F-480C-9CC2-A4DE20ABB878" + +if sys.version_info >= (3, 12): + if sys.platform != "win32": + # Availability: Linux, FreeBSD, macOS + ETHERTYPE_ARP: Final[int] + ETHERTYPE_IP: Final[int] + ETHERTYPE_IPV6: Final[int] + ETHERTYPE_VLAN: Final[int] + +# -------------------- +# Semi-documented constants +# These are alluded to under the "Socket families" section in the docs +# https://docs.python.org/3/library/socket.html#socket-families +# -------------------- + +if sys.platform == "linux": + # Netlink is defined by Linux + AF_NETLINK: Final[int] + NETLINK_CRYPTO: Final[int] + NETLINK_DNRTMSG: Final[int] + NETLINK_FIREWALL: Final[int] + NETLINK_IP6_FW: Final[int] + NETLINK_NFLOG: Final[int] + NETLINK_ROUTE: Final[int] + NETLINK_USERSOCK: Final[int] + NETLINK_XFRM: Final[int] + # Technically still supported by CPython + # NETLINK_ARPD: Final[int] # linux 2.0 to 2.6.12 (EOL August 2005) + # NETLINK_ROUTE6: Final[int] # linux 2.2 to 2.6.12 (EOL August 2005) + # NETLINK_SKIP: Final[int] # linux 2.0 to 2.6.12 (EOL August 2005) + # NETLINK_TAPBASE: Final[int] # linux 2.2 to 2.6.12 (EOL August 2005) + # NETLINK_TCPDIAG: Final[int] # linux 2.6.0 to 2.6.13 (EOL December 2005) + # NETLINK_W1: Final[int] # linux 2.6.13 to 2.6.17 (EOL October 2006) + +if sys.platform == "darwin": + PF_SYSTEM: Final[int] + SYSPROTO_CONTROL: Final[int] + +if sys.platform != "darwin" and sys.platform != "linux": + AF_BLUETOOTH: Final[int] + +if sys.platform != "win32" and sys.platform != "darwin" and sys.platform != "linux": + # Linux and some BSD support is explicit in the docs + # Windows and macOS do not support in practice + BTPROTO_HCI: Final[int] + BTPROTO_L2CAP: Final[int] + BTPROTO_SCO: Final[int] # not in FreeBSD +if sys.platform != "darwin" and sys.platform != "linux": + BTPROTO_RFCOMM: Final[int] + +if sys.platform == "linux": + UDPLITE_RECV_CSCOV: Final[int] + UDPLITE_SEND_CSCOV: Final[int] + +# -------------------- +# Documented under socket.shutdown +# -------------------- +SHUT_RD: Final[int] +SHUT_RDWR: Final[int] +SHUT_WR: Final[int] + +# -------------------- +# Undocumented constants +# -------------------- + +# Undocumented address families +AF_APPLETALK: Final[int] +AF_DECnet: Final[int] +AF_IPX: Final[int] +AF_SNA: Final[int] + +if sys.platform != "win32": + AF_ROUTE: Final[int] + +if sys.platform == "darwin": + AF_SYSTEM: Final[int] + +if sys.platform != "darwin": + AF_IRDA: Final[int] + +if sys.platform != "win32" and sys.platform != "darwin": + AF_ASH: Final[int] + AF_ATMPVC: Final[int] + AF_ATMSVC: Final[int] + AF_AX25: Final[int] + AF_BRIDGE: Final[int] + AF_ECONET: Final[int] + AF_KEY: Final[int] + AF_LLC: Final[int] + AF_NETBEUI: Final[int] + AF_NETROM: Final[int] + AF_PPPOX: Final[int] + AF_ROSE: Final[int] + AF_SECURITY: Final[int] + AF_WANPIPE: 
Final[int] + AF_X25: Final[int] + +# Miscellaneous undocumented + +if sys.platform != "win32" and sys.platform != "linux": + LOCAL_PEERCRED: Final[int] + +if sys.platform != "win32" and sys.platform != "darwin": + # Defined in linux socket.h, but this isn't always present for + # some reason. + IPX_TYPE: Final[int] + +# ===== Classes ===== + +@disjoint_base +class socket: + @property + def family(self) -> int: ... + @property + def type(self) -> int: ... + @property + def proto(self) -> int: ... + # F811: "Redefinition of unused `timeout`" + @property + def timeout(self) -> float | None: ... # noqa: F811 + if sys.platform == "win32": + def __init__( + self, family: int = ..., type: int = ..., proto: int = ..., fileno: SupportsIndex | bytes | None = None + ) -> None: ... + else: + def __init__(self, family: int = ..., type: int = ..., proto: int = ..., fileno: SupportsIndex | None = None) -> None: ... + + def bind(self, address: _Address, /) -> None: ... + def close(self) -> None: ... + def connect(self, address: _Address, /) -> None: ... + def connect_ex(self, address: _Address, /) -> int: ... + def detach(self) -> int: ... + def fileno(self) -> int: ... + def getpeername(self) -> _RetAddress: ... + def getsockname(self) -> _RetAddress: ... + @overload + def getsockopt(self, level: int, optname: int, /) -> int: ... + @overload + def getsockopt(self, level: int, optname: int, buflen: int, /) -> bytes: ... + def getblocking(self) -> bool: ... + def gettimeout(self) -> float | None: ... + if sys.platform == "win32": + def ioctl(self, control: int, option: int | tuple[int, int, int] | bool, /) -> None: ... + + def listen(self, backlog: int = ..., /) -> None: ... + def recv(self, bufsize: int, flags: int = 0, /) -> bytes: ... + def recvfrom(self, bufsize: int, flags: int = 0, /) -> tuple[bytes, _RetAddress]: ... + if sys.platform != "win32": + def recvmsg(self, bufsize: int, ancbufsize: int = 0, flags: int = 0, /) -> tuple[bytes, list[_CMSG], int, Any]: ... + def recvmsg_into( + self, buffers: Iterable[WriteableBuffer], ancbufsize: int = 0, flags: int = 0, / + ) -> tuple[int, list[_CMSG], int, Any]: ... + + def recvfrom_into(self, buffer: WriteableBuffer, nbytes: int = 0, flags: int = 0) -> tuple[int, _RetAddress]: ... + def recv_into(self, buffer: WriteableBuffer, nbytes: int = 0, flags: int = 0) -> int: ... + def send(self, data: ReadableBuffer, flags: int = 0, /) -> int: ... + def sendall(self, data: ReadableBuffer, flags: int = 0, /) -> None: ... + @overload + def sendto(self, data: ReadableBuffer, address: _Address, /) -> int: ... + @overload + def sendto(self, data: ReadableBuffer, flags: int, address: _Address, /) -> int: ... + if sys.platform != "win32": + def sendmsg( + self, + buffers: Iterable[ReadableBuffer], + ancdata: Iterable[_CMSGArg] = ..., + flags: int = 0, + address: _Address | None = None, + /, + ) -> int: ... + if sys.platform == "linux": + def sendmsg_afalg( + self, msg: Iterable[ReadableBuffer] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = 0 + ) -> int: ... + + def setblocking(self, flag: bool, /) -> None: ... + def settimeout(self, value: float | None, /) -> None: ... + @overload + def setsockopt(self, level: int, optname: int, value: int | ReadableBuffer, /) -> None: ... + @overload + def setsockopt(self, level: int, optname: int, value: None, optlen: int, /) -> None: ... + if sys.platform == "win32": + def share(self, process_id: int, /) -> bytes: ... + + def shutdown(self, how: int, /) -> None: ... 
+ +SocketType = socket + +# ===== Functions ===== + +def close(fd: SupportsIndex, /) -> None: ... +def dup(fd: SupportsIndex, /) -> int: ... + +# the 5th tuple item is an address +def getaddrinfo( + host: bytes | str | None, port: bytes | str | int | None, family: int = ..., type: int = 0, proto: int = 0, flags: int = 0 +) -> list[tuple[int, int, int, str, tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes]]]: ... +def gethostbyname(hostname: str, /) -> str: ... +def gethostbyname_ex(hostname: str, /) -> tuple[str, list[str], list[str]]: ... +def gethostname() -> str: ... +def gethostbyaddr(ip_address: str, /) -> tuple[str, list[str], list[str]]: ... +def getnameinfo(sockaddr: tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], flags: int, /) -> tuple[str, str]: ... +def getprotobyname(protocolname: str, /) -> int: ... +def getservbyname(servicename: str, protocolname: str = ..., /) -> int: ... +def getservbyport(port: int, protocolname: str = ..., /) -> str: ... +def ntohl(x: int, /) -> int: ... # param & ret val are 32-bit ints +def ntohs(x: int, /) -> int: ... # param & ret val are 16-bit ints +def htonl(x: int, /) -> int: ... # param & ret val are 32-bit ints +def htons(x: int, /) -> int: ... # param & ret val are 16-bit ints +def inet_aton(ip_addr: str, /) -> bytes: ... # ret val 4 bytes in length +def inet_ntoa(packed_ip: ReadableBuffer, /) -> str: ... +def inet_pton(address_family: int, ip_string: str, /) -> bytes: ... +def inet_ntop(address_family: int, packed_ip: ReadableBuffer, /) -> str: ... +def getdefaulttimeout() -> float | None: ... + +# F811: "Redefinition of unused `timeout`" +def setdefaulttimeout(timeout: float | None, /) -> None: ... # noqa: F811 + +if sys.platform != "win32": + def sethostname(name: str, /) -> None: ... + def CMSG_LEN(length: int, /) -> int: ... + def CMSG_SPACE(length: int, /) -> int: ... + def socketpair(family: int = ..., type: int = ..., proto: int = 0, /) -> tuple[socket, socket]: ... + +def if_nameindex() -> list[tuple[int, str]]: ... +def if_nametoindex(oname: str, /) -> int: ... + +if sys.version_info >= (3, 14): + def if_indextoname(if_index: int, /) -> str: ... + +else: + def if_indextoname(index: int, /) -> str: ... 
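As the comment above notes, the fifth item of each getaddrinfo() result tuple is a socket address, i.e. what connect()/bind() accept. A minimal sketch of unpacking it via the public socket wrapper module; the host and port values are only illustrative:

    import socket

    # Each entry is (family, type, proto, canonname, sockaddr);
    # sockaddr is (host, port) for AF_INET, a 4-tuple for AF_INET6.
    for family, sock_type, proto, canonname, sockaddr in socket.getaddrinfo(
        "localhost", 80, type=socket.SOCK_STREAM
    ):
        print(socket.AddressFamily(family).name, sockaddr)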
+ +CAPI: CapsuleType diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_sqlite3.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_sqlite3.pyi new file mode 100644 index 0000000..50006dc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_sqlite3.pyi @@ -0,0 +1,313 @@ +import sys +from _typeshed import ReadableBuffer, StrOrBytesPath +from collections.abc import Callable +from sqlite3 import ( + Connection as Connection, + Cursor as Cursor, + DatabaseError as DatabaseError, + DataError as DataError, + Error as Error, + IntegrityError as IntegrityError, + InterfaceError as InterfaceError, + InternalError as InternalError, + NotSupportedError as NotSupportedError, + OperationalError as OperationalError, + PrepareProtocol as PrepareProtocol, + ProgrammingError as ProgrammingError, + Row as Row, + Warning as Warning, + _IsolationLevel, +) +from typing import Any, Final, Literal, TypeVar, overload +from typing_extensions import TypeAlias + +if sys.version_info >= (3, 11): + from sqlite3 import Blob as Blob + +_T = TypeVar("_T") +_ConnectionT = TypeVar("_ConnectionT", bound=Connection) +_SqliteData: TypeAlias = str | ReadableBuffer | int | float | None +_Adapter: TypeAlias = Callable[[_T], _SqliteData] +_Converter: TypeAlias = Callable[[bytes], Any] + +PARSE_COLNAMES: Final = 2 +PARSE_DECLTYPES: Final = 1 +SQLITE_ALTER_TABLE: Final = 26 +SQLITE_ANALYZE: Final = 28 +SQLITE_ATTACH: Final = 24 +SQLITE_CREATE_INDEX: Final = 1 +SQLITE_CREATE_TABLE: Final = 2 +SQLITE_CREATE_TEMP_INDEX: Final = 3 +SQLITE_CREATE_TEMP_TABLE: Final = 4 +SQLITE_CREATE_TEMP_TRIGGER: Final = 5 +SQLITE_CREATE_TEMP_VIEW: Final = 6 +SQLITE_CREATE_TRIGGER: Final = 7 +SQLITE_CREATE_VIEW: Final = 8 +SQLITE_CREATE_VTABLE: Final = 29 +SQLITE_DELETE: Final = 9 +SQLITE_DENY: Final = 1 +SQLITE_DETACH: Final = 25 +SQLITE_DONE: Final = 101 +SQLITE_DROP_INDEX: Final = 10 +SQLITE_DROP_TABLE: Final = 11 +SQLITE_DROP_TEMP_INDEX: Final = 12 +SQLITE_DROP_TEMP_TABLE: Final = 13 +SQLITE_DROP_TEMP_TRIGGER: Final = 14 +SQLITE_DROP_TEMP_VIEW: Final = 15 +SQLITE_DROP_TRIGGER: Final = 16 +SQLITE_DROP_VIEW: Final = 17 +SQLITE_DROP_VTABLE: Final = 30 +SQLITE_FUNCTION: Final = 31 +SQLITE_IGNORE: Final = 2 +SQLITE_INSERT: Final = 18 +SQLITE_OK: Final = 0 +SQLITE_PRAGMA: Final = 19 +SQLITE_READ: Final = 20 +SQLITE_RECURSIVE: Final = 33 +SQLITE_REINDEX: Final = 27 +SQLITE_SAVEPOINT: Final = 32 +SQLITE_SELECT: Final = 21 +SQLITE_TRANSACTION: Final = 22 +SQLITE_UPDATE: Final = 23 +adapters: dict[tuple[type[Any], type[Any]], _Adapter[Any]] +converters: dict[str, _Converter] +sqlite_version: str + +if sys.version_info < (3, 12): + version: str + +if sys.version_info >= (3, 12): + LEGACY_TRANSACTION_CONTROL: Final = -1 + SQLITE_DBCONFIG_DEFENSIVE: Final = 1010 + SQLITE_DBCONFIG_DQS_DDL: Final = 1014 + SQLITE_DBCONFIG_DQS_DML: Final = 1013 + SQLITE_DBCONFIG_ENABLE_FKEY: Final = 1002 + SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER: Final = 1004 + SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION: Final = 1005 + SQLITE_DBCONFIG_ENABLE_QPSG: Final = 1007 + SQLITE_DBCONFIG_ENABLE_TRIGGER: Final = 1003 + SQLITE_DBCONFIG_ENABLE_VIEW: Final = 1015 + SQLITE_DBCONFIG_LEGACY_ALTER_TABLE: Final = 1012 + SQLITE_DBCONFIG_LEGACY_FILE_FORMAT: Final = 1016 + SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE: Final = 1006 + SQLITE_DBCONFIG_RESET_DATABASE: Final = 1009 + SQLITE_DBCONFIG_TRIGGER_EQP: Final = 1008 + SQLITE_DBCONFIG_TRUSTED_SCHEMA: Final = 1017 + SQLITE_DBCONFIG_WRITABLE_SCHEMA: Final = 1011 + +if sys.version_info >= (3, 11): + SQLITE_ABORT: Final 
= 4 + SQLITE_ABORT_ROLLBACK: Final = 516 + SQLITE_AUTH: Final = 23 + SQLITE_AUTH_USER: Final = 279 + SQLITE_BUSY: Final = 5 + SQLITE_BUSY_RECOVERY: Final = 261 + SQLITE_BUSY_SNAPSHOT: Final = 517 + SQLITE_BUSY_TIMEOUT: Final = 773 + SQLITE_CANTOPEN: Final = 14 + SQLITE_CANTOPEN_CONVPATH: Final = 1038 + SQLITE_CANTOPEN_DIRTYWAL: Final = 1294 + SQLITE_CANTOPEN_FULLPATH: Final = 782 + SQLITE_CANTOPEN_ISDIR: Final = 526 + SQLITE_CANTOPEN_NOTEMPDIR: Final = 270 + SQLITE_CANTOPEN_SYMLINK: Final = 1550 + SQLITE_CONSTRAINT: Final = 19 + SQLITE_CONSTRAINT_CHECK: Final = 275 + SQLITE_CONSTRAINT_COMMITHOOK: Final = 531 + SQLITE_CONSTRAINT_FOREIGNKEY: Final = 787 + SQLITE_CONSTRAINT_FUNCTION: Final = 1043 + SQLITE_CONSTRAINT_NOTNULL: Final = 1299 + SQLITE_CONSTRAINT_PINNED: Final = 2835 + SQLITE_CONSTRAINT_PRIMARYKEY: Final = 1555 + SQLITE_CONSTRAINT_ROWID: Final = 2579 + SQLITE_CONSTRAINT_TRIGGER: Final = 1811 + SQLITE_CONSTRAINT_UNIQUE: Final = 2067 + SQLITE_CONSTRAINT_VTAB: Final = 2323 + SQLITE_CORRUPT: Final = 11 + SQLITE_CORRUPT_INDEX: Final = 779 + SQLITE_CORRUPT_SEQUENCE: Final = 523 + SQLITE_CORRUPT_VTAB: Final = 267 + SQLITE_EMPTY: Final = 16 + SQLITE_ERROR: Final = 1 + SQLITE_ERROR_MISSING_COLLSEQ: Final = 257 + SQLITE_ERROR_RETRY: Final = 513 + SQLITE_ERROR_SNAPSHOT: Final = 769 + SQLITE_FORMAT: Final = 24 + SQLITE_FULL: Final = 13 + SQLITE_INTERNAL: Final = 2 + SQLITE_INTERRUPT: Final = 9 + SQLITE_IOERR: Final = 10 + SQLITE_IOERR_ACCESS: Final = 3338 + SQLITE_IOERR_AUTH: Final = 7178 + SQLITE_IOERR_BEGIN_ATOMIC: Final = 7434 + SQLITE_IOERR_BLOCKED: Final = 2826 + SQLITE_IOERR_CHECKRESERVEDLOCK: Final = 3594 + SQLITE_IOERR_CLOSE: Final = 4106 + SQLITE_IOERR_COMMIT_ATOMIC: Final = 7690 + SQLITE_IOERR_CONVPATH: Final = 6666 + SQLITE_IOERR_CORRUPTFS: Final = 8458 + SQLITE_IOERR_DATA: Final = 8202 + SQLITE_IOERR_DELETE: Final = 2570 + SQLITE_IOERR_DELETE_NOENT: Final = 5898 + SQLITE_IOERR_DIR_CLOSE: Final = 4362 + SQLITE_IOERR_DIR_FSYNC: Final = 1290 + SQLITE_IOERR_FSTAT: Final = 1802 + SQLITE_IOERR_FSYNC: Final = 1034 + SQLITE_IOERR_GETTEMPPATH: Final = 6410 + SQLITE_IOERR_LOCK: Final = 3850 + SQLITE_IOERR_MMAP: Final = 6154 + SQLITE_IOERR_NOMEM: Final = 3082 + SQLITE_IOERR_RDLOCK: Final = 2314 + SQLITE_IOERR_READ: Final = 266 + SQLITE_IOERR_ROLLBACK_ATOMIC: Final = 7946 + SQLITE_IOERR_SEEK: Final = 5642 + SQLITE_IOERR_SHMLOCK: Final = 5130 + SQLITE_IOERR_SHMMAP: Final = 5386 + SQLITE_IOERR_SHMOPEN: Final = 4618 + SQLITE_IOERR_SHMSIZE: Final = 4874 + SQLITE_IOERR_SHORT_READ: Final = 522 + SQLITE_IOERR_TRUNCATE: Final = 1546 + SQLITE_IOERR_UNLOCK: Final = 2058 + SQLITE_IOERR_VNODE: Final = 6922 + SQLITE_IOERR_WRITE: Final = 778 + SQLITE_LIMIT_ATTACHED: Final = 7 + SQLITE_LIMIT_COLUMN: Final = 22 + SQLITE_LIMIT_COMPOUND_SELECT: Final = 4 + SQLITE_LIMIT_EXPR_DEPTH: Final = 3 + SQLITE_LIMIT_FUNCTION_ARG: Final = 6 + SQLITE_LIMIT_LENGTH: Final = 0 + SQLITE_LIMIT_LIKE_PATTERN_LENGTH: Final = 8 + SQLITE_LIMIT_SQL_LENGTH: Final = 1 + SQLITE_LIMIT_TRIGGER_DEPTH: Final = 10 + SQLITE_LIMIT_VARIABLE_NUMBER: Final = 9 + SQLITE_LIMIT_VDBE_OP: Final = 5 + SQLITE_LIMIT_WORKER_THREADS: Final = 11 + SQLITE_LOCKED: Final = 6 + SQLITE_LOCKED_SHAREDCACHE: Final = 262 + SQLITE_LOCKED_VTAB: Final = 518 + SQLITE_MISMATCH: Final = 20 + SQLITE_MISUSE: Final = 21 + SQLITE_NOLFS: Final = 22 + SQLITE_NOMEM: Final = 7 + SQLITE_NOTADB: Final = 26 + SQLITE_NOTFOUND: Final = 12 + SQLITE_NOTICE: Final = 27 + SQLITE_NOTICE_RECOVER_ROLLBACK: Final = 539 + SQLITE_NOTICE_RECOVER_WAL: Final = 283 + SQLITE_OK_LOAD_PERMANENTLY: 
Final = 256 + SQLITE_OK_SYMLINK: Final = 512 + SQLITE_PERM: Final = 3 + SQLITE_PROTOCOL: Final = 15 + SQLITE_RANGE: Final = 25 + SQLITE_READONLY: Final = 8 + SQLITE_READONLY_CANTINIT: Final = 1288 + SQLITE_READONLY_CANTLOCK: Final = 520 + SQLITE_READONLY_DBMOVED: Final = 1032 + SQLITE_READONLY_DIRECTORY: Final = 1544 + SQLITE_READONLY_RECOVERY: Final = 264 + SQLITE_READONLY_ROLLBACK: Final = 776 + SQLITE_ROW: Final = 100 + SQLITE_SCHEMA: Final = 17 + SQLITE_TOOBIG: Final = 18 + SQLITE_WARNING: Final = 28 + SQLITE_WARNING_AUTOINDEX: Final = 284 + threadsafety: Literal[0, 1, 3] + +# Can take or return anything depending on what's in the registry. +@overload +def adapt(obj: Any, proto: Any, /) -> Any: ... +@overload +def adapt(obj: Any, proto: Any, alt: _T, /) -> Any | _T: ... +def complete_statement(statement: str) -> bool: ... + +if sys.version_info >= (3, 12): + @overload + def connect( + database: StrOrBytesPath, + timeout: float = 5.0, + detect_types: int = 0, + isolation_level: _IsolationLevel = "DEFERRED", + check_same_thread: bool = True, + cached_statements: int = 128, + uri: bool = False, + *, + autocommit: bool = ..., + ) -> Connection: ... + @overload + def connect( + database: StrOrBytesPath, + timeout: float, + detect_types: int, + isolation_level: _IsolationLevel, + check_same_thread: bool, + factory: type[_ConnectionT], + cached_statements: int = 128, + uri: bool = False, + *, + autocommit: bool = ..., + ) -> _ConnectionT: ... + @overload + def connect( + database: StrOrBytesPath, + timeout: float = 5.0, + detect_types: int = 0, + isolation_level: _IsolationLevel = "DEFERRED", + check_same_thread: bool = True, + *, + factory: type[_ConnectionT], + cached_statements: int = 128, + uri: bool = False, + autocommit: bool = ..., + ) -> _ConnectionT: ... + +else: + @overload + def connect( + database: StrOrBytesPath, + timeout: float = 5.0, + detect_types: int = 0, + isolation_level: _IsolationLevel = "DEFERRED", + check_same_thread: bool = True, + cached_statements: int = 128, + uri: bool = False, + ) -> Connection: ... + @overload + def connect( + database: StrOrBytesPath, + timeout: float, + detect_types: int, + isolation_level: _IsolationLevel, + check_same_thread: bool, + factory: type[_ConnectionT], + cached_statements: int = 128, + uri: bool = False, + ) -> _ConnectionT: ... + @overload + def connect( + database: StrOrBytesPath, + timeout: float = 5.0, + detect_types: int = 0, + isolation_level: _IsolationLevel = "DEFERRED", + check_same_thread: bool = True, + *, + factory: type[_ConnectionT], + cached_statements: int = 128, + uri: bool = False, + ) -> _ConnectionT: ... + +def enable_callback_tracebacks(enable: bool, /) -> None: ... + +if sys.version_info < (3, 12): + # takes a pos-or-keyword argument because there is a C wrapper + def enable_shared_cache(do_enable: int) -> None: ... + +if sys.version_info >= (3, 10): + def register_adapter(type: type[_T], adapter: _Adapter[_T], /) -> None: ... + def register_converter(typename: str, converter: _Converter, /) -> None: ... + +else: + def register_adapter(type: type[_T], caster: _Adapter[_T], /) -> None: ... + def register_converter(name: str, converter: _Converter, /) -> None: ... 
+ +if sys.version_info < (3, 10): + OptimizedUnicode = str diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_ssl.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_ssl.pyi new file mode 100644 index 0000000..73a43f2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_ssl.pyi @@ -0,0 +1,295 @@ +import sys +from _typeshed import ReadableBuffer, StrOrBytesPath +from collections.abc import Callable +from ssl import ( + SSLCertVerificationError as SSLCertVerificationError, + SSLContext, + SSLEOFError as SSLEOFError, + SSLError as SSLError, + SSLObject, + SSLSyscallError as SSLSyscallError, + SSLWantReadError as SSLWantReadError, + SSLWantWriteError as SSLWantWriteError, + SSLZeroReturnError as SSLZeroReturnError, +) +from typing import Any, ClassVar, Final, Literal, TypedDict, final, overload, type_check_only +from typing_extensions import NotRequired, Self, TypeAlias, deprecated, disjoint_base + +_PasswordType: TypeAlias = Callable[[], str | bytes | bytearray] | str | bytes | bytearray +_PCTRTT: TypeAlias = tuple[tuple[str, str], ...] +_PCTRTTT: TypeAlias = tuple[_PCTRTT, ...] +_PeerCertRetDictType: TypeAlias = dict[str, str | _PCTRTTT | _PCTRTT] + +@type_check_only +class _Cipher(TypedDict): + aead: bool + alg_bits: int + auth: str + description: str + digest: str | None + id: int + kea: str + name: str + protocol: str + strength_bits: int + symmetric: str + +@type_check_only +class _CertInfo(TypedDict): + subject: tuple[tuple[tuple[str, str], ...], ...] + issuer: tuple[tuple[tuple[str, str], ...], ...] + version: int + serialNumber: str + notBefore: str + notAfter: str + subjectAltName: NotRequired[tuple[tuple[str, str], ...] | None] + OCSP: NotRequired[tuple[str, ...] | None] + caIssuers: NotRequired[tuple[str, ...] | None] + crlDistributionPoints: NotRequired[tuple[str, ...] | None] + +def RAND_add(string: str | ReadableBuffer, entropy: float, /) -> None: ... +def RAND_bytes(n: int, /) -> bytes: ... + +if sys.version_info < (3, 12): + @deprecated("Deprecated since Python 3.6; removed in Python 3.12. Use `ssl.RAND_bytes()` instead.") + def RAND_pseudo_bytes(n: int, /) -> tuple[bytes, bool]: ... + +if sys.version_info < (3, 10): + def RAND_egd(path: str) -> None: ... + +def RAND_status() -> bool: ... +def get_default_verify_paths() -> tuple[str, str, str, str]: ... + +if sys.platform == "win32": + _EnumRetType: TypeAlias = list[tuple[bytes, str, set[str] | bool]] + def enum_certificates(store_name: str) -> _EnumRetType: ... + def enum_crls(store_name: str) -> _EnumRetType: ... + +def txt2obj(txt: str, name: bool = False) -> tuple[int, str, str, str]: ... +def nid2obj(nid: int, /) -> tuple[int, str, str, str]: ... +@disjoint_base +class _SSLContext: + check_hostname: bool + keylog_filename: str | None + maximum_version: int + minimum_version: int + num_tickets: int + options: int + post_handshake_auth: bool + protocol: int + if sys.version_info >= (3, 10): + security_level: int + sni_callback: Callable[[SSLObject, str, SSLContext], None | int] | None + verify_flags: int + verify_mode: int + def __new__(cls, protocol: int, /) -> Self: ... + def cert_store_stats(self) -> dict[str, int]: ... + @overload + def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]: ... + @overload + def get_ca_certs(self, binary_form: Literal[True]) -> list[bytes]: ... + @overload + def get_ca_certs(self, binary_form: bool = False) -> Any: ... + def get_ciphers(self) -> list[_Cipher]: ... 
+ def load_cert_chain( + self, certfile: StrOrBytesPath, keyfile: StrOrBytesPath | None = None, password: _PasswordType | None = None + ) -> None: ... + def load_dh_params(self, path: str, /) -> None: ... + def load_verify_locations( + self, + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, + ) -> None: ... + def session_stats(self) -> dict[str, int]: ... + def set_ciphers(self, cipherlist: str, /) -> None: ... + def set_default_verify_paths(self) -> None: ... + def set_ecdh_curve(self, name: str, /) -> None: ... + if sys.version_info >= (3, 13): + def set_psk_client_callback(self, callback: Callable[[str | None], tuple[str | None, bytes]] | None) -> None: ... + def set_psk_server_callback( + self, callback: Callable[[str | None], bytes] | None, identity_hint: str | None = None + ) -> None: ... + +@final +class MemoryBIO: + eof: bool + pending: int + def __new__(self) -> Self: ... + def read(self, size: int = -1, /) -> bytes: ... + def write(self, b: ReadableBuffer, /) -> int: ... + def write_eof(self) -> None: ... + +@final +class SSLSession: + __hash__: ClassVar[None] # type: ignore[assignment] + @property + def has_ticket(self) -> bool: ... + @property + def id(self) -> bytes: ... + @property + def ticket_lifetime_hint(self) -> int: ... + @property + def time(self) -> int: ... + @property + def timeout(self) -> int: ... + +# _ssl.Certificate is weird: it can't be instantiated or subclassed. +# Instances can only be created via methods of the private _ssl._SSLSocket class, +# for which the relevant method signatures are: +# +# class _SSLSocket: +# def get_unverified_chain(self) -> list[Certificate] | None: ... +# def get_verified_chain(self) -> list[Certificate] | None: ... +# +# You can find a _ssl._SSLSocket object as the _sslobj attribute of a ssl.SSLSocket object + +if sys.version_info >= (3, 10): + @final + class Certificate: + def get_info(self) -> _CertInfo: ... + @overload + def public_bytes(self) -> str: ... + @overload + def public_bytes(self, format: Literal[1] = 1, /) -> str: ... # ENCODING_PEM + @overload + def public_bytes(self, format: Literal[2], /) -> bytes: ... # ENCODING_DER + @overload + def public_bytes(self, format: int, /) -> str | bytes: ... 
+ +if sys.version_info < (3, 12): + err_codes_to_names: dict[tuple[int, int], str] + err_names_to_codes: dict[str, tuple[int, int]] + lib_codes_to_names: dict[int, str] + +_DEFAULT_CIPHERS: Final[str] + +# SSL error numbers +SSL_ERROR_ZERO_RETURN: Final = 6 +SSL_ERROR_WANT_READ: Final = 2 +SSL_ERROR_WANT_WRITE: Final = 3 +SSL_ERROR_WANT_X509_LOOKUP: Final = 4 +SSL_ERROR_SYSCALL: Final = 5 +SSL_ERROR_SSL: Final = 1 +SSL_ERROR_WANT_CONNECT: Final = 7 +SSL_ERROR_EOF: Final = 8 +SSL_ERROR_INVALID_ERROR_CODE: Final = 10 + +# verify modes +CERT_NONE: Final = 0 +CERT_OPTIONAL: Final = 1 +CERT_REQUIRED: Final = 2 + +# verify flags +VERIFY_DEFAULT: Final = 0 +VERIFY_CRL_CHECK_LEAF: Final = 0x4 +VERIFY_CRL_CHECK_CHAIN: Final = 0x8 +VERIFY_X509_STRICT: Final = 0x20 +VERIFY_X509_TRUSTED_FIRST: Final = 0x8000 +if sys.version_info >= (3, 10): + VERIFY_ALLOW_PROXY_CERTS: Final = 0x40 + VERIFY_X509_PARTIAL_CHAIN: Final = 0x80000 + +# alert descriptions +ALERT_DESCRIPTION_CLOSE_NOTIFY: Final = 0 +ALERT_DESCRIPTION_UNEXPECTED_MESSAGE: Final = 10 +ALERT_DESCRIPTION_BAD_RECORD_MAC: Final = 20 +ALERT_DESCRIPTION_RECORD_OVERFLOW: Final = 22 +ALERT_DESCRIPTION_DECOMPRESSION_FAILURE: Final = 30 +ALERT_DESCRIPTION_HANDSHAKE_FAILURE: Final = 40 +ALERT_DESCRIPTION_BAD_CERTIFICATE: Final = 42 +ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE: Final = 43 +ALERT_DESCRIPTION_CERTIFICATE_REVOKED: Final = 44 +ALERT_DESCRIPTION_CERTIFICATE_EXPIRED: Final = 45 +ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN: Final = 46 +ALERT_DESCRIPTION_ILLEGAL_PARAMETER: Final = 47 +ALERT_DESCRIPTION_UNKNOWN_CA: Final = 48 +ALERT_DESCRIPTION_ACCESS_DENIED: Final = 49 +ALERT_DESCRIPTION_DECODE_ERROR: Final = 50 +ALERT_DESCRIPTION_DECRYPT_ERROR: Final = 51 +ALERT_DESCRIPTION_PROTOCOL_VERSION: Final = 70 +ALERT_DESCRIPTION_INSUFFICIENT_SECURITY: Final = 71 +ALERT_DESCRIPTION_INTERNAL_ERROR: Final = 80 +ALERT_DESCRIPTION_USER_CANCELLED: Final = 90 +ALERT_DESCRIPTION_NO_RENEGOTIATION: Final = 100 +ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION: Final = 110 +ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE: Final = 111 +ALERT_DESCRIPTION_UNRECOGNIZED_NAME: Final = 112 +ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE: Final = 113 +ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE: Final = 114 +ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY: Final = 115 + +# protocol versions +PROTOCOL_SSLv23: Final = 2 +PROTOCOL_TLS: Final = 2 +PROTOCOL_TLS_CLIENT: Final = 16 +PROTOCOL_TLS_SERVER: Final = 17 +PROTOCOL_TLSv1: Final = 3 +PROTOCOL_TLSv1_1: Final = 4 +PROTOCOL_TLSv1_2: Final = 5 + +# protocol options +OP_ALL: Final = 0x80000050 +OP_NO_SSLv2: Final = 0x0 +OP_NO_SSLv3: Final = 0x2000000 +OP_NO_TLSv1: Final = 0x4000000 +OP_NO_TLSv1_1: Final = 0x10000000 +OP_NO_TLSv1_2: Final = 0x8000000 +OP_NO_TLSv1_3: Final = 0x20000000 +OP_CIPHER_SERVER_PREFERENCE: Final = 0x400000 +OP_SINGLE_DH_USE: Final = 0x0 +OP_NO_TICKET: Final = 0x4000 +OP_SINGLE_ECDH_USE: Final = 0x0 +OP_NO_COMPRESSION: Final = 0x20000 +OP_ENABLE_MIDDLEBOX_COMPAT: Final = 0x100000 +OP_NO_RENEGOTIATION: Final = 0x40000000 +if sys.version_info >= (3, 11) or sys.platform == "linux": + OP_IGNORE_UNEXPECTED_EOF: Final = 0x80 +if sys.version_info >= (3, 12): + OP_LEGACY_SERVER_CONNECT: Final = 0x4 + OP_ENABLE_KTLS: Final = 0x8 + +# host flags +HOSTFLAG_ALWAYS_CHECK_SUBJECT: Final = 0x1 +HOSTFLAG_NEVER_CHECK_SUBJECT: Final = 0x20 +HOSTFLAG_NO_WILDCARDS: Final = 0x2 +HOSTFLAG_NO_PARTIAL_WILDCARDS: Final = 0x4 +HOSTFLAG_MULTI_LABEL_WILDCARDS: Final = 0x8 +HOSTFLAG_SINGLE_LABEL_SUBDOMAINS: Final = 0x10 + +if sys.version_info >= (3, 10): + # 
certificate file types + ENCODING_PEM: Final = 1 + ENCODING_DER: Final = 2 + +# protocol versions +PROTO_MINIMUM_SUPPORTED: Final = -2 +PROTO_MAXIMUM_SUPPORTED: Final = -1 +PROTO_SSLv3: Final[int] +PROTO_TLSv1: Final[int] +PROTO_TLSv1_1: Final[int] +PROTO_TLSv1_2: Final[int] +PROTO_TLSv1_3: Final[int] + +# feature support +HAS_SNI: Final[bool] +HAS_TLS_UNIQUE: Final[bool] +HAS_ECDH: Final[bool] +HAS_NPN: Final[bool] +if sys.version_info >= (3, 13): + HAS_PSK: Final[bool] +HAS_ALPN: Final[bool] +HAS_SSLv2: Final[bool] +HAS_SSLv3: Final[bool] +HAS_TLSv1: Final[bool] +HAS_TLSv1_1: Final[bool] +HAS_TLSv1_2: Final[bool] +HAS_TLSv1_3: Final[bool] +if sys.version_info >= (3, 14): + HAS_PHA: Final[bool] + +# version info +OPENSSL_VERSION_NUMBER: Final[int] +OPENSSL_VERSION_INFO: Final[tuple[int, int, int, int, int]] +OPENSSL_VERSION: Final[str] +_OPENSSL_API_VERSION: Final[tuple[int, int, int, int, int]] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_stat.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_stat.pyi new file mode 100644 index 0000000..7129a28 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_stat.pyi @@ -0,0 +1,119 @@ +import sys +from typing import Final + +SF_APPEND: Final = 0x00040000 +SF_ARCHIVED: Final = 0x00010000 +SF_IMMUTABLE: Final = 0x00020000 +SF_NOUNLINK: Final = 0x00100000 +SF_SNAPSHOT: Final = 0x00200000 + +ST_MODE: Final = 0 +ST_INO: Final = 1 +ST_DEV: Final = 2 +ST_NLINK: Final = 3 +ST_UID: Final = 4 +ST_GID: Final = 5 +ST_SIZE: Final = 6 +ST_ATIME: Final = 7 +ST_MTIME: Final = 8 +ST_CTIME: Final = 9 + +S_IFIFO: Final = 0o010000 +S_IFLNK: Final = 0o120000 +S_IFREG: Final = 0o100000 +S_IFSOCK: Final = 0o140000 +S_IFBLK: Final = 0o060000 +S_IFCHR: Final = 0o020000 +S_IFDIR: Final = 0o040000 + +# These are 0 on systems that don't support the specific kind of file. +# Example: Linux doesn't support door files, so S_IFDOOR is 0 on linux. +S_IFDOOR: Final[int] +S_IFPORT: Final[int] +S_IFWHT: Final[int] + +S_ISUID: Final = 0o4000 +S_ISGID: Final = 0o2000 +S_ISVTX: Final = 0o1000 + +S_IRWXU: Final = 0o0700 +S_IRUSR: Final = 0o0400 +S_IWUSR: Final = 0o0200 +S_IXUSR: Final = 0o0100 + +S_IRWXG: Final = 0o0070 +S_IRGRP: Final = 0o0040 +S_IWGRP: Final = 0o0020 +S_IXGRP: Final = 0o0010 + +S_IRWXO: Final = 0o0007 +S_IROTH: Final = 0o0004 +S_IWOTH: Final = 0o0002 +S_IXOTH: Final = 0o0001 + +S_ENFMT: Final = 0o2000 +S_IREAD: Final = 0o0400 +S_IWRITE: Final = 0o0200 +S_IEXEC: Final = 0o0100 + +UF_APPEND: Final = 0x00000004 +UF_COMPRESSED: Final = 0x00000020 # OS X 10.6+ only +UF_HIDDEN: Final = 0x00008000 # OX X 10.5+ only +UF_IMMUTABLE: Final = 0x00000002 +UF_NODUMP: Final = 0x00000001 +UF_NOUNLINK: Final = 0x00000010 +UF_OPAQUE: Final = 0x00000008 + +def S_IMODE(mode: int, /) -> int: ... +def S_IFMT(mode: int, /) -> int: ... +def S_ISBLK(mode: int, /) -> bool: ... +def S_ISCHR(mode: int, /) -> bool: ... +def S_ISDIR(mode: int, /) -> bool: ... +def S_ISDOOR(mode: int, /) -> bool: ... +def S_ISFIFO(mode: int, /) -> bool: ... +def S_ISLNK(mode: int, /) -> bool: ... +def S_ISPORT(mode: int, /) -> bool: ... +def S_ISREG(mode: int, /) -> bool: ... +def S_ISSOCK(mode: int, /) -> bool: ... +def S_ISWHT(mode: int, /) -> bool: ... +def filemode(mode: int, /) -> str: ... 
+ +if sys.platform == "win32": + IO_REPARSE_TAG_SYMLINK: Final = 0xA000000C + IO_REPARSE_TAG_MOUNT_POINT: Final = 0xA0000003 + IO_REPARSE_TAG_APPEXECLINK: Final = 0x8000001B + +if sys.platform == "win32": + FILE_ATTRIBUTE_ARCHIVE: Final = 32 + FILE_ATTRIBUTE_COMPRESSED: Final = 2048 + FILE_ATTRIBUTE_DEVICE: Final = 64 + FILE_ATTRIBUTE_DIRECTORY: Final = 16 + FILE_ATTRIBUTE_ENCRYPTED: Final = 16384 + FILE_ATTRIBUTE_HIDDEN: Final = 2 + FILE_ATTRIBUTE_INTEGRITY_STREAM: Final = 32768 + FILE_ATTRIBUTE_NORMAL: Final = 128 + FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: Final = 8192 + FILE_ATTRIBUTE_NO_SCRUB_DATA: Final = 131072 + FILE_ATTRIBUTE_OFFLINE: Final = 4096 + FILE_ATTRIBUTE_READONLY: Final = 1 + FILE_ATTRIBUTE_REPARSE_POINT: Final = 1024 + FILE_ATTRIBUTE_SPARSE_FILE: Final = 512 + FILE_ATTRIBUTE_SYSTEM: Final = 4 + FILE_ATTRIBUTE_TEMPORARY: Final = 256 + FILE_ATTRIBUTE_VIRTUAL: Final = 65536 + +if sys.version_info >= (3, 13): + # Varies by platform. + SF_SETTABLE: Final[int] + # https://github.com/python/cpython/issues/114081#issuecomment-2119017790 + # SF_RESTRICTED: Literal[0x00080000] + SF_FIRMLINK: Final = 0x00800000 + SF_DATALESS: Final = 0x40000000 + + if sys.platform == "darwin": + SF_SUPPORTED: Final = 0x9F0000 + SF_SYNTHETIC: Final = 0xC0000000 + + UF_TRACKED: Final = 0x00000040 + UF_DATAVAULT: Final = 0x00000080 + UF_SETTABLE: Final = 0x0000FFFF diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_struct.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_struct.pyi new file mode 100644 index 0000000..a8fac2a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_struct.pyi @@ -0,0 +1,23 @@ +from _typeshed import ReadableBuffer, WriteableBuffer +from collections.abc import Iterator +from typing import Any +from typing_extensions import disjoint_base + +def pack(fmt: str | bytes, /, *v: Any) -> bytes: ... +def pack_into(fmt: str | bytes, buffer: WriteableBuffer, offset: int, /, *v: Any) -> None: ... +def unpack(format: str | bytes, buffer: ReadableBuffer, /) -> tuple[Any, ...]: ... +def unpack_from(format: str | bytes, /, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: ... +def iter_unpack(format: str | bytes, buffer: ReadableBuffer, /) -> Iterator[tuple[Any, ...]]: ... +def calcsize(format: str | bytes, /) -> int: ... +@disjoint_base +class Struct: + @property + def format(self) -> str: ... + @property + def size(self) -> int: ... + def __init__(self, format: str | bytes) -> None: ... + def pack(self, *v: Any) -> bytes: ... + def pack_into(self, buffer: WriteableBuffer, offset: int, *v: Any) -> None: ... + def unpack(self, buffer: ReadableBuffer, /) -> tuple[Any, ...]: ... + def unpack_from(self, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: ... + def iter_unpack(self, buffer: ReadableBuffer, /) -> Iterator[tuple[Any, ...]]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_thread.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_thread.pyi new file mode 100644 index 0000000..6969ae4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_thread.pyi @@ -0,0 +1,117 @@ +import signal +import sys +from _typeshed import structseq +from collections.abc import Callable +from threading import Thread +from types import TracebackType +from typing import Any, Final, NoReturn, final, overload +from typing_extensions import TypeVarTuple, Unpack, disjoint_base + +_Ts = TypeVarTuple("_Ts") + +error = RuntimeError + +def _count() -> int: ... 
+@final +class RLock: + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... + def release(self) -> None: ... + __enter__ = acquire + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + if sys.version_info >= (3, 14): + def locked(self) -> bool: ... + +if sys.version_info >= (3, 13): + @final + class _ThreadHandle: + ident: int + + def join(self, timeout: float | None = None, /) -> None: ... + def is_done(self) -> bool: ... + def _set_done(self) -> None: ... + + def start_joinable_thread( + function: Callable[[], object], handle: _ThreadHandle | None = None, daemon: bool = True + ) -> _ThreadHandle: ... + @final + class lock: + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... + def release(self) -> None: ... + def locked(self) -> bool: ... + def acquire_lock(self, blocking: bool = True, timeout: float = -1) -> bool: ... + def release_lock(self) -> None: ... + def locked_lock(self) -> bool: ... + def __enter__(self) -> bool: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + + LockType = lock +else: + @final + class LockType: + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... + def release(self) -> None: ... + def locked(self) -> bool: ... + def acquire_lock(self, blocking: bool = True, timeout: float = -1) -> bool: ... + def release_lock(self) -> None: ... + def locked_lock(self) -> bool: ... + def __enter__(self) -> bool: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + +@overload +def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]], /) -> int: ... +@overload +def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any], /) -> int: ... + +# Obsolete synonym for start_new_thread() +@overload +def start_new(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]], /) -> int: ... +@overload +def start_new(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any], /) -> int: ... + +if sys.version_info >= (3, 10): + def interrupt_main(signum: signal.Signals = signal.SIGINT, /) -> None: ... + +else: + def interrupt_main() -> None: ... + +def exit() -> NoReturn: ... +def exit_thread() -> NoReturn: ... # Obsolete synonym for exit() +def allocate_lock() -> LockType: ... +def allocate() -> LockType: ... # Obsolete synonym for allocate_lock() +def get_ident() -> int: ... +def stack_size(size: int = 0, /) -> int: ... + +TIMEOUT_MAX: Final[float] + +def get_native_id() -> int: ... # only available on some platforms +@final +class _ExceptHookArgs(structseq[Any], tuple[type[BaseException], BaseException | None, TracebackType | None, Thread | None]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("exc_type", "exc_value", "exc_traceback", "thread") + + @property + def exc_type(self) -> type[BaseException]: ... + @property + def exc_value(self) -> BaseException | None: ... + @property + def exc_traceback(self) -> TracebackType | None: ... + @property + def thread(self) -> Thread | None: ... + +_excepthook: Callable[[_ExceptHookArgs], Any] + +if sys.version_info >= (3, 12): + def daemon_threads_allowed() -> bool: ... + +if sys.version_info >= (3, 14): + def set_name(name: str) -> None: ... 
+ +@disjoint_base +class _local: + def __getattribute__(self, name: str, /) -> Any: ... + def __setattr__(self, name: str, value: Any, /) -> None: ... + def __delattr__(self, name: str, /) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_threading_local.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_threading_local.pyi new file mode 100644 index 0000000..5f6acaf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_threading_local.pyi @@ -0,0 +1,24 @@ +from threading import RLock +from typing import Any +from typing_extensions import Self, TypeAlias +from weakref import ReferenceType + +__all__ = ["local"] +_LocalDict: TypeAlias = dict[Any, Any] + +class _localimpl: + __slots__ = ("key", "dicts", "localargs", "locallock", "__weakref__") + key: str + dicts: dict[int, tuple[ReferenceType[Any], _LocalDict]] + # Keep localargs in sync with the *args, **kwargs annotation on local.__new__ + localargs: tuple[list[Any], dict[str, Any]] + locallock: RLock + def get_dict(self) -> _LocalDict: ... + def create_dict(self) -> _LocalDict: ... + +class local: + __slots__ = ("_local__impl", "__dict__") + def __new__(cls, /, *args: Any, **kw: Any) -> Self: ... + def __getattribute__(self, name: str) -> Any: ... + def __setattr__(self, name: str, value: Any) -> None: ... + def __delattr__(self, name: str) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_tkinter.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_tkinter.pyi new file mode 100644 index 0000000..a3868f4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_tkinter.pyi @@ -0,0 +1,144 @@ +import sys +from collections.abc import Callable +from typing import Any, ClassVar, Final, final +from typing_extensions import TypeAlias, deprecated + +# _tkinter is meant to be only used internally by tkinter, but some tkinter +# functions e.g. return _tkinter.Tcl_Obj objects. Tcl_Obj represents a Tcl +# object that hasn't been converted to a string. +# +# There are not many ways to get Tcl_Objs from tkinter, and I'm not sure if the +# only existing ways are supposed to return Tcl_Objs as opposed to returning +# strings. Here's one of these things that return Tcl_Objs: +# +# >>> import tkinter +# >>> text = tkinter.Text() +# >>> text.tag_add('foo', '1.0', 'end') +# >>> text.tag_ranges('foo') +# (, ) +@final +class Tcl_Obj: + @property + def string(self) -> str: ... + @property + def typename(self) -> str: ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __eq__(self, value, /): ... + def __ge__(self, value, /): ... + def __gt__(self, value, /): ... + def __le__(self, value, /): ... + def __lt__(self, value, /): ... + def __ne__(self, value, /): ... + +class TclError(Exception): ... + +_TkinterTraceFunc: TypeAlias = Callable[[tuple[str, ...]], object] + +# This class allows running Tcl code. Tkinter uses it internally a lot, and +# it's often handy to drop a piece of Tcl code into a tkinter program. Example: +# +# >>> import tkinter, _tkinter +# >>> tkapp = tkinter.Tk().tk +# >>> isinstance(tkapp, _tkinter.TkappType) +# True +# >>> tkapp.call('set', 'foo', (1,2,3)) +# (1, 2, 3) +# >>> tkapp.eval('return $foo') +# '1 2 3' +# >>> +# +# call args can be pretty much anything. Also, call(some_tuple) is same as call(*some_tuple). +# +# eval always returns str because _tkinter_tkapp_eval_impl in _tkinter.c calls +# Tkapp_UnicodeResult, and it returns a string when it succeeds. 
+@final +class TkappType: + # Please keep in sync with tkinter.Tk + def adderrorinfo(self, msg: str, /): ... + def call(self, command: Any, /, *args: Any) -> Any: ... + def createcommand(self, name: str, func, /): ... + if sys.platform != "win32": + def createfilehandler(self, file, mask: int, func, /): ... + def deletefilehandler(self, file, /) -> None: ... + + def createtimerhandler(self, milliseconds: int, func, /): ... + def deletecommand(self, name: str, /): ... + def dooneevent(self, flags: int = 0, /): ... + def eval(self, script: str, /) -> str: ... + def evalfile(self, fileName: str, /): ... + def exprboolean(self, s: str, /): ... + def exprdouble(self, s: str, /): ... + def exprlong(self, s: str, /): ... + def exprstring(self, s: str, /): ... + def getboolean(self, arg, /) -> bool: ... + def getdouble(self, arg, /) -> float: ... + def getint(self, arg, /) -> int: ... + def getvar(self, *args, **kwargs): ... + def globalgetvar(self, *args, **kwargs): ... + def globalsetvar(self, *args, **kwargs): ... + def globalunsetvar(self, *args, **kwargs): ... + def interpaddr(self) -> int: ... + def loadtk(self) -> None: ... + def mainloop(self, threshold: int = 0, /) -> None: ... + def quit(self) -> None: ... + def record(self, script: str, /): ... + def setvar(self, *ags, **kwargs): ... + if sys.version_info < (3, 11): + @deprecated("Deprecated since Python 3.9; removed in Python 3.11. Use `splitlist()` instead.") + def split(self, arg, /): ... + + def splitlist(self, arg, /): ... + def unsetvar(self, *args, **kwargs): ... + def wantobjects(self, *args, **kwargs): ... + def willdispatch(self) -> None: ... + if sys.version_info >= (3, 12): + def gettrace(self, /) -> _TkinterTraceFunc | None: ... + def settrace(self, func: _TkinterTraceFunc | None, /) -> None: ... + +# These should be kept in sync with tkinter.tix constants, except ALL_EVENTS which doesn't match TCL_ALL_EVENTS +ALL_EVENTS: Final = -3 +FILE_EVENTS: Final = 8 +IDLE_EVENTS: Final = 32 +TIMER_EVENTS: Final = 16 +WINDOW_EVENTS: Final = 4 + +DONT_WAIT: Final = 2 +EXCEPTION: Final = 8 +READABLE: Final = 2 +WRITABLE: Final = 4 + +TCL_VERSION: Final[str] +TK_VERSION: Final[str] + +@final +class TkttType: + def deletetimerhandler(self): ... + +if sys.version_info >= (3, 13): + def create( + screenName: str | None = None, + baseName: str = "", + className: str = "Tk", + interactive: bool = False, + wantobjects: int = 0, + wantTk: bool = True, + sync: bool = False, + use: str | None = None, + /, + ): ... + +else: + def create( + screenName: str | None = None, + baseName: str = "", + className: str = "Tk", + interactive: bool = False, + wantobjects: bool = False, + wantTk: bool = True, + sync: bool = False, + use: str | None = None, + /, + ): ... + +def getbusywaitinterval() -> int: ... +def setbusywaitinterval(new_val: int, /) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_tracemalloc.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_tracemalloc.pyi new file mode 100644 index 0000000..e9720f4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_tracemalloc.pyi @@ -0,0 +1,13 @@ +from collections.abc import Sequence +from tracemalloc import _FrameTuple, _TraceTuple + +def _get_object_traceback(obj: object, /) -> Sequence[_FrameTuple] | None: ... +def _get_traces() -> Sequence[_TraceTuple]: ... +def clear_traces() -> None: ... +def get_traceback_limit() -> int: ... +def get_traced_memory() -> tuple[int, int]: ... +def get_tracemalloc_memory() -> int: ... 
+def is_tracing() -> bool: ... +def reset_peak() -> None: ... +def start(nframe: int = 1, /) -> None: ... +def stop() -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_typeshed/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_typeshed/__init__.pyi new file mode 100644 index 0000000..b786923 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_typeshed/__init__.pyi @@ -0,0 +1,393 @@ +# Utility types for typeshed +# +# See the README.md file in this directory for more information. + +import sys +from collections.abc import Awaitable, Callable, Iterable, Iterator, Sequence, Set as AbstractSet, Sized +from dataclasses import Field +from os import PathLike +from types import FrameType, TracebackType +from typing import ( + Any, + AnyStr, + ClassVar, + Final, + Generic, + Literal, + Protocol, + SupportsFloat, + SupportsIndex, + SupportsInt, + TypeVar, + final, + overload, +) +from typing_extensions import Buffer, LiteralString, Self as _Self, TypeAlias + +_KT = TypeVar("_KT") +_KT_co = TypeVar("_KT_co", covariant=True) +_KT_contra = TypeVar("_KT_contra", contravariant=True) +_VT = TypeVar("_VT") +_VT_co = TypeVar("_VT_co", covariant=True) +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_T_contra = TypeVar("_T_contra", contravariant=True) + +# Alternative to `typing_extensions.Self`, exclusively for use with `__new__` +# in metaclasses: +# def __new__(cls: type[Self], ...) -> Self: ... +# In other cases, use `typing_extensions.Self`. +Self = TypeVar("Self") # noqa: Y001 + +# covariant version of typing.AnyStr, useful for protocols +AnyStr_co = TypeVar("AnyStr_co", str, bytes, covariant=True) # noqa: Y001 + +# For partially known annotations. Usually, fields where type annotations +# haven't been added are left unannotated, but in some situations this +# isn't possible or a type is already partially known. In cases like these, +# use Incomplete instead of Any as a marker. For example, use +# "Incomplete | None" instead of "Any | None". +Incomplete: TypeAlias = Any # stable + +# To describe a function parameter that is unused and will work with anything. +Unused: TypeAlias = object # stable + +# Marker for return types that include None, but where forcing the user to +# check for None can be detrimental. Sometimes called "the Any trick". See +# https://typing.python.org/en/latest/guides/writing_stubs.html#the-any-trick +# for more information. +MaybeNone: TypeAlias = Any # stable + +# Used to mark arguments that default to a sentinel value. This prevents +# stubtest from complaining about the default value not matching. +# +# def foo(x: int | None = sentinel) -> None: ... +# +# In cases where the sentinel object is exported and can be used by user code, +# a construct like this is better: +# +# _SentinelType = NewType("_SentinelType", object) # does not exist at runtime +# sentinel: Final[_SentinelType] +# def foo(x: int | None | _SentinelType = ...) -> None: ... +sentinel: Any # stable + +# stable +class IdentityFunction(Protocol): + def __call__(self, x: _T, /) -> _T: ... + +# stable +class SupportsNext(Protocol[_T_co]): + def __next__(self) -> _T_co: ... + +# stable +class SupportsAnext(Protocol[_T_co]): + def __anext__(self) -> Awaitable[_T_co]: ... + +class SupportsBool(Protocol): + def __bool__(self) -> bool: ... + +# Comparison protocols +class SupportsDunderLT(Protocol[_T_contra]): + def __lt__(self, other: _T_contra, /) -> SupportsBool: ... 
+ +class SupportsDunderGT(Protocol[_T_contra]): + def __gt__(self, other: _T_contra, /) -> SupportsBool: ... + +class SupportsDunderLE(Protocol[_T_contra]): + def __le__(self, other: _T_contra, /) -> SupportsBool: ... + +class SupportsDunderGE(Protocol[_T_contra]): + def __ge__(self, other: _T_contra, /) -> SupportsBool: ... + +class SupportsAllComparisons( + SupportsDunderLT[Any], SupportsDunderGT[Any], SupportsDunderLE[Any], SupportsDunderGE[Any], Protocol +): ... + +SupportsRichComparison: TypeAlias = SupportsDunderLT[Any] | SupportsDunderGT[Any] +SupportsRichComparisonT = TypeVar("SupportsRichComparisonT", bound=SupportsRichComparison) # noqa: Y001 + +# Dunder protocols + +class SupportsAdd(Protocol[_T_contra, _T_co]): + def __add__(self, x: _T_contra, /) -> _T_co: ... + +class SupportsRAdd(Protocol[_T_contra, _T_co]): + def __radd__(self, x: _T_contra, /) -> _T_co: ... + +class SupportsSub(Protocol[_T_contra, _T_co]): + def __sub__(self, x: _T_contra, /) -> _T_co: ... + +class SupportsRSub(Protocol[_T_contra, _T_co]): + def __rsub__(self, x: _T_contra, /) -> _T_co: ... + +class SupportsMul(Protocol[_T_contra, _T_co]): + def __mul__(self, x: _T_contra, /) -> _T_co: ... + +class SupportsRMul(Protocol[_T_contra, _T_co]): + def __rmul__(self, x: _T_contra, /) -> _T_co: ... + +class SupportsDivMod(Protocol[_T_contra, _T_co]): + def __divmod__(self, other: _T_contra, /) -> _T_co: ... + +class SupportsRDivMod(Protocol[_T_contra, _T_co]): + def __rdivmod__(self, other: _T_contra, /) -> _T_co: ... + +# This protocol is generic over the iterator type, while Iterable is +# generic over the type that is iterated over. +class SupportsIter(Protocol[_T_co]): + def __iter__(self) -> _T_co: ... + +# This protocol is generic over the iterator type, while AsyncIterable is +# generic over the type that is iterated over. +class SupportsAiter(Protocol[_T_co]): + def __aiter__(self) -> _T_co: ... + +class SupportsLen(Protocol): + def __len__(self) -> int: ... + +class SupportsLenAndGetItem(Protocol[_T_co]): + def __len__(self) -> int: ... + def __getitem__(self, k: int, /) -> _T_co: ... + +class SupportsTrunc(Protocol): + def __trunc__(self) -> int: ... + +# Mapping-like protocols + +# stable +class SupportsItems(Protocol[_KT_co, _VT_co]): + def items(self) -> AbstractSet[tuple[_KT_co, _VT_co]]: ... + +# stable +class SupportsKeysAndGetItem(Protocol[_KT, _VT_co]): + def keys(self) -> Iterable[_KT]: ... + def __getitem__(self, key: _KT, /) -> _VT_co: ... + +# stable +class SupportsGetItem(Protocol[_KT_contra, _VT_co]): + def __getitem__(self, key: _KT_contra, /) -> _VT_co: ... + +# stable +class SupportsContainsAndGetItem(Protocol[_KT_contra, _VT_co]): + def __contains__(self, x: Any, /) -> bool: ... + def __getitem__(self, key: _KT_contra, /) -> _VT_co: ... + +# stable +class SupportsItemAccess(Protocol[_KT_contra, _VT]): + def __contains__(self, x: Any, /) -> bool: ... + def __getitem__(self, key: _KT_contra, /) -> _VT: ... + def __setitem__(self, key: _KT_contra, value: _VT, /) -> None: ... + def __delitem__(self, key: _KT_contra, /) -> None: ... 
+ +StrPath: TypeAlias = str | PathLike[str] # stable +BytesPath: TypeAlias = bytes | PathLike[bytes] # stable +GenericPath: TypeAlias = AnyStr | PathLike[AnyStr] +StrOrBytesPath: TypeAlias = str | bytes | PathLike[str] | PathLike[bytes] # stable + +OpenTextModeUpdating: TypeAlias = Literal[ + "r+", + "+r", + "rt+", + "r+t", + "+rt", + "tr+", + "t+r", + "+tr", + "w+", + "+w", + "wt+", + "w+t", + "+wt", + "tw+", + "t+w", + "+tw", + "a+", + "+a", + "at+", + "a+t", + "+at", + "ta+", + "t+a", + "+ta", + "x+", + "+x", + "xt+", + "x+t", + "+xt", + "tx+", + "t+x", + "+tx", +] +OpenTextModeWriting: TypeAlias = Literal["w", "wt", "tw", "a", "at", "ta", "x", "xt", "tx"] +OpenTextModeReading: TypeAlias = Literal["r", "rt", "tr", "U", "rU", "Ur", "rtU", "rUt", "Urt", "trU", "tUr", "Utr"] +OpenTextMode: TypeAlias = OpenTextModeUpdating | OpenTextModeWriting | OpenTextModeReading +OpenBinaryModeUpdating: TypeAlias = Literal[ + "rb+", + "r+b", + "+rb", + "br+", + "b+r", + "+br", + "wb+", + "w+b", + "+wb", + "bw+", + "b+w", + "+bw", + "ab+", + "a+b", + "+ab", + "ba+", + "b+a", + "+ba", + "xb+", + "x+b", + "+xb", + "bx+", + "b+x", + "+bx", +] +OpenBinaryModeWriting: TypeAlias = Literal["wb", "bw", "ab", "ba", "xb", "bx"] +OpenBinaryModeReading: TypeAlias = Literal["rb", "br", "rbU", "rUb", "Urb", "brU", "bUr", "Ubr"] +OpenBinaryMode: TypeAlias = OpenBinaryModeUpdating | OpenBinaryModeReading | OpenBinaryModeWriting + +# stable +class HasFileno(Protocol): + def fileno(self) -> int: ... + +FileDescriptor: TypeAlias = int # stable +FileDescriptorLike: TypeAlias = int | HasFileno # stable +FileDescriptorOrPath: TypeAlias = int | StrOrBytesPath + +# stable +class SupportsRead(Protocol[_T_co]): + def read(self, length: int = ..., /) -> _T_co: ... + +# stable +class SupportsReadline(Protocol[_T_co]): + def readline(self, length: int = ..., /) -> _T_co: ... + +# stable +class SupportsNoArgReadline(Protocol[_T_co]): + def readline(self) -> _T_co: ... + +# stable +class SupportsWrite(Protocol[_T_contra]): + def write(self, s: _T_contra, /) -> object: ... + +# stable +class SupportsFlush(Protocol): + def flush(self) -> object: ... + +# Suitable for dictionary view objects +class Viewable(Protocol[_T_co]): + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[_T_co]: ... + +class SupportsGetItemViewable(Protocol[_KT, _VT_co]): + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[_KT]: ... + def __getitem__(self, key: _KT, /) -> _VT_co: ... + +# Unfortunately PEP 688 does not allow us to distinguish read-only +# from writable buffers. We use these aliases for readability for now. +# Perhaps a future extension of the buffer protocol will allow us to +# distinguish these cases in the type system. +ReadOnlyBuffer: TypeAlias = Buffer # stable +# Anything that implements the read-write buffer interface. +WriteableBuffer: TypeAlias = Buffer +# Same as WriteableBuffer, but also includes read-only buffer types (like bytes). +ReadableBuffer: TypeAlias = Buffer # stable + +class SliceableBuffer(Buffer, Protocol): + def __getitem__(self, slice: slice, /) -> Sequence[int]: ... + +class IndexableBuffer(Buffer, Protocol): + def __getitem__(self, i: int, /) -> int: ... + +class SupportsGetItemBuffer(SliceableBuffer, IndexableBuffer, Protocol): + def __contains__(self, x: Any, /) -> bool: ... + @overload + def __getitem__(self, slice: slice, /) -> Sequence[int]: ... + @overload + def __getitem__(self, i: int, /) -> int: ... + +class SizedBuffer(Sized, Buffer, Protocol): ... 
+ +ExcInfo: TypeAlias = tuple[type[BaseException], BaseException, TracebackType] +OptExcInfo: TypeAlias = ExcInfo | tuple[None, None, None] + +# stable +if sys.version_info >= (3, 10): + from types import NoneType as NoneType +else: + # Used by type checkers for checks involving None (does not exist at runtime) + @final + class NoneType: + def __bool__(self) -> Literal[False]: ... + +# This is an internal CPython type that is like, but subtly different from, a NamedTuple +# Subclasses of this type are found in multiple modules. +# In typeshed, `structseq` is only ever used as a mixin in combination with a fixed-length `Tuple` +# See discussion at #6546 & #6560 +# `structseq` classes are unsubclassable, so are all decorated with `@final`. +class structseq(Generic[_T_co]): + n_fields: Final[int] + n_unnamed_fields: Final[int] + n_sequence_fields: Final[int] + # The first parameter will generally only take an iterable of a specific length. + # E.g. `os.uname_result` takes any iterable of length exactly 5. + # + # The second parameter will accept a dict of any kind without raising an exception, + # but only has any meaning if you supply it a dict where the keys are strings. + # https://github.com/python/typeshed/pull/6560#discussion_r767149830 + def __new__(cls, sequence: Iterable[_T_co], dict: dict[str, Any] = ...) -> _Self: ... + if sys.version_info >= (3, 13): + def __replace__(self, **kwargs: Any) -> _Self: ... + +# Superset of typing.AnyStr that also includes LiteralString +AnyOrLiteralStr = TypeVar("AnyOrLiteralStr", str, bytes, LiteralString) # noqa: Y001 + +# Represents when str or LiteralStr is acceptable. Useful for string processing +# APIs where literalness of return value depends on literalness of inputs +StrOrLiteralStr = TypeVar("StrOrLiteralStr", LiteralString, str) # noqa: Y001 + +# Objects suitable to be passed to sys.setprofile, threading.setprofile, and similar +ProfileFunction: TypeAlias = Callable[[FrameType, str, Any], object] + +# Objects suitable to be passed to sys.settrace, threading.settrace, and similar +TraceFunction: TypeAlias = Callable[[FrameType, str, Any], TraceFunction | None] + +# experimental +# Might not work as expected for pyright, see +# https://github.com/python/typeshed/pull/9362 +# https://github.com/microsoft/pyright/issues/4339 +class DataclassInstance(Protocol): + __dataclass_fields__: ClassVar[dict[str, Field[Any]]] + +# Anything that can be passed to the int/float constructors +if sys.version_info >= (3, 14): + ConvertibleToInt: TypeAlias = str | ReadableBuffer | SupportsInt | SupportsIndex +else: + ConvertibleToInt: TypeAlias = str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc +ConvertibleToFloat: TypeAlias = str | ReadableBuffer | SupportsFloat | SupportsIndex + +# A few classes updated from Foo(str, Enum) to Foo(StrEnum). This is a convenience so these +# can be accurate on all python versions without getting too wordy +if sys.version_info >= (3, 11): + from enum import StrEnum as StrEnum +else: + from enum import Enum + + class StrEnum(str, Enum): ... + +# Objects that appear in annotations or in type expressions. +# Similar to PEP 747's TypeForm but a little broader. 
+AnnotationForm: TypeAlias = Any + +if sys.version_info >= (3, 14): + from annotationlib import Format + + # These return annotations, which can be arbitrary objects + AnnotateFunc: TypeAlias = Callable[[Format], dict[str, AnnotationForm]] + EvaluateFunc: TypeAlias = Callable[[Format], AnnotationForm] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_typeshed/_type_checker_internals.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_typeshed/_type_checker_internals.pyi new file mode 100644 index 0000000..feb22aa --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_typeshed/_type_checker_internals.pyi @@ -0,0 +1,89 @@ +# Internals used by some type checkers. +# +# Don't use this module directly. It is only for type checkers to use. + +import sys +import typing_extensions +from _collections_abc import dict_items, dict_keys, dict_values +from abc import ABCMeta +from collections.abc import Awaitable, Generator, Iterable, Mapping +from typing import Any, ClassVar, Generic, TypeVar, overload +from typing_extensions import Never + +_T = TypeVar("_T") + +# Used for an undocumented mypy feature. Does not exist at runtime. +promote = object() + +# Fallback type providing methods and attributes that appear on all `TypedDict` types. +# N.B. Keep this mostly in sync with typing_extensions._TypedDict/mypy_extensions._TypedDict +class TypedDictFallback(Mapping[str, object], metaclass=ABCMeta): + __total__: ClassVar[bool] + __required_keys__: ClassVar[frozenset[str]] + __optional_keys__: ClassVar[frozenset[str]] + # __orig_bases__ sometimes exists on <3.12, but not consistently, + # so we only add it to the stub on 3.12+ + if sys.version_info >= (3, 12): + __orig_bases__: ClassVar[tuple[Any, ...]] + if sys.version_info >= (3, 13): + __readonly_keys__: ClassVar[frozenset[str]] + __mutable_keys__: ClassVar[frozenset[str]] + + def copy(self) -> typing_extensions.Self: ... + # Using Never so that only calls using mypy plugin hook that specialize the signature + # can go through. + def setdefault(self, k: Never, default: object) -> object: ... + # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. + def pop(self, k: Never, default: _T = ...) -> object: ... # pyright: ignore[reportInvalidTypeVarUse] + def update(self, m: typing_extensions.Self, /) -> None: ... + def __delitem__(self, k: Never) -> None: ... + def items(self) -> dict_items[str, object]: ... + def keys(self) -> dict_keys[str, object]: ... + def values(self) -> dict_values[str, object]: ... + @overload + def __or__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... + @overload + def __or__(self, value: dict[str, Any], /) -> dict[str, object]: ... + @overload + def __ror__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... + @overload + def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: ... + # supposedly incompatible definitions of __or__ and __ior__ + def __ior__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... # type: ignore[misc] + +# Fallback type providing methods and attributes that appear on all `NamedTuple` types. +class NamedTupleFallback(tuple[Any, ...]): + _field_defaults: ClassVar[dict[str, Any]] + _fields: ClassVar[tuple[str, ...]] + # __orig_bases__ sometimes exists on <3.12, but not consistently + # So we only add it to the stub on 3.12+. 
+ if sys.version_info >= (3, 12): + __orig_bases__: ClassVar[tuple[Any, ...]] + + @overload + def __init__(self, typename: str, fields: Iterable[tuple[str, Any]], /) -> None: ... + @overload + @typing_extensions.deprecated( + "Creating a typing.NamedTuple using keyword arguments is deprecated and support will be removed in Python 3.15" + ) + def __init__(self, typename: str, fields: None = None, /, **kwargs: Any) -> None: ... + @classmethod + def _make(cls, iterable: Iterable[Any]) -> typing_extensions.Self: ... + def _asdict(self) -> dict[str, Any]: ... + def _replace(self, **kwargs: Any) -> typing_extensions.Self: ... + if sys.version_info >= (3, 13): + def __replace__(self, **kwargs: Any) -> typing_extensions.Self: ... + +# Non-default variations to accommodate couroutines, and `AwaitableGenerator` having a 4th type parameter. +_S = TypeVar("_S") +_YieldT_co = TypeVar("_YieldT_co", covariant=True) +_SendT_nd_contra = TypeVar("_SendT_nd_contra", contravariant=True) +_ReturnT_nd_co = TypeVar("_ReturnT_nd_co", covariant=True) + +# The parameters correspond to Generator, but the 4th is the original type. +class AwaitableGenerator( + Awaitable[_ReturnT_nd_co], + Generator[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co], + Generic[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co, _S], + metaclass=ABCMeta, +): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_typeshed/dbapi.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_typeshed/dbapi.pyi new file mode 100644 index 0000000..d54fbee --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_typeshed/dbapi.pyi @@ -0,0 +1,37 @@ +# PEP 249 Database API 2.0 Types +# https://www.python.org/dev/peps/pep-0249/ + +from collections.abc import Mapping, Sequence +from typing import Any, Protocol +from typing_extensions import TypeAlias + +DBAPITypeCode: TypeAlias = Any | None +# Strictly speaking, this should be a Sequence, but the type system does +# not support fixed-length sequences. +DBAPIColumnDescription: TypeAlias = tuple[str, DBAPITypeCode, int | None, int | None, int | None, int | None, bool | None] + +class DBAPIConnection(Protocol): + def close(self) -> object: ... + def commit(self) -> object: ... + # optional: + # def rollback(self) -> Any: ... + def cursor(self) -> DBAPICursor: ... + +class DBAPICursor(Protocol): + @property + def description(self) -> Sequence[DBAPIColumnDescription] | None: ... + @property + def rowcount(self) -> int: ... + # optional: + # def callproc(self, procname: str, parameters: Sequence[Any] = ..., /) -> Sequence[Any]: ... + def close(self) -> object: ... + def execute(self, operation: str, parameters: Sequence[Any] | Mapping[str, Any] = ..., /) -> object: ... + def executemany(self, operation: str, seq_of_parameters: Sequence[Sequence[Any]], /) -> object: ... + def fetchone(self) -> Sequence[Any] | None: ... + def fetchmany(self, size: int = ..., /) -> Sequence[Sequence[Any]]: ... + def fetchall(self) -> Sequence[Sequence[Any]]: ... + # optional: + # def nextset(self) -> None | Literal[True]: ... + arraysize: int + def setinputsizes(self, sizes: Sequence[DBAPITypeCode | int | None], /) -> object: ... + def setoutputsize(self, size: int, column: int = ..., /) -> object: ... 
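The DBAPIConnection and DBAPICursor protocols above give a structural (PEP 249) typing of database drivers, so code annotated against them accepts any conforming connection object without importing a specific driver. A minimal sketch follows; it is not part of the vendored stubs, the function and table names are illustrative, and the _typeshed import is guarded because that package exists only for type checkers.

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # _typeshed is not importable at runtime; only type checkers resolve it.
    from _typeshed.dbapi import DBAPIConnection

def count_rows(conn: DBAPIConnection, table: str = "events") -> int:
    # Uses only protocol methods, so any PEP 249 driver connection is accepted.
    cur = conn.cursor()
    cur.execute(f"SELECT COUNT(*) FROM {table}")  # table name is illustrative
    row = cur.fetchone()
    cur.close()
    return int(row[0]) if row is not None else 0

For instance, count_rows(sqlite3.connect(":memory:"), "sqlite_master") both type-checks and runs, since sqlite3's Connection and Cursor satisfy these protocols.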
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_typeshed/importlib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_typeshed/importlib.pyi new file mode 100644 index 0000000..a4e56cd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_typeshed/importlib.pyi @@ -0,0 +1,18 @@ +# Implicit protocols used in importlib. +# We intentionally omit deprecated and optional methods. + +from collections.abc import Sequence +from importlib.machinery import ModuleSpec +from types import ModuleType +from typing import Protocol + +__all__ = ["LoaderProtocol", "MetaPathFinderProtocol", "PathEntryFinderProtocol"] + +class LoaderProtocol(Protocol): + def load_module(self, fullname: str, /) -> ModuleType: ... + +class MetaPathFinderProtocol(Protocol): + def find_spec(self, fullname: str, path: Sequence[str] | None, target: ModuleType | None = ..., /) -> ModuleSpec | None: ... + +class PathEntryFinderProtocol(Protocol): + def find_spec(self, fullname: str, target: ModuleType | None = ..., /) -> ModuleSpec | None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_typeshed/wsgi.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_typeshed/wsgi.pyi new file mode 100644 index 0000000..63f204e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_typeshed/wsgi.pyi @@ -0,0 +1,44 @@ +# Types to support PEP 3333 (WSGI) +# +# Obsolete since Python 3.11: Use wsgiref.types instead. +# +# See the README.md file in this directory for more information. + +import sys +from _typeshed import OptExcInfo +from collections.abc import Callable, Iterable, Iterator +from typing import Any, Protocol +from typing_extensions import TypeAlias + +class _Readable(Protocol): + def read(self, size: int = ..., /) -> bytes: ... + # Optional: def close(self) -> object: ... + +if sys.version_info >= (3, 11): + from wsgiref.types import * +else: + # stable + class StartResponse(Protocol): + def __call__( + self, status: str, headers: list[tuple[str, str]], exc_info: OptExcInfo | None = ..., / + ) -> Callable[[bytes], object]: ... + + WSGIEnvironment: TypeAlias = dict[str, Any] # stable + WSGIApplication: TypeAlias = Callable[[WSGIEnvironment, StartResponse], Iterable[bytes]] # stable + + # WSGI input streams per PEP 3333, stable + class InputStream(Protocol): + def read(self, size: int = ..., /) -> bytes: ... + def readline(self, size: int = ..., /) -> bytes: ... + def readlines(self, hint: int = ..., /) -> list[bytes]: ... + def __iter__(self) -> Iterator[bytes]: ... + + # WSGI error streams per PEP 3333, stable + class ErrorStream(Protocol): + def flush(self) -> object: ... + def write(self, s: str, /) -> object: ... + def writelines(self, seq: list[str], /) -> object: ... + + # Optional file wrapper in wsgi.file_wrapper + class FileWrapper(Protocol): + def __call__(self, file: _Readable, block_size: int = ..., /) -> Iterable[bytes]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_typeshed/xml.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_typeshed/xml.pyi new file mode 100644 index 0000000..6cd1b39 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_typeshed/xml.pyi @@ -0,0 +1,9 @@ +# See the README.md file in this directory for more information. 
+ +from typing import Any, Protocol + +# As defined https://docs.python.org/3/library/xml.dom.html#domimplementation-objects +class DOMImplementation(Protocol): + def hasFeature(self, feature: str, version: str | None, /) -> bool: ... + def createDocument(self, namespaceUri: str, qualifiedName: str, doctype: Any | None, /) -> Any: ... + def createDocumentType(self, qualifiedName: str, publicId: str, systemId: str, /) -> Any: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_warnings.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_warnings.pyi new file mode 100644 index 0000000..2dbc7b8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_warnings.pyi @@ -0,0 +1,55 @@ +import sys +from typing import Any, overload + +_defaultaction: str +_onceregistry: dict[Any, Any] +filters: list[tuple[str, str | None, type[Warning], str | None, int]] + +if sys.version_info >= (3, 12): + @overload + def warn( + message: str, + category: type[Warning] | None = None, + stacklevel: int = 1, + source: Any | None = None, + *, + skip_file_prefixes: tuple[str, ...] = (), + ) -> None: ... + @overload + def warn( + message: Warning, + category: Any = None, + stacklevel: int = 1, + source: Any | None = None, + *, + skip_file_prefixes: tuple[str, ...] = (), + ) -> None: ... + +else: + @overload + def warn(message: str, category: type[Warning] | None = None, stacklevel: int = 1, source: Any | None = None) -> None: ... + @overload + def warn(message: Warning, category: Any = None, stacklevel: int = 1, source: Any | None = None) -> None: ... + +@overload +def warn_explicit( + message: str, + category: type[Warning], + filename: str, + lineno: int, + module: str | None = ..., + registry: dict[str | tuple[str, type[Warning], int], int] | None = None, + module_globals: dict[str, Any] | None = None, + source: Any | None = None, +) -> None: ... +@overload +def warn_explicit( + message: Warning, + category: Any, + filename: str, + lineno: int, + module: str | None = None, + registry: dict[str | tuple[str, type[Warning], int], int] | None = None, + module_globals: dict[str, Any] | None = None, + source: Any | None = None, +) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_weakref.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_weakref.pyi new file mode 100644 index 0000000..a744340 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_weakref.pyi @@ -0,0 +1,15 @@ +from collections.abc import Callable +from typing import Any, TypeVar, overload +from weakref import CallableProxyType as CallableProxyType, ProxyType as ProxyType, ReferenceType as ReferenceType, ref as ref + +_C = TypeVar("_C", bound=Callable[..., Any]) +_T = TypeVar("_T") + +def getweakrefcount(object: Any, /) -> int: ... +def getweakrefs(object: Any, /) -> list[Any]: ... + +# Return CallableProxyType if object is callable, ProxyType otherwise +@overload +def proxy(object: _C, callback: Callable[[_C], Any] | None = None, /) -> CallableProxyType[_C]: ... +@overload +def proxy(object: _T, callback: Callable[[_T], Any] | None = None, /) -> Any: ... 
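The two proxy() overloads above encode the split noted in the comment: a callable referent is typed as CallableProxyType, while anything else falls back to Any (a plain ProxyType at runtime). The following sketch is not part of the vendored stub, uses made-up class names, and goes through the public weakref module, which re-exports these names; it shows how the overloads would be expected to resolve.

import weakref

class Greeter:
    def __call__(self) -> str:
        return "hello"

class Counter:
    value = 0

g = Greeter()
c = Counter()

callable_proxy = weakref.proxy(g)  # first overload: CallableProxyType[Greeter]
plain_proxy = weakref.proxy(c)     # second overload: Any (ProxyType at runtime)

print(callable_proxy())            # the call is forwarded to the live referent
print(plain_proxy.value)           # attribute access is forwarded as well

Both proxies raise ReferenceError once their referent is garbage-collected, which is the reason to use a proxy instead of a strong reference.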
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_weakrefset.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_weakrefset.pyi new file mode 100644 index 0000000..dad1ed7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_weakrefset.pyi @@ -0,0 +1,48 @@ +from collections.abc import Iterable, Iterator, MutableSet +from types import GenericAlias +from typing import Any, ClassVar, TypeVar, overload +from typing_extensions import Self + +__all__ = ["WeakSet"] + +_S = TypeVar("_S") +_T = TypeVar("_T") + +class WeakSet(MutableSet[_T]): + @overload + def __init__(self, data: None = None) -> None: ... + @overload + def __init__(self, data: Iterable[_T]) -> None: ... + def add(self, item: _T) -> None: ... + def discard(self, item: _T) -> None: ... + def copy(self) -> Self: ... + def remove(self, item: _T) -> None: ... + def update(self, other: Iterable[_T]) -> None: ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __contains__(self, item: object) -> bool: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[_T]: ... + def __ior__(self, other: Iterable[_T]) -> Self: ... # type: ignore[override,misc] + def difference(self, other: Iterable[_T]) -> Self: ... + def __sub__(self, other: Iterable[Any]) -> Self: ... + def difference_update(self, other: Iterable[Any]) -> None: ... + def __isub__(self, other: Iterable[Any]) -> Self: ... + def intersection(self, other: Iterable[_T]) -> Self: ... + def __and__(self, other: Iterable[Any]) -> Self: ... + def intersection_update(self, other: Iterable[Any]) -> None: ... + def __iand__(self, other: Iterable[Any]) -> Self: ... + def issubset(self, other: Iterable[_T]) -> bool: ... + def __le__(self, other: Iterable[_T]) -> bool: ... + def __lt__(self, other: Iterable[_T]) -> bool: ... + def issuperset(self, other: Iterable[_T]) -> bool: ... + def __ge__(self, other: Iterable[_T]) -> bool: ... + def __gt__(self, other: Iterable[_T]) -> bool: ... + def __eq__(self, other: object) -> bool: ... + def symmetric_difference(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... + def __xor__(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... + def symmetric_difference_update(self, other: Iterable[_T]) -> None: ... + def __ixor__(self, other: Iterable[_T]) -> Self: ... # type: ignore[override,misc] + def union(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... + def __or__(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... + def isdisjoint(self, other: Iterable[_T]) -> bool: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
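Editor's note: the WeakSet stub above mirrors the built-in set API; its defining behavior is that members disappear once nothing else references them. A small sketch using the public weakref.WeakSet (illustrative only):

import gc
import weakref

class Node:
    pass

kept = Node()
temporary = Node()

nodes: "weakref.WeakSet[Node]" = weakref.WeakSet([kept, temporary])
print(len(nodes))  # 2: both referents still have strong references

del temporary      # drop the only strong reference to one member
gc.collect()       # not strictly needed on CPython, but makes the point explicit
print(len(nodes))  # 1: the dead entry was removed automatically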
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_winapi.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_winapi.pyi new file mode 100644 index 0000000..d9e2c37 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_winapi.pyi @@ -0,0 +1,286 @@ +import sys +from _typeshed import ReadableBuffer +from collections.abc import Sequence +from typing import Any, Final, Literal, NoReturn, final, overload + +if sys.platform == "win32": + ABOVE_NORMAL_PRIORITY_CLASS: Final = 0x8000 + BELOW_NORMAL_PRIORITY_CLASS: Final = 0x4000 + + CREATE_BREAKAWAY_FROM_JOB: Final = 0x1000000 + CREATE_DEFAULT_ERROR_MODE: Final = 0x4000000 + CREATE_NO_WINDOW: Final = 0x8000000 + CREATE_NEW_CONSOLE: Final = 0x10 + CREATE_NEW_PROCESS_GROUP: Final = 0x200 + + DETACHED_PROCESS: Final = 8 + DUPLICATE_CLOSE_SOURCE: Final = 1 + DUPLICATE_SAME_ACCESS: Final = 2 + + ERROR_ALREADY_EXISTS: Final = 183 + ERROR_BROKEN_PIPE: Final = 109 + ERROR_IO_PENDING: Final = 997 + ERROR_MORE_DATA: Final = 234 + ERROR_NETNAME_DELETED: Final = 64 + ERROR_NO_DATA: Final = 232 + ERROR_NO_SYSTEM_RESOURCES: Final = 1450 + ERROR_OPERATION_ABORTED: Final = 995 + ERROR_PIPE_BUSY: Final = 231 + ERROR_PIPE_CONNECTED: Final = 535 + ERROR_SEM_TIMEOUT: Final = 121 + + FILE_FLAG_FIRST_PIPE_INSTANCE: Final = 0x80000 + FILE_FLAG_OVERLAPPED: Final = 0x40000000 + + FILE_GENERIC_READ: Final = 1179785 + FILE_GENERIC_WRITE: Final = 1179926 + + FILE_MAP_ALL_ACCESS: Final = 983071 + FILE_MAP_COPY: Final = 1 + FILE_MAP_EXECUTE: Final = 32 + FILE_MAP_READ: Final = 4 + FILE_MAP_WRITE: Final = 2 + + FILE_TYPE_CHAR: Final = 2 + FILE_TYPE_DISK: Final = 1 + FILE_TYPE_PIPE: Final = 3 + FILE_TYPE_REMOTE: Final = 32768 + FILE_TYPE_UNKNOWN: Final = 0 + + GENERIC_READ: Final = 0x80000000 + GENERIC_WRITE: Final = 0x40000000 + HIGH_PRIORITY_CLASS: Final = 0x80 + INFINITE: Final = 0xFFFFFFFF + # Ignore the Flake8 error -- flake8-pyi assumes + # most numbers this long will be implementation details, + # but here we can see that it's a power of 2 + INVALID_HANDLE_VALUE: Final = 0xFFFFFFFFFFFFFFFF # noqa: Y054 + IDLE_PRIORITY_CLASS: Final = 0x40 + NORMAL_PRIORITY_CLASS: Final = 0x20 + REALTIME_PRIORITY_CLASS: Final = 0x100 + NMPWAIT_WAIT_FOREVER: Final = 0xFFFFFFFF + + MEM_COMMIT: Final = 0x1000 + MEM_FREE: Final = 0x10000 + MEM_IMAGE: Final = 0x1000000 + MEM_MAPPED: Final = 0x40000 + MEM_PRIVATE: Final = 0x20000 + MEM_RESERVE: Final = 0x2000 + + NULL: Final = 0 + OPEN_EXISTING: Final = 3 + + PIPE_ACCESS_DUPLEX: Final = 3 + PIPE_ACCESS_INBOUND: Final = 1 + PIPE_READMODE_MESSAGE: Final = 2 + PIPE_TYPE_MESSAGE: Final = 4 + PIPE_UNLIMITED_INSTANCES: Final = 255 + PIPE_WAIT: Final = 0 + + PAGE_EXECUTE: Final = 0x10 + PAGE_EXECUTE_READ: Final = 0x20 + PAGE_EXECUTE_READWRITE: Final = 0x40 + PAGE_EXECUTE_WRITECOPY: Final = 0x80 + PAGE_GUARD: Final = 0x100 + PAGE_NOACCESS: Final = 0x1 + PAGE_NOCACHE: Final = 0x200 + PAGE_READONLY: Final = 0x2 + PAGE_READWRITE: Final = 0x4 + PAGE_WRITECOMBINE: Final = 0x400 + PAGE_WRITECOPY: Final = 0x8 + + PROCESS_ALL_ACCESS: Final = 0x1FFFFF + PROCESS_DUP_HANDLE: Final = 0x40 + + SEC_COMMIT: Final = 0x8000000 + SEC_IMAGE: Final = 0x1000000 + SEC_LARGE_PAGES: Final = 0x80000000 + SEC_NOCACHE: Final = 0x10000000 + SEC_RESERVE: Final = 0x4000000 + SEC_WRITECOMBINE: Final = 0x40000000 + + if sys.version_info >= (3, 13): + STARTF_FORCEOFFFEEDBACK: Final = 0x80 + STARTF_FORCEONFEEDBACK: Final = 0x40 + STARTF_PREVENTPINNING: Final = 0x2000 + STARTF_RUNFULLSCREEN: Final = 0x20 + STARTF_TITLEISAPPID: Final = 
0x1000 + STARTF_TITLEISLINKNAME: Final = 0x800 + STARTF_UNTRUSTEDSOURCE: Final = 0x8000 + STARTF_USECOUNTCHARS: Final = 0x8 + STARTF_USEFILLATTRIBUTE: Final = 0x10 + STARTF_USEHOTKEY: Final = 0x200 + STARTF_USEPOSITION: Final = 0x4 + STARTF_USESIZE: Final = 0x2 + + STARTF_USESHOWWINDOW: Final = 0x1 + STARTF_USESTDHANDLES: Final = 0x100 + + STD_ERROR_HANDLE: Final = 0xFFFFFFF4 + STD_OUTPUT_HANDLE: Final = 0xFFFFFFF5 + STD_INPUT_HANDLE: Final = 0xFFFFFFF6 + + STILL_ACTIVE: Final = 259 + SW_HIDE: Final = 0 + SYNCHRONIZE: Final = 0x100000 + WAIT_ABANDONED_0: Final = 128 + WAIT_OBJECT_0: Final = 0 + WAIT_TIMEOUT: Final = 258 + + if sys.version_info >= (3, 10): + LOCALE_NAME_INVARIANT: Final[str] + LOCALE_NAME_MAX_LENGTH: Final[int] + LOCALE_NAME_SYSTEM_DEFAULT: Final[str] + LOCALE_NAME_USER_DEFAULT: Final[str | None] + + LCMAP_FULLWIDTH: Final[int] + LCMAP_HALFWIDTH: Final[int] + LCMAP_HIRAGANA: Final[int] + LCMAP_KATAKANA: Final[int] + LCMAP_LINGUISTIC_CASING: Final[int] + LCMAP_LOWERCASE: Final[int] + LCMAP_SIMPLIFIED_CHINESE: Final[int] + LCMAP_TITLECASE: Final[int] + LCMAP_TRADITIONAL_CHINESE: Final[int] + LCMAP_UPPERCASE: Final[int] + + if sys.version_info >= (3, 12): + COPYFILE2_CALLBACK_CHUNK_STARTED: Final = 1 + COPYFILE2_CALLBACK_CHUNK_FINISHED: Final = 2 + COPYFILE2_CALLBACK_STREAM_STARTED: Final = 3 + COPYFILE2_CALLBACK_STREAM_FINISHED: Final = 4 + COPYFILE2_CALLBACK_POLL_CONTINUE: Final = 5 + COPYFILE2_CALLBACK_ERROR: Final = 6 + + COPYFILE2_PROGRESS_CONTINUE: Final = 0 + COPYFILE2_PROGRESS_CANCEL: Final = 1 + COPYFILE2_PROGRESS_STOP: Final = 2 + COPYFILE2_PROGRESS_QUIET: Final = 3 + COPYFILE2_PROGRESS_PAUSE: Final = 4 + + COPY_FILE_FAIL_IF_EXISTS: Final = 0x1 + COPY_FILE_RESTARTABLE: Final = 0x2 + COPY_FILE_OPEN_SOURCE_FOR_WRITE: Final = 0x4 + COPY_FILE_ALLOW_DECRYPTED_DESTINATION: Final = 0x8 + COPY_FILE_COPY_SYMLINK: Final = 0x800 + COPY_FILE_NO_BUFFERING: Final = 0x1000 + COPY_FILE_REQUEST_SECURITY_PRIVILEGES: Final = 0x2000 + COPY_FILE_RESUME_FROM_PAUSE: Final = 0x4000 + COPY_FILE_NO_OFFLOAD: Final = 0x40000 + COPY_FILE_REQUEST_COMPRESSED_TRAFFIC: Final = 0x10000000 + + ERROR_ACCESS_DENIED: Final = 5 + ERROR_PRIVILEGE_NOT_HELD: Final = 1314 + + if sys.version_info >= (3, 14): + COPY_FILE_DIRECTORY: Final = 0x00000080 + + def CloseHandle(handle: int, /) -> None: ... + @overload + def ConnectNamedPipe(handle: int, overlapped: Literal[True]) -> Overlapped: ... + @overload + def ConnectNamedPipe(handle: int, overlapped: Literal[False] = False) -> None: ... + @overload + def ConnectNamedPipe(handle: int, overlapped: bool) -> Overlapped | None: ... + def CreateFile( + file_name: str, + desired_access: int, + share_mode: int, + security_attributes: int, + creation_disposition: int, + flags_and_attributes: int, + template_file: int, + /, + ) -> int: ... + def CreateJunction(src_path: str, dst_path: str, /) -> None: ... + def CreateNamedPipe( + name: str, + open_mode: int, + pipe_mode: int, + max_instances: int, + out_buffer_size: int, + in_buffer_size: int, + default_timeout: int, + security_attributes: int, + /, + ) -> int: ... + def CreatePipe(pipe_attrs: Any, size: int, /) -> tuple[int, int]: ... + def CreateProcess( + application_name: str | None, + command_line: str | None, + proc_attrs: Any, + thread_attrs: Any, + inherit_handles: bool, + creation_flags: int, + env_mapping: dict[str, str], + current_directory: str | None, + startup_info: Any, + /, + ) -> tuple[int, int, int, int]: ... 
+ def DuplicateHandle( + source_process_handle: int, + source_handle: int, + target_process_handle: int, + desired_access: int, + inherit_handle: bool, + options: int = 0, + /, + ) -> int: ... + def ExitProcess(ExitCode: int, /) -> NoReturn: ... + def GetACP() -> int: ... + def GetFileType(handle: int) -> int: ... + def GetCurrentProcess() -> int: ... + def GetExitCodeProcess(process: int, /) -> int: ... + def GetLastError() -> int: ... + def GetModuleFileName(module_handle: int, /) -> str: ... + def GetStdHandle(std_handle: int, /) -> int: ... + def GetVersion() -> int: ... + def OpenProcess(desired_access: int, inherit_handle: bool, process_id: int, /) -> int: ... + def PeekNamedPipe(handle: int, size: int = 0, /) -> tuple[int, int] | tuple[bytes, int, int]: ... + if sys.version_info >= (3, 10): + def LCMapStringEx(locale: str, flags: int, src: str) -> str: ... + def UnmapViewOfFile(address: int, /) -> None: ... + + @overload + def ReadFile(handle: int, size: int, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... + @overload + def ReadFile(handle: int, size: int, overlapped: Literal[False] = False) -> tuple[bytes, int]: ... + @overload + def ReadFile(handle: int, size: int, overlapped: int | bool) -> tuple[Any, int]: ... + def SetNamedPipeHandleState( + named_pipe: int, mode: int | None, max_collection_count: int | None, collect_data_timeout: int | None, / + ) -> None: ... + def TerminateProcess(handle: int, exit_code: int, /) -> None: ... + def WaitForMultipleObjects(handle_seq: Sequence[int], wait_flag: bool, milliseconds: int = 0xFFFFFFFF, /) -> int: ... + def WaitForSingleObject(handle: int, milliseconds: int, /) -> int: ... + def WaitNamedPipe(name: str, timeout: int, /) -> None: ... + @overload + def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... + @overload + def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: Literal[False] = False) -> tuple[int, int]: ... + @overload + def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: int | bool) -> tuple[Any, int]: ... + @final + class Overlapped: + event: int + def GetOverlappedResult(self, wait: bool, /) -> tuple[int, int]: ... + def cancel(self) -> None: ... + def getbuffer(self) -> bytes | None: ... + + if sys.version_info >= (3, 13): + def BatchedWaitForMultipleObjects( + handle_seq: Sequence[int], wait_all: bool, milliseconds: int = 0xFFFFFFFF + ) -> list[int]: ... + def CreateEventW(security_attributes: int, manual_reset: bool, initial_state: bool, name: str | None) -> int: ... + def CreateMutexW(security_attributes: int, initial_owner: bool, name: str) -> int: ... + def GetLongPathName(path: str) -> str: ... + def GetShortPathName(path: str) -> str: ... + def OpenEventW(desired_access: int, inherit_handle: bool, name: str) -> int: ... + def OpenMutexW(desired_access: int, inherit_handle: bool, name: str) -> int: ... + def ReleaseMutex(mutex: int) -> None: ... + def ResetEvent(event: int) -> None: ... + def SetEvent(event: int) -> None: ... + + if sys.version_info >= (3, 12): + def CopyFile2(existing_file_name: str, new_file_name: str, flags: int, progress_routine: int | None = None) -> int: ... + def NeedCurrentDirectoryForExePath(exe_name: str, /) -> bool: ... 
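Editor's note: _winapi is an undocumented, Windows-only CPython module (subprocess and multiprocessing build on it); the stub above covers handles, pipes, and process creation. A hedged sketch of the anonymous-pipe calls it declares, guarded so it only runs on Windows:

import sys

if sys.platform == "win32":
    import _winapi

    # CreatePipe(pipe_attrs, size) -> (read_handle, write_handle)
    read_handle, write_handle = _winapi.CreatePipe(None, 0)
    try:
        written, _ = _winapi.WriteFile(write_handle, b"hello")  # (bytes written, error code)
        data, _ = _winapi.ReadFile(read_handle, 64)             # (payload, error code)
        print(written, data)                                    # 5 b'hello'
    finally:
        _winapi.CloseHandle(read_handle)
        _winapi.CloseHandle(write_handle)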
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_zstd.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_zstd.pyi new file mode 100644 index 0000000..f5e98ef --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/_zstd.pyi @@ -0,0 +1,97 @@ +from _typeshed import ReadableBuffer +from collections.abc import Mapping +from compression.zstd import CompressionParameter, DecompressionParameter +from typing import Final, Literal, final +from typing_extensions import Self, TypeAlias + +ZSTD_CLEVEL_DEFAULT: Final = 3 +ZSTD_DStreamOutSize: Final = 131072 +ZSTD_btlazy2: Final = 6 +ZSTD_btopt: Final = 7 +ZSTD_btultra: Final = 8 +ZSTD_btultra2: Final = 9 +ZSTD_c_chainLog: Final = 103 +ZSTD_c_checksumFlag: Final = 201 +ZSTD_c_compressionLevel: Final = 100 +ZSTD_c_contentSizeFlag: Final = 200 +ZSTD_c_dictIDFlag: Final = 202 +ZSTD_c_enableLongDistanceMatching: Final = 160 +ZSTD_c_hashLog: Final = 102 +ZSTD_c_jobSize: Final = 401 +ZSTD_c_ldmBucketSizeLog: Final = 163 +ZSTD_c_ldmHashLog: Final = 161 +ZSTD_c_ldmHashRateLog: Final = 164 +ZSTD_c_ldmMinMatch: Final = 162 +ZSTD_c_minMatch: Final = 105 +ZSTD_c_nbWorkers: Final = 400 +ZSTD_c_overlapLog: Final = 402 +ZSTD_c_searchLog: Final = 104 +ZSTD_c_strategy: Final = 107 +ZSTD_c_targetLength: Final = 106 +ZSTD_c_windowLog: Final = 101 +ZSTD_d_windowLogMax: Final = 100 +ZSTD_dfast: Final = 2 +ZSTD_fast: Final = 1 +ZSTD_greedy: Final = 3 +ZSTD_lazy: Final = 4 +ZSTD_lazy2: Final = 5 + +_ZstdCompressorContinue: TypeAlias = Literal[0] +_ZstdCompressorFlushBlock: TypeAlias = Literal[1] +_ZstdCompressorFlushFrame: TypeAlias = Literal[2] + +@final +class ZstdCompressor: + CONTINUE: Final = 0 + FLUSH_BLOCK: Final = 1 + FLUSH_FRAME: Final = 2 + def __new__( + cls, level: int | None = None, options: Mapping[int, int] | None = None, zstd_dict: ZstdDict | None = None + ) -> Self: ... + def compress( + self, /, data: ReadableBuffer, mode: _ZstdCompressorContinue | _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame = 0 + ) -> bytes: ... + def flush(self, /, mode: _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame = 2) -> bytes: ... + def set_pledged_input_size(self, size: int | None, /) -> None: ... + @property + def last_mode(self) -> _ZstdCompressorContinue | _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame: ... + +@final +class ZstdDecompressor: + def __new__(cls, zstd_dict: ZstdDict | None = None, options: Mapping[int, int] | None = None) -> Self: ... + def decompress(self, /, data: ReadableBuffer, max_length: int = -1) -> bytes: ... + @property + def eof(self) -> bool: ... + @property + def needs_input(self) -> bool: ... + @property + def unused_data(self) -> bytes: ... + +@final +class ZstdDict: + def __new__(cls, dict_content: bytes, /, *, is_raw: bool = False) -> Self: ... + def __len__(self, /) -> int: ... + @property + def as_digested_dict(self) -> tuple[Self, int]: ... + @property + def as_prefix(self) -> tuple[Self, int]: ... + @property + def as_undigested_dict(self) -> tuple[Self, int]: ... + @property + def dict_content(self) -> bytes: ... + @property + def dict_id(self) -> int: ... + +class ZstdError(Exception): ... + +def finalize_dict( + custom_dict_bytes: bytes, samples_bytes: bytes, samples_sizes: tuple[int, ...], dict_size: int, compression_level: int, / +) -> bytes: ... +def get_frame_info(frame_buffer: ReadableBuffer) -> tuple[int, int]: ... +def get_frame_size(frame_buffer: ReadableBuffer) -> int: ... +def get_param_bounds(parameter: int, is_compress: bool) -> tuple[int, int]: ... 
+def set_parameter_types(c_parameter_type: type[CompressionParameter], d_parameter_type: type[DecompressionParameter]) -> None: ... +def train_dict(samples_bytes: bytes, samples_sizes: tuple[int, ...], dict_size: int, /) -> bytes: ... + +zstd_version: Final[str] +zstd_version_number: Final[int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/abc.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/abc.pyi new file mode 100644 index 0000000..c8cd549 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/abc.pyi @@ -0,0 +1,51 @@ +import _typeshed +import sys +from _typeshed import SupportsWrite +from collections.abc import Callable +from typing import Any, Literal, TypeVar +from typing_extensions import Concatenate, ParamSpec, deprecated + +_T = TypeVar("_T") +_R_co = TypeVar("_R_co", covariant=True) +_FuncT = TypeVar("_FuncT", bound=Callable[..., Any]) +_P = ParamSpec("_P") + +# These definitions have special processing in mypy +class ABCMeta(type): + __abstractmethods__: frozenset[str] + if sys.version_info >= (3, 11): + def __new__( + mcls: type[_typeshed.Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], /, **kwargs: Any + ) -> _typeshed.Self: ... + else: + def __new__( + mcls: type[_typeshed.Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], **kwargs: Any + ) -> _typeshed.Self: ... + + def __instancecheck__(cls: ABCMeta, instance: Any) -> bool: ... + def __subclasscheck__(cls: ABCMeta, subclass: type) -> bool: ... + def _dump_registry(cls: ABCMeta, file: SupportsWrite[str] | None = None) -> None: ... + def register(cls: ABCMeta, subclass: type[_T]) -> type[_T]: ... + +def abstractmethod(funcobj: _FuncT) -> _FuncT: ... +@deprecated("Deprecated since Python 3.3. Use `@classmethod` stacked on top of `@abstractmethod` instead.") +class abstractclassmethod(classmethod[_T, _P, _R_co]): + __isabstractmethod__: Literal[True] + def __init__(self, callable: Callable[Concatenate[type[_T], _P], _R_co]) -> None: ... + +@deprecated("Deprecated since Python 3.3. Use `@staticmethod` stacked on top of `@abstractmethod` instead.") +class abstractstaticmethod(staticmethod[_P, _R_co]): + __isabstractmethod__: Literal[True] + def __init__(self, callable: Callable[_P, _R_co]) -> None: ... + +@deprecated("Deprecated since Python 3.3. Use `@property` stacked on top of `@abstractmethod` instead.") +class abstractproperty(property): + __isabstractmethod__: Literal[True] + +class ABC(metaclass=ABCMeta): + __slots__ = () + +def get_cache_token() -> object: ... + +if sys.version_info >= (3, 10): + def update_abstractmethods(cls: type[_T]) -> type[_T]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/aifc.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/aifc.pyi new file mode 100644 index 0000000..bfe12c6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/aifc.pyi @@ -0,0 +1,79 @@ +from types import TracebackType +from typing import IO, Any, Literal, NamedTuple, overload +from typing_extensions import Self, TypeAlias + +__all__ = ["Error", "open"] + +class Error(Exception): ... + +class _aifc_params(NamedTuple): + nchannels: int + sampwidth: int + framerate: int + nframes: int + comptype: bytes + compname: bytes + +_File: TypeAlias = str | IO[bytes] +_Marker: TypeAlias = tuple[int, int, bytes] + +class Aifc_read: + def __init__(self, f: _File) -> None: ... + def __enter__(self) -> Self: ... 
+ def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def initfp(self, file: IO[bytes]) -> None: ... + def getfp(self) -> IO[bytes]: ... + def rewind(self) -> None: ... + def close(self) -> None: ... + def tell(self) -> int: ... + def getnchannels(self) -> int: ... + def getnframes(self) -> int: ... + def getsampwidth(self) -> int: ... + def getframerate(self) -> int: ... + def getcomptype(self) -> bytes: ... + def getcompname(self) -> bytes: ... + def getparams(self) -> _aifc_params: ... + def getmarkers(self) -> list[_Marker] | None: ... + def getmark(self, id: int) -> _Marker: ... + def setpos(self, pos: int) -> None: ... + def readframes(self, nframes: int) -> bytes: ... + +class Aifc_write: + def __init__(self, f: _File) -> None: ... + def __del__(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def initfp(self, file: IO[bytes]) -> None: ... + def aiff(self) -> None: ... + def aifc(self) -> None: ... + def setnchannels(self, nchannels: int) -> None: ... + def getnchannels(self) -> int: ... + def setsampwidth(self, sampwidth: int) -> None: ... + def getsampwidth(self) -> int: ... + def setframerate(self, framerate: int) -> None: ... + def getframerate(self) -> int: ... + def setnframes(self, nframes: int) -> None: ... + def getnframes(self) -> int: ... + def setcomptype(self, comptype: bytes, compname: bytes) -> None: ... + def getcomptype(self) -> bytes: ... + def getcompname(self) -> bytes: ... + def setparams(self, params: tuple[int, int, int, int, bytes, bytes]) -> None: ... + def getparams(self) -> _aifc_params: ... + def setmark(self, id: int, pos: int, name: bytes) -> None: ... + def getmark(self, id: int) -> _Marker: ... + def getmarkers(self) -> list[_Marker] | None: ... + def tell(self) -> int: ... + def writeframesraw(self, data: Any) -> None: ... # Actual type for data is Buffer Protocol + def writeframes(self, data: Any) -> None: ... + def close(self) -> None: ... + +@overload +def open(f: _File, mode: Literal["r", "rb"]) -> Aifc_read: ... +@overload +def open(f: _File, mode: Literal["w", "wb"]) -> Aifc_write: ... +@overload +def open(f: _File, mode: str | None = None) -> Any: ... 
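Editor's note: aifc (stubbed above) follows the same reader/writer pattern as wave; it is deprecated since Python 3.11 and removed in 3.13, so this sketch only applies to older interpreters. The filename is illustrative:

import aifc

with aifc.open("demo.aiff", "wb") as out:
    out.setnchannels(1)                  # mono
    out.setsampwidth(2)                  # 16-bit samples
    out.setframerate(8000)               # 8 kHz
    out.writeframes(b"\x00\x00" * 8000)  # one second of silence

with aifc.open("demo.aiff", "rb") as inp:
    params = inp.getparams()             # _aifc_params named tuple
    print(params.nchannels, params.framerate, len(inp.readframes(params.nframes)))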
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/annotationlib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/annotationlib.pyi new file mode 100644 index 0000000..3679dc2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/annotationlib.pyi @@ -0,0 +1,146 @@ +import sys +from typing import Literal + +if sys.version_info >= (3, 14): + import enum + import types + from _typeshed import AnnotateFunc, AnnotationForm, EvaluateFunc, SupportsItems + from collections.abc import Mapping + from typing import Any, ParamSpec, TypeVar, TypeVarTuple, final, overload + from warnings import deprecated + + __all__ = [ + "Format", + "ForwardRef", + "call_annotate_function", + "call_evaluate_function", + "get_annotate_from_class_namespace", + "get_annotations", + "annotations_to_string", + "type_repr", + ] + + class Format(enum.IntEnum): + VALUE = 1 + VALUE_WITH_FAKE_GLOBALS = 2 + FORWARDREF = 3 + STRING = 4 + + @final + class ForwardRef: + __slots__ = ( + "__forward_is_argument__", + "__forward_is_class__", + "__forward_module__", + "__weakref__", + "__arg__", + "__globals__", + "__extra_names__", + "__code__", + "__ast_node__", + "__cell__", + "__owner__", + "__stringifier_dict__", + ) + __forward_is_argument__: bool + __forward_is_class__: bool + __forward_module__: str | None + def __init__( + self, arg: str, *, module: str | None = None, owner: object = None, is_argument: bool = True, is_class: bool = False + ) -> None: ... + @overload + def evaluate( + self, + *, + globals: dict[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] | None = None, + owner: object = None, + format: Literal[Format.STRING], + ) -> str: ... + @overload + def evaluate( + self, + *, + globals: dict[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] | None = None, + owner: object = None, + format: Literal[Format.FORWARDREF], + ) -> AnnotationForm | ForwardRef: ... + @overload + def evaluate( + self, + *, + globals: dict[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] | None = None, + owner: object = None, + format: Format = Format.VALUE, # noqa: Y011 + ) -> AnnotationForm: ... + @deprecated("Use `ForwardRef.evaluate()` or `typing.evaluate_forward_ref()` instead.") + def _evaluate( + self, + globalns: dict[str, Any] | None, + localns: Mapping[str, Any] | None, + type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] = ..., + *, + recursive_guard: frozenset[str], + ) -> AnnotationForm: ... + @property + def __forward_arg__(self) -> str: ... + @property + def __forward_code__(self) -> types.CodeType: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def __or__(self, other: Any) -> types.UnionType: ... + def __ror__(self, other: Any) -> types.UnionType: ... + + @overload + def call_evaluate_function(evaluate: EvaluateFunc, format: Literal[Format.STRING], *, owner: object = None) -> str: ... + @overload + def call_evaluate_function( + evaluate: EvaluateFunc, format: Literal[Format.FORWARDREF], *, owner: object = None + ) -> AnnotationForm | ForwardRef: ... + @overload + def call_evaluate_function(evaluate: EvaluateFunc, format: Format, *, owner: object = None) -> AnnotationForm: ... 
+ @overload + def call_annotate_function( + annotate: AnnotateFunc, format: Literal[Format.STRING], *, owner: object = None + ) -> dict[str, str]: ... + @overload + def call_annotate_function( + annotate: AnnotateFunc, format: Literal[Format.FORWARDREF], *, owner: object = None + ) -> dict[str, AnnotationForm | ForwardRef]: ... + @overload + def call_annotate_function(annotate: AnnotateFunc, format: Format, *, owner: object = None) -> dict[str, AnnotationForm]: ... + def get_annotate_from_class_namespace(obj: Mapping[str, object]) -> AnnotateFunc | None: ... + @overload + def get_annotations( + obj: Any, # any object with __annotations__ or __annotate__ + *, + globals: dict[str, object] | None = None, + locals: Mapping[str, object] | None = None, + eval_str: bool = False, + format: Literal[Format.STRING], + ) -> dict[str, str]: ... + @overload + def get_annotations( + obj: Any, + *, + globals: dict[str, object] | None = None, + locals: Mapping[str, object] | None = None, + eval_str: bool = False, + format: Literal[Format.FORWARDREF], + ) -> dict[str, AnnotationForm | ForwardRef]: ... + @overload + def get_annotations( + obj: Any, + *, + globals: dict[str, object] | None = None, + locals: Mapping[str, object] | None = None, + eval_str: bool = False, + format: Format = Format.VALUE, # noqa: Y011 + ) -> dict[str, AnnotationForm]: ... + def type_repr(value: object) -> str: ... + def annotations_to_string(annotations: SupportsItems[str, object]) -> dict[str, str]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/antigravity.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/antigravity.pyi new file mode 100644 index 0000000..3986e7d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/antigravity.pyi @@ -0,0 +1,3 @@ +from _typeshed import ReadableBuffer + +def geohash(latitude: float, longitude: float, datedow: ReadableBuffer) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/argparse.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/argparse.pyi new file mode 100644 index 0000000..f4b3aac --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/argparse.pyi @@ -0,0 +1,829 @@ +import sys +from _typeshed import SupportsWrite, sentinel +from collections.abc import Callable, Generator, Iterable, Sequence +from re import Pattern +from typing import IO, Any, ClassVar, Final, Generic, NewType, NoReturn, Protocol, TypeVar, overload, type_check_only +from typing_extensions import Self, TypeAlias, deprecated + +__all__ = [ + "ArgumentParser", + "ArgumentError", + "ArgumentTypeError", + "FileType", + "HelpFormatter", + "ArgumentDefaultsHelpFormatter", + "RawDescriptionHelpFormatter", + "RawTextHelpFormatter", + "MetavarTypeHelpFormatter", + "Namespace", + "Action", + "BooleanOptionalAction", + "ONE_OR_MORE", + "OPTIONAL", + "PARSER", + "REMAINDER", + "SUPPRESS", + "ZERO_OR_MORE", +] + +_T = TypeVar("_T") +_ActionT = TypeVar("_ActionT", bound=Action) +_ArgumentParserT = TypeVar("_ArgumentParserT", bound=ArgumentParser) +_N = TypeVar("_N") +_ActionType: TypeAlias = Callable[[str], Any] | FileType | str + +ONE_OR_MORE: Final = "+" +OPTIONAL: Final = "?" +PARSER: Final = "A..." +REMAINDER: Final = "..." 
+_SUPPRESS_T = NewType("_SUPPRESS_T", str) +SUPPRESS: _SUPPRESS_T | str # not using Literal because argparse sometimes compares SUPPRESS with is +# the | str is there so that foo = argparse.SUPPRESS; foo = "test" checks out in mypy +ZERO_OR_MORE: Final = "*" +_UNRECOGNIZED_ARGS_ATTR: Final = "_unrecognized_args" # undocumented + +class ArgumentError(Exception): + argument_name: str | None + message: str + def __init__(self, argument: Action | None, message: str) -> None: ... + +# undocumented +class _AttributeHolder: + def _get_kwargs(self) -> list[tuple[str, Any]]: ... + def _get_args(self) -> list[Any]: ... + +# undocumented +class _ActionsContainer: + description: str | None + prefix_chars: str + argument_default: Any + conflict_handler: str + + _registries: dict[str, dict[Any, Any]] + _actions: list[Action] + _option_string_actions: dict[str, Action] + _action_groups: list[_ArgumentGroup] + _mutually_exclusive_groups: list[_MutuallyExclusiveGroup] + _defaults: dict[str, Any] + _negative_number_matcher: Pattern[str] + _has_negative_number_optionals: list[bool] + def __init__(self, description: str | None, prefix_chars: str, argument_default: Any, conflict_handler: str) -> None: ... + def register(self, registry_name: str, value: Any, object: Any) -> None: ... + def _registry_get(self, registry_name: str, value: Any, default: Any = None) -> Any: ... + def set_defaults(self, **kwargs: Any) -> None: ... + def get_default(self, dest: str) -> Any: ... + def add_argument( + self, + *name_or_flags: str, + # str covers predefined actions ("store_true", "count", etc.) + # and user registered actions via the `register` method. + action: str | type[Action] = ..., + # more precisely, Literal["?", "*", "+", "...", "A...", "==SUPPRESS=="], + # but using this would make it hard to annotate callers that don't use a + # literal argument and for subclasses to override this method. + nargs: int | str | _SUPPRESS_T | None = None, + const: Any = ..., + default: Any = ..., + type: _ActionType = ..., + choices: Iterable[_T] | None = ..., + required: bool = ..., + help: str | None = ..., + metavar: str | tuple[str, ...] | None = ..., + dest: str | None = ..., + version: str = ..., + **kwargs: Any, + ) -> Action: ... + def add_argument_group( + self, + title: str | None = None, + description: str | None = None, + *, + prefix_chars: str = ..., + argument_default: Any = ..., + conflict_handler: str = ..., + ) -> _ArgumentGroup: ... + def add_mutually_exclusive_group(self, *, required: bool = False) -> _MutuallyExclusiveGroup: ... + def _add_action(self, action: _ActionT) -> _ActionT: ... + def _remove_action(self, action: Action) -> None: ... + def _add_container_actions(self, container: _ActionsContainer) -> None: ... + def _get_positional_kwargs(self, dest: str, **kwargs: Any) -> dict[str, Any]: ... + def _get_optional_kwargs(self, *args: Any, **kwargs: Any) -> dict[str, Any]: ... + def _pop_action_class(self, kwargs: Any, default: type[Action] | None = None) -> type[Action]: ... + def _get_handler(self) -> Callable[[Action, Iterable[tuple[str, Action]]], Any]: ... + def _check_conflict(self, action: Action) -> None: ... + def _handle_conflict_error(self, action: Action, conflicting_actions: Iterable[tuple[str, Action]]) -> NoReturn: ... + def _handle_conflict_resolve(self, action: Action, conflicting_actions: Iterable[tuple[str, Action]]) -> None: ... + +@type_check_only +class _FormatterClass(Protocol): + def __call__(self, *, prog: str) -> HelpFormatter: ... 
+ +class ArgumentParser(_AttributeHolder, _ActionsContainer): + prog: str + usage: str | None + epilog: str | None + formatter_class: _FormatterClass + fromfile_prefix_chars: str | None + add_help: bool + allow_abbrev: bool + exit_on_error: bool + + if sys.version_info >= (3, 14): + suggest_on_error: bool + color: bool + + # undocumented + _positionals: _ArgumentGroup + _optionals: _ArgumentGroup + _subparsers: _ArgumentGroup | None + + # Note: the constructor arguments are also used in _SubParsersAction.add_parser. + if sys.version_info >= (3, 14): + def __init__( + self, + prog: str | None = None, + usage: str | None = None, + description: str | None = None, + epilog: str | None = None, + parents: Sequence[ArgumentParser] = [], + formatter_class: _FormatterClass = ..., + prefix_chars: str = "-", + fromfile_prefix_chars: str | None = None, + argument_default: Any = None, + conflict_handler: str = "error", + add_help: bool = True, + allow_abbrev: bool = True, + exit_on_error: bool = True, + *, + suggest_on_error: bool = False, + color: bool = True, + ) -> None: ... + else: + def __init__( + self, + prog: str | None = None, + usage: str | None = None, + description: str | None = None, + epilog: str | None = None, + parents: Sequence[ArgumentParser] = [], + formatter_class: _FormatterClass = ..., + prefix_chars: str = "-", + fromfile_prefix_chars: str | None = None, + argument_default: Any = None, + conflict_handler: str = "error", + add_help: bool = True, + allow_abbrev: bool = True, + exit_on_error: bool = True, + ) -> None: ... + + @overload + def parse_args(self, args: Sequence[str] | None = None, namespace: None = None) -> Namespace: ... + @overload + def parse_args(self, args: Sequence[str] | None, namespace: _N) -> _N: ... + @overload + def parse_args(self, *, namespace: _N) -> _N: ... + @overload + def add_subparsers( + self: _ArgumentParserT, + *, + title: str = "subcommands", + description: str | None = None, + prog: str | None = None, + action: type[Action] = ..., + option_string: str = ..., + dest: str | None = None, + required: bool = False, + help: str | None = None, + metavar: str | None = None, + ) -> _SubParsersAction[_ArgumentParserT]: ... + @overload + def add_subparsers( + self, + *, + title: str = "subcommands", + description: str | None = None, + prog: str | None = None, + parser_class: type[_ArgumentParserT], + action: type[Action] = ..., + option_string: str = ..., + dest: str | None = None, + required: bool = False, + help: str | None = None, + metavar: str | None = None, + ) -> _SubParsersAction[_ArgumentParserT]: ... + def print_usage(self, file: SupportsWrite[str] | None = None) -> None: ... + def print_help(self, file: SupportsWrite[str] | None = None) -> None: ... + def format_usage(self) -> str: ... + def format_help(self) -> str: ... + @overload + def parse_known_args(self, args: Sequence[str] | None = None, namespace: None = None) -> tuple[Namespace, list[str]]: ... + @overload + def parse_known_args(self, args: Sequence[str] | None, namespace: _N) -> tuple[_N, list[str]]: ... + @overload + def parse_known_args(self, *, namespace: _N) -> tuple[_N, list[str]]: ... + def convert_arg_line_to_args(self, arg_line: str) -> list[str]: ... + def exit(self, status: int = 0, message: str | None = None) -> NoReturn: ... + def error(self, message: str) -> NoReturn: ... + @overload + def parse_intermixed_args(self, args: Sequence[str] | None = None, namespace: None = None) -> Namespace: ... 
+ @overload + def parse_intermixed_args(self, args: Sequence[str] | None, namespace: _N) -> _N: ... + @overload + def parse_intermixed_args(self, *, namespace: _N) -> _N: ... + @overload + def parse_known_intermixed_args( + self, args: Sequence[str] | None = None, namespace: None = None + ) -> tuple[Namespace, list[str]]: ... + @overload + def parse_known_intermixed_args(self, args: Sequence[str] | None, namespace: _N) -> tuple[_N, list[str]]: ... + @overload + def parse_known_intermixed_args(self, *, namespace: _N) -> tuple[_N, list[str]]: ... + # undocumented + def _get_optional_actions(self) -> list[Action]: ... + def _get_positional_actions(self) -> list[Action]: ... + if sys.version_info >= (3, 12): + def _parse_known_args( + self, arg_strings: list[str], namespace: Namespace, intermixed: bool + ) -> tuple[Namespace, list[str]]: ... + else: + def _parse_known_args(self, arg_strings: list[str], namespace: Namespace) -> tuple[Namespace, list[str]]: ... + + def _read_args_from_files(self, arg_strings: list[str]) -> list[str]: ... + def _match_argument(self, action: Action, arg_strings_pattern: str) -> int: ... + def _match_arguments_partial(self, actions: Sequence[Action], arg_strings_pattern: str) -> list[int]: ... + def _parse_optional(self, arg_string: str) -> tuple[Action | None, str, str | None] | None: ... + def _get_option_tuples(self, option_string: str) -> list[tuple[Action, str, str | None]]: ... + def _get_nargs_pattern(self, action: Action) -> str: ... + def _get_values(self, action: Action, arg_strings: list[str]) -> Any: ... + def _get_value(self, action: Action, arg_string: str) -> Any: ... + def _check_value(self, action: Action, value: Any) -> None: ... + def _get_formatter(self) -> HelpFormatter: ... + def _print_message(self, message: str, file: SupportsWrite[str] | None = None) -> None: ... + +class HelpFormatter: + # undocumented + _prog: str + _indent_increment: int + _max_help_position: int + _width: int + _current_indent: int + _level: int + _action_max_length: int + _root_section: _Section + _current_section: _Section + _whitespace_matcher: Pattern[str] + _long_break_matcher: Pattern[str] + + class _Section: + formatter: HelpFormatter + heading: str | None + parent: Self | None + items: list[tuple[Callable[..., str], Iterable[Any]]] + def __init__(self, formatter: HelpFormatter, parent: Self | None, heading: str | None = None) -> None: ... + def format_help(self) -> str: ... + + if sys.version_info >= (3, 14): + def __init__( + self, prog: str, indent_increment: int = 2, max_help_position: int = 24, width: int | None = None, color: bool = True + ) -> None: ... + else: + def __init__( + self, prog: str, indent_increment: int = 2, max_help_position: int = 24, width: int | None = None + ) -> None: ... + + def _indent(self) -> None: ... + def _dedent(self) -> None: ... + def _add_item(self, func: Callable[..., str], args: Iterable[Any]) -> None: ... + def start_section(self, heading: str | None) -> None: ... + def end_section(self) -> None: ... + def add_text(self, text: str | None) -> None: ... + def add_usage( + self, usage: str | None, actions: Iterable[Action], groups: Iterable[_MutuallyExclusiveGroup], prefix: str | None = None + ) -> None: ... + def add_argument(self, action: Action) -> None: ... + def add_arguments(self, actions: Iterable[Action]) -> None: ... + def format_help(self) -> str: ... + def _join_parts(self, part_strings: Iterable[str]) -> str: ... 
+ def _format_usage( + self, usage: str | None, actions: Iterable[Action], groups: Iterable[_MutuallyExclusiveGroup], prefix: str | None + ) -> str: ... + def _format_actions_usage(self, actions: Iterable[Action], groups: Iterable[_MutuallyExclusiveGroup]) -> str: ... + def _format_text(self, text: str) -> str: ... + def _format_action(self, action: Action) -> str: ... + def _format_action_invocation(self, action: Action) -> str: ... + def _metavar_formatter(self, action: Action, default_metavar: str) -> Callable[[int], tuple[str, ...]]: ... + def _format_args(self, action: Action, default_metavar: str) -> str: ... + def _expand_help(self, action: Action) -> str: ... + def _iter_indented_subactions(self, action: Action) -> Generator[Action, None, None]: ... + def _split_lines(self, text: str, width: int) -> list[str]: ... + def _fill_text(self, text: str, width: int, indent: str) -> str: ... + def _get_help_string(self, action: Action) -> str | None: ... + def _get_default_metavar_for_optional(self, action: Action) -> str: ... + def _get_default_metavar_for_positional(self, action: Action) -> str: ... + +class RawDescriptionHelpFormatter(HelpFormatter): ... +class RawTextHelpFormatter(RawDescriptionHelpFormatter): ... +class ArgumentDefaultsHelpFormatter(HelpFormatter): ... +class MetavarTypeHelpFormatter(HelpFormatter): ... + +class Action(_AttributeHolder): + option_strings: Sequence[str] + dest: str + nargs: int | str | None + const: Any + default: Any + type: _ActionType | None + choices: Iterable[Any] | None + required: bool + help: str | None + metavar: str | tuple[str, ...] | None + if sys.version_info >= (3, 13): + def __init__( + self, + option_strings: Sequence[str], + dest: str, + nargs: int | str | None = None, + const: _T | None = None, + default: _T | str | None = None, + type: Callable[[str], _T] | FileType | None = None, + choices: Iterable[_T] | None = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, + deprecated: bool = False, + ) -> None: ... + else: + def __init__( + self, + option_strings: Sequence[str], + dest: str, + nargs: int | str | None = None, + const: _T | None = None, + default: _T | str | None = None, + type: Callable[[str], _T] | FileType | None = None, + choices: Iterable[_T] | None = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, + ) -> None: ... + + def __call__( + self, parser: ArgumentParser, namespace: Namespace, values: str | Sequence[Any] | None, option_string: str | None = None + ) -> None: ... + def format_usage(self) -> str: ... + +if sys.version_info >= (3, 12): + class BooleanOptionalAction(Action): + if sys.version_info >= (3, 14): + def __init__( + self, + option_strings: Sequence[str], + dest: str, + default: bool | None = None, + required: bool = False, + help: str | None = None, + deprecated: bool = False, + ) -> None: ... + elif sys.version_info >= (3, 13): + @overload + def __init__( + self, + option_strings: Sequence[str], + dest: str, + default: bool | None = None, + *, + required: bool = False, + help: str | None = None, + deprecated: bool = False, + ) -> None: ... 
+ @overload + @deprecated("The `type`, `choices`, and `metavar` parameters are ignored and will be removed in Python 3.14.") + def __init__( + self, + option_strings: Sequence[str], + dest: str, + default: _T | bool | None = None, + type: Callable[[str], _T] | FileType | None = sentinel, + choices: Iterable[_T] | None = sentinel, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = sentinel, + deprecated: bool = False, + ) -> None: ... + else: + @overload + def __init__( + self, + option_strings: Sequence[str], + dest: str, + default: bool | None = None, + *, + required: bool = False, + help: str | None = None, + ) -> None: ... + @overload + @deprecated("The `type`, `choices`, and `metavar` parameters are ignored and will be removed in Python 3.14.") + def __init__( + self, + option_strings: Sequence[str], + dest: str, + default: _T | bool | None = None, + type: Callable[[str], _T] | FileType | None = sentinel, + choices: Iterable[_T] | None = sentinel, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = sentinel, + ) -> None: ... + +else: + class BooleanOptionalAction(Action): + @overload + def __init__( + self, + option_strings: Sequence[str], + dest: str, + default: bool | None = None, + *, + required: bool = False, + help: str | None = None, + ) -> None: ... + @overload + @deprecated("The `type`, `choices`, and `metavar` parameters are ignored and will be removed in Python 3.14.") + def __init__( + self, + option_strings: Sequence[str], + dest: str, + default: _T | bool | None = None, + type: Callable[[str], _T] | FileType | None = None, + choices: Iterable[_T] | None = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, + ) -> None: ... + +class Namespace(_AttributeHolder): + def __init__(self, **kwargs: Any) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def __setattr__(self, name: str, value: Any, /) -> None: ... + def __contains__(self, key: str) -> bool: ... + def __eq__(self, other: object) -> bool: ... + __hash__: ClassVar[None] # type: ignore[assignment] + +if sys.version_info >= (3, 14): + @deprecated("Deprecated since Python 3.14. Open files after parsing arguments instead.") + class FileType: + # undocumented + _mode: str + _bufsize: int + _encoding: str | None + _errors: str | None + def __init__( + self, mode: str = "r", bufsize: int = -1, encoding: str | None = None, errors: str | None = None + ) -> None: ... + def __call__(self, string: str) -> IO[Any]: ... + +else: + class FileType: + # undocumented + _mode: str + _bufsize: int + _encoding: str | None + _errors: str | None + def __init__( + self, mode: str = "r", bufsize: int = -1, encoding: str | None = None, errors: str | None = None + ) -> None: ... + def __call__(self, string: str) -> IO[Any]: ... + +# undocumented +class _ArgumentGroup(_ActionsContainer): + title: str | None + _group_actions: list[Action] + if sys.version_info >= (3, 14): + @overload + def __init__( + self, + container: _ActionsContainer, + title: str | None = None, + description: str | None = None, + *, + argument_default: Any = ..., + conflict_handler: str = ..., + ) -> None: ... 
+ @overload + @deprecated("Undocumented `prefix_chars` parameter is deprecated since Python 3.14.") + def __init__( + self, + container: _ActionsContainer, + title: str | None = None, + description: str | None = None, + *, + prefix_chars: str, + argument_default: Any = ..., + conflict_handler: str = ..., + ) -> None: ... + else: + def __init__( + self, + container: _ActionsContainer, + title: str | None = None, + description: str | None = None, + *, + prefix_chars: str = ..., + argument_default: Any = ..., + conflict_handler: str = ..., + ) -> None: ... + +# undocumented +class _MutuallyExclusiveGroup(_ArgumentGroup): + required: bool + _container: _ActionsContainer + def __init__(self, container: _ActionsContainer, required: bool = False) -> None: ... + +# undocumented +class _StoreAction(Action): ... + +# undocumented +class _StoreConstAction(Action): + if sys.version_info >= (3, 13): + def __init__( + self, + option_strings: Sequence[str], + dest: str, + const: Any | None = None, + default: Any = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, + deprecated: bool = False, + ) -> None: ... + elif sys.version_info >= (3, 11): + def __init__( + self, + option_strings: Sequence[str], + dest: str, + const: Any | None = None, + default: Any = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, + ) -> None: ... + else: + def __init__( + self, + option_strings: Sequence[str], + dest: str, + const: Any, + default: Any = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, + ) -> None: ... + +# undocumented +class _StoreTrueAction(_StoreConstAction): + if sys.version_info >= (3, 13): + def __init__( + self, + option_strings: Sequence[str], + dest: str, + default: bool = False, + required: bool = False, + help: str | None = None, + deprecated: bool = False, + ) -> None: ... + else: + def __init__( + self, option_strings: Sequence[str], dest: str, default: bool = False, required: bool = False, help: str | None = None + ) -> None: ... + +# undocumented +class _StoreFalseAction(_StoreConstAction): + if sys.version_info >= (3, 13): + def __init__( + self, + option_strings: Sequence[str], + dest: str, + default: bool = True, + required: bool = False, + help: str | None = None, + deprecated: bool = False, + ) -> None: ... + else: + def __init__( + self, option_strings: Sequence[str], dest: str, default: bool = True, required: bool = False, help: str | None = None + ) -> None: ... + +# undocumented +class _AppendAction(Action): ... + +# undocumented +class _ExtendAction(_AppendAction): ... + +# undocumented +class _AppendConstAction(Action): + if sys.version_info >= (3, 13): + def __init__( + self, + option_strings: Sequence[str], + dest: str, + const: Any | None = None, + default: Any = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, + deprecated: bool = False, + ) -> None: ... + elif sys.version_info >= (3, 11): + def __init__( + self, + option_strings: Sequence[str], + dest: str, + const: Any | None = None, + default: Any = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, + ) -> None: ... + else: + def __init__( + self, + option_strings: Sequence[str], + dest: str, + const: Any, + default: Any = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, + ) -> None: ... 
+ +# undocumented +class _CountAction(Action): + if sys.version_info >= (3, 13): + def __init__( + self, + option_strings: Sequence[str], + dest: str, + default: Any = None, + required: bool = False, + help: str | None = None, + deprecated: bool = False, + ) -> None: ... + else: + def __init__( + self, option_strings: Sequence[str], dest: str, default: Any = None, required: bool = False, help: str | None = None + ) -> None: ... + +# undocumented +class _HelpAction(Action): + if sys.version_info >= (3, 13): + def __init__( + self, + option_strings: Sequence[str], + dest: str = "==SUPPRESS==", + default: str = "==SUPPRESS==", + help: str | None = None, + deprecated: bool = False, + ) -> None: ... + else: + def __init__( + self, + option_strings: Sequence[str], + dest: str = "==SUPPRESS==", + default: str = "==SUPPRESS==", + help: str | None = None, + ) -> None: ... + +# undocumented +class _VersionAction(Action): + version: str | None + if sys.version_info >= (3, 13): + def __init__( + self, + option_strings: Sequence[str], + version: str | None = None, + dest: str = "==SUPPRESS==", + default: str = "==SUPPRESS==", + help: str | None = None, + deprecated: bool = False, + ) -> None: ... + elif sys.version_info >= (3, 11): + def __init__( + self, + option_strings: Sequence[str], + version: str | None = None, + dest: str = "==SUPPRESS==", + default: str = "==SUPPRESS==", + help: str | None = None, + ) -> None: ... + else: + def __init__( + self, + option_strings: Sequence[str], + version: str | None = None, + dest: str = "==SUPPRESS==", + default: str = "==SUPPRESS==", + help: str = "show program's version number and exit", + ) -> None: ... + +# undocumented +class _SubParsersAction(Action, Generic[_ArgumentParserT]): + _ChoicesPseudoAction: type[Any] # nested class + _prog_prefix: str + _parser_class: type[_ArgumentParserT] + _name_parser_map: dict[str, _ArgumentParserT] + choices: dict[str, _ArgumentParserT] + _choices_actions: list[Action] + def __init__( + self, + option_strings: Sequence[str], + prog: str, + parser_class: type[_ArgumentParserT], + dest: str = "==SUPPRESS==", + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, + ) -> None: ... + + # Note: `add_parser` accepts all kwargs of `ArgumentParser.__init__`. It also + # accepts its own `help` and `aliases` kwargs. + if sys.version_info >= (3, 14): + def add_parser( + self, + name: str, + *, + deprecated: bool = False, + help: str | None = ..., + aliases: Sequence[str] = ..., + # Kwargs from ArgumentParser constructor + prog: str | None = ..., + usage: str | None = ..., + description: str | None = ..., + epilog: str | None = ..., + parents: Sequence[_ArgumentParserT] = ..., + formatter_class: _FormatterClass = ..., + prefix_chars: str = ..., + fromfile_prefix_chars: str | None = ..., + argument_default: Any = ..., + conflict_handler: str = ..., + add_help: bool = True, + allow_abbrev: bool = True, + exit_on_error: bool = True, + suggest_on_error: bool = False, + color: bool = False, + **kwargs: Any, # Accepting any additional kwargs for custom parser classes + ) -> _ArgumentParserT: ... 
+ elif sys.version_info >= (3, 13): + def add_parser( + self, + name: str, + *, + deprecated: bool = False, + help: str | None = ..., + aliases: Sequence[str] = ..., + # Kwargs from ArgumentParser constructor + prog: str | None = ..., + usage: str | None = ..., + description: str | None = ..., + epilog: str | None = ..., + parents: Sequence[_ArgumentParserT] = ..., + formatter_class: _FormatterClass = ..., + prefix_chars: str = ..., + fromfile_prefix_chars: str | None = ..., + argument_default: Any = ..., + conflict_handler: str = ..., + add_help: bool = True, + allow_abbrev: bool = True, + exit_on_error: bool = True, + **kwargs: Any, # Accepting any additional kwargs for custom parser classes + ) -> _ArgumentParserT: ... + else: + def add_parser( + self, + name: str, + *, + help: str | None = ..., + aliases: Sequence[str] = ..., + # Kwargs from ArgumentParser constructor + prog: str | None = ..., + usage: str | None = ..., + description: str | None = ..., + epilog: str | None = ..., + parents: Sequence[_ArgumentParserT] = ..., + formatter_class: _FormatterClass = ..., + prefix_chars: str = ..., + fromfile_prefix_chars: str | None = ..., + argument_default: Any = ..., + conflict_handler: str = ..., + add_help: bool = True, + allow_abbrev: bool = True, + exit_on_error: bool = True, + **kwargs: Any, # Accepting any additional kwargs for custom parser classes + ) -> _ArgumentParserT: ... + + def _get_subactions(self) -> list[Action]: ... + +# undocumented +class ArgumentTypeError(Exception): ... + +# undocumented +def _get_action_name(argument: Action | None) -> str | None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/array.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/array.pyi new file mode 100644 index 0000000..a6b0344 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/array.pyi @@ -0,0 +1,106 @@ +import sys +from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite +from collections.abc import Iterable, MutableSequence +from types import GenericAlias +from typing import Any, ClassVar, Literal, SupportsIndex, TypeVar, overload +from typing_extensions import Self, TypeAlias, deprecated, disjoint_base + +_IntTypeCode: TypeAlias = Literal["b", "B", "h", "H", "i", "I", "l", "L", "q", "Q"] +_FloatTypeCode: TypeAlias = Literal["f", "d"] +if sys.version_info >= (3, 13): + _UnicodeTypeCode: TypeAlias = Literal["u", "w"] +else: + _UnicodeTypeCode: TypeAlias = Literal["u"] +_TypeCode: TypeAlias = _IntTypeCode | _FloatTypeCode | _UnicodeTypeCode + +_T = TypeVar("_T", int, float, str) + +typecodes: str + +@disjoint_base +class array(MutableSequence[_T]): + @property + def typecode(self) -> _TypeCode: ... + @property + def itemsize(self) -> int: ... + @overload + def __new__( + cls: type[array[int]], typecode: _IntTypeCode, initializer: bytes | bytearray | Iterable[int] = ..., / + ) -> array[int]: ... + @overload + def __new__( + cls: type[array[float]], typecode: _FloatTypeCode, initializer: bytes | bytearray | Iterable[float] = ..., / + ) -> array[float]: ... + if sys.version_info >= (3, 13): + @overload + def __new__( + cls: type[array[str]], typecode: Literal["w"], initializer: bytes | bytearray | Iterable[str] = ..., / + ) -> array[str]: ... + @overload + @deprecated("Deprecated since Python 3.3; will be removed in Python 3.16. Use 'w' typecode instead.") + def __new__( + cls: type[array[str]], typecode: Literal["u"], initializer: bytes | bytearray | Iterable[str] = ..., / + ) -> array[str]: ... 
+ else: + @overload + @deprecated("Deprecated since Python 3.3; will be removed in Python 3.16.") + def __new__( + cls: type[array[str]], typecode: Literal["u"], initializer: bytes | bytearray | Iterable[str] = ..., / + ) -> array[str]: ... + + @overload + def __new__(cls, typecode: str, initializer: Iterable[_T], /) -> Self: ... + @overload + def __new__(cls, typecode: str, initializer: bytes | bytearray = ..., /) -> Self: ... + def append(self, v: _T, /) -> None: ... + def buffer_info(self) -> tuple[int, int]: ... + def byteswap(self) -> None: ... + def count(self, v: _T, /) -> int: ... + def extend(self, bb: Iterable[_T], /) -> None: ... + def frombytes(self, buffer: ReadableBuffer, /) -> None: ... + def fromfile(self, f: SupportsRead[bytes], n: int, /) -> None: ... + def fromlist(self, list: list[_T], /) -> None: ... + def fromunicode(self, ustr: str, /) -> None: ... + if sys.version_info >= (3, 10): + def index(self, v: _T, start: int = 0, stop: int = sys.maxsize, /) -> int: ... + else: + def index(self, v: _T, /) -> int: ... # type: ignore[override] + + def insert(self, i: int, v: _T, /) -> None: ... + def pop(self, i: int = -1, /) -> _T: ... + def remove(self, v: _T, /) -> None: ... + def tobytes(self) -> bytes: ... + def tofile(self, f: SupportsWrite[bytes], /) -> None: ... + def tolist(self) -> list[_T]: ... + def tounicode(self) -> str: ... + + __hash__: ClassVar[None] # type: ignore[assignment] + def __contains__(self, value: object, /) -> bool: ... + def __len__(self) -> int: ... + @overload + def __getitem__(self, key: SupportsIndex, /) -> _T: ... + @overload + def __getitem__(self, key: slice, /) -> array[_T]: ... + @overload # type: ignore[override] + def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: ... + @overload + def __setitem__(self, key: slice, value: array[_T], /) -> None: ... + def __delitem__(self, key: SupportsIndex | slice, /) -> None: ... + def __add__(self, value: array[_T], /) -> array[_T]: ... + def __eq__(self, value: object, /) -> bool: ... + def __ge__(self, value: array[_T], /) -> bool: ... + def __gt__(self, value: array[_T], /) -> bool: ... + def __iadd__(self, value: array[_T], /) -> Self: ... # type: ignore[override] + def __imul__(self, value: int, /) -> Self: ... + def __le__(self, value: array[_T], /) -> bool: ... + def __lt__(self, value: array[_T], /) -> bool: ... + def __mul__(self, value: int, /) -> array[_T]: ... + def __rmul__(self, value: int, /) -> array[_T]: ... + def __copy__(self) -> array[_T]: ... + def __deepcopy__(self, unused: Any, /) -> array[_T]: ... + def __buffer__(self, flags: int, /) -> memoryview: ... + def __release_buffer__(self, buffer: memoryview, /) -> None: ... + if sys.version_info >= (3, 12): + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
+ +ArrayType = array diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ast.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ast.pyi new file mode 100644 index 0000000..e66e609 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ast.pyi @@ -0,0 +1,2099 @@ +import ast +import builtins +import os +import sys +import typing_extensions +from _ast import ( + PyCF_ALLOW_TOP_LEVEL_AWAIT as PyCF_ALLOW_TOP_LEVEL_AWAIT, + PyCF_ONLY_AST as PyCF_ONLY_AST, + PyCF_TYPE_COMMENTS as PyCF_TYPE_COMMENTS, +) +from _typeshed import ReadableBuffer, Unused +from collections.abc import Iterable, Iterator, Sequence +from typing import Any, ClassVar, Generic, Literal, TypedDict, TypeVar as _TypeVar, overload, type_check_only +from typing_extensions import Self, Unpack, deprecated, disjoint_base + +if sys.version_info >= (3, 13): + from _ast import PyCF_OPTIMIZED_AST as PyCF_OPTIMIZED_AST + +# Used for node end positions in constructor keyword arguments +_EndPositionT = typing_extensions.TypeVar("_EndPositionT", int, int | None, default=int | None) + +# Corresponds to the names in the `_attributes` class variable which is non-empty in certain AST nodes +@type_check_only +class _Attributes(TypedDict, Generic[_EndPositionT], total=False): + lineno: int + col_offset: int + end_lineno: _EndPositionT + end_col_offset: _EndPositionT + +# The various AST classes are implemented in C, and imported from _ast at runtime, +# but they consider themselves to live in the ast module, +# so we'll define the stubs in this file. +if sys.version_info >= (3, 12): + @disjoint_base + class AST: + __match_args__ = () + _attributes: ClassVar[tuple[str, ...]] + _fields: ClassVar[tuple[str, ...]] + if sys.version_info >= (3, 13): + _field_types: ClassVar[dict[str, Any]] + + if sys.version_info >= (3, 14): + def __replace__(self) -> Self: ... + +else: + class AST: + if sys.version_info >= (3, 10): + __match_args__ = () + _attributes: ClassVar[tuple[str, ...]] + _fields: ClassVar[tuple[str, ...]] + +class mod(AST): ... + +class Module(mod): + if sys.version_info >= (3, 10): + __match_args__ = ("body", "type_ignores") + body: list[stmt] + type_ignores: list[TypeIgnore] + if sys.version_info >= (3, 13): + def __init__(self, body: list[stmt] = ..., type_ignores: list[TypeIgnore] = ...) -> None: ... + else: + def __init__(self, body: list[stmt], type_ignores: list[TypeIgnore]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, body: list[stmt] = ..., type_ignores: list[TypeIgnore] = ...) -> Self: ... + +class Interactive(mod): + if sys.version_info >= (3, 10): + __match_args__ = ("body",) + body: list[stmt] + if sys.version_info >= (3, 13): + def __init__(self, body: list[stmt] = ...) -> None: ... + else: + def __init__(self, body: list[stmt]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, body: list[stmt] = ...) -> Self: ... + +class Expression(mod): + if sys.version_info >= (3, 10): + __match_args__ = ("body",) + body: expr + def __init__(self, body: expr) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, body: expr = ...) -> Self: ... + +class FunctionType(mod): + if sys.version_info >= (3, 10): + __match_args__ = ("argtypes", "returns") + argtypes: list[expr] + returns: expr + if sys.version_info >= (3, 13): + @overload + def __init__(self, argtypes: list[expr], returns: expr) -> None: ... + @overload + def __init__(self, argtypes: list[expr] = ..., *, returns: expr) -> None: ... 
+ else: + def __init__(self, argtypes: list[expr], returns: expr) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, argtypes: list[expr] = ..., returns: expr = ...) -> Self: ... + +class stmt(AST): + lineno: int + col_offset: int + end_lineno: int | None + end_col_offset: int | None + def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: ... + +class FunctionDef(stmt): + if sys.version_info >= (3, 12): + __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params") + elif sys.version_info >= (3, 10): + __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment") + name: str + args: arguments + body: list[stmt] + decorator_list: list[expr] + returns: expr | None + type_comment: str | None + if sys.version_info >= (3, 12): + type_params: list[type_param] + if sys.version_info >= (3, 13): + def __init__( + self, + name: str, + args: arguments, + body: list[stmt] = ..., + decorator_list: list[expr] = ..., + returns: expr | None = None, + type_comment: str | None = None, + type_params: list[type_param] = ..., + **kwargs: Unpack[_Attributes], + ) -> None: ... + elif sys.version_info >= (3, 12): + @overload + def __init__( + self, + name: str, + args: arguments, + body: list[stmt], + decorator_list: list[expr], + returns: expr | None, + type_comment: str | None, + type_params: list[type_param], + **kwargs: Unpack[_Attributes], + ) -> None: ... + @overload + def __init__( + self, + name: str, + args: arguments, + body: list[stmt], + decorator_list: list[expr], + returns: expr | None = None, + type_comment: str | None = None, + *, + type_params: list[type_param], + **kwargs: Unpack[_Attributes], + ) -> None: ... + else: + def __init__( + self, + name: str, + args: arguments, + body: list[stmt], + decorator_list: list[expr], + returns: expr | None = None, + type_comment: str | None = None, + **kwargs: Unpack[_Attributes], + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + name: str = ..., + args: arguments = ..., + body: list[stmt] = ..., + decorator_list: list[expr] = ..., + returns: expr | None = ..., + type_comment: str | None = ..., + type_params: list[type_param] = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +class AsyncFunctionDef(stmt): + if sys.version_info >= (3, 12): + __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params") + elif sys.version_info >= (3, 10): + __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment") + name: str + args: arguments + body: list[stmt] + decorator_list: list[expr] + returns: expr | None + type_comment: str | None + if sys.version_info >= (3, 12): + type_params: list[type_param] + if sys.version_info >= (3, 13): + def __init__( + self, + name: str, + args: arguments, + body: list[stmt] = ..., + decorator_list: list[expr] = ..., + returns: expr | None = None, + type_comment: str | None = None, + type_params: list[type_param] = ..., + **kwargs: Unpack[_Attributes], + ) -> None: ... + elif sys.version_info >= (3, 12): + @overload + def __init__( + self, + name: str, + args: arguments, + body: list[stmt], + decorator_list: list[expr], + returns: expr | None, + type_comment: str | None, + type_params: list[type_param], + **kwargs: Unpack[_Attributes], + ) -> None: ... 
+ @overload + def __init__( + self, + name: str, + args: arguments, + body: list[stmt], + decorator_list: list[expr], + returns: expr | None = None, + type_comment: str | None = None, + *, + type_params: list[type_param], + **kwargs: Unpack[_Attributes], + ) -> None: ... + else: + def __init__( + self, + name: str, + args: arguments, + body: list[stmt], + decorator_list: list[expr], + returns: expr | None = None, + type_comment: str | None = None, + **kwargs: Unpack[_Attributes], + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + name: str = ..., + args: arguments = ..., + body: list[stmt] = ..., + decorator_list: list[expr] = ..., + returns: expr | None = ..., + type_comment: str | None = ..., + type_params: list[type_param] = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +class ClassDef(stmt): + if sys.version_info >= (3, 12): + __match_args__ = ("name", "bases", "keywords", "body", "decorator_list", "type_params") + elif sys.version_info >= (3, 10): + __match_args__ = ("name", "bases", "keywords", "body", "decorator_list") + name: str + bases: list[expr] + keywords: list[keyword] + body: list[stmt] + decorator_list: list[expr] + if sys.version_info >= (3, 12): + type_params: list[type_param] + if sys.version_info >= (3, 13): + def __init__( + self, + name: str, + bases: list[expr] = ..., + keywords: list[keyword] = ..., + body: list[stmt] = ..., + decorator_list: list[expr] = ..., + type_params: list[type_param] = ..., + **kwargs: Unpack[_Attributes], + ) -> None: ... + elif sys.version_info >= (3, 12): + def __init__( + self, + name: str, + bases: list[expr], + keywords: list[keyword], + body: list[stmt], + decorator_list: list[expr], + type_params: list[type_param], + **kwargs: Unpack[_Attributes], + ) -> None: ... + else: + def __init__( + self, + name: str, + bases: list[expr], + keywords: list[keyword], + body: list[stmt], + decorator_list: list[expr], + **kwargs: Unpack[_Attributes], + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + name: str = ..., + bases: list[expr] = ..., + keywords: list[keyword] = ..., + body: list[stmt] = ..., + decorator_list: list[expr] = ..., + type_params: list[type_param] = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +class Return(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr | None + def __init__(self, value: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Delete(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("targets",) + targets: list[expr] + if sys.version_info >= (3, 13): + def __init__(self, targets: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, targets: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, targets: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Assign(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("targets", "value", "type_comment") + targets: list[expr] + value: expr + type_comment: str | None + if sys.version_info >= (3, 13): + @overload + def __init__( + self, targets: list[expr], value: expr, type_comment: str | None = None, **kwargs: Unpack[_Attributes] + ) -> None: ... 
+ @overload + def __init__( + self, targets: list[expr] = ..., *, value: expr, type_comment: str | None = None, **kwargs: Unpack[_Attributes] + ) -> None: ... + else: + def __init__( + self, targets: list[expr], value: expr, type_comment: str | None = None, **kwargs: Unpack[_Attributes] + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, targets: list[expr] = ..., value: expr = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +if sys.version_info >= (3, 12): + class TypeAlias(stmt): + __match_args__ = ("name", "type_params", "value") + name: Name + type_params: list[type_param] + value: expr + if sys.version_info >= (3, 13): + @overload + def __init__( + self, name: Name, type_params: list[type_param], value: expr, **kwargs: Unpack[_Attributes[int]] + ) -> None: ... + @overload + def __init__( + self, name: Name, type_params: list[type_param] = ..., *, value: expr, **kwargs: Unpack[_Attributes[int]] + ) -> None: ... + else: + def __init__( + self, name: Name, type_params: list[type_param], value: expr, **kwargs: Unpack[_Attributes[int]] + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( # type: ignore[override] + self, + *, + name: Name = ..., + type_params: list[type_param] = ..., + value: expr = ..., + **kwargs: Unpack[_Attributes[int]], + ) -> Self: ... + +class AugAssign(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("target", "op", "value") + target: Name | Attribute | Subscript + op: operator + value: expr + def __init__( + self, target: Name | Attribute | Subscript, op: operator, value: expr, **kwargs: Unpack[_Attributes] + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + target: Name | Attribute | Subscript = ..., + op: operator = ..., + value: expr = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +class AnnAssign(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("target", "annotation", "value", "simple") + target: Name | Attribute | Subscript + annotation: expr + value: expr | None + simple: int + @overload + def __init__( + self, + target: Name | Attribute | Subscript, + annotation: expr, + value: expr | None, + simple: int, + **kwargs: Unpack[_Attributes], + ) -> None: ... + @overload + def __init__( + self, + target: Name | Attribute | Subscript, + annotation: expr, + value: expr | None = None, + *, + simple: int, + **kwargs: Unpack[_Attributes], + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + target: Name | Attribute | Subscript = ..., + annotation: expr = ..., + value: expr | None = ..., + simple: int = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +class For(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("target", "iter", "body", "orelse", "type_comment") + target: expr + iter: expr + body: list[stmt] + orelse: list[stmt] + type_comment: str | None + if sys.version_info >= (3, 13): + def __init__( + self, + target: expr, + iter: expr, + body: list[stmt] = ..., + orelse: list[stmt] = ..., + type_comment: str | None = None, + **kwargs: Unpack[_Attributes], + ) -> None: ... + else: + def __init__( + self, + target: expr, + iter: expr, + body: list[stmt], + orelse: list[stmt], + type_comment: str | None = None, + **kwargs: Unpack[_Attributes], + ) -> None: ... 
+ + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + target: expr = ..., + iter: expr = ..., + body: list[stmt] = ..., + orelse: list[stmt] = ..., + type_comment: str | None = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +class AsyncFor(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("target", "iter", "body", "orelse", "type_comment") + target: expr + iter: expr + body: list[stmt] + orelse: list[stmt] + type_comment: str | None + if sys.version_info >= (3, 13): + def __init__( + self, + target: expr, + iter: expr, + body: list[stmt] = ..., + orelse: list[stmt] = ..., + type_comment: str | None = None, + **kwargs: Unpack[_Attributes], + ) -> None: ... + else: + def __init__( + self, + target: expr, + iter: expr, + body: list[stmt], + orelse: list[stmt], + type_comment: str | None = None, + **kwargs: Unpack[_Attributes], + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + target: expr = ..., + iter: expr = ..., + body: list[stmt] = ..., + orelse: list[stmt] = ..., + type_comment: str | None = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +class While(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("test", "body", "orelse") + test: expr + body: list[stmt] + orelse: list[stmt] + if sys.version_info >= (3, 13): + def __init__( + self, test: expr, body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes] + ) -> None: ... + else: + def __init__(self, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, test: expr = ..., body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class If(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("test", "body", "orelse") + test: expr + body: list[stmt] + orelse: list[stmt] + if sys.version_info >= (3, 13): + def __init__( + self, test: expr, body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes] + ) -> None: ... + else: + def __init__(self, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, test: expr = ..., body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class With(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("items", "body", "type_comment") + items: list[withitem] + body: list[stmt] + type_comment: str | None + if sys.version_info >= (3, 13): + def __init__( + self, + items: list[withitem] = ..., + body: list[stmt] = ..., + type_comment: str | None = None, + **kwargs: Unpack[_Attributes], + ) -> None: ... + else: + def __init__( + self, items: list[withitem], body: list[stmt], type_comment: str | None = None, **kwargs: Unpack[_Attributes] + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + items: list[withitem] = ..., + body: list[stmt] = ..., + type_comment: str | None = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +class AsyncWith(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("items", "body", "type_comment") + items: list[withitem] + body: list[stmt] + type_comment: str | None + if sys.version_info >= (3, 13): + def __init__( + self, + items: list[withitem] = ..., + body: list[stmt] = ..., + type_comment: str | None = None, + **kwargs: Unpack[_Attributes], + ) -> None: ... 
+ else: + def __init__( + self, items: list[withitem], body: list[stmt], type_comment: str | None = None, **kwargs: Unpack[_Attributes] + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + items: list[withitem] = ..., + body: list[stmt] = ..., + type_comment: str | None = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +class Raise(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("exc", "cause") + exc: expr | None + cause: expr | None + def __init__(self, exc: expr | None = None, cause: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, exc: expr | None = ..., cause: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Try(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("body", "handlers", "orelse", "finalbody") + body: list[stmt] + handlers: list[ExceptHandler] + orelse: list[stmt] + finalbody: list[stmt] + if sys.version_info >= (3, 13): + def __init__( + self, + body: list[stmt] = ..., + handlers: list[ExceptHandler] = ..., + orelse: list[stmt] = ..., + finalbody: list[stmt] = ..., + **kwargs: Unpack[_Attributes], + ) -> None: ... + else: + def __init__( + self, + body: list[stmt], + handlers: list[ExceptHandler], + orelse: list[stmt], + finalbody: list[stmt], + **kwargs: Unpack[_Attributes], + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + body: list[stmt] = ..., + handlers: list[ExceptHandler] = ..., + orelse: list[stmt] = ..., + finalbody: list[stmt] = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +if sys.version_info >= (3, 11): + class TryStar(stmt): + __match_args__ = ("body", "handlers", "orelse", "finalbody") + body: list[stmt] + handlers: list[ExceptHandler] + orelse: list[stmt] + finalbody: list[stmt] + if sys.version_info >= (3, 13): + def __init__( + self, + body: list[stmt] = ..., + handlers: list[ExceptHandler] = ..., + orelse: list[stmt] = ..., + finalbody: list[stmt] = ..., + **kwargs: Unpack[_Attributes], + ) -> None: ... + else: + def __init__( + self, + body: list[stmt], + handlers: list[ExceptHandler], + orelse: list[stmt], + finalbody: list[stmt], + **kwargs: Unpack[_Attributes], + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + body: list[stmt] = ..., + handlers: list[ExceptHandler] = ..., + orelse: list[stmt] = ..., + finalbody: list[stmt] = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +class Assert(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("test", "msg") + test: expr + msg: expr | None + def __init__(self, test: expr, msg: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, test: expr = ..., msg: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Import(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("names",) + names: list[alias] + if sys.version_info >= (3, 13): + def __init__(self, names: list[alias] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, names: list[alias], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, names: list[alias] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... 
+ +class ImportFrom(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("module", "names", "level") + module: str | None + names: list[alias] + level: int + if sys.version_info >= (3, 13): + @overload + def __init__(self, module: str | None, names: list[alias], level: int, **kwargs: Unpack[_Attributes]) -> None: ... + @overload + def __init__( + self, module: str | None = None, names: list[alias] = ..., *, level: int, **kwargs: Unpack[_Attributes] + ) -> None: ... + else: + @overload + def __init__(self, module: str | None, names: list[alias], level: int, **kwargs: Unpack[_Attributes]) -> None: ... + @overload + def __init__( + self, module: str | None = None, *, names: list[alias], level: int, **kwargs: Unpack[_Attributes] + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, module: str | None = ..., names: list[alias] = ..., level: int = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class Global(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("names",) + names: list[str] + if sys.version_info >= (3, 13): + def __init__(self, names: list[str] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, names: list[str], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, names: list[str] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Nonlocal(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("names",) + names: list[str] + if sys.version_info >= (3, 13): + def __init__(self, names: list[str] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, names: list[str], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, names: list[str] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Expr(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr + def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Pass(stmt): ... +class Break(stmt): ... +class Continue(stmt): ... + +class expr(AST): + lineno: int + col_offset: int + end_lineno: int | None + end_col_offset: int | None + def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: ... + +class BoolOp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("op", "values") + op: boolop + values: list[expr] + if sys.version_info >= (3, 13): + def __init__(self, op: boolop, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, op: boolop, values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, op: boolop = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class NamedExpr(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("target", "value") + target: Name + value: expr + def __init__(self, target: Name, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, target: Name = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... 
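Relating back to the ImportFrom stub earlier in this hunk: module is None and level counts the leading dots for relative imports, while absolute imports carry level=0. A small sketch (illustration only, not part of the committed stub):

import ast

rel = ast.parse("from . import util").body[0]
assert isinstance(rel, ast.ImportFrom)
print(rel.module, rel.level)    # None 1

absolute = ast.parse("from pkg.sub import thing").body[0]
assert isinstance(absolute, ast.ImportFrom)
print(absolute.module, absolute.level)  # pkg.sub 0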
+ +class BinOp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("left", "op", "right") + left: expr + op: operator + right: expr + def __init__(self, left: expr, op: operator, right: expr, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, left: expr = ..., op: operator = ..., right: expr = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class UnaryOp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("op", "operand") + op: unaryop + operand: expr + def __init__(self, op: unaryop, operand: expr, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, op: unaryop = ..., operand: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Lambda(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("args", "body") + args: arguments + body: expr + def __init__(self, args: arguments, body: expr, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, args: arguments = ..., body: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class IfExp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("test", "body", "orelse") + test: expr + body: expr + orelse: expr + def __init__(self, test: expr, body: expr, orelse: expr, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, test: expr = ..., body: expr = ..., orelse: expr = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class Dict(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("keys", "values") + keys: list[expr | None] + values: list[expr] + if sys.version_info >= (3, 13): + def __init__(self, keys: list[expr | None] = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, keys: list[expr | None], values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, keys: list[expr | None] = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class Set(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("elts",) + elts: list[expr] + if sys.version_info >= (3, 13): + def __init__(self, elts: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, elts: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, elts: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class ListComp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("elt", "generators") + elt: expr + generators: list[comprehension] + if sys.version_info >= (3, 13): + def __init__(self, elt: expr, generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class SetComp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("elt", "generators") + elt: expr + generators: list[comprehension] + if sys.version_info >= (3, 13): + def __init__(self, elt: expr, generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]) -> None: ... 
+ else: + def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class DictComp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("key", "value", "generators") + key: expr + value: expr + generators: list[comprehension] + if sys.version_info >= (3, 13): + def __init__( + self, key: expr, value: expr, generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] + ) -> None: ... + else: + def __init__(self, key: expr, value: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, key: expr = ..., value: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class GeneratorExp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("elt", "generators") + elt: expr + generators: list[comprehension] + if sys.version_info >= (3, 13): + def __init__(self, elt: expr, generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class Await(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr + def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Yield(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr | None + def __init__(self, value: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class YieldFrom(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr + def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Compare(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("left", "ops", "comparators") + left: expr + ops: list[cmpop] + comparators: list[expr] + if sys.version_info >= (3, 13): + def __init__( + self, left: expr, ops: list[cmpop] = ..., comparators: list[expr] = ..., **kwargs: Unpack[_Attributes] + ) -> None: ... + else: + def __init__(self, left: expr, ops: list[cmpop], comparators: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, left: expr = ..., ops: list[cmpop] = ..., comparators: list[expr] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class Call(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("func", "args", "keywords") + func: expr + args: list[expr] + keywords: list[keyword] + if sys.version_info >= (3, 13): + def __init__( + self, func: expr, args: list[expr] = ..., keywords: list[keyword] = ..., **kwargs: Unpack[_Attributes] + ) -> None: ... 
+ else: + def __init__(self, func: expr, args: list[expr], keywords: list[keyword], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, func: expr = ..., args: list[expr] = ..., keywords: list[keyword] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class FormattedValue(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value", "conversion", "format_spec") + value: expr + conversion: int + format_spec: expr | None + def __init__(self, value: expr, conversion: int, format_spec: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, value: expr = ..., conversion: int = ..., format_spec: expr | None = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class JoinedStr(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("values",) + values: list[expr] + if sys.version_info >= (3, 13): + def __init__(self, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +if sys.version_info >= (3, 14): + class TemplateStr(expr): + __match_args__ = ("values",) + values: list[expr] + def __init__(self, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + def __replace__(self, *, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + + class Interpolation(expr): + __match_args__ = ("value", "str", "conversion", "format_spec") + value: expr + str: builtins.str + conversion: int + format_spec: expr | None = None + def __init__( + self, + value: expr = ..., + str: builtins.str = ..., + conversion: int = ..., + format_spec: expr | None = ..., + **kwargs: Unpack[_Attributes], + ) -> None: ... + def __replace__( + self, + *, + value: expr = ..., + str: builtins.str = ..., + conversion: int = ..., + format_spec: expr | None = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +if sys.version_info >= (3, 10): + from types import EllipsisType + + _ConstantValue: typing_extensions.TypeAlias = str | bytes | bool | int | float | complex | None | EllipsisType +else: + # Rely on builtins.ellipsis + _ConstantValue: typing_extensions.TypeAlias = str | bytes | bool | int | float | complex | None | ellipsis # noqa: F821 + +class Constant(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value", "kind") + value: _ConstantValue + kind: str | None + if sys.version_info < (3, 14): + # Aliases for value, for backwards compatibility + @property + @deprecated("Removed in Python 3.14. Use `value` instead.") + def n(self) -> _ConstantValue: ... + @n.setter + @deprecated("Removed in Python 3.14. Use `value` instead.") + def n(self, value: _ConstantValue) -> None: ... + @property + @deprecated("Removed in Python 3.14. Use `value` instead.") + def s(self) -> _ConstantValue: ... + @s.setter + @deprecated("Removed in Python 3.14. Use `value` instead.") + def s(self, value: _ConstantValue) -> None: ... + + def __init__(self, value: _ConstantValue, kind: str | None = None, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, value: _ConstantValue = ..., kind: str | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... 
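As the Constant stub above spells out, the parsed literal lives on .value, while .n and .s survive only as deprecated aliases that the overlay marks as removed in 3.14. A short sketch (illustration only, not part of the committed stub):

import ast

assign = ast.parse("x = 42").body[0]
assert isinstance(assign, ast.Assign)
const = assign.value
assert isinstance(const, ast.Constant)
print(const.value, const.kind)  # 42 None  -- prefer .value over the deprecated .n / .s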
+ +class Attribute(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value", "attr", "ctx") + value: expr + attr: str + ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` + def __init__(self, value: expr, attr: str, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, value: expr = ..., attr: str = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class Subscript(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value", "slice", "ctx") + value: expr + slice: expr + ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` + def __init__(self, value: expr, slice: expr, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, value: expr = ..., slice: expr = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class Starred(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value", "ctx") + value: expr + ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` + def __init__(self, value: expr, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, value: expr = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Name(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("id", "ctx") + id: str + ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` + def __init__(self, id: str, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, id: str = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class List(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("elts", "ctx") + elts: list[expr] + ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` + if sys.version_info >= (3, 13): + def __init__(self, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, elts: list[expr], ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Tuple(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("elts", "ctx") + elts: list[expr] + ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` + dims: list[expr] + if sys.version_info >= (3, 13): + def __init__(self, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, elts: list[expr], ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +@deprecated("Deprecated since Python 3.9.") +class slice(AST): ... + +class Slice(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("lower", "upper", "step") + lower: expr | None + upper: expr | None + step: expr | None + def __init__( + self, lower: expr | None = None, upper: expr | None = None, step: expr | None = None, **kwargs: Unpack[_Attributes] + ) -> None: ... 
+ + if sys.version_info >= (3, 14): + def __replace__( + self, *, lower: expr | None = ..., upper: expr | None = ..., step: expr | None = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +@deprecated("Deprecated since Python 3.9. Use `ast.Tuple` instead.") +class ExtSlice(slice): + def __new__(cls, dims: Iterable[slice] = (), **kwargs: Unpack[_Attributes]) -> Tuple: ... # type: ignore[misc] + +@deprecated("Deprecated since Python 3.9. Use the index value directly instead.") +class Index(slice): + def __new__(cls, value: expr, **kwargs: Unpack[_Attributes]) -> expr: ... # type: ignore[misc] + +class expr_context(AST): ... + +@deprecated("Deprecated since Python 3.9. Unused in Python 3.") +class AugLoad(expr_context): ... + +@deprecated("Deprecated since Python 3.9. Unused in Python 3.") +class AugStore(expr_context): ... + +@deprecated("Deprecated since Python 3.9. Unused in Python 3.") +class Param(expr_context): ... + +@deprecated("Deprecated since Python 3.9. Unused in Python 3.") +class Suite(mod): ... + +class Load(expr_context): ... +class Store(expr_context): ... +class Del(expr_context): ... +class boolop(AST): ... +class And(boolop): ... +class Or(boolop): ... +class operator(AST): ... +class Add(operator): ... +class Sub(operator): ... +class Mult(operator): ... +class MatMult(operator): ... +class Div(operator): ... +class Mod(operator): ... +class Pow(operator): ... +class LShift(operator): ... +class RShift(operator): ... +class BitOr(operator): ... +class BitXor(operator): ... +class BitAnd(operator): ... +class FloorDiv(operator): ... +class unaryop(AST): ... +class Invert(unaryop): ... +class Not(unaryop): ... +class UAdd(unaryop): ... +class USub(unaryop): ... +class cmpop(AST): ... +class Eq(cmpop): ... +class NotEq(cmpop): ... +class Lt(cmpop): ... +class LtE(cmpop): ... +class Gt(cmpop): ... +class GtE(cmpop): ... +class Is(cmpop): ... +class IsNot(cmpop): ... +class In(cmpop): ... +class NotIn(cmpop): ... + +class comprehension(AST): + if sys.version_info >= (3, 10): + __match_args__ = ("target", "iter", "ifs", "is_async") + target: expr + iter: expr + ifs: list[expr] + is_async: int + if sys.version_info >= (3, 13): + @overload + def __init__(self, target: expr, iter: expr, ifs: list[expr], is_async: int) -> None: ... + @overload + def __init__(self, target: expr, iter: expr, ifs: list[expr] = ..., *, is_async: int) -> None: ... + else: + def __init__(self, target: expr, iter: expr, ifs: list[expr], is_async: int) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, target: expr = ..., iter: expr = ..., ifs: list[expr] = ..., is_async: int = ...) -> Self: ... + +class excepthandler(AST): + lineno: int + col_offset: int + end_lineno: int | None + end_col_offset: int | None + def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, lineno: int = ..., col_offset: int = ..., end_lineno: int | None = ..., end_col_offset: int | None = ... + ) -> Self: ... + +class ExceptHandler(excepthandler): + if sys.version_info >= (3, 10): + __match_args__ = ("type", "name", "body") + type: expr | None + name: str | None + body: list[stmt] + if sys.version_info >= (3, 13): + def __init__( + self, type: expr | None = None, name: str | None = None, body: list[stmt] = ..., **kwargs: Unpack[_Attributes] + ) -> None: ... + else: + @overload + def __init__(self, type: expr | None, name: str | None, body: list[stmt], **kwargs: Unpack[_Attributes]) -> None: ... 
+ @overload + def __init__( + self, type: expr | None = None, name: str | None = None, *, body: list[stmt], **kwargs: Unpack[_Attributes] + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, type: expr | None = ..., name: str | None = ..., body: list[stmt] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class arguments(AST): + if sys.version_info >= (3, 10): + __match_args__ = ("posonlyargs", "args", "vararg", "kwonlyargs", "kw_defaults", "kwarg", "defaults") + posonlyargs: list[arg] + args: list[arg] + vararg: arg | None + kwonlyargs: list[arg] + kw_defaults: list[expr | None] + kwarg: arg | None + defaults: list[expr] + if sys.version_info >= (3, 13): + def __init__( + self, + posonlyargs: list[arg] = ..., + args: list[arg] = ..., + vararg: arg | None = None, + kwonlyargs: list[arg] = ..., + kw_defaults: list[expr | None] = ..., + kwarg: arg | None = None, + defaults: list[expr] = ..., + ) -> None: ... + else: + @overload + def __init__( + self, + posonlyargs: list[arg], + args: list[arg], + vararg: arg | None, + kwonlyargs: list[arg], + kw_defaults: list[expr | None], + kwarg: arg | None, + defaults: list[expr], + ) -> None: ... + @overload + def __init__( + self, + posonlyargs: list[arg], + args: list[arg], + vararg: arg | None, + kwonlyargs: list[arg], + kw_defaults: list[expr | None], + kwarg: arg | None = None, + *, + defaults: list[expr], + ) -> None: ... + @overload + def __init__( + self, + posonlyargs: list[arg], + args: list[arg], + vararg: arg | None = None, + *, + kwonlyargs: list[arg], + kw_defaults: list[expr | None], + kwarg: arg | None = None, + defaults: list[expr], + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + posonlyargs: list[arg] = ..., + args: list[arg] = ..., + vararg: arg | None = ..., + kwonlyargs: list[arg] = ..., + kw_defaults: list[expr | None] = ..., + kwarg: arg | None = ..., + defaults: list[expr] = ..., + ) -> Self: ... + +class arg(AST): + lineno: int + col_offset: int + end_lineno: int | None + end_col_offset: int | None + if sys.version_info >= (3, 10): + __match_args__ = ("arg", "annotation", "type_comment") + arg: str + annotation: expr | None + type_comment: str | None + def __init__( + self, arg: str, annotation: expr | None = None, type_comment: str | None = None, **kwargs: Unpack[_Attributes] + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, arg: str = ..., annotation: expr | None = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class keyword(AST): + lineno: int + col_offset: int + end_lineno: int | None + end_col_offset: int | None + if sys.version_info >= (3, 10): + __match_args__ = ("arg", "value") + arg: str | None + value: expr + @overload + def __init__(self, arg: str | None, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... + @overload + def __init__(self, arg: str | None = None, *, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, arg: str | None = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class alias(AST): + name: str + asname: str | None + if sys.version_info >= (3, 10): + lineno: int + col_offset: int + end_lineno: int | None + end_col_offset: int | None + if sys.version_info >= (3, 10): + __match_args__ = ("name", "asname") + if sys.version_info >= (3, 10): + def __init__(self, name: str, asname: str | None = None, **kwargs: Unpack[_Attributes]) -> None: ... 
+ else: + def __init__(self, name: str, asname: str | None = None) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, name: str = ..., asname: str | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class withitem(AST): + if sys.version_info >= (3, 10): + __match_args__ = ("context_expr", "optional_vars") + context_expr: expr + optional_vars: expr | None + def __init__(self, context_expr: expr, optional_vars: expr | None = None) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, context_expr: expr = ..., optional_vars: expr | None = ...) -> Self: ... + +if sys.version_info >= (3, 10): + class pattern(AST): + lineno: int + col_offset: int + end_lineno: int + end_col_offset: int + def __init__(self, **kwargs: Unpack[_Attributes[int]]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, lineno: int = ..., col_offset: int = ..., end_lineno: int = ..., end_col_offset: int = ... + ) -> Self: ... + + class match_case(AST): + __match_args__ = ("pattern", "guard", "body") + pattern: ast.pattern + guard: expr | None + body: list[stmt] + if sys.version_info >= (3, 13): + def __init__(self, pattern: ast.pattern, guard: expr | None = None, body: list[stmt] = ...) -> None: ... + elif sys.version_info >= (3, 10): + @overload + def __init__(self, pattern: ast.pattern, guard: expr | None, body: list[stmt]) -> None: ... + @overload + def __init__(self, pattern: ast.pattern, guard: expr | None = None, *, body: list[stmt]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, pattern: ast.pattern = ..., guard: expr | None = ..., body: list[stmt] = ...) -> Self: ... + + class Match(stmt): + __match_args__ = ("subject", "cases") + subject: expr + cases: list[match_case] + if sys.version_info >= (3, 13): + def __init__(self, subject: expr, cases: list[match_case] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, subject: expr, cases: list[match_case], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, subject: expr = ..., cases: list[match_case] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + + class MatchValue(pattern): + __match_args__ = ("value",) + value: expr + def __init__(self, value: expr, **kwargs: Unpack[_Attributes[int]]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... + + class MatchSingleton(pattern): + __match_args__ = ("value",) + value: bool | None + def __init__(self, value: bool | None, **kwargs: Unpack[_Attributes[int]]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, value: bool | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... + + class MatchSequence(pattern): + __match_args__ = ("patterns",) + patterns: list[pattern] + if sys.version_info >= (3, 13): + def __init__(self, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> None: ... + else: + def __init__(self, patterns: list[pattern], **kwargs: Unpack[_Attributes[int]]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... 
+ + class MatchMapping(pattern): + __match_args__ = ("keys", "patterns", "rest") + keys: list[expr] + patterns: list[pattern] + rest: str | None + if sys.version_info >= (3, 13): + def __init__( + self, + keys: list[expr] = ..., + patterns: list[pattern] = ..., + rest: str | None = None, + **kwargs: Unpack[_Attributes[int]], + ) -> None: ... + else: + def __init__( + self, keys: list[expr], patterns: list[pattern], rest: str | None = None, **kwargs: Unpack[_Attributes[int]] + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + keys: list[expr] = ..., + patterns: list[pattern] = ..., + rest: str | None = ..., + **kwargs: Unpack[_Attributes[int]], + ) -> Self: ... + + class MatchClass(pattern): + __match_args__ = ("cls", "patterns", "kwd_attrs", "kwd_patterns") + cls: expr + patterns: list[pattern] + kwd_attrs: list[str] + kwd_patterns: list[pattern] + if sys.version_info >= (3, 13): + def __init__( + self, + cls: expr, + patterns: list[pattern] = ..., + kwd_attrs: list[str] = ..., + kwd_patterns: list[pattern] = ..., + **kwargs: Unpack[_Attributes[int]], + ) -> None: ... + else: + def __init__( + self, + cls: expr, + patterns: list[pattern], + kwd_attrs: list[str], + kwd_patterns: list[pattern], + **kwargs: Unpack[_Attributes[int]], + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + cls: expr = ..., + patterns: list[pattern] = ..., + kwd_attrs: list[str] = ..., + kwd_patterns: list[pattern] = ..., + **kwargs: Unpack[_Attributes[int]], + ) -> Self: ... + + class MatchStar(pattern): + __match_args__ = ("name",) + name: str | None + def __init__(self, name: str | None = None, **kwargs: Unpack[_Attributes[int]]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, name: str | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... + + class MatchAs(pattern): + __match_args__ = ("pattern", "name") + pattern: ast.pattern | None + name: str | None + def __init__( + self, pattern: ast.pattern | None = None, name: str | None = None, **kwargs: Unpack[_Attributes[int]] + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, pattern: ast.pattern | None = ..., name: str | None = ..., **kwargs: Unpack[_Attributes[int]] + ) -> Self: ... + + class MatchOr(pattern): + __match_args__ = ("patterns",) + patterns: list[pattern] + if sys.version_info >= (3, 13): + def __init__(self, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> None: ... + else: + def __init__(self, patterns: list[pattern], **kwargs: Unpack[_Attributes[int]]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... + +class type_ignore(AST): ... + +class TypeIgnore(type_ignore): + if sys.version_info >= (3, 10): + __match_args__ = ("lineno", "tag") + lineno: int + tag: str + def __init__(self, lineno: int, tag: str) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, lineno: int = ..., tag: str = ...) -> Self: ... + +if sys.version_info >= (3, 12): + class type_param(AST): + lineno: int + col_offset: int + end_lineno: int + end_col_offset: int + def __init__(self, **kwargs: Unpack[_Attributes[int]]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, **kwargs: Unpack[_Attributes[int]]) -> Self: ... 
+ + class TypeVar(type_param): + if sys.version_info >= (3, 13): + __match_args__ = ("name", "bound", "default_value") + else: + __match_args__ = ("name", "bound") + name: str + bound: expr | None + if sys.version_info >= (3, 13): + default_value: expr | None + def __init__( + self, name: str, bound: expr | None = None, default_value: expr | None = None, **kwargs: Unpack[_Attributes[int]] + ) -> None: ... + else: + def __init__(self, name: str, bound: expr | None = None, **kwargs: Unpack[_Attributes[int]]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + name: str = ..., + bound: expr | None = ..., + default_value: expr | None = ..., + **kwargs: Unpack[_Attributes[int]], + ) -> Self: ... + + class ParamSpec(type_param): + if sys.version_info >= (3, 13): + __match_args__ = ("name", "default_value") + else: + __match_args__ = ("name",) + name: str + if sys.version_info >= (3, 13): + default_value: expr | None + def __init__(self, name: str, default_value: expr | None = None, **kwargs: Unpack[_Attributes[int]]) -> None: ... + else: + def __init__(self, name: str, **kwargs: Unpack[_Attributes[int]]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, name: str = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]] + ) -> Self: ... + + class TypeVarTuple(type_param): + if sys.version_info >= (3, 13): + __match_args__ = ("name", "default_value") + else: + __match_args__ = ("name",) + name: str + if sys.version_info >= (3, 13): + default_value: expr | None + def __init__(self, name: str, default_value: expr | None = None, **kwargs: Unpack[_Attributes[int]]) -> None: ... + else: + def __init__(self, name: str, **kwargs: Unpack[_Attributes[int]]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, name: str = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]] + ) -> Self: ... + +if sys.version_info >= (3, 14): + @type_check_only + class _ABC(type): + def __init__(cls, *args: Unused) -> None: ... + +else: + class _ABC(type): + def __init__(cls, *args: Unused) -> None: ... + +if sys.version_info < (3, 14): + @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") + class Num(Constant, metaclass=_ABC): + def __new__(cls, n: complex, **kwargs: Unpack[_Attributes]) -> Constant: ... # type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] + + @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") + class Str(Constant, metaclass=_ABC): + def __new__(cls, s: str, **kwargs: Unpack[_Attributes]) -> Constant: ... # type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] + + @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") + class Bytes(Constant, metaclass=_ABC): + def __new__(cls, s: bytes, **kwargs: Unpack[_Attributes]) -> Constant: ... # type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] + + @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") + class NameConstant(Constant, metaclass=_ABC): + def __new__(cls, value: _ConstantValue, kind: str | None, **kwargs: Unpack[_Attributes]) -> Constant: ... # type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] + + @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") + class Ellipsis(Constant, metaclass=_ABC): + def __new__(cls, **kwargs: Unpack[_Attributes]) -> Constant: ... 
# type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] + +# everything below here is defined in ast.py + +_T = _TypeVar("_T", bound=AST) + +if sys.version_info >= (3, 13): + @overload + def parse( + source: _T, + filename: str | bytes | os.PathLike[Any] = "", + mode: Literal["exec", "eval", "func_type", "single"] = "exec", + *, + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + optimize: Literal[-1, 0, 1, 2] = -1, + ) -> _T: ... + @overload + def parse( + source: str | ReadableBuffer, + filename: str | bytes | os.PathLike[Any] = "", + mode: Literal["exec"] = "exec", + *, + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + optimize: Literal[-1, 0, 1, 2] = -1, + ) -> Module: ... + @overload + def parse( + source: str | ReadableBuffer, + filename: str | bytes | os.PathLike[Any], + mode: Literal["eval"], + *, + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + optimize: Literal[-1, 0, 1, 2] = -1, + ) -> Expression: ... + @overload + def parse( + source: str | ReadableBuffer, + filename: str | bytes | os.PathLike[Any], + mode: Literal["func_type"], + *, + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + optimize: Literal[-1, 0, 1, 2] = -1, + ) -> FunctionType: ... + @overload + def parse( + source: str | ReadableBuffer, + filename: str | bytes | os.PathLike[Any], + mode: Literal["single"], + *, + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + optimize: Literal[-1, 0, 1, 2] = -1, + ) -> Interactive: ... + @overload + def parse( + source: str | ReadableBuffer, + *, + mode: Literal["eval"], + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + optimize: Literal[-1, 0, 1, 2] = -1, + ) -> Expression: ... + @overload + def parse( + source: str | ReadableBuffer, + *, + mode: Literal["func_type"], + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + optimize: Literal[-1, 0, 1, 2] = -1, + ) -> FunctionType: ... + @overload + def parse( + source: str | ReadableBuffer, + *, + mode: Literal["single"], + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + optimize: Literal[-1, 0, 1, 2] = -1, + ) -> Interactive: ... + @overload + def parse( + source: str | ReadableBuffer, + filename: str | bytes | os.PathLike[Any] = "", + mode: str = "exec", + *, + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + optimize: Literal[-1, 0, 1, 2] = -1, + ) -> mod: ... + +else: + @overload + def parse( + source: _T, + filename: str | bytes | os.PathLike[Any] = "", + mode: Literal["exec", "eval", "func_type", "single"] = "exec", + *, + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + ) -> _T: ... + @overload + def parse( + source: str | ReadableBuffer, + filename: str | bytes | os.PathLike[Any] = "", + mode: Literal["exec"] = "exec", + *, + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + ) -> Module: ... + @overload + def parse( + source: str | ReadableBuffer, + filename: str | bytes | os.PathLike[Any], + mode: Literal["eval"], + *, + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + ) -> Expression: ... 
+ @overload + def parse( + source: str | ReadableBuffer, + filename: str | bytes | os.PathLike[Any], + mode: Literal["func_type"], + *, + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + ) -> FunctionType: ... + @overload + def parse( + source: str | ReadableBuffer, + filename: str | bytes | os.PathLike[Any], + mode: Literal["single"], + *, + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + ) -> Interactive: ... + @overload + def parse( + source: str | ReadableBuffer, + *, + mode: Literal["eval"], + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + ) -> Expression: ... + @overload + def parse( + source: str | ReadableBuffer, + *, + mode: Literal["func_type"], + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + ) -> FunctionType: ... + @overload + def parse( + source: str | ReadableBuffer, + *, + mode: Literal["single"], + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + ) -> Interactive: ... + @overload + def parse( + source: str | ReadableBuffer, + filename: str | bytes | os.PathLike[Any] = "", + mode: str = "exec", + *, + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + ) -> mod: ... + +def literal_eval(node_or_string: str | AST) -> Any: ... + +if sys.version_info >= (3, 13): + def dump( + node: AST, + annotate_fields: bool = True, + include_attributes: bool = False, + *, + indent: int | str | None = None, + show_empty: bool = False, + ) -> str: ... + +else: + def dump( + node: AST, annotate_fields: bool = True, include_attributes: bool = False, *, indent: int | str | None = None + ) -> str: ... + +def copy_location(new_node: _T, old_node: AST) -> _T: ... +def fix_missing_locations(node: _T) -> _T: ... +def increment_lineno(node: _T, n: int = 1) -> _T: ... +def iter_fields(node: AST) -> Iterator[tuple[str, Any]]: ... +def iter_child_nodes(node: AST) -> Iterator[AST]: ... +def get_docstring(node: AsyncFunctionDef | FunctionDef | ClassDef | Module, clean: bool = True) -> str | None: ... +def get_source_segment(source: str, node: AST, *, padded: bool = False) -> str | None: ... +def walk(node: AST) -> Iterator[AST]: ... + +if sys.version_info >= (3, 14): + def compare(left: AST, right: AST, /, *, compare_attributes: bool = False) -> bool: ... + +class NodeVisitor: + # All visit methods below can be overwritten by subclasses and return an + # arbitrary value, which is passed to the caller. + def visit(self, node: AST) -> Any: ... + def generic_visit(self, node: AST) -> Any: ... + # The following visit methods are not defined on NodeVisitor, but can + # be implemented by subclasses and are called during a visit if defined. + def visit_Module(self, node: Module) -> Any: ... + def visit_Interactive(self, node: Interactive) -> Any: ... + def visit_Expression(self, node: Expression) -> Any: ... + def visit_FunctionDef(self, node: FunctionDef) -> Any: ... + def visit_AsyncFunctionDef(self, node: AsyncFunctionDef) -> Any: ... + def visit_ClassDef(self, node: ClassDef) -> Any: ... + def visit_Return(self, node: Return) -> Any: ... + def visit_Delete(self, node: Delete) -> Any: ... + def visit_Assign(self, node: Assign) -> Any: ... + def visit_AugAssign(self, node: AugAssign) -> Any: ... + def visit_AnnAssign(self, node: AnnAssign) -> Any: ... + def visit_For(self, node: For) -> Any: ... + def visit_AsyncFor(self, node: AsyncFor) -> Any: ... 
+ def visit_While(self, node: While) -> Any: ... + def visit_If(self, node: If) -> Any: ... + def visit_With(self, node: With) -> Any: ... + def visit_AsyncWith(self, node: AsyncWith) -> Any: ... + def visit_Raise(self, node: Raise) -> Any: ... + def visit_Try(self, node: Try) -> Any: ... + def visit_Assert(self, node: Assert) -> Any: ... + def visit_Import(self, node: Import) -> Any: ... + def visit_ImportFrom(self, node: ImportFrom) -> Any: ... + def visit_Global(self, node: Global) -> Any: ... + def visit_Nonlocal(self, node: Nonlocal) -> Any: ... + def visit_Expr(self, node: Expr) -> Any: ... + def visit_Pass(self, node: Pass) -> Any: ... + def visit_Break(self, node: Break) -> Any: ... + def visit_Continue(self, node: Continue) -> Any: ... + def visit_Slice(self, node: Slice) -> Any: ... + def visit_BoolOp(self, node: BoolOp) -> Any: ... + def visit_BinOp(self, node: BinOp) -> Any: ... + def visit_UnaryOp(self, node: UnaryOp) -> Any: ... + def visit_Lambda(self, node: Lambda) -> Any: ... + def visit_IfExp(self, node: IfExp) -> Any: ... + def visit_Dict(self, node: Dict) -> Any: ... + def visit_Set(self, node: Set) -> Any: ... + def visit_ListComp(self, node: ListComp) -> Any: ... + def visit_SetComp(self, node: SetComp) -> Any: ... + def visit_DictComp(self, node: DictComp) -> Any: ... + def visit_GeneratorExp(self, node: GeneratorExp) -> Any: ... + def visit_Await(self, node: Await) -> Any: ... + def visit_Yield(self, node: Yield) -> Any: ... + def visit_YieldFrom(self, node: YieldFrom) -> Any: ... + def visit_Compare(self, node: Compare) -> Any: ... + def visit_Call(self, node: Call) -> Any: ... + def visit_FormattedValue(self, node: FormattedValue) -> Any: ... + def visit_JoinedStr(self, node: JoinedStr) -> Any: ... + def visit_Constant(self, node: Constant) -> Any: ... + def visit_NamedExpr(self, node: NamedExpr) -> Any: ... + def visit_TypeIgnore(self, node: TypeIgnore) -> Any: ... + def visit_Attribute(self, node: Attribute) -> Any: ... + def visit_Subscript(self, node: Subscript) -> Any: ... + def visit_Starred(self, node: Starred) -> Any: ... + def visit_Name(self, node: Name) -> Any: ... + def visit_List(self, node: List) -> Any: ... + def visit_Tuple(self, node: Tuple) -> Any: ... + def visit_Del(self, node: Del) -> Any: ... + def visit_Load(self, node: Load) -> Any: ... + def visit_Store(self, node: Store) -> Any: ... + def visit_And(self, node: And) -> Any: ... + def visit_Or(self, node: Or) -> Any: ... + def visit_Add(self, node: Add) -> Any: ... + def visit_BitAnd(self, node: BitAnd) -> Any: ... + def visit_BitOr(self, node: BitOr) -> Any: ... + def visit_BitXor(self, node: BitXor) -> Any: ... + def visit_Div(self, node: Div) -> Any: ... + def visit_FloorDiv(self, node: FloorDiv) -> Any: ... + def visit_LShift(self, node: LShift) -> Any: ... + def visit_Mod(self, node: Mod) -> Any: ... + def visit_Mult(self, node: Mult) -> Any: ... + def visit_MatMult(self, node: MatMult) -> Any: ... + def visit_Pow(self, node: Pow) -> Any: ... + def visit_RShift(self, node: RShift) -> Any: ... + def visit_Sub(self, node: Sub) -> Any: ... + def visit_Invert(self, node: Invert) -> Any: ... + def visit_Not(self, node: Not) -> Any: ... + def visit_UAdd(self, node: UAdd) -> Any: ... + def visit_USub(self, node: USub) -> Any: ... + def visit_Eq(self, node: Eq) -> Any: ... + def visit_Gt(self, node: Gt) -> Any: ... + def visit_GtE(self, node: GtE) -> Any: ... + def visit_In(self, node: In) -> Any: ... + def visit_Is(self, node: Is) -> Any: ... + def visit_IsNot(self, node: IsNot) -> Any: ... 
+ def visit_Lt(self, node: Lt) -> Any: ... + def visit_LtE(self, node: LtE) -> Any: ... + def visit_NotEq(self, node: NotEq) -> Any: ... + def visit_NotIn(self, node: NotIn) -> Any: ... + def visit_comprehension(self, node: comprehension) -> Any: ... + def visit_ExceptHandler(self, node: ExceptHandler) -> Any: ... + def visit_arguments(self, node: arguments) -> Any: ... + def visit_arg(self, node: arg) -> Any: ... + def visit_keyword(self, node: keyword) -> Any: ... + def visit_alias(self, node: alias) -> Any: ... + def visit_withitem(self, node: withitem) -> Any: ... + if sys.version_info >= (3, 10): + def visit_Match(self, node: Match) -> Any: ... + def visit_match_case(self, node: match_case) -> Any: ... + def visit_MatchValue(self, node: MatchValue) -> Any: ... + def visit_MatchSequence(self, node: MatchSequence) -> Any: ... + def visit_MatchSingleton(self, node: MatchSingleton) -> Any: ... + def visit_MatchStar(self, node: MatchStar) -> Any: ... + def visit_MatchMapping(self, node: MatchMapping) -> Any: ... + def visit_MatchClass(self, node: MatchClass) -> Any: ... + def visit_MatchAs(self, node: MatchAs) -> Any: ... + def visit_MatchOr(self, node: MatchOr) -> Any: ... + + if sys.version_info >= (3, 11): + def visit_TryStar(self, node: TryStar) -> Any: ... + + if sys.version_info >= (3, 12): + def visit_TypeVar(self, node: TypeVar) -> Any: ... + def visit_ParamSpec(self, node: ParamSpec) -> Any: ... + def visit_TypeVarTuple(self, node: TypeVarTuple) -> Any: ... + def visit_TypeAlias(self, node: TypeAlias) -> Any: ... + + # visit methods for deprecated nodes + def visit_ExtSlice(self, node: ExtSlice) -> Any: ... + def visit_Index(self, node: Index) -> Any: ... + def visit_Suite(self, node: Suite) -> Any: ... + def visit_AugLoad(self, node: AugLoad) -> Any: ... + def visit_AugStore(self, node: AugStore) -> Any: ... + def visit_Param(self, node: Param) -> Any: ... + + if sys.version_info < (3, 14): + @deprecated("Removed in Python 3.14. Use `visit_Constant` instead.") + def visit_Num(self, node: Num) -> Any: ... # type: ignore[deprecated] + @deprecated("Removed in Python 3.14. Use `visit_Constant` instead.") + def visit_Str(self, node: Str) -> Any: ... # type: ignore[deprecated] + @deprecated("Removed in Python 3.14. Use `visit_Constant` instead.") + def visit_Bytes(self, node: Bytes) -> Any: ... # type: ignore[deprecated] + @deprecated("Removed in Python 3.14. Use `visit_Constant` instead.") + def visit_NameConstant(self, node: NameConstant) -> Any: ... # type: ignore[deprecated] + @deprecated("Removed in Python 3.14. Use `visit_Constant` instead.") + def visit_Ellipsis(self, node: Ellipsis) -> Any: ... # type: ignore[deprecated] + +class NodeTransformer(NodeVisitor): + def generic_visit(self, node: AST) -> AST: ... + # TODO: Override the visit_* methods with better return types. + # The usual return type is AST | None, but Iterable[AST] + # is also allowed in some cases -- this needs to be mapped. + +def unparse(ast_obj: AST) -> str: ... + +if sys.version_info >= (3, 14): + def main(args: Sequence[str] | None = None) -> None: ... + +else: + def main() -> None: ... 
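The ast.pyi stub above only declares signatures; the following short sketch (illustrative only, not part of the vendored typeshed file) shows how the parse() overloads, literal_eval(), and the NodeVisitor hooks it describes behave at runtime. All names used are standard-library ast APIs; nothing here is specific to this repository.

import ast

source = "total = 1 + 2"

# mode="exec" (the default) returns an ast.Module, matching the first concrete overload of parse().
tree = ast.parse(source)
print(ast.dump(tree, indent=2))

# mode="eval" returns an ast.Expression, per the Literal["eval"] overload.
expr = ast.parse("1 + 2", mode="eval")

# literal_eval() accepts either a string or an already-parsed node.
print(ast.literal_eval("{'answer': 42}"))

# A NodeVisitor subclass overrides visit_<NodeType> methods; nodes without a
# matching method fall through to generic_visit(), which recurses into children.
class NameCollector(ast.NodeVisitor):
    def __init__(self) -> None:
        self.names: list[str] = []

    def visit_Name(self, node: ast.Name) -> None:
        self.names.append(node.id)
        self.generic_visit(node)

collector = NameCollector()
collector.visit(tree)
print(collector.names)  # ['total']
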
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asynchat.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asynchat.pyi new file mode 100644 index 0000000..79a70d1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asynchat.pyi @@ -0,0 +1,21 @@ +import asyncore +from abc import abstractmethod + +class simple_producer: + def __init__(self, data: bytes, buffer_size: int = 512) -> None: ... + def more(self) -> bytes: ... + +class async_chat(asyncore.dispatcher): + ac_in_buffer_size: int + ac_out_buffer_size: int + @abstractmethod + def collect_incoming_data(self, data: bytes) -> None: ... + @abstractmethod + def found_terminator(self) -> None: ... + def set_terminator(self, term: bytes | int | None) -> None: ... + def get_terminator(self) -> bytes | int | None: ... + def push(self, data: bytes) -> None: ... + def push_with_producer(self, producer: simple_producer) -> None: ... + def close_when_done(self) -> None: ... + def initiate_send(self) -> None: ... + def discard_buffers(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/__init__.pyi new file mode 100644 index 0000000..23cf57a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/__init__.pyi @@ -0,0 +1,1005 @@ +# This condition is so big, it's clearer to keep to platform condition in two blocks +# Can't NOQA on a specific line: https://github.com/plinss/flake8-noqa/issues/22 +import sys +from collections.abc import Awaitable, Coroutine, Generator +from typing import Any, TypeVar +from typing_extensions import TypeAlias + +# As at runtime, this depends on all submodules defining __all__ accurately. 
+from .base_events import * +from .coroutines import * +from .events import * +from .exceptions import * +from .futures import * +from .locks import * +from .protocols import * +from .queues import * +from .runners import * +from .streams import * +from .subprocess import * +from .tasks import * +from .threads import * +from .transports import * + +if sys.version_info >= (3, 14): + from .graph import * + +if sys.version_info >= (3, 11): + from .taskgroups import * + from .timeouts import * + +if sys.platform == "win32": + from .windows_events import * +else: + from .unix_events import * + +if sys.platform == "win32": + if sys.version_info >= (3, 14): + + __all__ = ( + "BaseEventLoop", # from base_events + "Server", # from base_events + "iscoroutinefunction", # from coroutines + "iscoroutine", # from coroutines + "AbstractEventLoop", # from events + "AbstractServer", # from events + "Handle", # from events + "TimerHandle", # from events + "get_event_loop_policy", # from events + "set_event_loop_policy", # from events + "get_event_loop", # from events + "set_event_loop", # from events + "new_event_loop", # from events + "_set_running_loop", # from events + "get_running_loop", # from events + "_get_running_loop", # from events + "BrokenBarrierError", # from exceptions + "CancelledError", # from exceptions + "InvalidStateError", # from exceptions + "TimeoutError", # from exceptions + "IncompleteReadError", # from exceptions + "LimitOverrunError", # from exceptions + "SendfileNotAvailableError", # from exceptions + "Future", # from futures + "wrap_future", # from futures + "isfuture", # from futures + "future_discard_from_awaited_by", # from futures + "future_add_to_awaited_by", # from futures + "capture_call_graph", # from graph + "format_call_graph", # from graph + "print_call_graph", # from graph + "FrameCallGraphEntry", # from graph + "FutureCallGraph", # from graph + "Lock", # from locks + "Event", # from locks + "Condition", # from locks + "Semaphore", # from locks + "BoundedSemaphore", # from locks + "Barrier", # from locks + "BaseProtocol", # from protocols + "Protocol", # from protocols + "DatagramProtocol", # from protocols + "SubprocessProtocol", # from protocols + "BufferedProtocol", # from protocols + "Runner", # from runners + "run", # from runners + "Queue", # from queues + "PriorityQueue", # from queues + "LifoQueue", # from queues + "QueueFull", # from queues + "QueueEmpty", # from queues + "QueueShutDown", # from queues + "StreamReader", # from streams + "StreamWriter", # from streams + "StreamReaderProtocol", # from streams + "open_connection", # from streams + "start_server", # from streams + "create_subprocess_exec", # from subprocess + "create_subprocess_shell", # from subprocess + "Task", # from tasks + "create_task", # from tasks + "FIRST_COMPLETED", # from tasks + "FIRST_EXCEPTION", # from tasks + "ALL_COMPLETED", # from tasks + "wait", # from tasks + "wait_for", # from tasks + "as_completed", # from tasks + "sleep", # from tasks + "gather", # from tasks + "shield", # from tasks + "ensure_future", # from tasks + "run_coroutine_threadsafe", # from tasks + "current_task", # from tasks + "all_tasks", # from tasks + "create_eager_task_factory", # from tasks + "eager_task_factory", # from tasks + "_register_task", # from tasks + "_unregister_task", # from tasks + "_enter_task", # from tasks + "_leave_task", # from tasks + "TaskGroup", # from taskgroups + "to_thread", # from threads + "Timeout", # from timeouts + "timeout", # from timeouts + "timeout_at", # from timeouts + 
"BaseTransport", # from transports + "ReadTransport", # from transports + "WriteTransport", # from transports + "Transport", # from transports + "DatagramTransport", # from transports + "SubprocessTransport", # from transports + "SelectorEventLoop", # from windows_events + "ProactorEventLoop", # from windows_events + "IocpProactor", # from windows_events + "_DefaultEventLoopPolicy", # from windows_events + "_WindowsSelectorEventLoopPolicy", # from windows_events + "_WindowsProactorEventLoopPolicy", # from windows_events + "EventLoop", # from windows_events + ) + elif sys.version_info >= (3, 13): + __all__ = ( + "BaseEventLoop", # from base_events + "Server", # from base_events + "iscoroutinefunction", # from coroutines + "iscoroutine", # from coroutines + "AbstractEventLoopPolicy", # from events + "AbstractEventLoop", # from events + "AbstractServer", # from events + "Handle", # from events + "TimerHandle", # from events + "get_event_loop_policy", # from events + "set_event_loop_policy", # from events + "get_event_loop", # from events + "set_event_loop", # from events + "new_event_loop", # from events + "get_child_watcher", # from events + "set_child_watcher", # from events + "_set_running_loop", # from events + "get_running_loop", # from events + "_get_running_loop", # from events + "BrokenBarrierError", # from exceptions + "CancelledError", # from exceptions + "InvalidStateError", # from exceptions + "TimeoutError", # from exceptions + "IncompleteReadError", # from exceptions + "LimitOverrunError", # from exceptions + "SendfileNotAvailableError", # from exceptions + "Future", # from futures + "wrap_future", # from futures + "isfuture", # from futures + "Lock", # from locks + "Event", # from locks + "Condition", # from locks + "Semaphore", # from locks + "BoundedSemaphore", # from locks + "Barrier", # from locks + "BaseProtocol", # from protocols + "Protocol", # from protocols + "DatagramProtocol", # from protocols + "SubprocessProtocol", # from protocols + "BufferedProtocol", # from protocols + "Runner", # from runners + "run", # from runners + "Queue", # from queues + "PriorityQueue", # from queues + "LifoQueue", # from queues + "QueueFull", # from queues + "QueueEmpty", # from queues + "QueueShutDown", # from queues + "StreamReader", # from streams + "StreamWriter", # from streams + "StreamReaderProtocol", # from streams + "open_connection", # from streams + "start_server", # from streams + "create_subprocess_exec", # from subprocess + "create_subprocess_shell", # from subprocess + "Task", # from tasks + "create_task", # from tasks + "FIRST_COMPLETED", # from tasks + "FIRST_EXCEPTION", # from tasks + "ALL_COMPLETED", # from tasks + "wait", # from tasks + "wait_for", # from tasks + "as_completed", # from tasks + "sleep", # from tasks + "gather", # from tasks + "shield", # from tasks + "ensure_future", # from tasks + "run_coroutine_threadsafe", # from tasks + "current_task", # from tasks + "all_tasks", # from tasks + "create_eager_task_factory", # from tasks + "eager_task_factory", # from tasks + "_register_task", # from tasks + "_unregister_task", # from tasks + "_enter_task", # from tasks + "_leave_task", # from tasks + "TaskGroup", # from taskgroups + "to_thread", # from threads + "Timeout", # from timeouts + "timeout", # from timeouts + "timeout_at", # from timeouts + "BaseTransport", # from transports + "ReadTransport", # from transports + "WriteTransport", # from transports + "Transport", # from transports + "DatagramTransport", # from transports + "SubprocessTransport", # from 
transports + "SelectorEventLoop", # from windows_events + "ProactorEventLoop", # from windows_events + "IocpProactor", # from windows_events + "DefaultEventLoopPolicy", # from windows_events + "WindowsSelectorEventLoopPolicy", # from windows_events + "WindowsProactorEventLoopPolicy", # from windows_events + "EventLoop", # from windows_events + ) + elif sys.version_info >= (3, 12): + __all__ = ( + "BaseEventLoop", # from base_events + "Server", # from base_events + "iscoroutinefunction", # from coroutines + "iscoroutine", # from coroutines + "AbstractEventLoopPolicy", # from events + "AbstractEventLoop", # from events + "AbstractServer", # from events + "Handle", # from events + "TimerHandle", # from events + "get_event_loop_policy", # from events + "set_event_loop_policy", # from events + "get_event_loop", # from events + "set_event_loop", # from events + "new_event_loop", # from events + "get_child_watcher", # from events + "set_child_watcher", # from events + "_set_running_loop", # from events + "get_running_loop", # from events + "_get_running_loop", # from events + "BrokenBarrierError", # from exceptions + "CancelledError", # from exceptions + "InvalidStateError", # from exceptions + "TimeoutError", # from exceptions + "IncompleteReadError", # from exceptions + "LimitOverrunError", # from exceptions + "SendfileNotAvailableError", # from exceptions + "Future", # from futures + "wrap_future", # from futures + "isfuture", # from futures + "Lock", # from locks + "Event", # from locks + "Condition", # from locks + "Semaphore", # from locks + "BoundedSemaphore", # from locks + "Barrier", # from locks + "BaseProtocol", # from protocols + "Protocol", # from protocols + "DatagramProtocol", # from protocols + "SubprocessProtocol", # from protocols + "BufferedProtocol", # from protocols + "Runner", # from runners + "run", # from runners + "Queue", # from queues + "PriorityQueue", # from queues + "LifoQueue", # from queues + "QueueFull", # from queues + "QueueEmpty", # from queues + "StreamReader", # from streams + "StreamWriter", # from streams + "StreamReaderProtocol", # from streams + "open_connection", # from streams + "start_server", # from streams + "create_subprocess_exec", # from subprocess + "create_subprocess_shell", # from subprocess + "Task", # from tasks + "create_task", # from tasks + "FIRST_COMPLETED", # from tasks + "FIRST_EXCEPTION", # from tasks + "ALL_COMPLETED", # from tasks + "wait", # from tasks + "wait_for", # from tasks + "as_completed", # from tasks + "sleep", # from tasks + "gather", # from tasks + "shield", # from tasks + "ensure_future", # from tasks + "run_coroutine_threadsafe", # from tasks + "current_task", # from tasks + "all_tasks", # from tasks + "create_eager_task_factory", # from tasks + "eager_task_factory", # from tasks + "_register_task", # from tasks + "_unregister_task", # from tasks + "_enter_task", # from tasks + "_leave_task", # from tasks + "TaskGroup", # from taskgroups + "to_thread", # from threads + "Timeout", # from timeouts + "timeout", # from timeouts + "timeout_at", # from timeouts + "BaseTransport", # from transports + "ReadTransport", # from transports + "WriteTransport", # from transports + "Transport", # from transports + "DatagramTransport", # from transports + "SubprocessTransport", # from transports + "SelectorEventLoop", # from windows_events + "ProactorEventLoop", # from windows_events + "IocpProactor", # from windows_events + "DefaultEventLoopPolicy", # from windows_events + "WindowsSelectorEventLoopPolicy", # from windows_events + 
"WindowsProactorEventLoopPolicy", # from windows_events + ) + elif sys.version_info >= (3, 11): + __all__ = ( + "BaseEventLoop", # from base_events + "Server", # from base_events + "iscoroutinefunction", # from coroutines + "iscoroutine", # from coroutines + "AbstractEventLoopPolicy", # from events + "AbstractEventLoop", # from events + "AbstractServer", # from events + "Handle", # from events + "TimerHandle", # from events + "get_event_loop_policy", # from events + "set_event_loop_policy", # from events + "get_event_loop", # from events + "set_event_loop", # from events + "new_event_loop", # from events + "get_child_watcher", # from events + "set_child_watcher", # from events + "_set_running_loop", # from events + "get_running_loop", # from events + "_get_running_loop", # from events + "BrokenBarrierError", # from exceptions + "CancelledError", # from exceptions + "InvalidStateError", # from exceptions + "TimeoutError", # from exceptions + "IncompleteReadError", # from exceptions + "LimitOverrunError", # from exceptions + "SendfileNotAvailableError", # from exceptions + "Future", # from futures + "wrap_future", # from futures + "isfuture", # from futures + "Lock", # from locks + "Event", # from locks + "Condition", # from locks + "Semaphore", # from locks + "BoundedSemaphore", # from locks + "Barrier", # from locks + "BaseProtocol", # from protocols + "Protocol", # from protocols + "DatagramProtocol", # from protocols + "SubprocessProtocol", # from protocols + "BufferedProtocol", # from protocols + "Runner", # from runners + "run", # from runners + "Queue", # from queues + "PriorityQueue", # from queues + "LifoQueue", # from queues + "QueueFull", # from queues + "QueueEmpty", # from queues + "StreamReader", # from streams + "StreamWriter", # from streams + "StreamReaderProtocol", # from streams + "open_connection", # from streams + "start_server", # from streams + "create_subprocess_exec", # from subprocess + "create_subprocess_shell", # from subprocess + "Task", # from tasks + "create_task", # from tasks + "FIRST_COMPLETED", # from tasks + "FIRST_EXCEPTION", # from tasks + "ALL_COMPLETED", # from tasks + "wait", # from tasks + "wait_for", # from tasks + "as_completed", # from tasks + "sleep", # from tasks + "gather", # from tasks + "shield", # from tasks + "ensure_future", # from tasks + "run_coroutine_threadsafe", # from tasks + "current_task", # from tasks + "all_tasks", # from tasks + "_register_task", # from tasks + "_unregister_task", # from tasks + "_enter_task", # from tasks + "_leave_task", # from tasks + "to_thread", # from threads + "Timeout", # from timeouts + "timeout", # from timeouts + "timeout_at", # from timeouts + "BaseTransport", # from transports + "ReadTransport", # from transports + "WriteTransport", # from transports + "Transport", # from transports + "DatagramTransport", # from transports + "SubprocessTransport", # from transports + "SelectorEventLoop", # from windows_events + "ProactorEventLoop", # from windows_events + "IocpProactor", # from windows_events + "DefaultEventLoopPolicy", # from windows_events + "WindowsSelectorEventLoopPolicy", # from windows_events + "WindowsProactorEventLoopPolicy", # from windows_events + ) + else: + __all__ = ( + "BaseEventLoop", # from base_events + "Server", # from base_events + "coroutine", # from coroutines + "iscoroutinefunction", # from coroutines + "iscoroutine", # from coroutines + "AbstractEventLoopPolicy", # from events + "AbstractEventLoop", # from events + "AbstractServer", # from events + "Handle", # from events + 
"TimerHandle", # from events + "get_event_loop_policy", # from events + "set_event_loop_policy", # from events + "get_event_loop", # from events + "set_event_loop", # from events + "new_event_loop", # from events + "get_child_watcher", # from events + "set_child_watcher", # from events + "_set_running_loop", # from events + "get_running_loop", # from events + "_get_running_loop", # from events + "CancelledError", # from exceptions + "InvalidStateError", # from exceptions + "TimeoutError", # from exceptions + "IncompleteReadError", # from exceptions + "LimitOverrunError", # from exceptions + "SendfileNotAvailableError", # from exceptions + "Future", # from futures + "wrap_future", # from futures + "isfuture", # from futures + "Lock", # from locks + "Event", # from locks + "Condition", # from locks + "Semaphore", # from locks + "BoundedSemaphore", # from locks + "BaseProtocol", # from protocols + "Protocol", # from protocols + "DatagramProtocol", # from protocols + "SubprocessProtocol", # from protocols + "BufferedProtocol", # from protocols + "run", # from runners + "Queue", # from queues + "PriorityQueue", # from queues + "LifoQueue", # from queues + "QueueFull", # from queues + "QueueEmpty", # from queues + "StreamReader", # from streams + "StreamWriter", # from streams + "StreamReaderProtocol", # from streams + "open_connection", # from streams + "start_server", # from streams + "create_subprocess_exec", # from subprocess + "create_subprocess_shell", # from subprocess + "Task", # from tasks + "create_task", # from tasks + "FIRST_COMPLETED", # from tasks + "FIRST_EXCEPTION", # from tasks + "ALL_COMPLETED", # from tasks + "wait", # from tasks + "wait_for", # from tasks + "as_completed", # from tasks + "sleep", # from tasks + "gather", # from tasks + "shield", # from tasks + "ensure_future", # from tasks + "run_coroutine_threadsafe", # from tasks + "current_task", # from tasks + "all_tasks", # from tasks + "_register_task", # from tasks + "_unregister_task", # from tasks + "_enter_task", # from tasks + "_leave_task", # from tasks + "to_thread", # from threads + "BaseTransport", # from transports + "ReadTransport", # from transports + "WriteTransport", # from transports + "Transport", # from transports + "DatagramTransport", # from transports + "SubprocessTransport", # from transports + "SelectorEventLoop", # from windows_events + "ProactorEventLoop", # from windows_events + "IocpProactor", # from windows_events + "DefaultEventLoopPolicy", # from windows_events + "WindowsSelectorEventLoopPolicy", # from windows_events + "WindowsProactorEventLoopPolicy", # from windows_events + ) +else: + if sys.version_info >= (3, 14): + __all__ = ( + "BaseEventLoop", # from base_events + "Server", # from base_events + "iscoroutinefunction", # from coroutines + "iscoroutine", # from coroutines + "AbstractEventLoop", # from events + "AbstractServer", # from events + "Handle", # from events + "TimerHandle", # from events + "get_event_loop_policy", # from events + "set_event_loop_policy", # from events + "get_event_loop", # from events + "set_event_loop", # from events + "new_event_loop", # from events + "_set_running_loop", # from events + "get_running_loop", # from events + "_get_running_loop", # from events + "BrokenBarrierError", # from exceptions + "CancelledError", # from exceptions + "InvalidStateError", # from exceptions + "TimeoutError", # from exceptions + "IncompleteReadError", # from exceptions + "LimitOverrunError", # from exceptions + "SendfileNotAvailableError", # from exceptions + "Future", # 
from futures + "wrap_future", # from futures + "isfuture", # from futures + "future_discard_from_awaited_by", # from futures + "future_add_to_awaited_by", # from futures + "capture_call_graph", # from graph + "format_call_graph", # from graph + "print_call_graph", # from graph + "FrameCallGraphEntry", # from graph + "FutureCallGraph", # from graph + "Lock", # from locks + "Event", # from locks + "Condition", # from locks + "Semaphore", # from locks + "BoundedSemaphore", # from locks + "Barrier", # from locks + "BaseProtocol", # from protocols + "Protocol", # from protocols + "DatagramProtocol", # from protocols + "SubprocessProtocol", # from protocols + "BufferedProtocol", # from protocols + "Runner", # from runners + "run", # from runners + "Queue", # from queues + "PriorityQueue", # from queues + "LifoQueue", # from queues + "QueueFull", # from queues + "QueueEmpty", # from queues + "QueueShutDown", # from queues + "StreamReader", # from streams + "StreamWriter", # from streams + "StreamReaderProtocol", # from streams + "open_connection", # from streams + "start_server", # from streams + "open_unix_connection", # from streams + "start_unix_server", # from streams + "create_subprocess_exec", # from subprocess + "create_subprocess_shell", # from subprocess + "Task", # from tasks + "create_task", # from tasks + "FIRST_COMPLETED", # from tasks + "FIRST_EXCEPTION", # from tasks + "ALL_COMPLETED", # from tasks + "wait", # from tasks + "wait_for", # from tasks + "as_completed", # from tasks + "sleep", # from tasks + "gather", # from tasks + "shield", # from tasks + "ensure_future", # from tasks + "run_coroutine_threadsafe", # from tasks + "current_task", # from tasks + "all_tasks", # from tasks + "create_eager_task_factory", # from tasks + "eager_task_factory", # from tasks + "_register_task", # from tasks + "_unregister_task", # from tasks + "_enter_task", # from tasks + "_leave_task", # from tasks + "TaskGroup", # from taskgroups + "to_thread", # from threads + "Timeout", # from timeouts + "timeout", # from timeouts + "timeout_at", # from timeouts + "BaseTransport", # from transports + "ReadTransport", # from transports + "WriteTransport", # from transports + "Transport", # from transports + "DatagramTransport", # from transports + "SubprocessTransport", # from transports + "SelectorEventLoop", # from unix_events + "EventLoop", # from unix_events + ) + elif sys.version_info >= (3, 13): + __all__ = ( + "BaseEventLoop", # from base_events + "Server", # from base_events + "iscoroutinefunction", # from coroutines + "iscoroutine", # from coroutines + "AbstractEventLoopPolicy", # from events + "AbstractEventLoop", # from events + "AbstractServer", # from events + "Handle", # from events + "TimerHandle", # from events + "get_event_loop_policy", # from events + "set_event_loop_policy", # from events + "get_event_loop", # from events + "set_event_loop", # from events + "new_event_loop", # from events + "get_child_watcher", # from events + "set_child_watcher", # from events + "_set_running_loop", # from events + "get_running_loop", # from events + "_get_running_loop", # from events + "BrokenBarrierError", # from exceptions + "CancelledError", # from exceptions + "InvalidStateError", # from exceptions + "TimeoutError", # from exceptions + "IncompleteReadError", # from exceptions + "LimitOverrunError", # from exceptions + "SendfileNotAvailableError", # from exceptions + "Future", # from futures + "wrap_future", # from futures + "isfuture", # from futures + "Lock", # from locks + "Event", # from locks + 
"Condition", # from locks + "Semaphore", # from locks + "BoundedSemaphore", # from locks + "Barrier", # from locks + "BaseProtocol", # from protocols + "Protocol", # from protocols + "DatagramProtocol", # from protocols + "SubprocessProtocol", # from protocols + "BufferedProtocol", # from protocols + "Runner", # from runners + "run", # from runners + "Queue", # from queues + "PriorityQueue", # from queues + "LifoQueue", # from queues + "QueueFull", # from queues + "QueueEmpty", # from queues + "QueueShutDown", # from queues + "StreamReader", # from streams + "StreamWriter", # from streams + "StreamReaderProtocol", # from streams + "open_connection", # from streams + "start_server", # from streams + "open_unix_connection", # from streams + "start_unix_server", # from streams + "create_subprocess_exec", # from subprocess + "create_subprocess_shell", # from subprocess + "Task", # from tasks + "create_task", # from tasks + "FIRST_COMPLETED", # from tasks + "FIRST_EXCEPTION", # from tasks + "ALL_COMPLETED", # from tasks + "wait", # from tasks + "wait_for", # from tasks + "as_completed", # from tasks + "sleep", # from tasks + "gather", # from tasks + "shield", # from tasks + "ensure_future", # from tasks + "run_coroutine_threadsafe", # from tasks + "current_task", # from tasks + "all_tasks", # from tasks + "create_eager_task_factory", # from tasks + "eager_task_factory", # from tasks + "_register_task", # from tasks + "_unregister_task", # from tasks + "_enter_task", # from tasks + "_leave_task", # from tasks + "TaskGroup", # from taskgroups + "to_thread", # from threads + "Timeout", # from timeouts + "timeout", # from timeouts + "timeout_at", # from timeouts + "BaseTransport", # from transports + "ReadTransport", # from transports + "WriteTransport", # from transports + "Transport", # from transports + "DatagramTransport", # from transports + "SubprocessTransport", # from transports + "SelectorEventLoop", # from unix_events + "AbstractChildWatcher", # from unix_events + "SafeChildWatcher", # from unix_events + "FastChildWatcher", # from unix_events + "PidfdChildWatcher", # from unix_events + "MultiLoopChildWatcher", # from unix_events + "ThreadedChildWatcher", # from unix_events + "DefaultEventLoopPolicy", # from unix_events + "EventLoop", # from unix_events + ) + elif sys.version_info >= (3, 12): + __all__ = ( + "BaseEventLoop", # from base_events + "Server", # from base_events + "iscoroutinefunction", # from coroutines + "iscoroutine", # from coroutines + "AbstractEventLoopPolicy", # from events + "AbstractEventLoop", # from events + "AbstractServer", # from events + "Handle", # from events + "TimerHandle", # from events + "get_event_loop_policy", # from events + "set_event_loop_policy", # from events + "get_event_loop", # from events + "set_event_loop", # from events + "new_event_loop", # from events + "get_child_watcher", # from events + "set_child_watcher", # from events + "_set_running_loop", # from events + "get_running_loop", # from events + "_get_running_loop", # from events + "BrokenBarrierError", # from exceptions + "CancelledError", # from exceptions + "InvalidStateError", # from exceptions + "TimeoutError", # from exceptions + "IncompleteReadError", # from exceptions + "LimitOverrunError", # from exceptions + "SendfileNotAvailableError", # from exceptions + "Future", # from futures + "wrap_future", # from futures + "isfuture", # from futures + "Lock", # from locks + "Event", # from locks + "Condition", # from locks + "Semaphore", # from locks + "BoundedSemaphore", # from locks + 
"Barrier", # from locks + "BaseProtocol", # from protocols + "Protocol", # from protocols + "DatagramProtocol", # from protocols + "SubprocessProtocol", # from protocols + "BufferedProtocol", # from protocols + "Runner", # from runners + "run", # from runners + "Queue", # from queues + "PriorityQueue", # from queues + "LifoQueue", # from queues + "QueueFull", # from queues + "QueueEmpty", # from queues + "StreamReader", # from streams + "StreamWriter", # from streams + "StreamReaderProtocol", # from streams + "open_connection", # from streams + "start_server", # from streams + "open_unix_connection", # from streams + "start_unix_server", # from streams + "create_subprocess_exec", # from subprocess + "create_subprocess_shell", # from subprocess + "Task", # from tasks + "create_task", # from tasks + "FIRST_COMPLETED", # from tasks + "FIRST_EXCEPTION", # from tasks + "ALL_COMPLETED", # from tasks + "wait", # from tasks + "wait_for", # from tasks + "as_completed", # from tasks + "sleep", # from tasks + "gather", # from tasks + "shield", # from tasks + "ensure_future", # from tasks + "run_coroutine_threadsafe", # from tasks + "current_task", # from tasks + "all_tasks", # from tasks + "create_eager_task_factory", # from tasks + "eager_task_factory", # from tasks + "_register_task", # from tasks + "_unregister_task", # from tasks + "_enter_task", # from tasks + "_leave_task", # from tasks + "TaskGroup", # from taskgroups + "to_thread", # from threads + "Timeout", # from timeouts + "timeout", # from timeouts + "timeout_at", # from timeouts + "BaseTransport", # from transports + "ReadTransport", # from transports + "WriteTransport", # from transports + "Transport", # from transports + "DatagramTransport", # from transports + "SubprocessTransport", # from transports + "SelectorEventLoop", # from unix_events + "AbstractChildWatcher", # from unix_events + "SafeChildWatcher", # from unix_events + "FastChildWatcher", # from unix_events + "PidfdChildWatcher", # from unix_events + "MultiLoopChildWatcher", # from unix_events + "ThreadedChildWatcher", # from unix_events + "DefaultEventLoopPolicy", # from unix_events + ) + elif sys.version_info >= (3, 11): + __all__ = ( + "BaseEventLoop", # from base_events + "Server", # from base_events + "iscoroutinefunction", # from coroutines + "iscoroutine", # from coroutines + "AbstractEventLoopPolicy", # from events + "AbstractEventLoop", # from events + "AbstractServer", # from events + "Handle", # from events + "TimerHandle", # from events + "get_event_loop_policy", # from events + "set_event_loop_policy", # from events + "get_event_loop", # from events + "set_event_loop", # from events + "new_event_loop", # from events + "get_child_watcher", # from events + "set_child_watcher", # from events + "_set_running_loop", # from events + "get_running_loop", # from events + "_get_running_loop", # from events + "BrokenBarrierError", # from exceptions + "CancelledError", # from exceptions + "InvalidStateError", # from exceptions + "TimeoutError", # from exceptions + "IncompleteReadError", # from exceptions + "LimitOverrunError", # from exceptions + "SendfileNotAvailableError", # from exceptions + "Future", # from futures + "wrap_future", # from futures + "isfuture", # from futures + "Lock", # from locks + "Event", # from locks + "Condition", # from locks + "Semaphore", # from locks + "BoundedSemaphore", # from locks + "Barrier", # from locks + "BaseProtocol", # from protocols + "Protocol", # from protocols + "DatagramProtocol", # from protocols + "SubprocessProtocol", # from 
protocols + "BufferedProtocol", # from protocols + "Runner", # from runners + "run", # from runners + "Queue", # from queues + "PriorityQueue", # from queues + "LifoQueue", # from queues + "QueueFull", # from queues + "QueueEmpty", # from queues + "StreamReader", # from streams + "StreamWriter", # from streams + "StreamReaderProtocol", # from streams + "open_connection", # from streams + "start_server", # from streams + "open_unix_connection", # from streams + "start_unix_server", # from streams + "create_subprocess_exec", # from subprocess + "create_subprocess_shell", # from subprocess + "Task", # from tasks + "create_task", # from tasks + "FIRST_COMPLETED", # from tasks + "FIRST_EXCEPTION", # from tasks + "ALL_COMPLETED", # from tasks + "wait", # from tasks + "wait_for", # from tasks + "as_completed", # from tasks + "sleep", # from tasks + "gather", # from tasks + "shield", # from tasks + "ensure_future", # from tasks + "run_coroutine_threadsafe", # from tasks + "current_task", # from tasks + "all_tasks", # from tasks + "_register_task", # from tasks + "_unregister_task", # from tasks + "_enter_task", # from tasks + "_leave_task", # from tasks + "to_thread", # from threads + "Timeout", # from timeouts + "timeout", # from timeouts + "timeout_at", # from timeouts + "BaseTransport", # from transports + "ReadTransport", # from transports + "WriteTransport", # from transports + "Transport", # from transports + "DatagramTransport", # from transports + "SubprocessTransport", # from transports + "SelectorEventLoop", # from unix_events + "AbstractChildWatcher", # from unix_events + "SafeChildWatcher", # from unix_events + "FastChildWatcher", # from unix_events + "PidfdChildWatcher", # from unix_events + "MultiLoopChildWatcher", # from unix_events + "ThreadedChildWatcher", # from unix_events + "DefaultEventLoopPolicy", # from unix_events + ) + else: + __all__ = ( + "BaseEventLoop", # from base_events + "Server", # from base_events + "coroutine", # from coroutines + "iscoroutinefunction", # from coroutines + "iscoroutine", # from coroutines + "AbstractEventLoopPolicy", # from events + "AbstractEventLoop", # from events + "AbstractServer", # from events + "Handle", # from events + "TimerHandle", # from events + "get_event_loop_policy", # from events + "set_event_loop_policy", # from events + "get_event_loop", # from events + "set_event_loop", # from events + "new_event_loop", # from events + "get_child_watcher", # from events + "set_child_watcher", # from events + "_set_running_loop", # from events + "get_running_loop", # from events + "_get_running_loop", # from events + "CancelledError", # from exceptions + "InvalidStateError", # from exceptions + "TimeoutError", # from exceptions + "IncompleteReadError", # from exceptions + "LimitOverrunError", # from exceptions + "SendfileNotAvailableError", # from exceptions + "Future", # from futures + "wrap_future", # from futures + "isfuture", # from futures + "Lock", # from locks + "Event", # from locks + "Condition", # from locks + "Semaphore", # from locks + "BoundedSemaphore", # from locks + "BaseProtocol", # from protocols + "Protocol", # from protocols + "DatagramProtocol", # from protocols + "SubprocessProtocol", # from protocols + "BufferedProtocol", # from protocols + "run", # from runners + "Queue", # from queues + "PriorityQueue", # from queues + "LifoQueue", # from queues + "QueueFull", # from queues + "QueueEmpty", # from queues + "StreamReader", # from streams + "StreamWriter", # from streams + "StreamReaderProtocol", # from streams + 
"open_connection", # from streams + "start_server", # from streams + "open_unix_connection", # from streams + "start_unix_server", # from streams + "create_subprocess_exec", # from subprocess + "create_subprocess_shell", # from subprocess + "Task", # from tasks + "create_task", # from tasks + "FIRST_COMPLETED", # from tasks + "FIRST_EXCEPTION", # from tasks + "ALL_COMPLETED", # from tasks + "wait", # from tasks + "wait_for", # from tasks + "as_completed", # from tasks + "sleep", # from tasks + "gather", # from tasks + "shield", # from tasks + "ensure_future", # from tasks + "run_coroutine_threadsafe", # from tasks + "current_task", # from tasks + "all_tasks", # from tasks + "_register_task", # from tasks + "_unregister_task", # from tasks + "_enter_task", # from tasks + "_leave_task", # from tasks + "to_thread", # from threads + "BaseTransport", # from transports + "ReadTransport", # from transports + "WriteTransport", # from transports + "Transport", # from transports + "DatagramTransport", # from transports + "SubprocessTransport", # from transports + "SelectorEventLoop", # from unix_events + "AbstractChildWatcher", # from unix_events + "SafeChildWatcher", # from unix_events + "FastChildWatcher", # from unix_events + "PidfdChildWatcher", # from unix_events + "MultiLoopChildWatcher", # from unix_events + "ThreadedChildWatcher", # from unix_events + "DefaultEventLoopPolicy", # from unix_events + ) + +_T_co = TypeVar("_T_co", covariant=True) + +# Aliases imported by multiple submodules in typeshed +if sys.version_info >= (3, 12): + _AwaitableLike: TypeAlias = Awaitable[_T_co] # noqa: Y047 + _CoroutineLike: TypeAlias = Coroutine[Any, Any, _T_co] # noqa: Y047 +else: + _AwaitableLike: TypeAlias = Generator[Any, None, _T_co] | Awaitable[_T_co] + _CoroutineLike: TypeAlias = Generator[Any, None, _T_co] | Coroutine[Any, Any, _T_co] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/base_events.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/base_events.pyi new file mode 100644 index 0000000..1f49321 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/base_events.pyi @@ -0,0 +1,488 @@ +import ssl +import sys +from _typeshed import FileDescriptorLike, ReadableBuffer, WriteableBuffer +from asyncio import _AwaitableLike, _CoroutineLike +from asyncio.events import AbstractEventLoop, AbstractServer, Handle, TimerHandle, _TaskFactory +from asyncio.futures import Future +from asyncio.protocols import BaseProtocol +from asyncio.tasks import Task +from asyncio.transports import BaseTransport, DatagramTransport, ReadTransport, SubprocessTransport, Transport, WriteTransport +from collections.abc import Callable, Iterable, Sequence +from concurrent.futures import Executor, ThreadPoolExecutor +from contextvars import Context +from socket import AddressFamily, AddressInfo, SocketKind, _Address, _RetAddress, socket +from typing import IO, Any, Literal, TypeVar, overload +from typing_extensions import TypeAlias, TypeVarTuple, Unpack + +# Keep asyncio.__all__ updated with any changes to __all__ here +__all__ = ("BaseEventLoop", "Server") + +_T = TypeVar("_T") +_Ts = TypeVarTuple("_Ts") +_ProtocolT = TypeVar("_ProtocolT", bound=BaseProtocol) +_Context: TypeAlias = dict[str, Any] +_ExceptionHandler: TypeAlias = Callable[[AbstractEventLoop, _Context], object] +_ProtocolFactory: TypeAlias = Callable[[], BaseProtocol] +_SSLContext: TypeAlias = bool | None | ssl.SSLContext + +class Server(AbstractServer): + if sys.version_info >= (3, 11): + 
def __init__( + self, + loop: AbstractEventLoop, + sockets: Iterable[socket], + protocol_factory: _ProtocolFactory, + ssl_context: _SSLContext, + backlog: int, + ssl_handshake_timeout: float | None, + ssl_shutdown_timeout: float | None = None, + ) -> None: ... + else: + def __init__( + self, + loop: AbstractEventLoop, + sockets: Iterable[socket], + protocol_factory: _ProtocolFactory, + ssl_context: _SSLContext, + backlog: int, + ssl_handshake_timeout: float | None, + ) -> None: ... + + if sys.version_info >= (3, 13): + def close_clients(self) -> None: ... + def abort_clients(self) -> None: ... + + def get_loop(self) -> AbstractEventLoop: ... + def is_serving(self) -> bool: ... + async def start_serving(self) -> None: ... + async def serve_forever(self) -> None: ... + @property + def sockets(self) -> tuple[socket, ...]: ... + def close(self) -> None: ... + async def wait_closed(self) -> None: ... + +class BaseEventLoop(AbstractEventLoop): + def run_forever(self) -> None: ... + def run_until_complete(self, future: _AwaitableLike[_T]) -> _T: ... + def stop(self) -> None: ... + def is_running(self) -> bool: ... + def is_closed(self) -> bool: ... + def close(self) -> None: ... + async def shutdown_asyncgens(self) -> None: ... + # Methods scheduling callbacks. All these return Handles. + def call_soon( + self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None + ) -> Handle: ... + def call_later( + self, delay: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None + ) -> TimerHandle: ... + def call_at( + self, when: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None + ) -> TimerHandle: ... + def time(self) -> float: ... + # Future methods + def create_future(self) -> Future[Any]: ... + # Tasks methods + if sys.version_info >= (3, 11): + def create_task(self, coro: _CoroutineLike[_T], *, name: object = None, context: Context | None = None) -> Task[_T]: ... + else: + def create_task(self, coro: _CoroutineLike[_T], *, name: object = None) -> Task[_T]: ... + + def set_task_factory(self, factory: _TaskFactory | None) -> None: ... + def get_task_factory(self) -> _TaskFactory | None: ... + # Methods for interacting with threads + def call_soon_threadsafe( + self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None + ) -> Handle: ... + def run_in_executor(self, executor: Executor | None, func: Callable[[Unpack[_Ts]], _T], *args: Unpack[_Ts]) -> Future[_T]: ... + def set_default_executor(self, executor: ThreadPoolExecutor) -> None: ... # type: ignore[override] + # Network I/O methods returning Futures. + async def getaddrinfo( + self, + host: bytes | str | None, + port: bytes | str | int | None, + *, + family: int = 0, + type: int = 0, + proto: int = 0, + flags: int = 0, + ) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... + async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = 0) -> tuple[str, str]: ... 
+ if sys.version_info >= (3, 12): + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + all_errors: bool = False, + ) -> tuple[Transport, _ProtocolT]: ... + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = None, + port: None = None, + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + all_errors: bool = False, + ) -> tuple[Transport, _ProtocolT]: ... + elif sys.version_info >= (3, 11): + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = None, + port: None = None, + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + else: + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = None, + port: None = None, + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + + if sys.version_info >= (3, 13): + # 3.13 added `keep_alive`. 
+ @overload + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: str | Sequence[str] | None = None, + port: int = ..., + *, + family: int = 0, + flags: int = 1, + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + keep_alive: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + @overload + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: None = None, + port: None = None, + *, + family: int = 0, + flags: int = 1, + sock: socket = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + keep_alive: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + elif sys.version_info >= (3, 11): + @overload + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: str | Sequence[str] | None = None, + port: int = ..., + *, + family: int = AddressFamily.AF_UNSPEC, + flags: int = AddressInfo.AI_PASSIVE, + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + @overload + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: None = None, + port: None = None, + *, + family: int = AddressFamily.AF_UNSPEC, + flags: int = AddressInfo.AI_PASSIVE, + sock: socket = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + else: + @overload + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: str | Sequence[str] | None = None, + port: int = ..., + *, + family: int = AddressFamily.AF_UNSPEC, + flags: int = AddressInfo.AI_PASSIVE, + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + @overload + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: None = None, + port: None = None, + *, + family: int = AddressFamily.AF_UNSPEC, + flags: int = AddressInfo.AI_PASSIVE, + sock: socket = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + + if sys.version_info >= (3, 11): + async def start_tls( + self, + transport: BaseTransport, + protocol: BaseProtocol, + sslcontext: ssl.SSLContext, + *, + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> Transport | None: ... 
+ async def connect_accepted_socket( + self, + protocol_factory: Callable[[], _ProtocolT], + sock: socket, + *, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + else: + async def start_tls( + self, + transport: BaseTransport, + protocol: BaseProtocol, + sslcontext: ssl.SSLContext, + *, + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> Transport | None: ... + async def connect_accepted_socket( + self, + protocol_factory: Callable[[], _ProtocolT], + sock: socket, + *, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + + async def sock_sendfile( + self, sock: socket, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool | None = True + ) -> int: ... + async def sendfile( + self, transport: WriteTransport, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool = True + ) -> int: ... + if sys.version_info >= (3, 11): + async def create_datagram_endpoint( # type: ignore[override] + self, + protocol_factory: Callable[[], _ProtocolT], + local_addr: tuple[str, int] | str | None = None, + remote_addr: tuple[str, int] | str | None = None, + *, + family: int = 0, + proto: int = 0, + flags: int = 0, + reuse_port: bool | None = None, + allow_broadcast: bool | None = None, + sock: socket | None = None, + ) -> tuple[DatagramTransport, _ProtocolT]: ... + else: + async def create_datagram_endpoint( + self, + protocol_factory: Callable[[], _ProtocolT], + local_addr: tuple[str, int] | str | None = None, + remote_addr: tuple[str, int] | str | None = None, + *, + family: int = 0, + proto: int = 0, + flags: int = 0, + reuse_address: bool | None = ..., + reuse_port: bool | None = None, + allow_broadcast: bool | None = None, + sock: socket | None = None, + ) -> tuple[DatagramTransport, _ProtocolT]: ... + # Pipes and subprocesses. + async def connect_read_pipe( + self, protocol_factory: Callable[[], _ProtocolT], pipe: Any + ) -> tuple[ReadTransport, _ProtocolT]: ... + async def connect_write_pipe( + self, protocol_factory: Callable[[], _ProtocolT], pipe: Any + ) -> tuple[WriteTransport, _ProtocolT]: ... + async def subprocess_shell( + self, + protocol_factory: Callable[[], _ProtocolT], + cmd: bytes | str, + *, + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + text: Literal[False] | None = None, + **kwargs: Any, + ) -> tuple[SubprocessTransport, _ProtocolT]: ... + async def subprocess_exec( + self, + protocol_factory: Callable[[], _ProtocolT], + program: Any, + *args: Any, + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, + universal_newlines: Literal[False] = False, + shell: Literal[False] = False, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + text: Literal[False] | None = None, + **kwargs: Any, + ) -> tuple[SubprocessTransport, _ProtocolT]: ... + def add_reader(self, fd: FileDescriptorLike, callback: Callable[[Unpack[_Ts]], Any], *args: Unpack[_Ts]) -> None: ... + def remove_reader(self, fd: FileDescriptorLike) -> bool: ... 
+ def add_writer(self, fd: FileDescriptorLike, callback: Callable[[Unpack[_Ts]], Any], *args: Unpack[_Ts]) -> None: ... + def remove_writer(self, fd: FileDescriptorLike) -> bool: ... + # The sock_* methods (and probably some others) are not actually implemented on + # BaseEventLoop, only on subclasses. We list them here for now for convenience. + async def sock_recv(self, sock: socket, nbytes: int) -> bytes: ... + async def sock_recv_into(self, sock: socket, buf: WriteableBuffer) -> int: ... + async def sock_sendall(self, sock: socket, data: ReadableBuffer) -> None: ... + async def sock_connect(self, sock: socket, address: _Address) -> None: ... + async def sock_accept(self, sock: socket) -> tuple[socket, _RetAddress]: ... + if sys.version_info >= (3, 11): + async def sock_recvfrom(self, sock: socket, bufsize: int) -> tuple[bytes, _RetAddress]: ... + async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = 0) -> tuple[int, _RetAddress]: ... + async def sock_sendto(self, sock: socket, data: ReadableBuffer, address: _Address) -> int: ... + # Signal handling. + def add_signal_handler(self, sig: int, callback: Callable[[Unpack[_Ts]], Any], *args: Unpack[_Ts]) -> None: ... + def remove_signal_handler(self, sig: int) -> bool: ... + # Error handlers. + def set_exception_handler(self, handler: _ExceptionHandler | None) -> None: ... + def get_exception_handler(self) -> _ExceptionHandler | None: ... + def default_exception_handler(self, context: _Context) -> None: ... + def call_exception_handler(self, context: _Context) -> None: ... + # Debug flag management. + def get_debug(self) -> bool: ... + def set_debug(self, enabled: bool) -> None: ... + if sys.version_info >= (3, 12): + async def shutdown_default_executor(self, timeout: float | None = None) -> None: ... + else: + async def shutdown_default_executor(self) -> None: ... + + def __del__(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/base_futures.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/base_futures.pyi new file mode 100644 index 0000000..2cd0f2e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/base_futures.pyi @@ -0,0 +1,17 @@ +from _asyncio import Future +from collections.abc import Callable, Sequence +from contextvars import Context +from typing import Any, Final +from typing_extensions import TypeIs + +from . import futures + +__all__ = () + +_PENDING: Final = "PENDING" # undocumented +_CANCELLED: Final = "CANCELLED" # undocumented +_FINISHED: Final = "FINISHED" # undocumented + +def isfuture(obj: object) -> TypeIs[Future[Any]]: ... +def _format_callbacks(cb: Sequence[tuple[Callable[[futures.Future[Any]], None], Context]]) -> str: ... # undocumented +def _future_repr_info(future: futures.Future[Any]) -> list[str]: ... # undocumented diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/base_subprocess.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/base_subprocess.pyi new file mode 100644 index 0000000..a5fe24e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/base_subprocess.pyi @@ -0,0 +1,63 @@ +import subprocess +from collections import deque +from collections.abc import Callable, Sequence +from typing import IO, Any +from typing_extensions import TypeAlias + +from . 
import events, futures, protocols, transports + +_File: TypeAlias = int | IO[Any] | None + +class BaseSubprocessTransport(transports.SubprocessTransport): + _closed: bool # undocumented + _protocol: protocols.SubprocessProtocol # undocumented + _loop: events.AbstractEventLoop # undocumented + _proc: subprocess.Popen[Any] | None # undocumented + _pid: int | None # undocumented + _returncode: int | None # undocumented + _exit_waiters: list[futures.Future[Any]] # undocumented + _pending_calls: deque[tuple[Callable[..., Any], tuple[Any, ...]]] # undocumented + _pipes: dict[int, _File] # undocumented + _finished: bool # undocumented + def __init__( + self, + loop: events.AbstractEventLoop, + protocol: protocols.SubprocessProtocol, + args: str | bytes | Sequence[str | bytes], + shell: bool, + stdin: _File, + stdout: _File, + stderr: _File, + bufsize: int, + waiter: futures.Future[Any] | None = None, + extra: Any | None = None, + **kwargs: Any, + ) -> None: ... + def _start( + self, + args: str | bytes | Sequence[str | bytes], + shell: bool, + stdin: _File, + stdout: _File, + stderr: _File, + bufsize: int, + **kwargs: Any, + ) -> None: ... # undocumented + def get_pid(self) -> int | None: ... # type: ignore[override] + def get_pipe_transport(self, fd: int) -> _File: ... # type: ignore[override] + def _check_proc(self) -> None: ... # undocumented + def send_signal(self, signal: int) -> None: ... + async def _connect_pipes(self, waiter: futures.Future[Any] | None) -> None: ... # undocumented + def _call(self, cb: Callable[..., object], *data: Any) -> None: ... # undocumented + def _pipe_connection_lost(self, fd: int, exc: BaseException | None) -> None: ... # undocumented + def _pipe_data_received(self, fd: int, data: bytes) -> None: ... # undocumented + def _process_exited(self, returncode: int) -> None: ... # undocumented + async def _wait(self) -> int: ... # undocumented + def _try_finish(self) -> None: ... # undocumented + def _call_connection_lost(self, exc: BaseException | None) -> None: ... # undocumented + def __del__(self) -> None: ... + +class WriteSubprocessPipeProto(protocols.BaseProtocol): # undocumented + def __init__(self, proc: BaseSubprocessTransport, fd: int) -> None: ... + +class ReadSubprocessPipeProto(WriteSubprocessPipeProto, protocols.Protocol): ... # undocumented diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/base_tasks.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/base_tasks.pyi new file mode 100644 index 0000000..42e952f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/base_tasks.pyi @@ -0,0 +1,9 @@ +from _typeshed import StrOrBytesPath +from types import FrameType +from typing import Any + +from . import tasks + +def _task_repr_info(task: tasks.Task[Any]) -> list[str]: ... # undocumented +def _task_get_stack(task: tasks.Task[Any], limit: int | None) -> list[FrameType]: ... # undocumented +def _task_print_stack(task: tasks.Task[Any], limit: int | None, file: StrOrBytesPath) -> None: ... 
# undocumented diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/constants.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/constants.pyi new file mode 100644 index 0000000..5c6456b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/constants.pyi @@ -0,0 +1,20 @@ +import enum +import sys +from typing import Final + +LOG_THRESHOLD_FOR_CONNLOST_WRITES: Final = 5 +ACCEPT_RETRY_DELAY: Final = 1 +DEBUG_STACK_DEPTH: Final = 10 +SSL_HANDSHAKE_TIMEOUT: float +SENDFILE_FALLBACK_READBUFFER_SIZE: Final = 262144 +if sys.version_info >= (3, 11): + SSL_SHUTDOWN_TIMEOUT: float + FLOW_CONTROL_HIGH_WATER_SSL_READ: Final = 256 + FLOW_CONTROL_HIGH_WATER_SSL_WRITE: Final = 512 +if sys.version_info >= (3, 12): + THREAD_JOIN_TIMEOUT: Final = 300 + +class _SendfileMode(enum.Enum): + UNSUPPORTED = 1 + TRY_NATIVE = 2 + FALLBACK = 3 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/coroutines.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/coroutines.pyi new file mode 100644 index 0000000..59212f4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/coroutines.pyi @@ -0,0 +1,28 @@ +import sys +from collections.abc import Awaitable, Callable, Coroutine +from typing import Any, TypeVar, overload +from typing_extensions import ParamSpec, TypeGuard, TypeIs, deprecated + +# Keep asyncio.__all__ updated with any changes to __all__ here +if sys.version_info >= (3, 11): + __all__ = ("iscoroutinefunction", "iscoroutine") +else: + __all__ = ("coroutine", "iscoroutinefunction", "iscoroutine") + +_T = TypeVar("_T") +_FunctionT = TypeVar("_FunctionT", bound=Callable[..., Any]) +_P = ParamSpec("_P") + +if sys.version_info < (3, 11): + @deprecated("Deprecated since Python 3.8; removed in Python 3.11. Use `async def` instead.") + def coroutine(func: _FunctionT) -> _FunctionT: ... + +@overload +def iscoroutinefunction(func: Callable[..., Coroutine[Any, Any, Any]]) -> bool: ... +@overload +def iscoroutinefunction(func: Callable[_P, Awaitable[_T]]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, _T]]]: ... +@overload +def iscoroutinefunction(func: Callable[_P, object]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, Any]]]: ... +@overload +def iscoroutinefunction(func: object) -> TypeGuard[Callable[..., Coroutine[Any, Any, Any]]]: ... +def iscoroutine(obj: object) -> TypeIs[Coroutine[Any, Any, Any]]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/events.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/events.pyi new file mode 100644 index 0000000..5dc698b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/events.pyi @@ -0,0 +1,675 @@ +import ssl +import sys +from _asyncio import ( + _get_running_loop as _get_running_loop, + _set_running_loop as _set_running_loop, + get_event_loop as get_event_loop, + get_running_loop as get_running_loop, +) +from _typeshed import FileDescriptorLike, ReadableBuffer, StrPath, Unused, WriteableBuffer +from abc import ABCMeta, abstractmethod +from collections.abc import Callable, Sequence +from concurrent.futures import Executor +from contextvars import Context +from socket import AddressFamily, AddressInfo, SocketKind, _Address, _RetAddress, socket +from typing import IO, Any, Literal, Protocol, TypeVar, overload, type_check_only +from typing_extensions import Self, TypeAlias, TypeVarTuple, Unpack, deprecated + +from . 
import _AwaitableLike, _CoroutineLike +from .base_events import Server +from .futures import Future +from .protocols import BaseProtocol +from .tasks import Task +from .transports import BaseTransport, DatagramTransport, ReadTransport, SubprocessTransport, Transport, WriteTransport + +if sys.version_info < (3, 14): + from .unix_events import AbstractChildWatcher + +# Keep asyncio.__all__ updated with any changes to __all__ here +if sys.version_info >= (3, 14): + __all__ = ( + "AbstractEventLoop", + "AbstractServer", + "Handle", + "TimerHandle", + "get_event_loop_policy", + "set_event_loop_policy", + "get_event_loop", + "set_event_loop", + "new_event_loop", + "_set_running_loop", + "get_running_loop", + "_get_running_loop", + ) +else: + __all__ = ( + "AbstractEventLoopPolicy", + "AbstractEventLoop", + "AbstractServer", + "Handle", + "TimerHandle", + "get_event_loop_policy", + "set_event_loop_policy", + "get_event_loop", + "set_event_loop", + "new_event_loop", + "get_child_watcher", + "set_child_watcher", + "_set_running_loop", + "get_running_loop", + "_get_running_loop", + ) + +_T = TypeVar("_T") +_Ts = TypeVarTuple("_Ts") +_ProtocolT = TypeVar("_ProtocolT", bound=BaseProtocol) +_Context: TypeAlias = dict[str, Any] +_ExceptionHandler: TypeAlias = Callable[[AbstractEventLoop, _Context], object] +_ProtocolFactory: TypeAlias = Callable[[], BaseProtocol] +_SSLContext: TypeAlias = bool | None | ssl.SSLContext + +@type_check_only +class _TaskFactory(Protocol): + def __call__(self, loop: AbstractEventLoop, factory: _CoroutineLike[_T], /) -> Future[_T]: ... + +class Handle: + __slots__ = ("_callback", "_args", "_cancelled", "_loop", "_source_traceback", "_repr", "__weakref__", "_context") + _cancelled: bool + _args: Sequence[Any] + def __init__( + self, callback: Callable[..., object], args: Sequence[Any], loop: AbstractEventLoop, context: Context | None = None + ) -> None: ... + def cancel(self) -> None: ... + def _run(self) -> None: ... + def cancelled(self) -> bool: ... + if sys.version_info >= (3, 12): + def get_context(self) -> Context: ... + +class TimerHandle(Handle): + __slots__ = ["_scheduled", "_when"] + def __init__( + self, + when: float, + callback: Callable[..., object], + args: Sequence[Any], + loop: AbstractEventLoop, + context: Context | None = None, + ) -> None: ... + def __hash__(self) -> int: ... + def when(self) -> float: ... + def __lt__(self, other: TimerHandle) -> bool: ... + def __le__(self, other: TimerHandle) -> bool: ... + def __gt__(self, other: TimerHandle) -> bool: ... + def __ge__(self, other: TimerHandle) -> bool: ... + def __eq__(self, other: object) -> bool: ... + +class AbstractServer: + @abstractmethod + def close(self) -> None: ... + if sys.version_info >= (3, 13): + @abstractmethod + def close_clients(self) -> None: ... + @abstractmethod + def abort_clients(self) -> None: ... + + async def __aenter__(self) -> Self: ... + async def __aexit__(self, *exc: Unused) -> None: ... + @abstractmethod + def get_loop(self) -> AbstractEventLoop: ... + @abstractmethod + def is_serving(self) -> bool: ... + @abstractmethod + async def start_serving(self) -> None: ... + @abstractmethod + async def serve_forever(self) -> None: ... + @abstractmethod + async def wait_closed(self) -> None: ... + +class AbstractEventLoop: + slow_callback_duration: float + @abstractmethod + def run_forever(self) -> None: ... + @abstractmethod + def run_until_complete(self, future: _AwaitableLike[_T]) -> _T: ... + @abstractmethod + def stop(self) -> None: ... 
+ @abstractmethod + def is_running(self) -> bool: ... + @abstractmethod + def is_closed(self) -> bool: ... + @abstractmethod + def close(self) -> None: ... + @abstractmethod + async def shutdown_asyncgens(self) -> None: ... + # Methods scheduling callbacks. All these return Handles. + # "context" added in 3.9.10/3.10.2 for call_* + @abstractmethod + def call_soon( + self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None + ) -> Handle: ... + @abstractmethod + def call_later( + self, delay: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None + ) -> TimerHandle: ... + @abstractmethod + def call_at( + self, when: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None + ) -> TimerHandle: ... + @abstractmethod + def time(self) -> float: ... + # Future methods + @abstractmethod + def create_future(self) -> Future[Any]: ... + # Tasks methods + if sys.version_info >= (3, 11): + @abstractmethod + def create_task( + self, coro: _CoroutineLike[_T], *, name: str | None = None, context: Context | None = None + ) -> Task[_T]: ... + else: + @abstractmethod + def create_task(self, coro: _CoroutineLike[_T], *, name: str | None = None) -> Task[_T]: ... + + @abstractmethod + def set_task_factory(self, factory: _TaskFactory | None) -> None: ... + @abstractmethod + def get_task_factory(self) -> _TaskFactory | None: ... + # Methods for interacting with threads + # "context" added in 3.9.10/3.10.2 + @abstractmethod + def call_soon_threadsafe( + self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None + ) -> Handle: ... + @abstractmethod + def run_in_executor(self, executor: Executor | None, func: Callable[[Unpack[_Ts]], _T], *args: Unpack[_Ts]) -> Future[_T]: ... + @abstractmethod + def set_default_executor(self, executor: Executor) -> None: ... + # Network I/O methods returning Futures. + @abstractmethod + async def getaddrinfo( + self, + host: bytes | str | None, + port: bytes | str | int | None, + *, + family: int = 0, + type: int = 0, + proto: int = 0, + flags: int = 0, + ) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... + @abstractmethod + async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = 0) -> tuple[str, str]: ... + if sys.version_info >= (3, 11): + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = None, + port: None = None, + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... 
+ else: + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = None, + port: None = None, + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + + if sys.version_info >= (3, 13): + # 3.13 added `keep_alive`. + @overload + @abstractmethod + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: str | Sequence[str] | None = None, + port: int = ..., + *, + family: int = AddressFamily.AF_UNSPEC, + flags: int = AddressInfo.AI_PASSIVE, + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + keep_alive: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + @overload + @abstractmethod + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: None = None, + port: None = None, + *, + family: int = AddressFamily.AF_UNSPEC, + flags: int = AddressInfo.AI_PASSIVE, + sock: socket = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + keep_alive: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + elif sys.version_info >= (3, 11): + @overload + @abstractmethod + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: str | Sequence[str] | None = None, + port: int = ..., + *, + family: int = AddressFamily.AF_UNSPEC, + flags: int = AddressInfo.AI_PASSIVE, + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + @overload + @abstractmethod + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: None = None, + port: None = None, + *, + family: int = AddressFamily.AF_UNSPEC, + flags: int = AddressInfo.AI_PASSIVE, + sock: socket = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... 
+ else: + @overload + @abstractmethod + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: str | Sequence[str] | None = None, + port: int = ..., + *, + family: int = AddressFamily.AF_UNSPEC, + flags: int = AddressInfo.AI_PASSIVE, + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + @overload + @abstractmethod + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: None = None, + port: None = None, + *, + family: int = AddressFamily.AF_UNSPEC, + flags: int = AddressInfo.AI_PASSIVE, + sock: socket = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + + if sys.version_info >= (3, 11): + @abstractmethod + async def start_tls( + self, + transport: WriteTransport, + protocol: BaseProtocol, + sslcontext: ssl.SSLContext, + *, + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> Transport | None: ... + async def create_unix_server( + self, + protocol_factory: _ProtocolFactory, + path: StrPath | None = None, + *, + sock: socket | None = None, + backlog: int = 100, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + else: + @abstractmethod + async def start_tls( + self, + transport: BaseTransport, + protocol: BaseProtocol, + sslcontext: ssl.SSLContext, + *, + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> Transport | None: ... + async def create_unix_server( + self, + protocol_factory: _ProtocolFactory, + path: StrPath | None = None, + *, + sock: socket | None = None, + backlog: int = 100, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + + if sys.version_info >= (3, 11): + async def connect_accepted_socket( + self, + protocol_factory: Callable[[], _ProtocolT], + sock: socket, + *, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + elif sys.version_info >= (3, 10): + async def connect_accepted_socket( + self, + protocol_factory: Callable[[], _ProtocolT], + sock: socket, + *, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + if sys.version_info >= (3, 11): + async def create_unix_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + path: str | None = None, + *, + ssl: _SSLContext = None, + sock: socket | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + else: + async def create_unix_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + path: str | None = None, + *, + ssl: _SSLContext = None, + sock: socket | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... 
+ + @abstractmethod + async def sock_sendfile( + self, sock: socket, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool | None = None + ) -> int: ... + @abstractmethod + async def sendfile( + self, transport: WriteTransport, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool = True + ) -> int: ... + @abstractmethod + async def create_datagram_endpoint( + self, + protocol_factory: Callable[[], _ProtocolT], + local_addr: tuple[str, int] | str | None = None, + remote_addr: tuple[str, int] | str | None = None, + *, + family: int = 0, + proto: int = 0, + flags: int = 0, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + allow_broadcast: bool | None = None, + sock: socket | None = None, + ) -> tuple[DatagramTransport, _ProtocolT]: ... + # Pipes and subprocesses. + @abstractmethod + async def connect_read_pipe( + self, protocol_factory: Callable[[], _ProtocolT], pipe: Any + ) -> tuple[ReadTransport, _ProtocolT]: ... + @abstractmethod + async def connect_write_pipe( + self, protocol_factory: Callable[[], _ProtocolT], pipe: Any + ) -> tuple[WriteTransport, _ProtocolT]: ... + @abstractmethod + async def subprocess_shell( + self, + protocol_factory: Callable[[], _ProtocolT], + cmd: bytes | str, + *, + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + text: Literal[False] | None = None, + **kwargs: Any, + ) -> tuple[SubprocessTransport, _ProtocolT]: ... + @abstractmethod + async def subprocess_exec( + self, + protocol_factory: Callable[[], _ProtocolT], + program: Any, + *args: Any, + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, + universal_newlines: Literal[False] = False, + shell: Literal[False] = False, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + **kwargs: Any, + ) -> tuple[SubprocessTransport, _ProtocolT]: ... + @abstractmethod + def add_reader(self, fd: FileDescriptorLike, callback: Callable[[Unpack[_Ts]], Any], *args: Unpack[_Ts]) -> None: ... + @abstractmethod + def remove_reader(self, fd: FileDescriptorLike) -> bool: ... + @abstractmethod + def add_writer(self, fd: FileDescriptorLike, callback: Callable[[Unpack[_Ts]], Any], *args: Unpack[_Ts]) -> None: ... + @abstractmethod + def remove_writer(self, fd: FileDescriptorLike) -> bool: ... + @abstractmethod + async def sock_recv(self, sock: socket, nbytes: int) -> bytes: ... + @abstractmethod + async def sock_recv_into(self, sock: socket, buf: WriteableBuffer) -> int: ... + @abstractmethod + async def sock_sendall(self, sock: socket, data: ReadableBuffer) -> None: ... + @abstractmethod + async def sock_connect(self, sock: socket, address: _Address) -> None: ... + @abstractmethod + async def sock_accept(self, sock: socket) -> tuple[socket, _RetAddress]: ... + if sys.version_info >= (3, 11): + @abstractmethod + async def sock_recvfrom(self, sock: socket, bufsize: int) -> tuple[bytes, _RetAddress]: ... + @abstractmethod + async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = 0) -> tuple[int, _RetAddress]: ... + @abstractmethod + async def sock_sendto(self, sock: socket, data: ReadableBuffer, address: _Address) -> int: ... + # Signal handling. 
+ @abstractmethod + def add_signal_handler(self, sig: int, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> None: ... + @abstractmethod + def remove_signal_handler(self, sig: int) -> bool: ... + # Error handlers. + @abstractmethod + def set_exception_handler(self, handler: _ExceptionHandler | None) -> None: ... + @abstractmethod + def get_exception_handler(self) -> _ExceptionHandler | None: ... + @abstractmethod + def default_exception_handler(self, context: _Context) -> None: ... + @abstractmethod + def call_exception_handler(self, context: _Context) -> None: ... + # Debug flag management. + @abstractmethod + def get_debug(self) -> bool: ... + @abstractmethod + def set_debug(self, enabled: bool) -> None: ... + @abstractmethod + async def shutdown_default_executor(self) -> None: ... + +if sys.version_info >= (3, 14): + class _AbstractEventLoopPolicy: + @abstractmethod + def get_event_loop(self) -> AbstractEventLoop: ... + @abstractmethod + def set_event_loop(self, loop: AbstractEventLoop | None) -> None: ... + @abstractmethod + def new_event_loop(self) -> AbstractEventLoop: ... + +else: + @type_check_only + class _AbstractEventLoopPolicy: + @abstractmethod + def get_event_loop(self) -> AbstractEventLoop: ... + @abstractmethod + def set_event_loop(self, loop: AbstractEventLoop | None) -> None: ... + @abstractmethod + def new_event_loop(self) -> AbstractEventLoop: ... + # Child processes handling (Unix only). + if sys.version_info >= (3, 12): + @abstractmethod + @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") + def get_child_watcher(self) -> AbstractChildWatcher: ... + @abstractmethod + @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") + def set_child_watcher(self, watcher: AbstractChildWatcher) -> None: ... + else: + @abstractmethod + def get_child_watcher(self) -> AbstractChildWatcher: ... + @abstractmethod + def set_child_watcher(self, watcher: AbstractChildWatcher) -> None: ... + + AbstractEventLoopPolicy = _AbstractEventLoopPolicy + +if sys.version_info >= (3, 14): + class _BaseDefaultEventLoopPolicy(_AbstractEventLoopPolicy, metaclass=ABCMeta): + def get_event_loop(self) -> AbstractEventLoop: ... + def set_event_loop(self, loop: AbstractEventLoop | None) -> None: ... + def new_event_loop(self) -> AbstractEventLoop: ... + +else: + class BaseDefaultEventLoopPolicy(_AbstractEventLoopPolicy, metaclass=ABCMeta): + def get_event_loop(self) -> AbstractEventLoop: ... + def set_event_loop(self, loop: AbstractEventLoop | None) -> None: ... + def new_event_loop(self) -> AbstractEventLoop: ... + +if sys.version_info >= (3, 14): + def _get_event_loop_policy() -> _AbstractEventLoopPolicy: ... + def _set_event_loop_policy(policy: _AbstractEventLoopPolicy | None) -> None: ... + @deprecated("Deprecated since Python 3.14; will be removed in Python 3.16.") + def get_event_loop_policy() -> _AbstractEventLoopPolicy: ... + @deprecated("Deprecated since Python 3.14; will be removed in Python 3.16.") + def set_event_loop_policy(policy: _AbstractEventLoopPolicy | None) -> None: ... + +else: + def get_event_loop_policy() -> _AbstractEventLoopPolicy: ... + def set_event_loop_policy(policy: _AbstractEventLoopPolicy | None) -> None: ... + +def set_event_loop(loop: AbstractEventLoop | None) -> None: ... +def new_event_loop() -> AbstractEventLoop: ... + +if sys.version_info < (3, 14): + if sys.version_info >= (3, 12): + @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") + def get_child_watcher() -> AbstractChildWatcher: ... 
+ @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") + def set_child_watcher(watcher: AbstractChildWatcher) -> None: ... + + else: + def get_child_watcher() -> AbstractChildWatcher: ... + def set_child_watcher(watcher: AbstractChildWatcher) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/exceptions.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/exceptions.pyi new file mode 100644 index 0000000..759838f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/exceptions.pyi @@ -0,0 +1,44 @@ +import sys + +# Keep asyncio.__all__ updated with any changes to __all__ here +if sys.version_info >= (3, 11): + __all__ = ( + "BrokenBarrierError", + "CancelledError", + "InvalidStateError", + "TimeoutError", + "IncompleteReadError", + "LimitOverrunError", + "SendfileNotAvailableError", + ) +else: + __all__ = ( + "CancelledError", + "InvalidStateError", + "TimeoutError", + "IncompleteReadError", + "LimitOverrunError", + "SendfileNotAvailableError", + ) + +class CancelledError(BaseException): ... + +if sys.version_info >= (3, 11): + from builtins import TimeoutError as TimeoutError +else: + class TimeoutError(Exception): ... + +class InvalidStateError(Exception): ... +class SendfileNotAvailableError(RuntimeError): ... + +class IncompleteReadError(EOFError): + expected: int | None + partial: bytes + def __init__(self, partial: bytes, expected: int | None) -> None: ... + +class LimitOverrunError(Exception): + consumed: int + def __init__(self, message: str, consumed: int) -> None: ... + +if sys.version_info >= (3, 11): + class BrokenBarrierError(RuntimeError): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/format_helpers.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/format_helpers.pyi new file mode 100644 index 0000000..597eb9e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/format_helpers.pyi @@ -0,0 +1,32 @@ +import functools +import sys +import traceback +from collections.abc import Iterable +from types import FrameType, FunctionType +from typing import Any, overload, type_check_only +from typing_extensions import TypeAlias + +@type_check_only +class _HasWrapper: + __wrapper__: _HasWrapper | FunctionType + +_FuncType: TypeAlias = FunctionType | _HasWrapper | functools.partial[Any] | functools.partialmethod[Any] + +@overload +def _get_function_source(func: _FuncType) -> tuple[str, int]: ... +@overload +def _get_function_source(func: object) -> tuple[str, int] | None: ... + +if sys.version_info >= (3, 13): + def _format_callback_source(func: object, args: Iterable[Any], *, debug: bool = False) -> str: ... + def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any], *, debug: bool = False) -> str: ... + def _format_callback( + func: object, args: Iterable[Any], kwargs: dict[str, Any], *, debug: bool = False, suffix: str = "" + ) -> str: ... + +else: + def _format_callback_source(func: object, args: Iterable[Any]) -> str: ... + def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: ... + def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = "") -> str: ... + +def extract_stack(f: FrameType | None = None, limit: int | None = None) -> traceback.StackSummary: ... 
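A minimal usage sketch, not part of the vendored stubs: exceptions.pyi above declares asyncio.TimeoutError as the builtin TimeoutError on Python 3.11+ and as a separate Exception subclass before that, so catching asyncio.TimeoutError stays correct on both branches; the sleep and timeout values below are arbitrary.

import asyncio

async def fetch_with_timeout() -> str:
    try:
        # wait_for raises asyncio.TimeoutError, which is the builtin TimeoutError on 3.11+.
        return await asyncio.wait_for(asyncio.sleep(5, result="done"), timeout=0.1)
    except asyncio.TimeoutError:
        return "timed out"

if __name__ == "__main__":
    print(asyncio.run(fetch_with_timeout()))  # prints "timed out"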
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/futures.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/futures.pyi new file mode 100644 index 0000000..c907c70 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/futures.pyi @@ -0,0 +1,19 @@ +import sys +from _asyncio import Future as Future +from concurrent.futures._base import Future as _ConcurrentFuture +from typing import TypeVar + +from .base_futures import isfuture as isfuture +from .events import AbstractEventLoop + +# Keep asyncio.__all__ updated with any changes to __all__ here +if sys.version_info >= (3, 14): + from _asyncio import future_add_to_awaited_by, future_discard_from_awaited_by + + __all__ = ("Future", "wrap_future", "isfuture", "future_discard_from_awaited_by", "future_add_to_awaited_by") +else: + __all__ = ("Future", "wrap_future", "isfuture") + +_T = TypeVar("_T") + +def wrap_future(future: _ConcurrentFuture[_T] | Future[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/graph.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/graph.pyi new file mode 100644 index 0000000..18a8a64 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/graph.pyi @@ -0,0 +1,28 @@ +import sys +from _typeshed import SupportsWrite +from asyncio import Future +from dataclasses import dataclass +from types import FrameType +from typing import Any, overload + +if sys.version_info >= (3, 14): + __all__ = ("capture_call_graph", "format_call_graph", "print_call_graph", "FrameCallGraphEntry", "FutureCallGraph") + + @dataclass(frozen=True, slots=True) + class FrameCallGraphEntry: + frame: FrameType + + @dataclass(frozen=True, slots=True) + class FutureCallGraph: + future: Future[Any] + call_stack: tuple[FrameCallGraphEntry, ...] + awaited_by: tuple[FutureCallGraph, ...] + + @overload + def capture_call_graph(future: None = None, /, *, depth: int = 1, limit: int | None = None) -> FutureCallGraph | None: ... + @overload + def capture_call_graph(future: Future[Any], /, *, depth: int = 1, limit: int | None = None) -> FutureCallGraph | None: ... + def format_call_graph(future: Future[Any] | None = None, /, *, depth: int = 1, limit: int | None = None) -> str: ... + def print_call_graph( + future: Future[Any] | None = None, /, *, file: SupportsWrite[str] | None = None, depth: int = 1, limit: int | None = None + ) -> None: ... 
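A minimal sketch of the 3.14+ call-graph helpers declared in graph.pyi above, guarded by a version check since they only exist on 3.14+; the coroutine names are invented for illustration.

import asyncio
import sys

async def leaf() -> None:
    if sys.version_info >= (3, 14):
        # Print the chain of awaits leading to the current task (see graph.pyi above).
        asyncio.print_call_graph()
    await asyncio.sleep(0)

async def main() -> None:
    await leaf()

if __name__ == "__main__":
    asyncio.run(main())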
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/locks.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/locks.pyi new file mode 100644 index 0000000..17390b0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/locks.pyi @@ -0,0 +1,104 @@ +import enum +import sys +from _typeshed import Unused +from collections import deque +from collections.abc import Callable +from types import TracebackType +from typing import Any, Literal, TypeVar +from typing_extensions import Self + +from .events import AbstractEventLoop +from .futures import Future + +if sys.version_info >= (3, 10): + from .mixins import _LoopBoundMixin +else: + _LoopBoundMixin = object + +# Keep asyncio.__all__ updated with any changes to __all__ here +if sys.version_info >= (3, 11): + __all__ = ("Lock", "Event", "Condition", "Semaphore", "BoundedSemaphore", "Barrier") +else: + __all__ = ("Lock", "Event", "Condition", "Semaphore", "BoundedSemaphore") + +_T = TypeVar("_T") + +class _ContextManagerMixin: + async def __aenter__(self) -> None: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None + ) -> None: ... + +class Lock(_ContextManagerMixin, _LoopBoundMixin): + _waiters: deque[Future[Any]] | None + if sys.version_info >= (3, 10): + def __init__(self) -> None: ... + else: + def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... + + def locked(self) -> bool: ... + async def acquire(self) -> Literal[True]: ... + def release(self) -> None: ... + +class Event(_LoopBoundMixin): + _waiters: deque[Future[Any]] + if sys.version_info >= (3, 10): + def __init__(self) -> None: ... + else: + def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... + + def is_set(self) -> bool: ... + def set(self) -> None: ... + def clear(self) -> None: ... + async def wait(self) -> Literal[True]: ... + +class Condition(_ContextManagerMixin, _LoopBoundMixin): + _waiters: deque[Future[Any]] + if sys.version_info >= (3, 10): + def __init__(self, lock: Lock | None = None) -> None: ... + else: + def __init__(self, lock: Lock | None = None, *, loop: AbstractEventLoop | None = None) -> None: ... + + def locked(self) -> bool: ... + async def acquire(self) -> Literal[True]: ... + def release(self) -> None: ... + async def wait(self) -> Literal[True]: ... + async def wait_for(self, predicate: Callable[[], _T]) -> _T: ... + def notify(self, n: int = 1) -> None: ... + def notify_all(self) -> None: ... + +class Semaphore(_ContextManagerMixin, _LoopBoundMixin): + _value: int + _waiters: deque[Future[Any]] | None + if sys.version_info >= (3, 10): + def __init__(self, value: int = 1) -> None: ... + else: + def __init__(self, value: int = 1, *, loop: AbstractEventLoop | None = None) -> None: ... + + def locked(self) -> bool: ... + async def acquire(self) -> Literal[True]: ... + def release(self) -> None: ... + def _wake_up_next(self) -> None: ... + +class BoundedSemaphore(Semaphore): ... + +if sys.version_info >= (3, 11): + class _BarrierState(enum.Enum): # undocumented + FILLING = "filling" + DRAINING = "draining" + RESETTING = "resetting" + BROKEN = "broken" + + class Barrier(_LoopBoundMixin): + def __init__(self, parties: int) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__(self, *args: Unused) -> None: ... + async def wait(self) -> int: ... + async def abort(self) -> None: ... + async def reset(self) -> None: ... + @property + def parties(self) -> int: ... 
+ @property + def n_waiting(self) -> int: ... + @property + def broken(self) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/log.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/log.pyi new file mode 100644 index 0000000..e1de0b3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/log.pyi @@ -0,0 +1,3 @@ +import logging + +logger: logging.Logger diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/mixins.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/mixins.pyi new file mode 100644 index 0000000..6ebcf54 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/mixins.pyi @@ -0,0 +1,9 @@ +import sys +import threading +from typing_extensions import Never + +_global_lock: threading.Lock + +class _LoopBoundMixin: + if sys.version_info < (3, 11): + def __init__(self, *, loop: Never = ...) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/proactor_events.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/proactor_events.pyi new file mode 100644 index 0000000..909d671 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/proactor_events.pyi @@ -0,0 +1,65 @@ +import sys +from collections.abc import Mapping +from socket import socket +from typing import Any, ClassVar, Literal + +from . import base_events, constants, events, futures, streams, transports + +__all__ = ("BaseProactorEventLoop",) + +class _ProactorBasePipeTransport(transports._FlowControlMixin, transports.BaseTransport): + def __init__( + self, + loop: events.AbstractEventLoop, + sock: socket, + protocol: streams.StreamReaderProtocol, + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, + ) -> None: ... + def __del__(self) -> None: ... + +class _ProactorReadPipeTransport(_ProactorBasePipeTransport, transports.ReadTransport): + if sys.version_info >= (3, 10): + def __init__( + self, + loop: events.AbstractEventLoop, + sock: socket, + protocol: streams.StreamReaderProtocol, + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, + buffer_size: int = 65536, + ) -> None: ... + else: + def __init__( + self, + loop: events.AbstractEventLoop, + sock: socket, + protocol: streams.StreamReaderProtocol, + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, + ) -> None: ... + +class _ProactorBaseWritePipeTransport(_ProactorBasePipeTransport, transports.WriteTransport): ... +class _ProactorWritePipeTransport(_ProactorBaseWritePipeTransport): ... +class _ProactorDuplexPipeTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): ... + +class _ProactorSocketTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): + _sendfile_compatible: ClassVar[constants._SendfileMode] + def __init__( + self, + loop: events.AbstractEventLoop, + sock: socket, + protocol: streams.StreamReaderProtocol, + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, + ) -> None: ... + def _set_extra(self, sock: socket) -> None: ... + def can_write_eof(self) -> Literal[True]: ... 
+ +class BaseProactorEventLoop(base_events.BaseEventLoop): + def __init__(self, proactor: Any) -> None: ... + async def sock_recv(self, sock: socket, n: int) -> bytes: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/protocols.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/protocols.pyi new file mode 100644 index 0000000..3a8965f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/protocols.pyi @@ -0,0 +1,41 @@ +from _typeshed import ReadableBuffer +from asyncio import transports +from typing import Any + +# Keep asyncio.__all__ updated with any changes to __all__ here +__all__ = ("BaseProtocol", "Protocol", "DatagramProtocol", "SubprocessProtocol", "BufferedProtocol") + +class BaseProtocol: + __slots__ = () + def connection_made(self, transport: transports.BaseTransport) -> None: ... + def connection_lost(self, exc: Exception | None) -> None: ... + def pause_writing(self) -> None: ... + def resume_writing(self) -> None: ... + +class Protocol(BaseProtocol): + # Need annotation or mypy will complain about 'Cannot determine type of "__slots__" in base class' + __slots__: tuple[str, ...] = () + def data_received(self, data: bytes) -> None: ... + def eof_received(self) -> bool | None: ... + +class BufferedProtocol(BaseProtocol): + __slots__ = () + def get_buffer(self, sizehint: int) -> ReadableBuffer: ... + def buffer_updated(self, nbytes: int) -> None: ... + def eof_received(self) -> bool | None: ... + +class DatagramProtocol(BaseProtocol): + __slots__ = () + def connection_made(self, transport: transports.DatagramTransport) -> None: ... # type: ignore[override] + # addr can be a tuple[int, int] for some unusual protocols like socket.AF_NETLINK. + # Use tuple[str | Any, int] to not cause typechecking issues on most usual cases. + # This could be improved by using tuple[AnyOf[str, int], int] if the AnyOf feature is accepted. + # See https://github.com/python/typing/issues/566 + def datagram_received(self, data: bytes, addr: tuple[str | Any, int]) -> None: ... + def error_received(self, exc: Exception) -> None: ... + +class SubprocessProtocol(BaseProtocol): + __slots__: tuple[str, ...] = () + def pipe_data_received(self, fd: int, data: bytes) -> None: ... + def pipe_connection_lost(self, fd: int, exc: Exception | None) -> None: ... + def process_exited(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/queues.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/queues.pyi new file mode 100644 index 0000000..2fa2226 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/queues.pyi @@ -0,0 +1,55 @@ +import sys +from _typeshed import SupportsRichComparisonT +from asyncio.events import AbstractEventLoop +from types import GenericAlias +from typing import Any, Generic, TypeVar + +if sys.version_info >= (3, 10): + from .mixins import _LoopBoundMixin +else: + _LoopBoundMixin = object + +class QueueEmpty(Exception): ... +class QueueFull(Exception): ... + +# Keep asyncio.__all__ updated with any changes to __all__ here +if sys.version_info >= (3, 13): + __all__ = ("Queue", "PriorityQueue", "LifoQueue", "QueueFull", "QueueEmpty", "QueueShutDown") + +else: + __all__ = ("Queue", "PriorityQueue", "LifoQueue", "QueueFull", "QueueEmpty") + +_T = TypeVar("_T") + +if sys.version_info >= (3, 13): + class QueueShutDown(Exception): ... + +# If Generic[_T] is last and _LoopBoundMixin is object, pyright is unhappy. 
+# We can remove the noqa pragma when dropping 3.9 support. +class Queue(Generic[_T], _LoopBoundMixin): # noqa: Y059 + if sys.version_info >= (3, 10): + def __init__(self, maxsize: int = 0) -> None: ... + else: + def __init__(self, maxsize: int = 0, *, loop: AbstractEventLoop | None = None) -> None: ... + + def _init(self, maxsize: int) -> None: ... + def _get(self) -> _T: ... + def _put(self, item: _T) -> None: ... + def _format(self) -> str: ... + def qsize(self) -> int: ... + @property + def maxsize(self) -> int: ... + def empty(self) -> bool: ... + def full(self) -> bool: ... + async def put(self, item: _T) -> None: ... + def put_nowait(self, item: _T) -> None: ... + async def get(self) -> _T: ... + def get_nowait(self) -> _T: ... + async def join(self) -> None: ... + def task_done(self) -> None: ... + def __class_getitem__(cls, type: Any, /) -> GenericAlias: ... + if sys.version_info >= (3, 13): + def shutdown(self, immediate: bool = False) -> None: ... + +class PriorityQueue(Queue[SupportsRichComparisonT]): ... +class LifoQueue(Queue[_T]): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/runners.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/runners.pyi new file mode 100644 index 0000000..919e652 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/runners.pyi @@ -0,0 +1,33 @@ +import sys +from _typeshed import Unused +from collections.abc import Callable, Coroutine +from contextvars import Context +from typing import Any, TypeVar, final +from typing_extensions import Self + +from .events import AbstractEventLoop + +# Keep asyncio.__all__ updated with any changes to __all__ here +if sys.version_info >= (3, 11): + __all__ = ("Runner", "run") +else: + __all__ = ("run",) +_T = TypeVar("_T") + +if sys.version_info >= (3, 11): + @final + class Runner: + def __init__(self, *, debug: bool | None = None, loop_factory: Callable[[], AbstractEventLoop] | None = None) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, exc_type: Unused, exc_val: Unused, exc_tb: Unused) -> None: ... + def close(self) -> None: ... + def get_loop(self) -> AbstractEventLoop: ... + def run(self, coro: Coroutine[Any, Any, _T], *, context: Context | None = None) -> _T: ... + +if sys.version_info >= (3, 12): + def run( + main: Coroutine[Any, Any, _T], *, debug: bool | None = None, loop_factory: Callable[[], AbstractEventLoop] | None = None + ) -> _T: ... + +else: + def run(main: Coroutine[Any, Any, _T], *, debug: bool | None = None) -> _T: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/selector_events.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/selector_events.pyi new file mode 100644 index 0000000..18c5df0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/selector_events.pyi @@ -0,0 +1,10 @@ +import selectors +from socket import socket + +from . import base_events + +__all__ = ("BaseSelectorEventLoop",) + +class BaseSelectorEventLoop(base_events.BaseEventLoop): + def __init__(self, selector: selectors.BaseSelector | None = None) -> None: ... + async def sock_recv(self, sock: socket, n: int) -> bytes: ... 
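A minimal sketch of the 3.11+ Runner declared in runners.pyi above; `payload` is an invented stand-in coroutine.

import asyncio

async def payload(x: int) -> int:
    await asyncio.sleep(0)
    return x * 2

if __name__ == "__main__":
    # A single Runner reuses one event loop across multiple run() calls (3.11+).
    with asyncio.Runner() as runner:
        print(runner.run(payload(1)), runner.run(payload(2)))  # 2 4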
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/sslproto.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/sslproto.pyi new file mode 100644 index 0000000..ab102f1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/sslproto.pyi @@ -0,0 +1,165 @@ +import ssl +import sys +from collections import deque +from collections.abc import Callable +from enum import Enum +from typing import Any, ClassVar, Final, Literal +from typing_extensions import TypeAlias + +from . import constants, events, futures, protocols, transports + +def _create_transport_context(server_side: bool, server_hostname: str | None) -> ssl.SSLContext: ... + +if sys.version_info >= (3, 11): + SSLAgainErrors: tuple[type[ssl.SSLWantReadError], type[ssl.SSLSyscallError]] + + class SSLProtocolState(Enum): + UNWRAPPED = "UNWRAPPED" + DO_HANDSHAKE = "DO_HANDSHAKE" + WRAPPED = "WRAPPED" + FLUSHING = "FLUSHING" + SHUTDOWN = "SHUTDOWN" + + class AppProtocolState(Enum): + STATE_INIT = "STATE_INIT" + STATE_CON_MADE = "STATE_CON_MADE" + STATE_EOF = "STATE_EOF" + STATE_CON_LOST = "STATE_CON_LOST" + + def add_flowcontrol_defaults(high: int | None, low: int | None, kb: int) -> tuple[int, int]: ... + +else: + _UNWRAPPED: Final = "UNWRAPPED" + _DO_HANDSHAKE: Final = "DO_HANDSHAKE" + _WRAPPED: Final = "WRAPPED" + _SHUTDOWN: Final = "SHUTDOWN" + +if sys.version_info < (3, 11): + class _SSLPipe: + max_size: ClassVar[int] + + _context: ssl.SSLContext + _server_side: bool + _server_hostname: str | None + _state: str + _incoming: ssl.MemoryBIO + _outgoing: ssl.MemoryBIO + _sslobj: ssl.SSLObject | None + _need_ssldata: bool + _handshake_cb: Callable[[BaseException | None], None] | None + _shutdown_cb: Callable[[], None] | None + def __init__(self, context: ssl.SSLContext, server_side: bool, server_hostname: str | None = None) -> None: ... + @property + def context(self) -> ssl.SSLContext: ... + @property + def ssl_object(self) -> ssl.SSLObject | None: ... + @property + def need_ssldata(self) -> bool: ... + @property + def wrapped(self) -> bool: ... + def do_handshake(self, callback: Callable[[BaseException | None], object] | None = None) -> list[bytes]: ... + def shutdown(self, callback: Callable[[], object] | None = None) -> list[bytes]: ... + def feed_eof(self) -> None: ... + def feed_ssldata(self, data: bytes, only_handshake: bool = False) -> tuple[list[bytes], list[bytes]]: ... + def feed_appdata(self, data: bytes, offset: int = 0) -> tuple[list[bytes], int]: ... + +class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): + _sendfile_compatible: ClassVar[constants._SendfileMode] + + _loop: events.AbstractEventLoop + if sys.version_info >= (3, 11): + _ssl_protocol: SSLProtocol | None + else: + _ssl_protocol: SSLProtocol + _closed: bool + def __init__(self, loop: events.AbstractEventLoop, ssl_protocol: SSLProtocol) -> None: ... + def get_extra_info(self, name: str, default: Any | None = None) -> dict[str, Any]: ... + @property + def _protocol_paused(self) -> bool: ... + def write(self, data: bytes | bytearray | memoryview[Any]) -> None: ... # any memoryview format or shape + def can_write_eof(self) -> Literal[False]: ... + if sys.version_info >= (3, 11): + def get_write_buffer_limits(self) -> tuple[int, int]: ... + def get_read_buffer_limits(self) -> tuple[int, int]: ... + def set_read_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: ... + def get_read_buffer_size(self) -> int: ... + + def __del__(self) -> None: ... 
+ +if sys.version_info >= (3, 11): + _SSLProtocolBase: TypeAlias = protocols.BufferedProtocol +else: + _SSLProtocolBase: TypeAlias = protocols.Protocol + +class SSLProtocol(_SSLProtocolBase): + _server_side: bool + _server_hostname: str | None + _sslcontext: ssl.SSLContext + _extra: dict[str, Any] + _write_backlog: deque[tuple[bytes, int]] + _write_buffer_size: int + _waiter: futures.Future[Any] + _loop: events.AbstractEventLoop + _app_transport: _SSLProtocolTransport + _transport: transports.BaseTransport | None + _ssl_handshake_timeout: int | None + _app_protocol: protocols.BaseProtocol + _app_protocol_is_buffer: bool + + if sys.version_info >= (3, 11): + max_size: ClassVar[int] + else: + _sslpipe: _SSLPipe | None + _session_established: bool + _call_connection_made: bool + _in_handshake: bool + _in_shutdown: bool + + if sys.version_info >= (3, 11): + def __init__( + self, + loop: events.AbstractEventLoop, + app_protocol: protocols.BaseProtocol, + sslcontext: ssl.SSLContext, + waiter: futures.Future[Any], + server_side: bool = False, + server_hostname: str | None = None, + call_connection_made: bool = True, + ssl_handshake_timeout: int | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> None: ... + else: + def __init__( + self, + loop: events.AbstractEventLoop, + app_protocol: protocols.BaseProtocol, + sslcontext: ssl.SSLContext, + waiter: futures.Future[Any], + server_side: bool = False, + server_hostname: str | None = None, + call_connection_made: bool = True, + ssl_handshake_timeout: int | None = None, + ) -> None: ... + + def _set_app_protocol(self, app_protocol: protocols.BaseProtocol) -> None: ... + def _wakeup_waiter(self, exc: BaseException | None = None) -> None: ... + def connection_lost(self, exc: BaseException | None) -> None: ... + def eof_received(self) -> None: ... + def _get_extra_info(self, name: str, default: Any | None = None) -> Any: ... + def _start_shutdown(self) -> None: ... + if sys.version_info >= (3, 11): + def _write_appdata(self, list_of_data: list[bytes]) -> None: ... + else: + def _write_appdata(self, data: bytes) -> None: ... + + def _start_handshake(self) -> None: ... + def _check_handshake_timeout(self) -> None: ... + def _on_handshake_complete(self, handshake_exc: BaseException | None) -> None: ... + def _fatal_error(self, exc: BaseException, message: str = "Fatal error on transport") -> None: ... + if sys.version_info >= (3, 11): + def _abort(self, exc: BaseException | None) -> None: ... + def get_buffer(self, n: int) -> memoryview: ... + else: + def _abort(self) -> None: ... + def _finalize(self) -> None: ... + def _process_write_backlog(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/staggered.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/staggered.pyi new file mode 100644 index 0000000..3324777 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/staggered.pyi @@ -0,0 +1,10 @@ +from collections.abc import Awaitable, Callable, Iterable +from typing import Any + +from . import events + +__all__ = ("staggered_race",) + +async def staggered_race( + coro_fns: Iterable[Callable[[], Awaitable[Any]]], delay: float | None, *, loop: events.AbstractEventLoop | None = None +) -> tuple[Any, int | None, list[Exception | None]]: ... 
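sslproto.pyi above is internal plumbing for TLS transports, but staggered.pyi declares staggered_race(), the helper behind Happy-Eyeballs-style connection attempts. A hedged sketch of direct use (illustrative only, not part of the committed files; the coroutine names are made up):

import asyncio
from asyncio.staggered import staggered_race

# Illustrative sketch only (not from the committed stubs).
async def main() -> None:
    async def fast() -> str:
        await asyncio.sleep(0.1)
        return "fast"

    async def slow() -> str:
        await asyncio.sleep(1.0)
        return "slow"

    # Start fast(); if it has not finished after 0.3 s, also start slow().
    # The first coroutine to succeed wins and the remaining attempts are cancelled.
    result, winner_index, exceptions = await staggered_race([fast, slow], 0.3)
    print(result, winner_index, exceptions)

asyncio.run(main())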
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/streams.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/streams.pyi new file mode 100644 index 0000000..33cffb1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/streams.pyi @@ -0,0 +1,159 @@ +import ssl +import sys +from _typeshed import ReadableBuffer, StrPath +from collections.abc import Awaitable, Callable, Iterable, Sequence, Sized +from types import ModuleType +from typing import Any, Protocol, SupportsIndex, type_check_only +from typing_extensions import Self, TypeAlias + +from . import events, protocols, transports +from .base_events import Server + +# Keep asyncio.__all__ updated with any changes to __all__ here +if sys.platform == "win32": + __all__ = ("StreamReader", "StreamWriter", "StreamReaderProtocol", "open_connection", "start_server") +else: + __all__ = ( + "StreamReader", + "StreamWriter", + "StreamReaderProtocol", + "open_connection", + "start_server", + "open_unix_connection", + "start_unix_server", + ) + +_ClientConnectedCallback: TypeAlias = Callable[[StreamReader, StreamWriter], Awaitable[None] | None] + +@type_check_only +class _ReaduntilBuffer(ReadableBuffer, Sized, Protocol): ... + +if sys.version_info >= (3, 10): + async def open_connection( + host: str | None = None, + port: int | str | None = None, + *, + limit: int = 65536, + ssl_handshake_timeout: float | None = None, + **kwds: Any, + ) -> tuple[StreamReader, StreamWriter]: ... + async def start_server( + client_connected_cb: _ClientConnectedCallback, + host: str | Sequence[str] | None = None, + port: int | str | None = None, + *, + limit: int = 65536, + ssl_handshake_timeout: float | None = None, + **kwds: Any, + ) -> Server: ... + +else: + async def open_connection( + host: str | None = None, + port: int | str | None = None, + *, + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, + ssl_handshake_timeout: float | None = None, + **kwds: Any, + ) -> tuple[StreamReader, StreamWriter]: ... + async def start_server( + client_connected_cb: _ClientConnectedCallback, + host: str | None = None, + port: int | str | None = None, + *, + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, + ssl_handshake_timeout: float | None = None, + **kwds: Any, + ) -> Server: ... + +if sys.platform != "win32": + if sys.version_info >= (3, 10): + async def open_unix_connection( + path: StrPath | None = None, *, limit: int = 65536, **kwds: Any + ) -> tuple[StreamReader, StreamWriter]: ... + async def start_unix_server( + client_connected_cb: _ClientConnectedCallback, path: StrPath | None = None, *, limit: int = 65536, **kwds: Any + ) -> Server: ... + else: + async def open_unix_connection( + path: StrPath | None = None, *, loop: events.AbstractEventLoop | None = None, limit: int = 65536, **kwds: Any + ) -> tuple[StreamReader, StreamWriter]: ... + async def start_unix_server( + client_connected_cb: _ClientConnectedCallback, + path: StrPath | None = None, + *, + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, + **kwds: Any, + ) -> Server: ... + +class FlowControlMixin(protocols.Protocol): + def __init__(self, loop: events.AbstractEventLoop | None = None) -> None: ... + +class StreamReaderProtocol(FlowControlMixin, protocols.Protocol): + def __init__( + self, + stream_reader: StreamReader, + client_connected_cb: _ClientConnectedCallback | None = None, + loop: events.AbstractEventLoop | None = None, + ) -> None: ... + def __del__(self) -> None: ... 
+ +class StreamWriter: + def __init__( + self, + transport: transports.WriteTransport, + protocol: protocols.BaseProtocol, + reader: StreamReader | None, + loop: events.AbstractEventLoop, + ) -> None: ... + @property + def transport(self) -> transports.WriteTransport: ... + def write(self, data: bytes | bytearray | memoryview) -> None: ... + def writelines(self, data: Iterable[bytes | bytearray | memoryview]) -> None: ... + def write_eof(self) -> None: ... + def can_write_eof(self) -> bool: ... + def close(self) -> None: ... + def is_closing(self) -> bool: ... + async def wait_closed(self) -> None: ... + def get_extra_info(self, name: str, default: Any = None) -> Any: ... + async def drain(self) -> None: ... + if sys.version_info >= (3, 12): + async def start_tls( + self, + sslcontext: ssl.SSLContext, + *, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> None: ... + elif sys.version_info >= (3, 11): + async def start_tls( + self, sslcontext: ssl.SSLContext, *, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None + ) -> None: ... + + if sys.version_info >= (3, 13): + def __del__(self, warnings: ModuleType = ...) -> None: ... + elif sys.version_info >= (3, 11): + def __del__(self) -> None: ... + +class StreamReader: + def __init__(self, limit: int = 65536, loop: events.AbstractEventLoop | None = None) -> None: ... + def exception(self) -> Exception: ... + def set_exception(self, exc: Exception) -> None: ... + def set_transport(self, transport: transports.BaseTransport) -> None: ... + def feed_eof(self) -> None: ... + def at_eof(self) -> bool: ... + def feed_data(self, data: Iterable[SupportsIndex]) -> None: ... + async def readline(self) -> bytes: ... + if sys.version_info >= (3, 13): + async def readuntil(self, separator: _ReaduntilBuffer | tuple[_ReaduntilBuffer, ...] = b"\n") -> bytes: ... + else: + async def readuntil(self, separator: _ReaduntilBuffer = b"\n") -> bytes: ... + + async def read(self, n: int = -1) -> bytes: ... + async def readexactly(self, n: int) -> bytes: ... + def __aiter__(self) -> Self: ... + async def __anext__(self) -> bytes: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/subprocess.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/subprocess.pyi new file mode 100644 index 0000000..ceee2b5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/subprocess.pyi @@ -0,0 +1,230 @@ +import subprocess +import sys +from _typeshed import StrOrBytesPath +from asyncio import events, protocols, streams, transports +from collections.abc import Callable, Collection +from typing import IO, Any, Literal + +# Keep asyncio.__all__ updated with any changes to __all__ here +__all__ = ("create_subprocess_exec", "create_subprocess_shell") + +PIPE: int +STDOUT: int +DEVNULL: int + +class SubprocessStreamProtocol(streams.FlowControlMixin, protocols.SubprocessProtocol): + stdin: streams.StreamWriter | None + stdout: streams.StreamReader | None + stderr: streams.StreamReader | None + def __init__(self, limit: int, loop: events.AbstractEventLoop) -> None: ... + def pipe_data_received(self, fd: int, data: bytes | str) -> None: ... 
+ +class Process: + stdin: streams.StreamWriter | None + stdout: streams.StreamReader | None + stderr: streams.StreamReader | None + pid: int + def __init__( + self, transport: transports.BaseTransport, protocol: protocols.BaseProtocol, loop: events.AbstractEventLoop + ) -> None: ... + @property + def returncode(self) -> int | None: ... + async def wait(self) -> int: ... + def send_signal(self, signal: int) -> None: ... + def terminate(self) -> None: ... + def kill(self) -> None: ... + async def communicate(self, input: bytes | bytearray | memoryview | None = None) -> tuple[bytes, bytes]: ... + +if sys.version_info >= (3, 11): + async def create_subprocess_shell( + cmd: str | bytes, + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + limit: int = 65536, + *, + # These parameters are forced to these values by BaseEventLoop.subprocess_shell + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + text: Literal[False] | None = None, + # These parameters are taken by subprocess.Popen, which this ultimately delegates to + executable: StrOrBytesPath | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + cwd: StrOrBytesPath | None = None, + env: subprocess._ENV | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + group: None | str | int = None, + extra_groups: None | Collection[str | int] = None, + user: None | str | int = None, + umask: int = -1, + process_group: int | None = None, + pipesize: int = -1, + ) -> Process: ... + async def create_subprocess_exec( + program: StrOrBytesPath, + *args: StrOrBytesPath, + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + limit: int = 65536, + # These parameters are forced to these values by BaseEventLoop.subprocess_exec + universal_newlines: Literal[False] = False, + shell: Literal[False] = False, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + text: Literal[False] | None = None, + # These parameters are taken by subprocess.Popen, which this ultimately delegates to + executable: StrOrBytesPath | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + cwd: StrOrBytesPath | None = None, + env: subprocess._ENV | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + group: None | str | int = None, + extra_groups: None | Collection[str | int] = None, + user: None | str | int = None, + umask: int = -1, + process_group: int | None = None, + pipesize: int = -1, + ) -> Process: ... 
+ +elif sys.version_info >= (3, 10): + async def create_subprocess_shell( + cmd: str | bytes, + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + limit: int = 65536, + *, + # These parameters are forced to these values by BaseEventLoop.subprocess_shell + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + text: Literal[False] | None = None, + # These parameters are taken by subprocess.Popen, which this ultimately delegates to + executable: StrOrBytesPath | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + cwd: StrOrBytesPath | None = None, + env: subprocess._ENV | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + group: None | str | int = None, + extra_groups: None | Collection[str | int] = None, + user: None | str | int = None, + umask: int = -1, + pipesize: int = -1, + ) -> Process: ... + async def create_subprocess_exec( + program: StrOrBytesPath, + *args: StrOrBytesPath, + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + limit: int = 65536, + # These parameters are forced to these values by BaseEventLoop.subprocess_exec + universal_newlines: Literal[False] = False, + shell: Literal[False] = False, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + text: Literal[False] | None = None, + # These parameters are taken by subprocess.Popen, which this ultimately delegates to + executable: StrOrBytesPath | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + cwd: StrOrBytesPath | None = None, + env: subprocess._ENV | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + group: None | str | int = None, + extra_groups: None | Collection[str | int] = None, + user: None | str | int = None, + umask: int = -1, + pipesize: int = -1, + ) -> Process: ... + +else: # >= 3.9 + async def create_subprocess_shell( + cmd: str | bytes, + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, + *, + # These parameters are forced to these values by BaseEventLoop.subprocess_shell + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + text: Literal[False] | None = None, + # These parameters are taken by subprocess.Popen, which this ultimately delegates to + executable: StrOrBytesPath | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + cwd: StrOrBytesPath | None = None, + env: subprocess._ENV | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + group: None | str | int = None, + extra_groups: None | Collection[str | int] = None, + user: None | str | int = None, + umask: int = -1, + ) -> Process: ... 
+ async def create_subprocess_exec( + program: StrOrBytesPath, + *args: StrOrBytesPath, + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, + # These parameters are forced to these values by BaseEventLoop.subprocess_exec + universal_newlines: Literal[False] = False, + shell: Literal[False] = False, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + text: Literal[False] | None = None, + # These parameters are taken by subprocess.Popen, which this ultimately delegates to + executable: StrOrBytesPath | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + cwd: StrOrBytesPath | None = None, + env: subprocess._ENV | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + group: None | str | int = None, + extra_groups: None | Collection[str | int] = None, + user: None | str | int = None, + umask: int = -1, + ) -> Process: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/taskgroups.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/taskgroups.pyi new file mode 100644 index 0000000..30b7c91 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/taskgroups.pyi @@ -0,0 +1,26 @@ +import sys +from contextvars import Context +from types import TracebackType +from typing import Any, TypeVar +from typing_extensions import Self + +from . import _CoroutineLike +from .events import AbstractEventLoop +from .tasks import Task + +# Keep asyncio.__all__ updated with any changes to __all__ here +if sys.version_info >= (3, 12): + __all__ = ("TaskGroup",) +else: + __all__ = ["TaskGroup"] + +_T = TypeVar("_T") + +class TaskGroup: + _loop: AbstractEventLoop | None + _tasks: set[Task[Any]] + + async def __aenter__(self) -> Self: ... + async def __aexit__(self, et: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... + def create_task(self, coro: _CoroutineLike[_T], *, name: str | None = None, context: Context | None = None) -> Task[_T]: ... + def _on_task_done(self, task: Task[object]) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/tasks.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/tasks.pyi new file mode 100644 index 0000000..1442f74 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/tasks.pyi @@ -0,0 +1,475 @@ +import concurrent.futures +import sys +from _asyncio import ( + Task as Task, + _enter_task as _enter_task, + _leave_task as _leave_task, + _register_task as _register_task, + _unregister_task as _unregister_task, +) +from collections.abc import AsyncIterator, Awaitable, Coroutine, Generator, Iterable, Iterator +from typing import Any, Final, Literal, Protocol, TypeVar, overload, type_check_only +from typing_extensions import TypeAlias + +from . 
import _CoroutineLike +from .events import AbstractEventLoop +from .futures import Future + +if sys.version_info >= (3, 11): + from contextvars import Context + +# Keep asyncio.__all__ updated with any changes to __all__ here +if sys.version_info >= (3, 12): + __all__ = ( + "Task", + "create_task", + "FIRST_COMPLETED", + "FIRST_EXCEPTION", + "ALL_COMPLETED", + "wait", + "wait_for", + "as_completed", + "sleep", + "gather", + "shield", + "ensure_future", + "run_coroutine_threadsafe", + "current_task", + "all_tasks", + "create_eager_task_factory", + "eager_task_factory", + "_register_task", + "_unregister_task", + "_enter_task", + "_leave_task", + ) +else: + __all__ = ( + "Task", + "create_task", + "FIRST_COMPLETED", + "FIRST_EXCEPTION", + "ALL_COMPLETED", + "wait", + "wait_for", + "as_completed", + "sleep", + "gather", + "shield", + "ensure_future", + "run_coroutine_threadsafe", + "current_task", + "all_tasks", + "_register_task", + "_unregister_task", + "_enter_task", + "_leave_task", + ) + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_T4 = TypeVar("_T4") +_T5 = TypeVar("_T5") +_T6 = TypeVar("_T6") +_FT = TypeVar("_FT", bound=Future[Any]) +if sys.version_info >= (3, 12): + _FutureLike: TypeAlias = Future[_T] | Awaitable[_T] +else: + _FutureLike: TypeAlias = Future[_T] | Generator[Any, None, _T] | Awaitable[_T] + +_TaskYieldType: TypeAlias = Future[object] | None + +FIRST_COMPLETED: Final = concurrent.futures.FIRST_COMPLETED +FIRST_EXCEPTION: Final = concurrent.futures.FIRST_EXCEPTION +ALL_COMPLETED: Final = concurrent.futures.ALL_COMPLETED + +if sys.version_info >= (3, 13): + @type_check_only + class _SyncAndAsyncIterator(Iterator[_T_co], AsyncIterator[_T_co], Protocol[_T_co]): ... + + def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = None) -> _SyncAndAsyncIterator[Future[_T]]: ... + +elif sys.version_info >= (3, 10): + def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = None) -> Iterator[Future[_T]]: ... + +else: + def as_completed( + fs: Iterable[_FutureLike[_T]], *, loop: AbstractEventLoop | None = None, timeout: float | None = None + ) -> Iterator[Future[_T]]: ... + +@overload +def ensure_future(coro_or_future: _FT, *, loop: AbstractEventLoop | None = None) -> _FT: ... # type: ignore[overload-overlap] +@overload +def ensure_future(coro_or_future: Awaitable[_T], *, loop: AbstractEventLoop | None = None) -> Task[_T]: ... + +# `gather()` actually returns a list with length equal to the number +# of tasks passed; however, Tuple is used similar to the annotation for +# zip() because typing does not support variadic type variables. See +# typing PR #1550 for discussion. +# +# N.B. Having overlapping overloads is the only way to get acceptable type inference in all edge cases. +if sys.version_info >= (3, 10): + @overload + def gather(coro_or_future1: _FutureLike[_T1], /, *, return_exceptions: Literal[False] = False) -> Future[tuple[_T1]]: ... # type: ignore[overload-overlap] + @overload + def gather( # type: ignore[overload-overlap] + coro_or_future1: _FutureLike[_T1], coro_or_future2: _FutureLike[_T2], /, *, return_exceptions: Literal[False] = False + ) -> Future[tuple[_T1, _T2]]: ... + @overload + def gather( # type: ignore[overload-overlap] + coro_or_future1: _FutureLike[_T1], + coro_or_future2: _FutureLike[_T2], + coro_or_future3: _FutureLike[_T3], + /, + *, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3]]: ... 
+ @overload + def gather( # type: ignore[overload-overlap] + coro_or_future1: _FutureLike[_T1], + coro_or_future2: _FutureLike[_T2], + coro_or_future3: _FutureLike[_T3], + coro_or_future4: _FutureLike[_T4], + /, + *, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3, _T4]]: ... + @overload + def gather( # type: ignore[overload-overlap] + coro_or_future1: _FutureLike[_T1], + coro_or_future2: _FutureLike[_T2], + coro_or_future3: _FutureLike[_T3], + coro_or_future4: _FutureLike[_T4], + coro_or_future5: _FutureLike[_T5], + /, + *, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ... + @overload + def gather( # type: ignore[overload-overlap] + coro_or_future1: _FutureLike[_T1], + coro_or_future2: _FutureLike[_T2], + coro_or_future3: _FutureLike[_T3], + coro_or_future4: _FutureLike[_T4], + coro_or_future5: _FutureLike[_T5], + coro_or_future6: _FutureLike[_T6], + /, + *, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ... + @overload + def gather(*coros_or_futures: _FutureLike[_T], return_exceptions: Literal[False] = False) -> Future[list[_T]]: ... # type: ignore[overload-overlap] + @overload + def gather(coro_or_future1: _FutureLike[_T1], /, *, return_exceptions: bool) -> Future[tuple[_T1 | BaseException]]: ... + @overload + def gather( + coro_or_future1: _FutureLike[_T1], coro_or_future2: _FutureLike[_T2], /, *, return_exceptions: bool + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException]]: ... + @overload + def gather( + coro_or_future1: _FutureLike[_T1], + coro_or_future2: _FutureLike[_T2], + coro_or_future3: _FutureLike[_T3], + /, + *, + return_exceptions: bool, + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException]]: ... + @overload + def gather( + coro_or_future1: _FutureLike[_T1], + coro_or_future2: _FutureLike[_T2], + coro_or_future3: _FutureLike[_T3], + coro_or_future4: _FutureLike[_T4], + /, + *, + return_exceptions: bool, + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException]]: ... + @overload + def gather( + coro_or_future1: _FutureLike[_T1], + coro_or_future2: _FutureLike[_T2], + coro_or_future3: _FutureLike[_T3], + coro_or_future4: _FutureLike[_T4], + coro_or_future5: _FutureLike[_T5], + /, + *, + return_exceptions: bool, + ) -> Future[ + tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException] + ]: ... + @overload + def gather( + coro_or_future1: _FutureLike[_T1], + coro_or_future2: _FutureLike[_T2], + coro_or_future3: _FutureLike[_T3], + coro_or_future4: _FutureLike[_T4], + coro_or_future5: _FutureLike[_T5], + coro_or_future6: _FutureLike[_T6], + /, + *, + return_exceptions: bool, + ) -> Future[ + tuple[ + _T1 | BaseException, + _T2 | BaseException, + _T3 | BaseException, + _T4 | BaseException, + _T5 | BaseException, + _T6 | BaseException, + ] + ]: ... + @overload + def gather(*coros_or_futures: _FutureLike[_T], return_exceptions: bool) -> Future[list[_T | BaseException]]: ... + +else: + @overload + def gather( # type: ignore[overload-overlap] + coro_or_future1: _FutureLike[_T1], /, *, loop: AbstractEventLoop | None = None, return_exceptions: Literal[False] = False + ) -> Future[tuple[_T1]]: ... 
+ @overload + def gather( # type: ignore[overload-overlap] + coro_or_future1: _FutureLike[_T1], + coro_or_future2: _FutureLike[_T2], + /, + *, + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2]]: ... + @overload + def gather( # type: ignore[overload-overlap] + coro_or_future1: _FutureLike[_T1], + coro_or_future2: _FutureLike[_T2], + coro_or_future3: _FutureLike[_T3], + /, + *, + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3]]: ... + @overload + def gather( # type: ignore[overload-overlap] + coro_or_future1: _FutureLike[_T1], + coro_or_future2: _FutureLike[_T2], + coro_or_future3: _FutureLike[_T3], + coro_or_future4: _FutureLike[_T4], + /, + *, + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3, _T4]]: ... + @overload + def gather( # type: ignore[overload-overlap] + coro_or_future1: _FutureLike[_T1], + coro_or_future2: _FutureLike[_T2], + coro_or_future3: _FutureLike[_T3], + coro_or_future4: _FutureLike[_T4], + coro_or_future5: _FutureLike[_T5], + /, + *, + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ... + @overload + def gather( # type: ignore[overload-overlap] + coro_or_future1: _FutureLike[_T1], + coro_or_future2: _FutureLike[_T2], + coro_or_future3: _FutureLike[_T3], + coro_or_future4: _FutureLike[_T4], + coro_or_future5: _FutureLike[_T5], + coro_or_future6: _FutureLike[_T6], + /, + *, + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ... + @overload + def gather( # type: ignore[overload-overlap] + *coros_or_futures: _FutureLike[_T], loop: AbstractEventLoop | None = None, return_exceptions: Literal[False] = False + ) -> Future[list[_T]]: ... + @overload + def gather( # type: ignore[overload-overlap] + coro_or_future1: _FutureLike[_T1], /, *, loop: AbstractEventLoop | None = None, return_exceptions: bool + ) -> Future[tuple[_T1 | BaseException]]: ... + @overload + def gather( # type: ignore[overload-overlap] + coro_or_future1: _FutureLike[_T1], + coro_or_future2: _FutureLike[_T2], + /, + *, + loop: AbstractEventLoop | None = None, + return_exceptions: bool, + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException]]: ... + @overload + def gather( # type: ignore[overload-overlap] + coro_or_future1: _FutureLike[_T1], + coro_or_future2: _FutureLike[_T2], + coro_or_future3: _FutureLike[_T3], + /, + *, + loop: AbstractEventLoop | None = None, + return_exceptions: bool, + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException]]: ... + @overload + def gather( # type: ignore[overload-overlap] + coro_or_future1: _FutureLike[_T1], + coro_or_future2: _FutureLike[_T2], + coro_or_future3: _FutureLike[_T3], + coro_or_future4: _FutureLike[_T4], + /, + *, + loop: AbstractEventLoop | None = None, + return_exceptions: bool, + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException]]: ... 
+ @overload + def gather( # type: ignore[overload-overlap] + coro_or_future1: _FutureLike[_T1], + coro_or_future2: _FutureLike[_T2], + coro_or_future3: _FutureLike[_T3], + coro_or_future4: _FutureLike[_T4], + coro_or_future5: _FutureLike[_T5], + coro_or_future6: _FutureLike[_T6], + /, + *, + loop: AbstractEventLoop | None = None, + return_exceptions: bool, + ) -> Future[ + tuple[ + _T1 | BaseException, + _T2 | BaseException, + _T3 | BaseException, + _T4 | BaseException, + _T5 | BaseException, + _T6 | BaseException, + ] + ]: ... + @overload + def gather( + *coros_or_futures: _FutureLike[_T], loop: AbstractEventLoop | None = None, return_exceptions: bool + ) -> Future[list[_T | BaseException]]: ... + +# unlike some asyncio apis, This does strict runtime checking of actually being a coroutine, not of any future-like. +def run_coroutine_threadsafe(coro: Coroutine[Any, Any, _T], loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: ... + +if sys.version_info >= (3, 10): + def shield(arg: _FutureLike[_T]) -> Future[_T]: ... + @overload + async def sleep(delay: float) -> None: ... + @overload + async def sleep(delay: float, result: _T) -> _T: ... + async def wait_for(fut: _FutureLike[_T], timeout: float | None) -> _T: ... + +else: + def shield(arg: _FutureLike[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: ... + @overload + async def sleep(delay: float, *, loop: AbstractEventLoop | None = None) -> None: ... + @overload + async def sleep(delay: float, result: _T, *, loop: AbstractEventLoop | None = None) -> _T: ... + async def wait_for(fut: _FutureLike[_T], timeout: float | None, *, loop: AbstractEventLoop | None = None) -> _T: ... + +if sys.version_info >= (3, 11): + @overload + async def wait( + fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" + ) -> tuple[set[_FT], set[_FT]]: ... + @overload + async def wait( + fs: Iterable[Task[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" + ) -> tuple[set[Task[_T]], set[Task[_T]]]: ... + +elif sys.version_info >= (3, 10): + @overload + async def wait( # type: ignore[overload-overlap] + fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" + ) -> tuple[set[_FT], set[_FT]]: ... + @overload + async def wait( + fs: Iterable[Awaitable[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" + ) -> tuple[set[Task[_T]], set[Task[_T]]]: ... + +else: + @overload + async def wait( # type: ignore[overload-overlap] + fs: Iterable[_FT], + *, + loop: AbstractEventLoop | None = None, + timeout: float | None = None, + return_when: str = "ALL_COMPLETED", + ) -> tuple[set[_FT], set[_FT]]: ... + @overload + async def wait( + fs: Iterable[Awaitable[_T]], + *, + loop: AbstractEventLoop | None = None, + timeout: float | None = None, + return_when: str = "ALL_COMPLETED", + ) -> tuple[set[Task[_T]], set[Task[_T]]]: ... + +if sys.version_info >= (3, 12): + _TaskCompatibleCoro: TypeAlias = Coroutine[Any, Any, _T_co] +else: + _TaskCompatibleCoro: TypeAlias = Generator[_TaskYieldType, None, _T_co] | Coroutine[Any, Any, _T_co] + +def all_tasks(loop: AbstractEventLoop | None = None) -> set[Task[Any]]: ... + +if sys.version_info >= (3, 11): + def create_task(coro: _CoroutineLike[_T], *, name: str | None = None, context: Context | None = None) -> Task[_T]: ... + +else: + def create_task(coro: _CoroutineLike[_T], *, name: str | None = None) -> Task[_T]: ... 
+ +if sys.version_info >= (3, 12): + from _asyncio import current_task as current_task +else: + def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: ... + +if sys.version_info >= (3, 14): + def eager_task_factory( + loop: AbstractEventLoop | None, + coro: _TaskCompatibleCoro[_T_co], + *, + name: str | None = None, + context: Context | None = None, + eager_start: bool = True, + ) -> Task[_T_co]: ... + +elif sys.version_info >= (3, 12): + def eager_task_factory( + loop: AbstractEventLoop | None, + coro: _TaskCompatibleCoro[_T_co], + *, + name: str | None = None, + context: Context | None = None, + ) -> Task[_T_co]: ... + +if sys.version_info >= (3, 12): + _TaskT_co = TypeVar("_TaskT_co", bound=Task[Any], covariant=True) + + @type_check_only + class _CustomTaskConstructor(Protocol[_TaskT_co]): + def __call__( + self, + coro: _TaskCompatibleCoro[Any], + /, + *, + loop: AbstractEventLoop, + name: str | None, + context: Context | None, + eager_start: bool, + ) -> _TaskT_co: ... + + @type_check_only + class _EagerTaskFactoryType(Protocol[_TaskT_co]): + def __call__( + self, + loop: AbstractEventLoop, + coro: _TaskCompatibleCoro[Any], + *, + name: str | None = None, + context: Context | None = None, + ) -> _TaskT_co: ... + + def create_eager_task_factory( + custom_task_constructor: _CustomTaskConstructor[_TaskT_co], + ) -> _EagerTaskFactoryType[_TaskT_co]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/threads.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/threads.pyi new file mode 100644 index 0000000..00aae2e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/threads.pyi @@ -0,0 +1,10 @@ +from collections.abc import Callable +from typing import TypeVar +from typing_extensions import ParamSpec + +# Keep asyncio.__all__ updated with any changes to __all__ here +__all__ = ("to_thread",) +_P = ParamSpec("_P") +_R = TypeVar("_R") + +async def to_thread(func: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/timeouts.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/timeouts.pyi new file mode 100644 index 0000000..668cccb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/timeouts.pyi @@ -0,0 +1,20 @@ +from types import TracebackType +from typing import final +from typing_extensions import Self + +# Keep asyncio.__all__ updated with any changes to __all__ here +__all__ = ("Timeout", "timeout", "timeout_at") + +@final +class Timeout: + def __init__(self, when: float | None) -> None: ... + def when(self) -> float | None: ... + def reschedule(self, when: float | None) -> None: ... + def expired(self) -> bool: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + +def timeout(delay: float | None) -> Timeout: ... +def timeout_at(when: float | None) -> Timeout: ... 
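taskgroups.pyi, tasks.pyi, threads.pyi and timeouts.pyi together cover the modern task-management surface. A hedged sketch combining them (illustrative only, not part of the committed files; assumes Python 3.11+ for TaskGroup and asyncio.timeout):

import asyncio
import time

# Illustrative sketch only (not from the committed stubs); assumes Python 3.11+.
def blocking_io() -> str:
    time.sleep(0.1)       # stands in for blocking work
    return "io done"

async def main() -> None:
    async with asyncio.timeout(5):                 # Timeout from timeouts.pyi
        async with asyncio.TaskGroup() as tg:      # TaskGroup from taskgroups.pyi
            t1 = tg.create_task(asyncio.to_thread(blocking_io))      # threads.pyi
            t2 = tg.create_task(asyncio.sleep(0.2, result="slept"))  # tasks.pyi
    print(t1.result(), t2.result())

asyncio.run(main())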
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/tools.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/tools.pyi new file mode 100644 index 0000000..bc8b809 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/tools.pyi @@ -0,0 +1,46 @@ +import sys +from collections.abc import Iterable +from enum import Enum +from typing import NamedTuple, SupportsIndex, type_check_only + +@type_check_only +class _AwaitedInfo(NamedTuple): # AwaitedInfo_Type from _remote_debugging + thread_id: int + awaited_by: list[_TaskInfo] + +@type_check_only +class _TaskInfo(NamedTuple): # TaskInfo_Type from _remote_debugging + task_id: int + task_name: str + coroutine_stack: list[_CoroInfo] + awaited_by: list[_CoroInfo] + +@type_check_only +class _CoroInfo(NamedTuple): # CoroInfo_Type from _remote_debugging + call_stack: list[_FrameInfo] + task_name: int | str + +@type_check_only +class _FrameInfo(NamedTuple): # FrameInfo_Type from _remote_debugging + filename: str + lineno: int + funcname: str + +class NodeType(Enum): + COROUTINE = 1 + TASK = 2 + +class CycleFoundException(Exception): + cycles: list[list[int]] + id2name: dict[int, str] + def __init__(self, cycles: list[list[int]], id2name: dict[int, str]) -> None: ... + +def get_all_awaited_by(pid: SupportsIndex) -> list[_AwaitedInfo]: ... +def build_async_tree(result: Iterable[_AwaitedInfo], task_emoji: str = "(T)", cor_emoji: str = "") -> list[list[str]]: ... +def build_task_table(result: Iterable[_AwaitedInfo]) -> list[list[int | str]]: ... + +if sys.version_info >= (3, 14): + def exit_with_permission_help_text() -> None: ... + +def display_awaited_by_tasks_table(pid: SupportsIndex) -> None: ... +def display_awaited_by_tasks_tree(pid: SupportsIndex) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/transports.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/transports.pyi new file mode 100644 index 0000000..cc870d5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/transports.pyi @@ -0,0 +1,57 @@ +from asyncio.events import AbstractEventLoop +from asyncio.protocols import BaseProtocol +from collections.abc import Iterable, Mapping +from socket import _Address +from typing import Any + +# Keep asyncio.__all__ updated with any changes to __all__ here +__all__ = ("BaseTransport", "ReadTransport", "WriteTransport", "Transport", "DatagramTransport", "SubprocessTransport") + +class BaseTransport: + __slots__ = ("_extra",) + def __init__(self, extra: Mapping[str, Any] | None = None) -> None: ... + def get_extra_info(self, name: str, default: Any = None) -> Any: ... + def is_closing(self) -> bool: ... + def close(self) -> None: ... + def set_protocol(self, protocol: BaseProtocol) -> None: ... + def get_protocol(self) -> BaseProtocol: ... + +class ReadTransport(BaseTransport): + __slots__ = () + def is_reading(self) -> bool: ... + def pause_reading(self) -> None: ... + def resume_reading(self) -> None: ... + +class WriteTransport(BaseTransport): + __slots__ = () + def set_write_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: ... + def get_write_buffer_size(self) -> int: ... + def get_write_buffer_limits(self) -> tuple[int, int]: ... + def write(self, data: bytes | bytearray | memoryview[Any]) -> None: ... # any memoryview format or shape + def writelines( + self, list_of_data: Iterable[bytes | bytearray | memoryview[Any]] + ) -> None: ... 
# any memoryview format or shape + def write_eof(self) -> None: ... + def can_write_eof(self) -> bool: ... + def abort(self) -> None: ... + +class Transport(ReadTransport, WriteTransport): + __slots__ = () + +class DatagramTransport(BaseTransport): + __slots__ = () + def sendto(self, data: bytes | bytearray | memoryview, addr: _Address | None = None) -> None: ... + def abort(self) -> None: ... + +class SubprocessTransport(BaseTransport): + __slots__ = () + def get_pid(self) -> int: ... + def get_returncode(self) -> int | None: ... + def get_pipe_transport(self, fd: int) -> BaseTransport | None: ... + def send_signal(self, signal: int) -> None: ... + def terminate(self) -> None: ... + def kill(self) -> None: ... + +class _FlowControlMixin(Transport): + __slots__ = ("_loop", "_protocol_paused", "_high_water", "_low_water") + def __init__(self, extra: Mapping[str, Any] | None = None, loop: AbstractEventLoop | None = None) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/trsock.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/trsock.pyi new file mode 100644 index 0000000..492f1e4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/trsock.pyi @@ -0,0 +1,129 @@ +import socket +import sys +from _typeshed import ReadableBuffer +from builtins import type as Type # alias to avoid name clashes with property named "type" +from collections.abc import Iterable +from types import TracebackType +from typing import Any, BinaryIO, NoReturn, overload +from typing_extensions import TypeAlias, deprecated + +# These are based in socket, maybe move them out into _typeshed.pyi or such +_Address: TypeAlias = socket._Address +_RetAddress: TypeAlias = Any +_WriteBuffer: TypeAlias = bytearray | memoryview +_CMSG: TypeAlias = tuple[int, int, bytes] + +class TransportSocket: + __slots__ = ("_sock",) + def __init__(self, sock: socket.socket) -> None: ... + @property + def family(self) -> int: ... + @property + def type(self) -> int: ... + @property + def proto(self) -> int: ... + def __getstate__(self) -> NoReturn: ... + def fileno(self) -> int: ... + def dup(self) -> socket.socket: ... + def get_inheritable(self) -> bool: ... + def shutdown(self, how: int) -> None: ... + @overload + def getsockopt(self, level: int, optname: int) -> int: ... + @overload + def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ... + @overload + def setsockopt(self, level: int, optname: int, value: int | ReadableBuffer) -> None: ... + @overload + def setsockopt(self, level: int, optname: int, value: None, optlen: int) -> None: ... + def getpeername(self) -> _RetAddress: ... + def getsockname(self) -> _RetAddress: ... + def getsockbyname(self) -> NoReturn: ... # This method doesn't exist on socket, yet is passed through? + def settimeout(self, value: float | None) -> None: ... + def gettimeout(self) -> float | None: ... + def setblocking(self, flag: bool) -> None: ... + if sys.version_info < (3, 11): + def _na(self, what: str) -> None: ... + @deprecated("Removed in Python 3.11") + def accept(self) -> tuple[socket.socket, _RetAddress]: ... + @deprecated("Removed in Python 3.11") + def connect(self, address: _Address) -> None: ... + @deprecated("Removed in Python 3.11") + def connect_ex(self, address: _Address) -> int: ... + @deprecated("Removed in Python 3.11") + def bind(self, address: _Address) -> None: ... 
+        if sys.platform == "win32":
+            @deprecated("Removed in Python 3.11")
+            def ioctl(self, control: int, option: int | tuple[int, int, int] | bool) -> None: ...
+        else:
+            @deprecated("Removed in Python 3.11")
+            def ioctl(self, control: int, option: int | tuple[int, int, int] | bool) -> NoReturn: ...
+
+        @deprecated("Removed in Python 3.11")
+        def listen(self, backlog: int = ..., /) -> None: ...
+        @deprecated("Removed in Python 3.11")
+        def makefile(self) -> BinaryIO: ...
+        @deprecated("Removed in Python 3.11")
+        def sendfile(self, file: BinaryIO, offset: int = 0, count: int | None = None) -> int: ...
+        @deprecated("Removed in Python 3.11")
+        def close(self) -> None: ...
+        @deprecated("Removed in Python 3.11")
+        def detach(self) -> int: ...
+        if sys.platform == "linux":
+            @deprecated("Removed in Python 3.11")
+            def sendmsg_afalg(
+                self, msg: Iterable[ReadableBuffer] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = 0
+            ) -> int: ...
+        else:
+            @deprecated("Removed in Python 3.11.")
+            def sendmsg_afalg(
+                self, msg: Iterable[ReadableBuffer] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = 0
+            ) -> NoReturn: ...
+
+        @deprecated("Removed in Python 3.11.")
+        def sendmsg(
+            self,
+            buffers: Iterable[ReadableBuffer],
+            ancdata: Iterable[_CMSG] = ...,
+            flags: int = 0,
+            address: _Address | None = None,
+            /,
+        ) -> int: ...
+        @overload
+        @deprecated("Removed in Python 3.11.")
+        def sendto(self, data: ReadableBuffer, address: _Address) -> int: ...
+        @overload
+        @deprecated("Removed in Python 3.11.")
+        def sendto(self, data: ReadableBuffer, flags: int, address: _Address) -> int: ...
+        @deprecated("Removed in Python 3.11.")
+        def send(self, data: ReadableBuffer, flags: int = 0) -> int: ...
+        @deprecated("Removed in Python 3.11.")
+        def sendall(self, data: ReadableBuffer, flags: int = 0) -> None: ...
+        @deprecated("Removed in Python 3.11.")
+        def set_inheritable(self, inheritable: bool) -> None: ...
+        if sys.platform == "win32":
+            @deprecated("Removed in Python 3.11.")
+            def share(self, process_id: int) -> bytes: ...
+        else:
+            @deprecated("Removed in Python 3.11.")
+            def share(self, process_id: int) -> NoReturn: ...
+
+        @deprecated("Removed in Python 3.11.")
+        def recv_into(self, buffer: _WriteBuffer, nbytes: int = 0, flags: int = 0) -> int: ...
+        @deprecated("Removed in Python 3.11.")
+        def recvfrom_into(self, buffer: _WriteBuffer, nbytes: int = 0, flags: int = 0) -> tuple[int, _RetAddress]: ...
+        @deprecated("Removed in Python 3.11.")
+        def recvmsg_into(
+            self, buffers: Iterable[_WriteBuffer], ancbufsize: int = 0, flags: int = 0, /
+        ) -> tuple[int, list[_CMSG], int, Any]: ...
+        @deprecated("Removed in Python 3.11.")
+        def recvmsg(self, bufsize: int, ancbufsize: int = 0, flags: int = 0, /) -> tuple[bytes, list[_CMSG], int, Any]: ...
+        @deprecated("Removed in Python 3.11.")
+        def recvfrom(self, bufsize: int, flags: int = 0) -> tuple[bytes, _RetAddress]: ...
+        @deprecated("Removed in Python 3.11.")
+        def recv(self, bufsize: int, flags: int = 0) -> bytes: ...
+        @deprecated("Removed in Python 3.11.")
+        def __enter__(self) -> socket.socket: ...
+        @deprecated("Removed in Python 3.11.")
+        def __exit__(
+            self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
+        ) -> None: ...
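transports.pyi and trsock.pyi describe the transport layer and the TransportSocket wrapper that transports hand out through get_extra_info("socket"). A hedged end-to-end sketch using the stream APIs from streams.pyi above (illustrative only, not part of the committed files; host and port are arbitrary):

import asyncio

# Illustrative sketch only (not from the committed stubs).
async def handle(reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
    data = await reader.readline()
    writer.write(data)                 # echo the line back
    await writer.drain()
    writer.close()
    await writer.wait_closed()

async def main() -> None:
    server = await asyncio.start_server(handle, "127.0.0.1", 0)   # port 0: pick a free port
    host, port = server.sockets[0].getsockname()[:2]

    reader, writer = await asyncio.open_connection(host, port)
    sock = writer.get_extra_info("socket")   # TransportSocket wrapper, per trsock.pyi
    print(type(sock).__name__, sock.family)

    writer.write(b"hello\n")
    print(await reader.readline())

    writer.close()
    await writer.wait_closed()
    server.close()
    await server.wait_closed()

asyncio.run(main())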
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/unix_events.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/unix_events.pyi new file mode 100644 index 0000000..9071ee9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/unix_events.pyi @@ -0,0 +1,248 @@ +import sys +import types +from _typeshed import StrPath +from abc import ABCMeta, abstractmethod +from collections.abc import Callable +from socket import socket +from typing import Literal +from typing_extensions import Self, TypeVarTuple, Unpack, deprecated + +from . import events +from .base_events import Server, _ProtocolFactory, _SSLContext +from .selector_events import BaseSelectorEventLoop + +_Ts = TypeVarTuple("_Ts") + +# Keep asyncio.__all__ updated with any changes to __all__ here +if sys.platform != "win32": + if sys.version_info >= (3, 14): + __all__ = ("SelectorEventLoop", "EventLoop") + elif sys.version_info >= (3, 13): + # Adds EventLoop + __all__ = ( + "SelectorEventLoop", + "AbstractChildWatcher", + "SafeChildWatcher", + "FastChildWatcher", + "PidfdChildWatcher", + "MultiLoopChildWatcher", + "ThreadedChildWatcher", + "DefaultEventLoopPolicy", + "EventLoop", + ) + else: + # adds PidfdChildWatcher + __all__ = ( + "SelectorEventLoop", + "AbstractChildWatcher", + "SafeChildWatcher", + "FastChildWatcher", + "PidfdChildWatcher", + "MultiLoopChildWatcher", + "ThreadedChildWatcher", + "DefaultEventLoopPolicy", + ) + +# This is also technically not available on Win, +# but other parts of typeshed need this definition. +# So, it is special cased. +if sys.version_info < (3, 14): + if sys.version_info >= (3, 12): + @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") + class AbstractChildWatcher: + @abstractmethod + def add_child_handler( + self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] + ) -> None: ... + @abstractmethod + def remove_child_handler(self, pid: int) -> bool: ... + @abstractmethod + def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: ... + @abstractmethod + def close(self) -> None: ... + @abstractmethod + def __enter__(self) -> Self: ... + @abstractmethod + def __exit__( + self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None + ) -> None: ... + @abstractmethod + def is_active(self) -> bool: ... + + else: + class AbstractChildWatcher: + @abstractmethod + def add_child_handler( + self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] + ) -> None: ... + @abstractmethod + def remove_child_handler(self, pid: int) -> bool: ... + @abstractmethod + def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: ... + @abstractmethod + def close(self) -> None: ... + @abstractmethod + def __enter__(self) -> Self: ... + @abstractmethod + def __exit__( + self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None + ) -> None: ... + @abstractmethod + def is_active(self) -> bool: ... + +if sys.platform != "win32": + if sys.version_info < (3, 14): + if sys.version_info >= (3, 12): + # Doesn't actually have ABCMeta metaclass at runtime, but mypy complains if we don't have it in the stub. + # See discussion in #7412 + class BaseChildWatcher(AbstractChildWatcher, metaclass=ABCMeta): + def close(self) -> None: ... + def is_active(self) -> bool: ... + def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: ... 
+ + @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") + class SafeChildWatcher(BaseChildWatcher): + def __enter__(self) -> Self: ... + def __exit__( + self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None + ) -> None: ... + def add_child_handler( + self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] + ) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + + @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") + class FastChildWatcher(BaseChildWatcher): + def __enter__(self) -> Self: ... + def __exit__( + self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None + ) -> None: ... + def add_child_handler( + self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] + ) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + + else: + # Doesn't actually have ABCMeta metaclass at runtime, but mypy complains if we don't have it in the stub. + # See discussion in #7412 + class BaseChildWatcher(AbstractChildWatcher, metaclass=ABCMeta): + def close(self) -> None: ... + def is_active(self) -> bool: ... + def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: ... + + class SafeChildWatcher(BaseChildWatcher): + def __enter__(self) -> Self: ... + def __exit__( + self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None + ) -> None: ... + def add_child_handler( + self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] + ) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + + class FastChildWatcher(BaseChildWatcher): + def __enter__(self) -> Self: ... + def __exit__( + self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None + ) -> None: ... + def add_child_handler( + self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] + ) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + + class _UnixSelectorEventLoop(BaseSelectorEventLoop): + if sys.version_info >= (3, 13): + async def create_unix_server( + self, + protocol_factory: _ProtocolFactory, + path: StrPath | None = None, + *, + sock: socket | None = None, + backlog: int = 100, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + cleanup_socket: bool = True, + ) -> Server: ... + + if sys.version_info >= (3, 14): + class _UnixDefaultEventLoopPolicy(events._BaseDefaultEventLoopPolicy): ... + else: + class _UnixDefaultEventLoopPolicy(events.BaseDefaultEventLoopPolicy): + if sys.version_info >= (3, 12): + @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") + def get_child_watcher(self) -> AbstractChildWatcher: ... + @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") + def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ... + else: + def get_child_watcher(self) -> AbstractChildWatcher: ... + def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ... 
+ + SelectorEventLoop = _UnixSelectorEventLoop + + if sys.version_info >= (3, 14): + _DefaultEventLoopPolicy = _UnixDefaultEventLoopPolicy + else: + DefaultEventLoopPolicy = _UnixDefaultEventLoopPolicy + + if sys.version_info >= (3, 13): + EventLoop = SelectorEventLoop + + if sys.version_info < (3, 14): + if sys.version_info >= (3, 12): + @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") + class MultiLoopChildWatcher(AbstractChildWatcher): + def is_active(self) -> bool: ... + def close(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... + def add_child_handler( + self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] + ) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: ... + + else: + class MultiLoopChildWatcher(AbstractChildWatcher): + def is_active(self) -> bool: ... + def close(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... + def add_child_handler( + self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] + ) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: ... + + if sys.version_info < (3, 14): + class ThreadedChildWatcher(AbstractChildWatcher): + def is_active(self) -> Literal[True]: ... + def close(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... + def __del__(self) -> None: ... + def add_child_handler( + self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] + ) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: ... + + class PidfdChildWatcher(AbstractChildWatcher): + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... + def is_active(self) -> bool: ... + def close(self) -> None: ... + def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: ... + def add_child_handler( + self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] + ) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/windows_events.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/windows_events.pyi new file mode 100644 index 0000000..a32381b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/windows_events.pyi @@ -0,0 +1,121 @@ +import socket +import sys +from _typeshed import Incomplete, ReadableBuffer, WriteableBuffer +from collections.abc import Callable +from typing import IO, Any, ClassVar, Final, NoReturn + +from . 
import events, futures, proactor_events, selector_events, streams, windows_utils + +# Keep asyncio.__all__ updated with any changes to __all__ here +if sys.platform == "win32": + if sys.version_info >= (3, 14): + __all__ = ( + "SelectorEventLoop", + "ProactorEventLoop", + "IocpProactor", + "_DefaultEventLoopPolicy", + "_WindowsSelectorEventLoopPolicy", + "_WindowsProactorEventLoopPolicy", + "EventLoop", + ) + elif sys.version_info >= (3, 13): + # 3.13 added `EventLoop`. + __all__ = ( + "SelectorEventLoop", + "ProactorEventLoop", + "IocpProactor", + "DefaultEventLoopPolicy", + "WindowsSelectorEventLoopPolicy", + "WindowsProactorEventLoopPolicy", + "EventLoop", + ) + else: + __all__ = ( + "SelectorEventLoop", + "ProactorEventLoop", + "IocpProactor", + "DefaultEventLoopPolicy", + "WindowsSelectorEventLoopPolicy", + "WindowsProactorEventLoopPolicy", + ) + + NULL: Final = 0 + INFINITE: Final = 0xFFFFFFFF + ERROR_CONNECTION_REFUSED: Final = 1225 + ERROR_CONNECTION_ABORTED: Final = 1236 + CONNECT_PIPE_INIT_DELAY: float + CONNECT_PIPE_MAX_DELAY: float + + class PipeServer: + def __init__(self, address: str) -> None: ... + def __del__(self) -> None: ... + def closed(self) -> bool: ... + def close(self) -> None: ... + + class _WindowsSelectorEventLoop(selector_events.BaseSelectorEventLoop): ... + + class ProactorEventLoop(proactor_events.BaseProactorEventLoop): + def __init__(self, proactor: IocpProactor | None = None) -> None: ... + async def create_pipe_connection( + self, protocol_factory: Callable[[], streams.StreamReaderProtocol], address: str + ) -> tuple[proactor_events._ProactorDuplexPipeTransport, streams.StreamReaderProtocol]: ... + async def start_serving_pipe( + self, protocol_factory: Callable[[], streams.StreamReaderProtocol], address: str + ) -> list[PipeServer]: ... + + class IocpProactor: + def __init__(self, concurrency: int = 0xFFFFFFFF) -> None: ... + def __del__(self) -> None: ... + def set_loop(self, loop: events.AbstractEventLoop) -> None: ... + def select(self, timeout: int | None = None) -> list[futures.Future[Any]]: ... + def recv(self, conn: socket.socket, nbytes: int, flags: int = 0) -> futures.Future[bytes]: ... + def recv_into(self, conn: socket.socket, buf: WriteableBuffer, flags: int = 0) -> futures.Future[Any]: ... + def recvfrom( + self, conn: socket.socket, nbytes: int, flags: int = 0 + ) -> futures.Future[tuple[bytes, socket._RetAddress]]: ... + def sendto( + self, conn: socket.socket, buf: ReadableBuffer, flags: int = 0, addr: socket._Address | None = None + ) -> futures.Future[int]: ... + def send(self, conn: socket.socket, buf: WriteableBuffer, flags: int = 0) -> futures.Future[Any]: ... + def accept(self, listener: socket.socket) -> futures.Future[Any]: ... + def connect( + self, + conn: socket.socket, + address: tuple[Incomplete, Incomplete] | tuple[Incomplete, Incomplete, Incomplete, Incomplete], + ) -> futures.Future[Any]: ... + def sendfile(self, sock: socket.socket, file: IO[bytes], offset: int, count: int) -> futures.Future[Any]: ... + def accept_pipe(self, pipe: socket.socket) -> futures.Future[Any]: ... + async def connect_pipe(self, address: str) -> windows_utils.PipeHandle: ... + def wait_for_handle(self, handle: windows_utils.PipeHandle, timeout: int | None = None) -> bool: ... + def close(self) -> None: ... + if sys.version_info >= (3, 11): + def recvfrom_into( + self, conn: socket.socket, buf: WriteableBuffer, flags: int = 0 + ) -> futures.Future[tuple[int, socket._RetAddress]]: ... 
+ + SelectorEventLoop = _WindowsSelectorEventLoop + + if sys.version_info >= (3, 14): + class _WindowsSelectorEventLoopPolicy(events._BaseDefaultEventLoopPolicy): + _loop_factory: ClassVar[type[SelectorEventLoop]] + + class _WindowsProactorEventLoopPolicy(events._BaseDefaultEventLoopPolicy): + _loop_factory: ClassVar[type[ProactorEventLoop]] + + else: + class WindowsSelectorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): + _loop_factory: ClassVar[type[SelectorEventLoop]] + def get_child_watcher(self) -> NoReturn: ... + def set_child_watcher(self, watcher: Any) -> NoReturn: ... + + class WindowsProactorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): + _loop_factory: ClassVar[type[ProactorEventLoop]] + def get_child_watcher(self) -> NoReturn: ... + def set_child_watcher(self, watcher: Any) -> NoReturn: ... + + if sys.version_info >= (3, 14): + _DefaultEventLoopPolicy = _WindowsProactorEventLoopPolicy + else: + DefaultEventLoopPolicy = WindowsProactorEventLoopPolicy + if sys.version_info >= (3, 13): + EventLoop = ProactorEventLoop diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/windows_utils.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/windows_utils.pyi new file mode 100644 index 0000000..5cedd61 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncio/windows_utils.pyi @@ -0,0 +1,49 @@ +import subprocess +import sys +from collections.abc import Callable +from types import TracebackType +from typing import Any, AnyStr, Final +from typing_extensions import Self + +if sys.platform == "win32": + __all__ = ("pipe", "Popen", "PIPE", "PipeHandle") + + BUFSIZE: Final = 8192 + PIPE: Final = subprocess.PIPE + STDOUT: Final = subprocess.STDOUT + def pipe(*, duplex: bool = False, overlapped: tuple[bool, bool] = (True, True), bufsize: int = 8192) -> tuple[int, int]: ... + + class PipeHandle: + def __init__(self, handle: int) -> None: ... + def __del__(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + @property + def handle(self) -> int: ... + def fileno(self) -> int: ... + def close(self, *, CloseHandle: Callable[[int], object] = ...) -> None: ... + + class Popen(subprocess.Popen[AnyStr]): + stdin: PipeHandle | None # type: ignore[assignment] + stdout: PipeHandle | None # type: ignore[assignment] + stderr: PipeHandle | None # type: ignore[assignment] + # For simplicity we omit the full overloaded __new__ signature of + # subprocess.Popen. The arguments are mostly the same, but + # subprocess.Popen takes other positional-or-keyword arguments before + # stdin. + def __new__( + cls, + args: subprocess._CMD, + stdin: subprocess._FILE | None = None, + stdout: subprocess._FILE | None = None, + stderr: subprocess._FILE | None = None, + **kwds: Any, + ) -> Self: ... + def __init__( + self, + args: subprocess._CMD, + stdin: subprocess._FILE | None = None, + stdout: subprocess._FILE | None = None, + stderr: subprocess._FILE | None = None, + **kwds: Any, + ) -> None: ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncore.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncore.pyi new file mode 100644 index 0000000..36d1862 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/asyncore.pyi @@ -0,0 +1,90 @@ +import sys +from _typeshed import FileDescriptorLike, ReadableBuffer +from socket import socket +from typing import Any, overload +from typing_extensions import TypeAlias + +# cyclic dependence with asynchat +_MapType: TypeAlias = dict[int, Any] +_Socket: TypeAlias = socket + +socket_map: _MapType # undocumented + +class ExitNow(Exception): ... + +def read(obj: Any) -> None: ... +def write(obj: Any) -> None: ... +def readwrite(obj: Any, flags: int) -> None: ... +def poll(timeout: float = 0.0, map: _MapType | None = None) -> None: ... +def poll2(timeout: float = 0.0, map: _MapType | None = None) -> None: ... + +poll3 = poll2 + +def loop(timeout: float = 30.0, use_poll: bool = False, map: _MapType | None = None, count: int | None = None) -> None: ... + +# Not really subclass of socket.socket; it's only delegation. +# It is not covariant to it. +class dispatcher: + debug: bool + connected: bool + accepting: bool + connecting: bool + closing: bool + ignore_log_types: frozenset[str] + socket: _Socket | None + def __init__(self, sock: _Socket | None = None, map: _MapType | None = None) -> None: ... + def add_channel(self, map: _MapType | None = None) -> None: ... + def del_channel(self, map: _MapType | None = None) -> None: ... + def create_socket(self, family: int = ..., type: int = ...) -> None: ... + def set_socket(self, sock: _Socket, map: _MapType | None = None) -> None: ... + def set_reuse_addr(self) -> None: ... + def readable(self) -> bool: ... + def writable(self) -> bool: ... + def listen(self, num: int) -> None: ... + def bind(self, addr: tuple[Any, ...] | str) -> None: ... + def connect(self, address: tuple[Any, ...] | str) -> None: ... + def accept(self) -> tuple[_Socket, Any] | None: ... + def send(self, data: ReadableBuffer) -> int: ... + def recv(self, buffer_size: int) -> bytes: ... + def close(self) -> None: ... + def log(self, message: Any) -> None: ... + def log_info(self, message: Any, type: str = "info") -> None: ... + def handle_read_event(self) -> None: ... + def handle_connect_event(self) -> None: ... + def handle_write_event(self) -> None: ... + def handle_expt_event(self) -> None: ... + def handle_error(self) -> None: ... + def handle_expt(self) -> None: ... + def handle_read(self) -> None: ... + def handle_write(self) -> None: ... + def handle_connect(self) -> None: ... + def handle_accept(self) -> None: ... + def handle_close(self) -> None: ... + +class dispatcher_with_send(dispatcher): + def initiate_send(self) -> None: ... + # incompatible signature: + # def send(self, data: bytes) -> int | None: ... + +def compact_traceback() -> tuple[tuple[str, str, str], type, type, str]: ... +def close_all(map: _MapType | None = None, ignore_all: bool = False) -> None: ... + +if sys.platform != "win32": + class file_wrapper: + fd: int + def __init__(self, fd: int) -> None: ... + def recv(self, bufsize: int, flags: int = ...) -> bytes: ... + def send(self, data: bytes, flags: int = ...) -> int: ... + @overload + def getsockopt(self, level: int, optname: int, buflen: None = None) -> int: ... + @overload + def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ... + def read(self, bufsize: int, flags: int = ...) -> bytes: ... 
+ def write(self, data: bytes, flags: int = ...) -> int: ... + def close(self) -> None: ... + def fileno(self) -> int: ... + def __del__(self) -> None: ... + + class file_dispatcher(dispatcher): + def __init__(self, fd: FileDescriptorLike, map: _MapType | None = None) -> None: ... + def set_file(self, fd: int) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/atexit.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/atexit.pyi new file mode 100644 index 0000000..7f7b05c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/atexit.pyi @@ -0,0 +1,12 @@ +from collections.abc import Callable +from typing import TypeVar +from typing_extensions import ParamSpec + +_T = TypeVar("_T") +_P = ParamSpec("_P") + +def _clear() -> None: ... +def _ncallbacks() -> int: ... +def _run_exitfuncs() -> None: ... +def register(func: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Callable[_P, _T]: ... +def unregister(func: Callable[..., object], /) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/audioop.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/audioop.pyi new file mode 100644 index 0000000..f3ce78c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/audioop.pyi @@ -0,0 +1,43 @@ +from typing_extensions import Buffer, TypeAlias + +_AdpcmState: TypeAlias = tuple[int, int] +_RatecvState: TypeAlias = tuple[int, tuple[tuple[int, int], ...]] + +class error(Exception): ... + +def add(fragment1: Buffer, fragment2: Buffer, width: int, /) -> bytes: ... +def adpcm2lin(fragment: Buffer, width: int, state: _AdpcmState | None, /) -> tuple[bytes, _AdpcmState]: ... +def alaw2lin(fragment: Buffer, width: int, /) -> bytes: ... +def avg(fragment: Buffer, width: int, /) -> int: ... +def avgpp(fragment: Buffer, width: int, /) -> int: ... +def bias(fragment: Buffer, width: int, bias: int, /) -> bytes: ... +def byteswap(fragment: Buffer, width: int, /) -> bytes: ... +def cross(fragment: Buffer, width: int, /) -> int: ... +def findfactor(fragment: Buffer, reference: Buffer, /) -> float: ... +def findfit(fragment: Buffer, reference: Buffer, /) -> tuple[int, float]: ... +def findmax(fragment: Buffer, length: int, /) -> int: ... +def getsample(fragment: Buffer, width: int, index: int, /) -> int: ... +def lin2adpcm(fragment: Buffer, width: int, state: _AdpcmState | None, /) -> tuple[bytes, _AdpcmState]: ... +def lin2alaw(fragment: Buffer, width: int, /) -> bytes: ... +def lin2lin(fragment: Buffer, width: int, newwidth: int, /) -> bytes: ... +def lin2ulaw(fragment: Buffer, width: int, /) -> bytes: ... +def max(fragment: Buffer, width: int, /) -> int: ... +def maxpp(fragment: Buffer, width: int, /) -> int: ... +def minmax(fragment: Buffer, width: int, /) -> tuple[int, int]: ... +def mul(fragment: Buffer, width: int, factor: float, /) -> bytes: ... +def ratecv( + fragment: Buffer, + width: int, + nchannels: int, + inrate: int, + outrate: int, + state: _RatecvState | None, + weightA: int = 1, + weightB: int = 0, + /, +) -> tuple[bytes, _RatecvState]: ... +def reverse(fragment: Buffer, width: int, /) -> bytes: ... +def rms(fragment: Buffer, width: int, /) -> int: ... +def tomono(fragment: Buffer, width: int, lfactor: float, rfactor: float, /) -> bytes: ... +def tostereo(fragment: Buffer, width: int, lfactor: float, rfactor: float, /) -> bytes: ... +def ulaw2lin(fragment: Buffer, width: int, /) -> bytes: ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/base64.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/base64.pyi new file mode 100644 index 0000000..279d74a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/base64.pyi @@ -0,0 +1,61 @@ +import sys +from _typeshed import ReadableBuffer +from typing import IO + +__all__ = [ + "encode", + "decode", + "encodebytes", + "decodebytes", + "b64encode", + "b64decode", + "b32encode", + "b32decode", + "b16encode", + "b16decode", + "b85encode", + "b85decode", + "a85encode", + "a85decode", + "standard_b64encode", + "standard_b64decode", + "urlsafe_b64encode", + "urlsafe_b64decode", +] + +if sys.version_info >= (3, 10): + __all__ += ["b32hexencode", "b32hexdecode"] +if sys.version_info >= (3, 13): + __all__ += ["z85decode", "z85encode"] + +def b64encode(s: ReadableBuffer, altchars: ReadableBuffer | None = None) -> bytes: ... +def b64decode(s: str | ReadableBuffer, altchars: str | ReadableBuffer | None = None, validate: bool = False) -> bytes: ... +def standard_b64encode(s: ReadableBuffer) -> bytes: ... +def standard_b64decode(s: str | ReadableBuffer) -> bytes: ... +def urlsafe_b64encode(s: ReadableBuffer) -> bytes: ... +def urlsafe_b64decode(s: str | ReadableBuffer) -> bytes: ... +def b32encode(s: ReadableBuffer) -> bytes: ... +def b32decode(s: str | ReadableBuffer, casefold: bool = False, map01: str | ReadableBuffer | None = None) -> bytes: ... +def b16encode(s: ReadableBuffer) -> bytes: ... +def b16decode(s: str | ReadableBuffer, casefold: bool = False) -> bytes: ... + +if sys.version_info >= (3, 10): + def b32hexencode(s: ReadableBuffer) -> bytes: ... + def b32hexdecode(s: str | ReadableBuffer, casefold: bool = False) -> bytes: ... + +def a85encode( + b: ReadableBuffer, *, foldspaces: bool = False, wrapcol: int = 0, pad: bool = False, adobe: bool = False +) -> bytes: ... +def a85decode( + b: str | ReadableBuffer, *, foldspaces: bool = False, adobe: bool = False, ignorechars: bytearray | bytes = b" \t\n\r\x0b" +) -> bytes: ... +def b85encode(b: ReadableBuffer, pad: bool = False) -> bytes: ... +def b85decode(b: str | ReadableBuffer) -> bytes: ... +def decode(input: IO[bytes], output: IO[bytes]) -> None: ... +def encode(input: IO[bytes], output: IO[bytes]) -> None: ... +def encodebytes(s: ReadableBuffer) -> bytes: ... +def decodebytes(s: ReadableBuffer) -> bytes: ... + +if sys.version_info >= (3, 13): + def z85encode(s: ReadableBuffer) -> bytes: ... + def z85decode(s: str | ReadableBuffer) -> bytes: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/bdb.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/bdb.pyi new file mode 100644 index 0000000..b6be221 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/bdb.pyi @@ -0,0 +1,130 @@ +import sys +from _typeshed import ExcInfo, TraceFunction, Unused +from collections.abc import Callable, Iterable, Iterator, Mapping +from contextlib import contextmanager +from types import CodeType, FrameType, TracebackType +from typing import IO, Any, Final, Literal, SupportsInt, TypeVar +from typing_extensions import ParamSpec, TypeAlias + +__all__ = ["BdbQuit", "Bdb", "Breakpoint"] + +_T = TypeVar("_T") +_P = ParamSpec("_P") +_Backend: TypeAlias = Literal["settrace", "monitoring"] + +# A union of code-object flags at runtime. +# The exact values of code-object flags are implementation details, +# so we don't include the value of this constant in the stubs. 
+GENERATOR_AND_COROUTINE_FLAGS: Final[int] + +class BdbQuit(Exception): ... + +class Bdb: + skip: set[str] | None + breaks: dict[str, list[int]] + fncache: dict[str, str] + frame_returning: FrameType | None + botframe: FrameType | None + quitting: bool + stopframe: FrameType | None + returnframe: FrameType | None + stoplineno: int + if sys.version_info >= (3, 14): + backend: _Backend + def __init__(self, skip: Iterable[str] | None = None, backend: _Backend = "settrace") -> None: ... + else: + def __init__(self, skip: Iterable[str] | None = None) -> None: ... + + def canonic(self, filename: str) -> str: ... + def reset(self) -> None: ... + if sys.version_info >= (3, 12): + @contextmanager + def set_enterframe(self, frame: FrameType) -> Iterator[None]: ... + + def trace_dispatch(self, frame: FrameType, event: str, arg: Any) -> TraceFunction: ... + def dispatch_line(self, frame: FrameType) -> TraceFunction: ... + def dispatch_call(self, frame: FrameType, arg: None) -> TraceFunction: ... + def dispatch_return(self, frame: FrameType, arg: Any) -> TraceFunction: ... + def dispatch_exception(self, frame: FrameType, arg: ExcInfo) -> TraceFunction: ... + if sys.version_info >= (3, 13): + def dispatch_opcode(self, frame: FrameType, arg: Unused) -> Callable[[FrameType, str, Any], TraceFunction]: ... + + def is_skipped_module(self, module_name: str) -> bool: ... + def stop_here(self, frame: FrameType) -> bool: ... + def break_here(self, frame: FrameType) -> bool: ... + def do_clear(self, arg: Any) -> bool | None: ... + def break_anywhere(self, frame: FrameType) -> bool: ... + def user_call(self, frame: FrameType, argument_list: None) -> None: ... + def user_line(self, frame: FrameType) -> None: ... + def user_return(self, frame: FrameType, return_value: Any) -> None: ... + def user_exception(self, frame: FrameType, exc_info: ExcInfo) -> None: ... + def set_until(self, frame: FrameType, lineno: int | None = None) -> None: ... + if sys.version_info >= (3, 13): + def user_opcode(self, frame: FrameType) -> None: ... # undocumented + + def set_step(self) -> None: ... + if sys.version_info >= (3, 13): + def set_stepinstr(self) -> None: ... # undocumented + + def set_next(self, frame: FrameType) -> None: ... + def set_return(self, frame: FrameType) -> None: ... + def set_trace(self, frame: FrameType | None = None) -> None: ... + def set_continue(self) -> None: ... + def set_quit(self) -> None: ... + def set_break( + self, filename: str, lineno: int, temporary: bool = False, cond: str | None = None, funcname: str | None = None + ) -> str | None: ... + def clear_break(self, filename: str, lineno: int) -> str | None: ... + def clear_bpbynumber(self, arg: SupportsInt) -> str | None: ... + def clear_all_file_breaks(self, filename: str) -> str | None: ... + def clear_all_breaks(self) -> str | None: ... + def get_bpbynumber(self, arg: SupportsInt) -> Breakpoint: ... + def get_break(self, filename: str, lineno: int) -> bool: ... + def get_breaks(self, filename: str, lineno: int) -> list[Breakpoint]: ... + def get_file_breaks(self, filename: str) -> list[int]: ... + def get_all_breaks(self) -> dict[str, list[int]]: ... + def get_stack(self, f: FrameType | None, t: TracebackType | None) -> tuple[list[tuple[FrameType, int]], int]: ... + def format_stack_entry(self, frame_lineno: tuple[FrameType, int], lprefix: str = ": ") -> str: ... + def run( + self, cmd: str | CodeType, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None + ) -> None: ... 
+ def runeval(self, expr: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: ... + def runctx(self, cmd: str | CodeType, globals: dict[str, Any] | None, locals: Mapping[str, Any] | None) -> None: ... + def runcall(self, func: Callable[_P, _T], /, *args: _P.args, **kwds: _P.kwargs) -> _T | None: ... + if sys.version_info >= (3, 14): + def start_trace(self) -> None: ... + def stop_trace(self) -> None: ... + def disable_current_event(self) -> None: ... + def restart_events(self) -> None: ... + +class Breakpoint: + next: int + bplist: dict[tuple[str, int], list[Breakpoint]] + bpbynumber: list[Breakpoint | None] + + funcname: str | None + func_first_executable_line: int | None + file: str + line: int + temporary: bool + cond: str | None + enabled: bool + ignore: int + hits: int + number: int + def __init__( + self, file: str, line: int, temporary: bool = False, cond: str | None = None, funcname: str | None = None + ) -> None: ... + if sys.version_info >= (3, 11): + @staticmethod + def clearBreakpoints() -> None: ... + + def deleteMe(self) -> None: ... + def enable(self) -> None: ... + def disable(self) -> None: ... + def bpprint(self, out: IO[str] | None = None) -> None: ... + def bpformat(self) -> str: ... + +def checkfuncname(b: Breakpoint, frame: FrameType) -> bool: ... +def effective(file: str, line: int, frame: FrameType) -> tuple[Breakpoint, bool] | tuple[None, None]: ... +def set_trace() -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/binascii.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/binascii.pyi new file mode 100644 index 0000000..5606d5c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/binascii.pyi @@ -0,0 +1,40 @@ +import sys +from _typeshed import ReadableBuffer +from typing_extensions import TypeAlias, deprecated + +# Many functions in binascii accept buffer objects +# or ASCII-only strings. +_AsciiBuffer: TypeAlias = str | ReadableBuffer + +def a2b_uu(data: _AsciiBuffer, /) -> bytes: ... +def b2a_uu(data: ReadableBuffer, /, *, backtick: bool = False) -> bytes: ... + +if sys.version_info >= (3, 11): + def a2b_base64(data: _AsciiBuffer, /, *, strict_mode: bool = False) -> bytes: ... + +else: + def a2b_base64(data: _AsciiBuffer, /) -> bytes: ... + +def b2a_base64(data: ReadableBuffer, /, *, newline: bool = True) -> bytes: ... +def a2b_qp(data: _AsciiBuffer, header: bool = False) -> bytes: ... +def b2a_qp(data: ReadableBuffer, quotetabs: bool = False, istext: bool = True, header: bool = False) -> bytes: ... + +if sys.version_info < (3, 11): + @deprecated("Deprecated since Python 3.9; removed in Python 3.11.") + def a2b_hqx(data: _AsciiBuffer, /) -> bytes: ... + @deprecated("Deprecated since Python 3.9; removed in Python 3.11.") + def rledecode_hqx(data: ReadableBuffer, /) -> bytes: ... + @deprecated("Deprecated since Python 3.9; removed in Python 3.11.") + def rlecode_hqx(data: ReadableBuffer, /) -> bytes: ... + @deprecated("Deprecated since Python 3.9; removed in Python 3.11.") + def b2a_hqx(data: ReadableBuffer, /) -> bytes: ... + +def crc_hqx(data: ReadableBuffer, crc: int, /) -> int: ... +def crc32(data: ReadableBuffer, crc: int = 0, /) -> int: ... +def b2a_hex(data: ReadableBuffer, sep: str | bytes = ..., bytes_per_sep: int = 1) -> bytes: ... +def hexlify(data: ReadableBuffer, sep: str | bytes = ..., bytes_per_sep: int = 1) -> bytes: ... +def a2b_hex(hexstr: _AsciiBuffer, /) -> bytes: ... +def unhexlify(hexstr: _AsciiBuffer, /) -> bytes: ... 
+ +class Error(ValueError): ... +class Incomplete(Exception): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/binhex.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/binhex.pyi new file mode 100644 index 0000000..bdead92 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/binhex.pyi @@ -0,0 +1,45 @@ +from _typeshed import SizedBuffer +from typing import IO, Any, Final +from typing_extensions import TypeAlias + +__all__ = ["binhex", "hexbin", "Error"] + +class Error(Exception): ... + +REASONABLY_LARGE: Final = 32768 +LINELEN: Final = 64 +RUNCHAR: Final = b"\x90" + +class FInfo: + Type: str + Creator: str + Flags: int + +_FileInfoTuple: TypeAlias = tuple[str, FInfo, int, int] +_FileHandleUnion: TypeAlias = str | IO[bytes] + +def getfileinfo(name: str) -> _FileInfoTuple: ... + +class openrsrc: + def __init__(self, *args: Any) -> None: ... + def read(self, *args: Any) -> bytes: ... + def write(self, *args: Any) -> None: ... + def close(self) -> None: ... + +class BinHex: + def __init__(self, name_finfo_dlen_rlen: _FileInfoTuple, ofp: _FileHandleUnion) -> None: ... + def write(self, data: SizedBuffer) -> None: ... + def close_data(self) -> None: ... + def write_rsrc(self, data: SizedBuffer) -> None: ... + def close(self) -> None: ... + +def binhex(inp: str, out: str) -> None: ... + +class HexBin: + def __init__(self, ifp: _FileHandleUnion) -> None: ... + def read(self, *n: int) -> bytes: ... + def close_data(self) -> None: ... + def read_rsrc(self, *n: int) -> bytes: ... + def close(self) -> None: ... + +def hexbin(inp: str, out: str) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/bisect.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/bisect.pyi new file mode 100644 index 0000000..60dfc48 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/bisect.pyi @@ -0,0 +1,4 @@ +from _bisect import * + +bisect = bisect_right +insort = insort_right diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/builtins.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/builtins.pyi new file mode 100644 index 0000000..30dfb9d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/builtins.pyi @@ -0,0 +1,2218 @@ +import _ast +import _sitebuiltins +import _typeshed +import sys +import types +from _collections_abc import dict_items, dict_keys, dict_values +from _typeshed import ( + AnnotationForm, + ConvertibleToFloat, + ConvertibleToInt, + FileDescriptorOrPath, + OpenBinaryMode, + OpenBinaryModeReading, + OpenBinaryModeUpdating, + OpenBinaryModeWriting, + OpenTextMode, + ReadableBuffer, + SupportsAdd, + SupportsAiter, + SupportsAnext, + SupportsDivMod, + SupportsFlush, + SupportsIter, + SupportsKeysAndGetItem, + SupportsLenAndGetItem, + SupportsNext, + SupportsRAdd, + SupportsRDivMod, + SupportsRichComparison, + SupportsRichComparisonT, + SupportsWrite, +) +from collections.abc import Awaitable, Callable, Iterable, Iterator, MutableSet, Reversible, Set as AbstractSet, Sized +from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper +from os import PathLike +from types import CellType, CodeType, GenericAlias, TracebackType + +# mypy crashes if any of {ByteString, Sequence, MutableSequence, Mapping, MutableMapping} +# are imported from collections.abc in builtins.pyi +from typing import ( # noqa: Y022,UP035 + IO, + Any, + BinaryIO, + ClassVar, + Final, + Generic, + Mapping, + MutableMapping, + MutableSequence, + 
Protocol, + Sequence, + SupportsAbs, + SupportsBytes, + SupportsComplex, + SupportsFloat, + SupportsIndex, + TypeVar, + final, + overload, + type_check_only, +) + +# we can't import `Literal` from typing or mypy crashes: see #11247 +from typing_extensions import ( # noqa: Y023 + Concatenate, + Literal, + ParamSpec, + Self, + TypeAlias, + TypeGuard, + TypeIs, + TypeVarTuple, + deprecated, + disjoint_base, +) + +if sys.version_info >= (3, 14): + from _typeshed import AnnotateFunc + +_T = TypeVar("_T") +_I = TypeVar("_I", default=int) +_T_co = TypeVar("_T_co", covariant=True) +_T_contra = TypeVar("_T_contra", contravariant=True) +_R_co = TypeVar("_R_co", covariant=True) +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") +_S = TypeVar("_S") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_T4 = TypeVar("_T4") +_T5 = TypeVar("_T5") +_SupportsNextT_co = TypeVar("_SupportsNextT_co", bound=SupportsNext[Any], covariant=True) +_SupportsAnextT_co = TypeVar("_SupportsAnextT_co", bound=SupportsAnext[Any], covariant=True) +_AwaitableT = TypeVar("_AwaitableT", bound=Awaitable[Any]) +_AwaitableT_co = TypeVar("_AwaitableT_co", bound=Awaitable[Any], covariant=True) +_P = ParamSpec("_P") + +# Type variables for slice +_StartT_co = TypeVar("_StartT_co", covariant=True, default=Any) # slice -> slice[Any, Any, Any] +_StopT_co = TypeVar("_StopT_co", covariant=True, default=_StartT_co) # slice[A] -> slice[A, A, A] +# NOTE: step could differ from start and stop, (e.g. datetime/timedelta)l +# the default (start|stop) is chosen to cater to the most common case of int/index slices. +# FIXME: https://github.com/python/typing/issues/213 (replace step=start|stop with step=start&stop) +_StepT_co = TypeVar("_StepT_co", covariant=True, default=_StartT_co | _StopT_co) # slice[A,B] -> slice[A, B, A|B] + +@disjoint_base +class object: + __doc__: str | None + __dict__: dict[str, Any] + __module__: str + __annotations__: dict[str, Any] + @property + def __class__(self) -> type[Self]: ... + @__class__.setter + def __class__(self, type: type[Self], /) -> None: ... + def __init__(self) -> None: ... + def __new__(cls) -> Self: ... + # N.B. `object.__setattr__` and `object.__delattr__` are heavily special-cased by type checkers. + # Overriding them in subclasses has different semantics, even if the override has an identical signature. + def __setattr__(self, name: str, value: Any, /) -> None: ... + def __delattr__(self, name: str, /) -> None: ... + def __eq__(self, value: object, /) -> bool: ... + def __ne__(self, value: object, /) -> bool: ... + def __str__(self) -> str: ... # noqa: Y029 + def __repr__(self) -> str: ... # noqa: Y029 + def __hash__(self) -> int: ... + def __format__(self, format_spec: str, /) -> str: ... + def __getattribute__(self, name: str, /) -> Any: ... + def __sizeof__(self) -> int: ... + # return type of pickle methods is rather hard to express in the current type system + # see #6661 and https://docs.python.org/3/library/pickle.html#object.__reduce__ + def __reduce__(self) -> str | tuple[Any, ...]: ... + def __reduce_ex__(self, protocol: SupportsIndex, /) -> str | tuple[Any, ...]: ... + if sys.version_info >= (3, 11): + def __getstate__(self) -> object: ... + + def __dir__(self) -> Iterable[str]: ... + def __init_subclass__(cls) -> None: ... + @classmethod + def __subclasshook__(cls, subclass: type, /) -> bool: ... + +@disjoint_base +class staticmethod(Generic[_P, _R_co]): + @property + def __func__(self) -> Callable[_P, _R_co]: ... + @property + def __isabstractmethod__(self) -> bool: ... 
+ def __init__(self, f: Callable[_P, _R_co], /) -> None: ... + @overload + def __get__(self, instance: None, owner: type, /) -> Callable[_P, _R_co]: ... + @overload + def __get__(self, instance: _T, owner: type[_T] | None = None, /) -> Callable[_P, _R_co]: ... + if sys.version_info >= (3, 10): + __name__: str + __qualname__: str + @property + def __wrapped__(self) -> Callable[_P, _R_co]: ... + def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R_co: ... + if sys.version_info >= (3, 14): + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + __annotate__: AnnotateFunc | None + +@disjoint_base +class classmethod(Generic[_T, _P, _R_co]): + @property + def __func__(self) -> Callable[Concatenate[type[_T], _P], _R_co]: ... + @property + def __isabstractmethod__(self) -> bool: ... + def __init__(self, f: Callable[Concatenate[type[_T], _P], _R_co], /) -> None: ... + @overload + def __get__(self, instance: _T, owner: type[_T] | None = None, /) -> Callable[_P, _R_co]: ... + @overload + def __get__(self, instance: None, owner: type[_T], /) -> Callable[_P, _R_co]: ... + if sys.version_info >= (3, 10): + __name__: str + __qualname__: str + @property + def __wrapped__(self) -> Callable[Concatenate[type[_T], _P], _R_co]: ... + if sys.version_info >= (3, 14): + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + __annotate__: AnnotateFunc | None + +@disjoint_base +class type: + # object.__base__ is None. Otherwise, it would be a type. + @property + def __base__(self) -> type | None: ... + __bases__: tuple[type, ...] + @property + def __basicsize__(self) -> int: ... + # type.__dict__ is read-only at runtime, but that can't be expressed currently. + # See https://github.com/python/typeshed/issues/11033 for a discussion. + __dict__: Final[types.MappingProxyType[str, Any]] # type: ignore[assignment] + @property + def __dictoffset__(self) -> int: ... + @property + def __flags__(self) -> int: ... + @property + def __itemsize__(self) -> int: ... + __module__: str + @property + def __mro__(self) -> tuple[type, ...]: ... + __name__: str + __qualname__: str + @property + def __text_signature__(self) -> str | None: ... + @property + def __weakrefoffset__(self) -> int: ... + @overload + def __init__(self, o: object, /) -> None: ... + @overload + def __init__(self, name: str, bases: tuple[type, ...], dict: dict[str, Any], /, **kwds: Any) -> None: ... + @overload + def __new__(cls, o: object, /) -> type: ... + @overload + def __new__( + cls: type[_typeshed.Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], /, **kwds: Any + ) -> _typeshed.Self: ... + def __call__(self, *args: Any, **kwds: Any) -> Any: ... + def __subclasses__(self: _typeshed.Self) -> list[_typeshed.Self]: ... + # Note: the documentation doesn't specify what the return type is, the standard + # implementation seems to be returning a list. + def mro(self) -> list[type]: ... + def __instancecheck__(self, instance: Any, /) -> bool: ... + def __subclasscheck__(self, subclass: type, /) -> bool: ... + @classmethod + def __prepare__(metacls, name: str, bases: tuple[type, ...], /, **kwds: Any) -> MutableMapping[str, object]: ... + if sys.version_info >= (3, 10): + # `int | str` produces an instance of `UnionType`, but `int | int` produces an instance of `type`, + # and `abc.ABC | abc.ABC` produces an instance of `abc.ABCMeta`. + def __or__(self: _typeshed.Self, value: Any, /) -> types.UnionType | _typeshed.Self: ... + def __ror__(self: _typeshed.Self, value: Any, /) -> types.UnionType | _typeshed.Self: ... 
+ if sys.version_info >= (3, 12): + __type_params__: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] + __annotations__: dict[str, AnnotationForm] + if sys.version_info >= (3, 14): + __annotate__: AnnotateFunc | None + +@disjoint_base +class super: + @overload + def __init__(self, t: Any, obj: Any, /) -> None: ... + @overload + def __init__(self, t: Any, /) -> None: ... + @overload + def __init__(self) -> None: ... + +_PositiveInteger: TypeAlias = Literal[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25] +_NegativeInteger: TypeAlias = Literal[-1, -2, -3, -4, -5, -6, -7, -8, -9, -10, -11, -12, -13, -14, -15, -16, -17, -18, -19, -20] +_LiteralInteger = _PositiveInteger | _NegativeInteger | Literal[0] # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed + +@disjoint_base +class int: + @overload + def __new__(cls, x: ConvertibleToInt = 0, /) -> Self: ... + @overload + def __new__(cls, x: str | bytes | bytearray, /, base: SupportsIndex) -> Self: ... + def as_integer_ratio(self) -> tuple[int, Literal[1]]: ... + @property + def real(self) -> int: ... + @property + def imag(self) -> Literal[0]: ... + @property + def numerator(self) -> int: ... + @property + def denominator(self) -> Literal[1]: ... + def conjugate(self) -> int: ... + def bit_length(self) -> int: ... + if sys.version_info >= (3, 10): + def bit_count(self) -> int: ... + + if sys.version_info >= (3, 11): + def to_bytes( + self, length: SupportsIndex = 1, byteorder: Literal["little", "big"] = "big", *, signed: bool = False + ) -> bytes: ... + @classmethod + def from_bytes( + cls, + bytes: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer, + byteorder: Literal["little", "big"] = "big", + *, + signed: bool = False, + ) -> Self: ... + else: + def to_bytes(self, length: SupportsIndex, byteorder: Literal["little", "big"], *, signed: bool = False) -> bytes: ... + @classmethod + def from_bytes( + cls, + bytes: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer, + byteorder: Literal["little", "big"], + *, + signed: bool = False, + ) -> Self: ... + + if sys.version_info >= (3, 12): + def is_integer(self) -> Literal[True]: ... + + def __add__(self, value: int, /) -> int: ... + def __sub__(self, value: int, /) -> int: ... + def __mul__(self, value: int, /) -> int: ... + def __floordiv__(self, value: int, /) -> int: ... + def __truediv__(self, value: int, /) -> float: ... + def __mod__(self, value: int, /) -> int: ... + def __divmod__(self, value: int, /) -> tuple[int, int]: ... + def __radd__(self, value: int, /) -> int: ... + def __rsub__(self, value: int, /) -> int: ... + def __rmul__(self, value: int, /) -> int: ... + def __rfloordiv__(self, value: int, /) -> int: ... + def __rtruediv__(self, value: int, /) -> float: ... + def __rmod__(self, value: int, /) -> int: ... + def __rdivmod__(self, value: int, /) -> tuple[int, int]: ... + @overload + def __pow__(self, x: Literal[0], /) -> Literal[1]: ... + @overload + def __pow__(self, value: Literal[0], mod: None, /) -> Literal[1]: ... + @overload + def __pow__(self, value: _PositiveInteger, mod: None = None, /) -> int: ... + @overload + def __pow__(self, value: _NegativeInteger, mod: None = None, /) -> float: ... + # positive __value -> int; negative __value -> float + # return type must be Any as `int | float` causes too many false-positive errors + @overload + def __pow__(self, value: int, mod: None = None, /) -> Any: ... + @overload + def __pow__(self, value: int, mod: int, /) -> int: ... 
+ def __rpow__(self, value: int, mod: int | None = None, /) -> Any: ... + def __and__(self, value: int, /) -> int: ... + def __or__(self, value: int, /) -> int: ... + def __xor__(self, value: int, /) -> int: ... + def __lshift__(self, value: int, /) -> int: ... + def __rshift__(self, value: int, /) -> int: ... + def __rand__(self, value: int, /) -> int: ... + def __ror__(self, value: int, /) -> int: ... + def __rxor__(self, value: int, /) -> int: ... + def __rlshift__(self, value: int, /) -> int: ... + def __rrshift__(self, value: int, /) -> int: ... + def __neg__(self) -> int: ... + def __pos__(self) -> int: ... + def __invert__(self) -> int: ... + def __trunc__(self) -> int: ... + def __ceil__(self) -> int: ... + def __floor__(self) -> int: ... + if sys.version_info >= (3, 14): + def __round__(self, ndigits: SupportsIndex | None = None, /) -> int: ... + else: + def __round__(self, ndigits: SupportsIndex = ..., /) -> int: ... + + def __getnewargs__(self) -> tuple[int]: ... + def __eq__(self, value: object, /) -> bool: ... + def __ne__(self, value: object, /) -> bool: ... + def __lt__(self, value: int, /) -> bool: ... + def __le__(self, value: int, /) -> bool: ... + def __gt__(self, value: int, /) -> bool: ... + def __ge__(self, value: int, /) -> bool: ... + def __float__(self) -> float: ... + def __int__(self) -> int: ... + def __abs__(self) -> int: ... + def __hash__(self) -> int: ... + def __bool__(self) -> bool: ... + def __index__(self) -> int: ... + def __format__(self, format_spec: str, /) -> str: ... + +@disjoint_base +class float: + def __new__(cls, x: ConvertibleToFloat = 0, /) -> Self: ... + def as_integer_ratio(self) -> tuple[int, int]: ... + def hex(self) -> str: ... + def is_integer(self) -> bool: ... + @classmethod + def fromhex(cls, string: str, /) -> Self: ... + @property + def real(self) -> float: ... + @property + def imag(self) -> float: ... + def conjugate(self) -> float: ... + def __add__(self, value: float, /) -> float: ... + def __sub__(self, value: float, /) -> float: ... + def __mul__(self, value: float, /) -> float: ... + def __floordiv__(self, value: float, /) -> float: ... + def __truediv__(self, value: float, /) -> float: ... + def __mod__(self, value: float, /) -> float: ... + def __divmod__(self, value: float, /) -> tuple[float, float]: ... + @overload + def __pow__(self, value: int, mod: None = None, /) -> float: ... + # positive __value -> float; negative __value -> complex + # return type must be Any as `float | complex` causes too many false-positive errors + @overload + def __pow__(self, value: float, mod: None = None, /) -> Any: ... + def __radd__(self, value: float, /) -> float: ... + def __rsub__(self, value: float, /) -> float: ... + def __rmul__(self, value: float, /) -> float: ... + def __rfloordiv__(self, value: float, /) -> float: ... + def __rtruediv__(self, value: float, /) -> float: ... + def __rmod__(self, value: float, /) -> float: ... + def __rdivmod__(self, value: float, /) -> tuple[float, float]: ... + @overload + def __rpow__(self, value: _PositiveInteger, mod: None = None, /) -> float: ... + @overload + def __rpow__(self, value: _NegativeInteger, mod: None = None, /) -> complex: ... + # Returning `complex` for the general case gives too many false-positive errors. + @overload + def __rpow__(self, value: float, mod: None = None, /) -> Any: ... + def __getnewargs__(self) -> tuple[float]: ... + def __trunc__(self) -> int: ... + def __ceil__(self) -> int: ... + def __floor__(self) -> int: ... 
+ @overload + def __round__(self, ndigits: None = None, /) -> int: ... + @overload + def __round__(self, ndigits: SupportsIndex, /) -> float: ... + def __eq__(self, value: object, /) -> bool: ... + def __ne__(self, value: object, /) -> bool: ... + def __lt__(self, value: float, /) -> bool: ... + def __le__(self, value: float, /) -> bool: ... + def __gt__(self, value: float, /) -> bool: ... + def __ge__(self, value: float, /) -> bool: ... + def __neg__(self) -> float: ... + def __pos__(self) -> float: ... + def __int__(self) -> int: ... + def __float__(self) -> float: ... + def __abs__(self) -> float: ... + def __hash__(self) -> int: ... + def __bool__(self) -> bool: ... + def __format__(self, format_spec: str, /) -> str: ... + if sys.version_info >= (3, 14): + @classmethod + def from_number(cls, number: float | SupportsIndex | SupportsFloat, /) -> Self: ... + +@disjoint_base +class complex: + # Python doesn't currently accept SupportsComplex for the second argument + @overload + def __new__( + cls, + real: complex | SupportsComplex | SupportsFloat | SupportsIndex = 0, + imag: complex | SupportsFloat | SupportsIndex = 0, + ) -> Self: ... + @overload + def __new__(cls, real: str | SupportsComplex | SupportsFloat | SupportsIndex | complex) -> Self: ... + @property + def real(self) -> float: ... + @property + def imag(self) -> float: ... + def conjugate(self) -> complex: ... + def __add__(self, value: complex, /) -> complex: ... + def __sub__(self, value: complex, /) -> complex: ... + def __mul__(self, value: complex, /) -> complex: ... + def __pow__(self, value: complex, mod: None = None, /) -> complex: ... + def __truediv__(self, value: complex, /) -> complex: ... + def __radd__(self, value: complex, /) -> complex: ... + def __rsub__(self, value: complex, /) -> complex: ... + def __rmul__(self, value: complex, /) -> complex: ... + def __rpow__(self, value: complex, mod: None = None, /) -> complex: ... + def __rtruediv__(self, value: complex, /) -> complex: ... + def __eq__(self, value: object, /) -> bool: ... + def __ne__(self, value: object, /) -> bool: ... + def __neg__(self) -> complex: ... + def __pos__(self) -> complex: ... + def __abs__(self) -> float: ... + def __hash__(self) -> int: ... + def __bool__(self) -> bool: ... + def __format__(self, format_spec: str, /) -> str: ... + if sys.version_info >= (3, 11): + def __complex__(self) -> complex: ... + if sys.version_info >= (3, 14): + @classmethod + def from_number(cls, number: complex | SupportsComplex | SupportsFloat | SupportsIndex, /) -> Self: ... + +@type_check_only +class _FormatMapMapping(Protocol): + def __getitem__(self, key: str, /) -> Any: ... + +@type_check_only +class _TranslateTable(Protocol): + def __getitem__(self, key: int, /) -> str | int | None: ... + +@disjoint_base +class str(Sequence[str]): + @overload + def __new__(cls, object: object = "") -> Self: ... + @overload + def __new__(cls, object: ReadableBuffer, encoding: str = "utf-8", errors: str = "strict") -> Self: ... + def capitalize(self) -> str: ... # type: ignore[misc] + def casefold(self) -> str: ... # type: ignore[misc] + def center(self, width: SupportsIndex, fillchar: str = " ", /) -> str: ... # type: ignore[misc] + def count(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: ... + def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: ... + def endswith( + self, suffix: str | tuple[str, ...], start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> bool: ... 
+ def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ... # type: ignore[misc] + def find(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: ... + def format(self, *args: object, **kwargs: object) -> str: ... + def format_map(self, mapping: _FormatMapMapping, /) -> str: ... + def index(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: ... + def isalnum(self) -> bool: ... + def isalpha(self) -> bool: ... + def isascii(self) -> bool: ... + def isdecimal(self) -> bool: ... + def isdigit(self) -> bool: ... + def isidentifier(self) -> bool: ... + def islower(self) -> bool: ... + def isnumeric(self) -> bool: ... + def isprintable(self) -> bool: ... + def isspace(self) -> bool: ... + def istitle(self) -> bool: ... + def isupper(self) -> bool: ... + def join(self, iterable: Iterable[str], /) -> str: ... # type: ignore[misc] + def ljust(self, width: SupportsIndex, fillchar: str = " ", /) -> str: ... # type: ignore[misc] + def lower(self) -> str: ... # type: ignore[misc] + def lstrip(self, chars: str | None = None, /) -> str: ... # type: ignore[misc] + def partition(self, sep: str, /) -> tuple[str, str, str]: ... # type: ignore[misc] + if sys.version_info >= (3, 13): + def replace(self, old: str, new: str, /, count: SupportsIndex = -1) -> str: ... # type: ignore[misc] + else: + def replace(self, old: str, new: str, count: SupportsIndex = -1, /) -> str: ... # type: ignore[misc] + + def removeprefix(self, prefix: str, /) -> str: ... # type: ignore[misc] + def removesuffix(self, suffix: str, /) -> str: ... # type: ignore[misc] + def rfind(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: ... + def rindex(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: ... + def rjust(self, width: SupportsIndex, fillchar: str = " ", /) -> str: ... # type: ignore[misc] + def rpartition(self, sep: str, /) -> tuple[str, str, str]: ... # type: ignore[misc] + def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] + def rstrip(self, chars: str | None = None, /) -> str: ... # type: ignore[misc] + def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] + def splitlines(self, keepends: bool = False) -> list[str]: ... # type: ignore[misc] + def startswith( + self, prefix: str | tuple[str, ...], start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> bool: ... + def strip(self, chars: str | None = None, /) -> str: ... # type: ignore[misc] + def swapcase(self) -> str: ... # type: ignore[misc] + def title(self) -> str: ... # type: ignore[misc] + def translate(self, table: _TranslateTable, /) -> str: ... + def upper(self) -> str: ... # type: ignore[misc] + def zfill(self, width: SupportsIndex, /) -> str: ... # type: ignore[misc] + @staticmethod + @overload + def maketrans(x: dict[int, _T] | dict[str, _T] | dict[str | int, _T], /) -> dict[int, _T]: ... + @staticmethod + @overload + def maketrans(x: str, y: str, /) -> dict[int, int]: ... + @staticmethod + @overload + def maketrans(x: str, y: str, z: str, /) -> dict[int, int | None]: ... + def __add__(self, value: str, /) -> str: ... # type: ignore[misc] + # Incompatible with Sequence.__contains__ + def __contains__(self, key: str, /) -> bool: ... # type: ignore[override] + def __eq__(self, value: object, /) -> bool: ... + def __ge__(self, value: str, /) -> bool: ... 
+ def __getitem__(self, key: SupportsIndex | slice, /) -> str: ... + def __gt__(self, value: str, /) -> bool: ... + def __hash__(self) -> int: ... + def __iter__(self) -> Iterator[str]: ... # type: ignore[misc] + def __le__(self, value: str, /) -> bool: ... + def __len__(self) -> int: ... + def __lt__(self, value: str, /) -> bool: ... + def __mod__(self, value: Any, /) -> str: ... + def __mul__(self, value: SupportsIndex, /) -> str: ... # type: ignore[misc] + def __ne__(self, value: object, /) -> bool: ... + def __rmul__(self, value: SupportsIndex, /) -> str: ... # type: ignore[misc] + def __getnewargs__(self) -> tuple[str]: ... + def __format__(self, format_spec: str, /) -> str: ... + +@disjoint_base +class bytes(Sequence[int]): + @overload + def __new__(cls, o: Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer, /) -> Self: ... + @overload + def __new__(cls, string: str, /, encoding: str, errors: str = "strict") -> Self: ... + @overload + def __new__(cls) -> Self: ... + def capitalize(self) -> bytes: ... + def center(self, width: SupportsIndex, fillchar: bytes = b" ", /) -> bytes: ... + def count( + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: ... + def endswith( + self, + suffix: ReadableBuffer | tuple[ReadableBuffer, ...], + start: SupportsIndex | None = None, + end: SupportsIndex | None = None, + /, + ) -> bool: ... + def expandtabs(self, tabsize: SupportsIndex = 8) -> bytes: ... + def find( + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... + def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = 1) -> str: ... + def index( + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... + def isalnum(self) -> bool: ... + def isalpha(self) -> bool: ... + def isascii(self) -> bool: ... + def isdigit(self) -> bool: ... + def islower(self) -> bool: ... + def isspace(self) -> bool: ... + def istitle(self) -> bool: ... + def isupper(self) -> bool: ... + def join(self, iterable_of_bytes: Iterable[ReadableBuffer], /) -> bytes: ... + def ljust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytes: ... + def lower(self) -> bytes: ... + def lstrip(self, bytes: ReadableBuffer | None = None, /) -> bytes: ... + def partition(self, sep: ReadableBuffer, /) -> tuple[bytes, bytes, bytes]: ... + def replace(self, old: ReadableBuffer, new: ReadableBuffer, count: SupportsIndex = -1, /) -> bytes: ... + def removeprefix(self, prefix: ReadableBuffer, /) -> bytes: ... + def removesuffix(self, suffix: ReadableBuffer, /) -> bytes: ... + def rfind( + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... + def rindex( + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... + def rjust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytes: ... + def rpartition(self, sep: ReadableBuffer, /) -> tuple[bytes, bytes, bytes]: ... + def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]: ... + def rstrip(self, bytes: ReadableBuffer | None = None, /) -> bytes: ... 
+ def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]: ... + def splitlines(self, keepends: bool = False) -> list[bytes]: ... + def startswith( + self, + prefix: ReadableBuffer | tuple[ReadableBuffer, ...], + start: SupportsIndex | None = None, + end: SupportsIndex | None = None, + /, + ) -> bool: ... + def strip(self, bytes: ReadableBuffer | None = None, /) -> bytes: ... + def swapcase(self) -> bytes: ... + def title(self) -> bytes: ... + def translate(self, table: ReadableBuffer | None, /, delete: ReadableBuffer = b"") -> bytes: ... + def upper(self) -> bytes: ... + def zfill(self, width: SupportsIndex, /) -> bytes: ... + @classmethod + def fromhex(cls, string: str, /) -> Self: ... + @staticmethod + def maketrans(frm: ReadableBuffer, to: ReadableBuffer, /) -> bytes: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[int]: ... + def __hash__(self) -> int: ... + @overload + def __getitem__(self, key: SupportsIndex, /) -> int: ... + @overload + def __getitem__(self, key: slice, /) -> bytes: ... + def __add__(self, value: ReadableBuffer, /) -> bytes: ... + def __mul__(self, value: SupportsIndex, /) -> bytes: ... + def __rmul__(self, value: SupportsIndex, /) -> bytes: ... + def __mod__(self, value: Any, /) -> bytes: ... + # Incompatible with Sequence.__contains__ + def __contains__(self, key: SupportsIndex | ReadableBuffer, /) -> bool: ... # type: ignore[override] + def __eq__(self, value: object, /) -> bool: ... + def __ne__(self, value: object, /) -> bool: ... + def __lt__(self, value: bytes, /) -> bool: ... + def __le__(self, value: bytes, /) -> bool: ... + def __gt__(self, value: bytes, /) -> bool: ... + def __ge__(self, value: bytes, /) -> bool: ... + def __getnewargs__(self) -> tuple[bytes]: ... + if sys.version_info >= (3, 11): + def __bytes__(self) -> bytes: ... + + def __buffer__(self, flags: int, /) -> memoryview: ... + +@disjoint_base +class bytearray(MutableSequence[int]): + @overload + def __init__(self) -> None: ... + @overload + def __init__(self, ints: Iterable[SupportsIndex] | SupportsIndex | ReadableBuffer, /) -> None: ... + @overload + def __init__(self, string: str, /, encoding: str, errors: str = "strict") -> None: ... + def append(self, item: SupportsIndex, /) -> None: ... + def capitalize(self) -> bytearray: ... + def center(self, width: SupportsIndex, fillchar: bytes = b" ", /) -> bytearray: ... + def count( + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... + def copy(self) -> bytearray: ... + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: ... + def endswith( + self, + suffix: ReadableBuffer | tuple[ReadableBuffer, ...], + start: SupportsIndex | None = None, + end: SupportsIndex | None = None, + /, + ) -> bool: ... + def expandtabs(self, tabsize: SupportsIndex = 8) -> bytearray: ... + def extend(self, iterable_of_ints: Iterable[SupportsIndex], /) -> None: ... + def find( + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... + def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = 1) -> str: ... + def index( + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... + def insert(self, index: SupportsIndex, item: SupportsIndex, /) -> None: ... + def isalnum(self) -> bool: ... + def isalpha(self) -> bool: ... 
+ def isascii(self) -> bool: ... + def isdigit(self) -> bool: ... + def islower(self) -> bool: ... + def isspace(self) -> bool: ... + def istitle(self) -> bool: ... + def isupper(self) -> bool: ... + def join(self, iterable_of_bytes: Iterable[ReadableBuffer], /) -> bytearray: ... + def ljust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytearray: ... + def lower(self) -> bytearray: ... + def lstrip(self, bytes: ReadableBuffer | None = None, /) -> bytearray: ... + def partition(self, sep: ReadableBuffer, /) -> tuple[bytearray, bytearray, bytearray]: ... + def pop(self, index: int = -1, /) -> int: ... + def remove(self, value: int, /) -> None: ... + def removeprefix(self, prefix: ReadableBuffer, /) -> bytearray: ... + def removesuffix(self, suffix: ReadableBuffer, /) -> bytearray: ... + def replace(self, old: ReadableBuffer, new: ReadableBuffer, count: SupportsIndex = -1, /) -> bytearray: ... + def rfind( + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... + def rindex( + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... + def rjust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytearray: ... + def rpartition(self, sep: ReadableBuffer, /) -> tuple[bytearray, bytearray, bytearray]: ... + def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]: ... + def rstrip(self, bytes: ReadableBuffer | None = None, /) -> bytearray: ... + def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]: ... + def splitlines(self, keepends: bool = False) -> list[bytearray]: ... + def startswith( + self, + prefix: ReadableBuffer | tuple[ReadableBuffer, ...], + start: SupportsIndex | None = None, + end: SupportsIndex | None = None, + /, + ) -> bool: ... + def strip(self, bytes: ReadableBuffer | None = None, /) -> bytearray: ... + def swapcase(self) -> bytearray: ... + def title(self) -> bytearray: ... + def translate(self, table: ReadableBuffer | None, /, delete: bytes = b"") -> bytearray: ... + def upper(self) -> bytearray: ... + def zfill(self, width: SupportsIndex, /) -> bytearray: ... + @classmethod + def fromhex(cls, string: str, /) -> Self: ... + @staticmethod + def maketrans(frm: ReadableBuffer, to: ReadableBuffer, /) -> bytes: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[int]: ... + __hash__: ClassVar[None] # type: ignore[assignment] + @overload + def __getitem__(self, key: SupportsIndex, /) -> int: ... + @overload + def __getitem__(self, key: slice, /) -> bytearray: ... + @overload + def __setitem__(self, key: SupportsIndex, value: SupportsIndex, /) -> None: ... + @overload + def __setitem__(self, key: slice, value: Iterable[SupportsIndex] | bytes, /) -> None: ... + def __delitem__(self, key: SupportsIndex | slice, /) -> None: ... + def __add__(self, value: ReadableBuffer, /) -> bytearray: ... + # The superclass wants us to accept Iterable[int], but that fails at runtime. + def __iadd__(self, value: ReadableBuffer, /) -> Self: ... # type: ignore[override] + def __mul__(self, value: SupportsIndex, /) -> bytearray: ... + def __rmul__(self, value: SupportsIndex, /) -> bytearray: ... + def __imul__(self, value: SupportsIndex, /) -> Self: ... + def __mod__(self, value: Any, /) -> bytes: ... 
+ # Incompatible with Sequence.__contains__ + def __contains__(self, key: SupportsIndex | ReadableBuffer, /) -> bool: ... # type: ignore[override] + def __eq__(self, value: object, /) -> bool: ... + def __ne__(self, value: object, /) -> bool: ... + def __lt__(self, value: ReadableBuffer, /) -> bool: ... + def __le__(self, value: ReadableBuffer, /) -> bool: ... + def __gt__(self, value: ReadableBuffer, /) -> bool: ... + def __ge__(self, value: ReadableBuffer, /) -> bool: ... + def __alloc__(self) -> int: ... + def __buffer__(self, flags: int, /) -> memoryview: ... + def __release_buffer__(self, buffer: memoryview, /) -> None: ... + if sys.version_info >= (3, 14): + def resize(self, size: int, /) -> None: ... + +_IntegerFormats: TypeAlias = Literal[ + "b", "B", "@b", "@B", "h", "H", "@h", "@H", "i", "I", "@i", "@I", "l", "L", "@l", "@L", "q", "Q", "@q", "@Q", "P", "@P" +] + +@final +class memoryview(Sequence[_I]): + @property + def format(self) -> str: ... + @property + def itemsize(self) -> int: ... + @property + def shape(self) -> tuple[int, ...] | None: ... + @property + def strides(self) -> tuple[int, ...] | None: ... + @property + def suboffsets(self) -> tuple[int, ...] | None: ... + @property + def readonly(self) -> bool: ... + @property + def ndim(self) -> int: ... + @property + def obj(self) -> ReadableBuffer: ... + @property + def c_contiguous(self) -> bool: ... + @property + def f_contiguous(self) -> bool: ... + @property + def contiguous(self) -> bool: ... + @property + def nbytes(self) -> int: ... + def __new__(cls, obj: ReadableBuffer) -> Self: ... + def __enter__(self) -> Self: ... + def __exit__( + self, + exc_type: type[BaseException] | None, # noqa: PYI036 # This is the module declaring BaseException + exc_val: BaseException | None, + exc_tb: TracebackType | None, + /, + ) -> None: ... + @overload + def cast(self, format: Literal["c", "@c"], shape: list[int] | tuple[int, ...] = ...) -> memoryview[bytes]: ... + @overload + def cast(self, format: Literal["f", "@f", "d", "@d"], shape: list[int] | tuple[int, ...] = ...) -> memoryview[float]: ... + @overload + def cast(self, format: Literal["?"], shape: list[int] | tuple[int, ...] = ...) -> memoryview[bool]: ... + @overload + def cast(self, format: _IntegerFormats, shape: list[int] | tuple[int, ...] = ...) -> memoryview: ... + @overload + def __getitem__(self, key: SupportsIndex | tuple[SupportsIndex, ...], /) -> _I: ... + @overload + def __getitem__(self, key: slice, /) -> memoryview[_I]: ... + def __contains__(self, x: object, /) -> bool: ... + def __iter__(self) -> Iterator[_I]: ... + def __len__(self) -> int: ... + def __eq__(self, value: object, /) -> bool: ... + def __hash__(self) -> int: ... + @overload + def __setitem__(self, key: slice, value: ReadableBuffer, /) -> None: ... + @overload + def __setitem__(self, key: SupportsIndex | tuple[SupportsIndex, ...], value: _I, /) -> None: ... + if sys.version_info >= (3, 10): + def tobytes(self, order: Literal["C", "F", "A"] | None = "C") -> bytes: ... + else: + def tobytes(self, order: Literal["C", "F", "A"] | None = None) -> bytes: ... + + def tolist(self) -> list[int]: ... + def toreadonly(self) -> memoryview: ... + def release(self) -> None: ... + def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = 1) -> str: ... + def __buffer__(self, flags: int, /) -> memoryview: ... + def __release_buffer__(self, buffer: memoryview, /) -> None: ... + + # These are inherited from the Sequence ABC, but don't actually exist on memoryview. 
+ # See https://github.com/python/cpython/issues/125420 + index: ClassVar[None] # type: ignore[assignment] + count: ClassVar[None] # type: ignore[assignment] + if sys.version_info >= (3, 14): + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +@final +class bool(int): + def __new__(cls, o: object = False, /) -> Self: ... + # The following overloads could be represented more elegantly with a TypeVar("_B", bool, int), + # however mypy has a bug regarding TypeVar constraints (https://github.com/python/mypy/issues/11880). + @overload + def __and__(self, value: bool, /) -> bool: ... + @overload + def __and__(self, value: int, /) -> int: ... + @overload + def __or__(self, value: bool, /) -> bool: ... + @overload + def __or__(self, value: int, /) -> int: ... + @overload + def __xor__(self, value: bool, /) -> bool: ... + @overload + def __xor__(self, value: int, /) -> int: ... + @overload + def __rand__(self, value: bool, /) -> bool: ... + @overload + def __rand__(self, value: int, /) -> int: ... + @overload + def __ror__(self, value: bool, /) -> bool: ... + @overload + def __ror__(self, value: int, /) -> int: ... + @overload + def __rxor__(self, value: bool, /) -> bool: ... + @overload + def __rxor__(self, value: int, /) -> int: ... + def __getnewargs__(self) -> tuple[int]: ... + @deprecated("Will throw an error in Python 3.16. Use `not` for logical negation of bools instead.") + def __invert__(self) -> int: ... + +@final +class slice(Generic[_StartT_co, _StopT_co, _StepT_co]): + @property + def start(self) -> _StartT_co: ... + @property + def step(self) -> _StepT_co: ... + @property + def stop(self) -> _StopT_co: ... + # Note: __new__ overloads map `None` to `Any`, since users expect slice(x, None) + # to be compatible with slice(None, x). + # generic slice -------------------------------------------------------------------- + @overload + def __new__(cls, start: None, stop: None = None, step: None = None, /) -> slice[Any, Any, Any]: ... + # unary overloads ------------------------------------------------------------------ + @overload + def __new__(cls, stop: _T2, /) -> slice[Any, _T2, Any]: ... + # binary overloads ----------------------------------------------------------------- + @overload + def __new__(cls, start: _T1, stop: None, step: None = None, /) -> slice[_T1, Any, Any]: ... + @overload + def __new__(cls, start: None, stop: _T2, step: None = None, /) -> slice[Any, _T2, Any]: ... + @overload + def __new__(cls, start: _T1, stop: _T2, step: None = None, /) -> slice[_T1, _T2, Any]: ... + # ternary overloads ---------------------------------------------------------------- + @overload + def __new__(cls, start: None, stop: None, step: _T3, /) -> slice[Any, Any, _T3]: ... + @overload + def __new__(cls, start: _T1, stop: None, step: _T3, /) -> slice[_T1, Any, _T3]: ... + @overload + def __new__(cls, start: None, stop: _T2, step: _T3, /) -> slice[Any, _T2, _T3]: ... + @overload + def __new__(cls, start: _T1, stop: _T2, step: _T3, /) -> slice[_T1, _T2, _T3]: ... + def __eq__(self, value: object, /) -> bool: ... + if sys.version_info >= (3, 12): + def __hash__(self) -> int: ... + else: + __hash__: ClassVar[None] # type: ignore[assignment] + + def indices(self, len: SupportsIndex, /) -> tuple[int, int, int]: ... + +@disjoint_base +class tuple(Sequence[_T_co]): + def __new__(cls, iterable: Iterable[_T_co] = (), /) -> Self: ... + def __len__(self) -> int: ... + def __contains__(self, key: object, /) -> bool: ... + @overload + def __getitem__(self, key: SupportsIndex, /) -> _T_co: ... 
+ @overload + def __getitem__(self, key: slice, /) -> tuple[_T_co, ...]: ... + def __iter__(self) -> Iterator[_T_co]: ... + def __lt__(self, value: tuple[_T_co, ...], /) -> bool: ... + def __le__(self, value: tuple[_T_co, ...], /) -> bool: ... + def __gt__(self, value: tuple[_T_co, ...], /) -> bool: ... + def __ge__(self, value: tuple[_T_co, ...], /) -> bool: ... + def __eq__(self, value: object, /) -> bool: ... + def __hash__(self) -> int: ... + @overload + def __add__(self, value: tuple[_T_co, ...], /) -> tuple[_T_co, ...]: ... + @overload + def __add__(self, value: tuple[_T, ...], /) -> tuple[_T_co | _T, ...]: ... + def __mul__(self, value: SupportsIndex, /) -> tuple[_T_co, ...]: ... + def __rmul__(self, value: SupportsIndex, /) -> tuple[_T_co, ...]: ... + def count(self, value: Any, /) -> int: ... + def index(self, value: Any, start: SupportsIndex = 0, stop: SupportsIndex = sys.maxsize, /) -> int: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +# Doesn't exist at runtime, but deleting this breaks mypy and pyright. See: +# https://github.com/python/typeshed/issues/7580 +# https://github.com/python/mypy/issues/8240 +# Obsolete, use types.FunctionType instead. +@final +@type_check_only +class function: + # Make sure this class definition stays roughly in line with `types.FunctionType` + @property + def __closure__(self) -> tuple[CellType, ...] | None: ... + __code__: CodeType + __defaults__: tuple[Any, ...] | None + __dict__: dict[str, Any] + @property + def __globals__(self) -> dict[str, Any]: ... + __name__: str + __qualname__: str + __annotations__: dict[str, AnnotationForm] + if sys.version_info >= (3, 14): + __annotate__: AnnotateFunc | None + __kwdefaults__: dict[str, Any] | None + if sys.version_info >= (3, 10): + @property + def __builtins__(self) -> dict[str, Any]: ... + if sys.version_info >= (3, 12): + __type_params__: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] + + __module__: str + if sys.version_info >= (3, 13): + def __new__( + cls, + code: CodeType, + globals: dict[str, Any], + name: str | None = None, + argdefs: tuple[object, ...] | None = None, + closure: tuple[CellType, ...] | None = None, + kwdefaults: dict[str, object] | None = None, + ) -> Self: ... + else: + def __new__( + cls, + code: CodeType, + globals: dict[str, Any], + name: str | None = None, + argdefs: tuple[object, ...] | None = None, + closure: tuple[CellType, ...] | None = None, + ) -> Self: ... + + # mypy uses `builtins.function.__get__` to represent methods, properties, and getset_descriptors so we type the return as Any. + def __get__(self, instance: object, owner: type | None = None, /) -> Any: ... + +@disjoint_base +class list(MutableSequence[_T]): + @overload + def __init__(self) -> None: ... + @overload + def __init__(self, iterable: Iterable[_T], /) -> None: ... + def copy(self) -> list[_T]: ... + def append(self, object: _T, /) -> None: ... + def extend(self, iterable: Iterable[_T], /) -> None: ... + def pop(self, index: SupportsIndex = -1, /) -> _T: ... + # Signature of `list.index` should be kept in line with `collections.UserList.index()` + # and multiprocessing.managers.ListProxy.index() + def index(self, value: _T, start: SupportsIndex = 0, stop: SupportsIndex = sys.maxsize, /) -> int: ... + def count(self, value: _T, /) -> int: ... + def insert(self, index: SupportsIndex, object: _T, /) -> None: ... + def remove(self, value: _T, /) -> None: ... 
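# Small usage sketch for the list methods declared above (illustrative only):
# index() takes optional start/stop positions, defaulting to 0 and sys.maxsize.
xs = ["a", "b", "a"]
xs.index("a")           # 0
xs.index("a", 1)        # 2 -- search restricted to xs[1:]
xs.pop()                # 'a' -- default index is -1, i.e. the last element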
+ # Signature of `list.sort` should be kept inline with `collections.UserList.sort()` + # and multiprocessing.managers.ListProxy.sort() + # + # Use list[SupportsRichComparisonT] for the first overload rather than [SupportsRichComparison] + # to work around invariance + @overload + def sort(self: list[SupportsRichComparisonT], *, key: None = None, reverse: bool = False) -> None: ... + @overload + def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> None: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[_T]: ... + __hash__: ClassVar[None] # type: ignore[assignment] + @overload + def __getitem__(self, i: SupportsIndex, /) -> _T: ... + @overload + def __getitem__(self, s: slice, /) -> list[_T]: ... + @overload + def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: ... + @overload + def __setitem__(self, key: slice, value: Iterable[_T], /) -> None: ... + def __delitem__(self, key: SupportsIndex | slice, /) -> None: ... + # Overloading looks unnecessary, but is needed to work around complex mypy problems + @overload + def __add__(self, value: list[_T], /) -> list[_T]: ... + @overload + def __add__(self, value: list[_S], /) -> list[_S | _T]: ... + def __iadd__(self, value: Iterable[_T], /) -> Self: ... # type: ignore[misc] + def __mul__(self, value: SupportsIndex, /) -> list[_T]: ... + def __rmul__(self, value: SupportsIndex, /) -> list[_T]: ... + def __imul__(self, value: SupportsIndex, /) -> Self: ... + def __contains__(self, key: object, /) -> bool: ... + def __reversed__(self) -> Iterator[_T]: ... + def __gt__(self, value: list[_T], /) -> bool: ... + def __ge__(self, value: list[_T], /) -> bool: ... + def __lt__(self, value: list[_T], /) -> bool: ... + def __le__(self, value: list[_T], /) -> bool: ... + def __eq__(self, value: object, /) -> bool: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +@disjoint_base +class dict(MutableMapping[_KT, _VT]): + # __init__ should be kept roughly in line with `collections.UserDict.__init__`, which has similar semantics + # Also multiprocessing.managers.SyncManager.dict() + @overload + def __init__(self) -> None: ... + @overload + def __init__(self: dict[str, _VT], **kwargs: _VT) -> None: ... # pyright: ignore[reportInvalidTypeVarUse] #11780 + @overload + def __init__(self, map: SupportsKeysAndGetItem[_KT, _VT], /) -> None: ... + @overload + def __init__( + self: dict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780 + map: SupportsKeysAndGetItem[str, _VT], + /, + **kwargs: _VT, + ) -> None: ... + @overload + def __init__(self, iterable: Iterable[tuple[_KT, _VT]], /) -> None: ... + @overload + def __init__( + self: dict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780 + iterable: Iterable[tuple[str, _VT]], + /, + **kwargs: _VT, + ) -> None: ... + # Next two overloads are for dict(string.split(sep) for string in iterable) + # Cannot be Iterable[Sequence[_T]] or otherwise dict(["foo", "bar", "baz"]) is not an error + @overload + def __init__(self: dict[str, str], iterable: Iterable[list[str]], /) -> None: ... + @overload + def __init__(self: dict[bytes, bytes], iterable: Iterable[list[bytes]], /) -> None: ... + def __new__(cls, *args: Any, **kwargs: Any) -> Self: ... + def copy(self) -> dict[_KT, _VT]: ... + def keys(self) -> dict_keys[_KT, _VT]: ... + def values(self) -> dict_values[_KT, _VT]: ... + def items(self) -> dict_items[_KT, _VT]: ... 
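# Illustrative sketch of the dict constructor overloads above: the iterable-of-lists
# form is what lets dict(s.split(sep) for s in ...) type-check, and a mapping can be
# combined with keyword arguments.
pairs = ["host=localhost", "port=8080"]
cfg = dict(line.split("=") for line in pairs)    # {'host': 'localhost', 'port': '8080'}
merged = dict(cfg, port="9090")                  # mapping-plus-**kwargs overload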
+ # Signature of `dict.fromkeys` should be kept identical to + # `fromkeys` methods of `OrderedDict`/`ChainMap`/`UserDict` in `collections` + # TODO: the true signature of `dict.fromkeys` is not expressible in the current type system. + # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], value: None = None, /) -> dict[_T, Any | None]: ... + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], value: _S, /) -> dict[_T, _S]: ... + # Positional-only in dict, but not in MutableMapping + @overload # type: ignore[override] + def get(self, key: _KT, default: None = None, /) -> _VT | None: ... + @overload + def get(self, key: _KT, default: _VT, /) -> _VT: ... + @overload + def get(self, key: _KT, default: _T, /) -> _VT | _T: ... + @overload + def pop(self, key: _KT, /) -> _VT: ... + @overload + def pop(self, key: _KT, default: _VT, /) -> _VT: ... + @overload + def pop(self, key: _KT, default: _T, /) -> _VT | _T: ... + def __len__(self) -> int: ... + def __getitem__(self, key: _KT, /) -> _VT: ... + def __setitem__(self, key: _KT, value: _VT, /) -> None: ... + def __delitem__(self, key: _KT, /) -> None: ... + def __iter__(self) -> Iterator[_KT]: ... + def __eq__(self, value: object, /) -> bool: ... + def __reversed__(self) -> Iterator[_KT]: ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + @overload + def __or__(self, value: dict[_KT, _VT], /) -> dict[_KT, _VT]: ... + @overload + def __or__(self, value: dict[_T1, _T2], /) -> dict[_KT | _T1, _VT | _T2]: ... + @overload + def __ror__(self, value: dict[_KT, _VT], /) -> dict[_KT, _VT]: ... + @overload + def __ror__(self, value: dict[_T1, _T2], /) -> dict[_KT | _T1, _VT | _T2]: ... + # dict.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self, value: SupportsKeysAndGetItem[_KT, _VT], /) -> Self: ... + @overload + def __ior__(self, value: Iterable[tuple[_KT, _VT]], /) -> Self: ... + +@disjoint_base +class set(MutableSet[_T]): + @overload + def __init__(self) -> None: ... + @overload + def __init__(self, iterable: Iterable[_T], /) -> None: ... + def add(self, element: _T, /) -> None: ... + def copy(self) -> set[_T]: ... + def difference(self, *s: Iterable[Any]) -> set[_T]: ... + def difference_update(self, *s: Iterable[Any]) -> None: ... + def discard(self, element: _T, /) -> None: ... + def intersection(self, *s: Iterable[Any]) -> set[_T]: ... + def intersection_update(self, *s: Iterable[Any]) -> None: ... + def isdisjoint(self, s: Iterable[Any], /) -> bool: ... + def issubset(self, s: Iterable[Any], /) -> bool: ... + def issuperset(self, s: Iterable[Any], /) -> bool: ... + def remove(self, element: _T, /) -> None: ... + def symmetric_difference(self, s: Iterable[_T], /) -> set[_T]: ... + def symmetric_difference_update(self, s: Iterable[_T], /) -> None: ... + def union(self, *s: Iterable[_S]) -> set[_T | _S]: ... + def update(self, *s: Iterable[_T]) -> None: ... + def __len__(self) -> int: ... + def __contains__(self, o: object, /) -> bool: ... + def __iter__(self) -> Iterator[_T]: ... + def __and__(self, value: AbstractSet[object], /) -> set[_T]: ... + def __iand__(self, value: AbstractSet[object], /) -> Self: ... + def __or__(self, value: AbstractSet[_S], /) -> set[_T | _S]: ... + def __ior__(self, value: AbstractSet[_T], /) -> Self: ... 
# type: ignore[override,misc] + def __sub__(self, value: AbstractSet[_T | None], /) -> set[_T]: ... + def __isub__(self, value: AbstractSet[object], /) -> Self: ... + def __xor__(self, value: AbstractSet[_S], /) -> set[_T | _S]: ... + def __ixor__(self, value: AbstractSet[_T], /) -> Self: ... # type: ignore[override,misc] + def __le__(self, value: AbstractSet[object], /) -> bool: ... + def __lt__(self, value: AbstractSet[object], /) -> bool: ... + def __ge__(self, value: AbstractSet[object], /) -> bool: ... + def __gt__(self, value: AbstractSet[object], /) -> bool: ... + def __eq__(self, value: object, /) -> bool: ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +@disjoint_base +class frozenset(AbstractSet[_T_co]): + @overload + def __new__(cls) -> Self: ... + @overload + def __new__(cls, iterable: Iterable[_T_co], /) -> Self: ... + def copy(self) -> frozenset[_T_co]: ... + def difference(self, *s: Iterable[object]) -> frozenset[_T_co]: ... + def intersection(self, *s: Iterable[object]) -> frozenset[_T_co]: ... + def isdisjoint(self, s: Iterable[_T_co], /) -> bool: ... + def issubset(self, s: Iterable[object], /) -> bool: ... + def issuperset(self, s: Iterable[object], /) -> bool: ... + def symmetric_difference(self, s: Iterable[_T_co], /) -> frozenset[_T_co]: ... + def union(self, *s: Iterable[_S]) -> frozenset[_T_co | _S]: ... + def __len__(self) -> int: ... + def __contains__(self, o: object, /) -> bool: ... + def __iter__(self) -> Iterator[_T_co]: ... + def __and__(self, value: AbstractSet[_T_co], /) -> frozenset[_T_co]: ... + def __or__(self, value: AbstractSet[_S], /) -> frozenset[_T_co | _S]: ... + def __sub__(self, value: AbstractSet[_T_co], /) -> frozenset[_T_co]: ... + def __xor__(self, value: AbstractSet[_S], /) -> frozenset[_T_co | _S]: ... + def __le__(self, value: AbstractSet[object], /) -> bool: ... + def __lt__(self, value: AbstractSet[object], /) -> bool: ... + def __ge__(self, value: AbstractSet[object], /) -> bool: ... + def __gt__(self, value: AbstractSet[object], /) -> bool: ... + def __eq__(self, value: object, /) -> bool: ... + def __hash__(self) -> int: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +@disjoint_base +class enumerate(Iterator[tuple[int, _T]]): + def __new__(cls, iterable: Iterable[_T], start: int = 0) -> Self: ... + def __iter__(self) -> Self: ... + def __next__(self) -> tuple[int, _T]: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +@final +class range(Sequence[int]): + @property + def start(self) -> int: ... + @property + def stop(self) -> int: ... + @property + def step(self) -> int: ... + @overload + def __new__(cls, stop: SupportsIndex, /) -> Self: ... + @overload + def __new__(cls, start: SupportsIndex, stop: SupportsIndex, step: SupportsIndex = 1, /) -> Self: ... + def count(self, value: int, /) -> int: ... + def index(self, value: int, /) -> int: ... # type: ignore[override] + def __len__(self) -> int: ... + def __eq__(self, value: object, /) -> bool: ... + def __hash__(self) -> int: ... + def __contains__(self, key: object, /) -> bool: ... + def __iter__(self) -> Iterator[int]: ... + @overload + def __getitem__(self, key: SupportsIndex, /) -> int: ... + @overload + def __getitem__(self, key: slice, /) -> range: ... + def __reversed__(self) -> Iterator[int]: ... 
+ +@disjoint_base +class property: + fget: Callable[[Any], Any] | None + fset: Callable[[Any, Any], None] | None + fdel: Callable[[Any], None] | None + __isabstractmethod__: bool + if sys.version_info >= (3, 13): + __name__: str + + def __init__( + self, + fget: Callable[[Any], Any] | None = None, + fset: Callable[[Any, Any], None] | None = None, + fdel: Callable[[Any], None] | None = None, + doc: str | None = None, + ) -> None: ... + def getter(self, fget: Callable[[Any], Any], /) -> property: ... + def setter(self, fset: Callable[[Any, Any], None], /) -> property: ... + def deleter(self, fdel: Callable[[Any], None], /) -> property: ... + @overload + def __get__(self, instance: None, owner: type, /) -> Self: ... + @overload + def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... + def __set__(self, instance: Any, value: Any, /) -> None: ... + def __delete__(self, instance: Any, /) -> None: ... + +def abs(x: SupportsAbs[_T], /) -> _T: ... +def all(iterable: Iterable[object], /) -> bool: ... +def any(iterable: Iterable[object], /) -> bool: ... +def ascii(obj: object, /) -> str: ... +def bin(number: int | SupportsIndex, /) -> str: ... +def breakpoint(*args: Any, **kws: Any) -> None: ... +def callable(obj: object, /) -> TypeIs[Callable[..., object]]: ... +def chr(i: int | SupportsIndex, /) -> str: ... + +if sys.version_info >= (3, 10): + def aiter(async_iterable: SupportsAiter[_SupportsAnextT_co], /) -> _SupportsAnextT_co: ... + @type_check_only + class _SupportsSynchronousAnext(Protocol[_AwaitableT_co]): + def __anext__(self) -> _AwaitableT_co: ... + + @overload + # `anext` is not, in fact, an async function. When default is not provided + # `anext` is just a passthrough for `obj.__anext__` + # See discussion in #7491 and pure-Python implementation of `anext` at https://github.com/python/cpython/blob/ea786a882b9ed4261eafabad6011bc7ef3b5bf94/Lib/test/test_asyncgen.py#L52-L80 + def anext(i: _SupportsSynchronousAnext[_AwaitableT], /) -> _AwaitableT: ... + @overload + async def anext(i: SupportsAnext[_T], default: _VT, /) -> _T | _VT: ... + +# compile() returns a CodeType, unless the flags argument includes PyCF_ONLY_AST (=1024), +# in which case it returns ast.AST. We have overloads for flag 0 (the default) and for +# explicitly passing PyCF_ONLY_AST. We fall back to Any for other values of flags. +@overload +def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | bytes | PathLike[Any], + mode: str, + flags: Literal[0], + dont_inherit: bool = False, + optimize: int = -1, + *, + _feature_version: int = -1, +) -> CodeType: ... +@overload +def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | bytes | PathLike[Any], + mode: str, + *, + dont_inherit: bool = False, + optimize: int = -1, + _feature_version: int = -1, +) -> CodeType: ... +@overload +def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | bytes | PathLike[Any], + mode: str, + flags: Literal[1024], + dont_inherit: bool = False, + optimize: int = -1, + *, + _feature_version: int = -1, +) -> _ast.AST: ... +@overload +def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | bytes | PathLike[Any], + mode: str, + flags: int, + dont_inherit: bool = False, + optimize: int = -1, + *, + _feature_version: int = -1, +) -> Any: ... 
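# Illustrative sketch of the compile() overloads above: the default flags return a
# code object, while passing ast.PyCF_ONLY_AST (the Literal[1024] overload) returns
# an AST node instead.
import ast
code = compile("x + 1", "<example>", "eval")                            # CodeType
tree = compile("x + 1", "<example>", "eval", flags=ast.PyCF_ONLY_AST)   # ast.Expression
eval(code, {"x": 41})                                                   # 42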
+ +copyright: _sitebuiltins._Printer +credits: _sitebuiltins._Printer + +def delattr(obj: object, name: str, /) -> None: ... +def dir(o: object = ..., /) -> list[str]: ... +@overload +def divmod(x: SupportsDivMod[_T_contra, _T_co], y: _T_contra, /) -> _T_co: ... +@overload +def divmod(x: _T_contra, y: SupportsRDivMod[_T_contra, _T_co], /) -> _T_co: ... + +# The `globals` argument to `eval` has to be `dict[str, Any]` rather than `dict[str, object]` due to invariance. +# (The `globals` argument has to be a "real dict", rather than any old mapping, unlike the `locals` argument.) +if sys.version_info >= (3, 13): + def eval( + source: str | ReadableBuffer | CodeType, + /, + globals: dict[str, Any] | None = None, + locals: Mapping[str, object] | None = None, + ) -> Any: ... + +else: + def eval( + source: str | ReadableBuffer | CodeType, + globals: dict[str, Any] | None = None, + locals: Mapping[str, object] | None = None, + /, + ) -> Any: ... + +# Comment above regarding `eval` applies to `exec` as well +if sys.version_info >= (3, 13): + def exec( + source: str | ReadableBuffer | CodeType, + /, + globals: dict[str, Any] | None = None, + locals: Mapping[str, object] | None = None, + *, + closure: tuple[CellType, ...] | None = None, + ) -> None: ... + +elif sys.version_info >= (3, 11): + def exec( + source: str | ReadableBuffer | CodeType, + globals: dict[str, Any] | None = None, + locals: Mapping[str, object] | None = None, + /, + *, + closure: tuple[CellType, ...] | None = None, + ) -> None: ... + +else: + def exec( + source: str | ReadableBuffer | CodeType, + globals: dict[str, Any] | None = None, + locals: Mapping[str, object] | None = None, + /, + ) -> None: ... + +exit: _sitebuiltins.Quitter + +@disjoint_base +class filter(Iterator[_T]): + @overload + def __new__(cls, function: None, iterable: Iterable[_T | None], /) -> Self: ... + @overload + def __new__(cls, function: Callable[[_S], TypeGuard[_T]], iterable: Iterable[_S], /) -> Self: ... + @overload + def __new__(cls, function: Callable[[_S], TypeIs[_T]], iterable: Iterable[_S], /) -> Self: ... + @overload + def __new__(cls, function: Callable[[_T], Any], iterable: Iterable[_T], /) -> Self: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... + +def format(value: object, format_spec: str = "", /) -> str: ... +@overload +def getattr(o: object, name: str, /) -> Any: ... + +# While technically covered by the last overload, spelling out the types for None, bool +# and basic containers help mypy out in some tricky situations involving type context +# (aka bidirectional inference) +@overload +def getattr(o: object, name: str, default: None, /) -> Any | None: ... +@overload +def getattr(o: object, name: str, default: bool, /) -> Any | bool: ... +@overload +def getattr(o: object, name: str, default: list[Any], /) -> Any | list[Any]: ... +@overload +def getattr(o: object, name: str, default: dict[Any, Any], /) -> Any | dict[Any, Any]: ... +@overload +def getattr(o: object, name: str, default: _T, /) -> Any | _T: ... +def globals() -> dict[str, Any]: ... +def hasattr(obj: object, name: str, /) -> bool: ... +def hash(obj: object, /) -> int: ... + +help: _sitebuiltins._Helper + +def hex(number: int | SupportsIndex, /) -> str: ... +def id(obj: object, /) -> int: ... +def input(prompt: object = "", /) -> str: ... +@type_check_only +class _GetItemIterable(Protocol[_T_co]): + def __getitem__(self, i: int, /) -> _T_co: ... + +@overload +def iter(object: SupportsIter[_SupportsNextT_co], /) -> _SupportsNextT_co: ... 
+@overload +def iter(object: _GetItemIterable[_T], /) -> Iterator[_T]: ... +@overload +def iter(object: Callable[[], _T | None], sentinel: None, /) -> Iterator[_T]: ... +@overload +def iter(object: Callable[[], _T], sentinel: object, /) -> Iterator[_T]: ... + +if sys.version_info >= (3, 10): + _ClassInfo: TypeAlias = type | types.UnionType | tuple[_ClassInfo, ...] +else: + _ClassInfo: TypeAlias = type | tuple[_ClassInfo, ...] + +def isinstance(obj: object, class_or_tuple: _ClassInfo, /) -> bool: ... +def issubclass(cls: type, class_or_tuple: _ClassInfo, /) -> bool: ... +def len(obj: Sized, /) -> int: ... + +license: _sitebuiltins._Printer + +def locals() -> dict[str, Any]: ... +@disjoint_base +class map(Iterator[_S]): + # 3.14 adds `strict` argument. + if sys.version_info >= (3, 14): + @overload + def __new__(cls, func: Callable[[_T1], _S], iterable: Iterable[_T1], /, *, strict: bool = False) -> Self: ... + @overload + def __new__( + cls, func: Callable[[_T1, _T2], _S], iterable: Iterable[_T1], iter2: Iterable[_T2], /, *, strict: bool = False + ) -> Self: ... + @overload + def __new__( + cls, + func: Callable[[_T1, _T2, _T3], _S], + iterable: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + /, + *, + strict: bool = False, + ) -> Self: ... + @overload + def __new__( + cls, + func: Callable[[_T1, _T2, _T3, _T4], _S], + iterable: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + /, + *, + strict: bool = False, + ) -> Self: ... + @overload + def __new__( + cls, + func: Callable[[_T1, _T2, _T3, _T4, _T5], _S], + iterable: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + iter5: Iterable[_T5], + /, + *, + strict: bool = False, + ) -> Self: ... + @overload + def __new__( + cls, + func: Callable[..., _S], + iterable: Iterable[Any], + iter2: Iterable[Any], + iter3: Iterable[Any], + iter4: Iterable[Any], + iter5: Iterable[Any], + iter6: Iterable[Any], + /, + *iterables: Iterable[Any], + strict: bool = False, + ) -> Self: ... + else: + @overload + def __new__(cls, func: Callable[[_T1], _S], iterable: Iterable[_T1], /) -> Self: ... + @overload + def __new__(cls, func: Callable[[_T1, _T2], _S], iterable: Iterable[_T1], iter2: Iterable[_T2], /) -> Self: ... + @overload + def __new__( + cls, func: Callable[[_T1, _T2, _T3], _S], iterable: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], / + ) -> Self: ... + @overload + def __new__( + cls, + func: Callable[[_T1, _T2, _T3, _T4], _S], + iterable: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + /, + ) -> Self: ... + @overload + def __new__( + cls, + func: Callable[[_T1, _T2, _T3, _T4, _T5], _S], + iterable: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + iter5: Iterable[_T5], + /, + ) -> Self: ... + @overload + def __new__( + cls, + func: Callable[..., _S], + iterable: Iterable[Any], + iter2: Iterable[Any], + iter3: Iterable[Any], + iter4: Iterable[Any], + iter5: Iterable[Any], + iter6: Iterable[Any], + /, + *iterables: Iterable[Any], + ) -> Self: ... + + def __iter__(self) -> Self: ... + def __next__(self) -> _S: ... + +@overload +def max( + arg1: SupportsRichComparisonT, arg2: SupportsRichComparisonT, /, *_args: SupportsRichComparisonT, key: None = None +) -> SupportsRichComparisonT: ... +@overload +def max(arg1: _T, arg2: _T, /, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T: ... 
+@overload +def max(iterable: Iterable[SupportsRichComparisonT], /, *, key: None = None) -> SupportsRichComparisonT: ... +@overload +def max(iterable: Iterable[_T], /, *, key: Callable[[_T], SupportsRichComparison]) -> _T: ... +@overload +def max(iterable: Iterable[SupportsRichComparisonT], /, *, key: None = None, default: _T) -> SupportsRichComparisonT | _T: ... +@overload +def max(iterable: Iterable[_T1], /, *, key: Callable[[_T1], SupportsRichComparison], default: _T2) -> _T1 | _T2: ... +@overload +def min( + arg1: SupportsRichComparisonT, arg2: SupportsRichComparisonT, /, *_args: SupportsRichComparisonT, key: None = None +) -> SupportsRichComparisonT: ... +@overload +def min(arg1: _T, arg2: _T, /, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T: ... +@overload +def min(iterable: Iterable[SupportsRichComparisonT], /, *, key: None = None) -> SupportsRichComparisonT: ... +@overload +def min(iterable: Iterable[_T], /, *, key: Callable[[_T], SupportsRichComparison]) -> _T: ... +@overload +def min(iterable: Iterable[SupportsRichComparisonT], /, *, key: None = None, default: _T) -> SupportsRichComparisonT | _T: ... +@overload +def min(iterable: Iterable[_T1], /, *, key: Callable[[_T1], SupportsRichComparison], default: _T2) -> _T1 | _T2: ... +@overload +def next(i: SupportsNext[_T], /) -> _T: ... +@overload +def next(i: SupportsNext[_T], default: _VT, /) -> _T | _VT: ... +def oct(number: int | SupportsIndex, /) -> str: ... + +_Opener: TypeAlias = Callable[[str, int], int] + +# Text mode: always returns a TextIOWrapper +@overload +def open( + file: FileDescriptorOrPath, + mode: OpenTextMode = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> TextIOWrapper: ... + +# Unbuffered binary mode: returns a FileIO +@overload +def open( + file: FileDescriptorOrPath, + mode: OpenBinaryMode, + buffering: Literal[0], + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> FileIO: ... + +# Buffering is on: return BufferedRandom, BufferedReader, or BufferedWriter +@overload +def open( + file: FileDescriptorOrPath, + mode: OpenBinaryModeUpdating, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> BufferedRandom: ... +@overload +def open( + file: FileDescriptorOrPath, + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> BufferedWriter: ... +@overload +def open( + file: FileDescriptorOrPath, + mode: OpenBinaryModeReading, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> BufferedReader: ... + +# Buffering cannot be determined: fall back to BinaryIO +@overload +def open( + file: FileDescriptorOrPath, + mode: OpenBinaryMode, + buffering: int = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> BinaryIO: ... 
+ +# Fallback if mode is not specified +@overload +def open( + file: FileDescriptorOrPath, + mode: str, + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> IO[Any]: ... +def ord(c: str | bytes | bytearray, /) -> int: ... +@type_check_only +class _SupportsWriteAndFlush(SupportsWrite[_T_contra], SupportsFlush, Protocol[_T_contra]): ... + +@overload +def print( + *values: object, + sep: str | None = " ", + end: str | None = "\n", + file: SupportsWrite[str] | None = None, + flush: Literal[False] = False, +) -> None: ... +@overload +def print( + *values: object, sep: str | None = " ", end: str | None = "\n", file: _SupportsWriteAndFlush[str] | None = None, flush: bool +) -> None: ... + +_E_contra = TypeVar("_E_contra", contravariant=True) +_M_contra = TypeVar("_M_contra", contravariant=True) + +@type_check_only +class _SupportsPow2(Protocol[_E_contra, _T_co]): + def __pow__(self, other: _E_contra, /) -> _T_co: ... + +@type_check_only +class _SupportsPow3NoneOnly(Protocol[_E_contra, _T_co]): + def __pow__(self, other: _E_contra, modulo: None = None, /) -> _T_co: ... + +@type_check_only +class _SupportsPow3(Protocol[_E_contra, _M_contra, _T_co]): + def __pow__(self, other: _E_contra, modulo: _M_contra, /) -> _T_co: ... + +_SupportsSomeKindOfPow = ( # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed + _SupportsPow2[Any, Any] | _SupportsPow3NoneOnly[Any, Any] | _SupportsPow3[Any, Any, Any] +) + +# TODO: `pow(int, int, Literal[0])` fails at runtime, +# but adding a `NoReturn` overload isn't a good solution for expressing that (see #8566). +@overload +def pow(base: int, exp: int, mod: int) -> int: ... +@overload +def pow(base: int, exp: Literal[0], mod: None = None) -> Literal[1]: ... +@overload +def pow(base: int, exp: _PositiveInteger, mod: None = None) -> int: ... +@overload +def pow(base: int, exp: _NegativeInteger, mod: None = None) -> float: ... + +# int base & positive-int exp -> int; int base & negative-int exp -> float +# return type must be Any as `int | float` causes too many false-positive errors +@overload +def pow(base: int, exp: int, mod: None = None) -> Any: ... +@overload +def pow(base: _PositiveInteger, exp: float, mod: None = None) -> float: ... +@overload +def pow(base: _NegativeInteger, exp: float, mod: None = None) -> complex: ... +@overload +def pow(base: float, exp: int, mod: None = None) -> float: ... + +# float base & float exp could return float or complex +# return type must be Any (same as complex base, complex exp), +# as `float | complex` causes too many false-positive errors +@overload +def pow(base: float, exp: complex | _SupportsSomeKindOfPow, mod: None = None) -> Any: ... +@overload +def pow(base: complex, exp: complex | _SupportsSomeKindOfPow, mod: None = None) -> complex: ... +@overload +def pow(base: _SupportsPow2[_E_contra, _T_co], exp: _E_contra, mod: None = None) -> _T_co: ... # type: ignore[overload-overlap] +@overload +def pow(base: _SupportsPow3NoneOnly[_E_contra, _T_co], exp: _E_contra, mod: None = None) -> _T_co: ... # type: ignore[overload-overlap] +@overload +def pow(base: _SupportsPow3[_E_contra, _M_contra, _T_co], exp: _E_contra, mod: _M_contra) -> _T_co: ... +@overload +def pow(base: _SupportsSomeKindOfPow, exp: float, mod: None = None) -> Any: ... +@overload +def pow(base: _SupportsSomeKindOfPow, exp: complex, mod: None = None) -> complex: ... 
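# Minimal sketch of the pow() behaviour these overloads describe (illustrative only):
pow(2, 10)       # 1024 -- int base with a positive int exponent stays an int
pow(2, -1)       # 0.5  -- a negative int exponent produces a float
pow(3, 4, 5)     # 1    -- the three-argument form is modular exponentiation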
+ +quit: _sitebuiltins.Quitter + +@disjoint_base +class reversed(Iterator[_T]): + @overload + def __new__(cls, sequence: Reversible[_T], /) -> Iterator[_T]: ... # type: ignore[misc] + @overload + def __new__(cls, sequence: SupportsLenAndGetItem[_T], /) -> Iterator[_T]: ... # type: ignore[misc] + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... + def __length_hint__(self) -> int: ... + +def repr(obj: object, /) -> str: ... + +# See https://github.com/python/typeshed/pull/9141 +# and https://github.com/python/typeshed/pull/9151 +# on why we don't use `SupportsRound` from `typing.pyi` + +@type_check_only +class _SupportsRound1(Protocol[_T_co]): + def __round__(self) -> _T_co: ... + +@type_check_only +class _SupportsRound2(Protocol[_T_co]): + def __round__(self, ndigits: int, /) -> _T_co: ... + +@overload +def round(number: _SupportsRound1[_T], ndigits: None = None) -> _T: ... +@overload +def round(number: _SupportsRound2[_T], ndigits: SupportsIndex) -> _T: ... + +# See https://github.com/python/typeshed/pull/6292#discussion_r748875189 +# for why arg 3 of `setattr` should be annotated with `Any` and not `object` +def setattr(obj: object, name: str, value: Any, /) -> None: ... +@overload +def sorted( + iterable: Iterable[SupportsRichComparisonT], /, *, key: None = None, reverse: bool = False +) -> list[SupportsRichComparisonT]: ... +@overload +def sorted(iterable: Iterable[_T], /, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> list[_T]: ... + +_AddableT1 = TypeVar("_AddableT1", bound=SupportsAdd[Any, Any]) +_AddableT2 = TypeVar("_AddableT2", bound=SupportsAdd[Any, Any]) + +@type_check_only +class _SupportsSumWithNoDefaultGiven(SupportsAdd[Any, Any], SupportsRAdd[int, Any], Protocol): ... + +_SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWithNoDefaultGiven) + +# In general, the return type of `x + x` is *not* guaranteed to be the same type as x. +# However, we can't express that in the stub for `sum()` +# without creating many false-positive errors (see #7578). +# Instead, we special-case the most common examples of this: bool and literal integers. +@overload +def sum(iterable: Iterable[bool], /, start: int = 0) -> int: ... +@overload +def sum(iterable: Iterable[_SupportsSumNoDefaultT], /) -> _SupportsSumNoDefaultT | Literal[0]: ... +@overload +def sum(iterable: Iterable[_AddableT1], /, start: _AddableT2) -> _AddableT1 | _AddableT2: ... + +# The argument to `vars()` has to have a `__dict__` attribute, so the second overload can't be annotated with `object` +# (A "SupportsDunderDict" protocol doesn't work) +@overload +def vars(object: type, /) -> types.MappingProxyType[str, Any]: ... +@overload +def vars(object: Any = ..., /) -> dict[str, Any]: ... +@disjoint_base +class zip(Iterator[_T_co]): + if sys.version_info >= (3, 10): + @overload + def __new__(cls, *, strict: bool = False) -> zip[Any]: ... + @overload + def __new__(cls, iter1: Iterable[_T1], /, *, strict: bool = False) -> zip[tuple[_T1]]: ... + @overload + def __new__(cls, iter1: Iterable[_T1], iter2: Iterable[_T2], /, *, strict: bool = False) -> zip[tuple[_T1, _T2]]: ... + @overload + def __new__( + cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], /, *, strict: bool = False + ) -> zip[tuple[_T1, _T2, _T3]]: ... + @overload + def __new__( + cls, + iter1: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + /, + *, + strict: bool = False, + ) -> zip[tuple[_T1, _T2, _T3, _T4]]: ... 
+ @overload + def __new__( + cls, + iter1: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + iter5: Iterable[_T5], + /, + *, + strict: bool = False, + ) -> zip[tuple[_T1, _T2, _T3, _T4, _T5]]: ... + @overload + def __new__( + cls, + iter1: Iterable[Any], + iter2: Iterable[Any], + iter3: Iterable[Any], + iter4: Iterable[Any], + iter5: Iterable[Any], + iter6: Iterable[Any], + /, + *iterables: Iterable[Any], + strict: bool = False, + ) -> zip[tuple[Any, ...]]: ... + else: + @overload + def __new__(cls) -> zip[Any]: ... + @overload + def __new__(cls, iter1: Iterable[_T1], /) -> zip[tuple[_T1]]: ... + @overload + def __new__(cls, iter1: Iterable[_T1], iter2: Iterable[_T2], /) -> zip[tuple[_T1, _T2]]: ... + @overload + def __new__(cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], /) -> zip[tuple[_T1, _T2, _T3]]: ... + @overload + def __new__( + cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], / + ) -> zip[tuple[_T1, _T2, _T3, _T4]]: ... + @overload + def __new__( + cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5], / + ) -> zip[tuple[_T1, _T2, _T3, _T4, _T5]]: ... + @overload + def __new__( + cls, + iter1: Iterable[Any], + iter2: Iterable[Any], + iter3: Iterable[Any], + iter4: Iterable[Any], + iter5: Iterable[Any], + iter6: Iterable[Any], + /, + *iterables: Iterable[Any], + ) -> zip[tuple[Any, ...]]: ... + + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... + +# Signature of `builtins.__import__` should be kept identical to `importlib.__import__` +# Return type of `__import__` should be kept the same as return type of `importlib.import_module` +def __import__( + name: str, + globals: Mapping[str, object] | None = None, + locals: Mapping[str, object] | None = None, + fromlist: Sequence[str] | None = (), + level: int = 0, +) -> types.ModuleType: ... +def __build_class__(func: Callable[[], CellType | Any], name: str, /, *bases: Any, metaclass: Any = ..., **kwds: Any) -> Any: ... + +if sys.version_info >= (3, 10): + from types import EllipsisType, NotImplementedType + + # Backwards compatibility hack for folks who relied on the ellipsis type + # existing in typeshed in Python 3.9 and earlier. + ellipsis = EllipsisType + + Ellipsis: EllipsisType + NotImplemented: NotImplementedType +else: + # Actually the type of Ellipsis is , but since it's + # not exposed anywhere under that name, we make it private here. + @final + @type_check_only + class ellipsis: ... + + Ellipsis: ellipsis + + @final + @type_check_only + class _NotImplementedType(Any): ... + + NotImplemented: _NotImplementedType + +@disjoint_base +class BaseException: + args: tuple[Any, ...] + __cause__: BaseException | None + __context__: BaseException | None + __suppress_context__: bool + __traceback__: TracebackType | None + def __init__(self, *args: object) -> None: ... + def __new__(cls, *args: Any, **kwds: Any) -> Self: ... + def __setstate__(self, state: dict[str, Any] | None, /) -> None: ... + def with_traceback(self, tb: TracebackType | None, /) -> Self: ... + # Necessary for security-focused static analyzers (e.g, pysa) + # See https://github.com/python/typeshed/pull/14900 + def __str__(self) -> str: ... # noqa: Y029 + def __repr__(self) -> str: ... # noqa: Y029 + if sys.version_info >= (3, 11): + # only present after add_note() is called + __notes__: list[str] + def add_note(self, note: str, /) -> None: ... + +class GeneratorExit(BaseException): ... 
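# Illustrative sketch of the 3.11+ add_note() API declared on BaseException above
# (__notes__ only exists after the first call, as the comment notes):
err = ValueError("bad input")
err.add_note("while parsing line 3")
err.__notes__                          # ['while parsing line 3']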
+class KeyboardInterrupt(BaseException): ... + +@disjoint_base +class SystemExit(BaseException): + code: sys._ExitCode + +class Exception(BaseException): ... + +@disjoint_base +class StopIteration(Exception): + value: Any + +@disjoint_base +class OSError(Exception): + errno: int | None + strerror: str | None + # filename, filename2 are actually str | bytes | None + filename: Any + filename2: Any + if sys.platform == "win32": + winerror: int + +EnvironmentError = OSError +IOError = OSError +if sys.platform == "win32": + WindowsError = OSError + +class ArithmeticError(Exception): ... +class AssertionError(Exception): ... + +if sys.version_info >= (3, 10): + @disjoint_base + class AttributeError(Exception): + def __init__(self, *args: object, name: str | None = None, obj: object = None) -> None: ... + name: str | None + obj: object + +else: + class AttributeError(Exception): ... + +class BufferError(Exception): ... +class EOFError(Exception): ... + +@disjoint_base +class ImportError(Exception): + def __init__(self, *args: object, name: str | None = None, path: str | None = None) -> None: ... + name: str | None + path: str | None + msg: str # undocumented + if sys.version_info >= (3, 12): + name_from: str | None # undocumented + +class LookupError(Exception): ... +class MemoryError(Exception): ... + +if sys.version_info >= (3, 10): + @disjoint_base + class NameError(Exception): + def __init__(self, *args: object, name: str | None = None) -> None: ... + name: str | None + +else: + class NameError(Exception): ... + +class ReferenceError(Exception): ... +class RuntimeError(Exception): ... +class StopAsyncIteration(Exception): ... + +@disjoint_base +class SyntaxError(Exception): + msg: str + filename: str | None + lineno: int | None + offset: int | None + text: str | None + # Errors are displayed differently if this attribute exists on the exception. + # The value is always None. + print_file_and_line: None + if sys.version_info >= (3, 10): + end_lineno: int | None + end_offset: int | None + + @overload + def __init__(self) -> None: ... + @overload + def __init__(self, msg: object, /) -> None: ... + # Second argument is the tuple (filename, lineno, offset, text) + @overload + def __init__(self, msg: str, info: tuple[str | None, int | None, int | None, str | None], /) -> None: ... + if sys.version_info >= (3, 10): + # end_lineno and end_offset must both be provided if one is. + @overload + def __init__( + self, msg: str, info: tuple[str | None, int | None, int | None, str | None, int | None, int | None], / + ) -> None: ... + # If you provide more than two arguments, it still creates the SyntaxError, but + # the arguments from the info tuple are not parsed. This form is omitted. + +class SystemError(Exception): ... +class TypeError(Exception): ... +class ValueError(Exception): ... +class FloatingPointError(ArithmeticError): ... +class OverflowError(ArithmeticError): ... +class ZeroDivisionError(ArithmeticError): ... +class ModuleNotFoundError(ImportError): ... +class IndexError(LookupError): ... +class KeyError(LookupError): ... +class UnboundLocalError(NameError): ... + +class BlockingIOError(OSError): + characters_written: int + +class ChildProcessError(OSError): ... +class ConnectionError(OSError): ... +class BrokenPipeError(ConnectionError): ... +class ConnectionAbortedError(ConnectionError): ... +class ConnectionRefusedError(ConnectionError): ... +class ConnectionResetError(ConnectionError): ... +class FileExistsError(OSError): ... +class FileNotFoundError(OSError): ... 
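# Illustrative sketch of the 3.10+ AttributeError attributes declared above:
# the interpreter fills in name/obj when an attribute lookup fails.
try:
    (1).bit_lengt                      # deliberate typo
except AttributeError as exc:
    exc.name                           # 'bit_lengt'
    exc.obj                            # 1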
+class InterruptedError(OSError): ... +class IsADirectoryError(OSError): ... +class NotADirectoryError(OSError): ... +class PermissionError(OSError): ... +class ProcessLookupError(OSError): ... +class TimeoutError(OSError): ... +class NotImplementedError(RuntimeError): ... +class RecursionError(RuntimeError): ... +class IndentationError(SyntaxError): ... +class TabError(IndentationError): ... +class UnicodeError(ValueError): ... + +@disjoint_base +class UnicodeDecodeError(UnicodeError): + encoding: str + object: bytes + start: int + end: int + reason: str + def __init__(self, encoding: str, object: ReadableBuffer, start: int, end: int, reason: str, /) -> None: ... + +@disjoint_base +class UnicodeEncodeError(UnicodeError): + encoding: str + object: str + start: int + end: int + reason: str + def __init__(self, encoding: str, object: str, start: int, end: int, reason: str, /) -> None: ... + +@disjoint_base +class UnicodeTranslateError(UnicodeError): + encoding: None + object: str + start: int + end: int + reason: str + def __init__(self, object: str, start: int, end: int, reason: str, /) -> None: ... + +class Warning(Exception): ... +class UserWarning(Warning): ... +class DeprecationWarning(Warning): ... +class SyntaxWarning(Warning): ... +class RuntimeWarning(Warning): ... +class FutureWarning(Warning): ... +class PendingDeprecationWarning(Warning): ... +class ImportWarning(Warning): ... +class UnicodeWarning(Warning): ... +class BytesWarning(Warning): ... +class ResourceWarning(Warning): ... + +if sys.version_info >= (3, 10): + class EncodingWarning(Warning): ... + +if sys.version_info >= (3, 11): + _BaseExceptionT_co = TypeVar("_BaseExceptionT_co", bound=BaseException, covariant=True, default=BaseException) + _BaseExceptionT = TypeVar("_BaseExceptionT", bound=BaseException) + _ExceptionT_co = TypeVar("_ExceptionT_co", bound=Exception, covariant=True, default=Exception) + _ExceptionT = TypeVar("_ExceptionT", bound=Exception) + + # See `check_exception_group.py` for use-cases and comments. + @disjoint_base + class BaseExceptionGroup(BaseException, Generic[_BaseExceptionT_co]): + def __new__(cls, message: str, exceptions: Sequence[_BaseExceptionT_co], /) -> Self: ... + def __init__(self, message: str, exceptions: Sequence[_BaseExceptionT_co], /) -> None: ... + @property + def message(self) -> str: ... + @property + def exceptions(self) -> tuple[_BaseExceptionT_co | BaseExceptionGroup[_BaseExceptionT_co], ...]: ... + @overload + def subgroup( + self, matcher_value: type[_ExceptionT] | tuple[type[_ExceptionT], ...], / + ) -> ExceptionGroup[_ExceptionT] | None: ... + @overload + def subgroup( + self, matcher_value: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...], / + ) -> BaseExceptionGroup[_BaseExceptionT] | None: ... + @overload + def subgroup( + self, matcher_value: Callable[[_BaseExceptionT_co | Self], bool], / + ) -> BaseExceptionGroup[_BaseExceptionT_co] | None: ... + @overload + def split( + self, matcher_value: type[_ExceptionT] | tuple[type[_ExceptionT], ...], / + ) -> tuple[ExceptionGroup[_ExceptionT] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ... + @overload + def split( + self, matcher_value: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...], / + ) -> tuple[BaseExceptionGroup[_BaseExceptionT] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ... 
+ @overload + def split( + self, matcher_value: Callable[[_BaseExceptionT_co | Self], bool], / + ) -> tuple[BaseExceptionGroup[_BaseExceptionT_co] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ... + # In reality it is `NonEmptySequence`: + @overload + def derive(self, excs: Sequence[_ExceptionT], /) -> ExceptionGroup[_ExceptionT]: ... + @overload + def derive(self, excs: Sequence[_BaseExceptionT], /) -> BaseExceptionGroup[_BaseExceptionT]: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + + class ExceptionGroup(BaseExceptionGroup[_ExceptionT_co], Exception): + def __new__(cls, message: str, exceptions: Sequence[_ExceptionT_co], /) -> Self: ... + def __init__(self, message: str, exceptions: Sequence[_ExceptionT_co], /) -> None: ... + @property + def exceptions(self) -> tuple[_ExceptionT_co | ExceptionGroup[_ExceptionT_co], ...]: ... + # We accept a narrower type, but that's OK. + @overload # type: ignore[override] + def subgroup( + self, matcher_value: type[_ExceptionT] | tuple[type[_ExceptionT], ...], / + ) -> ExceptionGroup[_ExceptionT] | None: ... + @overload + def subgroup( + self, matcher_value: Callable[[_ExceptionT_co | Self], bool], / + ) -> ExceptionGroup[_ExceptionT_co] | None: ... + @overload # type: ignore[override] + def split( + self, matcher_value: type[_ExceptionT] | tuple[type[_ExceptionT], ...], / + ) -> tuple[ExceptionGroup[_ExceptionT] | None, ExceptionGroup[_ExceptionT_co] | None]: ... + @overload + def split( + self, matcher_value: Callable[[_ExceptionT_co | Self], bool], / + ) -> tuple[ExceptionGroup[_ExceptionT_co] | None, ExceptionGroup[_ExceptionT_co] | None]: ... + +if sys.version_info >= (3, 13): + class PythonFinalizationError(RuntimeError): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/bz2.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/bz2.pyi new file mode 100644 index 0000000..7bd829d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/bz2.pyi @@ -0,0 +1,119 @@ +import sys +from _bz2 import BZ2Compressor as BZ2Compressor, BZ2Decompressor as BZ2Decompressor +from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer +from collections.abc import Iterable +from io import TextIOWrapper +from typing import IO, Literal, Protocol, SupportsIndex, overload, type_check_only +from typing_extensions import Self, TypeAlias + +if sys.version_info >= (3, 14): + from compression._common._streams import BaseStream, _Reader +else: + from _compression import BaseStream, _Reader + +__all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor", "open", "compress", "decompress"] + +# The following attributes and methods are optional: +# def fileno(self) -> int: ... +# def close(self) -> object: ... +@type_check_only +class _ReadableFileobj(_Reader, Protocol): ... + +@type_check_only +class _WritableFileobj(Protocol): + def write(self, b: bytes, /) -> object: ... + # The following attributes and methods are optional: + # def fileno(self) -> int: ... + # def close(self) -> object: ... + +def compress(data: ReadableBuffer, compresslevel: int = 9) -> bytes: ... +def decompress(data: ReadableBuffer) -> bytes: ... 
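# Minimal round-trip sketch for the one-shot helpers declared above (illustrative only):
import bz2
payload = b"hello world" * 100
blob = bz2.compress(payload, compresslevel=9)
assert bz2.decompress(blob) == payload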
+ +_ReadBinaryMode: TypeAlias = Literal["", "r", "rb"] +_WriteBinaryMode: TypeAlias = Literal["w", "wb", "x", "xb", "a", "ab"] +_ReadTextMode: TypeAlias = Literal["rt"] +_WriteTextMode: TypeAlias = Literal["wt", "xt", "at"] + +@overload +def open( + filename: _ReadableFileobj, + mode: _ReadBinaryMode = "rb", + compresslevel: int = 9, + encoding: None = None, + errors: None = None, + newline: None = None, +) -> BZ2File: ... +@overload +def open( + filename: _ReadableFileobj, + mode: _ReadTextMode, + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> TextIOWrapper: ... +@overload +def open( + filename: _WritableFileobj, + mode: _WriteBinaryMode, + compresslevel: int = 9, + encoding: None = None, + errors: None = None, + newline: None = None, +) -> BZ2File: ... +@overload +def open( + filename: _WritableFileobj, + mode: _WriteTextMode, + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> TextIOWrapper: ... +@overload +def open( + filename: StrOrBytesPath, + mode: _ReadBinaryMode | _WriteBinaryMode = "rb", + compresslevel: int = 9, + encoding: None = None, + errors: None = None, + newline: None = None, +) -> BZ2File: ... +@overload +def open( + filename: StrOrBytesPath, + mode: _ReadTextMode | _WriteTextMode, + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> TextIOWrapper: ... +@overload +def open( + filename: StrOrBytesPath | _ReadableFileobj | _WritableFileobj, + mode: str, + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> BZ2File | TextIOWrapper: ... + +class BZ2File(BaseStream, IO[bytes]): + def __enter__(self) -> Self: ... + @overload + def __init__(self, filename: _WritableFileobj, mode: _WriteBinaryMode, *, compresslevel: int = 9) -> None: ... + @overload + def __init__(self, filename: _ReadableFileobj, mode: _ReadBinaryMode = "r", *, compresslevel: int = 9) -> None: ... + @overload + def __init__( + self, filename: StrOrBytesPath, mode: _ReadBinaryMode | _WriteBinaryMode = "r", *, compresslevel: int = 9 + ) -> None: ... + def read(self, size: int | None = -1) -> bytes: ... + def read1(self, size: int = -1) -> bytes: ... + def readline(self, size: SupportsIndex = -1) -> bytes: ... # type: ignore[override] + def readinto(self, b: WriteableBuffer) -> int: ... + def readlines(self, size: SupportsIndex = -1) -> list[bytes]: ... + def peek(self, n: int = 0) -> bytes: ... + def seek(self, offset: int, whence: int = 0) -> int: ... + def write(self, data: ReadableBuffer) -> int: ... + def writelines(self, seq: Iterable[ReadableBuffer]) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/cProfile.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/cProfile.pyi new file mode 100644 index 0000000..e921584 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/cProfile.pyi @@ -0,0 +1,31 @@ +import _lsprof +from _typeshed import StrOrBytesPath, Unused +from collections.abc import Callable, Mapping +from types import CodeType +from typing import Any, TypeVar +from typing_extensions import ParamSpec, Self, TypeAlias + +__all__ = ["run", "runctx", "Profile"] + +def run(statement: str, filename: str | None = None, sort: str | int = -1) -> None: ... 
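# Usage sketch for run()/runctx() declared here (illustrative only):
import cProfile
cProfile.run("sum(range(10_000))", sort="cumulative")
cProfile.runctx("total = sum(data)", globals(), {"data": range(10_000)})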
+def runctx( + statement: str, globals: dict[str, Any], locals: Mapping[str, Any], filename: str | None = None, sort: str | int = -1 +) -> None: ... + +_T = TypeVar("_T") +_P = ParamSpec("_P") +_Label: TypeAlias = tuple[str, int, str] + +class Profile(_lsprof.Profiler): + stats: dict[_Label, tuple[int, int, int, int, dict[_Label, tuple[int, int, int, int]]]] # undocumented + def print_stats(self, sort: str | int = -1) -> None: ... + def dump_stats(self, file: StrOrBytesPath) -> None: ... + def create_stats(self) -> None: ... + def snapshot_stats(self) -> None: ... + def run(self, cmd: str) -> Self: ... + def runctx(self, cmd: str, globals: dict[str, Any], locals: Mapping[str, Any]) -> Self: ... + def runcall(self, func: Callable[_P, _T], /, *args: _P.args, **kw: _P.kwargs) -> _T: ... + def __enter__(self) -> Self: ... + def __exit__(self, *exc_info: Unused) -> None: ... + +def label(code: str | CodeType) -> _Label: ... # undocumented diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/calendar.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/calendar.pyi new file mode 100644 index 0000000..d00f0d5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/calendar.pyi @@ -0,0 +1,208 @@ +import datetime +import enum +import sys +from _typeshed import Unused +from collections.abc import Iterable, Sequence +from time import struct_time +from typing import ClassVar, Final +from typing_extensions import TypeAlias + +__all__ = [ + "IllegalMonthError", + "IllegalWeekdayError", + "setfirstweekday", + "firstweekday", + "isleap", + "leapdays", + "weekday", + "monthrange", + "monthcalendar", + "prmonth", + "month", + "prcal", + "calendar", + "timegm", + "month_name", + "month_abbr", + "day_name", + "day_abbr", + "Calendar", + "TextCalendar", + "HTMLCalendar", + "LocaleTextCalendar", + "LocaleHTMLCalendar", + "weekheader", +] + +if sys.version_info >= (3, 10): + __all__ += ["FRIDAY", "MONDAY", "SATURDAY", "SUNDAY", "THURSDAY", "TUESDAY", "WEDNESDAY"] +if sys.version_info >= (3, 12): + __all__ += [ + "Day", + "Month", + "JANUARY", + "FEBRUARY", + "MARCH", + "APRIL", + "MAY", + "JUNE", + "JULY", + "AUGUST", + "SEPTEMBER", + "OCTOBER", + "NOVEMBER", + "DECEMBER", + ] + +_LocaleType: TypeAlias = tuple[str | None, str | None] + +class IllegalMonthError(ValueError): + def __init__(self, month: int) -> None: ... + +class IllegalWeekdayError(ValueError): + def __init__(self, weekday: int) -> None: ... + +def isleap(year: int) -> bool: ... +def leapdays(y1: int, y2: int) -> int: ... +def weekday(year: int, month: int, day: int) -> int: ... +def monthrange(year: int, month: int) -> tuple[int, int]: ... + +class Calendar: + firstweekday: int + def __init__(self, firstweekday: int = 0) -> None: ... + def getfirstweekday(self) -> int: ... + def setfirstweekday(self, firstweekday: int) -> None: ... + def iterweekdays(self) -> Iterable[int]: ... + def itermonthdates(self, year: int, month: int) -> Iterable[datetime.date]: ... + def itermonthdays2(self, year: int, month: int) -> Iterable[tuple[int, int]]: ... + def itermonthdays(self, year: int, month: int) -> Iterable[int]: ... + def monthdatescalendar(self, year: int, month: int) -> list[list[datetime.date]]: ... + def monthdays2calendar(self, year: int, month: int) -> list[list[tuple[int, int]]]: ... + def monthdayscalendar(self, year: int, month: int) -> list[list[int]]: ... + def yeardatescalendar(self, year: int, width: int = 3) -> list[list[list[list[datetime.date]]]]: ... 
+ def yeardays2calendar(self, year: int, width: int = 3) -> list[list[list[list[tuple[int, int]]]]]: ... + def yeardayscalendar(self, year: int, width: int = 3) -> list[list[list[list[int]]]]: ... + def itermonthdays3(self, year: int, month: int) -> Iterable[tuple[int, int, int]]: ... + def itermonthdays4(self, year: int, month: int) -> Iterable[tuple[int, int, int, int]]: ... + +class TextCalendar(Calendar): + def prweek(self, theweek: int, width: int) -> None: ... + def formatday(self, day: int, weekday: int, width: int) -> str: ... + def formatweek(self, theweek: int, width: int) -> str: ... + def formatweekday(self, day: int, width: int) -> str: ... + def formatweekheader(self, width: int) -> str: ... + def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = True) -> str: ... + def prmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> None: ... + def formatmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: ... + def formatyear(self, theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: ... + def pryear(self, theyear: int, w: int = 0, l: int = 0, c: int = 6, m: int = 3) -> None: ... + +def firstweekday() -> int: ... +def monthcalendar(year: int, month: int) -> list[list[int]]: ... +def prweek(theweek: int, width: int) -> None: ... +def week(theweek: int, width: int) -> str: ... +def weekheader(width: int) -> str: ... +def prmonth(theyear: int, themonth: int, w: int = 0, l: int = 0) -> None: ... +def month(theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: ... +def calendar(theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: ... +def prcal(theyear: int, w: int = 0, l: int = 0, c: int = 6, m: int = 3) -> None: ... + +class HTMLCalendar(Calendar): + cssclasses: ClassVar[list[str]] + cssclass_noday: ClassVar[str] + cssclasses_weekday_head: ClassVar[list[str]] + cssclass_month_head: ClassVar[str] + cssclass_month: ClassVar[str] + cssclass_year: ClassVar[str] + cssclass_year_head: ClassVar[str] + def formatday(self, day: int, weekday: int) -> str: ... + def formatweek(self, theweek: int) -> str: ... + def formatweekday(self, day: int) -> str: ... + def formatweekheader(self) -> str: ... + def formatmonthname(self, theyear: int, themonth: int, withyear: bool = True) -> str: ... + def formatmonth(self, theyear: int, themonth: int, withyear: bool = True) -> str: ... + def formatyear(self, theyear: int, width: int = 3) -> str: ... + def formatyearpage( + self, theyear: int, width: int = 3, css: str | None = "calendar.css", encoding: str | None = None + ) -> bytes: ... + +class different_locale: + def __init__(self, locale: _LocaleType) -> None: ... + def __enter__(self) -> None: ... + def __exit__(self, *args: Unused) -> None: ... + +class LocaleTextCalendar(TextCalendar): + def __init__(self, firstweekday: int = 0, locale: _LocaleType | None = None) -> None: ... + +class LocaleHTMLCalendar(HTMLCalendar): + def __init__(self, firstweekday: int = 0, locale: _LocaleType | None = None) -> None: ... + def formatweekday(self, day: int) -> str: ... + def formatmonthname(self, theyear: int, themonth: int, withyear: bool = True) -> str: ... + +c: TextCalendar + +def setfirstweekday(firstweekday: int) -> None: ... +def format(cols: int, colwidth: int = 20, spacing: int = 6) -> str: ... +def formatstring(cols: int, colwidth: int = 20, spacing: int = 6) -> str: ... +def timegm(tuple: tuple[int, ...] | struct_time) -> int: ... 
+ +# Data attributes +day_name: Sequence[str] +day_abbr: Sequence[str] +month_name: Sequence[str] +month_abbr: Sequence[str] + +if sys.version_info >= (3, 12): + class Month(enum.IntEnum): + JANUARY = 1 + FEBRUARY = 2 + MARCH = 3 + APRIL = 4 + MAY = 5 + JUNE = 6 + JULY = 7 + AUGUST = 8 + SEPTEMBER = 9 + OCTOBER = 10 + NOVEMBER = 11 + DECEMBER = 12 + + JANUARY: Final = Month.JANUARY + FEBRUARY: Final = Month.FEBRUARY + MARCH: Final = Month.MARCH + APRIL: Final = Month.APRIL + MAY: Final = Month.MAY + JUNE: Final = Month.JUNE + JULY: Final = Month.JULY + AUGUST: Final = Month.AUGUST + SEPTEMBER: Final = Month.SEPTEMBER + OCTOBER: Final = Month.OCTOBER + NOVEMBER: Final = Month.NOVEMBER + DECEMBER: Final = Month.DECEMBER + + class Day(enum.IntEnum): + MONDAY = 0 + TUESDAY = 1 + WEDNESDAY = 2 + THURSDAY = 3 + FRIDAY = 4 + SATURDAY = 5 + SUNDAY = 6 + + MONDAY: Final = Day.MONDAY + TUESDAY: Final = Day.TUESDAY + WEDNESDAY: Final = Day.WEDNESDAY + THURSDAY: Final = Day.THURSDAY + FRIDAY: Final = Day.FRIDAY + SATURDAY: Final = Day.SATURDAY + SUNDAY: Final = Day.SUNDAY +else: + MONDAY: Final = 0 + TUESDAY: Final = 1 + WEDNESDAY: Final = 2 + THURSDAY: Final = 3 + FRIDAY: Final = 4 + SATURDAY: Final = 5 + SUNDAY: Final = 6 + +EPOCH: Final = 1970 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/cgi.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/cgi.pyi new file mode 100644 index 0000000..0f9d434 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/cgi.pyi @@ -0,0 +1,119 @@ +import os +from _typeshed import SupportsContainsAndGetItem, SupportsGetItem, SupportsItemAccess, Unused +from builtins import list as _list, type as _type +from collections.abc import Iterable, Iterator, Mapping +from email.message import Message +from types import TracebackType +from typing import IO, Any, Protocol, type_check_only +from typing_extensions import Self + +__all__ = [ + "MiniFieldStorage", + "FieldStorage", + "parse", + "parse_multipart", + "parse_header", + "test", + "print_exception", + "print_environ", + "print_form", + "print_directory", + "print_arguments", + "print_environ_usage", +] + +def parse( + fp: IO[Any] | None = None, + environ: SupportsItemAccess[str, str] = os.environ, + keep_blank_values: bool = ..., + strict_parsing: bool = ..., + separator: str = "&", +) -> dict[str, list[str]]: ... +def parse_multipart( + fp: IO[Any], pdict: SupportsGetItem[str, bytes], encoding: str = "utf-8", errors: str = "replace", separator: str = "&" +) -> dict[str, list[Any]]: ... +@type_check_only +class _Environ(Protocol): + def __getitem__(self, k: str, /) -> str: ... + def keys(self) -> Iterable[str]: ... + +def parse_header(line: str) -> tuple[str, dict[str, str]]: ... +def test(environ: _Environ = os.environ) -> None: ... +def print_environ(environ: _Environ = os.environ) -> None: ... +def print_form(form: dict[str, Any]) -> None: ... +def print_directory() -> None: ... +def print_environ_usage() -> None: ... + +class MiniFieldStorage: + # The first five "Any" attributes here are always None, but mypy doesn't support that + filename: Any + list: Any + type: Any + file: IO[bytes] | None + type_options: dict[Any, Any] + disposition: Any + disposition_options: dict[Any, Any] + headers: dict[Any, Any] + name: Any + value: Any + def __init__(self, name: Any, value: Any) -> None: ... 
+ +class FieldStorage: + FieldStorageClass: _type | None + keep_blank_values: int + strict_parsing: int + qs_on_post: str | None + headers: Mapping[str, str] | Message + fp: IO[bytes] + encoding: str + errors: str + outerboundary: bytes + bytes_read: int + limit: int | None + disposition: str + disposition_options: dict[str, str] + filename: str | None + file: IO[bytes] | None + type: str + type_options: dict[str, str] + innerboundary: bytes + length: int + done: int + list: _list[Any] | None + value: None | bytes | _list[Any] + def __init__( + self, + fp: IO[Any] | None = None, + headers: Mapping[str, str] | Message | None = None, + outerboundary: bytes = b"", + environ: SupportsContainsAndGetItem[str, str] = os.environ, + keep_blank_values: int = 0, + strict_parsing: int = 0, + limit: int | None = None, + encoding: str = "utf-8", + errors: str = "replace", + max_num_fields: int | None = None, + separator: str = "&", + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + def __iter__(self) -> Iterator[str]: ... + def __getitem__(self, key: str) -> Any: ... + def getvalue(self, key: str, default: Any = None) -> Any: ... + def getfirst(self, key: str, default: Any = None) -> Any: ... + def getlist(self, key: str) -> _list[Any]: ... + def keys(self) -> _list[str]: ... + def __contains__(self, key: str) -> bool: ... + def __len__(self) -> int: ... + def __bool__(self) -> bool: ... + def __del__(self) -> None: ... + # Returns bytes or str IO depending on an internal flag + def make_file(self) -> IO[Any]: ... + +def print_exception( + type: type[BaseException] | None = None, + value: BaseException | None = None, + tb: TracebackType | None = None, + limit: int | None = None, +) -> None: ... +def print_arguments() -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/cgitb.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/cgitb.pyi new file mode 100644 index 0000000..5657258 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/cgitb.pyi @@ -0,0 +1,32 @@ +from _typeshed import OptExcInfo, StrOrBytesPath +from collections.abc import Callable +from types import FrameType, TracebackType +from typing import IO, Any, Final + +__UNDEF__: Final[object] # undocumented sentinel + +def reset() -> str: ... # undocumented +def small(text: str) -> str: ... # undocumented +def strong(text: str) -> str: ... # undocumented +def grey(text: str) -> str: ... # undocumented +def lookup(name: str, frame: FrameType, locals: dict[str, Any]) -> tuple[str | None, Any]: ... # undocumented +def scanvars( + reader: Callable[[], bytes], frame: FrameType, locals: dict[str, Any] +) -> list[tuple[str, str | None, Any]]: ... # undocumented +def html(einfo: OptExcInfo, context: int = 5) -> str: ... +def text(einfo: OptExcInfo, context: int = 5) -> str: ... + +class Hook: # undocumented + def __init__( + self, + display: int = 1, + logdir: StrOrBytesPath | None = None, + context: int = 5, + file: IO[str] | None = None, + format: str = "html", + ) -> None: ... + def __call__(self, etype: type[BaseException] | None, evalue: BaseException | None, etb: TracebackType | None) -> None: ... + def handle(self, info: OptExcInfo | None = None) -> None: ... + +def handler(info: OptExcInfo | None = None) -> None: ... +def enable(display: int = 1, logdir: StrOrBytesPath | None = None, context: int = 5, format: str = "html") -> None: ... 
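For reference, a minimal usage sketch of the cProfile.Profile interface stubbed earlier in this hunk (the workload function is an arbitrary placeholder, not something from the diff):

import cProfile

def busy_work() -> int:
    # Arbitrary CPU-bound placeholder workload.
    return sum(i * i for i in range(100_000))

# Profile() works as a context manager (see __enter__/__exit__ in the stub);
# everything executed inside the with-block is recorded.
with cProfile.Profile() as profiler:
    busy_work()

profiler.print_stats(sort="cumulative")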
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/chunk.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/chunk.pyi new file mode 100644 index 0000000..9788d35 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/chunk.pyi @@ -0,0 +1,20 @@ +from typing import IO + +class Chunk: + closed: bool + align: bool + file: IO[bytes] + chunkname: bytes + chunksize: int + size_read: int + offset: int + seekable: bool + def __init__(self, file: IO[bytes], align: bool = True, bigendian: bool = True, inclheader: bool = False) -> None: ... + def getname(self) -> bytes: ... + def getsize(self) -> int: ... + def close(self) -> None: ... + def isatty(self) -> bool: ... + def seek(self, pos: int, whence: int = 0) -> None: ... + def tell(self) -> int: ... + def read(self, size: int = -1) -> bytes: ... + def skip(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/cmath.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/cmath.pyi new file mode 100644 index 0000000..aed4c63 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/cmath.pyi @@ -0,0 +1,36 @@ +from typing import Final, SupportsComplex, SupportsFloat, SupportsIndex +from typing_extensions import TypeAlias + +e: Final[float] +pi: Final[float] +inf: Final[float] +infj: Final[complex] +nan: Final[float] +nanj: Final[complex] +tau: Final[float] + +_C: TypeAlias = SupportsFloat | SupportsComplex | SupportsIndex | complex + +def acos(z: _C, /) -> complex: ... +def acosh(z: _C, /) -> complex: ... +def asin(z: _C, /) -> complex: ... +def asinh(z: _C, /) -> complex: ... +def atan(z: _C, /) -> complex: ... +def atanh(z: _C, /) -> complex: ... +def cos(z: _C, /) -> complex: ... +def cosh(z: _C, /) -> complex: ... +def exp(z: _C, /) -> complex: ... +def isclose(a: _C, b: _C, *, rel_tol: SupportsFloat = 1e-09, abs_tol: SupportsFloat = 0.0) -> bool: ... +def isinf(z: _C, /) -> bool: ... +def isnan(z: _C, /) -> bool: ... +def log(z: _C, base: _C = ..., /) -> complex: ... +def log10(z: _C, /) -> complex: ... +def phase(z: _C, /) -> float: ... +def polar(z: _C, /) -> tuple[float, float]: ... +def rect(r: float, phi: float, /) -> complex: ... +def sin(z: _C, /) -> complex: ... +def sinh(z: _C, /) -> complex: ... +def sqrt(z: _C, /) -> complex: ... +def tan(z: _C, /) -> complex: ... +def tanh(z: _C, /) -> complex: ... +def isfinite(z: _C, /) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/cmd.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/cmd.pyi new file mode 100644 index 0000000..6e84133 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/cmd.pyi @@ -0,0 +1,46 @@ +from collections.abc import Callable +from typing import IO, Any, Final +from typing_extensions import LiteralString + +__all__ = ["Cmd"] + +PROMPT: Final = "(Cmd) " +IDENTCHARS: Final[LiteralString] # Too big to be `Literal` + +class Cmd: + prompt: str + identchars: str + ruler: str + lastcmd: str + intro: Any | None + doc_leader: str + doc_header: str + misc_header: str + undoc_header: str + nohelp: str + use_rawinput: bool + stdin: IO[str] + stdout: IO[str] + cmdqueue: list[str] + completekey: str + def __init__(self, completekey: str = "tab", stdin: IO[str] | None = None, stdout: IO[str] | None = None) -> None: ... + old_completer: Callable[[str, int], str | None] | None + def cmdloop(self, intro: Any | None = None) -> None: ... + def precmd(self, line: str) -> str: ... 
+ def postcmd(self, stop: bool, line: str) -> bool: ... + def preloop(self) -> None: ... + def postloop(self) -> None: ... + def parseline(self, line: str) -> tuple[str | None, str | None, str]: ... + def onecmd(self, line: str) -> bool: ... + def emptyline(self) -> bool: ... + def default(self, line: str) -> None: ... + def completedefault(self, *ignored: Any) -> list[str]: ... + def completenames(self, text: str, *ignored: Any) -> list[str]: ... + completion_matches: list[str] | None + def complete(self, text: str, state: int) -> list[str] | None: ... + def get_names(self) -> list[str]: ... + # Only the first element of args matters. + def complete_help(self, *args: Any) -> list[str]: ... + def do_help(self, arg: str) -> bool | None: ... + def print_topics(self, header: str, cmds: list[str] | None, cmdlen: Any, maxcol: int) -> None: ... + def columnize(self, list: list[str] | None, displaywidth: int = 80) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/code.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/code.pyi new file mode 100644 index 0000000..0b13c8a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/code.pyi @@ -0,0 +1,54 @@ +import sys +from codeop import CommandCompiler, compile_command as compile_command +from collections.abc import Callable +from types import CodeType +from typing import Any + +__all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact", "compile_command"] + +class InteractiveInterpreter: + locals: dict[str, Any] # undocumented + compile: CommandCompiler # undocumented + def __init__(self, locals: dict[str, Any] | None = None) -> None: ... + def runsource(self, source: str, filename: str = "", symbol: str = "single") -> bool: ... + def runcode(self, code: CodeType) -> None: ... + if sys.version_info >= (3, 13): + def showsyntaxerror(self, filename: str | None = None, *, source: str = "") -> None: ... + else: + def showsyntaxerror(self, filename: str | None = None) -> None: ... + + def showtraceback(self) -> None: ... + def write(self, data: str) -> None: ... + +class InteractiveConsole(InteractiveInterpreter): + buffer: list[str] # undocumented + filename: str # undocumented + if sys.version_info >= (3, 13): + def __init__( + self, locals: dict[str, Any] | None = None, filename: str = "", *, local_exit: bool = False + ) -> None: ... + def push(self, line: str, filename: str | None = None) -> bool: ... + else: + def __init__(self, locals: dict[str, Any] | None = None, filename: str = "") -> None: ... + def push(self, line: str) -> bool: ... + + def interact(self, banner: str | None = None, exitmsg: str | None = None) -> None: ... + def resetbuffer(self) -> None: ... + def raw_input(self, prompt: str = "") -> str: ... + +if sys.version_info >= (3, 13): + def interact( + banner: str | None = None, + readfunc: Callable[[str], str] | None = None, + local: dict[str, Any] | None = None, + exitmsg: str | None = None, + local_exit: bool = False, + ) -> None: ... + +else: + def interact( + banner: str | None = None, + readfunc: Callable[[str], str] | None = None, + local: dict[str, Any] | None = None, + exitmsg: str | None = None, + ) -> None: ... 
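For reference, a small illustrative subclass of cmd.Cmd, whose stub appears above (the command names and prompt are made up):

import cmd

class MiniShell(cmd.Cmd):
    intro = "Type help or ? to list commands."
    prompt = "(mini) "

    def do_greet(self, arg: str) -> None:
        """greet [name] -- print a greeting."""
        print(f"hello, {arg or 'world'}")

    def do_exit(self, arg: str) -> bool:
        """exit -- leave the shell."""
        return True  # a truthy return value stops cmdloop()

if __name__ == "__main__":
    MiniShell().cmdloop()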
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/codecs.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/codecs.pyi new file mode 100644 index 0000000..fa4d4fd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/codecs.pyi @@ -0,0 +1,354 @@ +import sys +import types +from _codecs import * +from _typeshed import ReadableBuffer +from abc import abstractmethod +from collections.abc import Callable, Generator, Iterable +from typing import Any, BinaryIO, ClassVar, Final, Literal, Protocol, TextIO, overload, type_check_only +from typing_extensions import Self, TypeAlias, disjoint_base + +__all__ = [ + "register", + "lookup", + "open", + "EncodedFile", + "BOM", + "BOM_BE", + "BOM_LE", + "BOM32_BE", + "BOM32_LE", + "BOM64_BE", + "BOM64_LE", + "BOM_UTF8", + "BOM_UTF16", + "BOM_UTF16_LE", + "BOM_UTF16_BE", + "BOM_UTF32", + "BOM_UTF32_LE", + "BOM_UTF32_BE", + "CodecInfo", + "Codec", + "IncrementalEncoder", + "IncrementalDecoder", + "StreamReader", + "StreamWriter", + "StreamReaderWriter", + "StreamRecoder", + "getencoder", + "getdecoder", + "getincrementalencoder", + "getincrementaldecoder", + "getreader", + "getwriter", + "encode", + "decode", + "iterencode", + "iterdecode", + "strict_errors", + "ignore_errors", + "replace_errors", + "xmlcharrefreplace_errors", + "backslashreplace_errors", + "namereplace_errors", + "register_error", + "lookup_error", +] + +BOM32_BE: Final = b"\xfe\xff" +BOM32_LE: Final = b"\xff\xfe" +BOM64_BE: Final = b"\x00\x00\xfe\xff" +BOM64_LE: Final = b"\xff\xfe\x00\x00" + +_BufferedEncoding: TypeAlias = Literal[ + "idna", + "raw-unicode-escape", + "unicode-escape", + "utf-16", + "utf-16-be", + "utf-16-le", + "utf-32", + "utf-32-be", + "utf-32-le", + "utf-7", + "utf-8", + "utf-8-sig", +] + +@type_check_only +class _WritableStream(Protocol): + def write(self, data: bytes, /) -> object: ... + def seek(self, offset: int, whence: int, /) -> object: ... + def close(self) -> object: ... + +@type_check_only +class _ReadableStream(Protocol): + def read(self, size: int = ..., /) -> bytes: ... + def seek(self, offset: int, whence: int, /) -> object: ... + def close(self) -> object: ... + +@type_check_only +class _Stream(_WritableStream, _ReadableStream, Protocol): ... + +# TODO: this only satisfies the most common interface, where +# bytes is the raw form and str is the cooked form. +# In the long run, both should become template parameters maybe? +# There *are* bytes->bytes and str->str encodings in the standard library. +# They were much more common in Python 2 than in Python 3. + +@type_check_only +class _Encoder(Protocol): + def __call__(self, input: str, errors: str = ..., /) -> tuple[bytes, int]: ... # signature of Codec().encode + +@type_check_only +class _Decoder(Protocol): + def __call__(self, input: ReadableBuffer, errors: str = ..., /) -> tuple[str, int]: ... # signature of Codec().decode + +@type_check_only +class _StreamReader(Protocol): + def __call__(self, stream: _ReadableStream, errors: str = ..., /) -> StreamReader: ... + +@type_check_only +class _StreamWriter(Protocol): + def __call__(self, stream: _WritableStream, errors: str = ..., /) -> StreamWriter: ... + +@type_check_only +class _IncrementalEncoder(Protocol): + def __call__(self, errors: str = ...) -> IncrementalEncoder: ... + +@type_check_only +class _IncrementalDecoder(Protocol): + def __call__(self, errors: str = ...) -> IncrementalDecoder: ... + +@type_check_only +class _BufferedIncrementalDecoder(Protocol): + def __call__(self, errors: str = ...) 
-> BufferedIncrementalDecoder: ... + +if sys.version_info >= (3, 12): + class CodecInfo(tuple[_Encoder, _Decoder, _StreamReader, _StreamWriter]): + _is_text_encoding: bool + @property + def encode(self) -> _Encoder: ... + @property + def decode(self) -> _Decoder: ... + @property + def streamreader(self) -> _StreamReader: ... + @property + def streamwriter(self) -> _StreamWriter: ... + @property + def incrementalencoder(self) -> _IncrementalEncoder: ... + @property + def incrementaldecoder(self) -> _IncrementalDecoder: ... + name: str + def __new__( + cls, + encode: _Encoder, + decode: _Decoder, + streamreader: _StreamReader | None = None, + streamwriter: _StreamWriter | None = None, + incrementalencoder: _IncrementalEncoder | None = None, + incrementaldecoder: _IncrementalDecoder | None = None, + name: str | None = None, + *, + _is_text_encoding: bool | None = None, + ) -> Self: ... + +else: + @disjoint_base + class CodecInfo(tuple[_Encoder, _Decoder, _StreamReader, _StreamWriter]): + _is_text_encoding: bool + @property + def encode(self) -> _Encoder: ... + @property + def decode(self) -> _Decoder: ... + @property + def streamreader(self) -> _StreamReader: ... + @property + def streamwriter(self) -> _StreamWriter: ... + @property + def incrementalencoder(self) -> _IncrementalEncoder: ... + @property + def incrementaldecoder(self) -> _IncrementalDecoder: ... + name: str + def __new__( + cls, + encode: _Encoder, + decode: _Decoder, + streamreader: _StreamReader | None = None, + streamwriter: _StreamWriter | None = None, + incrementalencoder: _IncrementalEncoder | None = None, + incrementaldecoder: _IncrementalDecoder | None = None, + name: str | None = None, + *, + _is_text_encoding: bool | None = None, + ) -> Self: ... + +def getencoder(encoding: str) -> _Encoder: ... +def getdecoder(encoding: str) -> _Decoder: ... +def getincrementalencoder(encoding: str) -> _IncrementalEncoder: ... +@overload +def getincrementaldecoder(encoding: _BufferedEncoding) -> _BufferedIncrementalDecoder: ... +@overload +def getincrementaldecoder(encoding: str) -> _IncrementalDecoder: ... +def getreader(encoding: str) -> _StreamReader: ... +def getwriter(encoding: str) -> _StreamWriter: ... +def open( + filename: str, mode: str = "r", encoding: str | None = None, errors: str = "strict", buffering: int = -1 +) -> StreamReaderWriter: ... +def EncodedFile(file: _Stream, data_encoding: str, file_encoding: str | None = None, errors: str = "strict") -> StreamRecoder: ... +def iterencode(iterator: Iterable[str], encoding: str, errors: str = "strict") -> Generator[bytes, None, None]: ... +def iterdecode(iterator: Iterable[bytes], encoding: str, errors: str = "strict") -> Generator[str, None, None]: ... + +BOM: Final[Literal[b"\xff\xfe", b"\xfe\xff"]] # depends on `sys.byteorder` +BOM_BE: Final = b"\xfe\xff" +BOM_LE: Final = b"\xff\xfe" +BOM_UTF8: Final = b"\xef\xbb\xbf" +BOM_UTF16: Final[Literal[b"\xff\xfe", b"\xfe\xff"]] # depends on `sys.byteorder` +BOM_UTF16_BE: Final = b"\xfe\xff" +BOM_UTF16_LE: Final = b"\xff\xfe" +BOM_UTF32: Final[Literal[b"\xff\xfe\x00\x00", b"\x00\x00\xfe\xff"]] # depends on `sys.byteorder` +BOM_UTF32_BE: Final = b"\x00\x00\xfe\xff" +BOM_UTF32_LE: Final = b"\xff\xfe\x00\x00" + +def strict_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: ... +def replace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: ... +def ignore_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: ... +def xmlcharrefreplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: ... 
+def backslashreplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: ... +def namereplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: ... + +class Codec: + # These are sort of @abstractmethod but sort of not. + # The StreamReader and StreamWriter subclasses only implement one. + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder: + errors: str + def __init__(self, errors: str = "strict") -> None: ... + @abstractmethod + def encode(self, input: str, final: bool = False) -> bytes: ... + def reset(self) -> None: ... + # documentation says int but str is needed for the subclass. + def getstate(self) -> int | str: ... + def setstate(self, state: int | str) -> None: ... + +class IncrementalDecoder: + errors: str + def __init__(self, errors: str = "strict") -> None: ... + @abstractmethod + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + def reset(self) -> None: ... + def getstate(self) -> tuple[bytes, int]: ... + def setstate(self, state: tuple[bytes, int]) -> None: ... + +# These are not documented but used in encodings/*.py implementations. +class BufferedIncrementalEncoder(IncrementalEncoder): + buffer: str + def __init__(self, errors: str = "strict") -> None: ... + @abstractmethod + def _buffer_encode(self, input: str, errors: str, final: bool) -> tuple[bytes, int]: ... + def encode(self, input: str, final: bool = False) -> bytes: ... + +class BufferedIncrementalDecoder(IncrementalDecoder): + buffer: bytes + def __init__(self, errors: str = "strict") -> None: ... + @abstractmethod + def _buffer_decode(self, input: ReadableBuffer, errors: str, final: bool) -> tuple[str, int]: ... + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +# TODO: it is not possible to specify the requirement that all other +# attributes and methods are passed-through from the stream. +class StreamWriter(Codec): + stream: _WritableStream + errors: str + def __init__(self, stream: _WritableStream, errors: str = "strict") -> None: ... + def write(self, object: str) -> None: ... + def writelines(self, list: Iterable[str]) -> None: ... + def reset(self) -> None: ... + def seek(self, offset: int, whence: int = 0) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... + def __getattr__(self, name: str, getattr: Callable[[Any, str], Any] = ...) -> Any: ... + +class StreamReader(Codec): + stream: _ReadableStream + errors: str + # This is set to str, but some subclasses set to bytes instead. + charbuffertype: ClassVar[type] = ... + def __init__(self, stream: _ReadableStream, errors: str = "strict") -> None: ... + def read(self, size: int = -1, chars: int = -1, firstline: bool = False) -> str: ... + def readline(self, size: int | None = None, keepends: bool = True) -> str: ... + def readlines(self, sizehint: int | None = None, keepends: bool = True) -> list[str]: ... + def reset(self) -> None: ... + def seek(self, offset: int, whence: int = 0) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> str: ... + def __getattr__(self, name: str, getattr: Callable[[Any, str], Any] = ...) -> Any: ... 
+ +# Doesn't actually inherit from TextIO, but wraps a BinaryIO to provide text reading and writing +# and delegates attributes to the underlying binary stream with __getattr__. +class StreamReaderWriter(TextIO): + stream: _Stream + def __init__(self, stream: _Stream, Reader: _StreamReader, Writer: _StreamWriter, errors: str = "strict") -> None: ... + def read(self, size: int = -1) -> str: ... + def readline(self, size: int | None = None) -> str: ... + def readlines(self, sizehint: int | None = None) -> list[str]: ... + def __next__(self) -> str: ... + def __iter__(self) -> Self: ... + def write(self, data: str) -> None: ... # type: ignore[override] + def writelines(self, list: Iterable[str]) -> None: ... + def reset(self) -> None: ... + def seek(self, offset: int, whence: int = 0) -> None: ... # type: ignore[override] + def __enter__(self) -> Self: ... + def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... + def __getattr__(self, name: str) -> Any: ... + # These methods don't actually exist directly, but they are needed to satisfy the TextIO + # interface. At runtime, they are delegated through __getattr__. + def close(self) -> None: ... + def fileno(self) -> int: ... + def flush(self) -> None: ... + def isatty(self) -> bool: ... + def readable(self) -> bool: ... + def truncate(self, size: int | None = ...) -> int: ... + def seekable(self) -> bool: ... + def tell(self) -> int: ... + def writable(self) -> bool: ... + +class StreamRecoder(BinaryIO): + data_encoding: str + file_encoding: str + def __init__( + self, + stream: _Stream, + encode: _Encoder, + decode: _Decoder, + Reader: _StreamReader, + Writer: _StreamWriter, + errors: str = "strict", + ) -> None: ... + def read(self, size: int = -1) -> bytes: ... + def readline(self, size: int | None = None) -> bytes: ... + def readlines(self, sizehint: int | None = None) -> list[bytes]: ... + def __next__(self) -> bytes: ... + def __iter__(self) -> Self: ... + # Base class accepts more types than just bytes + def write(self, data: bytes) -> None: ... # type: ignore[override] + def writelines(self, list: Iterable[bytes]) -> None: ... # type: ignore[override] + def reset(self) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def __enter__(self) -> Self: ... + def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... + def seek(self, offset: int, whence: int = 0) -> None: ... # type: ignore[override] + # These methods don't actually exist directly, but they are needed to satisfy the BinaryIO + # interface. At runtime, they are delegated through __getattr__. + def close(self) -> None: ... + def fileno(self) -> int: ... + def flush(self) -> None: ... + def isatty(self) -> bool: ... + def readable(self) -> bool: ... + def truncate(self, size: int | None = ...) -> int: ... + def seekable(self) -> bool: ... + def tell(self) -> int: ... + def writable(self) -> bool: ... 
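For reference, a short sketch of the incremental-decoder interface declared in the codecs stub above (the byte chunking is illustrative):

import codecs

decoder = codecs.getincrementaldecoder("utf-8")(errors="strict")

# A multi-byte character split across chunk boundaries still decodes
# correctly, because the decoder buffers incomplete sequences between calls.
chunks = [b"caf", b"\xc3", b"\xa9", b" au lait"]
text = "".join(decoder.decode(chunk) for chunk in chunks)
text += decoder.decode(b"", final=True)  # flush any buffered bytes
assert text == "café au lait"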
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/codeop.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/codeop.pyi new file mode 100644 index 0000000..8e31134 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/codeop.pyi @@ -0,0 +1,21 @@ +import sys +from types import CodeType + +__all__ = ["compile_command", "Compile", "CommandCompiler"] + +if sys.version_info >= (3, 14): + def compile_command(source: str, filename: str = "", symbol: str = "single", flags: int = 0) -> CodeType | None: ... + +else: + def compile_command(source: str, filename: str = "", symbol: str = "single") -> CodeType | None: ... + +class Compile: + flags: int + if sys.version_info >= (3, 13): + def __call__(self, source: str, filename: str, symbol: str, flags: int = 0) -> CodeType: ... + else: + def __call__(self, source: str, filename: str, symbol: str) -> CodeType: ... + +class CommandCompiler: + compiler: Compile + def __call__(self, source: str, filename: str = "", symbol: str = "single") -> CodeType | None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/collections/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/collections/__init__.pyi new file mode 100644 index 0000000..8636e6c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/collections/__init__.pyi @@ -0,0 +1,511 @@ +import sys +from _collections_abc import dict_items, dict_keys, dict_values +from _typeshed import SupportsItems, SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT +from types import GenericAlias +from typing import Any, ClassVar, Generic, NoReturn, SupportsIndex, TypeVar, final, overload, type_check_only +from typing_extensions import Self, disjoint_base + +if sys.version_info >= (3, 10): + from collections.abc import ( + Callable, + ItemsView, + Iterable, + Iterator, + KeysView, + Mapping, + MutableMapping, + MutableSequence, + Sequence, + ValuesView, + ) +else: + from _collections_abc import * + +__all__ = ["ChainMap", "Counter", "OrderedDict", "UserDict", "UserList", "UserString", "defaultdict", "deque", "namedtuple"] + +_S = TypeVar("_S") +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") +_KT_co = TypeVar("_KT_co", covariant=True) +_VT_co = TypeVar("_VT_co", covariant=True) + +# namedtuple is special-cased in the type checker; the initializer is ignored. +def namedtuple( + typename: str, + field_names: str | Iterable[str], + *, + rename: bool = False, + module: str | None = None, + defaults: Iterable[Any] | None = None, +) -> type[tuple[Any, ...]]: ... + +class UserDict(MutableMapping[_KT, _VT]): + data: dict[_KT, _VT] + # __init__ should be kept roughly in line with `dict.__init__`, which has the same semantics + @overload + def __init__(self, dict: None = None, /) -> None: ... + @overload + def __init__( + self: UserDict[str, _VT], dict: None = None, /, **kwargs: _VT # pyright: ignore[reportInvalidTypeVarUse] #11780 + ) -> None: ... + @overload + def __init__(self, dict: SupportsKeysAndGetItem[_KT, _VT], /) -> None: ... + @overload + def __init__( + self: UserDict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780 + dict: SupportsKeysAndGetItem[str, _VT], + /, + **kwargs: _VT, + ) -> None: ... + @overload + def __init__(self, iterable: Iterable[tuple[_KT, _VT]], /) -> None: ... 
+ @overload + def __init__( + self: UserDict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780 + iterable: Iterable[tuple[str, _VT]], + /, + **kwargs: _VT, + ) -> None: ... + @overload + def __init__(self: UserDict[str, str], iterable: Iterable[list[str]], /) -> None: ... + @overload + def __init__(self: UserDict[bytes, bytes], iterable: Iterable[list[bytes]], /) -> None: ... + def __len__(self) -> int: ... + def __getitem__(self, key: _KT) -> _VT: ... + def __setitem__(self, key: _KT, item: _VT) -> None: ... + def __delitem__(self, key: _KT) -> None: ... + def __iter__(self) -> Iterator[_KT]: ... + def __contains__(self, key: object) -> bool: ... + def copy(self) -> Self: ... + def __copy__(self) -> Self: ... + + # `UserDict.fromkeys` has the same semantics as `dict.fromkeys`, so should be kept in line with `dict.fromkeys`. + # TODO: Much like `dict.fromkeys`, the true signature of `UserDict.fromkeys` is inexpressible in the current type system. + # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], value: None = None) -> UserDict[_T, Any | None]: ... + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], value: _S) -> UserDict[_T, _S]: ... + @overload + def __or__(self, other: UserDict[_KT, _VT] | dict[_KT, _VT]) -> Self: ... + @overload + def __or__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ... + @overload + def __ror__(self, other: UserDict[_KT, _VT] | dict[_KT, _VT]) -> Self: ... + @overload + def __ror__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ... + # UserDict.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... + if sys.version_info >= (3, 12): + @overload + def get(self, key: _KT, default: None = None) -> _VT | None: ... + @overload + def get(self, key: _KT, default: _VT) -> _VT: ... + @overload + def get(self, key: _KT, default: _T) -> _VT | _T: ... + +class UserList(MutableSequence[_T]): + data: list[_T] + @overload + def __init__(self, initlist: None = None) -> None: ... + @overload + def __init__(self, initlist: Iterable[_T]) -> None: ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __lt__(self, other: list[_T] | UserList[_T]) -> bool: ... + def __le__(self, other: list[_T] | UserList[_T]) -> bool: ... + def __gt__(self, other: list[_T] | UserList[_T]) -> bool: ... + def __ge__(self, other: list[_T] | UserList[_T]) -> bool: ... + def __eq__(self, other: object) -> bool: ... + def __contains__(self, item: object) -> bool: ... + def __len__(self) -> int: ... + @overload + def __getitem__(self, i: SupportsIndex) -> _T: ... + @overload + def __getitem__(self, i: slice) -> Self: ... + @overload + def __setitem__(self, i: SupportsIndex, item: _T) -> None: ... + @overload + def __setitem__(self, i: slice, item: Iterable[_T]) -> None: ... + def __delitem__(self, i: SupportsIndex | slice) -> None: ... + def __add__(self, other: Iterable[_T]) -> Self: ... + def __radd__(self, other: Iterable[_T]) -> Self: ... + def __iadd__(self, other: Iterable[_T]) -> Self: ... + def __mul__(self, n: int) -> Self: ... + def __rmul__(self, n: int) -> Self: ... + def __imul__(self, n: int) -> Self: ... + def append(self, item: _T) -> None: ... 
+ def insert(self, i: int, item: _T) -> None: ... + def pop(self, i: int = -1) -> _T: ... + def remove(self, item: _T) -> None: ... + def copy(self) -> Self: ... + def __copy__(self) -> Self: ... + def count(self, item: _T) -> int: ... + # The runtime signature is "item, *args", and the arguments are then passed + # to `list.index`. In order to give more precise types, we pretend that the + # `item` argument is positional-only. + def index(self, item: _T, start: SupportsIndex = 0, stop: SupportsIndex = sys.maxsize, /) -> int: ... + # All arguments are passed to `list.sort` at runtime, so the signature should be kept in line with `list.sort`. + @overload + def sort(self: UserList[SupportsRichComparisonT], *, key: None = None, reverse: bool = False) -> None: ... + @overload + def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> None: ... + def extend(self, other: Iterable[_T]) -> None: ... + +class UserString(Sequence[UserString]): + data: str + def __init__(self, seq: object) -> None: ... + def __int__(self) -> int: ... + def __float__(self) -> float: ... + def __complex__(self) -> complex: ... + def __getnewargs__(self) -> tuple[str]: ... + def __lt__(self, string: str | UserString) -> bool: ... + def __le__(self, string: str | UserString) -> bool: ... + def __gt__(self, string: str | UserString) -> bool: ... + def __ge__(self, string: str | UserString) -> bool: ... + def __eq__(self, string: object) -> bool: ... + def __hash__(self) -> int: ... + def __contains__(self, char: object) -> bool: ... + def __len__(self) -> int: ... + def __getitem__(self, index: SupportsIndex | slice) -> Self: ... + def __iter__(self) -> Iterator[Self]: ... + def __reversed__(self) -> Iterator[Self]: ... + def __add__(self, other: object) -> Self: ... + def __radd__(self, other: object) -> Self: ... + def __mul__(self, n: int) -> Self: ... + def __rmul__(self, n: int) -> Self: ... + def __mod__(self, args: Any) -> Self: ... + def __rmod__(self, template: object) -> Self: ... + def capitalize(self) -> Self: ... + def casefold(self) -> Self: ... + def center(self, width: int, *args: Any) -> Self: ... + def count(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... + def encode(self: UserString, encoding: str | None = "utf-8", errors: str | None = "strict") -> bytes: ... + def endswith(self, suffix: str | tuple[str, ...], start: int | None = 0, end: int | None = sys.maxsize) -> bool: ... + def expandtabs(self, tabsize: int = 8) -> Self: ... + def find(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... + def format(self, *args: Any, **kwds: Any) -> str: ... + def format_map(self, mapping: Mapping[str, Any]) -> str: ... + def index(self, sub: str, start: int = 0, end: int = sys.maxsize) -> int: ... + def isalpha(self) -> bool: ... + def isalnum(self) -> bool: ... + def isdecimal(self) -> bool: ... + def isdigit(self) -> bool: ... + def isidentifier(self) -> bool: ... + def islower(self) -> bool: ... + def isnumeric(self) -> bool: ... + def isprintable(self) -> bool: ... + def isspace(self) -> bool: ... + def istitle(self) -> bool: ... + def isupper(self) -> bool: ... + def isascii(self) -> bool: ... + def join(self, seq: Iterable[str]) -> str: ... + def ljust(self, width: int, *args: Any) -> Self: ... + def lower(self) -> Self: ... + def lstrip(self, chars: str | None = None) -> Self: ... + maketrans = str.maketrans + def partition(self, sep: str) -> tuple[str, str, str]: ... 
+ def removeprefix(self, prefix: str | UserString, /) -> Self: ... + def removesuffix(self, suffix: str | UserString, /) -> Self: ... + def replace(self, old: str | UserString, new: str | UserString, maxsplit: int = -1) -> Self: ... + def rfind(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... + def rindex(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... + def rjust(self, width: int, *args: Any) -> Self: ... + def rpartition(self, sep: str) -> tuple[str, str, str]: ... + def rstrip(self, chars: str | None = None) -> Self: ... + def split(self, sep: str | None = None, maxsplit: int = -1) -> list[str]: ... + def rsplit(self, sep: str | None = None, maxsplit: int = -1) -> list[str]: ... + def splitlines(self, keepends: bool = False) -> list[str]: ... + def startswith(self, prefix: str | tuple[str, ...], start: int | None = 0, end: int | None = sys.maxsize) -> bool: ... + def strip(self, chars: str | None = None) -> Self: ... + def swapcase(self) -> Self: ... + def title(self) -> Self: ... + def translate(self, *args: Any) -> Self: ... + def upper(self) -> Self: ... + def zfill(self, width: int) -> Self: ... + +@disjoint_base +class deque(MutableSequence[_T]): + @property + def maxlen(self) -> int | None: ... + @overload + def __init__(self, *, maxlen: int | None = None) -> None: ... + @overload + def __init__(self, iterable: Iterable[_T], maxlen: int | None = None) -> None: ... + def append(self, x: _T, /) -> None: ... + def appendleft(self, x: _T, /) -> None: ... + def copy(self) -> Self: ... + def count(self, x: _T, /) -> int: ... + def extend(self, iterable: Iterable[_T], /) -> None: ... + def extendleft(self, iterable: Iterable[_T], /) -> None: ... + def insert(self, i: int, x: _T, /) -> None: ... + def index(self, x: _T, start: int = 0, stop: int = ..., /) -> int: ... + def pop(self) -> _T: ... # type: ignore[override] + def popleft(self) -> _T: ... + def remove(self, value: _T, /) -> None: ... + def rotate(self, n: int = 1, /) -> None: ... + def __copy__(self) -> Self: ... + def __len__(self) -> int: ... + __hash__: ClassVar[None] # type: ignore[assignment] + # These methods of deque don't take slices, unlike MutableSequence, hence the type: ignores + def __getitem__(self, key: SupportsIndex, /) -> _T: ... # type: ignore[override] + def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: ... # type: ignore[override] + def __delitem__(self, key: SupportsIndex, /) -> None: ... # type: ignore[override] + def __contains__(self, key: object, /) -> bool: ... + def __reduce__(self) -> tuple[type[Self], tuple[()], None, Iterator[_T]]: ... + def __iadd__(self, value: Iterable[_T], /) -> Self: ... + def __add__(self, value: Self, /) -> Self: ... + def __mul__(self, value: int, /) -> Self: ... + def __imul__(self, value: int, /) -> Self: ... + def __lt__(self, value: deque[_T], /) -> bool: ... + def __le__(self, value: deque[_T], /) -> bool: ... + def __gt__(self, value: deque[_T], /) -> bool: ... + def __ge__(self, value: deque[_T], /) -> bool: ... + def __eq__(self, value: object, /) -> bool: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +class Counter(dict[_T, int], Generic[_T]): + @overload + def __init__(self, iterable: None = None, /) -> None: ... + @overload + def __init__(self: Counter[str], iterable: None = None, /, **kwargs: int) -> None: ... + @overload + def __init__(self, mapping: SupportsKeysAndGetItem[_T, int], /) -> None: ... 
+ @overload + def __init__(self, iterable: Iterable[_T], /) -> None: ... + def copy(self) -> Self: ... + def elements(self) -> Iterator[_T]: ... + def most_common(self, n: int | None = None) -> list[tuple[_T, int]]: ... + @classmethod + def fromkeys(cls, iterable: Any, v: int | None = None) -> NoReturn: ... # type: ignore[override] + @overload + def subtract(self, iterable: None = None, /) -> None: ... + @overload + def subtract(self, mapping: Mapping[_T, int], /) -> None: ... + @overload + def subtract(self, iterable: Iterable[_T], /) -> None: ... + # Unlike dict.update(), use Mapping instead of SupportsKeysAndGetItem for the first overload + # (source code does an `isinstance(other, Mapping)` check) + # + # The second overload is also deliberately different to dict.update() + # (if it were `Iterable[_T] | Iterable[tuple[_T, int]]`, + # the tuples would be added as keys, breaking type safety) + @overload # type: ignore[override] + def update(self, m: Mapping[_T, int], /, **kwargs: int) -> None: ... + @overload + def update(self, iterable: Iterable[_T], /, **kwargs: int) -> None: ... + @overload + def update(self, iterable: None = None, /, **kwargs: int) -> None: ... + def __missing__(self, key: _T) -> int: ... + def __delitem__(self, elem: object) -> None: ... + if sys.version_info >= (3, 10): + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + + def __add__(self, other: Counter[_S]) -> Counter[_T | _S]: ... + def __sub__(self, other: Counter[_T]) -> Counter[_T]: ... + def __and__(self, other: Counter[_T]) -> Counter[_T]: ... + def __or__(self, other: Counter[_S]) -> Counter[_T | _S]: ... # type: ignore[override] + def __pos__(self) -> Counter[_T]: ... + def __neg__(self) -> Counter[_T]: ... + # several type: ignores because __iadd__ is supposedly incompatible with __add__, etc. + def __iadd__(self, other: SupportsItems[_T, int]) -> Self: ... # type: ignore[misc] + def __isub__(self, other: SupportsItems[_T, int]) -> Self: ... + def __iand__(self, other: SupportsItems[_T, int]) -> Self: ... + def __ior__(self, other: SupportsItems[_T, int]) -> Self: ... # type: ignore[override,misc] + if sys.version_info >= (3, 10): + def total(self) -> int: ... + def __le__(self, other: Counter[Any]) -> bool: ... + def __lt__(self, other: Counter[Any]) -> bool: ... + def __ge__(self, other: Counter[Any]) -> bool: ... + def __gt__(self, other: Counter[Any]) -> bool: ... + +# The pure-Python implementations of the "views" classes +# These are exposed at runtime in `collections/__init__.py` +class _OrderedDictKeysView(KeysView[_KT_co]): + def __reversed__(self) -> Iterator[_KT_co]: ... + +class _OrderedDictItemsView(ItemsView[_KT_co, _VT_co]): + def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... + +class _OrderedDictValuesView(ValuesView[_VT_co]): + def __reversed__(self) -> Iterator[_VT_co]: ... + +# The C implementations of the "views" classes +# (At runtime, these are called `odict_keys`, `odict_items` and `odict_values`, +# but they are not exposed anywhere) +# pyright doesn't have a specific error code for subclassing error! +@final +@type_check_only +class _odict_keys(dict_keys[_KT_co, _VT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def __reversed__(self) -> Iterator[_KT_co]: ... + +@final +@type_check_only +class _odict_items(dict_items[_KT_co, _VT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... 
+ +@final +@type_check_only +class _odict_values(dict_values[_KT_co, _VT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def __reversed__(self) -> Iterator[_VT_co]: ... + +@disjoint_base +class OrderedDict(dict[_KT, _VT]): + def popitem(self, last: bool = True) -> tuple[_KT, _VT]: ... + def move_to_end(self, key: _KT, last: bool = True) -> None: ... + def copy(self) -> Self: ... + def __reversed__(self) -> Iterator[_KT]: ... + def keys(self) -> _odict_keys[_KT, _VT]: ... + def items(self) -> _odict_items[_KT, _VT]: ... + def values(self) -> _odict_values[_KT, _VT]: ... + # The signature of OrderedDict.fromkeys should be kept in line with `dict.fromkeys`, modulo positional-only differences. + # Like dict.fromkeys, its true signature is not expressible in the current type system. + # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], value: None = None) -> OrderedDict[_T, Any | None]: ... + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], value: _S) -> OrderedDict[_T, _S]: ... + # Keep OrderedDict.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. + @overload + def setdefault(self: OrderedDict[_KT, _T | None], key: _KT, default: None = None) -> _T | None: ... + @overload + def setdefault(self, key: _KT, default: _VT) -> _VT: ... + # Same as dict.pop, but accepts keyword arguments + @overload + def pop(self, key: _KT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _VT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _T) -> _VT | _T: ... + def __eq__(self, value: object, /) -> bool: ... + @overload + def __or__(self, value: dict[_KT, _VT], /) -> Self: ... + @overload + def __or__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: ... + @overload + def __ror__(self, value: dict[_KT, _VT], /) -> Self: ... + @overload + def __ror__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc] + +@disjoint_base +class defaultdict(dict[_KT, _VT]): + default_factory: Callable[[], _VT] | None + @overload + def __init__(self) -> None: ... + @overload + def __init__(self: defaultdict[str, _VT], **kwargs: _VT) -> None: ... # pyright: ignore[reportInvalidTypeVarUse] #11780 + @overload + def __init__(self, default_factory: Callable[[], _VT] | None, /) -> None: ... + @overload + def __init__( + self: defaultdict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780 + default_factory: Callable[[], _VT] | None, + /, + **kwargs: _VT, + ) -> None: ... + @overload + def __init__(self, default_factory: Callable[[], _VT] | None, map: SupportsKeysAndGetItem[_KT, _VT], /) -> None: ... + @overload + def __init__( + self: defaultdict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780 + default_factory: Callable[[], _VT] | None, + map: SupportsKeysAndGetItem[str, _VT], + /, + **kwargs: _VT, + ) -> None: ... + @overload + def __init__(self, default_factory: Callable[[], _VT] | None, iterable: Iterable[tuple[_KT, _VT]], /) -> None: ... + @overload + def __init__( + self: defaultdict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780 + default_factory: Callable[[], _VT] | None, + iterable: Iterable[tuple[str, _VT]], + /, + **kwargs: _VT, + ) -> None: ... + def __missing__(self, key: _KT, /) -> _VT: ... + def __copy__(self) -> Self: ... + def copy(self) -> Self: ... + @overload + def __or__(self, value: dict[_KT, _VT], /) -> Self: ... 
+ @overload + def __or__(self, value: dict[_T1, _T2], /) -> defaultdict[_KT | _T1, _VT | _T2]: ... + @overload + def __ror__(self, value: dict[_KT, _VT], /) -> Self: ... + @overload + def __ror__(self, value: dict[_T1, _T2], /) -> defaultdict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc] + +class ChainMap(MutableMapping[_KT, _VT]): + maps: list[MutableMapping[_KT, _VT]] + def __init__(self, *maps: MutableMapping[_KT, _VT]) -> None: ... + def new_child(self, m: MutableMapping[_KT, _VT] | None = None) -> Self: ... + @property + def parents(self) -> Self: ... + def __setitem__(self, key: _KT, value: _VT) -> None: ... + def __delitem__(self, key: _KT) -> None: ... + def __getitem__(self, key: _KT) -> _VT: ... + def __iter__(self) -> Iterator[_KT]: ... + def __len__(self) -> int: ... + def __contains__(self, key: object) -> bool: ... + @overload + def get(self, key: _KT, default: None = None) -> _VT | None: ... + @overload + def get(self, key: _KT, default: _VT) -> _VT: ... + @overload + def get(self, key: _KT, default: _T) -> _VT | _T: ... + def __missing__(self, key: _KT) -> _VT: ... # undocumented + def __bool__(self) -> bool: ... + # Keep ChainMap.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. + @overload + def setdefault(self: ChainMap[_KT, _T | None], key: _KT, default: None = None) -> _T | None: ... + @overload + def setdefault(self, key: _KT, default: _VT) -> _VT: ... + @overload + def pop(self, key: _KT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _VT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _T) -> _VT | _T: ... + def copy(self) -> Self: ... + __copy__ = copy + # All arguments to `fromkeys` are passed to `dict.fromkeys` at runtime, + # so the signature should be kept in line with `dict.fromkeys`. + if sys.version_info >= (3, 13): + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], /) -> ChainMap[_T, Any | None]: ... + else: + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T]) -> ChainMap[_T, Any | None]: ... + + @classmethod + @overload + # Special-case None: the user probably wants to add non-None values later. + def fromkeys(cls, iterable: Iterable[_T], value: None, /) -> ChainMap[_T, Any | None]: ... + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], value: _S, /) -> ChainMap[_T, _S]: ... + @overload + def __or__(self, other: Mapping[_KT, _VT]) -> Self: ... + @overload + def __or__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ... + @overload + def __ror__(self, other: Mapping[_KT, _VT]) -> Self: ... + @overload + def __ror__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ... + # ChainMap.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... 
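For reference, a brief sketch exercising several of the containers declared in the collections stub above (the sample data is made up):

from collections import ChainMap, Counter, defaultdict, deque

# Counter: multiset arithmetic over hashable items.
tally = Counter("abracadabra")
assert tally.most_common(1) == [("a", 5)]

# defaultdict: missing keys are created via the factory on first access.
groups: defaultdict[str, list[int]] = defaultdict(list)
for n in range(6):
    groups["even" if n % 2 == 0 else "odd"].append(n)

# deque: O(1) appends/pops at both ends, optionally bounded by maxlen.
window = deque(maxlen=3)
for n in range(5):
    window.append(n)
assert list(window) == [2, 3, 4]

# ChainMap: layered lookup without copying the underlying mappings.
defaults = {"color": "blue", "debug": False}
overrides = {"debug": True}
config = ChainMap(overrides, defaults)
assert config["color"] == "blue" and config["debug"] is True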
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/collections/abc.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/collections/abc.pyi new file mode 100644 index 0000000..3df2a1d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/collections/abc.pyi @@ -0,0 +1,2 @@ +from _collections_abc import * +from _collections_abc import __all__ as __all__ diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/colorsys.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/colorsys.pyi new file mode 100644 index 0000000..4afcb53 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/colorsys.pyi @@ -0,0 +1,15 @@ +from typing import Final + +__all__ = ["rgb_to_yiq", "yiq_to_rgb", "rgb_to_hls", "hls_to_rgb", "rgb_to_hsv", "hsv_to_rgb"] + +def rgb_to_yiq(r: float, g: float, b: float) -> tuple[float, float, float]: ... +def yiq_to_rgb(y: float, i: float, q: float) -> tuple[float, float, float]: ... +def rgb_to_hls(r: float, g: float, b: float) -> tuple[float, float, float]: ... +def hls_to_rgb(h: float, l: float, s: float) -> tuple[float, float, float]: ... +def rgb_to_hsv(r: float, g: float, b: float) -> tuple[float, float, float]: ... +def hsv_to_rgb(h: float, s: float, v: float) -> tuple[float, float, float]: ... + +# TODO: undocumented +ONE_SIXTH: Final[float] +ONE_THIRD: Final[float] +TWO_THIRD: Final[float] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compileall.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compileall.pyi new file mode 100644 index 0000000..8972d50 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compileall.pyi @@ -0,0 +1,88 @@ +import sys +from _typeshed import StrPath +from py_compile import PycInvalidationMode +from typing import Any, Protocol, type_check_only + +__all__ = ["compile_dir", "compile_file", "compile_path"] + +@type_check_only +class _SupportsSearch(Protocol): + def search(self, string: str, /) -> Any: ... + +if sys.version_info >= (3, 10): + def compile_dir( + dir: StrPath, + maxlevels: int | None = None, + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + workers: int = 1, + invalidation_mode: PycInvalidationMode | None = None, + *, + stripdir: StrPath | None = None, + prependdir: StrPath | None = None, + limit_sl_dest: StrPath | None = None, + hardlink_dupes: bool = False, + ) -> bool: ... + def compile_file( + fullname: StrPath, + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, + *, + stripdir: StrPath | None = None, + prependdir: StrPath | None = None, + limit_sl_dest: StrPath | None = None, + hardlink_dupes: bool = False, + ) -> bool: ... + +else: + def compile_dir( + dir: StrPath, + maxlevels: int | None = None, + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + workers: int = 1, + invalidation_mode: PycInvalidationMode | None = None, + *, + stripdir: str | None = None, # https://bugs.python.org/issue40447 + prependdir: StrPath | None = None, + limit_sl_dest: StrPath | None = None, + hardlink_dupes: bool = False, + ) -> bool: ... 
+ def compile_file( + fullname: StrPath, + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, + *, + stripdir: str | None = None, # https://bugs.python.org/issue40447 + prependdir: StrPath | None = None, + limit_sl_dest: StrPath | None = None, + hardlink_dupes: bool = False, + ) -> bool: ... + +def compile_path( + skip_curdir: bool = ..., + maxlevels: int = 0, + force: bool = False, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, +) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/__init__.pyi new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/_common/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/_common/__init__.pyi new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/_common/_streams.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/_common/_streams.pyi new file mode 100644 index 0000000..96aec24 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/_common/_streams.pyi @@ -0,0 +1,37 @@ +from _typeshed import ReadableBuffer, WriteableBuffer +from collections.abc import Callable +from io import DEFAULT_BUFFER_SIZE, BufferedIOBase, RawIOBase +from typing import Any, Protocol, type_check_only + +BUFFER_SIZE = DEFAULT_BUFFER_SIZE + +@type_check_only +class _Reader(Protocol): + def read(self, n: int, /) -> bytes: ... + def seekable(self) -> bool: ... + def seek(self, n: int, /) -> Any: ... + +@type_check_only +class _Decompressor(Protocol): + def decompress(self, data: ReadableBuffer, /, max_length: int = ...) -> bytes: ... + @property + def unused_data(self) -> bytes: ... + @property + def eof(self) -> bool: ... + # `zlib._Decompress` does not have next property, but `DecompressReader` calls it: + # @property + # def needs_input(self) -> bool: ... + +class BaseStream(BufferedIOBase): ... + +class DecompressReader(RawIOBase): + def __init__( + self, + fp: _Reader, + decomp_factory: Callable[..., _Decompressor], # Consider backporting changes to _compression + trailing_error: type[Exception] | tuple[type[Exception], ...] = (), + **decomp_args: Any, # These are passed to decomp_factory. + ) -> None: ... + def readinto(self, b: WriteableBuffer) -> int: ... + def read(self, size: int = -1) -> bytes: ... + def seek(self, offset: int, whence: int = 0) -> int: ... 
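The compileall stub above declares compile_dir, compile_file and compile_path; the rx parameter accepts any object with a search method, which is why the stub defines the _SupportsSearch protocol. A hedged sketch of typical use (the "src" path and "test_" pattern are placeholders):

import re
import compileall

# Byte-compile every .py file under ./src, skipping anything matching "test_".
# compile_dir returns True only if all files compiled successfully.
ok = compileall.compile_dir(
    "src",
    quiet=1,                   # report errors only
    rx=re.compile(r"test_"),   # files whose path matches are skipped
    workers=0,                 # 0 = use one worker per CPU core
)
if not ok:
    raise SystemExit("byte-compilation reported errors")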
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/bz2.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/bz2.pyi new file mode 100644 index 0000000..9ddc39f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/bz2.pyi @@ -0,0 +1 @@ +from bz2 import * diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/gzip.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/gzip.pyi new file mode 100644 index 0000000..9422a73 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/gzip.pyi @@ -0,0 +1 @@ +from gzip import * diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/lzma.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/lzma.pyi new file mode 100644 index 0000000..936c381 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/lzma.pyi @@ -0,0 +1 @@ +from lzma import * diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/zlib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/zlib.pyi new file mode 100644 index 0000000..78d176c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/zlib.pyi @@ -0,0 +1 @@ +from zlib import * diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/zstd/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/zstd/__init__.pyi new file mode 100644 index 0000000..d5da4be --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/zstd/__init__.pyi @@ -0,0 +1,88 @@ +import enum +from _typeshed import ReadableBuffer +from collections.abc import Iterable, Mapping +from compression.zstd._zstdfile import ZstdFile, open +from typing import Final, final + +import _zstd +from _zstd import ZstdCompressor, ZstdDecompressor, ZstdDict, ZstdError, get_frame_size, zstd_version + +__all__ = ( + # compression.zstd + "COMPRESSION_LEVEL_DEFAULT", + "compress", + "CompressionParameter", + "decompress", + "DecompressionParameter", + "finalize_dict", + "get_frame_info", + "Strategy", + "train_dict", + # compression.zstd._zstdfile + "open", + "ZstdFile", + # _zstd + "get_frame_size", + "zstd_version", + "zstd_version_info", + "ZstdCompressor", + "ZstdDecompressor", + "ZstdDict", + "ZstdError", +) + +zstd_version_info: Final[tuple[int, int, int]] +COMPRESSION_LEVEL_DEFAULT: Final = _zstd.ZSTD_CLEVEL_DEFAULT + +class FrameInfo: + __slots__ = ("decompressed_size", "dictionary_id") + decompressed_size: int + dictionary_id: int + def __init__(self, decompressed_size: int, dictionary_id: int) -> None: ... + +def get_frame_info(frame_buffer: ReadableBuffer) -> FrameInfo: ... +def train_dict(samples: Iterable[ReadableBuffer], dict_size: int) -> ZstdDict: ... +def finalize_dict(zstd_dict: ZstdDict, /, samples: Iterable[ReadableBuffer], dict_size: int, level: int) -> ZstdDict: ... +def compress( + data: ReadableBuffer, level: int | None = None, options: Mapping[int, int] | None = None, zstd_dict: ZstdDict | None = None +) -> bytes: ... +def decompress(data: ReadableBuffer, zstd_dict: ZstdDict | None = None, options: Mapping[int, int] | None = None) -> bytes: ... 
+@final +class CompressionParameter(enum.IntEnum): + compression_level = _zstd.ZSTD_c_compressionLevel + window_log = _zstd.ZSTD_c_windowLog + hash_log = _zstd.ZSTD_c_hashLog + chain_log = _zstd.ZSTD_c_chainLog + search_log = _zstd.ZSTD_c_searchLog + min_match = _zstd.ZSTD_c_minMatch + target_length = _zstd.ZSTD_c_targetLength + strategy = _zstd.ZSTD_c_strategy + enable_long_distance_matching = _zstd.ZSTD_c_enableLongDistanceMatching + ldm_hash_log = _zstd.ZSTD_c_ldmHashLog + ldm_min_match = _zstd.ZSTD_c_ldmMinMatch + ldm_bucket_size_log = _zstd.ZSTD_c_ldmBucketSizeLog + ldm_hash_rate_log = _zstd.ZSTD_c_ldmHashRateLog + content_size_flag = _zstd.ZSTD_c_contentSizeFlag + checksum_flag = _zstd.ZSTD_c_checksumFlag + dict_id_flag = _zstd.ZSTD_c_dictIDFlag + nb_workers = _zstd.ZSTD_c_nbWorkers + job_size = _zstd.ZSTD_c_jobSize + overlap_log = _zstd.ZSTD_c_overlapLog + def bounds(self) -> tuple[int, int]: ... + +@final +class DecompressionParameter(enum.IntEnum): + window_log_max = _zstd.ZSTD_d_windowLogMax + def bounds(self) -> tuple[int, int]: ... + +@final +class Strategy(enum.IntEnum): + fast = _zstd.ZSTD_fast + dfast = _zstd.ZSTD_dfast + greedy = _zstd.ZSTD_greedy + lazy = _zstd.ZSTD_lazy + lazy2 = _zstd.ZSTD_lazy2 + btlazy2 = _zstd.ZSTD_btlazy2 + btopt = _zstd.ZSTD_btopt + btultra = _zstd.ZSTD_btultra + btultra2 = _zstd.ZSTD_btultra2 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/zstd/_zstdfile.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/zstd/_zstdfile.pyi new file mode 100644 index 0000000..e67b3d9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/compression/zstd/_zstdfile.pyi @@ -0,0 +1,117 @@ +from _typeshed import ReadableBuffer, StrOrBytesPath, SupportsWrite, WriteableBuffer +from collections.abc import Mapping +from compression._common import _streams +from compression.zstd import ZstdDict +from io import TextIOWrapper, _WrappedBuffer +from typing import Literal, Protocol, overload, type_check_only +from typing_extensions import TypeAlias + +from _zstd import ZstdCompressor, _ZstdCompressorFlushBlock, _ZstdCompressorFlushFrame + +__all__ = ("ZstdFile", "open") + +_ReadBinaryMode: TypeAlias = Literal["r", "rb"] +_WriteBinaryMode: TypeAlias = Literal["w", "wb", "x", "xb", "a", "ab"] +_ReadTextMode: TypeAlias = Literal["rt"] +_WriteTextMode: TypeAlias = Literal["wt", "xt", "at"] + +@type_check_only +class _FileBinaryRead(_streams._Reader, Protocol): + def close(self) -> None: ... + +@type_check_only +class _FileBinaryWrite(SupportsWrite[bytes], Protocol): + def close(self) -> None: ... + +class ZstdFile(_streams.BaseStream): + FLUSH_BLOCK = ZstdCompressor.FLUSH_BLOCK + FLUSH_FRAME = ZstdCompressor.FLUSH_FRAME + + @overload + def __init__( + self, + file: StrOrBytesPath | _FileBinaryRead, + /, + mode: _ReadBinaryMode = "r", + *, + level: None = None, + options: Mapping[int, int] | None = None, + zstd_dict: ZstdDict | None = None, + ) -> None: ... + @overload + def __init__( + self, + file: StrOrBytesPath | _FileBinaryWrite, + /, + mode: _WriteBinaryMode, + *, + level: int | None = None, + options: Mapping[int, int] | None = None, + zstd_dict: ZstdDict | None = None, + ) -> None: ... + def write(self, data: ReadableBuffer, /) -> int: ... + def flush(self, mode: _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame = 1) -> bytes: ... # type: ignore[override] + def read(self, size: int | None = -1) -> bytes: ... + def read1(self, size: int | None = -1) -> bytes: ... 
+ def readinto(self, b: WriteableBuffer) -> int: ... + def readinto1(self, b: WriteableBuffer) -> int: ... + def readline(self, size: int | None = -1) -> bytes: ... + def seek(self, offset: int, whence: int = 0) -> int: ... + def peek(self, size: int = -1) -> bytes: ... + @property + def name(self) -> str | bytes: ... + @property + def mode(self) -> Literal["rb", "wb"]: ... + +@overload +def open( + file: StrOrBytesPath | _FileBinaryRead, + /, + mode: _ReadBinaryMode = "rb", + *, + level: None = None, + options: Mapping[int, int] | None = None, + zstd_dict: ZstdDict | None = None, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> ZstdFile: ... +@overload +def open( + file: StrOrBytesPath | _FileBinaryWrite, + /, + mode: _WriteBinaryMode, + *, + level: int | None = None, + options: Mapping[int, int] | None = None, + zstd_dict: ZstdDict | None = None, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> ZstdFile: ... +@overload +def open( + file: StrOrBytesPath | _WrappedBuffer, + /, + mode: _ReadTextMode, + *, + level: None = None, + options: Mapping[int, int] | None = None, + zstd_dict: ZstdDict | None = None, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> TextIOWrapper: ... +@overload +def open( + file: StrOrBytesPath | _WrappedBuffer, + /, + mode: _WriteTextMode, + *, + level: int | None = None, + options: Mapping[int, int] | None = None, + zstd_dict: ZstdDict | None = None, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> TextIOWrapper: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/__init__.pyi new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/futures/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/futures/__init__.pyi new file mode 100644 index 0000000..ad4d20e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/futures/__init__.pyi @@ -0,0 +1,71 @@ +import sys + +from ._base import ( + ALL_COMPLETED as ALL_COMPLETED, + FIRST_COMPLETED as FIRST_COMPLETED, + FIRST_EXCEPTION as FIRST_EXCEPTION, + BrokenExecutor as BrokenExecutor, + CancelledError as CancelledError, + Executor as Executor, + Future as Future, + InvalidStateError as InvalidStateError, + TimeoutError as TimeoutError, + as_completed as as_completed, + wait as wait, +) +from .process import ProcessPoolExecutor as ProcessPoolExecutor +from .thread import ThreadPoolExecutor as ThreadPoolExecutor + +if sys.version_info >= (3, 14): + from .interpreter import InterpreterPoolExecutor as InterpreterPoolExecutor + + __all__ = [ + "FIRST_COMPLETED", + "FIRST_EXCEPTION", + "ALL_COMPLETED", + "CancelledError", + "TimeoutError", + "InvalidStateError", + "BrokenExecutor", + "Future", + "Executor", + "wait", + "as_completed", + "ProcessPoolExecutor", + "ThreadPoolExecutor", + "InterpreterPoolExecutor", + ] + +elif sys.version_info >= (3, 13): + __all__ = ( + "FIRST_COMPLETED", + "FIRST_EXCEPTION", + "ALL_COMPLETED", + "CancelledError", + "TimeoutError", + "InvalidStateError", + "BrokenExecutor", + "Future", + "Executor", + "wait", + "as_completed", + "ProcessPoolExecutor", + "ThreadPoolExecutor", + ) +else: + __all__ = ( + "FIRST_COMPLETED", + "FIRST_EXCEPTION", + "ALL_COMPLETED", + "CancelledError", 
+ "TimeoutError", + "BrokenExecutor", + "Future", + "Executor", + "wait", + "as_completed", + "ProcessPoolExecutor", + "ThreadPoolExecutor", + ) + +def __dir__() -> tuple[str, ...]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/futures/_base.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/futures/_base.pyi new file mode 100644 index 0000000..be48a6e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/futures/_base.pyi @@ -0,0 +1,119 @@ +import sys +import threading +from _typeshed import Unused +from collections.abc import Callable, Iterable, Iterator +from logging import Logger +from types import GenericAlias, TracebackType +from typing import Any, Final, Generic, NamedTuple, Protocol, TypeVar, type_check_only +from typing_extensions import ParamSpec, Self + +FIRST_COMPLETED: Final = "FIRST_COMPLETED" +FIRST_EXCEPTION: Final = "FIRST_EXCEPTION" +ALL_COMPLETED: Final = "ALL_COMPLETED" +PENDING: Final = "PENDING" +RUNNING: Final = "RUNNING" +CANCELLED: Final = "CANCELLED" +CANCELLED_AND_NOTIFIED: Final = "CANCELLED_AND_NOTIFIED" +FINISHED: Final = "FINISHED" +_STATE_TO_DESCRIPTION_MAP: Final[dict[str, str]] +LOGGER: Logger + +class Error(Exception): ... +class CancelledError(Error): ... + +if sys.version_info >= (3, 11): + from builtins import TimeoutError as TimeoutError +else: + class TimeoutError(Error): ... + +class InvalidStateError(Error): ... +class BrokenExecutor(RuntimeError): ... + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_P = ParamSpec("_P") + +class Future(Generic[_T]): + _condition: threading.Condition + _state: str + _result: _T | None + _exception: BaseException | None + _waiters: list[_Waiter] + def cancel(self) -> bool: ... + def cancelled(self) -> bool: ... + def running(self) -> bool: ... + def done(self) -> bool: ... + def add_done_callback(self, fn: Callable[[Future[_T]], object]) -> None: ... + def result(self, timeout: float | None = None) -> _T: ... + def set_running_or_notify_cancel(self) -> bool: ... + def set_result(self, result: _T) -> None: ... + def exception(self, timeout: float | None = None) -> BaseException | None: ... + def set_exception(self, exception: BaseException | None) -> None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +class Executor: + def submit(self, fn: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: ... + if sys.version_info >= (3, 14): + def map( + self, + fn: Callable[..., _T], + *iterables: Iterable[Any], + timeout: float | None = None, + chunksize: int = 1, + buffersize: int | None = None, + ) -> Iterator[_T]: ... + else: + def map( + self, fn: Callable[..., _T], *iterables: Iterable[Any], timeout: float | None = None, chunksize: int = 1 + ) -> Iterator[_T]: ... + + def shutdown(self, wait: bool = True, *, cancel_futures: bool = False) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> bool | None: ... + +@type_check_only +class _AsCompletedFuture(Protocol[_T_co]): + # as_completed only mutates non-generic aspects of passed Futures and does not do any nominal + # checks. Therefore, we can use a Protocol here to allow as_completed to act covariantly. 
+ # See the tests for concurrent.futures + _condition: threading.Condition + _state: str + _waiters: list[_Waiter] + # Not used by as_completed, but needed to propagate the generic type + def result(self, timeout: float | None = None) -> _T_co: ... + +def as_completed(fs: Iterable[_AsCompletedFuture[_T]], timeout: float | None = None) -> Iterator[Future[_T]]: ... + +class DoneAndNotDoneFutures(NamedTuple, Generic[_T]): + done: set[Future[_T]] + not_done: set[Future[_T]] + +def wait( + fs: Iterable[Future[_T]], timeout: float | None = None, return_when: str = "ALL_COMPLETED" +) -> DoneAndNotDoneFutures[_T]: ... + +class _Waiter: + event: threading.Event + finished_futures: list[Future[Any]] + def add_result(self, future: Future[Any]) -> None: ... + def add_exception(self, future: Future[Any]) -> None: ... + def add_cancelled(self, future: Future[Any]) -> None: ... + +class _AsCompletedWaiter(_Waiter): + lock: threading.Lock + +class _FirstCompletedWaiter(_Waiter): ... + +class _AllCompletedWaiter(_Waiter): + num_pending_calls: int + stop_on_exception: bool + lock: threading.Lock + def __init__(self, num_pending_calls: int, stop_on_exception: bool) -> None: ... + +class _AcquireFutures: + futures: Iterable[Future[Any]] + def __init__(self, futures: Iterable[Future[Any]]) -> None: ... + def __enter__(self) -> None: ... + def __exit__(self, *args: Unused) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/futures/interpreter.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/futures/interpreter.pyi new file mode 100644 index 0000000..e101022 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/futures/interpreter.pyi @@ -0,0 +1,79 @@ +import sys +from collections.abc import Callable +from concurrent.futures import ThreadPoolExecutor +from typing import Any, Literal, Protocol, overload, type_check_only +from typing_extensions import ParamSpec, Self, TypeAlias, TypeVar, TypeVarTuple, Unpack + +_Task: TypeAlias = tuple[bytes, Literal["function", "script"]] +_Ts = TypeVarTuple("_Ts") +_P = ParamSpec("_P") +_R = TypeVar("_R") + +@type_check_only +class _TaskFunc(Protocol): + @overload + def __call__(self, fn: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs) -> tuple[bytes, Literal["function"]]: ... + @overload + def __call__(self, fn: str) -> tuple[bytes, Literal["script"]]: ... + +if sys.version_info >= (3, 14): + from concurrent.futures.thread import BrokenThreadPool, WorkerContext as ThreadWorkerContext + from concurrent.interpreters import Interpreter, Queue + + def do_call(results: Queue, func: Callable[..., _R], args: tuple[Any, ...], kwargs: dict[str, Any]) -> _R: ... + + class WorkerContext(ThreadWorkerContext): + interp: Interpreter | None + results: Queue | None + @overload # type: ignore[override] + @classmethod + def prepare( + cls, initializer: Callable[[Unpack[_Ts]], object], initargs: tuple[Unpack[_Ts]] + ) -> tuple[Callable[[], Self], _TaskFunc]: ... + @overload + @classmethod + def prepare(cls, initializer: Callable[[], object], initargs: tuple[()]) -> tuple[Callable[[], Self], _TaskFunc]: ... + def __init__(self, initdata: _Task) -> None: ... + def __del__(self) -> None: ... + def run(self, task: _Task) -> None: ... # type: ignore[override] + + class BrokenInterpreterPool(BrokenThreadPool): ... 
+ + class InterpreterPoolExecutor(ThreadPoolExecutor): + BROKEN: type[BrokenInterpreterPool] + + @overload # type: ignore[override] + @classmethod + def prepare_context( + cls, initializer: Callable[[], object], initargs: tuple[()] + ) -> tuple[Callable[[], WorkerContext], _TaskFunc]: ... + @overload + @classmethod + def prepare_context( + cls, initializer: Callable[[Unpack[_Ts]], object], initargs: tuple[Unpack[_Ts]] + ) -> tuple[Callable[[], WorkerContext], _TaskFunc]: ... + @overload + def __init__( + self, + max_workers: int | None = None, + thread_name_prefix: str = "", + initializer: Callable[[], object] | None = None, + initargs: tuple[()] = (), + ) -> None: ... + @overload + def __init__( + self, + max_workers: int | None = None, + thread_name_prefix: str = "", + *, + initializer: Callable[[Unpack[_Ts]], object], + initargs: tuple[Unpack[_Ts]], + ) -> None: ... + @overload + def __init__( + self, + max_workers: int | None, + thread_name_prefix: str, + initializer: Callable[[Unpack[_Ts]], object], + initargs: tuple[Unpack[_Ts]], + ) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/futures/process.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/futures/process.pyi new file mode 100644 index 0000000..071b3ab --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/futures/process.pyi @@ -0,0 +1,242 @@ +import sys +from collections.abc import Callable, Generator, Iterable, Mapping, MutableMapping, MutableSequence +from multiprocessing.connection import Connection +from multiprocessing.context import BaseContext, Process +from multiprocessing.queues import Queue, SimpleQueue +from threading import Lock, Semaphore, Thread +from types import TracebackType +from typing import Any, Final, Generic, TypeVar, overload +from typing_extensions import TypeVarTuple, Unpack +from weakref import ref + +from ._base import BrokenExecutor, Executor, Future + +_T = TypeVar("_T") +_Ts = TypeVarTuple("_Ts") + +_threads_wakeups: MutableMapping[Any, Any] +_global_shutdown: bool + +class _ThreadWakeup: + _closed: bool + # Any: Unused send and recv methods + _reader: Connection[Any, Any] + _writer: Connection[Any, Any] + def close(self) -> None: ... + def wakeup(self) -> None: ... + def clear(self) -> None: ... + +def _python_exit() -> None: ... + +EXTRA_QUEUED_CALLS: Final = 1 + +_MAX_WINDOWS_WORKERS: Final = 61 + +class _RemoteTraceback(Exception): + tb: str + def __init__(self, tb: TracebackType) -> None: ... + +class _ExceptionWithTraceback: + exc: BaseException + tb: TracebackType + def __init__(self, exc: BaseException, tb: TracebackType) -> None: ... + def __reduce__(self) -> str | tuple[Any, ...]: ... + +def _rebuild_exc(exc: Exception, tb: str) -> Exception: ... + +class _WorkItem(Generic[_T]): + future: Future[_T] + fn: Callable[..., _T] + args: Iterable[Any] + kwargs: Mapping[str, Any] + def __init__(self, future: Future[_T], fn: Callable[..., _T], args: Iterable[Any], kwargs: Mapping[str, Any]) -> None: ... + +class _ResultItem: + work_id: int + exception: Exception + result: Any + if sys.version_info >= (3, 11): + exit_pid: int | None + def __init__( + self, work_id: int, exception: Exception | None = None, result: Any | None = None, exit_pid: int | None = None + ) -> None: ... + else: + def __init__(self, work_id: int, exception: Exception | None = None, result: Any | None = None) -> None: ... 
+ +class _CallItem: + work_id: int + fn: Callable[..., Any] + args: Iterable[Any] + kwargs: Mapping[str, Any] + def __init__(self, work_id: int, fn: Callable[..., Any], args: Iterable[Any], kwargs: Mapping[str, Any]) -> None: ... + +class _SafeQueue(Queue[Future[Any]]): + pending_work_items: dict[int, _WorkItem[Any]] + if sys.version_info < (3, 12): + shutdown_lock: Lock + thread_wakeup: _ThreadWakeup + if sys.version_info >= (3, 12): + def __init__( + self, + max_size: int | None = 0, + *, + ctx: BaseContext, + pending_work_items: dict[int, _WorkItem[Any]], + thread_wakeup: _ThreadWakeup, + ) -> None: ... + else: + def __init__( + self, + max_size: int | None = 0, + *, + ctx: BaseContext, + pending_work_items: dict[int, _WorkItem[Any]], + shutdown_lock: Lock, + thread_wakeup: _ThreadWakeup, + ) -> None: ... + + def _on_queue_feeder_error(self, e: Exception, obj: _CallItem) -> None: ... + +def _get_chunks(*iterables: Any, chunksize: int) -> Generator[tuple[Any, ...], None, None]: ... +def _process_chunk(fn: Callable[..., _T], chunk: Iterable[tuple[Any, ...]]) -> list[_T]: ... + +if sys.version_info >= (3, 11): + def _sendback_result( + result_queue: SimpleQueue[_WorkItem[Any]], + work_id: int, + result: Any | None = None, + exception: Exception | None = None, + exit_pid: int | None = None, + ) -> None: ... + +else: + def _sendback_result( + result_queue: SimpleQueue[_WorkItem[Any]], work_id: int, result: Any | None = None, exception: Exception | None = None + ) -> None: ... + +if sys.version_info >= (3, 11): + def _process_worker( + call_queue: Queue[_CallItem], + result_queue: SimpleQueue[_ResultItem], + initializer: Callable[[Unpack[_Ts]], object] | None, + initargs: tuple[Unpack[_Ts]], + max_tasks: int | None = None, + ) -> None: ... + +else: + def _process_worker( + call_queue: Queue[_CallItem], + result_queue: SimpleQueue[_ResultItem], + initializer: Callable[[Unpack[_Ts]], object] | None, + initargs: tuple[Unpack[_Ts]], + ) -> None: ... + +class _ExecutorManagerThread(Thread): + thread_wakeup: _ThreadWakeup + shutdown_lock: Lock + executor_reference: ref[Any] + processes: MutableMapping[int, Process] + call_queue: Queue[_CallItem] + result_queue: SimpleQueue[_ResultItem] + work_ids_queue: Queue[int] + pending_work_items: dict[int, _WorkItem[Any]] + def __init__(self, executor: ProcessPoolExecutor) -> None: ... + def run(self) -> None: ... + def add_call_item_to_queue(self) -> None: ... + def wait_result_broken_or_wakeup(self) -> tuple[Any, bool, str]: ... + def process_result_item(self, result_item: int | _ResultItem) -> None: ... + def is_shutting_down(self) -> bool: ... + def terminate_broken(self, cause: str) -> None: ... + def flag_executor_shutting_down(self) -> None: ... + def shutdown_workers(self) -> None: ... + def join_executor_internals(self) -> None: ... + def get_n_children_alive(self) -> int: ... + +_system_limits_checked: bool +_system_limited: bool | None + +def _check_system_limits() -> None: ... +def _chain_from_iterable_of_lists(iterable: Iterable[MutableSequence[Any]]) -> Any: ... + +class BrokenProcessPool(BrokenExecutor): ... + +class ProcessPoolExecutor(Executor): + _mp_context: BaseContext | None + _initializer: Callable[..., None] | None + _initargs: tuple[Any, ...] 
+ _executor_manager_thread: _ThreadWakeup + _processes: MutableMapping[int, Process] + _shutdown_thread: bool + _shutdown_lock: Lock + _idle_worker_semaphore: Semaphore + _broken: bool + _queue_count: int + _pending_work_items: dict[int, _WorkItem[Any]] + _cancel_pending_futures: bool + _executor_manager_thread_wakeup: _ThreadWakeup + _result_queue: SimpleQueue[Any] + _work_ids: Queue[Any] + if sys.version_info >= (3, 11): + @overload + def __init__( + self, + max_workers: int | None = None, + mp_context: BaseContext | None = None, + initializer: Callable[[], object] | None = None, + initargs: tuple[()] = (), + *, + max_tasks_per_child: int | None = None, + ) -> None: ... + @overload + def __init__( + self, + max_workers: int | None = None, + mp_context: BaseContext | None = None, + *, + initializer: Callable[[Unpack[_Ts]], object], + initargs: tuple[Unpack[_Ts]], + max_tasks_per_child: int | None = None, + ) -> None: ... + @overload + def __init__( + self, + max_workers: int | None, + mp_context: BaseContext | None, + initializer: Callable[[Unpack[_Ts]], object], + initargs: tuple[Unpack[_Ts]], + *, + max_tasks_per_child: int | None = None, + ) -> None: ... + else: + @overload + def __init__( + self, + max_workers: int | None = None, + mp_context: BaseContext | None = None, + initializer: Callable[[], object] | None = None, + initargs: tuple[()] = (), + ) -> None: ... + @overload + def __init__( + self, + max_workers: int | None = None, + mp_context: BaseContext | None = None, + *, + initializer: Callable[[Unpack[_Ts]], object], + initargs: tuple[Unpack[_Ts]], + ) -> None: ... + @overload + def __init__( + self, + max_workers: int | None, + mp_context: BaseContext | None, + initializer: Callable[[Unpack[_Ts]], object], + initargs: tuple[Unpack[_Ts]], + ) -> None: ... + + def _start_executor_manager_thread(self) -> None: ... + def _adjust_process_count(self) -> None: ... + + if sys.version_info >= (3, 14): + def kill_workers(self) -> None: ... + def terminate_workers(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/futures/thread.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/futures/thread.pyi new file mode 100644 index 0000000..50a6a9c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/futures/thread.pyi @@ -0,0 +1,140 @@ +import queue +import sys +from collections.abc import Callable, Iterable, Mapping, Set as AbstractSet +from threading import Lock, Semaphore, Thread +from types import GenericAlias +from typing import Any, Generic, Protocol, TypeVar, overload, type_check_only +from typing_extensions import Self, TypeAlias, TypeVarTuple, Unpack +from weakref import ref + +from ._base import BrokenExecutor, Executor, Future + +_Ts = TypeVarTuple("_Ts") + +_threads_queues: Mapping[Any, Any] +_shutdown: bool +_global_shutdown_lock: Lock + +def _python_exit() -> None: ... + +_S = TypeVar("_S") + +_Task: TypeAlias = tuple[Callable[..., Any], tuple[Any, ...], dict[str, Any]] + +_C = TypeVar("_C", bound=Callable[..., object]) +_KT = TypeVar("_KT", bound=str) +_VT = TypeVar("_VT") + +@type_check_only +class _ResolveTaskFunc(Protocol): + def __call__( + self, func: _C, args: tuple[Unpack[_Ts]], kwargs: dict[_KT, _VT] + ) -> tuple[_C, tuple[Unpack[_Ts]], dict[_KT, _VT]]: ... 
+ +if sys.version_info >= (3, 14): + class WorkerContext: + @overload + @classmethod + def prepare( + cls, initializer: Callable[[Unpack[_Ts]], object], initargs: tuple[Unpack[_Ts]] + ) -> tuple[Callable[[], Self], _ResolveTaskFunc]: ... + @overload + @classmethod + def prepare( + cls, initializer: Callable[[], object], initargs: tuple[()] + ) -> tuple[Callable[[], Self], _ResolveTaskFunc]: ... + @overload + def __init__(self, initializer: Callable[[Unpack[_Ts]], object], initargs: tuple[Unpack[_Ts]]) -> None: ... + @overload + def __init__(self, initializer: Callable[[], object], initargs: tuple[()]) -> None: ... + def initialize(self) -> None: ... + def finalize(self) -> None: ... + def run(self, task: _Task) -> None: ... + +if sys.version_info >= (3, 14): + class _WorkItem(Generic[_S]): + future: Future[Any] + task: _Task + def __init__(self, future: Future[Any], task: _Task) -> None: ... + def run(self, ctx: WorkerContext) -> None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + + def _worker(executor_reference: ref[Any], ctx: WorkerContext, work_queue: queue.SimpleQueue[Any]) -> None: ... + +else: + class _WorkItem(Generic[_S]): + future: Future[_S] + fn: Callable[..., _S] + args: Iterable[Any] + kwargs: Mapping[str, Any] + def __init__(self, future: Future[_S], fn: Callable[..., _S], args: Iterable[Any], kwargs: Mapping[str, Any]) -> None: ... + def run(self) -> None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + + def _worker( + executor_reference: ref[Any], + work_queue: queue.SimpleQueue[Any], + initializer: Callable[[Unpack[_Ts]], object], + initargs: tuple[Unpack[_Ts]], + ) -> None: ... + +class BrokenThreadPool(BrokenExecutor): ... + +class ThreadPoolExecutor(Executor): + if sys.version_info >= (3, 14): + BROKEN: type[BrokenThreadPool] + + _max_workers: int + _idle_semaphore: Semaphore + _threads: AbstractSet[Thread] + _broken: bool + _shutdown: bool + _shutdown_lock: Lock + _thread_name_prefix: str | None + if sys.version_info >= (3, 14): + _create_worker_context: Callable[[], WorkerContext] + _resolve_work_item_task: _ResolveTaskFunc + else: + _initializer: Callable[..., None] | None + _initargs: tuple[Any, ...] + _work_queue: queue.SimpleQueue[_WorkItem[Any]] + + if sys.version_info >= (3, 14): + @overload + @classmethod + def prepare_context( + cls, initializer: Callable[[], object], initargs: tuple[()] + ) -> tuple[Callable[[], WorkerContext], _ResolveTaskFunc]: ... + @overload + @classmethod + def prepare_context( + cls, initializer: Callable[[Unpack[_Ts]], object], initargs: tuple[Unpack[_Ts]] + ) -> tuple[Callable[[], WorkerContext], _ResolveTaskFunc]: ... + + @overload + def __init__( + self, + max_workers: int | None = None, + thread_name_prefix: str = "", + initializer: Callable[[], object] | None = None, + initargs: tuple[()] = (), + ) -> None: ... + @overload + def __init__( + self, + max_workers: int | None = None, + thread_name_prefix: str = "", + *, + initializer: Callable[[Unpack[_Ts]], object], + initargs: tuple[Unpack[_Ts]], + ) -> None: ... + @overload + def __init__( + self, + max_workers: int | None, + thread_name_prefix: str, + initializer: Callable[[Unpack[_Ts]], object], + initargs: tuple[Unpack[_Ts]], + ) -> None: ... + def _adjust_thread_count(self) -> None: ... + def _initializer_failed(self) -> None: ... 
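The concurrent.futures stubs above (Future, Executor, ThreadPoolExecutor, ProcessPoolExecutor) all describe the same submit/map/shutdown surface; a short sketch of the usual driving pattern, using a thread pool and a placeholder work function:

from concurrent.futures import ThreadPoolExecutor, as_completed

def square(n: int) -> int:
    # stand-in for real (typically I/O-bound) work
    return n * n

with ThreadPoolExecutor(max_workers=4) as pool:
    futures = {pool.submit(square, n): n for n in range(8)}
    for fut in as_completed(futures):
        print(futures[fut], "->", fut.result())

    # Executor.map yields results in input order, unlike as_completed.
    ordered = list(pool.map(square, range(8)))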
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/interpreters/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/interpreters/__init__.pyi new file mode 100644 index 0000000..3839e6b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/interpreters/__init__.pyi @@ -0,0 +1,68 @@ +import sys +import threading +import types +from collections.abc import Callable +from typing import Any, Literal, TypeVar +from typing_extensions import ParamSpec, Self + +if sys.version_info >= (3, 13): # needed to satisfy pyright checks for Python <3.13 + from _interpreters import ( + InterpreterError as InterpreterError, + InterpreterNotFoundError as InterpreterNotFoundError, + NotShareableError as NotShareableError, + _SharedDict, + _Whence, + is_shareable as is_shareable, + ) + + from ._queues import Queue as Queue, QueueEmpty as QueueEmpty, QueueFull as QueueFull, create as create_queue + + __all__ = [ + "ExecutionFailed", + "Interpreter", + "InterpreterError", + "InterpreterNotFoundError", + "NotShareableError", + "Queue", + "QueueEmpty", + "QueueFull", + "create", + "create_queue", + "get_current", + "get_main", + "is_shareable", + "list_all", + ] + + _R = TypeVar("_R") + _P = ParamSpec("_P") + + class ExecutionFailed(InterpreterError): + excinfo: types.SimpleNamespace + + def __init__(self, excinfo: types.SimpleNamespace) -> None: ... + + def create() -> Interpreter: ... + def list_all() -> list[Interpreter]: ... + def get_current() -> Interpreter: ... + def get_main() -> Interpreter: ... + + class Interpreter: + def __new__(cls, id: int, /, _whence: _Whence | None = None, _ownsref: bool | None = None) -> Self: ... + def __reduce__(self) -> tuple[type[Self], int]: ... + def __hash__(self) -> int: ... + def __del__(self) -> None: ... + @property + def id(self) -> int: ... + @property + def whence( + self, + ) -> Literal["unknown", "runtime init", "legacy C-API", "C-API", "cross-interpreter C-API", "_interpreters module"]: ... + def is_running(self) -> bool: ... + def close(self) -> None: ... + def prepare_main( + self, ns: _SharedDict | None = None, /, **kwargs: Any + ) -> None: ... # kwargs has same value restrictions as _SharedDict + def exec(self, code: str | types.CodeType | Callable[[], object], /) -> None: ... + def call(self, callable: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: ... + def call_in_thread(self, callable: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> threading.Thread: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/interpreters/_crossinterp.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/interpreters/_crossinterp.pyi new file mode 100644 index 0000000..7cf1ea3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/interpreters/_crossinterp.pyi @@ -0,0 +1,30 @@ +import sys +from collections.abc import Callable +from typing import Final, NewType +from typing_extensions import Never, Self, TypeAlias + +if sys.version_info >= (3, 13): # needed to satisfy pyright checks for Python <3.13 + from _interpqueues import _UnboundOp + + class ItemInterpreterDestroyed(Exception): ... + # Actually a descriptor that behaves similarly to classmethod but prevents + # access from instances. + classonly = classmethod + + class UnboundItem: + __slots__ = () + def __new__(cls) -> Never: ... + @classonly + def singleton(cls, kind: str, module: str, name: str = "UNBOUND") -> Self: ... 
+ + # Sentinel types and alias that don't exist at runtime. + _UnboundErrorType = NewType("_UnboundErrorType", object) + _UnboundRemoveType = NewType("_UnboundRemoveType", object) + _AnyUnbound: TypeAlias = _UnboundErrorType | _UnboundRemoveType | UnboundItem + + UNBOUND_ERROR: Final[_UnboundErrorType] + UNBOUND_REMOVE: Final[_UnboundRemoveType] + UNBOUND: Final[UnboundItem] # analogous to UNBOUND_REPLACE in C + + def serialize_unbound(unbound: _AnyUnbound) -> tuple[_UnboundOp]: ... + def resolve_unbound(flag: _UnboundOp, exctype_destroyed: Callable[[str], BaseException]) -> UnboundItem: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/interpreters/_queues.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/interpreters/_queues.pyi new file mode 100644 index 0000000..bdf08d9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/concurrent/interpreters/_queues.pyi @@ -0,0 +1,74 @@ +import queue +import sys +from typing import Final, SupportsIndex +from typing_extensions import Self + +if sys.version_info >= (3, 13): # needed to satisfy pyright checks for Python <3.13 + from _interpqueues import QueueError as QueueError, QueueNotFoundError as QueueNotFoundError + + from . import _crossinterp + from ._crossinterp import UNBOUND_ERROR as UNBOUND_ERROR, UNBOUND_REMOVE as UNBOUND_REMOVE, UnboundItem, _AnyUnbound + + __all__ = [ + "UNBOUND", + "UNBOUND_ERROR", + "UNBOUND_REMOVE", + "ItemInterpreterDestroyed", + "Queue", + "QueueEmpty", + "QueueError", + "QueueFull", + "QueueNotFoundError", + "create", + "list_all", + ] + + class QueueEmpty(QueueError, queue.Empty): ... + class QueueFull(QueueError, queue.Full): ... + class ItemInterpreterDestroyed(QueueError, _crossinterp.ItemInterpreterDestroyed): ... + UNBOUND: Final[UnboundItem] + + def create(maxsize: int = 0, *, unbounditems: _AnyUnbound = ...) -> Queue: ... + def list_all() -> list[Queue]: ... + + class Queue: + def __new__(cls, id: int, /) -> Self: ... + def __del__(self) -> None: ... + def __hash__(self) -> int: ... + def __reduce__(self) -> tuple[type[Self], int]: ... + @property + def id(self) -> int: ... + @property + def unbounditems(self) -> _AnyUnbound: ... + @property + def maxsize(self) -> int: ... + def empty(self) -> bool: ... + def full(self) -> bool: ... + def qsize(self) -> int: ... + if sys.version_info >= (3, 14): + def put( + self, + obj: object, + block: bool = True, + timeout: SupportsIndex | None = None, + *, + unbounditems: _AnyUnbound | None = None, + _delay: float = 0.01, + ) -> None: ... + else: + def put( + self, + obj: object, + timeout: SupportsIndex | None = None, + *, + unbounditems: _AnyUnbound | None = None, + _delay: float = 0.01, + ) -> None: ... + + def put_nowait(self, obj: object, *, unbounditems: _AnyUnbound | None = None) -> None: ... + if sys.version_info >= (3, 14): + def get(self, block: bool = True, timeout: SupportsIndex | None = None, *, _delay: float = 0.01) -> object: ... + else: + def get(self, timeout: SupportsIndex | None = None, *, _delay: float = 0.01) -> object: ... + + def get_nowait(self) -> object: ... 
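The concurrent.interpreters stubs above are gated behind sys.version_info checks because the module only ships with very recent CPython (3.14); the sketch below follows the names exposed in the stub (create, exec, create_queue) and is a hedged illustration for Python 3.14+ only:

from concurrent import interpreters   # Python 3.14+ only

interp = interpreters.create()         # start a fresh subinterpreter
interp.exec("x = 1 + 1")               # run a code string inside it

q = interpreters.create_queue()        # cross-interpreter queue
q.put("hello")                         # only shareable objects may cross
print(q.get())

interp.close()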
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/configparser.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/configparser.pyi new file mode 100644 index 0000000..1909d80 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/configparser.pyi @@ -0,0 +1,486 @@ +import sys +from _typeshed import MaybeNone, StrOrBytesPath, SupportsWrite +from collections.abc import Callable, ItemsView, Iterable, Iterator, Mapping, MutableMapping, Sequence +from re import Pattern +from typing import Any, ClassVar, Final, Literal, TypeVar, overload, type_check_only +from typing_extensions import TypeAlias, deprecated + +if sys.version_info >= (3, 14): + __all__ = ( + "NoSectionError", + "DuplicateOptionError", + "DuplicateSectionError", + "NoOptionError", + "InterpolationError", + "InterpolationDepthError", + "InterpolationMissingOptionError", + "InterpolationSyntaxError", + "ParsingError", + "MissingSectionHeaderError", + "MultilineContinuationError", + "UnnamedSectionDisabledError", + "InvalidWriteError", + "ConfigParser", + "RawConfigParser", + "Interpolation", + "BasicInterpolation", + "ExtendedInterpolation", + "SectionProxy", + "ConverterMapping", + "DEFAULTSECT", + "MAX_INTERPOLATION_DEPTH", + "UNNAMED_SECTION", + ) +elif sys.version_info >= (3, 13): + __all__ = ( + "NoSectionError", + "DuplicateOptionError", + "DuplicateSectionError", + "NoOptionError", + "InterpolationError", + "InterpolationDepthError", + "InterpolationMissingOptionError", + "InterpolationSyntaxError", + "ParsingError", + "MissingSectionHeaderError", + "ConfigParser", + "RawConfigParser", + "Interpolation", + "BasicInterpolation", + "ExtendedInterpolation", + "SectionProxy", + "ConverterMapping", + "DEFAULTSECT", + "MAX_INTERPOLATION_DEPTH", + "UNNAMED_SECTION", + "MultilineContinuationError", + ) +elif sys.version_info >= (3, 12): + __all__ = ( + "NoSectionError", + "DuplicateOptionError", + "DuplicateSectionError", + "NoOptionError", + "InterpolationError", + "InterpolationDepthError", + "InterpolationMissingOptionError", + "InterpolationSyntaxError", + "ParsingError", + "MissingSectionHeaderError", + "ConfigParser", + "RawConfigParser", + "Interpolation", + "BasicInterpolation", + "ExtendedInterpolation", + "LegacyInterpolation", + "SectionProxy", + "ConverterMapping", + "DEFAULTSECT", + "MAX_INTERPOLATION_DEPTH", + ) +else: + __all__ = [ + "NoSectionError", + "DuplicateOptionError", + "DuplicateSectionError", + "NoOptionError", + "InterpolationError", + "InterpolationDepthError", + "InterpolationMissingOptionError", + "InterpolationSyntaxError", + "ParsingError", + "MissingSectionHeaderError", + "ConfigParser", + "SafeConfigParser", + "RawConfigParser", + "Interpolation", + "BasicInterpolation", + "ExtendedInterpolation", + "LegacyInterpolation", + "SectionProxy", + "ConverterMapping", + "DEFAULTSECT", + "MAX_INTERPOLATION_DEPTH", + ] + +if sys.version_info >= (3, 13): + @type_check_only + class _UNNAMED_SECTION: ... + + UNNAMED_SECTION: _UNNAMED_SECTION + + _SectionName: TypeAlias = str | _UNNAMED_SECTION + # A list of sections can only include an unnamed section if the parser was initialized with + # allow_unnamed_section=True. Any prevents users from having to use explicit + # type checks if allow_unnamed_section is False (the default). 
+ _SectionNameList: TypeAlias = list[Any] +else: + _SectionName: TypeAlias = str + _SectionNameList: TypeAlias = list[str] + +_Section: TypeAlias = Mapping[str, str] +_Parser: TypeAlias = MutableMapping[str, _Section] +_ConverterCallback: TypeAlias = Callable[[str], Any] +_ConvertersMap: TypeAlias = dict[str, _ConverterCallback] +_T = TypeVar("_T") + +DEFAULTSECT: Final = "DEFAULT" +MAX_INTERPOLATION_DEPTH: Final = 10 + +class Interpolation: + def before_get(self, parser: _Parser, section: _SectionName, option: str, value: str, defaults: _Section) -> str: ... + def before_set(self, parser: _Parser, section: _SectionName, option: str, value: str) -> str: ... + def before_read(self, parser: _Parser, section: _SectionName, option: str, value: str) -> str: ... + def before_write(self, parser: _Parser, section: _SectionName, option: str, value: str) -> str: ... + +class BasicInterpolation(Interpolation): ... +class ExtendedInterpolation(Interpolation): ... + +if sys.version_info < (3, 13): + @deprecated( + "Deprecated since Python 3.2; removed in Python 3.13. Use `BasicInterpolation` or `ExtendedInterpolation` instead." + ) + class LegacyInterpolation(Interpolation): + def before_get(self, parser: _Parser, section: _SectionName, option: str, value: str, vars: _Section) -> str: ... + +class RawConfigParser(_Parser): + _SECT_TMPL: ClassVar[str] # undocumented + _OPT_TMPL: ClassVar[str] # undocumented + _OPT_NV_TMPL: ClassVar[str] # undocumented + + SECTCRE: Pattern[str] + OPTCRE: ClassVar[Pattern[str]] + OPTCRE_NV: ClassVar[Pattern[str]] # undocumented + NONSPACECRE: ClassVar[Pattern[str]] # undocumented + + BOOLEAN_STATES: ClassVar[Mapping[str, bool]] # undocumented + default_section: str + if sys.version_info >= (3, 13): + @overload + def __init__( + self, + defaults: Mapping[str, str | None] | None = None, + dict_type: type[Mapping[str, str]] = ..., + *, + allow_no_value: Literal[True], + delimiters: Sequence[str] = ("=", ":"), + comment_prefixes: Sequence[str] = ("#", ";"), + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", + interpolation: Interpolation | None = ..., + converters: _ConvertersMap = ..., + allow_unnamed_section: bool = False, + ) -> None: ... + @overload + def __init__( + self, + defaults: Mapping[str, str | None] | None, + dict_type: type[Mapping[str, str]], + allow_no_value: Literal[True], + *, + delimiters: Sequence[str] = ("=", ":"), + comment_prefixes: Sequence[str] = ("#", ";"), + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", + interpolation: Interpolation | None = ..., + converters: _ConvertersMap = ..., + allow_unnamed_section: bool = False, + ) -> None: ... + @overload + def __init__( + self, + defaults: _Section | None = None, + dict_type: type[Mapping[str, str]] = ..., + allow_no_value: bool = False, + *, + delimiters: Sequence[str] = ("=", ":"), + comment_prefixes: Sequence[str] = ("#", ";"), + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", + interpolation: Interpolation | None = ..., + converters: _ConvertersMap = ..., + allow_unnamed_section: bool = False, + ) -> None: ... 
+ else: + @overload + def __init__( + self, + defaults: Mapping[str, str | None] | None = None, + dict_type: type[Mapping[str, str]] = ..., + *, + allow_no_value: Literal[True], + delimiters: Sequence[str] = ("=", ":"), + comment_prefixes: Sequence[str] = ("#", ";"), + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", + interpolation: Interpolation | None = ..., + converters: _ConvertersMap = ..., + ) -> None: ... + @overload + def __init__( + self, + defaults: Mapping[str, str | None] | None, + dict_type: type[Mapping[str, str]], + allow_no_value: Literal[True], + *, + delimiters: Sequence[str] = ("=", ":"), + comment_prefixes: Sequence[str] = ("#", ";"), + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", + interpolation: Interpolation | None = ..., + converters: _ConvertersMap = ..., + ) -> None: ... + @overload + def __init__( + self, + defaults: _Section | None = None, + dict_type: type[Mapping[str, str]] = ..., + allow_no_value: bool = False, + *, + delimiters: Sequence[str] = ("=", ":"), + comment_prefixes: Sequence[str] = ("#", ";"), + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", + interpolation: Interpolation | None = ..., + converters: _ConvertersMap = ..., + ) -> None: ... + + def __len__(self) -> int: ... + def __getitem__(self, key: _SectionName) -> SectionProxy: ... + def __setitem__(self, key: _SectionName, value: _Section) -> None: ... + def __delitem__(self, key: _SectionName) -> None: ... + def __iter__(self) -> Iterator[str]: ... + def __contains__(self, key: object) -> bool: ... + def defaults(self) -> _Section: ... + def sections(self) -> _SectionNameList: ... + def add_section(self, section: _SectionName) -> None: ... + def has_section(self, section: _SectionName) -> bool: ... + def options(self, section: _SectionName) -> list[str]: ... + def has_option(self, section: _SectionName, option: str) -> bool: ... + def read(self, filenames: StrOrBytesPath | Iterable[StrOrBytesPath], encoding: str | None = None) -> list[str]: ... + def read_file(self, f: Iterable[str], source: str | None = None) -> None: ... + def read_string(self, string: str, source: str = "") -> None: ... + def read_dict(self, dictionary: Mapping[str, Mapping[str, Any]], source: str = "") -> None: ... + if sys.version_info < (3, 12): + @deprecated("Deprecated since Python 3.2; removed in Python 3.12. Use `parser.read_file()` instead.") + def readfp(self, fp: Iterable[str], filename: str | None = None) -> None: ... + # These get* methods are partially applied (with the same names) in + # SectionProxy; the stubs should be kept updated together + @overload + def getint(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> int: ... + @overload + def getint( + self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T = ... + ) -> int | _T: ... + @overload + def getfloat(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> float: ... + @overload + def getfloat( + self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T = ... + ) -> float | _T: ... 
+ @overload + def getboolean(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> bool: ... + @overload + def getboolean( + self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T = ... + ) -> bool | _T: ... + def _get_conv( + self, + section: _SectionName, + option: str, + conv: Callable[[str], _T], + *, + raw: bool = False, + vars: _Section | None = None, + fallback: _T = ..., + ) -> _T: ... + # This is incompatible with MutableMapping so we ignore the type + @overload # type: ignore[override] + def get(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> str | MaybeNone: ... + @overload + def get( + self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T + ) -> str | _T | MaybeNone: ... + @overload + def items(self, *, raw: bool = False, vars: _Section | None = None) -> ItemsView[str, SectionProxy]: ... + @overload + def items(self, section: _SectionName, raw: bool = False, vars: _Section | None = None) -> list[tuple[str, str]]: ... + def set(self, section: _SectionName, option: str, value: str | None = None) -> None: ... + def write(self, fp: SupportsWrite[str], space_around_delimiters: bool = True) -> None: ... + def remove_option(self, section: _SectionName, option: str) -> bool: ... + def remove_section(self, section: _SectionName) -> bool: ... + def optionxform(self, optionstr: str) -> str: ... + @property + def converters(self) -> ConverterMapping: ... + +class ConfigParser(RawConfigParser): + # This is incompatible with MutableMapping so we ignore the type + @overload # type: ignore[override] + def get(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> str: ... + @overload + def get( + self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T + ) -> str | _T: ... + +if sys.version_info < (3, 12): + @deprecated("Deprecated since Python 3.2; removed in Python 3.12. Use `ConfigParser` instead.") + class SafeConfigParser(ConfigParser): ... + +class SectionProxy(MutableMapping[str, str]): + def __init__(self, parser: RawConfigParser, name: str) -> None: ... + def __getitem__(self, key: str) -> str: ... + def __setitem__(self, key: str, value: str) -> None: ... + def __delitem__(self, key: str) -> None: ... + def __contains__(self, key: object) -> bool: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[str]: ... + @property + def parser(self) -> RawConfigParser: ... + @property + def name(self) -> str: ... + # This is incompatible with MutableMapping so we ignore the type + @overload # type: ignore[override] + def get( + self, + option: str, + fallback: None = None, + *, + raw: bool = False, + vars: _Section | None = None, + _impl: Any | None = None, + **kwargs: Any, # passed to the underlying parser's get() method + ) -> str | None: ... + @overload + def get( + self, + option: str, + fallback: _T, + *, + raw: bool = False, + vars: _Section | None = None, + _impl: Any | None = None, + **kwargs: Any, # passed to the underlying parser's get() method + ) -> str | _T: ... + # These are partially-applied version of the methods with the same names in + # RawConfigParser; the stubs should be kept updated together + @overload + def getint(self, option: str, *, raw: bool = False, vars: _Section | None = None) -> int | None: ... 
+ @overload + def getint(self, option: str, fallback: _T = ..., *, raw: bool = False, vars: _Section | None = None) -> int | _T: ... + @overload + def getfloat(self, option: str, *, raw: bool = False, vars: _Section | None = None) -> float | None: ... + @overload + def getfloat(self, option: str, fallback: _T = ..., *, raw: bool = False, vars: _Section | None = None) -> float | _T: ... + @overload + def getboolean(self, option: str, *, raw: bool = False, vars: _Section | None = None) -> bool | None: ... + @overload + def getboolean(self, option: str, fallback: _T = ..., *, raw: bool = False, vars: _Section | None = None) -> bool | _T: ... + # SectionProxy can have arbitrary attributes when custom converters are used + def __getattr__(self, key: str) -> Callable[..., Any]: ... + +class ConverterMapping(MutableMapping[str, _ConverterCallback | None]): + GETTERCRE: ClassVar[Pattern[Any]] + def __init__(self, parser: RawConfigParser) -> None: ... + def __getitem__(self, key: str) -> _ConverterCallback: ... + def __setitem__(self, key: str, value: _ConverterCallback | None) -> None: ... + def __delitem__(self, key: str) -> None: ... + def __iter__(self) -> Iterator[str]: ... + def __len__(self) -> int: ... + +class Error(Exception): + message: str + def __init__(self, msg: str = "") -> None: ... + +class NoSectionError(Error): + section: _SectionName + def __init__(self, section: _SectionName) -> None: ... + +class DuplicateSectionError(Error): + section: _SectionName + source: str | None + lineno: int | None + def __init__(self, section: _SectionName, source: str | None = None, lineno: int | None = None) -> None: ... + +class DuplicateOptionError(Error): + section: _SectionName + option: str + source: str | None + lineno: int | None + def __init__(self, section: _SectionName, option: str, source: str | None = None, lineno: int | None = None) -> None: ... + +class NoOptionError(Error): + section: _SectionName + option: str + def __init__(self, option: str, section: _SectionName) -> None: ... + +class InterpolationError(Error): + section: _SectionName + option: str + def __init__(self, option: str, section: _SectionName, msg: str) -> None: ... + +class InterpolationDepthError(InterpolationError): + def __init__(self, option: str, section: _SectionName, rawval: object) -> None: ... + +class InterpolationMissingOptionError(InterpolationError): + reference: str + def __init__(self, option: str, section: _SectionName, rawval: object, reference: str) -> None: ... + +class InterpolationSyntaxError(InterpolationError): ... + +class ParsingError(Error): + source: str + errors: list[tuple[int, str]] + if sys.version_info >= (3, 13): + def __init__(self, source: str, *args: object) -> None: ... + def combine(self, others: Iterable[ParsingError]) -> ParsingError: ... + elif sys.version_info >= (3, 12): + def __init__(self, source: str) -> None: ... + else: + @overload + def __init__(self, source: str) -> None: ... + @overload + @deprecated("The `filename` parameter removed in Python 3.12. Use `source` instead.") + def __init__(self, source: None, filename: str | None) -> None: ... + @overload + @deprecated("The `filename` parameter removed in Python 3.12. Use `source` instead.") + def __init__(self, source: None = None, *, filename: str | None) -> None: ... + + def append(self, lineno: int, line: str) -> None: ... + + if sys.version_info < (3, 12): + @property + @deprecated("Deprecated since Python 3.2; removed in Python 3.12. Use `source` instead.") + def filename(self) -> str: ... 
+ @filename.setter + @deprecated("Deprecated since Python 3.2; removed in Python 3.12. Use `source` instead.") + def filename(self, value: str) -> None: ... + +class MissingSectionHeaderError(ParsingError): + lineno: int + line: str + def __init__(self, filename: str, lineno: int, line: str) -> None: ... + +if sys.version_info >= (3, 13): + class MultilineContinuationError(ParsingError): + lineno: int + line: str + def __init__(self, filename: str, lineno: int, line: str) -> None: ... + +if sys.version_info >= (3, 14): + class UnnamedSectionDisabledError(Error): + msg: Final = "Support for UNNAMED_SECTION is disabled." + def __init__(self) -> None: ... + + class InvalidWriteError(Error): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/contextlib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/contextlib.pyi new file mode 100644 index 0000000..221102e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/contextlib.pyi @@ -0,0 +1,225 @@ +import abc +import sys +from _typeshed import FileDescriptorOrPath, Unused +from abc import ABC, abstractmethod +from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable, Generator, Iterator +from types import TracebackType +from typing import Any, Generic, Protocol, TypeVar, overload, runtime_checkable, type_check_only +from typing_extensions import ParamSpec, Self, TypeAlias + +__all__ = [ + "contextmanager", + "closing", + "AbstractContextManager", + "ContextDecorator", + "ExitStack", + "redirect_stdout", + "redirect_stderr", + "suppress", + "AbstractAsyncContextManager", + "AsyncExitStack", + "asynccontextmanager", + "nullcontext", +] + +if sys.version_info >= (3, 10): + __all__ += ["aclosing"] + +if sys.version_info >= (3, 11): + __all__ += ["chdir"] + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_ExitT_co = TypeVar("_ExitT_co", covariant=True, bound=bool | None, default=bool | None) +_F = TypeVar("_F", bound=Callable[..., Any]) +_G_co = TypeVar("_G_co", bound=Generator[Any, Any, Any] | AsyncGenerator[Any, Any], covariant=True) +_P = ParamSpec("_P") + +_SendT_contra = TypeVar("_SendT_contra", contravariant=True, default=None) +_ReturnT_co = TypeVar("_ReturnT_co", covariant=True, default=None) + +_ExitFunc: TypeAlias = Callable[[type[BaseException] | None, BaseException | None, TracebackType | None], bool | None] +_CM_EF = TypeVar("_CM_EF", bound=AbstractContextManager[Any, Any] | _ExitFunc) + +# mypy and pyright object to this being both ABC and Protocol. +# At runtime it inherits from ABC and is not a Protocol, but it is on the +# allowlist for use as a Protocol. +@runtime_checkable +class AbstractContextManager(ABC, Protocol[_T_co, _ExitT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + __slots__ = () + def __enter__(self) -> _T_co: ... + @abstractmethod + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / + ) -> _ExitT_co: ... + +# mypy and pyright object to this being both ABC and Protocol. +# At runtime it inherits from ABC and is not a Protocol, but it is on the +# allowlist for use as a Protocol. +@runtime_checkable +class AbstractAsyncContextManager(ABC, Protocol[_T_co, _ExitT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + __slots__ = () + async def __aenter__(self) -> _T_co: ... 
+ @abstractmethod + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / + ) -> _ExitT_co: ... + +class ContextDecorator: + def _recreate_cm(self) -> Self: ... + def __call__(self, func: _F) -> _F: ... + +class _GeneratorContextManagerBase(Generic[_G_co]): + # Ideally this would use ParamSpec, but that requires (*args, **kwargs), which this isn't. see #6676 + def __init__(self, func: Callable[..., _G_co], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ... + gen: _G_co + func: Callable[..., _G_co] + args: tuple[Any, ...] + kwds: dict[str, Any] + +class _GeneratorContextManager( + _GeneratorContextManagerBase[Generator[_T_co, _SendT_contra, _ReturnT_co]], + AbstractContextManager[_T_co, bool | None], + ContextDecorator, +): + def __exit__( + self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> bool | None: ... + +def contextmanager(func: Callable[_P, Iterator[_T_co]]) -> Callable[_P, _GeneratorContextManager[_T_co]]: ... + +if sys.version_info >= (3, 10): + _AF = TypeVar("_AF", bound=Callable[..., Awaitable[Any]]) + + class AsyncContextDecorator: + def _recreate_cm(self) -> Self: ... + def __call__(self, func: _AF) -> _AF: ... + + class _AsyncGeneratorContextManager( + _GeneratorContextManagerBase[AsyncGenerator[_T_co, _SendT_contra]], + AbstractAsyncContextManager[_T_co, bool | None], + AsyncContextDecorator, + ): + async def __aexit__( + self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> bool | None: ... + +else: + class _AsyncGeneratorContextManager( + _GeneratorContextManagerBase[AsyncGenerator[_T_co, _SendT_contra]], AbstractAsyncContextManager[_T_co, bool | None] + ): + async def __aexit__( + self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> bool | None: ... + +def asynccontextmanager(func: Callable[_P, AsyncIterator[_T_co]]) -> Callable[_P, _AsyncGeneratorContextManager[_T_co]]: ... +@type_check_only +class _SupportsClose(Protocol): + def close(self) -> object: ... + +_SupportsCloseT = TypeVar("_SupportsCloseT", bound=_SupportsClose) + +class closing(AbstractContextManager[_SupportsCloseT, None]): + def __init__(self, thing: _SupportsCloseT) -> None: ... + def __exit__(self, *exc_info: Unused) -> None: ... + +if sys.version_info >= (3, 10): + @type_check_only + class _SupportsAclose(Protocol): + def aclose(self) -> Awaitable[object]: ... + + _SupportsAcloseT = TypeVar("_SupportsAcloseT", bound=_SupportsAclose) + + class aclosing(AbstractAsyncContextManager[_SupportsAcloseT, None]): + def __init__(self, thing: _SupportsAcloseT) -> None: ... + async def __aexit__(self, *exc_info: Unused) -> None: ... + +class suppress(AbstractContextManager[None, bool]): + def __init__(self, *exceptions: type[BaseException]) -> None: ... + def __exit__( + self, exctype: type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None + ) -> bool: ... + +# This is trying to describe what is needed for (most?) uses +# of `redirect_stdout` and `redirect_stderr`. +# https://github.com/python/typeshed/issues/14903 +@type_check_only +class _SupportsRedirect(Protocol): + def write(self, s: str, /) -> int: ... + def flush(self) -> None: ... 
+ +_SupportsRedirectT = TypeVar("_SupportsRedirectT", bound=_SupportsRedirect | None) + +class _RedirectStream(AbstractContextManager[_SupportsRedirectT, None]): + def __init__(self, new_target: _SupportsRedirectT) -> None: ... + def __exit__( + self, exctype: type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None + ) -> None: ... + +class redirect_stdout(_RedirectStream[_SupportsRedirectT]): ... +class redirect_stderr(_RedirectStream[_SupportsRedirectT]): ... + +class _BaseExitStack(Generic[_ExitT_co]): + def enter_context(self, cm: AbstractContextManager[_T, _ExitT_co]) -> _T: ... + def push(self, exit: _CM_EF) -> _CM_EF: ... + def callback(self, callback: Callable[_P, _T], /, *args: _P.args, **kwds: _P.kwargs) -> Callable[_P, _T]: ... + def pop_all(self) -> Self: ... + +# In reality this is a subclass of `AbstractContextManager`; +# see #7961 for why we don't do that in the stub +class ExitStack(_BaseExitStack[_ExitT_co], metaclass=abc.ABCMeta): + def close(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / + ) -> _ExitT_co: ... + +_ExitCoroFunc: TypeAlias = Callable[ + [type[BaseException] | None, BaseException | None, TracebackType | None], Awaitable[bool | None] +] +_ACM_EF = TypeVar("_ACM_EF", bound=AbstractAsyncContextManager[Any, Any] | _ExitCoroFunc) + +# In reality this is a subclass of `AbstractAsyncContextManager`; +# see #7961 for why we don't do that in the stub +class AsyncExitStack(_BaseExitStack[_ExitT_co], metaclass=abc.ABCMeta): + async def enter_async_context(self, cm: AbstractAsyncContextManager[_T, _ExitT_co]) -> _T: ... + def push_async_exit(self, exit: _ACM_EF) -> _ACM_EF: ... + def push_async_callback( + self, callback: Callable[_P, Awaitable[_T]], /, *args: _P.args, **kwds: _P.kwargs + ) -> Callable[_P, Awaitable[_T]]: ... + async def aclose(self) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / + ) -> _ExitT_co: ... + +if sys.version_info >= (3, 10): + class nullcontext(AbstractContextManager[_T, None], AbstractAsyncContextManager[_T, None]): + enter_result: _T + @overload + def __init__(self: nullcontext[None], enter_result: None = None) -> None: ... + @overload + def __init__(self: nullcontext[_T], enter_result: _T) -> None: ... # pyright: ignore[reportInvalidTypeVarUse] #11780 + def __enter__(self) -> _T: ... + def __exit__(self, *exctype: Unused) -> None: ... + async def __aenter__(self) -> _T: ... + async def __aexit__(self, *exctype: Unused) -> None: ... + +else: + class nullcontext(AbstractContextManager[_T, None]): + enter_result: _T + @overload + def __init__(self: nullcontext[None], enter_result: None = None) -> None: ... + @overload + def __init__(self: nullcontext[_T], enter_result: _T) -> None: ... # pyright: ignore[reportInvalidTypeVarUse] #11780 + def __enter__(self) -> _T: ... + def __exit__(self, *exctype: Unused) -> None: ... + +if sys.version_info >= (3, 11): + _T_fd_or_any_path = TypeVar("_T_fd_or_any_path", bound=FileDescriptorOrPath) + + class chdir(AbstractContextManager[None, None], Generic[_T_fd_or_any_path]): + path: _T_fd_or_any_path + def __init__(self, path: _T_fd_or_any_path) -> None: ... + def __enter__(self) -> None: ... + def __exit__(self, *excinfo: Unused) -> None: ... 
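For orientation, a minimal usage sketch of the contextlib API the stub above describes (contextmanager, ExitStack, suppress). The managed_resource helper and the temp-file path are illustrative assumptions, not part of the vendored stub:

# Illustrative only -- not part of the vendored typeshed stub above.
import contextlib
import os
import tempfile
from collections.abc import Iterator

@contextlib.contextmanager
def managed_resource(path: str) -> Iterator[int]:
    # contextmanager() wraps this generator in a context manager,
    # matching the _GeneratorContextManager return type in the stub.
    fd = os.open(path, os.O_RDWR | os.O_CREAT)
    try:
        yield fd
    finally:
        os.close(fd)

def demo() -> None:
    tmp = os.path.join(tempfile.gettempdir(), "demo.txt")  # hypothetical path
    with contextlib.ExitStack() as stack:
        fd = stack.enter_context(managed_resource(tmp))
        os.write(fd, b"hello")
    # suppress() swallows only the listed exception types.
    with contextlib.suppress(FileNotFoundError):
        os.remove(tmp)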
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/contextvars.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/contextvars.pyi new file mode 100644 index 0000000..22dc330 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/contextvars.pyi @@ -0,0 +1,3 @@ +from _contextvars import Context as Context, ContextVar as ContextVar, Token as Token, copy_context as copy_context + +__all__ = ("Context", "ContextVar", "Token", "copy_context") diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/copy.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/copy.pyi new file mode 100644 index 0000000..373899e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/copy.pyi @@ -0,0 +1,28 @@ +import sys +from typing import Any, Protocol, TypeVar, type_check_only + +__all__ = ["Error", "copy", "deepcopy"] + +_T = TypeVar("_T") +_RT_co = TypeVar("_RT_co", covariant=True) + +@type_check_only +class _SupportsReplace(Protocol[_RT_co]): + # In reality doesn't support args, but there's no great way to express this. + def __replace__(self, /, *_: Any, **changes: Any) -> _RT_co: ... + +# None in CPython but non-None in Jython +PyStringMap: Any + +# Note: memo and _nil are internal kwargs. +def deepcopy(x: _T, memo: dict[int, Any] | None = None, _nil: Any = []) -> _T: ... +def copy(x: _T) -> _T: ... + +if sys.version_info >= (3, 13): + __all__ += ["replace"] + # The types accepted by `**changes` match those of `obj.__replace__`. + def replace(obj: _SupportsReplace[_RT_co], /, **changes: Any) -> _RT_co: ... + +class Error(Exception): ... + +error = Error diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/copyreg.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/copyreg.pyi new file mode 100644 index 0000000..8f7fd95 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/copyreg.pyi @@ -0,0 +1,21 @@ +from collections.abc import Callable, Hashable +from typing import Any, SupportsInt, TypeVar +from typing_extensions import TypeAlias + +_T = TypeVar("_T") +_Reduce: TypeAlias = tuple[Callable[..., _T], tuple[Any, ...]] | tuple[Callable[..., _T], tuple[Any, ...], Any | None] + +__all__ = ["pickle", "constructor", "add_extension", "remove_extension", "clear_extension_cache"] + +def pickle( + ob_type: type[_T], + pickle_function: Callable[[_T], str | _Reduce[_T]], + constructor_ob: Callable[[_Reduce[_T]], _T] | None = None, +) -> None: ... +def constructor(object: Callable[[_Reduce[_T]], _T]) -> None: ... +def add_extension(module: Hashable, name: Hashable, code: SupportsInt) -> None: ... +def remove_extension(module: Hashable, name: Hashable, code: int) -> None: ... +def clear_extension_cache() -> None: ... 
+ +_DispatchTableType: TypeAlias = dict[type, Callable[[Any], str | _Reduce[Any]]] # imported by multiprocessing.reduction +dispatch_table: _DispatchTableType # undocumented diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/crypt.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/crypt.pyi new file mode 100644 index 0000000..f926321 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/crypt.pyi @@ -0,0 +1,26 @@ +import sys +from typing import Final, NamedTuple, type_check_only +from typing_extensions import disjoint_base + +if sys.platform != "win32": + @type_check_only + class _MethodBase(NamedTuple): + name: str + ident: str | None + salt_chars: int + total_size: int + + if sys.version_info >= (3, 12): + class _Method(_MethodBase): ... + else: + @disjoint_base + class _Method(_MethodBase): ... + + METHOD_CRYPT: Final[_Method] + METHOD_MD5: Final[_Method] + METHOD_SHA256: Final[_Method] + METHOD_SHA512: Final[_Method] + METHOD_BLOWFISH: Final[_Method] + methods: list[_Method] + def mksalt(method: _Method | None = None, *, rounds: int | None = None) -> str: ... + def crypt(word: str, salt: str | _Method | None = None) -> str: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/csv.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/csv.pyi new file mode 100644 index 0000000..4ed0ab1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/csv.pyi @@ -0,0 +1,155 @@ +import sys +from _csv import ( + QUOTE_ALL as QUOTE_ALL, + QUOTE_MINIMAL as QUOTE_MINIMAL, + QUOTE_NONE as QUOTE_NONE, + QUOTE_NONNUMERIC as QUOTE_NONNUMERIC, + Error as Error, + __version__ as __version__, + _DialectLike, + _QuotingType, + field_size_limit as field_size_limit, + get_dialect as get_dialect, + list_dialects as list_dialects, + reader as reader, + register_dialect as register_dialect, + unregister_dialect as unregister_dialect, + writer as writer, +) + +if sys.version_info >= (3, 12): + from _csv import QUOTE_NOTNULL as QUOTE_NOTNULL, QUOTE_STRINGS as QUOTE_STRINGS +if sys.version_info >= (3, 10): + from _csv import Reader, Writer +else: + from _csv import _reader as Reader, _writer as Writer + +from _typeshed import SupportsWrite +from collections.abc import Collection, Iterable, Iterator, Mapping, Sequence +from types import GenericAlias +from typing import Any, Generic, Literal, TypeVar, overload +from typing_extensions import Self + +__all__ = [ + "QUOTE_MINIMAL", + "QUOTE_ALL", + "QUOTE_NONNUMERIC", + "QUOTE_NONE", + "Error", + "Dialect", + "excel", + "excel_tab", + "field_size_limit", + "reader", + "writer", + "register_dialect", + "get_dialect", + "list_dialects", + "Sniffer", + "unregister_dialect", + "DictReader", + "DictWriter", + "unix_dialect", +] +if sys.version_info >= (3, 12): + __all__ += ["QUOTE_STRINGS", "QUOTE_NOTNULL"] +if sys.version_info < (3, 13): + __all__ += ["__doc__", "__version__"] + +_T = TypeVar("_T") + +class Dialect: + delimiter: str + quotechar: str | None + escapechar: str | None + doublequote: bool + skipinitialspace: bool + lineterminator: str + quoting: _QuotingType + strict: bool + def __init__(self) -> None: ... + +class excel(Dialect): ... +class excel_tab(excel): ... +class unix_dialect(Dialect): ... 
+ +class DictReader(Iterator[dict[_T | Any, str | Any]], Generic[_T]): + fieldnames: Sequence[_T] | None + restkey: _T | None + restval: str | Any | None + reader: Reader + dialect: _DialectLike + line_num: int + @overload + def __init__( + self, + f: Iterable[str], + fieldnames: Sequence[_T], + restkey: _T | None = None, + restval: str | Any | None = None, + dialect: _DialectLike = "excel", + *, + delimiter: str = ",", + quotechar: str | None = '"', + escapechar: str | None = None, + doublequote: bool = True, + skipinitialspace: bool = False, + lineterminator: str = "\r\n", + quoting: _QuotingType = 0, + strict: bool = False, + ) -> None: ... + @overload + def __init__( + self: DictReader[str], + f: Iterable[str], + fieldnames: Sequence[str] | None = None, + restkey: str | None = None, + restval: str | None = None, + dialect: _DialectLike = "excel", + *, + delimiter: str = ",", + quotechar: str | None = '"', + escapechar: str | None = None, + doublequote: bool = True, + skipinitialspace: bool = False, + lineterminator: str = "\r\n", + quoting: _QuotingType = 0, + strict: bool = False, + ) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> dict[_T | Any, str | Any]: ... + if sys.version_info >= (3, 12): + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +class DictWriter(Generic[_T]): + fieldnames: Collection[_T] + restval: Any | None + extrasaction: Literal["raise", "ignore"] + writer: Writer + def __init__( + self, + f: SupportsWrite[str], + fieldnames: Collection[_T], + restval: Any | None = "", + extrasaction: Literal["raise", "ignore"] = "raise", + dialect: _DialectLike = "excel", + *, + delimiter: str = ",", + quotechar: str | None = '"', + escapechar: str | None = None, + doublequote: bool = True, + skipinitialspace: bool = False, + lineterminator: str = "\r\n", + quoting: _QuotingType = 0, + strict: bool = False, + ) -> None: ... + def writeheader(self) -> Any: ... + def writerow(self, rowdict: Mapping[_T, Any]) -> Any: ... + def writerows(self, rowdicts: Iterable[Mapping[_T, Any]]) -> None: ... + if sys.version_info >= (3, 12): + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +class Sniffer: + preferred: list[str] + def sniff(self, sample: str, delimiters: str | None = None) -> type[Dialect]: ... + def has_header(self, sample: str) -> bool: ... 
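A small sketch of the DictReader/DictWriter interface described by the csv stub above; both classes are generic over the field-name type. The field names and the in-memory buffer are illustrative assumptions, not part of the stub:

# Illustrative only -- not part of the vendored typeshed stub above.
import csv
import io

def roundtrip() -> list[dict[str, str]]:
    buf = io.StringIO()
    # DictWriter[str]: field names are plain strings here.
    writer = csv.DictWriter(buf, fieldnames=["name", "lang"])
    writer.writeheader()
    writer.writerow({"name": "typeshed", "lang": "Python"})

    buf.seek(0)
    # With fieldnames omitted, DictReader[str] uses the header row as keys.
    reader = csv.DictReader(buf)
    return [dict(row) for row in reader]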
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/__init__.pyi new file mode 100644 index 0000000..19bd261 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/__init__.pyi @@ -0,0 +1,332 @@ +import sys +from _ctypes import ( + RTLD_GLOBAL as RTLD_GLOBAL, + RTLD_LOCAL as RTLD_LOCAL, + Array as Array, + CFuncPtr as _CFuncPtr, + Structure as Structure, + Union as Union, + _CanCastTo as _CanCastTo, + _CArgObject as _CArgObject, + _CData as _CData, + _CDataType as _CDataType, + _CField as _CField, + _CTypeBaseType, + _Pointer as _Pointer, + _PointerLike as _PointerLike, + _SimpleCData as _SimpleCData, + addressof as addressof, + alignment as alignment, + byref as byref, + get_errno as get_errno, + resize as resize, + set_errno as set_errno, + sizeof as sizeof, +) +from _typeshed import StrPath, SupportsBool, SupportsLen +from ctypes._endian import BigEndianStructure as BigEndianStructure, LittleEndianStructure as LittleEndianStructure +from types import GenericAlias +from typing import Any, ClassVar, Final, Generic, Literal, TypeVar, overload, type_check_only +from typing_extensions import Self, TypeAlias, deprecated + +if sys.platform == "win32": + from _ctypes import FormatError as FormatError, get_last_error as get_last_error, set_last_error as set_last_error + + if sys.version_info >= (3, 14): + from _ctypes import COMError as COMError, CopyComPointer as CopyComPointer + +if sys.version_info >= (3, 11): + from ctypes._endian import BigEndianUnion as BigEndianUnion, LittleEndianUnion as LittleEndianUnion + +_CT = TypeVar("_CT", bound=_CData) +_T = TypeVar("_T", default=Any) +_DLLT = TypeVar("_DLLT", bound=CDLL) + +if sys.version_info >= (3, 14): + @overload + @deprecated("ctypes.POINTER with string") + def POINTER(cls: str) -> type[Any]: ... + @overload + def POINTER(cls: None) -> type[c_void_p]: ... + @overload + def POINTER(cls: type[_CT]) -> type[_Pointer[_CT]]: ... + def pointer(obj: _CT) -> _Pointer[_CT]: ... + +else: + from _ctypes import POINTER as POINTER, pointer as pointer + +DEFAULT_MODE: Final[int] + +class ArgumentError(Exception): ... + +# defined within CDLL.__init__ +# Runtime name is ctypes.CDLL.__init__.._FuncPtr +@type_check_only +class _CDLLFuncPointer(_CFuncPtr): + _flags_: ClassVar[int] + _restype_: ClassVar[type[_CDataType]] + +# Not a real class; _CDLLFuncPointer with a __name__ set on it. +@type_check_only +class _NamedFuncPointer(_CDLLFuncPointer): + __name__: str + +if sys.version_info >= (3, 12): + _NameTypes: TypeAlias = StrPath | None +else: + _NameTypes: TypeAlias = str | None + +class CDLL: + _func_flags_: ClassVar[int] + _func_restype_: ClassVar[type[_CDataType]] + _name: str + _handle: int + _FuncPtr: type[_CDLLFuncPointer] + def __init__( + self, + name: _NameTypes, + mode: int = ..., + handle: int | None = None, + use_errno: bool = False, + use_last_error: bool = False, + winmode: int | None = None, + ) -> None: ... + def __getattr__(self, name: str) -> _NamedFuncPointer: ... + def __getitem__(self, name_or_ordinal: str) -> _NamedFuncPointer: ... + +if sys.platform == "win32": + class OleDLL(CDLL): ... + class WinDLL(CDLL): ... + +class PyDLL(CDLL): ... + +class LibraryLoader(Generic[_DLLT]): + def __init__(self, dlltype: type[_DLLT]) -> None: ... + def __getattr__(self, name: str) -> _DLLT: ... + def __getitem__(self, name: str) -> _DLLT: ... + def LoadLibrary(self, name: str) -> _DLLT: ... 
+ def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +cdll: LibraryLoader[CDLL] +if sys.platform == "win32": + windll: LibraryLoader[WinDLL] + oledll: LibraryLoader[OleDLL] +pydll: LibraryLoader[PyDLL] +pythonapi: PyDLL + +# Class definition within CFUNCTYPE / WINFUNCTYPE / PYFUNCTYPE +# Names at runtime are +# ctypes.CFUNCTYPE..CFunctionType +# ctypes.WINFUNCTYPE..WinFunctionType +# ctypes.PYFUNCTYPE..CFunctionType +@type_check_only +class _CFunctionType(_CFuncPtr): + _argtypes_: ClassVar[list[type[_CData | _CDataType]]] + _restype_: ClassVar[type[_CData | _CDataType] | None] + _flags_: ClassVar[int] + +# Alias for either function pointer type +_FuncPointer: TypeAlias = _CDLLFuncPointer | _CFunctionType # noqa: Y047 # not used here + +def CFUNCTYPE( + restype: type[_CData | _CDataType] | None, + *argtypes: type[_CData | _CDataType], + use_errno: bool = False, + use_last_error: bool = False, +) -> type[_CFunctionType]: ... + +if sys.platform == "win32": + def WINFUNCTYPE( + restype: type[_CData | _CDataType] | None, + *argtypes: type[_CData | _CDataType], + use_errno: bool = False, + use_last_error: bool = False, + ) -> type[_CFunctionType]: ... + +def PYFUNCTYPE(restype: type[_CData | _CDataType] | None, *argtypes: type[_CData | _CDataType]) -> type[_CFunctionType]: ... + +# Any type that can be implicitly converted to c_void_p when passed as a C function argument. +# (bytes is not included here, see below.) +_CVoidPLike: TypeAlias = _PointerLike | Array[Any] | _CArgObject | int +# Same as above, but including types known to be read-only (i. e. bytes). +# This distinction is not strictly necessary (ctypes doesn't differentiate between const +# and non-const pointers), but it catches errors like memmove(b'foo', buf, 4) +# when memmove(buf, b'foo', 4) was intended. +_CVoidConstPLike: TypeAlias = _CVoidPLike | bytes + +_CastT = TypeVar("_CastT", bound=_CanCastTo) + +def cast(obj: _CData | _CDataType | _CArgObject | int, typ: type[_CastT]) -> _CastT: ... +def create_string_buffer(init: int | bytes, size: int | None = None) -> Array[c_char]: ... + +c_buffer = create_string_buffer + +def create_unicode_buffer(init: int | str, size: int | None = None) -> Array[c_wchar]: ... + +if sys.version_info >= (3, 13): + @deprecated("Deprecated since Python 3.13; will be removed in Python 3.15.") + def SetPointerType(pointer: type[_Pointer[Any]], cls: _CTypeBaseType) -> None: ... + +else: + def SetPointerType(pointer: type[_Pointer[Any]], cls: _CTypeBaseType) -> None: ... + +def ARRAY(typ: _CT, len: int) -> Array[_CT]: ... # Soft Deprecated, no plans to remove + +if sys.platform == "win32": + def DllCanUnloadNow() -> int: ... + def DllGetClassObject(rclsid: Any, riid: Any, ppv: Any) -> int: ... # TODO: not documented + + # Actually just an instance of _NamedFuncPointer (aka _CDLLFuncPointer), + # but we want to set a more specific __call__ + @type_check_only + class _GetLastErrorFunctionType(_NamedFuncPointer): + def __call__(self) -> int: ... + + GetLastError: _GetLastErrorFunctionType + +# Actually just an instance of _CFunctionType, but we want to set a more +# specific __call__. +@type_check_only +class _MemmoveFunctionType(_CFunctionType): + def __call__(self, dst: _CVoidPLike, src: _CVoidConstPLike, count: int) -> int: ... + +memmove: _MemmoveFunctionType + +# Actually just an instance of _CFunctionType, but we want to set a more +# specific __call__. +@type_check_only +class _MemsetFunctionType(_CFunctionType): + def __call__(self, dst: _CVoidPLike, c: int, count: int) -> int: ... 
+ +memset: _MemsetFunctionType + +def string_at(ptr: _CVoidConstPLike, size: int = -1) -> bytes: ... + +if sys.platform == "win32": + def WinError(code: int | None = None, descr: str | None = None) -> OSError: ... + +def wstring_at(ptr: _CVoidConstPLike, size: int = -1) -> str: ... + +if sys.version_info >= (3, 14): + def memoryview_at(ptr: _CVoidConstPLike, size: int, readonly: bool = False) -> memoryview: ... + +class py_object(_CanCastTo, _SimpleCData[_T]): + _type_: ClassVar[Literal["O"]] + if sys.version_info >= (3, 14): + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +class c_bool(_SimpleCData[bool]): + _type_: ClassVar[Literal["?"]] + def __init__(self, value: SupportsBool | SupportsLen | None = ...) -> None: ... + +class c_byte(_SimpleCData[int]): + _type_: ClassVar[Literal["b"]] + +class c_ubyte(_SimpleCData[int]): + _type_: ClassVar[Literal["B"]] + +class c_short(_SimpleCData[int]): + _type_: ClassVar[Literal["h"]] + +class c_ushort(_SimpleCData[int]): + _type_: ClassVar[Literal["H"]] + +class c_long(_SimpleCData[int]): + _type_: ClassVar[Literal["l"]] + +class c_ulong(_SimpleCData[int]): + _type_: ClassVar[Literal["L"]] + +class c_int(_SimpleCData[int]): # can be an alias for c_long + _type_: ClassVar[Literal["i", "l"]] + +class c_uint(_SimpleCData[int]): # can be an alias for c_ulong + _type_: ClassVar[Literal["I", "L"]] + +class c_longlong(_SimpleCData[int]): # can be an alias for c_long + _type_: ClassVar[Literal["q", "l"]] + +class c_ulonglong(_SimpleCData[int]): # can be an alias for c_ulong + _type_: ClassVar[Literal["Q", "L"]] + +c_int8 = c_byte +c_uint8 = c_ubyte + +class c_int16(_SimpleCData[int]): # can be an alias for c_short or c_int + _type_: ClassVar[Literal["h", "i"]] + +class c_uint16(_SimpleCData[int]): # can be an alias for c_ushort or c_uint + _type_: ClassVar[Literal["H", "I"]] + +class c_int32(_SimpleCData[int]): # can be an alias for c_int or c_long + _type_: ClassVar[Literal["i", "l"]] + +class c_uint32(_SimpleCData[int]): # can be an alias for c_uint or c_ulong + _type_: ClassVar[Literal["I", "L"]] + +class c_int64(_SimpleCData[int]): # can be an alias for c_long or c_longlong + _type_: ClassVar[Literal["l", "q"]] + +class c_uint64(_SimpleCData[int]): # can be an alias for c_ulong or c_ulonglong + _type_: ClassVar[Literal["L", "Q"]] + +class c_ssize_t(_SimpleCData[int]): # alias for c_int, c_long, or c_longlong + _type_: ClassVar[Literal["i", "l", "q"]] + +class c_size_t(_SimpleCData[int]): # alias for c_uint, c_ulong, or c_ulonglong + _type_: ClassVar[Literal["I", "L", "Q"]] + +class c_float(_SimpleCData[float]): + _type_: ClassVar[Literal["f"]] + +class c_double(_SimpleCData[float]): + _type_: ClassVar[Literal["d"]] + +class c_longdouble(_SimpleCData[float]): # can be an alias for c_double + _type_: ClassVar[Literal["d", "g"]] + +if sys.version_info >= (3, 14) and sys.platform != "win32": + class c_double_complex(_SimpleCData[complex]): + _type_: ClassVar[Literal["D"]] + + class c_float_complex(_SimpleCData[complex]): + _type_: ClassVar[Literal["F"]] + + class c_longdouble_complex(_SimpleCData[complex]): + _type_: ClassVar[Literal["G"]] + +class c_char(_SimpleCData[bytes]): + _type_: ClassVar[Literal["c"]] + def __init__(self, value: int | bytes | bytearray = ...) -> None: ... + +class c_char_p(_PointerLike, _SimpleCData[bytes | None]): + _type_: ClassVar[Literal["z"]] + def __init__(self, value: int | bytes | None = ...) -> None: ... + @classmethod + def from_param(cls, value: Any, /) -> Self | _CArgObject: ... 
+ +class c_void_p(_PointerLike, _SimpleCData[int | None]): + _type_: ClassVar[Literal["P"]] + @classmethod + def from_param(cls, value: Any, /) -> Self | _CArgObject: ... + +c_voidp = c_void_p # backwards compatibility (to a bug) + +class c_wchar(_SimpleCData[str]): + _type_: ClassVar[Literal["u"]] + +class c_wchar_p(_PointerLike, _SimpleCData[str | None]): + _type_: ClassVar[Literal["Z"]] + def __init__(self, value: int | str | None = ...) -> None: ... + @classmethod + def from_param(cls, value: Any, /) -> Self | _CArgObject: ... + +if sys.platform == "win32": + class HRESULT(_SimpleCData[int]): # TODO: undocumented + _type_: ClassVar[Literal["l"]] + +if sys.version_info >= (3, 12): + # At runtime, this is an alias for either c_int32 or c_int64, + # which are themselves an alias for one of c_int, c_long, or c_longlong + # This covers all our bases. + c_time_t: type[c_int32 | c_int64 | c_int | c_long | c_longlong] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/_endian.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/_endian.pyi new file mode 100644 index 0000000..97852f6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/_endian.pyi @@ -0,0 +1,16 @@ +import sys +from ctypes import Structure, Union + +# At runtime, the native endianness is an alias for Structure, +# while the other is a subclass with a metaclass added in. +class BigEndianStructure(Structure): + __slots__ = () + +class LittleEndianStructure(Structure): ... + +# Same thing for these: one is an alias of Union at runtime +if sys.version_info >= (3, 11): + class BigEndianUnion(Union): + __slots__ = () + + class LittleEndianUnion(Union): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/macholib/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/macholib/__init__.pyi new file mode 100644 index 0000000..c5dd954 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/macholib/__init__.pyi @@ -0,0 +1,3 @@ +from typing import Final + +__version__: Final[str] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/macholib/dyld.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/macholib/dyld.pyi new file mode 100644 index 0000000..c7e94da --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/macholib/dyld.pyi @@ -0,0 +1,8 @@ +from collections.abc import Mapping +from ctypes.macholib.dylib import dylib_info as dylib_info +from ctypes.macholib.framework import framework_info as framework_info + +__all__ = ["dyld_find", "framework_find", "framework_info", "dylib_info"] + +def dyld_find(name: str, executable_path: str | None = None, env: Mapping[str, str] | None = None) -> str: ... +def framework_find(fn: str, executable_path: str | None = None, env: Mapping[str, str] | None = None) -> str: ... 
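A minimal sketch of the core ctypes surface covered by the stubs above (CDLL loading via find_library, simple C data types, create_string_buffer). Using libm and its cos symbol is an assumption that typically holds on Unix systems; find_library may return None elsewhere:

# Illustrative only -- not part of the vendored typeshed stubs above.
import ctypes
import ctypes.util

def demo() -> None:
    buf = ctypes.create_string_buffer(b"hello", 16)  # Array[c_char] of 16 bytes
    assert ctypes.sizeof(buf) == 16

    libm_path = ctypes.util.find_library("m")        # may be None on some platforms
    if libm_path is not None:
        libm = ctypes.CDLL(libm_path)
        # Declare the C signature so ctypes converts arguments and results correctly.
        libm.cos.argtypes = [ctypes.c_double]
        libm.cos.restype = ctypes.c_double
        print(libm.cos(0.0))                         # -> 1.0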
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/macholib/dylib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/macholib/dylib.pyi new file mode 100644 index 0000000..95945ed --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/macholib/dylib.pyi @@ -0,0 +1,14 @@ +from typing import TypedDict, type_check_only + +__all__ = ["dylib_info"] + +# Actual result is produced by re.match.groupdict() +@type_check_only +class _DylibInfo(TypedDict): + location: str + name: str + shortname: str + version: str | None + suffix: str | None + +def dylib_info(filename: str) -> _DylibInfo | None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/macholib/framework.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/macholib/framework.pyi new file mode 100644 index 0000000..e92bf37 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/macholib/framework.pyi @@ -0,0 +1,14 @@ +from typing import TypedDict, type_check_only + +__all__ = ["framework_info"] + +# Actual result is produced by re.match.groupdict() +@type_check_only +class _FrameworkInfo(TypedDict): + location: str + name: str + shortname: str + version: str | None + suffix: str | None + +def framework_info(filename: str) -> _FrameworkInfo | None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/util.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/util.pyi new file mode 100644 index 0000000..4f18c1d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/util.pyi @@ -0,0 +1,11 @@ +import sys + +def find_library(name: str) -> str | None: ... + +if sys.platform == "win32": + def find_msvcrt() -> str | None: ... + +if sys.version_info >= (3, 14): + def dllist() -> list[str]: ... + +def test() -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/wintypes.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/wintypes.pyi new file mode 100644 index 0000000..0f0d61a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ctypes/wintypes.pyi @@ -0,0 +1,321 @@ +import sys +from _ctypes import _CArgObject, _CField +from ctypes import ( + Array, + Structure, + _Pointer, + _SimpleCData, + c_char, + c_char_p, + c_double, + c_float, + c_int, + c_long, + c_longlong, + c_short, + c_uint, + c_ulong, + c_ulonglong, + c_ushort, + c_void_p, + c_wchar, + c_wchar_p, +) +from typing import Any, Final, TypeVar +from typing_extensions import Self, TypeAlias + +if sys.version_info >= (3, 12): + from ctypes import c_ubyte + + BYTE = c_ubyte +else: + from ctypes import c_byte + + BYTE = c_byte + +WORD = c_ushort +DWORD = c_ulong +CHAR = c_char +WCHAR = c_wchar +UINT = c_uint +INT = c_int +DOUBLE = c_double +FLOAT = c_float +BOOLEAN = BYTE +BOOL = c_long + +class VARIANT_BOOL(_SimpleCData[bool]): ... + +ULONG = c_ulong +LONG = c_long +USHORT = c_ushort +SHORT = c_short +LARGE_INTEGER = c_longlong +_LARGE_INTEGER = c_longlong +ULARGE_INTEGER = c_ulonglong +_ULARGE_INTEGER = c_ulonglong + +OLESTR = c_wchar_p +LPOLESTR = c_wchar_p +LPCOLESTR = c_wchar_p +LPWSTR = c_wchar_p +LPCWSTR = c_wchar_p +LPSTR = c_char_p +LPCSTR = c_char_p +LPVOID = c_void_p +LPCVOID = c_void_p + +# These two types are pointer-sized unsigned and signed ints, respectively. +# At runtime, they are either c_[u]long or c_[u]longlong, depending on the host's pointer size +# (they are not really separate classes). 
+class WPARAM(_SimpleCData[int]): ... +class LPARAM(_SimpleCData[int]): ... + +ATOM = WORD +LANGID = WORD +COLORREF = DWORD +LGRPID = DWORD +LCTYPE = DWORD +LCID = DWORD + +HANDLE = c_void_p +HACCEL = HANDLE +HBITMAP = HANDLE +HBRUSH = HANDLE +HCOLORSPACE = HANDLE +if sys.version_info >= (3, 14): + HCONV = HANDLE + HCONVLIST = HANDLE + HCURSOR = HANDLE + HDDEDATA = HANDLE + HDROP = HANDLE + HFILE = INT + HRESULT = LONG + HSZ = HANDLE +HDC = HANDLE +HDESK = HANDLE +HDWP = HANDLE +HENHMETAFILE = HANDLE +HFONT = HANDLE +HGDIOBJ = HANDLE +HGLOBAL = HANDLE +HHOOK = HANDLE +HICON = HANDLE +HINSTANCE = HANDLE +HKEY = HANDLE +HKL = HANDLE +HLOCAL = HANDLE +HMENU = HANDLE +HMETAFILE = HANDLE +HMODULE = HANDLE +HMONITOR = HANDLE +HPALETTE = HANDLE +HPEN = HANDLE +HRGN = HANDLE +HRSRC = HANDLE +HSTR = HANDLE +HTASK = HANDLE +HWINSTA = HANDLE +HWND = HANDLE +SC_HANDLE = HANDLE +SERVICE_STATUS_HANDLE = HANDLE + +_CIntLikeT = TypeVar("_CIntLikeT", bound=_SimpleCData[int]) +_CIntLikeField: TypeAlias = _CField[_CIntLikeT, int, _CIntLikeT | int] + +class RECT(Structure): + left: _CIntLikeField[LONG] + top: _CIntLikeField[LONG] + right: _CIntLikeField[LONG] + bottom: _CIntLikeField[LONG] + +RECTL = RECT +_RECTL = RECT +tagRECT = RECT + +class _SMALL_RECT(Structure): + Left: _CIntLikeField[SHORT] + Top: _CIntLikeField[SHORT] + Right: _CIntLikeField[SHORT] + Bottom: _CIntLikeField[SHORT] + +SMALL_RECT = _SMALL_RECT + +class _COORD(Structure): + X: _CIntLikeField[SHORT] + Y: _CIntLikeField[SHORT] + +class POINT(Structure): + x: _CIntLikeField[LONG] + y: _CIntLikeField[LONG] + +POINTL = POINT +_POINTL = POINT +tagPOINT = POINT + +class SIZE(Structure): + cx: _CIntLikeField[LONG] + cy: _CIntLikeField[LONG] + +SIZEL = SIZE +tagSIZE = SIZE + +def RGB(red: int, green: int, blue: int) -> int: ... + +class FILETIME(Structure): + dwLowDateTime: _CIntLikeField[DWORD] + dwHighDateTime: _CIntLikeField[DWORD] + +_FILETIME = FILETIME + +class MSG(Structure): + hWnd: _CField[HWND, int | None, HWND | int | None] + message: _CIntLikeField[UINT] + wParam: _CIntLikeField[WPARAM] + lParam: _CIntLikeField[LPARAM] + time: _CIntLikeField[DWORD] + pt: _CField[POINT, POINT, POINT] + +tagMSG = MSG +MAX_PATH: Final = 260 + +class WIN32_FIND_DATAA(Structure): + dwFileAttributes: _CIntLikeField[DWORD] + ftCreationTime: _CField[FILETIME, FILETIME, FILETIME] + ftLastAccessTime: _CField[FILETIME, FILETIME, FILETIME] + ftLastWriteTime: _CField[FILETIME, FILETIME, FILETIME] + nFileSizeHigh: _CIntLikeField[DWORD] + nFileSizeLow: _CIntLikeField[DWORD] + dwReserved0: _CIntLikeField[DWORD] + dwReserved1: _CIntLikeField[DWORD] + cFileName: _CField[Array[CHAR], bytes, bytes] + cAlternateFileName: _CField[Array[CHAR], bytes, bytes] + +class WIN32_FIND_DATAW(Structure): + dwFileAttributes: _CIntLikeField[DWORD] + ftCreationTime: _CField[FILETIME, FILETIME, FILETIME] + ftLastAccessTime: _CField[FILETIME, FILETIME, FILETIME] + ftLastWriteTime: _CField[FILETIME, FILETIME, FILETIME] + nFileSizeHigh: _CIntLikeField[DWORD] + nFileSizeLow: _CIntLikeField[DWORD] + dwReserved0: _CIntLikeField[DWORD] + dwReserved1: _CIntLikeField[DWORD] + cFileName: _CField[Array[WCHAR], str, str] + cAlternateFileName: _CField[Array[WCHAR], str, str] + +# These are all defined with the POINTER() function, which keeps a cache and will +# return a previously created class if it can. The self-reported __name__ +# of these classes is f"LP_{typ.__name__}", where typ is the original class +# passed in to the POINTER() function. + +# LP_c_short +class PSHORT(_Pointer[SHORT]): ... 
+ +# LP_c_ushort +class PUSHORT(_Pointer[USHORT]): ... + +PWORD = PUSHORT +LPWORD = PUSHORT + +# LP_c_long +class PLONG(_Pointer[LONG]): ... + +LPLONG = PLONG +PBOOL = PLONG +LPBOOL = PLONG + +# LP_c_ulong +class PULONG(_Pointer[ULONG]): ... + +PDWORD = PULONG +LPDWORD = PDWORD +LPCOLORREF = PDWORD +PLCID = PDWORD + +# LP_c_int (or LP_c_long if int and long have the same size) +class PINT(_Pointer[INT]): ... + +LPINT = PINT + +# LP_c_uint (or LP_c_ulong if int and long have the same size) +class PUINT(_Pointer[UINT]): ... + +LPUINT = PUINT + +# LP_c_float +class PFLOAT(_Pointer[FLOAT]): ... + +# LP_c_longlong (or LP_c_long if long and long long have the same size) +class PLARGE_INTEGER(_Pointer[LARGE_INTEGER]): ... + +# LP_c_ulonglong (or LP_c_ulong if long and long long have the same size) +class PULARGE_INTEGER(_Pointer[ULARGE_INTEGER]): ... + +# LP_c_byte types +class PBYTE(_Pointer[BYTE]): ... + +LPBYTE = PBYTE +PBOOLEAN = PBYTE + +# LP_c_char +class PCHAR(_Pointer[CHAR]): + # this is inherited from ctypes.c_char_p, kind of. + @classmethod + def from_param(cls, value: Any, /) -> Self | _CArgObject: ... + +# LP_c_wchar +class PWCHAR(_Pointer[WCHAR]): + # inherited from ctypes.c_wchar_p, kind of + @classmethod + def from_param(cls, value: Any, /) -> Self | _CArgObject: ... + +# LP_c_void_p +class PHANDLE(_Pointer[HANDLE]): ... + +LPHANDLE = PHANDLE +PHKEY = PHANDLE +LPHKL = PHANDLE +LPSC_HANDLE = PHANDLE + +# LP_FILETIME +class PFILETIME(_Pointer[FILETIME]): ... + +LPFILETIME = PFILETIME + +# LP_MSG +class PMSG(_Pointer[MSG]): ... + +LPMSG = PMSG + +# LP_POINT +class PPOINT(_Pointer[POINT]): ... + +LPPOINT = PPOINT +PPOINTL = PPOINT + +# LP_RECT +class PRECT(_Pointer[RECT]): ... + +LPRECT = PRECT +PRECTL = PRECT +LPRECTL = PRECT + +# LP_SIZE +class PSIZE(_Pointer[SIZE]): ... + +LPSIZE = PSIZE +PSIZEL = PSIZE +LPSIZEL = PSIZE + +# LP__SMALL_RECT +class PSMALL_RECT(_Pointer[SMALL_RECT]): ... + +# LP_WIN32_FIND_DATAA +class PWIN32_FIND_DATAA(_Pointer[WIN32_FIND_DATAA]): ... + +LPWIN32_FIND_DATAA = PWIN32_FIND_DATAA + +# LP_WIN32_FIND_DATAW +class PWIN32_FIND_DATAW(_Pointer[WIN32_FIND_DATAW]): ... + +LPWIN32_FIND_DATAW = PWIN32_FIND_DATAW diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/curses/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/curses/__init__.pyi new file mode 100644 index 0000000..3e32487 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/curses/__init__.pyi @@ -0,0 +1,41 @@ +import sys +from _curses import * +from _curses import window as window +from _typeshed import structseq +from collections.abc import Callable +from typing import Final, TypeVar, final, type_check_only +from typing_extensions import Concatenate, ParamSpec + +# NOTE: The _curses module is ordinarily only available on Unix, but the +# windows-curses package makes it available on Windows as well with the same +# contents. + +_T = TypeVar("_T") +_P = ParamSpec("_P") + +# available after calling `curses.initscr()` +# not `Final` as it can change during the terminal resize: +LINES: int +COLS: int + +# available after calling `curses.start_color()` +COLORS: Final[int] +COLOR_PAIRS: Final[int] + +def wrapper(func: Callable[Concatenate[window, _P], _T], /, *arg: _P.args, **kwds: _P.kwargs) -> _T: ... + +# At runtime this class is unexposed and calls itself curses.ncurses_version. +# That name would conflict with the actual curses.ncurses_version, which is +# an instance of this class. 
+@final +@type_check_only +class _ncurses_version(structseq[int], tuple[int, int, int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("major", "minor", "patch") + + @property + def major(self) -> int: ... + @property + def minor(self) -> int: ... + @property + def patch(self) -> int: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/curses/ascii.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/curses/ascii.pyi new file mode 100644 index 0000000..0234434 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/curses/ascii.pyi @@ -0,0 +1,62 @@ +from typing import Final, TypeVar + +_CharT = TypeVar("_CharT", str, int) + +NUL: Final = 0x00 +SOH: Final = 0x01 +STX: Final = 0x02 +ETX: Final = 0x03 +EOT: Final = 0x04 +ENQ: Final = 0x05 +ACK: Final = 0x06 +BEL: Final = 0x07 +BS: Final = 0x08 +TAB: Final = 0x09 +HT: Final = 0x09 +LF: Final = 0x0A +NL: Final = 0x0A +VT: Final = 0x0B +FF: Final = 0x0C +CR: Final = 0x0D +SO: Final = 0x0E +SI: Final = 0x0F +DLE: Final = 0x10 +DC1: Final = 0x11 +DC2: Final = 0x12 +DC3: Final = 0x13 +DC4: Final = 0x14 +NAK: Final = 0x15 +SYN: Final = 0x16 +ETB: Final = 0x17 +CAN: Final = 0x18 +EM: Final = 0x19 +SUB: Final = 0x1A +ESC: Final = 0x1B +FS: Final = 0x1C +GS: Final = 0x1D +RS: Final = 0x1E +US: Final = 0x1F +SP: Final = 0x20 +DEL: Final = 0x7F + +controlnames: Final[list[int]] + +def isalnum(c: str | int) -> bool: ... +def isalpha(c: str | int) -> bool: ... +def isascii(c: str | int) -> bool: ... +def isblank(c: str | int) -> bool: ... +def iscntrl(c: str | int) -> bool: ... +def isdigit(c: str | int) -> bool: ... +def isgraph(c: str | int) -> bool: ... +def islower(c: str | int) -> bool: ... +def isprint(c: str | int) -> bool: ... +def ispunct(c: str | int) -> bool: ... +def isspace(c: str | int) -> bool: ... +def isupper(c: str | int) -> bool: ... +def isxdigit(c: str | int) -> bool: ... +def isctrl(c: str | int) -> bool: ... +def ismeta(c: str | int) -> bool: ... +def ascii(c: _CharT) -> _CharT: ... +def ctrl(c: _CharT) -> _CharT: ... +def alt(c: _CharT) -> _CharT: ... +def unctrl(c: str | int) -> str: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/curses/has_key.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/curses/has_key.pyi new file mode 100644 index 0000000..3811060 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/curses/has_key.pyi @@ -0,0 +1 @@ +def has_key(ch: int | str) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/curses/panel.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/curses/panel.pyi new file mode 100644 index 0000000..861559d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/curses/panel.pyi @@ -0,0 +1 @@ +from _curses_panel import * diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/curses/textpad.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/curses/textpad.pyi new file mode 100644 index 0000000..48ef67c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/curses/textpad.pyi @@ -0,0 +1,11 @@ +from _curses import window +from collections.abc import Callable + +def rectangle(win: window, uly: int, ulx: int, lry: int, lrx: int) -> None: ... + +class Textbox: + stripspaces: bool + def __init__(self, win: window, insert_mode: bool = False) -> None: ... + def edit(self, validate: Callable[[int], int] | None = None) -> str: ... + def do_command(self, ch: str | int) -> None: ... 
+ def gather(self) -> str: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dataclasses.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dataclasses.pyi new file mode 100644 index 0000000..3a1c8cb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dataclasses.pyi @@ -0,0 +1,490 @@ +import enum +import sys +import types +from _typeshed import DataclassInstance +from builtins import type as Type # alias to avoid name clashes with fields named "type" +from collections.abc import Callable, Iterable, Mapping +from types import GenericAlias +from typing import Any, Final, Generic, Literal, Protocol, TypeVar, overload, type_check_only +from typing_extensions import Never, TypeIs + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) + +__all__ = [ + "dataclass", + "field", + "Field", + "FrozenInstanceError", + "InitVar", + "MISSING", + "fields", + "asdict", + "astuple", + "make_dataclass", + "replace", + "is_dataclass", +] + +if sys.version_info >= (3, 10): + __all__ += ["KW_ONLY"] + +_DataclassT = TypeVar("_DataclassT", bound=DataclassInstance) + +@type_check_only +class _DataclassFactory(Protocol): + def __call__( + self, + cls: type[_T], + /, + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + weakref_slot: bool = False, + ) -> type[_T]: ... + +# define _MISSING_TYPE as an enum within the type stubs, +# even though that is not really its type at runtime +# this allows us to use Literal[_MISSING_TYPE.MISSING] +# for background, see: +# https://github.com/python/typeshed/pull/5900#issuecomment-895513797 +class _MISSING_TYPE(enum.Enum): + MISSING = enum.auto() + +MISSING: Final = _MISSING_TYPE.MISSING + +if sys.version_info >= (3, 10): + class KW_ONLY: ... + +@overload +def asdict(obj: DataclassInstance) -> dict[str, Any]: ... +@overload +def asdict(obj: DataclassInstance, *, dict_factory: Callable[[list[tuple[str, Any]]], _T]) -> _T: ... +@overload +def astuple(obj: DataclassInstance) -> tuple[Any, ...]: ... +@overload +def astuple(obj: DataclassInstance, *, tuple_factory: Callable[[list[Any]], _T]) -> _T: ... + +if sys.version_info >= (3, 11): + @overload + def dataclass( + cls: type[_T], + /, + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + weakref_slot: bool = False, + ) -> type[_T]: ... + @overload + def dataclass( + cls: None = None, + /, + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + weakref_slot: bool = False, + ) -> Callable[[type[_T]], type[_T]]: ... + +elif sys.version_info >= (3, 10): + @overload + def dataclass( + cls: type[_T], + /, + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + ) -> type[_T]: ... 
+ @overload + def dataclass( + cls: None = None, + /, + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + ) -> Callable[[type[_T]], type[_T]]: ... + +else: + @overload + def dataclass( + cls: type[_T], + /, + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + ) -> type[_T]: ... + @overload + def dataclass( + cls: None = None, + /, + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + ) -> Callable[[type[_T]], type[_T]]: ... + +# See https://github.com/python/mypy/issues/10750 +@type_check_only +class _DefaultFactory(Protocol[_T_co]): + def __call__(self) -> _T_co: ... + +class Field(Generic[_T]): + if sys.version_info >= (3, 14): + __slots__ = ( + "name", + "type", + "default", + "default_factory", + "repr", + "hash", + "init", + "compare", + "metadata", + "kw_only", + "doc", + "_field_type", + ) + elif sys.version_info >= (3, 10): + __slots__ = ( + "name", + "type", + "default", + "default_factory", + "repr", + "hash", + "init", + "compare", + "metadata", + "kw_only", + "_field_type", + ) + else: + __slots__ = ("name", "type", "default", "default_factory", "repr", "hash", "init", "compare", "metadata", "_field_type") + name: str + type: Type[_T] | str | Any + default: _T | Literal[_MISSING_TYPE.MISSING] + default_factory: _DefaultFactory[_T] | Literal[_MISSING_TYPE.MISSING] + repr: bool + hash: bool | None + init: bool + compare: bool + metadata: types.MappingProxyType[Any, Any] + + if sys.version_info >= (3, 14): + doc: str | None + + if sys.version_info >= (3, 10): + kw_only: bool | Literal[_MISSING_TYPE.MISSING] + + if sys.version_info >= (3, 14): + def __init__( + self, + default: _T, + default_factory: Callable[[], _T], + init: bool, + repr: bool, + hash: bool | None, + compare: bool, + metadata: Mapping[Any, Any], + kw_only: bool, + doc: str | None, + ) -> None: ... + elif sys.version_info >= (3, 10): + def __init__( + self, + default: _T, + default_factory: Callable[[], _T], + init: bool, + repr: bool, + hash: bool | None, + compare: bool, + metadata: Mapping[Any, Any], + kw_only: bool, + ) -> None: ... + else: + def __init__( + self, + default: _T, + default_factory: Callable[[], _T], + init: bool, + repr: bool, + hash: bool | None, + compare: bool, + metadata: Mapping[Any, Any], + ) -> None: ... + + def __set_name__(self, owner: Type[Any], name: str) -> None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +# NOTE: Actual return type is 'Field[_T]', but we want to help type checkers +# to understand the magic that happens at runtime. +if sys.version_info >= (3, 14): + @overload # `default` and `default_factory` are optional and mutually exclusive. + def field( + *, + default: _T, + default_factory: Literal[_MISSING_TYPE.MISSING] = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + kw_only: bool | Literal[_MISSING_TYPE.MISSING] = ..., + doc: str | None = None, + ) -> _T: ... 
+ @overload + def field( + *, + default: Literal[_MISSING_TYPE.MISSING] = ..., + default_factory: Callable[[], _T], + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + kw_only: bool | Literal[_MISSING_TYPE.MISSING] = ..., + doc: str | None = None, + ) -> _T: ... + @overload + def field( + *, + default: Literal[_MISSING_TYPE.MISSING] = ..., + default_factory: Literal[_MISSING_TYPE.MISSING] = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + kw_only: bool | Literal[_MISSING_TYPE.MISSING] = ..., + doc: str | None = None, + ) -> Any: ... + +elif sys.version_info >= (3, 10): + @overload # `default` and `default_factory` are optional and mutually exclusive. + def field( + *, + default: _T, + default_factory: Literal[_MISSING_TYPE.MISSING] = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + kw_only: bool | Literal[_MISSING_TYPE.MISSING] = ..., + ) -> _T: ... + @overload + def field( + *, + default: Literal[_MISSING_TYPE.MISSING] = ..., + default_factory: Callable[[], _T], + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + kw_only: bool | Literal[_MISSING_TYPE.MISSING] = ..., + ) -> _T: ... + @overload + def field( + *, + default: Literal[_MISSING_TYPE.MISSING] = ..., + default_factory: Literal[_MISSING_TYPE.MISSING] = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + kw_only: bool | Literal[_MISSING_TYPE.MISSING] = ..., + ) -> Any: ... + +else: + @overload # `default` and `default_factory` are optional and mutually exclusive. + def field( + *, + default: _T, + default_factory: Literal[_MISSING_TYPE.MISSING] = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + ) -> _T: ... + @overload + def field( + *, + default: Literal[_MISSING_TYPE.MISSING] = ..., + default_factory: Callable[[], _T], + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + ) -> _T: ... + @overload + def field( + *, + default: Literal[_MISSING_TYPE.MISSING] = ..., + default_factory: Literal[_MISSING_TYPE.MISSING] = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + ) -> Any: ... + +def fields(class_or_instance: DataclassInstance | type[DataclassInstance]) -> tuple[Field[Any], ...]: ... + +# HACK: `obj: Never` typing matches if object argument is using `Any` type. +@overload +def is_dataclass(obj: Never) -> TypeIs[DataclassInstance | type[DataclassInstance]]: ... # type: ignore[narrowed-type-not-subtype] # pyright: ignore[reportGeneralTypeIssues] +@overload +def is_dataclass(obj: type) -> TypeIs[type[DataclassInstance]]: ... +@overload +def is_dataclass(obj: object) -> TypeIs[DataclassInstance | type[DataclassInstance]]: ... + +class FrozenInstanceError(AttributeError): ... + +class InitVar(Generic[_T]): + __slots__ = ("type",) + type: Type[_T] + def __init__(self, type: Type[_T]) -> None: ... + @overload + def __class_getitem__(cls, type: Type[_T]) -> InitVar[_T]: ... 
# pyright: ignore[reportInvalidTypeForm] + @overload + def __class_getitem__(cls, type: Any) -> InitVar[Any]: ... # pyright: ignore[reportInvalidTypeForm] + +if sys.version_info >= (3, 14): + def make_dataclass( + cls_name: str, + fields: Iterable[str | tuple[str, Any] | tuple[str, Any, Any]], + *, + bases: tuple[type, ...] = (), + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + weakref_slot: bool = False, + module: str | None = None, + decorator: _DataclassFactory = ..., + ) -> type: ... + +elif sys.version_info >= (3, 12): + def make_dataclass( + cls_name: str, + fields: Iterable[str | tuple[str, Any] | tuple[str, Any, Any]], + *, + bases: tuple[type, ...] = (), + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + weakref_slot: bool = False, + module: str | None = None, + ) -> type: ... + +elif sys.version_info >= (3, 11): + def make_dataclass( + cls_name: str, + fields: Iterable[str | tuple[str, Any] | tuple[str, Any, Any]], + *, + bases: tuple[type, ...] = (), + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + weakref_slot: bool = False, + ) -> type: ... + +elif sys.version_info >= (3, 10): + def make_dataclass( + cls_name: str, + fields: Iterable[str | tuple[str, Any] | tuple[str, Any, Any]], + *, + bases: tuple[type, ...] = (), + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + ) -> type: ... + +else: + def make_dataclass( + cls_name: str, + fields: Iterable[str | tuple[str, Any] | tuple[str, Any, Any]], + *, + bases: tuple[type, ...] = (), + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + ) -> type: ... + +def replace(obj: _DataclassT, /, **changes: Any) -> _DataclassT: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/datetime.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/datetime.pyi new file mode 100644 index 0000000..8a0536c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/datetime.pyi @@ -0,0 +1,346 @@ +import sys +from abc import abstractmethod +from time import struct_time +from typing import ClassVar, Final, NoReturn, SupportsIndex, final, overload, type_check_only +from typing_extensions import CapsuleType, Self, TypeAlias, deprecated, disjoint_base + +if sys.version_info >= (3, 11): + __all__ = ("date", "datetime", "time", "timedelta", "timezone", "tzinfo", "MINYEAR", "MAXYEAR", "UTC") +else: + __all__ = ("date", "datetime", "time", "timedelta", "timezone", "tzinfo", "MINYEAR", "MAXYEAR") + +MINYEAR: Final = 1 +MAXYEAR: Final = 9999 + +class tzinfo: + @abstractmethod + def tzname(self, dt: datetime | None, /) -> str | None: ... 
+ @abstractmethod + def utcoffset(self, dt: datetime | None, /) -> timedelta | None: ... + @abstractmethod + def dst(self, dt: datetime | None, /) -> timedelta | None: ... + def fromutc(self, dt: datetime, /) -> datetime: ... + +# Alias required to avoid name conflicts with date(time).tzinfo. +_TzInfo: TypeAlias = tzinfo + +@final +class timezone(tzinfo): + utc: ClassVar[timezone] + min: ClassVar[timezone] + max: ClassVar[timezone] + def __new__(cls, offset: timedelta, name: str = ...) -> Self: ... + def tzname(self, dt: datetime | None, /) -> str: ... + def utcoffset(self, dt: datetime | None, /) -> timedelta: ... + def dst(self, dt: datetime | None, /) -> None: ... + def __hash__(self) -> int: ... + def __eq__(self, value: object, /) -> bool: ... + +if sys.version_info >= (3, 11): + UTC: timezone + +# This class calls itself datetime.IsoCalendarDate. It's neither +# NamedTuple nor structseq. +@final +@type_check_only +class _IsoCalendarDate(tuple[int, int, int]): + @property + def year(self) -> int: ... + @property + def week(self) -> int: ... + @property + def weekday(self) -> int: ... + +@disjoint_base +class date: + min: ClassVar[date] + max: ClassVar[date] + resolution: ClassVar[timedelta] + def __new__(cls, year: SupportsIndex, month: SupportsIndex, day: SupportsIndex) -> Self: ... + @classmethod + def fromtimestamp(cls, timestamp: float, /) -> Self: ... + @classmethod + def today(cls) -> Self: ... + @classmethod + def fromordinal(cls, n: int, /) -> Self: ... + @classmethod + def fromisoformat(cls, date_string: str, /) -> Self: ... + @classmethod + def fromisocalendar(cls, year: int, week: int, day: int) -> Self: ... + @property + def year(self) -> int: ... + @property + def month(self) -> int: ... + @property + def day(self) -> int: ... + def ctime(self) -> str: ... + + if sys.version_info >= (3, 14): + @classmethod + def strptime(cls, date_string: str, format: str, /) -> Self: ... + + # On <3.12, the name of the parameter in the pure-Python implementation + # didn't match the name in the C implementation, + # meaning it is only *safe* to pass it as a keyword argument on 3.12+ + if sys.version_info >= (3, 12): + def strftime(self, format: str) -> str: ... + else: + def strftime(self, format: str, /) -> str: ... + + def __format__(self, fmt: str, /) -> str: ... + def isoformat(self) -> str: ... + def timetuple(self) -> struct_time: ... + def toordinal(self) -> int: ... + if sys.version_info >= (3, 13): + def __replace__(self, /, *, year: SupportsIndex = ..., month: SupportsIndex = ..., day: SupportsIndex = ...) -> Self: ... + + def replace(self, year: SupportsIndex = ..., month: SupportsIndex = ..., day: SupportsIndex = ...) -> Self: ... + def __le__(self, value: date, /) -> bool: ... + def __lt__(self, value: date, /) -> bool: ... + def __ge__(self, value: date, /) -> bool: ... + def __gt__(self, value: date, /) -> bool: ... + def __eq__(self, value: object, /) -> bool: ... + def __add__(self, value: timedelta, /) -> Self: ... + def __radd__(self, value: timedelta, /) -> Self: ... + @overload + def __sub__(self, value: datetime, /) -> NoReturn: ... + @overload + def __sub__(self, value: Self, /) -> timedelta: ... + @overload + def __sub__(self, value: timedelta, /) -> Self: ... + def __hash__(self) -> int: ... + def weekday(self) -> int: ... + def isoweekday(self) -> int: ... + def isocalendar(self) -> _IsoCalendarDate: ... 
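# Editor's note (not part of typeshed): a minimal usage sketch of the `date` API
# annotated above; the literal date below is arbitrary.
from datetime import date, timedelta

d = date.fromisoformat("2026-02-09")
iso_year, iso_week, iso_weekday = d.isocalendar()    # _IsoCalendarDate unpacks like a tuple
assert d + timedelta(days=1) == d.replace(day=10)    # date + timedelta returns a date (Self)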
+ +@disjoint_base +class time: + min: ClassVar[time] + max: ClassVar[time] + resolution: ClassVar[timedelta] + def __new__( + cls, + hour: SupportsIndex = 0, + minute: SupportsIndex = 0, + second: SupportsIndex = 0, + microsecond: SupportsIndex = 0, + tzinfo: _TzInfo | None = None, + *, + fold: int = 0, + ) -> Self: ... + @property + def hour(self) -> int: ... + @property + def minute(self) -> int: ... + @property + def second(self) -> int: ... + @property + def microsecond(self) -> int: ... + @property + def tzinfo(self) -> _TzInfo | None: ... + @property + def fold(self) -> int: ... + def __le__(self, value: time, /) -> bool: ... + def __lt__(self, value: time, /) -> bool: ... + def __ge__(self, value: time, /) -> bool: ... + def __gt__(self, value: time, /) -> bool: ... + def __eq__(self, value: object, /) -> bool: ... + def __hash__(self) -> int: ... + def isoformat(self, timespec: str = "auto") -> str: ... + @classmethod + def fromisoformat(cls, time_string: str, /) -> Self: ... + + if sys.version_info >= (3, 14): + @classmethod + def strptime(cls, date_string: str, format: str, /) -> Self: ... + + # On <3.12, the name of the parameter in the pure-Python implementation + # didn't match the name in the C implementation, + # meaning it is only *safe* to pass it as a keyword argument on 3.12+ + if sys.version_info >= (3, 12): + def strftime(self, format: str) -> str: ... + else: + def strftime(self, format: str, /) -> str: ... + + def __format__(self, fmt: str, /) -> str: ... + def utcoffset(self) -> timedelta | None: ... + def tzname(self) -> str | None: ... + def dst(self) -> timedelta | None: ... + if sys.version_info >= (3, 13): + def __replace__( + self, + /, + *, + hour: SupportsIndex = ..., + minute: SupportsIndex = ..., + second: SupportsIndex = ..., + microsecond: SupportsIndex = ..., + tzinfo: _TzInfo | None = ..., + fold: int = ..., + ) -> Self: ... + + def replace( + self, + hour: SupportsIndex = ..., + minute: SupportsIndex = ..., + second: SupportsIndex = ..., + microsecond: SupportsIndex = ..., + tzinfo: _TzInfo | None = ..., + *, + fold: int = ..., + ) -> Self: ... + +_Date: TypeAlias = date +_Time: TypeAlias = time + +@disjoint_base +class timedelta: + min: ClassVar[timedelta] + max: ClassVar[timedelta] + resolution: ClassVar[timedelta] + def __new__( + cls, + days: float = 0, + seconds: float = 0, + microseconds: float = 0, + milliseconds: float = 0, + minutes: float = 0, + hours: float = 0, + weeks: float = 0, + ) -> Self: ... + @property + def days(self) -> int: ... + @property + def seconds(self) -> int: ... + @property + def microseconds(self) -> int: ... + def total_seconds(self) -> float: ... + def __add__(self, value: timedelta, /) -> timedelta: ... + def __radd__(self, value: timedelta, /) -> timedelta: ... + def __sub__(self, value: timedelta, /) -> timedelta: ... + def __rsub__(self, value: timedelta, /) -> timedelta: ... + def __neg__(self) -> timedelta: ... + def __pos__(self) -> timedelta: ... + def __abs__(self) -> timedelta: ... + def __mul__(self, value: float, /) -> timedelta: ... + def __rmul__(self, value: float, /) -> timedelta: ... + @overload + def __floordiv__(self, value: timedelta, /) -> int: ... + @overload + def __floordiv__(self, value: int, /) -> timedelta: ... + @overload + def __truediv__(self, value: timedelta, /) -> float: ... + @overload + def __truediv__(self, value: float, /) -> timedelta: ... + def __mod__(self, value: timedelta, /) -> timedelta: ... + def __divmod__(self, value: timedelta, /) -> tuple[int, timedelta]: ... 
+ def __le__(self, value: timedelta, /) -> bool: ... + def __lt__(self, value: timedelta, /) -> bool: ... + def __ge__(self, value: timedelta, /) -> bool: ... + def __gt__(self, value: timedelta, /) -> bool: ... + def __eq__(self, value: object, /) -> bool: ... + def __bool__(self) -> bool: ... + def __hash__(self) -> int: ... + +@disjoint_base +class datetime(date): + min: ClassVar[datetime] + max: ClassVar[datetime] + def __new__( + cls, + year: SupportsIndex, + month: SupportsIndex, + day: SupportsIndex, + hour: SupportsIndex = 0, + minute: SupportsIndex = 0, + second: SupportsIndex = 0, + microsecond: SupportsIndex = 0, + tzinfo: _TzInfo | None = None, + *, + fold: int = 0, + ) -> Self: ... + @property + def hour(self) -> int: ... + @property + def minute(self) -> int: ... + @property + def second(self) -> int: ... + @property + def microsecond(self) -> int: ... + @property + def tzinfo(self) -> _TzInfo | None: ... + @property + def fold(self) -> int: ... + # On <3.12, the name of the first parameter in the pure-Python implementation + # didn't match the name in the C implementation, + # meaning it is only *safe* to pass it as a keyword argument on 3.12+ + if sys.version_info >= (3, 12): + @classmethod + def fromtimestamp(cls, timestamp: float, tz: _TzInfo | None = None) -> Self: ... + else: + @classmethod + def fromtimestamp(cls, timestamp: float, /, tz: _TzInfo | None = None) -> Self: ... + + @classmethod + @deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. by calling .fromtimestamp(datetime.timezone.utc)") + def utcfromtimestamp(cls, t: float, /) -> Self: ... + @classmethod + def now(cls, tz: _TzInfo | None = None) -> Self: ... + @classmethod + @deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. by calling .now(datetime.timezone.utc)") + def utcnow(cls) -> Self: ... + @classmethod + def combine(cls, date: _Date, time: _Time, tzinfo: _TzInfo | None = ...) -> Self: ... + def timestamp(self) -> float: ... + def utctimetuple(self) -> struct_time: ... + def date(self) -> _Date: ... + def time(self) -> _Time: ... + def timetz(self) -> _Time: ... + if sys.version_info >= (3, 13): + def __replace__( + self, + /, + *, + year: SupportsIndex = ..., + month: SupportsIndex = ..., + day: SupportsIndex = ..., + hour: SupportsIndex = ..., + minute: SupportsIndex = ..., + second: SupportsIndex = ..., + microsecond: SupportsIndex = ..., + tzinfo: _TzInfo | None = ..., + fold: int = ..., + ) -> Self: ... + + def replace( + self, + year: SupportsIndex = ..., + month: SupportsIndex = ..., + day: SupportsIndex = ..., + hour: SupportsIndex = ..., + minute: SupportsIndex = ..., + second: SupportsIndex = ..., + microsecond: SupportsIndex = ..., + tzinfo: _TzInfo | None = ..., + *, + fold: int = ..., + ) -> Self: ... + def astimezone(self, tz: _TzInfo | None = None) -> Self: ... + def isoformat(self, sep: str = "T", timespec: str = "auto") -> str: ... + @classmethod + def strptime(cls, date_string: str, format: str, /) -> Self: ... + def utcoffset(self) -> timedelta | None: ... + def tzname(self) -> str | None: ... + def dst(self) -> timedelta | None: ... + def __le__(self, value: datetime, /) -> bool: ... # type: ignore[override] + def __lt__(self, value: datetime, /) -> bool: ... # type: ignore[override] + def __ge__(self, value: datetime, /) -> bool: ... # type: ignore[override] + def __gt__(self, value: datetime, /) -> bool: ... # type: ignore[override] + def __eq__(self, value: object, /) -> bool: ... + def __hash__(self) -> int: ... 
+ @overload # type: ignore[override] + def __sub__(self, value: Self, /) -> timedelta: ... + @overload + def __sub__(self, value: timedelta, /) -> Self: ... + +datetime_CAPI: CapsuleType diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dbm/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dbm/__init__.pyi new file mode 100644 index 0000000..7cbb63c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dbm/__init__.pyi @@ -0,0 +1,105 @@ +import sys +from _typeshed import StrOrBytesPath +from collections.abc import Iterator, MutableMapping +from types import TracebackType +from typing import Literal, type_check_only +from typing_extensions import Self, TypeAlias + +__all__ = ["open", "whichdb", "error"] + +_KeyType: TypeAlias = str | bytes +_ValueType: TypeAlias = str | bytes | bytearray +_TFlags: TypeAlias = Literal[ + "r", + "w", + "c", + "n", + "rf", + "wf", + "cf", + "nf", + "rs", + "ws", + "cs", + "ns", + "ru", + "wu", + "cu", + "nu", + "rfs", + "wfs", + "cfs", + "nfs", + "rfu", + "wfu", + "cfu", + "nfu", + "rsf", + "wsf", + "csf", + "nsf", + "rsu", + "wsu", + "csu", + "nsu", + "ruf", + "wuf", + "cuf", + "nuf", + "rus", + "wus", + "cus", + "nus", + "rfsu", + "wfsu", + "cfsu", + "nfsu", + "rfus", + "wfus", + "cfus", + "nfus", + "rsfu", + "wsfu", + "csfu", + "nsfu", + "rsuf", + "wsuf", + "csuf", + "nsuf", + "rufs", + "wufs", + "cufs", + "nufs", + "rusf", + "wusf", + "cusf", + "nusf", +] + +@type_check_only +class _Database(MutableMapping[_KeyType, bytes]): + def close(self) -> None: ... + def __getitem__(self, key: _KeyType) -> bytes: ... + def __setitem__(self, key: _KeyType, value: _ValueType) -> None: ... + def __delitem__(self, key: _KeyType) -> None: ... + def __iter__(self) -> Iterator[bytes]: ... + def __len__(self) -> int: ... + def __del__(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + +# This class is not exposed. It calls itself dbm.error. +@type_check_only +class _error(Exception): ... + +error: tuple[type[_error], type[OSError]] + +if sys.version_info >= (3, 11): + def whichdb(filename: StrOrBytesPath) -> str | None: ... + def open(file: StrOrBytesPath, flag: _TFlags = "r", mode: int = 0o666) -> _Database: ... + +else: + def whichdb(filename: str) -> str | None: ... + def open(file: str, flag: _TFlags = "r", mode: int = 0o666) -> _Database: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dbm/dumb.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dbm/dumb.pyi new file mode 100644 index 0000000..1c0b775 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dbm/dumb.pyi @@ -0,0 +1,37 @@ +import sys +from _typeshed import StrOrBytesPath +from collections.abc import Iterator, MutableMapping +from types import TracebackType +from typing_extensions import Self, TypeAlias + +__all__ = ["error", "open"] + +_KeyType: TypeAlias = str | bytes +_ValueType: TypeAlias = str | bytes + +error = OSError + +# This class doesn't exist at runtime. open() can return an instance of +# any of the three implementations of dbm (dumb, gnu, ndbm), and this +# class is intended to represent the common interface supported by all three. +class _Database(MutableMapping[_KeyType, bytes]): + def __init__(self, filebasename: str, mode: str, flag: str = "c") -> None: ... + def sync(self) -> None: ... + def iterkeys(self) -> Iterator[bytes]: ... 
# undocumented + def close(self) -> None: ... + def __getitem__(self, key: _KeyType) -> bytes: ... + def __setitem__(self, key: _KeyType, val: _ValueType) -> None: ... + def __delitem__(self, key: _KeyType) -> None: ... + def __iter__(self) -> Iterator[bytes]: ... + def __len__(self) -> int: ... + def __del__(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + +if sys.version_info >= (3, 11): + def open(file: StrOrBytesPath, flag: str = "c", mode: int = 0o666) -> _Database: ... + +else: + def open(file: str, flag: str = "c", mode: int = 0o666) -> _Database: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dbm/gnu.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dbm/gnu.pyi new file mode 100644 index 0000000..2dac3d1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dbm/gnu.pyi @@ -0,0 +1 @@ +from _gdbm import * diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dbm/ndbm.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dbm/ndbm.pyi new file mode 100644 index 0000000..66c943a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dbm/ndbm.pyi @@ -0,0 +1 @@ +from _dbm import * diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dbm/sqlite3.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dbm/sqlite3.pyi new file mode 100644 index 0000000..e2fba93 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dbm/sqlite3.pyi @@ -0,0 +1,29 @@ +from _typeshed import ReadableBuffer, StrOrBytesPath, Unused +from collections.abc import Generator, MutableMapping +from typing import Final, Literal +from typing_extensions import LiteralString, Self, TypeAlias + +BUILD_TABLE: Final[LiteralString] +GET_SIZE: Final[LiteralString] +LOOKUP_KEY: Final[LiteralString] +STORE_KV: Final[LiteralString] +DELETE_KEY: Final[LiteralString] +ITER_KEYS: Final[LiteralString] + +_SqliteData: TypeAlias = str | ReadableBuffer | int | float + +class error(OSError): ... + +class _Database(MutableMapping[bytes, bytes]): + def __init__(self, path: StrOrBytesPath, /, *, flag: Literal["r", "w", "c", "n"], mode: int) -> None: ... + def __len__(self) -> int: ... + def __getitem__(self, key: _SqliteData) -> bytes: ... + def __setitem__(self, key: _SqliteData, value: _SqliteData) -> None: ... + def __delitem__(self, key: _SqliteData) -> None: ... + def __iter__(self) -> Generator[bytes]: ... + def close(self) -> None: ... + def keys(self) -> list[bytes]: ... # type: ignore[override] + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + +def open(filename: StrOrBytesPath, /, flag: Literal["r", "w", "c", "n"] = "r", mode: int = 0o666) -> _Database: ... 
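# Editor's note (not part of typeshed): a minimal usage sketch of the dbm interface
# annotated above; the database path is illustrative only.
import dbm

with dbm.open("/tmp/example-db", flag="c") as db:    # "c" creates the file if it is missing
    db[b"greeting"] = b"hello"                       # keys and values go in as str or bytes
    assert db[b"greeting"] == b"hello"               # lookups always come back as bytes
    assert b"greeting" in db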
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/decimal.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/decimal.pyi new file mode 100644 index 0000000..2e06c2d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/decimal.pyi @@ -0,0 +1,274 @@ +import numbers +import sys +from _decimal import ( + HAVE_CONTEXTVAR as HAVE_CONTEXTVAR, + HAVE_THREADS as HAVE_THREADS, + MAX_EMAX as MAX_EMAX, + MAX_PREC as MAX_PREC, + MIN_EMIN as MIN_EMIN, + MIN_ETINY as MIN_ETINY, + ROUND_05UP as ROUND_05UP, + ROUND_CEILING as ROUND_CEILING, + ROUND_DOWN as ROUND_DOWN, + ROUND_FLOOR as ROUND_FLOOR, + ROUND_HALF_DOWN as ROUND_HALF_DOWN, + ROUND_HALF_EVEN as ROUND_HALF_EVEN, + ROUND_HALF_UP as ROUND_HALF_UP, + ROUND_UP as ROUND_UP, + BasicContext as BasicContext, + DefaultContext as DefaultContext, + ExtendedContext as ExtendedContext, + __libmpdec_version__ as __libmpdec_version__, + __version__ as __version__, + getcontext as getcontext, + localcontext as localcontext, + setcontext as setcontext, +) +from collections.abc import Container, Sequence +from types import TracebackType +from typing import Any, ClassVar, Literal, NamedTuple, final, overload, type_check_only +from typing_extensions import Self, TypeAlias, disjoint_base + +if sys.version_info >= (3, 14): + from _decimal import IEEE_CONTEXT_MAX_BITS as IEEE_CONTEXT_MAX_BITS, IEEEContext as IEEEContext + +_Decimal: TypeAlias = Decimal | int +_DecimalNew: TypeAlias = Decimal | float | str | tuple[int, Sequence[int], int] +_ComparableNum: TypeAlias = Decimal | float | numbers.Rational +_TrapType: TypeAlias = type[DecimalException] + +# At runtime, these classes are implemented in C as part of "_decimal". +# However, they consider themselves to live in "decimal", so we'll put them here. + +# This type isn't exposed at runtime. It calls itself decimal.ContextManager +@final +@type_check_only +class _ContextManager: + def __init__(self, new_context: Context) -> None: ... + def __enter__(self) -> Context: ... + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + +class DecimalTuple(NamedTuple): + sign: int + digits: tuple[int, ...] + exponent: int | Literal["n", "N", "F"] + +class DecimalException(ArithmeticError): ... +class Clamped(DecimalException): ... +class InvalidOperation(DecimalException): ... +class ConversionSyntax(InvalidOperation): ... +class DivisionByZero(DecimalException, ZeroDivisionError): ... +class DivisionImpossible(InvalidOperation): ... +class DivisionUndefined(InvalidOperation, ZeroDivisionError): ... +class Inexact(DecimalException): ... +class InvalidContext(InvalidOperation): ... +class Rounded(DecimalException): ... +class Subnormal(DecimalException): ... +class Overflow(Inexact, Rounded): ... +class Underflow(Inexact, Rounded, Subnormal): ... +class FloatOperation(DecimalException, TypeError): ... + +@disjoint_base +class Decimal: + def __new__(cls, value: _DecimalNew = "0", context: Context | None = None) -> Self: ... + if sys.version_info >= (3, 14): + @classmethod + def from_number(cls, number: Decimal | float, /) -> Self: ... + + @classmethod + def from_float(cls, f: float, /) -> Self: ... + def __bool__(self) -> bool: ... + def compare(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def __hash__(self) -> int: ... + def as_tuple(self) -> DecimalTuple: ... + def as_integer_ratio(self) -> tuple[int, int]: ... + def to_eng_string(self, context: Context | None = None) -> str: ... 
+ def __abs__(self) -> Decimal: ... + def __add__(self, value: _Decimal, /) -> Decimal: ... + def __divmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: ... + def __eq__(self, value: object, /) -> bool: ... + def __floordiv__(self, value: _Decimal, /) -> Decimal: ... + def __ge__(self, value: _ComparableNum, /) -> bool: ... + def __gt__(self, value: _ComparableNum, /) -> bool: ... + def __le__(self, value: _ComparableNum, /) -> bool: ... + def __lt__(self, value: _ComparableNum, /) -> bool: ... + def __mod__(self, value: _Decimal, /) -> Decimal: ... + def __mul__(self, value: _Decimal, /) -> Decimal: ... + def __neg__(self) -> Decimal: ... + def __pos__(self) -> Decimal: ... + def __pow__(self, value: _Decimal, mod: _Decimal | None = None, /) -> Decimal: ... + def __radd__(self, value: _Decimal, /) -> Decimal: ... + def __rdivmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: ... + def __rfloordiv__(self, value: _Decimal, /) -> Decimal: ... + def __rmod__(self, value: _Decimal, /) -> Decimal: ... + def __rmul__(self, value: _Decimal, /) -> Decimal: ... + def __rsub__(self, value: _Decimal, /) -> Decimal: ... + def __rtruediv__(self, value: _Decimal, /) -> Decimal: ... + def __sub__(self, value: _Decimal, /) -> Decimal: ... + def __truediv__(self, value: _Decimal, /) -> Decimal: ... + def remainder_near(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def __float__(self) -> float: ... + def __int__(self) -> int: ... + def __trunc__(self) -> int: ... + @property + def real(self) -> Decimal: ... + @property + def imag(self) -> Decimal: ... + def conjugate(self) -> Decimal: ... + def __complex__(self) -> complex: ... + @overload + def __round__(self) -> int: ... + @overload + def __round__(self, ndigits: int, /) -> Decimal: ... + def __floor__(self) -> int: ... + def __ceil__(self) -> int: ... + def fma(self, other: _Decimal, third: _Decimal, context: Context | None = None) -> Decimal: ... + def __rpow__(self, value: _Decimal, mod: Context | None = None, /) -> Decimal: ... + def normalize(self, context: Context | None = None) -> Decimal: ... + def quantize(self, exp: _Decimal, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def same_quantum(self, other: _Decimal, context: Context | None = None) -> bool: ... + def to_integral_exact(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def to_integral_value(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def to_integral(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def sqrt(self, context: Context | None = None) -> Decimal: ... + def max(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def min(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def adjusted(self) -> int: ... + def canonical(self) -> Decimal: ... + def compare_signal(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def compare_total(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def compare_total_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def copy_abs(self) -> Decimal: ... + def copy_negate(self) -> Decimal: ... + def copy_sign(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def exp(self, context: Context | None = None) -> Decimal: ... + def is_canonical(self) -> bool: ... + def is_finite(self) -> bool: ... + def is_infinite(self) -> bool: ... 
+ def is_nan(self) -> bool: ... + def is_normal(self, context: Context | None = None) -> bool: ... + def is_qnan(self) -> bool: ... + def is_signed(self) -> bool: ... + def is_snan(self) -> bool: ... + def is_subnormal(self, context: Context | None = None) -> bool: ... + def is_zero(self) -> bool: ... + def ln(self, context: Context | None = None) -> Decimal: ... + def log10(self, context: Context | None = None) -> Decimal: ... + def logb(self, context: Context | None = None) -> Decimal: ... + def logical_and(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def logical_invert(self, context: Context | None = None) -> Decimal: ... + def logical_or(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def logical_xor(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def max_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def min_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def next_minus(self, context: Context | None = None) -> Decimal: ... + def next_plus(self, context: Context | None = None) -> Decimal: ... + def next_toward(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def number_class(self, context: Context | None = None) -> str: ... + def radix(self) -> Decimal: ... + def rotate(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def scaleb(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def shift(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def __reduce__(self) -> tuple[type[Self], tuple[str]]: ... + def __copy__(self) -> Self: ... + def __deepcopy__(self, memo: Any, /) -> Self: ... + def __format__(self, specifier: str, context: Context | None = None, /) -> str: ... + +@disjoint_base +class Context: + # TODO: Context doesn't allow you to delete *any* attributes from instances of the class at runtime, + # even settable attributes like `prec` and `rounding`, + # but that's inexpressible in the stub. + # Type checkers either ignore it or misinterpret it + # if you add a `def __delattr__(self, name: str, /) -> NoReturn` method to the stub + prec: int + rounding: str + Emin: int + Emax: int + capitals: int + clamp: int + traps: dict[_TrapType, bool] + flags: dict[_TrapType, bool] + def __init__( + self, + prec: int | None = None, + rounding: str | None = None, + Emin: int | None = None, + Emax: int | None = None, + capitals: int | None = None, + clamp: int | None = None, + flags: dict[_TrapType, bool] | Container[_TrapType] | None = None, + traps: dict[_TrapType, bool] | Container[_TrapType] | None = None, + ) -> None: ... + def __reduce__(self) -> tuple[type[Self], tuple[Any, ...]]: ... + def clear_flags(self) -> None: ... + def clear_traps(self) -> None: ... + def copy(self) -> Context: ... + def __copy__(self) -> Context: ... + # see https://github.com/python/cpython/issues/94107 + __hash__: ClassVar[None] # type: ignore[assignment] + def Etiny(self) -> int: ... + def Etop(self) -> int: ... + def create_decimal(self, num: _DecimalNew = "0", /) -> Decimal: ... + def create_decimal_from_float(self, f: float, /) -> Decimal: ... + def abs(self, x: _Decimal, /) -> Decimal: ... + def add(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def canonical(self, x: Decimal, /) -> Decimal: ... + def compare(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def compare_signal(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... 
+ def compare_total(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def compare_total_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def copy_abs(self, x: _Decimal, /) -> Decimal: ... + def copy_decimal(self, x: _Decimal, /) -> Decimal: ... + def copy_negate(self, x: _Decimal, /) -> Decimal: ... + def copy_sign(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def divide(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def divide_int(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def divmod(self, x: _Decimal, y: _Decimal, /) -> tuple[Decimal, Decimal]: ... + def exp(self, x: _Decimal, /) -> Decimal: ... + def fma(self, x: _Decimal, y: _Decimal, z: _Decimal, /) -> Decimal: ... + def is_canonical(self, x: _Decimal, /) -> bool: ... + def is_finite(self, x: _Decimal, /) -> bool: ... + def is_infinite(self, x: _Decimal, /) -> bool: ... + def is_nan(self, x: _Decimal, /) -> bool: ... + def is_normal(self, x: _Decimal, /) -> bool: ... + def is_qnan(self, x: _Decimal, /) -> bool: ... + def is_signed(self, x: _Decimal, /) -> bool: ... + def is_snan(self, x: _Decimal, /) -> bool: ... + def is_subnormal(self, x: _Decimal, /) -> bool: ... + def is_zero(self, x: _Decimal, /) -> bool: ... + def ln(self, x: _Decimal, /) -> Decimal: ... + def log10(self, x: _Decimal, /) -> Decimal: ... + def logb(self, x: _Decimal, /) -> Decimal: ... + def logical_and(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def logical_invert(self, x: _Decimal, /) -> Decimal: ... + def logical_or(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def logical_xor(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def max(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def max_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def min(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def min_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def minus(self, x: _Decimal, /) -> Decimal: ... + def multiply(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def next_minus(self, x: _Decimal, /) -> Decimal: ... + def next_plus(self, x: _Decimal, /) -> Decimal: ... + def next_toward(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def normalize(self, x: _Decimal, /) -> Decimal: ... + def number_class(self, x: _Decimal, /) -> str: ... + def plus(self, x: _Decimal, /) -> Decimal: ... + def power(self, a: _Decimal, b: _Decimal, modulo: _Decimal | None = None) -> Decimal: ... + def quantize(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def radix(self) -> Decimal: ... + def remainder(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def remainder_near(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def rotate(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def same_quantum(self, x: _Decimal, y: _Decimal, /) -> bool: ... + def scaleb(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def shift(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def sqrt(self, x: _Decimal, /) -> Decimal: ... + def subtract(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def to_eng_string(self, x: _Decimal, /) -> str: ... + def to_sci_string(self, x: _Decimal, /) -> str: ... + def to_integral_exact(self, x: _Decimal, /) -> Decimal: ... + def to_integral_value(self, x: _Decimal, /) -> Decimal: ... + def to_integral(self, x: _Decimal, /) -> Decimal: ... 
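# Editor's note (not part of typeshed): a minimal usage sketch of the Decimal/Context
# APIs annotated above; the precision and literals are arbitrary.
from decimal import ROUND_HALF_EVEN, Decimal, localcontext

with localcontext() as ctx:            # localcontext() yields the active Context
    ctx.prec = 6                       # Context.prec is a plain int attribute
    third = Decimal(1) / Decimal(3)    # arithmetic honours the local 6-digit precision
assert third == Decimal("0.333333")
assert Decimal("10.005").quantize(Decimal("0.01"), rounding=ROUND_HALF_EVEN) == Decimal("10.00")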
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/difflib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/difflib.pyi new file mode 100644 index 0000000..6efe683 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/difflib.pyi @@ -0,0 +1,139 @@ +import re +import sys +from collections.abc import Callable, Iterable, Iterator, Sequence +from types import GenericAlias +from typing import Any, AnyStr, Generic, Literal, NamedTuple, TypeVar, overload + +__all__ = [ + "get_close_matches", + "ndiff", + "restore", + "SequenceMatcher", + "Differ", + "IS_CHARACTER_JUNK", + "IS_LINE_JUNK", + "context_diff", + "unified_diff", + "diff_bytes", + "HtmlDiff", + "Match", +] + +_T = TypeVar("_T") + +class Match(NamedTuple): + a: int + b: int + size: int + +class SequenceMatcher(Generic[_T]): + @overload + def __init__(self, isjunk: Callable[[_T], bool] | None, a: Sequence[_T], b: Sequence[_T], autojunk: bool = True) -> None: ... + @overload + def __init__(self, *, a: Sequence[_T], b: Sequence[_T], autojunk: bool = True) -> None: ... + @overload + def __init__( + self: SequenceMatcher[str], + isjunk: Callable[[str], bool] | None = None, + a: Sequence[str] = "", + b: Sequence[str] = "", + autojunk: bool = True, + ) -> None: ... + def set_seqs(self, a: Sequence[_T], b: Sequence[_T]) -> None: ... + def set_seq1(self, a: Sequence[_T]) -> None: ... + def set_seq2(self, b: Sequence[_T]) -> None: ... + def find_longest_match(self, alo: int = 0, ahi: int | None = None, blo: int = 0, bhi: int | None = None) -> Match: ... + def get_matching_blocks(self) -> list[Match]: ... + def get_opcodes(self) -> list[tuple[Literal["replace", "delete", "insert", "equal"], int, int, int, int]]: ... + def get_grouped_opcodes(self, n: int = 3) -> Iterable[list[tuple[str, int, int, int, int]]]: ... + def ratio(self) -> float: ... + def quick_ratio(self) -> float: ... + def real_quick_ratio(self) -> float: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +@overload +def get_close_matches(word: AnyStr, possibilities: Iterable[AnyStr], n: int = 3, cutoff: float = 0.6) -> list[AnyStr]: ... +@overload +def get_close_matches( + word: Sequence[_T], possibilities: Iterable[Sequence[_T]], n: int = 3, cutoff: float = 0.6 +) -> list[Sequence[_T]]: ... + +class Differ: + def __init__(self, linejunk: Callable[[str], bool] | None = None, charjunk: Callable[[str], bool] | None = None) -> None: ... + def compare(self, a: Sequence[str], b: Sequence[str]) -> Iterator[str]: ... + +if sys.version_info >= (3, 14): + def IS_LINE_JUNK(line: str, pat: Callable[[str], re.Match[str] | None] | None = None) -> bool: ... + +else: + def IS_LINE_JUNK(line: str, pat: Callable[[str], re.Match[str] | None] = ...) -> bool: ... + +def IS_CHARACTER_JUNK(ch: str, ws: str = " \t") -> bool: ... # ws is undocumented +def unified_diff( + a: Sequence[str], + b: Sequence[str], + fromfile: str = "", + tofile: str = "", + fromfiledate: str = "", + tofiledate: str = "", + n: int = 3, + lineterm: str = "\n", +) -> Iterator[str]: ... +def context_diff( + a: Sequence[str], + b: Sequence[str], + fromfile: str = "", + tofile: str = "", + fromfiledate: str = "", + tofiledate: str = "", + n: int = 3, + lineterm: str = "\n", +) -> Iterator[str]: ... +def ndiff( + a: Sequence[str], + b: Sequence[str], + linejunk: Callable[[str], bool] | None = None, + charjunk: Callable[[str], bool] | None = ..., +) -> Iterator[str]: ... 
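# Editor's note (not part of typeshed): a minimal usage sketch of the module-level
# difflib helpers annotated above; the sample sequences are arbitrary.
import difflib

assert difflib.get_close_matches("appel", ["ape", "apple", "peach"], n=1) == ["apple"]
ratio = difflib.SequenceMatcher(None, "abcd", "abce").ratio()        # 2*3/8 == 0.75
patch = list(difflib.unified_diff(["a\n", "b\n"], ["a\n", "c\n"], fromfile="old", tofile="new"))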
+ +class HtmlDiff: + def __init__( + self, + tabsize: int = 8, + wrapcolumn: int | None = None, + linejunk: Callable[[str], bool] | None = None, + charjunk: Callable[[str], bool] | None = ..., + ) -> None: ... + def make_file( + self, + fromlines: Sequence[str], + tolines: Sequence[str], + fromdesc: str = "", + todesc: str = "", + context: bool = False, + numlines: int = 5, + *, + charset: str = "utf-8", + ) -> str: ... + def make_table( + self, + fromlines: Sequence[str], + tolines: Sequence[str], + fromdesc: str = "", + todesc: str = "", + context: bool = False, + numlines: int = 5, + ) -> str: ... + +def restore(delta: Iterable[str], which: int) -> Iterator[str]: ... +def diff_bytes( + dfunc: Callable[[Sequence[str], Sequence[str], str, str, str, str, int, str], Iterator[str]], + a: Iterable[bytes | bytearray], + b: Iterable[bytes | bytearray], + fromfile: bytes | bytearray = b"", + tofile: bytes | bytearray = b"", + fromfiledate: bytes | bytearray = b"", + tofiledate: bytes | bytearray = b"", + n: int = 3, + lineterm: bytes | bytearray = b"\n", +) -> Iterator[bytes]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dis.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dis.pyi new file mode 100644 index 0000000..896b50f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/dis.pyi @@ -0,0 +1,295 @@ +import sys +import types +from collections.abc import Callable, Iterator +from opcode import * # `dis` re-exports it as a part of public API +from typing import IO, Any, Final, NamedTuple +from typing_extensions import Self, TypeAlias, disjoint_base + +__all__ = [ + "code_info", + "dis", + "disassemble", + "distb", + "disco", + "findlinestarts", + "findlabels", + "show_code", + "get_instructions", + "Instruction", + "Bytecode", + "cmp_op", + "hasconst", + "hasname", + "hasjrel", + "hasjabs", + "haslocal", + "hascompare", + "hasfree", + "opname", + "opmap", + "HAVE_ARGUMENT", + "EXTENDED_ARG", + "stack_effect", +] +if sys.version_info >= (3, 13): + __all__ += ["hasjump"] + +if sys.version_info >= (3, 12): + __all__ += ["hasarg", "hasexc"] +else: + __all__ += ["hasnargs"] + +# Strictly this should not have to include Callable, but mypy doesn't use FunctionType +# for functions (python/mypy#3171) +_HaveCodeType: TypeAlias = types.MethodType | types.FunctionType | types.CodeType | type | Callable[..., Any] + +if sys.version_info >= (3, 11): + class Positions(NamedTuple): + lineno: int | None = None + end_lineno: int | None = None + col_offset: int | None = None + end_col_offset: int | None = None + +if sys.version_info >= (3, 13): + class _Instruction(NamedTuple): + opname: str + opcode: int + arg: int | None + argval: Any + argrepr: str + offset: int + start_offset: int + starts_line: bool + line_number: int | None + label: int | None = None + positions: Positions | None = None + cache_info: list[tuple[str, int, Any]] | None = None + +elif sys.version_info >= (3, 11): + class _Instruction(NamedTuple): + opname: str + opcode: int + arg: int | None + argval: Any + argrepr: str + offset: int + starts_line: int | None + is_jump_target: bool + positions: Positions | None = None + +else: + class _Instruction(NamedTuple): + opname: str + opcode: int + arg: int | None + argval: Any + argrepr: str + offset: int + starts_line: int | None + is_jump_target: bool + +if sys.version_info >= (3, 12): + class Instruction(_Instruction): + if sys.version_info < (3, 13): + def _disassemble(self, lineno_width: int = 3, mark_as_current: bool = False, 
offset_width: int = 4) -> str: ... + if sys.version_info >= (3, 13): + @property + def oparg(self) -> int: ... + @property + def baseopcode(self) -> int: ... + @property + def baseopname(self) -> str: ... + @property + def cache_offset(self) -> int: ... + @property + def end_offset(self) -> int: ... + @property + def jump_target(self) -> int: ... + @property + def is_jump_target(self) -> bool: ... + if sys.version_info >= (3, 14): + @staticmethod + def make( + opname: str, + arg: int | None, + argval: Any, + argrepr: str, + offset: int, + start_offset: int, + starts_line: bool, + line_number: int | None, + label: int | None = None, + positions: Positions | None = None, + cache_info: list[tuple[str, int, Any]] | None = None, + ) -> Instruction: ... + +else: + @disjoint_base + class Instruction(_Instruction): + def _disassemble(self, lineno_width: int = 3, mark_as_current: bool = False, offset_width: int = 4) -> str: ... + +class Bytecode: + codeobj: types.CodeType + first_line: int + if sys.version_info >= (3, 14): + show_positions: bool + # 3.14 added `show_positions` + def __init__( + self, + x: _HaveCodeType | str, + *, + first_line: int | None = None, + current_offset: int | None = None, + show_caches: bool = False, + adaptive: bool = False, + show_offsets: bool = False, + show_positions: bool = False, + ) -> None: ... + elif sys.version_info >= (3, 13): + show_offsets: bool + # 3.13 added `show_offsets` + def __init__( + self, + x: _HaveCodeType | str, + *, + first_line: int | None = None, + current_offset: int | None = None, + show_caches: bool = False, + adaptive: bool = False, + show_offsets: bool = False, + ) -> None: ... + elif sys.version_info >= (3, 11): + def __init__( + self, + x: _HaveCodeType | str, + *, + first_line: int | None = None, + current_offset: int | None = None, + show_caches: bool = False, + adaptive: bool = False, + ) -> None: ... + else: + def __init__( + self, x: _HaveCodeType | str, *, first_line: int | None = None, current_offset: int | None = None + ) -> None: ... + + if sys.version_info >= (3, 11): + @classmethod + def from_traceback(cls, tb: types.TracebackType, *, show_caches: bool = False, adaptive: bool = False) -> Self: ... + else: + @classmethod + def from_traceback(cls, tb: types.TracebackType) -> Self: ... + + def __iter__(self) -> Iterator[Instruction]: ... + def info(self) -> str: ... + def dis(self) -> str: ... + +COMPILER_FLAG_NAMES: Final[dict[int, str]] + +def findlabels(code: _HaveCodeType) -> list[int]: ... +def findlinestarts(code: _HaveCodeType) -> Iterator[tuple[int, int]]: ... +def pretty_flags(flags: int) -> str: ... +def code_info(x: _HaveCodeType | str) -> str: ... + +if sys.version_info >= (3, 14): + # 3.14 added `show_positions` + def dis( + x: _HaveCodeType | str | bytes | bytearray | None = None, + *, + file: IO[str] | None = None, + depth: int | None = None, + show_caches: bool = False, + adaptive: bool = False, + show_offsets: bool = False, + show_positions: bool = False, + ) -> None: ... + def disassemble( + co: _HaveCodeType, + lasti: int = -1, + *, + file: IO[str] | None = None, + show_caches: bool = False, + adaptive: bool = False, + show_offsets: bool = False, + show_positions: bool = False, + ) -> None: ... + def distb( + tb: types.TracebackType | None = None, + *, + file: IO[str] | None = None, + show_caches: bool = False, + adaptive: bool = False, + show_offsets: bool = False, + show_positions: bool = False, + ) -> None: ... 
+ +elif sys.version_info >= (3, 13): + # 3.13 added `show_offsets` + def dis( + x: _HaveCodeType | str | bytes | bytearray | None = None, + *, + file: IO[str] | None = None, + depth: int | None = None, + show_caches: bool = False, + adaptive: bool = False, + show_offsets: bool = False, + ) -> None: ... + def disassemble( + co: _HaveCodeType, + lasti: int = -1, + *, + file: IO[str] | None = None, + show_caches: bool = False, + adaptive: bool = False, + show_offsets: bool = False, + ) -> None: ... + def distb( + tb: types.TracebackType | None = None, + *, + file: IO[str] | None = None, + show_caches: bool = False, + adaptive: bool = False, + show_offsets: bool = False, + ) -> None: ... + +elif sys.version_info >= (3, 11): + # 3.11 added `show_caches` and `adaptive` + def dis( + x: _HaveCodeType | str | bytes | bytearray | None = None, + *, + file: IO[str] | None = None, + depth: int | None = None, + show_caches: bool = False, + adaptive: bool = False, + ) -> None: ... + def disassemble( + co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False + ) -> None: ... + def distb( + tb: types.TracebackType | None = None, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False + ) -> None: ... + +else: + def dis( + x: _HaveCodeType | str | bytes | bytearray | None = None, *, file: IO[str] | None = None, depth: int | None = None + ) -> None: ... + def disassemble(co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None) -> None: ... + def distb(tb: types.TracebackType | None = None, *, file: IO[str] | None = None) -> None: ... + +if sys.version_info >= (3, 13): + # 3.13 made `show_cache` `None` by default + def get_instructions( + x: _HaveCodeType, *, first_line: int | None = None, show_caches: bool | None = None, adaptive: bool = False + ) -> Iterator[Instruction]: ... + +elif sys.version_info >= (3, 11): + def get_instructions( + x: _HaveCodeType, *, first_line: int | None = None, show_caches: bool = False, adaptive: bool = False + ) -> Iterator[Instruction]: ... + +else: + def get_instructions(x: _HaveCodeType, *, first_line: int | None = None) -> Iterator[Instruction]: ... + +def show_code(co: _HaveCodeType, *, file: IO[str] | None = None) -> None: ... + +disco = disassemble diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/__init__.pyi new file mode 100644 index 0000000..328a5b7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/__init__.pyi @@ -0,0 +1,5 @@ +# Attempts to improve these stubs are probably not the best use of time: +# - distutils is deleted in Python 3.12 and newer +# - Most users already do not use stdlib distutils, due to setuptools monkeypatching +# - We have very little quality assurance on these stubs, since due to the two above issues +# we allowlist all distutils errors in stubtest. 
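# Editor's note (not part of typeshed): a minimal usage sketch of the dis API annotated
# in dis.pyi above; the sample function is illustrative only.
import dis

def add(a, b):
    return a + b

for instr in dis.get_instructions(add):     # yields Instruction records
    print(instr.opname, instr.argrepr)      # opcode name plus a human-readable argument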
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/_msvccompiler.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/_msvccompiler.pyi new file mode 100644 index 0000000..bba9373 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/_msvccompiler.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete +from distutils.ccompiler import CCompiler +from typing import ClassVar, Final + +PLAT_SPEC_TO_RUNTIME: Final[dict[str, str]] +PLAT_TO_VCVARS: Final[dict[str, str]] + +class MSVCCompiler(CCompiler): + compiler_type: ClassVar[str] + executables: ClassVar[dict[Incomplete, Incomplete]] + res_extension: ClassVar[str] + initialized: bool + def initialize(self, plat_name: str | None = None) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/archive_util.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/archive_util.pyi new file mode 100644 index 0000000..16684ff --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/archive_util.pyi @@ -0,0 +1,35 @@ +from _typeshed import StrOrBytesPath, StrPath +from typing import Literal, overload + +@overload +def make_archive( + base_name: str, + format: str, + root_dir: StrOrBytesPath | None = None, + base_dir: str | None = None, + verbose: bool | Literal[0, 1] = 0, + dry_run: bool | Literal[0, 1] = 0, + owner: str | None = None, + group: str | None = None, +) -> str: ... +@overload +def make_archive( + base_name: StrPath, + format: str, + root_dir: StrOrBytesPath, + base_dir: str | None = None, + verbose: bool | Literal[0, 1] = 0, + dry_run: bool | Literal[0, 1] = 0, + owner: str | None = None, + group: str | None = None, +) -> str: ... +def make_tarball( + base_name: str, + base_dir: StrPath, + compress: str | None = "gzip", + verbose: bool | Literal[0, 1] = 0, + dry_run: bool | Literal[0, 1] = 0, + owner: str | None = None, + group: str | None = None, +) -> str: ... +def make_zipfile(base_name: str, base_dir: str, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0) -> str: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/bcppcompiler.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/bcppcompiler.pyi new file mode 100644 index 0000000..3e432f9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/bcppcompiler.pyi @@ -0,0 +1,3 @@ +from distutils.ccompiler import CCompiler + +class BCPPCompiler(CCompiler): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/ccompiler.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/ccompiler.pyi new file mode 100644 index 0000000..5bff209 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/ccompiler.pyi @@ -0,0 +1,176 @@ +from _typeshed import BytesPath, StrPath, Unused +from collections.abc import Callable, Iterable, Sequence +from distutils.file_util import _BytesPathT, _StrPathT +from typing import Literal, overload +from typing_extensions import TypeAlias, TypeVarTuple, Unpack + +_Macro: TypeAlias = tuple[str] | tuple[str, str | None] +_Ts = TypeVarTuple("_Ts") + +def gen_lib_options( + compiler: CCompiler, library_dirs: list[str], runtime_library_dirs: list[str], libraries: list[str] +) -> list[str]: ... +def gen_preprocess_options(macros: list[_Macro], include_dirs: list[str]) -> list[str]: ... 
+def get_default_compiler(osname: str | None = None, platform: str | None = None) -> str: ... +def new_compiler( + plat: str | None = None, + compiler: str | None = None, + verbose: bool | Literal[0, 1] = 0, + dry_run: bool | Literal[0, 1] = 0, + force: bool | Literal[0, 1] = 0, +) -> CCompiler: ... +def show_compilers() -> None: ... + +class CCompiler: + dry_run: bool + force: bool + verbose: bool + output_dir: str | None + macros: list[_Macro] + include_dirs: list[str] + libraries: list[str] + library_dirs: list[str] + runtime_library_dirs: list[str] + objects: list[str] + def __init__( + self, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0, force: bool | Literal[0, 1] = 0 + ) -> None: ... + def add_include_dir(self, dir: str) -> None: ... + def set_include_dirs(self, dirs: list[str]) -> None: ... + def add_library(self, libname: str) -> None: ... + def set_libraries(self, libnames: list[str]) -> None: ... + def add_library_dir(self, dir: str) -> None: ... + def set_library_dirs(self, dirs: list[str]) -> None: ... + def add_runtime_library_dir(self, dir: str) -> None: ... + def set_runtime_library_dirs(self, dirs: list[str]) -> None: ... + def define_macro(self, name: str, value: str | None = None) -> None: ... + def undefine_macro(self, name: str) -> None: ... + def add_link_object(self, object: str) -> None: ... + def set_link_objects(self, objects: list[str]) -> None: ... + def detect_language(self, sources: str | list[str]) -> str | None: ... + def find_library_file(self, dirs: list[str], lib: str, debug: bool | Literal[0, 1] = 0) -> str | None: ... + def has_function( + self, + funcname: str, + includes: list[str] | None = None, + include_dirs: list[str] | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, + ) -> bool: ... + def library_dir_option(self, dir: str) -> str: ... + def library_option(self, lib: str) -> str: ... + def runtime_library_dir_option(self, dir: str) -> str: ... + def set_executables(self, **args: str) -> None: ... + def compile( + self, + sources: Sequence[StrPath], + output_dir: str | None = None, + macros: list[_Macro] | None = None, + include_dirs: list[str] | None = None, + debug: bool | Literal[0, 1] = 0, + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + depends: list[str] | None = None, + ) -> list[str]: ... + def create_static_lib( + self, + objects: list[str], + output_libname: str, + output_dir: str | None = None, + debug: bool | Literal[0, 1] = 0, + target_lang: str | None = None, + ) -> None: ... + def link( + self, + target_desc: str, + objects: list[str], + output_filename: str, + output_dir: str | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, + export_symbols: list[str] | None = None, + debug: bool | Literal[0, 1] = 0, + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + build_temp: str | None = None, + target_lang: str | None = None, + ) -> None: ... + def link_executable( + self, + objects: list[str], + output_progname: str, + output_dir: str | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, + debug: bool | Literal[0, 1] = 0, + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + target_lang: str | None = None, + ) -> None: ... 
+ def link_shared_lib( + self, + objects: list[str], + output_libname: str, + output_dir: str | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, + export_symbols: list[str] | None = None, + debug: bool | Literal[0, 1] = 0, + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + build_temp: str | None = None, + target_lang: str | None = None, + ) -> None: ... + def link_shared_object( + self, + objects: list[str], + output_filename: str, + output_dir: str | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, + export_symbols: list[str] | None = None, + debug: bool | Literal[0, 1] = 0, + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + build_temp: str | None = None, + target_lang: str | None = None, + ) -> None: ... + def preprocess( + self, + source: str, + output_file: str | None = None, + macros: list[_Macro] | None = None, + include_dirs: list[str] | None = None, + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + ) -> None: ... + @overload + def executable_filename(self, basename: str, strip_dir: Literal[0, False] = 0, output_dir: StrPath = "") -> str: ... + @overload + def executable_filename(self, basename: StrPath, strip_dir: Literal[1, True], output_dir: StrPath = "") -> str: ... + def library_filename( + self, libname: str, lib_type: str = "static", strip_dir: bool | Literal[0, 1] = 0, output_dir: StrPath = "" + ) -> str: ... + def object_filenames( + self, source_filenames: Iterable[StrPath], strip_dir: bool | Literal[0, 1] = 0, output_dir: StrPath | None = "" + ) -> list[str]: ... + @overload + def shared_object_filename(self, basename: str, strip_dir: Literal[0, False] = 0, output_dir: StrPath = "") -> str: ... + @overload + def shared_object_filename(self, basename: StrPath, strip_dir: Literal[1, True], output_dir: StrPath = "") -> str: ... + def execute( + self, func: Callable[[Unpack[_Ts]], Unused], args: tuple[Unpack[_Ts]], msg: str | None = None, level: int = 1 + ) -> None: ... + def spawn(self, cmd: Iterable[str]) -> None: ... + def mkpath(self, name: str, mode: int = 0o777) -> None: ... + @overload + def move_file(self, src: StrPath, dst: _StrPathT) -> _StrPathT | str: ... + @overload + def move_file(self, src: BytesPath, dst: _BytesPathT) -> _BytesPathT | bytes: ... + def announce(self, msg: str, level: int = 1) -> None: ... + def warn(self, msg: str) -> None: ... + def debug_print(self, msg: str) -> None: ... 
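# Editor's note (not part of typeshed): a minimal sketch of the CCompiler interface
# annotated above. distutils is removed from the stdlib in Python 3.12+, so this only
# runs on older interpreters (or via setuptools' vendored copy); all paths are assumptions.
from distutils.ccompiler import new_compiler

cc = new_compiler()                            # picks a platform-appropriate CCompiler subclass
cc.add_include_dir("include")
objects = cc.compile(["src/example.c"])        # returns the generated object-file paths
cc.link_shared_lib(objects, "example")         # produces e.g. libexample.so / example.dll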
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/cmd.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/cmd.pyi new file mode 100644 index 0000000..7f97bc3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/cmd.pyi @@ -0,0 +1,229 @@ +from _typeshed import BytesPath, StrOrBytesPath, StrPath, Unused +from abc import abstractmethod +from collections.abc import Callable, Iterable +from distutils.command.bdist import bdist +from distutils.command.bdist_dumb import bdist_dumb +from distutils.command.bdist_rpm import bdist_rpm +from distutils.command.build import build +from distutils.command.build_clib import build_clib +from distutils.command.build_ext import build_ext +from distutils.command.build_py import build_py +from distutils.command.build_scripts import build_scripts +from distutils.command.check import check +from distutils.command.clean import clean +from distutils.command.config import config +from distutils.command.install import install +from distutils.command.install_data import install_data +from distutils.command.install_egg_info import install_egg_info +from distutils.command.install_headers import install_headers +from distutils.command.install_lib import install_lib +from distutils.command.install_scripts import install_scripts +from distutils.command.register import register +from distutils.command.sdist import sdist +from distutils.command.upload import upload +from distutils.dist import Distribution +from distutils.file_util import _BytesPathT, _StrPathT +from typing import Any, ClassVar, Literal, TypeVar, overload +from typing_extensions import TypeVarTuple, Unpack + +_CommandT = TypeVar("_CommandT", bound=Command) +_Ts = TypeVarTuple("_Ts") + +class Command: + dry_run: bool | Literal[0, 1] # Exposed from __getattr_. Same as Distribution.dry_run + distribution: Distribution + # Any to work around variance issues + sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]] + def __init__(self, dist: Distribution) -> None: ... + @abstractmethod + def initialize_options(self) -> None: ... + @abstractmethod + def finalize_options(self) -> None: ... + @abstractmethod + def run(self) -> None: ... + def announce(self, msg: str, level: int = 1) -> None: ... + def debug_print(self, msg: str) -> None: ... + def ensure_string(self, option: str, default: str | None = None) -> None: ... + def ensure_string_list(self, option: str) -> None: ... + def ensure_filename(self, option: str) -> None: ... + def ensure_dirname(self, option: str) -> None: ... + def get_command_name(self) -> str: ... + def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: ... + # NOTE: This list comes directly from the distutils/command folder. Minus bdist_msi and bdist_wininst. + @overload + def get_finalized_command(self, command: Literal["bdist"], create: bool | Literal[0, 1] = 1) -> bdist: ... + @overload + def get_finalized_command(self, command: Literal["bdist_dumb"], create: bool | Literal[0, 1] = 1) -> bdist_dumb: ... + @overload + def get_finalized_command(self, command: Literal["bdist_rpm"], create: bool | Literal[0, 1] = 1) -> bdist_rpm: ... + @overload + def get_finalized_command(self, command: Literal["build"], create: bool | Literal[0, 1] = 1) -> build: ... + @overload + def get_finalized_command(self, command: Literal["build_clib"], create: bool | Literal[0, 1] = 1) -> build_clib: ... 
+ @overload + def get_finalized_command(self, command: Literal["build_ext"], create: bool | Literal[0, 1] = 1) -> build_ext: ... + @overload + def get_finalized_command(self, command: Literal["build_py"], create: bool | Literal[0, 1] = 1) -> build_py: ... + @overload + def get_finalized_command(self, command: Literal["build_scripts"], create: bool | Literal[0, 1] = 1) -> build_scripts: ... + @overload + def get_finalized_command(self, command: Literal["check"], create: bool | Literal[0, 1] = 1) -> check: ... + @overload + def get_finalized_command(self, command: Literal["clean"], create: bool | Literal[0, 1] = 1) -> clean: ... + @overload + def get_finalized_command(self, command: Literal["config"], create: bool | Literal[0, 1] = 1) -> config: ... + @overload + def get_finalized_command(self, command: Literal["install"], create: bool | Literal[0, 1] = 1) -> install: ... + @overload + def get_finalized_command(self, command: Literal["install_data"], create: bool | Literal[0, 1] = 1) -> install_data: ... + @overload + def get_finalized_command( + self, command: Literal["install_egg_info"], create: bool | Literal[0, 1] = 1 + ) -> install_egg_info: ... + @overload + def get_finalized_command(self, command: Literal["install_headers"], create: bool | Literal[0, 1] = 1) -> install_headers: ... + @overload + def get_finalized_command(self, command: Literal["install_lib"], create: bool | Literal[0, 1] = 1) -> install_lib: ... + @overload + def get_finalized_command(self, command: Literal["install_scripts"], create: bool | Literal[0, 1] = 1) -> install_scripts: ... + @overload + def get_finalized_command(self, command: Literal["register"], create: bool | Literal[0, 1] = 1) -> register: ... + @overload + def get_finalized_command(self, command: Literal["sdist"], create: bool | Literal[0, 1] = 1) -> sdist: ... + @overload + def get_finalized_command(self, command: Literal["upload"], create: bool | Literal[0, 1] = 1) -> upload: ... + @overload + def get_finalized_command(self, command: str, create: bool | Literal[0, 1] = 1) -> Command: ... + @overload + def reinitialize_command(self, command: Literal["bdist"], reinit_subcommands: bool | Literal[0, 1] = 0) -> bdist: ... + @overload + def reinitialize_command( + self, command: Literal["bdist_dumb"], reinit_subcommands: bool | Literal[0, 1] = 0 + ) -> bdist_dumb: ... + @overload + def reinitialize_command(self, command: Literal["bdist_rpm"], reinit_subcommands: bool | Literal[0, 1] = 0) -> bdist_rpm: ... + @overload + def reinitialize_command(self, command: Literal["build"], reinit_subcommands: bool | Literal[0, 1] = 0) -> build: ... + @overload + def reinitialize_command( + self, command: Literal["build_clib"], reinit_subcommands: bool | Literal[0, 1] = 0 + ) -> build_clib: ... + @overload + def reinitialize_command(self, command: Literal["build_ext"], reinit_subcommands: bool | Literal[0, 1] = 0) -> build_ext: ... + @overload + def reinitialize_command(self, command: Literal["build_py"], reinit_subcommands: bool | Literal[0, 1] = 0) -> build_py: ... + @overload + def reinitialize_command( + self, command: Literal["build_scripts"], reinit_subcommands: bool | Literal[0, 1] = 0 + ) -> build_scripts: ... + @overload + def reinitialize_command(self, command: Literal["check"], reinit_subcommands: bool | Literal[0, 1] = 0) -> check: ... + @overload + def reinitialize_command(self, command: Literal["clean"], reinit_subcommands: bool | Literal[0, 1] = 0) -> clean: ... 
+ @overload + def reinitialize_command(self, command: Literal["config"], reinit_subcommands: bool | Literal[0, 1] = 0) -> config: ... + @overload + def reinitialize_command(self, command: Literal["install"], reinit_subcommands: bool | Literal[0, 1] = 0) -> install: ... + @overload + def reinitialize_command( + self, command: Literal["install_data"], reinit_subcommands: bool | Literal[0, 1] = 0 + ) -> install_data: ... + @overload + def reinitialize_command( + self, command: Literal["install_egg_info"], reinit_subcommands: bool | Literal[0, 1] = 0 + ) -> install_egg_info: ... + @overload + def reinitialize_command( + self, command: Literal["install_headers"], reinit_subcommands: bool | Literal[0, 1] = 0 + ) -> install_headers: ... + @overload + def reinitialize_command( + self, command: Literal["install_lib"], reinit_subcommands: bool | Literal[0, 1] = 0 + ) -> install_lib: ... + @overload + def reinitialize_command( + self, command: Literal["install_scripts"], reinit_subcommands: bool | Literal[0, 1] = 0 + ) -> install_scripts: ... + @overload + def reinitialize_command(self, command: Literal["register"], reinit_subcommands: bool | Literal[0, 1] = 0) -> register: ... + @overload + def reinitialize_command(self, command: Literal["sdist"], reinit_subcommands: bool | Literal[0, 1] = 0) -> sdist: ... + @overload + def reinitialize_command(self, command: Literal["upload"], reinit_subcommands: bool | Literal[0, 1] = 0) -> upload: ... + @overload + def reinitialize_command(self, command: str, reinit_subcommands: bool | Literal[0, 1] = 0) -> Command: ... + @overload + def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool | Literal[0, 1] = 0) -> _CommandT: ... + def run_command(self, command: str) -> None: ... + def get_sub_commands(self) -> list[str]: ... + def warn(self, msg: str) -> None: ... + def execute( + self, func: Callable[[Unpack[_Ts]], Unused], args: tuple[Unpack[_Ts]], msg: str | None = None, level: int = 1 + ) -> None: ... + def mkpath(self, name: str, mode: int = 0o777) -> None: ... + @overload + def copy_file( + self, + infile: StrPath, + outfile: _StrPathT, + preserve_mode: bool | Literal[0, 1] = 1, + preserve_times: bool | Literal[0, 1] = 1, + link: str | None = None, + level: Unused = 1, + ) -> tuple[_StrPathT | str, bool]: ... + @overload + def copy_file( + self, + infile: BytesPath, + outfile: _BytesPathT, + preserve_mode: bool | Literal[0, 1] = 1, + preserve_times: bool | Literal[0, 1] = 1, + link: str | None = None, + level: Unused = 1, + ) -> tuple[_BytesPathT | bytes, bool]: ... + def copy_tree( + self, + infile: StrPath, + outfile: str, + preserve_mode: bool | Literal[0, 1] = 1, + preserve_times: bool | Literal[0, 1] = 1, + preserve_symlinks: bool | Literal[0, 1] = 0, + level: Unused = 1, + ) -> list[str]: ... + @overload + def move_file(self, src: StrPath, dst: _StrPathT, level: Unused = 1) -> _StrPathT | str: ... + @overload + def move_file(self, src: BytesPath, dst: _BytesPathT, level: Unused = 1) -> _BytesPathT | bytes: ... + def spawn(self, cmd: Iterable[str], search_path: bool | Literal[0, 1] = 1, level: Unused = 1) -> None: ... + @overload + def make_archive( + self, + base_name: str, + format: str, + root_dir: StrOrBytesPath | None = None, + base_dir: str | None = None, + owner: str | None = None, + group: str | None = None, + ) -> str: ... 
+ @overload + def make_archive( + self, + base_name: StrPath, + format: str, + root_dir: StrOrBytesPath, + base_dir: str | None = None, + owner: str | None = None, + group: str | None = None, + ) -> str: ... + def make_file( + self, + infiles: str | list[str] | tuple[str, ...], + outfile: StrOrBytesPath, + func: Callable[[Unpack[_Ts]], Unused], + args: tuple[Unpack[_Ts]], + exec_msg: str | None = None, + skip_msg: str | None = None, + level: Unused = 1, + ) -> None: ... + def ensure_finalized(self) -> None: ... + def dump_options(self, header=None, indent: str = "") -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/__init__.pyi new file mode 100644 index 0000000..4d73728 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/__init__.pyi @@ -0,0 +1,48 @@ +import sys + +from . import ( + bdist, + bdist_dumb, + bdist_rpm, + build, + build_clib, + build_ext, + build_py, + build_scripts, + check, + clean, + install, + install_data, + install_headers, + install_lib, + install_scripts, + register, + sdist, + upload, +) + +__all__ = [ + "build", + "build_py", + "build_ext", + "build_clib", + "build_scripts", + "clean", + "install", + "install_lib", + "install_headers", + "install_scripts", + "install_data", + "sdist", + "register", + "bdist", + "bdist_dumb", + "bdist_rpm", + "check", + "upload", +] + +if sys.version_info < (3, 10): + from . import bdist_wininst + + __all__ += ["bdist_wininst"] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/bdist.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/bdist.pyi new file mode 100644 index 0000000..6f99620 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/bdist.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete, Unused +from collections.abc import Callable +from typing import ClassVar + +from ..cmd import Command + +def show_formats() -> None: ... + +class bdist(Command): + description: str + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], Unused]]]] + no_format_option: ClassVar[tuple[str, ...]] + default_format: ClassVar[dict[str, str]] + format_commands: ClassVar[list[str]] + format_command: ClassVar[dict[str, tuple[str, str]]] + bdist_base: Incomplete + plat_name: Incomplete + formats: Incomplete + dist_dir: Incomplete + skip_build: int + group: Incomplete + owner: Incomplete + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... 
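A usage sketch (not part of the committed tree): the Command interface stubbed above is normally subclassed with the three abstract hooks; the command name and option below are hypothetical.

from distutils.cmd import Command
from distutils.core import setup

class hello(Command):
    description = "print a greeting"
    user_options = [("name=", "n", "who to greet")]

    def initialize_options(self) -> None:
        self.name = None

    def finalize_options(self) -> None:
        if self.name is None:
            self.name = "world"

    def run(self) -> None:
        # announce() is inherited from the Command base class stubbed above
        self.announce(f"hello, {self.name}", level=2)

if __name__ == "__main__":
    # invoked as: python setup.py hello --name=Ada
    setup(name="demo", version="0.0.0", cmdclass={"hello": hello})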
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/bdist_dumb.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/bdist_dumb.pyi new file mode 100644 index 0000000..297a0c3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/bdist_dumb.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete +from typing import ClassVar + +from ..cmd import Command + +class bdist_dumb(Command): + description: str + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + default_format: ClassVar[dict[str, str]] + bdist_dir: Incomplete + plat_name: Incomplete + format: Incomplete + keep_temp: int + dist_dir: Incomplete + skip_build: Incomplete + relative: int + owner: Incomplete + group: Incomplete + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/bdist_msi.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/bdist_msi.pyi new file mode 100644 index 0000000..d677f81 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/bdist_msi.pyi @@ -0,0 +1,45 @@ +import sys +from _typeshed import Incomplete +from typing import ClassVar, Literal + +from ..cmd import Command + +if sys.platform == "win32": + from msilib import Control, Dialog + + class PyDialog(Dialog): + def __init__(self, *args, **kw) -> None: ... + def title(self, title) -> None: ... + def back(self, title, next, name: str = "Back", active: bool | Literal[0, 1] = 1) -> Control: ... + def cancel(self, title, next, name: str = "Cancel", active: bool | Literal[0, 1] = 1) -> Control: ... + def next(self, title, next, name: str = "Next", active: bool | Literal[0, 1] = 1) -> Control: ... + def xbutton(self, name, title, next, xpos) -> Control: ... + + class bdist_msi(Command): + description: str + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + all_versions: Incomplete + other_version: str + def __init__(self, *args, **kw) -> None: ... + bdist_dir: Incomplete + plat_name: Incomplete + keep_temp: int + no_target_compile: int + no_target_optimize: int + target_version: Incomplete + dist_dir: Incomplete + skip_build: Incomplete + install_script: Incomplete + pre_install_script: Incomplete + versions: Incomplete + def initialize_options(self) -> None: ... + install_script_key: Incomplete + def finalize_options(self) -> None: ... + db: Incomplete + def run(self) -> None: ... + def add_files(self) -> None: ... + def add_find_python(self) -> None: ... + def add_scripts(self) -> None: ... + def add_ui(self) -> None: ... + def get_installer_filename(self, fullname): ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/bdist_packager.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/bdist_packager.pyi new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/bdist_rpm.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/bdist_rpm.pyi new file mode 100644 index 0000000..83b4161 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/bdist_rpm.pyi @@ -0,0 +1,53 @@ +from _typeshed import Incomplete +from typing import ClassVar + +from ..cmd import Command + +class bdist_rpm(Command): + description: str + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + negative_opt: ClassVar[dict[str, str]] + bdist_base: Incomplete + rpm_base: Incomplete + dist_dir: Incomplete + python: Incomplete + fix_python: Incomplete + spec_only: Incomplete + binary_only: Incomplete + source_only: Incomplete + use_bzip2: Incomplete + distribution_name: Incomplete + group: Incomplete + release: Incomplete + serial: Incomplete + vendor: Incomplete + packager: Incomplete + doc_files: Incomplete + changelog: Incomplete + icon: Incomplete + prep_script: Incomplete + build_script: Incomplete + install_script: Incomplete + clean_script: Incomplete + verify_script: Incomplete + pre_install: Incomplete + post_install: Incomplete + pre_uninstall: Incomplete + post_uninstall: Incomplete + prep: Incomplete + provides: Incomplete + requires: Incomplete + conflicts: Incomplete + build_requires: Incomplete + obsoletes: Incomplete + keep_temp: int + use_rpm_opt_flags: int + rpm3_mode: int + no_autoreq: int + force_arch: Incomplete + quiet: int + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def finalize_package_data(self) -> None: ... + def run(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/bdist_wininst.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/bdist_wininst.pyi new file mode 100644 index 0000000..cf333bc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/bdist_wininst.pyi @@ -0,0 +1,16 @@ +from _typeshed import StrOrBytesPath +from distutils.cmd import Command +from typing import ClassVar + +class bdist_wininst(Command): + description: ClassVar[str] + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def get_inidata(self) -> str: ... + def create_exe(self, arcname: StrOrBytesPath, fullname: str, bitmap: StrOrBytesPath | None = None) -> None: ... + def get_installer_filename(self, fullname: str) -> str: ... + def get_exe_bytes(self) -> bytes: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/build.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/build.pyi new file mode 100644 index 0000000..3ec0c96 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/build.pyi @@ -0,0 +1,34 @@ +from _typeshed import Incomplete, Unused +from collections.abc import Callable +from typing import Any, ClassVar + +from ..cmd import Command + +def show_compilers() -> None: ... 
+ +class build(Command): + description: str + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], Unused]]]] + build_base: str + build_purelib: Incomplete + build_platlib: Incomplete + build_lib: Incomplete + build_temp: Incomplete + build_scripts: Incomplete + compiler: Incomplete + plat_name: Incomplete + debug: Incomplete + force: int + executable: Incomplete + parallel: Incomplete + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def has_pure_modules(self): ... + def has_c_libraries(self): ... + def has_ext_modules(self): ... + def has_scripts(self): ... + # Any to work around variance issues + sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/build_clib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/build_clib.pyi new file mode 100644 index 0000000..69cfbe7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/build_clib.pyi @@ -0,0 +1,29 @@ +from _typeshed import Incomplete, Unused +from collections.abc import Callable +from typing import ClassVar + +from ..cmd import Command + +def show_compilers() -> None: ... + +class build_clib(Command): + description: str + user_options: ClassVar[list[tuple[str, str, str]]] + boolean_options: ClassVar[list[str]] + help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], Unused]]]] + build_clib: Incomplete + build_temp: Incomplete + libraries: Incomplete + include_dirs: Incomplete + define: Incomplete + undef: Incomplete + debug: Incomplete + force: int + compiler: Incomplete + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def check_library_list(self, libraries) -> None: ... + def get_library_names(self): ... + def get_source_files(self): ... + def build_libraries(self, libraries) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/build_ext.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/build_ext.pyi new file mode 100644 index 0000000..c5a9b5d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/build_ext.pyi @@ -0,0 +1,52 @@ +from _typeshed import Incomplete, Unused +from collections.abc import Callable +from typing import ClassVar + +from ..cmd import Command + +extension_name_re: Incomplete + +def show_compilers() -> None: ... + +class build_ext(Command): + description: str + sep_by: Incomplete + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], Unused]]]] + extensions: Incomplete + build_lib: Incomplete + plat_name: Incomplete + build_temp: Incomplete + inplace: int + package: Incomplete + include_dirs: Incomplete + define: Incomplete + undef: Incomplete + libraries: Incomplete + library_dirs: Incomplete + rpath: Incomplete + link_objects: Incomplete + debug: Incomplete + force: Incomplete + compiler: Incomplete + swig: Incomplete + swig_cpp: Incomplete + swig_opts: Incomplete + user: Incomplete + parallel: Incomplete + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... 
+ def check_extensions_list(self, extensions) -> None: ... + def get_source_files(self): ... + def get_outputs(self): ... + def build_extensions(self) -> None: ... + def build_extension(self, ext) -> None: ... + def swig_sources(self, sources, extension): ... + def find_swig(self): ... + def get_ext_fullpath(self, ext_name: str) -> str: ... + def get_ext_fullname(self, ext_name: str) -> str: ... + def get_ext_filename(self, ext_name: str) -> str: ... + def get_export_symbols(self, ext): ... + def get_libraries(self, ext): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/build_py.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/build_py.pyi new file mode 100644 index 0000000..23ed230 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/build_py.pyi @@ -0,0 +1,45 @@ +from _typeshed import Incomplete +from typing import ClassVar, Literal + +from ..cmd import Command +from ..util import Mixin2to3 as Mixin2to3 + +class build_py(Command): + description: str + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + negative_opt: ClassVar[dict[str, str]] + build_lib: Incomplete + py_modules: Incomplete + package: Incomplete + package_data: Incomplete + package_dir: Incomplete + compile: int + optimize: int + force: Incomplete + def initialize_options(self) -> None: ... + packages: Incomplete + data_files: Incomplete + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def get_data_files(self): ... + def find_data_files(self, package, src_dir): ... + def build_package_data(self) -> None: ... + def get_package_dir(self, package): ... + def check_package(self, package, package_dir): ... + def check_module(self, module, module_file): ... + def find_package_modules(self, package, package_dir): ... + def find_modules(self): ... + def find_all_modules(self): ... + def get_source_files(self): ... + def get_module_outfile(self, build_dir, package, module): ... + def get_outputs(self, include_bytecode: bool | Literal[0, 1] = 1) -> list[str]: ... + def build_module(self, module, module_file, package): ... + def build_modules(self) -> None: ... + def build_packages(self) -> None: ... + def byte_compile(self, files) -> None: ... + +class build_py_2to3(build_py, Mixin2to3): + updated_files: Incomplete + def run(self) -> None: ... + def build_module(self, module, module_file, package): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/build_scripts.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/build_scripts.pyi new file mode 100644 index 0000000..8372919 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/build_scripts.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete +from typing import ClassVar + +from ..cmd import Command +from ..util import Mixin2to3 as Mixin2to3 + +first_line_re: Incomplete + +class build_scripts(Command): + description: str + user_options: ClassVar[list[tuple[str, str, str]]] + boolean_options: ClassVar[list[str]] + build_dir: Incomplete + scripts: Incomplete + force: Incomplete + executable: Incomplete + outfiles: Incomplete + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def get_source_files(self): ... + def run(self) -> None: ... + def copy_scripts(self): ... + +class build_scripts_2to3(build_scripts, Mixin2to3): + def copy_scripts(self): ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/check.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/check.pyi new file mode 100644 index 0000000..2c807fd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/check.pyi @@ -0,0 +1,40 @@ +from _typeshed import Incomplete +from typing import Any, ClassVar, Final, Literal +from typing_extensions import TypeAlias + +from ..cmd import Command + +_Reporter: TypeAlias = Any # really docutils.utils.Reporter + +# Only defined if docutils is installed. +# Depends on a third-party stub. Since distutils is deprecated anyway, +# it's easier to just suppress the "any subclassing" error. +class SilentReporter(_Reporter): + messages: Incomplete + def __init__( + self, + source, + report_level, + halt_level, + stream: Incomplete | None = ..., + debug: bool | Literal[0, 1] = 0, + encoding: str = ..., + error_handler: str = ..., + ) -> None: ... + def system_message(self, level, message, *children, **kwargs): ... + +HAS_DOCUTILS: Final[bool] + +class check(Command): + description: str + user_options: ClassVar[list[tuple[str, str, str]]] + boolean_options: ClassVar[list[str]] + restructuredtext: int + metadata: int + strict: int + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def warn(self, msg): ... + def run(self) -> None: ... + def check_metadata(self) -> None: ... + def check_restructuredtext(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/clean.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/clean.pyi new file mode 100644 index 0000000..0f3768d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/clean.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from typing import ClassVar + +from ..cmd import Command + +class clean(Command): + description: str + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + build_base: Incomplete + build_lib: Incomplete + build_temp: Incomplete + build_scripts: Incomplete + bdist_base: Incomplete + all: Incomplete + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/config.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/config.pyi new file mode 100644 index 0000000..381e8e4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/config.pyi @@ -0,0 +1,84 @@ +from _typeshed import StrOrBytesPath +from collections.abc import Sequence +from re import Pattern +from typing import ClassVar, Final, Literal + +from ..ccompiler import CCompiler +from ..cmd import Command + +LANG_EXT: Final[dict[str, str]] + +class config(Command): + description: str + # Tuple is full name, short name, description + user_options: ClassVar[list[tuple[str, str | None, str]]] + compiler: str | CCompiler + cc: str | None + include_dirs: Sequence[str] | None + libraries: Sequence[str] | None + library_dirs: Sequence[str] | None + noisy: int + dump_source: int + temp_files: Sequence[str] + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... 
+ def try_cpp( + self, + body: str | None = None, + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + lang: str = "c", + ) -> bool: ... + def search_cpp( + self, + pattern: Pattern[str] | str, + body: str | None = None, + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + lang: str = "c", + ) -> bool: ... + def try_compile( + self, body: str, headers: Sequence[str] | None = None, include_dirs: Sequence[str] | None = None, lang: str = "c" + ) -> bool: ... + def try_link( + self, + body: str, + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + libraries: Sequence[str] | None = None, + library_dirs: Sequence[str] | None = None, + lang: str = "c", + ) -> bool: ... + def try_run( + self, + body: str, + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + libraries: Sequence[str] | None = None, + library_dirs: Sequence[str] | None = None, + lang: str = "c", + ) -> bool: ... + def check_func( + self, + func: str, + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + libraries: Sequence[str] | None = None, + library_dirs: Sequence[str] | None = None, + decl: bool | Literal[0, 1] = 0, + call: bool | Literal[0, 1] = 0, + ) -> bool: ... + def check_lib( + self, + library: str, + library_dirs: Sequence[str] | None = None, + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + other_libraries: list[str] = [], + ) -> bool: ... + def check_header( + self, header: str, include_dirs: Sequence[str] | None = None, library_dirs: Sequence[str] | None = None, lang: str = "c" + ) -> bool: ... + +def dump_file(filename: StrOrBytesPath, head=None) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/install.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/install.pyi new file mode 100644 index 0000000..1714e01 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/install.pyi @@ -0,0 +1,71 @@ +import sys +from _typeshed import Incomplete +from collections.abc import Callable +from typing import Any, ClassVar, Final, Literal + +from ..cmd import Command + +HAS_USER_SITE: Final[bool] + +SCHEME_KEYS: Final[tuple[Literal["purelib"], Literal["platlib"], Literal["headers"], Literal["scripts"], Literal["data"]]] +INSTALL_SCHEMES: Final[dict[str, dict[str, str]]] + +if sys.version_info < (3, 10): + WINDOWS_SCHEME: Final[dict[str, str]] + +class install(Command): + description: str + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + negative_opt: ClassVar[dict[str, str]] + prefix: str | None + exec_prefix: Incomplete + home: str | None + user: bool + install_base: Incomplete + install_platbase: Incomplete + root: str | None + install_purelib: Incomplete + install_platlib: Incomplete + install_headers: Incomplete + install_lib: str | None + install_scripts: Incomplete + install_data: Incomplete + install_userbase: Incomplete + install_usersite: Incomplete + compile: Incomplete + optimize: Incomplete + extra_path: Incomplete + install_path_file: int + force: int + skip_build: int + warn_dir: int + build_base: Incomplete + build_lib: Incomplete + record: Incomplete + def initialize_options(self) -> None: ... + config_vars: Incomplete + install_libbase: Incomplete + def finalize_options(self) -> None: ... + def dump_dirs(self, msg) -> None: ... 
+ def finalize_unix(self) -> None: ... + def finalize_other(self) -> None: ... + def select_scheme(self, name) -> None: ... + def expand_basedirs(self) -> None: ... + def expand_dirs(self) -> None: ... + def convert_paths(self, *names) -> None: ... + path_file: Incomplete + extra_dirs: Incomplete + def handle_extra_path(self) -> None: ... + def change_roots(self, *names) -> None: ... + def create_home_path(self) -> None: ... + def run(self) -> None: ... + def create_path_file(self) -> None: ... + def get_outputs(self): ... + def get_inputs(self): ... + def has_lib(self): ... + def has_headers(self): ... + def has_scripts(self): ... + def has_data(self): ... + # Any to work around variance issues + sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/install_data.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/install_data.pyi new file mode 100644 index 0000000..609de62 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/install_data.pyi @@ -0,0 +1,20 @@ +from _typeshed import Incomplete +from typing import ClassVar + +from ..cmd import Command + +class install_data(Command): + description: str + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + install_dir: Incomplete + outfiles: Incomplete + root: Incomplete + force: int + data_files: Incomplete + warn_dir: int + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def get_inputs(self): ... + def get_outputs(self): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/install_egg_info.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/install_egg_info.pyi new file mode 100644 index 0000000..75bb906 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/install_egg_info.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete +from typing import ClassVar + +from ..cmd import Command + +class install_egg_info(Command): + description: ClassVar[str] + user_options: ClassVar[list[tuple[str, str, str]]] + install_dir: Incomplete + def initialize_options(self) -> None: ... + target: Incomplete + outputs: Incomplete + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def get_outputs(self) -> list[str]: ... + +def safe_name(name): ... +def safe_version(version): ... +def to_filename(name): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/install_headers.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/install_headers.pyi new file mode 100644 index 0000000..3caad8a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/install_headers.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete +from typing import ClassVar + +from ..cmd import Command + +class install_headers(Command): + description: str + user_options: ClassVar[list[tuple[str, str, str]]] + boolean_options: ClassVar[list[str]] + install_dir: Incomplete + force: int + outfiles: Incomplete + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def get_inputs(self): ... + def get_outputs(self): ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/install_lib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/install_lib.pyi new file mode 100644 index 0000000..a537e25 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/install_lib.pyi @@ -0,0 +1,26 @@ +from _typeshed import Incomplete +from typing import ClassVar, Final + +from ..cmd import Command + +PYTHON_SOURCE_EXTENSION: Final = ".py" + +class install_lib(Command): + description: str + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + negative_opt: ClassVar[dict[str, str]] + install_dir: Incomplete + build_dir: Incomplete + force: int + compile: Incomplete + optimize: Incomplete + skip_build: Incomplete + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def build(self) -> None: ... + def install(self): ... + def byte_compile(self, files) -> None: ... + def get_outputs(self): ... + def get_inputs(self): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/install_scripts.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/install_scripts.pyi new file mode 100644 index 0000000..658594f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/install_scripts.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete +from typing import ClassVar + +from ..cmd import Command + +class install_scripts(Command): + description: str + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + install_dir: Incomplete + force: int + build_dir: Incomplete + skip_build: Incomplete + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + outfiles: Incomplete + def run(self) -> None: ... + def get_inputs(self): ... + def get_outputs(self): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/register.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/register.pyi new file mode 100644 index 0000000..c3bd62a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/register.pyi @@ -0,0 +1,20 @@ +from collections.abc import Callable +from typing import Any, ClassVar + +from ..config import PyPIRCCommand + +class register(PyPIRCCommand): + description: str + # Any to work around variance issues + sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]] + list_classifiers: int + strict: int + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def check_metadata(self) -> None: ... + def classifiers(self) -> None: ... + def verify_metadata(self) -> None: ... + def send_metadata(self) -> None: ... + def build_post_data(self, action): ... + def post_to_server(self, data, auth=None): ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/sdist.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/sdist.pyi new file mode 100644 index 0000000..48a1407 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/sdist.pyi @@ -0,0 +1,45 @@ +from _typeshed import Incomplete, Unused +from collections.abc import Callable +from typing import Any, ClassVar + +from ..cmd import Command + +def show_formats() -> None: ... + +class sdist(Command): + description: str + def checking_metadata(self): ... + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], Unused]]]] + negative_opt: ClassVar[dict[str, str]] + # Any to work around variance issues + sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]] + READMES: ClassVar[tuple[str, ...]] + template: Incomplete + manifest: Incomplete + use_defaults: int + prune: int + manifest_only: int + force_manifest: int + formats: Incomplete + keep_temp: int + dist_dir: Incomplete + archive_files: Incomplete + metadata_check: int + owner: Incomplete + group: Incomplete + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + filelist: Incomplete + def run(self) -> None: ... + def check_metadata(self) -> None: ... + def get_file_list(self) -> None: ... + def add_defaults(self) -> None: ... + def read_template(self) -> None: ... + def prune_file_list(self) -> None: ... + def write_manifest(self) -> None: ... + def read_manifest(self) -> None: ... + def make_release_tree(self, base_dir, files) -> None: ... + def make_distribution(self) -> None: ... + def get_archive_files(self): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/upload.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/upload.pyi new file mode 100644 index 0000000..afcfbaf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/command/upload.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from typing import ClassVar + +from ..config import PyPIRCCommand + +class upload(PyPIRCCommand): + description: ClassVar[str] + username: str + password: str + show_response: int + sign: bool + identity: Incomplete + def initialize_options(self) -> None: ... + repository: Incomplete + realm: Incomplete + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def upload_file(self, command: str, pyversion: str, filename: str) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/config.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/config.pyi new file mode 100644 index 0000000..5814a82 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/config.pyi @@ -0,0 +1,17 @@ +from abc import abstractmethod +from distutils.cmd import Command +from typing import ClassVar + +DEFAULT_PYPIRC: str + +class PyPIRCCommand(Command): + DEFAULT_REPOSITORY: ClassVar[str] + DEFAULT_REALM: ClassVar[str] + repository: None + realm: None + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + @abstractmethod + def run(self) -> None: ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/core.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/core.pyi new file mode 100644 index 0000000..174f249 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/core.pyi @@ -0,0 +1,58 @@ +from _typeshed import Incomplete, StrOrBytesPath +from collections.abc import Mapping +from distutils.cmd import Command as Command +from distutils.dist import Distribution as Distribution +from distutils.extension import Extension as Extension +from typing import Any, Final, Literal + +USAGE: Final[str] + +def gen_usage(script_name: StrOrBytesPath) -> str: ... + +setup_keywords: tuple[str, ...] +extension_keywords: tuple[str, ...] + +def setup( + *, + name: str = ..., + version: str = ..., + description: str = ..., + long_description: str = ..., + author: str = ..., + author_email: str = ..., + maintainer: str = ..., + maintainer_email: str = ..., + url: str = ..., + download_url: str = ..., + packages: list[str] = ..., + py_modules: list[str] = ..., + scripts: list[str] = ..., + ext_modules: list[Extension] = ..., + classifiers: list[str] = ..., + distclass: type[Distribution] = ..., + script_name: str = ..., + script_args: list[str] = ..., + options: Mapping[str, Incomplete] = ..., + license: str = ..., + keywords: list[str] | str = ..., + platforms: list[str] | str = ..., + cmdclass: Mapping[str, type[Command]] = ..., + data_files: list[tuple[str, list[str]]] = ..., + package_dir: Mapping[str, str] = ..., + obsoletes: list[str] = ..., + provides: list[str] = ..., + requires: list[str] = ..., + command_packages: list[str] = ..., + command_options: Mapping[str, Mapping[str, tuple[Incomplete, Incomplete]]] = ..., + package_data: Mapping[str, list[str]] = ..., + include_package_data: bool | Literal[0, 1] = ..., + libraries: list[str] = ..., + headers: list[str] = ..., + ext_package: str = ..., + include_dirs: list[str] = ..., + password: str = ..., + fullname: str = ..., + # Custom Distributions could accept more params + **attrs: Any, +) -> Distribution: ... +def run_setup(script_name: str, script_args: list[str] | None = None, stop_after: str = "run") -> Distribution: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/cygwinccompiler.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/cygwinccompiler.pyi new file mode 100644 index 0000000..80924d6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/cygwinccompiler.pyi @@ -0,0 +1,20 @@ +from distutils.unixccompiler import UnixCCompiler +from distutils.version import LooseVersion +from re import Pattern +from typing import Final, Literal + +def get_msvcr() -> list[str] | None: ... + +class CygwinCCompiler(UnixCCompiler): ... +class Mingw32CCompiler(CygwinCCompiler): ... + +CONFIG_H_OK: Final = "ok" +CONFIG_H_NOTOK: Final = "not ok" +CONFIG_H_UNCERTAIN: Final = "uncertain" + +def check_config_h() -> tuple[Literal["ok", "not ok", "uncertain"], str]: ... + +RE_VERSION: Final[Pattern[bytes]] + +def get_versions() -> tuple[LooseVersion | None, ...]: ... +def is_cygwingcc() -> bool: ... 
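A usage sketch (not part of the committed tree): a setup script passing a few of the keyword arguments stubbed for setup() above; the module, package, and source names are hypothetical, and the script is meant to be driven with a command such as `python setup.py build_ext --inplace`.

from distutils.core import Extension, setup

setup(
    name="demo",
    version="0.1.0",
    description="exercises a few keywords from the stubbed setup() signature",
    py_modules=["demo"],
    package_dir={"": "src"},
    ext_modules=[Extension("_demo", sources=["src/_demo.c"])],
)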
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/debug.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/debug.pyi new file mode 100644 index 0000000..3009588 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/debug.pyi @@ -0,0 +1,3 @@ +from typing import Final + +DEBUG: Final[str | None] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/dep_util.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/dep_util.pyi new file mode 100644 index 0000000..058377a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/dep_util.pyi @@ -0,0 +1,14 @@ +from _typeshed import StrOrBytesPath, SupportsLenAndGetItem +from collections.abc import Iterable +from typing import Literal, TypeVar + +_SourcesT = TypeVar("_SourcesT", bound=StrOrBytesPath) +_TargetsT = TypeVar("_TargetsT", bound=StrOrBytesPath) + +def newer(source: StrOrBytesPath, target: StrOrBytesPath) -> bool | Literal[1]: ... +def newer_pairwise( + sources: SupportsLenAndGetItem[_SourcesT], targets: SupportsLenAndGetItem[_TargetsT] +) -> tuple[list[_SourcesT], list[_TargetsT]]: ... +def newer_group( + sources: Iterable[StrOrBytesPath], target: StrOrBytesPath, missing: Literal["error", "ignore", "newer"] = "error" +) -> Literal[0, 1]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/dir_util.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/dir_util.pyi new file mode 100644 index 0000000..23e2c3b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/dir_util.pyi @@ -0,0 +1,23 @@ +from _typeshed import StrOrBytesPath, StrPath +from collections.abc import Iterable +from typing import Literal + +def mkpath(name: str, mode: int = 0o777, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0) -> list[str]: ... +def create_tree( + base_dir: StrPath, + files: Iterable[StrPath], + mode: int = 0o777, + verbose: bool | Literal[0, 1] = 1, + dry_run: bool | Literal[0, 1] = 0, +) -> None: ... +def copy_tree( + src: StrPath, + dst: str, + preserve_mode: bool | Literal[0, 1] = 1, + preserve_times: bool | Literal[0, 1] = 1, + preserve_symlinks: bool | Literal[0, 1] = 0, + update: bool | Literal[0, 1] = 0, + verbose: bool | Literal[0, 1] = 1, + dry_run: bool | Literal[0, 1] = 0, +) -> list[str]: ... +def remove_tree(directory: StrOrBytesPath, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0) -> None: ... 
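A usage sketch (not part of the committed tree): the directory and dependency helpers stubbed above (mkpath, copy_tree, remove_tree, newer); the paths are hypothetical and created on the fly so the snippet is self-contained.

from distutils.dep_util import newer
from distutils.dir_util import copy_tree, mkpath, remove_tree

mkpath("demo_src/data", mode=0o755)          # creates intermediate directories
with open("demo_src/data/a.txt", "w") as fh:
    fh.write("payload\n")

copied = copy_tree("demo_src", "demo_build", update=1)   # copy only out-of-date files
print(copied)                                            # ['demo_build/data/a.txt']
print(newer("demo_src/data/a.txt", "demo_build/data/a.txt"))  # False: the copy preserved mtime

remove_tree("demo_build", verbose=1)
remove_tree("demo_src", verbose=1)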
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/dist.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/dist.pyi new file mode 100644 index 0000000..412b941 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/dist.pyi @@ -0,0 +1,315 @@ +from _typeshed import Incomplete, StrOrBytesPath, StrPath, SupportsWrite +from collections.abc import Iterable, MutableMapping +from distutils.cmd import Command +from distutils.command.bdist import bdist +from distutils.command.bdist_dumb import bdist_dumb +from distutils.command.bdist_rpm import bdist_rpm +from distutils.command.build import build +from distutils.command.build_clib import build_clib +from distutils.command.build_ext import build_ext +from distutils.command.build_py import build_py +from distutils.command.build_scripts import build_scripts +from distutils.command.check import check +from distutils.command.clean import clean +from distutils.command.config import config +from distutils.command.install import install +from distutils.command.install_data import install_data +from distutils.command.install_egg_info import install_egg_info +from distutils.command.install_headers import install_headers +from distutils.command.install_lib import install_lib +from distutils.command.install_scripts import install_scripts +from distutils.command.register import register +from distutils.command.sdist import sdist +from distutils.command.upload import upload +from re import Pattern +from typing import IO, ClassVar, Literal, TypeVar, overload +from typing_extensions import TypeAlias + +command_re: Pattern[str] + +_OptionsList: TypeAlias = list[tuple[str, str | None, str, int] | tuple[str, str | None, str]] +_CommandT = TypeVar("_CommandT", bound=Command) + +class DistributionMetadata: + def __init__(self, path: StrOrBytesPath | None = None) -> None: ... + name: str | None + version: str | None + author: str | None + author_email: str | None + maintainer: str | None + maintainer_email: str | None + url: str | None + license: str | None + description: str | None + long_description: str | None + keywords: str | list[str] | None + platforms: str | list[str] | None + classifiers: str | list[str] | None + download_url: str | None + provides: list[str] | None + requires: list[str] | None + obsoletes: list[str] | None + def read_pkg_file(self, file: IO[str]) -> None: ... + def write_pkg_info(self, base_dir: StrPath) -> None: ... + def write_pkg_file(self, file: SupportsWrite[str]) -> None: ... + def get_name(self) -> str: ... + def get_version(self) -> str: ... + def get_fullname(self) -> str: ... + def get_author(self) -> str: ... + def get_author_email(self) -> str: ... + def get_maintainer(self) -> str: ... + def get_maintainer_email(self) -> str: ... + def get_contact(self) -> str: ... + def get_contact_email(self) -> str: ... + def get_url(self) -> str: ... + def get_license(self) -> str: ... + def get_licence(self) -> str: ... + def get_description(self) -> str: ... + def get_long_description(self) -> str: ... + def get_keywords(self) -> str | list[str]: ... + def get_platforms(self) -> str | list[str]: ... + def get_classifiers(self) -> str | list[str]: ... + def get_download_url(self) -> str: ... + def get_requires(self) -> list[str]: ... + def set_requires(self, value: Iterable[str]) -> None: ... + def get_provides(self) -> list[str]: ... + def set_provides(self, value: Iterable[str]) -> None: ... + def get_obsoletes(self) -> list[str]: ... 
+ def set_obsoletes(self, value: Iterable[str]) -> None: ... + +class Distribution: + cmdclass: dict[str, type[Command]] + metadata: DistributionMetadata + def __init__(self, attrs: MutableMapping[str, Incomplete] | None = None) -> None: ... + def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: ... + def parse_config_files(self, filenames: Iterable[str] | None = None) -> None: ... + global_options: ClassVar[_OptionsList] + common_usage: ClassVar[str] + display_options: ClassVar[_OptionsList] + display_option_names: ClassVar[list[str]] + negative_opt: ClassVar[dict[str, str]] + verbose: bool | Literal[0, 1] + dry_run: bool | Literal[0, 1] + help: bool | Literal[0, 1] + command_packages: list[str] | None + script_name: str | None + script_args: list[str] | None + command_options: dict[str, dict[str, tuple[str, str]]] + dist_files: list[tuple[str, str, str]] + packages: Incomplete + package_data: dict[str, list[str]] + package_dir: Incomplete + py_modules: Incomplete + libraries: Incomplete + headers: Incomplete + ext_modules: Incomplete + ext_package: Incomplete + include_dirs: Incomplete + extra_path: Incomplete + scripts: Incomplete + data_files: Incomplete + password: str + command_obj: Incomplete + have_run: Incomplete + want_user_cfg: bool + def dump_option_dicts(self, header=None, commands=None, indent: str = "") -> None: ... + def find_config_files(self): ... + commands: Incomplete + def parse_command_line(self): ... + def finalize_options(self) -> None: ... + def handle_display_options(self, option_order): ... + def print_command_list(self, commands, header, max_length) -> None: ... + def print_commands(self) -> None: ... + def get_command_list(self): ... + def get_command_packages(self): ... + # NOTE: This list comes directly from the distutils/command folder. Minus bdist_msi and bdist_wininst. + @overload + def get_command_obj(self, command: Literal["bdist"], create: Literal[1, True] = 1) -> bdist: ... + @overload + def get_command_obj(self, command: Literal["bdist_dumb"], create: Literal[1, True] = 1) -> bdist_dumb: ... + @overload + def get_command_obj(self, command: Literal["bdist_rpm"], create: Literal[1, True] = 1) -> bdist_rpm: ... + @overload + def get_command_obj(self, command: Literal["build"], create: Literal[1, True] = 1) -> build: ... + @overload + def get_command_obj(self, command: Literal["build_clib"], create: Literal[1, True] = 1) -> build_clib: ... + @overload + def get_command_obj(self, command: Literal["build_ext"], create: Literal[1, True] = 1) -> build_ext: ... + @overload + def get_command_obj(self, command: Literal["build_py"], create: Literal[1, True] = 1) -> build_py: ... + @overload + def get_command_obj(self, command: Literal["build_scripts"], create: Literal[1, True] = 1) -> build_scripts: ... + @overload + def get_command_obj(self, command: Literal["check"], create: Literal[1, True] = 1) -> check: ... + @overload + def get_command_obj(self, command: Literal["clean"], create: Literal[1, True] = 1) -> clean: ... + @overload + def get_command_obj(self, command: Literal["config"], create: Literal[1, True] = 1) -> config: ... + @overload + def get_command_obj(self, command: Literal["install"], create: Literal[1, True] = 1) -> install: ... + @overload + def get_command_obj(self, command: Literal["install_data"], create: Literal[1, True] = 1) -> install_data: ... + @overload + def get_command_obj(self, command: Literal["install_egg_info"], create: Literal[1, True] = 1) -> install_egg_info: ... 
+ @overload + def get_command_obj(self, command: Literal["install_headers"], create: Literal[1, True] = 1) -> install_headers: ... + @overload + def get_command_obj(self, command: Literal["install_lib"], create: Literal[1, True] = 1) -> install_lib: ... + @overload + def get_command_obj(self, command: Literal["install_scripts"], create: Literal[1, True] = 1) -> install_scripts: ... + @overload + def get_command_obj(self, command: Literal["register"], create: Literal[1, True] = 1) -> register: ... + @overload + def get_command_obj(self, command: Literal["sdist"], create: Literal[1, True] = 1) -> sdist: ... + @overload + def get_command_obj(self, command: Literal["upload"], create: Literal[1, True] = 1) -> upload: ... + @overload + def get_command_obj(self, command: str, create: Literal[1, True] = 1) -> Command: ... + # Not replicating the overloads for "Command | None", user may use "isinstance" + @overload + def get_command_obj(self, command: str, create: Literal[0, False]) -> Command | None: ... + @overload + def get_command_class(self, command: Literal["bdist"]) -> type[bdist]: ... + @overload + def get_command_class(self, command: Literal["bdist_dumb"]) -> type[bdist_dumb]: ... + @overload + def get_command_class(self, command: Literal["bdist_rpm"]) -> type[bdist_rpm]: ... + @overload + def get_command_class(self, command: Literal["build"]) -> type[build]: ... + @overload + def get_command_class(self, command: Literal["build_clib"]) -> type[build_clib]: ... + @overload + def get_command_class(self, command: Literal["build_ext"]) -> type[build_ext]: ... + @overload + def get_command_class(self, command: Literal["build_py"]) -> type[build_py]: ... + @overload + def get_command_class(self, command: Literal["build_scripts"]) -> type[build_scripts]: ... + @overload + def get_command_class(self, command: Literal["check"]) -> type[check]: ... + @overload + def get_command_class(self, command: Literal["clean"]) -> type[clean]: ... + @overload + def get_command_class(self, command: Literal["config"]) -> type[config]: ... + @overload + def get_command_class(self, command: Literal["install"]) -> type[install]: ... + @overload + def get_command_class(self, command: Literal["install_data"]) -> type[install_data]: ... + @overload + def get_command_class(self, command: Literal["install_egg_info"]) -> type[install_egg_info]: ... + @overload + def get_command_class(self, command: Literal["install_headers"]) -> type[install_headers]: ... + @overload + def get_command_class(self, command: Literal["install_lib"]) -> type[install_lib]: ... + @overload + def get_command_class(self, command: Literal["install_scripts"]) -> type[install_scripts]: ... + @overload + def get_command_class(self, command: Literal["register"]) -> type[register]: ... + @overload + def get_command_class(self, command: Literal["sdist"]) -> type[sdist]: ... + @overload + def get_command_class(self, command: Literal["upload"]) -> type[upload]: ... + @overload + def get_command_class(self, command: str) -> type[Command]: ... + @overload + def reinitialize_command(self, command: Literal["bdist"], reinit_subcommands: bool = False) -> bdist: ... + @overload + def reinitialize_command(self, command: Literal["bdist_dumb"], reinit_subcommands: bool = False) -> bdist_dumb: ... + @overload + def reinitialize_command(self, command: Literal["bdist_rpm"], reinit_subcommands: bool = False) -> bdist_rpm: ... + @overload + def reinitialize_command(self, command: Literal["build"], reinit_subcommands: bool = False) -> build: ... 
+ @overload + def reinitialize_command(self, command: Literal["build_clib"], reinit_subcommands: bool = False) -> build_clib: ... + @overload + def reinitialize_command(self, command: Literal["build_ext"], reinit_subcommands: bool = False) -> build_ext: ... + @overload + def reinitialize_command(self, command: Literal["build_py"], reinit_subcommands: bool = False) -> build_py: ... + @overload + def reinitialize_command(self, command: Literal["build_scripts"], reinit_subcommands: bool = False) -> build_scripts: ... + @overload + def reinitialize_command(self, command: Literal["check"], reinit_subcommands: bool = False) -> check: ... + @overload + def reinitialize_command(self, command: Literal["clean"], reinit_subcommands: bool = False) -> clean: ... + @overload + def reinitialize_command(self, command: Literal["config"], reinit_subcommands: bool = False) -> config: ... + @overload + def reinitialize_command(self, command: Literal["install"], reinit_subcommands: bool = False) -> install: ... + @overload + def reinitialize_command(self, command: Literal["install_data"], reinit_subcommands: bool = False) -> install_data: ... + @overload + def reinitialize_command( + self, command: Literal["install_egg_info"], reinit_subcommands: bool = False + ) -> install_egg_info: ... + @overload + def reinitialize_command(self, command: Literal["install_headers"], reinit_subcommands: bool = False) -> install_headers: ... + @overload + def reinitialize_command(self, command: Literal["install_lib"], reinit_subcommands: bool = False) -> install_lib: ... + @overload + def reinitialize_command(self, command: Literal["install_scripts"], reinit_subcommands: bool = False) -> install_scripts: ... + @overload + def reinitialize_command(self, command: Literal["register"], reinit_subcommands: bool = False) -> register: ... + @overload + def reinitialize_command(self, command: Literal["sdist"], reinit_subcommands: bool = False) -> sdist: ... + @overload + def reinitialize_command(self, command: Literal["upload"], reinit_subcommands: bool = False) -> upload: ... + @overload + def reinitialize_command(self, command: str, reinit_subcommands: bool = False) -> Command: ... + @overload + def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool = False) -> _CommandT: ... + def announce(self, msg, level: int = 2) -> None: ... + def run_commands(self) -> None: ... + def run_command(self, command: str) -> None: ... + def has_pure_modules(self) -> bool: ... + def has_ext_modules(self) -> bool: ... + def has_c_libraries(self) -> bool: ... + def has_modules(self) -> bool: ... + def has_headers(self) -> bool: ... + def has_scripts(self) -> bool: ... + def has_data_files(self) -> bool: ... + def is_pure(self) -> bool: ... + + # Default getter methods generated in __init__ from self.metadata._METHOD_BASENAMES + def get_name(self) -> str: ... + def get_version(self) -> str: ... + def get_fullname(self) -> str: ... + def get_author(self) -> str: ... + def get_author_email(self) -> str: ... + def get_maintainer(self) -> str: ... + def get_maintainer_email(self) -> str: ... + def get_contact(self) -> str: ... + def get_contact_email(self) -> str: ... + def get_url(self) -> str: ... + def get_license(self) -> str: ... + def get_licence(self) -> str: ... + def get_description(self) -> str: ... + def get_long_description(self) -> str: ... + def get_keywords(self) -> str | list[str]: ... + def get_platforms(self) -> str | list[str]: ... + def get_classifiers(self) -> str | list[str]: ... 
+ def get_download_url(self) -> str: ... + def get_requires(self) -> list[str]: ... + def get_provides(self) -> list[str]: ... + def get_obsoletes(self) -> list[str]: ... + + # Default attributes generated in __init__ from self.display_option_names + help_commands: bool | Literal[0] + name: str | Literal[0] + version: str | Literal[0] + fullname: str | Literal[0] + author: str | Literal[0] + author_email: str | Literal[0] + maintainer: str | Literal[0] + maintainer_email: str | Literal[0] + contact: str | Literal[0] + contact_email: str | Literal[0] + url: str | Literal[0] + license: str | Literal[0] + licence: str | Literal[0] + description: str | Literal[0] + long_description: str | Literal[0] + platforms: str | list[str] | Literal[0] + classifiers: str | list[str] | Literal[0] + keywords: str | list[str] | Literal[0] + provides: list[str] | Literal[0] + requires: list[str] | Literal[0] + obsoletes: list[str] | Literal[0] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/errors.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/errors.pyi new file mode 100644 index 0000000..e483362 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/errors.pyi @@ -0,0 +1,19 @@ +class DistutilsError(Exception): ... +class DistutilsModuleError(DistutilsError): ... +class DistutilsClassError(DistutilsError): ... +class DistutilsGetoptError(DistutilsError): ... +class DistutilsArgError(DistutilsError): ... +class DistutilsFileError(DistutilsError): ... +class DistutilsOptionError(DistutilsError): ... +class DistutilsSetupError(DistutilsError): ... +class DistutilsPlatformError(DistutilsError): ... +class DistutilsExecError(DistutilsError): ... +class DistutilsInternalError(DistutilsError): ... +class DistutilsTemplateError(DistutilsError): ... +class DistutilsByteCompileError(DistutilsError): ... +class CCompilerError(Exception): ... +class PreprocessError(CCompilerError): ... +class CompileError(CCompilerError): ... +class LibError(CCompilerError): ... +class LinkError(CCompilerError): ... +class UnknownFileError(CCompilerError): ... 
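The long runs of Literal overloads in the Distribution stub above exist so that a type checker can map a literal command name to its concrete command class instead of the generic Command. A minimal sketch of what that enables, assuming an interpreter where distutils is still importable (Python <= 3.11, or the setuptools shim); the project metadata below is illustrative:

    from distutils.dist import Distribution
    from distutils.command.build_ext import build_ext

    dist = Distribution({"name": "demo", "version": "0.1"})
    cls = dist.get_command_class("build_ext")   # checkers infer type[build_ext]
    cmd = dist.get_command_obj("build_ext")     # checkers infer build_ext, not Command
    assert cls is build_ext and isinstance(cmd, build_ext)

The trailing `command: str` overloads keep arbitrary command names usable; only the known names get the narrowed return types.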
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/extension.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/extension.pyi new file mode 100644 index 0000000..789bbf6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/extension.pyi @@ -0,0 +1,36 @@ +class Extension: + name: str + sources: list[str] + include_dirs: list[str] + define_macros: list[tuple[str, str | None]] + undef_macros: list[str] + library_dirs: list[str] + libraries: list[str] + runtime_library_dirs: list[str] + extra_objects: list[str] + extra_compile_args: list[str] + extra_link_args: list[str] + export_symbols: list[str] + swig_opts: list[str] + depends: list[str] + language: str | None + optional: bool | None + def __init__( + self, + name: str, + sources: list[str], + include_dirs: list[str] | None = None, + define_macros: list[tuple[str, str | None]] | None = None, + undef_macros: list[str] | None = None, + library_dirs: list[str] | None = None, + libraries: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, + extra_objects: list[str] | None = None, + extra_compile_args: list[str] | None = None, + extra_link_args: list[str] | None = None, + export_symbols: list[str] | None = None, + swig_opts: list[str] | None = None, + depends: list[str] | None = None, + language: str | None = None, + optional: bool | None = None, + ) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/fancy_getopt.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/fancy_getopt.pyi new file mode 100644 index 0000000..f3fa2a1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/fancy_getopt.pyi @@ -0,0 +1,44 @@ +from collections.abc import Iterable, Mapping +from getopt import _SliceableT, _StrSequenceT_co +from re import Pattern +from typing import Any, Final, overload +from typing_extensions import TypeAlias + +_Option: TypeAlias = tuple[str, str | None, str] + +longopt_pat: Final = r"[a-zA-Z](?:[a-zA-Z0-9-]*)" +longopt_re: Final[Pattern[str]] +neg_alias_re: Final[Pattern[str]] +longopt_xlate: Final[dict[int, int]] + +class FancyGetopt: + def __init__(self, option_table: list[_Option] | None = None) -> None: ... + # TODO: kinda wrong, `getopt(object=object())` is invalid + @overload + def getopt( + self, args: _SliceableT[_StrSequenceT_co] | None = None, object: None = None + ) -> tuple[_StrSequenceT_co, OptionDummy]: ... + @overload + def getopt( + self, args: _SliceableT[_StrSequenceT_co] | None, object: Any + ) -> _StrSequenceT_co: ... # object is an arbitrary non-slotted object + def get_option_order(self) -> list[tuple[str, str]]: ... + def generate_help(self, header: str | None = None) -> list[str]: ... + +# Same note as FancyGetopt.getopt +@overload +def fancy_getopt( + options: list[_Option], negative_opt: Mapping[_Option, _Option], object: None, args: _SliceableT[_StrSequenceT_co] | None +) -> tuple[_StrSequenceT_co, OptionDummy]: ... +@overload +def fancy_getopt( + options: list[_Option], negative_opt: Mapping[_Option, _Option], object: Any, args: _SliceableT[_StrSequenceT_co] | None +) -> _StrSequenceT_co: ... + +WS_TRANS: Final[dict[int, str]] + +def wrap_text(text: str, width: int) -> list[str]: ... +def translate_longopt(opt: str) -> str: ... + +class OptionDummy: + def __init__(self, options: Iterable[str] = []) -> None: ... 
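The overloads on FancyGetopt.getopt and fancy_getopt above encode two calling conventions: with object=None the parsed option values come back on a fresh OptionDummy, otherwise they are written onto the object you pass in and only the leftover arguments are returned. A minimal sketch of the first form; the option table and argv values are illustrative:

    from distutils.fancy_getopt import FancyGetopt

    table = [("verbose", "v", "run verbosely"), ("output=", "o", "output directory")]
    parser = FancyGetopt(table)
    args, opts = parser.getopt(["-v", "--output=dist/", "positional"])
    print(args)           # ['positional']
    print(opts.verbose)   # 1      (boolean options are stored as 0/1)
    print(opts.output)    # dist/  (long option names become attributes, '-' -> '_')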
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/file_util.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/file_util.pyi new file mode 100644 index 0000000..c763f91 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/file_util.pyi @@ -0,0 +1,38 @@ +from _typeshed import BytesPath, StrOrBytesPath, StrPath +from collections.abc import Iterable +from typing import Literal, TypeVar, overload + +_StrPathT = TypeVar("_StrPathT", bound=StrPath) +_BytesPathT = TypeVar("_BytesPathT", bound=BytesPath) + +@overload +def copy_file( + src: StrPath, + dst: _StrPathT, + preserve_mode: bool | Literal[0, 1] = 1, + preserve_times: bool | Literal[0, 1] = 1, + update: bool | Literal[0, 1] = 0, + link: str | None = None, + verbose: bool | Literal[0, 1] = 1, + dry_run: bool | Literal[0, 1] = 0, +) -> tuple[_StrPathT | str, bool]: ... +@overload +def copy_file( + src: BytesPath, + dst: _BytesPathT, + preserve_mode: bool | Literal[0, 1] = 1, + preserve_times: bool | Literal[0, 1] = 1, + update: bool | Literal[0, 1] = 0, + link: str | None = None, + verbose: bool | Literal[0, 1] = 1, + dry_run: bool | Literal[0, 1] = 0, +) -> tuple[_BytesPathT | bytes, bool]: ... +@overload +def move_file( + src: StrPath, dst: _StrPathT, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0 +) -> _StrPathT | str: ... +@overload +def move_file( + src: BytesPath, dst: _BytesPathT, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0 +) -> _BytesPathT | bytes: ... +def write_file(filename: StrOrBytesPath, contents: Iterable[str]) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/filelist.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/filelist.pyi new file mode 100644 index 0000000..607a78a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/filelist.pyi @@ -0,0 +1,58 @@ +from collections.abc import Iterable +from re import Pattern +from typing import Literal, overload + +# class is entirely undocumented +class FileList: + allfiles: Iterable[str] | None + files: list[str] + def __init__(self, warn: None = None, debug_print: None = None) -> None: ... + def set_allfiles(self, allfiles: Iterable[str]) -> None: ... + def findall(self, dir: str = ".") -> None: ... + def debug_print(self, msg: str) -> None: ... + def append(self, item: str) -> None: ... + def extend(self, items: Iterable[str]) -> None: ... + def sort(self) -> None: ... + def remove_duplicates(self) -> None: ... + def process_template_line(self, line: str) -> None: ... + @overload + def include_pattern( + self, pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[0, False] = 0 + ) -> bool: ... + @overload + def include_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> bool: ... + @overload + def include_pattern( + self, + pattern: str | Pattern[str], + anchor: bool | Literal[0, 1] = 1, + prefix: str | None = None, + is_regex: bool | Literal[0, 1] = 0, + ) -> bool: ... + @overload + def exclude_pattern( + self, pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[0, False] = 0 + ) -> bool: ... + @overload + def exclude_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> bool: ... 
+ @overload + def exclude_pattern( + self, + pattern: str | Pattern[str], + anchor: bool | Literal[0, 1] = 1, + prefix: str | None = None, + is_regex: bool | Literal[0, 1] = 0, + ) -> bool: ... + +def findall(dir: str = ".") -> list[str]: ... +def glob_to_re(pattern: str) -> str: ... +@overload +def translate_pattern( + pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[False, 0] = 0 +) -> Pattern[str]: ... +@overload +def translate_pattern(pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> Pattern[str]: ... +@overload +def translate_pattern( + pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: bool | Literal[0, 1] = 0 +) -> Pattern[str]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/log.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/log.pyi new file mode 100644 index 0000000..7246dd6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/log.pyi @@ -0,0 +1,26 @@ +from typing import Any, Final + +DEBUG: Final = 1 +INFO: Final = 2 +WARN: Final = 3 +ERROR: Final = 4 +FATAL: Final = 5 + +class Log: + def __init__(self, threshold: int = 3) -> None: ... + # Arbitrary msg args' type depends on the format method + def log(self, level: int, msg: str, *args: Any) -> None: ... + def debug(self, msg: str, *args: Any) -> None: ... + def info(self, msg: str, *args: Any) -> None: ... + def warn(self, msg: str, *args: Any) -> None: ... + def error(self, msg: str, *args: Any) -> None: ... + def fatal(self, msg: str, *args: Any) -> None: ... + +def log(level: int, msg: str, *args: Any) -> None: ... +def debug(msg: str, *args: Any) -> None: ... +def info(msg: str, *args: Any) -> None: ... +def warn(msg: str, *args: Any) -> None: ... +def error(msg: str, *args: Any) -> None: ... +def fatal(msg: str, *args: Any) -> None: ... +def set_threshold(level: int) -> int: ... +def set_verbosity(v: int) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/msvccompiler.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/msvccompiler.pyi new file mode 100644 index 0000000..80872a6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/msvccompiler.pyi @@ -0,0 +1,3 @@ +from distutils.ccompiler import CCompiler + +class MSVCCompiler(CCompiler): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/spawn.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/spawn.pyi new file mode 100644 index 0000000..ae07a49 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/spawn.pyi @@ -0,0 +1,10 @@ +from collections.abc import Iterable +from typing import Literal + +def spawn( + cmd: Iterable[str], + search_path: bool | Literal[0, 1] = 1, + verbose: bool | Literal[0, 1] = 0, + dry_run: bool | Literal[0, 1] = 0, +) -> None: ... +def find_executable(executable: str, path: str | None = None) -> str | None: ... 
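spawn and find_executable above are the low-level process helpers the distutils commands build on. A minimal sketch; "python3" is an illustrative executable name and may not be present on every system:

    from distutils.spawn import find_executable, spawn

    exe = find_executable("python3")           # absolute path, or None if not on PATH
    if exe is not None:
        spawn([exe, "--version"], dry_run=1)   # dry_run=1 only logs the command, nothing runs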
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/sysconfig.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/sysconfig.pyi new file mode 100644 index 0000000..4a9c45e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/sysconfig.pyi @@ -0,0 +1,33 @@ +import sys +from collections.abc import Mapping +from distutils.ccompiler import CCompiler +from typing import Final, Literal, overload +from typing_extensions import deprecated + +PREFIX: Final[str] +EXEC_PREFIX: Final[str] +BASE_PREFIX: Final[str] +BASE_EXEC_PREFIX: Final[str] +project_base: Final[str] +python_build: Final[bool] + +def expand_makefile_vars(s: str, vars: Mapping[str, str]) -> str: ... +@overload +@deprecated("SO is deprecated, use EXT_SUFFIX. Support is removed in Python 3.11") +def get_config_var(name: Literal["SO"]) -> int | str | None: ... +@overload +def get_config_var(name: str) -> int | str | None: ... +@overload +def get_config_vars() -> dict[str, str | int]: ... +@overload +def get_config_vars(arg: str, /, *args: str) -> list[str | int]: ... +def get_config_h_filename() -> str: ... +def get_makefile_filename() -> str: ... +def get_python_inc(plat_specific: bool | Literal[0, 1] = 0, prefix: str | None = None) -> str: ... +def get_python_lib( + plat_specific: bool | Literal[0, 1] = 0, standard_lib: bool | Literal[0, 1] = 0, prefix: str | None = None +) -> str: ... +def customize_compiler(compiler: CCompiler) -> None: ... + +if sys.version_info < (3, 10): + def get_python_version() -> str: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/text_file.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/text_file.pyi new file mode 100644 index 0000000..54951af --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/text_file.pyi @@ -0,0 +1,21 @@ +from typing import IO, Literal + +class TextFile: + def __init__( + self, + filename: str | None = None, + file: IO[str] | None = None, + *, + strip_comments: bool | Literal[0, 1] = ..., + lstrip_ws: bool | Literal[0, 1] = ..., + rstrip_ws: bool | Literal[0, 1] = ..., + skip_blanks: bool | Literal[0, 1] = ..., + join_lines: bool | Literal[0, 1] = ..., + collapse_join: bool | Literal[0, 1] = ..., + ) -> None: ... + def open(self, filename: str) -> None: ... + def close(self) -> None: ... + def warn(self, msg: str, line: list[int] | tuple[int, int] | int | None = None) -> None: ... + def readline(self) -> str | None: ... + def readlines(self) -> list[str]: ... + def unreadline(self, line: str) -> str: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/unixccompiler.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/unixccompiler.pyi new file mode 100644 index 0000000..e1d4434 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/unixccompiler.pyi @@ -0,0 +1,3 @@ +from distutils.ccompiler import CCompiler + +class UnixCCompiler(CCompiler): ... 
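The sysconfig stub above covers the queries build tooling usually needs: header and library locations plus Makefile/config variables. A minimal sketch; the printed values depend on the interpreter running it, and the EXT_SUFFIX shown in the comment is only an example:

    from distutils import sysconfig

    print(sysconfig.get_python_inc())                  # directory containing Python.h
    print(sysconfig.get_python_lib(plat_specific=0))   # pure-Python site-packages path
    print(sysconfig.get_config_var("EXT_SUFFIX"))      # e.g. '.cpython-312-x86_64-linux-gnu.so'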
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/util.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/util.pyi new file mode 100644 index 0000000..0e1bb41 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/util.pyi @@ -0,0 +1,53 @@ +from _typeshed import StrPath, Unused +from collections.abc import Callable, Container, Iterable, Mapping +from typing import Any, Literal +from typing_extensions import TypeVarTuple, Unpack + +_Ts = TypeVarTuple("_Ts") + +def get_host_platform() -> str: ... +def get_platform() -> str: ... +def convert_path(pathname: str) -> str: ... +def change_root(new_root: StrPath, pathname: StrPath) -> str: ... +def check_environ() -> None: ... +def subst_vars(s: str, local_vars: Mapping[str, str]) -> None: ... +def split_quoted(s: str) -> list[str]: ... +def execute( + func: Callable[[Unpack[_Ts]], Unused], + args: tuple[Unpack[_Ts]], + msg: str | None = None, + verbose: bool | Literal[0, 1] = 0, + dry_run: bool | Literal[0, 1] = 0, +) -> None: ... +def strtobool(val: str) -> Literal[0, 1]: ... +def byte_compile( + py_files: list[str], + optimize: int = 0, + force: bool | Literal[0, 1] = 0, + prefix: str | None = None, + base_dir: str | None = None, + verbose: bool | Literal[0, 1] = 1, + dry_run: bool | Literal[0, 1] = 0, + direct: bool | None = None, +) -> None: ... +def rfc822_escape(header: str) -> str: ... +def run_2to3( + files: Iterable[str], + fixer_names: Iterable[str] | None = None, + options: Mapping[str, Any] | None = None, + explicit: Unused = None, +) -> None: ... +def copydir_run_2to3( + src: StrPath, + dest: StrPath, + template: str | None = None, + fixer_names: Iterable[str] | None = None, + options: Mapping[str, Any] | None = None, + explicit: Container[str] | None = None, +) -> list[str]: ... + +class Mixin2to3: + fixer_names: Iterable[str] | None + options: Mapping[str, Any] | None + explicit: Container[str] | None + def run_2to3(self, files: Iterable[str]) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/version.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/version.pyi new file mode 100644 index 0000000..47da65e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/distutils/version.pyi @@ -0,0 +1,36 @@ +from abc import abstractmethod +from re import Pattern +from typing_extensions import Self + +class Version: + def __eq__(self, other: object) -> bool: ... + def __lt__(self, other: Self | str) -> bool: ... + def __le__(self, other: Self | str) -> bool: ... + def __gt__(self, other: Self | str) -> bool: ... + def __ge__(self, other: Self | str) -> bool: ... + @abstractmethod + def __init__(self, vstring: str | None = None) -> None: ... + @abstractmethod + def parse(self, vstring: str) -> Self: ... + @abstractmethod + def __str__(self) -> str: ... + @abstractmethod + def _cmp(self, other: Self | str) -> bool: ... + +class StrictVersion(Version): + version_re: Pattern[str] + version: tuple[int, int, int] + prerelease: tuple[str, int] | None + def __init__(self, vstring: str | None = None) -> None: ... + def parse(self, vstring: str) -> Self: ... + def __str__(self) -> str: ... # noqa: Y029 + def _cmp(self, other: Self | str) -> bool: ... + +class LooseVersion(Version): + component_re: Pattern[str] + vstring: str + version: tuple[str | int, ...] + def __init__(self, vstring: str | None = None) -> None: ... + def parse(self, vstring: str) -> Self: ... 
+ def __str__(self) -> str: ... # noqa: Y029 + def _cmp(self, other: Self | str) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/doctest.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/doctest.pyi new file mode 100644 index 0000000..1bb96e1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/doctest.pyi @@ -0,0 +1,262 @@ +import sys +import types +import unittest +from _typeshed import ExcInfo +from collections.abc import Callable +from typing import Any, Final, NamedTuple, type_check_only +from typing_extensions import Self, TypeAlias + +__all__ = [ + "register_optionflag", + "DONT_ACCEPT_TRUE_FOR_1", + "DONT_ACCEPT_BLANKLINE", + "NORMALIZE_WHITESPACE", + "ELLIPSIS", + "SKIP", + "IGNORE_EXCEPTION_DETAIL", + "COMPARISON_FLAGS", + "REPORT_UDIFF", + "REPORT_CDIFF", + "REPORT_NDIFF", + "REPORT_ONLY_FIRST_FAILURE", + "REPORTING_FLAGS", + "FAIL_FAST", + "Example", + "DocTest", + "DocTestParser", + "DocTestFinder", + "DocTestRunner", + "OutputChecker", + "DocTestFailure", + "UnexpectedException", + "DebugRunner", + "testmod", + "testfile", + "run_docstring_examples", + "DocTestSuite", + "DocFileSuite", + "set_unittest_reportflags", + "script_from_examples", + "testsource", + "debug_src", + "debug", +] + +if sys.version_info >= (3, 13): + @type_check_only + class _TestResultsBase(NamedTuple): + failed: int + attempted: int + + class TestResults(_TestResultsBase): + def __new__(cls, failed: int, attempted: int, *, skipped: int = 0) -> Self: ... + skipped: int + +else: + class TestResults(NamedTuple): + failed: int + attempted: int + +OPTIONFLAGS_BY_NAME: Final[dict[str, int]] + +def register_optionflag(name: str) -> int: ... + +DONT_ACCEPT_TRUE_FOR_1: Final = 1 +DONT_ACCEPT_BLANKLINE: Final = 2 +NORMALIZE_WHITESPACE: Final = 4 +ELLIPSIS: Final = 8 +SKIP: Final = 16 +IGNORE_EXCEPTION_DETAIL: Final = 32 + +COMPARISON_FLAGS: Final = 63 + +REPORT_UDIFF: Final = 64 +REPORT_CDIFF: Final = 128 +REPORT_NDIFF: Final = 256 +REPORT_ONLY_FIRST_FAILURE: Final = 512 +FAIL_FAST: Final = 1024 + +REPORTING_FLAGS: Final = 1984 + +BLANKLINE_MARKER: Final = "" +ELLIPSIS_MARKER: Final = "..." + +class Example: + source: str + want: str + exc_msg: str | None + lineno: int + indent: int + options: dict[int, bool] + def __init__( + self, + source: str, + want: str, + exc_msg: str | None = None, + lineno: int = 0, + indent: int = 0, + options: dict[int, bool] | None = None, + ) -> None: ... + def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + +class DocTest: + examples: list[Example] + globs: dict[str, Any] + name: str + filename: str | None + lineno: int | None + docstring: str | None + def __init__( + self, + examples: list[Example], + globs: dict[str, Any], + name: str, + filename: str | None, + lineno: int | None, + docstring: str | None, + ) -> None: ... + def __hash__(self) -> int: ... + def __lt__(self, other: DocTest) -> bool: ... + def __eq__(self, other: object) -> bool: ... + +class DocTestParser: + def parse(self, string: str, name: str = "") -> list[str | Example]: ... + def get_doctest(self, string: str, globs: dict[str, Any], name: str, filename: str | None, lineno: int | None) -> DocTest: ... + def get_examples(self, string: str, name: str = "") -> list[Example]: ... + +class DocTestFinder: + def __init__( + self, verbose: bool = False, parser: DocTestParser = ..., recurse: bool = True, exclude_empty: bool = True + ) -> None: ... 
+ def find( + self, + obj: object, + name: str | None = None, + module: None | bool | types.ModuleType = None, + globs: dict[str, Any] | None = None, + extraglobs: dict[str, Any] | None = None, + ) -> list[DocTest]: ... + +_Out: TypeAlias = Callable[[str], object] + +class DocTestRunner: + DIVIDER: str + optionflags: int + original_optionflags: int + tries: int + failures: int + if sys.version_info >= (3, 13): + skips: int + test: DocTest + def __init__(self, checker: OutputChecker | None = None, verbose: bool | None = None, optionflags: int = 0) -> None: ... + def report_start(self, out: _Out, test: DocTest, example: Example) -> None: ... + def report_success(self, out: _Out, test: DocTest, example: Example, got: str) -> None: ... + def report_failure(self, out: _Out, test: DocTest, example: Example, got: str) -> None: ... + def report_unexpected_exception(self, out: _Out, test: DocTest, example: Example, exc_info: ExcInfo) -> None: ... + def run( + self, test: DocTest, compileflags: int | None = None, out: _Out | None = None, clear_globs: bool = True + ) -> TestResults: ... + def summarize(self, verbose: bool | None = None) -> TestResults: ... + def merge(self, other: DocTestRunner) -> None: ... + +class OutputChecker: + def check_output(self, want: str, got: str, optionflags: int) -> bool: ... + def output_difference(self, example: Example, got: str, optionflags: int) -> str: ... + +class DocTestFailure(Exception): + test: DocTest + example: Example + got: str + def __init__(self, test: DocTest, example: Example, got: str) -> None: ... + +class UnexpectedException(Exception): + test: DocTest + example: Example + exc_info: ExcInfo + def __init__(self, test: DocTest, example: Example, exc_info: ExcInfo) -> None: ... + +class DebugRunner(DocTestRunner): ... + +master: DocTestRunner | None + +def testmod( + m: types.ModuleType | None = None, + name: str | None = None, + globs: dict[str, Any] | None = None, + verbose: bool | None = None, + report: bool = True, + optionflags: int = 0, + extraglobs: dict[str, Any] | None = None, + raise_on_error: bool = False, + exclude_empty: bool = False, +) -> TestResults: ... +def testfile( + filename: str, + module_relative: bool = True, + name: str | None = None, + package: None | str | types.ModuleType = None, + globs: dict[str, Any] | None = None, + verbose: bool | None = None, + report: bool = True, + optionflags: int = 0, + extraglobs: dict[str, Any] | None = None, + raise_on_error: bool = False, + parser: DocTestParser = ..., + encoding: str | None = None, +) -> TestResults: ... +def run_docstring_examples( + f: object, + globs: dict[str, Any], + verbose: bool = False, + name: str = "NoName", + compileflags: int | None = None, + optionflags: int = 0, +) -> None: ... +def set_unittest_reportflags(flags: int) -> int: ... + +class DocTestCase(unittest.TestCase): + def __init__( + self, + test: DocTest, + optionflags: int = 0, + setUp: Callable[[DocTest], object] | None = None, + tearDown: Callable[[DocTest], object] | None = None, + checker: OutputChecker | None = None, + ) -> None: ... + def runTest(self) -> None: ... + def format_failure(self, err: str) -> str: ... + def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + +class SkipDocTestCase(DocTestCase): + def __init__(self, module: types.ModuleType) -> None: ... + def test_skip(self) -> None: ... + +class _DocTestSuite(unittest.TestSuite): ... 
+ +def DocTestSuite( + module: None | str | types.ModuleType = None, + globs: dict[str, Any] | None = None, + extraglobs: dict[str, Any] | None = None, + test_finder: DocTestFinder | None = None, + **options: Any, +) -> _DocTestSuite: ... + +class DocFileCase(DocTestCase): ... + +def DocFileTest( + path: str, + module_relative: bool = True, + package: None | str | types.ModuleType = None, + globs: dict[str, Any] | None = None, + parser: DocTestParser = ..., + encoding: str | None = None, + **options: Any, +) -> DocFileCase: ... +def DocFileSuite(*paths: str, **kw: Any) -> _DocTestSuite: ... +def script_from_examples(s: str) -> str: ... +def testsource(module: None | str | types.ModuleType, name: str) -> str: ... +def debug_src(src: str, pm: bool = False, globs: dict[str, Any] | None = None) -> None: ... +def debug_script(src: str, pm: bool = False, globs: dict[str, Any] | None = None) -> None: ... +def debug(module: None | str | types.ModuleType, name: str, pm: bool = False) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/__init__.pyi new file mode 100644 index 0000000..53f8c35 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/__init__.pyi @@ -0,0 +1,60 @@ +from collections.abc import Callable +from email._policybase import _MessageT +from email.message import Message +from email.policy import Policy +from typing import IO, overload +from typing_extensions import TypeAlias + +# At runtime, listing submodules in __all__ without them being imported is +# valid, and causes them to be included in a star import. See #6523 + +__all__ = [ # noqa: F822 # Undefined names in __all__ + "base64mime", # pyright: ignore[reportUnsupportedDunderAll] + "charset", # pyright: ignore[reportUnsupportedDunderAll] + "encoders", # pyright: ignore[reportUnsupportedDunderAll] + "errors", # pyright: ignore[reportUnsupportedDunderAll] + "feedparser", # pyright: ignore[reportUnsupportedDunderAll] + "generator", # pyright: ignore[reportUnsupportedDunderAll] + "header", # pyright: ignore[reportUnsupportedDunderAll] + "iterators", # pyright: ignore[reportUnsupportedDunderAll] + "message", # pyright: ignore[reportUnsupportedDunderAll] + "message_from_file", + "message_from_binary_file", + "message_from_string", + "message_from_bytes", + "mime", # pyright: ignore[reportUnsupportedDunderAll] + "parser", # pyright: ignore[reportUnsupportedDunderAll] + "quoprimime", # pyright: ignore[reportUnsupportedDunderAll] + "utils", # pyright: ignore[reportUnsupportedDunderAll] +] + +# Definitions imported by multiple submodules in typeshed +_ParamType: TypeAlias = str | tuple[str | None, str | None, str] # noqa: Y047 +_ParamsType: TypeAlias = str | None | tuple[str, str | None, str] # noqa: Y047 + +@overload +def message_from_string(s: str) -> Message: ... +@overload +def message_from_string(s: str, _class: Callable[[], _MessageT]) -> _MessageT: ... +@overload +def message_from_string(s: str, _class: Callable[[], _MessageT] = ..., *, policy: Policy[_MessageT]) -> _MessageT: ... +@overload +def message_from_bytes(s: bytes | bytearray) -> Message: ... +@overload +def message_from_bytes(s: bytes | bytearray, _class: Callable[[], _MessageT]) -> _MessageT: ... +@overload +def message_from_bytes( + s: bytes | bytearray, _class: Callable[[], _MessageT] = ..., *, policy: Policy[_MessageT] +) -> _MessageT: ... +@overload +def message_from_file(fp: IO[str]) -> Message: ... 
+@overload +def message_from_file(fp: IO[str], _class: Callable[[], _MessageT]) -> _MessageT: ... +@overload +def message_from_file(fp: IO[str], _class: Callable[[], _MessageT] = ..., *, policy: Policy[_MessageT]) -> _MessageT: ... +@overload +def message_from_binary_file(fp: IO[bytes]) -> Message: ... +@overload +def message_from_binary_file(fp: IO[bytes], _class: Callable[[], _MessageT]) -> _MessageT: ... +@overload +def message_from_binary_file(fp: IO[bytes], _class: Callable[[], _MessageT] = ..., *, policy: Policy[_MessageT]) -> _MessageT: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/_header_value_parser.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/_header_value_parser.pyi new file mode 100644 index 0000000..dededd0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/_header_value_parser.pyi @@ -0,0 +1,398 @@ +from collections.abc import Iterable, Iterator +from email.errors import HeaderParseError, MessageDefect +from email.policy import Policy +from re import Pattern +from typing import Any, Final +from typing_extensions import Self + +WSP: Final[set[str]] +CFWS_LEADER: Final[set[str]] +SPECIALS: Final[set[str]] +ATOM_ENDS: Final[set[str]] +DOT_ATOM_ENDS: Final[set[str]] +PHRASE_ENDS: Final[set[str]] +TSPECIALS: Final[set[str]] +TOKEN_ENDS: Final[set[str]] +ASPECIALS: Final[set[str]] +ATTRIBUTE_ENDS: Final[set[str]] +EXTENDED_ATTRIBUTE_ENDS: Final[set[str]] +# Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 +NLSET: Final[set[str]] +# Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 +SPECIALSNL: Final[set[str]] + +# Added in Python 3.9.23, 3.10.17, 3.11.12, 3.12.9, 3.13.2 +def make_quoted_pairs(value: Any) -> str: ... +def quote_string(value: Any) -> str: ... + +rfc2047_matcher: Final[Pattern[str]] + +class TokenList(list[TokenList | Terminal]): + token_type: str | None + syntactic_break: bool + ew_combine_allowed: bool + defects: list[MessageDefect] + def __init__(self, *args: Any, **kw: Any) -> None: ... + @property + def value(self) -> str: ... + @property + def all_defects(self) -> list[MessageDefect]: ... + def startswith_fws(self) -> bool: ... + @property + def as_ew_allowed(self) -> bool: ... + @property + def comments(self) -> list[str]: ... + def fold(self, *, policy: Policy) -> str: ... + def pprint(self, indent: str = "") -> None: ... + def ppstr(self, indent: str = "") -> str: ... + +class WhiteSpaceTokenList(TokenList): ... + +class UnstructuredTokenList(TokenList): + token_type: str + +class Phrase(TokenList): + token_type: str + +class Word(TokenList): + token_type: str + +class CFWSList(WhiteSpaceTokenList): + token_type: str + +class Atom(TokenList): + token_type: str + +class Token(TokenList): + token_type: str + encode_as_ew: bool + +class EncodedWord(TokenList): + token_type: str + cte: str | None + charset: str | None + lang: str | None + +class QuotedString(TokenList): + token_type: str + @property + def content(self) -> str: ... + @property + def quoted_value(self) -> str: ... + @property + def stripped_value(self) -> str: ... + +class BareQuotedString(QuotedString): + token_type: str + +class Comment(WhiteSpaceTokenList): + token_type: str + def quote(self, value: Any) -> str: ... + @property + def content(self) -> str: ... + +class AddressList(TokenList): + token_type: str + @property + def addresses(self) -> list[Address]: ... + @property + def mailboxes(self) -> list[Mailbox]: ... + @property + def all_mailboxes(self) -> list[Mailbox]: ... 
+ +class Address(TokenList): + token_type: str + @property + def display_name(self) -> str: ... + @property + def mailboxes(self) -> list[Mailbox]: ... + @property + def all_mailboxes(self) -> list[Mailbox]: ... + +class MailboxList(TokenList): + token_type: str + @property + def mailboxes(self) -> list[Mailbox]: ... + @property + def all_mailboxes(self) -> list[Mailbox]: ... + +class GroupList(TokenList): + token_type: str + @property + def mailboxes(self) -> list[Mailbox]: ... + @property + def all_mailboxes(self) -> list[Mailbox]: ... + +class Group(TokenList): + token_type: str + @property + def mailboxes(self) -> list[Mailbox]: ... + @property + def all_mailboxes(self) -> list[Mailbox]: ... + @property + def display_name(self) -> str: ... + +class NameAddr(TokenList): + token_type: str + @property + def display_name(self) -> str: ... + @property + def local_part(self) -> str: ... + @property + def domain(self) -> str: ... + @property + def route(self) -> list[Domain] | None: ... + @property + def addr_spec(self) -> str: ... + +class AngleAddr(TokenList): + token_type: str + @property + def local_part(self) -> str: ... + @property + def domain(self) -> str: ... + @property + def route(self) -> list[Domain] | None: ... + @property + def addr_spec(self) -> str: ... + +class ObsRoute(TokenList): + token_type: str + @property + def domains(self) -> list[Domain]: ... + +class Mailbox(TokenList): + token_type: str + @property + def display_name(self) -> str: ... + @property + def local_part(self) -> str: ... + @property + def domain(self) -> str: ... + @property + def route(self) -> list[str]: ... + @property + def addr_spec(self) -> str: ... + +class InvalidMailbox(TokenList): + token_type: str + @property + def display_name(self) -> None: ... + @property + def local_part(self) -> None: ... + @property + def domain(self) -> None: ... + @property + def route(self) -> None: ... + @property + def addr_spec(self) -> None: ... + +class Domain(TokenList): + token_type: str + as_ew_allowed: bool + @property + def domain(self) -> str: ... + +class DotAtom(TokenList): + token_type: str + +class DotAtomText(TokenList): + token_type: str + as_ew_allowed: bool + +class NoFoldLiteral(TokenList): + token_type: str + as_ew_allowed: bool + +class AddrSpec(TokenList): + token_type: str + as_ew_allowed: bool + @property + def local_part(self) -> str: ... + @property + def domain(self) -> str: ... + @property + def addr_spec(self) -> str: ... + +class ObsLocalPart(TokenList): + token_type: str + as_ew_allowed: bool + +class DisplayName(Phrase): + token_type: str + @property + def display_name(self) -> str: ... + +class LocalPart(TokenList): + token_type: str + as_ew_allowed: bool + @property + def local_part(self) -> str: ... + +class DomainLiteral(TokenList): + token_type: str + as_ew_allowed: bool + @property + def domain(self) -> str: ... + @property + def ip(self) -> str: ... + +class MIMEVersion(TokenList): + token_type: str + major: int | None + minor: int | None + +class Parameter(TokenList): + token_type: str + sectioned: bool + extended: bool + charset: str + @property + def section_number(self) -> int: ... + @property + def param_value(self) -> str: ... + +class InvalidParameter(Parameter): + token_type: str + +class Attribute(TokenList): + token_type: str + @property + def stripped_value(self) -> str: ... + +class Section(TokenList): + token_type: str + number: int | None + +class Value(TokenList): + token_type: str + @property + def stripped_value(self) -> str: ... 
+ +class MimeParameters(TokenList): + token_type: str + syntactic_break: bool + @property + def params(self) -> Iterator[tuple[str, str]]: ... + +class ParameterizedHeaderValue(TokenList): + syntactic_break: bool + @property + def params(self) -> Iterable[tuple[str, str]]: ... + +class ContentType(ParameterizedHeaderValue): + token_type: str + as_ew_allowed: bool + maintype: str + subtype: str + +class ContentDisposition(ParameterizedHeaderValue): + token_type: str + as_ew_allowed: bool + content_disposition: Any + +class ContentTransferEncoding(TokenList): + token_type: str + as_ew_allowed: bool + cte: str + +class HeaderLabel(TokenList): + token_type: str + as_ew_allowed: bool + +class MsgID(TokenList): + token_type: str + as_ew_allowed: bool + def fold(self, policy: Policy) -> str: ... + +class MessageID(MsgID): + token_type: str + +class InvalidMessageID(MessageID): + token_type: str + +class Header(TokenList): + token_type: str + +class Terminal(str): + as_ew_allowed: bool + ew_combine_allowed: bool + syntactic_break: bool + token_type: str + defects: list[MessageDefect] + def __new__(cls, value: str, token_type: str) -> Self: ... + def pprint(self) -> None: ... + @property + def all_defects(self) -> list[MessageDefect]: ... + def pop_trailing_ws(self) -> None: ... + @property + def comments(self) -> list[str]: ... + def __getnewargs__(self) -> tuple[str, str]: ... # type: ignore[override] + +class WhiteSpaceTerminal(Terminal): + @property + def value(self) -> str: ... + def startswith_fws(self) -> bool: ... + +class ValueTerminal(Terminal): + @property + def value(self) -> ValueTerminal: ... + def startswith_fws(self) -> bool: ... + +class EWWhiteSpaceTerminal(WhiteSpaceTerminal): ... +class _InvalidEwError(HeaderParseError): ... + +DOT: Final[ValueTerminal] +ListSeparator: Final[ValueTerminal] +RouteComponentMarker: Final[ValueTerminal] + +def get_fws(value: str) -> tuple[WhiteSpaceTerminal, str]: ... +def get_encoded_word(value: str, terminal_type: str = "vtext") -> tuple[EncodedWord, str]: ... +def get_unstructured(value: str) -> UnstructuredTokenList: ... +def get_qp_ctext(value: str) -> tuple[WhiteSpaceTerminal, str]: ... +def get_qcontent(value: str) -> tuple[ValueTerminal, str]: ... +def get_atext(value: str) -> tuple[ValueTerminal, str]: ... +def get_bare_quoted_string(value: str) -> tuple[BareQuotedString, str]: ... +def get_comment(value: str) -> tuple[Comment, str]: ... +def get_cfws(value: str) -> tuple[CFWSList, str]: ... +def get_quoted_string(value: str) -> tuple[QuotedString, str]: ... +def get_atom(value: str) -> tuple[Atom, str]: ... +def get_dot_atom_text(value: str) -> tuple[DotAtomText, str]: ... +def get_dot_atom(value: str) -> tuple[DotAtom, str]: ... +def get_word(value: str) -> tuple[Any, str]: ... +def get_phrase(value: str) -> tuple[Phrase, str]: ... +def get_local_part(value: str) -> tuple[LocalPart, str]: ... +def get_obs_local_part(value: str) -> tuple[ObsLocalPart, str]: ... +def get_dtext(value: str) -> tuple[ValueTerminal, str]: ... +def get_domain_literal(value: str) -> tuple[DomainLiteral, str]: ... +def get_domain(value: str) -> tuple[Domain, str]: ... +def get_addr_spec(value: str) -> tuple[AddrSpec, str]: ... +def get_obs_route(value: str) -> tuple[ObsRoute, str]: ... +def get_angle_addr(value: str) -> tuple[AngleAddr, str]: ... +def get_display_name(value: str) -> tuple[DisplayName, str]: ... +def get_name_addr(value: str) -> tuple[NameAddr, str]: ... +def get_mailbox(value: str) -> tuple[Mailbox, str]: ... 
+def get_invalid_mailbox(value: str, endchars: str) -> tuple[InvalidMailbox, str]: ... +def get_mailbox_list(value: str) -> tuple[MailboxList, str]: ... +def get_group_list(value: str) -> tuple[GroupList, str]: ... +def get_group(value: str) -> tuple[Group, str]: ... +def get_address(value: str) -> tuple[Address, str]: ... +def get_address_list(value: str) -> tuple[AddressList, str]: ... +def get_no_fold_literal(value: str) -> tuple[NoFoldLiteral, str]: ... +def get_msg_id(value: str) -> tuple[MsgID, str]: ... +def parse_message_id(value: str) -> MessageID: ... +def parse_mime_version(value: str) -> MIMEVersion: ... +def get_invalid_parameter(value: str) -> tuple[InvalidParameter, str]: ... +def get_ttext(value: str) -> tuple[ValueTerminal, str]: ... +def get_token(value: str) -> tuple[Token, str]: ... +def get_attrtext(value: str) -> tuple[ValueTerminal, str]: ... +def get_attribute(value: str) -> tuple[Attribute, str]: ... +def get_extended_attrtext(value: str) -> tuple[ValueTerminal, str]: ... +def get_extended_attribute(value: str) -> tuple[Attribute, str]: ... +def get_section(value: str) -> tuple[Section, str]: ... +def get_value(value: str) -> tuple[Value, str]: ... +def get_parameter(value: str) -> tuple[Parameter, str]: ... +def parse_mime_parameters(value: str) -> MimeParameters: ... +def parse_content_type_header(value: str) -> ContentType: ... +def parse_content_disposition_header(value: str) -> ContentDisposition: ... +def parse_content_transfer_encoding_header(value: str) -> ContentTransferEncoding: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/_policybase.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/_policybase.pyi new file mode 100644 index 0000000..0fb890d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/_policybase.pyi @@ -0,0 +1,80 @@ +from abc import ABCMeta, abstractmethod +from email.errors import MessageDefect +from email.header import Header +from email.message import Message +from typing import Any, Generic, Protocol, TypeVar, type_check_only +from typing_extensions import Self + +__all__ = ["Policy", "Compat32", "compat32"] + +_MessageT = TypeVar("_MessageT", bound=Message[Any, Any], default=Message[str, str]) +_MessageT_co = TypeVar("_MessageT_co", covariant=True, bound=Message[Any, Any], default=Message[str, str]) + +@type_check_only +class _MessageFactory(Protocol[_MessageT]): + def __call__(self, policy: Policy[_MessageT]) -> _MessageT: ... + +# Policy below is the only known direct subclass of _PolicyBase. We therefore +# assume that the __init__ arguments and attributes of _PolicyBase are +# the same as those of Policy. +class _PolicyBase(Generic[_MessageT_co]): + max_line_length: int | None + linesep: str + cte_type: str + raise_on_defect: bool + mangle_from_: bool + message_factory: _MessageFactory[_MessageT_co] | None + # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 + verify_generated_headers: bool + + def __init__( + self, + *, + max_line_length: int | None = 78, + linesep: str = "\n", + cte_type: str = "8bit", + raise_on_defect: bool = False, + mangle_from_: bool = ..., # default depends on sub-class + message_factory: _MessageFactory[_MessageT_co] | None = None, + # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 + verify_generated_headers: bool = True, + ) -> None: ... 
+ def clone( + self, + *, + max_line_length: int | None = ..., + linesep: str = ..., + cte_type: str = ..., + raise_on_defect: bool = ..., + mangle_from_: bool = ..., + message_factory: _MessageFactory[_MessageT_co] | None = ..., + # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 + verify_generated_headers: bool = ..., + ) -> Self: ... + def __add__(self, other: Policy) -> Self: ... + +class Policy(_PolicyBase[_MessageT_co], metaclass=ABCMeta): + # Every Message object has a `defects` attribute, so the following + # methods will work for any Message object. + def handle_defect(self, obj: Message[Any, Any], defect: MessageDefect) -> None: ... + def register_defect(self, obj: Message[Any, Any], defect: MessageDefect) -> None: ... + def header_max_count(self, name: str) -> int | None: ... + @abstractmethod + def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: ... + @abstractmethod + def header_store_parse(self, name: str, value: str) -> tuple[str, str]: ... + @abstractmethod + def header_fetch_parse(self, name: str, value: str) -> str: ... + @abstractmethod + def fold(self, name: str, value: str) -> str: ... + @abstractmethod + def fold_binary(self, name: str, value: str) -> bytes: ... + +class Compat32(Policy[_MessageT_co]): + def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: ... + def header_store_parse(self, name: str, value: str) -> tuple[str, str]: ... + def header_fetch_parse(self, name: str, value: str) -> str | Header: ... # type: ignore[override] + def fold(self, name: str, value: str) -> str: ... + def fold_binary(self, name: str, value: str) -> bytes: ... + +compat32: Compat32[Message[str, str]] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/base64mime.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/base64mime.pyi new file mode 100644 index 0000000..563cd7f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/base64mime.pyi @@ -0,0 +1,13 @@ +__all__ = ["body_decode", "body_encode", "decode", "decodestring", "header_encode", "header_length"] + +from _typeshed import ReadableBuffer + +def header_length(bytearray: str | bytes | bytearray) -> int: ... +def header_encode(header_bytes: str | ReadableBuffer, charset: str = "iso-8859-1") -> str: ... + +# First argument should be a buffer that supports slicing and len(). +def body_encode(s: bytes | bytearray, maxlinelen: int = 76, eol: str = "\n") -> str: ... +def decode(string: str | ReadableBuffer) -> bytes: ... 
+ +body_decode = decode +decodestring = decode diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/charset.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/charset.pyi new file mode 100644 index 0000000..e193083 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/charset.pyi @@ -0,0 +1,42 @@ +from collections.abc import Callable, Iterator +from email.message import Message +from typing import ClassVar, Final, overload + +__all__ = ["Charset", "add_alias", "add_charset", "add_codec"] + +QP: Final = 1 # undocumented +BASE64: Final = 2 # undocumented +SHORTEST: Final = 3 # undocumented +RFC2047_CHROME_LEN: Final = 7 # undocumented +DEFAULT_CHARSET: Final = "us-ascii" # undocumented +UNKNOWN8BIT: Final = "unknown-8bit" # undocumented +EMPTYSTRING: Final = "" # undocumented +CHARSETS: Final[dict[str, tuple[int | None, int | None, str | None]]] +ALIASES: Final[dict[str, str]] +CODEC_MAP: Final[dict[str, str | None]] # undocumented + +class Charset: + input_charset: str + header_encoding: int + body_encoding: int + output_charset: str | None + input_codec: str | None + output_codec: str | None + def __init__(self, input_charset: str = "us-ascii") -> None: ... + def get_body_encoding(self) -> str | Callable[[Message], None]: ... + def get_output_charset(self) -> str | None: ... + def header_encode(self, string: str) -> str: ... + def header_encode_lines(self, string: str, maxlengths: Iterator[int]) -> list[str | None]: ... + @overload + def body_encode(self, string: None) -> None: ... + @overload + def body_encode(self, string: str | bytes) -> str: ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __eq__(self, other: object) -> bool: ... + def __ne__(self, value: object, /) -> bool: ... + +def add_charset( + charset: str, header_enc: int | None = None, body_enc: int | None = None, output_charset: str | None = None +) -> None: ... +def add_alias(alias: str, canonical: str) -> None: ... +def add_codec(charset: str, codecname: str) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/contentmanager.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/contentmanager.pyi new file mode 100644 index 0000000..3214f1a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/contentmanager.pyi @@ -0,0 +1,11 @@ +from collections.abc import Callable +from email.message import Message +from typing import Any + +class ContentManager: + def get_content(self, msg: Message, *args: Any, **kw: Any) -> Any: ... + def set_content(self, msg: Message, obj: Any, *args: Any, **kw: Any) -> Any: ... + def add_get_handler(self, key: str, handler: Callable[..., Any]) -> None: ... + def add_set_handler(self, typekey: type, handler: Callable[..., Any]) -> None: ... + +raw_data_manager: ContentManager diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/encoders.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/encoders.pyi new file mode 100644 index 0000000..55223bd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/encoders.pyi @@ -0,0 +1,8 @@ +from email.message import Message + +__all__ = ["encode_7or8bit", "encode_base64", "encode_noop", "encode_quopri"] + +def encode_base64(msg: Message) -> None: ... +def encode_quopri(msg: Message) -> None: ... +def encode_7or8bit(msg: Message) -> None: ... +def encode_noop(msg: Message) -> None: ... 
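The charset stub above pairs each input charset with a header encoding and a body encoding (QP, BASE64, or SHORTEST). A minimal sketch of how that plays out for UTF-8; the sample string is illustrative and the encoded forms in the comments are what CPython currently produces for it:

    from email.charset import Charset

    cs = Charset("utf-8")
    print(cs.get_output_charset())    # utf-8
    print(cs.header_encode("héllo"))  # =?utf-8?b?aMOpbGxv?=  (RFC 2047 word, base64 chosen as shortest)
    print(cs.body_encode("héllo"))    # aMOpbGxv  (utf-8 bodies default to base64)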
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/errors.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/errors.pyi new file mode 100644 index 0000000..b501a58 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/errors.pyi @@ -0,0 +1,42 @@ +import sys + +class MessageError(Exception): ... +class MessageParseError(MessageError): ... +class HeaderParseError(MessageParseError): ... +class BoundaryError(MessageParseError): ... +class MultipartConversionError(MessageError, TypeError): ... +class CharsetError(MessageError): ... + +# Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 +class HeaderWriteError(MessageError): ... + +class MessageDefect(ValueError): + def __init__(self, line: str | None = None) -> None: ... + +class NoBoundaryInMultipartDefect(MessageDefect): ... +class StartBoundaryNotFoundDefect(MessageDefect): ... +class FirstHeaderLineIsContinuationDefect(MessageDefect): ... +class MisplacedEnvelopeHeaderDefect(MessageDefect): ... +class MultipartInvariantViolationDefect(MessageDefect): ... +class InvalidMultipartContentTransferEncodingDefect(MessageDefect): ... +class UndecodableBytesDefect(MessageDefect): ... +class InvalidBase64PaddingDefect(MessageDefect): ... +class InvalidBase64CharactersDefect(MessageDefect): ... +class InvalidBase64LengthDefect(MessageDefect): ... +class CloseBoundaryNotFoundDefect(MessageDefect): ... +class MissingHeaderBodySeparatorDefect(MessageDefect): ... + +MalformedHeaderDefect = MissingHeaderBodySeparatorDefect + +class HeaderDefect(MessageDefect): ... +class InvalidHeaderDefect(HeaderDefect): ... +class HeaderMissingRequiredValue(HeaderDefect): ... + +class NonPrintableDefect(HeaderDefect): + def __init__(self, non_printables: str | None) -> None: ... + +class ObsoleteHeaderDefect(HeaderDefect): ... +class NonASCIILocalPartDefect(HeaderDefect): ... + +if sys.version_info >= (3, 10): + class InvalidDateDefect(HeaderDefect): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/feedparser.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/feedparser.pyi new file mode 100644 index 0000000..d9279e9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/feedparser.pyi @@ -0,0 +1,22 @@ +from collections.abc import Callable +from email._policybase import _MessageT +from email.message import Message +from email.policy import Policy +from typing import Generic, overload + +__all__ = ["FeedParser", "BytesFeedParser"] + +class FeedParser(Generic[_MessageT]): + @overload + def __init__(self: FeedParser[Message], _factory: None = None, *, policy: Policy[Message] = ...) -> None: ... + @overload + def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy[_MessageT] = ...) -> None: ... + def feed(self, data: str) -> None: ... + def close(self) -> _MessageT: ... + +class BytesFeedParser(FeedParser[_MessageT]): + @overload + def __init__(self: BytesFeedParser[Message], _factory: None = None, *, policy: Policy[Message] = ...) -> None: ... + @overload + def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy[_MessageT] = ...) -> None: ... + def feed(self, data: bytes | bytearray) -> None: ... 
# type: ignore[override] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/generator.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/generator.pyi new file mode 100644 index 0000000..d30e686 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/generator.pyi @@ -0,0 +1,77 @@ +from _typeshed import SupportsWrite +from email.message import Message +from email.policy import Policy +from typing import Any, Generic, TypeVar, overload +from typing_extensions import Self + +__all__ = ["Generator", "DecodedGenerator", "BytesGenerator"] + +# By default, generators do not have a message policy. +_MessageT = TypeVar("_MessageT", bound=Message[Any, Any], default=Any) + +class Generator(Generic[_MessageT]): + maxheaderlen: int | None + policy: Policy[_MessageT] | None + @overload + def __init__( + self: Generator[Any], # The Policy of the message is used. + outfp: SupportsWrite[str], + mangle_from_: bool | None = None, + maxheaderlen: int | None = None, + *, + policy: None = None, + ) -> None: ... + @overload + def __init__( + self, + outfp: SupportsWrite[str], + mangle_from_: bool | None = None, + maxheaderlen: int | None = None, + *, + policy: Policy[_MessageT], + ) -> None: ... + def write(self, s: str) -> None: ... + def flatten(self, msg: _MessageT, unixfrom: bool = False, linesep: str | None = None) -> None: ... + def clone(self, fp: SupportsWrite[str]) -> Self: ... + +class BytesGenerator(Generator[_MessageT]): + @overload + def __init__( + self: BytesGenerator[Any], # The Policy of the message is used. + outfp: SupportsWrite[bytes], + mangle_from_: bool | None = None, + maxheaderlen: int | None = None, + *, + policy: None = None, + ) -> None: ... + @overload + def __init__( + self, + outfp: SupportsWrite[bytes], + mangle_from_: bool | None = None, + maxheaderlen: int | None = None, + *, + policy: Policy[_MessageT], + ) -> None: ... + +class DecodedGenerator(Generator[_MessageT]): + @overload + def __init__( + self: DecodedGenerator[Any], # The Policy of the message is used. + outfp: SupportsWrite[str], + mangle_from_: bool | None = None, + maxheaderlen: int | None = None, + fmt: str | None = None, + *, + policy: None = None, + ) -> None: ... + @overload + def __init__( + self, + outfp: SupportsWrite[str], + mangle_from_: bool | None = None, + maxheaderlen: int | None = None, + fmt: str | None = None, + *, + policy: Policy[_MessageT], + ) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/header.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/header.pyi new file mode 100644 index 0000000..a26bbb5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/header.pyi @@ -0,0 +1,32 @@ +from collections.abc import Iterable +from email.charset import Charset +from typing import Any, ClassVar + +__all__ = ["Header", "decode_header", "make_header"] + +class Header: + def __init__( + self, + s: bytes | bytearray | str | None = None, + charset: Charset | str | None = None, + maxlinelen: int | None = None, + header_name: str | None = None, + continuation_ws: str = " ", + errors: str = "strict", + ) -> None: ... + def append(self, s: bytes | bytearray | str, charset: Charset | str | None = None, errors: str = "strict") -> None: ... + def encode(self, splitchars: str = ";, \t", maxlinelen: int | None = None, linesep: str = "\n") -> str: ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __eq__(self, other: object) -> bool: ... 
+ def __ne__(self, value: object, /) -> bool: ... + +# decode_header() either returns list[tuple[str, None]] if the header +# contains no encoded parts, or list[tuple[bytes, str | None]] if the header +# contains at least one encoded part. +def decode_header(header: Header | str) -> list[tuple[Any, Any | None]]: ... +def make_header( + decoded_seq: Iterable[tuple[bytes | bytearray | str, str | None]], + maxlinelen: int | None = None, + header_name: str | None = None, + continuation_ws: str = " ", +) -> Header: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/headerregistry.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/headerregistry.pyi new file mode 100644 index 0000000..bea6830 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/headerregistry.pyi @@ -0,0 +1,181 @@ +import types +from collections.abc import Iterable, Mapping +from datetime import datetime as _datetime +from email._header_value_parser import ( + AddressList, + ContentDisposition, + ContentTransferEncoding, + ContentType, + MessageID, + MIMEVersion, + TokenList, + UnstructuredTokenList, +) +from email.errors import MessageDefect +from email.policy import Policy +from typing import Any, ClassVar, Literal, Protocol, type_check_only +from typing_extensions import Self + +class BaseHeader(str): + # max_count is actually more of an abstract ClassVar (not defined on the base class, but expected to be defined in subclasses) + max_count: ClassVar[Literal[1] | None] + @property + def name(self) -> str: ... + @property + def defects(self) -> tuple[MessageDefect, ...]: ... + def __new__(cls, name: str, value: Any) -> Self: ... + def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect]) -> None: ... + def fold(self, *, policy: Policy) -> str: ... + +class UnstructuredHeader: + max_count: ClassVar[Literal[1] | None] + @staticmethod + def value_parser(value: str) -> UnstructuredTokenList: ... + @classmethod + def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... + +class UniqueUnstructuredHeader(UnstructuredHeader): + max_count: ClassVar[Literal[1]] + +class DateHeader: + max_count: ClassVar[Literal[1] | None] + def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect], datetime: _datetime) -> None: ... + @property + def datetime(self) -> _datetime | None: ... + @staticmethod + def value_parser(value: str) -> UnstructuredTokenList: ... + @classmethod + def parse(cls, value: str | _datetime, kwds: dict[str, Any]) -> None: ... + +class UniqueDateHeader(DateHeader): + max_count: ClassVar[Literal[1]] + +class AddressHeader: + max_count: ClassVar[Literal[1] | None] + def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect], groups: Iterable[Group]) -> None: ... + @property + def groups(self) -> tuple[Group, ...]: ... + @property + def addresses(self) -> tuple[Address, ...]: ... + @staticmethod + def value_parser(value: str) -> AddressList: ... + @classmethod + def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... + +class UniqueAddressHeader(AddressHeader): + max_count: ClassVar[Literal[1]] + +class SingleAddressHeader(AddressHeader): + @property + def address(self) -> Address: ... 
+ +class UniqueSingleAddressHeader(SingleAddressHeader): + max_count: ClassVar[Literal[1]] + +class MIMEVersionHeader: + max_count: ClassVar[Literal[1]] + def init( + self, + name: str, + *, + parse_tree: TokenList, + defects: Iterable[MessageDefect], + version: str | None, + major: int | None, + minor: int | None, + ) -> None: ... + @property + def version(self) -> str | None: ... + @property + def major(self) -> int | None: ... + @property + def minor(self) -> int | None: ... + @staticmethod + def value_parser(value: str) -> MIMEVersion: ... + @classmethod + def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... + +class ParameterizedMIMEHeader: + max_count: ClassVar[Literal[1]] + def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect], params: Mapping[str, Any]) -> None: ... + @property + def params(self) -> types.MappingProxyType[str, Any]: ... + @classmethod + def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... + +class ContentTypeHeader(ParameterizedMIMEHeader): + @property + def content_type(self) -> str: ... + @property + def maintype(self) -> str: ... + @property + def subtype(self) -> str: ... + @staticmethod + def value_parser(value: str) -> ContentType: ... + +class ContentDispositionHeader(ParameterizedMIMEHeader): + # init is redefined but has the same signature as parent class, so is omitted from the stub + @property + def content_disposition(self) -> str | None: ... + @staticmethod + def value_parser(value: str) -> ContentDisposition: ... + +class ContentTransferEncodingHeader: + max_count: ClassVar[Literal[1]] + def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect]) -> None: ... + @property + def cte(self) -> str: ... + @classmethod + def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... + @staticmethod + def value_parser(value: str) -> ContentTransferEncoding: ... + +class MessageIDHeader: + max_count: ClassVar[Literal[1]] + @classmethod + def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... + @staticmethod + def value_parser(value: str) -> MessageID: ... + +@type_check_only +class _HeaderParser(Protocol): + max_count: ClassVar[Literal[1] | None] + @staticmethod + def value_parser(value: str, /) -> TokenList: ... + @classmethod + def parse(cls, value: str, kwds: dict[str, Any], /) -> None: ... + +class HeaderRegistry: + registry: dict[str, type[_HeaderParser]] + base_class: type[BaseHeader] + default_class: type[_HeaderParser] + def __init__( + self, base_class: type[BaseHeader] = ..., default_class: type[_HeaderParser] = ..., use_default_map: bool = True + ) -> None: ... + def map_to_type(self, name: str, cls: type[BaseHeader]) -> None: ... + def __getitem__(self, name: str) -> type[BaseHeader]: ... + def __call__(self, name: str, value: Any) -> BaseHeader: ... + +class Address: + @property + def display_name(self) -> str: ... + @property + def username(self) -> str: ... + @property + def domain(self) -> str: ... + @property + def addr_spec(self) -> str: ... + def __init__( + self, display_name: str = "", username: str | None = "", domain: str | None = "", addr_spec: str | None = None + ) -> None: ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __eq__(self, other: object) -> bool: ... + +class Group: + @property + def display_name(self) -> str | None: ... + @property + def addresses(self) -> tuple[Address, ...]: ... + def __init__(self, display_name: str | None = None, addresses: Iterable[Address] | None = None) -> None: ... 
+ __hash__: ClassVar[None] # type: ignore[assignment] + def __eq__(self, other: object) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/iterators.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/iterators.pyi new file mode 100644 index 0000000..d964d68 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/iterators.pyi @@ -0,0 +1,12 @@ +from _typeshed import SupportsWrite +from collections.abc import Iterator +from email.message import Message + +__all__ = ["body_line_iterator", "typed_subpart_iterator", "walk"] + +def body_line_iterator(msg: Message, decode: bool = False) -> Iterator[str]: ... +def typed_subpart_iterator(msg: Message, maintype: str = "text", subtype: str | None = None) -> Iterator[str]: ... +def walk(self: Message) -> Iterator[Message]: ... + +# We include the seemingly private function because it is documented in the stdlib documentation. +def _structure(msg: Message, fp: SupportsWrite[str] | None = None, level: int = 0, include_default: bool = False) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/message.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/message.pyi new file mode 100644 index 0000000..794882b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/message.pyi @@ -0,0 +1,174 @@ +from _typeshed import MaybeNone +from collections.abc import Generator, Iterator, Sequence +from email import _ParamsType, _ParamType +from email.charset import Charset +from email.contentmanager import ContentManager +from email.errors import MessageDefect +from email.policy import Policy +from typing import Any, Generic, Literal, Protocol, TypeVar, overload, type_check_only +from typing_extensions import Self, TypeAlias + +__all__ = ["Message", "EmailMessage"] + +_T = TypeVar("_T") +# Type returned by Policy.header_fetch_parse, often str or Header. +_HeaderT_co = TypeVar("_HeaderT_co", covariant=True, default=str) +_HeaderParamT_contra = TypeVar("_HeaderParamT_contra", contravariant=True, default=str) +# Represents headers constructed by HeaderRegistry. Those are sub-classes +# of BaseHeader and another header type. +_HeaderRegistryT_co = TypeVar("_HeaderRegistryT_co", covariant=True, default=Any) +_HeaderRegistryParamT_contra = TypeVar("_HeaderRegistryParamT_contra", contravariant=True, default=Any) + +_PayloadType: TypeAlias = Message | str +_EncodedPayloadType: TypeAlias = Message | bytes +_MultipartPayloadType: TypeAlias = list[_PayloadType] +_CharsetType: TypeAlias = Charset | str | None + +@type_check_only +class _SupportsEncodeToPayload(Protocol): + def encode(self, encoding: str, /) -> _PayloadType | _MultipartPayloadType | _SupportsDecodeToPayload: ... + +@type_check_only +class _SupportsDecodeToPayload(Protocol): + def decode(self, encoding: str, errors: str, /) -> _PayloadType | _MultipartPayloadType: ... + +class Message(Generic[_HeaderT_co, _HeaderParamT_contra]): + # The policy attributes and arguments in this class and its subclasses + # would ideally use Policy[Self], but this is not possible. + policy: Policy[Any] # undocumented + preamble: str | None + epilogue: str | None + defects: list[MessageDefect] + def __init__(self, policy: Policy[Any] = ...) -> None: ... + def is_multipart(self) -> bool: ... + def set_unixfrom(self, unixfrom: str) -> None: ... + def get_unixfrom(self) -> str | None: ... + def attach(self, payload: _PayloadType) -> None: ... 
+ # `i: int` without a multipart payload results in an error + # `| MaybeNone` acts like `| Any`: can be None for cleared or unset payload, but annoying to check + @overload # multipart + def get_payload(self, i: int, decode: Literal[True]) -> None: ... + @overload # multipart + def get_payload(self, i: int, decode: Literal[False] = False) -> _PayloadType | MaybeNone: ... + @overload # either + def get_payload(self, i: None = None, decode: Literal[False] = False) -> _PayloadType | _MultipartPayloadType | MaybeNone: ... + @overload # not multipart + def get_payload(self, i: None = None, *, decode: Literal[True]) -> _EncodedPayloadType | MaybeNone: ... + @overload # not multipart, IDEM but w/o kwarg + def get_payload(self, i: None, decode: Literal[True]) -> _EncodedPayloadType | MaybeNone: ... + # If `charset=None` and payload supports both `encode` AND `decode`, + # then an invalid payload could be passed, but this is unlikely + # Not[_SupportsEncodeToPayload] + @overload + def set_payload( + self, payload: _SupportsDecodeToPayload | _PayloadType | _MultipartPayloadType, charset: None = None + ) -> None: ... + @overload + def set_payload( + self, + payload: _SupportsEncodeToPayload | _SupportsDecodeToPayload | _PayloadType | _MultipartPayloadType, + charset: Charset | str, + ) -> None: ... + def set_charset(self, charset: _CharsetType) -> None: ... + def get_charset(self) -> _CharsetType: ... + def __len__(self) -> int: ... + def __contains__(self, name: str) -> bool: ... + def __iter__(self) -> Iterator[str]: ... + # Same as `get` with `failobj=None`, but with the expectation that it won't return None in most scenarios + # This is important for protocols using __getitem__, like SupportsKeysAndGetItem + # Morally, the return type should be `AnyOf[_HeaderType, None]`, + # so using "the Any trick" instead. + def __getitem__(self, name: str) -> _HeaderT_co | MaybeNone: ... + def __setitem__(self, name: str, val: _HeaderParamT_contra) -> None: ... + def __delitem__(self, name: str) -> None: ... + def keys(self) -> list[str]: ... + def values(self) -> list[_HeaderT_co]: ... + def items(self) -> list[tuple[str, _HeaderT_co]]: ... + @overload + def get(self, name: str, failobj: None = None) -> _HeaderT_co | None: ... + @overload + def get(self, name: str, failobj: _T) -> _HeaderT_co | _T: ... + @overload + def get_all(self, name: str, failobj: None = None) -> list[_HeaderT_co] | None: ... + @overload + def get_all(self, name: str, failobj: _T) -> list[_HeaderT_co] | _T: ... + def add_header(self, _name: str, _value: str, **_params: _ParamsType) -> None: ... + def replace_header(self, _name: str, _value: _HeaderParamT_contra) -> None: ... + def get_content_type(self) -> str: ... + def get_content_maintype(self) -> str: ... + def get_content_subtype(self) -> str: ... + def get_default_type(self) -> str: ... + def set_default_type(self, ctype: str) -> None: ... + @overload + def get_params( + self, failobj: None = None, header: str = "content-type", unquote: bool = True + ) -> list[tuple[str, str]] | None: ... + @overload + def get_params(self, failobj: _T, header: str = "content-type", unquote: bool = True) -> list[tuple[str, str]] | _T: ... + @overload + def get_param( + self, param: str, failobj: None = None, header: str = "content-type", unquote: bool = True + ) -> _ParamType | None: ... + @overload + def get_param(self, param: str, failobj: _T, header: str = "content-type", unquote: bool = True) -> _ParamType | _T: ... 
+ def del_param(self, param: str, header: str = "content-type", requote: bool = True) -> None: ... + def set_type(self, type: str, header: str = "Content-Type", requote: bool = True) -> None: ... + @overload + def get_filename(self, failobj: None = None) -> str | None: ... + @overload + def get_filename(self, failobj: _T) -> str | _T: ... + @overload + def get_boundary(self, failobj: None = None) -> str | None: ... + @overload + def get_boundary(self, failobj: _T) -> str | _T: ... + def set_boundary(self, boundary: str) -> None: ... + @overload + def get_content_charset(self) -> str | None: ... + @overload + def get_content_charset(self, failobj: _T) -> str | _T: ... + @overload + def get_charsets(self, failobj: None = None) -> list[str | None]: ... + @overload + def get_charsets(self, failobj: _T) -> list[str | _T]: ... + def walk(self) -> Generator[Self, None, None]: ... + def get_content_disposition(self) -> str | None: ... + def as_string(self, unixfrom: bool = False, maxheaderlen: int = 0, policy: Policy[Any] | None = None) -> str: ... + def as_bytes(self, unixfrom: bool = False, policy: Policy[Any] | None = None) -> bytes: ... + def __bytes__(self) -> bytes: ... + def set_param( + self, + param: str, + value: str, + header: str = "Content-Type", + requote: bool = True, + charset: str | None = None, + language: str = "", + replace: bool = False, + ) -> None: ... + # The following two methods are undocumented, but a source code comment states that they are public API + def set_raw(self, name: str, value: _HeaderParamT_contra) -> None: ... + def raw_items(self) -> Iterator[tuple[str, _HeaderT_co]]: ... + +class MIMEPart(Message[_HeaderRegistryT_co, _HeaderRegistryParamT_contra]): + def __init__(self, policy: Policy[Any] | None = None) -> None: ... + def get_body(self, preferencelist: Sequence[str] = ("related", "html", "plain")) -> MIMEPart[_HeaderRegistryT_co] | None: ... + def attach(self, payload: Self) -> None: ... # type: ignore[override] + # The attachments are created via type(self) in the attach method. It's theoretically + # possible to sneak other attachment types into a MIMEPart instance, but could cause + # cause unforseen consequences. + def iter_attachments(self) -> Iterator[Self]: ... + def iter_parts(self) -> Iterator[MIMEPart[_HeaderRegistryT_co]]: ... + def get_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> Any: ... + def set_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> None: ... + def make_related(self, boundary: str | None = None) -> None: ... + def make_alternative(self, boundary: str | None = None) -> None: ... + def make_mixed(self, boundary: str | None = None) -> None: ... + def add_related(self, *args: Any, content_manager: ContentManager | None = ..., **kw: Any) -> None: ... + def add_alternative(self, *args: Any, content_manager: ContentManager | None = ..., **kw: Any) -> None: ... + def add_attachment(self, *args: Any, content_manager: ContentManager | None = ..., **kw: Any) -> None: ... + def clear(self) -> None: ... + def clear_content(self) -> None: ... + def as_string(self, unixfrom: bool = False, maxheaderlen: int | None = None, policy: Policy[Any] | None = None) -> str: ... + def is_attachment(self) -> bool: ... + +class EmailMessage(MIMEPart): ... 
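[editor's note] The MIMEPart/EmailMessage stubs above type the modern message API (set_content, add_alternative, add_attachment, get_body, iter_attachments). A minimal sketch of how those typed methods fit together; the addresses and the attachment payload below are purely illustrative:

from email.message import EmailMessage

msg = EmailMessage()
msg["From"] = "alice@example.com"
msg["To"] = "bob@example.com"
msg["Subject"] = "Monthly report"

msg.set_content("Plain-text body")                        # single text/plain part
msg.add_alternative("<p>HTML body</p>", subtype="html")   # now multipart/alternative
msg.add_attachment(b"fake image bytes", maintype="image",
                   subtype="png", filename="chart.png")   # now multipart/mixed

body = msg.get_body(preferencelist=("html", "plain"))     # MIMEPart | None, per the stub
for part in msg.iter_attachments():                        # Iterator[Self]
    print(part.get_content_type(), part.get_filename())
print(msg.as_string())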
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/__init__.pyi new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/application.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/application.pyi new file mode 100644 index 0000000..a7ab9dc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/application.pyi @@ -0,0 +1,17 @@ +from collections.abc import Callable +from email import _ParamsType +from email.mime.nonmultipart import MIMENonMultipart +from email.policy import Policy + +__all__ = ["MIMEApplication"] + +class MIMEApplication(MIMENonMultipart): + def __init__( + self, + _data: str | bytes | bytearray, + _subtype: str = "octet-stream", + _encoder: Callable[[MIMEApplication], object] = ..., + *, + policy: Policy | None = None, + **_params: _ParamsType, + ) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/audio.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/audio.pyi new file mode 100644 index 0000000..090dfb9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/audio.pyi @@ -0,0 +1,17 @@ +from collections.abc import Callable +from email import _ParamsType +from email.mime.nonmultipart import MIMENonMultipart +from email.policy import Policy + +__all__ = ["MIMEAudio"] + +class MIMEAudio(MIMENonMultipart): + def __init__( + self, + _audiodata: str | bytes | bytearray, + _subtype: str | None = None, + _encoder: Callable[[MIMEAudio], object] = ..., + *, + policy: Policy | None = None, + **_params: _ParamsType, + ) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/base.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/base.pyi new file mode 100644 index 0000000..b733709 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/base.pyi @@ -0,0 +1,8 @@ +import email.message +from email import _ParamsType +from email.policy import Policy + +__all__ = ["MIMEBase"] + +class MIMEBase(email.message.Message): + def __init__(self, _maintype: str, _subtype: str, *, policy: Policy | None = None, **_params: _ParamsType) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/image.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/image.pyi new file mode 100644 index 0000000..b47afa6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/image.pyi @@ -0,0 +1,17 @@ +from collections.abc import Callable +from email import _ParamsType +from email.mime.nonmultipart import MIMENonMultipart +from email.policy import Policy + +__all__ = ["MIMEImage"] + +class MIMEImage(MIMENonMultipart): + def __init__( + self, + _imagedata: str | bytes | bytearray, + _subtype: str | None = None, + _encoder: Callable[[MIMEImage], object] = ..., + *, + policy: Policy | None = None, + **_params: _ParamsType, + ) -> None: ... 
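[editor's note] The legacy email.mime classes stubbed here (MIMEApplication, MIMEAudio, MIMEBase, MIMEImage) are normally combined with MIMEMultipart and MIMEText, whose stubs follow just below. A small sketch with an illustrative payload, not a prescribed pattern:

from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.image import MIMEImage

outer = MIMEMultipart("mixed")                    # container; parts are attach()ed to it
outer["Subject"] = "Logo attached"
outer.attach(MIMEText("See the attached logo.", "plain"))

png_bytes = b"not really a PNG"                   # illustrative payload
image = MIMEImage(png_bytes, _subtype="png")      # explicit subtype skips type detection
image.add_header("Content-Disposition", "attachment", filename="logo.png")
outer.attach(image)

raw = outer.as_string()                           # serialize the whole multipart tree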
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/message.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/message.pyi new file mode 100644 index 0000000..a1e370e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/message.pyi @@ -0,0 +1,8 @@ +from email._policybase import _MessageT +from email.mime.nonmultipart import MIMENonMultipart +from email.policy import Policy + +__all__ = ["MIMEMessage"] + +class MIMEMessage(MIMENonMultipart): + def __init__(self, _msg: _MessageT, _subtype: str = "rfc822", *, policy: Policy[_MessageT] | None = None) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/multipart.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/multipart.pyi new file mode 100644 index 0000000..fb9599e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/multipart.pyi @@ -0,0 +1,18 @@ +from collections.abc import Sequence +from email import _ParamsType +from email._policybase import _MessageT +from email.mime.base import MIMEBase +from email.policy import Policy + +__all__ = ["MIMEMultipart"] + +class MIMEMultipart(MIMEBase): + def __init__( + self, + _subtype: str = "mixed", + boundary: str | None = None, + _subparts: Sequence[_MessageT] | None = None, + *, + policy: Policy[_MessageT] | None = None, + **_params: _ParamsType, + ) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/nonmultipart.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/nonmultipart.pyi new file mode 100644 index 0000000..5497d89 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/nonmultipart.pyi @@ -0,0 +1,5 @@ +from email.mime.base import MIMEBase + +__all__ = ["MIMENonMultipart"] + +class MIMENonMultipart(MIMEBase): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/text.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/text.pyi new file mode 100644 index 0000000..edfa67a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/mime/text.pyi @@ -0,0 +1,9 @@ +from email._policybase import Policy +from email.mime.nonmultipart import MIMENonMultipart + +__all__ = ["MIMEText"] + +class MIMEText(MIMENonMultipart): + def __init__( + self, _text: str, _subtype: str = "plain", _charset: str | None = None, *, policy: Policy | None = None + ) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/parser.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/parser.pyi new file mode 100644 index 0000000..a4924a6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/parser.pyi @@ -0,0 +1,39 @@ +from _typeshed import SupportsRead +from collections.abc import Callable +from email._policybase import _MessageT +from email.feedparser import BytesFeedParser as BytesFeedParser, FeedParser as FeedParser +from email.message import Message +from email.policy import Policy +from io import _WrappedBuffer +from typing import Generic, overload + +__all__ = ["Parser", "HeaderParser", "BytesParser", "BytesHeaderParser", "FeedParser", "BytesFeedParser"] + +class Parser(Generic[_MessageT]): + @overload + def __init__(self: Parser[Message[str, str]], _class: None = None) -> None: ... + @overload + def __init__(self, _class: None = None, *, policy: Policy[_MessageT]) -> None: ... 
+ @overload + def __init__(self, _class: Callable[[], _MessageT] | None, *, policy: Policy[_MessageT] = ...) -> None: ... + def parse(self, fp: SupportsRead[str], headersonly: bool = False) -> _MessageT: ... + def parsestr(self, text: str, headersonly: bool = False) -> _MessageT: ... + +class HeaderParser(Parser[_MessageT]): + def parse(self, fp: SupportsRead[str], headersonly: bool = True) -> _MessageT: ... + def parsestr(self, text: str, headersonly: bool = True) -> _MessageT: ... + +class BytesParser(Generic[_MessageT]): + parser: Parser[_MessageT] + @overload + def __init__(self: BytesParser[Message[str, str]], _class: None = None) -> None: ... + @overload + def __init__(self, _class: None = None, *, policy: Policy[_MessageT]) -> None: ... + @overload + def __init__(self, _class: Callable[[], _MessageT], *, policy: Policy[_MessageT] = ...) -> None: ... + def parse(self, fp: _WrappedBuffer, headersonly: bool = False) -> _MessageT: ... + def parsebytes(self, text: bytes | bytearray, headersonly: bool = False) -> _MessageT: ... + +class BytesHeaderParser(BytesParser[_MessageT]): + def parse(self, fp: _WrappedBuffer, headersonly: bool = True) -> _MessageT: ... + def parsebytes(self, text: bytes | bytearray, headersonly: bool = True) -> _MessageT: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/policy.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/policy.pyi new file mode 100644 index 0000000..35c9999 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/policy.pyi @@ -0,0 +1,75 @@ +from collections.abc import Callable +from email._policybase import Compat32 as Compat32, Policy as Policy, _MessageFactory, _MessageT, compat32 as compat32 +from email.contentmanager import ContentManager +from email.message import EmailMessage +from typing import Any, overload +from typing_extensions import Self + +__all__ = ["Compat32", "compat32", "Policy", "EmailPolicy", "default", "strict", "SMTP", "HTTP"] + +class EmailPolicy(Policy[_MessageT]): + utf8: bool + refold_source: str + header_factory: Callable[[str, Any], Any] + content_manager: ContentManager + @overload + def __init__( + self: EmailPolicy[EmailMessage], + *, + max_line_length: int | None = ..., + linesep: str = ..., + cte_type: str = ..., + raise_on_defect: bool = ..., + mangle_from_: bool = ..., + message_factory: None = None, + # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 + verify_generated_headers: bool = ..., + utf8: bool = ..., + refold_source: str = ..., + header_factory: Callable[[str, str], str] = ..., + content_manager: ContentManager = ..., + ) -> None: ... + @overload + def __init__( + self, + *, + max_line_length: int | None = ..., + linesep: str = ..., + cte_type: str = ..., + raise_on_defect: bool = ..., + mangle_from_: bool = ..., + message_factory: _MessageFactory[_MessageT] | None = ..., + # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 + verify_generated_headers: bool = ..., + utf8: bool = ..., + refold_source: str = ..., + header_factory: Callable[[str, str], str] = ..., + content_manager: ContentManager = ..., + ) -> None: ... + def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: ... + def header_store_parse(self, name: str, value: Any) -> tuple[str, Any]: ... + def header_fetch_parse(self, name: str, value: str) -> Any: ... + def fold(self, name: str, value: str) -> Any: ... + def fold_binary(self, name: str, value: str) -> bytes: ... 
+ def clone( + self, + *, + max_line_length: int | None = ..., + linesep: str = ..., + cte_type: str = ..., + raise_on_defect: bool = ..., + mangle_from_: bool = ..., + message_factory: _MessageFactory[_MessageT] | None = ..., + # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 + verify_generated_headers: bool = ..., + utf8: bool = ..., + refold_source: str = ..., + header_factory: Callable[[str, str], str] = ..., + content_manager: ContentManager = ..., + ) -> Self: ... + +default: EmailPolicy[EmailMessage] +SMTP: EmailPolicy[EmailMessage] +SMTPUTF8: EmailPolicy[EmailMessage] +HTTP: EmailPolicy[EmailMessage] +strict: EmailPolicy[EmailMessage] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/quoprimime.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/quoprimime.pyi new file mode 100644 index 0000000..87d08ee --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/quoprimime.pyi @@ -0,0 +1,28 @@ +from collections.abc import Iterable + +__all__ = [ + "body_decode", + "body_encode", + "body_length", + "decode", + "decodestring", + "header_decode", + "header_encode", + "header_length", + "quote", + "unquote", +] + +def header_check(octet: int) -> bool: ... +def body_check(octet: int) -> bool: ... +def header_length(bytearray: Iterable[int]) -> int: ... +def body_length(bytearray: Iterable[int]) -> int: ... +def unquote(s: str | bytes | bytearray) -> str: ... +def quote(c: str | bytes | bytearray) -> str: ... +def header_encode(header_bytes: bytes | bytearray, charset: str = "iso-8859-1") -> str: ... +def body_encode(body: str, maxlinelen: int = 76, eol: str = "\n") -> str: ... +def decode(encoded: str, eol: str = "\n") -> str: ... +def header_decode(s: str) -> str: ... + +body_decode = decode +decodestring = decode diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/utils.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/utils.pyi new file mode 100644 index 0000000..efc32a7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/email/utils.pyi @@ -0,0 +1,78 @@ +import datetime +import sys +from _typeshed import Unused +from collections.abc import Iterable +from email import _ParamType +from email.charset import Charset +from typing import overload +from typing_extensions import TypeAlias, deprecated + +__all__ = [ + "collapse_rfc2231_value", + "decode_params", + "decode_rfc2231", + "encode_rfc2231", + "formataddr", + "formatdate", + "format_datetime", + "getaddresses", + "make_msgid", + "mktime_tz", + "parseaddr", + "parsedate", + "parsedate_tz", + "parsedate_to_datetime", + "unquote", +] + +_PDTZ: TypeAlias = tuple[int, int, int, int, int, int, int, int, int, int | None] + +def quote(str: str) -> str: ... +def unquote(str: str) -> str: ... + +# `strict` parameter added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 +def parseaddr(addr: str | list[str], *, strict: bool = True) -> tuple[str, str]: ... +def formataddr(pair: tuple[str | None, str], charset: str | Charset = "utf-8") -> str: ... + +# `strict` parameter added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 +def getaddresses(fieldvalues: Iterable[str], *, strict: bool = True) -> list[tuple[str, str]]: ... +@overload +def parsedate(data: None) -> None: ... +@overload +def parsedate(data: str) -> tuple[int, int, int, int, int, int, int, int, int] | None: ... +@overload +def parsedate_tz(data: None) -> None: ... +@overload +def parsedate_tz(data: str) -> _PDTZ | None: ... 
+ +if sys.version_info >= (3, 10): + @overload + def parsedate_to_datetime(data: None) -> None: ... + @overload + def parsedate_to_datetime(data: str) -> datetime.datetime: ... + +else: + def parsedate_to_datetime(data: str) -> datetime.datetime: ... + +def mktime_tz(data: _PDTZ) -> int: ... +def formatdate(timeval: float | None = None, localtime: bool = False, usegmt: bool = False) -> str: ... +def format_datetime(dt: datetime.datetime, usegmt: bool = False) -> str: ... + +if sys.version_info >= (3, 14): + def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: ... + +elif sys.version_info >= (3, 12): + @overload + def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: ... + @overload + @deprecated("The `isdst` parameter does nothing and will be removed in Python 3.14.") + def localtime(dt: datetime.datetime | None = None, isdst: Unused = None) -> datetime.datetime: ... + +else: + def localtime(dt: datetime.datetime | None = None, isdst: int = -1) -> datetime.datetime: ... + +def make_msgid(idstring: str | None = None, domain: str | None = None) -> str: ... +def decode_rfc2231(s: str) -> tuple[str | None, str | None, str]: ... # May return list[str]. See issue #10431 for details. +def encode_rfc2231(s: str, charset: str | None = None, language: str | None = None) -> str: ... +def collapse_rfc2231_value(value: _ParamType, errors: str = "replace", fallback_charset: str = "us-ascii") -> str: ... +def decode_params(params: list[tuple[str, str]]) -> list[tuple[str, _ParamType]]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/__init__.pyi new file mode 100644 index 0000000..61f86d2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/__init__.pyi @@ -0,0 +1,13 @@ +import sys +from codecs import CodecInfo + +class CodecRegistryError(LookupError, SystemError): ... + +def normalize_encoding(encoding: str | bytes) -> str: ... +def search_function(encoding: str) -> CodecInfo | None: ... + +if sys.version_info >= (3, 14) and sys.platform == "win32": + def win32_code_page_search_function(encoding: str) -> CodecInfo | None: ... + +# Needed for submodules +def __getattr__(name: str): ... # incomplete module diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/aliases.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/aliases.pyi new file mode 100644 index 0000000..079af85 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/aliases.pyi @@ -0,0 +1 @@ +aliases: dict[str, str] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/ascii.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/ascii.pyi new file mode 100644 index 0000000..a85585a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/ascii.pyi @@ -0,0 +1,30 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + # At runtime, this is codecs.ascii_encode + @staticmethod + def encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... + # At runtime, this is codecs.ascii_decode + @staticmethod + def decode(data: ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... 
+ +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +# Note: encode being a decode function and decode being an encode function is accurate to runtime. +class StreamConverter(StreamWriter, StreamReader): # type: ignore[misc] # incompatible methods in base classes + # At runtime, this is codecs.ascii_decode + @staticmethod + def encode(data: ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ... # type: ignore[override] + # At runtime, this is codecs.ascii_encode + @staticmethod + def decode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... # type: ignore[override] + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/base64_codec.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/base64_codec.pyi new file mode 100644 index 0000000..0c4f1cb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/base64_codec.pyi @@ -0,0 +1,26 @@ +import codecs +from _typeshed import ReadableBuffer +from typing import ClassVar + +# This codec is bytes to bytes. + +def base64_encode(input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... +def base64_decode(input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... + +class Codec(codecs.Codec): + def encode(self, input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... # type: ignore[override] + def decode(self, input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... # type: ignore[override] + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: ReadableBuffer, final: bool = False) -> bytes: ... # type: ignore[override] + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> bytes: ... # type: ignore[override] + +class StreamWriter(Codec, codecs.StreamWriter): + charbuffertype: ClassVar[type] = ... + +class StreamReader(Codec, codecs.StreamReader): + charbuffertype: ClassVar[type] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/big5.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/big5.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/big5.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... 
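[editor's note] The encodings stubs above (ascii, base64_codec, big5, and the cp*/charmap modules that follow) all expose the same getregentry() entry point that codecs.lookup() resolves through the package's search function; base64_codec is the bytes-to-bytes case its stub calls out. A brief sketch of both paths:

import codecs

# Text codec: lookup() returns the CodecInfo assembled by the module's getregentry().
info = codecs.lookup("ascii")
data, consumed = info.encode("hello")                # (b'hello', 5)

# Bytes-to-bytes codec: use codecs.encode/decode, not str.encode()/bytes.decode().
wrapped = codecs.encode(b"hello", "base64_codec")    # b'aGVsbG8=\n'
restored = codecs.decode(wrapped, "base64_codec")    # b'hello'
assert restored == b"hello"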
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/big5hkscs.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/big5hkscs.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/big5hkscs.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/bz2_codec.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/bz2_codec.pyi new file mode 100644 index 0000000..468346a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/bz2_codec.pyi @@ -0,0 +1,26 @@ +import codecs +from _typeshed import ReadableBuffer +from typing import ClassVar + +# This codec is bytes to bytes. + +def bz2_encode(input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... +def bz2_decode(input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... + +class Codec(codecs.Codec): + def encode(self, input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... # type: ignore[override] + def decode(self, input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... # type: ignore[override] + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: ReadableBuffer, final: bool = False) -> bytes: ... # type: ignore[override] + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> bytes: ... # type: ignore[override] + +class StreamWriter(Codec, codecs.StreamWriter): + charbuffertype: ClassVar[type] = ... + +class StreamReader(Codec, codecs.StreamReader): + charbuffertype: ClassVar[type] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/charmap.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/charmap.pyi new file mode 100644 index 0000000..a971a15 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/charmap.pyi @@ -0,0 +1,33 @@ +import codecs +from _codecs import _CharMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + # At runtime, this is codecs.charmap_encode + @staticmethod + def encode(str: str, errors: str | None = None, mapping: _CharMap | None = None, /) -> tuple[bytes, int]: ... + # At runtime, this is codecs.charmap_decode + @staticmethod + def decode(data: ReadableBuffer, errors: str | None = None, mapping: _CharMap | None = None, /) -> tuple[str, int]: ... 
+ +class IncrementalEncoder(codecs.IncrementalEncoder): + mapping: _CharMap | None + def __init__(self, errors: str = "strict", mapping: _CharMap | None = None) -> None: ... + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + mapping: _CharMap | None + def __init__(self, errors: str = "strict", mapping: _CharMap | None = None) -> None: ... + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): + mapping: _CharMap | None + def __init__(self, stream: codecs._WritableStream, errors: str = "strict", mapping: _CharMap | None = None) -> None: ... + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... # type: ignore[override] + +class StreamReader(Codec, codecs.StreamReader): + mapping: _CharMap | None + def __init__(self, stream: codecs._ReadableStream, errors: str = "strict", mapping: _CharMap | None = None) -> None: ... + def decode(self, input: ReadableBuffer, errors: str = "strict") -> tuple[str, int]: ... # type: ignore[override] + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp037.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp037.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp037.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1006.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1006.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1006.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1026.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1026.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1026.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1125.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1125.pyi new file mode 100644 index 0000000..42781b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1125.pyi @@ -0,0 +1,21 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_map: dict[int, int | None] +decoding_table: str +encoding_map: dict[int, int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1140.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1140.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1140.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1250.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1250.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1250.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1251.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1251.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1251.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1252.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1252.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1252.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1253.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1253.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1253.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1254.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1254.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1254.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1255.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1255.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1255.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1256.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1256.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1256.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1257.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1257.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1257.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1258.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1258.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp1258.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp273.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp273.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp273.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp424.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp424.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp424.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp437.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp437.pyi new file mode 100644 index 0000000..42781b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp437.pyi @@ -0,0 +1,21 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_map: dict[int, int | None] +decoding_table: str +encoding_map: dict[int, int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp500.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp500.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp500.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp720.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp720.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp720.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp737.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp737.pyi new file mode 100644 index 0000000..42781b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp737.pyi @@ -0,0 +1,21 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_map: dict[int, int | None] +decoding_table: str +encoding_map: dict[int, int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp775.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp775.pyi new file mode 100644 index 0000000..42781b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp775.pyi @@ -0,0 +1,21 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_map: dict[int, int | None] +decoding_table: str +encoding_map: dict[int, int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp850.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp850.pyi new file mode 100644 index 0000000..42781b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp850.pyi @@ -0,0 +1,21 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_map: dict[int, int | None] +decoding_table: str +encoding_map: dict[int, int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp852.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp852.pyi new file mode 100644 index 0000000..42781b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp852.pyi @@ -0,0 +1,21 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_map: dict[int, int | None] +decoding_table: str +encoding_map: dict[int, int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp855.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp855.pyi new file mode 100644 index 0000000..42781b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp855.pyi @@ -0,0 +1,21 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_map: dict[int, int | None] +decoding_table: str +encoding_map: dict[int, int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp856.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp856.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp856.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp857.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp857.pyi new file mode 100644 index 0000000..42781b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp857.pyi @@ -0,0 +1,21 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_map: dict[int, int | None] +decoding_table: str +encoding_map: dict[int, int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp858.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp858.pyi new file mode 100644 index 0000000..42781b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp858.pyi @@ -0,0 +1,21 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_map: dict[int, int | None] +decoding_table: str +encoding_map: dict[int, int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp860.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp860.pyi new file mode 100644 index 0000000..42781b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp860.pyi @@ -0,0 +1,21 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_map: dict[int, int | None] +decoding_table: str +encoding_map: dict[int, int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp861.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp861.pyi new file mode 100644 index 0000000..42781b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp861.pyi @@ -0,0 +1,21 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_map: dict[int, int | None] +decoding_table: str +encoding_map: dict[int, int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp862.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp862.pyi new file mode 100644 index 0000000..42781b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp862.pyi @@ -0,0 +1,21 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_map: dict[int, int | None] +decoding_table: str +encoding_map: dict[int, int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp863.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp863.pyi new file mode 100644 index 0000000..42781b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp863.pyi @@ -0,0 +1,21 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_map: dict[int, int | None] +decoding_table: str +encoding_map: dict[int, int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp864.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp864.pyi new file mode 100644 index 0000000..42781b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp864.pyi @@ -0,0 +1,21 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_map: dict[int, int | None] +decoding_table: str +encoding_map: dict[int, int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp865.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp865.pyi new file mode 100644 index 0000000..42781b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp865.pyi @@ -0,0 +1,21 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_map: dict[int, int | None] +decoding_table: str +encoding_map: dict[int, int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp866.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp866.pyi new file mode 100644 index 0000000..42781b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp866.pyi @@ -0,0 +1,21 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_map: dict[int, int | None] +decoding_table: str +encoding_map: dict[int, int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp869.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp869.pyi new file mode 100644 index 0000000..42781b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp869.pyi @@ -0,0 +1,21 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_map: dict[int, int | None] +decoding_table: str +encoding_map: dict[int, int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp874.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp874.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp874.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp875.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp875.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp875.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp932.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp932.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp932.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... 
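The cp932 stub above (and the cp949/cp950 ones that follow) wrap CPython's _multibytecodec machinery rather than charmap tables, so the useful runtime surface is the registered CodecInfo rather than module-level maps. A minimal sketch of that surface, using only the standard library (the sample text and chunk split are arbitrary):

import codecs

info = codecs.lookup("cp932")      # the CodecInfo that getregentry() provides
dec = info.incrementaldecoder()    # IncrementalDecoder from the stub above

data = "カタカナ".encode("cp932")    # two bytes per character here
# Splitting mid-character is fine: the incremental decoder buffers the tail byte.
text = dec.decode(data[:3]) + dec.decode(data[3:], final=True)
assert text == "カタカナ"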
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp949.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp949.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp949.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp950.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp950.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/cp950.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/euc_jis_2004.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/euc_jis_2004.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/euc_jis_2004.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... 
+ +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/euc_jisx0213.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/euc_jisx0213.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/euc_jisx0213.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/euc_jp.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/euc_jp.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/euc_jp.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/euc_kr.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/euc_kr.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/euc_kr.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/gb18030.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/gb18030.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/gb18030.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/gb2312.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/gb2312.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/gb2312.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... 
+ +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/gbk.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/gbk.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/gbk.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/hex_codec.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/hex_codec.pyi new file mode 100644 index 0000000..3fd4fe3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/hex_codec.pyi @@ -0,0 +1,26 @@ +import codecs +from _typeshed import ReadableBuffer +from typing import ClassVar + +# This codec is bytes to bytes. + +def hex_encode(input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... +def hex_decode(input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... + +class Codec(codecs.Codec): + def encode(self, input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... # type: ignore[override] + def decode(self, input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... # type: ignore[override] + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: ReadableBuffer, final: bool = False) -> bytes: ... # type: ignore[override] + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> bytes: ... # type: ignore[override] + +class StreamWriter(Codec, codecs.StreamWriter): + charbuffertype: ClassVar[type] = ... + +class StreamReader(Codec, codecs.StreamReader): + charbuffertype: ClassVar[type] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/hp_roman8.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/hp_roman8.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/hp_roman8.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... 
+ def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/hz.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/hz.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/hz.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/idna.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/idna.pyi new file mode 100644 index 0000000..3e2c8ba --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/idna.pyi @@ -0,0 +1,26 @@ +import codecs +import re +from _typeshed import ReadableBuffer + +dots: re.Pattern[str] +ace_prefix: bytes +sace_prefix: str + +def nameprep(label: str) -> str: ... +def ToASCII(label: str) -> bytes: ... +def ToUnicode(label: bytes | str) -> str: ... + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: ReadableBuffer | str, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, input: str, errors: str, final: bool) -> tuple[bytes, int]: ... + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, input: ReadableBuffer | str, errors: str, final: bool) -> tuple[str, int]: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
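Unlike the charmap stubs, idna.pyi also declares nameprep, ToASCII and ToUnicode, which the codec applies label by label. For orientation only (standard library behaviour; the example hostname is arbitrary):

from encodings.idna import ToASCII, ToUnicode

ToASCII("bücher")                         # b'xn--bcher-kva'
ToUnicode(b"xn--bcher-kva")               # 'bücher'
"bücher.example".encode("idna")           # b'xn--bcher-kva.example', encoded per label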
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_jp.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_jp.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_jp.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_jp_1.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_jp_1.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_jp_1.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_jp_2.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_jp_2.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_jp_2.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... 
+ +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_jp_2004.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_jp_2004.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_jp_2004.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_jp_3.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_jp_3.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_jp_3.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_jp_ext.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_jp_ext.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_jp_ext.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_kr.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_kr.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso2022_kr.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_1.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_1.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_1.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_10.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_10.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_10.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_11.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_11.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_11.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_13.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_13.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_13.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_14.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_14.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_14.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_15.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_15.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_15.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_16.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_16.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_16.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_2.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_2.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_2.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_3.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_3.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_3.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_4.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_4.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_4.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_5.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_5.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_5.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_6.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_6.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_6.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_7.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_7.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_7.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_8.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_8.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_8.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_9.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_9.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/iso8859_9.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/johab.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/johab.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/johab.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... 
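A minimal usage sketch (not part of the committed stubs): the iso8859_* charmap modules above each publish a 256-entry decoding_table/encoding_table pair, while johab and the other East-Asian codecs delegate to _multibytecodec; in both cases codecs.lookup() packages the classes named by getregentry() into a CodecInfo.

import codecs
import encodings.iso8859_15 as latin9   # charmap codec: module-level tables
import encodings.johab as johab         # multibyte codec: wraps _multibytecodec

info = codecs.lookup("johab")
print(info.incrementalencoder)       # the IncrementalEncoder class stubbed above
print(len(latin9.decoding_table))    # 256 -- one character per byte value
print("€".encode("iso8859_15"))      # b'\xa4' (Latin-9 maps the euro sign to 0xA4)
print("한글".encode("johab"))         # Johab covers the modern Hangul syllables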
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/koi8_r.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/koi8_r.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/koi8_r.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/koi8_t.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/koi8_t.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/koi8_t.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/koi8_u.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/koi8_u.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/koi8_u.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/kz1048.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/kz1048.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/kz1048.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/latin_1.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/latin_1.pyi new file mode 100644 index 0000000..3b06773 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/latin_1.pyi @@ -0,0 +1,30 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + # At runtime, this is codecs.latin_1_encode + @staticmethod + def encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... + # At runtime, this is codecs.latin_1_decode + @staticmethod + def decode(data: ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +# Note: encode being a decode function and decode being an encode function is accurate to runtime. +class StreamConverter(StreamWriter, StreamReader): # type: ignore[misc] # incompatible methods in base classes + # At runtime, this is codecs.latin_1_decode + @staticmethod + def encode(data: ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ... # type: ignore[override] + # At runtime, this is codecs.latin_1_encode + @staticmethod + def decode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... # type: ignore[override] + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_arabic.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_arabic.pyi new file mode 100644 index 0000000..42781b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_arabic.pyi @@ -0,0 +1,21 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... 
+ +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_map: dict[int, int | None] +decoding_table: str +encoding_map: dict[int, int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_croatian.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_croatian.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_croatian.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_cyrillic.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_cyrillic.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_cyrillic.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_farsi.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_farsi.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_farsi.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... 
+ +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_greek.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_greek.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_greek.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_iceland.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_iceland.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_iceland.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_latin2.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_latin2.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_latin2.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_roman.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_roman.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_roman.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_romanian.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_romanian.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_romanian.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_turkish.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_turkish.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mac_turkish.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
+ +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mbcs.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mbcs.pyi new file mode 100644 index 0000000..2c2917d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/mbcs.pyi @@ -0,0 +1,28 @@ +import codecs +import sys +from _typeshed import ReadableBuffer + +if sys.platform == "win32": + encode = codecs.mbcs_encode + + def decode(input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... + + class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + + class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + # At runtime, this is codecs.mbcs_decode + @staticmethod + def _buffer_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... + + class StreamWriter(codecs.StreamWriter): + # At runtime, this is codecs.mbcs_encode + @staticmethod + def encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... + + class StreamReader(codecs.StreamReader): + # At runtime, this is codecs.mbcs_decode + @staticmethod + def decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... + + def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/oem.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/oem.pyi new file mode 100644 index 0000000..376c12c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/oem.pyi @@ -0,0 +1,28 @@ +import codecs +import sys +from _typeshed import ReadableBuffer + +if sys.platform == "win32": + encode = codecs.oem_encode + + def decode(input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... + + class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + + class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + # At runtime, this is codecs.oem_decode + @staticmethod + def _buffer_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... + + class StreamWriter(codecs.StreamWriter): + # At runtime, this is codecs.oem_encode + @staticmethod + def encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... + + class StreamReader(codecs.StreamReader): + # At runtime, this is codecs.oem_decode + @staticmethod + def decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... + + def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/palmos.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/palmos.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/palmos.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... 
+ +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/ptcp154.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/ptcp154.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/ptcp154.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/punycode.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/punycode.pyi new file mode 100644 index 0000000..eb99e66 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/punycode.pyi @@ -0,0 +1,33 @@ +import codecs +from typing import Literal + +def segregate(str: str) -> tuple[bytes, list[int]]: ... +def selective_len(str: str, max: int) -> int: ... +def selective_find(str: str, char: str, index: int, pos: int) -> tuple[int, int]: ... +def insertion_unsort(str: str, extended: list[int]) -> list[int]: ... +def T(j: int, bias: int) -> int: ... + +digits: Literal[b"abcdefghijklmnopqrstuvwxyz0123456789"] + +def generate_generalized_integer(N: int, bias: int) -> bytes: ... +def adapt(delta: int, first: bool, numchars: int) -> int: ... +def generate_integers(baselen: int, deltas: list[int]) -> bytes: ... +def punycode_encode(text: str) -> bytes: ... +def decode_generalized_number(extended: bytes, extpos: int, bias: int, errors: str) -> tuple[int, int | None]: ... +def insertion_sort(base: str, extended: bytes, errors: str) -> str: ... +def punycode_decode(text: memoryview | bytes | bytearray | str, errors: str) -> str: ... + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: memoryview | bytes | bytearray | str, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: memoryview | bytes | bytearray | str, final: bool = False) -> str: ... # type: ignore[override] + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
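An illustrative aside (not from the diff): punycode_encode/punycode_decode above implement RFC 3492, the bootstring encoding the idna codec uses for non-ASCII labels; the punycode codec itself does not add the "xn--" ACE prefix.

import codecs

print("bücher".encode("punycode"))              # b'bcher-kva' -- ASCII code points first, then encoded deltas
print(codecs.decode(b"bcher-kva", "punycode"))  # 'bücher'
print("bücher.example".encode("idna"))          # b'xn--bcher-kva.example' -- idna adds the ACE prefix per label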
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/quopri_codec.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/quopri_codec.pyi new file mode 100644 index 0000000..e9deadd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/quopri_codec.pyi @@ -0,0 +1,26 @@ +import codecs +from _typeshed import ReadableBuffer +from typing import ClassVar + +# This codec is bytes to bytes. + +def quopri_encode(input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... +def quopri_decode(input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... + +class Codec(codecs.Codec): + def encode(self, input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... # type: ignore[override] + def decode(self, input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... # type: ignore[override] + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: ReadableBuffer, final: bool = False) -> bytes: ... # type: ignore[override] + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> bytes: ... # type: ignore[override] + +class StreamWriter(Codec, codecs.StreamWriter): + charbuffertype: ClassVar[type] = ... + +class StreamReader(Codec, codecs.StreamReader): + charbuffertype: ClassVar[type] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/raw_unicode_escape.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/raw_unicode_escape.pyi new file mode 100644 index 0000000..2887739 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/raw_unicode_escape.pyi @@ -0,0 +1,23 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + # At runtime, this is codecs.raw_unicode_escape_encode + @staticmethod + def encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... + # At runtime, this is codecs.raw_unicode_escape_decode + @staticmethod + def decode(data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /) -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, input: str | ReadableBuffer, errors: str | None, final: bool) -> tuple[str, int]: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... + +class StreamReader(Codec, codecs.StreamReader): + def decode(self, input: str | ReadableBuffer, errors: str = "strict") -> tuple[str, int]: ... # type: ignore[override] + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/rot_13.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/rot_13.pyi new file mode 100644 index 0000000..8d71bc9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/rot_13.pyi @@ -0,0 +1,23 @@ +import codecs +from _typeshed import SupportsRead, SupportsWrite + +# This codec is string to string. + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[str, int]: ... # type: ignore[override] + def decode(self, input: str, errors: str = "strict") -> tuple[str, int]: ... 
# type: ignore[override] + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> str: ... # type: ignore[override] + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: str, final: bool = False) -> str: ... # type: ignore[override] + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +rot13_map: dict[int, int] + +def rot13(infile: SupportsRead[str], outfile: SupportsWrite[str]) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/shift_jis.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/shift_jis.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/shift_jis.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/shift_jis_2004.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/shift_jis_2004.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/shift_jis_2004.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... 
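One behavioural note worth a tiny sketch (illustrative only, assuming stock CPython): rot_13 above is a str-to-str codec, so it has to be driven through codecs.encode()/codecs.decode(); str.encode() rejects it because it does not produce bytes.

import codecs

print(codecs.encode("Hello", "rot_13"))   # 'Uryyb' -- str in, str out
try:
    "Hello".encode("rot_13")              # str.encode() insists on a bytes result
except TypeError as exc:
    print(exc)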
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/shift_jisx0213.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/shift_jisx0213.pyi new file mode 100644 index 0000000..d613026 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/shift_jisx0213.pyi @@ -0,0 +1,23 @@ +import _multibytecodec as mbc +import codecs +from typing import ClassVar + +codec: mbc._MultibyteCodec + +class Codec(codecs.Codec): + encode = codec.encode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + decode = codec.decode # type: ignore[assignment] # pyright: ignore[reportAssignmentType] + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): # type: ignore[misc] + codec: ClassVar[mbc._MultibyteCodec] = ... + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec: ClassVar[mbc._MultibyteCodec] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/tis_620.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/tis_620.pyi new file mode 100644 index 0000000..f621956 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/tis_620.pyi @@ -0,0 +1,21 @@ +import codecs +from _codecs import _EncodingMap +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... + +decoding_table: str +encoding_table: _EncodingMap diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/undefined.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/undefined.pyi new file mode 100644 index 0000000..4775dac --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/undefined.pyi @@ -0,0 +1,20 @@ +import codecs +from _typeshed import ReadableBuffer + +# These return types are just to match the base types. In reality, these always +# raise an error. + +class Codec(codecs.Codec): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: ReadableBuffer, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... 
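As the comment in undefined.pyi says, its methods never actually return; a one-line check (purely illustrative) shows the codec refusing all input:

import codecs

try:
    codecs.encode("anything", "undefined")
except UnicodeError as exc:
    print(exc)   # the 'undefined' codec raises on every call by design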
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/unicode_escape.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/unicode_escape.pyi new file mode 100644 index 0000000..ceaa39a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/unicode_escape.pyi @@ -0,0 +1,23 @@ +import codecs +from _typeshed import ReadableBuffer + +class Codec(codecs.Codec): + # At runtime, this is codecs.unicode_escape_encode + @staticmethod + def encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... + # At runtime, this is codecs.unicode_escape_decode + @staticmethod + def decode(data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /) -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, input: str | ReadableBuffer, errors: str | None, final: bool) -> tuple[str, int]: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... + +class StreamReader(Codec, codecs.StreamReader): + def decode(self, input: str | ReadableBuffer, errors: str = "strict") -> tuple[str, int]: ... # type: ignore[override] + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_16.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_16.pyi new file mode 100644 index 0000000..3b712cd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_16.pyi @@ -0,0 +1,20 @@ +import codecs +from _typeshed import ReadableBuffer + +encode = codecs.utf_16_encode + +def decode(input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, input: ReadableBuffer, errors: str, final: bool) -> tuple[str, int]: ... + +class StreamWriter(codecs.StreamWriter): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + +class StreamReader(codecs.StreamReader): + def decode(self, input: ReadableBuffer, errors: str = "strict") -> tuple[str, int]: ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_16_be.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_16_be.pyi new file mode 100644 index 0000000..cc7d153 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_16_be.pyi @@ -0,0 +1,26 @@ +import codecs +from _typeshed import ReadableBuffer + +encode = codecs.utf_16_be_encode + +def decode(input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + # At runtime, this is codecs.utf_16_be_decode + @staticmethod + def _buffer_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... + +class StreamWriter(codecs.StreamWriter): + # At runtime, this is codecs.utf_16_be_encode + @staticmethod + def encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... 
+ +class StreamReader(codecs.StreamReader): + # At runtime, this is codecs.utf_16_be_decode + @staticmethod + def decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_16_le.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_16_le.pyi new file mode 100644 index 0000000..ba103eb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_16_le.pyi @@ -0,0 +1,26 @@ +import codecs +from _typeshed import ReadableBuffer + +encode = codecs.utf_16_le_encode + +def decode(input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + # At runtime, this is codecs.utf_16_le_decode + @staticmethod + def _buffer_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... + +class StreamWriter(codecs.StreamWriter): + # At runtime, this is codecs.utf_16_le_encode + @staticmethod + def encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... + +class StreamReader(codecs.StreamReader): + # At runtime, this is codecs.utf_16_le_decode + @staticmethod + def decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_32.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_32.pyi new file mode 100644 index 0000000..c925be7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_32.pyi @@ -0,0 +1,20 @@ +import codecs +from _typeshed import ReadableBuffer + +encode = codecs.utf_32_encode + +def decode(input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, input: ReadableBuffer, errors: str, final: bool) -> tuple[str, int]: ... + +class StreamWriter(codecs.StreamWriter): + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + +class StreamReader(codecs.StreamReader): + def decode(self, input: ReadableBuffer, errors: str = "strict") -> tuple[str, int]: ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_32_be.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_32_be.pyi new file mode 100644 index 0000000..9d28f51 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_32_be.pyi @@ -0,0 +1,26 @@ +import codecs +from _typeshed import ReadableBuffer + +encode = codecs.utf_32_be_encode + +def decode(input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... 
+ +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + # At runtime, this is codecs.utf_32_be_decode + @staticmethod + def _buffer_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... + +class StreamWriter(codecs.StreamWriter): + # At runtime, this is codecs.utf_32_be_encode + @staticmethod + def encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... + +class StreamReader(codecs.StreamReader): + # At runtime, this is codecs.utf_32_be_decode + @staticmethod + def decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_32_le.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_32_le.pyi new file mode 100644 index 0000000..5be14a9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_32_le.pyi @@ -0,0 +1,26 @@ +import codecs +from _typeshed import ReadableBuffer + +encode = codecs.utf_32_le_encode + +def decode(input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + # At runtime, this is codecs.utf_32_le_decode + @staticmethod + def _buffer_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... + +class StreamWriter(codecs.StreamWriter): + # At runtime, this is codecs.utf_32_le_encode + @staticmethod + def encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... + +class StreamReader(codecs.StreamReader): + # At runtime, this is codecs.utf_32_le_decode + @staticmethod + def decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_7.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_7.pyi new file mode 100644 index 0000000..dc1162f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_7.pyi @@ -0,0 +1,26 @@ +import codecs +from _typeshed import ReadableBuffer + +encode = codecs.utf_7_encode + +def decode(input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + # At runtime, this is codecs.utf_7_decode + @staticmethod + def _buffer_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... + +class StreamWriter(codecs.StreamWriter): + # At runtime, this is codecs.utf_7_encode + @staticmethod + def encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... + +class StreamReader(codecs.StreamReader): + # At runtime, this is codecs.utf_7_decode + @staticmethod + def decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... + +def getregentry() -> codecs.CodecInfo: ... 
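A short sketch (assuming stock CPython, nothing project-specific) of why the UTF modules above derive from codecs.BufferedIncrementalDecoder: _buffer_decode lets the decoder hold back an incomplete code unit until the next chunk arrives, e.g. when a UTF-16 character straddles two reads.

import codecs

dec = codecs.getincrementaldecoder("utf-16-le")()
print(dec.decode(b"\xac"))              # '' -- half of U+20AC is buffered, nothing emitted yet
print(dec.decode(b"\x20", final=True))  # '€' -- completed once the second byte arrives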
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_8.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_8.pyi new file mode 100644 index 0000000..918712d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_8.pyi @@ -0,0 +1,26 @@ +import codecs +from _typeshed import ReadableBuffer + +encode = codecs.utf_8_encode + +def decode(input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + # At runtime, this is codecs.utf_8_decode + @staticmethod + def _buffer_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... + +class StreamWriter(codecs.StreamWriter): + # At runtime, this is codecs.utf_8_encode + @staticmethod + def encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... + +class StreamReader(codecs.StreamReader): + # At runtime, this is codecs.utf_8_decode + @staticmethod + def decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_8_sig.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_8_sig.pyi new file mode 100644 index 0000000..af69217 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/utf_8_sig.pyi @@ -0,0 +1,22 @@ +import codecs +from _typeshed import ReadableBuffer + +class IncrementalEncoder(codecs.IncrementalEncoder): + def __init__(self, errors: str = "strict") -> None: ... + def encode(self, input: str, final: bool = False) -> bytes: ... + def getstate(self) -> int: ... + def setstate(self, state: int) -> None: ... # type: ignore[override] + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def __init__(self, errors: str = "strict") -> None: ... + def _buffer_decode(self, input: ReadableBuffer, errors: str | None, final: bool) -> tuple[str, int]: ... + +class StreamWriter(codecs.StreamWriter): + def encode(self, input: str, errors: str | None = "strict") -> tuple[bytes, int]: ... + +class StreamReader(codecs.StreamReader): + def decode(self, input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... + +def getregentry() -> codecs.CodecInfo: ... +def encode(input: str, errors: str | None = "strict") -> tuple[bytes, int]: ... +def decode(input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/uu_codec.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/uu_codec.pyi new file mode 100644 index 0000000..e32ba8a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/uu_codec.pyi @@ -0,0 +1,28 @@ +import codecs +from _typeshed import ReadableBuffer +from typing import ClassVar + +# This codec is bytes to bytes. + +def uu_encode( + input: ReadableBuffer, errors: str = "strict", filename: str = "", mode: int = 0o666 +) -> tuple[bytes, int]: ... +def uu_decode(input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... + +class Codec(codecs.Codec): + def encode(self, input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... 
# type: ignore[override] + def decode(self, input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... # type: ignore[override] + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: ReadableBuffer, final: bool = False) -> bytes: ... # type: ignore[override] + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> bytes: ... # type: ignore[override] + +class StreamWriter(Codec, codecs.StreamWriter): + charbuffertype: ClassVar[type] = ... + +class StreamReader(Codec, codecs.StreamReader): + charbuffertype: ClassVar[type] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/zlib_codec.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/zlib_codec.pyi new file mode 100644 index 0000000..0f13d0e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/encodings/zlib_codec.pyi @@ -0,0 +1,26 @@ +import codecs +from _typeshed import ReadableBuffer +from typing import ClassVar + +# This codec is bytes to bytes. + +def zlib_encode(input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... +def zlib_decode(input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... + +class Codec(codecs.Codec): + def encode(self, input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... # type: ignore[override] + def decode(self, input: ReadableBuffer, errors: str = "strict") -> tuple[bytes, int]: ... # type: ignore[override] + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: ReadableBuffer, final: bool = False) -> bytes: ... # type: ignore[override] + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input: ReadableBuffer, final: bool = False) -> bytes: ... # type: ignore[override] + +class StreamWriter(Codec, codecs.StreamWriter): + charbuffertype: ClassVar[type] = ... + +class StreamReader(Codec, codecs.StreamReader): + charbuffertype: ClassVar[type] = ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ensurepip/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ensurepip/__init__.pyi new file mode 100644 index 0000000..332fb18 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ensurepip/__init__.pyi @@ -0,0 +1,12 @@ +__all__ = ["version", "bootstrap"] + +def version() -> str: ... +def bootstrap( + *, + root: str | None = None, + upgrade: bool = False, + user: bool = False, + altinstall: bool = False, + default_pip: bool = False, + verbosity: int = 0, +) -> None: ... 
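For orientation, a hypothetical snippet (not part of the stub): ensurepip.bootstrap() installs the pip wheel bundled with the interpreter into the current environment; `python -m ensurepip --upgrade` is the command-line equivalent.

import ensurepip

print(ensurepip.version())            # version string of the bundled pip wheel
# ensurepip.bootstrap(upgrade=True)   # would (re)install that bundled pip; commented out because it has side effects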
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/enum.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/enum.pyi new file mode 100644 index 0000000..c131c93 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/enum.pyi @@ -0,0 +1,371 @@ +import _typeshed +import sys +import types +from _typeshed import SupportsKeysAndGetItem, Unused +from builtins import property as _builtins_property +from collections.abc import Callable, Iterable, Iterator, Mapping +from typing import Any, Final, Generic, Literal, TypeVar, overload +from typing_extensions import Self, TypeAlias, disjoint_base + +__all__ = ["EnumMeta", "Enum", "IntEnum", "Flag", "IntFlag", "auto", "unique"] + +if sys.version_info >= (3, 11): + __all__ += [ + "CONFORM", + "CONTINUOUS", + "EJECT", + "EnumCheck", + "EnumType", + "FlagBoundary", + "KEEP", + "NAMED_FLAGS", + "ReprEnum", + "STRICT", + "StrEnum", + "UNIQUE", + "global_enum", + "global_enum_repr", + "global_flag_repr", + "global_str", + "member", + "nonmember", + "property", + "verify", + "pickle_by_enum_name", + "pickle_by_global_name", + ] + +if sys.version_info >= (3, 13): + __all__ += ["EnumDict"] + +_EnumMemberT = TypeVar("_EnumMemberT") +_EnumerationT = TypeVar("_EnumerationT", bound=type[Enum]) + +# The following all work: +# >>> from enum import Enum +# >>> from string import ascii_lowercase +# >>> Enum('Foo', names='RED YELLOW GREEN') +# +# >>> Enum('Foo', names=[('RED', 1), ('YELLOW, 2)]) +# +# >>> Enum('Foo', names=((x for x in (ascii_lowercase[i], i)) for i in range(5))) +# +# >>> Enum('Foo', names={'RED': 1, 'YELLOW': 2}) +# +_EnumNames: TypeAlias = str | Iterable[str] | Iterable[Iterable[str | Any]] | Mapping[str, Any] +_Signature: TypeAlias = Any # TODO: Unable to import Signature from inspect module + +if sys.version_info >= (3, 11): + class nonmember(Generic[_EnumMemberT]): + value: _EnumMemberT + def __init__(self, value: _EnumMemberT) -> None: ... + + class member(Generic[_EnumMemberT]): + value: _EnumMemberT + def __init__(self, value: _EnumMemberT) -> None: ... + +class _EnumDict(dict[str, Any]): + if sys.version_info >= (3, 13): + def __init__(self, cls_name: str | None = None) -> None: ... + else: + def __init__(self) -> None: ... + + def __setitem__(self, key: str, value: Any) -> None: ... + if sys.version_info >= (3, 11): + # See comment above `typing.MutableMapping.update` + # for why overloads are preferable to a Union here + # + # Unlike with MutableMapping.update(), the first argument is required, + # hence the type: ignore + @overload # type: ignore[override] + def update(self, members: SupportsKeysAndGetItem[str, Any], **more_members: Any) -> None: ... + @overload + def update(self, members: Iterable[tuple[str, Any]], **more_members: Any) -> None: ... + if sys.version_info >= (3, 13): + @property + def member_names(self) -> list[str]: ... + +if sys.version_info >= (3, 13): + EnumDict = _EnumDict + +# Structurally: Iterable[T], Reversible[T], Container[T] where T is the enum itself +class EnumMeta(type): + if sys.version_info >= (3, 11): + def __new__( + metacls: type[_typeshed.Self], + cls: str, + bases: tuple[type, ...], + classdict: _EnumDict, + *, + boundary: FlagBoundary | None = None, + _simple: bool = False, + **kwds: Any, + ) -> _typeshed.Self: ... + else: + def __new__( + metacls: type[_typeshed.Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict, **kwds: Any + ) -> _typeshed.Self: ... 
+ + @classmethod + def __prepare__(metacls, cls: str, bases: tuple[type, ...], **kwds: Any) -> _EnumDict: ... # type: ignore[override] + def __iter__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: ... + def __reversed__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: ... + if sys.version_info >= (3, 12): + def __contains__(self: type[Any], value: object) -> bool: ... + elif sys.version_info >= (3, 11): + def __contains__(self: type[Any], member: object) -> bool: ... + elif sys.version_info >= (3, 10): + def __contains__(self: type[Any], obj: object) -> bool: ... + else: + def __contains__(self: type[Any], member: object) -> bool: ... + + def __getitem__(self: type[_EnumMemberT], name: str) -> _EnumMemberT: ... + @_builtins_property + def __members__(self: type[_EnumMemberT]) -> types.MappingProxyType[str, _EnumMemberT]: ... + def __len__(self) -> int: ... + def __bool__(self) -> Literal[True]: ... + def __dir__(self) -> list[str]: ... + + # Overload 1: Value lookup on an already existing enum class (simple case) + @overload + def __call__(cls: type[_EnumMemberT], value: Any, names: None = None) -> _EnumMemberT: ... + + # Overload 2: Functional API for constructing new enum classes. + if sys.version_info >= (3, 11): + @overload + def __call__( + cls, + value: str, + names: _EnumNames, + *, + module: str | None = None, + qualname: str | None = None, + type: type | None = None, + start: int = 1, + boundary: FlagBoundary | None = None, + ) -> type[Enum]: ... + else: + @overload + def __call__( + cls, + value: str, + names: _EnumNames, + *, + module: str | None = None, + qualname: str | None = None, + type: type | None = None, + start: int = 1, + ) -> type[Enum]: ... + + # Overload 3 (py312+ only): Value lookup on an already existing enum class (complex case) + # + # >>> class Foo(enum.Enum): + # ... X = 1, 2, 3 + # >>> Foo(1, 2, 3) + # + # + if sys.version_info >= (3, 12): + @overload + def __call__(cls: type[_EnumMemberT], value: Any, *values: Any) -> _EnumMemberT: ... + if sys.version_info >= (3, 14): + @property + def __signature__(cls) -> _Signature: ... + + _member_names_: list[str] # undocumented + _member_map_: dict[str, Enum] # undocumented + _value2member_map_: dict[Any, Enum] # undocumented + +if sys.version_info >= (3, 11): + # In 3.11 `EnumMeta` metaclass is renamed to `EnumType`, but old name also exists. + EnumType = EnumMeta + + class property(types.DynamicClassAttribute): + def __set_name__(self, ownerclass: type[Enum], name: str) -> None: ... + name: str + clsname: str + member: Enum | None + + _magic_enum_attr = property +else: + _magic_enum_attr = types.DynamicClassAttribute + +class Enum(metaclass=EnumMeta): + @_magic_enum_attr + def name(self) -> str: ... + @_magic_enum_attr + def value(self) -> Any: ... + _name_: str + _value_: Any + _ignore_: str | list[str] + _order_: str + __order__: str + @classmethod + def _missing_(cls, value: object) -> Any: ... + @staticmethod + def _generate_next_value_(name: str, start: int, count: int, last_values: list[Any]) -> Any: ... + # It's not true that `__new__` will accept any argument type, + # so ideally we'd use `Any` to indicate that the argument type is inexpressible. + # However, using `Any` causes too many false-positives for those using mypy's `--disallow-any-expr` + # (see #7752, #2539, mypy/#5788), + # and in practice using `object` here has the same effect as using `Any`. + def __new__(cls, value: object) -> Self: ... + def __dir__(self) -> list[str]: ... + def __hash__(self) -> int: ... 
+ def __format__(self, format_spec: str) -> str: ... + def __reduce_ex__(self, proto: Unused) -> tuple[Any, ...]: ... + if sys.version_info >= (3, 11): + def __copy__(self) -> Self: ... + def __deepcopy__(self, memo: Any) -> Self: ... + if sys.version_info >= (3, 12) and sys.version_info < (3, 14): + @classmethod + def __signature__(cls) -> str: ... + if sys.version_info >= (3, 13): + # Value may be any type, even in special enums. Enabling Enum parsing from + # multiple value types + def _add_value_alias_(self, value: Any) -> None: ... + def _add_alias_(self, name: str) -> None: ... + +if sys.version_info >= (3, 11): + class ReprEnum(Enum): ... + +if sys.version_info >= (3, 12): + class IntEnum(int, ReprEnum): + _value_: int + @_magic_enum_attr + def value(self) -> int: ... + def __new__(cls, value: int) -> Self: ... + +else: + if sys.version_info >= (3, 11): + _IntEnumBase = ReprEnum + else: + _IntEnumBase = Enum + + @disjoint_base + class IntEnum(int, _IntEnumBase): + _value_: int + @_magic_enum_attr + def value(self) -> int: ... + def __new__(cls, value: int) -> Self: ... + +def unique(enumeration: _EnumerationT) -> _EnumerationT: ... + +_auto_null: Any + +class Flag(Enum): + _name_: str | None # type: ignore[assignment] + _value_: int + @_magic_enum_attr + def name(self) -> str | None: ... # type: ignore[override] + @_magic_enum_attr + def value(self) -> int: ... + def __contains__(self, other: Self) -> bool: ... + def __bool__(self) -> bool: ... + def __or__(self, other: Self) -> Self: ... + def __and__(self, other: Self) -> Self: ... + def __xor__(self, other: Self) -> Self: ... + def __invert__(self) -> Self: ... + if sys.version_info >= (3, 11): + def __iter__(self) -> Iterator[Self]: ... + def __len__(self) -> int: ... + __ror__ = __or__ + __rand__ = __and__ + __rxor__ = __xor__ + +if sys.version_info >= (3, 11): + class StrEnum(str, ReprEnum): + def __new__(cls, value: str) -> Self: ... + _value_: str + @_magic_enum_attr + def value(self) -> str: ... + @staticmethod + def _generate_next_value_(name: str, start: int, count: int, last_values: list[str]) -> str: ... + + class EnumCheck(StrEnum): + CONTINUOUS = "no skipped integer values" + NAMED_FLAGS = "multi-flag aliases may not contain unnamed flags" + UNIQUE = "one name per value" + + CONTINUOUS: Final = EnumCheck.CONTINUOUS + NAMED_FLAGS: Final = EnumCheck.NAMED_FLAGS + UNIQUE: Final = EnumCheck.UNIQUE + + class verify: + def __init__(self, *checks: EnumCheck) -> None: ... + def __call__(self, enumeration: _EnumerationT) -> _EnumerationT: ... + + class FlagBoundary(StrEnum): + STRICT = "strict" + CONFORM = "conform" + EJECT = "eject" + KEEP = "keep" + + STRICT: Final = FlagBoundary.STRICT + CONFORM: Final = FlagBoundary.CONFORM + EJECT: Final = FlagBoundary.EJECT + KEEP: Final = FlagBoundary.KEEP + + def global_str(self: Enum) -> str: ... + def global_enum(cls: _EnumerationT, update_str: bool = False) -> _EnumerationT: ... + def global_enum_repr(self: Enum) -> str: ... + def global_flag_repr(self: Flag) -> str: ... + def show_flag_values(value: int) -> list[int]: ... + +if sys.version_info >= (3, 12): + # The body of the class is the same, but the base classes are different. + class IntFlag(int, ReprEnum, Flag, boundary=KEEP): # type: ignore[misc] # complaints about incompatible bases + def __new__(cls, value: int) -> Self: ... + def __or__(self, other: int) -> Self: ... + def __and__(self, other: int) -> Self: ... + def __xor__(self, other: int) -> Self: ... + def __invert__(self) -> Self: ... 
+ __ror__ = __or__ + __rand__ = __and__ + __rxor__ = __xor__ + +elif sys.version_info >= (3, 11): + # The body of the class is the same, but the base classes are different. + @disjoint_base + class IntFlag(int, ReprEnum, Flag, boundary=KEEP): # type: ignore[misc] # complaints about incompatible bases + def __new__(cls, value: int) -> Self: ... + def __or__(self, other: int) -> Self: ... + def __and__(self, other: int) -> Self: ... + def __xor__(self, other: int) -> Self: ... + def __invert__(self) -> Self: ... + __ror__ = __or__ + __rand__ = __and__ + __rxor__ = __xor__ + +else: + @disjoint_base + class IntFlag(int, Flag): # type: ignore[misc] # complaints about incompatible bases + def __new__(cls, value: int) -> Self: ... + def __or__(self, other: int) -> Self: ... + def __and__(self, other: int) -> Self: ... + def __xor__(self, other: int) -> Self: ... + def __invert__(self) -> Self: ... + __ror__ = __or__ + __rand__ = __and__ + __rxor__ = __xor__ + +class auto: + _value_: Any + @_magic_enum_attr + def value(self) -> Any: ... + def __new__(cls) -> Self: ... + + # These don't exist, but auto is basically immediately replaced with + # either an int or a str depending on the type of the enum. StrEnum's auto + # shouldn't have these, but they're needed for int versions of auto (mostly the __or__). + # Ideally type checkers would special case auto enough to handle this, + # but until then this is a slightly inaccurate helping hand. + def __or__(self, other: int | Self) -> Self: ... + def __and__(self, other: int | Self) -> Self: ... + def __xor__(self, other: int | Self) -> Self: ... + __ror__ = __or__ + __rand__ = __and__ + __rxor__ = __xor__ + +if sys.version_info >= (3, 11): + def pickle_by_global_name(self: Enum, proto: int) -> str: ... + def pickle_by_enum_name(self: _EnumMemberT, proto: int) -> tuple[Callable[..., Any], tuple[type[_EnumMemberT], str]]: ... 
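To make the EnumMeta.__call__ overloads above concrete: the first overload is plain value lookup on an existing enum class, while the second is the functional API that builds a new class from names/values. A small sketch (the class names and values here are arbitrary examples):

from enum import Enum, IntFlag, auto

# Functional API (overload 2): construct a new enum class from a mapping.
Color = Enum("Color", names={"RED": 1, "YELLOW": 2})

# Value lookup (overload 1): call an existing enum class with a member value.
assert Color(1) is Color.RED
assert Color["YELLOW"] is Color.YELLOW        # EnumMeta.__getitem__

class Perm(IntFlag):
    R = auto()
    W = auto()
    X = auto()

# Flag/IntFlag members compose with the bitwise operators typed above.
assert (Perm.R | Perm.W).value == 3
assert Perm.W in (Perm.R | Perm.W)            # Flag.__contains__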
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/errno.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/errno.pyi new file mode 100644 index 0000000..e025e1f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/errno.pyi @@ -0,0 +1,227 @@ +import sys +from collections.abc import Mapping +from typing import Final + +errorcode: Mapping[int, str] + +EPERM: Final[int] +ENOENT: Final[int] +ESRCH: Final[int] +EINTR: Final[int] +EIO: Final[int] +ENXIO: Final[int] +E2BIG: Final[int] +ENOEXEC: Final[int] +EBADF: Final[int] +ECHILD: Final[int] +EAGAIN: Final[int] +ENOMEM: Final[int] +EACCES: Final[int] +EFAULT: Final[int] +EBUSY: Final[int] +EEXIST: Final[int] +EXDEV: Final[int] +ENODEV: Final[int] +ENOTDIR: Final[int] +EISDIR: Final[int] +EINVAL: Final[int] +ENFILE: Final[int] +EMFILE: Final[int] +ENOTTY: Final[int] +ETXTBSY: Final[int] +EFBIG: Final[int] +ENOSPC: Final[int] +ESPIPE: Final[int] +EROFS: Final[int] +EMLINK: Final[int] +EPIPE: Final[int] +EDOM: Final[int] +ERANGE: Final[int] +EDEADLK: Final[int] +ENAMETOOLONG: Final[int] +ENOLCK: Final[int] +ENOSYS: Final[int] +ENOTEMPTY: Final[int] +ELOOP: Final[int] +EWOULDBLOCK: Final[int] +ENOMSG: Final[int] +EIDRM: Final[int] +ENOSTR: Final[int] +ENODATA: Final[int] +ETIME: Final[int] +ENOSR: Final[int] +EREMOTE: Final[int] +ENOLINK: Final[int] +EPROTO: Final[int] +EBADMSG: Final[int] +EOVERFLOW: Final[int] +EILSEQ: Final[int] +EUSERS: Final[int] +ENOTSOCK: Final[int] +EDESTADDRREQ: Final[int] +EMSGSIZE: Final[int] +EPROTOTYPE: Final[int] +ENOPROTOOPT: Final[int] +EPROTONOSUPPORT: Final[int] +ESOCKTNOSUPPORT: Final[int] +ENOTSUP: Final[int] +EOPNOTSUPP: Final[int] +EPFNOSUPPORT: Final[int] +EAFNOSUPPORT: Final[int] +EADDRINUSE: Final[int] +EADDRNOTAVAIL: Final[int] +ENETDOWN: Final[int] +ENETUNREACH: Final[int] +ENETRESET: Final[int] +ECONNABORTED: Final[int] +ECONNRESET: Final[int] +ENOBUFS: Final[int] +EISCONN: Final[int] +ENOTCONN: Final[int] +ESHUTDOWN: Final[int] +ETOOMANYREFS: Final[int] +ETIMEDOUT: Final[int] +ECONNREFUSED: Final[int] +EHOSTDOWN: Final[int] +EHOSTUNREACH: Final[int] +EALREADY: Final[int] +EINPROGRESS: Final[int] +ESTALE: Final[int] +EDQUOT: Final[int] +ECANCELED: Final[int] # undocumented +ENOTRECOVERABLE: Final[int] # undocumented +EOWNERDEAD: Final[int] # undocumented + +if sys.platform == "sunos5" or sys.platform == "solaris": # noqa: Y008 + ELOCKUNMAPPED: Final[int] + ENOTACTIVE: Final[int] + +if sys.platform != "win32": + ENOTBLK: Final[int] + EMULTIHOP: Final[int] + +if sys.platform == "darwin": + # All of the below are undocumented + EAUTH: Final[int] + EBADARCH: Final[int] + EBADEXEC: Final[int] + EBADMACHO: Final[int] + EBADRPC: Final[int] + EDEVERR: Final[int] + EFTYPE: Final[int] + ENEEDAUTH: Final[int] + ENOATTR: Final[int] + ENOPOLICY: Final[int] + EPROCLIM: Final[int] + EPROCUNAVAIL: Final[int] + EPROGMISMATCH: Final[int] + EPROGUNAVAIL: Final[int] + EPWROFF: Final[int] + ERPCMISMATCH: Final[int] + ESHLIBVERS: Final[int] + if sys.version_info >= (3, 11): + EQFULL: Final[int] + ENOTCAPABLE: Final[int] # available starting with 3.11.1 + +if sys.platform != "darwin": + EDEADLOCK: Final[int] + +if sys.platform != "win32" and sys.platform != "darwin": + ECHRNG: Final[int] + EL2NSYNC: Final[int] + EL3HLT: Final[int] + EL3RST: Final[int] + ELNRNG: Final[int] + EUNATCH: Final[int] + ENOCSI: Final[int] + EL2HLT: Final[int] + EBADE: Final[int] + EBADR: Final[int] + EXFULL: Final[int] + ENOANO: Final[int] + EBADRQC: Final[int] + EBADSLT: Final[int] + EBFONT: 
Final[int] + ENONET: Final[int] + ENOPKG: Final[int] + EADV: Final[int] + ESRMNT: Final[int] + ECOMM: Final[int] + EDOTDOT: Final[int] + ENOTUNIQ: Final[int] + EBADFD: Final[int] + EREMCHG: Final[int] + ELIBACC: Final[int] + ELIBBAD: Final[int] + ELIBSCN: Final[int] + ELIBMAX: Final[int] + ELIBEXEC: Final[int] + ERESTART: Final[int] + ESTRPIPE: Final[int] + EUCLEAN: Final[int] + ENOTNAM: Final[int] + ENAVAIL: Final[int] + EISNAM: Final[int] + EREMOTEIO: Final[int] + # All of the below are undocumented + EKEYEXPIRED: Final[int] + EKEYREJECTED: Final[int] + EKEYREVOKED: Final[int] + EMEDIUMTYPE: Final[int] + ENOKEY: Final[int] + ENOMEDIUM: Final[int] + ERFKILL: Final[int] + + if sys.version_info >= (3, 14): + EHWPOISON: Final[int] + +if sys.platform == "win32": + # All of these are undocumented + WSABASEERR: Final[int] + WSAEACCES: Final[int] + WSAEADDRINUSE: Final[int] + WSAEADDRNOTAVAIL: Final[int] + WSAEAFNOSUPPORT: Final[int] + WSAEALREADY: Final[int] + WSAEBADF: Final[int] + WSAECONNABORTED: Final[int] + WSAECONNREFUSED: Final[int] + WSAECONNRESET: Final[int] + WSAEDESTADDRREQ: Final[int] + WSAEDISCON: Final[int] + WSAEDQUOT: Final[int] + WSAEFAULT: Final[int] + WSAEHOSTDOWN: Final[int] + WSAEHOSTUNREACH: Final[int] + WSAEINPROGRESS: Final[int] + WSAEINTR: Final[int] + WSAEINVAL: Final[int] + WSAEISCONN: Final[int] + WSAELOOP: Final[int] + WSAEMFILE: Final[int] + WSAEMSGSIZE: Final[int] + WSAENAMETOOLONG: Final[int] + WSAENETDOWN: Final[int] + WSAENETRESET: Final[int] + WSAENETUNREACH: Final[int] + WSAENOBUFS: Final[int] + WSAENOPROTOOPT: Final[int] + WSAENOTCONN: Final[int] + WSAENOTEMPTY: Final[int] + WSAENOTSOCK: Final[int] + WSAEOPNOTSUPP: Final[int] + WSAEPFNOSUPPORT: Final[int] + WSAEPROCLIM: Final[int] + WSAEPROTONOSUPPORT: Final[int] + WSAEPROTOTYPE: Final[int] + WSAEREMOTE: Final[int] + WSAESHUTDOWN: Final[int] + WSAESOCKTNOSUPPORT: Final[int] + WSAESTALE: Final[int] + WSAETIMEDOUT: Final[int] + WSAETOOMANYREFS: Final[int] + WSAEUSERS: Final[int] + WSAEWOULDBLOCK: Final[int] + WSANOTINITIALISED: Final[int] + WSASYSNOTREADY: Final[int] + WSAVERNOTSUPPORTED: Final[int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/faulthandler.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/faulthandler.pyi new file mode 100644 index 0000000..33d0899 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/faulthandler.pyi @@ -0,0 +1,23 @@ +import sys +from _typeshed import FileDescriptorLike + +def cancel_dump_traceback_later() -> None: ... +def disable() -> None: ... +def dump_traceback(file: FileDescriptorLike = ..., all_threads: bool = ...) -> None: ... + +if sys.version_info >= (3, 14): + def dump_c_stack(file: FileDescriptorLike = ...) -> None: ... + +def dump_traceback_later(timeout: float, repeat: bool = ..., file: FileDescriptorLike = ..., exit: bool = ...) -> None: ... + +if sys.version_info >= (3, 14): + def enable(file: FileDescriptorLike = ..., all_threads: bool = ..., c_stack: bool = True) -> None: ... + +else: + def enable(file: FileDescriptorLike = ..., all_threads: bool = ...) -> None: ... + +def is_enabled() -> bool: ... + +if sys.platform != "win32": + def register(signum: int, file: FileDescriptorLike = ..., all_threads: bool = ..., chain: bool = ...) -> None: ... + def unregister(signum: int, /) -> None: ... 
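The faulthandler stub above is small enough to exercise directly; a sketch of the common enable/dump calls (writing to stderr, which is also the module's default target):

import faulthandler
import sys

faulthandler.enable(file=sys.stderr, all_threads=True)
assert faulthandler.is_enabled()

# Print the current Python stack of every thread without crashing the process.
faulthandler.dump_traceback(file=sys.stderr, all_threads=True)

# Arm a watchdog that dumps all stacks if the process is still running in 30s,
# then cancel it again.
faulthandler.dump_traceback_later(30.0, repeat=False, file=sys.stderr)
faulthandler.cancel_dump_traceback_later()

faulthandler.disable()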
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/fcntl.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/fcntl.pyi new file mode 100644 index 0000000..5a3e89b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/fcntl.pyi @@ -0,0 +1,158 @@ +import sys +from _typeshed import FileDescriptorLike, ReadOnlyBuffer, WriteableBuffer +from typing import Any, Final, Literal, overload +from typing_extensions import Buffer + +if sys.platform != "win32": + FASYNC: Final[int] + FD_CLOEXEC: Final[int] + F_DUPFD: Final[int] + F_DUPFD_CLOEXEC: Final[int] + F_GETFD: Final[int] + F_GETFL: Final[int] + F_GETLK: Final[int] + F_GETOWN: Final[int] + F_RDLCK: Final[int] + F_SETFD: Final[int] + F_SETFL: Final[int] + F_SETLK: Final[int] + F_SETLKW: Final[int] + F_SETOWN: Final[int] + F_UNLCK: Final[int] + F_WRLCK: Final[int] + + F_GETLEASE: Final[int] + F_SETLEASE: Final[int] + if sys.platform == "darwin": + F_FULLFSYNC: Final[int] + F_NOCACHE: Final[int] + F_GETPATH: Final[int] + if sys.platform == "linux": + F_SETLKW64: Final[int] + F_SETSIG: Final[int] + F_SHLCK: Final[int] + F_SETLK64: Final[int] + F_GETSIG: Final[int] + F_NOTIFY: Final[int] + F_EXLCK: Final[int] + F_GETLK64: Final[int] + F_ADD_SEALS: Final[int] + F_GET_SEALS: Final[int] + F_SEAL_GROW: Final[int] + F_SEAL_SEAL: Final[int] + F_SEAL_SHRINK: Final[int] + F_SEAL_WRITE: Final[int] + F_OFD_GETLK: Final[int] + F_OFD_SETLK: Final[int] + F_OFD_SETLKW: Final[int] + + if sys.version_info >= (3, 10): + F_GETPIPE_SZ: Final[int] + F_SETPIPE_SZ: Final[int] + + DN_ACCESS: Final[int] + DN_ATTRIB: Final[int] + DN_CREATE: Final[int] + DN_DELETE: Final[int] + DN_MODIFY: Final[int] + DN_MULTISHOT: Final[int] + DN_RENAME: Final[int] + + LOCK_EX: Final[int] + LOCK_NB: Final[int] + LOCK_SH: Final[int] + LOCK_UN: Final[int] + if sys.platform == "linux": + LOCK_MAND: Final[int] + LOCK_READ: Final[int] + LOCK_RW: Final[int] + LOCK_WRITE: Final[int] + + if sys.platform == "linux": + # Constants for the POSIX STREAMS interface. Present in glibc until 2.29 (released February 2019). + # Never implemented on BSD, and considered "obsolescent" starting in POSIX 2008. + # Probably still used on Solaris. 
+ I_ATMARK: Final[int] + I_CANPUT: Final[int] + I_CKBAND: Final[int] + I_FDINSERT: Final[int] + I_FIND: Final[int] + I_FLUSH: Final[int] + I_FLUSHBAND: Final[int] + I_GETBAND: Final[int] + I_GETCLTIME: Final[int] + I_GETSIG: Final[int] + I_GRDOPT: Final[int] + I_GWROPT: Final[int] + I_LINK: Final[int] + I_LIST: Final[int] + I_LOOK: Final[int] + I_NREAD: Final[int] + I_PEEK: Final[int] + I_PLINK: Final[int] + I_POP: Final[int] + I_PUNLINK: Final[int] + I_PUSH: Final[int] + I_RECVFD: Final[int] + I_SENDFD: Final[int] + I_SETCLTIME: Final[int] + I_SETSIG: Final[int] + I_SRDOPT: Final[int] + I_STR: Final[int] + I_SWROPT: Final[int] + I_UNLINK: Final[int] + + if sys.version_info >= (3, 12) and sys.platform == "linux": + FICLONE: Final[int] + FICLONERANGE: Final[int] + + if sys.version_info >= (3, 13) and sys.platform == "linux": + F_OWNER_TID: Final = 0 + F_OWNER_PID: Final = 1 + F_OWNER_PGRP: Final = 2 + F_SETOWN_EX: Final = 15 + F_GETOWN_EX: Final = 16 + F_SEAL_FUTURE_WRITE: Final = 16 + F_GET_RW_HINT: Final = 1035 + F_SET_RW_HINT: Final = 1036 + F_GET_FILE_RW_HINT: Final = 1037 + F_SET_FILE_RW_HINT: Final = 1038 + RWH_WRITE_LIFE_NOT_SET: Final = 0 + RWH_WRITE_LIFE_NONE: Final = 1 + RWH_WRITE_LIFE_SHORT: Final = 2 + RWH_WRITE_LIFE_MEDIUM: Final = 3 + RWH_WRITE_LIFE_LONG: Final = 4 + RWH_WRITE_LIFE_EXTREME: Final = 5 + + if sys.version_info >= (3, 11) and sys.platform == "darwin": + F_OFD_SETLK: Final = 90 + F_OFD_SETLKW: Final = 91 + F_OFD_GETLK: Final = 92 + + if sys.version_info >= (3, 13) and sys.platform != "linux": + # OSx and NetBSD + F_GETNOSIGPIPE: Final[int] + F_SETNOSIGPIPE: Final[int] + # OSx and FreeBSD + F_RDAHEAD: Final[int] + + @overload + def fcntl(fd: FileDescriptorLike, cmd: int, arg: int = 0, /) -> int: ... + @overload + def fcntl(fd: FileDescriptorLike, cmd: int, arg: str | ReadOnlyBuffer, /) -> bytes: ... + # If arg is an int, return int + @overload + def ioctl(fd: FileDescriptorLike, request: int, arg: int = 0, mutate_flag: bool = True, /) -> int: ... + # The return type works as follows: + # - If arg is a read-write buffer, return int if mutate_flag is True, otherwise bytes + # - If arg is a read-only buffer, return bytes (and ignore the value of mutate_flag) + # We can't represent that precisely as we can't distinguish between read-write and read-only + # buffers, so we add overloads for a few unambiguous cases and use Any for the rest. + @overload + def ioctl(fd: FileDescriptorLike, request: int, arg: bytes, mutate_flag: bool = True, /) -> bytes: ... + @overload + def ioctl(fd: FileDescriptorLike, request: int, arg: WriteableBuffer, mutate_flag: Literal[False], /) -> bytes: ... + @overload + def ioctl(fd: FileDescriptorLike, request: int, arg: Buffer, mutate_flag: bool = True, /) -> Any: ... + def flock(fd: FileDescriptorLike, operation: int, /) -> None: ... + def lockf(fd: FileDescriptorLike, cmd: int, len: int = 0, start: int = 0, whence: int = 0, /) -> Any: ... 
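The fcntl overloads above encode the usual calling patterns: an int (or omitted) arg returns an int, while a buffer arg returns bytes. A POSIX-only sketch (the lock-file path is an arbitrary example):

import fcntl
import os

fd = os.open("/tmp/demo.lock", os.O_CREAT | os.O_RDWR, 0o600)
try:
    # flock(fd, operation): advisory whole-file lock, non-blocking here.
    fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)

    # fcntl(fd, cmd, int_arg) -> int  (first overload above).
    flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
finally:
    fcntl.flock(fd, fcntl.LOCK_UN)
    os.close(fd)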
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/filecmp.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/filecmp.pyi new file mode 100644 index 0000000..620cc17 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/filecmp.pyi @@ -0,0 +1,65 @@ +import sys +from _typeshed import GenericPath, StrOrBytesPath +from collections.abc import Callable, Iterable, Sequence +from types import GenericAlias +from typing import Any, AnyStr, Final, Generic, Literal + +__all__ = ["clear_cache", "cmp", "dircmp", "cmpfiles", "DEFAULT_IGNORES"] + +DEFAULT_IGNORES: Final[list[str]] +BUFSIZE: Final = 8192 + +def cmp(f1: StrOrBytesPath, f2: StrOrBytesPath, shallow: bool | Literal[0, 1] = True) -> bool: ... +def cmpfiles( + a: GenericPath[AnyStr], b: GenericPath[AnyStr], common: Iterable[GenericPath[AnyStr]], shallow: bool | Literal[0, 1] = True +) -> tuple[list[AnyStr], list[AnyStr], list[AnyStr]]: ... + +class dircmp(Generic[AnyStr]): + if sys.version_info >= (3, 13): + def __init__( + self, + a: GenericPath[AnyStr], + b: GenericPath[AnyStr], + ignore: Sequence[AnyStr] | None = None, + hide: Sequence[AnyStr] | None = None, + *, + shallow: bool = True, + ) -> None: ... + else: + def __init__( + self, + a: GenericPath[AnyStr], + b: GenericPath[AnyStr], + ignore: Sequence[AnyStr] | None = None, + hide: Sequence[AnyStr] | None = None, + ) -> None: ... + left: AnyStr + right: AnyStr + hide: Sequence[AnyStr] + ignore: Sequence[AnyStr] + # These properties are created at runtime by __getattr__ + subdirs: dict[AnyStr, dircmp[AnyStr]] + same_files: list[AnyStr] + diff_files: list[AnyStr] + funny_files: list[AnyStr] + common_dirs: list[AnyStr] + common_files: list[AnyStr] + common_funny: list[AnyStr] + common: list[AnyStr] + left_only: list[AnyStr] + right_only: list[AnyStr] + left_list: list[AnyStr] + right_list: list[AnyStr] + def report(self) -> None: ... + def report_partial_closure(self) -> None: ... + def report_full_closure(self) -> None: ... + methodmap: dict[str, Callable[[], None]] + def phase0(self) -> None: ... + def phase1(self) -> None: ... + def phase2(self) -> None: ... + def phase3(self) -> None: ... + def phase4(self) -> None: ... + def phase4_closure(self) -> None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +def clear_cache() -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/fileinput.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/fileinput.pyi new file mode 100644 index 0000000..eb942bc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/fileinput.pyi @@ -0,0 +1,210 @@ +import sys +from _typeshed import AnyStr_co, StrOrBytesPath +from collections.abc import Callable, Iterable, Iterator +from types import GenericAlias, TracebackType +from typing import IO, Any, AnyStr, Literal, Protocol, overload, type_check_only +from typing_extensions import Self, TypeAlias + +__all__ = [ + "input", + "close", + "nextfile", + "filename", + "lineno", + "filelineno", + "fileno", + "isfirstline", + "isstdin", + "FileInput", + "hook_compressed", + "hook_encoded", +] + +if sys.version_info >= (3, 11): + _TextMode: TypeAlias = Literal["r"] +else: + _TextMode: TypeAlias = Literal["r", "rU", "U"] + +@type_check_only +class _HasReadlineAndFileno(Protocol[AnyStr_co]): + def readline(self) -> AnyStr_co: ... + def fileno(self) -> int: ... 
+ +if sys.version_info >= (3, 10): + # encoding and errors are added + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, + encoding: str | None = None, + errors: str | None = None, + ) -> FileInput[str]: ... + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, + encoding: None = None, + errors: None = None, + ) -> FileInput[bytes]: ... + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, + encoding: str | None = None, + errors: str | None = None, + ) -> FileInput[Any]: ... + +else: + # bufsize is dropped and mode and openhook become keyword-only + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, + ) -> FileInput[str]: ... + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, + ) -> FileInput[bytes]: ... + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, + ) -> FileInput[Any]: ... + +def close() -> None: ... +def nextfile() -> None: ... +def filename() -> str: ... +def lineno() -> int: ... +def filelineno() -> int: ... +def fileno() -> int: ... +def isfirstline() -> bool: ... +def isstdin() -> bool: ... + +class FileInput(Iterator[AnyStr]): + if sys.version_info >= (3, 10): + # encoding and errors are added + @overload + def __init__( + self: FileInput[str], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, + encoding: str | None = None, + errors: str | None = None, + ) -> None: ... + @overload + def __init__( + self: FileInput[bytes], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, + encoding: None = None, + errors: None = None, + ) -> None: ... + @overload + def __init__( + self: FileInput[Any], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, + encoding: str | None = None, + errors: str | None = None, + ) -> None: ... 
+ + else: + # bufsize is dropped and mode and openhook become keyword-only + @overload + def __init__( + self: FileInput[str], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, + ) -> None: ... + @overload + def __init__( + self: FileInput[bytes], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, + ) -> None: ... + @overload + def __init__( + self: FileInput[Any], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, + ) -> None: ... + + def __del__(self) -> None: ... + def close(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> AnyStr: ... + if sys.version_info < (3, 11): + def __getitem__(self, i: int) -> AnyStr: ... + + def nextfile(self) -> None: ... + def readline(self) -> AnyStr: ... + def filename(self) -> str: ... + def lineno(self) -> int: ... + def filelineno(self) -> int: ... + def fileno(self) -> int: ... + def isfirstline(self) -> bool: ... + def isstdin(self) -> bool: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +if sys.version_info >= (3, 10): + def hook_compressed( + filename: StrOrBytesPath, mode: str, *, encoding: str | None = None, errors: str | None = None + ) -> IO[Any]: ... + +else: + def hook_compressed(filename: StrOrBytesPath, mode: str) -> IO[Any]: ... + +def hook_encoded(encoding: str, errors: str | None = None) -> Callable[[StrOrBytesPath, str], IO[Any]]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/fnmatch.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/fnmatch.pyi new file mode 100644 index 0000000..345c457 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/fnmatch.pyi @@ -0,0 +1,15 @@ +import sys +from collections.abc import Iterable +from typing import AnyStr + +__all__ = ["filter", "fnmatch", "fnmatchcase", "translate"] +if sys.version_info >= (3, 14): + __all__ += ["filterfalse"] + +def fnmatch(name: AnyStr, pat: AnyStr) -> bool: ... +def fnmatchcase(name: AnyStr, pat: AnyStr) -> bool: ... +def filter(names: Iterable[AnyStr], pat: AnyStr) -> list[AnyStr]: ... +def translate(pat: str) -> str: ... + +if sys.version_info >= (3, 14): + def filterfalse(names: Iterable[AnyStr], pat: AnyStr) -> list[AnyStr]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/formatter.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/formatter.pyi new file mode 100644 index 0000000..05c3c8b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/formatter.pyi @@ -0,0 +1,88 @@ +from collections.abc import Iterable +from typing import IO, Any +from typing_extensions import TypeAlias + +AS_IS: None +_FontType: TypeAlias = tuple[str, bool, bool, bool] +_StylesType: TypeAlias = tuple[Any, ...] + +class NullFormatter: + writer: NullWriter | None + def __init__(self, writer: NullWriter | None = None) -> None: ... 
+ def end_paragraph(self, blankline: int) -> None: ... + def add_line_break(self) -> None: ... + def add_hor_rule(self, *args: Any, **kw: Any) -> None: ... + def add_label_data(self, format: str, counter: int, blankline: int | None = None) -> None: ... + def add_flowing_data(self, data: str) -> None: ... + def add_literal_data(self, data: str) -> None: ... + def flush_softspace(self) -> None: ... + def push_alignment(self, align: str | None) -> None: ... + def pop_alignment(self) -> None: ... + def push_font(self, x: _FontType) -> None: ... + def pop_font(self) -> None: ... + def push_margin(self, margin: int) -> None: ... + def pop_margin(self) -> None: ... + def set_spacing(self, spacing: str | None) -> None: ... + def push_style(self, *styles: _StylesType) -> None: ... + def pop_style(self, n: int = 1) -> None: ... + def assert_line_data(self, flag: int = 1) -> None: ... + +class AbstractFormatter: + writer: NullWriter + align: str | None + align_stack: list[str | None] + font_stack: list[_FontType] + margin_stack: list[int] + spacing: str | None + style_stack: Any + nospace: int + softspace: int + para_end: int + parskip: int + hard_break: int + have_label: int + def __init__(self, writer: NullWriter) -> None: ... + def end_paragraph(self, blankline: int) -> None: ... + def add_line_break(self) -> None: ... + def add_hor_rule(self, *args: Any, **kw: Any) -> None: ... + def add_label_data(self, format: str, counter: int, blankline: int | None = None) -> None: ... + def format_counter(self, format: Iterable[str], counter: int) -> str: ... + def format_letter(self, case: str, counter: int) -> str: ... + def format_roman(self, case: str, counter: int) -> str: ... + def add_flowing_data(self, data: str) -> None: ... + def add_literal_data(self, data: str) -> None: ... + def flush_softspace(self) -> None: ... + def push_alignment(self, align: str | None) -> None: ... + def pop_alignment(self) -> None: ... + def push_font(self, font: _FontType) -> None: ... + def pop_font(self) -> None: ... + def push_margin(self, margin: int) -> None: ... + def pop_margin(self) -> None: ... + def set_spacing(self, spacing: str | None) -> None: ... + def push_style(self, *styles: _StylesType) -> None: ... + def pop_style(self, n: int = 1) -> None: ... + def assert_line_data(self, flag: int = 1) -> None: ... + +class NullWriter: + def flush(self) -> None: ... + def new_alignment(self, align: str | None) -> None: ... + def new_font(self, font: _FontType) -> None: ... + def new_margin(self, margin: int, level: int) -> None: ... + def new_spacing(self, spacing: str | None) -> None: ... + def new_styles(self, styles: tuple[Any, ...]) -> None: ... + def send_paragraph(self, blankline: int) -> None: ... + def send_line_break(self) -> None: ... + def send_hor_rule(self, *args: Any, **kw: Any) -> None: ... + def send_label_data(self, data: str) -> None: ... + def send_flowing_data(self, data: str) -> None: ... + def send_literal_data(self, data: str) -> None: ... + +class AbstractWriter(NullWriter): ... + +class DumbWriter(NullWriter): + file: IO[str] + maxcol: int + def __init__(self, file: IO[str] | None = None, maxcol: int = 72) -> None: ... + def reset(self) -> None: ... + +def test(file: str | None = None) -> None: ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/fractions.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/fractions.pyi new file mode 100644 index 0000000..ef4066a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/fractions.pyi @@ -0,0 +1,167 @@ +import sys +from collections.abc import Callable +from decimal import Decimal +from numbers import Rational, Real +from typing import Any, Literal, Protocol, SupportsIndex, overload, type_check_only +from typing_extensions import Self, TypeAlias + +_ComparableNum: TypeAlias = int | float | Decimal | Real + +__all__ = ["Fraction"] + +@type_check_only +class _ConvertibleToIntegerRatio(Protocol): + def as_integer_ratio(self) -> tuple[int | Rational, int | Rational]: ... + +class Fraction(Rational): + __slots__ = ("_numerator", "_denominator") + @overload + def __new__(cls, numerator: int | Rational = 0, denominator: int | Rational | None = None) -> Self: ... + @overload + def __new__(cls, numerator: float | Decimal | str) -> Self: ... + + if sys.version_info >= (3, 14): + @overload + def __new__(cls, numerator: _ConvertibleToIntegerRatio) -> Self: ... + + @classmethod + def from_float(cls, f: float) -> Self: ... + @classmethod + def from_decimal(cls, dec: Decimal) -> Self: ... + def limit_denominator(self, max_denominator: int = 1000000) -> Fraction: ... + def as_integer_ratio(self) -> tuple[int, int]: ... + if sys.version_info >= (3, 12): + def is_integer(self) -> bool: ... + + @property + def numerator(a) -> int: ... + @property + def denominator(a) -> int: ... + @overload + def __add__(a, b: int | Fraction) -> Fraction: ... + @overload + def __add__(a, b: float) -> float: ... + @overload + def __add__(a, b: complex) -> complex: ... + @overload + def __radd__(b, a: int | Fraction) -> Fraction: ... + @overload + def __radd__(b, a: float) -> float: ... + @overload + def __radd__(b, a: complex) -> complex: ... + @overload + def __sub__(a, b: int | Fraction) -> Fraction: ... + @overload + def __sub__(a, b: float) -> float: ... + @overload + def __sub__(a, b: complex) -> complex: ... + @overload + def __rsub__(b, a: int | Fraction) -> Fraction: ... + @overload + def __rsub__(b, a: float) -> float: ... + @overload + def __rsub__(b, a: complex) -> complex: ... + @overload + def __mul__(a, b: int | Fraction) -> Fraction: ... + @overload + def __mul__(a, b: float) -> float: ... + @overload + def __mul__(a, b: complex) -> complex: ... + @overload + def __rmul__(b, a: int | Fraction) -> Fraction: ... + @overload + def __rmul__(b, a: float) -> float: ... + @overload + def __rmul__(b, a: complex) -> complex: ... + @overload + def __truediv__(a, b: int | Fraction) -> Fraction: ... + @overload + def __truediv__(a, b: float) -> float: ... + @overload + def __truediv__(a, b: complex) -> complex: ... + @overload + def __rtruediv__(b, a: int | Fraction) -> Fraction: ... + @overload + def __rtruediv__(b, a: float) -> float: ... + @overload + def __rtruediv__(b, a: complex) -> complex: ... + @overload + def __floordiv__(a, b: int | Fraction) -> int: ... + @overload + def __floordiv__(a, b: float) -> float: ... + @overload + def __rfloordiv__(b, a: int | Fraction) -> int: ... + @overload + def __rfloordiv__(b, a: float) -> float: ... + @overload + def __mod__(a, b: int | Fraction) -> Fraction: ... + @overload + def __mod__(a, b: float) -> float: ... + @overload + def __rmod__(b, a: int | Fraction) -> Fraction: ... + @overload + def __rmod__(b, a: float) -> float: ... 
+ @overload + def __divmod__(a, b: int | Fraction) -> tuple[int, Fraction]: ... + @overload + def __divmod__(a, b: float) -> tuple[float, Fraction]: ... + @overload + def __rdivmod__(a, b: int | Fraction) -> tuple[int, Fraction]: ... + @overload + def __rdivmod__(a, b: float) -> tuple[float, Fraction]: ... + if sys.version_info >= (3, 14): + @overload + def __pow__(a, b: int, modulo: None = None) -> Fraction: ... + @overload + def __pow__(a, b: float | Fraction, modulo: None = None) -> float: ... + @overload + def __pow__(a, b: complex, modulo: None = None) -> complex: ... + else: + @overload + def __pow__(a, b: int) -> Fraction: ... + @overload + def __pow__(a, b: float | Fraction) -> float: ... + @overload + def __pow__(a, b: complex) -> complex: ... + if sys.version_info >= (3, 14): + @overload + def __rpow__(b, a: float | Fraction, modulo: None = None) -> float: ... + @overload + def __rpow__(b, a: complex, modulo: None = None) -> complex: ... + else: + @overload + def __rpow__(b, a: float | Fraction) -> float: ... + @overload + def __rpow__(b, a: complex) -> complex: ... + + def __pos__(a) -> Fraction: ... + def __neg__(a) -> Fraction: ... + def __abs__(a) -> Fraction: ... + def __trunc__(a) -> int: ... + def __floor__(a) -> int: ... + def __ceil__(a) -> int: ... + @overload + def __round__(self, ndigits: None = None) -> int: ... + @overload + def __round__(self, ndigits: int) -> Fraction: ... + def __hash__(self) -> int: ... # type: ignore[override] + def __eq__(a, b: object) -> bool: ... + def __lt__(a, b: _ComparableNum) -> bool: ... + def __gt__(a, b: _ComparableNum) -> bool: ... + def __le__(a, b: _ComparableNum) -> bool: ... + def __ge__(a, b: _ComparableNum) -> bool: ... + def __bool__(a) -> bool: ... + def __copy__(self) -> Self: ... + def __deepcopy__(self, memo: Any) -> Self: ... + if sys.version_info >= (3, 11): + def __int__(a, _index: Callable[[SupportsIndex], int] = ...) -> int: ... + # Not actually defined within fractions.py, but provides more useful + # overrides + @property + def real(self) -> Fraction: ... + @property + def imag(self) -> Literal[0]: ... + def conjugate(self) -> Fraction: ... + if sys.version_info >= (3, 14): + @classmethod + def from_number(cls, number: float | Rational | _ConvertibleToIntegerRatio) -> Self: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ftplib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ftplib.pyi new file mode 100644 index 0000000..44bc216 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ftplib.pyi @@ -0,0 +1,153 @@ +import sys +from _typeshed import SupportsRead, SupportsReadline +from collections.abc import Callable, Iterable, Iterator +from socket import socket +from ssl import SSLContext +from types import TracebackType +from typing import Any, Final, Literal, TextIO +from typing_extensions import Self + +__all__ = ["FTP", "error_reply", "error_temp", "error_perm", "error_proto", "all_errors", "FTP_TLS"] + +MSG_OOB: Final = 1 +FTP_PORT: Final = 21 +MAXLINE: Final = 8192 +CRLF: Final = "\r\n" +B_CRLF: Final = b"\r\n" + +class Error(Exception): ... +class error_reply(Error): ... +class error_temp(Error): ... +class error_perm(Error): ... +class error_proto(Error): ... + +all_errors: tuple[type[Exception], ...] 
+ +class FTP: + debugging: int + host: str + port: int + maxline: int + sock: socket | None + welcome: str | None + passiveserver: int + timeout: float | None + af: int + lastresp: str + file: TextIO | None + encoding: str + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + source_address: tuple[str, int] | None + def __init__( + self, + host: str = "", + user: str = "", + passwd: str = "", + acct: str = "", + timeout: float | None = ..., + source_address: tuple[str, int] | None = None, + *, + encoding: str = "utf-8", + ) -> None: ... + def connect( + self, host: str = "", port: int = 0, timeout: float = -999, source_address: tuple[str, int] | None = None + ) -> str: ... + def getwelcome(self) -> str: ... + def set_debuglevel(self, level: int) -> None: ... + def debug(self, level: int) -> None: ... + def set_pasv(self, val: bool | Literal[0, 1]) -> None: ... + def sanitize(self, s: str) -> str: ... + def putline(self, line: str) -> None: ... + def putcmd(self, line: str) -> None: ... + def getline(self) -> str: ... + def getmultiline(self) -> str: ... + def getresp(self) -> str: ... + def voidresp(self) -> str: ... + def abort(self) -> str: ... + def sendcmd(self, cmd: str) -> str: ... + def voidcmd(self, cmd: str) -> str: ... + def sendport(self, host: str, port: int) -> str: ... + def sendeprt(self, host: str, port: int) -> str: ... + def makeport(self) -> socket: ... + def makepasv(self) -> tuple[str, int]: ... + def login(self, user: str = "", passwd: str = "", acct: str = "") -> str: ... + # In practice, `rest` can actually be anything whose str() is an integer sequence, so to make it simple we allow integers + def ntransfercmd(self, cmd: str, rest: int | str | None = None) -> tuple[socket, int | None]: ... + def transfercmd(self, cmd: str, rest: int | str | None = None) -> socket: ... + def retrbinary( + self, cmd: str, callback: Callable[[bytes], object], blocksize: int = 8192, rest: int | str | None = None + ) -> str: ... + def storbinary( + self, + cmd: str, + fp: SupportsRead[bytes], + blocksize: int = 8192, + callback: Callable[[bytes], object] | None = None, + rest: int | str | None = None, + ) -> str: ... + def retrlines(self, cmd: str, callback: Callable[[str], object] | None = None) -> str: ... + def storlines(self, cmd: str, fp: SupportsReadline[bytes], callback: Callable[[bytes], object] | None = None) -> str: ... + def acct(self, password: str) -> str: ... + def nlst(self, *args: str) -> list[str]: ... + # Technically only the last arg can be a Callable but ... + def dir(self, *args: str | Callable[[str], object]) -> None: ... + def mlsd(self, path: str = "", facts: Iterable[str] = []) -> Iterator[tuple[str, dict[str, str]]]: ... + def rename(self, fromname: str, toname: str) -> str: ... + def delete(self, filename: str) -> str: ... + def cwd(self, dirname: str) -> str: ... + def size(self, filename: str) -> int | None: ... + def mkd(self, dirname: str) -> str: ... + def rmd(self, dirname: str) -> str: ... + def pwd(self) -> str: ... + def quit(self) -> str: ... + def close(self) -> None: ... + +class FTP_TLS(FTP): + if sys.version_info >= (3, 12): + def __init__( + self, + host: str = "", + user: str = "", + passwd: str = "", + acct: str = "", + *, + context: SSLContext | None = None, + timeout: float | None = ..., + source_address: tuple[str, int] | None = None, + encoding: str = "utf-8", + ) -> None: ... 
+ else: + def __init__( + self, + host: str = "", + user: str = "", + passwd: str = "", + acct: str = "", + keyfile: str | None = None, + certfile: str | None = None, + context: SSLContext | None = None, + timeout: float | None = ..., + source_address: tuple[str, int] | None = None, + *, + encoding: str = "utf-8", + ) -> None: ... + ssl_version: int + keyfile: str | None + certfile: str | None + context: SSLContext + def login(self, user: str = "", passwd: str = "", acct: str = "", secure: bool = True) -> str: ... + def auth(self) -> str: ... + def prot_p(self) -> str: ... + def prot_c(self) -> str: ... + def ccc(self) -> str: ... + +def parse150(resp: str) -> int | None: ... # undocumented +def parse227(resp: str) -> tuple[str, int]: ... # undocumented +def parse229(resp: str, peer: Any) -> tuple[str, int]: ... # undocumented +def parse257(resp: str) -> str: ... # undocumented +def ftpcp( + source: FTP, sourcename: str, target: FTP, targetname: str = "", type: Literal["A", "I"] = "I" +) -> None: ... # undocumented diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/functools.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/functools.pyi new file mode 100644 index 0000000..47baf91 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/functools.pyi @@ -0,0 +1,258 @@ +import sys +import types +from _typeshed import SupportsAllComparisons, SupportsItems +from collections.abc import Callable, Hashable, Iterable, Sized +from types import GenericAlias +from typing import Any, Final, Generic, Literal, NamedTuple, TypedDict, TypeVar, final, overload, type_check_only +from typing_extensions import ParamSpec, Self, TypeAlias, disjoint_base + +__all__ = [ + "update_wrapper", + "wraps", + "WRAPPER_ASSIGNMENTS", + "WRAPPER_UPDATES", + "total_ordering", + "cmp_to_key", + "lru_cache", + "reduce", + "partial", + "partialmethod", + "singledispatch", + "cached_property", + "singledispatchmethod", + "cache", +] + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_S = TypeVar("_S") +_PWrapped = ParamSpec("_PWrapped") +_RWrapped = TypeVar("_RWrapped") +_PWrapper = ParamSpec("_PWrapper") +_RWrapper = TypeVar("_RWrapper") + +if sys.version_info >= (3, 14): + @overload + def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], /, initial: _T) -> _T: ... + +else: + @overload + def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], initial: _T, /) -> _T: ... + +@overload +def reduce(function: Callable[[_T, _T], _T], iterable: Iterable[_T], /) -> _T: ... + +class _CacheInfo(NamedTuple): + hits: int + misses: int + maxsize: int | None + currsize: int + +@type_check_only +class _CacheParameters(TypedDict): + maxsize: int + typed: bool + +@final +class _lru_cache_wrapper(Generic[_T]): + __wrapped__: Callable[..., _T] + def __call__(self, *args: Hashable, **kwargs: Hashable) -> _T: ... + def cache_info(self) -> _CacheInfo: ... + def cache_clear(self) -> None: ... + def cache_parameters(self) -> _CacheParameters: ... + def __copy__(self) -> _lru_cache_wrapper[_T]: ... + def __deepcopy__(self, memo: Any, /) -> _lru_cache_wrapper[_T]: ... + +@overload +def lru_cache(maxsize: int | None = 128, typed: bool = False) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ... +@overload +def lru_cache(maxsize: Callable[..., _T], typed: bool = False) -> _lru_cache_wrapper[_T]: ... 
+ +if sys.version_info >= (3, 14): + WRAPPER_ASSIGNMENTS: Final[ + tuple[ + Literal["__module__"], + Literal["__name__"], + Literal["__qualname__"], + Literal["__doc__"], + Literal["__annotate__"], + Literal["__type_params__"], + ] + ] +elif sys.version_info >= (3, 12): + WRAPPER_ASSIGNMENTS: Final[ + tuple[ + Literal["__module__"], + Literal["__name__"], + Literal["__qualname__"], + Literal["__doc__"], + Literal["__annotations__"], + Literal["__type_params__"], + ] + ] +else: + WRAPPER_ASSIGNMENTS: Final[ + tuple[Literal["__module__"], Literal["__name__"], Literal["__qualname__"], Literal["__doc__"], Literal["__annotations__"]] + ] + +WRAPPER_UPDATES: Final[tuple[Literal["__dict__"]]] + +@type_check_only +class _Wrapped(Generic[_PWrapped, _RWrapped, _PWrapper, _RWrapper]): + __wrapped__: Callable[_PWrapped, _RWrapped] + def __call__(self, *args: _PWrapper.args, **kwargs: _PWrapper.kwargs) -> _RWrapper: ... + # as with ``Callable``, we'll assume that these attributes exist + __name__: str + __qualname__: str + +@type_check_only +class _Wrapper(Generic[_PWrapped, _RWrapped]): + def __call__(self, f: Callable[_PWrapper, _RWrapper]) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... + +if sys.version_info >= (3, 14): + def update_wrapper( + wrapper: Callable[_PWrapper, _RWrapper], + wrapped: Callable[_PWrapped, _RWrapped], + assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotate__", "__type_params__"), + updated: Iterable[str] = ("__dict__",), + ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... + def wraps( + wrapped: Callable[_PWrapped, _RWrapped], + assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotate__", "__type_params__"), + updated: Iterable[str] = ("__dict__",), + ) -> _Wrapper[_PWrapped, _RWrapped]: ... + +elif sys.version_info >= (3, 12): + def update_wrapper( + wrapper: Callable[_PWrapper, _RWrapper], + wrapped: Callable[_PWrapped, _RWrapped], + assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), + updated: Iterable[str] = ("__dict__",), + ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... + def wraps( + wrapped: Callable[_PWrapped, _RWrapped], + assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), + updated: Iterable[str] = ("__dict__",), + ) -> _Wrapper[_PWrapped, _RWrapped]: ... + +else: + def update_wrapper( + wrapper: Callable[_PWrapper, _RWrapper], + wrapped: Callable[_PWrapped, _RWrapped], + assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), + updated: Iterable[str] = ("__dict__",), + ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... + def wraps( + wrapped: Callable[_PWrapped, _RWrapped], + assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), + updated: Iterable[str] = ("__dict__",), + ) -> _Wrapper[_PWrapped, _RWrapped]: ... + +def total_ordering(cls: type[_T]) -> type[_T]: ... +def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: ... +@disjoint_base +class partial(Generic[_T]): + @property + def func(self) -> Callable[..., _T]: ... + @property + def args(self) -> tuple[Any, ...]: ... + @property + def keywords(self) -> dict[str, Any]: ... + def __new__(cls, func: Callable[..., _T], /, *args: Any, **kwargs: Any) -> Self: ... + def __call__(self, /, *args: Any, **kwargs: Any) -> _T: ... 
+ def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +# With protocols, this could change into a generic protocol that defines __get__ and returns _T +_Descriptor: TypeAlias = Any + +class partialmethod(Generic[_T]): + func: Callable[..., _T] | _Descriptor + args: tuple[Any, ...] + keywords: dict[str, Any] + if sys.version_info >= (3, 14): + @overload + def __new__(self, func: Callable[..., _T], /, *args: Any, **keywords: Any) -> Self: ... + @overload + def __new__(self, func: _Descriptor, /, *args: Any, **keywords: Any) -> Self: ... + else: + @overload + def __init__(self, func: Callable[..., _T], /, *args: Any, **keywords: Any) -> None: ... + @overload + def __init__(self, func: _Descriptor, /, *args: Any, **keywords: Any) -> None: ... + + def __get__(self, obj: Any, cls: type[Any] | None = None) -> Callable[..., _T]: ... + @property + def __isabstractmethod__(self) -> bool: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +if sys.version_info >= (3, 11): + _RegType: TypeAlias = type[Any] | types.UnionType +else: + _RegType: TypeAlias = type[Any] + +@type_check_only +class _SingleDispatchCallable(Generic[_T]): + registry: types.MappingProxyType[Any, Callable[..., _T]] + def dispatch(self, cls: Any) -> Callable[..., _T]: ... + # @fun.register(complex) + # def _(arg, verbose=False): ... + @overload + def register(self, cls: _RegType, func: None = None) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + # @fun.register + # def _(arg: int, verbose=False): + @overload + def register(self, cls: Callable[..., _T], func: None = None) -> Callable[..., _T]: ... + # fun.register(int, lambda x: x) + @overload + def register(self, cls: _RegType, func: Callable[..., _T]) -> Callable[..., _T]: ... + def _clear_cache(self) -> None: ... + def __call__(self, /, *args: Any, **kwargs: Any) -> _T: ... + +def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: ... + +class singledispatchmethod(Generic[_T]): + dispatcher: _SingleDispatchCallable[_T] + func: Callable[..., _T] + def __init__(self, func: Callable[..., _T]) -> None: ... + @property + def __isabstractmethod__(self) -> bool: ... + @overload + def register(self, cls: _RegType, method: None = None) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + @overload + def register(self, cls: Callable[..., _T], method: None = None) -> Callable[..., _T]: ... + @overload + def register(self, cls: _RegType, method: Callable[..., _T]) -> Callable[..., _T]: ... + def __get__(self, obj: _S, cls: type[_S] | None = None) -> Callable[..., _T]: ... + +class cached_property(Generic[_T_co]): + func: Callable[[Any], _T_co] + attrname: str | None + def __init__(self, func: Callable[[Any], _T_co]) -> None: ... + @overload + def __get__(self, instance: None, owner: type[Any] | None = None) -> Self: ... + @overload + def __get__(self, instance: object, owner: type[Any] | None = None) -> _T_co: ... + def __set_name__(self, owner: type[Any], name: str) -> None: ... + # __set__ is not defined at runtime, but @cached_property is designed to be settable + def __set__(self, instance: object, value: _T_co) -> None: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +def cache(user_function: Callable[..., _T], /) -> _lru_cache_wrapper[_T]: ... +def _make_key( + args: tuple[Hashable, ...], + kwds: SupportsItems[Any, Any], + typed: bool, + kwd_mark: tuple[object, ...] 
= ..., + fasttypes: set[type] = ..., + tuple: type = ..., + type: Any = ..., + len: Callable[[Sized], int] = ..., +) -> Hashable: ... + +if sys.version_info >= (3, 14): + @final + class _PlaceholderType: ... + + Placeholder: Final[_PlaceholderType] + + __all__ += ["Placeholder"] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/gc.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/gc.pyi new file mode 100644 index 0000000..06fb6b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/gc.pyi @@ -0,0 +1,33 @@ +from collections.abc import Callable +from typing import Any, Final, Literal +from typing_extensions import TypeAlias + +DEBUG_COLLECTABLE: Final = 2 +DEBUG_LEAK: Final = 38 +DEBUG_SAVEALL: Final = 32 +DEBUG_STATS: Final = 1 +DEBUG_UNCOLLECTABLE: Final = 4 + +_CallbackType: TypeAlias = Callable[[Literal["start", "stop"], dict[str, int]], object] + +callbacks: list[_CallbackType] +garbage: list[Any] + +def collect(generation: int = 2) -> int: ... +def disable() -> None: ... +def enable() -> None: ... +def get_count() -> tuple[int, int, int]: ... +def get_debug() -> int: ... +def get_objects(generation: int | None = None) -> list[Any]: ... +def freeze() -> None: ... +def unfreeze() -> None: ... +def get_freeze_count() -> int: ... +def get_referents(*objs: Any) -> list[Any]: ... +def get_referrers(*objs: Any) -> list[Any]: ... +def get_stats() -> list[dict[str, Any]]: ... +def get_threshold() -> tuple[int, int, int]: ... +def is_tracked(obj: Any, /) -> bool: ... +def is_finalized(obj: Any, /) -> bool: ... +def isenabled() -> bool: ... +def set_debug(flags: int, /) -> None: ... +def set_threshold(threshold0: int, threshold1: int = ..., threshold2: int = ..., /) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/genericpath.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/genericpath.pyi new file mode 100644 index 0000000..3caed77 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/genericpath.pyi @@ -0,0 +1,64 @@ +import os +import sys +from _typeshed import BytesPath, FileDescriptorOrPath, StrOrBytesPath, StrPath, SupportsRichComparisonT +from collections.abc import Sequence +from typing import Literal, NewType, overload +from typing_extensions import LiteralString + +__all__ = [ + "commonprefix", + "exists", + "getatime", + "getctime", + "getmtime", + "getsize", + "isdir", + "isfile", + "samefile", + "sameopenfile", + "samestat", + "ALLOW_MISSING", +] +if sys.version_info >= (3, 12): + __all__ += ["islink"] +if sys.version_info >= (3, 13): + __all__ += ["isjunction", "isdevdrive", "lexists"] + +# All overloads can return empty string. Ideally, Literal[""] would be a valid +# Iterable[T], so that list[T] | Literal[""] could be used as a return +# type. But because this only works when T is str, we need Sequence[T] instead. +@overload +def commonprefix(m: Sequence[LiteralString]) -> LiteralString: ... +@overload +def commonprefix(m: Sequence[StrPath]) -> str: ... +@overload +def commonprefix(m: Sequence[BytesPath]) -> bytes | Literal[""]: ... +@overload +def commonprefix(m: Sequence[list[SupportsRichComparisonT]]) -> Sequence[SupportsRichComparisonT]: ... +@overload +def commonprefix(m: Sequence[tuple[SupportsRichComparisonT, ...]]) -> Sequence[SupportsRichComparisonT]: ... +def exists(path: FileDescriptorOrPath) -> bool: ... +def getsize(filename: FileDescriptorOrPath) -> int: ... +def isfile(path: FileDescriptorOrPath) -> bool: ... 
+def isdir(s: FileDescriptorOrPath) -> bool: ... + +if sys.version_info >= (3, 12): + def islink(path: StrOrBytesPath) -> bool: ... + +# These return float if os.stat_float_times() == True, +# but int is a subclass of float. +def getatime(filename: FileDescriptorOrPath) -> float: ... +def getmtime(filename: FileDescriptorOrPath) -> float: ... +def getctime(filename: FileDescriptorOrPath) -> float: ... +def samefile(f1: FileDescriptorOrPath, f2: FileDescriptorOrPath) -> bool: ... +def sameopenfile(fp1: int, fp2: int) -> bool: ... +def samestat(s1: os.stat_result, s2: os.stat_result) -> bool: ... + +if sys.version_info >= (3, 13): + def isjunction(path: StrOrBytesPath) -> bool: ... + def isdevdrive(path: StrOrBytesPath) -> bool: ... + def lexists(path: StrOrBytesPath) -> bool: ... + +# Added in Python 3.9.23, 3.10.18, 3.11.13, 3.12.11, 3.13.4 +_AllowMissingType = NewType("_AllowMissingType", object) +ALLOW_MISSING: _AllowMissingType diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/getopt.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/getopt.pyi new file mode 100644 index 0000000..c15db81 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/getopt.pyi @@ -0,0 +1,27 @@ +from collections.abc import Iterable, Sequence +from typing import Protocol, TypeVar, overload, type_check_only + +_StrSequenceT_co = TypeVar("_StrSequenceT_co", covariant=True, bound=Sequence[str]) + +@type_check_only +class _SliceableT(Protocol[_StrSequenceT_co]): + @overload + def __getitem__(self, key: int, /) -> str: ... + @overload + def __getitem__(self, key: slice, /) -> _StrSequenceT_co: ... + +__all__ = ["GetoptError", "error", "getopt", "gnu_getopt"] + +def getopt( + args: _SliceableT[_StrSequenceT_co], shortopts: str, longopts: Iterable[str] | str = [] +) -> tuple[list[tuple[str, str]], _StrSequenceT_co]: ... +def gnu_getopt( + args: Sequence[str], shortopts: str, longopts: Iterable[str] | str = [] +) -> tuple[list[tuple[str, str]], list[str]]: ... + +class GetoptError(Exception): + msg: str + opt: str + def __init__(self, msg: str, opt: str = "") -> None: ... + +error = GetoptError diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/getpass.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/getpass.pyi new file mode 100644 index 0000000..bb3013d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/getpass.pyi @@ -0,0 +1,14 @@ +import sys +from typing import TextIO + +__all__ = ["getpass", "getuser", "GetPassWarning"] + +if sys.version_info >= (3, 14): + def getpass(prompt: str = "Password: ", stream: TextIO | None = None, *, echo_char: str | None = None) -> str: ... + +else: + def getpass(prompt: str = "Password: ", stream: TextIO | None = None) -> str: ... + +def getuser() -> str: ... + +class GetPassWarning(UserWarning): ... 
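# Usage sketch (illustrative, not part of the committed diff above): the getopt
# stub types getopt.getopt() as returning the parsed (option, value) pairs plus
# the leftover positional arguments. The option names below are hypothetical.
import getopt
import sys

def parse_cli(argv: list[str]) -> tuple[bool, str | None, list[str]]:
    # "-v/--verbose" is a bare flag, "-o/--output" takes a value.
    opts, rest = getopt.getopt(argv, "vo:", ["verbose", "output="])
    verbose = False
    output: str | None = None
    for flag, value in opts:
        if flag in ("-v", "--verbose"):
            verbose = True
        elif flag in ("-o", "--output"):
            output = value
    return verbose, output, rest

if __name__ == "__main__":
    print(parse_cli(sys.argv[1:]))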
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/gettext.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/gettext.pyi new file mode 100644 index 0000000..e9ffd7a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/gettext.pyi @@ -0,0 +1,189 @@ +import io +import sys +from _typeshed import StrPath +from collections.abc import Callable, Container, Iterable, Sequence +from typing import Any, Final, Literal, Protocol, TypeVar, overload, type_check_only +from typing_extensions import deprecated + +__all__ = [ + "NullTranslations", + "GNUTranslations", + "Catalog", + "find", + "translation", + "install", + "textdomain", + "bindtextdomain", + "dgettext", + "dngettext", + "gettext", + "ngettext", + "dnpgettext", + "dpgettext", + "npgettext", + "pgettext", +] + +if sys.version_info < (3, 11): + __all__ += ["bind_textdomain_codeset", "ldgettext", "ldngettext", "lgettext", "lngettext"] + +@type_check_only +class _TranslationsReader(Protocol): + def read(self) -> bytes: ... + # optional: + # name: str + +class NullTranslations: + def __init__(self, fp: _TranslationsReader | None = None) -> None: ... + def _parse(self, fp: _TranslationsReader) -> None: ... + def add_fallback(self, fallback: NullTranslations) -> None: ... + def gettext(self, message: str) -> str: ... + def ngettext(self, msgid1: str, msgid2: str, n: int) -> str: ... + def pgettext(self, context: str, message: str) -> str: ... + def npgettext(self, context: str, msgid1: str, msgid2: str, n: int) -> str: ... + def info(self) -> dict[str, str]: ... + def charset(self) -> str | None: ... + if sys.version_info < (3, 11): + @deprecated("Deprecated since Python 3.8; removed in Python 3.11.") + def output_charset(self) -> str | None: ... + @deprecated("Deprecated since Python 3.8; removed in Python 3.11.") + def set_output_charset(self, charset: str) -> None: ... + @deprecated("Deprecated since Python 3.8; removed in Python 3.11. Use `gettext()` instead.") + def lgettext(self, message: str) -> str: ... + @deprecated("Deprecated since Python 3.8; removed in Python 3.11. Use `ngettext()` instead.") + def lngettext(self, msgid1: str, msgid2: str, n: int) -> str: ... + + def install(self, names: Container[str] | None = None) -> None: ... + +class GNUTranslations(NullTranslations): + LE_MAGIC: Final[int] + BE_MAGIC: Final[int] + CONTEXT: str + VERSIONS: Sequence[int] + +@overload +def find( + domain: str, localedir: StrPath | None = None, languages: Iterable[str] | None = None, all: Literal[False] = False +) -> str | None: ... +@overload +def find( + domain: str, localedir: StrPath | None = None, languages: Iterable[str] | None = None, *, all: Literal[True] +) -> list[str]: ... +@overload +def find(domain: str, localedir: StrPath | None, languages: Iterable[str] | None, all: Literal[True]) -> list[str]: ... +@overload +def find(domain: str, localedir: StrPath | None = None, languages: Iterable[str] | None = None, all: bool = False) -> Any: ... + +_NullTranslationsT = TypeVar("_NullTranslationsT", bound=NullTranslations) + +if sys.version_info >= (3, 11): + @overload + def translation( + domain: str, + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + class_: None = None, + fallback: Literal[False] = False, + ) -> GNUTranslations: ... 
+ @overload + def translation( + domain: str, + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + *, + class_: Callable[[io.BufferedReader], _NullTranslationsT], + fallback: Literal[False] = False, + ) -> _NullTranslationsT: ... + @overload + def translation( + domain: str, + localedir: StrPath | None, + languages: Iterable[str] | None, + class_: Callable[[io.BufferedReader], _NullTranslationsT], + fallback: Literal[False] = False, + ) -> _NullTranslationsT: ... + @overload + def translation( + domain: str, + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + class_: Callable[[io.BufferedReader], NullTranslations] | None = None, + fallback: bool = False, + ) -> NullTranslations: ... + def install(domain: str, localedir: StrPath | None = None, *, names: Container[str] | None = None) -> None: ... + +else: + @overload + def translation( + domain: str, + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + class_: None = None, + fallback: Literal[False] = False, + codeset: str | None = ..., + ) -> GNUTranslations: ... + @overload + def translation( + domain: str, + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + *, + class_: Callable[[io.BufferedReader], _NullTranslationsT], + fallback: Literal[False] = False, + codeset: str | None = ..., + ) -> _NullTranslationsT: ... + @overload + def translation( + domain: str, + localedir: StrPath | None, + languages: Iterable[str] | None, + class_: Callable[[io.BufferedReader], _NullTranslationsT], + fallback: Literal[False] = False, + codeset: str | None = ..., + ) -> _NullTranslationsT: ... + @overload + def translation( + domain: str, + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + class_: Callable[[io.BufferedReader], NullTranslations] | None = None, + fallback: bool = False, + codeset: str | None = ..., + ) -> NullTranslations: ... + @overload + def install(domain: str, localedir: StrPath | None = None, names: Container[str] | None = None) -> None: ... + @overload + @deprecated("The `codeset` parameter is deprecated since Python 3.8; removed in Python 3.11.") + def install(domain: str, localedir: StrPath | None, codeset: str | None, /, names: Container[str] | None = None) -> None: ... + @overload + @deprecated("The `codeset` parameter is deprecated since Python 3.8; removed in Python 3.11.") + def install( + domain: str, localedir: StrPath | None = None, *, codeset: str | None, names: Container[str] | None = None + ) -> None: ... + +def textdomain(domain: str | None = None) -> str: ... +def bindtextdomain(domain: str, localedir: StrPath | None = None) -> str: ... +def dgettext(domain: str, message: str) -> str: ... +def dngettext(domain: str, msgid1: str, msgid2: str, n: int) -> str: ... +def gettext(message: str) -> str: ... +def ngettext(msgid1: str, msgid2: str, n: int) -> str: ... +def pgettext(context: str, message: str) -> str: ... +def dpgettext(domain: str, context: str, message: str) -> str: ... +def npgettext(context: str, msgid1: str, msgid2: str, n: int) -> str: ... +def dnpgettext(domain: str, context: str, msgid1: str, msgid2: str, n: int) -> str: ... + +if sys.version_info < (3, 11): + @deprecated("Deprecated since Python 3.8; removed in Python 3.11. Use `gettext()` instead.") + def lgettext(message: str) -> str: ... + @deprecated("Deprecated since Python 3.8; removed in Python 3.11. Use `dgettext()` instead.") + def ldgettext(domain: str, message: str) -> str: ... 
+ @deprecated("Deprecated since Python 3.8; removed in Python 3.11. Use `ngettext()` instead.") + def lngettext(msgid1: str, msgid2: str, n: int) -> str: ... + @deprecated("Deprecated since Python 3.8; removed in Python 3.11. Use `dngettext()` instead.") + def ldngettext(domain: str, msgid1: str, msgid2: str, n: int) -> str: ... + @deprecated("Deprecated since Python 3.8; removed in Python 3.11. Use `bindtextdomain()` instead.") + def bind_textdomain_codeset(domain: str, codeset: str | None = None) -> str: ... + +Catalog = translation + +def c2py(plural: str) -> Callable[[int], int]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/glob.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/glob.pyi new file mode 100644 index 0000000..942fd73 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/glob.pyi @@ -0,0 +1,62 @@ +import sys +from _typeshed import StrOrBytesPath +from collections.abc import Iterator, Sequence +from typing import AnyStr +from typing_extensions import deprecated + +__all__ = ["escape", "glob", "iglob"] + +if sys.version_info >= (3, 13): + __all__ += ["translate"] + +if sys.version_info >= (3, 10): + @deprecated( + "Deprecated since Python 3.10; will be removed in Python 3.15. Use `glob.glob()` with the *root_dir* argument instead." + ) + def glob0(dirname: AnyStr, pattern: AnyStr) -> list[AnyStr]: ... + @deprecated( + "Deprecated since Python 3.10; will be removed in Python 3.15. Use `glob.glob()` with the *root_dir* argument instead." + ) + def glob1(dirname: AnyStr, pattern: AnyStr) -> list[AnyStr]: ... + +else: + def glob0(dirname: AnyStr, pattern: AnyStr) -> list[AnyStr]: ... + def glob1(dirname: AnyStr, pattern: AnyStr) -> list[AnyStr]: ... + +if sys.version_info >= (3, 11): + def glob( + pathname: AnyStr, + *, + root_dir: StrOrBytesPath | None = None, + dir_fd: int | None = None, + recursive: bool = False, + include_hidden: bool = False, + ) -> list[AnyStr]: ... + def iglob( + pathname: AnyStr, + *, + root_dir: StrOrBytesPath | None = None, + dir_fd: int | None = None, + recursive: bool = False, + include_hidden: bool = False, + ) -> Iterator[AnyStr]: ... + +elif sys.version_info >= (3, 10): + def glob( + pathname: AnyStr, *, root_dir: StrOrBytesPath | None = None, dir_fd: int | None = None, recursive: bool = False + ) -> list[AnyStr]: ... + def iglob( + pathname: AnyStr, *, root_dir: StrOrBytesPath | None = None, dir_fd: int | None = None, recursive: bool = False + ) -> Iterator[AnyStr]: ... + +else: + def glob(pathname: AnyStr, *, recursive: bool = False) -> list[AnyStr]: ... + def iglob(pathname: AnyStr, *, recursive: bool = False) -> Iterator[AnyStr]: ... + +def escape(pathname: AnyStr) -> AnyStr: ... +def has_magic(s: str | bytes) -> bool: ... # undocumented + +if sys.version_info >= (3, 13): + def translate( + pat: str, *, recursive: bool = False, include_hidden: bool = False, seps: Sequence[str] | None = None + ) -> str: ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/graphlib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/graphlib.pyi new file mode 100644 index 0000000..1ca8cbe --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/graphlib.pyi @@ -0,0 +1,28 @@ +import sys +from _typeshed import SupportsItems +from collections.abc import Iterable +from typing import Any, Generic, TypeVar, overload + +__all__ = ["TopologicalSorter", "CycleError"] + +_T = TypeVar("_T") + +if sys.version_info >= (3, 11): + from types import GenericAlias + +class TopologicalSorter(Generic[_T]): + @overload + def __init__(self, graph: None = None) -> None: ... + @overload + def __init__(self, graph: SupportsItems[_T, Iterable[_T]]) -> None: ... + def add(self, node: _T, *predecessors: _T) -> None: ... + def prepare(self) -> None: ... + def is_active(self) -> bool: ... + def __bool__(self) -> bool: ... + def done(self, *nodes: _T) -> None: ... + def get_ready(self) -> tuple[_T, ...]: ... + def static_order(self) -> Iterable[_T]: ... + if sys.version_info >= (3, 11): + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +class CycleError(ValueError): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/grp.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/grp.pyi new file mode 100644 index 0000000..965ecec --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/grp.pyi @@ -0,0 +1,22 @@ +import sys +from _typeshed import structseq +from typing import Any, Final, final + +if sys.platform != "win32": + @final + class struct_group(structseq[Any], tuple[str, str | None, int, list[str]]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("gr_name", "gr_passwd", "gr_gid", "gr_mem") + + @property + def gr_name(self) -> str: ... + @property + def gr_passwd(self) -> str | None: ... + @property + def gr_gid(self) -> int: ... + @property + def gr_mem(self) -> list[str]: ... + + def getgrall() -> list[struct_group]: ... + def getgrgid(id: int) -> struct_group: ... + def getgrnam(name: str) -> struct_group: ... 
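# Usage sketch for graphlib.TopologicalSorter as typed above. The task graph is
# invented for illustration: each key maps a node to its predecessors, and
# static_order() yields a node only after all of its predecessors.
from graphlib import CycleError, TopologicalSorter

graph = {"compile": {"fetch"}, "build": {"compile"}, "test": {"build"}}
try:
    order = list(TopologicalSorter(graph).static_order())
    # e.g. ["fetch", "compile", "build", "test"]
except CycleError:
    order = []  # the graph contained a dependency cycle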
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/gzip.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/gzip.pyi new file mode 100644 index 0000000..b18f76f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/gzip.pyi @@ -0,0 +1,176 @@ +import sys +import zlib +from _typeshed import ReadableBuffer, SizedBuffer, StrOrBytesPath, WriteableBuffer +from io import FileIO, TextIOWrapper +from typing import Final, Literal, Protocol, overload, type_check_only +from typing_extensions import TypeAlias, deprecated + +if sys.version_info >= (3, 14): + from compression._common._streams import BaseStream, DecompressReader +else: + from _compression import BaseStream, DecompressReader + +__all__ = ["BadGzipFile", "GzipFile", "open", "compress", "decompress"] + +_ReadBinaryMode: TypeAlias = Literal["r", "rb"] +_WriteBinaryMode: TypeAlias = Literal["a", "ab", "w", "wb", "x", "xb"] +_OpenTextMode: TypeAlias = Literal["rt", "at", "wt", "xt"] + +READ: Final[object] # undocumented +WRITE: Final[object] # undocumented + +FTEXT: Final[int] # actually Literal[1] # undocumented +FHCRC: Final[int] # actually Literal[2] # undocumented +FEXTRA: Final[int] # actually Literal[4] # undocumented +FNAME: Final[int] # actually Literal[8] # undocumented +FCOMMENT: Final[int] # actually Literal[16] # undocumented + +@type_check_only +class _ReadableFileobj(Protocol): + def read(self, n: int, /) -> bytes: ... + def seek(self, n: int, /) -> object: ... + # The following attributes and methods are optional: + # name: str + # mode: str + # def fileno() -> int: ... + +@type_check_only +class _WritableFileobj(Protocol): + def write(self, b: bytes, /) -> object: ... + def flush(self) -> object: ... + # The following attributes and methods are optional: + # name: str + # mode: str + # def fileno() -> int: ... + +@overload +def open( + filename: StrOrBytesPath | _ReadableFileobj, + mode: _ReadBinaryMode = "rb", + compresslevel: int = 9, + encoding: None = None, + errors: None = None, + newline: None = None, +) -> GzipFile: ... +@overload +def open( + filename: StrOrBytesPath | _WritableFileobj, + mode: _WriteBinaryMode, + compresslevel: int = 9, + encoding: None = None, + errors: None = None, + newline: None = None, +) -> GzipFile: ... +@overload +def open( + filename: StrOrBytesPath | _ReadableFileobj | _WritableFileobj, + mode: _OpenTextMode, + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> TextIOWrapper: ... +@overload +def open( + filename: StrOrBytesPath | _ReadableFileobj | _WritableFileobj, + mode: str, + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> GzipFile | TextIOWrapper: ... + +class _PaddedFile: + file: _ReadableFileobj + def __init__(self, f: _ReadableFileobj, prepend: bytes = b"") -> None: ... + def read(self, size: int) -> bytes: ... + def prepend(self, prepend: bytes = b"") -> None: ... + def seek(self, off: int) -> int: ... + def seekable(self) -> bool: ... + +class BadGzipFile(OSError): ... + +class GzipFile(BaseStream): + myfileobj: FileIO | None + mode: object + name: str + compress: zlib._Compress + fileobj: _ReadableFileobj | _WritableFileobj + @overload + def __init__( + self, + filename: StrOrBytesPath | None, + mode: _ReadBinaryMode, + compresslevel: int = 9, + fileobj: _ReadableFileobj | None = None, + mtime: float | None = None, + ) -> None: ... 
+ @overload + def __init__( + self, + *, + mode: _ReadBinaryMode, + compresslevel: int = 9, + fileobj: _ReadableFileobj | None = None, + mtime: float | None = None, + ) -> None: ... + @overload + def __init__( + self, + filename: StrOrBytesPath | None, + mode: _WriteBinaryMode, + compresslevel: int = 9, + fileobj: _WritableFileobj | None = None, + mtime: float | None = None, + ) -> None: ... + @overload + def __init__( + self, + *, + mode: _WriteBinaryMode, + compresslevel: int = 9, + fileobj: _WritableFileobj | None = None, + mtime: float | None = None, + ) -> None: ... + @overload + def __init__( + self, + filename: StrOrBytesPath | None = None, + mode: str | None = None, + compresslevel: int = 9, + fileobj: _ReadableFileobj | _WritableFileobj | None = None, + mtime: float | None = None, + ) -> None: ... + if sys.version_info < (3, 12): + @property + @deprecated("Deprecated since Python 2.6; removed in Python 3.12. Use `name` attribute instead.") + def filename(self) -> str: ... + + @property + def mtime(self) -> int | None: ... + crc: int + def write(self, data: ReadableBuffer) -> int: ... + def read(self, size: int | None = -1) -> bytes: ... + def read1(self, size: int = -1) -> bytes: ... + def peek(self, n: int) -> bytes: ... + def close(self) -> None: ... + def flush(self, zlib_mode: int = 2) -> None: ... + def fileno(self) -> int: ... + def rewind(self) -> None: ... + def seek(self, offset: int, whence: int = 0) -> int: ... + def readline(self, size: int | None = -1) -> bytes: ... + + if sys.version_info >= (3, 14): + def readinto(self, b: WriteableBuffer) -> int: ... + def readinto1(self, b: WriteableBuffer) -> int: ... + +class _GzipReader(DecompressReader): + def __init__(self, fp: _ReadableFileobj) -> None: ... + +if sys.version_info >= (3, 14): + def compress(data: SizedBuffer, compresslevel: int = 9, *, mtime: float = 0) -> bytes: ... + +else: + def compress(data: SizedBuffer, compresslevel: int = 9, *, mtime: float | None = None) -> bytes: ... + +def decompress(data: ReadableBuffer) -> bytes: ... 
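# Round-trip sketch for the gzip stubs above; purely illustrative and in memory
# only (no files from this repository are involved).
import gzip

payload = b"typeshed " * 1000
blob = gzip.compress(payload, compresslevel=9)   # bytes -> gzip-framed bytes
assert gzip.decompress(blob) == payload

# gzip.open() in text mode returns a TextIOWrapper, matching the overloads
# above (the path here is hypothetical):
# with gzip.open("data.txt.gz", "rt", encoding="utf-8") as fh:
#     text = fh.read()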
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/hashlib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/hashlib.pyi new file mode 100644 index 0000000..9241363 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/hashlib.pyi @@ -0,0 +1,89 @@ +import sys +from _blake2 import blake2b as blake2b, blake2s as blake2s +from _hashlib import ( + HASH, + _HashObject, + openssl_md5 as md5, + openssl_sha1 as sha1, + openssl_sha3_224 as sha3_224, + openssl_sha3_256 as sha3_256, + openssl_sha3_384 as sha3_384, + openssl_sha3_512 as sha3_512, + openssl_sha224 as sha224, + openssl_sha256 as sha256, + openssl_sha384 as sha384, + openssl_sha512 as sha512, + openssl_shake_128 as shake_128, + openssl_shake_256 as shake_256, + pbkdf2_hmac as pbkdf2_hmac, + scrypt as scrypt, +) +from _typeshed import ReadableBuffer +from collections.abc import Callable, Set as AbstractSet +from typing import Protocol, type_check_only + +if sys.version_info >= (3, 11): + __all__ = ( + "md5", + "sha1", + "sha224", + "sha256", + "sha384", + "sha512", + "blake2b", + "blake2s", + "sha3_224", + "sha3_256", + "sha3_384", + "sha3_512", + "shake_128", + "shake_256", + "new", + "algorithms_guaranteed", + "algorithms_available", + "pbkdf2_hmac", + "file_digest", + ) +else: + __all__ = ( + "md5", + "sha1", + "sha224", + "sha256", + "sha384", + "sha512", + "blake2b", + "blake2s", + "sha3_224", + "sha3_256", + "sha3_384", + "sha3_512", + "shake_128", + "shake_256", + "new", + "algorithms_guaranteed", + "algorithms_available", + "pbkdf2_hmac", + ) + +def new(name: str, data: ReadableBuffer = b"", *, usedforsecurity: bool = ...) -> HASH: ... + +algorithms_guaranteed: AbstractSet[str] +algorithms_available: AbstractSet[str] + +if sys.version_info >= (3, 11): + @type_check_only + class _BytesIOLike(Protocol): + def getbuffer(self) -> ReadableBuffer: ... + + @type_check_only + class _FileDigestFileObj(Protocol): + def readinto(self, buf: bytearray, /) -> int: ... + def readable(self) -> bool: ... + + def file_digest( + fileobj: _BytesIOLike | _FileDigestFileObj, digest: str | Callable[[], _HashObject], /, *, _bufsize: int = 262144 + ) -> HASH: ... + +# Legacy typing-only alias +_Hash = HASH diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/heapq.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/heapq.pyi new file mode 100644 index 0000000..220c41f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/heapq.pyi @@ -0,0 +1,17 @@ +from _heapq import * +from _typeshed import SupportsRichComparison +from collections.abc import Callable, Generator, Iterable +from typing import Any, Final, TypeVar + +__all__ = ["heappush", "heappop", "heapify", "heapreplace", "merge", "nlargest", "nsmallest", "heappushpop"] + +_S = TypeVar("_S") + +__about__: Final[str] + +def merge( + *iterables: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None, reverse: bool = False +) -> Generator[_S]: ... +def nlargest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: ... +def nsmallest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: ... +def _heapify_max(heap: list[Any], /) -> None: ... 
# undocumented diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/hmac.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/hmac.pyi new file mode 100644 index 0000000..070c59b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/hmac.pyi @@ -0,0 +1,34 @@ +from _hashlib import _HashObject, compare_digest as compare_digest +from _typeshed import ReadableBuffer, SizedBuffer +from collections.abc import Callable +from types import ModuleType +from typing import overload +from typing_extensions import TypeAlias + +_DigestMod: TypeAlias = str | Callable[[], _HashObject] | ModuleType + +trans_5C: bytes +trans_36: bytes + +digest_size: None + +# In reality digestmod has a default value, but the function always throws an error +# if the argument is not given, so we pretend it is a required argument. +@overload +def new(key: bytes | bytearray, msg: ReadableBuffer | None, digestmod: _DigestMod) -> HMAC: ... +@overload +def new(key: bytes | bytearray, *, digestmod: _DigestMod) -> HMAC: ... + +class HMAC: + __slots__ = ("_hmac", "_inner", "_outer", "block_size", "digest_size") + digest_size: int + block_size: int + @property + def name(self) -> str: ... + def __init__(self, key: bytes | bytearray, msg: ReadableBuffer | None = None, digestmod: _DigestMod = "") -> None: ... + def update(self, msg: ReadableBuffer) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> HMAC: ... + +def digest(key: SizedBuffer, msg: ReadableBuffer, digest: _DigestMod) -> bytes: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/html/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/html/__init__.pyi new file mode 100644 index 0000000..afba908 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/html/__init__.pyi @@ -0,0 +1,6 @@ +from typing import AnyStr + +__all__ = ["escape", "unescape"] + +def escape(s: AnyStr, quote: bool = True) -> AnyStr: ... +def unescape(s: AnyStr) -> AnyStr: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/html/entities.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/html/entities.pyi new file mode 100644 index 0000000..e5890d1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/html/entities.pyi @@ -0,0 +1,8 @@ +from typing import Final + +__all__ = ["html5", "name2codepoint", "codepoint2name", "entitydefs"] + +name2codepoint: Final[dict[str, int]] +html5: Final[dict[str, str]] +codepoint2name: Final[dict[int, str]] +entitydefs: Final[dict[str, str]] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/html/parser.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/html/parser.pyi new file mode 100644 index 0000000..08dc7b9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/html/parser.pyi @@ -0,0 +1,40 @@ +from _markupbase import ParserBase +from re import Pattern +from typing import Final + +__all__ = ["HTMLParser"] + +class HTMLParser(ParserBase): + CDATA_CONTENT_ELEMENTS: Final[tuple[str, ...]] + # Added in Python 3.9.23, 3.10.18, 3.11.13, 3.12.11, 3.13.6 + RCDATA_CONTENT_ELEMENTS: Final[tuple[str, ...]] + + # `scripting` parameter added in Python 3.9.25, 3.10.20, 3.11.15, 3.12.13, 3.13.10, 3.14.1 + def __init__(self, *, convert_charrefs: bool = True, scripting: bool = False) -> None: ... + def feed(self, data: str) -> None: ... + def close(self) -> None: ... + def get_starttag_text(self) -> str | None: ... 
+ def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: ... + def handle_endtag(self, tag: str) -> None: ... + def handle_startendtag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: ... + def handle_data(self, data: str) -> None: ... + def handle_entityref(self, name: str) -> None: ... + def handle_charref(self, name: str) -> None: ... + def handle_comment(self, data: str) -> None: ... + def handle_decl(self, decl: str) -> None: ... + def handle_pi(self, data: str) -> None: ... + def check_for_whole_start_tag(self, i: int) -> int: ... # undocumented + def clear_cdata_mode(self) -> None: ... # undocumented + def goahead(self, end: bool) -> None: ... # undocumented + def parse_bogus_comment(self, i: int, report: bool = True) -> int: ... # undocumented + def parse_endtag(self, i: int) -> int: ... # undocumented + def parse_html_declaration(self, i: int) -> int: ... # undocumented + def parse_pi(self, i: int) -> int: ... # undocumented + def parse_starttag(self, i: int) -> int: ... # undocumented + # `escapable` parameter added in Python 3.9.23, 3.10.18, 3.11.13, 3.12.11, 3.13.6 + def set_cdata_mode(self, elem: str, *, escapable: bool = False) -> None: ... # undocumented + rawdata: str # undocumented + cdata_elem: str | None # undocumented + convert_charrefs: bool # undocumented + interesting: Pattern[str] # undocumented + lasttag: str # undocumented diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/http/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/http/__init__.pyi new file mode 100644 index 0000000..f60c390 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/http/__init__.pyi @@ -0,0 +1,118 @@ +import sys +from enum import IntEnum + +if sys.version_info >= (3, 11): + from enum import StrEnum + +if sys.version_info >= (3, 11): + __all__ = ["HTTPStatus", "HTTPMethod"] +else: + __all__ = ["HTTPStatus"] + +class HTTPStatus(IntEnum): + @property + def phrase(self) -> str: ... + @property + def description(self) -> str: ... + + # Keep these synced with the global constants in http/client.pyi. 
+ CONTINUE = 100 + SWITCHING_PROTOCOLS = 101 + PROCESSING = 102 + EARLY_HINTS = 103 + + OK = 200 + CREATED = 201 + ACCEPTED = 202 + NON_AUTHORITATIVE_INFORMATION = 203 + NO_CONTENT = 204 + RESET_CONTENT = 205 + PARTIAL_CONTENT = 206 + MULTI_STATUS = 207 + ALREADY_REPORTED = 208 + IM_USED = 226 + + MULTIPLE_CHOICES = 300 + MOVED_PERMANENTLY = 301 + FOUND = 302 + SEE_OTHER = 303 + NOT_MODIFIED = 304 + USE_PROXY = 305 + TEMPORARY_REDIRECT = 307 + PERMANENT_REDIRECT = 308 + + BAD_REQUEST = 400 + UNAUTHORIZED = 401 + PAYMENT_REQUIRED = 402 + FORBIDDEN = 403 + NOT_FOUND = 404 + METHOD_NOT_ALLOWED = 405 + NOT_ACCEPTABLE = 406 + PROXY_AUTHENTICATION_REQUIRED = 407 + REQUEST_TIMEOUT = 408 + CONFLICT = 409 + GONE = 410 + LENGTH_REQUIRED = 411 + PRECONDITION_FAILED = 412 + if sys.version_info >= (3, 13): + CONTENT_TOO_LARGE = 413 + REQUEST_ENTITY_TOO_LARGE = 413 + if sys.version_info >= (3, 13): + URI_TOO_LONG = 414 + REQUEST_URI_TOO_LONG = 414 + UNSUPPORTED_MEDIA_TYPE = 415 + if sys.version_info >= (3, 13): + RANGE_NOT_SATISFIABLE = 416 + REQUESTED_RANGE_NOT_SATISFIABLE = 416 + EXPECTATION_FAILED = 417 + IM_A_TEAPOT = 418 + MISDIRECTED_REQUEST = 421 + if sys.version_info >= (3, 13): + UNPROCESSABLE_CONTENT = 422 + UNPROCESSABLE_ENTITY = 422 + LOCKED = 423 + FAILED_DEPENDENCY = 424 + TOO_EARLY = 425 + UPGRADE_REQUIRED = 426 + PRECONDITION_REQUIRED = 428 + TOO_MANY_REQUESTS = 429 + REQUEST_HEADER_FIELDS_TOO_LARGE = 431 + UNAVAILABLE_FOR_LEGAL_REASONS = 451 + + INTERNAL_SERVER_ERROR = 500 + NOT_IMPLEMENTED = 501 + BAD_GATEWAY = 502 + SERVICE_UNAVAILABLE = 503 + GATEWAY_TIMEOUT = 504 + HTTP_VERSION_NOT_SUPPORTED = 505 + VARIANT_ALSO_NEGOTIATES = 506 + INSUFFICIENT_STORAGE = 507 + LOOP_DETECTED = 508 + NOT_EXTENDED = 510 + NETWORK_AUTHENTICATION_REQUIRED = 511 + + if sys.version_info >= (3, 12): + @property + def is_informational(self) -> bool: ... + @property + def is_success(self) -> bool: ... + @property + def is_redirection(self) -> bool: ... + @property + def is_client_error(self) -> bool: ... + @property + def is_server_error(self) -> bool: ... + +if sys.version_info >= (3, 11): + class HTTPMethod(StrEnum): + @property + def description(self) -> str: ... 
+ CONNECT = "CONNECT" + DELETE = "DELETE" + GET = "GET" + HEAD = "HEAD" + OPTIONS = "OPTIONS" + PATCH = "PATCH" + POST = "POST" + PUT = "PUT" + TRACE = "TRACE" diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/http/client.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/http/client.pyi new file mode 100644 index 0000000..1568567 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/http/client.pyi @@ -0,0 +1,266 @@ +import email.message +import io +import ssl +import sys +import types +from _typeshed import MaybeNone, ReadableBuffer, SupportsRead, SupportsReadline, WriteableBuffer +from collections.abc import Callable, Iterable, Iterator, Mapping +from email._policybase import _MessageT +from socket import socket +from typing import BinaryIO, Final, TypeVar, overload +from typing_extensions import Self, TypeAlias + +__all__ = [ + "HTTPResponse", + "HTTPConnection", + "HTTPException", + "NotConnected", + "UnknownProtocol", + "UnknownTransferEncoding", + "UnimplementedFileMode", + "IncompleteRead", + "InvalidURL", + "ImproperConnectionState", + "CannotSendRequest", + "CannotSendHeader", + "ResponseNotReady", + "BadStatusLine", + "LineTooLong", + "RemoteDisconnected", + "error", + "responses", + "HTTPSConnection", +] + +_DataType: TypeAlias = SupportsRead[bytes] | Iterable[ReadableBuffer] | ReadableBuffer +_T = TypeVar("_T") +_HeaderValue: TypeAlias = ReadableBuffer | str | int + +HTTP_PORT: Final = 80 +HTTPS_PORT: Final = 443 + +# Keep these global constants in sync with http.HTTPStatus (http/__init__.pyi). +# They are present for backward compatibility reasons. +CONTINUE: Final = 100 +SWITCHING_PROTOCOLS: Final = 101 +PROCESSING: Final = 102 +EARLY_HINTS: Final = 103 + +OK: Final = 200 +CREATED: Final = 201 +ACCEPTED: Final = 202 +NON_AUTHORITATIVE_INFORMATION: Final = 203 +NO_CONTENT: Final = 204 +RESET_CONTENT: Final = 205 +PARTIAL_CONTENT: Final = 206 +MULTI_STATUS: Final = 207 +ALREADY_REPORTED: Final = 208 +IM_USED: Final = 226 + +MULTIPLE_CHOICES: Final = 300 +MOVED_PERMANENTLY: Final = 301 +FOUND: Final = 302 +SEE_OTHER: Final = 303 +NOT_MODIFIED: Final = 304 +USE_PROXY: Final = 305 +TEMPORARY_REDIRECT: Final = 307 +PERMANENT_REDIRECT: Final = 308 + +BAD_REQUEST: Final = 400 +UNAUTHORIZED: Final = 401 +PAYMENT_REQUIRED: Final = 402 +FORBIDDEN: Final = 403 +NOT_FOUND: Final = 404 +METHOD_NOT_ALLOWED: Final = 405 +NOT_ACCEPTABLE: Final = 406 +PROXY_AUTHENTICATION_REQUIRED: Final = 407 +REQUEST_TIMEOUT: Final = 408 +CONFLICT: Final = 409 +GONE: Final = 410 +LENGTH_REQUIRED: Final = 411 +PRECONDITION_FAILED: Final = 412 +if sys.version_info >= (3, 13): + CONTENT_TOO_LARGE: Final = 413 +REQUEST_ENTITY_TOO_LARGE: Final = 413 +if sys.version_info >= (3, 13): + URI_TOO_LONG: Final = 414 +REQUEST_URI_TOO_LONG: Final = 414 +UNSUPPORTED_MEDIA_TYPE: Final = 415 +if sys.version_info >= (3, 13): + RANGE_NOT_SATISFIABLE: Final = 416 +REQUESTED_RANGE_NOT_SATISFIABLE: Final = 416 +EXPECTATION_FAILED: Final = 417 +IM_A_TEAPOT: Final = 418 +MISDIRECTED_REQUEST: Final = 421 +if sys.version_info >= (3, 13): + UNPROCESSABLE_CONTENT: Final = 422 +UNPROCESSABLE_ENTITY: Final = 422 +LOCKED: Final = 423 +FAILED_DEPENDENCY: Final = 424 +TOO_EARLY: Final = 425 +UPGRADE_REQUIRED: Final = 426 +PRECONDITION_REQUIRED: Final = 428 +TOO_MANY_REQUESTS: Final = 429 +REQUEST_HEADER_FIELDS_TOO_LARGE: Final = 431 +UNAVAILABLE_FOR_LEGAL_REASONS: Final = 451 + +INTERNAL_SERVER_ERROR: Final = 500 +NOT_IMPLEMENTED: Final = 501 +BAD_GATEWAY: Final = 502 
+SERVICE_UNAVAILABLE: Final = 503 +GATEWAY_TIMEOUT: Final = 504 +HTTP_VERSION_NOT_SUPPORTED: Final = 505 +VARIANT_ALSO_NEGOTIATES: Final = 506 +INSUFFICIENT_STORAGE: Final = 507 +LOOP_DETECTED: Final = 508 +NOT_EXTENDED: Final = 510 +NETWORK_AUTHENTICATION_REQUIRED: Final = 511 + +responses: dict[int, str] + +class HTTPMessage(email.message.Message[str, str]): + def getallmatchingheaders(self, name: str) -> list[str]: ... # undocumented + +@overload +def parse_headers(fp: SupportsReadline[bytes], _class: Callable[[], _MessageT]) -> _MessageT: ... +@overload +def parse_headers(fp: SupportsReadline[bytes]) -> HTTPMessage: ... + +class HTTPResponse(io.BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible method definitions in the base classes + msg: HTTPMessage + headers: HTTPMessage + version: int + debuglevel: int + fp: io.BufferedReader + closed: bool + status: int + reason: str + chunked: bool + chunk_left: int | None + length: int | None + will_close: bool + # url is set on instances of the class in urllib.request.AbstractHTTPHandler.do_open + # to match urllib.response.addinfourl's interface. + # It's not set in HTTPResponse.__init__ or any other method on the class + url: str + def __init__(self, sock: socket, debuglevel: int = 0, method: str | None = None, url: str | None = None) -> None: ... + def peek(self, n: int = -1) -> bytes: ... + def read(self, amt: int | None = None) -> bytes: ... + def read1(self, n: int = -1) -> bytes: ... + def readinto(self, b: WriteableBuffer) -> int: ... + def readline(self, limit: int = -1) -> bytes: ... # type: ignore[override] + @overload + def getheader(self, name: str) -> str | None: ... + @overload + def getheader(self, name: str, default: _T) -> str | _T: ... + def getheaders(self) -> list[tuple[str, str]]: ... + def isclosed(self) -> bool: ... + def __iter__(self) -> Iterator[bytes]: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... + def info(self) -> email.message.Message: ... + def geturl(self) -> str: ... + def getcode(self) -> int: ... + def begin(self) -> None: ... + +class HTTPConnection: + blocksize: int + auto_open: int # undocumented + debuglevel: int + default_port: int # undocumented + response_class: type[HTTPResponse] # undocumented + timeout: float | None + host: str + port: int + sock: socket | MaybeNone # can be `None` if `.connect()` was not called + def __init__( + self, + host: str, + port: int | None = None, + timeout: float | None = ..., + source_address: tuple[str, int] | None = None, + blocksize: int = 8192, + ) -> None: ... + def request( + self, + method: str, + url: str, + body: _DataType | str | None = None, + headers: Mapping[str, _HeaderValue] = {}, + *, + encode_chunked: bool = False, + ) -> None: ... + def getresponse(self) -> HTTPResponse: ... + def set_debuglevel(self, level: int) -> None: ... + if sys.version_info >= (3, 12): + def get_proxy_response_headers(self) -> HTTPMessage | None: ... + + def set_tunnel(self, host: str, port: int | None = None, headers: Mapping[str, str] | None = None) -> None: ... + def connect(self) -> None: ... + def close(self) -> None: ... + def putrequest(self, method: str, url: str, skip_host: bool = False, skip_accept_encoding: bool = False) -> None: ... + def putheader(self, header: str | bytes, *values: _HeaderValue) -> None: ... + def endheaders(self, message_body: _DataType | None = None, *, encode_chunked: bool = False) -> None: ... 
+ def send(self, data: _DataType | str) -> None: ... + +class HTTPSConnection(HTTPConnection): + # Can be `None` if `.connect()` was not called: + sock: ssl.SSLSocket | MaybeNone + if sys.version_info >= (3, 12): + def __init__( + self, + host: str, + port: int | None = None, + *, + timeout: float | None = ..., + source_address: tuple[str, int] | None = None, + context: ssl.SSLContext | None = None, + blocksize: int = 8192, + ) -> None: ... + else: + def __init__( + self, + host: str, + port: int | None = None, + key_file: str | None = None, + cert_file: str | None = None, + timeout: float | None = ..., + source_address: tuple[str, int] | None = None, + *, + context: ssl.SSLContext | None = None, + check_hostname: bool | None = None, + blocksize: int = 8192, + ) -> None: ... + +class HTTPException(Exception): ... + +error = HTTPException + +class NotConnected(HTTPException): ... +class InvalidURL(HTTPException): ... + +class UnknownProtocol(HTTPException): + def __init__(self, version: str) -> None: ... + +class UnknownTransferEncoding(HTTPException): ... +class UnimplementedFileMode(HTTPException): ... + +class IncompleteRead(HTTPException): + def __init__(self, partial: bytes, expected: int | None = None) -> None: ... + partial: bytes + expected: int | None + +class ImproperConnectionState(HTTPException): ... +class CannotSendRequest(ImproperConnectionState): ... +class CannotSendHeader(ImproperConnectionState): ... +class ResponseNotReady(ImproperConnectionState): ... + +class BadStatusLine(HTTPException): + def __init__(self, line: str) -> None: ... + +class LineTooLong(HTTPException): + def __init__(self, line_type: str) -> None: ... + +class RemoteDisconnected(ConnectionResetError, BadStatusLine): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/http/cookiejar.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/http/cookiejar.pyi new file mode 100644 index 0000000..31e1d3f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/http/cookiejar.pyi @@ -0,0 +1,159 @@ +import sys +from _typeshed import StrPath +from collections.abc import Iterator, Sequence +from http.client import HTTPResponse +from re import Pattern +from typing import ClassVar, TypeVar, overload +from urllib.request import Request + +__all__ = [ + "Cookie", + "CookieJar", + "CookiePolicy", + "DefaultCookiePolicy", + "FileCookieJar", + "LWPCookieJar", + "LoadError", + "MozillaCookieJar", +] + +_T = TypeVar("_T") + +class LoadError(OSError): ... + +class CookieJar: + non_word_re: ClassVar[Pattern[str]] # undocumented + quote_re: ClassVar[Pattern[str]] # undocumented + strict_domain_re: ClassVar[Pattern[str]] # undocumented + domain_re: ClassVar[Pattern[str]] # undocumented + dots_re: ClassVar[Pattern[str]] # undocumented + magic_re: ClassVar[Pattern[str]] # undocumented + def __init__(self, policy: CookiePolicy | None = None) -> None: ... + def add_cookie_header(self, request: Request) -> None: ... + def extract_cookies(self, response: HTTPResponse, request: Request) -> None: ... + def set_policy(self, policy: CookiePolicy) -> None: ... + def make_cookies(self, response: HTTPResponse, request: Request) -> Sequence[Cookie]: ... + def set_cookie(self, cookie: Cookie) -> None: ... + def set_cookie_if_ok(self, cookie: Cookie, request: Request) -> None: ... + def clear(self, domain: str | None = None, path: str | None = None, name: str | None = None) -> None: ... + def clear_session_cookies(self) -> None: ... + def clear_expired_cookies(self) -> None: ... 
# undocumented + def __iter__(self) -> Iterator[Cookie]: ... + def __len__(self) -> int: ... + +class FileCookieJar(CookieJar): + filename: str | None + delayload: bool + def __init__(self, filename: StrPath | None = None, delayload: bool = False, policy: CookiePolicy | None = None) -> None: ... + def save(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: ... + def load(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: ... + def revert(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: ... + +class MozillaCookieJar(FileCookieJar): + if sys.version_info < (3, 10): + header: ClassVar[str] # undocumented + +class LWPCookieJar(FileCookieJar): + def as_lwp_str(self, ignore_discard: bool = True, ignore_expires: bool = True) -> str: ... # undocumented + +class CookiePolicy: + netscape: bool + rfc2965: bool + hide_cookie2: bool + def set_ok(self, cookie: Cookie, request: Request) -> bool: ... + def return_ok(self, cookie: Cookie, request: Request) -> bool: ... + def domain_return_ok(self, domain: str, request: Request) -> bool: ... + def path_return_ok(self, path: str, request: Request) -> bool: ... + +class DefaultCookiePolicy(CookiePolicy): + rfc2109_as_netscape: bool + strict_domain: bool + strict_rfc2965_unverifiable: bool + strict_ns_unverifiable: bool + strict_ns_domain: int + strict_ns_set_initial_dollar: bool + strict_ns_set_path: bool + DomainStrictNoDots: ClassVar[int] + DomainStrictNonDomain: ClassVar[int] + DomainRFC2965Match: ClassVar[int] + DomainLiberal: ClassVar[int] + DomainStrict: ClassVar[int] + def __init__( + self, + blocked_domains: Sequence[str] | None = None, + allowed_domains: Sequence[str] | None = None, + netscape: bool = True, + rfc2965: bool = False, + rfc2109_as_netscape: bool | None = None, + hide_cookie2: bool = False, + strict_domain: bool = False, + strict_rfc2965_unverifiable: bool = True, + strict_ns_unverifiable: bool = False, + strict_ns_domain: int = 0, + strict_ns_set_initial_dollar: bool = False, + strict_ns_set_path: bool = False, + secure_protocols: Sequence[str] = ("https", "wss"), + ) -> None: ... + def blocked_domains(self) -> tuple[str, ...]: ... + def set_blocked_domains(self, blocked_domains: Sequence[str]) -> None: ... + def is_blocked(self, domain: str) -> bool: ... + def allowed_domains(self) -> tuple[str, ...] | None: ... + def set_allowed_domains(self, allowed_domains: Sequence[str] | None) -> None: ... + def is_not_allowed(self, domain: str) -> bool: ... + def set_ok_version(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def set_ok_verifiability(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def set_ok_name(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def set_ok_path(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def set_ok_domain(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def set_ok_port(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def return_ok_version(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def return_ok_verifiability(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def return_ok_secure(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def return_ok_expires(self, cookie: Cookie, request: Request) -> bool: ... 
# undocumented + def return_ok_port(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def return_ok_domain(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + +class Cookie: + version: int | None + name: str + value: str | None + port: str | None + path: str + path_specified: bool + secure: bool + expires: int | None + discard: bool + comment: str | None + comment_url: str | None + rfc2109: bool + port_specified: bool + domain: str # undocumented + domain_specified: bool + domain_initial_dot: bool + def __init__( + self, + version: int | None, + name: str, + value: str | None, # undocumented + port: str | None, + port_specified: bool, + domain: str, + domain_specified: bool, + domain_initial_dot: bool, + path: str, + path_specified: bool, + secure: bool, + expires: int | None, + discard: bool, + comment: str | None, + comment_url: str | None, + rest: dict[str, str], + rfc2109: bool = False, + ) -> None: ... + def has_nonstandard_attr(self, name: str) -> bool: ... + @overload + def get_nonstandard_attr(self, name: str) -> str | None: ... + @overload + def get_nonstandard_attr(self, name: str, default: _T) -> str | _T: ... + def set_nonstandard_attr(self, name: str, value: str) -> None: ... + def is_expired(self, now: int | None = None) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/http/cookies.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/http/cookies.pyi new file mode 100644 index 0000000..4df12e3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/http/cookies.pyi @@ -0,0 +1,56 @@ +from collections.abc import Iterable, Mapping +from types import GenericAlias +from typing import Any, Generic, TypeVar, overload +from typing_extensions import TypeAlias + +__all__ = ["CookieError", "BaseCookie", "SimpleCookie"] + +_DataType: TypeAlias = str | Mapping[str, str | Morsel[Any]] +_T = TypeVar("_T") + +@overload +def _quote(str: None) -> None: ... +@overload +def _quote(str: str) -> str: ... +@overload +def _unquote(str: None) -> None: ... +@overload +def _unquote(str: str) -> str: ... + +class CookieError(Exception): ... + +class Morsel(dict[str, Any], Generic[_T]): + @property + def value(self) -> str: ... + @property + def coded_value(self) -> _T: ... + @property + def key(self) -> str: ... + def __init__(self) -> None: ... + def set(self, key: str, val: str, coded_val: _T) -> None: ... + def setdefault(self, key: str, val: str | None = None) -> str: ... + # The dict update can also get a keywords argument so this is incompatible + @overload # type: ignore[override] + def update(self, values: Mapping[str, str]) -> None: ... + @overload + def update(self, values: Iterable[tuple[str, str]]) -> None: ... + def isReservedKey(self, K: str) -> bool: ... + def output(self, attrs: list[str] | None = None, header: str = "Set-Cookie:") -> str: ... + __str__ = output + def js_output(self, attrs: list[str] | None = None) -> str: ... + def OutputString(self, attrs: list[str] | None = None) -> str: ... + def __eq__(self, morsel: object) -> bool: ... + def __setitem__(self, K: str, V: Any) -> None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +class BaseCookie(dict[str, Morsel[_T]], Generic[_T]): + def __init__(self, input: _DataType | None = None) -> None: ... + def value_decode(self, val: str) -> tuple[_T, str]: ... + def value_encode(self, val: _T) -> tuple[_T, str]: ... 
+ def output(self, attrs: list[str] | None = None, header: str = "Set-Cookie:", sep: str = "\r\n") -> str: ... + __str__ = output + def js_output(self, attrs: list[str] | None = None) -> str: ... + def load(self, rawdata: _DataType) -> None: ... + def __setitem__(self, key: str, value: str | Morsel[_T]) -> None: ... + +class SimpleCookie(BaseCookie[str]): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/http/server.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/http/server.pyi new file mode 100644 index 0000000..2c1a374 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/http/server.pyi @@ -0,0 +1,142 @@ +import _socket +import email.message +import io +import socketserver +import sys +from _ssl import _PasswordType +from _typeshed import ReadableBuffer, StrOrBytesPath, StrPath, SupportsRead, SupportsWrite +from collections.abc import Callable, Iterable, Mapping, Sequence +from ssl import Purpose, SSLContext +from typing import Any, AnyStr, BinaryIO, ClassVar, Protocol, type_check_only +from typing_extensions import Self, deprecated + +if sys.version_info >= (3, 14): + __all__ = [ + "HTTPServer", + "ThreadingHTTPServer", + "HTTPSServer", + "ThreadingHTTPSServer", + "BaseHTTPRequestHandler", + "SimpleHTTPRequestHandler", + "CGIHTTPRequestHandler", + ] +else: + __all__ = ["HTTPServer", "ThreadingHTTPServer", "BaseHTTPRequestHandler", "SimpleHTTPRequestHandler", "CGIHTTPRequestHandler"] + +class HTTPServer(socketserver.TCPServer): + server_name: str + server_port: int + +class ThreadingHTTPServer(socketserver.ThreadingMixIn, HTTPServer): ... + +if sys.version_info >= (3, 14): + @type_check_only + class _SSLModule(Protocol): + @staticmethod + def create_default_context( + purpose: Purpose = ..., + *, + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, + ) -> SSLContext: ... + + class HTTPSServer(HTTPServer): + ssl: _SSLModule + certfile: StrOrBytesPath + keyfile: StrOrBytesPath | None + password: _PasswordType | None + alpn_protocols: Iterable[str] + def __init__( + self, + server_address: socketserver._AfInetAddress, + RequestHandlerClass: Callable[[Any, _socket._RetAddress, Self], socketserver.BaseRequestHandler], + bind_and_activate: bool = True, + *, + certfile: StrOrBytesPath, + keyfile: StrOrBytesPath | None = None, + password: _PasswordType | None = None, + alpn_protocols: Iterable[str] | None = None, + ) -> None: ... + def server_activate(self) -> None: ... + + class ThreadingHTTPSServer(socketserver.ThreadingMixIn, HTTPSServer): ... + +class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): + client_address: tuple[str, int] + close_connection: bool + requestline: str + command: str + path: str + request_version: str + headers: email.message.Message + server_version: str + sys_version: str + error_message_format: str + error_content_type: str + protocol_version: str + MessageClass: type + responses: Mapping[int, tuple[str, str]] + default_request_version: str # undocumented + weekdayname: ClassVar[Sequence[str]] # undocumented + monthname: ClassVar[Sequence[str | None]] # undocumented + def handle_one_request(self) -> None: ... + def handle_expect_100(self) -> bool: ... + def send_error(self, code: int, message: str | None = None, explain: str | None = None) -> None: ... + def send_response(self, code: int, message: str | None = None) -> None: ... + def send_header(self, keyword: str, value: str) -> None: ... 
+ def send_response_only(self, code: int, message: str | None = None) -> None: ... + def end_headers(self) -> None: ... + def flush_headers(self) -> None: ... + def log_request(self, code: int | str = "-", size: int | str = "-") -> None: ... + def log_error(self, format: str, *args: Any) -> None: ... + def log_message(self, format: str, *args: Any) -> None: ... + def version_string(self) -> str: ... + def date_time_string(self, timestamp: float | None = None) -> str: ... + def log_date_time_string(self) -> str: ... + def address_string(self) -> str: ... + def parse_request(self) -> bool: ... # undocumented + +class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): + extensions_map: dict[str, str] + if sys.version_info >= (3, 12): + index_pages: ClassVar[tuple[str, ...]] + directory: str + def __init__( + self, + request: socketserver._RequestType, + client_address: _socket._RetAddress, + server: socketserver.BaseServer, + *, + directory: StrPath | None = None, + ) -> None: ... + def do_GET(self) -> None: ... + def do_HEAD(self) -> None: ... + def send_head(self) -> io.BytesIO | BinaryIO | None: ... # undocumented + def list_directory(self, path: StrPath) -> io.BytesIO | None: ... # undocumented + def translate_path(self, path: str) -> str: ... # undocumented + def copyfile(self, source: SupportsRead[AnyStr], outputfile: SupportsWrite[AnyStr]) -> None: ... # undocumented + def guess_type(self, path: StrPath) -> str: ... # undocumented + +def executable(path: StrPath) -> bool: ... # undocumented + +if sys.version_info >= (3, 13): + @deprecated("Deprecated since Python 3.13; will be removed in Python 3.15.") + class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): + cgi_directories: list[str] + have_fork: bool # undocumented + def do_POST(self) -> None: ... + def is_cgi(self) -> bool: ... # undocumented + def is_executable(self, path: StrPath) -> bool: ... # undocumented + def is_python(self, path: StrPath) -> bool: ... # undocumented + def run_cgi(self) -> None: ... # undocumented + +else: + class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): + cgi_directories: list[str] + have_fork: bool # undocumented + def do_POST(self) -> None: ... + def is_cgi(self) -> bool: ... # undocumented + def is_executable(self, path: StrPath) -> bool: ... # undocumented + def is_python(self, path: StrPath) -> bool: ... # undocumented + def run_cgi(self) -> None: ... # undocumented diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/imaplib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/imaplib.pyi new file mode 100644 index 0000000..39fd466 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/imaplib.pyi @@ -0,0 +1,175 @@ +import subprocess +import sys +import time +from _typeshed import ReadableBuffer, SizedBuffer, Unused +from builtins import list as _list # conflicts with a method named "list" +from collections.abc import Callable, Generator +from datetime import datetime +from re import Pattern +from socket import socket as _socket +from ssl import SSLContext, SSLSocket +from types import TracebackType +from typing import IO, Any, Literal, SupportsAbs, SupportsInt +from typing_extensions import Self, TypeAlias, deprecated + +__all__ = ["IMAP4", "IMAP4_stream", "Internaldate2tuple", "Int2AP", "ParseFlags", "Time2Internaldate", "IMAP4_SSL"] + +# TODO: Commands should use their actual return types, not this type alias. +# E.g. 
Tuple[Literal["OK"], List[bytes]] +_CommandResults: TypeAlias = tuple[str, list[Any]] + +_AnyResponseData: TypeAlias = list[None] | list[bytes | tuple[bytes, bytes]] + +Commands: dict[str, tuple[str, ...]] + +class IMAP4: + class error(Exception): ... + class abort(error): ... + class readonly(abort): ... + utf8_enabled: bool + mustquote: Pattern[str] + debug: int + state: str + literal: str | None + tagged_commands: dict[bytes, _list[bytes] | None] + untagged_responses: dict[str, _list[bytes | tuple[bytes, bytes]]] + continuation_response: str + is_readonly: bool + tagnum: int + tagpre: str + tagre: Pattern[str] + welcome: bytes + capabilities: tuple[str, ...] + PROTOCOL_VERSION: str + def __init__(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: ... + def open(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: ... + if sys.version_info >= (3, 14): + @property + @deprecated("IMAP4.file is unsupported, can cause errors, and may be removed.") + def file(self) -> IO[str] | IO[bytes]: ... + else: + file: IO[str] | IO[bytes] + + def __getattr__(self, attr: str) -> Any: ... + host: str + port: int + sock: _socket + def read(self, size: int) -> bytes: ... + def readline(self) -> bytes: ... + def send(self, data: ReadableBuffer) -> None: ... + def shutdown(self) -> None: ... + def socket(self) -> _socket: ... + def recent(self) -> _CommandResults: ... + def response(self, code: str) -> _CommandResults: ... + def append(self, mailbox: str, flags: str, date_time: str, message: ReadableBuffer) -> str: ... + def authenticate(self, mechanism: str, authobject: Callable[[bytes], bytes | None]) -> tuple[str, str]: ... + def capability(self) -> _CommandResults: ... + def check(self) -> _CommandResults: ... + def close(self) -> _CommandResults: ... + def copy(self, message_set: str, new_mailbox: str) -> _CommandResults: ... + def create(self, mailbox: str) -> _CommandResults: ... + def delete(self, mailbox: str) -> _CommandResults: ... + def deleteacl(self, mailbox: str, who: str) -> _CommandResults: ... + def enable(self, capability: str) -> _CommandResults: ... + def __enter__(self) -> Self: ... + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + def expunge(self) -> _CommandResults: ... + def fetch(self, message_set: str, message_parts: str) -> tuple[str, _AnyResponseData]: ... + def getacl(self, mailbox: str) -> _CommandResults: ... + def getannotation(self, mailbox: str, entry: str, attribute: str) -> _CommandResults: ... + def getquota(self, root: str) -> _CommandResults: ... + def getquotaroot(self, mailbox: str) -> _CommandResults: ... + if sys.version_info >= (3, 14): + def idle(self, duration: float | None = None) -> Idler: ... + + def list(self, directory: str = '""', pattern: str = "*") -> tuple[str, _AnyResponseData]: ... + def login(self, user: str, password: str) -> tuple[Literal["OK"], _list[bytes]]: ... + def login_cram_md5(self, user: str, password: str) -> _CommandResults: ... + def logout(self) -> tuple[str, _AnyResponseData]: ... + def lsub(self, directory: str = '""', pattern: str = "*") -> _CommandResults: ... + def myrights(self, mailbox: str) -> _CommandResults: ... + def namespace(self) -> _CommandResults: ... + def noop(self) -> tuple[str, _list[bytes]]: ... + def partial(self, message_num: str, message_part: str, start: str, length: str) -> _CommandResults: ... + def proxyauth(self, user: str) -> _CommandResults: ... 
+ def rename(self, oldmailbox: str, newmailbox: str) -> _CommandResults: ... + def search(self, charset: str | None, *criteria: str) -> _CommandResults: ... + def select(self, mailbox: str = "INBOX", readonly: bool = False) -> tuple[str, _list[bytes | None]]: ... + def setacl(self, mailbox: str, who: str, what: str) -> _CommandResults: ... + def setannotation(self, *args: str) -> _CommandResults: ... + def setquota(self, root: str, limits: str) -> _CommandResults: ... + def sort(self, sort_criteria: str, charset: str, *search_criteria: str) -> _CommandResults: ... + def starttls(self, ssl_context: Any | None = None) -> tuple[Literal["OK"], _list[None]]: ... + def status(self, mailbox: str, names: str) -> _CommandResults: ... + def store(self, message_set: str, command: str, flags: str) -> _CommandResults: ... + def subscribe(self, mailbox: str) -> _CommandResults: ... + def thread(self, threading_algorithm: str, charset: str, *search_criteria: str) -> _CommandResults: ... + def uid(self, command: str, *args: str) -> _CommandResults: ... + def unsubscribe(self, mailbox: str) -> _CommandResults: ... + def unselect(self) -> _CommandResults: ... + def xatom(self, name: str, *args: str) -> _CommandResults: ... + def print_log(self) -> None: ... + +if sys.version_info >= (3, 14): + class Idler: + def __init__(self, imap: IMAP4, duration: float | None = None) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, exc_type: object, exc_val: Unused, exc_tb: Unused) -> Literal[False]: ... + def __iter__(self) -> Self: ... + def __next__(self) -> tuple[str, float | None]: ... + def burst(self, interval: float = 0.1) -> Generator[tuple[str, float | None]]: ... + +class IMAP4_SSL(IMAP4): + if sys.version_info < (3, 12): + keyfile: str + certfile: str + if sys.version_info >= (3, 12): + def __init__( + self, host: str = "", port: int = 993, *, ssl_context: SSLContext | None = None, timeout: float | None = None + ) -> None: ... + else: + def __init__( + self, + host: str = "", + port: int = 993, + keyfile: str | None = None, + certfile: str | None = None, + ssl_context: SSLContext | None = None, + timeout: float | None = None, + ) -> None: ... + sslobj: SSLSocket + if sys.version_info >= (3, 14): + @property + @deprecated("IMAP4_SSL.file is unsupported, can cause errors, and may be removed.") + def file(self) -> IO[Any]: ... + else: + file: IO[Any] + + def open(self, host: str = "", port: int | None = 993, timeout: float | None = None) -> None: ... + def ssl(self) -> SSLSocket: ... + +class IMAP4_stream(IMAP4): + command: str + def __init__(self, command: str) -> None: ... + if sys.version_info >= (3, 14): + @property + @deprecated("IMAP4_stream.file is unsupported, can cause errors, and may be removed.") + def file(self) -> IO[Any]: ... + else: + file: IO[Any] + process: subprocess.Popen[bytes] + writefile: IO[Any] + readfile: IO[Any] + def open(self, host: str | None = None, port: int | None = None, timeout: float | None = None) -> None: ... + +class _Authenticator: + mech: Callable[[bytes], bytes | bytearray | memoryview | str | None] + def __init__(self, mechinst: Callable[[bytes], bytes | bytearray | memoryview | str | None]) -> None: ... + def process(self, data: str) -> str: ... + def encode(self, inp: bytes | bytearray | memoryview) -> str: ... + def decode(self, inp: str | SizedBuffer) -> bytes: ... + +def Internaldate2tuple(resp: ReadableBuffer) -> time.struct_time | None: ... +def Int2AP(num: SupportsAbs[SupportsInt]) -> bytes: ... 
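As a rough illustration of how the IMAP4/IMAP4_SSL command surface stubbed above is typically driven (host, credentials, and mailbox below are placeholders, not values from this repository):

import imaplib

HOST = "imap.example.com"  # placeholder server

with imaplib.IMAP4_SSL(HOST, 993) as imap:            # IMAP4 supports the context-manager protocol
    imap.login("user@example.com", "app-password")    # -> ('OK', [b'...'])
    typ, _ = imap.select("INBOX", readonly=True)       # open a mailbox without marking messages read
    typ, data = imap.search(None, "UNSEEN")            # data[0] is a space-separated list of message numbers
    for num in data[0].split():
        typ, msg_data = imap.fetch(num, "(RFC822)")    # msg_data[0][1] holds the raw message bytes
        print(num, len(msg_data[0][1]), "bytes")
    # __exit__ issues LOGOUT automatically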
+def ParseFlags(resp: ReadableBuffer) -> tuple[bytes, ...]: ... +def Time2Internaldate(date_time: float | time.struct_time | time._TimeTuple | datetime | str) -> str: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/imghdr.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/imghdr.pyi new file mode 100644 index 0000000..e45ca3e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/imghdr.pyi @@ -0,0 +1,18 @@ +from _typeshed import StrPath +from collections.abc import Callable +from typing import Any, BinaryIO, Protocol, overload, type_check_only + +__all__ = ["what"] + +@type_check_only +class _ReadableBinary(Protocol): + def tell(self) -> int: ... + def read(self, size: int, /) -> bytes: ... + def seek(self, offset: int, /) -> Any: ... + +@overload +def what(file: StrPath | _ReadableBinary, h: None = None) -> str | None: ... +@overload +def what(file: Any, h: bytes) -> str | None: ... + +tests: list[Callable[[bytes, BinaryIO | None], str | None]] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/imp.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/imp.pyi new file mode 100644 index 0000000..b5b4223 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/imp.pyi @@ -0,0 +1,63 @@ +import types +from _imp import ( + acquire_lock as acquire_lock, + create_dynamic as create_dynamic, + get_frozen_object as get_frozen_object, + init_frozen as init_frozen, + is_builtin as is_builtin, + is_frozen as is_frozen, + is_frozen_package as is_frozen_package, + lock_held as lock_held, + release_lock as release_lock, +) +from _typeshed import StrPath +from os import PathLike +from types import TracebackType +from typing import IO, Any, Final, Protocol, type_check_only + +SEARCH_ERROR: Final = 0 +PY_SOURCE: Final = 1 +PY_COMPILED: Final = 2 +C_EXTENSION: Final = 3 +PY_RESOURCE: Final = 4 +PKG_DIRECTORY: Final = 5 +C_BUILTIN: Final = 6 +PY_FROZEN: Final = 7 +PY_CODERESOURCE: Final = 8 +IMP_HOOK: Final = 9 + +def new_module(name: str) -> types.ModuleType: ... +def get_magic() -> bytes: ... +def get_tag() -> str: ... +def cache_from_source(path: StrPath, debug_override: bool | None = None) -> str: ... +def source_from_cache(path: StrPath) -> str: ... +def get_suffixes() -> list[tuple[str, str, int]]: ... + +class NullImporter: + def __init__(self, path: StrPath) -> None: ... + def find_module(self, fullname: Any) -> None: ... + +# Technically, a text file has to support a slightly different set of operations than a binary file, +# but we ignore that here. +@type_check_only +class _FileLike(Protocol): + closed: bool + mode: str + def read(self) -> str | bytes: ... + def close(self) -> Any: ... + def __enter__(self) -> Any: ... + def __exit__(self, typ: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None, /) -> Any: ... + +# PathLike doesn't work for the pathname argument here +def load_source(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: ... +def load_compiled(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: ... +def load_package(name: str, path: StrPath) -> types.ModuleType: ... +def load_module(name: str, file: _FileLike | None, filename: str, details: tuple[str, str, int]) -> types.ModuleType: ... + +# IO[Any] is a TextIOWrapper if name is a .py file, and a FileIO otherwise. 
+def find_module( + name: str, path: None | list[str] | list[PathLike[str]] | list[StrPath] = None +) -> tuple[IO[Any], str, tuple[str, str, int]]: ... +def reload(module: types.ModuleType) -> types.ModuleType: ... +def init_builtin(name: str) -> types.ModuleType | None: ... +def load_dynamic(name: str, path: str, file: Any = None) -> types.ModuleType: ... # file argument is ignored diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/__init__.pyi new file mode 100644 index 0000000..d60f90a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/__init__.pyi @@ -0,0 +1,17 @@ +import sys +from importlib._bootstrap import __import__ as __import__ +from importlib.abc import Loader +from types import ModuleType +from typing_extensions import deprecated + +__all__ = ["__import__", "import_module", "invalidate_caches", "reload"] + +# `importlib.import_module` return type should be kept the same as `builtins.__import__` +def import_module(name: str, package: str | None = None) -> ModuleType: ... + +if sys.version_info < (3, 12): + @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `importlib.util.find_spec()` instead.") + def find_loader(name: str, path: str | None = None) -> Loader | None: ... + +def invalidate_caches() -> None: ... +def reload(module: ModuleType) -> ModuleType: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/_abc.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/_abc.pyi new file mode 100644 index 0000000..90ab340 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/_abc.pyi @@ -0,0 +1,20 @@ +import sys +import types +from abc import ABCMeta +from importlib.machinery import ModuleSpec +from typing_extensions import deprecated + +if sys.version_info >= (3, 10): + class Loader(metaclass=ABCMeta): + def load_module(self, fullname: str) -> types.ModuleType: ... + if sys.version_info < (3, 12): + @deprecated( + "Deprecated since Python 3.4; removed in Python 3.12. " + "The module spec is now used by the import machinery to generate a module repr." + ) + def module_repr(self, module: types.ModuleType) -> str: ... + + def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: ... + # Not defined on the actual class for backwards-compatibility reasons, + # but expected in new code. + def exec_module(self, module: types.ModuleType) -> None: ... 
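The importlib/__init__.pyi stub above covers the small top-level surface of the package; a minimal sketch of how those functions are commonly used (the module name is only an example):

import importlib

# Dynamic import by dotted name.
json_mod = importlib.import_module("json")
print(json_mod.dumps({"ok": True}))

# If files were just written into a directory on sys.path, refresh the
# finder caches before importing them.
importlib.invalidate_caches()

# Re-execute a module's code in place; returns the same module object.
json_mod = importlib.reload(json_mod)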
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/_bootstrap.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/_bootstrap.pyi new file mode 100644 index 0000000..02427ff --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/_bootstrap.pyi @@ -0,0 +1,2 @@ +from _frozen_importlib import * +from _frozen_importlib import __import__ as __import__, _init_module_attrs as _init_module_attrs diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/_bootstrap_external.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/_bootstrap_external.pyi new file mode 100644 index 0000000..6210ce7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/_bootstrap_external.pyi @@ -0,0 +1,2 @@ +from _frozen_importlib_external import * +from _frozen_importlib_external import _NamespaceLoader as _NamespaceLoader diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/abc.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/abc.pyi new file mode 100644 index 0000000..ef7761f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/abc.pyi @@ -0,0 +1,187 @@ +import _ast +import sys +import types +from _typeshed import ReadableBuffer, StrPath +from abc import ABCMeta, abstractmethod +from collections.abc import Iterator, Mapping, Sequence +from importlib import _bootstrap_external +from importlib.machinery import ModuleSpec +from io import BufferedReader +from typing import IO, Any, Literal, Protocol, overload, runtime_checkable +from typing_extensions import deprecated + +if sys.version_info >= (3, 11): + __all__ = [ + "Loader", + "MetaPathFinder", + "PathEntryFinder", + "ResourceLoader", + "InspectLoader", + "ExecutionLoader", + "FileLoader", + "SourceLoader", + ] + + if sys.version_info < (3, 12): + __all__ += ["Finder", "ResourceReader", "Traversable", "TraversableResources"] + +if sys.version_info >= (3, 10): + from importlib._abc import Loader as Loader +else: + class Loader(metaclass=ABCMeta): + def load_module(self, fullname: str) -> types.ModuleType: ... + def module_repr(self, module: types.ModuleType) -> str: ... + def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: ... + # Not defined on the actual class for backwards-compatibility reasons, + # but expected in new code. + def exec_module(self, module: types.ModuleType) -> None: ... + +if sys.version_info < (3, 12): + @deprecated("Deprecated since Python 3.3; removed in Python 3.12. Use `MetaPathFinder` or `PathEntryFinder` instead.") + class Finder(metaclass=ABCMeta): ... + +@deprecated("Deprecated since Python 3.7. Use `importlib.resources.abc.TraversableResources` instead.") +class ResourceLoader(Loader): + @abstractmethod + def get_data(self, path: str) -> bytes: ... + +class InspectLoader(Loader): + def is_package(self, fullname: str) -> bool: ... + def get_code(self, fullname: str) -> types.CodeType | None: ... + @abstractmethod + def get_source(self, fullname: str) -> str | None: ... + def exec_module(self, module: types.ModuleType) -> None: ... + @staticmethod + def source_to_code( + data: ReadableBuffer | str | _ast.Module | _ast.Expression | _ast.Interactive, path: bytes | StrPath = "" + ) -> types.CodeType: ... + +class ExecutionLoader(InspectLoader): + @abstractmethod + def get_filename(self, fullname: str) -> str: ... 
+ +class SourceLoader(_bootstrap_external.SourceLoader, ResourceLoader, ExecutionLoader, metaclass=ABCMeta): # type: ignore[misc] # incompatible definitions of source_to_code in the base classes + @deprecated("Deprecated since Python 3.3. Use `importlib.resources.abc.SourceLoader.path_stats` instead.") + def path_mtime(self, path: str) -> float: ... + def set_data(self, path: str, data: bytes) -> None: ... + def get_source(self, fullname: str) -> str | None: ... + def path_stats(self, path: str) -> Mapping[str, Any]: ... + +# The base classes differ starting in 3.10: +if sys.version_info >= (3, 10): + # Please keep in sync with _typeshed.importlib.MetaPathFinderProtocol + class MetaPathFinder(metaclass=ABCMeta): + if sys.version_info < (3, 12): + @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `MetaPathFinder.find_spec()` instead.") + def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: ... + + def invalidate_caches(self) -> None: ... + # Not defined on the actual class, but expected to exist. + def find_spec( + self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ..., / + ) -> ModuleSpec | None: ... + + class PathEntryFinder(metaclass=ABCMeta): + if sys.version_info < (3, 12): + @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `PathEntryFinder.find_spec()` instead.") + def find_module(self, fullname: str) -> Loader | None: ... + @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `find_spec()` instead.") + def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: ... + + def invalidate_caches(self) -> None: ... + # Not defined on the actual class, but expected to exist. + def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ... + +else: + # Please keep in sync with _typeshed.importlib.MetaPathFinderProtocol + class MetaPathFinder(Finder): + def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: ... + def invalidate_caches(self) -> None: ... + # Not defined on the actual class, but expected to exist. + def find_spec( + self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ..., / + ) -> ModuleSpec | None: ... + + class PathEntryFinder(Finder): + def find_module(self, fullname: str) -> Loader | None: ... + def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: ... + def invalidate_caches(self) -> None: ... + # Not defined on the actual class, but expected to exist. + def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ... + +class FileLoader(_bootstrap_external.FileLoader, ResourceLoader, ExecutionLoader, metaclass=ABCMeta): + name: str + path: str + def __init__(self, fullname: str, path: str) -> None: ... + def get_data(self, path: str) -> bytes: ... + def get_filename(self, fullname: str | None = None) -> str: ... + def load_module(self, fullname: str | None = None) -> types.ModuleType: ... + +if sys.version_info < (3, 11): + class ResourceReader(metaclass=ABCMeta): + @abstractmethod + def open_resource(self, resource: str) -> IO[bytes]: ... + @abstractmethod + def resource_path(self, resource: str) -> str: ... + if sys.version_info >= (3, 10): + @abstractmethod + def is_resource(self, path: str) -> bool: ... + else: + @abstractmethod + def is_resource(self, name: str) -> bool: ... + + @abstractmethod + def contents(self) -> Iterator[str]: ... 
+ + @runtime_checkable + class Traversable(Protocol): + @abstractmethod + def is_dir(self) -> bool: ... + @abstractmethod + def is_file(self) -> bool: ... + @abstractmethod + def iterdir(self) -> Iterator[Traversable]: ... + if sys.version_info >= (3, 11): + @abstractmethod + def joinpath(self, *descendants: str) -> Traversable: ... + else: + @abstractmethod + def joinpath(self, child: str, /) -> Traversable: ... + + # The documentation and runtime protocol allows *args, **kwargs arguments, + # but this would mean that all implementers would have to support them, + # which is not the case. + @overload + @abstractmethod + def open(self, mode: Literal["r"] = "r", *, encoding: str | None = None, errors: str | None = None) -> IO[str]: ... + @overload + @abstractmethod + def open(self, mode: Literal["rb"]) -> IO[bytes]: ... + @property + @abstractmethod + def name(self) -> str: ... + if sys.version_info >= (3, 10): + def __truediv__(self, child: str, /) -> Traversable: ... + else: + @abstractmethod + def __truediv__(self, child: str, /) -> Traversable: ... + + @abstractmethod + def read_bytes(self) -> bytes: ... + @abstractmethod + def read_text(self, encoding: str | None = None) -> str: ... + + class TraversableResources(ResourceReader): + @abstractmethod + def files(self) -> Traversable: ... + def open_resource(self, resource: str) -> BufferedReader: ... + def resource_path(self, resource: Any) -> str: ... + def is_resource(self, path: str) -> bool: ... + def contents(self) -> Iterator[str]: ... + +elif sys.version_info < (3, 14): + from importlib.resources.abc import ( + ResourceReader as ResourceReader, + Traversable as Traversable, + TraversableResources as TraversableResources, + ) diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/machinery.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/machinery.pyi new file mode 100644 index 0000000..767046b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/machinery.pyi @@ -0,0 +1,43 @@ +import sys +from importlib._bootstrap import BuiltinImporter as BuiltinImporter, FrozenImporter as FrozenImporter, ModuleSpec as ModuleSpec +from importlib._bootstrap_external import ( + BYTECODE_SUFFIXES as BYTECODE_SUFFIXES, + DEBUG_BYTECODE_SUFFIXES as DEBUG_BYTECODE_SUFFIXES, + EXTENSION_SUFFIXES as EXTENSION_SUFFIXES, + OPTIMIZED_BYTECODE_SUFFIXES as OPTIMIZED_BYTECODE_SUFFIXES, + SOURCE_SUFFIXES as SOURCE_SUFFIXES, + ExtensionFileLoader as ExtensionFileLoader, + FileFinder as FileFinder, + PathFinder as PathFinder, + SourceFileLoader as SourceFileLoader, + SourcelessFileLoader as SourcelessFileLoader, + WindowsRegistryFinder as WindowsRegistryFinder, +) + +if sys.version_info >= (3, 11): + from importlib._bootstrap_external import NamespaceLoader as NamespaceLoader +if sys.version_info >= (3, 14): + from importlib._bootstrap_external import AppleFrameworkLoader as AppleFrameworkLoader + +def all_suffixes() -> list[str]: ... 
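The importlib.abc stubs above define the finder/loader ABCs. As a hedged sketch (the module name and its source are invented for illustration), a minimal in-memory finder/loader pair that the import machinery will accept looks roughly like this:

import sys
import types
from importlib.abc import Loader, MetaPathFinder
from importlib.machinery import ModuleSpec
from importlib.util import spec_from_loader

FAKE_SOURCE = {"demo_virtual_mod": "ANSWER = 42\n"}  # invented example module

class InMemoryLoader(Loader):
    def create_module(self, spec: ModuleSpec) -> types.ModuleType | None:
        return None  # defer to the default module creation

    def exec_module(self, module: types.ModuleType) -> None:
        exec(FAKE_SOURCE[module.__name__], module.__dict__)

class InMemoryFinder(MetaPathFinder):
    def find_spec(self, fullname, path, target=None):
        if fullname in FAKE_SOURCE:
            return spec_from_loader(fullname, InMemoryLoader())
        return None  # let the other finders handle everything else

sys.meta_path.insert(0, InMemoryFinder())

import demo_virtual_mod  # resolved by the finder above
print(demo_virtual_mod.ANSWER)  # 42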
+ +if sys.version_info >= (3, 14): + __all__ = [ + "AppleFrameworkLoader", + "BYTECODE_SUFFIXES", + "BuiltinImporter", + "DEBUG_BYTECODE_SUFFIXES", + "EXTENSION_SUFFIXES", + "ExtensionFileLoader", + "FileFinder", + "FrozenImporter", + "ModuleSpec", + "NamespaceLoader", + "OPTIMIZED_BYTECODE_SUFFIXES", + "PathFinder", + "SOURCE_SUFFIXES", + "SourceFileLoader", + "SourcelessFileLoader", + "WindowsRegistryFinder", + "all_suffixes", + ] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi new file mode 100644 index 0000000..9286e92 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi @@ -0,0 +1,320 @@ +import abc +import pathlib +import sys +import types +from _collections_abc import dict_keys, dict_values +from _typeshed import StrPath +from collections.abc import Iterable, Iterator, Mapping +from email.message import Message +from importlib.abc import MetaPathFinder +from os import PathLike +from pathlib import Path +from re import Pattern +from typing import Any, ClassVar, Generic, NamedTuple, TypeVar, overload +from typing_extensions import Self, TypeAlias, deprecated, disjoint_base + +_T = TypeVar("_T") +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") + +__all__ = [ + "Distribution", + "DistributionFinder", + "PackageNotFoundError", + "distribution", + "distributions", + "entry_points", + "files", + "metadata", + "requires", + "version", +] + +if sys.version_info >= (3, 10): + __all__ += ["PackageMetadata", "packages_distributions"] + +if sys.version_info >= (3, 10): + from importlib.metadata._meta import PackageMetadata as PackageMetadata, SimplePath + def packages_distributions() -> Mapping[str, list[str]]: ... + + _SimplePath: TypeAlias = SimplePath + +else: + _SimplePath: TypeAlias = Path + +class PackageNotFoundError(ModuleNotFoundError): + @property + def name(self) -> str: ... # type: ignore[override] + +if sys.version_info >= (3, 13): + _EntryPointBase = object +elif sys.version_info >= (3, 11): + class DeprecatedTuple: + def __getitem__(self, item: int) -> str: ... + + _EntryPointBase = DeprecatedTuple +else: + class _EntryPointBase(NamedTuple): + name: str + value: str + group: str + +if sys.version_info >= (3, 11): + class EntryPoint(_EntryPointBase): + pattern: ClassVar[Pattern[str]] + name: str + value: str + group: str + + def __init__(self, name: str, value: str, group: str) -> None: ... + def load(self) -> Any: ... # Callable[[], Any] or an importable module + @property + def extras(self) -> list[str]: ... + @property + def module(self) -> str: ... + @property + def attr(self) -> str: ... + dist: ClassVar[Distribution | None] + def matches( + self, + *, + name: str = ..., + value: str = ..., + group: str = ..., + module: str = ..., + attr: str = ..., + extras: list[str] = ..., + ) -> bool: ... # undocumented + def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + def __lt__(self, other: object) -> bool: ... + if sys.version_info < (3, 12): + def __iter__(self) -> Iterator[Any]: ... # result of iter((str, Self)), really + +else: + @disjoint_base + class EntryPoint(_EntryPointBase): + pattern: ClassVar[Pattern[str]] + + def load(self) -> Any: ... # Callable[[], Any] or an importable module + @property + def extras(self) -> list[str]: ... + @property + def module(self) -> str: ... + @property + def attr(self) -> str: ... 
+ if sys.version_info >= (3, 10): + dist: ClassVar[Distribution | None] + def matches( + self, + *, + name: str = ..., + value: str = ..., + group: str = ..., + module: str = ..., + attr: str = ..., + extras: list[str] = ..., + ) -> bool: ... # undocumented + + def __hash__(self) -> int: ... + def __iter__(self) -> Iterator[Any]: ... # result of iter((str, Self)), really + +if sys.version_info >= (3, 12): + class EntryPoints(tuple[EntryPoint, ...]): + __slots__ = () + def __getitem__(self, name: str) -> EntryPoint: ... # type: ignore[override] + def select( + self, + *, + name: str = ..., + value: str = ..., + group: str = ..., + module: str = ..., + attr: str = ..., + extras: list[str] = ..., + ) -> EntryPoints: ... + @property + def names(self) -> set[str]: ... + @property + def groups(self) -> set[str]: ... + +elif sys.version_info >= (3, 10): + class DeprecatedList(list[_T]): + __slots__ = () + + class EntryPoints(DeprecatedList[EntryPoint]): # use as list is deprecated since 3.10 + # int argument is deprecated since 3.10 + __slots__ = () + def __getitem__(self, name: int | str) -> EntryPoint: ... # type: ignore[override] + def select( + self, + *, + name: str = ..., + value: str = ..., + group: str = ..., + module: str = ..., + attr: str = ..., + extras: list[str] = ..., + ) -> EntryPoints: ... + @property + def names(self) -> set[str]: ... + @property + def groups(self) -> set[str]: ... + +if sys.version_info >= (3, 10) and sys.version_info < (3, 12): + class Deprecated(Generic[_KT, _VT]): + def __getitem__(self, name: _KT) -> _VT: ... + @overload + def get(self, name: _KT, default: None = None) -> _VT | None: ... + @overload + def get(self, name: _KT, default: _VT) -> _VT: ... + @overload + def get(self, name: _KT, default: _T) -> _VT | _T: ... + def __iter__(self) -> Iterator[_KT]: ... + def __contains__(self, *args: object) -> bool: ... + def keys(self) -> dict_keys[_KT, _VT]: ... + def values(self) -> dict_values[_KT, _VT]: ... + + @deprecated("Deprecated since Python 3.10; removed in Python 3.12. Use `select` instead.") + class SelectableGroups(Deprecated[str, EntryPoints], dict[str, EntryPoints]): # use as dict is deprecated since 3.10 + @classmethod + def load(cls, eps: Iterable[EntryPoint]) -> Self: ... + @property + def groups(self) -> set[str]: ... + @property + def names(self) -> set[str]: ... + @overload + def select(self) -> Self: ... + @overload + def select( + self, + *, + name: str = ..., + value: str = ..., + group: str = ..., + module: str = ..., + attr: str = ..., + extras: list[str] = ..., + ) -> EntryPoints: ... + +class PackagePath(pathlib.PurePosixPath): + def read_text(self, encoding: str = "utf-8") -> str: ... + def read_binary(self) -> bytes: ... + def locate(self) -> PathLike[str]: ... + # The following attributes are not defined on PackagePath, but are dynamically added by Distribution.files: + hash: FileHash | None + size: int | None + dist: Distribution + +class FileHash: + mode: str + value: str + def __init__(self, spec: str) -> None: ... + +if sys.version_info >= (3, 12): + class DeprecatedNonAbstract: ... + _distribution_parent = DeprecatedNonAbstract +else: + _distribution_parent = object + +class Distribution(_distribution_parent): + @abc.abstractmethod + def read_text(self, filename: str) -> str | None: ... + @abc.abstractmethod + def locate_file(self, path: StrPath) -> _SimplePath: ... + @classmethod + def from_name(cls, name: str) -> Distribution: ... 
+ @overload + @classmethod + def discover(cls, *, context: DistributionFinder.Context) -> Iterable[Distribution]: ... + @overload + @classmethod + def discover( + cls, *, context: None = None, name: str | None = ..., path: list[str] = ..., **kwargs: Any + ) -> Iterable[Distribution]: ... + @staticmethod + def at(path: StrPath) -> PathDistribution: ... + + if sys.version_info >= (3, 10): + @property + def metadata(self) -> PackageMetadata: ... + @property + def entry_points(self) -> EntryPoints: ... + else: + @property + def metadata(self) -> Message: ... + @property + def entry_points(self) -> list[EntryPoint]: ... + + @property + def version(self) -> str: ... + @property + def files(self) -> list[PackagePath] | None: ... + @property + def requires(self) -> list[str] | None: ... + if sys.version_info >= (3, 10): + @property + def name(self) -> str: ... + if sys.version_info >= (3, 13): + @property + def origin(self) -> types.SimpleNamespace | None: ... + +class DistributionFinder(MetaPathFinder): + class Context: + name: str | None + def __init__(self, *, name: str | None = ..., path: list[str] = ..., **kwargs: Any) -> None: ... + @property + def path(self) -> list[str]: ... + + @abc.abstractmethod + def find_distributions(self, context: DistributionFinder.Context = ...) -> Iterable[Distribution]: ... + +class MetadataPathFinder(DistributionFinder): + @classmethod + def find_distributions(cls, context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: ... + if sys.version_info >= (3, 11): + @classmethod + def invalidate_caches(cls) -> None: ... + elif sys.version_info >= (3, 10): + # Yes, this is an instance method that has a parameter named "cls" + def invalidate_caches(cls) -> None: ... + +class PathDistribution(Distribution): + _path: _SimplePath + def __init__(self, path: _SimplePath) -> None: ... + def read_text(self, filename: StrPath) -> str | None: ... + def locate_file(self, path: StrPath) -> _SimplePath: ... + +def distribution(distribution_name: str) -> Distribution: ... +@overload +def distributions(*, context: DistributionFinder.Context) -> Iterable[Distribution]: ... +@overload +def distributions( + *, context: None = None, name: str | None = ..., path: list[str] = ..., **kwargs: Any +) -> Iterable[Distribution]: ... + +if sys.version_info >= (3, 10): + def metadata(distribution_name: str) -> PackageMetadata: ... + +else: + def metadata(distribution_name: str) -> Message: ... + +if sys.version_info >= (3, 12): + def entry_points( + *, name: str = ..., value: str = ..., group: str = ..., module: str = ..., attr: str = ..., extras: list[str] = ... + ) -> EntryPoints: ... + +elif sys.version_info >= (3, 10): + @overload + def entry_points() -> SelectableGroups: ... + @overload + def entry_points( + *, name: str = ..., value: str = ..., group: str = ..., module: str = ..., attr: str = ..., extras: list[str] = ... + ) -> EntryPoints: ... + +else: + def entry_points() -> dict[str, list[EntryPoint]]: ... + +def version(distribution_name: str) -> str: ... +def files(distribution_name: str) -> list[PackagePath] | None: ... +def requires(distribution_name: str) -> list[str] | None: ... 
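The importlib.metadata stubs above describe the distribution-query API; a rough usage sketch (the distribution name "pip" is only an assumed installed package):

from importlib.metadata import (
    PackageNotFoundError, distribution, entry_points, requires, version,
)

try:
    print(version("pip"))           # e.g. "24.0"
    print(requires("pip"))          # list of requirement strings, or None
    dist = distribution("pip")
    print(len(dist.files or []), "installed files")
except PackageNotFoundError:
    print("pip is not installed in this environment")

# On 3.12+ entry_points() takes keyword selectors directly; on 3.10/3.11 the
# same filtering is routed through EntryPoints.select().
for ep in entry_points(group="console_scripts"):
    print(ep.name, "->", ep.value)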
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/metadata/_meta.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/metadata/_meta.pyi new file mode 100644 index 0000000..9f791da --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/metadata/_meta.pyi @@ -0,0 +1,63 @@ +import sys +from _typeshed import StrPath +from collections.abc import Iterator +from os import PathLike +from typing import Any, Protocol, overload +from typing_extensions import TypeVar + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True, default=Any) + +class PackageMetadata(Protocol): + def __len__(self) -> int: ... + def __contains__(self, item: str) -> bool: ... + def __getitem__(self, key: str) -> str: ... + def __iter__(self) -> Iterator[str]: ... + @property + def json(self) -> dict[str, str | list[str]]: ... + @overload + def get_all(self, name: str, failobj: None = None) -> list[Any] | None: ... + @overload + def get_all(self, name: str, failobj: _T) -> list[Any] | _T: ... + if sys.version_info >= (3, 12): + @overload + def get(self, name: str, failobj: None = None) -> str | None: ... + @overload + def get(self, name: str, failobj: _T) -> _T | str: ... + +if sys.version_info >= (3, 13): + class SimplePath(Protocol): + def joinpath(self, other: StrPath, /) -> SimplePath: ... + def __truediv__(self, other: StrPath, /) -> SimplePath: ... + # Incorrect at runtime + @property + def parent(self) -> PathLike[str]: ... + def read_text(self, encoding: str | None = None) -> str: ... + def read_bytes(self) -> bytes: ... + def exists(self) -> bool: ... + +elif sys.version_info >= (3, 12): + class SimplePath(Protocol[_T_co]): + # At runtime this is defined as taking `str | _T`, but that causes trouble. + # See #11436. + def joinpath(self, other: str, /) -> _T_co: ... + @property + def parent(self) -> _T_co: ... + def read_text(self) -> str: ... + # As with joinpath(), this is annotated as taking `str | _T` at runtime. + def __truediv__(self, other: str, /) -> _T_co: ... + +else: + class SimplePath(Protocol): + # Actually takes only self at runtime, but that's clearly wrong + def joinpath(self, other: Any, /) -> SimplePath: ... + # Not defined as a property at runtime, but it should be + @property + def parent(self) -> Any: ... + def read_text(self) -> str: ... + # There was a bug in `SimplePath` definition in cpython, see #8451 + # Strictly speaking `__div__` was defined in 3.10, not __truediv__, + # but it should have always been `__truediv__`. + # Also, the runtime defines this method as taking no arguments, + # which is obviously wrong. + def __truediv__(self, other: Any, /) -> SimplePath: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/metadata/diagnose.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/metadata/diagnose.pyi new file mode 100644 index 0000000..565872f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/metadata/diagnose.pyi @@ -0,0 +1,2 @@ +def inspect(path: str) -> None: ... +def run() -> None: ... 
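_meta.pyi above types the PackageMetadata protocol returned by metadata() / Distribution.metadata; a small sketch of reading it as the mapping it is (again assuming "pip" happens to be installed):

from importlib.metadata import metadata

md = metadata("pip")                    # satisfies PackageMetadata on 3.10+
print(md["Name"], md["Version"])
print(md.get_all("Classifier") or [])   # multi-valued fields come back as a list
print(md.json["name"])                  # 3.10+: a plain-dict view of the metadata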
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/readers.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/readers.pyi new file mode 100644 index 0000000..4a6c739 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/readers.pyi @@ -0,0 +1,72 @@ +# On py311+, things are actually defined in importlib.resources.readers, +# and re-exported here, +# but doing it this way leads to less code duplication for us + +import pathlib +import sys +import zipfile +from _typeshed import StrPath +from collections.abc import Iterable, Iterator +from io import BufferedReader +from typing import Literal, NoReturn, TypeVar +from typing_extensions import Never + +if sys.version_info >= (3, 10): + from importlib._bootstrap_external import FileLoader + from zipimport import zipimporter + +if sys.version_info >= (3, 11): + from importlib.resources import abc +else: + from importlib import abc + +if sys.version_info >= (3, 10): + if sys.version_info >= (3, 11): + __all__ = ["FileReader", "ZipReader", "MultiplexedPath", "NamespaceReader"] + + if sys.version_info < (3, 11): + _T = TypeVar("_T") + + def remove_duplicates(items: Iterable[_T]) -> Iterator[_T]: ... + + class FileReader(abc.TraversableResources): + path: pathlib.Path + def __init__(self, loader: FileLoader) -> None: ... + def resource_path(self, resource: StrPath) -> str: ... + def files(self) -> pathlib.Path: ... + + class ZipReader(abc.TraversableResources): + prefix: str + archive: str + def __init__(self, loader: zipimporter, module: str) -> None: ... + def open_resource(self, resource: str) -> BufferedReader: ... + def is_resource(self, path: StrPath) -> bool: ... + def files(self) -> zipfile.Path: ... + + class MultiplexedPath(abc.Traversable): + def __init__(self, *paths: abc.Traversable) -> None: ... + def iterdir(self) -> Iterator[abc.Traversable]: ... + def read_bytes(self) -> NoReturn: ... + def read_text(self, *args: Never, **kwargs: Never) -> NoReturn: ... # type: ignore[override] + def is_dir(self) -> Literal[True]: ... + def is_file(self) -> Literal[False]: ... + + if sys.version_info >= (3, 12): + def joinpath(self, *descendants: str) -> abc.Traversable: ... + elif sys.version_info >= (3, 11): + def joinpath(self, child: str) -> abc.Traversable: ... # type: ignore[override] + else: + def joinpath(self, child: str) -> abc.Traversable: ... + + if sys.version_info < (3, 12): + __truediv__ = joinpath + + def open(self, *args: Never, **kwargs: Never) -> NoReturn: ... # type: ignore[override] + @property + def name(self) -> str: ... + + class NamespaceReader(abc.TraversableResources): + path: MultiplexedPath + def __init__(self, namespace_path: Iterable[str]) -> None: ... + def resource_path(self, resource: str) -> str: ... + def files(self) -> MultiplexedPath: ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/resources/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/resources/__init__.pyi new file mode 100644 index 0000000..28adc37 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/resources/__init__.pyi @@ -0,0 +1,86 @@ +import os +import sys +from collections.abc import Iterator +from contextlib import AbstractContextManager +from pathlib import Path +from types import ModuleType +from typing import Any, BinaryIO, Literal, TextIO +from typing_extensions import TypeAlias, deprecated + +if sys.version_info >= (3, 11): + from importlib.resources.abc import Traversable +else: + from importlib.abc import Traversable + +if sys.version_info >= (3, 11): + from importlib.resources._common import Package as Package +else: + Package: TypeAlias = str | ModuleType + +__all__ = [ + "Package", + "as_file", + "contents", + "files", + "is_resource", + "open_binary", + "open_text", + "path", + "read_binary", + "read_text", +] + +if sys.version_info >= (3, 10): + __all__ += ["ResourceReader"] + +if sys.version_info < (3, 13): + __all__ += ["Resource"] + +if sys.version_info < (3, 11): + Resource: TypeAlias = str | os.PathLike[Any] +elif sys.version_info < (3, 13): + Resource: TypeAlias = str + +if sys.version_info >= (3, 12): + from importlib.resources._common import Anchor as Anchor + + __all__ += ["Anchor"] + +if sys.version_info >= (3, 13): + from importlib.resources._functional import ( + contents as contents, + is_resource as is_resource, + open_binary as open_binary, + open_text as open_text, + path as path, + read_binary as read_binary, + read_text as read_text, + ) + +else: + def open_binary(package: Package, resource: Resource) -> BinaryIO: ... + def open_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> TextIO: ... + def read_binary(package: Package, resource: Resource) -> bytes: ... + def read_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> str: ... + def path(package: Package, resource: Resource) -> AbstractContextManager[Path, Literal[False]]: ... + def is_resource(package: Package, name: str) -> bool: ... + if sys.version_info >= (3, 11): + @deprecated("Deprecated since Python 3.11. Use `files(anchor).iterdir()`.") + def contents(package: Package) -> Iterator[str]: ... + else: + def contents(package: Package) -> Iterator[str]: ... + +if sys.version_info >= (3, 11): + from importlib.resources._common import as_file as as_file +else: + def as_file(path: Traversable) -> AbstractContextManager[Path, Literal[False]]: ... + +if sys.version_info >= (3, 11): + from importlib.resources._common import files as files +else: + def files(package: Package) -> Traversable: ... + +if sys.version_info >= (3, 11): + from importlib.resources.abc import ResourceReader as ResourceReader +elif sys.version_info >= (3, 10): + from importlib.abc import ResourceReader as ResourceReader diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/resources/_common.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/resources/_common.pyi new file mode 100644 index 0000000..11a93ca --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/resources/_common.pyi @@ -0,0 +1,42 @@ +import sys + +# Even though this file is 3.11+ only, Pyright will complain in stubtest for older versions. 
+if sys.version_info >= (3, 11): + import types + from collections.abc import Callable + from contextlib import AbstractContextManager + from importlib.resources.abc import ResourceReader, Traversable + from pathlib import Path + from typing import Literal, overload + from typing_extensions import TypeAlias, deprecated + + Package: TypeAlias = str | types.ModuleType + + if sys.version_info >= (3, 12): + Anchor: TypeAlias = Package + + def package_to_anchor( + func: Callable[[Anchor | None], Traversable], + ) -> Callable[[Anchor | None, Anchor | None], Traversable]: ... + @overload + def files(anchor: Anchor | None = None) -> Traversable: ... + @overload + @deprecated("Deprecated since Python 3.12; will be removed in Python 3.15. Use `anchor` parameter instead.") + def files(package: Anchor | None = None) -> Traversable: ... + + else: + def files(package: Package) -> Traversable: ... + + def get_resource_reader(package: types.ModuleType) -> ResourceReader | None: ... + + if sys.version_info >= (3, 12): + def resolve(cand: Anchor | None) -> types.ModuleType: ... + + else: + def resolve(cand: Package) -> types.ModuleType: ... + + if sys.version_info < (3, 12): + def get_package(package: Package) -> types.ModuleType: ... + + def from_package(package: types.ModuleType) -> Traversable: ... + def as_file(path: Traversable) -> AbstractContextManager[Path, Literal[False]]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/resources/_functional.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/resources/_functional.pyi new file mode 100644 index 0000000..71e01bc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/resources/_functional.pyi @@ -0,0 +1,31 @@ +import sys + +# Even though this file is 3.13+ only, Pyright will complain in stubtest for older versions. +if sys.version_info >= (3, 13): + from _typeshed import StrPath + from collections.abc import Iterator + from contextlib import AbstractContextManager + from importlib.resources._common import Anchor + from io import TextIOWrapper + from pathlib import Path + from typing import BinaryIO, Literal, overload + from typing_extensions import Unpack, deprecated + + def open_binary(anchor: Anchor, *path_names: StrPath) -> BinaryIO: ... + @overload + def open_text( + anchor: Anchor, *path_names: Unpack[tuple[StrPath]], encoding: str | None = "utf-8", errors: str | None = "strict" + ) -> TextIOWrapper: ... + @overload + def open_text(anchor: Anchor, *path_names: StrPath, encoding: str | None, errors: str | None = "strict") -> TextIOWrapper: ... + def read_binary(anchor: Anchor, *path_names: StrPath) -> bytes: ... + @overload + def read_text( + anchor: Anchor, *path_names: Unpack[tuple[StrPath]], encoding: str | None = "utf-8", errors: str | None = "strict" + ) -> str: ... + @overload + def read_text(anchor: Anchor, *path_names: StrPath, encoding: str | None, errors: str | None = "strict") -> str: ... + def path(anchor: Anchor, *path_names: StrPath) -> AbstractContextManager[Path, Literal[False]]: ... + def is_resource(anchor: Anchor, *path_names: StrPath) -> bool: ... + @deprecated("Deprecated since Python 3.11. Use `files(anchor).iterdir()`.") + def contents(anchor: Anchor, *path_names: StrPath) -> Iterator[str]: ... 
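The importlib.resources stubs above (both the files()/Traversable API and the 3.13 functional wrappers) map onto usage along these lines; the package and file names are placeholders:

from importlib.resources import as_file, files

# files() returns a Traversable anchored at the package; "mypkg"/"data.txt" are placeholders.
resource = files("mypkg").joinpath("data.txt")
if resource.is_file():
    text = resource.read_text(encoding="utf-8")

# as_file() materialises the resource on the filesystem when an API needs a real
# path (e.g. for resources inside a zip); the path is only valid inside the with-block.
with as_file(resource) as path:
    print(path, path.stat().st_size)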
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/resources/abc.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/resources/abc.pyi new file mode 100644 index 0000000..80d92a6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/resources/abc.pyi @@ -0,0 +1,55 @@ +import sys +from abc import ABCMeta, abstractmethod +from collections.abc import Iterator +from io import BufferedReader +from typing import IO, Any, Literal, Protocol, overload, runtime_checkable + +if sys.version_info >= (3, 11): + class ResourceReader(metaclass=ABCMeta): + @abstractmethod + def open_resource(self, resource: str) -> IO[bytes]: ... + @abstractmethod + def resource_path(self, resource: str) -> str: ... + @abstractmethod + def is_resource(self, path: str) -> bool: ... + @abstractmethod + def contents(self) -> Iterator[str]: ... + + @runtime_checkable + class Traversable(Protocol): + @abstractmethod + def is_dir(self) -> bool: ... + @abstractmethod + def is_file(self) -> bool: ... + @abstractmethod + def iterdir(self) -> Iterator[Traversable]: ... + @abstractmethod + def joinpath(self, *descendants: str) -> Traversable: ... + + # The documentation and runtime protocol allows *args, **kwargs arguments, + # but this would mean that all implementers would have to support them, + # which is not the case. + @overload + @abstractmethod + def open(self, mode: Literal["r"] = "r", *, encoding: str | None = None, errors: str | None = None) -> IO[str]: ... + @overload + @abstractmethod + def open(self, mode: Literal["rb"]) -> IO[bytes]: ... + @property + @abstractmethod + def name(self) -> str: ... + def __truediv__(self, child: str, /) -> Traversable: ... + @abstractmethod + def read_bytes(self) -> bytes: ... + @abstractmethod + def read_text(self, encoding: str | None = None) -> str: ... + + class TraversableResources(ResourceReader): + @abstractmethod + def files(self) -> Traversable: ... + def open_resource(self, resource: str) -> BufferedReader: ... + def resource_path(self, resource: Any) -> str: ... + def is_resource(self, path: str) -> bool: ... + def contents(self) -> Iterator[str]: ... + + __all__ = ["ResourceReader", "Traversable", "TraversableResources"] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/resources/readers.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/resources/readers.pyi new file mode 100644 index 0000000..0ab21fd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/resources/readers.pyi @@ -0,0 +1,14 @@ +# On py311+, things are actually defined here +# and re-exported from importlib.readers, +# but doing it this way leads to less code duplication for us + +import sys +from collections.abc import Iterable, Iterator +from typing import TypeVar + +if sys.version_info >= (3, 11): + from importlib.readers import * + + _T = TypeVar("_T") + + def remove_duplicates(items: Iterable[_T]) -> Iterator[_T]: ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/resources/simple.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/resources/simple.pyi new file mode 100644 index 0000000..c4c7581 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/resources/simple.pyi @@ -0,0 +1,56 @@ +import abc +import sys +from collections.abc import Iterator +from io import TextIOWrapper +from typing import IO, Any, BinaryIO, Literal, NoReturn, overload +from typing_extensions import Never + +if sys.version_info >= (3, 11): + from .abc import Traversable, TraversableResources + + class SimpleReader(abc.ABC): + @property + @abc.abstractmethod + def package(self) -> str: ... + @abc.abstractmethod + def children(self) -> list[SimpleReader]: ... + @abc.abstractmethod + def resources(self) -> list[str]: ... + @abc.abstractmethod + def open_binary(self, resource: str) -> BinaryIO: ... + @property + def name(self) -> str: ... + + class ResourceHandle(Traversable, metaclass=abc.ABCMeta): + parent: ResourceContainer + def __init__(self, parent: ResourceContainer, name: str) -> None: ... + def is_file(self) -> Literal[True]: ... + def is_dir(self) -> Literal[False]: ... + @overload + def open( + self, + mode: Literal["r"] = "r", + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + line_buffering: bool = False, + write_through: bool = False, + ) -> TextIOWrapper: ... + @overload + def open(self, mode: Literal["rb"]) -> BinaryIO: ... + @overload + def open(self, mode: str) -> IO[Any]: ... + def joinpath(self, name: Never) -> NoReturn: ... # type: ignore[override] + + class ResourceContainer(Traversable, metaclass=abc.ABCMeta): + reader: SimpleReader + def __init__(self, reader: SimpleReader) -> None: ... + def is_dir(self) -> Literal[True]: ... + def is_file(self) -> Literal[False]: ... + def iterdir(self) -> Iterator[ResourceHandle | ResourceContainer]: ... + def open(self, *args: Never, **kwargs: Never) -> NoReturn: ... # type: ignore[override] + if sys.version_info < (3, 12): + def joinpath(self, *descendants: str) -> Traversable: ... + + class TraversableReader(TraversableResources, SimpleReader, metaclass=abc.ABCMeta): + def files(self) -> ResourceContainer: ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/simple.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/simple.pyi new file mode 100644 index 0000000..58d8c66 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/simple.pyi @@ -0,0 +1,11 @@ +import sys + +if sys.version_info >= (3, 11): + from .resources.simple import ( + ResourceContainer as ResourceContainer, + ResourceHandle as ResourceHandle, + SimpleReader as SimpleReader, + TraversableReader as TraversableReader, + ) + + __all__ = ["SimpleReader", "ResourceHandle", "ResourceContainer", "TraversableReader"] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/util.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/util.pyi new file mode 100644 index 0000000..577d3a6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/importlib/util.pyi @@ -0,0 +1,75 @@ +import importlib.machinery +import sys +import types +from _typeshed import ReadableBuffer +from collections.abc import Callable +from importlib._bootstrap import module_from_spec as module_from_spec, spec_from_loader as spec_from_loader +from importlib._bootstrap_external import ( + MAGIC_NUMBER as MAGIC_NUMBER, + cache_from_source as cache_from_source, + decode_source as decode_source, + source_from_cache as source_from_cache, + spec_from_file_location as spec_from_file_location, +) +from importlib.abc import Loader +from types import TracebackType +from typing import Literal +from typing_extensions import ParamSpec, Self, deprecated + +_P = ParamSpec("_P") + +if sys.version_info < (3, 12): + @deprecated( + "Deprecated since Python 3.4; removed in Python 3.12. " + "`__name__`, `__package__` and `__loader__` are now set automatically." + ) + def module_for_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... + @deprecated( + "Deprecated since Python 3.4; removed in Python 3.12. " + "`__name__`, `__package__` and `__loader__` are now set automatically." + ) + def set_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... + @deprecated( + "Deprecated since Python 3.4; removed in Python 3.12. " + "`__name__`, `__package__` and `__loader__` are now set automatically." + ) + def set_package(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... + +def resolve_name(name: str, package: str | None) -> str: ... +def find_spec(name: str, package: str | None = None) -> importlib.machinery.ModuleSpec | None: ... + +class LazyLoader(Loader): + def __init__(self, loader: Loader) -> None: ... + @classmethod + def factory(cls, loader: Loader) -> Callable[..., LazyLoader]: ... + def exec_module(self, module: types.ModuleType) -> None: ... + +def source_hash(source_bytes: ReadableBuffer) -> bytes: ... + +if sys.version_info >= (3, 12): + class _incompatible_extension_module_restrictions: + def __init__(self, *, disable_check: bool) -> None: ... + disable_check: bool + old: Literal[-1, 0, 1] # exists only while entered + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + @property + def override(self) -> Literal[-1, 1]: ... 
# undocumented + +if sys.version_info >= (3, 14): + __all__ = [ + "LazyLoader", + "Loader", + "MAGIC_NUMBER", + "cache_from_source", + "decode_source", + "find_spec", + "module_from_spec", + "resolve_name", + "source_from_cache", + "source_hash", + "spec_from_file_location", + "spec_from_loader", + ] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/inspect.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/inspect.pyi new file mode 100644 index 0000000..55ae616 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/inspect.pyi @@ -0,0 +1,727 @@ +import dis +import enum +import sys +import types +from _typeshed import AnnotationForm, StrPath +from collections import OrderedDict +from collections.abc import AsyncGenerator, Awaitable, Callable, Coroutine, Generator, Mapping, Sequence, Set as AbstractSet +from types import ( + AsyncGeneratorType, + BuiltinFunctionType, + BuiltinMethodType, + ClassMethodDescriptorType, + CodeType, + CoroutineType, + FrameType, + FunctionType, + GeneratorType, + GetSetDescriptorType, + LambdaType, + MemberDescriptorType, + MethodDescriptorType, + MethodType, + MethodWrapperType, + ModuleType, + TracebackType, + WrapperDescriptorType, +) +from typing import Any, ClassVar, Final, Literal, NamedTuple, Protocol, TypeVar, overload, type_check_only +from typing_extensions import ParamSpec, Self, TypeAlias, TypeGuard, TypeIs, deprecated, disjoint_base + +if sys.version_info >= (3, 14): + from annotationlib import Format + +if sys.version_info >= (3, 11): + __all__ = [ + "ArgInfo", + "Arguments", + "Attribute", + "BlockFinder", + "BoundArguments", + "CORO_CLOSED", + "CORO_CREATED", + "CORO_RUNNING", + "CORO_SUSPENDED", + "CO_ASYNC_GENERATOR", + "CO_COROUTINE", + "CO_GENERATOR", + "CO_ITERABLE_COROUTINE", + "CO_NESTED", + "CO_NEWLOCALS", + "CO_NOFREE", + "CO_OPTIMIZED", + "CO_VARARGS", + "CO_VARKEYWORDS", + "ClassFoundException", + "ClosureVars", + "EndOfBlock", + "FrameInfo", + "FullArgSpec", + "GEN_CLOSED", + "GEN_CREATED", + "GEN_RUNNING", + "GEN_SUSPENDED", + "Parameter", + "Signature", + "TPFLAGS_IS_ABSTRACT", + "Traceback", + "classify_class_attrs", + "cleandoc", + "currentframe", + "findsource", + "formatannotation", + "formatannotationrelativeto", + "formatargvalues", + "get_annotations", + "getabsfile", + "getargs", + "getargvalues", + "getattr_static", + "getblock", + "getcallargs", + "getclasstree", + "getclosurevars", + "getcomments", + "getcoroutinelocals", + "getcoroutinestate", + "getdoc", + "getfile", + "getframeinfo", + "getfullargspec", + "getgeneratorlocals", + "getgeneratorstate", + "getinnerframes", + "getlineno", + "getmembers", + "getmembers_static", + "getmodule", + "getmodulename", + "getmro", + "getouterframes", + "getsource", + "getsourcefile", + "getsourcelines", + "indentsize", + "isabstract", + "isasyncgen", + "isasyncgenfunction", + "isawaitable", + "isbuiltin", + "isclass", + "iscode", + "iscoroutine", + "iscoroutinefunction", + "isdatadescriptor", + "isframe", + "isfunction", + "isgenerator", + "isgeneratorfunction", + "isgetsetdescriptor", + "ismemberdescriptor", + "ismethod", + "ismethoddescriptor", + "ismethodwrapper", + "ismodule", + "isroutine", + "istraceback", + "signature", + "stack", + "trace", + "unwrap", + "walktree", + ] + + if sys.version_info >= (3, 12): + __all__ += [ + "markcoroutinefunction", + "AGEN_CLOSED", + "AGEN_CREATED", + "AGEN_RUNNING", + "AGEN_SUSPENDED", + "getasyncgenlocals", + "getasyncgenstate", + "BufferFlags", + ] + if sys.version_info >= (3, 14): + 
__all__ += ["CO_HAS_DOCSTRING", "CO_METHOD", "ispackage"] + +_P = ParamSpec("_P") +_T = TypeVar("_T") +_F = TypeVar("_F", bound=Callable[..., Any]) +_T_contra = TypeVar("_T_contra", contravariant=True) +_V_contra = TypeVar("_V_contra", contravariant=True) + +# +# Types and members +# +class EndOfBlock(Exception): ... + +class BlockFinder: + indent: int + islambda: bool + started: bool + passline: bool + indecorator: bool + decoratorhasargs: bool + last: int + def tokeneater(self, type: int, token: str, srowcol: tuple[int, int], erowcol: tuple[int, int], line: str) -> None: ... + +CO_OPTIMIZED: Final = 1 +CO_NEWLOCALS: Final = 2 +CO_VARARGS: Final = 4 +CO_VARKEYWORDS: Final = 8 +CO_NESTED: Final = 16 +CO_GENERATOR: Final = 32 +CO_NOFREE: Final = 64 +CO_COROUTINE: Final = 128 +CO_ITERABLE_COROUTINE: Final = 256 +CO_ASYNC_GENERATOR: Final = 512 +TPFLAGS_IS_ABSTRACT: Final = 1048576 +if sys.version_info >= (3, 14): + CO_HAS_DOCSTRING: Final = 67108864 + CO_METHOD: Final = 134217728 + +modulesbyfile: dict[str, Any] + +_GetMembersPredicateTypeGuard: TypeAlias = Callable[[Any], TypeGuard[_T]] +_GetMembersPredicateTypeIs: TypeAlias = Callable[[Any], TypeIs[_T]] +_GetMembersPredicate: TypeAlias = Callable[[Any], bool] +_GetMembersReturn: TypeAlias = list[tuple[str, _T]] + +@overload +def getmembers(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturn[_T]: ... +@overload +def getmembers(object: object, predicate: _GetMembersPredicateTypeIs[_T]) -> _GetMembersReturn[_T]: ... +@overload +def getmembers(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn[Any]: ... + +if sys.version_info >= (3, 11): + @overload + def getmembers_static(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturn[_T]: ... + @overload + def getmembers_static(object: object, predicate: _GetMembersPredicateTypeIs[_T]) -> _GetMembersReturn[_T]: ... + @overload + def getmembers_static(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn[Any]: ... + +def getmodulename(path: StrPath) -> str | None: ... +def ismodule(object: object) -> TypeIs[ModuleType]: ... +def isclass(object: object) -> TypeIs[type[Any]]: ... +def ismethod(object: object) -> TypeIs[MethodType]: ... + +if sys.version_info >= (3, 14): + # Not TypeIs because it does not return True for all modules + def ispackage(object: object) -> TypeGuard[ModuleType]: ... + +def isfunction(object: object) -> TypeIs[FunctionType]: ... + +if sys.version_info >= (3, 12): + def markcoroutinefunction(func: _F) -> _F: ... + +@overload +def isgeneratorfunction(obj: Callable[..., Generator[Any, Any, Any]]) -> bool: ... +@overload +def isgeneratorfunction(obj: Callable[_P, Any]) -> TypeGuard[Callable[_P, GeneratorType[Any, Any, Any]]]: ... +@overload +def isgeneratorfunction(obj: object) -> TypeGuard[Callable[..., GeneratorType[Any, Any, Any]]]: ... +@overload +def iscoroutinefunction(obj: Callable[..., Coroutine[Any, Any, Any]]) -> bool: ... +@overload +def iscoroutinefunction(obj: Callable[_P, Awaitable[_T]]) -> TypeGuard[Callable[_P, CoroutineType[Any, Any, _T]]]: ... +@overload +def iscoroutinefunction(obj: Callable[_P, object]) -> TypeGuard[Callable[_P, CoroutineType[Any, Any, Any]]]: ... +@overload +def iscoroutinefunction(obj: object) -> TypeGuard[Callable[..., CoroutineType[Any, Any, Any]]]: ... +def isgenerator(object: object) -> TypeIs[GeneratorType[Any, Any, Any]]: ... +def iscoroutine(object: object) -> TypeIs[CoroutineType[Any, Any, Any]]: ... 
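The inspect predicates stubbed above are typically used as getmembers() filters; the TypeGuard/TypeIs overloads are what let type checkers narrow the second tuple element. A small hedged sketch:

import asyncio
import inspect

class Service:
    def sync_call(self) -> str:
        return "sync"

    async def async_call(self) -> str:
        await asyncio.sleep(0)
        return "async"

coros = inspect.getmembers(Service, inspect.iscoroutinefunction)
funcs = inspect.getmembers(Service, inspect.isfunction)
print([name for name, _ in coros])   # ['async_call']
print([name for name, _ in funcs])   # ['async_call', 'sync_call']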
+def isawaitable(object: object) -> TypeIs[Awaitable[Any]]: ... +@overload +def isasyncgenfunction(obj: Callable[..., AsyncGenerator[Any, Any]]) -> bool: ... +@overload +def isasyncgenfunction(obj: Callable[_P, Any]) -> TypeGuard[Callable[_P, AsyncGeneratorType[Any, Any]]]: ... +@overload +def isasyncgenfunction(obj: object) -> TypeGuard[Callable[..., AsyncGeneratorType[Any, Any]]]: ... +@type_check_only +class _SupportsSet(Protocol[_T_contra, _V_contra]): + def __set__(self, instance: _T_contra, value: _V_contra, /) -> None: ... + +@type_check_only +class _SupportsDelete(Protocol[_T_contra]): + def __delete__(self, instance: _T_contra, /) -> None: ... + +def isasyncgen(object: object) -> TypeIs[AsyncGeneratorType[Any, Any]]: ... +def istraceback(object: object) -> TypeIs[TracebackType]: ... +def isframe(object: object) -> TypeIs[FrameType]: ... +def iscode(object: object) -> TypeIs[CodeType]: ... +def isbuiltin(object: object) -> TypeIs[BuiltinFunctionType]: ... + +if sys.version_info >= (3, 11): + def ismethodwrapper(object: object) -> TypeIs[MethodWrapperType]: ... + +def isroutine( + object: object, +) -> TypeIs[ + FunctionType + | LambdaType + | MethodType + | BuiltinFunctionType + | BuiltinMethodType + | WrapperDescriptorType + | MethodDescriptorType + | ClassMethodDescriptorType +]: ... +def ismethoddescriptor(object: object) -> TypeIs[MethodDescriptorType]: ... +def ismemberdescriptor(object: object) -> TypeIs[MemberDescriptorType]: ... +def isabstract(object: object) -> bool: ... +def isgetsetdescriptor(object: object) -> TypeIs[GetSetDescriptorType]: ... +def isdatadescriptor(object: object) -> TypeIs[_SupportsSet[Any, Any] | _SupportsDelete[Any]]: ... + +# +# Retrieving source code +# +_SourceObjectType: TypeAlias = ( + ModuleType | type[Any] | MethodType | FunctionType | TracebackType | FrameType | CodeType | Callable[..., Any] +) + +def findsource(object: _SourceObjectType) -> tuple[list[str], int]: ... +def getabsfile(object: _SourceObjectType, _filename: str | None = None) -> str: ... + +# Special-case the two most common input types here +# to avoid the annoyingly vague `Sequence[str]` return type +@overload +def getblock(lines: list[str]) -> list[str]: ... +@overload +def getblock(lines: tuple[str, ...]) -> tuple[str, ...]: ... +@overload +def getblock(lines: Sequence[str]) -> Sequence[str]: ... +def getdoc(object: object) -> str | None: ... +def getcomments(object: object) -> str | None: ... +def getfile(object: _SourceObjectType) -> str: ... +def getmodule(object: object, _filename: str | None = None) -> ModuleType | None: ... +def getsourcefile(object: _SourceObjectType) -> str | None: ... +def getsourcelines(object: _SourceObjectType) -> tuple[list[str], int]: ... +def getsource(object: _SourceObjectType) -> str: ... +def cleandoc(doc: str) -> str: ... +def indentsize(line: str) -> int: ... + +_IntrospectableCallable: TypeAlias = Callable[..., Any] + +# +# Introspecting callables with the Signature object +# +if sys.version_info >= (3, 14): + def signature( + obj: _IntrospectableCallable, + *, + follow_wrapped: bool = True, + globals: Mapping[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + eval_str: bool = False, + annotation_format: Format = Format.VALUE, # noqa: Y011 + ) -> Signature: ... + +elif sys.version_info >= (3, 10): + def signature( + obj: _IntrospectableCallable, + *, + follow_wrapped: bool = True, + globals: Mapping[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + eval_str: bool = False, + ) -> Signature: ... 
+ +else: + def signature(obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Signature: ... + +class _void: ... +class _empty: ... + +class Signature: + __slots__ = ("_return_annotation", "_parameters") + def __init__( + self, parameters: Sequence[Parameter] | None = None, *, return_annotation: Any = ..., __validate_parameters__: bool = True + ) -> None: ... + empty = _empty + @property + def parameters(self) -> types.MappingProxyType[str, Parameter]: ... + @property + def return_annotation(self) -> Any: ... + def bind(self, *args: Any, **kwargs: Any) -> BoundArguments: ... + def bind_partial(self, *args: Any, **kwargs: Any) -> BoundArguments: ... + def replace(self, *, parameters: Sequence[Parameter] | type[_void] | None = ..., return_annotation: Any = ...) -> Self: ... + __replace__ = replace + if sys.version_info >= (3, 14): + @classmethod + def from_callable( + cls, + obj: _IntrospectableCallable, + *, + follow_wrapped: bool = True, + globals: Mapping[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + eval_str: bool = False, + annotation_format: Format = Format.VALUE, # noqa: Y011 + ) -> Self: ... + elif sys.version_info >= (3, 10): + @classmethod + def from_callable( + cls, + obj: _IntrospectableCallable, + *, + follow_wrapped: bool = True, + globals: Mapping[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + eval_str: bool = False, + ) -> Self: ... + else: + @classmethod + def from_callable(cls, obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Self: ... + if sys.version_info >= (3, 14): + def format(self, *, max_width: int | None = None, quote_annotation_strings: bool = True) -> str: ... + elif sys.version_info >= (3, 13): + def format(self, *, max_width: int | None = None) -> str: ... + + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + +if sys.version_info >= (3, 14): + from annotationlib import get_annotations as get_annotations +elif sys.version_info >= (3, 10): + def get_annotations( + obj: Callable[..., object] | type[object] | ModuleType, # any callable, class, or module + *, + globals: Mapping[str, Any] | None = None, # value types depend on the key + locals: Mapping[str, Any] | None = None, # value types depend on the key + eval_str: bool = False, + ) -> dict[str, AnnotationForm]: ... # values are type expressions + +# The name is the same as the enum's name in CPython +class _ParameterKind(enum.IntEnum): + POSITIONAL_ONLY = 0 + POSITIONAL_OR_KEYWORD = 1 + VAR_POSITIONAL = 2 + KEYWORD_ONLY = 3 + VAR_KEYWORD = 4 + + @property + def description(self) -> str: ... + +if sys.version_info >= (3, 12): + AGEN_CREATED: Final = "AGEN_CREATED" + AGEN_RUNNING: Final = "AGEN_RUNNING" + AGEN_SUSPENDED: Final = "AGEN_SUSPENDED" + AGEN_CLOSED: Final = "AGEN_CLOSED" + + def getasyncgenstate( + agen: AsyncGenerator[Any, Any], + ) -> Literal["AGEN_CREATED", "AGEN_RUNNING", "AGEN_SUSPENDED", "AGEN_CLOSED"]: ... + def getasyncgenlocals(agen: AsyncGeneratorType[Any, Any]) -> dict[str, Any]: ... + +class Parameter: + __slots__ = ("_name", "_kind", "_default", "_annotation") + def __init__(self, name: str, kind: _ParameterKind, *, default: Any = ..., annotation: Any = ...) -> None: ... 
+ empty = _empty + + POSITIONAL_ONLY: ClassVar[Literal[_ParameterKind.POSITIONAL_ONLY]] + POSITIONAL_OR_KEYWORD: ClassVar[Literal[_ParameterKind.POSITIONAL_OR_KEYWORD]] + VAR_POSITIONAL: ClassVar[Literal[_ParameterKind.VAR_POSITIONAL]] + KEYWORD_ONLY: ClassVar[Literal[_ParameterKind.KEYWORD_ONLY]] + VAR_KEYWORD: ClassVar[Literal[_ParameterKind.VAR_KEYWORD]] + @property + def name(self) -> str: ... + @property + def default(self) -> Any: ... + @property + def kind(self) -> _ParameterKind: ... + @property + def annotation(self) -> Any: ... + def replace( + self, + *, + name: str | type[_void] = ..., + kind: _ParameterKind | type[_void] = ..., + default: Any = ..., + annotation: Any = ..., + ) -> Self: ... + if sys.version_info >= (3, 13): + __replace__ = replace + + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + +class BoundArguments: + __slots__ = ("arguments", "_signature", "__weakref__") + arguments: OrderedDict[str, Any] + @property + def args(self) -> tuple[Any, ...]: ... + @property + def kwargs(self) -> dict[str, Any]: ... + @property + def signature(self) -> Signature: ... + def __init__(self, signature: Signature, arguments: OrderedDict[str, Any]) -> None: ... + def apply_defaults(self) -> None: ... + def __eq__(self, other: object) -> bool: ... + __hash__: ClassVar[None] # type: ignore[assignment] + +# +# Classes and functions +# + +_ClassTreeItem: TypeAlias = list[tuple[type, ...]] | list[_ClassTreeItem] + +def getclasstree(classes: list[type], unique: bool = False) -> _ClassTreeItem: ... +def walktree(classes: list[type], children: Mapping[type[Any], list[type]], parent: type[Any] | None) -> _ClassTreeItem: ... + +class Arguments(NamedTuple): + args: list[str] + varargs: str | None + varkw: str | None + +def getargs(co: CodeType) -> Arguments: ... + +if sys.version_info < (3, 11): + @deprecated("Deprecated since Python 3.0; removed in Python 3.11.") + class ArgSpec(NamedTuple): + args: list[str] + varargs: str | None + keywords: str | None + defaults: tuple[Any, ...] + + @deprecated("Deprecated since Python 3.0; removed in Python 3.11. Use `inspect.signature()` instead.") + def getargspec(func: object) -> ArgSpec: ... + +class FullArgSpec(NamedTuple): + args: list[str] + varargs: str | None + varkw: str | None + defaults: tuple[Any, ...] | None + kwonlyargs: list[str] + kwonlydefaults: dict[str, Any] | None + annotations: dict[str, Any] + +def getfullargspec(func: object) -> FullArgSpec: ... + +class ArgInfo(NamedTuple): + args: list[str] + varargs: str | None + keywords: str | None + locals: dict[str, Any] + +def getargvalues(frame: FrameType) -> ArgInfo: ... + +if sys.version_info >= (3, 14): + def formatannotation(annotation: object, base_module: str | None = None, *, quote_annotation_strings: bool = True) -> str: ... + +else: + def formatannotation(annotation: object, base_module: str | None = None) -> str: ... + +def formatannotationrelativeto(object: object) -> Callable[[object], str]: ... + +if sys.version_info < (3, 11): + @deprecated( + "Deprecated since Python 3.5; removed in Python 3.11. Use `inspect.signature()` and the `Signature` class instead." + ) + def formatargspec( + args: list[str], + varargs: str | None = None, + varkw: str | None = None, + defaults: tuple[Any, ...] 
| None = None, + kwonlyargs: Sequence[str] | None = (), + kwonlydefaults: Mapping[str, Any] | None = {}, + annotations: Mapping[str, Any] = {}, + formatarg: Callable[[str], str] = ..., + formatvarargs: Callable[[str], str] = ..., + formatvarkw: Callable[[str], str] = ..., + formatvalue: Callable[[Any], str] = ..., + formatreturns: Callable[[Any], str] = ..., + formatannotation: Callable[[Any], str] = ..., + ) -> str: ... + +def formatargvalues( + args: list[str], + varargs: str | None, + varkw: str | None, + locals: Mapping[str, Any] | None, + formatarg: Callable[[str], str] | None = ..., + formatvarargs: Callable[[str], str] | None = ..., + formatvarkw: Callable[[str], str] | None = ..., + formatvalue: Callable[[Any], str] | None = ..., +) -> str: ... +def getmro(cls: type) -> tuple[type, ...]: ... +def getcallargs(func: Callable[_P, Any], /, *args: _P.args, **kwds: _P.kwargs) -> dict[str, Any]: ... + +class ClosureVars(NamedTuple): + nonlocals: Mapping[str, Any] + globals: Mapping[str, Any] + builtins: Mapping[str, Any] + unbound: AbstractSet[str] + +def getclosurevars(func: _IntrospectableCallable) -> ClosureVars: ... +def unwrap(func: Callable[..., Any], *, stop: Callable[[Callable[..., Any]], Any] | None = None) -> Any: ... + +# +# The interpreter stack +# + +if sys.version_info >= (3, 11): + class _Traceback(NamedTuple): + filename: str + lineno: int + function: str + code_context: list[str] | None + index: int | None # type: ignore[assignment] + + class _FrameInfo(NamedTuple): + frame: FrameType + filename: str + lineno: int + function: str + code_context: list[str] | None + index: int | None # type: ignore[assignment] + + if sys.version_info >= (3, 12): + class Traceback(_Traceback): + positions: dis.Positions | None + def __new__( + cls, + filename: str, + lineno: int, + function: str, + code_context: list[str] | None, + index: int | None, + *, + positions: dis.Positions | None = None, + ) -> Self: ... + + class FrameInfo(_FrameInfo): + positions: dis.Positions | None + def __new__( + cls, + frame: FrameType, + filename: str, + lineno: int, + function: str, + code_context: list[str] | None, + index: int | None, + *, + positions: dis.Positions | None = None, + ) -> Self: ... + + else: + @disjoint_base + class Traceback(_Traceback): + positions: dis.Positions | None + def __new__( + cls, + filename: str, + lineno: int, + function: str, + code_context: list[str] | None, + index: int | None, + *, + positions: dis.Positions | None = None, + ) -> Self: ... + + @disjoint_base + class FrameInfo(_FrameInfo): + positions: dis.Positions | None + def __new__( + cls, + frame: FrameType, + filename: str, + lineno: int, + function: str, + code_context: list[str] | None, + index: int | None, + *, + positions: dis.Positions | None = None, + ) -> Self: ... + +else: + class Traceback(NamedTuple): + filename: str + lineno: int + function: str + code_context: list[str] | None + index: int | None # type: ignore[assignment] + + class FrameInfo(NamedTuple): + frame: FrameType + filename: str + lineno: int + function: str + code_context: list[str] | None + index: int | None # type: ignore[assignment] + +def getframeinfo(frame: FrameType | TracebackType, context: int = 1) -> Traceback: ... +def getouterframes(frame: Any, context: int = 1) -> list[FrameInfo]: ... +def getinnerframes(tb: TracebackType, context: int = 1) -> list[FrameInfo]: ... +def getlineno(frame: FrameType) -> int: ... +def currentframe() -> FrameType | None: ... +def stack(context: int = 1) -> list[FrameInfo]: ... 
+def trace(context: int = 1) -> list[FrameInfo]: ... + +# +# Fetching attributes statically +# + +def getattr_static(obj: object, attr: str, default: Any | None = ...) -> Any: ... + +# +# Current State of Generators and Coroutines +# + +GEN_CREATED: Final = "GEN_CREATED" +GEN_RUNNING: Final = "GEN_RUNNING" +GEN_SUSPENDED: Final = "GEN_SUSPENDED" +GEN_CLOSED: Final = "GEN_CLOSED" + +def getgeneratorstate( + generator: Generator[Any, Any, Any], +) -> Literal["GEN_CREATED", "GEN_RUNNING", "GEN_SUSPENDED", "GEN_CLOSED"]: ... + +CORO_CREATED: Final = "CORO_CREATED" +CORO_RUNNING: Final = "CORO_RUNNING" +CORO_SUSPENDED: Final = "CORO_SUSPENDED" +CORO_CLOSED: Final = "CORO_CLOSED" + +def getcoroutinestate( + coroutine: Coroutine[Any, Any, Any], +) -> Literal["CORO_CREATED", "CORO_RUNNING", "CORO_SUSPENDED", "CORO_CLOSED"]: ... +def getgeneratorlocals(generator: Generator[Any, Any, Any]) -> dict[str, Any]: ... +def getcoroutinelocals(coroutine: Coroutine[Any, Any, Any]) -> dict[str, Any]: ... + +# Create private type alias to avoid conflict with symbol of same +# name created in Attribute class. +_Object: TypeAlias = object + +class Attribute(NamedTuple): + name: str + kind: Literal["class method", "static method", "property", "method", "data"] + defining_class: type + object: _Object + +def classify_class_attrs(cls: type) -> list[Attribute]: ... + +class ClassFoundException(Exception): ... + +if sys.version_info >= (3, 12): + class BufferFlags(enum.IntFlag): + SIMPLE = 0 + WRITABLE = 1 + FORMAT = 4 + ND = 8 + STRIDES = 24 + C_CONTIGUOUS = 56 + F_CONTIGUOUS = 88 + ANY_CONTIGUOUS = 152 + INDIRECT = 280 + CONTIG = 9 + CONTIG_RO = 8 + STRIDED = 25 + STRIDED_RO = 24 + RECORDS = 29 + RECORDS_RO = 28 + FULL = 285 + FULL_RO = 284 + READ = 256 + WRITE = 512 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/io.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/io.pyi new file mode 100644 index 0000000..d301d70 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/io.pyi @@ -0,0 +1,75 @@ +import abc +import sys +from _io import ( + DEFAULT_BUFFER_SIZE as DEFAULT_BUFFER_SIZE, + BlockingIOError as BlockingIOError, + BufferedRandom as BufferedRandom, + BufferedReader as BufferedReader, + BufferedRWPair as BufferedRWPair, + BufferedWriter as BufferedWriter, + BytesIO as BytesIO, + FileIO as FileIO, + IncrementalNewlineDecoder as IncrementalNewlineDecoder, + StringIO as StringIO, + TextIOWrapper as TextIOWrapper, + _BufferedIOBase, + _IOBase, + _RawIOBase, + _TextIOBase, + _WrappedBuffer as _WrappedBuffer, # used elsewhere in typeshed + open as open, + open_code as open_code, +) +from typing import Final, Protocol, TypeVar + +__all__ = [ + "BlockingIOError", + "open", + "open_code", + "IOBase", + "RawIOBase", + "FileIO", + "BytesIO", + "StringIO", + "BufferedIOBase", + "BufferedReader", + "BufferedWriter", + "BufferedRWPair", + "BufferedRandom", + "TextIOBase", + "TextIOWrapper", + "UnsupportedOperation", + "SEEK_SET", + "SEEK_CUR", + "SEEK_END", +] + +if sys.version_info >= (3, 14): + __all__ += ["Reader", "Writer"] + +if sys.version_info >= (3, 11): + from _io import text_encoding as text_encoding + + __all__ += ["DEFAULT_BUFFER_SIZE", "IncrementalNewlineDecoder", "text_encoding"] + +_T_co = TypeVar("_T_co", covariant=True) +_T_contra = TypeVar("_T_contra", contravariant=True) + +SEEK_SET: Final = 0 +SEEK_CUR: Final = 1 +SEEK_END: Final = 2 + +class UnsupportedOperation(OSError, ValueError): ... +class IOBase(_IOBase, metaclass=abc.ABCMeta): ... 
+class RawIOBase(_RawIOBase, IOBase): ... +class BufferedIOBase(_BufferedIOBase, IOBase): ... +class TextIOBase(_TextIOBase, IOBase): ... + +if sys.version_info >= (3, 14): + class Reader(Protocol[_T_co]): + __slots__ = () + def read(self, size: int = ..., /) -> _T_co: ... + + class Writer(Protocol[_T_contra]): + __slots__ = () + def write(self, data: _T_contra, /) -> int: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ipaddress.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ipaddress.pyi new file mode 100644 index 0000000..e2f3def --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ipaddress.pyi @@ -0,0 +1,247 @@ +import sys +from collections.abc import Iterable, Iterator +from typing import Any, Final, Generic, Literal, TypeVar, overload +from typing_extensions import Self, TypeAlias + +# Undocumented length constants +IPV4LENGTH: Final = 32 +IPV6LENGTH: Final = 128 + +_A = TypeVar("_A", IPv4Address, IPv6Address) +_N = TypeVar("_N", IPv4Network, IPv6Network) + +_RawIPAddress: TypeAlias = int | str | bytes | IPv4Address | IPv6Address +_RawNetworkPart: TypeAlias = IPv4Network | IPv6Network | IPv4Interface | IPv6Interface + +def ip_address(address: _RawIPAddress) -> IPv4Address | IPv6Address: ... +def ip_network( + address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int], strict: bool = True +) -> IPv4Network | IPv6Network: ... +def ip_interface( + address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int], +) -> IPv4Interface | IPv6Interface: ... + +class _IPAddressBase: + __slots__ = () + @property + def compressed(self) -> str: ... + @property + def exploded(self) -> str: ... + @property + def reverse_pointer(self) -> str: ... + if sys.version_info < (3, 14): + @property + def version(self) -> int: ... + +class _BaseAddress(_IPAddressBase): + __slots__ = () + def __add__(self, other: int) -> Self: ... + def __hash__(self) -> int: ... + def __int__(self) -> int: ... + def __sub__(self, other: int) -> Self: ... + def __format__(self, fmt: str) -> str: ... + def __eq__(self, other: object) -> bool: ... + def __lt__(self, other: Self) -> bool: ... + if sys.version_info >= (3, 11): + def __ge__(self, other: Self) -> bool: ... + def __gt__(self, other: Self) -> bool: ... + def __le__(self, other: Self) -> bool: ... + else: + def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + +class _BaseNetwork(_IPAddressBase, Generic[_A]): + network_address: _A + netmask: _A + def __contains__(self, other: Any) -> bool: ... + def __getitem__(self, n: int) -> _A: ... + def __iter__(self) -> Iterator[_A]: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def __lt__(self, other: Self) -> bool: ... + if sys.version_info >= (3, 11): + def __ge__(self, other: Self) -> bool: ... + def __gt__(self, other: Self) -> bool: ... + def __le__(self, other: Self) -> bool: ... + else: + def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + + def address_exclude(self, other: Self) -> Iterator[Self]: ... + @property + def broadcast_address(self) -> _A: ... + def compare_networks(self, other: Self) -> int: ... 
+ def hosts(self) -> Iterator[_A] | list[_A]: ... + @property + def is_global(self) -> bool: ... + @property + def is_link_local(self) -> bool: ... + @property + def is_loopback(self) -> bool: ... + @property + def is_multicast(self) -> bool: ... + @property + def is_private(self) -> bool: ... + @property + def is_reserved(self) -> bool: ... + @property + def is_unspecified(self) -> bool: ... + @property + def num_addresses(self) -> int: ... + def overlaps(self, other: _BaseNetwork[IPv4Address] | _BaseNetwork[IPv6Address]) -> bool: ... + @property + def prefixlen(self) -> int: ... + def subnet_of(self, other: Self) -> bool: ... + def supernet_of(self, other: Self) -> bool: ... + def subnets(self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Iterator[Self]: ... + def supernet(self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Self: ... + @property + def with_hostmask(self) -> str: ... + @property + def with_netmask(self) -> str: ... + @property + def with_prefixlen(self) -> str: ... + @property + def hostmask(self) -> _A: ... + +class _BaseV4: + __slots__ = () + if sys.version_info >= (3, 14): + version: Final = 4 + max_prefixlen: Final = 32 + else: + @property + def version(self) -> Literal[4]: ... + @property + def max_prefixlen(self) -> Literal[32]: ... + +class IPv4Address(_BaseV4, _BaseAddress): + __slots__ = ("_ip", "__weakref__") + def __init__(self, address: object) -> None: ... + @property + def is_global(self) -> bool: ... + @property + def is_link_local(self) -> bool: ... + @property + def is_loopback(self) -> bool: ... + @property + def is_multicast(self) -> bool: ... + @property + def is_private(self) -> bool: ... + @property + def is_reserved(self) -> bool: ... + @property + def is_unspecified(self) -> bool: ... + @property + def packed(self) -> bytes: ... + if sys.version_info >= (3, 13): + @property + def ipv6_mapped(self) -> IPv6Address: ... + +class IPv4Network(_BaseV4, _BaseNetwork[IPv4Address]): + def __init__(self, address: object, strict: bool = True) -> None: ... + +class IPv4Interface(IPv4Address): + netmask: IPv4Address + network: IPv4Network + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + @property + def hostmask(self) -> IPv4Address: ... + @property + def ip(self) -> IPv4Address: ... + @property + def with_hostmask(self) -> str: ... + @property + def with_netmask(self) -> str: ... + @property + def with_prefixlen(self) -> str: ... + +class _BaseV6: + __slots__ = () + if sys.version_info >= (3, 14): + version: Final = 6 + max_prefixlen: Final = 128 + else: + @property + def version(self) -> Literal[6]: ... + @property + def max_prefixlen(self) -> Literal[128]: ... + +class IPv6Address(_BaseV6, _BaseAddress): + __slots__ = ("_ip", "_scope_id", "__weakref__") + def __init__(self, address: object) -> None: ... + @property + def is_global(self) -> bool: ... + @property + def is_link_local(self) -> bool: ... + @property + def is_loopback(self) -> bool: ... + @property + def is_multicast(self) -> bool: ... + @property + def is_private(self) -> bool: ... + @property + def is_reserved(self) -> bool: ... + @property + def is_unspecified(self) -> bool: ... + @property + def packed(self) -> bytes: ... + @property + def ipv4_mapped(self) -> IPv4Address | None: ... + @property + def is_site_local(self) -> bool: ... + @property + def sixtofour(self) -> IPv4Address | None: ... + @property + def teredo(self) -> tuple[IPv4Address, IPv4Address] | None: ... + @property + def scope_id(self) -> str | None: ... 
+ def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + +class IPv6Network(_BaseV6, _BaseNetwork[IPv6Address]): + def __init__(self, address: object, strict: bool = True) -> None: ... + @property + def is_site_local(self) -> bool: ... + +class IPv6Interface(IPv6Address): + netmask: IPv6Address + network: IPv6Network + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + @property + def hostmask(self) -> IPv6Address: ... + @property + def ip(self) -> IPv6Address: ... + @property + def with_hostmask(self) -> str: ... + @property + def with_netmask(self) -> str: ... + @property + def with_prefixlen(self) -> str: ... + +def v4_int_to_packed(address: int) -> bytes: ... +def v6_int_to_packed(address: int) -> bytes: ... + +# Third overload is technically incorrect, but convenient when first and last are return values of ip_address() +@overload +def summarize_address_range(first: IPv4Address, last: IPv4Address) -> Iterator[IPv4Network]: ... +@overload +def summarize_address_range(first: IPv6Address, last: IPv6Address) -> Iterator[IPv6Network]: ... +@overload +def summarize_address_range( + first: IPv4Address | IPv6Address, last: IPv4Address | IPv6Address +) -> Iterator[IPv4Network] | Iterator[IPv6Network]: ... +def collapse_addresses(addresses: Iterable[_N]) -> Iterator[_N]: ... +@overload +def get_mixed_type_key(obj: _A) -> tuple[int, _A]: ... +@overload +def get_mixed_type_key(obj: IPv4Network) -> tuple[int, IPv4Address, IPv4Address]: ... +@overload +def get_mixed_type_key(obj: IPv6Network) -> tuple[int, IPv6Address, IPv6Address]: ... + +class AddressValueError(ValueError): ... +class NetmaskValueError(ValueError): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/itertools.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/itertools.pyi new file mode 100644 index 0000000..73745fe --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/itertools.pyi @@ -0,0 +1,351 @@ +import sys +from _typeshed import MaybeNone +from collections.abc import Callable, Iterable, Iterator +from types import GenericAlias +from typing import Any, Generic, Literal, SupportsComplex, SupportsFloat, SupportsIndex, SupportsInt, TypeVar, overload +from typing_extensions import Self, TypeAlias, disjoint_base + +_T = TypeVar("_T") +_S = TypeVar("_S") +_N = TypeVar("_N", int, float, SupportsFloat, SupportsInt, SupportsIndex, SupportsComplex) +_T_co = TypeVar("_T_co", covariant=True) +_S_co = TypeVar("_S_co", covariant=True) +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_T4 = TypeVar("_T4") +_T5 = TypeVar("_T5") +_T6 = TypeVar("_T6") +_T7 = TypeVar("_T7") +_T8 = TypeVar("_T8") +_T9 = TypeVar("_T9") +_T10 = TypeVar("_T10") + +_Step: TypeAlias = SupportsFloat | SupportsInt | SupportsIndex | SupportsComplex + +_Predicate: TypeAlias = Callable[[_T], object] + +# Technically count can take anything that implements a number protocol and has an add method +# but we can't enforce the add method +@disjoint_base +class count(Iterator[_N]): + @overload + def __new__(cls) -> count[int]: ... + @overload + def __new__(cls, start: _N, step: _Step = ...) -> count[_N]: ... + @overload + def __new__(cls, *, step: _N) -> count[_N]: ... + def __next__(self) -> _N: ... + def __iter__(self) -> Self: ... + +@disjoint_base +class cycle(Iterator[_T]): + def __new__(cls, iterable: Iterable[_T], /) -> Self: ... + def __next__(self) -> _T: ... + def __iter__(self) -> Self: ... 
+ +@disjoint_base +class repeat(Iterator[_T]): + @overload + def __new__(cls, object: _T) -> Self: ... + @overload + def __new__(cls, object: _T, times: int) -> Self: ... + def __next__(self) -> _T: ... + def __iter__(self) -> Self: ... + def __length_hint__(self) -> int: ... + +@disjoint_base +class accumulate(Iterator[_T]): + @overload + def __new__(cls, iterable: Iterable[_T], func: None = None, *, initial: _T | None = ...) -> Self: ... + @overload + def __new__(cls, iterable: Iterable[_S], func: Callable[[_T, _S], _T], *, initial: _T | None = ...) -> Self: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... + +@disjoint_base +class chain(Iterator[_T]): + def __new__(cls, *iterables: Iterable[_T]) -> Self: ... + def __next__(self) -> _T: ... + def __iter__(self) -> Self: ... + @classmethod + # We use type[Any] and not type[_S] to not lose the type inference from __iterable + def from_iterable(cls: type[Any], iterable: Iterable[Iterable[_S]], /) -> chain[_S]: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +@disjoint_base +class compress(Iterator[_T]): + def __new__(cls, data: Iterable[_T], selectors: Iterable[Any]) -> Self: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... + +@disjoint_base +class dropwhile(Iterator[_T]): + def __new__(cls, predicate: _Predicate[_T], iterable: Iterable[_T], /) -> Self: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... + +@disjoint_base +class filterfalse(Iterator[_T]): + def __new__(cls, function: _Predicate[_T] | None, iterable: Iterable[_T], /) -> Self: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... + +@disjoint_base +class groupby(Iterator[tuple[_T_co, Iterator[_S_co]]], Generic[_T_co, _S_co]): + @overload + def __new__(cls, iterable: Iterable[_T1], key: None = None) -> groupby[_T1, _T1]: ... + @overload + def __new__(cls, iterable: Iterable[_T1], key: Callable[[_T1], _T2]) -> groupby[_T2, _T1]: ... + def __iter__(self) -> Self: ... + def __next__(self) -> tuple[_T_co, Iterator[_S_co]]: ... + +@disjoint_base +class islice(Iterator[_T]): + @overload + def __new__(cls, iterable: Iterable[_T], stop: int | None, /) -> Self: ... + @overload + def __new__(cls, iterable: Iterable[_T], start: int | None, stop: int | None, step: int | None = ..., /) -> Self: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... + +@disjoint_base +class starmap(Iterator[_T_co]): + def __new__(cls, function: Callable[..., _T], iterable: Iterable[Iterable[Any]], /) -> starmap[_T]: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... + +@disjoint_base +class takewhile(Iterator[_T]): + def __new__(cls, predicate: _Predicate[_T], iterable: Iterable[_T], /) -> Self: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... + +def tee(iterable: Iterable[_T], n: int = 2, /) -> tuple[Iterator[_T], ...]: ... +@disjoint_base +class zip_longest(Iterator[_T_co]): + # one iterable (fillvalue doesn't matter) + @overload + def __new__(cls, iter1: Iterable[_T1], /, *, fillvalue: object = ...) -> zip_longest[tuple[_T1]]: ... + # two iterables + @overload + # In the overloads without fillvalue, all of the tuple members could theoretically be None, + # but we return Any instead to avoid false positives for code where we know one of the iterables + # is longer. + def __new__(cls, iter1: Iterable[_T1], iter2: Iterable[_T2], /) -> zip_longest[tuple[_T1 | MaybeNone, _T2 | MaybeNone]]: ... 
+ @overload + def __new__( + cls, iter1: Iterable[_T1], iter2: Iterable[_T2], /, *, fillvalue: _T + ) -> zip_longest[tuple[_T1 | _T, _T2 | _T]]: ... + # three iterables + @overload + def __new__( + cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], / + ) -> zip_longest[tuple[_T1 | MaybeNone, _T2 | MaybeNone, _T3 | MaybeNone]]: ... + @overload + def __new__( + cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], /, *, fillvalue: _T + ) -> zip_longest[tuple[_T1 | _T, _T2 | _T, _T3 | _T]]: ... + # four iterables + @overload + def __new__( + cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], / + ) -> zip_longest[tuple[_T1 | MaybeNone, _T2 | MaybeNone, _T3 | MaybeNone, _T4 | MaybeNone]]: ... + @overload + def __new__( + cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], /, *, fillvalue: _T + ) -> zip_longest[tuple[_T1 | _T, _T2 | _T, _T3 | _T, _T4 | _T]]: ... + # five iterables + @overload + def __new__( + cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5], / + ) -> zip_longest[tuple[_T1 | MaybeNone, _T2 | MaybeNone, _T3 | MaybeNone, _T4 | MaybeNone, _T5 | MaybeNone]]: ... + @overload + def __new__( + cls, + iter1: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + iter5: Iterable[_T5], + /, + *, + fillvalue: _T, + ) -> zip_longest[tuple[_T1 | _T, _T2 | _T, _T3 | _T, _T4 | _T, _T5 | _T]]: ... + # six or more iterables + @overload + def __new__( + cls, + iter1: Iterable[_T], + iter2: Iterable[_T], + iter3: Iterable[_T], + iter4: Iterable[_T], + iter5: Iterable[_T], + iter6: Iterable[_T], + /, + *iterables: Iterable[_T], + ) -> zip_longest[tuple[_T | MaybeNone, ...]]: ... + @overload + def __new__( + cls, + iter1: Iterable[_T], + iter2: Iterable[_T], + iter3: Iterable[_T], + iter4: Iterable[_T], + iter5: Iterable[_T], + iter6: Iterable[_T], + /, + *iterables: Iterable[_T], + fillvalue: _T, + ) -> zip_longest[tuple[_T, ...]]: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... + +@disjoint_base +class product(Iterator[_T_co]): + @overload + def __new__(cls, iter1: Iterable[_T1], /) -> product[tuple[_T1]]: ... + @overload + def __new__(cls, iter1: Iterable[_T1], iter2: Iterable[_T2], /) -> product[tuple[_T1, _T2]]: ... + @overload + def __new__(cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], /) -> product[tuple[_T1, _T2, _T3]]: ... + @overload + def __new__( + cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], / + ) -> product[tuple[_T1, _T2, _T3, _T4]]: ... + @overload + def __new__( + cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5], / + ) -> product[tuple[_T1, _T2, _T3, _T4, _T5]]: ... + @overload + def __new__( + cls, + iter1: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + iter5: Iterable[_T5], + iter6: Iterable[_T6], + /, + ) -> product[tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ... + @overload + def __new__( + cls, + iter1: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + iter5: Iterable[_T5], + iter6: Iterable[_T6], + iter7: Iterable[_T7], + /, + ) -> product[tuple[_T1, _T2, _T3, _T4, _T5, _T6, _T7]]: ... 
+ @overload + def __new__( + cls, + iter1: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + iter5: Iterable[_T5], + iter6: Iterable[_T6], + iter7: Iterable[_T7], + iter8: Iterable[_T8], + /, + ) -> product[tuple[_T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8]]: ... + @overload + def __new__( + cls, + iter1: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + iter5: Iterable[_T5], + iter6: Iterable[_T6], + iter7: Iterable[_T7], + iter8: Iterable[_T8], + iter9: Iterable[_T9], + /, + ) -> product[tuple[_T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8, _T9]]: ... + @overload + def __new__( + cls, + iter1: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + iter5: Iterable[_T5], + iter6: Iterable[_T6], + iter7: Iterable[_T7], + iter8: Iterable[_T8], + iter9: Iterable[_T9], + iter10: Iterable[_T10], + /, + ) -> product[tuple[_T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8, _T9, _T10]]: ... + @overload + def __new__(cls, *iterables: Iterable[_T1], repeat: int = 1) -> product[tuple[_T1, ...]]: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... + +@disjoint_base +class permutations(Iterator[_T_co]): + @overload + def __new__(cls, iterable: Iterable[_T], r: Literal[2]) -> permutations[tuple[_T, _T]]: ... + @overload + def __new__(cls, iterable: Iterable[_T], r: Literal[3]) -> permutations[tuple[_T, _T, _T]]: ... + @overload + def __new__(cls, iterable: Iterable[_T], r: Literal[4]) -> permutations[tuple[_T, _T, _T, _T]]: ... + @overload + def __new__(cls, iterable: Iterable[_T], r: Literal[5]) -> permutations[tuple[_T, _T, _T, _T, _T]]: ... + @overload + def __new__(cls, iterable: Iterable[_T], r: int | None = ...) -> permutations[tuple[_T, ...]]: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... + +@disjoint_base +class combinations(Iterator[_T_co]): + @overload + def __new__(cls, iterable: Iterable[_T], r: Literal[2]) -> combinations[tuple[_T, _T]]: ... + @overload + def __new__(cls, iterable: Iterable[_T], r: Literal[3]) -> combinations[tuple[_T, _T, _T]]: ... + @overload + def __new__(cls, iterable: Iterable[_T], r: Literal[4]) -> combinations[tuple[_T, _T, _T, _T]]: ... + @overload + def __new__(cls, iterable: Iterable[_T], r: Literal[5]) -> combinations[tuple[_T, _T, _T, _T, _T]]: ... + @overload + def __new__(cls, iterable: Iterable[_T], r: int) -> combinations[tuple[_T, ...]]: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... + +@disjoint_base +class combinations_with_replacement(Iterator[_T_co]): + @overload + def __new__(cls, iterable: Iterable[_T], r: Literal[2]) -> combinations_with_replacement[tuple[_T, _T]]: ... + @overload + def __new__(cls, iterable: Iterable[_T], r: Literal[3]) -> combinations_with_replacement[tuple[_T, _T, _T]]: ... + @overload + def __new__(cls, iterable: Iterable[_T], r: Literal[4]) -> combinations_with_replacement[tuple[_T, _T, _T, _T]]: ... + @overload + def __new__(cls, iterable: Iterable[_T], r: Literal[5]) -> combinations_with_replacement[tuple[_T, _T, _T, _T, _T]]: ... + @overload + def __new__(cls, iterable: Iterable[_T], r: int) -> combinations_with_replacement[tuple[_T, ...]]: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... + +if sys.version_info >= (3, 10): + @disjoint_base + class pairwise(Iterator[_T_co]): + def __new__(cls, iterable: Iterable[_T], /) -> pairwise[tuple[_T, _T]]: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... 
+ +if sys.version_info >= (3, 12): + @disjoint_base + class batched(Iterator[tuple[_T_co, ...]], Generic[_T_co]): + if sys.version_info >= (3, 13): + def __new__(cls, iterable: Iterable[_T_co], n: int, *, strict: bool = False) -> Self: ... + else: + def __new__(cls, iterable: Iterable[_T_co], n: int) -> Self: ... + + def __iter__(self) -> Self: ... + def __next__(self) -> tuple[_T_co, ...]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/json/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/json/__init__.pyi new file mode 100644 index 0000000..63e9718 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/json/__init__.pyi @@ -0,0 +1,61 @@ +from _typeshed import SupportsRead, SupportsWrite +from collections.abc import Callable +from typing import Any + +from .decoder import JSONDecodeError as JSONDecodeError, JSONDecoder as JSONDecoder +from .encoder import JSONEncoder as JSONEncoder + +__all__ = ["dump", "dumps", "load", "loads", "JSONDecoder", "JSONDecodeError", "JSONEncoder"] + +def dumps( + obj: Any, + *, + skipkeys: bool = False, + ensure_ascii: bool = True, + check_circular: bool = True, + allow_nan: bool = True, + cls: type[JSONEncoder] | None = None, + indent: None | int | str = None, + separators: tuple[str, str] | None = None, + default: Callable[[Any], Any] | None = None, + sort_keys: bool = False, + **kwds: Any, +) -> str: ... +def dump( + obj: Any, + fp: SupportsWrite[str], + *, + skipkeys: bool = False, + ensure_ascii: bool = True, + check_circular: bool = True, + allow_nan: bool = True, + cls: type[JSONEncoder] | None = None, + indent: None | int | str = None, + separators: tuple[str, str] | None = None, + default: Callable[[Any], Any] | None = None, + sort_keys: bool = False, + **kwds: Any, +) -> None: ... +def loads( + s: str | bytes | bytearray, + *, + cls: type[JSONDecoder] | None = None, + object_hook: Callable[[dict[Any, Any]], Any] | None = None, + parse_float: Callable[[str], Any] | None = None, + parse_int: Callable[[str], Any] | None = None, + parse_constant: Callable[[str], Any] | None = None, + object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = None, + **kwds: Any, +) -> Any: ... +def load( + fp: SupportsRead[str | bytes], + *, + cls: type[JSONDecoder] | None = None, + object_hook: Callable[[dict[Any, Any]], Any] | None = None, + parse_float: Callable[[str], Any] | None = None, + parse_int: Callable[[str], Any] | None = None, + parse_constant: Callable[[str], Any] | None = None, + object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = None, + **kwds: Any, +) -> Any: ... +def detect_encoding(b: bytes | bytearray) -> str: ... # undocumented diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/json/decoder.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/json/decoder.pyi new file mode 100644 index 0000000..8debfe6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/json/decoder.pyi @@ -0,0 +1,32 @@ +from collections.abc import Callable +from typing import Any + +__all__ = ["JSONDecoder", "JSONDecodeError"] + +class JSONDecodeError(ValueError): + msg: str + doc: str + pos: int + lineno: int + colno: int + def __init__(self, msg: str, doc: str, pos: int) -> None: ... 
+ +class JSONDecoder: + object_hook: Callable[[dict[str, Any]], Any] + parse_float: Callable[[str], Any] + parse_int: Callable[[str], Any] + parse_constant: Callable[[str], Any] + strict: bool + object_pairs_hook: Callable[[list[tuple[str, Any]]], Any] + def __init__( + self, + *, + object_hook: Callable[[dict[str, Any]], Any] | None = None, + parse_float: Callable[[str], Any] | None = None, + parse_int: Callable[[str], Any] | None = None, + parse_constant: Callable[[str], Any] | None = None, + strict: bool = True, + object_pairs_hook: Callable[[list[tuple[str, Any]]], Any] | None = None, + ) -> None: ... + def decode(self, s: str, _w: Callable[..., Any] = ...) -> Any: ... # _w is undocumented + def raw_decode(self, s: str, idx: int = 0) -> tuple[Any, int]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/json/encoder.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/json/encoder.pyi new file mode 100644 index 0000000..83b7866 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/json/encoder.pyi @@ -0,0 +1,40 @@ +from collections.abc import Callable, Iterator +from re import Pattern +from typing import Any, Final + +ESCAPE: Final[Pattern[str]] # undocumented +ESCAPE_ASCII: Final[Pattern[str]] # undocumented +HAS_UTF8: Final[Pattern[bytes]] # undocumented +ESCAPE_DCT: Final[dict[str, str]] # undocumented +INFINITY: Final[float] # undocumented + +def py_encode_basestring(s: str) -> str: ... # undocumented +def py_encode_basestring_ascii(s: str) -> str: ... # undocumented +def encode_basestring(s: str, /) -> str: ... # undocumented +def encode_basestring_ascii(s: str, /) -> str: ... # undocumented + +class JSONEncoder: + item_separator: str + key_separator: str + + skipkeys: bool + ensure_ascii: bool + check_circular: bool + allow_nan: bool + sort_keys: bool + indent: int | str + def __init__( + self, + *, + skipkeys: bool = False, + ensure_ascii: bool = True, + check_circular: bool = True, + allow_nan: bool = True, + sort_keys: bool = False, + indent: int | str | None = None, + separators: tuple[str, str] | None = None, + default: Callable[..., Any] | None = None, + ) -> None: ... + def default(self, o: Any) -> Any: ... + def encode(self, o: Any) -> str: ... + def iterencode(self, o: Any, _one_shot: bool = False) -> Iterator[str]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/json/scanner.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/json/scanner.pyi new file mode 100644 index 0000000..68b42e9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/json/scanner.pyi @@ -0,0 +1,7 @@ +from _json import make_scanner as make_scanner +from re import Pattern +from typing import Final + +__all__ = ["make_scanner"] + +NUMBER_RE: Final[Pattern[str]] # undocumented diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/json/tool.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/json/tool.pyi new file mode 100644 index 0000000..7e7363e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/json/tool.pyi @@ -0,0 +1 @@ +def main() -> None: ... 
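The stubs above only declare signatures; as a quick illustration (not part of the vendored typeshed files), the json surface they describe can be exercised as in the sketch below, using only stdlib names. The sample values are made up for demonstration.

import json
from decimal import Decimal

# Serialize with the keyword-only options typed in the dumps() stub.
payload = json.dumps({"total": 19.99, "items": 3}, sort_keys=True, indent=2)

try:
    # parse_float swaps in an alternative float constructor, as typed in loads().
    data = json.loads(payload, parse_float=Decimal)
except json.JSONDecodeError as exc:
    # msg/lineno/colno correspond to the JSONDecodeError attributes declared above.
    print(f"invalid JSON at line {exc.lineno}, column {exc.colno}: {exc.msg}")
else:
    print(data["total"], data["items"])  # Decimal('19.99') 3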
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/keyword.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/keyword.pyi new file mode 100644 index 0000000..6b8bdad --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/keyword.pyi @@ -0,0 +1,16 @@ +from collections.abc import Sequence +from typing import Final + +__all__ = ["iskeyword", "issoftkeyword", "kwlist", "softkwlist"] + +def iskeyword(s: str, /) -> bool: ... + +# a list at runtime, but you're not meant to mutate it; +# type it as a sequence +kwlist: Final[Sequence[str]] + +def issoftkeyword(s: str, /) -> bool: ... + +# a list at runtime, but you're not meant to mutate it; +# type it as a sequence +softkwlist: Final[Sequence[str]] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/__init__.pyi new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/btm_matcher.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/btm_matcher.pyi new file mode 100644 index 0000000..4c87b66 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/btm_matcher.pyi @@ -0,0 +1,28 @@ +from _typeshed import Incomplete, SupportsGetItem +from collections import defaultdict +from collections.abc import Iterable + +from .fixer_base import BaseFix +from .pytree import Leaf, Node + +class BMNode: + count: Incomplete + transition_table: Incomplete + fixers: Incomplete + id: Incomplete + content: str + def __init__(self) -> None: ... + +class BottomMatcher: + match: Incomplete + root: Incomplete + nodes: Incomplete + fixers: Incomplete + logger: Incomplete + def __init__(self) -> None: ... + def add_fixer(self, fixer: BaseFix) -> None: ... + def add(self, pattern: SupportsGetItem[int | slice, Incomplete] | None, start: BMNode) -> list[BMNode]: ... + def run(self, leaves: Iterable[Leaf]) -> defaultdict[BaseFix, list[Node | Leaf]]: ... + def print_ac(self) -> None: ... + +def type_repr(type_num: int) -> str | int: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixer_base.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixer_base.pyi new file mode 100644 index 0000000..06813c9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixer_base.pyi @@ -0,0 +1,42 @@ +from _typeshed import Incomplete, StrPath +from abc import ABCMeta, abstractmethod +from collections.abc import MutableMapping +from typing import ClassVar, Literal, TypeVar + +from .pytree import Base, Leaf, Node + +_N = TypeVar("_N", bound=Base) + +class BaseFix: + PATTERN: ClassVar[str | None] + pattern: Incomplete | None + pattern_tree: Incomplete | None + options: Incomplete | None + filename: Incomplete | None + numbers: Incomplete + used_names: Incomplete + order: ClassVar[Literal["post", "pre"]] + explicit: ClassVar[bool] + run_order: ClassVar[int] + keep_line_order: ClassVar[bool] + BM_compatible: ClassVar[bool] + syms: Incomplete + log: Incomplete + def __init__(self, options: MutableMapping[str, Incomplete], log: list[str]) -> None: ... + def compile_pattern(self) -> None: ... + def set_filename(self, filename: StrPath) -> None: ... + def match(self, node: _N) -> Literal[False] | dict[str, _N]: ... + @abstractmethod + def transform(self, node: Base, results: dict[str, Base]) -> Node | Leaf | None: ... 
+ def new_name(self, template: str = "xxx_todo_changeme") -> str: ... + first_log: bool + def log_message(self, message: str) -> None: ... + def cannot_convert(self, node: Base, reason: str | None = None) -> None: ... + def warning(self, node: Base, reason: str) -> None: ... + def start_tree(self, tree: Node, filename: StrPath) -> None: ... + def finish_tree(self, tree: Node, filename: StrPath) -> None: ... + +class ConditionalFix(BaseFix, metaclass=ABCMeta): + skip_on: ClassVar[str | None] + def start_tree(self, tree: Node, filename: StrPath, /) -> None: ... + def should_skip(self, node: Base) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/__init__.pyi new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_apply.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_apply.pyi new file mode 100644 index 0000000..e53e3dd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_apply.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixApply(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_asserts.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_asserts.pyi new file mode 100644 index 0000000..1bf7db2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_asserts.pyi @@ -0,0 +1,10 @@ +from typing import ClassVar, Final, Literal + +from ..fixer_base import BaseFix + +NAMES: Final[dict[str, str]] + +class FixAsserts(BaseFix): + BM_compatible: ClassVar[Literal[False]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_basestring.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_basestring.pyi new file mode 100644 index 0000000..8ed5cca --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_basestring.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixBasestring(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[Literal["'basestring'"]] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_buffer.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_buffer.pyi new file mode 100644 index 0000000..1efca62 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_buffer.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixBuffer(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... 
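As an illustrative sketch of how the fixer interface declared in fixer_base.pyi is used (this is not one of the shipped fixers): a minimal BaseFix subclass defines a PATTERN and a transform(), here with a name-token pattern in the same style as FixBasestring above. The class name and the identifiers old_api/new_api are hypothetical, and lib2to3 itself is deprecated and removed in Python 3.13, so this only imports on 3.12 and earlier.

from lib2to3 import fixer_base
from lib2to3.fixer_util import Name  # helper that builds a NAME leaf

class FixRenameExample(fixer_base.BaseFix):  # hypothetical example fixer
    BM_compatible = True
    # Same pattern style as FixBasestring: match a bare NAME token by value.
    PATTERN = "'old_api'"

    def transform(self, node, results):
        # Return a replacement leaf, preserving the original whitespace prefix.
        return Name("new_api", prefix=node.prefix)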
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_dict.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_dict.pyi new file mode 100644 index 0000000..08c54c3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_dict.pyi @@ -0,0 +1,16 @@ +from _typeshed import Incomplete +from typing import ClassVar, Literal + +from .. import fixer_base + +iter_exempt: set[str] + +class FixDict(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... + P1: ClassVar[str] + p1: ClassVar[Incomplete] + P2: ClassVar[str] + p2: ClassVar[Incomplete] + def in_special_context(self, node, isiter): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_except.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_except.pyi new file mode 100644 index 0000000..30930a2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_except.pyi @@ -0,0 +1,14 @@ +from collections.abc import Generator, Iterable +from typing import ClassVar, Literal, TypeVar + +from .. import fixer_base +from ..pytree import Base + +_N = TypeVar("_N", bound=Base) + +def find_excepts(nodes: Iterable[_N]) -> Generator[tuple[_N, _N], None, None]: ... + +class FixExcept(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_exec.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_exec.pyi new file mode 100644 index 0000000..71e2a82 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_exec.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixExec(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_execfile.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_execfile.pyi new file mode 100644 index 0000000..8122a63 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_execfile.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixExecfile(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_exitfunc.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_exitfunc.pyi new file mode 100644 index 0000000..7fc910c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_exitfunc.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete, StrPath +from lib2to3 import fixer_base +from typing import ClassVar, Literal + +from ..pytree import Node + +class FixExitfunc(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def __init__(self, *args) -> None: ... + sys_import: Incomplete | None + def start_tree(self, tree: Node, filename: StrPath) -> None: ... + def transform(self, node, results) -> None: ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_filter.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_filter.pyi new file mode 100644 index 0000000..638889b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_filter.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixFilter(fixer_base.ConditionalFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + skip_on: ClassVar[Literal["future_builtins.filter"]] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_funcattrs.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_funcattrs.pyi new file mode 100644 index 0000000..60487bb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_funcattrs.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixFuncattrs(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_future.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_future.pyi new file mode 100644 index 0000000..12ed93f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_future.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixFuture(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_getcwdu.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_getcwdu.pyi new file mode 100644 index 0000000..aa3ccf5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_getcwdu.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixGetcwdu(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_has_key.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_has_key.pyi new file mode 100644 index 0000000..f6f5a07 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_has_key.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixHasKey(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_idioms.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_idioms.pyi new file mode 100644 index 0000000..6b2723d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_idioms.pyi @@ -0,0 +1,15 @@ +from typing import ClassVar, Final, Literal + +from .. import fixer_base + +CMP: Final[str] +TYPE: Final[str] + +class FixIdioms(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[False]] + PATTERN: ClassVar[str] + def match(self, node): ... 
+ def transform(self, node, results): ... + def transform_isinstance(self, node, results): ... + def transform_while(self, node, results) -> None: ... + def transform_sort(self, node, results) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_import.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_import.pyi new file mode 100644 index 0000000..bf4b2d0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_import.pyi @@ -0,0 +1,16 @@ +from _typeshed import StrPath +from collections.abc import Generator +from typing import ClassVar, Literal + +from .. import fixer_base +from ..pytree import Node + +def traverse_imports(names) -> Generator[str, None, None]: ... + +class FixImport(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + skip: bool + def start_tree(self, tree: Node, name: StrPath) -> None: ... + def transform(self, node, results): ... + def probably_a_local_import(self, imp_name): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_imports.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_imports.pyi new file mode 100644 index 0000000..c747af5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_imports.pyi @@ -0,0 +1,21 @@ +from _typeshed import StrPath +from collections.abc import Generator +from typing import ClassVar, Final, Literal + +from .. import fixer_base +from ..pytree import Node + +MAPPING: Final[dict[str, str]] + +def alternates(members): ... +def build_pattern(mapping=...) -> Generator[str, None, None]: ... + +class FixImports(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + mapping = MAPPING + def build_pattern(self): ... + def compile_pattern(self) -> None: ... + def match(self, node): ... + replace: dict[str, str] + def start_tree(self, tree: Node, filename: StrPath) -> None: ... + def transform(self, node, results) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_imports2.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_imports2.pyi new file mode 100644 index 0000000..618ecd0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_imports2.pyi @@ -0,0 +1,8 @@ +from typing import Final + +from . import fix_imports + +MAPPING: Final[dict[str, str]] + +class FixImports2(fix_imports.FixImports): + mapping = MAPPING diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_input.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_input.pyi new file mode 100644 index 0000000..fc12795 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_input.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete +from typing import ClassVar, Literal + +from .. import fixer_base + +context: Incomplete + +class FixInput(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_intern.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_intern.pyi new file mode 100644 index 0000000..804b7b2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_intern.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixIntern(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + order: ClassVar[Literal["pre"]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_isinstance.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_isinstance.pyi new file mode 100644 index 0000000..31eefd6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_isinstance.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixIsinstance(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_itertools.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_itertools.pyi new file mode 100644 index 0000000..229d86e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_itertools.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixItertools(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + it_funcs: str + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_itertools_imports.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_itertools_imports.pyi new file mode 100644 index 0000000..39a4da5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_itertools_imports.pyi @@ -0,0 +1,7 @@ +from lib2to3 import fixer_base +from typing import ClassVar, Literal + +class FixItertoolsImports(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_long.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_long.pyi new file mode 100644 index 0000000..9ccf271 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_long.pyi @@ -0,0 +1,7 @@ +from lib2to3 import fixer_base +from typing import ClassVar, Literal + +class FixLong(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[Literal["'long'"]] + def transform(self, node, results) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_map.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_map.pyi new file mode 100644 index 0000000..6e60282 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_map.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar, Literal + +from .. 
import fixer_base + +class FixMap(fixer_base.ConditionalFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + skip_on: ClassVar[Literal["future_builtins.map"]] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_metaclass.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_metaclass.pyi new file mode 100644 index 0000000..1b1ec82 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_metaclass.pyi @@ -0,0 +1,17 @@ +from collections.abc import Generator +from typing import ClassVar, Literal + +from .. import fixer_base +from ..pytree import Base + +def has_metaclass(parent): ... +def fixup_parse_tree(cls_node) -> None: ... +def fixup_simple_stmt(parent, i, stmt_node) -> None: ... +def remove_trailing_newline(node) -> None: ... +def find_metas(cls_node) -> Generator[tuple[Base, int, Base], None, None]: ... +def fixup_indent(suite) -> None: ... + +class FixMetaclass(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_methodattrs.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_methodattrs.pyi new file mode 100644 index 0000000..ca9b71e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_methodattrs.pyi @@ -0,0 +1,10 @@ +from typing import ClassVar, Final, Literal + +from .. import fixer_base + +MAP: Final[dict[str, str]] + +class FixMethodattrs(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_ne.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_ne.pyi new file mode 100644 index 0000000..6ff1220 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_ne.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixNe(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[False]] + def match(self, node): ... + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_next.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_next.pyi new file mode 100644 index 0000000..b13914a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_next.pyi @@ -0,0 +1,19 @@ +from _typeshed import StrPath +from typing import ClassVar, Literal + +from .. import fixer_base +from ..pytree import Node + +bind_warning: str + +class FixNext(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + order: ClassVar[Literal["pre"]] + shadowed_next: bool + def start_tree(self, tree: Node, filename: StrPath) -> None: ... + def transform(self, node, results) -> None: ... + +def is_assign_target(node): ... +def find_assign(node): ... +def is_subtree(root, node): ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_nonzero.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_nonzero.pyi new file mode 100644 index 0000000..5c37fc1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_nonzero.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixNonzero(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_numliterals.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_numliterals.pyi new file mode 100644 index 0000000..113145e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_numliterals.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixNumliterals(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[False]] + def match(self, node): ... + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_operator.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_operator.pyi new file mode 100644 index 0000000..b9863d3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_operator.pyi @@ -0,0 +1,12 @@ +from lib2to3 import fixer_base +from typing import ClassVar, Literal + +def invocation(s): ... + +class FixOperator(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + order: ClassVar[Literal["pre"]] + methods: str + obj: str + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_paren.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_paren.pyi new file mode 100644 index 0000000..237df6c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_paren.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixParen(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_print.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_print.pyi new file mode 100644 index 0000000..e9564b0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_print.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete +from typing import ClassVar, Literal + +from .. import fixer_base + +parend_expr: Incomplete + +class FixPrint(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... + def add_kwarg(self, l_nodes, s_kwd, n_expr) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_raise.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_raise.pyi new file mode 100644 index 0000000..e02c308 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_raise.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. 
import fixer_base + +class FixRaise(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_raw_input.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_raw_input.pyi new file mode 100644 index 0000000..d1a0eb0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_raw_input.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixRawInput(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_reduce.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_reduce.pyi new file mode 100644 index 0000000..f8ad876 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_reduce.pyi @@ -0,0 +1,8 @@ +from lib2to3 import fixer_base +from typing import ClassVar, Literal + +class FixReduce(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + order: ClassVar[Literal["pre"]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_reload.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_reload.pyi new file mode 100644 index 0000000..8200754 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_reload.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixReload(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + order: ClassVar[Literal["pre"]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_renames.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_renames.pyi new file mode 100644 index 0000000..652d8f1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_renames.pyi @@ -0,0 +1,17 @@ +from collections.abc import Generator +from typing import ClassVar, Final, Literal + +from .. import fixer_base + +MAPPING: Final[dict[str, dict[str, str]]] +LOOKUP: Final[dict[tuple[str, str], str]] + +def alternates(members): ... +def build_pattern() -> Generator[str, None, None]: ... + +class FixRenames(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + order: ClassVar[Literal["pre"]] + PATTERN: ClassVar[str] + def match(self, node): ... + def transform(self, node, results) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_repr.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_repr.pyi new file mode 100644 index 0000000..3b192d3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_repr.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixRepr(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_set_literal.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_set_literal.pyi new file mode 100644 index 0000000..6962ff3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_set_literal.pyi @@ -0,0 +1,7 @@ +from lib2to3 import fixer_base +from typing import ClassVar, Literal + +class FixSetLiteral(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_standarderror.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_standarderror.pyi new file mode 100644 index 0000000..ba914bc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_standarderror.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixStandarderror(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_sys_exc.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_sys_exc.pyi new file mode 100644 index 0000000..0fa1a47 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_sys_exc.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixSysExc(fixer_base.BaseFix): + exc_info: ClassVar[list[str]] + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_throw.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_throw.pyi new file mode 100644 index 0000000..4c99855 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_throw.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixThrow(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_tuple_params.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_tuple_params.pyi new file mode 100644 index 0000000..7f4f7f4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_tuple_params.pyi @@ -0,0 +1,16 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +def is_docstring(stmt): ... + +class FixTupleParams(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... + def transform_lambda(self, node, results) -> None: ... + +def simplify_args(node): ... +def find_params(node): ... +def map_to_index(param_list, prefix=[], d=None): ... +def tuple_name(param_list): ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_types.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_types.pyi new file mode 100644 index 0000000..e26dbec --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_types.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixTypes(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_unicode.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_unicode.pyi new file mode 100644 index 0000000..85d1315 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_unicode.pyi @@ -0,0 +1,12 @@ +from _typeshed import StrPath +from typing import ClassVar, Literal + +from .. import fixer_base +from ..pytree import Node + +class FixUnicode(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + unicode_literals: bool + def start_tree(self, tree: Node, filename: StrPath) -> None: ... + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_urllib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_urllib.pyi new file mode 100644 index 0000000..abdcc0f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_urllib.pyi @@ -0,0 +1,15 @@ +from collections.abc import Generator +from typing import Final, Literal + +from .fix_imports import FixImports + +MAPPING: Final[dict[str, list[tuple[Literal["urllib.request", "urllib.parse", "urllib.error"], list[str]]]]] + +def build_pattern() -> Generator[str, None, None]: ... + +class FixUrllib(FixImports): + def build_pattern(self): ... + def transform_import(self, node, results) -> None: ... + def transform_member(self, node, results): ... + def transform_dot(self, node, results) -> None: ... + def transform(self, node, results) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_ws_comma.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_ws_comma.pyi new file mode 100644 index 0000000..4ce5cb2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_ws_comma.pyi @@ -0,0 +1,12 @@ +from typing import ClassVar, Literal + +from .. import fixer_base +from ..pytree import Leaf + +class FixWsComma(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[False]] + PATTERN: ClassVar[str] + COMMA: Leaf + COLON: Leaf + SEPS: tuple[Leaf, Leaf] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_xrange.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_xrange.pyi new file mode 100644 index 0000000..71318b7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_xrange.pyi @@ -0,0 +1,20 @@ +from _typeshed import Incomplete, StrPath +from typing import ClassVar, Literal + +from .. import fixer_base +from ..pytree import Node + +class FixXrange(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + transformed_xranges: set[Incomplete] | None + def start_tree(self, tree: Node, filename: StrPath) -> None: ... 
+ def finish_tree(self, tree: Node, filename: StrPath) -> None: ... + def transform(self, node, results): ... + def transform_xrange(self, node, results) -> None: ... + def transform_range(self, node, results): ... + P1: ClassVar[str] + p1: ClassVar[Incomplete] + P2: ClassVar[str] + p2: ClassVar[Incomplete] + def in_special_context(self, node): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_xreadlines.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_xreadlines.pyi new file mode 100644 index 0000000..b479414 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_xreadlines.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixXreadlines(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_zip.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_zip.pyi new file mode 100644 index 0000000..805886e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/fixes/fix_zip.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar, Literal + +from .. import fixer_base + +class FixZip(fixer_base.ConditionalFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + skip_on: ClassVar[Literal["future_builtins.zip"]] + def transform(self, node, results): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/main.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/main.pyi new file mode 100644 index 0000000..5b7fdfc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/main.pyi @@ -0,0 +1,42 @@ +from _typeshed import FileDescriptorOrPath +from collections.abc import Container, Iterable, Iterator, Mapping, Sequence +from logging import _ExcInfoType +from typing import AnyStr, Literal + +from . import refactor as refactor + +def diff_texts(a: str, b: str, filename: str) -> Iterator[str]: ... + +class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool): + nobackups: bool + show_diffs: bool + def __init__( + self, + fixers: Iterable[str], + options: Mapping[str, object] | None, + explicit: Container[str] | None, + nobackups: bool, + show_diffs: bool, + input_base_dir: str = "", + output_dir: str = "", + append_suffix: str = "", + ) -> None: ... + # Same as super.log_error and Logger.error + def log_error( # type: ignore[override] + self, + msg: str, + *args: Iterable[str], + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + # Same as super.write_file but without default values + def write_file( # type: ignore[override] + self, new_text: str, filename: FileDescriptorOrPath, old_text: str, encoding: str | None + ) -> None: ... + # filename has to be str + def print_output(self, old: str, new: str, filename: str, equal: bool) -> None: ... # type: ignore[override] + +def warn(msg: object) -> None: ... +def main(fixer_pkg: str, args: Sequence[AnyStr] | None = None) -> Literal[0, 1, 2]: ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/__init__.pyi new file mode 100644 index 0000000..de8a874 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/__init__.pyi @@ -0,0 +1,9 @@ +from collections.abc import Callable +from typing import Any +from typing_extensions import TypeAlias + +from ..pytree import _RawNode +from .grammar import Grammar + +# This is imported in several lib2to3/pgen2 submodules +_Convert: TypeAlias = Callable[[Grammar, _RawNode], Any] # noqa: Y047 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/driver.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/driver.pyi new file mode 100644 index 0000000..dea13fb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/driver.pyi @@ -0,0 +1,27 @@ +from _typeshed import StrPath +from collections.abc import Iterable +from logging import Logger +from typing import IO + +from ..pytree import _NL +from . import _Convert +from .grammar import Grammar + +__all__ = ["Driver", "load_grammar"] + +class Driver: + grammar: Grammar + logger: Logger + convert: _Convert + def __init__(self, grammar: Grammar, convert: _Convert | None = None, logger: Logger | None = None) -> None: ... + def parse_tokens( + self, tokens: Iterable[tuple[int, str, tuple[int, int], tuple[int, int], str]], debug: bool = False + ) -> _NL: ... + def parse_stream_raw(self, stream: IO[str], debug: bool = False) -> _NL: ... + def parse_stream(self, stream: IO[str], debug: bool = False) -> _NL: ... + def parse_file(self, filename: StrPath, encoding: str | None = None, debug: bool = False) -> _NL: ... + def parse_string(self, text: str, debug: bool = False) -> _NL: ... + +def load_grammar( + gt: str = "Grammar.txt", gp: str | None = None, save: bool = True, force: bool = False, logger: Logger | None = None +) -> Grammar: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/grammar.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/grammar.pyi new file mode 100644 index 0000000..bef0a79 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/grammar.pyi @@ -0,0 +1,24 @@ +from _typeshed import StrPath +from typing_extensions import Self, TypeAlias + +_Label: TypeAlias = tuple[int, str | None] +_DFA: TypeAlias = list[list[tuple[int, int]]] +_DFAS: TypeAlias = tuple[_DFA, dict[int, int]] + +class Grammar: + symbol2number: dict[str, int] + number2symbol: dict[int, str] + states: list[_DFA] + dfas: dict[int, _DFAS] + labels: list[_Label] + keywords: dict[str, int] + tokens: dict[int, int] + symbol2label: dict[str, int] + start: int + def dump(self, filename: StrPath) -> None: ... + def load(self, filename: StrPath) -> None: ... + def copy(self) -> Self: ... + def report(self) -> None: ... + +opmap_raw: str +opmap: dict[str, str] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/literals.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/literals.pyi new file mode 100644 index 0000000..c3fabe8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/literals.pyi @@ -0,0 +1,7 @@ +from re import Match + +simple_escapes: dict[str, str] + +def escape(m: Match[str]) -> str: ... +def evalString(s: str) -> str: ... +def test() -> None: ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/parse.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/parse.pyi new file mode 100644 index 0000000..320c5f0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/parse.pyi @@ -0,0 +1,30 @@ +from _typeshed import Incomplete +from collections.abc import Sequence +from typing_extensions import TypeAlias + +from ..pytree import _NL, _RawNode +from . import _Convert +from .grammar import _DFAS, Grammar + +_Context: TypeAlias = Sequence[Incomplete] + +class ParseError(Exception): + msg: str + type: int + value: str | None + context: _Context + def __init__(self, msg: str, type: int, value: str | None, context: _Context) -> None: ... + +class Parser: + grammar: Grammar + convert: _Convert + stack: list[tuple[_DFAS, int, _RawNode]] + rootnode: _NL | None + used_names: set[str] + def __init__(self, grammar: Grammar, convert: _Convert | None = None) -> None: ... + def setup(self, start: int | None = None) -> None: ... + def addtoken(self, type: int, value: str | None, context: _Context) -> bool: ... + def classify(self, type: int, value: str | None, context: _Context) -> int: ... + def shift(self, type: int, value: str | None, newstate: int, context: _Context) -> None: ... + def push(self, type: int, newdfa: _DFAS, newstate: int, context: _Context) -> None: ... + def pop(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/pgen.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/pgen.pyi new file mode 100644 index 0000000..5776d10 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/pgen.pyi @@ -0,0 +1,51 @@ +from _typeshed import Incomplete, StrPath +from collections.abc import Iterable, Iterator +from typing import IO, ClassVar, NoReturn, overload + +from . import grammar +from .tokenize import _TokenInfo + +class PgenGrammar(grammar.Grammar): ... + +class ParserGenerator: + filename: StrPath + stream: IO[str] + generator: Iterator[_TokenInfo] + first: dict[str, dict[str, int]] + def __init__(self, filename: StrPath, stream: IO[str] | None = None) -> None: ... + def make_grammar(self) -> PgenGrammar: ... + def make_first(self, c: PgenGrammar, name: str) -> dict[int, int]: ... + def make_label(self, c: PgenGrammar, label: str) -> int: ... + def addfirstsets(self) -> None: ... + def calcfirst(self, name: str) -> None: ... + def parse(self) -> tuple[dict[str, list[DFAState]], str]: ... + def make_dfa(self, start: NFAState, finish: NFAState) -> list[DFAState]: ... + def dump_nfa(self, name: str, start: NFAState, finish: NFAState) -> list[DFAState]: ... + def dump_dfa(self, name: str, dfa: Iterable[DFAState]) -> None: ... + def simplify_dfa(self, dfa: list[DFAState]) -> None: ... + def parse_rhs(self) -> tuple[NFAState, NFAState]: ... + def parse_alt(self) -> tuple[NFAState, NFAState]: ... + def parse_item(self) -> tuple[NFAState, NFAState]: ... + def parse_atom(self) -> tuple[NFAState, NFAState]: ... + def expect(self, type: int, value: str | None = None) -> str: ... + def gettoken(self) -> None: ... + @overload + def raise_error(self, msg: object) -> NoReturn: ... + @overload + def raise_error(self, msg: str, *args: object) -> NoReturn: ... + +class NFAState: + arcs: list[tuple[str | None, NFAState]] + def addarc(self, next: NFAState, label: str | None = None) -> None: ... 
+ +class DFAState: + nfaset: dict[NFAState, Incomplete] + isfinal: bool + arcs: dict[str, DFAState] + def __init__(self, nfaset: dict[NFAState, Incomplete], final: NFAState) -> None: ... + def addarc(self, next: DFAState, label: str) -> None: ... + def unifystate(self, old: DFAState, new: DFAState) -> None: ... + def __eq__(self, other: DFAState) -> bool: ... # type: ignore[override] + __hash__: ClassVar[None] # type: ignore[assignment] + +def generate_grammar(filename: StrPath = "Grammar.txt") -> PgenGrammar: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/token.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/token.pyi new file mode 100644 index 0000000..6898517 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/token.pyi @@ -0,0 +1,69 @@ +from typing import Final + +ENDMARKER: Final[int] +NAME: Final[int] +NUMBER: Final[int] +STRING: Final[int] +NEWLINE: Final[int] +INDENT: Final[int] +DEDENT: Final[int] +LPAR: Final[int] +RPAR: Final[int] +LSQB: Final[int] +RSQB: Final[int] +COLON: Final[int] +COMMA: Final[int] +SEMI: Final[int] +PLUS: Final[int] +MINUS: Final[int] +STAR: Final[int] +SLASH: Final[int] +VBAR: Final[int] +AMPER: Final[int] +LESS: Final[int] +GREATER: Final[int] +EQUAL: Final[int] +DOT: Final[int] +PERCENT: Final[int] +BACKQUOTE: Final[int] +LBRACE: Final[int] +RBRACE: Final[int] +EQEQUAL: Final[int] +NOTEQUAL: Final[int] +LESSEQUAL: Final[int] +GREATEREQUAL: Final[int] +TILDE: Final[int] +CIRCUMFLEX: Final[int] +LEFTSHIFT: Final[int] +RIGHTSHIFT: Final[int] +DOUBLESTAR: Final[int] +PLUSEQUAL: Final[int] +MINEQUAL: Final[int] +STAREQUAL: Final[int] +SLASHEQUAL: Final[int] +PERCENTEQUAL: Final[int] +AMPEREQUAL: Final[int] +VBAREQUAL: Final[int] +CIRCUMFLEXEQUAL: Final[int] +LEFTSHIFTEQUAL: Final[int] +RIGHTSHIFTEQUAL: Final[int] +DOUBLESTAREQUAL: Final[int] +DOUBLESLASH: Final[int] +DOUBLESLASHEQUAL: Final[int] +OP: Final[int] +COMMENT: Final[int] +NL: Final[int] +RARROW: Final[int] +AT: Final[int] +ATEQUAL: Final[int] +AWAIT: Final[int] +ASYNC: Final[int] +ERRORTOKEN: Final[int] +COLONEQUAL: Final[int] +N_TOKENS: Final[int] +NT_OFFSET: Final[int] +tok_name: dict[int, str] + +def ISTERMINAL(x: int) -> bool: ... +def ISNONTERMINAL(x: int) -> bool: ... +def ISEOF(x: int) -> bool: ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi new file mode 100644 index 0000000..af54de1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi @@ -0,0 +1,96 @@ +from collections.abc import Callable, Iterable, Iterator +from typing_extensions import TypeAlias + +from .token import * + +__all__ = [ + "AMPER", + "AMPEREQUAL", + "ASYNC", + "AT", + "ATEQUAL", + "AWAIT", + "BACKQUOTE", + "CIRCUMFLEX", + "CIRCUMFLEXEQUAL", + "COLON", + "COMMA", + "COMMENT", + "DEDENT", + "DOT", + "DOUBLESLASH", + "DOUBLESLASHEQUAL", + "DOUBLESTAR", + "DOUBLESTAREQUAL", + "ENDMARKER", + "EQEQUAL", + "EQUAL", + "ERRORTOKEN", + "GREATER", + "GREATEREQUAL", + "INDENT", + "ISEOF", + "ISNONTERMINAL", + "ISTERMINAL", + "LBRACE", + "LEFTSHIFT", + "LEFTSHIFTEQUAL", + "LESS", + "LESSEQUAL", + "LPAR", + "LSQB", + "MINEQUAL", + "MINUS", + "NAME", + "NEWLINE", + "NL", + "NOTEQUAL", + "NT_OFFSET", + "NUMBER", + "N_TOKENS", + "OP", + "PERCENT", + "PERCENTEQUAL", + "PLUS", + "PLUSEQUAL", + "RARROW", + "RBRACE", + "RIGHTSHIFT", + "RIGHTSHIFTEQUAL", + "RPAR", + "RSQB", + "SEMI", + "SLASH", + "SLASHEQUAL", + "STAR", + "STAREQUAL", + "STRING", + "TILDE", + "VBAR", + "VBAREQUAL", + "tok_name", + "tokenize", + "generate_tokens", + "untokenize", + "COLONEQUAL", +] + +_Coord: TypeAlias = tuple[int, int] +_TokenEater: TypeAlias = Callable[[int, str, _Coord, _Coord, str], object] +_TokenInfo: TypeAlias = tuple[int, str, _Coord, _Coord, str] + +class TokenError(Exception): ... +class StopTokenizing(Exception): ... + +def tokenize(readline: Callable[[], str], tokeneater: _TokenEater = ...) -> None: ... + +class Untokenizer: + tokens: list[str] + prev_row: int + prev_col: int + def add_whitespace(self, start: _Coord) -> None: ... + def untokenize(self, iterable: Iterable[_TokenInfo]) -> str: ... + def compat(self, token: tuple[int, str], iterable: Iterable[_TokenInfo]) -> None: ... + +def untokenize(iterable: Iterable[_TokenInfo]) -> str: ... +def generate_tokens(readline: Callable[[], str]) -> Iterator[_TokenInfo]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pygram.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pygram.pyi new file mode 100644 index 0000000..86c74b5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pygram.pyi @@ -0,0 +1,114 @@ +from .pgen2.grammar import Grammar + +class Symbols: + def __init__(self, grammar: Grammar) -> None: ... 
+ +class python_symbols(Symbols): + and_expr: int + and_test: int + annassign: int + arglist: int + argument: int + arith_expr: int + assert_stmt: int + async_funcdef: int + async_stmt: int + atom: int + augassign: int + break_stmt: int + classdef: int + comp_for: int + comp_if: int + comp_iter: int + comp_op: int + comparison: int + compound_stmt: int + continue_stmt: int + decorated: int + decorator: int + decorators: int + del_stmt: int + dictsetmaker: int + dotted_as_name: int + dotted_as_names: int + dotted_name: int + encoding_decl: int + eval_input: int + except_clause: int + exec_stmt: int + expr: int + expr_stmt: int + exprlist: int + factor: int + file_input: int + flow_stmt: int + for_stmt: int + funcdef: int + global_stmt: int + if_stmt: int + import_as_name: int + import_as_names: int + import_from: int + import_name: int + import_stmt: int + lambdef: int + listmaker: int + not_test: int + old_lambdef: int + old_test: int + or_test: int + parameters: int + pass_stmt: int + power: int + print_stmt: int + raise_stmt: int + return_stmt: int + shift_expr: int + simple_stmt: int + single_input: int + sliceop: int + small_stmt: int + star_expr: int + stmt: int + subscript: int + subscriptlist: int + suite: int + term: int + test: int + testlist: int + testlist1: int + testlist_gexp: int + testlist_safe: int + testlist_star_expr: int + tfpdef: int + tfplist: int + tname: int + trailer: int + try_stmt: int + typedargslist: int + varargslist: int + vfpdef: int + vfplist: int + vname: int + while_stmt: int + with_item: int + with_stmt: int + with_var: int + xor_expr: int + yield_arg: int + yield_expr: int + yield_stmt: int + +class pattern_symbols(Symbols): + Alternative: int + Alternatives: int + Details: int + Matcher: int + NegatedUnit: int + Repeater: int + Unit: int + +python_grammar: Grammar +python_grammar_no_print_statement: Grammar +python_grammar_no_print_and_exec_statement: Grammar +pattern_grammar: Grammar diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pytree.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pytree.pyi new file mode 100644 index 0000000..51bdbc7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/pytree.pyi @@ -0,0 +1,118 @@ +from _typeshed import Incomplete, SupportsGetItem, SupportsLenAndGetItem, Unused +from abc import abstractmethod +from collections.abc import Iterable, Iterator, MutableSequence +from typing import ClassVar, Final +from typing_extensions import Self, TypeAlias + +from .fixer_base import BaseFix +from .pgen2.grammar import Grammar + +_NL: TypeAlias = Node | Leaf +_Context: TypeAlias = tuple[str, int, int] +_Results: TypeAlias = dict[str, _NL] +_RawNode: TypeAlias = tuple[int, str, _Context, list[_NL] | None] + +HUGE: Final = 0x7FFFFFFF + +def type_repr(type_num: int) -> str | int: ... + +class Base: + type: int + parent: Node | None + prefix: str + children: list[_NL] + was_changed: bool + was_checked: bool + def __eq__(self, other: object) -> bool: ... + __hash__: ClassVar[None] # type: ignore[assignment] + @abstractmethod + def _eq(self, other: Base) -> bool: ... + @abstractmethod + def clone(self) -> Self: ... + @abstractmethod + def post_order(self) -> Iterator[Self]: ... + @abstractmethod + def pre_order(self) -> Iterator[Self]: ... + def replace(self, new: _NL | list[_NL]) -> None: ... + def get_lineno(self) -> int: ... + def changed(self) -> None: ... + def remove(self) -> int | None: ... + @property + def next_sibling(self) -> _NL | None: ... 
+ @property + def prev_sibling(self) -> _NL | None: ... + def leaves(self) -> Iterator[Leaf]: ... + def depth(self) -> int: ... + def get_suffix(self) -> str: ... + +class Node(Base): + fixers_applied: MutableSequence[BaseFix] | None + # Is Unbound until set in refactor.RefactoringTool + future_features: frozenset[Incomplete] + # Is Unbound until set in pgen2.parse.Parser.pop + used_names: set[str] + def __init__( + self, + type: int, + children: Iterable[_NL], + context: Unused = None, + prefix: str | None = None, + fixers_applied: MutableSequence[BaseFix] | None = None, + ) -> None: ... + def _eq(self, other: Base) -> bool: ... + def clone(self) -> Node: ... + def post_order(self) -> Iterator[Self]: ... + def pre_order(self) -> Iterator[Self]: ... + def set_child(self, i: int, child: _NL) -> None: ... + def insert_child(self, i: int, child: _NL) -> None: ... + def append_child(self, child: _NL) -> None: ... + def __unicode__(self) -> str: ... + +class Leaf(Base): + lineno: int + column: int + value: str + fixers_applied: MutableSequence[BaseFix] + def __init__( + self, + type: int, + value: str, + context: _Context | None = None, + prefix: str | None = None, + fixers_applied: MutableSequence[BaseFix] = [], + ) -> None: ... + def _eq(self, other: Base) -> bool: ... + def clone(self) -> Leaf: ... + def post_order(self) -> Iterator[Self]: ... + def pre_order(self) -> Iterator[Self]: ... + def __unicode__(self) -> str: ... + +def convert(gr: Grammar, raw_node: _RawNode) -> _NL: ... + +class BasePattern: + type: int + content: str | None + name: str | None + def optimize(self) -> BasePattern: ... # sic, subclasses are free to optimize themselves into different patterns + def match(self, node: _NL, results: _Results | None = None) -> bool: ... + def match_seq(self, nodes: SupportsLenAndGetItem[_NL], results: _Results | None = None) -> bool: ... + def generate_matches(self, nodes: SupportsGetItem[int, _NL]) -> Iterator[tuple[int, _Results]]: ... + +class LeafPattern(BasePattern): + def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: ... + +class NodePattern(BasePattern): + wildcards: bool + def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: ... + +class WildcardPattern(BasePattern): + min: int + max: int + def __init__(self, content: str | None = None, min: int = 0, max: int = 0x7FFFFFFF, name: str | None = None) -> None: ... + +class NegatedPattern(BasePattern): + def __init__(self, content: str | None = None) -> None: ... + +def generate_matches( + patterns: SupportsGetItem[int | slice, BasePattern] | None, nodes: SupportsGetItem[int | slice, _NL] +) -> Iterator[tuple[int, _Results]]: ... 
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/refactor.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/refactor.pyi new file mode 100644 index 0000000..a7f3825 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lib2to3/refactor.pyi @@ -0,0 +1,82 @@ +from _typeshed import FileDescriptorOrPath, StrPath, SupportsGetItem +from collections.abc import Container, Generator, Iterable, Mapping +from logging import Logger, _ExcInfoType +from multiprocessing import JoinableQueue +from multiprocessing.synchronize import Lock +from typing import Any, ClassVar, Final, NoReturn, overload + +from .btm_matcher import BottomMatcher +from .fixer_base import BaseFix +from .pgen2.driver import Driver +from .pgen2.grammar import Grammar +from .pytree import Node + +def get_all_fix_names(fixer_pkg: str, remove_prefix: bool = True) -> list[str]: ... +def get_fixers_from_package(pkg_name: str) -> list[str]: ... + +class FixerError(Exception): ... + +class RefactoringTool: + CLASS_PREFIX: ClassVar[str] + FILE_PREFIX: ClassVar[str] + fixers: Iterable[str] + explicit: Container[str] + options: dict[str, Any] + grammar: Grammar + write_unchanged_files: bool + errors: list[tuple[str, Iterable[str], dict[str, _ExcInfoType]]] + logger: Logger + fixer_log: list[str] + wrote: bool + driver: Driver + pre_order: list[BaseFix] + post_order: list[BaseFix] + files: list[StrPath] + BM: BottomMatcher + bmi_pre_order: list[BaseFix] + bmi_post_order: list[BaseFix] + def __init__( + self, fixer_names: Iterable[str], options: Mapping[str, object] | None = None, explicit: Container[str] | None = None + ) -> None: ... + def get_fixers(self) -> tuple[list[BaseFix], list[BaseFix]]: ... + def log_error(self, msg: str, *args: Iterable[str], **kwargs: _ExcInfoType) -> NoReturn: ... + @overload + def log_message(self, msg: object) -> None: ... + @overload + def log_message(self, msg: str, *args: object) -> None: ... + @overload + def log_debug(self, msg: object) -> None: ... + @overload + def log_debug(self, msg: str, *args: object) -> None: ... + def print_output(self, old_text: str, new_text: str, filename: StrPath, equal: bool) -> None: ... + def refactor(self, items: Iterable[str], write: bool = False, doctests_only: bool = False) -> None: ... + def refactor_dir(self, dir_name: str, write: bool = False, doctests_only: bool = False) -> None: ... + def _read_python_source(self, filename: FileDescriptorOrPath) -> tuple[str, str]: ... + def refactor_file(self, filename: StrPath, write: bool = False, doctests_only: bool = False) -> None: ... + def refactor_string(self, data: str, name: str) -> Node | None: ... + def refactor_stdin(self, doctests_only: bool = False) -> None: ... + def refactor_tree(self, tree: Node, name: str) -> bool: ... + def traverse_by(self, fixers: SupportsGetItem[int, Iterable[BaseFix]] | None, traversal: Iterable[Node]) -> None: ... + def processed_file( + self, new_text: str, filename: StrPath, old_text: str | None = None, write: bool = False, encoding: str | None = None + ) -> None: ... + def write_file(self, new_text: str, filename: FileDescriptorOrPath, old_text: str, encoding: str | None = None) -> None: ... + PS1: Final = ">>> " + PS2: Final = "... " + def refactor_docstring(self, input: str, filename: StrPath) -> str: ... + def refactor_doctest(self, block: list[str], lineno: int, indent: int, filename: StrPath) -> list[str]: ... + def summarize(self) -> None: ... 
+ def parse_block(self, block: Iterable[str], lineno: int, indent: int) -> Node: ... + def wrap_toks( + self, block: Iterable[str], lineno: int, indent: int + ) -> Generator[tuple[int, str, tuple[int, int], tuple[int, int], str], None, None]: ... + def gen_lines(self, block: Iterable[str], indent: int) -> Generator[str, None, None]: ... + +class MultiprocessingUnsupported(Exception): ... + +class MultiprocessRefactoringTool(RefactoringTool): + queue: JoinableQueue[None | tuple[Iterable[str], bool | int]] | None + output_lock: Lock | None + def refactor( + self, items: Iterable[str], write: bool = False, doctests_only: bool = False, num_processes: int = 1 + ) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/linecache.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/linecache.pyi new file mode 100644 index 0000000..5379a21 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/linecache.pyi @@ -0,0 +1,19 @@ +from collections.abc import Callable +from typing import Any +from typing_extensions import TypeAlias + +__all__ = ["getline", "clearcache", "checkcache", "lazycache"] + +_ModuleGlobals: TypeAlias = dict[str, Any] +_ModuleMetadata: TypeAlias = tuple[int, float | None, list[str], str] + +_SourceLoader: TypeAlias = tuple[Callable[[], str | None]] + +cache: dict[str, _SourceLoader | _ModuleMetadata] # undocumented + +def getline(filename: str, lineno: int, module_globals: _ModuleGlobals | None = None) -> str: ... +def clearcache() -> None: ... +def getlines(filename: str, module_globals: _ModuleGlobals | None = None) -> list[str]: ... +def checkcache(filename: str | None = None) -> None: ... +def updatecache(filename: str, module_globals: _ModuleGlobals | None = None) -> list[str]: ... +def lazycache(filename: str, module_globals: _ModuleGlobals) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/locale.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/locale.pyi new file mode 100644 index 0000000..80c39a5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/locale.pyi @@ -0,0 +1,166 @@ +import sys +from _locale import ( + CHAR_MAX as CHAR_MAX, + LC_ALL as LC_ALL, + LC_COLLATE as LC_COLLATE, + LC_CTYPE as LC_CTYPE, + LC_MONETARY as LC_MONETARY, + LC_NUMERIC as LC_NUMERIC, + LC_TIME as LC_TIME, + localeconv as localeconv, + strcoll as strcoll, + strxfrm as strxfrm, +) + +# This module defines a function "str()", which is why "str" can't be used +# as a type annotation or type alias. 
+from builtins import str as _str +from collections.abc import Callable, Iterable +from decimal import Decimal +from typing import Any +from typing_extensions import deprecated + +if sys.version_info >= (3, 11): + from _locale import getencoding as getencoding + +# Some parts of the `_locale` module are platform-specific: +if sys.platform != "win32": + from _locale import ( + ABDAY_1 as ABDAY_1, + ABDAY_2 as ABDAY_2, + ABDAY_3 as ABDAY_3, + ABDAY_4 as ABDAY_4, + ABDAY_5 as ABDAY_5, + ABDAY_6 as ABDAY_6, + ABDAY_7 as ABDAY_7, + ABMON_1 as ABMON_1, + ABMON_2 as ABMON_2, + ABMON_3 as ABMON_3, + ABMON_4 as ABMON_4, + ABMON_5 as ABMON_5, + ABMON_6 as ABMON_6, + ABMON_7 as ABMON_7, + ABMON_8 as ABMON_8, + ABMON_9 as ABMON_9, + ABMON_10 as ABMON_10, + ABMON_11 as ABMON_11, + ABMON_12 as ABMON_12, + ALT_DIGITS as ALT_DIGITS, + AM_STR as AM_STR, + CODESET as CODESET, + CRNCYSTR as CRNCYSTR, + D_FMT as D_FMT, + D_T_FMT as D_T_FMT, + DAY_1 as DAY_1, + DAY_2 as DAY_2, + DAY_3 as DAY_3, + DAY_4 as DAY_4, + DAY_5 as DAY_5, + DAY_6 as DAY_6, + DAY_7 as DAY_7, + ERA as ERA, + ERA_D_FMT as ERA_D_FMT, + ERA_D_T_FMT as ERA_D_T_FMT, + ERA_T_FMT as ERA_T_FMT, + LC_MESSAGES as LC_MESSAGES, + MON_1 as MON_1, + MON_2 as MON_2, + MON_3 as MON_3, + MON_4 as MON_4, + MON_5 as MON_5, + MON_6 as MON_6, + MON_7 as MON_7, + MON_8 as MON_8, + MON_9 as MON_9, + MON_10 as MON_10, + MON_11 as MON_11, + MON_12 as MON_12, + NOEXPR as NOEXPR, + PM_STR as PM_STR, + RADIXCHAR as RADIXCHAR, + T_FMT as T_FMT, + T_FMT_AMPM as T_FMT_AMPM, + THOUSEP as THOUSEP, + YESEXPR as YESEXPR, + bind_textdomain_codeset as bind_textdomain_codeset, + bindtextdomain as bindtextdomain, + dcgettext as dcgettext, + dgettext as dgettext, + gettext as gettext, + nl_langinfo as nl_langinfo, + textdomain as textdomain, + ) + +__all__ = [ + "getlocale", + "getdefaultlocale", + "getpreferredencoding", + "Error", + "setlocale", + "localeconv", + "strcoll", + "strxfrm", + "str", + "atof", + "atoi", + "format_string", + "currency", + "normalize", + "LC_CTYPE", + "LC_COLLATE", + "LC_TIME", + "LC_MONETARY", + "LC_NUMERIC", + "LC_ALL", + "CHAR_MAX", +] + +if sys.version_info >= (3, 11): + __all__ += ["getencoding"] + +if sys.version_info < (3, 12): + __all__ += ["format"] + +if sys.version_info < (3, 13): + __all__ += ["resetlocale"] + +if sys.platform != "win32": + __all__ += ["LC_MESSAGES"] + +class Error(Exception): ... + +def getdefaultlocale( + envvars: tuple[_str, ...] = ("LC_ALL", "LC_CTYPE", "LANG", "LANGUAGE") +) -> tuple[_str | None, _str | None]: ... +def getlocale(category: int = ...) -> tuple[_str | None, _str | None]: ... +def setlocale(category: int, locale: _str | Iterable[_str | None] | None = None) -> _str: ... +def getpreferredencoding(do_setlocale: bool = True) -> _str: ... +def normalize(localename: _str) -> _str: ... + +if sys.version_info < (3, 13): + if sys.version_info >= (3, 11): + @deprecated("Deprecated since Python 3.11; removed in Python 3.13. Use `locale.setlocale(locale.LC_ALL, '')` instead.") + def resetlocale(category: int = ...) -> None: ... + else: + def resetlocale(category: int = ...) -> None: ... + +if sys.version_info < (3, 12): + @deprecated("Deprecated since Python 3.7; removed in Python 3.12. Use `locale.format_string()` instead.") + def format( + percent: _str, value: float | Decimal, grouping: bool = False, monetary: bool = False, *additional: Any + ) -> _str: ... + +def format_string(f: _str, val: Any, grouping: bool = False, monetary: bool = False) -> _str: ... 
+def currency(val: float | Decimal, symbol: bool = True, grouping: bool = False, international: bool = False) -> _str: ... +def delocalize(string: _str) -> _str: ... + +if sys.version_info >= (3, 10): + def localize(string: _str, grouping: bool = False, monetary: bool = False) -> _str: ... + +def atof(string: _str, func: Callable[[_str], float] = ...) -> float: ... +def atoi(string: _str) -> int: ... +def str(val: float) -> _str: ... + +locale_alias: dict[_str, _str] # undocumented +locale_encoding_alias: dict[_str, _str] # undocumented +windows_locale: dict[int, _str] # undocumented diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/logging/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/logging/__init__.pyi new file mode 100644 index 0000000..8248f82 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/logging/__init__.pyi @@ -0,0 +1,662 @@ +import sys +import threading +from _typeshed import StrPath, SupportsWrite +from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence +from io import TextIOWrapper +from re import Pattern +from string import Template +from time import struct_time +from types import FrameType, GenericAlias, TracebackType +from typing import Any, ClassVar, Final, Generic, Literal, Protocol, TextIO, TypeVar, overload, type_check_only +from typing_extensions import Self, TypeAlias, deprecated + +__all__ = [ + "BASIC_FORMAT", + "BufferingFormatter", + "CRITICAL", + "DEBUG", + "ERROR", + "FATAL", + "FileHandler", + "Filter", + "Formatter", + "Handler", + "INFO", + "LogRecord", + "Logger", + "LoggerAdapter", + "NOTSET", + "NullHandler", + "StreamHandler", + "WARN", + "WARNING", + "addLevelName", + "basicConfig", + "captureWarnings", + "critical", + "debug", + "disable", + "error", + "exception", + "fatal", + "getLevelName", + "getLogger", + "getLoggerClass", + "info", + "log", + "makeLogRecord", + "setLoggerClass", + "shutdown", + "warning", + "getLogRecordFactory", + "setLogRecordFactory", + "lastResort", + "raiseExceptions", + "warn", +] + +if sys.version_info >= (3, 11): + __all__ += ["getLevelNamesMapping"] +if sys.version_info >= (3, 12): + __all__ += ["getHandlerByName", "getHandlerNames"] + +_SysExcInfoType: TypeAlias = tuple[type[BaseException], BaseException, TracebackType | None] | tuple[None, None, None] +_ExcInfoType: TypeAlias = None | bool | _SysExcInfoType | BaseException +_ArgsType: TypeAlias = tuple[object, ...] | Mapping[str, object] +_Level: TypeAlias = int | str +_FormatStyle: TypeAlias = Literal["%", "{", "$"] + +if sys.version_info >= (3, 12): + @type_check_only + class _SupportsFilter(Protocol): + def filter(self, record: LogRecord, /) -> bool | LogRecord: ... + + _FilterType: TypeAlias = Filter | Callable[[LogRecord], bool | LogRecord] | _SupportsFilter +else: + @type_check_only + class _SupportsFilter(Protocol): + def filter(self, record: LogRecord, /) -> bool: ... + + _FilterType: TypeAlias = Filter | Callable[[LogRecord], bool] | _SupportsFilter + +raiseExceptions: bool +logThreads: bool +logMultiprocessing: bool +logProcesses: bool +_srcfile: str | None + +def currentframe() -> FrameType: ... + +_levelToName: dict[int, str] +_nameToLevel: dict[str, int] + +class Filterer: + filters: list[_FilterType] + def addFilter(self, filter: _FilterType) -> None: ... + def removeFilter(self, filter: _FilterType) -> None: ... + if sys.version_info >= (3, 12): + def filter(self, record: LogRecord) -> bool | LogRecord: ... 
+ else: + def filter(self, record: LogRecord) -> bool: ... + +class Manager: # undocumented + root: RootLogger + disable: int + emittedNoHandlerWarning: bool + loggerDict: dict[str, Logger | PlaceHolder] + loggerClass: type[Logger] | None + logRecordFactory: Callable[..., LogRecord] | None + def __init__(self, rootnode: RootLogger) -> None: ... + def getLogger(self, name: str) -> Logger: ... + def setLoggerClass(self, klass: type[Logger]) -> None: ... + def setLogRecordFactory(self, factory: Callable[..., LogRecord]) -> None: ... + +class Logger(Filterer): + name: str # undocumented + level: int # undocumented + parent: Logger | None # undocumented + propagate: bool + handlers: list[Handler] # undocumented + disabled: bool # undocumented + root: ClassVar[RootLogger] # undocumented + manager: Manager # undocumented + def __init__(self, name: str, level: _Level = 0) -> None: ... + def setLevel(self, level: _Level) -> None: ... + def isEnabledFor(self, level: int) -> bool: ... + def getEffectiveLevel(self) -> int: ... + def getChild(self, suffix: str) -> Self: ... # see python/typing#980 + if sys.version_info >= (3, 12): + def getChildren(self) -> set[Logger]: ... + + def debug( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def info( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def warning( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + @deprecated("Deprecated since Python 3.3. Use `Logger.warning()` instead.") + def warn( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def error( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def exception( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = True, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def critical( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def log( + self, + level: int, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def _log( + self, + level: int, + msg: object, + args: _ArgsType, + exc_info: _ExcInfoType | None = None, + extra: Mapping[str, object] | None = None, + stack_info: bool = False, + stacklevel: int = 1, + ) -> None: ... # undocumented + fatal = critical + def addHandler(self, hdlr: Handler) -> None: ... + def removeHandler(self, hdlr: Handler) -> None: ... + def findCaller(self, stack_info: bool = False, stacklevel: int = 1) -> tuple[str, int, str, str | None]: ... + def handle(self, record: LogRecord) -> None: ... 
+ def makeRecord( + self, + name: str, + level: int, + fn: str, + lno: int, + msg: object, + args: _ArgsType, + exc_info: _SysExcInfoType | None, + func: str | None = None, + extra: Mapping[str, object] | None = None, + sinfo: str | None = None, + ) -> LogRecord: ... + def hasHandlers(self) -> bool: ... + def callHandlers(self, record: LogRecord) -> None: ... # undocumented + +CRITICAL: Final = 50 +FATAL: Final = CRITICAL +ERROR: Final = 40 +WARNING: Final = 30 +WARN: Final = WARNING +INFO: Final = 20 +DEBUG: Final = 10 +NOTSET: Final = 0 + +class Handler(Filterer): + level: int # undocumented + formatter: Formatter | None # undocumented + lock: threading.Lock | None # undocumented + name: str | None # undocumented + def __init__(self, level: _Level = 0) -> None: ... + def get_name(self) -> str: ... # undocumented + def set_name(self, name: str) -> None: ... # undocumented + def createLock(self) -> None: ... + def acquire(self) -> None: ... + def release(self) -> None: ... + def setLevel(self, level: _Level) -> None: ... + def setFormatter(self, fmt: Formatter | None) -> None: ... + def flush(self) -> None: ... + def close(self) -> None: ... + def handle(self, record: LogRecord) -> bool: ... + def handleError(self, record: LogRecord) -> None: ... + def format(self, record: LogRecord) -> str: ... + def emit(self, record: LogRecord) -> None: ... + +if sys.version_info >= (3, 12): + def getHandlerByName(name: str) -> Handler | None: ... + def getHandlerNames() -> frozenset[str]: ... + +class Formatter: + converter: Callable[[float | None], struct_time] + _fmt: str | None # undocumented + datefmt: str | None # undocumented + _style: PercentStyle # undocumented + default_time_format: str + default_msec_format: str | None + + if sys.version_info >= (3, 10): + def __init__( + self, + fmt: str | None = None, + datefmt: str | None = None, + style: _FormatStyle = "%", + validate: bool = True, + *, + defaults: Mapping[str, Any] | None = None, + ) -> None: ... + else: + def __init__( + self, fmt: str | None = None, datefmt: str | None = None, style: _FormatStyle = "%", validate: bool = True + ) -> None: ... + + def format(self, record: LogRecord) -> str: ... + def formatTime(self, record: LogRecord, datefmt: str | None = None) -> str: ... + def formatException(self, ei: _SysExcInfoType) -> str: ... + def formatMessage(self, record: LogRecord) -> str: ... # undocumented + def formatStack(self, stack_info: str) -> str: ... + def usesTime(self) -> bool: ... # undocumented + +class BufferingFormatter: + linefmt: Formatter + def __init__(self, linefmt: Formatter | None = None) -> None: ... + def formatHeader(self, records: Sequence[LogRecord]) -> str: ... + def formatFooter(self, records: Sequence[LogRecord]) -> str: ... + def format(self, records: Sequence[LogRecord]) -> str: ... + +class Filter: + name: str # undocumented + nlen: int # undocumented + def __init__(self, name: str = "") -> None: ... + if sys.version_info >= (3, 12): + def filter(self, record: LogRecord) -> bool | LogRecord: ... + else: + def filter(self, record: LogRecord) -> bool: ... + +class LogRecord: + # args can be set to None by logging.handlers.QueueHandler + # (see https://bugs.python.org/issue44473) + args: _ArgsType | None + asctime: str + created: float + exc_info: _SysExcInfoType | None + exc_text: str | None + filename: str + funcName: str + levelname: str + levelno: int + lineno: int + module: str + msecs: float + # Only created when logging.Formatter.format is called. See #6132. 
+ message: str + msg: str | Any # The runtime accepts any object, but will be a str in 99% of cases + name: str + pathname: str + process: int | None + processName: str | None + relativeCreated: float + stack_info: str | None + thread: int | None + threadName: str | None + if sys.version_info >= (3, 12): + taskName: str | None + + def __init__( + self, + name: str, + level: int, + pathname: str, + lineno: int, + msg: object, + args: _ArgsType | None, + exc_info: _SysExcInfoType | None, + func: str | None = None, + sinfo: str | None = None, + ) -> None: ... + def getMessage(self) -> str: ... + # Allows setting contextual information on LogRecord objects as per the docs, see #7833 + def __setattr__(self, name: str, value: Any, /) -> None: ... + +_L = TypeVar("_L", bound=Logger | LoggerAdapter[Any]) + +class LoggerAdapter(Generic[_L]): + logger: _L + manager: Manager # undocumented + + if sys.version_info >= (3, 13): + def __init__(self, logger: _L, extra: Mapping[str, object] | None = None, merge_extra: bool = False) -> None: ... + elif sys.version_info >= (3, 10): + def __init__(self, logger: _L, extra: Mapping[str, object] | None = None) -> None: ... + else: + def __init__(self, logger: _L, extra: Mapping[str, object]) -> None: ... + + if sys.version_info >= (3, 10): + extra: Mapping[str, object] | None + else: + extra: Mapping[str, object] + + if sys.version_info >= (3, 13): + merge_extra: bool + + def process(self, msg: Any, kwargs: MutableMapping[str, Any]) -> tuple[Any, MutableMapping[str, Any]]: ... + def debug( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def info( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def warning( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + @deprecated("Deprecated since Python 3.3. Use `LoggerAdapter.warning()` instead.") + def warn( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def error( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def exception( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = True, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def critical( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def log( + self, + level: int, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def isEnabledFor(self, level: int) -> bool: ... + def getEffectiveLevel(self) -> int: ... + def setLevel(self, level: _Level) -> None: ... 
+ def hasHandlers(self) -> bool: ... + if sys.version_info >= (3, 11): + def _log( + self, + level: int, + msg: object, + args: _ArgsType, + *, + exc_info: _ExcInfoType | None = None, + extra: Mapping[str, object] | None = None, + stack_info: bool = False, + ) -> None: ... # undocumented + else: + def _log( + self, + level: int, + msg: object, + args: _ArgsType, + exc_info: _ExcInfoType | None = None, + extra: Mapping[str, object] | None = None, + stack_info: bool = False, + ) -> None: ... # undocumented + + @property + def name(self) -> str: ... # undocumented + if sys.version_info >= (3, 11): + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +def getLogger(name: str | None = None) -> Logger: ... +def getLoggerClass() -> type[Logger]: ... +def getLogRecordFactory() -> Callable[..., LogRecord]: ... +def debug( + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, +) -> None: ... +def info( + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, +) -> None: ... +def warning( + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, +) -> None: ... +@deprecated("Deprecated since Python 3.3. Use `warning()` instead.") +def warn( + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, +) -> None: ... +def error( + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, +) -> None: ... +def critical( + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, +) -> None: ... +def exception( + msg: object, + *args: object, + exc_info: _ExcInfoType = True, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, +) -> None: ... +def log( + level: int, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, +) -> None: ... + +fatal = critical + +def disable(level: int = 50) -> None: ... +def addLevelName(level: int, levelName: str) -> None: ... +@overload +def getLevelName(level: int) -> str: ... +@overload +@deprecated("The str -> int case is considered a mistake.") +def getLevelName(level: str) -> Any: ... + +if sys.version_info >= (3, 11): + def getLevelNamesMapping() -> dict[str, int]: ... + +def makeLogRecord(dict: Mapping[str, object]) -> LogRecord: ... +def basicConfig( + *, + filename: StrPath | None = ..., + filemode: str = ..., + format: str = ..., + datefmt: str | None = ..., + style: _FormatStyle = ..., + level: _Level | None = ..., + stream: SupportsWrite[str] | None = ..., + handlers: Iterable[Handler] | None = ..., + force: bool | None = ..., + encoding: str | None = ..., + errors: str | None = ..., +) -> None: ... +def shutdown(handlerList: Sequence[Any] = ...) -> None: ... # handlerList is undocumented +def setLoggerClass(klass: type[Logger]) -> None: ... +def captureWarnings(capture: bool) -> None: ... +def setLogRecordFactory(factory: Callable[..., LogRecord]) -> None: ... 
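The module-level helpers above mirror the Logger methods; a minimal sketch of typical use (illustrative only; the logger name is a hypothetical example):

    import logging

    logging.basicConfig(level=logging.INFO, format="%(levelname)s:%(name)s:%(message)s")
    log = logging.getLogger("fusionagi.worker")             # hypothetical logger name
    log.info("processed %d tasks", 3)
    log.warning("queue backlog", extra={"host": "node-1"})  # extra keys become LogRecord attributes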
+ +lastResort: Handler | None + +_StreamT = TypeVar("_StreamT", bound=SupportsWrite[str]) + +class StreamHandler(Handler, Generic[_StreamT]): + stream: _StreamT # undocumented + terminator: str + @overload + def __init__(self: StreamHandler[TextIO], stream: None = None) -> None: ... + @overload + def __init__(self: StreamHandler[_StreamT], stream: _StreamT) -> None: ... # pyright: ignore[reportInvalidTypeVarUse] #11780 + def setStream(self, stream: _StreamT) -> _StreamT | None: ... + if sys.version_info >= (3, 11): + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +class FileHandler(StreamHandler[TextIOWrapper]): + baseFilename: str # undocumented + mode: str # undocumented + encoding: str | None # undocumented + delay: bool # undocumented + errors: str | None # undocumented + def __init__( + self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False, errors: str | None = None + ) -> None: ... + def _open(self) -> TextIOWrapper: ... # undocumented + +class NullHandler(Handler): ... + +class PlaceHolder: # undocumented + loggerMap: dict[Logger, None] + def __init__(self, alogger: Logger) -> None: ... + def append(self, alogger: Logger) -> None: ... + +# Below aren't in module docs but still visible + +class RootLogger(Logger): + def __init__(self, level: int) -> None: ... + +root: RootLogger + +class PercentStyle: # undocumented + default_format: str + asctime_format: str + asctime_search: str + validation_pattern: Pattern[str] + _fmt: str + if sys.version_info >= (3, 10): + def __init__(self, fmt: str, *, defaults: Mapping[str, Any] | None = None) -> None: ... + else: + def __init__(self, fmt: str) -> None: ... + + def usesTime(self) -> bool: ... + def validate(self) -> None: ... + def format(self, record: Any) -> str: ... + +class StrFormatStyle(PercentStyle): # undocumented + fmt_spec: Pattern[str] + field_spec: Pattern[str] + +class StringTemplateStyle(PercentStyle): # undocumented + _tpl: Template + +_STYLES: Final[dict[str, tuple[PercentStyle, str]]] + +BASIC_FORMAT: Final = "%(levelname)s:%(name)s:%(message)s" diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/logging/config.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/logging/config.pyi new file mode 100644 index 0000000..72412dd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/logging/config.pyi @@ -0,0 +1,150 @@ +import sys +from _typeshed import StrOrBytesPath +from collections.abc import Callable, Hashable, Iterable, Mapping, Sequence +from configparser import RawConfigParser +from re import Pattern +from threading import Thread +from typing import IO, Any, Final, Literal, SupportsIndex, TypedDict, overload, type_check_only +from typing_extensions import Required, TypeAlias, disjoint_base + +from . 
import Filter, Filterer, Formatter, Handler, Logger, _FilterType, _FormatStyle, _Level + +DEFAULT_LOGGING_CONFIG_PORT: Final = 9030 +RESET_ERROR: Final[int] # undocumented +IDENTIFIER: Final[Pattern[str]] # undocumented + +if sys.version_info >= (3, 11): + @type_check_only + class _RootLoggerConfiguration(TypedDict, total=False): + level: _Level + filters: Sequence[str | _FilterType] + handlers: Sequence[str] + +else: + @type_check_only + class _RootLoggerConfiguration(TypedDict, total=False): + level: _Level + filters: Sequence[str] + handlers: Sequence[str] + +@type_check_only +class _LoggerConfiguration(_RootLoggerConfiguration, TypedDict, total=False): + propagate: bool + +_FormatterConfigurationTypedDict = TypedDict( + "_FormatterConfigurationTypedDict", {"class": str, "format": str, "datefmt": str, "style": _FormatStyle}, total=False +) + +@type_check_only +class _FilterConfigurationTypedDict(TypedDict): + name: str + +# Formatter and filter configs can specify custom factories via the special `()` key. +# If that is the case, the dictionary can contain any additional keys +# https://docs.python.org/3/library/logging.config.html#user-defined-objects +_FormatterConfiguration: TypeAlias = _FormatterConfigurationTypedDict | dict[str, Any] +_FilterConfiguration: TypeAlias = _FilterConfigurationTypedDict | dict[str, Any] +# Handler config can have additional keys even when not providing a custom factory so we just use `dict`. +_HandlerConfiguration: TypeAlias = dict[str, Any] + +@type_check_only +class _DictConfigArgs(TypedDict, total=False): + version: Required[Literal[1]] + formatters: dict[str, _FormatterConfiguration] + filters: dict[str, _FilterConfiguration] + handlers: dict[str, _HandlerConfiguration] + loggers: dict[str, _LoggerConfiguration] + root: _RootLoggerConfiguration + incremental: bool + disable_existing_loggers: bool + +# Accept dict[str, Any] to avoid false positives if called with a dict +# type, since dict types are not compatible with TypedDicts. +# +# Also accept a TypedDict type, to allow callers to use TypedDict +# types, and for somewhat stricter type checking of dict literals. +def dictConfig(config: _DictConfigArgs | dict[str, Any]) -> None: ... + +if sys.version_info >= (3, 10): + def fileConfig( + fname: StrOrBytesPath | IO[str] | RawConfigParser, + defaults: Mapping[str, str] | None = None, + disable_existing_loggers: bool = True, + encoding: str | None = None, + ) -> None: ... + +else: + def fileConfig( + fname: StrOrBytesPath | IO[str] | RawConfigParser, + defaults: Mapping[str, str] | None = None, + disable_existing_loggers: bool = True, + ) -> None: ... + +def valid_ident(s: str) -> Literal[True]: ... # undocumented +def listen(port: int = 9030, verify: Callable[[bytes], bytes | None] | None = None) -> Thread: ... +def stopListening() -> None: ... + +class ConvertingMixin: # undocumented + def convert_with_key(self, key: Any, value: Any, replace: bool = True) -> Any: ... + def convert(self, value: Any) -> Any: ... + +class ConvertingDict(dict[Hashable, Any], ConvertingMixin): # undocumented + def __getitem__(self, key: Hashable) -> Any: ... + def get(self, key: Hashable, default: Any = None) -> Any: ... + def pop(self, key: Hashable, default: Any = None) -> Any: ... + +class ConvertingList(list[Any], ConvertingMixin): # undocumented + @overload + def __getitem__(self, key: SupportsIndex) -> Any: ... + @overload + def __getitem__(self, key: slice) -> Any: ... + def pop(self, idx: SupportsIndex = -1) -> Any: ... 
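The _DictConfigArgs TypedDict above describes the schema accepted by dictConfig(); a minimal sketch of a matching config dict (illustrative only, not part of the vendored stub):

    import logging.config

    logging.config.dictConfig({
        "version": 1,                               # required, must be 1
        "formatters": {"plain": {"format": "%(levelname)s %(name)s: %(message)s"}},
        "handlers": {"console": {"class": "logging.StreamHandler", "formatter": "plain"}},
        "root": {"level": "INFO", "handlers": ["console"]},
        "disable_existing_loggers": False,
    })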
+ +if sys.version_info >= (3, 12): + class ConvertingTuple(tuple[Any, ...], ConvertingMixin): # undocumented + @overload + def __getitem__(self, key: SupportsIndex) -> Any: ... + @overload + def __getitem__(self, key: slice) -> Any: ... + +else: + @disjoint_base + class ConvertingTuple(tuple[Any, ...], ConvertingMixin): # undocumented + @overload + def __getitem__(self, key: SupportsIndex) -> Any: ... + @overload + def __getitem__(self, key: slice) -> Any: ... + +class BaseConfigurator: + CONVERT_PATTERN: Pattern[str] + WORD_PATTERN: Pattern[str] + DOT_PATTERN: Pattern[str] + INDEX_PATTERN: Pattern[str] + DIGIT_PATTERN: Pattern[str] + value_converters: dict[str, str] + importer: Callable[..., Any] + + config: dict[str, Any] # undocumented + + def __init__(self, config: _DictConfigArgs | dict[str, Any]) -> None: ... + def resolve(self, s: str) -> Any: ... + def ext_convert(self, value: str) -> Any: ... + def cfg_convert(self, value: str) -> Any: ... + def convert(self, value: Any) -> Any: ... + def configure_custom(self, config: dict[str, Any]) -> Any: ... + def as_tuple(self, value: list[Any] | tuple[Any, ...]) -> tuple[Any, ...]: ... + +class DictConfigurator(BaseConfigurator): + def configure(self) -> None: ... # undocumented + def configure_formatter(self, config: _FormatterConfiguration) -> Formatter | Any: ... # undocumented + def configure_filter(self, config: _FilterConfiguration) -> Filter | Any: ... # undocumented + def add_filters(self, filterer: Filterer, filters: Iterable[_FilterType]) -> None: ... # undocumented + def configure_handler(self, config: _HandlerConfiguration) -> Handler | Any: ... # undocumented + def add_handlers(self, logger: Logger, handlers: Iterable[str]) -> None: ... # undocumented + def common_logger_config( + self, logger: Logger, config: _LoggerConfiguration, incremental: bool = False + ) -> None: ... # undocumented + def configure_logger(self, name: str, config: _LoggerConfiguration, incremental: bool = False) -> None: ... # undocumented + def configure_root(self, config: _LoggerConfiguration, incremental: bool = False) -> None: ... # undocumented + +dictConfigClass = DictConfigurator diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/logging/handlers.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/logging/handlers.pyi new file mode 100644 index 0000000..535f1c6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/logging/handlers.pyi @@ -0,0 +1,258 @@ +import datetime +import http.client +import ssl +import sys +from _typeshed import ReadableBuffer, StrPath +from collections.abc import Callable +from logging import FileHandler, Handler, LogRecord +from re import Pattern +from socket import SocketKind, socket +from threading import Thread +from types import TracebackType +from typing import Any, ClassVar, Final, Protocol, TypeVar, type_check_only +from typing_extensions import Self + +_T = TypeVar("_T") + +DEFAULT_TCP_LOGGING_PORT: Final = 9020 +DEFAULT_UDP_LOGGING_PORT: Final = 9021 +DEFAULT_HTTP_LOGGING_PORT: Final = 9022 +DEFAULT_SOAP_LOGGING_PORT: Final = 9023 +SYSLOG_UDP_PORT: Final = 514 +SYSLOG_TCP_PORT: Final = 514 + +class WatchedFileHandler(FileHandler): + dev: int # undocumented + ino: int # undocumented + def __init__( + self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False, errors: str | None = None + ) -> None: ... + def _statstream(self) -> None: ... # undocumented + def reopenIfNeeded(self) -> None: ... 
+ +class BaseRotatingHandler(FileHandler): + namer: Callable[[str], str] | None + rotator: Callable[[str, str], None] | None + def __init__( + self, filename: StrPath, mode: str, encoding: str | None = None, delay: bool = False, errors: str | None = None + ) -> None: ... + def rotation_filename(self, default_name: str) -> str: ... + def rotate(self, source: str, dest: str) -> None: ... + +class RotatingFileHandler(BaseRotatingHandler): + maxBytes: int # undocumented + backupCount: int # undocumented + def __init__( + self, + filename: StrPath, + mode: str = "a", + maxBytes: int = 0, + backupCount: int = 0, + encoding: str | None = None, + delay: bool = False, + errors: str | None = None, + ) -> None: ... + def doRollover(self) -> None: ... + def shouldRollover(self, record: LogRecord) -> int: ... # undocumented + +class TimedRotatingFileHandler(BaseRotatingHandler): + when: str # undocumented + backupCount: int # undocumented + utc: bool # undocumented + atTime: datetime.time | None # undocumented + interval: int # undocumented + suffix: str # undocumented + dayOfWeek: int # undocumented + rolloverAt: int # undocumented + extMatch: Pattern[str] # undocumented + def __init__( + self, + filename: StrPath, + when: str = "h", + interval: int = 1, + backupCount: int = 0, + encoding: str | None = None, + delay: bool = False, + utc: bool = False, + atTime: datetime.time | None = None, + errors: str | None = None, + ) -> None: ... + def doRollover(self) -> None: ... + def shouldRollover(self, record: LogRecord) -> int: ... # undocumented + def computeRollover(self, currentTime: int) -> int: ... # undocumented + def getFilesToDelete(self) -> list[str]: ... # undocumented + +class SocketHandler(Handler): + host: str # undocumented + port: int | None # undocumented + address: tuple[str, int] | str # undocumented + sock: socket | None # undocumented + closeOnError: bool # undocumented + retryTime: float | None # undocumented + retryStart: float # undocumented + retryFactor: float # undocumented + retryMax: float # undocumented + def __init__(self, host: str, port: int | None) -> None: ... + def makeSocket(self, timeout: float = 1) -> socket: ... # timeout is undocumented + def makePickle(self, record: LogRecord) -> bytes: ... + def send(self, s: ReadableBuffer) -> None: ... + def createSocket(self) -> None: ... + +class DatagramHandler(SocketHandler): + def makeSocket(self) -> socket: ... 
# type: ignore[override] + +class SysLogHandler(Handler): + LOG_EMERG: int + LOG_ALERT: int + LOG_CRIT: int + LOG_ERR: int + LOG_WARNING: int + LOG_NOTICE: int + LOG_INFO: int + LOG_DEBUG: int + + LOG_KERN: int + LOG_USER: int + LOG_MAIL: int + LOG_DAEMON: int + LOG_AUTH: int + LOG_SYSLOG: int + LOG_LPR: int + LOG_NEWS: int + LOG_UUCP: int + LOG_CRON: int + LOG_AUTHPRIV: int + LOG_FTP: int + LOG_NTP: int + LOG_SECURITY: int + LOG_CONSOLE: int + LOG_SOLCRON: int + LOG_LOCAL0: int + LOG_LOCAL1: int + LOG_LOCAL2: int + LOG_LOCAL3: int + LOG_LOCAL4: int + LOG_LOCAL5: int + LOG_LOCAL6: int + LOG_LOCAL7: int + address: tuple[str, int] | str # undocumented + unixsocket: bool # undocumented + socktype: SocketKind # undocumented + ident: str # undocumented + append_nul: bool # undocumented + facility: int # undocumented + priority_names: ClassVar[dict[str, int]] # undocumented + facility_names: ClassVar[dict[str, int]] # undocumented + priority_map: ClassVar[dict[str, str]] # undocumented + if sys.version_info >= (3, 14): + timeout: float | None + def __init__( + self, + address: tuple[str, int] | str = ("localhost", 514), + facility: str | int = 1, + socktype: SocketKind | None = None, + timeout: float | None = None, + ) -> None: ... + else: + def __init__( + self, address: tuple[str, int] | str = ("localhost", 514), facility: str | int = 1, socktype: SocketKind | None = None + ) -> None: ... + if sys.version_info >= (3, 11): + def createSocket(self) -> None: ... + + def encodePriority(self, facility: int | str, priority: int | str) -> int: ... + def mapPriority(self, levelName: str) -> str: ... + +class NTEventLogHandler(Handler): + def __init__(self, appname: str, dllname: str | None = None, logtype: str = "Application") -> None: ... + def getEventCategory(self, record: LogRecord) -> int: ... + # TODO: correct return value? + def getEventType(self, record: LogRecord) -> int: ... + def getMessageID(self, record: LogRecord) -> int: ... + +class SMTPHandler(Handler): + mailhost: str # undocumented + mailport: int | None # undocumented + username: str | None # undocumented + # password only exists as an attribute if passed credentials is a tuple or list + password: str # undocumented + fromaddr: str # undocumented + toaddrs: list[str] # undocumented + subject: str # undocumented + secure: tuple[()] | tuple[str] | tuple[str, str] | None # undocumented + timeout: float # undocumented + def __init__( + self, + mailhost: str | tuple[str, int], + fromaddr: str, + toaddrs: str | list[str], + subject: str, + credentials: tuple[str, str] | None = None, + secure: tuple[()] | tuple[str] | tuple[str, str] | None = None, + timeout: float = 5.0, + ) -> None: ... + def getSubject(self, record: LogRecord) -> str: ... + +class BufferingHandler(Handler): + capacity: int # undocumented + buffer: list[LogRecord] # undocumented + def __init__(self, capacity: int) -> None: ... + def shouldFlush(self, record: LogRecord) -> bool: ... + +class MemoryHandler(BufferingHandler): + flushLevel: int # undocumented + target: Handler | None # undocumented + flushOnClose: bool # undocumented + def __init__(self, capacity: int, flushLevel: int = 40, target: Handler | None = None, flushOnClose: bool = True) -> None: ... + def setTarget(self, target: Handler | None) -> None: ... 
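MemoryHandler, declared just above, buffers records and forwards them to a target handler; a minimal sketch (illustrative only; the log file path is a placeholder):

    import logging
    from logging.handlers import MemoryHandler

    target = logging.FileHandler("app.log")                                   # placeholder path
    buffered = MemoryHandler(capacity=100, flushLevel=logging.ERROR, target=target)
    logging.getLogger().addHandler(buffered)
    # Records accumulate in memory and are flushed to the file once 100 records
    # are buffered or a record at ERROR level or above arrives.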
+ +class HTTPHandler(Handler): + host: str # undocumented + url: str # undocumented + method: str # undocumented + secure: bool # undocumented + credentials: tuple[str, str] | None # undocumented + context: ssl.SSLContext | None # undocumented + def __init__( + self, + host: str, + url: str, + method: str = "GET", + secure: bool = False, + credentials: tuple[str, str] | None = None, + context: ssl.SSLContext | None = None, + ) -> None: ... + def mapLogRecord(self, record: LogRecord) -> dict[str, Any]: ... + def getConnection(self, host: str, secure: bool) -> http.client.HTTPConnection: ... # undocumented + +@type_check_only +class _QueueLike(Protocol[_T]): + def get(self) -> _T: ... + def put_nowait(self, item: _T, /) -> None: ... + +class QueueHandler(Handler): + queue: _QueueLike[Any] + def __init__(self, queue: _QueueLike[Any]) -> None: ... + def prepare(self, record: LogRecord) -> Any: ... + def enqueue(self, record: LogRecord) -> None: ... + if sys.version_info >= (3, 12): + listener: QueueListener | None + +class QueueListener: + handlers: tuple[Handler, ...] # undocumented + respect_handler_level: bool # undocumented + queue: _QueueLike[Any] # undocumented + _thread: Thread | None # undocumented + def __init__(self, queue: _QueueLike[Any], *handlers: Handler, respect_handler_level: bool = False) -> None: ... + def dequeue(self, block: bool) -> LogRecord: ... + def prepare(self, record: LogRecord) -> Any: ... + def start(self) -> None: ... + def stop(self) -> None: ... + def enqueue_sentinel(self) -> None: ... + def handle(self, record: LogRecord) -> None: ... + + if sys.version_info >= (3, 14): + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... 
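QueueHandler and QueueListener, declared at the end of this stub, pair a non-blocking producer with a consumer thread; a minimal sketch (illustrative only, not part of the vendored file):

    import logging
    import queue
    from logging.handlers import QueueHandler, QueueListener

    q: queue.Queue = queue.Queue()
    listener = QueueListener(q, logging.StreamHandler(), respect_handler_level=True)
    listener.start()

    logging.getLogger().addHandler(QueueHandler(q))   # producers only enqueue records
    logging.getLogger().error("handled on the listener thread")
    listener.stop()                                    # drains the queue and joins the thread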
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lzma.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lzma.pyi new file mode 100644 index 0000000..b7ef607 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/lzma.pyi @@ -0,0 +1,180 @@ +import sys +from _lzma import ( + CHECK_CRC32 as CHECK_CRC32, + CHECK_CRC64 as CHECK_CRC64, + CHECK_ID_MAX as CHECK_ID_MAX, + CHECK_NONE as CHECK_NONE, + CHECK_SHA256 as CHECK_SHA256, + CHECK_UNKNOWN as CHECK_UNKNOWN, + FILTER_ARM as FILTER_ARM, + FILTER_ARMTHUMB as FILTER_ARMTHUMB, + FILTER_DELTA as FILTER_DELTA, + FILTER_IA64 as FILTER_IA64, + FILTER_LZMA1 as FILTER_LZMA1, + FILTER_LZMA2 as FILTER_LZMA2, + FILTER_POWERPC as FILTER_POWERPC, + FILTER_SPARC as FILTER_SPARC, + FILTER_X86 as FILTER_X86, + FORMAT_ALONE as FORMAT_ALONE, + FORMAT_AUTO as FORMAT_AUTO, + FORMAT_RAW as FORMAT_RAW, + FORMAT_XZ as FORMAT_XZ, + MF_BT2 as MF_BT2, + MF_BT3 as MF_BT3, + MF_BT4 as MF_BT4, + MF_HC3 as MF_HC3, + MF_HC4 as MF_HC4, + MODE_FAST as MODE_FAST, + MODE_NORMAL as MODE_NORMAL, + PRESET_DEFAULT as PRESET_DEFAULT, + PRESET_EXTREME as PRESET_EXTREME, + LZMACompressor as LZMACompressor, + LZMADecompressor as LZMADecompressor, + LZMAError as LZMAError, + _FilterChain, + is_check_supported as is_check_supported, +) +from _typeshed import ReadableBuffer, StrOrBytesPath +from io import TextIOWrapper +from typing import IO, Literal, overload +from typing_extensions import Self, TypeAlias + +if sys.version_info >= (3, 14): + from compression._common._streams import BaseStream +else: + from _compression import BaseStream + +__all__ = [ + "CHECK_NONE", + "CHECK_CRC32", + "CHECK_CRC64", + "CHECK_SHA256", + "CHECK_ID_MAX", + "CHECK_UNKNOWN", + "FILTER_LZMA1", + "FILTER_LZMA2", + "FILTER_DELTA", + "FILTER_X86", + "FILTER_IA64", + "FILTER_ARM", + "FILTER_ARMTHUMB", + "FILTER_POWERPC", + "FILTER_SPARC", + "FORMAT_AUTO", + "FORMAT_XZ", + "FORMAT_ALONE", + "FORMAT_RAW", + "MF_HC3", + "MF_HC4", + "MF_BT2", + "MF_BT3", + "MF_BT4", + "MODE_FAST", + "MODE_NORMAL", + "PRESET_DEFAULT", + "PRESET_EXTREME", + "LZMACompressor", + "LZMADecompressor", + "LZMAFile", + "LZMAError", + "open", + "compress", + "decompress", + "is_check_supported", +] + +_OpenBinaryWritingMode: TypeAlias = Literal["w", "wb", "x", "xb", "a", "ab"] +_OpenTextWritingMode: TypeAlias = Literal["wt", "xt", "at"] + +_PathOrFile: TypeAlias = StrOrBytesPath | IO[bytes] + +class LZMAFile(BaseStream, IO[bytes]): # type: ignore[misc] # incompatible definitions of writelines in the base classes + def __init__( + self, + filename: _PathOrFile | None = None, + mode: str = "r", + *, + format: int | None = None, + check: int = -1, + preset: int | None = None, + filters: _FilterChain | None = None, + ) -> None: ... + def __enter__(self) -> Self: ... + def peek(self, size: int = -1) -> bytes: ... + def read(self, size: int | None = -1) -> bytes: ... + def read1(self, size: int = -1) -> bytes: ... + def readline(self, size: int | None = -1) -> bytes: ... + def write(self, data: ReadableBuffer) -> int: ... + def seek(self, offset: int, whence: int = 0) -> int: ... + +@overload +def open( + filename: _PathOrFile, + mode: Literal["r", "rb"] = "rb", + *, + format: int | None = None, + check: Literal[-1] = -1, + preset: None = None, + filters: _FilterChain | None = None, + encoding: None = None, + errors: None = None, + newline: None = None, +) -> LZMAFile: ... 
+@overload +def open( + filename: _PathOrFile, + mode: _OpenBinaryWritingMode, + *, + format: int | None = None, + check: int = -1, + preset: int | None = None, + filters: _FilterChain | None = None, + encoding: None = None, + errors: None = None, + newline: None = None, +) -> LZMAFile: ... +@overload +def open( + filename: StrOrBytesPath, + mode: Literal["rt"], + *, + format: int | None = None, + check: Literal[-1] = -1, + preset: None = None, + filters: _FilterChain | None = None, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> TextIOWrapper: ... +@overload +def open( + filename: StrOrBytesPath, + mode: _OpenTextWritingMode, + *, + format: int | None = None, + check: int = -1, + preset: int | None = None, + filters: _FilterChain | None = None, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> TextIOWrapper: ... +@overload +def open( + filename: _PathOrFile, + mode: str, + *, + format: int | None = None, + check: int = -1, + preset: int | None = None, + filters: _FilterChain | None = None, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> LZMAFile | TextIOWrapper: ... +def compress( + data: ReadableBuffer, format: int = 1, check: int = -1, preset: int | None = None, filters: _FilterChain | None = None +) -> bytes: ... +def decompress( + data: ReadableBuffer, format: int = 0, memlimit: int | None = None, filters: _FilterChain | None = None +) -> bytes: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/mailbox.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/mailbox.pyi new file mode 100644 index 0000000..89bd998 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/mailbox.pyi @@ -0,0 +1,262 @@ +import email.message +import io +import sys +from _typeshed import StrPath, SupportsNoArgReadline, SupportsRead +from abc import ABCMeta, abstractmethod +from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence +from email._policybase import _MessageT +from types import GenericAlias, TracebackType +from typing import IO, Any, AnyStr, Generic, Literal, Protocol, TypeVar, overload, type_check_only +from typing_extensions import Self, TypeAlias + +__all__ = [ + "Mailbox", + "Maildir", + "mbox", + "MH", + "Babyl", + "MMDF", + "Message", + "MaildirMessage", + "mboxMessage", + "MHMessage", + "BabylMessage", + "MMDFMessage", + "Error", + "NoSuchMailboxError", + "NotEmptyError", + "ExternalClashError", + "FormatError", +] + +_T = TypeVar("_T") + +@type_check_only +class _SupportsReadAndReadline(SupportsRead[bytes], SupportsNoArgReadline[bytes], Protocol): ... + +_MessageData: TypeAlias = email.message.Message | bytes | str | io.StringIO | _SupportsReadAndReadline + +@type_check_only +class _HasIteritems(Protocol): + def iteritems(self) -> Iterator[tuple[str, _MessageData]]: ... + +@type_check_only +class _HasItems(Protocol): + def items(self) -> Iterator[tuple[str, _MessageData]]: ... + +linesep: bytes + +class Mailbox(Generic[_MessageT]): + _path: str # undocumented + _factory: Callable[[IO[Any]], _MessageT] | None # undocumented + @overload + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], _MessageT], create: bool = True) -> None: ... + @overload + def __init__(self, path: StrPath, factory: None = None, create: bool = True) -> None: ... + @abstractmethod + def add(self, message: _MessageData) -> str: ... + @abstractmethod + def remove(self, key: str) -> None: ... 
+ def __delitem__(self, key: str) -> None: ... + def discard(self, key: str) -> None: ... + @abstractmethod + def __setitem__(self, key: str, message: _MessageData) -> None: ... + @overload + def get(self, key: str, default: None = None) -> _MessageT | None: ... + @overload + def get(self, key: str, default: _T) -> _MessageT | _T: ... + def __getitem__(self, key: str) -> _MessageT: ... + @abstractmethod + def get_message(self, key: str) -> _MessageT: ... + def get_string(self, key: str) -> str: ... + @abstractmethod + def get_bytes(self, key: str) -> bytes: ... + # As '_ProxyFile' doesn't implement the full IO spec, and BytesIO is incompatible with it, get_file return is Any here + @abstractmethod + def get_file(self, key: str) -> Any: ... + @abstractmethod + def iterkeys(self) -> Iterator[str]: ... + def keys(self) -> list[str]: ... + def itervalues(self) -> Iterator[_MessageT]: ... + def __iter__(self) -> Iterator[_MessageT]: ... + def values(self) -> list[_MessageT]: ... + def iteritems(self) -> Iterator[tuple[str, _MessageT]]: ... + def items(self) -> list[tuple[str, _MessageT]]: ... + @abstractmethod + def __contains__(self, key: str) -> bool: ... + @abstractmethod + def __len__(self) -> int: ... + def clear(self) -> None: ... + @overload + def pop(self, key: str, default: None = None) -> _MessageT | None: ... + @overload + def pop(self, key: str, default: _T) -> _MessageT | _T: ... + def popitem(self) -> tuple[str, _MessageT]: ... + def update(self, arg: _HasIteritems | _HasItems | Iterable[tuple[str, _MessageData]] | None = None) -> None: ... + @abstractmethod + def flush(self) -> None: ... + @abstractmethod + def lock(self) -> None: ... + @abstractmethod + def unlock(self) -> None: ... + @abstractmethod + def close(self) -> None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +class Maildir(Mailbox[MaildirMessage]): + colon: str + def __init__( + self, dirname: StrPath, factory: Callable[[IO[Any]], MaildirMessage] | None = None, create: bool = True + ) -> None: ... + def add(self, message: _MessageData) -> str: ... + def remove(self, key: str) -> None: ... + def __setitem__(self, key: str, message: _MessageData) -> None: ... + def get_message(self, key: str) -> MaildirMessage: ... + def get_bytes(self, key: str) -> bytes: ... + def get_file(self, key: str) -> _ProxyFile[bytes]: ... + if sys.version_info >= (3, 13): + def get_info(self, key: str) -> str: ... + def set_info(self, key: str, info: str) -> None: ... + def get_flags(self, key: str) -> str: ... + def set_flags(self, key: str, flags: str) -> None: ... + def add_flag(self, key: str, flag: str) -> None: ... + def remove_flag(self, key: str, flag: str) -> None: ... + + def iterkeys(self) -> Iterator[str]: ... + def __contains__(self, key: str) -> bool: ... + def __len__(self) -> int: ... + def flush(self) -> None: ... + def lock(self) -> None: ... + def unlock(self) -> None: ... + def close(self) -> None: ... + def list_folders(self) -> list[str]: ... + def get_folder(self, folder: str) -> Maildir: ... + def add_folder(self, folder: str) -> Maildir: ... + def remove_folder(self, folder: str) -> None: ... + def clean(self) -> None: ... + def next(self) -> str | None: ... + +class _singlefileMailbox(Mailbox[_MessageT], metaclass=ABCMeta): + def add(self, message: _MessageData) -> str: ... + def remove(self, key: str) -> None: ... + def __setitem__(self, key: str, message: _MessageData) -> None: ... + def iterkeys(self) -> Iterator[str]: ... + def __contains__(self, key: str) -> bool: ... 
+ def __len__(self) -> int: ... + def lock(self) -> None: ... + def unlock(self) -> None: ... + def flush(self) -> None: ... + def close(self) -> None: ... + +class _mboxMMDF(_singlefileMailbox[_MessageT]): + def get_message(self, key: str) -> _MessageT: ... + def get_file(self, key: str, from_: bool = False) -> _PartialFile[bytes]: ... + def get_bytes(self, key: str, from_: bool = False) -> bytes: ... + def get_string(self, key: str, from_: bool = False) -> str: ... + +class mbox(_mboxMMDF[mboxMessage]): + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], mboxMessage] | None = None, create: bool = True) -> None: ... + +class MMDF(_mboxMMDF[MMDFMessage]): + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MMDFMessage] | None = None, create: bool = True) -> None: ... + +class MH(Mailbox[MHMessage]): + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MHMessage] | None = None, create: bool = True) -> None: ... + def add(self, message: _MessageData) -> str: ... + def remove(self, key: str) -> None: ... + def __setitem__(self, key: str, message: _MessageData) -> None: ... + def get_message(self, key: str) -> MHMessage: ... + def get_bytes(self, key: str) -> bytes: ... + def get_file(self, key: str) -> _ProxyFile[bytes]: ... + def iterkeys(self) -> Iterator[str]: ... + def __contains__(self, key: str) -> bool: ... + def __len__(self) -> int: ... + def flush(self) -> None: ... + def lock(self) -> None: ... + def unlock(self) -> None: ... + def close(self) -> None: ... + def list_folders(self) -> list[str]: ... + def get_folder(self, folder: StrPath) -> MH: ... + def add_folder(self, folder: StrPath) -> MH: ... + def remove_folder(self, folder: StrPath) -> None: ... + def get_sequences(self) -> dict[str, list[int]]: ... + def set_sequences(self, sequences: Mapping[str, Sequence[int]]) -> None: ... + def pack(self) -> None: ... + +class Babyl(_singlefileMailbox[BabylMessage]): + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], BabylMessage] | None = None, create: bool = True) -> None: ... + def get_message(self, key: str) -> BabylMessage: ... + def get_bytes(self, key: str) -> bytes: ... + def get_file(self, key: str) -> IO[bytes]: ... + def get_labels(self) -> list[str]: ... + +class Message(email.message.Message): + def __init__(self, message: _MessageData | None = None) -> None: ... + +class MaildirMessage(Message): + def get_subdir(self) -> str: ... + def set_subdir(self, subdir: Literal["new", "cur"]) -> None: ... + def get_flags(self) -> str: ... + def set_flags(self, flags: Iterable[str]) -> None: ... + def add_flag(self, flag: str) -> None: ... + def remove_flag(self, flag: str) -> None: ... + def get_date(self) -> int: ... + def set_date(self, date: float) -> None: ... + def get_info(self) -> str: ... + def set_info(self, info: str) -> None: ... + +class _mboxMMDFMessage(Message): + def get_from(self) -> str: ... + def set_from(self, from_: str, time_: bool | tuple[int, int, int, int, int, int, int, int, int] | None = None) -> None: ... + def get_flags(self) -> str: ... + def set_flags(self, flags: Iterable[str]) -> None: ... + def add_flag(self, flag: str) -> None: ... + def remove_flag(self, flag: str) -> None: ... + +class mboxMessage(_mboxMMDFMessage): ... + +class MHMessage(Message): + def get_sequences(self) -> list[str]: ... + def set_sequences(self, sequences: Iterable[str]) -> None: ... + def add_sequence(self, sequence: str) -> None: ... + def remove_sequence(self, sequence: str) -> None: ... 
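A minimal sketch of the mbox mailbox declared above (illustrative only; the path and message are placeholders):

    import mailbox

    mb = mailbox.mbox("inbox.mbox")        # placeholder path; created if missing
    mb.lock()
    try:
        mb.add("From: a@example.com\n\nhello world\n")   # str, bytes, or Message accepted
        for key, msg in mb.items():
            print(key, msg.get_from())     # mboxMessage envelope "From " line
        mb.flush()
    finally:
        mb.unlock()
        mb.close()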
+ +class BabylMessage(Message): + def get_labels(self) -> list[str]: ... + def set_labels(self, labels: Iterable[str]) -> None: ... + def add_label(self, label: str) -> None: ... + def remove_label(self, label: str) -> None: ... + def get_visible(self) -> Message: ... + def set_visible(self, visible: _MessageData) -> None: ... + def update_visible(self) -> None: ... + +class MMDFMessage(_mboxMMDFMessage): ... + +class _ProxyFile(Generic[AnyStr]): + def __init__(self, f: IO[AnyStr], pos: int | None = None) -> None: ... + def read(self, size: int | None = None) -> AnyStr: ... + def read1(self, size: int | None = None) -> AnyStr: ... + def readline(self, size: int | None = None) -> AnyStr: ... + def readlines(self, sizehint: int | None = None) -> list[AnyStr]: ... + def __iter__(self) -> Iterator[AnyStr]: ... + def tell(self) -> int: ... + def seek(self, offset: int, whence: int = 0) -> None: ... + def close(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... + def readable(self) -> bool: ... + def writable(self) -> bool: ... + def seekable(self) -> bool: ... + def flush(self) -> None: ... + @property + def closed(self) -> bool: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +class _PartialFile(_ProxyFile[AnyStr]): + def __init__(self, f: IO[AnyStr], start: int | None = None, stop: int | None = None) -> None: ... + +class Error(Exception): ... +class NoSuchMailboxError(Error): ... +class NotEmptyError(Error): ... +class ExternalClashError(Error): ... +class FormatError(Error): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/mailcap.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/mailcap.pyi new file mode 100644 index 0000000..ce549e0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/mailcap.pyi @@ -0,0 +1,11 @@ +from collections.abc import Mapping, Sequence +from typing_extensions import TypeAlias + +_Cap: TypeAlias = dict[str, str | int] + +__all__ = ["getcaps", "findmatch"] + +def findmatch( + caps: Mapping[str, list[_Cap]], MIMEtype: str, key: str = "view", filename: str = "/dev/null", plist: Sequence[str] = [] +) -> tuple[str | None, _Cap | None]: ... +def getcaps() -> dict[str, list[_Cap]]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/marshal.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/marshal.pyi new file mode 100644 index 0000000..46c421e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/marshal.pyi @@ -0,0 +1,49 @@ +import builtins +import sys +import types +from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite +from typing import Any, Final +from typing_extensions import TypeAlias + +version: Final[int] + +_Marshallable: TypeAlias = ( + # handled in w_object() in marshal.c + None + | type[StopIteration] + | builtins.ellipsis + | bool + # handled in w_complex_object() in marshal.c + | int + | float + | complex + | bytes + | str + | tuple[_Marshallable, ...] + | list[Any] + | dict[Any, Any] + | set[Any] + | frozenset[_Marshallable] + | types.CodeType + | ReadableBuffer +) + +if sys.version_info >= (3, 14): + def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 5, /, *, allow_code: bool = True) -> None: ... + def dumps(value: _Marshallable, version: int = 5, /, *, allow_code: bool = True) -> bytes: ... 
+ +elif sys.version_info >= (3, 13): + def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /, *, allow_code: bool = True) -> None: ... + def dumps(value: _Marshallable, version: int = 4, /, *, allow_code: bool = True) -> bytes: ... + +else: + def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /) -> None: ... + def dumps(value: _Marshallable, version: int = 4, /) -> bytes: ... + +if sys.version_info >= (3, 13): + def load(file: SupportsRead[bytes], /, *, allow_code: bool = True) -> Any: ... + def loads(bytes: ReadableBuffer, /, *, allow_code: bool = True) -> Any: ... + +else: + def load(file: SupportsRead[bytes], /) -> Any: ... + def loads(bytes: ReadableBuffer, /) -> Any: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/math.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/math.pyi new file mode 100644 index 0000000..1903d48 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/math.pyi @@ -0,0 +1,140 @@ +import sys +from _typeshed import SupportsMul, SupportsRMul +from collections.abc import Iterable +from typing import Any, Final, Literal, Protocol, SupportsFloat, SupportsIndex, TypeVar, overload, type_check_only +from typing_extensions import TypeAlias + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) + +_SupportsFloatOrIndex: TypeAlias = SupportsFloat | SupportsIndex + +e: Final[float] +pi: Final[float] +inf: Final[float] +nan: Final[float] +tau: Final[float] + +def acos(x: _SupportsFloatOrIndex, /) -> float: ... +def acosh(x: _SupportsFloatOrIndex, /) -> float: ... +def asin(x: _SupportsFloatOrIndex, /) -> float: ... +def asinh(x: _SupportsFloatOrIndex, /) -> float: ... +def atan(x: _SupportsFloatOrIndex, /) -> float: ... +def atan2(y: _SupportsFloatOrIndex, x: _SupportsFloatOrIndex, /) -> float: ... +def atanh(x: _SupportsFloatOrIndex, /) -> float: ... + +if sys.version_info >= (3, 11): + def cbrt(x: _SupportsFloatOrIndex, /) -> float: ... + +@type_check_only +class _SupportsCeil(Protocol[_T_co]): + def __ceil__(self) -> _T_co: ... + +@overload +def ceil(x: _SupportsCeil[_T], /) -> _T: ... +@overload +def ceil(x: _SupportsFloatOrIndex, /) -> int: ... +def comb(n: SupportsIndex, k: SupportsIndex, /) -> int: ... +def copysign(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... +def cos(x: _SupportsFloatOrIndex, /) -> float: ... +def cosh(x: _SupportsFloatOrIndex, /) -> float: ... +def degrees(x: _SupportsFloatOrIndex, /) -> float: ... +def dist(p: Iterable[_SupportsFloatOrIndex], q: Iterable[_SupportsFloatOrIndex], /) -> float: ... +def erf(x: _SupportsFloatOrIndex, /) -> float: ... +def erfc(x: _SupportsFloatOrIndex, /) -> float: ... +def exp(x: _SupportsFloatOrIndex, /) -> float: ... + +if sys.version_info >= (3, 11): + def exp2(x: _SupportsFloatOrIndex, /) -> float: ... + +def expm1(x: _SupportsFloatOrIndex, /) -> float: ... +def fabs(x: _SupportsFloatOrIndex, /) -> float: ... +def factorial(x: SupportsIndex, /) -> int: ... +@type_check_only +class _SupportsFloor(Protocol[_T_co]): + def __floor__(self) -> _T_co: ... + +@overload +def floor(x: _SupportsFloor[_T], /) -> _T: ... +@overload +def floor(x: _SupportsFloatOrIndex, /) -> int: ... +def fmod(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... +def frexp(x: _SupportsFloatOrIndex, /) -> tuple[float, int]: ... +def fsum(seq: Iterable[_SupportsFloatOrIndex], /) -> float: ... +def gamma(x: _SupportsFloatOrIndex, /) -> float: ... +def gcd(*integers: SupportsIndex) -> int: ... 
+def hypot(*coordinates: _SupportsFloatOrIndex) -> float: ... +def isclose( + a: _SupportsFloatOrIndex, + b: _SupportsFloatOrIndex, + *, + rel_tol: _SupportsFloatOrIndex = 1e-09, + abs_tol: _SupportsFloatOrIndex = 0.0, +) -> bool: ... +def isinf(x: _SupportsFloatOrIndex, /) -> bool: ... +def isfinite(x: _SupportsFloatOrIndex, /) -> bool: ... +def isnan(x: _SupportsFloatOrIndex, /) -> bool: ... +def isqrt(n: SupportsIndex, /) -> int: ... +def lcm(*integers: SupportsIndex) -> int: ... +def ldexp(x: _SupportsFloatOrIndex, i: int, /) -> float: ... +def lgamma(x: _SupportsFloatOrIndex, /) -> float: ... +def log(x: _SupportsFloatOrIndex, base: _SupportsFloatOrIndex = ...) -> float: ... +def log10(x: _SupportsFloatOrIndex, /) -> float: ... +def log1p(x: _SupportsFloatOrIndex, /) -> float: ... +def log2(x: _SupportsFloatOrIndex, /) -> float: ... +def modf(x: _SupportsFloatOrIndex, /) -> tuple[float, float]: ... + +if sys.version_info >= (3, 12): + def nextafter(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /, *, steps: SupportsIndex | None = None) -> float: ... + +else: + def nextafter(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... + +def perm(n: SupportsIndex, k: SupportsIndex | None = None, /) -> int: ... +def pow(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... + +_PositiveInteger: TypeAlias = Literal[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25] +_NegativeInteger: TypeAlias = Literal[-1, -2, -3, -4, -5, -6, -7, -8, -9, -10, -11, -12, -13, -14, -15, -16, -17, -18, -19, -20] +_LiteralInteger = _PositiveInteger | _NegativeInteger | Literal[0] # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed + +_MultiplicableT1 = TypeVar("_MultiplicableT1", bound=SupportsMul[Any, Any]) +_MultiplicableT2 = TypeVar("_MultiplicableT2", bound=SupportsMul[Any, Any]) + +@type_check_only +class _SupportsProdWithNoDefaultGiven(SupportsMul[Any, Any], SupportsRMul[int, Any], Protocol): ... + +_SupportsProdNoDefaultT = TypeVar("_SupportsProdNoDefaultT", bound=_SupportsProdWithNoDefaultGiven) + +# This stub is based on the type stub for `builtins.sum`. +# Like `builtins.sum`, it cannot be precisely represented in a type stub +# without introducing many false positives. +# For more details on its limitations and false positives, see #13572. +# Instead, just like `builtins.sum`, we explicitly handle several useful cases. +@overload +def prod(iterable: Iterable[bool | _LiteralInteger], /, *, start: int = 1) -> int: ... # type: ignore[overload-overlap] +@overload +def prod(iterable: Iterable[_SupportsProdNoDefaultT], /) -> _SupportsProdNoDefaultT | Literal[1]: ... +@overload +def prod(iterable: Iterable[_MultiplicableT1], /, *, start: _MultiplicableT2) -> _MultiplicableT1 | _MultiplicableT2: ... +def radians(x: _SupportsFloatOrIndex, /) -> float: ... +def remainder(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... +def sin(x: _SupportsFloatOrIndex, /) -> float: ... +def sinh(x: _SupportsFloatOrIndex, /) -> float: ... + +if sys.version_info >= (3, 12): + def sumprod(p: Iterable[float], q: Iterable[float], /) -> float: ... + +def sqrt(x: _SupportsFloatOrIndex, /) -> float: ... +def tan(x: _SupportsFloatOrIndex, /) -> float: ... +def tanh(x: _SupportsFloatOrIndex, /) -> float: ... + +# Is different from `_typeshed.SupportsTrunc`, which is not generic +@type_check_only +class _SupportsTrunc(Protocol[_T_co]): + def __trunc__(self) -> _T_co: ... + +def trunc(x: _SupportsTrunc[_T], /) -> _T: ... 
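A few quick checks on the isclose() tolerances and prod() start value declared above (illustrative only):

    import math

    math.isclose(0.1 + 0.2, 0.3)             # True: rel_tol defaults to 1e-09
    math.isclose(1e-10, 0.0)                  # False: abs_tol defaults to 0.0
    math.isclose(1e-10, 0.0, abs_tol=1e-09)   # True once an absolute tolerance is given
    math.prod([2, 3, 4], start=10)            # 240: start multiplies the product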
+def ulp(x: _SupportsFloatOrIndex, /) -> float: ... + +if sys.version_info >= (3, 13): + def fma(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, z: _SupportsFloatOrIndex, /) -> float: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/mimetypes.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/mimetypes.pyi new file mode 100644 index 0000000..9914a34 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/mimetypes.pyi @@ -0,0 +1,56 @@ +import sys +from _typeshed import StrPath +from collections.abc import Sequence +from typing import IO + +__all__ = [ + "knownfiles", + "inited", + "MimeTypes", + "guess_type", + "guess_all_extensions", + "guess_extension", + "add_type", + "init", + "read_mime_types", + "suffix_map", + "encodings_map", + "types_map", + "common_types", +] + +if sys.version_info >= (3, 13): + __all__ += ["guess_file_type"] + +def guess_type(url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: ... +def guess_all_extensions(type: str, strict: bool = True) -> list[str]: ... +def guess_extension(type: str, strict: bool = True) -> str | None: ... +def init(files: Sequence[str] | None = None) -> None: ... +def read_mime_types(file: str) -> dict[str, str] | None: ... +def add_type(type: str, ext: str, strict: bool = True) -> None: ... + +if sys.version_info >= (3, 13): + def guess_file_type(path: StrPath, *, strict: bool = True) -> tuple[str | None, str | None]: ... + +inited: bool +knownfiles: list[str] +suffix_map: dict[str, str] +encodings_map: dict[str, str] +types_map: dict[str, str] +common_types: dict[str, str] + +class MimeTypes: + suffix_map: dict[str, str] + encodings_map: dict[str, str] + types_map: tuple[dict[str, str], dict[str, str]] + types_map_inv: tuple[dict[str, str], dict[str, str]] + def __init__(self, filenames: tuple[str, ...] = (), strict: bool = True) -> None: ... + def add_type(self, type: str, ext: str, strict: bool = True) -> None: ... + def guess_extension(self, type: str, strict: bool = True) -> str | None: ... + def guess_type(self, url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: ... + def guess_all_extensions(self, type: str, strict: bool = True) -> list[str]: ... + def read(self, filename: str, strict: bool = True) -> None: ... + def readfp(self, fp: IO[str], strict: bool = True) -> None: ... + def read_windows_registry(self, strict: bool = True) -> None: ... + if sys.version_info >= (3, 13): + def guess_file_type(self, path: StrPath, *, strict: bool = True) -> tuple[str | None, str | None]: ... 
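A minimal sketch of the mimetypes API declared above (illustrative only; the custom type/extension pair is a made-up example):

    import mimetypes

    mimetypes.init()                                    # load the default type maps
    mimetypes.guess_type("report.tar.gz")               # ('application/x-tar', 'gzip')
    mimetypes.guess_extension("application/json")       # '.json'
    mimetypes.add_type("application/x-fusion", ".fus")  # hypothetical custom mapping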
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/mmap.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/mmap.pyi new file mode 100644 index 0000000..8a5baba --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/mmap.pyi @@ -0,0 +1,152 @@ +import os +import sys +from _typeshed import ReadableBuffer, Unused +from collections.abc import Iterator +from typing import Final, Literal, NoReturn, overload +from typing_extensions import Self, disjoint_base + +ACCESS_DEFAULT: Final = 0 +ACCESS_READ: Final = 1 +ACCESS_WRITE: Final = 2 +ACCESS_COPY: Final = 3 + +ALLOCATIONGRANULARITY: Final[int] + +if sys.platform == "linux": + MAP_DENYWRITE: Final[int] + MAP_EXECUTABLE: Final[int] + if sys.version_info >= (3, 10): + MAP_POPULATE: Final[int] +if sys.version_info >= (3, 11) and sys.platform != "win32" and sys.platform != "darwin": + MAP_STACK: Final[int] + +if sys.platform != "win32": + MAP_ANON: Final[int] + MAP_ANONYMOUS: Final[int] + MAP_PRIVATE: Final[int] + MAP_SHARED: Final[int] + PROT_EXEC: Final[int] + PROT_READ: Final[int] + PROT_WRITE: Final[int] + +PAGESIZE: Final[int] + +@disjoint_base +class mmap: + if sys.platform == "win32": + def __new__(self, fileno: int, length: int, tagname: str | None = None, access: int = 0, offset: int = 0) -> Self: ... + else: + if sys.version_info >= (3, 13): + def __new__( + cls, + fileno: int, + length: int, + flags: int = ..., + prot: int = ..., + access: int = 0, + offset: int = 0, + *, + trackfd: bool = True, + ) -> Self: ... + else: + def __new__( + cls, fileno: int, length: int, flags: int = ..., prot: int = ..., access: int = 0, offset: int = 0 + ) -> Self: ... + + def close(self) -> None: ... + def flush(self, offset: int = 0, size: int = ...) -> None: ... + def move(self, dest: int, src: int, count: int) -> None: ... + def read_byte(self) -> int: ... + def readline(self) -> bytes: ... + def resize(self, newsize: int) -> None: ... + if sys.platform != "win32": + def seek(self, pos: int, whence: Literal[0, 1, 2, 3, 4] = os.SEEK_SET) -> None: ... + else: + def seek(self, pos: int, whence: Literal[0, 1, 2] = os.SEEK_SET) -> None: ... + + def size(self) -> int: ... + def tell(self) -> int: ... + def write_byte(self, byte: int) -> None: ... + def __len__(self) -> int: ... + closed: bool + if sys.platform != "win32": + def madvise(self, option: int, start: int = 0, length: int = ...) -> None: ... + + def find(self, sub: ReadableBuffer, start: int = ..., stop: int = ...) -> int: ... + def rfind(self, sub: ReadableBuffer, start: int = ..., stop: int = ...) -> int: ... + def read(self, n: int | None = None) -> bytes: ... + def write(self, bytes: ReadableBuffer) -> int: ... + @overload + def __getitem__(self, key: int, /) -> int: ... + @overload + def __getitem__(self, key: slice, /) -> bytes: ... + def __delitem__(self, key: int | slice, /) -> NoReturn: ... + @overload + def __setitem__(self, key: int, value: int, /) -> None: ... + @overload + def __setitem__(self, key: slice, value: ReadableBuffer, /) -> None: ... + # Doesn't actually exist, but the object actually supports "in" because it has __getitem__, + # so we claim that there is also a __contains__ to help type checkers. + def __contains__(self, o: object, /) -> bool: ... + # Doesn't actually exist, but the object is actually iterable because it has __getitem__ and __len__, + # so we claim that there is also an __iter__ to help type checkers. + def __iter__(self) -> Iterator[int]: ... + def __enter__(self) -> Self: ... 
+ def __exit__(self, *args: Unused) -> None: ... + def __buffer__(self, flags: int, /) -> memoryview: ... + def __release_buffer__(self, buffer: memoryview, /) -> None: ... + if sys.version_info >= (3, 13): + def seekable(self) -> Literal[True]: ... + +if sys.platform != "win32": + MADV_NORMAL: Final[int] + MADV_RANDOM: Final[int] + MADV_SEQUENTIAL: Final[int] + MADV_WILLNEED: Final[int] + MADV_DONTNEED: Final[int] + MADV_FREE: Final[int] + +if sys.platform == "linux": + MADV_REMOVE: Final[int] + MADV_DONTFORK: Final[int] + MADV_DOFORK: Final[int] + MADV_HWPOISON: Final[int] + MADV_MERGEABLE: Final[int] + MADV_UNMERGEABLE: Final[int] + # Seems like this constant is not defined in glibc. + # See https://github.com/python/typeshed/pull/5360 for details + # MADV_SOFT_OFFLINE: Final[int] + MADV_HUGEPAGE: Final[int] + MADV_NOHUGEPAGE: Final[int] + MADV_DONTDUMP: Final[int] + MADV_DODUMP: Final[int] + +# This Values are defined for FreeBSD but type checkers do not support conditions for these +if sys.platform != "linux" and sys.platform != "darwin" and sys.platform != "win32": + MADV_NOSYNC: Final[int] + MADV_AUTOSYNC: Final[int] + MADV_NOCORE: Final[int] + MADV_CORE: Final[int] + MADV_PROTECT: Final[int] + +if sys.version_info >= (3, 10) and sys.platform == "darwin": + MADV_FREE_REUSABLE: Final[int] + MADV_FREE_REUSE: Final[int] + +if sys.version_info >= (3, 13) and sys.platform != "win32": + MAP_32BIT: Final[int] + +if sys.version_info >= (3, 13) and sys.platform == "darwin": + MAP_NORESERVE: Final = 64 + MAP_NOEXTEND: Final = 256 + MAP_HASSEMAPHORE: Final = 512 + MAP_NOCACHE: Final = 1024 + MAP_JIT: Final = 2048 + MAP_RESILIENT_CODESIGN: Final = 8192 + MAP_RESILIENT_MEDIA: Final = 16384 + MAP_TRANSLATED_ALLOW_EXECUTE: Final = 131072 + MAP_UNIX03: Final = 262144 + MAP_TPRO: Final = 524288 + +if sys.version_info >= (3, 13) and sys.platform == "linux": + MAP_NORESERVE: Final = 16384 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/modulefinder.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/modulefinder.pyi new file mode 100644 index 0000000..6db665a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/modulefinder.pyi @@ -0,0 +1,68 @@ +import sys +from collections.abc import Container, Iterable, Iterator, Sequence +from types import CodeType +from typing import IO, Any, Final + +if sys.version_info < (3, 11): + LOAD_CONST: Final[int] # undocumented + IMPORT_NAME: Final[int] # undocumented + STORE_NAME: Final[int] # undocumented + STORE_GLOBAL: Final[int] # undocumented + STORE_OPS: Final[tuple[int, int]] # undocumented + EXTENDED_ARG: Final[int] # undocumented + +packagePathMap: dict[str, list[str]] # undocumented + +def AddPackagePath(packagename: str, path: str) -> None: ... + +replacePackageMap: dict[str, str] # undocumented + +def ReplacePackage(oldname: str, newname: str) -> None: ... + +class Module: # undocumented + def __init__(self, name: str, file: str | None = None, path: str | None = None) -> None: ... + +class ModuleFinder: + modules: dict[str, Module] + path: list[str] # undocumented + badmodules: dict[str, dict[str, int]] # undocumented + debug: int # undocumented + indent: int # undocumented + excludes: Container[str] # undocumented + replace_paths: Sequence[tuple[str, str]] # undocumented + + def __init__( + self, + path: list[str] | None = None, + debug: int = 0, + excludes: Container[str] | None = None, + replace_paths: Sequence[tuple[str, str]] | None = None, + ) -> None: ... 
+ def msg(self, level: int, str: str, *args: Any) -> None: ... # undocumented + def msgin(self, *args: Any) -> None: ... # undocumented + def msgout(self, *args: Any) -> None: ... # undocumented + def run_script(self, pathname: str) -> None: ... + def load_file(self, pathname: str) -> None: ... # undocumented + def import_hook( + self, name: str, caller: Module | None = None, fromlist: list[str] | None = None, level: int = -1 + ) -> Module | None: ... # undocumented + def determine_parent(self, caller: Module | None, level: int = -1) -> Module | None: ... # undocumented + def find_head_package(self, parent: Module, name: str) -> tuple[Module, str]: ... # undocumented + def load_tail(self, q: Module, tail: str) -> Module: ... # undocumented + def ensure_fromlist(self, m: Module, fromlist: Iterable[str], recursive: int = 0) -> None: ... # undocumented + def find_all_submodules(self, m: Module) -> Iterable[str]: ... # undocumented + def import_module(self, partname: str, fqname: str, parent: Module) -> Module | None: ... # undocumented + def load_module(self, fqname: str, fp: IO[str], pathname: str, file_info: tuple[str, str, str]) -> Module: ... # undocumented + def scan_opcodes(self, co: CodeType) -> Iterator[tuple[str, tuple[Any, ...]]]: ... # undocumented + def scan_code(self, co: CodeType, m: Module) -> None: ... # undocumented + def load_package(self, fqname: str, pathname: str) -> Module: ... # undocumented + def add_module(self, fqname: str) -> Module: ... # undocumented + def find_module( + self, name: str, path: str | None, parent: Module | None = None + ) -> tuple[IO[Any] | None, str | None, tuple[str, str, int]]: ... # undocumented + def report(self) -> None: ... + def any_missing(self) -> list[str]: ... # undocumented + def any_missing_maybe(self) -> tuple[list[str], list[str]]: ... # undocumented + def replace_paths_in_code(self, co: CodeType) -> CodeType: ... # undocumented + +def test() -> ModuleFinder | None: ... # undocumented diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/msilib/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/msilib/__init__.pyi new file mode 100644 index 0000000..622f585 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/msilib/__init__.pyi @@ -0,0 +1,177 @@ +import sys +from collections.abc import Container, Iterable, Sequence +from types import ModuleType +from typing import Any, Final + +if sys.platform == "win32": + from _msi import * + from _msi import _Database + + AMD64: Final[bool] + Win64: Final[bool] + + datasizemask: Final = 0x00FF + type_valid: Final = 0x0100 + type_localizable: Final = 0x0200 + typemask: Final = 0x0C00 + type_long: Final = 0x0000 + type_short: Final = 0x0400 + type_string: Final = 0x0C00 + type_binary: Final = 0x0800 + type_nullable: Final = 0x1000 + type_key: Final = 0x2000 + knownbits: Final = 0x3FFF + + class Table: + name: str + fields: list[tuple[int, str, int]] + def __init__(self, name: str) -> None: ... + def add_field(self, index: int, name: str, type: int) -> None: ... + def sql(self) -> str: ... + def create(self, db: _Database) -> None: ... + + class _Unspecified: ... + + def change_sequence( + seq: Sequence[tuple[str, str | None, int]], + action: str, + seqno: int | type[_Unspecified] = ..., + cond: str | type[_Unspecified] = ..., + ) -> None: ... + def add_data(db: _Database, table: str, values: Iterable[tuple[Any, ...]]) -> None: ... + def add_stream(db: _Database, name: str, path: str) -> None: ... 
+ def init_database( + name: str, schema: ModuleType, ProductName: str, ProductCode: str, ProductVersion: str, Manufacturer: str + ) -> _Database: ... + def add_tables(db: _Database, module: ModuleType) -> None: ... + def make_id(str: str) -> str: ... + def gen_uuid() -> str: ... + + class CAB: + name: str + files: list[tuple[str, str]] + filenames: set[str] + index: int + def __init__(self, name: str) -> None: ... + def gen_id(self, file: str) -> str: ... + def append(self, full: str, file: str, logical: str) -> tuple[int, str]: ... + def commit(self, db: _Database) -> None: ... + + _directories: set[str] + + class Directory: + db: _Database + cab: CAB + basedir: str + physical: str + logical: str + component: str | None + short_names: set[str] + ids: set[str] + keyfiles: dict[str, str] + componentflags: int | None + absolute: str + def __init__( + self, + db: _Database, + cab: CAB, + basedir: str, + physical: str, + _logical: str, + default: str, + componentflags: int | None = None, + ) -> None: ... + def start_component( + self, + component: str | None = None, + feature: Feature | None = None, + flags: int | None = None, + keyfile: str | None = None, + uuid: str | None = None, + ) -> None: ... + def make_short(self, file: str) -> str: ... + def add_file(self, file: str, src: str | None = None, version: str | None = None, language: str | None = None) -> str: ... + def glob(self, pattern: str, exclude: Container[str] | None = None) -> list[str]: ... + def remove_pyc(self) -> None: ... + + class Binary: + name: str + def __init__(self, fname: str) -> None: ... + + class Feature: + id: str + def __init__( + self, + db: _Database, + id: str, + title: str, + desc: str, + display: int, + level: int = 1, + parent: Feature | None = None, + directory: str | None = None, + attributes: int = 0, + ) -> None: ... + def set_current(self) -> None: ... + + class Control: + dlg: Dialog + name: str + def __init__(self, dlg: Dialog, name: str) -> None: ... + def event(self, event: str, argument: str, condition: str = "1", ordering: int | None = None) -> None: ... + def mapping(self, event: str, attribute: str) -> None: ... + def condition(self, action: str, condition: str) -> None: ... + + class RadioButtonGroup(Control): + property: str + index: int + def __init__(self, dlg: Dialog, name: str, property: str) -> None: ... + def add(self, name: str, x: int, y: int, w: int, h: int, text: str, value: str | None = None) -> None: ... + + class Dialog: + db: _Database + name: str + x: int + y: int + w: int + h: int + def __init__( + self, + db: _Database, + name: str, + x: int, + y: int, + w: int, + h: int, + attr: int, + title: str, + first: str, + default: str, + cancel: str, + ) -> None: ... + def control( + self, + name: str, + type: str, + x: int, + y: int, + w: int, + h: int, + attr: int, + prop: str | None, + text: str | None, + next: str | None, + help: str | None, + ) -> Control: ... + def text(self, name: str, x: int, y: int, w: int, h: int, attr: int, text: str | None) -> Control: ... + def bitmap(self, name: str, x: int, y: int, w: int, h: int, text: str | None) -> Control: ... + def line(self, name: str, x: int, y: int, w: int, h: int) -> Control: ... + def pushbutton( + self, name: str, x: int, y: int, w: int, h: int, attr: int, text: str | None, next: str | None + ) -> Control: ... + def radiogroup( + self, name: str, x: int, y: int, w: int, h: int, attr: int, prop: str | None, text: str | None, next: str | None + ) -> RadioButtonGroup: ... 
+ def checkbox( + self, name: str, x: int, y: int, w: int, h: int, attr: int, prop: str | None, text: str | None, next: str | None + ) -> Control: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/msilib/schema.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/msilib/schema.pyi new file mode 100644 index 0000000..3bbdc41 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/msilib/schema.pyi @@ -0,0 +1,95 @@ +import sys +from typing import Final + +if sys.platform == "win32": + from . import Table + + _Validation: Table + ActionText: Table + AdminExecuteSequence: Table + Condition: Table + AdminUISequence: Table + AdvtExecuteSequence: Table + AdvtUISequence: Table + AppId: Table + AppSearch: Table + Property: Table + BBControl: Table + Billboard: Table + Feature: Table + Binary: Table + BindImage: Table + File: Table + CCPSearch: Table + CheckBox: Table + Class: Table + Component: Table + Icon: Table + ProgId: Table + ComboBox: Table + CompLocator: Table + Complus: Table + Directory: Table + Control: Table + Dialog: Table + ControlCondition: Table + ControlEvent: Table + CreateFolder: Table + CustomAction: Table + DrLocator: Table + DuplicateFile: Table + Environment: Table + Error: Table + EventMapping: Table + Extension: Table + MIME: Table + FeatureComponents: Table + FileSFPCatalog: Table + SFPCatalog: Table + Font: Table + IniFile: Table + IniLocator: Table + InstallExecuteSequence: Table + InstallUISequence: Table + IsolatedComponent: Table + LaunchCondition: Table + ListBox: Table + ListView: Table + LockPermissions: Table + Media: Table + MoveFile: Table + MsiAssembly: Table + MsiAssemblyName: Table + MsiDigitalCertificate: Table + MsiDigitalSignature: Table + MsiFileHash: Table + MsiPatchHeaders: Table + ODBCAttribute: Table + ODBCDriver: Table + ODBCDataSource: Table + ODBCSourceAttribute: Table + ODBCTranslator: Table + Patch: Table + PatchPackage: Table + PublishComponent: Table + RadioButton: Table + Registry: Table + RegLocator: Table + RemoveFile: Table + RemoveIniFile: Table + RemoveRegistry: Table + ReserveCost: Table + SelfReg: Table + ServiceControl: Table + ServiceInstall: Table + Shortcut: Table + Signature: Table + TextStyle: Table + TypeLib: Table + UIText: Table + Upgrade: Table + Verb: Table + + tables: Final[list[Table]] + + _Validation_records: list[tuple[str, str, str, int | None, int | None, str | None, int | None, str | None, str | None, str]] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/msilib/sequence.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/msilib/sequence.pyi new file mode 100644 index 0000000..a9f5c24 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/msilib/sequence.pyi @@ -0,0 +1,14 @@ +import sys +from typing import Final +from typing_extensions import TypeAlias + +if sys.platform == "win32": + _SequenceType: TypeAlias = list[tuple[str, str | None, int]] + + AdminExecuteSequence: Final[_SequenceType] + AdminUISequence: Final[_SequenceType] + AdvtExecuteSequence: Final[_SequenceType] + InstallExecuteSequence: Final[_SequenceType] + InstallUISequence: Final[_SequenceType] + + tables: Final[list[str]] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/msilib/text.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/msilib/text.pyi new file mode 100644 index 0000000..da3c5fd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/msilib/text.pyi @@ -0,0 +1,8 @@ +import sys +from typing 
import Final + +if sys.platform == "win32": + ActionText: Final[list[tuple[str, str, str | None]]] + UIText: Final[list[tuple[str, str | None]]] + dirname: str + tables: Final[list[str]] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/msvcrt.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/msvcrt.pyi new file mode 100644 index 0000000..5feca8e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/msvcrt.pyi @@ -0,0 +1,32 @@ +import sys +from typing import Final + +# This module is only available on Windows +if sys.platform == "win32": + CRT_ASSEMBLY_VERSION: Final[str] + LK_UNLCK: Final = 0 + LK_LOCK: Final = 1 + LK_NBLCK: Final = 2 + LK_RLCK: Final = 3 + LK_NBRLCK: Final = 4 + SEM_FAILCRITICALERRORS: Final = 0x0001 + SEM_NOALIGNMENTFAULTEXCEPT: Final = 0x0004 + SEM_NOGPFAULTERRORBOX: Final = 0x0002 + SEM_NOOPENFILEERRORBOX: Final = 0x8000 + def locking(fd: int, mode: int, nbytes: int, /) -> None: ... + def setmode(fd: int, mode: int, /) -> int: ... + def open_osfhandle(handle: int, flags: int, /) -> int: ... + def get_osfhandle(fd: int, /) -> int: ... + def kbhit() -> bool: ... + def getch() -> bytes: ... + def getwch() -> str: ... + def getche() -> bytes: ... + def getwche() -> str: ... + def putch(char: bytes | bytearray, /) -> None: ... + def putwch(unicode_char: str, /) -> None: ... + def ungetch(char: bytes | bytearray, /) -> None: ... + def ungetwch(unicode_char: str, /) -> None: ... + def heapmin() -> None: ... + def SetErrorMode(mode: int, /) -> int: ... + if sys.version_info >= (3, 10): + def GetErrorMode() -> int: ... # undocumented diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/__init__.pyi new file mode 100644 index 0000000..2bd6e28 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/__init__.pyi @@ -0,0 +1,90 @@ +from multiprocessing import context, reduction as reducer +from multiprocessing.context import ( + AuthenticationError as AuthenticationError, + BufferTooShort as BufferTooShort, + Process as Process, + ProcessError as ProcessError, + TimeoutError as TimeoutError, +) +from multiprocessing.process import ( + active_children as active_children, + current_process as current_process, + parent_process as parent_process, +) + +# These are technically functions that return instances of these Queue classes. +# The stub here doesn't reflect reality exactly -- +# while e.g. `multiprocessing.queues.Queue` is a class, +# `multiprocessing.Queue` is actually a function at runtime. +# Avoid using `multiprocessing.Queue` as a type annotation; +# use imports from multiprocessing.queues instead. +# See #4266 and #8450 for discussion. 
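+# Illustrative sketch (not part of the upstream stub): following the advice above,
+# annotate with the class from multiprocessing.queues and build instances with the
+# top-level factory, e.g.
+#     from multiprocessing.queues import Queue as QueueType
+#     import multiprocessing
+#     q: "QueueType[int]" = multiprocessing.Queue()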
+from multiprocessing.queues import JoinableQueue as JoinableQueue, Queue as Queue, SimpleQueue as SimpleQueue +from multiprocessing.spawn import freeze_support as freeze_support + +__all__ = [ + "Array", + "AuthenticationError", + "Barrier", + "BoundedSemaphore", + "BufferTooShort", + "Condition", + "Event", + "JoinableQueue", + "Lock", + "Manager", + "Pipe", + "Pool", + "Process", + "ProcessError", + "Queue", + "RLock", + "RawArray", + "RawValue", + "Semaphore", + "SimpleQueue", + "TimeoutError", + "Value", + "active_children", + "allow_connection_pickling", + "cpu_count", + "current_process", + "freeze_support", + "get_all_start_methods", + "get_context", + "get_logger", + "get_start_method", + "log_to_stderr", + "parent_process", + "reducer", + "set_executable", + "set_forkserver_preload", + "set_start_method", +] + +# These functions (really bound methods) +# are all autogenerated at runtime here: https://github.com/python/cpython/blob/600c65c094b0b48704d8ec2416930648052ba715/Lib/multiprocessing/__init__.py#L23 +RawValue = context._default_context.RawValue +RawArray = context._default_context.RawArray +Value = context._default_context.Value +Array = context._default_context.Array +Barrier = context._default_context.Barrier +BoundedSemaphore = context._default_context.BoundedSemaphore +Condition = context._default_context.Condition +Event = context._default_context.Event +Lock = context._default_context.Lock +RLock = context._default_context.RLock +Semaphore = context._default_context.Semaphore +Pipe = context._default_context.Pipe +Pool = context._default_context.Pool +allow_connection_pickling = context._default_context.allow_connection_pickling +cpu_count = context._default_context.cpu_count +get_logger = context._default_context.get_logger +log_to_stderr = context._default_context.log_to_stderr +Manager = context._default_context.Manager +set_executable = context._default_context.set_executable +set_forkserver_preload = context._default_context.set_forkserver_preload +get_all_start_methods = context._default_context.get_all_start_methods +get_start_method = context._default_context.get_start_method +set_start_method = context._default_context.set_start_method +get_context = context._default_context.get_context diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/connection.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/connection.pyi new file mode 100644 index 0000000..cd4fa10 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/connection.pyi @@ -0,0 +1,83 @@ +import socket +import sys +from _typeshed import Incomplete, ReadableBuffer +from collections.abc import Iterable +from types import TracebackType +from typing import Any, Generic, SupportsIndex, TypeVar +from typing_extensions import Self, TypeAlias + +__all__ = ["Client", "Listener", "Pipe", "wait"] + +# https://docs.python.org/3/library/multiprocessing.html#address-formats +_Address: TypeAlias = str | tuple[str, int] + +# Defaulting to Any to avoid forcing generics on a lot of pre-existing code +_SendT_contra = TypeVar("_SendT_contra", contravariant=True, default=Any) +_RecvT_co = TypeVar("_RecvT_co", covariant=True, default=Any) + +class _ConnectionBase(Generic[_SendT_contra, _RecvT_co]): + def __init__(self, handle: SupportsIndex, readable: bool = True, writable: bool = True) -> None: ... + @property + def closed(self) -> bool: ... # undocumented + @property + def readable(self) -> bool: ... 
# undocumented + @property + def writable(self) -> bool: ... # undocumented + def fileno(self) -> int: ... + def close(self) -> None: ... + def send_bytes(self, buf: ReadableBuffer, offset: int = 0, size: int | None = None) -> None: ... + def send(self, obj: _SendT_contra) -> None: ... + def recv_bytes(self, maxlength: int | None = None) -> bytes: ... + def recv_bytes_into(self, buf: Any, offset: int = 0) -> int: ... + def recv(self) -> _RecvT_co: ... + def poll(self, timeout: float | None = 0.0) -> bool: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def __del__(self) -> None: ... + +class Connection(_ConnectionBase[_SendT_contra, _RecvT_co]): ... + +if sys.platform == "win32": + class PipeConnection(_ConnectionBase[_SendT_contra, _RecvT_co]): ... + +class Listener: + def __init__( + self, address: _Address | None = None, family: str | None = None, backlog: int = 1, authkey: bytes | None = None + ) -> None: ... + def accept(self) -> Connection[Incomplete, Incomplete]: ... + def close(self) -> None: ... + @property + def address(self) -> _Address: ... + @property + def last_accepted(self) -> _Address | None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + +# Any: send and recv methods unused +if sys.version_info >= (3, 12): + def deliver_challenge(connection: Connection[Any, Any], authkey: bytes, digest_name: str = "sha256") -> None: ... + +else: + def deliver_challenge(connection: Connection[Any, Any], authkey: bytes) -> None: ... + +def answer_challenge(connection: Connection[Any, Any], authkey: bytes) -> None: ... +def wait( + object_list: Iterable[Connection[_SendT_contra, _RecvT_co] | socket.socket | int], timeout: float | None = None +) -> list[Connection[_SendT_contra, _RecvT_co] | socket.socket | int]: ... +def Client(address: _Address, family: str | None = None, authkey: bytes | None = None) -> Connection[Any, Any]: ... + +# N.B. Keep this in sync with multiprocessing.context.BaseContext.Pipe. +# _ConnectionBase is the common base class of Connection and PipeConnection +# and can be used in cross-platform code. +# +# The two connections should have the same generic types but inverted (Connection[_T1, _T2], Connection[_T2, _T1]). +# However, TypeVars scoped entirely within a return annotation is unspecified in the spec. +if sys.platform != "win32": + def Pipe(duplex: bool = True) -> tuple[Connection[Any, Any], Connection[Any, Any]]: ... + +else: + def Pipe(duplex: bool = True) -> tuple[PipeConnection[Any, Any], PipeConnection[Any, Any]]: ... 
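A brief, illustrative sketch of the Pipe/Connection API stubbed above (not part of the typeshed file): Pipe() returns a connected pair, and the stub types each end as Connection[Any, Any] (PipeConnection on Windows), so annotations can narrow the send/recv types where useful.

    from multiprocessing.connection import Pipe

    parent, child = Pipe()         # a connected, duplex pair
    parent.send({"op": "ping"})    # any picklable object goes through send()/recv()
    print(child.recv())            # -> {'op': 'ping'}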
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/context.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/context.pyi new file mode 100644 index 0000000..03d1d2e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/context.pyi @@ -0,0 +1,206 @@ +import ctypes +import sys +from _ctypes import _CData +from collections.abc import Callable, Iterable, Sequence +from ctypes import _SimpleCData, c_char +from logging import Logger, _Level as _LoggingLevel +from multiprocessing import popen_fork, popen_forkserver, popen_spawn_posix, popen_spawn_win32, queues, synchronize +from multiprocessing.managers import SyncManager +from multiprocessing.pool import Pool as _Pool +from multiprocessing.process import BaseProcess +from multiprocessing.sharedctypes import Synchronized, SynchronizedArray, SynchronizedString +from typing import Any, ClassVar, Literal, TypeVar, overload +from typing_extensions import TypeAlias + +if sys.platform != "win32": + from multiprocessing.connection import Connection +else: + from multiprocessing.connection import PipeConnection + +__all__ = () + +_LockLike: TypeAlias = synchronize.Lock | synchronize.RLock +_T = TypeVar("_T") +_CT = TypeVar("_CT", bound=_CData) + +class ProcessError(Exception): ... +class BufferTooShort(ProcessError): ... +class TimeoutError(ProcessError): ... +class AuthenticationError(ProcessError): ... + +class BaseContext: + ProcessError: ClassVar[type[ProcessError]] + BufferTooShort: ClassVar[type[BufferTooShort]] + TimeoutError: ClassVar[type[TimeoutError]] + AuthenticationError: ClassVar[type[AuthenticationError]] + + # N.B. The methods below are applied at runtime to generate + # multiprocessing.*, so the signatures should be identical (modulo self). + @staticmethod + def current_process() -> BaseProcess: ... + @staticmethod + def parent_process() -> BaseProcess | None: ... + @staticmethod + def active_children() -> list[BaseProcess]: ... + def cpu_count(self) -> int: ... + def Manager(self) -> SyncManager: ... + + # N.B. Keep this in sync with multiprocessing.connection.Pipe. + # _ConnectionBase is the common base class of Connection and PipeConnection + # and can be used in cross-platform code. + # + # The two connections should have the same generic types but inverted (Connection[_T1, _T2], Connection[_T2, _T1]). + # However, TypeVars scoped entirely within a return annotation is unspecified in the spec. + if sys.platform != "win32": + def Pipe(self, duplex: bool = True) -> tuple[Connection[Any, Any], Connection[Any, Any]]: ... + else: + def Pipe(self, duplex: bool = True) -> tuple[PipeConnection[Any, Any], PipeConnection[Any, Any]]: ... + + def Barrier( + self, parties: int, action: Callable[..., object] | None = None, timeout: float | None = None + ) -> synchronize.Barrier: ... + def BoundedSemaphore(self, value: int = 1) -> synchronize.BoundedSemaphore: ... + def Condition(self, lock: _LockLike | None = None) -> synchronize.Condition: ... + def Event(self) -> synchronize.Event: ... + def Lock(self) -> synchronize.Lock: ... + def RLock(self) -> synchronize.RLock: ... + def Semaphore(self, value: int = 1) -> synchronize.Semaphore: ... + def Queue(self, maxsize: int = 0) -> queues.Queue[Any]: ... + def JoinableQueue(self, maxsize: int = 0) -> queues.JoinableQueue[Any]: ... + def SimpleQueue(self) -> queues.SimpleQueue[Any]: ... 
+ def Pool( + self, + processes: int | None = None, + initializer: Callable[..., object] | None = None, + initargs: Iterable[Any] = (), + maxtasksperchild: int | None = None, + ) -> _Pool: ... + @overload + def RawValue(self, typecode_or_type: type[_CT], *args: Any) -> _CT: ... + @overload + def RawValue(self, typecode_or_type: str, *args: Any) -> Any: ... + @overload + def RawArray(self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any]) -> ctypes.Array[_CT]: ... + @overload + def RawArray(self, typecode_or_type: str, size_or_initializer: int | Sequence[Any]) -> Any: ... + @overload + def Value( + self, typecode_or_type: type[_SimpleCData[_T]], *args: Any, lock: Literal[True] | _LockLike = True + ) -> Synchronized[_T]: ... + @overload + def Value(self, typecode_or_type: type[_CT], *args: Any, lock: Literal[False]) -> Synchronized[_CT]: ... + @overload + def Value(self, typecode_or_type: type[_CT], *args: Any, lock: Literal[True] | _LockLike = True) -> Synchronized[_CT]: ... + @overload + def Value(self, typecode_or_type: str, *args: Any, lock: Literal[True] | _LockLike = True) -> Synchronized[Any]: ... + @overload + def Value(self, typecode_or_type: str | type[_CData], *args: Any, lock: bool | _LockLike = True) -> Any: ... + @overload + def Array( + self, typecode_or_type: type[_SimpleCData[_T]], size_or_initializer: int | Sequence[Any], *, lock: Literal[False] + ) -> SynchronizedArray[_T]: ... + @overload + def Array( + self, typecode_or_type: type[c_char], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True + ) -> SynchronizedString: ... + @overload + def Array( + self, + typecode_or_type: type[_SimpleCData[_T]], + size_or_initializer: int | Sequence[Any], + *, + lock: Literal[True] | _LockLike = True, + ) -> SynchronizedArray[_T]: ... + @overload + def Array( + self, typecode_or_type: str, size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True + ) -> SynchronizedArray[Any]: ... + @overload + def Array( + self, typecode_or_type: str | type[_CData], size_or_initializer: int | Sequence[Any], *, lock: bool | _LockLike = True + ) -> Any: ... + def freeze_support(self) -> None: ... + def get_logger(self) -> Logger: ... + def log_to_stderr(self, level: _LoggingLevel | None = None) -> Logger: ... + def allow_connection_pickling(self) -> None: ... + def set_executable(self, executable: str) -> None: ... + def set_forkserver_preload(self, module_names: list[str]) -> None: ... + if sys.platform != "win32": + @overload + def get_context(self, method: None = None) -> DefaultContext: ... + @overload + def get_context(self, method: Literal["spawn"]) -> SpawnContext: ... + @overload + def get_context(self, method: Literal["fork"]) -> ForkContext: ... + @overload + def get_context(self, method: Literal["forkserver"]) -> ForkServerContext: ... + @overload + def get_context(self, method: str) -> BaseContext: ... + else: + @overload + def get_context(self, method: None = None) -> DefaultContext: ... + @overload + def get_context(self, method: Literal["spawn"]) -> SpawnContext: ... + @overload + def get_context(self, method: str) -> BaseContext: ... + + @overload + def get_start_method(self, allow_none: Literal[False] = False) -> str: ... + @overload + def get_start_method(self, allow_none: bool) -> str | None: ... + def set_start_method(self, method: str | None, force: bool = False) -> None: ... + @property + def reducer(self) -> str: ... + @reducer.setter + def reducer(self, reduction: str) -> None: ... 
+ def _check_available(self) -> None: ... + +class Process(BaseProcess): + _start_method: str | None + @staticmethod + def _Popen(process_obj: BaseProcess) -> DefaultContext: ... + +class DefaultContext(BaseContext): + Process: ClassVar[type[Process]] + def __init__(self, context: BaseContext) -> None: ... + def get_start_method(self, allow_none: bool = False) -> str: ... + def get_all_start_methods(self) -> list[str]: ... + +_default_context: DefaultContext + +class SpawnProcess(BaseProcess): + _start_method: str + if sys.platform != "win32": + @staticmethod + def _Popen(process_obj: BaseProcess) -> popen_spawn_posix.Popen: ... + else: + @staticmethod + def _Popen(process_obj: BaseProcess) -> popen_spawn_win32.Popen: ... + +class SpawnContext(BaseContext): + _name: str + Process: ClassVar[type[SpawnProcess]] + +if sys.platform != "win32": + class ForkProcess(BaseProcess): + _start_method: str + @staticmethod + def _Popen(process_obj: BaseProcess) -> popen_fork.Popen: ... + + class ForkServerProcess(BaseProcess): + _start_method: str + @staticmethod + def _Popen(process_obj: BaseProcess) -> popen_forkserver.Popen: ... + + class ForkContext(BaseContext): + _name: str + Process: ClassVar[type[ForkProcess]] + + class ForkServerContext(BaseContext): + _name: str + Process: ClassVar[type[ForkServerProcess]] + +def _force_start_method(method: str) -> None: ... +def get_spawning_popen() -> Any | None: ... +def set_spawning_popen(popen: Any) -> None: ... +def assert_spawning(obj: Any) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/dummy/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/dummy/__init__.pyi new file mode 100644 index 0000000..3cbeeb0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/dummy/__init__.pyi @@ -0,0 +1,77 @@ +import array +import threading +import weakref +from collections.abc import Callable, Iterable, Mapping, Sequence +from queue import Queue as Queue +from threading import ( + Barrier as Barrier, + BoundedSemaphore as BoundedSemaphore, + Condition as Condition, + Event as Event, + Lock as Lock, + RLock as RLock, + Semaphore as Semaphore, +) +from typing import Any, Literal + +from .connection import Pipe as Pipe + +__all__ = [ + "Process", + "current_process", + "active_children", + "freeze_support", + "Lock", + "RLock", + "Semaphore", + "BoundedSemaphore", + "Condition", + "Event", + "Barrier", + "Queue", + "Manager", + "Pipe", + "Pool", + "JoinableQueue", +] + +JoinableQueue = Queue + +class DummyProcess(threading.Thread): + _children: weakref.WeakKeyDictionary[Any, Any] + _parent: threading.Thread + _pid: None + _start_called: int + @property + def exitcode(self) -> Literal[0] | None: ... + def __init__( + self, + group: Any = None, + target: Callable[..., object] | None = None, + name: str | None = None, + args: Iterable[Any] = (), + kwargs: Mapping[str, Any] = {}, + ) -> None: ... + +Process = DummyProcess + +class Namespace: + def __init__(self, **kwds: Any) -> None: ... + def __getattr__(self, name: str, /) -> Any: ... + def __setattr__(self, name: str, value: Any, /) -> None: ... + +class Value: + _typecode: Any + _value: Any + value: Any + def __init__(self, typecode: Any, value: Any, lock: Any = True) -> None: ... + +def Array(typecode: Any, sequence: Sequence[Any], lock: Any = True) -> array.array[Any]: ... +def Manager() -> Any: ... 
+def Pool(processes: int | None = None, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ()) -> Any: ... +def active_children() -> list[Any]: ... + +current_process = threading.current_thread + +def freeze_support() -> None: ... +def shutdown() -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/dummy/connection.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/dummy/connection.pyi new file mode 100644 index 0000000..d7e9821 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/dummy/connection.pyi @@ -0,0 +1,39 @@ +from multiprocessing.connection import _Address +from queue import Queue +from types import TracebackType +from typing import Any +from typing_extensions import Self + +__all__ = ["Client", "Listener", "Pipe"] + +families: list[None] + +class Connection: + _in: Any + _out: Any + recv: Any + recv_bytes: Any + send: Any + send_bytes: Any + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def __init__(self, _in: Any, _out: Any) -> None: ... + def close(self) -> None: ... + def poll(self, timeout: float = 0.0) -> bool: ... + +class Listener: + _backlog_queue: Queue[Any] | None + @property + def address(self) -> Queue[Any] | None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def __init__(self, address: _Address | None = None, family: int | None = None, backlog: int = 1) -> None: ... + def accept(self) -> Connection: ... + def close(self) -> None: ... + +def Client(address: _Address) -> Connection: ... +def Pipe(duplex: bool = True) -> tuple[Connection, Connection]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/forkserver.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/forkserver.pyi new file mode 100644 index 0000000..c4af295 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/forkserver.pyi @@ -0,0 +1,45 @@ +import sys +from _typeshed import FileDescriptorLike, Unused +from collections.abc import Sequence +from struct import Struct +from typing import Any, Final + +__all__ = ["ensure_running", "get_inherited_fds", "connect_to_new_process", "set_forkserver_preload"] + +MAXFDS_TO_SEND: Final = 256 +SIGNED_STRUCT: Final[Struct] + +class ForkServer: + def set_forkserver_preload(self, modules_names: list[str]) -> None: ... + def get_inherited_fds(self) -> list[int] | None: ... + def connect_to_new_process(self, fds: Sequence[int]) -> tuple[int, int]: ... + def ensure_running(self) -> None: ... + +if sys.version_info >= (3, 14): + def main( + listener_fd: int | None, + alive_r: FileDescriptorLike, + preload: Sequence[str], + main_path: str | None = None, + sys_path: list[str] | None = None, + *, + authkey_r: int | None = None, + ) -> None: ... + +else: + def main( + listener_fd: int | None, + alive_r: FileDescriptorLike, + preload: Sequence[str], + main_path: str | None = None, + sys_path: Unused = None, + ) -> None: ... + +def read_signed(fd: int) -> Any: ... +def write_signed(fd: int, n: int) -> None: ... 
+ +_forkserver: ForkServer +ensure_running = _forkserver.ensure_running +get_inherited_fds = _forkserver.get_inherited_fds +connect_to_new_process = _forkserver.connect_to_new_process +set_forkserver_preload = _forkserver.set_forkserver_preload diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/heap.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/heap.pyi new file mode 100644 index 0000000..38191a0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/heap.pyi @@ -0,0 +1,37 @@ +import sys +from _typeshed import Incomplete +from collections.abc import Callable +from mmap import mmap +from typing import Protocol, type_check_only +from typing_extensions import TypeAlias + +__all__ = ["BufferWrapper"] + +class Arena: + size: int + buffer: mmap + if sys.platform == "win32": + name: str + def __init__(self, size: int) -> None: ... + else: + fd: int + def __init__(self, size: int, fd: int = -1) -> None: ... + +_Block: TypeAlias = tuple[Arena, int, int] + +if sys.platform != "win32": + @type_check_only + class _SupportsDetach(Protocol): + def detach(self) -> int: ... + + def reduce_arena(a: Arena) -> tuple[Callable[[int, _SupportsDetach], Arena], tuple[int, Incomplete]]: ... + def rebuild_arena(size: int, dupfd: _SupportsDetach) -> Arena: ... + +class Heap: + def __init__(self, size: int = ...) -> None: ... + def free(self, block: _Block) -> None: ... + def malloc(self, size: int) -> _Block: ... + +class BufferWrapper: + def __init__(self, size: int) -> None: ... + def create_memoryview(self) -> memoryview: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/managers.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/managers.pyi new file mode 100644 index 0000000..5efe69a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/managers.pyi @@ -0,0 +1,350 @@ +import queue +import sys +import threading +from _typeshed import SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT +from collections.abc import ( + Callable, + Iterable, + Iterator, + Mapping, + MutableMapping, + MutableSequence, + MutableSet, + Sequence, + Set as AbstractSet, +) +from types import GenericAlias, TracebackType +from typing import Any, AnyStr, ClassVar, Generic, SupportsIndex, TypeVar, overload +from typing_extensions import Self, TypeAlias + +from . import pool +from .connection import Connection, _Address +from .context import BaseContext +from .shared_memory import _SLT, ShareableList as _ShareableList, SharedMemory as _SharedMemory +from .util import Finalize as _Finalize + +__all__ = ["BaseManager", "SyncManager", "BaseProxy", "Token", "SharedMemoryManager"] + +_T = TypeVar("_T") +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") +_S = TypeVar("_S") + +class Namespace: + def __init__(self, **kwds: Any) -> None: ... + def __getattr__(self, name: str, /) -> Any: ... + def __setattr__(self, name: str, value: Any, /) -> None: ... + +_Namespace: TypeAlias = Namespace + +class Token: + __slots__ = ("typeid", "address", "id") + typeid: str | bytes | None + address: _Address | None + id: str | bytes | int | None + def __init__(self, typeid: bytes | str | None, address: _Address | None, id: str | bytes | int | None) -> None: ... + def __getstate__(self) -> tuple[str | bytes | None, tuple[str | bytes, int], str | bytes | int | None]: ... 
+ def __setstate__(self, state: tuple[str | bytes | None, tuple[str | bytes, int], str | bytes | int | None]) -> None: ... + +class BaseProxy: + _address_to_local: dict[_Address, Any] + _mutex: Any + def __init__( + self, + token: Any, + serializer: str, + manager: Any = None, + authkey: AnyStr | None = None, + exposed: Any = None, + incref: bool = True, + manager_owned: bool = False, + ) -> None: ... + def __deepcopy__(self, memo: Any | None) -> Any: ... + def _callmethod(self, methodname: str, args: tuple[Any, ...] = (), kwds: dict[Any, Any] = {}) -> None: ... + def _getvalue(self) -> Any: ... + def __reduce__(self) -> tuple[Any, tuple[Any, Any, str, dict[Any, Any]]]: ... + +class ValueProxy(BaseProxy, Generic[_T]): + def get(self) -> _T: ... + def set(self, value: _T) -> None: ... + value: _T + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +if sys.version_info >= (3, 13): + class _BaseDictProxy(BaseProxy, MutableMapping[_KT, _VT]): + __builtins__: ClassVar[dict[str, Any]] + def __len__(self) -> int: ... + def __getitem__(self, key: _KT, /) -> _VT: ... + def __setitem__(self, key: _KT, value: _VT, /) -> None: ... + def __delitem__(self, key: _KT, /) -> None: ... + def __iter__(self) -> Iterator[_KT]: ... + def copy(self) -> dict[_KT, _VT]: ... + @overload # type: ignore[override] + def get(self, key: _KT, /) -> _VT | None: ... + @overload + def get(self, key: _KT, default: _VT, /) -> _VT: ... + @overload + def get(self, key: _KT, default: _T, /) -> _VT | _T: ... + @overload + def pop(self, key: _KT, /) -> _VT: ... + @overload + def pop(self, key: _KT, default: _VT, /) -> _VT: ... + @overload + def pop(self, key: _KT, default: _T, /) -> _VT | _T: ... + def keys(self) -> list[_KT]: ... # type: ignore[override] + def items(self) -> list[tuple[_KT, _VT]]: ... # type: ignore[override] + def values(self) -> list[_VT]: ... # type: ignore[override] + + class DictProxy(_BaseDictProxy[_KT, _VT]): + def __class_getitem__(cls, args: Any, /) -> GenericAlias: ... + +else: + class DictProxy(BaseProxy, MutableMapping[_KT, _VT]): + __builtins__: ClassVar[dict[str, Any]] + def __len__(self) -> int: ... + def __getitem__(self, key: _KT, /) -> _VT: ... + def __setitem__(self, key: _KT, value: _VT, /) -> None: ... + def __delitem__(self, key: _KT, /) -> None: ... + def __iter__(self) -> Iterator[_KT]: ... + def copy(self) -> dict[_KT, _VT]: ... + @overload # type: ignore[override] + def get(self, key: _KT, /) -> _VT | None: ... + @overload + def get(self, key: _KT, default: _VT, /) -> _VT: ... + @overload + def get(self, key: _KT, default: _T, /) -> _VT | _T: ... + @overload + def pop(self, key: _KT, /) -> _VT: ... + @overload + def pop(self, key: _KT, default: _VT, /) -> _VT: ... + @overload + def pop(self, key: _KT, default: _T, /) -> _VT | _T: ... + def keys(self) -> list[_KT]: ... # type: ignore[override] + def items(self) -> list[tuple[_KT, _VT]]: ... # type: ignore[override] + def values(self) -> list[_VT]: ... # type: ignore[override] + +if sys.version_info >= (3, 14): + class _BaseSetProxy(BaseProxy, MutableSet[_T]): + __builtins__: ClassVar[dict[str, Any]] + # Copied from builtins.set + def add(self, element: _T, /) -> None: ... + def copy(self) -> set[_T]: ... + def clear(self) -> None: ... + def difference(self, *s: Iterable[Any]) -> set[_T]: ... + def difference_update(self, *s: Iterable[Any]) -> None: ... + def discard(self, element: _T, /) -> None: ... + def intersection(self, *s: Iterable[Any]) -> set[_T]: ... + def intersection_update(self, *s: Iterable[Any]) -> None: ... 
+ def isdisjoint(self, s: Iterable[Any], /) -> bool: ... + def issubset(self, s: Iterable[Any], /) -> bool: ... + def issuperset(self, s: Iterable[Any], /) -> bool: ... + def pop(self) -> _T: ... + def remove(self, element: _T, /) -> None: ... + def symmetric_difference(self, s: Iterable[_T], /) -> set[_T]: ... + def symmetric_difference_update(self, s: Iterable[_T], /) -> None: ... + def union(self, *s: Iterable[_S]) -> set[_T | _S]: ... + def update(self, *s: Iterable[_T]) -> None: ... + def __len__(self) -> int: ... + def __contains__(self, o: object, /) -> bool: ... + def __iter__(self) -> Iterator[_T]: ... + def __and__(self, value: AbstractSet[object], /) -> set[_T]: ... + def __iand__(self, value: AbstractSet[object], /) -> Self: ... + def __or__(self, value: AbstractSet[_S], /) -> set[_T | _S]: ... + def __ior__(self, value: AbstractSet[_T], /) -> Self: ... # type: ignore[override,misc] + def __sub__(self, value: AbstractSet[_T | None], /) -> set[_T]: ... + def __isub__(self, value: AbstractSet[object], /) -> Self: ... + def __xor__(self, value: AbstractSet[_S], /) -> set[_T | _S]: ... + def __ixor__(self, value: AbstractSet[_T], /) -> Self: ... # type: ignore[override,misc] + def __le__(self, value: AbstractSet[object], /) -> bool: ... + def __lt__(self, value: AbstractSet[object], /) -> bool: ... + def __ge__(self, value: AbstractSet[object], /) -> bool: ... + def __gt__(self, value: AbstractSet[object], /) -> bool: ... + def __eq__(self, value: object, /) -> bool: ... + def __rand__(self, value: AbstractSet[object], /) -> set[_T]: ... + def __ror__(self, value: AbstractSet[_S], /) -> set[_T | _S]: ... # type: ignore[misc] + def __rsub__(self, value: AbstractSet[_T], /) -> set[_T]: ... + def __rxor__(self, value: AbstractSet[_S], /) -> set[_T | _S]: ... # type: ignore[misc] + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + + class SetProxy(_BaseSetProxy[_T]): ... + +class BaseListProxy(BaseProxy, MutableSequence[_T]): + __builtins__: ClassVar[dict[str, Any]] + def __len__(self) -> int: ... + def __add__(self, x: list[_T], /) -> list[_T]: ... + def __delitem__(self, i: SupportsIndex | slice, /) -> None: ... + @overload + def __getitem__(self, i: SupportsIndex, /) -> _T: ... + @overload + def __getitem__(self, s: slice, /) -> list[_T]: ... + @overload + def __setitem__(self, i: SupportsIndex, o: _T, /) -> None: ... + @overload + def __setitem__(self, s: slice, o: Iterable[_T], /) -> None: ... + def __mul__(self, n: SupportsIndex, /) -> list[_T]: ... + def __rmul__(self, n: SupportsIndex, /) -> list[_T]: ... + def __imul__(self, value: SupportsIndex, /) -> Self: ... + def __reversed__(self) -> Iterator[_T]: ... + def append(self, object: _T, /) -> None: ... + def extend(self, iterable: Iterable[_T], /) -> None: ... + def pop(self, index: SupportsIndex = ..., /) -> _T: ... + def index(self, value: _T, start: SupportsIndex = ..., stop: SupportsIndex = ..., /) -> int: ... + def count(self, value: _T, /) -> int: ... + def insert(self, index: SupportsIndex, object: _T, /) -> None: ... + def remove(self, value: _T, /) -> None: ... + # Use BaseListProxy[SupportsRichComparisonT] for the first overload rather than [SupportsRichComparison] + # to work around invariance + @overload + def sort(self: BaseListProxy[SupportsRichComparisonT], *, key: None = None, reverse: bool = ...) -> None: ... + @overload + def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = ...) -> None: ... 
+ +class ListProxy(BaseListProxy[_T]): + def __iadd__(self, value: Iterable[_T], /) -> Self: ... # type: ignore[override] + def __imul__(self, value: SupportsIndex, /) -> Self: ... # type: ignore[override] + if sys.version_info >= (3, 13): + def __class_getitem__(cls, args: Any, /) -> Any: ... + +# Send is (kind, result) +# Receive is (id, methodname, args, kwds) +_ServerConnection: TypeAlias = Connection[tuple[str, Any], tuple[str, str, Iterable[Any], Mapping[str, Any]]] + +# Returned by BaseManager.get_server() +class Server: + address: _Address | None + id_to_obj: dict[str, tuple[Any, set[str], dict[str, str]]] + fallback_mapping: dict[str, Callable[[_ServerConnection, str, Any], Any]] + public: list[str] + # Registry values are (callable, exposed, method_to_typeid, proxytype) + def __init__( + self, + registry: dict[str, tuple[Callable[..., Any], Iterable[str], dict[str, str], Any]], + address: _Address | None, + authkey: bytes, + serializer: str, + ) -> None: ... + def serve_forever(self) -> None: ... + def accepter(self) -> None: ... + if sys.version_info >= (3, 10): + def handle_request(self, conn: _ServerConnection) -> None: ... + else: + def handle_request(self, c: _ServerConnection) -> None: ... + + def serve_client(self, conn: _ServerConnection) -> None: ... + def fallback_getvalue(self, conn: _ServerConnection, ident: str, obj: _T) -> _T: ... + def fallback_str(self, conn: _ServerConnection, ident: str, obj: Any) -> str: ... + def fallback_repr(self, conn: _ServerConnection, ident: str, obj: Any) -> str: ... + def dummy(self, c: _ServerConnection) -> None: ... + def debug_info(self, c: _ServerConnection) -> str: ... + def number_of_objects(self, c: _ServerConnection) -> int: ... + def shutdown(self, c: _ServerConnection) -> None: ... + def create(self, c: _ServerConnection, typeid: str, /, *args: Any, **kwds: Any) -> tuple[str, tuple[str, ...]]: ... + def get_methods(self, c: _ServerConnection, token: Token) -> set[str]: ... + def accept_connection(self, c: _ServerConnection, name: str) -> None: ... + def incref(self, c: _ServerConnection, ident: str) -> None: ... + def decref(self, c: _ServerConnection, ident: str) -> None: ... + +class BaseManager: + if sys.version_info >= (3, 11): + def __init__( + self, + address: _Address | None = None, + authkey: bytes | None = None, + serializer: str = "pickle", + ctx: BaseContext | None = None, + *, + shutdown_timeout: float = 1.0, + ) -> None: ... + else: + def __init__( + self, + address: _Address | None = None, + authkey: bytes | None = None, + serializer: str = "pickle", + ctx: BaseContext | None = None, + ) -> None: ... + + def get_server(self) -> Server: ... + def connect(self) -> None: ... + def start(self, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ()) -> None: ... + shutdown: _Finalize # only available after start() was called + def join(self, timeout: float | None = None) -> None: ... # undocumented + @property + def address(self) -> _Address | None: ... + @classmethod + def register( + cls, + typeid: str, + callable: Callable[..., object] | None = None, + proxytype: Any = None, + exposed: Sequence[str] | None = None, + method_to_typeid: Mapping[str, str] | None = None, + create_method: bool = True, + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... 
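+# Illustrative sketch (not part of the upstream stub): register() is how custom managers
+# expose objects; the typeid becomes a proxy-returning method on started instances, e.g.
+#     class JobManager(BaseManager): ...            # hypothetical subclass
+#     JobManager.register("make_list", callable=list)
+#     with JobManager() as m:
+#         shared = m.make_list()                    # auto-generated proxy to a list in the manager process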
+ +class SyncManager(BaseManager): + def Barrier( + self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None + ) -> threading.Barrier: ... + def BoundedSemaphore(self, value: int = 1) -> threading.BoundedSemaphore: ... + def Condition(self, lock: threading.Lock | threading._RLock | None = None) -> threading.Condition: ... + def Event(self) -> threading.Event: ... + def Lock(self) -> threading.Lock: ... + def Namespace(self) -> _Namespace: ... + def Pool( + self, + processes: int | None = None, + initializer: Callable[..., object] | None = None, + initargs: Iterable[Any] = (), + maxtasksperchild: int | None = None, + context: Any | None = None, + ) -> pool.Pool: ... + def Queue(self, maxsize: int = ...) -> queue.Queue[Any]: ... + def JoinableQueue(self, maxsize: int = ...) -> queue.Queue[Any]: ... + def RLock(self) -> threading.RLock: ... + def Semaphore(self, value: int = 1) -> threading.Semaphore: ... + def Array(self, typecode: Any, sequence: Sequence[_T]) -> Sequence[_T]: ... + def Value(self, typecode: Any, value: _T) -> ValueProxy[_T]: ... + # Overloads are copied from builtins.dict.__init__ + @overload + def dict(self) -> DictProxy[Any, Any]: ... + @overload + def dict(self, **kwargs: _VT) -> DictProxy[str, _VT]: ... + @overload + def dict(self, map: SupportsKeysAndGetItem[_KT, _VT], /) -> DictProxy[_KT, _VT]: ... + @overload + def dict(self, map: SupportsKeysAndGetItem[str, _VT], /, **kwargs: _VT) -> DictProxy[str, _VT]: ... + @overload + def dict(self, iterable: Iterable[tuple[_KT, _VT]], /) -> DictProxy[_KT, _VT]: ... + @overload + def dict(self, iterable: Iterable[tuple[str, _VT]], /, **kwargs: _VT) -> DictProxy[str, _VT]: ... + @overload + def dict(self, iterable: Iterable[list[str]], /) -> DictProxy[str, str]: ... + @overload + def dict(self, iterable: Iterable[list[bytes]], /) -> DictProxy[bytes, bytes]: ... + @overload + def list(self, sequence: Sequence[_T], /) -> ListProxy[_T]: ... + @overload + def list(self) -> ListProxy[Any]: ... + if sys.version_info >= (3, 14): + @overload + def set(self, iterable: Iterable[_T], /) -> SetProxy[_T]: ... + @overload + def set(self) -> SetProxy[Any]: ... + +class RemoteError(Exception): ... + +class SharedMemoryServer(Server): + def track_segment(self, c: _ServerConnection, segment_name: str) -> None: ... + def release_segment(self, c: _ServerConnection, segment_name: str) -> None: ... + def list_segments(self, c: _ServerConnection) -> list[str]: ... + +class SharedMemoryManager(BaseManager): + def get_server(self) -> SharedMemoryServer: ... + def SharedMemory(self, size: int) -> _SharedMemory: ... + def ShareableList(self, sequence: Iterable[_SLT] | None) -> _ShareableList[_SLT]: ... + def __del__(self) -> None: ... 
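A short, illustrative sketch of the SyncManager proxies stubbed above (not part of the typeshed file): Manager() returns a SyncManager whose dict()/list() methods hand back DictProxy/ListProxy objects usable across processes.

    from multiprocessing import Manager

    with Manager() as manager:             # SyncManager per the stub above
        shared = manager.dict()            # DictProxy[Any, Any]
        numbers = manager.list([1, 2, 3])  # ListProxy[int]
        shared["total"] = sum(numbers)     # proxies forward operations to the manager process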
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/pool.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/pool.pyi new file mode 100644 index 0000000..f276372 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/pool.pyi @@ -0,0 +1,101 @@ +from collections.abc import Callable, Iterable, Iterator, Mapping +from multiprocessing.context import DefaultContext, Process +from types import GenericAlias, TracebackType +from typing import Any, Final, Generic, TypeVar +from typing_extensions import Self + +__all__ = ["Pool", "ThreadPool"] + +_S = TypeVar("_S") +_T = TypeVar("_T") + +class ApplyResult(Generic[_T]): + def __init__( + self, pool: Pool, callback: Callable[[_T], object] | None, error_callback: Callable[[BaseException], object] | None + ) -> None: ... + def get(self, timeout: float | None = None) -> _T: ... + def wait(self, timeout: float | None = None) -> None: ... + def ready(self) -> bool: ... + def successful(self) -> bool: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +# alias created during issue #17805 +AsyncResult = ApplyResult + +class MapResult(ApplyResult[list[_T]]): + def __init__( + self, + pool: Pool, + chunksize: int, + length: int, + callback: Callable[[list[_T]], object] | None, + error_callback: Callable[[BaseException], object] | None, + ) -> None: ... + +class IMapIterator(Iterator[_T]): + def __init__(self, pool: Pool) -> None: ... + def __iter__(self) -> Self: ... + def next(self, timeout: float | None = None) -> _T: ... + def __next__(self, timeout: float | None = None) -> _T: ... + +class IMapUnorderedIterator(IMapIterator[_T]): ... + +class Pool: + def __init__( + self, + processes: int | None = None, + initializer: Callable[..., object] | None = None, + initargs: Iterable[Any] = (), + maxtasksperchild: int | None = None, + context: Any | None = None, + ) -> None: ... + @staticmethod + def Process(ctx: DefaultContext, *args: Any, **kwds: Any) -> Process: ... + def apply(self, func: Callable[..., _T], args: Iterable[Any] = (), kwds: Mapping[str, Any] = {}) -> _T: ... + def apply_async( + self, + func: Callable[..., _T], + args: Iterable[Any] = (), + kwds: Mapping[str, Any] = {}, + callback: Callable[[_T], object] | None = None, + error_callback: Callable[[BaseException], object] | None = None, + ) -> AsyncResult[_T]: ... + def map(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = None) -> list[_T]: ... + def map_async( + self, + func: Callable[[_S], _T], + iterable: Iterable[_S], + chunksize: int | None = None, + callback: Callable[[list[_T]], object] | None = None, + error_callback: Callable[[BaseException], object] | None = None, + ) -> MapResult[_T]: ... + def imap(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = 1) -> IMapIterator[_T]: ... + def imap_unordered(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = 1) -> IMapIterator[_T]: ... + def starmap(self, func: Callable[..., _T], iterable: Iterable[Iterable[Any]], chunksize: int | None = None) -> list[_T]: ... + def starmap_async( + self, + func: Callable[..., _T], + iterable: Iterable[Iterable[Any]], + chunksize: int | None = None, + callback: Callable[[list[_T]], object] | None = None, + error_callback: Callable[[BaseException], object] | None = None, + ) -> AsyncResult[list[_T]]: ... + def close(self) -> None: ... + def terminate(self) -> None: ... + def join(self) -> None: ... 
+ def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def __del__(self) -> None: ... + +class ThreadPool(Pool): + def __init__( + self, processes: int | None = None, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = () + ) -> None: ... + +# undocumented +INIT: Final = "INIT" +RUN: Final = "RUN" +CLOSE: Final = "CLOSE" +TERMINATE: Final = "TERMINATE" diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/popen_fork.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/popen_fork.pyi new file mode 100644 index 0000000..5e53b05 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/popen_fork.pyi @@ -0,0 +1,26 @@ +import sys +from typing import ClassVar + +from .process import BaseProcess +from .util import Finalize + +if sys.platform != "win32": + __all__ = ["Popen"] + + class Popen: + finalizer: Finalize | None + method: ClassVar[str] + pid: int + returncode: int | None + sentinel: int # doesn't exist if os.fork in _launch returns 0 + + def __init__(self, process_obj: BaseProcess) -> None: ... + def duplicate_for_child(self, fd: int) -> int: ... + def poll(self, flag: int = 1) -> int | None: ... + def wait(self, timeout: float | None = None) -> int | None: ... + if sys.version_info >= (3, 14): + def interrupt(self) -> None: ... + + def terminate(self) -> None: ... + def kill(self) -> None: ... + def close(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/popen_forkserver.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/popen_forkserver.pyi new file mode 100644 index 0000000..f7d53bb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/popen_forkserver.pyi @@ -0,0 +1,16 @@ +import sys +from typing import ClassVar + +from . import popen_fork +from .util import Finalize + +if sys.platform != "win32": + __all__ = ["Popen"] + + class _DupFd: + def __init__(self, ind: int) -> None: ... + def detach(self) -> int: ... + + class Popen(popen_fork.Popen): + DupFd: ClassVar[type[_DupFd]] + finalizer: Finalize diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/popen_spawn_posix.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/popen_spawn_posix.pyi new file mode 100644 index 0000000..7e81d39 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/popen_spawn_posix.pyi @@ -0,0 +1,20 @@ +import sys +from typing import ClassVar + +from . import popen_fork +from .util import Finalize + +if sys.platform != "win32": + __all__ = ["Popen"] + + class _DupFd: + fd: int + + def __init__(self, fd: int) -> None: ... + def detach(self) -> int: ... 
+ + class Popen(popen_fork.Popen): + DupFd: ClassVar[type[_DupFd]] + finalizer: Finalize + pid: int # may not exist if _launch raises in second try / except + sentinel: int # may not exist if _launch raises in second try / except diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi new file mode 100644 index 0000000..481b9ee --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi @@ -0,0 +1,30 @@ +import sys +from multiprocessing.process import BaseProcess +from typing import ClassVar, Final + +from .util import Finalize + +if sys.platform == "win32": + __all__ = ["Popen"] + + TERMINATE: Final[int] + WINEXE: Final[bool] + WINSERVICE: Final[bool] + WINENV: Final[bool] + + class Popen: + finalizer: Finalize + method: ClassVar[str] + pid: int + returncode: int | None + sentinel: int + + def __init__(self, process_obj: BaseProcess) -> None: ... + def duplicate_for_child(self, handle: int) -> int: ... + def wait(self, timeout: float | None = None) -> int | None: ... + def poll(self) -> int | None: ... + def terminate(self) -> None: ... + + kill = terminate + + def close(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/process.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/process.pyi new file mode 100644 index 0000000..4d129b2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/process.pyi @@ -0,0 +1,39 @@ +from collections.abc import Callable, Iterable, Mapping +from typing import Any + +__all__ = ["BaseProcess", "current_process", "active_children", "parent_process"] + +class BaseProcess: + name: str + daemon: bool + authkey: bytes + _identity: tuple[int, ...] # undocumented + def __init__( + self, + group: None = None, + target: Callable[..., object] | None = None, + name: str | None = None, + args: Iterable[Any] = (), + kwargs: Mapping[str, Any] = {}, + *, + daemon: bool | None = None, + ) -> None: ... + def run(self) -> None: ... + def start(self) -> None: ... + def terminate(self) -> None: ... + def kill(self) -> None: ... + def close(self) -> None: ... + def join(self, timeout: float | None = None) -> None: ... + def is_alive(self) -> bool: ... + @property + def exitcode(self) -> int | None: ... + @property + def ident(self) -> int | None: ... + @property + def pid(self) -> int | None: ... + @property + def sentinel(self) -> int: ... + +def current_process() -> BaseProcess: ... +def active_children() -> list[BaseProcess]: ... +def parent_process() -> BaseProcess | None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/queues.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/queues.pyi new file mode 100644 index 0000000..a6b00d7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/queues.pyi @@ -0,0 +1,36 @@ +import sys +from types import GenericAlias +from typing import Any, Generic, TypeVar + +__all__ = ["Queue", "SimpleQueue", "JoinableQueue"] + +_T = TypeVar("_T") + +class Queue(Generic[_T]): + # FIXME: `ctx` is a circular dependency and it's not actually optional. + # It's marked as such to be able to use the generic Queue in __init__.pyi. + def __init__(self, maxsize: int = 0, *, ctx: Any = ...) -> None: ... 
+ def put(self, obj: _T, block: bool = True, timeout: float | None = None) -> None: ... + def get(self, block: bool = True, timeout: float | None = None) -> _T: ... + def qsize(self) -> int: ... + def empty(self) -> bool: ... + def full(self) -> bool: ... + def get_nowait(self) -> _T: ... + def put_nowait(self, obj: _T) -> None: ... + def close(self) -> None: ... + def join_thread(self) -> None: ... + def cancel_join_thread(self) -> None: ... + if sys.version_info >= (3, 12): + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +class JoinableQueue(Queue[_T]): + def task_done(self) -> None: ... + def join(self) -> None: ... + +class SimpleQueue(Generic[_T]): + def __init__(self, *, ctx: Any = ...) -> None: ... + def close(self) -> None: ... + def empty(self) -> bool: ... + def get(self) -> _T: ... + def put(self, obj: _T) -> None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/reduction.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/reduction.pyi new file mode 100644 index 0000000..490ae19 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/reduction.pyi @@ -0,0 +1,88 @@ +import pickle +import sys +from _pickle import _ReducedType +from _typeshed import HasFileno, SupportsWrite, Unused +from abc import ABCMeta +from builtins import type as Type # alias to avoid name clash +from collections.abc import Callable +from copyreg import _DispatchTableType +from multiprocessing import connection +from socket import socket +from typing import Any, Final + +if sys.platform == "win32": + __all__ = ["send_handle", "recv_handle", "ForkingPickler", "register", "dump", "DupHandle", "duplicate", "steal_handle"] +else: + __all__ = ["send_handle", "recv_handle", "ForkingPickler", "register", "dump", "DupFd", "sendfds", "recvfds"] + +HAVE_SEND_HANDLE: Final[bool] + +class ForkingPickler(pickle.Pickler): + dispatch_table: _DispatchTableType + def __init__(self, file: SupportsWrite[bytes], protocol: int | None = ...) -> None: ... + @classmethod + def register(cls, type: Type, reduce: Callable[[Any], _ReducedType]) -> None: ... + @classmethod + def dumps(cls, obj: Any, protocol: int | None = None) -> memoryview: ... + loads = pickle.loads + +register = ForkingPickler.register + +def dump(obj: Any, file: SupportsWrite[bytes], protocol: int | None = None) -> None: ... + +if sys.platform == "win32": + def duplicate( + handle: int, target_process: int | None = None, inheritable: bool = False, *, source_process: int | None = None + ) -> int: ... + def steal_handle(source_pid: int, handle: int) -> int: ... + def send_handle(conn: connection.PipeConnection[DupHandle, Any], handle: int, destination_pid: int) -> None: ... + def recv_handle(conn: connection.PipeConnection[Any, DupHandle]) -> int: ... + + class DupHandle: + def __init__(self, handle: int, access: int, pid: int | None = None) -> None: ... + def detach(self) -> int: ... + +else: + if sys.version_info < (3, 14): + ACKNOWLEDGE: Final[bool] + + def recvfds(sock: socket, size: int) -> list[int]: ... + def send_handle(conn: HasFileno, handle: int, destination_pid: Unused) -> None: ... + def recv_handle(conn: HasFileno) -> int: ... + def sendfds(sock: socket, fds: list[int]) -> None: ... + def DupFd(fd: int) -> Any: ... # Return type is really hard to get right + +# These aliases are to work around pyright complaints. 
+# Pyright doesn't like it when a class object is defined as an alias +# of a global object with the same name. +_ForkingPickler = ForkingPickler +_register = register +_dump = dump +_send_handle = send_handle +_recv_handle = recv_handle + +if sys.platform == "win32": + _steal_handle = steal_handle + _duplicate = duplicate + _DupHandle = DupHandle +else: + _sendfds = sendfds + _recvfds = recvfds + _DupFd = DupFd + +class AbstractReducer(metaclass=ABCMeta): + ForkingPickler = _ForkingPickler + register = _register + dump = _dump + send_handle = _send_handle + recv_handle = _recv_handle + if sys.platform == "win32": + steal_handle = _steal_handle + duplicate = _duplicate + DupHandle = _DupHandle + else: + sendfds = _sendfds + recvfds = _recvfds + DupFd = _DupFd + + def __init__(self, *args: Unused) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/resource_sharer.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/resource_sharer.pyi new file mode 100644 index 0000000..5fee7cf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/resource_sharer.pyi @@ -0,0 +1,20 @@ +import sys +from socket import socket + +__all__ = ["stop"] + +if sys.platform == "win32": + __all__ += ["DupSocket"] + + class DupSocket: + def __init__(self, sock: socket) -> None: ... + def detach(self) -> socket: ... + +else: + __all__ += ["DupFd"] + + class DupFd: + def __init__(self, fd: int) -> None: ... + def detach(self) -> int: ... + +def stop(timeout: float | None = None) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi new file mode 100644 index 0000000..cb2f27a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi @@ -0,0 +1,21 @@ +import sys +from _typeshed import FileDescriptorOrPath +from collections.abc import Sized + +__all__ = ["ensure_running", "register", "unregister"] + +class ResourceTracker: + def getfd(self) -> int | None: ... + def ensure_running(self) -> None: ... + def register(self, name: Sized, rtype: str) -> None: ... + def unregister(self, name: Sized, rtype: str) -> None: ... + if sys.version_info >= (3, 12): + def __del__(self) -> None: ... + +_resource_tracker: ResourceTracker +ensure_running = _resource_tracker.ensure_running +register = _resource_tracker.register +unregister = _resource_tracker.unregister +getfd = _resource_tracker.getfd + +def main(fd: FileDescriptorOrPath) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi new file mode 100644 index 0000000..f75a372 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi @@ -0,0 +1,41 @@ +import sys +from collections.abc import Iterable +from types import GenericAlias +from typing import Any, Generic, TypeVar, overload +from typing_extensions import Self + +__all__ = ["SharedMemory", "ShareableList"] + +_SLT = TypeVar("_SLT", int, float, bool, str, bytes, None) + +class SharedMemory: + if sys.version_info >= (3, 13): + def __init__(self, name: str | None = None, create: bool = False, size: int = 0, *, track: bool = True) -> None: ... 
+ else: + def __init__(self, name: str | None = None, create: bool = False, size: int = 0) -> None: ... + + @property + def buf(self) -> memoryview | None: ... + @property + def name(self) -> str: ... + @property + def size(self) -> int: ... + def close(self) -> None: ... + def unlink(self) -> None: ... + def __del__(self) -> None: ... + +class ShareableList(Generic[_SLT]): + shm: SharedMemory + @overload + def __init__(self, sequence: None = None, *, name: str | None = None) -> None: ... + @overload + def __init__(self, sequence: Iterable[_SLT], *, name: str | None = None) -> None: ... + def __getitem__(self, position: int) -> _SLT: ... + def __setitem__(self, position: int, value: _SLT) -> None: ... + def __reduce__(self) -> tuple[Self, tuple[_SLT, ...]]: ... + def __len__(self) -> int: ... + @property + def format(self) -> str: ... + def count(self, value: _SLT) -> int: ... + def index(self, value: _SLT) -> int: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi new file mode 100644 index 0000000..e2ec15f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi @@ -0,0 +1,129 @@ +import ctypes +from _ctypes import _CData +from collections.abc import Callable, Iterable, Sequence +from ctypes import _SimpleCData, c_char +from multiprocessing.context import BaseContext +from multiprocessing.synchronize import _LockLike +from types import TracebackType +from typing import Any, Generic, Literal, Protocol, TypeVar, overload, type_check_only + +__all__ = ["RawValue", "RawArray", "Value", "Array", "copy", "synchronized"] + +_T = TypeVar("_T") +_CT = TypeVar("_CT", bound=_CData) + +@overload +def RawValue(typecode_or_type: type[_CT], *args: Any) -> _CT: ... +@overload +def RawValue(typecode_or_type: str, *args: Any) -> Any: ... +@overload +def RawArray(typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any]) -> ctypes.Array[_CT]: ... +@overload +def RawArray(typecode_or_type: str, size_or_initializer: int | Sequence[Any]) -> Any: ... +@overload +def Value(typecode_or_type: type[_CT], *args: Any, lock: Literal[False], ctx: BaseContext | None = None) -> _CT: ... +@overload +def Value( + typecode_or_type: type[_CT], *args: Any, lock: Literal[True] | _LockLike = True, ctx: BaseContext | None = None +) -> SynchronizedBase[_CT]: ... +@overload +def Value( + typecode_or_type: str, *args: Any, lock: Literal[True] | _LockLike = True, ctx: BaseContext | None = None +) -> SynchronizedBase[Any]: ... +@overload +def Value( + typecode_or_type: str | type[_CData], *args: Any, lock: bool | _LockLike = True, ctx: BaseContext | None = None +) -> Any: ... +@overload +def Array( + typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[False], ctx: BaseContext | None = None +) -> _CT: ... +@overload +def Array( + typecode_or_type: type[c_char], + size_or_initializer: int | Sequence[Any], + *, + lock: Literal[True] | _LockLike = True, + ctx: BaseContext | None = None, +) -> SynchronizedString: ... +@overload +def Array( + typecode_or_type: type[_SimpleCData[_T]], + size_or_initializer: int | Sequence[Any], + *, + lock: Literal[True] | _LockLike = True, + ctx: BaseContext | None = None, +) -> SynchronizedArray[_T]: ... 
+@overload +def Array( + typecode_or_type: str, + size_or_initializer: int | Sequence[Any], + *, + lock: Literal[True] | _LockLike = True, + ctx: BaseContext | None = None, +) -> SynchronizedArray[Any]: ... +@overload +def Array( + typecode_or_type: str | type[_CData], + size_or_initializer: int | Sequence[Any], + *, + lock: bool | _LockLike = True, + ctx: BaseContext | None = None, +) -> Any: ... +def copy(obj: _CT) -> _CT: ... +@overload +def synchronized(obj: _SimpleCData[_T], lock: _LockLike | None = None, ctx: Any | None = None) -> Synchronized[_T]: ... +@overload +def synchronized(obj: ctypes.Array[c_char], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedString: ... +@overload +def synchronized( + obj: ctypes.Array[_SimpleCData[_T]], lock: _LockLike | None = None, ctx: Any | None = None +) -> SynchronizedArray[_T]: ... +@overload +def synchronized(obj: _CT, lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedBase[_CT]: ... +@type_check_only +class _AcquireFunc(Protocol): + def __call__(self, block: bool = ..., timeout: float | None = ..., /) -> bool: ... + +class SynchronizedBase(Generic[_CT]): + acquire: _AcquireFunc + release: Callable[[], None] + def __init__(self, obj: Any, lock: _LockLike | None = None, ctx: Any | None = None) -> None: ... + def __reduce__(self) -> tuple[Callable[[Any, _LockLike], SynchronizedBase[Any]], tuple[Any, _LockLike]]: ... + def get_obj(self) -> _CT: ... + def get_lock(self) -> _LockLike: ... + def __enter__(self) -> bool: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None, / + ) -> None: ... + +class Synchronized(SynchronizedBase[_SimpleCData[_T]], Generic[_T]): + value: _T + +class SynchronizedArray(SynchronizedBase[ctypes.Array[_SimpleCData[_T]]], Generic[_T]): + def __len__(self) -> int: ... + @overload + def __getitem__(self, i: slice) -> list[_T]: ... + @overload + def __getitem__(self, i: int) -> _T: ... + @overload + def __setitem__(self, i: slice, value: Iterable[_T]) -> None: ... + @overload + def __setitem__(self, i: int, value: _T) -> None: ... + def __getslice__(self, start: int, stop: int) -> list[_T]: ... + def __setslice__(self, start: int, stop: int, values: Iterable[_T]) -> None: ... + +class SynchronizedString(SynchronizedArray[bytes]): + @overload # type: ignore[override] + def __getitem__(self, i: slice) -> bytes: ... + @overload + def __getitem__(self, i: int) -> bytes: ... + @overload # type: ignore[override] + def __setitem__(self, i: slice, value: bytes) -> None: ... + @overload + def __setitem__(self, i: int, value: bytes) -> None: ... + def __getslice__(self, start: int, stop: int) -> bytes: ... # type: ignore[override] + def __setslice__(self, start: int, stop: int, values: bytes) -> None: ... 
# type: ignore[override] + + value: bytes + raw: bytes diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/spawn.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/spawn.pyi new file mode 100644 index 0000000..4a97532 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/spawn.pyi @@ -0,0 +1,32 @@ +from collections.abc import Mapping, Sequence +from types import ModuleType +from typing import Any, Final + +__all__ = [ + "_main", + "freeze_support", + "set_executable", + "get_executable", + "get_preparation_data", + "get_command_line", + "import_main_path", +] + +WINEXE: Final[bool] +WINSERVICE: Final[bool] + +def set_executable(exe: str) -> None: ... +def get_executable() -> str: ... +def is_forking(argv: Sequence[str]) -> bool: ... +def freeze_support() -> None: ... +def get_command_line(**kwds: Any) -> list[str]: ... +def spawn_main(pipe_handle: int, parent_pid: int | None = None, tracker_fd: int | None = None) -> None: ... + +# undocumented +def _main(fd: int, parent_sentinel: int) -> int: ... +def get_preparation_data(name: str) -> dict[str, Any]: ... + +old_main_modules: list[ModuleType] + +def prepare(data: Mapping[str, Any]) -> None: ... +def import_main_path(main_path: str) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/synchronize.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/synchronize.pyi new file mode 100644 index 0000000..a0d97ba --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/synchronize.pyi @@ -0,0 +1,60 @@ +import threading +from collections.abc import Callable +from multiprocessing.context import BaseContext +from types import TracebackType +from typing_extensions import TypeAlias + +__all__ = ["Lock", "RLock", "Semaphore", "BoundedSemaphore", "Condition", "Event"] + +_LockLike: TypeAlias = Lock | RLock + +class Barrier(threading.Barrier): + def __init__( + self, parties: int, action: Callable[[], object] | None = None, timeout: float | None = None, *, ctx: BaseContext + ) -> None: ... + +class Condition: + def __init__(self, lock: _LockLike | None = None, *, ctx: BaseContext) -> None: ... + def notify(self, n: int = 1) -> None: ... + def notify_all(self) -> None: ... + def wait(self, timeout: float | None = None) -> bool: ... + def wait_for(self, predicate: Callable[[], bool], timeout: float | None = None) -> bool: ... + def __enter__(self) -> bool: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None, / + ) -> None: ... + # These methods are copied from the lock passed to the constructor, or an + # instance of ctx.RLock() if lock was None. + def acquire(self, block: bool = True, timeout: float | None = None) -> bool: ... + def release(self) -> None: ... + +class Event: + def __init__(self, *, ctx: BaseContext) -> None: ... + def is_set(self) -> bool: ... + def set(self) -> None: ... + def clear(self) -> None: ... + def wait(self, timeout: float | None = None) -> bool: ... + +# Not part of public API +class SemLock: + def __init__(self, kind: int, value: int, maxvalue: int, *, ctx: BaseContext | None) -> None: ... + def __enter__(self) -> bool: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None, / + ) -> None: ... 
+ # These methods are copied from the wrapped _multiprocessing.SemLock object + def acquire(self, block: bool = True, timeout: float | None = None) -> bool: ... + def release(self) -> None: ... + +class Lock(SemLock): + def __init__(self, *, ctx: BaseContext) -> None: ... + +class RLock(SemLock): + def __init__(self, *, ctx: BaseContext) -> None: ... + +class Semaphore(SemLock): + def __init__(self, value: int = 1, *, ctx: BaseContext) -> None: ... + def get_value(self) -> int: ... + +class BoundedSemaphore(Semaphore): + def __init__(self, value: int = 1, *, ctx: BaseContext) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/util.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/util.pyi new file mode 100644 index 0000000..3583194 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/multiprocessing/util.pyi @@ -0,0 +1,108 @@ +import sys +import threading +from _typeshed import ConvertibleToInt, Incomplete, Unused +from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence +from logging import Logger, _Level as _LoggingLevel +from typing import Any, Final, Generic, TypeVar, overload + +__all__ = [ + "sub_debug", + "debug", + "info", + "sub_warning", + "get_logger", + "log_to_stderr", + "get_temp_dir", + "register_after_fork", + "is_exiting", + "Finalize", + "ForkAwareThreadLock", + "ForkAwareLocal", + "close_all_fds_except", + "SUBDEBUG", + "SUBWARNING", +] + +if sys.version_info >= (3, 14): + __all__ += ["warn"] + +_T = TypeVar("_T") +_R_co = TypeVar("_R_co", default=Any, covariant=True) + +NOTSET: Final = 0 +SUBDEBUG: Final = 5 +DEBUG: Final = 10 +INFO: Final = 20 +SUBWARNING: Final = 25 +if sys.version_info >= (3, 14): + WARNING: Final = 30 + +LOGGER_NAME: Final[str] +DEFAULT_LOGGING_FORMAT: Final[str] + +def sub_debug(msg: object, *args: object) -> None: ... +def debug(msg: object, *args: object) -> None: ... +def info(msg: object, *args: object) -> None: ... + +if sys.version_info >= (3, 14): + def warn(msg: object, *args: object) -> None: ... + +def sub_warning(msg: object, *args: object) -> None: ... +def get_logger() -> Logger: ... +def log_to_stderr(level: _LoggingLevel | None = None) -> Logger: ... +def is_abstract_socket_namespace(address: str | bytes | None) -> bool: ... + +abstract_sockets_supported: Final[bool] + +def get_temp_dir() -> str: ... +def register_after_fork(obj: _T, func: Callable[[_T], object]) -> None: ... + +class Finalize(Generic[_R_co]): + # "args" and "kwargs" are passed as arguments to "callback". + @overload + def __init__( + self, + obj: None, + callback: Callable[..., _R_co], + *, + args: Sequence[Any] = (), + kwargs: Mapping[str, Any] | None = None, + exitpriority: int, + ) -> None: ... + @overload + def __init__( + self, obj: None, callback: Callable[..., _R_co], args: Sequence[Any], kwargs: Mapping[str, Any] | None, exitpriority: int + ) -> None: ... + @overload + def __init__( + self, + obj: Any, + callback: Callable[..., _R_co], + args: Sequence[Any] = (), + kwargs: Mapping[str, Any] | None = None, + exitpriority: int | None = None, + ) -> None: ... + def __call__( + self, + wr: Unused = None, + _finalizer_registry: MutableMapping[Incomplete, Incomplete] = {}, + sub_debug: Callable[..., object] = ..., + getpid: Callable[[], int] = ..., + ) -> _R_co: ... + def cancel(self) -> None: ... + def still_active(self) -> bool: ... + +def is_exiting() -> bool: ... 
+ +class ForkAwareThreadLock: + acquire: Callable[[bool, float], bool] + release: Callable[[], None] + def __enter__(self) -> bool: ... + def __exit__(self, *args: Unused) -> None: ... + +class ForkAwareLocal(threading.local): ... + +MAXFD: Final[int] + +def close_all_fds_except(fds: Iterable[int]) -> None: ... +def spawnv_passfds(path: bytes, args: Sequence[ConvertibleToInt], passfds: Sequence[int]) -> int: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/netrc.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/netrc.pyi new file mode 100644 index 0000000..480f55a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/netrc.pyi @@ -0,0 +1,23 @@ +import sys +from _typeshed import StrOrBytesPath +from typing_extensions import TypeAlias + +__all__ = ["netrc", "NetrcParseError"] + +class NetrcParseError(Exception): + filename: str | None + lineno: int | None + msg: str + def __init__(self, msg: str, filename: StrOrBytesPath | None = None, lineno: int | None = None) -> None: ... + +# (login, account, password) tuple +if sys.version_info >= (3, 11): + _NetrcTuple: TypeAlias = tuple[str, str, str] +else: + _NetrcTuple: TypeAlias = tuple[str, str | None, str | None] + +class netrc: + hosts: dict[str, _NetrcTuple] + macros: dict[str, list[str]] + def __init__(self, file: StrOrBytesPath | None = None) -> None: ... + def authenticators(self, host: str) -> _NetrcTuple | None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/nis.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/nis.pyi new file mode 100644 index 0000000..10eef23 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/nis.pyi @@ -0,0 +1,9 @@ +import sys + +if sys.platform != "win32": + def cat(map: str, domain: str = ...) -> dict[str, str]: ... + def get_default_domain() -> str: ... + def maps(domain: str = ...) -> list[str]: ... + def match(key: str, map: str, domain: str = ...) -> str: ... + + class error(Exception): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/nntplib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/nntplib.pyi new file mode 100644 index 0000000..1fb1e79 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/nntplib.pyi @@ -0,0 +1,120 @@ +import datetime +import socket +import ssl +from _typeshed import Unused +from builtins import list as _list # conflicts with a method named "list" +from collections.abc import Iterable +from typing import IO, Any, Final, NamedTuple +from typing_extensions import Self, TypeAlias + +__all__ = [ + "NNTP", + "NNTPError", + "NNTPReplyError", + "NNTPTemporaryError", + "NNTPPermanentError", + "NNTPProtocolError", + "NNTPDataError", + "decode_header", + "NNTP_SSL", +] + +_File: TypeAlias = IO[bytes] | bytes | str | None + +class NNTPError(Exception): + response: str + +class NNTPReplyError(NNTPError): ... +class NNTPTemporaryError(NNTPError): ... +class NNTPPermanentError(NNTPError): ... +class NNTPProtocolError(NNTPError): ... +class NNTPDataError(NNTPError): ... + +NNTP_PORT: Final = 119 +NNTP_SSL_PORT: Final = 563 + +class GroupInfo(NamedTuple): + group: str + last: str + first: str + flag: str + +class ArticleInfo(NamedTuple): + number: int + message_id: str + lines: list[bytes] + +def decode_header(header_str: str) -> str: ... 
+ +class NNTP: + encoding: str + errors: str + + host: str + port: int + sock: socket.socket + file: IO[bytes] + debugging: int + welcome: str + readermode_afterauth: bool + tls_on: bool + authenticated: bool + nntp_implementation: str + nntp_version: int + def __init__( + self, + host: str, + port: int = 119, + user: str | None = None, + password: str | None = None, + readermode: bool | None = None, + usenetrc: bool = False, + timeout: float = ..., + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + def getwelcome(self) -> str: ... + def getcapabilities(self) -> dict[str, _list[str]]: ... + def set_debuglevel(self, level: int) -> None: ... + def debug(self, level: int) -> None: ... + def capabilities(self) -> tuple[str, dict[str, _list[str]]]: ... + def newgroups(self, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: ... + def newnews(self, group: str, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: ... + def list(self, group_pattern: str | None = None, *, file: _File = None) -> tuple[str, _list[str]]: ... + def description(self, group: str) -> str: ... + def descriptions(self, group_pattern: str) -> tuple[str, dict[str, str]]: ... + def group(self, name: str) -> tuple[str, int, int, int, str]: ... + def help(self, *, file: _File = None) -> tuple[str, _list[str]]: ... + def stat(self, message_spec: Any = None) -> tuple[str, int, str]: ... + def next(self) -> tuple[str, int, str]: ... + def last(self) -> tuple[str, int, str]: ... + def head(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... + def body(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... + def article(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... + def slave(self) -> str: ... + def xhdr(self, hdr: str, str: Any, *, file: _File = None) -> tuple[str, _list[str]]: ... + def xover(self, start: int, end: int, *, file: _File = None) -> tuple[str, _list[tuple[int, dict[str, str]]]]: ... + def over( + self, message_spec: None | str | _list[Any] | tuple[Any, ...], *, file: _File = None + ) -> tuple[str, _list[tuple[int, dict[str, str]]]]: ... + def date(self) -> tuple[str, datetime.datetime]: ... + def post(self, data: bytes | Iterable[bytes]) -> str: ... + def ihave(self, message_id: Any, data: bytes | Iterable[bytes]) -> str: ... + def quit(self) -> str: ... + def login(self, user: str | None = None, password: str | None = None, usenetrc: bool = True) -> None: ... + def starttls(self, context: ssl.SSLContext | None = None) -> None: ... + +class NNTP_SSL(NNTP): + ssl_context: ssl.SSLContext | None + sock: ssl.SSLSocket + def __init__( + self, + host: str, + port: int = 563, + user: str | None = None, + password: str | None = None, + ssl_context: ssl.SSLContext | None = None, + readermode: bool | None = None, + usenetrc: bool = False, + timeout: float = ..., + ) -> None: ... 
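A brief, hedged sketch of the NNTP flow typed above (nntplib still ships with Python 3.12, which these stubs target). The server name and newsgroup are placeholders, so this will only run against a reachable news server.

from nntplib import NNTP

with NNTP("news.example.com") as server:              # __enter__/__exit__ per the stub
    resp, count, first, last, name = server.group("comp.lang.python")
    resp, overviews = server.over((last - 9, last))    # overview of the last ten articles
    for number, fields in overviews:
        print(number, fields.get("subject", ""))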
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/nt.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/nt.pyi new file mode 100644 index 0000000..0c87444 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/nt.pyi @@ -0,0 +1,116 @@ +import sys + +if sys.platform == "win32": + # Actually defined here and re-exported from os at runtime, + # but this leads to less code duplication + from os import ( + F_OK as F_OK, + O_APPEND as O_APPEND, + O_BINARY as O_BINARY, + O_CREAT as O_CREAT, + O_EXCL as O_EXCL, + O_NOINHERIT as O_NOINHERIT, + O_RANDOM as O_RANDOM, + O_RDONLY as O_RDONLY, + O_RDWR as O_RDWR, + O_SEQUENTIAL as O_SEQUENTIAL, + O_SHORT_LIVED as O_SHORT_LIVED, + O_TEMPORARY as O_TEMPORARY, + O_TEXT as O_TEXT, + O_TRUNC as O_TRUNC, + O_WRONLY as O_WRONLY, + P_DETACH as P_DETACH, + P_NOWAIT as P_NOWAIT, + P_NOWAITO as P_NOWAITO, + P_OVERLAY as P_OVERLAY, + P_WAIT as P_WAIT, + R_OK as R_OK, + TMP_MAX as TMP_MAX, + W_OK as W_OK, + X_OK as X_OK, + DirEntry as DirEntry, + abort as abort, + access as access, + chdir as chdir, + chmod as chmod, + close as close, + closerange as closerange, + cpu_count as cpu_count, + device_encoding as device_encoding, + dup as dup, + dup2 as dup2, + error as error, + execv as execv, + execve as execve, + fspath as fspath, + fstat as fstat, + fsync as fsync, + ftruncate as ftruncate, + get_handle_inheritable as get_handle_inheritable, + get_inheritable as get_inheritable, + get_terminal_size as get_terminal_size, + getcwd as getcwd, + getcwdb as getcwdb, + getlogin as getlogin, + getpid as getpid, + getppid as getppid, + isatty as isatty, + kill as kill, + link as link, + listdir as listdir, + lseek as lseek, + lstat as lstat, + mkdir as mkdir, + open as open, + pipe as pipe, + putenv as putenv, + read as read, + readlink as readlink, + remove as remove, + rename as rename, + replace as replace, + rmdir as rmdir, + scandir as scandir, + set_handle_inheritable as set_handle_inheritable, + set_inheritable as set_inheritable, + spawnv as spawnv, + spawnve as spawnve, + startfile as startfile, + stat as stat, + stat_result as stat_result, + statvfs_result as statvfs_result, + strerror as strerror, + symlink as symlink, + system as system, + terminal_size as terminal_size, + times as times, + times_result as times_result, + truncate as truncate, + umask as umask, + uname_result as uname_result, + unlink as unlink, + unsetenv as unsetenv, + urandom as urandom, + utime as utime, + waitpid as waitpid, + waitstatus_to_exitcode as waitstatus_to_exitcode, + write as write, + ) + + if sys.version_info >= (3, 11): + from os import EX_OK as EX_OK + if sys.version_info >= (3, 12): + from os import ( + get_blocking as get_blocking, + listdrives as listdrives, + listmounts as listmounts, + listvolumes as listvolumes, + set_blocking as set_blocking, + ) + if sys.version_info >= (3, 13): + from os import fchmod as fchmod, lchmod as lchmod + + if sys.version_info >= (3, 14): + from os import readinto as readinto + + environ: dict[str, str] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ntpath.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ntpath.pyi new file mode 100644 index 0000000..074df07 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ntpath.pyi @@ -0,0 +1,123 @@ +import sys +from _typeshed import BytesPath, StrOrBytesPath, StrPath +from genericpath import ( + ALLOW_MISSING as ALLOW_MISSING, + _AllowMissingType, + commonprefix as commonprefix, + exists as 
exists, + getatime as getatime, + getctime as getctime, + getmtime as getmtime, + getsize as getsize, + isdir as isdir, + isfile as isfile, + samefile as samefile, + sameopenfile as sameopenfile, + samestat as samestat, +) +from os import PathLike + +# Re-export common definitions from posixpath to reduce duplication +from posixpath import ( + abspath as abspath, + basename as basename, + commonpath as commonpath, + curdir as curdir, + defpath as defpath, + devnull as devnull, + dirname as dirname, + expanduser as expanduser, + expandvars as expandvars, + extsep as extsep, + isabs as isabs, + islink as islink, + ismount as ismount, + lexists as lexists, + normcase as normcase, + normpath as normpath, + pardir as pardir, + pathsep as pathsep, + relpath as relpath, + sep as sep, + split as split, + splitdrive as splitdrive, + splitext as splitext, + supports_unicode_filenames as supports_unicode_filenames, +) +from typing import AnyStr, overload +from typing_extensions import LiteralString + +if sys.version_info >= (3, 12): + from posixpath import isjunction as isjunction, splitroot as splitroot +if sys.version_info >= (3, 13): + from genericpath import isdevdrive as isdevdrive + +__all__ = [ + "normcase", + "isabs", + "join", + "splitdrive", + "split", + "splitext", + "basename", + "dirname", + "commonprefix", + "getsize", + "getmtime", + "getatime", + "getctime", + "islink", + "exists", + "lexists", + "isdir", + "isfile", + "ismount", + "expanduser", + "expandvars", + "normpath", + "abspath", + "curdir", + "pardir", + "sep", + "pathsep", + "defpath", + "altsep", + "extsep", + "devnull", + "realpath", + "supports_unicode_filenames", + "relpath", + "samefile", + "sameopenfile", + "samestat", + "commonpath", + "ALLOW_MISSING", +] +if sys.version_info >= (3, 12): + __all__ += ["isjunction", "splitroot"] +if sys.version_info >= (3, 13): + __all__ += ["isdevdrive", "isreserved"] + +altsep: LiteralString + +# First parameter is not actually pos-only, +# but must be defined as pos-only in the stub or cross-platform code doesn't type-check, +# as the parameter name is different in posixpath.join() +@overload +def join(path: LiteralString, /, *paths: LiteralString) -> LiteralString: ... +@overload +def join(path: StrPath, /, *paths: StrPath) -> str: ... +@overload +def join(path: BytesPath, /, *paths: BytesPath) -> bytes: ... + +if sys.platform == "win32": + @overload + def realpath(path: PathLike[AnyStr], *, strict: bool | _AllowMissingType = False) -> AnyStr: ... + @overload + def realpath(path: AnyStr, *, strict: bool | _AllowMissingType = False) -> AnyStr: ... + +else: + realpath = abspath + +if sys.version_info >= (3, 13): + def isreserved(path: StrOrBytesPath) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/nturl2path.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/nturl2path.pyi new file mode 100644 index 0000000..014af8a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/nturl2path.pyi @@ -0,0 +1,12 @@ +import sys +from typing_extensions import deprecated + +if sys.version_info >= (3, 14): + @deprecated("The `nturl2path` module is deprecated since Python 3.14.") + def url2pathname(url: str) -> str: ... + @deprecated("The `nturl2path` module is deprecated since Python 3.14.") + def pathname2url(p: str) -> str: ... + +else: + def url2pathname(url: str) -> str: ... + def pathname2url(p: str) -> str: ... 
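An illustrative sketch of the ntpath helpers typed above; the example path is arbitrary. ntpath applies Windows path rules regardless of the host platform, so this runs anywhere.

import ntpath

p = ntpath.join("C:\\", "Program Files", "App", "app.exe")
print(ntpath.splitdrive(p))   # ('C:', '\\Program Files\\App\\app.exe')
print(ntpath.basename(p))     # 'app.exe'
print(ntpath.splitext(p))     # ('C:\\Program Files\\App\\app', '.exe')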
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/numbers.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/numbers.pyi new file mode 100644 index 0000000..64fb165 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/numbers.pyi @@ -0,0 +1,217 @@ +# Note: these stubs are incomplete. The more complex type +# signatures are currently omitted. +# +# Use _ComplexLike, _RealLike and _IntegralLike for return types in this module +# rather than `numbers.Complex`, `numbers.Real` and `numbers.Integral`, +# to avoid an excessive number of `type: ignore`s in subclasses of these ABCs +# (since type checkers don't see `complex` as a subtype of `numbers.Complex`, +# nor `float` as a subtype of `numbers.Real`, etc.) + +from abc import ABCMeta, abstractmethod +from typing import ClassVar, Literal, Protocol, overload, type_check_only + +__all__ = ["Number", "Complex", "Real", "Rational", "Integral"] + +############################ +# Protocols for return types +############################ + +# `_ComplexLike` is a structural-typing approximation +# of the `Complex` ABC, which is not (and cannot be) a protocol +# +# NOTE: We can't include `__complex__` here, +# as we want `int` to be seen as a subtype of `_ComplexLike`, +# and `int.__complex__` does not exist :( +@type_check_only +class _ComplexLike(Protocol): + def __neg__(self) -> _ComplexLike: ... + def __pos__(self) -> _ComplexLike: ... + def __abs__(self) -> _RealLike: ... + +# _RealLike is a structural-typing approximation +# of the `Real` ABC, which is not (and cannot be) a protocol +@type_check_only +class _RealLike(_ComplexLike, Protocol): + def __trunc__(self) -> _IntegralLike: ... + def __floor__(self) -> _IntegralLike: ... + def __ceil__(self) -> _IntegralLike: ... + def __float__(self) -> float: ... + # Overridden from `_ComplexLike` + # for a more precise return type: + def __neg__(self) -> _RealLike: ... + def __pos__(self) -> _RealLike: ... + +# _IntegralLike is a structural-typing approximation +# of the `Integral` ABC, which is not (and cannot be) a protocol +@type_check_only +class _IntegralLike(_RealLike, Protocol): + def __invert__(self) -> _IntegralLike: ... + def __int__(self) -> int: ... + def __index__(self) -> int: ... + # Overridden from `_ComplexLike` + # for a more precise return type: + def __abs__(self) -> _IntegralLike: ... + # Overridden from `RealLike` + # for a more precise return type: + def __neg__(self) -> _IntegralLike: ... + def __pos__(self) -> _IntegralLike: ... + +################# +# Module "proper" +################# + +class Number(metaclass=ABCMeta): + __slots__ = () + @abstractmethod + def __hash__(self) -> int: ... + +# See comment at the top of the file +# for why some of these return types are purposefully vague +class Complex(Number, _ComplexLike): + __slots__ = () + @abstractmethod + def __complex__(self) -> complex: ... + def __bool__(self) -> bool: ... + @property + @abstractmethod + def real(self) -> _RealLike: ... + @property + @abstractmethod + def imag(self) -> _RealLike: ... + @abstractmethod + def __add__(self, other) -> _ComplexLike: ... + @abstractmethod + def __radd__(self, other) -> _ComplexLike: ... + @abstractmethod + def __neg__(self) -> _ComplexLike: ... + @abstractmethod + def __pos__(self) -> _ComplexLike: ... + def __sub__(self, other) -> _ComplexLike: ... + def __rsub__(self, other) -> _ComplexLike: ... + @abstractmethod + def __mul__(self, other) -> _ComplexLike: ... 
+ @abstractmethod + def __rmul__(self, other) -> _ComplexLike: ... + @abstractmethod + def __truediv__(self, other) -> _ComplexLike: ... + @abstractmethod + def __rtruediv__(self, other) -> _ComplexLike: ... + @abstractmethod + def __pow__(self, exponent) -> _ComplexLike: ... + @abstractmethod + def __rpow__(self, base) -> _ComplexLike: ... + @abstractmethod + def __abs__(self) -> _RealLike: ... + @abstractmethod + def conjugate(self) -> _ComplexLike: ... + @abstractmethod + def __eq__(self, other: object) -> bool: ... + __hash__: ClassVar[None] # type: ignore[assignment] + +# See comment at the top of the file +# for why some of these return types are purposefully vague +class Real(Complex, _RealLike): + __slots__ = () + @abstractmethod + def __float__(self) -> float: ... + @abstractmethod + def __trunc__(self) -> _IntegralLike: ... + @abstractmethod + def __floor__(self) -> _IntegralLike: ... + @abstractmethod + def __ceil__(self) -> _IntegralLike: ... + @abstractmethod + @overload + def __round__(self, ndigits: None = None) -> _IntegralLike: ... + @abstractmethod + @overload + def __round__(self, ndigits: int) -> _RealLike: ... + def __divmod__(self, other) -> tuple[_RealLike, _RealLike]: ... + def __rdivmod__(self, other) -> tuple[_RealLike, _RealLike]: ... + @abstractmethod + def __floordiv__(self, other) -> _RealLike: ... + @abstractmethod + def __rfloordiv__(self, other) -> _RealLike: ... + @abstractmethod + def __mod__(self, other) -> _RealLike: ... + @abstractmethod + def __rmod__(self, other) -> _RealLike: ... + @abstractmethod + def __lt__(self, other) -> bool: ... + @abstractmethod + def __le__(self, other) -> bool: ... + def __complex__(self) -> complex: ... + @property + def real(self) -> _RealLike: ... + @property + def imag(self) -> Literal[0]: ... + def conjugate(self) -> _RealLike: ... + # Not actually overridden at runtime, + # but we override these in the stub to give them more precise return types: + @abstractmethod + def __pos__(self) -> _RealLike: ... + @abstractmethod + def __neg__(self) -> _RealLike: ... + +# See comment at the top of the file +# for why some of these return types are purposefully vague +class Rational(Real): + __slots__ = () + @property + @abstractmethod + def numerator(self) -> _IntegralLike: ... + @property + @abstractmethod + def denominator(self) -> _IntegralLike: ... + def __float__(self) -> float: ... + +# See comment at the top of the file +# for why some of these return types are purposefully vague +class Integral(Rational, _IntegralLike): + __slots__ = () + @abstractmethod + def __int__(self) -> int: ... + def __index__(self) -> int: ... + @abstractmethod + def __pow__(self, exponent, modulus=None) -> _IntegralLike: ... + @abstractmethod + def __lshift__(self, other) -> _IntegralLike: ... + @abstractmethod + def __rlshift__(self, other) -> _IntegralLike: ... + @abstractmethod + def __rshift__(self, other) -> _IntegralLike: ... + @abstractmethod + def __rrshift__(self, other) -> _IntegralLike: ... + @abstractmethod + def __and__(self, other) -> _IntegralLike: ... + @abstractmethod + def __rand__(self, other) -> _IntegralLike: ... + @abstractmethod + def __xor__(self, other) -> _IntegralLike: ... + @abstractmethod + def __rxor__(self, other) -> _IntegralLike: ... + @abstractmethod + def __or__(self, other) -> _IntegralLike: ... + @abstractmethod + def __ror__(self, other) -> _IntegralLike: ... + @abstractmethod + def __invert__(self) -> _IntegralLike: ... + def __float__(self) -> float: ... 
+ @property + def numerator(self) -> _IntegralLike: ... + @property + def denominator(self) -> Literal[1]: ... + # Not actually overridden at runtime, + # but we override these in the stub to give them more precise return types: + @abstractmethod + def __pos__(self) -> _IntegralLike: ... + @abstractmethod + def __neg__(self) -> _IntegralLike: ... + @abstractmethod + def __abs__(self) -> _IntegralLike: ... + @abstractmethod + @overload + def __round__(self, ndigits: None = None) -> _IntegralLike: ... + @abstractmethod + @overload + def __round__(self, ndigits: int) -> _IntegralLike: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/opcode.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/opcode.pyi new file mode 100644 index 0000000..ed0e96e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/opcode.pyi @@ -0,0 +1,47 @@ +import sys +from typing import Final, Literal + +__all__ = [ + "cmp_op", + "hasconst", + "hasname", + "hasjrel", + "hasjabs", + "haslocal", + "hascompare", + "hasfree", + "opname", + "opmap", + "HAVE_ARGUMENT", + "EXTENDED_ARG", + "stack_effect", +] +if sys.version_info >= (3, 12): + __all__ += ["hasarg", "hasexc"] +else: + __all__ += ["hasnargs"] +if sys.version_info >= (3, 13): + __all__ += ["hasjump"] + +cmp_op: tuple[Literal["<"], Literal["<="], Literal["=="], Literal["!="], Literal[">"], Literal[">="]] +hasconst: Final[list[int]] +hasname: Final[list[int]] +hasjrel: Final[list[int]] +hasjabs: Final[list[int]] +haslocal: Final[list[int]] +hascompare: Final[list[int]] +hasfree: Final[list[int]] +if sys.version_info >= (3, 12): + hasarg: Final[list[int]] + hasexc: Final[list[int]] +else: + hasnargs: Final[list[int]] +if sys.version_info >= (3, 13): + hasjump: Final[list[int]] +opname: Final[list[str]] + +opmap: Final[dict[str, int]] +HAVE_ARGUMENT: Final = 43 +EXTENDED_ARG: Final = 69 + +def stack_effect(opcode: int, oparg: int | None = None, /, *, jump: bool | None = None) -> int: ... 
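A small sketch against the opcode tables typed above. The chosen opcode name is just an example; its numeric value varies between CPython versions.

import opcode

pop_top = opcode.opmap["POP_TOP"]          # name -> numeric opcode
print(opcode.opname[pop_top])              # 'POP_TOP'
print(pop_top >= opcode.HAVE_ARGUMENT)     # False: POP_TOP takes no oparg
print(opcode.stack_effect(pop_top))        # -1: pops one value, pushes nothing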
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/operator.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/operator.pyi new file mode 100644 index 0000000..2f91951 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/operator.pyi @@ -0,0 +1,217 @@ +import sys +from _operator import ( + abs as abs, + add as add, + and_ as and_, + concat as concat, + contains as contains, + countOf as countOf, + delitem as delitem, + eq as eq, + floordiv as floordiv, + ge as ge, + getitem as getitem, + gt as gt, + iadd as iadd, + iand as iand, + iconcat as iconcat, + ifloordiv as ifloordiv, + ilshift as ilshift, + imatmul as imatmul, + imod as imod, + imul as imul, + index as index, + indexOf as indexOf, + inv as inv, + invert as invert, + ior as ior, + ipow as ipow, + irshift as irshift, + is_ as is_, + is_not as is_not, + isub as isub, + itruediv as itruediv, + ixor as ixor, + le as le, + length_hint as length_hint, + lshift as lshift, + lt as lt, + matmul as matmul, + mod as mod, + mul as mul, + ne as ne, + neg as neg, + not_ as not_, + or_ as or_, + pos as pos, + pow as pow, + rshift as rshift, + setitem as setitem, + sub as sub, + truediv as truediv, + truth as truth, + xor as xor, +) +from _typeshed import SupportsGetItem +from typing import Any, Generic, TypeVar, final, overload +from typing_extensions import Self, TypeVarTuple, Unpack + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_Ts = TypeVarTuple("_Ts") + +__all__ = [ + "abs", + "add", + "and_", + "attrgetter", + "concat", + "contains", + "countOf", + "delitem", + "eq", + "floordiv", + "ge", + "getitem", + "gt", + "iadd", + "iand", + "iconcat", + "ifloordiv", + "ilshift", + "imatmul", + "imod", + "imul", + "index", + "indexOf", + "inv", + "invert", + "ior", + "ipow", + "irshift", + "is_", + "is_not", + "isub", + "itemgetter", + "itruediv", + "ixor", + "le", + "length_hint", + "lshift", + "lt", + "matmul", + "methodcaller", + "mod", + "mul", + "ne", + "neg", + "not_", + "or_", + "pos", + "pow", + "rshift", + "setitem", + "sub", + "truediv", + "truth", + "xor", +] + +if sys.version_info >= (3, 11): + from _operator import call as call + + __all__ += ["call"] + +if sys.version_info >= (3, 14): + from _operator import is_none as is_none, is_not_none as is_not_none + + __all__ += ["is_none", "is_not_none"] + +__lt__ = lt +__le__ = le +__eq__ = eq +__ne__ = ne +__ge__ = ge +__gt__ = gt +__not__ = not_ +__abs__ = abs +__add__ = add +__and__ = and_ +__floordiv__ = floordiv +__index__ = index +__inv__ = inv +__invert__ = invert +__lshift__ = lshift +__mod__ = mod +__mul__ = mul +__matmul__ = matmul +__neg__ = neg +__or__ = or_ +__pos__ = pos +__pow__ = pow +__rshift__ = rshift +__sub__ = sub +__truediv__ = truediv +__xor__ = xor +__concat__ = concat +__contains__ = contains +__delitem__ = delitem +__getitem__ = getitem +__setitem__ = setitem +__iadd__ = iadd +__iand__ = iand +__iconcat__ = iconcat +__ifloordiv__ = ifloordiv +__ilshift__ = ilshift +__imod__ = imod +__imul__ = imul +__imatmul__ = imatmul +__ior__ = ior +__ipow__ = ipow +__irshift__ = irshift +__isub__ = isub +__itruediv__ = itruediv +__ixor__ = ixor +if sys.version_info >= (3, 11): + __call__ = call + +# At runtime, these classes are implemented in C as part of the _operator module +# However, they consider themselves to live in the operator module, so we'll put +# them here. 
+@final +class attrgetter(Generic[_T_co]): + @overload + def __new__(cls, attr: str, /) -> attrgetter[Any]: ... + @overload + def __new__(cls, attr: str, attr2: str, /) -> attrgetter[tuple[Any, Any]]: ... + @overload + def __new__(cls, attr: str, attr2: str, attr3: str, /) -> attrgetter[tuple[Any, Any, Any]]: ... + @overload + def __new__(cls, attr: str, attr2: str, attr3: str, attr4: str, /) -> attrgetter[tuple[Any, Any, Any, Any]]: ... + @overload + def __new__(cls, attr: str, /, *attrs: str) -> attrgetter[tuple[Any, ...]]: ... + def __call__(self, obj: Any, /) -> _T_co: ... + +@final +class itemgetter(Generic[_T_co]): + @overload + def __new__(cls, item: _T, /) -> itemgetter[_T]: ... + @overload + def __new__(cls, item1: _T1, item2: _T2, /, *items: Unpack[_Ts]) -> itemgetter[tuple[_T1, _T2, Unpack[_Ts]]]: ... + # __key: _KT_contra in SupportsGetItem seems to be causing variance issues, ie: + # TypeVar "_KT_contra@SupportsGetItem" is contravariant + # "tuple[int, int]" is incompatible with protocol "SupportsIndex" + # preventing [_T_co, ...] instead of [Any, ...] + # + # If we can't infer a literal key from __new__ (ie: `itemgetter[Literal[0]]` for `itemgetter(0)`), + # then we can't annotate __call__'s return type or it'll break on tuples + # + # These issues are best demonstrated by the `itertools.check_itertools_recipes.unique_justseen` test. + def __call__(self, obj: SupportsGetItem[Any, Any]) -> Any: ... + +@final +class methodcaller: + def __new__(cls, name: str, /, *args: Any, **kwargs: Any) -> Self: ... + def __call__(self, obj: Any) -> Any: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/optparse.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/optparse.pyi new file mode 100644 index 0000000..c522917 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/optparse.pyi @@ -0,0 +1,308 @@ +import builtins +from _typeshed import MaybeNone, SupportsWrite +from abc import abstractmethod +from collections.abc import Callable, Iterable, Mapping, Sequence +from typing import Any, ClassVar, Final, Literal, NoReturn, overload +from typing_extensions import Self + +__all__ = [ + "Option", + "make_option", + "SUPPRESS_HELP", + "SUPPRESS_USAGE", + "Values", + "OptionContainer", + "OptionGroup", + "OptionParser", + "HelpFormatter", + "IndentedHelpFormatter", + "TitledHelpFormatter", + "OptParseError", + "OptionError", + "OptionConflictError", + "OptionValueError", + "BadOptionError", + "check_choice", +] +NO_DEFAULT: Final = ("NO", "DEFAULT") +SUPPRESS_HELP: Final = "SUPPRESSHELP" +SUPPRESS_USAGE: Final = "SUPPRESSUSAGE" + +# Can return complex, float, or int depending on the option's type +def check_builtin(option: Option, opt: str, value: str) -> complex: ... +def check_choice(option: Option, opt: str, value: str) -> str: ... + +class OptParseError(Exception): + msg: str + def __init__(self, msg: str) -> None: ... + +class BadOptionError(OptParseError): + opt_str: str + def __init__(self, opt_str: str) -> None: ... + +class AmbiguousOptionError(BadOptionError): + possibilities: Iterable[str] + def __init__(self, opt_str: str, possibilities: Sequence[str]) -> None: ... + +class OptionError(OptParseError): + option_id: str + def __init__(self, msg: str, option: Option) -> None: ... + +class OptionConflictError(OptionError): ... +class OptionValueError(OptParseError): ... 
+ +class HelpFormatter: + NO_DEFAULT_VALUE: str + _long_opt_fmt: str + _short_opt_fmt: str + current_indent: int + default_tag: str + help_position: int + help_width: int | MaybeNone # initialized as None and computed later as int when storing option strings + indent_increment: int + level: int + max_help_position: int + option_strings: dict[Option, str] + parser: OptionParser + short_first: bool | Literal[0, 1] + width: int + def __init__( + self, indent_increment: int, max_help_position: int, width: int | None, short_first: bool | Literal[0, 1] + ) -> None: ... + def dedent(self) -> None: ... + def expand_default(self, option: Option) -> str: ... + def format_description(self, description: str | None) -> str: ... + def format_epilog(self, epilog: str | None) -> str: ... + @abstractmethod + def format_heading(self, heading: str) -> str: ... + def format_option(self, option: Option) -> str: ... + def format_option_strings(self, option: Option) -> str: ... + @abstractmethod + def format_usage(self, usage: str) -> str: ... + def indent(self) -> None: ... + def set_long_opt_delimiter(self, delim: str) -> None: ... + def set_parser(self, parser: OptionParser) -> None: ... + def set_short_opt_delimiter(self, delim: str) -> None: ... + def store_option_strings(self, parser: OptionParser) -> None: ... + +class IndentedHelpFormatter(HelpFormatter): + def __init__( + self, + indent_increment: int = 2, + max_help_position: int = 24, + width: int | None = None, + short_first: bool | Literal[0, 1] = 1, + ) -> None: ... + def format_heading(self, heading: str) -> str: ... + def format_usage(self, usage: str) -> str: ... + +class TitledHelpFormatter(HelpFormatter): + def __init__( + self, + indent_increment: int = 0, + max_help_position: int = 24, + width: int | None = None, + short_first: bool | Literal[0, 1] = 0, + ) -> None: ... + def format_heading(self, heading: str) -> str: ... + def format_usage(self, usage: str) -> str: ... + +class Option: + ACTIONS: tuple[str, ...] + ALWAYS_TYPED_ACTIONS: tuple[str, ...] + ATTRS: list[str] + CHECK_METHODS: list[Callable[[Self], object]] | None + CONST_ACTIONS: tuple[str, ...] + STORE_ACTIONS: tuple[str, ...] + TYPED_ACTIONS: tuple[str, ...] + TYPES: tuple[str, ...] + TYPE_CHECKER: dict[str, Callable[[Option, str, str], object]] + _long_opts: list[str] + _short_opts: list[str] + action: str + type: str | None + dest: str | None + default: Any # default can be "any" type + nargs: int + const: Any | None # const can be "any" type + choices: list[str] | tuple[str, ...] | None + # Callback args and kwargs cannot be expressed in Python's type system. + # Revisit if ParamSpec is ever changed to work with packed args/kwargs. + callback: Callable[..., object] | None + callback_args: tuple[Any, ...] | None + callback_kwargs: dict[str, Any] | None + help: str | None + metavar: str | None + def __init__( + self, + *opts: str | None, + # The following keywords are handled by the _set_attrs method. All default to + # `None` except for `default`, which defaults to `NO_DEFAULT`. + action: str | None = None, + type: str | builtins.type | None = None, + dest: str | None = None, + default: Any = ..., # = NO_DEFAULT + nargs: int | None = None, + const: Any | None = None, + choices: list[str] | tuple[str, ...] | None = None, + callback: Callable[..., object] | None = None, + callback_args: tuple[Any, ...] | None = None, + callback_kwargs: dict[str, Any] | None = None, + help: str | None = None, + metavar: str | None = None, + ) -> None: ... + def _check_action(self) -> None: ... 
+ def _check_callback(self) -> None: ... + def _check_choice(self) -> None: ... + def _check_const(self) -> None: ... + def _check_dest(self) -> None: ... + def _check_nargs(self) -> None: ... + def _check_opt_strings(self, opts: Iterable[str | None]) -> list[str]: ... + def _check_type(self) -> None: ... + def _set_attrs(self, attrs: dict[str, Any]) -> None: ... # accepted attrs depend on the ATTRS attribute + def _set_opt_strings(self, opts: Iterable[str]) -> None: ... + def check_value(self, opt: str, value: str) -> Any: ... # return type cannot be known statically + def convert_value(self, opt: str, value: str | tuple[str, ...] | None) -> Any: ... # return type cannot be known statically + def get_opt_string(self) -> str: ... + def process(self, opt: str, value: str | tuple[str, ...] | None, values: Values, parser: OptionParser) -> int: ... + # value of take_action can be "any" type + def take_action(self, action: str, dest: str, opt: str, value: Any, values: Values, parser: OptionParser) -> int: ... + def takes_value(self) -> bool: ... + +make_option = Option + +class OptionContainer: + _long_opt: dict[str, Option] + _short_opt: dict[str, Option] + conflict_handler: str + defaults: dict[str, Any] # default values can be "any" type + description: str | None + option_class: type[Option] + def __init__( + self, option_class: type[Option], conflict_handler: Literal["error", "resolve"], description: str | None + ) -> None: ... + def _check_conflict(self, option: Option) -> None: ... + def _create_option_mappings(self) -> None: ... + def _share_option_mappings(self, parser: OptionParser) -> None: ... + @overload + def add_option(self, opt: Option, /) -> Option: ... + @overload + def add_option( + self, + opt_str: str, + /, + *opts: str | None, + action: str | None = None, + type: str | builtins.type | None = None, + dest: str | None = None, + default: Any = ..., # = NO_DEFAULT + nargs: int | None = None, + const: Any | None = None, + choices: list[str] | tuple[str, ...] | None = None, + callback: Callable[..., object] | None = None, + callback_args: tuple[Any, ...] | None = None, + callback_kwargs: dict[str, Any] | None = None, + help: str | None = None, + metavar: str | None = None, + **kwargs, # Allow arbitrary keyword arguments for user defined option_class + ) -> Option: ... + def add_options(self, option_list: Iterable[Option]) -> None: ... + def destroy(self) -> None: ... + def format_option_help(self, formatter: HelpFormatter) -> str: ... + def format_description(self, formatter: HelpFormatter) -> str: ... + def format_help(self, formatter: HelpFormatter) -> str: ... + def get_description(self) -> str | None: ... + def get_option(self, opt_str: str) -> Option | None: ... + def has_option(self, opt_str: str) -> bool: ... + def remove_option(self, opt_str: str) -> None: ... + def set_conflict_handler(self, handler: Literal["error", "resolve"]) -> None: ... + def set_description(self, description: str | None) -> None: ... + +class OptionGroup(OptionContainer): + option_list: list[Option] + parser: OptionParser + title: str + def __init__(self, parser: OptionParser, title: str, description: str | None = None) -> None: ... + def _create_option_list(self) -> None: ... + def set_title(self, title: str) -> None: ... + +class Values: + def __init__(self, defaults: Mapping[str, object] | None = None) -> None: ... + def _update(self, dict: Mapping[str, object], mode: Literal["careful", "loose"]) -> None: ... + def _update_careful(self, dict: Mapping[str, object]) -> None: ... 
+ def _update_loose(self, dict: Mapping[str, object]) -> None: ... + def ensure_value(self, attr: str, value: object) -> Any: ... # return type cannot be known statically + def read_file(self, filename: str, mode: Literal["careful", "loose"] = "careful") -> None: ... + def read_module(self, modname: str, mode: Literal["careful", "loose"] = "careful") -> None: ... + __hash__: ClassVar[None] # type: ignore[assignment] + # __getattr__ doesn't exist, but anything passed as a default to __init__ + # is set on the instance. + def __getattr__(self, name: str) -> Any: ... + # TODO: mypy infers -> object for __getattr__ if __setattr__ has `value: object` + def __setattr__(self, name: str, value: Any, /) -> None: ... + def __eq__(self, other: object) -> bool: ... + +class OptionParser(OptionContainer): + allow_interspersed_args: bool + epilog: str | None + formatter: HelpFormatter + largs: list[str] | None + option_groups: list[OptionGroup] + option_list: list[Option] + process_default_values: bool + prog: str | None + rargs: list[str] | None + standard_option_list: list[Option] + usage: str | None + values: Values | None + version: str + def __init__( + self, + usage: str | None = None, + option_list: Iterable[Option] | None = None, + option_class: type[Option] = ..., + version: str | None = None, + conflict_handler: str = "error", + description: str | None = None, + formatter: HelpFormatter | None = None, + add_help_option: bool = True, + prog: str | None = None, + epilog: str | None = None, + ) -> None: ... + def _add_help_option(self) -> None: ... + def _add_version_option(self) -> None: ... + def _create_option_list(self) -> None: ... + def _get_all_options(self) -> list[Option]: ... + def _get_args(self, args: list[str] | None) -> list[str]: ... + def _init_parsing_state(self) -> None: ... + def _match_long_opt(self, opt: str) -> str: ... + def _populate_option_list(self, option_list: Iterable[Option] | None, add_help: bool = True) -> None: ... + def _process_args(self, largs: list[str], rargs: list[str], values: Values) -> None: ... + def _process_long_opt(self, rargs: list[str], values: Values) -> None: ... + def _process_short_opts(self, rargs: list[str], values: Values) -> None: ... + @overload + def add_option_group(self, opt_group: OptionGroup, /) -> OptionGroup: ... + @overload + def add_option_group(self, title: str, /, description: str | None = None) -> OptionGroup: ... + def check_values(self, values: Values, args: list[str]) -> tuple[Values, list[str]]: ... + def disable_interspersed_args(self) -> None: ... + def enable_interspersed_args(self) -> None: ... + def error(self, msg: str) -> NoReturn: ... + def exit(self, status: int = 0, msg: str | None = None) -> NoReturn: ... + def expand_prog_name(self, s: str) -> str: ... + def format_epilog(self, formatter: HelpFormatter) -> str: ... + def format_help(self, formatter: HelpFormatter | None = None) -> str: ... + def format_option_help(self, formatter: HelpFormatter | None = None) -> str: ... + def get_default_values(self) -> Values: ... + def get_option_group(self, opt_str: str) -> OptionGroup | None: ... + def get_prog_name(self) -> str: ... + def get_usage(self) -> str: ... + def get_version(self) -> str: ... + def parse_args(self, args: list[str] | None = None, values: Values | None = None) -> tuple[Values, list[str]]: ... + def print_usage(self, file: SupportsWrite[str] | None = None) -> None: ... + def print_help(self, file: SupportsWrite[str] | None = None) -> None: ... 
+ def print_version(self, file: SupportsWrite[str] | None = None) -> None: ... + def set_default(self, dest: str, value: Any) -> None: ... # default value can be "any" type + def set_defaults(self, **kwargs: Any) -> None: ... # default values can be "any" type + def set_process_default_values(self, process: bool) -> None: ... + def set_usage(self, usage: str | None) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/os/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/os/__init__.pyi new file mode 100644 index 0000000..bb0a571 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/os/__init__.pyi @@ -0,0 +1,1671 @@ +import sys +from _typeshed import ( + AnyStr_co, + BytesPath, + FileDescriptor, + FileDescriptorLike, + FileDescriptorOrPath, + GenericPath, + OpenBinaryMode, + OpenBinaryModeReading, + OpenBinaryModeUpdating, + OpenBinaryModeWriting, + OpenTextMode, + ReadableBuffer, + StrOrBytesPath, + StrPath, + SupportsLenAndGetItem, + Unused, + WriteableBuffer, + structseq, +) +from abc import ABC, abstractmethod +from builtins import OSError +from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping, Sequence +from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper +from subprocess import Popen +from types import GenericAlias, TracebackType +from typing import ( + IO, + Any, + AnyStr, + BinaryIO, + Final, + Generic, + Literal, + NoReturn, + Protocol, + TypeVar, + final, + overload, + runtime_checkable, + type_check_only, +) +from typing_extensions import LiteralString, Self, TypeAlias, Unpack, deprecated + +from . import path as _path + +# Re-export common definitions from os.path to reduce duplication +from .path import ( + altsep as altsep, + curdir as curdir, + defpath as defpath, + devnull as devnull, + extsep as extsep, + pardir as pardir, + pathsep as pathsep, + sep as sep, +) + +__all__ = [ + "F_OK", + "O_APPEND", + "O_CREAT", + "O_EXCL", + "O_RDONLY", + "O_RDWR", + "O_TRUNC", + "O_WRONLY", + "P_NOWAIT", + "P_NOWAITO", + "P_WAIT", + "R_OK", + "SEEK_CUR", + "SEEK_END", + "SEEK_SET", + "TMP_MAX", + "W_OK", + "X_OK", + "DirEntry", + "_exit", + "abort", + "access", + "altsep", + "chdir", + "chmod", + "close", + "closerange", + "cpu_count", + "curdir", + "defpath", + "device_encoding", + "devnull", + "dup", + "dup2", + "environ", + "error", + "execl", + "execle", + "execlp", + "execlpe", + "execv", + "execve", + "execvp", + "execvpe", + "extsep", + "fdopen", + "fsdecode", + "fsencode", + "fspath", + "fstat", + "fsync", + "ftruncate", + "get_exec_path", + "get_inheritable", + "get_terminal_size", + "getcwd", + "getcwdb", + "getenv", + "getlogin", + "getpid", + "getppid", + "isatty", + "kill", + "linesep", + "link", + "listdir", + "lseek", + "lstat", + "makedirs", + "mkdir", + "name", + "open", + "pardir", + "path", + "pathsep", + "pipe", + "popen", + "putenv", + "read", + "readlink", + "remove", + "removedirs", + "rename", + "renames", + "replace", + "rmdir", + "scandir", + "sep", + "set_inheritable", + "spawnl", + "spawnle", + "spawnv", + "spawnve", + "stat", + "stat_result", + "statvfs_result", + "strerror", + "supports_bytes_environ", + "symlink", + "system", + "terminal_size", + "times", + "times_result", + "truncate", + "umask", + "uname_result", + "unlink", + "unsetenv", + "urandom", + "utime", + "waitpid", + "waitstatus_to_exitcode", + "walk", + "write", +] +if sys.version_info >= (3, 14): + __all__ += ["readinto"] +if sys.platform == "darwin" and 
sys.version_info >= (3, 12): + __all__ += ["PRIO_DARWIN_BG", "PRIO_DARWIN_NONUI", "PRIO_DARWIN_PROCESS", "PRIO_DARWIN_THREAD"] +if sys.platform == "darwin" and sys.version_info >= (3, 10): + __all__ += ["O_EVTONLY", "O_NOFOLLOW_ANY", "O_SYMLINK"] +if sys.platform == "linux": + __all__ += [ + "GRND_NONBLOCK", + "GRND_RANDOM", + "MFD_ALLOW_SEALING", + "MFD_CLOEXEC", + "MFD_HUGETLB", + "MFD_HUGE_16GB", + "MFD_HUGE_16MB", + "MFD_HUGE_1GB", + "MFD_HUGE_1MB", + "MFD_HUGE_256MB", + "MFD_HUGE_2GB", + "MFD_HUGE_2MB", + "MFD_HUGE_32MB", + "MFD_HUGE_512KB", + "MFD_HUGE_512MB", + "MFD_HUGE_64KB", + "MFD_HUGE_8MB", + "MFD_HUGE_MASK", + "MFD_HUGE_SHIFT", + "O_DIRECT", + "O_LARGEFILE", + "O_NOATIME", + "O_PATH", + "O_RSYNC", + "O_TMPFILE", + "P_PIDFD", + "RTLD_DEEPBIND", + "SCHED_BATCH", + "SCHED_IDLE", + "SCHED_RESET_ON_FORK", + "XATTR_CREATE", + "XATTR_REPLACE", + "XATTR_SIZE_MAX", + "copy_file_range", + "getrandom", + "getxattr", + "listxattr", + "memfd_create", + "pidfd_open", + "removexattr", + "setxattr", + ] +if sys.platform == "linux" and sys.version_info >= (3, 14): + __all__ += ["SCHED_DEADLINE", "SCHED_NORMAL"] +if sys.platform == "linux" and sys.version_info >= (3, 13): + __all__ += [ + "POSIX_SPAWN_CLOSEFROM", + "TFD_CLOEXEC", + "TFD_NONBLOCK", + "TFD_TIMER_ABSTIME", + "TFD_TIMER_CANCEL_ON_SET", + "timerfd_create", + "timerfd_gettime", + "timerfd_gettime_ns", + "timerfd_settime", + "timerfd_settime_ns", + ] +if sys.platform == "linux" and sys.version_info >= (3, 12): + __all__ += [ + "CLONE_FILES", + "CLONE_FS", + "CLONE_NEWCGROUP", + "CLONE_NEWIPC", + "CLONE_NEWNET", + "CLONE_NEWNS", + "CLONE_NEWPID", + "CLONE_NEWTIME", + "CLONE_NEWUSER", + "CLONE_NEWUTS", + "CLONE_SIGHAND", + "CLONE_SYSVSEM", + "CLONE_THREAD", + "CLONE_VM", + "setns", + "unshare", + "PIDFD_NONBLOCK", + ] +if sys.platform == "linux" and sys.version_info >= (3, 10): + __all__ += [ + "EFD_CLOEXEC", + "EFD_NONBLOCK", + "EFD_SEMAPHORE", + "RWF_APPEND", + "SPLICE_F_MORE", + "SPLICE_F_MOVE", + "SPLICE_F_NONBLOCK", + "eventfd", + "eventfd_read", + "eventfd_write", + "splice", + ] +if sys.platform == "win32": + __all__ += [ + "O_BINARY", + "O_NOINHERIT", + "O_RANDOM", + "O_SEQUENTIAL", + "O_SHORT_LIVED", + "O_TEMPORARY", + "O_TEXT", + "P_DETACH", + "P_OVERLAY", + "get_handle_inheritable", + "set_handle_inheritable", + "startfile", + ] +if sys.platform == "win32" and sys.version_info >= (3, 12): + __all__ += ["listdrives", "listmounts", "listvolumes"] +if sys.platform != "win32": + __all__ += [ + "CLD_CONTINUED", + "CLD_DUMPED", + "CLD_EXITED", + "CLD_KILLED", + "CLD_STOPPED", + "CLD_TRAPPED", + "EX_CANTCREAT", + "EX_CONFIG", + "EX_DATAERR", + "EX_IOERR", + "EX_NOHOST", + "EX_NOINPUT", + "EX_NOPERM", + "EX_NOUSER", + "EX_OSERR", + "EX_OSFILE", + "EX_PROTOCOL", + "EX_SOFTWARE", + "EX_TEMPFAIL", + "EX_UNAVAILABLE", + "EX_USAGE", + "F_LOCK", + "F_TEST", + "F_TLOCK", + "F_ULOCK", + "NGROUPS_MAX", + "O_ACCMODE", + "O_ASYNC", + "O_CLOEXEC", + "O_DIRECTORY", + "O_DSYNC", + "O_NDELAY", + "O_NOCTTY", + "O_NOFOLLOW", + "O_NONBLOCK", + "O_SYNC", + "POSIX_SPAWN_CLOSE", + "POSIX_SPAWN_DUP2", + "POSIX_SPAWN_OPEN", + "PRIO_PGRP", + "PRIO_PROCESS", + "PRIO_USER", + "P_ALL", + "P_PGID", + "P_PID", + "RTLD_GLOBAL", + "RTLD_LAZY", + "RTLD_LOCAL", + "RTLD_NODELETE", + "RTLD_NOLOAD", + "RTLD_NOW", + "SCHED_FIFO", + "SCHED_OTHER", + "SCHED_RR", + "SEEK_DATA", + "SEEK_HOLE", + "ST_NOSUID", + "ST_RDONLY", + "WCONTINUED", + "WCOREDUMP", + "WEXITED", + "WEXITSTATUS", + "WIFCONTINUED", + "WIFEXITED", + "WIFSIGNALED", + "WIFSTOPPED", + "WNOHANG", + "WNOWAIT", 
+ "WSTOPPED", + "WSTOPSIG", + "WTERMSIG", + "WUNTRACED", + "chown", + "chroot", + "confstr", + "confstr_names", + "ctermid", + "environb", + "fchdir", + "fchown", + "fork", + "forkpty", + "fpathconf", + "fstatvfs", + "fwalk", + "getegid", + "getenvb", + "geteuid", + "getgid", + "getgrouplist", + "getgroups", + "getloadavg", + "getpgid", + "getpgrp", + "getpriority", + "getsid", + "getuid", + "initgroups", + "killpg", + "lchown", + "lockf", + "major", + "makedev", + "minor", + "mkfifo", + "mknod", + "nice", + "openpty", + "pathconf", + "pathconf_names", + "posix_spawn", + "posix_spawnp", + "pread", + "preadv", + "pwrite", + "pwritev", + "readv", + "register_at_fork", + "sched_get_priority_max", + "sched_get_priority_min", + "sched_yield", + "sendfile", + "setegid", + "seteuid", + "setgid", + "setgroups", + "setpgid", + "setpgrp", + "setpriority", + "setregid", + "setreuid", + "setsid", + "setuid", + "spawnlp", + "spawnlpe", + "spawnvp", + "spawnvpe", + "statvfs", + "sync", + "sysconf", + "sysconf_names", + "tcgetpgrp", + "tcsetpgrp", + "ttyname", + "uname", + "wait", + "wait3", + "wait4", + "writev", + ] +if sys.platform != "win32" and sys.version_info >= (3, 13): + __all__ += ["grantpt", "posix_openpt", "ptsname", "unlockpt"] +if sys.platform != "win32" and sys.version_info >= (3, 11): + __all__ += ["login_tty"] +if sys.platform != "win32" and sys.version_info >= (3, 10): + __all__ += ["O_FSYNC"] +if sys.platform != "darwin" and sys.platform != "win32": + __all__ += [ + "POSIX_FADV_DONTNEED", + "POSIX_FADV_NOREUSE", + "POSIX_FADV_NORMAL", + "POSIX_FADV_RANDOM", + "POSIX_FADV_SEQUENTIAL", + "POSIX_FADV_WILLNEED", + "RWF_DSYNC", + "RWF_HIPRI", + "RWF_NOWAIT", + "RWF_SYNC", + "ST_APPEND", + "ST_MANDLOCK", + "ST_NOATIME", + "ST_NODEV", + "ST_NODIRATIME", + "ST_NOEXEC", + "ST_RELATIME", + "ST_SYNCHRONOUS", + "ST_WRITE", + "fdatasync", + "getresgid", + "getresuid", + "pipe2", + "posix_fadvise", + "posix_fallocate", + "sched_getaffinity", + "sched_getparam", + "sched_getscheduler", + "sched_param", + "sched_rr_get_interval", + "sched_setaffinity", + "sched_setparam", + "sched_setscheduler", + "setresgid", + "setresuid", + ] +if sys.platform != "linux" and sys.platform != "win32": + __all__ += ["O_EXLOCK", "O_SHLOCK", "chflags", "lchflags"] +if sys.platform != "linux" and sys.platform != "win32" and sys.version_info >= (3, 13): + __all__ += ["O_EXEC", "O_SEARCH"] +if sys.platform != "darwin" or sys.version_info >= (3, 13): + if sys.platform != "win32": + __all__ += ["waitid", "waitid_result"] +if sys.platform != "win32" or sys.version_info >= (3, 13): + __all__ += ["fchmod"] + if sys.platform != "linux": + __all__ += ["lchmod"] +if sys.platform != "win32" or sys.version_info >= (3, 12): + __all__ += ["get_blocking", "set_blocking"] +if sys.platform != "win32" or sys.version_info >= (3, 11): + __all__ += ["EX_OK"] + +# This unnecessary alias is to work around various errors +path = _path + +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") + +# ----- os variables ----- + +error = OSError + +supports_bytes_environ: bool + +supports_dir_fd: set[Callable[..., Any]] +supports_fd: set[Callable[..., Any]] +supports_effective_ids: set[Callable[..., Any]] +supports_follow_symlinks: set[Callable[..., Any]] + +if sys.platform != "win32": + # Unix only + PRIO_PROCESS: Final[int] + PRIO_PGRP: Final[int] + PRIO_USER: Final[int] + + F_LOCK: Final[int] + F_TLOCK: Final[int] + F_ULOCK: Final[int] + F_TEST: Final[int] + + if sys.platform != "darwin": + POSIX_FADV_NORMAL: Final[int] + 
POSIX_FADV_SEQUENTIAL: Final[int] + POSIX_FADV_RANDOM: Final[int] + POSIX_FADV_NOREUSE: Final[int] + POSIX_FADV_WILLNEED: Final[int] + POSIX_FADV_DONTNEED: Final[int] + + if sys.platform != "linux" and sys.platform != "darwin": + # In the os-module docs, these are marked as being available + # on "Unix, not Emscripten, not WASI." + # However, in the source code, a comment indicates they're "FreeBSD constants". + # sys.platform could have one of many values on a FreeBSD Python build, + # so the sys-module docs recommend doing `if sys.platform.startswith('freebsd')` + # to detect FreeBSD builds. Unfortunately that would be too dynamic + # for type checkers, however. + SF_NODISKIO: Final[int] + SF_MNOWAIT: Final[int] + SF_SYNC: Final[int] + + if sys.version_info >= (3, 11): + SF_NOCACHE: Final[int] + + if sys.platform == "linux": + XATTR_SIZE_MAX: Final[int] + XATTR_CREATE: Final[int] + XATTR_REPLACE: Final[int] + + P_PID: Final[int] + P_PGID: Final[int] + P_ALL: Final[int] + + if sys.platform == "linux": + P_PIDFD: Final[int] + + WEXITED: Final[int] + WSTOPPED: Final[int] + WNOWAIT: Final[int] + + CLD_EXITED: Final[int] + CLD_DUMPED: Final[int] + CLD_TRAPPED: Final[int] + CLD_CONTINUED: Final[int] + CLD_KILLED: Final[int] + CLD_STOPPED: Final[int] + + SCHED_OTHER: Final[int] + SCHED_FIFO: Final[int] + SCHED_RR: Final[int] + if sys.platform != "darwin" and sys.platform != "linux": + SCHED_SPORADIC: Final[int] + +if sys.platform == "linux": + SCHED_BATCH: Final[int] + SCHED_IDLE: Final[int] + SCHED_RESET_ON_FORK: Final[int] + +if sys.version_info >= (3, 14) and sys.platform == "linux": + SCHED_DEADLINE: Final[int] + SCHED_NORMAL: Final[int] + +if sys.platform != "win32": + RTLD_LAZY: Final[int] + RTLD_NOW: Final[int] + RTLD_GLOBAL: Final[int] + RTLD_LOCAL: Final[int] + RTLD_NODELETE: Final[int] + RTLD_NOLOAD: Final[int] + +if sys.platform == "linux": + RTLD_DEEPBIND: Final[int] + GRND_NONBLOCK: Final[int] + GRND_RANDOM: Final[int] + +if sys.platform == "darwin" and sys.version_info >= (3, 12): + PRIO_DARWIN_BG: Final[int] + PRIO_DARWIN_NONUI: Final[int] + PRIO_DARWIN_PROCESS: Final[int] + PRIO_DARWIN_THREAD: Final[int] + +SEEK_SET: Final = 0 +SEEK_CUR: Final = 1 +SEEK_END: Final = 2 +if sys.platform != "win32": + SEEK_DATA: Final = 3 + SEEK_HOLE: Final = 4 + +O_RDONLY: Final[int] +O_WRONLY: Final[int] +O_RDWR: Final[int] +O_APPEND: Final[int] +O_CREAT: Final[int] +O_EXCL: Final[int] +O_TRUNC: Final[int] +if sys.platform == "win32": + O_BINARY: Final[int] + O_NOINHERIT: Final[int] + O_SHORT_LIVED: Final[int] + O_TEMPORARY: Final[int] + O_RANDOM: Final[int] + O_SEQUENTIAL: Final[int] + O_TEXT: Final[int] + +if sys.platform != "win32": + O_DSYNC: Final[int] + O_SYNC: Final[int] + O_NDELAY: Final[int] + O_NONBLOCK: Final[int] + O_NOCTTY: Final[int] + O_CLOEXEC: Final[int] + O_ASYNC: Final[int] # Gnu extension if in C library + O_DIRECTORY: Final[int] # Gnu extension if in C library + O_NOFOLLOW: Final[int] # Gnu extension if in C library + O_ACCMODE: Final[int] # TODO: when does this exist? 
+ +if sys.platform == "linux": + O_RSYNC: Final[int] + O_DIRECT: Final[int] # Gnu extension if in C library + O_NOATIME: Final[int] # Gnu extension if in C library + O_PATH: Final[int] # Gnu extension if in C library + O_TMPFILE: Final[int] # Gnu extension if in C library + O_LARGEFILE: Final[int] # Gnu extension if in C library + +if sys.platform != "linux" and sys.platform != "win32": + O_SHLOCK: Final[int] + O_EXLOCK: Final[int] + +if sys.platform == "darwin" and sys.version_info >= (3, 10): + O_EVTONLY: Final[int] + O_NOFOLLOW_ANY: Final[int] + O_SYMLINK: Final[int] + +if sys.platform != "win32" and sys.version_info >= (3, 10): + O_FSYNC: Final[int] + +if sys.platform != "linux" and sys.platform != "win32" and sys.version_info >= (3, 13): + O_EXEC: Final[int] + O_SEARCH: Final[int] + +if sys.platform != "win32" and sys.platform != "darwin": + # posix, but apparently missing on macos + ST_APPEND: Final[int] + ST_MANDLOCK: Final[int] + ST_NOATIME: Final[int] + ST_NODEV: Final[int] + ST_NODIRATIME: Final[int] + ST_NOEXEC: Final[int] + ST_RELATIME: Final[int] + ST_SYNCHRONOUS: Final[int] + ST_WRITE: Final[int] + +if sys.platform != "win32": + NGROUPS_MAX: Final[int] + ST_NOSUID: Final[int] + ST_RDONLY: Final[int] + +linesep: Literal["\n", "\r\n"] +name: LiteralString + +F_OK: Final = 0 +R_OK: Final = 4 +W_OK: Final = 2 +X_OK: Final = 1 + +_EnvironCodeFunc: TypeAlias = Callable[[AnyStr], AnyStr] + +class _Environ(MutableMapping[AnyStr, AnyStr], Generic[AnyStr]): + encodekey: _EnvironCodeFunc[AnyStr] + decodekey: _EnvironCodeFunc[AnyStr] + encodevalue: _EnvironCodeFunc[AnyStr] + decodevalue: _EnvironCodeFunc[AnyStr] + def __init__( + self, + data: MutableMapping[AnyStr, AnyStr], + encodekey: _EnvironCodeFunc[AnyStr], + decodekey: _EnvironCodeFunc[AnyStr], + encodevalue: _EnvironCodeFunc[AnyStr], + decodevalue: _EnvironCodeFunc[AnyStr], + ) -> None: ... + def setdefault(self, key: AnyStr, value: AnyStr) -> AnyStr: ... + def copy(self) -> dict[AnyStr, AnyStr]: ... + def __delitem__(self, key: AnyStr) -> None: ... + def __getitem__(self, key: AnyStr) -> AnyStr: ... + def __setitem__(self, key: AnyStr, value: AnyStr) -> None: ... + def __iter__(self) -> Iterator[AnyStr]: ... + def __len__(self) -> int: ... + def __or__(self, other: Mapping[_T1, _T2]) -> dict[AnyStr | _T1, AnyStr | _T2]: ... + def __ror__(self, other: Mapping[_T1, _T2]) -> dict[AnyStr | _T1, AnyStr | _T2]: ... + # We use @overload instead of a Union for reasons similar to those given for + # overloading MutableMapping.update in stdlib/typing.pyi + # The type: ignore is needed due to incompatible __or__/__ior__ signatures + @overload # type: ignore[misc] + def __ior__(self, other: Mapping[AnyStr, AnyStr]) -> Self: ... + @overload + def __ior__(self, other: Iterable[tuple[AnyStr, AnyStr]]) -> Self: ... + +environ: _Environ[str] +if sys.platform != "win32": + environb: _Environ[bytes] + +if sys.version_info >= (3, 14): + def reload_environ() -> None: ... 
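Illustrative aside (editor's sketch, not part of the vendored stub): os.environ is the _Environ[str] mapping declared just above; per its __or__/__ror__ overloads, merging with | yields a plain dict rather than another _Environ. The variable names below are hypothetical.

import os

home = os.environ.get("HOME")                       # str | None
merged = os.environ | {"EXAMPLE_FLAG": "1"}         # plain dict, not an _Environ
print(type(merged).__name__, merged["EXAMPLE_FLAG"])   # dict 1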
+ +if sys.version_info >= (3, 11) or sys.platform != "win32": + EX_OK: Final[int] + +if sys.platform != "win32": + confstr_names: dict[str, int] + pathconf_names: dict[str, int] + sysconf_names: dict[str, int] + + EX_USAGE: Final[int] + EX_DATAERR: Final[int] + EX_NOINPUT: Final[int] + EX_NOUSER: Final[int] + EX_NOHOST: Final[int] + EX_UNAVAILABLE: Final[int] + EX_SOFTWARE: Final[int] + EX_OSERR: Final[int] + EX_OSFILE: Final[int] + EX_CANTCREAT: Final[int] + EX_IOERR: Final[int] + EX_TEMPFAIL: Final[int] + EX_PROTOCOL: Final[int] + EX_NOPERM: Final[int] + EX_CONFIG: Final[int] + +# Exists on some Unix platforms, e.g. Solaris. +if sys.platform != "win32" and sys.platform != "darwin" and sys.platform != "linux": + EX_NOTFOUND: Final[int] + +P_NOWAIT: Final[int] +P_NOWAITO: Final[int] +P_WAIT: Final[int] +if sys.platform == "win32": + P_DETACH: Final[int] + P_OVERLAY: Final[int] + +# wait()/waitpid() options +if sys.platform != "win32": + WNOHANG: Final[int] # Unix only + WCONTINUED: Final[int] # some Unix systems + WUNTRACED: Final[int] # Unix only + +TMP_MAX: Final[int] # Undocumented, but used by tempfile + +# ----- os classes (structures) ----- +@final +class stat_result(structseq[float], tuple[int, int, int, int, int, int, int, float, float, float]): + # The constructor of this class takes an iterable of variable length (though it must be at least 10). + # + # However, this class behaves like a tuple of 10 elements, + # no matter how long the iterable supplied to the constructor is. + # https://github.com/python/typeshed/pull/6560#discussion_r767162532 + # + # The 10 elements always present are st_mode, st_ino, st_dev, st_nlink, + # st_uid, st_gid, st_size, st_atime, st_mtime, st_ctime. + # + # More items may be added at the end by some implementations. + if sys.version_info >= (3, 10): + __match_args__: Final = ("st_mode", "st_ino", "st_dev", "st_nlink", "st_uid", "st_gid", "st_size") + + @property + def st_mode(self) -> int: ... # protection bits, + @property + def st_ino(self) -> int: ... # inode number, + @property + def st_dev(self) -> int: ... # device, + @property + def st_nlink(self) -> int: ... # number of hard links, + @property + def st_uid(self) -> int: ... # user id of owner, + @property + def st_gid(self) -> int: ... # group id of owner, + @property + def st_size(self) -> int: ... # size of file, in bytes, + @property + def st_atime(self) -> float: ... # time of most recent access, + @property + def st_mtime(self) -> float: ... # time of most recent content modification, + # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) + if sys.version_info >= (3, 12) and sys.platform == "win32": + @property + @deprecated( + """\ +Use st_birthtime instead to retrieve the file creation time. \ +In the future, this property will contain the last metadata change time.""" + ) + def st_ctime(self) -> float: ... + else: + @property + def st_ctime(self) -> float: ... + + @property + def st_atime_ns(self) -> int: ... # time of most recent access, in nanoseconds + @property + def st_mtime_ns(self) -> int: ... # time of most recent content modification in nanoseconds + # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) in nanoseconds + @property + def st_ctime_ns(self) -> int: ... + if sys.platform == "win32": + @property + def st_file_attributes(self) -> int: ... + @property + def st_reparse_tag(self) -> int: ... 
+ if sys.version_info >= (3, 12): + @property + def st_birthtime(self) -> float: ... # time of file creation in seconds + @property + def st_birthtime_ns(self) -> int: ... # time of file creation in nanoseconds + else: + @property + def st_blocks(self) -> int: ... # number of blocks allocated for file + @property + def st_blksize(self) -> int: ... # filesystem blocksize + @property + def st_rdev(self) -> int: ... # type of device if an inode device + if sys.platform != "linux": + # These properties are available on MacOS, but not Ubuntu. + # On other Unix systems (such as FreeBSD), the following attributes may be + # available (but may be only filled out if root tries to use them): + @property + def st_gen(self) -> int: ... # file generation number + @property + def st_birthtime(self) -> float: ... # time of file creation in seconds + if sys.platform == "darwin": + @property + def st_flags(self) -> int: ... # user defined flags for file + # Attributes documented as sometimes appearing, but deliberately omitted from the stub: `st_creator`, `st_rsize`, `st_type`. + # See https://github.com/python/typeshed/pull/6560#issuecomment-991253327 + +# mypy and pyright object to this being both ABC and Protocol. +# At runtime it inherits from ABC and is not a Protocol, but it will be +# on the allowlist for use as a Protocol starting in 3.14. +@runtime_checkable +class PathLike(ABC, Protocol[AnyStr_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + __slots__ = () + @abstractmethod + def __fspath__(self) -> AnyStr_co: ... + +@overload +def listdir(path: StrPath | None = None) -> list[str]: ... +@overload +def listdir(path: BytesPath) -> list[bytes]: ... +@overload +def listdir(path: int) -> list[str]: ... +@final +class DirEntry(Generic[AnyStr]): + # This is what the scandir iterator yields + # The constructor is hidden + + @property + def name(self) -> AnyStr: ... + @property + def path(self) -> AnyStr: ... + def inode(self) -> int: ... + def is_dir(self, *, follow_symlinks: bool = True) -> bool: ... + def is_file(self, *, follow_symlinks: bool = True) -> bool: ... + def is_symlink(self) -> bool: ... + def stat(self, *, follow_symlinks: bool = True) -> stat_result: ... + def __fspath__(self) -> AnyStr: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + if sys.version_info >= (3, 12): + def is_junction(self) -> bool: ... + +@final +class statvfs_result(structseq[int], tuple[int, int, int, int, int, int, int, int, int, int, int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ( + "f_bsize", + "f_frsize", + "f_blocks", + "f_bfree", + "f_bavail", + "f_files", + "f_ffree", + "f_favail", + "f_flag", + "f_namemax", + ) + + @property + def f_bsize(self) -> int: ... + @property + def f_frsize(self) -> int: ... + @property + def f_blocks(self) -> int: ... + @property + def f_bfree(self) -> int: ... + @property + def f_bavail(self) -> int: ... + @property + def f_files(self) -> int: ... + @property + def f_ffree(self) -> int: ... + @property + def f_favail(self) -> int: ... + @property + def f_flag(self) -> int: ... + @property + def f_namemax(self) -> int: ... + @property + def f_fsid(self) -> int: ... + +# ----- os function stubs ----- +def fsencode(filename: StrOrBytesPath) -> bytes: ... +def fsdecode(filename: StrOrBytesPath) -> str: ... +@overload +def fspath(path: str) -> str: ... +@overload +def fspath(path: bytes) -> bytes: ... +@overload +def fspath(path: PathLike[AnyStr]) -> AnyStr: ... 
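Illustrative aside (editor's sketch, not part of the vendored stub): the fspath() overloads above keep str as str and bytes as bytes, and reduce a PathLike object via __fspath__; pathlib.Path is the usual PathLike[str] implementation, and the runtime_checkable PathLike protocol supports isinstance checks.

import os
import pathlib

print(os.fspath("data.txt"))                        # 'data.txt'      (str -> str)
print(os.fspath(b"data.txt"))                       # b'data.txt'     (bytes -> bytes)
print(os.fspath(pathlib.Path("a") / "b"))           # 'a/b' on POSIX  (PathLike[str] -> str)
print(os.fsencode(pathlib.Path("a")))               # b'a'            (fsencode always returns bytes)
print(isinstance(pathlib.Path("."), os.PathLike))   # True, via the protocol declared above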
+def get_exec_path(env: Mapping[str, str] | None = None) -> list[str]: ... +def getlogin() -> str: ... +def getpid() -> int: ... +def getppid() -> int: ... +def strerror(code: int, /) -> str: ... +def umask(mask: int, /) -> int: ... +@final +class uname_result(structseq[str], tuple[str, str, str, str, str]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("sysname", "nodename", "release", "version", "machine") + + @property + def sysname(self) -> str: ... + @property + def nodename(self) -> str: ... + @property + def release(self) -> str: ... + @property + def version(self) -> str: ... + @property + def machine(self) -> str: ... + +if sys.platform != "win32": + def ctermid() -> str: ... + def getegid() -> int: ... + def geteuid() -> int: ... + def getgid() -> int: ... + def getgrouplist(user: str, group: int, /) -> list[int]: ... + def getgroups() -> list[int]: ... # Unix only, behaves differently on Mac + def initgroups(username: str, gid: int, /) -> None: ... + def getpgid(pid: int) -> int: ... + def getpgrp() -> int: ... + def getpriority(which: int, who: int) -> int: ... + def setpriority(which: int, who: int, priority: int) -> None: ... + if sys.platform != "darwin": + def getresuid() -> tuple[int, int, int]: ... + def getresgid() -> tuple[int, int, int]: ... + + def getuid() -> int: ... + def setegid(egid: int, /) -> None: ... + def seteuid(euid: int, /) -> None: ... + def setgid(gid: int, /) -> None: ... + def setgroups(groups: Sequence[int], /) -> None: ... + def setpgrp() -> None: ... + def setpgid(pid: int, pgrp: int, /) -> None: ... + def setregid(rgid: int, egid: int, /) -> None: ... + if sys.platform != "darwin": + def setresgid(rgid: int, egid: int, sgid: int, /) -> None: ... + def setresuid(ruid: int, euid: int, suid: int, /) -> None: ... + + def setreuid(ruid: int, euid: int, /) -> None: ... + def getsid(pid: int, /) -> int: ... + def setsid() -> None: ... + def setuid(uid: int, /) -> None: ... + def uname() -> uname_result: ... + +@overload +def getenv(key: str) -> str | None: ... +@overload +def getenv(key: str, default: _T) -> str | _T: ... + +if sys.platform != "win32": + @overload + def getenvb(key: bytes) -> bytes | None: ... + @overload + def getenvb(key: bytes, default: _T) -> bytes | _T: ... + def putenv(name: StrOrBytesPath, value: StrOrBytesPath, /) -> None: ... + def unsetenv(name: StrOrBytesPath, /) -> None: ... + +else: + def putenv(name: str, value: str, /) -> None: ... + def unsetenv(name: str, /) -> None: ... + +_Opener: TypeAlias = Callable[[str, int], int] + +@overload +def fdopen( + fd: int, + mode: OpenTextMode = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., +) -> TextIOWrapper: ... +@overload +def fdopen( + fd: int, + mode: OpenBinaryMode, + buffering: Literal[0], + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = ..., + opener: _Opener | None = ..., +) -> FileIO: ... +@overload +def fdopen( + fd: int, + mode: OpenBinaryModeUpdating, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = ..., + opener: _Opener | None = ..., +) -> BufferedRandom: ... +@overload +def fdopen( + fd: int, + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = ..., + opener: _Opener | None = ..., +) -> BufferedWriter: ... 
+@overload +def fdopen( + fd: int, + mode: OpenBinaryModeReading, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = ..., + opener: _Opener | None = ..., +) -> BufferedReader: ... +@overload +def fdopen( + fd: int, + mode: OpenBinaryMode, + buffering: int = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = ..., + opener: _Opener | None = ..., +) -> BinaryIO: ... +@overload +def fdopen( + fd: int, + mode: str, + buffering: int = -1, + encoding: str | None = None, + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., +) -> IO[Any]: ... +def close(fd: int) -> None: ... +def closerange(fd_low: int, fd_high: int, /) -> None: ... +def device_encoding(fd: int) -> str | None: ... +def dup(fd: int, /) -> int: ... +def dup2(fd: int, fd2: int, inheritable: bool = True) -> int: ... +def fstat(fd: int) -> stat_result: ... +def ftruncate(fd: int, length: int, /) -> None: ... +def fsync(fd: FileDescriptorLike) -> None: ... +def isatty(fd: int, /) -> bool: ... + +if sys.platform != "win32" and sys.version_info >= (3, 11): + def login_tty(fd: int, /) -> None: ... + +if sys.version_info >= (3, 11): + def lseek(fd: int, position: int, whence: int, /) -> int: ... + +else: + def lseek(fd: int, position: int, how: int, /) -> int: ... + +def open(path: StrOrBytesPath, flags: int, mode: int = 0o777, *, dir_fd: int | None = None) -> int: ... +def pipe() -> tuple[int, int]: ... +def read(fd: int, length: int, /) -> bytes: ... + +if sys.version_info >= (3, 12) or sys.platform != "win32": + def get_blocking(fd: int, /) -> bool: ... + def set_blocking(fd: int, blocking: bool, /) -> None: ... + +if sys.platform != "win32": + def fchown(fd: int, uid: int, gid: int) -> None: ... + def fpathconf(fd: int, name: str | int, /) -> int: ... + def fstatvfs(fd: int, /) -> statvfs_result: ... + def lockf(fd: int, command: int, length: int, /) -> None: ... + def openpty() -> tuple[int, int]: ... # some flavors of Unix + if sys.platform != "darwin": + def fdatasync(fd: FileDescriptorLike) -> None: ... + def pipe2(flags: int, /) -> tuple[int, int]: ... # some flavors of Unix + def posix_fallocate(fd: int, offset: int, length: int, /) -> None: ... + def posix_fadvise(fd: int, offset: int, length: int, advice: int, /) -> None: ... + + def pread(fd: int, length: int, offset: int, /) -> bytes: ... + def pwrite(fd: int, buffer: ReadableBuffer, offset: int, /) -> int: ... + # In CI, stubtest sometimes reports that these are available on MacOS, sometimes not + def preadv(fd: int, buffers: SupportsLenAndGetItem[WriteableBuffer], offset: int, flags: int = 0, /) -> int: ... + def pwritev(fd: int, buffers: SupportsLenAndGetItem[ReadableBuffer], offset: int, flags: int = 0, /) -> int: ... + if sys.platform != "darwin": + if sys.version_info >= (3, 10): + RWF_APPEND: Final[int] # docs say available on 3.7+, stubtest says otherwise + RWF_DSYNC: Final[int] + RWF_SYNC: Final[int] + RWF_HIPRI: Final[int] + RWF_NOWAIT: Final[int] + + if sys.platform == "linux": + def sendfile(out_fd: FileDescriptor, in_fd: FileDescriptor, offset: int | None, count: int) -> int: ... + else: + def sendfile( + out_fd: FileDescriptor, + in_fd: FileDescriptor, + offset: int, + count: int, + headers: Sequence[ReadableBuffer] = (), + trailers: Sequence[ReadableBuffer] = (), + flags: int = 0, + ) -> int: ... 
# FreeBSD and Mac OS X only + + def readv(fd: int, buffers: SupportsLenAndGetItem[WriteableBuffer], /) -> int: ... + def writev(fd: int, buffers: SupportsLenAndGetItem[ReadableBuffer], /) -> int: ... + +if sys.version_info >= (3, 14): + def readinto(fd: int, buffer: ReadableBuffer, /) -> int: ... + +@final +class terminal_size(structseq[int], tuple[int, int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("columns", "lines") + + @property + def columns(self) -> int: ... + @property + def lines(self) -> int: ... + +def get_terminal_size(fd: int = ..., /) -> terminal_size: ... +def get_inheritable(fd: int, /) -> bool: ... +def set_inheritable(fd: int, inheritable: bool, /) -> None: ... + +if sys.platform == "win32": + def get_handle_inheritable(handle: int, /) -> bool: ... + def set_handle_inheritable(handle: int, inheritable: bool, /) -> None: ... + +if sys.platform != "win32": + # Unix only + def tcgetpgrp(fd: int, /) -> int: ... + def tcsetpgrp(fd: int, pgid: int, /) -> None: ... + def ttyname(fd: int, /) -> str: ... + +def write(fd: int, data: ReadableBuffer, /) -> int: ... +def access( + path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, effective_ids: bool = False, follow_symlinks: bool = True +) -> bool: ... +def chdir(path: FileDescriptorOrPath) -> None: ... + +if sys.platform != "win32": + def fchdir(fd: FileDescriptorLike) -> None: ... + +def getcwd() -> str: ... +def getcwdb() -> bytes: ... +def chmod(path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> None: ... + +if sys.platform != "win32" and sys.platform != "linux": + def chflags(path: StrOrBytesPath, flags: int, follow_symlinks: bool = True) -> None: ... # some flavors of Unix + def lchflags(path: StrOrBytesPath, flags: int) -> None: ... + +if sys.platform != "win32": + def chroot(path: StrOrBytesPath) -> None: ... + def chown( + path: FileDescriptorOrPath, uid: int, gid: int, *, dir_fd: int | None = None, follow_symlinks: bool = True + ) -> None: ... + def lchown(path: StrOrBytesPath, uid: int, gid: int) -> None: ... + +def link( + src: StrOrBytesPath, + dst: StrOrBytesPath, + *, + src_dir_fd: int | None = None, + dst_dir_fd: int | None = None, + follow_symlinks: bool = True, +) -> None: ... +def lstat(path: StrOrBytesPath, *, dir_fd: int | None = None) -> stat_result: ... +def mkdir(path: StrOrBytesPath, mode: int = 0o777, *, dir_fd: int | None = None) -> None: ... + +if sys.platform != "win32": + def mkfifo(path: StrOrBytesPath, mode: int = 0o666, *, dir_fd: int | None = None) -> None: ... # Unix only + +def makedirs(name: StrOrBytesPath, mode: int = 0o777, exist_ok: bool = False) -> None: ... + +if sys.platform != "win32": + def mknod(path: StrOrBytesPath, mode: int = 0o600, device: int = 0, *, dir_fd: int | None = None) -> None: ... + def major(device: int, /) -> int: ... + def minor(device: int, /) -> int: ... + def makedev(major: int, minor: int, /) -> int: ... + def pathconf(path: FileDescriptorOrPath, name: str | int) -> int: ... # Unix only + +def readlink(path: GenericPath[AnyStr], *, dir_fd: int | None = None) -> AnyStr: ... +def remove(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: ... +def removedirs(name: StrOrBytesPath) -> None: ... +def rename(src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = None, dst_dir_fd: int | None = None) -> None: ... +def renames(old: StrOrBytesPath, new: StrOrBytesPath) -> None: ... 
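Illustrative aside (editor's sketch, not part of the vendored stub): the descriptor-level functions and O_* flag constants stubbed above compose as follows; the temporary file name is hypothetical.

import os
import tempfile

path = os.path.join(tempfile.gettempdir(), "fd_demo.txt")
fd = os.open(path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o644)   # returns an int fd
try:
    os.write(fd, b"hello\n")          # takes a ReadableBuffer, returns bytes written
    os.fsync(fd)                      # flush OS buffers for this descriptor
    print(os.fstat(fd).st_size)       # 6, via the stat_result struct sequence above
finally:
    os.close(fd)
os.remove(path)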
+def replace( + src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = None, dst_dir_fd: int | None = None +) -> None: ... +def rmdir(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: ... +@final +@type_check_only +class _ScandirIterator(Generic[AnyStr]): + def __del__(self) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> DirEntry[AnyStr]: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + def close(self) -> None: ... + +@overload +def scandir(path: None = None) -> _ScandirIterator[str]: ... +@overload +def scandir(path: int) -> _ScandirIterator[str]: ... +@overload +def scandir(path: GenericPath[AnyStr]) -> _ScandirIterator[AnyStr]: ... +def stat(path: FileDescriptorOrPath, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> stat_result: ... + +if sys.platform != "win32": + def statvfs(path: FileDescriptorOrPath) -> statvfs_result: ... # Unix only + +def symlink( + src: StrOrBytesPath, dst: StrOrBytesPath, target_is_directory: bool = False, *, dir_fd: int | None = None +) -> None: ... + +if sys.platform != "win32": + def sync() -> None: ... # Unix only + +def truncate(path: FileDescriptorOrPath, length: int) -> None: ... # Unix only up to version 3.4 +def unlink(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: ... +def utime( + path: FileDescriptorOrPath, + times: tuple[int, int] | tuple[float, float] | None = None, + *, + ns: tuple[int, int] = ..., + dir_fd: int | None = None, + follow_symlinks: bool = True, +) -> None: ... + +_OnError: TypeAlias = Callable[[OSError], object] + +def walk( + top: GenericPath[AnyStr], topdown: bool = True, onerror: _OnError | None = None, followlinks: bool = False +) -> Iterator[tuple[AnyStr, list[AnyStr], list[AnyStr]]]: ... + +if sys.platform != "win32": + @overload + def fwalk( + top: StrPath = ".", + topdown: bool = True, + onerror: _OnError | None = None, + *, + follow_symlinks: bool = False, + dir_fd: int | None = None, + ) -> Iterator[tuple[str, list[str], list[str], int]]: ... + @overload + def fwalk( + top: BytesPath, + topdown: bool = True, + onerror: _OnError | None = None, + *, + follow_symlinks: bool = False, + dir_fd: int | None = None, + ) -> Iterator[tuple[bytes, list[bytes], list[bytes], int]]: ... + if sys.platform == "linux": + def getxattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = True) -> bytes: ... + def listxattr(path: FileDescriptorOrPath | None = None, *, follow_symlinks: bool = True) -> list[str]: ... + def removexattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... + def setxattr( + path: FileDescriptorOrPath, + attribute: StrOrBytesPath, + value: ReadableBuffer, + flags: int = 0, + *, + follow_symlinks: bool = True, + ) -> None: ... + +def abort() -> NoReturn: ... + +# These are defined as execl(file, *args) but the first *arg is mandatory. +def execl(file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]]]]) -> NoReturn: ... +def execlp(file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]]]]) -> NoReturn: ... + +# These are: execle(file, *args, env) but env is pulled from the last element of the args. +def execle( + file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]], _ExecEnv]] +) -> NoReturn: ... 
+def execlpe( + file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]], _ExecEnv]] +) -> NoReturn: ... + +# The docs say `args: tuple or list of strings` +# The implementation enforces tuple or list so we can't use Sequence. +# Not separating out PathLike[str] and PathLike[bytes] here because it doesn't make much difference +# in practice, and doing so would explode the number of combinations in this already long union. +# All these combinations are necessary due to list being invariant. +_ExecVArgs: TypeAlias = ( + tuple[StrOrBytesPath, ...] + | list[bytes] + | list[str] + | list[PathLike[Any]] + | list[bytes | str] + | list[bytes | PathLike[Any]] + | list[str | PathLike[Any]] + | list[bytes | str | PathLike[Any]] +) +# Depending on the OS, the keys and values are passed either to +# PyUnicode_FSDecoder (which accepts str | ReadableBuffer) or to +# PyUnicode_FSConverter (which accepts StrOrBytesPath). For simplicity, +# we limit to str | bytes. +_ExecEnv: TypeAlias = Mapping[bytes, bytes | str] | Mapping[str, bytes | str] + +def execv(path: StrOrBytesPath, argv: _ExecVArgs, /) -> NoReturn: ... +def execve(path: FileDescriptorOrPath, argv: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... +def execvp(file: StrOrBytesPath, args: _ExecVArgs) -> NoReturn: ... +def execvpe(file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... +def _exit(status: int) -> NoReturn: ... +def kill(pid: int, signal: int, /) -> None: ... + +if sys.platform != "win32": + # Unix only + def fork() -> int: ... + def forkpty() -> tuple[int, int]: ... # some flavors of Unix + def killpg(pgid: int, signal: int, /) -> None: ... + def nice(increment: int, /) -> int: ... + if sys.platform != "darwin" and sys.platform != "linux": + def plock(op: int, /) -> None: ... + +class _wrap_close: + def __init__(self, stream: TextIOWrapper, proc: Popen[str]) -> None: ... + def close(self) -> int | None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def __iter__(self) -> Iterator[str]: ... + # Methods below here don't exist directly on the _wrap_close object, but + # are copied from the wrapped TextIOWrapper object via __getattr__. + # The full set of TextIOWrapper methods are technically available this way, + # but undocumented. Only a subset are currently included here. + def read(self, size: int | None = -1, /) -> str: ... + def readable(self) -> bool: ... + def readline(self, size: int = -1, /) -> str: ... + def readlines(self, hint: int = -1, /) -> list[str]: ... + def writable(self) -> bool: ... + def write(self, s: str, /) -> int: ... + def writelines(self, lines: Iterable[str], /) -> None: ... + +def popen(cmd: str, mode: str = "r", buffering: int = -1) -> _wrap_close: ... +def spawnl(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... +def spawnle(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: ... # Imprecise sig + +if sys.platform != "win32": + def spawnv(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: ... + def spawnve(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: ... + +else: + def spawnv(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, /) -> int: ... + def spawnve(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, env: _ExecEnv, /) -> int: ... + +def system(command: StrOrBytesPath) -> int: ... 
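Illustrative aside (editor's sketch, not part of the vendored stub): popen() returns the _wrap_close wrapper declared above, a context manager whose read/readline methods are forwarded from the wrapped TextIOWrapper; close() returns None on success, otherwise the exit status. The snippet assumes an 'echo' command on PATH.

import os

with os.popen("echo hello") as proc:
    output = proc.read()              # forwarded TextIOWrapper.read
print(output.strip())                 # 'hello'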
+@final +class times_result(structseq[float], tuple[float, float, float, float, float]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("user", "system", "children_user", "children_system", "elapsed") + + @property + def user(self) -> float: ... + @property + def system(self) -> float: ... + @property + def children_user(self) -> float: ... + @property + def children_system(self) -> float: ... + @property + def elapsed(self) -> float: ... + +def times() -> times_result: ... +def waitpid(pid: int, options: int, /) -> tuple[int, int]: ... + +if sys.platform == "win32": + if sys.version_info >= (3, 10): + def startfile( + filepath: StrOrBytesPath, + operation: str = ..., + arguments: str = "", + cwd: StrOrBytesPath | None = None, + show_cmd: int = 1, + ) -> None: ... + else: + def startfile(filepath: StrOrBytesPath, operation: str = ...) -> None: ... + +else: + def spawnlp(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... + def spawnlpe(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: ... # Imprecise signature + def spawnvp(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: ... + def spawnvpe(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: ... + def wait() -> tuple[int, int]: ... # Unix only + # Added to MacOS in 3.13 + if sys.platform != "darwin" or sys.version_info >= (3, 13): + @final + class waitid_result(structseq[int], tuple[int, int, int, int, int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("si_pid", "si_uid", "si_signo", "si_status", "si_code") + + @property + def si_pid(self) -> int: ... + @property + def si_uid(self) -> int: ... + @property + def si_signo(self) -> int: ... + @property + def si_status(self) -> int: ... + @property + def si_code(self) -> int: ... + + def waitid(idtype: int, ident: int, options: int, /) -> waitid_result | None: ... + + from resource import struct_rusage + + def wait3(options: int) -> tuple[int, int, struct_rusage]: ... + def wait4(pid: int, options: int) -> tuple[int, int, struct_rusage]: ... + def WCOREDUMP(status: int, /) -> bool: ... + def WIFCONTINUED(status: int) -> bool: ... + def WIFSTOPPED(status: int) -> bool: ... + def WIFSIGNALED(status: int) -> bool: ... + def WIFEXITED(status: int) -> bool: ... + def WEXITSTATUS(status: int) -> int: ... + def WSTOPSIG(status: int) -> int: ... + def WTERMSIG(status: int) -> int: ... + def posix_spawn( + path: StrOrBytesPath, + argv: _ExecVArgs, + env: _ExecEnv, + /, + *, + file_actions: Sequence[tuple[Any, ...]] | None = ..., + setpgroup: int | None = ..., + resetids: bool = ..., + setsid: bool = ..., + setsigmask: Iterable[int] = ..., + setsigdef: Iterable[int] = ..., + scheduler: tuple[Any, sched_param] | None = ..., + ) -> int: ... + def posix_spawnp( + path: StrOrBytesPath, + argv: _ExecVArgs, + env: _ExecEnv, + /, + *, + file_actions: Sequence[tuple[Any, ...]] | None = ..., + setpgroup: int | None = ..., + resetids: bool = ..., + setsid: bool = ..., + setsigmask: Iterable[int] = ..., + setsigdef: Iterable[int] = ..., + scheduler: tuple[Any, sched_param] | None = ..., + ) -> int: ... + POSIX_SPAWN_OPEN: Final = 0 + POSIX_SPAWN_CLOSE: Final = 1 + POSIX_SPAWN_DUP2: Final = 2 + +if sys.platform != "win32": + @final + class sched_param(structseq[int], tuple[int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("sched_priority",) + + def __new__(cls, sched_priority: int) -> Self: ... + @property + def sched_priority(self) -> int: ... 
+ + def sched_get_priority_min(policy: int) -> int: ... # some flavors of Unix + def sched_get_priority_max(policy: int) -> int: ... # some flavors of Unix + def sched_yield() -> None: ... # some flavors of Unix + if sys.platform != "darwin": + def sched_setscheduler(pid: int, policy: int, param: sched_param, /) -> None: ... # some flavors of Unix + def sched_getscheduler(pid: int, /) -> int: ... # some flavors of Unix + def sched_rr_get_interval(pid: int, /) -> float: ... # some flavors of Unix + def sched_setparam(pid: int, param: sched_param, /) -> None: ... # some flavors of Unix + def sched_getparam(pid: int, /) -> sched_param: ... # some flavors of Unix + def sched_setaffinity(pid: int, mask: Iterable[int], /) -> None: ... # some flavors of Unix + def sched_getaffinity(pid: int, /) -> set[int]: ... # some flavors of Unix + +def cpu_count() -> int | None: ... + +if sys.version_info >= (3, 13): + # Documented to return `int | None`, but falls back to `len(sched_getaffinity(0))` when + # available. See https://github.com/python/cpython/blob/417c130/Lib/os.py#L1175-L1186. + if sys.platform != "win32" and sys.platform != "darwin": + def process_cpu_count() -> int: ... + else: + def process_cpu_count() -> int | None: ... + +if sys.platform != "win32": + # Unix only + def confstr(name: str | int, /) -> str | None: ... + def getloadavg() -> tuple[float, float, float]: ... + def sysconf(name: str | int, /) -> int: ... + +if sys.platform == "linux": + def getrandom(size: int, flags: int = 0) -> bytes: ... + +def urandom(size: int, /) -> bytes: ... + +if sys.platform != "win32": + def register_at_fork( + *, + before: Callable[..., Any] | None = ..., + after_in_parent: Callable[..., Any] | None = ..., + after_in_child: Callable[..., Any] | None = ..., + ) -> None: ... + +if sys.platform == "win32": + class _AddedDllDirectory: + path: str | None + def __init__(self, path: str | None, cookie: _T, remove_dll_directory: Callable[[_T], object]) -> None: ... + def close(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + + def add_dll_directory(path: str) -> _AddedDllDirectory: ... + +if sys.platform == "linux": + MFD_CLOEXEC: Final[int] + MFD_ALLOW_SEALING: Final[int] + MFD_HUGETLB: Final[int] + MFD_HUGE_SHIFT: Final[int] + MFD_HUGE_MASK: Final[int] + MFD_HUGE_64KB: Final[int] + MFD_HUGE_512KB: Final[int] + MFD_HUGE_1MB: Final[int] + MFD_HUGE_2MB: Final[int] + MFD_HUGE_8MB: Final[int] + MFD_HUGE_16MB: Final[int] + MFD_HUGE_32MB: Final[int] + MFD_HUGE_256MB: Final[int] + MFD_HUGE_512MB: Final[int] + MFD_HUGE_1GB: Final[int] + MFD_HUGE_2GB: Final[int] + MFD_HUGE_16GB: Final[int] + def memfd_create(name: str, flags: int = ...) -> int: ... + def copy_file_range(src: int, dst: int, count: int, offset_src: int | None = ..., offset_dst: int | None = ...) -> int: ... + +def waitstatus_to_exitcode(status: int) -> int: ... + +if sys.platform == "linux": + def pidfd_open(pid: int, flags: int = ...) -> int: ... + +if sys.version_info >= (3, 12) and sys.platform == "linux": + PIDFD_NONBLOCK: Final = 2048 + +if sys.version_info >= (3, 12) and sys.platform == "win32": + def listdrives() -> list[str]: ... + def listmounts(volume: str) -> list[str]: ... + def listvolumes() -> list[str]: ... 
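Illustrative aside (editor's sketch, not part of the vendored stub): cpu_count() may return None, and, as the comment above notes, 3.13's process_cpu_count() falls back to len(sched_getaffinity(0)) where that call exists; the hasattr guard below keeps the sketch portable to platforms without sched_getaffinity.

import os

total = os.cpu_count() or 1                    # int | None -> default to 1 if unknown
if hasattr(os, "sched_getaffinity"):           # Linux and some other Unixes only
    usable = len(os.sched_getaffinity(0))      # CPUs this process may actually run on
else:
    usable = total
print(total, usable)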
+ +if sys.version_info >= (3, 10) and sys.platform == "linux": + EFD_CLOEXEC: Final[int] + EFD_NONBLOCK: Final[int] + EFD_SEMAPHORE: Final[int] + SPLICE_F_MORE: Final[int] + SPLICE_F_MOVE: Final[int] + SPLICE_F_NONBLOCK: Final[int] + def eventfd(initval: int, flags: int = 524288) -> FileDescriptor: ... + def eventfd_read(fd: FileDescriptor) -> int: ... + def eventfd_write(fd: FileDescriptor, value: int) -> None: ... + def splice( + src: FileDescriptor, + dst: FileDescriptor, + count: int, + offset_src: int | None = ..., + offset_dst: int | None = ..., + flags: int = 0, + ) -> int: ... + +if sys.version_info >= (3, 12) and sys.platform == "linux": + CLONE_FILES: Final[int] + CLONE_FS: Final[int] + CLONE_NEWCGROUP: Final[int] # Linux 4.6+ + CLONE_NEWIPC: Final[int] # Linux 2.6.19+ + CLONE_NEWNET: Final[int] # Linux 2.6.24+ + CLONE_NEWNS: Final[int] + CLONE_NEWPID: Final[int] # Linux 3.8+ + CLONE_NEWTIME: Final[int] # Linux 5.6+ + CLONE_NEWUSER: Final[int] # Linux 3.8+ + CLONE_NEWUTS: Final[int] # Linux 2.6.19+ + CLONE_SIGHAND: Final[int] + CLONE_SYSVSEM: Final[int] # Linux 2.6.26+ + CLONE_THREAD: Final[int] + CLONE_VM: Final[int] + def unshare(flags: int) -> None: ... + def setns(fd: FileDescriptorLike, nstype: int = 0) -> None: ... + +if sys.version_info >= (3, 13) and sys.platform != "win32": + def posix_openpt(oflag: int, /) -> int: ... + def grantpt(fd: FileDescriptorLike, /) -> None: ... + def unlockpt(fd: FileDescriptorLike, /) -> None: ... + def ptsname(fd: FileDescriptorLike, /) -> str: ... + +if sys.version_info >= (3, 13) and sys.platform == "linux": + TFD_TIMER_ABSTIME: Final = 1 + TFD_TIMER_CANCEL_ON_SET: Final = 2 + TFD_NONBLOCK: Final[int] + TFD_CLOEXEC: Final[int] + POSIX_SPAWN_CLOSEFROM: Final[int] + + def timerfd_create(clockid: int, /, *, flags: int = 0) -> int: ... + def timerfd_settime( + fd: FileDescriptor, /, *, flags: int = 0, initial: float = 0.0, interval: float = 0.0 + ) -> tuple[float, float]: ... + def timerfd_settime_ns(fd: FileDescriptor, /, *, flags: int = 0, initial: int = 0, interval: int = 0) -> tuple[int, int]: ... + def timerfd_gettime(fd: FileDescriptor, /) -> tuple[float, float]: ... + def timerfd_gettime_ns(fd: FileDescriptor, /) -> tuple[int, int]: ... + +if sys.version_info >= (3, 13) or sys.platform != "win32": + # Added to Windows in 3.13. + def fchmod(fd: int, mode: int) -> None: ... + +if sys.platform != "linux": + if sys.version_info >= (3, 13) or sys.platform != "win32": + # Added to Windows in 3.13. + def lchmod(path: StrOrBytesPath, mode: int) -> None: ... 
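eventfd and its helpers above are Linux-only kernel counters useful for lightweight signalling between threads or processes. An illustrative sketch (not part of the diff):

import os
import sys

if sys.platform == "linux" and sys.version_info >= (3, 10):
    fd = os.eventfd(0)             # 64-bit kernel counter, initially 0
    os.eventfd_write(fd, 3)        # add 3 to the counter
    print(os.eventfd_read(fd))     # reads 3 and resets the counter to 0
    os.close(fd)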
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/os/path.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/os/path.pyi new file mode 100644 index 0000000..dc688a9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/os/path.pyi @@ -0,0 +1,8 @@ +import sys + +if sys.platform == "win32": + from ntpath import * + from ntpath import __all__ as __all__ +else: + from posixpath import * + from posixpath import __all__ as __all__ diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ossaudiodev.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ossaudiodev.pyi new file mode 100644 index 0000000..f8230b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ossaudiodev.pyi @@ -0,0 +1,132 @@ +import sys +from typing import Any, Final, Literal, overload + +if sys.platform != "win32" and sys.platform != "darwin": + # Depends on soundcard.h + AFMT_AC3: Final[int] + AFMT_A_LAW: Final[int] + AFMT_IMA_ADPCM: Final[int] + AFMT_MPEG: Final[int] + AFMT_MU_LAW: Final[int] + AFMT_QUERY: Final[int] + AFMT_S16_BE: Final[int] + AFMT_S16_LE: Final[int] + AFMT_S16_NE: Final[int] + AFMT_S8: Final[int] + AFMT_U16_BE: Final[int] + AFMT_U16_LE: Final[int] + AFMT_U8: Final[int] + SNDCTL_COPR_HALT: Final[int] + SNDCTL_COPR_LOAD: Final[int] + SNDCTL_COPR_RCODE: Final[int] + SNDCTL_COPR_RCVMSG: Final[int] + SNDCTL_COPR_RDATA: Final[int] + SNDCTL_COPR_RESET: Final[int] + SNDCTL_COPR_RUN: Final[int] + SNDCTL_COPR_SENDMSG: Final[int] + SNDCTL_COPR_WCODE: Final[int] + SNDCTL_COPR_WDATA: Final[int] + SNDCTL_DSP_BIND_CHANNEL: Final[int] + SNDCTL_DSP_CHANNELS: Final[int] + SNDCTL_DSP_GETBLKSIZE: Final[int] + SNDCTL_DSP_GETCAPS: Final[int] + SNDCTL_DSP_GETCHANNELMASK: Final[int] + SNDCTL_DSP_GETFMTS: Final[int] + SNDCTL_DSP_GETIPTR: Final[int] + SNDCTL_DSP_GETISPACE: Final[int] + SNDCTL_DSP_GETODELAY: Final[int] + SNDCTL_DSP_GETOPTR: Final[int] + SNDCTL_DSP_GETOSPACE: Final[int] + SNDCTL_DSP_GETSPDIF: Final[int] + SNDCTL_DSP_GETTRIGGER: Final[int] + SNDCTL_DSP_MAPINBUF: Final[int] + SNDCTL_DSP_MAPOUTBUF: Final[int] + SNDCTL_DSP_NONBLOCK: Final[int] + SNDCTL_DSP_POST: Final[int] + SNDCTL_DSP_PROFILE: Final[int] + SNDCTL_DSP_RESET: Final[int] + SNDCTL_DSP_SAMPLESIZE: Final[int] + SNDCTL_DSP_SETDUPLEX: Final[int] + SNDCTL_DSP_SETFMT: Final[int] + SNDCTL_DSP_SETFRAGMENT: Final[int] + SNDCTL_DSP_SETSPDIF: Final[int] + SNDCTL_DSP_SETSYNCRO: Final[int] + SNDCTL_DSP_SETTRIGGER: Final[int] + SNDCTL_DSP_SPEED: Final[int] + SNDCTL_DSP_STEREO: Final[int] + SNDCTL_DSP_SUBDIVIDE: Final[int] + SNDCTL_DSP_SYNC: Final[int] + SNDCTL_FM_4OP_ENABLE: Final[int] + SNDCTL_FM_LOAD_INSTR: Final[int] + SNDCTL_MIDI_INFO: Final[int] + SNDCTL_MIDI_MPUCMD: Final[int] + SNDCTL_MIDI_MPUMODE: Final[int] + SNDCTL_MIDI_PRETIME: Final[int] + SNDCTL_SEQ_CTRLRATE: Final[int] + SNDCTL_SEQ_GETINCOUNT: Final[int] + SNDCTL_SEQ_GETOUTCOUNT: Final[int] + SNDCTL_SEQ_GETTIME: Final[int] + SNDCTL_SEQ_NRMIDIS: Final[int] + SNDCTL_SEQ_NRSYNTHS: Final[int] + SNDCTL_SEQ_OUTOFBAND: Final[int] + SNDCTL_SEQ_PANIC: Final[int] + SNDCTL_SEQ_PERCMODE: Final[int] + SNDCTL_SEQ_RESET: Final[int] + SNDCTL_SEQ_RESETSAMPLES: Final[int] + SNDCTL_SEQ_SYNC: Final[int] + SNDCTL_SEQ_TESTMIDI: Final[int] + SNDCTL_SEQ_THRESHOLD: Final[int] + SNDCTL_SYNTH_CONTROL: Final[int] + SNDCTL_SYNTH_ID: Final[int] + SNDCTL_SYNTH_INFO: Final[int] + SNDCTL_SYNTH_MEMAVL: Final[int] + SNDCTL_SYNTH_REMOVESAMPLE: Final[int] + SNDCTL_TMR_CONTINUE: Final[int] + SNDCTL_TMR_METRONOME: Final[int] + SNDCTL_TMR_SELECT: 
Final[int] + SNDCTL_TMR_SOURCE: Final[int] + SNDCTL_TMR_START: Final[int] + SNDCTL_TMR_STOP: Final[int] + SNDCTL_TMR_TEMPO: Final[int] + SNDCTL_TMR_TIMEBASE: Final[int] + SOUND_MIXER_ALTPCM: Final[int] + SOUND_MIXER_BASS: Final[int] + SOUND_MIXER_CD: Final[int] + SOUND_MIXER_DIGITAL1: Final[int] + SOUND_MIXER_DIGITAL2: Final[int] + SOUND_MIXER_DIGITAL3: Final[int] + SOUND_MIXER_IGAIN: Final[int] + SOUND_MIXER_IMIX: Final[int] + SOUND_MIXER_LINE: Final[int] + SOUND_MIXER_LINE1: Final[int] + SOUND_MIXER_LINE2: Final[int] + SOUND_MIXER_LINE3: Final[int] + SOUND_MIXER_MIC: Final[int] + SOUND_MIXER_MONITOR: Final[int] + SOUND_MIXER_NRDEVICES: Final[int] + SOUND_MIXER_OGAIN: Final[int] + SOUND_MIXER_PCM: Final[int] + SOUND_MIXER_PHONEIN: Final[int] + SOUND_MIXER_PHONEOUT: Final[int] + SOUND_MIXER_RADIO: Final[int] + SOUND_MIXER_RECLEV: Final[int] + SOUND_MIXER_SPEAKER: Final[int] + SOUND_MIXER_SYNTH: Final[int] + SOUND_MIXER_TREBLE: Final[int] + SOUND_MIXER_VIDEO: Final[int] + SOUND_MIXER_VOLUME: Final[int] + + control_labels: list[str] + control_names: list[str] + + # TODO: oss_audio_device return type + @overload + def open(mode: Literal["r", "w", "rw"]) -> Any: ... + @overload + def open(device: str, mode: Literal["r", "w", "rw"]) -> Any: ... + + # TODO: oss_mixer_device return type + def openmixer(device: str = ...) -> Any: ... + + class OSSAudioError(Exception): ... + error = OSSAudioError diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/parser.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/parser.pyi new file mode 100644 index 0000000..9b287fc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/parser.pyi @@ -0,0 +1,25 @@ +from _typeshed import StrOrBytesPath +from collections.abc import Sequence +from types import CodeType +from typing import Any, ClassVar, final + +def expr(source: str) -> STType: ... +def suite(source: str) -> STType: ... +def sequence2st(sequence: Sequence[Any]) -> STType: ... +def tuple2st(sequence: Sequence[Any]) -> STType: ... +def st2list(st: STType, line_info: bool = False, col_info: bool = False) -> list[Any]: ... +def st2tuple(st: STType, line_info: bool = False, col_info: bool = False) -> tuple[Any, ...]: ... +def compilest(st: STType, filename: StrOrBytesPath = ...) -> CodeType: ... +def isexpr(st: STType) -> bool: ... +def issuite(st: STType) -> bool: ... + +class ParserError(Exception): ... + +@final +class STType: + __hash__: ClassVar[None] # type: ignore[assignment] + def compile(self, filename: StrOrBytesPath = ...) -> CodeType: ... + def isexpr(self) -> bool: ... + def issuite(self) -> bool: ... + def tolist(self, line_info: bool = False, col_info: bool = False) -> list[Any]: ... + def totuple(self, line_info: bool = False, col_info: bool = False) -> tuple[Any, ...]: ... 
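The parser stub above describes the legacy CPython parser module, which was removed in Python 3.10. An illustrative sketch of its historical API, runnable only on Python 3.9 and earlier (not part of the diff):

import parser  # removed in Python 3.10; use the ast module on newer versions

st = parser.expr("1 + 2")          # parse a single expression into an ST object
assert st.isexpr() and not st.issuite()
code = st.compile("<example>")     # compile the syntax tree to a code object
print(eval(code))                  # 3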
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pathlib/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pathlib/__init__.pyi new file mode 100644 index 0000000..fa5143f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pathlib/__init__.pyi @@ -0,0 +1,355 @@ +import sys +import types +from _typeshed import ( + OpenBinaryMode, + OpenBinaryModeReading, + OpenBinaryModeUpdating, + OpenBinaryModeWriting, + OpenTextMode, + ReadableBuffer, + StrOrBytesPath, + StrPath, + Unused, +) +from collections.abc import Callable, Generator, Iterator, Sequence +from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper +from os import PathLike, stat_result +from types import GenericAlias, TracebackType +from typing import IO, Any, BinaryIO, ClassVar, Literal, TypeVar, overload +from typing_extensions import Never, Self, deprecated + +_PathT = TypeVar("_PathT", bound=PurePath) + +__all__ = ["PurePath", "PurePosixPath", "PureWindowsPath", "Path", "PosixPath", "WindowsPath"] + +if sys.version_info >= (3, 14): + from pathlib.types import PathInfo + +if sys.version_info >= (3, 13): + __all__ += ["UnsupportedOperation"] + +class PurePath(PathLike[str]): + if sys.version_info >= (3, 13): + __slots__ = ( + "_raw_paths", + "_drv", + "_root", + "_tail_cached", + "_str", + "_str_normcase_cached", + "_parts_normcase_cached", + "_hash", + ) + elif sys.version_info >= (3, 12): + __slots__ = ( + "_raw_paths", + "_drv", + "_root", + "_tail_cached", + "_str", + "_str_normcase_cached", + "_parts_normcase_cached", + "_lines_cached", + "_hash", + ) + else: + __slots__ = ("_drv", "_root", "_parts", "_str", "_hash", "_pparts", "_cached_cparts") + if sys.version_info >= (3, 13): + parser: ClassVar[types.ModuleType] + def full_match(self, pattern: StrPath, *, case_sensitive: bool | None = None) -> bool: ... + + @property + def parts(self) -> tuple[str, ...]: ... + @property + def drive(self) -> str: ... + @property + def root(self) -> str: ... + @property + def anchor(self) -> str: ... + @property + def name(self) -> str: ... + @property + def suffix(self) -> str: ... + @property + def suffixes(self) -> list[str]: ... + @property + def stem(self) -> str: ... + if sys.version_info >= (3, 12): + def __new__(cls, *args: StrPath, **kwargs: Unused) -> Self: ... + def __init__(self, *args: StrPath) -> None: ... # pyright: ignore[reportInconsistentConstructor] + else: + def __new__(cls, *args: StrPath) -> Self: ... + + def __hash__(self) -> int: ... + def __fspath__(self) -> str: ... + def __lt__(self, other: PurePath) -> bool: ... + def __le__(self, other: PurePath) -> bool: ... + def __gt__(self, other: PurePath) -> bool: ... + def __ge__(self, other: PurePath) -> bool: ... + def __truediv__(self, key: StrPath) -> Self: ... + def __rtruediv__(self, key: StrPath) -> Self: ... + def __bytes__(self) -> bytes: ... + def as_posix(self) -> str: ... + def as_uri(self) -> str: ... + def is_absolute(self) -> bool: ... + if sys.version_info >= (3, 13): + @deprecated( + "Deprecated since Python 3.13; will be removed in Python 3.15. " + "Use `os.path.isreserved()` to detect reserved paths on Windows." + ) + def is_reserved(self) -> bool: ... + else: + def is_reserved(self) -> bool: ... + if sys.version_info >= (3, 14): + def is_relative_to(self, other: StrPath) -> bool: ... + elif sys.version_info >= (3, 12): + def is_relative_to(self, other: StrPath, /, *_deprecated: StrPath) -> bool: ... + else: + def is_relative_to(self, *other: StrPath) -> bool: ... 
+ + if sys.version_info >= (3, 12): + def match(self, path_pattern: str, *, case_sensitive: bool | None = None) -> bool: ... + else: + def match(self, path_pattern: str) -> bool: ... + + if sys.version_info >= (3, 14): + def relative_to(self, other: StrPath, *, walk_up: bool = False) -> Self: ... + elif sys.version_info >= (3, 12): + def relative_to(self, other: StrPath, /, *_deprecated: StrPath, walk_up: bool = False) -> Self: ... + else: + def relative_to(self, *other: StrPath) -> Self: ... + + def with_name(self, name: str) -> Self: ... + def with_stem(self, stem: str) -> Self: ... + def with_suffix(self, suffix: str) -> Self: ... + def joinpath(self, *other: StrPath) -> Self: ... + @property + def parents(self) -> Sequence[Self]: ... + @property + def parent(self) -> Self: ... + if sys.version_info < (3, 11): + def __class_getitem__(cls, type: Any) -> GenericAlias: ... + + if sys.version_info >= (3, 12): + def with_segments(self, *args: StrPath) -> Self: ... + +class PurePosixPath(PurePath): + __slots__ = () + +class PureWindowsPath(PurePath): + __slots__ = () + +class Path(PurePath): + if sys.version_info >= (3, 14): + __slots__ = ("_info",) + elif sys.version_info >= (3, 10): + __slots__ = () + else: + __slots__ = ("_accessor",) + + if sys.version_info >= (3, 12): + def __new__(cls, *args: StrPath, **kwargs: Unused) -> Self: ... # pyright: ignore[reportInconsistentConstructor] + else: + def __new__(cls, *args: StrPath, **kwargs: Unused) -> Self: ... + + @classmethod + def cwd(cls) -> Self: ... + if sys.version_info >= (3, 10): + def stat(self, *, follow_symlinks: bool = True) -> stat_result: ... + def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: ... + else: + def stat(self) -> stat_result: ... + def chmod(self, mode: int) -> None: ... + + if sys.version_info >= (3, 13): + @classmethod + def from_uri(cls, uri: str) -> Self: ... + def is_dir(self, *, follow_symlinks: bool = True) -> bool: ... + def is_file(self, *, follow_symlinks: bool = True) -> bool: ... + def read_text(self, encoding: str | None = None, errors: str | None = None, newline: str | None = None) -> str: ... + else: + def __enter__(self) -> Self: ... + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + def is_dir(self) -> bool: ... + def is_file(self) -> bool: ... + def read_text(self, encoding: str | None = None, errors: str | None = None) -> str: ... + + if sys.version_info >= (3, 13): + def glob(self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False) -> Iterator[Self]: ... + def rglob( + self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False + ) -> Iterator[Self]: ... + elif sys.version_info >= (3, 12): + def glob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: ... + def rglob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: ... + else: + def glob(self, pattern: str) -> Generator[Self, None, None]: ... + def rglob(self, pattern: str) -> Generator[Self, None, None]: ... + + if sys.version_info >= (3, 12): + def exists(self, *, follow_symlinks: bool = True) -> bool: ... + else: + def exists(self) -> bool: ... + + def is_symlink(self) -> bool: ... + def is_socket(self) -> bool: ... + def is_fifo(self) -> bool: ... + def is_block_device(self) -> bool: ... + def is_char_device(self) -> bool: ... + if sys.version_info >= (3, 12): + def is_junction(self) -> bool: ... 
+ + def iterdir(self) -> Generator[Self, None, None]: ... + def lchmod(self, mode: int) -> None: ... + def lstat(self) -> stat_result: ... + def mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False) -> None: ... + + if sys.version_info >= (3, 14): + @property + def info(self) -> PathInfo: ... + @overload + def move_into(self, target_dir: _PathT) -> _PathT: ... # type: ignore[overload-overlap] + @overload + def move_into(self, target_dir: StrPath) -> Self: ... # type: ignore[overload-overlap] + @overload + def move(self, target: _PathT) -> _PathT: ... # type: ignore[overload-overlap] + @overload + def move(self, target: StrPath) -> Self: ... # type: ignore[overload-overlap] + @overload + def copy_into(self, target_dir: _PathT, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> _PathT: ... # type: ignore[overload-overlap] + @overload + def copy_into(self, target_dir: StrPath, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> Self: ... # type: ignore[overload-overlap] + @overload + def copy(self, target: _PathT, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> _PathT: ... # type: ignore[overload-overlap] + @overload + def copy(self, target: StrPath, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> Self: ... # type: ignore[overload-overlap] + + # Adapted from builtins.open + # Text mode: always returns a TextIOWrapper + # The Traversable .open in stdlib/importlib/abc.pyi should be kept in sync with this. + @overload + def open( + self, + mode: OpenTextMode = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> TextIOWrapper: ... + # Unbuffered binary mode: returns a FileIO + @overload + def open( + self, mode: OpenBinaryMode, buffering: Literal[0], encoding: None = None, errors: None = None, newline: None = None + ) -> FileIO: ... + # Buffering is on: return BufferedRandom, BufferedReader, or BufferedWriter + @overload + def open( + self, + mode: OpenBinaryModeUpdating, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + ) -> BufferedRandom: ... + @overload + def open( + self, + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + ) -> BufferedWriter: ... + @overload + def open( + self, + mode: OpenBinaryModeReading, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + ) -> BufferedReader: ... + # Buffering cannot be determined: fall back to BinaryIO + @overload + def open( + self, mode: OpenBinaryMode, buffering: int = -1, encoding: None = None, errors: None = None, newline: None = None + ) -> BinaryIO: ... + # Fallback if mode is not specified + @overload + def open( + self, mode: str, buffering: int = -1, encoding: str | None = None, errors: str | None = None, newline: str | None = None + ) -> IO[Any]: ... + + # These methods do "exist" on Windows, but they always raise NotImplementedError. + if sys.platform == "win32": + if sys.version_info >= (3, 13): + # raises UnsupportedOperation: + def owner(self: Never, *, follow_symlinks: bool = True) -> str: ... # type: ignore[misc] + def group(self: Never, *, follow_symlinks: bool = True) -> str: ... # type: ignore[misc] + else: + def owner(self: Never) -> str: ... # type: ignore[misc] + def group(self: Never) -> str: ... 
# type: ignore[misc] + else: + if sys.version_info >= (3, 13): + def owner(self, *, follow_symlinks: bool = True) -> str: ... + def group(self, *, follow_symlinks: bool = True) -> str: ... + else: + def owner(self) -> str: ... + def group(self) -> str: ... + + # This method does "exist" on Windows on <3.12, but always raises NotImplementedError + # On py312+, it works properly on Windows, as with all other platforms + if sys.platform == "win32" and sys.version_info < (3, 12): + def is_mount(self: Never) -> bool: ... # type: ignore[misc] + else: + def is_mount(self) -> bool: ... + + def readlink(self) -> Self: ... + + if sys.version_info >= (3, 10): + def rename(self, target: StrPath) -> Self: ... + def replace(self, target: StrPath) -> Self: ... + else: + def rename(self, target: str | PurePath) -> Self: ... + def replace(self, target: str | PurePath) -> Self: ... + + def resolve(self, strict: bool = False) -> Self: ... + def rmdir(self) -> None: ... + def symlink_to(self, target: StrOrBytesPath, target_is_directory: bool = False) -> None: ... + if sys.version_info >= (3, 10): + def hardlink_to(self, target: StrOrBytesPath) -> None: ... + + def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: ... + def unlink(self, missing_ok: bool = False) -> None: ... + @classmethod + def home(cls) -> Self: ... + def absolute(self) -> Self: ... + def expanduser(self) -> Self: ... + def read_bytes(self) -> bytes: ... + def samefile(self, other_path: StrPath) -> bool: ... + def write_bytes(self, data: ReadableBuffer) -> int: ... + if sys.version_info >= (3, 10): + def write_text( + self, data: str, encoding: str | None = None, errors: str | None = None, newline: str | None = None + ) -> int: ... + else: + def write_text(self, data: str, encoding: str | None = None, errors: str | None = None) -> int: ... + if sys.version_info < (3, 12): + if sys.version_info >= (3, 10): + @deprecated("Deprecated since Python 3.10; removed in Python 3.12. Use `hardlink_to()` instead.") + def link_to(self, target: StrOrBytesPath) -> None: ... + else: + def link_to(self, target: StrOrBytesPath) -> None: ... + if sys.version_info >= (3, 12): + def walk( + self, top_down: bool = True, on_error: Callable[[OSError], object] | None = None, follow_symlinks: bool = False + ) -> Iterator[tuple[Self, list[str], list[str]]]: ... + +class PosixPath(Path, PurePosixPath): + __slots__ = () + +class WindowsPath(Path, PureWindowsPath): + __slots__ = () + +if sys.version_info >= (3, 13): + class UnsupportedOperation(NotImplementedError): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pathlib/types.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pathlib/types.pyi new file mode 100644 index 0000000..9f9a650 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pathlib/types.pyi @@ -0,0 +1,8 @@ +from typing import Protocol, runtime_checkable + +@runtime_checkable +class PathInfo(Protocol): + def exists(self, *, follow_symlinks: bool = True) -> bool: ... + def is_dir(self, *, follow_symlinks: bool = True) -> bool: ... + def is_file(self, *, follow_symlinks: bool = True) -> bool: ... + def is_symlink(self) -> bool: ... 
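The pathlib stubs above type both the pure-path algebra and the concrete I/O methods, including the open() overloads that pick the return type (TextIOWrapper, BufferedReader, and so on) from the mode string. An illustrative sketch (not part of the diff; example.txt is just an assumed file name):

from pathlib import Path

p = Path("example.txt")
chars = p.write_text("hello\n")     # returns the number of characters written
with p.open("rb") as f:             # binary read mode resolves to the BufferedReader overload
    data = f.read()
print(chars, data, p.suffix, p.with_suffix(".log").name)
p.unlink()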
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pdb.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pdb.pyi new file mode 100644 index 0000000..2f114b2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pdb.pyi @@ -0,0 +1,259 @@ +import signal +import sys +from bdb import Bdb, _Backend +from cmd import Cmd +from collections.abc import Callable, Iterable, Mapping, Sequence +from inspect import _SourceObjectType +from linecache import _ModuleGlobals +from rlcompleter import Completer +from types import CodeType, FrameType, TracebackType +from typing import IO, Any, ClassVar, Final, Literal, TypeVar +from typing_extensions import ParamSpec, Self, TypeAlias + +__all__ = ["run", "pm", "Pdb", "runeval", "runctx", "runcall", "set_trace", "post_mortem", "help"] +if sys.version_info >= (3, 14): + __all__ += ["set_default_backend", "get_default_backend"] + +_T = TypeVar("_T") +_P = ParamSpec("_P") +_Mode: TypeAlias = Literal["inline", "cli"] + +line_prefix: Final[str] # undocumented + +class Restart(Exception): ... + +def run(statement: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: ... +def runeval(expression: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> Any: ... +def runctx(statement: str, globals: dict[str, Any], locals: Mapping[str, Any]) -> None: ... +def runcall(func: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> _T | None: ... + +if sys.version_info >= (3, 14): + def set_default_backend(backend: _Backend) -> None: ... + def get_default_backend() -> _Backend: ... + def set_trace(*, header: str | None = None, commands: Iterable[str] | None = None) -> None: ... + async def set_trace_async(*, header: str | None = None, commands: Iterable[str] | None = None) -> None: ... + +else: + def set_trace(*, header: str | None = None) -> None: ... + +def post_mortem(t: TracebackType | None = None) -> None: ... +def pm() -> None: ... + +class Pdb(Bdb, Cmd): + # Everything here is undocumented, except for __init__ + + commands_resuming: ClassVar[list[str]] + + if sys.version_info >= (3, 13): + MAX_CHAINED_EXCEPTION_DEPTH: Final = 999 + + aliases: dict[str, str] + mainpyfile: str + _wait_for_mainpyfile: bool + rcLines: list[str] + commands: dict[int, list[str]] + commands_doprompt: dict[int, bool] + commands_silent: dict[int, bool] + commands_defining: bool + commands_bnum: int | None + lineno: int | None + stack: list[tuple[FrameType, int]] + curindex: int + curframe: FrameType | None + curframe_locals: Mapping[str, Any] + if sys.version_info >= (3, 14): + mode: _Mode | None + colorize: bool + def __init__( + self, + completekey: str = "tab", + stdin: IO[str] | None = None, + stdout: IO[str] | None = None, + skip: Iterable[str] | None = None, + nosigint: bool = False, + readrc: bool = True, + mode: _Mode | None = None, + backend: _Backend | None = None, + colorize: bool = False, + ) -> None: ... + else: + def __init__( + self, + completekey: str = "tab", + stdin: IO[str] | None = None, + stdout: IO[str] | None = None, + skip: Iterable[str] | None = None, + nosigint: bool = False, + readrc: bool = True, + ) -> None: ... + if sys.version_info >= (3, 14): + def set_trace(self, frame: FrameType | None = None, *, commands: Iterable[str] | None = None) -> None: ... + async def set_trace_async(self, frame: FrameType | None = None, *, commands: Iterable[str] | None = None) -> None: ... + + def forget(self) -> None: ... 
+ def setup(self, f: FrameType | None, tb: TracebackType | None) -> None: ... + if sys.version_info < (3, 11): + def execRcLines(self) -> None: ... + + if sys.version_info >= (3, 13): + user_opcode = Bdb.user_line + + def bp_commands(self, frame: FrameType) -> bool: ... + + if sys.version_info >= (3, 13): + def interaction(self, frame: FrameType | None, tb_or_exc: TracebackType | BaseException | None) -> None: ... + else: + def interaction(self, frame: FrameType | None, traceback: TracebackType | None) -> None: ... + + def displayhook(self, obj: object) -> None: ... + def handle_command_def(self, line: str) -> bool: ... + def defaultFile(self) -> str: ... + def lineinfo(self, identifier: str) -> tuple[None, None, None] | tuple[str, str, int]: ... + if sys.version_info >= (3, 14): + def checkline(self, filename: str, lineno: int, module_globals: _ModuleGlobals | None = None) -> int: ... + else: + def checkline(self, filename: str, lineno: int) -> int: ... + + def _getval(self, arg: str) -> object: ... + if sys.version_info >= (3, 14): + def print_stack_trace(self, count: int | None = None) -> None: ... + else: + def print_stack_trace(self) -> None: ... + + def print_stack_entry(self, frame_lineno: tuple[FrameType, int], prompt_prefix: str = "\n-> ") -> None: ... + def lookupmodule(self, filename: str) -> str | None: ... + if sys.version_info < (3, 11): + def _runscript(self, filename: str) -> None: ... + + if sys.version_info >= (3, 14): + def complete_multiline_names(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... + + if sys.version_info >= (3, 13): + def completedefault(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... + + def do_commands(self, arg: str) -> bool | None: ... + if sys.version_info >= (3, 14): + def do_break(self, arg: str, temporary: bool = False) -> bool | None: ... + else: + def do_break(self, arg: str, temporary: bool | Literal[0, 1] = 0) -> bool | None: ... + + def do_tbreak(self, arg: str) -> bool | None: ... + def do_enable(self, arg: str) -> bool | None: ... + def do_disable(self, arg: str) -> bool | None: ... + def do_condition(self, arg: str) -> bool | None: ... + def do_ignore(self, arg: str) -> bool | None: ... + def do_clear(self, arg: str) -> bool | None: ... + def do_where(self, arg: str) -> bool | None: ... + if sys.version_info >= (3, 13): + def do_exceptions(self, arg: str) -> bool | None: ... + + def do_up(self, arg: str) -> bool | None: ... + def do_down(self, arg: str) -> bool | None: ... + def do_until(self, arg: str) -> bool | None: ... + def do_step(self, arg: str) -> bool | None: ... + def do_next(self, arg: str) -> bool | None: ... + def do_run(self, arg: str) -> bool | None: ... + def do_return(self, arg: str) -> bool | None: ... + def do_continue(self, arg: str) -> bool | None: ... + def do_jump(self, arg: str) -> bool | None: ... + def do_debug(self, arg: str) -> bool | None: ... + def do_quit(self, arg: str) -> bool | None: ... + def do_EOF(self, arg: str) -> bool | None: ... + def do_args(self, arg: str) -> bool | None: ... + def do_retval(self, arg: str) -> bool | None: ... + def do_p(self, arg: str) -> bool | None: ... + def do_pp(self, arg: str) -> bool | None: ... + def do_list(self, arg: str) -> bool | None: ... + def do_whatis(self, arg: str) -> bool | None: ... + def do_alias(self, arg: str) -> bool | None: ... + def do_unalias(self, arg: str) -> bool | None: ... + def do_help(self, arg: str) -> bool | None: ... 
+ do_b = do_break + do_cl = do_clear + do_w = do_where + do_bt = do_where + do_u = do_up + do_d = do_down + do_unt = do_until + do_s = do_step + do_n = do_next + do_restart = do_run + do_r = do_return + do_c = do_continue + do_cont = do_continue + do_j = do_jump + do_q = do_quit + do_exit = do_quit + do_a = do_args + do_rv = do_retval + do_l = do_list + do_h = do_help + def help_exec(self) -> None: ... + def help_pdb(self) -> None: ... + def sigint_handler(self, signum: signal.Signals, frame: FrameType) -> None: ... + if sys.version_info >= (3, 13): + def message(self, msg: str, end: str = "\n") -> None: ... + else: + def message(self, msg: str) -> None: ... + + def error(self, msg: str) -> None: ... + if sys.version_info >= (3, 13): + def completenames(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... # type: ignore[override] + if sys.version_info >= (3, 12): + def set_convenience_variable(self, frame: FrameType, name: str, value: Any) -> None: ... + if sys.version_info >= (3, 13) and sys.version_info < (3, 14): + # Added in 3.13.8. + @property + def rlcompleter(self) -> type[Completer]: ... + + def _select_frame(self, number: int) -> None: ... + def _getval_except(self, arg: str, frame: FrameType | None = None) -> object: ... + def _print_lines( + self, lines: Sequence[str], start: int, breaks: Sequence[int] = (), frame: FrameType | None = None + ) -> None: ... + def _cmdloop(self) -> None: ... + def do_display(self, arg: str) -> bool | None: ... + def do_interact(self, arg: str) -> bool | None: ... + def do_longlist(self, arg: str) -> bool | None: ... + def do_source(self, arg: str) -> bool | None: ... + def do_undisplay(self, arg: str) -> bool | None: ... + do_ll = do_longlist + def _complete_location(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... + def _complete_bpnumber(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... + def _complete_expression(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... + def complete_undisplay(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... + def complete_unalias(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... + complete_commands = _complete_bpnumber + complete_break = _complete_location + complete_b = _complete_location + complete_tbreak = _complete_location + complete_enable = _complete_bpnumber + complete_disable = _complete_bpnumber + complete_condition = _complete_bpnumber + complete_ignore = _complete_bpnumber + complete_clear = _complete_location + complete_cl = _complete_location + complete_debug = _complete_expression + complete_print = _complete_expression + complete_p = _complete_expression + complete_pp = _complete_expression + complete_source = _complete_expression + complete_whatis = _complete_expression + complete_display = _complete_expression + + if sys.version_info < (3, 11): + def _runmodule(self, module_name: str) -> None: ... + +# undocumented + +def find_function(funcname: str, filename: str) -> tuple[str, str, int] | None: ... +def main() -> None: ... +def help() -> None: ... + +if sys.version_info < (3, 10): + def getsourcelines(obj: _SourceObjectType) -> tuple[list[str], int]: ... + +def lasti2lineno(code: CodeType, lasti: int) -> int: ... + +class _rstr(str): + def __repr__(self) -> Self: ... 
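The Pdb stub above types both the programmatic entry points (run, runcall, set_trace, post_mortem) and the interactive do_*/complete_* commands. An illustrative sketch of the programmatic surface (not part of the diff; the debugger calls are left commented out because they block waiting for interactive input):

import pdb

def divide(a: int, b: int) -> float:
    return a / b

# pdb.runcall(divide, 6, 3)   # would trace divide under the debugger; returns its result or None
# pdb.set_trace()             # would drop into the debugger at this line
print(divide(6, 3))           # runs normally, without the debugger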
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pickle.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pickle.pyi new file mode 100644 index 0000000..d94fe20 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pickle.pyi @@ -0,0 +1,233 @@ +from _pickle import ( + PickleError as PickleError, + Pickler as Pickler, + PicklingError as PicklingError, + Unpickler as Unpickler, + UnpicklingError as UnpicklingError, + _BufferCallback, + _ReadableFileobj, + _ReducedType, + dump as dump, + dumps as dumps, + load as load, + loads as loads, +) +from _typeshed import ReadableBuffer, SupportsWrite +from collections.abc import Callable, Iterable, Mapping +from typing import Any, ClassVar, Final, SupportsBytes, SupportsIndex, final +from typing_extensions import Self + +__all__ = [ + "PickleBuffer", + "PickleError", + "PicklingError", + "UnpicklingError", + "Pickler", + "Unpickler", + "dump", + "dumps", + "load", + "loads", + "ADDITEMS", + "APPEND", + "APPENDS", + "BINBYTES", + "BINBYTES8", + "BINFLOAT", + "BINGET", + "BININT", + "BININT1", + "BININT2", + "BINPERSID", + "BINPUT", + "BINSTRING", + "BINUNICODE", + "BINUNICODE8", + "BUILD", + "BYTEARRAY8", + "DEFAULT_PROTOCOL", + "DICT", + "DUP", + "EMPTY_DICT", + "EMPTY_LIST", + "EMPTY_SET", + "EMPTY_TUPLE", + "EXT1", + "EXT2", + "EXT4", + "FALSE", + "FLOAT", + "FRAME", + "FROZENSET", + "GET", + "GLOBAL", + "HIGHEST_PROTOCOL", + "INST", + "INT", + "LIST", + "LONG", + "LONG1", + "LONG4", + "LONG_BINGET", + "LONG_BINPUT", + "MARK", + "MEMOIZE", + "NEWFALSE", + "NEWOBJ", + "NEWOBJ_EX", + "NEWTRUE", + "NEXT_BUFFER", + "NONE", + "OBJ", + "PERSID", + "POP", + "POP_MARK", + "PROTO", + "PUT", + "READONLY_BUFFER", + "REDUCE", + "SETITEM", + "SETITEMS", + "SHORT_BINBYTES", + "SHORT_BINSTRING", + "SHORT_BINUNICODE", + "STACK_GLOBAL", + "STOP", + "STRING", + "TRUE", + "TUPLE", + "TUPLE1", + "TUPLE2", + "TUPLE3", + "UNICODE", +] + +HIGHEST_PROTOCOL: Final = 5 +DEFAULT_PROTOCOL: Final = 5 + +bytes_types: tuple[type[Any], ...] # undocumented + +@final +class PickleBuffer: + def __new__(cls, buffer: ReadableBuffer) -> Self: ... + def raw(self) -> memoryview: ... + def release(self) -> None: ... + def __buffer__(self, flags: int, /) -> memoryview: ... + def __release_buffer__(self, buffer: memoryview, /) -> None: ... + +MARK: Final = b"(" +STOP: Final = b"." 
+POP: Final = b"0" +POP_MARK: Final = b"1" +DUP: Final = b"2" +FLOAT: Final = b"F" +INT: Final = b"I" +BININT: Final = b"J" +BININT1: Final = b"K" +LONG: Final = b"L" +BININT2: Final = b"M" +NONE: Final = b"N" +PERSID: Final = b"P" +BINPERSID: Final = b"Q" +REDUCE: Final = b"R" +STRING: Final = b"S" +BINSTRING: Final = b"T" +SHORT_BINSTRING: Final = b"U" +UNICODE: Final = b"V" +BINUNICODE: Final = b"X" +APPEND: Final = b"a" +BUILD: Final = b"b" +GLOBAL: Final = b"c" +DICT: Final = b"d" +EMPTY_DICT: Final = b"}" +APPENDS: Final = b"e" +GET: Final = b"g" +BINGET: Final = b"h" +INST: Final = b"i" +LONG_BINGET: Final = b"j" +LIST: Final = b"l" +EMPTY_LIST: Final = b"]" +OBJ: Final = b"o" +PUT: Final = b"p" +BINPUT: Final = b"q" +LONG_BINPUT: Final = b"r" +SETITEM: Final = b"s" +TUPLE: Final = b"t" +EMPTY_TUPLE: Final = b")" +SETITEMS: Final = b"u" +BINFLOAT: Final = b"G" + +TRUE: Final = b"I01\n" +FALSE: Final = b"I00\n" + +# protocol 2 +PROTO: Final = b"\x80" +NEWOBJ: Final = b"\x81" +EXT1: Final = b"\x82" +EXT2: Final = b"\x83" +EXT4: Final = b"\x84" +TUPLE1: Final = b"\x85" +TUPLE2: Final = b"\x86" +TUPLE3: Final = b"\x87" +NEWTRUE: Final = b"\x88" +NEWFALSE: Final = b"\x89" +LONG1: Final = b"\x8a" +LONG4: Final = b"\x8b" + +# protocol 3 +BINBYTES: Final = b"B" +SHORT_BINBYTES: Final = b"C" + +# protocol 4 +SHORT_BINUNICODE: Final = b"\x8c" +BINUNICODE8: Final = b"\x8d" +BINBYTES8: Final = b"\x8e" +EMPTY_SET: Final = b"\x8f" +ADDITEMS: Final = b"\x90" +FROZENSET: Final = b"\x91" +NEWOBJ_EX: Final = b"\x92" +STACK_GLOBAL: Final = b"\x93" +MEMOIZE: Final = b"\x94" +FRAME: Final = b"\x95" + +# protocol 5 +BYTEARRAY8: Final = b"\x96" +NEXT_BUFFER: Final = b"\x97" +READONLY_BUFFER: Final = b"\x98" + +def encode_long(x: int) -> bytes: ... # undocumented +def decode_long(data: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer) -> int: ... # undocumented + +# undocumented pure-Python implementations +class _Pickler: + fast: bool + dispatch_table: Mapping[type, Callable[[Any], _ReducedType]] + bin: bool # undocumented + dispatch: ClassVar[dict[type, Callable[[Unpickler, Any], None]]] # undocumented, _Pickler only + reducer_override: Callable[[Any], Any] + def __init__( + self, + file: SupportsWrite[bytes], + protocol: int | None = None, + *, + fix_imports: bool = True, + buffer_callback: _BufferCallback = None, + ) -> None: ... + def dump(self, obj: Any) -> None: ... + def clear_memo(self) -> None: ... + def persistent_id(self, obj: Any) -> Any: ... + +class _Unpickler: + dispatch: ClassVar[dict[int, Callable[[Unpickler], None]]] # undocumented, _Unpickler only + def __init__( + self, + file: _ReadableFileobj, + *, + fix_imports: bool = True, + encoding: str = "ASCII", + errors: str = "strict", + buffers: Iterable[Any] | None = None, + ) -> None: ... + def load(self) -> Any: ... + def find_class(self, module: str, name: str) -> Any: ... + def persistent_load(self, pid: Any) -> Any: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pickletools.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pickletools.pyi new file mode 100644 index 0000000..8bbfaba --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pickletools.pyi @@ -0,0 +1,177 @@ +import sys +from collections.abc import Callable, Iterator, MutableMapping +from typing import IO, Any, Final +from typing_extensions import TypeAlias + +__all__ = ["dis", "genops", "optimize"] + +_Reader: TypeAlias = Callable[[IO[bytes]], Any] +bytes_types: tuple[type[Any], ...] 
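The opcode constants above are the raw bytes of the pickle wire format; every protocol 2+ pickle begins with PROTO and ends with STOP. An illustrative check (not part of the diff):

import pickle

payload = pickle.dumps({"answer": 42}, protocol=5)
assert payload[:1] == pickle.PROTO      # b"\x80" opens every protocol 2+ pickle
assert payload[1] == 5                  # the byte after PROTO carries the protocol number
assert payload[-1:] == pickle.STOP      # b"." always terminates the stream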
+ +UP_TO_NEWLINE: Final = -1 +TAKEN_FROM_ARGUMENT1: Final = -2 +TAKEN_FROM_ARGUMENT4: Final = -3 +TAKEN_FROM_ARGUMENT4U: Final = -4 +TAKEN_FROM_ARGUMENT8U: Final = -5 + +class ArgumentDescriptor: + __slots__ = ("name", "n", "reader", "doc") + name: str + n: int + reader: _Reader + doc: str + def __init__(self, name: str, n: int, reader: _Reader, doc: str) -> None: ... + +def read_uint1(f: IO[bytes]) -> int: ... + +uint1: ArgumentDescriptor + +def read_uint2(f: IO[bytes]) -> int: ... + +uint2: ArgumentDescriptor + +def read_int4(f: IO[bytes]) -> int: ... + +int4: ArgumentDescriptor + +def read_uint4(f: IO[bytes]) -> int: ... + +uint4: ArgumentDescriptor + +def read_uint8(f: IO[bytes]) -> int: ... + +uint8: ArgumentDescriptor + +if sys.version_info >= (3, 12): + def read_stringnl( + f: IO[bytes], decode: bool = True, stripquotes: bool = True, *, encoding: str = "latin-1" + ) -> bytes | str: ... + +else: + def read_stringnl(f: IO[bytes], decode: bool = True, stripquotes: bool = True) -> bytes | str: ... + +stringnl: ArgumentDescriptor + +def read_stringnl_noescape(f: IO[bytes]) -> str: ... + +stringnl_noescape: ArgumentDescriptor + +def read_stringnl_noescape_pair(f: IO[bytes]) -> str: ... + +stringnl_noescape_pair: ArgumentDescriptor + +def read_string1(f: IO[bytes]) -> str: ... + +string1: ArgumentDescriptor + +def read_string4(f: IO[bytes]) -> str: ... + +string4: ArgumentDescriptor + +def read_bytes1(f: IO[bytes]) -> bytes: ... + +bytes1: ArgumentDescriptor + +def read_bytes4(f: IO[bytes]) -> bytes: ... + +bytes4: ArgumentDescriptor + +def read_bytes8(f: IO[bytes]) -> bytes: ... + +bytes8: ArgumentDescriptor + +def read_unicodestringnl(f: IO[bytes]) -> str: ... + +unicodestringnl: ArgumentDescriptor + +def read_unicodestring1(f: IO[bytes]) -> str: ... + +unicodestring1: ArgumentDescriptor + +def read_unicodestring4(f: IO[bytes]) -> str: ... + +unicodestring4: ArgumentDescriptor + +def read_unicodestring8(f: IO[bytes]) -> str: ... + +unicodestring8: ArgumentDescriptor + +def read_decimalnl_short(f: IO[bytes]) -> int: ... +def read_decimalnl_long(f: IO[bytes]) -> int: ... + +decimalnl_short: ArgumentDescriptor +decimalnl_long: ArgumentDescriptor + +def read_floatnl(f: IO[bytes]) -> float: ... + +floatnl: ArgumentDescriptor + +def read_float8(f: IO[bytes]) -> float: ... + +float8: ArgumentDescriptor + +def read_long1(f: IO[bytes]) -> int: ... + +long1: ArgumentDescriptor + +def read_long4(f: IO[bytes]) -> int: ... + +long4: ArgumentDescriptor + +class StackObject: + __slots__ = ("name", "obtype", "doc") + name: str + obtype: type[Any] | tuple[type[Any], ...] + doc: str + def __init__(self, name: str, obtype: type[Any] | tuple[type[Any], ...], doc: str) -> None: ... 
+ +pyint: StackObject +pylong: StackObject +pyinteger_or_bool: StackObject +pybool: StackObject +pyfloat: StackObject +pybytes_or_str: StackObject +pystring: StackObject +pybytes: StackObject +pyunicode: StackObject +pynone: StackObject +pytuple: StackObject +pylist: StackObject +pydict: StackObject +pyset: StackObject +pyfrozenset: StackObject +anyobject: StackObject +markobject: StackObject +stackslice: StackObject + +class OpcodeInfo: + __slots__ = ("name", "code", "arg", "stack_before", "stack_after", "proto", "doc") + name: str + code: str + arg: ArgumentDescriptor | None + stack_before: list[StackObject] + stack_after: list[StackObject] + proto: int + doc: str + def __init__( + self, + name: str, + code: str, + arg: ArgumentDescriptor | None, + stack_before: list[StackObject], + stack_after: list[StackObject], + proto: int, + doc: str, + ) -> None: ... + +opcodes: list[OpcodeInfo] + +def genops(pickle: bytes | bytearray | IO[bytes]) -> Iterator[tuple[OpcodeInfo, Any | None, int | None]]: ... +def optimize(p: bytes | bytearray | IO[bytes]) -> bytes: ... +def dis( + pickle: bytes | bytearray | IO[bytes], + out: IO[str] | None = None, + memo: MutableMapping[int, Any] | None = None, + indentlevel: int = 4, + annotate: int = 0, +) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pipes.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pipes.pyi new file mode 100644 index 0000000..fe680bf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pipes.pyi @@ -0,0 +1,16 @@ +import os + +__all__ = ["Template"] + +class Template: + def reset(self) -> None: ... + def clone(self) -> Template: ... + def debug(self, flag: bool) -> None: ... + def append(self, cmd: str, kind: str) -> None: ... + def prepend(self, cmd: str, kind: str) -> None: ... + def open(self, file: str, rw: str) -> os._wrap_close: ... + def copy(self, infile: str, outfile: str) -> int: ... + +# Not documented, but widely used. +# Documented as shlex.quote since 3.3. +def quote(s: str) -> str: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pkgutil.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pkgutil.pyi new file mode 100644 index 0000000..7c70dcc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pkgutil.pyi @@ -0,0 +1,59 @@ +import sys +from _typeshed import StrOrBytesPath, SupportsRead +from _typeshed.importlib import LoaderProtocol, MetaPathFinderProtocol, PathEntryFinderProtocol +from collections.abc import Callable, Iterable, Iterator +from typing import IO, Any, NamedTuple, TypeVar +from typing_extensions import deprecated + +__all__ = [ + "get_importer", + "iter_importers", + "walk_packages", + "iter_modules", + "get_data", + "read_code", + "extend_path", + "ModuleInfo", +] +if sys.version_info < (3, 14): + __all__ += ["get_loader", "find_loader"] +if sys.version_info < (3, 12): + __all__ += ["ImpImporter", "ImpLoader"] + +_PathT = TypeVar("_PathT", bound=Iterable[str]) + +class ModuleInfo(NamedTuple): + module_finder: MetaPathFinderProtocol | PathEntryFinderProtocol + name: str + ispkg: bool + +def extend_path(path: _PathT, name: str) -> _PathT: ... + +if sys.version_info < (3, 12): + @deprecated("Deprecated since Python 3.3; removed in Python 3.12. Use the `importlib` module instead.") + class ImpImporter: + def __init__(self, path: StrOrBytesPath | None = None) -> None: ... + + @deprecated("Deprecated since Python 3.3; removed in Python 3.12. 
Use the `importlib` module instead.") + class ImpLoader: + def __init__(self, fullname: str, file: IO[str], filename: StrOrBytesPath, etc: tuple[str, str, int]) -> None: ... + +if sys.version_info < (3, 14): + if sys.version_info >= (3, 12): + @deprecated("Deprecated since Python 3.12; removed in Python 3.14. Use `importlib.util.find_spec()` instead.") + def find_loader(fullname: str) -> LoaderProtocol | None: ... + @deprecated("Deprecated since Python 3.12; removed in Python 3.14. Use `importlib.util.find_spec()` instead.") + def get_loader(module_or_name: str) -> LoaderProtocol | None: ... + else: + def find_loader(fullname: str) -> LoaderProtocol | None: ... + def get_loader(module_or_name: str) -> LoaderProtocol | None: ... + +def get_importer(path_item: StrOrBytesPath) -> PathEntryFinderProtocol | None: ... +def iter_importers(fullname: str = "") -> Iterator[MetaPathFinderProtocol | PathEntryFinderProtocol]: ... +def iter_modules(path: Iterable[StrOrBytesPath] | None = None, prefix: str = "") -> Iterator[ModuleInfo]: ... +def read_code(stream: SupportsRead[bytes]) -> Any: ... # undocumented +def walk_packages( + path: Iterable[StrOrBytesPath] | None = None, prefix: str = "", onerror: Callable[[str], object] | None = None +) -> Iterator[ModuleInfo]: ... +def get_data(package: str, resource: str) -> bytes | None: ... +def resolve_name(name: str) -> Any: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/platform.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/platform.pyi new file mode 100644 index 0000000..69d702b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/platform.pyi @@ -0,0 +1,112 @@ +import sys +from typing import NamedTuple, type_check_only +from typing_extensions import Self, deprecated, disjoint_base + +def libc_ver(executable: str | None = None, lib: str = "", version: str = "", chunksize: int = 16384) -> tuple[str, str]: ... +def win32_ver(release: str = "", version: str = "", csd: str = "", ptype: str = "") -> tuple[str, str, str, str]: ... +def win32_edition() -> str: ... +def win32_is_iot() -> bool: ... +def mac_ver( + release: str = "", versioninfo: tuple[str, str, str] = ("", "", ""), machine: str = "" +) -> tuple[str, tuple[str, str, str], str]: ... + +if sys.version_info >= (3, 13): + @deprecated("Deprecated since Python 3.13; will be removed in Python 3.15.") + def java_ver( + release: str = "", + vendor: str = "", + vminfo: tuple[str, str, str] = ("", "", ""), + osinfo: tuple[str, str, str] = ("", "", ""), + ) -> tuple[str, str, tuple[str, str, str], tuple[str, str, str]]: ... + +else: + def java_ver( + release: str = "", + vendor: str = "", + vminfo: tuple[str, str, str] = ("", "", ""), + osinfo: tuple[str, str, str] = ("", "", ""), + ) -> tuple[str, str, tuple[str, str, str], tuple[str, str, str]]: ... + +def system_alias(system: str, release: str, version: str) -> tuple[str, str, str]: ... +def architecture(executable: str = sys.executable, bits: str = "", linkage: str = "") -> tuple[str, str]: ... + +# This class is not exposed. It calls itself platform.uname_result_base. +# At runtime it only has 5 fields. +@type_check_only +class _uname_result_base(NamedTuple): + system: str + node: str + release: str + version: str + machine: str + # This base class doesn't have this field at runtime, but claiming it + # does is the least bad way to handle the situation. Nobody really + # sees this class anyway. 
See #13068 + processor: str + +# uname_result emulates a 6-field named tuple, but the processor field +# is lazily evaluated rather than being passed in to the constructor. +if sys.version_info >= (3, 12): + class uname_result(_uname_result_base): + __match_args__ = ("system", "node", "release", "version", "machine") # pyright: ignore[reportAssignmentType] + + def __new__(_cls, system: str, node: str, release: str, version: str, machine: str) -> Self: ... + @property + def processor(self) -> str: ... + +else: + @disjoint_base + class uname_result(_uname_result_base): + if sys.version_info >= (3, 10): + __match_args__ = ("system", "node", "release", "version", "machine") # pyright: ignore[reportAssignmentType] + + def __new__(_cls, system: str, node: str, release: str, version: str, machine: str) -> Self: ... + @property + def processor(self) -> str: ... + +def uname() -> uname_result: ... +def system() -> str: ... +def node() -> str: ... +def release() -> str: ... +def version() -> str: ... +def machine() -> str: ... +def processor() -> str: ... +def python_implementation() -> str: ... +def python_version() -> str: ... +def python_version_tuple() -> tuple[str, str, str]: ... +def python_branch() -> str: ... +def python_revision() -> str: ... +def python_build() -> tuple[str, str]: ... +def python_compiler() -> str: ... +def platform(aliased: bool = False, terse: bool = False) -> str: ... + +if sys.version_info >= (3, 10): + def freedesktop_os_release() -> dict[str, str]: ... + +if sys.version_info >= (3, 13): + class AndroidVer(NamedTuple): + release: str + api_level: int + manufacturer: str + model: str + device: str + is_emulator: bool + + class IOSVersionInfo(NamedTuple): + system: str + release: str + model: str + is_simulator: bool + + def android_ver( + release: str = "", + api_level: int = 0, + manufacturer: str = "", + model: str = "", + device: str = "", + is_emulator: bool = False, + ) -> AndroidVer: ... + def ios_ver(system: str = "", release: str = "", model: str = "", is_simulator: bool = False) -> IOSVersionInfo: ... + +if sys.version_info >= (3, 14): + def invalidate_caches() -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/plistlib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/plistlib.pyi new file mode 100644 index 0000000..dc3247e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/plistlib.pyi @@ -0,0 +1,84 @@ +import sys +from _typeshed import ReadableBuffer +from collections.abc import Mapping, MutableMapping +from datetime import datetime +from enum import Enum +from typing import IO, Any, Final +from typing_extensions import Self + +__all__ = ["InvalidFileException", "FMT_XML", "FMT_BINARY", "load", "dump", "loads", "dumps", "UID"] + +class PlistFormat(Enum): + FMT_XML = 1 + FMT_BINARY = 2 + +FMT_XML: Final = PlistFormat.FMT_XML +FMT_BINARY: Final = PlistFormat.FMT_BINARY +if sys.version_info >= (3, 13): + def load( + fp: IO[bytes], + *, + fmt: PlistFormat | None = None, + dict_type: type[MutableMapping[str, Any]] = ..., + aware_datetime: bool = False, + ) -> Any: ... + def loads( + value: ReadableBuffer | str, + *, + fmt: PlistFormat | None = None, + dict_type: type[MutableMapping[str, Any]] = ..., + aware_datetime: bool = False, + ) -> Any: ... + +else: + def load(fp: IO[bytes], *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ...) -> Any: ... + def loads( + value: ReadableBuffer, *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ... 
+ ) -> Any: ... + +if sys.version_info >= (3, 13): + def dump( + value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, + fp: IO[bytes], + *, + fmt: PlistFormat = ..., + sort_keys: bool = True, + skipkeys: bool = False, + aware_datetime: bool = False, + ) -> None: ... + def dumps( + value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, + *, + fmt: PlistFormat = ..., + skipkeys: bool = False, + sort_keys: bool = True, + aware_datetime: bool = False, + ) -> bytes: ... + +else: + def dump( + value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, + fp: IO[bytes], + *, + fmt: PlistFormat = ..., + sort_keys: bool = True, + skipkeys: bool = False, + ) -> None: ... + def dumps( + value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, + *, + fmt: PlistFormat = ..., + skipkeys: bool = False, + sort_keys: bool = True, + ) -> bytes: ... + +class UID: + data: int + def __init__(self, data: int) -> None: ... + def __index__(self) -> int: ... + def __reduce__(self) -> tuple[type[Self], tuple[int]]: ... + def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + +class InvalidFileException(ValueError): + def __init__(self, message: str = "Invalid file") -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/poplib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/poplib.pyi new file mode 100644 index 0000000..9ff2b76 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/poplib.pyi @@ -0,0 +1,72 @@ +import socket +import ssl +import sys +from builtins import list as _list # conflicts with a method named "list" +from re import Pattern +from typing import Any, BinaryIO, Final, NoReturn, overload +from typing_extensions import TypeAlias + +__all__ = ["POP3", "error_proto", "POP3_SSL"] + +_LongResp: TypeAlias = tuple[bytes, list[bytes], int] + +class error_proto(Exception): ... + +POP3_PORT: Final = 110 +POP3_SSL_PORT: Final = 995 +CR: Final = b"\r" +LF: Final = b"\n" +CRLF: Final = b"\r\n" +HAVE_SSL: Final[bool] + +class POP3: + encoding: str + host: str + port: int + sock: socket.socket + file: BinaryIO + welcome: bytes + def __init__(self, host: str, port: int = 110, timeout: float = ...) -> None: ... + def getwelcome(self) -> bytes: ... + def set_debuglevel(self, level: int) -> None: ... + def user(self, user: str) -> bytes: ... + def pass_(self, pswd: str) -> bytes: ... + def stat(self) -> tuple[int, int]: ... + def list(self, which: Any | None = None) -> _LongResp: ... + def retr(self, which: Any) -> _LongResp: ... + def dele(self, which: Any) -> bytes: ... + def noop(self) -> bytes: ... + def rset(self) -> bytes: ... + def quit(self) -> bytes: ... + def close(self) -> None: ... + def rpop(self, user: str) -> bytes: ... + timestamp: Pattern[str] + def apop(self, user: str, password: str) -> bytes: ... + def top(self, which: Any, howmuch: int) -> _LongResp: ... + @overload + def uidl(self) -> _LongResp: ... + @overload + def uidl(self, which: Any) -> bytes: ... + def utf8(self) -> bytes: ... + def capa(self) -> dict[str, _list[str]]: ... + def stls(self, context: ssl.SSLContext | None = None) -> bytes: ... + +class POP3_SSL(POP3): + if sys.version_info >= (3, 12): + def __init__( + self, host: str, port: int = 995, *, timeout: float = ..., context: ssl.SSLContext | None = None + ) -> None: ... 
+ def stls(self, context: Any = None) -> NoReturn: ... + else: + def __init__( + self, + host: str, + port: int = 995, + keyfile: str | None = None, + certfile: str | None = None, + timeout: float = ..., + context: ssl.SSLContext | None = None, + ) -> None: ... + # "context" is actually the last argument, + # but that breaks LSP and it doesn't really matter because all the arguments are ignored + def stls(self, context: Any = None, keyfile: Any = None, certfile: Any = None) -> NoReturn: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/posix.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/posix.pyi new file mode 100644 index 0000000..6d0d76a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/posix.pyi @@ -0,0 +1,405 @@ +import sys + +if sys.platform != "win32": + # Actually defined here, but defining in os allows sharing code with windows + from os import ( + CLD_CONTINUED as CLD_CONTINUED, + CLD_DUMPED as CLD_DUMPED, + CLD_EXITED as CLD_EXITED, + CLD_KILLED as CLD_KILLED, + CLD_STOPPED as CLD_STOPPED, + CLD_TRAPPED as CLD_TRAPPED, + EX_CANTCREAT as EX_CANTCREAT, + EX_CONFIG as EX_CONFIG, + EX_DATAERR as EX_DATAERR, + EX_IOERR as EX_IOERR, + EX_NOHOST as EX_NOHOST, + EX_NOINPUT as EX_NOINPUT, + EX_NOPERM as EX_NOPERM, + EX_NOUSER as EX_NOUSER, + EX_OK as EX_OK, + EX_OSERR as EX_OSERR, + EX_OSFILE as EX_OSFILE, + EX_PROTOCOL as EX_PROTOCOL, + EX_SOFTWARE as EX_SOFTWARE, + EX_TEMPFAIL as EX_TEMPFAIL, + EX_UNAVAILABLE as EX_UNAVAILABLE, + EX_USAGE as EX_USAGE, + F_LOCK as F_LOCK, + F_OK as F_OK, + F_TEST as F_TEST, + F_TLOCK as F_TLOCK, + F_ULOCK as F_ULOCK, + NGROUPS_MAX as NGROUPS_MAX, + O_ACCMODE as O_ACCMODE, + O_APPEND as O_APPEND, + O_ASYNC as O_ASYNC, + O_CLOEXEC as O_CLOEXEC, + O_CREAT as O_CREAT, + O_DIRECTORY as O_DIRECTORY, + O_DSYNC as O_DSYNC, + O_EXCL as O_EXCL, + O_NDELAY as O_NDELAY, + O_NOCTTY as O_NOCTTY, + O_NOFOLLOW as O_NOFOLLOW, + O_NONBLOCK as O_NONBLOCK, + O_RDONLY as O_RDONLY, + O_RDWR as O_RDWR, + O_SYNC as O_SYNC, + O_TRUNC as O_TRUNC, + O_WRONLY as O_WRONLY, + P_ALL as P_ALL, + P_PGID as P_PGID, + P_PID as P_PID, + POSIX_SPAWN_CLOSE as POSIX_SPAWN_CLOSE, + POSIX_SPAWN_DUP2 as POSIX_SPAWN_DUP2, + POSIX_SPAWN_OPEN as POSIX_SPAWN_OPEN, + PRIO_PGRP as PRIO_PGRP, + PRIO_PROCESS as PRIO_PROCESS, + PRIO_USER as PRIO_USER, + R_OK as R_OK, + RTLD_GLOBAL as RTLD_GLOBAL, + RTLD_LAZY as RTLD_LAZY, + RTLD_LOCAL as RTLD_LOCAL, + RTLD_NODELETE as RTLD_NODELETE, + RTLD_NOLOAD as RTLD_NOLOAD, + RTLD_NOW as RTLD_NOW, + SCHED_FIFO as SCHED_FIFO, + SCHED_OTHER as SCHED_OTHER, + SCHED_RR as SCHED_RR, + SEEK_DATA as SEEK_DATA, + SEEK_HOLE as SEEK_HOLE, + ST_NOSUID as ST_NOSUID, + ST_RDONLY as ST_RDONLY, + TMP_MAX as TMP_MAX, + W_OK as W_OK, + WCONTINUED as WCONTINUED, + WCOREDUMP as WCOREDUMP, + WEXITED as WEXITED, + WEXITSTATUS as WEXITSTATUS, + WIFCONTINUED as WIFCONTINUED, + WIFEXITED as WIFEXITED, + WIFSIGNALED as WIFSIGNALED, + WIFSTOPPED as WIFSTOPPED, + WNOHANG as WNOHANG, + WNOWAIT as WNOWAIT, + WSTOPPED as WSTOPPED, + WSTOPSIG as WSTOPSIG, + WTERMSIG as WTERMSIG, + WUNTRACED as WUNTRACED, + X_OK as X_OK, + DirEntry as DirEntry, + _exit as _exit, + abort as abort, + access as access, + chdir as chdir, + chmod as chmod, + chown as chown, + chroot as chroot, + close as close, + closerange as closerange, + confstr as confstr, + confstr_names as confstr_names, + cpu_count as cpu_count, + ctermid as ctermid, + device_encoding as device_encoding, + dup as dup, + dup2 as dup2, + error as error, + execv as execv, + execve as 
execve, + fchdir as fchdir, + fchmod as fchmod, + fchown as fchown, + fork as fork, + forkpty as forkpty, + fpathconf as fpathconf, + fspath as fspath, + fstat as fstat, + fstatvfs as fstatvfs, + fsync as fsync, + ftruncate as ftruncate, + get_blocking as get_blocking, + get_inheritable as get_inheritable, + get_terminal_size as get_terminal_size, + getcwd as getcwd, + getcwdb as getcwdb, + getegid as getegid, + geteuid as geteuid, + getgid as getgid, + getgrouplist as getgrouplist, + getgroups as getgroups, + getloadavg as getloadavg, + getlogin as getlogin, + getpgid as getpgid, + getpgrp as getpgrp, + getpid as getpid, + getppid as getppid, + getpriority as getpriority, + getsid as getsid, + getuid as getuid, + initgroups as initgroups, + isatty as isatty, + kill as kill, + killpg as killpg, + lchown as lchown, + link as link, + listdir as listdir, + lockf as lockf, + lseek as lseek, + lstat as lstat, + major as major, + makedev as makedev, + minor as minor, + mkdir as mkdir, + mkfifo as mkfifo, + mknod as mknod, + nice as nice, + open as open, + openpty as openpty, + pathconf as pathconf, + pathconf_names as pathconf_names, + pipe as pipe, + posix_spawn as posix_spawn, + posix_spawnp as posix_spawnp, + pread as pread, + preadv as preadv, + putenv as putenv, + pwrite as pwrite, + pwritev as pwritev, + read as read, + readlink as readlink, + readv as readv, + register_at_fork as register_at_fork, + remove as remove, + rename as rename, + replace as replace, + rmdir as rmdir, + scandir as scandir, + sched_get_priority_max as sched_get_priority_max, + sched_get_priority_min as sched_get_priority_min, + sched_param as sched_param, + sched_yield as sched_yield, + sendfile as sendfile, + set_blocking as set_blocking, + set_inheritable as set_inheritable, + setegid as setegid, + seteuid as seteuid, + setgid as setgid, + setgroups as setgroups, + setpgid as setpgid, + setpgrp as setpgrp, + setpriority as setpriority, + setregid as setregid, + setreuid as setreuid, + setsid as setsid, + setuid as setuid, + stat as stat, + stat_result as stat_result, + statvfs as statvfs, + statvfs_result as statvfs_result, + strerror as strerror, + symlink as symlink, + sync as sync, + sysconf as sysconf, + sysconf_names as sysconf_names, + system as system, + tcgetpgrp as tcgetpgrp, + tcsetpgrp as tcsetpgrp, + terminal_size as terminal_size, + times as times, + times_result as times_result, + truncate as truncate, + ttyname as ttyname, + umask as umask, + uname as uname, + uname_result as uname_result, + unlink as unlink, + unsetenv as unsetenv, + urandom as urandom, + utime as utime, + wait as wait, + wait3 as wait3, + wait4 as wait4, + waitpid as waitpid, + waitstatus_to_exitcode as waitstatus_to_exitcode, + write as write, + writev as writev, + ) + + if sys.version_info >= (3, 10): + from os import O_FSYNC as O_FSYNC + + if sys.version_info >= (3, 11): + from os import login_tty as login_tty + + if sys.version_info >= (3, 13): + from os import grantpt as grantpt, posix_openpt as posix_openpt, ptsname as ptsname, unlockpt as unlockpt + + if sys.version_info >= (3, 13) and sys.platform == "linux": + from os import ( + POSIX_SPAWN_CLOSEFROM as POSIX_SPAWN_CLOSEFROM, + TFD_CLOEXEC as TFD_CLOEXEC, + TFD_NONBLOCK as TFD_NONBLOCK, + TFD_TIMER_ABSTIME as TFD_TIMER_ABSTIME, + TFD_TIMER_CANCEL_ON_SET as TFD_TIMER_CANCEL_ON_SET, + timerfd_create as timerfd_create, + timerfd_gettime as timerfd_gettime, + timerfd_gettime_ns as timerfd_gettime_ns, + timerfd_settime as timerfd_settime, + timerfd_settime_ns as 
timerfd_settime_ns, + ) + + if sys.version_info >= (3, 14): + from os import readinto as readinto + + if sys.version_info >= (3, 14) and sys.platform == "linux": + from os import SCHED_DEADLINE as SCHED_DEADLINE, SCHED_NORMAL as SCHED_NORMAL + + if sys.platform != "linux": + from os import O_EXLOCK as O_EXLOCK, O_SHLOCK as O_SHLOCK, chflags as chflags, lchflags as lchflags, lchmod as lchmod + + if sys.platform != "linux" and sys.platform != "darwin": + from os import EX_NOTFOUND as EX_NOTFOUND, SCHED_SPORADIC as SCHED_SPORADIC + + if sys.platform != "linux" and sys.version_info >= (3, 13): + from os import O_EXEC as O_EXEC, O_SEARCH as O_SEARCH + + if sys.platform != "darwin": + from os import ( + POSIX_FADV_DONTNEED as POSIX_FADV_DONTNEED, + POSIX_FADV_NOREUSE as POSIX_FADV_NOREUSE, + POSIX_FADV_NORMAL as POSIX_FADV_NORMAL, + POSIX_FADV_RANDOM as POSIX_FADV_RANDOM, + POSIX_FADV_SEQUENTIAL as POSIX_FADV_SEQUENTIAL, + POSIX_FADV_WILLNEED as POSIX_FADV_WILLNEED, + RWF_DSYNC as RWF_DSYNC, + RWF_HIPRI as RWF_HIPRI, + RWF_NOWAIT as RWF_NOWAIT, + RWF_SYNC as RWF_SYNC, + ST_APPEND as ST_APPEND, + ST_MANDLOCK as ST_MANDLOCK, + ST_NOATIME as ST_NOATIME, + ST_NODEV as ST_NODEV, + ST_NODIRATIME as ST_NODIRATIME, + ST_NOEXEC as ST_NOEXEC, + ST_RELATIME as ST_RELATIME, + ST_SYNCHRONOUS as ST_SYNCHRONOUS, + ST_WRITE as ST_WRITE, + fdatasync as fdatasync, + getresgid as getresgid, + getresuid as getresuid, + pipe2 as pipe2, + posix_fadvise as posix_fadvise, + posix_fallocate as posix_fallocate, + sched_getaffinity as sched_getaffinity, + sched_getparam as sched_getparam, + sched_getscheduler as sched_getscheduler, + sched_rr_get_interval as sched_rr_get_interval, + sched_setaffinity as sched_setaffinity, + sched_setparam as sched_setparam, + sched_setscheduler as sched_setscheduler, + setresgid as setresgid, + setresuid as setresuid, + ) + + if sys.version_info >= (3, 10): + from os import RWF_APPEND as RWF_APPEND + + if sys.platform != "darwin" or sys.version_info >= (3, 13): + from os import waitid as waitid, waitid_result as waitid_result + + if sys.platform == "linux": + from os import ( + GRND_NONBLOCK as GRND_NONBLOCK, + GRND_RANDOM as GRND_RANDOM, + MFD_ALLOW_SEALING as MFD_ALLOW_SEALING, + MFD_CLOEXEC as MFD_CLOEXEC, + MFD_HUGE_1GB as MFD_HUGE_1GB, + MFD_HUGE_1MB as MFD_HUGE_1MB, + MFD_HUGE_2GB as MFD_HUGE_2GB, + MFD_HUGE_2MB as MFD_HUGE_2MB, + MFD_HUGE_8MB as MFD_HUGE_8MB, + MFD_HUGE_16GB as MFD_HUGE_16GB, + MFD_HUGE_16MB as MFD_HUGE_16MB, + MFD_HUGE_32MB as MFD_HUGE_32MB, + MFD_HUGE_64KB as MFD_HUGE_64KB, + MFD_HUGE_256MB as MFD_HUGE_256MB, + MFD_HUGE_512KB as MFD_HUGE_512KB, + MFD_HUGE_512MB as MFD_HUGE_512MB, + MFD_HUGE_MASK as MFD_HUGE_MASK, + MFD_HUGE_SHIFT as MFD_HUGE_SHIFT, + MFD_HUGETLB as MFD_HUGETLB, + O_DIRECT as O_DIRECT, + O_LARGEFILE as O_LARGEFILE, + O_NOATIME as O_NOATIME, + O_PATH as O_PATH, + O_RSYNC as O_RSYNC, + O_TMPFILE as O_TMPFILE, + P_PIDFD as P_PIDFD, + RTLD_DEEPBIND as RTLD_DEEPBIND, + SCHED_BATCH as SCHED_BATCH, + SCHED_IDLE as SCHED_IDLE, + SCHED_RESET_ON_FORK as SCHED_RESET_ON_FORK, + XATTR_CREATE as XATTR_CREATE, + XATTR_REPLACE as XATTR_REPLACE, + XATTR_SIZE_MAX as XATTR_SIZE_MAX, + copy_file_range as copy_file_range, + getrandom as getrandom, + getxattr as getxattr, + listxattr as listxattr, + memfd_create as memfd_create, + pidfd_open as pidfd_open, + removexattr as removexattr, + setxattr as setxattr, + ) + + if sys.version_info >= (3, 10): + from os import ( + EFD_CLOEXEC as EFD_CLOEXEC, + EFD_NONBLOCK as EFD_NONBLOCK, + EFD_SEMAPHORE as EFD_SEMAPHORE, + 
SPLICE_F_MORE as SPLICE_F_MORE, + SPLICE_F_MOVE as SPLICE_F_MOVE, + SPLICE_F_NONBLOCK as SPLICE_F_NONBLOCK, + eventfd as eventfd, + eventfd_read as eventfd_read, + eventfd_write as eventfd_write, + splice as splice, + ) + + if sys.version_info >= (3, 12): + from os import ( + CLONE_FILES as CLONE_FILES, + CLONE_FS as CLONE_FS, + CLONE_NEWCGROUP as CLONE_NEWCGROUP, + CLONE_NEWIPC as CLONE_NEWIPC, + CLONE_NEWNET as CLONE_NEWNET, + CLONE_NEWNS as CLONE_NEWNS, + CLONE_NEWPID as CLONE_NEWPID, + CLONE_NEWTIME as CLONE_NEWTIME, + CLONE_NEWUSER as CLONE_NEWUSER, + CLONE_NEWUTS as CLONE_NEWUTS, + CLONE_SIGHAND as CLONE_SIGHAND, + CLONE_SYSVSEM as CLONE_SYSVSEM, + CLONE_THREAD as CLONE_THREAD, + CLONE_VM as CLONE_VM, + PIDFD_NONBLOCK as PIDFD_NONBLOCK, + setns as setns, + unshare as unshare, + ) + + if sys.platform == "darwin": + if sys.version_info >= (3, 12): + from os import ( + PRIO_DARWIN_BG as PRIO_DARWIN_BG, + PRIO_DARWIN_NONUI as PRIO_DARWIN_NONUI, + PRIO_DARWIN_PROCESS as PRIO_DARWIN_PROCESS, + PRIO_DARWIN_THREAD as PRIO_DARWIN_THREAD, + ) + if sys.platform == "darwin" and sys.version_info >= (3, 10): + from os import O_EVTONLY as O_EVTONLY, O_NOFOLLOW_ANY as O_NOFOLLOW_ANY, O_SYMLINK as O_SYMLINK + + # Not same as os.environ or os.environb + # Because of this variable, we can't do "from posix import *" in os/__init__.pyi + environ: dict[bytes, bytes] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/posixpath.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/posixpath.pyi new file mode 100644 index 0000000..84e1b1e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/posixpath.pyi @@ -0,0 +1,160 @@ +import sys +from _typeshed import AnyOrLiteralStr, BytesPath, FileDescriptorOrPath, StrOrBytesPath, StrPath +from collections.abc import Iterable +from genericpath import ( + ALLOW_MISSING as ALLOW_MISSING, + _AllowMissingType, + commonprefix as commonprefix, + exists as exists, + getatime as getatime, + getctime as getctime, + getmtime as getmtime, + getsize as getsize, + isdir as isdir, + isfile as isfile, + samefile as samefile, + sameopenfile as sameopenfile, + samestat as samestat, +) + +if sys.version_info >= (3, 13): + from genericpath import isdevdrive as isdevdrive +from os import PathLike +from typing import AnyStr, overload +from typing_extensions import LiteralString + +__all__ = [ + "normcase", + "isabs", + "join", + "splitdrive", + "split", + "splitext", + "basename", + "dirname", + "commonprefix", + "getsize", + "getmtime", + "getatime", + "getctime", + "islink", + "exists", + "lexists", + "isdir", + "isfile", + "ismount", + "expanduser", + "expandvars", + "normpath", + "abspath", + "samefile", + "sameopenfile", + "samestat", + "curdir", + "pardir", + "sep", + "pathsep", + "defpath", + "altsep", + "extsep", + "devnull", + "realpath", + "supports_unicode_filenames", + "relpath", + "commonpath", +] +__all__ += ["ALLOW_MISSING"] +if sys.version_info >= (3, 12): + __all__ += ["isjunction", "splitroot"] +if sys.version_info >= (3, 13): + __all__ += ["isdevdrive"] + +supports_unicode_filenames: bool +# aliases (also in os) +curdir: LiteralString +pardir: LiteralString +sep: LiteralString +altsep: LiteralString | None +extsep: LiteralString +pathsep: LiteralString +defpath: LiteralString +devnull: LiteralString + +# Overloads are necessary to work around python/mypy#17952 & python/mypy#11880 +@overload +def abspath(path: PathLike[AnyStr]) -> AnyStr: ... +@overload +def abspath(path: AnyStr) -> AnyStr: ... 
+@overload +def basename(p: PathLike[AnyStr]) -> AnyStr: ... +@overload +def basename(p: AnyOrLiteralStr) -> AnyOrLiteralStr: ... +@overload +def dirname(p: PathLike[AnyStr]) -> AnyStr: ... +@overload +def dirname(p: AnyOrLiteralStr) -> AnyOrLiteralStr: ... +@overload +def expanduser(path: PathLike[AnyStr]) -> AnyStr: ... +@overload +def expanduser(path: AnyStr) -> AnyStr: ... +@overload +def expandvars(path: PathLike[AnyStr]) -> AnyStr: ... +@overload +def expandvars(path: AnyStr) -> AnyStr: ... +@overload +def normcase(s: PathLike[AnyStr]) -> AnyStr: ... +@overload +def normcase(s: AnyOrLiteralStr) -> AnyOrLiteralStr: ... +@overload +def normpath(path: PathLike[AnyStr]) -> AnyStr: ... +@overload +def normpath(path: AnyOrLiteralStr) -> AnyOrLiteralStr: ... +@overload +def commonpath(paths: Iterable[LiteralString]) -> LiteralString: ... +@overload +def commonpath(paths: Iterable[StrPath]) -> str: ... +@overload +def commonpath(paths: Iterable[BytesPath]) -> bytes: ... + +# First parameter is not actually pos-only, +# but must be defined as pos-only in the stub or cross-platform code doesn't type-check, +# as the parameter name is different in ntpath.join() +@overload +def join(a: LiteralString, /, *paths: LiteralString) -> LiteralString: ... +@overload +def join(a: StrPath, /, *paths: StrPath) -> str: ... +@overload +def join(a: BytesPath, /, *paths: BytesPath) -> bytes: ... +@overload +def realpath(filename: PathLike[AnyStr], *, strict: bool | _AllowMissingType = False) -> AnyStr: ... +@overload +def realpath(filename: AnyStr, *, strict: bool | _AllowMissingType = False) -> AnyStr: ... +@overload +def relpath(path: LiteralString, start: LiteralString | None = None) -> LiteralString: ... +@overload +def relpath(path: BytesPath, start: BytesPath | None = None) -> bytes: ... +@overload +def relpath(path: StrPath, start: StrPath | None = None) -> str: ... +@overload +def split(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... +@overload +def split(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... +@overload +def splitdrive(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... +@overload +def splitdrive(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... +@overload +def splitext(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... +@overload +def splitext(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... +def isabs(s: StrOrBytesPath) -> bool: ... +def islink(path: FileDescriptorOrPath) -> bool: ... +def ismount(path: FileDescriptorOrPath) -> bool: ... +def lexists(path: FileDescriptorOrPath) -> bool: ... + +if sys.version_info >= (3, 12): + def isjunction(path: StrOrBytesPath) -> bool: ... + @overload + def splitroot(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr, AnyOrLiteralStr]: ... + @overload + def splitroot(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr, AnyStr]: ... 
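A brief usage sketch for the posixpath helpers typed in the stub above (plain illustrative Python; not part of the vendored typeshed files):

# Usage sketch for posixpath.join/splitext/dirname/basename/relpath/commonpath.
import posixpath

path = posixpath.join("/srv", "app", "logs", "run.log")            # '/srv/app/logs/run.log'
root, ext = posixpath.splitext(path)                                 # ('/srv/app/logs/run', '.log')
parent = posixpath.dirname(path)                                     # '/srv/app/logs'
name = posixpath.basename(path)                                      # 'run.log'
rel = posixpath.relpath(path, start="/srv/app")                      # 'logs/run.log'
shared = posixpath.commonpath(["/srv/app/logs", "/srv/app/tmp"])     # '/srv/app'
print(path, (root, ext), parent, name, rel, shared)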
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pprint.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pprint.pyi new file mode 100644 index 0000000..1e80462 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pprint.pyi @@ -0,0 +1,159 @@ +import sys +from _typeshed import SupportsWrite +from collections import deque +from typing import IO + +__all__ = ["pprint", "pformat", "isreadable", "isrecursive", "saferepr", "PrettyPrinter", "pp"] + +if sys.version_info >= (3, 10): + def pformat( + object: object, + indent: int = 1, + width: int = 80, + depth: int | None = None, + *, + compact: bool = False, + sort_dicts: bool = True, + underscore_numbers: bool = False, + ) -> str: ... + +else: + def pformat( + object: object, + indent: int = 1, + width: int = 80, + depth: int | None = None, + *, + compact: bool = False, + sort_dicts: bool = True, + ) -> str: ... + +if sys.version_info >= (3, 10): + def pp( + object: object, + stream: IO[str] | None = None, + indent: int = 1, + width: int = 80, + depth: int | None = None, + *, + compact: bool = False, + sort_dicts: bool = False, + underscore_numbers: bool = False, + ) -> None: ... + +else: + def pp( + object: object, + stream: IO[str] | None = None, + indent: int = 1, + width: int = 80, + depth: int | None = None, + *, + compact: bool = False, + sort_dicts: bool = False, + ) -> None: ... + +if sys.version_info >= (3, 10): + def pprint( + object: object, + stream: IO[str] | None = None, + indent: int = 1, + width: int = 80, + depth: int | None = None, + *, + compact: bool = False, + sort_dicts: bool = True, + underscore_numbers: bool = False, + ) -> None: ... + +else: + def pprint( + object: object, + stream: IO[str] | None = None, + indent: int = 1, + width: int = 80, + depth: int | None = None, + *, + compact: bool = False, + sort_dicts: bool = True, + ) -> None: ... + +def isreadable(object: object) -> bool: ... +def isrecursive(object: object) -> bool: ... +def saferepr(object: object) -> str: ... + +class PrettyPrinter: + if sys.version_info >= (3, 10): + def __init__( + self, + indent: int = 1, + width: int = 80, + depth: int | None = None, + stream: IO[str] | None = None, + *, + compact: bool = False, + sort_dicts: bool = True, + underscore_numbers: bool = False, + ) -> None: ... + else: + def __init__( + self, + indent: int = 1, + width: int = 80, + depth: int | None = None, + stream: IO[str] | None = None, + *, + compact: bool = False, + sort_dicts: bool = True, + ) -> None: ... + + def pformat(self, object: object) -> str: ... + def pprint(self, object: object) -> None: ... + def isreadable(self, object: object) -> bool: ... + def isrecursive(self, object: object) -> bool: ... + def format(self, object: object, context: dict[int, int], maxlevels: int, level: int) -> tuple[str, bool, bool]: ... + def _format( + self, object: object, stream: SupportsWrite[str], indent: int, allowance: int, context: dict[int, int], level: int + ) -> None: ... + def _pprint_dict( + self, + object: dict[object, object], + stream: SupportsWrite[str], + indent: int, + allowance: int, + context: dict[int, int], + level: int, + ) -> None: ... + def _pprint_list( + self, object: list[object], stream: SupportsWrite[str], indent: int, allowance: int, context: dict[int, int], level: int + ) -> None: ... + def _pprint_tuple( + self, + object: tuple[object, ...], + stream: SupportsWrite[str], + indent: int, + allowance: int, + context: dict[int, int], + level: int, + ) -> None: ... 
+ def _pprint_set( + self, object: set[object], stream: SupportsWrite[str], indent: int, allowance: int, context: dict[int, int], level: int + ) -> None: ... + def _pprint_deque( + self, object: deque[object], stream: SupportsWrite[str], indent: int, allowance: int, context: dict[int, int], level: int + ) -> None: ... + def _format_dict_items( + self, + items: list[tuple[object, object]], + stream: SupportsWrite[str], + indent: int, + allowance: int, + context: dict[int, int], + level: int, + ) -> None: ... + def _format_items( + self, items: list[object], stream: SupportsWrite[str], indent: int, allowance: int, context: dict[int, int], level: int + ) -> None: ... + def _repr(self, object: object, context: dict[int, int], level: int) -> str: ... + if sys.version_info >= (3, 10): + def _safe_repr(self, object: object, context: dict[int, int], maxlevels: int, level: int) -> tuple[str, bool, bool]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/profile.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/profile.pyi new file mode 100644 index 0000000..696193d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/profile.pyi @@ -0,0 +1,31 @@ +from _typeshed import StrOrBytesPath +from collections.abc import Callable, Mapping +from typing import Any, TypeVar +from typing_extensions import ParamSpec, Self, TypeAlias + +__all__ = ["run", "runctx", "Profile"] + +def run(statement: str, filename: str | None = None, sort: str | int = -1) -> None: ... +def runctx( + statement: str, globals: dict[str, Any], locals: Mapping[str, Any], filename: str | None = None, sort: str | int = -1 +) -> None: ... + +_T = TypeVar("_T") +_P = ParamSpec("_P") +_Label: TypeAlias = tuple[str, int, str] + +class Profile: + bias: int + stats: dict[_Label, tuple[int, int, int, int, dict[_Label, tuple[int, int, int, int]]]] # undocumented + def __init__(self, timer: Callable[[], float] | None = None, bias: int | None = None) -> None: ... + def set_cmd(self, cmd: str) -> None: ... + def simulate_call(self, name: str) -> None: ... + def simulate_cmd_complete(self) -> None: ... + def print_stats(self, sort: str | int = -1) -> None: ... + def dump_stats(self, file: StrOrBytesPath) -> None: ... + def create_stats(self) -> None: ... + def snapshot_stats(self) -> None: ... + def run(self, cmd: str) -> Self: ... + def runctx(self, cmd: str, globals: dict[str, Any], locals: Mapping[str, Any]) -> Self: ... + def runcall(self, func: Callable[_P, _T], /, *args: _P.args, **kw: _P.kwargs) -> _T: ... + def calibrate(self, m: int, verbose: int = 0) -> float: ... 
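A short usage sketch exercising the pprint and profile APIs typed above (illustrative only; not part of the vendored stubs):

# pprint.pp defaults to sort_dicts=False; pformat's depth elides nested containers.
import pprint
import profile

data = {"task": "index", "retries": 3, "targets": ["a", "b", "c"] * 4}
pprint.pp(data, width=60, compact=True)
print(pprint.pformat(data, indent=2, depth=1))

def fib(n: int) -> int:
    return n if n < 2 else fib(n - 1) + fib(n - 2)

prof = profile.Profile()
result = prof.runcall(fib, 20)        # forwards args, returns fib's result while timing it
prof.print_stats(sort="cumulative")   # sort accepts a pstats sort key name or index
print("fib(20) =", result)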
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pstats.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pstats.pyi new file mode 100644 index 0000000..c4dee1f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pstats.pyi @@ -0,0 +1,91 @@ +import sys +from _typeshed import StrOrBytesPath +from collections.abc import Iterable +from cProfile import Profile as _cProfile +from dataclasses import dataclass +from profile import Profile +from typing import IO, Any, Literal, overload +from typing_extensions import Self, TypeAlias + +if sys.version_info >= (3, 11): + from enum import StrEnum +else: + from enum import Enum + +__all__ = ["Stats", "SortKey", "FunctionProfile", "StatsProfile"] + +_Selector: TypeAlias = str | float | int + +if sys.version_info >= (3, 11): + class SortKey(StrEnum): + CALLS = "calls" + CUMULATIVE = "cumulative" + FILENAME = "filename" + LINE = "line" + NAME = "name" + NFL = "nfl" + PCALLS = "pcalls" + STDNAME = "stdname" + TIME = "time" + +else: + class SortKey(str, Enum): + CALLS = "calls" + CUMULATIVE = "cumulative" + FILENAME = "filename" + LINE = "line" + NAME = "name" + NFL = "nfl" + PCALLS = "pcalls" + STDNAME = "stdname" + TIME = "time" + +@dataclass(unsafe_hash=True) +class FunctionProfile: + ncalls: str + tottime: float + percall_tottime: float + cumtime: float + percall_cumtime: float + file_name: str + line_number: int + +@dataclass(unsafe_hash=True) +class StatsProfile: + total_tt: float + func_profiles: dict[str, FunctionProfile] + +_SortArgDict: TypeAlias = dict[str, tuple[tuple[tuple[int, int], ...], str]] + +class Stats: + sort_arg_dict_default: _SortArgDict + def __init__( + self, + arg: None | str | Profile | _cProfile = ..., + /, + *args: None | str | Profile | _cProfile | Self, + stream: IO[Any] | None = None, + ) -> None: ... + def init(self, arg: None | str | Profile | _cProfile) -> None: ... + def load_stats(self, arg: None | str | Profile | _cProfile) -> None: ... + def get_top_level_stats(self) -> None: ... + def add(self, *arg_list: None | str | Profile | _cProfile | Self) -> Self: ... + def dump_stats(self, filename: StrOrBytesPath) -> None: ... + def get_sort_arg_defs(self) -> _SortArgDict: ... + @overload + def sort_stats(self, field: Literal[-1, 0, 1, 2]) -> Self: ... + @overload + def sort_stats(self, *field: str) -> Self: ... + def reverse_order(self) -> Self: ... + def strip_dirs(self) -> Self: ... + def calc_callees(self) -> None: ... + def eval_print_amount(self, sel: _Selector, list: list[str], msg: str) -> tuple[list[str], str]: ... + def get_stats_profile(self) -> StatsProfile: ... + def get_print_list(self, sel_list: Iterable[_Selector]) -> tuple[int, list[str]]: ... + def print_stats(self, *amount: _Selector) -> Self: ... + def print_callees(self, *amount: _Selector) -> Self: ... + def print_callers(self, *amount: _Selector) -> Self: ... + def print_call_heading(self, name_size: int, column_title: str) -> None: ... + def print_call_line(self, name_size: int, source: str, call_dict: dict[str, Any], arrow: str = "->") -> None: ... + def print_title(self) -> None: ... + def print_line(self, func: str) -> None: ... 
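A small sketch showing how the pstats.Stats and SortKey types above are typically used together with cProfile (illustrative only):

# Profile a function, then load the collected stats into pstats for reporting.
import cProfile
import pstats
from pstats import SortKey

def busy() -> int:
    return sum(i * i for i in range(100_000))

with cProfile.Profile() as prof:      # cProfile.Profile objects are accepted by Stats()
    busy()

stats = pstats.Stats(prof)
stats.strip_dirs().sort_stats(SortKey.CUMULATIVE).print_stats(10)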
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pty.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pty.pyi new file mode 100644 index 0000000..d1c78f9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pty.pyi @@ -0,0 +1,28 @@ +import sys +from collections.abc import Callable, Iterable +from typing import Final +from typing_extensions import TypeAlias, deprecated + +if sys.platform != "win32": + __all__ = ["openpty", "fork", "spawn"] + _Reader: TypeAlias = Callable[[int], bytes] + + STDIN_FILENO: Final = 0 + STDOUT_FILENO: Final = 1 + STDERR_FILENO: Final = 2 + + CHILD: Final = 0 + def openpty() -> tuple[int, int]: ... + + if sys.version_info < (3, 14): + if sys.version_info >= (3, 12): + @deprecated("Deprecated since Python 3.12; removed in Python 3.14. Use `openpty()` instead.") + def master_open() -> tuple[int, str]: ... + @deprecated("Deprecated since Python 3.12; removed in Python 3.14. Use `openpty()` instead.") + def slave_open(tty_name: str) -> int: ... + else: + def master_open() -> tuple[int, str]: ... + def slave_open(tty_name: str) -> int: ... + + def fork() -> tuple[int, int]: ... + def spawn(argv: str | Iterable[str], master_read: _Reader = ..., stdin_read: _Reader = ...) -> int: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pwd.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pwd.pyi new file mode 100644 index 0000000..a84ba32 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pwd.pyi @@ -0,0 +1,28 @@ +import sys +from _typeshed import structseq +from typing import Any, Final, final + +if sys.platform != "win32": + @final + class struct_passwd(structseq[Any], tuple[str, str, int, int, str, str, str]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("pw_name", "pw_passwd", "pw_uid", "pw_gid", "pw_gecos", "pw_dir", "pw_shell") + + @property + def pw_name(self) -> str: ... + @property + def pw_passwd(self) -> str: ... + @property + def pw_uid(self) -> int: ... + @property + def pw_gid(self) -> int: ... + @property + def pw_gecos(self) -> str: ... + @property + def pw_dir(self) -> str: ... + @property + def pw_shell(self) -> str: ... + + def getpwall() -> list[struct_passwd]: ... + def getpwuid(uid: int, /) -> struct_passwd: ... + def getpwnam(name: str, /) -> struct_passwd: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/py_compile.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/py_compile.pyi new file mode 100644 index 0000000..334ce79 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/py_compile.pyi @@ -0,0 +1,34 @@ +import enum +import sys +from typing import AnyStr + +__all__ = ["compile", "main", "PyCompileError", "PycInvalidationMode"] + +class PyCompileError(Exception): + exc_type_name: str + exc_value: BaseException + file: str + msg: str + def __init__(self, exc_type: type[BaseException], exc_value: BaseException, file: str, msg: str = "") -> None: ... + +class PycInvalidationMode(enum.Enum): + TIMESTAMP = 1 + CHECKED_HASH = 2 + UNCHECKED_HASH = 3 + +def _get_default_invalidation_mode() -> PycInvalidationMode: ... +def compile( + file: AnyStr, + cfile: AnyStr | None = None, + dfile: AnyStr | None = None, + doraise: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, + quiet: int = 0, +) -> AnyStr | None: ... + +if sys.version_info >= (3, 10): + def main() -> None: ... + +else: + def main(args: list[str] | None = None) -> int: ... 
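A brief usage sketch for the pwd and py_compile APIs typed above (illustrative only; pwd is POSIX-only):

import os
import pwd
import py_compile
import tempfile
from pathlib import Path

# struct_passwd exposes pw_name, pw_uid, pw_dir, pw_shell, etc. as attributes.
entry = pwd.getpwuid(os.getuid())
print(entry.pw_name, entry.pw_shell)

# compile() returns the path of the written .pyc; doraise=True raises PyCompileError on failure.
src = Path(tempfile.mkdtemp()) / "hello.py"
src.write_text('print("hello")\n')
pyc_path = py_compile.compile(
    str(src),
    doraise=True,
    invalidation_mode=py_compile.PycInvalidationMode.CHECKED_HASH,
)
print(pyc_path)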
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pyclbr.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pyclbr.pyi new file mode 100644 index 0000000..504a5d5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pyclbr.pyi @@ -0,0 +1,74 @@ +import sys +from collections.abc import Mapping, Sequence + +__all__ = ["readmodule", "readmodule_ex", "Class", "Function"] + +class _Object: + module: str + name: str + file: int + lineno: int + + if sys.version_info >= (3, 10): + end_lineno: int | None + + parent: _Object | None + + # This is a dict at runtime, but we're typing it as Mapping to + # avoid variance issues in the subclasses + children: Mapping[str, _Object] + + if sys.version_info >= (3, 10): + def __init__( + self, module: str, name: str, file: str, lineno: int, end_lineno: int | None, parent: _Object | None + ) -> None: ... + else: + def __init__(self, module: str, name: str, file: str, lineno: int, parent: _Object | None) -> None: ... + +class Function(_Object): + if sys.version_info >= (3, 10): + is_async: bool + + parent: Function | Class | None + children: dict[str, Class | Function] + + if sys.version_info >= (3, 10): + def __init__( + self, + module: str, + name: str, + file: str, + lineno: int, + parent: Function | Class | None = None, + is_async: bool = False, + *, + end_lineno: int | None = None, + ) -> None: ... + else: + def __init__(self, module: str, name: str, file: str, lineno: int, parent: Function | Class | None = None) -> None: ... + +class Class(_Object): + super: list[Class | str] | None + methods: dict[str, int] + parent: Class | None + children: dict[str, Class | Function] + + if sys.version_info >= (3, 10): + def __init__( + self, + module: str, + name: str, + super_: list[Class | str] | None, + file: str, + lineno: int, + parent: Class | None = None, + *, + end_lineno: int | None = None, + ) -> None: ... + else: + def __init__( + self, module: str, name: str, super: list[Class | str] | None, file: str, lineno: int, parent: Class | None = None + ) -> None: ... + +def readmodule(module: str, path: Sequence[str] | None = None) -> dict[str, Class]: ... +def readmodule_ex(module: str, path: Sequence[str] | None = None) -> dict[str, Class | Function | list[str]]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pydoc.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pydoc.pyi new file mode 100644 index 0000000..935f942 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pydoc.pyi @@ -0,0 +1,341 @@ +import sys +from _typeshed import OptExcInfo, SupportsWrite, Unused +from abc import abstractmethod +from builtins import list as _list # "list" conflicts with method name +from collections.abc import Callable, Container, Mapping, MutableMapping +from reprlib import Repr +from types import MethodType, ModuleType, TracebackType +from typing import IO, Any, AnyStr, Final, NoReturn, Protocol, TypeVar, type_check_only +from typing_extensions import TypeGuard, deprecated + +__all__ = ["help"] + +_T = TypeVar("_T") + +__author__: Final[str] +__date__: Final[str] +__version__: Final[str] +__credits__: Final[str] + +@type_check_only +class _Pager(Protocol): + def __call__(self, text: str, title: str = "") -> None: ... + +def pathdirs() -> list[str]: ... +def getdoc(object: object) -> str: ... +def splitdoc(doc: AnyStr) -> tuple[AnyStr, AnyStr]: ... +def classname(object: object, modname: str) -> str: ... +def isdata(object: object) -> bool: ... 
+def replace(text: AnyStr, *pairs: AnyStr) -> AnyStr: ... +def cram(text: str, maxlen: int) -> str: ... +def stripid(text: str) -> str: ... +def allmethods(cl: type) -> MutableMapping[str, MethodType]: ... +def visiblename(name: str, all: Container[str] | None = None, obj: object = None) -> bool: ... +def classify_class_attrs(object: object) -> list[tuple[str, str, type, str]]: ... + +if sys.version_info >= (3, 13): + @deprecated("Deprecated since Python 3.13.") + def ispackage(path: str) -> bool: ... # undocumented + +else: + def ispackage(path: str) -> bool: ... # undocumented + +def source_synopsis(file: IO[AnyStr]) -> AnyStr | None: ... +def synopsis(filename: str, cache: MutableMapping[str, tuple[int, str]] = {}) -> str | None: ... + +class ErrorDuringImport(Exception): + filename: str + exc: type[BaseException] | None + value: BaseException | None + tb: TracebackType | None + def __init__(self, filename: str, exc_info: OptExcInfo) -> None: ... + +def importfile(path: str) -> ModuleType: ... +def safeimport(path: str, forceload: bool = ..., cache: MutableMapping[str, ModuleType] = {}) -> ModuleType | None: ... + +class Doc: + PYTHONDOCS: str + def document(self, object: object, name: str | None = None, *args: Any) -> str: ... + def fail(self, object: object, name: str | None = None, *args: Any) -> NoReturn: ... + @abstractmethod + def docmodule(self, object: object, name: str | None = None, *args: Any) -> str: ... + @abstractmethod + def docclass(self, object: object, name: str | None = None, *args: Any) -> str: ... + @abstractmethod + def docroutine(self, object: object, name: str | None = None, *args: Any) -> str: ... + @abstractmethod + def docother(self, object: object, name: str | None = None, *args: Any) -> str: ... + @abstractmethod + def docproperty(self, object: object, name: str | None = None, *args: Any) -> str: ... + @abstractmethod + def docdata(self, object: object, name: str | None = None, *args: Any) -> str: ... + def getdocloc(self, object: object, basedir: str = ...) -> str | None: ... + +class HTMLRepr(Repr): + def __init__(self) -> None: ... + def escape(self, text: str) -> str: ... + def repr(self, object: object) -> str: ... + def repr1(self, x: object, level: complex) -> str: ... + def repr_string(self, x: str, level: complex) -> str: ... + def repr_str(self, x: str, level: complex) -> str: ... + def repr_instance(self, x: object, level: complex) -> str: ... + def repr_unicode(self, x: AnyStr, level: complex) -> str: ... + +class HTMLDoc(Doc): + _repr_instance: HTMLRepr + repr = _repr_instance.repr + escape = _repr_instance.escape + def page(self, title: str, contents: str) -> str: ... + if sys.version_info >= (3, 11): + def heading(self, title: str, extras: str = "") -> str: ... + def section( + self, + title: str, + cls: str, + contents: str, + width: int = 6, + prelude: str = "", + marginalia: str | None = None, + gap: str = " ", + ) -> str: ... + def multicolumn(self, list: list[_T], format: Callable[[_T], str]) -> str: ... + else: + def heading(self, title: str, fgcol: str, bgcol: str, extras: str = "") -> str: ... + def section( + self, + title: str, + fgcol: str, + bgcol: str, + contents: str, + width: int = 6, + prelude: str = "", + marginalia: str | None = None, + gap: str = " ", + ) -> str: ... + def multicolumn(self, list: list[_T], format: Callable[[_T], str], cols: int = 4) -> str: ... + + def bigsection(self, title: str, *args: Any) -> str: ... + def preformat(self, text: str) -> str: ... + def grey(self, text: str) -> str: ... 
+ def namelink(self, name: str, *dicts: MutableMapping[str, str]) -> str: ... + def classlink(self, object: object, modname: str) -> str: ... + def modulelink(self, object: object) -> str: ... + def modpkglink(self, modpkginfo: tuple[str, str, bool, bool]) -> str: ... + def markup( + self, + text: str, + escape: Callable[[str], str] | None = None, + funcs: Mapping[str, str] = {}, + classes: Mapping[str, str] = {}, + methods: Mapping[str, str] = {}, + ) -> str: ... + def formattree( + self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = None + ) -> str: ... + def docmodule(self, object: object, name: str | None = None, mod: str | None = None, *ignored: Unused) -> str: ... + def docclass( + self, + object: object, + name: str | None = None, + mod: str | None = None, + funcs: Mapping[str, str] = {}, + classes: Mapping[str, str] = {}, + *ignored: Unused, + ) -> str: ... + def formatvalue(self, object: object) -> str: ... + def docother(self, object: object, name: str | None = None, mod: Any | None = None, *ignored: Unused) -> str: ... + if sys.version_info >= (3, 11): + def docroutine( # type: ignore[override] + self, + object: object, + name: str | None = None, + mod: str | None = None, + funcs: Mapping[str, str] = {}, + classes: Mapping[str, str] = {}, + methods: Mapping[str, str] = {}, + cl: type | None = None, + homecls: type | None = None, + ) -> str: ... + def docproperty( + self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None, *ignored: Unused + ) -> str: ... + def docdata( + self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None, *ignored: Unused + ) -> str: ... + else: + def docroutine( # type: ignore[override] + self, + object: object, + name: str | None = None, + mod: str | None = None, + funcs: Mapping[str, str] = {}, + classes: Mapping[str, str] = {}, + methods: Mapping[str, str] = {}, + cl: type | None = None, + ) -> str: ... + def docproperty(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docdata(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + if sys.version_info >= (3, 11): + def parentlink(self, object: type | ModuleType, modname: str) -> str: ... + + def index(self, dir: str, shadowed: MutableMapping[str, bool] | None = None) -> str: ... + def filelink(self, url: str, path: str) -> str: ... + +class TextRepr(Repr): + def __init__(self) -> None: ... + def repr1(self, x: object, level: complex) -> str: ... + def repr_string(self, x: str, level: complex) -> str: ... + def repr_str(self, x: str, level: complex) -> str: ... + def repr_instance(self, x: object, level: complex) -> str: ... + +class TextDoc(Doc): + _repr_instance: TextRepr + repr = _repr_instance.repr + def bold(self, text: str) -> str: ... + def indent(self, text: str, prefix: str = " ") -> str: ... + def section(self, title: str, contents: str) -> str: ... + def formattree( + self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = None, prefix: str = "" + ) -> str: ... + def docclass(self, object: object, name: str | None = None, mod: str | None = None, *ignored: Unused) -> str: ... + def formatvalue(self, object: object) -> str: ... 
+ if sys.version_info >= (3, 11): + def docroutine( # type: ignore[override] + self, + object: object, + name: str | None = None, + mod: str | None = None, + cl: Any | None = None, + homecls: Any | None = None, + ) -> str: ... + def docmodule(self, object: object, name: str | None = None, mod: Any | None = None, *ignored: Unused) -> str: ... + def docproperty( + self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None, *ignored: Unused + ) -> str: ... + def docdata( + self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None, *ignored: Unused + ) -> str: ... + def docother( + self, + object: object, + name: str | None = None, + mod: str | None = None, + parent: str | None = None, + *ignored: Unused, + maxlen: int | None = None, + doc: Any | None = None, + ) -> str: ... + else: + def docroutine(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docmodule(self, object: object, name: str | None = None, mod: Any | None = None) -> str: ... # type: ignore[override] + def docproperty(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docdata(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docother( # type: ignore[override] + self, + object: object, + name: str | None = None, + mod: str | None = None, + parent: str | None = None, + maxlen: int | None = None, + doc: Any | None = None, + ) -> str: ... + +if sys.version_info >= (3, 13): + def pager(text: str, title: str = "") -> None: ... + +else: + def pager(text: str) -> None: ... + +def plain(text: str) -> str: ... +def describe(thing: Any) -> str: ... +def locate(path: str, forceload: bool = ...) -> object: ... + +if sys.version_info >= (3, 13): + def get_pager() -> _Pager: ... + def pipe_pager(text: str, cmd: str, title: str = "") -> None: ... + def tempfile_pager(text: str, cmd: str, title: str = "") -> None: ... + def tty_pager(text: str, title: str = "") -> None: ... + def plain_pager(text: str, title: str = "") -> None: ... + + # For backwards compatibility. + getpager = get_pager + pipepager = pipe_pager + tempfilepager = tempfile_pager + ttypager = tty_pager + plainpager = plain_pager +else: + def getpager() -> Callable[[str], None]: ... + def pipepager(text: str, cmd: str) -> None: ... + def tempfilepager(text: str, cmd: str) -> None: ... + def ttypager(text: str) -> None: ... + def plainpager(text: str) -> None: ... + +text: TextDoc +html: HTMLDoc + +def resolve(thing: str | object, forceload: bool = ...) -> tuple[object, str] | None: ... +def render_doc( + thing: str | object, title: str = "Python Library Documentation: %s", forceload: bool = ..., renderer: Doc | None = None +) -> str: ... + +if sys.version_info >= (3, 11): + def doc( + thing: str | object, + title: str = "Python Library Documentation: %s", + forceload: bool = ..., + output: SupportsWrite[str] | None = None, + is_cli: bool = False, + ) -> None: ... + +else: + def doc( + thing: str | object, + title: str = "Python Library Documentation: %s", + forceload: bool = ..., + output: SupportsWrite[str] | None = None, + ) -> None: ... + +def writedoc(thing: str | object, forceload: bool = ...) -> None: ... +def writedocs(dir: str, pkgpath: str = "", done: Any | None = None) -> None: ... 
+ +class Helper: + keywords: dict[str, str | tuple[str, str]] + symbols: dict[str, str] + topics: dict[str, str | tuple[str, ...]] + def __init__(self, input: IO[str] | None = None, output: IO[str] | None = None) -> None: ... + @property + def input(self) -> IO[str]: ... + @property + def output(self) -> IO[str]: ... + def __call__(self, request: str | Helper | object = ...) -> None: ... + def interact(self) -> None: ... + def getline(self, prompt: str) -> str: ... + if sys.version_info >= (3, 11): + def help(self, request: Any, is_cli: bool = False) -> None: ... + else: + def help(self, request: Any) -> None: ... + + def intro(self) -> None: ... + def list(self, items: _list[str], columns: int = 4, width: int = 80) -> None: ... + def listkeywords(self) -> None: ... + def listsymbols(self) -> None: ... + def listtopics(self) -> None: ... + def showtopic(self, topic: str, more_xrefs: str = "") -> None: ... + def showsymbol(self, symbol: str) -> None: ... + def listmodules(self, key: str = "") -> None: ... + +help: Helper + +class ModuleScanner: + quit: bool + def run( + self, + callback: Callable[[str | None, str, str], object], + key: str | None = None, + completer: Callable[[], object] | None = None, + onerror: Callable[[str], object] | None = None, + ) -> None: ... + +def apropos(key: str) -> None: ... +def ispath(x: object) -> TypeGuard[str]: ... +def cli() -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pydoc_data/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pydoc_data/__init__.pyi new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pydoc_data/topics.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pydoc_data/topics.pyi new file mode 100644 index 0000000..ce907a4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pydoc_data/topics.pyi @@ -0,0 +1,3 @@ +from typing import Final + +topics: Final[dict[str, str]] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pyexpat/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pyexpat/__init__.pyi new file mode 100644 index 0000000..21e6760 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pyexpat/__init__.pyi @@ -0,0 +1,82 @@ +from _typeshed import ReadableBuffer, SupportsRead +from collections.abc import Callable +from pyexpat import errors as errors, model as model +from typing import Any, Final, final +from typing_extensions import CapsuleType, TypeAlias +from xml.parsers.expat import ExpatError as ExpatError + +EXPAT_VERSION: Final[str] # undocumented +version_info: tuple[int, int, int] # undocumented +native_encoding: str # undocumented +features: list[tuple[str, int]] # undocumented + +error = ExpatError +XML_PARAM_ENTITY_PARSING_NEVER: Final = 0 +XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE: Final = 1 +XML_PARAM_ENTITY_PARSING_ALWAYS: Final = 2 + +_Model: TypeAlias = tuple[int, int, str | None, tuple[Any, ...]] + +@final +class XMLParserType: + def Parse(self, data: str | ReadableBuffer, isfinal: bool = False, /) -> int: ... + def ParseFile(self, file: SupportsRead[bytes], /) -> int: ... + def SetBase(self, base: str, /) -> None: ... + def GetBase(self) -> str | None: ... + def GetInputContext(self) -> bytes | None: ... + def ExternalEntityParserCreate(self, context: str | None, encoding: str = ..., /) -> XMLParserType: ... + def SetParamEntityParsing(self, flag: int, /) -> int: ... 
+ def UseForeignDTD(self, flag: bool = True, /) -> None: ... + def GetReparseDeferralEnabled(self) -> bool: ... + def SetReparseDeferralEnabled(self, enabled: bool, /) -> None: ... + @property + def intern(self) -> dict[str, str]: ... + buffer_size: int + buffer_text: bool + buffer_used: int + namespace_prefixes: bool # undocumented + ordered_attributes: bool + specified_attributes: bool + ErrorByteIndex: int + ErrorCode: int + ErrorColumnNumber: int + ErrorLineNumber: int + CurrentByteIndex: int + CurrentColumnNumber: int + CurrentLineNumber: int + XmlDeclHandler: Callable[[str, str | None, int], Any] | None + StartDoctypeDeclHandler: Callable[[str, str | None, str | None, bool], Any] | None + EndDoctypeDeclHandler: Callable[[], Any] | None + ElementDeclHandler: Callable[[str, _Model], Any] | None + AttlistDeclHandler: Callable[[str, str, str, str | None, bool], Any] | None + StartElementHandler: ( + Callable[[str, dict[str, str]], Any] + | Callable[[str, list[str]], Any] + | Callable[[str, dict[str, str], list[str]], Any] + | None + ) + EndElementHandler: Callable[[str], Any] | None + ProcessingInstructionHandler: Callable[[str, str], Any] | None + CharacterDataHandler: Callable[[str], Any] | None + UnparsedEntityDeclHandler: Callable[[str, str | None, str, str | None, str], Any] | None + EntityDeclHandler: Callable[[str, bool, str | None, str | None, str, str | None, str | None], Any] | None + NotationDeclHandler: Callable[[str, str | None, str, str | None], Any] | None + StartNamespaceDeclHandler: Callable[[str, str], Any] | None + EndNamespaceDeclHandler: Callable[[str], Any] | None + CommentHandler: Callable[[str], Any] | None + StartCdataSectionHandler: Callable[[], Any] | None + EndCdataSectionHandler: Callable[[], Any] | None + DefaultHandler: Callable[[str], Any] | None + DefaultHandlerExpand: Callable[[str], Any] | None + NotStandaloneHandler: Callable[[], int] | None + ExternalEntityRefHandler: Callable[[str, str | None, str | None, str | None], int] | None + SkippedEntityHandler: Callable[[str, bool], Any] | None + +def ErrorString(code: int, /) -> str: ... + +# intern is undocumented +def ParserCreate( + encoding: str | None = None, namespace_separator: str | None = None, intern: dict[str, Any] | None = None +) -> XMLParserType: ... 
+ +expat_CAPI: CapsuleType diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pyexpat/errors.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pyexpat/errors.pyi new file mode 100644 index 0000000..493ae03 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pyexpat/errors.pyi @@ -0,0 +1,53 @@ +import sys +from typing import Final +from typing_extensions import LiteralString + +codes: dict[str, int] +messages: dict[int, str] + +XML_ERROR_ABORTED: Final[LiteralString] +XML_ERROR_ASYNC_ENTITY: Final[LiteralString] +XML_ERROR_ATTRIBUTE_EXTERNAL_ENTITY_REF: Final[LiteralString] +XML_ERROR_BAD_CHAR_REF: Final[LiteralString] +XML_ERROR_BINARY_ENTITY_REF: Final[LiteralString] +XML_ERROR_CANT_CHANGE_FEATURE_ONCE_PARSING: Final[LiteralString] +XML_ERROR_DUPLICATE_ATTRIBUTE: Final[LiteralString] +XML_ERROR_ENTITY_DECLARED_IN_PE: Final[LiteralString] +XML_ERROR_EXTERNAL_ENTITY_HANDLING: Final[LiteralString] +XML_ERROR_FEATURE_REQUIRES_XML_DTD: Final[LiteralString] +XML_ERROR_FINISHED: Final[LiteralString] +XML_ERROR_INCOMPLETE_PE: Final[LiteralString] +XML_ERROR_INCORRECT_ENCODING: Final[LiteralString] +XML_ERROR_INVALID_TOKEN: Final[LiteralString] +XML_ERROR_JUNK_AFTER_DOC_ELEMENT: Final[LiteralString] +XML_ERROR_MISPLACED_XML_PI: Final[LiteralString] +XML_ERROR_NOT_STANDALONE: Final[LiteralString] +XML_ERROR_NOT_SUSPENDED: Final[LiteralString] +XML_ERROR_NO_ELEMENTS: Final[LiteralString] +XML_ERROR_NO_MEMORY: Final[LiteralString] +XML_ERROR_PARAM_ENTITY_REF: Final[LiteralString] +XML_ERROR_PARTIAL_CHAR: Final[LiteralString] +XML_ERROR_PUBLICID: Final[LiteralString] +XML_ERROR_RECURSIVE_ENTITY_REF: Final[LiteralString] +XML_ERROR_SUSPENDED: Final[LiteralString] +XML_ERROR_SUSPEND_PE: Final[LiteralString] +XML_ERROR_SYNTAX: Final[LiteralString] +XML_ERROR_TAG_MISMATCH: Final[LiteralString] +XML_ERROR_TEXT_DECL: Final[LiteralString] +XML_ERROR_UNBOUND_PREFIX: Final[LiteralString] +XML_ERROR_UNCLOSED_CDATA_SECTION: Final[LiteralString] +XML_ERROR_UNCLOSED_TOKEN: Final[LiteralString] +XML_ERROR_UNDECLARING_PREFIX: Final[LiteralString] +XML_ERROR_UNDEFINED_ENTITY: Final[LiteralString] +XML_ERROR_UNEXPECTED_STATE: Final[LiteralString] +XML_ERROR_UNKNOWN_ENCODING: Final[LiteralString] +XML_ERROR_XML_DECL: Final[LiteralString] +if sys.version_info >= (3, 11): + XML_ERROR_RESERVED_PREFIX_XML: Final[LiteralString] + XML_ERROR_RESERVED_PREFIX_XMLNS: Final[LiteralString] + XML_ERROR_RESERVED_NAMESPACE_URI: Final[LiteralString] + XML_ERROR_INVALID_ARGUMENT: Final[LiteralString] + XML_ERROR_NO_BUFFER: Final[LiteralString] + XML_ERROR_AMPLIFICATION_LIMIT_BREACH: Final[LiteralString] +if sys.version_info >= (3, 14): + XML_ERROR_NOT_STARTED: Final[LiteralString] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pyexpat/model.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pyexpat/model.pyi new file mode 100644 index 0000000..bac8f36 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/pyexpat/model.pyi @@ -0,0 +1,13 @@ +from typing import Final + +XML_CTYPE_ANY: Final = 2 +XML_CTYPE_EMPTY: Final = 1 +XML_CTYPE_MIXED: Final = 3 +XML_CTYPE_NAME: Final = 4 +XML_CTYPE_CHOICE: Final = 5 +XML_CTYPE_SEQ: Final = 6 + +XML_CQUANT_NONE: Final = 0 +XML_CQUANT_OPT: Final = 1 +XML_CQUANT_REP: Final = 2 +XML_CQUANT_PLUS: Final = 3 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/queue.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/queue.pyi new file mode 100644 index 0000000..65e2ac1 --- 
/dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/queue.pyi @@ -0,0 +1,55 @@ +import sys +from _queue import Empty as Empty, SimpleQueue as SimpleQueue +from _typeshed import SupportsRichComparisonT +from threading import Condition, Lock +from types import GenericAlias +from typing import Any, Generic, TypeVar + +__all__ = ["Empty", "Full", "Queue", "PriorityQueue", "LifoQueue", "SimpleQueue"] +if sys.version_info >= (3, 13): + __all__ += ["ShutDown"] + +_T = TypeVar("_T") + +class Full(Exception): ... + +if sys.version_info >= (3, 13): + class ShutDown(Exception): ... + +class Queue(Generic[_T]): + maxsize: int + + mutex: Lock # undocumented + not_empty: Condition # undocumented + not_full: Condition # undocumented + all_tasks_done: Condition # undocumented + unfinished_tasks: int # undocumented + if sys.version_info >= (3, 13): + is_shutdown: bool # undocumented + # Despite the fact that `queue` has `deque` type, + # we treat it as `Any` to allow different implementations in subtypes. + queue: Any # undocumented + def __init__(self, maxsize: int = 0) -> None: ... + def _init(self, maxsize: int) -> None: ... + def empty(self) -> bool: ... + def full(self) -> bool: ... + def get(self, block: bool = True, timeout: float | None = None) -> _T: ... + def get_nowait(self) -> _T: ... + if sys.version_info >= (3, 13): + def shutdown(self, immediate: bool = False) -> None: ... + + def _get(self) -> _T: ... + def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: ... + def put_nowait(self, item: _T) -> None: ... + def _put(self, item: _T) -> None: ... + def join(self) -> None: ... + def qsize(self) -> int: ... + def _qsize(self) -> int: ... + def task_done(self) -> None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +class PriorityQueue(Queue[SupportsRichComparisonT]): + queue: list[SupportsRichComparisonT] + +class LifoQueue(Queue[_T]): + queue: list[_T] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/quopri.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/quopri.pyi new file mode 100644 index 0000000..be6892f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/quopri.pyi @@ -0,0 +1,12 @@ +from _typeshed import ReadableBuffer, SupportsNoArgReadline, SupportsRead, SupportsWrite +from typing import Protocol, type_check_only + +__all__ = ["encode", "decode", "encodestring", "decodestring"] + +@type_check_only +class _Input(SupportsRead[bytes], SupportsNoArgReadline[bytes], Protocol): ... + +def encode(input: _Input, output: SupportsWrite[bytes], quotetabs: int, header: bool = False) -> None: ... +def encodestring(s: ReadableBuffer, quotetabs: bool = False, header: bool = False) -> bytes: ... +def decode(input: _Input, output: SupportsWrite[bytes], header: bool = False) -> None: ... +def decodestring(s: str | ReadableBuffer, header: bool = False) -> bytes: ... 
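A short sketch of the queue.Queue and quopri APIs typed above (illustrative only; the worker thread and item values are made up for the example):

import queue
import quopri
import threading

q: queue.Queue[int] = queue.Queue(maxsize=8)

def worker() -> None:
    while True:
        item = q.get()
        # ... process item ...
        q.task_done()          # pairs with q.join() below

threading.Thread(target=worker, daemon=True).start()
for i in range(5):
    q.put(i)
q.join()                       # blocks until every queued item has been marked done

encoded = quopri.encodestring(b"hello = world\n")
print(encoded)                       # quoted-printable bytes: b'hello =3D world\n'
print(quopri.decodestring(encoded))  # round-trips back to the original bytes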
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/random.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/random.pyi new file mode 100644 index 0000000..a797794 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/random.pyi @@ -0,0 +1,133 @@ +import _random +import sys +from _typeshed import SupportsLenAndGetItem +from collections.abc import Callable, Iterable, MutableSequence, Sequence, Set as AbstractSet +from fractions import Fraction +from typing import Any, ClassVar, NoReturn, TypeVar +from typing_extensions import Self + +__all__ = [ + "Random", + "seed", + "random", + "uniform", + "randint", + "choice", + "sample", + "randrange", + "shuffle", + "normalvariate", + "lognormvariate", + "expovariate", + "vonmisesvariate", + "gammavariate", + "triangular", + "gauss", + "betavariate", + "paretovariate", + "weibullvariate", + "getstate", + "setstate", + "getrandbits", + "choices", + "SystemRandom", + "randbytes", +] + +if sys.version_info >= (3, 12): + __all__ += ["binomialvariate"] + +_T = TypeVar("_T") + +class Random(_random.Random): + VERSION: ClassVar[int] + def __init__(self, x: int | float | str | bytes | bytearray | None = None) -> None: ... # noqa: Y041 + # Using other `seed` types is deprecated since 3.9 and removed in 3.11 + # Ignore Y041, since random.seed doesn't treat int like a float subtype. Having an explicit + # int better documents conventional usage of random.seed. + if sys.version_info < (3, 10): + # this is a workaround for pyright correctly flagging an inconsistent inherited constructor, see #14624 + def __new__(cls, x: int | float | str | bytes | bytearray | None = None) -> Self: ... # noqa: Y041 + + def seed(self, a: int | float | str | bytes | bytearray | None = None, version: int = 2) -> None: ... # type: ignore[override] # noqa: Y041 + def getstate(self) -> tuple[Any, ...]: ... + def setstate(self, state: tuple[Any, ...]) -> None: ... + def randrange(self, start: int, stop: int | None = None, step: int = 1) -> int: ... + def randint(self, a: int, b: int) -> int: ... + def randbytes(self, n: int) -> bytes: ... + def choice(self, seq: SupportsLenAndGetItem[_T]) -> _T: ... + def choices( + self, + population: SupportsLenAndGetItem[_T], + weights: Sequence[float | Fraction] | None = None, + *, + cum_weights: Sequence[float | Fraction] | None = None, + k: int = 1, + ) -> list[_T]: ... + if sys.version_info >= (3, 11): + def shuffle(self, x: MutableSequence[Any]) -> None: ... + else: + def shuffle(self, x: MutableSequence[Any], random: Callable[[], float] | None = None) -> None: ... + if sys.version_info >= (3, 11): + def sample(self, population: Sequence[_T], k: int, *, counts: Iterable[int] | None = None) -> list[_T]: ... + else: + def sample( + self, population: Sequence[_T] | AbstractSet[_T], k: int, *, counts: Iterable[int] | None = None + ) -> list[_T]: ... + + def uniform(self, a: float, b: float) -> float: ... + def triangular(self, low: float = 0.0, high: float = 1.0, mode: float | None = None) -> float: ... + if sys.version_info >= (3, 12): + def binomialvariate(self, n: int = 1, p: float = 0.5) -> int: ... + + def betavariate(self, alpha: float, beta: float) -> float: ... + if sys.version_info >= (3, 12): + def expovariate(self, lambd: float = 1.0) -> float: ... + else: + def expovariate(self, lambd: float) -> float: ... + + def gammavariate(self, alpha: float, beta: float) -> float: ... + if sys.version_info >= (3, 11): + def gauss(self, mu: float = 0.0, sigma: float = 1.0) -> float: ... 
+ def normalvariate(self, mu: float = 0.0, sigma: float = 1.0) -> float: ... + else: + def gauss(self, mu: float, sigma: float) -> float: ... + def normalvariate(self, mu: float, sigma: float) -> float: ... + + def lognormvariate(self, mu: float, sigma: float) -> float: ... + def vonmisesvariate(self, mu: float, kappa: float) -> float: ... + def paretovariate(self, alpha: float) -> float: ... + def weibullvariate(self, alpha: float, beta: float) -> float: ... + +# SystemRandom is not implemented for all OS's; good on Windows & Linux +class SystemRandom(Random): + def getrandbits(self, k: int) -> int: ... # k can be passed by keyword + def getstate(self, *args: Any, **kwds: Any) -> NoReturn: ... + def setstate(self, *args: Any, **kwds: Any) -> NoReturn: ... + +_inst: Random +seed = _inst.seed +random = _inst.random +uniform = _inst.uniform +triangular = _inst.triangular +randint = _inst.randint +choice = _inst.choice +randrange = _inst.randrange +sample = _inst.sample +shuffle = _inst.shuffle +choices = _inst.choices +normalvariate = _inst.normalvariate +lognormvariate = _inst.lognormvariate +expovariate = _inst.expovariate +vonmisesvariate = _inst.vonmisesvariate +gammavariate = _inst.gammavariate +gauss = _inst.gauss +if sys.version_info >= (3, 12): + binomialvariate = _inst.binomialvariate +betavariate = _inst.betavariate +paretovariate = _inst.paretovariate +weibullvariate = _inst.weibullvariate +getstate = _inst.getstate +setstate = _inst.setstate +getrandbits = _inst.getrandbits +randbytes = _inst.randbytes diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/re.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/re.pyi new file mode 100644 index 0000000..fb2a06d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/re.pyi @@ -0,0 +1,314 @@ +import enum +import sre_compile +import sre_constants +import sys +from _typeshed import MaybeNone, ReadableBuffer +from collections.abc import Callable, Iterator, Mapping +from types import GenericAlias +from typing import Any, AnyStr, Final, Generic, Literal, TypeVar, final, overload +from typing_extensions import TypeAlias, deprecated + +__all__ = [ + "match", + "fullmatch", + "search", + "sub", + "subn", + "split", + "findall", + "finditer", + "compile", + "purge", + "escape", + "error", + "A", + "I", + "L", + "M", + "S", + "X", + "U", + "ASCII", + "IGNORECASE", + "LOCALE", + "MULTILINE", + "DOTALL", + "VERBOSE", + "UNICODE", + "Match", + "Pattern", +] +if sys.version_info < (3, 13): + __all__ += ["template"] + +if sys.version_info >= (3, 11): + __all__ += ["NOFLAG", "RegexFlag"] + +if sys.version_info >= (3, 13): + __all__ += ["PatternError"] + + PatternError = sre_constants.error + +_T = TypeVar("_T") + +# The implementation defines this in re._constants (version_info >= 3, 11) or +# sre_constants. Typeshed has it here because its __module__ attribute is set to "re". +class error(Exception): + msg: str + pattern: str | bytes | None + pos: int | None + lineno: int + colno: int + def __init__(self, msg: str, pattern: str | bytes | None = None, pos: int | None = None) -> None: ... + +@final +class Match(Generic[AnyStr]): + @property + def pos(self) -> int: ... + @property + def endpos(self) -> int: ... + @property + def lastindex(self) -> int | None: ... + @property + def lastgroup(self) -> str | None: ... + @property + def string(self) -> AnyStr: ... + + # The regular expression object whose match() or search() method produced + # this match instance. 
+ @property + def re(self) -> Pattern[AnyStr]: ... + @overload + def expand(self: Match[str], template: str) -> str: ... + @overload + def expand(self: Match[bytes], template: ReadableBuffer) -> bytes: ... + @overload + def expand(self, template: AnyStr) -> AnyStr: ... + # group() returns "AnyStr" or "AnyStr | None", depending on the pattern. + @overload + def group(self, group: Literal[0] = 0, /) -> AnyStr: ... + @overload + def group(self, group: str | int, /) -> AnyStr | MaybeNone: ... + @overload + def group(self, group1: str | int, group2: str | int, /, *groups: str | int) -> tuple[AnyStr | MaybeNone, ...]: ... + # Each item of groups()'s return tuple is either "AnyStr" or + # "AnyStr | None", depending on the pattern. + @overload + def groups(self) -> tuple[AnyStr | MaybeNone, ...]: ... + @overload + def groups(self, default: _T) -> tuple[AnyStr | _T, ...]: ... + # Each value in groupdict()'s return dict is either "AnyStr" or + # "AnyStr | None", depending on the pattern. + @overload + def groupdict(self) -> dict[str, AnyStr | MaybeNone]: ... + @overload + def groupdict(self, default: _T) -> dict[str, AnyStr | _T]: ... + def start(self, group: int | str = 0, /) -> int: ... + def end(self, group: int | str = 0, /) -> int: ... + def span(self, group: int | str = 0, /) -> tuple[int, int]: ... + @property + def regs(self) -> tuple[tuple[int, int], ...]: ... # undocumented + # __getitem__() returns "AnyStr" or "AnyStr | None", depending on the pattern. + @overload + def __getitem__(self, key: Literal[0], /) -> AnyStr: ... + @overload + def __getitem__(self, key: int | str, /) -> AnyStr | MaybeNone: ... + def __copy__(self) -> Match[AnyStr]: ... + def __deepcopy__(self, memo: Any, /) -> Match[AnyStr]: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +@final +class Pattern(Generic[AnyStr]): + @property + def flags(self) -> int: ... + @property + def groupindex(self) -> Mapping[str, int]: ... + @property + def groups(self) -> int: ... + @property + def pattern(self) -> AnyStr: ... + @overload + def search(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ... + @overload + def search(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ... + @overload + def search(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... + @overload + def match(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ... + @overload + def match(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ... + @overload + def match(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... + @overload + def fullmatch(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ... + @overload + def fullmatch( + self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize + ) -> Match[bytes] | None: ... + @overload + def fullmatch(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... + @overload + def split(self: Pattern[str], string: str, maxsplit: int = 0) -> list[str | MaybeNone]: ... + @overload + def split(self: Pattern[bytes], string: ReadableBuffer, maxsplit: int = 0) -> list[bytes | MaybeNone]: ... + @overload + def split(self, string: AnyStr, maxsplit: int = 0) -> list[AnyStr | MaybeNone]: ... 
+ # return type depends on the number of groups in the pattern + @overload + def findall(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> list[Any]: ... + @overload + def findall(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> list[Any]: ... + @overload + def findall(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> list[AnyStr]: ... + @overload + def finditer(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[str]]: ... + @overload + def finditer( + self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize + ) -> Iterator[Match[bytes]]: ... + @overload + def finditer(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[AnyStr]]: ... + @overload + def sub(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> str: ... + @overload + def sub( + self: Pattern[bytes], + repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = 0, + ) -> bytes: ... + @overload + def sub(self, repl: AnyStr | Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = 0) -> AnyStr: ... + @overload + def subn(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> tuple[str, int]: ... + @overload + def subn( + self: Pattern[bytes], + repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = 0, + ) -> tuple[bytes, int]: ... + @overload + def subn(self, repl: AnyStr | Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = 0) -> tuple[AnyStr, int]: ... + def __copy__(self) -> Pattern[AnyStr]: ... + def __deepcopy__(self, memo: Any, /) -> Pattern[AnyStr]: ... + def __eq__(self, value: object, /) -> bool: ... + def __hash__(self) -> int: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +# ----- re variables and constants ----- + +class RegexFlag(enum.IntFlag): + A = sre_compile.SRE_FLAG_ASCII + ASCII = A + DEBUG = sre_compile.SRE_FLAG_DEBUG + I = sre_compile.SRE_FLAG_IGNORECASE + IGNORECASE = I + L = sre_compile.SRE_FLAG_LOCALE + LOCALE = L + M = sre_compile.SRE_FLAG_MULTILINE + MULTILINE = M + S = sre_compile.SRE_FLAG_DOTALL + DOTALL = S + X = sre_compile.SRE_FLAG_VERBOSE + VERBOSE = X + U = sre_compile.SRE_FLAG_UNICODE + UNICODE = U + if sys.version_info < (3, 13): + T = sre_compile.SRE_FLAG_TEMPLATE + TEMPLATE = T + if sys.version_info >= (3, 11): + NOFLAG = 0 + +A: Final = RegexFlag.A +ASCII: Final = RegexFlag.ASCII +DEBUG: Final = RegexFlag.DEBUG +I: Final = RegexFlag.I +IGNORECASE: Final = RegexFlag.IGNORECASE +L: Final = RegexFlag.L +LOCALE: Final = RegexFlag.LOCALE +M: Final = RegexFlag.M +MULTILINE: Final = RegexFlag.MULTILINE +S: Final = RegexFlag.S +DOTALL: Final = RegexFlag.DOTALL +X: Final = RegexFlag.X +VERBOSE: Final = RegexFlag.VERBOSE +U: Final = RegexFlag.U +UNICODE: Final = RegexFlag.UNICODE +if sys.version_info < (3, 13): + T: Final = RegexFlag.T + TEMPLATE: Final = RegexFlag.TEMPLATE +if sys.version_info >= (3, 11): + NOFLAG: Final = RegexFlag.NOFLAG +_FlagsType: TypeAlias = int | RegexFlag + +# Type-wise the compile() overloads are unnecessary, they could also be modeled using +# unions in the parameter types. However mypy has a bug regarding TypeVar +# constraints (https://github.com/python/mypy/issues/11880), +# which limits us here because AnyStr is a constrained TypeVar. 
+ +# pattern arguments do *not* accept arbitrary buffers such as bytearray, +# because the pattern must be hashable. +@overload +def compile(pattern: AnyStr, flags: _FlagsType = 0) -> Pattern[AnyStr]: ... +@overload +def compile(pattern: Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: ... +@overload +def search(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ... +@overload +def search(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... +@overload +def match(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ... +@overload +def match(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... +@overload +def fullmatch(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ... +@overload +def fullmatch(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... +@overload +def split(pattern: str | Pattern[str], string: str, maxsplit: int = 0, flags: _FlagsType = 0) -> list[str | MaybeNone]: ... +@overload +def split( + pattern: bytes | Pattern[bytes], string: ReadableBuffer, maxsplit: int = 0, flags: _FlagsType = 0 +) -> list[bytes | MaybeNone]: ... +@overload +def findall(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> list[Any]: ... +@overload +def findall(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> list[Any]: ... +@overload +def finditer(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Iterator[Match[str]]: ... +@overload +def finditer(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Iterator[Match[bytes]]: ... +@overload +def sub( + pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0, flags: _FlagsType = 0 +) -> str: ... +@overload +def sub( + pattern: bytes | Pattern[bytes], + repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = 0, + flags: _FlagsType = 0, +) -> bytes: ... +@overload +def subn( + pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0, flags: _FlagsType = 0 +) -> tuple[str, int]: ... +@overload +def subn( + pattern: bytes | Pattern[bytes], + repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = 0, + flags: _FlagsType = 0, +) -> tuple[bytes, int]: ... +def escape(pattern: AnyStr) -> AnyStr: ... +def purge() -> None: ... + +if sys.version_info < (3, 13): + if sys.version_info >= (3, 11): + @deprecated("Deprecated since Python 3.11; removed in Python 3.13. Use `re.compile()` instead.") + def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: ... # undocumented + else: + def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: ... 
# undocumented diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/readline.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/readline.pyi new file mode 100644 index 0000000..7325c26 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/readline.pyi @@ -0,0 +1,40 @@ +import sys +from _typeshed import StrOrBytesPath +from collections.abc import Callable, Sequence +from typing import Literal +from typing_extensions import TypeAlias + +if sys.platform != "win32": + _Completer: TypeAlias = Callable[[str, int], str | None] + _CompDisp: TypeAlias = Callable[[str, Sequence[str], int], None] + + def parse_and_bind(string: str, /) -> None: ... + def read_init_file(filename: StrOrBytesPath | None = None, /) -> None: ... + def get_line_buffer() -> str: ... + def insert_text(string: str, /) -> None: ... + def redisplay() -> None: ... + def read_history_file(filename: StrOrBytesPath | None = None, /) -> None: ... + def write_history_file(filename: StrOrBytesPath | None = None, /) -> None: ... + def append_history_file(nelements: int, filename: StrOrBytesPath | None = None, /) -> None: ... + def get_history_length() -> int: ... + def set_history_length(length: int, /) -> None: ... + def clear_history() -> None: ... + def get_current_history_length() -> int: ... + def get_history_item(index: int, /) -> str: ... + def remove_history_item(pos: int, /) -> None: ... + def replace_history_item(pos: int, line: str, /) -> None: ... + def add_history(string: str, /) -> None: ... + def set_auto_history(enabled: bool, /) -> None: ... + def set_startup_hook(function: Callable[[], object] | None = None, /) -> None: ... + def set_pre_input_hook(function: Callable[[], object] | None = None, /) -> None: ... + def set_completer(function: _Completer | None = None, /) -> None: ... + def get_completer() -> _Completer | None: ... + def get_completion_type() -> int: ... + def get_begidx() -> int: ... + def get_endidx() -> int: ... + def set_completer_delims(string: str, /) -> None: ... + def get_completer_delims() -> str: ... + def set_completion_display_matches_hook(function: _CompDisp | None = None, /) -> None: ... + + if sys.version_info >= (3, 13): + backend: Literal["readline", "editline"] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/reprlib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/reprlib.pyi new file mode 100644 index 0000000..68ada65 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/reprlib.pyi @@ -0,0 +1,65 @@ +import sys +from array import array +from collections import deque +from collections.abc import Callable +from typing import Any +from typing_extensions import TypeAlias + +__all__ = ["Repr", "repr", "recursive_repr"] + +_ReprFunc: TypeAlias = Callable[[Any], str] + +def recursive_repr(fillvalue: str = "...") -> Callable[[_ReprFunc], _ReprFunc]: ... 
+ +class Repr: + maxlevel: int + maxdict: int + maxlist: int + maxtuple: int + maxset: int + maxfrozenset: int + maxdeque: int + maxarray: int + maxlong: int + maxstring: int + maxother: int + if sys.version_info >= (3, 11): + fillvalue: str + if sys.version_info >= (3, 12): + indent: str | int | None + + if sys.version_info >= (3, 12): + def __init__( + self, + *, + maxlevel: int = 6, + maxtuple: int = 6, + maxlist: int = 6, + maxarray: int = 5, + maxdict: int = 4, + maxset: int = 6, + maxfrozenset: int = 6, + maxdeque: int = 6, + maxstring: int = 30, + maxlong: int = 40, + maxother: int = 30, + fillvalue: str = "...", + indent: str | int | None = None, + ) -> None: ... + + def repr(self, x: Any) -> str: ... + def repr1(self, x: Any, level: int) -> str: ... + def repr_tuple(self, x: tuple[Any, ...], level: int) -> str: ... + def repr_list(self, x: list[Any], level: int) -> str: ... + def repr_array(self, x: array[Any], level: int) -> str: ... + def repr_set(self, x: set[Any], level: int) -> str: ... + def repr_frozenset(self, x: frozenset[Any], level: int) -> str: ... + def repr_deque(self, x: deque[Any], level: int) -> str: ... + def repr_dict(self, x: dict[Any, Any], level: int) -> str: ... + def repr_str(self, x: str, level: int) -> str: ... + def repr_int(self, x: int, level: int) -> str: ... + def repr_instance(self, x: Any, level: int) -> str: ... + +aRepr: Repr + +def repr(x: object) -> str: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/resource.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/resource.pyi new file mode 100644 index 0000000..f99cd5b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/resource.pyi @@ -0,0 +1,95 @@ +import sys +from _typeshed import structseq +from typing import Final, final + +if sys.platform != "win32": + # Depends on resource.h + RLIMIT_AS: Final[int] + RLIMIT_CORE: Final[int] + RLIMIT_CPU: Final[int] + RLIMIT_DATA: Final[int] + RLIMIT_FSIZE: Final[int] + RLIMIT_MEMLOCK: Final[int] + RLIMIT_NOFILE: Final[int] + RLIMIT_NPROC: Final[int] + RLIMIT_RSS: Final[int] + RLIMIT_STACK: Final[int] + RLIM_INFINITY: Final[int] + RUSAGE_CHILDREN: Final[int] + RUSAGE_SELF: Final[int] + if sys.platform == "linux": + RLIMIT_MSGQUEUE: Final[int] + RLIMIT_NICE: Final[int] + RLIMIT_OFILE: Final[int] + RLIMIT_RTPRIO: Final[int] + RLIMIT_RTTIME: Final[int] + RLIMIT_SIGPENDING: Final[int] + RUSAGE_THREAD: Final[int] + + @final + class struct_rusage( + structseq[float], tuple[float, float, int, int, int, int, int, int, int, int, int, int, int, int, int, int] + ): + if sys.version_info >= (3, 10): + __match_args__: Final = ( + "ru_utime", + "ru_stime", + "ru_maxrss", + "ru_ixrss", + "ru_idrss", + "ru_isrss", + "ru_minflt", + "ru_majflt", + "ru_nswap", + "ru_inblock", + "ru_oublock", + "ru_msgsnd", + "ru_msgrcv", + "ru_nsignals", + "ru_nvcsw", + "ru_nivcsw", + ) + + @property + def ru_utime(self) -> float: ... + @property + def ru_stime(self) -> float: ... + @property + def ru_maxrss(self) -> int: ... + @property + def ru_ixrss(self) -> int: ... + @property + def ru_idrss(self) -> int: ... + @property + def ru_isrss(self) -> int: ... + @property + def ru_minflt(self) -> int: ... + @property + def ru_majflt(self) -> int: ... + @property + def ru_nswap(self) -> int: ... + @property + def ru_inblock(self) -> int: ... + @property + def ru_oublock(self) -> int: ... + @property + def ru_msgsnd(self) -> int: ... + @property + def ru_msgrcv(self) -> int: ... + @property + def ru_nsignals(self) -> int: ... 
+ @property + def ru_nvcsw(self) -> int: ... + @property + def ru_nivcsw(self) -> int: ... + + def getpagesize() -> int: ... + def getrlimit(resource: int, /) -> tuple[int, int]: ... + def getrusage(who: int, /) -> struct_rusage: ... + def setrlimit(resource: int, limits: tuple[int, int], /) -> None: ... + if sys.platform == "linux": + if sys.version_info >= (3, 12): + def prlimit(pid: int, resource: int, limits: tuple[int, int] | None = None, /) -> tuple[int, int]: ... + else: + def prlimit(pid: int, resource: int, limits: tuple[int, int] = ..., /) -> tuple[int, int]: ... + error = OSError diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/rlcompleter.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/rlcompleter.pyi new file mode 100644 index 0000000..8d9477e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/rlcompleter.pyi @@ -0,0 +1,9 @@ +from typing import Any + +__all__ = ["Completer"] + +class Completer: + def __init__(self, namespace: dict[str, Any] | None = None) -> None: ... + def complete(self, text: str, state: int) -> str | None: ... + def attr_matches(self, text: str) -> list[str]: ... + def global_matches(self, text: str) -> list[str]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/runpy.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/runpy.pyi new file mode 100644 index 0000000..d4406ea --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/runpy.pyi @@ -0,0 +1,24 @@ +from _typeshed import Unused +from types import ModuleType +from typing import Any +from typing_extensions import Self + +__all__ = ["run_module", "run_path"] + +class _TempModule: + mod_name: str + module: ModuleType + def __init__(self, mod_name: str) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + +class _ModifiedArgv0: + value: Any + def __init__(self, value: Any) -> None: ... + def __enter__(self) -> None: ... + def __exit__(self, *args: Unused) -> None: ... + +def run_module( + mod_name: str, init_globals: dict[str, Any] | None = None, run_name: str | None = None, alter_sys: bool = False +) -> dict[str, Any]: ... +def run_path(path_name: str, init_globals: dict[str, Any] | None = None, run_name: str | None = None) -> dict[str, Any]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sched.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sched.pyi new file mode 100644 index 0000000..52f87ab --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sched.pyi @@ -0,0 +1,46 @@ +import sys +from collections.abc import Callable +from typing import Any, ClassVar, NamedTuple, type_check_only +from typing_extensions import TypeAlias + +__all__ = ["scheduler"] + +_ActionCallback: TypeAlias = Callable[..., Any] + +if sys.version_info >= (3, 10): + class Event(NamedTuple): + time: float + priority: Any + sequence: int + action: _ActionCallback + argument: tuple[Any, ...] + kwargs: dict[str, Any] + +else: + @type_check_only + class _EventBase(NamedTuple): + time: float + priority: Any + action: _ActionCallback + argument: tuple[Any, ...] + kwargs: dict[str, Any] + + class Event(_EventBase): + __hash__: ClassVar[None] # type: ignore[assignment] + +class scheduler: + timefunc: Callable[[], float] + delayfunc: Callable[[float], object] + + def __init__(self, timefunc: Callable[[], float] = ..., delayfunc: Callable[[float], object] = ...) -> None: ... 
+ def enterabs( + self, time: float, priority: Any, action: _ActionCallback, argument: tuple[Any, ...] = (), kwargs: dict[str, Any] = ... + ) -> Event: ... + def enter( + self, delay: float, priority: Any, action: _ActionCallback, argument: tuple[Any, ...] = (), kwargs: dict[str, Any] = ... + ) -> Event: ... + def run(self, blocking: bool = True) -> float | None: ... + def cancel(self, event: Event) -> None: ... + def empty(self) -> bool: ... + @property + def queue(self) -> list[Event]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/secrets.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/secrets.pyi new file mode 100644 index 0000000..4861b6f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/secrets.pyi @@ -0,0 +1,15 @@ +from _typeshed import SupportsLenAndGetItem +from hmac import compare_digest as compare_digest +from random import SystemRandom as SystemRandom +from typing import TypeVar + +__all__ = ["choice", "randbelow", "randbits", "SystemRandom", "token_bytes", "token_hex", "token_urlsafe", "compare_digest"] + +_T = TypeVar("_T") + +def randbelow(exclusive_upper_bound: int) -> int: ... +def randbits(k: int) -> int: ... +def choice(seq: SupportsLenAndGetItem[_T]) -> _T: ... +def token_bytes(nbytes: int | None = None) -> bytes: ... +def token_hex(nbytes: int | None = None) -> str: ... +def token_urlsafe(nbytes: int | None = None) -> str: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/select.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/select.pyi new file mode 100644 index 0000000..43a9e42 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/select.pyi @@ -0,0 +1,167 @@ +import sys +from _typeshed import FileDescriptorLike +from collections.abc import Iterable +from types import TracebackType +from typing import Any, ClassVar, Final, TypeVar, final +from typing_extensions import Never, Self + +if sys.platform != "win32": + PIPE_BUF: Final[int] + POLLERR: Final[int] + POLLHUP: Final[int] + POLLIN: Final[int] + if sys.platform == "linux": + POLLMSG: Final[int] + POLLNVAL: Final[int] + POLLOUT: Final[int] + POLLPRI: Final[int] + POLLRDBAND: Final[int] + if sys.platform == "linux": + POLLRDHUP: Final[int] + POLLRDNORM: Final[int] + POLLWRBAND: Final[int] + POLLWRNORM: Final[int] + + # This is actually a function that returns an instance of a class. + # The class is not accessible directly, and also calls itself select.poll. + class poll: + # default value is select.POLLIN | select.POLLPRI | select.POLLOUT + def register(self, fd: FileDescriptorLike, eventmask: int = 7, /) -> None: ... + def modify(self, fd: FileDescriptorLike, eventmask: int, /) -> None: ... + def unregister(self, fd: FileDescriptorLike, /) -> None: ... + def poll(self, timeout: float | None = None, /) -> list[tuple[int, int]]: ... + +_R = TypeVar("_R", default=Never) +_W = TypeVar("_W", default=Never) +_X = TypeVar("_X", default=Never) + +def select( + rlist: Iterable[_R], wlist: Iterable[_W], xlist: Iterable[_X], timeout: float | None = None, / +) -> tuple[list[_R], list[_W], list[_X]]: ... + +error = OSError + +if sys.platform != "linux" and sys.platform != "win32": + # BSD only + @final + class kevent: + data: Any + fflags: int + filter: int + flags: int + ident: int + udata: Any + def __init__( + self, + ident: FileDescriptorLike, + filter: int = ..., + flags: int = ..., + fflags: int = ..., + data: Any = ..., + udata: Any = ..., + ) -> None: ... 
+ __hash__: ClassVar[None] # type: ignore[assignment] + + # BSD only + @final + class kqueue: + closed: bool + def __init__(self) -> None: ... + def close(self) -> None: ... + def control( + self, changelist: Iterable[kevent] | None, maxevents: int, timeout: float | None = None, / + ) -> list[kevent]: ... + def fileno(self) -> int: ... + @classmethod + def fromfd(cls, fd: FileDescriptorLike, /) -> kqueue: ... + + KQ_EV_ADD: Final[int] + KQ_EV_CLEAR: Final[int] + KQ_EV_DELETE: Final[int] + KQ_EV_DISABLE: Final[int] + KQ_EV_ENABLE: Final[int] + KQ_EV_EOF: Final[int] + KQ_EV_ERROR: Final[int] + KQ_EV_FLAG1: Final[int] + KQ_EV_ONESHOT: Final[int] + KQ_EV_SYSFLAGS: Final[int] + KQ_FILTER_AIO: Final[int] + if sys.platform != "darwin": + KQ_FILTER_NETDEV: Final[int] + KQ_FILTER_PROC: Final[int] + KQ_FILTER_READ: Final[int] + KQ_FILTER_SIGNAL: Final[int] + KQ_FILTER_TIMER: Final[int] + KQ_FILTER_VNODE: Final[int] + KQ_FILTER_WRITE: Final[int] + KQ_NOTE_ATTRIB: Final[int] + KQ_NOTE_CHILD: Final[int] + KQ_NOTE_DELETE: Final[int] + KQ_NOTE_EXEC: Final[int] + KQ_NOTE_EXIT: Final[int] + KQ_NOTE_EXTEND: Final[int] + KQ_NOTE_FORK: Final[int] + KQ_NOTE_LINK: Final[int] + if sys.platform != "darwin": + KQ_NOTE_LINKDOWN: Final[int] + KQ_NOTE_LINKINV: Final[int] + KQ_NOTE_LINKUP: Final[int] + KQ_NOTE_LOWAT: Final[int] + KQ_NOTE_PCTRLMASK: Final[int] + KQ_NOTE_PDATAMASK: Final[int] + KQ_NOTE_RENAME: Final[int] + KQ_NOTE_REVOKE: Final[int] + KQ_NOTE_TRACK: Final[int] + KQ_NOTE_TRACKERR: Final[int] + KQ_NOTE_WRITE: Final[int] + +if sys.platform == "linux": + @final + class epoll: + def __new__(self, sizehint: int = ..., flags: int = ...) -> Self: ... + def __enter__(self) -> Self: ... + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = ..., + exc_tb: TracebackType | None = None, + /, + ) -> None: ... + def close(self) -> None: ... + closed: bool + def fileno(self) -> int: ... + def register(self, fd: FileDescriptorLike, eventmask: int = ...) -> None: ... + def modify(self, fd: FileDescriptorLike, eventmask: int) -> None: ... + def unregister(self, fd: FileDescriptorLike) -> None: ... + def poll(self, timeout: float | None = None, maxevents: int = -1) -> list[tuple[int, int]]: ... + @classmethod + def fromfd(cls, fd: FileDescriptorLike, /) -> epoll: ... + + EPOLLERR: Final[int] + EPOLLEXCLUSIVE: Final[int] + EPOLLET: Final[int] + EPOLLHUP: Final[int] + EPOLLIN: Final[int] + EPOLLMSG: Final[int] + EPOLLONESHOT: Final[int] + EPOLLOUT: Final[int] + EPOLLPRI: Final[int] + EPOLLRDBAND: Final[int] + EPOLLRDHUP: Final[int] + EPOLLRDNORM: Final[int] + EPOLLWRBAND: Final[int] + EPOLLWRNORM: Final[int] + EPOLL_CLOEXEC: Final[int] + if sys.version_info >= (3, 14): + EPOLLWAKEUP: Final[int] + +if sys.platform != "linux" and sys.platform != "darwin" and sys.platform != "win32": + # Solaris only + class devpoll: + def close(self) -> None: ... + closed: bool + def fileno(self) -> int: ... + def register(self, fd: FileDescriptorLike, eventmask: int = ...) -> None: ... + def modify(self, fd: FileDescriptorLike, eventmask: int = ...) -> None: ... + def unregister(self, fd: FileDescriptorLike) -> None: ... + def poll(self, timeout: float | None = ...) -> list[tuple[int, int]]: ... 
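The select stub above types poll, epoll, kqueue, and devpoll behind platform checks, while select.select() itself is available on every platform. A minimal, illustrative sketch of the select() call as typed there; the loopback socket and the 0.1 s timeout are arbitrary choices for the example, not anything from this commit:

import select
import socket

# A throwaway listening socket, used only to have a file descriptor to watch.
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(("127.0.0.1", 0))
server.listen()

# select(rlist, wlist, xlist, timeout) -> (readable, writable, exceptional),
# matching the overload in the stub; with no pending connection this simply
# returns three empty lists once the timeout expires.
readable, writable, exceptional = select.select([server], [], [], 0.1)
if server in readable:
    conn, _addr = server.accept()
    conn.close()
server.close()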
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/selectors.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/selectors.pyi new file mode 100644 index 0000000..bcca4e3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/selectors.pyi @@ -0,0 +1,69 @@ +import sys +from _typeshed import FileDescriptor, FileDescriptorLike, Unused +from abc import ABCMeta, abstractmethod +from collections.abc import Mapping +from typing import Any, Final, NamedTuple +from typing_extensions import Self, TypeAlias + +_EventMask: TypeAlias = int + +EVENT_READ: Final = 1 +EVENT_WRITE: Final = 2 + +class SelectorKey(NamedTuple): + fileobj: FileDescriptorLike + fd: FileDescriptor + events: _EventMask + data: Any + +class BaseSelector(metaclass=ABCMeta): + @abstractmethod + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + @abstractmethod + def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + def modify(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + @abstractmethod + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... + def close(self) -> None: ... + def get_key(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + @abstractmethod + def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + +class _BaseSelectorImpl(BaseSelector, metaclass=ABCMeta): + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + def modify(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... + +class SelectSelector(_BaseSelectorImpl): + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... + +class _PollLikeSelector(_BaseSelectorImpl): + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... + +if sys.platform != "win32": + class PollSelector(_PollLikeSelector): ... + +if sys.platform == "linux": + class EpollSelector(_PollLikeSelector): + def fileno(self) -> int: ... + +if sys.platform != "linux" and sys.platform != "darwin" and sys.platform != "win32": + # Solaris only + class DevpollSelector(_PollLikeSelector): + def fileno(self) -> int: ... + +if sys.platform != "win32" and sys.platform != "linux": + class KqueueSelector(_BaseSelectorImpl): + def fileno(self) -> int: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... + +# Not a real class at runtime, it is just a conditional alias to other real selectors. +# The runtime logic is more fine-grained than a `sys.platform` check; +# not really expressible in the stubs +class DefaultSelector(_BaseSelectorImpl): + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... + if sys.platform != "win32": + def fileno(self) -> int: ... 
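The selectors stub notes that DefaultSelector is not a real class at runtime but a conditional alias to the best available selector. A short usage sketch of the BaseSelector API typed above; the socket, the "accept" label passed as data, and the 0.1 s timeout are illustrative assumptions:

import selectors
import socket

sel = selectors.DefaultSelector()  # resolves to Epoll/Kqueue/Poll/SelectSelector

srv = socket.socket()
srv.bind(("127.0.0.1", 0))
srv.listen()
srv.setblocking(False)

# register() returns a SelectorKey(fileobj, fd, events, data).
key = sel.register(srv, selectors.EVENT_READ, data="accept")

# select() yields (SelectorKey, event_mask) pairs for ready objects; here
# nothing connects, so the list is empty after the timeout.
for ready_key, mask in sel.select(timeout=0.1):
    if ready_key.data == "accept":
        conn, _ = ready_key.fileobj.accept()
        conn.close()

sel.unregister(srv)
srv.close()
sel.close()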
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/shelve.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/shelve.pyi new file mode 100644 index 0000000..654c2ea --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/shelve.pyi @@ -0,0 +1,59 @@ +import sys +from _typeshed import StrOrBytesPath +from collections.abc import Iterator, MutableMapping +from dbm import _TFlags +from types import TracebackType +from typing import Any, TypeVar, overload +from typing_extensions import Self + +__all__ = ["Shelf", "BsdDbShelf", "DbfilenameShelf", "open"] + +_T = TypeVar("_T") +_VT = TypeVar("_VT") + +class Shelf(MutableMapping[str, _VT]): + def __init__( + self, dict: MutableMapping[bytes, bytes], protocol: int | None = None, writeback: bool = False, keyencoding: str = "utf-8" + ) -> None: ... + def __iter__(self) -> Iterator[str]: ... + def __len__(self) -> int: ... + @overload # type: ignore[override] + def get(self, key: str, default: None = None) -> _VT | None: ... + @overload + def get(self, key: str, default: _VT) -> _VT: ... + @overload + def get(self, key: str, default: _T) -> _VT | _T: ... + def __getitem__(self, key: str) -> _VT: ... + def __setitem__(self, key: str, value: _VT) -> None: ... + def __delitem__(self, key: str) -> None: ... + def __contains__(self, key: str) -> bool: ... # type: ignore[override] + def __enter__(self) -> Self: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __del__(self) -> None: ... + def close(self) -> None: ... + def sync(self) -> None: ... + +class BsdDbShelf(Shelf[_VT]): + def set_location(self, key: str) -> tuple[str, _VT]: ... + def next(self) -> tuple[str, _VT]: ... + def previous(self) -> tuple[str, _VT]: ... + def first(self) -> tuple[str, _VT]: ... + def last(self) -> tuple[str, _VT]: ... + +class DbfilenameShelf(Shelf[_VT]): + if sys.version_info >= (3, 11): + def __init__( + self, filename: StrOrBytesPath, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False + ) -> None: ... + else: + def __init__(self, filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> None: ... + +if sys.version_info >= (3, 11): + def open( + filename: StrOrBytesPath, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False + ) -> Shelf[Any]: ... + +else: + def open(filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> Shelf[Any]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/shlex.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/shlex.pyi new file mode 100644 index 0000000..1c27483 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/shlex.pyi @@ -0,0 +1,63 @@ +import sys +from collections import deque +from collections.abc import Iterable +from io import TextIOWrapper +from typing import Literal, Protocol, overload, type_check_only +from typing_extensions import Self, deprecated + +__all__ = ["shlex", "split", "quote", "join"] + +@type_check_only +class _ShlexInstream(Protocol): + def read(self, size: Literal[1], /) -> str: ... + def readline(self) -> object: ... + def close(self) -> object: ... + +if sys.version_info >= (3, 12): + def split(s: str | _ShlexInstream, comments: bool = False, posix: bool = True) -> list[str]: ... + +else: + @overload + def split(s: str | _ShlexInstream, comments: bool = False, posix: bool = True) -> list[str]: ... 
+ @overload + @deprecated("Passing None for 's' to shlex.split() is deprecated and will raise an error in Python 3.12.") + def split(s: None, comments: bool = False, posix: bool = True) -> list[str]: ... + +def join(split_command: Iterable[str]) -> str: ... +def quote(s: str) -> str: ... + +# TODO: Make generic over infile once PEP 696 is implemented. +class shlex: + commenters: str + wordchars: str + whitespace: str + escape: str + quotes: str + escapedquotes: str + whitespace_split: bool + infile: str | None + instream: _ShlexInstream + source: str + debug: int + lineno: int + token: str + filestack: deque[tuple[str | None, _ShlexInstream, int]] + eof: str | None + @property + def punctuation_chars(self) -> str: ... + def __init__( + self, + instream: str | _ShlexInstream | None = None, + infile: str | None = None, + posix: bool = False, + punctuation_chars: bool | str = False, + ) -> None: ... + def get_token(self) -> str | None: ... + def push_token(self, tok: str) -> None: ... + def read_token(self) -> str | None: ... + def sourcehook(self, newfile: str) -> tuple[str, TextIOWrapper] | None: ... + def push_source(self, newstream: str | _ShlexInstream, newfile: str | None = None) -> None: ... + def pop_source(self) -> None: ... + def error_leader(self, infile: str | None = None, lineno: int | None = None) -> str: ... + def __iter__(self) -> Self: ... + def __next__(self) -> str: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/shutil.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/shutil.pyi new file mode 100644 index 0000000..cc26cfc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/shutil.pyi @@ -0,0 +1,236 @@ +import os +import sys +from _typeshed import BytesPath, ExcInfo, FileDescriptorOrPath, MaybeNone, StrOrBytesPath, StrPath, SupportsRead, SupportsWrite +from collections.abc import Callable, Iterable, Sequence +from tarfile import _TarfileFilter +from typing import Any, AnyStr, NamedTuple, NoReturn, Protocol, TypeVar, overload, type_check_only +from typing_extensions import TypeAlias, deprecated + +__all__ = [ + "copyfileobj", + "copyfile", + "copymode", + "copystat", + "copy", + "copy2", + "copytree", + "move", + "rmtree", + "Error", + "SpecialFileError", + "make_archive", + "get_archive_formats", + "register_archive_format", + "unregister_archive_format", + "get_unpack_formats", + "register_unpack_format", + "unregister_unpack_format", + "unpack_archive", + "ignore_patterns", + "chown", + "which", + "get_terminal_size", + "SameFileError", + "disk_usage", +] +if sys.version_info < (3, 14): + __all__ += ["ExecError"] + +_StrOrBytesPathT = TypeVar("_StrOrBytesPathT", bound=StrOrBytesPath) +_StrPathT = TypeVar("_StrPathT", bound=StrPath) +_BytesPathT = TypeVar("_BytesPathT", bound=BytesPath) + +class Error(OSError): ... +class SameFileError(Error): ... +class SpecialFileError(OSError): ... + +if sys.version_info >= (3, 14): + ExecError = RuntimeError # Deprecated in Python 3.14; removal scheduled for Python 3.16 + +else: + class ExecError(OSError): ... + +class ReadError(OSError): ... +class RegistryError(Exception): ... + +def copyfileobj(fsrc: SupportsRead[AnyStr], fdst: SupportsWrite[AnyStr], length: int = 0) -> None: ... +def copyfile(src: StrOrBytesPath, dst: _StrOrBytesPathT, *, follow_symlinks: bool = True) -> _StrOrBytesPathT: ... +def copymode(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... 
+def copystat(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... +@overload +def copy(src: StrPath, dst: _StrPathT, *, follow_symlinks: bool = True) -> _StrPathT | str: ... +@overload +def copy(src: BytesPath, dst: _BytesPathT, *, follow_symlinks: bool = True) -> _BytesPathT | bytes: ... +@overload +def copy2(src: StrPath, dst: _StrPathT, *, follow_symlinks: bool = True) -> _StrPathT | str: ... +@overload +def copy2(src: BytesPath, dst: _BytesPathT, *, follow_symlinks: bool = True) -> _BytesPathT | bytes: ... +def ignore_patterns(*patterns: StrPath) -> Callable[[Any, list[str]], set[str]]: ... +def copytree( + src: StrPath, + dst: _StrPathT, + symlinks: bool = False, + ignore: None | Callable[[str, list[str]], Iterable[str]] | Callable[[StrPath, list[str]], Iterable[str]] = None, + copy_function: Callable[[str, str], object] = ..., + ignore_dangling_symlinks: bool = False, + dirs_exist_ok: bool = False, +) -> _StrPathT: ... + +_OnErrorCallback: TypeAlias = Callable[[Callable[..., Any], str, ExcInfo], object] +_OnExcCallback: TypeAlias = Callable[[Callable[..., Any], str, BaseException], object] + +@type_check_only +class _RmtreeType(Protocol): + avoids_symlink_attacks: bool + if sys.version_info >= (3, 12): + @overload + @deprecated("The `onerror` parameter is deprecated. Use `onexc` instead.") + def __call__( + self, + path: StrOrBytesPath, + ignore_errors: bool, + onerror: _OnErrorCallback | None, + *, + onexc: None = None, + dir_fd: int | None = None, + ) -> None: ... + @overload + @deprecated("The `onerror` parameter is deprecated. Use `onexc` instead.") + def __call__( + self, + path: StrOrBytesPath, + ignore_errors: bool = False, + *, + onerror: _OnErrorCallback | None, + onexc: None = None, + dir_fd: int | None = None, + ) -> None: ... + @overload + def __call__( + self, + path: StrOrBytesPath, + ignore_errors: bool = False, + *, + onexc: _OnExcCallback | None = None, + dir_fd: int | None = None, + ) -> None: ... + elif sys.version_info >= (3, 11): + def __call__( + self, + path: StrOrBytesPath, + ignore_errors: bool = False, + onerror: _OnErrorCallback | None = None, + *, + dir_fd: int | None = None, + ) -> None: ... + + else: + def __call__( + self, path: StrOrBytesPath, ignore_errors: bool = False, onerror: _OnErrorCallback | None = None + ) -> None: ... + +rmtree: _RmtreeType + +_CopyFn: TypeAlias = Callable[[str, str], object] | Callable[[StrPath, StrPath], object] + +# N.B. shutil.move appears to take bytes arguments, however, +# this does not work when dst is (or is within) an existing directory. +# (#6832) +def move(src: StrPath, dst: _StrPathT, copy_function: _CopyFn = ...) -> _StrPathT | str | MaybeNone: ... + +class _ntuple_diskusage(NamedTuple): + total: int + used: int + free: int + +def disk_usage(path: FileDescriptorOrPath) -> _ntuple_diskusage: ... + +# While chown can be imported on Windows, it doesn't actually work; +# see https://bugs.python.org/issue33140. We keep it here because it's +# in __all__. +if sys.version_info >= (3, 13): + @overload + def chown( + path: FileDescriptorOrPath, + user: str | int, + group: None = None, + *, + dir_fd: int | None = None, + follow_symlinks: bool = True, + ) -> None: ... + @overload + def chown( + path: FileDescriptorOrPath, + user: None = None, + *, + group: str | int, + dir_fd: int | None = None, + follow_symlinks: bool = True, + ) -> None: ... 
+ @overload + def chown( + path: FileDescriptorOrPath, user: None, group: str | int, *, dir_fd: int | None = None, follow_symlinks: bool = True + ) -> None: ... + @overload + def chown( + path: FileDescriptorOrPath, user: str | int, group: str | int, *, dir_fd: int | None = None, follow_symlinks: bool = True + ) -> None: ... + +else: + @overload + def chown(path: FileDescriptorOrPath, user: str | int, group: None = None) -> None: ... + @overload + def chown(path: FileDescriptorOrPath, user: None = None, *, group: str | int) -> None: ... + @overload + def chown(path: FileDescriptorOrPath, user: None, group: str | int) -> None: ... + @overload + def chown(path: FileDescriptorOrPath, user: str | int, group: str | int) -> None: ... + +if sys.platform == "win32" and sys.version_info < (3, 12): + @overload + @deprecated("On Windows before Python 3.12, using a PathLike as `cmd` would always fail or return `None`.") + def which(cmd: os.PathLike[str], mode: int = 1, path: StrPath | None = None) -> NoReturn: ... + +@overload +def which(cmd: StrPath, mode: int = 1, path: StrPath | None = None) -> str | None: ... +@overload +def which(cmd: bytes, mode: int = 1, path: StrPath | None = None) -> bytes | None: ... +def make_archive( + base_name: str, + format: str, + root_dir: StrPath | None = None, + base_dir: StrPath | None = None, + verbose: bool = ..., + dry_run: bool = ..., + owner: str | None = None, + group: str | None = None, + logger: Any | None = None, +) -> str: ... +def get_archive_formats() -> list[tuple[str, str]]: ... +@overload +def register_archive_format( + name: str, function: Callable[..., object], extra_args: Sequence[tuple[str, Any] | list[Any]], description: str = "" +) -> None: ... +@overload +def register_archive_format( + name: str, function: Callable[[str, str], object], extra_args: None = None, description: str = "" +) -> None: ... +def unregister_archive_format(name: str) -> None: ... +def unpack_archive( + filename: StrPath, extract_dir: StrPath | None = None, format: str | None = None, *, filter: _TarfileFilter | None = None +) -> None: ... +@overload +def register_unpack_format( + name: str, + extensions: list[str], + function: Callable[..., object], + extra_args: Sequence[tuple[str, Any]], + description: str = "", +) -> None: ... +@overload +def register_unpack_format( + name: str, extensions: list[str], function: Callable[[str, str], object], extra_args: None = None, description: str = "" +) -> None: ... +def unregister_unpack_format(name: str) -> None: ... +def get_unpack_formats() -> list[tuple[str, list[str], str]]: ... +def get_terminal_size(fallback: tuple[int, int] = (80, 24)) -> os.terminal_size: ... 
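The shutil stub distinguishes rmtree's deprecated onerror callback from the onexc callback added in Python 3.12. A hedged sketch of copytree plus rmtree with a 3.12-style onexc handler; the scratch paths and the _log_failure helper are hypothetical:

import shutil
import sys
import tempfile
from pathlib import Path

base = Path(tempfile.mkdtemp())          # hypothetical scratch directory
src = base / "src"
src.mkdir()
(src / "data.txt").write_text("hello")

# copytree(src, dst, dirs_exist_ok=...) as typed above.
dst = base / "dst"
shutil.copytree(src, dst, dirs_exist_ok=True)

def _log_failure(func, path, exc):
    # onexc form from the stub: (the failing callable, the path, the exception).
    print(f"{func.__name__} failed on {path}: {exc}", file=sys.stderr)

if sys.version_info >= (3, 12):
    shutil.rmtree(dst, onexc=_log_failure)
else:
    # Older interpreters only have the onerror/ignore_errors forms.
    shutil.rmtree(dst, ignore_errors=True)

shutil.rmtree(base, ignore_errors=True)  # clean up the scratch directory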
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/signal.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/signal.pyi new file mode 100644 index 0000000..c2668bd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/signal.pyi @@ -0,0 +1,187 @@ +import sys +from _typeshed import structseq +from collections.abc import Callable, Iterable +from enum import IntEnum +from types import FrameType +from typing import Any, Final, final +from typing_extensions import Never, TypeAlias + +NSIG: int + +class Signals(IntEnum): + SIGABRT = 6 + SIGFPE = 8 + SIGILL = 4 + SIGINT = 2 + SIGSEGV = 11 + SIGTERM = 15 + + if sys.platform == "win32": + SIGBREAK = 21 + CTRL_C_EVENT = 0 + CTRL_BREAK_EVENT = 1 + else: + SIGALRM = 14 + SIGBUS = 7 + SIGCHLD = 17 + SIGCONT = 18 + SIGHUP = 1 + SIGIO = 29 + SIGIOT = 6 + SIGKILL = 9 + SIGPIPE = 13 + SIGPROF = 27 + SIGQUIT = 3 + SIGSTOP = 19 + SIGSYS = 31 + SIGTRAP = 5 + SIGTSTP = 20 + SIGTTIN = 21 + SIGTTOU = 22 + SIGURG = 23 + SIGUSR1 = 10 + SIGUSR2 = 12 + SIGVTALRM = 26 + SIGWINCH = 28 + SIGXCPU = 24 + SIGXFSZ = 25 + if sys.platform != "linux": + SIGEMT = 7 + SIGINFO = 29 + if sys.platform != "darwin": + SIGCLD = 17 + SIGPOLL = 29 + SIGPWR = 30 + SIGRTMAX = 64 + SIGRTMIN = 34 + if sys.version_info >= (3, 11): + SIGSTKFLT = 16 + +class Handlers(IntEnum): + SIG_DFL = 0 + SIG_IGN = 1 + +SIG_DFL: Final = Handlers.SIG_DFL +SIG_IGN: Final = Handlers.SIG_IGN + +_SIGNUM: TypeAlias = int | Signals +_HANDLER: TypeAlias = Callable[[int, FrameType | None], Any] | int | Handlers | None + +def default_int_handler(signalnum: int, frame: FrameType | None, /) -> Never: ... + +if sys.version_info >= (3, 10): # arguments changed in 3.10.2 + def getsignal(signalnum: _SIGNUM) -> _HANDLER: ... + def signal(signalnum: _SIGNUM, handler: _HANDLER) -> _HANDLER: ... + +else: + def getsignal(signalnum: _SIGNUM, /) -> _HANDLER: ... + def signal(signalnum: _SIGNUM, handler: _HANDLER, /) -> _HANDLER: ... + +SIGABRT: Final = Signals.SIGABRT +SIGFPE: Final = Signals.SIGFPE +SIGILL: Final = Signals.SIGILL +SIGINT: Final = Signals.SIGINT +SIGSEGV: Final = Signals.SIGSEGV +SIGTERM: Final = Signals.SIGTERM + +if sys.platform == "win32": + SIGBREAK: Final = Signals.SIGBREAK + CTRL_C_EVENT: Final = Signals.CTRL_C_EVENT + CTRL_BREAK_EVENT: Final = Signals.CTRL_BREAK_EVENT +else: + if sys.platform != "linux": + SIGINFO: Final = Signals.SIGINFO + SIGEMT: Final = Signals.SIGEMT + SIGALRM: Final = Signals.SIGALRM + SIGBUS: Final = Signals.SIGBUS + SIGCHLD: Final = Signals.SIGCHLD + SIGCONT: Final = Signals.SIGCONT + SIGHUP: Final = Signals.SIGHUP + SIGIO: Final = Signals.SIGIO + SIGIOT: Final = Signals.SIGABRT # alias + SIGKILL: Final = Signals.SIGKILL + SIGPIPE: Final = Signals.SIGPIPE + SIGPROF: Final = Signals.SIGPROF + SIGQUIT: Final = Signals.SIGQUIT + SIGSTOP: Final = Signals.SIGSTOP + SIGSYS: Final = Signals.SIGSYS + SIGTRAP: Final = Signals.SIGTRAP + SIGTSTP: Final = Signals.SIGTSTP + SIGTTIN: Final = Signals.SIGTTIN + SIGTTOU: Final = Signals.SIGTTOU + SIGURG: Final = Signals.SIGURG + SIGUSR1: Final = Signals.SIGUSR1 + SIGUSR2: Final = Signals.SIGUSR2 + SIGVTALRM: Final = Signals.SIGVTALRM + SIGWINCH: Final = Signals.SIGWINCH + SIGXCPU: Final = Signals.SIGXCPU + SIGXFSZ: Final = Signals.SIGXFSZ + + class ItimerError(OSError): ... 
+ ITIMER_PROF: int + ITIMER_REAL: int + ITIMER_VIRTUAL: int + + class Sigmasks(IntEnum): + SIG_BLOCK = 0 + SIG_UNBLOCK = 1 + SIG_SETMASK = 2 + + SIG_BLOCK: Final = Sigmasks.SIG_BLOCK + SIG_UNBLOCK: Final = Sigmasks.SIG_UNBLOCK + SIG_SETMASK: Final = Sigmasks.SIG_SETMASK + def alarm(seconds: int, /) -> int: ... + def getitimer(which: int, /) -> tuple[float, float]: ... + def pause() -> None: ... + def pthread_kill(thread_id: int, signalnum: int, /) -> None: ... + if sys.version_info >= (3, 10): # arguments changed in 3.10.2 + def pthread_sigmask(how: int, mask: Iterable[int]) -> set[_SIGNUM]: ... + else: + def pthread_sigmask(how: int, mask: Iterable[int], /) -> set[_SIGNUM]: ... + + def setitimer(which: int, seconds: float, interval: float = 0.0, /) -> tuple[float, float]: ... + def siginterrupt(signalnum: int, flag: bool, /) -> None: ... + def sigpending() -> Any: ... + if sys.version_info >= (3, 10): # argument changed in 3.10.2 + def sigwait(sigset: Iterable[int]) -> _SIGNUM: ... + else: + def sigwait(sigset: Iterable[int], /) -> _SIGNUM: ... + if sys.platform != "darwin": + SIGCLD: Final = Signals.SIGCHLD # alias + SIGPOLL: Final = Signals.SIGIO # alias + SIGPWR: Final = Signals.SIGPWR + SIGRTMAX: Final = Signals.SIGRTMAX + SIGRTMIN: Final = Signals.SIGRTMIN + if sys.version_info >= (3, 11): + SIGSTKFLT: Final = Signals.SIGSTKFLT + + @final + class struct_siginfo(structseq[int], tuple[int, int, int, int, int, int, int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("si_signo", "si_code", "si_errno", "si_pid", "si_uid", "si_status", "si_band") + + @property + def si_signo(self) -> int: ... + @property + def si_code(self) -> int: ... + @property + def si_errno(self) -> int: ... + @property + def si_pid(self) -> int: ... + @property + def si_uid(self) -> int: ... + @property + def si_status(self) -> int: ... + @property + def si_band(self) -> int: ... + + def sigtimedwait(sigset: Iterable[int], timeout: float, /) -> struct_siginfo | None: ... + def sigwaitinfo(sigset: Iterable[int], /) -> struct_siginfo: ... + +def strsignal(signalnum: _SIGNUM, /) -> str | None: ... +def valid_signals() -> set[Signals]: ... +def raise_signal(signalnum: _SIGNUM, /) -> None: ... +def set_wakeup_fd(fd: int, /, *, warn_on_full_buffer: bool = True) -> int: ... + +if sys.platform == "linux": + def pidfd_send_signal(pidfd: int, sig: int, siginfo: None = None, flags: int = 0, /) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/site.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/site.pyi new file mode 100644 index 0000000..6e39677 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/site.pyi @@ -0,0 +1,36 @@ +import sys +from _typeshed import StrPath +from collections.abc import Iterable + +PREFIXES: list[str] +ENABLE_USER_SITE: bool | None +USER_SITE: str | None +USER_BASE: str | None + +def main() -> None: ... +def abs_paths() -> None: ... # undocumented +def addpackage(sitedir: StrPath, name: StrPath, known_paths: set[str] | None) -> set[str] | None: ... # undocumented +def addsitedir(sitedir: str, known_paths: set[str] | None = None) -> None: ... +def addsitepackages(known_paths: set[str] | None, prefixes: Iterable[str] | None = None) -> set[str] | None: ... # undocumented +def addusersitepackages(known_paths: set[str] | None) -> set[str] | None: ... # undocumented +def check_enableusersite() -> bool | None: ... # undocumented + +if sys.version_info >= (3, 13): + def gethistoryfile() -> str: ... 
# undocumented + +def enablerlcompleter() -> None: ... # undocumented + +if sys.version_info >= (3, 13): + def register_readline() -> None: ... # undocumented + +def execsitecustomize() -> None: ... # undocumented +def execusercustomize() -> None: ... # undocumented +def getsitepackages(prefixes: Iterable[str] | None = None) -> list[str]: ... +def getuserbase() -> str: ... +def getusersitepackages() -> str: ... +def makepath(*paths: StrPath) -> tuple[str, str]: ... # undocumented +def removeduppaths() -> set[str]: ... # undocumented +def setcopyright() -> None: ... # undocumented +def sethelper() -> None: ... # undocumented +def setquit() -> None: ... # undocumented +def venv(known_paths: set[str] | None) -> set[str] | None: ... # undocumented diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/smtpd.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/smtpd.pyi new file mode 100644 index 0000000..dee7e94 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/smtpd.pyi @@ -0,0 +1,92 @@ +import asynchat +import asyncore +import socket +import sys +from collections import defaultdict +from typing import Any +from typing_extensions import TypeAlias, deprecated + +if sys.version_info >= (3, 11): + __all__ = ["SMTPChannel", "SMTPServer", "DebuggingServer", "PureProxy"] +else: + __all__ = ["SMTPChannel", "SMTPServer", "DebuggingServer", "PureProxy", "MailmanProxy"] + +_Address: TypeAlias = tuple[str, int] # (host, port) + +class SMTPChannel(asynchat.async_chat): + COMMAND: int + DATA: int + + command_size_limits: defaultdict[str, int] + smtp_server: SMTPServer + conn: socket.socket + addr: Any + received_lines: list[str] + smtp_state: int + seen_greeting: str + mailfrom: str + rcpttos: list[str] + received_data: str + fqdn: str + peer: str + + command_size_limit: int + data_size_limit: int + + enable_SMTPUTF8: bool + @property + def max_command_size_limit(self) -> int: ... + def __init__( + self, + server: SMTPServer, + conn: socket.socket, + addr: Any, + data_size_limit: int = 33554432, + map: asyncore._MapType | None = None, + enable_SMTPUTF8: bool = False, + decode_data: bool = False, + ) -> None: ... + # base asynchat.async_chat.push() accepts bytes + def push(self, msg: str) -> None: ... # type: ignore[override] + def collect_incoming_data(self, data: bytes) -> None: ... + def found_terminator(self) -> None: ... + def smtp_HELO(self, arg: str) -> None: ... + def smtp_NOOP(self, arg: str) -> None: ... + def smtp_QUIT(self, arg: str) -> None: ... + def smtp_MAIL(self, arg: str) -> None: ... + def smtp_RCPT(self, arg: str) -> None: ... + def smtp_RSET(self, arg: str) -> None: ... + def smtp_DATA(self, arg: str) -> None: ... + def smtp_EHLO(self, arg: str) -> None: ... + def smtp_HELP(self, arg: str) -> None: ... + def smtp_VRFY(self, arg: str) -> None: ... + def smtp_EXPN(self, arg: str) -> None: ... + +class SMTPServer(asyncore.dispatcher): + channel_class: type[SMTPChannel] + + data_size_limit: int + enable_SMTPUTF8: bool + def __init__( + self, + localaddr: _Address, + remoteaddr: _Address, + data_size_limit: int = 33554432, + map: asyncore._MapType | None = None, + enable_SMTPUTF8: bool = False, + decode_data: bool = False, + ) -> None: ... + def handle_accepted(self, conn: socket.socket, addr: Any) -> None: ... + def process_message( + self, peer: _Address, mailfrom: str, rcpttos: list[str], data: bytes | str, **kwargs: Any + ) -> str | None: ... + +class DebuggingServer(SMTPServer): ... 
+ +class PureProxy(SMTPServer): + def process_message(self, peer: _Address, mailfrom: str, rcpttos: list[str], data: bytes | str) -> str | None: ... # type: ignore[override] + +if sys.version_info < (3, 11): + @deprecated("Deprecated since Python 3.9; removed in Python 3.11.") + class MailmanProxy(PureProxy): + def process_message(self, peer: _Address, mailfrom: str, rcpttos: list[str], data: bytes | str) -> str | None: ... # type: ignore[override] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/smtplib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/smtplib.pyi new file mode 100644 index 0000000..6a84676 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/smtplib.pyi @@ -0,0 +1,195 @@ +import sys +from _socket import _Address as _SourceAddress +from _typeshed import ReadableBuffer, SizedBuffer +from collections.abc import Sequence +from email.message import Message as _Message +from re import Pattern +from socket import socket +from ssl import SSLContext +from types import TracebackType +from typing import Any, Final, Protocol, overload, type_check_only +from typing_extensions import Self, TypeAlias + +__all__ = [ + "SMTPException", + "SMTPServerDisconnected", + "SMTPResponseException", + "SMTPSenderRefused", + "SMTPRecipientsRefused", + "SMTPDataError", + "SMTPConnectError", + "SMTPHeloError", + "SMTPAuthenticationError", + "quoteaddr", + "quotedata", + "SMTP", + "SMTP_SSL", + "SMTPNotSupportedError", +] + +_Reply: TypeAlias = tuple[int, bytes] +_SendErrs: TypeAlias = dict[str, _Reply] + +SMTP_PORT: Final = 25 +SMTP_SSL_PORT: Final = 465 +CRLF: Final[str] +bCRLF: Final[bytes] + +OLDSTYLE_AUTH: Final[Pattern[str]] + +class SMTPException(OSError): ... +class SMTPNotSupportedError(SMTPException): ... +class SMTPServerDisconnected(SMTPException): ... + +class SMTPResponseException(SMTPException): + smtp_code: int + smtp_error: bytes | str + args: tuple[int, bytes | str] | tuple[int, bytes, str] + def __init__(self, code: int, msg: bytes | str) -> None: ... + +class SMTPSenderRefused(SMTPResponseException): + smtp_error: bytes + sender: str + args: tuple[int, bytes, str] + def __init__(self, code: int, msg: bytes, sender: str) -> None: ... + +class SMTPRecipientsRefused(SMTPException): + recipients: _SendErrs + args: tuple[_SendErrs] + def __init__(self, recipients: _SendErrs) -> None: ... + +class SMTPDataError(SMTPResponseException): ... +class SMTPConnectError(SMTPResponseException): ... +class SMTPHeloError(SMTPResponseException): ... +class SMTPAuthenticationError(SMTPResponseException): ... + +def quoteaddr(addrstring: str) -> str: ... +def quotedata(data: str) -> str: ... +@type_check_only +class _AuthObject(Protocol): + @overload + def __call__(self, challenge: None = None, /) -> str | None: ... + @overload + def __call__(self, challenge: bytes, /) -> str: ... + +class SMTP: + debuglevel: int + sock: socket | None + # Type of file should match what socket.makefile() returns + file: Any | None + helo_resp: bytes | None + ehlo_msg: str + ehlo_resp: bytes | None + does_esmtp: bool + default_port: int + timeout: float + esmtp_features: dict[str, str] + command_encoding: str + source_address: _SourceAddress | None + local_hostname: str + def __init__( + self, + host: str = "", + port: int = 0, + local_hostname: str | None = None, + timeout: float = ..., + source_address: _SourceAddress | None = None, + ) -> None: ... + def __enter__(self) -> Self: ... 
+ def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None + ) -> None: ... + def set_debuglevel(self, debuglevel: int) -> None: ... + def connect(self, host: str = "localhost", port: int = 0, source_address: _SourceAddress | None = None) -> _Reply: ... + def send(self, s: ReadableBuffer | str) -> None: ... + def putcmd(self, cmd: str, args: str = "") -> None: ... + def getreply(self) -> _Reply: ... + def docmd(self, cmd: str, args: str = "") -> _Reply: ... + def helo(self, name: str = "") -> _Reply: ... + def ehlo(self, name: str = "") -> _Reply: ... + def has_extn(self, opt: str) -> bool: ... + def help(self, args: str = "") -> bytes: ... + def rset(self) -> _Reply: ... + def noop(self) -> _Reply: ... + def mail(self, sender: str, options: Sequence[str] = ()) -> _Reply: ... + def rcpt(self, recip: str, options: Sequence[str] = ()) -> _Reply: ... + def data(self, msg: ReadableBuffer | str) -> _Reply: ... + def verify(self, address: str) -> _Reply: ... + vrfy = verify + def expn(self, address: str) -> _Reply: ... + def ehlo_or_helo_if_needed(self) -> None: ... + user: str + password: str + def auth(self, mechanism: str, authobject: _AuthObject, *, initial_response_ok: bool = True) -> _Reply: ... + @overload + def auth_cram_md5(self, challenge: None = None) -> None: ... + @overload + def auth_cram_md5(self, challenge: ReadableBuffer) -> str: ... + def auth_plain(self, challenge: ReadableBuffer | None = None) -> str: ... + def auth_login(self, challenge: ReadableBuffer | None = None) -> str: ... + def login(self, user: str, password: str, *, initial_response_ok: bool = True) -> _Reply: ... + if sys.version_info >= (3, 12): + def starttls(self, *, context: SSLContext | None = None) -> _Reply: ... + else: + def starttls( + self, keyfile: str | None = None, certfile: str | None = None, context: SSLContext | None = None + ) -> _Reply: ... + + def sendmail( + self, + from_addr: str, + to_addrs: str | Sequence[str], + msg: SizedBuffer | str, + mail_options: Sequence[str] = (), + rcpt_options: Sequence[str] = (), + ) -> _SendErrs: ... + def send_message( + self, + msg: _Message, + from_addr: str | None = None, + to_addrs: str | Sequence[str] | None = None, + mail_options: Sequence[str] = (), + rcpt_options: Sequence[str] = (), + ) -> _SendErrs: ... + def close(self) -> None: ... + def quit(self) -> _Reply: ... + +class SMTP_SSL(SMTP): + keyfile: str | None + certfile: str | None + context: SSLContext + if sys.version_info >= (3, 12): + def __init__( + self, + host: str = "", + port: int = 0, + local_hostname: str | None = None, + *, + timeout: float = ..., + source_address: _SourceAddress | None = None, + context: SSLContext | None = None, + ) -> None: ... + else: + def __init__( + self, + host: str = "", + port: int = 0, + local_hostname: str | None = None, + keyfile: str | None = None, + certfile: str | None = None, + timeout: float = ..., + source_address: _SourceAddress | None = None, + context: SSLContext | None = None, + ) -> None: ... + +LMTP_PORT: Final = 2003 + +class LMTP(SMTP): + def __init__( + self, + host: str = "", + port: int = 2003, + local_hostname: str | None = None, + source_address: _SourceAddress | None = None, + timeout: float = ..., + ) -> None: ... 
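Aside (not part of the diff above): the smtplib stubs just shown describe SMTP as a context manager whose ehlo/starttls/send_message calls return reply tuples or per-recipient error dicts. A minimal usage sketch consistent with those signatures — host, addresses, and port are illustrative placeholders, not values from this commit:

import smtplib
from email.message import EmailMessage

def send_test_mail() -> None:
    # Illustrative message; addresses are placeholders only.
    msg = EmailMessage()
    msg["From"] = "sender@example.com"
    msg["To"] = "recipient@example.com"
    msg["Subject"] = "smtplib stub demo"
    msg.set_content("Hello from smtplib.")

    # SMTP implements __enter__/__exit__, so the connection is closed on exit.
    with smtplib.SMTP("localhost", smtplib.SMTP_PORT, timeout=10) as smtp:
        code, banner = smtp.ehlo()      # _Reply = tuple[int, bytes]
        if smtp.has_extn("starttls"):
            smtp.starttls()             # upgrade the connection in place
            smtp.ehlo()                 # re-issue EHLO after STARTTLS
        errs = smtp.send_message(msg)   # _SendErrs = dict[str, tuple[int, bytes]]
        assert not errs, f"some recipients were refused: {errs}"

The sketch assumes a reachable SMTP server on localhost; it exists only to show how the stubbed return types fit together.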
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sndhdr.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sndhdr.pyi new file mode 100644 index 0000000..f4d4876 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sndhdr.pyi @@ -0,0 +1,14 @@ +from _typeshed import StrOrBytesPath +from typing import NamedTuple + +__all__ = ["what", "whathdr"] + +class SndHeaders(NamedTuple): + filetype: str + framerate: int + nchannels: int + nframes: int + sampwidth: int | str + +def what(filename: StrOrBytesPath) -> SndHeaders | None: ... +def whathdr(filename: StrOrBytesPath) -> SndHeaders | None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/socket.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/socket.pyi new file mode 100644 index 0000000..b10b356 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/socket.pyi @@ -0,0 +1,1433 @@ +# Ideally, we'd just do "from _socket import *". Unfortunately, socket +# overrides some definitions from _socket incompatibly. mypy incorrectly +# prefers the definitions from _socket over those defined here. +import _socket +import sys +from _socket import ( + CAPI as CAPI, + EAI_AGAIN as EAI_AGAIN, + EAI_BADFLAGS as EAI_BADFLAGS, + EAI_FAIL as EAI_FAIL, + EAI_FAMILY as EAI_FAMILY, + EAI_MEMORY as EAI_MEMORY, + EAI_NODATA as EAI_NODATA, + EAI_NONAME as EAI_NONAME, + EAI_SERVICE as EAI_SERVICE, + EAI_SOCKTYPE as EAI_SOCKTYPE, + INADDR_ALLHOSTS_GROUP as INADDR_ALLHOSTS_GROUP, + INADDR_ANY as INADDR_ANY, + INADDR_BROADCAST as INADDR_BROADCAST, + INADDR_LOOPBACK as INADDR_LOOPBACK, + INADDR_MAX_LOCAL_GROUP as INADDR_MAX_LOCAL_GROUP, + INADDR_NONE as INADDR_NONE, + INADDR_UNSPEC_GROUP as INADDR_UNSPEC_GROUP, + IP_ADD_MEMBERSHIP as IP_ADD_MEMBERSHIP, + IP_DROP_MEMBERSHIP as IP_DROP_MEMBERSHIP, + IP_HDRINCL as IP_HDRINCL, + IP_MULTICAST_IF as IP_MULTICAST_IF, + IP_MULTICAST_LOOP as IP_MULTICAST_LOOP, + IP_MULTICAST_TTL as IP_MULTICAST_TTL, + IP_OPTIONS as IP_OPTIONS, + IP_TOS as IP_TOS, + IP_TTL as IP_TTL, + IPPORT_RESERVED as IPPORT_RESERVED, + IPPORT_USERRESERVED as IPPORT_USERRESERVED, + IPPROTO_AH as IPPROTO_AH, + IPPROTO_DSTOPTS as IPPROTO_DSTOPTS, + IPPROTO_EGP as IPPROTO_EGP, + IPPROTO_ESP as IPPROTO_ESP, + IPPROTO_FRAGMENT as IPPROTO_FRAGMENT, + IPPROTO_HOPOPTS as IPPROTO_HOPOPTS, + IPPROTO_ICMP as IPPROTO_ICMP, + IPPROTO_ICMPV6 as IPPROTO_ICMPV6, + IPPROTO_IDP as IPPROTO_IDP, + IPPROTO_IGMP as IPPROTO_IGMP, + IPPROTO_IP as IPPROTO_IP, + IPPROTO_IPV6 as IPPROTO_IPV6, + IPPROTO_NONE as IPPROTO_NONE, + IPPROTO_PIM as IPPROTO_PIM, + IPPROTO_PUP as IPPROTO_PUP, + IPPROTO_RAW as IPPROTO_RAW, + IPPROTO_ROUTING as IPPROTO_ROUTING, + IPPROTO_SCTP as IPPROTO_SCTP, + IPPROTO_TCP as IPPROTO_TCP, + IPPROTO_UDP as IPPROTO_UDP, + IPV6_CHECKSUM as IPV6_CHECKSUM, + IPV6_DONTFRAG as IPV6_DONTFRAG, + IPV6_HOPLIMIT as IPV6_HOPLIMIT, + IPV6_HOPOPTS as IPV6_HOPOPTS, + IPV6_JOIN_GROUP as IPV6_JOIN_GROUP, + IPV6_LEAVE_GROUP as IPV6_LEAVE_GROUP, + IPV6_MULTICAST_HOPS as IPV6_MULTICAST_HOPS, + IPV6_MULTICAST_IF as IPV6_MULTICAST_IF, + IPV6_MULTICAST_LOOP as IPV6_MULTICAST_LOOP, + IPV6_PKTINFO as IPV6_PKTINFO, + IPV6_RECVRTHDR as IPV6_RECVRTHDR, + IPV6_RECVTCLASS as IPV6_RECVTCLASS, + IPV6_RTHDR as IPV6_RTHDR, + IPV6_TCLASS as IPV6_TCLASS, + IPV6_UNICAST_HOPS as IPV6_UNICAST_HOPS, + IPV6_V6ONLY as IPV6_V6ONLY, + NI_DGRAM as NI_DGRAM, + NI_MAXHOST as NI_MAXHOST, + NI_MAXSERV as NI_MAXSERV, + NI_NAMEREQD as NI_NAMEREQD, + NI_NOFQDN as NI_NOFQDN, + NI_NUMERICHOST as NI_NUMERICHOST, + 
NI_NUMERICSERV as NI_NUMERICSERV, + SHUT_RD as SHUT_RD, + SHUT_RDWR as SHUT_RDWR, + SHUT_WR as SHUT_WR, + SO_ACCEPTCONN as SO_ACCEPTCONN, + SO_BROADCAST as SO_BROADCAST, + SO_DEBUG as SO_DEBUG, + SO_DONTROUTE as SO_DONTROUTE, + SO_ERROR as SO_ERROR, + SO_KEEPALIVE as SO_KEEPALIVE, + SO_LINGER as SO_LINGER, + SO_OOBINLINE as SO_OOBINLINE, + SO_RCVBUF as SO_RCVBUF, + SO_RCVLOWAT as SO_RCVLOWAT, + SO_RCVTIMEO as SO_RCVTIMEO, + SO_REUSEADDR as SO_REUSEADDR, + SO_SNDBUF as SO_SNDBUF, + SO_SNDLOWAT as SO_SNDLOWAT, + SO_SNDTIMEO as SO_SNDTIMEO, + SO_TYPE as SO_TYPE, + SOL_IP as SOL_IP, + SOL_SOCKET as SOL_SOCKET, + SOL_TCP as SOL_TCP, + SOL_UDP as SOL_UDP, + SOMAXCONN as SOMAXCONN, + TCP_FASTOPEN as TCP_FASTOPEN, + TCP_KEEPCNT as TCP_KEEPCNT, + TCP_KEEPINTVL as TCP_KEEPINTVL, + TCP_MAXSEG as TCP_MAXSEG, + TCP_NODELAY as TCP_NODELAY, + SocketType as SocketType, + _Address as _Address, + _RetAddress as _RetAddress, + close as close, + dup as dup, + getdefaulttimeout as getdefaulttimeout, + gethostbyaddr as gethostbyaddr, + gethostbyname as gethostbyname, + gethostbyname_ex as gethostbyname_ex, + gethostname as gethostname, + getnameinfo as getnameinfo, + getprotobyname as getprotobyname, + getservbyname as getservbyname, + getservbyport as getservbyport, + has_ipv6 as has_ipv6, + htonl as htonl, + htons as htons, + if_indextoname as if_indextoname, + if_nameindex as if_nameindex, + if_nametoindex as if_nametoindex, + inet_aton as inet_aton, + inet_ntoa as inet_ntoa, + inet_ntop as inet_ntop, + inet_pton as inet_pton, + ntohl as ntohl, + ntohs as ntohs, + setdefaulttimeout as setdefaulttimeout, +) +from _typeshed import ReadableBuffer, Unused, WriteableBuffer +from collections.abc import Iterable +from enum import IntEnum, IntFlag +from io import BufferedReader, BufferedRWPair, BufferedWriter, IOBase, RawIOBase, TextIOWrapper +from typing import Any, Final, Literal, Protocol, SupportsIndex, overload, type_check_only +from typing_extensions import Self + +__all__ = [ + "fromfd", + "getfqdn", + "create_connection", + "create_server", + "has_dualstack_ipv6", + "AddressFamily", + "SocketKind", + "AF_APPLETALK", + "AF_DECnet", + "AF_INET", + "AF_INET6", + "AF_IPX", + "AF_SNA", + "AF_UNSPEC", + "AI_ADDRCONFIG", + "AI_ALL", + "AI_CANONNAME", + "AI_NUMERICHOST", + "AI_NUMERICSERV", + "AI_PASSIVE", + "AI_V4MAPPED", + "CAPI", + "EAI_AGAIN", + "EAI_BADFLAGS", + "EAI_FAIL", + "EAI_FAMILY", + "EAI_MEMORY", + "EAI_NODATA", + "EAI_NONAME", + "EAI_SERVICE", + "EAI_SOCKTYPE", + "INADDR_ALLHOSTS_GROUP", + "INADDR_ANY", + "INADDR_BROADCAST", + "INADDR_LOOPBACK", + "INADDR_MAX_LOCAL_GROUP", + "INADDR_NONE", + "INADDR_UNSPEC_GROUP", + "IPPORT_RESERVED", + "IPPORT_USERRESERVED", + "IPPROTO_AH", + "IPPROTO_DSTOPTS", + "IPPROTO_EGP", + "IPPROTO_ESP", + "IPPROTO_FRAGMENT", + "IPPROTO_HOPOPTS", + "IPPROTO_ICMP", + "IPPROTO_ICMPV6", + "IPPROTO_IDP", + "IPPROTO_IGMP", + "IPPROTO_IP", + "IPPROTO_IPV6", + "IPPROTO_NONE", + "IPPROTO_PIM", + "IPPROTO_PUP", + "IPPROTO_RAW", + "IPPROTO_ROUTING", + "IPPROTO_SCTP", + "IPPROTO_TCP", + "IPPROTO_UDP", + "IPV6_CHECKSUM", + "IPV6_DONTFRAG", + "IPV6_HOPLIMIT", + "IPV6_HOPOPTS", + "IPV6_JOIN_GROUP", + "IPV6_LEAVE_GROUP", + "IPV6_MULTICAST_HOPS", + "IPV6_MULTICAST_IF", + "IPV6_MULTICAST_LOOP", + "IPV6_PKTINFO", + "IPV6_RECVRTHDR", + "IPV6_RECVTCLASS", + "IPV6_RTHDR", + "IPV6_TCLASS", + "IPV6_UNICAST_HOPS", + "IPV6_V6ONLY", + "IP_ADD_MEMBERSHIP", + "IP_DROP_MEMBERSHIP", + "IP_HDRINCL", + "IP_MULTICAST_IF", + "IP_MULTICAST_LOOP", + "IP_MULTICAST_TTL", + "IP_OPTIONS", + "IP_TOS", + "IP_TTL", + 
"MSG_CTRUNC", + "MSG_DONTROUTE", + "MSG_OOB", + "MSG_PEEK", + "MSG_TRUNC", + "MSG_WAITALL", + "NI_DGRAM", + "NI_MAXHOST", + "NI_MAXSERV", + "NI_NAMEREQD", + "NI_NOFQDN", + "NI_NUMERICHOST", + "NI_NUMERICSERV", + "SHUT_RD", + "SHUT_RDWR", + "SHUT_WR", + "SOCK_DGRAM", + "SOCK_RAW", + "SOCK_RDM", + "SOCK_SEQPACKET", + "SOCK_STREAM", + "SOL_IP", + "SOL_SOCKET", + "SOL_TCP", + "SOL_UDP", + "SOMAXCONN", + "SO_ACCEPTCONN", + "SO_BROADCAST", + "SO_DEBUG", + "SO_DONTROUTE", + "SO_ERROR", + "SO_KEEPALIVE", + "SO_LINGER", + "SO_OOBINLINE", + "SO_RCVBUF", + "SO_RCVLOWAT", + "SO_RCVTIMEO", + "SO_REUSEADDR", + "SO_SNDBUF", + "SO_SNDLOWAT", + "SO_SNDTIMEO", + "SO_TYPE", + "SocketType", + "TCP_FASTOPEN", + "TCP_KEEPCNT", + "TCP_KEEPINTVL", + "TCP_MAXSEG", + "TCP_NODELAY", + "close", + "dup", + "error", + "gaierror", + "getaddrinfo", + "getdefaulttimeout", + "gethostbyaddr", + "gethostbyname", + "gethostbyname_ex", + "gethostname", + "getnameinfo", + "getprotobyname", + "getservbyname", + "getservbyport", + "has_ipv6", + "herror", + "htonl", + "htons", + "if_indextoname", + "if_nameindex", + "if_nametoindex", + "inet_aton", + "inet_ntoa", + "inet_ntop", + "inet_pton", + "ntohl", + "ntohs", + "setdefaulttimeout", + "socket", + "socketpair", + "timeout", +] + +if sys.platform == "win32": + from _socket import ( + IPPROTO_CBT as IPPROTO_CBT, + IPPROTO_ICLFXBM as IPPROTO_ICLFXBM, + IPPROTO_IGP as IPPROTO_IGP, + IPPROTO_L2TP as IPPROTO_L2TP, + IPPROTO_PGM as IPPROTO_PGM, + IPPROTO_RDP as IPPROTO_RDP, + IPPROTO_ST as IPPROTO_ST, + RCVALL_MAX as RCVALL_MAX, + RCVALL_OFF as RCVALL_OFF, + RCVALL_ON as RCVALL_ON, + RCVALL_SOCKETLEVELONLY as RCVALL_SOCKETLEVELONLY, + SIO_KEEPALIVE_VALS as SIO_KEEPALIVE_VALS, + SIO_LOOPBACK_FAST_PATH as SIO_LOOPBACK_FAST_PATH, + SIO_RCVALL as SIO_RCVALL, + SO_EXCLUSIVEADDRUSE as SO_EXCLUSIVEADDRUSE, + ) + + __all__ += [ + "IPPROTO_CBT", + "IPPROTO_ICLFXBM", + "IPPROTO_IGP", + "IPPROTO_L2TP", + "IPPROTO_PGM", + "IPPROTO_RDP", + "IPPROTO_ST", + "RCVALL_MAX", + "RCVALL_OFF", + "RCVALL_ON", + "RCVALL_SOCKETLEVELONLY", + "SIO_KEEPALIVE_VALS", + "SIO_LOOPBACK_FAST_PATH", + "SIO_RCVALL", + "SO_EXCLUSIVEADDRUSE", + "fromshare", + "errorTab", + "MSG_BCAST", + "MSG_MCAST", + ] + +if sys.platform == "darwin": + from _socket import PF_SYSTEM as PF_SYSTEM, SYSPROTO_CONTROL as SYSPROTO_CONTROL + + __all__ += ["PF_SYSTEM", "SYSPROTO_CONTROL", "AF_SYSTEM"] + +if sys.platform != "darwin": + from _socket import TCP_KEEPIDLE as TCP_KEEPIDLE + + __all__ += ["TCP_KEEPIDLE", "AF_IRDA", "MSG_ERRQUEUE"] + +if sys.version_info >= (3, 10): + from _socket import IP_RECVTOS as IP_RECVTOS + + __all__ += ["IP_RECVTOS"] + +if sys.platform != "win32" and sys.platform != "darwin": + from _socket import ( + IP_TRANSPARENT as IP_TRANSPARENT, + IPX_TYPE as IPX_TYPE, + SCM_CREDENTIALS as SCM_CREDENTIALS, + SO_DOMAIN as SO_DOMAIN, + SO_MARK as SO_MARK, + SO_PASSCRED as SO_PASSCRED, + SO_PASSSEC as SO_PASSSEC, + SO_PEERCRED as SO_PEERCRED, + SO_PEERSEC as SO_PEERSEC, + SO_PRIORITY as SO_PRIORITY, + SO_PROTOCOL as SO_PROTOCOL, + SOL_ATALK as SOL_ATALK, + SOL_AX25 as SOL_AX25, + SOL_HCI as SOL_HCI, + SOL_IPX as SOL_IPX, + SOL_NETROM as SOL_NETROM, + SOL_ROSE as SOL_ROSE, + TCP_CONGESTION as TCP_CONGESTION, + TCP_CORK as TCP_CORK, + TCP_DEFER_ACCEPT as TCP_DEFER_ACCEPT, + TCP_INFO as TCP_INFO, + TCP_LINGER2 as TCP_LINGER2, + TCP_QUICKACK as TCP_QUICKACK, + TCP_SYNCNT as TCP_SYNCNT, + TCP_USER_TIMEOUT as TCP_USER_TIMEOUT, + TCP_WINDOW_CLAMP as TCP_WINDOW_CLAMP, + ) + + __all__ += [ + "IP_TRANSPARENT", + "SCM_CREDENTIALS", + 
"SO_DOMAIN", + "SO_MARK", + "SO_PASSCRED", + "SO_PASSSEC", + "SO_PEERCRED", + "SO_PEERSEC", + "SO_PRIORITY", + "SO_PROTOCOL", + "TCP_CONGESTION", + "TCP_CORK", + "TCP_DEFER_ACCEPT", + "TCP_INFO", + "TCP_LINGER2", + "TCP_QUICKACK", + "TCP_SYNCNT", + "TCP_USER_TIMEOUT", + "TCP_WINDOW_CLAMP", + "AF_ASH", + "AF_ATMPVC", + "AF_ATMSVC", + "AF_AX25", + "AF_BRIDGE", + "AF_ECONET", + "AF_KEY", + "AF_LLC", + "AF_NETBEUI", + "AF_NETROM", + "AF_PPPOX", + "AF_ROSE", + "AF_SECURITY", + "AF_WANPIPE", + "AF_X25", + "MSG_CMSG_CLOEXEC", + "MSG_CONFIRM", + "MSG_FASTOPEN", + "MSG_MORE", + ] + +if sys.platform != "win32" and sys.platform != "darwin" and sys.version_info >= (3, 11): + from _socket import IP_BIND_ADDRESS_NO_PORT as IP_BIND_ADDRESS_NO_PORT + + __all__ += ["IP_BIND_ADDRESS_NO_PORT"] + +if sys.platform != "win32": + from _socket import ( + CMSG_LEN as CMSG_LEN, + CMSG_SPACE as CMSG_SPACE, + EAI_ADDRFAMILY as EAI_ADDRFAMILY, + EAI_OVERFLOW as EAI_OVERFLOW, + EAI_SYSTEM as EAI_SYSTEM, + IP_DEFAULT_MULTICAST_LOOP as IP_DEFAULT_MULTICAST_LOOP, + IP_DEFAULT_MULTICAST_TTL as IP_DEFAULT_MULTICAST_TTL, + IP_MAX_MEMBERSHIPS as IP_MAX_MEMBERSHIPS, + IP_RECVOPTS as IP_RECVOPTS, + IP_RECVRETOPTS as IP_RECVRETOPTS, + IP_RETOPTS as IP_RETOPTS, + IPPROTO_GRE as IPPROTO_GRE, + IPPROTO_IPIP as IPPROTO_IPIP, + IPPROTO_RSVP as IPPROTO_RSVP, + IPPROTO_TP as IPPROTO_TP, + IPV6_RTHDR_TYPE_0 as IPV6_RTHDR_TYPE_0, + SCM_RIGHTS as SCM_RIGHTS, + SO_REUSEPORT as SO_REUSEPORT, + TCP_NOTSENT_LOWAT as TCP_NOTSENT_LOWAT, + sethostname as sethostname, + ) + + __all__ += [ + "CMSG_LEN", + "CMSG_SPACE", + "EAI_ADDRFAMILY", + "EAI_OVERFLOW", + "EAI_SYSTEM", + "IP_DEFAULT_MULTICAST_LOOP", + "IP_DEFAULT_MULTICAST_TTL", + "IP_MAX_MEMBERSHIPS", + "IP_RECVOPTS", + "IP_RECVRETOPTS", + "IP_RETOPTS", + "IPPROTO_GRE", + "IPPROTO_IPIP", + "IPPROTO_RSVP", + "IPPROTO_TP", + "IPV6_RTHDR_TYPE_0", + "SCM_RIGHTS", + "SO_REUSEPORT", + "TCP_NOTSENT_LOWAT", + "sethostname", + "AF_ROUTE", + "AF_UNIX", + "MSG_DONTWAIT", + "MSG_EOR", + "MSG_NOSIGNAL", + ] + + from _socket import ( + IPV6_DSTOPTS as IPV6_DSTOPTS, + IPV6_NEXTHOP as IPV6_NEXTHOP, + IPV6_PATHMTU as IPV6_PATHMTU, + IPV6_RECVDSTOPTS as IPV6_RECVDSTOPTS, + IPV6_RECVHOPLIMIT as IPV6_RECVHOPLIMIT, + IPV6_RECVHOPOPTS as IPV6_RECVHOPOPTS, + IPV6_RECVPATHMTU as IPV6_RECVPATHMTU, + IPV6_RECVPKTINFO as IPV6_RECVPKTINFO, + IPV6_RTHDRDSTOPTS as IPV6_RTHDRDSTOPTS, + ) + + __all__ += [ + "IPV6_DSTOPTS", + "IPV6_NEXTHOP", + "IPV6_PATHMTU", + "IPV6_RECVDSTOPTS", + "IPV6_RECVHOPLIMIT", + "IPV6_RECVHOPOPTS", + "IPV6_RECVPATHMTU", + "IPV6_RECVPKTINFO", + "IPV6_RTHDRDSTOPTS", + ] + + if sys.platform != "darwin" or sys.version_info >= (3, 13): + from _socket import SO_BINDTODEVICE as SO_BINDTODEVICE + + __all__ += ["SO_BINDTODEVICE"] + +if sys.platform != "darwin" and sys.platform != "linux": + from _socket import BDADDR_ANY as BDADDR_ANY, BDADDR_LOCAL as BDADDR_LOCAL, BTPROTO_RFCOMM as BTPROTO_RFCOMM + + __all__ += ["BDADDR_ANY", "BDADDR_LOCAL", "BTPROTO_RFCOMM"] + +if sys.platform == "darwin" and sys.version_info >= (3, 10): + from _socket import TCP_KEEPALIVE as TCP_KEEPALIVE + + __all__ += ["TCP_KEEPALIVE"] + +if sys.platform == "darwin" and sys.version_info >= (3, 11): + from _socket import TCP_CONNECTION_INFO as TCP_CONNECTION_INFO + + __all__ += ["TCP_CONNECTION_INFO"] + +if sys.platform == "linux": + from _socket import ( + ALG_OP_DECRYPT as ALG_OP_DECRYPT, + ALG_OP_ENCRYPT as ALG_OP_ENCRYPT, + ALG_OP_SIGN as ALG_OP_SIGN, + ALG_OP_VERIFY as ALG_OP_VERIFY, + ALG_SET_AEAD_ASSOCLEN as ALG_SET_AEAD_ASSOCLEN, 
+ ALG_SET_AEAD_AUTHSIZE as ALG_SET_AEAD_AUTHSIZE, + ALG_SET_IV as ALG_SET_IV, + ALG_SET_KEY as ALG_SET_KEY, + ALG_SET_OP as ALG_SET_OP, + ALG_SET_PUBKEY as ALG_SET_PUBKEY, + CAN_BCM as CAN_BCM, + CAN_BCM_CAN_FD_FRAME as CAN_BCM_CAN_FD_FRAME, + CAN_BCM_RX_ANNOUNCE_RESUME as CAN_BCM_RX_ANNOUNCE_RESUME, + CAN_BCM_RX_CHANGED as CAN_BCM_RX_CHANGED, + CAN_BCM_RX_CHECK_DLC as CAN_BCM_RX_CHECK_DLC, + CAN_BCM_RX_DELETE as CAN_BCM_RX_DELETE, + CAN_BCM_RX_FILTER_ID as CAN_BCM_RX_FILTER_ID, + CAN_BCM_RX_NO_AUTOTIMER as CAN_BCM_RX_NO_AUTOTIMER, + CAN_BCM_RX_READ as CAN_BCM_RX_READ, + CAN_BCM_RX_RTR_FRAME as CAN_BCM_RX_RTR_FRAME, + CAN_BCM_RX_SETUP as CAN_BCM_RX_SETUP, + CAN_BCM_RX_STATUS as CAN_BCM_RX_STATUS, + CAN_BCM_RX_TIMEOUT as CAN_BCM_RX_TIMEOUT, + CAN_BCM_SETTIMER as CAN_BCM_SETTIMER, + CAN_BCM_STARTTIMER as CAN_BCM_STARTTIMER, + CAN_BCM_TX_ANNOUNCE as CAN_BCM_TX_ANNOUNCE, + CAN_BCM_TX_COUNTEVT as CAN_BCM_TX_COUNTEVT, + CAN_BCM_TX_CP_CAN_ID as CAN_BCM_TX_CP_CAN_ID, + CAN_BCM_TX_DELETE as CAN_BCM_TX_DELETE, + CAN_BCM_TX_EXPIRED as CAN_BCM_TX_EXPIRED, + CAN_BCM_TX_READ as CAN_BCM_TX_READ, + CAN_BCM_TX_RESET_MULTI_IDX as CAN_BCM_TX_RESET_MULTI_IDX, + CAN_BCM_TX_SEND as CAN_BCM_TX_SEND, + CAN_BCM_TX_SETUP as CAN_BCM_TX_SETUP, + CAN_BCM_TX_STATUS as CAN_BCM_TX_STATUS, + CAN_EFF_FLAG as CAN_EFF_FLAG, + CAN_EFF_MASK as CAN_EFF_MASK, + CAN_ERR_FLAG as CAN_ERR_FLAG, + CAN_ERR_MASK as CAN_ERR_MASK, + CAN_ISOTP as CAN_ISOTP, + CAN_RAW as CAN_RAW, + CAN_RAW_FD_FRAMES as CAN_RAW_FD_FRAMES, + CAN_RAW_FILTER as CAN_RAW_FILTER, + CAN_RAW_LOOPBACK as CAN_RAW_LOOPBACK, + CAN_RAW_RECV_OWN_MSGS as CAN_RAW_RECV_OWN_MSGS, + CAN_RTR_FLAG as CAN_RTR_FLAG, + CAN_SFF_MASK as CAN_SFF_MASK, + IOCTL_VM_SOCKETS_GET_LOCAL_CID as IOCTL_VM_SOCKETS_GET_LOCAL_CID, + NETLINK_CRYPTO as NETLINK_CRYPTO, + NETLINK_DNRTMSG as NETLINK_DNRTMSG, + NETLINK_FIREWALL as NETLINK_FIREWALL, + NETLINK_IP6_FW as NETLINK_IP6_FW, + NETLINK_NFLOG as NETLINK_NFLOG, + NETLINK_ROUTE as NETLINK_ROUTE, + NETLINK_USERSOCK as NETLINK_USERSOCK, + NETLINK_XFRM as NETLINK_XFRM, + PACKET_BROADCAST as PACKET_BROADCAST, + PACKET_FASTROUTE as PACKET_FASTROUTE, + PACKET_HOST as PACKET_HOST, + PACKET_LOOPBACK as PACKET_LOOPBACK, + PACKET_MULTICAST as PACKET_MULTICAST, + PACKET_OTHERHOST as PACKET_OTHERHOST, + PACKET_OUTGOING as PACKET_OUTGOING, + PF_CAN as PF_CAN, + PF_PACKET as PF_PACKET, + PF_RDS as PF_RDS, + RDS_CANCEL_SENT_TO as RDS_CANCEL_SENT_TO, + RDS_CMSG_RDMA_ARGS as RDS_CMSG_RDMA_ARGS, + RDS_CMSG_RDMA_DEST as RDS_CMSG_RDMA_DEST, + RDS_CMSG_RDMA_MAP as RDS_CMSG_RDMA_MAP, + RDS_CMSG_RDMA_STATUS as RDS_CMSG_RDMA_STATUS, + RDS_CONG_MONITOR as RDS_CONG_MONITOR, + RDS_FREE_MR as RDS_FREE_MR, + RDS_GET_MR as RDS_GET_MR, + RDS_GET_MR_FOR_DEST as RDS_GET_MR_FOR_DEST, + RDS_RDMA_DONTWAIT as RDS_RDMA_DONTWAIT, + RDS_RDMA_FENCE as RDS_RDMA_FENCE, + RDS_RDMA_INVALIDATE as RDS_RDMA_INVALIDATE, + RDS_RDMA_NOTIFY_ME as RDS_RDMA_NOTIFY_ME, + RDS_RDMA_READWRITE as RDS_RDMA_READWRITE, + RDS_RDMA_SILENT as RDS_RDMA_SILENT, + RDS_RDMA_USE_ONCE as RDS_RDMA_USE_ONCE, + RDS_RECVERR as RDS_RECVERR, + SO_VM_SOCKETS_BUFFER_MAX_SIZE as SO_VM_SOCKETS_BUFFER_MAX_SIZE, + SO_VM_SOCKETS_BUFFER_MIN_SIZE as SO_VM_SOCKETS_BUFFER_MIN_SIZE, + SO_VM_SOCKETS_BUFFER_SIZE as SO_VM_SOCKETS_BUFFER_SIZE, + SOL_ALG as SOL_ALG, + SOL_CAN_BASE as SOL_CAN_BASE, + SOL_CAN_RAW as SOL_CAN_RAW, + SOL_RDS as SOL_RDS, + SOL_TIPC as SOL_TIPC, + TIPC_ADDR_ID as TIPC_ADDR_ID, + TIPC_ADDR_NAME as TIPC_ADDR_NAME, + TIPC_ADDR_NAMESEQ as TIPC_ADDR_NAMESEQ, + TIPC_CFG_SRV as TIPC_CFG_SRV, + TIPC_CLUSTER_SCOPE as 
TIPC_CLUSTER_SCOPE, + TIPC_CONN_TIMEOUT as TIPC_CONN_TIMEOUT, + TIPC_CRITICAL_IMPORTANCE as TIPC_CRITICAL_IMPORTANCE, + TIPC_DEST_DROPPABLE as TIPC_DEST_DROPPABLE, + TIPC_HIGH_IMPORTANCE as TIPC_HIGH_IMPORTANCE, + TIPC_IMPORTANCE as TIPC_IMPORTANCE, + TIPC_LOW_IMPORTANCE as TIPC_LOW_IMPORTANCE, + TIPC_MEDIUM_IMPORTANCE as TIPC_MEDIUM_IMPORTANCE, + TIPC_NODE_SCOPE as TIPC_NODE_SCOPE, + TIPC_PUBLISHED as TIPC_PUBLISHED, + TIPC_SRC_DROPPABLE as TIPC_SRC_DROPPABLE, + TIPC_SUB_CANCEL as TIPC_SUB_CANCEL, + TIPC_SUB_PORTS as TIPC_SUB_PORTS, + TIPC_SUB_SERVICE as TIPC_SUB_SERVICE, + TIPC_SUBSCR_TIMEOUT as TIPC_SUBSCR_TIMEOUT, + TIPC_TOP_SRV as TIPC_TOP_SRV, + TIPC_WAIT_FOREVER as TIPC_WAIT_FOREVER, + TIPC_WITHDRAWN as TIPC_WITHDRAWN, + TIPC_ZONE_SCOPE as TIPC_ZONE_SCOPE, + VM_SOCKETS_INVALID_VERSION as VM_SOCKETS_INVALID_VERSION, + VMADDR_CID_ANY as VMADDR_CID_ANY, + VMADDR_CID_HOST as VMADDR_CID_HOST, + VMADDR_PORT_ANY as VMADDR_PORT_ANY, + ) + + __all__ += [ + "ALG_OP_DECRYPT", + "ALG_OP_ENCRYPT", + "ALG_OP_SIGN", + "ALG_OP_VERIFY", + "ALG_SET_AEAD_ASSOCLEN", + "ALG_SET_AEAD_AUTHSIZE", + "ALG_SET_IV", + "ALG_SET_KEY", + "ALG_SET_OP", + "ALG_SET_PUBKEY", + "CAN_BCM", + "CAN_BCM_CAN_FD_FRAME", + "CAN_BCM_RX_ANNOUNCE_RESUME", + "CAN_BCM_RX_CHANGED", + "CAN_BCM_RX_CHECK_DLC", + "CAN_BCM_RX_DELETE", + "CAN_BCM_RX_FILTER_ID", + "CAN_BCM_RX_NO_AUTOTIMER", + "CAN_BCM_RX_READ", + "CAN_BCM_RX_RTR_FRAME", + "CAN_BCM_RX_SETUP", + "CAN_BCM_RX_STATUS", + "CAN_BCM_RX_TIMEOUT", + "CAN_BCM_SETTIMER", + "CAN_BCM_STARTTIMER", + "CAN_BCM_TX_ANNOUNCE", + "CAN_BCM_TX_COUNTEVT", + "CAN_BCM_TX_CP_CAN_ID", + "CAN_BCM_TX_DELETE", + "CAN_BCM_TX_EXPIRED", + "CAN_BCM_TX_READ", + "CAN_BCM_TX_RESET_MULTI_IDX", + "CAN_BCM_TX_SEND", + "CAN_BCM_TX_SETUP", + "CAN_BCM_TX_STATUS", + "CAN_EFF_FLAG", + "CAN_EFF_MASK", + "CAN_ERR_FLAG", + "CAN_ERR_MASK", + "CAN_ISOTP", + "CAN_RAW", + "CAN_RAW_FD_FRAMES", + "CAN_RAW_FILTER", + "CAN_RAW_LOOPBACK", + "CAN_RAW_RECV_OWN_MSGS", + "CAN_RTR_FLAG", + "CAN_SFF_MASK", + "IOCTL_VM_SOCKETS_GET_LOCAL_CID", + "NETLINK_CRYPTO", + "NETLINK_DNRTMSG", + "NETLINK_FIREWALL", + "NETLINK_IP6_FW", + "NETLINK_NFLOG", + "NETLINK_ROUTE", + "NETLINK_USERSOCK", + "NETLINK_XFRM", + "PACKET_BROADCAST", + "PACKET_FASTROUTE", + "PACKET_HOST", + "PACKET_LOOPBACK", + "PACKET_MULTICAST", + "PACKET_OTHERHOST", + "PACKET_OUTGOING", + "PF_CAN", + "PF_PACKET", + "PF_RDS", + "SO_VM_SOCKETS_BUFFER_MAX_SIZE", + "SO_VM_SOCKETS_BUFFER_MIN_SIZE", + "SO_VM_SOCKETS_BUFFER_SIZE", + "SOL_ALG", + "SOL_CAN_BASE", + "SOL_CAN_RAW", + "SOL_RDS", + "SOL_TIPC", + "TIPC_ADDR_ID", + "TIPC_ADDR_NAME", + "TIPC_ADDR_NAMESEQ", + "TIPC_CFG_SRV", + "TIPC_CLUSTER_SCOPE", + "TIPC_CONN_TIMEOUT", + "TIPC_CRITICAL_IMPORTANCE", + "TIPC_DEST_DROPPABLE", + "TIPC_HIGH_IMPORTANCE", + "TIPC_IMPORTANCE", + "TIPC_LOW_IMPORTANCE", + "TIPC_MEDIUM_IMPORTANCE", + "TIPC_NODE_SCOPE", + "TIPC_PUBLISHED", + "TIPC_SRC_DROPPABLE", + "TIPC_SUB_CANCEL", + "TIPC_SUB_PORTS", + "TIPC_SUB_SERVICE", + "TIPC_SUBSCR_TIMEOUT", + "TIPC_TOP_SRV", + "TIPC_WAIT_FOREVER", + "TIPC_WITHDRAWN", + "TIPC_ZONE_SCOPE", + "VM_SOCKETS_INVALID_VERSION", + "VMADDR_CID_ANY", + "VMADDR_CID_HOST", + "VMADDR_PORT_ANY", + "AF_CAN", + "AF_PACKET", + "AF_RDS", + "AF_TIPC", + "AF_ALG", + "AF_NETLINK", + "AF_VSOCK", + "AF_QIPCRTR", + "SOCK_CLOEXEC", + "SOCK_NONBLOCK", + ] + + if sys.version_info < (3, 11): + from _socket import CAN_RAW_ERR_FILTER as CAN_RAW_ERR_FILTER + + __all__ += ["CAN_RAW_ERR_FILTER"] + if sys.version_info >= (3, 13): + from _socket import CAN_RAW_ERR_FILTER as CAN_RAW_ERR_FILTER + + 
__all__ += ["CAN_RAW_ERR_FILTER"] + +if sys.platform == "linux": + from _socket import ( + CAN_J1939 as CAN_J1939, + CAN_RAW_JOIN_FILTERS as CAN_RAW_JOIN_FILTERS, + IPPROTO_UDPLITE as IPPROTO_UDPLITE, + J1939_EE_INFO_NONE as J1939_EE_INFO_NONE, + J1939_EE_INFO_TX_ABORT as J1939_EE_INFO_TX_ABORT, + J1939_FILTER_MAX as J1939_FILTER_MAX, + J1939_IDLE_ADDR as J1939_IDLE_ADDR, + J1939_MAX_UNICAST_ADDR as J1939_MAX_UNICAST_ADDR, + J1939_NLA_BYTES_ACKED as J1939_NLA_BYTES_ACKED, + J1939_NLA_PAD as J1939_NLA_PAD, + J1939_NO_ADDR as J1939_NO_ADDR, + J1939_NO_NAME as J1939_NO_NAME, + J1939_NO_PGN as J1939_NO_PGN, + J1939_PGN_ADDRESS_CLAIMED as J1939_PGN_ADDRESS_CLAIMED, + J1939_PGN_ADDRESS_COMMANDED as J1939_PGN_ADDRESS_COMMANDED, + J1939_PGN_MAX as J1939_PGN_MAX, + J1939_PGN_PDU1_MAX as J1939_PGN_PDU1_MAX, + J1939_PGN_REQUEST as J1939_PGN_REQUEST, + SCM_J1939_DEST_ADDR as SCM_J1939_DEST_ADDR, + SCM_J1939_DEST_NAME as SCM_J1939_DEST_NAME, + SCM_J1939_ERRQUEUE as SCM_J1939_ERRQUEUE, + SCM_J1939_PRIO as SCM_J1939_PRIO, + SO_J1939_ERRQUEUE as SO_J1939_ERRQUEUE, + SO_J1939_FILTER as SO_J1939_FILTER, + SO_J1939_PROMISC as SO_J1939_PROMISC, + SO_J1939_SEND_PRIO as SO_J1939_SEND_PRIO, + UDPLITE_RECV_CSCOV as UDPLITE_RECV_CSCOV, + UDPLITE_SEND_CSCOV as UDPLITE_SEND_CSCOV, + ) + + __all__ += [ + "CAN_J1939", + "CAN_RAW_JOIN_FILTERS", + "IPPROTO_UDPLITE", + "J1939_EE_INFO_NONE", + "J1939_EE_INFO_TX_ABORT", + "J1939_FILTER_MAX", + "J1939_IDLE_ADDR", + "J1939_MAX_UNICAST_ADDR", + "J1939_NLA_BYTES_ACKED", + "J1939_NLA_PAD", + "J1939_NO_ADDR", + "J1939_NO_NAME", + "J1939_NO_PGN", + "J1939_PGN_ADDRESS_CLAIMED", + "J1939_PGN_ADDRESS_COMMANDED", + "J1939_PGN_MAX", + "J1939_PGN_PDU1_MAX", + "J1939_PGN_REQUEST", + "SCM_J1939_DEST_ADDR", + "SCM_J1939_DEST_NAME", + "SCM_J1939_ERRQUEUE", + "SCM_J1939_PRIO", + "SO_J1939_ERRQUEUE", + "SO_J1939_FILTER", + "SO_J1939_PROMISC", + "SO_J1939_SEND_PRIO", + "UDPLITE_RECV_CSCOV", + "UDPLITE_SEND_CSCOV", + ] +if sys.platform == "linux" and sys.version_info >= (3, 10): + from _socket import IPPROTO_MPTCP as IPPROTO_MPTCP + + __all__ += ["IPPROTO_MPTCP"] +if sys.platform == "linux" and sys.version_info >= (3, 11): + from _socket import SO_INCOMING_CPU as SO_INCOMING_CPU + + __all__ += ["SO_INCOMING_CPU"] +if sys.platform == "linux" and sys.version_info >= (3, 12): + from _socket import ( + TCP_CC_INFO as TCP_CC_INFO, + TCP_FASTOPEN_CONNECT as TCP_FASTOPEN_CONNECT, + TCP_FASTOPEN_KEY as TCP_FASTOPEN_KEY, + TCP_FASTOPEN_NO_COOKIE as TCP_FASTOPEN_NO_COOKIE, + TCP_INQ as TCP_INQ, + TCP_MD5SIG as TCP_MD5SIG, + TCP_MD5SIG_EXT as TCP_MD5SIG_EXT, + TCP_QUEUE_SEQ as TCP_QUEUE_SEQ, + TCP_REPAIR as TCP_REPAIR, + TCP_REPAIR_OPTIONS as TCP_REPAIR_OPTIONS, + TCP_REPAIR_QUEUE as TCP_REPAIR_QUEUE, + TCP_REPAIR_WINDOW as TCP_REPAIR_WINDOW, + TCP_SAVE_SYN as TCP_SAVE_SYN, + TCP_SAVED_SYN as TCP_SAVED_SYN, + TCP_THIN_DUPACK as TCP_THIN_DUPACK, + TCP_THIN_LINEAR_TIMEOUTS as TCP_THIN_LINEAR_TIMEOUTS, + TCP_TIMESTAMP as TCP_TIMESTAMP, + TCP_TX_DELAY as TCP_TX_DELAY, + TCP_ULP as TCP_ULP, + TCP_ZEROCOPY_RECEIVE as TCP_ZEROCOPY_RECEIVE, + ) + + __all__ += [ + "TCP_CC_INFO", + "TCP_FASTOPEN_CONNECT", + "TCP_FASTOPEN_KEY", + "TCP_FASTOPEN_NO_COOKIE", + "TCP_INQ", + "TCP_MD5SIG", + "TCP_MD5SIG_EXT", + "TCP_QUEUE_SEQ", + "TCP_REPAIR", + "TCP_REPAIR_OPTIONS", + "TCP_REPAIR_QUEUE", + "TCP_REPAIR_WINDOW", + "TCP_SAVED_SYN", + "TCP_SAVE_SYN", + "TCP_THIN_DUPACK", + "TCP_THIN_LINEAR_TIMEOUTS", + "TCP_TIMESTAMP", + "TCP_TX_DELAY", + "TCP_ULP", + "TCP_ZEROCOPY_RECEIVE", + ] + +if sys.platform == "linux" and 
sys.version_info >= (3, 13): + from _socket import NI_IDN as NI_IDN, SO_BINDTOIFINDEX as SO_BINDTOIFINDEX + + __all__ += ["NI_IDN", "SO_BINDTOIFINDEX"] + +if sys.version_info >= (3, 12): + from _socket import ( + IP_ADD_SOURCE_MEMBERSHIP as IP_ADD_SOURCE_MEMBERSHIP, + IP_BLOCK_SOURCE as IP_BLOCK_SOURCE, + IP_DROP_SOURCE_MEMBERSHIP as IP_DROP_SOURCE_MEMBERSHIP, + IP_PKTINFO as IP_PKTINFO, + IP_UNBLOCK_SOURCE as IP_UNBLOCK_SOURCE, + ) + + __all__ += ["IP_ADD_SOURCE_MEMBERSHIP", "IP_BLOCK_SOURCE", "IP_DROP_SOURCE_MEMBERSHIP", "IP_PKTINFO", "IP_UNBLOCK_SOURCE"] + + if sys.platform == "win32": + from _socket import ( + HV_GUID_BROADCAST as HV_GUID_BROADCAST, + HV_GUID_CHILDREN as HV_GUID_CHILDREN, + HV_GUID_LOOPBACK as HV_GUID_LOOPBACK, + HV_GUID_PARENT as HV_GUID_PARENT, + HV_GUID_WILDCARD as HV_GUID_WILDCARD, + HV_GUID_ZERO as HV_GUID_ZERO, + HV_PROTOCOL_RAW as HV_PROTOCOL_RAW, + HVSOCKET_ADDRESS_FLAG_PASSTHRU as HVSOCKET_ADDRESS_FLAG_PASSTHRU, + HVSOCKET_CONNECT_TIMEOUT as HVSOCKET_CONNECT_TIMEOUT, + HVSOCKET_CONNECT_TIMEOUT_MAX as HVSOCKET_CONNECT_TIMEOUT_MAX, + HVSOCKET_CONNECTED_SUSPEND as HVSOCKET_CONNECTED_SUSPEND, + ) + + __all__ += [ + "HV_GUID_BROADCAST", + "HV_GUID_CHILDREN", + "HV_GUID_LOOPBACK", + "HV_GUID_PARENT", + "HV_GUID_WILDCARD", + "HV_GUID_ZERO", + "HV_PROTOCOL_RAW", + "HVSOCKET_ADDRESS_FLAG_PASSTHRU", + "HVSOCKET_CONNECT_TIMEOUT", + "HVSOCKET_CONNECT_TIMEOUT_MAX", + "HVSOCKET_CONNECTED_SUSPEND", + ] + else: + from _socket import ( + ETHERTYPE_ARP as ETHERTYPE_ARP, + ETHERTYPE_IP as ETHERTYPE_IP, + ETHERTYPE_IPV6 as ETHERTYPE_IPV6, + ETHERTYPE_VLAN as ETHERTYPE_VLAN, + ) + + __all__ += ["ETHERTYPE_ARP", "ETHERTYPE_IP", "ETHERTYPE_IPV6", "ETHERTYPE_VLAN"] + + if sys.platform == "linux": + from _socket import ETH_P_ALL as ETH_P_ALL + + __all__ += ["ETH_P_ALL"] + + if sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darwin": + # FreeBSD >= 14.0 + from _socket import PF_DIVERT as PF_DIVERT + + __all__ += ["PF_DIVERT", "AF_DIVERT"] + +if sys.platform != "win32": + __all__ += ["send_fds", "recv_fds"] + +if sys.platform != "linux": + __all__ += ["AF_LINK"] +if sys.platform != "darwin" and sys.platform != "linux": + __all__ += ["AF_BLUETOOTH"] + +if sys.platform == "win32" and sys.version_info >= (3, 12): + __all__ += ["AF_HYPERV"] + +if sys.platform != "win32" and sys.platform != "linux": + from _socket import ( + EAI_BADHINTS as EAI_BADHINTS, + EAI_MAX as EAI_MAX, + EAI_PROTOCOL as EAI_PROTOCOL, + IPPROTO_EON as IPPROTO_EON, + IPPROTO_HELLO as IPPROTO_HELLO, + IPPROTO_IPCOMP as IPPROTO_IPCOMP, + IPPROTO_XTP as IPPROTO_XTP, + IPV6_USE_MIN_MTU as IPV6_USE_MIN_MTU, + LOCAL_PEERCRED as LOCAL_PEERCRED, + SCM_CREDS as SCM_CREDS, + ) + + __all__ += [ + "EAI_BADHINTS", + "EAI_MAX", + "EAI_PROTOCOL", + "IPPROTO_EON", + "IPPROTO_HELLO", + "IPPROTO_IPCOMP", + "IPPROTO_XTP", + "IPV6_USE_MIN_MTU", + "LOCAL_PEERCRED", + "SCM_CREDS", + "AI_DEFAULT", + "AI_MASK", + "AI_V4MAPPED_CFG", + "MSG_EOF", + ] + +if sys.platform != "win32" and sys.platform != "darwin" and sys.platform != "linux": + from _socket import ( + IPPROTO_BIP as IPPROTO_BIP, + IPPROTO_MOBILE as IPPROTO_MOBILE, + IPPROTO_VRRP as IPPROTO_VRRP, + MSG_BTAG as MSG_BTAG, + MSG_ETAG as MSG_ETAG, + SO_SETFIB as SO_SETFIB, + ) + + __all__ += ["SO_SETFIB", "MSG_BTAG", "MSG_ETAG", "IPPROTO_BIP", "IPPROTO_MOBILE", "IPPROTO_VRRP", "MSG_NOTIFICATION"] + +if sys.platform != "linux": + from _socket import ( + IP_RECVDSTADDR as IP_RECVDSTADDR, + IPPROTO_GGP as IPPROTO_GGP, + IPPROTO_IPV4 as IPPROTO_IPV4, + IPPROTO_MAX as 
IPPROTO_MAX, + IPPROTO_ND as IPPROTO_ND, + SO_USELOOPBACK as SO_USELOOPBACK, + ) + + __all__ += ["IPPROTO_GGP", "IPPROTO_IPV4", "IPPROTO_MAX", "IPPROTO_ND", "IP_RECVDSTADDR", "SO_USELOOPBACK"] + +if sys.version_info >= (3, 14): + from _socket import IP_RECVTTL as IP_RECVTTL + + __all__ += ["IP_RECVTTL"] + + if sys.platform == "win32" or sys.platform == "linux": + from _socket import IP_RECVERR as IP_RECVERR, IPV6_RECVERR as IPV6_RECVERR, SO_ORIGINAL_DST as SO_ORIGINAL_DST + + __all__ += ["IP_RECVERR", "IPV6_RECVERR", "SO_ORIGINAL_DST"] + + if sys.platform == "win32": + from _socket import ( + SO_BTH_ENCRYPT as SO_BTH_ENCRYPT, + SO_BTH_MTU as SO_BTH_MTU, + SO_BTH_MTU_MAX as SO_BTH_MTU_MAX, + SO_BTH_MTU_MIN as SO_BTH_MTU_MIN, + SOL_RFCOMM as SOL_RFCOMM, + TCP_QUICKACK as TCP_QUICKACK, + ) + + __all__ += ["SOL_RFCOMM", "SO_BTH_ENCRYPT", "SO_BTH_MTU", "SO_BTH_MTU_MAX", "SO_BTH_MTU_MIN", "TCP_QUICKACK"] + + if sys.platform == "linux": + from _socket import ( + IP_FREEBIND as IP_FREEBIND, + IP_RECVORIGDSTADDR as IP_RECVORIGDSTADDR, + VMADDR_CID_LOCAL as VMADDR_CID_LOCAL, + ) + + __all__ += ["IP_FREEBIND", "IP_RECVORIGDSTADDR", "VMADDR_CID_LOCAL"] + +# Re-exported from errno +EBADF: Final[int] +EAGAIN: Final[int] +EWOULDBLOCK: Final[int] + +# These errors are implemented in _socket at runtime +# but they consider themselves to live in socket so we'll put them here. +error = OSError + +class herror(error): ... +class gaierror(error): ... + +if sys.version_info >= (3, 10): + timeout = TimeoutError +else: + class timeout(error): ... + +class AddressFamily(IntEnum): + AF_INET = 2 + AF_INET6 = 10 + AF_APPLETALK = 5 + AF_IPX = 4 + AF_SNA = 22 + AF_UNSPEC = 0 + if sys.platform != "darwin": + AF_IRDA = 23 + if sys.platform != "win32": + AF_ROUTE = 16 + AF_UNIX = 1 + if sys.platform == "darwin": + AF_SYSTEM = 32 + if sys.platform != "win32" and sys.platform != "darwin": + AF_ASH = 18 + AF_ATMPVC = 8 + AF_ATMSVC = 20 + AF_AX25 = 3 + AF_BRIDGE = 7 + AF_ECONET = 19 + AF_KEY = 15 + AF_LLC = 26 + AF_NETBEUI = 13 + AF_NETROM = 6 + AF_PPPOX = 24 + AF_ROSE = 11 + AF_SECURITY = 14 + AF_WANPIPE = 25 + AF_X25 = 9 + if sys.platform == "linux": + AF_CAN = 29 + AF_PACKET = 17 + AF_RDS = 21 + AF_TIPC = 30 + AF_ALG = 38 + AF_NETLINK = 16 + AF_VSOCK = 40 + AF_QIPCRTR = 42 + if sys.platform != "linux": + AF_LINK = 33 + if sys.platform != "darwin" and sys.platform != "linux": + AF_BLUETOOTH = 32 + if sys.platform == "win32" and sys.version_info >= (3, 12): + AF_HYPERV = 34 + if sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darwin" and sys.version_info >= (3, 12): + # FreeBSD >= 14.0 + AF_DIVERT = 44 + +AF_INET: Final = AddressFamily.AF_INET +AF_INET6: Final = AddressFamily.AF_INET6 +AF_APPLETALK: Final = AddressFamily.AF_APPLETALK +AF_DECnet: Final = 12 +AF_IPX: Final = AddressFamily.AF_IPX +AF_SNA: Final = AddressFamily.AF_SNA +AF_UNSPEC: Final = AddressFamily.AF_UNSPEC + +if sys.platform != "darwin": + AF_IRDA: Final = AddressFamily.AF_IRDA + +if sys.platform != "win32": + AF_ROUTE: Final = AddressFamily.AF_ROUTE + AF_UNIX: Final = AddressFamily.AF_UNIX + +if sys.platform == "darwin": + AF_SYSTEM: Final = AddressFamily.AF_SYSTEM + +if sys.platform != "win32" and sys.platform != "darwin": + AF_ASH: Final = AddressFamily.AF_ASH + AF_ATMPVC: Final = AddressFamily.AF_ATMPVC + AF_ATMSVC: Final = AddressFamily.AF_ATMSVC + AF_AX25: Final = AddressFamily.AF_AX25 + AF_BRIDGE: Final = AddressFamily.AF_BRIDGE + AF_ECONET: Final = AddressFamily.AF_ECONET + AF_KEY: Final = AddressFamily.AF_KEY + AF_LLC: 
Final = AddressFamily.AF_LLC + AF_NETBEUI: Final = AddressFamily.AF_NETBEUI + AF_NETROM: Final = AddressFamily.AF_NETROM + AF_PPPOX: Final = AddressFamily.AF_PPPOX + AF_ROSE: Final = AddressFamily.AF_ROSE + AF_SECURITY: Final = AddressFamily.AF_SECURITY + AF_WANPIPE: Final = AddressFamily.AF_WANPIPE + AF_X25: Final = AddressFamily.AF_X25 + +if sys.platform == "linux": + AF_CAN: Final = AddressFamily.AF_CAN + AF_PACKET: Final = AddressFamily.AF_PACKET + AF_RDS: Final = AddressFamily.AF_RDS + AF_TIPC: Final = AddressFamily.AF_TIPC + AF_ALG: Final = AddressFamily.AF_ALG + AF_NETLINK: Final = AddressFamily.AF_NETLINK + AF_VSOCK: Final = AddressFamily.AF_VSOCK + AF_QIPCRTR: Final = AddressFamily.AF_QIPCRTR + +if sys.platform != "linux": + AF_LINK: Final = AddressFamily.AF_LINK +if sys.platform != "darwin" and sys.platform != "linux": + AF_BLUETOOTH: Final = AddressFamily.AF_BLUETOOTH +if sys.platform == "win32" and sys.version_info >= (3, 12): + AF_HYPERV: Final = AddressFamily.AF_HYPERV +if sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darwin" and sys.version_info >= (3, 12): + # FreeBSD >= 14.0 + AF_DIVERT: Final = AddressFamily.AF_DIVERT + +class SocketKind(IntEnum): + SOCK_STREAM = 1 + SOCK_DGRAM = 2 + SOCK_RAW = 3 + SOCK_RDM = 4 + SOCK_SEQPACKET = 5 + if sys.platform == "linux": + SOCK_CLOEXEC = 524288 + SOCK_NONBLOCK = 2048 + +SOCK_STREAM: Final = SocketKind.SOCK_STREAM +SOCK_DGRAM: Final = SocketKind.SOCK_DGRAM +SOCK_RAW: Final = SocketKind.SOCK_RAW +SOCK_RDM: Final = SocketKind.SOCK_RDM +SOCK_SEQPACKET: Final = SocketKind.SOCK_SEQPACKET +if sys.platform == "linux": + SOCK_CLOEXEC: Final = SocketKind.SOCK_CLOEXEC + SOCK_NONBLOCK: Final = SocketKind.SOCK_NONBLOCK + +class MsgFlag(IntFlag): + MSG_CTRUNC = 8 + MSG_DONTROUTE = 4 + MSG_OOB = 1 + MSG_PEEK = 2 + MSG_TRUNC = 32 + MSG_WAITALL = 256 + if sys.platform == "win32": + MSG_BCAST = 1024 + MSG_MCAST = 2048 + + if sys.platform != "darwin": + MSG_ERRQUEUE = 8192 + + if sys.platform != "win32" and sys.platform != "darwin": + MSG_CMSG_CLOEXEC = 1073741821 + MSG_CONFIRM = 2048 + MSG_FASTOPEN = 536870912 + MSG_MORE = 32768 + + if sys.platform != "win32" and sys.platform != "darwin" and sys.platform != "linux": + MSG_NOTIFICATION = 8192 + + if sys.platform != "win32": + MSG_DONTWAIT = 64 + MSG_EOR = 128 + MSG_NOSIGNAL = 16384 # sometimes this exists on darwin, sometimes not + if sys.platform != "win32" and sys.platform != "linux": + MSG_EOF = 256 + +MSG_CTRUNC: Final = MsgFlag.MSG_CTRUNC +MSG_DONTROUTE: Final = MsgFlag.MSG_DONTROUTE +MSG_OOB: Final = MsgFlag.MSG_OOB +MSG_PEEK: Final = MsgFlag.MSG_PEEK +MSG_TRUNC: Final = MsgFlag.MSG_TRUNC +MSG_WAITALL: Final = MsgFlag.MSG_WAITALL + +if sys.platform == "win32": + MSG_BCAST: Final = MsgFlag.MSG_BCAST + MSG_MCAST: Final = MsgFlag.MSG_MCAST + +if sys.platform != "darwin": + MSG_ERRQUEUE: Final = MsgFlag.MSG_ERRQUEUE + +if sys.platform != "win32": + MSG_DONTWAIT: Final = MsgFlag.MSG_DONTWAIT + MSG_EOR: Final = MsgFlag.MSG_EOR + MSG_NOSIGNAL: Final = MsgFlag.MSG_NOSIGNAL # Sometimes this exists on darwin, sometimes not + +if sys.platform != "win32" and sys.platform != "darwin": + MSG_CMSG_CLOEXEC: Final = MsgFlag.MSG_CMSG_CLOEXEC + MSG_CONFIRM: Final = MsgFlag.MSG_CONFIRM + MSG_FASTOPEN: Final = MsgFlag.MSG_FASTOPEN + MSG_MORE: Final = MsgFlag.MSG_MORE + +if sys.platform != "win32" and sys.platform != "darwin" and sys.platform != "linux": + MSG_NOTIFICATION: Final = MsgFlag.MSG_NOTIFICATION + +if sys.platform != "win32" and sys.platform != "linux": + MSG_EOF: Final = 
MsgFlag.MSG_EOF + +class AddressInfo(IntFlag): + AI_ADDRCONFIG = 32 + AI_ALL = 16 + AI_CANONNAME = 2 + AI_NUMERICHOST = 4 + AI_NUMERICSERV = 1024 + AI_PASSIVE = 1 + AI_V4MAPPED = 8 + if sys.platform != "win32" and sys.platform != "linux": + AI_DEFAULT = 1536 + AI_MASK = 5127 + AI_V4MAPPED_CFG = 512 + +AI_ADDRCONFIG: Final = AddressInfo.AI_ADDRCONFIG +AI_ALL: Final = AddressInfo.AI_ALL +AI_CANONNAME: Final = AddressInfo.AI_CANONNAME +AI_NUMERICHOST: Final = AddressInfo.AI_NUMERICHOST +AI_NUMERICSERV: Final = AddressInfo.AI_NUMERICSERV +AI_PASSIVE: Final = AddressInfo.AI_PASSIVE +AI_V4MAPPED: Final = AddressInfo.AI_V4MAPPED + +if sys.platform != "win32" and sys.platform != "linux": + AI_DEFAULT: Final = AddressInfo.AI_DEFAULT + AI_MASK: Final = AddressInfo.AI_MASK + AI_V4MAPPED_CFG: Final = AddressInfo.AI_V4MAPPED_CFG + +if sys.platform == "win32": + errorTab: dict[int, str] # undocumented + +@type_check_only +class _SendableFile(Protocol): + def read(self, size: int, /) -> bytes: ... + def seek(self, offset: int, /) -> object: ... + + # optional fields: + # + # @property + # def mode(self) -> str: ... + # def fileno(self) -> int: ... + +class socket(_socket.socket): + __slots__ = ["__weakref__", "_io_refs", "_closed"] + def __init__( + self, family: AddressFamily | int = -1, type: SocketKind | int = -1, proto: int = -1, fileno: int | None = None + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + def dup(self) -> Self: ... + def accept(self) -> tuple[socket, _RetAddress]: ... + # Note that the makefile's documented windows-specific behavior is not represented + # mode strings with duplicates are intentionally excluded + @overload + def makefile( + self, + mode: Literal["b", "rb", "br", "wb", "bw", "rwb", "rbw", "wrb", "wbr", "brw", "bwr"], + buffering: Literal[0], + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> SocketIO: ... + @overload + def makefile( + self, + mode: Literal["rwb", "rbw", "wrb", "wbr", "brw", "bwr"], + buffering: Literal[-1, 1] | None = None, + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> BufferedRWPair: ... + @overload + def makefile( + self, + mode: Literal["rb", "br"], + buffering: Literal[-1, 1] | None = None, + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> BufferedReader: ... + @overload + def makefile( + self, + mode: Literal["wb", "bw"], + buffering: Literal[-1, 1] | None = None, + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> BufferedWriter: ... + @overload + def makefile( + self, + mode: Literal["b", "rb", "br", "wb", "bw", "rwb", "rbw", "wrb", "wbr", "brw", "bwr"], + buffering: int, + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> IOBase: ... + @overload + def makefile( + self, + mode: Literal["r", "w", "rw", "wr", ""] = "r", + buffering: int | None = None, + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> TextIOWrapper: ... + def sendfile(self, file: _SendableFile, offset: int = 0, count: int | None = None) -> int: ... + @property + def family(self) -> AddressFamily: ... + @property + def type(self) -> SocketKind: ... + def get_inheritable(self) -> bool: ... + def set_inheritable(self, inheritable: bool) -> None: ... 
+ +def fromfd(fd: SupportsIndex, family: AddressFamily | int, type: SocketKind | int, proto: int = 0) -> socket: ... + +if sys.platform != "win32": + def send_fds( + sock: socket, buffers: Iterable[ReadableBuffer], fds: Iterable[int], flags: Unused = 0, address: Unused = None + ) -> int: ... + def recv_fds(sock: socket, bufsize: int, maxfds: int, flags: int = 0) -> tuple[bytes, list[int], int, Any]: ... + +if sys.platform == "win32": + def fromshare(info: bytes) -> socket: ... + +if sys.platform == "win32": + def socketpair(family: int = ..., type: int = ..., proto: int = 0) -> tuple[socket, socket]: ... + +else: + def socketpair( + family: int | AddressFamily | None = None, type: SocketType | int = ..., proto: int = 0 + ) -> tuple[socket, socket]: ... + +class SocketIO(RawIOBase): + def __init__(self, sock: socket, mode: Literal["r", "w", "rw", "rb", "wb", "rwb"]) -> None: ... + def readinto(self, b: WriteableBuffer) -> int | None: ... + def write(self, b: ReadableBuffer) -> int | None: ... + @property + def name(self) -> int: ... # return value is really "int" + @property + def mode(self) -> Literal["rb", "wb", "rwb"]: ... + +def getfqdn(name: str = "") -> str: ... + +if sys.version_info >= (3, 11): + def create_connection( + address: tuple[str | None, int], + timeout: float | None = ..., + source_address: _Address | None = None, + *, + all_errors: bool = False, + ) -> socket: ... + +else: + def create_connection( + address: tuple[str | None, int], timeout: float | None = ..., source_address: _Address | None = None + ) -> socket: ... + +def has_dualstack_ipv6() -> bool: ... +def create_server( + address: _Address, *, family: int = ..., backlog: int | None = None, reuse_port: bool = False, dualstack_ipv6: bool = False +) -> socket: ... + +# The 5th tuple item is the socket address, for IP4, IP6, or IP6 if Python is compiled with --disable-ipv6, respectively. +def getaddrinfo( + host: bytes | str | None, port: bytes | str | int | None, family: int = 0, type: int = 0, proto: int = 0, flags: int = 0 +) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes]]]: ... 
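Aside (not part of the diff above): the socket stubs just shown expose create_server() and create_connection() as the high-level helpers, both usable as context managers. A small self-contained sketch under those signatures — the loopback address, port 0 ("pick any free port"), and echo payload are illustrative choices:

import socket
import threading

def echo_once(server: socket.socket) -> None:
    # Accept a single connection and echo one message back.
    conn, _addr = server.accept()
    with conn:
        data = conn.recv(1024)
        conn.sendall(data)

def main() -> None:
    # create_server() binds and listens in one call and returns a socket.
    with socket.create_server(("127.0.0.1", 0)) as server:
        host, port = server.getsockname()[:2]
        threading.Thread(target=echo_once, args=(server,), daemon=True).start()

        # create_connection() resolves the address via getaddrinfo() and
        # returns a connected socket; it is also a context manager.
        with socket.create_connection((host, port), timeout=5) as client:
            client.sendall(b"ping")
            assert client.recv(1024) == b"ping"

if __name__ == "__main__":
    main()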
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/socketserver.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/socketserver.pyi new file mode 100644 index 0000000..f321d14 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/socketserver.pyi @@ -0,0 +1,170 @@ +import sys +import types +from _socket import _Address, _RetAddress +from _typeshed import ReadableBuffer +from collections.abc import Callable +from io import BufferedIOBase +from socket import socket as _socket +from typing import Any, ClassVar +from typing_extensions import Self, TypeAlias + +__all__ = [ + "BaseServer", + "TCPServer", + "UDPServer", + "ThreadingUDPServer", + "ThreadingTCPServer", + "BaseRequestHandler", + "StreamRequestHandler", + "DatagramRequestHandler", + "ThreadingMixIn", +] +if sys.platform != "win32": + __all__ += [ + "ForkingMixIn", + "ForkingTCPServer", + "ForkingUDPServer", + "ThreadingUnixDatagramServer", + "ThreadingUnixStreamServer", + "UnixDatagramServer", + "UnixStreamServer", + ] + if sys.version_info >= (3, 12): + __all__ += ["ForkingUnixStreamServer", "ForkingUnixDatagramServer"] + +_RequestType: TypeAlias = _socket | tuple[bytes, _socket] +_AfUnixAddress: TypeAlias = str | ReadableBuffer # address acceptable for an AF_UNIX socket +_AfInetAddress: TypeAlias = tuple[str | bytes | bytearray, int] # address acceptable for an AF_INET socket +_AfInet6Address: TypeAlias = tuple[str | bytes | bytearray, int, int, int] # address acceptable for an AF_INET6 socket + +# This can possibly be generic at some point: +class BaseServer: + server_address: _Address + timeout: float | None + RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler] + def __init__( + self, server_address: _Address, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler] + ) -> None: ... + def handle_request(self) -> None: ... + def serve_forever(self, poll_interval: float = 0.5) -> None: ... + def shutdown(self) -> None: ... + def server_close(self) -> None: ... + def finish_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... + def get_request(self) -> tuple[Any, Any]: ... # Not implemented here, but expected to exist on subclasses + def handle_error(self, request: _RequestType, client_address: _RetAddress) -> None: ... + def handle_timeout(self) -> None: ... + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... + def server_activate(self) -> None: ... + def verify_request(self, request: _RequestType, client_address: _RetAddress) -> bool: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... + def service_actions(self) -> None: ... + def shutdown_request(self, request: _RequestType) -> None: ... # undocumented + def close_request(self, request: _RequestType) -> None: ... # undocumented + +class TCPServer(BaseServer): + address_family: int + socket: _socket + allow_reuse_address: bool + request_queue_size: int + socket_type: int + if sys.version_info >= (3, 11): + allow_reuse_port: bool + server_address: _AfInetAddress | _AfInet6Address + def __init__( + self, + server_address: _AfInetAddress | _AfInet6Address, + RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], + bind_and_activate: bool = True, + ) -> None: ... + def fileno(self) -> int: ... + def get_request(self) -> tuple[_socket, _RetAddress]: ... 
+ def server_bind(self) -> None: ... + +class UDPServer(TCPServer): + max_packet_size: ClassVar[int] + def get_request(self) -> tuple[tuple[bytes, _socket], _RetAddress]: ... # type: ignore[override] + +if sys.platform != "win32": + class UnixStreamServer(TCPServer): + server_address: _AfUnixAddress # type: ignore[assignment] + def __init__( + self, + server_address: _AfUnixAddress, + RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], + bind_and_activate: bool = True, + ) -> None: ... + + class UnixDatagramServer(UDPServer): + server_address: _AfUnixAddress # type: ignore[assignment] + def __init__( + self, + server_address: _AfUnixAddress, + RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], + bind_and_activate: bool = True, + ) -> None: ... + +if sys.platform != "win32": + class ForkingMixIn: + timeout: float | None # undocumented + active_children: set[int] | None # undocumented + max_children: int # undocumented + block_on_close: bool + def collect_children(self, *, blocking: bool = False) -> None: ... # undocumented + def handle_timeout(self) -> None: ... # undocumented + def service_actions(self) -> None: ... # undocumented + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... + def server_close(self) -> None: ... + +class ThreadingMixIn: + daemon_threads: bool + block_on_close: bool + def process_request_thread(self, request: _RequestType, client_address: _RetAddress) -> None: ... # undocumented + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... + def server_close(self) -> None: ... + +if sys.platform != "win32": + class ForkingTCPServer(ForkingMixIn, TCPServer): ... + class ForkingUDPServer(ForkingMixIn, UDPServer): ... + if sys.version_info >= (3, 12): + class ForkingUnixStreamServer(ForkingMixIn, UnixStreamServer): ... + class ForkingUnixDatagramServer(ForkingMixIn, UnixDatagramServer): ... + +class ThreadingTCPServer(ThreadingMixIn, TCPServer): ... +class ThreadingUDPServer(ThreadingMixIn, UDPServer): ... + +if sys.platform != "win32": + class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): ... + class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): ... + +class BaseRequestHandler: + # `request` is technically of type _RequestType, + # but there are some concerns that having a union here would cause + # too much inconvenience to people using it (see + # https://github.com/python/typeshed/pull/384#issuecomment-234649696) + # + # Note also that _RetAddress is also just an alias for `Any` + request: Any + client_address: _RetAddress + server: BaseServer + def __init__(self, request: _RequestType, client_address: _RetAddress, server: BaseServer) -> None: ... + def setup(self) -> None: ... + def handle(self) -> None: ... + def finish(self) -> None: ... 
+ +class StreamRequestHandler(BaseRequestHandler): + rbufsize: ClassVar[int] # undocumented + wbufsize: ClassVar[int] # undocumented + timeout: ClassVar[float | None] # undocumented + disable_nagle_algorithm: ClassVar[bool] # undocumented + connection: Any # undocumented + rfile: BufferedIOBase + wfile: BufferedIOBase + +class DatagramRequestHandler(BaseRequestHandler): + packet: bytes # undocumented + socket: _socket # undocumented + rfile: BufferedIOBase + wfile: BufferedIOBase diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/spwd.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/spwd.pyi new file mode 100644 index 0000000..3a5d399 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/spwd.pyi @@ -0,0 +1,46 @@ +import sys +from _typeshed import structseq +from typing import Any, Final, final + +if sys.platform != "win32": + @final + class struct_spwd(structseq[Any], tuple[str, str, int, int, int, int, int, int, int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ( + "sp_namp", + "sp_pwdp", + "sp_lstchg", + "sp_min", + "sp_max", + "sp_warn", + "sp_inact", + "sp_expire", + "sp_flag", + ) + + @property + def sp_namp(self) -> str: ... + @property + def sp_pwdp(self) -> str: ... + @property + def sp_lstchg(self) -> int: ... + @property + def sp_min(self) -> int: ... + @property + def sp_max(self) -> int: ... + @property + def sp_warn(self) -> int: ... + @property + def sp_inact(self) -> int: ... + @property + def sp_expire(self) -> int: ... + @property + def sp_flag(self) -> int: ... + # Deprecated aliases below. + @property + def sp_nam(self) -> str: ... + @property + def sp_pwd(self) -> str: ... + + def getspall() -> list[struct_spwd]: ... + def getspnam(arg: str, /) -> struct_spwd: ... 
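Aside (not part of the diff above): the socketserver stubs shown a little earlier pair a server class with a BaseRequestHandler subclass; StreamRequestHandler supplies the rfile/wfile buffered streams declared in the stub. A minimal sketch under those signatures — host and port are illustrative placeholders:

import socketserver

class EchoHandler(socketserver.StreamRequestHandler):
    # rfile/wfile are the BufferedIOBase attributes from the stub above.
    def handle(self) -> None:
        line = self.rfile.readline()
        self.wfile.write(line)

def serve_once(host: str = "127.0.0.1", port: int = 0) -> None:
    # ThreadingTCPServer = ThreadingMixIn + TCPServer, matching the class
    # hierarchy in the stub; as a context manager it calls server_close() on exit.
    with socketserver.ThreadingTCPServer((host, port), EchoHandler) as server:
        server.handle_request()  # blocks until one client connects, serves it, returns

Pairing serve_once() with socket.create_connection() from the previous sketch exercises the handler end to end.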
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sqlite3/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sqlite3/__init__.pyi new file mode 100644 index 0000000..882cd14 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sqlite3/__init__.pyi @@ -0,0 +1,478 @@ +import sys +from _typeshed import MaybeNone, ReadableBuffer, StrOrBytesPath, SupportsLenAndGetItem, Unused +from collections.abc import Callable, Generator, Iterable, Iterator, Mapping, Sequence +from sqlite3.dbapi2 import ( + PARSE_COLNAMES as PARSE_COLNAMES, + PARSE_DECLTYPES as PARSE_DECLTYPES, + SQLITE_ALTER_TABLE as SQLITE_ALTER_TABLE, + SQLITE_ANALYZE as SQLITE_ANALYZE, + SQLITE_ATTACH as SQLITE_ATTACH, + SQLITE_CREATE_INDEX as SQLITE_CREATE_INDEX, + SQLITE_CREATE_TABLE as SQLITE_CREATE_TABLE, + SQLITE_CREATE_TEMP_INDEX as SQLITE_CREATE_TEMP_INDEX, + SQLITE_CREATE_TEMP_TABLE as SQLITE_CREATE_TEMP_TABLE, + SQLITE_CREATE_TEMP_TRIGGER as SQLITE_CREATE_TEMP_TRIGGER, + SQLITE_CREATE_TEMP_VIEW as SQLITE_CREATE_TEMP_VIEW, + SQLITE_CREATE_TRIGGER as SQLITE_CREATE_TRIGGER, + SQLITE_CREATE_VIEW as SQLITE_CREATE_VIEW, + SQLITE_CREATE_VTABLE as SQLITE_CREATE_VTABLE, + SQLITE_DELETE as SQLITE_DELETE, + SQLITE_DENY as SQLITE_DENY, + SQLITE_DETACH as SQLITE_DETACH, + SQLITE_DONE as SQLITE_DONE, + SQLITE_DROP_INDEX as SQLITE_DROP_INDEX, + SQLITE_DROP_TABLE as SQLITE_DROP_TABLE, + SQLITE_DROP_TEMP_INDEX as SQLITE_DROP_TEMP_INDEX, + SQLITE_DROP_TEMP_TABLE as SQLITE_DROP_TEMP_TABLE, + SQLITE_DROP_TEMP_TRIGGER as SQLITE_DROP_TEMP_TRIGGER, + SQLITE_DROP_TEMP_VIEW as SQLITE_DROP_TEMP_VIEW, + SQLITE_DROP_TRIGGER as SQLITE_DROP_TRIGGER, + SQLITE_DROP_VIEW as SQLITE_DROP_VIEW, + SQLITE_DROP_VTABLE as SQLITE_DROP_VTABLE, + SQLITE_FUNCTION as SQLITE_FUNCTION, + SQLITE_IGNORE as SQLITE_IGNORE, + SQLITE_INSERT as SQLITE_INSERT, + SQLITE_OK as SQLITE_OK, + SQLITE_PRAGMA as SQLITE_PRAGMA, + SQLITE_READ as SQLITE_READ, + SQLITE_RECURSIVE as SQLITE_RECURSIVE, + SQLITE_REINDEX as SQLITE_REINDEX, + SQLITE_SAVEPOINT as SQLITE_SAVEPOINT, + SQLITE_SELECT as SQLITE_SELECT, + SQLITE_TRANSACTION as SQLITE_TRANSACTION, + SQLITE_UPDATE as SQLITE_UPDATE, + Binary as Binary, + Date as Date, + DateFromTicks as DateFromTicks, + Time as Time, + TimeFromTicks as TimeFromTicks, + TimestampFromTicks as TimestampFromTicks, + adapt as adapt, + adapters as adapters, + apilevel as apilevel, + complete_statement as complete_statement, + connect as connect, + converters as converters, + enable_callback_tracebacks as enable_callback_tracebacks, + paramstyle as paramstyle, + register_adapter as register_adapter, + register_converter as register_converter, + sqlite_version as sqlite_version, + sqlite_version_info as sqlite_version_info, + threadsafety as threadsafety, +) +from types import TracebackType +from typing import Any, Literal, Protocol, SupportsIndex, TypeVar, final, overload, type_check_only +from typing_extensions import Self, TypeAlias, disjoint_base + +if sys.version_info < (3, 14): + from sqlite3.dbapi2 import version_info as version_info + +if sys.version_info >= (3, 12): + from sqlite3.dbapi2 import ( + LEGACY_TRANSACTION_CONTROL as LEGACY_TRANSACTION_CONTROL, + SQLITE_DBCONFIG_DEFENSIVE as SQLITE_DBCONFIG_DEFENSIVE, + SQLITE_DBCONFIG_DQS_DDL as SQLITE_DBCONFIG_DQS_DDL, + SQLITE_DBCONFIG_DQS_DML as SQLITE_DBCONFIG_DQS_DML, + SQLITE_DBCONFIG_ENABLE_FKEY as SQLITE_DBCONFIG_ENABLE_FKEY, + SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER as SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER, + SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION 
as SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION, + SQLITE_DBCONFIG_ENABLE_QPSG as SQLITE_DBCONFIG_ENABLE_QPSG, + SQLITE_DBCONFIG_ENABLE_TRIGGER as SQLITE_DBCONFIG_ENABLE_TRIGGER, + SQLITE_DBCONFIG_ENABLE_VIEW as SQLITE_DBCONFIG_ENABLE_VIEW, + SQLITE_DBCONFIG_LEGACY_ALTER_TABLE as SQLITE_DBCONFIG_LEGACY_ALTER_TABLE, + SQLITE_DBCONFIG_LEGACY_FILE_FORMAT as SQLITE_DBCONFIG_LEGACY_FILE_FORMAT, + SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE as SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE, + SQLITE_DBCONFIG_RESET_DATABASE as SQLITE_DBCONFIG_RESET_DATABASE, + SQLITE_DBCONFIG_TRIGGER_EQP as SQLITE_DBCONFIG_TRIGGER_EQP, + SQLITE_DBCONFIG_TRUSTED_SCHEMA as SQLITE_DBCONFIG_TRUSTED_SCHEMA, + SQLITE_DBCONFIG_WRITABLE_SCHEMA as SQLITE_DBCONFIG_WRITABLE_SCHEMA, + ) + +if sys.version_info >= (3, 11): + from sqlite3.dbapi2 import ( + SQLITE_ABORT as SQLITE_ABORT, + SQLITE_ABORT_ROLLBACK as SQLITE_ABORT_ROLLBACK, + SQLITE_AUTH as SQLITE_AUTH, + SQLITE_AUTH_USER as SQLITE_AUTH_USER, + SQLITE_BUSY as SQLITE_BUSY, + SQLITE_BUSY_RECOVERY as SQLITE_BUSY_RECOVERY, + SQLITE_BUSY_SNAPSHOT as SQLITE_BUSY_SNAPSHOT, + SQLITE_BUSY_TIMEOUT as SQLITE_BUSY_TIMEOUT, + SQLITE_CANTOPEN as SQLITE_CANTOPEN, + SQLITE_CANTOPEN_CONVPATH as SQLITE_CANTOPEN_CONVPATH, + SQLITE_CANTOPEN_DIRTYWAL as SQLITE_CANTOPEN_DIRTYWAL, + SQLITE_CANTOPEN_FULLPATH as SQLITE_CANTOPEN_FULLPATH, + SQLITE_CANTOPEN_ISDIR as SQLITE_CANTOPEN_ISDIR, + SQLITE_CANTOPEN_NOTEMPDIR as SQLITE_CANTOPEN_NOTEMPDIR, + SQLITE_CANTOPEN_SYMLINK as SQLITE_CANTOPEN_SYMLINK, + SQLITE_CONSTRAINT as SQLITE_CONSTRAINT, + SQLITE_CONSTRAINT_CHECK as SQLITE_CONSTRAINT_CHECK, + SQLITE_CONSTRAINT_COMMITHOOK as SQLITE_CONSTRAINT_COMMITHOOK, + SQLITE_CONSTRAINT_FOREIGNKEY as SQLITE_CONSTRAINT_FOREIGNKEY, + SQLITE_CONSTRAINT_FUNCTION as SQLITE_CONSTRAINT_FUNCTION, + SQLITE_CONSTRAINT_NOTNULL as SQLITE_CONSTRAINT_NOTNULL, + SQLITE_CONSTRAINT_PINNED as SQLITE_CONSTRAINT_PINNED, + SQLITE_CONSTRAINT_PRIMARYKEY as SQLITE_CONSTRAINT_PRIMARYKEY, + SQLITE_CONSTRAINT_ROWID as SQLITE_CONSTRAINT_ROWID, + SQLITE_CONSTRAINT_TRIGGER as SQLITE_CONSTRAINT_TRIGGER, + SQLITE_CONSTRAINT_UNIQUE as SQLITE_CONSTRAINT_UNIQUE, + SQLITE_CONSTRAINT_VTAB as SQLITE_CONSTRAINT_VTAB, + SQLITE_CORRUPT as SQLITE_CORRUPT, + SQLITE_CORRUPT_INDEX as SQLITE_CORRUPT_INDEX, + SQLITE_CORRUPT_SEQUENCE as SQLITE_CORRUPT_SEQUENCE, + SQLITE_CORRUPT_VTAB as SQLITE_CORRUPT_VTAB, + SQLITE_EMPTY as SQLITE_EMPTY, + SQLITE_ERROR as SQLITE_ERROR, + SQLITE_ERROR_MISSING_COLLSEQ as SQLITE_ERROR_MISSING_COLLSEQ, + SQLITE_ERROR_RETRY as SQLITE_ERROR_RETRY, + SQLITE_ERROR_SNAPSHOT as SQLITE_ERROR_SNAPSHOT, + SQLITE_FORMAT as SQLITE_FORMAT, + SQLITE_FULL as SQLITE_FULL, + SQLITE_INTERNAL as SQLITE_INTERNAL, + SQLITE_INTERRUPT as SQLITE_INTERRUPT, + SQLITE_IOERR as SQLITE_IOERR, + SQLITE_IOERR_ACCESS as SQLITE_IOERR_ACCESS, + SQLITE_IOERR_AUTH as SQLITE_IOERR_AUTH, + SQLITE_IOERR_BEGIN_ATOMIC as SQLITE_IOERR_BEGIN_ATOMIC, + SQLITE_IOERR_BLOCKED as SQLITE_IOERR_BLOCKED, + SQLITE_IOERR_CHECKRESERVEDLOCK as SQLITE_IOERR_CHECKRESERVEDLOCK, + SQLITE_IOERR_CLOSE as SQLITE_IOERR_CLOSE, + SQLITE_IOERR_COMMIT_ATOMIC as SQLITE_IOERR_COMMIT_ATOMIC, + SQLITE_IOERR_CONVPATH as SQLITE_IOERR_CONVPATH, + SQLITE_IOERR_CORRUPTFS as SQLITE_IOERR_CORRUPTFS, + SQLITE_IOERR_DATA as SQLITE_IOERR_DATA, + SQLITE_IOERR_DELETE as SQLITE_IOERR_DELETE, + SQLITE_IOERR_DELETE_NOENT as SQLITE_IOERR_DELETE_NOENT, + SQLITE_IOERR_DIR_CLOSE as SQLITE_IOERR_DIR_CLOSE, + SQLITE_IOERR_DIR_FSYNC as SQLITE_IOERR_DIR_FSYNC, + SQLITE_IOERR_FSTAT as SQLITE_IOERR_FSTAT, + SQLITE_IOERR_FSYNC as 
SQLITE_IOERR_FSYNC, + SQLITE_IOERR_GETTEMPPATH as SQLITE_IOERR_GETTEMPPATH, + SQLITE_IOERR_LOCK as SQLITE_IOERR_LOCK, + SQLITE_IOERR_MMAP as SQLITE_IOERR_MMAP, + SQLITE_IOERR_NOMEM as SQLITE_IOERR_NOMEM, + SQLITE_IOERR_RDLOCK as SQLITE_IOERR_RDLOCK, + SQLITE_IOERR_READ as SQLITE_IOERR_READ, + SQLITE_IOERR_ROLLBACK_ATOMIC as SQLITE_IOERR_ROLLBACK_ATOMIC, + SQLITE_IOERR_SEEK as SQLITE_IOERR_SEEK, + SQLITE_IOERR_SHMLOCK as SQLITE_IOERR_SHMLOCK, + SQLITE_IOERR_SHMMAP as SQLITE_IOERR_SHMMAP, + SQLITE_IOERR_SHMOPEN as SQLITE_IOERR_SHMOPEN, + SQLITE_IOERR_SHMSIZE as SQLITE_IOERR_SHMSIZE, + SQLITE_IOERR_SHORT_READ as SQLITE_IOERR_SHORT_READ, + SQLITE_IOERR_TRUNCATE as SQLITE_IOERR_TRUNCATE, + SQLITE_IOERR_UNLOCK as SQLITE_IOERR_UNLOCK, + SQLITE_IOERR_VNODE as SQLITE_IOERR_VNODE, + SQLITE_IOERR_WRITE as SQLITE_IOERR_WRITE, + SQLITE_LIMIT_ATTACHED as SQLITE_LIMIT_ATTACHED, + SQLITE_LIMIT_COLUMN as SQLITE_LIMIT_COLUMN, + SQLITE_LIMIT_COMPOUND_SELECT as SQLITE_LIMIT_COMPOUND_SELECT, + SQLITE_LIMIT_EXPR_DEPTH as SQLITE_LIMIT_EXPR_DEPTH, + SQLITE_LIMIT_FUNCTION_ARG as SQLITE_LIMIT_FUNCTION_ARG, + SQLITE_LIMIT_LENGTH as SQLITE_LIMIT_LENGTH, + SQLITE_LIMIT_LIKE_PATTERN_LENGTH as SQLITE_LIMIT_LIKE_PATTERN_LENGTH, + SQLITE_LIMIT_SQL_LENGTH as SQLITE_LIMIT_SQL_LENGTH, + SQLITE_LIMIT_TRIGGER_DEPTH as SQLITE_LIMIT_TRIGGER_DEPTH, + SQLITE_LIMIT_VARIABLE_NUMBER as SQLITE_LIMIT_VARIABLE_NUMBER, + SQLITE_LIMIT_VDBE_OP as SQLITE_LIMIT_VDBE_OP, + SQLITE_LIMIT_WORKER_THREADS as SQLITE_LIMIT_WORKER_THREADS, + SQLITE_LOCKED as SQLITE_LOCKED, + SQLITE_LOCKED_SHAREDCACHE as SQLITE_LOCKED_SHAREDCACHE, + SQLITE_LOCKED_VTAB as SQLITE_LOCKED_VTAB, + SQLITE_MISMATCH as SQLITE_MISMATCH, + SQLITE_MISUSE as SQLITE_MISUSE, + SQLITE_NOLFS as SQLITE_NOLFS, + SQLITE_NOMEM as SQLITE_NOMEM, + SQLITE_NOTADB as SQLITE_NOTADB, + SQLITE_NOTFOUND as SQLITE_NOTFOUND, + SQLITE_NOTICE as SQLITE_NOTICE, + SQLITE_NOTICE_RECOVER_ROLLBACK as SQLITE_NOTICE_RECOVER_ROLLBACK, + SQLITE_NOTICE_RECOVER_WAL as SQLITE_NOTICE_RECOVER_WAL, + SQLITE_OK_LOAD_PERMANENTLY as SQLITE_OK_LOAD_PERMANENTLY, + SQLITE_OK_SYMLINK as SQLITE_OK_SYMLINK, + SQLITE_PERM as SQLITE_PERM, + SQLITE_PROTOCOL as SQLITE_PROTOCOL, + SQLITE_RANGE as SQLITE_RANGE, + SQLITE_READONLY as SQLITE_READONLY, + SQLITE_READONLY_CANTINIT as SQLITE_READONLY_CANTINIT, + SQLITE_READONLY_CANTLOCK as SQLITE_READONLY_CANTLOCK, + SQLITE_READONLY_DBMOVED as SQLITE_READONLY_DBMOVED, + SQLITE_READONLY_DIRECTORY as SQLITE_READONLY_DIRECTORY, + SQLITE_READONLY_RECOVERY as SQLITE_READONLY_RECOVERY, + SQLITE_READONLY_ROLLBACK as SQLITE_READONLY_ROLLBACK, + SQLITE_ROW as SQLITE_ROW, + SQLITE_SCHEMA as SQLITE_SCHEMA, + SQLITE_TOOBIG as SQLITE_TOOBIG, + SQLITE_WARNING as SQLITE_WARNING, + SQLITE_WARNING_AUTOINDEX as SQLITE_WARNING_AUTOINDEX, + ) + +if sys.version_info < (3, 12): + from sqlite3.dbapi2 import enable_shared_cache as enable_shared_cache, version as version + +if sys.version_info < (3, 10): + from sqlite3.dbapi2 import OptimizedUnicode as OptimizedUnicode + +_CursorT = TypeVar("_CursorT", bound=Cursor) +_SqliteData: TypeAlias = str | ReadableBuffer | int | float | None +# Data that is passed through adapters can be of any type accepted by an adapter. +_AdaptedInputData: TypeAlias = _SqliteData | Any +# The Mapping must really be a dict, but making it invariant is too annoying. +_Parameters: TypeAlias = SupportsLenAndGetItem[_AdaptedInputData] | Mapping[str, _AdaptedInputData] +# Controls the legacy transaction handling mode of sqlite3. 
+_IsolationLevel: TypeAlias = Literal["DEFERRED", "EXCLUSIVE", "IMMEDIATE"] | None + +@type_check_only +class _AnyParamWindowAggregateClass(Protocol): + def step(self, *args: Any) -> object: ... + def inverse(self, *args: Any) -> object: ... + def value(self) -> _SqliteData: ... + def finalize(self) -> _SqliteData: ... + +@type_check_only +class _WindowAggregateClass(Protocol): + step: Callable[..., object] + inverse: Callable[..., object] + def value(self) -> _SqliteData: ... + def finalize(self) -> _SqliteData: ... + +@type_check_only +class _AggregateProtocol(Protocol): + def step(self, value: int, /) -> object: ... + def finalize(self) -> int: ... + +@type_check_only +class _SingleParamWindowAggregateClass(Protocol): + def step(self, param: Any, /) -> object: ... + def inverse(self, param: Any, /) -> object: ... + def value(self) -> _SqliteData: ... + def finalize(self) -> _SqliteData: ... + +# These classes are implemented in the C module _sqlite3. At runtime, they're imported +# from there into sqlite3.dbapi2 and from that module to here. However, they +# consider themselves to live in the sqlite3.* namespace, so we'll define them here. + +class Error(Exception): + if sys.version_info >= (3, 11): + sqlite_errorcode: int + sqlite_errorname: str + +class DatabaseError(Error): ... +class DataError(DatabaseError): ... +class IntegrityError(DatabaseError): ... +class InterfaceError(Error): ... +class InternalError(DatabaseError): ... +class NotSupportedError(DatabaseError): ... +class OperationalError(DatabaseError): ... +class ProgrammingError(DatabaseError): ... +class Warning(Exception): ... + +@disjoint_base +class Connection: + @property + def DataError(self) -> type[DataError]: ... + @property + def DatabaseError(self) -> type[DatabaseError]: ... + @property + def Error(self) -> type[Error]: ... + @property + def IntegrityError(self) -> type[IntegrityError]: ... + @property + def InterfaceError(self) -> type[InterfaceError]: ... + @property + def InternalError(self) -> type[InternalError]: ... + @property + def NotSupportedError(self) -> type[NotSupportedError]: ... + @property + def OperationalError(self) -> type[OperationalError]: ... + @property + def ProgrammingError(self) -> type[ProgrammingError]: ... + @property + def Warning(self) -> type[Warning]: ... + @property + def in_transaction(self) -> bool: ... + isolation_level: _IsolationLevel + @property + def total_changes(self) -> int: ... + if sys.version_info >= (3, 12): + @property + def autocommit(self) -> int: ... + @autocommit.setter + def autocommit(self, val: int) -> None: ... + row_factory: Any + text_factory: Any + if sys.version_info >= (3, 12): + def __init__( + self, + database: StrOrBytesPath, + timeout: float = 5.0, + detect_types: int = 0, + isolation_level: _IsolationLevel = "DEFERRED", + check_same_thread: bool = True, + factory: type[Connection] | None = ..., + cached_statements: int = 128, + uri: bool = False, + autocommit: bool = ..., + ) -> None: ... + else: + def __init__( + self, + database: StrOrBytesPath, + timeout: float = 5.0, + detect_types: int = 0, + isolation_level: _IsolationLevel = "DEFERRED", + check_same_thread: bool = True, + factory: type[Connection] | None = ..., + cached_statements: int = 128, + uri: bool = False, + ) -> None: ... + + def close(self) -> None: ... + if sys.version_info >= (3, 11): + def blobopen(self, table: str, column: str, row: int, /, *, readonly: bool = False, name: str = "main") -> Blob: ... + + def commit(self) -> None: ... 
+ def create_aggregate(self, name: str, n_arg: int, aggregate_class: Callable[[], _AggregateProtocol]) -> None: ... + if sys.version_info >= (3, 11): + # num_params determines how many params will be passed to the aggregate class. We provide an overload + # for the case where num_params = 1, which is expected to be the common case. + @overload + def create_window_function( + self, name: str, num_params: Literal[1], aggregate_class: Callable[[], _SingleParamWindowAggregateClass] | None, / + ) -> None: ... + # And for num_params = -1, which means the aggregate must accept any number of parameters. + @overload + def create_window_function( + self, name: str, num_params: Literal[-1], aggregate_class: Callable[[], _AnyParamWindowAggregateClass] | None, / + ) -> None: ... + @overload + def create_window_function( + self, name: str, num_params: int, aggregate_class: Callable[[], _WindowAggregateClass] | None, / + ) -> None: ... + + def create_collation(self, name: str, callback: Callable[[str, str], int | SupportsIndex] | None, /) -> None: ... + def create_function( + self, name: str, narg: int, func: Callable[..., _SqliteData] | None, *, deterministic: bool = False + ) -> None: ... + @overload + def cursor(self, factory: None = None) -> Cursor: ... + @overload + def cursor(self, factory: Callable[[Connection], _CursorT]) -> _CursorT: ... + def execute(self, sql: str, parameters: _Parameters = ..., /) -> Cursor: ... + def executemany(self, sql: str, parameters: Iterable[_Parameters], /) -> Cursor: ... + def executescript(self, sql_script: str, /) -> Cursor: ... + def interrupt(self) -> None: ... + if sys.version_info >= (3, 13): + def iterdump(self, *, filter: str | None = None) -> Generator[str, None, None]: ... + else: + def iterdump(self) -> Generator[str, None, None]: ... + + def rollback(self) -> None: ... + def set_authorizer( + self, authorizer_callback: Callable[[int, str | None, str | None, str | None, str | None], int] | None + ) -> None: ... + def set_progress_handler(self, progress_handler: Callable[[], int | None] | None, n: int) -> None: ... + def set_trace_callback(self, trace_callback: Callable[[str], object] | None) -> None: ... + # enable_load_extension and load_extension is not available on python distributions compiled + # without sqlite3 loadable extension support. see footnotes https://docs.python.org/3/library/sqlite3.html#f1 + def enable_load_extension(self, enable: bool, /) -> None: ... + if sys.version_info >= (3, 12): + def load_extension(self, name: str, /, *, entrypoint: str | None = None) -> None: ... + else: + def load_extension(self, name: str, /) -> None: ... + + def backup( + self, + target: Connection, + *, + pages: int = -1, + progress: Callable[[int, int, int], object] | None = None, + name: str = "main", + sleep: float = 0.25, + ) -> None: ... + if sys.version_info >= (3, 11): + def setlimit(self, category: int, limit: int, /) -> int: ... + def getlimit(self, category: int, /) -> int: ... + def serialize(self, *, name: str = "main") -> bytes: ... + def deserialize(self, data: ReadableBuffer, /, *, name: str = "main") -> None: ... + if sys.version_info >= (3, 12): + def getconfig(self, op: int, /) -> bool: ... + def setconfig(self, op: int, enable: bool = True, /) -> bool: ... + + def __call__(self, sql: str, /) -> _Statement: ... + def __enter__(self) -> Self: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None, / + ) -> Literal[False]: ... 
+ +@disjoint_base +class Cursor(Iterator[Any]): + arraysize: int + @property + def connection(self) -> Connection: ... + # May be None, but using `| MaybeNone` (`| Any`) instead to avoid slightly annoying false positives. + @property + def description(self) -> tuple[tuple[str, None, None, None, None, None, None], ...] | MaybeNone: ... + @property + def lastrowid(self) -> int | None: ... + row_factory: Callable[[Cursor, Row], object] | None + @property + def rowcount(self) -> int: ... + def __init__(self, cursor: Connection, /) -> None: ... + def close(self) -> None: ... + def execute(self, sql: str, parameters: _Parameters = (), /) -> Self: ... + def executemany(self, sql: str, seq_of_parameters: Iterable[_Parameters], /) -> Self: ... + def executescript(self, sql_script: str, /) -> Cursor: ... + def fetchall(self) -> list[Any]: ... + def fetchmany(self, size: int | None = 1) -> list[Any]: ... + # Returns either a row (as created by the row_factory) or None, but + # putting None in the return annotation causes annoying false positives. + def fetchone(self) -> Any: ... + def setinputsizes(self, sizes: Unused, /) -> None: ... # does nothing + def setoutputsize(self, size: Unused, column: Unused = None, /) -> None: ... # does nothing + def __iter__(self) -> Self: ... + def __next__(self) -> Any: ... + +@final +class PrepareProtocol: + def __init__(self, *args: object, **kwargs: object) -> None: ... + +@disjoint_base +class Row(Sequence[Any]): + def __new__(cls, cursor: Cursor, data: tuple[Any, ...], /) -> Self: ... + def keys(self) -> list[str]: ... + @overload + def __getitem__(self, key: int | str, /) -> Any: ... + @overload + def __getitem__(self, key: slice, /) -> tuple[Any, ...]: ... + def __hash__(self) -> int: ... + def __iter__(self) -> Iterator[Any]: ... + def __len__(self) -> int: ... + # These return NotImplemented for anything that is not a Row. + def __eq__(self, value: object, /) -> bool: ... + def __ge__(self, value: object, /) -> bool: ... + def __gt__(self, value: object, /) -> bool: ... + def __le__(self, value: object, /) -> bool: ... + def __lt__(self, value: object, /) -> bool: ... + def __ne__(self, value: object, /) -> bool: ... + +# This class is not exposed. It calls itself sqlite3.Statement. +@final +@type_check_only +class _Statement: ... + +if sys.version_info >= (3, 11): + @final + class Blob: + def close(self) -> None: ... + def read(self, length: int = -1, /) -> bytes: ... + def write(self, data: ReadableBuffer, /) -> None: ... + def tell(self) -> int: ... + # whence must be one of os.SEEK_SET, os.SEEK_CUR, os.SEEK_END + def seek(self, offset: int, origin: int = 0, /) -> None: ... + def __len__(self) -> int: ... + def __enter__(self) -> Self: ... + def __exit__(self, type: object, val: object, tb: object, /) -> Literal[False]: ... + def __getitem__(self, key: SupportsIndex | slice, /) -> int: ... + def __setitem__(self, key: SupportsIndex | slice, value: int, /) -> None: ... 
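The Connection stub above types create_window_function through small protocols (step/inverse/value/finalize). A minimal usage sketch of that API, assuming Python 3.11+ and an SQLite build with window-function support (3.25 or newer); the table name, column, and WindowSum class are illustrative only and not part of the stub:

import sqlite3

# WindowSum matches the single-parameter window-aggregate protocol from the stub:
# step() adds a row entering the frame, inverse() removes a row leaving it,
# value() reports the running result, finalize() the result for the final frame.
class WindowSum:
    def __init__(self) -> None:
        self.total = 0.0
    def step(self, value: float) -> None:
        self.total += value
    def inverse(self, value: float) -> None:
        self.total -= value
    def value(self) -> float:
        return self.total
    def finalize(self) -> float:
        return self.total

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE observations (x REAL)")
con.executemany("INSERT INTO observations VALUES (?)", [(1.0,), (2.0,), (3.0,)])
con.create_window_function("winsum", 1, WindowSum)
rows = con.execute(
    "SELECT x, winsum(x) OVER (ORDER BY x ROWS 2 PRECEDING) FROM observations"
).fetchall()
print(rows)  # expected: [(1.0, 1.0), (2.0, 3.0), (3.0, 6.0)]
con.close()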
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi new file mode 100644 index 0000000..9e170a8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi @@ -0,0 +1,247 @@ +import sys +from _sqlite3 import ( + PARSE_COLNAMES as PARSE_COLNAMES, + PARSE_DECLTYPES as PARSE_DECLTYPES, + SQLITE_ALTER_TABLE as SQLITE_ALTER_TABLE, + SQLITE_ANALYZE as SQLITE_ANALYZE, + SQLITE_ATTACH as SQLITE_ATTACH, + SQLITE_CREATE_INDEX as SQLITE_CREATE_INDEX, + SQLITE_CREATE_TABLE as SQLITE_CREATE_TABLE, + SQLITE_CREATE_TEMP_INDEX as SQLITE_CREATE_TEMP_INDEX, + SQLITE_CREATE_TEMP_TABLE as SQLITE_CREATE_TEMP_TABLE, + SQLITE_CREATE_TEMP_TRIGGER as SQLITE_CREATE_TEMP_TRIGGER, + SQLITE_CREATE_TEMP_VIEW as SQLITE_CREATE_TEMP_VIEW, + SQLITE_CREATE_TRIGGER as SQLITE_CREATE_TRIGGER, + SQLITE_CREATE_VIEW as SQLITE_CREATE_VIEW, + SQLITE_CREATE_VTABLE as SQLITE_CREATE_VTABLE, + SQLITE_DELETE as SQLITE_DELETE, + SQLITE_DENY as SQLITE_DENY, + SQLITE_DETACH as SQLITE_DETACH, + SQLITE_DONE as SQLITE_DONE, + SQLITE_DROP_INDEX as SQLITE_DROP_INDEX, + SQLITE_DROP_TABLE as SQLITE_DROP_TABLE, + SQLITE_DROP_TEMP_INDEX as SQLITE_DROP_TEMP_INDEX, + SQLITE_DROP_TEMP_TABLE as SQLITE_DROP_TEMP_TABLE, + SQLITE_DROP_TEMP_TRIGGER as SQLITE_DROP_TEMP_TRIGGER, + SQLITE_DROP_TEMP_VIEW as SQLITE_DROP_TEMP_VIEW, + SQLITE_DROP_TRIGGER as SQLITE_DROP_TRIGGER, + SQLITE_DROP_VIEW as SQLITE_DROP_VIEW, + SQLITE_DROP_VTABLE as SQLITE_DROP_VTABLE, + SQLITE_FUNCTION as SQLITE_FUNCTION, + SQLITE_IGNORE as SQLITE_IGNORE, + SQLITE_INSERT as SQLITE_INSERT, + SQLITE_OK as SQLITE_OK, + SQLITE_PRAGMA as SQLITE_PRAGMA, + SQLITE_READ as SQLITE_READ, + SQLITE_RECURSIVE as SQLITE_RECURSIVE, + SQLITE_REINDEX as SQLITE_REINDEX, + SQLITE_SAVEPOINT as SQLITE_SAVEPOINT, + SQLITE_SELECT as SQLITE_SELECT, + SQLITE_TRANSACTION as SQLITE_TRANSACTION, + SQLITE_UPDATE as SQLITE_UPDATE, + adapt as adapt, + adapters as adapters, + complete_statement as complete_statement, + connect as connect, + converters as converters, + enable_callback_tracebacks as enable_callback_tracebacks, + register_adapter as register_adapter, + register_converter as register_converter, + sqlite_version as sqlite_version, +) +from datetime import date, datetime, time +from sqlite3 import ( + Connection as Connection, + Cursor as Cursor, + DatabaseError as DatabaseError, + DataError as DataError, + Error as Error, + IntegrityError as IntegrityError, + InterfaceError as InterfaceError, + InternalError as InternalError, + NotSupportedError as NotSupportedError, + OperationalError as OperationalError, + PrepareProtocol as PrepareProtocol, + ProgrammingError as ProgrammingError, + Row as Row, + Warning as Warning, +) +from typing import Final, Literal +from typing_extensions import deprecated + +if sys.version_info >= (3, 12): + from _sqlite3 import ( + LEGACY_TRANSACTION_CONTROL as LEGACY_TRANSACTION_CONTROL, + SQLITE_DBCONFIG_DEFENSIVE as SQLITE_DBCONFIG_DEFENSIVE, + SQLITE_DBCONFIG_DQS_DDL as SQLITE_DBCONFIG_DQS_DDL, + SQLITE_DBCONFIG_DQS_DML as SQLITE_DBCONFIG_DQS_DML, + SQLITE_DBCONFIG_ENABLE_FKEY as SQLITE_DBCONFIG_ENABLE_FKEY, + SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER as SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER, + SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION as SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION, + SQLITE_DBCONFIG_ENABLE_QPSG as SQLITE_DBCONFIG_ENABLE_QPSG, + SQLITE_DBCONFIG_ENABLE_TRIGGER as SQLITE_DBCONFIG_ENABLE_TRIGGER, + SQLITE_DBCONFIG_ENABLE_VIEW as 
SQLITE_DBCONFIG_ENABLE_VIEW, + SQLITE_DBCONFIG_LEGACY_ALTER_TABLE as SQLITE_DBCONFIG_LEGACY_ALTER_TABLE, + SQLITE_DBCONFIG_LEGACY_FILE_FORMAT as SQLITE_DBCONFIG_LEGACY_FILE_FORMAT, + SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE as SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE, + SQLITE_DBCONFIG_RESET_DATABASE as SQLITE_DBCONFIG_RESET_DATABASE, + SQLITE_DBCONFIG_TRIGGER_EQP as SQLITE_DBCONFIG_TRIGGER_EQP, + SQLITE_DBCONFIG_TRUSTED_SCHEMA as SQLITE_DBCONFIG_TRUSTED_SCHEMA, + SQLITE_DBCONFIG_WRITABLE_SCHEMA as SQLITE_DBCONFIG_WRITABLE_SCHEMA, + ) + +if sys.version_info >= (3, 11): + from _sqlite3 import ( + SQLITE_ABORT as SQLITE_ABORT, + SQLITE_ABORT_ROLLBACK as SQLITE_ABORT_ROLLBACK, + SQLITE_AUTH as SQLITE_AUTH, + SQLITE_AUTH_USER as SQLITE_AUTH_USER, + SQLITE_BUSY as SQLITE_BUSY, + SQLITE_BUSY_RECOVERY as SQLITE_BUSY_RECOVERY, + SQLITE_BUSY_SNAPSHOT as SQLITE_BUSY_SNAPSHOT, + SQLITE_BUSY_TIMEOUT as SQLITE_BUSY_TIMEOUT, + SQLITE_CANTOPEN as SQLITE_CANTOPEN, + SQLITE_CANTOPEN_CONVPATH as SQLITE_CANTOPEN_CONVPATH, + SQLITE_CANTOPEN_DIRTYWAL as SQLITE_CANTOPEN_DIRTYWAL, + SQLITE_CANTOPEN_FULLPATH as SQLITE_CANTOPEN_FULLPATH, + SQLITE_CANTOPEN_ISDIR as SQLITE_CANTOPEN_ISDIR, + SQLITE_CANTOPEN_NOTEMPDIR as SQLITE_CANTOPEN_NOTEMPDIR, + SQLITE_CANTOPEN_SYMLINK as SQLITE_CANTOPEN_SYMLINK, + SQLITE_CONSTRAINT as SQLITE_CONSTRAINT, + SQLITE_CONSTRAINT_CHECK as SQLITE_CONSTRAINT_CHECK, + SQLITE_CONSTRAINT_COMMITHOOK as SQLITE_CONSTRAINT_COMMITHOOK, + SQLITE_CONSTRAINT_FOREIGNKEY as SQLITE_CONSTRAINT_FOREIGNKEY, + SQLITE_CONSTRAINT_FUNCTION as SQLITE_CONSTRAINT_FUNCTION, + SQLITE_CONSTRAINT_NOTNULL as SQLITE_CONSTRAINT_NOTNULL, + SQLITE_CONSTRAINT_PINNED as SQLITE_CONSTRAINT_PINNED, + SQLITE_CONSTRAINT_PRIMARYKEY as SQLITE_CONSTRAINT_PRIMARYKEY, + SQLITE_CONSTRAINT_ROWID as SQLITE_CONSTRAINT_ROWID, + SQLITE_CONSTRAINT_TRIGGER as SQLITE_CONSTRAINT_TRIGGER, + SQLITE_CONSTRAINT_UNIQUE as SQLITE_CONSTRAINT_UNIQUE, + SQLITE_CONSTRAINT_VTAB as SQLITE_CONSTRAINT_VTAB, + SQLITE_CORRUPT as SQLITE_CORRUPT, + SQLITE_CORRUPT_INDEX as SQLITE_CORRUPT_INDEX, + SQLITE_CORRUPT_SEQUENCE as SQLITE_CORRUPT_SEQUENCE, + SQLITE_CORRUPT_VTAB as SQLITE_CORRUPT_VTAB, + SQLITE_EMPTY as SQLITE_EMPTY, + SQLITE_ERROR as SQLITE_ERROR, + SQLITE_ERROR_MISSING_COLLSEQ as SQLITE_ERROR_MISSING_COLLSEQ, + SQLITE_ERROR_RETRY as SQLITE_ERROR_RETRY, + SQLITE_ERROR_SNAPSHOT as SQLITE_ERROR_SNAPSHOT, + SQLITE_FORMAT as SQLITE_FORMAT, + SQLITE_FULL as SQLITE_FULL, + SQLITE_INTERNAL as SQLITE_INTERNAL, + SQLITE_INTERRUPT as SQLITE_INTERRUPT, + SQLITE_IOERR as SQLITE_IOERR, + SQLITE_IOERR_ACCESS as SQLITE_IOERR_ACCESS, + SQLITE_IOERR_AUTH as SQLITE_IOERR_AUTH, + SQLITE_IOERR_BEGIN_ATOMIC as SQLITE_IOERR_BEGIN_ATOMIC, + SQLITE_IOERR_BLOCKED as SQLITE_IOERR_BLOCKED, + SQLITE_IOERR_CHECKRESERVEDLOCK as SQLITE_IOERR_CHECKRESERVEDLOCK, + SQLITE_IOERR_CLOSE as SQLITE_IOERR_CLOSE, + SQLITE_IOERR_COMMIT_ATOMIC as SQLITE_IOERR_COMMIT_ATOMIC, + SQLITE_IOERR_CONVPATH as SQLITE_IOERR_CONVPATH, + SQLITE_IOERR_CORRUPTFS as SQLITE_IOERR_CORRUPTFS, + SQLITE_IOERR_DATA as SQLITE_IOERR_DATA, + SQLITE_IOERR_DELETE as SQLITE_IOERR_DELETE, + SQLITE_IOERR_DELETE_NOENT as SQLITE_IOERR_DELETE_NOENT, + SQLITE_IOERR_DIR_CLOSE as SQLITE_IOERR_DIR_CLOSE, + SQLITE_IOERR_DIR_FSYNC as SQLITE_IOERR_DIR_FSYNC, + SQLITE_IOERR_FSTAT as SQLITE_IOERR_FSTAT, + SQLITE_IOERR_FSYNC as SQLITE_IOERR_FSYNC, + SQLITE_IOERR_GETTEMPPATH as SQLITE_IOERR_GETTEMPPATH, + SQLITE_IOERR_LOCK as SQLITE_IOERR_LOCK, + SQLITE_IOERR_MMAP as SQLITE_IOERR_MMAP, + SQLITE_IOERR_NOMEM as SQLITE_IOERR_NOMEM, + SQLITE_IOERR_RDLOCK as 
SQLITE_IOERR_RDLOCK, + SQLITE_IOERR_READ as SQLITE_IOERR_READ, + SQLITE_IOERR_ROLLBACK_ATOMIC as SQLITE_IOERR_ROLLBACK_ATOMIC, + SQLITE_IOERR_SEEK as SQLITE_IOERR_SEEK, + SQLITE_IOERR_SHMLOCK as SQLITE_IOERR_SHMLOCK, + SQLITE_IOERR_SHMMAP as SQLITE_IOERR_SHMMAP, + SQLITE_IOERR_SHMOPEN as SQLITE_IOERR_SHMOPEN, + SQLITE_IOERR_SHMSIZE as SQLITE_IOERR_SHMSIZE, + SQLITE_IOERR_SHORT_READ as SQLITE_IOERR_SHORT_READ, + SQLITE_IOERR_TRUNCATE as SQLITE_IOERR_TRUNCATE, + SQLITE_IOERR_UNLOCK as SQLITE_IOERR_UNLOCK, + SQLITE_IOERR_VNODE as SQLITE_IOERR_VNODE, + SQLITE_IOERR_WRITE as SQLITE_IOERR_WRITE, + SQLITE_LIMIT_ATTACHED as SQLITE_LIMIT_ATTACHED, + SQLITE_LIMIT_COLUMN as SQLITE_LIMIT_COLUMN, + SQLITE_LIMIT_COMPOUND_SELECT as SQLITE_LIMIT_COMPOUND_SELECT, + SQLITE_LIMIT_EXPR_DEPTH as SQLITE_LIMIT_EXPR_DEPTH, + SQLITE_LIMIT_FUNCTION_ARG as SQLITE_LIMIT_FUNCTION_ARG, + SQLITE_LIMIT_LENGTH as SQLITE_LIMIT_LENGTH, + SQLITE_LIMIT_LIKE_PATTERN_LENGTH as SQLITE_LIMIT_LIKE_PATTERN_LENGTH, + SQLITE_LIMIT_SQL_LENGTH as SQLITE_LIMIT_SQL_LENGTH, + SQLITE_LIMIT_TRIGGER_DEPTH as SQLITE_LIMIT_TRIGGER_DEPTH, + SQLITE_LIMIT_VARIABLE_NUMBER as SQLITE_LIMIT_VARIABLE_NUMBER, + SQLITE_LIMIT_VDBE_OP as SQLITE_LIMIT_VDBE_OP, + SQLITE_LIMIT_WORKER_THREADS as SQLITE_LIMIT_WORKER_THREADS, + SQLITE_LOCKED as SQLITE_LOCKED, + SQLITE_LOCKED_SHAREDCACHE as SQLITE_LOCKED_SHAREDCACHE, + SQLITE_LOCKED_VTAB as SQLITE_LOCKED_VTAB, + SQLITE_MISMATCH as SQLITE_MISMATCH, + SQLITE_MISUSE as SQLITE_MISUSE, + SQLITE_NOLFS as SQLITE_NOLFS, + SQLITE_NOMEM as SQLITE_NOMEM, + SQLITE_NOTADB as SQLITE_NOTADB, + SQLITE_NOTFOUND as SQLITE_NOTFOUND, + SQLITE_NOTICE as SQLITE_NOTICE, + SQLITE_NOTICE_RECOVER_ROLLBACK as SQLITE_NOTICE_RECOVER_ROLLBACK, + SQLITE_NOTICE_RECOVER_WAL as SQLITE_NOTICE_RECOVER_WAL, + SQLITE_OK_LOAD_PERMANENTLY as SQLITE_OK_LOAD_PERMANENTLY, + SQLITE_OK_SYMLINK as SQLITE_OK_SYMLINK, + SQLITE_PERM as SQLITE_PERM, + SQLITE_PROTOCOL as SQLITE_PROTOCOL, + SQLITE_RANGE as SQLITE_RANGE, + SQLITE_READONLY as SQLITE_READONLY, + SQLITE_READONLY_CANTINIT as SQLITE_READONLY_CANTINIT, + SQLITE_READONLY_CANTLOCK as SQLITE_READONLY_CANTLOCK, + SQLITE_READONLY_DBMOVED as SQLITE_READONLY_DBMOVED, + SQLITE_READONLY_DIRECTORY as SQLITE_READONLY_DIRECTORY, + SQLITE_READONLY_RECOVERY as SQLITE_READONLY_RECOVERY, + SQLITE_READONLY_ROLLBACK as SQLITE_READONLY_ROLLBACK, + SQLITE_ROW as SQLITE_ROW, + SQLITE_SCHEMA as SQLITE_SCHEMA, + SQLITE_TOOBIG as SQLITE_TOOBIG, + SQLITE_WARNING as SQLITE_WARNING, + SQLITE_WARNING_AUTOINDEX as SQLITE_WARNING_AUTOINDEX, + ) + from sqlite3 import Blob as Blob + +if sys.version_info < (3, 14): + # Deprecated and removed from _sqlite3 in 3.12, but removed from here in 3.14. + version: Final[str] + +if sys.version_info < (3, 12): + if sys.version_info >= (3, 10): + # deprecation wrapper that has a different name for the argument... + @deprecated( + "Deprecated since Python 3.10; removed in Python 3.12. " + "Open database in URI mode using `cache=shared` parameter instead." + ) + def enable_shared_cache(enable: int) -> None: ... + else: + from _sqlite3 import enable_shared_cache as enable_shared_cache + +if sys.version_info < (3, 10): + from _sqlite3 import OptimizedUnicode as OptimizedUnicode + +paramstyle: Final = "qmark" +threadsafety: Literal[0, 1, 3] +apilevel: Final[str] +Date = date +Time = time +Timestamp = datetime + +def DateFromTicks(ticks: float) -> Date: ... +def TimeFromTicks(ticks: float) -> Time: ... +def TimestampFromTicks(ticks: float) -> Timestamp: ... 
+ +if sys.version_info < (3, 14): + # Deprecated in 3.12, removed in 3.14. + version_info: Final[tuple[int, int, int]] + +sqlite_version_info: Final[tuple[int, int, int]] +Binary = memoryview diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sqlite3/dump.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sqlite3/dump.pyi new file mode 100644 index 0000000..ed95fa4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sqlite3/dump.pyi @@ -0,0 +1,2 @@ +# This file is intentionally empty. The runtime module contains only +# private functions. diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sre_compile.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sre_compile.pyi new file mode 100644 index 0000000..d8f0b79 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sre_compile.pyi @@ -0,0 +1,11 @@ +from re import Pattern +from sre_constants import * +from sre_constants import _NamedIntConstant +from sre_parse import SubPattern +from typing import Any, Final + +MAXCODE: Final[int] + +def dis(code: list[_NamedIntConstant]) -> None: ... +def isstring(obj: Any) -> bool: ... +def compile(p: str | bytes | SubPattern, flags: int = 0) -> Pattern[Any]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sre_constants.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sre_constants.pyi new file mode 100644 index 0000000..9a1da4a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sre_constants.pyi @@ -0,0 +1,135 @@ +import sys +from re import error as error +from typing import Final +from typing_extensions import Self, disjoint_base + +MAXGROUPS: Final[int] + +MAGIC: Final[int] + +if sys.version_info >= (3, 12): + class _NamedIntConstant(int): + name: str + def __new__(cls, value: int, name: str) -> Self: ... + +else: + @disjoint_base + class _NamedIntConstant(int): + name: str + def __new__(cls, value: int, name: str) -> Self: ... 
+ +MAXREPEAT: Final[_NamedIntConstant] +OPCODES: list[_NamedIntConstant] +ATCODES: list[_NamedIntConstant] +CHCODES: list[_NamedIntConstant] +OP_IGNORE: dict[_NamedIntConstant, _NamedIntConstant] +OP_LOCALE_IGNORE: dict[_NamedIntConstant, _NamedIntConstant] +OP_UNICODE_IGNORE: dict[_NamedIntConstant, _NamedIntConstant] +AT_MULTILINE: dict[_NamedIntConstant, _NamedIntConstant] +AT_LOCALE: dict[_NamedIntConstant, _NamedIntConstant] +AT_UNICODE: dict[_NamedIntConstant, _NamedIntConstant] +CH_LOCALE: dict[_NamedIntConstant, _NamedIntConstant] +CH_UNICODE: dict[_NamedIntConstant, _NamedIntConstant] +if sys.version_info >= (3, 14): + CH_NEGATE: dict[_NamedIntConstant, _NamedIntConstant] +# flags +if sys.version_info < (3, 13): + SRE_FLAG_TEMPLATE: Final = 1 +SRE_FLAG_IGNORECASE: Final = 2 +SRE_FLAG_LOCALE: Final = 4 +SRE_FLAG_MULTILINE: Final = 8 +SRE_FLAG_DOTALL: Final = 16 +SRE_FLAG_UNICODE: Final = 32 +SRE_FLAG_VERBOSE: Final = 64 +SRE_FLAG_DEBUG: Final = 128 +SRE_FLAG_ASCII: Final = 256 +# flags for INFO primitive +SRE_INFO_PREFIX: Final = 1 +SRE_INFO_LITERAL: Final = 2 +SRE_INFO_CHARSET: Final = 4 + +# Stubgen above; manually defined constants below (dynamic at runtime) + +# from OPCODES +FAILURE: Final[_NamedIntConstant] +SUCCESS: Final[_NamedIntConstant] +ANY: Final[_NamedIntConstant] +ANY_ALL: Final[_NamedIntConstant] +ASSERT: Final[_NamedIntConstant] +ASSERT_NOT: Final[_NamedIntConstant] +AT: Final[_NamedIntConstant] +BRANCH: Final[_NamedIntConstant] +if sys.version_info < (3, 11): + CALL: Final[_NamedIntConstant] +CATEGORY: Final[_NamedIntConstant] +CHARSET: Final[_NamedIntConstant] +BIGCHARSET: Final[_NamedIntConstant] +GROUPREF: Final[_NamedIntConstant] +GROUPREF_EXISTS: Final[_NamedIntConstant] +GROUPREF_IGNORE: Final[_NamedIntConstant] +IN: Final[_NamedIntConstant] +IN_IGNORE: Final[_NamedIntConstant] +INFO: Final[_NamedIntConstant] +JUMP: Final[_NamedIntConstant] +LITERAL: Final[_NamedIntConstant] +LITERAL_IGNORE: Final[_NamedIntConstant] +MARK: Final[_NamedIntConstant] +MAX_UNTIL: Final[_NamedIntConstant] +MIN_UNTIL: Final[_NamedIntConstant] +NOT_LITERAL: Final[_NamedIntConstant] +NOT_LITERAL_IGNORE: Final[_NamedIntConstant] +NEGATE: Final[_NamedIntConstant] +RANGE: Final[_NamedIntConstant] +REPEAT: Final[_NamedIntConstant] +REPEAT_ONE: Final[_NamedIntConstant] +SUBPATTERN: Final[_NamedIntConstant] +MIN_REPEAT_ONE: Final[_NamedIntConstant] +if sys.version_info >= (3, 11): + ATOMIC_GROUP: Final[_NamedIntConstant] + POSSESSIVE_REPEAT: Final[_NamedIntConstant] + POSSESSIVE_REPEAT_ONE: Final[_NamedIntConstant] +RANGE_UNI_IGNORE: Final[_NamedIntConstant] +GROUPREF_LOC_IGNORE: Final[_NamedIntConstant] +GROUPREF_UNI_IGNORE: Final[_NamedIntConstant] +IN_LOC_IGNORE: Final[_NamedIntConstant] +IN_UNI_IGNORE: Final[_NamedIntConstant] +LITERAL_LOC_IGNORE: Final[_NamedIntConstant] +LITERAL_UNI_IGNORE: Final[_NamedIntConstant] +NOT_LITERAL_LOC_IGNORE: Final[_NamedIntConstant] +NOT_LITERAL_UNI_IGNORE: Final[_NamedIntConstant] +MIN_REPEAT: Final[_NamedIntConstant] +MAX_REPEAT: Final[_NamedIntConstant] + +# from ATCODES +AT_BEGINNING: Final[_NamedIntConstant] +AT_BEGINNING_LINE: Final[_NamedIntConstant] +AT_BEGINNING_STRING: Final[_NamedIntConstant] +AT_BOUNDARY: Final[_NamedIntConstant] +AT_NON_BOUNDARY: Final[_NamedIntConstant] +AT_END: Final[_NamedIntConstant] +AT_END_LINE: Final[_NamedIntConstant] +AT_END_STRING: Final[_NamedIntConstant] +AT_LOC_BOUNDARY: Final[_NamedIntConstant] +AT_LOC_NON_BOUNDARY: Final[_NamedIntConstant] +AT_UNI_BOUNDARY: Final[_NamedIntConstant] +AT_UNI_NON_BOUNDARY: 
Final[_NamedIntConstant] + +# from CHCODES +CATEGORY_DIGIT: Final[_NamedIntConstant] +CATEGORY_NOT_DIGIT: Final[_NamedIntConstant] +CATEGORY_SPACE: Final[_NamedIntConstant] +CATEGORY_NOT_SPACE: Final[_NamedIntConstant] +CATEGORY_WORD: Final[_NamedIntConstant] +CATEGORY_NOT_WORD: Final[_NamedIntConstant] +CATEGORY_LINEBREAK: Final[_NamedIntConstant] +CATEGORY_NOT_LINEBREAK: Final[_NamedIntConstant] +CATEGORY_LOC_WORD: Final[_NamedIntConstant] +CATEGORY_LOC_NOT_WORD: Final[_NamedIntConstant] +CATEGORY_UNI_DIGIT: Final[_NamedIntConstant] +CATEGORY_UNI_NOT_DIGIT: Final[_NamedIntConstant] +CATEGORY_UNI_SPACE: Final[_NamedIntConstant] +CATEGORY_UNI_NOT_SPACE: Final[_NamedIntConstant] +CATEGORY_UNI_WORD: Final[_NamedIntConstant] +CATEGORY_UNI_NOT_WORD: Final[_NamedIntConstant] +CATEGORY_UNI_LINEBREAK: Final[_NamedIntConstant] +CATEGORY_UNI_NOT_LINEBREAK: Final[_NamedIntConstant] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sre_parse.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sre_parse.pyi new file mode 100644 index 0000000..eaacbff --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sre_parse.pyi @@ -0,0 +1,104 @@ +import sys +from collections.abc import Iterable +from re import Match, Pattern as _Pattern +from sre_constants import * +from sre_constants import _NamedIntConstant as _NIC, error as _Error +from typing import Any, Final, overload +from typing_extensions import TypeAlias + +SPECIAL_CHARS: Final = ".\\[{()*+?^$|" +REPEAT_CHARS: Final = "*+?{" +DIGITS: Final[frozenset[str]] +OCTDIGITS: Final[frozenset[str]] +HEXDIGITS: Final[frozenset[str]] +ASCIILETTERS: Final[frozenset[str]] +WHITESPACE: Final[frozenset[str]] +ESCAPES: Final[dict[str, tuple[_NIC, int]]] +CATEGORIES: Final[dict[str, tuple[_NIC, _NIC] | tuple[_NIC, list[tuple[_NIC, _NIC]]]]] +FLAGS: Final[dict[str, int]] +TYPE_FLAGS: Final[int] +GLOBAL_FLAGS: Final[int] + +if sys.version_info >= (3, 11): + MAXWIDTH: Final[int] + +if sys.version_info < (3, 11): + class Verbose(Exception): ... + +_OpSubpatternType: TypeAlias = tuple[int | None, int, int, SubPattern] +_OpGroupRefExistsType: TypeAlias = tuple[int, SubPattern, SubPattern] +_OpInType: TypeAlias = list[tuple[_NIC, int]] +_OpBranchType: TypeAlias = tuple[None, list[SubPattern]] +_AvType: TypeAlias = _OpInType | _OpBranchType | Iterable[SubPattern] | _OpGroupRefExistsType | _OpSubpatternType +_CodeType: TypeAlias = tuple[_NIC, _AvType] + +class State: + flags: int + groupdict: dict[str, int] + groupwidths: list[int | None] + lookbehindgroups: int | None + @property + def groups(self) -> int: ... + def opengroup(self, name: str | None = None) -> int: ... + def closegroup(self, gid: int, p: SubPattern) -> None: ... + def checkgroup(self, gid: int) -> bool: ... + def checklookbehindgroup(self, gid: int, source: Tokenizer) -> None: ... + +class SubPattern: + data: list[_CodeType] + width: int | None + state: State + + def __init__(self, state: State, data: list[_CodeType] | None = None) -> None: ... + def dump(self, level: int = 0) -> None: ... + def __len__(self) -> int: ... + def __delitem__(self, index: int | slice) -> None: ... + def __getitem__(self, index: int | slice) -> SubPattern | _CodeType: ... + def __setitem__(self, index: int | slice, code: _CodeType) -> None: ... + def insert(self, index: int, code: _CodeType) -> None: ... + def append(self, code: _CodeType) -> None: ... + def getwidth(self) -> tuple[int, int]: ... 
+ +class Tokenizer: + istext: bool + string: Any + decoded_string: str + index: int + next: str | None + def __init__(self, string: Any) -> None: ... + def match(self, char: str) -> bool: ... + def get(self) -> str | None: ... + def getwhile(self, n: int, charset: Iterable[str]) -> str: ... + def getuntil(self, terminator: str, name: str) -> str: ... + @property + def pos(self) -> int: ... + def tell(self) -> int: ... + def seek(self, index: int) -> None: ... + def error(self, msg: str, offset: int = 0) -> _Error: ... + + if sys.version_info >= (3, 12): + def checkgroupname(self, name: str, offset: int) -> None: ... + elif sys.version_info >= (3, 11): + def checkgroupname(self, name: str, offset: int, nested: int) -> None: ... + +def fix_flags(src: str | bytes, flags: int) -> int: ... + +_TemplateType: TypeAlias = tuple[list[tuple[int, int]], list[str | None]] +_TemplateByteType: TypeAlias = tuple[list[tuple[int, int]], list[bytes | None]] + +if sys.version_info >= (3, 12): + @overload + def parse_template(source: str, pattern: _Pattern[Any]) -> _TemplateType: ... + @overload + def parse_template(source: bytes, pattern: _Pattern[Any]) -> _TemplateByteType: ... + +else: + @overload + def parse_template(source: str, state: _Pattern[Any]) -> _TemplateType: ... + @overload + def parse_template(source: bytes, state: _Pattern[Any]) -> _TemplateByteType: ... + +def parse(str: str, flags: int = 0, state: State | None = None) -> SubPattern: ... + +if sys.version_info < (3, 12): + def expand_template(template: _TemplateType, match: Match[Any]) -> str: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ssl.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ssl.pyi new file mode 100644 index 0000000..aa94fc8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/ssl.pyi @@ -0,0 +1,540 @@ +import enum +import socket +import sys +from _ssl import ( + _DEFAULT_CIPHERS as _DEFAULT_CIPHERS, + _OPENSSL_API_VERSION as _OPENSSL_API_VERSION, + HAS_ALPN as HAS_ALPN, + HAS_ECDH as HAS_ECDH, + HAS_NPN as HAS_NPN, + HAS_SNI as HAS_SNI, + OPENSSL_VERSION as OPENSSL_VERSION, + OPENSSL_VERSION_INFO as OPENSSL_VERSION_INFO, + OPENSSL_VERSION_NUMBER as OPENSSL_VERSION_NUMBER, + HAS_SSLv2 as HAS_SSLv2, + HAS_SSLv3 as HAS_SSLv3, + HAS_TLSv1 as HAS_TLSv1, + HAS_TLSv1_1 as HAS_TLSv1_1, + HAS_TLSv1_2 as HAS_TLSv1_2, + HAS_TLSv1_3 as HAS_TLSv1_3, + MemoryBIO as MemoryBIO, + RAND_add as RAND_add, + RAND_bytes as RAND_bytes, + RAND_status as RAND_status, + SSLSession as SSLSession, + _PasswordType as _PasswordType, # typeshed only, but re-export for other type stubs to use + _SSLContext, +) +from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer +from collections.abc import Callable, Iterable +from typing import Any, Final, Literal, NamedTuple, TypedDict, overload, type_check_only +from typing_extensions import Never, Self, TypeAlias, deprecated + +if sys.version_info >= (3, 13): + from _ssl import HAS_PSK as HAS_PSK + +if sys.version_info >= (3, 14): + from _ssl import HAS_PHA as HAS_PHA + +if sys.version_info < (3, 12): + from _ssl import RAND_pseudo_bytes as RAND_pseudo_bytes + +if sys.version_info < (3, 10): + from _ssl import RAND_egd as RAND_egd + +if sys.platform == "win32": + from _ssl import enum_certificates as enum_certificates, enum_crls as enum_crls + +_PCTRTT: TypeAlias = tuple[tuple[str, str], ...] +_PCTRTTT: TypeAlias = tuple[_PCTRTT, ...] 
+_PeerCertRetDictType: TypeAlias = dict[str, str | _PCTRTTT | _PCTRTT] +_PeerCertRetType: TypeAlias = _PeerCertRetDictType | bytes | None +_SrvnmeCbType: TypeAlias = Callable[[SSLSocket | SSLObject, str | None, SSLSocket], int | None] + +socket_error = OSError + +@type_check_only +class _Cipher(TypedDict): + aead: bool + alg_bits: int + auth: str + description: str + digest: str | None + id: int + kea: str + name: str + protocol: str + strength_bits: int + symmetric: str + +class SSLError(OSError): + library: str + reason: str + +class SSLZeroReturnError(SSLError): ... +class SSLWantReadError(SSLError): ... +class SSLWantWriteError(SSLError): ... +class SSLSyscallError(SSLError): ... +class SSLEOFError(SSLError): ... + +class SSLCertVerificationError(SSLError, ValueError): + verify_code: int + verify_message: str + +CertificateError = SSLCertVerificationError + +if sys.version_info < (3, 12): + @deprecated("Deprecated since Python 3.7; removed in Python 3.12. Use `SSLContext.wrap_socket()` instead.") + def wrap_socket( + sock: socket.socket, + keyfile: StrOrBytesPath | None = None, + certfile: StrOrBytesPath | None = None, + server_side: bool = False, + cert_reqs: int = ..., + ssl_version: int = ..., + ca_certs: str | None = None, + do_handshake_on_connect: bool = True, + suppress_ragged_eofs: bool = True, + ciphers: str | None = None, + ) -> SSLSocket: ... + @deprecated("Deprecated since Python 3.7; removed in Python 3.12.") + def match_hostname(cert: _PeerCertRetDictType, hostname: str) -> None: ... + +def cert_time_to_seconds(cert_time: str) -> int: ... + +if sys.version_info >= (3, 10): + def get_server_certificate( + addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = None, timeout: float = ... + ) -> str: ... + +else: + def get_server_certificate(addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = None) -> str: ... + +def DER_cert_to_PEM_cert(der_cert_bytes: ReadableBuffer) -> str: ... +def PEM_cert_to_DER_cert(pem_cert_string: str) -> bytes: ... + +class DefaultVerifyPaths(NamedTuple): + cafile: str + capath: str + openssl_cafile_env: str + openssl_cafile: str + openssl_capath_env: str + openssl_capath: str + +def get_default_verify_paths() -> DefaultVerifyPaths: ... + +class VerifyMode(enum.IntEnum): + CERT_NONE = 0 + CERT_OPTIONAL = 1 + CERT_REQUIRED = 2 + +CERT_NONE: Final = VerifyMode.CERT_NONE +CERT_OPTIONAL: Final = VerifyMode.CERT_OPTIONAL +CERT_REQUIRED: Final = VerifyMode.CERT_REQUIRED + +class VerifyFlags(enum.IntFlag): + VERIFY_DEFAULT = 0 + VERIFY_CRL_CHECK_LEAF = 4 + VERIFY_CRL_CHECK_CHAIN = 12 + VERIFY_X509_STRICT = 32 + VERIFY_X509_TRUSTED_FIRST = 32768 + if sys.version_info >= (3, 10): + VERIFY_ALLOW_PROXY_CERTS = 64 + VERIFY_X509_PARTIAL_CHAIN = 524288 + +VERIFY_DEFAULT: Final = VerifyFlags.VERIFY_DEFAULT +VERIFY_CRL_CHECK_LEAF: Final = VerifyFlags.VERIFY_CRL_CHECK_LEAF +VERIFY_CRL_CHECK_CHAIN: Final = VerifyFlags.VERIFY_CRL_CHECK_CHAIN +VERIFY_X509_STRICT: Final = VerifyFlags.VERIFY_X509_STRICT +VERIFY_X509_TRUSTED_FIRST: Final = VerifyFlags.VERIFY_X509_TRUSTED_FIRST + +if sys.version_info >= (3, 10): + VERIFY_ALLOW_PROXY_CERTS: Final = VerifyFlags.VERIFY_ALLOW_PROXY_CERTS + VERIFY_X509_PARTIAL_CHAIN: Final = VerifyFlags.VERIFY_X509_PARTIAL_CHAIN + +class _SSLMethod(enum.IntEnum): + PROTOCOL_SSLv23 = 2 + PROTOCOL_SSLv2 = ... + PROTOCOL_SSLv3 = ... 
+ PROTOCOL_TLSv1 = 3 + PROTOCOL_TLSv1_1 = 4 + PROTOCOL_TLSv1_2 = 5 + PROTOCOL_TLS = 2 + PROTOCOL_TLS_CLIENT = 16 + PROTOCOL_TLS_SERVER = 17 + +PROTOCOL_SSLv23: Final = _SSLMethod.PROTOCOL_SSLv23 +PROTOCOL_SSLv2: Final = _SSLMethod.PROTOCOL_SSLv2 +PROTOCOL_SSLv3: Final = _SSLMethod.PROTOCOL_SSLv3 +PROTOCOL_TLSv1: Final = _SSLMethod.PROTOCOL_TLSv1 +PROTOCOL_TLSv1_1: Final = _SSLMethod.PROTOCOL_TLSv1_1 +PROTOCOL_TLSv1_2: Final = _SSLMethod.PROTOCOL_TLSv1_2 +PROTOCOL_TLS: Final = _SSLMethod.PROTOCOL_TLS +PROTOCOL_TLS_CLIENT: Final = _SSLMethod.PROTOCOL_TLS_CLIENT +PROTOCOL_TLS_SERVER: Final = _SSLMethod.PROTOCOL_TLS_SERVER + +class Options(enum.IntFlag): + OP_ALL = 2147483728 + OP_NO_SSLv2 = 0 + OP_NO_SSLv3 = 33554432 + OP_NO_TLSv1 = 67108864 + OP_NO_TLSv1_1 = 268435456 + OP_NO_TLSv1_2 = 134217728 + OP_NO_TLSv1_3 = 536870912 + OP_CIPHER_SERVER_PREFERENCE = 4194304 + OP_SINGLE_DH_USE = 0 + OP_SINGLE_ECDH_USE = 0 + OP_NO_COMPRESSION = 131072 + OP_NO_TICKET = 16384 + OP_NO_RENEGOTIATION = 1073741824 + OP_ENABLE_MIDDLEBOX_COMPAT = 1048576 + if sys.version_info >= (3, 12): + OP_LEGACY_SERVER_CONNECT = 4 + OP_ENABLE_KTLS = 8 + if sys.version_info >= (3, 11) or sys.platform == "linux": + OP_IGNORE_UNEXPECTED_EOF = 128 + +OP_ALL: Final = Options.OP_ALL +OP_NO_SSLv2: Final = Options.OP_NO_SSLv2 +OP_NO_SSLv3: Final = Options.OP_NO_SSLv3 +OP_NO_TLSv1: Final = Options.OP_NO_TLSv1 +OP_NO_TLSv1_1: Final = Options.OP_NO_TLSv1_1 +OP_NO_TLSv1_2: Final = Options.OP_NO_TLSv1_2 +OP_NO_TLSv1_3: Final = Options.OP_NO_TLSv1_3 +OP_CIPHER_SERVER_PREFERENCE: Final = Options.OP_CIPHER_SERVER_PREFERENCE +OP_SINGLE_DH_USE: Final = Options.OP_SINGLE_DH_USE +OP_SINGLE_ECDH_USE: Final = Options.OP_SINGLE_ECDH_USE +OP_NO_COMPRESSION: Final = Options.OP_NO_COMPRESSION +OP_NO_TICKET: Final = Options.OP_NO_TICKET +OP_NO_RENEGOTIATION: Final = Options.OP_NO_RENEGOTIATION +OP_ENABLE_MIDDLEBOX_COMPAT: Final = Options.OP_ENABLE_MIDDLEBOX_COMPAT +if sys.version_info >= (3, 12): + OP_LEGACY_SERVER_CONNECT: Final = Options.OP_LEGACY_SERVER_CONNECT + OP_ENABLE_KTLS: Final = Options.OP_ENABLE_KTLS +if sys.version_info >= (3, 11) or sys.platform == "linux": + OP_IGNORE_UNEXPECTED_EOF: Final = Options.OP_IGNORE_UNEXPECTED_EOF + +HAS_NEVER_CHECK_COMMON_NAME: Final[bool] + +CHANNEL_BINDING_TYPES: Final[list[str]] + +class AlertDescription(enum.IntEnum): + ALERT_DESCRIPTION_ACCESS_DENIED = 49 + ALERT_DESCRIPTION_BAD_CERTIFICATE = 42 + ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE = 114 + ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE = 113 + ALERT_DESCRIPTION_BAD_RECORD_MAC = 20 + ALERT_DESCRIPTION_CERTIFICATE_EXPIRED = 45 + ALERT_DESCRIPTION_CERTIFICATE_REVOKED = 44 + ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN = 46 + ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE = 111 + ALERT_DESCRIPTION_CLOSE_NOTIFY = 0 + ALERT_DESCRIPTION_DECODE_ERROR = 50 + ALERT_DESCRIPTION_DECOMPRESSION_FAILURE = 30 + ALERT_DESCRIPTION_DECRYPT_ERROR = 51 + ALERT_DESCRIPTION_HANDSHAKE_FAILURE = 40 + ALERT_DESCRIPTION_ILLEGAL_PARAMETER = 47 + ALERT_DESCRIPTION_INSUFFICIENT_SECURITY = 71 + ALERT_DESCRIPTION_INTERNAL_ERROR = 80 + ALERT_DESCRIPTION_NO_RENEGOTIATION = 100 + ALERT_DESCRIPTION_PROTOCOL_VERSION = 70 + ALERT_DESCRIPTION_RECORD_OVERFLOW = 22 + ALERT_DESCRIPTION_UNEXPECTED_MESSAGE = 10 + ALERT_DESCRIPTION_UNKNOWN_CA = 48 + ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY = 115 + ALERT_DESCRIPTION_UNRECOGNIZED_NAME = 112 + ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE = 43 + ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION = 110 + ALERT_DESCRIPTION_USER_CANCELLED = 90 + 
+ALERT_DESCRIPTION_HANDSHAKE_FAILURE: Final = AlertDescription.ALERT_DESCRIPTION_HANDSHAKE_FAILURE +ALERT_DESCRIPTION_INTERNAL_ERROR: Final = AlertDescription.ALERT_DESCRIPTION_INTERNAL_ERROR +ALERT_DESCRIPTION_ACCESS_DENIED: Final = AlertDescription.ALERT_DESCRIPTION_ACCESS_DENIED +ALERT_DESCRIPTION_BAD_CERTIFICATE: Final = AlertDescription.ALERT_DESCRIPTION_BAD_CERTIFICATE +ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE: Final = AlertDescription.ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE +ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE: Final = AlertDescription.ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE +ALERT_DESCRIPTION_BAD_RECORD_MAC: Final = AlertDescription.ALERT_DESCRIPTION_BAD_RECORD_MAC +ALERT_DESCRIPTION_CERTIFICATE_EXPIRED: Final = AlertDescription.ALERT_DESCRIPTION_CERTIFICATE_EXPIRED +ALERT_DESCRIPTION_CERTIFICATE_REVOKED: Final = AlertDescription.ALERT_DESCRIPTION_CERTIFICATE_REVOKED +ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN: Final = AlertDescription.ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN +ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE: Final = AlertDescription.ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE +ALERT_DESCRIPTION_CLOSE_NOTIFY: Final = AlertDescription.ALERT_DESCRIPTION_CLOSE_NOTIFY +ALERT_DESCRIPTION_DECODE_ERROR: Final = AlertDescription.ALERT_DESCRIPTION_DECODE_ERROR +ALERT_DESCRIPTION_DECOMPRESSION_FAILURE: Final = AlertDescription.ALERT_DESCRIPTION_DECOMPRESSION_FAILURE +ALERT_DESCRIPTION_DECRYPT_ERROR: Final = AlertDescription.ALERT_DESCRIPTION_DECRYPT_ERROR +ALERT_DESCRIPTION_ILLEGAL_PARAMETER: Final = AlertDescription.ALERT_DESCRIPTION_ILLEGAL_PARAMETER +ALERT_DESCRIPTION_INSUFFICIENT_SECURITY: Final = AlertDescription.ALERT_DESCRIPTION_INSUFFICIENT_SECURITY +ALERT_DESCRIPTION_NO_RENEGOTIATION: Final = AlertDescription.ALERT_DESCRIPTION_NO_RENEGOTIATION +ALERT_DESCRIPTION_PROTOCOL_VERSION: Final = AlertDescription.ALERT_DESCRIPTION_PROTOCOL_VERSION +ALERT_DESCRIPTION_RECORD_OVERFLOW: Final = AlertDescription.ALERT_DESCRIPTION_RECORD_OVERFLOW +ALERT_DESCRIPTION_UNEXPECTED_MESSAGE: Final = AlertDescription.ALERT_DESCRIPTION_UNEXPECTED_MESSAGE +ALERT_DESCRIPTION_UNKNOWN_CA: Final = AlertDescription.ALERT_DESCRIPTION_UNKNOWN_CA +ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY: Final = AlertDescription.ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY +ALERT_DESCRIPTION_UNRECOGNIZED_NAME: Final = AlertDescription.ALERT_DESCRIPTION_UNRECOGNIZED_NAME +ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE: Final = AlertDescription.ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE +ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION: Final = AlertDescription.ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION +ALERT_DESCRIPTION_USER_CANCELLED: Final = AlertDescription.ALERT_DESCRIPTION_USER_CANCELLED + +# This class is not exposed. It calls itself ssl._ASN1Object. +@type_check_only +class _ASN1ObjectBase(NamedTuple): + nid: int + shortname: str + longname: str + oid: str + +class _ASN1Object(_ASN1ObjectBase): + def __new__(cls, oid: str) -> Self: ... + @classmethod + def fromnid(cls, nid: int) -> Self: ... + @classmethod + def fromname(cls, name: str) -> Self: ... + +class Purpose(_ASN1Object, enum.Enum): + # Normally this class would inherit __new__ from _ASN1Object, but + # because this is an enum, the inherited __new__ is replaced at runtime with + # Enum.__new__. + def __new__(cls, value: object) -> Self: ... 
+ SERVER_AUTH = (129, "serverAuth", "TLS Web Server Authentication", "1.3.6.1.5.5.7.3.2") # pyright: ignore[reportCallIssue] + CLIENT_AUTH = (130, "clientAuth", "TLS Web Client Authentication", "1.3.6.1.5.5.7.3.1") # pyright: ignore[reportCallIssue] + +class SSLSocket(socket.socket): + context: SSLContext + server_side: bool + server_hostname: str | None + session: SSLSession | None + @property + def session_reused(self) -> bool | None: ... + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def connect(self, addr: socket._Address) -> None: ... + def connect_ex(self, addr: socket._Address) -> int: ... + def recv(self, buflen: int = 1024, flags: int = 0) -> bytes: ... + def recv_into(self, buffer: WriteableBuffer, nbytes: int | None = None, flags: int = 0) -> int: ... + def recvfrom(self, buflen: int = 1024, flags: int = 0) -> tuple[bytes, socket._RetAddress]: ... + def recvfrom_into( + self, buffer: WriteableBuffer, nbytes: int | None = None, flags: int = 0 + ) -> tuple[int, socket._RetAddress]: ... + def send(self, data: ReadableBuffer, flags: int = 0) -> int: ... + def sendall(self, data: ReadableBuffer, flags: int = 0) -> None: ... + @overload + def sendto(self, data: ReadableBuffer, flags_or_addr: socket._Address, addr: None = None) -> int: ... + @overload + def sendto(self, data: ReadableBuffer, flags_or_addr: int, addr: socket._Address) -> int: ... + def shutdown(self, how: int) -> None: ... + def read(self, len: int = 1024, buffer: bytearray | None = None) -> bytes: ... + def write(self, data: ReadableBuffer) -> int: ... + def do_handshake(self, block: bool = False) -> None: ... # block is undocumented + @overload + def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: ... + @overload + def getpeercert(self, binary_form: Literal[True]) -> bytes | None: ... + @overload + def getpeercert(self, binary_form: bool) -> _PeerCertRetType: ... + def cipher(self) -> tuple[str, str, int] | None: ... + def shared_ciphers(self) -> list[tuple[str, str, int]] | None: ... + def compression(self) -> str | None: ... + def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: ... + def selected_alpn_protocol(self) -> str | None: ... + if sys.version_info >= (3, 10): + @deprecated("Deprecated since Python 3.10. Use ALPN instead.") + def selected_npn_protocol(self) -> str | None: ... + else: + def selected_npn_protocol(self) -> str | None: ... + + def accept(self) -> tuple[SSLSocket, socket._RetAddress]: ... + def unwrap(self) -> socket.socket: ... + def version(self) -> str | None: ... + def pending(self) -> int: ... + def verify_client_post_handshake(self) -> None: ... + # These methods always raise `NotImplementedError`: + def recvmsg(self, *args: Never, **kwargs: Never) -> Never: ... # type: ignore[override] + def recvmsg_into(self, *args: Never, **kwargs: Never) -> Never: ... # type: ignore[override] + def sendmsg(self, *args: Never, **kwargs: Never) -> Never: ... # type: ignore[override] + if sys.version_info >= (3, 13): + def get_verified_chain(self) -> list[bytes]: ... + def get_unverified_chain(self) -> list[bytes]: ... + +class TLSVersion(enum.IntEnum): + MINIMUM_SUPPORTED = -2 + MAXIMUM_SUPPORTED = -1 + SSLv3 = 768 + TLSv1 = 769 + TLSv1_1 = 770 + TLSv1_2 = 771 + TLSv1_3 = 772 + +class SSLContext(_SSLContext): + options: Options + verify_flags: VerifyFlags + verify_mode: VerifyMode + @property + def protocol(self) -> _SSLMethod: ... 
# type: ignore[override] + hostname_checks_common_name: bool + maximum_version: TLSVersion + minimum_version: TLSVersion + # The following two attributes have class-level defaults. + # However, the docs explicitly state that it's OK to override these attributes on instances, + # so making these ClassVars wouldn't be appropriate + sslobject_class: type[SSLObject] + sslsocket_class: type[SSLSocket] + keylog_filename: str + post_handshake_auth: bool + if sys.version_info >= (3, 10): + security_level: int + if sys.version_info >= (3, 10): + @overload + def __new__(cls, protocol: int, *args: Any, **kwargs: Any) -> Self: ... + @overload + @deprecated("Deprecated since Python 3.10. Use a specific version of the SSL protocol.") + def __new__(cls, protocol: None = None, *args: Any, **kwargs: Any) -> Self: ... + else: + def __new__(cls, protocol: int = ..., *args: Any, **kwargs: Any) -> Self: ... + + def load_default_certs(self, purpose: Purpose = Purpose.SERVER_AUTH) -> None: ... + def load_verify_locations( + self, + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, + ) -> None: ... + @overload + def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]: ... + @overload + def get_ca_certs(self, binary_form: Literal[True]) -> list[bytes]: ... + @overload + def get_ca_certs(self, binary_form: bool = False) -> Any: ... + def get_ciphers(self) -> list[_Cipher]: ... + def set_default_verify_paths(self) -> None: ... + def set_ciphers(self, cipherlist: str, /) -> None: ... + def set_alpn_protocols(self, alpn_protocols: Iterable[str]) -> None: ... + if sys.version_info >= (3, 10): + @deprecated("Deprecated since Python 3.10. Use ALPN instead.") + def set_npn_protocols(self, npn_protocols: Iterable[str]) -> None: ... + else: + def set_npn_protocols(self, npn_protocols: Iterable[str]) -> None: ... + + def set_servername_callback(self, server_name_callback: _SrvnmeCbType | None) -> None: ... + def load_dh_params(self, path: str, /) -> None: ... + def set_ecdh_curve(self, name: str, /) -> None: ... + def wrap_socket( + self, + sock: socket.socket, + server_side: bool = False, + do_handshake_on_connect: bool = True, + suppress_ragged_eofs: bool = True, + server_hostname: str | bytes | None = None, + session: SSLSession | None = None, + ) -> SSLSocket: ... + def wrap_bio( + self, + incoming: MemoryBIO, + outgoing: MemoryBIO, + server_side: bool = False, + server_hostname: str | bytes | None = None, + session: SSLSession | None = None, + ) -> SSLObject: ... + +def create_default_context( + purpose: Purpose = Purpose.SERVER_AUTH, + *, + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, +) -> SSLContext: ... + +if sys.version_info >= (3, 10): + def _create_unverified_context( + protocol: int | None = None, + *, + cert_reqs: int = ..., + check_hostname: bool = False, + purpose: Purpose = Purpose.SERVER_AUTH, + certfile: StrOrBytesPath | None = None, + keyfile: StrOrBytesPath | None = None, + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, + ) -> SSLContext: ... 
+ +else: + def _create_unverified_context( + protocol: int = ..., + *, + cert_reqs: int = ..., + check_hostname: bool = False, + purpose: Purpose = Purpose.SERVER_AUTH, + certfile: StrOrBytesPath | None = None, + keyfile: StrOrBytesPath | None = None, + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, + ) -> SSLContext: ... + +_create_default_https_context = create_default_context + +class SSLObject: + context: SSLContext + @property + def server_side(self) -> bool: ... + @property + def server_hostname(self) -> str | None: ... + session: SSLSession | None + @property + def session_reused(self) -> bool: ... + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def read(self, len: int = 1024, buffer: bytearray | None = None) -> bytes: ... + def write(self, data: ReadableBuffer) -> int: ... + @overload + def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: ... + @overload + def getpeercert(self, binary_form: Literal[True]) -> bytes | None: ... + @overload + def getpeercert(self, binary_form: bool) -> _PeerCertRetType: ... + def selected_alpn_protocol(self) -> str | None: ... + if sys.version_info >= (3, 10): + @deprecated("Deprecated since Python 3.10. Use ALPN instead.") + def selected_npn_protocol(self) -> str | None: ... + else: + def selected_npn_protocol(self) -> str | None: ... + + def cipher(self) -> tuple[str, str, int] | None: ... + def shared_ciphers(self) -> list[tuple[str, str, int]] | None: ... + def compression(self) -> str | None: ... + def pending(self) -> int: ... + def do_handshake(self) -> None: ... + def unwrap(self) -> None: ... + def version(self) -> str | None: ... + def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: ... + def verify_client_post_handshake(self) -> None: ... + if sys.version_info >= (3, 13): + def get_verified_chain(self) -> list[bytes]: ... + def get_unverified_chain(self) -> list[bytes]: ... + +class SSLErrorNumber(enum.IntEnum): + SSL_ERROR_EOF = 8 + SSL_ERROR_INVALID_ERROR_CODE = 10 + SSL_ERROR_SSL = 1 + SSL_ERROR_SYSCALL = 5 + SSL_ERROR_WANT_CONNECT = 7 + SSL_ERROR_WANT_READ = 2 + SSL_ERROR_WANT_WRITE = 3 + SSL_ERROR_WANT_X509_LOOKUP = 4 + SSL_ERROR_ZERO_RETURN = 6 + +SSL_ERROR_EOF: Final = SSLErrorNumber.SSL_ERROR_EOF # undocumented +SSL_ERROR_INVALID_ERROR_CODE: Final = SSLErrorNumber.SSL_ERROR_INVALID_ERROR_CODE # undocumented +SSL_ERROR_SSL: Final = SSLErrorNumber.SSL_ERROR_SSL # undocumented +SSL_ERROR_SYSCALL: Final = SSLErrorNumber.SSL_ERROR_SYSCALL # undocumented +SSL_ERROR_WANT_CONNECT: Final = SSLErrorNumber.SSL_ERROR_WANT_CONNECT # undocumented +SSL_ERROR_WANT_READ: Final = SSLErrorNumber.SSL_ERROR_WANT_READ # undocumented +SSL_ERROR_WANT_WRITE: Final = SSLErrorNumber.SSL_ERROR_WANT_WRITE # undocumented +SSL_ERROR_WANT_X509_LOOKUP: Final = SSLErrorNumber.SSL_ERROR_WANT_X509_LOOKUP # undocumented +SSL_ERROR_ZERO_RETURN: Final = SSLErrorNumber.SSL_ERROR_ZERO_RETURN # undocumented + +def get_protocol_name(protocol_code: int) -> str: ... 
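As a usage note for the SSLContext API stubbed in this file: create_default_context() yields a client context with CERT_REQUIRED and hostname checking enabled, and wrap_socket() returns the SSLSocket class defined above. A small sketch, assuming network access is available; the host name is illustrative only:

import socket
import ssl

hostname = "example.org"  # illustrative host, not part of the stub
context = ssl.create_default_context()

# Wrap a plain TCP connection; server_hostname drives SNI and certificate checks.
with socket.create_connection((hostname, 443), timeout=10) as raw_sock:
    with context.wrap_socket(raw_sock, server_hostname=hostname) as tls_sock:
        print(tls_sock.version())      # negotiated protocol, e.g. 'TLSv1.3'
        print(tls_sock.getpeercert())  # parsed peer certificate dict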
+ +PEM_FOOTER: Final[str] +PEM_HEADER: Final[str] +SOCK_STREAM: Final = socket.SOCK_STREAM +SOL_SOCKET: Final = socket.SOL_SOCKET +SO_TYPE: Final = socket.SO_TYPE diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/stat.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/stat.pyi new file mode 100644 index 0000000..6c26080 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/stat.pyi @@ -0,0 +1,114 @@ +import sys +from _stat import ( + S_ENFMT as S_ENFMT, + S_IEXEC as S_IEXEC, + S_IFBLK as S_IFBLK, + S_IFCHR as S_IFCHR, + S_IFDIR as S_IFDIR, + S_IFDOOR as S_IFDOOR, + S_IFIFO as S_IFIFO, + S_IFLNK as S_IFLNK, + S_IFMT as S_IFMT, + S_IFPORT as S_IFPORT, + S_IFREG as S_IFREG, + S_IFSOCK as S_IFSOCK, + S_IFWHT as S_IFWHT, + S_IMODE as S_IMODE, + S_IREAD as S_IREAD, + S_IRGRP as S_IRGRP, + S_IROTH as S_IROTH, + S_IRUSR as S_IRUSR, + S_IRWXG as S_IRWXG, + S_IRWXO as S_IRWXO, + S_IRWXU as S_IRWXU, + S_ISBLK as S_ISBLK, + S_ISCHR as S_ISCHR, + S_ISDIR as S_ISDIR, + S_ISDOOR as S_ISDOOR, + S_ISFIFO as S_ISFIFO, + S_ISGID as S_ISGID, + S_ISLNK as S_ISLNK, + S_ISPORT as S_ISPORT, + S_ISREG as S_ISREG, + S_ISSOCK as S_ISSOCK, + S_ISUID as S_ISUID, + S_ISVTX as S_ISVTX, + S_ISWHT as S_ISWHT, + S_IWGRP as S_IWGRP, + S_IWOTH as S_IWOTH, + S_IWRITE as S_IWRITE, + S_IWUSR as S_IWUSR, + S_IXGRP as S_IXGRP, + S_IXOTH as S_IXOTH, + S_IXUSR as S_IXUSR, + SF_APPEND as SF_APPEND, + SF_ARCHIVED as SF_ARCHIVED, + SF_IMMUTABLE as SF_IMMUTABLE, + SF_NOUNLINK as SF_NOUNLINK, + SF_SNAPSHOT as SF_SNAPSHOT, + ST_ATIME as ST_ATIME, + ST_CTIME as ST_CTIME, + ST_DEV as ST_DEV, + ST_GID as ST_GID, + ST_INO as ST_INO, + ST_MODE as ST_MODE, + ST_MTIME as ST_MTIME, + ST_NLINK as ST_NLINK, + ST_SIZE as ST_SIZE, + ST_UID as ST_UID, + UF_APPEND as UF_APPEND, + UF_COMPRESSED as UF_COMPRESSED, + UF_HIDDEN as UF_HIDDEN, + UF_IMMUTABLE as UF_IMMUTABLE, + UF_NODUMP as UF_NODUMP, + UF_NOUNLINK as UF_NOUNLINK, + UF_OPAQUE as UF_OPAQUE, + filemode as filemode, +) +from typing import Final + +if sys.platform == "win32": + from _stat import ( + IO_REPARSE_TAG_APPEXECLINK as IO_REPARSE_TAG_APPEXECLINK, + IO_REPARSE_TAG_MOUNT_POINT as IO_REPARSE_TAG_MOUNT_POINT, + IO_REPARSE_TAG_SYMLINK as IO_REPARSE_TAG_SYMLINK, + ) + +if sys.version_info >= (3, 13): + from _stat import ( + SF_DATALESS as SF_DATALESS, + SF_FIRMLINK as SF_FIRMLINK, + SF_SETTABLE as SF_SETTABLE, + UF_DATAVAULT as UF_DATAVAULT, + UF_SETTABLE as UF_SETTABLE, + UF_TRACKED as UF_TRACKED, + ) + + if sys.platform == "darwin": + from _stat import SF_SUPPORTED as SF_SUPPORTED, SF_SYNTHETIC as SF_SYNTHETIC + +# _stat.c defines FILE_ATTRIBUTE_* constants conditionally, +# making them available only at runtime on Windows. +# stat.py unconditionally redefines the same FILE_ATTRIBUTE_* constants +# on all platforms. 
+FILE_ATTRIBUTE_ARCHIVE: Final = 32 +FILE_ATTRIBUTE_COMPRESSED: Final = 2048 +FILE_ATTRIBUTE_DEVICE: Final = 64 +FILE_ATTRIBUTE_DIRECTORY: Final = 16 +FILE_ATTRIBUTE_ENCRYPTED: Final = 16384 +FILE_ATTRIBUTE_HIDDEN: Final = 2 +FILE_ATTRIBUTE_INTEGRITY_STREAM: Final = 32768 +FILE_ATTRIBUTE_NORMAL: Final = 128 +FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: Final = 8192 +FILE_ATTRIBUTE_NO_SCRUB_DATA: Final = 131072 +FILE_ATTRIBUTE_OFFLINE: Final = 4096 +FILE_ATTRIBUTE_READONLY: Final = 1 +FILE_ATTRIBUTE_REPARSE_POINT: Final = 1024 +FILE_ATTRIBUTE_SPARSE_FILE: Final = 512 +FILE_ATTRIBUTE_SYSTEM: Final = 4 +FILE_ATTRIBUTE_TEMPORARY: Final = 256 +FILE_ATTRIBUTE_VIRTUAL: Final = 65536 + +if sys.version_info >= (3, 13): + # https://github.com/python/cpython/issues/114081#issuecomment-2119017790 + SF_RESTRICTED: Final = 0x00080000 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/statistics.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/statistics.pyi new file mode 100644 index 0000000..ba9e5f1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/statistics.pyi @@ -0,0 +1,159 @@ +import sys +from _typeshed import SupportsRichComparisonT +from collections.abc import Callable, Hashable, Iterable, Sequence +from decimal import Decimal +from fractions import Fraction +from typing import Literal, NamedTuple, SupportsFloat, SupportsIndex, TypeVar +from typing_extensions import Self, TypeAlias + +__all__ = [ + "StatisticsError", + "fmean", + "geometric_mean", + "mean", + "harmonic_mean", + "pstdev", + "pvariance", + "stdev", + "variance", + "median", + "median_low", + "median_high", + "median_grouped", + "mode", + "multimode", + "NormalDist", + "quantiles", +] + +if sys.version_info >= (3, 10): + __all__ += ["covariance", "correlation", "linear_regression"] +if sys.version_info >= (3, 13): + __all__ += ["kde", "kde_random"] + +# Most functions in this module accept homogeneous collections of one of these types +_Number: TypeAlias = float | Decimal | Fraction +_NumberT = TypeVar("_NumberT", float, Decimal, Fraction) + +# Used in mode, multimode +_HashableT = TypeVar("_HashableT", bound=Hashable) + +# Used in NormalDist.samples and kde_random +_Seed: TypeAlias = int | float | str | bytes | bytearray # noqa: Y041 + +class StatisticsError(ValueError): ... + +if sys.version_info >= (3, 11): + def fmean(data: Iterable[SupportsFloat], weights: Iterable[SupportsFloat] | None = None) -> float: ... + +else: + def fmean(data: Iterable[SupportsFloat]) -> float: ... + +def geometric_mean(data: Iterable[SupportsFloat]) -> float: ... +def mean(data: Iterable[_NumberT]) -> _NumberT: ... + +if sys.version_info >= (3, 10): + def harmonic_mean(data: Iterable[_NumberT], weights: Iterable[_Number] | None = None) -> _NumberT: ... + +else: + def harmonic_mean(data: Iterable[_NumberT]) -> _NumberT: ... + +def median(data: Iterable[_NumberT]) -> _NumberT: ... +def median_low(data: Iterable[SupportsRichComparisonT]) -> SupportsRichComparisonT: ... +def median_high(data: Iterable[SupportsRichComparisonT]) -> SupportsRichComparisonT: ... + +if sys.version_info >= (3, 11): + def median_grouped(data: Iterable[SupportsFloat], interval: SupportsFloat = 1.0) -> float: ... + +else: + def median_grouped(data: Iterable[_NumberT], interval: _NumberT | float = 1) -> _NumberT | float: ... + +def mode(data: Iterable[_HashableT]) -> _HashableT: ... +def multimode(data: Iterable[_HashableT]) -> list[_HashableT]: ... +def pstdev(data: Iterable[_NumberT], mu: _NumberT | None = None) -> _NumberT: ... 
+def pvariance(data: Iterable[_NumberT], mu: _NumberT | None = None) -> _NumberT: ... +def quantiles( + data: Iterable[_NumberT], *, n: int = 4, method: Literal["inclusive", "exclusive"] = "exclusive" +) -> list[_NumberT]: ... +def stdev(data: Iterable[_NumberT], xbar: _NumberT | None = None) -> _NumberT: ... +def variance(data: Iterable[_NumberT], xbar: _NumberT | None = None) -> _NumberT: ... + +class NormalDist: + __slots__ = {"_mu": "Arithmetic mean of a normal distribution", "_sigma": "Standard deviation of a normal distribution"} + def __init__(self, mu: float = 0.0, sigma: float = 1.0) -> None: ... + @property + def mean(self) -> float: ... + @property + def median(self) -> float: ... + @property + def mode(self) -> float: ... + @property + def stdev(self) -> float: ... + @property + def variance(self) -> float: ... + @classmethod + def from_samples(cls, data: Iterable[SupportsFloat]) -> Self: ... + def samples(self, n: SupportsIndex, *, seed: _Seed | None = None) -> list[float]: ... + def pdf(self, x: float) -> float: ... + def cdf(self, x: float) -> float: ... + def inv_cdf(self, p: float) -> float: ... + def overlap(self, other: NormalDist) -> float: ... + def quantiles(self, n: int = 4) -> list[float]: ... + def zscore(self, x: float) -> float: ... + def __eq__(x1, x2: object) -> bool: ... + def __add__(x1, x2: float | NormalDist) -> NormalDist: ... + def __sub__(x1, x2: float | NormalDist) -> NormalDist: ... + def __mul__(x1, x2: float) -> NormalDist: ... + def __truediv__(x1, x2: float) -> NormalDist: ... + def __pos__(x1) -> NormalDist: ... + def __neg__(x1) -> NormalDist: ... + __radd__ = __add__ + def __rsub__(x1, x2: float | NormalDist) -> NormalDist: ... + __rmul__ = __mul__ + def __hash__(self) -> int: ... + +if sys.version_info >= (3, 12): + def correlation( + x: Sequence[_Number], y: Sequence[_Number], /, *, method: Literal["linear", "ranked"] = "linear" + ) -> float: ... + +elif sys.version_info >= (3, 10): + def correlation(x: Sequence[_Number], y: Sequence[_Number], /) -> float: ... + +if sys.version_info >= (3, 10): + def covariance(x: Sequence[_Number], y: Sequence[_Number], /) -> float: ... + + class LinearRegression(NamedTuple): + slope: float + intercept: float + +if sys.version_info >= (3, 11): + def linear_regression( + regressor: Sequence[_Number], dependent_variable: Sequence[_Number], /, *, proportional: bool = False + ) -> LinearRegression: ... + +elif sys.version_info >= (3, 10): + def linear_regression(regressor: Sequence[_Number], dependent_variable: Sequence[_Number], /) -> LinearRegression: ... + +if sys.version_info >= (3, 13): + _Kernel: TypeAlias = Literal[ + "normal", + "gauss", + "logistic", + "sigmoid", + "rectangular", + "uniform", + "triangular", + "parabolic", + "epanechnikov", + "quartic", + "biweight", + "triweight", + "cosine", + ] + def kde( + data: Sequence[float], h: float, kernel: _Kernel = "normal", *, cumulative: bool = False + ) -> Callable[[float], float]: ... + def kde_random( + data: Sequence[float], h: float, kernel: _Kernel = "normal", *, seed: _Seed | None = None + ) -> Callable[[], float]: ... 
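The statistics.pyi stub above only declares signatures; a minimal usage sketch of the declared API (fmean, quantiles, NormalDist) is shown below. The data values are illustrative and not part of the vendored file:

# Minimal sketch of the statistics API declared in the stub above; illustrative only.
import statistics

data = [2.0, 4.0, 4.0, 5.0, 7.0]
print(statistics.fmean(data))               # 4.4, a float per fmean(...) -> float
print(statistics.quantiles(data, n=4))      # three cut points; method defaults to "exclusive"

nd = statistics.NormalDist.from_samples(data)   # classmethod returning Self
print(nd.mean, nd.stdev)                    # read-only properties
print(nd.cdf(5.0))                          # P(X <= 5.0) for the fitted distribution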
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/string/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/string/__init__.pyi new file mode 100644 index 0000000..c8b32a9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/string/__init__.pyi @@ -0,0 +1,79 @@ +import sys +from _typeshed import StrOrLiteralStr +from collections.abc import Iterable, Mapping, Sequence +from re import Pattern, RegexFlag +from typing import Any, ClassVar, Final, overload +from typing_extensions import LiteralString + +__all__ = [ + "ascii_letters", + "ascii_lowercase", + "ascii_uppercase", + "capwords", + "digits", + "hexdigits", + "octdigits", + "printable", + "punctuation", + "whitespace", + "Formatter", + "Template", +] + +whitespace: Final = " \t\n\r\v\f" +ascii_lowercase: Final = "abcdefghijklmnopqrstuvwxyz" +ascii_uppercase: Final = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" +ascii_letters: Final[LiteralString] # string too long +digits: Final = "0123456789" +hexdigits: Final = "0123456789abcdefABCDEF" +octdigits: Final = "01234567" +punctuation: Final = r"""!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~""" +printable: Final[LiteralString] # string too long + +def capwords(s: StrOrLiteralStr, sep: StrOrLiteralStr | None = None) -> StrOrLiteralStr: ... + +class Template: + template: str + delimiter: ClassVar[str] + idpattern: ClassVar[str] + braceidpattern: ClassVar[str | None] + if sys.version_info >= (3, 14): + flags: ClassVar[RegexFlag | None] + else: + flags: ClassVar[RegexFlag] + pattern: ClassVar[Pattern[str]] + def __init__(self, template: str) -> None: ... + def substitute(self, mapping: Mapping[str, object] = {}, /, **kwds: object) -> str: ... + def safe_substitute(self, mapping: Mapping[str, object] = {}, /, **kwds: object) -> str: ... + if sys.version_info >= (3, 11): + def get_identifiers(self) -> list[str]: ... + def is_valid(self) -> bool: ... + +class Formatter: + @overload + def format(self, format_string: LiteralString, /, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ... + @overload + def format(self, format_string: str, /, *args: Any, **kwargs: Any) -> str: ... + @overload + def vformat( + self, format_string: LiteralString, args: Sequence[LiteralString], kwargs: Mapping[LiteralString, LiteralString] + ) -> LiteralString: ... + @overload + def vformat(self, format_string: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> str: ... + def _vformat( # undocumented + self, + format_string: str, + args: Sequence[Any], + kwargs: Mapping[str, Any], + used_args: set[int | str], + recursion_depth: int, + auto_arg_index: int = 0, + ) -> tuple[str, int]: ... + def parse( + self, format_string: StrOrLiteralStr + ) -> Iterable[tuple[StrOrLiteralStr, StrOrLiteralStr | None, StrOrLiteralStr | None, StrOrLiteralStr | None]]: ... + def get_field(self, field_name: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: ... + def get_value(self, key: int | str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: ... + def check_unused_args(self, used_args: set[int | str], args: Sequence[Any], kwargs: Mapping[str, Any]) -> None: ... + def format_field(self, value: Any, format_spec: str) -> Any: ... + def convert_field(self, value: Any, conversion: str | None) -> Any: ... 
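The string/__init__.pyi stub above declares Template and Formatter without behaviour; the following short sketch illustrates the Template and capwords signatures it describes. The strings used are illustrative only and not part of the vendored file:

# Minimal sketch of the string.Template / capwords API declared above; illustrative only.
from string import Template, capwords

t = Template("Hello, $name! Your id is ${ident}.")
print(t.substitute(name="Ada", ident=42))   # raises KeyError if a placeholder is missing
print(t.safe_substitute(name="Ada"))        # leaves $ident in place instead of raising
print(capwords("hello typeshed world"))     # "Hello Typeshed World"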
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/string/templatelib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/string/templatelib.pyi new file mode 100644 index 0000000..9906d31 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/string/templatelib.pyi @@ -0,0 +1,36 @@ +from collections.abc import Iterator +from types import GenericAlias +from typing import Any, Literal, TypeVar, final, overload + +_T = TypeVar("_T") + +@final +class Template: # TODO: consider making `Template` generic on `TypeVarTuple` + strings: tuple[str, ...] + interpolations: tuple[Interpolation, ...] + + def __new__(cls, *args: str | Interpolation) -> Template: ... + def __iter__(self) -> Iterator[str | Interpolation]: ... + def __add__(self, other: Template, /) -> Template: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + @property + def values(self) -> tuple[Any, ...]: ... # Tuple of interpolation values, which can have any type + +@final +class Interpolation: + value: Any # TODO: consider making `Interpolation` generic in runtime + expression: str + conversion: Literal["a", "r", "s"] | None + format_spec: str + + __match_args__ = ("value", "expression", "conversion", "format_spec") + + def __new__( + cls, value: Any, expression: str = "", conversion: Literal["a", "r", "s"] | None = None, format_spec: str = "" + ) -> Interpolation: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +@overload +def convert(obj: _T, /, conversion: None) -> _T: ... +@overload +def convert(obj: object, /, conversion: Literal["r", "s", "a"]) -> str: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/stringprep.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/stringprep.pyi new file mode 100644 index 0000000..d67955e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/stringprep.pyi @@ -0,0 +1,29 @@ +from typing import Final + +b1_set: Final[set[int]] +b3_exceptions: Final[dict[int, str]] +c22_specials: Final[set[int]] +c6_set: Final[set[int]] +c7_set: Final[set[int]] +c8_set: Final[set[int]] +c9_set: Final[set[int]] + +def in_table_a1(code: str) -> bool: ... +def in_table_b1(code: str) -> bool: ... +def map_table_b3(code: str) -> str: ... +def map_table_b2(a: str) -> str: ... +def in_table_c11(code: str) -> bool: ... +def in_table_c12(code: str) -> bool: ... +def in_table_c11_c12(code: str) -> bool: ... +def in_table_c21(code: str) -> bool: ... +def in_table_c22(code: str) -> bool: ... +def in_table_c21_c22(code: str) -> bool: ... +def in_table_c3(code: str) -> bool: ... +def in_table_c4(code: str) -> bool: ... +def in_table_c5(code: str) -> bool: ... +def in_table_c6(code: str) -> bool: ... +def in_table_c7(code: str) -> bool: ... +def in_table_c8(code: str) -> bool: ... +def in_table_c9(code: str) -> bool: ... +def in_table_d1(code: str) -> bool: ... +def in_table_d2(code: str) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/struct.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/struct.pyi new file mode 100644 index 0000000..2c26908 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/struct.pyi @@ -0,0 +1,5 @@ +from _struct import * + +__all__ = ["calcsize", "pack", "pack_into", "unpack", "unpack_from", "iter_unpack", "Struct", "error"] + +class error(Exception): ... 
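The struct.pyi stub above simply re-exports _struct; a minimal sketch of the re-exported API (pack, unpack, calcsize, Struct) follows. The format string and values are illustrative only, not part of the vendored file:

# Minimal sketch of the struct API re-exported by the stub above; illustrative only.
import struct

packed = struct.pack("<IhB", 1024, -7, 255)     # little-endian: uint32, int16, uint8
print(struct.calcsize("<IhB"))                  # 7 bytes for this format
print(struct.unpack("<IhB", packed))            # (1024, -7, 255)

s = struct.Struct("<IhB")                       # precompiled equivalent of the same format
print(s.size)                                   # 7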
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/subprocess.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/subprocess.pyi new file mode 100644 index 0000000..e1e25bc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/subprocess.pyi @@ -0,0 +1,2093 @@ +import sys +from _typeshed import MaybeNone, ReadableBuffer, StrOrBytesPath +from collections.abc import Callable, Collection, Iterable, Mapping, Sequence +from types import GenericAlias, TracebackType +from typing import IO, Any, AnyStr, Final, Generic, Literal, TypeVar, overload +from typing_extensions import Self, TypeAlias + +__all__ = [ + "Popen", + "PIPE", + "STDOUT", + "call", + "check_call", + "getstatusoutput", + "getoutput", + "check_output", + "run", + "CalledProcessError", + "DEVNULL", + "SubprocessError", + "TimeoutExpired", + "CompletedProcess", +] + +if sys.platform == "win32": + __all__ += [ + "CREATE_NEW_CONSOLE", + "CREATE_NEW_PROCESS_GROUP", + "STARTF_USESHOWWINDOW", + "STARTF_USESTDHANDLES", + "STARTUPINFO", + "STD_ERROR_HANDLE", + "STD_INPUT_HANDLE", + "STD_OUTPUT_HANDLE", + "SW_HIDE", + "ABOVE_NORMAL_PRIORITY_CLASS", + "BELOW_NORMAL_PRIORITY_CLASS", + "CREATE_BREAKAWAY_FROM_JOB", + "CREATE_DEFAULT_ERROR_MODE", + "CREATE_NO_WINDOW", + "DETACHED_PROCESS", + "HIGH_PRIORITY_CLASS", + "IDLE_PRIORITY_CLASS", + "NORMAL_PRIORITY_CLASS", + "REALTIME_PRIORITY_CLASS", + ] + +# We prefer to annotate inputs to methods (eg subprocess.check_call) with these +# union types. +# For outputs we use laborious literal based overloads to try to determine +# which specific return types to use, and prefer to fall back to Any when +# this does not work, so the caller does not have to use an assertion to confirm +# which type. +# +# For example: +# +# try: +# x = subprocess.check_output(["ls", "-l"]) +# reveal_type(x) # bytes, based on the overloads +# except TimeoutError as e: +# reveal_type(e.cmd) # Any, but morally is _CMD +_FILE: TypeAlias = None | int | IO[Any] +_InputString: TypeAlias = ReadableBuffer | str +_CMD: TypeAlias = StrOrBytesPath | Sequence[StrOrBytesPath] +if sys.platform == "win32": + _ENV: TypeAlias = Mapping[str, str] +else: + _ENV: TypeAlias = Mapping[bytes, StrOrBytesPath] | Mapping[str, StrOrBytesPath] + +_T = TypeVar("_T") + +# These two are private but documented +if sys.version_info >= (3, 11): + _USE_VFORK: Final[bool] +_USE_POSIX_SPAWN: Final[bool] + +class CompletedProcess(Generic[_T]): + # morally: _CMD + args: Any + returncode: int + # These can both be None, but requiring checks for None would be tedious + # and writing all the overloads would be horrific. + stdout: _T + stderr: _T + def __init__(self, args: _CMD, returncode: int, stdout: _T | None = None, stderr: _T | None = None) -> None: ... + def check_returncode(self) -> None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
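The comment and CompletedProcess declaration above describe how the stub models the result of run(): stdout/stderr are typed by the generic parameter, while args falls back to Any. A minimal usage sketch, assuming an `echo` executable on PATH (the command is illustrative, not part of the stub):

# Minimal sketch of CompletedProcess as declared above; illustrative only.
import subprocess

result = subprocess.run(["echo", "hello"], capture_output=True, text=True)
print(result.returncode)        # int
print(result.stdout)            # str here, because text=True selects the str overload
result.check_returncode()       # raises CalledProcessError if returncode is non-zero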
+ +if sys.version_info >= (3, 11): + # 3.11 adds "process_group" argument + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: str | None = None, + text: Literal[True], + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + capture_output: bool = False, + check: bool = False, + encoding: str, + errors: str | None = None, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> CompletedProcess[str]: ... 
+ @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + # where the *real* keyword only args start + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False] | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + capture_output: bool = False, + check: bool = False, + encoding: None = None, + errors: None = None, + input: ReadableBuffer | None = None, + text: Literal[False] | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> CompletedProcess[bytes]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: _InputString | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> CompletedProcess[Any]: ... 
+ +elif sys.version_info >= (3, 10): + # 3.10 adds "pipesize" argument + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: str | None = None, + text: Literal[True], + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + capture_output: bool = False, + check: bool = False, + encoding: str, + errors: str | None = None, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> CompletedProcess[str]: ... 
+ @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + # where the *real* keyword only args start + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False] | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + capture_output: bool = False, + check: bool = False, + encoding: None = None, + errors: None = None, + input: ReadableBuffer | None = None, + text: Literal[False] | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> CompletedProcess[bytes]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: _InputString | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> CompletedProcess[Any]: ... 
+ +else: + # 3.9 adds arguments "user", "group", "extra_groups" and "umask" + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: str | None = None, + text: Literal[True], + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + capture_output: bool = False, + check: bool = False, + encoding: str, + errors: str | None = None, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> CompletedProcess[str]: ... 
+ @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + # where the *real* keyword only args start + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False] | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + capture_output: bool = False, + check: bool = False, + encoding: None = None, + errors: None = None, + input: ReadableBuffer | None = None, + text: Literal[False] | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> CompletedProcess[bytes]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: _InputString | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> CompletedProcess[Any]: ... 
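The if/elif/else chain above spells out the run() overloads per supported Python version; their practical effect is that the declared return type narrows to CompletedProcess[str] or CompletedProcess[bytes] depending on text, encoding, errors, and universal_newlines. A short sketch of how a type checker resolves them, assuming an `ls` executable on PATH (illustrative only):

# How the run() overloads above resolve for a type checker; illustrative only.
import subprocess

r_bytes = subprocess.run(["ls"], capture_output=True)                   # CompletedProcess[bytes]
r_text = subprocess.run(["ls"], capture_output=True, text=True)         # CompletedProcess[str]
r_enc = subprocess.run(["ls"], capture_output=True, encoding="utf-8")   # CompletedProcess[str]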
+ +# Same args as Popen.__init__ +if sys.version_info >= (3, 11): + # 3.11 adds "process_group" argument + def call( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + encoding: str | None = None, + timeout: float | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> int: ... + +elif sys.version_info >= (3, 10): + # 3.10 adds "pipesize" argument + def call( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + encoding: str | None = None, + timeout: float | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> int: ... + +else: + def call( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + encoding: str | None = None, + timeout: float | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> int: ... + +# Same args as Popen.__init__ +if sys.version_info >= (3, 11): + # 3.11 adds "process_group" argument + def check_call( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + timeout: float | None = None, + *, + encoding: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> int: ... 
+ +elif sys.version_info >= (3, 10): + # 3.10 adds "pipesize" argument + def check_call( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + timeout: float | None = None, + *, + encoding: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> int: ... + +else: + def check_call( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + timeout: float | None = None, + *, + encoding: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> int: ... + +if sys.version_info >= (3, 11): + # 3.11 adds "process_group" argument + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + timeout: float | None = None, + input: _InputString | None = None, + encoding: str | None = None, + errors: str | None = None, + text: Literal[True], + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + timeout: float | None = None, + input: _InputString | None = None, + encoding: str, + errors: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> str: ... 
+ @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + timeout: float | None = None, + input: _InputString | None = None, + encoding: str | None = None, + errors: str, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + # where the real keyword only ones start + timeout: float | None = None, + input: _InputString | None = None, + encoding: str | None = None, + errors: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False] | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + timeout: float | None = None, + input: _InputString | None = None, + encoding: None = None, + errors: None = None, + text: Literal[False] | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> bytes: ... 
+ @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + timeout: float | None = None, + input: _InputString | None = None, + encoding: str | None = None, + errors: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> Any: ... # morally: -> str | bytes + +elif sys.version_info >= (3, 10): + # 3.10 adds "pipesize" argument + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + timeout: float | None = None, + input: _InputString | None = None, + encoding: str | None = None, + errors: str | None = None, + text: Literal[True], + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + timeout: float | None = None, + input: _InputString | None = None, + encoding: str, + errors: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + timeout: float | None = None, + input: _InputString | None = None, + encoding: str | None = None, + errors: str, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> str: ... 
+ @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + # where the real keyword only ones start + timeout: float | None = None, + input: _InputString | None = None, + encoding: str | None = None, + errors: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False] | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + timeout: float | None = None, + input: _InputString | None = None, + encoding: None = None, + errors: None = None, + text: Literal[False] | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> bytes: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + timeout: float | None = None, + input: _InputString | None = None, + encoding: str | None = None, + errors: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> Any: ... # morally: -> str | bytes + +else: + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + timeout: float | None = None, + input: _InputString | None = None, + encoding: str | None = None, + errors: str | None = None, + text: Literal[True], + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> str: ... 
+ @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + timeout: float | None = None, + input: _InputString | None = None, + encoding: str, + errors: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + timeout: float | None = None, + input: _InputString | None = None, + encoding: str | None = None, + errors: str, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + # where the real keyword only ones start + timeout: float | None = None, + input: _InputString | None = None, + encoding: str | None = None, + errors: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False] | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + timeout: float | None = None, + input: _InputString | None = None, + encoding: None = None, + errors: None = None, + text: Literal[False] | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> bytes: ... 
+ @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + timeout: float | None = None, + input: _InputString | None = None, + encoding: str | None = None, + errors: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> Any: ... # morally: -> str | bytes + +PIPE: Final[int] +STDOUT: Final[int] +DEVNULL: Final[int] + +class SubprocessError(Exception): ... + +class TimeoutExpired(SubprocessError): + def __init__( + self, cmd: _CMD, timeout: float, output: str | bytes | None = None, stderr: str | bytes | None = None + ) -> None: ... + # morally: _CMD + cmd: Any + timeout: float + # morally: str | bytes | None + output: Any + stdout: bytes | None + stderr: bytes | None + +class CalledProcessError(SubprocessError): + returncode: int + # morally: _CMD + cmd: Any + # morally: str | bytes | None + output: Any + + # morally: str | bytes | None + stdout: Any + stderr: Any + def __init__( + self, returncode: int, cmd: _CMD, output: str | bytes | None = None, stderr: str | bytes | None = None + ) -> None: ... + +class Popen(Generic[AnyStr]): + args: _CMD + stdin: IO[AnyStr] | None + stdout: IO[AnyStr] | None + stderr: IO[AnyStr] | None + pid: int + returncode: int | MaybeNone + universal_newlines: bool + + if sys.version_info >= (3, 11): + # process_group is added in 3.11 + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + text: bool | None = None, + encoding: str, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> None: ... 
+ @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + text: bool | None = None, + encoding: str | None = None, + errors: str, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> None: ... + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + # where the *real* keyword only args start + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> None: ... + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + text: Literal[True], + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> None: ... 
+ @overload + def __init__( + self: Popen[bytes], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False] | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + text: Literal[False] | None = None, + encoding: None = None, + errors: None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> None: ... + @overload + def __init__( + self: Popen[Any], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> None: ... + elif sys.version_info >= (3, 10): + # pipesize is added in 3.10 + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + text: bool | None = None, + encoding: str, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> None: ... + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + text: bool | None = None, + encoding: str | None = None, + errors: str, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> None: ... 
+ @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + # where the *real* keyword only args start + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> None: ... + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + text: Literal[True], + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> None: ... + @overload + def __init__( + self: Popen[bytes], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False] | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + text: Literal[False] | None = None, + encoding: None = None, + errors: None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> None: ... + @overload + def __init__( + self: Popen[Any], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> None: ... 
+ else: + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + text: bool | None = None, + encoding: str, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> None: ... + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + text: bool | None = None, + encoding: str | None = None, + errors: str, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> None: ... + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + # where the *real* keyword only args start + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> None: ... + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + text: Literal[True], + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> None: ... 
+ @overload + def __init__( + self: Popen[bytes], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False] | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + text: Literal[False] | None = None, + encoding: None = None, + errors: None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> None: ... + @overload + def __init__( + self: Popen[Any], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = (), + *, + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> None: ... + + def poll(self) -> int | None: ... + def wait(self, timeout: float | None = None) -> int: ... + # morally the members of the returned tuple should be optional + # TODO: this should allow ReadableBuffer for Popen[bytes], but adding + # overloads for that runs into a mypy bug (python/mypy#14070). + def communicate(self, input: AnyStr | None = None, timeout: float | None = None) -> tuple[AnyStr, AnyStr]: ... + def send_signal(self, sig: int) -> None: ... + def terminate(self) -> None: ... + def kill(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __del__(self) -> None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +# The result really is always a str. +if sys.version_info >= (3, 11): + def getstatusoutput(cmd: _CMD, *, encoding: str | None = None, errors: str | None = None) -> tuple[int, str]: ... + def getoutput(cmd: _CMD, *, encoding: str | None = None, errors: str | None = None) -> str: ... + +else: + def getstatusoutput(cmd: _CMD) -> tuple[int, str]: ... + def getoutput(cmd: _CMD) -> str: ... + +def list2cmdline(seq: Iterable[StrOrBytesPath]) -> str: ... # undocumented + +if sys.platform == "win32": + if sys.version_info >= (3, 13): + from _winapi import STARTF_FORCEOFFFEEDBACK, STARTF_FORCEONFEEDBACK + + __all__ += ["STARTF_FORCEOFFFEEDBACK", "STARTF_FORCEONFEEDBACK"] + + class STARTUPINFO: + def __init__( + self, + *, + dwFlags: int = 0, + hStdInput: Any | None = None, + hStdOutput: Any | None = None, + hStdError: Any | None = None, + wShowWindow: int = 0, + lpAttributeList: Mapping[str, Any] | None = None, + ) -> None: ... 
+ dwFlags: int + hStdInput: Any | None + hStdOutput: Any | None + hStdError: Any | None + wShowWindow: int + lpAttributeList: Mapping[str, Any] + def copy(self) -> STARTUPINFO: ... + + from _winapi import ( + ABOVE_NORMAL_PRIORITY_CLASS as ABOVE_NORMAL_PRIORITY_CLASS, + BELOW_NORMAL_PRIORITY_CLASS as BELOW_NORMAL_PRIORITY_CLASS, + CREATE_BREAKAWAY_FROM_JOB as CREATE_BREAKAWAY_FROM_JOB, + CREATE_DEFAULT_ERROR_MODE as CREATE_DEFAULT_ERROR_MODE, + CREATE_NEW_CONSOLE as CREATE_NEW_CONSOLE, + CREATE_NEW_PROCESS_GROUP as CREATE_NEW_PROCESS_GROUP, + CREATE_NO_WINDOW as CREATE_NO_WINDOW, + DETACHED_PROCESS as DETACHED_PROCESS, + HIGH_PRIORITY_CLASS as HIGH_PRIORITY_CLASS, + IDLE_PRIORITY_CLASS as IDLE_PRIORITY_CLASS, + NORMAL_PRIORITY_CLASS as NORMAL_PRIORITY_CLASS, + REALTIME_PRIORITY_CLASS as REALTIME_PRIORITY_CLASS, + STARTF_USESHOWWINDOW as STARTF_USESHOWWINDOW, + STARTF_USESTDHANDLES as STARTF_USESTDHANDLES, + STD_ERROR_HANDLE as STD_ERROR_HANDLE, + STD_INPUT_HANDLE as STD_INPUT_HANDLE, + STD_OUTPUT_HANDLE as STD_OUTPUT_HANDLE, + SW_HIDE as SW_HIDE, + ) diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sunau.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sunau.pyi new file mode 100644 index 0000000..f83a0a4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sunau.pyi @@ -0,0 +1,82 @@ +from _typeshed import Unused +from typing import IO, Any, Final, Literal, NamedTuple, NoReturn, overload +from typing_extensions import Self, TypeAlias + +_File: TypeAlias = str | IO[bytes] + +class Error(Exception): ... + +AUDIO_FILE_MAGIC: Final = 0x2E736E64 +AUDIO_FILE_ENCODING_MULAW_8: Final = 1 +AUDIO_FILE_ENCODING_LINEAR_8: Final = 2 +AUDIO_FILE_ENCODING_LINEAR_16: Final = 3 +AUDIO_FILE_ENCODING_LINEAR_24: Final = 4 +AUDIO_FILE_ENCODING_LINEAR_32: Final = 5 +AUDIO_FILE_ENCODING_FLOAT: Final = 6 +AUDIO_FILE_ENCODING_DOUBLE: Final = 7 +AUDIO_FILE_ENCODING_ADPCM_G721: Final = 23 +AUDIO_FILE_ENCODING_ADPCM_G722: Final = 24 +AUDIO_FILE_ENCODING_ADPCM_G723_3: Final = 25 +AUDIO_FILE_ENCODING_ADPCM_G723_5: Final = 26 +AUDIO_FILE_ENCODING_ALAW_8: Final = 27 +AUDIO_UNKNOWN_SIZE: Final = 0xFFFFFFFF + +class _sunau_params(NamedTuple): + nchannels: int + sampwidth: int + framerate: int + nframes: int + comptype: str + compname: str + +class Au_read: + def __init__(self, f: _File) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + def __del__(self) -> None: ... + def getfp(self) -> IO[bytes] | None: ... + def rewind(self) -> None: ... + def close(self) -> None: ... + def tell(self) -> int: ... + def getnchannels(self) -> int: ... + def getnframes(self) -> int: ... + def getsampwidth(self) -> int: ... + def getframerate(self) -> int: ... + def getcomptype(self) -> str: ... + def getcompname(self) -> str: ... + def getparams(self) -> _sunau_params: ... + def getmarkers(self) -> None: ... + def getmark(self, id: Any) -> NoReturn: ... + def setpos(self, pos: int) -> None: ... + def readframes(self, nframes: int) -> bytes | None: ... + +class Au_write: + def __init__(self, f: _File) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + def __del__(self) -> None: ... + def setnchannels(self, nchannels: int) -> None: ... + def getnchannels(self) -> int: ... + def setsampwidth(self, sampwidth: int) -> None: ... + def getsampwidth(self) -> int: ... + def setframerate(self, framerate: float) -> None: ... + def getframerate(self) -> int: ... 
+ def setnframes(self, nframes: int) -> None: ... + def getnframes(self) -> int: ... + def setcomptype(self, type: str, name: str) -> None: ... + def getcomptype(self) -> str: ... + def getcompname(self) -> str: ... + def setparams(self, params: _sunau_params) -> None: ... + def getparams(self) -> _sunau_params: ... + def tell(self) -> int: ... + # should be any bytes-like object after 3.4, but we don't have a type for that + def writeframesraw(self, data: bytes) -> None: ... + def writeframes(self, data: bytes) -> None: ... + def close(self) -> None: ... + +@overload +def open(f: _File, mode: Literal["r", "rb"]) -> Au_read: ... +@overload +def open(f: _File, mode: Literal["w", "wb"]) -> Au_write: ... +@overload +def open(f: _File, mode: str | None = None) -> Any: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/symbol.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/symbol.pyi new file mode 100644 index 0000000..5344ce5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/symbol.pyi @@ -0,0 +1,95 @@ +from typing import Final + +single_input: Final[int] +file_input: Final[int] +eval_input: Final[int] +decorator: Final[int] +decorators: Final[int] +decorated: Final[int] +async_funcdef: Final[int] +funcdef: Final[int] +parameters: Final[int] +typedargslist: Final[int] +tfpdef: Final[int] +varargslist: Final[int] +vfpdef: Final[int] +stmt: Final[int] +simple_stmt: Final[int] +small_stmt: Final[int] +expr_stmt: Final[int] +annassign: Final[int] +testlist_star_expr: Final[int] +augassign: Final[int] +del_stmt: Final[int] +pass_stmt: Final[int] +flow_stmt: Final[int] +break_stmt: Final[int] +continue_stmt: Final[int] +return_stmt: Final[int] +yield_stmt: Final[int] +raise_stmt: Final[int] +import_stmt: Final[int] +import_name: Final[int] +import_from: Final[int] +import_as_name: Final[int] +dotted_as_name: Final[int] +import_as_names: Final[int] +dotted_as_names: Final[int] +dotted_name: Final[int] +global_stmt: Final[int] +nonlocal_stmt: Final[int] +assert_stmt: Final[int] +compound_stmt: Final[int] +async_stmt: Final[int] +if_stmt: Final[int] +while_stmt: Final[int] +for_stmt: Final[int] +try_stmt: Final[int] +with_stmt: Final[int] +with_item: Final[int] +except_clause: Final[int] +suite: Final[int] +test: Final[int] +test_nocond: Final[int] +lambdef: Final[int] +lambdef_nocond: Final[int] +or_test: Final[int] +and_test: Final[int] +not_test: Final[int] +comparison: Final[int] +comp_op: Final[int] +star_expr: Final[int] +expr: Final[int] +xor_expr: Final[int] +and_expr: Final[int] +shift_expr: Final[int] +arith_expr: Final[int] +term: Final[int] +factor: Final[int] +power: Final[int] +atom_expr: Final[int] +atom: Final[int] +testlist_comp: Final[int] +trailer: Final[int] +subscriptlist: Final[int] +subscript: Final[int] +sliceop: Final[int] +exprlist: Final[int] +testlist: Final[int] +dictorsetmaker: Final[int] +classdef: Final[int] +arglist: Final[int] +argument: Final[int] +comp_iter: Final[int] +comp_for: Final[int] +comp_if: Final[int] +encoding_decl: Final[int] +yield_expr: Final[int] +yield_arg: Final[int] +sync_comp_for: Final[int] +func_body_suite: Final[int] +func_type: Final[int] +func_type_input: Final[int] +namedexpr_test: Final[int] +typelist: Final[int] +sym_name: Final[dict[int, str]] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/symtable.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/symtable.pyi new file mode 100644 index 0000000..a727b87 --- /dev/null +++ 
b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/symtable.pyi @@ -0,0 +1,89 @@ +import sys +from _collections_abc import dict_keys +from collections.abc import Sequence +from typing import Any +from typing_extensions import deprecated + +__all__ = ["symtable", "SymbolTable", "Class", "Function", "Symbol"] + +if sys.version_info >= (3, 13): + __all__ += ["SymbolTableType"] + +def symtable(code: str, filename: str, compile_type: str) -> SymbolTable: ... + +if sys.version_info >= (3, 13): + from enum import StrEnum + + class SymbolTableType(StrEnum): + MODULE = "module" + FUNCTION = "function" + CLASS = "class" + ANNOTATION = "annotation" + TYPE_ALIAS = "type alias" + TYPE_PARAMETERS = "type parameters" + TYPE_VARIABLE = "type variable" + +class SymbolTable: + def __init__(self, raw_table: Any, filename: str) -> None: ... + if sys.version_info >= (3, 13): + def get_type(self) -> SymbolTableType: ... + else: + def get_type(self) -> str: ... + + def get_id(self) -> int: ... + def get_name(self) -> str: ... + def get_lineno(self) -> int: ... + def is_optimized(self) -> bool: ... + def is_nested(self) -> bool: ... + def has_children(self) -> bool: ... + def get_identifiers(self) -> dict_keys[str, int]: ... + def lookup(self, name: str) -> Symbol: ... + def get_symbols(self) -> list[Symbol]: ... + def get_children(self) -> list[SymbolTable]: ... + +class Function(SymbolTable): + def get_parameters(self) -> tuple[str, ...]: ... + def get_locals(self) -> tuple[str, ...]: ... + def get_globals(self) -> tuple[str, ...]: ... + def get_frees(self) -> tuple[str, ...]: ... + def get_nonlocals(self) -> tuple[str, ...]: ... + +class Class(SymbolTable): + if sys.version_info >= (3, 14): + @deprecated("Deprecated since Python 3.14; will be removed in Python 3.16.") + def get_methods(self) -> tuple[str, ...]: ... + else: + def get_methods(self) -> tuple[str, ...]: ... + +class Symbol: + def __init__( + self, name: str, flags: int, namespaces: Sequence[SymbolTable] | None = None, *, module_scope: bool = False + ) -> None: ... + def is_nonlocal(self) -> bool: ... + def get_name(self) -> str: ... + def is_referenced(self) -> bool: ... + def is_parameter(self) -> bool: ... + if sys.version_info >= (3, 14): + def is_type_parameter(self) -> bool: ... + + def is_global(self) -> bool: ... + def is_declared_global(self) -> bool: ... + def is_local(self) -> bool: ... + def is_annotated(self) -> bool: ... + def is_free(self) -> bool: ... + if sys.version_info >= (3, 14): + def is_free_class(self) -> bool: ... + + def is_imported(self) -> bool: ... + def is_assigned(self) -> bool: ... + if sys.version_info >= (3, 14): + def is_comp_iter(self) -> bool: ... + def is_comp_cell(self) -> bool: ... + + def is_namespace(self) -> bool: ... + def get_namespaces(self) -> Sequence[SymbolTable]: ... + def get_namespace(self) -> SymbolTable: ... + +class SymbolTableFactory: + def new(self, table: Any, filename: str) -> SymbolTable: ... + def __call__(self, table: Any, filename: str) -> SymbolTable: ... 
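For reference, a minimal usage sketch of the symtable API that the stub above describes. This is not part of the vendored diff; the source string, filename, and variable names are illustrative only.

import symtable

# Build the symbol table for a tiny module and walk its children.
source = "x = 1\ndef f(y):\n    return x + y\n"
table = symtable.symtable(source, "<example>", "exec")

print(table.get_type())  # "module" (a SymbolTableType member on 3.13+)
for child in table.get_children():
    print(child.get_name(), child.get_lineno())

sym = table.lookup("x")
print(sym.is_assigned(), sym.is_referenced())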
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sys/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sys/__init__.pyi new file mode 100644 index 0000000..6abef85 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sys/__init__.pyi @@ -0,0 +1,520 @@ +import sys +from _typeshed import MaybeNone, OptExcInfo, ProfileFunction, StrOrBytesPath, TraceFunction, structseq +from _typeshed.importlib import MetaPathFinderProtocol, PathEntryFinderProtocol +from builtins import object as _object +from collections.abc import AsyncGenerator, Callable, Sequence +from io import TextIOWrapper +from types import FrameType, ModuleType, TracebackType +from typing import Any, Final, Literal, NoReturn, Protocol, TextIO, TypeVar, final, overload, type_check_only +from typing_extensions import LiteralString, TypeAlias, deprecated + +_T = TypeVar("_T") + +# see https://github.com/python/typeshed/issues/8513#issue-1333671093 for the rationale behind this alias +_ExitCode: TypeAlias = str | int | None + +# ----- sys variables ----- +if sys.platform != "win32": + abiflags: str +argv: list[str] +base_exec_prefix: str +base_prefix: str +byteorder: Literal["little", "big"] +builtin_module_names: Sequence[str] # actually a tuple of strings +copyright: str +if sys.platform == "win32": + dllhandle: int +dont_write_bytecode: bool +displayhook: Callable[[object], Any] +excepthook: Callable[[type[BaseException], BaseException, TracebackType | None], Any] +exec_prefix: str +executable: str +float_repr_style: Literal["short", "legacy"] +hexversion: int +last_type: type[BaseException] | None +last_value: BaseException | None +last_traceback: TracebackType | None +if sys.version_info >= (3, 12): + last_exc: BaseException # or undefined. +maxsize: int +maxunicode: int +meta_path: list[MetaPathFinderProtocol] +modules: dict[str, ModuleType] +if sys.version_info >= (3, 10): + orig_argv: list[str] +path: list[str] +path_hooks: list[Callable[[str], PathEntryFinderProtocol]] +path_importer_cache: dict[str, PathEntryFinderProtocol | None] +platform: LiteralString +platlibdir: str +prefix: str +pycache_prefix: str | None +ps1: object +ps2: object + +# TextIO is used instead of more specific types for the standard streams, +# since they are often monkeypatched at runtime. At startup, the objects +# are initialized to instances of TextIOWrapper, but can also be None under +# some circumstances. +# +# To use methods from TextIOWrapper, use an isinstance check to ensure that +# the streams have not been overridden: +# +# if isinstance(sys.stdout, io.TextIOWrapper): +# sys.stdout.reconfigure(...) 
+stdin: TextIO | MaybeNone +stdout: TextIO | MaybeNone +stderr: TextIO | MaybeNone + +if sys.version_info >= (3, 10): + stdlib_module_names: frozenset[str] + +__stdin__: Final[TextIOWrapper | None] # Contains the original value of stdin +__stdout__: Final[TextIOWrapper | None] # Contains the original value of stdout +__stderr__: Final[TextIOWrapper | None] # Contains the original value of stderr +tracebacklimit: int | None +version: str +api_version: int +warnoptions: Any +# Each entry is a tuple of the form (action, message, category, module, +# lineno) +if sys.platform == "win32": + winver: str +_xoptions: dict[Any, Any] + +# Type alias used as a mixin for structseq classes that cannot be instantiated at runtime +# This can't be represented in the type system, so we just use `structseq[Any]` +_UninstantiableStructseq: TypeAlias = structseq[Any] + +flags: _flags + +# This class is not exposed at runtime. It calls itself sys.flags. +# As a tuple, it can have a length between 15 and 18. We don't model +# the exact length here because that varies by patch version due to +# the backported security fix int_max_str_digits. The exact length shouldn't +# be relied upon. See #13031 +# This can be re-visited when typeshed drops support for 3.10, +# at which point all supported versions will include int_max_str_digits +# in all patch versions. +# 3.9 is 15 or 16-tuple +# 3.10 is 16 or 17-tuple +# 3.11+ is an 18-tuple. +@final +@type_check_only +class _flags(_UninstantiableStructseq, tuple[int, ...]): + # `safe_path` was added in py311 + if sys.version_info >= (3, 11): + __match_args__: Final = ( + "debug", + "inspect", + "interactive", + "optimize", + "dont_write_bytecode", + "no_user_site", + "no_site", + "ignore_environment", + "verbose", + "bytes_warning", + "quiet", + "hash_randomization", + "isolated", + "dev_mode", + "utf8_mode", + "warn_default_encoding", + "safe_path", + "int_max_str_digits", + ) + elif sys.version_info >= (3, 10): + __match_args__: Final = ( + "debug", + "inspect", + "interactive", + "optimize", + "dont_write_bytecode", + "no_user_site", + "no_site", + "ignore_environment", + "verbose", + "bytes_warning", + "quiet", + "hash_randomization", + "isolated", + "dev_mode", + "utf8_mode", + "warn_default_encoding", + "int_max_str_digits", + ) + + @property + def debug(self) -> int: ... + @property + def inspect(self) -> int: ... + @property + def interactive(self) -> int: ... + @property + def optimize(self) -> int: ... + @property + def dont_write_bytecode(self) -> int: ... + @property + def no_user_site(self) -> int: ... + @property + def no_site(self) -> int: ... + @property + def ignore_environment(self) -> int: ... + @property + def verbose(self) -> int: ... + @property + def bytes_warning(self) -> int: ... + @property + def quiet(self) -> int: ... + @property + def hash_randomization(self) -> int: ... + @property + def isolated(self) -> int: ... + @property + def dev_mode(self) -> bool: ... + @property + def utf8_mode(self) -> int: ... + if sys.version_info >= (3, 10): + @property + def warn_default_encoding(self) -> int: ... + if sys.version_info >= (3, 11): + @property + def safe_path(self) -> bool: ... + if sys.version_info >= (3, 13): + @property + def gil(self) -> Literal[0, 1]: ... + if sys.version_info >= (3, 14): + @property + def thread_inherit_context(self) -> Literal[0, 1]: ... + @property + def context_aware_warnings(self) -> Literal[0, 1]: ... 
+ # Whether or not this exists on lower versions of Python + # may depend on which patch release you're using + # (it was backported to all Python versions on 3.8+ as a security fix) + # Added in: 3.9.14, 3.10.7 + # and present in all versions of 3.11 and later. + @property + def int_max_str_digits(self) -> int: ... + +float_info: _float_info + +# This class is not exposed at runtime. It calls itself sys.float_info. +@final +@type_check_only +class _float_info(structseq[float], tuple[float, int, int, float, int, int, int, int, float, int, int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ( + "max", + "max_exp", + "max_10_exp", + "min", + "min_exp", + "min_10_exp", + "dig", + "mant_dig", + "epsilon", + "radix", + "rounds", + ) + + @property + def max(self) -> float: ... # DBL_MAX + @property + def max_exp(self) -> int: ... # DBL_MAX_EXP + @property + def max_10_exp(self) -> int: ... # DBL_MAX_10_EXP + @property + def min(self) -> float: ... # DBL_MIN + @property + def min_exp(self) -> int: ... # DBL_MIN_EXP + @property + def min_10_exp(self) -> int: ... # DBL_MIN_10_EXP + @property + def dig(self) -> int: ... # DBL_DIG + @property + def mant_dig(self) -> int: ... # DBL_MANT_DIG + @property + def epsilon(self) -> float: ... # DBL_EPSILON + @property + def radix(self) -> int: ... # FLT_RADIX + @property + def rounds(self) -> int: ... # FLT_ROUNDS + +hash_info: _hash_info + +# This class is not exposed at runtime. It calls itself sys.hash_info. +@final +@type_check_only +class _hash_info(structseq[Any | int], tuple[int, int, int, int, int, str, int, int, int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("width", "modulus", "inf", "nan", "imag", "algorithm", "hash_bits", "seed_bits", "cutoff") + + @property + def width(self) -> int: ... + @property + def modulus(self) -> int: ... + @property + def inf(self) -> int: ... + @property + def nan(self) -> int: ... + @property + def imag(self) -> int: ... + @property + def algorithm(self) -> str: ... + @property + def hash_bits(self) -> int: ... + @property + def seed_bits(self) -> int: ... + @property + def cutoff(self) -> int: ... # undocumented + +implementation: _implementation + +# This class isn't really a thing. At runtime, implementation is an instance +# of types.SimpleNamespace. This allows for better typing. +@type_check_only +class _implementation: + name: str + version: _version_info + hexversion: int + cache_tag: str + # Define __getattr__, as the documentation states: + # > sys.implementation may contain additional attributes specific to the Python implementation. + # > These non-standard attributes must start with an underscore, and are not described here. + def __getattr__(self, name: str) -> Any: ... + +int_info: _int_info + +# This class is not exposed at runtime. It calls itself sys.int_info. +@final +@type_check_only +class _int_info(structseq[int], tuple[int, int, int, int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("bits_per_digit", "sizeof_digit", "default_max_str_digits", "str_digits_check_threshold") + + @property + def bits_per_digit(self) -> int: ... + @property + def sizeof_digit(self) -> int: ... + @property + def default_max_str_digits(self) -> int: ... + @property + def str_digits_check_threshold(self) -> int: ... + +_ThreadInfoName: TypeAlias = Literal["nt", "pthread", "pthread-stubs", "solaris"] +_ThreadInfoLock: TypeAlias = Literal["semaphore", "mutex+cond"] | None + +# This class is not exposed at runtime. It calls itself sys.thread_info. 
+@final +@type_check_only +class _thread_info(_UninstantiableStructseq, tuple[_ThreadInfoName, _ThreadInfoLock, str | None]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("name", "lock", "version") + + @property + def name(self) -> _ThreadInfoName: ... + @property + def lock(self) -> _ThreadInfoLock: ... + @property + def version(self) -> str | None: ... + +thread_info: _thread_info +_ReleaseLevel: TypeAlias = Literal["alpha", "beta", "candidate", "final"] + +# This class is not exposed at runtime. It calls itself sys.version_info. +@final +@type_check_only +class _version_info(_UninstantiableStructseq, tuple[int, int, int, _ReleaseLevel, int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("major", "minor", "micro", "releaselevel", "serial") + + @property + def major(self) -> int: ... + @property + def minor(self) -> int: ... + @property + def micro(self) -> int: ... + @property + def releaselevel(self) -> _ReleaseLevel: ... + @property + def serial(self) -> int: ... + +version_info: _version_info + +def call_tracing(func: Callable[..., _T], args: Any, /) -> _T: ... + +if sys.version_info >= (3, 13): + @deprecated("Deprecated since Python 3.13. Use `_clear_internal_caches()` instead.") + def _clear_type_cache() -> None: ... + +else: + def _clear_type_cache() -> None: ... + +def _current_frames() -> dict[int, FrameType]: ... +def _getframe(depth: int = 0, /) -> FrameType: ... + +# documented -- see https://docs.python.org/3/library/sys.html#sys._current_exceptions +if sys.version_info >= (3, 12): + def _current_exceptions() -> dict[int, BaseException | None]: ... + +else: + def _current_exceptions() -> dict[int, OptExcInfo]: ... + +if sys.version_info >= (3, 12): + def _getframemodulename(depth: int = 0) -> str | None: ... + +def _debugmallocstats() -> None: ... +def __displayhook__(object: object, /) -> None: ... +def __excepthook__(exctype: type[BaseException], value: BaseException, traceback: TracebackType | None, /) -> None: ... +def exc_info() -> OptExcInfo: ... + +if sys.version_info >= (3, 11): + def exception() -> BaseException | None: ... + +def exit(status: _ExitCode = None, /) -> NoReturn: ... + +if sys.platform == "android": # noqa: Y008 + def getandroidapilevel() -> int: ... + +def getallocatedblocks() -> int: ... +def getdefaultencoding() -> Literal["utf-8"]: ... + +if sys.platform != "win32": + def getdlopenflags() -> int: ... + +def getfilesystemencoding() -> LiteralString: ... +def getfilesystemencodeerrors() -> LiteralString: ... +def getrefcount(object: Any, /) -> int: ... +def getrecursionlimit() -> int: ... +def getsizeof(obj: object, default: int = ...) -> int: ... +def getswitchinterval() -> float: ... +def getprofile() -> ProfileFunction | None: ... +def setprofile(function: ProfileFunction | None, /) -> None: ... +def gettrace() -> TraceFunction | None: ... +def settrace(function: TraceFunction | None, /) -> None: ... + +if sys.platform == "win32": + # A tuple of length 5, even though it has more than 5 attributes. + @final + @type_check_only + class _WinVersion(_UninstantiableStructseq, tuple[int, int, int, int, str]): + @property + def major(self) -> int: ... + @property + def minor(self) -> int: ... + @property + def build(self) -> int: ... + @property + def platform(self) -> int: ... + @property + def service_pack(self) -> str: ... + @property + def service_pack_minor(self) -> int: ... + @property + def service_pack_major(self) -> int: ... + @property + def suite_mask(self) -> int: ... 
+ @property + def product_type(self) -> int: ... + @property + def platform_version(self) -> tuple[int, int, int]: ... + + def getwindowsversion() -> _WinVersion: ... + +@overload +def intern(string: LiteralString, /) -> LiteralString: ... +@overload +def intern(string: str, /) -> str: ... # type: ignore[misc] + +__interactivehook__: Callable[[], object] + +if sys.version_info >= (3, 13): + def _is_gil_enabled() -> bool: ... + def _clear_internal_caches() -> None: ... + def _is_interned(string: str, /) -> bool: ... + +def is_finalizing() -> bool: ... +def breakpointhook(*args: Any, **kwargs: Any) -> Any: ... + +__breakpointhook__ = breakpointhook # Contains the original value of breakpointhook + +if sys.platform != "win32": + def setdlopenflags(flags: int, /) -> None: ... + +def setrecursionlimit(limit: int, /) -> None: ... +def setswitchinterval(interval: float, /) -> None: ... +def gettotalrefcount() -> int: ... # Debug builds only + +# Doesn't exist at runtime, but exported in the stubs so pytest etc. can annotate their code more easily. +@type_check_only +class UnraisableHookArgs(Protocol): + exc_type: type[BaseException] + exc_value: BaseException | None + exc_traceback: TracebackType | None + err_msg: str | None + object: _object + +unraisablehook: Callable[[UnraisableHookArgs], Any] + +def __unraisablehook__(unraisable: UnraisableHookArgs, /) -> Any: ... +def addaudithook(hook: Callable[[str, tuple[Any, ...]], Any]) -> None: ... +def audit(event: str, /, *args: Any) -> None: ... + +_AsyncgenHook: TypeAlias = Callable[[AsyncGenerator[Any, Any]], None] | None + +# This class is not exposed at runtime. It calls itself builtins.asyncgen_hooks. +@final +@type_check_only +class _asyncgen_hooks(structseq[_AsyncgenHook], tuple[_AsyncgenHook, _AsyncgenHook]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("firstiter", "finalizer") + + @property + def firstiter(self) -> _AsyncgenHook: ... + @property + def finalizer(self) -> _AsyncgenHook: ... + +def get_asyncgen_hooks() -> _asyncgen_hooks: ... +def set_asyncgen_hooks(firstiter: _AsyncgenHook = ..., finalizer: _AsyncgenHook = ...) -> None: ... + +if sys.platform == "win32": + if sys.version_info >= (3, 13): + @deprecated( + "Deprecated since Python 3.13; will be removed in Python 3.16. " + "Use the `PYTHONLEGACYWINDOWSFSENCODING` environment variable instead." + ) + def _enablelegacywindowsfsencoding() -> None: ... + else: + def _enablelegacywindowsfsencoding() -> None: ... + +def get_coroutine_origin_tracking_depth() -> int: ... +def set_coroutine_origin_tracking_depth(depth: int) -> None: ... + +# The following two functions were added in 3.11.0, 3.10.7, and 3.9.14, +# as part of the response to CVE-2020-10735 +def set_int_max_str_digits(maxdigits: int) -> None: ... +def get_int_max_str_digits() -> int: ... + +if sys.version_info >= (3, 12): + if sys.version_info >= (3, 13): + def getunicodeinternedsize(*, _only_immortal: bool = False) -> int: ... + else: + def getunicodeinternedsize() -> int: ... + + def deactivate_stack_trampoline() -> None: ... + def is_stack_trampoline_active() -> bool: ... + # It always exists, but raises on non-linux platforms: + if sys.platform == "linux": + def activate_stack_trampoline(backend: str, /) -> None: ... + else: + def activate_stack_trampoline(backend: str, /) -> NoReturn: ... + + from . import _monitoring + + monitoring = _monitoring + +if sys.version_info >= (3, 14): + def is_remote_debug_enabled() -> bool: ... + def remote_exec(pid: int, script: StrOrBytesPath) -> None: ... 
+ def _is_immortal(op: object, /) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sys/_monitoring.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sys/_monitoring.pyi new file mode 100644 index 0000000..db799e6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sys/_monitoring.pyi @@ -0,0 +1,68 @@ +# This py312+ module provides annotations for `sys.monitoring`. +# It's named `sys._monitoring` in typeshed, +# because trying to import `sys.monitoring` will fail at runtime! +# At runtime, `sys.monitoring` has the unique status +# of being a `types.ModuleType` instance that cannot be directly imported, +# and exists in the `sys`-module namespace despite `sys` not being a package. + +import sys +from collections.abc import Callable +from types import CodeType +from typing import Any, Final, type_check_only +from typing_extensions import deprecated + +DEBUGGER_ID: Final = 0 +COVERAGE_ID: Final = 1 +PROFILER_ID: Final = 2 +OPTIMIZER_ID: Final = 5 + +def use_tool_id(tool_id: int, name: str, /) -> None: ... + +if sys.version_info >= (3, 14): + def clear_tool_id(tool_id: int, /) -> None: ... + +def free_tool_id(tool_id: int, /) -> None: ... +def get_tool(tool_id: int, /) -> str | None: ... + +events: Final[_events] + +@type_check_only +class _events: + CALL: Final[int] + C_RAISE: Final[int] + C_RETURN: Final[int] + EXCEPTION_HANDLED: Final[int] + INSTRUCTION: Final[int] + JUMP: Final[int] + LINE: Final[int] + NO_EVENTS: Final[int] + PY_RESUME: Final[int] + PY_RETURN: Final[int] + PY_START: Final[int] + PY_THROW: Final[int] + PY_UNWIND: Final[int] + PY_YIELD: Final[int] + RAISE: Final[int] + RERAISE: Final[int] + STOP_ITERATION: Final[int] + if sys.version_info >= (3, 14): + BRANCH_LEFT: Final[int] + BRANCH_RIGHT: Final[int] + + @property + @deprecated("Deprecated since Python 3.14. Use `BRANCH_LEFT` or `BRANCH_RIGHT` instead.") + def BRANCH(self) -> int: ... + + else: + BRANCH: Final[int] + +def get_events(tool_id: int, /) -> int: ... +def set_events(tool_id: int, event_set: int, /) -> None: ... +def get_local_events(tool_id: int, code: CodeType, /) -> int: ... +def set_local_events(tool_id: int, code: CodeType, event_set: int, /) -> int: ... +def restart_events() -> None: ... + +DISABLE: Final[object] +MISSING: Final[object] + +def register_callback(tool_id: int, event: int, func: Callable[..., Any] | None, /) -> Callable[..., Any] | None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sysconfig.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sysconfig.pyi new file mode 100644 index 0000000..c641922 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/sysconfig.pyi @@ -0,0 +1,50 @@ +import sys +from typing import IO, Any, Literal, overload +from typing_extensions import LiteralString, deprecated + +__all__ = [ + "get_config_h_filename", + "get_config_var", + "get_config_vars", + "get_makefile_filename", + "get_path", + "get_path_names", + "get_paths", + "get_platform", + "get_python_version", + "get_scheme_names", + "parse_config_h", +] + +@overload +@deprecated("SO is deprecated, use EXT_SUFFIX. Support is removed in Python 3.11") +def get_config_var(name: Literal["SO"]) -> Any: ... +@overload +def get_config_var(name: str) -> Any: ... +@overload +def get_config_vars() -> dict[str, Any]: ... +@overload +def get_config_vars(arg: str, /, *args: str) -> list[Any]: ... +def get_scheme_names() -> tuple[str, ...]: ... 
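A hedged sketch of driving the sys.monitoring API whose stub (sys/_monitoring.pyi) appears just above; it requires Python 3.12+, and the tool name and callback below are illustrative, not taken from this repository.

import sys
import types

TOOL = sys.monitoring.PROFILER_ID
sys.monitoring.use_tool_id(TOOL, "example-profiler")

def on_py_start(code: types.CodeType, instruction_offset: int) -> None:
    # Called whenever a Python function or frame starts executing.
    print("entering", code.co_qualname)

sys.monitoring.register_callback(TOOL, sys.monitoring.events.PY_START, on_py_start)
sys.monitoring.set_events(TOOL, sys.monitoring.events.PY_START)

# ... run the code being observed ...

sys.monitoring.set_events(TOOL, sys.monitoring.events.NO_EVENTS)
sys.monitoring.free_tool_id(TOOL)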
+ +if sys.version_info >= (3, 10): + def get_default_scheme() -> LiteralString: ... + def get_preferred_scheme(key: Literal["prefix", "home", "user"]) -> LiteralString: ... + # Documented -- see https://docs.python.org/3/library/sysconfig.html#sysconfig._get_preferred_schemes + def _get_preferred_schemes() -> dict[Literal["prefix", "home", "user"], LiteralString]: ... + +def get_path_names() -> tuple[str, ...]: ... +def get_path(name: str, scheme: str = ..., vars: dict[str, Any] | None = None, expand: bool = True) -> str: ... +def get_paths(scheme: str = ..., vars: dict[str, Any] | None = None, expand: bool = True) -> dict[str, str]: ... +def get_python_version() -> str: ... +def get_platform() -> str: ... + +if sys.version_info >= (3, 11): + def is_python_build(check_home: object = None) -> bool: ... + +else: + def is_python_build(check_home: bool = False) -> bool: ... + +def parse_config_h(fp: IO[Any], vars: dict[str, Any] | None = None) -> dict[str, Any]: ... +def get_config_h_filename() -> str: ... +def get_makefile_filename() -> str: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/syslog.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/syslog.pyi new file mode 100644 index 0000000..1e0d0d3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/syslog.pyi @@ -0,0 +1,57 @@ +import sys +from typing import Final, overload + +if sys.platform != "win32": + LOG_ALERT: Final = 1 + LOG_AUTH: Final = 32 + LOG_AUTHPRIV: Final = 80 + LOG_CONS: Final = 2 + LOG_CRIT: Final = 2 + LOG_CRON: Final = 72 + LOG_DAEMON: Final = 24 + LOG_DEBUG: Final = 7 + LOG_EMERG: Final = 0 + LOG_ERR: Final = 3 + LOG_INFO: Final = 6 + LOG_KERN: Final = 0 + LOG_LOCAL0: Final = 128 + LOG_LOCAL1: Final = 136 + LOG_LOCAL2: Final = 144 + LOG_LOCAL3: Final = 152 + LOG_LOCAL4: Final = 160 + LOG_LOCAL5: Final = 168 + LOG_LOCAL6: Final = 176 + LOG_LOCAL7: Final = 184 + LOG_LPR: Final = 48 + LOG_MAIL: Final = 16 + LOG_NDELAY: Final = 8 + LOG_NEWS: Final = 56 + LOG_NOTICE: Final = 5 + LOG_NOWAIT: Final = 16 + LOG_ODELAY: Final = 4 + LOG_PERROR: Final = 32 + LOG_PID: Final = 1 + LOG_SYSLOG: Final = 40 + LOG_USER: Final = 8 + LOG_UUCP: Final = 64 + LOG_WARNING: Final = 4 + + if sys.version_info >= (3, 13): + LOG_FTP: Final = 88 + + if sys.platform == "darwin": + LOG_INSTALL: Final = 112 + LOG_LAUNCHD: Final = 192 + LOG_NETINFO: Final = 96 + LOG_RAS: Final = 120 + LOG_REMOTEAUTH: Final = 104 + + def LOG_MASK(pri: int, /) -> int: ... + def LOG_UPTO(pri: int, /) -> int: ... + def closelog() -> None: ... + def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: ... + def setlogmask(maskpri: int, /) -> int: ... + @overload + def syslog(priority: int, message: str) -> None: ... + @overload + def syslog(message: str) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tabnanny.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tabnanny.pyi new file mode 100644 index 0000000..8a8592f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tabnanny.pyi @@ -0,0 +1,16 @@ +from _typeshed import StrOrBytesPath +from collections.abc import Iterable + +__all__ = ["check", "NannyNag", "process_tokens"] + +verbose: int +filename_only: int + +class NannyNag(Exception): + def __init__(self, lineno: int, msg: str, line: str) -> None: ... + def get_lineno(self) -> int: ... + def get_msg(self) -> str: ... + def get_line(self) -> str: ... + +def check(file: StrOrBytesPath) -> None: ... 
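A short, hedged sketch of the sysconfig calls stubbed in the sysconfig.pyi hunk above; the printed values naturally vary by interpreter and platform.

import sysconfig

print(sysconfig.get_python_version())          # e.g. "3.12"
print(sysconfig.get_platform())                # e.g. "linux-x86_64"
print(sysconfig.get_config_var("EXT_SUFFIX"))  # C-extension filename suffix
print(sysconfig.get_paths()["purelib"])        # site-packages dir of the default scheme
print(sysconfig.get_scheme_names())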
+def process_tokens(tokens: Iterable[tuple[int, str, tuple[int, int], tuple[int, int], str]]) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tarfile.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tarfile.pyi new file mode 100644 index 0000000..f6623ea --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tarfile.pyi @@ -0,0 +1,839 @@ +import bz2 +import io +import sys +from _typeshed import ReadableBuffer, StrOrBytesPath, StrPath, SupportsRead, WriteableBuffer +from builtins import list as _list # aliases to avoid name clashes with fields named "type" or "list" +from collections.abc import Callable, Iterable, Iterator, Mapping +from gzip import _ReadableFileobj as _GzipReadableFileobj, _WritableFileobj as _GzipWritableFileobj +from types import TracebackType +from typing import IO, ClassVar, Final, Literal, Protocol, overload, type_check_only +from typing_extensions import Self, TypeAlias, deprecated + +if sys.version_info >= (3, 14): + from compression.zstd import ZstdDict + +__all__ = [ + "TarFile", + "TarInfo", + "is_tarfile", + "TarError", + "ReadError", + "CompressionError", + "StreamError", + "ExtractError", + "HeaderError", + "ENCODING", + "USTAR_FORMAT", + "GNU_FORMAT", + "PAX_FORMAT", + "DEFAULT_FORMAT", + "open", +] +if sys.version_info >= (3, 12): + __all__ += [ + "fully_trusted_filter", + "data_filter", + "tar_filter", + "FilterError", + "AbsoluteLinkError", + "OutsideDestinationError", + "SpecialFileError", + "AbsolutePathError", + "LinkOutsideDestinationError", + ] +if sys.version_info >= (3, 13): + __all__ += ["LinkFallbackError"] + +_FilterFunction: TypeAlias = Callable[[TarInfo, str], TarInfo | None] +_TarfileFilter: TypeAlias = Literal["fully_trusted", "tar", "data"] | _FilterFunction + +@type_check_only +class _Fileobj(Protocol): + def read(self, size: int, /) -> bytes: ... + def write(self, b: bytes, /) -> object: ... + def tell(self) -> int: ... + def seek(self, pos: int, /) -> object: ... + def close(self) -> object: ... + # Optional fields: + # name: str | bytes + # mode: Literal["rb", "r+b", "wb", "xb"] + +@type_check_only +class _Bz2ReadableFileobj(bz2._ReadableFileobj): + def close(self) -> object: ... + +@type_check_only +class _Bz2WritableFileobj(bz2._WritableFileobj): + def close(self) -> object: ... 
+ +# tar constants +NUL: Final = b"\0" +BLOCKSIZE: Final = 512 +RECORDSIZE: Final = 10240 +GNU_MAGIC: Final = b"ustar \0" +POSIX_MAGIC: Final = b"ustar\x0000" + +LENGTH_NAME: Final = 100 +LENGTH_LINK: Final = 100 +LENGTH_PREFIX: Final = 155 + +REGTYPE: Final = b"0" +AREGTYPE: Final = b"\0" +LNKTYPE: Final = b"1" +SYMTYPE: Final = b"2" +CHRTYPE: Final = b"3" +BLKTYPE: Final = b"4" +DIRTYPE: Final = b"5" +FIFOTYPE: Final = b"6" +CONTTYPE: Final = b"7" + +GNUTYPE_LONGNAME: Final = b"L" +GNUTYPE_LONGLINK: Final = b"K" +GNUTYPE_SPARSE: Final = b"S" + +XHDTYPE: Final = b"x" +XGLTYPE: Final = b"g" +SOLARIS_XHDTYPE: Final = b"X" + +_TarFormat: TypeAlias = Literal[0, 1, 2] # does not exist at runtime +USTAR_FORMAT: Final = 0 +GNU_FORMAT: Final = 1 +PAX_FORMAT: Final = 2 +DEFAULT_FORMAT: Final = PAX_FORMAT + +# tarfile constants + +SUPPORTED_TYPES: Final[tuple[bytes, ...]] +REGULAR_TYPES: Final[tuple[bytes, ...]] +GNU_TYPES: Final[tuple[bytes, ...]] +PAX_FIELDS: Final[tuple[str, ...]] +PAX_NUMBER_FIELDS: Final[dict[str, type]] +PAX_NAME_FIELDS: Final[set[str]] + +ENCODING: Final[str] + +class ExFileObject(io.BufferedReader): # undocumented + def __init__(self, tarfile: TarFile, tarinfo: TarInfo) -> None: ... + +class TarFile: + OPEN_METH: ClassVar[Mapping[str, str]] + name: StrOrBytesPath | None + mode: Literal["r", "a", "w", "x"] + fileobj: _Fileobj | None + format: _TarFormat | None + tarinfo: type[TarInfo] + dereference: bool | None + ignore_zeros: bool | None + encoding: str | None + errors: str + fileobject: type[ExFileObject] # undocumented + pax_headers: Mapping[str, str] | None + debug: int | None + errorlevel: int | None + offset: int # undocumented + extraction_filter: _FilterFunction | None + if sys.version_info >= (3, 13): + stream: bool + def __init__( + self, + name: StrOrBytesPath | None = None, + mode: Literal["r", "a", "w", "x"] = "r", + fileobj: _Fileobj | None = None, + format: int | None = None, + tarinfo: type[TarInfo] | None = None, + dereference: bool | None = None, + ignore_zeros: bool | None = None, + encoding: str | None = None, + errors: str = "surrogateescape", + pax_headers: Mapping[str, str] | None = None, + debug: int | None = None, + errorlevel: int | None = None, + copybufsize: int | None = None, # undocumented + stream: bool = False, + ) -> None: ... + else: + def __init__( + self, + name: StrOrBytesPath | None = None, + mode: Literal["r", "a", "w", "x"] = "r", + fileobj: _Fileobj | None = None, + format: int | None = None, + tarinfo: type[TarInfo] | None = None, + dereference: bool | None = None, + ignore_zeros: bool | None = None, + encoding: str | None = None, + errors: str = "surrogateescape", + pax_headers: Mapping[str, str] | None = None, + debug: int | None = None, + errorlevel: int | None = None, + copybufsize: int | None = None, # undocumented + ) -> None: ... + + def __enter__(self) -> Self: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __iter__(self) -> Iterator[TarInfo]: ... 
+ @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | None = None, + mode: Literal["r", "r:*", "r:", "r:gz", "r:bz2", "r:xz"] = "r", + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + if sys.version_info >= (3, 14): + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | None, + mode: Literal["r:zst"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + level: None = None, + options: Mapping[int, int] | None = None, + zstd_dict: ZstdDict | None = None, + ) -> Self: ... + + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | None, + mode: Literal["x", "x:", "a", "a:", "w", "w:", "w:tar"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | None = None, + *, + mode: Literal["x", "x:", "a", "a:", "w", "w:", "w:tar"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | None, + mode: Literal["x:gz", "x:bz2", "w:gz", "w:bz2"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + compresslevel: int = 9, + ) -> Self: ... + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | None = None, + *, + mode: Literal["x:gz", "x:bz2", "w:gz", "w:bz2"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + compresslevel: int = 9, + ) -> Self: ... 
+ @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | None, + mode: Literal["x:xz", "w:xz"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + preset: Literal[0, 1, 2, 3, 4, 5, 6, 7, 8, 9] | None = ..., + ) -> Self: ... + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | None = None, + *, + mode: Literal["x:xz", "w:xz"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + preset: Literal[0, 1, 2, 3, 4, 5, 6, 7, 8, 9] | None = ..., + ) -> Self: ... + if sys.version_info >= (3, 14): + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | None, + mode: Literal["x:zst", "w:zst"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + options: Mapping[int, int] | None = None, + zstd_dict: ZstdDict | None = None, + ) -> Self: ... + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | None = None, + *, + mode: Literal["x:zst", "w:zst"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + options: Mapping[int, int] | None = None, + zstd_dict: ZstdDict | None = None, + ) -> Self: ... + + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | ReadableBuffer | None, + mode: Literal["r|*", "r|", "r|gz", "r|bz2", "r|xz", "r|zst"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | ReadableBuffer | None = None, + *, + mode: Literal["r|*", "r|", "r|gz", "r|bz2", "r|xz", "r|zst"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... 
+ @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | WriteableBuffer | None, + mode: Literal["w|", "w|xz", "w|zst"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | WriteableBuffer | None = None, + *, + mode: Literal["w|", "w|xz", "w|zst"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | WriteableBuffer | None, + mode: Literal["w|gz", "w|bz2"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + compresslevel: int = 9, + ) -> Self: ... + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | WriteableBuffer | None = None, + *, + mode: Literal["w|gz", "w|bz2"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + compresslevel: int = 9, + ) -> Self: ... + @classmethod + def taropen( + cls, + name: StrOrBytesPath | None, + mode: Literal["r", "a", "w", "x"] = "r", + fileobj: _Fileobj | None = None, + *, + compresslevel: int = ..., + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + @overload + @classmethod + def gzopen( + cls, + name: StrOrBytesPath | None, + mode: Literal["r"] = "r", + fileobj: _GzipReadableFileobj | None = None, + compresslevel: int = 9, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + @overload + @classmethod + def gzopen( + cls, + name: StrOrBytesPath | None, + mode: Literal["w", "x"], + fileobj: _GzipWritableFileobj | None = None, + compresslevel: int = 9, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... 
+ @overload + @classmethod + def bz2open( + cls, + name: StrOrBytesPath | None, + mode: Literal["w", "x"], + fileobj: _Bz2WritableFileobj | None = None, + compresslevel: int = 9, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + @overload + @classmethod + def bz2open( + cls, + name: StrOrBytesPath | None, + mode: Literal["r"] = "r", + fileobj: _Bz2ReadableFileobj | None = None, + compresslevel: int = 9, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + @classmethod + def xzopen( + cls, + name: StrOrBytesPath | None, + mode: Literal["r", "w", "x"] = "r", + fileobj: IO[bytes] | None = None, + preset: int | None = None, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + if sys.version_info >= (3, 14): + @overload + @classmethod + def zstopen( + cls, + name: StrOrBytesPath | None, + mode: Literal["r"] = "r", + fileobj: IO[bytes] | None = None, + level: None = None, + options: Mapping[int, int] | None = None, + zstd_dict: ZstdDict | None = None, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + @overload + @classmethod + def zstopen( + cls, + name: StrOrBytesPath | None, + mode: Literal["w", "x"], + fileobj: IO[bytes] | None = None, + level: int | None = None, + options: Mapping[int, int] | None = None, + zstd_dict: ZstdDict | None = None, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + + def getmember(self, name: str) -> TarInfo: ... + def getmembers(self) -> _list[TarInfo]: ... + def getnames(self) -> _list[str]: ... + def list(self, verbose: bool = True, *, members: Iterable[TarInfo] | None = None) -> None: ... + def next(self) -> TarInfo | None: ... + # Calling this method without `filter` is deprecated, but it may be set either on the class or in an + # individual call, so we can't mark it as @deprecated here. + def extractall( + self, + path: StrOrBytesPath = ".", + members: Iterable[TarInfo] | None = None, + *, + numeric_owner: bool = False, + filter: _TarfileFilter | None = None, + ) -> None: ... + # Same situation as for `extractall`. + def extract( + self, + member: str | TarInfo, + path: StrOrBytesPath = "", + set_attrs: bool = True, + *, + numeric_owner: bool = False, + filter: _TarfileFilter | None = None, + ) -> None: ... 
+ def _extract_member( + self, + tarinfo: TarInfo, + targetpath: str, + set_attrs: bool = True, + numeric_owner: bool = False, + *, + filter_function: _FilterFunction | None = None, + extraction_root: str | None = None, + ) -> None: ... # undocumented + def extractfile(self, member: str | TarInfo) -> IO[bytes] | None: ... + def makedir(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def makefile(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def makeunknown(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def makefifo(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def makedev(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def makelink(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def makelink_with_filter( + self, tarinfo: TarInfo, targetpath: StrOrBytesPath, filter_function: _FilterFunction, extraction_root: str + ) -> None: ... # undocumented + def chown(self, tarinfo: TarInfo, targetpath: StrOrBytesPath, numeric_owner: bool) -> None: ... # undocumented + def chmod(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def utime(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def add( + self, + name: StrPath, + arcname: StrPath | None = None, + recursive: bool = True, + *, + filter: Callable[[TarInfo], TarInfo | None] | None = None, + ) -> None: ... + def addfile(self, tarinfo: TarInfo, fileobj: SupportsRead[bytes] | None = None) -> None: ... + def gettarinfo( + self, name: StrOrBytesPath | None = None, arcname: str | None = None, fileobj: IO[bytes] | None = None + ) -> TarInfo: ... + def close(self) -> None: ... + +open = TarFile.open + +def is_tarfile(name: StrOrBytesPath | IO[bytes]) -> bool: ... + +class TarError(Exception): ... +class ReadError(TarError): ... +class CompressionError(TarError): ... +class StreamError(TarError): ... +class ExtractError(TarError): ... +class HeaderError(TarError): ... + +class FilterError(TarError): + # This attribute is only set directly on the subclasses, but the documentation guarantees + # that it is always present on FilterError. + tarinfo: TarInfo + +class AbsolutePathError(FilterError): + def __init__(self, tarinfo: TarInfo) -> None: ... + +class OutsideDestinationError(FilterError): + def __init__(self, tarinfo: TarInfo, path: str) -> None: ... + +class SpecialFileError(FilterError): + def __init__(self, tarinfo: TarInfo) -> None: ... + +class AbsoluteLinkError(FilterError): + def __init__(self, tarinfo: TarInfo) -> None: ... + +class LinkOutsideDestinationError(FilterError): + def __init__(self, tarinfo: TarInfo, path: str) -> None: ... + +class LinkFallbackError(FilterError): + def __init__(self, tarinfo: TarInfo, path: str) -> None: ... + +def fully_trusted_filter(member: TarInfo, dest_path: str) -> TarInfo: ... +def tar_filter(member: TarInfo, dest_path: str) -> TarInfo: ... +def data_filter(member: TarInfo, dest_path: str) -> TarInfo: ... 
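The extraction-filter API stubbed just above (fully_trusted_filter, tar_filter, data_filter, and the FilterError hierarchy) is normally driven through the filter= argument of extractall(). A minimal sketch, assuming Python 3.12+ (or an interpreter with the backported filter= parameter); "example.tar.gz" and "out" are placeholder paths:

import tarfile

# Extract with the stdlib "data" filter so absolute paths, links escaping the
# destination, and special files are rejected instead of silently extracted.
# The archive and destination names here are placeholders.
try:
    with tarfile.open("example.tar.gz", mode="r:gz") as tf:
        tf.extractall(path="out", filter="data")
except tarfile.FilterError as exc:
    # exc.tarinfo identifies the member the filter rejected.
    print(f"refused to extract {exc.tarinfo.name}: {exc}")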
+ +class TarInfo: + __slots__ = ( + "name", + "mode", + "uid", + "gid", + "size", + "mtime", + "chksum", + "type", + "linkname", + "uname", + "gname", + "devmajor", + "devminor", + "offset", + "offset_data", + "pax_headers", + "sparse", + "_tarfile", + "_sparse_structs", + "_link_target", + ) + name: str + path: str + size: int + mtime: int | float + chksum: int + devmajor: int + devminor: int + offset: int + offset_data: int + sparse: bytes | None + mode: int + type: bytes # usually one of the TYPE constants, but could be an arbitrary byte + linkname: str + uid: int + gid: int + uname: str + gname: str + pax_headers: Mapping[str, str] + def __init__(self, name: str = "") -> None: ... + if sys.version_info >= (3, 13): + @property + @deprecated("Deprecated since Python 3.13; will be removed in Python 3.16.") + def tarfile(self) -> TarFile | None: ... + @tarfile.setter + @deprecated("Deprecated since Python 3.13; will be removed in Python 3.16.") + def tarfile(self, tarfile: TarFile | None) -> None: ... + else: + tarfile: TarFile | None + + @classmethod + def frombuf(cls, buf: bytes | bytearray, encoding: str, errors: str) -> Self: ... + @classmethod + def fromtarfile(cls, tarfile: TarFile) -> Self: ... + @property + def linkpath(self) -> str: ... + @linkpath.setter + def linkpath(self, linkname: str) -> None: ... + def replace( + self, + *, + name: str = ..., + mtime: float = ..., + mode: int = ..., + linkname: str = ..., + uid: int = ..., + gid: int = ..., + uname: str = ..., + gname: str = ..., + deep: bool = True, + ) -> Self: ... + def get_info(self) -> Mapping[str, str | int | bytes | Mapping[str, str]]: ... + def tobuf(self, format: _TarFormat | None = 2, encoding: str | None = "utf-8", errors: str = "surrogateescape") -> bytes: ... + def create_ustar_header( + self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str, errors: str + ) -> bytes: ... + def create_gnu_header( + self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str, errors: str + ) -> bytes: ... + def create_pax_header(self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str) -> bytes: ... + @classmethod + def create_pax_global_header(cls, pax_headers: Mapping[str, str]) -> bytes: ... + def isfile(self) -> bool: ... + def isreg(self) -> bool: ... + def issparse(self) -> bool: ... + def isdir(self) -> bool: ... + def issym(self) -> bool: ... + def islnk(self) -> bool: ... + def ischr(self) -> bool: ... + def isblk(self) -> bool: ... + def isfifo(self) -> bool: ... + def isdev(self) -> bool: ... 
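To round out the TarFile/TarInfo signatures above, a minimal write-and-inspect sketch; "src" and "bundle.tar.gz" are placeholder paths assumed to exist or be writable:

import tarfile

# "w:gz" selects gzip compression, add() stores a path under an
# archive-relative name, and getmembers()/TarInfo let you inspect the result.
with tarfile.open("bundle.tar.gz", mode="w:gz", compresslevel=6) as tf:
    tf.add("src", arcname="src", recursive=True)

with tarfile.open("bundle.tar.gz", mode="r:gz") as tf:
    for member in tf.getmembers():
        kind = "dir" if member.isdir() else "file" if member.isfile() else "other"
        print(f"{member.name}\t{member.size}\t{kind}")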
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/telnetlib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/telnetlib.pyi new file mode 100644 index 0000000..88aa43d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/telnetlib.pyi @@ -0,0 +1,123 @@ +import socket +from collections.abc import Callable, MutableSequence, Sequence +from re import Match, Pattern +from types import TracebackType +from typing import Any, Final +from typing_extensions import Self + +__all__ = ["Telnet"] + +DEBUGLEVEL: Final = 0 +TELNET_PORT: Final = 23 + +IAC: Final = b"\xff" +DONT: Final = b"\xfe" +DO: Final = b"\xfd" +WONT: Final = b"\xfc" +WILL: Final = b"\xfb" +theNULL: Final = b"\x00" + +SE: Final = b"\xf0" +NOP: Final = b"\xf1" +DM: Final = b"\xf2" +BRK: Final = b"\xf3" +IP: Final = b"\xf4" +AO: Final = b"\xf5" +AYT: Final = b"\xf6" +EC: Final = b"\xf7" +EL: Final = b"\xf8" +GA: Final = b"\xf9" +SB: Final = b"\xfa" + +BINARY: Final = b"\x00" +ECHO: Final = b"\x01" +RCP: Final = b"\x02" +SGA: Final = b"\x03" +NAMS: Final = b"\x04" +STATUS: Final = b"\x05" +TM: Final = b"\x06" +RCTE: Final = b"\x07" +NAOL: Final = b"\x08" +NAOP: Final = b"\t" +NAOCRD: Final = b"\n" +NAOHTS: Final = b"\x0b" +NAOHTD: Final = b"\x0c" +NAOFFD: Final = b"\r" +NAOVTS: Final = b"\x0e" +NAOVTD: Final = b"\x0f" +NAOLFD: Final = b"\x10" +XASCII: Final = b"\x11" +LOGOUT: Final = b"\x12" +BM: Final = b"\x13" +DET: Final = b"\x14" +SUPDUP: Final = b"\x15" +SUPDUPOUTPUT: Final = b"\x16" +SNDLOC: Final = b"\x17" +TTYPE: Final = b"\x18" +EOR: Final = b"\x19" +TUID: Final = b"\x1a" +OUTMRK: Final = b"\x1b" +TTYLOC: Final = b"\x1c" +VT3270REGIME: Final = b"\x1d" +X3PAD: Final = b"\x1e" +NAWS: Final = b"\x1f" +TSPEED: Final = b" " +LFLOW: Final = b"!" +LINEMODE: Final = b'"' +XDISPLOC: Final = b"#" +OLD_ENVIRON: Final = b"$" +AUTHENTICATION: Final = b"%" +ENCRYPT: Final = b"&" +NEW_ENVIRON: Final = b"'" + +TN3270E: Final = b"(" +XAUTH: Final = b")" +CHARSET: Final = b"*" +RSP: Final = b"+" +COM_PORT_OPTION: Final = b"," +SUPPRESS_LOCAL_ECHO: Final = b"-" +TLS: Final = b"." +KERMIT: Final = b"/" +SEND_URL: Final = b"0" +FORWARD_X: Final = b"1" +PRAGMA_LOGON: Final = b"\x8a" +SSPI_LOGON: Final = b"\x8b" +PRAGMA_HEARTBEAT: Final = b"\x8c" +EXOPL: Final = b"\xff" +NOOPT: Final = b"\x00" + +class Telnet: + host: str | None # undocumented + sock: socket.socket | None # undocumented + def __init__(self, host: str | None = None, port: int = 0, timeout: float = ...) -> None: ... + def open(self, host: str, port: int = 0, timeout: float = ...) -> None: ... + def msg(self, msg: str, *args: Any) -> None: ... + def set_debuglevel(self, debuglevel: int) -> None: ... + def close(self) -> None: ... + def get_socket(self) -> socket.socket: ... + def fileno(self) -> int: ... + def write(self, buffer: bytes) -> None: ... + def read_until(self, match: bytes, timeout: float | None = None) -> bytes: ... + def read_all(self) -> bytes: ... + def read_some(self) -> bytes: ... + def read_very_eager(self) -> bytes: ... + def read_eager(self) -> bytes: ... + def read_lazy(self) -> bytes: ... + def read_very_lazy(self) -> bytes: ... + def read_sb_data(self) -> bytes: ... + def set_option_negotiation_callback(self, callback: Callable[[socket.socket, bytes, bytes], object] | None) -> None: ... + def process_rawq(self) -> None: ... + def rawq_getchar(self) -> bytes: ... + def fill_rawq(self) -> None: ... + def sock_avail(self) -> bool: ... + def interact(self) -> None: ... + def mt_interact(self) -> None: ... 
+ def listener(self) -> None: ... + def expect( + self, list: MutableSequence[Pattern[bytes] | bytes] | Sequence[Pattern[bytes]], timeout: float | None = None + ) -> tuple[int, Match[bytes] | None, bytes]: ... + def __enter__(self) -> Self: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __del__(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tempfile.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tempfile.pyi new file mode 100644 index 0000000..2649107 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tempfile.pyi @@ -0,0 +1,479 @@ +import io +import sys +from _typeshed import ( + BytesPath, + GenericPath, + OpenBinaryMode, + OpenBinaryModeReading, + OpenBinaryModeUpdating, + OpenBinaryModeWriting, + OpenTextMode, + ReadableBuffer, + StrPath, + WriteableBuffer, +) +from collections.abc import Iterable, Iterator +from types import GenericAlias, TracebackType +from typing import IO, Any, AnyStr, Final, Generic, Literal, overload +from typing_extensions import Self, deprecated + +__all__ = [ + "NamedTemporaryFile", + "TemporaryFile", + "SpooledTemporaryFile", + "TemporaryDirectory", + "mkstemp", + "mkdtemp", + "mktemp", + "TMP_MAX", + "gettempprefix", + "tempdir", + "gettempdir", + "gettempprefixb", + "gettempdirb", +] + +# global variables +TMP_MAX: Final[int] +tempdir: str | None +template: str + +if sys.version_info >= (3, 12): + @overload + def NamedTemporaryFile( + mode: OpenTextMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, + *, + errors: str | None = None, + delete_on_close: bool = True, + ) -> _TemporaryFileWrapper[str]: ... + @overload + def NamedTemporaryFile( + mode: OpenBinaryMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, + *, + errors: str | None = None, + delete_on_close: bool = True, + ) -> _TemporaryFileWrapper[bytes]: ... + @overload + def NamedTemporaryFile( + mode: str = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, + *, + errors: str | None = None, + delete_on_close: bool = True, + ) -> _TemporaryFileWrapper[Any]: ... + +else: + @overload + def NamedTemporaryFile( + mode: OpenTextMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, + *, + errors: str | None = None, + ) -> _TemporaryFileWrapper[str]: ... + @overload + def NamedTemporaryFile( + mode: OpenBinaryMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, + *, + errors: str | None = None, + ) -> _TemporaryFileWrapper[bytes]: ... 
+ @overload + def NamedTemporaryFile( + mode: str = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, + *, + errors: str | None = None, + ) -> _TemporaryFileWrapper[Any]: ... + +if sys.platform == "win32": + TemporaryFile = NamedTemporaryFile +else: + # See the comments for builtins.open() for an explanation of the overloads. + @overload + def TemporaryFile( + mode: OpenTextMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + *, + errors: str | None = None, + ) -> io.TextIOWrapper: ... + @overload + def TemporaryFile( + mode: OpenBinaryMode, + buffering: Literal[0], + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + *, + errors: str | None = None, + ) -> io.FileIO: ... + @overload + def TemporaryFile( + *, + buffering: Literal[0], + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + errors: str | None = None, + ) -> io.FileIO: ... + @overload + def TemporaryFile( + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + *, + errors: str | None = None, + ) -> io.BufferedWriter: ... + @overload + def TemporaryFile( + mode: OpenBinaryModeReading, + buffering: Literal[-1, 1] = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + *, + errors: str | None = None, + ) -> io.BufferedReader: ... + @overload + def TemporaryFile( + mode: OpenBinaryModeUpdating = "w+b", + buffering: Literal[-1, 1] = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + *, + errors: str | None = None, + ) -> io.BufferedRandom: ... + @overload + def TemporaryFile( + mode: str = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + *, + errors: str | None = None, + ) -> IO[Any]: ... + +class _TemporaryFileWrapper(IO[AnyStr]): + file: IO[AnyStr] # io.TextIOWrapper, io.BufferedReader or io.BufferedWriter + name: str + delete: bool + if sys.version_info >= (3, 12): + def __init__(self, file: IO[AnyStr], name: str, delete: bool = True, delete_on_close: bool = True) -> None: ... + else: + def __init__(self, file: IO[AnyStr], name: str, delete: bool = True) -> None: ... + + def __enter__(self) -> Self: ... + def __exit__(self, exc: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def close(self) -> None: ... + # These methods don't exist directly on this object, but + # are delegated to the underlying IO object through __getattr__. + # We need to add them here so that this class is concrete. + def __iter__(self) -> Iterator[AnyStr]: ... 
+ # FIXME: __next__ doesn't actually exist on this class and should be removed: + # see also https://github.com/python/typeshed/pull/5456#discussion_r633068648 + # >>> import tempfile + # >>> ntf=tempfile.NamedTemporaryFile() + # >>> next(ntf) + # Traceback (most recent call last): + # File "", line 1, in + # TypeError: '_TemporaryFileWrapper' object is not an iterator + def __next__(self) -> AnyStr: ... + def fileno(self) -> int: ... + def flush(self) -> None: ... + def isatty(self) -> bool: ... + def read(self, n: int = ...) -> AnyStr: ... + def readable(self) -> bool: ... + def readline(self, limit: int = ...) -> AnyStr: ... + def readlines(self, hint: int = ...) -> list[AnyStr]: ... + def seek(self, offset: int, whence: int = ...) -> int: ... + def seekable(self) -> bool: ... + def tell(self) -> int: ... + def truncate(self, size: int | None = ...) -> int: ... + def writable(self) -> bool: ... + @overload + def write(self: _TemporaryFileWrapper[str], s: str, /) -> int: ... + @overload + def write(self: _TemporaryFileWrapper[bytes], s: ReadableBuffer, /) -> int: ... + @overload + def write(self, s: AnyStr, /) -> int: ... + @overload + def writelines(self: _TemporaryFileWrapper[str], lines: Iterable[str]) -> None: ... + @overload + def writelines(self: _TemporaryFileWrapper[bytes], lines: Iterable[ReadableBuffer]) -> None: ... + @overload + def writelines(self, lines: Iterable[AnyStr]) -> None: ... + @property + def closed(self) -> bool: ... + +if sys.version_info >= (3, 11): + _SpooledTemporaryFileBase = io.IOBase +else: + _SpooledTemporaryFileBase = object + +# It does not actually derive from IO[AnyStr], but it does mostly behave +# like one. +class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): + _file: IO[AnyStr] + @property + def encoding(self) -> str: ... # undocumented + @property + def newlines(self) -> str | tuple[str, ...] | None: ... # undocumented + # bytes needs to go first, as default mode is to open as bytes + @overload + def __init__( + self: SpooledTemporaryFile[bytes], + max_size: int = 0, + mode: OpenBinaryMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + *, + errors: str | None = None, + ) -> None: ... + @overload + def __init__( + self: SpooledTemporaryFile[str], + max_size: int, + mode: OpenTextMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + *, + errors: str | None = None, + ) -> None: ... + @overload + def __init__( + self: SpooledTemporaryFile[str], + max_size: int = 0, + *, + mode: OpenTextMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + errors: str | None = None, + ) -> None: ... + @overload + def __init__( + self, + max_size: int, + mode: str, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + *, + errors: str | None = None, + ) -> None: ... + @overload + def __init__( + self, + max_size: int = 0, + *, + mode: str, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + errors: str | None = None, + ) -> None: ... 
+ @property + def errors(self) -> str | None: ... + def rollover(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, exc: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None) -> None: ... + # These methods are copied from the abstract methods of IO, because + # SpooledTemporaryFile implements IO. + # See also https://github.com/python/typeshed/pull/2452#issuecomment-420657918. + def close(self) -> None: ... + def fileno(self) -> int: ... + def flush(self) -> None: ... + def isatty(self) -> bool: ... + if sys.version_info >= (3, 11): + # These three work only if the SpooledTemporaryFile is opened in binary mode, + # because the underlying object in text mode does not have these methods. + def read1(self, size: int = ..., /) -> AnyStr: ... + def readinto(self, b: WriteableBuffer) -> int: ... + def readinto1(self, b: WriteableBuffer) -> int: ... + def detach(self) -> io.RawIOBase: ... + + def read(self, n: int = ..., /) -> AnyStr: ... + def readline(self, limit: int | None = ..., /) -> AnyStr: ... # type: ignore[override] + def readlines(self, hint: int = ..., /) -> list[AnyStr]: ... # type: ignore[override] + def seek(self, offset: int, whence: int = ...) -> int: ... + def tell(self) -> int: ... + if sys.version_info >= (3, 11): + def truncate(self, size: int | None = None) -> int: ... + else: + def truncate(self, size: int | None = None) -> None: ... # type: ignore[override] + + @overload + def write(self: SpooledTemporaryFile[str], s: str) -> int: ... + @overload + def write(self: SpooledTemporaryFile[bytes], s: ReadableBuffer) -> int: ... + @overload + def write(self, s: AnyStr) -> int: ... + @overload # type: ignore[override] + def writelines(self: SpooledTemporaryFile[str], iterable: Iterable[str]) -> None: ... + @overload + def writelines(self: SpooledTemporaryFile[bytes], iterable: Iterable[ReadableBuffer]) -> None: ... + @overload + def writelines(self, iterable: Iterable[AnyStr]) -> None: ... + def __iter__(self) -> Iterator[AnyStr]: ... # type: ignore[override] + # These exist at runtime only on 3.11+. + def readable(self) -> bool: ... + def seekable(self) -> bool: ... + def writable(self) -> bool: ... + def __next__(self) -> AnyStr: ... # type: ignore[override] + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +class TemporaryDirectory(Generic[AnyStr]): + name: AnyStr + if sys.version_info >= (3, 12): + @overload + def __init__( + self: TemporaryDirectory[str], + suffix: str | None = None, + prefix: str | None = None, + dir: StrPath | None = None, + ignore_cleanup_errors: bool = False, + *, + delete: bool = True, + ) -> None: ... + @overload + def __init__( + self: TemporaryDirectory[bytes], + suffix: bytes | None = None, + prefix: bytes | None = None, + dir: BytesPath | None = None, + ignore_cleanup_errors: bool = False, + *, + delete: bool = True, + ) -> None: ... + elif sys.version_info >= (3, 10): + @overload + def __init__( + self: TemporaryDirectory[str], + suffix: str | None = None, + prefix: str | None = None, + dir: StrPath | None = None, + ignore_cleanup_errors: bool = False, + ) -> None: ... + @overload + def __init__( + self: TemporaryDirectory[bytes], + suffix: bytes | None = None, + prefix: bytes | None = None, + dir: BytesPath | None = None, + ignore_cleanup_errors: bool = False, + ) -> None: ... + else: + @overload + def __init__( + self: TemporaryDirectory[str], suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None + ) -> None: ... 
+ @overload + def __init__( + self: TemporaryDirectory[bytes], + suffix: bytes | None = None, + prefix: bytes | None = None, + dir: BytesPath | None = None, + ) -> None: ... + + def cleanup(self) -> None: ... + def __enter__(self) -> AnyStr: ... + def __exit__(self, exc: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None) -> None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +# The overloads overlap, but they should still work fine. +@overload +def mkstemp( + suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None, text: bool = False +) -> tuple[int, str]: ... +@overload +def mkstemp( + suffix: bytes | None = None, prefix: bytes | None = None, dir: BytesPath | None = None, text: bool = False +) -> tuple[int, bytes]: ... + +# The overloads overlap, but they should still work fine. +@overload +def mkdtemp(suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None) -> str: ... +@overload +def mkdtemp(suffix: bytes | None = None, prefix: bytes | None = None, dir: BytesPath | None = None) -> bytes: ... +@deprecated("Deprecated since Python 2.3. Use `mkstemp()` or `NamedTemporaryFile(delete=False)` instead.") +def mktemp(suffix: str = "", prefix: str = "tmp", dir: StrPath | None = None) -> str: ... +def gettempdirb() -> bytes: ... +def gettempprefixb() -> bytes: ... +def gettempdir() -> str: ... +def gettempprefix() -> str: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/termios.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/termios.pyi new file mode 100644 index 0000000..a35be5d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/termios.pyi @@ -0,0 +1,304 @@ +import sys +from _typeshed import FileDescriptorLike +from typing import Any, Final +from typing_extensions import TypeAlias + +# Must be a list of length 7, containing 6 ints and a list of NCCS 1-character bytes or ints. +_Attr: TypeAlias = list[int | list[bytes | int]] | list[int | list[bytes]] | list[int | list[int]] +# Same as _Attr for return types; we use Any to avoid a union. 
+_AttrReturn: TypeAlias = list[Any] + +if sys.platform != "win32": + # Values depends on the platform + B0: Final[int] + B110: Final[int] + B115200: Final[int] + B1200: Final[int] + B134: Final[int] + B150: Final[int] + B1800: Final[int] + B19200: Final[int] + B200: Final[int] + B230400: Final[int] + B2400: Final[int] + B300: Final[int] + B38400: Final[int] + B4800: Final[int] + B50: Final[int] + B57600: Final[int] + B600: Final[int] + B75: Final[int] + B9600: Final[int] + BRKINT: Final[int] + BS0: Final[int] + BS1: Final[int] + BSDLY: Final[int] + CDSUSP: Final[int] + CEOF: Final[int] + CEOL: Final[int] + CEOT: Final[int] + CERASE: Final[int] + CFLUSH: Final[int] + CINTR: Final[int] + CKILL: Final[int] + CLNEXT: Final[int] + CLOCAL: Final[int] + CQUIT: Final[int] + CR0: Final[int] + CR1: Final[int] + CR2: Final[int] + CR3: Final[int] + CRDLY: Final[int] + CREAD: Final[int] + CRPRNT: Final[int] + CRTSCTS: Final[int] + CS5: Final[int] + CS6: Final[int] + CS7: Final[int] + CS8: Final[int] + CSIZE: Final[int] + CSTART: Final[int] + CSTOP: Final[int] + CSTOPB: Final[int] + CSUSP: Final[int] + CWERASE: Final[int] + ECHO: Final[int] + ECHOCTL: Final[int] + ECHOE: Final[int] + ECHOK: Final[int] + ECHOKE: Final[int] + ECHONL: Final[int] + ECHOPRT: Final[int] + EXTA: Final[int] + EXTB: Final[int] + FF0: Final[int] + FF1: Final[int] + FFDLY: Final[int] + FIOASYNC: Final[int] + FIOCLEX: Final[int] + FIONBIO: Final[int] + FIONCLEX: Final[int] + FIONREAD: Final[int] + FLUSHO: Final[int] + HUPCL: Final[int] + ICANON: Final[int] + ICRNL: Final[int] + IEXTEN: Final[int] + IGNBRK: Final[int] + IGNCR: Final[int] + IGNPAR: Final[int] + IMAXBEL: Final[int] + INLCR: Final[int] + INPCK: Final[int] + ISIG: Final[int] + ISTRIP: Final[int] + IXANY: Final[int] + IXOFF: Final[int] + IXON: Final[int] + NCCS: Final[int] + NL0: Final[int] + NL1: Final[int] + NLDLY: Final[int] + NOFLSH: Final[int] + OCRNL: Final[int] + OFDEL: Final[int] + OFILL: Final[int] + ONLCR: Final[int] + ONLRET: Final[int] + ONOCR: Final[int] + OPOST: Final[int] + PARENB: Final[int] + PARMRK: Final[int] + PARODD: Final[int] + PENDIN: Final[int] + TAB0: Final[int] + TAB1: Final[int] + TAB2: Final[int] + TAB3: Final[int] + TABDLY: Final[int] + TCIFLUSH: Final[int] + TCIOFF: Final[int] + TCIOFLUSH: Final[int] + TCION: Final[int] + TCOFLUSH: Final[int] + TCOOFF: Final[int] + TCOON: Final[int] + TCSADRAIN: Final[int] + TCSAFLUSH: Final[int] + TCSANOW: Final[int] + TIOCCONS: Final[int] + TIOCEXCL: Final[int] + TIOCGETD: Final[int] + TIOCGPGRP: Final[int] + TIOCGWINSZ: Final[int] + TIOCM_CAR: Final[int] + TIOCM_CD: Final[int] + TIOCM_CTS: Final[int] + TIOCM_DSR: Final[int] + TIOCM_DTR: Final[int] + TIOCM_LE: Final[int] + TIOCM_RI: Final[int] + TIOCM_RNG: Final[int] + TIOCM_RTS: Final[int] + TIOCM_SR: Final[int] + TIOCM_ST: Final[int] + TIOCMBIC: Final[int] + TIOCMBIS: Final[int] + TIOCMGET: Final[int] + TIOCMSET: Final[int] + TIOCNOTTY: Final[int] + TIOCNXCL: Final[int] + TIOCOUTQ: Final[int] + TIOCPKT_DATA: Final[int] + TIOCPKT_DOSTOP: Final[int] + TIOCPKT_FLUSHREAD: Final[int] + TIOCPKT_FLUSHWRITE: Final[int] + TIOCPKT_NOSTOP: Final[int] + TIOCPKT_START: Final[int] + TIOCPKT_STOP: Final[int] + TIOCPKT: Final[int] + TIOCSCTTY: Final[int] + TIOCSETD: Final[int] + TIOCSPGRP: Final[int] + TIOCSTI: Final[int] + TIOCSWINSZ: Final[int] + TOSTOP: Final[int] + VDISCARD: Final[int] + VEOF: Final[int] + VEOL: Final[int] + VEOL2: Final[int] + VERASE: Final[int] + VINTR: Final[int] + VKILL: Final[int] + VLNEXT: Final[int] + VMIN: Final[int] + VQUIT: Final[int] + 
VREPRINT: Final[int] + VSTART: Final[int] + VSTOP: Final[int] + VSUSP: Final[int] + VT0: Final[int] + VT1: Final[int] + VTDLY: Final[int] + VTIME: Final[int] + VWERASE: Final[int] + + if sys.version_info >= (3, 13): + EXTPROC: Final[int] + IUTF8: Final[int] + + if sys.platform == "darwin" and sys.version_info >= (3, 13): + ALTWERASE: Final[int] + B14400: Final[int] + B28800: Final[int] + B7200: Final[int] + B76800: Final[int] + CCAR_OFLOW: Final[int] + CCTS_OFLOW: Final[int] + CDSR_OFLOW: Final[int] + CDTR_IFLOW: Final[int] + CIGNORE: Final[int] + CRTS_IFLOW: Final[int] + MDMBUF: Final[int] + NL2: Final[int] + NL3: Final[int] + NOKERNINFO: Final[int] + ONOEOT: Final[int] + OXTABS: Final[int] + VDSUSP: Final[int] + VSTATUS: Final[int] + + if sys.platform == "darwin" and sys.version_info >= (3, 11): + TIOCGSIZE: Final[int] + TIOCSSIZE: Final[int] + + if sys.platform == "linux": + B1152000: Final[int] + B576000: Final[int] + CBAUD: Final[int] + CBAUDEX: Final[int] + CIBAUD: Final[int] + IOCSIZE_MASK: Final[int] + IOCSIZE_SHIFT: Final[int] + IUCLC: Final[int] + N_MOUSE: Final[int] + N_PPP: Final[int] + N_SLIP: Final[int] + N_STRIP: Final[int] + N_TTY: Final[int] + NCC: Final[int] + OLCUC: Final[int] + TCFLSH: Final[int] + TCGETA: Final[int] + TCGETS: Final[int] + TCSBRK: Final[int] + TCSBRKP: Final[int] + TCSETA: Final[int] + TCSETAF: Final[int] + TCSETAW: Final[int] + TCSETS: Final[int] + TCSETSF: Final[int] + TCSETSW: Final[int] + TCXONC: Final[int] + TIOCGICOUNT: Final[int] + TIOCGLCKTRMIOS: Final[int] + TIOCGSERIAL: Final[int] + TIOCGSOFTCAR: Final[int] + TIOCINQ: Final[int] + TIOCLINUX: Final[int] + TIOCMIWAIT: Final[int] + TIOCTTYGSTRUCT: Final[int] + TIOCSER_TEMT: Final[int] + TIOCSERCONFIG: Final[int] + TIOCSERGETLSR: Final[int] + TIOCSERGETMULTI: Final[int] + TIOCSERGSTRUCT: Final[int] + TIOCSERGWILD: Final[int] + TIOCSERSETMULTI: Final[int] + TIOCSERSWILD: Final[int] + TIOCSLCKTRMIOS: Final[int] + TIOCSSERIAL: Final[int] + TIOCSSOFTCAR: Final[int] + VSWTC: Final[int] + VSWTCH: Final[int] + XCASE: Final[int] + XTABS: Final[int] + + if sys.platform != "darwin": + B1000000: Final[int] + B1500000: Final[int] + B2000000: Final[int] + B2500000: Final[int] + B3000000: Final[int] + B3500000: Final[int] + B4000000: Final[int] + B460800: Final[int] + B500000: Final[int] + B921600: Final[int] + + if sys.platform != "linux": + TCSASOFT: Final[int] + + if sys.platform != "darwin" and sys.platform != "linux": + # not available on FreeBSD either. + CDEL: Final[int] + CEOL2: Final[int] + CESC: Final[int] + CNUL: Final[int] + COMMON: Final[int] + CSWTCH: Final[int] + IBSHIFT: Final[int] + INIT_C_CC: Final[int] + NSWTCH: Final[int] + + def tcgetattr(fd: FileDescriptorLike, /) -> _AttrReturn: ... + def tcsetattr(fd: FileDescriptorLike, when: int, attributes: _Attr, /) -> None: ... + def tcsendbreak(fd: FileDescriptorLike, duration: int, /) -> None: ... + def tcdrain(fd: FileDescriptorLike, /) -> None: ... + def tcflush(fd: FileDescriptorLike, queue: int, /) -> None: ... + def tcflow(fd: FileDescriptorLike, action: int, /) -> None: ... + if sys.version_info >= (3, 11): + def tcgetwinsize(fd: FileDescriptorLike, /) -> tuple[int, int]: ... + def tcsetwinsize(fd: FileDescriptorLike, winsize: tuple[int, int], /) -> None: ... + + class error(Exception): ... 
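The tcgetattr/tcsetattr signatures above take and return the 7-element attribute list noted in the _Attr comment (index 3 is the lflag word). A minimal POSIX-only sketch that temporarily clears ECHO on stdin and restores the saved attributes afterwards; it assumes stdin is an interactive terminal:

import sys
import termios

fd = sys.stdin.fileno()
saved = termios.tcgetattr(fd)          # remember the original settings
try:
    quiet = termios.tcgetattr(fd)
    quiet[3] &= ~termios.ECHO          # clear the ECHO bit in the lflag word
    termios.tcsetattr(fd, termios.TCSADRAIN, quiet)
    secret = input("password (not echoed): ")
finally:
    termios.tcsetattr(fd, termios.TCSADRAIN, saved)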
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/textwrap.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/textwrap.pyi new file mode 100644 index 0000000..c00cce3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/textwrap.pyi @@ -0,0 +1,103 @@ +from collections.abc import Callable +from re import Pattern + +__all__ = ["TextWrapper", "wrap", "fill", "dedent", "indent", "shorten"] + +class TextWrapper: + width: int + initial_indent: str + subsequent_indent: str + expand_tabs: bool + replace_whitespace: bool + fix_sentence_endings: bool + drop_whitespace: bool + break_long_words: bool + break_on_hyphens: bool + tabsize: int + max_lines: int | None + placeholder: str + + # Attributes not present in documentation + sentence_end_re: Pattern[str] + wordsep_re: Pattern[str] + wordsep_simple_re: Pattern[str] + whitespace_trans: str + unicode_whitespace_trans: dict[int, int] + uspace: int + x: str # leaked loop variable + def __init__( + self, + width: int = 70, + initial_indent: str = "", + subsequent_indent: str = "", + expand_tabs: bool = True, + replace_whitespace: bool = True, + fix_sentence_endings: bool = False, + break_long_words: bool = True, + drop_whitespace: bool = True, + break_on_hyphens: bool = True, + tabsize: int = 8, + *, + max_lines: int | None = None, + placeholder: str = " [...]", + ) -> None: ... + # Private methods *are* part of the documented API for subclasses. + def _munge_whitespace(self, text: str) -> str: ... + def _split(self, text: str) -> list[str]: ... + def _fix_sentence_endings(self, chunks: list[str]) -> None: ... + def _handle_long_word(self, reversed_chunks: list[str], cur_line: list[str], cur_len: int, width: int) -> None: ... + def _wrap_chunks(self, chunks: list[str]) -> list[str]: ... + def _split_chunks(self, text: str) -> list[str]: ... + def wrap(self, text: str) -> list[str]: ... + def fill(self, text: str) -> str: ... + +def wrap( + text: str, + width: int = 70, + *, + initial_indent: str = "", + subsequent_indent: str = "", + expand_tabs: bool = True, + tabsize: int = 8, + replace_whitespace: bool = True, + fix_sentence_endings: bool = False, + break_long_words: bool = True, + break_on_hyphens: bool = True, + drop_whitespace: bool = True, + max_lines: int | None = None, + placeholder: str = " [...]", +) -> list[str]: ... +def fill( + text: str, + width: int = 70, + *, + initial_indent: str = "", + subsequent_indent: str = "", + expand_tabs: bool = True, + tabsize: int = 8, + replace_whitespace: bool = True, + fix_sentence_endings: bool = False, + break_long_words: bool = True, + break_on_hyphens: bool = True, + drop_whitespace: bool = True, + max_lines: int | None = None, + placeholder: str = " [...]", +) -> str: ... +def shorten( + text: str, + width: int, + *, + initial_indent: str = "", + subsequent_indent: str = "", + expand_tabs: bool = True, + tabsize: int = 8, + replace_whitespace: bool = True, + fix_sentence_endings: bool = False, + break_long_words: bool = True, + break_on_hyphens: bool = True, + drop_whitespace: bool = True, + # Omit `max_lines: int = None`, it is forced to 1 here. + placeholder: str = " [...]", +) -> str: ... +def dedent(text: str) -> str: ... +def indent(text: str, prefix: str, predicate: Callable[[str], bool] | None = None) -> str: ... 
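A minimal sketch exercising the module-level textwrap helpers stubbed above (fill, shorten, dedent, indent); the sample text and widths are arbitrary:

import textwrap

blurb = (
    "The textwrap module provides convenience functions for wrapping and "
    "filling paragraphs of plain text to a given display width."
)
# fill() wraps to the target width; shorten() truncates with a placeholder.
print(textwrap.fill(blurb, width=40, initial_indent="  ", subsequent_indent="  "))
print(textwrap.shorten(blurb, width=40, placeholder=" [...]"))

# dedent() strips common leading whitespace; indent() adds a prefix per line.
code = textwrap.dedent(
    """\
    def hello():
        print("hi")
    """
)
print(textwrap.indent(code, "    "))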
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/this.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/this.pyi new file mode 100644 index 0000000..8de996b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/this.pyi @@ -0,0 +1,2 @@ +s: str +d: dict[str, str] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/threading.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/threading.pyi new file mode 100644 index 0000000..7b0f15b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/threading.pyi @@ -0,0 +1,205 @@ +import _thread +import sys +from _thread import _ExceptHookArgs, get_native_id as get_native_id +from _typeshed import ProfileFunction, TraceFunction +from collections.abc import Callable, Iterable, Mapping +from contextvars import ContextVar +from types import TracebackType +from typing import Any, Final, TypeVar, final +from typing_extensions import deprecated + +_T = TypeVar("_T") + +__all__ = [ + "get_ident", + "active_count", + "Condition", + "current_thread", + "enumerate", + "main_thread", + "TIMEOUT_MAX", + "Event", + "Lock", + "RLock", + "Semaphore", + "BoundedSemaphore", + "Thread", + "Barrier", + "BrokenBarrierError", + "Timer", + "ThreadError", + "ExceptHookArgs", + "setprofile", + "settrace", + "local", + "stack_size", + "excepthook", + "get_native_id", +] + +if sys.version_info >= (3, 10): + __all__ += ["getprofile", "gettrace"] + +if sys.version_info >= (3, 12): + __all__ += ["setprofile_all_threads", "settrace_all_threads"] + +_profile_hook: ProfileFunction | None + +def active_count() -> int: ... +@deprecated("Deprecated since Python 3.10. Use `active_count()` instead.") +def activeCount() -> int: ... +def current_thread() -> Thread: ... +@deprecated("Deprecated since Python 3.10. Use `current_thread()` instead.") +def currentThread() -> Thread: ... +def get_ident() -> int: ... +def enumerate() -> list[Thread]: ... +def main_thread() -> Thread: ... +def settrace(func: TraceFunction) -> None: ... +def setprofile(func: ProfileFunction | None) -> None: ... + +if sys.version_info >= (3, 12): + def setprofile_all_threads(func: ProfileFunction | None) -> None: ... + def settrace_all_threads(func: TraceFunction) -> None: ... + +if sys.version_info >= (3, 10): + def gettrace() -> TraceFunction | None: ... + def getprofile() -> ProfileFunction | None: ... + +def stack_size(size: int = 0, /) -> int: ... + +TIMEOUT_MAX: Final[float] + +ThreadError = _thread.error +local = _thread._local + +class Thread: + name: str + @property + def ident(self) -> int | None: ... + daemon: bool + if sys.version_info >= (3, 14): + def __init__( + self, + group: None = None, + target: Callable[..., object] | None = None, + name: str | None = None, + args: Iterable[Any] = (), + kwargs: Mapping[str, Any] | None = None, + *, + daemon: bool | None = None, + context: ContextVar[Any] | None = None, + ) -> None: ... + else: + def __init__( + self, + group: None = None, + target: Callable[..., object] | None = None, + name: str | None = None, + args: Iterable[Any] = (), + kwargs: Mapping[str, Any] | None = None, + *, + daemon: bool | None = None, + ) -> None: ... + + def start(self) -> None: ... + def run(self) -> None: ... + def join(self, timeout: float | None = None) -> None: ... + @property + def native_id(self) -> int | None: ... # only available on some platforms + def is_alive(self) -> bool: ... + @deprecated("Deprecated since Python 3.10. 
Read the `daemon` attribute instead.") + def isDaemon(self) -> bool: ... + @deprecated("Deprecated since Python 3.10. Set the `daemon` attribute instead.") + def setDaemon(self, daemonic: bool) -> None: ... + @deprecated("Deprecated since Python 3.10. Read the `name` attribute instead.") + def getName(self) -> str: ... + @deprecated("Deprecated since Python 3.10. Set the `name` attribute instead.") + def setName(self, name: str) -> None: ... + +class _DummyThread(Thread): + def __init__(self) -> None: ... + +# This is actually the function _thread.allocate_lock for <= 3.12 +Lock = _thread.LockType + +# Python implementation of RLock. +@final +class _RLock: + _count: int + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... + def release(self) -> None: ... + __enter__ = acquire + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + + if sys.version_info >= (3, 14): + def locked(self) -> bool: ... + +RLock = _thread.RLock # Actually a function at runtime. + +class Condition: + def __init__(self, lock: Lock | _RLock | RLock | None = None) -> None: ... + def __enter__(self) -> bool: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... + def release(self) -> None: ... + def wait(self, timeout: float | None = None) -> bool: ... + def wait_for(self, predicate: Callable[[], _T], timeout: float | None = None) -> _T: ... + def notify(self, n: int = 1) -> None: ... + def notify_all(self) -> None: ... + @deprecated("Deprecated since Python 3.10. Use `notify_all()` instead.") + def notifyAll(self) -> None: ... + +class Semaphore: + _value: int + def __init__(self, value: int = 1) -> None: ... + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + def acquire(self, blocking: bool = True, timeout: float | None = None) -> bool: ... + def __enter__(self, blocking: bool = True, timeout: float | None = None) -> bool: ... + def release(self, n: int = 1) -> None: ... + +class BoundedSemaphore(Semaphore): ... + +class Event: + def is_set(self) -> bool: ... + @deprecated("Deprecated since Python 3.10. Use `is_set()` instead.") + def isSet(self) -> bool: ... + def set(self) -> None: ... + def clear(self) -> None: ... + def wait(self, timeout: float | None = None) -> bool: ... + +excepthook: Callable[[_ExceptHookArgs], object] +if sys.version_info >= (3, 10): + __excepthook__: Callable[[_ExceptHookArgs], object] +ExceptHookArgs = _ExceptHookArgs + +class Timer(Thread): + args: Iterable[Any] # undocumented + finished: Event # undocumented + function: Callable[..., Any] # undocumented + interval: float # undocumented + kwargs: Mapping[str, Any] # undocumented + + def __init__( + self, + interval: float, + function: Callable[..., object], + args: Iterable[Any] | None = None, + kwargs: Mapping[str, Any] | None = None, + ) -> None: ... + def cancel(self) -> None: ... + +class Barrier: + @property + def parties(self) -> int: ... + @property + def n_waiting(self) -> int: ... + @property + def broken(self) -> bool: ... + def __init__(self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None) -> None: ... + def wait(self, timeout: float | None = None) -> int: ... + def reset(self) -> None: ... + def abort(self) -> None: ... + +class BrokenBarrierError(RuntimeError): ... 
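A minimal sketch of the Thread/Event signatures stubbed above, using the non-deprecated spellings (is_set, current_thread, the daemon keyword); the thread name and sleep intervals are arbitrary:

import threading
import time

stop = threading.Event()

def worker() -> None:
    # Poll the Event so the main thread can request a clean shutdown.
    while not stop.is_set():
        print(f"{threading.current_thread().name}: working")
        time.sleep(0.2)

t = threading.Thread(target=worker, name="poller", daemon=True)
t.start()
time.sleep(1.0)
stop.set()           # signal the worker to leave its loop
t.join(timeout=2.0)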
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/time.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/time.pyi new file mode 100644 index 0000000..5665efb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/time.pyi @@ -0,0 +1,112 @@ +import sys +from _typeshed import structseq +from typing import Any, Final, Literal, Protocol, final, type_check_only +from typing_extensions import TypeAlias + +_TimeTuple: TypeAlias = tuple[int, int, int, int, int, int, int, int, int] + +altzone: int +daylight: int +timezone: int +tzname: tuple[str, str] + +if sys.platform == "linux": + CLOCK_BOOTTIME: Final[int] +if sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darwin": + CLOCK_PROF: Final[int] # FreeBSD, NetBSD, OpenBSD + CLOCK_UPTIME: Final[int] # FreeBSD, OpenBSD + +if sys.platform != "win32": + CLOCK_MONOTONIC: Final[int] + CLOCK_MONOTONIC_RAW: Final[int] + CLOCK_PROCESS_CPUTIME_ID: Final[int] + CLOCK_REALTIME: Final[int] + CLOCK_THREAD_CPUTIME_ID: Final[int] + if sys.platform != "linux" and sys.platform != "darwin": + CLOCK_HIGHRES: Final[int] # Solaris only + +if sys.platform == "darwin": + CLOCK_UPTIME_RAW: Final[int] + if sys.version_info >= (3, 13): + CLOCK_UPTIME_RAW_APPROX: Final[int] + CLOCK_MONOTONIC_RAW_APPROX: Final[int] + +if sys.platform == "linux": + CLOCK_TAI: Final[int] + +# Constructor takes an iterable of any type, of length between 9 and 11 elements. +# However, it always *behaves* like a tuple of 9 elements, +# even if an iterable with length >9 is passed. +# https://github.com/python/typeshed/pull/6560#discussion_r767162532 +@final +class struct_time(structseq[Any | int], _TimeTuple): + if sys.version_info >= (3, 10): + __match_args__: Final = ("tm_year", "tm_mon", "tm_mday", "tm_hour", "tm_min", "tm_sec", "tm_wday", "tm_yday", "tm_isdst") + + @property + def tm_year(self) -> int: ... + @property + def tm_mon(self) -> int: ... + @property + def tm_mday(self) -> int: ... + @property + def tm_hour(self) -> int: ... + @property + def tm_min(self) -> int: ... + @property + def tm_sec(self) -> int: ... + @property + def tm_wday(self) -> int: ... + @property + def tm_yday(self) -> int: ... + @property + def tm_isdst(self) -> int: ... + # These final two properties only exist if a 10- or 11-item sequence was passed to the constructor. + @property + def tm_zone(self) -> str: ... + @property + def tm_gmtoff(self) -> int: ... + +def asctime(time_tuple: _TimeTuple | struct_time = ..., /) -> str: ... +def ctime(seconds: float | None = None, /) -> str: ... +def gmtime(seconds: float | None = None, /) -> struct_time: ... +def localtime(seconds: float | None = None, /) -> struct_time: ... +def mktime(time_tuple: _TimeTuple | struct_time, /) -> float: ... +def sleep(seconds: float, /) -> None: ... +def strftime(format: str, time_tuple: _TimeTuple | struct_time = ..., /) -> str: ... +def strptime(data_string: str, format: str = "%a %b %d %H:%M:%S %Y", /) -> struct_time: ... +def time() -> float: ... + +if sys.platform != "win32": + def tzset() -> None: ... # Unix only + +@type_check_only +class _ClockInfo(Protocol): + adjustable: bool + implementation: str + monotonic: bool + resolution: float + +def get_clock_info(name: Literal["monotonic", "perf_counter", "process_time", "time", "thread_time"], /) -> _ClockInfo: ... +def monotonic() -> float: ... +def perf_counter() -> float: ... +def process_time() -> float: ... + +if sys.platform != "win32": + def clock_getres(clk_id: int, /) -> float: ... 
# Unix only + def clock_gettime(clk_id: int, /) -> float: ... # Unix only + def clock_settime(clk_id: int, time: float, /) -> None: ... # Unix only + +if sys.platform != "win32": + def clock_gettime_ns(clk_id: int, /) -> int: ... + def clock_settime_ns(clock_id: int, time: int, /) -> int: ... + +if sys.platform == "linux": + def pthread_getcpuclockid(thread_id: int, /) -> int: ... + +def monotonic_ns() -> int: ... +def perf_counter_ns() -> int: ... +def process_time_ns() -> int: ... +def time_ns() -> int: ... +def thread_time() -> float: ... +def thread_time_ns() -> int: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/timeit.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/timeit.pyi new file mode 100644 index 0000000..a5da943 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/timeit.pyi @@ -0,0 +1,32 @@ +from collections.abc import Callable, Sequence +from typing import IO, Any +from typing_extensions import TypeAlias + +__all__ = ["Timer", "timeit", "repeat", "default_timer"] + +_Timer: TypeAlias = Callable[[], float] +_Stmt: TypeAlias = str | Callable[[], object] + +default_timer: _Timer + +class Timer: + def __init__( + self, stmt: _Stmt = "pass", setup: _Stmt = "pass", timer: _Timer = ..., globals: dict[str, Any] | None = None + ) -> None: ... + def print_exc(self, file: IO[str] | None = None) -> None: ... + def timeit(self, number: int = 1000000) -> float: ... + def repeat(self, repeat: int = 5, number: int = 1000000) -> list[float]: ... + def autorange(self, callback: Callable[[int, float], object] | None = None) -> tuple[int, float]: ... + +def timeit( + stmt: _Stmt = "pass", setup: _Stmt = "pass", timer: _Timer = ..., number: int = 1000000, globals: dict[str, Any] | None = None +) -> float: ... +def repeat( + stmt: _Stmt = "pass", + setup: _Stmt = "pass", + timer: _Timer = ..., + repeat: int = 5, + number: int = 1000000, + globals: dict[str, Any] | None = None, +) -> list[float]: ... +def main(args: Sequence[str] | None = None, *, _wrap_timer: Callable[[_Timer], _Timer] | None = None) -> None: ... 
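The time and timeit stubs above fit together: timeit's default_timer is time.perf_counter. A minimal sketch, using an arbitrary statement as the thing being measured:

import time
import timeit

# perf_counter() is the high-resolution monotonic clock used by timeit.
start = time.perf_counter()
sum(range(1_000_000))
print(f"one run: {time.perf_counter() - start:.4f}s")

# timeit() runs the statement `number` times and returns the total seconds.
total = timeit.timeit("sum(range(1_000_000))", number=10)
print(f"10 runs via timeit: {total:.4f}s ({total / 10:.4f}s per run)")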
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/__init__.pyi new file mode 100644 index 0000000..ef57faa --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/__init__.pyi @@ -0,0 +1,4173 @@ +import _tkinter +import sys +from _typeshed import Incomplete, MaybeNone, StrOrBytesPath +from collections.abc import Callable, Iterable, Mapping, Sequence +from tkinter.constants import * +from tkinter.font import _FontDescription +from types import GenericAlias, TracebackType +from typing import Any, ClassVar, Final, Generic, Literal, NamedTuple, Protocol, TypedDict, TypeVar, overload, type_check_only +from typing_extensions import TypeAlias, TypeVarTuple, Unpack, deprecated, disjoint_base + +if sys.version_info >= (3, 11): + from enum import StrEnum +else: + from enum import Enum + +__all__ = [ + "TclError", + "NO", + "FALSE", + "OFF", + "YES", + "TRUE", + "ON", + "N", + "S", + "W", + "E", + "NW", + "SW", + "NE", + "SE", + "NS", + "EW", + "NSEW", + "CENTER", + "NONE", + "X", + "Y", + "BOTH", + "LEFT", + "TOP", + "RIGHT", + "BOTTOM", + "RAISED", + "SUNKEN", + "FLAT", + "RIDGE", + "GROOVE", + "SOLID", + "HORIZONTAL", + "VERTICAL", + "NUMERIC", + "CHAR", + "WORD", + "BASELINE", + "INSIDE", + "OUTSIDE", + "SEL", + "SEL_FIRST", + "SEL_LAST", + "END", + "INSERT", + "CURRENT", + "ANCHOR", + "ALL", + "NORMAL", + "DISABLED", + "ACTIVE", + "HIDDEN", + "CASCADE", + "CHECKBUTTON", + "COMMAND", + "RADIOBUTTON", + "SEPARATOR", + "SINGLE", + "BROWSE", + "MULTIPLE", + "EXTENDED", + "DOTBOX", + "UNDERLINE", + "PIESLICE", + "CHORD", + "ARC", + "FIRST", + "LAST", + "BUTT", + "PROJECTING", + "ROUND", + "BEVEL", + "MITER", + "MOVETO", + "SCROLL", + "UNITS", + "PAGES", + "TkVersion", + "TclVersion", + "READABLE", + "WRITABLE", + "EXCEPTION", + "EventType", + "Event", + "NoDefaultRoot", + "Variable", + "StringVar", + "IntVar", + "DoubleVar", + "BooleanVar", + "mainloop", + "getint", + "getdouble", + "getboolean", + "Misc", + "CallWrapper", + "XView", + "YView", + "Wm", + "Tk", + "Tcl", + "Pack", + "Place", + "Grid", + "BaseWidget", + "Widget", + "Toplevel", + "Button", + "Canvas", + "Checkbutton", + "Entry", + "Frame", + "Label", + "Listbox", + "Menu", + "Menubutton", + "Message", + "Radiobutton", + "Scale", + "Scrollbar", + "Text", + "OptionMenu", + "Image", + "PhotoImage", + "BitmapImage", + "image_names", + "image_types", + "Spinbox", + "LabelFrame", + "PanedWindow", +] + +# Using anything from tkinter.font in this file means that 'import tkinter' +# seems to also load tkinter.font. That's not how it actually works, but +# unfortunately not much can be done about it. https://github.com/python/typeshed/pull/4346 + +TclError = _tkinter.TclError +wantobjects: int +TkVersion: Final[float] +TclVersion: Final[float] +READABLE: Final = _tkinter.READABLE +WRITABLE: Final = _tkinter.WRITABLE +EXCEPTION: Final = _tkinter.EXCEPTION + +# Quick guide for figuring out which widget class to choose: +# - Misc: any widget (don't use BaseWidget because Tk doesn't inherit from BaseWidget) +# - Widget: anything that is meant to be put into another widget with e.g. pack or grid +# +# Don't trust tkinter's docstrings, because they have been created by copy/pasting from +# Tk's manual pages more than 10 years ago. 
Use the latest manual pages instead: +# +# $ sudo apt install tk-doc tcl-doc +# $ man 3tk label # tkinter.Label +# $ man 3tk ttk_label # tkinter.ttk.Label +# $ man 3tcl after # tkinter.Misc.after +# +# You can also read the manual pages online: https://www.tcl.tk/doc/ + +# manual page: Tk_GetCursor +_Cursor: TypeAlias = str | tuple[str] | tuple[str, str] | tuple[str, str, str] | tuple[str, str, str, str] + +if sys.version_info >= (3, 11): + @type_check_only + class _VersionInfoTypeBase(NamedTuple): + major: int + minor: int + micro: int + releaselevel: str + serial: int + + if sys.version_info >= (3, 12): + class _VersionInfoType(_VersionInfoTypeBase): ... + else: + @disjoint_base + class _VersionInfoType(_VersionInfoTypeBase): ... + +if sys.version_info >= (3, 11): + class EventType(StrEnum): + Activate = "36" + ButtonPress = "4" + Button = ButtonPress + ButtonRelease = "5" + Circulate = "26" + CirculateRequest = "27" + ClientMessage = "33" + Colormap = "32" + Configure = "22" + ConfigureRequest = "23" + Create = "16" + Deactivate = "37" + Destroy = "17" + Enter = "7" + Expose = "12" + FocusIn = "9" + FocusOut = "10" + GraphicsExpose = "13" + Gravity = "24" + KeyPress = "2" + Key = "2" + KeyRelease = "3" + Keymap = "11" + Leave = "8" + Map = "19" + MapRequest = "20" + Mapping = "34" + Motion = "6" + MouseWheel = "38" + NoExpose = "14" + Property = "28" + Reparent = "21" + ResizeRequest = "25" + Selection = "31" + SelectionClear = "29" + SelectionRequest = "30" + Unmap = "18" + VirtualEvent = "35" + Visibility = "15" + +else: + class EventType(str, Enum): + Activate = "36" + ButtonPress = "4" + Button = ButtonPress + ButtonRelease = "5" + Circulate = "26" + CirculateRequest = "27" + ClientMessage = "33" + Colormap = "32" + Configure = "22" + ConfigureRequest = "23" + Create = "16" + Deactivate = "37" + Destroy = "17" + Enter = "7" + Expose = "12" + FocusIn = "9" + FocusOut = "10" + GraphicsExpose = "13" + Gravity = "24" + KeyPress = "2" + Key = KeyPress + KeyRelease = "3" + Keymap = "11" + Leave = "8" + Map = "19" + MapRequest = "20" + Mapping = "34" + Motion = "6" + MouseWheel = "38" + NoExpose = "14" + Property = "28" + Reparent = "21" + ResizeRequest = "25" + Selection = "31" + SelectionClear = "29" + SelectionRequest = "30" + Unmap = "18" + VirtualEvent = "35" + Visibility = "15" + +_W = TypeVar("_W", bound=Misc) +# Events considered covariant because you should never assign to event.widget. +_W_co = TypeVar("_W_co", covariant=True, bound=Misc, default=Misc) + +class Event(Generic[_W_co]): + serial: int + num: int + focus: bool + height: int + width: int + keycode: int + state: int | str + time: int + x: int + y: int + x_root: int + y_root: int + char: str + send_event: bool + keysym: str + keysym_num: int + type: EventType + widget: _W_co + delta: int + if sys.version_info >= (3, 14): + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +def NoDefaultRoot() -> None: ... + +class Variable: + def __init__(self, master: Misc | None = None, value=None, name: str | None = None) -> None: ... + def set(self, value) -> None: ... + initialize = set + def get(self): ... + def trace_add(self, mode: Literal["array", "read", "write", "unset"], callback: Callable[[str, str, str], object]) -> str: ... + def trace_remove(self, mode: Literal["array", "read", "write", "unset"], cbname: str) -> None: ... + def trace_info(self) -> list[tuple[tuple[Literal["array", "read", "write", "unset"], ...], str]]: ... + if sys.version_info >= (3, 14): + @deprecated("Deprecated since Python 3.14. 
Use `trace_add()` instead.") + def trace(self, mode, callback) -> str: ... + @deprecated("Deprecated since Python 3.14. Use `trace_add()` instead.") + def trace_variable(self, mode, callback) -> str: ... + @deprecated("Deprecated since Python 3.14. Use `trace_remove()` instead.") + def trace_vdelete(self, mode, cbname) -> None: ... + @deprecated("Deprecated since Python 3.14. Use `trace_info()` instead.") + def trace_vinfo(self) -> list[Incomplete]: ... + else: + def trace(self, mode, callback) -> str: ... + def trace_variable(self, mode, callback) -> str: ... + def trace_vdelete(self, mode, cbname) -> None: ... + def trace_vinfo(self) -> list[Incomplete]: ... + + def __eq__(self, other: object) -> bool: ... + def __del__(self) -> None: ... + __hash__: ClassVar[None] # type: ignore[assignment] + +class StringVar(Variable): + def __init__(self, master: Misc | None = None, value: str | None = None, name: str | None = None) -> None: ... + def set(self, value: str) -> None: ... + initialize = set + def get(self) -> str: ... + +class IntVar(Variable): + def __init__(self, master: Misc | None = None, value: int | None = None, name: str | None = None) -> None: ... + def set(self, value: int) -> None: ... + initialize = set + def get(self) -> int: ... + +class DoubleVar(Variable): + def __init__(self, master: Misc | None = None, value: float | None = None, name: str | None = None) -> None: ... + def set(self, value: float) -> None: ... + initialize = set + def get(self) -> float: ... + +class BooleanVar(Variable): + def __init__(self, master: Misc | None = None, value: bool | None = None, name: str | None = None) -> None: ... + def set(self, value: bool) -> None: ... + initialize = set + def get(self) -> bool: ... + +def mainloop(n: int = 0) -> None: ... + +getint = int +getdouble = float + +def getboolean(s) -> bool: ... + +_Ts = TypeVarTuple("_Ts") + +@type_check_only +class _GridIndexInfo(TypedDict, total=False): + minsize: float | str + pad: float | str + uniform: str | None + weight: int + +@type_check_only +class _BusyInfo(TypedDict): + cursor: _Cursor + +class Misc: + master: Misc | None + tk: _tkinter.TkappType + children: dict[str, Widget] + def destroy(self) -> None: ... + def deletecommand(self, name: str) -> None: ... + def tk_strictMotif(self, boolean=None): ... + def tk_bisque(self) -> None: ... + def tk_setPalette(self, *args, **kw) -> None: ... + def wait_variable(self, name: str | Variable = "PY_VAR") -> None: ... + waitvar = wait_variable + def wait_window(self, window: Misc | None = None) -> None: ... + def wait_visibility(self, window: Misc | None = None) -> None: ... + def setvar(self, name: str = "PY_VAR", value: str = "1") -> None: ... + def getvar(self, name: str = "PY_VAR"): ... + def getint(self, s) -> int: ... + def getdouble(self, s) -> float: ... + def getboolean(self, s) -> bool: ... + def focus_set(self) -> None: ... + focus = focus_set + def focus_force(self) -> None: ... + def focus_get(self) -> Misc | None: ... + def focus_displayof(self) -> Misc | None: ... + def focus_lastfor(self) -> Misc | None: ... + def tk_focusFollowsMouse(self) -> None: ... + def tk_focusNext(self) -> Misc | None: ... + def tk_focusPrev(self) -> Misc | None: ... + # .after() can be called without the "func" argument, but it is basically never what you want. + # It behaves like time.sleep() and freezes the GUI app. + def after(self, ms: int | Literal["idle"], func: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> str: ... 
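As the comment above warns, calling after() without a callback just blocks like time.sleep(); the usual pattern is to schedule a callback and let the event loop keep running. A small illustrative sketch (the widget and delay are arbitrary):

import tkinter as tk

root = tk.Tk()
label = tk.Label(root, text="waiting...")
label.pack()

def on_timeout() -> None:
    label.config(text="one second elapsed")

# after() returns an id string; after_cancel() accepts it if the callback should not fire.
after_id = root.after(1000, on_timeout)
# root.after_cancel(after_id)

root.mainloop()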
+ # after_idle is essentially partialmethod(after, "idle") + def after_idle(self, func: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> str: ... + def after_cancel(self, id: str) -> None: ... + if sys.version_info >= (3, 13): + def after_info(self, id: str | None = None) -> tuple[str, ...]: ... + + def bell(self, displayof: Literal[0] | Misc | None = 0) -> None: ... + if sys.version_info >= (3, 13): + # Supports options from `_BusyInfo`` + def tk_busy_cget(self, option: Literal["cursor"]) -> _Cursor: ... + busy_cget = tk_busy_cget + def tk_busy_configure(self, cnf: Any = None, **kw: Any) -> Any: ... + tk_busy_config = tk_busy_configure + busy_configure = tk_busy_configure + busy_config = tk_busy_configure + def tk_busy_current(self, pattern: str | None = None) -> list[Misc]: ... + busy_current = tk_busy_current + def tk_busy_forget(self) -> None: ... + busy_forget = tk_busy_forget + def tk_busy_hold(self, **kw: Unpack[_BusyInfo]) -> None: ... + tk_busy = tk_busy_hold + busy_hold = tk_busy_hold + busy = tk_busy_hold + def tk_busy_status(self) -> bool: ... + busy_status = tk_busy_status + + def clipboard_get(self, *, displayof: Misc = ..., type: str = ...) -> str: ... + def clipboard_clear(self, *, displayof: Misc = ...) -> None: ... + def clipboard_append(self, string: str, *, displayof: Misc = ..., format: str = ..., type: str = ...) -> None: ... + def grab_current(self): ... + def grab_release(self) -> None: ... + def grab_set(self) -> None: ... + def grab_set_global(self) -> None: ... + def grab_status(self) -> Literal["local", "global"] | None: ... + def option_add( + self, pattern, value, priority: int | Literal["widgetDefault", "startupFile", "userDefault", "interactive"] | None = None + ) -> None: ... + def option_clear(self) -> None: ... + def option_get(self, name, className): ... + def option_readfile(self, fileName, priority=None) -> None: ... + def selection_clear(self, **kw) -> None: ... + def selection_get(self, **kw): ... + def selection_handle(self, command, **kw) -> None: ... + def selection_own(self, **kw) -> None: ... + def selection_own_get(self, **kw): ... + def send(self, interp, cmd, *args): ... + def lower(self, belowThis=None) -> None: ... + def tkraise(self, aboveThis=None) -> None: ... + lift = tkraise + if sys.version_info >= (3, 11): + def info_patchlevel(self) -> _VersionInfoType: ... + + def winfo_atom(self, name: str, displayof: Literal[0] | Misc | None = 0) -> int: ... + def winfo_atomname(self, id: int, displayof: Literal[0] | Misc | None = 0) -> str: ... + def winfo_cells(self) -> int: ... + def winfo_children(self) -> list[Widget | Toplevel]: ... + def winfo_class(self) -> str: ... + def winfo_colormapfull(self) -> bool: ... + def winfo_containing(self, rootX: int, rootY: int, displayof: Literal[0] | Misc | None = 0) -> Misc | None: ... + def winfo_depth(self) -> int: ... + def winfo_exists(self) -> bool: ... + def winfo_fpixels(self, number: float | str) -> float: ... + def winfo_geometry(self) -> str: ... + def winfo_height(self) -> int: ... + def winfo_id(self) -> int: ... + def winfo_interps(self, displayof: Literal[0] | Misc | None = 0) -> tuple[str, ...]: ... + def winfo_ismapped(self) -> bool: ... + def winfo_manager(self) -> str: ... + def winfo_name(self) -> str: ... + def winfo_parent(self) -> str: ... # return value needs nametowidget() + def winfo_pathname(self, id: int, displayof: Literal[0] | Misc | None = 0): ... + def winfo_pixels(self, number: float | str) -> int: ... + def winfo_pointerx(self) -> int: ... 
+ def winfo_pointerxy(self) -> tuple[int, int]: ... + def winfo_pointery(self) -> int: ... + def winfo_reqheight(self) -> int: ... + def winfo_reqwidth(self) -> int: ... + def winfo_rgb(self, color: str) -> tuple[int, int, int]: ... + def winfo_rootx(self) -> int: ... + def winfo_rooty(self) -> int: ... + def winfo_screen(self) -> str: ... + def winfo_screencells(self) -> int: ... + def winfo_screendepth(self) -> int: ... + def winfo_screenheight(self) -> int: ... + def winfo_screenmmheight(self) -> int: ... + def winfo_screenmmwidth(self) -> int: ... + def winfo_screenvisual(self) -> str: ... + def winfo_screenwidth(self) -> int: ... + def winfo_server(self) -> str: ... + def winfo_toplevel(self) -> Tk | Toplevel: ... + def winfo_viewable(self) -> bool: ... + def winfo_visual(self) -> str: ... + def winfo_visualid(self) -> str: ... + def winfo_visualsavailable(self, includeids: bool = False) -> list[tuple[str, int]]: ... + def winfo_vrootheight(self) -> int: ... + def winfo_vrootwidth(self) -> int: ... + def winfo_vrootx(self) -> int: ... + def winfo_vrooty(self) -> int: ... + def winfo_width(self) -> int: ... + def winfo_x(self) -> int: ... + def winfo_y(self) -> int: ... + def update(self) -> None: ... + def update_idletasks(self) -> None: ... + @overload + def bindtags(self, tagList: None = None) -> tuple[str, ...]: ... + @overload + def bindtags(self, tagList: list[str] | tuple[str, ...]) -> None: ... + # bind with isinstance(func, str) doesn't return anything, but all other + # binds do. The default value of func is not str. + @overload + def bind( + self, + sequence: str | None = None, + func: Callable[[Event[Misc]], object] | None = None, + add: Literal["", "+"] | bool | None = None, + ) -> str: ... + @overload + def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + @overload + def bind(self, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + # There's no way to know what type of widget bind_all and bind_class + # callbacks will get, so those are Misc. + @overload + def bind_all( + self, + sequence: str | None = None, + func: Callable[[Event[Misc]], object] | None = None, + add: Literal["", "+"] | bool | None = None, + ) -> str: ... + @overload + def bind_all(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + @overload + def bind_all(self, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + @overload + def bind_class( + self, + className: str, + sequence: str | None = None, + func: Callable[[Event[Misc]], object] | None = None, + add: Literal["", "+"] | bool | None = None, + ) -> str: ... + @overload + def bind_class(self, className: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + @overload + def bind_class(self, className: str, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + def unbind(self, sequence: str, funcid: str | None = None) -> None: ... + def unbind_all(self, sequence: str) -> None: ... + def unbind_class(self, className: str, sequence: str) -> None: ... + def mainloop(self, n: int = 0) -> None: ... + def quit(self) -> None: ... + @property + def _windowingsystem(self) -> Literal["win32", "aqua", "x11"]: ... + def nametowidget(self, name: str | Misc | _tkinter.Tcl_Obj) -> Any: ... + def register( + self, func: Callable[..., object], subst: Callable[..., Sequence[Any]] | None = None, needcleanup: int = 1 + ) -> str: ... + def keys(self) -> list[str]: ... 
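The bind() overloads above separate callable handlers, which return a funcid string, from raw Tcl command strings, which return None. A hedged sketch of the callable form; for Widget subclasses the Event is parametrized on the bound widget class:

import tkinter as tk

root = tk.Tk()
entry = tk.Entry(root)
entry.pack()

def on_return(event: "tk.Event[tk.Entry]") -> None:
    # event.widget is the Entry that received the event.
    print("submitted:", event.widget.get())

# Binding a callable returns the funcid that unbind() takes as its second argument.
funcid = entry.bind("<Return>", on_return)
# entry.unbind("<Return>", funcid)

root.mainloop()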
+ @overload + def pack_propagate(self, flag: bool) -> bool | None: ... + @overload + def pack_propagate(self) -> None: ... + propagate = pack_propagate + def grid_anchor(self, anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] | None = None) -> None: ... + anchor = grid_anchor + @overload + def grid_bbox( + self, column: None = None, row: None = None, col2: None = None, row2: None = None + ) -> tuple[int, int, int, int] | None: ... + @overload + def grid_bbox(self, column: int, row: int, col2: None = None, row2: None = None) -> tuple[int, int, int, int] | None: ... + @overload + def grid_bbox(self, column: int, row: int, col2: int, row2: int) -> tuple[int, int, int, int] | None: ... + bbox = grid_bbox + def grid_columnconfigure( + self, + index: int | str | list[int] | tuple[int, ...], + cnf: _GridIndexInfo = {}, + *, + minsize: float | str = ..., + pad: float | str = ..., + uniform: str = ..., + weight: int = ..., + ) -> _GridIndexInfo | MaybeNone: ... # can be None but annoying to check + def grid_rowconfigure( + self, + index: int | str | list[int] | tuple[int, ...], + cnf: _GridIndexInfo = {}, + *, + minsize: float | str = ..., + pad: float | str = ..., + uniform: str = ..., + weight: int = ..., + ) -> _GridIndexInfo | MaybeNone: ... # can be None but annoying to check + columnconfigure = grid_columnconfigure + rowconfigure = grid_rowconfigure + def grid_location(self, x: float | str, y: float | str) -> tuple[int, int]: ... + @overload + def grid_propagate(self, flag: bool) -> None: ... + @overload + def grid_propagate(self) -> bool: ... + def grid_size(self) -> tuple[int, int]: ... + size = grid_size + # Widget because Toplevel or Tk is never a slave + def pack_slaves(self) -> list[Widget]: ... + def grid_slaves(self, row: int | None = None, column: int | None = None) -> list[Widget]: ... + def place_slaves(self) -> list[Widget]: ... + slaves = pack_slaves + def event_add(self, virtual: str, *sequences: str) -> None: ... + def event_delete(self, virtual: str, *sequences: str) -> None: ... + def event_generate( + self, + sequence: str, + *, + above: Misc | int = ..., + borderwidth: float | str = ..., + button: int = ..., + count: int = ..., + data: Any = ..., # anything with usable str() value + delta: int = ..., + detail: str = ..., + focus: bool = ..., + height: float | str = ..., + keycode: int = ..., + keysym: str = ..., + mode: str = ..., + override: bool = ..., + place: Literal["PlaceOnTop", "PlaceOnBottom"] = ..., + root: Misc | int = ..., + rootx: float | str = ..., + rooty: float | str = ..., + sendevent: bool = ..., + serial: int = ..., + state: int | str = ..., + subwindow: Misc | int = ..., + time: int = ..., + warp: bool = ..., + width: float | str = ..., + when: Literal["now", "tail", "head", "mark"] = ..., + x: float | str = ..., + y: float | str = ..., + ) -> None: ... + def event_info(self, virtual: str | None = None) -> tuple[str, ...]: ... + def image_names(self) -> tuple[str, ...]: ... + def image_types(self) -> tuple[str, ...]: ... + # See #4363 and #4891 + def __setitem__(self, key: str, value: Any) -> None: ... + def __getitem__(self, key: str) -> Any: ... + def cget(self, key: str) -> Any: ... + def configure(self, cnf: Any = None) -> Any: ... + # TODO: config is an alias of configure, but adding that here creates + # conflict with the type of config in the subclasses. See #13149 + +class CallWrapper: + func: Incomplete + subst: Incomplete + widget: Incomplete + def __init__(self, func, subst, widget) -> None: ... 
+ def __call__(self, *args): ... + +class XView: + @overload + def xview(self) -> tuple[float, float]: ... + @overload + def xview(self, *args) -> None: ... + def xview_moveto(self, fraction: float) -> None: ... + @overload + def xview_scroll(self, number: int, what: Literal["units", "pages"]) -> None: ... + @overload + def xview_scroll(self, number: float | str, what: Literal["pixels"]) -> None: ... + +class YView: + @overload + def yview(self) -> tuple[float, float]: ... + @overload + def yview(self, *args) -> None: ... + def yview_moveto(self, fraction: float) -> None: ... + @overload + def yview_scroll(self, number: int, what: Literal["units", "pages"]) -> None: ... + @overload + def yview_scroll(self, number: float | str, what: Literal["pixels"]) -> None: ... + +if sys.platform == "darwin": + @type_check_only + class _WmAttributes(TypedDict): + alpha: float + fullscreen: bool + modified: bool + notify: bool + titlepath: str + topmost: bool + transparent: bool + type: str # Present, but not actually used on darwin + +elif sys.platform == "win32": + @type_check_only + class _WmAttributes(TypedDict): + alpha: float + transparentcolor: str + disabled: bool + fullscreen: bool + toolwindow: bool + topmost: bool + +else: + # X11 + @type_check_only + class _WmAttributes(TypedDict): + alpha: float + topmost: bool + zoomed: bool + fullscreen: bool + type: str + +class Wm: + @overload + def wm_aspect(self, minNumer: int, minDenom: int, maxNumer: int, maxDenom: int) -> None: ... + @overload + def wm_aspect( + self, minNumer: None = None, minDenom: None = None, maxNumer: None = None, maxDenom: None = None + ) -> tuple[int, int, int, int] | None: ... + aspect = wm_aspect + if sys.version_info >= (3, 13): + @overload + def wm_attributes(self, *, return_python_dict: Literal[False] = False) -> tuple[Any, ...]: ... + @overload + def wm_attributes(self, *, return_python_dict: Literal[True]) -> _WmAttributes: ... + + else: + @overload + def wm_attributes(self) -> tuple[Any, ...]: ... + + @overload + def wm_attributes(self, option: Literal["-alpha"], /) -> float: ... + @overload + def wm_attributes(self, option: Literal["-fullscreen"], /) -> bool: ... + @overload + def wm_attributes(self, option: Literal["-topmost"], /) -> bool: ... + if sys.platform == "darwin": + @overload + def wm_attributes(self, option: Literal["-modified"], /) -> bool: ... + @overload + def wm_attributes(self, option: Literal["-notify"], /) -> bool: ... + @overload + def wm_attributes(self, option: Literal["-titlepath"], /) -> str: ... + @overload + def wm_attributes(self, option: Literal["-transparent"], /) -> bool: ... + @overload + def wm_attributes(self, option: Literal["-type"], /) -> str: ... + elif sys.platform == "win32": + @overload + def wm_attributes(self, option: Literal["-transparentcolor"], /) -> str: ... + @overload + def wm_attributes(self, option: Literal["-disabled"], /) -> bool: ... + @overload + def wm_attributes(self, option: Literal["-toolwindow"], /) -> bool: ... + else: + # X11 + @overload + def wm_attributes(self, option: Literal["-zoomed"], /) -> bool: ... + @overload + def wm_attributes(self, option: Literal["-type"], /) -> str: ... + if sys.version_info >= (3, 13): + @overload + def wm_attributes(self, option: Literal["alpha"], /) -> float: ... + @overload + def wm_attributes(self, option: Literal["fullscreen"], /) -> bool: ... + @overload + def wm_attributes(self, option: Literal["topmost"], /) -> bool: ... 
+ if sys.platform == "darwin": + @overload + def wm_attributes(self, option: Literal["modified"], /) -> bool: ... + @overload + def wm_attributes(self, option: Literal["notify"], /) -> bool: ... + @overload + def wm_attributes(self, option: Literal["titlepath"], /) -> str: ... + @overload + def wm_attributes(self, option: Literal["transparent"], /) -> bool: ... + @overload + def wm_attributes(self, option: Literal["type"], /) -> str: ... + elif sys.platform == "win32": + @overload + def wm_attributes(self, option: Literal["transparentcolor"], /) -> str: ... + @overload + def wm_attributes(self, option: Literal["disabled"], /) -> bool: ... + @overload + def wm_attributes(self, option: Literal["toolwindow"], /) -> bool: ... + else: + # X11 + @overload + def wm_attributes(self, option: Literal["zoomed"], /) -> bool: ... + @overload + def wm_attributes(self, option: Literal["type"], /) -> str: ... + + @overload + def wm_attributes(self, option: str, /): ... + @overload + def wm_attributes(self, option: Literal["-alpha"], value: float, /) -> Literal[""]: ... + @overload + def wm_attributes(self, option: Literal["-fullscreen"], value: bool, /) -> Literal[""]: ... + @overload + def wm_attributes(self, option: Literal["-topmost"], value: bool, /) -> Literal[""]: ... + if sys.platform == "darwin": + @overload + def wm_attributes(self, option: Literal["-modified"], value: bool, /) -> Literal[""]: ... + @overload + def wm_attributes(self, option: Literal["-notify"], value: bool, /) -> Literal[""]: ... + @overload + def wm_attributes(self, option: Literal["-titlepath"], value: str, /) -> Literal[""]: ... + @overload + def wm_attributes(self, option: Literal["-transparent"], value: bool, /) -> Literal[""]: ... + elif sys.platform == "win32": + @overload + def wm_attributes(self, option: Literal["-transparentcolor"], value: str, /) -> Literal[""]: ... + @overload + def wm_attributes(self, option: Literal["-disabled"], value: bool, /) -> Literal[""]: ... + @overload + def wm_attributes(self, option: Literal["-toolwindow"], value: bool, /) -> Literal[""]: ... + else: + # X11 + @overload + def wm_attributes(self, option: Literal["-zoomed"], value: bool, /) -> Literal[""]: ... + @overload + def wm_attributes(self, option: Literal["-type"], value: str, /) -> Literal[""]: ... + + @overload + def wm_attributes(self, option: str, value, /, *__other_option_value_pairs: Any) -> Literal[""]: ... + if sys.version_info >= (3, 13): + if sys.platform == "darwin": + @overload + def wm_attributes( + self, + *, + alpha: float = ..., + fullscreen: bool = ..., + modified: bool = ..., + notify: bool = ..., + titlepath: str = ..., + topmost: bool = ..., + transparent: bool = ..., + ) -> None: ... + elif sys.platform == "win32": + @overload + def wm_attributes( + self, + *, + alpha: float = ..., + transparentcolor: str = ..., + disabled: bool = ..., + fullscreen: bool = ..., + toolwindow: bool = ..., + topmost: bool = ..., + ) -> None: ... + else: + # X11 + @overload + def wm_attributes( + self, *, alpha: float = ..., topmost: bool = ..., zoomed: bool = ..., fullscreen: bool = ..., type: str = ... + ) -> None: ... + + attributes = wm_attributes + def wm_client(self, name: str | None = None) -> str: ... + client = wm_client + @overload + def wm_colormapwindows(self) -> list[Misc]: ... + @overload + def wm_colormapwindows(self, wlist: list[Misc] | tuple[Misc, ...], /) -> None: ... + @overload + def wm_colormapwindows(self, first_wlist_item: Misc, /, *other_wlist_items: Misc) -> None: ... 
+ colormapwindows = wm_colormapwindows + def wm_command(self, value: str | None = None) -> str: ... + command = wm_command + # Some of these always return empty string, but return type is set to None to prevent accidentally using it + def wm_deiconify(self) -> None: ... + deiconify = wm_deiconify + def wm_focusmodel(self, model: Literal["active", "passive"] | None = None) -> Literal["active", "passive", ""]: ... + focusmodel = wm_focusmodel + def wm_forget(self, window: Wm) -> None: ... + forget = wm_forget + def wm_frame(self) -> str: ... + frame = wm_frame + @overload + def wm_geometry(self, newGeometry: None = None) -> str: ... + @overload + def wm_geometry(self, newGeometry: str) -> None: ... + geometry = wm_geometry + def wm_grid(self, baseWidth=None, baseHeight=None, widthInc=None, heightInc=None): ... + grid = wm_grid + def wm_group(self, pathName=None): ... + group = wm_group + def wm_iconbitmap(self, bitmap=None, default=None): ... + iconbitmap = wm_iconbitmap + def wm_iconify(self) -> None: ... + iconify = wm_iconify + def wm_iconmask(self, bitmap=None): ... + iconmask = wm_iconmask + def wm_iconname(self, newName=None) -> str: ... + iconname = wm_iconname + def wm_iconphoto(self, default: bool, image1: _PhotoImageLike | str, /, *args: _PhotoImageLike | str) -> None: ... + iconphoto = wm_iconphoto + def wm_iconposition(self, x: int | None = None, y: int | None = None) -> tuple[int, int] | None: ... + iconposition = wm_iconposition + def wm_iconwindow(self, pathName=None): ... + iconwindow = wm_iconwindow + def wm_manage(self, widget) -> None: ... + manage = wm_manage + @overload + def wm_maxsize(self, width: None = None, height: None = None) -> tuple[int, int]: ... + @overload + def wm_maxsize(self, width: int, height: int) -> None: ... + maxsize = wm_maxsize + @overload + def wm_minsize(self, width: None = None, height: None = None) -> tuple[int, int]: ... + @overload + def wm_minsize(self, width: int, height: int) -> None: ... + minsize = wm_minsize + @overload + def wm_overrideredirect(self, boolean: None = None) -> bool | None: ... # returns True or None + @overload + def wm_overrideredirect(self, boolean: bool) -> None: ... + overrideredirect = wm_overrideredirect + def wm_positionfrom(self, who: Literal["program", "user"] | None = None) -> Literal["", "program", "user"]: ... + positionfrom = wm_positionfrom + @overload + def wm_protocol(self, name: str, func: Callable[[], object] | str) -> None: ... + @overload + def wm_protocol(self, name: str, func: None = None) -> str: ... + @overload + def wm_protocol(self, name: None = None, func: None = None) -> tuple[str, ...]: ... + protocol = wm_protocol + @overload + def wm_resizable(self, width: None = None, height: None = None) -> tuple[bool, bool]: ... + @overload + def wm_resizable(self, width: bool, height: bool) -> None: ... + resizable = wm_resizable + def wm_sizefrom(self, who: Literal["program", "user"] | None = None) -> Literal["", "program", "user"]: ... + sizefrom = wm_sizefrom + @overload + def wm_state(self, newstate: None = None) -> str: ... + @overload + def wm_state(self, newstate: str) -> None: ... + state = wm_state + @overload + def wm_title(self, string: None = None) -> str: ... + @overload + def wm_title(self, string: str) -> None: ... + title = wm_title + @overload + def wm_transient(self, master: None = None) -> _tkinter.Tcl_Obj: ... + @overload + def wm_transient(self, master: Wm | _tkinter.Tcl_Obj) -> None: ... + transient = wm_transient + def wm_withdraw(self) -> None: ... 
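A brief sketch of the wm_* methods above as they are typically called on a Tk or Toplevel window (all values are illustrative):

import tkinter as tk

root = tk.Tk()
root.wm_title("demo")                  # alias: root.title(...)
root.wm_geometry("400x300+100+100")    # "WIDTHxHEIGHT+X+Y"
root.wm_minsize(200, 150)
root.wm_attributes("-topmost", True)   # dash-prefixed option names, per the overloads above
root.wm_protocol("WM_DELETE_WINDOW", root.destroy)
root.mainloop()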
+ withdraw = wm_withdraw + +class Tk(Misc, Wm): + master: None + def __init__( + # Make sure to keep in sync with other functions that use the same + # args. + # use `git grep screenName` to find them + self, + screenName: str | None = None, + baseName: str | None = None, + className: str = "Tk", + useTk: bool = True, + sync: bool = False, + use: str | None = None, + ) -> None: ... + # Keep this in sync with ttktheme.ThemedTk. See issue #13858 + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: str = ..., + bd: float | str = ..., + bg: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + cursor: _Cursor = ..., + height: float | str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + menu: Menu = ..., + padx: float | str = ..., + pady: float | str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + width: float | str = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def destroy(self) -> None: ... + def readprofile(self, baseName: str, className: str) -> None: ... + report_callback_exception: Callable[[type[BaseException], BaseException, TracebackType | None], object] + # Tk has __getattr__ so that tk_instance.foo falls back to tk_instance.tk.foo + # Please keep in sync with _tkinter.TkappType. + # Some methods are intentionally missing because they are inherited from Misc instead. + def adderrorinfo(self, msg: str, /): ... + def call(self, command: Any, /, *args: Any) -> Any: ... + def createcommand(self, name: str, func, /): ... + if sys.platform != "win32": + def createfilehandler(self, file, mask: int, func, /): ... + def deletefilehandler(self, file, /) -> None: ... + + def createtimerhandler(self, milliseconds: int, func, /): ... + def dooneevent(self, flags: int = 0, /): ... + def eval(self, script: str, /) -> str: ... + def evalfile(self, fileName: str, /): ... + def exprboolean(self, s: str, /): ... + def exprdouble(self, s: str, /): ... + def exprlong(self, s: str, /): ... + def exprstring(self, s: str, /): ... + def globalgetvar(self, *args, **kwargs): ... + def globalsetvar(self, *args, **kwargs): ... + def globalunsetvar(self, *args, **kwargs): ... + def interpaddr(self) -> int: ... + def loadtk(self) -> None: ... + def record(self, script: str, /): ... + if sys.version_info < (3, 11): + @deprecated("Deprecated since Python 3.9; removed in Python 3.11. Use `splitlist()` instead.") + def split(self, arg, /): ... + + def splitlist(self, arg, /): ... + def unsetvar(self, *args, **kwargs): ... + def wantobjects(self, *args, **kwargs): ... + def willdispatch(self) -> None: ... + +def Tcl(screenName: str | None = None, baseName: str | None = None, className: str = "Tk", useTk: bool = False) -> Tk: ... + +_InMiscTotal = TypedDict("_InMiscTotal", {"in": Misc}) +_InMiscNonTotal = TypedDict("_InMiscNonTotal", {"in": Misc}, total=False) + +@type_check_only +class _PackInfo(_InMiscTotal): + # 'before' and 'after' never appear in _PackInfo + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] + expand: bool + fill: Literal["none", "x", "y", "both"] + side: Literal["left", "right", "top", "bottom"] + # Paddings come out as int or tuple of int, even though any screen units + # can be specified in pack(). 
+ ipadx: int + ipady: int + padx: int | tuple[int, int] + pady: int | tuple[int, int] + +class Pack: + # _PackInfo is not the valid type for cnf because pad stuff accepts any + # screen units instead of int only. I didn't bother to create another + # TypedDict for cnf because it appears to be a legacy thing that was + # replaced by **kwargs. + def pack_configure( + self, + cnf: Mapping[str, Any] | None = {}, + *, + after: Misc = ..., + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = ..., + before: Misc = ..., + expand: bool | Literal[0, 1] = 0, + fill: Literal["none", "x", "y", "both"] = ..., + side: Literal["left", "right", "top", "bottom"] = ..., + ipadx: float | str = ..., + ipady: float | str = ..., + padx: float | str | tuple[float | str, float | str] = ..., + pady: float | str | tuple[float | str, float | str] = ..., + in_: Misc = ..., + **kw: Any, # allow keyword argument named 'in', see #4836 + ) -> None: ... + def pack_forget(self) -> None: ... + def pack_info(self) -> _PackInfo: ... # errors if widget hasn't been packed + pack = pack_configure + forget = pack_forget + propagate = Misc.pack_propagate + +@type_check_only +class _PlaceInfo(_InMiscNonTotal): # empty dict if widget hasn't been placed + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] + bordermode: Literal["inside", "outside", "ignore"] + width: str # can be int()ed (even after e.g. widget.place(height='2.3c') or similar) + height: str # can be int()ed + x: str # can be int()ed + y: str # can be int()ed + relheight: str # can be float()ed if not empty string + relwidth: str # can be float()ed if not empty string + relx: str # can be float()ed if not empty string + rely: str # can be float()ed if not empty string + +class Place: + def place_configure( + self, + cnf: Mapping[str, Any] | None = {}, + *, + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = ..., + bordermode: Literal["inside", "outside", "ignore"] = ..., + width: float | str = ..., + height: float | str = ..., + x: float | str = ..., + y: float | str = ..., + # str allowed for compatibility with place_info() + relheight: str | float = ..., + relwidth: str | float = ..., + relx: str | float = ..., + rely: str | float = ..., + in_: Misc = ..., + **kw: Any, # allow keyword argument named 'in', see #4836 + ) -> None: ... + def place_forget(self) -> None: ... + def place_info(self) -> _PlaceInfo: ... + place = place_configure + info = place_info + +@type_check_only +class _GridInfo(_InMiscNonTotal): # empty dict if widget hasn't been gridded + column: int + columnspan: int + row: int + rowspan: int + ipadx: int + ipady: int + padx: int | tuple[int, int] + pady: int | tuple[int, int] + sticky: str # consists of letters 'n', 's', 'w', 'e', no repeats, may be empty + +class Grid: + def grid_configure( + self, + cnf: Mapping[str, Any] | None = {}, + *, + column: int = ..., + columnspan: int = ..., + row: int = ..., + rowspan: int = ..., + ipadx: float | str = ..., + ipady: float | str = ..., + padx: float | str | tuple[float | str, float | str] = ..., + pady: float | str | tuple[float | str, float | str] = ..., + sticky: str = ..., # consists of letters 'n', 's', 'w', 'e', may contain repeats, may be empty + in_: Misc = ..., + **kw: Any, # allow keyword argument named 'in', see #4836 + ) -> None: ... + def grid_forget(self) -> None: ... + def grid_remove(self) -> None: ... + def grid_info(self) -> _GridInfo: ... 
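The Pack, Place, and Grid mixins above supply the geometry-manager methods that Widget inherits. A short sketch; note that pack and grid must not be mixed among children of the same container, so the grid calls live in a child frame:

import tkinter as tk

root = tk.Tk()

# pack_configure() options: side, fill, expand, and paddings in screen units.
tk.Label(root, text="header").pack(side="top", fill="x", padx=5, pady=(2, 4))

form = tk.Frame(root)
form.pack(fill="both", expand=True)

tk.Label(form, text="name").grid(row=0, column=0, sticky="w")
entry = tk.Entry(form)
entry.grid(row=0, column=1, sticky="ew", ipadx=10)
form.grid_columnconfigure(1, weight=1)   # let column 1 absorb any extra width

print(entry.grid_info()["row"])          # grid_info() returns a dict shaped like _GridInfo

root.mainloop()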
+ grid = grid_configure + location = Misc.grid_location + size = Misc.grid_size + +class BaseWidget(Misc): + master: Misc + widgetName: str + def __init__(self, master, widgetName: str, cnf={}, kw={}, extra=()) -> None: ... + def destroy(self) -> None: ... + +# This class represents any widget except Toplevel or Tk. +class Widget(BaseWidget, Pack, Place, Grid): + # Allow bind callbacks to take e.g. Event[Label] instead of Event[Misc]. + # Tk and Toplevel get notified for their child widgets' events, but other + # widgets don't. + @overload + def bind( + self: _W, + sequence: str | None = None, + func: Callable[[Event[_W]], object] | None = None, + add: Literal["", "+"] | bool | None = None, + ) -> str: ... + @overload + def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + @overload + def bind(self, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + +class Toplevel(BaseWidget, Wm): + # Toplevel and Tk have the same options because they correspond to the same + # Tcl/Tk toplevel widget. For some reason, config and configure must be + # copy/pasted here instead of aliasing as 'config = Tk.config'. + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = {}, + *, + background: str = ..., + bd: float | str = 0, + bg: str = ..., + border: float | str = 0, + borderwidth: float | str = 0, + class_: str = "Toplevel", + colormap: Literal["new", ""] | Misc = "", + container: bool = False, + cursor: _Cursor = "", + height: float | str = 0, + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = 0, + menu: Menu = ..., + name: str = ..., + padx: float | str = 0, + pady: float | str = 0, + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = "flat", + screen: str = "", # can't be changed after creating widget + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = 0, + use: int = ..., + visual: str | tuple[str, int] = "", + width: float | str = 0, + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: str = ..., + bd: float | str = ..., + bg: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + cursor: _Cursor = ..., + height: float | str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + menu: Menu = ..., + padx: float | str = ..., + pady: float | str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + width: float | str = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
+ config = configure + +class Button(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = {}, + *, + activebackground: str = ..., + activeforeground: str = ..., + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = "center", + background: str = ..., + bd: float | str = ..., # same as borderwidth + bg: str = ..., # same as background + bitmap: str = "", + border: float | str = ..., # same as borderwidth + borderwidth: float | str = ..., + command: str | Callable[[], Any] = "", + compound: Literal["top", "left", "center", "right", "bottom", "none"] = "none", + cursor: _Cursor = "", + default: Literal["normal", "active", "disabled"] = "disabled", + disabledforeground: str = ..., + fg: str = ..., # same as foreground + font: _FontDescription = "TkDefaultFont", + foreground: str = ..., + # width and height must be int for buttons containing just text, but + # buttons with an image accept any screen units. + height: float | str = 0, + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = 1, + image: _Image | str = "", + justify: Literal["left", "center", "right"] = "center", + name: str = ..., + overrelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove", ""] = "", + padx: float | str = ..., + pady: float | str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + repeatdelay: int = ..., + repeatinterval: int = ..., + state: Literal["normal", "active", "disabled"] = "normal", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", + text: float | str = "", + # We allow the textvariable to be any Variable, not necessarily + # StringVar. This is useful for e.g. a button that displays the value + # of an IntVar. + textvariable: Variable = ..., + underline: int = -1, + width: float | str = 0, + wraplength: float | str = 0, + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activebackground: str = ..., + activeforeground: str = ..., + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = ..., + background: str = ..., + bd: float | str = ..., + bg: str = ..., + bitmap: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + command: str | Callable[[], Any] = ..., + compound: Literal["top", "left", "center", "right", "bottom", "none"] = ..., + cursor: _Cursor = ..., + default: Literal["normal", "active", "disabled"] = ..., + disabledforeground: str = ..., + fg: str = ..., + font: _FontDescription = ..., + foreground: str = ..., + height: float | str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + image: _Image | str = ..., + justify: Literal["left", "center", "right"] = ..., + overrelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove", ""] = ..., + padx: float | str = ..., + pady: float | str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + repeatdelay: int = ..., + repeatinterval: int = ..., + state: Literal["normal", "active", "disabled"] = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + text: float | str = ..., + textvariable: Variable = ..., + underline: int = ..., + width: float | str = ..., + wraplength: float | str = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
+ config = configure + def flash(self) -> None: ... + def invoke(self) -> Any: ... + +class Canvas(Widget, XView, YView): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = {}, + *, + background: str = ..., + bd: float | str = 0, + bg: str = ..., + border: float | str = 0, + borderwidth: float | str = 0, + closeenough: float = 1.0, + confine: bool = True, + cursor: _Cursor = "", + height: float | str = ..., # see COORDINATES in canvas manual page + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + insertbackground: str = ..., + insertborderwidth: float | str = 0, + insertofftime: int = 300, + insertontime: int = 600, + insertwidth: float | str = 2, + name: str = ..., + offset=..., # undocumented + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = "flat", + # Setting scrollregion to None doesn't reset it back to empty, + # but setting it to () does. + scrollregion: tuple[float | str, float | str, float | str, float | str] | tuple[()] = (), + selectbackground: str = ..., + selectborderwidth: float | str = 1, + selectforeground: str = ..., + # man page says that state can be 'hidden', but it can't + state: Literal["normal", "disabled"] = "normal", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", + width: float | str = ..., + xscrollcommand: str | Callable[[float, float], object] = "", + xscrollincrement: float | str = 0, + yscrollcommand: str | Callable[[float, float], object] = "", + yscrollincrement: float | str = 0, + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: str = ..., + bd: float | str = ..., + bg: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + closeenough: float = ..., + confine: bool = ..., + cursor: _Cursor = ..., + height: float | str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + insertbackground: str = ..., + insertborderwidth: float | str = ..., + insertofftime: int = ..., + insertontime: int = ..., + insertwidth: float | str = ..., + offset=..., # undocumented + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + scrollregion: tuple[float | str, float | str, float | str, float | str] | tuple[()] = ..., + selectbackground: str = ..., + selectborderwidth: float | str = ..., + selectforeground: str = ..., + state: Literal["normal", "disabled"] = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + width: float | str = ..., + xscrollcommand: str | Callable[[float, float], object] = ..., + xscrollincrement: float | str = ..., + yscrollcommand: str | Callable[[float, float], object] = ..., + yscrollincrement: float | str = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def addtag(self, *args): ... # internal method + def addtag_above(self, newtag: str, tagOrId: str | int) -> None: ... + def addtag_all(self, newtag: str) -> None: ... + def addtag_below(self, newtag: str, tagOrId: str | int) -> None: ... + def addtag_closest( + self, newtag: str, x: float | str, y: float | str, halo: float | str | None = None, start: str | int | None = None + ) -> None: ... + def addtag_enclosed(self, newtag: str, x1: float | str, y1: float | str, x2: float | str, y2: float | str) -> None: ... 
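Following the Canvas options above (and the comment that scrollregion is cleared with an empty tuple, not None), a small illustrative sketch wiring a Canvas to a Scrollbar:

import tkinter as tk

root = tk.Tk()
canvas = tk.Canvas(root, width=200, height=150, scrollregion=(0, 0, 1000, 1000))
bar = tk.Scrollbar(root, orient="vertical", command=canvas.yview)
canvas.configure(yscrollcommand=bar.set)

bar.pack(side="right", fill="y")
canvas.pack(side="left", fill="both", expand=True)

# Resetting the scroll region: an empty tuple clears it, None leaves it unchanged.
canvas.configure(scrollregion=())

root.mainloop()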
+ def addtag_overlapping(self, newtag: str, x1: float | str, y1: float | str, x2: float | str, y2: float | str) -> None: ... + def addtag_withtag(self, newtag: str, tagOrId: str | int) -> None: ... + def find(self, *args): ... # internal method + def find_above(self, tagOrId: str | int) -> tuple[int, ...]: ... + def find_all(self) -> tuple[int, ...]: ... + def find_below(self, tagOrId: str | int) -> tuple[int, ...]: ... + def find_closest( + self, x: float | str, y: float | str, halo: float | str | None = None, start: str | int | None = None + ) -> tuple[int, ...]: ... + def find_enclosed(self, x1: float | str, y1: float | str, x2: float | str, y2: float | str) -> tuple[int, ...]: ... + def find_overlapping(self, x1: float | str, y1: float | str, x2: float | str, y2: float) -> tuple[int, ...]: ... + def find_withtag(self, tagOrId: str | int) -> tuple[int, ...]: ... + # Incompatible with Misc.bbox(), tkinter violates LSP + def bbox(self, *args: str | int) -> tuple[int, int, int, int]: ... # type: ignore[override] + @overload + def tag_bind( + self, + tagOrId: str | int, + sequence: str | None = None, + func: Callable[[Event[Canvas]], object] | None = None, + add: Literal["", "+"] | bool | None = None, + ) -> str: ... + @overload + def tag_bind( + self, tagOrId: str | int, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None + ) -> None: ... + @overload + def tag_bind(self, tagOrId: str | int, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + def tag_unbind(self, tagOrId: str | int, sequence: str, funcid: str | None = None) -> None: ... + def canvasx(self, screenx, gridspacing=None): ... + def canvasy(self, screeny, gridspacing=None): ... + @overload + def coords(self, tagOrId: str | int, /) -> list[float]: ... + @overload + def coords(self, tagOrId: str | int, args: list[int] | list[float] | tuple[float, ...], /) -> None: ... + @overload + def coords(self, tagOrId: str | int, x1: float, y1: float, /, *args: float) -> None: ... + # create_foo() methods accept coords as a list or tuple, or as separate arguments. + # Lists and tuples can be flat as in [1, 2, 3, 4], or nested as in [(1, 2), (3, 4)]. + # Keyword arguments should be the same in all overloads of each method. + def create_arc(self, *args, **kw) -> int: ... + def create_bitmap(self, *args, **kw) -> int: ... + def create_image(self, *args, **kw) -> int: ... + @overload + def create_line( + self, + x0: float, + y0: float, + x1: float, + y1: float, + /, + *, + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activestipple: str = ..., + activewidth: float | str = ..., + arrow: Literal["first", "last", "both"] = ..., + arrowshape: tuple[float, float, float] = ..., + capstyle: Literal["round", "projecting", "butt"] = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., + dashoffset: float | str = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledstipple: str = ..., + disabledwidth: float | str = ..., + fill: str = ..., + joinstyle: Literal["round", "bevel", "miter"] = ..., + offset: float | str = ..., + smooth: bool = ..., + splinesteps: float = ..., + state: Literal["normal", "hidden", "disabled"] = ..., + stipple: str = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: float | str = ..., + ) -> int: ... + @overload + def create_line( + self, + xy_pair_0: tuple[float, float], + xy_pair_1: tuple[float, float], + /, + *, + activedash: str | int | list[int] | tuple[int, ...] 
= ..., + activefill: str = ..., + activestipple: str = ..., + activewidth: float | str = ..., + arrow: Literal["first", "last", "both"] = ..., + arrowshape: tuple[float, float, float] = ..., + capstyle: Literal["round", "projecting", "butt"] = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., + dashoffset: float | str = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledstipple: str = ..., + disabledwidth: float | str = ..., + fill: str = ..., + joinstyle: Literal["round", "bevel", "miter"] = ..., + offset: float | str = ..., + smooth: bool = ..., + splinesteps: float = ..., + state: Literal["normal", "hidden", "disabled"] = ..., + stipple: str = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: float | str = ..., + ) -> int: ... + @overload + def create_line( + self, + coords: ( + tuple[float, float, float, float] + | tuple[tuple[float, float], tuple[float, float]] + | list[int] + | list[float] + | list[tuple[int, int]] + | list[tuple[float, float]] + ), + /, + *, + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activestipple: str = ..., + activewidth: float | str = ..., + arrow: Literal["first", "last", "both"] = ..., + arrowshape: tuple[float, float, float] = ..., + capstyle: Literal["round", "projecting", "butt"] = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., + dashoffset: float | str = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledstipple: str = ..., + disabledwidth: float | str = ..., + fill: str = ..., + joinstyle: Literal["round", "bevel", "miter"] = ..., + offset: float | str = ..., + smooth: bool = ..., + splinesteps: float = ..., + state: Literal["normal", "hidden", "disabled"] = ..., + stipple: str = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: float | str = ..., + ) -> int: ... + @overload + def create_oval( + self, + x0: float, + y0: float, + x1: float, + y1: float, + /, + *, + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activeoutline: str = ..., + activeoutlinestipple: str = ..., + activestipple: str = ..., + activewidth: float | str = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., + dashoffset: float | str = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledoutline: str = ..., + disabledoutlinestipple: str = ..., + disabledstipple: str = ..., + disabledwidth: float | str = ..., + fill: str = ..., + offset: float | str = ..., + outline: str = ..., + outlineoffset: float | str = ..., + outlinestipple: str = ..., + state: Literal["normal", "hidden", "disabled"] = ..., + stipple: str = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: float | str = ..., + ) -> int: ... + @overload + def create_oval( + self, + xy_pair_0: tuple[float, float], + xy_pair_1: tuple[float, float], + /, + *, + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activeoutline: str = ..., + activeoutlinestipple: str = ..., + activestipple: str = ..., + activewidth: float | str = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., + dashoffset: float | str = ..., + disableddash: str | int | list[int] | tuple[int, ...] 
= ..., + disabledfill: str = ..., + disabledoutline: str = ..., + disabledoutlinestipple: str = ..., + disabledstipple: str = ..., + disabledwidth: float | str = ..., + fill: str = ..., + offset: float | str = ..., + outline: str = ..., + outlineoffset: float | str = ..., + outlinestipple: str = ..., + state: Literal["normal", "hidden", "disabled"] = ..., + stipple: str = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: float | str = ..., + ) -> int: ... + @overload + def create_oval( + self, + coords: ( + tuple[float, float, float, float] + | tuple[tuple[float, float], tuple[float, float]] + | list[int] + | list[float] + | list[tuple[int, int]] + | list[tuple[float, float]] + ), + /, + *, + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activeoutline: str = ..., + activeoutlinestipple: str = ..., + activestipple: str = ..., + activewidth: float | str = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., + dashoffset: float | str = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledoutline: str = ..., + disabledoutlinestipple: str = ..., + disabledstipple: str = ..., + disabledwidth: float | str = ..., + fill: str = ..., + offset: float | str = ..., + outline: str = ..., + outlineoffset: float | str = ..., + outlinestipple: str = ..., + state: Literal["normal", "hidden", "disabled"] = ..., + stipple: str = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: float | str = ..., + ) -> int: ... + @overload + def create_polygon( + self, + x0: float, + y0: float, + x1: float, + y1: float, + /, + *xy_pairs: float, + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activeoutline: str = ..., + activeoutlinestipple: str = ..., + activestipple: str = ..., + activewidth: float | str = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., + dashoffset: float | str = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledoutline: str = ..., + disabledoutlinestipple: str = ..., + disabledstipple: str = ..., + disabledwidth: float | str = ..., + fill: str = ..., + joinstyle: Literal["round", "bevel", "miter"] = ..., + offset: float | str = ..., + outline: str = ..., + outlineoffset: float | str = ..., + outlinestipple: str = ..., + smooth: bool = ..., + splinesteps: float = ..., + state: Literal["normal", "hidden", "disabled"] = ..., + stipple: str = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: float | str = ..., + ) -> int: ... + @overload + def create_polygon( + self, + xy_pair_0: tuple[float, float], + xy_pair_1: tuple[float, float], + /, + *xy_pairs: tuple[float, float], + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activeoutline: str = ..., + activeoutlinestipple: str = ..., + activestipple: str = ..., + activewidth: float | str = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., + dashoffset: float | str = ..., + disableddash: str | int | list[int] | tuple[int, ...] 
= ..., + disabledfill: str = ..., + disabledoutline: str = ..., + disabledoutlinestipple: str = ..., + disabledstipple: str = ..., + disabledwidth: float | str = ..., + fill: str = ..., + joinstyle: Literal["round", "bevel", "miter"] = ..., + offset: float | str = ..., + outline: str = ..., + outlineoffset: float | str = ..., + outlinestipple: str = ..., + smooth: bool = ..., + splinesteps: float = ..., + state: Literal["normal", "hidden", "disabled"] = ..., + stipple: str = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: float | str = ..., + ) -> int: ... + @overload + def create_polygon( + self, + coords: ( + tuple[float, ...] + | tuple[tuple[float, float], ...] + | list[int] + | list[float] + | list[tuple[int, int]] + | list[tuple[float, float]] + ), + /, + *, + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activeoutline: str = ..., + activeoutlinestipple: str = ..., + activestipple: str = ..., + activewidth: float | str = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., + dashoffset: float | str = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledoutline: str = ..., + disabledoutlinestipple: str = ..., + disabledstipple: str = ..., + disabledwidth: float | str = ..., + fill: str = ..., + joinstyle: Literal["round", "bevel", "miter"] = ..., + offset: float | str = ..., + outline: str = ..., + outlineoffset: float | str = ..., + outlinestipple: str = ..., + smooth: bool = ..., + splinesteps: float = ..., + state: Literal["normal", "hidden", "disabled"] = ..., + stipple: str = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: float | str = ..., + ) -> int: ... + @overload + def create_rectangle( + self, + x0: float, + y0: float, + x1: float, + y1: float, + /, + *, + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activeoutline: str = ..., + activeoutlinestipple: str = ..., + activestipple: str = ..., + activewidth: float | str = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., + dashoffset: float | str = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledoutline: str = ..., + disabledoutlinestipple: str = ..., + disabledstipple: str = ..., + disabledwidth: float | str = ..., + fill: str = ..., + offset: float | str = ..., + outline: str = ..., + outlineoffset: float | str = ..., + outlinestipple: str = ..., + state: Literal["normal", "hidden", "disabled"] = ..., + stipple: str = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: float | str = ..., + ) -> int: ... + @overload + def create_rectangle( + self, + xy_pair_0: tuple[float, float], + xy_pair_1: tuple[float, float], + /, + *, + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activeoutline: str = ..., + activeoutlinestipple: str = ..., + activestipple: str = ..., + activewidth: float | str = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., + dashoffset: float | str = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledoutline: str = ..., + disabledoutlinestipple: str = ..., + disabledstipple: str = ..., + disabledwidth: float | str = ..., + fill: str = ..., + offset: float | str = ..., + outline: str = ..., + outlineoffset: float | str = ..., + outlinestipple: str = ..., + state: Literal["normal", "hidden", "disabled"] = ..., + stipple: str = ..., + tags: str | list[str] | tuple[str, ...] 
= ..., + width: float | str = ..., + ) -> int: ... + @overload + def create_rectangle( + self, + coords: ( + tuple[float, float, float, float] + | tuple[tuple[float, float], tuple[float, float]] + | list[int] + | list[float] + | list[tuple[int, int]] + | list[tuple[float, float]] + ), + /, + *, + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activeoutline: str = ..., + activeoutlinestipple: str = ..., + activestipple: str = ..., + activewidth: float | str = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., + dashoffset: float | str = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledoutline: str = ..., + disabledoutlinestipple: str = ..., + disabledstipple: str = ..., + disabledwidth: float | str = ..., + fill: str = ..., + offset: float | str = ..., + outline: str = ..., + outlineoffset: float | str = ..., + outlinestipple: str = ..., + state: Literal["normal", "hidden", "disabled"] = ..., + stipple: str = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: float | str = ..., + ) -> int: ... + @overload + def create_text( + self, + x: float, + y: float, + /, + *, + activefill: str = ..., + activestipple: str = ..., + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = ..., + angle: float | str = ..., + disabledfill: str = ..., + disabledstipple: str = ..., + fill: str = ..., + font: _FontDescription = ..., + justify: Literal["left", "center", "right"] = ..., + offset: float | str = ..., + state: Literal["normal", "hidden", "disabled"] = ..., + stipple: str = ..., + tags: str | list[str] | tuple[str, ...] = ..., + text: float | str = ..., + width: float | str = ..., + ) -> int: ... + @overload + def create_text( + self, + coords: tuple[float, float] | list[int] | list[float], + /, + *, + activefill: str = ..., + activestipple: str = ..., + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = ..., + angle: float | str = ..., + disabledfill: str = ..., + disabledstipple: str = ..., + fill: str = ..., + font: _FontDescription = ..., + justify: Literal["left", "center", "right"] = ..., + offset: float | str = ..., + state: Literal["normal", "hidden", "disabled"] = ..., + stipple: str = ..., + tags: str | list[str] | tuple[str, ...] = ..., + text: float | str = ..., + width: float | str = ..., + ) -> int: ... + @overload + def create_window( + self, + x: float, + y: float, + /, + *, + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = ..., + height: float | str = ..., + state: Literal["normal", "hidden", "disabled"] = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: float | str = ..., + window: Widget = ..., + ) -> int: ... + @overload + def create_window( + self, + coords: tuple[float, float] | list[int] | list[float], + /, + *, + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = ..., + height: float | str = ..., + state: Literal["normal", "hidden", "disabled"] = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: float | str = ..., + window: Widget = ..., + ) -> int: ... + def dchars(self, *args) -> None: ... + def delete(self, *tagsOrCanvasIds: str | int) -> None: ... + @overload + def dtag(self, tag: str, tag_to_delete: str | None = ..., /) -> None: ... + @overload + def dtag(self, id: int, tag_to_delete: str, /) -> None: ... + def focus(self, *args): ... + def gettags(self, tagOrId: str | int, /) -> tuple[str, ...]: ... + def icursor(self, *args) -> None: ... 
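+ # Hedged usage note for the create_* overloads above (not part of the stub;
+ # widget names are illustrative): coordinates may be passed as flat numbers,
+ # as (x, y) pairs, or as a single flat sequence, and every form creates the
+ # same kind of item.
+ #
+ #     import tkinter
+ #     root = tkinter.Tk()
+ #     canvas = tkinter.Canvas(root)
+ #     canvas.pack()
+ #     r1 = canvas.create_rectangle(10, 10, 60, 60, fill="red")
+ #     r2 = canvas.create_rectangle((10, 70), (60, 120), fill="blue")
+ #     r3 = canvas.create_rectangle([10, 130, 60, 180], outline="green")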
+ def index(self, *args): ... + def insert(self, *args) -> None: ... + def itemcget(self, tagOrId, option): ... + # itemconfigure kwargs depend on item type, which is not known when type checking + def itemconfigure( + self, tagOrId: str | int, cnf: dict[str, Any] | None = None, **kw: Any + ) -> dict[str, tuple[str, str, str, str, str]] | None: ... + itemconfig = itemconfigure + def move(self, *args) -> None: ... + def moveto(self, tagOrId: str | int, x: Literal[""] | float = "", y: Literal[""] | float = "") -> None: ... + def postscript(self, cnf={}, **kw): ... + # tkinter does: + # lower = tag_lower + # lift = tkraise = tag_raise + # + # But mypy doesn't like aliasing here (maybe because Misc defines the same names) + def tag_lower(self, first: str | int, second: str | int | None = ..., /) -> None: ... + def lower(self, first: str | int, second: str | int | None = ..., /) -> None: ... # type: ignore[override] + def tag_raise(self, first: str | int, second: str | int | None = ..., /) -> None: ... + def tkraise(self, first: str | int, second: str | int | None = ..., /) -> None: ... # type: ignore[override] + def lift(self, first: str | int, second: str | int | None = ..., /) -> None: ... # type: ignore[override] + def scale(self, tagOrId: str | int, xOrigin: float | str, yOrigin: float | str, xScale: float, yScale: float, /) -> None: ... + def scan_mark(self, x, y) -> None: ... + def scan_dragto(self, x, y, gain: int = 10) -> None: ... + def select_adjust(self, tagOrId, index) -> None: ... + def select_clear(self) -> None: ... + def select_from(self, tagOrId, index) -> None: ... + def select_item(self): ... + def select_to(self, tagOrId, index) -> None: ... + def type(self, tagOrId: str | int) -> int | None: ... + +class Checkbutton(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = {}, + *, + activebackground: str = ..., + activeforeground: str = ..., + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = "center", + background: str = ..., + bd: float | str = ..., + bg: str = ..., + bitmap: str = "", + border: float | str = ..., + borderwidth: float | str = ..., + command: str | Callable[[], Any] = "", + compound: Literal["top", "left", "center", "right", "bottom", "none"] = "none", + cursor: _Cursor = "", + disabledforeground: str = ..., + fg: str = ..., + font: _FontDescription = "TkDefaultFont", + foreground: str = ..., + height: float | str = 0, + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = 1, + image: _Image | str = "", + indicatoron: bool = True, + justify: Literal["left", "center", "right"] = "center", + name: str = ..., + offrelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + # The checkbutton puts a value to its variable when it's checked or + # unchecked. We don't restrict the type of that value here, so + # Any-typing is fine. + # + # I think Checkbutton shouldn't be generic, because then specifying + # "any checkbutton regardless of what variable it uses" would be + # difficult, and we might run into issues just like how list[float] + # and list[int] are incompatible. Also, we would need a way to + # specify "Checkbutton not associated with any variable", which is + # done by setting variable to empty string (the default). 
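+ # A minimal, hypothetical sketch of the variable/onvalue/offvalue interplay
+ # described above (names are illustrative, not from the stub):
+ #
+ #     import tkinter
+ #     root = tkinter.Tk()
+ #     agreed = tkinter.StringVar(value="no")
+ #     cb = tkinter.Checkbutton(root, text="I agree", variable=agreed,
+ #                              onvalue="yes", offvalue="no")
+ #     cb.pack()
+ #     cb.invoke()  # toggles the button; agreed.get() is now "yes"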
+ offvalue: Any = 0, + onvalue: Any = 1, + overrelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove", ""] = "", + padx: float | str = 1, + pady: float | str = 1, + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = "flat", + selectcolor: str = ..., + selectimage: _Image | str = "", + state: Literal["normal", "active", "disabled"] = "normal", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", + text: float | str = "", + textvariable: Variable = ..., + tristateimage: _Image | str = "", + tristatevalue: Any = "", + underline: int = -1, + variable: Variable | Literal[""] = ..., + width: float | str = 0, + wraplength: float | str = 0, + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activebackground: str = ..., + activeforeground: str = ..., + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = ..., + background: str = ..., + bd: float | str = ..., + bg: str = ..., + bitmap: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + command: str | Callable[[], Any] = ..., + compound: Literal["top", "left", "center", "right", "bottom", "none"] = ..., + cursor: _Cursor = ..., + disabledforeground: str = ..., + fg: str = ..., + font: _FontDescription = ..., + foreground: str = ..., + height: float | str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + image: _Image | str = ..., + indicatoron: bool = ..., + justify: Literal["left", "center", "right"] = ..., + offrelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + offvalue: Any = ..., + onvalue: Any = ..., + overrelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove", ""] = ..., + padx: float | str = ..., + pady: float | str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + selectcolor: str = ..., + selectimage: _Image | str = ..., + state: Literal["normal", "active", "disabled"] = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + text: float | str = ..., + textvariable: Variable = ..., + tristateimage: _Image | str = ..., + tristatevalue: Any = ..., + underline: int = ..., + variable: Variable | Literal[""] = ..., + width: float | str = ..., + wraplength: float | str = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def deselect(self) -> None: ... + def flash(self) -> None: ... + def invoke(self) -> Any: ... + def select(self) -> None: ... + def toggle(self) -> None: ... + +class Entry(Widget, XView): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = {}, + *, + background: str = ..., + bd: float | str = ..., + bg: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + cursor: _Cursor = "xterm", + disabledbackground: str = ..., + disabledforeground: str = ..., + exportselection: bool = True, + fg: str = ..., + font: _FontDescription = "TkTextFont", + foreground: str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + insertbackground: str = ..., + insertborderwidth: float | str = 0, + insertofftime: int = 300, + insertontime: int = 600, + insertwidth: float | str = ..., + invalidcommand: str | list[str] | tuple[str, ...] 
| Callable[[], bool] = "", + invcmd: str | list[str] | tuple[str, ...] | Callable[[], bool] = "", # same as invalidcommand + justify: Literal["left", "center", "right"] = "left", + name: str = ..., + readonlybackground: str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = "sunken", + selectbackground: str = ..., + selectborderwidth: float | str = ..., + selectforeground: str = ..., + show: str = "", + state: Literal["normal", "disabled", "readonly"] = "normal", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", + textvariable: Variable = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = "none", + validatecommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = "", + vcmd: str | list[str] | tuple[str, ...] | Callable[[], bool] = "", # same as validatecommand + width: int = 20, + xscrollcommand: str | Callable[[float, float], object] = "", + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: str = ..., + bd: float | str = ..., + bg: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + cursor: _Cursor = ..., + disabledbackground: str = ..., + disabledforeground: str = ..., + exportselection: bool = ..., + fg: str = ..., + font: _FontDescription = ..., + foreground: str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + insertbackground: str = ..., + insertborderwidth: float | str = ..., + insertofftime: int = ..., + insertontime: int = ..., + insertwidth: float | str = ..., + invalidcommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., + invcmd: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., + justify: Literal["left", "center", "right"] = ..., + readonlybackground: str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + selectbackground: str = ..., + selectborderwidth: float | str = ..., + selectforeground: str = ..., + show: str = ..., + state: Literal["normal", "disabled", "readonly"] = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + textvariable: Variable = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., + validatecommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., + vcmd: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., + width: int = ..., + xscrollcommand: str | Callable[[float, float], object] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def delete(self, first: str | int, last: str | int | None = None) -> None: ... + def get(self) -> str: ... + def icursor(self, index: str | int) -> None: ... + def index(self, index: str | int) -> int: ... + def insert(self, index: str | int, string: str) -> None: ... + def scan_mark(self, x) -> None: ... + def scan_dragto(self, x) -> None: ... + def selection_adjust(self, index: str | int) -> None: ... + def selection_clear(self) -> None: ... # type: ignore[override] + def selection_from(self, index: str | int) -> None: ... + def selection_present(self) -> bool: ... + def selection_range(self, start: str | int, end: str | int) -> None: ... + def selection_to(self, index: str | int) -> None: ... 
+ select_adjust = selection_adjust + select_clear = selection_clear + select_from = selection_from + select_present = selection_present + select_range = selection_range + select_to = selection_to + +class Frame(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = {}, + *, + background: str = ..., + bd: float | str = 0, + bg: str = ..., + border: float | str = 0, + borderwidth: float | str = 0, + class_: str = "Frame", # can't be changed with configure() + colormap: Literal["new", ""] | Misc = "", # can't be changed with configure() + container: bool = False, # can't be changed with configure() + cursor: _Cursor = "", + height: float | str = 0, + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = 0, + name: str = ..., + padx: float | str = 0, + pady: float | str = 0, + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = "flat", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = 0, + visual: str | tuple[str, int] = "", # can't be changed with configure() + width: float | str = 0, + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: str = ..., + bd: float | str = ..., + bg: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + cursor: _Cursor = ..., + height: float | str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + padx: float | str = ..., + pady: float | str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + width: float | str = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + +class Label(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = {}, + *, + activebackground: str = ..., + activeforeground: str = ..., + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = "center", + background: str = ..., + bd: float | str = ..., + bg: str = ..., + bitmap: str = "", + border: float | str = ..., + borderwidth: float | str = ..., + compound: Literal["top", "left", "center", "right", "bottom", "none"] = "none", + cursor: _Cursor = "", + disabledforeground: str = ..., + fg: str = ..., + font: _FontDescription = "TkDefaultFont", + foreground: str = ..., + height: float | str = 0, + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = 0, + image: _Image | str = "", + justify: Literal["left", "center", "right"] = "center", + name: str = ..., + padx: float | str = 1, + pady: float | str = 1, + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = "flat", + state: Literal["normal", "active", "disabled"] = "normal", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = 0, + text: float | str = "", + textvariable: Variable = ..., + underline: int = -1, + width: float | str = 0, + wraplength: float | str = 0, + ) -> None: ... 
+ @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activebackground: str = ..., + activeforeground: str = ..., + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = ..., + background: str = ..., + bd: float | str = ..., + bg: str = ..., + bitmap: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + compound: Literal["top", "left", "center", "right", "bottom", "none"] = ..., + cursor: _Cursor = ..., + disabledforeground: str = ..., + fg: str = ..., + font: _FontDescription = ..., + foreground: str = ..., + height: float | str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + image: _Image | str = ..., + justify: Literal["left", "center", "right"] = ..., + padx: float | str = ..., + pady: float | str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + state: Literal["normal", "active", "disabled"] = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + text: float | str = ..., + textvariable: Variable = ..., + underline: int = ..., + width: float | str = ..., + wraplength: float | str = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + +class Listbox(Widget, XView, YView): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = {}, + *, + activestyle: Literal["dotbox", "none", "underline"] = ..., + background: str = ..., + bd: float | str = 1, + bg: str = ..., + border: float | str = 1, + borderwidth: float | str = 1, + cursor: _Cursor = "", + disabledforeground: str = ..., + exportselection: bool | Literal[0, 1] = 1, + fg: str = ..., + font: _FontDescription = ..., + foreground: str = ..., + height: int = 10, + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + justify: Literal["left", "center", "right"] = "left", + # There's no tkinter.ListVar, but seems like bare tkinter.Variable + # actually works for this: + # + # >>> import tkinter + # >>> lb = tkinter.Listbox() + # >>> var = lb['listvariable'] = tkinter.Variable() + # >>> var.set(['foo', 'bar', 'baz']) + # >>> lb.get(0, 'end') + # ('foo', 'bar', 'baz') + listvariable: Variable = ..., + name: str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + selectbackground: str = ..., + selectborderwidth: float | str = 0, + selectforeground: str = ..., + # from listbox man page: "The value of the [selectmode] option may be + # arbitrary, but the default bindings expect it to be either single, + # browse, multiple, or extended" + # + # I have never seen anyone setting this to something else than what + # "the default bindings expect", but let's support it anyway. + selectmode: str | Literal["single", "browse", "multiple", "extended"] = "browse", # noqa: Y051 + setgrid: bool = False, + state: Literal["normal", "disabled"] = "normal", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", + width: int = 20, + xscrollcommand: str | Callable[[float, float], object] = "", + yscrollcommand: str | Callable[[float, float], object] = "", + ) -> None: ... 
+ @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activestyle: Literal["dotbox", "none", "underline"] = ..., + background: str = ..., + bd: float | str = ..., + bg: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + cursor: _Cursor = ..., + disabledforeground: str = ..., + exportselection: bool = ..., + fg: str = ..., + font: _FontDescription = ..., + foreground: str = ..., + height: int = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + justify: Literal["left", "center", "right"] = ..., + listvariable: Variable = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + selectbackground: str = ..., + selectborderwidth: float | str = ..., + selectforeground: str = ..., + selectmode: str | Literal["single", "browse", "multiple", "extended"] = ..., # noqa: Y051 + setgrid: bool = ..., + state: Literal["normal", "disabled"] = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + width: int = ..., + xscrollcommand: str | Callable[[float, float], object] = ..., + yscrollcommand: str | Callable[[float, float], object] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def activate(self, index: str | int) -> None: ... + def bbox(self, index: str | int) -> tuple[int, int, int, int] | None: ... # type: ignore[override] + def curselection(self): ... + def delete(self, first: str | int, last: str | int | None = None) -> None: ... + def get(self, first: str | int, last: str | int | None = None): ... + def index(self, index: str | int) -> int: ... + def insert(self, index: str | int, *elements: str | float) -> None: ... + def nearest(self, y): ... + def scan_mark(self, x, y) -> None: ... + def scan_dragto(self, x, y) -> None: ... + def see(self, index: str | int) -> None: ... + def selection_anchor(self, index: str | int) -> None: ... + select_anchor = selection_anchor + def selection_clear(self, first: str | int, last: str | int | None = None) -> None: ... # type: ignore[override] + select_clear = selection_clear + def selection_includes(self, index: str | int): ... + select_includes = selection_includes + def selection_set(self, first: str | int, last: str | int | None = None) -> None: ... + select_set = selection_set + def size(self) -> int: ... # type: ignore[override] + def itemcget(self, index: str | int, option): ... + def itemconfigure(self, index: str | int, cnf=None, **kw): ... + itemconfig = itemconfigure + +class Menu(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = {}, + *, + activebackground: str = ..., + activeborderwidth: float | str = ..., + activeforeground: str = ..., + background: str = ..., + bd: float | str = ..., + bg: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + cursor: _Cursor = "arrow", + disabledforeground: str = ..., + fg: str = ..., + font: _FontDescription = ..., + foreground: str = ..., + name: str = ..., + postcommand: Callable[[], object] | str = "", + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + selectcolor: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = 0, + tearoff: bool | Literal[0, 1] = 1, + # I guess tearoffcommand arguments are supposed to be widget objects, + # but they are widget name strings. 
Use nametowidget() to handle the + # arguments of tearoffcommand. + tearoffcommand: Callable[[str, str], object] | str = "", + title: str = "", + type: Literal["menubar", "tearoff", "normal"] = "normal", + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activebackground: str = ..., + activeborderwidth: float | str = ..., + activeforeground: str = ..., + background: str = ..., + bd: float | str = ..., + bg: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + cursor: _Cursor = ..., + disabledforeground: str = ..., + fg: str = ..., + font: _FontDescription = ..., + foreground: str = ..., + postcommand: Callable[[], object] | str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + selectcolor: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + tearoff: bool = ..., + tearoffcommand: Callable[[str, str], object] | str = ..., + title: str = ..., + type: Literal["menubar", "tearoff", "normal"] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def tk_popup(self, x: int, y: int, entry: str | int = "") -> None: ... + def activate(self, index: str | int) -> None: ... + def add(self, itemType, cnf={}, **kw): ... # docstring says "Internal function." + def insert(self, index, itemType, cnf={}, **kw): ... # docstring says "Internal function." + def add_cascade( + self, + cnf: dict[str, Any] | None = {}, + *, + accelerator: str = ..., + activebackground: str = ..., + activeforeground: str = ..., + background: str = ..., + bitmap: str = ..., + columnbreak: int = ..., + command: Callable[[], object] | str = ..., + compound: Literal["top", "left", "center", "right", "bottom", "none"] = ..., + font: _FontDescription = ..., + foreground: str = ..., + hidemargin: bool = ..., + image: _Image | str = ..., + label: str = ..., + menu: Menu = ..., + state: Literal["normal", "active", "disabled"] = ..., + underline: int = ..., + ) -> None: ... + def add_checkbutton( + self, + cnf: dict[str, Any] | None = {}, + *, + accelerator: str = ..., + activebackground: str = ..., + activeforeground: str = ..., + background: str = ..., + bitmap: str = ..., + columnbreak: int = ..., + command: Callable[[], object] | str = ..., + compound: Literal["top", "left", "center", "right", "bottom", "none"] = ..., + font: _FontDescription = ..., + foreground: str = ..., + hidemargin: bool = ..., + image: _Image | str = ..., + indicatoron: bool = ..., + label: str = ..., + offvalue: Any = ..., + onvalue: Any = ..., + selectcolor: str = ..., + selectimage: _Image | str = ..., + state: Literal["normal", "active", "disabled"] = ..., + underline: int = ..., + variable: Variable = ..., + ) -> None: ... + def add_command( + self, + cnf: dict[str, Any] | None = {}, + *, + accelerator: str = ..., + activebackground: str = ..., + activeforeground: str = ..., + background: str = ..., + bitmap: str = ..., + columnbreak: int = ..., + command: Callable[[], object] | str = ..., + compound: Literal["top", "left", "center", "right", "bottom", "none"] = ..., + font: _FontDescription = ..., + foreground: str = ..., + hidemargin: bool = ..., + image: _Image | str = ..., + label: str = ..., + state: Literal["normal", "active", "disabled"] = ..., + underline: int = ..., + ) -> None: ... 
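+ # Hedged sketch of the tearoffcommand note in __init__ above (callback and
+ # variable names are illustrative): the callback receives Tk path strings,
+ # and nametowidget() can resolve the menu path back to its widget object.
+ #
+ #     import tkinter
+ #     root = tkinter.Tk()
+ #     def on_tearoff(menu_path: str, tearoff_path: str) -> None:
+ #         menu_widget = root.nametowidget(menu_path)
+ #         print("torn off:", menu_widget, tearoff_path)
+ #     menu = tkinter.Menu(root, tearoff=True, tearoffcommand=on_tearoff)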
+ def add_radiobutton( + self, + cnf: dict[str, Any] | None = {}, + *, + accelerator: str = ..., + activebackground: str = ..., + activeforeground: str = ..., + background: str = ..., + bitmap: str = ..., + columnbreak: int = ..., + command: Callable[[], object] | str = ..., + compound: Literal["top", "left", "center", "right", "bottom", "none"] = ..., + font: _FontDescription = ..., + foreground: str = ..., + hidemargin: bool = ..., + image: _Image | str = ..., + indicatoron: bool = ..., + label: str = ..., + selectcolor: str = ..., + selectimage: _Image | str = ..., + state: Literal["normal", "active", "disabled"] = ..., + underline: int = ..., + value: Any = ..., + variable: Variable = ..., + ) -> None: ... + def add_separator(self, cnf: dict[str, Any] | None = {}, *, background: str = ...) -> None: ... + def insert_cascade( + self, + index: str | int, + cnf: dict[str, Any] | None = {}, + *, + accelerator: str = ..., + activebackground: str = ..., + activeforeground: str = ..., + background: str = ..., + bitmap: str = ..., + columnbreak: int = ..., + command: Callable[[], object] | str = ..., + compound: Literal["top", "left", "center", "right", "bottom", "none"] = ..., + font: _FontDescription = ..., + foreground: str = ..., + hidemargin: bool = ..., + image: _Image | str = ..., + label: str = ..., + menu: Menu = ..., + state: Literal["normal", "active", "disabled"] = ..., + underline: int = ..., + ) -> None: ... + def insert_checkbutton( + self, + index: str | int, + cnf: dict[str, Any] | None = {}, + *, + accelerator: str = ..., + activebackground: str = ..., + activeforeground: str = ..., + background: str = ..., + bitmap: str = ..., + columnbreak: int = ..., + command: Callable[[], object] | str = ..., + compound: Literal["top", "left", "center", "right", "bottom", "none"] = ..., + font: _FontDescription = ..., + foreground: str = ..., + hidemargin: bool = ..., + image: _Image | str = ..., + indicatoron: bool = ..., + label: str = ..., + offvalue: Any = ..., + onvalue: Any = ..., + selectcolor: str = ..., + selectimage: _Image | str = ..., + state: Literal["normal", "active", "disabled"] = ..., + underline: int = ..., + variable: Variable = ..., + ) -> None: ... + def insert_command( + self, + index: str | int, + cnf: dict[str, Any] | None = {}, + *, + accelerator: str = ..., + activebackground: str = ..., + activeforeground: str = ..., + background: str = ..., + bitmap: str = ..., + columnbreak: int = ..., + command: Callable[[], object] | str = ..., + compound: Literal["top", "left", "center", "right", "bottom", "none"] = ..., + font: _FontDescription = ..., + foreground: str = ..., + hidemargin: bool = ..., + image: _Image | str = ..., + label: str = ..., + state: Literal["normal", "active", "disabled"] = ..., + underline: int = ..., + ) -> None: ... + def insert_radiobutton( + self, + index: str | int, + cnf: dict[str, Any] | None = {}, + *, + accelerator: str = ..., + activebackground: str = ..., + activeforeground: str = ..., + background: str = ..., + bitmap: str = ..., + columnbreak: int = ..., + command: Callable[[], object] | str = ..., + compound: Literal["top", "left", "center", "right", "bottom", "none"] = ..., + font: _FontDescription = ..., + foreground: str = ..., + hidemargin: bool = ..., + image: _Image | str = ..., + indicatoron: bool = ..., + label: str = ..., + selectcolor: str = ..., + selectimage: _Image | str = ..., + state: Literal["normal", "active", "disabled"] = ..., + underline: int = ..., + value: Any = ..., + variable: Variable = ..., + ) -> None: ... 
+ def insert_separator(self, index: str | int, cnf: dict[str, Any] | None = {}, *, background: str = ...) -> None: ... + def delete(self, index1: str | int, index2: str | int | None = None) -> None: ... + def entrycget(self, index: str | int, option: str) -> Any: ... + def entryconfigure( + self, index: str | int, cnf: dict[str, Any] | None = None, **kw: Any + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + entryconfig = entryconfigure + def index(self, index: str | int) -> int | None: ... + def invoke(self, index: str | int) -> Any: ... + def post(self, x: int, y: int) -> None: ... + def type(self, index: str | int) -> Literal["cascade", "checkbutton", "command", "radiobutton", "separator"]: ... + def unpost(self) -> None: ... + def xposition(self, index: str | int) -> int: ... + def yposition(self, index: str | int) -> int: ... + +class Menubutton(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = {}, + *, + activebackground: str = ..., + activeforeground: str = ..., + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = ..., + background: str = ..., + bd: float | str = ..., + bg: str = ..., + bitmap: str = "", + border: float | str = ..., + borderwidth: float | str = ..., + compound: Literal["top", "left", "center", "right", "bottom", "none"] = "none", + cursor: _Cursor = "", + direction: Literal["above", "below", "left", "right", "flush"] = "below", + disabledforeground: str = ..., + fg: str = ..., + font: _FontDescription = "TkDefaultFont", + foreground: str = ..., + height: float | str = 0, + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = 0, + image: _Image | str = "", + indicatoron: bool = ..., + justify: Literal["left", "center", "right"] = ..., + menu: Menu = ..., + name: str = ..., + padx: float | str = ..., + pady: float | str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = "flat", + state: Literal["normal", "active", "disabled"] = "normal", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = 0, + text: float | str = "", + textvariable: Variable = ..., + underline: int = -1, + width: float | str = 0, + wraplength: float | str = 0, + ) -> None: ... 
+ @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activebackground: str = ..., + activeforeground: str = ..., + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = ..., + background: str = ..., + bd: float | str = ..., + bg: str = ..., + bitmap: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + compound: Literal["top", "left", "center", "right", "bottom", "none"] = ..., + cursor: _Cursor = ..., + direction: Literal["above", "below", "left", "right", "flush"] = ..., + disabledforeground: str = ..., + fg: str = ..., + font: _FontDescription = ..., + foreground: str = ..., + height: float | str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + image: _Image | str = ..., + indicatoron: bool = ..., + justify: Literal["left", "center", "right"] = ..., + menu: Menu = ..., + padx: float | str = ..., + pady: float | str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + state: Literal["normal", "active", "disabled"] = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + text: float | str = ..., + textvariable: Variable = ..., + underline: int = ..., + width: float | str = ..., + wraplength: float | str = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + +class Message(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = {}, + *, + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = "center", + aspect: int = 150, + background: str = ..., + bd: float | str = 1, + bg: str = ..., + border: float | str = 1, + borderwidth: float | str = 1, + cursor: _Cursor = "", + fg: str = ..., + font: _FontDescription = "TkDefaultFont", + foreground: str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = 0, + justify: Literal["left", "center", "right"] = "left", + name: str = ..., + padx: float | str = ..., + pady: float | str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = "flat", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = 0, + text: float | str = "", + textvariable: Variable = ..., + # there's width but no height + width: float | str = 0, + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = ..., + aspect: int = ..., + background: str = ..., + bd: float | str = ..., + bg: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + cursor: _Cursor = ..., + fg: str = ..., + font: _FontDescription = ..., + foreground: str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + justify: Literal["left", "center", "right"] = ..., + padx: float | str = ..., + pady: float | str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + text: float | str = ..., + textvariable: Variable = ..., + width: float | str = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
+ config = configure + +class Radiobutton(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = {}, + *, + activebackground: str = ..., + activeforeground: str = ..., + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = "center", + background: str = ..., + bd: float | str = ..., + bg: str = ..., + bitmap: str = "", + border: float | str = ..., + borderwidth: float | str = ..., + command: str | Callable[[], Any] = "", + compound: Literal["top", "left", "center", "right", "bottom", "none"] = "none", + cursor: _Cursor = "", + disabledforeground: str = ..., + fg: str = ..., + font: _FontDescription = "TkDefaultFont", + foreground: str = ..., + height: float | str = 0, + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = 1, + image: _Image | str = "", + indicatoron: bool = True, + justify: Literal["left", "center", "right"] = "center", + name: str = ..., + offrelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + overrelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove", ""] = "", + padx: float | str = 1, + pady: float | str = 1, + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = "flat", + selectcolor: str = ..., + selectimage: _Image | str = "", + state: Literal["normal", "active", "disabled"] = "normal", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", + text: float | str = "", + textvariable: Variable = ..., + tristateimage: _Image | str = "", + tristatevalue: Any = "", + underline: int = -1, + value: Any = "", + variable: Variable | Literal[""] = ..., + width: float | str = 0, + wraplength: float | str = 0, + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activebackground: str = ..., + activeforeground: str = ..., + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = ..., + background: str = ..., + bd: float | str = ..., + bg: str = ..., + bitmap: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + command: str | Callable[[], Any] = ..., + compound: Literal["top", "left", "center", "right", "bottom", "none"] = ..., + cursor: _Cursor = ..., + disabledforeground: str = ..., + fg: str = ..., + font: _FontDescription = ..., + foreground: str = ..., + height: float | str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + image: _Image | str = ..., + indicatoron: bool = ..., + justify: Literal["left", "center", "right"] = ..., + offrelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + overrelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove", ""] = ..., + padx: float | str = ..., + pady: float | str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + selectcolor: str = ..., + selectimage: _Image | str = ..., + state: Literal["normal", "active", "disabled"] = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + text: float | str = ..., + textvariable: Variable = ..., + tristateimage: _Image | str = ..., + tristatevalue: Any = ..., + underline: int = ..., + value: Any = ..., + variable: Variable | Literal[""] = ..., + width: float | str = ..., + wraplength: float | str = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
+ config = configure + def deselect(self) -> None: ... + def flash(self) -> None: ... + def invoke(self) -> Any: ... + def select(self) -> None: ... + +class Scale(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = {}, + *, + activebackground: str = ..., + background: str = ..., + bd: float | str = 1, + bg: str = ..., + bigincrement: float = 0.0, + border: float | str = 1, + borderwidth: float | str = 1, + # don't know why the callback gets string instead of float + command: str | Callable[[str], object] = "", + cursor: _Cursor = "", + digits: int = 0, + fg: str = ..., + font: _FontDescription = "TkDefaultFont", + foreground: str = ..., + from_: float = 0.0, + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + label: str = "", + length: float | str = 100, + name: str = ..., + orient: Literal["horizontal", "vertical"] = "vertical", + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = "flat", + repeatdelay: int = 300, + repeatinterval: int = 100, + resolution: float = 1.0, + showvalue: bool = True, + sliderlength: float | str = 30, + sliderrelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = "raised", + state: Literal["normal", "active", "disabled"] = "normal", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", + tickinterval: float = 0.0, + to: float = 100.0, + troughcolor: str = ..., + variable: IntVar | DoubleVar = ..., + width: float | str = 15, + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activebackground: str = ..., + background: str = ..., + bd: float | str = ..., + bg: str = ..., + bigincrement: float = ..., + border: float | str = ..., + borderwidth: float | str = ..., + command: str | Callable[[str], object] = ..., + cursor: _Cursor = ..., + digits: int = ..., + fg: str = ..., + font: _FontDescription = ..., + foreground: str = ..., + from_: float = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + label: str = ..., + length: float | str = ..., + orient: Literal["horizontal", "vertical"] = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + repeatdelay: int = ..., + repeatinterval: int = ..., + resolution: float = ..., + showvalue: bool = ..., + sliderlength: float | str = ..., + sliderrelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + state: Literal["normal", "active", "disabled"] = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + tickinterval: float = ..., + to: float = ..., + troughcolor: str = ..., + variable: IntVar | DoubleVar = ..., + width: float | str = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def get(self) -> float: ... + def set(self, value) -> None: ... + def coords(self, value: float | None = None) -> tuple[int, int]: ... + def identify(self, x, y) -> Literal["", "slider", "trough1", "trough2"]: ... 
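+ # Hedged usage sketch relating to the command note in Scale.__init__ above
+ # (names are illustrative): the callback receives the new value as a string,
+ # so convert it explicitly if a float is needed.
+ #
+ #     import tkinter
+ #     root = tkinter.Tk()
+ #     def on_move(value: str) -> None:
+ #         print(float(value))
+ #     scale = tkinter.Scale(root, from_=0.0, to=100.0, command=on_move)
+ #     scale.pack()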
+ +class Scrollbar(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = {}, + *, + activebackground: str = ..., + activerelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = "raised", + background: str = ..., + bd: float | str = ..., + bg: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + # There are many ways how the command may get called. Search for + # 'SCROLLING COMMANDS' in scrollbar man page. There doesn't seem to + # be any way to specify an overloaded callback function, so we say + # that it can take any args while it can't in reality. + command: Callable[..., tuple[float, float] | None] | str = "", + cursor: _Cursor = "", + elementborderwidth: float | str = -1, + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = 0, + jump: bool = False, + name: str = ..., + orient: Literal["horizontal", "vertical"] = "vertical", + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + repeatdelay: int = 300, + repeatinterval: int = 100, + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", + troughcolor: str = ..., + width: float | str = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activebackground: str = ..., + activerelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + background: str = ..., + bd: float | str = ..., + bg: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + command: Callable[..., tuple[float, float] | None] | str = ..., + cursor: _Cursor = ..., + elementborderwidth: float | str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + jump: bool = ..., + orient: Literal["horizontal", "vertical"] = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + repeatdelay: int = ..., + repeatinterval: int = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + troughcolor: str = ..., + width: float | str = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def activate(self, index=None): ... + def delta(self, deltax: int, deltay: int) -> float: ... + def fraction(self, x: int, y: int) -> float: ... + def identify(self, x: int, y: int) -> Literal["arrow1", "arrow2", "slider", "trough1", "trough2", ""]: ... + def get(self) -> tuple[float, float, float, float] | tuple[float, float]: ... + def set(self, first: float | str, last: float | str) -> None: ... + +_WhatToCount: TypeAlias = Literal[ + "chars", "displaychars", "displayindices", "displaylines", "indices", "lines", "xpixels", "ypixels" +] + +class Text(Widget, XView, YView): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = {}, + *, + autoseparators: bool = True, + background: str = ..., + bd: float | str = ..., + bg: str = ..., + blockcursor: bool = False, + border: float | str = ..., + borderwidth: float | str = ..., + cursor: _Cursor = "xterm", + endline: int | Literal[""] = "", + exportselection: bool = True, + fg: str = ..., + font: _FontDescription = "TkFixedFont", + foreground: str = ..., + # width is always int, but height is allowed to be screen units. + # This doesn't make any sense to me, and this isn't documented. + # The docs seem to say that both should be integers. 
+ height: float | str = 24, + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + inactiveselectbackground: str = ..., + insertbackground: str = ..., + insertborderwidth: float | str = 0, + insertofftime: int = 300, + insertontime: int = 600, + insertunfocussed: Literal["none", "hollow", "solid"] = "none", + insertwidth: float | str = ..., + maxundo: int = 0, + name: str = ..., + padx: float | str = 1, + pady: float | str = 1, + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + selectbackground: str = ..., + selectborderwidth: float | str = ..., + selectforeground: str = ..., + setgrid: bool = False, + spacing1: float | str = 0, + spacing2: float | str = 0, + spacing3: float | str = 0, + startline: int | Literal[""] = "", + state: Literal["normal", "disabled"] = "normal", + # Literal inside Tuple doesn't actually work + tabs: float | str | tuple[float | str, ...] = "", + tabstyle: Literal["tabular", "wordprocessor"] = "tabular", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", + undo: bool = False, + width: int = 80, + wrap: Literal["none", "char", "word"] = "char", + xscrollcommand: str | Callable[[float, float], object] = "", + yscrollcommand: str | Callable[[float, float], object] = "", + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + autoseparators: bool = ..., + background: str = ..., + bd: float | str = ..., + bg: str = ..., + blockcursor: bool = ..., + border: float | str = ..., + borderwidth: float | str = ..., + cursor: _Cursor = ..., + endline: int | Literal[""] = ..., + exportselection: bool = ..., + fg: str = ..., + font: _FontDescription = ..., + foreground: str = ..., + height: float | str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + inactiveselectbackground: str = ..., + insertbackground: str = ..., + insertborderwidth: float | str = ..., + insertofftime: int = ..., + insertontime: int = ..., + insertunfocussed: Literal["none", "hollow", "solid"] = ..., + insertwidth: float | str = ..., + maxundo: int = ..., + padx: float | str = ..., + pady: float | str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + selectbackground: str = ..., + selectborderwidth: float | str = ..., + selectforeground: str = ..., + setgrid: bool = ..., + spacing1: float | str = ..., + spacing2: float | str = ..., + spacing3: float | str = ..., + startline: int | Literal[""] = ..., + state: Literal["normal", "disabled"] = ..., + tabs: float | str | tuple[float | str, ...] = ..., + tabstyle: Literal["tabular", "wordprocessor"] = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + undo: bool = ..., + width: int = ..., + wrap: Literal["none", "char", "word"] = ..., + xscrollcommand: str | Callable[[float, float], object] = ..., + yscrollcommand: str | Callable[[float, float], object] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def bbox(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> tuple[int, int, int, int] | None: ... # type: ignore[override] + def compare( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + op: Literal["<", "<=", "==", ">=", ">", "!="], + index2: str | float | _tkinter.Tcl_Obj | Widget, + ) -> bool: ... 
+ if sys.version_info >= (3, 13): + @overload + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + *, + return_ints: Literal[True], + ) -> int: ... + @overload + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg: _WhatToCount | Literal["update"], + /, + *, + return_ints: Literal[True], + ) -> int: ... + @overload + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg1: Literal["update"], + arg2: _WhatToCount, + /, + *, + return_ints: Literal[True], + ) -> int: ... + @overload + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg1: _WhatToCount, + arg2: Literal["update"], + /, + *, + return_ints: Literal[True], + ) -> int: ... + @overload + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg1: _WhatToCount, + arg2: _WhatToCount, + /, + *, + return_ints: Literal[True], + ) -> tuple[int, int]: ... + @overload + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg1: _WhatToCount | Literal["update"], + arg2: _WhatToCount | Literal["update"], + arg3: _WhatToCount | Literal["update"], + /, + *args: _WhatToCount | Literal["update"], + return_ints: Literal[True], + ) -> tuple[int, ...]: ... + @overload + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + *, + return_ints: Literal[False] = False, + ) -> tuple[int] | None: ... + @overload + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg: _WhatToCount | Literal["update"], + /, + *, + return_ints: Literal[False] = False, + ) -> tuple[int] | None: ... + @overload + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg1: Literal["update"], + arg2: _WhatToCount, + /, + *, + return_ints: Literal[False] = False, + ) -> int | None: ... + @overload + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg1: _WhatToCount, + arg2: Literal["update"], + /, + *, + return_ints: Literal[False] = False, + ) -> int | None: ... + @overload + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg1: _WhatToCount, + arg2: _WhatToCount, + /, + *, + return_ints: Literal[False] = False, + ) -> tuple[int, int]: ... + @overload + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg1: _WhatToCount | Literal["update"], + arg2: _WhatToCount | Literal["update"], + arg3: _WhatToCount | Literal["update"], + /, + *args: _WhatToCount | Literal["update"], + return_ints: Literal[False] = False, + ) -> tuple[int, ...]: ... + else: + @overload + def count( + self, index1: str | float | _tkinter.Tcl_Obj | Widget, index2: str | float | _tkinter.Tcl_Obj | Widget + ) -> tuple[int] | None: ... + @overload + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg: _WhatToCount | Literal["update"], + /, + ) -> tuple[int] | None: ... 
+ @overload + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg1: Literal["update"], + arg2: _WhatToCount, + /, + ) -> int | None: ... + @overload + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg1: _WhatToCount, + arg2: Literal["update"], + /, + ) -> int | None: ... + @overload + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg1: _WhatToCount, + arg2: _WhatToCount, + /, + ) -> tuple[int, int]: ... + @overload + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg1: _WhatToCount | Literal["update"], + arg2: _WhatToCount | Literal["update"], + arg3: _WhatToCount | Literal["update"], + /, + *args: _WhatToCount | Literal["update"], + ) -> tuple[int, ...]: ... + + @overload + def debug(self, boolean: None = None) -> bool: ... + @overload + def debug(self, boolean: bool) -> None: ... + def delete( + self, index1: str | float | _tkinter.Tcl_Obj | Widget, index2: str | float | _tkinter.Tcl_Obj | Widget | None = None + ) -> None: ... + def dlineinfo(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> tuple[int, int, int, int, int] | None: ... + @overload + def dump( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget | None = None, + command: None = None, + *, + all: bool = ..., + image: bool = ..., + mark: bool = ..., + tag: bool = ..., + text: bool = ..., + window: bool = ..., + ) -> list[tuple[str, str, str]]: ... + @overload + def dump( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget | None, + command: Callable[[str, str, str], object] | str, + *, + all: bool = ..., + image: bool = ..., + mark: bool = ..., + tag: bool = ..., + text: bool = ..., + window: bool = ..., + ) -> None: ... + @overload + def dump( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget | None = None, + *, + command: Callable[[str, str, str], object] | str, + all: bool = ..., + image: bool = ..., + mark: bool = ..., + tag: bool = ..., + text: bool = ..., + window: bool = ..., + ) -> None: ... + def edit(self, *args): ... # docstring says "Internal method" + @overload + def edit_modified(self, arg: None = None) -> bool: ... # actually returns Literal[0, 1] + @overload + def edit_modified(self, arg: bool) -> None: ... # actually returns empty string + def edit_redo(self) -> None: ... # actually returns empty string + def edit_reset(self) -> None: ... # actually returns empty string + def edit_separator(self) -> None: ... # actually returns empty string + def edit_undo(self) -> None: ... # actually returns empty string + def get( + self, index1: str | float | _tkinter.Tcl_Obj | Widget, index2: str | float | _tkinter.Tcl_Obj | Widget | None = None + ) -> str: ... + @overload + def image_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["image", "name"]) -> str: ... + @overload + def image_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["padx", "pady"]) -> int: ... + @overload + def image_cget( + self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["align"] + ) -> Literal["baseline", "bottom", "center", "top"]: ... 
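+ # Hedged usage sketch of count() as declared above (values are illustrative):
+ # by default a single option yields a one-element tuple (or None when the
+ # result is zero); on Python 3.13+, return_ints=True yields a plain int.
+ #
+ #     import tkinter
+ #     root = tkinter.Tk()
+ #     text = tkinter.Text(root)
+ #     text.insert("1.0", "hello\nworld")
+ #     result = text.count("1.0", "end", "lines")  # e.g. (2,)
+ #     # on 3.13+: n = text.count("1.0", "end", "lines", return_ints=True)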
+ @overload + def image_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: str) -> Any: ... + @overload + def image_configure( + self, index: str | float | _tkinter.Tcl_Obj | Widget, cnf: str + ) -> tuple[str, str, str, str, str | int]: ... + @overload + def image_configure( + self, + index: str | float | _tkinter.Tcl_Obj | Widget, + cnf: dict[str, Any] | None = None, + *, + align: Literal["baseline", "bottom", "center", "top"] = ..., + image: _Image | str = ..., + name: str = ..., + padx: float | str = ..., + pady: float | str = ..., + ) -> dict[str, tuple[str, str, str, str, str | int]] | None: ... + def image_create( + self, + index: str | float | _tkinter.Tcl_Obj | Widget, + cnf: dict[str, Any] | None = {}, + *, + align: Literal["baseline", "bottom", "center", "top"] = ..., + image: _Image | str = ..., + name: str = ..., + padx: float | str = ..., + pady: float | str = ..., + ) -> str: ... + def image_names(self) -> tuple[str, ...]: ... + def index(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> str: ... + def insert( + self, index: str | float | _tkinter.Tcl_Obj | Widget, chars: str, *args: str | list[str] | tuple[str, ...] + ) -> None: ... + @overload + def mark_gravity(self, markName: str, direction: None = None) -> Literal["left", "right"]: ... + @overload + def mark_gravity(self, markName: str, direction: Literal["left", "right"]) -> None: ... # actually returns empty string + def mark_names(self) -> tuple[str, ...]: ... + def mark_set(self, markName: str, index: str | float | _tkinter.Tcl_Obj | Widget) -> None: ... + def mark_unset(self, *markNames: str) -> None: ... + def mark_next(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> str | None: ... + def mark_previous(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> str | None: ... + # **kw of peer_create is same as the kwargs of Text.__init__ + def peer_create(self, newPathName: str | Text, cnf: dict[str, Any] = {}, **kw) -> None: ... + def peer_names(self) -> tuple[_tkinter.Tcl_Obj, ...]: ... + def replace( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + chars: str, + *args: str | list[str] | tuple[str, ...], + ) -> None: ... + def scan_mark(self, x: int, y: int) -> None: ... + def scan_dragto(self, x: int, y: int) -> None: ... + def search( + self, + pattern: str, + index: str | float | _tkinter.Tcl_Obj | Widget, + stopindex: str | float | _tkinter.Tcl_Obj | Widget | None = None, + forwards: bool | None = None, + backwards: bool | None = None, + exact: bool | None = None, + regexp: bool | None = None, + nocase: bool | None = None, + count: Variable | None = None, + elide: bool | None = None, + ) -> str: ... # returns empty string for not found + def see(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> None: ... + def tag_add( + self, tagName: str, index1: str | float | _tkinter.Tcl_Obj | Widget, *args: str | float | _tkinter.Tcl_Obj | Widget + ) -> None: ... + # tag_bind stuff is very similar to Canvas + @overload + def tag_bind( + self, + tagName: str, + sequence: str | None, + func: Callable[[Event[Text]], object] | None, + add: Literal["", "+"] | bool | None = None, + ) -> str: ... + @overload + def tag_bind(self, tagName: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + def tag_unbind(self, tagName: str, sequence: str, funcid: str | None = None) -> None: ... 
+ # allowing any string for cget instead of just Literals because there's no other way to look up tag options + def tag_cget(self, tagName: str, option: str): ... + @overload + def tag_configure( + self, + tagName: str, + cnf: dict[str, Any] | None = None, + *, + background: str = ..., + bgstipple: str = ..., + borderwidth: float | str = ..., + border: float | str = ..., # alias for borderwidth + elide: bool = ..., + fgstipple: str = ..., + font: _FontDescription = ..., + foreground: str = ..., + justify: Literal["left", "right", "center"] = ..., + lmargin1: float | str = ..., + lmargin2: float | str = ..., + lmargincolor: str = ..., + offset: float | str = ..., + overstrike: bool = ..., + overstrikefg: str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + rmargin: float | str = ..., + rmargincolor: str = ..., + selectbackground: str = ..., + selectforeground: str = ..., + spacing1: float | str = ..., + spacing2: float | str = ..., + spacing3: float | str = ..., + tabs: Any = ..., # the exact type is kind of complicated, see manual page + tabstyle: Literal["tabular", "wordprocessor"] = ..., + underline: bool = ..., + underlinefg: str = ..., + wrap: Literal["none", "char", "word"] = ..., # be careful with "none" vs None + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def tag_configure(self, tagName: str, cnf: str) -> tuple[str, str, str, Any, Any]: ... + tag_config = tag_configure + def tag_delete(self, first_tag_name: str, /, *tagNames: str) -> None: ... # error if no tag names given + def tag_lower(self, tagName: str, belowThis: str | None = None) -> None: ... + def tag_names(self, index: str | float | _tkinter.Tcl_Obj | Widget | None = None) -> tuple[str, ...]: ... + def tag_nextrange( + self, + tagName: str, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget | None = None, + ) -> tuple[str, str] | tuple[()]: ... + def tag_prevrange( + self, + tagName: str, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget | None = None, + ) -> tuple[str, str] | tuple[()]: ... + def tag_raise(self, tagName: str, aboveThis: str | None = None) -> None: ... + def tag_ranges(self, tagName: str) -> tuple[_tkinter.Tcl_Obj, ...]: ... + # tag_remove and tag_delete are different + def tag_remove( + self, + tagName: str, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget | None = None, + ) -> None: ... + @overload + def window_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["padx", "pady"]) -> int: ... + @overload + def window_cget( + self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["stretch"] + ) -> bool: ... # actually returns Literal[0, 1] + @overload + def window_cget( + self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["align"] + ) -> Literal["baseline", "bottom", "center", "top"]: ... + @overload # window is set to a widget, but read as the string name. + def window_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["create", "window"]) -> str: ... + @overload + def window_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: str) -> Any: ... + @overload + def window_configure( + self, index: str | float | _tkinter.Tcl_Obj | Widget, cnf: str + ) -> tuple[str, str, str, str, str | int]: ... 
+ @overload + def window_configure( + self, + index: str | float | _tkinter.Tcl_Obj | Widget, + cnf: dict[str, Any] | None = None, + *, + align: Literal["baseline", "bottom", "center", "top"] = ..., + create: str = ..., + padx: float | str = ..., + pady: float | str = ..., + stretch: bool | Literal[0, 1] = ..., + window: Misc | str = ..., + ) -> dict[str, tuple[str, str, str, str, str | int]] | None: ... + window_config = window_configure + def window_create( + self, + index: str | float | _tkinter.Tcl_Obj | Widget, + cnf: dict[str, Any] | None = {}, + *, + align: Literal["baseline", "bottom", "center", "top"] = ..., + create: str = ..., + padx: float | str = ..., + pady: float | str = ..., + stretch: bool | Literal[0, 1] = ..., + window: Misc | str = ..., + ) -> None: ... + def window_names(self) -> tuple[str, ...]: ... + def yview_pickplace(self, *what): ... # deprecated + +class _setit: + def __init__(self, var, value, callback=None) -> None: ... + def __call__(self, *args) -> None: ... + +# manual page: tk_optionMenu +class OptionMenu(Menubutton): + menuname: Incomplete + def __init__( + # differs from other widgets + self, + master: Misc | None, + variable: StringVar, + value: str, + *values: str, + # kwarg only from now on + command: Callable[[StringVar], object] | None = ..., + ) -> None: ... + # configure, config, cget are inherited from Menubutton + # destroy and __getitem__ are overridden, signature does not change + +# This matches tkinter's image classes (PhotoImage and BitmapImage) +# and PIL's tkinter-compatible class (PIL.ImageTk.PhotoImage), +# but not a plain PIL image that isn't tkinter compatible. +# The reason is that PIL has width and height attributes, not methods. +@type_check_only +class _Image(Protocol): + def width(self) -> int: ... + def height(self) -> int: ... + +@type_check_only +class _BitmapImageLike(_Image): ... + +@type_check_only +class _PhotoImageLike(_Image): ... + +class Image(_Image): + name: Incomplete + tk: _tkinter.TkappType + def __init__(self, imgtype, name=None, cnf={}, master: Misc | _tkinter.TkappType | None = None, **kw) -> None: ... + def __del__(self) -> None: ... + def __setitem__(self, key, value) -> None: ... + def __getitem__(self, key): ... + configure: Incomplete + config: Incomplete + def type(self): ... + +class PhotoImage(Image, _PhotoImageLike): + # This should be kept in sync with PIL.ImageTK.PhotoImage.__init__() + def __init__( + self, + name: str | None = None, + cnf: dict[str, Any] = {}, + master: Misc | _tkinter.TkappType | None = None, + *, + data: str | bytes = ..., # not same as data argument of put() + format: str = ..., + file: StrOrBytesPath = ..., + gamma: float = ..., + height: int = ..., + palette: int | str = ..., + width: int = ..., + ) -> None: ... + def configure( + self, + *, + data: str | bytes = ..., + format: str = ..., + file: StrOrBytesPath = ..., + gamma: float = ..., + height: int = ..., + palette: int | str = ..., + width: int = ..., + ) -> None: ... + config = configure + def blank(self) -> None: ... + def cget(self, option: str) -> str: ... + def __getitem__(self, key: str) -> str: ... # always string: image['height'] can be '0' + if sys.version_info >= (3, 13): + def copy( + self, + *, + from_coords: Iterable[int] | None = None, + zoom: int | tuple[int, int] | list[int] | None = None, + subsample: int | tuple[int, int] | list[int] | None = None, + ) -> PhotoImage: ... + def subsample(self, x: int, y: Literal[""] = "", *, from_coords: Iterable[int] | None = None) -> PhotoImage: ... 
+ def zoom(self, x: int, y: Literal[""] = "", *, from_coords: Iterable[int] | None = None) -> PhotoImage: ... + def copy_replace( + self, + sourceImage: PhotoImage | str, + *, + from_coords: Iterable[int] | None = None, + to: Iterable[int] | None = None, + shrink: bool = False, + zoom: int | tuple[int, int] | list[int] | None = None, + subsample: int | tuple[int, int] | list[int] | None = None, + # `None` defaults to overlay. + compositingrule: Literal["overlay", "set"] | None = None, + ) -> None: ... + else: + def copy(self) -> PhotoImage: ... + def zoom(self, x: int, y: int | Literal[""] = "") -> PhotoImage: ... + def subsample(self, x: int, y: int | Literal[""] = "") -> PhotoImage: ... + + def get(self, x: int, y: int) -> tuple[int, int, int]: ... + def put( + self, + data: ( + str + | bytes + | list[str] + | list[list[str]] + | list[tuple[str, ...]] + | tuple[str, ...] + | tuple[list[str], ...] + | tuple[tuple[str, ...], ...] + ), + to: tuple[int, int] | tuple[int, int, int, int] | None = None, + ) -> None: ... + if sys.version_info >= (3, 13): + def read( + self, + filename: StrOrBytesPath, + format: str | None = None, + *, + from_coords: Iterable[int] | None = None, + to: Iterable[int] | None = None, + shrink: bool = False, + ) -> None: ... + def write( + self, + filename: StrOrBytesPath, + format: str | None = None, + from_coords: Iterable[int] | None = None, + *, + background: str | None = None, + grayscale: bool = False, + ) -> None: ... + @overload + def data( + self, format: str, *, from_coords: Iterable[int] | None = None, background: str | None = None, grayscale: bool = False + ) -> bytes: ... + @overload + def data( + self, + format: None = None, + *, + from_coords: Iterable[int] | None = None, + background: str | None = None, + grayscale: bool = False, + ) -> tuple[str, ...]: ... + + else: + def write( + self, filename: StrOrBytesPath, format: str | None = None, from_coords: tuple[int, int] | None = None + ) -> None: ... + + def transparency_get(self, x: int, y: int) -> bool: ... + def transparency_set(self, x: int, y: int, boolean: bool) -> None: ... + +class BitmapImage(Image, _BitmapImageLike): + # This should be kept in sync with PIL.ImageTK.BitmapImage.__init__() + def __init__( + self, + name=None, + cnf: dict[str, Any] = {}, + master: Misc | _tkinter.TkappType | None = None, + *, + background: str = ..., + data: str | bytes = ..., + file: StrOrBytesPath = ..., + foreground: str = ..., + maskdata: str = ..., + maskfile: StrOrBytesPath = ..., + ) -> None: ... + +def image_names() -> tuple[str, ...]: ... +def image_types() -> tuple[str, ...]: ... + +class Spinbox(Widget, XView): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = {}, + *, + activebackground: str = ..., + background: str = ..., + bd: float | str = ..., + bg: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + buttonbackground: str = ..., + buttoncursor: _Cursor = "", + buttondownrelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + buttonuprelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + # percent substitutions don't seem to be supported, it's similar to Entry's validation stuff + command: Callable[[], object] | str | list[str] | tuple[str, ...] 
= "", + cursor: _Cursor = "xterm", + disabledbackground: str = ..., + disabledforeground: str = ..., + exportselection: bool = True, + fg: str = ..., + font: _FontDescription = "TkTextFont", + foreground: str = ..., + format: str = "", + from_: float = 0.0, + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + increment: float = 1.0, + insertbackground: str = ..., + insertborderwidth: float | str = 0, + insertofftime: int = 300, + insertontime: int = 600, + insertwidth: float | str = ..., + invalidcommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = "", + invcmd: str | list[str] | tuple[str, ...] | Callable[[], bool] = "", + justify: Literal["left", "center", "right"] = "left", + name: str = ..., + readonlybackground: str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = "sunken", + repeatdelay: int = 400, + repeatinterval: int = 100, + selectbackground: str = ..., + selectborderwidth: float | str = ..., + selectforeground: str = ..., + state: Literal["normal", "disabled", "readonly"] = "normal", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", + textvariable: Variable = ..., + to: float = 0.0, + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = "none", + validatecommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = "", + vcmd: str | list[str] | tuple[str, ...] | Callable[[], bool] = "", + values: list[str] | tuple[str, ...] = ..., + width: int = 20, + wrap: bool = False, + xscrollcommand: str | Callable[[float, float], object] = "", + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activebackground: str = ..., + background: str = ..., + bd: float | str = ..., + bg: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + buttonbackground: str = ..., + buttoncursor: _Cursor = ..., + buttondownrelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + buttonuprelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + command: Callable[[], object] | str | list[str] | tuple[str, ...] = ..., + cursor: _Cursor = ..., + disabledbackground: str = ..., + disabledforeground: str = ..., + exportselection: bool = ..., + fg: str = ..., + font: _FontDescription = ..., + foreground: str = ..., + format: str = ..., + from_: float = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + increment: float = ..., + insertbackground: str = ..., + insertborderwidth: float | str = ..., + insertofftime: int = ..., + insertontime: int = ..., + insertwidth: float | str = ..., + invalidcommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., + invcmd: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., + justify: Literal["left", "center", "right"] = ..., + readonlybackground: str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + repeatdelay: int = ..., + repeatinterval: int = ..., + selectbackground: str = ..., + selectborderwidth: float | str = ..., + selectforeground: str = ..., + state: Literal["normal", "disabled", "readonly"] = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + textvariable: Variable = ..., + to: float = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., + validatecommand: str | list[str] | tuple[str, ...] 
| Callable[[], bool] = ..., + vcmd: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., + values: list[str] | tuple[str, ...] = ..., + width: int = ..., + wrap: bool = ..., + xscrollcommand: str | Callable[[float, float], object] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def bbox(self, index) -> tuple[int, int, int, int] | None: ... # type: ignore[override] + def delete(self, first, last=None) -> Literal[""]: ... + def get(self) -> str: ... + def icursor(self, index): ... + def identify(self, x: int, y: int) -> Literal["", "buttondown", "buttonup", "entry"]: ... + def index(self, index: str | int) -> int: ... + def insert(self, index: str | int, s: str) -> Literal[""]: ... + # spinbox.invoke("asdf") gives error mentioning .invoke("none"), but it's not documented + def invoke(self, element: Literal["none", "buttonup", "buttondown"]) -> Literal[""]: ... + def scan(self, *args): ... + def scan_mark(self, x): ... + def scan_dragto(self, x): ... + def selection(self, *args) -> tuple[int, ...]: ... + def selection_adjust(self, index): ... + def selection_clear(self): ... # type: ignore[override] + def selection_element(self, element=None): ... + def selection_from(self, index: int) -> None: ... + def selection_present(self) -> None: ... + def selection_range(self, start: int, end: int) -> None: ... + def selection_to(self, index: int) -> None: ... + +class LabelFrame(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = {}, + *, + background: str = ..., + bd: float | str = 2, + bg: str = ..., + border: float | str = 2, + borderwidth: float | str = 2, + class_: str = "Labelframe", # can't be changed with configure() + colormap: Literal["new", ""] | Misc = "", # can't be changed with configure() + container: bool = False, # undocumented, can't be changed with configure() + cursor: _Cursor = "", + fg: str = ..., + font: _FontDescription = "TkDefaultFont", + foreground: str = ..., + height: float | str = 0, + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = 0, + # 'ne' and 'en' are valid labelanchors, but only 'ne' is a valid _Anchor. + labelanchor: Literal["nw", "n", "ne", "en", "e", "es", "se", "s", "sw", "ws", "w", "wn"] = "nw", + labelwidget: Misc = ..., + name: str = ..., + padx: float | str = 0, + pady: float | str = 0, + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = "groove", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = 0, + text: float | str = "", + visual: str | tuple[str, int] = "", # can't be changed with configure() + width: float | str = 0, + ) -> None: ... 
+ @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: str = ..., + bd: float | str = ..., + bg: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + cursor: _Cursor = ..., + fg: str = ..., + font: _FontDescription = ..., + foreground: str = ..., + height: float | str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: float | str = ..., + labelanchor: Literal["nw", "n", "ne", "en", "e", "es", "se", "s", "sw", "ws", "w", "wn"] = ..., + labelwidget: Misc = ..., + padx: float | str = ..., + pady: float | str = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + text: float | str = ..., + width: float | str = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + +class PanedWindow(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = {}, + *, + background: str = ..., + bd: float | str = 1, + bg: str = ..., + border: float | str = 1, + borderwidth: float | str = 1, + cursor: _Cursor = "", + handlepad: float | str = 8, + handlesize: float | str = 8, + height: float | str = "", + name: str = ..., + opaqueresize: bool = True, + orient: Literal["horizontal", "vertical"] = "horizontal", + proxybackground: str = "", + proxyborderwidth: float | str = 2, + proxyrelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = "flat", + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = "flat", + sashcursor: _Cursor = "", + sashpad: float | str = 0, + sashrelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = "flat", + sashwidth: float | str = 3, + showhandle: bool = False, + width: float | str = "", + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: str = ..., + bd: float | str = ..., + bg: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + cursor: _Cursor = ..., + handlepad: float | str = ..., + handlesize: float | str = ..., + height: float | str = ..., + opaqueresize: bool = ..., + orient: Literal["horizontal", "vertical"] = ..., + proxybackground: str = ..., + proxyborderwidth: float | str = ..., + proxyrelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + sashcursor: _Cursor = ..., + sashpad: float | str = ..., + sashrelief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + sashwidth: float | str = ..., + showhandle: bool = ..., + width: float | str = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def add(self, child: Widget, **kw) -> None: ... + def remove(self, child) -> None: ... + forget = remove # type: ignore[assignment] + def identify(self, x: int, y: int): ... + def proxy(self, *args) -> tuple[Incomplete, ...]: ... + def proxy_coord(self) -> tuple[Incomplete, ...]: ... + def proxy_forget(self) -> tuple[Incomplete, ...]: ... + def proxy_place(self, x, y) -> tuple[Incomplete, ...]: ... + def sash(self, *args) -> tuple[Incomplete, ...]: ... + def sash_coord(self, index) -> tuple[Incomplete, ...]: ... 
+ def sash_mark(self, index) -> tuple[Incomplete, ...]: ... + def sash_place(self, index, x, y) -> tuple[Incomplete, ...]: ... + def panecget(self, child, option): ... + def paneconfigure(self, tagOrId, cnf=None, **kw): ... + paneconfig = paneconfigure + def panes(self): ... + +def _test() -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/colorchooser.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/colorchooser.pyi new file mode 100644 index 0000000..d0d6de8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/colorchooser.pyi @@ -0,0 +1,12 @@ +from tkinter import Misc +from tkinter.commondialog import Dialog +from typing import ClassVar + +__all__ = ["Chooser", "askcolor"] + +class Chooser(Dialog): + command: ClassVar[str] + +def askcolor( + color: str | bytes | None = None, *, initialcolor: str = ..., parent: Misc = ..., title: str = ... +) -> tuple[None, None] | tuple[tuple[int, int, int], str]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/commondialog.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/commondialog.pyi new file mode 100644 index 0000000..6dba6bd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/commondialog.pyi @@ -0,0 +1,14 @@ +from collections.abc import Mapping +from tkinter import Misc +from typing import Any, ClassVar + +__all__ = ["Dialog"] + +class Dialog: + command: ClassVar[str | None] + master: Misc | None + # Types of options are very dynamic. They depend on the command and are + # sometimes changed to a different type. + options: Mapping[str, Any] + def __init__(self, master: Misc | None = None, **options: Any) -> None: ... + def show(self, **options: Any) -> Any: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/constants.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/constants.pyi new file mode 100644 index 0000000..fbfe8b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/constants.pyi @@ -0,0 +1,80 @@ +from typing import Final + +# These are not actually bools. 
See #4669 +NO: Final[bool] +YES: Final[bool] +TRUE: Final[bool] +FALSE: Final[bool] +ON: Final[bool] +OFF: Final[bool] +N: Final = "n" +S: Final = "s" +W: Final = "w" +E: Final = "e" +NW: Final = "nw" +SW: Final = "sw" +NE: Final = "ne" +SE: Final = "se" +NS: Final = "ns" +EW: Final = "ew" +NSEW: Final = "nsew" +CENTER: Final = "center" +NONE: Final = "none" +X: Final = "x" +Y: Final = "y" +BOTH: Final = "both" +LEFT: Final = "left" +TOP: Final = "top" +RIGHT: Final = "right" +BOTTOM: Final = "bottom" +RAISED: Final = "raised" +SUNKEN: Final = "sunken" +FLAT: Final = "flat" +RIDGE: Final = "ridge" +GROOVE: Final = "groove" +SOLID: Final = "solid" +HORIZONTAL: Final = "horizontal" +VERTICAL: Final = "vertical" +NUMERIC: Final = "numeric" +CHAR: Final = "char" +WORD: Final = "word" +BASELINE: Final = "baseline" +INSIDE: Final = "inside" +OUTSIDE: Final = "outside" +SEL: Final = "sel" +SEL_FIRST: Final = "sel.first" +SEL_LAST: Final = "sel.last" +END: Final = "end" +INSERT: Final = "insert" +CURRENT: Final = "current" +ANCHOR: Final = "anchor" +ALL: Final = "all" +NORMAL: Final = "normal" +DISABLED: Final = "disabled" +ACTIVE: Final = "active" +HIDDEN: Final = "hidden" +CASCADE: Final = "cascade" +CHECKBUTTON: Final = "checkbutton" +COMMAND: Final = "command" +RADIOBUTTON: Final = "radiobutton" +SEPARATOR: Final = "separator" +SINGLE: Final = "single" +BROWSE: Final = "browse" +MULTIPLE: Final = "multiple" +EXTENDED: Final = "extended" +DOTBOX: Final = "dotbox" +UNDERLINE: Final = "underline" +PIESLICE: Final = "pieslice" +CHORD: Final = "chord" +ARC: Final = "arc" +FIRST: Final = "first" +LAST: Final = "last" +BUTT: Final = "butt" +PROJECTING: Final = "projecting" +ROUND: Final = "round" +BEVEL: Final = "bevel" +MITER: Final = "miter" +MOVETO: Final = "moveto" +SCROLL: Final = "scroll" +UNITS: Final = "units" +PAGES: Final = "pages" diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/dialog.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/dialog.pyi new file mode 100644 index 0000000..971b64f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/dialog.pyi @@ -0,0 +1,13 @@ +from collections.abc import Mapping +from tkinter import Widget +from typing import Any, Final + +__all__ = ["Dialog"] + +DIALOG_ICON: Final = "questhead" + +class Dialog(Widget): + widgetName: str + num: int + def __init__(self, master=None, cnf: Mapping[str, Any] = {}, **kw) -> None: ... + def destroy(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/dnd.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/dnd.pyi new file mode 100644 index 0000000..521f451 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/dnd.pyi @@ -0,0 +1,19 @@ +from tkinter import Event, Misc, Tk, Widget +from typing import ClassVar, Protocol, type_check_only + +__all__ = ["dnd_start", "DndHandler"] + +@type_check_only +class _DndSource(Protocol): + def dnd_end(self, target: Widget | None, event: Event[Misc] | None, /) -> None: ... + +class DndHandler: + root: ClassVar[Tk | None] + def __init__(self, source: _DndSource, event: Event[Misc]) -> None: ... + def cancel(self, event: Event[Misc] | None = None) -> None: ... + def finish(self, event: Event[Misc] | None, commit: int = 0) -> None: ... + def on_motion(self, event: Event[Misc]) -> None: ... + def on_release(self, event: Event[Misc]) -> None: ... + def __del__(self) -> None: ... 
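A minimal usage sketch of the _DndSource protocol above, assuming the documented tkinter.dnd behaviour: any object that provides a dnd_end() method can be handed to dnd_start() (typed just below), which returns a DndHandler while a drag is in progress. The class and widget names here are illustrative only, not part of the stub.

import tkinter
import tkinter.dnd

class DraggableLabel(tkinter.Label):
    # Hypothetical example widget: starts a drag when button 1 is pressed.
    def __init__(self, master: tkinter.Misc) -> None:
        super().__init__(master, text="drag me")
        self.bind("<ButtonPress-1>", self.on_press)

    def on_press(self, event: tkinter.Event) -> None:
        # Returns a DndHandler, or None if no drag could be started.
        tkinter.dnd.dnd_start(self, event)

    def dnd_end(self, target, event) -> None:
        # Called when the drag finishes; target is the accepting widget or None.
        print("dropped on:", target)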
+ +def dnd_start(source: _DndSource, event: Event[Misc]) -> DndHandler | None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/filedialog.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/filedialog.pyi new file mode 100644 index 0000000..b6ef8f4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/filedialog.pyi @@ -0,0 +1,149 @@ +from _typeshed import Incomplete, StrOrBytesPath, StrPath +from collections.abc import Hashable, Iterable +from tkinter import Button, Entry, Event, Frame, Listbox, Misc, Scrollbar, StringVar, Toplevel, commondialog +from typing import IO, ClassVar, Literal + +__all__ = [ + "FileDialog", + "LoadFileDialog", + "SaveFileDialog", + "Open", + "SaveAs", + "Directory", + "askopenfilename", + "asksaveasfilename", + "askopenfilenames", + "askopenfile", + "askopenfiles", + "asksaveasfile", + "askdirectory", +] + +dialogstates: dict[Hashable, tuple[str, str]] + +class FileDialog: + title: str + master: Misc + directory: str | None + top: Toplevel + botframe: Frame + selection: Entry + filter: Entry + midframe: Entry + filesbar: Scrollbar + files: Listbox + dirsbar: Scrollbar + dirs: Listbox + ok_button: Button + filter_button: Button + cancel_button: Button + def __init__( + self, master: Misc, title: str | None = None + ) -> None: ... # title is usually a str or None, but e.g. int doesn't raise en exception either + how: str | None + def go(self, dir_or_file: StrPath = ".", pattern: StrPath = "*", default: StrPath = "", key: Hashable | None = None): ... + def quit(self, how: str | None = None) -> None: ... + def dirs_double_event(self, event: Event) -> None: ... + def dirs_select_event(self, event: Event) -> None: ... + def files_double_event(self, event: Event) -> None: ... + def files_select_event(self, event: Event) -> None: ... + def ok_event(self, event: Event) -> None: ... + def ok_command(self) -> None: ... + def filter_command(self, event: Event | None = None) -> None: ... + def get_filter(self) -> tuple[str, str]: ... + def get_selection(self) -> str: ... + def cancel_command(self, event: Event | None = None) -> None: ... + def set_filter(self, dir: StrPath, pat: StrPath) -> None: ... + def set_selection(self, file: StrPath) -> None: ... + +class LoadFileDialog(FileDialog): + title: str + def ok_command(self) -> None: ... + +class SaveFileDialog(FileDialog): + title: str + def ok_command(self) -> None: ... + +class _Dialog(commondialog.Dialog): ... + +class Open(_Dialog): + command: ClassVar[str] + +class SaveAs(_Dialog): + command: ClassVar[str] + +class Directory(commondialog.Dialog): + command: ClassVar[str] + +# TODO: command kwarg available on macos +def asksaveasfilename( + *, + confirmoverwrite: bool | None = True, + defaultextension: str | None = "", + filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., + initialdir: StrOrBytesPath | None = ..., + initialfile: StrOrBytesPath | None = ..., + parent: Misc | None = ..., + title: str | None = ..., + typevariable: StringVar | str | None = ..., +) -> str: ... # can be empty string +def askopenfilename( + *, + defaultextension: str | None = "", + filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., + initialdir: StrOrBytesPath | None = ..., + initialfile: StrOrBytesPath | None = ..., + parent: Misc | None = ..., + title: str | None = ..., + typevariable: StringVar | str | None = ..., +) -> str: ... 
# can be empty string +def askopenfilenames( + *, + defaultextension: str | None = "", + filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., + initialdir: StrOrBytesPath | None = ..., + initialfile: StrOrBytesPath | None = ..., + parent: Misc | None = ..., + title: str | None = ..., + typevariable: StringVar | str | None = ..., +) -> Literal[""] | tuple[str, ...]: ... +def askdirectory( + *, initialdir: StrOrBytesPath | None = ..., mustexist: bool | None = False, parent: Misc | None = ..., title: str | None = ... +) -> str: ... # can be empty string + +# TODO: If someone actually uses these, overload to have the actual return type of open(..., mode) +def asksaveasfile( + mode: str = "w", + *, + confirmoverwrite: bool | None = True, + defaultextension: str | None = "", + filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., + initialdir: StrOrBytesPath | None = ..., + initialfile: StrOrBytesPath | None = ..., + parent: Misc | None = ..., + title: str | None = ..., + typevariable: StringVar | str | None = ..., +) -> IO[Incomplete] | None: ... +def askopenfile( + mode: str = "r", + *, + defaultextension: str | None = "", + filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., + initialdir: StrOrBytesPath | None = ..., + initialfile: StrOrBytesPath | None = ..., + parent: Misc | None = ..., + title: str | None = ..., + typevariable: StringVar | str | None = ..., +) -> IO[Incomplete] | None: ... +def askopenfiles( + mode: str = "r", + *, + defaultextension: str | None = "", + filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., + initialdir: StrOrBytesPath | None = ..., + initialfile: StrOrBytesPath | None = ..., + parent: Misc | None = ..., + title: str | None = ..., + typevariable: StringVar | str | None = ..., +) -> tuple[IO[Incomplete], ...]: ... # can be empty tuple +def test() -> None: ... 
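A small usage sketch for the dialogs typed above, assuming a plain Tk root: per the annotations, the filename dialogs return an empty string when the user cancels, and the file-object dialogs return None, so both results need a guard before use.

from tkinter import Tk, filedialog

root = Tk()
root.withdraw()  # no main window needed just to show a dialog

path = filedialog.askopenfilename(
    title="Pick a file",
    filetypes=[("Text files", "*.txt"), ("All files", "*")],
)
if path:  # "" means the dialog was cancelled
    print("chosen:", path)

f = filedialog.asksaveasfile(mode="w", defaultextension=".txt")
if f is not None:  # None means the dialog was cancelled
    with f:
        f.write("hello\n")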
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/font.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/font.pyi new file mode 100644 index 0000000..327ba7a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/font.pyi @@ -0,0 +1,120 @@ +import _tkinter +import itertools +import sys +import tkinter +from typing import Any, ClassVar, Final, Literal, TypedDict, overload, type_check_only +from typing_extensions import TypeAlias, Unpack + +__all__ = ["NORMAL", "ROMAN", "BOLD", "ITALIC", "nametofont", "Font", "families", "names"] + +NORMAL: Final = "normal" +ROMAN: Final = "roman" +BOLD: Final = "bold" +ITALIC: Final = "italic" + +_FontDescription: TypeAlias = ( + str # "Helvetica 12" + | Font # A font object constructed in Python + | list[Any] # ["Helvetica", 12, BOLD] + | tuple[str] # ("Liberation Sans",) needs wrapping in tuple/list to handle spaces + # ("Liberation Sans", 12) or ("Liberation Sans", 12, "bold", "italic", "underline") + | tuple[str, int, Unpack[tuple[str, ...]]] # Any number of trailing options is permitted + | tuple[str, int, list[str] | tuple[str, ...]] # Options can also be passed as list/tuple + | _tkinter.Tcl_Obj # A font object constructed in Tcl +) + +@type_check_only +class _FontDict(TypedDict): + family: str + size: int + weight: Literal["normal", "bold"] + slant: Literal["roman", "italic"] + underline: bool + overstrike: bool + +@type_check_only +class _MetricsDict(TypedDict): + ascent: int + descent: int + linespace: int + fixed: bool + +class Font: + name: str + delete_font: bool + counter: ClassVar[itertools.count[int]] # undocumented + def __init__( + self, + # In tkinter, 'root' refers to tkinter.Tk by convention, but the code + # actually works with any tkinter widget so we use tkinter.Misc. + root: tkinter.Misc | None = None, + font: _FontDescription | None = None, + name: str | None = None, + exists: bool = False, + *, + family: str = ..., + size: int = ..., + weight: Literal["normal", "bold"] = ..., + slant: Literal["roman", "italic"] = ..., + underline: bool = ..., + overstrike: bool = ..., + ) -> None: ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __setitem__(self, key: str, value: Any) -> None: ... + @overload + def cget(self, option: Literal["family"]) -> str: ... + @overload + def cget(self, option: Literal["size"]) -> int: ... + @overload + def cget(self, option: Literal["weight"]) -> Literal["normal", "bold"]: ... + @overload + def cget(self, option: Literal["slant"]) -> Literal["roman", "italic"]: ... + @overload + def cget(self, option: Literal["underline", "overstrike"]) -> bool: ... + @overload + def cget(self, option: str) -> Any: ... + __getitem__ = cget + @overload + def actual(self, option: Literal["family"], displayof: tkinter.Misc | None = None) -> str: ... + @overload + def actual(self, option: Literal["size"], displayof: tkinter.Misc | None = None) -> int: ... + @overload + def actual(self, option: Literal["weight"], displayof: tkinter.Misc | None = None) -> Literal["normal", "bold"]: ... + @overload + def actual(self, option: Literal["slant"], displayof: tkinter.Misc | None = None) -> Literal["roman", "italic"]: ... + @overload + def actual(self, option: Literal["underline", "overstrike"], displayof: tkinter.Misc | None = None) -> bool: ... + @overload + def actual(self, option: None, displayof: tkinter.Misc | None = None) -> _FontDict: ... + @overload + def actual(self, *, displayof: tkinter.Misc | None = None) -> _FontDict: ... 
+ def config( + self, + *, + family: str = ..., + size: int = ..., + weight: Literal["normal", "bold"] = ..., + slant: Literal["roman", "italic"] = ..., + underline: bool = ..., + overstrike: bool = ..., + ) -> _FontDict | None: ... + configure = config + def copy(self) -> Font: ... + @overload + def metrics(self, option: Literal["ascent", "descent", "linespace"], /, *, displayof: tkinter.Misc | None = ...) -> int: ... + @overload + def metrics(self, option: Literal["fixed"], /, *, displayof: tkinter.Misc | None = ...) -> bool: ... + @overload + def metrics(self, *, displayof: tkinter.Misc | None = ...) -> _MetricsDict: ... + def measure(self, text: str, displayof: tkinter.Misc | None = None) -> int: ... + def __eq__(self, other: object) -> bool: ... + def __del__(self) -> None: ... + +def families(root: tkinter.Misc | None = None, displayof: tkinter.Misc | None = None) -> tuple[str, ...]: ... +def names(root: tkinter.Misc | None = None) -> tuple[str, ...]: ... + +if sys.version_info >= (3, 10): + def nametofont(name: str, root: tkinter.Misc | None = None) -> Font: ... + +else: + def nametofont(name: str) -> Font: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/messagebox.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/messagebox.pyi new file mode 100644 index 0000000..cd95f0d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/messagebox.pyi @@ -0,0 +1,98 @@ +from tkinter import Misc +from tkinter.commondialog import Dialog +from typing import ClassVar, Final, Literal + +__all__ = ["showinfo", "showwarning", "showerror", "askquestion", "askokcancel", "askyesno", "askyesnocancel", "askretrycancel"] + +ERROR: Final = "error" +INFO: Final = "info" +QUESTION: Final = "question" +WARNING: Final = "warning" +ABORTRETRYIGNORE: Final = "abortretryignore" +OK: Final = "ok" +OKCANCEL: Final = "okcancel" +RETRYCANCEL: Final = "retrycancel" +YESNO: Final = "yesno" +YESNOCANCEL: Final = "yesnocancel" +ABORT: Final = "abort" +RETRY: Final = "retry" +IGNORE: Final = "ignore" +CANCEL: Final = "cancel" +YES: Final = "yes" +NO: Final = "no" + +class Message(Dialog): + command: ClassVar[str] + +def showinfo( + title: str | None = None, + message: str | None = None, + *, + detail: str = ..., + icon: Literal["error", "info", "question", "warning"] = ..., + default: Literal["ok"] = "ok", + parent: Misc = ..., +) -> str: ... +def showwarning( + title: str | None = None, + message: str | None = None, + *, + detail: str = ..., + icon: Literal["error", "info", "question", "warning"] = ..., + default: Literal["ok"] = "ok", + parent: Misc = ..., +) -> str: ... +def showerror( + title: str | None = None, + message: str | None = None, + *, + detail: str = ..., + icon: Literal["error", "info", "question", "warning"] = ..., + default: Literal["ok"] = "ok", + parent: Misc = ..., +) -> str: ... +def askquestion( + title: str | None = None, + message: str | None = None, + *, + detail: str = ..., + icon: Literal["error", "info", "question", "warning"] = ..., + default: Literal["yes", "no"] = ..., + parent: Misc = ..., +) -> str: ... +def askokcancel( + title: str | None = None, + message: str | None = None, + *, + detail: str = ..., + icon: Literal["error", "info", "question", "warning"] = ..., + default: Literal["ok", "cancel"] = ..., + parent: Misc = ..., +) -> bool: ... 
+def askyesno( + title: str | None = None, + message: str | None = None, + *, + detail: str = ..., + icon: Literal["error", "info", "question", "warning"] = ..., + default: Literal["yes", "no"] = ..., + parent: Misc = ..., +) -> bool: ... +def askyesnocancel( + title: str | None = None, + message: str | None = None, + *, + detail: str = ..., + icon: Literal["error", "info", "question", "warning"] = ..., + default: Literal["cancel", "yes", "no"] = ..., + parent: Misc = ..., +) -> bool | None: ... +def askretrycancel( + title: str | None = None, + message: str | None = None, + *, + detail: str = ..., + icon: Literal["error", "info", "question", "warning"] = ..., + default: Literal["retry", "cancel"] = ..., + parent: Misc = ..., +) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/scrolledtext.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/scrolledtext.pyi new file mode 100644 index 0000000..6f1abc7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/scrolledtext.pyi @@ -0,0 +1,9 @@ +from tkinter import Frame, Misc, Scrollbar, Text + +__all__ = ["ScrolledText"] + +# The methods from Pack, Place, and Grid are dynamically added over the parent's impls +class ScrolledText(Text): + frame: Frame + vbar: Scrollbar + def __init__(self, master: Misc | None = None, **kwargs) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/simpledialog.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/simpledialog.pyi new file mode 100644 index 0000000..45dce21 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/simpledialog.pyi @@ -0,0 +1,54 @@ +from tkinter import Event, Frame, Misc, Toplevel + +class Dialog(Toplevel): + def __init__(self, parent: Misc | None, title: str | None = None) -> None: ... + def body(self, master: Frame) -> Misc | None: ... + def buttonbox(self) -> None: ... + def ok(self, event: Event[Misc] | None = None) -> None: ... + def cancel(self, event: Event[Misc] | None = None) -> None: ... + def validate(self) -> bool: ... + def apply(self) -> None: ... + +class SimpleDialog: + def __init__( + self, + master: Misc | None, + text: str = "", + buttons: list[str] = [], + default: int | None = None, + cancel: int | None = None, + title: str | None = None, + class_: str | None = None, + ) -> None: ... + def go(self) -> int | None: ... + def return_event(self, event: Event[Misc]) -> None: ... + def wm_delete_window(self) -> None: ... + def done(self, num: int) -> None: ... + +def askfloat( + title: str | None, + prompt: str, + *, + initialvalue: float | None = ..., + minvalue: float | None = ..., + maxvalue: float | None = ..., + parent: Misc | None = ..., +) -> float | None: ... +def askinteger( + title: str | None, + prompt: str, + *, + initialvalue: int | None = ..., + minvalue: int | None = ..., + maxvalue: int | None = ..., + parent: Misc | None = ..., +) -> int | None: ... +def askstring( + title: str | None, + prompt: str, + *, + initialvalue: str | None = ..., + show: str | None = ..., + # minvalue/maxvalue is accepted but not useful. + parent: Misc | None = ..., +) -> str | None: ... 
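A usage sketch for the dialogs typed above, assuming a hidden Tk root: the simpledialog ask* helpers return None when cancelled, and messagebox.askyesnocancel distinguishes Yes (True), No (False) and Cancel (None), so callers should compare with `is None` rather than rely on truthiness.

from tkinter import Tk, messagebox, simpledialog

root = Tk()
root.withdraw()

count = simpledialog.askinteger("Count", "How many items?", minvalue=1, maxvalue=100)
if count is None:
    print("cancelled")
else:
    print("user entered", count)

answer = messagebox.askyesnocancel("Save", "Save changes before closing?")
if answer is None:
    print("cancelled, keep the window open")
elif answer:
    print("save and close")
else:
    print("discard and close")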
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/tix.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/tix.pyi new file mode 100644 index 0000000..7891364 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/tix.pyi @@ -0,0 +1,299 @@ +import tkinter +from _typeshed import Incomplete +from typing import Any, Final + +WINDOW: Final = "window" +TEXT: Final = "text" +STATUS: Final = "status" +IMMEDIATE: Final = "immediate" +IMAGE: Final = "image" +IMAGETEXT: Final = "imagetext" +BALLOON: Final = "balloon" +AUTO: Final = "auto" +ACROSSTOP: Final = "acrosstop" + +ASCII: Final = "ascii" +CELL: Final = "cell" +COLUMN: Final = "column" +DECREASING: Final = "decreasing" +INCREASING: Final = "increasing" +INTEGER: Final = "integer" +MAIN: Final = "main" +MAX: Final = "max" +REAL: Final = "real" +ROW: Final = "row" +S_REGION: Final = "s-region" +X_REGION: Final = "x-region" +Y_REGION: Final = "y-region" + +# These should be kept in sync with _tkinter constants, except TCL_ALL_EVENTS which doesn't match ALL_EVENTS +TCL_DONT_WAIT: Final = 2 +TCL_WINDOW_EVENTS: Final = 4 +TCL_FILE_EVENTS: Final = 8 +TCL_TIMER_EVENTS: Final = 16 +TCL_IDLE_EVENTS: Final = 32 +TCL_ALL_EVENTS: Final = 0 + +class tixCommand: + def tix_addbitmapdir(self, directory: str) -> None: ... + def tix_cget(self, option: str) -> Any: ... + def tix_configure(self, cnf: dict[str, Any] | None = None, **kw: Any) -> Any: ... + def tix_filedialog(self, dlgclass: str | None = None) -> str: ... + def tix_getbitmap(self, name: str) -> str: ... + def tix_getimage(self, name: str) -> str: ... + def tix_option_get(self, name: str) -> Any: ... + def tix_resetoptions(self, newScheme: str, newFontSet: str, newScmPrio: str | None = None) -> None: ... + +class Tk(tkinter.Tk, tixCommand): + def __init__(self, screenName: str | None = None, baseName: str | None = None, className: str = "Tix") -> None: ... + +class TixWidget(tkinter.Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + widgetName: str | None = None, + static_options: list[str] | None = None, + cnf: dict[str, Any] = {}, + kw: dict[str, Any] = {}, + ) -> None: ... + def __getattr__(self, name: str): ... + def set_silent(self, value: str) -> None: ... + def subwidget(self, name: str) -> tkinter.Widget: ... + def subwidgets_all(self) -> list[tkinter.Widget]: ... + def config_all(self, option: Any, value: Any) -> None: ... + def image_create(self, imgtype: str, cnf: dict[str, Any] = {}, master: tkinter.Widget | None = None, **kw) -> None: ... + def image_delete(self, imgname: str) -> None: ... + +class TixSubWidget(TixWidget): + def __init__(self, master: tkinter.Widget, name: str, destroy_physically: int = 1, check_intermediate: int = 1) -> None: ... + +class DisplayStyle: + def __init__(self, itemtype: str, cnf: dict[str, Any] = {}, *, master: tkinter.Widget | None = None, **kw) -> None: ... + def __getitem__(self, key: str): ... + def __setitem__(self, key: str, value: Any) -> None: ... + def delete(self) -> None: ... + def config(self, cnf: dict[str, Any] = {}, **kw): ... + +class Balloon(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def bind_widget(self, widget: tkinter.Widget, cnf: dict[str, Any] = {}, **kw) -> None: ... + def unbind_widget(self, widget: tkinter.Widget) -> None: ... + +class ButtonBox(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... 
+ def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... + def invoke(self, name: str) -> None: ... + +class ComboBox(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def add_history(self, str: str) -> None: ... + def append_history(self, str: str) -> None: ... + def insert(self, index: int, str: str) -> None: ... + def pick(self, index: int) -> None: ... + +class Control(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def decrement(self) -> None: ... + def increment(self) -> None: ... + def invoke(self) -> None: ... + +class LabelEntry(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... + +class LabelFrame(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... + +class Meter(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... + +class OptionMenu(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def add_command(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... + def add_separator(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... + def delete(self, name: str) -> None: ... + def disable(self, name: str) -> None: ... + def enable(self, name: str) -> None: ... + +class PopupMenu(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def bind_widget(self, widget: tkinter.Widget) -> None: ... + def unbind_widget(self, widget: tkinter.Widget) -> None: ... + def post_widget(self, widget: tkinter.Widget, x: int, y: int) -> None: ... + +class Select(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... + def invoke(self, name: str) -> None: ... + +class StdButtonBox(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def invoke(self, name: str) -> None: ... + +class DirList(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def chdir(self, dir: str) -> None: ... + +class DirTree(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def chdir(self, dir: str) -> None: ... + +class DirSelectDialog(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def popup(self) -> None: ... + def popdown(self) -> None: ... + +class DirSelectBox(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... + +class ExFileSelectBox(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def filter(self) -> None: ... + def invoke(self) -> None: ... + +class FileSelectBox(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def apply_filter(self) -> None: ... + def invoke(self) -> None: ... + +class FileEntry(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def invoke(self) -> None: ... + def file_dialog(self) -> None: ... 
+ +class HList(TixWidget, tkinter.XView, tkinter.YView): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def add(self, entry: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... + def add_child(self, parent: str | None = None, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... + def anchor_set(self, entry: str) -> None: ... + def anchor_clear(self) -> None: ... + # FIXME: Overload, certain combos return, others don't + def column_width(self, col: int = 0, width: int | None = None, chars: int | None = None) -> int | None: ... + def delete_all(self) -> None: ... + def delete_entry(self, entry: str) -> None: ... + def delete_offsprings(self, entry: str) -> None: ... + def delete_siblings(self, entry: str) -> None: ... + def dragsite_set(self, index: int) -> None: ... + def dragsite_clear(self) -> None: ... + def dropsite_set(self, index: int) -> None: ... + def dropsite_clear(self) -> None: ... + def header_create(self, col: int, cnf: dict[str, Any] = {}, **kw) -> None: ... + def header_configure(self, col: int, cnf: dict[str, Any] = {}, **kw) -> Incomplete | None: ... + def header_cget(self, col: int, opt): ... + def header_exists(self, col: int) -> bool: ... + def header_exist(self, col: int) -> bool: ... + def header_delete(self, col: int) -> None: ... + def header_size(self, col: int) -> int: ... + def hide_entry(self, entry: str) -> None: ... + def indicator_create(self, entry: str, cnf: dict[str, Any] = {}, **kw) -> None: ... + def indicator_configure(self, entry: str, cnf: dict[str, Any] = {}, **kw) -> Incomplete | None: ... + def indicator_cget(self, entry: str, opt): ... + def indicator_exists(self, entry: str) -> bool: ... + def indicator_delete(self, entry: str) -> None: ... + def indicator_size(self, entry: str) -> int: ... + def info_anchor(self) -> str: ... + def info_bbox(self, entry: str) -> tuple[int, int, int, int]: ... + def info_children(self, entry: str | None = None) -> tuple[str, ...]: ... + def info_data(self, entry: str) -> Any: ... + def info_dragsite(self) -> str: ... + def info_dropsite(self) -> str: ... + def info_exists(self, entry: str) -> bool: ... + def info_hidden(self, entry: str) -> bool: ... + def info_next(self, entry: str) -> str: ... + def info_parent(self, entry: str) -> str: ... + def info_prev(self, entry: str) -> str: ... + def info_selection(self) -> tuple[str, ...]: ... + def item_cget(self, entry: str, col: int, opt): ... + def item_configure(self, entry: str, col: int, cnf: dict[str, Any] = {}, **kw) -> Incomplete | None: ... + def item_create(self, entry: str, col: int, cnf: dict[str, Any] = {}, **kw) -> None: ... + def item_exists(self, entry: str, col: int) -> bool: ... + def item_delete(self, entry: str, col: int) -> None: ... + def entrycget(self, entry: str, opt): ... + def entryconfigure(self, entry: str, cnf: dict[str, Any] = {}, **kw) -> Incomplete | None: ... + def nearest(self, y: int) -> str: ... + def see(self, entry: str) -> None: ... + def selection_clear(self, cnf: dict[str, Any] = {}, **kw) -> None: ... + def selection_includes(self, entry: str) -> bool: ... + def selection_set(self, first: str, last: str | None = None) -> None: ... + def show_entry(self, entry: str) -> None: ... + +class CheckList(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def autosetmode(self) -> None: ... + def close(self, entrypath: str) -> None: ... + def getmode(self, entrypath: str) -> str: ... 
+ def open(self, entrypath: str) -> None: ... + def getselection(self, mode: str = "on") -> tuple[str, ...]: ... + def getstatus(self, entrypath: str) -> str: ... + def setstatus(self, entrypath: str, mode: str = "on") -> None: ... + +class Tree(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def autosetmode(self) -> None: ... + def close(self, entrypath: str) -> None: ... + def getmode(self, entrypath: str) -> str: ... + def open(self, entrypath: str) -> None: ... + def setmode(self, entrypath: str, mode: str = "none") -> None: ... + +class TList(TixWidget, tkinter.XView, tkinter.YView): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def active_set(self, index: int) -> None: ... + def active_clear(self) -> None: ... + def anchor_set(self, index: int) -> None: ... + def anchor_clear(self) -> None: ... + def delete(self, from_: int, to: int | None = None) -> None: ... + def dragsite_set(self, index: int) -> None: ... + def dragsite_clear(self) -> None: ... + def dropsite_set(self, index: int) -> None: ... + def dropsite_clear(self) -> None: ... + def insert(self, index: int, cnf: dict[str, Any] = {}, **kw) -> None: ... + def info_active(self) -> int: ... + def info_anchor(self) -> int: ... + def info_down(self, index: int) -> int: ... + def info_left(self, index: int) -> int: ... + def info_right(self, index: int) -> int: ... + def info_selection(self) -> tuple[int, ...]: ... + def info_size(self) -> int: ... + def info_up(self, index: int) -> int: ... + def nearest(self, x: int, y: int) -> int: ... + def see(self, index: int) -> None: ... + def selection_clear(self, cnf: dict[str, Any] = {}, **kw) -> None: ... + def selection_includes(self, index: int) -> bool: ... + def selection_set(self, first: int, last: int | None = None) -> None: ... + +class PanedWindow(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... + def delete(self, name: str) -> None: ... + def forget(self, name: str) -> None: ... # type: ignore[override] + def panecget(self, entry: str, opt): ... + def paneconfigure(self, entry: str, cnf: dict[str, Any] = {}, **kw) -> Incomplete | None: ... + def panes(self) -> list[tkinter.Widget]: ... + +class ListNoteBook(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... + def page(self, name: str) -> tkinter.Widget: ... + def pages(self) -> list[tkinter.Widget]: ... + def raise_page(self, name: str) -> None: ... + +class NoteBook(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... + def delete(self, name: str) -> None: ... + def page(self, name: str) -> tkinter.Widget: ... + def pages(self) -> list[tkinter.Widget]: ... + def raise_page(self, name: str) -> None: ... + def raised(self) -> bool: ... + +class InputOnly(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... + +class Form: + def __setitem__(self, key: str, value: Any) -> None: ... + def config(self, cnf: dict[str, Any] = {}, **kw) -> None: ... + def form(self, cnf: dict[str, Any] = {}, **kw) -> None: ... + def check(self) -> bool: ... 
+ def forget(self) -> None: ... + def grid(self, xsize: int = 0, ysize: int = 0) -> tuple[int, int] | None: ... + def info(self, option: str | None = None): ... + def slaves(self) -> list[tkinter.Widget]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/ttk.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/ttk.pyi new file mode 100644 index 0000000..1d72acd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tkinter/ttk.pyi @@ -0,0 +1,1344 @@ +import _tkinter +import sys +import tkinter +from _typeshed import MaybeNone +from collections.abc import Callable, Iterable +from tkinter.font import _FontDescription +from typing import Any, Literal, TypedDict, overload, type_check_only +from typing_extensions import Never, TypeAlias, Unpack + +__all__ = [ + "Button", + "Checkbutton", + "Combobox", + "Entry", + "Frame", + "Label", + "Labelframe", + "LabelFrame", + "Menubutton", + "Notebook", + "Panedwindow", + "PanedWindow", + "Progressbar", + "Radiobutton", + "Scale", + "Scrollbar", + "Separator", + "Sizegrip", + "Style", + "Treeview", + "LabeledScale", + "OptionMenu", + "tclobjs_to_py", + "setup_master", + "Spinbox", +] + +def tclobjs_to_py(adict: dict[Any, Any]) -> dict[Any, Any]: ... +def setup_master(master: tkinter.Misc | None = None): ... + +_Padding: TypeAlias = ( + float + | str + | tuple[float | str] + | tuple[float | str, float | str] + | tuple[float | str, float | str, float | str] + | tuple[float | str, float | str, float | str, float | str] +) + +# Last item (option value to apply) varies between different options so use Any. +# It could also be any iterable with items matching the tuple, but that case +# hasn't been added here for consistency with _Padding above. +_Statespec: TypeAlias = tuple[Unpack[tuple[str, ...]], Any] +_ImageStatespec: TypeAlias = tuple[Unpack[tuple[str, ...]], tkinter._Image | str] +_VsapiStatespec: TypeAlias = tuple[Unpack[tuple[str, ...]], int] + +class _Layout(TypedDict, total=False): + side: Literal["left", "right", "top", "bottom"] + sticky: str # consists of letters 'n', 's', 'w', 'e', may contain repeats, may be empty + unit: Literal[0, 1] | bool + children: _LayoutSpec + # Note: there seem to be some other undocumented keys sometimes + +# This could be any sequence when passed as a parameter but will always be a list when returned. 
+_LayoutSpec: TypeAlias = list[tuple[str, _Layout | None]] + +# Keep these in sync with the appropriate methods in Style +class _ElementCreateImageKwargs(TypedDict, total=False): + border: _Padding + height: float | str + padding: _Padding + sticky: str + width: float | str + +_ElementCreateArgsCrossPlatform: TypeAlias = ( + # Could be any sequence here but types are not homogenous so just type it as tuple + tuple[Literal["image"], tkinter._Image | str, Unpack[tuple[_ImageStatespec, ...]], _ElementCreateImageKwargs] + | tuple[Literal["from"], str, str] + | tuple[Literal["from"], str] # (fromelement is optional) +) +if sys.platform == "win32" and sys.version_info >= (3, 13): + class _ElementCreateVsapiKwargsPadding(TypedDict, total=False): + padding: _Padding + + class _ElementCreateVsapiKwargsMargin(TypedDict, total=False): + padding: _Padding + + class _ElementCreateVsapiKwargsSize(TypedDict): + width: float | str + height: float | str + + _ElementCreateVsapiKwargsDict: TypeAlias = ( + _ElementCreateVsapiKwargsPadding | _ElementCreateVsapiKwargsMargin | _ElementCreateVsapiKwargsSize + ) + _ElementCreateArgs: TypeAlias = ( # noqa: Y047 # It doesn't recognise the usage below for whatever reason + _ElementCreateArgsCrossPlatform + | tuple[Literal["vsapi"], str, int, _ElementCreateVsapiKwargsDict] + | tuple[Literal["vsapi"], str, int, _VsapiStatespec, _ElementCreateVsapiKwargsDict] + ) +else: + _ElementCreateArgs: TypeAlias = _ElementCreateArgsCrossPlatform +_ThemeSettingsValue = TypedDict( + "_ThemeSettingsValue", + { + "configure": dict[str, Any], + "map": dict[str, Iterable[_Statespec]], + "layout": _LayoutSpec, + "element create": _ElementCreateArgs, + }, + total=False, +) +_ThemeSettings: TypeAlias = dict[str, _ThemeSettingsValue] + +class Style: + master: tkinter.Misc + tk: _tkinter.TkappType + def __init__(self, master: tkinter.Misc | None = None) -> None: ... + # For these methods, values given vary between options. Returned values + # seem to be str, but this might not always be the case. + @overload + def configure(self, style: str) -> dict[str, Any] | None: ... # Returns None if no configuration. + @overload + def configure(self, style: str, query_opt: str, **kw: Any) -> Any: ... + @overload + def configure(self, style: str, query_opt: None = None, **kw: Any) -> None: ... + @overload + def map(self, style: str, query_opt: str) -> _Statespec: ... + @overload + def map(self, style: str, query_opt: None = None, **kw: Iterable[_Statespec]) -> dict[str, _Statespec]: ... + def lookup(self, style: str, option: str, state: Iterable[str] | None = None, default: Any | None = None) -> Any: ... + @overload + def layout(self, style: str, layoutspec: _LayoutSpec) -> list[Never]: ... # Always seems to return an empty list + @overload + def layout(self, style: str, layoutspec: None = None) -> _LayoutSpec: ... + @overload + def element_create( + self, + elementname: str, + etype: Literal["image"], + default_image: tkinter._Image | str, + /, + *imagespec: _ImageStatespec, + border: _Padding = ..., + height: float | str = ..., + padding: _Padding = ..., + sticky: str = ..., + width: float | str = ..., + ) -> None: ... + @overload + def element_create(self, elementname: str, etype: Literal["from"], themename: str, fromelement: str = ..., /) -> None: ... + if sys.platform == "win32" and sys.version_info >= (3, 13): # and tk version >= 8.6 + # margin, padding, and (width + height) are mutually exclusive. width + # and height must either both be present or not present at all. 
Note: + # There are other undocumented options if you look at ttk's source code. + @overload + def element_create( + self, + elementname: str, + etype: Literal["vsapi"], + class_: str, + part: int, + vs_statespec: _VsapiStatespec = ..., + /, + *, + padding: _Padding = ..., + ) -> None: ... + @overload + def element_create( + self, + elementname: str, + etype: Literal["vsapi"], + class_: str, + part: int, + vs_statespec: _VsapiStatespec = ..., + /, + *, + margin: _Padding = ..., + ) -> None: ... + @overload + def element_create( + self, + elementname: str, + etype: Literal["vsapi"], + class_: str, + part: int, + vs_statespec: _VsapiStatespec = ..., + /, + *, + width: float | str, + height: float | str, + ) -> None: ... + + def element_names(self) -> tuple[str, ...]: ... + def element_options(self, elementname: str) -> tuple[str, ...]: ... + def theme_create(self, themename: str, parent: str | None = None, settings: _ThemeSettings | None = None) -> None: ... + def theme_settings(self, themename: str, settings: _ThemeSettings) -> None: ... + def theme_names(self) -> tuple[str, ...]: ... + @overload + def theme_use(self, themename: str) -> None: ... + @overload + def theme_use(self, themename: None = None) -> str: ... + +class Widget(tkinter.Widget): + def __init__(self, master: tkinter.Misc | None, widgetname, kw=None) -> None: ... + def identify(self, x: int, y: int) -> str: ... + def instate(self, statespec, callback=None, *args, **kw): ... + def state(self, statespec=None): ... + +class Button(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = "", + command: str | Callable[[], Any] = "", + compound: Literal["", "text", "image", "top", "left", "center", "right", "bottom", "none"] = "", + cursor: tkinter._Cursor = "", + default: Literal["normal", "active", "disabled"] = "normal", + image: tkinter._Image | str = "", + name: str = ..., + padding=..., # undocumented + state: str = "normal", + style: str = "", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + text: float | str = "", + textvariable: tkinter.Variable = ..., + underline: int = -1, + width: int | Literal[""] = "", + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + command: str | Callable[[], Any] = ..., + compound: Literal["", "text", "image", "top", "left", "center", "right", "bottom", "none"] = ..., + cursor: tkinter._Cursor = ..., + default: Literal["normal", "active", "disabled"] = ..., + image: tkinter._Image | str = ..., + padding=..., + state: str = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + text: float | str = ..., + textvariable: tkinter.Variable = ..., + underline: int = ..., + width: int | Literal[""] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def invoke(self) -> Any: ... 
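For orientation, a minimal sketch of how the Style and Button overloads stubbed above are exercised at a call site; the style name "My.TButton" and the option values are illustrative assumptions, and tkinter.Tk() assumes a display is available:

import tkinter
from tkinter import ttk

root = tkinter.Tk()
style = ttk.Style(root)

# configure(style, **kw) sets options (third overload); configure(style) with
# no options queries and may return a dict per the first overload.
style.configure("My.TButton", foreground="navy", padding=4)
current = style.configure("My.TButton")

# map(style, **kw) takes per-option statespec lists (state names followed by a value).
style.map("My.TButton", foreground=[("disabled", "grey"), ("active", "red")])

# theme_use() with no argument queries the current theme name (second overload).
print(style.theme_use(), current)

# Button accepts the keyword-only options typed in its __init__ above.
btn = ttk.Button(root, text="Quit", style="My.TButton", command=root.destroy)
btn.pack()
root.destroy()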
+ +class Checkbutton(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = "", + command: str | Callable[[], Any] = "", + compound: Literal["", "text", "image", "top", "left", "center", "right", "bottom", "none"] = "", + cursor: tkinter._Cursor = "", + image: tkinter._Image | str = "", + name: str = ..., + offvalue: Any = 0, + onvalue: Any = 1, + padding=..., # undocumented + state: str = "normal", + style: str = "", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + text: float | str = "", + textvariable: tkinter.Variable = ..., + underline: int = -1, + # Seems like variable can be empty string, but actually setting it to + # empty string segfaults before Tcl 8.6.9. Search for ttk::checkbutton + # here: https://sourceforge.net/projects/tcl/files/Tcl/8.6.9/tcltk-release-notes-8.6.9.txt/view + variable: tkinter.Variable = ..., + width: int | Literal[""] = "", + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + command: str | Callable[[], Any] = ..., + compound: Literal["", "text", "image", "top", "left", "center", "right", "bottom", "none"] = ..., + cursor: tkinter._Cursor = ..., + image: tkinter._Image | str = ..., + offvalue: Any = ..., + onvalue: Any = ..., + padding=..., + state: str = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + text: float | str = ..., + textvariable: tkinter.Variable = ..., + underline: int = ..., + variable: tkinter.Variable = ..., + width: int | Literal[""] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def invoke(self) -> Any: ... + +class Entry(Widget, tkinter.Entry): + def __init__( + self, + master: tkinter.Misc | None = None, + widget: str | None = None, + *, + background: str = ..., # undocumented + class_: str = "", + cursor: tkinter._Cursor = ..., + exportselection: bool = True, + font: _FontDescription = "TkTextFont", + foreground: str = "", + invalidcommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = "", + justify: Literal["left", "center", "right"] = "left", + name: str = ..., + show: str = "", + state: str = "normal", + style: str = "", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + textvariable: tkinter.Variable = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = "none", + validatecommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = "", + width: int = 20, + xscrollcommand: str | Callable[[float, float], object] = "", + ) -> None: ... + @overload # type: ignore[override] + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: str = ..., + cursor: tkinter._Cursor = ..., + exportselection: bool = ..., + font: _FontDescription = ..., + foreground: str = ..., + invalidcommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., + justify: Literal["left", "center", "right"] = ..., + show: str = ..., + state: str = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + textvariable: tkinter.Variable = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., + validatecommand: str | list[str] | tuple[str, ...] 
| Callable[[], bool] = ..., + width: int = ..., + xscrollcommand: str | Callable[[float, float], object] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + # config must be copy/pasted, otherwise ttk.Entry().config is mypy error (don't know why) + @overload # type: ignore[override] + def config( + self, + cnf: dict[str, Any] | None = None, + *, + background: str = ..., + cursor: tkinter._Cursor = ..., + exportselection: bool = ..., + font: _FontDescription = ..., + foreground: str = ..., + invalidcommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., + justify: Literal["left", "center", "right"] = ..., + show: str = ..., + state: str = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + textvariable: tkinter.Variable = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., + validatecommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., + width: int = ..., + xscrollcommand: str | Callable[[float, float], object] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + def bbox(self, index) -> tuple[int, int, int, int]: ... # type: ignore[override] + def identify(self, x: int, y: int) -> str: ... + def validate(self): ... + +class Combobox(Entry): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + background: str = ..., # undocumented + class_: str = "", + cursor: tkinter._Cursor = "", + exportselection: bool = True, + font: _FontDescription = ..., # undocumented + foreground: str = ..., # undocumented + height: int = 10, + invalidcommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., # undocumented + justify: Literal["left", "center", "right"] = "left", + name: str = ..., + postcommand: Callable[[], object] | str = "", + show=..., # undocumented + state: str = "normal", + style: str = "", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + textvariable: tkinter.Variable = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., # undocumented + validatecommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., # undocumented + values: list[str] | tuple[str, ...] = ..., + width: int = 20, + xscrollcommand: str | Callable[[float, float], object] = ..., # undocumented + ) -> None: ... + @overload # type: ignore[override] + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: str = ..., + cursor: tkinter._Cursor = ..., + exportselection: bool = ..., + font: _FontDescription = ..., + foreground: str = ..., + height: int = ..., + invalidcommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., + justify: Literal["left", "center", "right"] = ..., + postcommand: Callable[[], object] | str = ..., + show=..., + state: str = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + textvariable: tkinter.Variable = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., + validatecommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., + values: list[str] | tuple[str, ...] = ..., + width: int = ..., + xscrollcommand: str | Callable[[float, float], object] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... 
+ @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + # config must be copy/pasted, otherwise ttk.Combobox().config is mypy error (don't know why) + @overload # type: ignore[override] + def config( + self, + cnf: dict[str, Any] | None = None, + *, + background: str = ..., + cursor: tkinter._Cursor = ..., + exportselection: bool = ..., + font: _FontDescription = ..., + foreground: str = ..., + height: int = ..., + invalidcommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., + justify: Literal["left", "center", "right"] = ..., + postcommand: Callable[[], object] | str = ..., + show=..., + state: str = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + textvariable: tkinter.Variable = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., + validatecommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., + values: list[str] | tuple[str, ...] = ..., + width: int = ..., + xscrollcommand: str | Callable[[float, float], object] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + def current(self, newindex: int | None = None) -> int: ... + def set(self, value: Any) -> None: ... + +class Frame(Widget): + # This should be kept in sync with tkinter.ttk.LabeledScale.__init__() + # (all of these keyword-only arguments are also present there) + def __init__( + self, + master: tkinter.Misc | None = None, + *, + border: float | str = ..., + borderwidth: float | str = ..., + class_: str = "", + cursor: tkinter._Cursor = "", + height: float | str = 0, + name: str = ..., + padding: _Padding = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + style: str = "", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", + width: float | str = 0, + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + border: float | str = ..., + borderwidth: float | str = ..., + cursor: tkinter._Cursor = ..., + height: float | str = ..., + padding: _Padding = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + width: float | str = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
+ config = configure + +class Label(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = ..., + background: str = "", + border: float | str = ..., # alias for borderwidth + borderwidth: float | str = ..., # undocumented + class_: str = "", + compound: Literal["", "text", "image", "top", "left", "center", "right", "bottom", "none"] = "", + cursor: tkinter._Cursor = "", + font: _FontDescription = ..., + foreground: str = "", + image: tkinter._Image | str = "", + justify: Literal["left", "center", "right"] = ..., + name: str = ..., + padding: _Padding = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + state: str = "normal", + style: str = "", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", + text: float | str = "", + textvariable: tkinter.Variable = ..., + underline: int = -1, + width: int | Literal[""] = "", + wraplength: float | str = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = ..., + background: str = ..., + border: float | str = ..., + borderwidth: float | str = ..., + compound: Literal["", "text", "image", "top", "left", "center", "right", "bottom", "none"] = ..., + cursor: tkinter._Cursor = ..., + font: _FontDescription = ..., + foreground: str = ..., + image: tkinter._Image | str = ..., + justify: Literal["left", "center", "right"] = ..., + padding: _Padding = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + state: str = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + text: float | str = ..., + textvariable: tkinter.Variable = ..., + underline: int = ..., + width: int | Literal[""] = ..., + wraplength: float | str = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + +class Labelframe(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + border: float | str = ..., + borderwidth: float | str = ..., # undocumented + class_: str = "", + cursor: tkinter._Cursor = "", + height: float | str = 0, + labelanchor: Literal["nw", "n", "ne", "en", "e", "es", "se", "s", "sw", "ws", "w", "wn"] = ..., + labelwidget: tkinter.Misc = ..., + name: str = ..., + padding: _Padding = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., # undocumented + style: str = "", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", + text: float | str = "", + underline: int = -1, + width: float | str = 0, + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + border: float | str = ..., + borderwidth: float | str = ..., + cursor: tkinter._Cursor = ..., + height: float | str = ..., + labelanchor: Literal["nw", "n", "ne", "en", "e", "es", "se", "s", "sw", "ws", "w", "wn"] = ..., + labelwidget: tkinter.Misc = ..., + padding: _Padding = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + text: float | str = ..., + underline: int = ..., + width: float | str = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... 
+ @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + +LabelFrame = Labelframe + +class Menubutton(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = "", + compound: Literal["", "text", "image", "top", "left", "center", "right", "bottom", "none"] = "", + cursor: tkinter._Cursor = "", + direction: Literal["above", "below", "left", "right", "flush"] = "below", + image: tkinter._Image | str = "", + menu: tkinter.Menu = ..., + name: str = ..., + padding=..., # undocumented + state: str = "normal", + style: str = "", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + text: float | str = "", + textvariable: tkinter.Variable = ..., + underline: int = -1, + width: int | Literal[""] = "", + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + compound: Literal["", "text", "image", "top", "left", "center", "right", "bottom", "none"] = ..., + cursor: tkinter._Cursor = ..., + direction: Literal["above", "below", "left", "right", "flush"] = ..., + image: tkinter._Image | str = ..., + menu: tkinter.Menu = ..., + padding=..., + state: str = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + text: float | str = ..., + textvariable: tkinter.Variable = ..., + underline: int = ..., + width: int | Literal[""] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + +class Notebook(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = "", + cursor: tkinter._Cursor = "", + height: int = 0, + name: str = ..., + padding: _Padding = ..., + style: str = "", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + width: int = 0, + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + cursor: tkinter._Cursor = ..., + height: int = ..., + padding: _Padding = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + width: int = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def add( + self, + child: tkinter.Widget, + *, + state: Literal["normal", "disabled", "hidden"] = ..., + sticky: str = ..., # consists of letters 'n', 's', 'w', 'e', no repeats, may be empty + padding: _Padding = ..., + text: str = ..., + # `image` is a sequence of an image name, followed by zero or more + # (sequences of one or more state names followed by an image name) + image=..., + compound: Literal["top", "left", "center", "right", "bottom", "none"] = ..., + underline: int = ..., + ) -> None: ... + def forget(self, tab_id) -> None: ... # type: ignore[override] + def hide(self, tab_id) -> None: ... + def identify(self, x: int, y: int) -> str: ... + def index(self, tab_id): ... + def insert(self, pos, child, **kw) -> None: ... + def select(self, tab_id=None): ... + def tab(self, tab_id, option=None, **kw): ... + def tabs(self): ... + def enable_traversal(self) -> None: ... 
+ +class Panedwindow(Widget, tkinter.PanedWindow): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = "", + cursor: tkinter._Cursor = "", + # width and height for tkinter.ttk.Panedwindow are int but for tkinter.PanedWindow they are screen units + height: int = 0, + name: str = ..., + orient: Literal["vertical", "horizontal"] = "vertical", # can't be changed with configure() + style: str = "", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", + width: int = 0, + ) -> None: ... + def add(self, child: tkinter.Widget, *, weight: int = ..., **kw) -> None: ... + @overload # type: ignore[override] + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + cursor: tkinter._Cursor = ..., + height: int = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + width: int = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + # config must be copy/pasted, otherwise ttk.Panedwindow().config is mypy error (don't know why) + @overload # type: ignore[override] + def config( + self, + cnf: dict[str, Any] | None = None, + *, + cursor: tkinter._Cursor = ..., + height: int = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + width: int = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + forget = tkinter.PanedWindow.forget + def insert(self, pos, child, **kw) -> None: ... + def pane(self, pane, option=None, **kw): ... + def sashpos(self, index, newpos=None): ... + +PanedWindow = Panedwindow + +class Progressbar(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = "", + cursor: tkinter._Cursor = "", + length: float | str = 100, + maximum: float = 100, + mode: Literal["determinate", "indeterminate"] = "determinate", + name: str = ..., + orient: Literal["horizontal", "vertical"] = "horizontal", + phase: int = 0, # docs say read-only but assigning int to this works + style: str = "", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", + value: float = 0.0, + variable: tkinter.IntVar | tkinter.DoubleVar = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + cursor: tkinter._Cursor = ..., + length: float | str = ..., + maximum: float = ..., + mode: Literal["determinate", "indeterminate"] = ..., + orient: Literal["horizontal", "vertical"] = ..., + phase: int = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + value: float = ..., + variable: tkinter.IntVar | tkinter.DoubleVar = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def start(self, interval: Literal["idle"] | int | None = None) -> None: ... + def step(self, amount: float | None = None) -> None: ... + def stop(self) -> None: ... 
+ +class Radiobutton(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = "", + command: str | Callable[[], Any] = "", + compound: Literal["", "text", "image", "top", "left", "center", "right", "bottom", "none"] = "", + cursor: tkinter._Cursor = "", + image: tkinter._Image | str = "", + name: str = ..., + padding=..., # undocumented + state: str = "normal", + style: str = "", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + text: float | str = "", + textvariable: tkinter.Variable = ..., + underline: int = -1, + value: Any = "1", + variable: tkinter.Variable | Literal[""] = ..., + width: int | Literal[""] = "", + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + command: str | Callable[[], Any] = ..., + compound: Literal["", "text", "image", "top", "left", "center", "right", "bottom", "none"] = ..., + cursor: tkinter._Cursor = ..., + image: tkinter._Image | str = ..., + padding=..., + state: str = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + text: float | str = ..., + textvariable: tkinter.Variable = ..., + underline: int = ..., + value: Any = ..., + variable: tkinter.Variable | Literal[""] = ..., + width: int | Literal[""] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def invoke(self) -> Any: ... + +# type ignore, because identify() methods of Widget and tkinter.Scale are incompatible +class Scale(Widget, tkinter.Scale): # type: ignore[misc] + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = "", + command: str | Callable[[str], object] = "", + cursor: tkinter._Cursor = "", + from_: float = 0, + length: float | str = 100, + name: str = ..., + orient: Literal["horizontal", "vertical"] = "horizontal", + state: str = ..., # undocumented + style: str = "", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + to: float = 1.0, + value: float = 0, + variable: tkinter.IntVar | tkinter.DoubleVar = ..., + ) -> None: ... + @overload # type: ignore[override] + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + command: str | Callable[[str], object] = ..., + cursor: tkinter._Cursor = ..., + from_: float = ..., + length: float | str = ..., + orient: Literal["horizontal", "vertical"] = ..., + state: str = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + to: float = ..., + value: float = ..., + variable: tkinter.IntVar | tkinter.DoubleVar = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + # config must be copy/pasted, otherwise ttk.Scale().config is mypy error (don't know why) + @overload # type: ignore[override] + def config( + self, + cnf: dict[str, Any] | None = None, + *, + command: str | Callable[[str], object] = ..., + cursor: tkinter._Cursor = ..., + from_: float = ..., + length: float | str = ..., + orient: Literal["horizontal", "vertical"] = ..., + state: str = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + to: float = ..., + value: float = ..., + variable: tkinter.IntVar | tkinter.DoubleVar = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... 
+ @overload + def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + def get(self, x: int | None = None, y: int | None = None) -> float: ... + +# type ignore, because identify() methods of Widget and tkinter.Scale are incompatible +class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = "", + command: Callable[..., tuple[float, float] | None] | str = "", + cursor: tkinter._Cursor = "", + name: str = ..., + orient: Literal["horizontal", "vertical"] = "vertical", + style: str = "", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", + ) -> None: ... + @overload # type: ignore[override] + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + command: Callable[..., tuple[float, float] | None] | str = ..., + cursor: tkinter._Cursor = ..., + orient: Literal["horizontal", "vertical"] = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + # config must be copy/pasted, otherwise ttk.Scrollbar().config is mypy error (don't know why) + @overload # type: ignore[override] + def config( + self, + cnf: dict[str, Any] | None = None, + *, + command: Callable[..., tuple[float, float] | None] | str = ..., + cursor: tkinter._Cursor = ..., + orient: Literal["horizontal", "vertical"] = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + +class Separator(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = "", + cursor: tkinter._Cursor = "", + name: str = ..., + orient: Literal["horizontal", "vertical"] = "horizontal", + style: str = "", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + cursor: tkinter._Cursor = ..., + orient: Literal["horizontal", "vertical"] = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + +class Sizegrip(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = "", + cursor: tkinter._Cursor = ..., + name: str = ..., + style: str = "", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + cursor: tkinter._Cursor = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + +class Spinbox(Entry): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + background: str = ..., # undocumented + class_: str = "", + command: Callable[[], object] | str | list[str] | tuple[str, ...] 
= "", + cursor: tkinter._Cursor = "", + exportselection: bool = ..., # undocumented + font: _FontDescription = ..., # undocumented + foreground: str = ..., # undocumented + format: str = "", + from_: float = 0, + increment: float = 1, + invalidcommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., # undocumented + justify: Literal["left", "center", "right"] = ..., # undocumented + name: str = ..., + show=..., # undocumented + state: str = "normal", + style: str = "", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + textvariable: tkinter.Variable = ..., # undocumented + to: float = 0, + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = "none", + validatecommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = "", + values: list[str] | tuple[str, ...] = ..., + width: int = ..., # undocumented + wrap: bool = False, + xscrollcommand: str | Callable[[float, float], object] = "", + ) -> None: ... + @overload # type: ignore[override] + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: str = ..., + command: Callable[[], object] | str | list[str] | tuple[str, ...] = ..., + cursor: tkinter._Cursor = ..., + exportselection: bool = ..., + font: _FontDescription = ..., + foreground: str = ..., + format: str = ..., + from_: float = ..., + increment: float = ..., + invalidcommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., + justify: Literal["left", "center", "right"] = ..., + show=..., + state: str = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + textvariable: tkinter.Variable = ..., + to: float = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., + validatecommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., + values: list[str] | tuple[str, ...] = ..., + width: int = ..., + wrap: bool = ..., + xscrollcommand: str | Callable[[float, float], object] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure # type: ignore[assignment] + def set(self, value: Any) -> None: ... + +@type_check_only +class _TreeviewItemDict(TypedDict): + text: str + image: list[str] | Literal[""] # no idea why it's wrapped in list + values: list[Any] | Literal[""] + open: bool # actually 0 or 1 + tags: list[str] | Literal[""] + +@type_check_only +class _TreeviewTagDict(TypedDict): + # There is also 'text' and 'anchor', but they don't seem to do anything, using them is likely a bug + foreground: str + background: str + font: _FontDescription + image: str # not wrapped in list :D + +@type_check_only +class _TreeviewHeaderDict(TypedDict): + text: str + image: list[str] | Literal[""] + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] + command: str + state: str # Doesn't seem to appear anywhere else than in these dicts + +@type_check_only +class _TreeviewColumnDict(TypedDict): + width: int + minwidth: int + stretch: bool # actually 0 or 1 + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] + id: str + +class Treeview(Widget, tkinter.XView, tkinter.YView): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = "", + columns: str | list[str] | list[int] | list[str | int] | tuple[str | int, ...] = "", + cursor: tkinter._Cursor = "", + displaycolumns: str | int | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] 
= ("#all",), + height: int = 10, + name: str = ..., + padding: _Padding = ..., + selectmode: Literal["extended", "browse", "none"] = "extended", + # list/tuple of Literal don't actually work in mypy + # + # 'tree headings' is same as ['tree', 'headings'], and I wouldn't be + # surprised if someone is using it. + show: Literal["tree", "headings", "tree headings", ""] | list[str] | tuple[str, ...] = ("tree", "headings"), + style: str = "", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + xscrollcommand: str | Callable[[float, float], object] = "", + yscrollcommand: str | Callable[[float, float], object] = "", + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + columns: str | list[str] | list[int] | list[str | int] | tuple[str | int, ...] = ..., + cursor: tkinter._Cursor = ..., + displaycolumns: str | int | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] = ..., + height: int = ..., + padding: _Padding = ..., + selectmode: Literal["extended", "browse", "none"] = ..., + show: Literal["tree", "headings", "tree headings", ""] | list[str] | tuple[str, ...] = ..., + style: str = ..., + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., + xscrollcommand: str | Callable[[float, float], object] = ..., + yscrollcommand: str | Callable[[float, float], object] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def bbox(self, item: str | int, column: str | int | None = None) -> tuple[int, int, int, int] | Literal[""]: ... # type: ignore[override] + def get_children(self, item: str | int | None = None) -> tuple[str, ...]: ... + def set_children(self, item: str | int, *newchildren: str | int) -> None: ... + @overload + def column(self, column: str | int, option: Literal["width", "minwidth"]) -> int: ... + @overload + def column(self, column: str | int, option: Literal["stretch"]) -> bool: ... # actually 0 or 1 + @overload + def column(self, column: str | int, option: Literal["anchor"]) -> _tkinter.Tcl_Obj: ... + @overload + def column(self, column: str | int, option: Literal["id"]) -> str: ... + @overload + def column(self, column: str | int, option: str) -> Any: ... + @overload + def column( + self, + column: str | int, + option: None = None, + *, + width: int = ..., + minwidth: int = ..., + stretch: bool = ..., + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = ..., + # id is read-only + ) -> _TreeviewColumnDict | None: ... + def delete(self, *items: str | int) -> None: ... + def detach(self, *items: str | int) -> None: ... + def exists(self, item: str | int) -> bool: ... + @overload # type: ignore[override] + def focus(self, item: None = None) -> str: ... # can return empty string + @overload + def focus(self, item: str | int) -> Literal[""]: ... + @overload + def heading(self, column: str | int, option: Literal["text"]) -> str: ... + @overload + def heading(self, column: str | int, option: Literal["image"]) -> tuple[str] | str: ... + @overload + def heading(self, column: str | int, option: Literal["anchor"]) -> _tkinter.Tcl_Obj: ... + @overload + def heading(self, column: str | int, option: Literal["command"]) -> str: ... + @overload + def heading(self, column: str | int, option: str) -> Any: ... + @overload + def heading(self, column: str | int, option: None = None) -> _TreeviewHeaderDict: ... 
+ @overload + def heading( + self, + column: str | int, + option: None = None, + *, + text: str = ..., + image: tkinter._Image | str = ..., + anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = ..., + command: str | Callable[[], object] = ..., + ) -> None: ... + # Internal Method. Leave untyped: + def identify(self, component, x, y): ... # type: ignore[override] + def identify_row(self, y: int) -> str: ... + def identify_column(self, x: int) -> str: ... + def identify_region(self, x: int, y: int) -> Literal["heading", "separator", "tree", "cell", "nothing"]: ... + def identify_element(self, x: int, y: int) -> str: ... # don't know what possible return values are + def index(self, item: str | int) -> int: ... + def insert( + self, + parent: str, + index: int | Literal["end"], + iid: str | int | None = None, + *, + id: str | int = ..., # same as iid + text: str = ..., + image: tkinter._Image | str = ..., + values: list[Any] | tuple[Any, ...] = ..., + open: bool = ..., + tags: str | list[str] | tuple[str, ...] = ..., + ) -> str: ... + @overload + def item(self, item: str | int, option: Literal["text"]) -> str: ... + @overload + def item(self, item: str | int, option: Literal["image"]) -> tuple[str] | Literal[""]: ... + @overload + def item(self, item: str | int, option: Literal["values"]) -> tuple[Any, ...] | Literal[""]: ... + @overload + def item(self, item: str | int, option: Literal["open"]) -> bool: ... # actually 0 or 1 + @overload + def item(self, item: str | int, option: Literal["tags"]) -> tuple[str, ...] | Literal[""]: ... + @overload + def item(self, item: str | int, option: str) -> Any: ... + @overload + def item(self, item: str | int, option: None = None) -> _TreeviewItemDict: ... + @overload + def item( + self, + item: str | int, + option: None = None, + *, + text: str = ..., + image: tkinter._Image | str = ..., + values: list[Any] | tuple[Any, ...] | Literal[""] = ..., + open: bool = ..., + tags: str | list[str] | tuple[str, ...] = ..., + ) -> None: ... + def move(self, item: str | int, parent: str, index: int | Literal["end"]) -> None: ... + reattach = move + def next(self, item: str | int) -> str: ... # returning empty string means last item + def parent(self, item: str | int) -> str: ... + def prev(self, item: str | int) -> str: ... # returning empty string means first item + def see(self, item: str | int) -> None: ... + def selection(self) -> tuple[str, ...]: ... + @overload + def selection_set(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: ... + @overload + def selection_set(self, *items: str | int) -> None: ... + @overload + def selection_add(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: ... + @overload + def selection_add(self, *items: str | int) -> None: ... + @overload + def selection_remove(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: ... + @overload + def selection_remove(self, *items: str | int) -> None: ... + @overload + def selection_toggle(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: ... + @overload + def selection_toggle(self, *items: str | int) -> None: ... + @overload + def set(self, item: str | int, column: None = None, value: None = None) -> dict[str, Any]: ... + @overload + def set(self, item: str | int, column: str | int, value: None = None) -> Any: ... + @overload + def set(self, item: str | int, column: str | int, value: Any) -> Literal[""]: ... 
+ # There's no tag_unbind() or 'add' argument for whatever reason. + # Also, it's 'callback' instead of 'func' here. + @overload + def tag_bind( + self, tagname: str, sequence: str | None = None, callback: Callable[[tkinter.Event[Treeview]], object] | None = None + ) -> str: ... + @overload + def tag_bind(self, tagname: str, sequence: str | None, callback: str) -> None: ... + @overload + def tag_bind(self, tagname: str, *, callback: str) -> None: ... + @overload + def tag_configure(self, tagname: str, option: Literal["foreground", "background"]) -> str: ... + @overload + def tag_configure(self, tagname: str, option: Literal["font"]) -> _FontDescription: ... + @overload + def tag_configure(self, tagname: str, option: Literal["image"]) -> str: ... + @overload + def tag_configure( + self, + tagname: str, + option: None = None, + *, + # There is also 'text' and 'anchor', but they don't seem to do anything, using them is likely a bug + foreground: str = ..., + background: str = ..., + font: _FontDescription = ..., + image: tkinter._Image | str = ..., + ) -> _TreeviewTagDict | MaybeNone: ... # can be None but annoying to check + @overload + def tag_has(self, tagname: str, item: None = None) -> tuple[str, ...]: ... + @overload + def tag_has(self, tagname: str, item: str | int) -> bool: ... + +class LabeledScale(Frame): + label: Label + scale: Scale + # This should be kept in sync with tkinter.ttk.Frame.__init__() + # (all the keyword-only args except compound are from there) + def __init__( + self, + master: tkinter.Misc | None = None, + variable: tkinter.IntVar | tkinter.DoubleVar | None = None, + from_: float = 0, + to: float = 10, + *, + border: float | str = ..., + borderwidth: float | str = ..., + class_: str = "", + compound: Literal["top", "bottom"] = "top", + cursor: tkinter._Cursor = "", + height: float | str = 0, + name: str = ..., + padding: _Padding = ..., + relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., + style: str = "", + takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", + width: float | str = 0, + ) -> None: ... + # destroy is overridden, signature does not change + value: Any + +class OptionMenu(Menubutton): + def __init__( + self, + master: tkinter.Misc | None, + variable: tkinter.StringVar, + default: str | None = None, + *values: str, + # rest of these are keyword-only because *args syntax used above + style: str = "", + direction: Literal["above", "below", "left", "right", "flush"] = "below", + command: Callable[[tkinter.StringVar], object] | None = None, + ) -> None: ... + # configure, config, cget, destroy are inherited from Menubutton + # destroy and __setitem__ are overridden, signature does not change + def set_menu(self, default: str | None = None, *values: str) -> None: ... 
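To make the Treeview overloads above concrete, a minimal usage sketch; the column identifier, heading text, and item data are illustrative assumptions, and tkinter.Tk() assumes a display is available:

import tkinter
from tkinter import ttk

root = tkinter.Tk()
tree = ttk.Treeview(root, columns=("size",), show=("tree", "headings"))

# heading(column, **kw) configures and returns None; heading(column, "text") queries a str.
tree.heading("size", text="Size (KB)", anchor="e")
tree.column("size", width=80, stretch=False)

# insert() returns the generated item id as a str.
item_id = tree.insert("", "end", text="report.txt", values=(12,))

# item(item, option) narrows by option; the keyword form updates and returns None.
values = tree.item(item_id, "values")
tree.item(item_id, open=True)

# selection_set(*items) overload; selection() always returns a tuple of item ids.
tree.selection_set(item_id)
assert tree.selection() == (item_id,)
print(values)
root.destroy()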
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/token.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/token.pyi new file mode 100644 index 0000000..fd1b10d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/token.pyi @@ -0,0 +1,169 @@ +import sys +from typing import Final + +__all__ = [ + "AMPER", + "AMPEREQUAL", + "AT", + "ATEQUAL", + "CIRCUMFLEX", + "CIRCUMFLEXEQUAL", + "COLON", + "COLONEQUAL", + "COMMA", + "DEDENT", + "DOT", + "DOUBLESLASH", + "DOUBLESLASHEQUAL", + "DOUBLESTAR", + "DOUBLESTAREQUAL", + "ELLIPSIS", + "ENDMARKER", + "EQEQUAL", + "EQUAL", + "ERRORTOKEN", + "GREATER", + "GREATEREQUAL", + "INDENT", + "ISEOF", + "ISNONTERMINAL", + "ISTERMINAL", + "LBRACE", + "LEFTSHIFT", + "LEFTSHIFTEQUAL", + "LESS", + "LESSEQUAL", + "LPAR", + "LSQB", + "MINEQUAL", + "MINUS", + "NAME", + "NEWLINE", + "NOTEQUAL", + "NT_OFFSET", + "NUMBER", + "N_TOKENS", + "OP", + "PERCENT", + "PERCENTEQUAL", + "PLUS", + "PLUSEQUAL", + "RARROW", + "RBRACE", + "RIGHTSHIFT", + "RIGHTSHIFTEQUAL", + "RPAR", + "RSQB", + "SEMI", + "SLASH", + "SLASHEQUAL", + "STAR", + "STAREQUAL", + "STRING", + "TILDE", + "TYPE_COMMENT", + "TYPE_IGNORE", + "VBAR", + "VBAREQUAL", + "tok_name", + "ENCODING", + "NL", + "COMMENT", +] +if sys.version_info < (3, 13): + __all__ += ["ASYNC", "AWAIT"] + +if sys.version_info >= (3, 10): + __all__ += ["SOFT_KEYWORD"] + +if sys.version_info >= (3, 12): + __all__ += ["EXCLAMATION", "FSTRING_END", "FSTRING_MIDDLE", "FSTRING_START", "EXACT_TOKEN_TYPES"] + +if sys.version_info >= (3, 14): + __all__ += ["TSTRING_START", "TSTRING_MIDDLE", "TSTRING_END"] + +ENDMARKER: Final[int] +NAME: Final[int] +NUMBER: Final[int] +STRING: Final[int] +NEWLINE: Final[int] +INDENT: Final[int] +DEDENT: Final[int] +LPAR: Final[int] +RPAR: Final[int] +LSQB: Final[int] +RSQB: Final[int] +COLON: Final[int] +COMMA: Final[int] +SEMI: Final[int] +PLUS: Final[int] +MINUS: Final[int] +STAR: Final[int] +SLASH: Final[int] +VBAR: Final[int] +AMPER: Final[int] +LESS: Final[int] +GREATER: Final[int] +EQUAL: Final[int] +DOT: Final[int] +PERCENT: Final[int] +LBRACE: Final[int] +RBRACE: Final[int] +EQEQUAL: Final[int] +NOTEQUAL: Final[int] +LESSEQUAL: Final[int] +GREATEREQUAL: Final[int] +TILDE: Final[int] +CIRCUMFLEX: Final[int] +LEFTSHIFT: Final[int] +RIGHTSHIFT: Final[int] +DOUBLESTAR: Final[int] +PLUSEQUAL: Final[int] +MINEQUAL: Final[int] +STAREQUAL: Final[int] +SLASHEQUAL: Final[int] +PERCENTEQUAL: Final[int] +AMPEREQUAL: Final[int] +VBAREQUAL: Final[int] +CIRCUMFLEXEQUAL: Final[int] +LEFTSHIFTEQUAL: Final[int] +RIGHTSHIFTEQUAL: Final[int] +DOUBLESTAREQUAL: Final[int] +DOUBLESLASH: Final[int] +DOUBLESLASHEQUAL: Final[int] +AT: Final[int] +RARROW: Final[int] +ELLIPSIS: Final[int] +ATEQUAL: Final[int] +if sys.version_info < (3, 13): + AWAIT: Final[int] + ASYNC: Final[int] +OP: Final[int] +ERRORTOKEN: Final[int] +N_TOKENS: Final[int] +NT_OFFSET: Final[int] +tok_name: Final[dict[int, str]] +COMMENT: Final[int] +NL: Final[int] +ENCODING: Final[int] +TYPE_COMMENT: Final[int] +TYPE_IGNORE: Final[int] +COLONEQUAL: Final[int] +EXACT_TOKEN_TYPES: Final[dict[str, int]] +if sys.version_info >= (3, 10): + SOFT_KEYWORD: Final[int] + +if sys.version_info >= (3, 12): + EXCLAMATION: Final[int] + FSTRING_END: Final[int] + FSTRING_MIDDLE: Final[int] + FSTRING_START: Final[int] + +if sys.version_info >= (3, 14): + TSTRING_START: Final[int] + TSTRING_MIDDLE: Final[int] + TSTRING_END: Final[int] + +def ISTERMINAL(x: int) -> bool: ... +def ISNONTERMINAL(x: int) -> bool: ... 
+def ISEOF(x: int) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tokenize.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tokenize.pyi new file mode 100644 index 0000000..00a24b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tokenize.pyi @@ -0,0 +1,203 @@ +import sys +from _typeshed import FileDescriptorOrPath +from collections.abc import Callable, Generator, Iterable, Sequence +from re import Pattern +from token import * +from typing import Any, Final, NamedTuple, TextIO, type_check_only +from typing_extensions import TypeAlias, disjoint_base + +if sys.version_info < (3, 12): + # Avoid double assignment to Final name by imports, which pyright objects to. + # EXACT_TOKEN_TYPES is already defined by 'from token import *' above + # in Python 3.12+. + from token import EXACT_TOKEN_TYPES as EXACT_TOKEN_TYPES + +__all__ = [ + "AMPER", + "AMPEREQUAL", + "AT", + "ATEQUAL", + "CIRCUMFLEX", + "CIRCUMFLEXEQUAL", + "COLON", + "COLONEQUAL", + "COMMA", + "COMMENT", + "DEDENT", + "DOT", + "DOUBLESLASH", + "DOUBLESLASHEQUAL", + "DOUBLESTAR", + "DOUBLESTAREQUAL", + "ELLIPSIS", + "ENCODING", + "ENDMARKER", + "EQEQUAL", + "EQUAL", + "ERRORTOKEN", + "GREATER", + "GREATEREQUAL", + "INDENT", + "ISEOF", + "ISNONTERMINAL", + "ISTERMINAL", + "LBRACE", + "LEFTSHIFT", + "LEFTSHIFTEQUAL", + "LESS", + "LESSEQUAL", + "LPAR", + "LSQB", + "MINEQUAL", + "MINUS", + "NAME", + "NEWLINE", + "NL", + "NOTEQUAL", + "NT_OFFSET", + "NUMBER", + "N_TOKENS", + "OP", + "PERCENT", + "PERCENTEQUAL", + "PLUS", + "PLUSEQUAL", + "RARROW", + "RBRACE", + "RIGHTSHIFT", + "RIGHTSHIFTEQUAL", + "RPAR", + "RSQB", + "SEMI", + "SLASH", + "SLASHEQUAL", + "STAR", + "STAREQUAL", + "STRING", + "TILDE", + "TYPE_COMMENT", + "TYPE_IGNORE", + "TokenInfo", + "VBAR", + "VBAREQUAL", + "detect_encoding", + "generate_tokens", + "tok_name", + "tokenize", + "untokenize", +] +if sys.version_info < (3, 13): + __all__ += ["ASYNC", "AWAIT"] + +if sys.version_info >= (3, 10): + __all__ += ["SOFT_KEYWORD"] + +if sys.version_info >= (3, 12): + __all__ += ["EXCLAMATION", "FSTRING_END", "FSTRING_MIDDLE", "FSTRING_START", "EXACT_TOKEN_TYPES"] + +if sys.version_info >= (3, 13): + __all__ += ["TokenError", "open"] + +if sys.version_info >= (3, 14): + __all__ += ["TSTRING_START", "TSTRING_MIDDLE", "TSTRING_END"] + +cookie_re: Final[Pattern[str]] +blank_re: Final[Pattern[bytes]] + +_Position: TypeAlias = tuple[int, int] + +# This class is not exposed. It calls itself tokenize.TokenInfo. +@type_check_only +class _TokenInfo(NamedTuple): + type: int + string: str + start: _Position + end: _Position + line: str + +if sys.version_info >= (3, 12): + class TokenInfo(_TokenInfo): + @property + def exact_type(self) -> int: ... + +else: + @disjoint_base + class TokenInfo(_TokenInfo): + @property + def exact_type(self) -> int: ... + +# Backwards compatible tokens can be sequences of a shorter length too +_Token: TypeAlias = TokenInfo | Sequence[int | str | _Position] + +class TokenError(Exception): ... + +if sys.version_info < (3, 13): + class StopTokenizing(Exception): ... # undocumented + +class Untokenizer: + tokens: list[str] + prev_row: int + prev_col: int + encoding: str | None + def add_whitespace(self, start: _Position) -> None: ... + if sys.version_info >= (3, 12): + def add_backslash_continuation(self, start: _Position) -> None: ... + + def untokenize(self, iterable: Iterable[_Token]) -> str: ... + def compat(self, token: Sequence[int | str], iterable: Iterable[_Token]) -> None: ... 
+ if sys.version_info >= (3, 12): + def escape_brackets(self, token: str) -> str: ... + +# Returns str, unless the ENCODING token is present, in which case it returns bytes. +def untokenize(iterable: Iterable[_Token]) -> str | Any: ... +def detect_encoding(readline: Callable[[], bytes | bytearray]) -> tuple[str, Sequence[bytes]]: ... +def tokenize(readline: Callable[[], bytes | bytearray]) -> Generator[TokenInfo, None, None]: ... +def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ... +def open(filename: FileDescriptorOrPath) -> TextIO: ... +def group(*choices: str) -> str: ... # undocumented +def any(*choices: str) -> str: ... # undocumented +def maybe(*choices: str) -> str: ... # undocumented + +Whitespace: Final[str] # undocumented +Comment: Final[str] # undocumented +Ignore: Final[str] # undocumented +Name: Final[str] # undocumented + +Hexnumber: Final[str] # undocumented +Binnumber: Final[str] # undocumented +Octnumber: Final[str] # undocumented +Decnumber: Final[str] # undocumented +Intnumber: Final[str] # undocumented +Exponent: Final[str] # undocumented +Pointfloat: Final[str] # undocumented +Expfloat: Final[str] # undocumented +Floatnumber: Final[str] # undocumented +Imagnumber: Final[str] # undocumented +Number: Final[str] # undocumented + +def _all_string_prefixes() -> set[str]: ... # undocumented + +StringPrefix: Final[str] # undocumented + +Single: Final[str] # undocumented +Double: Final[str] # undocumented +Single3: Final[str] # undocumented +Double3: Final[str] # undocumented +Triple: Final[str] # undocumented +String: Final[str] # undocumented + +Special: Final[str] # undocumented +Funny: Final[str] # undocumented + +PlainToken: Final[str] # undocumented +Token: Final[str] # undocumented + +ContStr: Final[str] # undocumented +PseudoExtras: Final[str] # undocumented +PseudoToken: Final[str] # undocumented + +endpats: Final[dict[str, str]] # undocumented +single_quoted: Final[set[str]] # undocumented +triple_quoted: Final[set[str]] # undocumented + +tabsize: Final = 8 # undocumented diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tomllib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tomllib.pyi new file mode 100644 index 0000000..4ff4097 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tomllib.pyi @@ -0,0 +1,26 @@ +import sys +from _typeshed import SupportsRead +from collections.abc import Callable +from typing import Any, overload +from typing_extensions import deprecated + +__all__ = ("loads", "load", "TOMLDecodeError") + +if sys.version_info >= (3, 14): + class TOMLDecodeError(ValueError): + msg: str + doc: str + pos: int + lineno: int + colno: int + @overload + def __init__(self, msg: str, doc: str, pos: int) -> None: ... + @overload + @deprecated("Deprecated since Python 3.14. Set the 'msg', 'doc' and 'pos' arguments only.") + def __init__(self, msg: str | type = ..., doc: str | type = ..., pos: int | type = ..., *args: Any) -> None: ... + +else: + class TOMLDecodeError(ValueError): ... + +def load(fp: SupportsRead[bytes], /, *, parse_float: Callable[[str], Any] = ...) -> dict[str, Any]: ... +def loads(s: str, /, *, parse_float: Callable[[str], Any] = ...) -> dict[str, Any]: ... 
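As a quick illustration of the tomllib signatures above (available on Python 3.11+), a minimal sketch; the TOML text and the choice of Decimal for parse_float are illustrative assumptions:

import io
import tomllib
from decimal import Decimal

doc = b'[server]\nhost = "localhost"\ntimeout = 2.5\n'

# load() reads from a binary file-like object (SupportsRead[bytes]).
config = tomllib.load(io.BytesIO(doc))
assert config["server"]["host"] == "localhost"

# loads() takes str; parse_float substitutes an alternative float constructor.
precise = tomllib.loads(doc.decode(), parse_float=Decimal)
assert precise["server"]["timeout"] == Decimal("2.5")

# Malformed input raises TOMLDecodeError (a ValueError subclass).
try:
    tomllib.loads("not = valid =")
except tomllib.TOMLDecodeError as exc:
    print("parse error:", exc)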
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/trace.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/trace.pyi new file mode 100644 index 0000000..7e7cc1e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/trace.pyi @@ -0,0 +1,86 @@ +import sys +import types +from _typeshed import Incomplete, StrPath, TraceFunction +from collections.abc import Callable, Iterable, Mapping, Sequence +from typing import Any, TypeVar +from typing_extensions import ParamSpec, TypeAlias + +__all__ = ["Trace", "CoverageResults"] + +_T = TypeVar("_T") +_P = ParamSpec("_P") +_FileModuleFunction: TypeAlias = tuple[str, str | None, str] + +class CoverageResults: + counts: dict[tuple[str, int], int] + counter: dict[tuple[str, int], int] + calledfuncs: dict[_FileModuleFunction, int] + callers: dict[tuple[_FileModuleFunction, _FileModuleFunction], int] + inifile: StrPath | None + outfile: StrPath | None + def __init__( + self, + counts: dict[tuple[str, int], int] | None = None, + calledfuncs: dict[_FileModuleFunction, int] | None = None, + infile: StrPath | None = None, + callers: dict[tuple[_FileModuleFunction, _FileModuleFunction], int] | None = None, + outfile: StrPath | None = None, + ) -> None: ... # undocumented + def update(self, other: CoverageResults) -> None: ... + if sys.version_info >= (3, 13): + def write_results( + self, + show_missing: bool = True, + summary: bool = False, + coverdir: StrPath | None = None, + *, + ignore_missing_files: bool = False, + ) -> None: ... + else: + def write_results(self, show_missing: bool = True, summary: bool = False, coverdir: StrPath | None = None) -> None: ... + + def write_results_file( + self, path: StrPath, lines: Sequence[str], lnotab: Any, lines_hit: Mapping[int, int], encoding: str | None = None + ) -> tuple[int, int]: ... + def is_ignored_filename(self, filename: str) -> bool: ... # undocumented + +class _Ignore: + def __init__(self, modules: Iterable[str] | None = None, dirs: Iterable[StrPath] | None = None) -> None: ... + def names(self, filename: str, modulename: str) -> int: ... + +class Trace: + inifile: StrPath | None + outfile: StrPath | None + ignore: _Ignore + counts: dict[str, int] + pathtobasename: dict[Incomplete, Incomplete] + donothing: int + trace: int + start_time: int | None + globaltrace: TraceFunction + localtrace: TraceFunction + def __init__( + self, + count: int = 1, + trace: int = 1, + countfuncs: int = 0, + countcallers: int = 0, + ignoremods: Sequence[str] = (), + ignoredirs: Sequence[str] = (), + infile: StrPath | None = None, + outfile: StrPath | None = None, + timing: bool = False, + ) -> None: ... + def run(self, cmd: str | types.CodeType) -> None: ... + def runctx( + self, cmd: str | types.CodeType, globals: Mapping[str, Any] | None = None, locals: Mapping[str, Any] | None = None + ) -> None: ... + def runfunc(self, func: Callable[_P, _T], /, *args: _P.args, **kw: _P.kwargs) -> _T: ... + def file_module_function_of(self, frame: types.FrameType) -> _FileModuleFunction: ... + def globaltrace_trackcallers(self, frame: types.FrameType, why: str, arg: Any) -> None: ... + def globaltrace_countfuncs(self, frame: types.FrameType, why: str, arg: Any) -> None: ... + def globaltrace_lt(self, frame: types.FrameType, why: str, arg: Any) -> None: ... + def localtrace_trace_and_count(self, frame: types.FrameType, why: str, arg: Any) -> TraceFunction: ... + def localtrace_trace(self, frame: types.FrameType, why: str, arg: Any) -> TraceFunction: ... 
+ def localtrace_count(self, frame: types.FrameType, why: str, arg: Any) -> TraceFunction: ... + def results(self) -> CoverageResults: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/traceback.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/traceback.pyi new file mode 100644 index 0000000..d587295 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/traceback.pyi @@ -0,0 +1,331 @@ +import sys +from _typeshed import SupportsWrite, Unused +from collections.abc import Generator, Iterable, Iterator, Mapping +from types import FrameType, TracebackType +from typing import Any, ClassVar, Literal, overload +from typing_extensions import Self, TypeAlias, deprecated + +__all__ = [ + "extract_stack", + "extract_tb", + "format_exception", + "format_exception_only", + "format_list", + "format_stack", + "format_tb", + "print_exc", + "format_exc", + "print_exception", + "print_last", + "print_stack", + "print_tb", + "clear_frames", + "FrameSummary", + "StackSummary", + "TracebackException", + "walk_stack", + "walk_tb", +] + +if sys.version_info >= (3, 14): + __all__ += ["print_list"] + +_FrameSummaryTuple: TypeAlias = tuple[str, int, str, str | None] + +def print_tb(tb: TracebackType | None, limit: int | None = None, file: SupportsWrite[str] | None = None) -> None: ... + +if sys.version_info >= (3, 10): + @overload + def print_exception( + exc: type[BaseException] | None, + /, + value: BaseException | None = ..., + tb: TracebackType | None = ..., + limit: int | None = None, + file: SupportsWrite[str] | None = None, + chain: bool = True, + ) -> None: ... + @overload + def print_exception( + exc: BaseException, /, *, limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True + ) -> None: ... + @overload + def format_exception( + exc: type[BaseException] | None, + /, + value: BaseException | None = ..., + tb: TracebackType | None = ..., + limit: int | None = None, + chain: bool = True, + ) -> list[str]: ... + @overload + def format_exception(exc: BaseException, /, *, limit: int | None = None, chain: bool = True) -> list[str]: ... + +else: + def print_exception( + etype: type[BaseException] | None, + value: BaseException | None, + tb: TracebackType | None, + limit: int | None = None, + file: SupportsWrite[str] | None = None, + chain: bool = True, + ) -> None: ... + def format_exception( + etype: type[BaseException] | None, + value: BaseException | None, + tb: TracebackType | None, + limit: int | None = None, + chain: bool = True, + ) -> list[str]: ... + +def print_exc(limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: ... +def print_last(limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: ... +def print_stack(f: FrameType | None = None, limit: int | None = None, file: SupportsWrite[str] | None = None) -> None: ... +def extract_tb(tb: TracebackType | None, limit: int | None = None) -> StackSummary: ... +def extract_stack(f: FrameType | None = None, limit: int | None = None) -> StackSummary: ... +def format_list(extracted_list: Iterable[FrameSummary | _FrameSummaryTuple]) -> list[str]: ... +def print_list(extracted_list: Iterable[FrameSummary | _FrameSummaryTuple], file: SupportsWrite[str] | None = None) -> None: ... + +if sys.version_info >= (3, 13): + @overload + def format_exception_only(exc: BaseException | None, /, *, show_group: bool = False) -> list[str]: ... 
+ @overload + def format_exception_only(exc: Unused, /, value: BaseException | None, *, show_group: bool = False) -> list[str]: ... + +elif sys.version_info >= (3, 10): + @overload + def format_exception_only(exc: BaseException | None, /) -> list[str]: ... + @overload + def format_exception_only(exc: Unused, /, value: BaseException | None) -> list[str]: ... + +else: + def format_exception_only(etype: type[BaseException] | None, value: BaseException | None) -> list[str]: ... + +def format_exc(limit: int | None = None, chain: bool = True) -> str: ... +def format_tb(tb: TracebackType | None, limit: int | None = None) -> list[str]: ... +def format_stack(f: FrameType | None = None, limit: int | None = None) -> list[str]: ... +def clear_frames(tb: TracebackType | None) -> None: ... +def walk_stack(f: FrameType | None) -> Iterator[tuple[FrameType, int]]: ... +def walk_tb(tb: TracebackType | None) -> Iterator[tuple[FrameType, int]]: ... + +if sys.version_info >= (3, 11): + class _ExceptionPrintContext: + def indent(self) -> str: ... + def emit(self, text_gen: str | Iterable[str], margin_char: str | None = None) -> Generator[str, None, None]: ... + +class TracebackException: + __cause__: TracebackException | None + __context__: TracebackException | None + if sys.version_info >= (3, 11): + exceptions: list[TracebackException] | None + __suppress_context__: bool + if sys.version_info >= (3, 11): + __notes__: list[str] | None + stack: StackSummary + + # These fields only exist for `SyntaxError`s, but there is no way to express that in the type system. + filename: str + lineno: str | None + if sys.version_info >= (3, 10): + end_lineno: str | None + text: str + offset: int + if sys.version_info >= (3, 10): + end_offset: int | None + msg: str + + if sys.version_info >= (3, 13): + @property + def exc_type_str(self) -> str: ... + @property + @deprecated("Deprecated since Python 3.13. Use `exc_type_str` instead.") + def exc_type(self) -> type[BaseException] | None: ... + else: + exc_type: type[BaseException] + if sys.version_info >= (3, 13): + def __init__( + self, + exc_type: type[BaseException], + exc_value: BaseException, + exc_traceback: TracebackType | None, + *, + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + compact: bool = False, + max_group_width: int = 15, + max_group_depth: int = 10, + save_exc_type: bool = True, + _seen: set[int] | None = None, + ) -> None: ... + elif sys.version_info >= (3, 11): + def __init__( + self, + exc_type: type[BaseException], + exc_value: BaseException, + exc_traceback: TracebackType | None, + *, + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + compact: bool = False, + max_group_width: int = 15, + max_group_depth: int = 10, + _seen: set[int] | None = None, + ) -> None: ... + elif sys.version_info >= (3, 10): + def __init__( + self, + exc_type: type[BaseException], + exc_value: BaseException, + exc_traceback: TracebackType | None, + *, + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + compact: bool = False, + _seen: set[int] | None = None, + ) -> None: ... + else: + def __init__( + self, + exc_type: type[BaseException], + exc_value: BaseException, + exc_traceback: TracebackType | None, + *, + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + _seen: set[int] | None = None, + ) -> None: ... 
+ + if sys.version_info >= (3, 11): + @classmethod + def from_exception( + cls, + exc: BaseException, + *, + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + compact: bool = False, + max_group_width: int = 15, + max_group_depth: int = 10, + ) -> Self: ... + elif sys.version_info >= (3, 10): + @classmethod + def from_exception( + cls, + exc: BaseException, + *, + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + compact: bool = False, + ) -> Self: ... + else: + @classmethod + def from_exception( + cls, exc: BaseException, *, limit: int | None = None, lookup_lines: bool = True, capture_locals: bool = False + ) -> Self: ... + + def __eq__(self, other: object) -> bool: ... + __hash__: ClassVar[None] # type: ignore[assignment] + if sys.version_info >= (3, 11): + def format(self, *, chain: bool = True, _ctx: _ExceptionPrintContext | None = None) -> Generator[str, None, None]: ... + else: + def format(self, *, chain: bool = True) -> Generator[str, None, None]: ... + + if sys.version_info >= (3, 13): + def format_exception_only(self, *, show_group: bool = False, _depth: int = 0) -> Generator[str, None, None]: ... + else: + def format_exception_only(self) -> Generator[str, None, None]: ... + + if sys.version_info >= (3, 11): + def print(self, *, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: ... + +class FrameSummary: + if sys.version_info >= (3, 13): + __slots__ = ( + "filename", + "lineno", + "end_lineno", + "colno", + "end_colno", + "name", + "_lines", + "_lines_dedented", + "locals", + "_code", + ) + elif sys.version_info >= (3, 11): + __slots__ = ("filename", "lineno", "end_lineno", "colno", "end_colno", "name", "_line", "locals") + else: + __slots__ = ("filename", "lineno", "name", "_line", "locals") + if sys.version_info >= (3, 11): + def __init__( + self, + filename: str, + lineno: int | None, + name: str, + *, + lookup_line: bool = True, + locals: Mapping[str, str] | None = None, + line: str | None = None, + end_lineno: int | None = None, + colno: int | None = None, + end_colno: int | None = None, + ) -> None: ... + end_lineno: int | None + colno: int | None + end_colno: int | None + else: + def __init__( + self, + filename: str, + lineno: int | None, + name: str, + *, + lookup_line: bool = True, + locals: Mapping[str, str] | None = None, + line: str | None = None, + ) -> None: ... + filename: str + lineno: int | None + name: str + locals: dict[str, str] | None + @property + def line(self) -> str | None: ... + @overload + def __getitem__(self, pos: Literal[0]) -> str: ... + @overload + def __getitem__(self, pos: Literal[1]) -> int: ... + @overload + def __getitem__(self, pos: Literal[2]) -> str: ... + @overload + def __getitem__(self, pos: Literal[3]) -> str | None: ... + @overload + def __getitem__(self, pos: int) -> Any: ... + @overload + def __getitem__(self, pos: slice) -> tuple[Any, ...]: ... + def __iter__(self) -> Iterator[Any]: ... + def __eq__(self, other: object) -> bool: ... + def __len__(self) -> Literal[4]: ... + __hash__: ClassVar[None] # type: ignore[assignment] + +class StackSummary(list[FrameSummary]): + @classmethod + def extract( + cls, + frame_gen: Iterable[tuple[FrameType, int]], + *, + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + ) -> StackSummary: ... + @classmethod + def from_list(cls, a_list: Iterable[FrameSummary | _FrameSummaryTuple]) -> StackSummary: ... 
+ if sys.version_info >= (3, 11): + def format_frame_summary(self, frame_summary: FrameSummary) -> str: ... + + def format(self) -> list[str]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tracemalloc.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tracemalloc.pyi new file mode 100644 index 0000000..31d8f74 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tracemalloc.pyi @@ -0,0 +1,122 @@ +import sys +from _tracemalloc import * +from collections.abc import Sequence +from typing import Any, SupportsIndex, overload +from typing_extensions import TypeAlias + +def get_object_traceback(obj: object) -> Traceback | None: ... +def take_snapshot() -> Snapshot: ... + +class BaseFilter: + inclusive: bool + def __init__(self, inclusive: bool) -> None: ... + +class DomainFilter(BaseFilter): + @property + def domain(self) -> int: ... + def __init__(self, inclusive: bool, domain: int) -> None: ... + +class Filter(BaseFilter): + domain: int | None + lineno: int | None + @property + def filename_pattern(self) -> str: ... + all_frames: bool + def __init__( + self, + inclusive: bool, + filename_pattern: str, + lineno: int | None = None, + all_frames: bool = False, + domain: int | None = None, + ) -> None: ... + +class Statistic: + __slots__ = ("traceback", "size", "count") + count: int + size: int + traceback: Traceback + def __init__(self, traceback: Traceback, size: int, count: int) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + +class StatisticDiff: + __slots__ = ("traceback", "size", "size_diff", "count", "count_diff") + count: int + count_diff: int + size: int + size_diff: int + traceback: Traceback + def __init__(self, traceback: Traceback, size: int, size_diff: int, count: int, count_diff: int) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + +_FrameTuple: TypeAlias = tuple[str, int] + +class Frame: + __slots__ = ("_frame",) + @property + def filename(self) -> str: ... + @property + def lineno(self) -> int: ... + def __init__(self, frame: _FrameTuple) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def __lt__(self, other: Frame) -> bool: ... + if sys.version_info >= (3, 11): + def __gt__(self, other: Frame) -> bool: ... + def __ge__(self, other: Frame) -> bool: ... + def __le__(self, other: Frame) -> bool: ... + else: + def __gt__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... + def __ge__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... + +_TraceTuple: TypeAlias = tuple[int, int, Sequence[_FrameTuple], int | None] | tuple[int, int, Sequence[_FrameTuple]] + +class Trace: + __slots__ = ("_trace",) + @property + def domain(self) -> int: ... + @property + def size(self) -> int: ... + @property + def traceback(self) -> Traceback: ... + def __init__(self, trace: _TraceTuple) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + +class Traceback(Sequence[Frame]): + __slots__ = ("_frames", "_total_nframe") + @property + def total_nframe(self) -> int | None: ... + def __init__(self, frames: Sequence[_FrameTuple], total_nframe: int | None = None) -> None: ... + def format(self, limit: int | None = None, most_recent_first: bool = False) -> list[str]: ... + @overload + def __getitem__(self, index: SupportsIndex) -> Frame: ... 
+ @overload + def __getitem__(self, index: slice) -> Sequence[Frame]: ... + def __contains__(self, frame: Frame) -> bool: ... # type: ignore[override] + def __len__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def __lt__(self, other: Traceback) -> bool: ... + if sys.version_info >= (3, 11): + def __gt__(self, other: Traceback) -> bool: ... + def __ge__(self, other: Traceback) -> bool: ... + def __le__(self, other: Traceback) -> bool: ... + else: + def __gt__(self, other: Traceback, NotImplemented: Any = ...) -> bool: ... + def __ge__(self, other: Traceback, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Traceback, NotImplemented: Any = ...) -> bool: ... + +class Snapshot: + def __init__(self, traces: Sequence[_TraceTuple], traceback_limit: int) -> None: ... + def compare_to(self, old_snapshot: Snapshot, key_type: str, cumulative: bool = False) -> list[StatisticDiff]: ... + def dump(self, filename: str) -> None: ... + def filter_traces(self, filters: Sequence[DomainFilter | Filter]) -> Snapshot: ... + @staticmethod + def load(filename: str) -> Snapshot: ... + def statistics(self, key_type: str, cumulative: bool = False) -> list[Statistic]: ... + traceback_limit: int + traces: Sequence[Trace] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tty.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tty.pyi new file mode 100644 index 0000000..ca3f001 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/tty.pyi @@ -0,0 +1,30 @@ +import sys +import termios +from typing import IO, Final +from typing_extensions import TypeAlias + +if sys.platform != "win32": + __all__ = ["setraw", "setcbreak"] + if sys.version_info >= (3, 12): + __all__ += ["cfmakeraw", "cfmakecbreak"] + + _ModeSetterReturn: TypeAlias = termios._AttrReturn + else: + _ModeSetterReturn: TypeAlias = None + + _FD: TypeAlias = int | IO[str] + + # XXX: Undocumented integer constants + IFLAG: Final = 0 + OFLAG: Final = 1 + CFLAG: Final = 2 + LFLAG: Final = 3 + ISPEED: Final = 4 + OSPEED: Final = 5 + CC: Final = 6 + def setraw(fd: _FD, when: int = 2) -> _ModeSetterReturn: ... + def setcbreak(fd: _FD, when: int = 2) -> _ModeSetterReturn: ... + + if sys.version_info >= (3, 12): + def cfmakeraw(mode: termios._Attr) -> None: ... + def cfmakecbreak(mode: termios._Attr) -> None: ... 
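As above, a small illustrative sketch (not part of the diff) exercising the traceback and tracemalloc APIs typed in the preceding stubs:

import traceback
import tracemalloc

# Capture the current exception as a formatted string instead of printing it.
try:
    1 / 0
except ZeroDivisionError:
    report = traceback.format_exc()
print(report.splitlines()[-1])  # -> ZeroDivisionError: division by zero

# Trace allocations and report the top source lines by allocated size.
tracemalloc.start()
data = [str(i) for i in range(10_000)]
snapshot = tracemalloc.take_snapshot()
for stat in snapshot.statistics("lineno")[:3]:
    print(stat)  # each stat is a tracemalloc.Statistic
tracemalloc.stop()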
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/turtle.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/turtle.pyi new file mode 100644 index 0000000..b5f536d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/turtle.pyi @@ -0,0 +1,791 @@ +import sys +from _typeshed import StrPath +from collections.abc import Callable, Generator, Sequence +from contextlib import contextmanager +from tkinter import Canvas, Frame, Misc, PhotoImage, Scrollbar +from typing import Any, ClassVar, Literal, TypedDict, overload, type_check_only +from typing_extensions import Self, TypeAlias, deprecated, disjoint_base + +__all__ = [ + "ScrolledCanvas", + "TurtleScreen", + "Screen", + "RawTurtle", + "Turtle", + "RawPen", + "Pen", + "Shape", + "Vec2D", + "addshape", + "bgcolor", + "bgpic", + "bye", + "clearscreen", + "colormode", + "delay", + "exitonclick", + "getcanvas", + "getshapes", + "listen", + "mainloop", + "mode", + "numinput", + "onkey", + "onkeypress", + "onkeyrelease", + "onscreenclick", + "ontimer", + "register_shape", + "resetscreen", + "screensize", + "setup", + "setworldcoordinates", + "textinput", + "title", + "tracer", + "turtles", + "update", + "window_height", + "window_width", + "back", + "backward", + "begin_fill", + "begin_poly", + "bk", + "circle", + "clear", + "clearstamp", + "clearstamps", + "clone", + "color", + "degrees", + "distance", + "dot", + "down", + "end_fill", + "end_poly", + "fd", + "fillcolor", + "filling", + "forward", + "get_poly", + "getpen", + "getscreen", + "get_shapepoly", + "getturtle", + "goto", + "heading", + "hideturtle", + "home", + "ht", + "isdown", + "isvisible", + "left", + "lt", + "onclick", + "ondrag", + "onrelease", + "pd", + "pen", + "pencolor", + "pendown", + "pensize", + "penup", + "pos", + "position", + "pu", + "radians", + "right", + "reset", + "resizemode", + "rt", + "seth", + "setheading", + "setpos", + "setposition", + "setundobuffer", + "setx", + "sety", + "shape", + "shapesize", + "shapetransform", + "shearfactor", + "showturtle", + "speed", + "st", + "stamp", + "tilt", + "tiltangle", + "towards", + "turtlesize", + "undo", + "undobufferentries", + "up", + "width", + "write", + "xcor", + "ycor", + "write_docstringdict", + "done", + "Terminator", +] + +if sys.version_info >= (3, 14): + __all__ += ["fill", "no_animation", "poly", "save"] + +if sys.version_info >= (3, 12): + __all__ += ["teleport"] + +if sys.version_info < (3, 13): + __all__ += ["settiltangle"] + +# Note: '_Color' is the alias we use for arguments and _AnyColor is the +# alias we use for return types. Really, these two aliases should be the +# same, but as per the "no union returns" typeshed policy, we'll return +# Any instead. +_Color: TypeAlias = str | tuple[float, float, float] +_AnyColor: TypeAlias = Any + +@type_check_only +class _PenState(TypedDict): + shown: bool + pendown: bool + pencolor: _Color + fillcolor: _Color + pensize: int + speed: int + resizemode: Literal["auto", "user", "noresize"] + stretchfactor: tuple[float, float] + shearfactor: float + outline: int + tilt: float + +_Speed: TypeAlias = str | float +_PolygonCoords: TypeAlias = Sequence[tuple[float, float]] + +if sys.version_info >= (3, 12): + class Vec2D(tuple[float, float]): + def __new__(cls, x: float, y: float) -> Self: ... + def __add__(self, other: tuple[float, float]) -> Vec2D: ... # type: ignore[override] + @overload # type: ignore[override] + def __mul__(self, other: Vec2D) -> float: ... + @overload + def __mul__(self, other: float) -> Vec2D: ... 
+ def __rmul__(self, other: float) -> Vec2D: ... # type: ignore[override] + def __sub__(self, other: tuple[float, float]) -> Vec2D: ... + def __neg__(self) -> Vec2D: ... + def __abs__(self) -> float: ... + def rotate(self, angle: float) -> Vec2D: ... + +else: + @disjoint_base + class Vec2D(tuple[float, float]): + def __new__(cls, x: float, y: float) -> Self: ... + def __add__(self, other: tuple[float, float]) -> Vec2D: ... # type: ignore[override] + @overload # type: ignore[override] + def __mul__(self, other: Vec2D) -> float: ... + @overload + def __mul__(self, other: float) -> Vec2D: ... + def __rmul__(self, other: float) -> Vec2D: ... # type: ignore[override] + def __sub__(self, other: tuple[float, float]) -> Vec2D: ... + def __neg__(self) -> Vec2D: ... + def __abs__(self) -> float: ... + def rotate(self, angle: float) -> Vec2D: ... + +# Does not actually inherit from Canvas, but dynamically gets all methods of Canvas +class ScrolledCanvas(Canvas, Frame): # type: ignore[misc] + bg: str + hscroll: Scrollbar + vscroll: Scrollbar + def __init__( + self, master: Misc | None, width: int = 500, height: int = 350, canvwidth: int = 600, canvheight: int = 500 + ) -> None: ... + canvwidth: int + canvheight: int + def reset(self, canvwidth: int | None = None, canvheight: int | None = None, bg: str | None = None) -> None: ... + +class TurtleScreenBase: + cv: Canvas + canvwidth: int + canvheight: int + xscale: float + yscale: float + def __init__(self, cv: Canvas) -> None: ... + def mainloop(self) -> None: ... + def textinput(self, title: str, prompt: str) -> str | None: ... + def numinput( + self, title: str, prompt: str, default: float | None = None, minval: float | None = None, maxval: float | None = None + ) -> float | None: ... + +class Terminator(Exception): ... +class TurtleGraphicsError(Exception): ... + +class Shape: + def __init__( + self, type_: Literal["polygon", "image", "compound"], data: _PolygonCoords | PhotoImage | None = None + ) -> None: ... + def addcomponent(self, poly: _PolygonCoords, fill: _Color, outline: _Color | None = None) -> None: ... + +class TurtleScreen(TurtleScreenBase): + def __init__( + self, cv: Canvas, mode: Literal["standard", "logo", "world"] = "standard", colormode: float = 1.0, delay: int = 10 + ) -> None: ... + def clear(self) -> None: ... + @overload + def mode(self, mode: None = None) -> str: ... + @overload + def mode(self, mode: Literal["standard", "logo", "world"]) -> None: ... + def setworldcoordinates(self, llx: float, lly: float, urx: float, ury: float) -> None: ... + def register_shape(self, name: str, shape: _PolygonCoords | Shape | None = None) -> None: ... + @overload + def colormode(self, cmode: None = None) -> float: ... + @overload + def colormode(self, cmode: float) -> None: ... + def reset(self) -> None: ... + def turtles(self) -> list[Turtle]: ... + @overload + def bgcolor(self) -> _AnyColor: ... + @overload + def bgcolor(self, color: _Color) -> None: ... + @overload + def bgcolor(self, r: float, g: float, b: float) -> None: ... + @overload + def tracer(self, n: None = None) -> int: ... + @overload + def tracer(self, n: int, delay: int | None = None) -> None: ... + @overload + def delay(self, delay: None = None) -> int: ... + @overload + def delay(self, delay: int) -> None: ... + if sys.version_info >= (3, 14): + @contextmanager + def no_animation(self) -> Generator[None]: ... + + def update(self) -> None: ... + def window_width(self) -> int: ... + def window_height(self) -> int: ... + def getcanvas(self) -> Canvas: ... 
+ def getshapes(self) -> list[str]: ... + def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... + def onkey(self, fun: Callable[[], object], key: str) -> None: ... + def listen(self, xdummy: float | None = None, ydummy: float | None = None) -> None: ... + def ontimer(self, fun: Callable[[], object], t: int = 0) -> None: ... + @overload + def bgpic(self, picname: None = None) -> str: ... + @overload + def bgpic(self, picname: str) -> None: ... + @overload + def screensize(self, canvwidth: None = None, canvheight: None = None, bg: None = None) -> tuple[int, int]: ... + # Looks like if self.cv is not a ScrolledCanvas, this could return a tuple as well + @overload + def screensize(self, canvwidth: int, canvheight: int, bg: _Color | None = None) -> None: ... + if sys.version_info >= (3, 14): + def save(self, filename: StrPath, *, overwrite: bool = False) -> None: ... + onscreenclick = onclick + resetscreen = reset + clearscreen = clear + addshape = register_shape + def onkeypress(self, fun: Callable[[], object], key: str | None = None) -> None: ... + onkeyrelease = onkey + +class TNavigator: + START_ORIENTATION: dict[str, Vec2D] + DEFAULT_MODE: str + DEFAULT_ANGLEOFFSET: int + DEFAULT_ANGLEORIENT: int + def __init__(self, mode: Literal["standard", "logo", "world"] = "standard") -> None: ... + def reset(self) -> None: ... + def degrees(self, fullcircle: float = 360.0) -> None: ... + def radians(self) -> None: ... + if sys.version_info >= (3, 12): + def teleport(self, x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: ... + + def forward(self, distance: float) -> None: ... + def back(self, distance: float) -> None: ... + def right(self, angle: float) -> None: ... + def left(self, angle: float) -> None: ... + def pos(self) -> Vec2D: ... + def xcor(self) -> float: ... + def ycor(self) -> float: ... + @overload + def goto(self, x: tuple[float, float], y: None = None) -> None: ... + @overload + def goto(self, x: float, y: float) -> None: ... + def home(self) -> None: ... + def setx(self, x: float) -> None: ... + def sety(self, y: float) -> None: ... + @overload + def distance(self, x: TNavigator | tuple[float, float], y: None = None) -> float: ... + @overload + def distance(self, x: float, y: float) -> float: ... + @overload + def towards(self, x: TNavigator | tuple[float, float], y: None = None) -> float: ... + @overload + def towards(self, x: float, y: float) -> float: ... + def heading(self) -> float: ... + def setheading(self, to_angle: float) -> None: ... + def circle(self, radius: float, extent: float | None = None, steps: int | None = None) -> None: ... + def speed(self, s: int | None = 0) -> int | None: ... + fd = forward + bk = back + backward = back + rt = right + lt = left + position = pos + setpos = goto + setposition = goto + seth = setheading + +class TPen: + def __init__(self, resizemode: Literal["auto", "user", "noresize"] = "noresize") -> None: ... + @overload + def resizemode(self, rmode: None = None) -> str: ... + @overload + def resizemode(self, rmode: Literal["auto", "user", "noresize"]) -> None: ... + @overload + def pensize(self, width: None = None) -> int: ... + @overload + def pensize(self, width: int) -> None: ... + def penup(self) -> None: ... + def pendown(self) -> None: ... + def isdown(self) -> bool: ... + @overload + def speed(self, speed: None = None) -> int: ... + @overload + def speed(self, speed: _Speed) -> None: ... + @overload + def pencolor(self) -> _AnyColor: ... 
+ @overload + def pencolor(self, color: _Color) -> None: ... + @overload + def pencolor(self, r: float, g: float, b: float) -> None: ... + @overload + def fillcolor(self) -> _AnyColor: ... + @overload + def fillcolor(self, color: _Color) -> None: ... + @overload + def fillcolor(self, r: float, g: float, b: float) -> None: ... + @overload + def color(self) -> tuple[_AnyColor, _AnyColor]: ... + @overload + def color(self, color: _Color) -> None: ... + @overload + def color(self, r: float, g: float, b: float) -> None: ... + @overload + def color(self, color1: _Color, color2: _Color) -> None: ... + if sys.version_info >= (3, 12): + def teleport(self, x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: ... + + def showturtle(self) -> None: ... + def hideturtle(self) -> None: ... + def isvisible(self) -> bool: ... + # Note: signatures 1 and 2 overlap unsafely when no arguments are provided + @overload + def pen(self) -> _PenState: ... + @overload + def pen( + self, + pen: _PenState | None = None, + *, + shown: bool = ..., + pendown: bool = ..., + pencolor: _Color = ..., + fillcolor: _Color = ..., + pensize: int = ..., + speed: int = ..., + resizemode: Literal["auto", "user", "noresize"] = ..., + stretchfactor: tuple[float, float] = ..., + outline: int = ..., + tilt: float = ..., + ) -> None: ... + width = pensize + up = penup + pu = penup + pd = pendown + down = pendown + st = showturtle + ht = hideturtle + +class RawTurtle(TPen, TNavigator): # type: ignore[misc] # Conflicting methods in base classes + screen: TurtleScreen + screens: ClassVar[list[TurtleScreen]] + def __init__( + self, + canvas: Canvas | TurtleScreen | None = None, + shape: str = "classic", + undobuffersize: int = 1000, + visible: bool = True, + ) -> None: ... + def reset(self) -> None: ... + def setundobuffer(self, size: int | None) -> None: ... + def undobufferentries(self) -> int: ... + def clear(self) -> None: ... + def clone(self) -> Self: ... + @overload + def shape(self, name: None = None) -> str: ... + @overload + def shape(self, name: str) -> None: ... + # Unsafely overlaps when no arguments are provided + @overload + def shapesize(self) -> tuple[float, float, float]: ... + @overload + def shapesize( + self, stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None + ) -> None: ... + @overload + def shearfactor(self, shear: None = None) -> float: ... + @overload + def shearfactor(self, shear: float) -> None: ... + # Unsafely overlaps when no arguments are provided + @overload + def shapetransform(self) -> tuple[float, float, float, float]: ... + @overload + def shapetransform( + self, t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None + ) -> None: ... + def get_shapepoly(self) -> _PolygonCoords | None: ... + + if sys.version_info < (3, 13): + @deprecated("Deprecated since Python 3.1; removed in Python 3.13. Use `tiltangle()` instead.") + def settiltangle(self, angle: float) -> None: ... + + @overload + def tiltangle(self, angle: None = None) -> float: ... + @overload + def tiltangle(self, angle: float) -> None: ... + def tilt(self, angle: float) -> None: ... + # Can return either 'int' or Tuple[int, ...] based on if the stamp is + # a compound stamp or not. So, as per the "no Union return" policy, + # we return Any. + def stamp(self) -> Any: ... + def clearstamp(self, stampid: int | tuple[int, ...]) -> None: ... + def clearstamps(self, n: int | None = None) -> None: ... 
+ def filling(self) -> bool: ... + if sys.version_info >= (3, 14): + @contextmanager + def fill(self) -> Generator[None]: ... + + def begin_fill(self) -> None: ... + def end_fill(self) -> None: ... + @overload + def dot(self, size: int | _Color | None = None) -> None: ... + @overload + def dot(self, size: int | None, color: _Color, /) -> None: ... + @overload + def dot(self, size: int | None, r: float, g: float, b: float, /) -> None: ... + def write( + self, arg: object, move: bool = False, align: str = "left", font: tuple[str, int, str] = ("Arial", 8, "normal") + ) -> None: ... + if sys.version_info >= (3, 14): + @contextmanager + def poly(self) -> Generator[None]: ... + + def begin_poly(self) -> None: ... + def end_poly(self) -> None: ... + def get_poly(self) -> _PolygonCoords | None: ... + def getscreen(self) -> TurtleScreen: ... + def getturtle(self) -> Self: ... + getpen = getturtle + def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... + def onrelease(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... + def ondrag(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... + def undo(self) -> None: ... + turtlesize = shapesize + +class _Screen(TurtleScreen): + def __init__(self) -> None: ... + # Note int and float are interpreted differently, hence the Union instead of just float + def setup( + self, + width: int | float = 0.5, # noqa: Y041 + height: int | float = 0.75, # noqa: Y041 + startx: int | None = None, + starty: int | None = None, + ) -> None: ... + def title(self, titlestring: str) -> None: ... + def bye(self) -> None: ... + def exitonclick(self) -> None: ... + +class Turtle(RawTurtle): + def __init__(self, shape: str = "classic", undobuffersize: int = 1000, visible: bool = True) -> None: ... + +RawPen = RawTurtle +Pen = Turtle + +def write_docstringdict(filename: str = "turtle_docstringdict") -> None: ... + +# Functions copied from TurtleScreenBase: + +def mainloop() -> None: ... +def textinput(title: str, prompt: str) -> str | None: ... +def numinput( + title: str, prompt: str, default: float | None = None, minval: float | None = None, maxval: float | None = None +) -> float | None: ... + +# Functions copied from TurtleScreen: + +def clear() -> None: ... +@overload +def mode(mode: None = None) -> str: ... +@overload +def mode(mode: Literal["standard", "logo", "world"]) -> None: ... +def setworldcoordinates(llx: float, lly: float, urx: float, ury: float) -> None: ... +def register_shape(name: str, shape: _PolygonCoords | Shape | None = None) -> None: ... +@overload +def colormode(cmode: None = None) -> float: ... +@overload +def colormode(cmode: float) -> None: ... +def reset() -> None: ... +def turtles() -> list[Turtle]: ... +@overload +def bgcolor() -> _AnyColor: ... +@overload +def bgcolor(color: _Color) -> None: ... +@overload +def bgcolor(r: float, g: float, b: float) -> None: ... +@overload +def tracer(n: None = None) -> int: ... +@overload +def tracer(n: int, delay: int | None = None) -> None: ... +@overload +def delay(delay: None = None) -> int: ... +@overload +def delay(delay: int) -> None: ... + +if sys.version_info >= (3, 14): + @contextmanager + def no_animation() -> Generator[None]: ... + +def update() -> None: ... +def window_width() -> int: ... +def window_height() -> int: ... +def getcanvas() -> Canvas: ... +def getshapes() -> list[str]: ... 
+def onclick(fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... +def onkey(fun: Callable[[], object], key: str) -> None: ... +def listen(xdummy: float | None = None, ydummy: float | None = None) -> None: ... +def ontimer(fun: Callable[[], object], t: int = 0) -> None: ... +@overload +def bgpic(picname: None = None) -> str: ... +@overload +def bgpic(picname: str) -> None: ... +@overload +def screensize(canvwidth: None = None, canvheight: None = None, bg: None = None) -> tuple[int, int]: ... +@overload +def screensize(canvwidth: int, canvheight: int, bg: _Color | None = None) -> None: ... + +if sys.version_info >= (3, 14): + def save(filename: StrPath, *, overwrite: bool = False) -> None: ... + +onscreenclick = onclick +resetscreen = reset +clearscreen = clear +addshape = register_shape + +def onkeypress(fun: Callable[[], object], key: str | None = None) -> None: ... + +onkeyrelease = onkey + +# Functions copied from _Screen: + +def setup(width: float = 0.5, height: float = 0.75, startx: int | None = None, starty: int | None = None) -> None: ... +def title(titlestring: str) -> None: ... +def bye() -> None: ... +def exitonclick() -> None: ... +def Screen() -> _Screen: ... + +# Functions copied from TNavigator: + +def degrees(fullcircle: float = 360.0) -> None: ... +def radians() -> None: ... +def forward(distance: float) -> None: ... +def back(distance: float) -> None: ... +def right(angle: float) -> None: ... +def left(angle: float) -> None: ... +def pos() -> Vec2D: ... +def xcor() -> float: ... +def ycor() -> float: ... +@overload +def goto(x: tuple[float, float], y: None = None) -> None: ... +@overload +def goto(x: float, y: float) -> None: ... +def home() -> None: ... +def setx(x: float) -> None: ... +def sety(y: float) -> None: ... +@overload +def distance(x: TNavigator | tuple[float, float], y: None = None) -> float: ... +@overload +def distance(x: float, y: float) -> float: ... +@overload +def towards(x: TNavigator | tuple[float, float], y: None = None) -> float: ... +@overload +def towards(x: float, y: float) -> float: ... +def heading() -> float: ... +def setheading(to_angle: float) -> None: ... +def circle(radius: float, extent: float | None = None, steps: int | None = None) -> None: ... + +fd = forward +bk = back +backward = back +rt = right +lt = left +position = pos +setpos = goto +setposition = goto +seth = setheading + +# Functions copied from TPen: +@overload +def resizemode(rmode: None = None) -> str: ... +@overload +def resizemode(rmode: Literal["auto", "user", "noresize"]) -> None: ... +@overload +def pensize(width: None = None) -> int: ... +@overload +def pensize(width: int) -> None: ... +def penup() -> None: ... +def pendown() -> None: ... +def isdown() -> bool: ... +@overload +def speed(speed: None = None) -> int: ... +@overload +def speed(speed: _Speed) -> None: ... +@overload +def pencolor() -> _AnyColor: ... +@overload +def pencolor(color: _Color) -> None: ... +@overload +def pencolor(r: float, g: float, b: float) -> None: ... +@overload +def fillcolor() -> _AnyColor: ... +@overload +def fillcolor(color: _Color) -> None: ... +@overload +def fillcolor(r: float, g: float, b: float) -> None: ... +@overload +def color() -> tuple[_AnyColor, _AnyColor]: ... +@overload +def color(color: _Color) -> None: ... +@overload +def color(r: float, g: float, b: float) -> None: ... +@overload +def color(color1: _Color, color2: _Color) -> None: ... +def showturtle() -> None: ... +def hideturtle() -> None: ... +def isvisible() -> bool: ... 
+ +# Note: signatures 1 and 2 overlap unsafely when no arguments are provided +@overload +def pen() -> _PenState: ... +@overload +def pen( + pen: _PenState | None = None, + *, + shown: bool = ..., + pendown: bool = ..., + pencolor: _Color = ..., + fillcolor: _Color = ..., + pensize: int = ..., + speed: int = ..., + resizemode: Literal["auto", "user", "noresize"] = ..., + stretchfactor: tuple[float, float] = ..., + outline: int = ..., + tilt: float = ..., +) -> None: ... + +width = pensize +up = penup +pu = penup +pd = pendown +down = pendown +st = showturtle +ht = hideturtle + +# Functions copied from RawTurtle: + +def setundobuffer(size: int | None) -> None: ... +def undobufferentries() -> int: ... +@overload +def shape(name: None = None) -> str: ... +@overload +def shape(name: str) -> None: ... + +if sys.version_info >= (3, 12): + def teleport(x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: ... + +# Unsafely overlaps when no arguments are provided +@overload +def shapesize() -> tuple[float, float, float]: ... +@overload +def shapesize(stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None) -> None: ... +@overload +def shearfactor(shear: None = None) -> float: ... +@overload +def shearfactor(shear: float) -> None: ... + +# Unsafely overlaps when no arguments are provided +@overload +def shapetransform() -> tuple[float, float, float, float]: ... +@overload +def shapetransform( + t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None +) -> None: ... +def get_shapepoly() -> _PolygonCoords | None: ... + +if sys.version_info < (3, 13): + @deprecated("Deprecated since Python 3.1; removed in Python 3.13. Use `tiltangle()` instead.") + def settiltangle(angle: float) -> None: ... + +@overload +def tiltangle(angle: None = None) -> float: ... +@overload +def tiltangle(angle: float) -> None: ... +def tilt(angle: float) -> None: ... + +# Can return either 'int' or Tuple[int, ...] based on if the stamp is +# a compound stamp or not. So, as per the "no Union return" policy, +# we return Any. +def stamp() -> Any: ... +def clearstamp(stampid: int | tuple[int, ...]) -> None: ... +def clearstamps(n: int | None = None) -> None: ... +def filling() -> bool: ... + +if sys.version_info >= (3, 14): + @contextmanager + def fill() -> Generator[None]: ... + +def begin_fill() -> None: ... +def end_fill() -> None: ... +@overload +def dot(size: int | _Color | None = None) -> None: ... +@overload +def dot(size: int | None, color: _Color, /) -> None: ... +@overload +def dot(size: int | None, r: float, g: float, b: float, /) -> None: ... +def write(arg: object, move: bool = False, align: str = "left", font: tuple[str, int, str] = ("Arial", 8, "normal")) -> None: ... + +if sys.version_info >= (3, 14): + @contextmanager + def poly() -> Generator[None]: ... + +def begin_poly() -> None: ... +def end_poly() -> None: ... +def get_poly() -> _PolygonCoords | None: ... +def getscreen() -> TurtleScreen: ... +def getturtle() -> Turtle: ... + +getpen = getturtle + +def onrelease(fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... +def ondrag(fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... +def undo() -> None: ... + +turtlesize = shapesize + +# Functions copied from RawTurtle with a few tweaks: + +def clone() -> Turtle: ... 
+ +# Extra functions present only in the global scope: + +done = mainloop diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/types.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/types.pyi new file mode 100644 index 0000000..0293e5c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/types.pyi @@ -0,0 +1,745 @@ +import sys +from _typeshed import AnnotationForm, MaybeNone, SupportsKeysAndGetItem +from _typeshed.importlib import LoaderProtocol +from collections.abc import ( + AsyncGenerator, + Awaitable, + Callable, + Coroutine, + Generator, + ItemsView, + Iterable, + Iterator, + KeysView, + Mapping, + MutableSequence, + ValuesView, +) +from importlib.machinery import ModuleSpec +from typing import Any, ClassVar, Literal, TypeVar, final, overload +from typing_extensions import ParamSpec, Self, TypeAliasType, TypeVarTuple, deprecated, disjoint_base + +if sys.version_info >= (3, 14): + from _typeshed import AnnotateFunc + +__all__ = [ + "FunctionType", + "LambdaType", + "CodeType", + "MappingProxyType", + "SimpleNamespace", + "GeneratorType", + "CoroutineType", + "AsyncGeneratorType", + "MethodType", + "BuiltinFunctionType", + "ModuleType", + "TracebackType", + "FrameType", + "GetSetDescriptorType", + "MemberDescriptorType", + "new_class", + "prepare_class", + "DynamicClassAttribute", + "coroutine", + "BuiltinMethodType", + "ClassMethodDescriptorType", + "MethodDescriptorType", + "MethodWrapperType", + "WrapperDescriptorType", + "resolve_bases", + "CellType", + "GenericAlias", +] + +if sys.version_info >= (3, 10): + __all__ += ["EllipsisType", "NoneType", "NotImplementedType", "UnionType"] + +if sys.version_info >= (3, 12): + __all__ += ["get_original_bases"] + +if sys.version_info >= (3, 13): + __all__ += ["CapsuleType"] + +# Note, all classes "defined" here require special handling. + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_KT_co = TypeVar("_KT_co", covariant=True) +_VT_co = TypeVar("_VT_co", covariant=True) + +# Make sure this class definition stays roughly in line with `builtins.function` +@final +class FunctionType: + @property + def __closure__(self) -> tuple[CellType, ...] | None: ... + __code__: CodeType + __defaults__: tuple[Any, ...] | None + __dict__: dict[str, Any] + @property + def __globals__(self) -> dict[str, Any]: ... + __name__: str + __qualname__: str + __annotations__: dict[str, AnnotationForm] + if sys.version_info >= (3, 14): + __annotate__: AnnotateFunc | None + __kwdefaults__: dict[str, Any] | None + if sys.version_info >= (3, 10): + @property + def __builtins__(self) -> dict[str, Any]: ... + if sys.version_info >= (3, 12): + __type_params__: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] + + __module__: str + if sys.version_info >= (3, 13): + def __new__( + cls, + code: CodeType, + globals: dict[str, Any], + name: str | None = None, + argdefs: tuple[object, ...] | None = None, + closure: tuple[CellType, ...] | None = None, + kwdefaults: dict[str, object] | None = None, + ) -> Self: ... + else: + def __new__( + cls, + code: CodeType, + globals: dict[str, Any], + name: str | None = None, + argdefs: tuple[object, ...] | None = None, + closure: tuple[CellType, ...] | None = None, + ) -> Self: ... + + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + @overload + def __get__(self, instance: None, owner: type, /) -> FunctionType: ... + @overload + def __get__(self, instance: object, owner: type | None = None, /) -> MethodType: ... 
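A brief illustrative sketch (again, not part of the vendored stub) showing the FunctionType attributes annotated above, plus the code object they expose:

import types

def greet(name: str, punctuation: str = "!") -> str:
    return f"hello {name}{punctuation}"

# Every plain `def` statement produces a types.FunctionType instance.
assert isinstance(greet, types.FunctionType)
print(greet.__name__)              # -> greet
print(greet.__defaults__)          # -> ('!',)
print(greet.__code__.co_varnames)  # -> ('name', 'punctuation')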
+ +LambdaType = FunctionType + +@final +class CodeType: + def __eq__(self, value: object, /) -> bool: ... + def __hash__(self) -> int: ... + @property + def co_argcount(self) -> int: ... + @property + def co_posonlyargcount(self) -> int: ... + @property + def co_kwonlyargcount(self) -> int: ... + @property + def co_nlocals(self) -> int: ... + @property + def co_stacksize(self) -> int: ... + @property + def co_flags(self) -> int: ... + @property + def co_code(self) -> bytes: ... + @property + def co_consts(self) -> tuple[Any, ...]: ... + @property + def co_names(self) -> tuple[str, ...]: ... + @property + def co_varnames(self) -> tuple[str, ...]: ... + @property + def co_filename(self) -> str: ... + @property + def co_name(self) -> str: ... + @property + def co_firstlineno(self) -> int: ... + if sys.version_info >= (3, 10): + @property + @deprecated("Deprecated since Python 3.10; will be removed in Python 3.15. Use `CodeType.co_lines()` instead.") + def co_lnotab(self) -> bytes: ... + else: + @property + def co_lnotab(self) -> bytes: ... + + @property + def co_freevars(self) -> tuple[str, ...]: ... + @property + def co_cellvars(self) -> tuple[str, ...]: ... + if sys.version_info >= (3, 10): + @property + def co_linetable(self) -> bytes: ... + def co_lines(self) -> Iterator[tuple[int, int, int | None]]: ... + if sys.version_info >= (3, 11): + @property + def co_exceptiontable(self) -> bytes: ... + @property + def co_qualname(self) -> str: ... + def co_positions(self) -> Iterable[tuple[int | None, int | None, int | None, int | None]]: ... + if sys.version_info >= (3, 14): + def co_branches(self) -> Iterator[tuple[int, int, int]]: ... + + if sys.version_info >= (3, 11): + def __new__( + cls, + argcount: int, + posonlyargcount: int, + kwonlyargcount: int, + nlocals: int, + stacksize: int, + flags: int, + codestring: bytes, + constants: tuple[object, ...], + names: tuple[str, ...], + varnames: tuple[str, ...], + filename: str, + name: str, + qualname: str, + firstlineno: int, + linetable: bytes, + exceptiontable: bytes, + freevars: tuple[str, ...] = ..., + cellvars: tuple[str, ...] = ..., + /, + ) -> Self: ... + elif sys.version_info >= (3, 10): + def __new__( + cls, + argcount: int, + posonlyargcount: int, + kwonlyargcount: int, + nlocals: int, + stacksize: int, + flags: int, + codestring: bytes, + constants: tuple[object, ...], + names: tuple[str, ...], + varnames: tuple[str, ...], + filename: str, + name: str, + firstlineno: int, + linetable: bytes, + freevars: tuple[str, ...] = ..., + cellvars: tuple[str, ...] = ..., + /, + ) -> Self: ... + else: + def __new__( + cls, + argcount: int, + posonlyargcount: int, + kwonlyargcount: int, + nlocals: int, + stacksize: int, + flags: int, + codestring: bytes, + constants: tuple[object, ...], + names: tuple[str, ...], + varnames: tuple[str, ...], + filename: str, + name: str, + firstlineno: int, + lnotab: bytes, + freevars: tuple[str, ...] = ..., + cellvars: tuple[str, ...] = ..., + /, + ) -> Self: ... + if sys.version_info >= (3, 11): + def replace( + self, + *, + co_argcount: int = -1, + co_posonlyargcount: int = -1, + co_kwonlyargcount: int = -1, + co_nlocals: int = -1, + co_stacksize: int = -1, + co_flags: int = -1, + co_firstlineno: int = -1, + co_code: bytes = ..., + co_consts: tuple[object, ...] = ..., + co_names: tuple[str, ...] = ..., + co_varnames: tuple[str, ...] = ..., + co_freevars: tuple[str, ...] = ..., + co_cellvars: tuple[str, ...] 
= ..., + co_filename: str = ..., + co_name: str = ..., + co_qualname: str = ..., + co_linetable: bytes = ..., + co_exceptiontable: bytes = ..., + ) -> Self: ... + elif sys.version_info >= (3, 10): + def replace( + self, + *, + co_argcount: int = -1, + co_posonlyargcount: int = -1, + co_kwonlyargcount: int = -1, + co_nlocals: int = -1, + co_stacksize: int = -1, + co_flags: int = -1, + co_firstlineno: int = -1, + co_code: bytes = ..., + co_consts: tuple[object, ...] = ..., + co_names: tuple[str, ...] = ..., + co_varnames: tuple[str, ...] = ..., + co_freevars: tuple[str, ...] = ..., + co_cellvars: tuple[str, ...] = ..., + co_filename: str = ..., + co_name: str = ..., + co_linetable: bytes = ..., + ) -> Self: ... + else: + def replace( + self, + *, + co_argcount: int = -1, + co_posonlyargcount: int = -1, + co_kwonlyargcount: int = -1, + co_nlocals: int = -1, + co_stacksize: int = -1, + co_flags: int = -1, + co_firstlineno: int = -1, + co_code: bytes = ..., + co_consts: tuple[object, ...] = ..., + co_names: tuple[str, ...] = ..., + co_varnames: tuple[str, ...] = ..., + co_freevars: tuple[str, ...] = ..., + co_cellvars: tuple[str, ...] = ..., + co_filename: str = ..., + co_name: str = ..., + co_lnotab: bytes = ..., + ) -> Self: ... + + if sys.version_info >= (3, 13): + __replace__ = replace + +@final +class MappingProxyType(Mapping[_KT_co, _VT_co]): # type: ignore[type-var] # pyright: ignore[reportInvalidTypeArguments] + __hash__: ClassVar[None] # type: ignore[assignment] + def __new__(cls, mapping: SupportsKeysAndGetItem[_KT_co, _VT_co]) -> Self: ... + def __getitem__(self, key: _KT_co, /) -> _VT_co: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def __iter__(self) -> Iterator[_KT_co]: ... + def __len__(self) -> int: ... + def __eq__(self, value: object, /) -> bool: ... + def copy(self) -> dict[_KT_co, _VT_co]: ... + def keys(self) -> KeysView[_KT_co]: ... + def values(self) -> ValuesView[_VT_co]: ... + def items(self) -> ItemsView[_KT_co, _VT_co]: ... + @overload + def get(self, key: _KT_co, /) -> _VT_co | None: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter + @overload + def get(self, key: _KT_co, default: _VT_co, /) -> _VT_co: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter + @overload + def get(self, key: _KT_co, default: _T2, /) -> _VT_co | _T2: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __reversed__(self) -> Iterator[_KT_co]: ... + def __or__(self, value: Mapping[_T1, _T2], /) -> dict[_KT_co | _T1, _VT_co | _T2]: ... + def __ror__(self, value: Mapping[_T1, _T2], /) -> dict[_KT_co | _T1, _VT_co | _T2]: ... + +if sys.version_info >= (3, 12): + @disjoint_base + class SimpleNamespace: + __hash__: ClassVar[None] # type: ignore[assignment] + if sys.version_info >= (3, 13): + def __init__( + self, mapping_or_iterable: Mapping[str, Any] | Iterable[tuple[str, Any]] = (), /, **kwargs: Any + ) -> None: ... + else: + def __init__(self, **kwargs: Any) -> None: ... + + def __eq__(self, value: object, /) -> bool: ... + def __getattribute__(self, name: str, /) -> Any: ... + def __setattr__(self, name: str, value: Any, /) -> None: ... + def __delattr__(self, name: str, /) -> None: ... + if sys.version_info >= (3, 13): + def __replace__(self, **kwargs: Any) -> Self: ... 
+ +else: + class SimpleNamespace: + __hash__: ClassVar[None] # type: ignore[assignment] + def __init__(self, **kwargs: Any) -> None: ... + def __eq__(self, value: object, /) -> bool: ... + def __getattribute__(self, name: str, /) -> Any: ... + def __setattr__(self, name: str, value: Any, /) -> None: ... + def __delattr__(self, name: str, /) -> None: ... + +@disjoint_base +class ModuleType: + __name__: str + __file__: str | None + @property + def __dict__(self) -> dict[str, Any]: ... # type: ignore[override] + __loader__: LoaderProtocol | None + __package__: str | None + __path__: MutableSequence[str] + __spec__: ModuleSpec | None + # N.B. Although this is the same type as `builtins.object.__doc__`, + # it is deliberately redeclared here. Most symbols declared in the namespace + # of `types.ModuleType` are available as "implicit globals" within a module's + # namespace, but this is not true for symbols declared in the namespace of `builtins.object`. + # Redeclaring `__doc__` here helps some type checkers understand that `__doc__` is available + # as an implicit global in all modules, similar to `__name__`, `__file__`, `__spec__`, etc. + __doc__: str | None + __annotations__: dict[str, AnnotationForm] + if sys.version_info >= (3, 14): + __annotate__: AnnotateFunc | None + + def __init__(self, name: str, doc: str | None = ...) -> None: ... + # __getattr__ doesn't exist at runtime, + # but having it here in typeshed makes dynamic imports + # using `builtins.__import__` or `importlib.import_module` less painful + def __getattr__(self, name: str) -> Any: ... + +@final +class CellType: + def __new__(cls, contents: object = ..., /) -> Self: ... + __hash__: ClassVar[None] # type: ignore[assignment] + cell_contents: Any + +_YieldT_co = TypeVar("_YieldT_co", covariant=True) +_SendT_contra = TypeVar("_SendT_contra", contravariant=True, default=None) +_ReturnT_co = TypeVar("_ReturnT_co", covariant=True, default=None) + +@final +class GeneratorType(Generator[_YieldT_co, _SendT_contra, _ReturnT_co]): + @property + def gi_code(self) -> CodeType: ... + @property + def gi_frame(self) -> FrameType: ... + @property + def gi_running(self) -> bool: ... + @property + def gi_yieldfrom(self) -> Iterator[_YieldT_co] | None: ... + if sys.version_info >= (3, 11): + @property + def gi_suspended(self) -> bool: ... + __name__: str + __qualname__: str + def __iter__(self) -> Self: ... + def __next__(self) -> _YieldT_co: ... + def send(self, arg: _SendT_contra, /) -> _YieldT_co: ... + @overload + def throw( + self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ..., / + ) -> _YieldT_co: ... + @overload + def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = ..., /) -> _YieldT_co: ... + if sys.version_info >= (3, 13): + def __class_getitem__(cls, item: Any, /) -> Any: ... + +@final +class AsyncGeneratorType(AsyncGenerator[_YieldT_co, _SendT_contra]): + @property + def ag_await(self) -> Awaitable[Any] | None: ... + @property + def ag_code(self) -> CodeType: ... + @property + def ag_frame(self) -> FrameType: ... + @property + def ag_running(self) -> bool: ... + __name__: str + __qualname__: str + if sys.version_info >= (3, 12): + @property + def ag_suspended(self) -> bool: ... + + def __aiter__(self) -> Self: ... + def __anext__(self) -> Coroutine[Any, Any, _YieldT_co]: ... + def asend(self, val: _SendT_contra, /) -> Coroutine[Any, Any, _YieldT_co]: ... 
+ @overload + async def athrow( + self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ..., / + ) -> _YieldT_co: ... + @overload + async def athrow(self, typ: BaseException, val: None = None, tb: TracebackType | None = ..., /) -> _YieldT_co: ... + def aclose(self) -> Coroutine[Any, Any, None]: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +# Non-default variations to accommodate coroutines +_SendT_nd_contra = TypeVar("_SendT_nd_contra", contravariant=True) +_ReturnT_nd_co = TypeVar("_ReturnT_nd_co", covariant=True) + +@final +class CoroutineType(Coroutine[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co]): + __name__: str + __qualname__: str + @property + def cr_await(self) -> Any | None: ... + @property + def cr_code(self) -> CodeType: ... + if sys.version_info >= (3, 12): + @property + def cr_frame(self) -> FrameType | None: ... + else: + @property + def cr_frame(self) -> FrameType: ... + + @property + def cr_running(self) -> bool: ... + @property + def cr_origin(self) -> tuple[tuple[str, int, str], ...] | None: ... + if sys.version_info >= (3, 11): + @property + def cr_suspended(self) -> bool: ... + + def close(self) -> None: ... + def __await__(self) -> Generator[Any, None, _ReturnT_nd_co]: ... + def send(self, arg: _SendT_nd_contra, /) -> _YieldT_co: ... + @overload + def throw( + self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ..., / + ) -> _YieldT_co: ... + @overload + def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = ..., /) -> _YieldT_co: ... + if sys.version_info >= (3, 13): + def __class_getitem__(cls, item: Any, /) -> Any: ... + +@final +class MethodType: + @property + def __closure__(self) -> tuple[CellType, ...] | None: ... # inherited from the added function + @property + def __code__(self) -> CodeType: ... # inherited from the added function + @property + def __defaults__(self) -> tuple[Any, ...] | None: ... # inherited from the added function + @property + def __func__(self) -> Callable[..., Any]: ... + @property + def __self__(self) -> object: ... + @property + def __name__(self) -> str: ... # inherited from the added function + @property + def __qualname__(self) -> str: ... # inherited from the added function + def __new__(cls, func: Callable[..., Any], instance: object, /) -> Self: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + + if sys.version_info >= (3, 13): + def __get__(self, instance: object, owner: type | None = None, /) -> Self: ... + + def __eq__(self, value: object, /) -> bool: ... + def __hash__(self) -> int: ... + +@final +class BuiltinFunctionType: + @property + def __self__(self) -> object | ModuleType: ... + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __eq__(self, value: object, /) -> bool: ... + def __hash__(self) -> int: ... + +BuiltinMethodType = BuiltinFunctionType + +@final +class WrapperDescriptorType: + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + @property + def __objclass__(self) -> type: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... + +@final +class MethodWrapperType: + @property + def __self__(self) -> object: ... + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... 
+ @property + def __objclass__(self) -> type: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __eq__(self, value: object, /) -> bool: ... + def __ne__(self, value: object, /) -> bool: ... + def __hash__(self) -> int: ... + +@final +class MethodDescriptorType: + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + @property + def __objclass__(self) -> type: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... + +@final +class ClassMethodDescriptorType: + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + @property + def __objclass__(self) -> type: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... + +@final +class TracebackType: + def __new__(cls, tb_next: TracebackType | None, tb_frame: FrameType, tb_lasti: int, tb_lineno: int) -> Self: ... + tb_next: TracebackType | None + # the rest are read-only + @property + def tb_frame(self) -> FrameType: ... + @property + def tb_lasti(self) -> int: ... + @property + def tb_lineno(self) -> int: ... + +@final +class FrameType: + @property + def f_back(self) -> FrameType | None: ... + @property + def f_builtins(self) -> dict[str, Any]: ... + @property + def f_code(self) -> CodeType: ... + @property + def f_globals(self) -> dict[str, Any]: ... + @property + def f_lasti(self) -> int: ... + # see discussion in #6769: f_lineno *can* sometimes be None, + # but you should probably file a bug report with CPython if you encounter it being None in the wild. + # An `int | None` annotation here causes too many false-positive errors, so applying `int | Any`. + @property + def f_lineno(self) -> int | MaybeNone: ... + @property + def f_locals(self) -> dict[str, Any]: ... + f_trace: Callable[[FrameType, str, Any], Any] | None + f_trace_lines: bool + f_trace_opcodes: bool + def clear(self) -> None: ... + if sys.version_info >= (3, 14): + @property + def f_generator(self) -> GeneratorType[Any, Any, Any] | CoroutineType[Any, Any, Any] | None: ... + +@final +class GetSetDescriptorType: + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + @property + def __objclass__(self) -> type: ... + def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... + def __set__(self, instance: Any, value: Any, /) -> None: ... + def __delete__(self, instance: Any, /) -> None: ... + +@final +class MemberDescriptorType: + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + @property + def __objclass__(self) -> type: ... + def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... + def __set__(self, instance: Any, value: Any, /) -> None: ... + def __delete__(self, instance: Any, /) -> None: ... + +def new_class( + name: str, + bases: Iterable[object] = (), + kwds: dict[str, Any] | None = None, + exec_body: Callable[[dict[str, Any]], object] | None = None, +) -> type: ... +def resolve_bases(bases: Iterable[object]) -> tuple[Any, ...]: ... +def prepare_class( + name: str, bases: tuple[type, ...] = (), kwds: dict[str, Any] | None = None +) -> tuple[type, dict[str, Any], dict[str, Any]]: ... + +if sys.version_info >= (3, 12): + def get_original_bases(cls: type, /) -> tuple[Any, ...]: ... 
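
For a sense of how the class-creation helpers stubbed above (`new_class`, `resolve_bases`, `prepare_class`) fit together, here is a minimal sketch of `types.new_class`; the `Base`/`Widget` names and the `answer` attribute are invented for the example.

import types

class Base:
    kind = "widget"

# exec_body receives the (initially empty) class namespace and fills it in,
# playing the role that an indented class body normally plays.
Widget = types.new_class(
    "Widget",
    bases=(Base,),
    exec_body=lambda ns: ns.update(answer=42),
)

assert issubclass(Widget, Base)
assert Widget.answer == 42
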
+ +# Does not actually inherit from property, but saying it does makes sure that +# pyright handles this class correctly. +class DynamicClassAttribute(property): + fget: Callable[[Any], Any] | None + fset: Callable[[Any, Any], object] | None # type: ignore[assignment] + fdel: Callable[[Any], object] | None # type: ignore[assignment] + overwrite_doc: bool + __isabstractmethod__: bool + def __init__( + self, + fget: Callable[[Any], Any] | None = None, + fset: Callable[[Any, Any], object] | None = None, + fdel: Callable[[Any], object] | None = None, + doc: str | None = None, + ) -> None: ... + def __get__(self, instance: Any, ownerclass: type | None = None) -> Any: ... + def __set__(self, instance: Any, value: Any) -> None: ... + def __delete__(self, instance: Any) -> None: ... + def getter(self, fget: Callable[[Any], Any]) -> DynamicClassAttribute: ... + def setter(self, fset: Callable[[Any, Any], object]) -> DynamicClassAttribute: ... + def deleter(self, fdel: Callable[[Any], object]) -> DynamicClassAttribute: ... + +_Fn = TypeVar("_Fn", bound=Callable[..., object]) +_R = TypeVar("_R") +_P = ParamSpec("_P") + +# it's not really an Awaitable, but can be used in an await expression. Real type: Generator & Awaitable +@overload +def coroutine(func: Callable[_P, Generator[Any, Any, _R]]) -> Callable[_P, Awaitable[_R]]: ... +@overload +def coroutine(func: _Fn) -> _Fn: ... +@disjoint_base +class GenericAlias: + @property + def __origin__(self) -> type | TypeAliasType: ... + @property + def __args__(self) -> tuple[Any, ...]: ... + @property + def __parameters__(self) -> tuple[Any, ...]: ... + def __new__(cls, origin: type, args: Any, /) -> Self: ... + def __getitem__(self, typeargs: Any, /) -> GenericAlias: ... + def __eq__(self, value: object, /) -> bool: ... + def __hash__(self) -> int: ... + def __mro_entries__(self, bases: Iterable[object], /) -> tuple[type, ...]: ... + if sys.version_info >= (3, 11): + @property + def __unpacked__(self) -> bool: ... + @property + def __typing_unpacked_tuple_args__(self) -> tuple[Any, ...] | None: ... + if sys.version_info >= (3, 10): + def __or__(self, value: Any, /) -> UnionType: ... + def __ror__(self, value: Any, /) -> UnionType: ... + + # GenericAlias delegates attr access to `__origin__` + def __getattr__(self, name: str) -> Any: ... + +if sys.version_info >= (3, 10): + @final + class NoneType: + def __bool__(self) -> Literal[False]: ... + + @final + class EllipsisType: ... + + @final + class NotImplementedType(Any): ... + + @final + class UnionType: + @property + def __args__(self) -> tuple[Any, ...]: ... + @property + def __parameters__(self) -> tuple[Any, ...]: ... + # `(int | str) | Literal["foo"]` returns a generic alias to an instance of `_SpecialForm` (`Union`). + # Normally we'd express this using the return type of `_SpecialForm.__ror__`, + # but because `UnionType.__or__` accepts `Any`, type checkers will use + # the return type of `UnionType.__or__` to infer the result of this operation + # rather than `_SpecialForm.__ror__`. To mitigate this, we use `| Any` + # in the return type of `UnionType.__(r)or__`. + def __or__(self, value: Any, /) -> UnionType | Any: ... + def __ror__(self, value: Any, /) -> UnionType | Any: ... + def __eq__(self, value: object, /) -> bool: ... + def __hash__(self) -> int: ... + # you can only subscript a `UnionType` instance if at least one of the elements + # in the union is a generic alias instance that has a non-empty `__parameters__` + def __getitem__(self, parameters: Any) -> object: ... 
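
The runtime behaviour those `UnionType` annotations describe can be checked directly: on 3.10+, `X | Y` between types evaluates to a `types.UnionType` instance. A small illustrative check, guarded by a version test:

import sys
import types

if sys.version_info >= (3, 10):
    u = int | str                      # X | Y builds a types.UnionType instance
    assert isinstance(u, types.UnionType)
    assert u.__args__ == (int, str)
    assert u == (str | int)            # member order does not affect equality
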
+ +if sys.version_info >= (3, 13): + @final + class CapsuleType: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/typing.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/typing.pyi new file mode 100644 index 0000000..e3e5d1f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/typing.pyi @@ -0,0 +1,1172 @@ +# Since this module defines "overload" it is not recognized by Ruff as typing.overload +# TODO: The collections import is required, otherwise mypy crashes. +# https://github.com/python/mypy/issues/16744 +import collections # noqa: F401 # pyright: ignore[reportUnusedImport] +import sys +import typing_extensions +from _collections_abc import dict_items, dict_keys, dict_values +from _typeshed import IdentityFunction, ReadableBuffer, SupportsGetItem, SupportsGetItemViewable, SupportsKeysAndGetItem, Viewable +from abc import ABCMeta, abstractmethod +from re import Match as Match, Pattern as Pattern +from types import ( + BuiltinFunctionType, + CodeType, + FunctionType, + GenericAlias, + MethodDescriptorType, + MethodType, + MethodWrapperType, + ModuleType, + TracebackType, + WrapperDescriptorType, +) +from typing_extensions import Never as _Never, ParamSpec as _ParamSpec, deprecated + +if sys.version_info >= (3, 14): + from _typeshed import EvaluateFunc + + from annotationlib import Format + +if sys.version_info >= (3, 10): + from types import UnionType + +__all__ = [ + "AbstractSet", + "Annotated", + "Any", + "AnyStr", + "AsyncContextManager", + "AsyncGenerator", + "AsyncIterable", + "AsyncIterator", + "Awaitable", + "BinaryIO", + "ByteString", + "Callable", + "ChainMap", + "ClassVar", + "Collection", + "Container", + "ContextManager", + "Coroutine", + "Counter", + "DefaultDict", + "Deque", + "Dict", + "Final", + "ForwardRef", + "FrozenSet", + "Generator", + "Generic", + "Hashable", + "IO", + "ItemsView", + "Iterable", + "Iterator", + "KeysView", + "List", + "Literal", + "Mapping", + "MappingView", + "Match", + "MutableMapping", + "MutableSequence", + "MutableSet", + "NamedTuple", + "NewType", + "NoReturn", + "Optional", + "OrderedDict", + "Pattern", + "Protocol", + "Reversible", + "Sequence", + "Set", + "Sized", + "SupportsAbs", + "SupportsBytes", + "SupportsComplex", + "SupportsFloat", + "SupportsIndex", + "SupportsInt", + "SupportsRound", + "Text", + "TextIO", + "Tuple", + "Type", + "TypeVar", + "TypedDict", + "Union", + "ValuesView", + "TYPE_CHECKING", + "cast", + "final", + "get_args", + "get_origin", + "get_type_hints", + "no_type_check", + "no_type_check_decorator", + "overload", + "runtime_checkable", +] + +if sys.version_info >= (3, 14): + __all__ += ["evaluate_forward_ref"] + +if sys.version_info >= (3, 10): + __all__ += ["Concatenate", "ParamSpec", "ParamSpecArgs", "ParamSpecKwargs", "TypeAlias", "TypeGuard", "is_typeddict"] + +if sys.version_info >= (3, 11): + __all__ += [ + "LiteralString", + "Never", + "NotRequired", + "Required", + "Self", + "TypeVarTuple", + "Unpack", + "assert_never", + "assert_type", + "clear_overloads", + "dataclass_transform", + "get_overloads", + "reveal_type", + ] + +if sys.version_info >= (3, 12): + __all__ += ["TypeAliasType", "override"] + +if sys.version_info >= (3, 13): + __all__ += ["get_protocol_members", "is_protocol", "NoDefault", "TypeIs", "ReadOnly"] + +# We can't use this name here because it leads to issues with mypy, likely +# due to an import cycle. Below instead we use Any with a comment. +# from _typeshed import AnnotationForm + +class Any: ... 
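
One practical consequence of `Any` being defined as a plain class here is that, at runtime on 3.11+, it can be used as a base class; type checkers then treat attribute access on such instances as `Any`. A hypothetical sketch (the `MockAnything` name is invented):

import sys
from typing import Any

if sys.version_info >= (3, 11):
    # Subclassing Any is allowed at runtime since Python 3.11.
    class MockAnything(Any):
        pass

    m = MockAnything()
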
+ +class _Final: + __slots__ = ("__weakref__",) + +def final(f: _T) -> _T: ... +@final +class TypeVar: + @property + def __name__(self) -> str: ... + @property + def __bound__(self) -> Any | None: ... # AnnotationForm + @property + def __constraints__(self) -> tuple[Any, ...]: ... # AnnotationForm + @property + def __covariant__(self) -> bool: ... + @property + def __contravariant__(self) -> bool: ... + if sys.version_info >= (3, 12): + @property + def __infer_variance__(self) -> bool: ... + if sys.version_info >= (3, 13): + @property + def __default__(self) -> Any: ... # AnnotationForm + if sys.version_info >= (3, 13): + def __new__( + cls, + name: str, + *constraints: Any, # AnnotationForm + bound: Any | None = None, # AnnotationForm + contravariant: bool = False, + covariant: bool = False, + infer_variance: bool = False, + default: Any = ..., # AnnotationForm + ) -> Self: ... + elif sys.version_info >= (3, 12): + def __new__( + cls, + name: str, + *constraints: Any, # AnnotationForm + bound: Any | None = None, # AnnotationForm + covariant: bool = False, + contravariant: bool = False, + infer_variance: bool = False, + ) -> Self: ... + elif sys.version_info >= (3, 11): + def __new__( + cls, + name: str, + *constraints: Any, # AnnotationForm + bound: Any | None = None, # AnnotationForm + covariant: bool = False, + contravariant: bool = False, + ) -> Self: ... + else: + def __init__( + self, + name: str, + *constraints: Any, # AnnotationForm + bound: Any | None = None, # AnnotationForm + covariant: bool = False, + contravariant: bool = False, + ) -> None: ... + if sys.version_info >= (3, 10): + def __or__(self, right: Any, /) -> _SpecialForm: ... # AnnotationForm + def __ror__(self, left: Any, /) -> _SpecialForm: ... # AnnotationForm + if sys.version_info >= (3, 11): + def __typing_subst__(self, arg: Any, /) -> Any: ... + if sys.version_info >= (3, 13): + def __typing_prepare_subst__(self, alias: Any, args: Any, /) -> tuple[Any, ...]: ... + def has_default(self) -> bool: ... + if sys.version_info >= (3, 14): + @property + def evaluate_bound(self) -> EvaluateFunc | None: ... + @property + def evaluate_constraints(self) -> EvaluateFunc | None: ... + @property + def evaluate_default(self) -> EvaluateFunc | None: ... + +# N.B. Keep this definition in sync with typing_extensions._SpecialForm +@final +class _SpecialForm(_Final): + __slots__ = ("_name", "__doc__", "_getitem") + def __getitem__(self, parameters: Any) -> object: ... + if sys.version_info >= (3, 10): + def __or__(self, other: Any) -> _SpecialForm: ... + def __ror__(self, other: Any) -> _SpecialForm: ... + +Union: _SpecialForm +Protocol: _SpecialForm +Callable: _SpecialForm +Type: _SpecialForm +NoReturn: _SpecialForm +ClassVar: _SpecialForm + +Optional: _SpecialForm +Tuple: _SpecialForm +Final: _SpecialForm + +Literal: _SpecialForm +TypedDict: _SpecialForm + +if sys.version_info >= (3, 11): + Self: _SpecialForm + Never: _SpecialForm + Unpack: _SpecialForm + Required: _SpecialForm + NotRequired: _SpecialForm + LiteralString: _SpecialForm + + @final + class TypeVarTuple: + @property + def __name__(self) -> str: ... + if sys.version_info >= (3, 13): + @property + def __default__(self) -> Any: ... # AnnotationForm + def has_default(self) -> bool: ... + if sys.version_info >= (3, 13): + def __new__(cls, name: str, *, default: Any = ...) -> Self: ... # AnnotationForm + elif sys.version_info >= (3, 12): + def __new__(cls, name: str) -> Self: ... + else: + def __init__(self, name: str) -> None: ... + + def __iter__(self) -> Any: ... 
+ def __typing_subst__(self, arg: Never, /) -> Never: ... + def __typing_prepare_subst__(self, alias: Any, args: Any, /) -> tuple[Any, ...]: ... + if sys.version_info >= (3, 14): + @property + def evaluate_default(self) -> EvaluateFunc | None: ... + +if sys.version_info >= (3, 10): + @final + class ParamSpecArgs: + @property + def __origin__(self) -> ParamSpec: ... + if sys.version_info >= (3, 12): + def __new__(cls, origin: ParamSpec) -> Self: ... + else: + def __init__(self, origin: ParamSpec) -> None: ... + + def __eq__(self, other: object, /) -> bool: ... + __hash__: ClassVar[None] # type: ignore[assignment] + + @final + class ParamSpecKwargs: + @property + def __origin__(self) -> ParamSpec: ... + if sys.version_info >= (3, 12): + def __new__(cls, origin: ParamSpec) -> Self: ... + else: + def __init__(self, origin: ParamSpec) -> None: ... + + def __eq__(self, other: object, /) -> bool: ... + __hash__: ClassVar[None] # type: ignore[assignment] + + @final + class ParamSpec: + @property + def __name__(self) -> str: ... + @property + def __bound__(self) -> Any | None: ... # AnnotationForm + @property + def __covariant__(self) -> bool: ... + @property + def __contravariant__(self) -> bool: ... + if sys.version_info >= (3, 12): + @property + def __infer_variance__(self) -> bool: ... + if sys.version_info >= (3, 13): + @property + def __default__(self) -> Any: ... # AnnotationForm + if sys.version_info >= (3, 13): + def __new__( + cls, + name: str, + *, + bound: Any | None = None, # AnnotationForm + contravariant: bool = False, + covariant: bool = False, + infer_variance: bool = False, + default: Any = ..., # AnnotationForm + ) -> Self: ... + elif sys.version_info >= (3, 12): + def __new__( + cls, + name: str, + *, + bound: Any | None = None, # AnnotationForm + contravariant: bool = False, + covariant: bool = False, + infer_variance: bool = False, + ) -> Self: ... + elif sys.version_info >= (3, 11): + def __new__( + cls, + name: str, + *, + bound: Any | None = None, # AnnotationForm + contravariant: bool = False, + covariant: bool = False, + ) -> Self: ... + else: + def __init__( + self, + name: str, + *, + bound: Any | None = None, # AnnotationForm + contravariant: bool = False, + covariant: bool = False, + ) -> None: ... + + @property + def args(self) -> ParamSpecArgs: ... + @property + def kwargs(self) -> ParamSpecKwargs: ... + if sys.version_info >= (3, 11): + def __typing_subst__(self, arg: Any, /) -> Any: ... + def __typing_prepare_subst__(self, alias: Any, args: Any, /) -> tuple[Any, ...]: ... + + def __or__(self, right: Any, /) -> _SpecialForm: ... + def __ror__(self, left: Any, /) -> _SpecialForm: ... + if sys.version_info >= (3, 13): + def has_default(self) -> bool: ... + if sys.version_info >= (3, 14): + @property + def evaluate_default(self) -> EvaluateFunc | None: ... + + Concatenate: _SpecialForm + TypeAlias: _SpecialForm + TypeGuard: _SpecialForm + + class NewType: + def __init__(self, name: str, tp: Any) -> None: ... # AnnotationForm + if sys.version_info >= (3, 11): + @staticmethod + def __call__(x: _T, /) -> _T: ... + else: + def __call__(self, x: _T) -> _T: ... + + def __or__(self, other: Any) -> _SpecialForm: ... + def __ror__(self, other: Any) -> _SpecialForm: ... + __supertype__: type | NewType + +else: + def NewType(name: str, tp: Any) -> Any: ... + +_F = TypeVar("_F", bound=Callable[..., Any]) +_P = _ParamSpec("_P") +_T = TypeVar("_T") + +_FT = TypeVar("_FT", bound=Callable[..., Any] | type) + +# These type variables are used by the container types. 
+_S = TypeVar("_S") +_KT = TypeVar("_KT") # Key type. +_VT = TypeVar("_VT") # Value type. +_T_co = TypeVar("_T_co", covariant=True) # Any type covariant containers. +_KT_co = TypeVar("_KT_co", covariant=True) # Key type covariant containers. +_VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers. +_TC = TypeVar("_TC", bound=type[object]) + +def overload(func: _F) -> _F: ... +def no_type_check(arg: _F) -> _F: ... +def no_type_check_decorator(decorator: Callable[_P, _T]) -> Callable[_P, _T]: ... + +# This itself is only available during type checking +def type_check_only(func_or_cls: _FT) -> _FT: ... + +# Type aliases and type constructors + +@type_check_only +class _Alias: + # Class for defining generic aliases for library types. + def __getitem__(self, typeargs: Any) -> Any: ... + +List = _Alias() +Dict = _Alias() +DefaultDict = _Alias() +Set = _Alias() +FrozenSet = _Alias() +Counter = _Alias() +Deque = _Alias() +ChainMap = _Alias() + +OrderedDict = _Alias() + +Annotated: _SpecialForm + +# Predefined type variables. +AnyStr = TypeVar("AnyStr", str, bytes) # noqa: Y001 + +@type_check_only +class _Generic: + if sys.version_info < (3, 12): + __slots__ = () + + if sys.version_info >= (3, 10): + @classmethod + def __class_getitem__(cls, args: TypeVar | ParamSpec | tuple[TypeVar | ParamSpec, ...]) -> _Final: ... + else: + @classmethod + def __class_getitem__(cls, args: TypeVar | tuple[TypeVar, ...]) -> _Final: ... + +Generic: type[_Generic] + +class _ProtocolMeta(ABCMeta): + if sys.version_info >= (3, 12): + def __init__(cls, *args: Any, **kwargs: Any) -> None: ... + +# Abstract base classes. + +def runtime_checkable(cls: _TC) -> _TC: ... +@runtime_checkable +class SupportsInt(Protocol, metaclass=ABCMeta): + __slots__ = () + @abstractmethod + def __int__(self) -> int: ... + +@runtime_checkable +class SupportsFloat(Protocol, metaclass=ABCMeta): + __slots__ = () + @abstractmethod + def __float__(self) -> float: ... + +@runtime_checkable +class SupportsComplex(Protocol, metaclass=ABCMeta): + __slots__ = () + @abstractmethod + def __complex__(self) -> complex: ... + +@runtime_checkable +class SupportsBytes(Protocol, metaclass=ABCMeta): + __slots__ = () + @abstractmethod + def __bytes__(self) -> bytes: ... + +@runtime_checkable +class SupportsIndex(Protocol, metaclass=ABCMeta): + __slots__ = () + @abstractmethod + def __index__(self) -> int: ... + +@runtime_checkable +class SupportsAbs(Protocol[_T_co]): + __slots__ = () + @abstractmethod + def __abs__(self) -> _T_co: ... + +@runtime_checkable +class SupportsRound(Protocol[_T_co]): + __slots__ = () + @overload + @abstractmethod + def __round__(self) -> int: ... + @overload + @abstractmethod + def __round__(self, ndigits: int, /) -> _T_co: ... + +@runtime_checkable +class Sized(Protocol, metaclass=ABCMeta): + @abstractmethod + def __len__(self) -> int: ... + +@runtime_checkable +class Hashable(Protocol, metaclass=ABCMeta): + # TODO: This is special, in that a subclass of a hashable class may not be hashable + # (for example, list vs. object). It's not obvious how to represent this. This class + # is currently mostly useless for static checking. + @abstractmethod + def __hash__(self) -> int: ... + +@runtime_checkable +class Iterable(Protocol[_T_co]): + @abstractmethod + def __iter__(self) -> Iterator[_T_co]: ... + +@runtime_checkable +class Iterator(Iterable[_T_co], Protocol[_T_co]): + @abstractmethod + def __next__(self) -> _T_co: ... + def __iter__(self) -> Iterator[_T_co]: ... 
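
Because the protocols above are `@runtime_checkable`, `isinstance()` only verifies that the named methods exist, not their signatures or return types. A few illustrative checks (the `HasClose`/`Resource` names are invented for the example):

from typing import Iterable, Protocol, SupportsIndex, runtime_checkable

assert isinstance(3, SupportsIndex)        # int defines __index__
assert not isinstance(3.5, SupportsIndex)  # float does not
assert isinstance("abc", Iterable)         # only checks that __iter__ exists

@runtime_checkable
class HasClose(Protocol):
    def close(self) -> None: ...

class Resource:
    def close(self) -> None:
        pass

assert isinstance(Resource(), HasClose)    # presence of the method is all that is checked
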
+ +@runtime_checkable +class Reversible(Iterable[_T_co], Protocol[_T_co]): + @abstractmethod + def __reversed__(self) -> Iterator[_T_co]: ... + +_YieldT_co = TypeVar("_YieldT_co", covariant=True) +_SendT_contra = TypeVar("_SendT_contra", contravariant=True, default=None) +_ReturnT_co = TypeVar("_ReturnT_co", covariant=True, default=None) + +@runtime_checkable +class Generator(Iterator[_YieldT_co], Protocol[_YieldT_co, _SendT_contra, _ReturnT_co]): + def __next__(self) -> _YieldT_co: ... + @abstractmethod + def send(self, value: _SendT_contra, /) -> _YieldT_co: ... + @overload + @abstractmethod + def throw( + self, typ: type[BaseException], val: BaseException | object = None, tb: TracebackType | None = None, / + ) -> _YieldT_co: ... + @overload + @abstractmethod + def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = None, /) -> _YieldT_co: ... + if sys.version_info >= (3, 13): + def close(self) -> _ReturnT_co | None: ... + else: + def close(self) -> None: ... + + def __iter__(self) -> Generator[_YieldT_co, _SendT_contra, _ReturnT_co]: ... + +# NOTE: Prior to Python 3.13 these aliases are lacking the second _ExitT_co parameter +if sys.version_info >= (3, 13): + from contextlib import AbstractAsyncContextManager as AsyncContextManager, AbstractContextManager as ContextManager +else: + from contextlib import AbstractAsyncContextManager, AbstractContextManager + + @runtime_checkable + class ContextManager(AbstractContextManager[_T_co, bool | None], Protocol[_T_co]): ... + + @runtime_checkable + class AsyncContextManager(AbstractAsyncContextManager[_T_co, bool | None], Protocol[_T_co]): ... + +@runtime_checkable +class Awaitable(Protocol[_T_co]): + @abstractmethod + def __await__(self) -> Generator[Any, Any, _T_co]: ... + +# Non-default variations to accommodate coroutines, and `AwaitableGenerator` having a 4th type parameter. +_SendT_nd_contra = TypeVar("_SendT_nd_contra", contravariant=True) +_ReturnT_nd_co = TypeVar("_ReturnT_nd_co", covariant=True) + +class Coroutine(Awaitable[_ReturnT_nd_co], Generic[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co]): + __name__: str + __qualname__: str + + @abstractmethod + def send(self, value: _SendT_nd_contra, /) -> _YieldT_co: ... + @overload + @abstractmethod + def throw( + self, typ: type[BaseException], val: BaseException | object = None, tb: TracebackType | None = None, / + ) -> _YieldT_co: ... + @overload + @abstractmethod + def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = None, /) -> _YieldT_co: ... + @abstractmethod + def close(self) -> None: ... + +# NOTE: This type does not exist in typing.py or PEP 484 but mypy needs it to exist. +# The parameters correspond to Generator, but the 4th is the original type. +# Obsolete, use _typeshed._type_checker_internals.AwaitableGenerator instead. +@type_check_only +class AwaitableGenerator( + Awaitable[_ReturnT_nd_co], + Generator[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co], + Generic[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co, _S], + metaclass=ABCMeta, +): ... + +@runtime_checkable +class AsyncIterable(Protocol[_T_co]): + @abstractmethod + def __aiter__(self) -> AsyncIterator[_T_co]: ... + +@runtime_checkable +class AsyncIterator(AsyncIterable[_T_co], Protocol[_T_co]): + @abstractmethod + def __anext__(self) -> Awaitable[_T_co]: ... + def __aiter__(self) -> AsyncIterator[_T_co]: ... 
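
`Generator` is parameterised by yield, send, and return types, with the send and return parameters defaulting to `None` in the stub above. A hypothetical annotated generator, for illustration only:

from typing import Generator

def countdown(n: int) -> Generator[int, None, str]:
    # Yields ints, accepts nothing via send(), and its StopIteration carries a str.
    while n > 0:
        yield n
        n -= 1
    return "lift-off"

assert sum(countdown(3)) == 6  # consumes the yields: 3 + 2 + 1
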
+ +@runtime_checkable +class AsyncGenerator(AsyncIterator[_YieldT_co], Protocol[_YieldT_co, _SendT_contra]): + def __anext__(self) -> Coroutine[Any, Any, _YieldT_co]: ... + @abstractmethod + def asend(self, value: _SendT_contra, /) -> Coroutine[Any, Any, _YieldT_co]: ... + @overload + @abstractmethod + def athrow( + self, typ: type[BaseException], val: BaseException | object = None, tb: TracebackType | None = None, / + ) -> Coroutine[Any, Any, _YieldT_co]: ... + @overload + @abstractmethod + def athrow( + self, typ: BaseException, val: None = None, tb: TracebackType | None = None, / + ) -> Coroutine[Any, Any, _YieldT_co]: ... + def aclose(self) -> Coroutine[Any, Any, None]: ... + +@runtime_checkable +class Container(Protocol[_T_co]): + # This is generic more on vibes than anything else + @abstractmethod + def __contains__(self, x: object, /) -> bool: ... + +@runtime_checkable +class Collection(Iterable[_T_co], Container[_T_co], Protocol[_T_co]): + # Implement Sized (but don't have it as a base class). + @abstractmethod + def __len__(self) -> int: ... + +class Sequence(Reversible[_T_co], Collection[_T_co]): + @overload + @abstractmethod + def __getitem__(self, index: int) -> _T_co: ... + @overload + @abstractmethod + def __getitem__(self, index: slice) -> Sequence[_T_co]: ... + # Mixin methods + def index(self, value: Any, start: int = 0, stop: int = ...) -> int: ... + def count(self, value: Any) -> int: ... + def __contains__(self, value: object) -> bool: ... + def __iter__(self) -> Iterator[_T_co]: ... + def __reversed__(self) -> Iterator[_T_co]: ... + +class MutableSequence(Sequence[_T]): + @abstractmethod + def insert(self, index: int, value: _T) -> None: ... + @overload + @abstractmethod + def __getitem__(self, index: int) -> _T: ... + @overload + @abstractmethod + def __getitem__(self, index: slice) -> MutableSequence[_T]: ... + @overload + @abstractmethod + def __setitem__(self, index: int, value: _T) -> None: ... + @overload + @abstractmethod + def __setitem__(self, index: slice, value: Iterable[_T]) -> None: ... + @overload + @abstractmethod + def __delitem__(self, index: int) -> None: ... + @overload + @abstractmethod + def __delitem__(self, index: slice) -> None: ... + # Mixin methods + def append(self, value: _T) -> None: ... + def clear(self) -> None: ... + def extend(self, values: Iterable[_T]) -> None: ... + def reverse(self) -> None: ... + def pop(self, index: int = -1) -> _T: ... + def remove(self, value: _T) -> None: ... + def __iadd__(self, values: Iterable[_T]) -> typing_extensions.Self: ... + +class AbstractSet(Collection[_T_co]): + @abstractmethod + def __contains__(self, x: object) -> bool: ... + def _hash(self) -> int: ... + # Mixin methods + def __le__(self, other: AbstractSet[Any]) -> bool: ... + def __lt__(self, other: AbstractSet[Any]) -> bool: ... + def __gt__(self, other: AbstractSet[Any]) -> bool: ... + def __ge__(self, other: AbstractSet[Any]) -> bool: ... + def __and__(self, other: AbstractSet[Any]) -> AbstractSet[_T_co]: ... + def __or__(self, other: AbstractSet[_T]) -> AbstractSet[_T_co | _T]: ... + def __sub__(self, other: AbstractSet[Any]) -> AbstractSet[_T_co]: ... + def __xor__(self, other: AbstractSet[_T]) -> AbstractSet[_T_co | _T]: ... + def __eq__(self, other: object) -> bool: ... + def isdisjoint(self, other: Iterable[Any]) -> bool: ... + +class MutableSet(AbstractSet[_T]): + @abstractmethod + def add(self, value: _T) -> None: ... + @abstractmethod + def discard(self, value: _T) -> None: ... + # Mixin methods + def clear(self) -> None: ... 
+ def pop(self) -> _T: ... + def remove(self, value: _T) -> None: ... + def __ior__(self, it: AbstractSet[_T]) -> typing_extensions.Self: ... # type: ignore[override,misc] + def __iand__(self, it: AbstractSet[Any]) -> typing_extensions.Self: ... + def __ixor__(self, it: AbstractSet[_T]) -> typing_extensions.Self: ... # type: ignore[override,misc] + def __isub__(self, it: AbstractSet[Any]) -> typing_extensions.Self: ... + +class MappingView(Sized): + __slots__ = ("_mapping",) + def __init__(self, mapping: Sized) -> None: ... # undocumented + def __len__(self) -> int: ... + +class ItemsView(MappingView, AbstractSet[tuple[_KT_co, _VT_co]], Generic[_KT_co, _VT_co]): + def __init__(self, mapping: SupportsGetItemViewable[_KT_co, _VT_co]) -> None: ... # undocumented + def __and__(self, other: Iterable[Any]) -> set[tuple[_KT_co, _VT_co]]: ... + def __rand__(self, other: Iterable[_T]) -> set[_T]: ... + def __contains__(self, item: tuple[object, object]) -> bool: ... # type: ignore[override] + def __iter__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... + def __or__(self, other: Iterable[_T]) -> set[tuple[_KT_co, _VT_co] | _T]: ... + def __ror__(self, other: Iterable[_T]) -> set[tuple[_KT_co, _VT_co] | _T]: ... + def __sub__(self, other: Iterable[Any]) -> set[tuple[_KT_co, _VT_co]]: ... + def __rsub__(self, other: Iterable[_T]) -> set[_T]: ... + def __xor__(self, other: Iterable[_T]) -> set[tuple[_KT_co, _VT_co] | _T]: ... + def __rxor__(self, other: Iterable[_T]) -> set[tuple[_KT_co, _VT_co] | _T]: ... + +class KeysView(MappingView, AbstractSet[_KT_co]): + def __init__(self, mapping: Viewable[_KT_co]) -> None: ... # undocumented + def __and__(self, other: Iterable[Any]) -> set[_KT_co]: ... + def __rand__(self, other: Iterable[_T]) -> set[_T]: ... + def __contains__(self, key: object) -> bool: ... + def __iter__(self) -> Iterator[_KT_co]: ... + def __or__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ... + def __ror__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ... + def __sub__(self, other: Iterable[Any]) -> set[_KT_co]: ... + def __rsub__(self, other: Iterable[_T]) -> set[_T]: ... + def __xor__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ... + def __rxor__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ... + +class ValuesView(MappingView, Collection[_VT_co]): + def __init__(self, mapping: SupportsGetItemViewable[Any, _VT_co]) -> None: ... # undocumented + def __contains__(self, value: object) -> bool: ... + def __iter__(self) -> Iterator[_VT_co]: ... + +class Mapping(Collection[_KT], Generic[_KT, _VT_co]): + # TODO: We wish the key type could also be covariant, but that doesn't work, + # see discussion in https://github.com/python/typing/pull/273. + @abstractmethod + def __getitem__(self, key: _KT, /) -> _VT_co: ... + # Mixin methods + @overload + def get(self, key: _KT, /) -> _VT_co | None: ... + @overload + def get(self, key: _KT, /, default: _VT_co) -> _VT_co: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter + @overload + def get(self, key: _KT, /, default: _T) -> _VT_co | _T: ... + def items(self) -> ItemsView[_KT, _VT_co]: ... + def keys(self) -> KeysView[_KT]: ... + def values(self) -> ValuesView[_VT_co]: ... + def __contains__(self, key: object, /) -> bool: ... + def __eq__(self, other: object, /) -> bool: ... + +class MutableMapping(Mapping[_KT, _VT]): + @abstractmethod + def __setitem__(self, key: _KT, value: _VT, /) -> None: ... + @abstractmethod + def __delitem__(self, key: _KT, /) -> None: ... + def clear(self) -> None: ... 
+ @overload + def pop(self, key: _KT, /) -> _VT: ... + @overload + def pop(self, key: _KT, /, default: _VT) -> _VT: ... + @overload + def pop(self, key: _KT, /, default: _T) -> _VT | _T: ... + def popitem(self) -> tuple[_KT, _VT]: ... + # This overload should be allowed only if the value type is compatible with None. + # + # Keep the following methods in line with MutableMapping.setdefault, modulo positional-only differences: + # -- collections.OrderedDict.setdefault + # -- collections.ChainMap.setdefault + # -- weakref.WeakKeyDictionary.setdefault + @overload + def setdefault(self: MutableMapping[_KT, _T | None], key: _KT, default: None = None, /) -> _T | None: ... + @overload + def setdefault(self, key: _KT, default: _VT, /) -> _VT: ... + # 'update' used to take a Union, but using overloading is better. + # The second overloaded type here is a bit too general, because + # Mapping[tuple[_KT, _VT], W] is a subclass of Iterable[tuple[_KT, _VT]], + # but will always have the behavior of the first overloaded type + # at runtime, leading to keys of a mix of types _KT and tuple[_KT, _VT]. + # We don't currently have any way of forcing all Mappings to use + # the first overload, but by using overloading rather than a Union, + # mypy will commit to using the first overload when the argument is + # known to be a Mapping with unknown type parameters, which is closer + # to the behavior we want. See mypy issue #1430. + # + # Various mapping classes have __ior__ methods that should be kept roughly in line with .update(): + # -- dict.__ior__ + # -- os._Environ.__ior__ + # -- collections.UserDict.__ior__ + # -- collections.ChainMap.__ior__ + # -- peewee.attrdict.__add__ + # -- peewee.attrdict.__iadd__ + # -- weakref.WeakValueDictionary.__ior__ + # -- weakref.WeakKeyDictionary.__ior__ + @overload + def update(self, m: SupportsKeysAndGetItem[_KT, _VT], /) -> None: ... + @overload + def update(self: SupportsGetItem[str, _VT], m: SupportsKeysAndGetItem[str, _VT], /, **kwargs: _VT) -> None: ... + @overload + def update(self, m: Iterable[tuple[_KT, _VT]], /) -> None: ... + @overload + def update(self: SupportsGetItem[str, _VT], m: Iterable[tuple[str, _VT]], /, **kwargs: _VT) -> None: ... + @overload + def update(self: SupportsGetItem[str, _VT], **kwargs: _VT) -> None: ... + +Text = str + +TYPE_CHECKING: Final[bool] + +# In stubs, the arguments of the IO class are marked as positional-only. +# This differs from runtime, but better reflects the fact that in reality +# classes deriving from IO use different names for the arguments. +class IO(Generic[AnyStr]): + # At runtime these are all abstract properties, + # but making them abstract in the stub is hugely disruptive, for not much gain. + # See #8726 + __slots__ = () + @property + def mode(self) -> str: ... + # Usually str, but may be bytes if a bytes path was passed to open(). See #10737. + # If PEP 696 becomes available, we may want to use a defaulted TypeVar here. + @property + def name(self) -> str | Any: ... + @abstractmethod + def close(self) -> None: ... + @property + def closed(self) -> bool: ... + @abstractmethod + def fileno(self) -> int: ... + @abstractmethod + def flush(self) -> None: ... + @abstractmethod + def isatty(self) -> bool: ... + @abstractmethod + def read(self, n: int = -1, /) -> AnyStr: ... + @abstractmethod + def readable(self) -> bool: ... + @abstractmethod + def readline(self, limit: int = -1, /) -> AnyStr: ... + @abstractmethod + def readlines(self, hint: int = -1, /) -> list[AnyStr]: ... 
+ @abstractmethod + def seek(self, offset: int, whence: int = 0, /) -> int: ... + @abstractmethod + def seekable(self) -> bool: ... + @abstractmethod + def tell(self) -> int: ... + @abstractmethod + def truncate(self, size: int | None = None, /) -> int: ... + @abstractmethod + def writable(self) -> bool: ... + @abstractmethod + @overload + def write(self: IO[bytes], s: ReadableBuffer, /) -> int: ... + @abstractmethod + @overload + def write(self, s: AnyStr, /) -> int: ... + @abstractmethod + @overload + def writelines(self: IO[bytes], lines: Iterable[ReadableBuffer], /) -> None: ... + @abstractmethod + @overload + def writelines(self, lines: Iterable[AnyStr], /) -> None: ... + @abstractmethod + def __next__(self) -> AnyStr: ... + @abstractmethod + def __iter__(self) -> Iterator[AnyStr]: ... + @abstractmethod + def __enter__(self) -> IO[AnyStr]: ... + @abstractmethod + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None, / + ) -> None: ... + +class BinaryIO(IO[bytes]): + __slots__ = () + @abstractmethod + def __enter__(self) -> BinaryIO: ... + +class TextIO(IO[str]): + # See comment regarding the @properties in the `IO` class + __slots__ = () + @property + def buffer(self) -> BinaryIO: ... + @property + def encoding(self) -> str: ... + @property + def errors(self) -> str | None: ... + @property + def line_buffering(self) -> int: ... # int on PyPy, bool on CPython + @property + def newlines(self) -> Any: ... # None, str or tuple + @abstractmethod + def __enter__(self) -> TextIO: ... + +ByteString: typing_extensions.TypeAlias = bytes | bytearray | memoryview + +# Functions + +_get_type_hints_obj_allowed_types: typing_extensions.TypeAlias = ( # noqa: Y042 + object + | Callable[..., Any] + | FunctionType + | BuiltinFunctionType + | MethodType + | ModuleType + | WrapperDescriptorType + | MethodWrapperType + | MethodDescriptorType +) + +if sys.version_info >= (3, 14): + def get_type_hints( + obj: _get_type_hints_obj_allowed_types, + globalns: dict[str, Any] | None = None, + localns: Mapping[str, Any] | None = None, + include_extras: bool = False, + *, + format: Format | None = None, + ) -> dict[str, Any]: ... # AnnotationForm + +else: + def get_type_hints( + obj: _get_type_hints_obj_allowed_types, + globalns: dict[str, Any] | None = None, + localns: Mapping[str, Any] | None = None, + include_extras: bool = False, + ) -> dict[str, Any]: ... # AnnotationForm + +def get_args(tp: Any) -> tuple[Any, ...]: ... # AnnotationForm + +if sys.version_info >= (3, 10): + @overload + def get_origin(tp: ParamSpecArgs | ParamSpecKwargs) -> ParamSpec: ... + @overload + def get_origin(tp: UnionType) -> type[UnionType]: ... + +@overload +def get_origin(tp: GenericAlias) -> type: ... +@overload +def get_origin(tp: Any) -> Any | None: ... # AnnotationForm +@overload +def cast(typ: type[_T], val: Any) -> _T: ... +@overload +def cast(typ: str, val: Any) -> Any: ... +@overload +def cast(typ: object, val: Any) -> Any: ... + +if sys.version_info >= (3, 11): + def reveal_type(obj: _T, /) -> _T: ... + def assert_never(arg: Never, /) -> Never: ... + def assert_type(val: _T, typ: Any, /) -> _T: ... # AnnotationForm + def clear_overloads() -> None: ... + def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: ... 
+ def dataclass_transform( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + frozen_default: bool = False, # on 3.11, runtime accepts it as part of kwargs + field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = (), + **kwargs: Any, + ) -> IdentityFunction: ... + +# Type constructors + +# Obsolete, will be changed to a function. Use _typeshed._type_checker_internals.NamedTupleFallback instead. +class NamedTuple(tuple[Any, ...]): + _field_defaults: ClassVar[dict[str, Any]] + _fields: ClassVar[tuple[str, ...]] + # __orig_bases__ sometimes exists on <3.12, but not consistently + # So we only add it to the stub on 3.12+. + if sys.version_info >= (3, 12): + __orig_bases__: ClassVar[tuple[Any, ...]] + + @overload + def __init__(self, typename: str, fields: Iterable[tuple[str, Any]], /) -> None: ... + @overload + @typing_extensions.deprecated( + "Creating a typing.NamedTuple using keyword arguments is deprecated and support will be removed in Python 3.15" + ) + def __init__(self, typename: str, fields: None = None, /, **kwargs: Any) -> None: ... + @classmethod + def _make(cls, iterable: Iterable[Any]) -> typing_extensions.Self: ... + def _asdict(self) -> dict[str, Any]: ... + def _replace(self, **kwargs: Any) -> typing_extensions.Self: ... + if sys.version_info >= (3, 13): + def __replace__(self, **kwargs: Any) -> typing_extensions.Self: ... + +# Internal mypy fallback type for all typed dicts (does not exist at runtime) +# N.B. Keep this mostly in sync with typing_extensions._TypedDict/mypy_extensions._TypedDict +# Obsolete, use _typeshed._type_checker_internals.TypedDictFallback instead. +@type_check_only +class _TypedDict(Mapping[str, object], metaclass=ABCMeta): + __total__: ClassVar[bool] + __required_keys__: ClassVar[frozenset[str]] + __optional_keys__: ClassVar[frozenset[str]] + # __orig_bases__ sometimes exists on <3.12, but not consistently, + # so we only add it to the stub on 3.12+ + if sys.version_info >= (3, 12): + __orig_bases__: ClassVar[tuple[Any, ...]] + if sys.version_info >= (3, 13): + __readonly_keys__: ClassVar[frozenset[str]] + __mutable_keys__: ClassVar[frozenset[str]] + + def copy(self) -> typing_extensions.Self: ... + # Using Never so that only calls using mypy plugin hook that specialize the signature + # can go through. + def setdefault(self, k: _Never, default: object) -> object: ... + # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. + def pop(self, k: _Never, default: _T = ...) -> object: ... # pyright: ignore[reportInvalidTypeVarUse] + def update(self, m: typing_extensions.Self, /) -> None: ... + def __delitem__(self, k: _Never) -> None: ... + def items(self) -> dict_items[str, object]: ... + def keys(self) -> dict_keys[str, object]: ... + def values(self) -> dict_values[str, object]: ... + @overload + def __or__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... + @overload + def __or__(self, value: dict[str, Any], /) -> dict[str, object]: ... + @overload + def __ror__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... + @overload + def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: ... + # supposedly incompatible definitions of __or__ and __ior__ + def __ior__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... 
# type: ignore[misc] + +if sys.version_info >= (3, 14): + from annotationlib import ForwardRef as ForwardRef + + def evaluate_forward_ref( + forward_ref: ForwardRef, + *, + owner: object = None, + globals: dict[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + type_params: tuple[TypeVar, ParamSpec, TypeVarTuple] | None = None, + format: Format | None = None, + ) -> Any: ... # AnnotationForm + +else: + @final + class ForwardRef(_Final): + __slots__ = ( + "__forward_arg__", + "__forward_code__", + "__forward_evaluated__", + "__forward_value__", + "__forward_is_argument__", + "__forward_is_class__", + "__forward_module__", + ) + __forward_arg__: str + __forward_code__: CodeType + __forward_evaluated__: bool + __forward_value__: Any | None # AnnotationForm + __forward_is_argument__: bool + __forward_is_class__: bool + __forward_module__: Any | None + + def __init__(self, arg: str, is_argument: bool = True, module: Any | None = None, *, is_class: bool = False) -> None: ... + + if sys.version_info >= (3, 13): + @overload + @deprecated( + "Failing to pass a value to the 'type_params' parameter of ForwardRef._evaluate() is deprecated, " + "as it leads to incorrect behaviour when evaluating a stringified annotation " + "that references a PEP 695 type parameter. It will be disallowed in Python 3.15." + ) + def _evaluate( + self, globalns: dict[str, Any] | None, localns: Mapping[str, Any] | None, *, recursive_guard: frozenset[str] + ) -> Any | None: ... # AnnotationForm + @overload + def _evaluate( + self, + globalns: dict[str, Any] | None, + localns: Mapping[str, Any] | None, + type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...], + *, + recursive_guard: frozenset[str], + ) -> Any | None: ... # AnnotationForm + elif sys.version_info >= (3, 12): + def _evaluate( + self, + globalns: dict[str, Any] | None, + localns: Mapping[str, Any] | None, + type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] | None = None, + *, + recursive_guard: frozenset[str], + ) -> Any | None: ... # AnnotationForm + else: + def _evaluate( + self, globalns: dict[str, Any] | None, localns: Mapping[str, Any] | None, recursive_guard: frozenset[str] + ) -> Any | None: ... # AnnotationForm + + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + if sys.version_info >= (3, 11): + def __or__(self, other: Any) -> _SpecialForm: ... + def __ror__(self, other: Any) -> _SpecialForm: ... + +if sys.version_info >= (3, 10): + def is_typeddict(tp: object) -> bool: ... + +def _type_repr(obj: object) -> str: ... + +if sys.version_info >= (3, 12): + _TypeParameter: typing_extensions.TypeAlias = ( + TypeVar + | typing_extensions.TypeVar + | ParamSpec + | typing_extensions.ParamSpec + | TypeVarTuple + | typing_extensions.TypeVarTuple + ) + + def override(method: _F, /) -> _F: ... + @final + class TypeAliasType: + def __new__(cls, name: str, value: Any, *, type_params: tuple[_TypeParameter, ...] = ()) -> Self: ... + @property + def __value__(self) -> Any: ... # AnnotationForm + @property + def __type_params__(self) -> tuple[_TypeParameter, ...]: ... + @property + def __parameters__(self) -> tuple[Any, ...]: ... # AnnotationForm + @property + def __name__(self) -> str: ... + # It's writable on types, but not on instances of TypeAliasType. + @property + def __module__(self) -> str | None: ... # type: ignore[override] + def __getitem__(self, parameters: Any, /) -> GenericAlias: ... # AnnotationForm + def __or__(self, right: Any, /) -> _SpecialForm: ... 
+ def __ror__(self, left: Any, /) -> _SpecialForm: ... + if sys.version_info >= (3, 14): + @property + def evaluate_value(self) -> EvaluateFunc: ... + +if sys.version_info >= (3, 13): + def is_protocol(tp: type, /) -> bool: ... + def get_protocol_members(tp: type, /) -> frozenset[str]: ... + @final + @type_check_only + class _NoDefaultType: ... + + NoDefault: _NoDefaultType + TypeIs: _SpecialForm + ReadOnly: _SpecialForm diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/typing_extensions.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/typing_extensions.pyi new file mode 100644 index 0000000..5fd3f45 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/typing_extensions.pyi @@ -0,0 +1,709 @@ +import abc +import enum +import sys +from _collections_abc import dict_items, dict_keys, dict_values +from _typeshed import AnnotationForm, IdentityFunction, Incomplete, Unused +from collections.abc import ( + AsyncGenerator as AsyncGenerator, + AsyncIterable as AsyncIterable, + AsyncIterator as AsyncIterator, + Awaitable as Awaitable, + Collection as Collection, + Container as Container, + Coroutine as Coroutine, + Generator as Generator, + Hashable as Hashable, + ItemsView as ItemsView, + Iterable as Iterable, + Iterator as Iterator, + KeysView as KeysView, + Mapping as Mapping, + MappingView as MappingView, + MutableMapping as MutableMapping, + MutableSequence as MutableSequence, + MutableSet as MutableSet, + Reversible as Reversible, + Sequence as Sequence, + Sized as Sized, + ValuesView as ValuesView, +) +from contextlib import AbstractAsyncContextManager as AsyncContextManager, AbstractContextManager as ContextManager +from re import Match as Match, Pattern as Pattern +from types import GenericAlias, ModuleType +from typing import ( # noqa: Y022,Y037,Y038,Y039,UP035 + IO as IO, + TYPE_CHECKING as TYPE_CHECKING, + AbstractSet as AbstractSet, + Any as Any, + AnyStr as AnyStr, + BinaryIO as BinaryIO, + Callable as Callable, + ChainMap as ChainMap, + ClassVar as ClassVar, + Counter as Counter, + DefaultDict as DefaultDict, + Deque as Deque, + Dict as Dict, + ForwardRef as ForwardRef, + FrozenSet as FrozenSet, + Generic as Generic, + List as List, + NoReturn as NoReturn, + Optional as Optional, + Set as Set, + Text as Text, + TextIO as TextIO, + Tuple as Tuple, + Type as Type, + TypedDict as TypedDict, + TypeVar as _TypeVar, + Union as Union, + _Alias, + _SpecialForm, + cast as cast, + no_type_check as no_type_check, + no_type_check_decorator as no_type_check_decorator, + overload as overload, + type_check_only, +) + +if sys.version_info >= (3, 10): + from types import UnionType + +# Please keep order the same as at runtime. +__all__ = [ + # Super-special typing primitives. + "Any", + "ClassVar", + "Concatenate", + "Final", + "LiteralString", + "ParamSpec", + "ParamSpecArgs", + "ParamSpecKwargs", + "Self", + "Type", + "TypeVar", + "TypeVarTuple", + "Unpack", + # ABCs (from collections.abc). + "Awaitable", + "AsyncIterator", + "AsyncIterable", + "Coroutine", + "AsyncGenerator", + "AsyncContextManager", + "Buffer", + "ChainMap", + # Concrete collection types. + "ContextManager", + "Counter", + "Deque", + "DefaultDict", + "NamedTuple", + "OrderedDict", + "TypedDict", + # Structural checks, a.k.a. protocols. + "SupportsAbs", + "SupportsBytes", + "SupportsComplex", + "SupportsFloat", + "SupportsIndex", + "SupportsInt", + "SupportsRound", + "Reader", + "Writer", + # One-off things. 
+ "Annotated", + "assert_never", + "assert_type", + "clear_overloads", + "dataclass_transform", + "deprecated", + "disjoint_base", + "Doc", + "evaluate_forward_ref", + "get_overloads", + "final", + "Format", + "get_annotations", + "get_args", + "get_origin", + "get_original_bases", + "get_protocol_members", + "get_type_hints", + "IntVar", + "is_protocol", + "is_typeddict", + "Literal", + "NewType", + "overload", + "override", + "Protocol", + "Sentinel", + "reveal_type", + "runtime", + "runtime_checkable", + "Text", + "TypeAlias", + "TypeAliasType", + "TypeForm", + "TypeGuard", + "TypeIs", + "TYPE_CHECKING", + "type_repr", + "Never", + "NoReturn", + "ReadOnly", + "Required", + "NotRequired", + "NoDefault", + "NoExtraItems", + # Pure aliases, have always been in typing + "AbstractSet", + "AnyStr", + "BinaryIO", + "Callable", + "Collection", + "Container", + "Dict", + "ForwardRef", + "FrozenSet", + "Generator", + "Generic", + "Hashable", + "IO", + "ItemsView", + "Iterable", + "Iterator", + "KeysView", + "List", + "Mapping", + "MappingView", + "Match", + "MutableMapping", + "MutableSequence", + "MutableSet", + "Optional", + "Pattern", + "Reversible", + "Sequence", + "Set", + "Sized", + "TextIO", + "Tuple", + "Union", + "ValuesView", + "cast", + "no_type_check", + "no_type_check_decorator", + # Added dynamically + "CapsuleType", +] + +_T = _TypeVar("_T") +_F = _TypeVar("_F", bound=Callable[..., Any]) +_TC = _TypeVar("_TC", bound=type[object]) +_T_co = _TypeVar("_T_co", covariant=True) # Any type covariant containers. +_T_contra = _TypeVar("_T_contra", contravariant=True) + +# Do not import (and re-export) Protocol or runtime_checkable from +# typing module because type checkers need to be able to distinguish +# typing.Protocol and typing_extensions.Protocol so they can properly +# warn users about potential runtime exceptions when using typing.Protocol +# on older versions of Python. +Protocol: _SpecialForm + +def runtime_checkable(cls: _TC) -> _TC: ... + +# This alias for above is kept here for backwards compatibility. +runtime = runtime_checkable +Final: _SpecialForm + +def final(f: _F) -> _F: ... +def disjoint_base(cls: _TC) -> _TC: ... + +Literal: _SpecialForm + +def IntVar(name: str) -> Any: ... # returns a new TypeVar + +# Internal mypy fallback type for all typed dicts (does not exist at runtime) +# N.B. Keep this mostly in sync with typing._TypedDict/mypy_extensions._TypedDict +@type_check_only +class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): + __required_keys__: ClassVar[frozenset[str]] + __optional_keys__: ClassVar[frozenset[str]] + __total__: ClassVar[bool] + __orig_bases__: ClassVar[tuple[Any, ...]] + # PEP 705 + __readonly_keys__: ClassVar[frozenset[str]] + __mutable_keys__: ClassVar[frozenset[str]] + # PEP 728 + __closed__: ClassVar[bool | None] + __extra_items__: ClassVar[AnnotationForm] + def copy(self) -> Self: ... + # Using Never so that only calls using mypy plugin hook that specialize the signature + # can go through. + def setdefault(self, k: Never, default: object) -> object: ... + # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. + def pop(self, k: Never, default: _T = ...) -> object: ... # pyright: ignore[reportInvalidTypeVarUse] + def update(self, m: Self, /) -> None: ... + def items(self) -> dict_items[str, object]: ... + def keys(self) -> dict_keys[str, object]: ... + def values(self) -> dict_values[str, object]: ... + def __delitem__(self, k: Never) -> None: ... + @overload + def __or__(self, value: Self, /) -> Self: ... 
+ @overload + def __or__(self, value: dict[str, Any], /) -> dict[str, object]: ... + @overload + def __ror__(self, value: Self, /) -> Self: ... + @overload + def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: ... + # supposedly incompatible definitions of `__ior__` and `__or__`: + # Since this module defines "Self" it is not recognized by Ruff as typing_extensions.Self + def __ior__(self, value: Self, /) -> Self: ... # type: ignore[misc] + +OrderedDict = _Alias() + +if sys.version_info >= (3, 13): + from typing import get_type_hints as get_type_hints +else: + def get_type_hints( + obj: Any, globalns: dict[str, Any] | None = None, localns: Mapping[str, Any] | None = None, include_extras: bool = False + ) -> dict[str, AnnotationForm]: ... + +def get_args(tp: AnnotationForm) -> tuple[AnnotationForm, ...]: ... + +if sys.version_info >= (3, 10): + @overload + def get_origin(tp: UnionType) -> type[UnionType]: ... + +@overload +def get_origin(tp: GenericAlias) -> type: ... +@overload +def get_origin(tp: ParamSpecArgs | ParamSpecKwargs) -> ParamSpec: ... +@overload +def get_origin(tp: AnnotationForm) -> AnnotationForm | None: ... + +Annotated: _SpecialForm +_AnnotatedAlias: Any # undocumented + +# New and changed things in 3.10 +if sys.version_info >= (3, 10): + from typing import ( + Concatenate as Concatenate, + ParamSpecArgs as ParamSpecArgs, + ParamSpecKwargs as ParamSpecKwargs, + TypeAlias as TypeAlias, + TypeGuard as TypeGuard, + is_typeddict as is_typeddict, + ) +else: + @final + class ParamSpecArgs: + @property + def __origin__(self) -> ParamSpec: ... + def __init__(self, origin: ParamSpec) -> None: ... + + @final + class ParamSpecKwargs: + @property + def __origin__(self) -> ParamSpec: ... + def __init__(self, origin: ParamSpec) -> None: ... + + Concatenate: _SpecialForm + TypeAlias: _SpecialForm + TypeGuard: _SpecialForm + def is_typeddict(tp: object) -> bool: ... + +# New and changed things in 3.11 +if sys.version_info >= (3, 11): + from typing import ( + LiteralString as LiteralString, + NamedTuple as NamedTuple, + Never as Never, + NewType as NewType, + NotRequired as NotRequired, + Required as Required, + Self as Self, + Unpack as Unpack, + assert_never as assert_never, + assert_type as assert_type, + clear_overloads as clear_overloads, + dataclass_transform as dataclass_transform, + get_overloads as get_overloads, + reveal_type as reveal_type, + ) +else: + Self: _SpecialForm + Never: _SpecialForm + def reveal_type(obj: _T, /) -> _T: ... + def assert_never(arg: Never, /) -> Never: ... + def assert_type(val: _T, typ: AnnotationForm, /) -> _T: ... + def clear_overloads() -> None: ... + def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: ... + + Required: _SpecialForm + NotRequired: _SpecialForm + LiteralString: _SpecialForm + Unpack: _SpecialForm + + def dataclass_transform( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + frozen_default: bool = False, + field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = (), + **kwargs: object, + ) -> IdentityFunction: ... + + class NamedTuple(tuple[Any, ...]): + _field_defaults: ClassVar[dict[str, Any]] + _fields: ClassVar[tuple[str, ...]] + __orig_bases__: ClassVar[tuple[Any, ...]] + @overload + def __init__(self, typename: str, fields: Iterable[tuple[str, Any]] = ...) -> None: ... + @overload + def __init__(self, typename: str, fields: None = None, **kwargs: Any) -> None: ... + @classmethod + def _make(cls, iterable: Iterable[Any]) -> Self: ... 
+ def _asdict(self) -> dict[str, Any]: ... + def _replace(self, **kwargs: Any) -> Self: ... + + class NewType: + def __init__(self, name: str, tp: AnnotationForm) -> None: ... + def __call__(self, obj: _T, /) -> _T: ... + __supertype__: type | NewType + if sys.version_info >= (3, 10): + def __or__(self, other: Any) -> _SpecialForm: ... + def __ror__(self, other: Any) -> _SpecialForm: ... + +if sys.version_info >= (3, 12): + from collections.abc import Buffer as Buffer + from types import get_original_bases as get_original_bases + from typing import ( + SupportsAbs as SupportsAbs, + SupportsBytes as SupportsBytes, + SupportsComplex as SupportsComplex, + SupportsFloat as SupportsFloat, + SupportsIndex as SupportsIndex, + SupportsInt as SupportsInt, + SupportsRound as SupportsRound, + override as override, + ) +else: + def override(arg: _F, /) -> _F: ... + def get_original_bases(cls: type, /) -> tuple[Any, ...]: ... + + # mypy and pyright object to this being both ABC and Protocol. + # At runtime it inherits from ABC and is not a Protocol, but it is on the + # allowlist for use as a Protocol. + @runtime_checkable + class Buffer(Protocol, abc.ABC): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + # Not actually a Protocol at runtime; see + # https://github.com/python/typeshed/issues/10224 for why we're defining it this way + def __buffer__(self, flags: int, /) -> memoryview: ... + + @runtime_checkable + class SupportsInt(Protocol, metaclass=abc.ABCMeta): + __slots__ = () + @abc.abstractmethod + def __int__(self) -> int: ... + + @runtime_checkable + class SupportsFloat(Protocol, metaclass=abc.ABCMeta): + __slots__ = () + @abc.abstractmethod + def __float__(self) -> float: ... + + @runtime_checkable + class SupportsComplex(Protocol, metaclass=abc.ABCMeta): + __slots__ = () + @abc.abstractmethod + def __complex__(self) -> complex: ... + + @runtime_checkable + class SupportsBytes(Protocol, metaclass=abc.ABCMeta): + __slots__ = () + @abc.abstractmethod + def __bytes__(self) -> bytes: ... + + @runtime_checkable + class SupportsIndex(Protocol, metaclass=abc.ABCMeta): + __slots__ = () + @abc.abstractmethod + def __index__(self) -> int: ... + + @runtime_checkable + class SupportsAbs(Protocol[_T_co]): + __slots__ = () + @abc.abstractmethod + def __abs__(self) -> _T_co: ... + + @runtime_checkable + class SupportsRound(Protocol[_T_co]): + __slots__ = () + @overload + @abc.abstractmethod + def __round__(self) -> int: ... + @overload + @abc.abstractmethod + def __round__(self, ndigits: int, /) -> _T_co: ... + +if sys.version_info >= (3, 14): + from io import Reader as Reader, Writer as Writer +else: + @runtime_checkable + class Reader(Protocol[_T_co]): + __slots__ = () + @abc.abstractmethod + def read(self, size: int = ..., /) -> _T_co: ... + + @runtime_checkable + class Writer(Protocol[_T_contra]): + __slots__ = () + @abc.abstractmethod + def write(self, data: _T_contra, /) -> int: ... + +if sys.version_info >= (3, 13): + from types import CapsuleType as CapsuleType + from typing import ( + NoDefault as NoDefault, + ParamSpec as ParamSpec, + ReadOnly as ReadOnly, + TypeIs as TypeIs, + TypeVar as TypeVar, + TypeVarTuple as TypeVarTuple, + get_protocol_members as get_protocol_members, + is_protocol as is_protocol, + ) + from warnings import deprecated as deprecated +else: + def is_protocol(tp: type, /) -> bool: ... + def get_protocol_members(tp: type, /) -> frozenset[str]: ... + @final + @type_check_only + class _NoDefaultType: ... 
+ + NoDefault: _NoDefaultType + @final + class CapsuleType: ... + + class deprecated: + message: LiteralString + category: type[Warning] | None + stacklevel: int + def __init__(self, message: LiteralString, /, *, category: type[Warning] | None = ..., stacklevel: int = 1) -> None: ... + def __call__(self, arg: _T, /) -> _T: ... + + @final + class TypeVar: + @property + def __name__(self) -> str: ... + @property + def __bound__(self) -> AnnotationForm | None: ... + @property + def __constraints__(self) -> tuple[AnnotationForm, ...]: ... + @property + def __covariant__(self) -> bool: ... + @property + def __contravariant__(self) -> bool: ... + @property + def __infer_variance__(self) -> bool: ... + @property + def __default__(self) -> AnnotationForm: ... + def __init__( + self, + name: str, + *constraints: AnnotationForm, + bound: AnnotationForm | None = None, + covariant: bool = False, + contravariant: bool = False, + default: AnnotationForm = ..., + infer_variance: bool = False, + ) -> None: ... + def has_default(self) -> bool: ... + def __typing_prepare_subst__(self, alias: Any, args: Any) -> tuple[Any, ...]: ... + if sys.version_info >= (3, 10): + def __or__(self, right: Any) -> _SpecialForm: ... + def __ror__(self, left: Any) -> _SpecialForm: ... + if sys.version_info >= (3, 11): + def __typing_subst__(self, arg: Any) -> Any: ... + + @final + class ParamSpec: + @property + def __name__(self) -> str: ... + @property + def __bound__(self) -> AnnotationForm | None: ... + @property + def __covariant__(self) -> bool: ... + @property + def __contravariant__(self) -> bool: ... + @property + def __infer_variance__(self) -> bool: ... + @property + def __default__(self) -> AnnotationForm: ... + def __init__( + self, + name: str, + *, + bound: None | AnnotationForm | str = None, + contravariant: bool = False, + covariant: bool = False, + default: AnnotationForm = ..., + ) -> None: ... + @property + def args(self) -> ParamSpecArgs: ... + @property + def kwargs(self) -> ParamSpecKwargs: ... + def has_default(self) -> bool: ... + def __typing_prepare_subst__(self, alias: Any, args: Any) -> tuple[Any, ...]: ... + if sys.version_info >= (3, 10): + def __or__(self, right: Any) -> _SpecialForm: ... + def __ror__(self, left: Any) -> _SpecialForm: ... + + @final + class TypeVarTuple: + @property + def __name__(self) -> str: ... + @property + def __default__(self) -> AnnotationForm: ... + def __init__(self, name: str, *, default: AnnotationForm = ...) -> None: ... + def __iter__(self) -> Any: ... # Unpack[Self] + def has_default(self) -> bool: ... + def __typing_prepare_subst__(self, alias: Any, args: Any) -> tuple[Any, ...]: ... + + ReadOnly: _SpecialForm + TypeIs: _SpecialForm + +# TypeAliasType was added in Python 3.12, but had significant changes in 3.14. +if sys.version_info >= (3, 14): + from typing import TypeAliasType as TypeAliasType +else: + @final + class TypeAliasType: + def __init__( + self, name: str, value: AnnotationForm, *, type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] = () + ) -> None: ... + @property + def __value__(self) -> AnnotationForm: ... + @property + def __type_params__(self) -> tuple[TypeVar | ParamSpec | TypeVarTuple, ...]: ... + @property + # `__parameters__` can include special forms if a `TypeVarTuple` was + # passed as a `type_params` element to the constructor method. + def __parameters__(self) -> tuple[TypeVar | ParamSpec | AnnotationForm, ...]: ... + @property + def __name__(self) -> str: ... + # It's writable on types, but not on instances of TypeAliasType. 
+ @property + def __module__(self) -> str | None: ... # type: ignore[override] + # Returns typing._GenericAlias, which isn't stubbed. + def __getitem__(self, parameters: Incomplete | tuple[Incomplete, ...]) -> AnnotationForm: ... + def __init_subclass__(cls, *args: Unused, **kwargs: Unused) -> NoReturn: ... + if sys.version_info >= (3, 10): + def __or__(self, right: Any, /) -> _SpecialForm: ... + def __ror__(self, left: Any, /) -> _SpecialForm: ... + +# PEP 727 +class Doc: + documentation: str + def __init__(self, documentation: str, /) -> None: ... + def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + +# PEP 728 +@type_check_only +class _NoExtraItemsType: ... + +NoExtraItems: _NoExtraItemsType + +# PEP 747 +TypeForm: _SpecialForm + +# PEP 649/749 +if sys.version_info >= (3, 14): + from typing import evaluate_forward_ref as evaluate_forward_ref + + from annotationlib import Format as Format, get_annotations as get_annotations, type_repr as type_repr +else: + class Format(enum.IntEnum): + VALUE = 1 + VALUE_WITH_FAKE_GLOBALS = 2 + FORWARDREF = 3 + STRING = 4 + + @overload + def get_annotations( + obj: Any, # any object with __annotations__ or __annotate__ + *, + globals: Mapping[str, Any] | None = None, # value types depend on the key + locals: Mapping[str, Any] | None = None, # value types depend on the key + eval_str: bool = False, + format: Literal[Format.STRING], + ) -> dict[str, str]: ... + @overload + def get_annotations( + obj: Any, # any object with __annotations__ or __annotate__ + *, + globals: Mapping[str, Any] | None = None, # value types depend on the key + locals: Mapping[str, Any] | None = None, # value types depend on the key + eval_str: bool = False, + format: Literal[Format.FORWARDREF], + ) -> dict[str, AnnotationForm | ForwardRef]: ... + @overload + def get_annotations( + obj: Any, # any object with __annotations__ or __annotate__ + *, + globals: Mapping[str, Any] | None = None, # value types depend on the key + locals: Mapping[str, Any] | None = None, # value types depend on the key + eval_str: bool = False, + format: Format = Format.VALUE, # noqa: Y011 + ) -> dict[str, AnnotationForm]: ... + @overload + def evaluate_forward_ref( + forward_ref: ForwardRef, + *, + owner: Callable[..., object] | type[object] | ModuleType | None = None, # any callable, class, or module + globals: Mapping[str, Any] | None = None, # value types depend on the key + locals: Mapping[str, Any] | None = None, # value types depend on the key + type_params: Iterable[TypeVar | ParamSpec | TypeVarTuple] | None = None, + format: Literal[Format.STRING], + _recursive_guard: Container[str] = ..., + ) -> str: ... + @overload + def evaluate_forward_ref( + forward_ref: ForwardRef, + *, + owner: Callable[..., object] | type[object] | ModuleType | None = None, # any callable, class, or module + globals: Mapping[str, Any] | None = None, # value types depend on the key + locals: Mapping[str, Any] | None = None, # value types depend on the key + type_params: Iterable[TypeVar | ParamSpec | TypeVarTuple] | None = None, + format: Literal[Format.FORWARDREF], + _recursive_guard: Container[str] = ..., + ) -> AnnotationForm | ForwardRef: ... 
+ @overload + def evaluate_forward_ref( + forward_ref: ForwardRef, + *, + owner: Callable[..., object] | type[object] | ModuleType | None = None, # any callable, class, or module + globals: Mapping[str, Any] | None = None, # value types depend on the key + locals: Mapping[str, Any] | None = None, # value types depend on the key + type_params: Iterable[TypeVar | ParamSpec | TypeVarTuple] | None = None, + format: Format | None = None, + _recursive_guard: Container[str] = ..., + ) -> AnnotationForm: ... + def type_repr(value: object) -> str: ... + +# PEP 661 +class Sentinel: + def __init__(self, name: str, repr: str | None = None) -> None: ... + if sys.version_info >= (3, 14): + def __or__(self, other: Any) -> UnionType: ... # other can be any type form legal for unions + def __ror__(self, other: Any) -> UnionType: ... # other can be any type form legal for unions + elif sys.version_info >= (3, 10): + def __or__(self, other: Any) -> _SpecialForm: ... # other can be any type form legal for unions + def __ror__(self, other: Any) -> _SpecialForm: ... # other can be any type form legal for unions diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unicodedata.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unicodedata.pyi new file mode 100644 index 0000000..9fff042 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unicodedata.pyi @@ -0,0 +1,73 @@ +import sys +from _typeshed import ReadOnlyBuffer +from typing import Any, Final, Literal, TypeVar, final, overload +from typing_extensions import TypeAlias + +ucd_3_2_0: UCD +unidata_version: Final[str] + +if sys.version_info < (3, 10): + ucnhash_CAPI: Any + +_T = TypeVar("_T") + +_NormalizationForm: TypeAlias = Literal["NFC", "NFD", "NFKC", "NFKD"] + +def bidirectional(chr: str, /) -> str: ... +def category(chr: str, /) -> str: ... +def combining(chr: str, /) -> int: ... +@overload +def decimal(chr: str, /) -> int: ... +@overload +def decimal(chr: str, default: _T, /) -> int | _T: ... +def decomposition(chr: str, /) -> str: ... +@overload +def digit(chr: str, /) -> int: ... +@overload +def digit(chr: str, default: _T, /) -> int | _T: ... + +_EastAsianWidth: TypeAlias = Literal["F", "H", "W", "Na", "A", "N"] + +def east_asian_width(chr: str, /) -> _EastAsianWidth: ... +def is_normalized(form: _NormalizationForm, unistr: str, /) -> bool: ... +def lookup(name: str | ReadOnlyBuffer, /) -> str: ... +def mirrored(chr: str, /) -> int: ... +@overload +def name(chr: str, /) -> str: ... +@overload +def name(chr: str, default: _T, /) -> str | _T: ... +def normalize(form: _NormalizationForm, unistr: str, /) -> str: ... +@overload +def numeric(chr: str, /) -> float: ... +@overload +def numeric(chr: str, default: _T, /) -> float | _T: ... +@final +class UCD: + # The methods below are constructed from the same array in C + # (unicodedata_functions) and hence identical to the functions above. + unidata_version: str + def bidirectional(self, chr: str, /) -> str: ... + def category(self, chr: str, /) -> str: ... + def combining(self, chr: str, /) -> int: ... + @overload + def decimal(self, chr: str, /) -> int: ... + @overload + def decimal(self, chr: str, default: _T, /) -> int | _T: ... + def decomposition(self, chr: str, /) -> str: ... + @overload + def digit(self, chr: str, /) -> int: ... + @overload + def digit(self, chr: str, default: _T, /) -> int | _T: ... + def east_asian_width(self, chr: str, /) -> _EastAsianWidth: ... + def is_normalized(self, form: _NormalizationForm, unistr: str, /) -> bool: ... 
+ def lookup(self, name: str | ReadOnlyBuffer, /) -> str: ... + def mirrored(self, chr: str, /) -> int: ... + @overload + def name(self, chr: str, /) -> str: ... + @overload + def name(self, chr: str, default: _T, /) -> str | _T: ... + def normalize(self, form: _NormalizationForm, unistr: str, /) -> str: ... + @overload + def numeric(self, chr: str, /) -> float: ... + @overload + def numeric(self, chr: str, default: _T, /) -> float | _T: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/__init__.pyi new file mode 100644 index 0000000..546ea77 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/__init__.pyi @@ -0,0 +1,63 @@ +import sys +from unittest.async_case import * + +from .case import ( + FunctionTestCase as FunctionTestCase, + SkipTest as SkipTest, + TestCase as TestCase, + addModuleCleanup as addModuleCleanup, + expectedFailure as expectedFailure, + skip as skip, + skipIf as skipIf, + skipUnless as skipUnless, +) +from .loader import TestLoader as TestLoader, defaultTestLoader as defaultTestLoader +from .main import TestProgram as TestProgram, main as main +from .result import TestResult as TestResult +from .runner import TextTestResult as TextTestResult, TextTestRunner as TextTestRunner +from .signals import ( + installHandler as installHandler, + registerResult as registerResult, + removeHandler as removeHandler, + removeResult as removeResult, +) +from .suite import BaseTestSuite as BaseTestSuite, TestSuite as TestSuite + +if sys.version_info >= (3, 11): + from .case import doModuleCleanups as doModuleCleanups, enterModuleContext as enterModuleContext + +__all__ = [ + "IsolatedAsyncioTestCase", + "TestResult", + "TestCase", + "TestSuite", + "TextTestRunner", + "TestLoader", + "FunctionTestCase", + "main", + "defaultTestLoader", + "SkipTest", + "skip", + "skipIf", + "skipUnless", + "expectedFailure", + "TextTestResult", + "installHandler", + "registerResult", + "removeResult", + "removeHandler", + "addModuleCleanup", +] + +if sys.version_info < (3, 13): + from .loader import findTestCases as findTestCases, getTestCaseNames as getTestCaseNames, makeSuite as makeSuite + + __all__ += ["getTestCaseNames", "makeSuite", "findTestCases"] + +if sys.version_info >= (3, 11): + __all__ += ["enterModuleContext", "doModuleCleanups"] + +if sys.version_info < (3, 12): + def load_tests(loader: TestLoader, tests: TestSuite, pattern: str | None) -> TestSuite: ... + +def __dir__() -> set[str]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/_log.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/_log.pyi new file mode 100644 index 0000000..011a970 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/_log.pyi @@ -0,0 +1,27 @@ +import logging +import sys +from types import TracebackType +from typing import ClassVar, Generic, NamedTuple, TypeVar +from unittest.case import TestCase, _BaseTestCaseContext + +_L = TypeVar("_L", None, _LoggingWatcher) + +class _LoggingWatcher(NamedTuple): + records: list[logging.LogRecord] + output: list[str] + +class _AssertLogsContext(_BaseTestCaseContext, Generic[_L]): + LOGGING_FORMAT: ClassVar[str] + logger_name: str + level: int + msg: None + if sys.version_info >= (3, 10): + def __init__(self, test_case: TestCase, logger_name: str, level: int, no_logs: bool) -> None: ... 
+ no_logs: bool + else: + def __init__(self, test_case: TestCase, logger_name: str, level: int) -> None: ... + + def __enter__(self) -> _L: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None + ) -> bool | None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/async_case.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/async_case.pyi new file mode 100644 index 0000000..0b3fb91 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/async_case.pyi @@ -0,0 +1,25 @@ +import sys +from asyncio.events import AbstractEventLoop +from collections.abc import Awaitable, Callable +from typing import TypeVar +from typing_extensions import ParamSpec + +from .case import TestCase + +if sys.version_info >= (3, 11): + from contextlib import AbstractAsyncContextManager + +_T = TypeVar("_T") +_P = ParamSpec("_P") + +class IsolatedAsyncioTestCase(TestCase): + if sys.version_info >= (3, 13): + loop_factory: Callable[[], AbstractEventLoop] | None = None + + async def asyncSetUp(self) -> None: ... + async def asyncTearDown(self) -> None: ... + def addAsyncCleanup(self, func: Callable[_P, Awaitable[object]], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... + if sys.version_info >= (3, 11): + async def enterAsyncContext(self, cm: AbstractAsyncContextManager[_T]) -> _T: ... + + def __del__(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/case.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/case.pyi new file mode 100644 index 0000000..a602196 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/case.pyi @@ -0,0 +1,322 @@ +import logging +import sys +import unittest.result +from _typeshed import SupportsDunderGE, SupportsDunderGT, SupportsDunderLE, SupportsDunderLT, SupportsRSub, SupportsSub +from builtins import _ClassInfo +from collections.abc import Callable, Container, Iterable, Mapping, Sequence, Set as AbstractSet +from contextlib import AbstractContextManager +from re import Pattern +from types import GenericAlias, TracebackType +from typing import Any, AnyStr, Final, Generic, NoReturn, Protocol, SupportsAbs, SupportsRound, TypeVar, overload, type_check_only +from typing_extensions import Never, ParamSpec, Self +from unittest._log import _AssertLogsContext, _LoggingWatcher +from warnings import WarningMessage + +_T = TypeVar("_T") +_S = TypeVar("_S", bound=SupportsSub[Any, Any]) +_E = TypeVar("_E", bound=BaseException) +_FT = TypeVar("_FT", bound=Callable[..., Any]) +_SB = TypeVar("_SB", str, bytes, bytearray) +_P = ParamSpec("_P") + +DIFF_OMITTED: Final[str] + +class _BaseTestCaseContext: + test_case: TestCase + def __init__(self, test_case: TestCase) -> None: ... + +class _AssertRaisesBaseContext(_BaseTestCaseContext): + expected: type[BaseException] | tuple[type[BaseException], ...] + expected_regex: Pattern[str] | None + obj_name: str | None + msg: str | None + + def __init__( + self, + expected: type[BaseException] | tuple[type[BaseException], ...], + test_case: TestCase, + expected_regex: str | Pattern[str] | None = None, + ) -> None: ... + + # This returns Self if args is the empty list, and None otherwise. + # but it's not possible to construct an overload which expresses that + def handle(self, name: str, args: list[Any], kwargs: dict[str, Any]) -> Any: ... 
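Usage note for the async_case.pyi stub above: IsolatedAsyncioTestCase runs each test on its own event loop, with asyncSetUp and addAsyncCleanup awaited on that loop. A minimal sketch of the typical pattern; the class, attribute, and method names in the sketch (ExampleAsyncTest, resource, release) are illustrative, not code from this repository.

import unittest

class ExampleAsyncTest(unittest.IsolatedAsyncioTestCase):
    async def asyncSetUp(self) -> None:
        # Async fixtures run inside the per-test event loop.
        self.resource = {"ready": True}
        self.addAsyncCleanup(self.release)

    async def release(self) -> None:
        # Awaited after the test body, still on the same loop.
        self.resource.clear()

    async def test_resource_ready(self) -> None:
        self.assertTrue(self.resource["ready"])

if __name__ == "__main__":
    unittest.main()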
+ +def addModuleCleanup(function: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... +def doModuleCleanups() -> None: ... + +if sys.version_info >= (3, 11): + def enterModuleContext(cm: AbstractContextManager[_T]) -> _T: ... + +def expectedFailure(test_item: _FT) -> _FT: ... +def skip(reason: str) -> Callable[[_FT], _FT]: ... +def skipIf(condition: object, reason: str) -> Callable[[_FT], _FT]: ... +def skipUnless(condition: object, reason: str) -> Callable[[_FT], _FT]: ... + +class SkipTest(Exception): + def __init__(self, reason: str) -> None: ... + +@type_check_only +class _SupportsAbsAndDunderGE(SupportsDunderGE[Any], SupportsAbs[Any], Protocol): ... + +class TestCase: + failureException: type[BaseException] + longMessage: bool + maxDiff: int | None + # undocumented + _testMethodName: str + # undocumented + _testMethodDoc: str + def __init__(self, methodName: str = "runTest") -> None: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def setUp(self) -> None: ... + def tearDown(self) -> None: ... + @classmethod + def setUpClass(cls) -> None: ... + @classmethod + def tearDownClass(cls) -> None: ... + def run(self, result: unittest.result.TestResult | None = None) -> unittest.result.TestResult | None: ... + def __call__(self, result: unittest.result.TestResult | None = ...) -> unittest.result.TestResult | None: ... + def skipTest(self, reason: Any) -> NoReturn: ... + def subTest(self, msg: Any = ..., **params: Any) -> AbstractContextManager[None]: ... + def debug(self) -> None: ... + if sys.version_info < (3, 11): + def _addSkip(self, result: unittest.result.TestResult, test_case: TestCase, reason: str) -> None: ... + + def assertEqual(self, first: Any, second: Any, msg: Any = None) -> None: ... + def assertNotEqual(self, first: Any, second: Any, msg: Any = None) -> None: ... + def assertTrue(self, expr: Any, msg: Any = None) -> None: ... + def assertFalse(self, expr: Any, msg: Any = None) -> None: ... + def assertIs(self, expr1: object, expr2: object, msg: Any = None) -> None: ... + def assertIsNot(self, expr1: object, expr2: object, msg: Any = None) -> None: ... + def assertIsNone(self, obj: object, msg: Any = None) -> None: ... + def assertIsNotNone(self, obj: object, msg: Any = None) -> None: ... + def assertIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = None) -> None: ... + def assertNotIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = None) -> None: ... + def assertIsInstance(self, obj: object, cls: _ClassInfo, msg: Any = None) -> None: ... + def assertNotIsInstance(self, obj: object, cls: _ClassInfo, msg: Any = None) -> None: ... + @overload + def assertGreater(self, a: SupportsDunderGT[_T], b: _T, msg: Any = None) -> None: ... + @overload + def assertGreater(self, a: _T, b: SupportsDunderLT[_T], msg: Any = None) -> None: ... + @overload + def assertGreaterEqual(self, a: SupportsDunderGE[_T], b: _T, msg: Any = None) -> None: ... + @overload + def assertGreaterEqual(self, a: _T, b: SupportsDunderLE[_T], msg: Any = None) -> None: ... + @overload + def assertLess(self, a: SupportsDunderLT[_T], b: _T, msg: Any = None) -> None: ... + @overload + def assertLess(self, a: _T, b: SupportsDunderGT[_T], msg: Any = None) -> None: ... + @overload + def assertLessEqual(self, a: SupportsDunderLE[_T], b: _T, msg: Any = None) -> None: ... + @overload + def assertLessEqual(self, a: _T, b: SupportsDunderGE[_T], msg: Any = None) -> None: ... 
+ # `assertRaises`, `assertRaisesRegex`, and `assertRaisesRegexp` + # are not using `ParamSpec` intentionally, + # because they might be used with explicitly wrong arg types to raise some error in tests. + @overload + def assertRaises( + self, + expected_exception: type[BaseException] | tuple[type[BaseException], ...], + callable: Callable[..., object], + *args: Any, + **kwargs: Any, + ) -> None: ... + @overload + def assertRaises( + self, expected_exception: type[_E] | tuple[type[_E], ...], *, msg: Any = ... + ) -> _AssertRaisesContext[_E]: ... + @overload + def assertRaisesRegex( + self, + expected_exception: type[BaseException] | tuple[type[BaseException], ...], + expected_regex: str | Pattern[str], + callable: Callable[..., object], + *args: Any, + **kwargs: Any, + ) -> None: ... + @overload + def assertRaisesRegex( + self, expected_exception: type[_E] | tuple[type[_E], ...], expected_regex: str | Pattern[str], *, msg: Any = ... + ) -> _AssertRaisesContext[_E]: ... + @overload + def assertWarns( + self, + expected_warning: type[Warning] | tuple[type[Warning], ...], + callable: Callable[_P, object], + *args: _P.args, + **kwargs: _P.kwargs, + ) -> None: ... + @overload + def assertWarns( + self, expected_warning: type[Warning] | tuple[type[Warning], ...], *, msg: Any = ... + ) -> _AssertWarnsContext: ... + @overload + def assertWarnsRegex( + self, + expected_warning: type[Warning] | tuple[type[Warning], ...], + expected_regex: str | Pattern[str], + callable: Callable[_P, object], + *args: _P.args, + **kwargs: _P.kwargs, + ) -> None: ... + @overload + def assertWarnsRegex( + self, expected_warning: type[Warning] | tuple[type[Warning], ...], expected_regex: str | Pattern[str], *, msg: Any = ... + ) -> _AssertWarnsContext: ... + def assertLogs( + self, logger: str | logging.Logger | None = None, level: int | str | None = None + ) -> _AssertLogsContext[_LoggingWatcher]: ... + if sys.version_info >= (3, 10): + def assertNoLogs( + self, logger: str | logging.Logger | None = None, level: int | str | None = None + ) -> _AssertLogsContext[None]: ... + + @overload + def assertAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: ... + @overload + def assertAlmostEqual( + self, first: _S, second: _S, places: None = None, msg: Any = None, *, delta: _SupportsAbsAndDunderGE + ) -> None: ... + @overload + def assertAlmostEqual( + self, + first: SupportsSub[_T, SupportsAbs[SupportsRound[object]]], + second: _T, + places: int | None = None, + msg: Any = None, + delta: None = None, + ) -> None: ... + @overload + def assertAlmostEqual( + self, + first: _T, + second: SupportsRSub[_T, SupportsAbs[SupportsRound[object]]], + places: int | None = None, + msg: Any = None, + delta: None = None, + ) -> None: ... + @overload + def assertNotAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: ... + @overload + def assertNotAlmostEqual( + self, first: _S, second: _S, places: None = None, msg: Any = None, *, delta: _SupportsAbsAndDunderGE + ) -> None: ... + @overload + def assertNotAlmostEqual( + self, + first: SupportsSub[_T, SupportsAbs[SupportsRound[object]]], + second: _T, + places: int | None = None, + msg: Any = None, + delta: None = None, + ) -> None: ... + @overload + def assertNotAlmostEqual( + self, + first: _T, + second: SupportsRSub[_T, SupportsAbs[SupportsRound[object]]], + places: int | None = None, + msg: Any = None, + delta: None = None, + ) -> None: ... 
+ def assertRegex(self, text: AnyStr, expected_regex: AnyStr | Pattern[AnyStr], msg: Any = None) -> None: ... + def assertNotRegex(self, text: AnyStr, unexpected_regex: AnyStr | Pattern[AnyStr], msg: Any = None) -> None: ... + def assertCountEqual(self, first: Iterable[Any], second: Iterable[Any], msg: Any = None) -> None: ... + def addTypeEqualityFunc(self, typeobj: type[Any], function: Callable[..., None]) -> None: ... + def assertMultiLineEqual(self, first: str, second: str, msg: Any = None) -> None: ... + def assertSequenceEqual( + self, seq1: Sequence[Any], seq2: Sequence[Any], msg: Any = None, seq_type: type[Sequence[Any]] | None = None + ) -> None: ... + def assertListEqual(self, list1: list[Any], list2: list[Any], msg: Any = None) -> None: ... + def assertTupleEqual(self, tuple1: tuple[Any, ...], tuple2: tuple[Any, ...], msg: Any = None) -> None: ... + def assertSetEqual(self, set1: AbstractSet[object], set2: AbstractSet[object], msg: Any = None) -> None: ... + # assertDictEqual accepts only true dict instances. We can't use that here, since that would make + # assertDictEqual incompatible with TypedDict. + def assertDictEqual(self, d1: Mapping[Any, object], d2: Mapping[Any, object], msg: Any = None) -> None: ... + def fail(self, msg: Any = None) -> NoReturn: ... + def countTestCases(self) -> int: ... + def defaultTestResult(self) -> unittest.result.TestResult: ... + def id(self) -> str: ... + def shortDescription(self) -> str | None: ... + def addCleanup(self, function: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... + + if sys.version_info >= (3, 11): + def enterContext(self, cm: AbstractContextManager[_T]) -> _T: ... + + def doCleanups(self) -> None: ... + @classmethod + def addClassCleanup(cls, function: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... + @classmethod + def doClassCleanups(cls) -> None: ... + + if sys.version_info >= (3, 11): + @classmethod + def enterClassContext(cls, cm: AbstractContextManager[_T]) -> _T: ... + + def _formatMessage(self, msg: str | None, standardMsg: str) -> str: ... # undocumented + def _getAssertEqualityFunc(self, first: Any, second: Any) -> Callable[..., None]: ... # undocumented + if sys.version_info < (3, 12): + failUnlessEqual = assertEqual + assertEquals = assertEqual + failIfEqual = assertNotEqual + assertNotEquals = assertNotEqual + failUnless = assertTrue + assert_ = assertTrue + failIf = assertFalse + failUnlessRaises = assertRaises + failUnlessAlmostEqual = assertAlmostEqual + assertAlmostEquals = assertAlmostEqual + failIfAlmostEqual = assertNotAlmostEqual + assertNotAlmostEquals = assertNotAlmostEqual + assertRegexpMatches = assertRegex + assertNotRegexpMatches = assertNotRegex + assertRaisesRegexp = assertRaisesRegex + def assertDictContainsSubset( + self, subset: Mapping[Any, Any], dictionary: Mapping[Any, Any], msg: object = None + ) -> None: ... + + if sys.version_info >= (3, 10): + # Runtime has *args, **kwargs, but will error if any are supplied + def __init_subclass__(cls, *args: Never, **kwargs: Never) -> None: ... + + if sys.version_info >= (3, 14): + def assertIsSubclass(self, cls: type, superclass: type | tuple[type, ...], msg: Any = None) -> None: ... + def assertNotIsSubclass(self, cls: type, superclass: type | tuple[type, ...], msg: Any = None) -> None: ... + def assertHasAttr(self, obj: object, name: str, msg: Any = None) -> None: ... + def assertNotHasAttr(self, obj: object, name: str, msg: Any = None) -> None: ... 
+ def assertStartsWith(self, s: _SB, prefix: _SB | tuple[_SB, ...], msg: Any = None) -> None: ... + def assertNotStartsWith(self, s: _SB, prefix: _SB | tuple[_SB, ...], msg: Any = None) -> None: ... + def assertEndsWith(self, s: _SB, suffix: _SB | tuple[_SB, ...], msg: Any = None) -> None: ... + def assertNotEndsWith(self, s: _SB, suffix: _SB | tuple[_SB, ...], msg: Any = None) -> None: ... + +class FunctionTestCase(TestCase): + def __init__( + self, + testFunc: Callable[[], object], + setUp: Callable[[], object] | None = None, + tearDown: Callable[[], object] | None = None, + description: str | None = None, + ) -> None: ... + def runTest(self) -> None: ... + def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + +class _AssertRaisesContext(_AssertRaisesBaseContext, Generic[_E]): + exception: _E + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None + ) -> bool: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +class _AssertWarnsContext(_AssertRaisesBaseContext): + warning: WarningMessage + filename: str + lineno: int + warnings: list[WarningMessage] + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None + ) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/loader.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/loader.pyi new file mode 100644 index 0000000..81de40c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/loader.pyi @@ -0,0 +1,72 @@ +import sys +import unittest.case +import unittest.suite +from collections.abc import Callable, Sequence +from re import Pattern +from types import ModuleType +from typing import Any, Final +from typing_extensions import TypeAlias, deprecated + +_SortComparisonMethod: TypeAlias = Callable[[str, str], int] +_SuiteClass: TypeAlias = Callable[[list[unittest.case.TestCase]], unittest.suite.TestSuite] + +VALID_MODULE_NAME: Final[Pattern[str]] + +class TestLoader: + errors: list[type[BaseException]] + testMethodPrefix: str + sortTestMethodsUsing: _SortComparisonMethod + testNamePatterns: list[str] | None + suiteClass: _SuiteClass + def loadTestsFromTestCase(self, testCaseClass: type[unittest.case.TestCase]) -> unittest.suite.TestSuite: ... + if sys.version_info >= (3, 12): + def loadTestsFromModule(self, module: ModuleType, *, pattern: str | None = None) -> unittest.suite.TestSuite: ... + else: + def loadTestsFromModule(self, module: ModuleType, *args: Any, pattern: str | None = None) -> unittest.suite.TestSuite: ... + + def loadTestsFromName(self, name: str, module: ModuleType | None = None) -> unittest.suite.TestSuite: ... + def loadTestsFromNames(self, names: Sequence[str], module: ModuleType | None = None) -> unittest.suite.TestSuite: ... + def getTestCaseNames(self, testCaseClass: type[unittest.case.TestCase]) -> Sequence[str]: ... + def discover( + self, start_dir: str, pattern: str = "test*.py", top_level_dir: str | None = None + ) -> unittest.suite.TestSuite: ... + def _match_path(self, path: str, full_path: str, pattern: str) -> bool: ... 
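Usage note for the TestLoader stub above: discover() walks a start directory for files matching the pattern and returns a TestSuite that a runner can execute. A minimal sketch under the assumption of a hypothetical tests/ directory and test_*.py naming; both are illustrative, not paths from this repository.

import unittest

# Discover tests under a hypothetical tests/ directory and run them,
# exiting non-zero when any test fails.
loader = unittest.TestLoader()
suite = loader.discover(start_dir="tests", pattern="test_*.py")
result = unittest.TextTestRunner(verbosity=2).run(suite)
raise SystemExit(0 if result.wasSuccessful() else 1)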
+ +defaultTestLoader: TestLoader + +if sys.version_info < (3, 13): + if sys.version_info >= (3, 11): + @deprecated("Deprecated since Python 3.11; removed in Python 3.13.") + def getTestCaseNames( + testCaseClass: type[unittest.case.TestCase], + prefix: str, + sortUsing: _SortComparisonMethod = ..., + testNamePatterns: list[str] | None = None, + ) -> Sequence[str]: ... + @deprecated("Deprecated since Python 3.11; removed in Python 3.13.") + def makeSuite( + testCaseClass: type[unittest.case.TestCase], + prefix: str = "test", + sortUsing: _SortComparisonMethod = ..., + suiteClass: _SuiteClass = ..., + ) -> unittest.suite.TestSuite: ... + @deprecated("Deprecated since Python 3.11; removed in Python 3.13.") + def findTestCases( + module: ModuleType, prefix: str = "test", sortUsing: _SortComparisonMethod = ..., suiteClass: _SuiteClass = ... + ) -> unittest.suite.TestSuite: ... + else: + def getTestCaseNames( + testCaseClass: type[unittest.case.TestCase], + prefix: str, + sortUsing: _SortComparisonMethod = ..., + testNamePatterns: list[str] | None = None, + ) -> Sequence[str]: ... + def makeSuite( + testCaseClass: type[unittest.case.TestCase], + prefix: str = "test", + sortUsing: _SortComparisonMethod = ..., + suiteClass: _SuiteClass = ..., + ) -> unittest.suite.TestSuite: ... + def findTestCases( + module: ModuleType, prefix: str = "test", sortUsing: _SortComparisonMethod = ..., suiteClass: _SuiteClass = ... + ) -> unittest.suite.TestSuite: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/main.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/main.pyi new file mode 100644 index 0000000..23ead16 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/main.pyi @@ -0,0 +1,77 @@ +import sys +import unittest.case +import unittest.loader +import unittest.result +import unittest.suite +from collections.abc import Iterable +from types import ModuleType +from typing import Any, Final, Protocol, type_check_only +from typing_extensions import deprecated + +MAIN_EXAMPLES: Final[str] +MODULE_EXAMPLES: Final[str] + +@type_check_only +class _TestRunner(Protocol): + def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase, /) -> unittest.result.TestResult: ... + +# not really documented +class TestProgram: + result: unittest.result.TestResult + module: None | str | ModuleType + verbosity: int + failfast: bool | None + catchbreak: bool | None + buffer: bool | None + progName: str | None + warnings: str | None + testNamePatterns: list[str] | None + if sys.version_info >= (3, 12): + durations: unittest.result._DurationsType | None + def __init__( + self, + module: None | str | ModuleType = "__main__", + defaultTest: str | Iterable[str] | None = None, + argv: list[str] | None = None, + testRunner: type[_TestRunner] | _TestRunner | None = None, + testLoader: unittest.loader.TestLoader = ..., + exit: bool = True, + verbosity: int = 1, + failfast: bool | None = None, + catchbreak: bool | None = None, + buffer: bool | None = None, + warnings: str | None = None, + *, + tb_locals: bool = False, + durations: unittest.result._DurationsType | None = None, + ) -> None: ... 
+ else: + def __init__( + self, + module: None | str | ModuleType = "__main__", + defaultTest: str | Iterable[str] | None = None, + argv: list[str] | None = None, + testRunner: type[_TestRunner] | _TestRunner | None = None, + testLoader: unittest.loader.TestLoader = ..., + exit: bool = True, + verbosity: int = 1, + failfast: bool | None = None, + catchbreak: bool | None = None, + buffer: bool | None = None, + warnings: str | None = None, + *, + tb_locals: bool = False, + ) -> None: ... + + if sys.version_info < (3, 13): + if sys.version_info >= (3, 11): + @deprecated("Deprecated since Python 3.11; removed in Python 3.13.") + def usageExit(self, msg: Any = None) -> None: ... + else: + def usageExit(self, msg: Any = None) -> None: ... + + def parseArgs(self, argv: list[str]) -> None: ... + def createTests(self, from_discovery: bool = False, Loader: unittest.loader.TestLoader | None = None) -> None: ... + def runTests(self) -> None: ... # undocumented + +main = TestProgram diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/mock.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/mock.pyi new file mode 100644 index 0000000..f3e58bc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/mock.pyi @@ -0,0 +1,576 @@ +import sys +from _typeshed import MaybeNone +from collections.abc import Awaitable, Callable, Coroutine, Iterable, Mapping, Sequence +from contextlib import _GeneratorContextManager +from types import TracebackType +from typing import Any, ClassVar, Final, Generic, Literal, TypeVar, overload, type_check_only +from typing_extensions import ParamSpec, Self, TypeAlias, disjoint_base + +_T = TypeVar("_T") +_TT = TypeVar("_TT", bound=type[Any]) +_R = TypeVar("_R") +_F = TypeVar("_F", bound=Callable[..., Any]) +_AF = TypeVar("_AF", bound=Callable[..., Coroutine[Any, Any, Any]]) +_P = ParamSpec("_P") + +if sys.version_info >= (3, 13): + # ThreadingMock added in 3.13 + __all__ = ( + "Mock", + "MagicMock", + "patch", + "sentinel", + "DEFAULT", + "ANY", + "call", + "create_autospec", + "ThreadingMock", + "AsyncMock", + "FILTER_DIR", + "NonCallableMock", + "NonCallableMagicMock", + "mock_open", + "PropertyMock", + "seal", + ) +else: + __all__ = ( + "Mock", + "MagicMock", + "patch", + "sentinel", + "DEFAULT", + "ANY", + "call", + "create_autospec", + "AsyncMock", + "FILTER_DIR", + "NonCallableMock", + "NonCallableMagicMock", + "mock_open", + "PropertyMock", + "seal", + ) + +FILTER_DIR: bool # controls the way mock objects respond to `dir` function + +class _SentinelObject: + name: Any + def __init__(self, name: Any) -> None: ... + +class _Sentinel: + def __getattr__(self, name: str) -> Any: ... + +sentinel: _Sentinel +DEFAULT: Any + +_ArgsKwargs: TypeAlias = tuple[tuple[Any, ...], Mapping[str, Any]] +_NameArgsKwargs: TypeAlias = tuple[str, tuple[Any, ...], Mapping[str, Any]] +_CallValue: TypeAlias = str | tuple[Any, ...] | Mapping[str, Any] | _ArgsKwargs | _NameArgsKwargs + +if sys.version_info >= (3, 12): + class _Call(tuple[Any, ...]): + def __new__( + cls, + value: _CallValue = (), + name: str | None = "", + parent: _Call | None = None, + two: bool = False, + from_kall: bool = True, + ) -> Self: ... + def __init__( + self, + value: _CallValue = (), + name: str | None = None, + parent: _Call | None = None, + two: bool = False, + from_kall: bool = True, + ) -> None: ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __eq__(self, other: object) -> bool: ... + def __ne__(self, value: object, /) -> bool: ... 
+ def __call__(self, *args: Any, **kwargs: Any) -> _Call: ... + def __getattr__(self, attr: str) -> Any: ... + def __getattribute__(self, attr: str) -> Any: ... + @property + def args(self) -> tuple[Any, ...]: ... + @property + def kwargs(self) -> Mapping[str, Any]: ... + def call_list(self) -> Any: ... + +else: + @disjoint_base + class _Call(tuple[Any, ...]): + def __new__( + cls, + value: _CallValue = (), + name: str | None = "", + parent: _Call | None = None, + two: bool = False, + from_kall: bool = True, + ) -> Self: ... + def __init__( + self, + value: _CallValue = (), + name: str | None = None, + parent: _Call | None = None, + two: bool = False, + from_kall: bool = True, + ) -> None: ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __eq__(self, other: object) -> bool: ... + def __ne__(self, value: object, /) -> bool: ... + def __call__(self, *args: Any, **kwargs: Any) -> _Call: ... + def __getattr__(self, attr: str) -> Any: ... + def __getattribute__(self, attr: str) -> Any: ... + @property + def args(self) -> tuple[Any, ...]: ... + @property + def kwargs(self) -> Mapping[str, Any]: ... + def call_list(self) -> Any: ... + +call: _Call + +class _CallList(list[_Call]): + def __contains__(self, value: Any) -> bool: ... + +class Base: + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + +# We subclass with "Any" because mocks are explicitly designed to stand in for other types, +# something that can't be expressed with our static type system. +class NonCallableMock(Base, Any): + if sys.version_info >= (3, 12): + def __new__( + cls, + spec: list[str] | object | type[object] | None = None, + wraps: Any | None = None, + name: str | None = None, + spec_set: list[str] | object | type[object] | None = None, + parent: NonCallableMock | None = None, + _spec_state: Any | None = None, + _new_name: str = "", + _new_parent: NonCallableMock | None = None, + _spec_as_instance: bool = False, + _eat_self: bool | None = None, + unsafe: bool = False, + **kwargs: Any, + ) -> Self: ... + else: + def __new__(cls, /, *args: Any, **kw: Any) -> Self: ... + + def __init__( + self, + spec: list[str] | object | type[object] | None = None, + wraps: Any | None = None, + name: str | None = None, + spec_set: list[str] | object | type[object] | None = None, + parent: NonCallableMock | None = None, + _spec_state: Any | None = None, + _new_name: str = "", + _new_parent: NonCallableMock | None = None, + _spec_as_instance: bool = False, + _eat_self: bool | None = None, + unsafe: bool = False, + **kwargs: Any, + ) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def __delattr__(self, name: str) -> None: ... + def __setattr__(self, name: str, value: Any) -> None: ... + def __dir__(self) -> list[str]: ... + def assert_called_with(self, *args: Any, **kwargs: Any) -> None: ... + def assert_not_called(self) -> None: ... + def assert_called_once_with(self, *args: Any, **kwargs: Any) -> None: ... + def _format_mock_failure_message(self, args: Any, kwargs: Any, action: str = "call") -> str: ... + def assert_called(self) -> None: ... + def assert_called_once(self) -> None: ... + def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: ... + def _extract_mock_name(self) -> str: ... + def _get_call_signature_from_name(self, name: str) -> Any: ... + def assert_any_call(self, *args: Any, **kwargs: Any) -> None: ... + def assert_has_calls(self, calls: Sequence[_Call], any_order: bool = False) -> None: ... 
+ def mock_add_spec(self, spec: Any, spec_set: bool = False) -> None: ... + def _mock_add_spec(self, spec: Any, spec_set: bool, _spec_as_instance: bool = False, _eat_self: bool = False) -> None: ... + def attach_mock(self, mock: NonCallableMock, attribute: str) -> None: ... + def configure_mock(self, **kwargs: Any) -> None: ... + return_value: Any + side_effect: Any + called: bool + call_count: int + call_args: _Call | MaybeNone + call_args_list: _CallList + mock_calls: _CallList + def _format_mock_call_signature(self, args: Any, kwargs: Any) -> str: ... + def _call_matcher(self, _call: tuple[_Call, ...]) -> _Call: ... + def _get_child_mock(self, **kw: Any) -> NonCallableMock: ... + if sys.version_info >= (3, 13): + def _calls_repr(self) -> str: ... + else: + def _calls_repr(self, prefix: str = "Calls") -> str: ... + +class CallableMixin(Base): + side_effect: Any + def __init__( + self, + spec: Any | None = None, + side_effect: Any | None = None, + return_value: Any = ..., + wraps: Any | None = None, + name: Any | None = None, + spec_set: Any | None = None, + parent: Any | None = None, + _spec_state: Any | None = None, + _new_name: Any = "", + _new_parent: Any | None = None, + **kwargs: Any, + ) -> None: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + +class Mock(CallableMixin, NonCallableMock): ... + +class _patch(Generic[_T]): + attribute_name: Any + getter: Callable[[], Any] + attribute: str + new: _T + new_callable: Any + spec: Any + create: bool + has_local: Any + spec_set: Any + autospec: Any + kwargs: Mapping[str, Any] + additional_patchers: Any + # If new==DEFAULT, self is _patch[Any]. Ideally we'd be able to add an overload for it so that self is _patch[MagicMock], + # but that's impossible with the current type system. + if sys.version_info >= (3, 10): + def __init__( + self: _patch[_T], # pyright: ignore[reportInvalidTypeVarUse] #11780 + getter: Callable[[], Any], + attribute: str, + new: _T, + spec: Any | None, + create: bool, + spec_set: Any | None, + autospec: Any | None, + new_callable: Any | None, + kwargs: Mapping[str, Any], + *, + unsafe: bool = False, + ) -> None: ... + else: + def __init__( + self: _patch[_T], # pyright: ignore[reportInvalidTypeVarUse] #11780 + getter: Callable[[], Any], + attribute: str, + new: _T, + spec: Any | None, + create: bool, + spec_set: Any | None, + autospec: Any | None, + new_callable: Any | None, + kwargs: Mapping[str, Any], + ) -> None: ... + + def copy(self) -> _patch[_T]: ... + @overload + def __call__(self, func: _TT) -> _TT: ... + # If new==DEFAULT, this should add a MagicMock parameter to the function + # arguments. See the _patch_default_new class below for this functionality. + @overload + def __call__(self, func: Callable[_P, _R]) -> Callable[_P, _R]: ... + def decoration_helper( + self, patched: _patch[Any], args: Sequence[Any], keywargs: Any + ) -> _GeneratorContextManager[tuple[Sequence[Any], Any]]: ... + def decorate_class(self, klass: _TT) -> _TT: ... + def decorate_callable(self, func: Callable[..., _R]) -> Callable[..., _R]: ... + def decorate_async_callable(self, func: Callable[..., Awaitable[_R]]) -> Callable[..., Awaitable[_R]]: ... + def get_original(self) -> tuple[Any, bool]: ... + target: Any + temp_original: Any + is_local: bool + def __enter__(self) -> _T: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / + ) -> None: ... + def start(self) -> _T: ... + def stop(self) -> None: ... 
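Usage note for the _patch class stubbed above: it backs the decorator, context-manager, and start()/stop() forms of unittest.mock.patch, which is why it defines __call__, __enter__/__exit__, and start/stop. A minimal sketch of the latter two forms; os.getcwd is an arbitrary patch target and the return values are illustrative.

import os
from unittest import mock

# Context-manager form: the target is replaced only inside the block.
with mock.patch("os.getcwd", return_value="/tmp/project") as fake_getcwd:
    assert os.getcwd() == "/tmp/project"
    fake_getcwd.assert_called_once()

# start()/stop() form, usually paired with addCleanup in a TestCase.
patcher = mock.patch("os.getcwd", return_value="/srv/app")
fake_getcwd = patcher.start()
try:
    assert os.getcwd() == "/srv/app"
finally:
    patcher.stop()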
+ +# This class does not exist at runtime, it's a hack to make this work: +# @patch("foo") +# def bar(..., mock: MagicMock) -> None: ... +@type_check_only +class _patch_pass_arg(_patch[_T]): + @overload + def __call__(self, func: _TT) -> _TT: ... + # Can't use the following as ParamSpec is only allowed as last parameter: + # def __call__(self, func: Callable[_P, _R]) -> Callable[Concatenate[_P, MagicMock], _R]: ... + @overload + def __call__(self, func: Callable[..., _R]) -> Callable[..., _R]: ... + +class _patch_dict: + in_dict: Any + values: Any + clear: Any + def __init__(self, in_dict: Any, values: Any = (), clear: Any = False, **kwargs: Any) -> None: ... + def __call__(self, f: Any) -> Any: ... + if sys.version_info >= (3, 10): + def decorate_callable(self, f: _F) -> _F: ... + def decorate_async_callable(self, f: _AF) -> _AF: ... + + def decorate_class(self, klass: Any) -> Any: ... + def __enter__(self) -> Any: ... + def __exit__(self, *args: object) -> Any: ... + start: Any + stop: Any + +# This class does not exist at runtime, it's a hack to add methods to the +# patch() function. +@type_check_only +class _patcher: + TEST_PREFIX: str + dict: type[_patch_dict] + # This overload also covers the case, where new==DEFAULT. In this case, the return type is _patch[Any]. + # Ideally we'd be able to add an overload for it so that the return type is _patch[MagicMock], + # but that's impossible with the current type system. + @overload + def __call__( # type: ignore[overload-overlap] + self, + target: str, + new: _T, + spec: Literal[False] | None = None, + create: bool = False, + spec_set: Literal[False] | None = None, + autospec: Literal[False] | None = None, + new_callable: None = None, + *, + unsafe: bool = False, + ) -> _patch[_T]: ... + @overload + def __call__( + self, + target: str, + *, + # If not False or None, this is passed to new_callable + spec: Any | Literal[False] | None = None, + create: bool = False, + # If not False or None, this is passed to new_callable + spec_set: Any | Literal[False] | None = None, + autospec: Literal[False] | None = None, + new_callable: Callable[..., _T], + unsafe: bool = False, + # kwargs are passed to new_callable + **kwargs: Any, + ) -> _patch_pass_arg[_T]: ... + @overload + def __call__( + self, + target: str, + *, + spec: Any | bool | None = None, + create: bool = False, + spec_set: Any | bool | None = None, + autospec: Any | bool | None = None, + new_callable: None = None, + unsafe: bool = False, + # kwargs are passed to the MagicMock/AsyncMock constructor + **kwargs: Any, + ) -> _patch_pass_arg[MagicMock | AsyncMock]: ... + # This overload also covers the case, where new==DEFAULT. In this case, the return type is _patch[Any]. + # Ideally we'd be able to add an overload for it so that the return type is _patch[MagicMock], + # but that's impossible with the current type system. + @overload + @staticmethod + def object( + target: Any, + attribute: str, + new: _T, + spec: Literal[False] | None = None, + create: bool = False, + spec_set: Literal[False] | None = None, + autospec: Literal[False] | None = None, + new_callable: None = None, + *, + unsafe: bool = False, + ) -> _patch[_T]: ... 
+ @overload + @staticmethod + def object( + target: Any, + attribute: str, + *, + # If not False or None, this is passed to new_callable + spec: Any | Literal[False] | None = None, + create: bool = False, + # If not False or None, this is passed to new_callable + spec_set: Any | Literal[False] | None = None, + autospec: Literal[False] | None = None, + new_callable: Callable[..., _T], + unsafe: bool = False, + # kwargs are passed to new_callable + **kwargs: Any, + ) -> _patch_pass_arg[_T]: ... + @overload + @staticmethod + def object( + target: Any, + attribute: str, + *, + spec: Any | bool | None = None, + create: bool = False, + spec_set: Any | bool | None = None, + autospec: Any | bool | None = None, + new_callable: None = None, + unsafe: bool = False, + # kwargs are passed to the MagicMock/AsyncMock constructor + **kwargs: Any, + ) -> _patch_pass_arg[MagicMock | AsyncMock]: ... + @overload + @staticmethod + def multiple( + target: Any | str, + # If not False or None, this is passed to new_callable + spec: Any | Literal[False] | None = None, + create: bool = False, + # If not False or None, this is passed to new_callable + spec_set: Any | Literal[False] | None = None, + autospec: Literal[False] | None = None, + *, + new_callable: Callable[..., _T], + # The kwargs must be DEFAULT + **kwargs: Any, + ) -> _patch_pass_arg[_T]: ... + @overload + @staticmethod + def multiple( + target: Any | str, + # If not False or None, this is passed to new_callable + spec: Any | Literal[False] | None, + create: bool, + # If not False or None, this is passed to new_callable + spec_set: Any | Literal[False] | None, + autospec: Literal[False] | None, + new_callable: Callable[..., _T], + # The kwargs must be DEFAULT + **kwargs: Any, + ) -> _patch_pass_arg[_T]: ... + @overload + @staticmethod + def multiple( + target: Any | str, + spec: Any | bool | None = None, + create: bool = False, + spec_set: Any | bool | None = None, + autospec: Any | bool | None = None, + new_callable: None = None, + # The kwargs are the mock objects or DEFAULT + **kwargs: Any, + ) -> _patch[Any]: ... + @staticmethod + def stopall() -> None: ... + +patch: _patcher + +class MagicMixin(Base): + def __init__(self, *args: Any, **kw: Any) -> None: ... + +class NonCallableMagicMock(MagicMixin, NonCallableMock): ... +class MagicMock(MagicMixin, Mock): ... + +class AsyncMockMixin(Base): + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + async def _execute_mock_call(self, *args: Any, **kwargs: Any) -> Any: ... + def assert_awaited(self) -> None: ... + def assert_awaited_once(self) -> None: ... + def assert_awaited_with(self, *args: Any, **kwargs: Any) -> None: ... + def assert_awaited_once_with(self, *args: Any, **kwargs: Any) -> None: ... + def assert_any_await(self, *args: Any, **kwargs: Any) -> None: ... + def assert_has_awaits(self, calls: Iterable[_Call], any_order: bool = False) -> None: ... + def assert_not_awaited(self) -> None: ... + def reset_mock(self, *args: Any, **kwargs: Any) -> None: ... + await_count: int + await_args: _Call | None + await_args_list: _CallList + +class AsyncMagicMixin(MagicMixin): + def __init__(self, *args: Any, **kw: Any) -> None: ... + +class AsyncMock(AsyncMockMixin, AsyncMagicMixin, Mock): + # Improving the `reset_mock` signature. + # It is defined on `AsyncMockMixin` with `*args, **kwargs`, which is not ideal. + # But, `NonCallableMock` super-class has the better version. + def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: ... 
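Usage note for the AsyncMock class stubbed above: calling an AsyncMock returns an awaitable, and awaiting it is what populates await_count, await_args, and the assert_awaited* checks. A minimal sketch; the coroutine name fetch and the placeholder URL are illustrative.

import asyncio
from unittest.mock import AsyncMock

# Awaiting the mock records the await and returns the configured value.
fetch = AsyncMock(return_value={"status": 200})

async def main() -> None:
    payload = await fetch("https://example.invalid/api")
    assert payload["status"] == 200

asyncio.run(main())
fetch.assert_awaited_once_with("https://example.invalid/api")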
+ +class MagicProxy(Base): + name: str + parent: Any + def __init__(self, name: str, parent: Any) -> None: ... + def create_mock(self) -> Any: ... + def __get__(self, obj: Any, _type: Any | None = None) -> Any: ... + +# See https://github.com/python/typeshed/issues/14701 +class _ANY(Any): + def __eq__(self, other: object) -> Literal[True]: ... + def __ne__(self, other: object) -> Literal[False]: ... + __hash__: ClassVar[None] # type: ignore[assignment] + +ANY: _ANY + +if sys.version_info >= (3, 10): + def create_autospec( + spec: Any, + spec_set: Any = False, + instance: Any = False, + _parent: Any | None = None, + _name: Any | None = None, + *, + unsafe: bool = False, + **kwargs: Any, + ) -> Any: ... + +else: + def create_autospec( + spec: Any, + spec_set: Any = False, + instance: Any = False, + _parent: Any | None = None, + _name: Any | None = None, + **kwargs: Any, + ) -> Any: ... + +class _SpecState: + spec: Any + ids: Any + spec_set: Any + parent: Any + instance: Any + name: Any + def __init__( + self, + spec: Any, + spec_set: Any = False, + parent: Any | None = None, + name: Any | None = None, + ids: Any | None = None, + instance: Any = False, + ) -> None: ... + +def mock_open(mock: Any | None = None, read_data: Any = "") -> Any: ... + +class PropertyMock(Mock): + def __get__(self, obj: _T, obj_type: type[_T] | None = None) -> Self: ... + def __set__(self, obj: Any, val: Any) -> None: ... + +if sys.version_info >= (3, 13): + class ThreadingMixin(Base): + DEFAULT_TIMEOUT: Final[float | None] = None + + def __init__(self, /, *args: Any, timeout: float | None | _SentinelObject = ..., **kwargs: Any) -> None: ... + # Same as `NonCallableMock.reset_mock.` + def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: ... + def wait_until_called(self, *, timeout: float | None | _SentinelObject = ...) -> None: ... + def wait_until_any_call_with(self, *args: Any, **kwargs: Any) -> None: ... + + class ThreadingMock(ThreadingMixin, MagicMixin, Mock): ... + +def seal(mock: Any) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/result.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/result.pyi new file mode 100644 index 0000000..0761baa --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/result.pyi @@ -0,0 +1,47 @@ +import sys +import unittest.case +from _typeshed import OptExcInfo +from collections.abc import Callable +from typing import Any, Final, TextIO, TypeVar +from typing_extensions import TypeAlias + +_F = TypeVar("_F", bound=Callable[..., Any]) +_DurationsType: TypeAlias = list[tuple[str, float]] + +STDOUT_LINE: Final[str] +STDERR_LINE: Final[str] + +# undocumented +def failfast(method: _F) -> _F: ... + +class TestResult: + errors: list[tuple[unittest.case.TestCase, str]] + failures: list[tuple[unittest.case.TestCase, str]] + skipped: list[tuple[unittest.case.TestCase, str]] + expectedFailures: list[tuple[unittest.case.TestCase, str]] + unexpectedSuccesses: list[unittest.case.TestCase] + shouldStop: bool + testsRun: int + buffer: bool + failfast: bool + tb_locals: bool + if sys.version_info >= (3, 12): + collectedDurations: _DurationsType + + def __init__(self, stream: TextIO | None = None, descriptions: bool | None = None, verbosity: int | None = None) -> None: ... + def printErrors(self) -> None: ... + def wasSuccessful(self) -> bool: ... + def stop(self) -> None: ... + def startTest(self, test: unittest.case.TestCase) -> None: ... 
+ def stopTest(self, test: unittest.case.TestCase) -> None: ... + def startTestRun(self) -> None: ... + def stopTestRun(self) -> None: ... + def addError(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: ... + def addFailure(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: ... + def addSuccess(self, test: unittest.case.TestCase) -> None: ... + def addSkip(self, test: unittest.case.TestCase, reason: str) -> None: ... + def addExpectedFailure(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: ... + def addUnexpectedSuccess(self, test: unittest.case.TestCase) -> None: ... + def addSubTest(self, test: unittest.case.TestCase, subtest: unittest.case.TestCase, err: OptExcInfo | None) -> None: ... + if sys.version_info >= (3, 12): + def addDuration(self, test: unittest.case.TestCase, elapsed: float) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/runner.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/runner.pyi new file mode 100644 index 0000000..f76771f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/runner.pyi @@ -0,0 +1,93 @@ +import sys +import unittest.case +import unittest.result +import unittest.suite +from _typeshed import SupportsFlush, SupportsWrite +from collections.abc import Callable, Iterable +from typing import Any, Generic, Protocol, TypeVar, type_check_only +from typing_extensions import Never, TypeAlias +from warnings import _ActionKind + +_ResultClassType: TypeAlias = Callable[[_TextTestStream, bool, int], TextTestResult[Any]] + +@type_check_only +class _SupportsWriteAndFlush(SupportsWrite[str], SupportsFlush, Protocol): ... + +# All methods used by unittest.runner.TextTestResult's stream +@type_check_only +class _TextTestStream(_SupportsWriteAndFlush, Protocol): + def writeln(self, arg: str | None = None, /) -> None: ... + +# _WritelnDecorator should have all the same attrs as its stream param. +# But that's not feasible to do Generically +# We can expand the attributes if requested +class _WritelnDecorator: + def __init__(self, stream: _SupportsWriteAndFlush) -> None: ... + def writeln(self, arg: str | None = None) -> None: ... + def __getattr__(self, attr: str) -> Any: ... # Any attribute from the stream type passed to __init__ + # These attributes are prevented by __getattr__ + stream: Never + __getstate__: Never + # Methods proxied from the wrapped stream object via __getattr__ + def flush(self) -> object: ... + def write(self, s: str, /) -> object: ... + +_StreamT = TypeVar("_StreamT", bound=_TextTestStream, default=_WritelnDecorator) + +class TextTestResult(unittest.result.TestResult, Generic[_StreamT]): + descriptions: bool # undocumented + dots: bool # undocumented + separator1: str + separator2: str + showAll: bool # undocumented + stream: _StreamT # undocumented + if sys.version_info >= (3, 12): + durations: int | None + def __init__(self, stream: _StreamT, descriptions: bool, verbosity: int, *, durations: int | None = None) -> None: ... + else: + def __init__(self, stream: _StreamT, descriptions: bool, verbosity: int) -> None: ... + + def getDescription(self, test: unittest.case.TestCase) -> str: ... + def printErrorList(self, flavour: str, errors: Iterable[tuple[unittest.case.TestCase, str]]) -> None: ... 
+ +class TextTestRunner: + resultclass: _ResultClassType + stream: _WritelnDecorator + descriptions: bool + verbosity: int + failfast: bool + buffer: bool + warnings: _ActionKind | None + tb_locals: bool + + if sys.version_info >= (3, 12): + durations: int | None + def __init__( + self, + stream: _SupportsWriteAndFlush | None = None, + descriptions: bool = True, + verbosity: int = 1, + failfast: bool = False, + buffer: bool = False, + resultclass: _ResultClassType | None = None, + warnings: _ActionKind | None = None, + *, + tb_locals: bool = False, + durations: int | None = None, + ) -> None: ... + else: + def __init__( + self, + stream: _SupportsWriteAndFlush | None = None, + descriptions: bool = True, + verbosity: int = 1, + failfast: bool = False, + buffer: bool = False, + resultclass: _ResultClassType | None = None, + warnings: str | None = None, + *, + tb_locals: bool = False, + ) -> None: ... + + def _makeResult(self) -> TextTestResult: ... + def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase) -> TextTestResult: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/signals.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/signals.pyi new file mode 100644 index 0000000..a60133a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/signals.pyi @@ -0,0 +1,15 @@ +import unittest.result +from collections.abc import Callable +from typing import TypeVar, overload +from typing_extensions import ParamSpec + +_P = ParamSpec("_P") +_T = TypeVar("_T") + +def installHandler() -> None: ... +def registerResult(result: unittest.result.TestResult) -> None: ... +def removeResult(result: unittest.result.TestResult) -> bool: ... +@overload +def removeHandler(method: None = None) -> None: ... +@overload +def removeHandler(method: Callable[_P, _T]) -> Callable[_P, _T]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/suite.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/suite.pyi new file mode 100644 index 0000000..4433961 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/suite.pyi @@ -0,0 +1,24 @@ +import unittest.case +import unittest.result +from collections.abc import Iterable, Iterator +from typing import ClassVar +from typing_extensions import TypeAlias + +_TestType: TypeAlias = unittest.case.TestCase | TestSuite + +class BaseTestSuite: + _tests: list[unittest.case.TestCase] + _removed_tests: int + def __init__(self, tests: Iterable[_TestType] = ()) -> None: ... + def __call__(self, result: unittest.result.TestResult) -> unittest.result.TestResult: ... + def addTest(self, test: _TestType) -> None: ... + def addTests(self, tests: Iterable[_TestType]) -> None: ... + def run(self, result: unittest.result.TestResult) -> unittest.result.TestResult: ... + def debug(self) -> None: ... + def countTestCases(self) -> int: ... + def __iter__(self) -> Iterator[_TestType]: ... + def __eq__(self, other: object) -> bool: ... + __hash__: ClassVar[None] # type: ignore[assignment] + +class TestSuite(BaseTestSuite): + def run(self, result: unittest.result.TestResult, debug: bool = False) -> unittest.result.TestResult: ... 
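For illustration only (this is not repository code): a hand-built TestSuite run through TextTestRunner, whose run() returns the TextTestResult typed in runner.pyi above; MathCase is a throwaway test case.

import io
import unittest

class MathCase(unittest.TestCase):
    def test_add(self) -> None:
        self.assertEqual(1 + 1, 2)

suite = unittest.TestSuite()
suite.addTest(MathCase("test_add"))

# Any object with write()/flush() works as the stream (see _SupportsWriteAndFlush above).
runner = unittest.TextTestRunner(stream=io.StringIO(), verbosity=2)
result = runner.run(suite)
print(result.testsRun, result.wasSuccessful())  # prints: 1 True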
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/util.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/util.pyi new file mode 100644 index 0000000..763c147 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/unittest/util.pyi @@ -0,0 +1,40 @@ +from collections.abc import MutableSequence, Sequence +from typing import Any, Final, Literal, Protocol, TypeVar, type_check_only +from typing_extensions import TypeAlias + +@type_check_only +class _SupportsDunderLT(Protocol): + def __lt__(self, other: Any, /) -> bool: ... + +@type_check_only +class _SupportsDunderGT(Protocol): + def __gt__(self, other: Any, /) -> bool: ... + +@type_check_only +class _SupportsDunderLE(Protocol): + def __le__(self, other: Any, /) -> bool: ... + +@type_check_only +class _SupportsDunderGE(Protocol): + def __ge__(self, other: Any, /) -> bool: ... + +_T = TypeVar("_T") +_Mismatch: TypeAlias = tuple[_T, _T, int] +_SupportsComparison: TypeAlias = _SupportsDunderLE | _SupportsDunderGE | _SupportsDunderGT | _SupportsDunderLT + +_MAX_LENGTH: Final = 80 +_PLACEHOLDER_LEN: Final = 12 +_MIN_BEGIN_LEN: Final = 5 +_MIN_END_LEN: Final = 5 +_MIN_COMMON_LEN: Final = 5 +_MIN_DIFF_LEN: Final = 41 + +def _shorten(s: str, prefixlen: int, suffixlen: int) -> str: ... +def _common_shorten_repr(*args: str) -> tuple[str, ...]: ... +def safe_repr(obj: object, short: bool = False) -> str: ... +def strclass(cls: type) -> str: ... +def sorted_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> tuple[list[_T], list[_T]]: ... +def unorderable_list_difference(expected: MutableSequence[_T], actual: MutableSequence[_T]) -> tuple[list[_T], list[_T]]: ... +def three_way_cmp(x: _SupportsComparison, y: _SupportsComparison) -> Literal[-1, 0, 1]: ... +def _count_diff_all_purpose(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: ... +def _count_diff_hashable(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/urllib/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/urllib/__init__.pyi new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/urllib/error.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/urllib/error.pyi new file mode 100644 index 0000000..2173d7e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/urllib/error.pyi @@ -0,0 +1,28 @@ +from email.message import Message +from typing import IO +from urllib.response import addinfourl + +__all__ = ["URLError", "HTTPError", "ContentTooShortError"] + +class URLError(OSError): + reason: str | BaseException + # The `filename` attribute only exists if it was provided to `__init__` and wasn't `None`. + filename: str + def __init__(self, reason: str | BaseException, filename: str | None = None) -> None: ... + +class HTTPError(URLError, addinfourl): + @property + def headers(self) -> Message: ... + @headers.setter + def headers(self, headers: Message) -> None: ... + @property + def reason(self) -> str: ... # type: ignore[override] + code: int + msg: str + hdrs: Message + fp: IO[bytes] + def __init__(self, url: str, code: int, msg: str, hdrs: Message, fp: IO[bytes] | None) -> None: ... + +class ContentTooShortError(URLError): + content: tuple[str, Message] + def __init__(self, message: str, content: tuple[str, Message]) -> None: ... 
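As a quick illustration of the exception hierarchy typed in error.pyi above (again, not part of the commit), the host below is a deliberately unresolvable placeholder so the call fails locally.

from urllib.error import HTTPError, URLError
from urllib.request import urlopen

try:
    urlopen("https://example.invalid/")  # placeholder host; name resolution fails
except HTTPError as exc:
    # HTTPError doubles as an exception and a response-like object (addinfourl).
    print("HTTP status:", exc.code, exc.reason)
except URLError as exc:
    print("transport problem:", exc.reason)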
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/urllib/parse.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/urllib/parse.pyi new file mode 100644 index 0000000..364892e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/urllib/parse.pyi @@ -0,0 +1,201 @@ +import sys +from collections.abc import Iterable, Mapping, Sequence +from types import GenericAlias +from typing import Any, AnyStr, Final, Generic, Literal, NamedTuple, Protocol, overload, type_check_only +from typing_extensions import TypeAlias + +__all__ = [ + "urlparse", + "urlunparse", + "urljoin", + "urldefrag", + "urlsplit", + "urlunsplit", + "urlencode", + "parse_qs", + "parse_qsl", + "quote", + "quote_plus", + "quote_from_bytes", + "unquote", + "unquote_plus", + "unquote_to_bytes", + "DefragResult", + "ParseResult", + "SplitResult", + "DefragResultBytes", + "ParseResultBytes", + "SplitResultBytes", +] + +uses_relative: Final[list[str]] +uses_netloc: Final[list[str]] +uses_params: Final[list[str]] +non_hierarchical: Final[list[str]] +uses_query: Final[list[str]] +uses_fragment: Final[list[str]] +scheme_chars: Final[str] +if sys.version_info < (3, 11): + MAX_CACHE_SIZE: Final[int] + +class _ResultMixinStr: + __slots__ = () + def encode(self, encoding: str = "ascii", errors: str = "strict") -> _ResultMixinBytes: ... + +class _ResultMixinBytes: + __slots__ = () + def decode(self, encoding: str = "ascii", errors: str = "strict") -> _ResultMixinStr: ... + +class _NetlocResultMixinBase(Generic[AnyStr]): + __slots__ = () + @property + def username(self) -> AnyStr | None: ... + @property + def password(self) -> AnyStr | None: ... + @property + def hostname(self) -> AnyStr | None: ... + @property + def port(self) -> int | None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + +class _NetlocResultMixinStr(_NetlocResultMixinBase[str], _ResultMixinStr): + __slots__ = () + +class _NetlocResultMixinBytes(_NetlocResultMixinBase[bytes], _ResultMixinBytes): + __slots__ = () + +class _DefragResultBase(NamedTuple, Generic[AnyStr]): + url: AnyStr + fragment: AnyStr + +class _SplitResultBase(NamedTuple, Generic[AnyStr]): + scheme: AnyStr + netloc: AnyStr + path: AnyStr + query: AnyStr + fragment: AnyStr + +class _ParseResultBase(NamedTuple, Generic[AnyStr]): + scheme: AnyStr + netloc: AnyStr + path: AnyStr + params: AnyStr + query: AnyStr + fragment: AnyStr + +# Structured result objects for string data +class DefragResult(_DefragResultBase[str], _ResultMixinStr): + def geturl(self) -> str: ... + +class SplitResult(_SplitResultBase[str], _NetlocResultMixinStr): + def geturl(self) -> str: ... + +class ParseResult(_ParseResultBase[str], _NetlocResultMixinStr): + def geturl(self) -> str: ... + +# Structured result objects for bytes data +class DefragResultBytes(_DefragResultBase[bytes], _ResultMixinBytes): + def geturl(self) -> bytes: ... + +class SplitResultBytes(_SplitResultBase[bytes], _NetlocResultMixinBytes): + def geturl(self) -> bytes: ... + +class ParseResultBytes(_ParseResultBase[bytes], _NetlocResultMixinBytes): + def geturl(self) -> bytes: ... + +def parse_qs( + qs: AnyStr | None, + keep_blank_values: bool = False, + strict_parsing: bool = False, + encoding: str = "utf-8", + errors: str = "replace", + max_num_fields: int | None = None, + separator: str = "&", +) -> dict[AnyStr, list[AnyStr]]: ... 
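A tiny sketch, separate from the stub itself: parse_qs as declared above is generic over str and bytes input and returns a dict of lists either way.

from urllib.parse import parse_qs

print(parse_qs("a=1&a=2&b=three"))  # {'a': ['1', '2'], 'b': ['three']}
print(parse_qs(b"x=%C3%A9"))        # bytes in, bytes keys and values out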
+def parse_qsl( + qs: AnyStr | None, + keep_blank_values: bool = False, + strict_parsing: bool = False, + encoding: str = "utf-8", + errors: str = "replace", + max_num_fields: int | None = None, + separator: str = "&", +) -> list[tuple[AnyStr, AnyStr]]: ... +@overload +def quote(string: str, safe: str | Iterable[int] = "/", encoding: str | None = None, errors: str | None = None) -> str: ... +@overload +def quote(string: bytes | bytearray, safe: str | Iterable[int] = "/") -> str: ... +def quote_from_bytes(bs: bytes | bytearray, safe: str | Iterable[int] = "/") -> str: ... +@overload +def quote_plus(string: str, safe: str | Iterable[int] = "", encoding: str | None = None, errors: str | None = None) -> str: ... +@overload +def quote_plus(string: bytes | bytearray, safe: str | Iterable[int] = "") -> str: ... +def unquote(string: str | bytes, encoding: str = "utf-8", errors: str = "replace") -> str: ... +def unquote_to_bytes(string: str | bytes | bytearray) -> bytes: ... +def unquote_plus(string: str, encoding: str = "utf-8", errors: str = "replace") -> str: ... +@overload +def urldefrag(url: str) -> DefragResult: ... +@overload +def urldefrag(url: bytes | bytearray | None) -> DefragResultBytes: ... + +# The values are passed through `str()` (unless they are bytes), so anything is valid. +_QueryType: TypeAlias = ( + Mapping[str, object] + | Mapping[bytes, object] + | Mapping[str | bytes, object] + | Mapping[str, Sequence[object]] + | Mapping[bytes, Sequence[object]] + | Mapping[str | bytes, Sequence[object]] + | Sequence[tuple[str | bytes, object]] + | Sequence[tuple[str | bytes, Sequence[object]]] +) + +@type_check_only +class _QuoteVia(Protocol): + @overload + def __call__(self, string: str, safe: str | bytes, encoding: str, errors: str, /) -> str: ... + @overload + def __call__(self, string: bytes, safe: str | bytes, /) -> str: ... + +def urlencode( + query: _QueryType, + doseq: bool = False, + safe: str | bytes = "", + encoding: str | None = None, + errors: str | None = None, + quote_via: _QuoteVia = ..., +) -> str: ... +def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = True) -> AnyStr: ... +@overload +def urlparse(url: str, scheme: str = "", allow_fragments: bool = True) -> ParseResult: ... +@overload +def urlparse( + url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True +) -> ParseResultBytes: ... +@overload +def urlsplit(url: str, scheme: str = "", allow_fragments: bool = True) -> SplitResult: ... + +if sys.version_info >= (3, 11): + @overload + def urlsplit( + url: bytes | None, scheme: bytes | None | Literal[""] = "", allow_fragments: bool = True + ) -> SplitResultBytes: ... + +else: + @overload + def urlsplit( + url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True + ) -> SplitResultBytes: ... + +# Requires an iterable of length 6 +@overload +def urlunparse(components: Iterable[None]) -> Literal[b""]: ... # type: ignore[overload-overlap] +@overload +def urlunparse(components: Iterable[AnyStr | None]) -> AnyStr: ... + +# Requires an iterable of length 5 +@overload +def urlunsplit(components: Iterable[None]) -> Literal[b""]: ... # type: ignore[overload-overlap] +@overload +def urlunsplit(components: Iterable[AnyStr | None]) -> AnyStr: ... +def unwrap(url: str) -> str: ... 
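A short round trip through the encode/parse/join helpers declared above, using a placeholder URL; illustrative only.

from urllib.parse import urlencode, urljoin, urlparse, urlunparse

query = urlencode({"q": "type stubs", "page": 2})        # values pass through str()
parts = urlparse("https://example.org/search?" + query)  # ParseResult, a named tuple
print(parts.scheme, parts.netloc, parts.path, parts.query)
print(urlunparse(parts))                                 # reassembles the original URL
print(urljoin("https://example.org/a/b", "../c"))        # prints: https://example.org/c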
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/urllib/request.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/urllib/request.pyi new file mode 100644 index 0000000..876b9d3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/urllib/request.pyi @@ -0,0 +1,417 @@ +import ssl +import sys +from _typeshed import ReadableBuffer, StrOrBytesPath, SupportsRead +from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence +from email.message import Message +from http.client import HTTPConnection, HTTPMessage, HTTPResponse +from http.cookiejar import CookieJar +from re import Pattern +from typing import IO, Any, ClassVar, NoReturn, Protocol, TypeVar, overload, type_check_only +from typing_extensions import TypeAlias, deprecated +from urllib.error import HTTPError as HTTPError +from urllib.response import addclosehook, addinfourl + +__all__ = [ + "Request", + "OpenerDirector", + "BaseHandler", + "HTTPDefaultErrorHandler", + "HTTPRedirectHandler", + "HTTPCookieProcessor", + "ProxyHandler", + "HTTPPasswordMgr", + "HTTPPasswordMgrWithDefaultRealm", + "HTTPPasswordMgrWithPriorAuth", + "AbstractBasicAuthHandler", + "HTTPBasicAuthHandler", + "ProxyBasicAuthHandler", + "AbstractDigestAuthHandler", + "HTTPDigestAuthHandler", + "ProxyDigestAuthHandler", + "HTTPHandler", + "FileHandler", + "FTPHandler", + "CacheFTPHandler", + "DataHandler", + "UnknownHandler", + "HTTPErrorProcessor", + "urlopen", + "install_opener", + "build_opener", + "pathname2url", + "url2pathname", + "getproxies", + "urlretrieve", + "urlcleanup", + "HTTPSHandler", +] +if sys.version_info < (3, 14): + __all__ += ["URLopener", "FancyURLopener"] + +_T = TypeVar("_T") +_UrlopenRet: TypeAlias = Any +_DataType: TypeAlias = ReadableBuffer | SupportsRead[bytes] | Iterable[bytes] | None + +if sys.version_info >= (3, 13): + def urlopen( + url: str | Request, data: _DataType | None = None, timeout: float | None = ..., *, context: ssl.SSLContext | None = None + ) -> _UrlopenRet: ... + +else: + def urlopen( + url: str | Request, + data: _DataType | None = None, + timeout: float | None = ..., + *, + cafile: str | None = None, + capath: str | None = None, + cadefault: bool = False, + context: ssl.SSLContext | None = None, + ) -> _UrlopenRet: ... + +def install_opener(opener: OpenerDirector) -> None: ... +def build_opener(*handlers: BaseHandler | Callable[[], BaseHandler]) -> OpenerDirector: ... + +if sys.version_info >= (3, 14): + def url2pathname(url: str, *, require_scheme: bool = False, resolve_host: bool = False) -> str: ... + def pathname2url(pathname: str, *, add_scheme: bool = False) -> str: ... + +else: + if sys.platform == "win32": + from nturl2path import pathname2url as pathname2url, url2pathname as url2pathname + else: + def url2pathname(pathname: str) -> str: ... + def pathname2url(pathname: str) -> str: ... + +def getproxies() -> dict[str, str]: ... +def getproxies_environment() -> dict[str, str]: ... +def parse_http_list(s: str) -> list[str]: ... +def parse_keqv_list(l: list[str]) -> dict[str, str]: ... + +if sys.platform == "win32" or sys.platform == "darwin": + def proxy_bypass(host: str) -> Any: ... # undocumented + +else: + def proxy_bypass(host: str, proxies: Mapping[str, str] | None = None) -> Any: ... # undocumented + +class Request: + @property + def full_url(self) -> str: ... + @full_url.setter + def full_url(self, value: str) -> None: ... + @full_url.deleter + def full_url(self) -> None: ... 
+ type: str + host: str + origin_req_host: str + selector: str + data: _DataType + headers: MutableMapping[str, str] + unredirected_hdrs: dict[str, str] + unverifiable: bool + method: str | None + timeout: float | None # Undocumented, only set after __init__() by OpenerDirector.open() + def __init__( + self, + url: str, + data: _DataType = None, + headers: MutableMapping[str, str] = {}, + origin_req_host: str | None = None, + unverifiable: bool = False, + method: str | None = None, + ) -> None: ... + def get_method(self) -> str: ... + def add_header(self, key: str, val: str) -> None: ... + def add_unredirected_header(self, key: str, val: str) -> None: ... + def has_header(self, header_name: str) -> bool: ... + def remove_header(self, header_name: str) -> None: ... + def get_full_url(self) -> str: ... + def set_proxy(self, host: str, type: str) -> None: ... + @overload + def get_header(self, header_name: str) -> str | None: ... + @overload + def get_header(self, header_name: str, default: _T) -> str | _T: ... + def header_items(self) -> list[tuple[str, str]]: ... + def has_proxy(self) -> bool: ... + +class OpenerDirector: + addheaders: list[tuple[str, str]] + def add_handler(self, handler: BaseHandler) -> None: ... + def open(self, fullurl: str | Request, data: _DataType = None, timeout: float | None = ...) -> _UrlopenRet: ... + def error(self, proto: str, *args: Any) -> _UrlopenRet: ... + def close(self) -> None: ... + +class BaseHandler: + handler_order: ClassVar[int] + parent: OpenerDirector + def add_parent(self, parent: OpenerDirector) -> None: ... + def close(self) -> None: ... + def __lt__(self, other: object) -> bool: ... + +class HTTPDefaultErrorHandler(BaseHandler): + def http_error_default( + self, req: Request, fp: IO[bytes], code: int, msg: str, hdrs: HTTPMessage + ) -> HTTPError: ... # undocumented + +class HTTPRedirectHandler(BaseHandler): + max_redirections: ClassVar[int] # undocumented + max_repeats: ClassVar[int] # undocumented + inf_msg: ClassVar[str] # undocumented + def redirect_request( + self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage, newurl: str + ) -> Request | None: ... + def http_error_301(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... + def http_error_302(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... + def http_error_303(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... + def http_error_307(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... + if sys.version_info >= (3, 11): + def http_error_308( + self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage + ) -> _UrlopenRet | None: ... + +class HTTPCookieProcessor(BaseHandler): + cookiejar: CookieJar + def __init__(self, cookiejar: CookieJar | None = None) -> None: ... + def http_request(self, request: Request) -> Request: ... # undocumented + def http_response(self, request: Request, response: HTTPResponse) -> HTTPResponse: ... # undocumented + def https_request(self, request: Request) -> Request: ... # undocumented + def https_response(self, request: Request, response: HTTPResponse) -> HTTPResponse: ... # undocumented + +class ProxyHandler(BaseHandler): + def __init__(self, proxies: dict[str, str] | None = None) -> None: ... + def proxy_open(self, req: Request, proxy: str, type: str) -> _UrlopenRet | None: ... 
# undocumented + # TODO: add a method for every (common) proxy protocol + +class HTTPPasswordMgr: + def add_password(self, realm: str, uri: str | Sequence[str], user: str, passwd: str) -> None: ... + def find_user_password(self, realm: str, authuri: str) -> tuple[str | None, str | None]: ... + def is_suburi(self, base: str, test: str) -> bool: ... # undocumented + def reduce_uri(self, uri: str, default_port: bool = True) -> tuple[str, str]: ... # undocumented + +class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): + def add_password(self, realm: str | None, uri: str | Sequence[str], user: str, passwd: str) -> None: ... + def find_user_password(self, realm: str | None, authuri: str) -> tuple[str | None, str | None]: ... + +class HTTPPasswordMgrWithPriorAuth(HTTPPasswordMgrWithDefaultRealm): + def add_password( + self, realm: str | None, uri: str | Sequence[str], user: str, passwd: str, is_authenticated: bool = False + ) -> None: ... + def update_authenticated(self, uri: str | Sequence[str], is_authenticated: bool = False) -> None: ... + def is_authenticated(self, authuri: str) -> bool | None: ... + +class AbstractBasicAuthHandler: + rx: ClassVar[Pattern[str]] # undocumented + passwd: HTTPPasswordMgr + add_password: Callable[[str, str | Sequence[str], str, str], None] + def __init__(self, password_mgr: HTTPPasswordMgr | None = None) -> None: ... + def http_error_auth_reqed(self, authreq: str, host: str, req: Request, headers: HTTPMessage) -> None: ... + def http_request(self, req: Request) -> Request: ... # undocumented + def http_response(self, req: Request, response: HTTPResponse) -> HTTPResponse: ... # undocumented + def https_request(self, req: Request) -> Request: ... # undocumented + def https_response(self, req: Request, response: HTTPResponse) -> HTTPResponse: ... # undocumented + def retry_http_basic_auth(self, host: str, req: Request, realm: str) -> _UrlopenRet | None: ... # undocumented + +class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): + auth_header: ClassVar[str] # undocumented + def http_error_401(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... + +class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): + auth_header: ClassVar[str] + def http_error_407(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... + +class AbstractDigestAuthHandler: + def __init__(self, passwd: HTTPPasswordMgr | None = None) -> None: ... + def reset_retry_count(self) -> None: ... + def http_error_auth_reqed(self, auth_header: str, host: str, req: Request, headers: HTTPMessage) -> None: ... + def retry_http_digest_auth(self, req: Request, auth: str) -> _UrlopenRet | None: ... + def get_cnonce(self, nonce: str) -> str: ... + def get_authorization(self, req: Request, chal: Mapping[str, str]) -> str | None: ... + def get_algorithm_impls(self, algorithm: str) -> tuple[Callable[[str], str], Callable[[str, str], str]]: ... + def get_entity_digest(self, data: ReadableBuffer | None, chal: Mapping[str, str]) -> str | None: ... + +class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): + auth_header: ClassVar[str] # undocumented + def http_error_401(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... 
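To show how the handler and opener classes typed above fit together (credentials and URLs are placeholders, and no network call is made):

from urllib.request import (
    HTTPBasicAuthHandler,
    HTTPPasswordMgrWithDefaultRealm,
    Request,
    build_opener,
)

mgr = HTTPPasswordMgrWithDefaultRealm()
mgr.add_password(None, "https://example.org/", "user", "secret")  # placeholder credentials
opener = build_opener(HTTPBasicAuthHandler(mgr))

req = Request("https://example.org/api", headers={"Accept": "application/json"}, method="GET")
print(req.get_method(), req.has_header("Accept"))  # prints: GET True
# opener.open(req) would perform the request; omitted to keep the sketch offline.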
+ +class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): + auth_header: ClassVar[str] # undocumented + def http_error_407(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... + +@type_check_only +class _HTTPConnectionProtocol(Protocol): + def __call__( + self, + host: str, + /, + *, + port: int | None = ..., + timeout: float = ..., + source_address: tuple[str, int] | None = ..., + blocksize: int = ..., + ) -> HTTPConnection: ... + +class AbstractHTTPHandler(BaseHandler): # undocumented + if sys.version_info >= (3, 12): + def __init__(self, debuglevel: int | None = None) -> None: ... + else: + def __init__(self, debuglevel: int = 0) -> None: ... + + def set_http_debuglevel(self, level: int) -> None: ... + def do_request_(self, request: Request) -> Request: ... + def do_open(self, http_class: _HTTPConnectionProtocol, req: Request, **http_conn_args: Any) -> HTTPResponse: ... + +class HTTPHandler(AbstractHTTPHandler): + def http_open(self, req: Request) -> HTTPResponse: ... + def http_request(self, request: Request) -> Request: ... # undocumented + +class HTTPSHandler(AbstractHTTPHandler): + if sys.version_info >= (3, 12): + def __init__( + self, debuglevel: int | None = None, context: ssl.SSLContext | None = None, check_hostname: bool | None = None + ) -> None: ... + else: + def __init__( + self, debuglevel: int = 0, context: ssl.SSLContext | None = None, check_hostname: bool | None = None + ) -> None: ... + + def https_open(self, req: Request) -> HTTPResponse: ... + def https_request(self, request: Request) -> Request: ... # undocumented + +class FileHandler(BaseHandler): + names: ClassVar[tuple[str, ...] | None] # undocumented + def file_open(self, req: Request) -> addinfourl: ... + def get_names(self) -> tuple[str, ...]: ... # undocumented + def open_local_file(self, req: Request) -> addinfourl: ... # undocumented + +class DataHandler(BaseHandler): + def data_open(self, req: Request) -> addinfourl: ... + +class ftpwrapper: # undocumented + def __init__( + self, user: str, passwd: str, host: str, port: int, dirs: str, timeout: float | None = None, persistent: bool = True + ) -> None: ... + def close(self) -> None: ... + def endtransfer(self) -> None: ... + def file_close(self) -> None: ... + def init(self) -> None: ... + def real_close(self) -> None: ... + def retrfile(self, file: str, type: str) -> tuple[addclosehook, int | None]: ... + +class FTPHandler(BaseHandler): + def ftp_open(self, req: Request) -> addinfourl: ... + def connect_ftp( + self, user: str, passwd: str, host: str, port: int, dirs: str, timeout: float + ) -> ftpwrapper: ... # undocumented + +class CacheFTPHandler(FTPHandler): + def setTimeout(self, t: float) -> None: ... + def setMaxConns(self, m: int) -> None: ... + def check_cache(self) -> None: ... # undocumented + def clear_cache(self) -> None: ... # undocumented + +class UnknownHandler(BaseHandler): + def unknown_open(self, req: Request) -> NoReturn: ... + +class HTTPErrorProcessor(BaseHandler): + def http_response(self, request: Request, response: HTTPResponse) -> _UrlopenRet: ... + def https_response(self, request: Request, response: HTTPResponse) -> _UrlopenRet: ... + +def urlretrieve( + url: str, + filename: StrOrBytesPath | None = None, + reporthook: Callable[[int, int, int], object] | None = None, + data: _DataType = None, +) -> tuple[str, HTTPMessage]: ... +def urlcleanup() -> None: ... + +if sys.version_info < (3, 14): + @deprecated("Deprecated since Python 3.3; removed in Python 3.14. 
Use newer `urlopen` functions and methods.") + class URLopener: + version: ClassVar[str] + def __init__(self, proxies: dict[str, str] | None = None, **x509: str) -> None: ... + def open(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... + def open_unknown(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... + def retrieve( + self, + url: str, + filename: str | None = None, + reporthook: Callable[[int, int, int], object] | None = None, + data: ReadableBuffer | None = None, + ) -> tuple[str, Message | None]: ... + def addheader(self, *args: tuple[str, str]) -> None: ... # undocumented + def cleanup(self) -> None: ... # undocumented + def close(self) -> None: ... # undocumented + def http_error( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = None + ) -> _UrlopenRet: ... # undocumented + def http_error_default( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage + ) -> _UrlopenRet: ... # undocumented + def open_data(self, url: str, data: ReadableBuffer | None = None) -> addinfourl: ... # undocumented + def open_file(self, url: str) -> addinfourl: ... # undocumented + def open_ftp(self, url: str) -> addinfourl: ... # undocumented + def open_http(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... # undocumented + def open_https(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... # undocumented + def open_local_file(self, url: str) -> addinfourl: ... # undocumented + def open_unknown_proxy(self, proxy: str, fullurl: str, data: ReadableBuffer | None = None) -> None: ... # undocumented + def __del__(self) -> None: ... + + @deprecated("Deprecated since Python 3.3; removed in Python 3.14. Use newer `urlopen` functions and methods.") + class FancyURLopener(URLopener): + def prompt_user_passwd(self, host: str, realm: str) -> tuple[str, str]: ... + def get_user_passwd(self, host: str, realm: str, clear_cache: int = 0) -> tuple[str, str]: ... # undocumented + def http_error_301( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None + ) -> _UrlopenRet | addinfourl | None: ... # undocumented + def http_error_302( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None + ) -> _UrlopenRet | addinfourl | None: ... # undocumented + def http_error_303( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None + ) -> _UrlopenRet | addinfourl | None: ... # undocumented + def http_error_307( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None + ) -> _UrlopenRet | addinfourl | None: ... # undocumented + if sys.version_info >= (3, 11): + def http_error_308( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None + ) -> _UrlopenRet | addinfourl | None: ... # undocumented + + def http_error_401( + self, + url: str, + fp: IO[bytes], + errcode: int, + errmsg: str, + headers: HTTPMessage, + data: ReadableBuffer | None = None, + retry: bool = False, + ) -> _UrlopenRet | None: ... # undocumented + def http_error_407( + self, + url: str, + fp: IO[bytes], + errcode: int, + errmsg: str, + headers: HTTPMessage, + data: ReadableBuffer | None = None, + retry: bool = False, + ) -> _UrlopenRet | None: ... 
# undocumented + def http_error_default( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage + ) -> addinfourl: ... # undocumented + def redirect_internal( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None + ) -> _UrlopenRet | None: ... # undocumented + def retry_http_basic_auth( + self, url: str, realm: str, data: ReadableBuffer | None = None + ) -> _UrlopenRet | None: ... # undocumented + def retry_https_basic_auth( + self, url: str, realm: str, data: ReadableBuffer | None = None + ) -> _UrlopenRet | None: ... # undocumented + def retry_proxy_http_basic_auth( + self, url: str, realm: str, data: ReadableBuffer | None = None + ) -> _UrlopenRet | None: ... # undocumented + def retry_proxy_https_basic_auth( + self, url: str, realm: str, data: ReadableBuffer | None = None + ) -> _UrlopenRet | None: ... # undocumented diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/urllib/response.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/urllib/response.pyi new file mode 100644 index 0000000..65df9cd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/urllib/response.pyi @@ -0,0 +1,40 @@ +import tempfile +from _typeshed import ReadableBuffer +from collections.abc import Callable, Iterable +from email.message import Message +from types import TracebackType +from typing import IO, Any + +__all__ = ["addbase", "addclosehook", "addinfo", "addinfourl"] + +class addbase(tempfile._TemporaryFileWrapper[bytes]): + fp: IO[bytes] + def __init__(self, fp: IO[bytes]) -> None: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + # These methods don't actually exist, but the class inherits at runtime from + # tempfile._TemporaryFileWrapper, which uses __getattr__ to delegate to the + # underlying file object. To satisfy the BinaryIO interface, we pretend that this + # class has these additional methods. + def write(self, s: ReadableBuffer) -> int: ... + def writelines(self, lines: Iterable[ReadableBuffer]) -> None: ... + +class addclosehook(addbase): + closehook: Callable[..., object] + hookargs: tuple[Any, ...] + def __init__(self, fp: IO[bytes], closehook: Callable[..., object], *hookargs: Any) -> None: ... + +class addinfo(addbase): + headers: Message + def __init__(self, fp: IO[bytes], headers: Message) -> None: ... + def info(self) -> Message: ... + +class addinfourl(addinfo): + url: str + code: int | None + @property + def status(self) -> int | None: ... + def __init__(self, fp: IO[bytes], headers: Message, url: str, code: int | None = None) -> None: ... + def geturl(self) -> str: ... + def getcode(self) -> int | None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/urllib/robotparser.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/urllib/robotparser.pyi new file mode 100644 index 0000000..14ceef5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/urllib/robotparser.pyi @@ -0,0 +1,20 @@ +from collections.abc import Iterable +from typing import NamedTuple + +__all__ = ["RobotFileParser"] + +class RequestRate(NamedTuple): + requests: int + seconds: int + +class RobotFileParser: + def __init__(self, url: str = "") -> None: ... + def set_url(self, url: str) -> None: ... + def read(self) -> None: ... + def parse(self, lines: Iterable[str]) -> None: ... + def can_fetch(self, useragent: str, url: str) -> bool: ... 
+ def mtime(self) -> int: ... + def modified(self) -> None: ... + def crawl_delay(self, useragent: str) -> str | None: ... + def request_rate(self, useragent: str) -> RequestRate | None: ... + def site_maps(self) -> list[str] | None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/uu.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/uu.pyi new file mode 100644 index 0000000..324053e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/uu.pyi @@ -0,0 +1,13 @@ +from typing import BinaryIO +from typing_extensions import TypeAlias + +__all__ = ["Error", "encode", "decode"] + +_File: TypeAlias = str | BinaryIO + +class Error(Exception): ... + +def encode( + in_file: _File, out_file: _File, name: str | None = None, mode: int | None = None, *, backtick: bool = False +) -> None: ... +def decode(in_file: _File, out_file: _File | None = None, mode: int | None = None, quiet: bool = False) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/uuid.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/uuid.pyi new file mode 100644 index 0000000..055f4de --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/uuid.pyi @@ -0,0 +1,106 @@ +import builtins +import sys +from _typeshed import Unused +from enum import Enum +from typing import Final, NoReturn +from typing_extensions import LiteralString, TypeAlias + +_FieldsType: TypeAlias = tuple[int, int, int, int, int, int] + +class SafeUUID(Enum): + safe = 0 + unsafe = -1 + unknown = None + +class UUID: + __slots__ = ("int", "is_safe", "__weakref__") + is_safe: Final[SafeUUID] + int: Final[builtins.int] + + def __init__( + self, + hex: str | None = None, + bytes: builtins.bytes | None = None, + bytes_le: builtins.bytes | None = None, + fields: _FieldsType | None = None, + int: builtins.int | None = None, + version: builtins.int | None = None, + *, + is_safe: SafeUUID = SafeUUID.unknown, + ) -> None: ... + @property + def bytes(self) -> builtins.bytes: ... + @property + def bytes_le(self) -> builtins.bytes: ... + @property + def clock_seq(self) -> builtins.int: ... + @property + def clock_seq_hi_variant(self) -> builtins.int: ... + @property + def clock_seq_low(self) -> builtins.int: ... + @property + def fields(self) -> _FieldsType: ... + @property + def hex(self) -> str: ... + @property + def node(self) -> builtins.int: ... + @property + def time(self) -> builtins.int: ... + @property + def time_hi_version(self) -> builtins.int: ... + @property + def time_low(self) -> builtins.int: ... + @property + def time_mid(self) -> builtins.int: ... + @property + def urn(self) -> str: ... + @property + def variant(self) -> str: ... + @property + def version(self) -> builtins.int | None: ... + def __int__(self) -> builtins.int: ... + def __eq__(self, other: object) -> bool: ... + def __lt__(self, other: UUID) -> bool: ... + def __le__(self, other: UUID) -> bool: ... + def __gt__(self, other: UUID) -> bool: ... + def __ge__(self, other: UUID) -> bool: ... + def __hash__(self) -> builtins.int: ... + def __setattr__(self, name: Unused, value: Unused) -> NoReturn: ... + +def getnode() -> int: ... +def uuid1(node: int | None = None, clock_seq: int | None = None) -> UUID: ... + +if sys.version_info >= (3, 14): + def uuid6(node: int | None = None, clock_seq: int | None = None) -> UUID: ... + def uuid7() -> UUID: ... + def uuid8(a: int | None = None, b: int | None = None, c: int | None = None) -> UUID: ... 
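A small illustration, not stub content, exercising the UUID class and uuid1() declared above.

import uuid

u = uuid.uuid1()                   # timestamp/node-based UUID
print(u.version, u.variant)        # prints: 1 specified in RFC 4122
print(uuid.UUID(hex=u.hex) == u)   # round-trips through the hex property, prints: True
print(u.urn)                       # urn:uuid:...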
+ +if sys.version_info >= (3, 12): + def uuid3(namespace: UUID, name: str | bytes) -> UUID: ... + +else: + def uuid3(namespace: UUID, name: str) -> UUID: ... + +def uuid4() -> UUID: ... + +if sys.version_info >= (3, 12): + def uuid5(namespace: UUID, name: str | bytes) -> UUID: ... + +else: + def uuid5(namespace: UUID, name: str) -> UUID: ... + +if sys.version_info >= (3, 14): + NIL: Final[UUID] + MAX: Final[UUID] + +NAMESPACE_DNS: Final[UUID] +NAMESPACE_URL: Final[UUID] +NAMESPACE_OID: Final[UUID] +NAMESPACE_X500: Final[UUID] +RESERVED_NCS: Final[LiteralString] +RFC_4122: Final[LiteralString] +RESERVED_MICROSOFT: Final[LiteralString] +RESERVED_FUTURE: Final[LiteralString] + +if sys.version_info >= (3, 12): + def main() -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/venv/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/venv/__init__.pyi new file mode 100644 index 0000000..14db885 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/venv/__init__.pyi @@ -0,0 +1,86 @@ +import logging +import sys +from _typeshed import StrOrBytesPath +from collections.abc import Iterable, Sequence +from types import SimpleNamespace +from typing import Final + +logger: logging.Logger + +CORE_VENV_DEPS: Final[tuple[str, ...]] + +class EnvBuilder: + system_site_packages: bool + clear: bool + symlinks: bool + upgrade: bool + with_pip: bool + prompt: str | None + + if sys.version_info >= (3, 13): + def __init__( + self, + system_site_packages: bool = False, + clear: bool = False, + symlinks: bool = False, + upgrade: bool = False, + with_pip: bool = False, + prompt: str | None = None, + upgrade_deps: bool = False, + *, + scm_ignore_files: Iterable[str] = ..., + ) -> None: ... + else: + def __init__( + self, + system_site_packages: bool = False, + clear: bool = False, + symlinks: bool = False, + upgrade: bool = False, + with_pip: bool = False, + prompt: str | None = None, + upgrade_deps: bool = False, + ) -> None: ... + + def create(self, env_dir: StrOrBytesPath) -> None: ... + def clear_directory(self, path: StrOrBytesPath) -> None: ... # undocumented + def ensure_directories(self, env_dir: StrOrBytesPath) -> SimpleNamespace: ... + def create_configuration(self, context: SimpleNamespace) -> None: ... + def symlink_or_copy( + self, src: StrOrBytesPath, dst: StrOrBytesPath, relative_symlinks_ok: bool = False + ) -> None: ... # undocumented + def setup_python(self, context: SimpleNamespace) -> None: ... + def _setup_pip(self, context: SimpleNamespace) -> None: ... # undocumented + def setup_scripts(self, context: SimpleNamespace) -> None: ... + def post_setup(self, context: SimpleNamespace) -> None: ... + def replace_variables(self, text: str, context: SimpleNamespace) -> str: ... # undocumented + def install_scripts(self, context: SimpleNamespace, path: str) -> None: ... + def upgrade_dependencies(self, context: SimpleNamespace) -> None: ... + if sys.version_info >= (3, 13): + def create_git_ignore_file(self, context: SimpleNamespace) -> None: ... + +if sys.version_info >= (3, 13): + def create( + env_dir: StrOrBytesPath, + system_site_packages: bool = False, + clear: bool = False, + symlinks: bool = False, + with_pip: bool = False, + prompt: str | None = None, + upgrade_deps: bool = False, + *, + scm_ignore_files: Iterable[str] = ..., + ) -> None: ... 
+ +else: + def create( + env_dir: StrOrBytesPath, + system_site_packages: bool = False, + clear: bool = False, + symlinks: bool = False, + with_pip: bool = False, + prompt: str | None = None, + upgrade_deps: bool = False, + ) -> None: ... + +def main(args: Sequence[str] | None = None) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/warnings.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/warnings.pyi new file mode 100644 index 0000000..49c98cb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/warnings.pyi @@ -0,0 +1,126 @@ +import re +import sys +from _warnings import warn as warn, warn_explicit as warn_explicit +from collections.abc import Sequence +from types import ModuleType, TracebackType +from typing import Any, Generic, Literal, TextIO, overload +from typing_extensions import LiteralString, TypeAlias, TypeVar + +__all__ = [ + "warn", + "warn_explicit", + "showwarning", + "formatwarning", + "filterwarnings", + "simplefilter", + "resetwarnings", + "catch_warnings", +] + +if sys.version_info >= (3, 13): + __all__ += ["deprecated"] + +_T = TypeVar("_T") +_W_co = TypeVar("_W_co", bound=list[WarningMessage] | None, default=list[WarningMessage] | None, covariant=True) + +if sys.version_info >= (3, 14): + _ActionKind: TypeAlias = Literal["default", "error", "ignore", "always", "module", "once"] +else: + _ActionKind: TypeAlias = Literal["default", "error", "ignore", "always", "all", "module", "once"] +filters: Sequence[tuple[str, re.Pattern[str] | None, type[Warning], re.Pattern[str] | None, int]] # undocumented, do not mutate + +def showwarning( + message: Warning | str, + category: type[Warning], + filename: str, + lineno: int, + file: TextIO | None = None, + line: str | None = None, +) -> None: ... +def formatwarning( + message: Warning | str, category: type[Warning], filename: str, lineno: int, line: str | None = None +) -> str: ... +def filterwarnings( + action: _ActionKind, message: str = "", category: type[Warning] = ..., module: str = "", lineno: int = 0, append: bool = False +) -> None: ... +def simplefilter(action: _ActionKind, category: type[Warning] = ..., lineno: int = 0, append: bool = False) -> None: ... +def resetwarnings() -> None: ... + +class _OptionError(Exception): ... + +class WarningMessage: + message: Warning | str + category: type[Warning] + filename: str + lineno: int + file: TextIO | None + line: str | None + source: Any | None + def __init__( + self, + message: Warning | str, + category: type[Warning], + filename: str, + lineno: int, + file: TextIO | None = None, + line: str | None = None, + source: Any | None = None, + ) -> None: ... + +class catch_warnings(Generic[_W_co]): + if sys.version_info >= (3, 11): + @overload + def __init__( + self: catch_warnings[None], + *, + record: Literal[False] = False, + module: ModuleType | None = None, + action: _ActionKind | None = None, + category: type[Warning] = ..., + lineno: int = 0, + append: bool = False, + ) -> None: ... + @overload + def __init__( + self: catch_warnings[list[WarningMessage]], + *, + record: Literal[True], + module: ModuleType | None = None, + action: _ActionKind | None = None, + category: type[Warning] = ..., + lineno: int = 0, + append: bool = False, + ) -> None: ... + @overload + def __init__( + self, + *, + record: bool, + module: ModuleType | None = None, + action: _ActionKind | None = None, + category: type[Warning] = ..., + lineno: int = 0, + append: bool = False, + ) -> None: ... 
+ else: + @overload + def __init__(self: catch_warnings[None], *, record: Literal[False] = False, module: ModuleType | None = None) -> None: ... + @overload + def __init__( + self: catch_warnings[list[WarningMessage]], *, record: Literal[True], module: ModuleType | None = None + ) -> None: ... + @overload + def __init__(self, *, record: bool, module: ModuleType | None = None) -> None: ... + + def __enter__(self) -> _W_co: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + +if sys.version_info >= (3, 13): + class deprecated: + message: LiteralString + category: type[Warning] | None + stacklevel: int + def __init__(self, message: LiteralString, /, *, category: type[Warning] | None = ..., stacklevel: int = 1) -> None: ... + def __call__(self, arg: _T, /) -> _T: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wave.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wave.pyi new file mode 100644 index 0000000..fd7dbfa --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wave.pyi @@ -0,0 +1,90 @@ +import sys +from _typeshed import ReadableBuffer, Unused +from typing import IO, Any, BinaryIO, Final, Literal, NamedTuple, NoReturn, overload +from typing_extensions import Self, TypeAlias, deprecated + +__all__ = ["open", "Error", "Wave_read", "Wave_write"] + +_File: TypeAlias = str | IO[bytes] + +class Error(Exception): ... + +WAVE_FORMAT_PCM: Final = 0x0001 + +class _wave_params(NamedTuple): + nchannels: int + sampwidth: int + framerate: int + nframes: int + comptype: str + compname: str + +class Wave_read: + def __init__(self, f: _File) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + def __del__(self) -> None: ... + def getfp(self) -> BinaryIO | None: ... + def rewind(self) -> None: ... + def close(self) -> None: ... + def tell(self) -> int: ... + def getnchannels(self) -> int: ... + def getnframes(self) -> int: ... + def getsampwidth(self) -> int: ... + def getframerate(self) -> int: ... + def getcomptype(self) -> str: ... + def getcompname(self) -> str: ... + def getparams(self) -> _wave_params: ... + if sys.version_info >= (3, 13): + @deprecated("Deprecated since Python 3.13; will be removed in Python 3.15.") + def getmarkers(self) -> None: ... + @deprecated("Deprecated since Python 3.13; will be removed in Python 3.15.") + def getmark(self, id: Any) -> NoReturn: ... + else: + def getmarkers(self) -> None: ... + def getmark(self, id: Any) -> NoReturn: ... + + def setpos(self, pos: int) -> None: ... + def readframes(self, nframes: int) -> bytes: ... + +class Wave_write: + def __init__(self, f: _File) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + def __del__(self) -> None: ... + def setnchannels(self, nchannels: int) -> None: ... + def getnchannels(self) -> int: ... + def setsampwidth(self, sampwidth: int) -> None: ... + def getsampwidth(self) -> int: ... + def setframerate(self, framerate: float) -> None: ... + def getframerate(self) -> int: ... + def setnframes(self, nframes: int) -> None: ... + def getnframes(self) -> int: ... + def setcomptype(self, comptype: str, compname: str) -> None: ... + def getcomptype(self) -> str: ... + def getcompname(self) -> str: ... + def setparams(self, params: _wave_params | tuple[int, int, int, int, str, str]) -> None: ... + def getparams(self) -> _wave_params: ... 
+ if sys.version_info >= (3, 13): + @deprecated("Deprecated since Python 3.13; will be removed in Python 3.15.") + def setmark(self, id: Any, pos: Any, name: Any) -> NoReturn: ... + @deprecated("Deprecated since Python 3.13; will be removed in Python 3.15.") + def getmark(self, id: Any) -> NoReturn: ... + @deprecated("Deprecated since Python 3.13; will be removed in Python 3.15.") + def getmarkers(self) -> None: ... + else: + def setmark(self, id: Any, pos: Any, name: Any) -> NoReturn: ... + def getmark(self, id: Any) -> NoReturn: ... + def getmarkers(self) -> None: ... + + def tell(self) -> int: ... + def writeframesraw(self, data: ReadableBuffer) -> None: ... + def writeframes(self, data: ReadableBuffer) -> None: ... + def close(self) -> None: ... + +@overload +def open(f: _File, mode: Literal["r", "rb"]) -> Wave_read: ... +@overload +def open(f: _File, mode: Literal["w", "wb"]) -> Wave_write: ... +@overload +def open(f: _File, mode: str | None = None) -> Any: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/weakref.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/weakref.pyi new file mode 100644 index 0000000..76ab86b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/weakref.pyi @@ -0,0 +1,198 @@ +from _typeshed import SupportsKeysAndGetItem +from _weakref import getweakrefcount as getweakrefcount, getweakrefs as getweakrefs, proxy as proxy +from _weakrefset import WeakSet as WeakSet +from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping +from types import GenericAlias +from typing import Any, ClassVar, Generic, TypeVar, final, overload +from typing_extensions import ParamSpec, Self, disjoint_base + +__all__ = [ + "ref", + "proxy", + "getweakrefcount", + "getweakrefs", + "WeakKeyDictionary", + "ReferenceType", + "ProxyType", + "CallableProxyType", + "ProxyTypes", + "WeakValueDictionary", + "WeakSet", + "WeakMethod", + "finalize", +] + +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") +_CallableT = TypeVar("_CallableT", bound=Callable[..., Any]) +_P = ParamSpec("_P") + +ProxyTypes: tuple[type[Any], ...] + +# These classes are implemented in C and imported from _weakref at runtime. However, +# they consider themselves to live in the weakref module for sys.version_info >= (3, 11), +# so defining their stubs here means we match their __module__ value. +# Prior to 3.11 they did not declare a module for themselves and ended up looking like they +# came from the builtin module at runtime, which was just wrong, and we won't attempt to +# duplicate that. + +@final +class CallableProxyType(Generic[_CallableT]): # "weakcallableproxy" + def __eq__(self, value: object, /) -> bool: ... + def __getattr__(self, attr: str) -> Any: ... + __call__: _CallableT + __hash__: ClassVar[None] # type: ignore[assignment] + +@final +class ProxyType(Generic[_T]): # "weakproxy" + def __eq__(self, value: object, /) -> bool: ... + def __getattr__(self, attr: str) -> Any: ... + __hash__: ClassVar[None] # type: ignore[assignment] + +@disjoint_base +class ReferenceType(Generic[_T]): # "weakref" + __callback__: Callable[[Self], Any] + def __new__(cls, o: _T, callback: Callable[[Self], Any] | None = ..., /) -> Self: ... + def __call__(self) -> _T | None: ... + def __eq__(self, value: object, /) -> bool: ... + def __hash__(self) -> int: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
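An illustrative look at ReferenceType/ref as typed above: the reference returns None once its referent is gone, and the optional callback fires at collection time.

import gc
import weakref

class Node:  # plain user-defined class; its instances support weak references
    pass

obj = Node()
r = weakref.ref(obj, lambda dead_ref: print("referent collected"))
print(r() is obj)   # True while obj is alive
del obj
gc.collect()        # CPython frees it on del already; collect() just makes the point explicit
print(r())          # None after collection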
+ +ref = ReferenceType + +# everything below here is implemented in weakref.py + +class WeakMethod(ref[_CallableT]): + __slots__ = ("_func_ref", "_meth_type", "_alive", "__weakref__") + def __new__(cls, meth: _CallableT, callback: Callable[[Self], Any] | None = None) -> Self: ... + def __call__(self) -> _CallableT | None: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + +class WeakValueDictionary(MutableMapping[_KT, _VT]): + @overload + def __init__(self) -> None: ... + @overload + def __init__( + self: WeakValueDictionary[_KT, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780 + other: Mapping[_KT, _VT] | Iterable[tuple[_KT, _VT]], + /, + ) -> None: ... + @overload + def __init__( + self: WeakValueDictionary[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780 + other: Mapping[str, _VT] | Iterable[tuple[str, _VT]] = (), + /, + **kwargs: _VT, + ) -> None: ... + def __len__(self) -> int: ... + def __getitem__(self, key: _KT) -> _VT: ... + def __setitem__(self, key: _KT, value: _VT) -> None: ... + def __delitem__(self, key: _KT) -> None: ... + def __contains__(self, key: object) -> bool: ... + def __iter__(self) -> Iterator[_KT]: ... + def copy(self) -> WeakValueDictionary[_KT, _VT]: ... + __copy__ = copy + def __deepcopy__(self, memo: Any) -> Self: ... + @overload + def get(self, key: _KT, default: None = None) -> _VT | None: ... + @overload + def get(self, key: _KT, default: _VT) -> _VT: ... + @overload + def get(self, key: _KT, default: _T) -> _VT | _T: ... + # These are incompatible with Mapping + def keys(self) -> Iterator[_KT]: ... # type: ignore[override] + def values(self) -> Iterator[_VT]: ... # type: ignore[override] + def items(self) -> Iterator[tuple[_KT, _VT]]: ... # type: ignore[override] + def itervaluerefs(self) -> Iterator[KeyedRef[_KT, _VT]]: ... + def valuerefs(self) -> list[KeyedRef[_KT, _VT]]: ... + def setdefault(self, key: _KT, default: _VT) -> _VT: ... + @overload + def pop(self, key: _KT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _VT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _T) -> _VT | _T: ... + @overload + def update(self, other: SupportsKeysAndGetItem[_KT, _VT], /, **kwargs: _VT) -> None: ... + @overload + def update(self, other: Iterable[tuple[_KT, _VT]], /, **kwargs: _VT) -> None: ... + @overload + def update(self, other: None = None, /, **kwargs: _VT) -> None: ... + def __or__(self, other: Mapping[_T1, _T2]) -> WeakValueDictionary[_KT | _T1, _VT | _T2]: ... + def __ror__(self, other: Mapping[_T1, _T2]) -> WeakValueDictionary[_KT | _T1, _VT | _T2]: ... + # WeakValueDictionary.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... + +class KeyedRef(ref[_T], Generic[_KT, _T]): + __slots__ = ("key",) + key: _KT + def __new__(type, ob: _T, callback: Callable[[Self], Any], key: _KT) -> Self: ... + def __init__(self, ob: _T, callback: Callable[[Self], Any], key: _KT) -> None: ... + +class WeakKeyDictionary(MutableMapping[_KT, _VT]): + @overload + def __init__(self, dict: None = None) -> None: ... + @overload + def __init__(self, dict: Mapping[_KT, _VT] | Iterable[tuple[_KT, _VT]]) -> None: ... + def __len__(self) -> int: ... + def __getitem__(self, key: _KT) -> _VT: ... + def __setitem__(self, key: _KT, value: _VT) -> None: ... 
+ def __delitem__(self, key: _KT) -> None: ... + def __contains__(self, key: object) -> bool: ... + def __iter__(self) -> Iterator[_KT]: ... + def copy(self) -> WeakKeyDictionary[_KT, _VT]: ... + __copy__ = copy + def __deepcopy__(self, memo: Any) -> Self: ... + @overload + def get(self, key: _KT, default: None = None) -> _VT | None: ... + @overload + def get(self, key: _KT, default: _VT) -> _VT: ... + @overload + def get(self, key: _KT, default: _T) -> _VT | _T: ... + # These are incompatible with Mapping + def keys(self) -> Iterator[_KT]: ... # type: ignore[override] + def values(self) -> Iterator[_VT]: ... # type: ignore[override] + def items(self) -> Iterator[tuple[_KT, _VT]]: ... # type: ignore[override] + def keyrefs(self) -> list[ref[_KT]]: ... + # Keep WeakKeyDictionary.setdefault in line with MutableMapping.setdefault, modulo positional-only differences + @overload + def setdefault(self: WeakKeyDictionary[_KT, _VT | None], key: _KT, default: None = None) -> _VT: ... + @overload + def setdefault(self, key: _KT, default: _VT) -> _VT: ... + @overload + def pop(self, key: _KT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _VT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _T) -> _VT | _T: ... + @overload + def update(self, dict: SupportsKeysAndGetItem[_KT, _VT], /, **kwargs: _VT) -> None: ... + @overload + def update(self, dict: Iterable[tuple[_KT, _VT]], /, **kwargs: _VT) -> None: ... + @overload + def update(self, dict: None = None, /, **kwargs: _VT) -> None: ... + def __or__(self, other: Mapping[_T1, _T2]) -> WeakKeyDictionary[_KT | _T1, _VT | _T2]: ... + def __ror__(self, other: Mapping[_T1, _T2]) -> WeakKeyDictionary[_KT | _T1, _VT | _T2]: ... + # WeakKeyDictionary.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... + +class finalize(Generic[_P, _T]): + __slots__ = () + def __init__(self, obj: _T, func: Callable[_P, Any], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... + def __call__(self, _: Any = None) -> Any | None: ... + def detach(self) -> tuple[_T, Callable[_P, Any], tuple[Any, ...], dict[str, Any]] | None: ... + def peek(self) -> tuple[_T, Callable[_P, Any], tuple[Any, ...], dict[str, Any]] | None: ... + @property + def alive(self) -> bool: ... + atexit: bool diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/webbrowser.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/webbrowser.pyi new file mode 100644 index 0000000..56c30f8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/webbrowser.pyi @@ -0,0 +1,84 @@ +import sys +from abc import abstractmethod +from collections.abc import Callable, Sequence +from typing import Literal +from typing_extensions import deprecated + +__all__ = ["Error", "open", "open_new", "open_new_tab", "get", "register"] + +class Error(Exception): ... + +def register( + name: str, klass: Callable[[], BaseBrowser] | None, instance: BaseBrowser | None = None, *, preferred: bool = False +) -> None: ... +def get(using: str | None = None) -> BaseBrowser: ... +def open(url: str, new: int = 0, autoraise: bool = True) -> bool: ... +def open_new(url: str) -> bool: ... +def open_new_tab(url: str) -> bool: ... + +class BaseBrowser: + args: list[str] + name: str + basename: str + def __init__(self, name: str = "") -> None: ... 
+ @abstractmethod + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... + def open_new(self, url: str) -> bool: ... + def open_new_tab(self, url: str) -> bool: ... + +class GenericBrowser(BaseBrowser): + def __init__(self, name: str | Sequence[str]) -> None: ... + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... + +class BackgroundBrowser(GenericBrowser): ... + +class UnixBrowser(BaseBrowser): + def open(self, url: str, new: Literal[0, 1, 2] = 0, autoraise: bool = True) -> bool: ... # type: ignore[override] + raise_opts: list[str] | None + background: bool + redirect_stdout: bool + remote_args: list[str] + remote_action: str + remote_action_newwin: str + remote_action_newtab: str + +class Mozilla(UnixBrowser): ... + +if sys.version_info < (3, 12): + class Galeon(UnixBrowser): + raise_opts: list[str] + + class Grail(BaseBrowser): + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... + +class Chrome(UnixBrowser): ... +class Opera(UnixBrowser): ... +class Elinks(UnixBrowser): ... + +class Konqueror(BaseBrowser): + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... + +if sys.platform == "win32": + class WindowsDefault(BaseBrowser): + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... + +if sys.platform == "darwin": + if sys.version_info < (3, 13): + if sys.version_info >= (3, 11): + @deprecated("Deprecated since Python 3.11; removed in Python 3.13.") + class MacOSX(BaseBrowser): + def __init__(self, name: str) -> None: ... + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... + + else: + class MacOSX(BaseBrowser): + def __init__(self, name: str) -> None: ... + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... + + class MacOSXOSAScript(BaseBrowser): # In runtime this class does not have `name` and `basename` + if sys.version_info >= (3, 11): + def __init__(self, name: str = "default") -> None: ... + else: + def __init__(self, name: str) -> None: ... + + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/winreg.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/winreg.pyi new file mode 100644 index 0000000..a654bbc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/winreg.pyi @@ -0,0 +1,132 @@ +import sys +from _typeshed import ReadableBuffer, Unused +from types import TracebackType +from typing import Any, Final, Literal, final, overload +from typing_extensions import Self, TypeAlias + +if sys.platform == "win32": + _KeyType: TypeAlias = HKEYType | int + def CloseKey(hkey: _KeyType, /) -> None: ... + def ConnectRegistry(computer_name: str | None, key: _KeyType, /) -> HKEYType: ... + def CreateKey(key: _KeyType, sub_key: str | None, /) -> HKEYType: ... + def CreateKeyEx(key: _KeyType, sub_key: str | None, reserved: int = 0, access: int = 131078) -> HKEYType: ... + def DeleteKey(key: _KeyType, sub_key: str, /) -> None: ... + def DeleteKeyEx(key: _KeyType, sub_key: str, access: int = 256, reserved: int = 0) -> None: ... + def DeleteValue(key: _KeyType, value: str, /) -> None: ... + def EnumKey(key: _KeyType, index: int, /) -> str: ... + def EnumValue(key: _KeyType, index: int, /) -> tuple[str, Any, int]: ... + def ExpandEnvironmentStrings(string: str, /) -> str: ... + def FlushKey(key: _KeyType, /) -> None: ... + def LoadKey(key: _KeyType, sub_key: str, file_name: str, /) -> None: ... 
+ def OpenKey(key: _KeyType, sub_key: str | None, reserved: int = 0, access: int = 131097) -> HKEYType: ... + def OpenKeyEx(key: _KeyType, sub_key: str | None, reserved: int = 0, access: int = 131097) -> HKEYType: ... + def QueryInfoKey(key: _KeyType, /) -> tuple[int, int, int]: ... + def QueryValue(key: _KeyType, sub_key: str | None, /) -> str: ... + def QueryValueEx(key: _KeyType, name: str, /) -> tuple[Any, int]: ... + def SaveKey(key: _KeyType, file_name: str, /) -> None: ... + def SetValue(key: _KeyType, sub_key: str | None, type: int, value: str, /) -> None: ... + @overload # type=REG_DWORD|REG_QWORD + def SetValueEx( + key: _KeyType, value_name: str | None, reserved: Unused, type: Literal[4, 5], value: int | None, / + ) -> None: ... + @overload # type=REG_SZ|REG_EXPAND_SZ + def SetValueEx( + key: _KeyType, value_name: str | None, reserved: Unused, type: Literal[1, 2], value: str | None, / + ) -> None: ... + @overload # type=REG_MULTI_SZ + def SetValueEx( + key: _KeyType, value_name: str | None, reserved: Unused, type: Literal[7], value: list[str] | None, / + ) -> None: ... + @overload # type=REG_BINARY and everything else + def SetValueEx( + key: _KeyType, + value_name: str | None, + reserved: Unused, + type: Literal[0, 3, 8, 9, 10, 11], + value: ReadableBuffer | None, + /, + ) -> None: ... + @overload # Unknown or undocumented + def SetValueEx( + key: _KeyType, + value_name: str | None, + reserved: Unused, + type: int, + value: int | str | list[str] | ReadableBuffer | None, + /, + ) -> None: ... + def DisableReflectionKey(key: _KeyType, /) -> None: ... + def EnableReflectionKey(key: _KeyType, /) -> None: ... + def QueryReflectionKey(key: _KeyType, /) -> bool: ... + + HKEY_CLASSES_ROOT: Final[int] + HKEY_CURRENT_USER: Final[int] + HKEY_LOCAL_MACHINE: Final[int] + HKEY_USERS: Final[int] + HKEY_PERFORMANCE_DATA: Final[int] + HKEY_CURRENT_CONFIG: Final[int] + HKEY_DYN_DATA: Final[int] + + KEY_ALL_ACCESS: Final = 983103 + KEY_WRITE: Final = 131078 + KEY_READ: Final = 131097 + KEY_EXECUTE: Final = 131097 + KEY_QUERY_VALUE: Final = 1 + KEY_SET_VALUE: Final = 2 + KEY_CREATE_SUB_KEY: Final = 4 + KEY_ENUMERATE_SUB_KEYS: Final = 8 + KEY_NOTIFY: Final = 16 + KEY_CREATE_LINK: Final = 32 + + KEY_WOW64_64KEY: Final = 256 + KEY_WOW64_32KEY: Final = 512 + + REG_BINARY: Final = 3 + REG_DWORD: Final = 4 + REG_DWORD_LITTLE_ENDIAN: Final = 4 + REG_DWORD_BIG_ENDIAN: Final = 5 + REG_EXPAND_SZ: Final = 2 + REG_LINK: Final = 6 + REG_MULTI_SZ: Final = 7 + REG_NONE: Final = 0 + REG_QWORD: Final = 11 + REG_QWORD_LITTLE_ENDIAN: Final = 11 + REG_RESOURCE_LIST: Final = 8 + REG_FULL_RESOURCE_DESCRIPTOR: Final = 9 + REG_RESOURCE_REQUIREMENTS_LIST: Final = 10 + REG_SZ: Final = 1 + + REG_CREATED_NEW_KEY: Final = 1 # undocumented + REG_LEGAL_CHANGE_FILTER: Final = 268435471 # undocumented + REG_LEGAL_OPTION: Final = 31 # undocumented + REG_NOTIFY_CHANGE_ATTRIBUTES: Final = 2 # undocumented + REG_NOTIFY_CHANGE_LAST_SET: Final = 4 # undocumented + REG_NOTIFY_CHANGE_NAME: Final = 1 # undocumented + REG_NOTIFY_CHANGE_SECURITY: Final = 8 # undocumented + REG_NO_LAZY_FLUSH: Final = 4 # undocumented + REG_OPENED_EXISTING_KEY: Final = 2 # undocumented + REG_OPTION_BACKUP_RESTORE: Final = 4 # undocumented + REG_OPTION_CREATE_LINK: Final = 2 # undocumented + REG_OPTION_NON_VOLATILE: Final = 0 # undocumented + REG_OPTION_OPEN_LINK: Final = 8 # undocumented + REG_OPTION_RESERVED: Final = 0 # undocumented + REG_OPTION_VOLATILE: Final = 1 # undocumented + REG_REFRESH_HIVE: Final = 2 # undocumented + REG_WHOLE_HIVE_VOLATILE: Final 
= 1 # undocumented + + error = OSError + + # Though this class has a __name__ of PyHKEY, it's exposed as HKEYType for some reason + @final + class HKEYType: + def __bool__(self) -> bool: ... + def __int__(self) -> int: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / + ) -> bool | None: ... + def Close(self) -> None: ... + def Detach(self) -> int: ... + def __hash__(self) -> int: ... + @property + def handle(self) -> int: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/winsound.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/winsound.pyi new file mode 100644 index 0000000..39dfa7b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/winsound.pyi @@ -0,0 +1,38 @@ +import sys +from _typeshed import ReadableBuffer +from typing import Final, Literal, overload + +if sys.platform == "win32": + SND_APPLICATION: Final = 128 + SND_FILENAME: Final = 131072 + SND_ALIAS: Final = 65536 + SND_LOOP: Final = 8 + SND_MEMORY: Final = 4 + SND_PURGE: Final = 64 + SND_ASYNC: Final = 1 + SND_NODEFAULT: Final = 2 + SND_NOSTOP: Final = 16 + SND_NOWAIT: Final = 8192 + if sys.version_info >= (3, 14): + SND_SENTRY: Final = 524288 + SND_SYNC: Final = 0 + SND_SYSTEM: Final = 2097152 + + MB_ICONASTERISK: Final = 64 + MB_ICONEXCLAMATION: Final = 48 + MB_ICONHAND: Final = 16 + MB_ICONQUESTION: Final = 32 + MB_OK: Final = 0 + if sys.version_info >= (3, 14): + MB_ICONERROR: Final = 16 + MB_ICONINFORMATION: Final = 64 + MB_ICONSTOP: Final = 16 + MB_ICONWARNING: Final = 48 + + def Beep(frequency: int, duration: int) -> None: ... + # Can actually accept anything ORed with 4, and if not it's definitely str, but that's inexpressible + @overload + def PlaySound(sound: ReadableBuffer | None, flags: Literal[4]) -> None: ... + @overload + def PlaySound(sound: str | ReadableBuffer | None, flags: int) -> None: ... + def MessageBeep(type: int = 0) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/__init__.pyi new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/handlers.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/handlers.pyi new file mode 100644 index 0000000..ebead54 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/handlers.pyi @@ -0,0 +1,91 @@ +from _typeshed import OptExcInfo +from _typeshed.wsgi import ErrorStream, InputStream, StartResponse, WSGIApplication, WSGIEnvironment +from abc import abstractmethod +from collections.abc import Callable, MutableMapping +from typing import IO + +from .headers import Headers +from .util import FileWrapper + +__all__ = ["BaseHandler", "SimpleHandler", "BaseCGIHandler", "CGIHandler", "IISCGIHandler", "read_environ"] + +def format_date_time(timestamp: float | None) -> str: ... # undocumented +def read_environ() -> dict[str, str]: ... 
+ +class BaseHandler: + wsgi_version: tuple[int, int] # undocumented + wsgi_multithread: bool + wsgi_multiprocess: bool + wsgi_run_once: bool + + origin_server: bool + http_version: str + server_software: str | None + + os_environ: MutableMapping[str, str] + + wsgi_file_wrapper: type[FileWrapper] | None + headers_class: type[Headers] # undocumented + + traceback_limit: int | None + error_status: str + error_headers: list[tuple[str, str]] + error_body: bytes + def run(self, application: WSGIApplication) -> None: ... + def setup_environ(self) -> None: ... + def finish_response(self) -> None: ... + def get_scheme(self) -> str: ... + def set_content_length(self) -> None: ... + def cleanup_headers(self) -> None: ... + def start_response( + self, status: str, headers: list[tuple[str, str]], exc_info: OptExcInfo | None = None + ) -> Callable[[bytes], None]: ... + def send_preamble(self) -> None: ... + def write(self, data: bytes) -> None: ... + def sendfile(self) -> bool: ... + def finish_content(self) -> None: ... + def close(self) -> None: ... + def send_headers(self) -> None: ... + def result_is_file(self) -> bool: ... + def client_is_modern(self) -> bool: ... + def log_exception(self, exc_info: OptExcInfo) -> None: ... + def handle_error(self) -> None: ... + def error_output(self, environ: WSGIEnvironment, start_response: StartResponse) -> list[bytes]: ... + @abstractmethod + def _write(self, data: bytes) -> None: ... + @abstractmethod + def _flush(self) -> None: ... + @abstractmethod + def get_stdin(self) -> InputStream: ... + @abstractmethod + def get_stderr(self) -> ErrorStream: ... + @abstractmethod + def add_cgi_vars(self) -> None: ... + +class SimpleHandler(BaseHandler): + stdin: InputStream + stdout: IO[bytes] + stderr: ErrorStream + base_env: MutableMapping[str, str] + def __init__( + self, + stdin: InputStream, + stdout: IO[bytes], + stderr: ErrorStream, + environ: MutableMapping[str, str], + multithread: bool = True, + multiprocess: bool = False, + ) -> None: ... + def get_stdin(self) -> InputStream: ... + def get_stderr(self) -> ErrorStream: ... + def add_cgi_vars(self) -> None: ... + def _write(self, data: bytes) -> None: ... + def _flush(self) -> None: ... + +class BaseCGIHandler(SimpleHandler): ... + +class CGIHandler(BaseCGIHandler): + def __init__(self) -> None: ... + +class IISCGIHandler(BaseCGIHandler): + def __init__(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/headers.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/headers.pyi new file mode 100644 index 0000000..9febad4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/headers.pyi @@ -0,0 +1,26 @@ +from re import Pattern +from typing import Final, overload +from typing_extensions import TypeAlias + +_HeaderList: TypeAlias = list[tuple[str, str]] + +tspecials: Final[Pattern[str]] # undocumented + +class Headers: + def __init__(self, headers: _HeaderList | None = None) -> None: ... + def __len__(self) -> int: ... + def __setitem__(self, name: str, val: str) -> None: ... + def __delitem__(self, name: str) -> None: ... + def __getitem__(self, name: str) -> str | None: ... + def __contains__(self, name: str) -> bool: ... + def get_all(self, name: str) -> list[str]: ... + @overload + def get(self, name: str, default: str) -> str: ... + @overload + def get(self, name: str, default: str | None = None) -> str | None: ... + def keys(self) -> list[str]: ... + def values(self) -> list[str]: ... 
+ def items(self) -> _HeaderList: ... + def __bytes__(self) -> bytes: ... + def setdefault(self, name: str, value: str) -> str: ... + def add_header(self, _name: str, _value: str | None, **_params: str | None) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/simple_server.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/simple_server.pyi new file mode 100644 index 0000000..bdf5871 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/simple_server.pyi @@ -0,0 +1,37 @@ +from _typeshed.wsgi import ErrorStream, StartResponse, WSGIApplication, WSGIEnvironment +from http.server import BaseHTTPRequestHandler, HTTPServer +from typing import Final, TypeVar, overload + +from .handlers import SimpleHandler + +__all__ = ["WSGIServer", "WSGIRequestHandler", "demo_app", "make_server"] + +server_version: Final[str] # undocumented +sys_version: Final[str] # undocumented +software_version: Final[str] # undocumented + +class ServerHandler(SimpleHandler): # undocumented + server_software: str + +class WSGIServer(HTTPServer): + application: WSGIApplication | None + base_environ: WSGIEnvironment # only available after call to setup_environ() + def setup_environ(self) -> None: ... + def get_app(self) -> WSGIApplication | None: ... + def set_app(self, application: WSGIApplication | None) -> None: ... + +class WSGIRequestHandler(BaseHTTPRequestHandler): + server_version: str + def get_environ(self) -> WSGIEnvironment: ... + def get_stderr(self) -> ErrorStream: ... + +def demo_app(environ: WSGIEnvironment, start_response: StartResponse) -> list[bytes]: ... + +_S = TypeVar("_S", bound=WSGIServer) + +@overload +def make_server(host: str, port: int, app: WSGIApplication, *, handler_class: type[WSGIRequestHandler] = ...) -> WSGIServer: ... +@overload +def make_server( + host: str, port: int, app: WSGIApplication, server_class: type[_S], handler_class: type[WSGIRequestHandler] = ... +) -> _S: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/types.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/types.pyi new file mode 100644 index 0000000..57276fd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/types.pyi @@ -0,0 +1,32 @@ +from _typeshed import OptExcInfo +from collections.abc import Callable, Iterable, Iterator +from typing import Any, Protocol +from typing_extensions import TypeAlias + +__all__ = ["StartResponse", "WSGIEnvironment", "WSGIApplication", "InputStream", "ErrorStream", "FileWrapper"] + +class StartResponse(Protocol): + def __call__( + self, status: str, headers: list[tuple[str, str]], exc_info: OptExcInfo | None = ..., / + ) -> Callable[[bytes], object]: ... + +WSGIEnvironment: TypeAlias = dict[str, Any] +WSGIApplication: TypeAlias = Callable[[WSGIEnvironment, StartResponse], Iterable[bytes]] + +class InputStream(Protocol): + def read(self, size: int = ..., /) -> bytes: ... + def readline(self, size: int = ..., /) -> bytes: ... + def readlines(self, hint: int = ..., /) -> list[bytes]: ... + def __iter__(self) -> Iterator[bytes]: ... + +class ErrorStream(Protocol): + def flush(self) -> object: ... + def write(self, s: str, /) -> object: ... + def writelines(self, seq: list[str], /) -> object: ... + +class _Readable(Protocol): + def read(self, size: int = ..., /) -> bytes: ... + # Optional: def close(self) -> object: ... 
+ +class FileWrapper(Protocol): + def __call__(self, file: _Readable, block_size: int = ..., /) -> Iterable[bytes]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/util.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/util.pyi new file mode 100644 index 0000000..3966e17 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/util.pyi @@ -0,0 +1,26 @@ +import sys +from _typeshed.wsgi import WSGIEnvironment +from collections.abc import Callable +from typing import IO, Any + +__all__ = ["FileWrapper", "guess_scheme", "application_uri", "request_uri", "shift_path_info", "setup_testing_defaults"] +if sys.version_info >= (3, 13): + __all__ += ["is_hop_by_hop"] + +class FileWrapper: + filelike: IO[bytes] + blksize: int + close: Callable[[], None] # only exists if filelike.close exists + def __init__(self, filelike: IO[bytes], blksize: int = 8192) -> None: ... + if sys.version_info < (3, 11): + def __getitem__(self, key: Any) -> bytes: ... + + def __iter__(self) -> FileWrapper: ... + def __next__(self) -> bytes: ... + +def guess_scheme(environ: WSGIEnvironment) -> str: ... +def application_uri(environ: WSGIEnvironment) -> str: ... +def request_uri(environ: WSGIEnvironment, include_query: bool = True) -> str: ... +def shift_path_info(environ: WSGIEnvironment) -> str | None: ... +def setup_testing_defaults(environ: WSGIEnvironment) -> None: ... +def is_hop_by_hop(header_name: str) -> bool: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/validate.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/validate.pyi new file mode 100644 index 0000000..fa8a6bb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/wsgiref/validate.pyi @@ -0,0 +1,50 @@ +from _typeshed.wsgi import ErrorStream, InputStream, WSGIApplication +from collections.abc import Callable, Iterable, Iterator +from typing import Any, NoReturn +from typing_extensions import TypeAlias + +__all__ = ["validator"] + +class WSGIWarning(Warning): ... + +def validator(application: WSGIApplication) -> WSGIApplication: ... + +class InputWrapper: + input: InputStream + def __init__(self, wsgi_input: InputStream) -> None: ... + def read(self, size: int) -> bytes: ... + def readline(self, size: int = ...) -> bytes: ... + def readlines(self, hint: int = ...) -> bytes: ... + def __iter__(self) -> Iterator[bytes]: ... + def close(self) -> NoReturn: ... + +class ErrorWrapper: + errors: ErrorStream + def __init__(self, wsgi_errors: ErrorStream) -> None: ... + def write(self, s: str) -> None: ... + def flush(self) -> None: ... + def writelines(self, seq: Iterable[str]) -> None: ... + def close(self) -> NoReturn: ... + +_WriterCallback: TypeAlias = Callable[[bytes], Any] + +class WriteWrapper: + writer: _WriterCallback + def __init__(self, wsgi_writer: _WriterCallback) -> None: ... + def __call__(self, s: bytes) -> None: ... + +class PartialIteratorWrapper: + iterator: Iterator[bytes] + def __init__(self, wsgi_iterator: Iterator[bytes]) -> None: ... + def __iter__(self) -> IteratorWrapper: ... + +class IteratorWrapper: + original_iterator: Iterator[bytes] + iterator: Iterator[bytes] + closed: bool + check_start_response: bool | None + def __init__(self, wsgi_iterator: Iterator[bytes], check_start_response: bool | None) -> None: ... + def __iter__(self) -> IteratorWrapper: ... + def __next__(self) -> bytes: ... + def close(self) -> None: ... + def __del__(self) -> None: ... 
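For context, the wsgiref stubs above (types, util, simple_server, validate) together describe the standard library's reference WSGI server. A minimal sketch of how the pieces fit, using only stdlib calls that appear in these stubs (the host, port, and response body are arbitrary placeholder values):

from wsgiref.simple_server import make_server
from wsgiref.validate import validator

def app(environ, start_response):
    # Matches the WSGIApplication / StartResponse protocols stubbed above.
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"hello\n"]

if __name__ == "__main__":
    # validator() wraps the app with runtime WSGI-compliance checks;
    # make_server() returns a WSGIServer, usable as a context manager.
    with make_server("127.0.0.1", 8000, validator(app)) as server:
        server.handle_request()  # serve a single request, then exit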
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xdrlib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xdrlib.pyi new file mode 100644 index 0000000..78f3ece --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xdrlib.pyi @@ -0,0 +1,57 @@ +from collections.abc import Callable, Sequence +from typing import TypeVar + +__all__ = ["Error", "Packer", "Unpacker", "ConversionError"] + +_T = TypeVar("_T") + +class Error(Exception): + msg: str + def __init__(self, msg: str) -> None: ... + +class ConversionError(Error): ... + +class Packer: + def reset(self) -> None: ... + def get_buffer(self) -> bytes: ... + def get_buf(self) -> bytes: ... + def pack_uint(self, x: int) -> None: ... + def pack_int(self, x: int) -> None: ... + def pack_enum(self, x: int) -> None: ... + def pack_bool(self, x: bool) -> None: ... + def pack_uhyper(self, x: int) -> None: ... + def pack_hyper(self, x: int) -> None: ... + def pack_float(self, x: float) -> None: ... + def pack_double(self, x: float) -> None: ... + def pack_fstring(self, n: int, s: bytes) -> None: ... + def pack_fopaque(self, n: int, s: bytes) -> None: ... + def pack_string(self, s: bytes) -> None: ... + def pack_opaque(self, s: bytes) -> None: ... + def pack_bytes(self, s: bytes) -> None: ... + def pack_list(self, list: Sequence[_T], pack_item: Callable[[_T], object]) -> None: ... + def pack_farray(self, n: int, list: Sequence[_T], pack_item: Callable[[_T], object]) -> None: ... + def pack_array(self, list: Sequence[_T], pack_item: Callable[[_T], object]) -> None: ... + +class Unpacker: + def __init__(self, data: bytes) -> None: ... + def reset(self, data: bytes) -> None: ... + def get_position(self) -> int: ... + def set_position(self, position: int) -> None: ... + def get_buffer(self) -> bytes: ... + def done(self) -> None: ... + def unpack_uint(self) -> int: ... + def unpack_int(self) -> int: ... + def unpack_enum(self) -> int: ... + def unpack_bool(self) -> bool: ... + def unpack_uhyper(self) -> int: ... + def unpack_hyper(self) -> int: ... + def unpack_float(self) -> float: ... + def unpack_double(self) -> float: ... + def unpack_fstring(self, n: int) -> bytes: ... + def unpack_fopaque(self, n: int) -> bytes: ... + def unpack_string(self) -> bytes: ... + def unpack_opaque(self) -> bytes: ... + def unpack_bytes(self) -> bytes: ... + def unpack_list(self, unpack_item: Callable[[], _T]) -> list[_T]: ... + def unpack_farray(self, n: int, unpack_item: Callable[[], _T]) -> list[_T]: ... + def unpack_array(self, unpack_item: Callable[[], _T]) -> list[_T]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/__init__.pyi new file mode 100644 index 0000000..7a24096 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/__init__.pyi @@ -0,0 +1,3 @@ +# At runtime, listing submodules in __all__ without them being imported is +# valid, and causes them to be included in a star import. 
See #6523 +__all__ = ["dom", "parsers", "sax", "etree"] # noqa: F822 # pyright: ignore[reportUnsupportedDunderAll] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/NodeFilter.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/NodeFilter.pyi new file mode 100644 index 0000000..7b30137 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/NodeFilter.pyi @@ -0,0 +1,22 @@ +from typing import Final +from xml.dom.minidom import Node + +class NodeFilter: + FILTER_ACCEPT: Final = 1 + FILTER_REJECT: Final = 2 + FILTER_SKIP: Final = 3 + + SHOW_ALL: Final = 0xFFFFFFFF + SHOW_ELEMENT: Final = 0x00000001 + SHOW_ATTRIBUTE: Final = 0x00000002 + SHOW_TEXT: Final = 0x00000004 + SHOW_CDATA_SECTION: Final = 0x00000008 + SHOW_ENTITY_REFERENCE: Final = 0x00000010 + SHOW_ENTITY: Final = 0x00000020 + SHOW_PROCESSING_INSTRUCTION: Final = 0x00000040 + SHOW_COMMENT: Final = 0x00000080 + SHOW_DOCUMENT: Final = 0x00000100 + SHOW_DOCUMENT_TYPE: Final = 0x00000200 + SHOW_DOCUMENT_FRAGMENT: Final = 0x00000400 + SHOW_NOTATION: Final = 0x00000800 + def acceptNode(self, node: Node) -> int: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/__init__.pyi new file mode 100644 index 0000000..5dbb6c5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/__init__.pyi @@ -0,0 +1,101 @@ +from typing import Any, Final, Literal + +from .domreg import getDOMImplementation as getDOMImplementation, registerDOMImplementation as registerDOMImplementation + +class Node: + __slots__ = () + ELEMENT_NODE: Final = 1 + ATTRIBUTE_NODE: Final = 2 + TEXT_NODE: Final = 3 + CDATA_SECTION_NODE: Final = 4 + ENTITY_REFERENCE_NODE: Final = 5 + ENTITY_NODE: Final = 6 + PROCESSING_INSTRUCTION_NODE: Final = 7 + COMMENT_NODE: Final = 8 + DOCUMENT_NODE: Final = 9 + DOCUMENT_TYPE_NODE: Final = 10 + DOCUMENT_FRAGMENT_NODE: Final = 11 + NOTATION_NODE: Final = 12 + +# ExceptionCode +INDEX_SIZE_ERR: Final = 1 +DOMSTRING_SIZE_ERR: Final = 2 +HIERARCHY_REQUEST_ERR: Final = 3 +WRONG_DOCUMENT_ERR: Final = 4 +INVALID_CHARACTER_ERR: Final = 5 +NO_DATA_ALLOWED_ERR: Final = 6 +NO_MODIFICATION_ALLOWED_ERR: Final = 7 +NOT_FOUND_ERR: Final = 8 +NOT_SUPPORTED_ERR: Final = 9 +INUSE_ATTRIBUTE_ERR: Final = 10 +INVALID_STATE_ERR: Final = 11 +SYNTAX_ERR: Final = 12 +INVALID_MODIFICATION_ERR: Final = 13 +NAMESPACE_ERR: Final = 14 +INVALID_ACCESS_ERR: Final = 15 +VALIDATION_ERR: Final = 16 + +class DOMException(Exception): + code: int + def __init__(self, *args: Any, **kw: Any) -> None: ... + def _get_code(self) -> int: ... 
+ +class IndexSizeErr(DOMException): + code: Literal[1] + +class DomstringSizeErr(DOMException): + code: Literal[2] + +class HierarchyRequestErr(DOMException): + code: Literal[3] + +class WrongDocumentErr(DOMException): + code: Literal[4] + +class InvalidCharacterErr(DOMException): + code: Literal[5] + +class NoDataAllowedErr(DOMException): + code: Literal[6] + +class NoModificationAllowedErr(DOMException): + code: Literal[7] + +class NotFoundErr(DOMException): + code: Literal[8] + +class NotSupportedErr(DOMException): + code: Literal[9] + +class InuseAttributeErr(DOMException): + code: Literal[10] + +class InvalidStateErr(DOMException): + code: Literal[11] + +class SyntaxErr(DOMException): + code: Literal[12] + +class InvalidModificationErr(DOMException): + code: Literal[13] + +class NamespaceErr(DOMException): + code: Literal[14] + +class InvalidAccessErr(DOMException): + code: Literal[15] + +class ValidationErr(DOMException): + code: Literal[16] + +class UserDataHandler: + NODE_CLONED: Final = 1 + NODE_IMPORTED: Final = 2 + NODE_DELETED: Final = 3 + NODE_RENAMED: Final = 4 + +XML_NAMESPACE: Final = "http://www.w3.org/XML/1998/namespace" +XMLNS_NAMESPACE: Final = "http://www.w3.org/2000/xmlns/" +XHTML_NAMESPACE: Final = "http://www.w3.org/1999/xhtml" +EMPTY_NAMESPACE: Final[None] +EMPTY_PREFIX: Final[None] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/domreg.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/domreg.pyi new file mode 100644 index 0000000..346a4bf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/domreg.pyi @@ -0,0 +1,8 @@ +from _typeshed.xml import DOMImplementation +from collections.abc import Callable, Iterable + +well_known_implementations: dict[str, str] +registered: dict[str, Callable[[], DOMImplementation]] + +def registerDOMImplementation(name: str, factory: Callable[[], DOMImplementation]) -> None: ... +def getDOMImplementation(name: str | None = None, features: str | Iterable[tuple[str, str | None]] = ()) -> DOMImplementation: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi new file mode 100644 index 0000000..2b9ac88 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi @@ -0,0 +1,126 @@ +from _typeshed import ReadableBuffer, SupportsRead +from typing import Any, Final, NoReturn +from typing_extensions import TypeAlias +from xml.dom.minidom import Document, DocumentFragment, DOMImplementation, Element, Node, TypeInfo +from xml.dom.xmlbuilder import DOMBuilderFilter, Options +from xml.parsers.expat import XMLParserType + +_Model: TypeAlias = tuple[int, int, str | None, tuple[Any, ...]] # same as in pyexpat + +TEXT_NODE: Final = Node.TEXT_NODE +CDATA_SECTION_NODE: Final = Node.CDATA_SECTION_NODE +DOCUMENT_NODE: Final = Node.DOCUMENT_NODE +FILTER_ACCEPT: Final = DOMBuilderFilter.FILTER_ACCEPT +FILTER_REJECT: Final = DOMBuilderFilter.FILTER_REJECT +FILTER_SKIP: Final = DOMBuilderFilter.FILTER_SKIP +FILTER_INTERRUPT: Final = DOMBuilderFilter.FILTER_INTERRUPT +theDOMImplementation: DOMImplementation + +class ElementInfo: + __slots__ = ("_attr_info", "_model", "tagName") + tagName: str + def __init__(self, tagName: str, model: _Model | None = None) -> None: ... + def getAttributeType(self, aname: str) -> TypeInfo: ... + def getAttributeTypeNS(self, namespaceURI: str | None, localName: str) -> TypeInfo: ... 
+ def isElementContent(self) -> bool: ... + def isEmpty(self) -> bool: ... + def isId(self, aname: str) -> bool: ... + def isIdNS(self, euri: str, ename: str, auri: str, aname: str) -> bool: ... + +class ExpatBuilder: + document: Document # Created in self.reset() + curNode: DocumentFragment | Element | Document # Created in self.reset() + def __init__(self, options: Options | None = None) -> None: ... + def createParser(self) -> XMLParserType: ... + def getParser(self) -> XMLParserType: ... + def reset(self) -> None: ... + def install(self, parser: XMLParserType) -> None: ... + def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> Document: ... + def parseString(self, string: str | ReadableBuffer) -> Document: ... + def start_doctype_decl_handler( + self, doctypeName: str, systemId: str | None, publicId: str | None, has_internal_subset: bool + ) -> None: ... + def end_doctype_decl_handler(self) -> None: ... + def pi_handler(self, target: str, data: str) -> None: ... + def character_data_handler_cdata(self, data: str) -> None: ... + def character_data_handler(self, data: str) -> None: ... + def start_cdata_section_handler(self) -> None: ... + def end_cdata_section_handler(self) -> None: ... + def entity_decl_handler( + self, + entityName: str, + is_parameter_entity: bool, + value: str | None, + base: str | None, + systemId: str, + publicId: str | None, + notationName: str | None, + ) -> None: ... + def notation_decl_handler(self, notationName: str, base: str | None, systemId: str, publicId: str | None) -> None: ... + def comment_handler(self, data: str) -> None: ... + def external_entity_ref_handler(self, context: str, base: str | None, systemId: str | None, publicId: str | None) -> int: ... + def first_element_handler(self, name: str, attributes: list[str]) -> None: ... + def start_element_handler(self, name: str, attributes: list[str]) -> None: ... + def end_element_handler(self, name: str) -> None: ... + def element_decl_handler(self, name: str, model: _Model) -> None: ... + def attlist_decl_handler(self, elem: str, name: str, type: str, default: str | None, required: bool) -> None: ... + def xml_decl_handler(self, version: str, encoding: str | None, standalone: int) -> None: ... + +class FilterVisibilityController: + __slots__ = ("filter",) + filter: DOMBuilderFilter + def __init__(self, filter: DOMBuilderFilter) -> None: ... + def startContainer(self, node: Node) -> int: ... + def acceptNode(self, node: Node) -> int: ... + +class FilterCrutch: + __slots__ = ("_builder", "_level", "_old_start", "_old_end") + def __init__(self, builder: ExpatBuilder) -> None: ... + +class Rejecter(FilterCrutch): + __slots__ = () + def start_element_handler(self, *args: Any) -> None: ... + def end_element_handler(self, *args: Any) -> None: ... + +class Skipper(FilterCrutch): + __slots__ = () + def start_element_handler(self, *args: Any) -> None: ... + def end_element_handler(self, *args: Any) -> None: ... + +class FragmentBuilder(ExpatBuilder): + fragment: DocumentFragment | None + originalDocument: Document + context: Node + def __init__(self, context: Node, options: Options | None = None) -> None: ... + def reset(self) -> None: ... + def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> DocumentFragment: ... # type: ignore[override] + def parseString(self, string: ReadableBuffer | str) -> DocumentFragment: ... # type: ignore[override] + def external_entity_ref_handler(self, context: str, base: str | None, systemId: str | None, publicId: str | None) -> int: ... 
+ +class Namespaces: + def createParser(self) -> XMLParserType: ... + def install(self, parser: XMLParserType) -> None: ... + def start_namespace_decl_handler(self, prefix: str | None, uri: str) -> None: ... + def start_element_handler(self, name: str, attributes: list[str]) -> None: ... + def end_element_handler(self, name: str) -> None: ... # only exists if __debug__ + +class ExpatBuilderNS(Namespaces, ExpatBuilder): ... +class FragmentBuilderNS(Namespaces, FragmentBuilder): ... +class ParseEscape(Exception): ... + +class InternalSubsetExtractor(ExpatBuilder): + subset: str | list[str] | None = None + def getSubset(self) -> str: ... + def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> None: ... # type: ignore[override] + def parseString(self, string: str | ReadableBuffer) -> None: ... # type: ignore[override] + def start_doctype_decl_handler( # type: ignore[override] + self, name: str, publicId: str | None, systemId: str | None, has_internal_subset: bool + ) -> None: ... + def end_doctype_decl_handler(self) -> NoReturn: ... + def start_element_handler(self, name: str, attrs: list[str]) -> NoReturn: ... + +def parse(file: str | SupportsRead[ReadableBuffer | str], namespaces: bool = True) -> Document: ... +def parseString(string: str | ReadableBuffer, namespaces: bool = True) -> Document: ... +def parseFragment(file: str | SupportsRead[ReadableBuffer | str], context: Node, namespaces: bool = True) -> DocumentFragment: ... +def parseFragmentString(string: str | ReadableBuffer, context: Node, namespaces: bool = True) -> DocumentFragment: ... +def makeBuilder(options: Options) -> ExpatBuilderNS | ExpatBuilder: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/minicompat.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/minicompat.pyi new file mode 100644 index 0000000..6fcaee0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/minicompat.pyi @@ -0,0 +1,24 @@ +from collections.abc import Iterable +from typing import Any, Literal, TypeVar + +__all__ = ["NodeList", "EmptyNodeList", "StringTypes", "defproperty"] + +_T = TypeVar("_T") + +StringTypes: tuple[type[str]] + +class NodeList(list[_T]): + __slots__ = () + @property + def length(self) -> int: ... + def item(self, index: int) -> _T | None: ... + +class EmptyNodeList(tuple[()]): + __slots__ = () + @property + def length(self) -> Literal[0]: ... + def item(self, index: int) -> None: ... + def __add__(self, other: Iterable[_T]) -> NodeList[_T]: ... # type: ignore[override] + def __radd__(self, other: Iterable[_T]) -> NodeList[_T]: ... + +def defproperty(klass: type[Any], name: str, doc: str) -> None: ... 
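For context before the minidom stubs that follow: xml.dom.minidom.parseString builds a Document whose nodes are instances of the classes typed below, with the expatbuilder functions above acting as the parser machinery behind it. A small illustrative sketch (the XML snippet is an arbitrary example):

from xml.dom.minidom import parseString

doc = parseString('<root><item id="1">hi</item></root>')
item = doc.getElementsByTagName("item")[0]   # an Element from a NodeList
print(item.getAttribute("id"))               # -> "1"
print(item.firstChild.nodeValue)             # -> "hi" (a Text child node)
doc.unlink()                                 # break internal reference cycles when done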
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/minidom.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/minidom.pyi new file mode 100644 index 0000000..e043141 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/minidom.pyi @@ -0,0 +1,678 @@ +import xml.dom +from _collections_abc import dict_keys, dict_values +from _typeshed import Incomplete, ReadableBuffer, SupportsRead, SupportsWrite +from collections.abc import Iterable, Sequence +from types import TracebackType +from typing import Any, ClassVar, Generic, Literal, NoReturn, Protocol, TypeVar, overload, type_check_only +from typing_extensions import Self, TypeAlias +from xml.dom.minicompat import EmptyNodeList, NodeList +from xml.dom.xmlbuilder import DocumentLS, DOMImplementationLS +from xml.sax.xmlreader import XMLReader + +_NSName: TypeAlias = tuple[str | None, str] + +# Entity can also have children, but it's not implemented the same way as the +# others, so is deliberately omitted here. +_NodesWithChildren: TypeAlias = DocumentFragment | Attr | Element | Document +_NodesThatAreChildren: TypeAlias = CDATASection | Comment | DocumentType | Element | Notation | ProcessingInstruction | Text + +_AttrChildren: TypeAlias = Text # Also EntityReference, but we don't implement it +_ElementChildren: TypeAlias = Element | ProcessingInstruction | Comment | Text | CDATASection +_EntityChildren: TypeAlias = Text # I think; documentation is a little unclear +_DocumentFragmentChildren: TypeAlias = Element | Text | CDATASection | ProcessingInstruction | Comment | Notation +_DocumentChildren: TypeAlias = Comment | DocumentType | Element | ProcessingInstruction + +_N = TypeVar("_N", bound=Node) +_ChildNodeVar = TypeVar("_ChildNodeVar", bound=_NodesThatAreChildren) +_ChildNodePlusFragmentVar = TypeVar("_ChildNodePlusFragmentVar", bound=_NodesThatAreChildren | DocumentFragment) +_DocumentChildrenVar = TypeVar("_DocumentChildrenVar", bound=_DocumentChildren) +_ImportableNodeVar = TypeVar( + "_ImportableNodeVar", + bound=DocumentFragment + | Attr + | Element + | ProcessingInstruction + | CharacterData + | Text + | Comment + | CDATASection + | Entity + | Notation, +) + +@type_check_only +class _DOMErrorHandler(Protocol): + def handleError(self, error: Exception) -> bool: ... + +@type_check_only +class _UserDataHandler(Protocol): + def handle(self, operation: int, key: str, data: Any, src: Node, dst: Node) -> None: ... + +def parse( + file: str | SupportsRead[ReadableBuffer | str], parser: XMLReader | None = None, bufsize: int | None = None +) -> Document: ... +def parseString(string: str | ReadableBuffer, parser: XMLReader | None = None) -> Document: ... +@overload +def getDOMImplementation(features: None = None) -> DOMImplementation: ... +@overload +def getDOMImplementation(features: str | Iterable[tuple[str, str | None]]) -> DOMImplementation | None: ... + +class Node(xml.dom.Node): + parentNode: _NodesWithChildren | Entity | None + ownerDocument: Document | None + nextSibling: _NodesThatAreChildren | None + previousSibling: _NodesThatAreChildren | None + namespaceURI: str | None # non-null only for Element and Attr + prefix: str | None # non-null only for NS Element and Attr + + # These aren't defined on Node, but they exist on all Node subclasses + # and various methods of Node require them to exist. 
+ childNodes: ( + NodeList[_DocumentFragmentChildren] + | NodeList[_AttrChildren] + | NodeList[_ElementChildren] + | NodeList[_DocumentChildren] + | NodeList[_EntityChildren] + | EmptyNodeList + ) + nodeType: ClassVar[Literal[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]] + nodeName: str | None # only possibly None on DocumentType + + # Not defined on Node, but exist on all Node subclasses. + nodeValue: str | None # non-null for Attr, ProcessingInstruction, Text, Comment, and CDATASection + attributes: NamedNodeMap | None # non-null only for Element + + @property + def firstChild(self) -> _NodesThatAreChildren | None: ... + @property + def lastChild(self) -> _NodesThatAreChildren | None: ... + @property + def localName(self) -> str | None: ... # non-null only for Element and Attr + def __bool__(self) -> Literal[True]: ... + @overload + def toxml(self, encoding: str, standalone: bool | None = None) -> bytes: ... + @overload + def toxml(self, encoding: None = None, standalone: bool | None = None) -> str: ... + @overload + def toprettyxml( + self, + indent: str = "\t", + newl: str = "\n", + # Handle any case where encoding is not provided or where it is passed with None + encoding: None = None, + standalone: bool | None = None, + ) -> str: ... + @overload + def toprettyxml( + self, + indent: str, + newl: str, + # Handle cases where encoding is passed as str *positionally* + encoding: str, + standalone: bool | None = None, + ) -> bytes: ... + @overload + def toprettyxml( + self, + indent: str = "\t", + newl: str = "\n", + # Handle all cases where encoding is passed as a keyword argument; because standalone + # comes after, it will also have to be a keyword arg if encoding is + *, + encoding: str, + standalone: bool | None = None, + ) -> bytes: ... + def hasChildNodes(self) -> bool: ... + def insertBefore( # type: ignore[misc] + self: _NodesWithChildren, # pyright: ignore[reportGeneralTypeIssues] + newChild: _ChildNodePlusFragmentVar, + refChild: _NodesThatAreChildren | None, + ) -> _ChildNodePlusFragmentVar: ... + def appendChild( # type: ignore[misc] + self: _NodesWithChildren, node: _ChildNodePlusFragmentVar # pyright: ignore[reportGeneralTypeIssues] + ) -> _ChildNodePlusFragmentVar: ... + @overload + def replaceChild( # type: ignore[misc] + self: _NodesWithChildren, newChild: DocumentFragment, oldChild: _ChildNodeVar + ) -> _ChildNodeVar | DocumentFragment: ... + @overload + def replaceChild( # type: ignore[misc] + self: _NodesWithChildren, newChild: _NodesThatAreChildren, oldChild: _ChildNodeVar + ) -> _ChildNodeVar | None: ... + def removeChild(self: _NodesWithChildren, oldChild: _ChildNodeVar) -> _ChildNodeVar: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def normalize(self: _NodesWithChildren) -> None: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def cloneNode(self, deep: bool) -> Self | None: ... + def isSupported(self, feature: str, version: str | None) -> bool: ... + def isSameNode(self, other: Node) -> bool: ... + def getInterface(self, feature: str) -> Self | None: ... + def getUserData(self, key: str) -> Any | None: ... + def setUserData(self, key: str, data: Any, handler: _UserDataHandler) -> Any: ... + def unlink(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, et: type[BaseException] | None, ev: BaseException | None, tb: TracebackType | None) -> None: ... 
+ +_DFChildrenVar = TypeVar("_DFChildrenVar", bound=_DocumentFragmentChildren) +_DFChildrenPlusFragment = TypeVar("_DFChildrenPlusFragment", bound=_DocumentFragmentChildren | DocumentFragment) + +class DocumentFragment(Node): + nodeType: ClassVar[Literal[11]] + nodeName: Literal["#document-fragment"] + nodeValue: None + attributes: None + + parentNode: None + nextSibling: None + previousSibling: None + childNodes: NodeList[_DocumentFragmentChildren] + @property + def firstChild(self) -> _DocumentFragmentChildren | None: ... + @property + def lastChild(self) -> _DocumentFragmentChildren | None: ... + + namespaceURI: None + prefix: None + @property + def localName(self) -> None: ... + def __init__(self) -> None: ... + def insertBefore( # type: ignore[override] + self, newChild: _DFChildrenPlusFragment, refChild: _DocumentFragmentChildren | None + ) -> _DFChildrenPlusFragment: ... + def appendChild(self, node: _DFChildrenPlusFragment) -> _DFChildrenPlusFragment: ... # type: ignore[override] + @overload # type: ignore[override] + def replaceChild(self, newChild: DocumentFragment, oldChild: _DFChildrenVar) -> _DFChildrenVar | DocumentFragment: ... + @overload + def replaceChild(self, newChild: _DocumentFragmentChildren, oldChild: _DFChildrenVar) -> _DFChildrenVar | None: ... # type: ignore[override] + def removeChild(self, oldChild: _DFChildrenVar) -> _DFChildrenVar: ... # type: ignore[override] + +_AttrChildrenVar = TypeVar("_AttrChildrenVar", bound=_AttrChildren) +_AttrChildrenPlusFragment = TypeVar("_AttrChildrenPlusFragment", bound=_AttrChildren | DocumentFragment) + +class Attr(Node): + __slots__ = ("_name", "_value", "namespaceURI", "_prefix", "childNodes", "_localName", "ownerDocument", "ownerElement") + nodeType: ClassVar[Literal[2]] + nodeName: str # same as Attr.name + nodeValue: str # same as Attr.value + attributes: None + + parentNode: None + nextSibling: None + previousSibling: None + childNodes: NodeList[_AttrChildren] + @property + def firstChild(self) -> _AttrChildren | None: ... + @property + def lastChild(self) -> _AttrChildren | None: ... + + namespaceURI: str | None + prefix: str | None + @property + def localName(self) -> str: ... + + name: str + value: str + specified: bool + ownerElement: Element | None + + def __init__( + self, qName: str, namespaceURI: str | None = None, localName: str | None = None, prefix: str | None = None + ) -> None: ... + def unlink(self) -> None: ... + @property + def isId(self) -> bool: ... + @property + def schemaType(self) -> TypeInfo: ... + def insertBefore(self, newChild: _AttrChildrenPlusFragment, refChild: _AttrChildren | None) -> _AttrChildrenPlusFragment: ... # type: ignore[override] + def appendChild(self, node: _AttrChildrenPlusFragment) -> _AttrChildrenPlusFragment: ... # type: ignore[override] + @overload # type: ignore[override] + def replaceChild(self, newChild: DocumentFragment, oldChild: _AttrChildrenVar) -> _AttrChildrenVar | DocumentFragment: ... + @overload + def replaceChild(self, newChild: _AttrChildren, oldChild: _AttrChildrenVar) -> _AttrChildrenVar | None: ... # type: ignore[override] + def removeChild(self, oldChild: _AttrChildrenVar) -> _AttrChildrenVar: ... # type: ignore[override] + +# In the DOM, this interface isn't specific to Attr, but our implementation is +# because that's the only place we use it. +class NamedNodeMap: + __slots__ = ("_attrs", "_attrsNS", "_ownerElement") + def __init__(self, attrs: dict[str, Attr], attrsNS: dict[_NSName, Attr], ownerElement: Element) -> None: ... 
+ @property + def length(self) -> int: ... + def item(self, index: int) -> Node | None: ... + def items(self) -> list[tuple[str, str]]: ... + def itemsNS(self) -> list[tuple[_NSName, str]]: ... + def __contains__(self, key: str | _NSName) -> bool: ... + def keys(self) -> dict_keys[str, Attr]: ... + def keysNS(self) -> dict_keys[_NSName, Attr]: ... + def values(self) -> dict_values[str, Attr]: ... + def get(self, name: str, value: Attr | None = None) -> Attr | None: ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __len__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + def __ge__(self, other: NamedNodeMap) -> bool: ... + def __gt__(self, other: NamedNodeMap) -> bool: ... + def __le__(self, other: NamedNodeMap) -> bool: ... + def __lt__(self, other: NamedNodeMap) -> bool: ... + def __getitem__(self, attname_or_tuple: _NSName | str) -> Attr: ... + def __setitem__(self, attname: str, value: Attr | str) -> None: ... + def getNamedItem(self, name: str) -> Attr | None: ... + def getNamedItemNS(self, namespaceURI: str | None, localName: str) -> Attr | None: ... + def removeNamedItem(self, name: str) -> Attr: ... + def removeNamedItemNS(self, namespaceURI: str | None, localName: str) -> Attr: ... + def setNamedItem(self, node: Attr) -> Attr | None: ... + def setNamedItemNS(self, node: Attr) -> Attr | None: ... + def __delitem__(self, attname_or_tuple: _NSName | str) -> None: ... + +AttributeList = NamedNodeMap + +class TypeInfo: + __slots__ = ("namespace", "name") + namespace: str | None + name: str | None + def __init__(self, namespace: Incomplete | None, name: str | None) -> None: ... + +_ElementChildrenVar = TypeVar("_ElementChildrenVar", bound=_ElementChildren) +_ElementChildrenPlusFragment = TypeVar("_ElementChildrenPlusFragment", bound=_ElementChildren | DocumentFragment) + +class Element(Node): + __slots__ = ( + "ownerDocument", + "parentNode", + "tagName", + "nodeName", + "prefix", + "namespaceURI", + "_localName", + "childNodes", + "_attrs", + "_attrsNS", + "nextSibling", + "previousSibling", + ) + nodeType: ClassVar[Literal[1]] + nodeName: str # same as Element.tagName + nodeValue: None + @property + def attributes(self) -> NamedNodeMap: ... # type: ignore[override] + + parentNode: Document | Element | DocumentFragment | None + nextSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None + previousSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None + childNodes: NodeList[_ElementChildren] + @property + def firstChild(self) -> _ElementChildren | None: ... + @property + def lastChild(self) -> _ElementChildren | None: ... + + namespaceURI: str | None + prefix: str | None + @property + def localName(self) -> str: ... + + schemaType: TypeInfo + tagName: str + + def __init__( + self, tagName: str, namespaceURI: str | None = None, prefix: str | None = None, localName: str | None = None + ) -> None: ... + def unlink(self) -> None: ... + def getAttribute(self, attname: str) -> str: ... + def getAttributeNS(self, namespaceURI: str | None, localName: str) -> str: ... + def setAttribute(self, attname: str, value: str) -> None: ... + def setAttributeNS(self, namespaceURI: str | None, qualifiedName: str, value: str) -> None: ... + def getAttributeNode(self, attrname: str) -> Attr | None: ... + def getAttributeNodeNS(self, namespaceURI: str | None, localName: str) -> Attr | None: ... + def setAttributeNode(self, attr: Attr) -> Attr | None: ... 
+ setAttributeNodeNS = setAttributeNode + def removeAttribute(self, name: str) -> None: ... + def removeAttributeNS(self, namespaceURI: str | None, localName: str) -> None: ... + def removeAttributeNode(self, node: Attr) -> Attr: ... + removeAttributeNodeNS = removeAttributeNode + def hasAttribute(self, name: str) -> bool: ... + def hasAttributeNS(self, namespaceURI: str | None, localName: str) -> bool: ... + def getElementsByTagName(self, name: str) -> NodeList[Element]: ... + def getElementsByTagNameNS(self, namespaceURI: str | None, localName: str) -> NodeList[Element]: ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... + def hasAttributes(self) -> bool: ... + def setIdAttribute(self, name: str) -> None: ... + def setIdAttributeNS(self, namespaceURI: str | None, localName: str) -> None: ... + def setIdAttributeNode(self, idAttr: Attr) -> None: ... + def insertBefore( # type: ignore[override] + self, newChild: _ElementChildrenPlusFragment, refChild: _ElementChildren | None + ) -> _ElementChildrenPlusFragment: ... + def appendChild(self, node: _ElementChildrenPlusFragment) -> _ElementChildrenPlusFragment: ... # type: ignore[override] + @overload # type: ignore[override] + def replaceChild( + self, newChild: DocumentFragment, oldChild: _ElementChildrenVar + ) -> _ElementChildrenVar | DocumentFragment: ... + @overload + def replaceChild(self, newChild: _ElementChildren, oldChild: _ElementChildrenVar) -> _ElementChildrenVar | None: ... # type: ignore[override] + def removeChild(self, oldChild: _ElementChildrenVar) -> _ElementChildrenVar: ... # type: ignore[override] + +class Childless: + __slots__ = () + attributes: None + childNodes: EmptyNodeList + @property + def firstChild(self) -> None: ... + @property + def lastChild(self) -> None: ... + def appendChild(self, node: _NodesThatAreChildren | DocumentFragment) -> NoReturn: ... + def hasChildNodes(self) -> Literal[False]: ... + def insertBefore( + self, newChild: _NodesThatAreChildren | DocumentFragment, refChild: _NodesThatAreChildren | None + ) -> NoReturn: ... + def removeChild(self, oldChild: _NodesThatAreChildren) -> NoReturn: ... + def normalize(self) -> None: ... + def replaceChild(self, newChild: _NodesThatAreChildren | DocumentFragment, oldChild: _NodesThatAreChildren) -> NoReturn: ... + +class ProcessingInstruction(Childless, Node): + __slots__ = ("target", "data") + nodeType: ClassVar[Literal[7]] + nodeName: str # same as ProcessingInstruction.target + nodeValue: str # same as ProcessingInstruction.data + attributes: None + + parentNode: Document | Element | DocumentFragment | None + nextSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None + previousSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None + childNodes: EmptyNodeList + @property + def firstChild(self) -> None: ... + @property + def lastChild(self) -> None: ... + + namespaceURI: None + prefix: None + @property + def localName(self) -> None: ... + + target: str + data: str + + def __init__(self, target: str, data: str) -> None: ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... 
+ +class CharacterData(Childless, Node): + __slots__ = ("_data", "ownerDocument", "parentNode", "previousSibling", "nextSibling") + nodeValue: str + attributes: None + + childNodes: EmptyNodeList + nextSibling: _NodesThatAreChildren | None + previousSibling: _NodesThatAreChildren | None + + @property + def localName(self) -> None: ... + + ownerDocument: Document | None + data: str + + def __init__(self) -> None: ... + @property + def length(self) -> int: ... + def __len__(self) -> int: ... + def substringData(self, offset: int, count: int) -> str: ... + def appendData(self, arg: str) -> None: ... + def insertData(self, offset: int, arg: str) -> None: ... + def deleteData(self, offset: int, count: int) -> None: ... + def replaceData(self, offset: int, count: int, arg: str) -> None: ... + +class Text(CharacterData): + __slots__ = () + nodeType: ClassVar[Literal[3]] + nodeName: Literal["#text"] + nodeValue: str # same as CharacterData.data, the content of the text node + attributes: None + + parentNode: Attr | Element | DocumentFragment | None + nextSibling: _DocumentFragmentChildren | _ElementChildren | _AttrChildren | None + previousSibling: _DocumentFragmentChildren | _ElementChildren | _AttrChildren | None + childNodes: EmptyNodeList + @property + def firstChild(self) -> None: ... + @property + def lastChild(self) -> None: ... + + namespaceURI: None + prefix: None + @property + def localName(self) -> None: ... + + data: str + def splitText(self, offset: int) -> Self: ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... + def replaceWholeText(self, content: str) -> Self | None: ... + @property + def isWhitespaceInElementContent(self) -> bool: ... + @property + def wholeText(self) -> str: ... + +class Comment(CharacterData): + nodeType: ClassVar[Literal[8]] + nodeName: Literal["#comment"] + nodeValue: str # same as CharacterData.data, the content of the comment + attributes: None + + parentNode: Document | Element | DocumentFragment | None + nextSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None + previousSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None + childNodes: EmptyNodeList + @property + def firstChild(self) -> None: ... + @property + def lastChild(self) -> None: ... + + namespaceURI: None + prefix: None + @property + def localName(self) -> None: ... + def __init__(self, data: str) -> None: ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... + +class CDATASection(Text): + __slots__ = () + nodeType: ClassVar[Literal[4]] # type: ignore[assignment] + nodeName: Literal["#cdata-section"] # type: ignore[assignment] + nodeValue: str # same as CharacterData.data, the content of the CDATA Section + attributes: None + + parentNode: Element | DocumentFragment | None + nextSibling: _DocumentFragmentChildren | _ElementChildren | None + previousSibling: _DocumentFragmentChildren | _ElementChildren | None + + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... + +class ReadOnlySequentialNamedNodeMap(Generic[_N]): + __slots__ = ("_seq",) + def __init__(self, seq: Sequence[_N] = ()) -> None: ... + def __len__(self) -> int: ... + def getNamedItem(self, name: str) -> _N | None: ... + def getNamedItemNS(self, namespaceURI: str | None, localName: str) -> _N | None: ... + def __getitem__(self, name_or_tuple: str | _NSName) -> _N | None: ... 
+ def item(self, index: int) -> _N | None: ... + def removeNamedItem(self, name: str) -> NoReturn: ... + def removeNamedItemNS(self, namespaceURI: str | None, localName: str) -> NoReturn: ... + def setNamedItem(self, node: Node) -> NoReturn: ... + def setNamedItemNS(self, node: Node) -> NoReturn: ... + @property + def length(self) -> int: ... + +class Identified: + __slots__ = ("publicId", "systemId") + publicId: str | None + systemId: str | None + +class DocumentType(Identified, Childless, Node): + nodeType: ClassVar[Literal[10]] + nodeName: str | None # same as DocumentType.name + nodeValue: None + attributes: None + + parentNode: Document | None + nextSibling: _DocumentChildren | None + previousSibling: _DocumentChildren | None + childNodes: EmptyNodeList + @property + def firstChild(self) -> None: ... + @property + def lastChild(self) -> None: ... + + namespaceURI: None + prefix: None + @property + def localName(self) -> None: ... + + name: str | None + internalSubset: str | None + entities: ReadOnlySequentialNamedNodeMap[Entity] + notations: ReadOnlySequentialNamedNodeMap[Notation] + + def __init__(self, qualifiedName: str | None) -> None: ... + def cloneNode(self, deep: bool) -> DocumentType | None: ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... + +class Entity(Identified, Node): + nodeType: ClassVar[Literal[6]] + nodeName: str # entity name + nodeValue: None + attributes: None + + parentNode: None + nextSibling: None + previousSibling: None + childNodes: NodeList[_EntityChildren] + @property + def firstChild(self) -> _EntityChildren | None: ... + @property + def lastChild(self) -> _EntityChildren | None: ... + + namespaceURI: None + prefix: None + @property + def localName(self) -> None: ... + + actualEncoding: str | None + encoding: str | None + version: str | None + notationName: str | None + + def __init__(self, name: str, publicId: str | None, systemId: str | None, notation: str | None) -> None: ... + def appendChild(self, newChild: _EntityChildren) -> NoReturn: ... # type: ignore[override] + def insertBefore(self, newChild: _EntityChildren, refChild: _EntityChildren | None) -> NoReturn: ... # type: ignore[override] + def removeChild(self, oldChild: _EntityChildren) -> NoReturn: ... # type: ignore[override] + def replaceChild(self, newChild: _EntityChildren, oldChild: _EntityChildren) -> NoReturn: ... # type: ignore[override] + +class Notation(Identified, Childless, Node): + nodeType: ClassVar[Literal[12]] + nodeName: str # notation name + nodeValue: None + attributes: None + + parentNode: DocumentFragment | None + nextSibling: _DocumentFragmentChildren | None + previousSibling: _DocumentFragmentChildren | None + childNodes: EmptyNodeList + @property + def firstChild(self) -> None: ... + @property + def lastChild(self) -> None: ... + + namespaceURI: None + prefix: None + @property + def localName(self) -> None: ... + def __init__(self, name: str, publicId: str | None, systemId: str | None) -> None: ... + +class DOMImplementation(DOMImplementationLS): + def hasFeature(self, feature: str, version: str | None) -> bool: ... + def createDocument(self, namespaceURI: str | None, qualifiedName: str | None, doctype: DocumentType | None) -> Document: ... + def createDocumentType(self, qualifiedName: str | None, publicId: str | None, systemId: str | None) -> DocumentType: ... + def getInterface(self, feature: str) -> Self | None: ... 
+ +class ElementInfo: + __slots__ = ("tagName",) + tagName: str + def __init__(self, name: str) -> None: ... + def getAttributeType(self, aname: str) -> TypeInfo: ... + def getAttributeTypeNS(self, namespaceURI: str | None, localName: str) -> TypeInfo: ... + def isElementContent(self) -> bool: ... + def isEmpty(self) -> bool: ... + def isId(self, aname: str) -> bool: ... + def isIdNS(self, namespaceURI: str | None, localName: str) -> bool: ... + +_DocumentChildrenPlusFragment = TypeVar("_DocumentChildrenPlusFragment", bound=_DocumentChildren | DocumentFragment) + +class Document(Node, DocumentLS): + __slots__ = ("_elem_info", "doctype", "_id_search_stack", "childNodes", "_id_cache") + nodeType: ClassVar[Literal[9]] + nodeName: Literal["#document"] + nodeValue: None + attributes: None + + parentNode: None + previousSibling: None + nextSibling: None + childNodes: NodeList[_DocumentChildren] + @property + def firstChild(self) -> _DocumentChildren | None: ... + @property + def lastChild(self) -> _DocumentChildren | None: ... + + namespaceURI: None + prefix: None + @property + def localName(self) -> None: ... + + implementation: DOMImplementation + actualEncoding: str | None + encoding: str | None + standalone: bool | None + version: str | None + strictErrorChecking: bool + errorHandler: _DOMErrorHandler | None + documentURI: str | None + doctype: DocumentType | None + documentElement: Element | None + + def __init__(self) -> None: ... + def appendChild(self, node: _DocumentChildrenVar) -> _DocumentChildrenVar: ... # type: ignore[override] + def removeChild(self, oldChild: _DocumentChildrenVar) -> _DocumentChildrenVar: ... # type: ignore[override] + def unlink(self) -> None: ... + def cloneNode(self, deep: bool) -> Document | None: ... + def createDocumentFragment(self) -> DocumentFragment: ... + def createElement(self, tagName: str) -> Element: ... + def createTextNode(self, data: str) -> Text: ... + def createCDATASection(self, data: str) -> CDATASection: ... + def createComment(self, data: str) -> Comment: ... + def createProcessingInstruction(self, target: str, data: str) -> ProcessingInstruction: ... + def createAttribute(self, qName: str) -> Attr: ... + def createElementNS(self, namespaceURI: str | None, qualifiedName: str) -> Element: ... + def createAttributeNS(self, namespaceURI: str | None, qualifiedName: str) -> Attr: ... + def getElementById(self, id: str) -> Element | None: ... + def getElementsByTagName(self, name: str) -> NodeList[Element]: ... + def getElementsByTagNameNS(self, namespaceURI: str | None, localName: str) -> NodeList[Element]: ... + def isSupported(self, feature: str, version: str | None) -> bool: ... + def importNode(self, node: _ImportableNodeVar, deep: bool) -> _ImportableNodeVar: ... + def writexml( + self, + writer: SupportsWrite[str], + indent: str = "", + addindent: str = "", + newl: str = "", + encoding: str | None = None, + standalone: bool | None = None, + ) -> None: ... + @overload + def renameNode(self, n: Element, namespaceURI: str, name: str) -> Element: ... + @overload + def renameNode(self, n: Attr, namespaceURI: str, name: str) -> Attr: ... + @overload + def renameNode(self, n: Element | Attr, namespaceURI: str, name: str) -> Element | Attr: ... + def insertBefore( + self, newChild: _DocumentChildrenPlusFragment, refChild: _DocumentChildren | None # type: ignore[override] + ) -> _DocumentChildrenPlusFragment: ... 
+ @overload # type: ignore[override] + def replaceChild( + self, newChild: DocumentFragment, oldChild: _DocumentChildrenVar + ) -> _DocumentChildrenVar | DocumentFragment: ... + @overload + def replaceChild(self, newChild: _DocumentChildren, oldChild: _DocumentChildrenVar) -> _DocumentChildrenVar | None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/pulldom.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/pulldom.pyi new file mode 100644 index 0000000..df7a3ad --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/pulldom.pyi @@ -0,0 +1,109 @@ +import sys +from _typeshed import Incomplete, Unused +from collections.abc import MutableSequence, Sequence +from typing import Final, Literal, NoReturn +from typing_extensions import Self, TypeAlias +from xml.dom.minidom import Comment, Document, DOMImplementation, Element, ProcessingInstruction, Text +from xml.sax import _SupportsReadClose +from xml.sax.handler import ContentHandler +from xml.sax.xmlreader import AttributesImpl, AttributesNSImpl, Locator, XMLReader + +START_ELEMENT: Final = "START_ELEMENT" +END_ELEMENT: Final = "END_ELEMENT" +COMMENT: Final = "COMMENT" +START_DOCUMENT: Final = "START_DOCUMENT" +END_DOCUMENT: Final = "END_DOCUMENT" +PROCESSING_INSTRUCTION: Final = "PROCESSING_INSTRUCTION" +IGNORABLE_WHITESPACE: Final = "IGNORABLE_WHITESPACE" +CHARACTERS: Final = "CHARACTERS" + +_NSName: TypeAlias = tuple[str | None, str] +_DocumentFactory: TypeAlias = DOMImplementation | None + +_Event: TypeAlias = ( + tuple[Literal["START_ELEMENT"], Element] + | tuple[Literal["END_ELEMENT"], Element] + | tuple[Literal["COMMENT"], Comment] + | tuple[Literal["START_DOCUMENT"], Document] + | tuple[Literal["END_DOCUMENT"], Document] + | tuple[Literal["PROCESSING_INSTRUCTION"], ProcessingInstruction] + | tuple[Literal["IGNORABLE_WHITESPACE"], Text] + | tuple[Literal["CHARACTERS"], Text] +) + +class PullDOM(ContentHandler): + document: Document | None + documentFactory: _DocumentFactory + + # firstEvent is a list of length 2 + # firstEvent[0] is always None + # firstEvent[1] is None prior to any events, after which it's a + # list of length 2, where the first item is of type _Event + # and the second item is None. + firstEvent: list[Incomplete] + + # lastEvent is also a list of length 2. The second item is always None, + # and the first item is of type _Event + # This is a slight lie: The second item is sometimes temporarily what was just + # described for the type of lastEvent, after which lastEvent is always updated + # with `self.lastEvent = self.lastEvent[1]`. + lastEvent: list[Incomplete] + + elementStack: MutableSequence[Element | Document] + pending_events: ( + list[Sequence[tuple[Literal["COMMENT"], str] | tuple[Literal["PROCESSING_INSTRUCTION"], str, str] | None]] | None + ) + def __init__(self, documentFactory: _DocumentFactory = None) -> None: ... + def pop(self) -> Element | Document: ... + def setDocumentLocator(self, locator: Locator) -> None: ... + def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ... + def endPrefixMapping(self, prefix: str | None) -> None: ... + def startElementNS(self, name: _NSName, tagName: str | None, attrs: AttributesNSImpl) -> None: ... + def endElementNS(self, name: _NSName, tagName: str | None) -> None: ... + def startElement(self, name: str, attrs: AttributesImpl) -> None: ... + def endElement(self, name: str) -> None: ... + def comment(self, s: str) -> None: ... 
+ def processingInstruction(self, target: str, data: str) -> None: ... + def ignorableWhitespace(self, chars: str) -> None: ... + def characters(self, chars: str) -> None: ... + def startDocument(self) -> None: ... + def buildDocument(self, uri: str | None, tagname: str | None) -> Element: ... + def endDocument(self) -> None: ... + def clear(self) -> None: ... + +class ErrorHandler: + def warning(self, exception: BaseException) -> None: ... + def error(self, exception: BaseException) -> NoReturn: ... + def fatalError(self, exception: BaseException) -> NoReturn: ... + +class DOMEventStream: + stream: _SupportsReadClose[bytes] | _SupportsReadClose[str] + parser: XMLReader # Set to none after .clear() is called + bufsize: int + pulldom: PullDOM + def __init__(self, stream: _SupportsReadClose[bytes] | _SupportsReadClose[str], parser: XMLReader, bufsize: int) -> None: ... + if sys.version_info < (3, 11): + def __getitem__(self, pos: Unused) -> _Event: ... + + def __next__(self) -> _Event: ... + def __iter__(self) -> Self: ... + def getEvent(self) -> _Event | None: ... + def expandNode(self, node: Document) -> None: ... + def reset(self) -> None: ... + def clear(self) -> None: ... + +class SAX2DOM(PullDOM): + def startElementNS(self, name: _NSName, tagName: str | None, attrs: AttributesNSImpl) -> None: ... + def startElement(self, name: str, attrs: AttributesImpl) -> None: ... + def processingInstruction(self, target: str, data: str) -> None: ... + def ignorableWhitespace(self, chars: str) -> None: ... + def characters(self, chars: str) -> None: ... + +default_bufsize: Final[int] + +def parse( + stream_or_string: str | _SupportsReadClose[bytes] | _SupportsReadClose[str], + parser: XMLReader | None = None, + bufsize: int | None = None, +) -> DOMEventStream: ... +def parseString(string: str, parser: XMLReader | None = None) -> DOMEventStream: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/xmlbuilder.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/xmlbuilder.pyi new file mode 100644 index 0000000..f19f705 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/dom/xmlbuilder.pyi @@ -0,0 +1,81 @@ +from _typeshed import SupportsRead +from typing import Any, Final, Literal, NoReturn +from xml.dom.minidom import Document, Node, _DOMErrorHandler + +__all__ = ["DOMBuilder", "DOMEntityResolver", "DOMInputSource"] + +class Options: + namespaces: int + namespace_declarations: bool + validation: bool + external_parameter_entities: bool + external_general_entities: bool + external_dtd_subset: bool + validate_if_schema: bool + validate: bool + datatype_normalization: bool + create_entity_ref_nodes: bool + entities: bool + whitespace_in_element_content: bool + cdata_sections: bool + comments: bool + charset_overrides_xml_encoding: bool + infoset: bool + supported_mediatypes_only: bool + errorHandler: _DOMErrorHandler | None + filter: DOMBuilderFilter | None + +class DOMBuilder: + entityResolver: DOMEntityResolver | None + errorHandler: _DOMErrorHandler | None + filter: DOMBuilderFilter | None + ACTION_REPLACE: Final = 1 + ACTION_APPEND_AS_CHILDREN: Final = 2 + ACTION_INSERT_AFTER: Final = 3 + ACTION_INSERT_BEFORE: Final = 4 + def __init__(self) -> None: ... + def setFeature(self, name: str, state: int) -> None: ... + def supportsFeature(self, name: str) -> bool: ... + def canSetFeature(self, name: str, state: Literal[1, 0]) -> bool: ... 
+ # getFeature could return any attribute from an instance of `Options` + def getFeature(self, name: str) -> Any: ... + def parseURI(self, uri: str) -> Document: ... + def parse(self, input: DOMInputSource) -> Document: ... + def parseWithContext(self, input: DOMInputSource, cnode: Node, action: Literal[1, 2, 3, 4]) -> NoReturn: ... + +class DOMEntityResolver: + __slots__ = ("_opener",) + def resolveEntity(self, publicId: str | None, systemId: str) -> DOMInputSource: ... + +class DOMInputSource: + __slots__ = ("byteStream", "characterStream", "stringData", "encoding", "publicId", "systemId", "baseURI") + byteStream: SupportsRead[bytes] | None + characterStream: SupportsRead[str] | None + stringData: str | None + encoding: str | None + publicId: str | None + systemId: str | None + baseURI: str | None + +class DOMBuilderFilter: + FILTER_ACCEPT: Final = 1 + FILTER_REJECT: Final = 2 + FILTER_SKIP: Final = 3 + FILTER_INTERRUPT: Final = 4 + whatToShow: int + def acceptNode(self, element: Node) -> Literal[1, 2, 3, 4]: ... + def startContainer(self, element: Node) -> Literal[1, 2, 3, 4]: ... + +class DocumentLS: + async_: bool + def abort(self) -> NoReturn: ... + def load(self, uri: str) -> NoReturn: ... + def loadXML(self, source: str) -> NoReturn: ... + def saveXML(self, snode: Node | None) -> str: ... + +class DOMImplementationLS: + MODE_SYNCHRONOUS: Final = 1 + MODE_ASYNCHRONOUS: Final = 2 + def createDOMBuilder(self, mode: Literal[1], schemaType: None) -> DOMBuilder: ... + def createDOMWriter(self) -> NoReturn: ... + def createDOMInputSource(self) -> DOMInputSource: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi new file mode 100644 index 0000000..10784e7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi @@ -0,0 +1,27 @@ +from _typeshed import FileDescriptorOrPath +from typing import Final, Literal, Protocol, overload, type_check_only +from xml.etree.ElementTree import Element + +@type_check_only +class _Loader(Protocol): + @overload + def __call__(self, href: FileDescriptorOrPath, parse: Literal["xml"], encoding: str | None = None) -> Element: ... + @overload + def __call__(self, href: FileDescriptorOrPath, parse: Literal["text"], encoding: str | None = None) -> str: ... + +XINCLUDE: Final = "{http://www.w3.org/2001/XInclude}" + +XINCLUDE_INCLUDE: Final = "{http://www.w3.org/2001/XInclude}include" +XINCLUDE_FALLBACK: Final = "{http://www.w3.org/2001/XInclude}fallback" + +DEFAULT_MAX_INCLUSION_DEPTH: Final = 6 + +class FatalIncludeError(SyntaxError): ... + +@overload +def default_loader(href: FileDescriptorOrPath, parse: Literal["xml"], encoding: str | None = None) -> Element: ... +@overload +def default_loader(href: FileDescriptorOrPath, parse: Literal["text"], encoding: str | None = None) -> str: ... +def include(elem: Element, loader: _Loader | None = None, base_url: str | None = None, max_depth: int | None = 6) -> None: ... + +class LimitedRecursiveIncludeError(FatalIncludeError): ... 
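A brief usage sketch for the ElementInclude API typed in the stub above (illustrative only, not part of the vendored file; the input path and its XInclude content are assumed for the example):

    from xml.etree import ElementInclude, ElementTree

    # Parse a document containing {http://www.w3.org/2001/XInclude}include
    # elements, then resolve them in place with the default loader.
    # max_depth mirrors DEFAULT_MAX_INCLUSION_DEPTH from the stub; exceeding
    # it raises LimitedRecursiveIncludeError.
    tree = ElementTree.parse("document.xml")  # hypothetical input file
    root = tree.getroot()
    ElementInclude.include(root, loader=None, base_url=None, max_depth=6)
    ElementTree.dump(root)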
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/etree/ElementPath.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/etree/ElementPath.pyi new file mode 100644 index 0000000..80f3c55 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/etree/ElementPath.pyi @@ -0,0 +1,41 @@ +from collections.abc import Callable, Generator, Iterable +from re import Pattern +from typing import Any, Final, Literal, TypeVar, overload +from typing_extensions import TypeAlias +from xml.etree.ElementTree import Element + +xpath_tokenizer_re: Final[Pattern[str]] + +_Token: TypeAlias = tuple[str, str] +_Next: TypeAlias = Callable[[], _Token] +_Callback: TypeAlias = Callable[[_SelectorContext, Iterable[Element]], Generator[Element, None, None]] +_T = TypeVar("_T") + +def xpath_tokenizer(pattern: str, namespaces: dict[str, str] | None = None) -> Generator[_Token, None, None]: ... +def get_parent_map(context: _SelectorContext) -> dict[Element, Element]: ... +def prepare_child(next: _Next, token: _Token) -> _Callback: ... +def prepare_star(next: _Next, token: _Token) -> _Callback: ... +def prepare_self(next: _Next, token: _Token) -> _Callback: ... +def prepare_descendant(next: _Next, token: _Token) -> _Callback | None: ... +def prepare_parent(next: _Next, token: _Token) -> _Callback: ... +def prepare_predicate(next: _Next, token: _Token) -> _Callback | None: ... + +ops: Final[dict[str, Callable[[_Next, _Token], _Callback | None]]] + +class _SelectorContext: + parent_map: dict[Element, Element] | None + root: Element + def __init__(self, root: Element) -> None: ... + +@overload +def iterfind( # type: ignore[overload-overlap] + elem: Element[Any], path: Literal[""], namespaces: dict[str, str] | None = None +) -> None: ... +@overload +def iterfind(elem: Element[Any], path: str, namespaces: dict[str, str] | None = None) -> Generator[Element, None, None]: ... +def find(elem: Element[Any], path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... +def findall(elem: Element[Any], path: str, namespaces: dict[str, str] | None = None) -> list[Element]: ... +@overload +def findtext(elem: Element[Any], path: str, default: None = None, namespaces: dict[str, str] | None = None) -> str | None: ... +@overload +def findtext(elem: Element[Any], path: str, default: _T, namespaces: dict[str, str] | None = None) -> _T | str: ... 
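The ElementPath helpers above implement the limited XPath dialect behind Element.find/findall/iterfind, whose signatures appear in the ElementTree stub that follows. A small sketch (illustrative only; the XML snippet and the urn:books namespace are invented):

    from xml.etree import ElementTree

    root = ElementTree.fromstring(
        '<catalog xmlns:b="urn:books">'
        '<b:book id="1"><b:title>A</b:title></b:book>'
        '<b:book id="2"><b:title>B</b:title></b:book>'
        "</catalog>"
    )
    # The namespaces mapping resolves the b: prefix, matching the
    # namespaces parameter in the signatures above.
    ns = {"b": "urn:books"}
    titles = [t.text for t in root.findall("./b:book/b:title", ns)]
    assert titles == ["A", "B"]
    second = root.find('./b:book[@id="2"]', ns)
    assert second is not None and second.get("id") == "2"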
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi new file mode 100644 index 0000000..d42db1b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi @@ -0,0 +1,366 @@ +import sys +from _collections_abc import dict_keys +from _typeshed import FileDescriptorOrPath, ReadableBuffer, SupportsRead, SupportsWrite +from collections.abc import Callable, Generator, ItemsView, Iterable, Iterator, Mapping, Sequence +from typing import Any, Final, Generic, Literal, Protocol, SupportsIndex, TypeVar, overload, type_check_only +from typing_extensions import TypeAlias, TypeGuard, deprecated, disjoint_base +from xml.parsers.expat import XMLParserType + +__all__ = [ + "C14NWriterTarget", + "Comment", + "dump", + "Element", + "ElementTree", + "canonicalize", + "fromstring", + "fromstringlist", + "indent", + "iselement", + "iterparse", + "parse", + "ParseError", + "PI", + "ProcessingInstruction", + "QName", + "SubElement", + "tostring", + "tostringlist", + "TreeBuilder", + "VERSION", + "XML", + "XMLID", + "XMLParser", + "XMLPullParser", + "register_namespace", +] + +_T = TypeVar("_T") +_FileRead: TypeAlias = FileDescriptorOrPath | SupportsRead[bytes] | SupportsRead[str] +_FileWriteC14N: TypeAlias = FileDescriptorOrPath | SupportsWrite[bytes] +_FileWrite: TypeAlias = _FileWriteC14N | SupportsWrite[str] + +VERSION: Final[str] + +class ParseError(SyntaxError): + code: int + position: tuple[int, int] + +# In reality it works based on `.tag` attribute duck typing. +def iselement(element: object) -> TypeGuard[Element]: ... +@overload +def canonicalize( + xml_data: str | ReadableBuffer | None = None, + *, + out: None = None, + from_file: _FileRead | None = None, + with_comments: bool = False, + strip_text: bool = False, + rewrite_prefixes: bool = False, + qname_aware_tags: Iterable[str] | None = None, + qname_aware_attrs: Iterable[str] | None = None, + exclude_attrs: Iterable[str] | None = None, + exclude_tags: Iterable[str] | None = None, +) -> str: ... +@overload +def canonicalize( + xml_data: str | ReadableBuffer | None = None, + *, + out: SupportsWrite[str], + from_file: _FileRead | None = None, + with_comments: bool = False, + strip_text: bool = False, + rewrite_prefixes: bool = False, + qname_aware_tags: Iterable[str] | None = None, + qname_aware_attrs: Iterable[str] | None = None, + exclude_attrs: Iterable[str] | None = None, + exclude_tags: Iterable[str] | None = None, +) -> None: ... + +# The tag for Element can be set to the Comment or ProcessingInstruction +# functions defined in this module. +_ElementCallable: TypeAlias = Callable[..., Element[_ElementCallable]] + +_Tag = TypeVar("_Tag", default=str, bound=str | _ElementCallable) +_OtherTag = TypeVar("_OtherTag", default=str, bound=str | _ElementCallable) + +@disjoint_base +class Element(Generic[_Tag]): + tag: _Tag + attrib: dict[str, str] + text: str | None + tail: str | None + def __init__(self, tag: _Tag, attrib: dict[str, str] = {}, **extra: str) -> None: ... + def append(self, subelement: Element[Any], /) -> None: ... + def clear(self) -> None: ... + def extend(self, elements: Iterable[Element[Any]], /) -> None: ... + def find(self, path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... + def findall(self, path: str, namespaces: dict[str, str] | None = None) -> list[Element]: ... 
+ @overload + def findtext(self, path: str, default: None = None, namespaces: dict[str, str] | None = None) -> str | None: ... + @overload + def findtext(self, path: str, default: _T, namespaces: dict[str, str] | None = None) -> _T | str: ... + @overload + def get(self, key: str, default: None = None) -> str | None: ... + @overload + def get(self, key: str, default: _T) -> str | _T: ... + def insert(self, index: int, subelement: Element[Any], /) -> None: ... + def items(self) -> ItemsView[str, str]: ... + def iter(self, tag: str | None = None) -> Generator[Element, None, None]: ... + @overload + def iterfind(self, path: Literal[""], namespaces: dict[str, str] | None = None) -> None: ... # type: ignore[overload-overlap] + @overload + def iterfind(self, path: str, namespaces: dict[str, str] | None = None) -> Generator[Element, None, None]: ... + def itertext(self) -> Generator[str, None, None]: ... + def keys(self) -> dict_keys[str, str]: ... + # makeelement returns the type of self in Python impl, but not in C impl + def makeelement(self, tag: _OtherTag, attrib: dict[str, str], /) -> Element[_OtherTag]: ... + def remove(self, subelement: Element[Any], /) -> None: ... + def set(self, key: str, value: str, /) -> None: ... + def __copy__(self) -> Element[_Tag]: ... # returns the type of self in Python impl, but not in C impl + def __deepcopy__(self, memo: Any, /) -> Element: ... # Only exists in C impl + def __delitem__(self, key: SupportsIndex | slice, /) -> None: ... + @overload + def __getitem__(self, key: SupportsIndex, /) -> Element: ... + @overload + def __getitem__(self, key: slice, /) -> list[Element]: ... + def __len__(self) -> int: ... + # Doesn't actually exist at runtime, but instance of the class are indeed iterable due to __getitem__. + def __iter__(self) -> Iterator[Element]: ... + @overload + def __setitem__(self, key: SupportsIndex, value: Element[Any], /) -> None: ... + @overload + def __setitem__(self, key: slice, value: Iterable[Element[Any]], /) -> None: ... + + # Doesn't really exist in earlier versions, where __len__ is called implicitly instead + @deprecated("Testing an element's truth value is deprecated.") + def __bool__(self) -> bool: ... + +def SubElement(parent: Element[Any], tag: str, attrib: dict[str, str] = ..., **extra: str) -> Element: ... +def Comment(text: str | None = None) -> Element[_ElementCallable]: ... +def ProcessingInstruction(target: str, text: str | None = None) -> Element[_ElementCallable]: ... + +PI = ProcessingInstruction + +class QName: + text: str + def __init__(self, text_or_uri: str, tag: str | None = None) -> None: ... + def __lt__(self, other: QName | str) -> bool: ... + def __le__(self, other: QName | str) -> bool: ... + def __gt__(self, other: QName | str) -> bool: ... + def __ge__(self, other: QName | str) -> bool: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + +_Root = TypeVar("_Root", Element, Element | None, default=Element | None) + +class ElementTree(Generic[_Root]): + def __init__(self, element: Element[Any] | None = None, file: _FileRead | None = None) -> None: ... + def getroot(self) -> _Root: ... + def parse(self, source: _FileRead, parser: XMLParser | None = None) -> Element: ... + def iter(self, tag: str | None = None) -> Generator[Element, None, None]: ... + def find(self, path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... + @overload + def findtext(self, path: str, default: None = None, namespaces: dict[str, str] | None = None) -> str | None: ... 
+ @overload + def findtext(self, path: str, default: _T, namespaces: dict[str, str] | None = None) -> _T | str: ... + def findall(self, path: str, namespaces: dict[str, str] | None = None) -> list[Element]: ... + @overload + def iterfind(self, path: Literal[""], namespaces: dict[str, str] | None = None) -> None: ... # type: ignore[overload-overlap] + @overload + def iterfind(self, path: str, namespaces: dict[str, str] | None = None) -> Generator[Element, None, None]: ... + def write( + self, + file_or_filename: _FileWrite, + encoding: str | None = None, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + method: Literal["xml", "html", "text", "c14n"] | None = None, + *, + short_empty_elements: bool = True, + ) -> None: ... + def write_c14n(self, file: _FileWriteC14N) -> None: ... + +HTML_EMPTY: Final[set[str]] + +def register_namespace(prefix: str, uri: str) -> None: ... +@overload +def tostring( + element: Element[Any], + encoding: None = None, + method: Literal["xml", "html", "text", "c14n"] | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, +) -> bytes: ... +@overload +def tostring( + element: Element[Any], + encoding: Literal["unicode"], + method: Literal["xml", "html", "text", "c14n"] | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, +) -> str: ... +@overload +def tostring( + element: Element[Any], + encoding: str, + method: Literal["xml", "html", "text", "c14n"] | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, +) -> Any: ... +@overload +def tostringlist( + element: Element[Any], + encoding: None = None, + method: Literal["xml", "html", "text", "c14n"] | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, +) -> list[bytes]: ... +@overload +def tostringlist( + element: Element[Any], + encoding: Literal["unicode"], + method: Literal["xml", "html", "text", "c14n"] | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, +) -> list[str]: ... +@overload +def tostringlist( + element: Element[Any], + encoding: str, + method: Literal["xml", "html", "text", "c14n"] | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, +) -> list[Any]: ... +def dump(elem: Element[Any] | ElementTree[Any]) -> None: ... +def indent(tree: Element[Any] | ElementTree[Any], space: str = " ", level: int = 0) -> None: ... +def parse(source: _FileRead, parser: XMLParser[Any] | None = None) -> ElementTree[Element]: ... + +# This class is defined inside the body of iterparse +@type_check_only +class _IterParseIterator(Iterator[tuple[str, Element]], Protocol): + def __next__(self) -> tuple[str, Element]: ... + if sys.version_info >= (3, 13): + def close(self) -> None: ... + if sys.version_info >= (3, 11): + def __del__(self) -> None: ... + +def iterparse(source: _FileRead, events: Sequence[str] | None = None, parser: XMLParser | None = None) -> _IterParseIterator: ... + +_EventQueue: TypeAlias = tuple[str] | tuple[str, tuple[str, str]] | tuple[str, None] + +class XMLPullParser(Generic[_E]): + def __init__(self, events: Sequence[str] | None = None, *, _parser: XMLParser[_E] | None = None) -> None: ... 
+ def feed(self, data: str | ReadableBuffer) -> None: ... + def close(self) -> None: ... + def read_events(self) -> Iterator[_EventQueue | tuple[str, _E]]: ... + def flush(self) -> None: ... + +def XML(text: str | ReadableBuffer, parser: XMLParser | None = None) -> Element: ... +def XMLID(text: str | ReadableBuffer, parser: XMLParser | None = None) -> tuple[Element, dict[str, Element]]: ... + +# This is aliased to XML in the source. +fromstring = XML + +def fromstringlist(sequence: Sequence[str | ReadableBuffer], parser: XMLParser | None = None) -> Element: ... + +# This type is both not precise enough and too precise. The TreeBuilder +# requires the elementfactory to accept tag and attrs in its args and produce +# some kind of object that has .text and .tail properties. +# I've chosen to constrain the ElementFactory to always produce an Element +# because that is how almost everyone will use it. +# Unfortunately, the type of the factory arguments is dependent on how +# TreeBuilder is called by client code (they could pass strs, bytes or whatever); +# but we don't want to use a too-broad type, or it would be too hard to write +# elementfactories. +_ElementFactory: TypeAlias = Callable[[Any, dict[Any, Any]], Element] + +@disjoint_base +class TreeBuilder: + # comment_factory can take None because passing None to Comment is not an error + def __init__( + self, + element_factory: _ElementFactory | None = None, + *, + comment_factory: Callable[[str | None], Element[Any]] | None = None, + pi_factory: Callable[[str, str | None], Element[Any]] | None = None, + insert_comments: bool = False, + insert_pis: bool = False, + ) -> None: ... + insert_comments: bool + insert_pis: bool + + def close(self) -> Element: ... + def data(self, data: str, /) -> None: ... + # tag and attrs are passed to the element_factory, so they could be anything + # depending on what the particular factory supports. + def start(self, tag: Any, attrs: dict[Any, Any], /) -> Element: ... + def end(self, tag: str, /) -> Element: ... + # These two methods have pos-only parameters in the C implementation + def comment(self, text: str | None, /) -> Element[Any]: ... + def pi(self, target: str, text: str | None = None, /) -> Element[Any]: ... + +class C14NWriterTarget: + def __init__( + self, + write: Callable[[str], object], + *, + with_comments: bool = False, + strip_text: bool = False, + rewrite_prefixes: bool = False, + qname_aware_tags: Iterable[str] | None = None, + qname_aware_attrs: Iterable[str] | None = None, + exclude_attrs: Iterable[str] | None = None, + exclude_tags: Iterable[str] | None = None, + ) -> None: ... + def data(self, data: str) -> None: ... + def start_ns(self, prefix: str, uri: str) -> None: ... + def start(self, tag: str, attrs: Mapping[str, str]) -> None: ... + def end(self, tag: str) -> None: ... + def comment(self, text: str) -> None: ... + def pi(self, target: str, data: str) -> None: ... + +# The target type is tricky, because the implementation doesn't +# require any particular attribute to be present. This documents the attributes +# that can be present, but uncommenting any of them would require them. +@type_check_only +class _Target(Protocol): + # start: Callable[str, dict[str, str], Any] | None + # end: Callable[[str], Any] | None + # start_ns: Callable[[str, str], Any] | None + # end_ns: Callable[[str], Any] | None + # data: Callable[[str], Any] | None + # comment: Callable[[str], Any] + # pi: Callable[[str, str], Any] | None + # close: Callable[[], Any] | None + ... 
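A minimal sketch (illustrative only, not part of the vendored stub) of a custom parser target of the kind the _Target protocol above describes; TagCounter is an invented name. Only the methods a target actually defines are hooked up, and the return value of its close() is what XMLParser.close() returns, as the generic comment just below explains:

    from xml.etree import ElementTree

    class TagCounter:
        """Collects start-tag names; close() hands back the list."""

        def __init__(self) -> None:
            self.tags: list[str] = []

        def start(self, tag: str, attrs: dict[str, str]) -> None:
            self.tags.append(tag)

        def close(self) -> list[str]:
            return self.tags

    parser = ElementTree.XMLParser(target=TagCounter())
    parser.feed("<root><a/><b/></root>")
    print(parser.close())  # ['root', 'a', 'b']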
+ +_E = TypeVar("_E", default=Element) + +# This is generic because the return type of close() depends on the target. +# The default target is TreeBuilder, which returns Element. +# C14NWriterTarget does not implement a close method, so using it results +# in a type of XMLParser[None]. +@disjoint_base +class XMLParser(Generic[_E]): + parser: XMLParserType + target: _Target + # TODO: what is entity used for??? + entity: dict[str, str] + version: str + def __init__(self, *, target: _Target | None = None, encoding: str | None = None) -> None: ... + def close(self) -> _E: ... + def feed(self, data: str | ReadableBuffer, /) -> None: ... + def flush(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/etree/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/etree/__init__.pyi new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/etree/cElementTree.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/etree/cElementTree.pyi new file mode 100644 index 0000000..02272d8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/etree/cElementTree.pyi @@ -0,0 +1 @@ +from xml.etree.ElementTree import * diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/parsers/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/parsers/__init__.pyi new file mode 100644 index 0000000..cebdb6a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/parsers/__init__.pyi @@ -0,0 +1 @@ +from xml.parsers import expat as expat diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/parsers/expat/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/parsers/expat/__init__.pyi new file mode 100644 index 0000000..d9b7ea5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/parsers/expat/__init__.pyi @@ -0,0 +1,7 @@ +from pyexpat import * + +# This is actually implemented in the C module pyexpat, but considers itself to live here. 
+class ExpatError(Exception): + code: int + lineno: int + offset: int diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/parsers/expat/errors.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/parsers/expat/errors.pyi new file mode 100644 index 0000000..e22d769 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/parsers/expat/errors.pyi @@ -0,0 +1 @@ +from pyexpat.errors import * diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/parsers/expat/model.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/parsers/expat/model.pyi new file mode 100644 index 0000000..d8f44b4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/parsers/expat/model.pyi @@ -0,0 +1 @@ +from pyexpat.model import * diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/sax/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/sax/__init__.pyi new file mode 100644 index 0000000..679466f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/sax/__init__.pyi @@ -0,0 +1,43 @@ +import sys +from _typeshed import ReadableBuffer, StrPath, SupportsRead, _T_co +from collections.abc import Iterable +from typing import Final, Protocol, type_check_only +from typing_extensions import TypeAlias +from xml.sax._exceptions import ( + SAXException as SAXException, + SAXNotRecognizedException as SAXNotRecognizedException, + SAXNotSupportedException as SAXNotSupportedException, + SAXParseException as SAXParseException, + SAXReaderNotAvailable as SAXReaderNotAvailable, +) +from xml.sax.handler import ContentHandler as ContentHandler, ErrorHandler as ErrorHandler +from xml.sax.xmlreader import InputSource as InputSource, XMLReader + +@type_check_only +class _SupportsReadClose(SupportsRead[_T_co], Protocol[_T_co]): + def close(self) -> None: ... + +_Source: TypeAlias = StrPath | _SupportsReadClose[bytes] | _SupportsReadClose[str] + +default_parser_list: Final[list[str]] + +def make_parser(parser_list: Iterable[str] = ()) -> XMLReader: ... +def parse(source: _Source, handler: ContentHandler, errorHandler: ErrorHandler = ...) -> None: ... +def parseString(string: ReadableBuffer | str, handler: ContentHandler, errorHandler: ErrorHandler | None = ...) -> None: ... +def _create_parser(parser_name: str) -> XMLReader: ... + +if sys.version_info >= (3, 14): + __all__ = [ + "ContentHandler", + "ErrorHandler", + "InputSource", + "SAXException", + "SAXNotRecognizedException", + "SAXNotSupportedException", + "SAXParseException", + "SAXReaderNotAvailable", + "default_parser_list", + "make_parser", + "parse", + "parseString", + ] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/sax/_exceptions.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/sax/_exceptions.pyi new file mode 100644 index 0000000..e9cc885 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/sax/_exceptions.pyi @@ -0,0 +1,19 @@ +from typing import NoReturn +from xml.sax.xmlreader import Locator + +class SAXException(Exception): + def __init__(self, msg: str, exception: Exception | None = None) -> None: ... + def getMessage(self) -> str: ... + def getException(self) -> Exception | None: ... + def __getitem__(self, ix: object) -> NoReturn: ... + +class SAXParseException(SAXException): + def __init__(self, msg: str, exception: Exception | None, locator: Locator) -> None: ... + def getColumnNumber(self) -> int | None: ... 
+ def getLineNumber(self) -> int | None: ... + def getPublicId(self) -> str | None: ... + def getSystemId(self) -> str | None: ... + +class SAXNotRecognizedException(SAXException): ... +class SAXNotSupportedException(SAXException): ... +class SAXReaderNotAvailable(SAXNotSupportedException): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/sax/expatreader.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/sax/expatreader.pyi new file mode 100644 index 0000000..3f9573a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/sax/expatreader.pyi @@ -0,0 +1,78 @@ +import sys +from _typeshed import ReadableBuffer +from collections.abc import Mapping +from typing import Any, Final, Literal, overload +from typing_extensions import TypeAlias +from xml.sax import _Source, xmlreader +from xml.sax.handler import _ContentHandlerProtocol + +if sys.version_info >= (3, 10): + from xml.sax.handler import LexicalHandler + +_BoolType: TypeAlias = Literal[0, 1] | bool + +version: Final[str] +AttributesImpl = xmlreader.AttributesImpl +AttributesNSImpl = xmlreader.AttributesNSImpl + +class _ClosedParser: + ErrorColumnNumber: int + ErrorLineNumber: int + +class ExpatLocator(xmlreader.Locator): + def __init__(self, parser: ExpatParser) -> None: ... + def getColumnNumber(self) -> int | None: ... + def getLineNumber(self) -> int: ... + def getPublicId(self) -> str | None: ... + def getSystemId(self) -> str | None: ... + +class ExpatParser(xmlreader.IncrementalParser, xmlreader.Locator): + def __init__(self, namespaceHandling: _BoolType = 0, bufsize: int = 65516) -> None: ... + def parse(self, source: xmlreader.InputSource | _Source) -> None: ... + def prepareParser(self, source: xmlreader.InputSource) -> None: ... + def setContentHandler(self, handler: _ContentHandlerProtocol) -> None: ... + def getFeature(self, name: str) -> _BoolType: ... + def setFeature(self, name: str, state: _BoolType) -> None: ... + if sys.version_info >= (3, 10): + @overload + def getProperty(self, name: Literal["http://xml.org/sax/properties/lexical-handler"]) -> LexicalHandler | None: ... + + @overload + def getProperty(self, name: Literal["http://www.python.org/sax/properties/interning-dict"]) -> dict[str, Any] | None: ... + @overload + def getProperty(self, name: Literal["http://xml.org/sax/properties/xml-string"]) -> bytes | None: ... + @overload + def getProperty(self, name: str) -> object: ... + if sys.version_info >= (3, 10): + @overload + def setProperty(self, name: Literal["http://xml.org/sax/properties/lexical-handler"], value: LexicalHandler) -> None: ... + + @overload + def setProperty( + self, name: Literal["http://www.python.org/sax/properties/interning-dict"], value: dict[str, Any] + ) -> None: ... + @overload + def setProperty(self, name: str, value: object) -> None: ... + def feed(self, data: str | ReadableBuffer, isFinal: bool = False) -> None: ... + def flush(self) -> None: ... + def close(self) -> None: ... + def reset(self) -> None: ... + def getColumnNumber(self) -> int | None: ... + def getLineNumber(self) -> int: ... + def getPublicId(self) -> str | None: ... + def getSystemId(self) -> str | None: ... + def start_element(self, name: str, attrs: Mapping[str, str]) -> None: ... + def end_element(self, name: str) -> None: ... + def start_element_ns(self, name: str, attrs: Mapping[str, str]) -> None: ... + def end_element_ns(self, name: str) -> None: ... + def processing_instruction(self, target: str, data: str) -> None: ... 
+ def character_data(self, data: str) -> None: ... + def start_namespace_decl(self, prefix: str | None, uri: str) -> None: ... + def end_namespace_decl(self, prefix: str | None) -> None: ... + def start_doctype_decl(self, name: str, sysid: str | None, pubid: str | None, has_internal_subset: bool) -> None: ... + def unparsed_entity_decl(self, name: str, base: str | None, sysid: str, pubid: str | None, notation_name: str) -> None: ... + def notation_decl(self, name: str, base: str | None, sysid: str, pubid: str | None) -> None: ... + def external_entity_ref(self, context: str, base: str | None, sysid: str, pubid: str | None) -> int: ... + def skipped_entity_handler(self, name: str, is_pe: bool) -> None: ... + +def create_parser(namespaceHandling: int = 0, bufsize: int = 65516) -> ExpatParser: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/sax/handler.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/sax/handler.pyi new file mode 100644 index 0000000..5ecbfa6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/sax/handler.pyi @@ -0,0 +1,86 @@ +import sys +from typing import Final, NoReturn, Protocol, type_check_only +from xml.sax import xmlreader + +version: Final[str] + +@type_check_only +class _ErrorHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used + def error(self, exception: BaseException) -> NoReturn: ... + def fatalError(self, exception: BaseException) -> NoReturn: ... + def warning(self, exception: BaseException) -> None: ... + +class ErrorHandler: + def error(self, exception: BaseException) -> NoReturn: ... + def fatalError(self, exception: BaseException) -> NoReturn: ... + def warning(self, exception: BaseException) -> None: ... + +@type_check_only +class _ContentHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used + def setDocumentLocator(self, locator: xmlreader.Locator) -> None: ... + def startDocument(self) -> None: ... + def endDocument(self) -> None: ... + def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ... + def endPrefixMapping(self, prefix: str | None) -> None: ... + def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ... + def endElement(self, name: str) -> None: ... + def startElementNS(self, name: tuple[str | None, str], qname: str | None, attrs: xmlreader.AttributesNSImpl) -> None: ... + def endElementNS(self, name: tuple[str | None, str], qname: str | None) -> None: ... + def characters(self, content: str) -> None: ... + def ignorableWhitespace(self, whitespace: str) -> None: ... + def processingInstruction(self, target: str, data: str) -> None: ... + def skippedEntity(self, name: str) -> None: ... + +class ContentHandler: + def setDocumentLocator(self, locator: xmlreader.Locator) -> None: ... + def startDocument(self) -> None: ... + def endDocument(self) -> None: ... + def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ... + def endPrefixMapping(self, prefix: str | None) -> None: ... + def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ... + def endElement(self, name: str) -> None: ... + def startElementNS(self, name: tuple[str | None, str], qname: str | None, attrs: xmlreader.AttributesNSImpl) -> None: ... + def endElementNS(self, name: tuple[str | None, str], qname: str | None) -> None: ... + def characters(self, content: str) -> None: ... + def ignorableWhitespace(self, whitespace: str) -> None: ... + def processingInstruction(self, target: str, data: str) -> None: ... 
+ def skippedEntity(self, name: str) -> None: ... + +@type_check_only +class _DTDHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used + def notationDecl(self, name: str, publicId: str | None, systemId: str) -> None: ... + def unparsedEntityDecl(self, name: str, publicId: str | None, systemId: str, ndata: str) -> None: ... + +class DTDHandler: + def notationDecl(self, name: str, publicId: str | None, systemId: str) -> None: ... + def unparsedEntityDecl(self, name: str, publicId: str | None, systemId: str, ndata: str) -> None: ... + +@type_check_only +class _EntityResolverProtocol(Protocol): # noqa: Y046 # Protocol is not used + def resolveEntity(self, publicId: str | None, systemId: str) -> str: ... + +class EntityResolver: + def resolveEntity(self, publicId: str | None, systemId: str) -> str: ... + +feature_namespaces: Final = "http://xml.org/sax/features/namespaces" +feature_namespace_prefixes: Final = "http://xml.org/sax/features/namespace-prefixes" +feature_string_interning: Final = "http://xml.org/sax/features/string-interning" +feature_validation: Final = "http://xml.org/sax/features/validation" +feature_external_ges: Final[str] # too long string +feature_external_pes: Final[str] # too long string +all_features: Final[list[str]] +property_lexical_handler: Final = "http://xml.org/sax/properties/lexical-handler" +property_declaration_handler: Final = "http://xml.org/sax/properties/declaration-handler" +property_dom_node: Final = "http://xml.org/sax/properties/dom-node" +property_xml_string: Final = "http://xml.org/sax/properties/xml-string" +property_encoding: Final = "http://www.python.org/sax/properties/encoding" +property_interning_dict: Final[str] # too long string +all_properties: Final[list[str]] + +if sys.version_info >= (3, 10): + class LexicalHandler: + def comment(self, content: str) -> None: ... + def startDTD(self, name: str, public_id: str | None, system_id: str | None) -> None: ... + def endDTD(self) -> None: ... + def startCDATA(self) -> None: ... + def endCDATA(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/sax/saxutils.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/sax/saxutils.pyi new file mode 100644 index 0000000..a29588f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/sax/saxutils.pyi @@ -0,0 +1,68 @@ +from _typeshed import SupportsWrite +from codecs import StreamReaderWriter, StreamWriter +from collections.abc import Mapping +from io import RawIOBase, TextIOBase +from typing import Literal, NoReturn +from xml.sax import _Source, handler, xmlreader + +def escape(data: str, entities: Mapping[str, str] = {}) -> str: ... +def unescape(data: str, entities: Mapping[str, str] = {}) -> str: ... +def quoteattr(data: str, entities: Mapping[str, str] = {}) -> str: ... + +class XMLGenerator(handler.ContentHandler): + def __init__( + self, + out: TextIOBase | RawIOBase | StreamWriter | StreamReaderWriter | SupportsWrite[bytes] | None = None, + encoding: str = "iso-8859-1", + short_empty_elements: bool = False, + ) -> None: ... + def _qname(self, name: tuple[str | None, str]) -> str: ... + def startDocument(self) -> None: ... + def endDocument(self) -> None: ... + def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ... + def endPrefixMapping(self, prefix: str | None) -> None: ... + def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ... + def endElement(self, name: str) -> None: ... 
+ def startElementNS(self, name: tuple[str | None, str], qname: str | None, attrs: xmlreader.AttributesNSImpl) -> None: ... + def endElementNS(self, name: tuple[str | None, str], qname: str | None) -> None: ... + def characters(self, content: str) -> None: ... + def ignorableWhitespace(self, content: str) -> None: ... + def processingInstruction(self, target: str, data: str) -> None: ... + +class XMLFilterBase(xmlreader.XMLReader): + def __init__(self, parent: xmlreader.XMLReader | None = None) -> None: ... + # ErrorHandler methods + def error(self, exception: BaseException) -> NoReturn: ... + def fatalError(self, exception: BaseException) -> NoReturn: ... + def warning(self, exception: BaseException) -> None: ... + # ContentHandler methods + def setDocumentLocator(self, locator: xmlreader.Locator) -> None: ... + def startDocument(self) -> None: ... + def endDocument(self) -> None: ... + def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ... + def endPrefixMapping(self, prefix: str | None) -> None: ... + def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ... + def endElement(self, name: str) -> None: ... + def startElementNS(self, name: tuple[str | None, str], qname: str | None, attrs: xmlreader.AttributesNSImpl) -> None: ... + def endElementNS(self, name: tuple[str | None, str], qname: str | None) -> None: ... + def characters(self, content: str) -> None: ... + def ignorableWhitespace(self, chars: str) -> None: ... + def processingInstruction(self, target: str, data: str) -> None: ... + def skippedEntity(self, name: str) -> None: ... + # DTDHandler methods + def notationDecl(self, name: str, publicId: str | None, systemId: str) -> None: ... + def unparsedEntityDecl(self, name: str, publicId: str | None, systemId: str, ndata: str) -> None: ... + # EntityResolver methods + def resolveEntity(self, publicId: str | None, systemId: str) -> str: ... + # XMLReader methods + def parse(self, source: xmlreader.InputSource | _Source) -> None: ... + def setLocale(self, locale: str) -> None: ... + def getFeature(self, name: str) -> Literal[1, 0] | bool: ... + def setFeature(self, name: str, state: Literal[1, 0] | bool) -> None: ... + def getProperty(self, name: str) -> object: ... + def setProperty(self, name: str, value: object) -> None: ... + # XMLFilter methods + def getParent(self) -> xmlreader.XMLReader | None: ... + def setParent(self, parent: xmlreader.XMLReader) -> None: ... + +def prepare_input_source(source: xmlreader.InputSource | _Source, base: str = "") -> xmlreader.InputSource: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi new file mode 100644 index 0000000..e7d04dd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi @@ -0,0 +1,90 @@ +from _typeshed import ReadableBuffer +from collections.abc import Mapping +from typing import Generic, Literal, TypeVar, overload +from typing_extensions import Self, TypeAlias +from xml.sax import _Source, _SupportsReadClose +from xml.sax.handler import _ContentHandlerProtocol, _DTDHandlerProtocol, _EntityResolverProtocol, _ErrorHandlerProtocol + +class XMLReader: + def parse(self, source: InputSource | _Source) -> None: ... + def getContentHandler(self) -> _ContentHandlerProtocol: ... + def setContentHandler(self, handler: _ContentHandlerProtocol) -> None: ... + def getDTDHandler(self) -> _DTDHandlerProtocol: ... 
+ def setDTDHandler(self, handler: _DTDHandlerProtocol) -> None: ... + def getEntityResolver(self) -> _EntityResolverProtocol: ... + def setEntityResolver(self, resolver: _EntityResolverProtocol) -> None: ... + def getErrorHandler(self) -> _ErrorHandlerProtocol: ... + def setErrorHandler(self, handler: _ErrorHandlerProtocol) -> None: ... + def setLocale(self, locale: str) -> None: ... + def getFeature(self, name: str) -> Literal[0, 1] | bool: ... + def setFeature(self, name: str, state: Literal[0, 1] | bool) -> None: ... + def getProperty(self, name: str) -> object: ... + def setProperty(self, name: str, value: object) -> None: ... + +class IncrementalParser(XMLReader): + def __init__(self, bufsize: int = 65536) -> None: ... + def parse(self, source: InputSource | _Source) -> None: ... + def feed(self, data: str | ReadableBuffer) -> None: ... + def prepareParser(self, source: InputSource) -> None: ... + def close(self) -> None: ... + def reset(self) -> None: ... + +class Locator: + def getColumnNumber(self) -> int | None: ... + def getLineNumber(self) -> int | None: ... + def getPublicId(self) -> str | None: ... + def getSystemId(self) -> str | None: ... + +class InputSource: + def __init__(self, system_id: str | None = None) -> None: ... + def setPublicId(self, public_id: str | None) -> None: ... + def getPublicId(self) -> str | None: ... + def setSystemId(self, system_id: str | None) -> None: ... + def getSystemId(self) -> str | None: ... + def setEncoding(self, encoding: str | None) -> None: ... + def getEncoding(self) -> str | None: ... + def setByteStream(self, bytefile: _SupportsReadClose[bytes] | None) -> None: ... + def getByteStream(self) -> _SupportsReadClose[bytes] | None: ... + def setCharacterStream(self, charfile: _SupportsReadClose[str] | None) -> None: ... + def getCharacterStream(self) -> _SupportsReadClose[str] | None: ... + +_AttrKey = TypeVar("_AttrKey", default=str) + +class AttributesImpl(Generic[_AttrKey]): + def __init__(self, attrs: Mapping[_AttrKey, str]) -> None: ... + def getLength(self) -> int: ... + def getType(self, name: str) -> str: ... + def getValue(self, name: _AttrKey) -> str: ... + def getValueByQName(self, name: str) -> str: ... + def getNameByQName(self, name: str) -> _AttrKey: ... + def getQNameByName(self, name: _AttrKey) -> str: ... + def getNames(self) -> list[_AttrKey]: ... + def getQNames(self) -> list[str]: ... + def __len__(self) -> int: ... + def __getitem__(self, name: _AttrKey) -> str: ... + def keys(self) -> list[_AttrKey]: ... + def __contains__(self, name: _AttrKey) -> bool: ... + @overload + def get(self, name: _AttrKey, alternative: None = None) -> str | None: ... + @overload + def get(self, name: _AttrKey, alternative: str) -> str: ... + def copy(self) -> Self: ... + def items(self) -> list[tuple[_AttrKey, str]]: ... + def values(self) -> list[str]: ... + +_NSName: TypeAlias = tuple[str | None, str] + +class AttributesNSImpl(AttributesImpl[_NSName]): + def __init__(self, attrs: Mapping[_NSName, str], qnames: Mapping[_NSName, str]) -> None: ... + def getValue(self, name: _NSName) -> str: ... + def getNameByQName(self, name: str) -> _NSName: ... + def getQNameByName(self, name: _NSName) -> str: ... + def getNames(self) -> list[_NSName]: ... + def __getitem__(self, name: _NSName) -> str: ... + def keys(self) -> list[_NSName]: ... + def __contains__(self, name: _NSName) -> bool: ... + @overload + def get(self, name: _NSName, alternative: None = None) -> str | None: ... 
+ @overload + def get(self, name: _NSName, alternative: str) -> str: ... + def items(self) -> list[tuple[_NSName, str]]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xmlrpc/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xmlrpc/__init__.pyi new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xmlrpc/client.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xmlrpc/client.pyi new file mode 100644 index 0000000..42420ee --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xmlrpc/client.pyi @@ -0,0 +1,298 @@ +import gzip +import http.client +import time +from _typeshed import ReadableBuffer, SizedBuffer, SupportsRead, SupportsWrite +from collections.abc import Callable, Iterable, Mapping +from datetime import datetime +from io import BytesIO +from types import TracebackType +from typing import Any, ClassVar, Final, Literal, Protocol, overload, type_check_only +from typing_extensions import Self, TypeAlias + +@type_check_only +class _SupportsTimeTuple(Protocol): + def timetuple(self) -> time.struct_time: ... + +_DateTimeComparable: TypeAlias = DateTime | datetime | str | _SupportsTimeTuple +_Marshallable: TypeAlias = ( + bool + | int + | float + | str + | bytes + | bytearray + | None + | tuple[_Marshallable, ...] + # Ideally we'd use _Marshallable for list and dict, but invariance makes that impractical + | list[Any] + | dict[str, Any] + | datetime + | DateTime + | Binary +) +_XMLDate: TypeAlias = int | datetime | tuple[int, ...] | time.struct_time +_HostType: TypeAlias = tuple[str, dict[str, str]] | str + +def escape(s: str) -> str: ... # undocumented + +MAXINT: Final[int] # undocumented +MININT: Final[int] # undocumented + +PARSE_ERROR: Final[int] # undocumented +SERVER_ERROR: Final[int] # undocumented +APPLICATION_ERROR: Final[int] # undocumented +SYSTEM_ERROR: Final[int] # undocumented +TRANSPORT_ERROR: Final[int] # undocumented + +NOT_WELLFORMED_ERROR: Final[int] # undocumented +UNSUPPORTED_ENCODING: Final[int] # undocumented +INVALID_ENCODING_CHAR: Final[int] # undocumented +INVALID_XMLRPC: Final[int] # undocumented +METHOD_NOT_FOUND: Final[int] # undocumented +INVALID_METHOD_PARAMS: Final[int] # undocumented +INTERNAL_ERROR: Final[int] # undocumented + +class Error(Exception): ... + +class ProtocolError(Error): + url: str + errcode: int + errmsg: str + headers: dict[str, str] + def __init__(self, url: str, errcode: int, errmsg: str, headers: dict[str, str]) -> None: ... + +class ResponseError(Error): ... + +class Fault(Error): + faultCode: int + faultString: str + def __init__(self, faultCode: int, faultString: str, **extra: Any) -> None: ... + +boolean = bool +Boolean = bool + +def _iso8601_format(value: datetime) -> str: ... # undocumented +def _strftime(value: _XMLDate) -> str: ... # undocumented + +class DateTime: + value: str # undocumented + def __init__(self, value: int | str | datetime | time.struct_time | tuple[int, ...] = 0) -> None: ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __lt__(self, other: _DateTimeComparable) -> bool: ... + def __le__(self, other: _DateTimeComparable) -> bool: ... + def __gt__(self, other: _DateTimeComparable) -> bool: ... + def __ge__(self, other: _DateTimeComparable) -> bool: ... + def __eq__(self, other: _DateTimeComparable) -> bool: ... # type: ignore[override] + def make_comparable(self, other: _DateTimeComparable) -> tuple[str, str]: ... 
# undocumented + def timetuple(self) -> time.struct_time: ... # undocumented + def decode(self, data: Any) -> None: ... + def encode(self, out: SupportsWrite[str]) -> None: ... + +def _datetime(data: Any) -> DateTime: ... # undocumented +def _datetime_type(data: str) -> datetime: ... # undocumented + +class Binary: + data: bytes + def __init__(self, data: bytes | bytearray | None = None) -> None: ... + def decode(self, data: ReadableBuffer) -> None: ... + def encode(self, out: SupportsWrite[str]) -> None: ... + def __eq__(self, other: object) -> bool: ... + __hash__: ClassVar[None] # type: ignore[assignment] + +def _binary(data: ReadableBuffer) -> Binary: ... # undocumented + +WRAPPERS: Final[tuple[type[DateTime], type[Binary]]] # undocumented + +class ExpatParser: # undocumented + def __init__(self, target: Unmarshaller) -> None: ... + def feed(self, data: str | ReadableBuffer) -> None: ... + def close(self) -> None: ... + +_WriteCallback: TypeAlias = Callable[[str], object] + +class Marshaller: + dispatch: dict[type[_Marshallable] | Literal["_arbitrary_instance"], Callable[[Marshaller, Any, _WriteCallback], None]] + memo: dict[Any, None] + data: None + encoding: str | None + allow_none: bool + def __init__(self, encoding: str | None = None, allow_none: bool = False) -> None: ... + def dumps(self, values: Fault | Iterable[_Marshallable]) -> str: ... + def __dump(self, value: _Marshallable, write: _WriteCallback) -> None: ... # undocumented + def dump_nil(self, value: None, write: _WriteCallback) -> None: ... + def dump_bool(self, value: bool, write: _WriteCallback) -> None: ... + def dump_long(self, value: int, write: _WriteCallback) -> None: ... + def dump_int(self, value: int, write: _WriteCallback) -> None: ... + def dump_double(self, value: float, write: _WriteCallback) -> None: ... + def dump_unicode(self, value: str, write: _WriteCallback, escape: Callable[[str], str] = ...) -> None: ... + def dump_bytes(self, value: ReadableBuffer, write: _WriteCallback) -> None: ... + def dump_array(self, value: Iterable[_Marshallable], write: _WriteCallback) -> None: ... + def dump_struct( + self, value: Mapping[str, _Marshallable], write: _WriteCallback, escape: Callable[[str], str] = ... + ) -> None: ... + def dump_datetime(self, value: _XMLDate, write: _WriteCallback) -> None: ... + def dump_instance(self, value: object, write: _WriteCallback) -> None: ... + +class Unmarshaller: + dispatch: dict[str, Callable[[Unmarshaller, str], None]] + + _type: str | None + _stack: list[_Marshallable] + _marks: list[int] + _data: list[str] + _value: bool + _methodname: str | None + _encoding: str + append: Callable[[Any], None] + _use_datetime: bool + _use_builtin_types: bool + def __init__(self, use_datetime: bool = False, use_builtin_types: bool = False) -> None: ... + def close(self) -> tuple[_Marshallable, ...]: ... + def getmethodname(self) -> str | None: ... + def xml(self, encoding: str, standalone: Any) -> None: ... # Standalone is ignored + def start(self, tag: str, attrs: dict[str, str]) -> None: ... + def data(self, text: str) -> None: ... + def end(self, tag: str) -> None: ... + def end_dispatch(self, tag: str, data: str) -> None: ... + def end_nil(self, data: str) -> None: ... + def end_boolean(self, data: str) -> None: ... + def end_int(self, data: str) -> None: ... + def end_double(self, data: str) -> None: ... + def end_bigdecimal(self, data: str) -> None: ... + def end_string(self, data: str) -> None: ... + def end_array(self, data: str) -> None: ... 
+ def end_struct(self, data: str) -> None: ... + def end_base64(self, data: str) -> None: ... + def end_dateTime(self, data: str) -> None: ... + def end_value(self, data: str) -> None: ... + def end_params(self, data: str) -> None: ... + def end_fault(self, data: str) -> None: ... + def end_methodName(self, data: str) -> None: ... + +class _MultiCallMethod: # undocumented + __call_list: list[tuple[str, tuple[_Marshallable, ...]]] + __name: str + def __init__(self, call_list: list[tuple[str, _Marshallable]], name: str) -> None: ... + def __getattr__(self, name: str) -> _MultiCallMethod: ... + def __call__(self, *args: _Marshallable) -> None: ... + +class MultiCallIterator: # undocumented + results: list[list[_Marshallable]] + def __init__(self, results: list[list[_Marshallable]]) -> None: ... + def __getitem__(self, i: int) -> _Marshallable: ... + +class MultiCall: + __server: ServerProxy + __call_list: list[tuple[str, tuple[_Marshallable, ...]]] + def __init__(self, server: ServerProxy) -> None: ... + def __getattr__(self, name: str) -> _MultiCallMethod: ... + def __call__(self) -> MultiCallIterator: ... + +# A little white lie +FastMarshaller: Marshaller | None +FastParser: ExpatParser | None +FastUnmarshaller: Unmarshaller | None + +def getparser(use_datetime: bool = False, use_builtin_types: bool = False) -> tuple[ExpatParser, Unmarshaller]: ... +def dumps( + params: Fault | tuple[_Marshallable, ...], + methodname: str | None = None, + methodresponse: bool | None = None, + encoding: str | None = None, + allow_none: bool = False, +) -> str: ... +def loads( + data: str | ReadableBuffer, use_datetime: bool = False, use_builtin_types: bool = False +) -> tuple[tuple[_Marshallable, ...], str | None]: ... +def gzip_encode(data: ReadableBuffer) -> bytes: ... # undocumented +def gzip_decode(data: ReadableBuffer, max_decode: int = 20971520) -> bytes: ... # undocumented + +class GzipDecodedResponse(gzip.GzipFile): # undocumented + io: BytesIO + def __init__(self, response: SupportsRead[ReadableBuffer]) -> None: ... + +class _Method: # undocumented + __send: Callable[[str, tuple[_Marshallable, ...]], _Marshallable] + __name: str + def __init__(self, send: Callable[[str, tuple[_Marshallable, ...]], _Marshallable], name: str) -> None: ... + def __getattr__(self, name: str) -> _Method: ... + def __call__(self, *args: _Marshallable) -> _Marshallable: ... + +class Transport: + user_agent: str + accept_gzip_encoding: bool + encode_threshold: int | None + + _use_datetime: bool + _use_builtin_types: bool + _connection: tuple[_HostType | None, http.client.HTTPConnection | None] + _headers: list[tuple[str, str]] + _extra_headers: list[tuple[str, str]] + + def __init__( + self, use_datetime: bool = False, use_builtin_types: bool = False, *, headers: Iterable[tuple[str, str]] = () + ) -> None: ... + def request( + self, host: _HostType, handler: str, request_body: SizedBuffer, verbose: bool = False + ) -> tuple[_Marshallable, ...]: ... + def single_request( + self, host: _HostType, handler: str, request_body: SizedBuffer, verbose: bool = False + ) -> tuple[_Marshallable, ...]: ... + def getparser(self) -> tuple[ExpatParser, Unmarshaller]: ... + def get_host_info(self, host: _HostType) -> tuple[str, list[tuple[str, str]], dict[str, str]]: ... + def make_connection(self, host: _HostType) -> http.client.HTTPConnection: ... + def close(self) -> None: ... + def send_request( + self, host: _HostType, handler: str, request_body: SizedBuffer, debug: bool + ) -> http.client.HTTPConnection: ... 
+ def send_headers(self, connection: http.client.HTTPConnection, headers: list[tuple[str, str]]) -> None: ... + def send_content(self, connection: http.client.HTTPConnection, request_body: SizedBuffer) -> None: ... + def parse_response(self, response: http.client.HTTPResponse) -> tuple[_Marshallable, ...]: ... + +class SafeTransport(Transport): + def __init__( + self, + use_datetime: bool = False, + use_builtin_types: bool = False, + *, + headers: Iterable[tuple[str, str]] = (), + context: Any | None = None, + ) -> None: ... + def make_connection(self, host: _HostType) -> http.client.HTTPSConnection: ... + +class ServerProxy: + __host: str + __handler: str + __transport: Transport + __encoding: str + __verbose: bool + __allow_none: bool + + def __init__( + self, + uri: str, + transport: Transport | None = None, + encoding: str | None = None, + verbose: bool = False, + allow_none: bool = False, + use_datetime: bool = False, + use_builtin_types: bool = False, + *, + headers: Iterable[tuple[str, str]] = (), + context: Any | None = None, + ) -> None: ... + def __getattr__(self, name: str) -> _Method: ... + @overload + def __call__(self, attr: Literal["close"]) -> Callable[[], None]: ... + @overload + def __call__(self, attr: Literal["transport"]) -> Transport: ... + @overload + def __call__(self, attr: str) -> Callable[[], None] | Transport: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def __close(self) -> None: ... # undocumented + def __request(self, methodname: str, params: tuple[_Marshallable, ...]) -> tuple[_Marshallable, ...]: ... # undocumented + +Server = ServerProxy diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xmlrpc/server.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xmlrpc/server.pyi new file mode 100644 index 0000000..286aaf9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xmlrpc/server.pyi @@ -0,0 +1,150 @@ +import http.server +import pydoc +import socketserver +from _typeshed import ReadableBuffer +from collections.abc import Callable, Iterable, Mapping +from re import Pattern +from typing import Any, ClassVar, Protocol, type_check_only +from typing_extensions import TypeAlias +from xmlrpc.client import Fault, _Marshallable + +# The dispatch accepts anywhere from 0 to N arguments, no easy way to allow this in mypy +@type_check_only +class _DispatchArity0(Protocol): + def __call__(self) -> _Marshallable: ... + +@type_check_only +class _DispatchArity1(Protocol): + def __call__(self, arg1: _Marshallable, /) -> _Marshallable: ... + +@type_check_only +class _DispatchArity2(Protocol): + def __call__(self, arg1: _Marshallable, arg2: _Marshallable, /) -> _Marshallable: ... + +@type_check_only +class _DispatchArity3(Protocol): + def __call__(self, arg1: _Marshallable, arg2: _Marshallable, arg3: _Marshallable, /) -> _Marshallable: ... + +@type_check_only +class _DispatchArity4(Protocol): + def __call__( + self, arg1: _Marshallable, arg2: _Marshallable, arg3: _Marshallable, arg4: _Marshallable, / + ) -> _Marshallable: ... + +@type_check_only +class _DispatchArityN(Protocol): + def __call__(self, *args: _Marshallable) -> _Marshallable: ... + +_DispatchProtocol: TypeAlias = ( + _DispatchArity0 | _DispatchArity1 | _DispatchArity2 | _DispatchArity3 | _DispatchArity4 | _DispatchArityN +) + +def resolve_dotted_attribute(obj: Any, attr: str, allow_dotted_names: bool = True) -> Any: ... 
# undocumented +def list_public_methods(obj: Any) -> list[str]: ... # undocumented + +class SimpleXMLRPCDispatcher: # undocumented + funcs: dict[str, _DispatchProtocol] + instance: Any | None + allow_none: bool + encoding: str + use_builtin_types: bool + def __init__(self, allow_none: bool = False, encoding: str | None = None, use_builtin_types: bool = False) -> None: ... + def register_instance(self, instance: Any, allow_dotted_names: bool = False) -> None: ... + def register_function(self, function: _DispatchProtocol | None = None, name: str | None = None) -> Callable[..., Any]: ... + def register_introspection_functions(self) -> None: ... + def register_multicall_functions(self) -> None: ... + def _marshaled_dispatch( + self, + data: str | ReadableBuffer, + dispatch_method: Callable[[str, tuple[_Marshallable, ...]], Fault | tuple[_Marshallable, ...]] | None = None, + path: Any | None = None, + ) -> str: ... # undocumented + def system_listMethods(self) -> list[str]: ... # undocumented + def system_methodSignature(self, method_name: str) -> str: ... # undocumented + def system_methodHelp(self, method_name: str) -> str: ... # undocumented + def system_multicall(self, call_list: list[dict[str, _Marshallable]]) -> list[_Marshallable]: ... # undocumented + def _dispatch(self, method: str, params: Iterable[_Marshallable]) -> _Marshallable: ... # undocumented + +class SimpleXMLRPCRequestHandler(http.server.BaseHTTPRequestHandler): + rpc_paths: ClassVar[tuple[str, ...]] + encode_threshold: int # undocumented + aepattern: Pattern[str] # undocumented + def accept_encodings(self) -> dict[str, float]: ... + def is_rpc_path_valid(self) -> bool: ... + def do_POST(self) -> None: ... + def decode_request_content(self, data: bytes) -> bytes | None: ... + def report_404(self) -> None: ... + +class SimpleXMLRPCServer(socketserver.TCPServer, SimpleXMLRPCDispatcher): + _send_traceback_handler: bool + def __init__( + self, + addr: tuple[str, int], + requestHandler: type[SimpleXMLRPCRequestHandler] = ..., + logRequests: bool = True, + allow_none: bool = False, + encoding: str | None = None, + bind_and_activate: bool = True, + use_builtin_types: bool = False, + ) -> None: ... + +class MultiPathXMLRPCServer(SimpleXMLRPCServer): # undocumented + dispatchers: dict[str, SimpleXMLRPCDispatcher] + def __init__( + self, + addr: tuple[str, int], + requestHandler: type[SimpleXMLRPCRequestHandler] = ..., + logRequests: bool = True, + allow_none: bool = False, + encoding: str | None = None, + bind_and_activate: bool = True, + use_builtin_types: bool = False, + ) -> None: ... + def add_dispatcher(self, path: str, dispatcher: SimpleXMLRPCDispatcher) -> SimpleXMLRPCDispatcher: ... + def get_dispatcher(self, path: str) -> SimpleXMLRPCDispatcher: ... + +class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher): + def __init__(self, allow_none: bool = False, encoding: str | None = None, use_builtin_types: bool = False) -> None: ... + def handle_xmlrpc(self, request_text: str) -> None: ... + def handle_get(self) -> None: ... + def handle_request(self, request_text: str | None = None) -> None: ... + +class ServerHTMLDoc(pydoc.HTMLDoc): # undocumented + def docroutine( # type: ignore[override] + self, + object: object, + name: str, + mod: str | None = None, + funcs: Mapping[str, str] = {}, + classes: Mapping[str, str] = {}, + methods: Mapping[str, str] = {}, + cl: type | None = None, + ) -> str: ... + def docserver(self, server_name: str, package_documentation: str, methods: dict[str, str]) -> str: ... 
+ +class XMLRPCDocGenerator: # undocumented + server_name: str + server_documentation: str + server_title: str + def set_server_title(self, server_title: str) -> None: ... + def set_server_name(self, server_name: str) -> None: ... + def set_server_documentation(self, server_documentation: str) -> None: ... + def generate_html_documentation(self) -> str: ... + +class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler): + def do_GET(self) -> None: ... + +class DocXMLRPCServer(SimpleXMLRPCServer, XMLRPCDocGenerator): + def __init__( + self, + addr: tuple[str, int], + requestHandler: type[SimpleXMLRPCRequestHandler] = ..., + logRequests: bool = True, + allow_none: bool = False, + encoding: str | None = None, + bind_and_activate: bool = True, + use_builtin_types: bool = False, + ) -> None: ... + +class DocCGIXMLRPCRequestHandler(CGIXMLRPCRequestHandler, XMLRPCDocGenerator): + def __init__(self) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xxlimited.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xxlimited.pyi new file mode 100644 index 0000000..78a50b8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/xxlimited.pyi @@ -0,0 +1,24 @@ +import sys +from typing import Any, ClassVar, final + +class Str(str): ... + +@final +class Xxo: + def demo(self) -> None: ... + if sys.version_info >= (3, 11) and sys.platform != "win32": + x_exports: int + +def foo(i: int, j: int, /) -> Any: ... +def new() -> Xxo: ... + +if sys.version_info >= (3, 10): + class Error(Exception): ... + +else: + class error(Exception): ... + + class Null: + __hash__: ClassVar[None] # type: ignore[assignment] + + def roj(b: Any, /) -> None: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zipapp.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zipapp.pyi new file mode 100644 index 0000000..c7cf170 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zipapp.pyi @@ -0,0 +1,20 @@ +from collections.abc import Callable +from pathlib import Path +from typing import BinaryIO +from typing_extensions import TypeAlias + +__all__ = ["ZipAppError", "create_archive", "get_interpreter"] + +_Path: TypeAlias = str | Path | BinaryIO + +class ZipAppError(ValueError): ... + +def create_archive( + source: _Path, + target: _Path | None = None, + interpreter: str | None = None, + main: str | None = None, + filter: Callable[[Path], bool] | None = None, + compressed: bool = False, +) -> None: ... +def get_interpreter(archive: _Path) -> str: ... 
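A minimal usage sketch of the zipapp API declared in the stub above, assuming a hypothetical "myapp/" package directory that contains a __main__.py (paths are illustrative only):

import zipapp

# Bundle the hypothetical "myapp/" directory into a self-contained archive.
# create_archive() expects the source directory to contain __main__.py
# unless an explicit main="module:callable" entry point is supplied.
zipapp.create_archive(
    source="myapp",
    target="myapp.pyz",
    interpreter="/usr/bin/env python3",  # shebang written into the archive
    compressed=True,  # deflate entries instead of storing them uncompressed
)

# get_interpreter() reads back the shebang recorded in an existing archive.
print(zipapp.get_interpreter("myapp.pyz"))

The resulting myapp.pyz can then be run with "python myapp.pyz" (or directly on POSIX systems, thanks to the shebang).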
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zipfile/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zipfile/__init__.pyi new file mode 100644 index 0000000..e573d04 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zipfile/__init__.pyi @@ -0,0 +1,415 @@ +import io +import sys +from _typeshed import SizedBuffer, StrOrBytesPath, StrPath +from collections.abc import Callable, Iterable, Iterator +from io import TextIOWrapper +from os import PathLike +from types import TracebackType +from typing import IO, Final, Literal, Protocol, overload, type_check_only +from typing_extensions import Self, TypeAlias + +__all__ = [ + "BadZipFile", + "BadZipfile", + "Path", + "error", + "ZIP_STORED", + "ZIP_DEFLATED", + "ZIP_BZIP2", + "ZIP_LZMA", + "is_zipfile", + "ZipInfo", + "ZipFile", + "PyZipFile", + "LargeZipFile", +] + +if sys.version_info >= (3, 14): + __all__ += ["ZIP_ZSTANDARD"] + +# TODO: use TypeAlias for these two when mypy bugs are fixed +# https://github.com/python/mypy/issues/16581 +_DateTuple = tuple[int, int, int, int, int, int] # noqa: Y026 +_ZipFileMode = Literal["r", "w", "x", "a"] # noqa: Y026 + +_ReadWriteMode: TypeAlias = Literal["r", "w"] + +class BadZipFile(Exception): ... + +BadZipfile = BadZipFile +error = BadZipfile + +class LargeZipFile(Exception): ... + +@type_check_only +class _ZipStream(Protocol): + def read(self, n: int, /) -> bytes: ... + # The following methods are optional: + # def seekable(self) -> bool: ... + # def tell(self) -> int: ... + # def seek(self, n: int, /) -> object: ... + +# Stream shape as required by _EndRecData() and _EndRecData64(). +@type_check_only +class _SupportsReadSeekTell(Protocol): + def read(self, n: int = ..., /) -> bytes: ... + def seek(self, cookie: int, whence: int, /) -> object: ... + def tell(self) -> int: ... + +@type_check_only +class _ClosableZipStream(_ZipStream, Protocol): + def close(self) -> object: ... + +class ZipExtFile(io.BufferedIOBase): + MAX_N: int + MIN_READ_SIZE: int + MAX_SEEK_READ: int + newlines: list[bytes] | None + mode: _ReadWriteMode + name: str + @overload + def __init__( + self, fileobj: _ClosableZipStream, mode: _ReadWriteMode, zipinfo: ZipInfo, pwd: bytes | None, close_fileobj: Literal[True] + ) -> None: ... + @overload + def __init__( + self, + fileobj: _ClosableZipStream, + mode: _ReadWriteMode, + zipinfo: ZipInfo, + pwd: bytes | None = None, + *, + close_fileobj: Literal[True], + ) -> None: ... + @overload + def __init__( + self, + fileobj: _ZipStream, + mode: _ReadWriteMode, + zipinfo: ZipInfo, + pwd: bytes | None = None, + close_fileobj: Literal[False] = False, + ) -> None: ... + def read(self, n: int | None = -1) -> bytes: ... + def readline(self, limit: int = -1) -> bytes: ... # type: ignore[override] + def peek(self, n: int = 1) -> bytes: ... + def read1(self, n: int | None) -> bytes: ... # type: ignore[override] + def seek(self, offset: int, whence: int = 0) -> int: ... + +@type_check_only +class _Writer(Protocol): + def write(self, s: str, /) -> object: ... + +@type_check_only +class _ZipReadable(Protocol): + def seek(self, offset: int, whence: int = 0, /) -> int: ... + def read(self, n: int = -1, /) -> bytes: ... + +@type_check_only +class _ZipTellable(Protocol): + def tell(self) -> int: ... + +@type_check_only +class _ZipReadableTellable(_ZipReadable, _ZipTellable, Protocol): ... + +@type_check_only +class _ZipWritable(Protocol): + def flush(self) -> None: ... + def close(self) -> None: ... 
+ def write(self, b: bytes, /) -> int: ... + +class ZipFile: + filename: str | None + debug: int + comment: bytes + filelist: list[ZipInfo] + fp: IO[bytes] | None + NameToInfo: dict[str, ZipInfo] + start_dir: int # undocumented + compression: int # undocumented + compresslevel: int | None # undocumented + mode: _ZipFileMode # undocumented + pwd: bytes | None # undocumented + # metadata_encoding is new in 3.11 + if sys.version_info >= (3, 11): + @overload + def __init__( + self, + file: StrPath | IO[bytes], + mode: _ZipFileMode = "r", + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, + *, + strict_timestamps: bool = True, + metadata_encoding: str | None = None, + ) -> None: ... + # metadata_encoding is only allowed for read mode + @overload + def __init__( + self, + file: StrPath | _ZipReadable, + mode: Literal["r"] = "r", + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, + *, + strict_timestamps: bool = True, + metadata_encoding: str | None = None, + ) -> None: ... + @overload + def __init__( + self, + file: StrPath | _ZipWritable, + mode: Literal["w", "x"], + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, + *, + strict_timestamps: bool = True, + metadata_encoding: None = None, + ) -> None: ... + @overload + def __init__( + self, + file: StrPath | _ZipReadableTellable, + mode: Literal["a"], + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, + *, + strict_timestamps: bool = True, + metadata_encoding: None = None, + ) -> None: ... + else: + @overload + def __init__( + self, + file: StrPath | IO[bytes], + mode: _ZipFileMode = "r", + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, + *, + strict_timestamps: bool = True, + ) -> None: ... + @overload + def __init__( + self, + file: StrPath | _ZipReadable, + mode: Literal["r"] = "r", + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, + *, + strict_timestamps: bool = True, + ) -> None: ... + @overload + def __init__( + self, + file: StrPath | _ZipWritable, + mode: Literal["w", "x"], + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, + *, + strict_timestamps: bool = True, + ) -> None: ... + @overload + def __init__( + self, + file: StrPath | _ZipReadableTellable, + mode: Literal["a"], + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, + *, + strict_timestamps: bool = True, + ) -> None: ... + + def __enter__(self) -> Self: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def close(self) -> None: ... + def getinfo(self, name: str) -> ZipInfo: ... + def infolist(self) -> list[ZipInfo]: ... + def namelist(self) -> list[str]: ... + def open( + self, name: str | ZipInfo, mode: _ReadWriteMode = "r", pwd: bytes | None = None, *, force_zip64: bool = False + ) -> IO[bytes]: ... + def extract(self, member: str | ZipInfo, path: StrPath | None = None, pwd: bytes | None = None) -> str: ... + def extractall( + self, path: StrPath | None = None, members: Iterable[str | ZipInfo] | None = None, pwd: bytes | None = None + ) -> None: ... + def printdir(self, file: _Writer | None = None) -> None: ... + def setpassword(self, pwd: bytes) -> None: ... + def read(self, name: str | ZipInfo, pwd: bytes | None = None) -> bytes: ... + def testzip(self) -> str | None: ... 
+ def write( + self, + filename: StrPath, + arcname: StrPath | None = None, + compress_type: int | None = None, + compresslevel: int | None = None, + ) -> None: ... + def writestr( + self, + zinfo_or_arcname: str | ZipInfo, + data: SizedBuffer | str, + compress_type: int | None = None, + compresslevel: int | None = None, + ) -> None: ... + if sys.version_info >= (3, 11): + def mkdir(self, zinfo_or_directory_name: str | ZipInfo, mode: int = 0o777) -> None: ... + + def __del__(self) -> None: ... + +class PyZipFile(ZipFile): + def __init__( + self, file: str | IO[bytes], mode: _ZipFileMode = "r", compression: int = 0, allowZip64: bool = True, optimize: int = -1 + ) -> None: ... + def writepy(self, pathname: str, basename: str = "", filterfunc: Callable[[str], bool] | None = None) -> None: ... + +class ZipInfo: + __slots__ = ( + "orig_filename", + "filename", + "date_time", + "compress_type", + "compress_level", + "comment", + "extra", + "create_system", + "create_version", + "extract_version", + "reserved", + "flag_bits", + "volume", + "internal_attr", + "external_attr", + "header_offset", + "CRC", + "compress_size", + "file_size", + "_raw_time", + "_end_offset", + ) + filename: str + date_time: _DateTuple + compress_type: int + comment: bytes + extra: bytes + create_system: int + create_version: int + extract_version: int + reserved: int + flag_bits: int + volume: int + internal_attr: int + external_attr: int + header_offset: int + CRC: int + compress_size: int + file_size: int + orig_filename: str # undocumented + if sys.version_info >= (3, 13): + compress_level: int | None + + def __init__(self, filename: str = "NoName", date_time: _DateTuple = (1980, 1, 1, 0, 0, 0)) -> None: ... + @classmethod + def from_file(cls, filename: StrPath, arcname: StrPath | None = None, *, strict_timestamps: bool = True) -> Self: ... + def is_dir(self) -> bool: ... + def FileHeader(self, zip64: bool | None = None) -> bytes: ... + +if sys.version_info >= (3, 12): + from zipfile._path import CompleteDirs as CompleteDirs, Path as Path + +else: + class CompleteDirs(ZipFile): + def resolve_dir(self, name: str) -> str: ... + @overload + @classmethod + def make(cls, source: ZipFile) -> CompleteDirs: ... + @overload + @classmethod + def make(cls, source: StrPath | IO[bytes]) -> Self: ... + + class Path: + root: CompleteDirs + at: str + def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: ... + @property + def name(self) -> str: ... + @property + def parent(self) -> PathLike[str]: ... # undocumented + if sys.version_info >= (3, 10): + @property + def filename(self) -> PathLike[str]: ... # undocumented + if sys.version_info >= (3, 11): + @property + def suffix(self) -> str: ... + @property + def suffixes(self) -> list[str]: ... + @property + def stem(self) -> str: ... + + @overload + def open( + self, + mode: Literal["r", "w"] = "r", + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + line_buffering: bool = ..., + write_through: bool = ..., + *, + pwd: bytes | None = None, + ) -> TextIOWrapper: ... + @overload + def open(self, mode: Literal["rb", "wb"], *, pwd: bytes | None = None) -> IO[bytes]: ... + + if sys.version_info >= (3, 10): + def iterdir(self) -> Iterator[Self]: ... + else: + def iterdir(self) -> Iterator[Path]: ... + + def is_dir(self) -> bool: ... + def is_file(self) -> bool: ... + def exists(self) -> bool: ... 
+ def read_text( + self, + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + line_buffering: bool = ..., + write_through: bool = ..., + ) -> str: ... + def read_bytes(self) -> bytes: ... + if sys.version_info >= (3, 10): + def joinpath(self, *other: StrPath) -> Path: ... + else: + def joinpath(self, add: StrPath) -> Path: ... # undocumented + + def __truediv__(self, add: StrPath) -> Path: ... + +def is_zipfile(filename: StrOrBytesPath | _SupportsReadSeekTell) -> bool: ... + +ZIP64_LIMIT: Final[int] +ZIP_FILECOUNT_LIMIT: Final[int] +ZIP_MAX_COMMENT: Final[int] + +ZIP_STORED: Final = 0 +ZIP_DEFLATED: Final = 8 +ZIP_BZIP2: Final = 12 +ZIP_LZMA: Final = 14 +if sys.version_info >= (3, 14): + ZIP_ZSTANDARD: Final = 93 + +DEFAULT_VERSION: Final[int] +ZIP64_VERSION: Final[int] +BZIP2_VERSION: Final[int] +LZMA_VERSION: Final[int] +if sys.version_info >= (3, 14): + ZSTANDARD_VERSION: Final[int] +MAX_EXTRACT_VERSION: Final[int] diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zipfile/_path/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zipfile/_path/__init__.pyi new file mode 100644 index 0000000..4c7b39e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zipfile/_path/__init__.pyi @@ -0,0 +1,83 @@ +import sys +from _typeshed import StrPath +from collections.abc import Iterator, Sequence +from io import TextIOWrapper +from os import PathLike +from typing import IO, Literal, TypeVar, overload +from typing_extensions import Self +from zipfile import ZipFile + +_ZF = TypeVar("_ZF", bound=ZipFile) + +if sys.version_info >= (3, 12): + __all__ = ["Path"] + + class InitializedState: + def __init__(self, *args: object, **kwargs: object) -> None: ... + def __getstate__(self) -> tuple[list[object], dict[object, object]]: ... + def __setstate__(self, state: Sequence[tuple[list[object], dict[object, object]]]) -> None: ... + + class CompleteDirs(InitializedState, ZipFile): + def resolve_dir(self, name: str) -> str: ... + @overload + @classmethod + def make(cls, source: ZipFile) -> CompleteDirs: ... + @overload + @classmethod + def make(cls, source: StrPath | IO[bytes]) -> Self: ... + if sys.version_info >= (3, 13): + @classmethod + def inject(cls, zf: _ZF) -> _ZF: ... + + class Path: + root: CompleteDirs + at: str + def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: ... + @property + def name(self) -> str: ... + @property + def parent(self) -> PathLike[str]: ... # undocumented + @property + def filename(self) -> PathLike[str]: ... # undocumented + @property + def suffix(self) -> str: ... + @property + def suffixes(self) -> list[str]: ... + @property + def stem(self) -> str: ... + @overload + def open( + self, + mode: Literal["r", "w"] = "r", + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + line_buffering: bool = ..., + write_through: bool = ..., + *, + pwd: bytes | None = None, + ) -> TextIOWrapper: ... + @overload + def open(self, mode: Literal["rb", "wb"], *, pwd: bytes | None = None) -> IO[bytes]: ... + def iterdir(self) -> Iterator[Self]: ... + def is_dir(self) -> bool: ... + def is_file(self) -> bool: ... + def exists(self) -> bool: ... + def read_text( + self, + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + line_buffering: bool = ..., + write_through: bool = ..., + ) -> str: ... + def read_bytes(self) -> bytes: ... + def joinpath(self, *other: StrPath) -> Path: ... 
+ def glob(self, pattern: str) -> Iterator[Self]: ... + def rglob(self, pattern: str) -> Iterator[Self]: ... + def is_symlink(self) -> Literal[False]: ... + def relative_to(self, other: Path, *extra: StrPath) -> str: ... + def match(self, path_pattern: str) -> bool: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def __truediv__(self, add: StrPath) -> Path: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zipfile/_path/glob.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zipfile/_path/glob.pyi new file mode 100644 index 0000000..f6a661b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zipfile/_path/glob.pyi @@ -0,0 +1,26 @@ +import sys +from collections.abc import Iterator +from re import Match + +if sys.version_info >= (3, 13): + class Translator: + if sys.platform == "win32": + def __init__(self, seps: str = "\\/") -> None: ... + else: + def __init__(self, seps: str = "/") -> None: ... + + def translate(self, pattern: str) -> str: ... + def extend(self, pattern: str) -> str: ... + def match_dirs(self, pattern: str) -> str: ... + def translate_core(self, pattern: str) -> str: ... + def replace(self, match: Match[str]) -> str: ... + def restrict_rglob(self, pattern: str) -> None: ... + def star_not_empty(self, pattern: str) -> str: ... + +else: + def translate(pattern: str) -> str: ... + def match_dirs(pattern: str) -> str: ... + def translate_core(pattern: str) -> str: ... + def replace(match: Match[str]) -> str: ... + +def separate(pattern: str) -> Iterator[Match[str]]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zipimport.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zipimport.pyi new file mode 100644 index 0000000..22af3c2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zipimport.pyi @@ -0,0 +1,59 @@ +import sys +from _typeshed import StrOrBytesPath +from importlib.machinery import ModuleSpec +from types import CodeType, ModuleType +from typing_extensions import deprecated + +if sys.version_info >= (3, 10): + from importlib.readers import ZipReader +else: + from importlib.abc import ResourceReader + +if sys.version_info >= (3, 10): + from _frozen_importlib_external import _LoaderBasics +else: + _LoaderBasics = object + +__all__ = ["ZipImportError", "zipimporter"] + +class ZipImportError(ImportError): ... + +class zipimporter(_LoaderBasics): + archive: str + prefix: str + if sys.version_info >= (3, 11): + def __init__(self, path: str) -> None: ... + else: + def __init__(self, path: StrOrBytesPath) -> None: ... + + if sys.version_info < (3, 12): + if sys.version_info >= (3, 10): + @deprecated("Deprecated since Python 3.10; removed in Python 3.12. Use `find_spec()` instead.") + def find_loader(self, fullname: str, path: str | None = None) -> tuple[zipimporter | None, list[str]]: ... + @deprecated("Deprecated since Python 3.10; removed in Python 3.12. Use `find_spec()` instead.") + def find_module(self, fullname: str, path: str | None = None) -> zipimporter | None: ... + else: + def find_loader(self, fullname: str, path: str | None = None) -> tuple[zipimporter | None, list[str]]: ... + def find_module(self, fullname: str, path: str | None = None) -> zipimporter | None: ... + + def get_code(self, fullname: str) -> CodeType: ... + def get_data(self, pathname: str) -> bytes: ... + def get_filename(self, fullname: str) -> str: ... 
+ if sys.version_info >= (3, 14): + def get_resource_reader(self, fullname: str) -> ZipReader: ... # undocumented + elif sys.version_info >= (3, 10): + def get_resource_reader(self, fullname: str) -> ZipReader | None: ... # undocumented + else: + def get_resource_reader(self, fullname: str) -> ResourceReader | None: ... # undocumented + + def get_source(self, fullname: str) -> str | None: ... + def is_package(self, fullname: str) -> bool: ... + if sys.version_info >= (3, 10): + @deprecated("Deprecated since Python 3.10; removed in Python 3.15. Use `exec_module()` instead.") + def load_module(self, fullname: str) -> ModuleType: ... + def exec_module(self, module: ModuleType) -> None: ... + def create_module(self, spec: ModuleSpec) -> None: ... + def find_spec(self, fullname: str, target: ModuleType | None = None) -> ModuleSpec | None: ... + def invalidate_caches(self) -> None: ... + else: + def load_module(self, fullname: str) -> ModuleType: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zlib.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zlib.pyi new file mode 100644 index 0000000..d5998ca --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zlib.pyi @@ -0,0 +1,74 @@ +import sys +from _typeshed import ReadableBuffer +from typing import Any, Final, final, type_check_only +from typing_extensions import Self + +DEFLATED: Final = 8 +DEF_MEM_LEVEL: Final[int] +DEF_BUF_SIZE: Final = 16384 +MAX_WBITS: Final[int] +ZLIB_VERSION: Final[str] +ZLIB_RUNTIME_VERSION: Final[str] +Z_NO_COMPRESSION: Final = 0 +Z_PARTIAL_FLUSH: Final = 1 +Z_BEST_COMPRESSION: Final = 9 +Z_BEST_SPEED: Final = 1 +Z_BLOCK: Final = 5 +Z_DEFAULT_COMPRESSION: Final = -1 +Z_DEFAULT_STRATEGY: Final = 0 +Z_FILTERED: Final = 1 +Z_FINISH: Final = 4 +Z_FIXED: Final = 4 +Z_FULL_FLUSH: Final = 3 +Z_HUFFMAN_ONLY: Final = 2 +Z_NO_FLUSH: Final = 0 +Z_RLE: Final = 3 +Z_SYNC_FLUSH: Final = 2 +Z_TREES: Final = 6 + +if sys.version_info >= (3, 14): + # Available when zlib was built with zlib-ng + ZLIBNG_VERSION: Final[str] + +class error(Exception): ... + +# This class is not exposed at runtime. It calls itself zlib.Compress. +@final +@type_check_only +class _Compress: + def __copy__(self) -> Self: ... + def __deepcopy__(self, memo: Any, /) -> Self: ... + def compress(self, data: ReadableBuffer, /) -> bytes: ... + def flush(self, mode: int = 4, /) -> bytes: ... + def copy(self) -> _Compress: ... + +# This class is not exposed at runtime. It calls itself zlib.Decompress. +@final +@type_check_only +class _Decompress: + @property + def unused_data(self) -> bytes: ... + @property + def unconsumed_tail(self) -> bytes: ... + @property + def eof(self) -> bool: ... + def __copy__(self) -> Self: ... + def __deepcopy__(self, memo: Any, /) -> Self: ... + def decompress(self, data: ReadableBuffer, /, max_length: int = 0) -> bytes: ... + def flush(self, length: int = 16384, /) -> bytes: ... + def copy(self) -> _Decompress: ... + +def adler32(data: ReadableBuffer, value: int = 1, /) -> int: ... + +if sys.version_info >= (3, 11): + def compress(data: ReadableBuffer, /, level: int = -1, wbits: int = 15) -> bytes: ... + +else: + def compress(data: ReadableBuffer, /, level: int = -1) -> bytes: ... + +def compressobj( + level: int = -1, method: int = 8, wbits: int = 15, memLevel: int = 8, strategy: int = 0, zdict: ReadableBuffer | None = None +) -> _Compress: ... +def crc32(data: ReadableBuffer, value: int = 0, /) -> int: ... 
+def decompress(data: ReadableBuffer, /, wbits: int = 15, bufsize: int = 16384) -> bytes: ... +def decompressobj(wbits: int = 15, zdict: ReadableBuffer = b"") -> _Decompress: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zoneinfo/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zoneinfo/__init__.pyi new file mode 100644 index 0000000..b7433f8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zoneinfo/__init__.pyi @@ -0,0 +1,35 @@ +import sys +from collections.abc import Iterable +from datetime import datetime, timedelta, tzinfo +from typing_extensions import Self, disjoint_base +from zoneinfo._common import ZoneInfoNotFoundError as ZoneInfoNotFoundError, _IOBytes +from zoneinfo._tzpath import ( + TZPATH as TZPATH, + InvalidTZPathWarning as InvalidTZPathWarning, + available_timezones as available_timezones, + reset_tzpath as reset_tzpath, +) + +__all__ = ["ZoneInfo", "reset_tzpath", "available_timezones", "TZPATH", "ZoneInfoNotFoundError", "InvalidTZPathWarning"] + +@disjoint_base +class ZoneInfo(tzinfo): + @property + def key(self) -> str: ... + def __new__(cls, key: str) -> Self: ... + @classmethod + def no_cache(cls, key: str) -> Self: ... + if sys.version_info >= (3, 12): + @classmethod + def from_file(cls, file_obj: _IOBytes, /, key: str | None = None) -> Self: ... + else: + @classmethod + def from_file(cls, fobj: _IOBytes, /, key: str | None = None) -> Self: ... + + @classmethod + def clear_cache(cls, *, only_keys: Iterable[str] | None = None) -> None: ... + def tzname(self, dt: datetime | None, /) -> str | None: ... + def utcoffset(self, dt: datetime | None, /) -> timedelta | None: ... + def dst(self, dt: datetime | None, /) -> timedelta | None: ... + +def __dir__() -> list[str]: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zoneinfo/_common.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zoneinfo/_common.pyi new file mode 100644 index 0000000..e6d2d83 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zoneinfo/_common.pyi @@ -0,0 +1,14 @@ +import io +from typing import Any, Protocol, type_check_only + +@type_check_only +class _IOBytes(Protocol): + def read(self, size: int, /) -> bytes: ... + def seek(self, size: int, whence: int = ..., /) -> Any: ... + +def load_tzdata(key: str) -> io.BufferedReader: ... +def load_data( + fobj: _IOBytes, +) -> tuple[tuple[int, ...], tuple[int, ...], tuple[int, ...], tuple[int, ...], tuple[str, ...], bytes | None]: ... + +class ZoneInfoNotFoundError(KeyError): ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zoneinfo/_tzpath.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zoneinfo/_tzpath.pyi new file mode 100644 index 0000000..0ef78d0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stdlib/zoneinfo/_tzpath.pyi @@ -0,0 +1,13 @@ +from _typeshed import StrPath +from collections.abc import Sequence + +# Note: Both here and in clear_cache, the types allow the use of `str` where +# a sequence of strings is required. This should be remedied if a solution +# to this typing bug is found: https://github.com/python/typing/issues/256 +def reset_tzpath(to: Sequence[StrPath] | None = None) -> None: ... +def find_tzfile(key: str) -> str | None: ... +def available_timezones() -> set[str]: ... + +TZPATH: tuple[str, ...] + +class InvalidTZPathWarning(RuntimeWarning): ... 
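A minimal usage sketch of the zoneinfo API declared above, assuming IANA time zone data is available (either system tzdata or the first-party tzdata package):

from datetime import datetime
from zoneinfo import ZoneInfo, available_timezones

# Attach an IANA zone to a datetime; ZoneInfo instances are cached by key.
dt = datetime(2026, 2, 9, 21, 51, tzinfo=ZoneInfo("America/Los_Angeles"))
print(dt.utcoffset())  # -1 day, 16:00:00 (i.e. UTC-8, PST in February)

# available_timezones() enumerates the keys discoverable via TZPATH / tzdata.
print("UTC" in available_timezones())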
diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stubs/librt/librt/__init__.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stubs/librt/librt/__init__.pyi new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stubs/librt/librt/base64.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stubs/librt/librt/base64.pyi new file mode 100644 index 0000000..1cea838 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stubs/librt/librt/base64.pyi @@ -0,0 +1,2 @@ +def b64encode(s: bytes) -> bytes: ... +def b64decode(s: bytes | str) -> bytes: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stubs/librt/librt/internal.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stubs/librt/librt/internal.pyi new file mode 100644 index 0000000..2969ccf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stubs/librt/librt/internal.pyi @@ -0,0 +1,21 @@ +from mypy_extensions import u8 + +class ReadBuffer: + def __init__(self, source: bytes) -> None: ... + +class WriteBuffer: + def getvalue(self) -> bytes: ... + +def write_bool(data: WriteBuffer, value: bool) -> None: ... +def read_bool(data: ReadBuffer) -> bool: ... +def write_str(data: WriteBuffer, value: str) -> None: ... +def read_str(data: ReadBuffer) -> str: ... +def write_bytes(data: WriteBuffer, value: bytes) -> None: ... +def read_bytes(data: ReadBuffer) -> bytes: ... +def write_float(data: WriteBuffer, value: float) -> None: ... +def read_float(data: ReadBuffer) -> float: ... +def write_int(data: WriteBuffer, value: int) -> None: ... +def read_int(data: ReadBuffer) -> int: ... +def write_tag(data: WriteBuffer, value: u8) -> None: ... +def read_tag(data: ReadBuffer) -> u8: ... +def cache_version() -> u8: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typeshed/stubs/mypy-extensions/mypy_extensions.pyi b/.venv/lib/python3.12/site-packages/mypy/typeshed/stubs/mypy-extensions/mypy_extensions.pyi new file mode 100644 index 0000000..b6358a0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typeshed/stubs/mypy-extensions/mypy_extensions.pyi @@ -0,0 +1,218 @@ +# These stubs are forked from typeshed, since we use some definitions that only make +# sense in the context of mypy/mypyc (in particular, native int types such as i64). + +import abc +import sys +from _collections_abc import dict_items, dict_keys, dict_values +from _typeshed import IdentityFunction, Self +from collections.abc import Mapping +from typing import Any, ClassVar, Generic, SupportsInt, TypeVar, overload, type_check_only +from typing_extensions import Never, SupportsIndex +from _typeshed import ReadableBuffer, SupportsTrunc + +_T = TypeVar("_T") +_U = TypeVar("_U") + +# Internal mypy fallback type for all typed dicts (does not exist at runtime) +# N.B. Keep this mostly in sync with typing(_extensions)._TypedDict +@type_check_only +class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): + __total__: ClassVar[bool] + # Unlike typing(_extensions).TypedDict, + # subclasses of mypy_extensions.TypedDict do NOT have the __required_keys__ and __optional_keys__ ClassVars + def copy(self: Self) -> Self: ... + # Using Never so that only calls using mypy plugin hook that specialize the signature + # can go through. + def setdefault(self, k: Never, default: object) -> object: ... + # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. + def pop(self, k: Never, default: _T = ...) -> object: ... 
# pyright: ignore[reportInvalidTypeVarUse] + def update(self: Self, __m: Self) -> None: ... + def items(self) -> dict_items[str, object]: ... + def keys(self) -> dict_keys[str, object]: ... + def values(self) -> dict_values[str, object]: ... + def __delitem__(self, k: Never) -> None: ... + if sys.version_info >= (3, 9): + def __or__(self: Self, __other: Self) -> Self: ... + def __ior__(self: Self, __other: Self) -> Self: ... + +def TypedDict(typename: str, fields: dict[str, type[Any]], total: bool = ...) -> type[dict[str, Any]]: ... +@overload +def Arg(type: _T, name: str | None = ...) -> _T: ... +@overload +def Arg(*, name: str | None = ...) -> Any: ... +@overload +def DefaultArg(type: _T, name: str | None = ...) -> _T: ... +@overload +def DefaultArg(*, name: str | None = ...) -> Any: ... +@overload +def NamedArg(type: _T, name: str | None = ...) -> _T: ... +@overload +def NamedArg(*, name: str | None = ...) -> Any: ... +@overload +def DefaultNamedArg(type: _T, name: str | None = ...) -> _T: ... +@overload +def DefaultNamedArg(*, name: str | None = ...) -> Any: ... +@overload +def VarArg(type: _T) -> _T: ... +@overload +def VarArg() -> Any: ... +@overload +def KwArg(type: _T) -> _T: ... +@overload +def KwArg() -> Any: ... + +# Return type that indicates a function does not return. +# Deprecated: Use typing.NoReturn instead. +class NoReturn: ... + +# This is consistent with implementation. Usage intends for this as +# a class decorator, but mypy does not support type[_T] for abstract +# classes until this issue is resolved, https://github.com/python/mypy/issues/4717. +def trait(cls: _T) -> _T: ... +def mypyc_attr(*attrs: str, **kwattrs: object) -> IdentityFunction: ... + +class FlexibleAlias(Generic[_T, _U]): ... + +# Native int types such as i64 are magical and support implicit +# coercions to/from int using special logic in mypy. We generally only +# include operations here for which we have specialized primitives. + +class i64: + @overload + def __new__(cls, __x: str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc = ...) -> i64: ... + @overload + def __new__(cls, __x: str | bytes | bytearray, base: SupportsIndex) -> i64: ... + + def __add__(self, x: i64) -> i64: ... + def __radd__(self, x: i64) -> i64: ... + def __sub__(self, x: i64) -> i64: ... + def __rsub__(self, x: i64) -> i64: ... + def __mul__(self, x: i64) -> i64: ... + def __rmul__(self, x: i64) -> i64: ... + def __floordiv__(self, x: i64) -> i64: ... + def __rfloordiv__(self, x: i64) -> i64: ... + def __mod__(self, x: i64) -> i64: ... + def __rmod__(self, x: i64) -> i64: ... + def __and__(self, x: i64) -> i64: ... + def __rand__(self, x: i64) -> i64: ... + def __or__(self, x: i64) -> i64: ... + def __ror__(self, x: i64) -> i64: ... + def __xor__(self, x: i64) -> i64: ... + def __rxor__(self, x: i64) -> i64: ... + def __lshift__(self, x: i64) -> i64: ... + def __rlshift__(self, x: i64) -> i64: ... + def __rshift__(self, x: i64) -> i64: ... + def __rrshift__(self, x: i64) -> i64: ... + def __neg__(self) -> i64: ... + def __invert__(self) -> i64: ... + def __pos__(self) -> i64: ... + def __lt__(self, x: i64) -> bool: ... + def __le__(self, x: i64) -> bool: ... + def __ge__(self, x: i64) -> bool: ... + def __gt__(self, x: i64) -> bool: ... + def __index__(self) -> int: ... + +class i32: + @overload + def __new__(cls, __x: str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc = ...) -> i32: ... + @overload + def __new__(cls, __x: str | bytes | bytearray, base: SupportsIndex) -> i32: ... 
+ + def __add__(self, x: i32) -> i32: ... + def __radd__(self, x: i32) -> i32: ... + def __sub__(self, x: i32) -> i32: ... + def __rsub__(self, x: i32) -> i32: ... + def __mul__(self, x: i32) -> i32: ... + def __rmul__(self, x: i32) -> i32: ... + def __floordiv__(self, x: i32) -> i32: ... + def __rfloordiv__(self, x: i32) -> i32: ... + def __mod__(self, x: i32) -> i32: ... + def __rmod__(self, x: i32) -> i32: ... + def __and__(self, x: i32) -> i32: ... + def __rand__(self, x: i32) -> i32: ... + def __or__(self, x: i32) -> i32: ... + def __ror__(self, x: i32) -> i32: ... + def __xor__(self, x: i32) -> i32: ... + def __rxor__(self, x: i32) -> i32: ... + def __lshift__(self, x: i32) -> i32: ... + def __rlshift__(self, x: i32) -> i32: ... + def __rshift__(self, x: i32) -> i32: ... + def __rrshift__(self, x: i32) -> i32: ... + def __neg__(self) -> i32: ... + def __invert__(self) -> i32: ... + def __pos__(self) -> i32: ... + def __lt__(self, x: i32) -> bool: ... + def __le__(self, x: i32) -> bool: ... + def __ge__(self, x: i32) -> bool: ... + def __gt__(self, x: i32) -> bool: ... + def __index__(self) -> int: ... + +class i16: + @overload + def __new__(cls, __x: str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc = ...) -> i16: ... + @overload + def __new__(cls, __x: str | bytes | bytearray, base: SupportsIndex) -> i16: ... + + def __add__(self, x: i16) -> i16: ... + def __radd__(self, x: i16) -> i16: ... + def __sub__(self, x: i16) -> i16: ... + def __rsub__(self, x: i16) -> i16: ... + def __mul__(self, x: i16) -> i16: ... + def __rmul__(self, x: i16) -> i16: ... + def __floordiv__(self, x: i16) -> i16: ... + def __rfloordiv__(self, x: i16) -> i16: ... + def __mod__(self, x: i16) -> i16: ... + def __rmod__(self, x: i16) -> i16: ... + def __and__(self, x: i16) -> i16: ... + def __rand__(self, x: i16) -> i16: ... + def __or__(self, x: i16) -> i16: ... + def __ror__(self, x: i16) -> i16: ... + def __xor__(self, x: i16) -> i16: ... + def __rxor__(self, x: i16) -> i16: ... + def __lshift__(self, x: i16) -> i16: ... + def __rlshift__(self, x: i16) -> i16: ... + def __rshift__(self, x: i16) -> i16: ... + def __rrshift__(self, x: i16) -> i16: ... + def __neg__(self) -> i16: ... + def __invert__(self) -> i16: ... + def __pos__(self) -> i16: ... + def __lt__(self, x: i16) -> bool: ... + def __le__(self, x: i16) -> bool: ... + def __ge__(self, x: i16) -> bool: ... + def __gt__(self, x: i16) -> bool: ... + def __index__(self) -> int: ... + +class u8: + @overload + def __new__(cls, __x: str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc = ...) -> u8: ... + @overload + def __new__(cls, __x: str | bytes | bytearray, base: SupportsIndex) -> u8: ... + + def __add__(self, x: u8) -> u8: ... + def __radd__(self, x: u8) -> u8: ... + def __sub__(self, x: u8) -> u8: ... + def __rsub__(self, x: u8) -> u8: ... + def __mul__(self, x: u8) -> u8: ... + def __rmul__(self, x: u8) -> u8: ... + def __floordiv__(self, x: u8) -> u8: ... + def __rfloordiv__(self, x: u8) -> u8: ... + def __mod__(self, x: u8) -> u8: ... + def __rmod__(self, x: u8) -> u8: ... + def __and__(self, x: u8) -> u8: ... + def __rand__(self, x: u8) -> u8: ... + def __or__(self, x: u8) -> u8: ... + def __ror__(self, x: u8) -> u8: ... + def __xor__(self, x: u8) -> u8: ... + def __rxor__(self, x: u8) -> u8: ... + def __lshift__(self, x: u8) -> u8: ... + def __rlshift__(self, x: u8) -> u8: ... + def __rshift__(self, x: u8) -> u8: ... + def __rrshift__(self, x: u8) -> u8: ... + def __neg__(self) -> u8: ... 
+ def __invert__(self) -> u8: ... + def __pos__(self) -> u8: ... + def __lt__(self, x: u8) -> bool: ... + def __le__(self, x: u8) -> bool: ... + def __ge__(self, x: u8) -> bool: ... + def __gt__(self, x: u8) -> bool: ... + def __index__(self) -> int: ... diff --git a/.venv/lib/python3.12/site-packages/mypy/typestate.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/typestate.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..f54d1b1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/typestate.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/typestate.py b/.venv/lib/python3.12/site-packages/mypy/typestate.py new file mode 100644 index 0000000..5746186 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typestate.py @@ -0,0 +1,329 @@ +""" +A shared state for all TypeInfos that holds global cache and dependency information, +and potentially other mutable TypeInfo state. This module contains mutable global state. +""" + +from __future__ import annotations + +from typing import Final +from typing_extensions import TypeAlias as _TypeAlias + +from mypy.nodes import VARIANCE_NOT_READY, TypeInfo +from mypy.server.trigger import make_trigger +from mypy.types import Instance, Type, TypeVarId, TypeVarType, get_proper_type + +MAX_NEGATIVE_CACHE_TYPES: Final = 1000 +MAX_NEGATIVE_CACHE_ENTRIES: Final = 10000 + +# Represents that the 'left' instance is a subtype of the 'right' instance +SubtypeRelationship: _TypeAlias = tuple[Instance, Instance] + +# A tuple encoding the specific conditions under which we performed the subtype check. +# (e.g. did we want a proper subtype? A regular subtype while ignoring variance?) +SubtypeKind: _TypeAlias = tuple[bool, ...] + +# A cache that keeps track of whether the given TypeInfo is a part of a particular +# subtype relationship +SubtypeCache: _TypeAlias = dict[TypeInfo, dict[SubtypeKind, set[SubtypeRelationship]]] + + +class TypeState: + """This class provides subtype caching to improve performance of subtype checks. + It also holds protocol fine grained dependencies. + + Note: to avoid leaking global state, 'reset_all_subtype_caches()' should be called + after a build has finished and after a daemon shutdown. This subtype cache only exists for + performance reasons, resetting subtype caches for a class has no semantic effect. + The protocol dependencies however are only stored here, and shouldn't be deleted unless + not needed any more (e.g. during daemon shutdown). + """ + + # '_subtype_caches' keeps track of (subtype, supertype) pairs where supertypes are + # instances of the given TypeInfo. The cache also keeps track of whether the check + # was done in strict optional mode and of the specific *kind* of subtyping relationship, + # which we represent as an arbitrary hashable tuple. + # We need the caches, since subtype checks for structural types are very slow. + _subtype_caches: Final[SubtypeCache] + + # Same as above but for negative subtyping results. + _negative_subtype_caches: Final[SubtypeCache] + + # This contains protocol dependencies generated after running a full build, + # or after an update. These dependencies are special because: + # * They are a global property of the program; i.e. some dependencies for imported + # classes can be generated in the importing modules. + # * Because of the above, they are serialized separately, after a full run, + # or a full update. 
+ # `proto_deps` can be None if after deserialization it turns out that they are + # inconsistent with the other cache files (or an error occurred during deserialization). + # A blocking error will be generated in this case, since we can't proceed safely. + # For the description of kinds of protocol dependencies and corresponding examples, + # see _snapshot_protocol_deps. + proto_deps: dict[str, set[str]] | None + + # Protocols (full names) a given class attempted to implement. + # Used to calculate fine grained protocol dependencies and optimize protocol + # subtype cache invalidation in fine grained mode. For example, if we pass a value + # of type a.A to a function expecting something compatible with protocol p.P, + # we'd have 'a.A' -> {'p.P', ...} in the map. This map is flushed after every incremental + # update. + _attempted_protocols: Final[dict[str, set[str]]] + # We also snapshot protocol members of the above protocols. For example, if we pass + # a value of type a.A to a function expecting something compatible with Iterable, we'd have + # 'a.A' -> {'__iter__', ...} in the map. This map is also flushed after every incremental + # update. This map is needed to only generate dependencies like <a.A.__iter__> -> + # <typing.Iterable> instead of a wildcard to avoid unnecessarily invalidating classes. + _checked_against_members: Final[dict[str, set[str]]] + # TypeInfos that appeared as a left type (subtype) in a subtype check since latest + # dependency snapshot update. This is an optimisation for fine grained mode; during a full + # run we only take a dependency snapshot at the very end, so this set will contain all + # subtype-checked TypeInfos. After a fine grained update however, we can gather only new + # dependencies generated from (typically) few TypeInfos that were subtype-checked + # (i.e. appeared as r.h.s. in an assignment or an argument in a function call in + # a re-checked target) during the update. + _rechecked_types: Final[set[TypeInfo]] + + # The two attributes below are assumption stacks for subtyping relationships between + # recursive type aliases. Normally, one would pass type assumptions as additional + # arguments to is_subtype(), but this would mean updating dozens of related functions + # threading this through all callsites (see also comment for TypeInfo.assuming). + _assuming: Final[list[tuple[Type, Type]]] + _assuming_proper: Final[list[tuple[Type, Type]]] + # Ditto for inference of generic constraints against recursive type aliases. + inferring: Final[list[tuple[Type, Type]]] + # Whether to use joins or unions when solving constraints, see checkexpr.py for details. + infer_unions: bool + # Whether to use new type inference algorithm that can infer polymorphic types. + # This is temporary and will be removed soon when new algorithm is more polished. + infer_polymorphic: bool + + # N.B: We do all of the accesses to these properties through + # TypeState, instead of making these classmethods and accessing + # via the cls parameter, since mypyc can optimize accesses to + # Final attributes of a directly referenced type.
+ + def __init__(self) -> None: + self._subtype_caches = {} + self._negative_subtype_caches = {} + self.proto_deps = {} + self._attempted_protocols = {} + self._checked_against_members = {} + self._rechecked_types = set() + self._assuming = [] + self._assuming_proper = [] + self.inferring = [] + self.infer_unions = False + self.infer_polymorphic = False + + def is_assumed_subtype(self, left: Type, right: Type) -> bool: + for l, r in reversed(self._assuming): + if get_proper_type(l) == get_proper_type(left) and get_proper_type( + r + ) == get_proper_type(right): + return True + return False + + def is_assumed_proper_subtype(self, left: Type, right: Type) -> bool: + for l, r in reversed(self._assuming_proper): + if get_proper_type(l) == get_proper_type(left) and get_proper_type( + r + ) == get_proper_type(right): + return True + return False + + def get_assumptions(self, is_proper: bool) -> list[tuple[Type, Type]]: + if is_proper: + return self._assuming_proper + return self._assuming + + def reset_all_subtype_caches(self) -> None: + """Completely reset all known subtype caches.""" + self._subtype_caches.clear() + self._negative_subtype_caches.clear() + + def reset_subtype_caches_for(self, info: TypeInfo) -> None: + """Reset subtype caches (if any) for a given supertype TypeInfo.""" + if info in self._subtype_caches: + self._subtype_caches[info].clear() + if info in self._negative_subtype_caches: + self._negative_subtype_caches[info].clear() + + def reset_all_subtype_caches_for(self, info: TypeInfo) -> None: + """Reset subtype caches (if any) for a given supertype TypeInfo and its MRO.""" + for item in info.mro: + self.reset_subtype_caches_for(item) + + def is_cached_subtype_check(self, kind: SubtypeKind, left: Instance, right: Instance) -> bool: + if left.last_known_value is not None or right.last_known_value is not None: + # If there is a literal last known value, give up. There + # will be an unbounded number of potential types to cache, + # making caching less effective. + return False + info = right.type + cache = self._subtype_caches.get(info) + if cache is None: + return False + subcache = cache.get(kind) + if subcache is None: + return False + return (left, right) in subcache + + def is_cached_negative_subtype_check( + self, kind: SubtypeKind, left: Instance, right: Instance + ) -> bool: + if left.last_known_value is not None or right.last_known_value is not None: + # If there is a literal last known value, give up. There + # will be an unbounded number of potential types to cache, + # making caching less effective. + return False + info = right.type + cache = self._negative_subtype_caches.get(info) + if cache is None: + return False + subcache = cache.get(kind) + if subcache is None: + return False + return (left, right) in subcache + + def record_subtype_cache_entry( + self, kind: SubtypeKind, left: Instance, right: Instance + ) -> None: + if left.last_known_value is not None or right.last_known_value is not None: + # These are unlikely to match, due to the large space of + # possible values. Avoid uselessly increasing cache sizes. 
+ return + if any( + (isinstance(tv, TypeVarType) and tv.variance == VARIANCE_NOT_READY) + for tv in right.type.defn.type_vars + ): + # Variance indeterminate -- don't know the result + return + cache = self._subtype_caches.setdefault(right.type, {}) + cache.setdefault(kind, set()).add((left, right)) + + def record_negative_subtype_cache_entry( + self, kind: SubtypeKind, left: Instance, right: Instance + ) -> None: + if left.last_known_value is not None or right.last_known_value is not None: + # These are unlikely to match, due to the large space of + # possible values. Avoid uselessly increasing cache sizes. + return + if len(self._negative_subtype_caches) > MAX_NEGATIVE_CACHE_TYPES: + self._negative_subtype_caches.clear() + cache = self._negative_subtype_caches.setdefault(right.type, {}) + subcache = cache.setdefault(kind, set()) + if len(subcache) > MAX_NEGATIVE_CACHE_ENTRIES: + subcache.clear() + cache.setdefault(kind, set()).add((left, right)) + + def reset_protocol_deps(self) -> None: + """Reset dependencies after a full run or before a daemon shutdown.""" + self.proto_deps = {} + self._attempted_protocols.clear() + self._checked_against_members.clear() + self._rechecked_types.clear() + + def record_protocol_subtype_check(self, left_type: TypeInfo, right_type: TypeInfo) -> None: + assert right_type.is_protocol + self._rechecked_types.add(left_type) + self._attempted_protocols.setdefault(left_type.fullname, set()).add(right_type.fullname) + self._checked_against_members.setdefault(left_type.fullname, set()).update( + right_type.protocol_members + ) + + def _snapshot_protocol_deps(self) -> dict[str, set[str]]: + """Collect protocol attribute dependencies found so far from registered subtype checks. + + There are three kinds of protocol dependencies. For example, after a subtype check: + + x: Proto = C() + + the following dependencies will be generated: + 1. ..., , -> + 2. ..., , -> [for every attr in Proto members] + 3. -> Proto # this one to invalidate the subtype cache + + The first kind is generated immediately per-module in deps.py (see also an example there + for motivation why it is needed). While two other kinds are generated here after all + modules are type checked and we have recorded all the subtype checks. To understand these + two kinds, consider a simple example: + + class A: + def __iter__(self) -> Iterator[int]: + ... + + it: Iterable[int] = A() + + We add -> to invalidate the assignment (module target in this case), + whenever the signature of a.A.__iter__ changes. We also add -> typing.Iterable, + to invalidate the subtype caches of the latter. (Note that the same logic applies to + proper subtype checks, and calculating meets and joins, if this involves calling + 'subtypes.is_protocol_implementation'). + """ + deps: dict[str, set[str]] = {} + for info in self._rechecked_types: + for attr in self._checked_against_members[info.fullname]: + # The need for full MRO here is subtle, during an update, base classes of + # a concrete class may not be reprocessed, so not all -> deps + # are added. 
+ for base_info in info.mro[:-1]: + trigger = make_trigger(f"{base_info.fullname}.{attr}") + if "typing" in trigger or "builtins" in trigger: + # TODO: avoid everything from typeshed + continue + deps.setdefault(trigger, set()).add(make_trigger(info.fullname)) + for proto in self._attempted_protocols[info.fullname]: + trigger = make_trigger(info.fullname) + if "typing" in trigger or "builtins" in trigger: + continue + # If any class that was checked against a protocol changes, + # we need to reset the subtype cache for the protocol. + # + # Note: strictly speaking, the protocol doesn't need to be + # re-checked, we only need to reset the cache, and its uses + # elsewhere are still valid (unless invalidated by other deps). + deps.setdefault(trigger, set()).add(proto) + return deps + + def update_protocol_deps(self, second_map: dict[str, set[str]] | None = None) -> None: + """Update global protocol dependency map. + + We update the global map incrementally, using a snapshot only from recently + type checked types. If second_map is given, update it as well. This is currently used + by FineGrainedBuildManager that maintains normal (non-protocol) dependencies. + """ + assert self.proto_deps is not None, "This should not be called after failed cache load" + new_deps = self._snapshot_protocol_deps() + for trigger, targets in new_deps.items(): + self.proto_deps.setdefault(trigger, set()).update(targets) + if second_map is not None: + for trigger, targets in new_deps.items(): + second_map.setdefault(trigger, set()).update(targets) + self._rechecked_types.clear() + self._attempted_protocols.clear() + self._checked_against_members.clear() + + def add_all_protocol_deps(self, deps: dict[str, set[str]]) -> None: + """Add all known protocol dependencies to deps. + + This is used by tests and debug output, and also when collecting + all collected or loaded dependencies as part of build. + """ + self.update_protocol_deps() # just in case + if self.proto_deps is not None: + for trigger, targets in self.proto_deps.items(): + deps.setdefault(trigger, set()).update(targets) + + +type_state: Final = TypeState() + + +def reset_global_state() -> None: + """Reset most existing global state. + + Currently most of it is in this module. Few exceptions are strict optional status + and functools.lru_cache. 
+ """ + type_state.reset_all_subtype_caches() + type_state.reset_protocol_deps() + TypeVarId.next_raw_id = 1 diff --git a/.venv/lib/python3.12/site-packages/mypy/typetraverser.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/typetraverser.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..e398279 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/typetraverser.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/typetraverser.py b/.venv/lib/python3.12/site-packages/mypy/typetraverser.py new file mode 100644 index 0000000..abd0f6b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typetraverser.py @@ -0,0 +1,159 @@ +from __future__ import annotations + +from collections.abc import Iterable + +from mypy_extensions import trait + +from mypy.types import ( + AnyType, + CallableArgument, + CallableType, + DeletedType, + EllipsisType, + ErasedType, + Instance, + LiteralType, + NoneType, + Overloaded, + Parameters, + ParamSpecType, + PartialType, + PlaceholderType, + RawExpressionType, + SyntheticTypeVisitor, + TupleType, + Type, + TypeAliasType, + TypedDictType, + TypeList, + TypeType, + TypeVarTupleType, + TypeVarType, + UnboundType, + UninhabitedType, + UnionType, + UnpackType, +) + + +@trait +class TypeTraverserVisitor(SyntheticTypeVisitor[None]): + """Visitor that traverses all components of a type""" + + # Atomic types + + def visit_any(self, t: AnyType, /) -> None: + pass + + def visit_uninhabited_type(self, t: UninhabitedType, /) -> None: + pass + + def visit_none_type(self, t: NoneType, /) -> None: + pass + + def visit_erased_type(self, t: ErasedType, /) -> None: + pass + + def visit_deleted_type(self, t: DeletedType, /) -> None: + pass + + def visit_type_var(self, t: TypeVarType, /) -> None: + # Note that type variable values and upper bound aren't treated as + # components, since they are components of the type variable + # definition. We want to traverse everything just once. + t.default.accept(self) + + def visit_param_spec(self, t: ParamSpecType, /) -> None: + # TODO: do we need to traverse prefix here? 
+ t.default.accept(self) + + def visit_parameters(self, t: Parameters, /) -> None: + self.traverse_type_list(t.arg_types) + + def visit_type_var_tuple(self, t: TypeVarTupleType, /) -> None: + t.default.accept(self) + + def visit_literal_type(self, t: LiteralType, /) -> None: + t.fallback.accept(self) + + # Composite types + + def visit_instance(self, t: Instance, /) -> None: + self.traverse_type_tuple(t.args) + + def visit_callable_type(self, t: CallableType, /) -> None: + # FIX generics + self.traverse_type_list(t.arg_types) + t.ret_type.accept(self) + t.fallback.accept(self) + + if t.type_guard is not None: + t.type_guard.accept(self) + + if t.type_is is not None: + t.type_is.accept(self) + + def visit_tuple_type(self, t: TupleType, /) -> None: + self.traverse_type_list(t.items) + t.partial_fallback.accept(self) + + def visit_typeddict_type(self, t: TypedDictType, /) -> None: + self.traverse_types(t.items.values()) + t.fallback.accept(self) + + def visit_union_type(self, t: UnionType, /) -> None: + self.traverse_type_list(t.items) + + def visit_overloaded(self, t: Overloaded, /) -> None: + self.traverse_types(t.items) + + def visit_type_type(self, t: TypeType, /) -> None: + t.item.accept(self) + + # Special types (not real types) + + def visit_callable_argument(self, t: CallableArgument, /) -> None: + t.typ.accept(self) + + def visit_unbound_type(self, t: UnboundType, /) -> None: + self.traverse_type_tuple(t.args) + + def visit_type_list(self, t: TypeList, /) -> None: + self.traverse_type_list(t.items) + + def visit_ellipsis_type(self, t: EllipsisType, /) -> None: + pass + + def visit_placeholder_type(self, t: PlaceholderType, /) -> None: + self.traverse_type_list(t.args) + + def visit_partial_type(self, t: PartialType, /) -> None: + pass + + def visit_raw_expression_type(self, t: RawExpressionType, /) -> None: + pass + + def visit_type_alias_type(self, t: TypeAliasType, /) -> None: + # TODO: sometimes we want to traverse target as well + # We need to find a way to indicate explicitly the intent, + # maybe make this method abstract (like for TypeTranslator)? 
+ self.traverse_type_list(t.args) + + def visit_unpack_type(self, t: UnpackType, /) -> None: + t.type.accept(self) + + # Helpers + + def traverse_types(self, types: Iterable[Type], /) -> None: + for typ in types: + typ.accept(self) + + def traverse_type_list(self, types: list[Type], /) -> None: + # Micro-optimization: Specialized for lists + for typ in types: + typ.accept(self) + + def traverse_type_tuple(self, types: tuple[Type, ...], /) -> None: + # Micro-optimization: Specialized for tuples + for typ in types: + typ.accept(self) diff --git a/.venv/lib/python3.12/site-packages/mypy/typevars.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/typevars.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..3ab288f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/typevars.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/typevars.py b/.venv/lib/python3.12/site-packages/mypy/typevars.py new file mode 100644 index 0000000..e871973 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typevars.py @@ -0,0 +1,84 @@ +from __future__ import annotations + +from mypy.erasetype import erase_typevars +from mypy.nodes import TypeInfo +from mypy.types import ( + Instance, + ParamSpecType, + ProperType, + TupleType, + Type, + TypeOfAny, + TypeVarLikeType, + TypeVarTupleType, + TypeVarType, + UnpackType, +) +from mypy.typevartuples import erased_vars + + +def fill_typevars(typ: TypeInfo) -> Instance | TupleType: + """For a non-generic type, return instance type representing the type. + + For a generic G type with parameters T1, .., Tn, return G[T1, ..., Tn]. + """ + tvs: list[Type] = [] + # TODO: why do we need to keep both typ.type_vars and typ.defn.type_vars? + for i in range(len(typ.defn.type_vars)): + tv: TypeVarLikeType | UnpackType = typ.defn.type_vars[i] + # Change the line number + if isinstance(tv, TypeVarType): + tv = tv.copy_modified(line=-1, column=-1) + elif isinstance(tv, TypeVarTupleType): + tv = UnpackType( + TypeVarTupleType( + tv.name, + tv.fullname, + tv.id, + tv.upper_bound, + tv.tuple_fallback, + tv.default, + line=-1, + column=-1, + ) + ) + else: + assert isinstance(tv, ParamSpecType) + tv = ParamSpecType( + tv.name, + tv.fullname, + tv.id, + tv.flavor, + tv.upper_bound, + tv.default, + line=-1, + column=-1, + ) + tvs.append(tv) + inst = Instance(typ, tvs) + # TODO: do we need to also handle typeddict_type here and below? + if typ.tuple_type is None: + return inst + return typ.tuple_type.copy_modified(fallback=inst) + + +def fill_typevars_with_any(typ: TypeInfo) -> Instance | TupleType: + """Apply a correct number of Any's as type arguments to a type.""" + inst = Instance(typ, erased_vars(typ.defn.type_vars, TypeOfAny.special_form)) + if typ.tuple_type is None: + return inst + erased_tuple_type = erase_typevars(typ.tuple_type, {tv.id for tv in typ.defn.type_vars}) + assert isinstance(erased_tuple_type, ProperType) + if isinstance(erased_tuple_type, TupleType): + return typ.tuple_type.copy_modified(fallback=inst) + return inst + + +def has_no_typevars(typ: Type) -> bool: + # We test if a type contains type variables by erasing all type variables + # and comparing the result to the original type. We use comparison by equality that + # in turn uses `__eq__` defined for types. Note: we can't use `is_same_type` because + # it is not safe with unresolved forward references, while this function may be called + # before forward references resolution patch pass. 
Note also that it is not safe to use + # `is` comparison because `erase_typevars` doesn't preserve type identity. + return typ == erase_typevars(typ) diff --git a/.venv/lib/python3.12/site-packages/mypy/typevartuples.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/typevartuples.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..8853e71 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/typevartuples.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/typevartuples.py b/.venv/lib/python3.12/site-packages/mypy/typevartuples.py new file mode 100644 index 0000000..1bf1a59 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/typevartuples.py @@ -0,0 +1,36 @@ +"""Helpers for interacting with type var tuples.""" + +from __future__ import annotations + +from collections.abc import Sequence + +from mypy.types import ( + AnyType, + Instance, + Type, + TypeVarLikeType, + TypeVarTupleType, + UnpackType, + split_with_prefix_and_suffix, +) + + +def split_with_instance( + typ: Instance, +) -> tuple[tuple[Type, ...], tuple[Type, ...], tuple[Type, ...]]: + assert typ.type.type_var_tuple_prefix is not None + assert typ.type.type_var_tuple_suffix is not None + return split_with_prefix_and_suffix( + typ.args, typ.type.type_var_tuple_prefix, typ.type.type_var_tuple_suffix + ) + + +def erased_vars(type_vars: Sequence[TypeVarLikeType], type_of_any: int) -> list[Type]: + args: list[Type] = [] + for tv in type_vars: + # Valid erasure for *Ts is *tuple[Any, ...], not just Any. + if isinstance(tv, TypeVarTupleType): + args.append(UnpackType(tv.tuple_fallback.copy_modified(args=[AnyType(type_of_any)]))) + else: + args.append(AnyType(type_of_any)) + return args diff --git a/.venv/lib/python3.12/site-packages/mypy/util.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/util.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..aa80b82 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/util.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/util.py b/.venv/lib/python3.12/site-packages/mypy/util.py new file mode 100644 index 0000000..c919ff8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/util.py @@ -0,0 +1,952 @@ +"""Utility functions with no non-trivial dependencies.""" + +from __future__ import annotations + +import hashlib +import io +import json +import os +import re +import shutil +import sys +import time +from collections.abc import Container, Iterable, Sequence, Sized +from importlib import resources as importlib_resources +from typing import IO, Any, Callable, Final, Literal, TypeVar + +orjson: Any +try: + import orjson # type: ignore[import-not-found, no-redef, unused-ignore] +except ImportError: + orjson = None + +try: + import _curses # noqa: F401 + import curses + + CURSES_ENABLED = True +except ImportError: + CURSES_ENABLED = False + +T = TypeVar("T") + +TYPESHED_DIR: Final = str(importlib_resources.files("mypy") / "typeshed") + +ENCODING_RE: Final = re.compile(rb"([ \t\v]*#.*(\r\n?|\n))??[ \t\v]*#.*coding[:=][ \t]*([-\w.]+)") + +DEFAULT_SOURCE_OFFSET: Final = 4 +DEFAULT_COLUMNS: Final = 80 + +# At least this number of columns will be shown on each side of +# error location when printing source code snippet. +MINIMUM_WIDTH: Final = 20 + +# VT100 color code processing was added in Windows 10, but only the second major update, +# Threshold 2. 
Fortunately, everyone (even on LTSB, Long Term Support Branch) should +# have a version of Windows 10 newer than this. Note that Windows 8 and below are not +# supported, but are either going out of support, or make up only a few % of the market. +MINIMUM_WINDOWS_MAJOR_VT100: Final = 10 +MINIMUM_WINDOWS_BUILD_VT100: Final = 10586 + +SPECIAL_DUNDERS: Final = frozenset( + ("__init__", "__new__", "__call__", "__init_subclass__", "__class_getitem__") +) + + +def is_dunder(name: str, exclude_special: bool = False) -> bool: + """Returns whether name is a dunder name. + + Args: + exclude_special: Whether to return False for a couple special dunder methods. + + """ + if exclude_special and name in SPECIAL_DUNDERS: + return False + return name.startswith("__") and name.endswith("__") + + +def is_sunder(name: str) -> bool: + return not is_dunder(name) and name.startswith("_") and name.endswith("_") and name != "_" + + +def split_module_names(mod_name: str) -> list[str]: + """Return the module and all parent module names. + + So, if `mod_name` is 'a.b.c', this function will return + ['a.b.c', 'a.b', and 'a']. + """ + out = [mod_name] + while "." in mod_name: + mod_name = mod_name.rsplit(".", 1)[0] + out.append(mod_name) + return out + + +def module_prefix(modules: Iterable[str], target: str) -> str | None: + result = split_target(modules, target) + if result is None: + return None + return result[0] + + +def split_target(modules: Iterable[str], target: str) -> tuple[str, str] | None: + remaining: list[str] = [] + while True: + if target in modules: + return target, ".".join(remaining) + components = target.rsplit(".", 1) + if len(components) == 1: + return None + target = components[0] + remaining.insert(0, components[1]) + + +def short_type(obj: object) -> str: + """Return the last component of the type name of an object. + + If obj is None, return 'nil'. For example, if obj is 1, return 'int'. + """ + if obj is None: + return "nil" + t = str(type(obj)) + return t.split(".")[-1].rstrip("'>") + + +def find_python_encoding(text: bytes) -> tuple[str, int]: + """PEP-263 for detecting Python file encoding""" + result = ENCODING_RE.match(text) + if result: + line = 2 if result.group(1) else 1 + encoding = result.group(3).decode("ascii") + # Handle some aliases that Python is happy to accept and that are used in the wild. + if encoding.startswith(("iso-latin-1-", "latin-1-")) or encoding == "iso-latin-1": + encoding = "latin-1" + return encoding, line + else: + default_encoding = "utf8" + return default_encoding, -1 + + +def bytes_to_human_readable_repr(b: bytes) -> str: + """Converts bytes into some human-readable representation. Unprintable + bytes such as the nul byte are escaped. For example: + + >>> b = bytes([102, 111, 111, 10, 0]) + >>> s = bytes_to_human_readable_repr(b) + >>> print(s) + foo\n\x00 + >>> print(repr(s)) + 'foo\\n\\x00' + """ + return repr(b)[2:-1] + + +class DecodeError(Exception): + """Exception raised when a file cannot be decoded due to an unknown encoding type. + + Essentially a wrapper for the LookupError raised by `bytearray.decode` + """ + + +def decode_python_encoding(source: bytes) -> str: + """Read the Python file with while obeying PEP-263 encoding detection. + + Returns the source as a string. 
+ """ + # check for BOM UTF-8 encoding and strip it out if present + if source.startswith(b"\xef\xbb\xbf"): + encoding = "utf8" + source = source[3:] + else: + # look at first two lines and check if PEP-263 coding is present + encoding, _ = find_python_encoding(source) + + try: + source_text = source.decode(encoding) + except LookupError as lookuperr: + raise DecodeError(str(lookuperr)) from lookuperr + return source_text + + +def read_py_file(path: str, read: Callable[[str], bytes]) -> list[str] | None: + """Try reading a Python file as list of source lines. + + Return None if something goes wrong. + """ + try: + source = read(path) + except OSError: + return None + else: + try: + source_lines = decode_python_encoding(source).splitlines() + except DecodeError: + return None + return source_lines + + +def trim_source_line(line: str, max_len: int, col: int, min_width: int) -> tuple[str, int]: + """Trim a line of source code to fit into max_len. + + Show 'min_width' characters on each side of 'col' (an error location). If either + start or end is trimmed, this is indicated by adding '...' there. + A typical result looks like this: + ...some_variable = function_to_call(one_arg, other_arg) or... + + Return the trimmed string and the column offset to adjust error location. + """ + if max_len < 2 * min_width + 1: + # In case the window is too tiny it is better to still show something. + max_len = 2 * min_width + 1 + + # Trivial case: line already fits in. + if len(line) <= max_len: + return line, 0 + + # If column is not too large so that there is still min_width after it, + # the line doesn't need to be trimmed at the start. + if col + min_width < max_len: + return line[:max_len] + "...", 0 + + # Otherwise, if the column is not too close to the end, trim both sides. + if col < len(line) - min_width - 1: + offset = col - max_len + min_width + 1 + return "..." + line[offset : col + min_width + 1] + "...", offset - 3 + + # Finally, if the column is near the end, just trim the start. + return "..." + line[-max_len:], len(line) - max_len - 3 + + +def get_mypy_comments(source: str) -> list[tuple[int, str]]: + PREFIX = "# mypy: " + # Don't bother splitting up the lines unless we know it is useful + if PREFIX not in source: + return [] + lines = source.split("\n") + results = [] + for i, line in enumerate(lines): + if line.startswith(PREFIX): + results.append((i + 1, line[len(PREFIX) :])) + + return results + + +JUNIT_HEADER_TEMPLATE: Final = """ + +""" + +JUNIT_TESTCASE_FAIL_TEMPLATE: Final = """ + {text} + +""" + +JUNIT_ERROR_TEMPLATE: Final = """ + {text} + +""" + +JUNIT_TESTCASE_PASS_TEMPLATE: Final = """ + +""" + +JUNIT_FOOTER: Final = """ +""" + + +def _generate_junit_contents( + dt: float, + serious: bool, + messages_by_file: dict[str | None, list[str]], + version: str, + platform: str, +) -> str: + from xml.sax.saxutils import escape + + if serious: + failures = 0 + errors = len(messages_by_file) + else: + failures = len(messages_by_file) + errors = 0 + + xml = JUNIT_HEADER_TEMPLATE.format( + errors=errors, + failures=failures, + time=dt, + # If there are no messages, we still write one "test" indicating success. 
+ tests=len(messages_by_file) or 1, + ) + + if not messages_by_file: + xml += JUNIT_TESTCASE_PASS_TEMPLATE.format(time=dt, ver=version, platform=platform) + else: + for filename, messages in messages_by_file.items(): + if filename is not None: + xml += JUNIT_TESTCASE_FAIL_TEMPLATE.format( + text=escape("\n".join(messages)), + filename=filename, + time=dt, + name="mypy-py{ver}-{platform} {filename}".format( + ver=version, platform=platform, filename=filename + ), + ) + else: + xml += JUNIT_TESTCASE_FAIL_TEMPLATE.format( + text=escape("\n".join(messages)), + filename="mypy", + time=dt, + name=f"mypy-py{version}-{platform}", + ) + + xml += JUNIT_FOOTER + + return xml + + +def write_junit_xml( + dt: float, + serious: bool, + messages_by_file: dict[str | None, list[str]], + path: str, + version: str, + platform: str, +) -> None: + xml = _generate_junit_contents(dt, serious, messages_by_file, version, platform) + + # creates folders if needed + xml_dirs = os.path.dirname(os.path.abspath(path)) + os.makedirs(xml_dirs, exist_ok=True) + + with open(path, "wb") as f: + f.write(xml.encode("utf-8")) + + +class IdMapper: + """Generate integer ids for objects. + + Unlike id(), these start from 0 and increment by 1, and ids won't + get reused across the life-time of IdMapper. + + Assume objects don't redefine __eq__ or __hash__. + """ + + def __init__(self) -> None: + self.id_map: dict[object, int] = {} + self.next_id = 0 + + def id(self, o: object) -> int: + if o not in self.id_map: + self.id_map[o] = self.next_id + self.next_id += 1 + return self.id_map[o] + + +def get_prefix(fullname: str) -> str: + """Drop the final component of a qualified name (e.g. ('x.y' -> 'x').""" + return fullname.rsplit(".", 1)[0] + + +def correct_relative_import( + cur_mod_id: str, relative: int, target: str, is_cur_package_init_file: bool +) -> tuple[str, bool]: + if relative == 0: + return target, True + parts = cur_mod_id.split(".") + rel = relative + if is_cur_package_init_file: + rel -= 1 + ok = len(parts) >= rel + if rel != 0: + cur_mod_id = ".".join(parts[:-rel]) + return cur_mod_id + (("." + target) if target else ""), ok + + +fields_cache: Final[dict[type[object], list[str]]] = {} + + +def get_class_descriptors(cls: type[object]) -> Sequence[str]: + import inspect # Lazy import for minor startup speed win + + # Maintain a cache of type -> attributes defined by descriptors in the class + # (that is, attributes from __slots__ and C extension classes) + if cls not in fields_cache: + members = inspect.getmembers( + cls, lambda o: inspect.isgetsetdescriptor(o) or inspect.ismemberdescriptor(o) + ) + fields_cache[cls] = [x for x, y in members if x != "__weakref__" and x != "__dict__"] + return fields_cache[cls] + + +def replace_object_state( + new: object, old: object, copy_dict: bool = False, skip_slots: tuple[str, ...] = () +) -> None: + """Copy state of old node to the new node. + + This handles cases where there is __dict__ and/or attribute descriptors + (either from slots or because the type is defined in a C extension module). + + Assume that both objects have the same __class__. 
+ """ + if hasattr(old, "__dict__"): + if copy_dict: + new.__dict__ = dict(old.__dict__) + else: + new.__dict__ = old.__dict__ + + for attr in get_class_descriptors(old.__class__): + if attr in skip_slots: + continue + try: + if hasattr(old, attr): + setattr(new, attr, getattr(old, attr)) + elif hasattr(new, attr): + delattr(new, attr) + # There is no way to distinguish getsetdescriptors that allow + # writes from ones that don't (I think?), so we just ignore + # AttributeErrors if we need to. + # TODO: What about getsetdescriptors that act like properties??? + except AttributeError: + pass + + +def is_sub_path_normabs(path: str, dir: str) -> bool: + """Given two paths, return if path is a sub-path of dir. + + Moral equivalent of: Path(dir) in Path(path).parents + + Similar to the pathlib version: + - Treats paths case-sensitively + - Does not fully handle unnormalised paths (e.g. paths with "..") + - Does not handle a mix of absolute and relative paths + Unlike the pathlib version: + - Fast + - On Windows, assumes input has been slash normalised + - Handles even fewer unnormalised paths (e.g. paths with "." and "//") + + As a result, callers should ensure that inputs have had os.path.abspath called on them + (note that os.path.abspath will normalise) + """ + if not dir.endswith(os.sep): + dir += os.sep + return path.startswith(dir) + + +if sys.platform == "linux" or sys.platform == "darwin": + + def os_path_join(path: str, b: str) -> str: + # Based off of os.path.join, but simplified to str-only, 2 args and mypyc can compile it. + if b.startswith("/") or not path: + return b + elif path.endswith("/"): + return path + b + else: + return path + "/" + b + +else: + + def os_path_join(a: str, p: str) -> str: + return os.path.join(a, p) + + +def hard_exit(status: int = 0) -> None: + """Kill the current process without fully cleaning up. + + This can be quite a bit faster than a normal exit() since objects are not freed. + """ + sys.stdout.flush() + sys.stderr.flush() + os._exit(status) + + +def unmangle(name: str) -> str: + """Remove internal suffixes from a short name.""" + return name.rstrip("'") + + +def get_unique_redefinition_name(name: str, existing: Container[str]) -> str: + """Get a simple redefinition name not present among existing. + + For example, for name 'foo' we try 'foo-redefinition', 'foo-redefinition2', + 'foo-redefinition3', etc. until we find one that is not in existing. + """ + r_name = name + "-redefinition" + if r_name not in existing: + return r_name + + i = 2 + while r_name + str(i) in existing: + i += 1 + return r_name + str(i) + + +def check_python_version(program: str) -> None: + """Report issues with the Python used to run mypy, dmypy, or stubgen""" + # Check for known bad Python versions. 
+ if sys.version_info[:2] < (3, 9): # noqa: UP036, RUF100 + sys.exit( + "Running {name} with Python 3.8 or lower is not supported; " + "please upgrade to 3.9 or newer".format(name=program) + ) + + +def count_stats(messages: list[str]) -> tuple[int, int, int]: + """Count total number of errors, notes and error_files in message list.""" + errors = [e for e in messages if ": error:" in e] + error_files = {e.split(":")[0] for e in errors} + notes = [e for e in messages if ": note:" in e] + return len(errors), len(notes), len(error_files) + + +def split_words(msg: str) -> list[str]: + """Split line of text into words (but not within quoted groups).""" + next_word = "" + res: list[str] = [] + allow_break = True + for c in msg: + if c == " " and allow_break: + res.append(next_word) + next_word = "" + continue + if c == '"': + allow_break = not allow_break + next_word += c + res.append(next_word) + return res + + +def get_terminal_width() -> int: + """Get current terminal width if possible, otherwise return the default one.""" + return ( + int(os.getenv("MYPY_FORCE_TERMINAL_WIDTH", "0")) + or shutil.get_terminal_size().columns + or DEFAULT_COLUMNS + ) + + +def soft_wrap(msg: str, max_len: int, first_offset: int, num_indent: int = 0) -> str: + """Wrap a long error message into few lines. + + Breaks will only happen between words, and never inside a quoted group + (to avoid breaking types such as "Union[int, str]"). The 'first_offset' is + the width before the start of first line. + + Pad every next line with 'num_indent' spaces. Every line will be at most 'max_len' + characters, except if it is a single word or quoted group. + + For example: + first_offset + ------------------------ + path/to/file: error: 58: Some very long error message + that needs to be split in separate lines. + "Long[Type, Names]" are never split. + ^^^^-------------------------------------------------- + num_indent max_len + """ + words = split_words(msg) + next_line = words.pop(0) + lines: list[str] = [] + while words: + next_word = words.pop(0) + max_line_len = max_len - num_indent if lines else max_len - first_offset + # Add 1 to account for space between words. + if len(next_line) + len(next_word) + 1 <= max_line_len: + next_line += " " + next_word + else: + lines.append(next_line) + next_line = next_word + lines.append(next_line) + padding = "\n" + " " * num_indent + return padding.join(lines) + + +def hash_digest(data: bytes) -> str: + """Compute a hash digest of some data. + + We use a cryptographic hash because we want a low probability of + accidental collision, but we don't really care about any of the + cryptographic properties. + """ + return hashlib.sha1(data).hexdigest() + + +def hash_digest_bytes(data: bytes) -> bytes: + """Compute a hash digest of some data. + + Similar to above but returns a bytes object. + """ + return hashlib.sha1(data).digest() + + +def parse_gray_color(cup: bytes) -> str: + """Reproduce a gray color in ANSI escape sequence""" + assert sys.platform != "win32", "curses is not available on Windows" + set_color = "".join([cup[:-1].decode(), "m"]) + gray = curses.tparm(set_color.encode("utf-8"), 1, 9).decode() + return gray + + +def should_force_color() -> bool: + env_var = os.getenv("MYPY_FORCE_COLOR", os.getenv("FORCE_COLOR", "0")) + try: + return bool(int(env_var)) + except ValueError: + return bool(env_var) + + +class FancyFormatter: + """Apply color and bold font to terminal output. + + This currently only works on Linux and Mac. 
+ """ + + def __init__( + self, f_out: IO[str], f_err: IO[str], hide_error_codes: bool, hide_success: bool = False + ) -> None: + self.hide_error_codes = hide_error_codes + self.hide_success = hide_success + + # Check if we are in a human-facing terminal on a supported platform. + if sys.platform not in ("linux", "darwin", "win32", "emscripten"): + self.dummy_term = True + return + if not should_force_color() and (not f_out.isatty() or not f_err.isatty()): + self.dummy_term = True + return + if sys.platform == "win32": + self.dummy_term = not self.initialize_win_colors() + elif sys.platform == "emscripten": + self.dummy_term = not self.initialize_vt100_colors() + else: + self.dummy_term = not self.initialize_unix_colors() + if not self.dummy_term: + self.colors = { + "red": self.RED, + "green": self.GREEN, + "blue": self.BLUE, + "yellow": self.YELLOW, + "none": "", + } + + def initialize_vt100_colors(self) -> bool: + """Return True if initialization was successful and we can use colors, False otherwise""" + # Windows and Emscripten can both use ANSI/VT100 escape sequences for color + assert sys.platform in ("win32", "emscripten") + self.BOLD = "\033[1m" + self.UNDER = "\033[4m" + self.BLUE = "\033[94m" + self.GREEN = "\033[92m" + self.RED = "\033[91m" + self.YELLOW = "\033[93m" + self.NORMAL = "\033[0m" + self.DIM = "\033[2m" + return True + + def initialize_win_colors(self) -> bool: + """Return True if initialization was successful and we can use colors, False otherwise""" + # Windows ANSI escape sequences are only supported on Threshold 2 and above. + # we check with an assert at runtime and an if check for mypy, as asserts do not + # yet narrow platform + if sys.platform == "win32": # needed to find win specific sys apis + winver = sys.getwindowsversion() + if ( + winver.major < MINIMUM_WINDOWS_MAJOR_VT100 + or winver.build < MINIMUM_WINDOWS_BUILD_VT100 + ): + return False + import ctypes + + kernel32 = ctypes.windll.kernel32 + ENABLE_PROCESSED_OUTPUT = 0x1 + ENABLE_WRAP_AT_EOL_OUTPUT = 0x2 + ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x4 + STD_OUTPUT_HANDLE = -11 + kernel32.SetConsoleMode( + kernel32.GetStdHandle(STD_OUTPUT_HANDLE), + ENABLE_PROCESSED_OUTPUT + | ENABLE_WRAP_AT_EOL_OUTPUT + | ENABLE_VIRTUAL_TERMINAL_PROCESSING, + ) + self.initialize_vt100_colors() + return True + assert False, "Running not on Windows" + + def initialize_unix_colors(self) -> bool: + """Return True if initialization was successful and we can use colors, False otherwise""" + is_win = sys.platform == "win32" + if is_win or not CURSES_ENABLED: + return False + try: + # setupterm wants a fd to potentially write an "initialization sequence". + # We override sys.stdout for the daemon API so if stdout doesn't have an fd, + # just give it /dev/null. + try: + fd = sys.stdout.fileno() + except io.UnsupportedOperation: + with open("/dev/null", "rb") as f: + curses.setupterm(fd=f.fileno()) + else: + curses.setupterm(fd=fd) + except curses.error: + # Most likely terminfo not found. 
+ return False + bold = curses.tigetstr("bold") + under = curses.tigetstr("smul") + set_color = curses.tigetstr("setaf") + set_eseq = curses.tigetstr("cup") + normal = curses.tigetstr("sgr0") + + if not (bold and under and set_color and set_eseq and normal): + return False + + self.NORMAL = normal.decode() + self.BOLD = bold.decode() + self.UNDER = under.decode() + self.DIM = parse_gray_color(set_eseq) + self.BLUE = curses.tparm(set_color, curses.COLOR_BLUE).decode() + self.GREEN = curses.tparm(set_color, curses.COLOR_GREEN).decode() + self.RED = curses.tparm(set_color, curses.COLOR_RED).decode() + self.YELLOW = curses.tparm(set_color, curses.COLOR_YELLOW).decode() + return True + + def style( + self, + text: str, + color: Literal["red", "green", "blue", "yellow", "none"], + bold: bool = False, + underline: bool = False, + dim: bool = False, + ) -> str: + """Apply simple color and style (underlined or bold).""" + if self.dummy_term: + return text + if bold: + start = self.BOLD + else: + start = "" + if underline: + start += self.UNDER + if dim: + start += self.DIM + return start + self.colors[color] + text + self.NORMAL + + def fit_in_terminal( + self, messages: list[str], fixed_terminal_width: int | None = None + ) -> list[str]: + """Improve readability by wrapping error messages and trimming source code.""" + width = fixed_terminal_width or get_terminal_width() + new_messages = messages.copy() + for i, error in enumerate(messages): + if ": error:" in error: + loc, msg = error.split("error:", maxsplit=1) + msg = soft_wrap(msg, width, first_offset=len(loc) + len("error: ")) + new_messages[i] = loc + "error:" + msg + if error.startswith(" " * DEFAULT_SOURCE_OFFSET) and "^" not in error: + # TODO: detecting source code highlights through an indent can be surprising. + # Restore original error message and error location. + error = error[DEFAULT_SOURCE_OFFSET:] + marker_line = messages[i + 1] + marker_column = marker_line.index("^") + column = marker_column - DEFAULT_SOURCE_OFFSET + if "~" not in marker_line: + marker = "^" + else: + # +1 because both ends are included + marker = marker_line[marker_column : marker_line.rindex("~") + 1] + + # Let source have some space also on the right side, plus 6 + # to accommodate ... on each side. + max_len = width - DEFAULT_SOURCE_OFFSET - 6 + source_line, offset = trim_source_line(error, max_len, column, MINIMUM_WIDTH) + + new_messages[i] = " " * DEFAULT_SOURCE_OFFSET + source_line + # Also adjust the error marker position and trim error marker is needed. + new_marker_line = " " * (DEFAULT_SOURCE_OFFSET + column - offset) + marker + if len(new_marker_line) > len(new_messages[i]) and len(marker) > 3: + new_marker_line = new_marker_line[: len(new_messages[i]) - 3] + "..." 
+ new_messages[i + 1] = new_marker_line + return new_messages + + def colorize(self, error: str) -> str: + """Colorize an output line by highlighting the status and error code.""" + if ": error:" in error: + loc, msg = error.split("error:", maxsplit=1) + if self.hide_error_codes: + return ( + loc + self.style("error:", "red", bold=True) + self.highlight_quote_groups(msg) + ) + codepos = msg.rfind("[") + if codepos != -1: + code = msg[codepos:] + msg = msg[:codepos] + else: + code = "" # no error code specified + return ( + loc + + self.style("error:", "red", bold=True) + + self.highlight_quote_groups(msg) + + self.style(code, "yellow") + ) + elif ": note:" in error: + loc, msg = error.split("note:", maxsplit=1) + formatted = self.highlight_quote_groups(self.underline_link(msg)) + return loc + self.style("note:", "blue") + formatted + elif error.startswith(" " * DEFAULT_SOURCE_OFFSET): + # TODO: detecting source code highlights through an indent can be surprising. + if "^" not in error: + return self.style(error, "none", dim=True) + return self.style(error, "red") + else: + return error + + def highlight_quote_groups(self, msg: str) -> str: + """Make groups quoted with double quotes bold (including quotes). + + This is used to highlight types, attribute names etc. + """ + if msg.count('"') % 2: + # Broken error message, don't do any formatting. + return msg + parts = msg.split('"') + out = "" + for i, part in enumerate(parts): + if i % 2 == 0: + out += self.style(part, "none") + else: + out += self.style('"' + part + '"', "none", bold=True) + return out + + def underline_link(self, note: str) -> str: + """Underline a link in a note message (if any). + + This assumes there is at most one link in the message. + """ + match = re.search(r"https?://\S*", note) + if not match: + return note + start = match.start() + end = match.end() + return note[:start] + self.style(note[start:end], "none", underline=True) + note[end:] + + def format_success(self, n_sources: int, use_color: bool = True) -> str: + """Format short summary in case of success. + + n_sources is total number of files passed directly on command line, + i.e. excluding stubs and followed imports. 
+ """ + if self.hide_success: + return "" + + msg = f"Success: no issues found in {n_sources} source file{plural_s(n_sources)}" + if not use_color: + return msg + return self.style(msg, "green", bold=True) + + def format_error( + self, + n_errors: int, + n_files: int, + n_sources: int, + *, + blockers: bool = False, + use_color: bool = True, + ) -> str: + """Format a short summary in case of errors.""" + msg = f"Found {n_errors} error{plural_s(n_errors)} in {n_files} file{plural_s(n_files)}" + if blockers: + msg += " (errors prevented further checking)" + else: + msg += f" (checked {n_sources} source file{plural_s(n_sources)})" + if not use_color: + return msg + return self.style(msg, "red", bold=True) + + +def is_typeshed_file(typeshed_dir: str | None, file: str) -> bool: + typeshed_dir = typeshed_dir if typeshed_dir is not None else TYPESHED_DIR + try: + return os.path.commonpath((typeshed_dir, os.path.abspath(file))) == typeshed_dir + except ValueError: # Different drives on Windows + return False + + +def is_stdlib_file(typeshed_dir: str | None, file: str) -> bool: + if "stdlib" not in file: + # Fast path + return False + typeshed_dir = typeshed_dir if typeshed_dir is not None else TYPESHED_DIR + stdlib_dir = os.path.join(typeshed_dir, "stdlib") + try: + return os.path.commonpath((stdlib_dir, os.path.abspath(file))) == stdlib_dir + except ValueError: # Different drives on Windows + return False + + +def is_stub_package_file(file: str) -> bool: + # Use hacky heuristics to check whether file is part of a PEP 561 stub package. + if not file.endswith(".pyi"): + return False + return any(component.endswith("-stubs") for component in os.path.split(os.path.abspath(file))) + + +def unnamed_function(name: str | None) -> bool: + return name is not None and name == "_" + + +time_ref = time.perf_counter_ns + + +def time_spent_us(t0: int) -> int: + return int((time.perf_counter_ns() - t0) / 1000) + + +def plural_s(s: int | Sized) -> str: + count = s if isinstance(s, int) else len(s) + if count != 1: + return "s" + else: + return "" + + +def quote_docstring(docstr: str) -> str: + """Returns docstring correctly encapsulated in a single or double quoted form.""" + # Uses repr to get hint on the correct quotes and escape everything properly. + # Creating multiline string for prettier output. + docstr_repr = "\n".join(re.split(r"(?<=[^\\])\\n", repr(docstr))) + + if docstr_repr.startswith("'"): + # Enforce double quotes when it's safe to do so. + # That is when double quotes are not in the string + # or when it doesn't end with a single quote. 
+ if '"' not in docstr_repr[1:-1] and docstr_repr[-2] != "'": + return f'"""{docstr_repr[1:-1]}"""' + return f"''{docstr_repr}''" + else: + return f'""{docstr_repr}""' + + +def json_dumps(obj: object, debug: bool = False) -> bytes: + if orjson is not None: + if debug: + dumps_option = orjson.OPT_INDENT_2 | orjson.OPT_SORT_KEYS + else: + # TODO: If we don't sort keys here, testIncrementalInternalScramble fails + # We should document exactly what is going on there + dumps_option = orjson.OPT_SORT_KEYS + + try: + return orjson.dumps(obj, option=dumps_option) # type: ignore[no-any-return] + except TypeError as e: + if str(e) != "Integer exceeds 64-bit range": + raise + + if debug: + return json.dumps(obj, indent=2, sort_keys=True).encode("utf-8") + else: + # See above for sort_keys comment + return json.dumps(obj, sort_keys=True, separators=(",", ":")).encode("utf-8") + + +def json_loads(data: bytes) -> Any: + if orjson is not None: + return orjson.loads(data) + return json.loads(data) diff --git a/.venv/lib/python3.12/site-packages/mypy/version.py b/.venv/lib/python3.12/site-packages/mypy/version.py new file mode 100644 index 0000000..90fa096 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/version.py @@ -0,0 +1 @@ +__version__ = "1.19.1" diff --git a/.venv/lib/python3.12/site-packages/mypy/visitor.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypy/visitor.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..7e5653b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypy/visitor.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypy/visitor.py b/.venv/lib/python3.12/site-packages/mypy/visitor.py new file mode 100644 index 0000000..e150788 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/visitor.py @@ -0,0 +1,632 @@ +"""Generic abstract syntax tree node visitor""" + +from __future__ import annotations + +from abc import abstractmethod +from typing import TYPE_CHECKING, Generic, TypeVar + +from mypy_extensions import mypyc_attr, trait + +if TYPE_CHECKING: + # break import cycle only needed for mypy + import mypy.nodes + import mypy.patterns + + +T = TypeVar("T") + + +@trait +@mypyc_attr(allow_interpreted_subclasses=True) +class ExpressionVisitor(Generic[T]): + @abstractmethod + def visit_int_expr(self, o: mypy.nodes.IntExpr, /) -> T: + pass + + @abstractmethod + def visit_str_expr(self, o: mypy.nodes.StrExpr, /) -> T: + pass + + @abstractmethod + def visit_bytes_expr(self, o: mypy.nodes.BytesExpr, /) -> T: + pass + + @abstractmethod + def visit_float_expr(self, o: mypy.nodes.FloatExpr, /) -> T: + pass + + @abstractmethod + def visit_complex_expr(self, o: mypy.nodes.ComplexExpr, /) -> T: + pass + + @abstractmethod + def visit_ellipsis(self, o: mypy.nodes.EllipsisExpr, /) -> T: + pass + + @abstractmethod + def visit_star_expr(self, o: mypy.nodes.StarExpr, /) -> T: + pass + + @abstractmethod + def visit_name_expr(self, o: mypy.nodes.NameExpr, /) -> T: + pass + + @abstractmethod + def visit_member_expr(self, o: mypy.nodes.MemberExpr, /) -> T: + pass + + @abstractmethod + def visit_yield_from_expr(self, o: mypy.nodes.YieldFromExpr, /) -> T: + pass + + @abstractmethod + def visit_yield_expr(self, o: mypy.nodes.YieldExpr, /) -> T: + pass + + @abstractmethod + def visit_call_expr(self, o: mypy.nodes.CallExpr, /) -> T: + pass + + @abstractmethod + def visit_op_expr(self, o: mypy.nodes.OpExpr, /) -> T: + pass + + @abstractmethod + def visit_comparison_expr(self, o: mypy.nodes.ComparisonExpr, /) 
-> T: + pass + + @abstractmethod + def visit_cast_expr(self, o: mypy.nodes.CastExpr, /) -> T: + pass + + @abstractmethod + def visit_type_form_expr(self, o: mypy.nodes.TypeFormExpr, /) -> T: + pass + + @abstractmethod + def visit_assert_type_expr(self, o: mypy.nodes.AssertTypeExpr, /) -> T: + pass + + @abstractmethod + def visit_reveal_expr(self, o: mypy.nodes.RevealExpr, /) -> T: + pass + + @abstractmethod + def visit_super_expr(self, o: mypy.nodes.SuperExpr, /) -> T: + pass + + @abstractmethod + def visit_unary_expr(self, o: mypy.nodes.UnaryExpr, /) -> T: + pass + + @abstractmethod + def visit_assignment_expr(self, o: mypy.nodes.AssignmentExpr, /) -> T: + pass + + @abstractmethod + def visit_list_expr(self, o: mypy.nodes.ListExpr, /) -> T: + pass + + @abstractmethod + def visit_dict_expr(self, o: mypy.nodes.DictExpr, /) -> T: + pass + + @abstractmethod + def visit_tuple_expr(self, o: mypy.nodes.TupleExpr, /) -> T: + pass + + @abstractmethod + def visit_set_expr(self, o: mypy.nodes.SetExpr, /) -> T: + pass + + @abstractmethod + def visit_index_expr(self, o: mypy.nodes.IndexExpr, /) -> T: + pass + + @abstractmethod + def visit_type_application(self, o: mypy.nodes.TypeApplication, /) -> T: + pass + + @abstractmethod + def visit_lambda_expr(self, o: mypy.nodes.LambdaExpr, /) -> T: + pass + + @abstractmethod + def visit_list_comprehension(self, o: mypy.nodes.ListComprehension, /) -> T: + pass + + @abstractmethod + def visit_set_comprehension(self, o: mypy.nodes.SetComprehension, /) -> T: + pass + + @abstractmethod + def visit_dictionary_comprehension(self, o: mypy.nodes.DictionaryComprehension, /) -> T: + pass + + @abstractmethod + def visit_generator_expr(self, o: mypy.nodes.GeneratorExpr, /) -> T: + pass + + @abstractmethod + def visit_slice_expr(self, o: mypy.nodes.SliceExpr, /) -> T: + pass + + @abstractmethod + def visit_conditional_expr(self, o: mypy.nodes.ConditionalExpr, /) -> T: + pass + + @abstractmethod + def visit_type_var_expr(self, o: mypy.nodes.TypeVarExpr, /) -> T: + pass + + @abstractmethod + def visit_paramspec_expr(self, o: mypy.nodes.ParamSpecExpr, /) -> T: + pass + + @abstractmethod + def visit_type_var_tuple_expr(self, o: mypy.nodes.TypeVarTupleExpr, /) -> T: + pass + + @abstractmethod + def visit_type_alias_expr(self, o: mypy.nodes.TypeAliasExpr, /) -> T: + pass + + @abstractmethod + def visit_namedtuple_expr(self, o: mypy.nodes.NamedTupleExpr, /) -> T: + pass + + @abstractmethod + def visit_enum_call_expr(self, o: mypy.nodes.EnumCallExpr, /) -> T: + pass + + @abstractmethod + def visit_typeddict_expr(self, o: mypy.nodes.TypedDictExpr, /) -> T: + pass + + @abstractmethod + def visit_newtype_expr(self, o: mypy.nodes.NewTypeExpr, /) -> T: + pass + + @abstractmethod + def visit__promote_expr(self, o: mypy.nodes.PromoteExpr, /) -> T: + pass + + @abstractmethod + def visit_await_expr(self, o: mypy.nodes.AwaitExpr, /) -> T: + pass + + @abstractmethod + def visit_temp_node(self, o: mypy.nodes.TempNode, /) -> T: + pass + + +@trait +@mypyc_attr(allow_interpreted_subclasses=True) +class StatementVisitor(Generic[T]): + # Definitions + + @abstractmethod + def visit_assignment_stmt(self, o: mypy.nodes.AssignmentStmt, /) -> T: + pass + + @abstractmethod + def visit_for_stmt(self, o: mypy.nodes.ForStmt, /) -> T: + pass + + @abstractmethod + def visit_with_stmt(self, o: mypy.nodes.WithStmt, /) -> T: + pass + + @abstractmethod + def visit_del_stmt(self, o: mypy.nodes.DelStmt, /) -> T: + pass + + @abstractmethod + def visit_func_def(self, o: mypy.nodes.FuncDef, /) -> T: + pass + + 
@abstractmethod + def visit_overloaded_func_def(self, o: mypy.nodes.OverloadedFuncDef, /) -> T: + pass + + @abstractmethod + def visit_class_def(self, o: mypy.nodes.ClassDef, /) -> T: + pass + + @abstractmethod + def visit_global_decl(self, o: mypy.nodes.GlobalDecl, /) -> T: + pass + + @abstractmethod + def visit_nonlocal_decl(self, o: mypy.nodes.NonlocalDecl, /) -> T: + pass + + @abstractmethod + def visit_decorator(self, o: mypy.nodes.Decorator, /) -> T: + pass + + # Module structure + + @abstractmethod + def visit_import(self, o: mypy.nodes.Import, /) -> T: + pass + + @abstractmethod + def visit_import_from(self, o: mypy.nodes.ImportFrom, /) -> T: + pass + + @abstractmethod + def visit_import_all(self, o: mypy.nodes.ImportAll, /) -> T: + pass + + # Statements + + @abstractmethod + def visit_block(self, o: mypy.nodes.Block, /) -> T: + pass + + @abstractmethod + def visit_expression_stmt(self, o: mypy.nodes.ExpressionStmt, /) -> T: + pass + + @abstractmethod + def visit_operator_assignment_stmt(self, o: mypy.nodes.OperatorAssignmentStmt, /) -> T: + pass + + @abstractmethod + def visit_while_stmt(self, o: mypy.nodes.WhileStmt, /) -> T: + pass + + @abstractmethod + def visit_return_stmt(self, o: mypy.nodes.ReturnStmt, /) -> T: + pass + + @abstractmethod + def visit_assert_stmt(self, o: mypy.nodes.AssertStmt, /) -> T: + pass + + @abstractmethod + def visit_if_stmt(self, o: mypy.nodes.IfStmt, /) -> T: + pass + + @abstractmethod + def visit_break_stmt(self, o: mypy.nodes.BreakStmt, /) -> T: + pass + + @abstractmethod + def visit_continue_stmt(self, o: mypy.nodes.ContinueStmt, /) -> T: + pass + + @abstractmethod + def visit_pass_stmt(self, o: mypy.nodes.PassStmt, /) -> T: + pass + + @abstractmethod + def visit_raise_stmt(self, o: mypy.nodes.RaiseStmt, /) -> T: + pass + + @abstractmethod + def visit_try_stmt(self, o: mypy.nodes.TryStmt, /) -> T: + pass + + @abstractmethod + def visit_match_stmt(self, o: mypy.nodes.MatchStmt, /) -> T: + pass + + @abstractmethod + def visit_type_alias_stmt(self, o: mypy.nodes.TypeAliasStmt, /) -> T: + pass + + +@trait +@mypyc_attr(allow_interpreted_subclasses=True) +class PatternVisitor(Generic[T]): + @abstractmethod + def visit_as_pattern(self, o: mypy.patterns.AsPattern, /) -> T: + pass + + @abstractmethod + def visit_or_pattern(self, o: mypy.patterns.OrPattern, /) -> T: + pass + + @abstractmethod + def visit_value_pattern(self, o: mypy.patterns.ValuePattern, /) -> T: + pass + + @abstractmethod + def visit_singleton_pattern(self, o: mypy.patterns.SingletonPattern, /) -> T: + pass + + @abstractmethod + def visit_sequence_pattern(self, o: mypy.patterns.SequencePattern, /) -> T: + pass + + @abstractmethod + def visit_starred_pattern(self, o: mypy.patterns.StarredPattern, /) -> T: + pass + + @abstractmethod + def visit_mapping_pattern(self, o: mypy.patterns.MappingPattern, /) -> T: + pass + + @abstractmethod + def visit_class_pattern(self, o: mypy.patterns.ClassPattern, /) -> T: + pass + + +@trait +@mypyc_attr(allow_interpreted_subclasses=True) +class NodeVisitor(Generic[T], ExpressionVisitor[T], StatementVisitor[T], PatternVisitor[T]): + """Empty base class for parse tree node visitors. + + The T type argument specifies the return type of the visit + methods. As all methods defined here raise by default, + subclasses do not always need to override all the methods. 
+ """ + + # Not in superclasses: + + def visit_mypy_file(self, o: mypy.nodes.MypyFile, /) -> T: + raise NotImplementedError() + + # TODO: We have a visit_var method, but no visit_typeinfo or any + # other non-Statement SymbolNode (accepting those will raise a + # runtime error). Maybe this should be resolved in some direction. + def visit_var(self, o: mypy.nodes.Var, /) -> T: + raise NotImplementedError() + + # Module structure + + def visit_import(self, o: mypy.nodes.Import, /) -> T: + raise NotImplementedError() + + def visit_import_from(self, o: mypy.nodes.ImportFrom, /) -> T: + raise NotImplementedError() + + def visit_import_all(self, o: mypy.nodes.ImportAll, /) -> T: + raise NotImplementedError() + + # Definitions + + def visit_func_def(self, o: mypy.nodes.FuncDef, /) -> T: + raise NotImplementedError() + + def visit_overloaded_func_def(self, o: mypy.nodes.OverloadedFuncDef, /) -> T: + raise NotImplementedError() + + def visit_class_def(self, o: mypy.nodes.ClassDef, /) -> T: + raise NotImplementedError() + + def visit_global_decl(self, o: mypy.nodes.GlobalDecl, /) -> T: + raise NotImplementedError() + + def visit_nonlocal_decl(self, o: mypy.nodes.NonlocalDecl, /) -> T: + raise NotImplementedError() + + def visit_decorator(self, o: mypy.nodes.Decorator, /) -> T: + raise NotImplementedError() + + def visit_type_alias(self, o: mypy.nodes.TypeAlias, /) -> T: + raise NotImplementedError() + + def visit_placeholder_node(self, o: mypy.nodes.PlaceholderNode, /) -> T: + raise NotImplementedError() + + # Statements + + def visit_block(self, o: mypy.nodes.Block, /) -> T: + raise NotImplementedError() + + def visit_expression_stmt(self, o: mypy.nodes.ExpressionStmt, /) -> T: + raise NotImplementedError() + + def visit_assignment_stmt(self, o: mypy.nodes.AssignmentStmt, /) -> T: + raise NotImplementedError() + + def visit_operator_assignment_stmt(self, o: mypy.nodes.OperatorAssignmentStmt, /) -> T: + raise NotImplementedError() + + def visit_while_stmt(self, o: mypy.nodes.WhileStmt, /) -> T: + raise NotImplementedError() + + def visit_for_stmt(self, o: mypy.nodes.ForStmt, /) -> T: + raise NotImplementedError() + + def visit_return_stmt(self, o: mypy.nodes.ReturnStmt, /) -> T: + raise NotImplementedError() + + def visit_assert_stmt(self, o: mypy.nodes.AssertStmt, /) -> T: + raise NotImplementedError() + + def visit_del_stmt(self, o: mypy.nodes.DelStmt, /) -> T: + raise NotImplementedError() + + def visit_if_stmt(self, o: mypy.nodes.IfStmt, /) -> T: + raise NotImplementedError() + + def visit_break_stmt(self, o: mypy.nodes.BreakStmt, /) -> T: + raise NotImplementedError() + + def visit_continue_stmt(self, o: mypy.nodes.ContinueStmt, /) -> T: + raise NotImplementedError() + + def visit_pass_stmt(self, o: mypy.nodes.PassStmt, /) -> T: + raise NotImplementedError() + + def visit_raise_stmt(self, o: mypy.nodes.RaiseStmt, /) -> T: + raise NotImplementedError() + + def visit_try_stmt(self, o: mypy.nodes.TryStmt, /) -> T: + raise NotImplementedError() + + def visit_with_stmt(self, o: mypy.nodes.WithStmt, /) -> T: + raise NotImplementedError() + + def visit_match_stmt(self, o: mypy.nodes.MatchStmt, /) -> T: + raise NotImplementedError() + + def visit_type_alias_stmt(self, o: mypy.nodes.TypeAliasStmt, /) -> T: + raise NotImplementedError() + + # Expressions (default no-op implementation) + + def visit_int_expr(self, o: mypy.nodes.IntExpr, /) -> T: + raise NotImplementedError() + + def visit_str_expr(self, o: mypy.nodes.StrExpr, /) -> T: + raise NotImplementedError() + + def visit_bytes_expr(self, o: 
mypy.nodes.BytesExpr, /) -> T: + raise NotImplementedError() + + def visit_float_expr(self, o: mypy.nodes.FloatExpr, /) -> T: + raise NotImplementedError() + + def visit_complex_expr(self, o: mypy.nodes.ComplexExpr, /) -> T: + raise NotImplementedError() + + def visit_ellipsis(self, o: mypy.nodes.EllipsisExpr, /) -> T: + raise NotImplementedError() + + def visit_star_expr(self, o: mypy.nodes.StarExpr, /) -> T: + raise NotImplementedError() + + def visit_name_expr(self, o: mypy.nodes.NameExpr, /) -> T: + raise NotImplementedError() + + def visit_member_expr(self, o: mypy.nodes.MemberExpr, /) -> T: + raise NotImplementedError() + + def visit_yield_from_expr(self, o: mypy.nodes.YieldFromExpr, /) -> T: + raise NotImplementedError() + + def visit_yield_expr(self, o: mypy.nodes.YieldExpr, /) -> T: + raise NotImplementedError() + + def visit_call_expr(self, o: mypy.nodes.CallExpr, /) -> T: + raise NotImplementedError() + + def visit_op_expr(self, o: mypy.nodes.OpExpr, /) -> T: + raise NotImplementedError() + + def visit_comparison_expr(self, o: mypy.nodes.ComparisonExpr, /) -> T: + raise NotImplementedError() + + def visit_cast_expr(self, o: mypy.nodes.CastExpr, /) -> T: + raise NotImplementedError() + + def visit_type_form_expr(self, o: mypy.nodes.TypeFormExpr, /) -> T: + raise NotImplementedError() + + def visit_assert_type_expr(self, o: mypy.nodes.AssertTypeExpr, /) -> T: + raise NotImplementedError() + + def visit_reveal_expr(self, o: mypy.nodes.RevealExpr, /) -> T: + raise NotImplementedError() + + def visit_super_expr(self, o: mypy.nodes.SuperExpr, /) -> T: + raise NotImplementedError() + + def visit_assignment_expr(self, o: mypy.nodes.AssignmentExpr, /) -> T: + raise NotImplementedError() + + def visit_unary_expr(self, o: mypy.nodes.UnaryExpr, /) -> T: + raise NotImplementedError() + + def visit_list_expr(self, o: mypy.nodes.ListExpr, /) -> T: + raise NotImplementedError() + + def visit_dict_expr(self, o: mypy.nodes.DictExpr, /) -> T: + raise NotImplementedError() + + def visit_tuple_expr(self, o: mypy.nodes.TupleExpr, /) -> T: + raise NotImplementedError() + + def visit_set_expr(self, o: mypy.nodes.SetExpr, /) -> T: + raise NotImplementedError() + + def visit_index_expr(self, o: mypy.nodes.IndexExpr, /) -> T: + raise NotImplementedError() + + def visit_type_application(self, o: mypy.nodes.TypeApplication, /) -> T: + raise NotImplementedError() + + def visit_lambda_expr(self, o: mypy.nodes.LambdaExpr, /) -> T: + raise NotImplementedError() + + def visit_list_comprehension(self, o: mypy.nodes.ListComprehension, /) -> T: + raise NotImplementedError() + + def visit_set_comprehension(self, o: mypy.nodes.SetComprehension, /) -> T: + raise NotImplementedError() + + def visit_dictionary_comprehension(self, o: mypy.nodes.DictionaryComprehension, /) -> T: + raise NotImplementedError() + + def visit_generator_expr(self, o: mypy.nodes.GeneratorExpr, /) -> T: + raise NotImplementedError() + + def visit_slice_expr(self, o: mypy.nodes.SliceExpr, /) -> T: + raise NotImplementedError() + + def visit_conditional_expr(self, o: mypy.nodes.ConditionalExpr, /) -> T: + raise NotImplementedError() + + def visit_type_var_expr(self, o: mypy.nodes.TypeVarExpr, /) -> T: + raise NotImplementedError() + + def visit_paramspec_expr(self, o: mypy.nodes.ParamSpecExpr, /) -> T: + raise NotImplementedError() + + def visit_type_var_tuple_expr(self, o: mypy.nodes.TypeVarTupleExpr, /) -> T: + raise NotImplementedError() + + def visit_type_alias_expr(self, o: mypy.nodes.TypeAliasExpr, /) -> T: + raise NotImplementedError() + + 
def visit_namedtuple_expr(self, o: mypy.nodes.NamedTupleExpr, /) -> T: + raise NotImplementedError() + + def visit_enum_call_expr(self, o: mypy.nodes.EnumCallExpr, /) -> T: + raise NotImplementedError() + + def visit_typeddict_expr(self, o: mypy.nodes.TypedDictExpr, /) -> T: + raise NotImplementedError() + + def visit_newtype_expr(self, o: mypy.nodes.NewTypeExpr, /) -> T: + raise NotImplementedError() + + def visit__promote_expr(self, o: mypy.nodes.PromoteExpr, /) -> T: + raise NotImplementedError() + + def visit_await_expr(self, o: mypy.nodes.AwaitExpr, /) -> T: + raise NotImplementedError() + + def visit_temp_node(self, o: mypy.nodes.TempNode, /) -> T: + raise NotImplementedError() + + # Patterns + + def visit_as_pattern(self, o: mypy.patterns.AsPattern, /) -> T: + raise NotImplementedError() + + def visit_or_pattern(self, o: mypy.patterns.OrPattern, /) -> T: + raise NotImplementedError() + + def visit_value_pattern(self, o: mypy.patterns.ValuePattern, /) -> T: + raise NotImplementedError() + + def visit_singleton_pattern(self, o: mypy.patterns.SingletonPattern, /) -> T: + raise NotImplementedError() + + def visit_sequence_pattern(self, o: mypy.patterns.SequencePattern, /) -> T: + raise NotImplementedError() + + def visit_starred_pattern(self, o: mypy.patterns.StarredPattern, /) -> T: + raise NotImplementedError() + + def visit_mapping_pattern(self, o: mypy.patterns.MappingPattern, /) -> T: + raise NotImplementedError() + + def visit_class_pattern(self, o: mypy.patterns.ClassPattern, /) -> T: + raise NotImplementedError() diff --git a/.venv/lib/python3.12/site-packages/mypy/xml/mypy-html.css b/.venv/lib/python3.12/site-packages/mypy/xml/mypy-html.css new file mode 100644 index 0000000..ec2bdf9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/xml/mypy-html.css @@ -0,0 +1,104 @@ +/* CSS for type check coverage reports */ + +/* + Used by both summary and file. +*/ +body { + font-family: "Helvetica Neue", sans-serif; +} + +/* + Used only by summary. +*/ + +h1 { + text-align: center; + font-size: 135%; + margin: 20px; +} + +table.summary { + border-collapse: collapse; + margin-left: 7%; + margin-right: 7%; + width: 85%; +} + +table caption { + margin: 1em; +} + +table.summary, tr.summary, th.summary, td.summary { + border: 1px solid #aaa; +} + +th.summary, td.summary { + padding: 0.4em; +} + +td.summary a { + text-decoration: none; +} + +.summary-quality-0 { + background-color: #dfd; +} + +.summary-quality-1 { + background-color: #ffa; +} + +.summary-quality-2 { + background-color: #faa; +} + +td.summary-filename, th.summary-filename { + text-align: left; +} + +td.summary-filename { + width: 50%; +} + +.summary-precision { + text-align: center; +} + +.summary-lines { + text-align: center; +} + +/* + Used only by file. +*/ + +td.table-lines { + text-align: right; + padding-right: 0.5em; +} + +td.table-code { } + +span.lineno { + text-align: right; +} + +a:link.lineno, a:visited.lineno { + color: #999; text-decoration: none; +} + +a:hover.lineno, a:active.lineno { + color: #000; text-decoration: underline; +} + +.line-empty, .line-precise { + background-color: #dfd; +} + +.line-imprecise { + background-color: #ffa; +} + +.line-any, .line-unanalyzed { + background-color: #faa; +} diff --git a/.venv/lib/python3.12/site-packages/mypy/xml/mypy-html.xslt b/.venv/lib/python3.12/site-packages/mypy/xml/mypy-html.xslt new file mode 100644 index 0000000..ddd78c2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/xml/mypy-html.xslt @@ -0,0 +1,81 @@ + + + + + + + + + + + + + +

+[mypy-html.xslt: the XSLT markup was lost in capture; the recoverable text is the page title "Mypy Type Check Coverage Summary", a per-file summary table with File / Imprecision / Lines columns, a "Total imprecise LOC" summary row, and per-line annotated source listings.]
diff --git a/.venv/lib/python3.12/site-packages/mypy/xml/mypy-txt.xslt b/.venv/lib/python3.12/site-packages/mypy/xml/mypy-txt.xslt new file mode 100644 index 0000000..fe12065 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/xml/mypy-txt.xslt @@ -0,0 +1,100 @@
+[mypy-txt.xslt: the XSLT markup was lost in capture; the recoverable text is the "Mypy Type Check Coverage Summary" heading, a "Script:" label, and an ASCII table layout (+- ... -+ rules with | column separators).]
diff --git a/.venv/lib/python3.12/site-packages/mypy/xml/mypy.xsd b/.venv/lib/python3.12/site-packages/mypy/xml/mypy.xsd new file mode 100644 index 0000000..77d0737 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy/xml/mypy.xsd @@ -0,0 +1,50 @@
+[mypy.xsd: the XML schema markup was lost in capture; no text content survives.]
diff --git a/.venv/lib/python3.12/site-packages/mypy_extensions-1.1.0.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/mypy_extensions-1.1.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy_extensions-1.1.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip
diff --git a/.venv/lib/python3.12/site-packages/mypy_extensions-1.1.0.dist-info/METADATA b/.venv/lib/python3.12/site-packages/mypy_extensions-1.1.0.dist-info/METADATA new file mode 100644 index 0000000..abe8c62 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy_extensions-1.1.0.dist-info/METADATA @@ -0,0 +1,29 @@ +Metadata-Version: 2.4 +Name: mypy_extensions +Version: 1.1.0 +Summary: Type system extensions for programs checked with the mypy type checker. +Author-email: The mypy developers +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +License-Expression: MIT +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Topic :: Software Development +License-File: LICENSE +Project-URL: Homepage, https://github.com/python/mypy_extensions + +Mypy Extensions +=============== + +The `mypy_extensions` module defines extensions to the Python standard +library `typing` module that are supported by the mypy type checker and +the mypyc compiler.
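For orientation, a minimal usage sketch of what this package provides (a hypothetical example, not part of the packaged metadata; it only uses names that mypy_extensions itself defines, such as Arg, DefaultArg, VarArg, and i64):

from typing import Callable

from mypy_extensions import Arg, DefaultArg, VarArg, i64

# At runtime Arg/DefaultArg/VarArg simply return their type argument, so this
# evaluates to Callable[[int, str, bytes], None]; mypy additionally records the
# argument names, the default, and the *args kind.
Handler = Callable[[Arg(int, "code"), DefaultArg(str, "reason"), VarArg(bytes)], None]

def log_event(code: int, reason: str = "", *chunks: bytes) -> None:
    print(code, reason, len(chunks))

handler: Handler = log_event
handler(404, "not found", b"\x00")

# The native fixed-width int shims behave like plain int outside mypyc-compiled code.
n = i64("2a", base=16)
assert isinstance(n, int) and n == 42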
+ diff --git a/.venv/lib/python3.12/site-packages/mypy_extensions-1.1.0.dist-info/RECORD b/.venv/lib/python3.12/site-packages/mypy_extensions-1.1.0.dist-info/RECORD new file mode 100644 index 0000000..e56660a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy_extensions-1.1.0.dist-info/RECORD @@ -0,0 +1,7 @@ +__pycache__/mypy_extensions.cpython-312.pyc,, +mypy_extensions-1.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +mypy_extensions-1.1.0.dist-info/METADATA,sha256=LEpEk22IXzBDMYHKQLv2JFc1Hx9rCHGxH4eJVk-HR38,1100 +mypy_extensions-1.1.0.dist-info/RECORD,, +mypy_extensions-1.1.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 +mypy_extensions-1.1.0.dist-info/licenses/LICENSE,sha256=pQRQ2h1TzXd7gM7XfFj_lqvgzNh5cGvRQsPsIOJF8LQ,1204 +mypy_extensions.py,sha256=GY7iLl2lPqbLD90PTm2U_SxA4MavvwvzpM8MESYgqRk,7754 diff --git a/.venv/lib/python3.12/site-packages/mypy_extensions-1.1.0.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/mypy_extensions-1.1.0.dist-info/WHEEL new file mode 100644 index 0000000..d8b9936 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy_extensions-1.1.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.12.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.venv/lib/python3.12/site-packages/mypy_extensions-1.1.0.dist-info/licenses/LICENSE b/.venv/lib/python3.12/site-packages/mypy_extensions-1.1.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..bdb7786 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy_extensions-1.1.0.dist-info/licenses/LICENSE @@ -0,0 +1,27 @@ +Mypy extensions are licensed under the terms of the MIT license, reproduced below. + += = = = = + +The MIT License + +Copyright (c) 2016-2017 Jukka Lehtosalo and contributors + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + += = = = = diff --git a/.venv/lib/python3.12/site-packages/mypy_extensions.py b/.venv/lib/python3.12/site-packages/mypy_extensions.py new file mode 100644 index 0000000..1910000 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypy_extensions.py @@ -0,0 +1,251 @@ +"""Defines experimental extensions to the standard "typing" module that are +supported by the mypy typechecker. + +Example usage: + from mypy_extensions import TypedDict +""" + +from typing import Any, Dict + +import sys +# _type_check is NOT a part of public typing API, it is used here only to mimic +# the (convenient) behavior of types provided by typing module. 
+from typing import _type_check # type: ignore + + +def _check_fails(cls, other): + try: + if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools', 'typing']: + # Typed dicts are only for static structural subtyping. + raise TypeError('TypedDict does not support instance and class checks') + except (AttributeError, ValueError): + pass + return False + + +def _dict_new(cls, *args, **kwargs): + return dict(*args, **kwargs) + + +def _typeddict_new(cls, _typename, _fields=None, **kwargs): + total = kwargs.pop('total', True) + if _fields is None: + _fields = kwargs + elif kwargs: + raise TypeError("TypedDict takes either a dict or keyword arguments," + " but not both") + + ns = {'__annotations__': dict(_fields), '__total__': total} + try: + # Setting correct module is necessary to make typed dict classes pickleable. + ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + + return _TypedDictMeta(_typename, (), ns, _from_functional_call=True) + + +class _TypedDictMeta(type): + def __new__(cls, name, bases, ns, total=True, _from_functional_call=False): + # Create new typed dict class object. + # This method is called directly when TypedDict is subclassed, + # or via _typeddict_new when TypedDict is instantiated. This way + # TypedDict supports all three syntaxes described in its docstring. + # Subclasses and instances of TypedDict return actual dictionaries + # via _dict_new. + + # We need the `if TypedDict in globals()` check, + # or we emit a DeprecationWarning when creating mypy_extensions.TypedDict itself + if 'TypedDict' in globals(): + import warnings + warnings.warn( + ( + "mypy_extensions.TypedDict is deprecated, " + "and will be removed in a future version. " + "Use typing.TypedDict or typing_extensions.TypedDict instead." + ), + DeprecationWarning, + stacklevel=(3 if _from_functional_call else 2) + ) + + ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new + tp_dict = super(_TypedDictMeta, cls).__new__(cls, name, (dict,), ns) + + anns = ns.get('__annotations__', {}) + msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" + anns = {n: _type_check(tp, msg) for n, tp in anns.items()} + for base in bases: + anns.update(base.__dict__.get('__annotations__', {})) + tp_dict.__annotations__ = anns + if not hasattr(tp_dict, '__total__'): + tp_dict.__total__ = total + return tp_dict + + __instancecheck__ = __subclasscheck__ = _check_fails + + +TypedDict = _TypedDictMeta('TypedDict', (dict,), {}) +TypedDict.__module__ = __name__ +TypedDict.__doc__ = \ + """A simple typed name space. At runtime it is equivalent to a plain dict. + + TypedDict creates a dictionary type that expects all of its + instances to have a certain set of keys, with each key + associated with a value of a consistent type. This expectation + is not checked at runtime but is only enforced by typecheckers. + Usage:: + + Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) + a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK + b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check + assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first') + + The type info could be accessed via Point2D.__annotations__. 
TypedDict + supports two additional equivalent forms:: + + Point2D = TypedDict('Point2D', x=int, y=int, label=str) + + class Point2D(TypedDict): + x: int + y: int + label: str + + The latter syntax is only supported in Python 3.6+, while two other + syntax forms work for 3.2+ + """ + +# Argument constructors for making more-detailed Callables. These all just +# return their type argument, to make them complete noops in terms of the +# `typing` module. + + +def Arg(type=Any, name=None): + """A normal positional argument""" + return type + + +def DefaultArg(type=Any, name=None): + """A positional argument with a default value""" + return type + + +def NamedArg(type=Any, name=None): + """A keyword-only argument""" + return type + + +def DefaultNamedArg(type=Any, name=None): + """A keyword-only argument with a default value""" + return type + + +def VarArg(type=Any): + """A *args-style variadic positional argument""" + return type + + +def KwArg(type=Any): + """A **kwargs-style variadic keyword argument""" + return type + + +# Return type that indicates a function does not return +# Deprecated, use typing or typing_extensions variants instead +class _DEPRECATED_NoReturn: pass + + +def trait(cls): + return cls + + +def mypyc_attr(*attrs, **kwattrs): + return lambda x: x + + +# TODO: We may want to try to properly apply this to any type +# variables left over... +class _FlexibleAliasClsApplied: + def __init__(self, val): + self.val = val + + def __getitem__(self, args): + return self.val + + +class _FlexibleAliasCls: + def __getitem__(self, args): + return _FlexibleAliasClsApplied(args[-1]) + + +FlexibleAlias = _FlexibleAliasCls() + + +class _NativeIntMeta(type): + def __instancecheck__(cls, inst): + return isinstance(inst, int) + + +_sentinel = object() + + +class i64(metaclass=_NativeIntMeta): + def __new__(cls, x=0, base=_sentinel): + if base is not _sentinel: + return int(x, base) + return int(x) + + +class i32(metaclass=_NativeIntMeta): + def __new__(cls, x=0, base=_sentinel): + if base is not _sentinel: + return int(x, base) + return int(x) + + +class i16(metaclass=_NativeIntMeta): + def __new__(cls, x=0, base=_sentinel): + if base is not _sentinel: + return int(x, base) + return int(x) + + +class u8(metaclass=_NativeIntMeta): + def __new__(cls, x=0, base=_sentinel): + if base is not _sentinel: + return int(x, base) + return int(x) + + +for _int_type in i64, i32, i16, u8: + _int_type.__doc__ = \ + """A native fixed-width integer type when used with mypyc. + + In code not compiled with mypyc, behaves like the 'int' type in these + runtime contexts: + + * {name}(x[, base=n]) converts a number or string to 'int' + * isinstance(x, {name}) is the same as isinstance(x, int) + """.format(name=_int_type.__name__) +del _int_type + + +def _warn_deprecation(name: str, module_globals: Dict[str, Any]) -> Any: + if (val := module_globals.get(f"_DEPRECATED_{name}")) is None: + msg = f"module '{__name__}' has no attribute '{name}'" + raise AttributeError(msg) + module_globals[name] = val + if name in {"NoReturn"}: + msg = ( + f"'mypy_extensions.{name}' is deprecated, " + "and will be removed in a future version. 
" + f"Use 'typing.{name}' or 'typing_extensions.{name}' instead" + ) + else: + assert False, f"Add deprecation message for 'mypy_extensions.{name}'" + import warnings + warnings.warn(msg, DeprecationWarning, stacklevel=3) + return val + + +def __getattr__(name: str) -> Any: + return _warn_deprecation(name, module_globals=globals()) diff --git a/.venv/lib/python3.12/site-packages/mypyc/__init__.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/__init__.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..1b73837 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/__init__.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/__init__.py b/.venv/lib/python3.12/site-packages/mypyc/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypyc/__main__.py b/.venv/lib/python3.12/site-packages/mypyc/__main__.py new file mode 100644 index 0000000..9b39737 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/__main__.py @@ -0,0 +1,72 @@ +"""Mypyc command-line tool. + +Usage: + + $ mypyc foo.py [...] + $ python3 -c 'import foo' # Uses compiled 'foo' + + +This is just a thin wrapper that generates a setup.py file that uses +mypycify, suitable for prototyping and testing. +""" + +from __future__ import annotations + +import os +import os.path +import subprocess +import sys + +base_path = os.path.join(os.path.dirname(__file__), "..") + +setup_format = """\ +from setuptools import setup +from mypyc.build import mypycify + +setup( + name='mypyc_output', + ext_modules=mypycify( + {}, + opt_level="{}", + debug_level="{}", + strict_dunder_typing={}, + log_trace={}, + ), +) +""" + + +def main() -> None: + build_dir = "build" # can this be overridden?? + try: + os.mkdir(build_dir) + except FileExistsError: + pass + + opt_level = os.getenv("MYPYC_OPT_LEVEL", "3") + debug_level = os.getenv("MYPYC_DEBUG_LEVEL", "1") + strict_dunder_typing = bool(int(os.getenv("MYPYC_STRICT_DUNDER_TYPING", "0"))) + # If enabled, compiled code writes a sampled log of executed ops (or events) to + # mypyc_trace.txt. + log_trace = bool(int(os.getenv("MYPYC_LOG_TRACE", "0"))) + + setup_file = os.path.join(build_dir, "setup.py") + with open(setup_file, "w") as f: + f.write( + setup_format.format( + sys.argv[1:], opt_level, debug_level, strict_dunder_typing, log_trace + ) + ) + + # We don't use run_setup (like we do in the test suite) because it throws + # away the error code from distutils, and we don't care about the slight + # performance loss here. 
+ env = os.environ.copy() + base_path = os.path.join(os.path.dirname(__file__), "..") + env["PYTHONPATH"] = base_path + os.pathsep + env.get("PYTHONPATH", "") + cmd = subprocess.run([sys.executable, setup_file, "build_ext", "--inplace"], env=env) + sys.exit(cmd.returncode) + + +if __name__ == "__main__": + main() diff --git a/.venv/lib/python3.12/site-packages/mypyc/analysis/__init__.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/analysis/__init__.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..f2b6e13 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/analysis/__init__.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/analysis/__init__.py b/.venv/lib/python3.12/site-packages/mypyc/analysis/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypyc/analysis/attrdefined.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/analysis/attrdefined.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..f248f0e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/analysis/attrdefined.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/analysis/attrdefined.py b/.venv/lib/python3.12/site-packages/mypyc/analysis/attrdefined.py new file mode 100644 index 0000000..1dfd336 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/analysis/attrdefined.py @@ -0,0 +1,437 @@ +"""Always defined attribute analysis. + +An always defined attribute has some statements in __init__ or the +class body that cause the attribute to be always initialized when an +instance is constructed. It must also not be possible to read the +attribute before initialization, and it can't be deletable. + +We can assume that the value is always defined when reading an always +defined attribute. Otherwise we'll need to raise AttributeError if the +value is undefined (i.e. has the error value). + +We use data flow analysis to figure out attributes that are always +defined. Example: + + class C: + def __init__(self) -> None: + self.x = 0 + if func(): + self.y = 1 + else: + self.y = 2 + self.z = 3 + +In this example, the attributes 'x' and 'y' are always defined, but 'z' +is not. The analysis assumes that we know that there won't be any subclasses. + +The analysis also works if there is a known, closed set of subclasses. +An attribute defined in a base class can only be always defined if it's +also always defined in all subclasses. + +As soon as __init__ contains an op that can 'leak' self to another +function, we will stop inferring always defined attributes, since the +analysis is mostly intra-procedural and only looks at __init__ methods. +The called code could read an uninitialized attribute. Example: + + class C: + def __init__(self) -> None: + self.x = self.foo() + + def foo(self) -> int: + ... + +Now we won't infer 'x' as always defined, since 'foo' might read 'x' +before initialization. + +As an exception to the above limitation, we perform inter-procedural +analysis of super().__init__ calls, since these are very common. + +Our analysis is somewhat optimistic. We assume that nobody calls a +method of a partially uninitialized object through gc.get_objects(), in +particular. Code like this could potentially cause a segfault with a null +pointer dereference. This seems very unlikely to be an issue in practice, +however. 
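A small hypothetical example to make the closed-subclass rule above concrete (it mirrors the docstring's examples and is not taken from the analysis code itself):

class Base:
    def __init__(self) -> None:
        self.x = 0  # 'x' is always defined for plain Base instances

class Child(Base):
    def __init__(self) -> None:
        # No super().__init__() call, so 'x' may never be set on Child
        # instances; with a known, closed set of subclasses this also stops
        # 'x' from being treated as always defined on Base.
        self.y = 1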
+ +Accessing an attribute via getattr always checks for undefined attributes +and thus works if the object is partially uninitialized. This can be used +as a workaround if somebody ever needs to inspect partially uninitialized +objects via gc.get_objects(). + +The analysis runs after IR building as a separate pass. Since we only +run this on __init__ methods, this analysis pass will be fairly quick. +""" + +from __future__ import annotations + +from typing import Final + +from mypyc.analysis.dataflow import ( + CFG, + MAYBE_ANALYSIS, + AnalysisResult, + BaseAnalysisVisitor, + get_cfg, + run_analysis, +) +from mypyc.analysis.selfleaks import analyze_self_leaks +from mypyc.ir.class_ir import ClassIR +from mypyc.ir.ops import ( + Assign, + AssignMulti, + BasicBlock, + Branch, + Call, + ControlOp, + GetAttr, + Register, + RegisterOp, + Return, + SetAttr, + SetMem, + Unreachable, +) +from mypyc.ir.rtypes import RInstance + +# If True, print out all always-defined attributes of native classes (to aid +# debugging and testing) +dump_always_defined: Final = False + + +def analyze_always_defined_attrs(class_irs: list[ClassIR]) -> None: + """Find always defined attributes all classes of a compilation unit. + + Also tag attribute initialization ops to not decref the previous + value (as this would read a NULL pointer and segfault). + + Update the _always_initialized_attrs, _sometimes_initialized_attrs + and init_self_leak attributes in ClassIR instances. + + This is the main entry point. + """ + seen: set[ClassIR] = set() + + # First pass: only look at target class and classes in MRO + for cl in class_irs: + analyze_always_defined_attrs_in_class(cl, seen) + + # Second pass: look at all derived class + seen = set() + for cl in class_irs: + update_always_defined_attrs_using_subclasses(cl, seen) + + # Final pass: detect attributes that need to use a bitmap to track definedness + seen = set() + for cl in class_irs: + detect_undefined_bitmap(cl, seen) + + +def analyze_always_defined_attrs_in_class(cl: ClassIR, seen: set[ClassIR]) -> None: + if cl in seen: + return + + seen.add(cl) + + if ( + cl.is_trait + or cl.inherits_python + or cl.allow_interpreted_subclasses + or cl.builtin_base is not None + or cl.children is None + or cl.is_serializable() + or cl.has_method("__new__") + ): + # Give up -- we can't enforce that attributes are always defined. + return + + # First analyze all base classes. Track seen classes to avoid duplicate work. 
+ for base in cl.mro[1:]: + analyze_always_defined_attrs_in_class(base, seen) + + m = cl.get_method("__init__") + if m is None: + cl._always_initialized_attrs = cl.attrs_with_defaults.copy() + cl._sometimes_initialized_attrs = cl.attrs_with_defaults.copy() + return + self_reg = m.arg_regs[0] + cfg = get_cfg(m.blocks) + dirty = analyze_self_leaks(m.blocks, self_reg, cfg) + maybe_defined = analyze_maybe_defined_attrs_in_init( + m.blocks, self_reg, cl.attrs_with_defaults, cfg + ) + all_attrs: set[str] = set() + for base in cl.mro: + all_attrs.update(base.attributes) + maybe_undefined = analyze_maybe_undefined_attrs_in_init( + m.blocks, self_reg, initial_undefined=all_attrs - cl.attrs_with_defaults, cfg=cfg + ) + + always_defined = find_always_defined_attributes( + m.blocks, self_reg, all_attrs, maybe_defined, maybe_undefined, dirty + ) + always_defined = {a for a in always_defined if not cl.is_deletable(a)} + + cl._always_initialized_attrs = always_defined + if dump_always_defined: + print(cl.name, sorted(always_defined)) + cl._sometimes_initialized_attrs = find_sometimes_defined_attributes( + m.blocks, self_reg, maybe_defined, dirty + ) + + mark_attr_initialization_ops(m.blocks, self_reg, maybe_defined, dirty) + + # Check if __init__ can run unpredictable code (leak 'self'). + any_dirty = False + for b in m.blocks: + for i, op in enumerate(b.ops): + if dirty.after[b, i] and not isinstance(op, Return): + any_dirty = True + break + cl.init_self_leak = any_dirty + + +def find_always_defined_attributes( + blocks: list[BasicBlock], + self_reg: Register, + all_attrs: set[str], + maybe_defined: AnalysisResult[str], + maybe_undefined: AnalysisResult[str], + dirty: AnalysisResult[None], +) -> set[str]: + """Find attributes that are always initialized in some basic blocks. + + The analysis results are expected to be up-to-date for the blocks. + + Return a set of always defined attributes. + """ + attrs = all_attrs.copy() + for block in blocks: + for i, op in enumerate(block.ops): + # If an attribute we *read* may be undefined, it isn't always defined. + if isinstance(op, GetAttr) and op.obj is self_reg: + if op.attr in maybe_undefined.before[block, i]: + attrs.discard(op.attr) + # If an attribute we *set* may be sometimes undefined and + # sometimes defined, don't consider it always defined. Unlike + # the get case, it's fine for the attribute to be undefined. + # The set operation will then be treated as initialization. + if isinstance(op, SetAttr) and op.obj is self_reg: + if ( + op.attr in maybe_undefined.before[block, i] + and op.attr in maybe_defined.before[block, i] + ): + attrs.discard(op.attr) + # Treat an op that might run arbitrary code as an "exit" + # in terms of the analysis -- we can't do any inference + # afterwards reliably. + if dirty.after[block, i]: + if not dirty.before[block, i]: + attrs = attrs & ( + maybe_defined.after[block, i] - maybe_undefined.after[block, i] + ) + break + if isinstance(op, ControlOp): + for target in op.targets(): + # Gotos/branches can also be "exits". 
+ if not dirty.after[block, i] and dirty.before[target, 0]: + attrs = attrs & ( + maybe_defined.after[target, 0] - maybe_undefined.after[target, 0] + ) + return attrs + + +def find_sometimes_defined_attributes( + blocks: list[BasicBlock], + self_reg: Register, + maybe_defined: AnalysisResult[str], + dirty: AnalysisResult[None], +) -> set[str]: + """Find attributes that are sometimes initialized in some basic blocks.""" + attrs: set[str] = set() + for block in blocks: + for i, op in enumerate(block.ops): + # Only look at possibly defined attributes at exits. + if dirty.after[block, i]: + if not dirty.before[block, i]: + attrs = attrs | maybe_defined.after[block, i] + break + if isinstance(op, ControlOp): + for target in op.targets(): + if not dirty.after[block, i] and dirty.before[target, 0]: + attrs = attrs | maybe_defined.after[target, 0] + return attrs + + +def mark_attr_initialization_ops( + blocks: list[BasicBlock], + self_reg: Register, + maybe_defined: AnalysisResult[str], + dirty: AnalysisResult[None], +) -> None: + """Tag all SetAttr ops in the basic blocks that initialize attributes. + + Initialization ops assume that the previous attribute value is the error value, + so there's no need to decref or check for definedness. + """ + for block in blocks: + for i, op in enumerate(block.ops): + if isinstance(op, SetAttr) and op.obj is self_reg: + attr = op.attr + if attr not in maybe_defined.before[block, i] and not dirty.after[block, i]: + op.mark_as_initializer() + + +GenAndKill = tuple[set[str], set[str]] + + +def attributes_initialized_by_init_call(op: Call) -> set[str]: + """Calculate attributes that are always initialized by a super().__init__ call.""" + self_type = op.fn.sig.args[0].type + assert isinstance(self_type, RInstance), self_type + cl = self_type.class_ir + return {a for base in cl.mro for a in base.attributes if base.is_always_defined(a)} + + +def attributes_maybe_initialized_by_init_call(op: Call) -> set[str]: + """Calculate attributes that may be initialized by a super().__init__ call.""" + self_type = op.fn.sig.args[0].type + assert isinstance(self_type, RInstance), self_type + cl = self_type.class_ir + return attributes_initialized_by_init_call(op) | cl._sometimes_initialized_attrs + + +class AttributeMaybeDefinedVisitor(BaseAnalysisVisitor[str]): + """Find attributes that may have been defined via some code path. + + Consider initializations in class body and assignments to 'self.x' + and calls to base class '__init__'. 
+ """ + + def __init__(self, self_reg: Register) -> None: + self.self_reg = self_reg + + def visit_branch(self, op: Branch) -> tuple[set[str], set[str]]: + return set(), set() + + def visit_return(self, op: Return) -> tuple[set[str], set[str]]: + return set(), set() + + def visit_unreachable(self, op: Unreachable) -> tuple[set[str], set[str]]: + return set(), set() + + def visit_register_op(self, op: RegisterOp) -> tuple[set[str], set[str]]: + if isinstance(op, SetAttr) and op.obj is self.self_reg: + return {op.attr}, set() + if isinstance(op, Call) and op.fn.class_name and op.fn.name == "__init__": + return attributes_maybe_initialized_by_init_call(op), set() + return set(), set() + + def visit_assign(self, op: Assign) -> tuple[set[str], set[str]]: + return set(), set() + + def visit_assign_multi(self, op: AssignMulti) -> tuple[set[str], set[str]]: + return set(), set() + + def visit_set_mem(self, op: SetMem) -> tuple[set[str], set[str]]: + return set(), set() + + +def analyze_maybe_defined_attrs_in_init( + blocks: list[BasicBlock], self_reg: Register, attrs_with_defaults: set[str], cfg: CFG +) -> AnalysisResult[str]: + return run_analysis( + blocks=blocks, + cfg=cfg, + gen_and_kill=AttributeMaybeDefinedVisitor(self_reg), + initial=attrs_with_defaults, + backward=False, + kind=MAYBE_ANALYSIS, + ) + + +class AttributeMaybeUndefinedVisitor(BaseAnalysisVisitor[str]): + """Find attributes that may be undefined via some code path. + + Consider initializations in class body, assignments to 'self.x' + and calls to base class '__init__'. + """ + + def __init__(self, self_reg: Register) -> None: + self.self_reg = self_reg + + def visit_branch(self, op: Branch) -> tuple[set[str], set[str]]: + return set(), set() + + def visit_return(self, op: Return) -> tuple[set[str], set[str]]: + return set(), set() + + def visit_unreachable(self, op: Unreachable) -> tuple[set[str], set[str]]: + return set(), set() + + def visit_register_op(self, op: RegisterOp) -> tuple[set[str], set[str]]: + if isinstance(op, SetAttr) and op.obj is self.self_reg: + return set(), {op.attr} + if isinstance(op, Call) and op.fn.class_name and op.fn.name == "__init__": + return set(), attributes_initialized_by_init_call(op) + return set(), set() + + def visit_assign(self, op: Assign) -> tuple[set[str], set[str]]: + return set(), set() + + def visit_assign_multi(self, op: AssignMulti) -> tuple[set[str], set[str]]: + return set(), set() + + def visit_set_mem(self, op: SetMem) -> tuple[set[str], set[str]]: + return set(), set() + + +def analyze_maybe_undefined_attrs_in_init( + blocks: list[BasicBlock], self_reg: Register, initial_undefined: set[str], cfg: CFG +) -> AnalysisResult[str]: + return run_analysis( + blocks=blocks, + cfg=cfg, + gen_and_kill=AttributeMaybeUndefinedVisitor(self_reg), + initial=initial_undefined, + backward=False, + kind=MAYBE_ANALYSIS, + ) + + +def update_always_defined_attrs_using_subclasses(cl: ClassIR, seen: set[ClassIR]) -> None: + """Remove attributes not defined in all subclasses from always defined attrs.""" + if cl in seen: + return + if cl.children is None: + # Subclasses are unknown + return + removed = set() + for attr in cl._always_initialized_attrs: + for child in cl.children: + update_always_defined_attrs_using_subclasses(child, seen) + if attr not in child._always_initialized_attrs: + removed.add(attr) + cl._always_initialized_attrs -= removed + seen.add(cl) + + +def detect_undefined_bitmap(cl: ClassIR, seen: set[ClassIR]) -> None: + if cl.is_trait: + return + + if cl in seen: + return + 
seen.add(cl) + for base in cl.base_mro[1:]: + detect_undefined_bitmap(base, seen) + + if len(cl.base_mro) > 1: + cl.bitmap_attrs.extend(cl.base_mro[1].bitmap_attrs) + for n, t in cl.attributes.items(): + if t.error_overlap and not cl.is_always_defined(n): + cl.bitmap_attrs.append(n) + + for base in cl.mro[1:]: + if base.is_trait: + for n, t in base.attributes.items(): + if t.error_overlap and not cl.is_always_defined(n) and n not in cl.bitmap_attrs: + cl.bitmap_attrs.append(n) diff --git a/.venv/lib/python3.12/site-packages/mypyc/analysis/blockfreq.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/analysis/blockfreq.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..94d598c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/analysis/blockfreq.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/analysis/blockfreq.py b/.venv/lib/python3.12/site-packages/mypyc/analysis/blockfreq.py new file mode 100644 index 0000000..74a1bc0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/analysis/blockfreq.py @@ -0,0 +1,32 @@ +"""Find basic blocks that are likely to be executed frequently. + +For example, this would not include blocks that have exception handlers. + +We can use different optimization heuristics for common and rare code. For +example, we can make IR fast to compile instead of fast to execute for rare +code. +""" + +from __future__ import annotations + +from mypyc.ir.ops import BasicBlock, Branch, Goto + + +def frequently_executed_blocks(entry_point: BasicBlock) -> set[BasicBlock]: + result: set[BasicBlock] = set() + worklist = [entry_point] + while worklist: + block = worklist.pop() + if block in result: + continue + result.add(block) + t = block.terminator + if isinstance(t, Goto): + worklist.append(t.label) + elif isinstance(t, Branch): + if t.rare or t.traceback_entry is not None: + worklist.append(t.false) + else: + worklist.append(t.true) + worklist.append(t.false) + return result diff --git a/.venv/lib/python3.12/site-packages/mypyc/analysis/capsule_deps.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/analysis/capsule_deps.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..bd0ef19 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/analysis/capsule_deps.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/analysis/capsule_deps.py b/.venv/lib/python3.12/site-packages/mypyc/analysis/capsule_deps.py new file mode 100644 index 0000000..ada42ee --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/analysis/capsule_deps.py @@ -0,0 +1,27 @@ +from __future__ import annotations + +from mypyc.ir.func_ir import FuncIR +from mypyc.ir.ops import CallC, PrimitiveOp + + +def find_implicit_capsule_dependencies(fn: FuncIR) -> set[str] | None: + """Find implicit dependencies on capsules that need to be imported. + + Using primitives or types defined in librt submodules such as "librt.base64" + requires a capsule import. + + Note that a module can depend on a librt module even if it doesn't explicitly + import it, for example via re-exported names or via return types of functions + defined in other modules. + """ + deps: set[str] | None = None + for block in fn.blocks: + for op in block.ops: + # TODO: Also determine implicit type object dependencies (e.g. 
cast targets) + if isinstance(op, CallC) and op.capsule is not None: + if deps is None: + deps = set() + deps.add(op.capsule) + else: + assert not isinstance(op, PrimitiveOp), "Lowered IR is expected" + return deps diff --git a/.venv/lib/python3.12/site-packages/mypyc/analysis/dataflow.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/analysis/dataflow.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..7210748 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/analysis/dataflow.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/analysis/dataflow.py b/.venv/lib/python3.12/site-packages/mypyc/analysis/dataflow.py new file mode 100644 index 0000000..827c70a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/analysis/dataflow.py @@ -0,0 +1,622 @@ +"""Data-flow analyses.""" + +from __future__ import annotations + +from abc import abstractmethod +from collections.abc import Iterable, Iterator +from typing import Generic, TypeVar + +from mypyc.ir.ops import ( + Assign, + AssignMulti, + BasicBlock, + Box, + Branch, + Call, + CallC, + Cast, + ComparisonOp, + ControlOp, + DecRef, + Extend, + Float, + FloatComparisonOp, + FloatNeg, + FloatOp, + GetAttr, + GetElementPtr, + Goto, + IncRef, + InitStatic, + Integer, + IntOp, + KeepAlive, + LoadAddress, + LoadErrorValue, + LoadGlobal, + LoadLiteral, + LoadMem, + LoadStatic, + MethodCall, + Op, + OpVisitor, + PrimitiveOp, + RaiseStandardError, + RegisterOp, + Return, + SetAttr, + SetElement, + SetMem, + Truncate, + TupleGet, + TupleSet, + Unborrow, + Unbox, + Undef, + Unreachable, + Value, +) + + +class CFG: + """Control-flow graph. + + Node 0 is always assumed to be the entry point. There must be a + non-empty set of exits. + """ + + def __init__( + self, + succ: dict[BasicBlock, list[BasicBlock]], + pred: dict[BasicBlock, list[BasicBlock]], + exits: set[BasicBlock], + ) -> None: + assert exits + self.succ = succ + self.pred = pred + self.exits = exits + + def __str__(self) -> str: + exits = sorted(self.exits, key=lambda e: int(e.label)) + return f"exits: {exits}\nsucc: {self.succ}\npred: {self.pred}" + + +def get_cfg(blocks: list[BasicBlock], *, use_yields: bool = False) -> CFG: + """Calculate basic block control-flow graph. + + If use_yields is set, then we treat returns inserted by yields as gotos + instead of exits. + """ + succ_map = {} + pred_map: dict[BasicBlock, list[BasicBlock]] = {} + exits = set() + for block in blocks: + assert not any( + isinstance(op, ControlOp) for op in block.ops[:-1] + ), "Control-flow ops must be at the end of blocks" + + if use_yields and isinstance(block.terminator, Return) and block.terminator.yield_target: + succ = [block.terminator.yield_target] + else: + succ = list(block.terminator.targets()) + if not succ: + exits.add(block) + + # Errors can occur anywhere inside a block, which means that + # we can't assume that the entire block has executed before + # jumping to the error handler. In our CFG construction, we + # model this as saying that a block can jump to its error + # handler or the error handlers of any of its normal + # successors (to represent an error before that next block + # completes). This works well for analyses like "must + # defined", where it implies that registers assigned in a + # block may be undefined in its error handler, but is in + # general not a precise representation of reality; any + # analyses that require more fidelity must wait until after + # exception insertion. 
+ for error_point in [block] + succ: + if error_point.error_handler: + succ.append(error_point.error_handler) + + succ_map[block] = succ + pred_map[block] = [] + for prev, nxt in succ_map.items(): + for label in nxt: + pred_map[label].append(prev) + return CFG(succ_map, pred_map, exits) + + +def get_real_target(label: BasicBlock) -> BasicBlock: + if len(label.ops) == 1 and isinstance(label.ops[-1], Goto): + label = label.ops[-1].label + return label + + +def cleanup_cfg(blocks: list[BasicBlock]) -> None: + """Cleanup the control flow graph. + + This eliminates obviously dead basic blocks and eliminates blocks that contain + nothing but a single jump. + + There is a lot more that could be done. + """ + changed = True + while changed: + # First collapse any jumps to basic block that only contain a goto + for block in blocks: + for i, tgt in enumerate(block.terminator.targets()): + block.terminator.set_target(i, get_real_target(tgt)) + + # Then delete any blocks that have no predecessors + changed = False + cfg = get_cfg(blocks) + orig_blocks = blocks.copy() + blocks.clear() + for i, block in enumerate(orig_blocks): + if i == 0 or cfg.pred[block]: + blocks.append(block) + else: + changed = True + + +T = TypeVar("T") + +AnalysisDict = dict[tuple[BasicBlock, int], set[T]] + + +class AnalysisResult(Generic[T]): + def __init__(self, before: AnalysisDict[T], after: AnalysisDict[T]) -> None: + self.before = before + self.after = after + + def __str__(self) -> str: + return f"before: {self.before}\nafter: {self.after}\n" + + +GenAndKill = tuple[set[T], set[T]] + + +class BaseAnalysisVisitor(OpVisitor[GenAndKill[T]]): + def visit_goto(self, op: Goto) -> GenAndKill[T]: + return set(), set() + + @abstractmethod + def visit_register_op(self, op: RegisterOp) -> GenAndKill[T]: + raise NotImplementedError + + @abstractmethod + def visit_assign(self, op: Assign) -> GenAndKill[T]: + raise NotImplementedError + + @abstractmethod + def visit_assign_multi(self, op: AssignMulti) -> GenAndKill[T]: + raise NotImplementedError + + @abstractmethod + def visit_set_mem(self, op: SetMem) -> GenAndKill[T]: + raise NotImplementedError + + def visit_call(self, op: Call) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_method_call(self, op: MethodCall) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_load_error_value(self, op: LoadErrorValue) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_load_literal(self, op: LoadLiteral) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_get_attr(self, op: GetAttr) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_set_attr(self, op: SetAttr) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_load_static(self, op: LoadStatic) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_init_static(self, op: InitStatic) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_tuple_get(self, op: TupleGet) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_tuple_set(self, op: TupleSet) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_box(self, op: Box) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_unbox(self, op: Unbox) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_cast(self, op: Cast) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_raise_standard_error(self, op: RaiseStandardError) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_call_c(self, 
op: CallC) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_primitive_op(self, op: PrimitiveOp) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_truncate(self, op: Truncate) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_extend(self, op: Extend) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_load_global(self, op: LoadGlobal) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_int_op(self, op: IntOp) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_float_op(self, op: FloatOp) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_float_neg(self, op: FloatNeg) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_comparison_op(self, op: ComparisonOp) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_float_comparison_op(self, op: FloatComparisonOp) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_load_mem(self, op: LoadMem) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_get_element_ptr(self, op: GetElementPtr) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_set_element(self, op: SetElement) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_load_address(self, op: LoadAddress) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_keep_alive(self, op: KeepAlive) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_unborrow(self, op: Unborrow) -> GenAndKill[T]: + return self.visit_register_op(op) + + +class DefinedVisitor(BaseAnalysisVisitor[Value]): + """Visitor for finding defined registers. + + Note that this only deals with registers and not temporaries, on + the assumption that we never access temporaries when they might be + undefined. + + If strict_errors is True, then we regard any use of LoadErrorValue + as making a register undefined. Otherwise we only do if + `undefines` is set on the error value. + + This lets us only consider the things we care about during + uninitialized variable checking while capturing all possibly + undefined things for refcounting. + """ + + def __init__(self, strict_errors: bool = False) -> None: + self.strict_errors = strict_errors + + def visit_branch(self, op: Branch) -> GenAndKill[Value]: + return set(), set() + + def visit_return(self, op: Return) -> GenAndKill[Value]: + return set(), set() + + def visit_unreachable(self, op: Unreachable) -> GenAndKill[Value]: + return set(), set() + + def visit_register_op(self, op: RegisterOp) -> GenAndKill[Value]: + return set(), set() + + def visit_assign(self, op: Assign) -> GenAndKill[Value]: + # Loading an error value may undefine the register. + if isinstance(op.src, LoadErrorValue) and (op.src.undefines or self.strict_errors): + return set(), {op.dest} + else: + return {op.dest}, set() + + def visit_assign_multi(self, op: AssignMulti) -> GenAndKill[Value]: + # Array registers are special and we don't track the definedness of them. + return set(), set() + + def visit_set_mem(self, op: SetMem) -> GenAndKill[Value]: + return set(), set() + + +def analyze_maybe_defined_regs( + blocks: list[BasicBlock], cfg: CFG, initial_defined: set[Value] +) -> AnalysisResult[Value]: + """Calculate potentially defined registers at each CFG location. + + A register is defined if it has a value along some path from the initial location. 
+ """ + return run_analysis( + blocks=blocks, + cfg=cfg, + gen_and_kill=DefinedVisitor(), + initial=initial_defined, + backward=False, + kind=MAYBE_ANALYSIS, + ) + + +def analyze_must_defined_regs( + blocks: list[BasicBlock], + cfg: CFG, + initial_defined: set[Value], + regs: Iterable[Value], + strict_errors: bool = False, +) -> AnalysisResult[Value]: + """Calculate always defined registers at each CFG location. + + This analysis can work before exception insertion, since it is a + sound assumption that registers defined in a block might not be + initialized in its error handler. + + A register is defined if it has a value along all paths from the + initial location. + """ + return run_analysis( + blocks=blocks, + cfg=cfg, + gen_and_kill=DefinedVisitor(strict_errors=strict_errors), + initial=initial_defined, + backward=False, + kind=MUST_ANALYSIS, + universe=set(regs), + ) + + +class BorrowedArgumentsVisitor(BaseAnalysisVisitor[Value]): + def __init__(self, args: set[Value]) -> None: + self.args = args + + def visit_branch(self, op: Branch) -> GenAndKill[Value]: + return set(), set() + + def visit_return(self, op: Return) -> GenAndKill[Value]: + return set(), set() + + def visit_unreachable(self, op: Unreachable) -> GenAndKill[Value]: + return set(), set() + + def visit_register_op(self, op: RegisterOp) -> GenAndKill[Value]: + return set(), set() + + def visit_assign(self, op: Assign) -> GenAndKill[Value]: + if op.dest in self.args: + return set(), {op.dest} + return set(), set() + + def visit_assign_multi(self, op: AssignMulti) -> GenAndKill[Value]: + return set(), set() + + def visit_set_mem(self, op: SetMem) -> GenAndKill[Value]: + return set(), set() + + +def analyze_borrowed_arguments( + blocks: list[BasicBlock], cfg: CFG, borrowed: set[Value] +) -> AnalysisResult[Value]: + """Calculate arguments that can use references borrowed from the caller. + + When assigning to an argument, it no longer is borrowed. 
+ """ + return run_analysis( + blocks=blocks, + cfg=cfg, + gen_and_kill=BorrowedArgumentsVisitor(borrowed), + initial=borrowed, + backward=False, + kind=MUST_ANALYSIS, + universe=borrowed, + ) + + +class UndefinedVisitor(BaseAnalysisVisitor[Value]): + def visit_branch(self, op: Branch) -> GenAndKill[Value]: + return set(), set() + + def visit_return(self, op: Return) -> GenAndKill[Value]: + return set(), set() + + def visit_unreachable(self, op: Unreachable) -> GenAndKill[Value]: + return set(), set() + + def visit_register_op(self, op: RegisterOp) -> GenAndKill[Value]: + return set(), {op} if not op.is_void else set() + + def visit_assign(self, op: Assign) -> GenAndKill[Value]: + return set(), {op.dest} + + def visit_assign_multi(self, op: AssignMulti) -> GenAndKill[Value]: + return set(), {op.dest} + + def visit_set_mem(self, op: SetMem) -> GenAndKill[Value]: + return set(), set() + + +def non_trivial_sources(op: Op) -> set[Value]: + result = set() + for source in op.sources(): + if not isinstance(source, (Integer, Float, Undef)): + result.add(source) + return result + + +class LivenessVisitor(BaseAnalysisVisitor[Value]): + def visit_branch(self, op: Branch) -> GenAndKill[Value]: + return non_trivial_sources(op), set() + + def visit_return(self, op: Return) -> GenAndKill[Value]: + if not isinstance(op.value, (Integer, Float)): + return {op.value}, set() + else: + return set(), set() + + def visit_unreachable(self, op: Unreachable) -> GenAndKill[Value]: + return set(), set() + + def visit_register_op(self, op: RegisterOp) -> GenAndKill[Value]: + gen = non_trivial_sources(op) + if not op.is_void: + return gen, {op} + else: + return gen, set() + + def visit_assign(self, op: Assign) -> GenAndKill[Value]: + return non_trivial_sources(op), {op.dest} + + def visit_assign_multi(self, op: AssignMulti) -> GenAndKill[Value]: + return non_trivial_sources(op), {op.dest} + + def visit_set_mem(self, op: SetMem) -> GenAndKill[Value]: + return non_trivial_sources(op), set() + + def visit_inc_ref(self, op: IncRef) -> GenAndKill[Value]: + return set(), set() + + def visit_dec_ref(self, op: DecRef) -> GenAndKill[Value]: + return set(), set() + + +def analyze_live_regs(blocks: list[BasicBlock], cfg: CFG) -> AnalysisResult[Value]: + """Calculate live registers at each CFG location. + + A register is live at a location if it can be read along some CFG path starting + from the location. + """ + return run_analysis( + blocks=blocks, + cfg=cfg, + gen_and_kill=LivenessVisitor(), + initial=set(), + backward=True, + kind=MAYBE_ANALYSIS, + ) + + +# Analysis kinds +MUST_ANALYSIS = 0 +MAYBE_ANALYSIS = 1 + + +def run_analysis( + blocks: list[BasicBlock], + cfg: CFG, + gen_and_kill: OpVisitor[GenAndKill[T]], + initial: set[T], + kind: int, + backward: bool, + universe: set[T] | None = None, +) -> AnalysisResult[T]: + """Run a general set-based data flow analysis. + + Args: + blocks: All basic blocks + cfg: Control-flow graph for the code + gen_and_kill: Implementation of gen and kill functions for each op + initial: Value of analysis for the entry points (for a forward analysis) or the + exit points (for a backward analysis) + kind: MUST_ANALYSIS or MAYBE_ANALYSIS + backward: If False, the analysis is a forward analysis; it's backward otherwise + universe: For a must analysis, the set of all possible values. This is the starting + value for the work list algorithm, which will narrow this down until reaching a + fixed point. For a maybe analysis the iteration always starts from an empty set + and this argument is ignored. 
+ + Return analysis results: (before, after) + """ + block_gen = {} + block_kill = {} + + # Calculate kill and gen sets for entire basic blocks. + for block in blocks: + gen: set[T] = set() + kill: set[T] = set() + ops = block.ops + if backward: + ops = list(reversed(ops)) + for op in ops: + opgen, opkill = op.accept(gen_and_kill) + gen = (gen - opkill) | opgen + kill = (kill - opgen) | opkill + block_gen[block] = gen + block_kill[block] = kill + + # Set up initial state for worklist algorithm. + worklist = list(blocks) + if not backward: + worklist.reverse() # Reverse for a small performance improvement + workset = set(worklist) + before: dict[BasicBlock, set[T]] = {} + after: dict[BasicBlock, set[T]] = {} + for block in blocks: + if kind == MAYBE_ANALYSIS: + before[block] = set() + after[block] = set() + else: + assert universe is not None, "Universe must be defined for a must analysis" + before[block] = set(universe) + after[block] = set(universe) + + if backward: + pred_map = cfg.succ + succ_map = cfg.pred + else: + pred_map = cfg.pred + succ_map = cfg.succ + + # Run work list algorithm to generate in and out sets for each basic block. + while worklist: + label = worklist.pop() + workset.remove(label) + if pred_map[label]: + new_before: set[T] | None = None + for pred in pred_map[label]: + if new_before is None: + new_before = set(after[pred]) + elif kind == MAYBE_ANALYSIS: + new_before |= after[pred] + else: + new_before &= after[pred] + assert new_before is not None + else: + new_before = set(initial) + before[label] = new_before + new_after = (new_before - block_kill[label]) | block_gen[label] + if new_after != after[label]: + for succ in succ_map[label]: + if succ not in workset: + worklist.append(succ) + workset.add(succ) + after[label] = new_after + + # Run algorithm for each basic block to generate opcode-level sets. 
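The block-level worklist pass above is easier to see in isolation. Below is a minimal standalone sketch of the same gen/kill scheme for a forward MAYBE analysis on a hypothetical three-block graph; the block names, edges, and gen/kill sets are invented for illustration, and mypyc's real implementation (run_analysis above) additionally handles the backward direction and MUST analyses.

def solve_forward_maybe(blocks, succ, gen, kill, initial):
    # Predecessor map derived from the successor map.
    pred = {b: [p for p in blocks if b in succ[p]] for b in blocks}
    before = {b: set() for b in blocks}
    after = {b: set() for b in blocks}
    worklist = list(blocks)
    while worklist:
        b = worklist.pop()
        # MAYBE analysis: union the "after" sets of all predecessors.
        new_before = set(initial) if not pred[b] else set().union(*(after[p] for p in pred[b]))
        before[b] = new_before
        new_after = (new_before - kill[b]) | gen[b]
        if new_after != after[b]:
            after[b] = new_after
            # Revisit successors whose input may have changed.
            worklist.extend(s for s in succ[b] if s not in worklist)
    return before, after

# "x" is generated in entry and killed in mid, so it reaches mid but not exit.
blocks = ["entry", "mid", "exit"]
succ = {"entry": ["mid"], "mid": ["exit"], "exit": []}
gen = {"entry": {"x"}, "mid": {"y"}, "exit": set()}
kill = {"entry": set(), "mid": {"x"}, "exit": set()}
print(solve_forward_maybe(blocks, succ, gen, kill, initial=set()))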
+ op_before: dict[tuple[BasicBlock, int], set[T]] = {} + op_after: dict[tuple[BasicBlock, int], set[T]] = {} + for block in blocks: + label = block + cur = before[label] + ops_enum: Iterator[tuple[int, Op]] = enumerate(block.ops) + if backward: + ops_enum = reversed(list(ops_enum)) + for idx, op in ops_enum: + op_before[label, idx] = cur + opgen, opkill = op.accept(gen_and_kill) + cur = (cur - opkill) | opgen + op_after[label, idx] = cur + if backward: + op_after, op_before = op_before, op_after + + return AnalysisResult(op_before, op_after) diff --git a/.venv/lib/python3.12/site-packages/mypyc/analysis/ircheck.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/analysis/ircheck.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..d113007 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/analysis/ircheck.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/analysis/ircheck.py b/.venv/lib/python3.12/site-packages/mypyc/analysis/ircheck.py new file mode 100644 index 0000000..6980c9c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/analysis/ircheck.py @@ -0,0 +1,440 @@ +"""Utilities for checking that internal ir is valid and consistent.""" + +from __future__ import annotations + +from mypyc.ir.func_ir import FUNC_STATICMETHOD, FuncIR +from mypyc.ir.ops import ( + Assign, + AssignMulti, + BaseAssign, + BasicBlock, + Box, + Branch, + Call, + CallC, + Cast, + ComparisonOp, + ControlOp, + DecRef, + Extend, + Float, + FloatComparisonOp, + FloatNeg, + FloatOp, + GetAttr, + GetElementPtr, + Goto, + IncRef, + InitStatic, + Integer, + IntOp, + KeepAlive, + LoadAddress, + LoadErrorValue, + LoadGlobal, + LoadLiteral, + LoadMem, + LoadStatic, + MethodCall, + Op, + OpVisitor, + PrimitiveOp, + RaiseStandardError, + Register, + Return, + SetAttr, + SetElement, + SetMem, + Truncate, + TupleGet, + TupleSet, + Unborrow, + Unbox, + Undef, + Unreachable, + Value, +) +from mypyc.ir.pprint import format_func +from mypyc.ir.rtypes import ( + KNOWN_NATIVE_TYPES, + RArray, + RInstance, + RPrimitive, + RType, + RUnion, + bytes_rprimitive, + dict_rprimitive, + int_rprimitive, + is_float_rprimitive, + is_object_rprimitive, + list_rprimitive, + range_rprimitive, + set_rprimitive, + str_rprimitive, + tuple_rprimitive, +) + + +class FnError: + def __init__(self, source: Op | BasicBlock, desc: str) -> None: + self.source = source + self.desc = desc + + def __eq__(self, other: object) -> bool: + return ( + isinstance(other, FnError) and self.source == other.source and self.desc == other.desc + ) + + def __repr__(self) -> str: + return f"FnError(source={self.source}, desc={self.desc})" + + +def check_func_ir(fn: FuncIR) -> list[FnError]: + """Applies validations to a given function ir and returns a list of errors found.""" + errors = [] + + op_set = set() + + for block in fn.blocks: + if not block.terminated: + errors.append( + FnError(source=block.ops[-1] if block.ops else block, desc="Block not terminated") + ) + for op in block.ops[:-1]: + if isinstance(op, ControlOp): + errors.append(FnError(source=op, desc="Block has operations after control op")) + + if op in op_set: + errors.append(FnError(source=op, desc="Func has a duplicate op")) + op_set.add(op) + + errors.extend(check_op_sources_valid(fn)) + if errors: + return errors + + op_checker = OpChecker(fn) + for block in fn.blocks: + for op in block.ops: + op.accept(op_checker) + + return op_checker.errors + + +class IrCheckException(Exception): + pass + + +def 
assert_func_ir_valid(fn: FuncIR) -> None: + errors = check_func_ir(fn) + if errors: + raise IrCheckException( + "Internal error: Generated invalid IR: \n" + + "\n".join(format_func(fn, [(e.source, e.desc) for e in errors])) + ) + + +def check_op_sources_valid(fn: FuncIR) -> list[FnError]: + errors = [] + valid_ops: set[Op] = set() + valid_registers: set[Register] = set() + + for block in fn.blocks: + valid_ops.update(block.ops) + + for op in block.ops: + if isinstance(op, BaseAssign): + valid_registers.add(op.dest) + elif isinstance(op, LoadAddress) and isinstance(op.src, Register): + valid_registers.add(op.src) + + valid_registers.update(fn.arg_regs) + + for block in fn.blocks: + for op in block.ops: + for source in op.sources(): + if isinstance(source, (Integer, Float, Undef)): + pass + elif isinstance(source, Op): + if source not in valid_ops: + errors.append( + FnError( + source=op, + desc=f"Invalid op reference to op of type {type(source).__name__}", + ) + ) + elif isinstance(source, Register): + if source not in valid_registers: + errors.append( + FnError( + source=op, desc=f"Invalid op reference to register {source.name!r}" + ) + ) + + return errors + + +disjoint_types = { + int_rprimitive.name, + bytes_rprimitive.name, + str_rprimitive.name, + dict_rprimitive.name, + list_rprimitive.name, + set_rprimitive.name, + tuple_rprimitive.name, + range_rprimitive.name, +} | set(KNOWN_NATIVE_TYPES) + + +def can_coerce_to(src: RType, dest: RType) -> bool: + """Check if src can be assigned to dest_rtype. + + Currently okay to have false positives. + """ + if isinstance(dest, RUnion): + return any(can_coerce_to(src, d) for d in dest.items) + + if isinstance(dest, RPrimitive): + if isinstance(src, RPrimitive): + # If either src or dest is a disjoint type, then they must both be. + if src.name in disjoint_types and dest.name in disjoint_types: + return src.name == dest.name + return src.size == dest.size + if isinstance(src, RInstance): + return is_object_rprimitive(dest) + if isinstance(src, RUnion): + # IR doesn't have the ability to narrow unions based on + # control flow, so cannot be a strict all() here. 
+ return any(can_coerce_to(s, dest) for s in src.items) + return False + + return True + + +class OpChecker(OpVisitor[None]): + def __init__(self, parent_fn: FuncIR) -> None: + self.parent_fn = parent_fn + self.errors: list[FnError] = [] + + def fail(self, source: Op, desc: str) -> None: + self.errors.append(FnError(source=source, desc=desc)) + + def check_control_op_targets(self, op: ControlOp) -> None: + for target in op.targets(): + if target not in self.parent_fn.blocks: + self.fail(source=op, desc=f"Invalid control operation target: {target.label}") + + def check_type_coercion(self, op: Op, src: RType, dest: RType) -> None: + if not can_coerce_to(src, dest): + self.fail( + source=op, desc=f"Cannot coerce source type {src.name} to dest type {dest.name}" + ) + + def check_compatibility(self, op: Op, t: RType, s: RType) -> None: + if not can_coerce_to(t, s) or not can_coerce_to(s, t): + self.fail(source=op, desc=f"{t.name} and {s.name} are not compatible") + + def expect_float(self, op: Op, v: Value) -> None: + if not is_float_rprimitive(v.type): + self.fail(op, f"Float expected (actual type is {v.type})") + + def expect_non_float(self, op: Op, v: Value) -> None: + if is_float_rprimitive(v.type): + self.fail(op, "Float not expected") + + def visit_goto(self, op: Goto) -> None: + self.check_control_op_targets(op) + + def visit_branch(self, op: Branch) -> None: + self.check_control_op_targets(op) + + def visit_return(self, op: Return) -> None: + self.check_type_coercion(op, op.value.type, self.parent_fn.decl.sig.ret_type) + + def visit_unreachable(self, op: Unreachable) -> None: + # Unreachables are checked at a higher level since validation + # requires access to the entire basic block. + pass + + def visit_assign(self, op: Assign) -> None: + self.check_type_coercion(op, op.src.type, op.dest.type) + + def visit_assign_multi(self, op: AssignMulti) -> None: + for src in op.src: + assert isinstance(op.dest.type, RArray) + self.check_type_coercion(op, src.type, op.dest.type.item_type) + + def visit_load_error_value(self, op: LoadErrorValue) -> None: + # Currently it is assumed that all types have an error value. + # Once this is fixed we can validate that the rtype here actually + # has an error value. 
+ pass + + def check_tuple_items_valid_literals(self, op: LoadLiteral, t: tuple[object, ...]) -> None: + for x in t: + if x is not None and not isinstance(x, (str, bytes, bool, int, float, complex, tuple)): + self.fail(op, f"Invalid type for item of tuple literal: {type(x)})") + if isinstance(x, tuple): + self.check_tuple_items_valid_literals(op, x) + + def check_frozenset_items_valid_literals(self, op: LoadLiteral, s: frozenset[object]) -> None: + for x in s: + if x is None or isinstance(x, (str, bytes, bool, int, float, complex)): + pass + elif isinstance(x, tuple): + self.check_tuple_items_valid_literals(op, x) + else: + self.fail(op, f"Invalid type for item of frozenset literal: {type(x)})") + + def visit_load_literal(self, op: LoadLiteral) -> None: + expected_type = None + if op.value is None: + expected_type = "builtins.object" + elif isinstance(op.value, int): + expected_type = "builtins.int" + elif isinstance(op.value, str): + expected_type = "builtins.str" + elif isinstance(op.value, bytes): + expected_type = "builtins.bytes" + elif isinstance(op.value, bool): + expected_type = "builtins.object" + elif isinstance(op.value, float): + expected_type = "builtins.float" + elif isinstance(op.value, complex): + expected_type = "builtins.object" + elif isinstance(op.value, tuple): + expected_type = "builtins.tuple" + self.check_tuple_items_valid_literals(op, op.value) + elif isinstance(op.value, frozenset): + # There's no frozenset_rprimitive type since it'd be pretty useless so we just pretend + # it's a set (when it's really a frozenset). + expected_type = "builtins.set" + self.check_frozenset_items_valid_literals(op, op.value) + + assert expected_type is not None, "Missed a case for LoadLiteral check" + + if op.type.name not in [expected_type, "builtins.object"]: + self.fail( + op, + f"Invalid literal value for type: value has " + f"type {expected_type}, but op has type {op.type.name}", + ) + + def visit_get_attr(self, op: GetAttr) -> None: + # Nothing to do. + pass + + def visit_set_attr(self, op: SetAttr) -> None: + # Nothing to do. + pass + + # Static operations cannot be checked at the function level. + def visit_load_static(self, op: LoadStatic) -> None: + pass + + def visit_init_static(self, op: InitStatic) -> None: + pass + + def visit_tuple_get(self, op: TupleGet) -> None: + # Nothing to do. + pass + + def visit_tuple_set(self, op: TupleSet) -> None: + # Nothing to do. + pass + + def visit_inc_ref(self, op: IncRef) -> None: + # Nothing to do. + pass + + def visit_dec_ref(self, op: DecRef) -> None: + # Nothing to do. + pass + + def visit_call(self, op: Call) -> None: + # Length is checked in constructor, and return type is set + # in a way that can't be incorrect + for arg_value, arg_runtime in zip(op.args, op.fn.sig.args): + self.check_type_coercion(op, arg_value.type, arg_runtime.type) + + def visit_method_call(self, op: MethodCall) -> None: + # Similar to above, but we must look up method first. 
+ method_decl = op.receiver_type.class_ir.method_decl(op.method) + if method_decl.kind == FUNC_STATICMETHOD: + decl_index = 0 + else: + decl_index = 1 + + if len(op.args) + decl_index != len(method_decl.sig.args): + self.fail(op, "Incorrect number of args for method call.") + + # Skip the receiver argument (self) + for arg_value, arg_runtime in zip(op.args, method_decl.sig.args[decl_index:]): + self.check_type_coercion(op, arg_value.type, arg_runtime.type) + + def visit_cast(self, op: Cast) -> None: + pass + + def visit_box(self, op: Box) -> None: + pass + + def visit_unbox(self, op: Unbox) -> None: + pass + + def visit_raise_standard_error(self, op: RaiseStandardError) -> None: + pass + + def visit_call_c(self, op: CallC) -> None: + pass + + def visit_primitive_op(self, op: PrimitiveOp) -> None: + pass + + def visit_truncate(self, op: Truncate) -> None: + pass + + def visit_extend(self, op: Extend) -> None: + pass + + def visit_load_global(self, op: LoadGlobal) -> None: + pass + + def visit_int_op(self, op: IntOp) -> None: + self.expect_non_float(op, op.lhs) + self.expect_non_float(op, op.rhs) + + def visit_comparison_op(self, op: ComparisonOp) -> None: + self.check_compatibility(op, op.lhs.type, op.rhs.type) + self.expect_non_float(op, op.lhs) + self.expect_non_float(op, op.rhs) + + def visit_float_op(self, op: FloatOp) -> None: + self.expect_float(op, op.lhs) + self.expect_float(op, op.rhs) + + def visit_float_neg(self, op: FloatNeg) -> None: + self.expect_float(op, op.src) + + def visit_float_comparison_op(self, op: FloatComparisonOp) -> None: + self.expect_float(op, op.lhs) + self.expect_float(op, op.rhs) + + def visit_load_mem(self, op: LoadMem) -> None: + pass + + def visit_set_mem(self, op: SetMem) -> None: + pass + + def visit_get_element_ptr(self, op: GetElementPtr) -> None: + pass + + def visit_set_element(self, op: SetElement) -> None: + pass + + def visit_load_address(self, op: LoadAddress) -> None: + pass + + def visit_keep_alive(self, op: KeepAlive) -> None: + pass + + def visit_unborrow(self, op: Unborrow) -> None: + pass diff --git a/.venv/lib/python3.12/site-packages/mypyc/analysis/selfleaks.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/analysis/selfleaks.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..b2f8225 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/analysis/selfleaks.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/analysis/selfleaks.py b/.venv/lib/python3.12/site-packages/mypyc/analysis/selfleaks.py new file mode 100644 index 0000000..8f46cbe --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/analysis/selfleaks.py @@ -0,0 +1,213 @@ +from __future__ import annotations + +from mypyc.analysis.dataflow import CFG, MAYBE_ANALYSIS, AnalysisResult, run_analysis +from mypyc.ir.ops import ( + Assign, + AssignMulti, + BasicBlock, + Box, + Branch, + Call, + CallC, + Cast, + ComparisonOp, + Extend, + FloatComparisonOp, + FloatNeg, + FloatOp, + GetAttr, + GetElementPtr, + Goto, + InitStatic, + IntOp, + KeepAlive, + LoadAddress, + LoadErrorValue, + LoadGlobal, + LoadLiteral, + LoadMem, + LoadStatic, + MethodCall, + OpVisitor, + PrimitiveOp, + RaiseStandardError, + Register, + RegisterOp, + Return, + SetAttr, + SetElement, + SetMem, + Truncate, + TupleGet, + TupleSet, + Unborrow, + Unbox, + Unreachable, +) +from mypyc.ir.rtypes import RInstance + +GenAndKill = tuple[set[None], set[None]] + +CLEAN: GenAndKill = (set(), set()) +DIRTY: GenAndKill = ({None}, 
{None}) + + +class SelfLeakedVisitor(OpVisitor[GenAndKill]): + """Analyze whether 'self' may be seen by arbitrary code in '__init__'. + + More formally, the set is not empty if along some path from IR entry point + arbitrary code could have been executed that has access to 'self'. + + (We don't consider access via 'gc.get_objects()'.) + """ + + def __init__(self, self_reg: Register) -> None: + self.self_reg = self_reg + + def visit_goto(self, op: Goto) -> GenAndKill: + return CLEAN + + def visit_branch(self, op: Branch) -> GenAndKill: + return CLEAN + + def visit_return(self, op: Return) -> GenAndKill: + # Consider all exits from the function 'dirty' since they implicitly + # cause 'self' to be returned. + return DIRTY + + def visit_unreachable(self, op: Unreachable) -> GenAndKill: + return CLEAN + + def visit_assign(self, op: Assign) -> GenAndKill: + if op.src is self.self_reg or op.dest is self.self_reg: + return DIRTY + return CLEAN + + def visit_assign_multi(self, op: AssignMulti) -> GenAndKill: + return CLEAN + + def visit_set_mem(self, op: SetMem) -> GenAndKill: + return CLEAN + + def visit_call(self, op: Call) -> GenAndKill: + fn = op.fn + if fn.class_name and fn.name == "__init__": + self_type = op.fn.sig.args[0].type + assert isinstance(self_type, RInstance), self_type + cl = self_type.class_ir + if not cl.init_self_leak: + return CLEAN + return self.check_register_op(op) + + def visit_method_call(self, op: MethodCall) -> GenAndKill: + return self.check_register_op(op) + + def visit_load_error_value(self, op: LoadErrorValue) -> GenAndKill: + return CLEAN + + def visit_load_literal(self, op: LoadLiteral) -> GenAndKill: + return CLEAN + + def visit_get_attr(self, op: GetAttr) -> GenAndKill: + cl = op.class_type.class_ir + if cl.get_method(op.attr): + # Property -- calls a function + return self.check_register_op(op) + return CLEAN + + def visit_set_attr(self, op: SetAttr) -> GenAndKill: + cl = op.class_type.class_ir + if cl.get_method(op.attr): + # Property - calls a function + return self.check_register_op(op) + return CLEAN + + def visit_load_static(self, op: LoadStatic) -> GenAndKill: + return CLEAN + + def visit_init_static(self, op: InitStatic) -> GenAndKill: + return self.check_register_op(op) + + def visit_tuple_get(self, op: TupleGet) -> GenAndKill: + return CLEAN + + def visit_tuple_set(self, op: TupleSet) -> GenAndKill: + return self.check_register_op(op) + + def visit_box(self, op: Box) -> GenAndKill: + return self.check_register_op(op) + + def visit_unbox(self, op: Unbox) -> GenAndKill: + return self.check_register_op(op) + + def visit_cast(self, op: Cast) -> GenAndKill: + return self.check_register_op(op) + + def visit_raise_standard_error(self, op: RaiseStandardError) -> GenAndKill: + return CLEAN + + def visit_call_c(self, op: CallC) -> GenAndKill: + return self.check_register_op(op) + + def visit_primitive_op(self, op: PrimitiveOp) -> GenAndKill: + return self.check_register_op(op) + + def visit_truncate(self, op: Truncate) -> GenAndKill: + return CLEAN + + def visit_extend(self, op: Extend) -> GenAndKill: + return CLEAN + + def visit_load_global(self, op: LoadGlobal) -> GenAndKill: + return CLEAN + + def visit_int_op(self, op: IntOp) -> GenAndKill: + return CLEAN + + def visit_comparison_op(self, op: ComparisonOp) -> GenAndKill: + return CLEAN + + def visit_float_op(self, op: FloatOp) -> GenAndKill: + return CLEAN + + def visit_float_neg(self, op: FloatNeg) -> GenAndKill: + return CLEAN + + def visit_float_comparison_op(self, op: FloatComparisonOp) -> GenAndKill: + 
return CLEAN + + def visit_load_mem(self, op: LoadMem) -> GenAndKill: + return CLEAN + + def visit_get_element_ptr(self, op: GetElementPtr) -> GenAndKill: + return CLEAN + + def visit_set_element(self, op: SetElement) -> GenAndKill: + return CLEAN + + def visit_load_address(self, op: LoadAddress) -> GenAndKill: + return CLEAN + + def visit_keep_alive(self, op: KeepAlive) -> GenAndKill: + return CLEAN + + def visit_unborrow(self, op: Unborrow) -> GenAndKill: + return CLEAN + + def check_register_op(self, op: RegisterOp) -> GenAndKill: + if any(src is self.self_reg for src in op.sources()): + return DIRTY + return CLEAN + + +def analyze_self_leaks( + blocks: list[BasicBlock], self_reg: Register, cfg: CFG +) -> AnalysisResult[None]: + return run_analysis( + blocks=blocks, + cfg=cfg, + gen_and_kill=SelfLeakedVisitor(self_reg), + initial=set(), + backward=False, + kind=MAYBE_ANALYSIS, + ) diff --git a/.venv/lib/python3.12/site-packages/mypyc/annotate.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/annotate.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..0a96311 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/annotate.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/annotate.py b/.venv/lib/python3.12/site-packages/mypyc/annotate.py new file mode 100644 index 0000000..bc282fc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/annotate.py @@ -0,0 +1,472 @@ +"""Generate source code formatted as HTML, with bottlenecks annotated and highlighted. + +Various heuristics are used to detect common issues that cause slower than +expected performance. +""" + +from __future__ import annotations + +import os.path +import sys +from html import escape +from typing import Final + +from mypy.build import BuildResult +from mypy.nodes import ( + AssignmentStmt, + CallExpr, + ClassDef, + Decorator, + DictionaryComprehension, + Expression, + ForStmt, + FuncDef, + GeneratorExpr, + IndexExpr, + LambdaExpr, + MemberExpr, + MypyFile, + NamedTupleExpr, + NameExpr, + NewTypeExpr, + Node, + OpExpr, + RefExpr, + TupleExpr, + TypedDictExpr, + TypeInfo, + TypeVarExpr, + Var, + WithStmt, +) +from mypy.traverser import TraverserVisitor +from mypy.types import AnyType, Instance, ProperType, Type, TypeOfAny, get_proper_type +from mypy.util import FancyFormatter +from mypyc.ir.func_ir import FuncIR +from mypyc.ir.module_ir import ModuleIR +from mypyc.ir.ops import CallC, LoadLiteral, LoadStatic, Value +from mypyc.irbuild.mapper import Mapper + + +class Annotation: + """HTML annotation for compiled source code""" + + def __init__(self, message: str, priority: int = 1) -> None: + # Message as HTML that describes an issue and/or how to fix it. + # Multiple messages on a line may be concatenated. + self.message = message + # If multiple annotations are generated for a single line, only report + # the highest-priority ones. Some use cases generate multiple annotations, + # and this can be used to reduce verbosity by hiding the lower-priority + # ones. 
+ self.priority = priority + + +op_hints: Final = { + "PyNumber_Add": Annotation('Generic "+" operation.'), + "PyNumber_Subtract": Annotation('Generic "-" operation.'), + "PyNumber_Multiply": Annotation('Generic "*" operation.'), + "PyNumber_TrueDivide": Annotation('Generic "/" operation.'), + "PyNumber_FloorDivide": Annotation('Generic "//" operation.'), + "PyNumber_Positive": Annotation('Generic unary "+" operation.'), + "PyNumber_Negative": Annotation('Generic unary "-" operation.'), + "PyNumber_And": Annotation('Generic "&" operation.'), + "PyNumber_Or": Annotation('Generic "|" operation.'), + "PyNumber_Xor": Annotation('Generic "^" operation.'), + "PyNumber_Lshift": Annotation('Generic "<<" operation.'), + "PyNumber_Rshift": Annotation('Generic ">>" operation.'), + "PyNumber_Invert": Annotation('Generic "~" operation.'), + "PyObject_Call": Annotation("Generic call operation."), + "PyObject_CallObject": Annotation("Generic call operation."), + "PyObject_RichCompare": Annotation("Generic comparison operation."), + "PyObject_GetItem": Annotation("Generic indexing operation."), + "PyObject_SetItem": Annotation("Generic indexed assignment."), +} + +stdlib_hints: Final = { + "functools.partial": Annotation( + '"functools.partial" is inefficient in compiled code.', priority=3 + ), + "itertools.chain": Annotation( + '"itertools.chain" is inefficient in compiled code (hint: replace with for loops).', + priority=3, + ), + "itertools.groupby": Annotation( + '"itertools.groupby" is inefficient in compiled code.', priority=3 + ), + "itertools.islice": Annotation( + '"itertools.islice" is inefficient in compiled code (hint: replace with for loop over index range).', + priority=3, + ), + "copy.deepcopy": Annotation( + '"copy.deepcopy" tends to be slow. Make a shallow copy if possible.', priority=2 + ), +} + +CSS = """\ +.collapsible { + cursor: pointer; +} + +.content { + display: block; + margin-top: 10px; + margin-bottom: 10px; +} + +.hint { + display: inline; + border: 1px solid #ccc; + padding: 5px; +} +""" + +JS = """\ +document.querySelectorAll('.collapsible').forEach(function(collapsible) { + collapsible.addEventListener('click', function() { + const content = this.nextElementSibling; + if (content.style.display === 'none') { + content.style.display = 'block'; + } else { + content.style.display = 'none'; + } + }); +}); +""" + + +class AnnotatedSource: + """Annotations for a single compiled source file.""" + + def __init__(self, path: str, annotations: dict[int, list[Annotation]]) -> None: + self.path = path + self.annotations = annotations + + +def generate_annotated_html( + html_fnam: str, result: BuildResult, modules: dict[str, ModuleIR], mapper: Mapper +) -> None: + annotations = [] + for mod, mod_ir in modules.items(): + path = result.graph[mod].path + tree = result.graph[mod].tree + assert tree is not None + annotations.append( + generate_annotations(path or "", tree, mod_ir, result.types, mapper) + ) + html = generate_html_report(annotations) + with open(html_fnam, "w") as f: + f.write(html) + + formatter = FancyFormatter(sys.stdout, sys.stderr, False) + formatted = formatter.style(os.path.abspath(html_fnam), "none", underline=True, bold=True) + print(f"\nWrote {formatted} -- open in browser to view\n") + + +def generate_annotations( + path: str, tree: MypyFile, ir: ModuleIR, type_map: dict[Expression, Type], mapper: Mapper +) -> AnnotatedSource: + anns = {} + for func_ir in ir.functions: + anns.update(function_annotations(func_ir, tree)) + visitor = ASTAnnotateVisitor(type_map, mapper) 
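The priority field described in the Annotation class above is applied per source line: when several annotations land on the same line, only the highest-priority ones are kept (the get_max_prio helper defined later in this file does exactly this). A minimal illustration, assuming the Annotation class above is in scope:

anns = [
    Annotation('Generic "+" operation.', priority=1),
    Annotation('"itertools.chain" is inefficient in compiled code.', priority=3),
]
top = max(a.priority for a in anns)
print([a.message for a in anns if a.priority == top])  # only the priority-3 hint remains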
+ for defn in tree.defs: + defn.accept(visitor) + anns.update(visitor.anns) + for line in visitor.ignored_lines: + if line in anns: + del anns[line] + return AnnotatedSource(path, anns) + + +def function_annotations(func_ir: FuncIR, tree: MypyFile) -> dict[int, list[Annotation]]: + """Generate annotations based on mypyc IR.""" + # TODO: check if func_ir.line is -1 + anns: dict[int, list[Annotation]] = {} + for block in func_ir.blocks: + for op in block.ops: + if isinstance(op, CallC): + name = op.function_name + ann: str | Annotation | None = None + if name == "CPyObject_GetAttr": + attr_name = get_str_literal(op.args[1]) + if attr_name in ("__prepare__", "GeneratorExit", "StopIteration"): + # These attributes are internal to mypyc/CPython, and/or accessed + # implicitly in generated code. The user has little control over + # them. + ann = None + elif attr_name: + ann = f'Get non-native attribute "{attr_name}".' + else: + ann = "Dynamic attribute lookup." + elif name == "PyObject_SetAttr": + attr_name = get_str_literal(op.args[1]) + if attr_name == "__mypyc_attrs__": + # This is set implicitly and can't be avoided. + ann = None + elif attr_name: + ann = f'Set non-native attribute "{attr_name}".' + else: + ann = "Dynamic attribute set." + elif name == "PyObject_VectorcallMethod": + method_name = get_str_literal(op.args[0]) + if method_name: + ann = f'Call non-native method "{method_name}" (it may be defined in a non-native class, or decorated).' + else: + ann = "Dynamic method call." + elif name in op_hints: + ann = op_hints[name] + elif name in ("CPyDict_GetItem", "CPyDict_SetItem"): + if ( + isinstance(op.args[0], LoadStatic) + and isinstance(op.args[1], LoadLiteral) + and func_ir.name != "__top_level__" + ): + load = op.args[0] + name = str(op.args[1].value) + sym = tree.names.get(name) + if ( + sym + and sym.node + and load.namespace == "static" + and load.identifier == "globals" + ): + if sym.node.fullname in stdlib_hints: + ann = stdlib_hints[sym.node.fullname] + elif isinstance(sym.node, Var): + ann = ( + f'Access global "{name}" through namespace ' + + "dictionary (hint: access is faster if you can make it Final)." + ) + else: + ann = f'Access "{name}" through global namespace dictionary.' + if ann: + if isinstance(ann, str): + ann = Annotation(ann) + anns.setdefault(op.line, []).append(ann) + return anns + + +class ASTAnnotateVisitor(TraverserVisitor): + """Generate annotations from mypy AST and inferred types.""" + + def __init__(self, type_map: dict[Expression, Type], mapper: Mapper) -> None: + self.anns: dict[int, list[Annotation]] = {} + self.ignored_lines: set[int] = set() + self.func_depth = 0 + self.type_map = type_map + self.mapper = mapper + + def visit_func_def(self, o: FuncDef, /) -> None: + if self.func_depth > 0: + self.annotate( + o, + "A nested function object is allocated each time statement is executed. 
" + + "A module-level function would be faster.", + ) + self.func_depth += 1 + super().visit_func_def(o) + self.func_depth -= 1 + + def visit_for_stmt(self, o: ForStmt, /) -> None: + self.check_iteration([o.expr], "For loop") + super().visit_for_stmt(o) + + def visit_dictionary_comprehension(self, o: DictionaryComprehension, /) -> None: + self.check_iteration(o.sequences, "Comprehension") + super().visit_dictionary_comprehension(o) + + def visit_generator_expr(self, o: GeneratorExpr, /) -> None: + self.check_iteration(o.sequences, "Comprehension or generator") + super().visit_generator_expr(o) + + def check_iteration(self, expressions: list[Expression], kind: str) -> None: + for expr in expressions: + typ = self.get_type(expr) + if isinstance(typ, AnyType): + self.annotate(expr, f'{kind} uses generic operations (iterable has type "Any").') + elif isinstance(typ, Instance) and typ.type.fullname in ( + "typing.Iterable", + "typing.Iterator", + "typing.Sequence", + "typing.MutableSequence", + ): + self.annotate( + expr, + f'{kind} uses generic operations (iterable has the abstract type "{typ.type.fullname}").', + ) + + def visit_class_def(self, o: ClassDef, /) -> None: + super().visit_class_def(o) + if self.func_depth == 0: + # Don't complain about base classes at top level + for base in o.base_type_exprs: + self.ignored_lines.add(base.line) + + for s in o.defs.body: + if isinstance(s, AssignmentStmt): + # Don't complain about attribute initializers + self.ignored_lines.add(s.line) + elif isinstance(s, Decorator): + # Don't complain about decorator definitions that generate some + # dynamic operations. This is a bit heavy-handed. + self.ignored_lines.add(s.func.line) + + def visit_with_stmt(self, o: WithStmt, /) -> None: + for expr in o.expr: + if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr): + node = expr.callee.node + if isinstance(node, Decorator): + if any( + isinstance(d, RefExpr) + and d.node + and d.node.fullname == "contextlib.contextmanager" + for d in node.decorators + ): + self.annotate( + expr, + f'"{node.name}" uses @contextmanager, which is slow ' + + "in compiled code. 
Use a native class with " + + '"__enter__" and "__exit__" methods instead.', + priority=3, + ) + super().visit_with_stmt(o) + + def visit_assignment_stmt(self, o: AssignmentStmt, /) -> None: + special_form = False + if self.func_depth == 0: + analyzed: Expression | None = o.rvalue + if isinstance(o.rvalue, (CallExpr, IndexExpr, OpExpr)): + analyzed = o.rvalue.analyzed + if o.is_alias_def or isinstance( + analyzed, (TypeVarExpr, NamedTupleExpr, TypedDictExpr, NewTypeExpr) + ): + special_form = True + if special_form: + # TODO: Ignore all lines if multi-line + self.ignored_lines.add(o.line) + super().visit_assignment_stmt(o) + + def visit_name_expr(self, o: NameExpr, /) -> None: + if ann := stdlib_hints.get(o.fullname): + self.annotate(o, ann) + + def visit_member_expr(self, o: MemberExpr, /) -> None: + super().visit_member_expr(o) + if ann := stdlib_hints.get(o.fullname): + self.annotate(o, ann) + + def visit_call_expr(self, o: CallExpr, /) -> None: + super().visit_call_expr(o) + if ( + isinstance(o.callee, RefExpr) + and o.callee.fullname == "builtins.isinstance" + and len(o.args) == 2 + ): + arg = o.args[1] + self.check_isinstance_arg(arg) + elif isinstance(o.callee, RefExpr) and isinstance(o.callee.node, TypeInfo): + info = o.callee.node + class_ir = self.mapper.type_to_ir.get(info) + if (class_ir and not class_ir.is_ext_class) or ( + class_ir is None and not info.fullname.startswith("builtins.") + ): + self.annotate( + o, f'Creating an instance of non-native class "{info.name}" ' + "is slow.", 2 + ) + elif class_ir and class_ir.is_augmented: + self.annotate( + o, + f'Class "{info.name}" is only partially native, and ' + + "constructing an instance is slow.", + 2, + ) + elif isinstance(o.callee, RefExpr) and isinstance(o.callee.node, Decorator): + decorator = o.callee.node + if self.mapper.is_native_ref_expr(o.callee): + self.annotate( + o, + f'Calling a decorated function ("{decorator.name}") is inefficient, even if it\'s native.', + 2, + ) + + def check_isinstance_arg(self, arg: Expression) -> None: + if isinstance(arg, RefExpr): + if isinstance(arg.node, TypeInfo) and arg.node.is_protocol: + self.annotate( + arg, f'Expensive isinstance() check against protocol "{arg.node.name}".' + ) + elif isinstance(arg, TupleExpr): + for item in arg.items: + self.check_isinstance_arg(item) + + def visit_lambda_expr(self, o: LambdaExpr, /) -> None: + self.annotate( + o, + "A new object is allocated for lambda each time it is evaluated. " + + "A module-level function would be faster.", + ) + super().visit_lambda_expr(o) + + def annotate(self, o: Node, ann: str | Annotation, priority: int = 1) -> None: + if isinstance(ann, str): + ann = Annotation(ann, priority=priority) + self.anns.setdefault(o.line, []).append(ann) + + def get_type(self, e: Expression) -> ProperType: + t = self.type_map.get(e) + if t: + return get_proper_type(t) + return AnyType(TypeOfAny.unannotated) + + +def get_str_literal(v: Value) -> str | None: + if isinstance(v, LoadLiteral) and isinstance(v.value, str): + return v.value + return None + + +def get_max_prio(anns: list[Annotation]) -> list[Annotation]: + max_prio = max(a.priority for a in anns) + return [a for a in anns if a.priority == max_prio] + + +def generate_html_report(sources: list[AnnotatedSource]) -> str: + html = [] + html.append("\n\n") + html.append(f"") + html.append("\n") + html.append("\n") + for src in sources: + html.append(f"

<h2>{src.path}</h2>\n")
+        html.append("<pre>")
+        src_anns = src.annotations
+        with open(src.path) as f:
+            lines = f.readlines()
+        for i, s in enumerate(lines):
+            s = escape(s)
+            line = i + 1
+            linenum = "%5d" % line
+            if line in src_anns:
+                anns = get_max_prio(src_anns[line])
+                ann_strs = [a.message for a in anns]
+                hint = " ".join(ann_strs)
+                s = colorize_line(linenum, s, hint_html=hint)
+            else:
+                s = linenum + "  " + s
+            html.append(s)
+        html.append("</pre>")
+
+    html.append("</body>")
+
+    html.append(f"<script>\n{JS}\n</script>")
+    html.append("</html>\n")
+    return "".join(html)
+
+
+def colorize_line(linenum: str, s: str, hint_html: str) -> str:
+    hint_prefix = " " * len(linenum) + "  "
+    line_span = f'<span class="collapsible">{linenum}  {s}</span>'
+    hint_div = f'<div class="content">{hint_prefix}<span class="hint">{hint_html}</span></div>
' + return f"{line_span}{hint_div}" diff --git a/.venv/lib/python3.12/site-packages/mypyc/build.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/build.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..f8ecf9e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/build.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/build.py b/.venv/lib/python3.12/site-packages/mypyc/build.py new file mode 100644 index 0000000..69ef6c3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/build.py @@ -0,0 +1,756 @@ +"""Support for building extensions using mypyc with distutils or setuptools + +The main entry point is mypycify, which produces a list of extension +modules to be passed to setup. A trivial setup.py for a mypyc built +project, then, looks like: + + from setuptools import setup + from mypyc.build import mypycify + + setup(name='test_module', + ext_modules=mypycify(['foo.py']), + ) + +See the mypycify docs for additional arguments. + +mypycify can integrate with either distutils or setuptools, but needs +to know at import-time whether it is using distutils or setuputils. We +hackily decide based on whether setuptools has been imported already. +""" + +from __future__ import annotations + +import hashlib +import os.path +import re +import sys +import time +from collections.abc import Iterable +from typing import TYPE_CHECKING, Any, NamedTuple, NoReturn, Union, cast + +import mypyc.build_setup # noqa: F401 +from mypy.build import BuildSource +from mypy.errors import CompileError +from mypy.fscache import FileSystemCache +from mypy.main import process_options +from mypy.options import Options +from mypy.util import write_junit_xml +from mypyc.annotate import generate_annotated_html +from mypyc.codegen import emitmodule +from mypyc.common import IS_FREE_THREADED, RUNTIME_C_FILES, shared_lib_name +from mypyc.errors import Errors +from mypyc.ir.pprint import format_modules +from mypyc.namegen import exported_name +from mypyc.options import CompilerOptions + + +class ModDesc(NamedTuple): + module: str + c_files: list[str] + other_files: list[str] + include_dirs: list[str] + + +LIBRT_MODULES = [ + ModDesc("librt.internal", ["librt_internal.c"], [], []), + ModDesc( + "librt.base64", + [ + "librt_base64.c", + "base64/lib.c", + "base64/codec_choose.c", + "base64/tables/tables.c", + "base64/arch/generic/codec.c", + "base64/arch/ssse3/codec.c", + "base64/arch/sse41/codec.c", + "base64/arch/sse42/codec.c", + "base64/arch/avx/codec.c", + "base64/arch/avx2/codec.c", + "base64/arch/avx512/codec.c", + "base64/arch/neon32/codec.c", + "base64/arch/neon64/codec.c", + ], + [ + "base64/arch/avx/enc_loop_asm.c", + "base64/arch/avx2/enc_loop.c", + "base64/arch/avx2/enc_loop_asm.c", + "base64/arch/avx2/enc_reshuffle.c", + "base64/arch/avx2/enc_translate.c", + "base64/arch/avx2/dec_loop.c", + "base64/arch/avx2/dec_reshuffle.c", + "base64/arch/generic/32/enc_loop.c", + "base64/arch/generic/64/enc_loop.c", + "base64/arch/generic/32/dec_loop.c", + "base64/arch/generic/enc_head.c", + "base64/arch/generic/enc_tail.c", + "base64/arch/generic/dec_head.c", + "base64/arch/generic/dec_tail.c", + "base64/arch/ssse3/dec_reshuffle.c", + "base64/arch/ssse3/dec_loop.c", + "base64/arch/ssse3/enc_loop_asm.c", + "base64/arch/ssse3/enc_translate.c", + "base64/arch/ssse3/enc_reshuffle.c", + "base64/arch/ssse3/enc_loop.c", + "base64/arch/neon64/dec_loop.c", + "base64/arch/neon64/enc_loop_asm.c", + "base64/codecs.h", + "base64/env.h", 
+ "base64/tables/tables.h", + "base64/tables/table_dec_32bit.h", + "base64/tables/table_enc_12bit.h", + ], + ["base64"], + ), +] + +try: + # Import setuptools so that it monkey-patch overrides distutils + import setuptools +except ImportError: + pass + +if TYPE_CHECKING: + if sys.version_info >= (3, 12): + from setuptools import Extension + else: + from distutils.core import Extension as _distutils_Extension + from typing_extensions import TypeAlias + + from setuptools import Extension as _setuptools_Extension + + Extension: TypeAlias = Union[_setuptools_Extension, _distutils_Extension] + +if sys.version_info >= (3, 12): + # From setuptools' monkeypatch + from distutils import ccompiler, sysconfig # type: ignore[import-not-found] +else: + from distutils import ccompiler, sysconfig + + +def get_extension() -> type[Extension]: + # We can work with either setuptools or distutils, and pick setuptools + # if it has been imported. + use_setuptools = "setuptools" in sys.modules + extension_class: type[Extension] + + if sys.version_info < (3, 12) and not use_setuptools: + import distutils.core + + extension_class = distutils.core.Extension + else: + if not use_setuptools: + sys.exit("error: setuptools not installed") + extension_class = setuptools.Extension + + return extension_class + + +def setup_mypycify_vars() -> None: + """Rewrite a bunch of config vars in pretty dubious ways.""" + # There has to be a better approach to this. + + # The vars can contain ints but we only work with str ones + vars = cast(dict[str, str], sysconfig.get_config_vars()) + if sys.platform == "darwin": + # Disable building 32-bit binaries, since we generate too much code + # for a 32-bit Mach-O object. There has to be a better way to do this. + vars["LDSHARED"] = vars["LDSHARED"].replace("-arch i386", "") + vars["LDFLAGS"] = vars["LDFLAGS"].replace("-arch i386", "") + vars["CFLAGS"] = vars["CFLAGS"].replace("-arch i386", "") + + +def fail(message: str) -> NoReturn: + # TODO: Is there something else we should do to fail? + sys.exit(message) + + +def emit_messages(options: Options, messages: list[str], dt: float, serious: bool = False) -> None: + # ... you know, just in case. + if options.junit_xml: + py_version = f"{options.python_version[0]}_{options.python_version[1]}" + write_junit_xml( + dt, + serious, + {None: messages} if messages else {}, + options.junit_xml, + py_version, + options.platform, + ) + if messages: + print("\n".join(messages)) + + +def get_mypy_config( + mypy_options: list[str], + only_compile_paths: Iterable[str] | None, + compiler_options: CompilerOptions, + fscache: FileSystemCache | None, +) -> tuple[list[BuildSource], list[BuildSource], Options]: + """Construct mypy BuildSources and Options from file and options lists""" + all_sources, options = process_options(mypy_options, fscache=fscache) + if only_compile_paths is not None: + paths_set = set(only_compile_paths) + mypyc_sources = [s for s in all_sources if s.path in paths_set] + else: + mypyc_sources = all_sources + + if compiler_options.separate: + mypyc_sources = [ + src for src in mypyc_sources if src.path and not src.path.endswith("__init__.py") + ] + + if not mypyc_sources: + return mypyc_sources, all_sources, options + + # Override whatever python_version is inferred from the .ini file, + # and set the python_version to be the currently used version. 
+ options.python_version = sys.version_info[:2] + + if options.python_version[0] == 2: + fail("Python 2 not supported") + if not options.strict_optional: + fail("Disabling strict optional checking not supported") + options.show_traceback = True + # Needed to get types for all AST nodes + options.export_types = True + # We use mypy incremental mode when doing separate/incremental mypyc compilation + options.incremental = compiler_options.separate + options.preserve_asts = True + + for source in mypyc_sources: + options.per_module_options.setdefault(source.module, {})["mypyc"] = True + + return mypyc_sources, all_sources, options + + +def generate_c_extension_shim( + full_module_name: str, module_name: str, dir_name: str, group_name: str +) -> str: + """Create a C extension shim with a passthrough PyInit function. + + Arguments: + full_module_name: the dotted full module name + module_name: the final component of the module name + dir_name: the directory to place source code + group_name: the name of the group + """ + cname = "%s.c" % full_module_name.replace(".", os.sep) + cpath = os.path.join(dir_name, cname) + + if IS_FREE_THREADED: + # We use multi-phase init in free-threaded builds to enable free threading. + shim_name = "module_shim_no_gil_multiphase.tmpl" + else: + shim_name = "module_shim.tmpl" + + # We load the C extension shim template from a file. + # (So that the file could be reused as a bazel template also.) + with open(os.path.join(include_dir(), shim_name)) as f: + shim_template = f.read() + + write_file( + cpath, + shim_template.format( + modname=module_name, + libname=shared_lib_name(group_name), + full_modname=exported_name(full_module_name), + ), + ) + + return cpath + + +def group_name(modules: list[str]) -> str: + """Produce a probably unique name for a group from a list of module names.""" + if len(modules) == 1: + return modules[0] + + h = hashlib.sha1() + h.update(",".join(modules).encode()) + return h.hexdigest()[:20] + + +def include_dir() -> str: + """Find the path of the lib-rt dir that needs to be included""" + return os.path.join(os.path.abspath(os.path.dirname(__file__)), "lib-rt") + + +def generate_c( + sources: list[BuildSource], + options: Options, + groups: emitmodule.Groups, + fscache: FileSystemCache, + compiler_options: CompilerOptions, +) -> tuple[list[list[tuple[str, str]]], str]: + """Drive the actual core compilation step. + + The groups argument describes how modules are assigned to C + extension modules. See the comments on the Groups type in + mypyc.emitmodule for details. + + Returns the C source code and (for debugging) the pretty printed IR. + """ + t0 = time.time() + + try: + result = emitmodule.parse_and_typecheck( + sources, options, compiler_options, groups, fscache + ) + except CompileError as e: + emit_messages(options, e.messages, time.time() - t0, serious=(not e.use_stdout)) + sys.exit(1) + + t1 = time.time() + if result.errors: + emit_messages(options, result.errors, t1 - t0) + sys.exit(1) + + if compiler_options.verbose: + print(f"Parsed and typechecked in {t1 - t0:.3f}s") + + errors = Errors(options) + modules, ctext, mapper = emitmodule.compile_modules_to_c( + result, compiler_options=compiler_options, errors=errors, groups=groups + ) + t2 = time.time() + emit_messages(options, errors.new_messages(), t2 - t1) + if errors.num_errors: + # No need to stop the build if only warnings were emitted. 
+ sys.exit(1) + + if compiler_options.verbose: + print(f"Compiled to C in {t2 - t1:.3f}s") + + if options.mypyc_annotation_file: + generate_annotated_html(options.mypyc_annotation_file, result, modules, mapper) + + return ctext, "\n".join(format_modules(modules)) + + +def build_using_shared_lib( + sources: list[BuildSource], + group_name: str, + cfiles: list[str], + deps: list[str], + build_dir: str, + extra_compile_args: list[str], +) -> list[Extension]: + """Produce the list of extension modules when a shared library is needed. + + This creates one shared library extension module that all the + others import, and one shim extension module for each + module in the build. Each shim simply calls an initialization function + in the shared library. + + The shared library (which lib_name is the name of) is a Python + extension module that exports the real initialization functions in + Capsules stored in module attributes. + """ + extensions = [ + get_extension()( + shared_lib_name(group_name), + sources=cfiles, + include_dirs=[include_dir(), build_dir], + depends=deps, + extra_compile_args=extra_compile_args, + ) + ] + + for source in sources: + module_name = source.module.split(".")[-1] + shim_file = generate_c_extension_shim(source.module, module_name, build_dir, group_name) + + # We include the __init__ in the "module name" we stick in the Extension, + # since this seems to be needed for it to end up in the right place. + full_module_name = source.module + assert source.path + if os.path.split(source.path)[1] == "__init__.py": + full_module_name += ".__init__" + extensions.append( + get_extension()( + full_module_name, sources=[shim_file], extra_compile_args=extra_compile_args + ) + ) + + return extensions + + +def build_single_module( + sources: list[BuildSource], cfiles: list[str], extra_compile_args: list[str] +) -> list[Extension]: + """Produce the list of extension modules for a standalone extension. + + This contains just one module, since there is no need for a shared module. + """ + return [ + get_extension()( + sources[0].module, + sources=cfiles, + include_dirs=[include_dir()], + extra_compile_args=extra_compile_args, + ) + ] + + +def write_file(path: str, contents: str) -> None: + """Write data into a file. + + If the file already exists and has the same contents we + want to write, skip writing so as to preserve the mtime + and avoid triggering recompilation. + """ + # We encode it ourselves and open the files as binary to avoid windows + # newline translation + encoded_contents = contents.encode("utf-8") + try: + with open(path, "rb") as f: + old_contents: bytes | None = f.read() + except OSError: + old_contents = None + if old_contents != encoded_contents: + os.makedirs(os.path.dirname(path), exist_ok=True) + with open(path, "wb") as g: + g.write(encoded_contents) + + # Fudge the mtime forward because otherwise when two builds happen close + # together (like in a test) setuptools might not realize the source is newer + # than the new artifact. + # XXX: This is bad though. + new_mtime = os.stat(path).st_mtime + 1 + os.utime(path, times=(new_mtime, new_mtime)) + + +def construct_groups( + sources: list[BuildSource], + separate: bool | list[tuple[list[str], str | None]], + use_shared_lib: bool, + group_name_override: str | None, +) -> emitmodule.Groups: + """Compute Groups given the input source list and separate configs. + + separate is the user-specified configuration for how to assign + modules to compilation groups (see mypycify docstring for details). 
+ + This takes that and expands it into our internal representation of + group configuration, documented in mypyc.emitmodule's definition + of Group. + """ + + if separate is True: + groups: emitmodule.Groups = [([source], None) for source in sources] + elif isinstance(separate, list): + groups = [] + used_sources = set() + for files, name in separate: + group_sources = [src for src in sources if src.path in files] + groups.append((group_sources, name)) + used_sources.update(group_sources) + unused_sources = [src for src in sources if src not in used_sources] + if unused_sources: + groups.extend([([source], None) for source in unused_sources]) + else: + groups = [(sources, None)] + + # Generate missing names + for i, (group, name) in enumerate(groups): + if use_shared_lib and not name: + if group_name_override is not None: + name = group_name_override + else: + name = group_name([source.module for source in group]) + groups[i] = (group, name) + + return groups + + +def get_header_deps(cfiles: list[tuple[str, str]]) -> list[str]: + """Find all the headers used by a group of cfiles. + + We do this by just regexping the source, which is a bit simpler than + properly plumbing the data through. + + Arguments: + cfiles: A list of (file name, file contents) pairs. + """ + headers: set[str] = set() + for _, contents in cfiles: + headers.update(re.findall(r'#include "(.*)"', contents)) + + return sorted(headers) + + +def mypyc_build( + paths: list[str], + compiler_options: CompilerOptions, + *, + separate: bool | list[tuple[list[str], str | None]] = False, + only_compile_paths: Iterable[str] | None = None, + skip_cgen_input: Any | None = None, + always_use_shared_lib: bool = False, +) -> tuple[emitmodule.Groups, list[tuple[list[str], list[str]]]]: + """Do the front and middle end of mypyc building, producing and writing out C source.""" + fscache = FileSystemCache() + mypyc_sources, all_sources, options = get_mypy_config( + paths, only_compile_paths, compiler_options, fscache + ) + + # We generate a shared lib if there are multiple modules or if any + # of the modules are in package. (Because I didn't want to fuss + # around with making the single module code handle packages.) + use_shared_lib = ( + len(mypyc_sources) > 1 + or any("." in x.module for x in mypyc_sources) + or always_use_shared_lib + ) + + groups = construct_groups(mypyc_sources, separate, use_shared_lib, compiler_options.group_name) + + if compiler_options.group_name is not None: + assert len(groups) == 1, "If using custom group_name, only one group is expected" + + # We let the test harness just pass in the c file contents instead + # so that it can do a corner-cutting version without full stubs. + if not skip_cgen_input: + group_cfiles, ops_text = generate_c( + all_sources, options, groups, fscache, compiler_options=compiler_options + ) + # TODO: unique names? + write_file(os.path.join(compiler_options.target_dir, "ops.txt"), ops_text) + else: + group_cfiles = skip_cgen_input + + # Write out the generated C and collect the files for each group + # Should this be here?? 
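The separate/groups machinery that construct_groups expands (described in its docstring above) is driven from setup.py. A hypothetical example, with made-up file and group names, that places two modules in one shared group and lets the third fall into its own group:

from setuptools import setup
from mypyc.build import mypycify

setup(
    name="example",
    ext_modules=mypycify(
        ["foo.py", "bar.py", "baz.py"],
        # foo.py and bar.py share one extension group named "foolib";
        # baz.py is compiled into its own group.
        separate=[(["foo.py", "bar.py"], "foolib")],
    ),
)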
+ group_cfilenames: list[tuple[list[str], list[str]]] = [] + for cfiles in group_cfiles: + cfilenames = [] + for cfile, ctext in cfiles: + cfile = os.path.join(compiler_options.target_dir, cfile) + if not options.mypyc_skip_c_generation: + write_file(cfile, ctext) + if os.path.splitext(cfile)[1] == ".c": + cfilenames.append(cfile) + + deps = [os.path.join(compiler_options.target_dir, dep) for dep in get_header_deps(cfiles)] + group_cfilenames.append((cfilenames, deps)) + + return groups, group_cfilenames + + +def mypycify( + paths: list[str], + *, + only_compile_paths: Iterable[str] | None = None, + verbose: bool = False, + opt_level: str = "3", + debug_level: str = "1", + strip_asserts: bool = False, + multi_file: bool = False, + separate: bool | list[tuple[list[str], str | None]] = False, + skip_cgen_input: Any | None = None, + target_dir: str | None = None, + include_runtime_files: bool | None = None, + strict_dunder_typing: bool = False, + group_name: str | None = None, + log_trace: bool = False, + depends_on_librt_internal: bool = False, + install_librt: bool = False, + experimental_features: bool = False, +) -> list[Extension]: + """Main entry point to building using mypyc. + + This produces a list of Extension objects that should be passed as the + ext_modules parameter to setup. + + Arguments: + paths: A list of file paths to build. It may also contain mypy options. + only_compile_paths: If not None, an iterable of paths that are to be + the only modules compiled, even if other modules + appear in the mypy command line given to paths. + (These modules must still be passed to paths.) + + verbose: Should mypyc be more verbose. Defaults to false. + + opt_level: The optimization level, as a string. Defaults to '3' (meaning '-O3'). + debug_level: The debug level, as a string. Defaults to '1' (meaning '-g1'). + strip_asserts: Should asserts be stripped from the generated code. + + multi_file: Should each Python module be compiled into its own C source file. + This can reduce compile time and memory requirements at the likely + cost of runtime performance of compiled code. Defaults to false. + separate: Should compiled modules be placed in separate extension modules. + If False, all modules are placed in a single shared library. + If True, every module is placed in its own library. + Otherwise, separate should be a list of + (file name list, optional shared library name) pairs specifying + groups of files that should be placed in the same shared library + (while all other modules will be placed in its own library). + + Each group can be compiled independently, which can + speed up compilation, but calls between groups can + be slower than calls within a group and can't be + inlined. + target_dir: The directory to write C output files. Defaults to 'build'. + include_runtime_files: If not None, whether the mypyc runtime library + should be directly #include'd instead of linked + separately in order to reduce compiler invocations. + Defaults to False in multi_file mode, True otherwise. + strict_dunder_typing: If True, force dunder methods to have the return type + of the method strictly, which can lead to more + optimization opportunities. Defaults to False. + group_name: If set, override the default group name derived from + the hash of module names. This is used for the names of the + output C files and the shared library. This is only supported + if there is a single group. 
[Experimental] + log_trace: If True, compiled code writes a trace log of events in + mypyc_trace.txt (derived from executed operations). This is + useful for performance analysis, such as analyzing which + primitive ops are used the most and on which lines. + depends_on_librt_internal: This is True only for mypy itself. + install_librt: If True, also build the librt extension modules. Normally, + those are build and published on PyPI separately, but during + tests, we want to use their development versions (i.e. from + current commit). + experimental_features: Enable experimental features (install_librt=True is + also needed if using experimental librt features). These + have no backward compatibility guarantees! + """ + + # Figure out our configuration + compiler_options = CompilerOptions( + strip_asserts=strip_asserts, + multi_file=multi_file, + verbose=verbose, + separate=separate is not False, + target_dir=target_dir, + include_runtime_files=include_runtime_files, + strict_dunder_typing=strict_dunder_typing, + group_name=group_name, + log_trace=log_trace, + depends_on_librt_internal=depends_on_librt_internal, + experimental_features=experimental_features, + ) + + # Generate all the actual important C code + groups, group_cfilenames = mypyc_build( + paths, + only_compile_paths=only_compile_paths, + compiler_options=compiler_options, + separate=separate, + skip_cgen_input=skip_cgen_input, + ) + + # Mess around with setuptools and actually get the thing built + setup_mypycify_vars() + + # Create a compiler object so we can make decisions based on what + # compiler is being used. typeshed is missing some attributes on the + # compiler object so we give it type Any + compiler: Any = ccompiler.new_compiler() + sysconfig.customize_compiler(compiler) + + build_dir = compiler_options.target_dir + + cflags: list[str] = [] + if compiler.compiler_type == "unix": + cflags += [ + f"-O{opt_level}", + f"-g{debug_level}", + "-Werror", + "-Wno-unused-function", + "-Wno-unused-label", + "-Wno-unreachable-code", + "-Wno-unused-variable", + "-Wno-unused-command-line-argument", + "-Wno-unknown-warning-option", + "-Wno-unused-but-set-variable", + "-Wno-ignored-optimization-argument", + # Disables C Preprocessor (cpp) warnings + # See https://github.com/mypyc/mypyc/issues/956 + "-Wno-cpp", + ] + if log_trace: + cflags.append("-DMYPYC_LOG_TRACE") + if experimental_features: + cflags.append("-DMYPYC_EXPERIMENTAL") + elif compiler.compiler_type == "msvc": + # msvc doesn't have levels, '/O2' is full and '/Od' is disable + if opt_level == "0": + opt_level = "d" + elif opt_level in ("1", "2", "3"): + opt_level = "2" + if debug_level == "0": + debug_level = "NONE" + elif debug_level == "1": + debug_level = "FASTLINK" + elif debug_level in ("2", "3"): + debug_level = "FULL" + cflags += [ + f"/O{opt_level}", + f"/DEBUG:{debug_level}", + "/wd4102", # unreferenced label + "/wd4101", # unreferenced local variable + "/wd4146", # negating unsigned int + ] + if multi_file: + # Disable whole program optimization in multi-file mode so + # that we actually get the compilation speed and memory + # use wins that multi-file mode is intended for. + cflags += ["/GL-", "/wd9025"] # warning about overriding /GL + if log_trace: + cflags.append("/DMYPYC_LOG_TRACE") + if experimental_features: + cflags.append("/DMYPYC_EXPERIMENTAL") + + # If configured to (defaults to yes in multi-file mode), copy the + # runtime library in. Otherwise it just gets #included to save on + # compiler invocations. 
+ shared_cfilenames = [] + if not compiler_options.include_runtime_files: + for name in RUNTIME_C_FILES: + rt_file = os.path.join(build_dir, name) + with open(os.path.join(include_dir(), name), encoding="utf-8") as f: + write_file(rt_file, f.read()) + shared_cfilenames.append(rt_file) + + extensions = [] + for (group_sources, lib_name), (cfilenames, deps) in zip(groups, group_cfilenames): + if lib_name: + extensions.extend( + build_using_shared_lib( + group_sources, + lib_name, + cfilenames + shared_cfilenames, + deps, + build_dir, + cflags, + ) + ) + else: + extensions.extend( + build_single_module(group_sources, cfilenames + shared_cfilenames, cflags) + ) + + if install_librt: + for name in RUNTIME_C_FILES: + rt_file = os.path.join(build_dir, name) + with open(os.path.join(include_dir(), name), encoding="utf-8") as f: + write_file(rt_file, f.read()) + for mod, file_names, addit_files, includes in LIBRT_MODULES: + for file_name in file_names + addit_files: + rt_file = os.path.join(build_dir, file_name) + with open(os.path.join(include_dir(), file_name), encoding="utf-8") as f: + write_file(rt_file, f.read()) + extensions.append( + get_extension()( + mod, + sources=[ + os.path.join(build_dir, file) for file in file_names + RUNTIME_C_FILES + ], + include_dirs=[include_dir()] + + [os.path.join(include_dir(), d) for d in includes], + extra_compile_args=cflags, + ) + ) + + return extensions diff --git a/.venv/lib/python3.12/site-packages/mypyc/build_setup.py b/.venv/lib/python3.12/site-packages/mypyc/build_setup.py new file mode 100644 index 0000000..a3e7a66 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/build_setup.py @@ -0,0 +1,62 @@ +import platform +import sys + +try: + # Import setuptools so that it monkey-patch overrides distutils + import setuptools # noqa: F401 +except ImportError: + pass + +if sys.version_info >= (3, 12): + # From setuptools' monkeypatch + from distutils import ccompiler # type: ignore[import-not-found] +else: + from distutils import ccompiler + +EXTRA_FLAGS_PER_COMPILER_TYPE_PER_PATH_COMPONENT = { + "unix": { + "base64/arch/ssse3": ["-mssse3"], + "base64/arch/sse41": ["-msse4.1"], + "base64/arch/sse42": ["-msse4.2"], + "base64/arch/avx2": ["-mavx2"], + "base64/arch/avx": ["-mavx"], + }, + "msvc": { + "base64/arch/sse42": ["/arch:SSE4.2"], + "base64/arch/avx2": ["/arch:AVX2"], + "base64/arch/avx": ["/arch:AVX"], + }, +} + +ccompiler.CCompiler.__spawn = ccompiler.CCompiler.spawn # type: ignore[attr-defined] +X86_64 = platform.machine() in ("x86_64", "AMD64", "amd64") + + +def spawn(self, cmd, **kwargs) -> None: # type: ignore[no-untyped-def] + compiler_type: str = self.compiler_type + extra_options = EXTRA_FLAGS_PER_COMPILER_TYPE_PER_PATH_COMPONENT[compiler_type] + new_cmd = list(cmd) + if X86_64 and extra_options is not None: + # filenames are closer to the end of command line + for argument in reversed(new_cmd): + # Check if the matching argument contains a source filename. 
+ if not str(argument).endswith(".c"): + continue + + for path in extra_options.keys(): + if path in str(argument): + if compiler_type == "bcpp": + compiler = new_cmd.pop() + # Borland accepts a source file name at the end, + # insert the options before it + new_cmd.extend(extra_options[path]) + new_cmd.append(compiler) + else: + new_cmd.extend(extra_options[path]) + + # path component is found, no need to search any further + break + self.__spawn(new_cmd, **kwargs) + + +ccompiler.CCompiler.spawn = spawn # type: ignore[method-assign] diff --git a/.venv/lib/python3.12/site-packages/mypyc/codegen/__init__.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/codegen/__init__.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..2195b28 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/codegen/__init__.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/codegen/__init__.py b/.venv/lib/python3.12/site-packages/mypyc/codegen/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypyc/codegen/cstring.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/codegen/cstring.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..beac377 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/codegen/cstring.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/codegen/cstring.py b/.venv/lib/python3.12/site-packages/mypyc/codegen/cstring.py new file mode 100644 index 0000000..853787f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/codegen/cstring.py @@ -0,0 +1,54 @@ +"""Encode valid C string literals from Python strings. + +If a character is not allowed in C string literals, it is either emitted +as a simple escape sequence (e.g. '\\n'), or an octal escape sequence +with exactly three digits ('\\oXXX'). Question marks are escaped to +prevent trigraphs in the string literal from being interpreted. Note +that '\\?' is an invalid escape sequence in Python. + +Consider the string literal "AB\\xCDEF". As one would expect, Python +parses it as ['A', 'B', 0xCD, 'E', 'F']. However, the C standard +specifies that all hexadecimal digits immediately following '\\x' will +be interpreted as part of the escape sequence. Therefore, it is +unexpectedly parsed as ['A', 'B', 0xCDEF]. + +Emitting ("AB\\xCD" "EF") would avoid this behaviour. However, we opt +for simplicity and use octal escape sequences instead. They do not +suffer from the same issue as they are defined to parse at most three +octal digits. +""" + +from __future__ import annotations + +import string +from typing import Final + +CHAR_MAP: Final = [f"\\{i:03o}" for i in range(256)] + +# It is safe to use string.printable as it always uses the C locale. +for c in string.printable: + CHAR_MAP[ord(c)] = c + +# These assignments must come last because we prioritize simple escape +# sequences over any other representation. +for c in ("'", '"', "\\", "a", "b", "f", "n", "r", "t", "v"): + escaped = f"\\{c}" + decoded = escaped.encode("ascii").decode("unicode_escape") + CHAR_MAP[ord(decoded)] = escaped + +# This escape sequence is invalid in Python. +CHAR_MAP[ord("?")] = r"\?" 
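+# A few entries of the finished table, as a rough illustration of the rules
+# described above (printable characters map to themselves, simple escape
+# sequences take priority, everything else falls back to a three-digit octal
+# escape):
+#
+#   CHAR_MAP[ord("A")]  == "A"
+#   CHAR_MAP[ord("\n")] == "\\n"
+#   CHAR_MAP[ord('"')]  == '\\"'
+#   CHAR_MAP[0xCD]      == "\\315"
+#   CHAR_MAP[ord("?")]  == "\\?"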
+ + +def encode_bytes_as_c_string(b: bytes) -> str: + """Produce contents of a C string literal for a byte string, without quotes.""" + escaped = "".join([CHAR_MAP[i] for i in b]) + return escaped + + +def c_string_initializer(value: bytes) -> str: + """Create initializer for a C char[]/ char * variable from a string. + + For example, if value if b'foo', the result would be '"foo"'. + """ + return '"' + encode_bytes_as_c_string(value) + '"' diff --git a/.venv/lib/python3.12/site-packages/mypyc/codegen/emit.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/codegen/emit.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..bd7d057 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/codegen/emit.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/codegen/emit.py b/.venv/lib/python3.12/site-packages/mypyc/codegen/emit.py new file mode 100644 index 0000000..f2a2271 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/codegen/emit.py @@ -0,0 +1,1240 @@ +"""Utilities for emitting C code.""" + +from __future__ import annotations + +import pprint +import sys +import textwrap +from typing import Callable, Final + +from mypyc.codegen.literals import Literals +from mypyc.common import ( + ATTR_PREFIX, + BITMAP_BITS, + FAST_ISINSTANCE_MAX_SUBCLASSES, + HAVE_IMMORTAL, + NATIVE_PREFIX, + REG_PREFIX, + STATIC_PREFIX, + TYPE_PREFIX, +) +from mypyc.ir.class_ir import ClassIR, all_concrete_classes +from mypyc.ir.func_ir import FuncDecl +from mypyc.ir.ops import BasicBlock, Value +from mypyc.ir.rtypes import ( + RInstance, + RPrimitive, + RTuple, + RType, + RUnion, + int_rprimitive, + is_bool_or_bit_rprimitive, + is_bytes_rprimitive, + is_dict_rprimitive, + is_fixed_width_rtype, + is_float_rprimitive, + is_frozenset_rprimitive, + is_int16_rprimitive, + is_int32_rprimitive, + is_int64_rprimitive, + is_int_rprimitive, + is_list_rprimitive, + is_native_rprimitive, + is_none_rprimitive, + is_object_rprimitive, + is_optional_type, + is_range_rprimitive, + is_set_rprimitive, + is_short_int_rprimitive, + is_str_rprimitive, + is_tuple_rprimitive, + is_uint8_rprimitive, + object_rprimitive, + optional_value_type, +) +from mypyc.namegen import NameGenerator, exported_name +from mypyc.sametype import is_same_type + +# Whether to insert debug asserts for all error handling, to quickly +# catch errors propagating without exceptions set. +DEBUG_ERRORS: Final = False + + +class HeaderDeclaration: + """A representation of a declaration in C. + + This is used to generate declarations in header files and + (optionally) definitions in source files. + + Attributes: + decl: C source code for the declaration. + defn: Optionally, C source code for a definition. + dependencies: The names of any objects that must be declared prior. + is_type: Whether the declaration is of a C type. (C types will be declared in + external header files and not marked 'extern'.) + needs_export: Whether the declared object needs to be exported to + other modules in the linking table. 
+ """ + + def __init__( + self, + decl: str | list[str], + defn: list[str] | None = None, + *, + dependencies: set[str] | None = None, + is_type: bool = False, + needs_export: bool = False, + ) -> None: + self.decl = [decl] if isinstance(decl, str) else decl + self.defn = defn + self.dependencies = dependencies or set() + self.is_type = is_type + self.needs_export = needs_export + + +class EmitterContext: + """Shared emitter state for a compilation group.""" + + def __init__( + self, + names: NameGenerator, + group_name: str | None = None, + group_map: dict[str, str | None] | None = None, + ) -> None: + """Setup shared emitter state. + + Args: + names: The name generator to use + group_map: Map from module names to group name + group_name: Current group name + """ + self.temp_counter = 0 + self.names = names + self.group_name = group_name + self.group_map = group_map or {} + # Groups that this group depends on + self.group_deps: set[str] = set() + + # The map below is used for generating declarations and + # definitions at the top of the C file. The main idea is that they can + # be generated at any time during the emit phase. + + # A map of a C identifier to whatever the C identifier declares. Currently this is + # used for declaring structs and the key corresponds to the name of the struct. + # The declaration contains the body of the struct. + self.declarations: dict[str, HeaderDeclaration] = {} + + self.literals = Literals() + + +class ErrorHandler: + """Describes handling errors in unbox/cast operations.""" + + +class AssignHandler(ErrorHandler): + """Assign an error value on error.""" + + +class GotoHandler(ErrorHandler): + """Goto label on error.""" + + def __init__(self, label: str) -> None: + self.label = label + + +class TracebackAndGotoHandler(ErrorHandler): + """Add traceback item and goto label on error.""" + + def __init__( + self, label: str, source_path: str, module_name: str, traceback_entry: tuple[str, int] + ) -> None: + self.label = label + self.source_path = source_path + self.module_name = module_name + self.traceback_entry = traceback_entry + + +class ReturnHandler(ErrorHandler): + """Return a constant value on error.""" + + def __init__(self, value: str) -> None: + self.value = value + + +class Emitter: + """Helper for C code generation.""" + + def __init__( + self, + context: EmitterContext, + value_names: dict[Value, str] | None = None, + capi_version: tuple[int, int] | None = None, + ) -> None: + self.context = context + self.capi_version = capi_version or sys.version_info[:2] + self.names = context.names + self.value_names = value_names or {} + self.fragments: list[str] = [] + self._indent = 0 + + # Low-level operations + + def indent(self) -> None: + self._indent += 4 + + def dedent(self) -> None: + self._indent -= 4 + assert self._indent >= 0 + + def label(self, label: BasicBlock) -> str: + return "CPyL%s" % label.label + + def reg(self, reg: Value) -> str: + return REG_PREFIX + self.value_names[reg] + + def attr(self, name: str) -> str: + return ATTR_PREFIX + name + + def object_annotation(self, obj: object, line: str) -> str: + """Build a C comment with an object's string representation. + + If the comment exceeds the line length limit, it's wrapped into a + multiline string (with the extra lines indented to be aligned with + the first line's comment). 
+ + If it contains illegal characters, an empty string is returned.""" + line_width = self._indent + len(line) + formatted = pprint.pformat(obj, compact=True, width=max(90 - line_width, 20)) + if any(x in formatted for x in ("/*", "*/", "\0")): + return "" + + if "\n" in formatted: + first_line, rest = formatted.split("\n", maxsplit=1) + comment_continued = textwrap.indent(rest, (line_width + 3) * " ") + return f" /* {first_line}\n{comment_continued} */" + else: + return f" /* {formatted} */" + + def emit_line(self, line: str = "", *, ann: object = None) -> None: + if line.startswith("}"): + self.dedent() + comment = self.object_annotation(ann, line) if ann is not None else "" + self.fragments.append(self._indent * " " + line + comment + "\n") + if line.endswith("{"): + self.indent() + + def emit_lines(self, *lines: str) -> None: + for line in lines: + self.emit_line(line) + + def emit_label(self, label: BasicBlock | str) -> None: + if isinstance(label, str): + text = label + else: + if label.label == 0 or not label.referenced: + return + + text = self.label(label) + # Extra semicolon prevents an error when the next line declares a tempvar + self.fragments.append(f"{text}: ;\n") + + def emit_from_emitter(self, emitter: Emitter) -> None: + self.fragments.extend(emitter.fragments) + + def emit_printf(self, fmt: str, *args: str) -> None: + fmt = fmt.replace("\n", "\\n") + self.emit_line("printf(%s);" % ", ".join(['"%s"' % fmt] + list(args))) + self.emit_line("fflush(stdout);") + + def temp_name(self) -> str: + self.context.temp_counter += 1 + return "__tmp%d" % self.context.temp_counter + + def new_label(self) -> str: + self.context.temp_counter += 1 + return "__LL%d" % self.context.temp_counter + + def get_module_group_prefix(self, module_name: str) -> str: + """Get the group prefix for a module (relative to the current group). + + The prefix should be prepended to the object name whenever + accessing an object from this module. + + If the module lives is in the current compilation group, there is + no prefix. But if it lives in a different group (and hence a separate + extension module), we need to access objects from it indirectly via an + export table. + + For example, for code in group `a` to call a function `bar` in group `b`, + it would need to do `exports_b.CPyDef_bar(...)`, while code that is + also in group `b` can simply do `CPyDef_bar(...)`. + + Thus the prefix for a module in group `b` is 'exports_b.' if the current + group is *not* b and just '' if it is. + """ + groups = self.context.group_map + target_group_name = groups.get(module_name) + if target_group_name and target_group_name != self.context.group_name: + self.context.group_deps.add(target_group_name) + return f"exports_{exported_name(target_group_name)}." + else: + return "" + + def get_group_prefix(self, obj: ClassIR | FuncDecl) -> str: + """Get the group prefix for an object.""" + # See docs above + return self.get_module_group_prefix(obj.module_name) + + def static_name(self, id: str, module: str | None, prefix: str = STATIC_PREFIX) -> str: + """Create name of a C static variable. + + These are used for literals and imported modules, among other + things. + + The caller should ensure that the (id, module) pair cannot + overlap with other calls to this method within a compilation + group. + """ + lib_prefix = "" if not module else self.get_module_group_prefix(module) + # If we are accessing static via the export table, we need to dereference + # the pointer also. 
+ star_maybe = "*" if lib_prefix else "" + suffix = self.names.private_name(module or "", id) + return f"{star_maybe}{lib_prefix}{prefix}{suffix}" + + def type_struct_name(self, cl: ClassIR) -> str: + return self.static_name(cl.name, cl.module_name, prefix=TYPE_PREFIX) + + def ctype(self, rtype: RType) -> str: + return rtype._ctype + + def ctype_spaced(self, rtype: RType) -> str: + """Adds a space after ctype for non-pointers.""" + ctype = self.ctype(rtype) + if ctype[-1] == "*": + return ctype + else: + return ctype + " " + + def c_undefined_value(self, rtype: RType) -> str: + if not rtype.is_unboxed: + return "NULL" + elif isinstance(rtype, RPrimitive): + return rtype.c_undefined + elif isinstance(rtype, RTuple): + return self.tuple_undefined_value(rtype) + assert False, rtype + + def c_error_value(self, rtype: RType) -> str: + return self.c_undefined_value(rtype) + + def native_function_name(self, fn: FuncDecl) -> str: + return f"{NATIVE_PREFIX}{fn.cname(self.names)}" + + def tuple_c_declaration(self, rtuple: RTuple) -> list[str]: + result = [ + f"#ifndef MYPYC_DECLARED_{rtuple.struct_name}", + f"#define MYPYC_DECLARED_{rtuple.struct_name}", + f"typedef struct {rtuple.struct_name} {{", + ] + if len(rtuple.types) == 0: # empty tuple + # Empty tuples contain a flag so that they can still indicate + # error values. + result.append("int empty_struct_error_flag;") + else: + i = 0 + for typ in rtuple.types: + result.append(f"{self.ctype_spaced(typ)}f{i};") + i += 1 + result.append(f"}} {rtuple.struct_name};") + result.append("#endif") + result.append("") + + return result + + def bitmap_field(self, index: int) -> str: + """Return C field name used for attribute bitmap.""" + n = index // BITMAP_BITS + if n == 0: + return "bitmap" + return f"bitmap{n + 1}" + + def attr_bitmap_expr(self, obj: str, cl: ClassIR, index: int) -> str: + """Return reference to the attribute definedness bitmap.""" + cast = f"({cl.struct_name(self.names)} *)" + attr = self.bitmap_field(index) + return f"({cast}{obj})->{attr}" + + def emit_attr_bitmap_set( + self, value: str, obj: str, rtype: RType, cl: ClassIR, attr: str + ) -> None: + """Mark an attribute as defined in the attribute bitmap. + + Assumes that the attribute is tracked in the bitmap (only some attributes + use the bitmap). If 'value' is not equal to the error value, do nothing. + """ + self._emit_attr_bitmap_update(value, obj, rtype, cl, attr, clear=False) + + def emit_attr_bitmap_clear(self, obj: str, rtype: RType, cl: ClassIR, attr: str) -> None: + """Mark an attribute as undefined in the attribute bitmap. + + Unlike emit_attr_bitmap_set, clear unconditionally. 
+ """ + self._emit_attr_bitmap_update("", obj, rtype, cl, attr, clear=True) + + def _emit_attr_bitmap_update( + self, value: str, obj: str, rtype: RType, cl: ClassIR, attr: str, clear: bool + ) -> None: + if value: + check = self.error_value_check(rtype, value, "==") + self.emit_line(f"if (unlikely({check})) {{") + index = cl.bitmap_attrs.index(attr) + mask = 1 << (index & (BITMAP_BITS - 1)) + bitmap = self.attr_bitmap_expr(obj, cl, index) + if clear: + self.emit_line(f"{bitmap} &= ~{mask};") + else: + self.emit_line(f"{bitmap} |= {mask};") + if value: + self.emit_line("}") + + def emit_undefined_attr_check( + self, + rtype: RType, + attr_expr: str, + compare: str, + obj: str, + attr: str, + cl: ClassIR, + *, + unlikely: bool = False, + ) -> None: + check = self.error_value_check(rtype, attr_expr, compare) + if unlikely: + check = f"unlikely({check})" + if rtype.error_overlap: + index = cl.bitmap_attrs.index(attr) + bit = 1 << (index & (BITMAP_BITS - 1)) + attr = self.bitmap_field(index) + obj_expr = f"({cl.struct_name(self.names)} *){obj}" + check = f"{check} && !(({obj_expr})->{attr} & {bit})" + self.emit_line(f"if ({check}) {{") + + def error_value_check(self, rtype: RType, value: str, compare: str) -> str: + if isinstance(rtype, RTuple): + return self.tuple_undefined_check_cond( + rtype, value, self.c_error_value, compare, check_exception=False + ) + else: + return f"{value} {compare} {self.c_error_value(rtype)}" + + def tuple_undefined_check_cond( + self, + rtuple: RTuple, + tuple_expr_in_c: str, + c_type_compare_val: Callable[[RType], str], + compare: str, + *, + check_exception: bool = True, + ) -> str: + if len(rtuple.types) == 0: + # empty tuple + return "{}.empty_struct_error_flag {} {}".format( + tuple_expr_in_c, compare, c_type_compare_val(int_rprimitive) + ) + if rtuple.error_overlap: + i = 0 + item_type = rtuple.types[0] + else: + for i, typ in enumerate(rtuple.types): + if not typ.error_overlap: + item_type = rtuple.types[i] + break + else: + assert False, "not expecting tuple with error overlap" + if isinstance(item_type, RTuple): + return self.tuple_undefined_check_cond( + item_type, tuple_expr_in_c + f".f{i}", c_type_compare_val, compare + ) + else: + check = f"{tuple_expr_in_c}.f{i} {compare} {c_type_compare_val(item_type)}" + if rtuple.error_overlap and check_exception: + check += " && PyErr_Occurred()" + return check + + def tuple_undefined_value(self, rtuple: RTuple) -> str: + """Undefined tuple value suitable in an expression.""" + return f"({rtuple.struct_name}) {self.c_initializer_undefined_value(rtuple)}" + + def c_initializer_undefined_value(self, rtype: RType) -> str: + """Undefined value represented in a form suitable for variable initialization.""" + if isinstance(rtype, RTuple): + if not rtype.types: + # Empty tuples contain a flag so that they can still indicate + # error values. 
+ return f"{{ {int_rprimitive.c_undefined} }}" + items = ", ".join([self.c_initializer_undefined_value(t) for t in rtype.types]) + return f"{{ {items} }}" + else: + return self.c_undefined_value(rtype) + + # Higher-level operations + + def declare_tuple_struct(self, tuple_type: RTuple) -> None: + if tuple_type.struct_name not in self.context.declarations: + dependencies = set() + for typ in tuple_type.types: + # XXX other types might eventually need similar behavior + if isinstance(typ, RTuple): + dependencies.add(typ.struct_name) + + self.context.declarations[tuple_type.struct_name] = HeaderDeclaration( + self.tuple_c_declaration(tuple_type), dependencies=dependencies, is_type=True + ) + + def emit_inc_ref(self, dest: str, rtype: RType, *, rare: bool = False) -> None: + """Increment reference count of C expression `dest`. + + For composite unboxed structures (e.g. tuples) recursively + increment reference counts for each component. + + If rare is True, optimize for code size and compilation speed. + """ + if is_int_rprimitive(rtype): + if rare: + self.emit_line("CPyTagged_IncRef(%s);" % dest) + else: + self.emit_line("CPyTagged_INCREF(%s);" % dest) + elif isinstance(rtype, RTuple): + for i, item_type in enumerate(rtype.types): + self.emit_inc_ref(f"{dest}.f{i}", item_type) + elif not rtype.is_unboxed: + # Always inline, since this is a simple but very hot op + if rtype.may_be_immortal or not HAVE_IMMORTAL: + self.emit_line("CPy_INCREF(%s);" % dest) + else: + self.emit_line("CPy_INCREF_NO_IMM(%s);" % dest) + # Otherwise assume it's an unboxed, pointerless value and do nothing. + + def emit_dec_ref( + self, dest: str, rtype: RType, *, is_xdec: bool = False, rare: bool = False + ) -> None: + """Decrement reference count of C expression `dest`. + + For composite unboxed structures (e.g. tuples) recursively + decrement reference counts for each component. + + If rare is True, optimize for code size and compilation speed. + """ + x = "X" if is_xdec else "" + if is_int_rprimitive(rtype): + if rare: + self.emit_line(f"CPyTagged_{x}DecRef({dest});") + else: + # Inlined + self.emit_line(f"CPyTagged_{x}DECREF({dest});") + elif isinstance(rtype, RTuple): + for i, item_type in enumerate(rtype.types): + self.emit_dec_ref(f"{dest}.f{i}", item_type, is_xdec=is_xdec, rare=rare) + elif not rtype.is_unboxed: + if rare: + self.emit_line(f"CPy_{x}DecRef({dest});") + else: + # Inlined + if rtype.may_be_immortal or not HAVE_IMMORTAL: + self.emit_line(f"CPy_{x}DECREF({dest});") + else: + self.emit_line(f"CPy_{x}DECREF_NO_IMM({dest});") + # Otherwise assume it's an unboxed, pointerless value and do nothing. + + def pretty_name(self, typ: RType) -> str: + value_type = optional_value_type(typ) + if value_type is not None: + return "%s or None" % self.pretty_name(value_type) + return str(typ) + + def emit_cast( + self, + src: str, + dest: str, + typ: RType, + *, + declare_dest: bool = False, + error: ErrorHandler | None = None, + raise_exception: bool = True, + optional: bool = False, + src_type: RType | None = None, + likely: bool = True, + ) -> None: + """Emit code for casting a value of given type. + + Somewhat strangely, this supports unboxed types but only + operates on boxed versions. This is necessary to properly + handle types such as Optional[int] in compatibility glue. + + By default, assign NULL (error value) to dest if the value has + an incompatible type and raise TypeError. These can be customized + using 'error' and 'raise_exception'. + + Always copy/steal the reference in 'src'. 
+ + Args: + src: Name of source C variable + dest: Name of target C variable + typ: Type of value + declare_dest: If True, also declare the variable 'dest' + error: What happens on error + raise_exception: If True, also raise TypeError on failure + likely: If the cast is likely to succeed (can be False for unions) + """ + error = error or AssignHandler() + + # Special case casting *from* optional + if src_type and is_optional_type(src_type) and not is_object_rprimitive(typ): + value_type = optional_value_type(src_type) + assert value_type is not None + if is_same_type(value_type, typ): + if declare_dest: + self.emit_line(f"PyObject *{dest};") + check = "({} != Py_None)" + if likely: + check = f"(likely{check})" + self.emit_arg_check(src, dest, typ, check.format(src), optional) + self.emit_lines(f" {dest} = {src};", "else {") + self.emit_cast_error_handler(error, src, dest, typ, raise_exception) + self.emit_line("}") + return + + # TODO: Verify refcount handling. + if ( + is_list_rprimitive(typ) + or is_dict_rprimitive(typ) + or is_set_rprimitive(typ) + or is_frozenset_rprimitive(typ) + or is_str_rprimitive(typ) + or is_range_rprimitive(typ) + or is_float_rprimitive(typ) + or is_int_rprimitive(typ) + or is_bool_or_bit_rprimitive(typ) + or is_fixed_width_rtype(typ) + ): + if declare_dest: + self.emit_line(f"PyObject *{dest};") + if is_list_rprimitive(typ): + prefix = "PyList" + elif is_dict_rprimitive(typ): + prefix = "PyDict" + elif is_set_rprimitive(typ): + prefix = "PySet" + elif is_frozenset_rprimitive(typ): + prefix = "PyFrozenSet" + elif is_str_rprimitive(typ): + prefix = "PyUnicode" + elif is_range_rprimitive(typ): + prefix = "PyRange" + elif is_float_rprimitive(typ): + prefix = "CPyFloat" + elif is_int_rprimitive(typ) or is_fixed_width_rtype(typ): + # TODO: Range check for fixed-width types? + prefix = "PyLong" + elif is_bool_or_bit_rprimitive(typ): + prefix = "PyBool" + else: + assert False, f"unexpected primitive type: {typ}" + check = "({}_Check({}))" + if likely: + check = f"(likely{check})" + self.emit_arg_check(src, dest, typ, check.format(prefix, src), optional) + self.emit_lines(f" {dest} = {src};", "else {") + self.emit_cast_error_handler(error, src, dest, typ, raise_exception) + self.emit_line("}") + elif is_bytes_rprimitive(typ): + if declare_dest: + self.emit_line(f"PyObject *{dest};") + check = "(PyBytes_Check({}) || PyByteArray_Check({}))" + if likely: + check = f"(likely{check})" + self.emit_arg_check(src, dest, typ, check.format(src, src), optional) + self.emit_lines(f" {dest} = {src};", "else {") + self.emit_cast_error_handler(error, src, dest, typ, raise_exception) + self.emit_line("}") + elif is_tuple_rprimitive(typ): + if declare_dest: + self.emit_line(f"{self.ctype(typ)} {dest};") + check = "(PyTuple_Check({}))" + if likely: + check = f"(likely{check})" + self.emit_arg_check(src, dest, typ, check.format(src), optional) + self.emit_lines(f" {dest} = {src};", "else {") + self.emit_cast_error_handler(error, src, dest, typ, raise_exception) + self.emit_line("}") + elif isinstance(typ, RInstance): + if declare_dest: + self.emit_line(f"PyObject *{dest};") + concrete = all_concrete_classes(typ.class_ir) + # If there are too many concrete subclasses or we can't find any + # (meaning the code ought to be dead or we aren't doing global opts), + # fall back to a normal typecheck. + # Otherwise check all the subclasses. 
+ if not concrete or len(concrete) > FAST_ISINSTANCE_MAX_SUBCLASSES + 1: + check = "(PyObject_TypeCheck({}, {}))".format( + src, self.type_struct_name(typ.class_ir) + ) + else: + full_str = "(Py_TYPE({src}) == {targets[0]})" + for i in range(1, len(concrete)): + full_str += " || (Py_TYPE({src}) == {targets[%d]})" % i + if len(concrete) > 1: + full_str = "(%s)" % full_str + check = full_str.format( + src=src, targets=[self.type_struct_name(ir) for ir in concrete] + ) + if likely: + check = f"(likely{check})" + self.emit_arg_check(src, dest, typ, check, optional) + self.emit_lines(f" {dest} = {src};", "else {") + self.emit_cast_error_handler(error, src, dest, typ, raise_exception) + self.emit_line("}") + elif is_none_rprimitive(typ): + if declare_dest: + self.emit_line(f"PyObject *{dest};") + check = "({} == Py_None)" + if likely: + check = f"(likely{check})" + self.emit_arg_check(src, dest, typ, check.format(src), optional) + self.emit_lines(f" {dest} = {src};", "else {") + self.emit_cast_error_handler(error, src, dest, typ, raise_exception) + self.emit_line("}") + elif is_object_rprimitive(typ): + if declare_dest: + self.emit_line(f"PyObject *{dest};") + self.emit_arg_check(src, dest, typ, "", optional) + self.emit_line(f"{dest} = {src};") + if optional: + self.emit_line("}") + elif is_native_rprimitive(typ): + # Native primitive types have type check functions of form "CPy_Check(...)". + if declare_dest: + self.emit_line(f"PyObject *{dest};") + short_name = typ.name.rsplit(".", 1)[-1] + check = f"(CPy{short_name}_Check({src}))" + if likely: + check = f"(likely{check})" + self.emit_arg_check(src, dest, typ, check, optional) + self.emit_lines(f" {dest} = {src};", "else {") + self.emit_cast_error_handler(error, src, dest, typ, raise_exception) + self.emit_line("}") + elif isinstance(typ, RUnion): + self.emit_union_cast( + src, dest, typ, declare_dest, error, optional, src_type, raise_exception + ) + elif isinstance(typ, RTuple): + assert not optional + self.emit_tuple_cast(src, dest, typ, declare_dest, error, src_type) + else: + assert False, "Cast not implemented: %s" % typ + + def emit_cast_error_handler( + self, error: ErrorHandler, src: str, dest: str, typ: RType, raise_exception: bool + ) -> None: + if raise_exception: + if isinstance(error, TracebackAndGotoHandler): + # Merge raising and emitting traceback entry into a single call. + self.emit_type_error_traceback( + error.source_path, error.module_name, error.traceback_entry, typ=typ, src=src + ) + self.emit_line("goto %s;" % error.label) + return + self.emit_line(f'CPy_TypeError("{self.pretty_name(typ)}", {src}); ') + if isinstance(error, AssignHandler): + self.emit_line("%s = NULL;" % dest) + elif isinstance(error, GotoHandler): + self.emit_line("goto %s;" % error.label) + elif isinstance(error, TracebackAndGotoHandler): + self.emit_line("%s = NULL;" % dest) + self.emit_traceback(error.source_path, error.module_name, error.traceback_entry) + self.emit_line("goto %s;" % error.label) + else: + assert isinstance(error, ReturnHandler), error + self.emit_line("return %s;" % error.value) + + def emit_union_cast( + self, + src: str, + dest: str, + typ: RUnion, + declare_dest: bool, + error: ErrorHandler, + optional: bool, + src_type: RType | None, + raise_exception: bool, + ) -> None: + """Emit cast to a union type. + + The arguments are similar to emit_cast. 
+ """ + if declare_dest: + self.emit_line(f"PyObject *{dest};") + good_label = self.new_label() + if optional: + self.emit_line(f"if ({src} == NULL) {{") + self.emit_line(f"{dest} = {self.c_error_value(typ)};") + self.emit_line(f"goto {good_label};") + self.emit_line("}") + for item in typ.items: + self.emit_cast( + src, + dest, + item, + declare_dest=False, + raise_exception=False, + optional=False, + likely=False, + ) + self.emit_line(f"if ({dest} != NULL) goto {good_label};") + # Handle cast failure. + self.emit_cast_error_handler(error, src, dest, typ, raise_exception) + self.emit_label(good_label) + + def emit_tuple_cast( + self, + src: str, + dest: str, + typ: RTuple, + declare_dest: bool, + error: ErrorHandler, + src_type: RType | None, + ) -> None: + """Emit cast to a tuple type. + + The arguments are similar to emit_cast. + """ + if declare_dest: + self.emit_line(f"PyObject *{dest};") + # This reuse of the variable is super dodgy. We don't even + # care about the values except to check whether they are + # invalid. + out_label = self.new_label() + self.emit_lines( + "if (unlikely(!(PyTuple_Check({r}) && PyTuple_GET_SIZE({r}) == {size}))) {{".format( + r=src, size=len(typ.types) + ), + f"{dest} = NULL;", + f"goto {out_label};", + "}", + ) + for i, item in enumerate(typ.types): + # Since we did the checks above this should never fail + self.emit_cast( + f"PyTuple_GET_ITEM({src}, {i})", + dest, + item, + declare_dest=False, + raise_exception=False, + optional=False, + ) + self.emit_line(f"if ({dest} == NULL) goto {out_label};") + + self.emit_line(f"{dest} = {src};") + self.emit_label(out_label) + + def emit_arg_check(self, src: str, dest: str, typ: RType, check: str, optional: bool) -> None: + if optional: + self.emit_line(f"if ({src} == NULL) {{") + self.emit_line(f"{dest} = {self.c_error_value(typ)};") + if check != "": + self.emit_line("{}if {}".format("} else " if optional else "", check)) + elif optional: + self.emit_line("else {") + + def emit_unbox( + self, + src: str, + dest: str, + typ: RType, + *, + declare_dest: bool = False, + error: ErrorHandler | None = None, + raise_exception: bool = True, + optional: bool = False, + borrow: bool = False, + ) -> None: + """Emit code for unboxing a value of given type (from PyObject *). + + By default, assign error value to dest if the value has an + incompatible type and raise TypeError. These can be customized + using 'error' and 'raise_exception'. + + Generate a new reference unless 'borrow' is True. + + Args: + src: Name of source C variable + dest: Name of target C variable + typ: Type of value + declare_dest: If True, also declare the variable 'dest' + error: What happens on error + raise_exception: If True, also raise TypeError on failure + borrow: If True, create a borrowed reference + + """ + error = error or AssignHandler() + # TODO: Verify refcount handling. 
+ if isinstance(error, AssignHandler): + failure = f"{dest} = {self.c_error_value(typ)};" + elif isinstance(error, GotoHandler): + failure = "goto %s;" % error.label + else: + assert isinstance(error, ReturnHandler), error + failure = "return %s;" % error.value + if raise_exception: + raise_exc = f'CPy_TypeError("{self.pretty_name(typ)}", {src}); ' + failure = raise_exc + failure + if is_int_rprimitive(typ) or is_short_int_rprimitive(typ): + if declare_dest: + self.emit_line(f"CPyTagged {dest};") + self.emit_arg_check(src, dest, typ, f"(likely(PyLong_Check({src})))", optional) + if borrow: + self.emit_line(f" {dest} = CPyTagged_BorrowFromObject({src});") + else: + self.emit_line(f" {dest} = CPyTagged_FromObject({src});") + self.emit_line("else {") + self.emit_line(failure) + self.emit_line("}") + elif is_bool_or_bit_rprimitive(typ): + # Whether we are borrowing or not makes no difference. + if declare_dest: + self.emit_line(f"char {dest};") + self.emit_arg_check(src, dest, typ, f"(unlikely(!PyBool_Check({src}))) {{", optional) + self.emit_line(failure) + self.emit_line("} else") + conversion = f"{src} == Py_True" + self.emit_line(f" {dest} = {conversion};") + elif is_none_rprimitive(typ): + # Whether we are borrowing or not makes no difference. + if declare_dest: + self.emit_line(f"char {dest};") + self.emit_arg_check(src, dest, typ, f"(unlikely({src} != Py_None)) {{", optional) + self.emit_line(failure) + self.emit_line("} else") + self.emit_line(f" {dest} = 1;") + elif is_int64_rprimitive(typ): + # Whether we are borrowing or not makes no difference. + assert not optional # Not supported for overlapping error values + if declare_dest: + self.emit_line(f"int64_t {dest};") + self.emit_line(f"{dest} = CPyLong_AsInt64({src});") + if not isinstance(error, AssignHandler): + self.emit_unbox_failure_with_overlapping_error_value(dest, typ, failure) + elif is_int32_rprimitive(typ): + # Whether we are borrowing or not makes no difference. + assert not optional # Not supported for overlapping error values + if declare_dest: + self.emit_line(f"int32_t {dest};") + self.emit_line(f"{dest} = CPyLong_AsInt32({src});") + if not isinstance(error, AssignHandler): + self.emit_unbox_failure_with_overlapping_error_value(dest, typ, failure) + elif is_int16_rprimitive(typ): + # Whether we are borrowing or not makes no difference. + assert not optional # Not supported for overlapping error values + if declare_dest: + self.emit_line(f"int16_t {dest};") + self.emit_line(f"{dest} = CPyLong_AsInt16({src});") + if not isinstance(error, AssignHandler): + self.emit_unbox_failure_with_overlapping_error_value(dest, typ, failure) + elif is_uint8_rprimitive(typ): + # Whether we are borrowing or not makes no difference. 
+ assert not optional # Not supported for overlapping error values + if declare_dest: + self.emit_line(f"uint8_t {dest};") + self.emit_line(f"{dest} = CPyLong_AsUInt8({src});") + if not isinstance(error, AssignHandler): + self.emit_unbox_failure_with_overlapping_error_value(dest, typ, failure) + elif is_float_rprimitive(typ): + assert not optional # Not supported for overlapping error values + if declare_dest: + self.emit_line(f"double {dest};") + # TODO: Don't use __float__ and __index__ + self.emit_line(f"{dest} = PyFloat_AsDouble({src});") + self.emit_lines(f"if ({dest} == -1.0 && PyErr_Occurred()) {{", failure, "}") + elif isinstance(typ, RTuple): + self.declare_tuple_struct(typ) + if declare_dest: + self.emit_line(f"{self.ctype(typ)} {dest};") + # HACK: The error handling for unboxing tuples is busted + # and instead of fixing it I am just wrapping it in the + # cast code which I think is right. This is not good. + if optional: + self.emit_line(f"if ({src} == NULL) {{") + self.emit_line(f"{dest} = {self.c_error_value(typ)};") + self.emit_line("} else {") + + cast_temp = self.temp_name() + self.emit_tuple_cast( + src, cast_temp, typ, declare_dest=True, error=error, src_type=None + ) + self.emit_line(f"if (unlikely({cast_temp} == NULL)) {{") + + # self.emit_arg_check(src, dest, typ, + # '(!PyTuple_Check({}) || PyTuple_Size({}) != {}) {{'.format( + # src, src, len(typ.types)), optional) + self.emit_line(failure) # TODO: Decrease refcount? + self.emit_line("} else {") + if not typ.types: + self.emit_line(f"{dest}.empty_struct_error_flag = 0;") + for i, item_type in enumerate(typ.types): + temp = self.temp_name() + # emit_tuple_cast above checks the size, so this should not fail + self.emit_line(f"PyObject *{temp} = PyTuple_GET_ITEM({src}, {i});") + temp2 = self.temp_name() + # Unbox or check the item. + if item_type.is_unboxed: + self.emit_unbox( + temp, + temp2, + item_type, + raise_exception=raise_exception, + error=error, + declare_dest=True, + borrow=borrow, + ) + else: + if not borrow: + self.emit_inc_ref(temp, object_rprimitive) + self.emit_cast(temp, temp2, item_type, declare_dest=True) + self.emit_line(f"{dest}.f{i} = {temp2};") + self.emit_line("}") + if optional: + self.emit_line("}") + + else: + assert False, "Unboxing not implemented: %s" % typ + + def emit_box( + self, src: str, dest: str, typ: RType, declare_dest: bool = False, can_borrow: bool = False + ) -> None: + """Emit code for boxing a value of given type. + + Generate a simple assignment if no boxing is needed. + + The source reference count is stolen for the result (no need to decref afterwards). + """ + # TODO: Always generate a new reference (if a reference type) + if declare_dest: + declaration = "PyObject *" + else: + declaration = "" + if is_int_rprimitive(typ) or is_short_int_rprimitive(typ): + # Steal the existing reference if it exists. + self.emit_line(f"{declaration}{dest} = CPyTagged_StealAsObject({src});") + elif is_bool_or_bit_rprimitive(typ): + # N.B: bool is special cased to produce a borrowed value + # after boxing, so we don't need to increment the refcount + # when this comes directly from a Box op. + self.emit_lines(f"{declaration}{dest} = {src} ? Py_True : Py_False;") + if not can_borrow: + self.emit_inc_ref(dest, object_rprimitive) + elif is_none_rprimitive(typ): + # N.B: None is special cased to produce a borrowed value + # after boxing, so we don't need to increment the refcount + # when this comes directly from a Box op. 
+ self.emit_lines(f"{declaration}{dest} = Py_None;") + if not can_borrow: + self.emit_inc_ref(dest, object_rprimitive) + elif is_int32_rprimitive(typ) or is_int16_rprimitive(typ) or is_uint8_rprimitive(typ): + self.emit_line(f"{declaration}{dest} = PyLong_FromLong({src});") + elif is_int64_rprimitive(typ): + self.emit_line(f"{declaration}{dest} = PyLong_FromLongLong({src});") + elif is_float_rprimitive(typ): + self.emit_line(f"{declaration}{dest} = PyFloat_FromDouble({src});") + elif isinstance(typ, RTuple): + self.declare_tuple_struct(typ) + if not typ.types: + self.emit_line(f"{declaration}{dest} = CPyTuple_LoadEmptyTupleConstant();") + else: + self.emit_line(f"{declaration}{dest} = PyTuple_New({len(typ.types)});") + self.emit_line(f"if (unlikely({dest} == NULL))") + self.emit_line(" CPyError_OutOfMemory();") + + # TODO: Fail if dest is None + for i in range(len(typ.types)): + if not typ.is_unboxed: + self.emit_line(f"PyTuple_SET_ITEM({dest}, {i}, {src}.f{i}") + else: + inner_name = self.temp_name() + self.emit_box(f"{src}.f{i}", inner_name, typ.types[i], declare_dest=True) + self.emit_line(f"PyTuple_SET_ITEM({dest}, {i}, {inner_name});") + else: + assert not typ.is_unboxed + # Type is boxed -- trivially just assign. + self.emit_line(f"{declaration}{dest} = {src};") + + def emit_error_check(self, value: str, rtype: RType, failure: str) -> None: + """Emit code for checking a native function return value for uncaught exception.""" + if isinstance(rtype, RTuple): + if len(rtype.types) == 0: + return # empty tuples can't fail. + else: + cond = self.tuple_undefined_check_cond(rtype, value, self.c_error_value, "==") + self.emit_line(f"if ({cond}) {{") + elif rtype.error_overlap: + # The error value is also valid as a normal value, so we need to also check + # for a raised exception. + self.emit_line(f"if ({value} == {self.c_error_value(rtype)} && PyErr_Occurred()) {{") + else: + self.emit_line(f"if ({value} == {self.c_error_value(rtype)}) {{") + self.emit_lines(failure, "}") + + def emit_gc_visit(self, target: str, rtype: RType) -> None: + """Emit code for GC visiting a C variable reference. + + Assume that 'target' represents a C expression that refers to a + struct member, such as 'self->x'. + """ + if not rtype.is_refcounted: + # Not refcounted -> no pointers -> no GC interaction. + return + elif isinstance(rtype, RPrimitive) and rtype.name == "builtins.int": + self.emit_line(f"if (CPyTagged_CheckLong({target})) {{") + self.emit_line(f"Py_VISIT(CPyTagged_LongAsObject({target}));") + self.emit_line("}") + elif isinstance(rtype, RTuple): + for i, item_type in enumerate(rtype.types): + self.emit_gc_visit(f"{target}.f{i}", item_type) + elif self.ctype(rtype) == "PyObject *": + # The simplest case. + self.emit_line(f"Py_VISIT({target});") + else: + assert False, "emit_gc_visit() not implemented for %s" % repr(rtype) + + def emit_gc_clear(self, target: str, rtype: RType) -> None: + """Emit code for clearing a C attribute reference for GC. + + Assume that 'target' represents a C expression that refers to a + struct member, such as 'self->x'. + """ + if not rtype.is_refcounted: + # Not refcounted -> no pointers -> no GC interaction. 
+ return + elif isinstance(rtype, RPrimitive) and rtype.name == "builtins.int": + self.emit_line(f"if (CPyTagged_CheckLong({target})) {{") + self.emit_line(f"CPyTagged __tmp = {target};") + self.emit_line(f"{target} = {self.c_undefined_value(rtype)};") + self.emit_line("Py_XDECREF(CPyTagged_LongAsObject(__tmp));") + self.emit_line("}") + elif isinstance(rtype, RTuple): + for i, item_type in enumerate(rtype.types): + self.emit_gc_clear(f"{target}.f{i}", item_type) + elif self.ctype(rtype) == "PyObject *" and self.c_undefined_value(rtype) == "NULL": + # The simplest case. + self.emit_line(f"Py_CLEAR({target});") + else: + assert False, "emit_gc_clear() not implemented for %s" % repr(rtype) + + def emit_reuse_clear(self, target: str, rtype: RType) -> None: + """Emit attribute clear before object is added into freelist. + + Assume that 'target' represents a C expression that refers to a + struct member, such as 'self->x'. + + Unlike emit_gc_clear(), initialize attribute value to match a freshly + allocated object. + """ + if isinstance(rtype, RTuple): + for i, item_type in enumerate(rtype.types): + self.emit_reuse_clear(f"{target}.f{i}", item_type) + elif not rtype.is_refcounted: + self.emit_line(f"{target} = {rtype.c_undefined};") + elif isinstance(rtype, RPrimitive) and rtype.name == "builtins.int": + self.emit_line(f"if (CPyTagged_CheckLong({target})) {{") + self.emit_line(f"CPyTagged __tmp = {target};") + self.emit_line(f"{target} = {self.c_undefined_value(rtype)};") + self.emit_line("Py_XDECREF(CPyTagged_LongAsObject(__tmp));") + self.emit_line("} else {") + self.emit_line(f"{target} = {self.c_undefined_value(rtype)};") + self.emit_line("}") + else: + self.emit_gc_clear(target, rtype) + + def emit_traceback( + self, source_path: str, module_name: str, traceback_entry: tuple[str, int] + ) -> None: + return self._emit_traceback("CPy_AddTraceback", source_path, module_name, traceback_entry) + + def emit_type_error_traceback( + self, + source_path: str, + module_name: str, + traceback_entry: tuple[str, int], + *, + typ: RType, + src: str, + ) -> None: + func = "CPy_TypeErrorTraceback" + type_str = f'"{self.pretty_name(typ)}"' + return self._emit_traceback( + func, source_path, module_name, traceback_entry, type_str=type_str, src=src + ) + + def _emit_traceback( + self, + func: str, + source_path: str, + module_name: str, + traceback_entry: tuple[str, int], + type_str: str = "", + src: str = "", + ) -> None: + globals_static = self.static_name("globals", module_name) + line = '%s("%s", "%s", %d, %s' % ( + func, + source_path.replace("\\", "\\\\"), + traceback_entry[0], + traceback_entry[1], + globals_static, + ) + if type_str: + assert src + line += f", {type_str}, {src}" + line += ");" + self.emit_line(line) + if DEBUG_ERRORS: + self.emit_line('assert(PyErr_Occurred() != NULL && "failure w/o err!");') + + def emit_unbox_failure_with_overlapping_error_value( + self, dest: str, typ: RType, failure: str + ) -> None: + self.emit_line(f"if ({dest} == {self.c_error_value(typ)} && PyErr_Occurred()) {{") + self.emit_line(failure) + self.emit_line("}") + + +def c_array_initializer(components: list[str], *, indented: bool = False) -> str: + """Construct an initializer for a C array variable. + + Components are C expressions valid in an initializer. + + For example, if components are ["1", "2"], the result + would be "{1, 2}", which can be used like this: + + int a[] = {1, 2}; + + If the result is long, split it into multiple lines. 
+ """ + indent = " " * 4 if indented else "" + res = [] + current: list[str] = [] + cur_len = 0 + for c in components: + if not current or cur_len + 2 + len(indent) + len(c) < 70: + current.append(c) + cur_len += len(c) + 2 + else: + res.append(indent + ", ".join(current)) + current = [c] + cur_len = len(c) + if not res: + # Result fits on a single line + return "{%s}" % ", ".join(current) + # Multi-line result + res.append(indent + ", ".join(current)) + return "{\n " + ",\n ".join(res) + "\n" + indent + "}" diff --git a/.venv/lib/python3.12/site-packages/mypyc/codegen/emitclass.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/codegen/emitclass.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..44b6394 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/codegen/emitclass.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/codegen/emitclass.py b/.venv/lib/python3.12/site-packages/mypyc/codegen/emitclass.py new file mode 100644 index 0000000..e190d45 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/codegen/emitclass.py @@ -0,0 +1,1256 @@ +"""Code generation for native classes and related wrappers.""" + +from __future__ import annotations + +from collections.abc import Mapping +from typing import Callable + +from mypy.nodes import ARG_STAR, ARG_STAR2 +from mypyc.codegen.cstring import c_string_initializer +from mypyc.codegen.emit import Emitter, HeaderDeclaration, ReturnHandler +from mypyc.codegen.emitfunc import native_function_doc_initializer, native_function_header +from mypyc.codegen.emitwrapper import ( + generate_bin_op_wrapper, + generate_bool_wrapper, + generate_contains_wrapper, + generate_dunder_wrapper, + generate_get_wrapper, + generate_hash_wrapper, + generate_ipow_wrapper, + generate_len_wrapper, + generate_richcompare_wrapper, + generate_set_del_item_wrapper, +) +from mypyc.common import BITMAP_BITS, BITMAP_TYPE, NATIVE_PREFIX, PREFIX, REG_PREFIX +from mypyc.ir.class_ir import ClassIR, VTableEntries +from mypyc.ir.func_ir import ( + FUNC_CLASSMETHOD, + FUNC_STATICMETHOD, + FuncDecl, + FuncIR, + get_text_signature, +) +from mypyc.ir.rtypes import RTuple, RType, object_rprimitive +from mypyc.namegen import NameGenerator +from mypyc.sametype import is_same_type + + +def native_slot(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: + return f"{NATIVE_PREFIX}{fn.cname(emitter.names)}" + + +def dunder_attr_slot(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: + wrapper_fn = cl.get_method(fn.name + "__wrapper") + assert wrapper_fn + return f"{NATIVE_PREFIX}{wrapper_fn.cname(emitter.names)}" + + +# We maintain a table from dunder function names to struct slots they +# correspond to and functions that generate a wrapper (if necessary) +# and return the function name to stick in the slot. 
+# TODO: Add remaining dunder methods +SlotGenerator = Callable[[ClassIR, FuncIR, Emitter], str] +SlotTable = Mapping[str, tuple[str, SlotGenerator]] + +SLOT_DEFS: SlotTable = { + "__init__": ("tp_init", lambda c, t, e: generate_init_for_class(c, t, e)), + "__call__": ("tp_call", lambda c, t, e: generate_call_wrapper(c, t, e)), + "__str__": ("tp_str", native_slot), + "__repr__": ("tp_repr", native_slot), + "__next__": ("tp_iternext", native_slot), + "__iter__": ("tp_iter", native_slot), + "__hash__": ("tp_hash", generate_hash_wrapper), + "__get__": ("tp_descr_get", generate_get_wrapper), + "__getattr__": ("tp_getattro", dunder_attr_slot), + "__setattr__": ("tp_setattro", dunder_attr_slot), +} + +AS_MAPPING_SLOT_DEFS: SlotTable = { + "__getitem__": ("mp_subscript", generate_dunder_wrapper), + "__setitem__": ("mp_ass_subscript", generate_set_del_item_wrapper), + "__delitem__": ("mp_ass_subscript", generate_set_del_item_wrapper), + "__len__": ("mp_length", generate_len_wrapper), +} + +AS_SEQUENCE_SLOT_DEFS: SlotTable = {"__contains__": ("sq_contains", generate_contains_wrapper)} + +AS_NUMBER_SLOT_DEFS: SlotTable = { + # Unary operations. + "__bool__": ("nb_bool", generate_bool_wrapper), + "__int__": ("nb_int", generate_dunder_wrapper), + "__float__": ("nb_float", generate_dunder_wrapper), + "__neg__": ("nb_negative", generate_dunder_wrapper), + "__pos__": ("nb_positive", generate_dunder_wrapper), + "__abs__": ("nb_absolute", generate_dunder_wrapper), + "__invert__": ("nb_invert", generate_dunder_wrapper), + # Binary operations. + "__add__": ("nb_add", generate_bin_op_wrapper), + "__radd__": ("nb_add", generate_bin_op_wrapper), + "__sub__": ("nb_subtract", generate_bin_op_wrapper), + "__rsub__": ("nb_subtract", generate_bin_op_wrapper), + "__mul__": ("nb_multiply", generate_bin_op_wrapper), + "__rmul__": ("nb_multiply", generate_bin_op_wrapper), + "__mod__": ("nb_remainder", generate_bin_op_wrapper), + "__rmod__": ("nb_remainder", generate_bin_op_wrapper), + "__truediv__": ("nb_true_divide", generate_bin_op_wrapper), + "__rtruediv__": ("nb_true_divide", generate_bin_op_wrapper), + "__floordiv__": ("nb_floor_divide", generate_bin_op_wrapper), + "__rfloordiv__": ("nb_floor_divide", generate_bin_op_wrapper), + "__divmod__": ("nb_divmod", generate_bin_op_wrapper), + "__rdivmod__": ("nb_divmod", generate_bin_op_wrapper), + "__lshift__": ("nb_lshift", generate_bin_op_wrapper), + "__rlshift__": ("nb_lshift", generate_bin_op_wrapper), + "__rshift__": ("nb_rshift", generate_bin_op_wrapper), + "__rrshift__": ("nb_rshift", generate_bin_op_wrapper), + "__and__": ("nb_and", generate_bin_op_wrapper), + "__rand__": ("nb_and", generate_bin_op_wrapper), + "__or__": ("nb_or", generate_bin_op_wrapper), + "__ror__": ("nb_or", generate_bin_op_wrapper), + "__xor__": ("nb_xor", generate_bin_op_wrapper), + "__rxor__": ("nb_xor", generate_bin_op_wrapper), + "__matmul__": ("nb_matrix_multiply", generate_bin_op_wrapper), + "__rmatmul__": ("nb_matrix_multiply", generate_bin_op_wrapper), + # In-place binary operations. 
+ "__iadd__": ("nb_inplace_add", generate_dunder_wrapper), + "__isub__": ("nb_inplace_subtract", generate_dunder_wrapper), + "__imul__": ("nb_inplace_multiply", generate_dunder_wrapper), + "__imod__": ("nb_inplace_remainder", generate_dunder_wrapper), + "__itruediv__": ("nb_inplace_true_divide", generate_dunder_wrapper), + "__ifloordiv__": ("nb_inplace_floor_divide", generate_dunder_wrapper), + "__ilshift__": ("nb_inplace_lshift", generate_dunder_wrapper), + "__irshift__": ("nb_inplace_rshift", generate_dunder_wrapper), + "__iand__": ("nb_inplace_and", generate_dunder_wrapper), + "__ior__": ("nb_inplace_or", generate_dunder_wrapper), + "__ixor__": ("nb_inplace_xor", generate_dunder_wrapper), + "__imatmul__": ("nb_inplace_matrix_multiply", generate_dunder_wrapper), + # Ternary operations. (yes, really) + # These are special cased in generate_bin_op_wrapper(). + "__pow__": ("nb_power", generate_bin_op_wrapper), + "__rpow__": ("nb_power", generate_bin_op_wrapper), + "__ipow__": ("nb_inplace_power", generate_ipow_wrapper), +} + +AS_ASYNC_SLOT_DEFS: SlotTable = { + "__await__": ("am_await", native_slot), + "__aiter__": ("am_aiter", native_slot), + "__anext__": ("am_anext", native_slot), +} + +SIDE_TABLES = [ + ("as_mapping", "PyMappingMethods", AS_MAPPING_SLOT_DEFS), + ("as_sequence", "PySequenceMethods", AS_SEQUENCE_SLOT_DEFS), + ("as_number", "PyNumberMethods", AS_NUMBER_SLOT_DEFS), + ("as_async", "PyAsyncMethods", AS_ASYNC_SLOT_DEFS), +] + +# Slots that need to always be filled in because they don't get +# inherited right. +ALWAYS_FILL = {"__hash__"} + + +def generate_call_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: + return "PyVectorcall_Call" + + +def slot_key(attr: str) -> str: + """Map dunder method name to sort key. + + Sort reverse operator methods and __delitem__ after others ('x' > '_'). + """ + if (attr.startswith("__r") and attr != "__rshift__") or attr == "__delitem__": + return "x" + attr + return attr + + +def generate_slots(cl: ClassIR, table: SlotTable, emitter: Emitter) -> dict[str, str]: + fields: dict[str, str] = {} + generated: dict[str, str] = {} + # Sort for determinism on Python 3.5 + for name, (slot, generator) in sorted(table.items(), key=lambda x: slot_key(x[0])): + method_cls = cl.get_method_and_class(name) + if method_cls and (method_cls[1] == cl or name in ALWAYS_FILL): + if slot in generated: + # Reuse previously generated wrapper. + fields[slot] = generated[slot] + else: + # Generate new wrapper. + name = generator(cl, method_cls[0], emitter) + fields[slot] = name + generated[slot] = name + + return fields + + +def generate_class_type_decl( + cl: ClassIR, c_emitter: Emitter, external_emitter: Emitter, emitter: Emitter +) -> None: + context = c_emitter.context + name = emitter.type_struct_name(cl) + context.declarations[name] = HeaderDeclaration( + f"PyTypeObject *{emitter.type_struct_name(cl)};", needs_export=True + ) + + # If this is a non-extension class, all we want is the type object decl. + if not cl.is_ext_class: + return + + generate_object_struct(cl, external_emitter) + generate_full = not cl.is_trait and not cl.builtin_base + if generate_full: + context.declarations[emitter.native_function_name(cl.ctor)] = HeaderDeclaration( + f"{native_function_header(cl.ctor, emitter)};", needs_export=True + ) + + +def generate_class_reuse( + cl: ClassIR, c_emitter: Emitter, external_emitter: Emitter, emitter: Emitter +) -> None: + """Generate a definition of a single-object per-class free "list". 
+ + This speeds up object allocation and freeing when there are many short-lived + objects. + + TODO: Generalize to support a free list with up to N objects. + """ + assert cl.reuse_freed_instance + context = c_emitter.context + name = cl.name_prefix(c_emitter.names) + "_free_instance" + struct_name = cl.struct_name(c_emitter.names) + context.declarations[name] = HeaderDeclaration( + f"CPyThreadLocal {struct_name} *{name};", needs_export=True + ) + + +def generate_class(cl: ClassIR, module: str, emitter: Emitter) -> None: + """Generate C code for a class. + + This is the main entry point to the module. + """ + name = cl.name + name_prefix = cl.name_prefix(emitter.names) + + setup_name = emitter.native_function_name(cl.setup) + new_name = f"{name_prefix}_new" + finalize_name = f"{name_prefix}_finalize" + members_name = f"{name_prefix}_members" + getseters_name = f"{name_prefix}_getseters" + vtable_name = f"{name_prefix}_vtable" + traverse_name = f"{name_prefix}_traverse" + clear_name = f"{name_prefix}_clear" + dealloc_name = f"{name_prefix}_dealloc" + methods_name = f"{name_prefix}_methods" + vtable_setup_name = f"{name_prefix}_trait_vtable_setup" + + fields: dict[str, str] = {"tp_name": f'"{name}"'} + + generate_full = not cl.is_trait and not cl.builtin_base + needs_getseters = cl.needs_getseters or not cl.is_generated or cl.has_dict + + if not cl.builtin_base: + fields["tp_new"] = new_name + + if generate_full: + fields["tp_dealloc"] = f"(destructor){name_prefix}_dealloc" + fields["tp_traverse"] = f"(traverseproc){name_prefix}_traverse" + fields["tp_clear"] = f"(inquiry){name_prefix}_clear" + # Populate .tp_finalize and generate a finalize method only if __del__ is defined for this class. + del_method = next((e.method for e in cl.vtable_entries if e.name == "__del__"), None) + if del_method: + fields["tp_finalize"] = f"(destructor){finalize_name}" + if needs_getseters: + fields["tp_getset"] = getseters_name + fields["tp_methods"] = methods_name + + def emit_line() -> None: + emitter.emit_line() + + emit_line() + + # If the class has a method to initialize default attribute + # values, we need to call it during initialization. + defaults_fn = cl.get_method("__mypyc_defaults_setup") + + # If there is a __init__ method, we'll use it in the native constructor. + init_fn = cl.get_method("__init__") + + # Fill out slots in the type object from dunder methods. + fields.update(generate_slots(cl, SLOT_DEFS, emitter)) + + # Fill out dunder methods that live in tables hanging off the side. + for table_name, type, slot_defs in SIDE_TABLES: + slots = generate_slots(cl, slot_defs, emitter) + if slots: + table_struct_name = generate_side_table_for_class(cl, table_name, type, slots, emitter) + fields[f"tp_{table_name}"] = f"&{table_struct_name}" + + richcompare_name = generate_richcompare_wrapper(cl, emitter) + if richcompare_name: + fields["tp_richcompare"] = richcompare_name + + # If the class inherits from python, make space for a __dict__ + struct_name = cl.struct_name(emitter.names) + if cl.builtin_base: + base_size = f"sizeof({cl.builtin_base})" + elif cl.is_trait: + base_size = "sizeof(PyObject)" + else: + base_size = f"sizeof({struct_name})" + # Since our types aren't allocated using type() we need to + # populate these fields ourselves if we want them to have correct + # values. PyType_Ready will inherit the offsets from tp_base but + # that isn't what we want. 
+ + # XXX: there is no reason for the __weakref__ stuff to be mixed up with __dict__ + if cl.has_dict and not has_managed_dict(cl, emitter): + # __dict__ lives right after the struct and __weakref__ lives right after that + # TODO: They should get members in the struct instead of doing this nonsense. + weak_offset = f"{base_size} + sizeof(PyObject *)" + emitter.emit_lines( + f"PyMemberDef {members_name}[] = {{", + f'{{"__dict__", T_OBJECT_EX, {base_size}, 0, NULL}},', + f'{{"__weakref__", T_OBJECT_EX, {weak_offset}, 0, NULL}},', + "{0}", + "};", + ) + + fields["tp_members"] = members_name + fields["tp_basicsize"] = f"{base_size} + 2*sizeof(PyObject *)" + if emitter.capi_version < (3, 12): + fields["tp_dictoffset"] = base_size + fields["tp_weaklistoffset"] = weak_offset + else: + fields["tp_basicsize"] = base_size + + if generate_full: + assert cl.setup is not None + emitter.emit_line(native_function_header(cl.setup, emitter) + ";") + assert cl.ctor is not None + emitter.emit_line(native_function_header(cl.ctor, emitter) + ";") + + emit_line() + init_fn = cl.get_method("__init__") + generate_new_for_class(cl, new_name, vtable_name, setup_name, init_fn, emitter) + emit_line() + generate_traverse_for_class(cl, traverse_name, emitter) + emit_line() + generate_clear_for_class(cl, clear_name, emitter) + emit_line() + generate_dealloc_for_class(cl, dealloc_name, clear_name, bool(del_method), emitter) + emit_line() + + if cl.allow_interpreted_subclasses: + shadow_vtable_name: str | None = generate_vtables( + cl, vtable_setup_name + "_shadow", vtable_name + "_shadow", emitter, shadow=True + ) + emit_line() + else: + shadow_vtable_name = None + vtable_name = generate_vtables(cl, vtable_setup_name, vtable_name, emitter, shadow=False) + emit_line() + if del_method: + generate_finalize_for_class(del_method, finalize_name, emitter) + emit_line() + if needs_getseters: + generate_getseter_declarations(cl, emitter) + emit_line() + generate_getseters_table(cl, getseters_name, emitter) + emit_line() + + if cl.is_trait: + generate_new_for_trait(cl, new_name, emitter) + + generate_methods_table(cl, methods_name, setup_name if generate_full else None, emitter) + emit_line() + + flags = ["Py_TPFLAGS_DEFAULT", "Py_TPFLAGS_HEAPTYPE", "Py_TPFLAGS_BASETYPE"] + if generate_full: + flags.append("Py_TPFLAGS_HAVE_GC") + if cl.has_method("__call__"): + fields["tp_vectorcall_offset"] = "offsetof({}, vectorcall)".format( + cl.struct_name(emitter.names) + ) + flags.append("_Py_TPFLAGS_HAVE_VECTORCALL") + if not fields.get("tp_vectorcall"): + # This is just a placeholder to please CPython. It will be + # overridden during setup. 
+ fields["tp_call"] = "PyVectorcall_Call" + if has_managed_dict(cl, emitter): + flags.append("Py_TPFLAGS_MANAGED_DICT") + fields["tp_flags"] = " | ".join(flags) + + fields["tp_doc"] = f"PyDoc_STR({native_class_doc_initializer(cl)})" + + emitter.emit_line(f"static PyTypeObject {emitter.type_struct_name(cl)}_template_ = {{") + emitter.emit_line("PyVarObject_HEAD_INIT(NULL, 0)") + for field, value in fields.items(): + emitter.emit_line(f".{field} = {value},") + emitter.emit_line("};") + emitter.emit_line( + "static PyTypeObject *{t}_template = &{t}_template_;".format( + t=emitter.type_struct_name(cl) + ) + ) + + emitter.emit_line() + if generate_full: + generate_setup_for_class(cl, defaults_fn, vtable_name, shadow_vtable_name, emitter) + emitter.emit_line() + generate_constructor_for_class(cl, cl.ctor, init_fn, setup_name, vtable_name, emitter) + emitter.emit_line() + if needs_getseters: + generate_getseters(cl, emitter) + + +def getter_name(cl: ClassIR, attribute: str, names: NameGenerator) -> str: + return names.private_name(cl.module_name, f"{cl.name}_get_{attribute}") + + +def setter_name(cl: ClassIR, attribute: str, names: NameGenerator) -> str: + return names.private_name(cl.module_name, f"{cl.name}_set_{attribute}") + + +def generate_object_struct(cl: ClassIR, emitter: Emitter) -> None: + seen_attrs: set[str] = set() + lines: list[str] = [] + lines += ["typedef struct {", "PyObject_HEAD", "CPyVTableItem *vtable;"] + if cl.has_method("__call__"): + lines.append("vectorcallfunc vectorcall;") + bitmap_attrs = [] + for base in reversed(cl.base_mro): + if not base.is_trait: + if base.bitmap_attrs: + # Do we need another attribute bitmap field? + if emitter.bitmap_field(len(base.bitmap_attrs) - 1) not in bitmap_attrs: + for i in range(0, len(base.bitmap_attrs), BITMAP_BITS): + attr = emitter.bitmap_field(i) + if attr not in bitmap_attrs: + lines.append(f"{BITMAP_TYPE} {attr};") + bitmap_attrs.append(attr) + for attr, rtype in base.attributes.items(): + # Generated class may redefine certain attributes with different + # types in subclasses (this would be unsafe for user-defined classes). + if attr not in seen_attrs: + lines.append(f"{emitter.ctype_spaced(rtype)}{emitter.attr(attr)};") + seen_attrs.add(attr) + + if isinstance(rtype, RTuple): + emitter.declare_tuple_struct(rtype) + + lines.append(f"}} {cl.struct_name(emitter.names)};") + lines.append("") + emitter.context.declarations[cl.struct_name(emitter.names)] = HeaderDeclaration( + lines, is_type=True + ) + + +def generate_vtables( + base: ClassIR, vtable_setup_name: str, vtable_name: str, emitter: Emitter, shadow: bool +) -> str: + """Emit the vtables and vtable setup functions for a class. + + This includes both the primary vtable and any trait implementation vtables. + The trait vtables go before the main vtable, and have the following layout: + { + CPyType_T1, // pointer to type object + C_T1_trait_vtable, // pointer to array of method pointers + C_T1_offset_table, // pointer to array of attribute offsets + CPyType_T2, + C_T2_trait_vtable, + C_T2_offset_table, + ... + } + The method implementations are calculated at the end of IR pass, attribute + offsets are {offsetof(native__C, _x1), offsetof(native__C, _y1), ...}. + + To account for both dynamic loading and dynamic class creation, + vtables are populated dynamically at class creation time, so we + emit empty array definitions to store the vtables and a function to + populate them. 
+ + If shadow is True, generate "shadow vtables" that point to the + shadow glue methods (which should dispatch via the Python C-API). + + Returns the expression to use to refer to the vtable, which might be + different than the name, if there are trait vtables. + """ + + def trait_vtable_name(trait: ClassIR) -> str: + return "{}_{}_trait_vtable{}".format( + base.name_prefix(emitter.names), + trait.name_prefix(emitter.names), + "_shadow" if shadow else "", + ) + + def trait_offset_table_name(trait: ClassIR) -> str: + return "{}_{}_offset_table".format( + base.name_prefix(emitter.names), trait.name_prefix(emitter.names) + ) + + # Emit array definitions with enough space for all the entries + emitter.emit_line( + "static CPyVTableItem {}[{}];".format( + vtable_name, max(1, len(base.vtable_entries) + 3 * len(base.trait_vtables)) + ) + ) + + for trait, vtable in base.trait_vtables.items(): + # Trait methods entry (vtable index -> method implementation). + emitter.emit_line( + f"static CPyVTableItem {trait_vtable_name(trait)}[{max(1, len(vtable))}];" + ) + # Trait attributes entry (attribute number in trait -> offset in actual struct). + emitter.emit_line( + "static size_t {}[{}];".format( + trait_offset_table_name(trait), max(1, len(trait.attributes)) + ) + ) + + # Emit vtable setup function + emitter.emit_line("static bool") + emitter.emit_line(f"{NATIVE_PREFIX}{vtable_setup_name}(void)") + emitter.emit_line("{") + + if base.allow_interpreted_subclasses and not shadow: + emitter.emit_line(f"{NATIVE_PREFIX}{vtable_setup_name}_shadow();") + + subtables = [] + for trait, vtable in base.trait_vtables.items(): + name = trait_vtable_name(trait) + offset_name = trait_offset_table_name(trait) + generate_vtable(vtable, name, emitter, [], shadow) + generate_offset_table(offset_name, emitter, trait, base) + subtables.append((trait, name, offset_name)) + + generate_vtable(base.vtable_entries, vtable_name, emitter, subtables, shadow) + + emitter.emit_line("return 1;") + emitter.emit_line("}") + + return vtable_name if not subtables else f"{vtable_name} + {len(subtables) * 3}" + + +def generate_offset_table( + trait_offset_table_name: str, emitter: Emitter, trait: ClassIR, cl: ClassIR +) -> None: + """Generate attribute offset row of a trait vtable.""" + emitter.emit_line(f"size_t {trait_offset_table_name}_scratch[] = {{") + for attr in trait.attributes: + emitter.emit_line(f"offsetof({cl.struct_name(emitter.names)}, {emitter.attr(attr)}),") + if not trait.attributes: + # This is for msvc. 
+ emitter.emit_line("0") + emitter.emit_line("};") + emitter.emit_line( + "memcpy({name}, {name}_scratch, sizeof({name}));".format(name=trait_offset_table_name) + ) + + +def generate_vtable( + entries: VTableEntries, + vtable_name: str, + emitter: Emitter, + subtables: list[tuple[ClassIR, str, str]], + shadow: bool, +) -> None: + emitter.emit_line(f"CPyVTableItem {vtable_name}_scratch[] = {{") + if subtables: + emitter.emit_line("/* Array of trait vtables */") + for trait, table, offset_table in subtables: + emitter.emit_line( + "(CPyVTableItem){}, (CPyVTableItem){}, (CPyVTableItem){},".format( + emitter.type_struct_name(trait), table, offset_table + ) + ) + emitter.emit_line("/* Start of real vtable */") + + for entry in entries: + method = entry.shadow_method if shadow and entry.shadow_method else entry.method + emitter.emit_line( + "(CPyVTableItem){}{}{},".format( + emitter.get_group_prefix(entry.method.decl), + NATIVE_PREFIX, + method.cname(emitter.names), + ) + ) + + # msvc doesn't allow empty arrays; maybe allowing them at all is an extension? + if not entries: + emitter.emit_line("NULL") + emitter.emit_line("};") + emitter.emit_line("memcpy({name}, {name}_scratch, sizeof({name}));".format(name=vtable_name)) + + +def generate_setup_for_class( + cl: ClassIR, + defaults_fn: FuncIR | None, + vtable_name: str, + shadow_vtable_name: str | None, + emitter: Emitter, +) -> None: + """Generate a native function that allocates an instance of a class.""" + emitter.emit_line(native_function_header(cl.setup, emitter)) + emitter.emit_line("{") + type_arg_name = REG_PREFIX + cl.setup.sig.args[0].name + emitter.emit_line(f"PyTypeObject *type = (PyTypeObject*){type_arg_name};") + struct_name = cl.struct_name(emitter.names) + emitter.emit_line(f"{struct_name} *self;") + + prefix = cl.name_prefix(emitter.names) + if cl.reuse_freed_instance: + # Attempt to use a per-type free list first (a free "list" with up to one object only). + emitter.emit_line(f"if ({prefix}_free_instance != NULL) {{") + emitter.emit_line(f"self = {prefix}_free_instance;") + emitter.emit_line(f"{prefix}_free_instance = NULL;") + emitter.emit_line("Py_SET_REFCNT(self, 1);") + emitter.emit_line("PyObject_GC_Track(self);") + if defaults_fn is not None: + emit_attr_defaults_func_call(defaults_fn, "self", emitter) + emitter.emit_line("return (PyObject *)self;") + emitter.emit_line("}") + + emitter.emit_line(f"self = ({cl.struct_name(emitter.names)} *)type->tp_alloc(type, 0);") + emitter.emit_line("if (self == NULL)") + emitter.emit_line(" return NULL;") + + if shadow_vtable_name: + emitter.emit_line(f"if (type != {emitter.type_struct_name(cl)}) {{") + emitter.emit_line(f"self->vtable = {shadow_vtable_name};") + emitter.emit_line("} else {") + emitter.emit_line(f"self->vtable = {vtable_name};") + emitter.emit_line("}") + else: + emitter.emit_line(f"self->vtable = {vtable_name};") + + emit_clear_bitmaps(cl, emitter) + + if cl.has_method("__call__"): + name = cl.method_decl("__call__").cname(emitter.names) + emitter.emit_line(f"self->vectorcall = {PREFIX}{name};") + + for base in reversed(cl.base_mro): + for attr, rtype in base.attributes.items(): + value = emitter.c_undefined_value(rtype) + + # We don't need to set this field to NULL since tp_alloc() already + # zero-initializes `self`. 
+ if value != "NULL": + emitter.emit_line(rf"self->{emitter.attr(attr)} = {value};") + + # Initialize attributes to default values, if necessary + if defaults_fn is not None: + emit_attr_defaults_func_call(defaults_fn, "self", emitter) + + emitter.emit_line("return (PyObject *)self;") + emitter.emit_line("}") + + +def emit_clear_bitmaps(cl: ClassIR, emitter: Emitter) -> None: + """Emit C code to clear bitmaps that track if attributes have an assigned value.""" + for i in range(0, len(cl.bitmap_attrs), BITMAP_BITS): + field = emitter.bitmap_field(i) + emitter.emit_line(f"self->{field} = 0;") + + +def emit_attr_defaults_func_call(defaults_fn: FuncIR, self_name: str, emitter: Emitter) -> None: + """Emit C code to initialize attribute defaults by calling defaults_fn. + + The code returns NULL on a raised exception. + """ + emitter.emit_lines( + "if ({}{}((PyObject *){}) == 0) {{".format( + NATIVE_PREFIX, defaults_fn.cname(emitter.names), self_name + ), + "Py_DECREF(self);", + "return NULL;", + "}", + ) + + +def emit_setup_or_dunder_new_call( + cl: ClassIR, + setup_name: str, + type_arg: str, + native_prefix: bool, + new_args: str, + emitter: Emitter, +) -> None: + def emit_null_check() -> None: + emitter.emit_line("if (self == NULL)") + emitter.emit_line(" return NULL;") + + new_fn = cl.get_method("__new__") + if not new_fn: + emitter.emit_line(f"PyObject *self = {setup_name}({type_arg});") + emit_null_check() + return + prefix = emitter.get_group_prefix(new_fn.decl) + NATIVE_PREFIX if native_prefix else PREFIX + all_args = type_arg + if new_args != "": + all_args += ", " + new_args + emitter.emit_line(f"PyObject *self = {prefix}{new_fn.cname(emitter.names)}({all_args});") + emit_null_check() + + # skip __init__ if __new__ returns some other type + emitter.emit_line(f"if (Py_TYPE(self) != {emitter.type_struct_name(cl)})") + emitter.emit_line(" return self;") + + +def generate_constructor_for_class( + cl: ClassIR, + fn: FuncDecl, + init_fn: FuncIR | None, + setup_name: str, + vtable_name: str, + emitter: Emitter, +) -> None: + """Generate a native function that allocates and initializes an instance of a class.""" + emitter.emit_line(f"{native_function_header(fn, emitter)}") + emitter.emit_line("{") + + fn_args = [REG_PREFIX + arg.name for arg in fn.sig.args] + type_arg = "(PyObject *)" + emitter.type_struct_name(cl) + new_args = ", ".join(fn_args) + + use_wrapper = ( + cl.has_method("__new__") + and len(fn.sig.args) == 2 + and fn.sig.args[0].kind == ARG_STAR + and fn.sig.args[1].kind == ARG_STAR2 + ) + emit_setup_or_dunder_new_call(cl, setup_name, type_arg, not use_wrapper, new_args, emitter) + + args = ", ".join(["self"] + fn_args) + if init_fn is not None: + prefix = PREFIX if use_wrapper else NATIVE_PREFIX + cast = "!= NULL ? 
0 : -1" if use_wrapper else "" + emitter.emit_line( + "char res = {}{}{}({}){};".format( + emitter.get_group_prefix(init_fn.decl), + prefix, + init_fn.cname(emitter.names), + args, + cast, + ) + ) + emitter.emit_line("if (res == 2) {") + emitter.emit_line("Py_DECREF(self);") + emitter.emit_line("return NULL;") + emitter.emit_line("}") + + # If there is a nontrivial ctor that we didn't define, invoke it via tp_init + elif len(fn.sig.args) > 1: + emitter.emit_line(f"int res = {emitter.type_struct_name(cl)}->tp_init({args});") + + emitter.emit_line("if (res < 0) {") + emitter.emit_line("Py_DECREF(self);") + emitter.emit_line("return NULL;") + emitter.emit_line("}") + + emitter.emit_line("return self;") + emitter.emit_line("}") + + +def generate_init_for_class(cl: ClassIR, init_fn: FuncIR, emitter: Emitter) -> str: + """Generate an init function suitable for use as tp_init. + + tp_init needs to be a function that returns an int, and our + __init__ methods return a PyObject. Translate NULL to -1, + everything else to 0. + """ + func_name = f"{cl.name_prefix(emitter.names)}_init" + + emitter.emit_line("static int") + emitter.emit_line(f"{func_name}(PyObject *self, PyObject *args, PyObject *kwds)") + emitter.emit_line("{") + if cl.allow_interpreted_subclasses or cl.builtin_base or cl.has_method("__new__"): + emitter.emit_line( + "return {}{}(self, args, kwds) != NULL ? 0 : -1;".format( + PREFIX, init_fn.cname(emitter.names) + ) + ) + else: + emitter.emit_line("return 0;") + emitter.emit_line("}") + + return func_name + + +def generate_new_for_class( + cl: ClassIR, + func_name: str, + vtable_name: str, + setup_name: str, + init_fn: FuncIR | None, + emitter: Emitter, +) -> None: + emitter.emit_line("static PyObject *") + emitter.emit_line(f"{func_name}(PyTypeObject *type, PyObject *args, PyObject *kwds)") + emitter.emit_line("{") + # TODO: Check and unbox arguments + if not cl.allow_interpreted_subclasses: + emitter.emit_line(f"if (type != {emitter.type_struct_name(cl)}) {{") + emitter.emit_line( + 'PyErr_SetString(PyExc_TypeError, "interpreted classes cannot inherit from compiled");' + ) + emitter.emit_line("return NULL;") + emitter.emit_line("}") + + type_arg = "(PyObject*)type" + new_args = "args, kwds" + emit_setup_or_dunder_new_call(cl, setup_name, type_arg, False, new_args, emitter) + if ( + not init_fn + or cl.allow_interpreted_subclasses + or cl.builtin_base + or cl.is_serializable() + or cl.has_method("__new__") + ): + # Match Python semantics -- __new__ doesn't call __init__. + emitter.emit_line("return self;") + else: + # __new__ of a native class implicitly calls __init__ so that we + # can enforce that instances are always properly initialized. This + # is needed to support always defined attributes. 
+ emitter.emit_line( + f"PyObject *ret = {PREFIX}{init_fn.cname(emitter.names)}(self, args, kwds);" + ) + emitter.emit_lines("if (ret == NULL)", " return NULL;") + emitter.emit_line("return self;") + emitter.emit_line("}") + + +def generate_new_for_trait(cl: ClassIR, func_name: str, emitter: Emitter) -> None: + emitter.emit_line("static PyObject *") + emitter.emit_line(f"{func_name}(PyTypeObject *type, PyObject *args, PyObject *kwds)") + emitter.emit_line("{") + emitter.emit_line(f"if (type != {emitter.type_struct_name(cl)}) {{") + emitter.emit_line( + "PyErr_SetString(PyExc_TypeError, " + '"interpreted classes cannot inherit from compiled traits");' + ) + emitter.emit_line("} else {") + emitter.emit_line('PyErr_SetString(PyExc_TypeError, "traits may not be directly created");') + emitter.emit_line("}") + emitter.emit_line("return NULL;") + emitter.emit_line("}") + + +def generate_traverse_for_class(cl: ClassIR, func_name: str, emitter: Emitter) -> None: + """Emit function that performs cycle GC traversal of an instance.""" + emitter.emit_line("static int") + emitter.emit_line( + f"{func_name}({cl.struct_name(emitter.names)} *self, visitproc visit, void *arg)" + ) + emitter.emit_line("{") + for base in reversed(cl.base_mro): + for attr, rtype in base.attributes.items(): + emitter.emit_gc_visit(f"self->{emitter.attr(attr)}", rtype) + if has_managed_dict(cl, emitter): + emitter.emit_line("PyObject_VisitManagedDict((PyObject *)self, visit, arg);") + elif cl.has_dict: + struct_name = cl.struct_name(emitter.names) + # __dict__ lives right after the struct and __weakref__ lives right after that + emitter.emit_gc_visit( + f"*((PyObject **)((char *)self + sizeof({struct_name})))", object_rprimitive + ) + emitter.emit_gc_visit( + f"*((PyObject **)((char *)self + sizeof(PyObject *) + sizeof({struct_name})))", + object_rprimitive, + ) + emitter.emit_line("return 0;") + emitter.emit_line("}") + + +def generate_clear_for_class(cl: ClassIR, func_name: str, emitter: Emitter) -> None: + emitter.emit_line("static int") + emitter.emit_line(f"{func_name}({cl.struct_name(emitter.names)} *self)") + emitter.emit_line("{") + for base in reversed(cl.base_mro): + for attr, rtype in base.attributes.items(): + emitter.emit_gc_clear(f"self->{emitter.attr(attr)}", rtype) + if has_managed_dict(cl, emitter): + emitter.emit_line("PyObject_ClearManagedDict((PyObject *)self);") + elif cl.has_dict: + struct_name = cl.struct_name(emitter.names) + # __dict__ lives right after the struct and __weakref__ lives right after that + emitter.emit_gc_clear( + f"*((PyObject **)((char *)self + sizeof({struct_name})))", object_rprimitive + ) + emitter.emit_gc_clear( + f"*((PyObject **)((char *)self + sizeof(PyObject *) + sizeof({struct_name})))", + object_rprimitive, + ) + emitter.emit_line("return 0;") + emitter.emit_line("}") + + +def generate_dealloc_for_class( + cl: ClassIR, + dealloc_func_name: str, + clear_func_name: str, + has_tp_finalize: bool, + emitter: Emitter, +) -> None: + emitter.emit_line("static void") + emitter.emit_line(f"{dealloc_func_name}({cl.struct_name(emitter.names)} *self)") + emitter.emit_line("{") + if has_tp_finalize: + emitter.emit_line("PyObject *type, *value, *traceback;") + emitter.emit_line("PyErr_Fetch(&type, &value, &traceback);") + emitter.emit_line("int res = PyObject_CallFinalizerFromDealloc((PyObject *)self);") + # CPython interpreter uses PyErr_WriteUnraisable: https://docs.python.org/3/c-api/exceptions.html#c.PyErr_WriteUnraisable + # However, the message is slightly different due to the way 
mypyc compiles classes. + # CPython interpreter prints: Exception ignored in: + # mypyc prints: Exception ignored in: + emitter.emit_line("if (PyErr_Occurred() != NULL) {") + # Don't untrack instance if error occurred + emitter.emit_line("PyErr_WriteUnraisable((PyObject *)self);") + emitter.emit_line("res = -1;") + emitter.emit_line("}") + emitter.emit_line("PyErr_Restore(type, value, traceback);") + emitter.emit_line("if (res < 0) {") + emitter.emit_line("goto done;") + emitter.emit_line("}") + emitter.emit_line("PyObject_GC_UnTrack(self);") + if cl.reuse_freed_instance: + emit_reuse_dealloc(cl, emitter) + # The trashcan is needed to handle deep recursive deallocations + emitter.emit_line(f"CPy_TRASHCAN_BEGIN(self, {dealloc_func_name})") + emitter.emit_line(f"{clear_func_name}(self);") + emitter.emit_line("Py_TYPE(self)->tp_free((PyObject *)self);") + emitter.emit_line("CPy_TRASHCAN_END(self)") + emitter.emit_line("done: ;") + emitter.emit_line("}") + + +def emit_reuse_dealloc(cl: ClassIR, emitter: Emitter) -> None: + """Emit code to deallocate object by putting it to per-type free list. + + The free "list" currently can have up to one object. + """ + prefix = cl.name_prefix(emitter.names) + emitter.emit_line(f"if ({prefix}_free_instance == NULL) {{") + emitter.emit_line(f"{prefix}_free_instance = self;") + + # Clear attributes and free referenced objects. + + emit_clear_bitmaps(cl, emitter) + + for base in reversed(cl.base_mro): + for attr, rtype in base.attributes.items(): + emitter.emit_reuse_clear(f"self->{emitter.attr(attr)}", rtype) + + emitter.emit_line("return;") + emitter.emit_line("}") + + +def generate_finalize_for_class( + del_method: FuncIR, finalize_func_name: str, emitter: Emitter +) -> None: + emitter.emit_line("static void") + emitter.emit_line(f"{finalize_func_name}(PyObject *self)") + emitter.emit_line("{") + emitter.emit_line( + "{}{}{}(self);".format( + emitter.get_group_prefix(del_method.decl), + NATIVE_PREFIX, + del_method.cname(emitter.names), + ) + ) + emitter.emit_line("}") + + +def generate_methods_table( + cl: ClassIR, name: str, setup_name: str | None, emitter: Emitter +) -> None: + emitter.emit_line(f"static PyMethodDef {name}[] = {{") + if setup_name: + # Store pointer to the setup function so it can be resolved dynamically + # in case of instance creation in __new__. + # CPy_SetupObject expects this method to be the first one in tp_methods. 
+ emitter.emit_line( + f'{{"__internal_mypyc_setup", (PyCFunction){setup_name}, METH_O, NULL}},' + ) + for fn in cl.methods.values(): + if fn.decl.is_prop_setter or fn.decl.is_prop_getter or fn.internal: + continue + emitter.emit_line(f'{{"{fn.name}",') + emitter.emit_line(f" (PyCFunction){PREFIX}{fn.cname(emitter.names)},") + flags = ["METH_FASTCALL", "METH_KEYWORDS"] + if fn.decl.kind == FUNC_STATICMETHOD: + flags.append("METH_STATIC") + elif fn.decl.kind == FUNC_CLASSMETHOD: + flags.append("METH_CLASS") + + doc = native_function_doc_initializer(fn) + emitter.emit_line(" {}, PyDoc_STR({})}},".format(" | ".join(flags), doc)) + + # Provide a default __getstate__ and __setstate__ + if not cl.has_method("__setstate__") and not cl.has_method("__getstate__"): + emitter.emit_lines( + '{"__setstate__", (PyCFunction)CPyPickle_SetState, METH_O, NULL},', + '{"__getstate__", (PyCFunction)CPyPickle_GetState, METH_NOARGS, NULL},', + ) + + emitter.emit_line("{NULL} /* Sentinel */") + emitter.emit_line("};") + + +def generate_side_table_for_class( + cl: ClassIR, name: str, type: str, slots: dict[str, str], emitter: Emitter +) -> str | None: + name = f"{cl.name_prefix(emitter.names)}_{name}" + emitter.emit_line(f"static {type} {name} = {{") + for field, value in slots.items(): + emitter.emit_line(f".{field} = {value},") + emitter.emit_line("};") + return name + + +def generate_getseter_declarations(cl: ClassIR, emitter: Emitter) -> None: + if not cl.is_trait: + for attr in cl.attributes: + emitter.emit_line("static PyObject *") + emitter.emit_line( + "{}({} *self, void *closure);".format( + getter_name(cl, attr, emitter.names), cl.struct_name(emitter.names) + ) + ) + emitter.emit_line("static int") + emitter.emit_line( + "{}({} *self, PyObject *value, void *closure);".format( + setter_name(cl, attr, emitter.names), cl.struct_name(emitter.names) + ) + ) + + for prop, (getter, setter) in cl.properties.items(): + if getter.decl.implicit: + continue + + # Generate getter declaration + emitter.emit_line("static PyObject *") + emitter.emit_line( + "{}({} *self, void *closure);".format( + getter_name(cl, prop, emitter.names), cl.struct_name(emitter.names) + ) + ) + + # Generate property setter declaration if a setter exists + if setter: + emitter.emit_line("static int") + emitter.emit_line( + "{}({} *self, PyObject *value, void *closure);".format( + setter_name(cl, prop, emitter.names), cl.struct_name(emitter.names) + ) + ) + + +def generate_getseters_table(cl: ClassIR, name: str, emitter: Emitter) -> None: + emitter.emit_line(f"static PyGetSetDef {name}[] = {{") + if not cl.is_trait: + for attr in cl.attributes: + emitter.emit_line(f'{{"{attr}",') + emitter.emit_line( + " (getter){}, (setter){},".format( + getter_name(cl, attr, emitter.names), setter_name(cl, attr, emitter.names) + ) + ) + emitter.emit_line(" NULL, NULL},") + for prop, (getter, setter) in cl.properties.items(): + if getter.decl.implicit: + continue + + emitter.emit_line(f'{{"{prop}",') + emitter.emit_line(f" (getter){getter_name(cl, prop, emitter.names)},") + + if setter: + emitter.emit_line(f" (setter){setter_name(cl, prop, emitter.names)},") + emitter.emit_line("NULL, NULL},") + else: + emitter.emit_line("NULL, NULL, NULL},") + + if cl.has_dict: + emitter.emit_line('{"__dict__", PyObject_GenericGetDict, PyObject_GenericSetDict},') + + emitter.emit_line("{NULL} /* Sentinel */") + emitter.emit_line("};") + + +def generate_getseters(cl: ClassIR, emitter: Emitter) -> None: + if not cl.is_trait: + for i, (attr, rtype) in 
enumerate(cl.attributes.items()): + generate_getter(cl, attr, rtype, emitter) + emitter.emit_line("") + generate_setter(cl, attr, rtype, emitter) + if i < len(cl.attributes) - 1: + emitter.emit_line("") + for prop, (getter, setter) in cl.properties.items(): + if getter.decl.implicit: + continue + + rtype = getter.sig.ret_type + emitter.emit_line("") + generate_readonly_getter(cl, prop, rtype, getter, emitter) + if setter: + arg_type = setter.sig.args[1].type + emitter.emit_line("") + generate_property_setter(cl, prop, arg_type, setter, emitter) + + +def generate_getter(cl: ClassIR, attr: str, rtype: RType, emitter: Emitter) -> None: + attr_field = emitter.attr(attr) + emitter.emit_line("static PyObject *") + emitter.emit_line( + "{}({} *self, void *closure)".format( + getter_name(cl, attr, emitter.names), cl.struct_name(emitter.names) + ) + ) + emitter.emit_line("{") + attr_expr = f"self->{attr_field}" + + # HACK: Don't consider refcounted values as always defined, since it's possible to + # access uninitialized values via 'gc.get_objects()'. Accessing non-refcounted + # values is benign. + always_defined = cl.is_always_defined(attr) and not rtype.is_refcounted + + if not always_defined: + emitter.emit_undefined_attr_check(rtype, attr_expr, "==", "self", attr, cl, unlikely=True) + emitter.emit_line("PyErr_SetString(PyExc_AttributeError,") + emitter.emit_line(f' "attribute {repr(attr)} of {repr(cl.name)} undefined");') + emitter.emit_line("return NULL;") + emitter.emit_line("}") + emitter.emit_inc_ref(f"self->{attr_field}", rtype) + emitter.emit_box(f"self->{attr_field}", "retval", rtype, declare_dest=True) + emitter.emit_line("return retval;") + emitter.emit_line("}") + + +def generate_setter(cl: ClassIR, attr: str, rtype: RType, emitter: Emitter) -> None: + attr_field = emitter.attr(attr) + emitter.emit_line("static int") + emitter.emit_line( + "{}({} *self, PyObject *value, void *closure)".format( + setter_name(cl, attr, emitter.names), cl.struct_name(emitter.names) + ) + ) + emitter.emit_line("{") + + deletable = cl.is_deletable(attr) + if not deletable: + emitter.emit_line("if (value == NULL) {") + emitter.emit_line("PyErr_SetString(PyExc_AttributeError,") + emitter.emit_line( + f' "{repr(cl.name)} object attribute {repr(attr)} cannot be deleted");' + ) + emitter.emit_line("return -1;") + emitter.emit_line("}") + + # HACK: Don't consider refcounted values as always defined, since it's possible to + # access uninitialized values via 'gc.get_objects()'. Accessing non-refcounted + # values is benign. 
+ always_defined = cl.is_always_defined(attr) and not rtype.is_refcounted + + if rtype.is_refcounted: + attr_expr = f"self->{attr_field}" + if not always_defined: + emitter.emit_undefined_attr_check(rtype, attr_expr, "!=", "self", attr, cl) + emitter.emit_dec_ref(f"self->{attr_field}", rtype) + if not always_defined: + emitter.emit_line("}") + + if deletable: + emitter.emit_line("if (value != NULL) {") + + if rtype.is_unboxed: + emitter.emit_unbox("value", "tmp", rtype, error=ReturnHandler("-1"), declare_dest=True) + elif is_same_type(rtype, object_rprimitive): + emitter.emit_line("PyObject *tmp = value;") + else: + emitter.emit_cast("value", "tmp", rtype, declare_dest=True) + emitter.emit_lines("if (!tmp)", " return -1;") + emitter.emit_inc_ref("tmp", rtype) + emitter.emit_line(f"self->{attr_field} = tmp;") + if rtype.error_overlap and not always_defined: + emitter.emit_attr_bitmap_set("tmp", "self", rtype, cl, attr) + + if deletable: + emitter.emit_line("} else") + emitter.emit_line(f" self->{attr_field} = {emitter.c_undefined_value(rtype)};") + if rtype.error_overlap: + emitter.emit_attr_bitmap_clear("self", rtype, cl, attr) + emitter.emit_line("return 0;") + emitter.emit_line("}") + + +def generate_readonly_getter( + cl: ClassIR, attr: str, rtype: RType, func_ir: FuncIR, emitter: Emitter +) -> None: + emitter.emit_line("static PyObject *") + emitter.emit_line( + "{}({} *self, void *closure)".format( + getter_name(cl, attr, emitter.names), cl.struct_name(emitter.names) + ) + ) + emitter.emit_line("{") + if rtype.is_unboxed: + emitter.emit_line( + "{}retval = {}{}((PyObject *) self);".format( + emitter.ctype_spaced(rtype), NATIVE_PREFIX, func_ir.cname(emitter.names) + ) + ) + emitter.emit_error_check("retval", rtype, "return NULL;") + emitter.emit_box("retval", "retbox", rtype, declare_dest=True) + emitter.emit_line("return retbox;") + else: + emitter.emit_line( + f"return {NATIVE_PREFIX}{func_ir.cname(emitter.names)}((PyObject *) self);" + ) + emitter.emit_line("}") + + +def generate_property_setter( + cl: ClassIR, attr: str, arg_type: RType, func_ir: FuncIR, emitter: Emitter +) -> None: + emitter.emit_line("static int") + emitter.emit_line( + "{}({} *self, PyObject *value, void *closure)".format( + setter_name(cl, attr, emitter.names), cl.struct_name(emitter.names) + ) + ) + emitter.emit_line("{") + if arg_type.is_unboxed: + emitter.emit_unbox("value", "tmp", arg_type, error=ReturnHandler("-1"), declare_dest=True) + emitter.emit_line( + f"{NATIVE_PREFIX}{func_ir.cname(emitter.names)}((PyObject *) self, tmp);" + ) + else: + emitter.emit_line( + f"{NATIVE_PREFIX}{func_ir.cname(emitter.names)}((PyObject *) self, value);" + ) + emitter.emit_line("return 0;") + emitter.emit_line("}") + + +def has_managed_dict(cl: ClassIR, emitter: Emitter) -> bool: + """Should the class get the Py_TPFLAGS_MANAGED_DICT flag?""" + # On 3.11 and earlier the flag doesn't exist and we use + # tp_dictoffset instead. If a class inherits from Exception, the + # flag conflicts with tp_dictoffset set in the base class. 
+ return ( + emitter.capi_version >= (3, 12) + and cl.has_dict + and cl.builtin_base != "PyBaseExceptionObject" + ) + + +def native_class_doc_initializer(cl: ClassIR) -> str: + init_fn = cl.get_method("__init__") + if init_fn is not None: + text_sig = get_text_signature(init_fn, bound=True) + if text_sig is None: + return "NULL" + text_sig = text_sig.replace("__init__", cl.name, 1) + else: + text_sig = f"{cl.name}()" + docstring = f"{text_sig}\n--\n\n" + return c_string_initializer(docstring.encode("ascii", errors="backslashreplace")) diff --git a/.venv/lib/python3.12/site-packages/mypyc/codegen/emitfunc.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/codegen/emitfunc.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..39c5e5c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/codegen/emitfunc.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/codegen/emitfunc.py b/.venv/lib/python3.12/site-packages/mypyc/codegen/emitfunc.py new file mode 100644 index 0000000..a1e1835 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/codegen/emitfunc.py @@ -0,0 +1,994 @@ +"""Code generation for native function bodies.""" + +from __future__ import annotations + +from typing import Final + +from mypyc.analysis.blockfreq import frequently_executed_blocks +from mypyc.codegen.cstring import c_string_initializer +from mypyc.codegen.emit import DEBUG_ERRORS, Emitter, TracebackAndGotoHandler, c_array_initializer +from mypyc.common import ( + GENERATOR_ATTRIBUTE_PREFIX, + HAVE_IMMORTAL, + MODULE_PREFIX, + NATIVE_PREFIX, + REG_PREFIX, + STATIC_PREFIX, + TYPE_PREFIX, + TYPE_VAR_PREFIX, +) +from mypyc.ir.class_ir import ClassIR +from mypyc.ir.func_ir import ( + FUNC_CLASSMETHOD, + FUNC_STATICMETHOD, + FuncDecl, + FuncIR, + all_values, + get_text_signature, +) +from mypyc.ir.ops import ( + ERR_FALSE, + NAMESPACE_MODULE, + NAMESPACE_STATIC, + NAMESPACE_TYPE, + NAMESPACE_TYPE_VAR, + Assign, + AssignMulti, + BasicBlock, + Box, + Branch, + Call, + CallC, + Cast, + ComparisonOp, + ControlOp, + CString, + DecRef, + Extend, + Float, + FloatComparisonOp, + FloatNeg, + FloatOp, + GetAttr, + GetElementPtr, + Goto, + IncRef, + InitStatic, + Integer, + IntOp, + KeepAlive, + LoadAddress, + LoadErrorValue, + LoadGlobal, + LoadLiteral, + LoadMem, + LoadStatic, + MethodCall, + Op, + OpVisitor, + PrimitiveOp, + RaiseStandardError, + Register, + Return, + SetAttr, + SetElement, + SetMem, + Truncate, + TupleGet, + TupleSet, + Unborrow, + Unbox, + Undef, + Unreachable, + Value, +) +from mypyc.ir.pprint import generate_names_for_ir +from mypyc.ir.rtypes import ( + RArray, + RInstance, + RStruct, + RTuple, + RType, + is_bool_or_bit_rprimitive, + is_int32_rprimitive, + is_int64_rprimitive, + is_int_rprimitive, + is_none_rprimitive, + is_pointer_rprimitive, + is_tagged, +) + + +def native_function_type(fn: FuncIR, emitter: Emitter) -> str: + args = ", ".join(emitter.ctype(arg.type) for arg in fn.args) or "void" + ret = emitter.ctype(fn.ret_type) + return f"{ret} (*)({args})" + + +def native_function_header(fn: FuncDecl, emitter: Emitter) -> str: + args = [] + for arg in fn.sig.args: + args.append(f"{emitter.ctype_spaced(arg.type)}{REG_PREFIX}{arg.name}") + + return "{ret_type}{name}({args})".format( + ret_type=emitter.ctype_spaced(fn.sig.ret_type), + name=emitter.native_function_name(fn), + args=", ".join(args) or "void", + ) + + +def native_function_doc_initializer(func: FuncIR) -> str: + text_sig = get_text_signature(func) + if 
text_sig is None: + return "NULL" + docstring = f"{text_sig}\n--\n\n" + return c_string_initializer(docstring.encode("ascii", errors="backslashreplace")) + + +def generate_native_function( + fn: FuncIR, emitter: Emitter, source_path: str, module_name: str +) -> None: + declarations = Emitter(emitter.context) + names = generate_names_for_ir(fn.arg_regs, fn.blocks) + body = Emitter(emitter.context, names) + visitor = FunctionEmitterVisitor(body, declarations, source_path, module_name) + + declarations.emit_line(f"{native_function_header(fn.decl, emitter)} {{") + body.indent() + + for r in all_values(fn.arg_regs, fn.blocks): + if isinstance(r.type, RTuple): + emitter.declare_tuple_struct(r.type) + if isinstance(r.type, RArray): + continue # Special: declared on first assignment + + if r in fn.arg_regs: + continue # Skip the arguments + + ctype = emitter.ctype_spaced(r.type) + init = "" + declarations.emit_line( + "{ctype}{prefix}{name}{init};".format( + ctype=ctype, prefix=REG_PREFIX, name=names[r], init=init + ) + ) + + # Before we emit the blocks, give them all labels + blocks = fn.blocks + for i, block in enumerate(blocks): + block.label = i + + # Find blocks that are never jumped to or are only jumped to from the + # block directly above it. This allows for more labels and gotos to be + # eliminated during code generation. + for block in fn.blocks: + terminator = block.terminator + assert isinstance(terminator, ControlOp), terminator + + for target in terminator.targets(): + is_next_block = target.label == block.label + 1 + + # Always emit labels for GetAttr error checks since the emit code that + # generates them will add instructions between the branch and the + # next label, causing the label to be wrongly removed. A better + # solution would be to change the IR so that it adds a basic block + # in between the calls. 
+ is_problematic_op = isinstance(terminator, Branch) and any( + isinstance(s, GetAttr) for s in terminator.sources() + ) + + if not is_next_block or is_problematic_op: + fn.blocks[target.label].referenced = True + + common = frequently_executed_blocks(fn.blocks[0]) + + for i in range(len(blocks)): + block = blocks[i] + visitor.rare = block not in common + next_block = None + if i + 1 < len(blocks): + next_block = blocks[i + 1] + body.emit_label(block) + visitor.next_block = next_block + + ops = block.ops + visitor.ops = ops + visitor.op_index = 0 + while visitor.op_index < len(ops): + ops[visitor.op_index].accept(visitor) + visitor.op_index += 1 + + body.emit_line("}") + + emitter.emit_from_emitter(declarations) + emitter.emit_from_emitter(body) + + +class FunctionEmitterVisitor(OpVisitor[None]): + def __init__( + self, emitter: Emitter, declarations: Emitter, source_path: str, module_name: str + ) -> None: + self.emitter = emitter + self.names = emitter.names + self.declarations = declarations + self.source_path = source_path + self.module_name = module_name + self.literals = emitter.context.literals + self.rare = False + # Next basic block to be processed after the current one (if any), set by caller + self.next_block: BasicBlock | None = None + # Ops in the basic block currently being processed, set by caller + self.ops: list[Op] = [] + # Current index within ops; visit methods can increment this to skip/merge ops + self.op_index = 0 + + def temp_name(self) -> str: + return self.emitter.temp_name() + + def visit_goto(self, op: Goto) -> None: + if op.label is not self.next_block: + self.emit_line("goto %s;" % self.label(op.label)) + + def error_value_check(self, value: Value, compare: str) -> str: + typ = value.type + if isinstance(typ, RTuple): + # TODO: What about empty tuple? + return self.emitter.tuple_undefined_check_cond( + typ, self.reg(value), self.c_error_value, compare + ) + else: + return f"{self.reg(value)} {compare} {self.c_error_value(typ)}" + + def visit_branch(self, op: Branch) -> None: + true, false = op.true, op.false + negated = op.negated + negated_rare = False + if true is self.next_block and op.traceback_entry is None: + # Switch true/false since it avoids an else block. + true, false = false, true + negated = not negated + negated_rare = True + + neg = "!" 
if negated else "" + cond = "" + if op.op == Branch.BOOL: + expr_result = self.reg(op.value) + cond = f"{neg}{expr_result}" + elif op.op == Branch.IS_ERROR: + compare = "!=" if negated else "==" + cond = self.error_value_check(op.value, compare) + else: + assert False, "Invalid branch" + + # For error checks, tell the compiler the branch is unlikely + if op.traceback_entry is not None or op.rare: + if not negated_rare: + cond = f"unlikely({cond})" + else: + cond = f"likely({cond})" + + if false is self.next_block: + if op.traceback_entry is None: + if true is not self.next_block: + self.emit_line(f"if ({cond}) goto {self.label(true)};") + else: + self.emit_line(f"if ({cond}) {{") + self.emit_traceback(op) + self.emit_lines("goto %s;" % self.label(true), "}") + else: + self.emit_line(f"if ({cond}) {{") + self.emit_traceback(op) + + if true is not self.next_block: + self.emit_line("goto %s;" % self.label(true)) + + self.emit_lines("} else", " goto %s;" % self.label(false)) + + def visit_return(self, op: Return) -> None: + value_str = self.reg(op.value) + self.emit_line("return %s;" % value_str) + + def visit_tuple_set(self, op: TupleSet) -> None: + dest = self.reg(op) + tuple_type = op.tuple_type + self.emitter.declare_tuple_struct(tuple_type) + if len(op.items) == 0: # empty tuple + self.emit_line(f"{dest}.empty_struct_error_flag = 0;") + else: + for i, item in enumerate(op.items): + self.emit_line(f"{dest}.f{i} = {self.reg(item)};") + + def visit_assign(self, op: Assign) -> None: + dest = self.reg(op.dest) + src = self.reg(op.src) + # clang whines about self assignment (which we might generate + # for some casts), so don't emit it. + if dest != src: + # We sometimes assign from an integer prepresentation of a pointer + # to a real pointer, and C compilers insist on a cast. + if op.src.type.is_unboxed and not op.dest.type.is_unboxed: + src = f"(void *){src}" + self.emit_line(f"{dest} = {src};") + + def visit_assign_multi(self, op: AssignMulti) -> None: + typ = op.dest.type + assert isinstance(typ, RArray), typ + dest = self.reg(op.dest) + # RArray values can only be assigned to once, so we can always + # declare them on initialization. + self.emit_line( + "%s%s[%d] = %s;" + % ( + self.emitter.ctype_spaced(typ.item_type), + dest, + len(op.src), + c_array_initializer([self.reg(s) for s in op.src], indented=True), + ) + ) + + def visit_load_error_value(self, op: LoadErrorValue) -> None: + if isinstance(op.type, RTuple): + values = [self.c_undefined_value(item) for item in op.type.types] + tmp = self.temp_name() + self.emit_line("{} {} = {{ {} }};".format(self.ctype(op.type), tmp, ", ".join(values))) + self.emit_line(f"{self.reg(op)} = {tmp};") + else: + self.emit_line(f"{self.reg(op)} = {self.c_error_value(op.type)};") + + def visit_load_literal(self, op: LoadLiteral) -> None: + index = self.literals.literal_index(op.value) + if not is_int_rprimitive(op.type): + self.emit_line("%s = CPyStatics[%d];" % (self.reg(op), index), ann=op.value) + else: + self.emit_line( + "%s = (CPyTagged)CPyStatics[%d] | 1;" % (self.reg(op), index), ann=op.value + ) + + def get_attr_expr(self, obj: str, op: GetAttr | SetAttr, decl_cl: ClassIR) -> str: + """Generate attribute accessor for normal (non-property) access. + + This either has a form like obj->attr_name for attributes defined in non-trait + classes, and *(obj + attr_offset) for attributes defined by traits. We also + insert all necessary C casts here. 
+ """ + cast = f"({op.class_type.struct_name(self.emitter.names)} *)" + if decl_cl.is_trait and op.class_type.class_ir.is_trait: + # For pure trait access find the offset first, offsets + # are ordered by attribute position in the cl.attributes dict. + # TODO: pre-calculate the mapping to make this faster. + trait_attr_index = list(decl_cl.attributes).index(op.attr) + # TODO: reuse these names somehow? + offset = self.emitter.temp_name() + self.declarations.emit_line(f"size_t {offset};") + self.emitter.emit_line( + "{} = {};".format( + offset, + "CPy_FindAttrOffset({}, {}, {})".format( + self.emitter.type_struct_name(decl_cl), + f"({cast}{obj})->vtable", + trait_attr_index, + ), + ) + ) + attr_cast = f"({self.ctype(op.class_type.attr_type(op.attr))} *)" + return f"*{attr_cast}((char *){obj} + {offset})" + else: + # Cast to something non-trait. Note: for this to work, all struct + # members for non-trait classes must obey monotonic linear growth. + if op.class_type.class_ir.is_trait: + assert not decl_cl.is_trait + cast = f"({decl_cl.struct_name(self.emitter.names)} *)" + return f"({cast}{obj})->{self.emitter.attr(op.attr)}" + + def visit_get_attr(self, op: GetAttr) -> None: + if op.allow_error_value: + self.get_attr_with_allow_error_value(op) + return + dest = self.reg(op) + obj = self.reg(op.obj) + rtype = op.class_type + cl = rtype.class_ir + attr_rtype, decl_cl = cl.attr_details(op.attr) + prefer_method = cl.is_trait and attr_rtype.error_overlap + if cl.get_method(op.attr, prefer_method=prefer_method): + # Properties are essentially methods, so use vtable access for them. + if cl.is_method_final(op.attr): + self.emit_method_call(f"{dest} = ", op.obj, op.attr, []) + else: + version = "_TRAIT" if cl.is_trait else "" + self.emit_line( + "%s = CPY_GET_ATTR%s(%s, %s, %d, %s, %s); /* %s */" + % ( + dest, + version, + obj, + self.emitter.type_struct_name(rtype.class_ir), + rtype.getter_index(op.attr), + rtype.struct_name(self.names), + self.ctype(rtype.attr_type(op.attr)), + op.attr, + ) + ) + else: + # Otherwise, use direct or offset struct access. + attr_expr = self.get_attr_expr(obj, op, decl_cl) + self.emitter.emit_line(f"{dest} = {attr_expr};") + always_defined = cl.is_always_defined(op.attr) + merged_branch = None + if not always_defined: + self.emitter.emit_undefined_attr_check( + attr_rtype, dest, "==", obj, op.attr, cl, unlikely=True + ) + branch = self.next_branch() + if branch is not None: + if ( + branch.value is op + and branch.op == Branch.IS_ERROR + and branch.traceback_entry is not None + and not branch.negated + ): + # Generate code for the following branch here to avoid + # redundant branches in the generated code. 
+ self.emit_attribute_error(branch, cl.name, op.attr) + self.emit_line("goto %s;" % self.label(branch.true)) + merged_branch = branch + self.emitter.emit_line("}") + if not merged_branch: + exc_class = "PyExc_AttributeError" + self.emitter.emit_line( + 'PyErr_SetString({}, "attribute {} of {} undefined");'.format( + exc_class, + repr(op.attr.removeprefix(GENERATOR_ATTRIBUTE_PREFIX)), + repr(cl.name), + ) + ) + + if attr_rtype.is_refcounted and not op.is_borrowed: + if not merged_branch and not always_defined: + self.emitter.emit_line("} else {") + self.emitter.emit_inc_ref(dest, attr_rtype) + if merged_branch: + if merged_branch.false is not self.next_block: + self.emit_line("goto %s;" % self.label(merged_branch.false)) + self.op_index += 1 + elif not always_defined: + self.emitter.emit_line("}") + + def get_attr_with_allow_error_value(self, op: GetAttr) -> None: + """Handle GetAttr with allow_error_value=True. + + This allows NULL or other error value without raising AttributeError. + """ + dest = self.reg(op) + obj = self.reg(op.obj) + rtype = op.class_type + cl = rtype.class_ir + attr_rtype, decl_cl = cl.attr_details(op.attr) + + # Direct struct access without NULL check + attr_expr = self.get_attr_expr(obj, op, decl_cl) + self.emitter.emit_line(f"{dest} = {attr_expr};") + + # Only emit inc_ref if not NULL + if attr_rtype.is_refcounted and not op.is_borrowed: + check = self.error_value_check(op, "!=") + self.emitter.emit_line(f"if ({check}) {{") + self.emitter.emit_inc_ref(dest, attr_rtype) + self.emitter.emit_line("}") + + def next_branch(self) -> Branch | None: + if self.op_index + 1 < len(self.ops): + next_op = self.ops[self.op_index + 1] + if isinstance(next_op, Branch): + return next_op + return None + + def visit_set_attr(self, op: SetAttr) -> None: + if op.error_kind == ERR_FALSE: + dest = self.reg(op) + obj = self.reg(op.obj) + src = self.reg(op.src) + rtype = op.class_type + cl = rtype.class_ir + attr_rtype, decl_cl = cl.attr_details(op.attr) + if cl.get_method(op.attr): + # Again, use vtable access for properties... + assert not op.is_init and op.error_kind == ERR_FALSE, "%s %d %d %s" % ( + op.attr, + op.is_init, + op.error_kind, + rtype, + ) + version = "_TRAIT" if cl.is_trait else "" + self.emit_line( + "%s = CPY_SET_ATTR%s(%s, %s, %d, %s, %s, %s); /* %s */" + % ( + dest, + version, + obj, + self.emitter.type_struct_name(rtype.class_ir), + rtype.setter_index(op.attr), + src, + rtype.struct_name(self.names), + self.ctype(rtype.attr_type(op.attr)), + op.attr, + ) + ) + else: + # ...and struct access for normal attributes. + attr_expr = self.get_attr_expr(obj, op, decl_cl) + if not op.is_init and attr_rtype.is_refcounted: + # This is not an initialization (where we know that the attribute was + # previously undefined), so decref the old value. + always_defined = cl.is_always_defined(op.attr) + if not always_defined: + self.emitter.emit_undefined_attr_check( + attr_rtype, attr_expr, "!=", obj, op.attr, cl + ) + self.emitter.emit_dec_ref(attr_expr, attr_rtype) + if not always_defined: + self.emitter.emit_line("}") + elif attr_rtype.error_overlap and not cl.is_always_defined(op.attr): + # If there is overlap with the error value, update bitmap to mark + # attribute as defined. 
+ self.emitter.emit_attr_bitmap_set(src, obj, attr_rtype, cl, op.attr) + + # This steals the reference to src, so we don't need to increment the arg + self.emitter.emit_line(f"{attr_expr} = {src};") + if op.error_kind == ERR_FALSE: + self.emitter.emit_line(f"{dest} = 1;") + + PREFIX_MAP: Final = { + NAMESPACE_STATIC: STATIC_PREFIX, + NAMESPACE_TYPE: TYPE_PREFIX, + NAMESPACE_MODULE: MODULE_PREFIX, + NAMESPACE_TYPE_VAR: TYPE_VAR_PREFIX, + } + + def visit_load_static(self, op: LoadStatic) -> None: + dest = self.reg(op) + prefix = self.PREFIX_MAP[op.namespace] + name = self.emitter.static_name(op.identifier, op.module_name, prefix) + if op.namespace == NAMESPACE_TYPE: + name = "(PyObject *)%s" % name + self.emit_line(f"{dest} = {name};", ann=op.ann) + + def visit_init_static(self, op: InitStatic) -> None: + value = self.reg(op.value) + prefix = self.PREFIX_MAP[op.namespace] + name = self.emitter.static_name(op.identifier, op.module_name, prefix) + if op.namespace == NAMESPACE_TYPE: + value = "(PyTypeObject *)%s" % value + self.emit_line(f"{name} = {value};") + self.emit_inc_ref(name, op.value.type) + + def visit_tuple_get(self, op: TupleGet) -> None: + dest = self.reg(op) + src = self.reg(op.src) + self.emit_line(f"{dest} = {src}.f{op.index};") + if not op.is_borrowed: + self.emit_inc_ref(dest, op.type) + + def get_dest_assign(self, dest: Value) -> str: + if not dest.is_void: + return self.reg(dest) + " = " + else: + return "" + + def visit_call(self, op: Call) -> None: + """Call native function.""" + dest = self.get_dest_assign(op) + args = ", ".join(self.reg(arg) for arg in op.args) + lib = self.emitter.get_group_prefix(op.fn) + cname = op.fn.cname(self.names) + self.emit_line(f"{dest}{lib}{NATIVE_PREFIX}{cname}({args});") + + def visit_method_call(self, op: MethodCall) -> None: + """Call native method.""" + dest = self.get_dest_assign(op) + self.emit_method_call(dest, op.obj, op.method, op.args) + + def emit_method_call(self, dest: str, op_obj: Value, name: str, op_args: list[Value]) -> None: + obj = self.reg(op_obj) + rtype = op_obj.type + assert isinstance(rtype, RInstance), rtype + class_ir = rtype.class_ir + method = rtype.class_ir.get_method(name) + assert method is not None + + # Can we call the method directly, bypassing vtable? + is_direct = class_ir.is_method_final(name) + + # The first argument gets omitted for static methods and + # turned into the class for class methods + obj_args = ( + [] + if method.decl.kind == FUNC_STATICMETHOD + else [f"(PyObject *)Py_TYPE({obj})"] if method.decl.kind == FUNC_CLASSMETHOD else [obj] + ) + args = ", ".join(obj_args + [self.reg(arg) for arg in op_args]) + mtype = native_function_type(method, self.emitter) + version = "_TRAIT" if rtype.class_ir.is_trait else "" + if is_direct: + # Directly call method, without going through the vtable. + lib = self.emitter.get_group_prefix(method.decl) + self.emit_line(f"{dest}{lib}{NATIVE_PREFIX}{method.cname(self.names)}({args});") + else: + # Call using vtable. 
+ method_idx = rtype.method_index(name) + self.emit_line( + "{}CPY_GET_METHOD{}({}, {}, {}, {}, {})({}); /* {} */".format( + dest, + version, + obj, + self.emitter.type_struct_name(rtype.class_ir), + method_idx, + rtype.struct_name(self.names), + mtype, + args, + name, + ) + ) + + def visit_inc_ref(self, op: IncRef) -> None: + if ( + isinstance(op.src, Box) + and (is_none_rprimitive(op.src.src.type) or is_bool_or_bit_rprimitive(op.src.src.type)) + and HAVE_IMMORTAL + ): + # On Python 3.12+, None/True/False are immortal, and we can skip inc ref + return + + if isinstance(op.src, LoadLiteral) and HAVE_IMMORTAL: + value = op.src.value + # We can skip inc ref for immortal literals on Python 3.12+ + if type(value) is int and -5 <= value <= 256: + # Small integers are immortal + return + + src = self.reg(op.src) + self.emit_inc_ref(src, op.src.type) + + def visit_dec_ref(self, op: DecRef) -> None: + src = self.reg(op.src) + self.emit_dec_ref(src, op.src.type, is_xdec=op.is_xdec) + + def visit_box(self, op: Box) -> None: + self.emitter.emit_box(self.reg(op.src), self.reg(op), op.src.type, can_borrow=True) + + def visit_cast(self, op: Cast) -> None: + if op.is_unchecked and op.is_borrowed: + self.emit_line(f"{self.reg(op)} = {self.reg(op.src)};") + return + branch = self.next_branch() + handler = None + if branch is not None: + if ( + branch.value is op + and branch.op == Branch.IS_ERROR + and branch.traceback_entry is not None + and not branch.negated + and branch.false is self.next_block + ): + # Generate code also for the following branch here to avoid + # redundant branches in the generated code. + handler = TracebackAndGotoHandler( + self.label(branch.true), + self.source_path, + self.module_name, + branch.traceback_entry, + ) + self.op_index += 1 + + self.emitter.emit_cast( + self.reg(op.src), self.reg(op), op.type, src_type=op.src.type, error=handler + ) + + def visit_unbox(self, op: Unbox) -> None: + self.emitter.emit_unbox(self.reg(op.src), self.reg(op), op.type) + + def visit_unreachable(self, op: Unreachable) -> None: + self.emitter.emit_line("CPy_Unreachable();") + + def visit_raise_standard_error(self, op: RaiseStandardError) -> None: + # TODO: Better escaping of backspaces and such + if op.value is not None: + if isinstance(op.value, str): + message = op.value.replace('"', '\\"') + self.emitter.emit_line(f'PyErr_SetString(PyExc_{op.class_name}, "{message}");') + elif isinstance(op.value, Value): + self.emitter.emit_line( + "PyErr_SetObject(PyExc_{}, {});".format( + op.class_name, self.emitter.reg(op.value) + ) + ) + else: + assert False, "op value type must be either str or Value" + else: + self.emitter.emit_line(f"PyErr_SetNone(PyExc_{op.class_name});") + self.emitter.emit_line(f"{self.reg(op)} = 0;") + + def visit_call_c(self, op: CallC) -> None: + if op.is_void: + dest = "" + else: + dest = self.get_dest_assign(op) + args = ", ".join(self.reg(arg) for arg in op.args) + self.emitter.emit_line(f"{dest}{op.function_name}({args});") + + def visit_primitive_op(self, op: PrimitiveOp) -> None: + raise RuntimeError( + f"unexpected PrimitiveOp {op.desc.name}: they must be lowered before codegen" + ) + + def visit_truncate(self, op: Truncate) -> None: + dest = self.reg(op) + value = self.reg(op.src) + # for C backend the generated code are straight assignments + self.emit_line(f"{dest} = {value};") + + def visit_extend(self, op: Extend) -> None: + dest = self.reg(op) + value = self.reg(op.src) + if op.signed: + src_cast = self.emit_signed_int_cast(op.src.type) + else: + src_cast = 
self.emit_unsigned_int_cast(op.src.type) + self.emit_line(f"{dest} = {src_cast}{value};") + + def visit_load_global(self, op: LoadGlobal) -> None: + dest = self.reg(op) + self.emit_line(f"{dest} = {op.identifier};", ann=op.ann) + + def visit_int_op(self, op: IntOp) -> None: + dest = self.reg(op) + lhs = self.reg(op.lhs) + rhs = self.reg(op.rhs) + if op.op == IntOp.RIGHT_SHIFT: + # Signed right shift + lhs = self.emit_signed_int_cast(op.lhs.type) + lhs + rhs = self.emit_signed_int_cast(op.rhs.type) + rhs + self.emit_line(f"{dest} = {lhs} {op.op_str[op.op]} {rhs};") + + def visit_comparison_op(self, op: ComparisonOp) -> None: + dest = self.reg(op) + lhs = self.reg(op.lhs) + rhs = self.reg(op.rhs) + lhs_cast = "" + rhs_cast = "" + if op.op in (ComparisonOp.SLT, ComparisonOp.SGT, ComparisonOp.SLE, ComparisonOp.SGE): + # Always signed comparison op + lhs_cast = self.emit_signed_int_cast(op.lhs.type) + rhs_cast = self.emit_signed_int_cast(op.rhs.type) + elif op.op in (ComparisonOp.ULT, ComparisonOp.UGT, ComparisonOp.ULE, ComparisonOp.UGE): + # Always unsigned comparison op + lhs_cast = self.emit_unsigned_int_cast(op.lhs.type) + rhs_cast = self.emit_unsigned_int_cast(op.rhs.type) + elif isinstance(op.lhs, Integer) and op.lhs.value < 0: + # Force signed ==/!= with negative operand + rhs_cast = self.emit_signed_int_cast(op.rhs.type) + elif isinstance(op.rhs, Integer) and op.rhs.value < 0: + # Force signed ==/!= with negative operand + lhs_cast = self.emit_signed_int_cast(op.lhs.type) + self.emit_line(f"{dest} = {lhs_cast}{lhs} {op.op_str[op.op]} {rhs_cast}{rhs};") + + def visit_float_op(self, op: FloatOp) -> None: + dest = self.reg(op) + lhs = self.reg(op.lhs) + rhs = self.reg(op.rhs) + if op.op != FloatOp.MOD: + self.emit_line(f"{dest} = {lhs} {op.op_str[op.op]} {rhs};") + else: + # TODO: This may set errno as a side effect, that is a little sketchy. + self.emit_line(f"{dest} = fmod({lhs}, {rhs});") + + def visit_float_neg(self, op: FloatNeg) -> None: + dest = self.reg(op) + src = self.reg(op.src) + self.emit_line(f"{dest} = -{src};") + + def visit_float_comparison_op(self, op: FloatComparisonOp) -> None: + dest = self.reg(op) + lhs = self.reg(op.lhs) + rhs = self.reg(op.rhs) + self.emit_line(f"{dest} = {lhs} {op.op_str[op.op]} {rhs};") + + def visit_load_mem(self, op: LoadMem) -> None: + dest = self.reg(op) + src = self.reg(op.src) + # TODO: we shouldn't dereference to type that are pointer type so far + type = self.ctype(op.type) + self.emit_line(f"{dest} = *({type} *){src};") + if not op.is_borrowed: + self.emit_inc_ref(dest, op.type) + + def visit_set_mem(self, op: SetMem) -> None: + dest = self.reg(op.dest) + src = self.reg(op.src) + dest_type = self.ctype(op.dest_type) + # clang whines about self assignment (which we might generate + # for some casts), so don't emit it. + if dest != src: + self.emit_line(f"*({dest_type} *){dest} = {src};") + + def visit_get_element_ptr(self, op: GetElementPtr) -> None: + dest = self.reg(op) + src = self.reg(op.src) + # TODO: support tuple type + assert isinstance(op.src_type, RStruct), op.src_type + assert op.field in op.src_type.names, "Invalid field name." + self.emit_line( + "{} = ({})&(({} *){})->{};".format( + dest, op.type._ctype, op.src_type.name, src, op.field + ) + ) + + def visit_set_element(self, op: SetElement) -> None: + dest = self.reg(op) + item = self.reg(op.item) + field = op.field + if isinstance(op.src, Undef): + # First assignment to an undefined struct is trivial. 
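+            # The remaining fields of an Undef source contain garbage anyway, so
+            # there is nothing to copy; writing just this one field is enough.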
+ self.emit_line(f"{dest}.{field} = {item};") + else: + # In the general case create a copy of the struct with a single + # item modified. + # + # TODO: Can we do better if only a subset of fields are initialized? + # TODO: Make this less verbose in the common case + # TODO: Support tuples (or use RStruct for tuples)? + src = self.reg(op.src) + src_type = op.src.type + assert isinstance(src_type, RStruct), src_type + init_items = [] + for n in src_type.names: + if n != field: + init_items.append(f"{src}.{n}") + else: + init_items.append(item) + self.emit_line(f"{dest} = ({self.ctype(src_type)}) {{ {', '.join(init_items)} }};") + + def visit_load_address(self, op: LoadAddress) -> None: + typ = op.type + dest = self.reg(op) + if isinstance(op.src, Register): + src = self.reg(op.src) + elif isinstance(op.src, LoadStatic): + prefix = self.PREFIX_MAP[op.src.namespace] + src = self.emitter.static_name(op.src.identifier, op.src.module_name, prefix) + else: + src = op.src + self.emit_line(f"{dest} = ({typ._ctype})&{src};") + + def visit_keep_alive(self, op: KeepAlive) -> None: + # This is a no-op. + pass + + def visit_unborrow(self, op: Unborrow) -> None: + # This is a no-op that propagates the source value. + dest = self.reg(op) + src = self.reg(op.src) + self.emit_line(f"{dest} = {src};") + + # Helpers + + def label(self, label: BasicBlock) -> str: + return self.emitter.label(label) + + def reg(self, reg: Value) -> str: + if isinstance(reg, Integer): + val = reg.value + if val == 0 and is_pointer_rprimitive(reg.type): + return "NULL" + s = str(val) + if val >= (1 << 31): + # Avoid overflowing signed 32-bit int + if val >= (1 << 63): + s += "ULL" + else: + s += "LL" + elif val == -(1 << 63): + # Avoid overflowing C integer literal + s = "(-9223372036854775807LL - 1)" + elif val <= -(1 << 31): + s += "LL" + return s + elif isinstance(reg, Float): + r = repr(reg.value) + if r == "inf": + return "INFINITY" + elif r == "-inf": + return "-INFINITY" + elif r == "nan": + return "NAN" + return r + elif isinstance(reg, CString): + return '"' + encode_c_string_literal(reg.value) + '"' + else: + return self.emitter.reg(reg) + + def ctype(self, rtype: RType) -> str: + return self.emitter.ctype(rtype) + + def c_error_value(self, rtype: RType) -> str: + return self.emitter.c_error_value(rtype) + + def c_undefined_value(self, rtype: RType) -> str: + return self.emitter.c_undefined_value(rtype) + + def emit_line(self, line: str, *, ann: object = None) -> None: + self.emitter.emit_line(line, ann=ann) + + def emit_lines(self, *lines: str) -> None: + self.emitter.emit_lines(*lines) + + def emit_inc_ref(self, dest: str, rtype: RType) -> None: + self.emitter.emit_inc_ref(dest, rtype, rare=self.rare) + + def emit_dec_ref(self, dest: str, rtype: RType, is_xdec: bool) -> None: + self.emitter.emit_dec_ref(dest, rtype, is_xdec=is_xdec, rare=self.rare) + + def emit_declaration(self, line: str) -> None: + self.declarations.emit_line(line) + + def emit_traceback(self, op: Branch) -> None: + if op.traceback_entry is not None: + self.emitter.emit_traceback(self.source_path, self.module_name, op.traceback_entry) + + def emit_attribute_error(self, op: Branch, class_name: str, attr: str) -> None: + assert op.traceback_entry is not None + globals_static = self.emitter.static_name("globals", self.module_name) + self.emit_line( + 'CPy_AttributeError("%s", "%s", "%s", "%s", %d, %s);' + % ( + self.source_path.replace("\\", "\\\\"), + op.traceback_entry[0], + class_name, + attr.removeprefix(GENERATOR_ATTRIBUTE_PREFIX), + 
op.traceback_entry[1], + globals_static, + ) + ) + if DEBUG_ERRORS: + self.emit_line('assert(PyErr_Occurred() != NULL && "failure w/o err!");') + + def emit_signed_int_cast(self, type: RType) -> str: + if is_tagged(type): + return "(Py_ssize_t)" + else: + return "" + + def emit_unsigned_int_cast(self, type: RType) -> str: + if is_int32_rprimitive(type): + return "(uint32_t)" + elif is_int64_rprimitive(type): + return "(uint64_t)" + else: + return "" + + +_translation_table: Final[dict[int, str]] = {} + + +def encode_c_string_literal(b: bytes) -> str: + """Convert bytestring to the C string literal syntax (with necessary escaping). + + For example, b'foo\n' gets converted to 'foo\\n' (note that double quotes are not added). + """ + if not _translation_table: + # Initialize the translation table on the first call. + d = { + ord("\n"): "\\n", + ord("\r"): "\\r", + ord("\t"): "\\t", + ord('"'): '\\"', + ord("\\"): "\\\\", + } + for i in range(256): + if i not in d: + if i < 32 or i >= 127: + d[i] = "\\x%.2x" % i + else: + d[i] = chr(i) + _translation_table.update(str.maketrans(d)) + return b.decode("latin1").translate(_translation_table) diff --git a/.venv/lib/python3.12/site-packages/mypyc/codegen/emitmodule.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/codegen/emitmodule.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..2e156a7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/codegen/emitmodule.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/codegen/emitmodule.py b/.venv/lib/python3.12/site-packages/mypyc/codegen/emitmodule.py new file mode 100644 index 0000000..8dd9f75 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/codegen/emitmodule.py @@ -0,0 +1,1314 @@ +"""Generate C code for a Python C extension module from Python source code.""" + +# FIXME: Basically nothing in this file operates on the level of a +# single module and it should be renamed. 
+ +from __future__ import annotations + +import json +import os +import sys +from collections.abc import Iterable +from typing import Optional, TypeVar + +from mypy.build import ( + BuildResult, + BuildSource, + State, + build, + compute_hash, + create_metastore, + get_cache_names, + sorted_components, +) +from mypy.errors import CompileError +from mypy.fscache import FileSystemCache +from mypy.nodes import MypyFile +from mypy.options import Options +from mypy.plugin import Plugin, ReportConfigContext +from mypy.util import hash_digest, json_dumps +from mypyc.analysis.capsule_deps import find_implicit_capsule_dependencies +from mypyc.codegen.cstring import c_string_initializer +from mypyc.codegen.emit import Emitter, EmitterContext, HeaderDeclaration, c_array_initializer +from mypyc.codegen.emitclass import generate_class, generate_class_reuse, generate_class_type_decl +from mypyc.codegen.emitfunc import ( + generate_native_function, + native_function_doc_initializer, + native_function_header, +) +from mypyc.codegen.emitwrapper import ( + generate_legacy_wrapper_function, + generate_wrapper_function, + legacy_wrapper_function_header, + wrapper_function_header, +) +from mypyc.codegen.literals import Literals +from mypyc.common import ( + IS_FREE_THREADED, + MODULE_PREFIX, + PREFIX, + RUNTIME_C_FILES, + TOP_LEVEL_NAME, + TYPE_VAR_PREFIX, + shared_lib_name, + short_id_from_name, +) +from mypyc.errors import Errors +from mypyc.ir.func_ir import FuncIR +from mypyc.ir.module_ir import ModuleIR, ModuleIRs, deserialize_modules +from mypyc.ir.ops import DeserMaps, LoadLiteral +from mypyc.ir.rtypes import RType +from mypyc.irbuild.main import build_ir +from mypyc.irbuild.mapper import Mapper +from mypyc.irbuild.prepare import load_type_map +from mypyc.namegen import NameGenerator, exported_name +from mypyc.options import CompilerOptions +from mypyc.transform.copy_propagation import do_copy_propagation +from mypyc.transform.exceptions import insert_exception_handling +from mypyc.transform.flag_elimination import do_flag_elimination +from mypyc.transform.log_trace import insert_event_trace_logging +from mypyc.transform.lower import lower_ir +from mypyc.transform.refcount import insert_ref_count_opcodes +from mypyc.transform.spill import insert_spills +from mypyc.transform.uninit import insert_uninit_checks + +# All the modules being compiled are divided into "groups". A group +# is a set of modules that are placed into the same shared library. +# Two common configurations are that every module is placed in a group +# by itself (fully separate compilation) and that every module is +# placed in the same group (fully whole-program compilation), but we +# support finer-grained control of the group as well. +# +# In fully whole-program compilation, we will generate N+1 extension +# modules: one shim per module and one shared library containing all +# the actual code. +# In fully separate compilation, we (unfortunately) will generate 2*N +# extension modules: one shim per module and also one library containing +# each module's actual code. (This might be fixable in the future, +# but allows a clean separation between setup of the export tables +# (see generate_export_table) and running module top levels.) +# +# A group is represented as a list of BuildSources containing all of +# its modules along with the name of the group. (Which can be None +# only if we are compiling only a single group with a single file in it +# and not using shared libraries). 
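+#
+# As a rough illustration (module and group names here are made up), compiling
+# modules a and b whole-program into one group versus separately would use
+# groups shaped like:
+#
+#     whole_program: Groups = [([BuildSource("a.py", "a"), BuildSource("b.py", "b")], "proj")]
+#     separate: Groups = [([BuildSource("a.py", "a")], "a"), ([BuildSource("b.py", "b")], "b")]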
+Group = tuple[list[BuildSource], Optional[str]] +Groups = list[Group] + +# A list of (file name, file contents) pairs. +FileContents = list[tuple[str, str]] + + +class MarkedDeclaration: + """Add a mark, useful for topological sort.""" + + def __init__(self, declaration: HeaderDeclaration, mark: bool) -> None: + self.declaration = declaration + self.mark = False + + +class MypycPlugin(Plugin): + """Plugin for making mypyc interoperate properly with mypy incremental mode. + + Basically the point of this plugin is to force mypy to recheck things + based on the demands of mypyc in a couple situations: + * Any modules in the same group must be compiled together, so we + tell mypy that modules depend on all their groupmates. + * If the IR metadata is missing or stale or any of the generated + C source files associated missing or stale, then we need to + recompile the module so we mark it as stale. + """ + + def __init__( + self, options: Options, compiler_options: CompilerOptions, groups: Groups + ) -> None: + super().__init__(options) + self.group_map: dict[str, tuple[str | None, list[str]]] = {} + for sources, name in groups: + modules = sorted(source.module for source in sources) + for id in modules: + self.group_map[id] = (name, modules) + + self.compiler_options = compiler_options + self.metastore = create_metastore(options) + + def report_config_data(self, ctx: ReportConfigContext) -> tuple[str | None, list[str]] | None: + # The config data we report is the group map entry for the module. + # If the data is being used to check validity, we do additional checks + # that the IR cache exists and matches the metadata cache and all + # output source files exist and are up to date. + + id, path, is_check = ctx.id, ctx.path, ctx.is_check + + if id not in self.group_map: + return None + + # If we aren't doing validity checks, just return the cache data + if not is_check: + return self.group_map[id] + + # Load the metadata and IR cache + meta_path, _, _ = get_cache_names(id, path, self.options) + ir_path = get_ir_cache_name(id, path, self.options) + try: + meta_json = self.metastore.read(meta_path) + ir_json = self.metastore.read(ir_path) + except FileNotFoundError: + # This could happen if mypyc failed after mypy succeeded + # in the previous run or if some cache files got + # deleted. No big deal, just fail to load the cache. + return None + + ir_data = json.loads(ir_json) + + # Check that the IR cache matches the metadata cache + if hash_digest(meta_json) != ir_data["meta_hash"]: + return None + + # Check that all the source files are present and as + # expected. The main situation where this would come up is the + # user deleting the build directory without deleting + # .mypy_cache, which we should handle gracefully. 
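+        # (ir_data["src_hashes"] maps each generated C/H output file of the group to
+        # the content hash recorded by write_cache when the file was last written.)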
+ for path, hash in ir_data["src_hashes"].items(): + try: + with open(os.path.join(self.compiler_options.target_dir, path), "rb") as f: + contents = f.read() + except FileNotFoundError: + return None + real_hash = hash_digest(contents) + if hash != real_hash: + return None + + return self.group_map[id] + + def get_additional_deps(self, file: MypyFile) -> list[tuple[int, str, int]]: + # Report dependency on modules in the module's group + return [(10, id, -1) for id in self.group_map.get(file.fullname, (None, []))[1]] + + +def parse_and_typecheck( + sources: list[BuildSource], + options: Options, + compiler_options: CompilerOptions, + groups: Groups, + fscache: FileSystemCache | None = None, + alt_lib_path: str | None = None, +) -> BuildResult: + assert options.strict_optional, "strict_optional must be turned on" + result = build( + sources=sources, + options=options, + alt_lib_path=alt_lib_path, + fscache=fscache, + extra_plugins=[MypycPlugin(options, compiler_options, groups)], + ) + if result.errors: + raise CompileError(result.errors) + return result + + +def compile_scc_to_ir( + scc: list[MypyFile], + result: BuildResult, + mapper: Mapper, + compiler_options: CompilerOptions, + errors: Errors, +) -> ModuleIRs: + """Compile an SCC into ModuleIRs. + + Any modules that this SCC depends on must have either been compiled, + type checked, or loaded from a cache into mapper. + + Arguments: + scc: The list of MypyFiles to compile + result: The BuildResult from the mypy front-end + mapper: The Mapper object mapping mypy ASTs to class and func IRs + compiler_options: The compilation options + errors: Where to report any errors encountered + + Returns the IR of the modules. + """ + + if compiler_options.verbose: + print("Compiling {}".format(", ".join(x.name for x in scc))) + + # Generate basic IR, with missing exception and refcount handling. + modules = build_ir(scc, result.graph, result.types, mapper, compiler_options, errors) + if errors.num_errors > 0: + return modules + + env_user_functions = {} + for module in modules.values(): + for cls in module.classes: + if cls.env_user_function: + env_user_functions[cls.env_user_function] = cls + + for module in modules.values(): + for fn in module.functions: + # Insert checks for uninitialized values. + insert_uninit_checks(fn) + # Insert exception handling. + insert_exception_handling(fn) + # Insert reference count handling. + insert_ref_count_opcodes(fn) + + if fn in env_user_functions: + insert_spills(fn, env_user_functions[fn]) + + if compiler_options.log_trace: + insert_event_trace_logging(fn, compiler_options) + + # Switch to lower abstraction level IR. + lower_ir(fn, compiler_options) + # Calculate implicit module dependencies (needed for librt) + capsules = find_implicit_capsule_dependencies(fn) + if capsules is not None: + module.capsules.update(capsules) + # Perform optimizations. + do_copy_propagation(fn, compiler_options) + do_flag_elimination(fn, compiler_options) + + return modules + + +def compile_modules_to_ir( + result: BuildResult, mapper: Mapper, compiler_options: CompilerOptions, errors: Errors +) -> ModuleIRs: + """Compile a collection of modules into ModuleIRs. + + The modules to compile are specified as part of mapper's group_map. + + Returns the IR of the modules. 
+ """ + deser_ctx = DeserMaps({}, {}) + modules = {} + + # Process the graph by SCC in topological order, like we do in mypy.build + for scc in sorted_components(result.graph): + scc_states = [result.graph[id] for id in scc.mod_ids] + trees = [st.tree for st in scc_states if st.id in mapper.group_map and st.tree] + + if not trees: + continue + + fresh = all(id not in result.manager.rechecked_modules for id in scc.mod_ids) + if fresh: + load_scc_from_cache(trees, result, mapper, deser_ctx) + else: + scc_ir = compile_scc_to_ir(trees, result, mapper, compiler_options, errors) + modules.update(scc_ir) + + return modules + + +def compile_ir_to_c( + groups: Groups, + modules: ModuleIRs, + result: BuildResult, + mapper: Mapper, + compiler_options: CompilerOptions, +) -> dict[str | None, list[tuple[str, str]]]: + """Compile a collection of ModuleIRs to C source text. + + Returns a dictionary mapping group names to a list of (file name, + file text) pairs. + """ + source_paths = { + source.module: result.graph[source.module].xpath + for sources, _ in groups + for source in sources + } + + names = NameGenerator( + [[source.module for source in sources] for sources, _ in groups], + separate=compiler_options.separate, + ) + + # Generate C code for each compilation group. Each group will be + # compiled into a separate extension module. + ctext: dict[str | None, list[tuple[str, str]]] = {} + for group_sources, group_name in groups: + group_modules = { + source.module: modules[source.module] + for source in group_sources + if source.module in modules + } + if not group_modules: + ctext[group_name] = [] + continue + generator = GroupGenerator( + group_modules, source_paths, group_name, mapper.group_map, names, compiler_options + ) + ctext[group_name] = generator.generate_c_for_modules() + + return ctext + + +def get_ir_cache_name(id: str, path: str, options: Options) -> str: + meta_path, _, _ = get_cache_names(id, path, options) + # Mypy uses JSON cache even with --fixed-format-cache (for now). + return meta_path.replace(".meta.json", ".ir.json").replace(".meta.ff", ".ir.json") + + +def get_state_ir_cache_name(state: State) -> str: + return get_ir_cache_name(state.id, state.xpath, state.options) + + +def write_cache( + modules: ModuleIRs, + result: BuildResult, + group_map: dict[str, str | None], + ctext: dict[str | None, list[tuple[str, str]]], +) -> None: + """Write out the cache information for modules. + + Each module has the following cache information written (which is + in addition to the cache information written by mypy itself): + * A serialized version of its mypyc IR, minus the bodies of + functions. This allows code that depends on it to use + these serialized data structures when compiling against it + instead of needing to recompile it. (Compiling against a + module requires access to both its mypy and mypyc data + structures.) + * The hash of the mypy metadata cache file for the module. + This is used to ensure that the mypyc cache and the mypy + cache are in sync and refer to the same version of the code. + This is particularly important if mypyc crashes/errors/is + stopped after mypy has written its cache but before mypyc has. + * The hashes of all the source file outputs for the group + the module is in. This is so that the module will be + recompiled if the source outputs are missing. 
+ """ + + hashes = {} + for name, files in ctext.items(): + hashes[name] = {file: compute_hash(data) for file, data in files} + + # Write out cache data + for id, module in modules.items(): + st = result.graph[id] + + meta_path, _, _ = get_cache_names(id, st.xpath, result.manager.options) + # If the metadata isn't there, skip writing the cache. + try: + meta_data = result.manager.metastore.read(meta_path) + except OSError: + continue + + newpath = get_state_ir_cache_name(st) + ir_data = { + "ir": module.serialize(), + "meta_hash": hash_digest(meta_data), + "src_hashes": hashes[group_map[id]], + } + + result.manager.metastore.write(newpath, json_dumps(ir_data)) + + result.manager.metastore.commit() + + +def load_scc_from_cache( + scc: list[MypyFile], result: BuildResult, mapper: Mapper, ctx: DeserMaps +) -> ModuleIRs: + """Load IR for an SCC of modules from the cache. + + Arguments and return are as compile_scc_to_ir. + """ + cache_data = { + k.fullname: json.loads( + result.manager.metastore.read(get_state_ir_cache_name(result.graph[k.fullname])) + )["ir"] + for k in scc + } + modules = deserialize_modules(cache_data, ctx) + load_type_map(mapper, scc, ctx) + return modules + + +def compile_modules_to_c( + result: BuildResult, compiler_options: CompilerOptions, errors: Errors, groups: Groups +) -> tuple[ModuleIRs, list[FileContents], Mapper]: + """Compile Python module(s) to the source of Python C extension modules. + + This generates the source code for the "shared library" module + for each group. The shim modules are generated in mypyc.build. + Each shared library module provides, for each module in its group, + a PyCapsule containing an initialization function. + Additionally, it provides a capsule containing an export table of + pointers to all the group's functions and static variables. + + Arguments: + result: The BuildResult from the mypy front-end + compiler_options: The compilation options + errors: Where to report any errors encountered + groups: The groups that we are compiling. See documentation of Groups type above. + + Returns the IR of the modules and a list containing the generated files for each group. + """ + # Construct a map from modules to what group they belong to + group_map = {source.module: lib_name for group, lib_name in groups for source in group} + mapper = Mapper(group_map) + + # Sometimes when we call back into mypy, there might be errors. + # We don't want to crash when that happens. 
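+    # Point the error reporter at an empty dummy file context so that any such late
+    # errors have somewhere to be attached rather than crashing the compiler.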
+ result.manager.errors.set_file( + "", module=None, scope=None, options=result.manager.options + ) + + modules = compile_modules_to_ir(result, mapper, compiler_options, errors) + if errors.num_errors > 0: + return {}, [], Mapper({}) + + ctext = compile_ir_to_c(groups, modules, result, mapper, compiler_options) + write_cache(modules, result, group_map, ctext) + + return modules, [ctext[name] for _, name in groups], mapper + + +def generate_function_declaration(fn: FuncIR, emitter: Emitter) -> None: + emitter.context.declarations[emitter.native_function_name(fn.decl)] = HeaderDeclaration( + f"{native_function_header(fn.decl, emitter)};", needs_export=True + ) + if fn.name != TOP_LEVEL_NAME and not fn.internal: + if is_fastcall_supported(fn, emitter.capi_version): + emitter.context.declarations[PREFIX + fn.cname(emitter.names)] = HeaderDeclaration( + f"{wrapper_function_header(fn, emitter.names)};" + ) + else: + emitter.context.declarations[PREFIX + fn.cname(emitter.names)] = HeaderDeclaration( + f"{legacy_wrapper_function_header(fn, emitter.names)};" + ) + + +def pointerize(decl: str, name: str) -> str: + """Given a C decl and its name, modify it to be a declaration to a pointer.""" + # This doesn't work in general but does work for all our types... + if "(" in decl: + # Function pointer. Stick an * in front of the name and wrap it in parens. + return decl.replace(name, f"(*{name})") + else: + # Non-function pointer. Just stick an * in front of the name. + return decl.replace(name, f"*{name}") + + +def group_dir(group_name: str) -> str: + """Given a group name, return the relative directory path for it.""" + return os.sep.join(group_name.split(".")[:-1]) + + +class GroupGenerator: + def __init__( + self, + modules: dict[str, ModuleIR], + source_paths: dict[str, str], + group_name: str | None, + group_map: dict[str, str | None], + names: NameGenerator, + compiler_options: CompilerOptions, + ) -> None: + """Generator for C source for a compilation group. + + The code for a compilation group contains an internal and an + external .h file, and then one .c if not in multi_file mode or + one .c file per module if in multi_file mode. + + Arguments: + modules: (name, ir) pairs for each module in the group + source_paths: Map from module names to source file paths + group_name: The name of the group (or None if this is single-module compilation) + group_map: A map of modules to their group names + names: The name generator for the compilation + compiler_options: Mypyc specific options, including multi_file mode + """ + self.modules = modules + self.source_paths = source_paths + self.context = EmitterContext(names, group_name, group_map) + self.names = names + # Initializations of globals to simple values that we can't + # do statically because the windows loader is bad. + self.simple_inits: list[tuple[str, str]] = [] + self.group_name = group_name + self.use_shared_lib = group_name is not None + self.compiler_options = compiler_options + self.multi_file = compiler_options.multi_file + # Multi-phase init is needed to enable free-threading. In the future we'll + # probably want to enable it always, but we'll wait until it's stable. 
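+        # ("Multi-phase init" here means PEP 489-style initialization: the module is
+        # described by PyModuleDef slots plus an exec function, and PyInit_* simply
+        # returns PyModuleDef_Init(...) instead of creating the module itself.)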
+ self.multi_phase_init = IS_FREE_THREADED + + @property + def group_suffix(self) -> str: + return "_" + exported_name(self.group_name) if self.group_name else "" + + @property + def short_group_suffix(self) -> str: + return "_" + exported_name(self.group_name.split(".")[-1]) if self.group_name else "" + + def generate_c_for_modules(self) -> list[tuple[str, str]]: + file_contents = [] + multi_file = self.use_shared_lib and self.multi_file + + # Collect all literal refs in IR. + for module in self.modules.values(): + for fn in module.functions: + collect_literals(fn, self.context.literals) + + base_emitter = Emitter(self.context) + # Optionally just include the runtime library c files to + # reduce the number of compiler invocations needed + if self.compiler_options.include_runtime_files: + for name in RUNTIME_C_FILES: + base_emitter.emit_line(f'#include "{name}"') + base_emitter.emit_line(f'#include "__native{self.short_group_suffix}.h"') + base_emitter.emit_line(f'#include "__native_internal{self.short_group_suffix}.h"') + emitter = base_emitter + + self.generate_literal_tables() + + for module_name, module in self.modules.items(): + if multi_file: + emitter = Emitter(self.context) + emitter.emit_line(f'#include "__native{self.short_group_suffix}.h"') + emitter.emit_line(f'#include "__native_internal{self.short_group_suffix}.h"') + + self.declare_module(module_name, emitter) + self.declare_internal_globals(module_name, emitter) + self.declare_imports(module.imports, emitter) + + for cl in module.classes: + if cl.is_ext_class: + generate_class(cl, module_name, emitter) + + # Generate Python extension module definitions and module initialization functions. + self.generate_module_def(emitter, module_name, module) + + for fn in module.functions: + emitter.emit_line() + generate_native_function(fn, emitter, self.source_paths[module_name], module_name) + if fn.name != TOP_LEVEL_NAME and not fn.internal: + emitter.emit_line() + if is_fastcall_supported(fn, emitter.capi_version): + generate_wrapper_function( + fn, emitter, self.source_paths[module_name], module_name + ) + else: + generate_legacy_wrapper_function( + fn, emitter, self.source_paths[module_name], module_name + ) + if multi_file: + name = f"__native_{exported_name(module_name)}.c" + file_contents.append((name, "".join(emitter.fragments))) + + # The external header file contains type declarations while + # the internal contains declarations of functions and objects + # (which are shared between shared libraries via dynamic + # exports tables and not accessed directly.) 
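+        # For a group named, say, "proj" (a hypothetical name), these end up as
+        # "__native_proj.h" (external) and "__native_internal_proj.h" (internal),
+        # matching the #include lines emitted above and the file names returned below.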
+        ext_declarations = Emitter(self.context)
+        ext_declarations.emit_line(f"#ifndef MYPYC_NATIVE{self.group_suffix}_H")
+        ext_declarations.emit_line(f"#define MYPYC_NATIVE{self.group_suffix}_H")
+        ext_declarations.emit_line("#include <Python.h>")
+        ext_declarations.emit_line("#include <CPy.h>")
+        if self.compiler_options.depends_on_librt_internal:
+            ext_declarations.emit_line("#include ")
+        if any("librt.base64" in mod.capsules for mod in self.modules.values()):
+            ext_declarations.emit_line("#include ")
+
+        declarations = Emitter(self.context)
+        declarations.emit_line(f"#ifndef MYPYC_LIBRT_INTERNAL{self.group_suffix}_H")
+        declarations.emit_line(f"#define MYPYC_LIBRT_INTERNAL{self.group_suffix}_H")
+        declarations.emit_line("#include <Python.h>")
+        declarations.emit_line("#include <CPy.h>")
+        declarations.emit_line(f'#include "__native{self.short_group_suffix}.h"')
+        declarations.emit_line()
+        declarations.emit_line("int CPyGlobalsInit(void);")
+        declarations.emit_line()
+
+        for module_name, module in self.modules.items():
+            self.declare_finals(module_name, module.final_names, declarations)
+            for cl in module.classes:
+                generate_class_type_decl(cl, emitter, ext_declarations, declarations)
+                if cl.reuse_freed_instance:
+                    generate_class_reuse(cl, emitter, ext_declarations, declarations)
+            self.declare_type_vars(module_name, module.type_var_names, declarations)
+            for fn in module.functions:
+                generate_function_declaration(fn, declarations)
+
+        for lib in sorted(self.context.group_deps):
+            elib = exported_name(lib)
+            short_lib = exported_name(lib.split(".")[-1])
+            declarations.emit_lines(
+                "#include <{}>".format(os.path.join(group_dir(lib), f"__native_{short_lib}.h")),
+                f"struct export_table_{elib} exports_{elib};",
+            )
+
+        sorted_decls = self.toposort_declarations()
+
+        emitter = base_emitter
+        self.generate_globals_init(emitter)
+
+        emitter.emit_line()
+
+        for declaration in sorted_decls:
+            decls = ext_declarations if declaration.is_type else declarations
+            if not declaration.is_type:
+                decls.emit_lines(f"extern {declaration.decl[0]}", *declaration.decl[1:])
+                # If there is a definition, emit it. Otherwise, repeat the declaration
+                # (without an extern).
+                if declaration.defn:
+                    emitter.emit_lines(*declaration.defn)
+                else:
+                    emitter.emit_lines(*declaration.decl)
+            else:
+                decls.emit_lines(*declaration.decl)
+
+        if self.group_name:
+            if self.compiler_options.separate:
+                self.generate_export_table(ext_declarations, emitter)
+
+            self.generate_shared_lib_init(emitter)
+
+        ext_declarations.emit_line("#endif")
+        declarations.emit_line("#endif")
+
+        output_dir = group_dir(self.group_name) if self.group_name else ""
+        return file_contents + [
+            (
+                os.path.join(output_dir, f"__native{self.short_group_suffix}.c"),
+                "".join(emitter.fragments),
+            ),
+            (
+                os.path.join(output_dir, f"__native_internal{self.short_group_suffix}.h"),
+                "".join(declarations.fragments),
+            ),
+            (
+                os.path.join(output_dir, f"__native{self.short_group_suffix}.h"),
+                "".join(ext_declarations.fragments),
+            ),
+        ]
+
+    def generate_literal_tables(self) -> None:
+        """Generate tables containing descriptions of Python literals to construct.
+
+        We will store the constructed literals in a single array that contains
+        literals of all types. This way we can refer to an arbitrary literal by
+        its index.
+ """ + literals = self.context.literals + # During module initialization we store all the constructed objects here + self.declare_global("PyObject *[%d]" % literals.num_literals(), "CPyStatics") + # Descriptions of str literals + init_str = c_string_array_initializer(literals.encoded_str_values()) + self.declare_global("const char * const []", "CPyLit_Str", initializer=init_str) + # Descriptions of bytes literals + init_bytes = c_string_array_initializer(literals.encoded_bytes_values()) + self.declare_global("const char * const []", "CPyLit_Bytes", initializer=init_bytes) + # Descriptions of int literals + init_int = c_string_array_initializer(literals.encoded_int_values()) + self.declare_global("const char * const []", "CPyLit_Int", initializer=init_int) + # Descriptions of float literals + init_floats = c_array_initializer(literals.encoded_float_values()) + self.declare_global("const double []", "CPyLit_Float", initializer=init_floats) + # Descriptions of complex literals + init_complex = c_array_initializer(literals.encoded_complex_values()) + self.declare_global("const double []", "CPyLit_Complex", initializer=init_complex) + # Descriptions of tuple literals + init_tuple = c_array_initializer(literals.encoded_tuple_values()) + self.declare_global("const int []", "CPyLit_Tuple", initializer=init_tuple) + # Descriptions of frozenset literals + init_frozenset = c_array_initializer(literals.encoded_frozenset_values()) + self.declare_global("const int []", "CPyLit_FrozenSet", initializer=init_frozenset) + + def generate_export_table(self, decl_emitter: Emitter, code_emitter: Emitter) -> None: + """Generate the declaration and definition of the group's export struct. + + To avoid needing to deal with deeply platform specific issues + involving dynamic library linking (and some possibly + insurmountable issues involving cyclic dependencies), compiled + code accesses functions and data in other compilation groups + via an explicit "export struct". + + Each group declares a struct type that contains a pointer to + every function and static variable it exports. It then + populates this struct and stores a pointer to it in a capsule + stored as an attribute named 'exports' on the group's shared + library's python module. + + On load, a group's init function will import all of its + dependencies' exports tables using the capsule mechanism and + copy the contents into a local copy of the table (to eliminate + the need for a pointer indirection when accessing it). + + Then, all calls to functions in another group and accesses to statics + from another group are done indirectly via the export table. + + For example, a group containing a module b, where b contains a class B + and a function bar, would declare an export table like: + struct export_table_b { + PyTypeObject **CPyType_B; + PyObject *(*CPyDef_B)(CPyTagged cpy_r_x); + CPyTagged (*CPyDef_B___foo)(PyObject *cpy_r_self, CPyTagged cpy_r_y); + tuple_T2OI (*CPyDef_bar)(PyObject *cpy_r_x); + char (*CPyDef___top_level__)(void); + }; + that would be initialized with: + static struct export_table_b exports = { + &CPyType_B, + &CPyDef_B, + &CPyDef_B___foo, + &CPyDef_bar, + &CPyDef___top_level__, + }; + To call `b.foo`, then, a function in another group would do + `exports_b.CPyDef_bar(...)`. 
+ """ + + decls = decl_emitter.context.declarations + + decl_emitter.emit_lines("", f"struct export_table{self.group_suffix} {{") + for name, decl in decls.items(): + if decl.needs_export: + decl_emitter.emit_line(pointerize("\n".join(decl.decl), name)) + + decl_emitter.emit_line("};") + + code_emitter.emit_lines("", f"static struct export_table{self.group_suffix} exports = {{") + for name, decl in decls.items(): + if decl.needs_export: + code_emitter.emit_line(f"&{name},") + + code_emitter.emit_line("};") + + def generate_shared_lib_init(self, emitter: Emitter) -> None: + """Generate the init function for a shared library. + + A shared library contains all the actual code for a + compilation group. + + The init function is responsible for creating Capsules that + wrap pointers to the initialization function of all the real + init functions for modules in this shared library as well as + the export table containing all the exported functions and + values from all the modules. + + These capsules are stored in attributes of the shared library. + """ + assert self.group_name is not None + + emitter.emit_line() + + short_name = shared_lib_name(self.group_name).split(".")[-1] + + emitter.emit_lines( + f"static int exec_{short_name}(PyObject *module)", + "{", + "int res;", + "PyObject *capsule;", + "PyObject *tmp;", + "", + ) + + if self.compiler_options.separate: + emitter.emit_lines( + 'capsule = PyCapsule_New(&exports, "{}.exports", NULL);'.format( + shared_lib_name(self.group_name) + ), + "if (!capsule) {", + "goto fail;", + "}", + 'res = PyObject_SetAttrString(module, "exports", capsule);', + "Py_DECREF(capsule);", + "if (res < 0) {", + "goto fail;", + "}", + "", + ) + + for mod in self.modules: + name = exported_name(mod) + if self.multi_phase_init: + capsule_func_prefix = "CPyExec_" + capsule_name_prefix = "exec_" + emitter.emit_line(f"extern int CPyExec_{name}(PyObject *);") + else: + capsule_func_prefix = "CPyInit_" + capsule_name_prefix = "init_" + emitter.emit_line(f"extern PyObject *CPyInit_{name}(void);") + emitter.emit_lines( + 'capsule = PyCapsule_New((void *){}{}, "{}.{}{}", NULL);'.format( + capsule_func_prefix, + name, + shared_lib_name(self.group_name), + capsule_name_prefix, + name, + ), + "if (!capsule) {", + "goto fail;", + "}", + f'res = PyObject_SetAttrString(module, "{capsule_name_prefix}{name}", capsule);', + "Py_DECREF(capsule);", + "if (res < 0) {", + "goto fail;", + "}", + "", + ) + + for group in sorted(self.context.group_deps): + egroup = exported_name(group) + emitter.emit_lines( + 'tmp = PyImport_ImportModule("{}"); if (!tmp) goto fail; Py_DECREF(tmp);'.format( + shared_lib_name(group) + ), + 'struct export_table_{} *pexports_{} = PyCapsule_Import("{}.exports", 0);'.format( + egroup, egroup, shared_lib_name(group) + ), + f"if (!pexports_{egroup}) {{", + "goto fail;", + "}", + "memcpy(&exports_{group}, pexports_{group}, sizeof(exports_{group}));".format( + group=egroup + ), + "", + ) + + emitter.emit_lines("return 0;", "fail:", "return -1;", "}") + + if self.multi_phase_init: + emitter.emit_lines( + f"static PyModuleDef_Slot slots_{short_name}[] = {{", + f"{{Py_mod_exec, exec_{short_name}}},", + "{Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED},", + "{Py_mod_gil, Py_MOD_GIL_NOT_USED},", + "{0, NULL},", + "};", + ) + + size = 0 if self.multi_phase_init else -1 + emitter.emit_lines( + f"static PyModuleDef module_def_{short_name} = {{", + "PyModuleDef_HEAD_INIT,", + f'.m_name = "{shared_lib_name(self.group_name)}",', + ".m_doc = NULL,", + 
f".m_size = {size},", + ".m_methods = NULL,", + ) + if self.multi_phase_init: + emitter.emit_line(f".m_slots = slots_{short_name},") + emitter.emit_line("};") + + if self.multi_phase_init: + emitter.emit_lines( + f"PyMODINIT_FUNC PyInit_{short_name}(void) {{", + f"return PyModuleDef_Init(&module_def_{short_name});", + "}", + ) + else: + emitter.emit_lines( + f"PyMODINIT_FUNC PyInit_{short_name}(void) {{", + "static PyObject *module = NULL;", + "if (module) {", + "Py_INCREF(module);", + "return module;", + "}", + f"module = PyModule_Create(&module_def_{short_name});", + "if (!module) {", + "return NULL;", + "}", + f"if (exec_{short_name}(module) < 0) {{", + "Py_DECREF(module);", + "return NULL;", + "}", + "return module;", + "}", + ) + + def generate_globals_init(self, emitter: Emitter) -> None: + emitter.emit_lines( + "", + "int CPyGlobalsInit(void)", + "{", + "static int is_initialized = 0;", + "if (is_initialized) return 0;", + "", + ) + + emitter.emit_line("CPy_Init();") + for symbol, fixup in self.simple_inits: + emitter.emit_line(f"{symbol} = {fixup};") + + values = "CPyLit_Str, CPyLit_Bytes, CPyLit_Int, CPyLit_Float, CPyLit_Complex, CPyLit_Tuple, CPyLit_FrozenSet" + emitter.emit_lines( + f"if (CPyStatics_Initialize(CPyStatics, {values}) < 0) {{", "return -1;", "}" + ) + + emitter.emit_lines("is_initialized = 1;", "return 0;", "}") + + def generate_module_def(self, emitter: Emitter, module_name: str, module: ModuleIR) -> None: + """Emit the PyModuleDef struct for a module and the module init function.""" + module_prefix = emitter.names.private_name(module_name) + self.emit_module_methods(emitter, module_name, module_prefix, module) + self.emit_module_exec_func(emitter, module_name, module_prefix, module) + + # If using multi-phase init and a shared lib, parts of module definition + # will happen in the shim modules, so we skip some steps here. + if not (self.multi_phase_init and self.use_shared_lib): + if self.multi_phase_init: + self.emit_module_def_slots(emitter, module_prefix, module_name) + self.emit_module_def_struct(emitter, module_name, module_prefix) + self.emit_module_init_func(emitter, module_name, module_prefix) + + def emit_module_def_slots( + self, emitter: Emitter, module_prefix: str, module_name: str + ) -> None: + name = f"{module_prefix}_slots" + exec_name = f"CPyExec_{exported_name(module_name)}" + + emitter.emit_line(f"static PyModuleDef_Slot {name}[] = {{") + emitter.emit_line(f"{{Py_mod_exec, {exec_name}}},") + if sys.version_info >= (3, 12): + # Multiple interpreter support requires not using any C global state, + # which we don't support yet. + emitter.emit_line( + "{Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED}," + ) + if sys.version_info >= (3, 13): + # Declare support for free-threading to enable experimentation, + # even if we don't properly support it. 
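+            # Py_MOD_GIL_NOT_USED tells a free-threaded (no-GIL) CPython build that
+            # it should not re-enable the GIL when this extension module is loaded.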
+ emitter.emit_line("{Py_mod_gil, Py_MOD_GIL_NOT_USED},") + emitter.emit_line("{0, NULL},") + emitter.emit_line("};") + + def emit_module_methods( + self, emitter: Emitter, module_name: str, module_prefix: str, module: ModuleIR + ) -> None: + """Emit module methods (the static PyMethodDef table).""" + emitter.emit_line(f"static PyMethodDef {module_prefix}module_methods[] = {{") + for fn in module.functions: + if fn.class_name is not None or fn.name == TOP_LEVEL_NAME: + continue + name = short_id_from_name(fn.name, fn.decl.shortname, fn.line) + if is_fastcall_supported(fn, emitter.capi_version): + flag = "METH_FASTCALL" + else: + flag = "METH_VARARGS" + doc = native_function_doc_initializer(fn) + emitter.emit_line( + ( + '{{"{name}", (PyCFunction){prefix}{cname}, {flag} | METH_KEYWORDS, ' + "PyDoc_STR({doc}) /* docstring */}}," + ).format( + name=name, cname=fn.cname(emitter.names), prefix=PREFIX, flag=flag, doc=doc + ) + ) + emitter.emit_line("{NULL, NULL, 0, NULL}") + emitter.emit_line("};") + emitter.emit_line() + + def emit_module_def_struct( + self, emitter: Emitter, module_name: str, module_prefix: str + ) -> None: + """Emit the static module definition struct (PyModuleDef).""" + emitter.emit_lines( + f"static struct PyModuleDef {module_prefix}module = {{", + "PyModuleDef_HEAD_INIT,", + f'"{module_name}",', + "NULL, /* docstring */", + "0, /* size of per-interpreter state of the module */", + f"{module_prefix}module_methods,", + ) + if self.multi_phase_init and not self.use_shared_lib: + slots_name = f"{module_prefix}_slots" + emitter.emit_line(f"{slots_name}, /* m_slots */") + else: + emitter.emit_line("NULL,") + emitter.emit_line("};") + emitter.emit_line() + + def emit_module_exec_func( + self, emitter: Emitter, module_name: str, module_prefix: str, module: ModuleIR + ) -> None: + """Emit the module exec function. + + If we are compiling just one module, this will be the normal C API + exec function. If we are compiling 2+ modules, we generate a shared + library for the modules and shims that call into the shared + library, and in this case the shared module defines an internal + exec function for each module and these will be called by the shims + via Capsules. 
+ """ + declaration = f"int CPyExec_{exported_name(module_name)}(PyObject *module)" + module_static = self.module_internal_static_name(module_name, emitter) + emitter.emit_lines(declaration, "{") + if self.compiler_options.depends_on_librt_internal: + emitter.emit_line("if (import_librt_internal() < 0) {") + emitter.emit_line("return -1;") + emitter.emit_line("}") + if "librt.base64" in module.capsules: + emitter.emit_line("if (import_librt_base64() < 0) {") + emitter.emit_line("return -1;") + emitter.emit_line("}") + emitter.emit_line("PyObject* modname = NULL;") + if self.multi_phase_init: + emitter.emit_line(f"{module_static} = module;") + emitter.emit_line( + f'modname = PyObject_GetAttrString((PyObject *){module_static}, "__name__");' + ) + + module_globals = emitter.static_name("globals", module_name) + emitter.emit_lines( + f"{module_globals} = PyModule_GetDict({module_static});", + f"if (unlikely({module_globals} == NULL))", + " goto fail;", + ) + + if self.multi_phase_init: + emitter.emit_lines( + f"if (PyModule_AddFunctions(module, {module_prefix}module_methods) < 0)", + " goto fail;", + ) + + # HACK: Manually instantiate generated classes here + type_structs: list[str] = [] + for cl in module.classes: + type_struct = emitter.type_struct_name(cl) + type_structs.append(type_struct) + if cl.is_generated: + emitter.emit_lines( + "{t} = (PyTypeObject *)CPyType_FromTemplate(" + "(PyObject *){t}_template, NULL, modname);".format(t=type_struct) + ) + emitter.emit_lines(f"if (unlikely(!{type_struct}))", " goto fail;") + name_prefix = cl.name_prefix(emitter.names) + emitter.emit_line(f"CPyDef_{name_prefix}_trait_vtable_setup();") + + emitter.emit_lines("if (CPyGlobalsInit() < 0)", " goto fail;") + + self.generate_top_level_call(module, emitter) + + emitter.emit_lines("Py_DECREF(modname);") + + emitter.emit_line("return 0;") + emitter.emit_lines("fail:") + if self.multi_phase_init: + emitter.emit_lines(f"{module_static} = NULL;", "Py_CLEAR(modname);") + else: + emitter.emit_lines(f"Py_CLEAR({module_static});", "Py_CLEAR(modname);") + for name, typ in module.final_names: + static_name = emitter.static_name(name, module_name) + emitter.emit_dec_ref(static_name, typ, is_xdec=True) + undef = emitter.c_undefined_value(typ) + emitter.emit_line(f"{static_name} = {undef};") + # the type objects returned from CPyType_FromTemplate are all new references + # so we have to decref them + for t in type_structs: + emitter.emit_line(f"Py_CLEAR({t});") + emitter.emit_line("return -1;") + emitter.emit_line("}") + + def emit_module_init_func( + self, emitter: Emitter, module_name: str, module_prefix: str + ) -> None: + if not self.use_shared_lib: + declaration = f"PyMODINIT_FUNC PyInit_{module_name}(void)" + else: + declaration = f"PyObject *CPyInit_{exported_name(module_name)}(void)" + emitter.emit_lines(declaration, "{") + + if self.multi_phase_init: + def_name = f"{module_prefix}module" + emitter.emit_line(f"return PyModuleDef_Init(&{def_name});") + emitter.emit_line("}") + return + + exec_func = f"CPyExec_{exported_name(module_name)}" + + # Store the module reference in a static and return it when necessary. + # This is separate from the *global* reference to the module that will + # be populated when it is imported by a compiled module. We want that + # reference to only be populated when the module has been successfully + # imported, whereas this we want to have to stop a circular import. 
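+        # (For example, if two compiled modules import each other, the re-entrant
+        # request for the module whose top level is still running gets back the
+        # partially initialized module object instead of recursing forever.)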
+ module_static = self.module_internal_static_name(module_name, emitter) + + emitter.emit_lines( + f"if ({module_static}) {{", + f"Py_INCREF({module_static});", + f"return {module_static};", + "}", + ) + + emitter.emit_lines( + f"{module_static} = PyModule_Create(&{module_prefix}module);", + f"if (unlikely({module_static} == NULL))", + " goto fail;", + ) + emitter.emit_lines(f"if ({exec_func}({module_static}) != 0)", " goto fail;") + emitter.emit_line(f"return {module_static};") + emitter.emit_lines("fail:", "return NULL;") + emitter.emit_lines("}") + + def generate_top_level_call(self, module: ModuleIR, emitter: Emitter) -> None: + """Generate call to function representing module top level.""" + # Optimization: we tend to put the top level last, so reverse iterate + for fn in reversed(module.functions): + if fn.name == TOP_LEVEL_NAME: + emitter.emit_lines( + f"char result = {emitter.native_function_name(fn.decl)}();", + "if (result == 2)", + " goto fail;", + ) + break + + def toposort_declarations(self) -> list[HeaderDeclaration]: + """Topologically sort the declaration dict by dependencies. + + Declarations can require other declarations to come prior in C (such as declaring structs). + In order to guarantee that the C output will compile the declarations will thus need to + be properly ordered. This simple DFS guarantees that we have a proper ordering. + + This runs in O(V + E). + """ + result = [] + marked_declarations: dict[str, MarkedDeclaration] = {} + for k, v in self.context.declarations.items(): + marked_declarations[k] = MarkedDeclaration(v, False) + + def _toposort_visit(name: str) -> None: + decl = marked_declarations[name] + if decl.mark: + return + + for child in decl.declaration.dependencies: + _toposort_visit(child) + + result.append(decl.declaration) + decl.mark = True + + for name in marked_declarations: + _toposort_visit(name) + + return result + + def declare_global( + self, type_spaced: str, name: str, *, initializer: str | None = None + ) -> None: + if "[" not in type_spaced: + base = f"{type_spaced}{name}" + else: + a, b = type_spaced.split("[", 1) + base = f"{a}{name}[{b}" + + if not initializer: + defn = None + else: + defn = [f"{base} = {initializer};"] + if name not in self.context.declarations: + self.context.declarations[name] = HeaderDeclaration(f"{base};", defn=defn) + + def declare_internal_globals(self, module_name: str, emitter: Emitter) -> None: + static_name = emitter.static_name("globals", module_name) + self.declare_global("PyObject *", static_name) + + def module_internal_static_name(self, module_name: str, emitter: Emitter) -> str: + return emitter.static_name(module_name + "__internal", None, prefix=MODULE_PREFIX) + + def declare_module(self, module_name: str, emitter: Emitter) -> None: + # We declare two globals for each compiled module: + # one used internally in the implementation of module init to cache results + # and prevent infinite recursion in import cycles, and one used + # by other modules to refer to it. 
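+        # (Roughly, for a module "foo" these become statics along the lines of
+        # CPyModule_foo__internal and CPyModule_foo; the exact spelling comes from
+        # static_name() with MODULE_PREFIX.)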
+ if module_name in self.modules: + internal_static_name = self.module_internal_static_name(module_name, emitter) + self.declare_global("CPyModule *", internal_static_name, initializer="NULL") + static_name = emitter.static_name(module_name, None, prefix=MODULE_PREFIX) + self.declare_global("CPyModule *", static_name) + self.simple_inits.append((static_name, "Py_None")) + + def declare_imports(self, imps: Iterable[str], emitter: Emitter) -> None: + for imp in imps: + self.declare_module(imp, emitter) + + def declare_finals( + self, module: str, final_names: Iterable[tuple[str, RType]], emitter: Emitter + ) -> None: + for name, typ in final_names: + static_name = emitter.static_name(name, module) + emitter.context.declarations[static_name] = HeaderDeclaration( + f"{emitter.ctype_spaced(typ)}{static_name};", + [self.final_definition(module, name, typ, emitter)], + needs_export=True, + ) + + def final_definition(self, module: str, name: str, typ: RType, emitter: Emitter) -> str: + static_name = emitter.static_name(name, module) + # Here we rely on the fact that undefined value and error value are always the same + undefined = emitter.c_initializer_undefined_value(typ) + return f"{emitter.ctype_spaced(typ)}{static_name} = {undefined};" + + def declare_static_pyobject(self, identifier: str, emitter: Emitter) -> None: + symbol = emitter.static_name(identifier, None) + self.declare_global("PyObject *", symbol) + + def declare_type_vars(self, module: str, type_var_names: list[str], emitter: Emitter) -> None: + for name in type_var_names: + static_name = emitter.static_name(name, module, prefix=TYPE_VAR_PREFIX) + emitter.context.declarations[static_name] = HeaderDeclaration( + f"PyObject *{static_name};", + [f"PyObject *{static_name} = NULL;"], + needs_export=False, + ) + + +T = TypeVar("T") + + +def toposort(deps: dict[T, set[T]]) -> list[T]: + """Topologically sort a dict from item to dependencies. + + This runs in O(V + E). + """ + result = [] + visited: set[T] = set() + + def visit(item: T) -> None: + if item in visited: + return + + for child in deps[item]: + visit(child) + + result.append(item) + visited.add(item) + + for item in deps: + visit(item) + + return result + + +def is_fastcall_supported(fn: FuncIR, capi_version: tuple[int, int]) -> bool: + if fn.class_name is not None: + if fn.name == "__call__": + # We can use vectorcalls (PEP 590) when supported + return True + # TODO: Support fastcall for __init__ and __new__. + return fn.name != "__init__" and fn.name != "__new__" + return True + + +def collect_literals(fn: FuncIR, literals: Literals) -> None: + """Store all Python literal object refs in fn. + + Collecting literals must happen only after we have the final IR. + This way we won't include literals that have been optimized away. 
+ """ + for block in fn.blocks: + for op in block.ops: + if isinstance(op, LoadLiteral): + literals.record_literal(op.value) + + +def c_string_array_initializer(components: list[bytes]) -> str: + result = [] + result.append("{\n") + for s in components: + result.append(" " + c_string_initializer(s) + ",\n") + result.append("}") + return "".join(result) diff --git a/.venv/lib/python3.12/site-packages/mypyc/codegen/emitwrapper.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/codegen/emitwrapper.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..2a95f60 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/codegen/emitwrapper.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/codegen/emitwrapper.py b/.venv/lib/python3.12/site-packages/mypyc/codegen/emitwrapper.py new file mode 100644 index 0000000..2e5d7ef --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/codegen/emitwrapper.py @@ -0,0 +1,979 @@ +"""Generate CPython API wrapper functions for native functions. + +The wrapper functions are used by the CPython runtime when calling +native functions from interpreted code, and when the called function +can't be determined statically in compiled code. They validate, match, +unbox and type check function arguments, and box return values as +needed. All wrappers accept and return 'PyObject *' (boxed) values. + +The wrappers aren't used for most calls between two native functions +or methods in a single compilation unit. +""" + +from __future__ import annotations + +from collections.abc import Sequence + +from mypy.nodes import ARG_NAMED, ARG_NAMED_OPT, ARG_OPT, ARG_POS, ARG_STAR, ARG_STAR2, ArgKind +from mypy.operators import op_methods_to_symbols, reverse_op_method_names, reverse_op_methods +from mypyc.codegen.emit import AssignHandler, Emitter, ErrorHandler, GotoHandler, ReturnHandler +from mypyc.common import ( + BITMAP_BITS, + BITMAP_TYPE, + DUNDER_PREFIX, + NATIVE_PREFIX, + PREFIX, + bitmap_name, +) +from mypyc.ir.class_ir import ClassIR +from mypyc.ir.func_ir import FUNC_STATICMETHOD, FuncIR, RuntimeArg +from mypyc.ir.rtypes import ( + RInstance, + RType, + is_bool_rprimitive, + is_int_rprimitive, + is_object_rprimitive, + object_rprimitive, +) +from mypyc.namegen import NameGenerator + +# Generic vectorcall wrapper functions (Python 3.7+) +# +# A wrapper function has a signature like this: +# +# PyObject *fn(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +# +# The function takes a self object, pointer to an array of arguments, +# the number of positional arguments, and a tuple of keyword argument +# names (that are stored starting in args[nargs]). +# +# It returns the returned object, or NULL on an exception. +# +# These are more efficient than legacy wrapper functions, since +# usually no tuple or dict objects need to be created for the +# arguments. Vectorcalls also use pre-constructed str objects for +# keyword argument names and other pre-computed information, instead +# of processing the argument format string on each call. + + +def wrapper_function_header(fn: FuncIR, names: NameGenerator) -> str: + """Return header of a vectorcall wrapper function. + + See comment above for a summary of the arguments. 
+ """ + assert not fn.internal + return ( + "PyObject *{prefix}{name}(" + "PyObject *self, PyObject *const *args, size_t nargs, PyObject *kwnames)" + ).format(prefix=PREFIX, name=fn.cname(names)) + + +def generate_traceback_code( + fn: FuncIR, emitter: Emitter, source_path: str, module_name: str +) -> str: + # If we hit an error while processing arguments, then we emit a + # traceback frame to make it possible to debug where it happened. + # Unlike traceback frames added for exceptions seen in IR, we do this + # even if there is no `traceback_name`. This is because the error will + # have originated here and so we need it in the traceback. + globals_static = emitter.static_name("globals", module_name) + traceback_code = 'CPy_AddTraceback("%s", "%s", %d, %s);' % ( + source_path.replace("\\", "\\\\"), + fn.traceback_name or fn.name, + fn.line, + globals_static, + ) + return traceback_code + + +def make_arg_groups(args: list[RuntimeArg]) -> dict[ArgKind, list[RuntimeArg]]: + """Group arguments by kind.""" + return {k: [arg for arg in args if arg.kind == k] for k in ArgKind} + + +def reorder_arg_groups(groups: dict[ArgKind, list[RuntimeArg]]) -> list[RuntimeArg]: + """Reorder argument groups to match their order in a format string.""" + return groups[ARG_POS] + groups[ARG_OPT] + groups[ARG_NAMED_OPT] + groups[ARG_NAMED] + + +def make_static_kwlist(args: list[RuntimeArg]) -> str: + arg_names = "".join(f'"{arg.name}", ' for arg in args) + return f"static const char * const kwlist[] = {{{arg_names}0}};" + + +def make_format_string(func_name: str | None, groups: dict[ArgKind, list[RuntimeArg]]) -> str: + """Return a format string that specifies the accepted arguments. + + The format string is an extended subset of what is supported by + PyArg_ParseTupleAndKeywords(). Only the type 'O' is used, and we + also support some extensions: + + - Required keyword-only arguments are introduced after '@' + - If the function receives *args or **kwargs, we add a '%' prefix + + Each group requires the previous groups' delimiters to be present + first. + + These are used by both vectorcall and legacy wrapper functions. + """ + format = "" + if groups[ARG_STAR] or groups[ARG_STAR2]: + format += "%" + format += "O" * len(groups[ARG_POS]) + if groups[ARG_OPT] or groups[ARG_NAMED_OPT] or groups[ARG_NAMED]: + format += "|" + "O" * len(groups[ARG_OPT]) + if groups[ARG_NAMED_OPT] or groups[ARG_NAMED]: + format += "$" + "O" * len(groups[ARG_NAMED_OPT]) + if groups[ARG_NAMED]: + format += "@" + "O" * len(groups[ARG_NAMED]) + if func_name is not None: + format += f":{func_name}" + return format + + +def generate_wrapper_function( + fn: FuncIR, emitter: Emitter, source_path: str, module_name: str +) -> None: + """Generate a CPython-compatible vectorcall wrapper for a native function. + + In particular, this handles unboxing the arguments, calling the native function, and + then boxing the return value. + """ + emitter.emit_line(f"{wrapper_function_header(fn, emitter.names)} {{") + + # If fn is a method, then the first argument is a self param + real_args = list(fn.args) + if fn.sig.num_bitmap_args: + real_args = real_args[: -fn.sig.num_bitmap_args] + if fn.class_name and fn.decl.kind != FUNC_STATICMETHOD: + arg = real_args.pop(0) + emitter.emit_line(f"PyObject *obj_{arg.name} = self;") + + # Need to order args as: required, optional, kwonly optional, kwonly required + # This is because CPyArg_ParseStackAndKeywords format string requires + # them grouped in that way. 
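+    # Illustrative example (hypothetical signature, not taken from real input):
+    # for `def f(a, b=1, *, c=2, d)` the groups are ARG_POS=[a], ARG_OPT=[b],
+    # ARG_NAMED_OPT=[c], ARG_NAMED=[d]; the reordered list is [a, b, c, d] and
+    # make_format_string("f", groups) would yield "O|O$O@O:f" (one 'O' per
+    # argument, '|' before optionals, '$' before optional keyword-only args,
+    # '@' before required keyword-only args).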
+ groups = make_arg_groups(real_args) + reordered_args = reorder_arg_groups(groups) + + emitter.emit_line(make_static_kwlist(reordered_args)) + fmt = make_format_string(fn.name, groups) + # Define the arguments the function accepts (but no types yet) + emitter.emit_line(f'static CPyArg_Parser parser = {{"{fmt}", kwlist, 0}};') + + for arg in real_args: + emitter.emit_line( + "PyObject *obj_{}{};".format(arg.name, " = NULL" if arg.optional else "") + ) + + cleanups = [f"CPy_DECREF(obj_{arg.name});" for arg in groups[ARG_STAR] + groups[ARG_STAR2]] + + arg_ptrs: list[str] = [] + if groups[ARG_STAR] or groups[ARG_STAR2]: + arg_ptrs += [f"&obj_{groups[ARG_STAR][0].name}" if groups[ARG_STAR] else "NULL"] + arg_ptrs += [f"&obj_{groups[ARG_STAR2][0].name}" if groups[ARG_STAR2] else "NULL"] + arg_ptrs += [f"&obj_{arg.name}" for arg in reordered_args] + + if fn.name == "__call__": + nargs = "PyVectorcall_NARGS(nargs)" + else: + nargs = "nargs" + parse_fn = "CPyArg_ParseStackAndKeywords" + # Special case some common signatures + if not real_args: + # No args + parse_fn = "CPyArg_ParseStackAndKeywordsNoArgs" + elif len(real_args) == 1 and len(groups[ARG_POS]) == 1: + # Single positional arg + parse_fn = "CPyArg_ParseStackAndKeywordsOneArg" + elif len(real_args) == len(groups[ARG_POS]) + len(groups[ARG_OPT]): + # No keyword-only args, *args or **kwargs + parse_fn = "CPyArg_ParseStackAndKeywordsSimple" + emitter.emit_lines( + "if (!{}(args, {}, kwnames, &parser{})) {{".format( + parse_fn, nargs, "".join(", " + n for n in arg_ptrs) + ), + "return NULL;", + "}", + ) + for i in range(fn.sig.num_bitmap_args): + name = bitmap_name(i) + emitter.emit_line(f"{BITMAP_TYPE} {name} = 0;") + traceback_code = generate_traceback_code(fn, emitter, source_path, module_name) + generate_wrapper_core( + fn, + emitter, + groups[ARG_OPT] + groups[ARG_NAMED_OPT], + cleanups=cleanups, + traceback_code=traceback_code, + ) + + emitter.emit_line("}") + + +# Legacy generic wrapper functions +# +# These take a self object, a Python tuple of positional arguments, +# and a dict of keyword arguments. These are a lot slower than +# vectorcall wrappers, especially in calls involving keyword +# arguments. + + +def legacy_wrapper_function_header(fn: FuncIR, names: NameGenerator) -> str: + return "PyObject *{prefix}{name}(PyObject *self, PyObject *args, PyObject *kw)".format( + prefix=PREFIX, name=fn.cname(names) + ) + + +def generate_legacy_wrapper_function( + fn: FuncIR, emitter: Emitter, source_path: str, module_name: str +) -> None: + """Generates a CPython-compatible legacy wrapper for a native function. + + In particular, this handles unboxing the arguments, calling the native function, and + then boxing the return value. + """ + emitter.emit_line(f"{legacy_wrapper_function_header(fn, emitter.names)} {{") + + # If fn is a method, then the first argument is a self param + real_args = list(fn.args) + if fn.sig.num_bitmap_args: + real_args = real_args[: -fn.sig.num_bitmap_args] + if fn.class_name and (fn.decl.name == "__new__" or fn.decl.kind != FUNC_STATICMETHOD): + arg = real_args.pop(0) + emitter.emit_line(f"PyObject *obj_{arg.name} = self;") + + # Need to order args as: required, optional, kwonly optional, kwonly required + # This is because CPyArg_ParseTupleAndKeywords format string requires + # them grouped in that way. 
+ groups = make_arg_groups(real_args) + reordered_args = reorder_arg_groups(groups) + + emitter.emit_line(make_static_kwlist(reordered_args)) + for arg in real_args: + emitter.emit_line( + "PyObject *obj_{}{};".format(arg.name, " = NULL" if arg.optional else "") + ) + + cleanups = [f"CPy_DECREF(obj_{arg.name});" for arg in groups[ARG_STAR] + groups[ARG_STAR2]] + + arg_ptrs: list[str] = [] + if groups[ARG_STAR] or groups[ARG_STAR2]: + arg_ptrs += [f"&obj_{groups[ARG_STAR][0].name}" if groups[ARG_STAR] else "NULL"] + arg_ptrs += [f"&obj_{groups[ARG_STAR2][0].name}" if groups[ARG_STAR2] else "NULL"] + arg_ptrs += [f"&obj_{arg.name}" for arg in reordered_args] + + emitter.emit_lines( + 'if (!CPyArg_ParseTupleAndKeywords(args, kw, "{}", "{}", kwlist{})) {{'.format( + make_format_string(None, groups), fn.name, "".join(", " + n for n in arg_ptrs) + ), + "return NULL;", + "}", + ) + for i in range(fn.sig.num_bitmap_args): + name = bitmap_name(i) + emitter.emit_line(f"{BITMAP_TYPE} {name} = 0;") + traceback_code = generate_traceback_code(fn, emitter, source_path, module_name) + generate_wrapper_core( + fn, + emitter, + groups[ARG_OPT] + groups[ARG_NAMED_OPT], + cleanups=cleanups, + traceback_code=traceback_code, + ) + + emitter.emit_line("}") + + +# Specialized wrapper functions + + +def generate_dunder_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: + """Generates a wrapper for native __dunder__ methods to be able to fit into the mapping + protocol slot. This specifically means that the arguments are taken as *PyObjects and returned + as *PyObjects. + """ + gen = WrapperGenerator(cl, emitter) + gen.set_target(fn) + gen.emit_header() + gen.emit_arg_processing() + gen.emit_call() + gen.finish() + return gen.wrapper_name() + + +def generate_ipow_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: + """Generate a wrapper for native __ipow__. + + Since __ipow__ fills a ternary slot, but almost no one defines __ipow__ to take three + arguments, the wrapper needs to tweaked to force it to accept three arguments. + """ + gen = WrapperGenerator(cl, emitter) + gen.set_target(fn) + assert len(fn.args) in (2, 3), "__ipow__ should only take 2 or 3 arguments" + gen.arg_names = ["self", "exp", "mod"] + gen.emit_header() + gen.emit_arg_processing() + handle_third_pow_argument( + fn, + emitter, + gen, + if_unsupported=[ + 'PyErr_SetString(PyExc_TypeError, "__ipow__ takes 2 positional arguments but 3 were given");', + "return NULL;", + ], + ) + gen.emit_call() + gen.finish() + return gen.wrapper_name() + + +def generate_bin_op_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: + """Generates a wrapper for a native binary dunder method. + + The same wrapper that handles the forward method (e.g. __add__) also handles + the corresponding reverse method (e.g. __radd__), if defined. + + Both arguments and the return value are PyObject *. + """ + gen = WrapperGenerator(cl, emitter) + gen.set_target(fn) + if fn.name in ("__pow__", "__rpow__"): + gen.arg_names = ["left", "right", "mod"] + else: + gen.arg_names = ["left", "right"] + wrapper_name = gen.wrapper_name() + + gen.emit_header() + if fn.name not in reverse_op_methods and fn.name in reverse_op_method_names: + # There's only a reverse operator method. + generate_bin_op_reverse_only_wrapper(fn, emitter, gen) + else: + rmethod = reverse_op_methods[fn.name] + fn_rev = cl.get_method(rmethod) + if fn_rev is None: + # There's only a forward operator method. 
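+            # (For example, the class defines __add__ but no __radd__; the
+            # wrapper below handles the forward call and falls back to the
+            # other operand's reverse dunder on an argument type mismatch.)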
+ generate_bin_op_forward_only_wrapper(fn, emitter, gen) + else: + # There's both a forward and a reverse operator method. + generate_bin_op_both_wrappers(cl, fn, fn_rev, emitter, gen) + return wrapper_name + + +def generate_bin_op_forward_only_wrapper( + fn: FuncIR, emitter: Emitter, gen: WrapperGenerator +) -> None: + gen.emit_arg_processing(error=GotoHandler("typefail"), raise_exception=False) + handle_third_pow_argument(fn, emitter, gen, if_unsupported=["goto typefail;"]) + gen.emit_call(not_implemented_handler="goto typefail;") + gen.emit_error_handling() + emitter.emit_label("typefail") + # If some argument has an incompatible type, treat this the same as + # returning NotImplemented, and try to call the reverse operator method. + # + # Note that in normal Python you'd instead of an explicit + # return of NotImplemented, but it doesn't generally work here + # the body won't be executed at all if there is an argument + # type check failure. + # + # The recommended way is to still use a type check in the + # body. This will only be used in interpreted mode: + # + # def __add__(self, other: int) -> Foo: + # if not isinstance(other, int): + # return NotImplemented + # ... + generate_bin_op_reverse_dunder_call(fn, emitter, reverse_op_methods[fn.name]) + gen.finish() + + +def generate_bin_op_reverse_only_wrapper( + fn: FuncIR, emitter: Emitter, gen: WrapperGenerator +) -> None: + gen.arg_names = ["right", "left"] + gen.emit_arg_processing(error=GotoHandler("typefail"), raise_exception=False) + handle_third_pow_argument(fn, emitter, gen, if_unsupported=["goto typefail;"]) + gen.emit_call() + gen.emit_error_handling() + emitter.emit_label("typefail") + emitter.emit_line("Py_INCREF(Py_NotImplemented);") + emitter.emit_line("return Py_NotImplemented;") + gen.finish() + + +def generate_bin_op_both_wrappers( + cl: ClassIR, fn: FuncIR, fn_rev: FuncIR, emitter: Emitter, gen: WrapperGenerator +) -> None: + # There's both a forward and a reverse operator method. First + # check if we should try calling the forward one. If the + # argument type check fails, fall back to the reverse method. + # + # Similar to above, we can't perfectly match Python semantics. + # In regular Python code you'd return NotImplemented if the + # operand has the wrong type, but in compiled code we'll never + # get to execute the type check. + emitter.emit_line( + "if (PyObject_IsInstance(obj_left, (PyObject *){})) {{".format( + emitter.type_struct_name(cl) + ) + ) + gen.emit_arg_processing(error=GotoHandler("typefail"), raise_exception=False) + handle_third_pow_argument(fn, emitter, gen, if_unsupported=["goto typefail2;"]) + # Ternary __rpow__ calls aren't a thing so immediately bail + # if ternary __pow__ returns NotImplemented. 
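+    # (CPython never dispatches to a reflected method for three-argument pow();
+    # e.g. pow(2, 3, 5) will not try type(3).__rpow__.)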
+ if fn.name == "__pow__" and len(fn.args) == 3: + fwd_not_implemented_handler = "goto typefail2;" + else: + fwd_not_implemented_handler = "goto typefail;" + gen.emit_call(not_implemented_handler=fwd_not_implemented_handler) + gen.emit_error_handling() + emitter.emit_line("}") + emitter.emit_label("typefail") + emitter.emit_line( + "if (PyObject_IsInstance(obj_right, (PyObject *){})) {{".format( + emitter.type_struct_name(cl) + ) + ) + gen.set_target(fn_rev) + gen.arg_names = ["right", "left"] + gen.emit_arg_processing(error=GotoHandler("typefail2"), raise_exception=False) + handle_third_pow_argument(fn_rev, emitter, gen, if_unsupported=["goto typefail2;"]) + gen.emit_call() + gen.emit_error_handling() + emitter.emit_line("} else {") + generate_bin_op_reverse_dunder_call(fn, emitter, fn_rev.name) + emitter.emit_line("}") + emitter.emit_label("typefail2") + emitter.emit_line("Py_INCREF(Py_NotImplemented);") + emitter.emit_line("return Py_NotImplemented;") + gen.finish() + + +def generate_bin_op_reverse_dunder_call(fn: FuncIR, emitter: Emitter, rmethod: str) -> None: + if fn.name in ("__pow__", "__rpow__"): + # Ternary pow() will never call the reverse dunder. + emitter.emit_line("if (obj_mod == Py_None) {") + emitter.emit_line(f"_Py_IDENTIFIER({rmethod});") + emitter.emit_line( + 'return CPy_CallReverseOpMethod(obj_left, obj_right, "{}", &PyId_{});'.format( + op_methods_to_symbols[fn.name], rmethod + ) + ) + if fn.name in ("__pow__", "__rpow__"): + emitter.emit_line("} else {") + emitter.emit_line("Py_INCREF(Py_NotImplemented);") + emitter.emit_line("return Py_NotImplemented;") + emitter.emit_line("}") + + +def handle_third_pow_argument( + fn: FuncIR, emitter: Emitter, gen: WrapperGenerator, *, if_unsupported: list[str] +) -> None: + if fn.name not in ("__pow__", "__rpow__", "__ipow__"): + return + + if (fn.name in ("__pow__", "__ipow__") and len(fn.args) == 2) or fn.name == "__rpow__": + # If the power dunder only supports two arguments and the third + # argument (AKA mod) is set to a non-default value, simply bail. + # + # Importantly, this prevents any ternary __rpow__ calls from + # happening (as per the language specification). + emitter.emit_line("if (obj_mod != Py_None) {") + for line in if_unsupported: + emitter.emit_line(line) + emitter.emit_line("}") + # The slot wrapper will receive three arguments, but the call only + # supports two so make sure that the third argument isn't passed + # along. This is needed as two-argument __(i)pow__ is allowed and + # rather common. 
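+    # E.g. a plain `def __pow__(self, other)` still reaches this wrapper through
+    # the ternary nb_power slot with a (left, right, mod) argument list, so the
+    # trailing "mod" name is dropped before the native call is emitted.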
+ if len(gen.arg_names) == 3: + gen.arg_names.pop() + + +RICHCOMPARE_OPS = { + "__lt__": "Py_LT", + "__gt__": "Py_GT", + "__le__": "Py_LE", + "__ge__": "Py_GE", + "__eq__": "Py_EQ", + "__ne__": "Py_NE", +} + + +def generate_richcompare_wrapper(cl: ClassIR, emitter: Emitter) -> str | None: + """Generates a wrapper for richcompare dunder methods.""" + # Sort for determinism on Python 3.5 + matches = sorted(name for name in RICHCOMPARE_OPS if cl.has_method(name)) + if not matches: + return None + + name = f"{DUNDER_PREFIX}_RichCompare_{cl.name_prefix(emitter.names)}" + emitter.emit_line( + "static PyObject *{name}(PyObject *obj_lhs, PyObject *obj_rhs, int op) {{".format( + name=name + ) + ) + emitter.emit_line("switch (op) {") + for func in matches: + emitter.emit_line(f"case {RICHCOMPARE_OPS[func]}: {{") + method = cl.get_method(func) + assert method is not None + generate_wrapper_core(method, emitter, arg_names=["lhs", "rhs"]) + emitter.emit_line("}") + emitter.emit_line("}") + + emitter.emit_line("Py_INCREF(Py_NotImplemented);") + emitter.emit_line("return Py_NotImplemented;") + + emitter.emit_line("}") + + return name + + +def generate_get_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: + """Generates a wrapper for native __get__ methods.""" + name = f"{DUNDER_PREFIX}{fn.name}{cl.name_prefix(emitter.names)}" + emitter.emit_line( + "static PyObject *{name}(PyObject *self, PyObject *instance, PyObject *owner) {{".format( + name=name + ) + ) + emitter.emit_line("instance = instance ? instance : Py_None;") + emitter.emit_line(f"return {NATIVE_PREFIX}{fn.cname(emitter.names)}(self, instance, owner);") + emitter.emit_line("}") + + return name + + +def generate_hash_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: + """Generates a wrapper for native __hash__ methods.""" + name = f"{DUNDER_PREFIX}{fn.name}{cl.name_prefix(emitter.names)}" + emitter.emit_line(f"static Py_ssize_t {name}(PyObject *self) {{") + emitter.emit_line( + "{}retval = {}{}{}(self);".format( + emitter.ctype_spaced(fn.ret_type), + emitter.get_group_prefix(fn.decl), + NATIVE_PREFIX, + fn.cname(emitter.names), + ) + ) + emitter.emit_error_check("retval", fn.ret_type, "return -1;") + if is_int_rprimitive(fn.ret_type): + emitter.emit_line("Py_ssize_t val = CPyTagged_AsSsize_t(retval);") + else: + emitter.emit_line("Py_ssize_t val = PyLong_AsSsize_t(retval);") + emitter.emit_dec_ref("retval", fn.ret_type) + emitter.emit_line("if (PyErr_Occurred()) return -1;") + # We can't return -1 from a hash function.. 
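+    # CPython reserves -1 as the error return value of tp_hash, so a legitimate
+    # hash of -1 is remapped to -2 (the same convention CPython itself uses,
+    # e.g. hash(-1) == -2).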
+ emitter.emit_line("if (val == -1) return -2;") + emitter.emit_line("return val;") + emitter.emit_line("}") + + return name + + +def generate_len_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: + """Generates a wrapper for native __len__ methods.""" + name = f"{DUNDER_PREFIX}{fn.name}{cl.name_prefix(emitter.names)}" + emitter.emit_line(f"static Py_ssize_t {name}(PyObject *self) {{") + emitter.emit_line( + "{}retval = {}{}{}(self);".format( + emitter.ctype_spaced(fn.ret_type), + emitter.get_group_prefix(fn.decl), + NATIVE_PREFIX, + fn.cname(emitter.names), + ) + ) + emitter.emit_error_check("retval", fn.ret_type, "return -1;") + if is_int_rprimitive(fn.ret_type): + emitter.emit_line("Py_ssize_t val = CPyTagged_AsSsize_t(retval);") + else: + emitter.emit_line("Py_ssize_t val = PyLong_AsSsize_t(retval);") + emitter.emit_dec_ref("retval", fn.ret_type) + emitter.emit_line("if (PyErr_Occurred()) return -1;") + emitter.emit_line("return val;") + emitter.emit_line("}") + + return name + + +def generate_bool_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: + """Generates a wrapper for native __bool__ methods.""" + name = f"{DUNDER_PREFIX}{fn.name}{cl.name_prefix(emitter.names)}" + emitter.emit_line(f"static int {name}(PyObject *self) {{") + emitter.emit_line( + "{}val = {}{}(self);".format( + emitter.ctype_spaced(fn.ret_type), NATIVE_PREFIX, fn.cname(emitter.names) + ) + ) + emitter.emit_error_check("val", fn.ret_type, "return -1;") + # This wouldn't be that hard to fix but it seems unimportant and + # getting error handling and unboxing right would be fiddly. (And + # way easier to do in IR!) + assert is_bool_rprimitive(fn.ret_type), "Only bool return supported for __bool__" + emitter.emit_line("return val;") + emitter.emit_line("}") + + return name + + +def generate_del_item_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: + """Generates a wrapper for native __delitem__. + + This is only called from a combined __delitem__/__setitem__ wrapper. + """ + name = "{}{}{}".format(DUNDER_PREFIX, "__delitem__", cl.name_prefix(emitter.names)) + input_args = ", ".join(f"PyObject *obj_{arg.name}" for arg in fn.args) + emitter.emit_line(f"static int {name}({input_args}) {{") + generate_set_del_item_wrapper_inner(fn, emitter, fn.args) + return name + + +def generate_set_del_item_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: + """Generates a wrapper for native __setitem__ method (also works for __delitem__). + + This is used with the mapping protocol slot. Arguments are taken as *PyObjects and we + return a negative C int on error. + + Create a separate wrapper function for __delitem__ as needed and have the + __setitem__ wrapper call it if the value is NULL. Return the name + of the outer (__setitem__) wrapper. + """ + method_cls = cl.get_method_and_class("__delitem__") + del_name = None + if method_cls and method_cls[1] == cl: + # Generate a separate wrapper for __delitem__ + del_name = generate_del_item_wrapper(cl, method_cls[0], emitter) + + args = fn.args + if fn.name == "__delitem__": + # Add an extra argument for value that we expect to be NULL. 
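+        # (The mp_ass_subscript slot serves both __setitem__ and __delitem__;
+        # CPython signals deletion by passing a NULL value, hence the
+        # placeholder third argument below.)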
+ args = list(args) + [RuntimeArg("___value", object_rprimitive, ARG_POS)] + + name = "{}{}{}".format(DUNDER_PREFIX, "__setitem__", cl.name_prefix(emitter.names)) + input_args = ", ".join(f"PyObject *obj_{arg.name}" for arg in args) + emitter.emit_line(f"static int {name}({input_args}) {{") + + # First check if this is __delitem__ + emitter.emit_line(f"if (obj_{args[2].name} == NULL) {{") + if del_name is not None: + # We have a native implementation, so call it + emitter.emit_line(f"return {del_name}(obj_{args[0].name}, obj_{args[1].name});") + else: + # Try to call superclass method instead + emitter.emit_line(f"PyObject *super = CPy_Super(CPyModule_builtins, obj_{args[0].name});") + emitter.emit_line("if (super == NULL) return -1;") + emitter.emit_line( + 'PyObject *result = PyObject_CallMethod(super, "__delitem__", "O", obj_{});'.format( + args[1].name + ) + ) + emitter.emit_line("Py_DECREF(super);") + emitter.emit_line("Py_XDECREF(result);") + emitter.emit_line("return result == NULL ? -1 : 0;") + emitter.emit_line("}") + + method_cls = cl.get_method_and_class("__setitem__") + if method_cls and method_cls[1] == cl: + generate_set_del_item_wrapper_inner(fn, emitter, args) + else: + emitter.emit_line(f"PyObject *super = CPy_Super(CPyModule_builtins, obj_{args[0].name});") + emitter.emit_line("if (super == NULL) return -1;") + emitter.emit_line("PyObject *result;") + + if method_cls is None and cl.builtin_base is None: + msg = f"'{cl.name}' object does not support item assignment" + emitter.emit_line(f'PyErr_SetString(PyExc_TypeError, "{msg}");') + emitter.emit_line("result = NULL;") + else: + # A base class may have __setitem__ + emitter.emit_line( + 'result = PyObject_CallMethod(super, "__setitem__", "OO", obj_{}, obj_{});'.format( + args[1].name, args[2].name + ) + ) + emitter.emit_line("Py_DECREF(super);") + emitter.emit_line("Py_XDECREF(result);") + emitter.emit_line("return result == NULL ? 
-1 : 0;") + emitter.emit_line("}") + return name + + +def generate_set_del_item_wrapper_inner( + fn: FuncIR, emitter: Emitter, args: Sequence[RuntimeArg] +) -> None: + for arg in args: + generate_arg_check(arg.name, arg.type, emitter, GotoHandler("fail")) + native_args = ", ".join(f"arg_{arg.name}" for arg in args) + emitter.emit_line( + "{}val = {}{}({});".format( + emitter.ctype_spaced(fn.ret_type), NATIVE_PREFIX, fn.cname(emitter.names), native_args + ) + ) + emitter.emit_error_check("val", fn.ret_type, "goto fail;") + emitter.emit_dec_ref("val", fn.ret_type) + emitter.emit_line("return 0;") + emitter.emit_label("fail") + emitter.emit_line("return -1;") + emitter.emit_line("}") + + +def generate_contains_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: + """Generates a wrapper for a native __contains__ method.""" + name = f"{DUNDER_PREFIX}{fn.name}{cl.name_prefix(emitter.names)}" + emitter.emit_line(f"static int {name}(PyObject *self, PyObject *obj_item) {{") + generate_arg_check("item", fn.args[1].type, emitter, ReturnHandler("-1")) + emitter.emit_line( + "{}val = {}{}(self, arg_item);".format( + emitter.ctype_spaced(fn.ret_type), NATIVE_PREFIX, fn.cname(emitter.names) + ) + ) + emitter.emit_error_check("val", fn.ret_type, "return -1;") + if is_bool_rprimitive(fn.ret_type): + emitter.emit_line("return val;") + else: + emitter.emit_line("int boolval = PyObject_IsTrue(val);") + emitter.emit_dec_ref("val", fn.ret_type) + emitter.emit_line("return boolval;") + emitter.emit_line("}") + + return name + + +# Helpers + + +def generate_wrapper_core( + fn: FuncIR, + emitter: Emitter, + optional_args: list[RuntimeArg] | None = None, + arg_names: list[str] | None = None, + cleanups: list[str] | None = None, + traceback_code: str | None = None, +) -> None: + """Generates the core part of a wrapper function for a native function. + + This expects each argument as a PyObject * named obj_{arg} as a precondition. + It converts the PyObject *s to the necessary types, checking and unboxing if necessary, + makes the call, then boxes the result if necessary and returns it. + """ + gen = WrapperGenerator(None, emitter) + gen.set_target(fn) + if arg_names: + gen.arg_names = arg_names + gen.cleanups = cleanups or [] + gen.optional_args = optional_args or [] + gen.traceback_code = traceback_code or "" + + error = ReturnHandler("NULL") if not gen.use_goto() else GotoHandler("fail") + gen.emit_arg_processing(error=error) + gen.emit_call() + gen.emit_error_handling() + + +def generate_arg_check( + name: str, + typ: RType, + emitter: Emitter, + error: ErrorHandler | None = None, + *, + optional: bool = False, + raise_exception: bool = True, + bitmap_arg_index: int = 0, +) -> None: + """Insert a runtime check for argument and unbox if necessary. + + The object is named PyObject *obj_{}. This is expected to generate + a value of name arg_{} (unboxed if necessary). For each primitive a runtime + check ensures the correct type. + """ + error = error or AssignHandler() + if typ.is_unboxed: + if typ.error_overlap and optional: + # Update bitmap is value is provided. 
+ init = emitter.c_undefined_value(typ) + emitter.emit_line(f"{emitter.ctype(typ)} arg_{name} = {init};") + emitter.emit_line(f"if (obj_{name} != NULL) {{") + bitmap = bitmap_name(bitmap_arg_index // BITMAP_BITS) + emitter.emit_line(f"{bitmap} |= 1 << {bitmap_arg_index & (BITMAP_BITS - 1)};") + emitter.emit_unbox( + f"obj_{name}", + f"arg_{name}", + typ, + declare_dest=False, + raise_exception=raise_exception, + error=error, + borrow=True, + ) + emitter.emit_line("}") + else: + # Borrow when unboxing to avoid reference count manipulation. + emitter.emit_unbox( + f"obj_{name}", + f"arg_{name}", + typ, + declare_dest=True, + raise_exception=raise_exception, + error=error, + borrow=True, + optional=optional, + ) + elif is_object_rprimitive(typ): + # Object is trivial since any object is valid + if optional: + emitter.emit_line(f"PyObject *arg_{name};") + emitter.emit_line(f"if (obj_{name} == NULL) {{") + emitter.emit_line(f"arg_{name} = {emitter.c_error_value(typ)};") + emitter.emit_lines("} else {", f"arg_{name} = obj_{name}; ", "}") + else: + emitter.emit_line(f"PyObject *arg_{name} = obj_{name};") + else: + emitter.emit_cast( + f"obj_{name}", + f"arg_{name}", + typ, + declare_dest=True, + raise_exception=raise_exception, + error=error, + optional=optional, + ) + + +class WrapperGenerator: + """Helper that simplifies the generation of wrapper functions.""" + + # TODO: Use this for more wrappers + + def __init__(self, cl: ClassIR | None, emitter: Emitter) -> None: + self.cl = cl + self.emitter = emitter + self.cleanups: list[str] = [] + self.optional_args: list[RuntimeArg] = [] + self.traceback_code = "" + + def set_target(self, fn: FuncIR) -> None: + """Set the wrapped function. + + It's fine to modify the attributes initialized here later to customize + the wrapper function. + """ + self.target_name = fn.name + self.target_cname = fn.cname(self.emitter.names) + self.num_bitmap_args = fn.sig.num_bitmap_args + if self.num_bitmap_args: + self.args = fn.args[: -self.num_bitmap_args] + else: + self.args = fn.args + self.arg_names = [arg.name for arg in self.args] + self.ret_type = fn.ret_type + + def wrapper_name(self) -> str: + """Return the name of the wrapper function.""" + return "{}{}{}".format( + DUNDER_PREFIX, + self.target_name, + self.cl.name_prefix(self.emitter.names) if self.cl else "", + ) + + def use_goto(self) -> bool: + """Do we use a goto for error handling (instead of straight return)?""" + return bool(self.cleanups or self.traceback_code) + + def emit_header(self) -> None: + """Emit the function header of the wrapper implementation.""" + input_args = ", ".join(f"PyObject *obj_{arg}" for arg in self.arg_names) + self.emitter.emit_line( + "static PyObject *{name}({input_args}) {{".format( + name=self.wrapper_name(), input_args=input_args + ) + ) + + def emit_arg_processing( + self, error: ErrorHandler | None = None, raise_exception: bool = True + ) -> None: + """Emit validation and unboxing of arguments.""" + error = error or self.error() + bitmap_arg_index = 0 + for arg_name, arg in zip(self.arg_names, self.args): + # Suppress the argument check for *args/**kwargs, since we know it must be right. 
+ typ = arg.type if arg.kind not in (ARG_STAR, ARG_STAR2) else object_rprimitive + optional = arg in self.optional_args + generate_arg_check( + arg_name, + typ, + self.emitter, + error, + raise_exception=raise_exception, + optional=optional, + bitmap_arg_index=bitmap_arg_index, + ) + if optional and typ.error_overlap: + bitmap_arg_index += 1 + + def emit_call(self, not_implemented_handler: str = "") -> None: + """Emit call to the wrapper function. + + If not_implemented_handler is non-empty, use this C code to handle + a NotImplemented return value (if it's possible based on the return type). + """ + native_args = ", ".join(f"arg_{arg}" for arg in self.arg_names) + if self.num_bitmap_args: + bitmap_args = ", ".join( + [bitmap_name(i) for i in reversed(range(self.num_bitmap_args))] + ) + native_args = f"{native_args}, {bitmap_args}" + + ret_type = self.ret_type + emitter = self.emitter + if ret_type.is_unboxed or self.use_goto(): + # TODO: The Py_RETURN macros return the correct PyObject * with reference count + # handling. Are they relevant? + emitter.emit_line( + "{}retval = {}{}({});".format( + emitter.ctype_spaced(ret_type), NATIVE_PREFIX, self.target_cname, native_args + ) + ) + emitter.emit_lines(*self.cleanups) + if ret_type.is_unboxed: + emitter.emit_error_check("retval", ret_type, "return NULL;") + emitter.emit_box("retval", "retbox", ret_type, declare_dest=True) + + emitter.emit_line("return {};".format("retbox" if ret_type.is_unboxed else "retval")) + else: + if not_implemented_handler and not isinstance(ret_type, RInstance): + # The return value type may overlap with NotImplemented. + emitter.emit_line( + "PyObject *retbox = {}{}({});".format( + NATIVE_PREFIX, self.target_cname, native_args + ) + ) + emitter.emit_lines( + "if (retbox == Py_NotImplemented) {", + not_implemented_handler, + "}", + "return retbox;", + ) + else: + emitter.emit_line(f"return {NATIVE_PREFIX}{self.target_cname}({native_args});") + # TODO: Tracebacks? + + def error(self) -> ErrorHandler: + """Figure out how to deal with errors in the wrapper.""" + if self.cleanups or self.traceback_code: + # We'll have a label at the end with error handling code. + return GotoHandler("fail") + else: + # Nothing special needs to done to handle errors, so just return. + return ReturnHandler("NULL") + + def emit_error_handling(self) -> None: + """Emit error handling block at the end of the wrapper, if needed.""" + emitter = self.emitter + if self.use_goto(): + emitter.emit_label("fail") + emitter.emit_lines(*self.cleanups) + if self.traceback_code: + emitter.emit_line(self.traceback_code) + emitter.emit_line("return NULL;") + + def finish(self) -> None: + self.emitter.emit_line("}") diff --git a/.venv/lib/python3.12/site-packages/mypyc/codegen/literals.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/codegen/literals.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..b8779dc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/codegen/literals.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/codegen/literals.py b/.venv/lib/python3.12/site-packages/mypyc/codegen/literals.py new file mode 100644 index 0000000..4cd41e0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/codegen/literals.py @@ -0,0 +1,302 @@ +from __future__ import annotations + +from typing import Final, Union +from typing_extensions import TypeGuard + +# Supported Python literal types. 
All tuple / frozenset items must have supported +# literal types as well, but we can't represent the type precisely. +LiteralValue = Union[ + str, bytes, int, bool, float, complex, tuple[object, ...], frozenset[object], None +] + + +def _is_literal_value(obj: object) -> TypeGuard[LiteralValue]: + return isinstance(obj, (str, bytes, int, float, complex, tuple, frozenset, type(None))) + + +# Some literals are singletons and handled specially (None, False and True) +NUM_SINGLETONS: Final = 3 + + +class Literals: + """Collection of literal values used in a compilation group and related helpers.""" + + def __init__(self) -> None: + # Each dict maps value to literal index (0, 1, ...) + self.str_literals: dict[str, int] = {} + self.bytes_literals: dict[bytes, int] = {} + self.int_literals: dict[int, int] = {} + self.float_literals: dict[float, int] = {} + self.complex_literals: dict[complex, int] = {} + self.tuple_literals: dict[tuple[object, ...], int] = {} + self.frozenset_literals: dict[frozenset[object], int] = {} + + def record_literal(self, value: LiteralValue) -> None: + """Ensure that the literal value is available in generated code.""" + if value is None or value is True or value is False: + # These are special cased and always present + return + if isinstance(value, str): + str_literals = self.str_literals + if value not in str_literals: + str_literals[value] = len(str_literals) + elif isinstance(value, bytes): + bytes_literals = self.bytes_literals + if value not in bytes_literals: + bytes_literals[value] = len(bytes_literals) + elif isinstance(value, int): + int_literals = self.int_literals + if value not in int_literals: + int_literals[value] = len(int_literals) + elif isinstance(value, float): + float_literals = self.float_literals + if value not in float_literals: + float_literals[value] = len(float_literals) + elif isinstance(value, complex): + complex_literals = self.complex_literals + if value not in complex_literals: + complex_literals[value] = len(complex_literals) + elif isinstance(value, tuple): + tuple_literals = self.tuple_literals + if value not in tuple_literals: + for item in value: + assert _is_literal_value(item) + self.record_literal(item) + tuple_literals[value] = len(tuple_literals) + elif isinstance(value, frozenset): + frozenset_literals = self.frozenset_literals + if value not in frozenset_literals: + for item in value: + assert _is_literal_value(item) + self.record_literal(item) + frozenset_literals[value] = len(frozenset_literals) + else: + assert False, "invalid literal: %r" % value + + def literal_index(self, value: LiteralValue) -> int: + """Return the index to the literals array for given value.""" + # The array contains first None and booleans, followed by all str values, + # followed by bytes values, etc. 
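+        # Worked example (hypothetical contents): with str_literals == {"x": 0, "y": 1}
+        # and int_literals == {42: 0} (and no other literals), literal_index("y")
+        # returns 3 + 1 == 4 and literal_index(42) returns 3 + 2 + 0 == 5.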
+        if value is None:
+            return 0
+        elif value is False:
+            return 1
+        elif value is True:
+            return 2
+        n = NUM_SINGLETONS
+        if isinstance(value, str):
+            return n + self.str_literals[value]
+        n += len(self.str_literals)
+        if isinstance(value, bytes):
+            return n + self.bytes_literals[value]
+        n += len(self.bytes_literals)
+        if isinstance(value, int):
+            return n + self.int_literals[value]
+        n += len(self.int_literals)
+        if isinstance(value, float):
+            return n + self.float_literals[value]
+        n += len(self.float_literals)
+        if isinstance(value, complex):
+            return n + self.complex_literals[value]
+        n += len(self.complex_literals)
+        if isinstance(value, tuple):
+            return n + self.tuple_literals[value]
+        n += len(self.tuple_literals)
+        if isinstance(value, frozenset):
+            return n + self.frozenset_literals[value]
+        assert False, "invalid literal: %r" % value
+
+    def num_literals(self) -> int:
+        # The first three are for None, True and False
+        return (
+            NUM_SINGLETONS
+            + len(self.str_literals)
+            + len(self.bytes_literals)
+            + len(self.int_literals)
+            + len(self.float_literals)
+            + len(self.complex_literals)
+            + len(self.tuple_literals)
+            + len(self.frozenset_literals)
+        )
+
+    # The following methods return the C encodings of literal values
+    # of different types
+
+    def encoded_str_values(self) -> list[bytes]:
+        return _encode_str_values(self.str_literals)
+
+    def encoded_int_values(self) -> list[bytes]:
+        return _encode_int_values(self.int_literals)
+
+    def encoded_bytes_values(self) -> list[bytes]:
+        return _encode_bytes_values(self.bytes_literals)
+
+    def encoded_float_values(self) -> list[str]:
+        return _encode_float_values(self.float_literals)
+
+    def encoded_complex_values(self) -> list[str]:
+        return _encode_complex_values(self.complex_literals)
+
+    def encoded_tuple_values(self) -> list[str]:
+        return self._encode_collection_values(self.tuple_literals)
+
+    def encoded_frozenset_values(self) -> list[str]:
+        return self._encode_collection_values(self.frozenset_literals)
+
+    def _encode_collection_values(
+        self, values: dict[tuple[object, ...], int] | dict[frozenset[object], int]
+    ) -> list[str]:
+        """Encode tuple/frozenset values into a C array.
+
+        The format of the result is like this:
+
+            <number of collections>
+            <length of collection 1>
+            <literal index of each item of collection 1>
+            ...
+            <length of collection 2>
+            ...
+ """ + value_by_index = {index: value for value, index in values.items()} + result = [] + count = len(values) + result.append(str(count)) + for i in range(count): + value = value_by_index[i] + result.append(str(len(value))) + for item in value: + assert _is_literal_value(item) + index = self.literal_index(item) + result.append(str(index)) + return result + + +def _encode_str_values(values: dict[str, int]) -> list[bytes]: + value_by_index = {index: value for value, index in values.items()} + result = [] + line: list[bytes] = [] + line_len = 0 + for i in range(len(values)): + value = value_by_index[i] + c_literal = format_str_literal(value) + c_len = len(c_literal) + if line_len > 0 and line_len + c_len > 70: + result.append(format_int(len(line)) + b"".join(line)) + line = [] + line_len = 0 + line.append(c_literal) + line_len += c_len + if line: + result.append(format_int(len(line)) + b"".join(line)) + result.append(b"") + return result + + +def _encode_bytes_values(values: dict[bytes, int]) -> list[bytes]: + value_by_index = {index: value for value, index in values.items()} + result = [] + line: list[bytes] = [] + line_len = 0 + for i in range(len(values)): + value = value_by_index[i] + c_init = format_int(len(value)) + c_len = len(c_init) + len(value) + if line_len > 0 and line_len + c_len > 70: + result.append(format_int(len(line)) + b"".join(line)) + line = [] + line_len = 0 + line.append(c_init + value) + line_len += c_len + if line: + result.append(format_int(len(line)) + b"".join(line)) + result.append(b"") + return result + + +def format_int(n: int) -> bytes: + """Format an integer using a variable-length binary encoding.""" + if n < 128: + a = [n] + else: + a = [] + while n > 0: + a.insert(0, n & 0x7F) + n >>= 7 + for i in range(len(a) - 1): + # If the highest bit is set, more 7-bit digits follow + a[i] |= 0x80 + return bytes(a) + + +def format_str_literal(s: str) -> bytes: + utf8 = s.encode("utf-8", errors="surrogatepass") + return format_int(len(utf8)) + utf8 + + +def _encode_int_values(values: dict[int, int]) -> list[bytes]: + """Encode int values into C strings. + + Values are stored in base 10 and separated by 0 bytes. + """ + value_by_index = {index: value for value, index in values.items()} + result = [] + line: list[bytes] = [] + line_len = 0 + for i in range(len(values)): + value = value_by_index[i] + encoded = b"%d" % value + if line_len > 0 and line_len + len(encoded) > 70: + result.append(format_int(len(line)) + b"\0".join(line)) + line = [] + line_len = 0 + line.append(encoded) + line_len += len(encoded) + if line: + result.append(format_int(len(line)) + b"\0".join(line)) + result.append(b"") + return result + + +def float_to_c(x: float) -> str: + """Return C literal representation of a float value.""" + s = str(x) + if s == "inf": + return "INFINITY" + elif s == "-inf": + return "-INFINITY" + elif s == "nan": + return "NAN" + return s + + +def _encode_float_values(values: dict[float, int]) -> list[str]: + """Encode float values into a C array values. + + The result contains the number of values followed by individual values. + """ + value_by_index = {index: value for value, index in values.items()} + result = [] + num = len(values) + result.append(str(num)) + for i in range(num): + value = value_by_index[i] + result.append(float_to_c(value)) + return result + + +def _encode_complex_values(values: dict[complex, int]) -> list[str]: + """Encode float values into a C array values. 
+ + The result contains the number of values followed by pairs of doubles + representing complex numbers. + """ + value_by_index = {index: value for value, index in values.items()} + result = [] + num = len(values) + result.append(str(num)) + for i in range(num): + value = value_by_index[i] + result.append(float_to_c(value.real)) + result.append(float_to_c(value.imag)) + return result diff --git a/.venv/lib/python3.12/site-packages/mypyc/common.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/common.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..add170c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/common.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/common.py b/.venv/lib/python3.12/site-packages/mypyc/common.py new file mode 100644 index 0000000..2de63c0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/common.py @@ -0,0 +1,140 @@ +from __future__ import annotations + +import sys +import sysconfig +from typing import Any, Final + +from mypy.util import unnamed_function + +PREFIX: Final = "CPyPy_" # Python wrappers +NATIVE_PREFIX: Final = "CPyDef_" # Native functions etc. +DUNDER_PREFIX: Final = "CPyDunder_" # Wrappers for exposing dunder methods to the API +REG_PREFIX: Final = "cpy_r_" # Registers +STATIC_PREFIX: Final = "CPyStatic_" # Static variables (for literals etc.) +TYPE_PREFIX: Final = "CPyType_" # Type object struct +MODULE_PREFIX: Final = "CPyModule_" # Cached modules +TYPE_VAR_PREFIX: Final = "CPyTypeVar_" # Type variables when using new-style Python 3.12 syntax +ATTR_PREFIX: Final = "_" # Attributes +FAST_PREFIX: Final = "__mypyc_fast_" # Optimized methods in non-extension classes + +ENV_ATTR_NAME: Final = "__mypyc_env__" +NEXT_LABEL_ATTR_NAME: Final = "__mypyc_next_label__" +TEMP_ATTR_NAME: Final = "__mypyc_temp__" +LAMBDA_NAME: Final = "__mypyc_lambda__" +PROPSET_PREFIX: Final = "__mypyc_setter__" +SELF_NAME: Final = "__mypyc_self__" +GENERATOR_ATTRIBUTE_PREFIX: Final = "__mypyc_generator_attribute__" + +# Max short int we accept as a literal is based on 32-bit platforms, +# so that we can just always emit the same code. + +TOP_LEVEL_NAME: Final = "__top_level__" # Special function representing module top level + +# Maximal number of subclasses for a class to trigger fast path in isinstance() checks. +FAST_ISINSTANCE_MAX_SUBCLASSES: Final = 2 + +# Size of size_t, if configured. +SIZEOF_SIZE_T_SYSCONFIG: Final = sysconfig.get_config_var("SIZEOF_SIZE_T") + +SIZEOF_SIZE_T: Final = ( + int(SIZEOF_SIZE_T_SYSCONFIG) + if SIZEOF_SIZE_T_SYSCONFIG is not None + else (sys.maxsize + 1).bit_length() // 8 +) + +IS_32_BIT_PLATFORM: Final = int(SIZEOF_SIZE_T) == 4 + +PLATFORM_SIZE = 4 if IS_32_BIT_PLATFORM else 8 + +# Maximum value for a short tagged integer. +MAX_SHORT_INT: Final = 2 ** (8 * int(SIZEOF_SIZE_T) - 2) - 1 + +# Minimum value for a short tagged integer. +MIN_SHORT_INT: Final = -(MAX_SHORT_INT) - 1 + +# Maximum value for a short tagged integer represented as a C integer literal. +# +# Note: Assume that the compiled code uses the same bit width as mypyc +MAX_LITERAL_SHORT_INT: Final = MAX_SHORT_INT +MIN_LITERAL_SHORT_INT: Final = -MAX_LITERAL_SHORT_INT - 1 + +# Description of the C type used to track the definedness of attributes and +# the presence of argument default values that have types with overlapping +# error values. Each tracked attribute/argument has a dedicated bit in the +# relevant bitmap. 
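+# For example, a native i64 attribute can hold any 64-bit value, so there is no
+# spare bit pattern left over to mean "undefined"; whether it has been assigned
+# is instead tracked by one bit in such a bitmap.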
+BITMAP_TYPE: Final = "uint32_t" +BITMAP_BITS: Final = 32 + +# Runtime C library files +RUNTIME_C_FILES: Final = [ + "init.c", + "getargs.c", + "getargsfast.c", + "int_ops.c", + "float_ops.c", + "str_ops.c", + "bytes_ops.c", + "list_ops.c", + "dict_ops.c", + "set_ops.c", + "tuple_ops.c", + "exc_ops.c", + "misc_ops.c", + "generic_ops.c", + "pythonsupport.c", +] + +# Python 3.12 introduced immortal objects, specified via a special reference count +# value. The reference counts of immortal objects are normally not modified, but it's +# not strictly wrong to modify them. See PEP 683 for more information, but note that +# some details in the PEP are out of date. +HAVE_IMMORTAL: Final = sys.version_info >= (3, 12) + +# Are we running on a free-threaded build (GIL disabled)? This implies that +# we are on Python 3.13 or later. +IS_FREE_THREADED: Final = bool(sysconfig.get_config_var("Py_GIL_DISABLED")) + + +JsonDict = dict[str, Any] + + +def shared_lib_name(group_name: str) -> str: + """Given a group name, return the actual name of its extension module. + + (This just adds a suffix to the final component.) + """ + return f"{group_name}__mypyc" + + +def short_name(name: str) -> str: + if name.startswith("builtins."): + return name[9:] + return name + + +def get_id_from_name(name: str, fullname: str, line: int) -> str: + """Create a unique id for a function. + + This creates an id that is unique for any given function definition, so that it can be used as + a dictionary key. This is usually the fullname of the function, but this is different in that + it handles the case where the function is named '_', in which case multiple different functions + could have the same name.""" + if unnamed_function(name): + return f"{fullname}.{line}" + else: + return fullname + + +def short_id_from_name(func_name: str, shortname: str, line: int | None) -> str: + if unnamed_function(func_name): + assert line is not None + partial_name = f"{shortname}.{line}" + else: + partial_name = shortname + return partial_name + + +def bitmap_name(index: int) -> str: + if index == 0: + return "__bitmap" + return f"__bitmap{index + 1}" diff --git a/.venv/lib/python3.12/site-packages/mypyc/crash.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/crash.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..07f5e35 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/crash.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/crash.py b/.venv/lib/python3.12/site-packages/mypyc/crash.py new file mode 100644 index 0000000..1227aa8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/crash.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +import sys +import traceback +from collections.abc import Iterator +from contextlib import contextmanager +from typing import NoReturn + + +@contextmanager +def catch_errors(module_path: str, line: int) -> Iterator[None]: + try: + yield + except Exception: + crash_report(module_path, line) + + +def crash_report(module_path: str, line: int) -> NoReturn: + # Adapted from report_internal_error in mypy + err = sys.exc_info()[1] + tb = traceback.extract_stack()[:-4] + # Excise all the traceback from the test runner + for i, x in enumerate(tb): + if x.name == "pytest_runtest_call": + tb = tb[i + 1 :] + break + tb2 = traceback.extract_tb(sys.exc_info()[2])[1:] + print("Traceback (most recent call last):") + for s in traceback.format_list(tb + tb2): + print(s.rstrip("\n")) + 
print(f"{module_path}:{line}: {type(err).__name__}: {err}") + raise SystemExit(2) diff --git a/.venv/lib/python3.12/site-packages/mypyc/errors.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/errors.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..62ca1c4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/errors.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/errors.py b/.venv/lib/python3.12/site-packages/mypyc/errors.py new file mode 100644 index 0000000..8bc9b27 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/errors.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +import mypy.errors +from mypy.options import Options + + +class Errors: + def __init__(self, options: Options) -> None: + self.num_errors = 0 + self.num_warnings = 0 + self._errors = mypy.errors.Errors(options, hide_error_codes=True) + + def error(self, msg: str, path: str, line: int) -> None: + self._errors.report(line, None, msg, severity="error", file=path) + self.num_errors += 1 + + def note(self, msg: str, path: str, line: int) -> None: + self._errors.report(line, None, msg, severity="note", file=path) + + def warning(self, msg: str, path: str, line: int) -> None: + self._errors.report(line, None, msg, severity="warning", file=path) + self.num_warnings += 1 + + def new_messages(self) -> list[str]: + return self._errors.new_messages() + + def flush_errors(self) -> None: + for error in self.new_messages(): + print(error) diff --git a/.venv/lib/python3.12/site-packages/mypyc/ir/__init__.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/ir/__init__.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..fb74cdd Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/ir/__init__.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/ir/__init__.py b/.venv/lib/python3.12/site-packages/mypyc/ir/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypyc/ir/class_ir.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/ir/class_ir.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..ed78a86 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/ir/class_ir.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/ir/class_ir.py b/.venv/lib/python3.12/site-packages/mypyc/ir/class_ir.py new file mode 100644 index 0000000..0a56aaf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/ir/class_ir.py @@ -0,0 +1,538 @@ +"""Intermediate representation of classes.""" + +from __future__ import annotations + +from typing import NamedTuple + +from mypyc.common import PROPSET_PREFIX, JsonDict +from mypyc.ir.func_ir import FuncDecl, FuncIR, FuncSignature, RuntimeArg +from mypyc.ir.ops import DeserMaps, Value +from mypyc.ir.rtypes import RInstance, RType, deserialize_type, object_rprimitive +from mypyc.namegen import NameGenerator, exported_name + +# Some notes on the vtable layout: Each concrete class has a vtable +# that contains function pointers for its methods. So that subclasses +# may be efficiently used when their parent class is expected, the +# layout of child vtables must be an extension of their base class's +# vtable. +# +# This makes multiple inheritance tricky, since obviously we cannot be +# an extension of multiple parent classes. 
We solve this by requiring +# all but one parent to be "traits", which we can operate on in a +# somewhat less efficient way. For each trait implemented by a class, +# we generate a separate vtable for the methods in that trait. +# We then store an array of (trait type, trait vtable) pointers alongside +# a class's main vtable. When we want to call a trait method, we +# (at runtime!) search the array of trait vtables to find the correct one, +# then call through it. +# Trait vtables additionally need entries for attribute getters and setters, +# since they can't always be in the same location. +# +# To keep down the number of indirections necessary, we store the +# array of trait vtables in the memory *before* the class vtable, and +# search it backwards. (This is a trick we can only do once---there +# are only two directions to store data in---but I don't think we'll +# need it again.) +# There are some tricks we could try in the future to store the trait +# vtables inline in the trait table (which would cut down one indirection), +# but this seems good enough for now. +# +# As an example: +# Imagine that we have a class B that inherits from a concrete class A +# and traits T1 and T2, and that A has methods foo() and +# bar() and B overrides bar() with a more specific type. +# Then B's vtable will look something like: +# +# T1 type object +# ptr to B's T1 trait vtable +# T2 type object +# ptr to B's T2 trait vtable +# -> | A.foo +# | Glue function that converts between A.bar's type and B.bar +# B.bar +# B.baz +# +# The arrow points to the "start" of the vtable (what vtable pointers +# point to) and the bars indicate which parts correspond to the parent +# class A's vtable layout. +# +# Classes that allow interpreted code to subclass them also have a +# "shadow vtable" that contains implementations that delegate to +# making a pycall, so that overridden methods in interpreted children +# will be called. (A better strategy could dynamically generate these +# vtables based on which methods are overridden in the children.) + +# Descriptions of method and attribute entries in class vtables. +# The 'cls' field is the class that the method/attr was defined in, +# which might be a parent class. +# The 'shadow_method', if present, contains the method that should be +# placed in the class's shadow vtable (if it has one). + + +class VTableMethod(NamedTuple): + cls: "ClassIR" # noqa: UP037 + name: str + method: FuncIR + shadow_method: FuncIR | None + + +VTableEntries = list[VTableMethod] + + +class ClassIR: + """Intermediate representation of a class. + + This also describes the runtime structure of native instances. + """ + + def __init__( + self, + name: str, + module_name: str, + is_trait: bool = False, + is_generated: bool = False, + is_abstract: bool = False, + is_ext_class: bool = True, + is_final_class: bool = False, + ) -> None: + self.name = name + self.module_name = module_name + self.is_trait = is_trait + self.is_generated = is_generated + self.is_abstract = is_abstract + self.is_ext_class = is_ext_class + self.is_final_class = is_final_class + # An augmented class has additional methods separate from what mypyc generates. + # Right now the only one is dataclasses. + self.is_augmented = False + # Does this inherit from a Python class? + self.inherits_python = False + # Do instances of this class have __dict__? + self.has_dict = False + # Do we allow interpreted subclasses? Derived from a mypyc_attr. 
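+        # (Typically requested with @mypyc_attr(allow_interpreted_subclasses=True)
+        # on the class definition.)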
+ self.allow_interpreted_subclasses = False + # Does this class need getseters to be generated for its attributes? (getseters are also + # added if is_generated is False) + self.needs_getseters = False + # Is this class declared as serializable (supports copy.copy + # and pickle) using @mypyc_attr(serializable=True)? + # + # Additionally, any class with this attribute False but with + # an __init__ that can be called without any arguments is + # *implicitly serializable*. In this case __init__ will be + # called during deserialization without arguments. If this is + # True, we match Python semantics and __init__ won't be called + # during deserialization. + # + # This impacts also all subclasses. Use is_serializable() to + # also consider base classes. + self._serializable = False + # If this a subclass of some built-in python class, the name + # of the object for that class. We currently only support this + # in a few ad-hoc cases. + self.builtin_base: str | None = None + # Default empty constructor + self.ctor = FuncDecl(name, None, module_name, FuncSignature([], RInstance(self))) + # Declare setup method that allocates and initializes an object. type is the + # type of the class being initialized, which could be another class if there + # is an interpreted subclass. + # TODO: Make it a regular method and generate its body in IR + self.setup = FuncDecl( + "__mypyc__" + name + "_setup", + None, + module_name, + FuncSignature([RuntimeArg("type", object_rprimitive)], RInstance(self)), + ) + # Attributes defined in the class (not inherited) + self.attributes: dict[str, RType] = {} + # Deletable attributes + self.deletable: list[str] = [] + # We populate method_types with the signatures of every method before + # we generate methods, and we rely on this information being present. + self.method_decls: dict[str, FuncDecl] = {} + # Map of methods that are actually present in an extension class + self.methods: dict[str, FuncIR] = {} + # Glue methods for boxing/unboxing when a class changes the type + # while overriding a method. Maps from (parent class overridden, method) + # to IR of glue method. + self.glue_methods: dict[tuple[ClassIR, str], FuncIR] = {} + + # Properties are accessed like attributes, but have behavior like method calls. + # They don't belong in the methods dictionary, since we don't want to expose them to + # Python's method API. But we want to put them into our own vtable as methods, so that + # they are properly handled and overridden. The property dictionary values are a tuple + # containing a property getter and an optional property setter. + self.properties: dict[str, tuple[FuncIR, FuncIR | None]] = {} + # We generate these in prepare_class_def so that we have access to them when generating + # other methods and properties that rely on these types. + self.property_types: dict[str, RType] = {} + + self.vtable: dict[str, int] | None = None + self.vtable_entries: VTableEntries = [] + self.trait_vtables: dict[ClassIR, VTableEntries] = {} + # N.B: base might not actually quite be the direct base. + # It is the nearest concrete base, but we allow a trait in between. + self.base: ClassIR | None = None + self.traits: list[ClassIR] = [] + # Supply a working mro for most generated classes. Real classes will need to + # fix it up. 
+ self.mro: list[ClassIR] = [self] + # base_mro is the chain of concrete (non-trait) ancestors + self.base_mro: list[ClassIR] = [self] + + # Direct subclasses of this class (use subclasses() to also include non-direct ones) + # None if separate compilation prevents this from working. + # + # Often it's better to use has_no_subclasses() or subclasses() instead. + self.children: list[ClassIR] | None = [] + + # Instance attributes that are initialized in the class body. + self.attrs_with_defaults: set[str] = set() + + # Attributes that are always initialized in __init__ or class body + # (inferred in mypyc.analysis.attrdefined using interprocedural analysis). + # These can never raise AttributeError when accessed. If an attribute + # is *not* always initialized, we normally use the error value for + # an undefined value. If the attribute byte has an overlapping error value + # (the error_overlap attribute is true for the RType), we use a bitmap + # to track if the attribute is defined instead (see bitmap_attrs). + self._always_initialized_attrs: set[str] = set() + + # Attributes that are sometimes initialized in __init__ + self._sometimes_initialized_attrs: set[str] = set() + + # If True, __init__ can make 'self' visible to unanalyzed/arbitrary code + self.init_self_leak = False + + # Definedness of these attributes is backed by a bitmap. Index in the list + # indicates the bit number. Includes inherited attributes. We need the + # bitmap for types such as native ints (i64 etc.) that can't have a dedicated + # error value that doesn't overlap a valid value. The bitmap is used if the + # value of an attribute is the same as the error value. + self.bitmap_attrs: list[str] = [] + + # If this is a generator environment class, what is the actual method for it + self.env_user_function: FuncIR | None = None + + # If True, keep one freed, cleared instance available for immediate reuse to + # speed up allocations. This helps if many objects are freed quickly, before + # other instances of the same class are allocated. This is effectively a + # per-type free "list" of up to length 1. + self.reuse_freed_instance = False + + # Is this a class inheriting from enum.Enum? Such classes can be special-cased. 
+ self.is_enum = False + + def __repr__(self) -> str: + return ( + "ClassIR(" + "name={self.name}, module_name={self.module_name}, " + "is_trait={self.is_trait}, is_generated={self.is_generated}, " + "is_abstract={self.is_abstract}, is_ext_class={self.is_ext_class}, " + "is_final_class={self.is_final_class}" + ")".format(self=self) + ) + + @property + def fullname(self) -> str: + return f"{self.module_name}.{self.name}" + + def real_base(self) -> ClassIR | None: + """Return the actual concrete base class, if there is one.""" + if len(self.mro) > 1 and not self.mro[1].is_trait: + return self.mro[1] + return None + + def vtable_entry(self, name: str) -> int: + assert self.vtable is not None, "vtable not computed yet" + assert name in self.vtable, f"{self.name!r} has no attribute {name!r}" + return self.vtable[name] + + def attr_details(self, name: str) -> tuple[RType, ClassIR]: + for ir in self.mro: + if name in ir.attributes: + return ir.attributes[name], ir + if name in ir.property_types: + return ir.property_types[name], ir + raise KeyError(f"{self.name!r} has no attribute {name!r}") + + def attr_type(self, name: str) -> RType: + return self.attr_details(name)[0] + + def method_decl(self, name: str) -> FuncDecl: + for ir in self.mro: + if name in ir.method_decls: + return ir.method_decls[name] + raise KeyError(f"{self.name!r} has no attribute {name!r}") + + def method_sig(self, name: str) -> FuncSignature: + return self.method_decl(name).sig + + def has_method(self, name: str) -> bool: + try: + self.method_decl(name) + except KeyError: + return False + return True + + def is_method_final(self, name: str) -> bool: + subs = self.subclasses() + if subs is None: + return self.is_final_class + + if self.has_method(name): + method_decl = self.method_decl(name) + for subc in subs: + if subc.method_decl(name) != method_decl: + return False + return True + else: + return not any(subc.has_method(name) for subc in subs) + + def has_attr(self, name: str) -> bool: + try: + self.attr_type(name) + except KeyError: + return False + return True + + def is_deletable(self, name: str) -> bool: + return any(name in ir.deletable for ir in self.mro) + + def is_always_defined(self, name: str) -> bool: + if self.is_deletable(name): + return False + return name in self._always_initialized_attrs + + def name_prefix(self, names: NameGenerator) -> str: + return names.private_name(self.module_name, self.name) + + def struct_name(self, names: NameGenerator) -> str: + return f"{exported_name(self.fullname)}Object" + + def get_method_and_class( + self, name: str, *, prefer_method: bool = False + ) -> tuple[FuncIR, ClassIR] | None: + for ir in self.mro: + if name in ir.methods: + func_ir = ir.methods[name] + if not prefer_method and func_ir.decl.implicit: + # This is an implicit accessor, so there is also an attribute definition + # which the caller prefers. This happens if an attribute overrides a + # property. + return None + return func_ir, ir + + return None + + def get_method(self, name: str, *, prefer_method: bool = False) -> FuncIR | None: + res = self.get_method_and_class(name, prefer_method=prefer_method) + return res[0] if res else None + + def has_method_decl(self, name: str) -> bool: + return any(name in ir.method_decls for ir in self.mro) + + def has_no_subclasses(self) -> bool: + return self.children == [] and not self.allow_interpreted_subclasses + + def subclasses(self) -> set[ClassIR] | None: + """Return all subclasses of this class, both direct and indirect. 
+ + Return None if it is impossible to identify all subclasses, for example + because we are performing separate compilation. + """ + if self.children is None or self.allow_interpreted_subclasses: + return None + result = set(self.children) + for child in self.children: + if child.children: + child_subs = child.subclasses() + if child_subs is None: + return None + result.update(child_subs) + return result + + def concrete_subclasses(self) -> list[ClassIR] | None: + """Return all concrete (i.e. non-trait and non-abstract) subclasses. + + Include both direct and indirect subclasses. Place classes with no children first. + """ + subs = self.subclasses() + if subs is None: + return None + concrete = {c for c in subs if not (c.is_trait or c.is_abstract)} + # We place classes with no children first because they are more likely + # to appear in various isinstance() checks. We then sort leaves by name + # to get stable order. + return sorted(concrete, key=lambda c: (len(c.children or []), c.name)) + + def is_serializable(self) -> bool: + return any(ci._serializable for ci in self.mro) + + def serialize(self) -> JsonDict: + return { + "name": self.name, + "module_name": self.module_name, + "is_trait": self.is_trait, + "is_ext_class": self.is_ext_class, + "is_abstract": self.is_abstract, + "is_generated": self.is_generated, + "is_augmented": self.is_augmented, + "is_final_class": self.is_final_class, + "inherits_python": self.inherits_python, + "has_dict": self.has_dict, + "allow_interpreted_subclasses": self.allow_interpreted_subclasses, + "needs_getseters": self.needs_getseters, + "_serializable": self._serializable, + "builtin_base": self.builtin_base, + "ctor": self.ctor.serialize(), + # We serialize dicts as lists to ensure order is preserved + "attributes": [(k, t.serialize()) for k, t in self.attributes.items()], + # We try to serialize a name reference, but if the decl isn't in methods + # then we can't be sure that will work so we serialize the whole decl. + "method_decls": [ + (k, d.id if k in self.methods else d.serialize()) + for k, d in self.method_decls.items() + ], + # We serialize method fullnames out and put methods in a separate dict + "methods": [(k, m.id) for k, m in self.methods.items()], + "glue_methods": [ + ((cir.fullname, k), m.id) for (cir, k), m in self.glue_methods.items() + ], + # We serialize properties and property_types separately out of an + # abundance of caution about preserving dict ordering... 
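+            # Illustrative JSON shape for a hypothetical class with one property "size":
+            #
+            #     "properties": ["size"],
+            #     "property_types": [["size", {...serialized RType...}]],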
+ "property_types": [(k, t.serialize()) for k, t in self.property_types.items()], + "properties": list(self.properties), + "vtable": self.vtable, + "vtable_entries": serialize_vtable(self.vtable_entries), + "trait_vtables": [ + (cir.fullname, serialize_vtable(v)) for cir, v in self.trait_vtables.items() + ], + # References to class IRs are all just names + "base": self.base.fullname if self.base else None, + "traits": [cir.fullname for cir in self.traits], + "mro": [cir.fullname for cir in self.mro], + "base_mro": [cir.fullname for cir in self.base_mro], + "children": ( + [cir.fullname for cir in self.children] if self.children is not None else None + ), + "deletable": self.deletable, + "attrs_with_defaults": sorted(self.attrs_with_defaults), + "_always_initialized_attrs": sorted(self._always_initialized_attrs), + "_sometimes_initialized_attrs": sorted(self._sometimes_initialized_attrs), + "init_self_leak": self.init_self_leak, + "env_user_function": self.env_user_function.id if self.env_user_function else None, + "reuse_freed_instance": self.reuse_freed_instance, + "is_enum": self.is_enum, + } + + @classmethod + def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> ClassIR: + fullname = data["module_name"] + "." + data["name"] + assert fullname in ctx.classes, "Class %s not in deser class map" % fullname + ir = ctx.classes[fullname] + + ir.is_trait = data["is_trait"] + ir.is_generated = data["is_generated"] + ir.is_abstract = data["is_abstract"] + ir.is_ext_class = data["is_ext_class"] + ir.is_augmented = data["is_augmented"] + ir.is_final_class = data["is_final_class"] + ir.inherits_python = data["inherits_python"] + ir.has_dict = data["has_dict"] + ir.allow_interpreted_subclasses = data["allow_interpreted_subclasses"] + ir.needs_getseters = data["needs_getseters"] + ir._serializable = data["_serializable"] + ir.builtin_base = data["builtin_base"] + ir.ctor = FuncDecl.deserialize(data["ctor"], ctx) + ir.attributes = {k: deserialize_type(t, ctx) for k, t in data["attributes"]} + ir.method_decls = { + k: ctx.functions[v].decl if isinstance(v, str) else FuncDecl.deserialize(v, ctx) + for k, v in data["method_decls"] + } + ir.methods = {k: ctx.functions[v] for k, v in data["methods"]} + ir.glue_methods = { + (ctx.classes[c], k): ctx.functions[v] for (c, k), v in data["glue_methods"] + } + ir.property_types = {k: deserialize_type(t, ctx) for k, t in data["property_types"]} + ir.properties = { + k: (ir.methods[k], ir.methods.get(PROPSET_PREFIX + k)) for k in data["properties"] + } + + ir.vtable = data["vtable"] + ir.vtable_entries = deserialize_vtable(data["vtable_entries"], ctx) + ir.trait_vtables = { + ctx.classes[k]: deserialize_vtable(v, ctx) for k, v in data["trait_vtables"] + } + + base = data["base"] + ir.base = ctx.classes[base] if base else None + ir.traits = [ctx.classes[s] for s in data["traits"]] + ir.mro = [ctx.classes[s] for s in data["mro"]] + ir.base_mro = [ctx.classes[s] for s in data["base_mro"]] + ir.children = data["children"] and [ctx.classes[s] for s in data["children"]] + ir.deletable = data["deletable"] + ir.attrs_with_defaults = set(data["attrs_with_defaults"]) + ir._always_initialized_attrs = set(data["_always_initialized_attrs"]) + ir._sometimes_initialized_attrs = set(data["_sometimes_initialized_attrs"]) + ir.init_self_leak = data["init_self_leak"] + ir.env_user_function = ( + ctx.functions[data["env_user_function"]] if data["env_user_function"] else None + ) + ir.reuse_freed_instance = data["reuse_freed_instance"] + ir.is_enum = data["is_enum"] + + return ir + + 
+class NonExtClassInfo: + """Information needed to construct a non-extension class (Python class). + + Includes the class dictionary, a tuple of base classes, + the class annotations dictionary, and the metaclass. + """ + + def __init__(self, dict: Value, bases: Value, anns: Value, metaclass: Value) -> None: + self.dict = dict + self.bases = bases + self.anns = anns + self.metaclass = metaclass + + +def serialize_vtable_entry(entry: VTableMethod) -> JsonDict: + return { + ".class": "VTableMethod", + "cls": entry.cls.fullname, + "name": entry.name, + "method": entry.method.decl.id, + "shadow_method": entry.shadow_method.decl.id if entry.shadow_method else None, + } + + +def serialize_vtable(vtable: VTableEntries) -> list[JsonDict]: + return [serialize_vtable_entry(v) for v in vtable] + + +def deserialize_vtable_entry(data: JsonDict, ctx: DeserMaps) -> VTableMethod: + if data[".class"] == "VTableMethod": + return VTableMethod( + ctx.classes[data["cls"]], + data["name"], + ctx.functions[data["method"]], + ctx.functions[data["shadow_method"]] if data["shadow_method"] else None, + ) + assert False, "Bogus vtable .class: %s" % data[".class"] + + +def deserialize_vtable(data: list[JsonDict], ctx: DeserMaps) -> VTableEntries: + return [deserialize_vtable_entry(x, ctx) for x in data] + + +def all_concrete_classes(class_ir: ClassIR) -> list[ClassIR] | None: + """Return all concrete classes among the class itself and its subclasses.""" + concrete = class_ir.concrete_subclasses() + if concrete is None: + return None + if not (class_ir.is_abstract or class_ir.is_trait): + concrete.append(class_ir) + return concrete diff --git a/.venv/lib/python3.12/site-packages/mypyc/ir/func_ir.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/ir/func_ir.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..a0d39aa Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/ir/func_ir.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/ir/func_ir.py b/.venv/lib/python3.12/site-packages/mypyc/ir/func_ir.py new file mode 100644 index 0000000..d11fef4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/ir/func_ir.py @@ -0,0 +1,484 @@ +"""Intermediate representation of functions.""" + +from __future__ import annotations + +import inspect +from collections.abc import Sequence +from typing import Final + +from mypy.nodes import ARG_POS, ArgKind, Block, FuncDef +from mypyc.common import BITMAP_BITS, JsonDict, bitmap_name, get_id_from_name, short_id_from_name +from mypyc.ir.ops import ( + Assign, + AssignMulti, + BasicBlock, + Box, + ControlOp, + DeserMaps, + Float, + Integer, + LoadAddress, + LoadLiteral, + Register, + TupleSet, + Value, +) +from mypyc.ir.rtypes import ( + RType, + bitmap_rprimitive, + deserialize_type, + is_bool_rprimitive, + is_none_rprimitive, +) +from mypyc.namegen import NameGenerator + + +class RuntimeArg: + """Description of a function argument in IR. + + Argument kind is one of ARG_* constants defined in mypy.nodes. 
+ """ + + def __init__( + self, name: str, typ: RType, kind: ArgKind = ARG_POS, pos_only: bool = False + ) -> None: + self.name = name + self.type = typ + self.kind = kind + self.pos_only = pos_only + + @property + def optional(self) -> bool: + return self.kind.is_optional() + + def __repr__(self) -> str: + return "RuntimeArg(name={}, type={}, optional={!r}, pos_only={!r})".format( + self.name, self.type, self.optional, self.pos_only + ) + + def serialize(self) -> JsonDict: + return { + "name": self.name, + "type": self.type.serialize(), + "kind": int(self.kind.value), + "pos_only": self.pos_only, + } + + @classmethod + def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> RuntimeArg: + return RuntimeArg( + data["name"], + deserialize_type(data["type"], ctx), + ArgKind(data["kind"]), + data["pos_only"], + ) + + +class FuncSignature: + """Signature of a function in IR.""" + + # TODO: Track if method? + + def __init__(self, args: Sequence[RuntimeArg], ret_type: RType) -> None: + self.args = tuple(args) + self.ret_type = ret_type + # Bitmap arguments are use to mark default values for arguments that + # have types with overlapping error values. + self.num_bitmap_args = num_bitmap_args(self.args) + if self.num_bitmap_args: + extra = [ + RuntimeArg(bitmap_name(i), bitmap_rprimitive, pos_only=True) + for i in range(self.num_bitmap_args) + ] + self.args = self.args + tuple(reversed(extra)) + + def real_args(self) -> tuple[RuntimeArg, ...]: + """Return arguments without any synthetic bitmap arguments.""" + if self.num_bitmap_args: + return self.args[: -self.num_bitmap_args] + return self.args + + def bound_sig(self) -> FuncSignature: + if self.num_bitmap_args: + return FuncSignature(self.args[1 : -self.num_bitmap_args], self.ret_type) + else: + return FuncSignature(self.args[1:], self.ret_type) + + def __repr__(self) -> str: + return f"FuncSignature(args={self.args!r}, ret={self.ret_type!r})" + + def serialize(self) -> JsonDict: + if self.num_bitmap_args: + args = self.args[: -self.num_bitmap_args] + else: + args = self.args + return {"args": [t.serialize() for t in args], "ret_type": self.ret_type.serialize()} + + @classmethod + def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> FuncSignature: + return FuncSignature( + [RuntimeArg.deserialize(arg, ctx) for arg in data["args"]], + deserialize_type(data["ret_type"], ctx), + ) + + +def num_bitmap_args(args: tuple[RuntimeArg, ...]) -> int: + n = 0 + for arg in args: + if arg.type.error_overlap and arg.kind.is_optional(): + n += 1 + return (n + (BITMAP_BITS - 1)) // BITMAP_BITS + + +FUNC_NORMAL: Final = 0 +FUNC_STATICMETHOD: Final = 1 +FUNC_CLASSMETHOD: Final = 2 + + +class FuncDecl: + """Declaration of a function in IR (without body or implementation). + + A function can be a regular module-level function, a method, a + static method, a class method, or a property getter/setter. 
+ """ + + def __init__( + self, + name: str, + class_name: str | None, + module_name: str, + sig: FuncSignature, + kind: int = FUNC_NORMAL, + *, + is_prop_setter: bool = False, + is_prop_getter: bool = False, + is_generator: bool = False, + is_coroutine: bool = False, + implicit: bool = False, + internal: bool = False, + ) -> None: + self.name = name + self.class_name = class_name + self.module_name = module_name + self.sig = sig + self.kind = kind + self.is_prop_setter = is_prop_setter + self.is_prop_getter = is_prop_getter + self.is_generator = is_generator + self.is_coroutine = is_coroutine + if class_name is None: + self.bound_sig: FuncSignature | None = None + else: + if kind == FUNC_STATICMETHOD: + self.bound_sig = sig + else: + self.bound_sig = sig.bound_sig() + + # If True, not present in the mypy AST and must be synthesized during irbuild + # Currently only supported for property getters/setters + self.implicit = implicit + + # If True, only direct C level calls are supported (no wrapper function) + self.internal = internal + + # This is optional because this will be set to the line number when the corresponding + # FuncIR is created + self._line: int | None = None + + @property + def line(self) -> int: + assert self._line is not None + return self._line + + @line.setter + def line(self, line: int) -> None: + self._line = line + + @property + def id(self) -> str: + assert self.line is not None + return get_id_from_name(self.name, self.fullname, self.line) + + @staticmethod + def compute_shortname(class_name: str | None, name: str) -> str: + return class_name + "." + name if class_name else name + + @property + def shortname(self) -> str: + return FuncDecl.compute_shortname(self.class_name, self.name) + + @property + def fullname(self) -> str: + return self.module_name + "." + self.shortname + + def cname(self, names: NameGenerator) -> str: + partial_name = short_id_from_name(self.name, self.shortname, self._line) + return names.private_name(self.module_name, partial_name) + + def serialize(self) -> JsonDict: + return { + "name": self.name, + "class_name": self.class_name, + "module_name": self.module_name, + "sig": self.sig.serialize(), + "kind": self.kind, + "is_prop_setter": self.is_prop_setter, + "is_prop_getter": self.is_prop_getter, + "is_generator": self.is_generator, + "is_coroutine": self.is_coroutine, + "implicit": self.implicit, + "internal": self.internal, + } + + # TODO: move this to FuncIR? + @staticmethod + def get_id_from_json(func_ir: JsonDict) -> str: + """Get the id from the serialized FuncIR associated with this FuncDecl""" + decl = func_ir["decl"] + shortname = FuncDecl.compute_shortname(decl["class_name"], decl["name"]) + fullname = decl["module_name"] + "." + shortname + return get_id_from_name(decl["name"], fullname, func_ir["line"]) + + @classmethod + def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> FuncDecl: + return FuncDecl( + data["name"], + data["class_name"], + data["module_name"], + FuncSignature.deserialize(data["sig"], ctx), + data["kind"], + is_prop_setter=data["is_prop_setter"], + is_prop_getter=data["is_prop_getter"], + is_generator=data["is_generator"], + is_coroutine=data["is_coroutine"], + implicit=data["implicit"], + internal=data["internal"], + ) + + +class FuncIR: + """Intermediate representation of a function with contextual information. + + Unlike FuncDecl, this includes the IR of the body (basic blocks). 
+ """ + + def __init__( + self, + decl: FuncDecl, + arg_regs: list[Register], + blocks: list[BasicBlock], + line: int = -1, + traceback_name: str | None = None, + ) -> None: + # Declaration of the function, including the signature + self.decl = decl + # Registers for all the arguments to the function + self.arg_regs = arg_regs + # Body of the function + self.blocks = blocks + self.decl.line = line + # The name that should be displayed for tracebacks that + # include this function. Function will be omitted from + # tracebacks if None. + self.traceback_name = traceback_name + + @property + def line(self) -> int: + return self.decl.line + + @property + def args(self) -> Sequence[RuntimeArg]: + return self.decl.sig.args + + @property + def ret_type(self) -> RType: + return self.decl.sig.ret_type + + @property + def class_name(self) -> str | None: + return self.decl.class_name + + @property + def sig(self) -> FuncSignature: + return self.decl.sig + + @property + def name(self) -> str: + return self.decl.name + + @property + def fullname(self) -> str: + return self.decl.fullname + + @property + def id(self) -> str: + return self.decl.id + + @property + def internal(self) -> bool: + return self.decl.internal + + def cname(self, names: NameGenerator) -> str: + return self.decl.cname(names) + + def __repr__(self) -> str: + if self.class_name: + return f"" + else: + return f"" + + def serialize(self) -> JsonDict: + # We don't include blocks in the serialized version + return { + "decl": self.decl.serialize(), + "line": self.line, + "traceback_name": self.traceback_name, + } + + @classmethod + def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> FuncIR: + return FuncIR( + FuncDecl.deserialize(data["decl"], ctx), [], [], data["line"], data["traceback_name"] + ) + + +INVALID_FUNC_DEF: Final = FuncDef("", [], Block([])) + + +def all_values(args: list[Register], blocks: list[BasicBlock]) -> list[Value]: + """Return the set of all values that may be initialized in the blocks. + + This omits registers that are only read. + """ + values: list[Value] = list(args) + seen_registers = set(args) + + for block in blocks: + for op in block.ops: + if not isinstance(op, ControlOp): + if isinstance(op, (Assign, AssignMulti)): + if op.dest not in seen_registers: + values.append(op.dest) + seen_registers.add(op.dest) + elif op.is_void: + continue + else: + # If we take the address of a register, it might get initialized. + if ( + isinstance(op, LoadAddress) + and isinstance(op.src, Register) + and op.src not in seen_registers + ): + values.append(op.src) + seen_registers.add(op.src) + values.append(op) + + return values + + +def all_values_full(args: list[Register], blocks: list[BasicBlock]) -> list[Value]: + """Return set of all values that are initialized or accessed.""" + values: list[Value] = list(args) + seen_registers = set(args) + + for block in blocks: + for op in block.ops: + for source in op.sources(): + # Look for uninitialized registers that are accessed. Ignore + # non-registers since we don't allow ops outside basic blocks. 
+ if isinstance(source, Register) and source not in seen_registers: + values.append(source) + seen_registers.add(source) + if not isinstance(op, ControlOp): + if isinstance(op, (Assign, AssignMulti)): + if op.dest not in seen_registers: + values.append(op.dest) + seen_registers.add(op.dest) + elif op.is_void: + continue + else: + values.append(op) + + return values + + +_ARG_KIND_TO_INSPECT: Final = { + ArgKind.ARG_POS: inspect.Parameter.POSITIONAL_OR_KEYWORD, + ArgKind.ARG_OPT: inspect.Parameter.POSITIONAL_OR_KEYWORD, + ArgKind.ARG_STAR: inspect.Parameter.VAR_POSITIONAL, + ArgKind.ARG_NAMED: inspect.Parameter.KEYWORD_ONLY, + ArgKind.ARG_STAR2: inspect.Parameter.VAR_KEYWORD, + ArgKind.ARG_NAMED_OPT: inspect.Parameter.KEYWORD_ONLY, +} + +# Sentinel indicating a value that cannot be represented in a text signature. +_NOT_REPRESENTABLE = object() + + +def get_text_signature(fn: FuncIR, *, bound: bool = False) -> str | None: + """Return a text signature in CPython's internal doc format, or None + if the function's signature cannot be represented. + """ + parameters = [] + mark_self = (fn.class_name is not None) and (fn.decl.kind != FUNC_STATICMETHOD) and not bound + sig = fn.decl.bound_sig if bound and fn.decl.bound_sig is not None else fn.decl.sig + # Pre-scan for end of positional-only parameters. + # This is needed to handle signatures like 'def foo(self, __x)', where mypy + # currently sees 'self' as being positional-or-keyword and '__x' as positional-only. + pos_only_idx = -1 + for idx, arg in enumerate(sig.args): + if arg.pos_only and arg.kind in (ArgKind.ARG_POS, ArgKind.ARG_OPT): + pos_only_idx = idx + for idx, arg in enumerate(sig.args): + if arg.name.startswith(("__bitmap", "__mypyc")): + continue + kind = ( + inspect.Parameter.POSITIONAL_ONLY + if idx <= pos_only_idx + else _ARG_KIND_TO_INSPECT[arg.kind] + ) + default: object = inspect.Parameter.empty + if arg.optional: + default = _find_default_argument(arg.name, fn.blocks) + if default is _NOT_REPRESENTABLE: + # This default argument cannot be represented in a __text_signature__ + return None + + curr_param = inspect.Parameter(arg.name, kind, default=default) + parameters.append(curr_param) + if mark_self: + # Parameter.__init__/Parameter.replace do not accept $ + curr_param._name = f"${arg.name}" # type: ignore[attr-defined] + mark_self = False + return f"{fn.name}{inspect.Signature(parameters)}" + + +def _find_default_argument(name: str, blocks: list[BasicBlock]) -> object: + # Find assignment inserted by gen_arg_defaults. Assumed to be the first assignment. 
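+    # Illustrative (hypothetical) case: for `def f(x: int = 3)` the generated default
+    # handling assigns an Integer literal to the register named "x"; the loop below
+    # locates that Assign and _extract_python_literal() turns the tagged literal back
+    # into the Python value 3 for use in the text signature.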
+ for block in blocks: + for op in block.ops: + if isinstance(op, Assign) and op.dest.name == name: + return _extract_python_literal(op.src) + return _NOT_REPRESENTABLE + + +def _extract_python_literal(value: Value) -> object: + if isinstance(value, Integer): + if is_none_rprimitive(value.type): + return None + val = value.numeric_value() + if is_bool_rprimitive(value.type): + return bool(val) + return val + elif isinstance(value, Float): + return value.value + elif isinstance(value, LoadLiteral): + return value.value + elif isinstance(value, Box): + return _extract_python_literal(value.src) + elif isinstance(value, TupleSet): + items = tuple(_extract_python_literal(item) for item in value.items) + if any(itm is _NOT_REPRESENTABLE for itm in items): + return _NOT_REPRESENTABLE + return items + return _NOT_REPRESENTABLE diff --git a/.venv/lib/python3.12/site-packages/mypyc/ir/module_ir.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/ir/module_ir.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..925fd3a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/ir/module_ir.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/ir/module_ir.py b/.venv/lib/python3.12/site-packages/mypyc/ir/module_ir.py new file mode 100644 index 0000000..5aef414 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/ir/module_ir.py @@ -0,0 +1,97 @@ +"""Intermediate representation of modules.""" + +from __future__ import annotations + +from mypyc.common import JsonDict +from mypyc.ir.class_ir import ClassIR +from mypyc.ir.func_ir import FuncDecl, FuncIR +from mypyc.ir.ops import DeserMaps +from mypyc.ir.rtypes import RType, deserialize_type + + +class ModuleIR: + """Intermediate representation of a module.""" + + def __init__( + self, + fullname: str, + imports: list[str], + functions: list[FuncIR], + classes: list[ClassIR], + final_names: list[tuple[str, RType]], + type_var_names: list[str], + ) -> None: + self.fullname = fullname + self.imports = imports.copy() + self.functions = functions + self.classes = classes + self.final_names = final_names + # Names of C statics used for Python 3.12 type variable objects. + # These are only visible in the module that defined them, so no need + # to serialize. + self.type_var_names = type_var_names + # Capsules needed by the module, specified via module names such as "librt.base64" + self.capsules: set[str] = set() + + def serialize(self) -> JsonDict: + return { + "fullname": self.fullname, + "imports": self.imports, + "functions": [f.serialize() for f in self.functions], + "classes": [c.serialize() for c in self.classes], + "final_names": [(k, t.serialize()) for k, t in self.final_names], + "capsules": sorted(self.capsules), + } + + @classmethod + def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> ModuleIR: + module = ModuleIR( + data["fullname"], + data["imports"], + [ctx.functions[FuncDecl.get_id_from_json(f)] for f in data["functions"]], + [ClassIR.deserialize(c, ctx) for c in data["classes"]], + [(k, deserialize_type(t, ctx)) for k, t in data["final_names"]], + [], + ) + module.capsules = set(data["capsules"]) + return module + + +def deserialize_modules(data: dict[str, JsonDict], ctx: DeserMaps) -> dict[str, ModuleIR]: + """Deserialize a collection of modules. + + The modules can contain dependencies on each other. + + Arguments: + data: A dict containing the modules to deserialize. + ctx: The deserialization maps to use and to populate. 
+ They are populated with information from the deserialized + modules and as a precondition must have been populated by + deserializing any dependencies of the modules being deserialized + (outside of dependencies between the modules themselves). + + Returns a map containing the deserialized modules. + """ + for mod in data.values(): + # First create ClassIRs for every class so that we can construct types and whatnot + for cls in mod["classes"]: + ir = ClassIR(cls["name"], cls["module_name"]) + assert ir.fullname not in ctx.classes, "Class %s already in map" % ir.fullname + ctx.classes[ir.fullname] = ir + + for mod in data.values(): + # Then deserialize all of the functions so that methods are available + # to the class deserialization. + for method in mod["functions"]: + func = FuncIR.deserialize(method, ctx) + assert func.decl.id not in ctx.functions, ( + "Method %s already in map" % func.decl.fullname + ) + ctx.functions[func.decl.id] = func + + return {k: ModuleIR.deserialize(v, ctx) for k, v in data.items()} + + +# ModulesIRs should also always be an *OrderedDict*, but if we +# declared it that way we would need to put it in quotes everywhere... +ModuleIRs = dict[str, ModuleIR] diff --git a/.venv/lib/python3.12/site-packages/mypyc/ir/ops.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/ir/ops.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..7cc9647 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/ir/ops.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/ir/ops.py b/.venv/lib/python3.12/site-packages/mypyc/ir/ops.py new file mode 100644 index 0000000..2153d47 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/ir/ops.py @@ -0,0 +1,2039 @@ +"""Low-level opcodes for compiler intermediate representation (IR). + +Opcodes operate on abstract values (Value) in a register machine. Each +value has a type (RType). A value can hold various things, such as: + +- local variables or temporaries (Register) +- intermediate values of expressions (RegisterOp subclasses) +- condition flags (true/false) +- literals (integer literals, True, False, etc.) + +NOTE: As a convention, we don't create subclasses of concrete Value/Op + subclasses (e.g. you shouldn't define a subclass of Integer, which + is a concrete class). + + If you want to introduce a variant of an existing class, you'd + typically add an attribute (e.g. a flag) to an existing concrete + class to enable the new behavior. Sometimes adding a new abstract + base class is also an option, or just creating a new subclass + without any inheritance relationship (some duplication of code + is preferred over introducing complex implementation inheritance). + + This makes it possible to use isinstance(x, ) checks without worrying about potential subclasses. 
+""" + +from __future__ import annotations + +from abc import abstractmethod +from collections.abc import Sequence +from typing import TYPE_CHECKING, Final, Generic, NamedTuple, TypeVar, Union, final + +from mypy_extensions import trait + +from mypyc.ir.rtypes import ( + RArray, + RInstance, + RStruct, + RTuple, + RType, + RVoid, + bit_rprimitive, + bool_rprimitive, + cstring_rprimitive, + float_rprimitive, + int_rprimitive, + is_bool_or_bit_rprimitive, + is_int_rprimitive, + is_none_rprimitive, + is_pointer_rprimitive, + is_short_int_rprimitive, + object_rprimitive, + pointer_rprimitive, + short_int_rprimitive, + void_rtype, +) + +if TYPE_CHECKING: + from mypyc.codegen.literals import LiteralValue + from mypyc.ir.class_ir import ClassIR + from mypyc.ir.func_ir import FuncDecl, FuncIR + +T = TypeVar("T") + + +@final +class BasicBlock: + """IR basic block. + + Contains a sequence of Ops and ends with a ControlOp (Goto, + Branch, Return or Unreachable). Only the last op can be a + ControlOp. + + All generated Ops live in basic blocks. Basic blocks determine the + order of evaluation and control flow within a function. A basic + block is always associated with a single function/method (FuncIR). + + When building the IR, ops that raise exceptions can be included in + the middle of a basic block, but the exceptions aren't checked. + Afterwards we perform a transform that inserts explicit checks for + all error conditions and splits basic blocks accordingly to preserve + the invariant that a jump, branch or return can only ever appear + as the final op in a block. Manually inserting error checking ops + would be boring and error-prone. + + BasicBlocks have an error_handler attribute that determines where + to jump if an error occurs. If none is specified, an error will + propagate up out of the function. This is compiled away by the + `exceptions` module. + + Block labels are used for pretty printing and emitting C code, and + get filled in by those passes. + + Ops that may terminate the program aren't treated as exits. + """ + + def __init__(self, label: int = -1) -> None: + self.label = label + self.ops: list[Op] = [] + self.error_handler: BasicBlock | None = None + self.referenced = False + + @property + def terminated(self) -> bool: + """Does the block end with a jump, branch or return? + + This should always be true after the basic block has been fully built, but + this is false during construction. + """ + return bool(self.ops) and isinstance(self.ops[-1], ControlOp) + + @property + def terminator(self) -> ControlOp: + """The terminator operation of the block.""" + assert bool(self.ops) and isinstance(self.ops[-1], ControlOp) + return self.ops[-1] + + +# Never generates an exception +ERR_NEVER: Final = 0 +# Generates magic value (c_error_value) based on target RType on exception +ERR_MAGIC: Final = 1 +# Generates false (bool) on exception +ERR_FALSE: Final = 2 +# Always fails +ERR_ALWAYS: Final = 3 +# Like ERR_MAGIC, but the magic return overlaps with a possible return value, and +# an extra PyErr_Occurred() check is also required +ERR_MAGIC_OVERLAPPING: Final = 4 + +# Hack: using this line number for an op will suppress it in tracebacks +NO_TRACEBACK_LINE_NO = -10000 + + +class Value: + """Abstract base class for all IR values. + + These include references to registers, literals, and all + operations (Ops), such as assignments, calls and branches. + + Values are often used as inputs of Ops. Register can be used as an + assignment target. 
+ + A Value is part of the IR being compiled if it's included in a BasicBlock + that is reachable from a FuncIR (i.e., is part of a function). + + See also: Op is a subclass of Value that is the base class of all + operations. + """ + + # Source line number (-1 for no/unknown line) + line = -1 + # Type of the value or the result of the operation + type: RType = void_rtype + is_borrowed = False + + @property + def is_void(self) -> bool: + return isinstance(self.type, RVoid) + + +@final +class Register(Value): + """A Register holds a value of a specific type, and it can be read and mutated. + + A Register is always local to a function. Each local variable maps + to a Register, and they are also used for some (but not all) + temporary values. + + Note that the term 'register' is overloaded and is sometimes used + to refer to arbitrary Values (for example, in RegisterOp). + """ + + def __init__(self, type: RType, name: str = "", is_arg: bool = False, line: int = -1) -> None: + self.type = type + self.name = name + self.is_arg = is_arg + self.is_borrowed = is_arg + self.line = line + + @property + def is_void(self) -> bool: + return False + + def __repr__(self) -> str: + return f"" + + +@final +class Integer(Value): + """Short integer literal. + + Integer literals are treated as constant values and are generally + not included in data flow analyses and such, unlike Register and + Op subclasses. + + Integer can represent multiple types: + + * Short tagged integers (short_int_primitive type; the tag bit is clear) + * Ordinary fixed-width integers (e.g., int32_rprimitive) + * Values of other unboxed primitive types that are represented as integers + (none_rprimitive, bool_rprimitive) + * Null pointers (value 0) of various types, including object_rprimitive + """ + + def __init__(self, value: int, rtype: RType = short_int_rprimitive, line: int = -1) -> None: + if is_short_int_rprimitive(rtype) or is_int_rprimitive(rtype): + self.value = value * 2 + else: + self.value = value + self.type = rtype + self.line = line + + def numeric_value(self) -> int: + if is_short_int_rprimitive(self.type) or is_int_rprimitive(self.type): + return self.value // 2 + return self.value + + +@final +class Float(Value): + """Float literal. + + Floating point literals are treated as constant values and are generally + not included in data flow analyses and such, unlike Register and + Op subclasses. + """ + + def __init__(self, value: float, line: int = -1) -> None: + self.value = value + self.type = float_rprimitive + self.line = line + + +@final +class CString(Value): + """C string literal (zero-terminated). + + You can also include zero values in the value, but then you'll need to track + the length of the string separately. + """ + + def __init__(self, value: bytes, line: int = -1) -> None: + self.value = value + self.type = cstring_rprimitive + self.line = line + + +@final +class Undef(Value): + """An undefined value. + + Use Undef() as the initial value followed by one or more SetElement + ops to initialize a struct. Pseudocode example: + + r0 = set_element undef MyStruct, "field1", f1 + r1 = set_element r0, "field2", f2 + # r1 now has new struct value with two fields set + + Warning: Always initialize undefined values before using them, + as otherwise the values are garbage. You shouldn't expect that + undefined values are zeroed, in particular. + """ + + def __init__(self, rtype: RType) -> None: + self.type = rtype + + +class Op(Value): + """Abstract base class for all IR operations. 
+ + Each operation must be stored in a BasicBlock (in 'ops') to be + active in the IR. This is different from non-Op values, including + Register and Integer, where a reference from an active Op is + sufficient to be considered active. + + In well-formed IR an active Op has no references to inactive ops + or ops used in another function. + """ + + def __init__(self, line: int) -> None: + self.line = line + + def can_raise(self) -> bool: + # Override this is if Op may raise an exception. Note that currently the fact that + # only RegisterOps may raise an exception in hard coded in some places. + return False + + @abstractmethod + def sources(self) -> list[Value]: + """All the values the op may read.""" + + @abstractmethod + def set_sources(self, new: list[Value]) -> None: + """Rewrite the sources of an op""" + + def stolen(self) -> list[Value]: + """Return arguments that have a reference count stolen by this op""" + return [] + + def unique_sources(self) -> list[Value]: + result: list[Value] = [] + for reg in self.sources(): + if reg not in result: + result.append(reg) + return result + + @abstractmethod + def accept(self, visitor: OpVisitor[T]) -> T: + pass + + +class BaseAssign(Op): + """Abstract base class for ops that assign to a register.""" + + def __init__(self, dest: Register, line: int = -1) -> None: + super().__init__(line) + self.dest = dest + + +@final +class Assign(BaseAssign): + """Assign a value to a Register (dest = src).""" + + error_kind = ERR_NEVER + + def __init__(self, dest: Register, src: Value, line: int = -1) -> None: + super().__init__(dest, line) + self.src = src + + def sources(self) -> list[Value]: + return [self.src] + + def set_sources(self, new: list[Value]) -> None: + (self.src,) = new + + def stolen(self) -> list[Value]: + return [self.src] + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_assign(self) + + +@final +class AssignMulti(BaseAssign): + """Assign multiple values to a Register (dest = src1, src2, ...). + + This is used to initialize RArray values. It's provided to avoid + very verbose IR for common vectorcall operations. + + Note that this interacts atypically with reference counting. We + assume that each RArray register is initialized exactly once + with this op. 
+ """ + + error_kind = ERR_NEVER + + def __init__(self, dest: Register, src: list[Value], line: int = -1) -> None: + super().__init__(dest, line) + assert src + assert isinstance(dest.type, RArray) + assert dest.type.length == len(src) + self.src = src + + def sources(self) -> list[Value]: + return self.src.copy() + + def set_sources(self, new: list[Value]) -> None: + self.src = new[:] + + def stolen(self) -> list[Value]: + return [] + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_assign_multi(self) + + +class ControlOp(Op): + """Abstract base class for control flow operations.""" + + def targets(self) -> Sequence[BasicBlock]: + """Get all basic block targets of the control operation.""" + return () + + def set_target(self, i: int, new: BasicBlock) -> None: + """Update a basic block target.""" + raise AssertionError(f"Invalid set_target({self}, {i})") + + +@final +class Goto(ControlOp): + """Unconditional jump.""" + + error_kind = ERR_NEVER + + def __init__(self, label: BasicBlock, line: int = -1) -> None: + super().__init__(line) + self.label = label + + def targets(self) -> Sequence[BasicBlock]: + return (self.label,) + + def set_target(self, i: int, new: BasicBlock) -> None: + assert i == 0 + self.label = new + + def __repr__(self) -> str: + return "" % self.label.label + + def sources(self) -> list[Value]: + return [] + + def set_sources(self, new: list[Value]) -> None: + assert not new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_goto(self) + + +@final +class Branch(ControlOp): + """Branch based on a value. + + If op is BOOL, branch based on a bit/bool value: + if [not] r1 goto L1 else goto L2 + + If op is IS_ERROR, branch based on whether there is an error value: + if [not] is_error(r1) goto L1 else goto L2 + """ + + # Branch ops never raise an exception. 
+ error_kind = ERR_NEVER + + BOOL: Final = 100 + IS_ERROR: Final = 101 + + def __init__( + self, + value: Value, + true_label: BasicBlock, + false_label: BasicBlock, + op: int, + line: int = -1, + *, + rare: bool = False, + ) -> None: + super().__init__(line) + # Target value being checked + self.value = value + # Branch here if the condition is true + self.true = true_label + # Branch here if the condition is false + self.false = false_label + # Branch.BOOL (boolean check) or Branch.IS_ERROR (error value check) + self.op = op + # If True, the condition is negated + self.negated = False + # If not None, the true label should generate a traceback entry (func name, line number) + self.traceback_entry: tuple[str, int] | None = None + # If True, we expect to usually take the false branch (for optimization purposes); + # this is implicitly treated as true if there is a traceback entry + self.rare = rare + + def targets(self) -> Sequence[BasicBlock]: + return (self.true, self.false) + + def set_target(self, i: int, new: BasicBlock) -> None: + assert i == 0 or i == 1 + if i == 0: + self.true = new + else: + self.false = new + + def sources(self) -> list[Value]: + return [self.value] + + def set_sources(self, new: list[Value]) -> None: + (self.value,) = new + + def invert(self) -> None: + self.negated = not self.negated + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_branch(self) + + +@final +class Return(ControlOp): + """Return a value from a function.""" + + error_kind = ERR_NEVER + + def __init__( + self, value: Value, line: int = -1, *, yield_target: BasicBlock | None = None + ) -> None: + super().__init__(line) + self.value = value + # If this return is created by a yield, keep track of the next + # basic block. This doesn't affect the code we generate but + # can feed into analysis that need to understand the + # *original* CFG. + self.yield_target = yield_target + + def sources(self) -> list[Value]: + return [self.value] + + def set_sources(self, new: list[Value]) -> None: + (self.value,) = new + + def stolen(self) -> list[Value]: + return [self.value] + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_return(self) + + +@final +class Unreachable(ControlOp): + """Mark the end of basic block as unreachable. + + This is sometimes necessary when the end of a basic block is never + reached. This can also be explicitly added to the end of non-None + returning functions (in None-returning function we can just return + None). + + Mypy statically guarantees that the end of the function is not + unreachable if there is not a return statement. + + This prevents the block formatter from being confused due to lack + of a leave and also leaves a nifty note in the IR. It is not + generally processed by visitors. + """ + + error_kind = ERR_NEVER + + def __init__(self, line: int = -1) -> None: + super().__init__(line) + + def sources(self) -> list[Value]: + return [] + + def set_sources(self, new: list[Value]) -> None: + assert not new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_unreachable(self) + + +class RegisterOp(Op): + """Abstract base class for operations that can be written as r1 = f(r2, ..., rn). + + Takes some values, performs an operation, and generates an output + (unless the 'type' attribute is void_rtype, which is the default). + Other ops can refer to the result of the Op by referring to the Op + instance. This doesn't do any explicit control flow, but can raise an + error. 
+ + Note that the operands can be arbitrary Values, not just Register + instances, even though the naming may suggest otherwise. + """ + + error_kind = -1 # Can this raise exception and how is it signalled; one of ERR_* + + _type: RType | None = None + + def __init__(self, line: int) -> None: + super().__init__(line) + assert self.error_kind != -1, "error_kind not defined" + + def can_raise(self) -> bool: + return self.error_kind != ERR_NEVER + + +@final +class IncRef(RegisterOp): + """Increase reference count (inc_ref src).""" + + error_kind = ERR_NEVER + + def __init__(self, src: Value, line: int = -1) -> None: + assert src.type.is_refcounted + super().__init__(line) + self.src = src + + def sources(self) -> list[Value]: + return [self.src] + + def set_sources(self, new: list[Value]) -> None: + (self.src,) = new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_inc_ref(self) + + +@final +class DecRef(RegisterOp): + """Decrease reference count and free object if zero (dec_ref src). + + The is_xdec flag says to use an XDECREF, which checks if the + pointer is NULL first. + """ + + error_kind = ERR_NEVER + + def __init__(self, src: Value, is_xdec: bool = False, line: int = -1) -> None: + assert src.type.is_refcounted + super().__init__(line) + self.src = src + self.is_xdec = is_xdec + + def __repr__(self) -> str: + return "<{}DecRef {!r}>".format("X" if self.is_xdec else "", self.src) + + def sources(self) -> list[Value]: + return [self.src] + + def set_sources(self, new: list[Value]) -> None: + (self.src,) = new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_dec_ref(self) + + +@final +class Call(RegisterOp): + """Native call f(arg, ...). + + The call target can be a module-level function or a class. + """ + + def __init__(self, fn: FuncDecl, args: Sequence[Value], line: int) -> None: + self.fn = fn + self.args = list(args) + assert len(self.args) == len(fn.sig.args) + self.type = fn.sig.ret_type + ret_type = fn.sig.ret_type + if not ret_type.error_overlap: + self.error_kind = ERR_MAGIC + else: + self.error_kind = ERR_MAGIC_OVERLAPPING + super().__init__(line) + + def sources(self) -> list[Value]: + return list(self.args.copy()) + + def set_sources(self, new: list[Value]) -> None: + self.args = new[:] + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_call(self) + + +@final +class MethodCall(RegisterOp): + """Native method call obj.method(arg, ...)""" + + def __init__(self, obj: Value, method: str, args: list[Value], line: int = -1) -> None: + self.obj = obj + self.method = method + self.args = args + assert isinstance(obj.type, RInstance), "Methods can only be called on instances" + self.receiver_type = obj.type + method_ir = self.receiver_type.class_ir.method_sig(method) + assert method_ir is not None, "{} doesn't have method {}".format( + self.receiver_type.name, method + ) + ret_type = method_ir.ret_type + self.type = ret_type + if not ret_type.error_overlap: + self.error_kind = ERR_MAGIC + else: + self.error_kind = ERR_MAGIC_OVERLAPPING + super().__init__(line) + + def sources(self) -> list[Value]: + return self.args.copy() + [self.obj] + + def set_sources(self, new: list[Value]) -> None: + *self.args, self.obj = new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_method_call(self) + + +@final +class PrimitiveDescription: + """Description of a primitive op. + + Primitives get lowered into lower-level ops before code generation. 
+ + If c_function_name is provided, a primitive will be lowered into a CallC op. + Otherwise custom logic will need to be implemented to transform the + primitive into lower-level ops. + """ + + def __init__( + self, + name: str, + arg_types: list[RType], + return_type: RType, # TODO: What about generic? + var_arg_type: RType | None, + truncated_type: RType | None, + c_function_name: str | None, + error_kind: int, + steals: StealsDescription, + is_borrowed: bool, + ordering: list[int] | None, + extra_int_constants: list[tuple[int, RType]], + priority: int, + is_pure: bool, + experimental: bool, + capsule: str | None, + ) -> None: + # Each primitive much have a distinct name, but otherwise they are arbitrary. + self.name: Final = name + self.arg_types: Final = arg_types + self.return_type: Final = return_type + self.var_arg_type: Final = var_arg_type + self.truncated_type: Final = truncated_type + # If non-None, this will map to a call of a C helper function; if None, + # there must be a custom handler function that gets invoked during the lowering + # pass to generate low-level IR for the primitive (in the mypyc.lower package) + self.c_function_name: Final = c_function_name + self.error_kind: Final = error_kind + self.steals: Final = steals + self.is_borrowed: Final = is_borrowed + self.ordering: Final = ordering + self.extra_int_constants: Final = extra_int_constants + self.priority: Final = priority + # Pure primitives have no side effects, take immutable arguments, and + # never fail. They support additional optimizations. + self.is_pure: Final = is_pure + if is_pure: + assert error_kind == ERR_NEVER + # Experimental primitives are not used unless mypyc experimental features are + # explicitly enabled + self.experimental = experimental + # Capsule that needs to imported and configured to call the primitive + # (name of the target module, e.g. "librt.base64"). + self.capsule = capsule + + def __repr__(self) -> str: + return f"" + + +@final +class PrimitiveOp(RegisterOp): + """A higher-level primitive operation. + + Some of these have special compiler support. These will be lowered + (transformed) into lower-level IR ops before code generation, and after + reference counting op insertion. Others will be transformed into CallC + ops. + + Tagged integer equality is a typical primitive op with non-trivial + lowering. It gets transformed into a tag check, followed by different + code paths for short and long representations. + """ + + def __init__(self, args: list[Value], desc: PrimitiveDescription, line: int = -1) -> None: + self.args = args + self.type = desc.return_type + self.error_kind = desc.error_kind + self.desc = desc + + def sources(self) -> list[Value]: + return self.args + + def set_sources(self, new: list[Value]) -> None: + self.args = new[:] + + def stolen(self) -> list[Value]: + steals = self.desc.steals + if isinstance(steals, list): + assert len(steals) == len(self.args) + return [arg for arg, steal in zip(self.args, steals) if steal] + else: + return [] if not steals else self.sources() + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_primitive_op(self) + + +@final +class LoadErrorValue(RegisterOp): + """Load an error value. + + Each type has one reserved value that signals an error (exception). This + loads the error value for a specific type. 
+ """ + + error_kind = ERR_NEVER + + def __init__( + self, rtype: RType, line: int = -1, is_borrowed: bool = False, undefines: bool = False + ) -> None: + super().__init__(line) + self.type = rtype + self.is_borrowed = is_borrowed + # Undefines is true if this should viewed by the definedness + # analysis pass as making the register it is assigned to + # undefined (and thus checks should be added on uses). + self.undefines = undefines + + def sources(self) -> list[Value]: + return [] + + def set_sources(self, new: list[Value]) -> None: + assert not new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_load_error_value(self) + + +@final +class LoadLiteral(RegisterOp): + """Load a Python literal object (dest = 'foo' / b'foo' / ...). + + This is used to load a static PyObject * value corresponding to + a literal of one of the supported types. + + Tuple / frozenset literals must contain only valid literal values as items. + + NOTE: You can use this to load boxed (Python) int objects. Use + Integer to load unboxed, tagged integers or fixed-width, + low-level integers. + + For int literals, both int_rprimitive (CPyTagged) and + object_primitive (PyObject *) are supported as rtype. However, + when using int_rprimitive, the value must *not* be small enough + to fit in an unboxed integer. + """ + + error_kind = ERR_NEVER + is_borrowed = True + + def __init__(self, value: LiteralValue, rtype: RType) -> None: + self.value = value + self.type = rtype + + def sources(self) -> list[Value]: + return [] + + def set_sources(self, new: list[Value]) -> None: + assert not new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_load_literal(self) + + +@final +class GetAttr(RegisterOp): + """obj.attr (for a native object)""" + + error_kind = ERR_MAGIC + + def __init__( + self, + obj: Value, + attr: str, + line: int, + *, + borrow: bool = False, + allow_error_value: bool = False, + ) -> None: + super().__init__(line) + self.obj = obj + self.attr = attr + self.allow_error_value = allow_error_value + assert isinstance(obj.type, RInstance), "Attribute access not supported: %s" % obj.type + self.class_type = obj.type + attr_type = obj.type.attr_type(attr) + self.type = attr_type + if allow_error_value: + self.error_kind = ERR_NEVER + elif attr_type.error_overlap: + self.error_kind = ERR_MAGIC_OVERLAPPING + self.is_borrowed = borrow and attr_type.is_refcounted + + def sources(self) -> list[Value]: + return [self.obj] + + def set_sources(self, new: list[Value]) -> None: + (self.obj,) = new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_get_attr(self) + + +@final +class SetAttr(RegisterOp): + """obj.attr = src (for a native object) + + Steals the reference to src. 
+ """ + + error_kind = ERR_FALSE + + def __init__(self, obj: Value, attr: str, src: Value, line: int) -> None: + super().__init__(line) + self.obj = obj + self.attr = attr + self.src = src + assert isinstance(obj.type, RInstance), "Attribute access not supported: %s" % obj.type + self.class_type = obj.type + self.type = bool_rprimitive + # If True, we can safely assume that the attribute is previously undefined + # and we don't use a setter + self.is_init = False + + def mark_as_initializer(self) -> None: + self.is_init = True + self.error_kind = ERR_NEVER + self.type = void_rtype + + def sources(self) -> list[Value]: + return [self.obj, self.src] + + def set_sources(self, new: list[Value]) -> None: + self.obj, self.src = new + + def stolen(self) -> list[Value]: + return [self.src] + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_set_attr(self) + + +# Default name space for statics, variables +NAMESPACE_STATIC: Final = "static" + +# Static namespace for pointers to native type objects +NAMESPACE_TYPE: Final = "type" + +# Namespace for modules +NAMESPACE_MODULE: Final = "module" + +# Namespace for Python 3.12 type variable objects (implicitly created TypeVar instances, etc.) +NAMESPACE_TYPE_VAR: Final = "typevar" + + +@final +class LoadStatic(RegisterOp): + """Load a static name (name :: static). + + Load a C static variable/pointer. The namespace for statics is shared + for the entire compilation group. You can optionally provide a module + name and a sub-namespace identifier for additional namespacing to avoid + name conflicts. The static namespace does not overlap with other C names, + since the final C name will get a prefix, so conflicts only must be + avoided with other statics. + """ + + error_kind = ERR_NEVER + is_borrowed = True + + def __init__( + self, + type: RType, + identifier: str, + module_name: str | None = None, + namespace: str = NAMESPACE_STATIC, + line: int = -1, + ann: object = None, + ) -> None: + super().__init__(line) + self.identifier = identifier + self.module_name = module_name + self.namespace = namespace + self.type = type + self.ann = ann # An object to pretty print with the load + + def sources(self) -> list[Value]: + return [] + + def set_sources(self, new: list[Value]) -> None: + assert not new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_load_static(self) + + +@final +class InitStatic(RegisterOp): + """static = value :: static + + Initialize a C static variable/pointer. See everything in LoadStatic. + """ + + error_kind = ERR_NEVER + + def __init__( + self, + value: Value, + identifier: str, + module_name: str | None = None, + namespace: str = NAMESPACE_STATIC, + line: int = -1, + ) -> None: + super().__init__(line) + self.identifier = identifier + self.module_name = module_name + self.namespace = namespace + self.value = value + + def sources(self) -> list[Value]: + return [self.value] + + def set_sources(self, new: list[Value]) -> None: + (self.value,) = new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_init_static(self) + + +@final +class TupleSet(RegisterOp): + """dest = (reg, ...) (for fixed-length tuple)""" + + error_kind = ERR_NEVER + + def __init__(self, items: list[Value], line: int) -> None: + super().__init__(line) + self.items = items + # Don't keep track of the fact that an int is short after it + # is put into a tuple, since we don't properly implement + # runtime subtyping for tuples. 
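+        # (That is, short_int_rprimitive item types are widened to
+        # int_rprimitive when building the RTuple below.)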
+ self.tuple_type = RTuple( + [ + arg.type if not is_short_int_rprimitive(arg.type) else int_rprimitive + for arg in items + ] + ) + self.type = self.tuple_type + + def sources(self) -> list[Value]: + return self.items.copy() + + def stolen(self) -> list[Value]: + return self.items.copy() + + def set_sources(self, new: list[Value]) -> None: + self.items = new[:] + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_tuple_set(self) + + +@final +class TupleGet(RegisterOp): + """Get item of a fixed-length tuple (src[index]).""" + + error_kind = ERR_NEVER + + def __init__(self, src: Value, index: int, line: int = -1, *, borrow: bool = False) -> None: + super().__init__(line) + assert isinstance( + src.type, RTuple + ), f"TupleGet only operates on tuples, not {type(src.type).__name__}" + src_len = len(src.type.types) + self.src = src + self.index = index + if index < 0: + self.index += src_len + assert ( + self.index <= src_len - 1 + ), f"Index out of range.\nsource type: {src.type}\nindex: {index}" + self.type = src.type.types[index] + self.is_borrowed = borrow + + def sources(self) -> list[Value]: + return [self.src] + + def set_sources(self, new: list[Value]) -> None: + (self.src,) = new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_tuple_get(self) + + +@final +class Cast(RegisterOp): + """cast(type, src) + + Perform a runtime type check (no representation or value conversion). + + DO NOT increment reference counts. + """ + + error_kind = ERR_MAGIC + + def __init__( + self, src: Value, typ: RType, line: int, *, borrow: bool = False, unchecked: bool = False + ) -> None: + super().__init__(line) + self.src = src + self.type = typ + # If true, don't incref the result. + self.is_borrowed = borrow + # If true, don't perform a runtime type check (only changes the static type of + # the operand). Used when we know that the cast will always succeed. + self.is_unchecked = unchecked + if unchecked: + self.error_kind = ERR_NEVER + + def sources(self) -> list[Value]: + return [self.src] + + def set_sources(self, new: list[Value]) -> None: + (self.src,) = new + + def stolen(self) -> list[Value]: + if self.is_borrowed: + return [] + return [self.src] + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_cast(self) + + +@final +class Box(RegisterOp): + """box(type, src) + + This converts from a potentially unboxed representation to a straight Python object. + Only supported for types with an unboxed representation. + """ + + error_kind = ERR_NEVER + + def __init__(self, src: Value, line: int = -1) -> None: + super().__init__(line) + self.src = src + self.type = object_rprimitive + # When we box None and bool values, we produce a borrowed result + if is_none_rprimitive(self.src.type) or is_bool_or_bit_rprimitive(self.src.type): + self.is_borrowed = True + + def sources(self) -> list[Value]: + return [self.src] + + def set_sources(self, new: list[Value]) -> None: + (self.src,) = new + + def stolen(self) -> list[Value]: + return [self.src] + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_box(self) + + +@final +class Unbox(RegisterOp): + """unbox(type, src) + + This is similar to a cast, but it also changes to a (potentially) unboxed runtime + representation. Only supported for types with an unboxed representation. 
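+
+    Pretty-printed form is roughly "r1 = unbox(int, r0)"; Box is the
+    inverse operation ("r1 = box(bool, r0)").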
+ """ + + def __init__(self, src: Value, typ: RType, line: int) -> None: + self.src = src + self.type = typ + if not typ.error_overlap: + self.error_kind = ERR_MAGIC + else: + self.error_kind = ERR_MAGIC_OVERLAPPING + super().__init__(line) + + def sources(self) -> list[Value]: + return [self.src] + + def set_sources(self, new: list[Value]) -> None: + (self.src,) = new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_unbox(self) + + +@final +class RaiseStandardError(RegisterOp): + """Raise built-in exception with an optional error string. + + We have a separate opcode for this for convenience and to + generate smaller, more idiomatic C code. + """ + + # TODO: Make it more explicit at IR level that this always raises + + error_kind = ERR_FALSE + + VALUE_ERROR: Final = "ValueError" + ASSERTION_ERROR: Final = "AssertionError" + STOP_ITERATION: Final = "StopIteration" + UNBOUND_LOCAL_ERROR: Final = "UnboundLocalError" + RUNTIME_ERROR: Final = "RuntimeError" + NAME_ERROR: Final = "NameError" + ZERO_DIVISION_ERROR: Final = "ZeroDivisionError" + + def __init__(self, class_name: str, value: str | Value | None, line: int) -> None: + super().__init__(line) + self.class_name = class_name + self.value = value + self.type = bool_rprimitive + + def sources(self) -> list[Value]: + return [] + + def set_sources(self, new: list[Value]) -> None: + assert not new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_raise_standard_error(self) + + +# True steals all arguments, False steals none, a list steals those in matching positions +StealsDescription = Union[bool, list[bool]] + + +@final +class CallC(RegisterOp): + """result = function(arg0, arg1, ...) + + Call a C function that is not a compiled/native function (for + example, a Python C API function). Use Call to call native + functions. + """ + + def __init__( + self, + function_name: str, + args: list[Value], + ret_type: RType, + steals: StealsDescription, + is_borrowed: bool, + error_kind: int, + line: int, + var_arg_idx: int = -1, + *, + is_pure: bool = False, + returns_null: bool = False, + capsule: str | None = None, + ) -> None: + self.error_kind = error_kind + super().__init__(line) + self.function_name = function_name + self.args = args + self.type = ret_type + self.steals = steals + self.is_borrowed = is_borrowed + # The position of the first variable argument in args (if >= 0) + self.var_arg_idx = var_arg_idx + # Is the function pure? Pure functions have no side effects + # and all the arguments are immutable. Pure functions support + # additional optimizations. Pure functions never fail. + self.is_pure = is_pure + # The function might return a null value that does not indicate + # an error. + self.returns_null = returns_null + # A capsule from this module must be imported and initialized before calling this + # function (used for C functions exported from librt). 
Example value: "librt.base64" + self.capsule = capsule + if is_pure or returns_null: + assert error_kind == ERR_NEVER + + def sources(self) -> list[Value]: + return self.args[:] + + def set_sources(self, new: list[Value]) -> None: + self.args = new[:] + + def stolen(self) -> list[Value]: + if isinstance(self.steals, list): + assert len(self.steals) == len(self.args) + return [arg for arg, steal in zip(self.args, self.steals) if steal] + else: + return [] if not self.steals else self.sources() + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_call_c(self) + + +@final +class Truncate(RegisterOp): + """result = truncate src from src_type to dst_type + + Truncate a value from type with more bits to type with less bits. + + dst_type and src_type can be native integer types, bools or tagged + integers. Tagged integers should have the tag bit unset. + """ + + error_kind = ERR_NEVER + + def __init__(self, src: Value, dst_type: RType, line: int = -1) -> None: + super().__init__(line) + self.src = src + self.type = dst_type + self.src_type = src.type + + def sources(self) -> list[Value]: + return [self.src] + + def set_sources(self, new: list[Value]) -> None: + (self.src,) = new + + def stolen(self) -> list[Value]: + return [] + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_truncate(self) + + +@final +class Extend(RegisterOp): + """result = extend src from src_type to dst_type + + Extend a value from a type with fewer bits to a type with more bits. + + dst_type and src_type can be native integer types, bools or tagged + integers. Tagged integers should have the tag bit unset. + + If 'signed' is true, perform sign extension. Otherwise, the result will be + zero extended. + """ + + error_kind = ERR_NEVER + + def __init__(self, src: Value, dst_type: RType, signed: bool, line: int = -1) -> None: + super().__init__(line) + self.src = src + self.type = dst_type + self.src_type = src.type + self.signed = signed + + def sources(self) -> list[Value]: + return [self.src] + + def set_sources(self, new: list[Value]) -> None: + (self.src,) = new + + def stolen(self) -> list[Value]: + return [] + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_extend(self) + + +@final +class LoadGlobal(RegisterOp): + """Load a low-level global variable/pointer. + + Note that can't be used to directly load Python module-level + global variable, since they are stored in a globals dictionary + and accessed using dictionary operations. + """ + + error_kind = ERR_NEVER + is_borrowed = True + + def __init__(self, type: RType, identifier: str, line: int = -1, ann: object = None) -> None: + super().__init__(line) + self.identifier = identifier + self.type = type + self.ann = ann # An object to pretty print with the load + + def sources(self) -> list[Value]: + return [] + + def set_sources(self, new: list[Value]) -> None: + assert not new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_load_global(self) + + +@final +class IntOp(RegisterOp): + """Binary arithmetic or bitwise op on integer operands (e.g., r1 = r2 + r3). + + These ops are low-level and are similar to the corresponding C + operations. + + The left and right values must have low-level integer types with + compatible representations. Fixed-width integers, short_int_rprimitive, + bool_rprimitive and bit_rprimitive are supported. + + For tagged (arbitrary-precision) integer ops look at mypyc.primitives.int_ops. 
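+
+    Illustrative sketch (r0 and r1 are assumed to be existing i64 values,
+    and int64_rprimitive comes from mypyc.ir.rtypes):
+
+        op = IntOp(int64_rprimitive, r0, r1, IntOp.ADD)
+
+    which pretty-prints roughly as "r2 = r0 + r1".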
+ """ + + error_kind = ERR_NEVER + + # Arithmetic ops + ADD: Final = 0 + SUB: Final = 1 + MUL: Final = 2 + DIV: Final = 3 + MOD: Final = 4 + + # Bitwise ops + AND: Final = 200 + OR: Final = 201 + XOR: Final = 202 + LEFT_SHIFT: Final = 203 + RIGHT_SHIFT: Final = 204 + + op_str: Final = { + ADD: "+", + SUB: "-", + MUL: "*", + DIV: "/", + MOD: "%", + AND: "&", + OR: "|", + XOR: "^", + LEFT_SHIFT: "<<", + RIGHT_SHIFT: ">>", + } + + def __init__(self, type: RType, lhs: Value, rhs: Value, op: int, line: int = -1) -> None: + super().__init__(line) + self.type = type + self.lhs = lhs + self.rhs = rhs + self.op = op + + def sources(self) -> list[Value]: + return [self.lhs, self.rhs] + + def set_sources(self, new: list[Value]) -> None: + self.lhs, self.rhs = new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_int_op(self) + + +# We can't have this in the IntOp class body, because of +# https://github.com/mypyc/mypyc/issues/932. +int_op_to_id: Final = {op: op_id for op_id, op in IntOp.op_str.items()} + + +@final +class ComparisonOp(RegisterOp): + """Low-level comparison op for integers and pointers. + + Both unsigned and signed comparisons are supported. Supports + comparisons between fixed-width integer types and pointer types. + The operands should have matching sizes. + + The result is always a bit (representing a boolean). + + Python semantics, such as calling __eq__, are not supported. + """ + + # Must be ERR_NEVER or ERR_FALSE. ERR_FALSE means that a false result + # indicates that an exception has been raised and should be propagated. + error_kind = ERR_NEVER + + # S for signed and U for unsigned + EQ: Final = 100 + NEQ: Final = 101 + SLT: Final = 102 + SGT: Final = 103 + SLE: Final = 104 + SGE: Final = 105 + ULT: Final = 106 + UGT: Final = 107 + ULE: Final = 108 + UGE: Final = 109 + + op_str: Final = { + EQ: "==", + NEQ: "!=", + SLT: "<", + SGT: ">", + SLE: "<=", + SGE: ">=", + ULT: "<", + UGT: ">", + ULE: "<=", + UGE: ">=", + } + + signed_ops: Final = {"==": EQ, "!=": NEQ, "<": SLT, ">": SGT, "<=": SLE, ">=": SGE} + unsigned_ops: Final = {"==": EQ, "!=": NEQ, "<": ULT, ">": UGT, "<=": ULE, ">=": UGE} + + def __init__(self, lhs: Value, rhs: Value, op: int, line: int = -1) -> None: + super().__init__(line) + self.type = bit_rprimitive + self.lhs = lhs + self.rhs = rhs + self.op = op + + def sources(self) -> list[Value]: + return [self.lhs, self.rhs] + + def set_sources(self, new: list[Value]) -> None: + self.lhs, self.rhs = new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_comparison_op(self) + + +@final +class FloatOp(RegisterOp): + """Binary float arithmetic op (e.g., r1 = r2 + r3). + + These ops are low-level and are similar to the corresponding C + operations (and somewhat different from Python operations). + + The left and right values must be floats. 
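+
+    For example, FloatOp(r0, r1, FloatOp.DIV) divides two unboxed doubles
+    with C semantics; since error_kind is ERR_NEVER, any Python-level
+    zero-division handling has to happen before this op is emitted.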
+ """ + + error_kind = ERR_NEVER + + ADD: Final = 0 + SUB: Final = 1 + MUL: Final = 2 + DIV: Final = 3 + MOD: Final = 4 + + op_str: Final = {ADD: "+", SUB: "-", MUL: "*", DIV: "/", MOD: "%"} + + def __init__(self, lhs: Value, rhs: Value, op: int, line: int = -1) -> None: + super().__init__(line) + self.type = float_rprimitive + self.lhs = lhs + self.rhs = rhs + self.op = op + + def sources(self) -> list[Value]: + return [self.lhs, self.rhs] + + def set_sources(self, new: list[Value]) -> None: + (self.lhs, self.rhs) = new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_float_op(self) + + +# We can't have this in the FloatOp class body, because of +# https://github.com/mypyc/mypyc/issues/932. +float_op_to_id: Final = {op: op_id for op_id, op in FloatOp.op_str.items()} + + +@final +class FloatNeg(RegisterOp): + """Float negation op (r1 = -r2).""" + + error_kind = ERR_NEVER + + def __init__(self, src: Value, line: int = -1) -> None: + super().__init__(line) + self.type = float_rprimitive + self.src = src + + def sources(self) -> list[Value]: + return [self.src] + + def set_sources(self, new: list[Value]) -> None: + (self.src,) = new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_float_neg(self) + + +@final +class FloatComparisonOp(RegisterOp): + """Low-level comparison op for floats.""" + + error_kind = ERR_NEVER + + EQ: Final = 200 + NEQ: Final = 201 + LT: Final = 202 + GT: Final = 203 + LE: Final = 204 + GE: Final = 205 + + op_str: Final = {EQ: "==", NEQ: "!=", LT: "<", GT: ">", LE: "<=", GE: ">="} + + def __init__(self, lhs: Value, rhs: Value, op: int, line: int = -1) -> None: + super().__init__(line) + self.type = bit_rprimitive + self.lhs = lhs + self.rhs = rhs + self.op = op + + def sources(self) -> list[Value]: + return [self.lhs, self.rhs] + + def set_sources(self, new: list[Value]) -> None: + (self.lhs, self.rhs) = new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_float_comparison_op(self) + + +# We can't have this in the FloatOp class body, because of +# https://github.com/mypyc/mypyc/issues/932. +float_comparison_op_to_id: Final = {op: op_id for op_id, op in FloatComparisonOp.op_str.items()} + + +@final +class LoadMem(RegisterOp): + """Read a memory location: result = *(type *)src. 
+ + Attributes: + type: Type of the read value + src: Pointer to memory to read + """ + + error_kind = ERR_NEVER + + def __init__(self, type: RType, src: Value, line: int = -1, *, borrow: bool = False) -> None: + super().__init__(line) + self.type = type + # TODO: Support other native integer types + assert is_pointer_rprimitive(src.type) + self.src = src + self.is_borrowed = borrow and type.is_refcounted + + def sources(self) -> list[Value]: + return [self.src] + + def set_sources(self, new: list[Value]) -> None: + (self.src,) = new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_load_mem(self) + + +@final +class SetMem(Op): + """Write to a memory location: *(type *)dest = src + + Attributes: + type: Type of the written value + dest: Pointer to memory to write + src: Source value + """ + + error_kind = ERR_NEVER + + def __init__(self, type: RType, dest: Value, src: Value, line: int = -1) -> None: + super().__init__(line) + self.type = void_rtype + self.dest_type = type + self.src = src + self.dest = dest + + def sources(self) -> list[Value]: + return [self.src, self.dest] + + def set_sources(self, new: list[Value]) -> None: + self.src, self.dest = new + + def stolen(self) -> list[Value]: + return [self.src] + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_set_mem(self) + + +@final +class GetElementPtr(RegisterOp): + """Get the address of a struct element. + + Note that you may need to use KeepAlive to avoid the struct + being freed, if it's reference counted, such as PyObject *. + """ + + error_kind = ERR_NEVER + + def __init__(self, src: Value, src_type: RType, field: str, line: int = -1) -> None: + super().__init__(line) + self.type = pointer_rprimitive + self.src = src + self.src_type = src_type + self.field = field + + def sources(self) -> list[Value]: + return [self.src] + + def set_sources(self, new: list[Value]) -> None: + (self.src,) = new + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_get_element_ptr(self) + + +@final +class SetElement(RegisterOp): + """Set the value of a struct element. + + This evaluates to a new struct with the changed value. + + Use together with Undef to initialize a fresh struct value + (see Undef for more details). + """ + + error_kind = ERR_NEVER + + def __init__(self, src: Value, field: str, item: Value, line: int = -1) -> None: + super().__init__(line) + assert isinstance(src.type, RStruct), src.type + self.type = src.type + self.src = src + self.item = item + self.field = field + + def sources(self) -> list[Value]: + return [self.src] + + def set_sources(self, new: list[Value]) -> None: + (self.src,) = new + + def stolen(self) -> list[Value]: + return [self.src] + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_set_element(self) + + +@final +class LoadAddress(RegisterOp): + """Get the address of a value: result = (type)&src + + Attributes: + type: Type of the loaded address(e.g. ptr/object_ptr) + src: Source value (str for globals like 'PyList_Type', + Register for temporary values or locals, LoadStatic + for statics.) 
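+
+    Pretty-printed forms are roughly "r0 = load_address PyList_Type" for a
+    str source and "r0 = load_address mod.x :: static" for a LoadStatic
+    source (see mypyc.ir.pprint.IRPrettyPrintVisitor.visit_load_address).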
+ """ + + error_kind = ERR_NEVER + is_borrowed = True + + def __init__(self, type: RType, src: str | Register | LoadStatic, line: int = -1) -> None: + super().__init__(line) + self.type = type + self.src = src + + def sources(self) -> list[Value]: + if isinstance(self.src, Register): + return [self.src] + else: + return [] + + def set_sources(self, new: list[Value]) -> None: + if new: + assert isinstance(new[0], Register) + assert len(new) == 1 + self.src = new[0] + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_load_address(self) + + +@final +class KeepAlive(RegisterOp): + """A no-op operation that ensures source values aren't freed. + + This is sometimes useful to avoid decref when a reference is still + being held but not seen by the compiler. + + A typical use case is like this (C-like pseudocode): + + ptr = &x.item + r = *ptr + keep_alive x # x must not be freed here + # x may be freed here + + If we didn't have "keep_alive x", x could be freed immediately + after taking the address of 'item', resulting in a read after free + on the second line. + + If 'steal' is true, the value is considered to be stolen at + this op, i.e. it won't be decref'd. You need to ensure that + the value is freed otherwise, perhaps by using borrowing + followed by Unborrow. + + Be careful with steal=True -- this can cause memory leaks. + """ + + error_kind = ERR_NEVER + + def __init__(self, src: list[Value], *, steal: bool = False) -> None: + assert src + self.src = src + self.steal = steal + + def sources(self) -> list[Value]: + return self.src.copy() + + def stolen(self) -> list[Value]: + if self.steal: + return self.src.copy() + return [] + + def set_sources(self, new: list[Value]) -> None: + self.src = new[:] + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_keep_alive(self) + + +@final +class Unborrow(RegisterOp): + """A no-op op to create a regular reference from a borrowed one. + + Borrowed references can only be used temporarily and the reference + counts won't be managed. This value will be refcounted normally. + + This is mainly useful if you split an aggregate value, such as + a tuple, into components using borrowed values (to avoid increfs), + and want to treat the components as sharing the original managed + reference. You'll also need to use KeepAlive with steal=True to + "consume" the original tuple reference: + + # t is a 2-tuple + r0 = borrow t[0] + r1 = borrow t[1] + keep_alive steal t + r2 = unborrow r0 + r3 = unborrow r1 + # now (r2, r3) represent the tuple as separate items, that are + # managed again. (Note we need to steal before unborrow, to avoid + # refcount briefly touching zero if r2 or r3 are unused.) + + Be careful with this -- this can easily cause double freeing. 
+ """ + + error_kind = ERR_NEVER + + def __init__(self, src: Value, line: int = -1) -> None: + super().__init__(line) + assert src.is_borrowed + self.src = src + self.type = src.type + + def sources(self) -> list[Value]: + return [self.src] + + def set_sources(self, new: list[Value]) -> None: + (self.src,) = new + + def stolen(self) -> list[Value]: + return [] + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_unborrow(self) + + +@trait +class OpVisitor(Generic[T]): + """Generic visitor over ops (uses the visitor design pattern).""" + + @abstractmethod + def visit_goto(self, op: Goto) -> T: + raise NotImplementedError + + @abstractmethod + def visit_branch(self, op: Branch) -> T: + raise NotImplementedError + + @abstractmethod + def visit_return(self, op: Return) -> T: + raise NotImplementedError + + @abstractmethod + def visit_unreachable(self, op: Unreachable) -> T: + raise NotImplementedError + + @abstractmethod + def visit_assign(self, op: Assign) -> T: + raise NotImplementedError + + @abstractmethod + def visit_assign_multi(self, op: AssignMulti) -> T: + raise NotImplementedError + + @abstractmethod + def visit_load_error_value(self, op: LoadErrorValue) -> T: + raise NotImplementedError + + @abstractmethod + def visit_load_literal(self, op: LoadLiteral) -> T: + raise NotImplementedError + + @abstractmethod + def visit_get_attr(self, op: GetAttr) -> T: + raise NotImplementedError + + @abstractmethod + def visit_set_attr(self, op: SetAttr) -> T: + raise NotImplementedError + + @abstractmethod + def visit_load_static(self, op: LoadStatic) -> T: + raise NotImplementedError + + @abstractmethod + def visit_init_static(self, op: InitStatic) -> T: + raise NotImplementedError + + @abstractmethod + def visit_tuple_get(self, op: TupleGet) -> T: + raise NotImplementedError + + @abstractmethod + def visit_tuple_set(self, op: TupleSet) -> T: + raise NotImplementedError + + def visit_inc_ref(self, op: IncRef) -> T: + raise NotImplementedError + + def visit_dec_ref(self, op: DecRef) -> T: + raise NotImplementedError + + @abstractmethod + def visit_call(self, op: Call) -> T: + raise NotImplementedError + + @abstractmethod + def visit_method_call(self, op: MethodCall) -> T: + raise NotImplementedError + + @abstractmethod + def visit_cast(self, op: Cast) -> T: + raise NotImplementedError + + @abstractmethod + def visit_box(self, op: Box) -> T: + raise NotImplementedError + + @abstractmethod + def visit_unbox(self, op: Unbox) -> T: + raise NotImplementedError + + @abstractmethod + def visit_raise_standard_error(self, op: RaiseStandardError) -> T: + raise NotImplementedError + + @abstractmethod + def visit_call_c(self, op: CallC) -> T: + raise NotImplementedError + + @abstractmethod + def visit_primitive_op(self, op: PrimitiveOp) -> T: + raise NotImplementedError + + @abstractmethod + def visit_truncate(self, op: Truncate) -> T: + raise NotImplementedError + + @abstractmethod + def visit_extend(self, op: Extend) -> T: + raise NotImplementedError + + @abstractmethod + def visit_load_global(self, op: LoadGlobal) -> T: + raise NotImplementedError + + @abstractmethod + def visit_int_op(self, op: IntOp) -> T: + raise NotImplementedError + + @abstractmethod + def visit_comparison_op(self, op: ComparisonOp) -> T: + raise NotImplementedError + + @abstractmethod + def visit_float_op(self, op: FloatOp) -> T: + raise NotImplementedError + + @abstractmethod + def visit_float_neg(self, op: FloatNeg) -> T: + raise NotImplementedError + + @abstractmethod + def visit_float_comparison_op(self, op: 
FloatComparisonOp) -> T: + raise NotImplementedError + + @abstractmethod + def visit_load_mem(self, op: LoadMem) -> T: + raise NotImplementedError + + @abstractmethod + def visit_set_mem(self, op: SetMem) -> T: + raise NotImplementedError + + @abstractmethod + def visit_get_element_ptr(self, op: GetElementPtr) -> T: + raise NotImplementedError + + @abstractmethod + def visit_set_element(self, op: SetElement) -> T: + raise NotImplementedError + + @abstractmethod + def visit_load_address(self, op: LoadAddress) -> T: + raise NotImplementedError + + @abstractmethod + def visit_keep_alive(self, op: KeepAlive) -> T: + raise NotImplementedError + + @abstractmethod + def visit_unborrow(self, op: Unborrow) -> T: + raise NotImplementedError + + +# TODO: Should the following definition live somewhere else? + + +# We do a three-pass deserialization scheme in order to resolve name +# references. +# 1. Create an empty ClassIR for each class in an SCC. +# 2. Deserialize all of the functions, which can contain references +# to ClassIRs in their types +# 3. Deserialize all of the classes, which contain lots of references +# to the functions they contain. (And to other classes.) +# +# Note that this approach differs from how we deserialize ASTs in mypy itself, +# where everything is deserialized in one pass then a second pass cleans up +# 'cross_refs'. We don't follow that approach here because it seems to be more +# code for not a lot of gain since it is easy in mypyc to identify all the objects +# we might need to reference. +# +# Because of these references, we need to maintain maps from class +# names to ClassIRs and func IDs to FuncIRs. +# +# These are tracked in a DeserMaps which is passed to every +# deserialization function. +# +# (Serialization and deserialization *will* be used for incremental +# compilation but so far it is not hooked up to anything.) 
+class DeserMaps(NamedTuple): + classes: dict[str, ClassIR] + functions: dict[str, FuncIR] diff --git a/.venv/lib/python3.12/site-packages/mypyc/ir/pprint.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/ir/pprint.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..f65f971 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/ir/pprint.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/ir/pprint.py b/.venv/lib/python3.12/site-packages/mypyc/ir/pprint.py new file mode 100644 index 0000000..efefd76 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/ir/pprint.py @@ -0,0 +1,523 @@ +"""Utilities for pretty-printing IR in a human-readable form.""" + +from __future__ import annotations + +from collections import defaultdict +from collections.abc import Sequence +from typing import Any, Final, Union + +from mypyc.common import short_name +from mypyc.ir.func_ir import FuncIR, all_values_full +from mypyc.ir.module_ir import ModuleIRs +from mypyc.ir.ops import ( + ERR_NEVER, + Assign, + AssignMulti, + BasicBlock, + Box, + Branch, + Call, + CallC, + Cast, + ComparisonOp, + ControlOp, + CString, + DecRef, + Extend, + Float, + FloatComparisonOp, + FloatNeg, + FloatOp, + GetAttr, + GetElementPtr, + Goto, + IncRef, + InitStatic, + Integer, + IntOp, + KeepAlive, + LoadAddress, + LoadErrorValue, + LoadGlobal, + LoadLiteral, + LoadMem, + LoadStatic, + MethodCall, + Op, + OpVisitor, + PrimitiveOp, + RaiseStandardError, + Register, + Return, + SetAttr, + SetElement, + SetMem, + Truncate, + TupleGet, + TupleSet, + Unborrow, + Unbox, + Undef, + Unreachable, + Value, +) +from mypyc.ir.rtypes import RType, is_bool_rprimitive, is_int_rprimitive + +ErrorSource = Union[BasicBlock, Op] + + +class IRPrettyPrintVisitor(OpVisitor[str]): + """Internal visitor that pretty-prints ops.""" + + def __init__(self, names: dict[Value, str]) -> None: + # This should contain a name for all values that are shown as + # registers in the output. This is not just for Register + # instances -- all Ops that produce values need (generated) names. + self.names = names + + def visit_goto(self, op: Goto) -> str: + return self.format("goto %l", op.label) + + branch_op_names: Final = {Branch.BOOL: ("%r", "bool"), Branch.IS_ERROR: ("is_error(%r)", "")} + + def visit_branch(self, op: Branch) -> str: + fmt, typ = self.branch_op_names[op.op] + if op.negated: + fmt = f"not {fmt}" + + cond = self.format(fmt, op.value) + tb = "" + if op.traceback_entry: + tb = " (error at %s:%d)" % op.traceback_entry + fmt = f"if {cond} goto %l{tb} else goto %l" + if typ: + fmt += f" :: {typ}" + return self.format(fmt, op.true, op.false) + + def visit_return(self, op: Return) -> str: + return self.format("return %r", op.value) + + def visit_unreachable(self, op: Unreachable) -> str: + return "unreachable" + + def visit_assign(self, op: Assign) -> str: + return self.format("%r = %r", op.dest, op.src) + + def visit_assign_multi(self, op: AssignMulti) -> str: + return self.format("%r = [%s]", op.dest, ", ".join(self.format("%r", v) for v in op.src)) + + def visit_load_error_value(self, op: LoadErrorValue) -> str: + return self.format("%r = :: %s", op, op.type) + + def visit_load_literal(self, op: LoadLiteral) -> str: + prefix = "" + # For values that have a potential unboxed representation, make + # it explicit that this is a Python object. 
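+        # (In practice only int literals need this: small ints are loaded
+        # with Integer instead, so the "object" prefix marks a literal that
+        # is a boxed Python int.)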
+ if isinstance(op.value, int): + prefix = "object " + + rvalue = repr(op.value) + if isinstance(op.value, frozenset): + # We need to generate a string representation that won't vary + # run-to-run because sets are unordered, otherwise we may get + # spurious irbuild test failures. + # + # Sorting by the item's string representation is a bit of a + # hack, but it's stable and won't cause TypeErrors. + formatted_items = [repr(i) for i in sorted(op.value, key=str)] + rvalue = "frozenset({" + ", ".join(formatted_items) + "})" + return self.format("%r = %s%s", op, prefix, rvalue) + + def visit_get_attr(self, op: GetAttr) -> str: + return self.format("%r = %s%r.%s", op, self.borrow_prefix(op), op.obj, op.attr) + + def borrow_prefix(self, op: Op) -> str: + if op.is_borrowed: + return "borrow " + return "" + + def visit_set_attr(self, op: SetAttr) -> str: + if op.is_init: + assert op.error_kind == ERR_NEVER + if op.error_kind == ERR_NEVER: + # Initialization and direct struct access can never fail + return self.format("%r.%s = %r", op.obj, op.attr, op.src) + else: + return self.format("%r.%s = %r; %r = is_error", op.obj, op.attr, op.src, op) + + def visit_load_static(self, op: LoadStatic) -> str: + ann = f" ({repr(op.ann)})" if op.ann else "" + name = op.identifier + if op.module_name is not None: + name = f"{op.module_name}.{name}" + return self.format("%r = %s :: %s%s", op, name, op.namespace, ann) + + def visit_init_static(self, op: InitStatic) -> str: + name = op.identifier + if op.module_name is not None: + name = f"{op.module_name}.{name}" + return self.format("%s = %r :: %s", name, op.value, op.namespace) + + def visit_tuple_get(self, op: TupleGet) -> str: + return self.format("%r = %s%r[%d]", op, self.borrow_prefix(op), op.src, op.index) + + def visit_tuple_set(self, op: TupleSet) -> str: + item_str = ", ".join(self.format("%r", item) for item in op.items) + return self.format("%r = (%s)", op, item_str) + + def visit_inc_ref(self, op: IncRef) -> str: + s = self.format("inc_ref %r", op.src) + # TODO: Remove bool check (it's unboxed) + if is_bool_rprimitive(op.src.type) or is_int_rprimitive(op.src.type): + s += f" :: {short_name(op.src.type.name)}" + return s + + def visit_dec_ref(self, op: DecRef) -> str: + s = self.format("%sdec_ref %r", "x" if op.is_xdec else "", op.src) + # TODO: Remove bool check (it's unboxed) + if is_bool_rprimitive(op.src.type) or is_int_rprimitive(op.src.type): + s += f" :: {short_name(op.src.type.name)}" + return s + + def visit_call(self, op: Call) -> str: + args = ", ".join(self.format("%r", arg) for arg in op.args) + # TODO: Display long name? 
+ short_name = op.fn.shortname + s = f"{short_name}({args})" + if not op.is_void: + s = self.format("%r = ", op) + s + return s + + def visit_method_call(self, op: MethodCall) -> str: + args = ", ".join(self.format("%r", arg) for arg in op.args) + s = self.format("%r.%s(%s)", op.obj, op.method, args) + if not op.is_void: + s = self.format("%r = ", op) + s + return s + + def visit_cast(self, op: Cast) -> str: + if op.is_unchecked: + prefix = "unchecked " + else: + prefix = "" + return self.format( + "%r = %s%scast(%s, %r)", op, prefix, self.borrow_prefix(op), op.type, op.src + ) + + def visit_box(self, op: Box) -> str: + return self.format("%r = box(%s, %r)", op, op.src.type, op.src) + + def visit_unbox(self, op: Unbox) -> str: + return self.format("%r = unbox(%s, %r)", op, op.type, op.src) + + def visit_raise_standard_error(self, op: RaiseStandardError) -> str: + if op.value is not None: + if isinstance(op.value, str): + return self.format("%r = raise %s(%s)", op, op.class_name, repr(op.value)) + elif isinstance(op.value, Value): + return self.format("%r = raise %s(%r)", op, op.class_name, op.value) + else: + assert False, "value type must be either str or Value" + else: + return self.format("%r = raise %s", op, op.class_name) + + def visit_call_c(self, op: CallC) -> str: + args_str = ", ".join(self.format("%r", arg) for arg in op.args) + if op.is_void: + return self.format("%s(%s)", op.function_name, args_str) + else: + return self.format("%r = %s(%s)", op, op.function_name, args_str) + + def visit_primitive_op(self, op: PrimitiveOp) -> str: + args_str = ", ".join(self.format("%r", arg) for arg in op.args) + if op.is_void: + return self.format("%s %s", op.desc.name, args_str) + else: + return self.format("%r = %s %s", op, op.desc.name, args_str) + + def visit_truncate(self, op: Truncate) -> str: + return self.format("%r = truncate %r: %t to %t", op, op.src, op.src_type, op.type) + + def visit_extend(self, op: Extend) -> str: + if op.signed: + extra = " signed" + else: + extra = "" + return self.format("%r = extend%s %r: %t to %t", op, extra, op.src, op.src_type, op.type) + + def visit_load_global(self, op: LoadGlobal) -> str: + ann = f" ({repr(op.ann)})" if op.ann else "" + return self.format("%r = load_global %s :: static%s", op, op.identifier, ann) + + def visit_int_op(self, op: IntOp) -> str: + return self.format("%r = %r %s %r", op, op.lhs, IntOp.op_str[op.op], op.rhs) + + def visit_comparison_op(self, op: ComparisonOp) -> str: + if op.op in (ComparisonOp.SLT, ComparisonOp.SGT, ComparisonOp.SLE, ComparisonOp.SGE): + sign_format = " :: signed" + elif op.op in (ComparisonOp.ULT, ComparisonOp.UGT, ComparisonOp.ULE, ComparisonOp.UGE): + sign_format = " :: unsigned" + else: + sign_format = "" + return self.format( + "%r = %r %s %r%s", op, op.lhs, ComparisonOp.op_str[op.op], op.rhs, sign_format + ) + + def visit_float_op(self, op: FloatOp) -> str: + return self.format("%r = %r %s %r", op, op.lhs, FloatOp.op_str[op.op], op.rhs) + + def visit_float_neg(self, op: FloatNeg) -> str: + return self.format("%r = -%r", op, op.src) + + def visit_float_comparison_op(self, op: FloatComparisonOp) -> str: + return self.format("%r = %r %s %r", op, op.lhs, op.op_str[op.op], op.rhs) + + def visit_load_mem(self, op: LoadMem) -> str: + return self.format( + "%r = %sload_mem %r :: %t*", op, self.borrow_prefix(op), op.src, op.type + ) + + def visit_set_mem(self, op: SetMem) -> str: + return self.format("set_mem %r, %r :: %t*", op.dest, op.src, op.dest_type) + + def visit_get_element_ptr(self, op: GetElementPtr) 
-> str: + return self.format("%r = get_element_ptr %r %s :: %t", op, op.src, op.field, op.src_type) + + def visit_set_element(self, op: SetElement) -> str: + return self.format("%r = set_element %r, %s, %r", op, op.src, op.field, op.item) + + def visit_load_address(self, op: LoadAddress) -> str: + if isinstance(op.src, Register): + return self.format("%r = load_address %r", op, op.src) + elif isinstance(op.src, LoadStatic): + name = op.src.identifier + if op.src.module_name is not None: + name = f"{op.src.module_name}.{name}" + return self.format("%r = load_address %s :: %s", op, name, op.src.namespace) + else: + return self.format("%r = load_address %s", op, op.src) + + def visit_keep_alive(self, op: KeepAlive) -> str: + if op.steal: + steal = "steal " + else: + steal = "" + return self.format( + "keep_alive {}{}".format(steal, ", ".join(self.format("%r", v) for v in op.src)) + ) + + def visit_unborrow(self, op: Unborrow) -> str: + return self.format("%r = unborrow %r", op, op.src) + + # Helpers + + def format(self, fmt: str, *args: Any) -> str: + """Helper for formatting strings. + + These format sequences are supported in fmt: + + %s: arbitrary object converted to string using str() + %r: name of IR value/register + %d: int + %f: float + %l: BasicBlock (formatted as label 'Ln') + %t: RType + """ + result = [] + i = 0 + arglist = list(args) + while i < len(fmt): + n = fmt.find("%", i) + if n < 0: + n = len(fmt) + result.append(fmt[i:n]) + if n < len(fmt): + typespec = fmt[n + 1] + arg = arglist.pop(0) + if typespec == "r": + # Register/value + assert isinstance(arg, Value) + if isinstance(arg, Integer): + result.append(str(arg.value)) + elif isinstance(arg, Float): + result.append(repr(arg.value)) + elif isinstance(arg, CString): + result.append(f"CString({arg.value!r})") + elif isinstance(arg, Undef): + result.append(f"undef {arg.type.name}") + else: + result.append(self.names[arg]) + elif typespec == "d": + # Integer + result.append("%d" % arg) + elif typespec == "f": + # Float + result.append("%f" % arg) + elif typespec == "l": + # Basic block (label) + assert isinstance(arg, BasicBlock) + result.append("L%s" % arg.label) + elif typespec == "t": + # RType + assert isinstance(arg, RType) + result.append(arg.name) + elif typespec == "s": + # String + result.append(str(arg)) + else: + raise ValueError(f"Invalid format sequence %{typespec}") + i = n + 2 + else: + i = n + return "".join(result) + + +def format_registers(func_ir: FuncIR, names: dict[Value, str]) -> list[str]: + result = [] + i = 0 + regs = all_values_full(func_ir.arg_regs, func_ir.blocks) + while i < len(regs): + i0 = i + group = [names[regs[i0]]] + while i + 1 < len(regs) and regs[i + 1].type == regs[i0].type: + i += 1 + group.append(names[regs[i]]) + i += 1 + result.append("{} :: {}".format(", ".join(group), regs[i0].type)) + return result + + +def format_blocks( + blocks: list[BasicBlock], + names: dict[Value, str], + source_to_error: dict[ErrorSource, list[str]], +) -> list[str]: + """Format a list of IR basic blocks into a human-readable form.""" + # First label all of the blocks + for i, block in enumerate(blocks): + block.label = i + + handler_map: dict[BasicBlock, list[BasicBlock]] = {} + for b in blocks: + if b.error_handler: + handler_map.setdefault(b.error_handler, []).append(b) + + visitor = IRPrettyPrintVisitor(names) + + lines = [] + for i, block in enumerate(blocks): + handler_msg = "" + if block in handler_map: + labels = sorted("L%d" % b.label for b in handler_map[block]) + handler_msg = " (handler for 
{})".format(", ".join(labels)) + + lines.append("L%d:%s" % (block.label, handler_msg)) + if block in source_to_error: + for error in source_to_error[block]: + lines.append(f" ERR: {error}") + ops = block.ops + if ( + isinstance(ops[-1], Goto) + and i + 1 < len(blocks) + and ops[-1].label == blocks[i + 1] + and not source_to_error.get(ops[-1], []) + ): + # Hide the last goto if it just goes to the next basic block, + # and there are no assocatiated errors with the op. + ops = ops[:-1] + for op in ops: + line = " " + op.accept(visitor) + lines.append(line) + if op in source_to_error: + for error in source_to_error[op]: + lines.append(f" ERR: {error}") + + if not isinstance(block.ops[-1], (Goto, Branch, Return, Unreachable)): + # Each basic block needs to exit somewhere. + lines.append(" [MISSING BLOCK EXIT OPCODE]") + return lines + + +def format_func(fn: FuncIR, errors: Sequence[tuple[ErrorSource, str]] = ()) -> list[str]: + lines = [] + cls_prefix = fn.class_name + "." if fn.class_name else "" + lines.append( + "def {}{}({}):".format(cls_prefix, fn.name, ", ".join(arg.name for arg in fn.args)) + ) + names = generate_names_for_ir(fn.arg_regs, fn.blocks) + for line in format_registers(fn, names): + lines.append(" " + line) + + source_to_error = defaultdict(list) + for source, error in errors: + source_to_error[source].append(error) + + code = format_blocks(fn.blocks, names, source_to_error) + lines.extend(code) + return lines + + +def format_modules(modules: ModuleIRs) -> list[str]: + ops = [] + for module in modules.values(): + for fn in module.functions: + ops.extend(format_func(fn)) + ops.append("") + return ops + + +def generate_names_for_ir(args: list[Register], blocks: list[BasicBlock]) -> dict[Value, str]: + """Generate unique names for IR values. + + Give names such as 'r5' to temp values in IR which are useful when + pretty-printing or generating C. Ensure generated names are unique. + """ + names: dict[Value, str] = {} + used_names = set() + + temp_index = 0 + + for arg in args: + names[arg] = arg.name + used_names.add(arg.name) + + for block in blocks: + for op in block.ops: + values = [] + + for source in op.sources(): + if source not in names: + values.append(source) + + if isinstance(op, (Assign, AssignMulti)): + values.append(op.dest) + elif isinstance(op, ControlOp) or op.is_void: + continue + elif op not in names: + values.append(op) + + for value in values: + if value in names: + continue + if isinstance(value, Register) and value.name: + name = value.name + elif isinstance(value, (Integer, Float, Undef)): + continue + else: + name = "r%d" % temp_index + temp_index += 1 + + # Append _2, _3, ... if needed to make the name unique. 
+ if name in used_names: + n = 2 + while True: + candidate = "%s_%d" % (name, n) + if candidate not in used_names: + name = candidate + break + n += 1 + + names[value] = name + used_names.add(name) + + return names diff --git a/.venv/lib/python3.12/site-packages/mypyc/ir/rtypes.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/ir/rtypes.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..44ca491 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/ir/rtypes.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/ir/rtypes.py b/.venv/lib/python3.12/site-packages/mypyc/ir/rtypes.py new file mode 100644 index 0000000..66b98e5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/ir/rtypes.py @@ -0,0 +1,1148 @@ +"""Types used in the intermediate representation. + +These are runtime types (RTypes), as opposed to mypy Type objects. +The latter are only used during type checking and not directly used at +runtime. Runtime types are derived from mypy types, but there's no +simple one-to-one correspondence. (Here 'runtime' means 'runtime +checked'.) + +The generated IR ensures some runtime type safety properties based on +RTypes. Compiled code can assume that the runtime value matches the +static RType of a value. If the RType of a register is 'builtins.str' +(str_rprimitive), for example, the generated IR will ensure that the +register will have a 'str' object. + +RTypes are simpler and less expressive than mypy (or PEP 484) +types. For example, all mypy types of form 'list[T]' (for arbitrary T) +are erased to the single RType 'builtins.list' (list_rprimitive). + +mypyc.irbuild.mapper.Mapper.type_to_rtype converts mypy Types to mypyc +RTypes. + +NOTE: As a convention, we don't create subclasses of concrete RType + subclasses (e.g. you shouldn't define a subclass of RTuple, which + is a concrete class). We prefer a flat class hierarchy. + + If you want to introduce a variant of an existing class, you'd + typically add an attribute (e.g. a flag) to an existing concrete + class to enable the new behavior. In rare cases, adding a new + abstract base class could also be an option. Adding a completely + separate class and sharing some functionality using module-level + helper functions may also be reasonable. + + This makes it possible to use isinstance(x, ) checks without worrying about potential subclasses + and avoids most trouble caused by implementation inheritance. +""" + +from __future__ import annotations + +from abc import abstractmethod +from typing import TYPE_CHECKING, ClassVar, Final, Generic, TypeVar, final +from typing_extensions import TypeGuard + +from mypyc.common import HAVE_IMMORTAL, IS_32_BIT_PLATFORM, PLATFORM_SIZE, JsonDict, short_name +from mypyc.namegen import NameGenerator + +if TYPE_CHECKING: + from mypyc.ir.class_ir import ClassIR + from mypyc.ir.ops import DeserMaps + +T = TypeVar("T") + + +class RType: + """Abstract base class for runtime types (erased, only concrete; no generics).""" + + name: str + # If True, the type has a special unboxed representation. If False, the + # type is represented as PyObject *. Even if True, the representation + # may contain pointers. + is_unboxed = False + # This is the C undefined value for this type. It's used for initialization + # if there's no value yet, and for function return value on error/exception. 
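+    # (For example "NULL" for boxed types and "CPY_INT_TAG" for tagged
+    # integers; see RPrimitive.__init__ below for the full set of values.)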
+ # + # TODO: This shouldn't be specific to C or a string + c_undefined: str + # If unboxed: does the unboxed version use reference counting? + is_refcounted = True + # C type; use Emitter.ctype() to access + _ctype: str + # If True, error/undefined value overlaps with a valid value. To + # detect an exception, PyErr_Occurred() must be used in addition + # to checking for error value as the return value of a function. + # + # For example, no i64 value can be reserved for error value, so we + # pick an arbitrary value (-113) to signal error, but this is + # also a valid non-error value. The chosen value is rare as a + # normal, non-error value, so most of the time we can avoid calling + # PyErr_Occurred() when checking for errors raised by called + # functions. + # + # This also means that if an attribute with this type might be + # undefined, we can't just rely on the error value to signal this. + # Instead, we add a bitfield to keep track whether attributes with + # "error overlap" have a value. If there is no value, AttributeError + # is raised on attribute read. Parameters with default values also + # use the bitfield trick to indicate whether the caller passed a + # value. (If we can determine that an attribute is "always defined", + # we never raise an AttributeError and don't need the bitfield + # entry.) + error_overlap = False + + @abstractmethod + def accept(self, visitor: RTypeVisitor[T]) -> T: + raise NotImplementedError() + + def short_name(self) -> str: + return short_name(self.name) + + @property + @abstractmethod + def may_be_immortal(self) -> bool: + raise NotImplementedError + + def __str__(self) -> str: + return short_name(self.name) + + def __repr__(self) -> str: + return "<%s>" % self.__class__.__name__ + + def serialize(self) -> JsonDict | str: + raise NotImplementedError(f"Cannot serialize {self.__class__.__name__} instance") + + +def deserialize_type(data: JsonDict | str, ctx: DeserMaps) -> RType: + """Deserialize a JSON-serialized RType. + + Arguments: + data: The decoded JSON of the serialized type + ctx: The deserialization maps to use + """ + # Since there are so few types, we just case on them directly. If + # more get added we should switch to a system like mypy.types + # uses. 
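+    #
+    # Class and primitive types are serialized as plain name strings
+    # ("void" is special-cased); RTuple and RUnion are serialized as JSON
+    # dicts carrying a ".class" key.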
+ if isinstance(data, str): + if data in ctx.classes: + return RInstance(ctx.classes[data]) + elif data in RPrimitive.primitive_map: + return RPrimitive.primitive_map[data] + elif data == "void": + return RVoid() + else: + assert False, f"Can't find class {data}" + elif data[".class"] == "RTuple": + return RTuple.deserialize(data, ctx) + elif data[".class"] == "RUnion": + return RUnion.deserialize(data, ctx) + raise NotImplementedError("unexpected .class {}".format(data[".class"])) + + +class RTypeVisitor(Generic[T]): + """Generic visitor over RTypes (uses the visitor design pattern).""" + + @abstractmethod + def visit_rprimitive(self, typ: RPrimitive, /) -> T: + raise NotImplementedError + + @abstractmethod + def visit_rinstance(self, typ: RInstance, /) -> T: + raise NotImplementedError + + @abstractmethod + def visit_runion(self, typ: RUnion, /) -> T: + raise NotImplementedError + + @abstractmethod + def visit_rtuple(self, typ: RTuple, /) -> T: + raise NotImplementedError + + @abstractmethod + def visit_rstruct(self, typ: RStruct, /) -> T: + raise NotImplementedError + + @abstractmethod + def visit_rarray(self, typ: RArray, /) -> T: + raise NotImplementedError + + @abstractmethod + def visit_rvoid(self, typ: RVoid, /) -> T: + raise NotImplementedError + + +@final +class RVoid(RType): + """The void type (no value). + + This is a singleton -- use void_rtype (below) to refer to this instead of + constructing a new instance. + """ + + is_unboxed = False + name = "void" + ctype = "void" + + def accept(self, visitor: RTypeVisitor[T]) -> T: + return visitor.visit_rvoid(self) + + @property + def may_be_immortal(self) -> bool: + return False + + def serialize(self) -> str: + return "void" + + def __eq__(self, other: object) -> TypeGuard[RVoid]: + return isinstance(other, RVoid) + + def __hash__(self) -> int: + return hash(RVoid) + + +# Singleton instance of RVoid +void_rtype: Final = RVoid() + + +@final +class RPrimitive(RType): + """Primitive type such as 'object' or 'int'. + + These often have custom ops associated with them. The 'object' + primitive type can be used to hold arbitrary Python objects. + + Different primitive types have different representations, and + primitives may be unboxed or boxed. Primitive types don't need to + directly correspond to Python types, but most do. + + NOTE: All supported primitive types are defined below + (e.g. object_rprimitive). + """ + + # Map from primitive names to primitive types and is used by deserialization + primitive_map: ClassVar[dict[str, RPrimitive]] = {} + + def __init__( + self, + name: str, + *, + is_unboxed: bool, + is_refcounted: bool, + is_native_int: bool = False, + is_signed: bool = False, + ctype: str = "PyObject *", + size: int = PLATFORM_SIZE, + error_overlap: bool = False, + may_be_immortal: bool = True, + ) -> None: + RPrimitive.primitive_map[name] = self + + self.name = name + self.is_unboxed = is_unboxed + self.is_refcounted = is_refcounted + self.is_native_int = is_native_int + self.is_signed = is_signed + self._ctype = ctype + self.size = size + self.error_overlap = error_overlap + self._may_be_immortal = may_be_immortal and HAVE_IMMORTAL + if ctype == "CPyTagged": + self.c_undefined = "CPY_INT_TAG" + elif ctype in ("int16_t", "int32_t", "int64_t"): + # This is basically an arbitrary value that is pretty + # unlikely to overlap with a real value. + self.c_undefined = "-113" + elif ctype == "CPyPtr": + # TODO: Invent an overlapping error value? 
+ self.c_undefined = "0" + elif ctype.endswith("*"): + # Boxed and pointer types use the null pointer as the error value. + self.c_undefined = "NULL" + elif ctype == "char": + self.c_undefined = "2" + elif ctype == "double": + self.c_undefined = "-113.0" + elif ctype in ("uint8_t", "uint16_t", "uint32_t", "uint64_t"): + self.c_undefined = "239" # An arbitrary number + else: + assert False, "Unrecognized ctype: %r" % ctype + + def accept(self, visitor: RTypeVisitor[T]) -> T: + return visitor.visit_rprimitive(self) + + @property + def may_be_immortal(self) -> bool: + return self._may_be_immortal + + def serialize(self) -> str: + return self.name + + def __repr__(self) -> str: + return "" % self.name + + def __eq__(self, other: object) -> TypeGuard[RPrimitive]: + return isinstance(other, RPrimitive) and other.name == self.name + + def __hash__(self) -> int: + return hash(self.name) + + +# NOTE: All the supported instances of RPrimitive are defined +# below. Use these instead of creating new instances. + +# Used to represent arbitrary objects and dynamically typed (Any) +# values. There are various ops that let you perform generic, runtime +# checked operations on these (that match Python semantics). See the +# ops in mypyc.primitives.misc_ops, including py_getattr_op, +# py_call_op, and many others. +# +# If there is no more specific RType available for some value, we fall +# back to using this type. +# +# NOTE: Even though this is very flexible, this type should be used as +# little as possible, as generic ops are typically slow. Other types, +# including other primitive types and RInstance, are usually much +# faster. +object_rprimitive: Final = RPrimitive("builtins.object", is_unboxed=False, is_refcounted=True) + +# represents a low level pointer of an object +object_pointer_rprimitive: Final = RPrimitive( + "object_ptr", is_unboxed=False, is_refcounted=False, ctype="PyObject **" +) + +# Arbitrary-precision integer (corresponds to Python 'int'). Small +# enough values are stored unboxed, while large integers are +# represented as a tagged pointer to a Python 'int' PyObject. The +# lowest bit is used as the tag to decide whether it is a signed +# unboxed value (shifted left by one) or a PyObject * pointing to an +# 'int' object. Pointers have the least significant bit set. +# +# The undefined/error value is the null pointer (1 -- only the least +# significant bit is set)). +# +# This cannot represent a subclass of int. An instance of a subclass +# of int is coerced to the corresponding 'int' value. +int_rprimitive: Final = RPrimitive( + "builtins.int", is_unboxed=True, is_refcounted=True, ctype="CPyTagged" +) + +# An unboxed integer. The representation is the same as for unboxed +# int_rprimitive (shifted left by one). These can be used when an +# integer is known to be small enough to fit size_t (CPyTagged). 
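+#
+# For example, the tagged (short) representation of the Python int 5 is
+# 10 (5 << 1, with the tag bit clear).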
+short_int_rprimitive: Final = RPrimitive( + "short_int", is_unboxed=True, is_refcounted=False, ctype="CPyTagged" +) + +# Low level integer types (correspond to C integer types) + +int16_rprimitive: Final = RPrimitive( + "i16", + is_unboxed=True, + is_refcounted=False, + is_native_int=True, + is_signed=True, + ctype="int16_t", + size=2, + error_overlap=True, +) +int32_rprimitive: Final = RPrimitive( + "i32", + is_unboxed=True, + is_refcounted=False, + is_native_int=True, + is_signed=True, + ctype="int32_t", + size=4, + error_overlap=True, +) +int64_rprimitive: Final = RPrimitive( + "i64", + is_unboxed=True, + is_refcounted=False, + is_native_int=True, + is_signed=True, + ctype="int64_t", + size=8, + error_overlap=True, +) +uint8_rprimitive: Final = RPrimitive( + "u8", + is_unboxed=True, + is_refcounted=False, + is_native_int=True, + is_signed=False, + ctype="uint8_t", + size=1, + error_overlap=True, +) + +# The following unsigned native int types (u16, u32, u64) are not +# exposed to the user. They are for internal use within mypyc only. + +u16_rprimitive: Final = RPrimitive( + "u16", + is_unboxed=True, + is_refcounted=False, + is_native_int=True, + is_signed=False, + ctype="uint16_t", + size=2, + error_overlap=True, +) +uint32_rprimitive: Final = RPrimitive( + "u32", + is_unboxed=True, + is_refcounted=False, + is_native_int=True, + is_signed=False, + ctype="uint32_t", + size=4, + error_overlap=True, +) +uint64_rprimitive: Final = RPrimitive( + "u64", + is_unboxed=True, + is_refcounted=False, + is_native_int=True, + is_signed=False, + ctype="uint64_t", + size=8, + error_overlap=True, +) + +# The C 'int' type +c_int_rprimitive = int32_rprimitive + +if IS_32_BIT_PLATFORM: + c_size_t_rprimitive = uint32_rprimitive + c_pyssize_t_rprimitive = RPrimitive( + "native_int", + is_unboxed=True, + is_refcounted=False, + is_native_int=True, + is_signed=True, + ctype="int32_t", + size=4, + ) +else: + c_size_t_rprimitive = uint64_rprimitive + c_pyssize_t_rprimitive = RPrimitive( + "native_int", + is_unboxed=True, + is_refcounted=False, + is_native_int=True, + is_signed=True, + ctype="int64_t", + size=8, + ) + +# Untyped pointer, represented as integer in the C backend +pointer_rprimitive: Final = RPrimitive("ptr", is_unboxed=True, is_refcounted=False, ctype="CPyPtr") + +# Untyped pointer, represented as void * in the C backend +c_pointer_rprimitive: Final = RPrimitive( + "c_ptr", is_unboxed=False, is_refcounted=False, ctype="void *" +) + +cstring_rprimitive: Final = RPrimitive( + "cstring", is_unboxed=True, is_refcounted=False, ctype="const char *" +) + +# The type corresponding to mypyc.common.BITMAP_TYPE +bitmap_rprimitive: Final = uint32_rprimitive + +# Floats are represent as 'float' PyObject * values. (In the future +# we'll likely switch to a more efficient, unboxed representation.) +float_rprimitive: Final = RPrimitive( + "builtins.float", + is_unboxed=True, + is_refcounted=False, + ctype="double", + size=8, + error_overlap=True, +) + +# An unboxed Python bool value. This actually has three possible values +# (0 -> False, 1 -> True, 2 -> error). If you only need True/False, use +# bit_rprimitive instead. +bool_rprimitive: Final = RPrimitive( + "builtins.bool", is_unboxed=True, is_refcounted=False, ctype="char", size=1 +) + +# A low-level boolean value with two possible values: 0 and 1. Any +# other value results in undefined behavior. Undefined or error values +# are not supported. 
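# A small sketch of how the error-value conventions above can be inspected,
# assuming the mypyc package this file is vendored from is importable. The
# attribute names (_ctype, c_undefined, error_overlap) come from the RPrimitive
# definition earlier in this file; the loop itself is only an illustration.
from mypyc.ir.rtypes import bool_rprimitive, float_rprimitive, int64_rprimitive

for prim in (bool_rprimitive, float_rprimitive, int64_rprimitive):
    # c_undefined is the C-level sentinel that signals an error return;
    # error_overlap=True means the sentinel can collide with a real value, so
    # callers must also consult the exception state to disambiguate.
    print(prim.name, prim._ctype, prim.c_undefined, prim.error_overlap)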
+bit_rprimitive: Final = RPrimitive( + "bit", is_unboxed=True, is_refcounted=False, ctype="char", size=1 +) + +# The 'None' value. The possible values are 0 -> None and 2 -> error. +none_rprimitive: Final = RPrimitive( + "builtins.None", is_unboxed=True, is_refcounted=False, ctype="char", size=1 +) + +# Python list object (or an instance of a subclass of list). These could be +# immortal, but since this is expected to be very rare, and the immortality checks +# can be pretty expensive for lists, we treat lists as non-immortal. +list_rprimitive: Final = RPrimitive( + "builtins.list", is_unboxed=False, is_refcounted=True, may_be_immortal=False +) + +# Python dict object (or an instance of a subclass of dict). +dict_rprimitive: Final = RPrimitive("builtins.dict", is_unboxed=False, is_refcounted=True) + +# Python set object (or an instance of a subclass of set). +set_rprimitive: Final = RPrimitive("builtins.set", is_unboxed=False, is_refcounted=True) + +# Python frozenset object (or an instance of a subclass of frozenset). +frozenset_rprimitive: Final = RPrimitive( + "builtins.frozenset", is_unboxed=False, is_refcounted=True +) + +# Python str object. At the C layer, str is referred to as unicode +# (PyUnicode). +str_rprimitive: Final = RPrimitive("builtins.str", is_unboxed=False, is_refcounted=True) + +# Python bytes object. +bytes_rprimitive: Final = RPrimitive("builtins.bytes", is_unboxed=False, is_refcounted=True) + +# Tuple of an arbitrary length (corresponds to Tuple[t, ...], with +# explicit '...'). +tuple_rprimitive: Final = RPrimitive("builtins.tuple", is_unboxed=False, is_refcounted=True) + +# Python range object. +range_rprimitive: Final = RPrimitive("builtins.range", is_unboxed=False, is_refcounted=True) + +KNOWN_NATIVE_TYPES: Final = { + name: RPrimitive(name, is_unboxed=False, is_refcounted=True) + for name in ["librt.internal.WriteBuffer", "librt.internal.ReadBuffer"] +} + + +def is_native_rprimitive(rtype: RType) -> bool: + return isinstance(rtype, RPrimitive) and rtype.name in KNOWN_NATIVE_TYPES + + +def is_tagged(rtype: RType) -> TypeGuard[RPrimitive]: + return rtype is int_rprimitive or rtype is short_int_rprimitive + + +def is_int_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return rtype is int_rprimitive + + +def is_short_int_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return rtype is short_int_rprimitive + + +def is_int16_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return rtype is int16_rprimitive + + +def is_int32_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return rtype is int32_rprimitive or ( + rtype is c_pyssize_t_rprimitive and rtype._ctype == "int32_t" + ) + + +def is_int64_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return rtype is int64_rprimitive or ( + rtype is c_pyssize_t_rprimitive and rtype._ctype == "int64_t" + ) + + +def is_fixed_width_rtype(rtype: RType) -> TypeGuard[RPrimitive]: + return ( + is_int64_rprimitive(rtype) + or is_int32_rprimitive(rtype) + or is_int16_rprimitive(rtype) + or is_uint8_rprimitive(rtype) + ) + + +def is_uint8_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return rtype is uint8_rprimitive + + +def is_uint32_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return rtype is uint32_rprimitive + + +def is_uint64_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return rtype is uint64_rprimitive + + +def is_c_py_ssize_t_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return rtype is c_pyssize_t_rprimitive + + +def is_pointer_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + 
return rtype is pointer_rprimitive + + +def is_float_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return isinstance(rtype, RPrimitive) and rtype.name == "builtins.float" + + +def is_bool_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return isinstance(rtype, RPrimitive) and rtype.name == "builtins.bool" + + +def is_bit_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return isinstance(rtype, RPrimitive) and rtype.name == "bit" + + +def is_bool_or_bit_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return is_bool_rprimitive(rtype) or is_bit_rprimitive(rtype) + + +def is_object_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return isinstance(rtype, RPrimitive) and rtype.name == "builtins.object" + + +def is_none_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return isinstance(rtype, RPrimitive) and rtype.name == "builtins.None" + + +def is_list_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return isinstance(rtype, RPrimitive) and rtype.name == "builtins.list" + + +def is_dict_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return isinstance(rtype, RPrimitive) and rtype.name == "builtins.dict" + + +def is_set_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return isinstance(rtype, RPrimitive) and rtype.name == "builtins.set" + + +def is_frozenset_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return isinstance(rtype, RPrimitive) and rtype.name == "builtins.frozenset" + + +def is_str_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return isinstance(rtype, RPrimitive) and rtype.name == "builtins.str" + + +def is_bytes_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return isinstance(rtype, RPrimitive) and rtype.name == "builtins.bytes" + + +def is_tuple_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return isinstance(rtype, RPrimitive) and rtype.name == "builtins.tuple" + + +def is_range_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return isinstance(rtype, RPrimitive) and rtype.name == "builtins.range" + + +def is_sequence_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return isinstance(rtype, RPrimitive) and ( + is_list_rprimitive(rtype) + or is_tuple_rprimitive(rtype) + or is_str_rprimitive(rtype) + or is_bytes_rprimitive(rtype) + ) + + +def is_immutable_rprimitive(rtype: RType) -> TypeGuard[RPrimitive]: + return ( + is_str_rprimitive(rtype) + or is_bytes_rprimitive(rtype) + or is_tuple_rprimitive(rtype) + or is_frozenset_rprimitive(rtype) + ) + + +class TupleNameVisitor(RTypeVisitor[str]): + """Produce a tuple name based on the concrete representations of types.""" + + def visit_rinstance(self, t: RInstance) -> str: + return "O" + + def visit_runion(self, t: RUnion) -> str: + return "O" + + def visit_rprimitive(self, t: RPrimitive) -> str: + if t._ctype == "CPyTagged": + return "I" + elif t._ctype == "char": + return "C" + elif t._ctype == "int64_t": + return "8" # "8 byte integer" + elif t._ctype == "int32_t": + return "4" # "4 byte integer" + elif t._ctype == "int16_t": + return "2" # "2 byte integer" + elif t._ctype == "uint8_t": + return "U1" # "1 byte unsigned integer" + elif t._ctype == "double": + return "F" + assert not t.is_unboxed, f"{t} unexpected unboxed type" + return "O" + + def visit_rtuple(self, t: RTuple) -> str: + parts = [elem.accept(self) for elem in t.types] + return "T{}{}".format(len(parts), "".join(parts)) + + def visit_rstruct(self, t: RStruct) -> str: + assert False, "RStruct not supported in tuple" + + def visit_rarray(self, t: RArray) -> str: + assert False, "RArray not 
supported in tuple" + + def visit_rvoid(self, t: RVoid) -> str: + assert False, "rvoid in tuple?" + + +@final +class RTuple(RType): + """Fixed-length unboxed tuple (represented as a C struct). + + These are used to represent mypy TupleType values (fixed-length + Python tuples). Since this is unboxed, the identity of a tuple + object is not preserved within compiled code. If the identity of a + tuple is important, or there is a need to have multiple references + to a single tuple object, a variable-length tuple should be used + (tuple_rprimitive or Tuple[T, ...] with explicit '...'), as they + are boxed. + + These aren't immutable. However, user code won't be able to mutate + individual tuple items. + """ + + is_unboxed = True + + def __init__(self, types: list[RType]) -> None: + self.name = "tuple" + self.types = tuple(types) + self.is_refcounted = any(t.is_refcounted for t in self.types) + # Generate a unique id which is used in naming corresponding C identifiers. + # This is necessary since C does not have anonymous structural type equivalence + # in the same way python can just assign a Tuple[int, bool] to a Tuple[int, bool]. + self.unique_id = self.accept(TupleNameVisitor()) + # Nominally the max c length is 31 chars, but I'm not honestly worried about this. + self.struct_name = f"tuple_{self.unique_id}" + self._ctype = f"{self.struct_name}" + self.error_overlap = all(t.error_overlap for t in self.types) and bool(self.types) + + def accept(self, visitor: RTypeVisitor[T]) -> T: + return visitor.visit_rtuple(self) + + @property + def may_be_immortal(self) -> bool: + return False + + def __str__(self) -> str: + return "tuple[%s]" % ", ".join(str(typ) for typ in self.types) + + def __repr__(self) -> str: + return "" % ", ".join(repr(typ) for typ in self.types) + + def __eq__(self, other: object) -> TypeGuard[RTuple]: + return isinstance(other, RTuple) and self.types == other.types + + def __hash__(self) -> int: + return hash((self.name, self.types)) + + def serialize(self) -> JsonDict: + types = [x.serialize() for x in self.types] + return {".class": "RTuple", "types": types} + + @classmethod + def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> RTuple: + types = [deserialize_type(t, ctx) for t in data["types"]] + return RTuple(types) + + +# Exception tuple: (exception class, exception instance, traceback object) +exc_rtuple = RTuple([object_rprimitive, object_rprimitive, object_rprimitive]) + +# Dictionary iterator tuple: (should continue, internal offset, key, value) +# See mypyc.irbuild.for_helpers.ForDictionaryCommon for more details. +dict_next_rtuple_pair = RTuple( + [bool_rprimitive, short_int_rprimitive, object_rprimitive, object_rprimitive] +) +# Same as above but just for key or value. 
+dict_next_rtuple_single = RTuple([bool_rprimitive, short_int_rprimitive, object_rprimitive]) + + +def compute_rtype_alignment(typ: RType) -> int: + """Compute alignment of a given type based on platform alignment rule""" + platform_alignment = PLATFORM_SIZE + if isinstance(typ, RPrimitive): + return typ.size + elif isinstance(typ, RInstance): + return platform_alignment + elif isinstance(typ, RUnion): + return platform_alignment + elif isinstance(typ, RArray): + return compute_rtype_alignment(typ.item_type) + else: + if isinstance(typ, RTuple): + items = list(typ.types) + elif isinstance(typ, RStruct): + items = typ.types + else: + assert False, "invalid rtype for computing alignment" + max_alignment = max(compute_rtype_alignment(item) for item in items) + return max_alignment + + +def compute_rtype_size(typ: RType) -> int: + """Compute unaligned size of rtype""" + if isinstance(typ, RPrimitive): + return typ.size + elif isinstance(typ, RTuple): + return compute_aligned_offsets_and_size(list(typ.types))[1] + elif isinstance(typ, RUnion): + return PLATFORM_SIZE + elif isinstance(typ, RStruct): + return compute_aligned_offsets_and_size(typ.types)[1] + elif isinstance(typ, RInstance): + return PLATFORM_SIZE + elif isinstance(typ, RArray): + alignment = compute_rtype_alignment(typ) + aligned_size = (compute_rtype_size(typ.item_type) + (alignment - 1)) & ~(alignment - 1) + return aligned_size * typ.length + else: + assert False, "invalid rtype for computing size" + + +def compute_aligned_offsets_and_size(types: list[RType]) -> tuple[list[int], int]: + """Compute offsets and total size of a list of types after alignment + + Note that the types argument are types of values that are stored + sequentially with platform default alignment. + """ + unaligned_sizes = [compute_rtype_size(typ) for typ in types] + alignments = [compute_rtype_alignment(typ) for typ in types] + + current_offset = 0 + offsets = [] + final_size = 0 + for i in range(len(unaligned_sizes)): + offsets.append(current_offset) + if i + 1 < len(unaligned_sizes): + cur_size = unaligned_sizes[i] + current_offset += cur_size + next_alignment = alignments[i + 1] + # compute aligned offset, + # check https://en.wikipedia.org/wiki/Data_structure_alignment for more information + current_offset = (current_offset + (next_alignment - 1)) & -next_alignment + else: + struct_alignment = max(alignments) + final_size = current_offset + unaligned_sizes[i] + final_size = (final_size + (struct_alignment - 1)) & -struct_alignment + return offsets, final_size + + +@final +class RStruct(RType): + """C struct type""" + + def __init__(self, name: str, names: list[str], types: list[RType]) -> None: + self.name = name + self.names = names + self.types = types + # generate dummy names + if len(self.names) < len(self.types): + for i in range(len(self.types) - len(self.names)): + self.names.append("_item" + str(i)) + self.offsets, self.size = compute_aligned_offsets_and_size(types) + self._ctype = name + + def accept(self, visitor: RTypeVisitor[T]) -> T: + return visitor.visit_rstruct(self) + + @property + def may_be_immortal(self) -> bool: + return False + + def __str__(self) -> str: + # if not tuple(unnamed structs) + return "{}{{{}}}".format( + self.name, + ", ".join(name + ":" + str(typ) for name, typ in zip(self.names, self.types)), + ) + + def __repr__(self) -> str: + return "".format( + self.name, + ", ".join(name + ":" + repr(typ) for name, typ in zip(self.names, self.types)), + ) + + def __eq__(self, other: object) -> TypeGuard[RStruct]: + return ( + 
isinstance(other, RStruct) + and self.name == other.name + and self.names == other.names + and self.types == other.types + ) + + def __hash__(self) -> int: + return hash((self.name, tuple(self.names), tuple(self.types))) + + def serialize(self) -> JsonDict: + assert False + + @classmethod + def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> RStruct: + assert False + + +@final +class RInstance(RType): + """Instance of user-defined class (compiled to C extension class). + + The runtime representation is 'PyObject *', and these are always + boxed and thus reference-counted. + + These support fast method calls and fast attribute access using + vtables, and they usually use a dict-free, struct-based + representation of attributes. Method calls and attribute access + can skip the vtable if we know that there is no overriding. + + These are also sometimes called 'native' types, since these have + the most efficient representation and ops (along with certain + RPrimitive types and RTuple). + """ + + is_unboxed = False + + def __init__(self, class_ir: ClassIR) -> None: + # name is used for formatting the name in messages and debug output + # so we want the fullname for precision. + self.name = class_ir.fullname + self.class_ir = class_ir + self._ctype = "PyObject *" + + def accept(self, visitor: RTypeVisitor[T]) -> T: + return visitor.visit_rinstance(self) + + @property + def may_be_immortal(self) -> bool: + return False + + def struct_name(self, names: NameGenerator) -> str: + return self.class_ir.struct_name(names) + + def getter_index(self, name: str) -> int: + return self.class_ir.vtable_entry(name) + + def setter_index(self, name: str) -> int: + return self.getter_index(name) + 1 + + def method_index(self, name: str) -> int: + return self.class_ir.vtable_entry(name) + + def attr_type(self, name: str) -> RType: + return self.class_ir.attr_type(name) + + def __repr__(self) -> str: + return "" % self.name + + def __eq__(self, other: object) -> TypeGuard[RInstance]: + return isinstance(other, RInstance) and other.name == self.name + + def __hash__(self) -> int: + return hash(self.name) + + def serialize(self) -> str: + return self.name + + +@final +class RUnion(RType): + """union[x, ..., y]""" + + is_unboxed = False + + def __init__(self, items: list[RType]) -> None: + self.name = "union" + self.items = items + self.items_set = frozenset(items) + self._ctype = "PyObject *" + + @staticmethod + def make_simplified_union(items: list[RType]) -> RType: + """Return a normalized union that covers the given items. + + Flatten nested unions and remove duplicate items. + + Overlapping items are *not* simplified. For example, + [object, str] will not be simplified. 
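# A sketch of make_simplified_union (above) and optional_value_type (defined a
# bit further below) in action, assuming mypyc is importable: duplicates are
# dropped, a single-item union collapses to the item itself, and a two-item
# union with None is recognized as an optional type.
from mypyc.ir.rtypes import RUnion, int_rprimitive, none_rprimitive, optional_value_type

opt_int = RUnion.make_simplified_union([int_rprimitive, none_rprimitive, none_rprimitive])
assert optional_value_type(opt_int) is int_rprimitive   # union[int, None] -> int

assert RUnion.make_simplified_union([int_rprimitive]) is int_rprimitive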
+ """ + items = flatten_nested_unions(items) + assert items + + unique_items = dict.fromkeys(items) + if len(unique_items) > 1: + return RUnion(list(unique_items)) + else: + return next(iter(unique_items)) + + def accept(self, visitor: RTypeVisitor[T]) -> T: + return visitor.visit_runion(self) + + @property + def may_be_immortal(self) -> bool: + return any(item.may_be_immortal for item in self.items) + + def __repr__(self) -> str: + return "" % ", ".join(str(item) for item in self.items) + + def __str__(self) -> str: + return "union[%s]" % ", ".join(str(item) for item in self.items) + + # We compare based on the set because order in a union doesn't matter + def __eq__(self, other: object) -> TypeGuard[RUnion]: + return isinstance(other, RUnion) and self.items_set == other.items_set + + def __hash__(self) -> int: + return hash(("union", self.items_set)) + + def serialize(self) -> JsonDict: + types = [x.serialize() for x in self.items] + return {".class": "RUnion", "types": types} + + @classmethod + def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> RUnion: + types = [deserialize_type(t, ctx) for t in data["types"]] + return RUnion(types) + + +def flatten_nested_unions(types: list[RType]) -> list[RType]: + if not any(isinstance(t, RUnion) for t in types): + return types # Fast path + + flat_items: list[RType] = [] + for t in types: + if isinstance(t, RUnion): + flat_items.extend(flatten_nested_unions(t.items)) + else: + flat_items.append(t) + return flat_items + + +def optional_value_type(rtype: RType) -> RType | None: + """If rtype is the union of none_rprimitive and another type X, return X. + + Otherwise, return None. + """ + if isinstance(rtype, RUnion) and len(rtype.items) == 2: + if rtype.items[0] == none_rprimitive: + return rtype.items[1] + elif rtype.items[1] == none_rprimitive: + return rtype.items[0] + return None + + +def is_optional_type(rtype: RType) -> TypeGuard[RUnion]: + """Is rtype an optional type with exactly two union items?""" + return optional_value_type(rtype) is not None + + +@final +class RArray(RType): + """Fixed-length C array type (for example, int[5]). + + Note that the implementation is a bit limited, and these can basically + be only used for local variables that are initialized in one location. 
+ """ + + def __init__(self, item_type: RType, length: int) -> None: + self.item_type = item_type + # Number of items + self.length = length + self.is_refcounted = False + + def accept(self, visitor: RTypeVisitor[T]) -> T: + return visitor.visit_rarray(self) + + @property + def may_be_immortal(self) -> bool: + return False + + def __str__(self) -> str: + return f"{self.item_type}[{self.length}]" + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> TypeGuard[RArray]: + return ( + isinstance(other, RArray) + and self.item_type == other.item_type + and self.length == other.length + ) + + def __hash__(self) -> int: + return hash((self.item_type, self.length)) + + def serialize(self) -> JsonDict: + assert False + + @classmethod + def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> RArray: + assert False + + +PyObject = RStruct( + name="PyObject", + names=["ob_refcnt", "ob_type"], + types=[c_pyssize_t_rprimitive, pointer_rprimitive], +) + +PyVarObject = RStruct( + name="PyVarObject", names=["ob_base", "ob_size"], types=[PyObject, c_pyssize_t_rprimitive] +) + +setentry = RStruct( + name="setentry", names=["key", "hash"], types=[pointer_rprimitive, c_pyssize_t_rprimitive] +) + +smalltable = RStruct(name="smalltable", names=[], types=[setentry] * 8) + +PySetObject = RStruct( + name="PySetObject", + names=[ + "ob_base", + "fill", + "used", + "mask", + "table", + "hash", + "finger", + "smalltable", + "weakreflist", + ], + types=[ + PyObject, + c_pyssize_t_rprimitive, + c_pyssize_t_rprimitive, + c_pyssize_t_rprimitive, + pointer_rprimitive, + c_pyssize_t_rprimitive, + c_pyssize_t_rprimitive, + smalltable, + pointer_rprimitive, + ], +) + +PyListObject = RStruct( + name="PyListObject", + names=["ob_base", "ob_item", "allocated"], + types=[PyVarObject, pointer_rprimitive, c_pyssize_t_rprimitive], +) + + +def check_native_int_range(rtype: RPrimitive, n: int) -> bool: + """Is n within the range of a native, fixed-width int type? + + Assume the type is a fixed-width int type. + """ + if not rtype.is_signed: + return 0 <= n < (1 << (8 * rtype.size)) + else: + limit = 1 << (rtype.size * 8 - 1) + return -limit <= n < limit diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/__init__.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/__init__.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..76a30f9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/__init__.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/__init__.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/ast_helpers.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/ast_helpers.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..bda7de6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/ast_helpers.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/ast_helpers.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/ast_helpers.py new file mode 100644 index 0000000..3b0f505 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/ast_helpers.py @@ -0,0 +1,123 @@ +"""IRBuilder AST transform helpers shared between expressions and statements. 
+ +Shared code that is tightly coupled to mypy ASTs can be put here instead of +making mypyc.irbuild.builder larger. +""" + +from __future__ import annotations + +from mypy.nodes import ( + LDEF, + BytesExpr, + ComparisonExpr, + Expression, + FloatExpr, + IntExpr, + MemberExpr, + NameExpr, + OpExpr, + StrExpr, + UnaryExpr, + Var, +) +from mypyc.ir.ops import BasicBlock +from mypyc.ir.rtypes import is_fixed_width_rtype, is_tagged +from mypyc.irbuild.builder import IRBuilder +from mypyc.irbuild.constant_fold import constant_fold_expr + + +def process_conditional( + self: IRBuilder, e: Expression, true: BasicBlock, false: BasicBlock +) -> None: + if isinstance(e, OpExpr) and e.op in ["and", "or"]: + if e.op == "and": + # Short circuit 'and' in a conditional context. + new = BasicBlock() + process_conditional(self, e.left, new, false) + self.activate_block(new) + process_conditional(self, e.right, true, false) + else: + # Short circuit 'or' in a conditional context. + new = BasicBlock() + process_conditional(self, e.left, true, new) + self.activate_block(new) + process_conditional(self, e.right, true, false) + elif isinstance(e, UnaryExpr) and e.op == "not": + process_conditional(self, e.expr, false, true) + else: + res = maybe_process_conditional_comparison(self, e, true, false) + if res: + return + # Catch-all for arbitrary expressions. + reg = self.accept(e) + self.add_bool_branch(reg, true, false) + + +def maybe_process_conditional_comparison( + self: IRBuilder, e: Expression, true: BasicBlock, false: BasicBlock +) -> bool: + """Transform simple tagged integer comparisons in a conditional context. + + Return True if the operation is supported (and was transformed). Otherwise, + do nothing and return False. + + Args: + self: IR form Builder + e: Arbitrary expression + true: Branch target if comparison is true + false: Branch target if comparison is false + """ + if not isinstance(e, ComparisonExpr) or len(e.operands) != 2: + return False + ltype = self.node_type(e.operands[0]) + rtype = self.node_type(e.operands[1]) + if not ( + (is_tagged(ltype) or is_fixed_width_rtype(ltype)) + and (is_tagged(rtype) or is_fixed_width_rtype(rtype)) + ): + return False + op = e.operators[0] + if op not in ("==", "!=", "<", "<=", ">", ">="): + return False + left_expr = e.operands[0] + right_expr = e.operands[1] + borrow_left = is_borrow_friendly_expr(self, right_expr) + left = self.accept(left_expr, can_borrow=borrow_left) + right = self.accept(right_expr, can_borrow=True) + if is_fixed_width_rtype(ltype) or is_fixed_width_rtype(rtype): + if not is_fixed_width_rtype(ltype): + left = self.coerce(left, rtype, e.line) + elif not is_fixed_width_rtype(rtype): + right = self.coerce(right, ltype, e.line) + reg = self.binary_op(left, right, op, e.line) + self.builder.flush_keep_alives() + self.add_bool_branch(reg, true, false) + else: + # "left op right" for two tagged integers + reg = self.builder.binary_op(left, right, op, e.line) + self.flush_keep_alives() + self.add_bool_branch(reg, true, false) + return True + + +def is_borrow_friendly_expr(self: IRBuilder, expr: Expression) -> bool: + """Can the result of the expression borrowed temporarily? + + Borrowing means keeping a reference without incrementing the reference count. 
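# A plain-Python sketch of the control flow that process_conditional (above)
# produces for a condition like 'a and b': the left operand either falls
# through to a new block that evaluates the right operand, or jumps straight to
# the false target. The callables below are hypothetical stand-ins for
# basic-block targets, not mypyc API.
from typing import Callable


def branch_and(
    a: Callable[[], bool],
    b: Callable[[], bool],
    true: Callable[[], None],
    false: Callable[[], None],
) -> None:
    if not a():   # process_conditional(e.left, new, false)
        false()
        return
    if not b():   # inside 'new': process_conditional(e.right, true, false)
        false()
        return
    true()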
+ """ + if isinstance(expr, (IntExpr, FloatExpr, StrExpr, BytesExpr)): + # Literals are immortal and can always be borrowed + return True + if ( + isinstance(expr, (UnaryExpr, OpExpr, NameExpr, MemberExpr)) + and constant_fold_expr(self, expr) is not None + ): + # Literal expressions are similar to literals + return True + if isinstance(expr, NameExpr): + if isinstance(expr.node, Var) and expr.kind == LDEF: + # Local variable reference can be borrowed + return True + if isinstance(expr, MemberExpr) and self.is_native_attr_ref(expr): + return True + return False diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/builder.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/builder.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..cd10914 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/builder.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/builder.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/builder.py new file mode 100644 index 0000000..51a02ed --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/builder.py @@ -0,0 +1,1594 @@ +"""Builder class to transform a mypy AST to the IR form. + +See the docstring of class IRBuilder for more information. +""" + +from __future__ import annotations + +from collections.abc import Iterator, Sequence +from contextlib import contextmanager +from typing import Any, Callable, Final, Union, overload + +from mypy.build import Graph +from mypy.maptype import map_instance_to_supertype +from mypy.nodes import ( + ARG_NAMED, + ARG_POS, + GDEF, + LDEF, + PARAM_SPEC_KIND, + TYPE_VAR_KIND, + TYPE_VAR_TUPLE_KIND, + ArgKind, + CallExpr, + Decorator, + Expression, + FuncDef, + IndexExpr, + IntExpr, + Lvalue, + MemberExpr, + MypyFile, + NameExpr, + OpExpr, + OverloadedFuncDef, + RefExpr, + StarExpr, + Statement, + SymbolNode, + TupleExpr, + TypeAlias, + TypeInfo, + TypeParam, + Var, +) +from mypy.types import ( + AnyType, + DeletedType, + Instance, + ProperType, + TupleType, + Type, + TypedDictType, + TypeOfAny, + TypeVarLikeType, + UninhabitedType, + UnionType, + get_proper_type, +) +from mypy.util import module_prefix, split_target +from mypy.visitor import ExpressionVisitor, StatementVisitor +from mypyc.common import BITMAP_BITS, GENERATOR_ATTRIBUTE_PREFIX, SELF_NAME, TEMP_ATTR_NAME +from mypyc.crash import catch_errors +from mypyc.errors import Errors +from mypyc.ir.class_ir import ClassIR, NonExtClassInfo +from mypyc.ir.func_ir import INVALID_FUNC_DEF, FuncDecl, FuncIR, FuncSignature, RuntimeArg +from mypyc.ir.ops import ( + NAMESPACE_MODULE, + NAMESPACE_TYPE_VAR, + Assign, + BasicBlock, + Branch, + ComparisonOp, + GetAttr, + InitStatic, + Integer, + IntOp, + LoadStatic, + MethodCall, + Op, + PrimitiveDescription, + RaiseStandardError, + Register, + SetAttr, + TupleGet, + Unreachable, + Value, +) +from mypyc.ir.rtypes import ( + RInstance, + RTuple, + RType, + RUnion, + bitmap_rprimitive, + bytes_rprimitive, + c_pyssize_t_rprimitive, + dict_rprimitive, + int_rprimitive, + is_float_rprimitive, + is_list_rprimitive, + is_none_rprimitive, + is_object_rprimitive, + is_tagged, + is_tuple_rprimitive, + none_rprimitive, + object_rprimitive, + str_rprimitive, +) +from mypyc.irbuild.constant_fold import constant_fold_expr +from mypyc.irbuild.context import FuncInfo, ImplicitClass +from mypyc.irbuild.ll_builder import LowLevelIRBuilder +from mypyc.irbuild.mapper import Mapper +from mypyc.irbuild.nonlocalcontrol 
import ( + BaseNonlocalControl, + GeneratorNonlocalControl, + LoopNonlocalControl, + NonlocalControl, +) +from mypyc.irbuild.prebuildvisitor import PreBuildVisitor +from mypyc.irbuild.prepare import RegisterImplInfo +from mypyc.irbuild.targets import ( + AssignmentTarget, + AssignmentTargetAttr, + AssignmentTargetIndex, + AssignmentTargetRegister, + AssignmentTargetTuple, +) +from mypyc.irbuild.util import bytes_from_str, is_constant +from mypyc.options import CompilerOptions +from mypyc.primitives.dict_ops import dict_get_item_op, dict_set_item_op +from mypyc.primitives.generic_ops import iter_op, next_op, py_setattr_op +from mypyc.primitives.list_ops import list_get_item_unsafe_op, list_pop_last, to_list +from mypyc.primitives.misc_ops import check_unpack_count_op, get_module_dict_op, import_op +from mypyc.primitives.registry import CFunctionDescription, function_ops +from mypyc.primitives.tuple_ops import tuple_get_item_unsafe_op + +# These int binary operations can borrow their operands safely, since the +# primitives take this into consideration. +int_borrow_friendly_op: Final = {"+", "-", "==", "!=", "<", "<=", ">", ">="} + + +class IRVisitor(ExpressionVisitor[Value], StatementVisitor[None]): + pass + + +class UnsupportedException(Exception): + pass + + +SymbolTarget = Union[AssignmentTargetRegister, AssignmentTargetAttr] + + +class IRBuilder: + """Builder class used to construct mypyc IR from a mypy AST. + + The IRBuilder class maintains IR transformation state and provides access + to various helpers used to implement the transform. + + mypyc.irbuild.visitor.IRBuilderVisitor is used to dispatch based on mypy + AST node type to code that actually does the bulk of the work. For + example, expressions are transformed in mypyc.irbuild.expression and + functions are transformed in mypyc.irbuild.function. + + Use the "accept()" method to translate individual mypy AST nodes to IR. + Other methods are used to generate IR for various lower-level operations. + + This class wraps the lower-level LowLevelIRBuilder class, an instance + of which is available through the "builder" attribute. The low-level + builder class doesn't have any knowledge of the mypy AST. Wrappers for + some LowLevelIRBuilder method are provided for convenience, but others + can also be accessed via the "builder" attribute. + + See also: + * The mypyc IR is defined in the mypyc.ir package. + * The top-level IR transform control logic is in mypyc.irbuild.main. 
+ """ + + def __init__( + self, + current_module: str, + types: dict[Expression, Type], + graph: Graph, + errors: Errors, + mapper: Mapper, + pbv: PreBuildVisitor, + visitor: IRVisitor, + options: CompilerOptions, + singledispatch_impls: dict[FuncDef, list[RegisterImplInfo]], + ) -> None: + self.builder = LowLevelIRBuilder(errors, options) + self.builders = [self.builder] + self.symtables: list[dict[SymbolNode, SymbolTarget]] = [{}] + self.runtime_args: list[list[RuntimeArg]] = [[]] + self.function_name_stack: list[str] = [] + self.class_ir_stack: list[ClassIR] = [] + # Keep track of whether the next statement in a block is reachable + # or not, separately for each block nesting level + self.block_reachable_stack: list[bool] = [True] + + self.current_module = current_module + self.mapper = mapper + self.types = types + self.graph = graph + self.ret_types: list[RType] = [] + self.functions: list[FuncIR] = [] + self.function_names: set[tuple[str | None, str]] = set() + self.classes: list[ClassIR] = [] + self.final_names: list[tuple[str, RType]] = [] + self.type_var_names: list[str] = [] + self.callable_class_names: set[str] = set() + self.options = options + + # These variables keep track of the number of lambdas, implicit indices, and implicit + # iterators instantiated so we avoid name conflicts. The indices and iterators are + # instantiated from for-loops. + self.lambda_counter = 0 + self.temp_counter = 0 + + # These variables are populated from the first-pass PreBuildVisitor. + self.free_variables = pbv.free_variables + self.prop_setters = pbv.prop_setters + self.encapsulating_funcs = pbv.encapsulating_funcs + self.nested_fitems = pbv.nested_funcs.keys() + self.fdefs_to_decorators = pbv.funcs_to_decorators + self.module_import_groups = pbv.module_import_groups + + self.singledispatch_impls = singledispatch_impls + + self.visitor = visitor + + # This list operates similarly to a function call stack for nested functions. Whenever a + # function definition begins to be generated, a FuncInfo instance is added to the stack, + # and information about that function (e.g. whether it is nested, its environment class to + # be generated) is stored in that FuncInfo instance. When the function is done being + # generated, its corresponding FuncInfo is popped off the stack. + self.fn_info = FuncInfo(INVALID_FUNC_DEF, "", "") + self.fn_infos: list[FuncInfo] = [self.fn_info] + + # This list operates as a stack of constructs that modify the + # behavior of nonlocal control flow constructs. + self.nonlocal_control: list[NonlocalControl] = [] + + self.errors = errors + # Notionally a list of all of the modules imported by the + # module being compiled, but stored as an OrderedDict so we + # can also do quick lookups. + self.imports: dict[str, None] = {} + + self.can_borrow = False + + # High-level control + + def set_module(self, module_name: str, module_path: str) -> None: + """Set the name and path of the current module. + + This must be called before transforming any AST nodes. + """ + self.module_name = module_name + self.module_path = module_path + self.builder.set_module(module_name, module_path) + + @overload + def accept(self, node: Expression, *, can_borrow: bool = False) -> Value: ... + + @overload + def accept(self, node: Statement) -> None: ... + + def accept(self, node: Statement | Expression, *, can_borrow: bool = False) -> Value | None: + """Transform an expression or a statement. + + If can_borrow is true, prefer to generate a borrowed reference. 
+ Borrowed references are faster since they don't require reference count + manipulation, but they are only safe to use in specific contexts. + """ + with self.catch_errors(node.line): + if isinstance(node, Expression): + old_can_borrow = self.can_borrow + self.can_borrow = can_borrow + try: + res = node.accept(self.visitor) + res = self.coerce(res, self.node_type(node), node.line) + # If we hit an error during compilation, we want to + # keep trying, so we can produce more error + # messages. Generate a temp of the right type to keep + # from causing more downstream trouble. + except UnsupportedException: + res = Register(self.node_type(node)) + self.can_borrow = old_can_borrow + if not can_borrow: + self.flush_keep_alives() + return res + else: + try: + node.accept(self.visitor) + except UnsupportedException: + pass + return None + + def flush_keep_alives(self) -> None: + self.builder.flush_keep_alives() + + # Pass through methods for the most common low-level builder ops, for convenience. + + def add(self, op: Op) -> Value: + return self.builder.add(op) + + def goto(self, target: BasicBlock) -> None: + self.builder.goto(target) + + def activate_block(self, block: BasicBlock) -> None: + self.builder.activate_block(block) + + def goto_and_activate(self, block: BasicBlock) -> None: + self.builder.goto_and_activate(block) + + def self(self) -> Register: + return self.builder.self() + + def py_get_attr(self, obj: Value, attr: str, line: int) -> Value: + return self.builder.py_get_attr(obj, attr, line) + + def load_str(self, value: str) -> Value: + return self.builder.load_str(value) + + def load_bytes_from_str_literal(self, value: str) -> Value: + """Load bytes object from a string literal. + + The literal characters of BytesExpr (the characters inside b'') + are stored in BytesExpr.value, whose type is 'str' not 'bytes'. + Thus we perform a special conversion here. 
+ """ + return self.builder.load_bytes(bytes_from_str(value)) + + def load_int(self, value: int) -> Value: + return self.builder.load_int(value) + + def load_float(self, value: float) -> Value: + return self.builder.load_float(value) + + def unary_op(self, lreg: Value, expr_op: str, line: int) -> Value: + return self.builder.unary_op(lreg, expr_op, line) + + def binary_op(self, lreg: Value, rreg: Value, expr_op: str, line: int) -> Value: + return self.builder.binary_op(lreg, rreg, expr_op, line) + + def coerce(self, src: Value, target_type: RType, line: int, force: bool = False) -> Value: + return self.builder.coerce(src, target_type, line, force, can_borrow=self.can_borrow) + + def none_object(self) -> Value: + return self.builder.none_object() + + def none(self) -> Value: + return self.builder.none() + + def true(self) -> Value: + return self.builder.true() + + def false(self) -> Value: + return self.builder.false() + + def new_list_op(self, values: list[Value], line: int) -> Value: + return self.builder.new_list_op(values, line) + + def new_set_op(self, values: list[Value], line: int) -> Value: + return self.builder.new_set_op(values, line) + + def translate_is_op(self, lreg: Value, rreg: Value, expr_op: str, line: int) -> Value: + return self.builder.translate_is_op(lreg, rreg, expr_op, line) + + def py_call( + self, + function: Value, + arg_values: list[Value], + line: int, + arg_kinds: list[ArgKind] | None = None, + arg_names: Sequence[str | None] | None = None, + ) -> Value: + return self.builder.py_call(function, arg_values, line, arg_kinds, arg_names) + + def add_bool_branch(self, value: Value, true: BasicBlock, false: BasicBlock) -> None: + self.builder.add_bool_branch(value, true, false) + + def load_native_type_object(self, fullname: str) -> Value: + return self.builder.load_native_type_object(fullname) + + def gen_method_call( + self, + base: Value, + name: str, + arg_values: list[Value], + result_type: RType | None, + line: int, + arg_kinds: list[ArgKind] | None = None, + arg_names: list[str | None] | None = None, + ) -> Value: + return self.builder.gen_method_call( + base, name, arg_values, result_type, line, arg_kinds, arg_names, self.can_borrow + ) + + def load_module(self, name: str) -> Value: + return self.builder.load_module(name) + + def call_c(self, desc: CFunctionDescription, args: list[Value], line: int) -> Value: + return self.builder.call_c(desc, args, line) + + def primitive_op( + self, + desc: PrimitiveDescription, + args: list[Value], + line: int, + result_type: RType | None = None, + ) -> Value: + return self.builder.primitive_op(desc, args, line, result_type) + + def int_op(self, type: RType, lhs: Value, rhs: Value, op: int, line: int) -> Value: + return self.builder.int_op(type, lhs, rhs, op, line) + + def compare_tuples(self, lhs: Value, rhs: Value, op: str, line: int) -> Value: + return self.builder.compare_tuples(lhs, rhs, op, line) + + def builtin_len(self, val: Value, line: int) -> Value: + return self.builder.builtin_len(val, line) + + def new_tuple(self, items: list[Value], line: int) -> Value: + return self.builder.new_tuple(items, line) + + def debug_print(self, toprint: str | Value) -> None: + return self.builder.debug_print(toprint) + + def set_immortal_if_free_threaded(self, v: Value, line: int) -> None: + """Make an object immortal on free-threaded builds (to avoid contention).""" + self.builder.set_immortal_if_free_threaded(v, line) + + # Helpers for IR building + + def add_to_non_ext_dict( + self, non_ext: NonExtClassInfo, key: str, val: 
Value, line: int + ) -> None: + # Add an attribute entry into the class dict of a non-extension class. + key_unicode = self.load_str(key) + self.primitive_op(dict_set_item_op, [non_ext.dict, key_unicode, val], line) + + # It's important that accessing class dictionary items from multiple threads + # doesn't cause contention. + self.builder.set_immortal_if_free_threaded(val, line) + + def gen_import(self, id: str, line: int) -> None: + self.imports[id] = None + + needs_import, out = BasicBlock(), BasicBlock() + self.check_if_module_loaded(id, line, needs_import, out) + + self.activate_block(needs_import) + value = self.call_c(import_op, [self.load_str(id)], line) + self.add(InitStatic(value, id, namespace=NAMESPACE_MODULE)) + self.goto_and_activate(out) + + def check_if_module_loaded( + self, id: str, line: int, needs_import: BasicBlock, out: BasicBlock + ) -> None: + """Generate code that checks if the module `id` has been loaded yet. + + Arguments: + id: name of module to check if imported + line: line number that the import occurs on + needs_import: the BasicBlock that is run if the module has not been loaded yet + out: the BasicBlock that is run if the module has already been loaded""" + first_load = self.load_module(id) + comparison = self.translate_is_op(first_load, self.none_object(), "is not", line) + self.add_bool_branch(comparison, out, needs_import) + + def get_module(self, module: str, line: int) -> Value: + # Python 3.7 has a nice 'PyImport_GetModule' function that we can't use :( + mod_dict = self.call_c(get_module_dict_op, [], line) + # Get module object from modules dict. + return self.primitive_op(dict_get_item_op, [mod_dict, self.load_str(module)], line) + + def get_module_attr(self, module: str, attr: str, line: int) -> Value: + """Look up an attribute of a module without storing it in the local namespace. + + For example, get_module_attr('typing', 'TypedDict', line) results in + the value of 'typing.TypedDict'. + + Import the module if needed. 
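# A plain-Python model of what gen_import/check_if_module_loaded (above)
# compile to, using only the standard library. _module_statics is a
# hypothetical stand-in for the per-module static slots that InitStatic and
# LoadStatic manage; the real generated code works on C statics, not a dict.
import importlib

_module_statics: dict[str, object] = {}


def ensure_imported(module_id: str) -> object:
    mod = _module_statics.get(module_id)
    if mod is None:                                  # the 'is not None' branch test
        mod = importlib.import_module(module_id)     # import_op
        _module_statics[module_id] = mod             # InitStatic(..., NAMESPACE_MODULE)
    return mod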
+ """ + self.gen_import(module, line) + module_obj = self.get_module(module, line) + return self.py_get_attr(module_obj, attr, line) + + def assign_if_null(self, target: Register, get_val: Callable[[], Value], line: int) -> None: + """If target is NULL, assign value produced by get_val to it.""" + error_block, body_block = BasicBlock(), BasicBlock() + self.add(Branch(target, error_block, body_block, Branch.IS_ERROR)) + self.activate_block(error_block) + self.add(Assign(target, self.coerce(get_val(), target.type, line))) + self.goto(body_block) + self.activate_block(body_block) + + def assign_if_bitmap_unset( + self, target: Register, get_val: Callable[[], Value], index: int, line: int + ) -> None: + error_block, body_block = BasicBlock(), BasicBlock() + o = self.int_op( + bitmap_rprimitive, + self.builder.args[-1 - index // BITMAP_BITS], + Integer(1 << (index & (BITMAP_BITS - 1)), bitmap_rprimitive), + IntOp.AND, + line, + ) + b = self.add(ComparisonOp(o, Integer(0, bitmap_rprimitive), ComparisonOp.EQ)) + self.add(Branch(b, error_block, body_block, Branch.BOOL)) + self.activate_block(error_block) + self.add(Assign(target, self.coerce(get_val(), target.type, line))) + self.goto(body_block) + self.activate_block(body_block) + + def maybe_add_implicit_return(self) -> None: + if is_none_rprimitive(self.ret_types[-1]) or is_object_rprimitive(self.ret_types[-1]): + self.add_implicit_return() + else: + self.add_implicit_unreachable() + + def add_implicit_return(self) -> None: + block = self.builder.blocks[-1] + if not block.terminated: + retval = self.coerce(self.builder.none(), self.ret_types[-1], -1) + self.nonlocal_control[-1].gen_return(self, retval, self.fn_info.fitem.line) + + def add_implicit_unreachable(self) -> None: + block = self.builder.blocks[-1] + if not block.terminated: + self.add(Unreachable()) + + def disallow_class_assignments(self, lvalues: list[Lvalue], line: int) -> None: + # Some best-effort attempts to disallow assigning to class + # variables that aren't marked ClassVar, since we blatantly + # miscompile the interaction between instance and class + # variables. + for lvalue in lvalues: + if ( + isinstance(lvalue, MemberExpr) + and isinstance(lvalue.expr, RefExpr) + and isinstance(lvalue.expr.node, TypeInfo) + ): + var = lvalue.expr.node[lvalue.name].node + if isinstance(var, Var) and not var.is_classvar: + self.error("Only class variables defined as ClassVar can be assigned to", line) + + def non_function_scope(self) -> bool: + # Currently the stack always has at least two items: dummy and top-level. 
+ return len(self.fn_infos) <= 2 + + def top_level_fn_info(self) -> FuncInfo | None: + if self.non_function_scope(): + return None + return self.fn_infos[2] + + def init_final_static( + self, + lvalue: Lvalue, + rvalue_reg: Value, + class_name: str | None = None, + *, + type_override: RType | None = None, + ) -> None: + assert isinstance(lvalue, NameExpr), lvalue + assert isinstance(lvalue.node, Var), lvalue.node + if lvalue.node.final_value is None: + if class_name is None: + name = lvalue.name + else: + name = f"{class_name}.{lvalue.name}" + assert name is not None, "Full name not set for variable" + coerced = self.coerce(rvalue_reg, type_override or self.node_type(lvalue), lvalue.line) + self.final_names.append((name, coerced.type)) + self.add(InitStatic(coerced, name, self.module_name)) + + def load_final_static( + self, fullname: str, typ: RType, line: int, error_name: str | None = None + ) -> Value: + split_name = split_target(self.graph, fullname) + assert split_name is not None + module, name = split_name + return self.builder.load_static_checked( + typ, + name, + module, + line=line, + error_msg=f'value for final name "{error_name}" was not set', + ) + + def init_type_var(self, value: Value, name: str, line: int) -> None: + unique_name = name + "___" + str(line) + self.type_var_names.append(unique_name) + self.add(InitStatic(value, unique_name, self.module_name, namespace=NAMESPACE_TYPE_VAR)) + + def load_type_var(self, name: str, line: int) -> Value: + return self.add( + LoadStatic( + object_rprimitive, + name + "___" + str(line), + self.module_name, + namespace=NAMESPACE_TYPE_VAR, + ) + ) + + def load_literal_value(self, val: int | str | bytes | float | complex | bool) -> Value: + """Load value of a final name, class-level attribute, or constant folded expression.""" + if isinstance(val, bool): + if val: + return self.true() + else: + return self.false() + elif isinstance(val, int): + return self.builder.load_int(val) + elif isinstance(val, float): + return self.builder.load_float(val) + elif isinstance(val, str): + return self.builder.load_str(val) + elif isinstance(val, bytes): + return self.builder.load_bytes(val) + elif isinstance(val, complex): + return self.builder.load_complex(val) + else: + assert False, "Unsupported literal value" + + def get_assignment_target( + self, lvalue: Lvalue, line: int = -1, *, for_read: bool = False + ) -> AssignmentTarget: + if line == -1: + line = lvalue.line + if isinstance(lvalue, NameExpr): + # If we are visiting a decorator, then the SymbolNode we really want to be looking at + # is the function that is decorated, not the entire Decorator node itself. + symbol = lvalue.node + if isinstance(symbol, Decorator): + symbol = symbol.func + if symbol is None: + # Semantic analyzer doesn't create ad-hoc Vars for special forms. + assert lvalue.is_special_form + symbol = Var(lvalue.name) + if not for_read and isinstance(symbol, Var) and symbol.is_cls: + self.error("Cannot assign to the first argument of classmethod", line) + if lvalue.kind == LDEF: + if symbol not in self.symtables[-1]: + if isinstance(symbol, Var) and not isinstance(symbol.type, DeletedType): + reg_type = self.type_to_rtype(symbol.type) + else: + reg_type = self.node_type(lvalue) + # If the function is a generator function, then first define a new variable + # in the current function's environment class. Next, define a target that + # refers to the newly defined variable in that environment class. 
Add the + # target to the table containing class environment variables, as well as the + # current environment. + if self.fn_info.is_generator: + return self.add_var_to_env_class( + symbol, + reg_type, + self.fn_info.generator_class, + reassign=False, + prefix=GENERATOR_ATTRIBUTE_PREFIX, + ) + + # Otherwise define a new local variable. + return self.add_local_reg(symbol, reg_type) + else: + # Assign to a previously defined variable. + return self.lookup(symbol) + elif lvalue.kind == GDEF: + globals_dict = self.load_globals_dict() + name = self.load_str(lvalue.name) + return AssignmentTargetIndex(globals_dict, name) + else: + assert False, lvalue.kind + elif isinstance(lvalue, IndexExpr): + # Indexed assignment x[y] = e + base = self.accept(lvalue.base) + index = self.accept(lvalue.index) + return AssignmentTargetIndex(base, index) + elif isinstance(lvalue, MemberExpr): + # Attribute assignment x.y = e + can_borrow = self.is_native_attr_ref(lvalue) + obj = self.accept(lvalue.expr, can_borrow=can_borrow) + return AssignmentTargetAttr(obj, lvalue.name, can_borrow=can_borrow) + elif isinstance(lvalue, TupleExpr): + # Multiple assignment a, ..., b = e + star_idx: int | None = None + lvalues = [] + for idx, item in enumerate(lvalue.items): + targ = self.get_assignment_target(item) + lvalues.append(targ) + if isinstance(item, StarExpr): + if star_idx is not None: + self.error("Two starred expressions in assignment", line) + star_idx = idx + + return AssignmentTargetTuple(lvalues, star_idx) + + elif isinstance(lvalue, StarExpr): + return self.get_assignment_target(lvalue.expr) + + assert False, "Unsupported lvalue: %r" % lvalue + + def read( + self, + target: Value | AssignmentTarget, + line: int = -1, + *, + can_borrow: bool = False, + allow_error_value: bool = False, + ) -> Value: + if isinstance(target, Value): + return target + if isinstance(target, AssignmentTargetRegister): + return target.register + if isinstance(target, AssignmentTargetIndex): + reg = self.gen_method_call( + target.base, "__getitem__", [target.index], target.type, line + ) + if reg is not None: + return reg + assert False, target.base.type + if isinstance(target, AssignmentTargetAttr): + if isinstance(target.obj.type, RInstance) and target.obj.type.class_ir.is_ext_class: + borrow = can_borrow and target.can_borrow + return self.add( + GetAttr( + target.obj, + target.attr, + line, + borrow=borrow, + allow_error_value=allow_error_value, + ) + ) + else: + return self.py_get_attr(target.obj, target.attr, line) + + assert False, "Unsupported lvalue: %r" % target + + def read_nullable_attr(self, obj: Value, attr: str, line: int = -1) -> Value: + """Read an attribute that might have an error value without raising AttributeError.""" + assert isinstance(obj.type, RInstance) and obj.type.class_ir.is_ext_class + return self.add(GetAttr(obj, attr, line, allow_error_value=True)) + + def assign(self, target: Register | AssignmentTarget, rvalue_reg: Value, line: int) -> None: + if isinstance(target, Register): + self.add(Assign(target, self.coerce_rvalue(rvalue_reg, target.type, line))) + elif isinstance(target, AssignmentTargetRegister): + rvalue_reg = self.coerce_rvalue(rvalue_reg, target.type, line) + self.add(Assign(target.register, rvalue_reg)) + elif isinstance(target, AssignmentTargetAttr): + if isinstance(target.obj_type, RInstance): + setattr = target.obj_type.class_ir.get_method("__setattr__") + if setattr: + key = self.load_str(target.attr) + boxed_reg = self.builder.box(rvalue_reg) + call = MethodCall(target.obj, 
setattr.name, [key, boxed_reg], line) + self.add(call) + else: + rvalue_reg = self.coerce_rvalue(rvalue_reg, target.type, line) + self.add(SetAttr(target.obj, target.attr, rvalue_reg, line)) + else: + key = self.load_str(target.attr) + boxed_reg = self.builder.box(rvalue_reg) + self.primitive_op(py_setattr_op, [target.obj, key, boxed_reg], line) + elif isinstance(target, AssignmentTargetIndex): + target_reg2 = self.gen_method_call( + target.base, "__setitem__", [target.index, rvalue_reg], None, line + ) + assert target_reg2 is not None, target.base.type + elif isinstance(target, AssignmentTargetTuple): + if isinstance(rvalue_reg.type, RTuple) and target.star_idx is None: + rtypes = rvalue_reg.type.types + assert len(rtypes) == len(target.items) + for i in range(len(rtypes)): + item_value = self.add(TupleGet(rvalue_reg, i, line)) + self.assign(target.items[i], item_value, line) + elif ( + is_list_rprimitive(rvalue_reg.type) or is_tuple_rprimitive(rvalue_reg.type) + ) and target.star_idx is None: + self.process_sequence_assignment(target, rvalue_reg, line) + else: + self.process_iterator_tuple_assignment(target, rvalue_reg, line) + else: + assert False, "Unsupported assignment target" + + def coerce_rvalue(self, rvalue: Value, rtype: RType, line: int) -> Value: + if is_float_rprimitive(rtype) and is_tagged(rvalue.type): + typename = rvalue.type.short_name() + if typename == "short_int": + typename = "int" + self.error( + "Incompatible value representations in assignment " + + f'(expression has type "{typename}", variable has type "float")', + line, + ) + return self.coerce(rvalue, rtype, line) + + def process_sequence_assignment( + self, target: AssignmentTargetTuple, rvalue: Value, line: int + ) -> None: + """Process assignment like 'x, y = s', where s is a variable-length list or tuple.""" + # Check the length of sequence. + expected_len = Integer(len(target.items), c_pyssize_t_rprimitive) + self.builder.call_c(check_unpack_count_op, [rvalue, expected_len], line) + + # Read sequence items. + values = [] + for i in range(len(target.items)): + item = target.items[i] + index: Value + if is_list_rprimitive(rvalue.type): + index = Integer(i, c_pyssize_t_rprimitive) + item_value = self.primitive_op(list_get_item_unsafe_op, [rvalue, index], line) + elif is_tuple_rprimitive(rvalue.type): + index = Integer(i, c_pyssize_t_rprimitive) + item_value = self.call_c(tuple_get_item_unsafe_op, [rvalue, index], line) + else: + index = self.builder.load_int(i) + item_value = self.builder.gen_method_call( + rvalue, "__getitem__", [index], item.type, line + ) + values.append(item_value) + + # Assign sequence items to the target lvalues. 
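# A plain-Python sketch of the runtime behaviour that process_sequence_assignment
# (above) lowers 'x, y = s' to: check the length first (check_unpack_count_op),
# then read the items with unchecked indexing, then perform the assignments.
# unpack_pair is a hypothetical helper, not mypyc API.

def unpack_pair(s: list[object]) -> tuple[object, object]:
    if len(s) != 2:
        raise ValueError(f"expected 2 values to unpack, got {len(s)}")
    first = s[0]     # list_get_item_unsafe_op: bounds already verified above
    second = s[1]
    return first, second


x, y = unpack_pair([1, 2])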
+ for lvalue, value in zip(target.items, values): + self.assign(lvalue, value, line) + + def process_iterator_tuple_assignment_helper( + self, litem: AssignmentTarget, ritem: Value, line: int + ) -> None: + error_block, ok_block = BasicBlock(), BasicBlock() + self.add(Branch(ritem, error_block, ok_block, Branch.IS_ERROR)) + + self.activate_block(error_block) + self.add( + RaiseStandardError(RaiseStandardError.VALUE_ERROR, "not enough values to unpack", line) + ) + self.add(Unreachable()) + + self.activate_block(ok_block) + self.assign(litem, ritem, line) + + def process_iterator_tuple_assignment( + self, target: AssignmentTargetTuple, rvalue_reg: Value, line: int + ) -> None: + iterator = self.primitive_op(iter_op, [rvalue_reg], line) + + # This may be the whole lvalue list if there is no starred value + split_idx = target.star_idx if target.star_idx is not None else len(target.items) + + # Assign values before the first starred value + for litem in target.items[:split_idx]: + ritem = self.call_c(next_op, [iterator], line) + error_block, ok_block = BasicBlock(), BasicBlock() + self.add(Branch(ritem, error_block, ok_block, Branch.IS_ERROR)) + + self.activate_block(error_block) + self.add( + RaiseStandardError( + RaiseStandardError.VALUE_ERROR, "not enough values to unpack", line + ) + ) + self.add(Unreachable()) + + self.activate_block(ok_block) + + self.assign(litem, ritem, line) + + # Assign the starred value and all values after it + if target.star_idx is not None: + post_star_vals = target.items[split_idx + 1 :] + iter_list = self.primitive_op(to_list, [iterator], line) + iter_list_len = self.builtin_len(iter_list, line) + post_star_len = Integer(len(post_star_vals)) + condition = self.binary_op(post_star_len, iter_list_len, "<=", line) + + error_block, ok_block = BasicBlock(), BasicBlock() + self.add(Branch(condition, ok_block, error_block, Branch.BOOL)) + + self.activate_block(error_block) + self.add( + RaiseStandardError( + RaiseStandardError.VALUE_ERROR, "not enough values to unpack", line + ) + ) + self.add(Unreachable()) + + self.activate_block(ok_block) + + for litem in reversed(post_star_vals): + ritem = self.primitive_op(list_pop_last, [iter_list], line) + self.assign(litem, ritem, line) + + # Assign the starred value + self.assign(target.items[target.star_idx], iter_list, line) + + # There is no starred value, so check if there are extra values in rhs that + # have not been assigned. 
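+        # For example, 'x, y = (1, 2, 3)' must fail: one more next() call will
+        # produce a value, which the branch below turns into a ValueError.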
+ else: + extra = self.call_c(next_op, [iterator], line) + error_block, ok_block = BasicBlock(), BasicBlock() + self.add(Branch(extra, ok_block, error_block, Branch.IS_ERROR)) + + self.activate_block(error_block) + self.add( + RaiseStandardError( + RaiseStandardError.VALUE_ERROR, "too many values to unpack", line + ) + ) + self.add(Unreachable()) + + self.activate_block(ok_block) + + def push_loop_stack(self, continue_block: BasicBlock, break_block: BasicBlock) -> None: + self.nonlocal_control.append( + LoopNonlocalControl(self.nonlocal_control[-1], continue_block, break_block) + ) + + def pop_loop_stack(self) -> None: + self.nonlocal_control.pop() + + def make_spill_target(self, type: RType) -> AssignmentTarget: + """Moves a given Value instance into the generator class' environment class.""" + name = f"{TEMP_ATTR_NAME}{self.temp_counter}" + self.temp_counter += 1 + target = self.add_var_to_env_class(Var(name), type, self.fn_info.generator_class) + return target + + def spill(self, value: Value) -> AssignmentTarget: + """Moves a given Value instance into the generator class' environment class.""" + target = self.make_spill_target(value.type) + # Shouldn't be able to fail, so -1 for line + self.assign(target, value, -1) + return target + + def maybe_spill(self, value: Value) -> Value | AssignmentTarget: + """ + Moves a given Value instance into the environment class for generator functions. For + non-generator functions, leaves the Value instance as it is. + + Returns an AssignmentTarget associated with the Value for generator functions and the + original Value itself for non-generator functions. + """ + if self.fn_info.is_generator: + return self.spill(value) + return value + + def maybe_spill_assignable(self, value: Value) -> Register | AssignmentTarget: + """ + Moves a given Value instance into the environment class for generator functions. For + non-generator functions, allocate a temporary Register. + + Returns an AssignmentTarget associated with the Value for generator functions and an + assignable Register for non-generator functions. + """ + if self.fn_info.is_generator: + return self.spill(value) + + if isinstance(value, Register): + return value + + # Allocate a temporary register for the assignable value. + reg = Register(value.type) + self.assign(reg, value, -1) + return reg + + def extract_int(self, e: Expression) -> int | None: + folded = constant_fold_expr(self, e) + return folded if isinstance(folded, int) else None + + def get_sequence_type(self, expr: Expression) -> RType: + return self.get_sequence_type_from_type(self.types[expr]) + + def get_sequence_type_from_type(self, target_type: Type) -> RType: + target_type = get_proper_type(target_type) + if isinstance(target_type, UnionType): + return RUnion.make_simplified_union( + [self.get_sequence_type_from_type(item) for item in target_type.items] + ) + elif isinstance(target_type, Instance): + if target_type.type.fullname == "builtins.str": + return str_rprimitive + elif target_type.type.fullname == "builtins.bytes": + return bytes_rprimitive + try: + return self.type_to_rtype(target_type.args[0]) + except IndexError: + raise ValueError(f"{target_type!r} is not a valid sequence.") from None + # This elif-blocks are needed for iterating over classes derived from NamedTuple. 
+ elif isinstance(target_type, TypeVarLikeType): + return self.get_sequence_type_from_type(target_type.upper_bound) + elif isinstance(target_type, TupleType): + items = target_type.items + assert items, "This function does not support empty tuples" + # Tuple might have elements of different types. + rtypes = set(map(self.mapper.type_to_rtype, items)) + if len(rtypes) == 1: + return rtypes.pop() + else: + return RUnion.make_simplified_union(list(rtypes)) + assert False, target_type + + def get_dict_base_type(self, expr: Expression) -> list[Instance]: + """Find dict type of a dict-like expression. + + This is useful for dict subclasses like SymbolTable. + """ + return self.get_dict_base_type_from_type(self.types[expr]) + + def get_dict_base_type_from_type(self, target_type: Type) -> list[Instance]: + target_type = get_proper_type(target_type) + if isinstance(target_type, UnionType): + return [ + inner + for item in target_type.items + for inner in self.get_dict_base_type_from_type(item) + ] + if isinstance(target_type, TypeVarLikeType): + # Match behaviour of self.node_type + # We can only reach this point if `target_type` was a TypeVar(bound=dict[...]) + # or a ParamSpec. + return self.get_dict_base_type_from_type(target_type.upper_bound) + + if isinstance(target_type, TypedDictType): + target_type = target_type.fallback + dict_base = next( + base for base in target_type.type.mro if base.fullname == "typing.Mapping" + ) + elif isinstance(target_type, Instance): + dict_base = next( + base for base in target_type.type.mro if base.fullname == "builtins.dict" + ) + else: + assert False, f"Failed to extract dict base from {target_type}" + return [map_instance_to_supertype(target_type, dict_base)] + + def get_dict_key_type(self, expr: Expression) -> RType: + dict_base_types = self.get_dict_base_type(expr) + rtypes = [self.type_to_rtype(t.args[0]) for t in dict_base_types] + return RUnion.make_simplified_union(rtypes) + + def get_dict_value_type(self, expr: Expression) -> RType: + dict_base_types = self.get_dict_base_type(expr) + rtypes = [self.type_to_rtype(t.args[1]) for t in dict_base_types] + return RUnion.make_simplified_union(rtypes) + + def get_dict_item_type(self, expr: Expression) -> RType: + key_type = self.get_dict_key_type(expr) + value_type = self.get_dict_value_type(expr) + return RTuple([key_type, value_type]) + + def _analyze_iterable_item_type(self, expr: Expression) -> Type: + """Return the item type given by 'expr' in an iterable context.""" + # This logic is copied from mypy's TypeChecker.analyze_iterable_item_type. + if expr not in self.types: + # Mypy thinks this is unreachable. + iterable: ProperType = AnyType(TypeOfAny.from_error) + else: + iterable = get_proper_type(self.types[expr]) + echk = self.graph[self.module_name].type_checker().expr_checker + iterator = echk.check_method_call_by_name("__iter__", iterable, [], [], expr)[0] + + from mypy.join import join_types + + if isinstance(iterable, TupleType): + joined: Type = UninhabitedType() + for item in iterable.items: + joined = join_types(joined, item) + return joined + else: + # Non-tuple iterable. 
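+            # The item type is whatever the type checker says __next__ returns on
+            # the iterator, e.g. 'int' when iterating over a list[int].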
+ return echk.check_method_call_by_name("__next__", iterator, [], [], expr)[0] + + def is_native_module(self, module: str) -> bool: + """Is the given module one compiled by mypyc?""" + return self.mapper.is_native_module(module) + + def is_native_ref_expr(self, expr: RefExpr) -> bool: + return self.mapper.is_native_ref_expr(expr) + + def is_native_module_ref_expr(self, expr: RefExpr) -> bool: + return self.mapper.is_native_module_ref_expr(expr) + + def is_synthetic_type(self, typ: TypeInfo) -> bool: + """Is a type something other than just a class we've created?""" + return typ.is_named_tuple or typ.is_newtype or typ.typeddict_type is not None + + def get_final_ref(self, expr: MemberExpr) -> tuple[str, Var, bool] | None: + """Check if `expr` is a final attribute. + + This needs to be done differently for class and module attributes to + correctly determine fully qualified name. Return a tuple that consists of + the qualified name, the corresponding Var node, and a flag indicating whether + the final name was defined in a compiled module. Return None if `expr` does not + refer to a final attribute. + """ + final_var = None + if isinstance(expr.expr, RefExpr) and isinstance(expr.expr.node, TypeInfo): + # a class attribute + sym = expr.expr.node.get(expr.name) + if sym and isinstance(sym.node, Var): + # Enum attribute are treated as final since they are added to the global cache + expr_fullname = expr.expr.node.bases[0].type.fullname + is_final = sym.node.is_final or expr_fullname == "enum.Enum" + if is_final: + final_var = sym.node + fullname = f"{sym.node.info.fullname}.{final_var.name}" + native = self.is_native_module(expr.expr.node.module_name) + elif self.is_module_member_expr(expr): + # a module attribute + if isinstance(expr.node, Var) and expr.node.is_final: + final_var = expr.node + fullname = expr.node.fullname + native = self.is_native_ref_expr(expr) + if final_var is not None: + return fullname, final_var, native + return None + + def emit_load_final( + self, final_var: Var, fullname: str, name: str, native: bool, typ: Type, line: int + ) -> Value | None: + """Emit code for loading value of a final name (if possible). + + Args: + final_var: Var corresponding to the final name + fullname: its qualified name + name: shorter name to show in errors + native: whether the name was defined in a compiled module + typ: its type + line: line number where loading occurs + """ + if final_var.final_value is not None: # this is safe even for non-native names + return self.load_literal_value(final_var.final_value) + elif native and module_prefix(self.graph, fullname): + return self.load_final_static(fullname, self.mapper.type_to_rtype(typ), line, name) + else: + return None + + def is_module_member_expr(self, expr: MemberExpr) -> bool: + return isinstance(expr.expr, RefExpr) and isinstance(expr.expr.node, MypyFile) + + def call_refexpr_with_args( + self, expr: CallExpr, callee: RefExpr, arg_values: list[Value] + ) -> Value: + # Handle data-driven special-cased primitive call ops. + if callee.fullname and expr.arg_kinds == [ARG_POS] * len(arg_values): + fullname = get_call_target_fullname(callee) + primitive_candidates = function_ops.get(fullname, []) + target = self.builder.matching_primitive_op( + primitive_candidates, arg_values, expr.line, self.node_type(expr) + ) + if target: + return target + + # Standard native call if signature and fullname are good and all arguments are positional + # or named. 
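+        # ("Native" meaning the compiled function is called directly through its
+        # FuncDecl with the C calling convention, rather than via the generic
+        # Python call fallback at the end of this method.)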
+ callee_node = callee.node + if isinstance(callee_node, OverloadedFuncDef): + callee_node = callee_node.impl + # TODO: use native calls for any decorated functions which have all their decorators + # removed, not just singledispatch functions (which we don't do now just in case those + # decorated functions are callable classes or cannot be called without the python API for + # some other reason) + if ( + isinstance(callee_node, Decorator) + and callee_node.func not in self.fdefs_to_decorators + and callee_node.func in self.singledispatch_impls + ): + callee_node = callee_node.func + if ( + callee_node is not None + and callee.fullname + and callee_node in self.mapper.func_to_decl + and all(kind in (ARG_POS, ARG_NAMED) for kind in expr.arg_kinds) + ): + decl = self.mapper.func_to_decl[callee_node] + return self.builder.call(decl, arg_values, expr.arg_kinds, expr.arg_names, expr.line) + + # Fall back to a Python call + function = self.accept(callee) + return self.py_call( + function, arg_values, expr.line, arg_kinds=expr.arg_kinds, arg_names=expr.arg_names + ) + + def shortcircuit_expr(self, expr: OpExpr) -> Value: + def handle_right() -> Value: + if expr.right_unreachable: + self.builder.add( + RaiseStandardError( + RaiseStandardError.RUNTIME_ERROR, + "mypyc internal error: should be unreachable", + expr.right.line, + ) + ) + return self.builder.none() + return self.accept(expr.right) + + return self.builder.shortcircuit_helper( + expr.op, self.node_type(expr), lambda: self.accept(expr.left), handle_right, expr.line + ) + + # Basic helpers + + def flatten_classes(self, arg: RefExpr | TupleExpr) -> list[ClassIR] | None: + """Flatten classes in isinstance(obj, (A, (B, C))). + + If at least one item is not a reference to a native class, return None. + """ + if isinstance(arg, RefExpr): + if isinstance(arg.node, TypeInfo) and self.is_native_module_ref_expr(arg): + ir = self.mapper.type_to_ir.get(arg.node) + if ir: + return [ir] + return None + else: + res: list[ClassIR] = [] + for item in arg.items: + if isinstance(item, (RefExpr, TupleExpr)): + item_part = self.flatten_classes(item) + if item_part is None: + return None + res.extend(item_part) + else: + return None + return res + + def enter(self, fn_info: FuncInfo | str = "", *, ret_type: RType = none_rprimitive) -> None: + if isinstance(fn_info, str): + fn_info = FuncInfo(name=fn_info) + self.builder = LowLevelIRBuilder(self.errors, self.options) + self.builder.set_module(self.module_name, self.module_path) + self.builders.append(self.builder) + self.symtables.append({}) + self.runtime_args.append([]) + self.fn_info = fn_info + self.fn_infos.append(self.fn_info) + self.ret_types.append(ret_type) + if fn_info.is_generator: + self.nonlocal_control.append(GeneratorNonlocalControl()) + else: + self.nonlocal_control.append(BaseNonlocalControl()) + self.activate_block(BasicBlock()) + + def leave(self) -> tuple[list[Register], list[RuntimeArg], list[BasicBlock], RType, FuncInfo]: + builder = self.builders.pop() + self.symtables.pop() + runtime_args = self.runtime_args.pop() + ret_type = self.ret_types.pop() + fn_info = self.fn_infos.pop() + self.nonlocal_control.pop() + self.builder = self.builders[-1] + self.fn_info = self.fn_infos[-1] + return builder.args, runtime_args, builder.blocks, ret_type, fn_info + + @contextmanager + def enter_method( + self, + class_ir: ClassIR, + name: str, + ret_type: RType, + fn_info: FuncInfo | str = "", + self_type: RType | None = None, + internal: bool = False, + ) -> Iterator[None]: + """Generate IR for a 
method. + + If the method takes arguments, you should immediately afterwards call + add_argument() for each non-self argument (self is created implicitly). + + Args: + class_ir: Add method to this class + name: Short name of the method + ret_type: Return type of the method + fn_info: Optionally, additional information about the method + self_type: If not None, override default type of the implicit 'self' + argument (by default, derive type from class_ir) + """ + self.enter(fn_info, ret_type=ret_type) + self.function_name_stack.append(name) + self.class_ir_stack.append(class_ir) + if self_type is None: + self_type = RInstance(class_ir) + self.add_argument(SELF_NAME, self_type) + try: + yield + finally: + arg_regs, args, blocks, ret_type, fn_info = self.leave() + sig = FuncSignature(args, ret_type) + name = self.function_name_stack.pop() + class_ir = self.class_ir_stack.pop() + decl = FuncDecl(name, class_ir.name, self.module_name, sig, internal=internal) + ir = FuncIR(decl, arg_regs, blocks) + class_ir.methods[name] = ir + class_ir.method_decls[name] = ir.decl + self.functions.append(ir) + + def add_argument(self, var: str | Var, typ: RType, kind: ArgKind = ARG_POS) -> Register: + """Declare an argument in the current function. + + You should use this instead of directly calling add_local() in new code. + """ + if isinstance(var, str): + var = Var(var) + reg = self.add_local(var, typ, is_arg=True) + self.runtime_args[-1].append(RuntimeArg(var.name, typ, kind)) + return reg + + def lookup(self, symbol: SymbolNode) -> SymbolTarget: + return self.symtables[-1][symbol] + + def add_local(self, symbol: SymbolNode, typ: RType, is_arg: bool = False) -> Register: + """Add register that represents a symbol to the symbol table. + + Args: + is_arg: is this a function argument + """ + assert isinstance(symbol, SymbolNode), symbol + reg = Register( + typ, remangle_redefinition_name(symbol.name), is_arg=is_arg, line=symbol.line + ) + self.symtables[-1][symbol] = AssignmentTargetRegister(reg) + if is_arg: + self.builder.args.append(reg) + return reg + + def add_local_reg( + self, symbol: SymbolNode, typ: RType, is_arg: bool = False + ) -> AssignmentTargetRegister: + """Like add_local, but return an assignment target instead of value.""" + self.add_local(symbol, typ, is_arg) + target = self.symtables[-1][symbol] + assert isinstance(target, AssignmentTargetRegister), target + return target + + def add_self_to_env(self, cls: ClassIR) -> AssignmentTargetRegister: + """Low-level function that adds a 'self' argument. + + This is only useful if using enter() instead of enter_method(). + """ + return self.add_local_reg(Var(SELF_NAME), RInstance(cls), is_arg=True) + + def add_target(self, symbol: SymbolNode, target: SymbolTarget) -> SymbolTarget: + self.symtables[-1][symbol] = target + return target + + def type_to_rtype(self, typ: Type | None) -> RType: + return self.mapper.type_to_rtype(typ) + + def node_type(self, node: Expression) -> RType: + if isinstance(node, IntExpr): + # TODO: Don't special case IntExpr + return int_rprimitive + if node not in self.types: + return object_rprimitive + mypy_type = self.types[node] + return self.type_to_rtype(mypy_type) + + def add_var_to_env_class( + self, + var: SymbolNode, + rtype: RType, + base: FuncInfo | ImplicitClass, + reassign: bool = False, + always_defined: bool = False, + prefix: str = "", + ) -> AssignmentTarget: + # First, define the variable name as an attribute of the environment class, and then + # construct a target for that attribute. 
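+        # (For generators this is how locals survive across yields: the variable is
+        # stored as an attribute of the environment object instead of a register.)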
+ name = prefix + remangle_redefinition_name(var.name) + self.fn_info.env_class.attributes[name] = rtype + if always_defined: + self.fn_info.env_class.attrs_with_defaults.add(name) + attr_target = AssignmentTargetAttr(base.curr_env_reg, name) + + if reassign: + # Read the local definition of the variable, and set the corresponding attribute of + # the environment class' variable to be that value. + reg = self.read(self.lookup(var), self.fn_info.fitem.line) + self.add(SetAttr(base.curr_env_reg, name, reg, self.fn_info.fitem.line)) + + # Override the local definition of the variable to instead point at the variable in + # the environment class. + return self.add_target(var, attr_target) + + def is_builtin_ref_expr(self, expr: RefExpr) -> bool: + assert expr.node, "RefExpr not resolved" + return "." in expr.node.fullname and expr.node.fullname.split(".")[0] == "builtins" + + def load_global(self, expr: NameExpr) -> Value: + """Loads a Python-level global. + + This takes a NameExpr and uses its name as a key to retrieve the corresponding PyObject * + from the _globals dictionary in the C-generated code. + """ + # If the global is from 'builtins', turn it into a module attr load instead + if self.is_builtin_ref_expr(expr): + assert expr.node, "RefExpr not resolved" + return self.load_module_attr_by_fullname(expr.node.fullname, expr.line) + if ( + self.is_native_module_ref_expr(expr) + and isinstance(expr.node, TypeInfo) + and not self.is_synthetic_type(expr.node) + ): + assert expr.fullname + return self.load_native_type_object(expr.fullname) + return self.load_global_str(expr.name, expr.line) + + def load_global_str(self, name: str, line: int) -> Value: + _globals = self.load_globals_dict() + reg = self.load_str(name) + return self.primitive_op(dict_get_item_op, [_globals, reg], line) + + def load_globals_dict(self) -> Value: + return self.add(LoadStatic(dict_rprimitive, "globals", self.module_name)) + + def load_module_attr_by_fullname(self, fullname: str, line: int) -> Value: + module, _, name = fullname.rpartition(".") + left = self.load_module(module) + return self.py_get_attr(left, name, line) + + def is_native_attr_ref(self, expr: MemberExpr) -> bool: + """Is expr a direct reference to a native (struct) attribute of an instance?""" + obj_rtype = self.node_type(expr.expr) + return ( + isinstance(obj_rtype, RInstance) + and obj_rtype.class_ir.is_ext_class + and obj_rtype.class_ir.has_attr(expr.name) + and not obj_rtype.class_ir.get_method(expr.name) + ) + + def mark_block_unreachable(self) -> None: + """Mark statements in the innermost block being processed as unreachable. + + This should be called after a statement that unconditionally leaves the + block, such as 'break' or 'return'. 
+ """ + self.block_reachable_stack[-1] = False + + # Lacks a good type because there wasn't a reasonable type in 3.5 :( + def catch_errors(self, line: int) -> Any: + return catch_errors(self.module_path, line) + + def warning(self, msg: str, line: int) -> None: + self.errors.warning(msg, self.module_path, line) + + def error(self, msg: str, line: int) -> None: + self.errors.error(msg, self.module_path, line) + + def note(self, msg: str, line: int) -> None: + self.errors.note(msg, self.module_path, line) + + def add_function(self, func_ir: FuncIR, line: int) -> None: + name = (func_ir.class_name, func_ir.name) + if name in self.function_names: + self.error(f'Duplicate definition of "{name[1]}" not supported by mypyc', line) + return + self.function_names.add(name) + self.functions.append(func_ir) + + def get_current_class_ir(self) -> ClassIR | None: + type_info = self.fn_info.fitem.info + return self.mapper.type_to_ir.get(type_info) + + +def gen_arg_defaults(builder: IRBuilder) -> None: + """Generate blocks for arguments that have default values. + + If the passed value is an error value, then assign the default + value to the argument. + """ + fitem = builder.fn_info.fitem + nb = 0 + for arg in fitem.arguments: + if arg.initializer: + target = builder.lookup(arg.variable) + + def get_default() -> Value: + assert arg.initializer is not None + + # If it is constant, don't bother storing it + if is_constant(arg.initializer): + return builder.accept(arg.initializer) + + # Because gen_arg_defaults runs before calculate_arg_defaults, we + # add the static/attribute to final_names/the class here. + elif not builder.fn_info.is_nested: + name = fitem.fullname + "." + arg.variable.name + builder.final_names.append((name, target.type)) + return builder.add(LoadStatic(target.type, name, builder.module_name)) + else: + name = arg.variable.name + builder.fn_info.callable_class.ir.attributes[name] = target.type + return builder.add( + GetAttr(builder.fn_info.callable_class.self_reg, name, arg.line) + ) + + assert isinstance(target, AssignmentTargetRegister), target + reg = target.register + if not reg.type.error_overlap: + builder.assign_if_null(target.register, get_default, arg.initializer.line) + else: + builder.assign_if_bitmap_unset( + target.register, get_default, nb, arg.initializer.line + ) + nb += 1 + + +def remangle_redefinition_name(name: str) -> str: + """Remangle names produced by mypy when allow-redefinition is used and a name + is used with multiple types within a single block. + + We only need to do this for locals, because the name is used as the name of the register; + for globals, the name itself is stored in a register for the purpose of doing dict + lookups. + """ + return name.replace("'", "__redef__") + + +def get_call_target_fullname(ref: RefExpr) -> str: + if isinstance(ref.node, TypeAlias): + # Resolve simple type aliases. In calls they evaluate to the type they point to. + target = get_proper_type(ref.node.target) + if isinstance(target, Instance): + return target.type.fullname + return ref.fullname + + +def create_type_params( + builder: IRBuilder, typing_mod: Value, type_args: list[TypeParam], line: int +) -> list[Value]: + """Create objects representing various kinds of Python 3.12 type parameters. + + The "typing_mod" argument is the "_typing" module object. The type objects + are looked up from it. + + The returned list has one item for each "type_args" item, in the same order. + Each item is either a TypeVar, TypeVarTuple or ParamSpec instance. 
+ """ + tvs = [] + type_var_imported: Value | None = None + for type_param in type_args: + if type_param.kind == TYPE_VAR_KIND: + if type_var_imported: + # Reuse previously imported value as a minor optimization + tvt = type_var_imported + else: + tvt = builder.py_get_attr(typing_mod, "TypeVar", line) + type_var_imported = tvt + elif type_param.kind == TYPE_VAR_TUPLE_KIND: + tvt = builder.py_get_attr(typing_mod, "TypeVarTuple", line) + else: + assert type_param.kind == PARAM_SPEC_KIND + tvt = builder.py_get_attr(typing_mod, "ParamSpec", line) + if type_param.kind != TYPE_VAR_TUPLE_KIND: + # To match runtime semantics, pass infer_variance=True + tv = builder.py_call( + tvt, + [builder.load_str(type_param.name), builder.true()], + line, + arg_kinds=[ARG_POS, ARG_NAMED], + arg_names=[None, "infer_variance"], + ) + else: + tv = builder.py_call(tvt, [builder.load_str(type_param.name)], line) + builder.init_type_var(tv, type_param.name, line) + tvs.append(tv) + return tvs + + +def calculate_arg_defaults( + builder: IRBuilder, + fn_info: FuncInfo, + func_reg: Value | None, + symtable: dict[SymbolNode, SymbolTarget], +) -> None: + """Calculate default argument values and store them. + + They are stored in statics for top level functions and in + the function objects for nested functions (while constants are + still stored computed on demand). + """ + fitem = fn_info.fitem + for arg in fitem.arguments: + # Constant values don't get stored but just recomputed + if arg.initializer and not is_constant(arg.initializer): + value = builder.coerce( + builder.accept(arg.initializer), symtable[arg.variable].type, arg.line + ) + if not fn_info.is_nested: + name = fitem.fullname + "." + arg.variable.name + builder.add(InitStatic(value, name, builder.module_name)) + else: + assert func_reg is not None + builder.add(SetAttr(func_reg, arg.variable.name, value, arg.line)) diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/callable_class.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/callable_class.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..a011acc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/callable_class.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/callable_class.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/callable_class.py new file mode 100644 index 0000000..bbd1b90 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/callable_class.py @@ -0,0 +1,174 @@ +"""Generate a class that represents a nested function. + +The class defines __call__ for calling the function and allows access to +non-local variables defined in outer scopes. +""" + +from __future__ import annotations + +from mypyc.common import ENV_ATTR_NAME, SELF_NAME +from mypyc.ir.class_ir import ClassIR +from mypyc.ir.func_ir import FuncDecl, FuncIR, FuncSignature, RuntimeArg +from mypyc.ir.ops import BasicBlock, Call, Register, Return, SetAttr, Value +from mypyc.ir.rtypes import RInstance, object_rprimitive +from mypyc.irbuild.builder import IRBuilder +from mypyc.irbuild.context import FuncInfo, ImplicitClass +from mypyc.primitives.misc_ops import method_new_op + + +def setup_callable_class(builder: IRBuilder) -> None: + """Generate an (incomplete) callable class representing a function. + + This can be a nested function or a function within a non-extension + class. Also set up the 'self' variable for that class. 
+ + This takes the most recently visited function and returns a + ClassIR to represent that function. Each callable class contains + an environment attribute which points to another ClassIR + representing the environment class where some of its variables can + be accessed. + + Note that some methods, such as '__call__', are not yet + created here. Use additional functions, such as + add_call_to_callable_class(), to add them. + + Return a newly constructed ClassIR representing the callable + class for the nested function. + """ + # Check to see that the name has not already been taken. If so, + # rename the class. We allow multiple uses of the same function + # name because this is valid in if-else blocks. Example: + # + # if True: + # def foo(): ----> foo_obj() + # return True + # else: + # def foo(): ----> foo_obj_0() + # return False + name = base_name = f"{builder.fn_info.namespaced_name()}_obj" + count = 0 + while name in builder.callable_class_names: + name = base_name + "_" + str(count) + count += 1 + builder.callable_class_names.add(name) + + # Define the actual callable class ClassIR, and set its + # environment to point at the previously defined environment + # class. + callable_class_ir = ClassIR(name, builder.module_name, is_generated=True, is_final_class=True) + callable_class_ir.reuse_freed_instance = True + + # The functools @wraps decorator attempts to call setattr on + # nested functions, so we create a dict for these nested + # functions. + # https://github.com/python/cpython/blob/3.7/Lib/functools.py#L58 + if builder.fn_info.is_nested: + callable_class_ir.has_dict = True + + # If the enclosing class doesn't contain nested (which will happen if + # this is a toplevel lambda), don't set up an environment. + if builder.fn_infos[-2].contains_nested: + callable_class_ir.attributes[ENV_ATTR_NAME] = RInstance(builder.fn_infos[-2].env_class) + callable_class_ir.mro = [callable_class_ir] + builder.fn_info.callable_class = ImplicitClass(callable_class_ir) + builder.classes.append(callable_class_ir) + + # Add a 'self' variable to the environment of the callable class, + # and store that variable in a register to be accessed later. + self_target = builder.add_self_to_env(callable_class_ir) + builder.fn_info.callable_class.self_reg = builder.read(self_target, builder.fn_info.fitem.line) + + +def add_call_to_callable_class( + builder: IRBuilder, + args: list[Register], + blocks: list[BasicBlock], + sig: FuncSignature, + fn_info: FuncInfo, +) -> FuncIR: + """Generate a '__call__' method for a callable class representing a nested function. + + This takes the blocks and signature associated with a function + definition and uses those to build the '__call__' method of a + given callable class, used to represent that function. + """ + # Since we create a method, we also add a 'self' parameter. 
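+    # The trailing implicit bitmap arguments (used to record whether arguments
+    # with error-overlapping types were supplied) are dropped from the
+    # signature before 'self' is prepended.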
+ nargs = len(sig.args) - sig.num_bitmap_args + sig = FuncSignature( + (RuntimeArg(SELF_NAME, object_rprimitive),) + sig.args[:nargs], sig.ret_type + ) + call_fn_decl = FuncDecl("__call__", fn_info.callable_class.ir.name, builder.module_name, sig) + call_fn_ir = FuncIR( + call_fn_decl, args, blocks, fn_info.fitem.line, traceback_name=fn_info.fitem.name + ) + fn_info.callable_class.ir.methods["__call__"] = call_fn_ir + fn_info.callable_class.ir.method_decls["__call__"] = call_fn_decl + return call_fn_ir + + +def add_get_to_callable_class(builder: IRBuilder, fn_info: FuncInfo) -> None: + """Generate the '__get__' method for a callable class.""" + line = fn_info.fitem.line + with builder.enter_method( + fn_info.callable_class.ir, + "__get__", + object_rprimitive, + fn_info, + self_type=object_rprimitive, + ): + instance = builder.add_argument("instance", object_rprimitive) + builder.add_argument("owner", object_rprimitive) + + # If accessed through the class, just return the callable + # object. If accessed through an object, create a new bound + # instance method object. + instance_block, class_block = BasicBlock(), BasicBlock() + comparison = builder.translate_is_op( + builder.read(instance), builder.none_object(), "is", line + ) + builder.add_bool_branch(comparison, class_block, instance_block) + + builder.activate_block(class_block) + builder.add(Return(builder.self())) + + builder.activate_block(instance_block) + builder.add( + Return(builder.call_c(method_new_op, [builder.self(), builder.read(instance)], line)) + ) + + +def instantiate_callable_class(builder: IRBuilder, fn_info: FuncInfo) -> Value: + """Create an instance of a callable class for a function. + + Calls to the function will actually call this instance. + + Note that fn_info refers to the function being assigned, whereas + builder.fn_info refers to the function encapsulating the function + being turned into a callable class. + """ + fitem = fn_info.fitem + func_reg = builder.add(Call(fn_info.callable_class.ir.ctor, [], fitem.line)) + + # Set the environment attribute of the callable class to point at + # the environment class defined in the callable class' immediate + # outer scope. Note that there are three possible environment + # class registers we may use. This depends on what the encapsulating + # (parent) function is: + # + # - A nested function: the callable class is instantiated + # from the current callable class' '__call__' function, and hence + # the callable class' environment register is used. + # - A generator function: the callable class is instantiated + # from the '__next__' method of the generator class, and hence the + # environment of the generator class is used. + # - Regular function: we use the environment of the original function. 
+ curr_env_reg = None + if builder.fn_info.is_generator: + curr_env_reg = builder.fn_info.generator_class.curr_env_reg + elif builder.fn_info.is_nested: + curr_env_reg = builder.fn_info.callable_class.curr_env_reg + elif builder.fn_info.contains_nested: + curr_env_reg = builder.fn_info.curr_env_reg + if curr_env_reg: + builder.add(SetAttr(func_reg, ENV_ATTR_NAME, curr_env_reg, fitem.line)) + return func_reg diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/classdef.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/classdef.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..f1d5952 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/classdef.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/classdef.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/classdef.py new file mode 100644 index 0000000..324b44b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/classdef.py @@ -0,0 +1,922 @@ +"""Transform class definitions from the mypy AST form to IR.""" + +from __future__ import annotations + +from abc import abstractmethod +from typing import Callable, Final + +from mypy.nodes import ( + EXCLUDED_ENUM_ATTRIBUTES, + TYPE_VAR_TUPLE_KIND, + AssignmentStmt, + CallExpr, + ClassDef, + Decorator, + EllipsisExpr, + ExpressionStmt, + FuncDef, + Lvalue, + MemberExpr, + NameExpr, + OverloadedFuncDef, + PassStmt, + RefExpr, + StrExpr, + TempNode, + TypeInfo, + TypeParam, + is_class_var, +) +from mypy.types import Instance, UnboundType, get_proper_type +from mypyc.common import PROPSET_PREFIX +from mypyc.ir.class_ir import ClassIR, NonExtClassInfo +from mypyc.ir.func_ir import FuncDecl, FuncSignature +from mypyc.ir.ops import ( + NAMESPACE_TYPE, + BasicBlock, + Branch, + Call, + InitStatic, + LoadAddress, + LoadErrorValue, + LoadStatic, + MethodCall, + Register, + Return, + SetAttr, + TupleSet, + Value, +) +from mypyc.ir.rtypes import ( + RType, + bool_rprimitive, + dict_rprimitive, + is_none_rprimitive, + is_object_rprimitive, + is_optional_type, + object_rprimitive, +) +from mypyc.irbuild.builder import IRBuilder, create_type_params +from mypyc.irbuild.function import ( + gen_property_getter_ir, + gen_property_setter_ir, + handle_ext_method, + handle_non_ext_method, + load_type, +) +from mypyc.irbuild.prepare import GENERATOR_HELPER_NAME +from mypyc.irbuild.util import dataclass_type, get_func_def, is_constant, is_dataclass_decorator +from mypyc.primitives.dict_ops import dict_new_op, exact_dict_set_item_op +from mypyc.primitives.generic_ops import ( + iter_op, + next_op, + py_get_item_op, + py_hasattr_op, + py_setattr_op, +) +from mypyc.primitives.misc_ops import ( + dataclass_sleight_of_hand, + import_op, + not_implemented_op, + py_calc_meta_op, + pytype_from_template_op, + type_object_op, +) +from mypyc.subtype import is_subtype + + +def transform_class_def(builder: IRBuilder, cdef: ClassDef) -> None: + """Create IR for a class definition. + + This can generate both extension (native) and non-extension + classes. These are generated in very different ways. In the + latter case we construct a Python type object at runtime by doing + the equivalent of "type(name, bases, dict)" in IR. Extension + classes are defined via C structs that are generated later in + mypyc.codegen.emitclass. + + This is the main entry point to this module. 
+ """ + if cdef.info not in builder.mapper.type_to_ir: + builder.error("Nested class definitions not supported", cdef.line) + return + + ir = builder.mapper.type_to_ir[cdef.info] + + # We do this check here because the base field of parent + # classes aren't necessarily populated yet at + # prepare_class_def time. + if any(ir.base_mro[i].base != ir.base_mro[i + 1] for i in range(len(ir.base_mro) - 1)): + builder.error("Multiple inheritance is not supported (except for traits)", cdef.line) + + if ir.allow_interpreted_subclasses: + for parent in ir.mro: + if not parent.allow_interpreted_subclasses: + builder.error( + 'Base class "{}" does not allow interpreted subclasses'.format( + parent.fullname + ), + cdef.line, + ) + + # Currently, we only create non-extension classes for classes that are + # decorated or inherit from Enum. Classes decorated with @trait do not + # apply here, and are handled in a different way. + if ir.is_ext_class: + cls_type = dataclass_type(cdef) + if cls_type is None: + cls_builder: ClassBuilder = ExtClassBuilder(builder, cdef) + elif cls_type in ["dataclasses", "attr-auto"]: + cls_builder = DataClassBuilder(builder, cdef) + elif cls_type == "attr": + cls_builder = AttrsClassBuilder(builder, cdef) + else: + raise ValueError(cls_type) + else: + cls_builder = NonExtClassBuilder(builder, cdef) + + for stmt in cdef.defs.body: + if ( + isinstance(stmt, (FuncDef, Decorator, OverloadedFuncDef)) + and stmt.name == GENERATOR_HELPER_NAME + ): + builder.error( + f'Method name "{stmt.name}" is reserved for mypyc internal use', stmt.line + ) + + if isinstance(stmt, OverloadedFuncDef) and stmt.is_property: + if isinstance(cls_builder, NonExtClassBuilder): + # properties with both getters and setters in non_extension + # classes not supported + builder.error("Property setters not supported in non-extension classes", stmt.line) + for item in stmt.items: + with builder.catch_errors(stmt.line): + cls_builder.add_method(get_func_def(item)) + elif isinstance(stmt, (FuncDef, Decorator, OverloadedFuncDef)): + # Ignore plugin generated methods (since they have no + # bodies to compile and will need to have the bodies + # provided by some other mechanism.) + if cdef.info.names[stmt.name].plugin_generated: + continue + with builder.catch_errors(stmt.line): + cls_builder.add_method(get_func_def(stmt)) + elif isinstance(stmt, PassStmt) or ( + isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, EllipsisExpr) + ): + continue + elif isinstance(stmt, AssignmentStmt): + if len(stmt.lvalues) != 1: + builder.error("Multiple assignment in class bodies not supported", stmt.line) + continue + lvalue = stmt.lvalues[0] + if not isinstance(lvalue, NameExpr): + builder.error( + "Only assignment to variables is supported in class bodies", stmt.line + ) + continue + # We want to collect class variables in a dictionary for both real + # non-extension classes and fake dataclass ones. + cls_builder.add_attr(lvalue, stmt) + + elif isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, StrExpr): + # Docstring. 
Ignore + pass + else: + builder.error("Unsupported statement in class body", stmt.line) + + # Generate implicit property setters/getters + for name, decl in ir.method_decls.items(): + if decl.implicit and decl.is_prop_getter: + getter_ir = gen_property_getter_ir(builder, decl, cdef, ir.is_trait) + builder.functions.append(getter_ir) + ir.methods[getter_ir.decl.name] = getter_ir + + setter_ir = None + setter_name = PROPSET_PREFIX + name + if setter_name in ir.method_decls: + setter_ir = gen_property_setter_ir( + builder, ir.method_decls[setter_name], cdef, ir.is_trait + ) + builder.functions.append(setter_ir) + ir.methods[setter_name] = setter_ir + + ir.properties[name] = (getter_ir, setter_ir) + # TODO: Generate glue method if needed? + # TODO: Do we need interpreted glue methods? Maybe not? + + cls_builder.finalize(ir) + + +class ClassBuilder: + """Create IR for a class definition. + + This is an abstract base class. + """ + + def __init__(self, builder: IRBuilder, cdef: ClassDef) -> None: + self.builder = builder + self.cdef = cdef + self.attrs_to_cache: list[tuple[Lvalue, RType]] = [] + + @abstractmethod + def add_method(self, fdef: FuncDef) -> None: + """Add a method to the class IR""" + + @abstractmethod + def add_attr(self, lvalue: NameExpr, stmt: AssignmentStmt) -> None: + """Add an attribute to the class IR""" + + @abstractmethod + def finalize(self, ir: ClassIR) -> None: + """Perform any final operations to complete the class IR""" + + +class NonExtClassBuilder(ClassBuilder): + def __init__(self, builder: IRBuilder, cdef: ClassDef) -> None: + super().__init__(builder, cdef) + self.non_ext = self.create_non_ext_info() + + def create_non_ext_info(self) -> NonExtClassInfo: + non_ext_bases = populate_non_ext_bases(self.builder, self.cdef) + non_ext_metaclass = find_non_ext_metaclass(self.builder, self.cdef, non_ext_bases) + non_ext_dict = setup_non_ext_dict( + self.builder, self.cdef, non_ext_metaclass, non_ext_bases + ) + # We populate __annotations__ for non-extension classes + # because dataclasses uses it to determine which attributes to compute on. + # TODO: Maybe generate more precise types for annotations + non_ext_anns = self.builder.call_c(dict_new_op, [], self.cdef.line) + return NonExtClassInfo(non_ext_dict, non_ext_bases, non_ext_anns, non_ext_metaclass) + + def add_method(self, fdef: FuncDef) -> None: + handle_non_ext_method(self.builder, self.non_ext, self.cdef, fdef) + + def add_attr(self, lvalue: NameExpr, stmt: AssignmentStmt) -> None: + add_non_ext_class_attr_ann(self.builder, self.non_ext, lvalue, stmt) + add_non_ext_class_attr( + self.builder, self.non_ext, lvalue, stmt, self.cdef, self.attrs_to_cache + ) + + def finalize(self, ir: ClassIR) -> None: + # Dynamically create the class via the type constructor + non_ext_class = load_non_ext_class(self.builder, ir, self.non_ext, self.cdef.line) + non_ext_class = load_decorated_class(self.builder, self.cdef, non_ext_class) + + # Try to avoid contention when using free threading. 
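+        # (Immortal objects are exempt from reference counting, so threads don't
+        # contend on the class object's refcount.)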
+ self.builder.set_immortal_if_free_threaded(non_ext_class, self.cdef.line) + + # Save the decorated class + self.builder.add( + InitStatic(non_ext_class, self.cdef.name, self.builder.module_name, NAMESPACE_TYPE) + ) + + # Add the non-extension class to the dict + self.builder.call_c( + exact_dict_set_item_op, + [ + self.builder.load_globals_dict(), + self.builder.load_str(self.cdef.name), + non_ext_class, + ], + self.cdef.line, + ) + + # Cache any cacheable class attributes + cache_class_attrs(self.builder, self.attrs_to_cache, self.cdef) + + +class ExtClassBuilder(ClassBuilder): + def __init__(self, builder: IRBuilder, cdef: ClassDef) -> None: + super().__init__(builder, cdef) + # If the class is not decorated, generate an extension class for it. + self.type_obj: Value | None = allocate_class(builder, cdef) + + def skip_attr_default(self, name: str, stmt: AssignmentStmt) -> bool: + """Controls whether to skip generating a default for an attribute.""" + return False + + def add_method(self, fdef: FuncDef) -> None: + handle_ext_method(self.builder, self.cdef, fdef) + + def add_attr(self, lvalue: NameExpr, stmt: AssignmentStmt) -> None: + # Variable declaration with no body + if isinstance(stmt.rvalue, TempNode): + return + # Only treat marked class variables as class variables. + if not (is_class_var(lvalue) or stmt.is_final_def): + return + typ = self.builder.load_native_type_object(self.cdef.fullname) + value = self.builder.accept(stmt.rvalue) + self.builder.primitive_op( + py_setattr_op, [typ, self.builder.load_str(lvalue.name), value], stmt.line + ) + if self.builder.non_function_scope() and stmt.is_final_def: + self.builder.init_final_static(lvalue, value, self.cdef.name) + + def finalize(self, ir: ClassIR) -> None: + attrs_with_defaults, default_assignments = find_attr_initializers( + self.builder, self.cdef, self.skip_attr_default + ) + ir.attrs_with_defaults.update(attrs_with_defaults) + generate_attr_defaults_init(self.builder, self.cdef, default_assignments) + create_ne_from_eq(self.builder, self.cdef) + + +class DataClassBuilder(ExtClassBuilder): + # controls whether an __annotations__ attribute should be added to the class + # __dict__. This is not desirable for attrs classes where auto_attribs is + # disabled, as attrs will reject it. + add_annotations_to_dict = True + + def __init__(self, builder: IRBuilder, cdef: ClassDef) -> None: + super().__init__(builder, cdef) + self.non_ext = self.create_non_ext_info() + + def create_non_ext_info(self) -> NonExtClassInfo: + """Set up a NonExtClassInfo to track dataclass attributes. + + In addition to setting up a normal extension class for dataclasses, + we also collect its class attributes like a non-extension class so + that we can hand them to the dataclass decorator. + """ + return NonExtClassInfo( + self.builder.call_c(dict_new_op, [], self.cdef.line), + self.builder.add(TupleSet([], self.cdef.line)), + self.builder.call_c(dict_new_op, [], self.cdef.line), + self.builder.add(LoadAddress(type_object_op.type, type_object_op.src, self.cdef.line)), + ) + + def skip_attr_default(self, name: str, stmt: AssignmentStmt) -> bool: + return stmt.type is not None + + def get_type_annotation(self, stmt: AssignmentStmt) -> TypeInfo | None: + # We populate __annotations__ because dataclasses uses it to determine + # which attributes to compute on. 
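+        # Only a plain nominal type (Instance) can be mapped back to a TypeInfo
+        # here; unions, callables and other forms yield None.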
+ ann_type = get_proper_type(stmt.type) + if isinstance(ann_type, Instance): + return ann_type.type + return None + + def add_attr(self, lvalue: NameExpr, stmt: AssignmentStmt) -> None: + add_non_ext_class_attr_ann( + self.builder, self.non_ext, lvalue, stmt, self.get_type_annotation + ) + add_non_ext_class_attr( + self.builder, self.non_ext, lvalue, stmt, self.cdef, self.attrs_to_cache + ) + super().add_attr(lvalue, stmt) + + def finalize(self, ir: ClassIR) -> None: + """Generate code to finish instantiating a dataclass. + + This works by replacing all of the attributes on the class + (which will be descriptors) with whatever they would be in a + non-extension class, calling dataclass, then switching them back. + + The resulting class is an extension class and instances of it do not + have a __dict__ (unless something else requires it). + All methods written explicitly in the source are compiled and + may be called through the vtable while the methods generated + by dataclasses are interpreted and may not be. + + (If we just called dataclass without doing this, it would think that all + of the descriptors for our attributes are default values and generate an + incorrect constructor. We need to do the switch so that dataclass gets the + appropriate defaults.) + """ + super().finalize(ir) + assert self.type_obj + add_dunders_to_non_ext_dict( + self.builder, self.non_ext, self.cdef.line, self.add_annotations_to_dict + ) + dec = self.builder.accept( + next(d for d in self.cdef.decorators if is_dataclass_decorator(d)) + ) + dataclass_type_val = self.builder.load_str(dataclass_type(self.cdef) or "unknown") + self.builder.call_c( + dataclass_sleight_of_hand, + [dec, self.type_obj, self.non_ext.dict, self.non_ext.anns, dataclass_type_val], + self.cdef.line, + ) + + +class AttrsClassBuilder(DataClassBuilder): + """Create IR for an attrs class where auto_attribs=False (the default). + + When auto_attribs is enabled, attrs classes behave similarly to dataclasses + (i.e. types are stored as annotations on the class) and are thus handled + by DataClassBuilder, but when auto_attribs is disabled the types are + provided via attr.ib(type=...) 
+ """ + + add_annotations_to_dict = False + + def skip_attr_default(self, name: str, stmt: AssignmentStmt) -> bool: + return True + + def get_type_annotation(self, stmt: AssignmentStmt) -> TypeInfo | None: + if isinstance(stmt.rvalue, CallExpr): + # find the type arg in `attr.ib(type=str)` + callee = stmt.rvalue.callee + if ( + isinstance(callee, MemberExpr) + and callee.fullname in ["attr.ib", "attr.attr"] + and "type" in stmt.rvalue.arg_names + ): + index = stmt.rvalue.arg_names.index("type") + type_name = stmt.rvalue.args[index] + if isinstance(type_name, NameExpr) and isinstance(type_name.node, TypeInfo): + lvalue = stmt.lvalues[0] + assert isinstance(lvalue, NameExpr), lvalue + return type_name.node + return None + + +def allocate_class(builder: IRBuilder, cdef: ClassDef) -> Value: + # OK AND NOW THE FUN PART + base_exprs = cdef.base_type_exprs + cdef.removed_base_type_exprs + new_style_type_args = cdef.type_args + if new_style_type_args: + bases = [make_generic_base_class(builder, cdef.fullname, new_style_type_args, cdef.line)] + else: + bases = [] + + if base_exprs or new_style_type_args: + bases.extend([builder.accept(x) for x in base_exprs]) + tp_bases = builder.new_tuple(bases, cdef.line) + else: + tp_bases = builder.add(LoadErrorValue(object_rprimitive, is_borrowed=True)) + modname = builder.load_str(builder.module_name) + template = builder.add( + LoadStatic(object_rprimitive, cdef.name + "_template", builder.module_name, NAMESPACE_TYPE) + ) + # Create the class + tp = builder.call_c(pytype_from_template_op, [template, tp_bases, modname], cdef.line) + + # Set type object to be immortal if free threaded, as otherwise reference count contention + # can cause a big performance hit. + builder.set_immortal_if_free_threaded(tp, cdef.line) + + # Immediately fix up the trait vtables, before doing anything with the class. + ir = builder.mapper.type_to_ir[cdef.info] + if not ir.is_trait and not ir.builtin_base: + builder.add( + Call( + FuncDecl( + cdef.name + "_trait_vtable_setup", + None, + builder.module_name, + FuncSignature([], bool_rprimitive), + ), + [], + -1, + ) + ) + # Populate a '__mypyc_attrs__' field containing the list of attrs + builder.primitive_op( + py_setattr_op, + [ + tp, + builder.load_str("__mypyc_attrs__"), + create_mypyc_attrs_tuple(builder, builder.mapper.type_to_ir[cdef.info], cdef.line), + ], + cdef.line, + ) + + # Save the class + builder.add(InitStatic(tp, cdef.name, builder.module_name, NAMESPACE_TYPE)) + + # Add it to the dict + builder.call_c( + exact_dict_set_item_op, + [builder.load_globals_dict(), builder.load_str(cdef.name), tp], + cdef.line, + ) + + return tp + + +def make_generic_base_class( + builder: IRBuilder, fullname: str, type_args: list[TypeParam], line: int +) -> Value: + """Construct Generic[...] base class object for a new-style generic class (Python 3.12).""" + mod = builder.call_c(import_op, [builder.load_str("_typing")], line) + tvs = create_type_params(builder, mod, type_args, line) + args = [] + for tv, type_param in zip(tvs, type_args): + if type_param.kind == TYPE_VAR_TUPLE_KIND: + # Evaluate *Ts for a TypeVarTuple + it = builder.primitive_op(iter_op, [tv], line) + tv = builder.call_c(next_op, [it], line) + args.append(tv) + + gent = builder.py_get_attr(mod, "Generic", line) + if len(args) == 1: + arg = args[0] + else: + arg = builder.new_tuple(args, line) + + base = builder.primitive_op(py_get_item_op, [gent, arg], line) + return base + + +# Mypy uses these internally as base classes of TypedDict classes. 
These are +# lies and don't have any runtime equivalent. +MAGIC_TYPED_DICT_CLASSES: Final[tuple[str, ...]] = ( + "typing._TypedDict", + "typing_extensions._TypedDict", +) + + +def populate_non_ext_bases(builder: IRBuilder, cdef: ClassDef) -> Value: + """Create base class tuple of a non-extension class. + + The tuple is passed to the metaclass constructor. + """ + is_named_tuple = cdef.info.is_named_tuple + ir = builder.mapper.type_to_ir[cdef.info] + bases = [] + for cls in (b.type for b in cdef.info.bases): + if cls.fullname == "builtins.object": + continue + if is_named_tuple and cls.fullname in ( + "typing.Sequence", + "typing.Iterable", + "typing.Collection", + "typing.Reversible", + "typing.Container", + "typing.Sized", + ): + # HAX: Synthesized base classes added by mypy don't exist at runtime, so skip them. + # This could break if they were added explicitly, though... + continue + # Add the current class to the base classes list of concrete subclasses + if cls in builder.mapper.type_to_ir: + base_ir = builder.mapper.type_to_ir[cls] + if base_ir.children is not None: + base_ir.children.append(ir) + + if cls.fullname in MAGIC_TYPED_DICT_CLASSES: + # HAX: Mypy internally represents TypedDict classes differently from what + # should happen at runtime. Replace with something that works. + module = "typing" + name = "_TypedDict" + base = builder.get_module_attr(module, name, cdef.line) + elif is_named_tuple and cls.fullname == "builtins.tuple": + name = "_NamedTuple" + base = builder.get_module_attr("typing", name, cdef.line) + else: + cls_module = cls.fullname.rsplit(".", 1)[0] + if cls_module == builder.current_module: + base = builder.load_global_str(cls.name, cdef.line) + else: + base = builder.load_module_attr_by_fullname(cls.fullname, cdef.line) + bases.append(base) + if cls.fullname in MAGIC_TYPED_DICT_CLASSES: + # The remaining base classes are synthesized by mypy and should be ignored. + break + return builder.new_tuple(bases, cdef.line) + + +def find_non_ext_metaclass(builder: IRBuilder, cdef: ClassDef, bases: Value) -> Value: + """Find the metaclass of a class from its defs and bases.""" + if cdef.metaclass: + declared_metaclass = builder.accept(cdef.metaclass) + else: + if cdef.info.typeddict_type is not None: + # In Python 3.9, the metaclass for class-based TypedDict is typing._TypedDictMeta. + # We can't easily calculate it generically, so special case it. + return builder.get_module_attr("typing", "_TypedDictMeta", cdef.line) + elif cdef.info.is_named_tuple: + # In Python 3.9, the metaclass for class-based NamedTuple is typing.NamedTupleMeta. + # We can't easily calculate it generically, so special case it. + return builder.get_module_attr("typing", "NamedTupleMeta", cdef.line) + + declared_metaclass = builder.add( + LoadAddress(type_object_op.type, type_object_op.src, cdef.line) + ) + + return builder.call_c(py_calc_meta_op, [declared_metaclass, bases], cdef.line) + + +def setup_non_ext_dict( + builder: IRBuilder, cdef: ClassDef, metaclass: Value, bases: Value +) -> Value: + """Initialize the class dictionary for a non-extension class. + + This class dictionary is passed to the metaclass constructor. + """ + # Check if the metaclass defines a __prepare__ method, and if so, call it. 
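+    # This mirrors ordinary class creation (PEP 3115): the class namespace is
+    # metaclass.__prepare__(name, bases) when available, otherwise a new dict.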
+ has_prepare = builder.primitive_op( + py_hasattr_op, [metaclass, builder.load_str("__prepare__")], cdef.line + ) + + non_ext_dict = Register(dict_rprimitive) + + true_block, false_block, exit_block = BasicBlock(), BasicBlock(), BasicBlock() + builder.add_bool_branch(has_prepare, true_block, false_block) + + builder.activate_block(true_block) + cls_name = builder.load_str(cdef.name) + prepare_meth = builder.py_get_attr(metaclass, "__prepare__", cdef.line) + prepare_dict = builder.py_call(prepare_meth, [cls_name, bases], cdef.line) + builder.assign(non_ext_dict, prepare_dict, cdef.line) + builder.goto(exit_block) + + builder.activate_block(false_block) + builder.assign(non_ext_dict, builder.call_c(dict_new_op, [], cdef.line), cdef.line) + builder.goto(exit_block) + builder.activate_block(exit_block) + + return non_ext_dict + + +def add_non_ext_class_attr_ann( + builder: IRBuilder, + non_ext: NonExtClassInfo, + lvalue: NameExpr, + stmt: AssignmentStmt, + get_type_info: Callable[[AssignmentStmt], TypeInfo | None] | None = None, +) -> None: + """Add a class attribute to __annotations__ of a non-extension class.""" + # FIXME: try to better preserve the special forms and type parameters of generics. + typ: Value | None = None + if get_type_info is not None: + type_info = get_type_info(stmt) + if type_info: + # NOTE: Using string type information is similar to using + # `from __future__ import annotations` in standard python. + # NOTE: For string types we need to use the fullname since it + # includes the module. If string type doesn't have the module, + # @dataclass will try to get the current module and fail since the + # current module is not in sys.modules. + if builder.current_module == type_info.module_name and stmt.line < type_info.line: + typ = builder.load_str(type_info.fullname) + else: + typ = load_type(builder, type_info, stmt.unanalyzed_type, stmt.line) + + if typ is None: + # FIXME: if get_type_info is not provided, don't fall back to stmt.type? + ann_type = get_proper_type(stmt.type) + if ( + isinstance(stmt.unanalyzed_type, UnboundType) + and stmt.unanalyzed_type.original_str_expr is not None + ): + # Annotation is a forward reference, so don't attempt to load the actual + # type and load the string instead. + # + # TODO: is it possible to determine whether a non-string annotation is + # actually a forward reference due to the __annotations__ future? + typ = builder.load_str(stmt.unanalyzed_type.original_str_expr) + elif isinstance(ann_type, Instance): + typ = load_type(builder, ann_type.type, stmt.unanalyzed_type, stmt.line) + else: + typ = builder.add(LoadAddress(type_object_op.type, type_object_op.src, stmt.line)) + + key = builder.load_str(lvalue.name) + builder.call_c(exact_dict_set_item_op, [non_ext.anns, key, typ], stmt.line) + + +def add_non_ext_class_attr( + builder: IRBuilder, + non_ext: NonExtClassInfo, + lvalue: NameExpr, + stmt: AssignmentStmt, + cdef: ClassDef, + attr_to_cache: list[tuple[Lvalue, RType]], +) -> None: + """Add a class attribute to __dict__ of a non-extension class.""" + # Only add the attribute to the __dict__ if the assignment is of the form: + # x: type = value (don't add attributes of the form 'x: type' to the __dict__). + if not isinstance(stmt.rvalue, TempNode): + rvalue = builder.accept(stmt.rvalue) + builder.add_to_non_ext_dict(non_ext, lvalue.name, rvalue, stmt.line) + # We cache enum attributes to speed up enum attribute lookup since they + # are final. + if ( + cdef.info.bases + # Enum class must be the last parent class. 
+ and cdef.info.bases[-1].type.is_enum + # Skip these since Enum will remove it + and lvalue.name not in EXCLUDED_ENUM_ATTRIBUTES + ): + # Enum values are always boxed, so use object_rprimitive. + attr_to_cache.append((lvalue, object_rprimitive)) + + +def find_attr_initializers( + builder: IRBuilder, cdef: ClassDef, skip: Callable[[str, AssignmentStmt], bool] | None = None +) -> tuple[set[str], list[AssignmentStmt]]: + """Find initializers of attributes in a class body. + + If provided, the skip arg should be a callable which will return whether + to skip generating a default for an attribute. It will be passed the name of + the attribute and the corresponding AssignmentStmt. + """ + cls = builder.mapper.type_to_ir[cdef.info] + if cls.builtin_base: + return set(), [] + + attrs_with_defaults = set() + + # Pull out all assignments in classes in the mro so we can initialize them + # TODO: Support nested statements + default_assignments = [] + for info in reversed(cdef.info.mro): + if info not in builder.mapper.type_to_ir: + continue + for stmt in info.defn.defs.body: + if ( + isinstance(stmt, AssignmentStmt) + and isinstance(stmt.lvalues[0], NameExpr) + and not is_class_var(stmt.lvalues[0]) + and not isinstance(stmt.rvalue, TempNode) + ): + name = stmt.lvalues[0].name + if name == "__slots__": + continue + + if name == "__deletable__": + check_deletable_declaration(builder, cls, stmt.line) + continue + + if skip is not None and skip(name, stmt): + continue + + attr_type = cls.attr_type(name) + + # If the attribute is initialized to None and type isn't optional, + # doesn't initialize it to anything (special case for "# type:" comments). + if isinstance(stmt.rvalue, RefExpr) and stmt.rvalue.fullname == "builtins.None": + if ( + not is_optional_type(attr_type) + and not is_object_rprimitive(attr_type) + and not is_none_rprimitive(attr_type) + ): + continue + + attrs_with_defaults.add(name) + default_assignments.append(stmt) + + return attrs_with_defaults, default_assignments + + +def generate_attr_defaults_init( + builder: IRBuilder, cdef: ClassDef, default_assignments: list[AssignmentStmt] +) -> None: + """Generate an initialization method for default attr values (from class vars).""" + if not default_assignments: + return + cls = builder.mapper.type_to_ir[cdef.info] + if cls.builtin_base: + return + + with builder.enter_method(cls, "__mypyc_defaults_setup", bool_rprimitive): + self_var = builder.self() + for stmt in default_assignments: + lvalue = stmt.lvalues[0] + assert isinstance(lvalue, NameExpr), lvalue + if not stmt.is_final_def and not is_constant(stmt.rvalue): + builder.warning("Unsupported default attribute value", stmt.rvalue.line) + + attr_type = cls.attr_type(lvalue.name) + val = builder.coerce(builder.accept(stmt.rvalue), attr_type, stmt.line) + init = SetAttr(self_var, lvalue.name, val, -1) + init.mark_as_initializer() + builder.add(init) + + builder.add(Return(builder.true())) + + +def check_deletable_declaration(builder: IRBuilder, cl: ClassIR, line: int) -> None: + for attr in cl.deletable: + if attr not in cl.attributes: + if not cl.has_attr(attr): + builder.error(f'Attribute "{attr}" not defined', line) + continue + for base in cl.mro: + if attr in base.property_types: + builder.error(f'Cannot make property "{attr}" deletable', line) + break + else: + _, base = cl.attr_details(attr) + builder.error( + ('Attribute "{}" not defined in "{}" ' + '(defined in "{}")').format( + attr, cl.name, base.name + ), + line, + ) + + +def create_ne_from_eq(builder: IRBuilder, cdef: 
ClassDef) -> None: + """Create a "__ne__" method from a "__eq__" method (if only latter exists).""" + cls = builder.mapper.type_to_ir[cdef.info] + if cls.has_method("__eq__") and not cls.has_method("__ne__"): + gen_glue_ne_method(builder, cls, cdef.line) + + +def gen_glue_ne_method(builder: IRBuilder, cls: ClassIR, line: int) -> None: + """Generate a "__ne__" method from a "__eq__" method.""" + func_ir = cls.get_method("__eq__") + assert func_ir + eq_sig = func_ir.decl.sig + strict_typing = builder.options.strict_dunders_typing + with builder.enter_method(cls, "__ne__", eq_sig.ret_type): + rhs_type = eq_sig.args[0].type if strict_typing else object_rprimitive + rhs_arg = builder.add_argument("rhs", rhs_type) + eqval = builder.add(MethodCall(builder.self(), "__eq__", [rhs_arg], line)) + + can_return_not_implemented = is_subtype(not_implemented_op.type, eq_sig.ret_type) + return_bool = is_subtype(eq_sig.ret_type, bool_rprimitive) + + if not strict_typing or can_return_not_implemented: + # If __eq__ returns NotImplemented, then __ne__ should also + not_implemented_block, regular_block = BasicBlock(), BasicBlock() + not_implemented = builder.add( + LoadAddress(not_implemented_op.type, not_implemented_op.src, line) + ) + builder.add( + Branch( + builder.translate_is_op(eqval, not_implemented, "is", line), + not_implemented_block, + regular_block, + Branch.BOOL, + ) + ) + builder.activate_block(regular_block) + rettype = bool_rprimitive if return_bool and strict_typing else object_rprimitive + retval = builder.coerce( + builder.builder.unary_not(eqval, line, likely_bool=True), rettype, line + ) + builder.add(Return(retval)) + builder.activate_block(not_implemented_block) + builder.add(Return(not_implemented)) + else: + rettype = bool_rprimitive if return_bool and strict_typing else object_rprimitive + retval = builder.coerce(builder.unary_op(eqval, "not", line), rettype, line) + builder.add(Return(retval)) + + +def load_non_ext_class( + builder: IRBuilder, ir: ClassIR, non_ext: NonExtClassInfo, line: int +) -> Value: + cls_name = builder.load_str(ir.name) + + add_dunders_to_non_ext_dict(builder, non_ext, line) + + class_type_obj = builder.py_call( + non_ext.metaclass, [cls_name, non_ext.bases, non_ext.dict], line + ) + return class_type_obj + + +def load_decorated_class(builder: IRBuilder, cdef: ClassDef, type_obj: Value) -> Value: + """Apply class decorators to create a decorated (non-extension) class object. + + Given a decorated ClassDef and a register containing a + non-extension representation of the ClassDef created via the type + constructor, applies the corresponding decorator functions on that + decorated ClassDef and returns a register containing the decorated + ClassDef. 
+ """ + decorators = cdef.decorators + dec_class = type_obj + for d in reversed(decorators): + decorator = d.accept(builder.visitor) + assert isinstance(decorator, Value), decorator + dec_class = builder.py_call(decorator, [dec_class], dec_class.line) + return dec_class + + +def cache_class_attrs( + builder: IRBuilder, attrs_to_cache: list[tuple[Lvalue, RType]], cdef: ClassDef +) -> None: + """Add class attributes to be cached to the global cache.""" + typ = builder.load_native_type_object(cdef.info.fullname) + for lval, rtype in attrs_to_cache: + assert isinstance(lval, NameExpr), lval + rval = builder.py_get_attr(typ, lval.name, cdef.line) + builder.init_final_static(lval, rval, cdef.name, type_override=rtype) + + +def create_mypyc_attrs_tuple(builder: IRBuilder, ir: ClassIR, line: int) -> Value: + attrs = [name for ancestor in ir.mro for name in ancestor.attributes] + if ir.inherits_python: + attrs.append("__dict__") + items = [builder.load_str(attr) for attr in attrs] + return builder.new_tuple(items, line) + + +def add_dunders_to_non_ext_dict( + builder: IRBuilder, non_ext: NonExtClassInfo, line: int, add_annotations: bool = True +) -> None: + if add_annotations: + # Add __annotations__ to the class dict. + builder.add_to_non_ext_dict(non_ext, "__annotations__", non_ext.anns, line) + + # We add a __doc__ attribute so if the non-extension class is decorated with the + # dataclass decorator, dataclass will not try to look for __text_signature__. + # https://github.com/python/cpython/blob/3.7/Lib/dataclasses.py#L957 + filler_doc_str = "mypyc filler docstring" + builder.add_to_non_ext_dict(non_ext, "__doc__", builder.load_str(filler_doc_str), line) + builder.add_to_non_ext_dict(non_ext, "__module__", builder.load_str(builder.module_name), line) diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/constant_fold.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/constant_fold.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..ba5c246 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/constant_fold.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/constant_fold.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/constant_fold.py new file mode 100644 index 0000000..b1133f9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/constant_fold.py @@ -0,0 +1,97 @@ +"""Constant folding of IR values. + +For example, 3 + 5 can be constant folded into 8. + +This is mostly like mypy.constant_fold, but we can bind some additional +NameExpr and MemberExpr references here, since we have more knowledge +about which definitions can be trusted -- we constant fold only references +to other compiled modules in the same compilation unit. 
+""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Final, Union + +from mypy.constant_fold import constant_fold_binary_op, constant_fold_unary_op +from mypy.nodes import ( + BytesExpr, + ComplexExpr, + Expression, + FloatExpr, + IntExpr, + MemberExpr, + NameExpr, + OpExpr, + StrExpr, + UnaryExpr, + Var, +) +from mypyc.irbuild.util import bytes_from_str + +if TYPE_CHECKING: + from mypyc.irbuild.builder import IRBuilder + +# All possible result types of constant folding +ConstantValue = Union[int, float, complex, str, bytes] +CONST_TYPES: Final = (int, float, complex, str, bytes) + + +def constant_fold_expr(builder: IRBuilder, expr: Expression) -> ConstantValue | None: + """Return the constant value of an expression for supported operations. + + Return None otherwise. + """ + if isinstance(expr, IntExpr): + return expr.value + if isinstance(expr, FloatExpr): + return expr.value + if isinstance(expr, StrExpr): + return expr.value + if isinstance(expr, BytesExpr): + return bytes_from_str(expr.value) + if isinstance(expr, ComplexExpr): + return expr.value + elif isinstance(expr, NameExpr): + node = expr.node + if isinstance(node, Var) and node.is_final: + final_value = node.final_value + if isinstance(final_value, (CONST_TYPES)): + return final_value + elif isinstance(expr, MemberExpr): + final = builder.get_final_ref(expr) + if final is not None: + fn, final_var, native = final + if final_var.is_final: + final_value = final_var.final_value + if isinstance(final_value, (CONST_TYPES)): + return final_value + elif isinstance(expr, OpExpr): + left = constant_fold_expr(builder, expr.left) + right = constant_fold_expr(builder, expr.right) + if left is not None and right is not None: + return constant_fold_binary_op_extended(expr.op, left, right) + elif isinstance(expr, UnaryExpr): + value = constant_fold_expr(builder, expr.expr) + if value is not None and not isinstance(value, bytes): + return constant_fold_unary_op(expr.op, value) + return None + + +def constant_fold_binary_op_extended( + op: str, left: ConstantValue, right: ConstantValue +) -> ConstantValue | None: + """Like mypy's constant_fold_binary_op(), but includes bytes support. + + mypy cannot use constant folded bytes easily so it's simpler to only support them in mypyc. 
+ """ + if not isinstance(left, bytes) and not isinstance(right, bytes): + return constant_fold_binary_op(op, left, right) + + if op == "+" and isinstance(left, bytes) and isinstance(right, bytes): + return left + right + elif op == "*" and isinstance(left, bytes) and isinstance(right, int): + return left * right + elif op == "*" and isinstance(left, int) and isinstance(right, bytes): + return left * right + + return None diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/context.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/context.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..4018e55 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/context.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/context.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/context.py new file mode 100644 index 0000000..d5a48bf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/context.py @@ -0,0 +1,200 @@ +"""Helpers that store information about functions and the related classes.""" + +from __future__ import annotations + +from mypy.nodes import FuncItem +from mypyc.ir.class_ir import ClassIR +from mypyc.ir.func_ir import INVALID_FUNC_DEF +from mypyc.ir.ops import BasicBlock, Value +from mypyc.irbuild.targets import AssignmentTarget + + +class FuncInfo: + """Contains information about functions as they are generated.""" + + def __init__( + self, + fitem: FuncItem = INVALID_FUNC_DEF, + name: str = "", + class_name: str | None = None, + namespace: str = "", + is_nested: bool = False, + contains_nested: bool = False, + is_decorated: bool = False, + in_non_ext: bool = False, + add_nested_funcs_to_env: bool = False, + ) -> None: + self.fitem = fitem + self.name = name + self.class_name = class_name + self.ns = namespace + # Callable classes implement the '__call__' method, and are used to represent functions + # that are nested inside of other functions. + self._callable_class: ImplicitClass | None = None + # Environment classes are ClassIR instances that contain attributes representing the + # variables in the environment of the function they correspond to. Environment classes are + # generated for functions that contain nested functions. + self._env_class: ClassIR | None = None + # Generator classes implement the '__next__' method, and are used to represent generators + # returned by generator functions. + self._generator_class: GeneratorClass | None = None + # Environment class registers are the local registers associated with instances of an + # environment class, used for getting and setting attributes. curr_env_reg is the register + # associated with the current environment. + self._curr_env_reg: Value | None = None + # These are flags denoting whether a given function is nested, contains a nested function, + # is decorated, or is within a non-extension class. 
+ self.is_nested = is_nested + self.contains_nested = contains_nested + self.is_decorated = is_decorated + self.in_non_ext = in_non_ext + self.add_nested_funcs_to_env = add_nested_funcs_to_env + + # TODO: add field for ret_type: RType = none_rprimitive + + def namespaced_name(self) -> str: + return "_".join(x for x in [self.name, self.class_name, self.ns] if x) + + @property + def is_generator(self) -> bool: + return self.fitem.is_generator or self.fitem.is_coroutine + + @property + def is_coroutine(self) -> bool: + return self.fitem.is_coroutine + + @property + def callable_class(self) -> ImplicitClass: + assert self._callable_class is not None + return self._callable_class + + @callable_class.setter + def callable_class(self, cls: ImplicitClass) -> None: + self._callable_class = cls + + @property + def env_class(self) -> ClassIR: + assert self._env_class is not None + return self._env_class + + @env_class.setter + def env_class(self, ir: ClassIR) -> None: + self._env_class = ir + + @property + def generator_class(self) -> GeneratorClass: + assert self._generator_class is not None + return self._generator_class + + @generator_class.setter + def generator_class(self, cls: GeneratorClass) -> None: + self._generator_class = cls + + @property + def curr_env_reg(self) -> Value: + assert self._curr_env_reg is not None + return self._curr_env_reg + + def can_merge_generator_and_env_classes(self) -> bool: + # In simple cases we can place the environment into the generator class, + # instead of having two separate classes. + if self._generator_class and not self._generator_class.ir.is_final_class: + result = False + else: + result = self.is_generator and not self.is_nested and not self.contains_nested + return result + + +class ImplicitClass: + """Contains information regarding implicitly generated classes. + + Implicit classes are generated for nested functions and generator + functions. They are not explicitly defined in the source code. + + NOTE: This is both a concrete class and used as a base class. + """ + + def __init__(self, ir: ClassIR) -> None: + # The ClassIR instance associated with this class. + self.ir = ir + # The register associated with the 'self' instance for this generator class. + self._self_reg: Value | None = None + # Environment class registers are the local registers associated with instances of an + # environment class, used for getting and setting attributes. curr_env_reg is the register + # associated with the current environment. prev_env_reg is the self.__mypyc_env__ field + # associated with the previous environment. 
+ self._curr_env_reg: Value | None = None + self._prev_env_reg: Value | None = None + + @property + def self_reg(self) -> Value: + assert self._self_reg is not None + return self._self_reg + + @self_reg.setter + def self_reg(self, reg: Value) -> None: + self._self_reg = reg + + @property + def curr_env_reg(self) -> Value: + assert self._curr_env_reg is not None + return self._curr_env_reg + + @curr_env_reg.setter + def curr_env_reg(self, reg: Value) -> None: + self._curr_env_reg = reg + + @property + def prev_env_reg(self) -> Value: + assert self._prev_env_reg is not None + return self._prev_env_reg + + @prev_env_reg.setter + def prev_env_reg(self, reg: Value) -> None: + self._prev_env_reg = reg + + +class GeneratorClass(ImplicitClass): + """Contains information about implicit generator function classes.""" + + def __init__(self, ir: ClassIR) -> None: + super().__init__(ir) + # This register holds the label number that the '__next__' function should go to the next + # time it is called. + self._next_label_reg: Value | None = None + self._next_label_target: AssignmentTarget | None = None + + # These registers hold the error values for the generator object for the case that the + # 'throw' function is called. + self.exc_regs: tuple[Value, Value, Value] | None = None + + # Holds the arg passed to send + self.send_arg_reg: Value | None = None + + # Holds the PyObject ** pointer through which return value can be passed + # instead of raising StopIteration(ret_value) (only if not NULL). This + # is used for faster native-to-native calls. + self.stop_iter_value_reg: Value | None = None + + # The switch block is used to decide which instruction to go using the value held in the + # next-label register. + self.switch_block = BasicBlock() + self.continuation_blocks: list[BasicBlock] = [] + + @property + def next_label_reg(self) -> Value: + assert self._next_label_reg is not None + return self._next_label_reg + + @next_label_reg.setter + def next_label_reg(self, reg: Value) -> None: + self._next_label_reg = reg + + @property + def next_label_target(self) -> AssignmentTarget: + assert self._next_label_target is not None + return self._next_label_target + + @next_label_target.setter + def next_label_target(self, target: AssignmentTarget) -> None: + self._next_label_target = target diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/env_class.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/env_class.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..1da333a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/env_class.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/env_class.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/env_class.py new file mode 100644 index 0000000..2334b43 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/env_class.py @@ -0,0 +1,282 @@ +"""Generate classes representing function environments (+ related operations). + +If we have a nested function that has non-local (free) variables, access to the +non-locals is via an instance of an environment class. 
Example: + + def f() -> int: + x = 0 # Make 'x' an attribute of an environment class instance + + def g() -> int: + # We have access to the environment class instance to + # allow accessing 'x' + return x + 2 + + x = x + 1 # Modify the attribute + return g() +""" + +from __future__ import annotations + +from mypy.nodes import Argument, FuncDef, SymbolNode, Var +from mypyc.common import ( + BITMAP_BITS, + ENV_ATTR_NAME, + GENERATOR_ATTRIBUTE_PREFIX, + SELF_NAME, + bitmap_name, +) +from mypyc.ir.class_ir import ClassIR +from mypyc.ir.ops import Call, GetAttr, SetAttr, Value +from mypyc.ir.rtypes import RInstance, bitmap_rprimitive, object_rprimitive +from mypyc.irbuild.builder import IRBuilder, SymbolTarget +from mypyc.irbuild.context import FuncInfo, GeneratorClass, ImplicitClass +from mypyc.irbuild.targets import AssignmentTargetAttr + + +def setup_env_class(builder: IRBuilder) -> ClassIR: + """Generate a class representing a function environment. + + Note that the variables in the function environment are not + actually populated here. This is because when the environment + class is generated, the function environment has not yet been + visited. This behavior is allowed so that when the compiler visits + nested functions, it can use the returned ClassIR instance to + figure out free variables it needs to access. The remaining + attributes of the environment class are populated when the + environment registers are loaded. + + Return a ClassIR representing an environment for a function + containing a nested function. + """ + env_class = ClassIR( + f"{builder.fn_info.namespaced_name()}_env", + builder.module_name, + is_generated=True, + is_final_class=True, + ) + env_class.reuse_freed_instance = True + env_class.attributes[SELF_NAME] = RInstance(env_class) + if builder.fn_info.is_nested: + # If the function is nested, its environment class must contain an environment + # attribute pointing to its encapsulating functions' environment class. + env_class.attributes[ENV_ATTR_NAME] = RInstance(builder.fn_infos[-2].env_class) + env_class.mro = [env_class] + builder.fn_info.env_class = env_class + builder.classes.append(env_class) + return env_class + + +def finalize_env_class(builder: IRBuilder, prefix: str = "") -> None: + """Generate, instantiate, and set up the environment of an environment class.""" + if not builder.fn_info.can_merge_generator_and_env_classes(): + instantiate_env_class(builder) + + # Iterate through the function arguments and replace local definitions (using registers) + # that were previously added to the environment with references to the function's + # environment class. 
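+    # After this point, reads and writes of those arguments go through attributes of
+    # the environment instance rather than plain local registers.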
+ if builder.fn_info.is_nested: + add_args_to_env(builder, local=False, base=builder.fn_info.callable_class, prefix=prefix) + else: + add_args_to_env(builder, local=False, base=builder.fn_info, prefix=prefix) + + +def instantiate_env_class(builder: IRBuilder) -> Value: + """Assign an environment class to a register named after the given function definition.""" + curr_env_reg = builder.add( + Call(builder.fn_info.env_class.ctor, [], builder.fn_info.fitem.line) + ) + + if builder.fn_info.is_nested: + builder.fn_info.callable_class._curr_env_reg = curr_env_reg + builder.add( + SetAttr( + curr_env_reg, + ENV_ATTR_NAME, + builder.fn_info.callable_class.prev_env_reg, + builder.fn_info.fitem.line, + ) + ) + else: + builder.fn_info._curr_env_reg = curr_env_reg + + return curr_env_reg + + +def load_env_registers(builder: IRBuilder, prefix: str = "") -> None: + """Load the registers for the current FuncItem being visited. + + Adds the arguments of the FuncItem to the environment. If the + FuncItem is nested inside of another function, then this also + loads all of the outer environments of the FuncItem into registers + so that they can be used when accessing free variables. + """ + add_args_to_env(builder, local=True, prefix=prefix) + + fn_info = builder.fn_info + fitem = fn_info.fitem + if fn_info.is_nested: + load_outer_envs(builder, fn_info.callable_class) + # If this is a FuncDef, then make sure to load the FuncDef into its own environment + # class so that the function can be called recursively. + if isinstance(fitem, FuncDef) and fn_info.add_nested_funcs_to_env: + setup_func_for_recursive_call(builder, fitem, fn_info.callable_class, prefix=prefix) + + +def load_outer_env( + builder: IRBuilder, base: Value, outer_env: dict[SymbolNode, SymbolTarget] +) -> Value: + """Load the environment class for a given base into a register. + + Additionally, iterates through all of the SymbolNode and + AssignmentTarget instances of the environment at the given index's + symtable, and adds those instances to the environment of the + current environment. This is done so that the current environment + can access outer environment variables without having to reload + all of the environment registers. + + Returns the register where the environment class was loaded. + """ + env = builder.add(GetAttr(base, ENV_ATTR_NAME, builder.fn_info.fitem.line)) + assert isinstance(env.type, RInstance), f"{env} must be of type RInstance" + + for symbol, target in outer_env.items(): + attr_name = symbol.name + if isinstance(target, AssignmentTargetAttr): + attr_name = target.attr + env.type.class_ir.attributes[attr_name] = target.type + symbol_target = AssignmentTargetAttr(env, attr_name) + builder.add_target(symbol, symbol_target) + + return env + + +def load_outer_envs(builder: IRBuilder, base: ImplicitClass) -> None: + index = len(builder.builders) - 2 + + # Load the first outer environment. This one is special because it gets saved in the + # FuncInfo instance's prev_env_reg field. + if index > 1: + # outer_env = builder.fn_infos[index].environment + outer_env = builder.symtables[index] + if isinstance(base, GeneratorClass): + base.prev_env_reg = load_outer_env(builder, base.curr_env_reg, outer_env) + else: + base.prev_env_reg = load_outer_env(builder, base.self_reg, outer_env) + env_reg = base.prev_env_reg + index -= 1 + + # Load the remaining outer environments into registers. 
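+    # Each iteration below hops one level further out, so deeply nested functions can
+    # still reach free variables defined several enclosing functions up.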
+ while index > 1: + # outer_env = builder.fn_infos[index].environment + outer_env = builder.symtables[index] + env_reg = load_outer_env(builder, env_reg, outer_env) + index -= 1 + + +def num_bitmap_args(builder: IRBuilder, args: list[Argument]) -> int: + n = 0 + for arg in args: + t = builder.type_to_rtype(arg.variable.type) + if t.error_overlap and arg.kind.is_optional(): + n += 1 + return (n + (BITMAP_BITS - 1)) // BITMAP_BITS + + +def add_args_to_env( + builder: IRBuilder, + local: bool = True, + base: FuncInfo | ImplicitClass | None = None, + reassign: bool = True, + prefix: str = "", +) -> None: + fn_info = builder.fn_info + args = fn_info.fitem.arguments + nb = num_bitmap_args(builder, args) + if local: + for arg in args: + rtype = builder.type_to_rtype(arg.variable.type) + builder.add_local_reg(arg.variable, rtype, is_arg=True) + for i in reversed(range(nb)): + builder.add_local_reg(Var(bitmap_name(i)), bitmap_rprimitive, is_arg=True) + else: + for arg in args: + if is_free_variable(builder, arg.variable) or fn_info.is_generator: + rtype = builder.type_to_rtype(arg.variable.type) + assert base is not None, "base cannot be None for adding nonlocal args" + builder.add_var_to_env_class( + arg.variable, rtype, base, reassign=reassign, prefix=prefix + ) + + +def add_vars_to_env(builder: IRBuilder, prefix: str = "") -> None: + """Add relevant local variables and nested functions to the environment class. + + Add all variables and functions that are declared/defined within current + function and are referenced in functions nested within this one to this + function's environment class so the nested functions can reference + them even if they are declared after the nested function's definition. + Note that this is done before visiting the body of the function. + """ + env_for_func: FuncInfo | ImplicitClass = builder.fn_info + if builder.fn_info.is_generator: + env_for_func = builder.fn_info.generator_class + elif builder.fn_info.is_nested or builder.fn_info.in_non_ext: + env_for_func = builder.fn_info.callable_class + + if builder.fn_info.fitem in builder.free_variables: + # Sort the variables to keep things deterministic + for var in sorted(builder.free_variables[builder.fn_info.fitem], key=lambda x: x.name): + if isinstance(var, Var): + rtype = builder.type_to_rtype(var.type) + builder.add_var_to_env_class( + var, rtype, env_for_func, reassign=False, prefix=prefix + ) + + if builder.fn_info.fitem in builder.encapsulating_funcs: + for nested_fn in builder.encapsulating_funcs[builder.fn_info.fitem]: + if isinstance(nested_fn, FuncDef): + # The return type is 'object' instead of an RInstance of the + # callable class because differently defined functions with + # the same name and signature across conditional blocks + # will generate different callable classes, so the callable + # class that gets instantiated must be generic. + if nested_fn.is_generator: + prefix = GENERATOR_ATTRIBUTE_PREFIX + builder.add_var_to_env_class( + nested_fn, object_rprimitive, env_for_func, reassign=False, prefix=prefix + ) + + +def setup_func_for_recursive_call( + builder: IRBuilder, fdef: FuncDef, base: ImplicitClass, prefix: str = "" +) -> None: + """Enable calling a nested function (with a callable class) recursively. + + Adds the instance of the callable class representing the given + FuncDef to a register in the environment so that the function can + be called recursively. Note that this needs to be done only for + nested functions. 
+ """ + # First, set the attribute of the environment class so that GetAttr can be called on it. + prev_env = builder.fn_infos[-2].env_class + attr_name = prefix + fdef.name + prev_env.attributes[attr_name] = builder.type_to_rtype(fdef.type) + + if isinstance(base, GeneratorClass): + # If we are dealing with a generator class, then we need to first get the register + # holding the current environment class, and load the previous environment class from + # there. + prev_env_reg = builder.add(GetAttr(base.curr_env_reg, ENV_ATTR_NAME, -1)) + else: + prev_env_reg = base.prev_env_reg + + # Obtain the instance of the callable class representing the FuncDef, and add it to the + # current environment. + val = builder.add(GetAttr(prev_env_reg, attr_name, -1)) + target = builder.add_local_reg(fdef, object_rprimitive) + builder.assign(target, val, -1) + + +def is_free_variable(builder: IRBuilder, symbol: SymbolNode) -> bool: + fitem = builder.fn_info.fitem + return fitem in builder.free_variables and symbol in builder.free_variables[fitem] diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/expression.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/expression.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..27e19f5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/expression.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/expression.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/expression.py new file mode 100644 index 0000000..2ed347c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/expression.py @@ -0,0 +1,1122 @@ +"""Transform mypy expression ASTs to mypyc IR (Intermediate Representation). + +The top-level AST transformation logic is implemented in mypyc.irbuild.visitor +and mypyc.irbuild.builder. 
+""" + +from __future__ import annotations + +import math +from collections.abc import Sequence +from typing import Callable + +from mypy.nodes import ( + ARG_NAMED, + ARG_POS, + LDEF, + AssertTypeExpr, + AssignmentExpr, + BytesExpr, + CallExpr, + CastExpr, + ComparisonExpr, + ComplexExpr, + ConditionalExpr, + DictExpr, + DictionaryComprehension, + EllipsisExpr, + Expression, + FloatExpr, + GeneratorExpr, + IndexExpr, + IntExpr, + ListComprehension, + ListExpr, + MemberExpr, + MypyFile, + NameExpr, + OpExpr, + RefExpr, + SetComprehension, + SetExpr, + SliceExpr, + StarExpr, + StrExpr, + SuperExpr, + TupleExpr, + TypeApplication, + TypeInfo, + TypeVarLikeExpr, + UnaryExpr, + Var, +) +from mypy.types import Instance, ProperType, TupleType, TypeType, get_proper_type +from mypyc.common import MAX_SHORT_INT +from mypyc.ir.class_ir import ClassIR +from mypyc.ir.func_ir import FUNC_CLASSMETHOD, FUNC_STATICMETHOD +from mypyc.ir.ops import ( + Assign, + BasicBlock, + ComparisonOp, + Integer, + LoadAddress, + LoadLiteral, + PrimitiveDescription, + RaiseStandardError, + Register, + TupleGet, + TupleSet, + Value, +) +from mypyc.ir.rtypes import ( + RInstance, + RTuple, + bool_rprimitive, + int_rprimitive, + is_fixed_width_rtype, + is_int_rprimitive, + is_list_rprimitive, + is_none_rprimitive, + is_object_rprimitive, + object_rprimitive, + set_rprimitive, +) +from mypyc.irbuild.ast_helpers import is_borrow_friendly_expr, process_conditional +from mypyc.irbuild.builder import IRBuilder, int_borrow_friendly_op +from mypyc.irbuild.constant_fold import constant_fold_expr +from mypyc.irbuild.for_helpers import ( + comprehension_helper, + raise_error_if_contains_unreachable_names, + translate_list_comprehension, + translate_set_comprehension, +) +from mypyc.irbuild.format_str_tokenizer import ( + convert_format_expr_to_bytes, + convert_format_expr_to_str, + join_formatted_bytes, + join_formatted_strings, + tokenizer_printf_style, +) +from mypyc.irbuild.specialize import ( + apply_function_specialization, + apply_method_specialization, + translate_object_new, + translate_object_setattr, +) +from mypyc.primitives.bytes_ops import bytes_slice_op +from mypyc.primitives.dict_ops import dict_get_item_op, dict_new_op, exact_dict_set_item_op +from mypyc.primitives.generic_ops import iter_op, name_op +from mypyc.primitives.list_ops import list_append_op, list_extend_op, list_slice_op +from mypyc.primitives.misc_ops import ellipsis_op, get_module_dict_op, new_slice_op, type_op +from mypyc.primitives.registry import builtin_names +from mypyc.primitives.set_ops import set_add_op, set_in_op, set_update_op +from mypyc.primitives.str_ops import str_slice_op +from mypyc.primitives.tuple_ops import list_tuple_op, tuple_slice_op + +# Name and attribute references + + +def transform_name_expr(builder: IRBuilder, expr: NameExpr) -> Value: + if isinstance(expr.node, TypeVarLikeExpr) and expr.node.is_new_style: + # Reference to Python 3.12 implicit TypeVar/TupleVarTuple/... object. + # These are stored in C statics and not visible in Python namespaces. 
+ return builder.load_type_var(expr.node.name, expr.node.line) + if expr.node is None: + builder.add( + RaiseStandardError( + RaiseStandardError.NAME_ERROR, f'name "{expr.name}" is not defined', expr.line + ) + ) + return builder.none() + fullname = expr.node.fullname + if fullname in builtin_names: + typ, src = builtin_names[fullname] + return builder.add(LoadAddress(typ, src, expr.line)) + # special cases + if fullname == "builtins.None": + return builder.none() + if fullname == "builtins.True": + return builder.true() + if fullname == "builtins.False": + return builder.false() + if fullname in ("typing.TYPE_CHECKING", "typing_extensions.TYPE_CHECKING"): + return builder.false() + + math_literal = transform_math_literal(builder, fullname) + if math_literal is not None: + return math_literal + + if isinstance(expr.node, Var) and expr.node.is_final: + value = builder.emit_load_final( + expr.node, + fullname, + expr.name, + builder.is_native_ref_expr(expr), + builder.types[expr], + expr.line, + ) + if value is not None: + return value + + if isinstance(expr.node, MypyFile) and expr.node.fullname in builder.imports: + return builder.load_module(expr.node.fullname) + + # If the expression is locally defined, then read the result from the corresponding + # assignment target and return it. Otherwise if the expression is a global, load it from + # the globals dictionary. + # Except for imports, that currently always happens in the global namespace. + if expr.kind == LDEF and not (isinstance(expr.node, Var) and expr.node.is_suppressed_import): + # Try to detect and error when we hit the irritating mypy bug + # where a local variable is cast to None. (#5423) + if ( + isinstance(expr.node, Var) + and is_none_rprimitive(builder.node_type(expr)) + and expr.node.is_inferred + ): + builder.error( + 'Local variable "{}" has inferred type None; add an annotation'.format( + expr.node.name + ), + expr.node.line, + ) + + # TODO: Behavior currently only defined for Var, FuncDef and MypyFile node types. + if isinstance(expr.node, MypyFile): + # Load reference to a module imported inside function from + # the modules dictionary. It would be closer to Python + # semantics to access modules imported inside functions + # via local variables, but this is tricky since the mypy + # AST doesn't include a Var node for the module. We + # instead load the module separately on each access. + mod_dict = builder.call_c(get_module_dict_op, [], expr.line) + obj = builder.primitive_op( + dict_get_item_op, [mod_dict, builder.load_str(expr.node.fullname)], expr.line + ) + return obj + else: + return builder.read(builder.get_assignment_target(expr, for_read=True), expr.line) + + return builder.load_global(expr) + + +def transform_member_expr(builder: IRBuilder, expr: MemberExpr) -> Value: + # Special Cases + if expr.fullname in ("typing.TYPE_CHECKING", "typing_extensions.TYPE_CHECKING"): + return builder.false() + + # First check if this is maybe a final attribute. 
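+    # For example, `mod.X` where `X: Final = 5` (hypothetical names) may be loaded
+    # directly instead of doing a generic attribute lookup.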
+ final = builder.get_final_ref(expr) + if final is not None: + fullname, final_var, native = final + value = builder.emit_load_final( + final_var, fullname, final_var.name, native, builder.types[expr], expr.line + ) + if value is not None: + return value + + math_literal = transform_math_literal(builder, expr.fullname) + if math_literal is not None: + return math_literal + + if isinstance(expr.node, MypyFile) and expr.node.fullname in builder.imports: + return builder.load_module(expr.node.fullname) + + can_borrow = builder.is_native_attr_ref(expr) + obj = builder.accept(expr.expr, can_borrow=can_borrow) + rtype = builder.node_type(expr) + + if ( + is_object_rprimitive(obj.type) + and expr.name == "__name__" + and builder.options.capi_version >= (3, 11) + ): + return builder.primitive_op(name_op, [obj], expr.line) + + if isinstance(obj.type, RInstance) and expr.name == "__class__": + # A non-native class could override "__class__" using "__getattribute__", so + # only apply to RInstance types. + return builder.primitive_op(type_op, [obj], expr.line) + + # Special case: for named tuples transform attribute access to faster index access. + typ = get_proper_type(builder.types.get(expr.expr)) + if isinstance(typ, TupleType) and typ.partial_fallback.type.is_named_tuple: + fields = typ.partial_fallback.type.metadata["namedtuple"]["fields"] + if expr.name in fields: + index = builder.builder.load_int(fields.index(expr.name)) + return builder.gen_method_call(obj, "__getitem__", [index], rtype, expr.line) + + check_instance_attribute_access_through_class(builder, expr, typ) + + borrow = can_borrow and builder.can_borrow + return builder.builder.get_attr(obj, expr.name, rtype, expr.line, borrow=borrow) + + +def check_instance_attribute_access_through_class( + builder: IRBuilder, expr: MemberExpr, typ: ProperType | None +) -> None: + """Report error if accessing an instance attribute through class object.""" + if isinstance(expr.expr, RefExpr): + node = expr.expr.node + if isinstance(typ, TypeType) and isinstance(typ.item, Instance): + # TODO: Handle other item types + node = typ.item.type + if isinstance(node, TypeInfo): + class_ir = builder.mapper.type_to_ir.get(node) + if class_ir is not None and class_ir.is_ext_class: + sym = node.get(expr.name) + if ( + sym is not None + and isinstance(sym.node, Var) + and not sym.node.is_classvar + and not sym.node.is_final + ): + builder.error( + 'Cannot access instance attribute "{}" through class object'.format( + expr.name + ), + expr.line, + ) + builder.note( + '(Hint: Use "x: Final = ..." or "x: ClassVar = ..." 
to define ' + "a class attribute)", + expr.line, + ) + + +def transform_super_expr(builder: IRBuilder, o: SuperExpr) -> Value: + # warning(builder, 'can not optimize super() expression', o.line) + sup_val = builder.load_module_attr_by_fullname("builtins.super", o.line) + if o.call.args: + args = [builder.accept(arg) for arg in o.call.args] + else: + assert o.info is not None + typ = builder.load_native_type_object(o.info.fullname) + ir = builder.mapper.type_to_ir[o.info] + iter_env = iter(builder.builder.args) + # Grab first argument + vself: Value = next(iter_env) + if builder.fn_info.is_generator: + # grab seventh argument (see comment in translate_super_method_call) + self_targ = list(builder.symtables[-1].values())[7] + vself = builder.read(self_targ, builder.fn_info.fitem.line) + elif not ir.is_ext_class: + vself = next(iter_env) # second argument is self if non_extension class + args = [typ, vself] + res = builder.py_call(sup_val, args, o.line) + return builder.py_get_attr(res, o.name, o.line) + + +# Calls + + +def transform_call_expr(builder: IRBuilder, expr: CallExpr) -> Value: + callee = expr.callee + if isinstance(expr.analyzed, CastExpr): + return translate_cast_expr(builder, expr.analyzed) + elif isinstance(expr.analyzed, AssertTypeExpr): + # Compile to a no-op. + return builder.accept(expr.analyzed.expr) + elif ( + isinstance(callee, (NameExpr, MemberExpr)) + and isinstance(callee.node, TypeInfo) + and callee.node.is_newtype + ): + # A call to a NewType type is a no-op at runtime. + return builder.accept(expr.args[0]) + + if isinstance(callee, IndexExpr) and isinstance(callee.analyzed, TypeApplication): + callee = callee.analyzed.expr # Unwrap type application + + if isinstance(callee, MemberExpr): + if isinstance(callee.expr, RefExpr) and isinstance(callee.expr.node, MypyFile): + # Call a module-level function, not a method. + return translate_call(builder, expr, callee) + return apply_method_specialization(builder, expr, callee) or translate_method_call( + builder, expr, callee + ) + elif isinstance(callee, SuperExpr): + return translate_super_method_call(builder, expr, callee) + else: + return translate_call(builder, expr, callee) + + +def translate_call(builder: IRBuilder, expr: CallExpr, callee: Expression) -> Value: + # The common case of calls is refexprs + if isinstance(callee, RefExpr): + return apply_function_specialization(builder, expr, callee) or translate_refexpr_call( + builder, expr, callee + ) + + function = builder.accept(callee) + args = [builder.accept(arg) for arg in expr.args] + return builder.py_call( + function, args, expr.line, arg_kinds=expr.arg_kinds, arg_names=expr.arg_names + ) + + +def translate_refexpr_call(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value: + """Translate a non-method call.""" + # Gen the argument values + arg_values = [builder.accept(arg) for arg in expr.args] + + return builder.call_refexpr_with_args(expr, callee, arg_values) + + +def translate_method_call(builder: IRBuilder, expr: CallExpr, callee: MemberExpr) -> Value: + """Generate IR for an arbitrary call of form e.m(...). + + This can also deal with calls to module-level functions. 
+ """ + if builder.is_native_ref_expr(callee): + # Call to module-level native function or such + return translate_call(builder, expr, callee) + elif ( + isinstance(callee.expr, RefExpr) + and isinstance(callee.expr.node, TypeInfo) + and callee.expr.node in builder.mapper.type_to_ir + and builder.mapper.type_to_ir[callee.expr.node].has_method(callee.name) + and all(kind in (ARG_POS, ARG_NAMED) for kind in expr.arg_kinds) + ): + # Call a method via the *class* + assert isinstance(callee.expr.node, TypeInfo), callee.expr.node + ir = builder.mapper.type_to_ir[callee.expr.node] + return call_classmethod(builder, ir, expr, callee) + elif builder.is_module_member_expr(callee): + # Fall back to a PyCall for non-native module calls + function = builder.accept(callee) + args = [builder.accept(arg) for arg in expr.args] + return builder.py_call( + function, args, expr.line, arg_kinds=expr.arg_kinds, arg_names=expr.arg_names + ) + else: + if isinstance(callee.expr, RefExpr): + node = callee.expr.node + if isinstance(node, Var) and node.is_cls: + typ = get_proper_type(node.type) + if isinstance(typ, TypeType) and isinstance(typ.item, Instance): + class_ir = builder.mapper.type_to_ir.get(typ.item.type) + if class_ir and class_ir.is_ext_class and class_ir.has_no_subclasses(): + # Call a native classmethod via cls that can be statically bound, + # since the class has no subclasses. + return call_classmethod(builder, class_ir, expr, callee) + + receiver_typ = builder.node_type(callee.expr) + + # If there is a specializer for this method name/type, try calling it. + # We would return the first successful one. + val = apply_method_specialization(builder, expr, callee, receiver_typ) + if val is not None: + return val + + obj = builder.accept(callee.expr) + args = [builder.accept(arg) for arg in expr.args] + return builder.gen_method_call( + obj, + callee.name, + args, + builder.node_type(expr), + expr.line, + expr.arg_kinds, + expr.arg_names, + ) + + +def call_classmethod(builder: IRBuilder, ir: ClassIR, expr: CallExpr, callee: MemberExpr) -> Value: + decl = ir.method_decl(callee.name) + args = [] + arg_kinds, arg_names = expr.arg_kinds.copy(), expr.arg_names.copy() + # Add the class argument for class methods in extension classes + if decl.kind == FUNC_CLASSMETHOD and ir.is_ext_class: + args.append(builder.load_native_type_object(ir.fullname)) + arg_kinds.insert(0, ARG_POS) + arg_names.insert(0, None) + args += [builder.accept(arg) for arg in expr.args] + + if ir.is_ext_class: + return builder.builder.call(decl, args, arg_kinds, arg_names, expr.line) + else: + obj = builder.accept(callee.expr) + return builder.gen_method_call( + obj, + callee.name, + args, + builder.node_type(expr), + expr.line, + expr.arg_kinds, + expr.arg_names, + ) + + +def translate_super_method_call(builder: IRBuilder, expr: CallExpr, callee: SuperExpr) -> Value: + if callee.info is None or (len(callee.call.args) != 0 and len(callee.call.args) != 2): + return translate_call(builder, expr, callee) + + # We support two-argument super but only when it is super(CurrentClass, self) + # TODO: We could support it when it is a parent class in many cases? 
+ if len(callee.call.args) == 2: + self_arg = callee.call.args[1] + if ( + not isinstance(self_arg, NameExpr) + or not isinstance(self_arg.node, Var) + or not self_arg.node.is_self + ): + return translate_call(builder, expr, callee) + + typ_arg = callee.call.args[0] + if ( + not isinstance(typ_arg, NameExpr) + or not isinstance(typ_arg.node, TypeInfo) + or callee.info is not typ_arg.node + ): + return translate_call(builder, expr, callee) + + ir = builder.mapper.type_to_ir[callee.info] + # Search for the method in the mro, skipping ourselves. We + # determine targets of super calls to native methods statically. + for base in ir.mro[1:]: + if callee.name in base.method_decls: + break + else: + if callee.name == "__new__": + result = translate_object_new(builder, expr, MemberExpr(callee.call, "__new__")) + if result: + return result + elif callee.name == "__setattr__": + result = translate_object_setattr( + builder, expr, MemberExpr(callee.call, "__setattr__") + ) + if result: + return result + if ir.is_ext_class and ir.builtin_base is None and not ir.inherits_python: + if callee.name == "__init__" and len(expr.args) == 0: + # Call translates to object.__init__(self), which is a + # no-op, so omit the call. + return builder.none() + return translate_call(builder, expr, callee) + + decl = base.method_decl(callee.name) + arg_values = [builder.accept(arg) for arg in expr.args] + arg_kinds, arg_names = expr.arg_kinds.copy(), expr.arg_names.copy() + + if decl.kind != FUNC_STATICMETHOD and decl.name != "__new__": + # Grab first argument + vself: Value = builder.self() + if decl.kind == FUNC_CLASSMETHOD: + vself = builder.primitive_op(type_op, [vself], expr.line) + elif builder.fn_info.is_generator: + # For generator classes, the self target is the 7th value + # in the symbol table (which is an ordered dict). This is sort + # of ugly, but we can't search by name since the 'self' parameter + # could be named anything, and it doesn't get added to the + # environment indexes. + self_targ = list(builder.symtables[-1].values())[7] + vself = builder.read(self_targ, builder.fn_info.fitem.line) + arg_values.insert(0, vself) + arg_kinds.insert(0, ARG_POS) + arg_names.insert(0, None) + + return builder.builder.call(decl, arg_values, arg_kinds, arg_names, expr.line) + + +def translate_cast_expr(builder: IRBuilder, expr: CastExpr) -> Value: + src = builder.accept(expr.expr) + target_type = builder.type_to_rtype(expr.type) + return builder.coerce(src, target_type, expr.line) + + +# Operators + + +def transform_unary_expr(builder: IRBuilder, expr: UnaryExpr) -> Value: + folded = try_constant_fold(builder, expr) + if folded: + return folded + + return builder.unary_op(builder.accept(expr.expr), expr.op, expr.line) + + +def transform_op_expr(builder: IRBuilder, expr: OpExpr) -> Value: + if expr.op in ("and", "or"): + return builder.shortcircuit_expr(expr) + + # Special case for string formatting + if expr.op == "%" and isinstance(expr.left, (StrExpr, BytesExpr)): + ret = translate_printf_style_formatting(builder, expr.left, expr.right) + if ret is not None: + return ret + + folded = try_constant_fold(builder, expr) + if folded: + return folded + + borrow_left = False + borrow_right = False + + ltype = builder.node_type(expr.left) + rtype = builder.node_type(expr.right) + + # Special case some int ops to allow borrowing operands. 
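+    # For int operands, `//` by a constant power of two is also rewritten to a right
+    # shift just below, e.g. `x // 8` becomes `x >> 3`.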
+ if is_int_rprimitive(ltype) and is_int_rprimitive(rtype): + if expr.op == "//": + expr = try_optimize_int_floor_divide(builder, expr) + if expr.op in int_borrow_friendly_op: + borrow_left = is_borrow_friendly_expr(builder, expr.right) + borrow_right = True + elif is_fixed_width_rtype(ltype) and is_fixed_width_rtype(rtype): + borrow_left = is_borrow_friendly_expr(builder, expr.right) + borrow_right = True + + left = builder.accept(expr.left, can_borrow=borrow_left) + right = builder.accept(expr.right, can_borrow=borrow_right) + return builder.binary_op(left, right, expr.op, expr.line) + + +def try_optimize_int_floor_divide(builder: IRBuilder, expr: OpExpr) -> OpExpr: + """Replace // with a power of two with a right shift, if possible.""" + divisor = constant_fold_expr(builder, expr.right) + if not isinstance(divisor, int): + return expr + shift = divisor.bit_length() - 1 + if 0 < shift < 28 and divisor == (1 << shift): + return OpExpr(">>", expr.left, IntExpr(shift)) + return expr + + +def transform_index_expr(builder: IRBuilder, expr: IndexExpr) -> Value: + index = expr.index + base_type = builder.node_type(expr.base) + is_list = is_list_rprimitive(base_type) + can_borrow_base = is_list and is_borrow_friendly_expr(builder, index) + + base = builder.accept(expr.base, can_borrow=can_borrow_base) + + if isinstance(base.type, RTuple): + folded_index = constant_fold_expr(builder, index) + if isinstance(folded_index, int): + length = len(base.type.types) + if -length <= folded_index <= length - 1: + return builder.add(TupleGet(base, folded_index, expr.line)) + + if isinstance(index, SliceExpr): + value = try_gen_slice_op(builder, base, index) + if value: + return value + + index_reg = builder.accept(expr.index, can_borrow=is_list) + return builder.gen_method_call( + base, "__getitem__", [index_reg], builder.node_type(expr), expr.line + ) + + +def try_constant_fold(builder: IRBuilder, expr: Expression) -> Value | None: + """Return the constant value of an expression if possible. + + Return None otherwise. + """ + value = constant_fold_expr(builder, expr) + if value is not None: + return builder.load_literal_value(value) + return None + + +def try_gen_slice_op(builder: IRBuilder, base: Value, index: SliceExpr) -> Value | None: + """Generate specialized slice op for some index expressions. + + Return None if a specialized op isn't available. + + This supports obj[x:y], obj[:x], and obj[x:] for a few types. + """ + if index.stride: + # We can only handle the default stride of 1. + return None + + if index.begin_index: + begin_type = builder.node_type(index.begin_index) + else: + begin_type = int_rprimitive + if index.end_index: + end_type = builder.node_type(index.end_index) + else: + end_type = int_rprimitive + + # Both begin and end index must be int (or missing). + if is_int_rprimitive(begin_type) and is_int_rprimitive(end_type): + if index.begin_index: + begin = builder.accept(index.begin_index) + else: + begin = builder.load_int(0) + if index.end_index: + end = builder.accept(index.end_index) + else: + # Replace missing end index with the largest short integer + # (a sequence can't be longer). 
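+            # E.g. `obj[2:]` is compiled as if it were `obj[2:MAX_SHORT_INT]`.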
+ end = builder.load_int(MAX_SHORT_INT) + candidates = [list_slice_op, tuple_slice_op, str_slice_op, bytes_slice_op] + return builder.builder.matching_call_c(candidates, [base, begin, end], index.line) + + return None + + +def transform_conditional_expr(builder: IRBuilder, expr: ConditionalExpr) -> Value: + if_body, else_body, next_block = BasicBlock(), BasicBlock(), BasicBlock() + + process_conditional(builder, expr.cond, if_body, else_body) + expr_type = builder.node_type(expr) + # Having actual Phi nodes would be really nice here! + target = Register(expr_type) + + builder.activate_block(if_body) + true_value = builder.accept(expr.if_expr) + true_value = builder.coerce(true_value, expr_type, expr.line) + builder.add(Assign(target, true_value)) + builder.goto(next_block) + + builder.activate_block(else_body) + false_value = builder.accept(expr.else_expr) + false_value = builder.coerce(false_value, expr_type, expr.line) + builder.add(Assign(target, false_value)) + builder.goto(next_block) + + builder.activate_block(next_block) + + return target + + +def set_literal_values(builder: IRBuilder, items: Sequence[Expression]) -> list[object] | None: + values: list[object] = [] + for item in items: + const_value = constant_fold_expr(builder, item) + if const_value is not None: + values.append(const_value) + continue + + if isinstance(item, RefExpr): + if item.fullname == "builtins.None": + values.append(None) + elif item.fullname == "builtins.True": + values.append(True) + elif item.fullname == "builtins.False": + values.append(False) + elif isinstance(item, TupleExpr): + tuple_values = set_literal_values(builder, item.items) + if tuple_values is not None: + values.append(tuple(tuple_values)) + + if len(values) != len(items): + # Bail if not all items can be converted into values. + return None + return values + + +def precompute_set_literal(builder: IRBuilder, s: SetExpr) -> Value | None: + """Try to pre-compute a frozenset literal during module initialization. + + Return None if it's not possible. + + Supported items: + - Anything supported by irbuild.constant_fold.constant_fold_expr() + - None, True, and False + - Tuple literals with only items listed above + """ + values = set_literal_values(builder, s.items) + if values is not None: + return builder.add(LoadLiteral(frozenset(values), set_rprimitive)) + + return None + + +def transform_comparison_expr(builder: IRBuilder, e: ComparisonExpr) -> Value: + # x in (...)/[...] + # x not in (...)/[...] + first_op = e.operators[0] + if first_op in ["in", "not in"] and len(e.operators) == 1: + result = try_specialize_in_expr(builder, first_op, e.operands[0], e.operands[1], e.line) + if result is not None: + return result + + if len(e.operators) == 1: + # Special some common simple cases + if first_op in ("is", "is not"): + right_expr = e.operands[1] + if isinstance(right_expr, NameExpr) and right_expr.fullname == "builtins.None": + # Special case 'is None' / 'is not None'. 
+ return translate_is_none(builder, e.operands[0], negated=first_op != "is") + left_expr = e.operands[0] + if is_int_rprimitive(builder.node_type(left_expr)): + right_expr = e.operands[1] + if is_int_rprimitive(builder.node_type(right_expr)): + if first_op in int_borrow_friendly_op: + borrow_left = is_borrow_friendly_expr(builder, right_expr) + left = builder.accept(left_expr, can_borrow=borrow_left) + right = builder.accept(right_expr, can_borrow=True) + return builder.binary_op(left, right, first_op, e.line) + + # TODO: Don't produce an expression when used in conditional context + # All of the trickiness here is due to support for chained conditionals + # (`e1 < e2 > e3`, etc). `e1 < e2 > e3` is approximately equivalent to + # `e1 < e2 and e2 > e3` except that `e2` is only evaluated once. + expr_type = builder.node_type(e) + + # go(i, prev) generates code for `ei opi e{i+1} op{i+1} ... en`, + # assuming that prev contains the value of `ei`. + def go(i: int, prev: Value) -> Value: + if i == len(e.operators) - 1: + return transform_basic_comparison( + builder, e.operators[i], prev, builder.accept(e.operands[i + 1]), e.line + ) + + next = builder.accept(e.operands[i + 1]) + return builder.builder.shortcircuit_helper( + "and", + expr_type, + lambda: transform_basic_comparison(builder, e.operators[i], prev, next, e.line), + lambda: go(i + 1, next), + e.line, + ) + + return go(0, builder.accept(e.operands[0])) + + +def try_specialize_in_expr( + builder: IRBuilder, op: str, lhs: Expression, rhs: Expression, line: int +) -> Value | None: + left: Value | None = None + items: list[Value] | None = None + + if isinstance(rhs, (TupleExpr, ListExpr)): + left = builder.accept(lhs) + items = [builder.accept(item) for item in rhs.items] + elif isinstance(builder.node_type(rhs), RTuple): + left = builder.accept(lhs) + tuple_val = builder.accept(rhs) + assert isinstance(tuple_val.type, RTuple) + items = [builder.add(TupleGet(tuple_val, i)) for i in range(len(tuple_val.type.types))] + + if items is not None: + assert left is not None + n_items = len(items) + # x in y -> x == y[0] or ... or x == y[n] + # x not in y -> x != y[0] and ... 
and x != y[n] + if n_items > 1: + if op == "in": + cmp_op = "==" + else: + cmp_op = "!=" + out = BasicBlock() + for item in items: + cmp = transform_basic_comparison(builder, cmp_op, left, item, line) + bool_val = builder.builder.bool_value(cmp) + next_block = BasicBlock() + if op == "in": + builder.add_bool_branch(bool_val, out, next_block) + else: + builder.add_bool_branch(bool_val, next_block, out) + builder.activate_block(next_block) + result_reg = Register(bool_rprimitive) + end = BasicBlock() + if op == "in": + values = builder.false(), builder.true() + else: + values = builder.true(), builder.false() + builder.assign(result_reg, values[0], line) + builder.goto(end) + builder.activate_block(out) + builder.assign(result_reg, values[1], line) + builder.goto(end) + builder.activate_block(end) + return result_reg + # x in [y]/(y) -> x == y + # x not in [y]/(y) -> x != y + elif n_items == 1: + if op == "in": + cmp_op = "==" + else: + cmp_op = "!=" + right = items[0] + return transform_basic_comparison(builder, cmp_op, left, right, line) + # x in []/() -> False + # x not in []/() -> True + elif n_items == 0: + if op == "in": + return builder.false() + else: + return builder.true() + + # x in {...} + # x not in {...} + if isinstance(rhs, SetExpr): + set_literal = precompute_set_literal(builder, rhs) + if set_literal is not None: + result = builder.builder.primitive_op( + set_in_op, [builder.accept(lhs), set_literal], line, bool_rprimitive + ) + if op == "not in": + return builder.unary_op(result, "not", line) + return result + + return None + + +def translate_is_none(builder: IRBuilder, expr: Expression, negated: bool) -> Value: + v = builder.accept(expr, can_borrow=True) + return builder.binary_op(v, builder.none_object(), "is not" if negated else "is", expr.line) + + +def transform_basic_comparison( + builder: IRBuilder, op: str, left: Value, right: Value, line: int +) -> Value: + if is_fixed_width_rtype(left.type) and op in ComparisonOp.signed_ops: + if right.type == left.type: + if left.type.is_signed: + op_id = ComparisonOp.signed_ops[op] + else: + op_id = ComparisonOp.unsigned_ops[op] + return builder.builder.comparison_op(left, right, op_id, line) + elif isinstance(right, Integer): + if left.type.is_signed: + op_id = ComparisonOp.signed_ops[op] + else: + op_id = ComparisonOp.unsigned_ops[op] + return builder.builder.comparison_op( + left, builder.coerce(right, left.type, line), op_id, line + ) + elif ( + is_fixed_width_rtype(right.type) + and op in ComparisonOp.signed_ops + and isinstance(left, Integer) + ): + if right.type.is_signed: + op_id = ComparisonOp.signed_ops[op] + else: + op_id = ComparisonOp.unsigned_ops[op] + return builder.builder.comparison_op( + builder.coerce(left, right.type, line), right, op_id, line + ) + + negate = False + if op == "is not": + op, negate = "is", True + elif op == "not in": + op, negate = "in", True + + target = builder.binary_op(left, right, op, line) + + if negate: + target = builder.unary_op(target, "not", line) + return target + + +def translate_printf_style_formatting( + builder: IRBuilder, format_expr: StrExpr | BytesExpr, rhs: Expression +) -> Value | None: + tokens = tokenizer_printf_style(format_expr.value) + if tokens is not None: + literals, format_ops = tokens + + exprs = [] + if isinstance(rhs, TupleExpr): + exprs = rhs.items + elif isinstance(rhs, Expression): + exprs.append(rhs) + + if isinstance(format_expr, BytesExpr): + substitutions = convert_format_expr_to_bytes( + builder, format_ops, exprs, format_expr.line + ) + if 
substitutions is not None: + return join_formatted_bytes(builder, literals, substitutions, format_expr.line) + else: + substitutions = convert_format_expr_to_str( + builder, format_ops, exprs, format_expr.line + ) + if substitutions is not None: + return join_formatted_strings(builder, literals, substitutions, format_expr.line) + + return None + + +# Literals + + +def transform_int_expr(builder: IRBuilder, expr: IntExpr) -> Value: + return builder.builder.load_int(expr.value) + + +def transform_float_expr(builder: IRBuilder, expr: FloatExpr) -> Value: + return builder.builder.load_float(expr.value) + + +def transform_complex_expr(builder: IRBuilder, expr: ComplexExpr) -> Value: + return builder.builder.load_complex(expr.value) + + +def transform_str_expr(builder: IRBuilder, expr: StrExpr) -> Value: + return builder.load_str(expr.value) + + +def transform_bytes_expr(builder: IRBuilder, expr: BytesExpr) -> Value: + return builder.load_bytes_from_str_literal(expr.value) + + +def transform_ellipsis(builder: IRBuilder, o: EllipsisExpr) -> Value: + return builder.add(LoadAddress(ellipsis_op.type, ellipsis_op.src, o.line)) + + +# Display expressions + + +def transform_list_expr(builder: IRBuilder, expr: ListExpr) -> Value: + return _visit_list_display(builder, expr.items, expr.line) + + +def _visit_list_display(builder: IRBuilder, items: list[Expression], line: int) -> Value: + return _visit_display( + builder, items, builder.new_list_op, list_append_op, list_extend_op, line, True + ) + + +def transform_tuple_expr(builder: IRBuilder, expr: TupleExpr) -> Value: + if any(isinstance(item, StarExpr) for item in expr.items): + # create a tuple of unknown length + return _visit_tuple_display(builder, expr) + + # create a tuple of fixed length (RTuple) + tuple_type = builder.node_type(expr) + # When handling NamedTuple et. al we might not have proper type info, + # so make some up if we need it. 
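+    # Roughly: a fixed-length literal such as `(x, "y")` is built as an unboxed
+    # RTuple via TupleSet below, with each item coerced to its declared item type
+    # first; only tuples containing a starred item fall back to a real tuple object.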
+ types = ( + tuple_type.types + if isinstance(tuple_type, RTuple) + else [object_rprimitive] * len(expr.items) + ) + + items = [] + for item_expr, item_type in zip(expr.items, types): + reg = builder.accept(item_expr) + items.append(builder.coerce(reg, item_type, item_expr.line)) + return builder.add(TupleSet(items, expr.line)) + + +def _visit_tuple_display(builder: IRBuilder, expr: TupleExpr) -> Value: + """Create a list, then turn it into a tuple.""" + val_as_list = _visit_list_display(builder, expr.items, expr.line) + return builder.primitive_op(list_tuple_op, [val_as_list], expr.line) + + +def transform_dict_expr(builder: IRBuilder, expr: DictExpr) -> Value: + """First accepts all keys and values, then makes a dict out of them.""" + key_value_pairs = [] + for key_expr, value_expr in expr.items: + key = builder.accept(key_expr) if key_expr is not None else None + value = builder.accept(value_expr) + key_value_pairs.append((key, value)) + + return builder.builder.make_dict(key_value_pairs, expr.line) + + +def transform_set_expr(builder: IRBuilder, expr: SetExpr) -> Value: + return _visit_display( + builder, expr.items, builder.new_set_op, set_add_op, set_update_op, expr.line, False + ) + + +def _visit_display( + builder: IRBuilder, + items: list[Expression], + constructor_op: Callable[[list[Value], int], Value], + append_op: PrimitiveDescription, + extend_op: PrimitiveDescription, + line: int, + is_list: bool, +) -> Value: + accepted_items = [] + for item in items: + if isinstance(item, StarExpr): + accepted_items.append((True, builder.accept(item.expr))) + else: + accepted_items.append((False, builder.accept(item))) + + result: Value | None = None + initial_items = [] + for starred, value in accepted_items: + if result is None and not starred and is_list: + initial_items.append(value) + continue + + if result is None: + result = constructor_op(initial_items, line) + + builder.primitive_op(extend_op if starred else append_op, [result, value], line) + + if result is None: + result = constructor_op(initial_items, line) + + return result + + +# Comprehensions + + +def transform_list_comprehension(builder: IRBuilder, o: ListComprehension) -> Value: + return translate_list_comprehension(builder, o.generator) + + +def transform_set_comprehension(builder: IRBuilder, o: SetComprehension) -> Value: + return translate_set_comprehension(builder, o.generator) + + +def transform_dictionary_comprehension(builder: IRBuilder, o: DictionaryComprehension) -> Value: + if raise_error_if_contains_unreachable_names(builder, o): + return builder.none() + + d = builder.maybe_spill(builder.call_c(dict_new_op, [], o.line)) + loop_params = list(zip(o.indices, o.sequences, o.condlists, o.is_async)) + + def gen_inner_stmts() -> None: + k = builder.accept(o.key) + v = builder.accept(o.value) + builder.call_c(exact_dict_set_item_op, [builder.read(d), k, v], o.line) + + comprehension_helper(builder, loop_params, gen_inner_stmts, o.line) + return builder.read(d) + + +# Misc + + +def transform_slice_expr(builder: IRBuilder, expr: SliceExpr) -> Value: + def get_arg(arg: Expression | None) -> Value: + if arg is None: + return builder.none_object() + else: + return builder.accept(arg) + + args = [get_arg(expr.begin_index), get_arg(expr.end_index), get_arg(expr.stride)] + return builder.primitive_op(new_slice_op, args, expr.line) + + +def transform_generator_expr(builder: IRBuilder, o: GeneratorExpr) -> Value: + builder.warning("Treating generator comprehension as list", o.line) + return builder.primitive_op(iter_op, 
[translate_list_comprehension(builder, o)], o.line) + + +def transform_assignment_expr(builder: IRBuilder, o: AssignmentExpr) -> Value: + value = builder.accept(o.value) + target = builder.get_assignment_target(o.target) + builder.assign(target, value, o.line) + return value + + +def transform_math_literal(builder: IRBuilder, fullname: str) -> Value | None: + if fullname == "math.e": + return builder.load_float(math.e) + if fullname == "math.pi": + return builder.load_float(math.pi) + if fullname == "math.inf": + return builder.load_float(math.inf) + if fullname == "math.nan": + return builder.load_float(math.nan) + if fullname == "math.tau": + return builder.load_float(math.tau) + + return None diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/for_helpers.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/for_helpers.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..6997f5c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/for_helpers.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/for_helpers.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/for_helpers.py new file mode 100644 index 0000000..33e4429 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/for_helpers.py @@ -0,0 +1,1268 @@ +"""Helpers for generating for loops and comprehensions. + +We special case certain kinds for loops such as "for x in range(...)" +for better efficiency. Each for loop generator class below deals one +such special case. +""" + +from __future__ import annotations + +from typing import Callable, ClassVar, cast + +from mypy.nodes import ( + ARG_POS, + BytesExpr, + CallExpr, + DictionaryComprehension, + Expression, + GeneratorExpr, + ListExpr, + Lvalue, + MemberExpr, + NameExpr, + RefExpr, + SetExpr, + StarExpr, + StrExpr, + TupleExpr, + TypeAlias, + Var, +) +from mypy.types import LiteralType, TupleType, get_proper_type, get_proper_types +from mypyc.ir.ops import ( + ERR_NEVER, + BasicBlock, + Branch, + Integer, + IntOp, + LoadAddress, + LoadErrorValue, + LoadLiteral, + LoadMem, + MethodCall, + RaiseStandardError, + Register, + TupleGet, + TupleSet, + Value, +) +from mypyc.ir.rtypes import ( + RInstance, + RTuple, + RType, + bool_rprimitive, + c_pyssize_t_rprimitive, + int_rprimitive, + is_dict_rprimitive, + is_fixed_width_rtype, + is_immutable_rprimitive, + is_list_rprimitive, + is_sequence_rprimitive, + is_short_int_rprimitive, + is_str_rprimitive, + is_tuple_rprimitive, + object_pointer_rprimitive, + object_rprimitive, + pointer_rprimitive, + short_int_rprimitive, +) +from mypyc.irbuild.builder import IRBuilder +from mypyc.irbuild.prepare import GENERATOR_HELPER_NAME +from mypyc.irbuild.targets import AssignmentTarget, AssignmentTargetTuple +from mypyc.primitives.dict_ops import ( + dict_check_size_op, + dict_item_iter_op, + dict_key_iter_op, + dict_next_item_op, + dict_next_key_op, + dict_next_value_op, + dict_value_iter_op, +) +from mypyc.primitives.exc_ops import no_err_occurred_op, propagate_if_error_op +from mypyc.primitives.generic_ops import aiter_op, anext_op, iter_op, next_op +from mypyc.primitives.list_ops import list_append_op, list_get_item_unsafe_op, new_list_set_item_op +from mypyc.primitives.misc_ops import stop_async_iteration_op +from mypyc.primitives.registry import CFunctionDescription +from mypyc.primitives.set_ops import set_add_op +from mypyc.primitives.str_ops import str_get_item_unsafe_op +from mypyc.primitives.tuple_ops 
import tuple_get_item_unsafe_op + +GenFunc = Callable[[], None] + + +def for_loop_helper( + builder: IRBuilder, + index: Lvalue, + expr: Expression, + body_insts: GenFunc, + else_insts: GenFunc | None, + is_async: bool, + line: int, +) -> None: + """Generate IR for a loop. + + Args: + index: the loop index Lvalue + expr: the expression to iterate over + body_insts: a function that generates the body of the loop + else_insts: a function that generates the else block instructions + """ + # Body of the loop + body_block = BasicBlock() + # Block that steps to the next item + step_block = BasicBlock() + # Block for the else clause, if we need it + else_block = BasicBlock() + # Block executed after the loop + exit_block = BasicBlock() + + # Determine where we want to exit, if our condition check fails. + normal_loop_exit = else_block if else_insts is not None else exit_block + + for_gen = make_for_loop_generator( + builder, index, expr, body_block, normal_loop_exit, line, is_async=is_async + ) + + builder.push_loop_stack(step_block, exit_block) + condition_block = BasicBlock() + builder.goto_and_activate(condition_block) + + # Add loop condition check. + for_gen.gen_condition() + + # Generate loop body. + builder.activate_block(body_block) + for_gen.begin_body() + body_insts() + + # We generate a separate step block (which might be empty). + builder.goto_and_activate(step_block) + for_gen.gen_step() + # Go back to loop condition. + builder.goto(condition_block) + + for_gen.add_cleanup(normal_loop_exit) + builder.pop_loop_stack() + + if else_insts is not None: + builder.activate_block(else_block) + else_insts() + builder.goto(exit_block) + + builder.activate_block(exit_block) + + +def for_loop_helper_with_index( + builder: IRBuilder, + index: Lvalue, + expr: Expression, + expr_reg: Value, + body_insts: Callable[[Value], None], + line: int, + length: Value, +) -> None: + """Generate IR for a sequence iteration. + + This function only works for sequence type. Compared to for_loop_helper, + it would feed iteration index to body_insts. + + Args: + index: the loop index Lvalue + expr: the expression to iterate over + body_insts: a function that generates the body of the loop. + It needs a index as parameter. + """ + assert is_sequence_rprimitive(expr_reg.type), (expr_reg, expr_reg.type) + target_type = builder.get_sequence_type(expr) + + body_block = BasicBlock() + step_block = BasicBlock() + exit_block = BasicBlock() + condition_block = BasicBlock() + + for_gen = ForSequence(builder, index, body_block, exit_block, line, False) + for_gen.init(expr_reg, target_type, reverse=False, length=length) + + builder.push_loop_stack(step_block, exit_block) + + if isinstance(length, Integer) and length.value > 0: + builder.goto(body_block) + builder.activate_block(condition_block) + else: + builder.goto_and_activate(condition_block) + + for_gen.gen_condition() + + builder.activate_block(body_block) + for_gen.begin_body() + body_insts(builder.read(for_gen.index_target)) + + builder.goto_and_activate(step_block) + for_gen.gen_step() + builder.goto(condition_block) + + for_gen.add_cleanup(exit_block) + builder.pop_loop_stack() + + builder.activate_block(exit_block) + + +def sequence_from_generator_preallocate_helper( + builder: IRBuilder, + gen: GeneratorExpr, + empty_op_llbuilder: Callable[[Value, int], Value], + set_item_op: CFunctionDescription, +) -> Value | None: + """Generate a new tuple or list from a simple generator expression. 
+ + Currently we only optimize for simplest generator expression, which means that + there is no condition list in the generator and only one original sequence with + one index is allowed. + + e.g. (1) tuple(f(x) for x in a_list/a_tuple/a_str/a_bytes/an_rtuple) + (2) list(f(x) for x in a_list/a_tuple/a_str/a_bytes/an_rtuple) + (3) [f(x) for x in a_list/a_tuple/a_str/a_bytes/an_rtuple] + + Args: + empty_op_llbuilder: A function that can generate an empty sequence op when + passed in length. See `new_list_op_with_length` and `new_tuple_op_with_length` + for detailed implementation. + set_item_op: A primitive that can modify an arbitrary position of a sequence. + The op should have three arguments: + - Self + - Target position + - New Value + See `new_list_set_item_op` and `new_tuple_set_item_op` for detailed + implementation. + """ + if len(gen.sequences) == 1 and len(gen.indices) == 1 and len(gen.condlists[0]) == 0: + line = gen.line + sequence_expr = gen.sequences[0] + rtype = builder.node_type(sequence_expr) + if not (is_sequence_rprimitive(rtype) or isinstance(rtype, RTuple)): + return None + + if isinstance(rtype, RTuple): + # If input is RTuple, box it to tuple_rprimitive for generic iteration + # TODO: this can be optimized a bit better with an unrolled ForRTuple helper + proper_type = get_proper_type(builder.types[sequence_expr]) + assert isinstance(proper_type, TupleType), proper_type + + # the for_loop_helper_with_index crashes for empty tuples, bail out + if not proper_type.items: + return None + + proper_types = get_proper_types(proper_type.items) + + get_item_ops: list[LoadLiteral | TupleGet] + if all(isinstance(typ, LiteralType) for typ in proper_types): + get_item_ops = [ + LoadLiteral(cast(LiteralType, typ).value, object_rprimitive) + for typ in proper_types + ] + + else: + sequence = builder.accept(sequence_expr) + get_item_ops = [ + ( + LoadLiteral(typ.value, object_rprimitive) + if isinstance(typ, LiteralType) + else TupleGet(sequence, i, line) + ) + for i, typ in enumerate(proper_types) + ] + + items = list(map(builder.add, get_item_ops)) + sequence = builder.new_tuple(items, line) + + else: + sequence = builder.accept(sequence_expr) + + length = get_expr_length_value(builder, sequence_expr, sequence, line, use_pyssize_t=True) + + target_op = empty_op_llbuilder(length, line) + + def set_item(item_index: Value) -> None: + e = builder.accept(gen.left_expr) + builder.call_c(set_item_op, [target_op, item_index, e], line) + + for_loop_helper_with_index( + builder, gen.indices[0], sequence_expr, sequence, set_item, line, length + ) + + return target_op + return None + + +def translate_list_comprehension(builder: IRBuilder, gen: GeneratorExpr) -> Value: + if raise_error_if_contains_unreachable_names(builder, gen): + return builder.none() + + # Try simplest list comprehension, otherwise fall back to general one + val = sequence_from_generator_preallocate_helper( + builder, + gen, + empty_op_llbuilder=builder.builder.new_list_op_with_length, + set_item_op=new_list_set_item_op, + ) + if val is not None: + return val + + list_ops = builder.maybe_spill(builder.new_list_op([], gen.line)) + + loop_params = list(zip(gen.indices, gen.sequences, gen.condlists, gen.is_async)) + + def gen_inner_stmts() -> None: + e = builder.accept(gen.left_expr) + builder.primitive_op(list_append_op, [builder.read(list_ops), e], gen.line) + + comprehension_helper(builder, loop_params, gen_inner_stmts, gen.line) + return builder.read(list_ops) + + +def raise_error_if_contains_unreachable_names( + builder: 
IRBuilder, gen: GeneratorExpr | DictionaryComprehension +) -> bool: + """Raise a runtime error and return True if generator contains unreachable names. + + False is returned if the generator can be safely transformed without crashing. + (It may still be unreachable!) + """ + if any(isinstance(s, NameExpr) and s.node is None for s in gen.indices): + error = RaiseStandardError( + RaiseStandardError.RUNTIME_ERROR, + "mypyc internal error: should be unreachable", + gen.line, + ) + builder.add(error) + return True + + return False + + +def translate_set_comprehension(builder: IRBuilder, gen: GeneratorExpr) -> Value: + if raise_error_if_contains_unreachable_names(builder, gen): + return builder.none() + + set_ops = builder.maybe_spill(builder.new_set_op([], gen.line)) + loop_params = list(zip(gen.indices, gen.sequences, gen.condlists, gen.is_async)) + + def gen_inner_stmts() -> None: + e = builder.accept(gen.left_expr) + builder.primitive_op(set_add_op, [builder.read(set_ops), e], gen.line) + + comprehension_helper(builder, loop_params, gen_inner_stmts, gen.line) + return builder.read(set_ops) + + +def comprehension_helper( + builder: IRBuilder, + loop_params: list[tuple[Lvalue, Expression, list[Expression], bool]], + gen_inner_stmts: Callable[[], None], + line: int, +) -> None: + """Helper function for list comprehensions. + + Args: + loop_params: a list of (index, expr, [conditions]) tuples defining nested loops: + - "index" is the Lvalue indexing that loop; + - "expr" is the expression for the object to be iterated over; + - "conditions" is a list of conditions, evaluated in order with short-circuiting, + that must all be true for the loop body to be executed + gen_inner_stmts: function to generate the IR for the body of the innermost loop + """ + + def handle_loop(loop_params: list[tuple[Lvalue, Expression, list[Expression], bool]]) -> None: + """Generate IR for a loop. + + Given a list of (index, expression, [conditions]) tuples, generate IR + for the nested loops the list defines. + """ + index, expr, conds, is_async = loop_params[0] + for_loop_helper( + builder, + index, + expr, + lambda: loop_contents(conds, loop_params[1:]), + None, + is_async=is_async, + line=line, + ) + + def loop_contents( + conds: list[Expression], + remaining_loop_params: list[tuple[Lvalue, Expression, list[Expression], bool]], + ) -> None: + """Generate the body of the loop. + + Args: + conds: a list of conditions to be evaluated (in order, with short circuiting) + to gate the body of the loop + remaining_loop_params: the parameters for any further nested loops; if it's empty + we'll instead evaluate the "gen_inner_stmts" function + """ + # Check conditions, in order, short circuiting them. + for cond in conds: + cond_val = builder.accept(cond) + cont_block, rest_block = BasicBlock(), BasicBlock() + # If the condition is true we'll skip the continue. + builder.add_bool_branch(cond_val, rest_block, cont_block) + builder.activate_block(cont_block) + builder.nonlocal_control[-1].gen_continue(builder, cond.line) + builder.goto_and_activate(rest_block) + + if remaining_loop_params: + # There's another nested level, so the body of this loop is another loop. + return handle_loop(remaining_loop_params) + else: + # We finally reached the actual body of the generator. + # Generate the IR for the inner loop body. 
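+                # For example, in `[x + y for x in a if x for y in b]` we are now
+                # inside both nested loops with the `if x` guard already checked, and
+                # gen_inner_stmts() emits the code that appends `x + y` to the result.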
+ gen_inner_stmts() + + handle_loop(loop_params) + + +def is_range_ref(expr: RefExpr) -> bool: + return ( + expr.fullname == "builtins.range" + or isinstance(expr.node, TypeAlias) + and expr.fullname == "six.moves.xrange" + ) + + +def make_for_loop_generator( + builder: IRBuilder, + index: Lvalue, + expr: Expression, + body_block: BasicBlock, + loop_exit: BasicBlock, + line: int, + is_async: bool = False, + nested: bool = False, +) -> ForGenerator: + """Return helper object for generating a for loop over an iterable. + + If "nested" is True, this is a nested iterator such as "e" in "enumerate(e)". + """ + + # Do an async loop if needed. async is always generic + if is_async: + expr_reg = builder.accept(expr) + async_obj = ForAsyncIterable(builder, index, body_block, loop_exit, line, nested) + item_type = builder._analyze_iterable_item_type(expr) + item_rtype = builder.type_to_rtype(item_type) + async_obj.init(expr_reg, item_rtype) + return async_obj + + rtyp = builder.node_type(expr) + if is_sequence_rprimitive(rtyp): + # Special case "for x in ". + expr_reg = builder.accept(expr) + target_type = builder.get_sequence_type(expr) + + for_list = ForSequence(builder, index, body_block, loop_exit, line, nested) + for_list.init(expr_reg, target_type, reverse=False) + return for_list + + if is_dict_rprimitive(rtyp): + # Special case "for k in ". + expr_reg = builder.accept(expr) + target_type = builder.get_dict_key_type(expr) + + for_dict = ForDictionaryKeys(builder, index, body_block, loop_exit, line, nested) + for_dict.init(expr_reg, target_type) + return for_dict + + if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr): + if ( + is_range_ref(expr.callee) + and ( + len(expr.args) <= 2 + or (len(expr.args) == 3 and builder.extract_int(expr.args[2]) is not None) + ) + and set(expr.arg_kinds) == {ARG_POS} + ): + # Special case "for x in range(...)". + # We support the 3 arg form but only for int literals, since it doesn't + # seem worth the hassle of supporting dynamically determining which + # direction of comparison to do. + if len(expr.args) == 1: + start_reg: Value = Integer(0) + end_reg = builder.accept(expr.args[0]) + else: + start_reg = builder.accept(expr.args[0]) + end_reg = builder.accept(expr.args[1]) + if len(expr.args) == 3: + step = builder.extract_int(expr.args[2]) + assert step is not None + if step == 0: + builder.error("range() step can't be zero", expr.args[2].line) + else: + step = 1 + + for_range = ForRange(builder, index, body_block, loop_exit, line, nested) + for_range.init(start_reg, end_reg, step) + return for_range + + elif ( + expr.callee.fullname == "builtins.enumerate" + and len(expr.args) == 1 + and expr.arg_kinds == [ARG_POS] + and isinstance(index, TupleExpr) + and len(index.items) == 2 + ): + # Special case "for i, x in enumerate(y)". + lvalue1 = index.items[0] + lvalue2 = index.items[1] + for_enumerate = ForEnumerate(builder, index, body_block, loop_exit, line, nested) + for_enumerate.init(lvalue1, lvalue2, expr.args[0]) + return for_enumerate + + elif ( + expr.callee.fullname == "builtins.zip" + and len(expr.args) >= 2 + and set(expr.arg_kinds) == {ARG_POS} + and isinstance(index, TupleExpr) + and len(index.items) == len(expr.args) + ): + # Special case "for x, y in zip(a, b)". 
+ for_zip = ForZip(builder, index, body_block, loop_exit, line, nested) + for_zip.init(index.items, expr.args) + return for_zip + + if ( + expr.callee.fullname == "builtins.reversed" + and len(expr.args) == 1 + and expr.arg_kinds == [ARG_POS] + and is_sequence_rprimitive(builder.node_type(expr.args[0])) + ): + # Special case "for x in reversed()". + expr_reg = builder.accept(expr.args[0]) + target_type = builder.get_sequence_type(expr) + + for_list = ForSequence(builder, index, body_block, loop_exit, line, nested) + for_list.init(expr_reg, target_type, reverse=True) + return for_list + if isinstance(expr, CallExpr) and isinstance(expr.callee, MemberExpr) and not expr.args: + # Special cases for dictionary iterator methods, like dict.items(). + rtype = builder.node_type(expr.callee.expr) + if is_dict_rprimitive(rtype) and expr.callee.name in ("keys", "values", "items"): + expr_reg = builder.accept(expr.callee.expr) + for_dict_type: type[ForGenerator] | None = None + if expr.callee.name == "keys": + target_type = builder.get_dict_key_type(expr.callee.expr) + for_dict_type = ForDictionaryKeys + elif expr.callee.name == "values": + target_type = builder.get_dict_value_type(expr.callee.expr) + for_dict_type = ForDictionaryValues + else: + target_type = builder.get_dict_item_type(expr.callee.expr) + for_dict_type = ForDictionaryItems + for_dict_gen = for_dict_type(builder, index, body_block, loop_exit, line, nested) + for_dict_gen.init(expr_reg, target_type) + return for_dict_gen + + iterable_expr_reg: Value | None = None + if isinstance(expr, SetExpr): + # Special case "for x in ". + from mypyc.irbuild.expression import precompute_set_literal + + set_literal = precompute_set_literal(builder, expr) + if set_literal is not None: + iterable_expr_reg = set_literal + + # Default to a generic for loop. + if iterable_expr_reg is None: + iterable_expr_reg = builder.accept(expr) + + it = iterable_expr_reg.type + for_obj: ForNativeGenerator | ForIterable + if isinstance(it, RInstance) and it.class_ir.has_method(GENERATOR_HELPER_NAME): + # Directly call generator object methods if iterating over a native generator. + for_obj = ForNativeGenerator(builder, index, body_block, loop_exit, line, nested) + else: + # Generic implementation that works of arbitrary iterables. + for_obj = ForIterable(builder, index, body_block, loop_exit, line, nested) + item_type = builder._analyze_iterable_item_type(expr) + item_rtype = builder.type_to_rtype(item_type) + for_obj.init(iterable_expr_reg, item_rtype) + return for_obj + + +class ForGenerator: + """Abstract base class for generating for loops.""" + + def __init__( + self, + builder: IRBuilder, + index: Lvalue, + body_block: BasicBlock, + loop_exit: BasicBlock, + line: int, + nested: bool, + ) -> None: + self.builder = builder + self.index = index + self.body_block = body_block + self.line = line + # Some for loops need a cleanup block that we execute at exit. We + # create a cleanup block if needed. However, if we are generating a for + # loop for a nested iterator, such as "e" in "enumerate(e)", the + # outermost generator should generate the cleanup block -- we don't + # need to do it here. + if self.need_cleanup() and not nested: + # Create a new block to handle cleanup after loop exit. + self.loop_exit = BasicBlock() + else: + # Just use the existing loop exit block. 
+ self.loop_exit = loop_exit + + def need_cleanup(self) -> bool: + """If this returns true, we need post-loop cleanup.""" + return False + + def add_cleanup(self, exit_block: BasicBlock) -> None: + """Add post-loop cleanup, if needed.""" + if self.need_cleanup(): + self.builder.activate_block(self.loop_exit) + self.gen_cleanup() + self.builder.goto(exit_block) + + def gen_condition(self) -> None: + """Generate check for loop exit (e.g. exhaustion of iteration).""" + + def begin_body(self) -> None: + """Generate ops at the beginning of the body (if needed).""" + + def gen_step(self) -> None: + """Generate stepping to the next item (if needed).""" + + def gen_cleanup(self) -> None: + """Generate post-loop cleanup (if needed).""" + + def load_len(self, expr: Value | AssignmentTarget) -> Value: + """A helper to get collection length, used by several subclasses.""" + return self.builder.builder.builtin_len( + self.builder.read(expr, self.line), self.line, use_pyssize_t=True + ) + + +class ForIterable(ForGenerator): + """Generate IR for a for loop over an arbitrary iterable (the general case).""" + + def need_cleanup(self) -> bool: + # Create a new cleanup block for when the loop is finished. + return True + + def init(self, expr_reg: Value, target_type: RType) -> None: + # Define targets to contain the expression, along with the iterator that will be used + # for the for-loop. If we are inside of a generator function, spill these into the + # environment class. + builder = self.builder + iter_reg = builder.primitive_op(iter_op, [expr_reg], self.line) + builder.maybe_spill(expr_reg) + self.iter_target = builder.maybe_spill(iter_reg) + self.target_type = target_type + + def gen_condition(self) -> None: + # We call __next__ on the iterator and check to see if the return value + # is NULL, which signals either the end of the Iterable being traversed + # or an exception being raised. Note that Branch.IS_ERROR checks only + # for NULL (an exception does not necessarily have to be raised). + builder = self.builder + line = self.line + self.next_reg = builder.call_c(next_op, [builder.read(self.iter_target, line)], line) + builder.add(Branch(self.next_reg, self.loop_exit, self.body_block, Branch.IS_ERROR)) + + def begin_body(self) -> None: + # Assign the value obtained from __next__ to the + # lvalue so that it can be referenced by code in the body of the loop. + builder = self.builder + line = self.line + # We unbox here so that iterating with tuple unpacking generates a tuple based + # unpack instead of an iterator based one. + next_reg = builder.coerce(self.next_reg, self.target_type, line) + builder.assign(builder.get_assignment_target(self.index), next_reg, line) + + def gen_step(self) -> None: + # Nothing to do here, since we get the next item as part of gen_condition(). + pass + + def gen_cleanup(self) -> None: + # We set the branch to go here if the conditional evaluates to true. If + # an exception was raised during the loop, then err_reg will be set to + # True. If no_err_occurred_op returns False, then the exception will be + # propagated using the ERR_FALSE flag. + self.builder.call_c(no_err_occurred_op, [], self.line) + + +class ForNativeGenerator(ForGenerator): + """Generate IR for a for loop over a native generator.""" + + def need_cleanup(self) -> bool: + # Create a new cleanup block for when the loop is finished. + return True + + def init(self, expr_reg: Value, target_type: RType) -> None: + # Define target to contains the generator expression. It's also the iterator. 
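+        # (Roughly: iterating a compiled generator calls its generated helper method
+        # directly, and exhaustion is reported through a return-value slot instead of
+        # raising StopIteration -- see gen_condition below.)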
+ # If we are inside a generator function, spill these into the environment class. + builder = self.builder + self.iter_target = builder.maybe_spill(expr_reg) + self.target_type = target_type + + def gen_condition(self) -> None: + builder = self.builder + line = self.line + self.return_value = Register(object_rprimitive) + err = builder.add(LoadErrorValue(object_rprimitive, undefines=True)) + builder.assign(self.return_value, err, line) + + # Call generated generator helper method, passing a PyObject ** as the final + # argument that will be used to store the return value in the return value + # register. We ignore the return value but the presence of a return value + # indicates that the generator has finished. This is faster than raising + # and catching StopIteration, which is the non-native way of doing this. + ptr = builder.add(LoadAddress(object_pointer_rprimitive, self.return_value)) + nn = builder.none_object() + helper_call = MethodCall( + builder.read(self.iter_target), GENERATOR_HELPER_NAME, [nn, nn, nn, nn, ptr], line + ) + # We provide custom handling for error values. + helper_call.error_kind = ERR_NEVER + + self.next_reg = builder.add(helper_call) + builder.add(Branch(self.next_reg, self.loop_exit, self.body_block, Branch.IS_ERROR)) + + def begin_body(self) -> None: + # Assign the value obtained from the generator helper method to the + # lvalue so that it can be referenced by code in the body of the loop. + builder = self.builder + line = self.line + # We unbox here so that iterating with tuple unpacking generates a tuple based + # unpack instead of an iterator based one. + next_reg = builder.coerce(self.next_reg, self.target_type, line) + builder.assign(builder.get_assignment_target(self.index), next_reg, line) + + def gen_step(self) -> None: + # Nothing to do here, since we get the next item as part of gen_condition(). + pass + + def gen_cleanup(self) -> None: + # If return value is NULL (it wasn't assigned to by the generator helper method), + # an exception was raised that we need to propagate. + self.builder.primitive_op(propagate_if_error_op, [self.return_value], self.line) + + +class ForAsyncIterable(ForGenerator): + """Generate IR for an async for loop.""" + + def init(self, expr_reg: Value, target_type: RType) -> None: + # Define targets to contain the expression, along with the + # iterator that will be used for the for-loop. We are inside + # of a generator function, so we will spill these into + # environment class. + builder = self.builder + iter_reg = builder.call_c(aiter_op, [expr_reg], self.line) + builder.maybe_spill(expr_reg) + self.iter_target = builder.maybe_spill(iter_reg) + self.target_type = target_type + self.stop_reg = Register(bool_rprimitive) + + def gen_condition(self) -> None: + # This does the test and fetches the next value + # try: + # TARGET = await type(iter).__anext__(iter) + # stop = False + # except StopAsyncIteration: + # stop = True + # + # What a pain. + # There are optimizations available here if we punch through some abstractions. 
+ + from mypyc.irbuild.statement import emit_await, transform_try_except + + builder = self.builder + line = self.line + + def except_match() -> Value: + addr = builder.add(LoadAddress(pointer_rprimitive, stop_async_iteration_op.src, line)) + return builder.add(LoadMem(stop_async_iteration_op.type, addr, borrow=True)) + + def try_body() -> None: + awaitable = builder.call_c(anext_op, [builder.read(self.iter_target)], line) + self.next_reg = emit_await(builder, awaitable, line) + builder.assign(self.stop_reg, builder.false(), -1) + + def except_body() -> None: + builder.assign(self.stop_reg, builder.true(), line) + + transform_try_except( + builder, try_body, [((except_match, line), None, except_body)], None, line + ) + + builder.add(Branch(self.stop_reg, self.loop_exit, self.body_block, Branch.BOOL)) + + def begin_body(self) -> None: + # Assign the value obtained from await __anext__ to the + # lvalue so that it can be referenced by code in the body of the loop. + builder = self.builder + line = self.line + # We unbox here so that iterating with tuple unpacking generates a tuple based + # unpack instead of an iterator based one. + next_reg = builder.coerce(self.next_reg, self.target_type, line) + builder.assign(builder.get_assignment_target(self.index), next_reg, line) + + def gen_step(self) -> None: + # Nothing to do here, since we get the next item as part of gen_condition(). + pass + + +def unsafe_index(builder: IRBuilder, target: Value, index: Value, line: int) -> Value: + """Emit a potentially unsafe index into a target.""" + # This doesn't really fit nicely into any of our data-driven frameworks + # since we want to use __getitem__ if we don't have an unsafe version, + # so we just check manually. + if is_list_rprimitive(target.type): + return builder.primitive_op(list_get_item_unsafe_op, [target, index], line) + elif is_tuple_rprimitive(target.type): + return builder.call_c(tuple_get_item_unsafe_op, [target, index], line) + elif is_str_rprimitive(target.type): + return builder.call_c(str_get_item_unsafe_op, [target, index], line) + else: + return builder.gen_method_call(target, "__getitem__", [index], None, line) + + +class ForSequence(ForGenerator): + """Generate optimized IR for a for loop over a sequence. + + Supports iterating in both forward and reverse. + """ + + length_reg: Value | AssignmentTarget | None + + def init( + self, expr_reg: Value, target_type: RType, reverse: bool, length: Value | None = None + ) -> None: + assert is_sequence_rprimitive(expr_reg.type), (expr_reg, expr_reg.type) + builder = self.builder + # Record a Value indicating the length of the sequence, if known at compile time. + self.length = length + self.reverse = reverse + # Define target to contain the expression, along with the index that will be used + # for the for-loop. If we are inside of a generator function, spill these into the + # environment class. + self.expr_target = builder.maybe_spill(expr_reg) + if is_immutable_rprimitive(expr_reg.type): + # If the expression is an immutable type, we can load the length just once. + self.length_reg = builder.maybe_spill(self.length or self.load_len(self.expr_target)) + else: + # Otherwise, even if the length is known, we must recalculate the length + # at every iteration for compatibility with python semantics. 
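+            # For example, `for x in lst: lst.append(x)` keeps picking up the newly
+            # appended items in CPython, so the length is re-read on every pass
+            # (see gen_condition).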
+ self.length_reg = None + if not reverse: + index_reg: Value = Integer(0, c_pyssize_t_rprimitive) + else: + if self.length_reg is not None: + len_val = builder.read(self.length_reg) + else: + len_val = self.load_len(self.expr_target) + index_reg = builder.builder.int_sub(len_val, 1) + self.index_target = builder.maybe_spill_assignable(index_reg) + self.target_type = target_type + + def gen_condition(self) -> None: + builder = self.builder + line = self.line + if self.reverse: + # If we are iterating in reverse order, we obviously need + # to check that the index is still positive. Somewhat less + # obviously we still need to check against the length, + # since it could shrink out from under us. + comparison = builder.binary_op( + builder.read(self.index_target, line), Integer(0), ">=", line + ) + second_check = BasicBlock() + builder.add_bool_branch(comparison, second_check, self.loop_exit) + builder.activate_block(second_check) + if self.length_reg is None: + # For compatibility with python semantics we recalculate the length + # at every iteration. + len_reg = self.load_len(self.expr_target) + else: + # (unless input is immutable type). + len_reg = builder.read(self.length_reg, line) + comparison = builder.binary_op(builder.read(self.index_target, line), len_reg, "<", line) + builder.add_bool_branch(comparison, self.body_block, self.loop_exit) + + def begin_body(self) -> None: + builder = self.builder + line = self.line + # Read the next list item. + value_box = unsafe_index( + builder, + builder.read(self.expr_target, line), + builder.read(self.index_target, line), + line, + ) + assert value_box + # We coerce to the type of list elements here so that + # iterating with tuple unpacking generates a tuple based + # unpack instead of an iterator based one. + builder.assign( + builder.get_assignment_target(self.index), + builder.coerce(value_box, self.target_type, line), + line, + ) + + def gen_step(self) -> None: + # Step to the next item. + builder = self.builder + line = self.line + step = 1 if not self.reverse else -1 + add = builder.builder.int_add(builder.read(self.index_target, line), step) + builder.assign(self.index_target, add, line) + + +class ForDictionaryCommon(ForGenerator): + """Generate optimized IR for a for loop over dictionary keys/values. + + The logic is pretty straightforward, we use PyDict_Next() API wrapped in + a tuple, so that we can modify only a single register. The layout of the tuple: + * f0: are there more items (bool) + * f1: current offset (int) + * f2: next key (object) + * f3: next value (object) + For more info see https://docs.python.org/3/c-api/dict.html#c.PyDict_Next. + + Note that for subclasses we fall back to generic PyObject_GetIter() logic, + since they may override some iteration methods in subtly incompatible manner. + The fallback logic is implemented in CPy.h via dynamic type check. + """ + + dict_next_op: ClassVar[CFunctionDescription] + dict_iter_op: ClassVar[CFunctionDescription] + + def need_cleanup(self) -> bool: + # Technically, a dict subclass can raise an unrelated exception + # in __next__(), so we need this. + return True + + def init(self, expr_reg: Value, target_type: RType) -> None: + builder = self.builder + self.target_type = target_type + + # We add some variables to environment class, so they can be read across yield. 
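+        # Roughly: `offset` starts at 0 and is advanced by the PyDict_Next wrapper on
+        # each step, while `size` is recorded once up front so gen_step() can detect
+        # the dictionary changing size during iteration.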
+ self.expr_target = builder.maybe_spill(expr_reg) + offset = Integer(0) + self.offset_target = builder.maybe_spill_assignable(offset) + self.size = builder.maybe_spill(self.load_len(self.expr_target)) + + # For dict class (not a subclass) this is the dictionary itself. + iter_reg = builder.call_c(self.dict_iter_op, [expr_reg], self.line) + self.iter_target = builder.maybe_spill(iter_reg) + + def gen_condition(self) -> None: + """Get next key/value pair, set new offset, and check if we should continue.""" + builder = self.builder + line = self.line + self.next_tuple = self.builder.call_c( + self.dict_next_op, + [builder.read(self.iter_target, line), builder.read(self.offset_target, line)], + line, + ) + + # Do this here instead of in gen_step() to minimize variables in environment. + new_offset = builder.add(TupleGet(self.next_tuple, 1, line)) + builder.assign(self.offset_target, new_offset, line) + + should_continue = builder.add(TupleGet(self.next_tuple, 0, line)) + builder.add(Branch(should_continue, self.body_block, self.loop_exit, Branch.BOOL)) + + def gen_step(self) -> None: + """Check that dictionary didn't change size during iteration. + + Raise RuntimeError if it is not the case to match CPython behavior. + """ + builder = self.builder + line = self.line + # Technically, we don't need a new primitive for this, but it is simpler. + builder.call_c( + dict_check_size_op, + [builder.read(self.expr_target, line), builder.read(self.size, line)], + line, + ) + + def gen_cleanup(self) -> None: + # Same as for generic ForIterable. + self.builder.call_c(no_err_occurred_op, [], self.line) + + +class ForDictionaryKeys(ForDictionaryCommon): + """Generate optimized IR for a for loop over dictionary keys.""" + + dict_next_op = dict_next_key_op + dict_iter_op = dict_key_iter_op + + def begin_body(self) -> None: + builder = self.builder + line = self.line + + # Key is stored at the third place in the tuple. + key = builder.add(TupleGet(self.next_tuple, 2, line)) + builder.assign( + builder.get_assignment_target(self.index), + builder.coerce(key, self.target_type, line), + line, + ) + + +class ForDictionaryValues(ForDictionaryCommon): + """Generate optimized IR for a for loop over dictionary values.""" + + dict_next_op = dict_next_value_op + dict_iter_op = dict_value_iter_op + + def begin_body(self) -> None: + builder = self.builder + line = self.line + + # Value is stored at the third place in the tuple. + value = builder.add(TupleGet(self.next_tuple, 2, line)) + builder.assign( + builder.get_assignment_target(self.index), + builder.coerce(value, self.target_type, line), + line, + ) + + +class ForDictionaryItems(ForDictionaryCommon): + """Generate optimized IR for a for loop over dictionary items.""" + + dict_next_op = dict_next_item_op + dict_iter_op = dict_item_iter_op + + def begin_body(self) -> None: + builder = self.builder + line = self.line + + key = builder.add(TupleGet(self.next_tuple, 2, line)) + value = builder.add(TupleGet(self.next_tuple, 3, line)) + + # Coerce just in case e.g. key is itself a tuple to be unpacked. + assert isinstance(self.target_type, RTuple), self.target_type + key = builder.coerce(key, self.target_type.types[0], line) + value = builder.coerce(value, self.target_type.types[1], line) + + target = builder.get_assignment_target(self.index) + if isinstance(target, AssignmentTargetTuple): + # Simpler code for common case: for k, v in d.items(). 
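+            # Here the key and value are assigned straight into the two targets; any
+            # other target shape falls back to building a (key, value) tuple below.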
+ if len(target.items) != 2: + builder.error("Expected a pair for dict item iteration", line) + builder.assign(target.items[0], key, line) + builder.assign(target.items[1], value, line) + else: + rvalue = builder.add(TupleSet([key, value], line)) + builder.assign(target, rvalue, line) + + +class ForRange(ForGenerator): + """Generate optimized IR for a for loop over an integer range.""" + + def init(self, start_reg: Value, end_reg: Value, step: int) -> None: + builder = self.builder + self.start_reg = start_reg + self.end_reg = end_reg + self.step = step + self.end_target = builder.maybe_spill(end_reg) + if is_short_int_rprimitive(start_reg.type) and is_short_int_rprimitive(end_reg.type): + index_type: RType = short_int_rprimitive + elif is_fixed_width_rtype(end_reg.type): + index_type = end_reg.type + else: + index_type = int_rprimitive + index_reg = Register(index_type) + builder.assign(index_reg, start_reg, -1) + self.index_reg = builder.maybe_spill_assignable(index_reg) + # Initialize loop index to 0. Assert that the index target is assignable. + self.index_target: Register | AssignmentTarget = builder.get_assignment_target(self.index) + builder.assign(self.index_target, builder.read(self.index_reg, self.line), self.line) + + def gen_condition(self) -> None: + builder = self.builder + line = self.line + # Add loop condition check. + cmp = "<" if self.step > 0 else ">" + comparison = builder.binary_op( + builder.read(self.index_reg, line), builder.read(self.end_target, line), cmp, line + ) + builder.add_bool_branch(comparison, self.body_block, self.loop_exit) + + def gen_step(self) -> None: + builder = self.builder + line = self.line + + # Increment index register. If the range is known to fit in short ints, use + # short ints. + if is_short_int_rprimitive(self.start_reg.type) and is_short_int_rprimitive( + self.end_reg.type + ): + new_val = builder.int_op( + short_int_rprimitive, + builder.read(self.index_reg, line), + Integer(self.step), + IntOp.ADD, + line, + ) + + else: + new_val = builder.binary_op( + builder.read(self.index_reg, line), Integer(self.step), "+", line + ) + builder.assign(self.index_reg, new_val, line) + builder.assign(self.index_target, new_val, line) + + +class ForInfiniteCounter(ForGenerator): + """Generate optimized IR for a for loop counting from 0 to infinity.""" + + def init(self) -> None: + builder = self.builder + # Create a register to store the state of the loop index and + # initialize this register along with the loop index to 0. + zero = Integer(0) + self.index_reg = builder.maybe_spill_assignable(zero) + self.index_target: Register | AssignmentTarget = builder.get_assignment_target(self.index) + + def gen_step(self) -> None: + builder = self.builder + line = self.line + # We can safely assume that the integer is short, since we are not going to wrap + # around a 63-bit integer. + # NOTE: This would be questionable if short ints could be 32 bits. + new_val = builder.int_op( + short_int_rprimitive, builder.read(self.index_reg, line), Integer(1), IntOp.ADD, line + ) + builder.assign(self.index_reg, new_val, line) + + def begin_body(self) -> None: + self.builder.assign(self.index_target, self.builder.read(self.index_reg), self.line) + + +class ForEnumerate(ForGenerator): + """Generate optimized IR for a for loop of form "for i, x in enumerate(it)".""" + + def need_cleanup(self) -> bool: + # The wrapped for loop might need cleanup. This might generate a + # redundant cleanup block, but that's okay. 
+ return True + + def init(self, index1: Lvalue, index2: Lvalue, expr: Expression) -> None: + # Count from 0 to infinity (for the index lvalue). + self.index_gen = ForInfiniteCounter( + self.builder, index1, self.body_block, self.loop_exit, self.line, nested=True + ) + self.index_gen.init() + # Iterate over the actual iterable. + self.main_gen = make_for_loop_generator( + self.builder, index2, expr, self.body_block, self.loop_exit, self.line, nested=True + ) + + def gen_condition(self) -> None: + # No need for a check for the index generator, since it's unconditional. + self.main_gen.gen_condition() + + def begin_body(self) -> None: + self.index_gen.begin_body() + self.main_gen.begin_body() + + def gen_step(self) -> None: + self.index_gen.gen_step() + self.main_gen.gen_step() + + def gen_cleanup(self) -> None: + self.index_gen.gen_cleanup() + self.main_gen.gen_cleanup() + + +class ForZip(ForGenerator): + """Generate IR for a for loop of form `for x, ... in zip(a, ...)`.""" + + def need_cleanup(self) -> bool: + # The wrapped for loops might need cleanup. We might generate a + # redundant cleanup block, but that's okay. + return True + + def init(self, indexes: list[Lvalue], exprs: list[Expression]) -> None: + assert len(indexes) == len(exprs) + # Condition check will require multiple basic blocks, since there will be + # multiple conditions to check. + self.cond_blocks = [BasicBlock() for _ in range(len(indexes) - 1)] + [self.body_block] + self.gens: list[ForGenerator] = [] + for index, expr, next_block in zip(indexes, exprs, self.cond_blocks): + gen = make_for_loop_generator( + self.builder, index, expr, next_block, self.loop_exit, self.line, nested=True + ) + self.gens.append(gen) + + def gen_condition(self) -> None: + for i, gen in enumerate(self.gens): + gen.gen_condition() + if i < len(self.gens) - 1: + self.builder.activate_block(self.cond_blocks[i]) + + def begin_body(self) -> None: + for gen in self.gens: + gen.begin_body() + + def gen_step(self) -> None: + for gen in self.gens: + gen.gen_step() + + def gen_cleanup(self) -> None: + for gen in self.gens: + gen.gen_cleanup() + + +def get_expr_length(builder: IRBuilder, expr: Expression) -> int | None: + if isinstance(expr, (StrExpr, BytesExpr)): + return len(expr.value) + elif isinstance(expr, (ListExpr, TupleExpr)): + # if there are no star expressions, or we know the length of them, + # we know the length of the expression + stars = [get_expr_length(builder, i) for i in expr.items if isinstance(i, StarExpr)] + if None not in stars: + other = sum(not isinstance(i, StarExpr) for i in expr.items) + return other + sum(stars) # type: ignore [arg-type] + elif isinstance(expr, StarExpr): + return get_expr_length(builder, expr.expr) + elif ( + isinstance(expr, RefExpr) + and isinstance(expr.node, Var) + and expr.node.is_final + and isinstance(expr.node.final_value, str) + and expr.node.has_explicit_value + ): + return len(expr.node.final_value) + # TODO: extend this, passing length of listcomp and genexp should have worthwhile + # performance boost and can be (sometimes) figured out pretty easily. set and dict + # comps *can* be done as well but will need special logic to consider the possibility + # of key conflicts. Range, enumerate, zip are all simple logic. 
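+    # Illustrative examples of what is computed above: "abc" -> 3; [1, 2, *xs] -> 2
+    # plus len(xs) if that is itself known; a Final str variable with an explicit
+    # value contributes the length of its literal.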
+ + # we might still be able to get the length directly from the type + rtype = builder.node_type(expr) + if isinstance(rtype, RTuple): + return len(rtype.types) + return None + + +def get_expr_length_value( + builder: IRBuilder, expr: Expression, expr_reg: Value, line: int, use_pyssize_t: bool +) -> Value: + rtype = builder.node_type(expr) + assert is_sequence_rprimitive(rtype) or isinstance(rtype, RTuple), rtype + length = get_expr_length(builder, expr) + if length is None: + # We cannot compute the length at compile time, so we will fetch it. + return builder.builder.builtin_len(expr_reg, line, use_pyssize_t=use_pyssize_t) + # The expression result is known at compile time, so we can use a constant. + return Integer(length, c_pyssize_t_rprimitive if use_pyssize_t else short_int_rprimitive) diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/format_str_tokenizer.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/format_str_tokenizer.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..56d4ced Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/format_str_tokenizer.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/format_str_tokenizer.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/format_str_tokenizer.py new file mode 100644 index 0000000..5a35900 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/format_str_tokenizer.py @@ -0,0 +1,252 @@ +"""Tokenizers for three string formatting methods""" + +from __future__ import annotations + +from enum import Enum, unique +from typing import Final + +from mypy.checkstrformat import ( + ConversionSpecifier, + parse_conversion_specifiers, + parse_format_value, +) +from mypy.errors import Errors +from mypy.messages import MessageBuilder +from mypy.nodes import Context, Expression, StrExpr +from mypy.options import Options +from mypyc.ir.ops import Integer, Value +from mypyc.ir.rtypes import ( + c_pyssize_t_rprimitive, + is_bytes_rprimitive, + is_int_rprimitive, + is_short_int_rprimitive, + is_str_rprimitive, +) +from mypyc.irbuild.builder import IRBuilder +from mypyc.primitives.bytes_ops import bytes_build_op +from mypyc.primitives.int_ops import int_to_str_op +from mypyc.primitives.str_ops import str_build_op, str_op + + +@unique +class FormatOp(Enum): + """FormatOp represents conversion operations of string formatting during + compile time. + + Compare to ConversionSpecifier, FormatOp has fewer attributes. + For example, to mark a conversion from any object to string, + ConversionSpecifier may have several representations, like '%s', '{}' + or '{:{}}'. However, there would only exist one corresponding FormatOp. + """ + + STR = "s" + INT = "d" + BYTES = "b" + + +def generate_format_ops(specifiers: list[ConversionSpecifier]) -> list[FormatOp] | None: + """Convert ConversionSpecifier to FormatOp. + + Different ConversionSpecifiers may share a same FormatOp. 
+ """ + format_ops = [] + for spec in specifiers: + # TODO: Match specifiers instead of using whole_seq + if spec.whole_seq == "%s" or spec.whole_seq == "{:{}}": + format_op = FormatOp.STR + elif spec.whole_seq == "%d": + format_op = FormatOp.INT + elif spec.whole_seq == "%b": + format_op = FormatOp.BYTES + elif spec.whole_seq: + return None + else: + format_op = FormatOp.STR + format_ops.append(format_op) + return format_ops + + +def tokenizer_printf_style(format_str: str) -> tuple[list[str], list[FormatOp]] | None: + """Tokenize a printf-style format string using regex. + + Return: + A list of string literals and a list of FormatOps. + """ + literals: list[str] = [] + specifiers: list[ConversionSpecifier] = parse_conversion_specifiers(format_str) + format_ops = generate_format_ops(specifiers) + if format_ops is None: + return None + + last_end = 0 + for spec in specifiers: + cur_start = spec.start_pos + literals.append(format_str[last_end:cur_start]) + last_end = cur_start + len(spec.whole_seq) + literals.append(format_str[last_end:]) + + return literals, format_ops + + +# The empty Context as an argument for parse_format_value(). +# It wouldn't be used since the code has passed the type-checking. +EMPTY_CONTEXT: Final = Context() + + +def tokenizer_format_call(format_str: str) -> tuple[list[str], list[FormatOp]] | None: + """Tokenize a str.format() format string. + + The core function parse_format_value() is shared with mypy. + With these specifiers, we then parse the literal substrings + of the original format string and convert `ConversionSpecifier` + to `FormatOp`. + + Return: + A list of string literals and a list of FormatOps. The literals + are interleaved with FormatOps and the length of returned literals + should be exactly one more than FormatOps. + Return None if it cannot parse the string. + """ + # Creates an empty MessageBuilder here. + # It wouldn't be used since the code has passed the type-checking. + specifiers = parse_format_value( + format_str, EMPTY_CONTEXT, MessageBuilder(Errors(Options()), {}) + ) + if specifiers is None: + return None + format_ops = generate_format_ops(specifiers) + if format_ops is None: + return None + + literals: list[str] = [] + last_end = 0 + for spec in specifiers: + # Skip { and } + literals.append(format_str[last_end : spec.start_pos - 1]) + last_end = spec.start_pos + len(spec.whole_seq) + 1 + literals.append(format_str[last_end:]) + # Deal with escaped {{ + literals = [x.replace("{{", "{").replace("}}", "}") for x in literals] + + return literals, format_ops + + +def convert_format_expr_to_str( + builder: IRBuilder, format_ops: list[FormatOp], exprs: list[Expression], line: int +) -> list[Value] | None: + """Convert expressions into string literal objects with the guidance + of FormatOps. Return None when fails.""" + if len(format_ops) != len(exprs): + return None + + converted = [] + for x, format_op in zip(exprs, format_ops): + node_type = builder.node_type(x) + if format_op == FormatOp.STR: + if is_str_rprimitive(node_type) or isinstance( + x, StrExpr + ): # NOTE: why does mypyc think our fake StrExprs are not str rprimitives? 
+ var_str = builder.accept(x) + elif is_int_rprimitive(node_type) or is_short_int_rprimitive(node_type): + var_str = builder.primitive_op(int_to_str_op, [builder.accept(x)], line) + else: + var_str = builder.primitive_op(str_op, [builder.accept(x)], line) + elif format_op == FormatOp.INT: + if is_int_rprimitive(node_type) or is_short_int_rprimitive(node_type): + var_str = builder.primitive_op(int_to_str_op, [builder.accept(x)], line) + else: + return None + else: + return None + converted.append(var_str) + return converted + + +def join_formatted_strings( + builder: IRBuilder, literals: list[str] | None, substitutions: list[Value], line: int +) -> Value: + """Merge the list of literals and the list of substitutions + alternatively using 'str_build_op'. + + `substitutions` is the result value of formatting conversions. + + If the `literals` is set to None, we simply join the substitutions; + Otherwise, the `literals` is the literal substrings of the original + format string and its length should be exactly one more than + substitutions. + + For example: + (1) 'This is a %s and the value is %d' + -> literals: ['This is a ', ' and the value is', ''] + (2) '{} and the value is {}' + -> literals: ['', ' and the value is', ''] + """ + # The first parameter for str_build_op is the total size of + # the following PyObject* + result_list: list[Value] = [Integer(0, c_pyssize_t_rprimitive)] + + if literals is not None: + for a, b in zip(literals, substitutions): + if a: + result_list.append(builder.load_str(a)) + result_list.append(b) + if literals[-1]: + result_list.append(builder.load_str(literals[-1])) + else: + result_list.extend(substitutions) + + # Special case for empty string and literal string + if len(result_list) == 1: + return builder.load_str("") + if not substitutions and len(result_list) == 2: + return result_list[1] + + result_list[0] = Integer(len(result_list) - 1, c_pyssize_t_rprimitive) + return builder.call_c(str_build_op, result_list, line) + + +def convert_format_expr_to_bytes( + builder: IRBuilder, format_ops: list[FormatOp], exprs: list[Expression], line: int +) -> list[Value] | None: + """Convert expressions into bytes literal objects with the guidance + of FormatOps. 
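+    In bytes formatting the 's' conversion is an alias of 'b', so both
+    FormatOp.STR and FormatOp.BYTES accept only operands that are already
+    bytes; anything else is rejected.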
Return None when fails.""" + if len(format_ops) != len(exprs): + return None + + converted = [] + for x, format_op in zip(exprs, format_ops): + node_type = builder.node_type(x) + # conversion type 's' is an alias of 'b' in bytes formatting + if format_op == FormatOp.BYTES or format_op == FormatOp.STR: + if is_bytes_rprimitive(node_type): + var_bytes = builder.accept(x) + else: + return None + else: + return None + converted.append(var_bytes) + return converted + + +def join_formatted_bytes( + builder: IRBuilder, literals: list[str], substitutions: list[Value], line: int +) -> Value: + """Merge the list of literals and the list of substitutions + alternatively using 'bytes_build_op'.""" + result_list: list[Value] = [Integer(0, c_pyssize_t_rprimitive)] + + for a, b in zip(literals, substitutions): + if a: + result_list.append(builder.load_bytes_from_str_literal(a)) + result_list.append(b) + if literals[-1]: + result_list.append(builder.load_bytes_from_str_literal(literals[-1])) + + # Special case for empty bytes and literal + if len(result_list) == 1: + return builder.load_bytes_from_str_literal("") + if not substitutions and len(result_list) == 2: + return result_list[1] + + result_list[0] = Integer(len(result_list) - 1, c_pyssize_t_rprimitive) + return builder.call_c(bytes_build_op, result_list, line) diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/function.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/function.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..cf5fabc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/function.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/function.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/function.py new file mode 100644 index 0000000..738d19e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/function.py @@ -0,0 +1,1183 @@ +"""Transform mypy AST functions to IR (and related things). + +Normal functions are translated into a list of basic blocks +containing various IR ops (defined in mypyc.ir.ops). + +This also deals with generators, async functions and nested +functions. All of these are transformed into callable classes. These +have a custom __call__ method that implements the call, and state, such +as an environment containing non-local variables, is stored in the +instance of the callable class. 
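+
+For example, a nested function such as
+
+    def outer() -> None:
+        x = 1
+        def inner() -> None:
+            print(x)
+
+is (roughly) compiled into a callable class for 'inner' whose instances
+hold a reference to an environment object storing 'x'; see the diagram
+in gen_func_item below for the classes that get generated.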
+""" + +from __future__ import annotations + +from collections import defaultdict +from collections.abc import Sequence +from typing import NamedTuple + +from mypy.nodes import ( + ArgKind, + ClassDef, + Decorator, + FuncBase, + FuncDef, + FuncItem, + LambdaExpr, + OverloadedFuncDef, + TypeInfo, + Var, +) +from mypy.types import CallableType, Type, UnboundType, get_proper_type +from mypyc.common import FAST_PREFIX, LAMBDA_NAME, PROPSET_PREFIX, SELF_NAME +from mypyc.ir.class_ir import ClassIR, NonExtClassInfo +from mypyc.ir.func_ir import ( + FUNC_CLASSMETHOD, + FUNC_NORMAL, + FUNC_STATICMETHOD, + FuncDecl, + FuncIR, + FuncSignature, + RuntimeArg, +) +from mypyc.ir.ops import ( + BasicBlock, + ComparisonOp, + GetAttr, + Integer, + LoadAddress, + LoadLiteral, + Register, + Return, + SetAttr, + Unbox, + Unreachable, + Value, +) +from mypyc.ir.rtypes import ( + RInstance, + bool_rprimitive, + c_int_rprimitive, + dict_rprimitive, + int_rprimitive, + object_rprimitive, +) +from mypyc.irbuild.builder import IRBuilder, calculate_arg_defaults, gen_arg_defaults +from mypyc.irbuild.callable_class import ( + add_call_to_callable_class, + add_get_to_callable_class, + instantiate_callable_class, + setup_callable_class, +) +from mypyc.irbuild.context import FuncInfo, GeneratorClass +from mypyc.irbuild.env_class import ( + add_vars_to_env, + finalize_env_class, + load_env_registers, + setup_env_class, +) +from mypyc.irbuild.generator import gen_generator_func, gen_generator_func_body +from mypyc.irbuild.targets import AssignmentTarget +from mypyc.primitives.dict_ops import ( + dict_get_method_with_none, + dict_new_op, + exact_dict_set_item_op, +) +from mypyc.primitives.generic_ops import generic_getattr, generic_setattr, py_setattr_op +from mypyc.primitives.misc_ops import register_function +from mypyc.primitives.registry import builtin_names +from mypyc.sametype import is_same_method_signature, is_same_type + +# Top-level transform functions + + +def transform_func_def(builder: IRBuilder, fdef: FuncDef) -> None: + sig = builder.mapper.fdef_to_sig(fdef, builder.options.strict_dunders_typing) + func_ir, func_reg = gen_func_item(builder, fdef, fdef.name, sig) + + # If the function that was visited was a nested function, then either look it up in our + # current environment or define it if it was not already defined. + if func_reg: + builder.assign(get_func_target(builder, fdef), func_reg, fdef.line) + maybe_insert_into_registry_dict(builder, fdef) + builder.add_function(func_ir, fdef.line) + + +def transform_overloaded_func_def(builder: IRBuilder, o: OverloadedFuncDef) -> None: + # Handle regular overload case + assert o.impl + builder.accept(o.impl) + + +def transform_decorator(builder: IRBuilder, dec: Decorator) -> None: + sig = builder.mapper.fdef_to_sig(dec.func, builder.options.strict_dunders_typing) + func_ir, func_reg = gen_func_item(builder, dec.func, dec.func.name, sig) + decorated_func: Value | None = None + if func_reg: + decorated_func = load_decorated_func(builder, dec.func, func_reg) + builder.assign(get_func_target(builder, dec.func), decorated_func, dec.func.line) + # If the prebuild pass didn't put this function in the function to decorators map (for example + # if this is a registered singledispatch implementation with no other decorators), we should + # treat this function as a regular function, not a decorated function + elif dec.func in builder.fdefs_to_decorators: + # Obtain the function name in order to construct the name of the helper function. 
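+        # e.g. a fullname of the form "pkg.mod.func" yields "func" here.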
+ name = dec.func.fullname.split(".")[-1] + + # Load the callable object representing the non-decorated function, and decorate it. + orig_func = builder.load_global_str(name, dec.line) + decorated_func = load_decorated_func(builder, dec.func, orig_func) + + if decorated_func is not None: + # Set the callable object representing the decorated function as a global. + builder.call_c( + exact_dict_set_item_op, + [builder.load_globals_dict(), builder.load_str(dec.func.name), decorated_func], + decorated_func.line, + ) + + maybe_insert_into_registry_dict(builder, dec.func) + + builder.functions.append(func_ir) + + +def transform_lambda_expr(builder: IRBuilder, expr: LambdaExpr) -> Value: + typ = get_proper_type(builder.types[expr]) + assert isinstance(typ, CallableType), typ + + runtime_args = [] + for arg, arg_type in zip(expr.arguments, typ.arg_types): + arg.variable.type = arg_type + runtime_args.append( + RuntimeArg(arg.variable.name, builder.type_to_rtype(arg_type), arg.kind) + ) + ret_type = builder.type_to_rtype(typ.ret_type) + + fsig = FuncSignature(runtime_args, ret_type) + + fname = f"{LAMBDA_NAME}{builder.lambda_counter}" + builder.lambda_counter += 1 + func_ir, func_reg = gen_func_item(builder, expr, fname, fsig) + assert func_reg is not None + + builder.functions.append(func_ir) + return func_reg + + +# Internal functions + + +def gen_func_item( + builder: IRBuilder, + fitem: FuncItem, + name: str, + sig: FuncSignature, + cdef: ClassDef | None = None, + make_ext_method: bool = False, +) -> tuple[FuncIR, Value | None]: + """Generate and return the FuncIR for a given FuncDef. + + If the given FuncItem is a nested function, then we generate a + callable class representing the function and use that instead of + the actual function. if the given FuncItem contains a nested + function, then we generate an environment class so that inner + nested functions can access the environment of the given FuncDef. + + Consider the following nested function: + + def a() -> None: + def b() -> None: + def c() -> None: + return None + return None + return None + + The classes generated would look something like the following. + + has pointer to +-------+ + +--------------------------> | a_env | + | +-------+ + | ^ + | | has pointer to + +-------+ associated with +-------+ + | b_obj | -------------------> | b_env | + +-------+ +-------+ + ^ + | + +-------+ has pointer to | + | c_obj | --------------------------+ + +-------+ + """ + + # TODO: do something about abstract methods. + + func_reg: Value | None = None + + # We treat lambdas as always being nested because we always generate + # a class for lambdas, no matter where they are. (It would probably also + # work to special case toplevel lambdas and generate a non-class function.) 
+ is_nested = fitem in builder.nested_fitems or isinstance(fitem, LambdaExpr) + contains_nested = fitem in builder.encapsulating_funcs.keys() + is_decorated = fitem in builder.fdefs_to_decorators + is_singledispatch = fitem in builder.singledispatch_impls + in_non_ext = False + add_nested_funcs_to_env = has_nested_func_self_reference(builder, fitem) + class_name = None + if cdef: + ir = builder.mapper.type_to_ir[cdef.info] + in_non_ext = not ir.is_ext_class and not make_ext_method + class_name = cdef.name + + if is_singledispatch: + func_name = singledispatch_main_func_name(name) + else: + func_name = name + + fn_info = FuncInfo( + fitem=fitem, + name=func_name, + class_name=class_name, + namespace=gen_func_ns(builder), + is_nested=is_nested, + contains_nested=contains_nested, + is_decorated=is_decorated, + in_non_ext=in_non_ext, + add_nested_funcs_to_env=add_nested_funcs_to_env, + ) + is_generator = fn_info.is_generator + builder.enter(fn_info, ret_type=sig.ret_type) + + if is_generator: + fitem = builder.fn_info.fitem + assert isinstance(fitem, FuncDef), fitem + generator_class_ir = builder.mapper.fdef_to_generator[fitem] + builder.fn_info.generator_class = GeneratorClass(generator_class_ir) + + # Functions that contain nested functions need an environment class to store variables that + # are free in their nested functions. Generator functions need an environment class to + # store a variable denoting the next instruction to be executed when the __next__ function + # is called, along with all the variables inside the function itself. + if contains_nested or ( + is_generator and not builder.fn_info.can_merge_generator_and_env_classes() + ): + setup_env_class(builder) + + if is_nested or in_non_ext: + setup_callable_class(builder) + + if is_generator: + # First generate a function that just constructs and returns a generator object. + func_ir, func_reg = gen_generator_func( + builder, + lambda args, blocks, fn_info: gen_func_ir( + builder, args, blocks, sig, fn_info, cdef, is_singledispatch + ), + ) + + # Re-enter the FuncItem and visit the body of the function this time. + gen_generator_func_body(builder, fn_info, func_reg) + else: + func_ir, func_reg = gen_func_body(builder, sig, cdef, is_singledispatch) + + if is_singledispatch: + # add the generated main singledispatch function + builder.functions.append(func_ir) + # create the dispatch function + assert isinstance(fitem, FuncDef), fitem + return gen_dispatch_func_ir(builder, fitem, fn_info.name, name, sig) + + return func_ir, func_reg + + +def gen_func_body( + builder: IRBuilder, sig: FuncSignature, cdef: ClassDef | None, is_singledispatch: bool +) -> tuple[FuncIR, Value | None]: + load_env_registers(builder) + gen_arg_defaults(builder) + if builder.fn_info.contains_nested: + finalize_env_class(builder) + add_vars_to_env(builder) + builder.accept(builder.fn_info.fitem.body) + builder.maybe_add_implicit_return() + + # Hang on to the local symbol table for a while, since we use it + # to calculate argument defaults below. + symtable = builder.symtables[-1] + + args, _, blocks, ret_type, fn_info = builder.leave() + + func_ir, func_reg = gen_func_ir(builder, args, blocks, sig, fn_info, cdef, is_singledispatch) + + # Evaluate argument defaults in the surrounding scope, since we + # calculate them *once* when the function definition is evaluated. 
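+    # For example, given "def f(x: int = compute()) -> None: ...", compute()
+    # runs once when the "def" statement is executed (normal Python
+    # semantics), not on every call to f.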
+ calculate_arg_defaults(builder, fn_info, func_reg, symtable) + return func_ir, func_reg + + +def has_nested_func_self_reference(builder: IRBuilder, fitem: FuncItem) -> bool: + """Does a nested function contain a self-reference in its body? + + If a nested function only has references in the surrounding function, + we don't need to add it to the environment. + """ + if any(isinstance(sym, FuncBase) for sym in builder.free_variables.get(fitem, set())): + return True + return any( + has_nested_func_self_reference(builder, nested) + for nested in builder.encapsulating_funcs.get(fitem, []) + ) + + +def gen_func_ir( + builder: IRBuilder, + args: list[Register], + blocks: list[BasicBlock], + sig: FuncSignature, + fn_info: FuncInfo, + cdef: ClassDef | None, + is_singledispatch_main_func: bool = False, +) -> tuple[FuncIR, Value | None]: + """Generate the FuncIR for a function. + + This takes the basic blocks and function info of a particular + function and returns the IR. If the function is nested, + also returns the register containing the instance of the + corresponding callable class. + """ + func_reg: Value | None = None + if fn_info.is_nested or fn_info.in_non_ext: + func_ir = add_call_to_callable_class(builder, args, blocks, sig, fn_info) + add_get_to_callable_class(builder, fn_info) + func_reg = instantiate_callable_class(builder, fn_info) + else: + fitem = fn_info.fitem + assert isinstance(fitem, FuncDef), fitem + func_decl = builder.mapper.func_to_decl[fitem] + if cdef and fn_info.name == FAST_PREFIX + func_decl.name: + # Special-cased version of a method has a separate FuncDecl, use that one. + func_decl = builder.mapper.type_to_ir[cdef.info].method_decls[fn_info.name] + if fn_info.is_decorated or is_singledispatch_main_func: + class_name = None if cdef is None else cdef.name + func_decl = FuncDecl( + fn_info.name, + class_name, + builder.module_name, + sig, + func_decl.kind, + is_prop_getter=func_decl.is_prop_getter, + is_prop_setter=func_decl.is_prop_setter, + ) + func_ir = FuncIR(func_decl, args, blocks, fitem.line, traceback_name=fitem.name) + else: + func_ir = FuncIR(func_decl, args, blocks, fitem.line, traceback_name=fitem.name) + return (func_ir, func_reg) + + +def generate_getattr_wrapper(builder: IRBuilder, cdef: ClassDef, getattr: FuncDef) -> None: + """ + Generate a wrapper function for __getattr__ that can be put into the tp_getattro slot. + The wrapper takes one argument besides self which is the attribute name. + It first checks if the name matches any of the attributes of this class. + If it does, it returns that attribute. If none match, it calls __getattr__. + + __getattr__ is not supported in classes that allow interpreted subclasses because the + tp_getattro slot is inherited by subclasses and if the subclass overrides __getattr__, + the override would be ignored in our wrapper. TODO: To support this, the wrapper would + have to check type of self and if it's not the compiled class, resolve "__getattr__" against + the type at runtime and call the returned method, like _Py_slot_tp_getattr_hook in cpython. + + __getattr__ is not supported in classes which inherit from non-native classes because those + have __dict__ which currently has some strange interactions when class attributes and + variables are assigned through __dict__ vs. through regular attribute access. Allowing + __getattr__ on top of that could be problematic. 
+ """ + name = getattr.name + "__wrapper" + ir = builder.mapper.type_to_ir[cdef.info] + line = getattr.line + + error_base = f'"__getattr__" not supported in class "{cdef.name}" because ' + if ir.allow_interpreted_subclasses: + builder.error(error_base + "it allows interpreted subclasses", line) + if ir.inherits_python: + builder.error(error_base + "it inherits from a non-native class", line) + + with builder.enter_method(ir, name, object_rprimitive, internal=True): + attr_arg = builder.add_argument("attr", object_rprimitive) + generic_getattr_result = builder.call_c(generic_getattr, [builder.self(), attr_arg], line) + + return_generic, call_getattr = BasicBlock(), BasicBlock() + null = Integer(0, object_rprimitive, line) + got_generic = builder.add( + ComparisonOp(generic_getattr_result, null, ComparisonOp.NEQ, line) + ) + builder.add_bool_branch(got_generic, return_generic, call_getattr) + + builder.activate_block(return_generic) + builder.add(Return(generic_getattr_result, line)) + + builder.activate_block(call_getattr) + # No attribute matched so call user-provided __getattr__. + getattr_result = builder.gen_method_call( + builder.self(), getattr.name, [attr_arg], object_rprimitive, line + ) + builder.add(Return(getattr_result, line)) + + +def generate_setattr_wrapper(builder: IRBuilder, cdef: ClassDef, setattr: FuncDef) -> None: + """ + Generate a wrapper function for __setattr__ that can be put into the tp_setattro slot. + The wrapper takes two arguments besides self - attribute name and the new value. + Returns 0 on success and -1 on failure. Restrictions are similar to the __getattr__ + wrapper above. + + The wrapper calls the user-defined __setattr__ when the value to set is not NULL. + When it's NULL, this means that the call to tp_setattro comes from a del statement, + so it calls __delattr__ instead. If __delattr__ is not overridden in the native class, + this will call the base implementation in object which doesn't work without __dict__. + """ + name = setattr.name + "__wrapper" + ir = builder.mapper.type_to_ir[cdef.info] + line = setattr.line + + error_base = f'"__setattr__" not supported in class "{cdef.name}" because ' + if ir.allow_interpreted_subclasses: + builder.error(error_base + "it allows interpreted subclasses", line) + if ir.inherits_python: + builder.error(error_base + "it inherits from a non-native class", line) + + with builder.enter_method(ir, name, c_int_rprimitive, internal=True): + attr_arg = builder.add_argument("attr", object_rprimitive) + value_arg = builder.add_argument("value", object_rprimitive) + + call_delattr, call_setattr = BasicBlock(), BasicBlock() + null = Integer(0, object_rprimitive, line) + is_delattr = builder.add(ComparisonOp(value_arg, null, ComparisonOp.EQ, line)) + builder.add_bool_branch(is_delattr, call_delattr, call_setattr) + + builder.activate_block(call_delattr) + delattr_symbol = cdef.info.get("__delattr__") + delattr = delattr_symbol.node if delattr_symbol else None + delattr_override = delattr is not None and not delattr.fullname.startswith("builtins.") + if delattr_override: + builder.gen_method_call(builder.self(), "__delattr__", [attr_arg], None, line) + else: + # Call internal function that cpython normally calls when deleting an attribute. + # Cannot call object.__delattr__ here because it calls PyObject_SetAttr internally + # which in turn calls our wrapper and recurses infinitely. + # Note that since native classes don't have __dict__, this will raise AttributeError + # for dynamic attributes. 
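+            # Passing a null value requests deletion, following the
+            # tp_setattro convention described in the docstring above.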
+ builder.call_c(generic_setattr, [builder.self(), attr_arg, null], line) + builder.add(Return(Integer(0, c_int_rprimitive), line)) + + builder.activate_block(call_setattr) + builder.gen_method_call(builder.self(), setattr.name, [attr_arg, value_arg], None, line) + builder.add(Return(Integer(0, c_int_rprimitive), line)) + + +def handle_ext_method(builder: IRBuilder, cdef: ClassDef, fdef: FuncDef) -> None: + # Perform the function of visit_method for methods inside extension classes. + name = fdef.name + class_ir = builder.mapper.type_to_ir[cdef.info] + sig = builder.mapper.fdef_to_sig(fdef, builder.options.strict_dunders_typing) + func_ir, func_reg = gen_func_item(builder, fdef, name, sig, cdef) + builder.functions.append(func_ir) + + if is_decorated(builder, fdef): + # Obtain the function name in order to construct the name of the helper function. + _, _, name = fdef.fullname.rpartition(".") + # Read the PyTypeObject representing the class, get the callable object + # representing the non-decorated method + typ = builder.load_native_type_object(cdef.fullname) + orig_func = builder.py_get_attr(typ, name, fdef.line) + + # Decorate the non-decorated method + decorated_func = load_decorated_func(builder, fdef, orig_func) + + # Set the callable object representing the decorated method as an attribute of the + # extension class. + builder.primitive_op( + py_setattr_op, [typ, builder.load_str(name), decorated_func], fdef.line + ) + + if fdef.is_property: + # If there is a property setter, it will be processed after the getter, + # We populate the optional setter field with none for now. + assert name not in class_ir.properties + class_ir.properties[name] = (func_ir, None) + + elif fdef in builder.prop_setters: + # The respective property getter must have been processed already + assert name in class_ir.properties + getter_ir, _ = class_ir.properties[name] + class_ir.properties[name] = (getter_ir, func_ir) + + class_ir.methods[func_ir.decl.name] = func_ir + + # If this overrides a parent class method with a different type, we need + # to generate a glue method to mediate between them. + for base in class_ir.mro[1:]: + if ( + name in base.method_decls + and name != "__init__" + and not is_same_method_signature( + class_ir.method_decls[name].sig, base.method_decls[name].sig + ) + ): + # TODO: Support contravariant subtyping in the input argument for + # property setters. Need to make a special glue method for handling this, + # similar to gen_glue_property. + + f = gen_glue(builder, base.method_decls[name].sig, func_ir, class_ir, base, fdef) + class_ir.glue_methods[(base, name)] = f + builder.functions.append(f) + + # If the class allows interpreted children, create glue + # methods that dispatch via the Python API. These will go in a + # "shadow vtable" that will be assigned to interpreted + # children. 
+ if class_ir.allow_interpreted_subclasses: + f = gen_glue(builder, func_ir.sig, func_ir, class_ir, class_ir, fdef, do_py_ops=True) + class_ir.glue_methods[(class_ir, name)] = f + builder.functions.append(f) + + if fdef.name == "__getattr__": + generate_getattr_wrapper(builder, cdef, fdef) + elif fdef.name == "__setattr__": + generate_setattr_wrapper(builder, cdef, fdef) + elif fdef.name == "__delattr__": + setattr = cdef.info.get("__setattr__") + if not setattr or not setattr.node or setattr.node.fullname.startswith("builtins."): + builder.error( + '"__delattr__" supported only in classes that also override "__setattr__", ' + + "or inherit from a native class that overrides it.", + fdef.line, + ) + + +def handle_non_ext_method( + builder: IRBuilder, non_ext: NonExtClassInfo, cdef: ClassDef, fdef: FuncDef +) -> None: + # Perform the function of visit_method for methods inside non-extension classes. + name = fdef.name + sig = builder.mapper.fdef_to_sig(fdef, builder.options.strict_dunders_typing) + func_ir, func_reg = gen_func_item(builder, fdef, name, sig, cdef) + assert func_reg is not None + builder.functions.append(func_ir) + + if is_decorated(builder, fdef): + # The undecorated method is a generated callable class + orig_func = func_reg + func_reg = load_decorated_func(builder, fdef, orig_func) + + # TODO: Support property setters in non-extension classes + if fdef.is_property: + prop = builder.load_module_attr_by_fullname("builtins.property", fdef.line) + func_reg = builder.py_call(prop, [func_reg], fdef.line) + + elif builder.mapper.func_to_decl[fdef].kind == FUNC_CLASSMETHOD: + cls_meth = builder.load_module_attr_by_fullname("builtins.classmethod", fdef.line) + func_reg = builder.py_call(cls_meth, [func_reg], fdef.line) + + elif builder.mapper.func_to_decl[fdef].kind == FUNC_STATICMETHOD: + stat_meth = builder.load_module_attr_by_fullname("builtins.staticmethod", fdef.line) + func_reg = builder.py_call(stat_meth, [func_reg], fdef.line) + + builder.add_to_non_ext_dict(non_ext, name, func_reg, fdef.line) + + # If we identified that this non-extension class method can be special-cased for + # direct access during prepare phase, generate a "static" version of it. + class_ir = builder.mapper.type_to_ir[cdef.info] + name = FAST_PREFIX + fdef.name + if name in class_ir.method_decls: + func_ir, func_reg = gen_func_item(builder, fdef, name, sig, cdef, make_ext_method=True) + class_ir.methods[name] = func_ir + builder.functions.append(func_ir) + + +def gen_func_ns(builder: IRBuilder) -> str: + """Generate a namespace for a nested function using its outer function names.""" + return "_".join( + info.name + ("" if not info.class_name else "_" + info.class_name) + for info in builder.fn_infos + if info.name and info.name != "" + ) + + +def load_decorated_func(builder: IRBuilder, fdef: FuncDef, orig_func_reg: Value) -> Value: + """Apply decorators to a function. + + Given a decorated FuncDef and an instance of the callable class + representing that FuncDef, apply the corresponding decorator + functions on that decorated FuncDef and return the decorated + function. + """ + if not is_decorated(builder, fdef): + # If there are no decorators associated with the function, then just return the + # original function. 
+ return orig_func_reg + + decorators = builder.fdefs_to_decorators[fdef] + func_reg = orig_func_reg + for d in reversed(decorators): + decorator = d.accept(builder.visitor) + assert isinstance(decorator, Value), decorator + func_reg = builder.py_call(decorator, [func_reg], func_reg.line) + return func_reg + + +def is_decorated(builder: IRBuilder, fdef: FuncDef) -> bool: + return fdef in builder.fdefs_to_decorators + + +def gen_glue( + builder: IRBuilder, + base_sig: FuncSignature, + target: FuncIR, + cls: ClassIR, + base: ClassIR, + fdef: FuncItem, + *, + do_py_ops: bool = False, +) -> FuncIR: + """Generate glue methods that mediate between different method types in subclasses. + + Works on both properties and methods. See gen_glue_methods below + for more details. + + If do_py_ops is True, then the glue methods should use generic + C API operations instead of direct calls, to enable generating + "shadow" glue methods that work with interpreted subclasses. + """ + if fdef.is_property: + return gen_glue_property(builder, base_sig, target, cls, base, fdef.line, do_py_ops) + else: + return gen_glue_method(builder, base_sig, target, cls, base, fdef.line, do_py_ops) + + +class ArgInfo(NamedTuple): + args: list[Value] + arg_names: list[str | None] + arg_kinds: list[ArgKind] + + +def get_args(builder: IRBuilder, rt_args: Sequence[RuntimeArg], line: int) -> ArgInfo: + # The environment operates on Vars, so we make some up + fake_vars = [(Var(arg.name), arg.type) for arg in rt_args] + args = [ + builder.read(builder.add_local_reg(var, type, is_arg=True), line) + for var, type in fake_vars + ] + arg_names = [ + arg.name if arg.kind.is_named() or (arg.kind.is_optional() and not arg.pos_only) else None + for arg in rt_args + ] + arg_kinds = [arg.kind for arg in rt_args] + return ArgInfo(args, arg_names, arg_kinds) + + +def gen_glue_method( + builder: IRBuilder, + base_sig: FuncSignature, + target: FuncIR, + cls: ClassIR, + base: ClassIR, + line: int, + do_pycall: bool, +) -> FuncIR: + """Generate glue methods that mediate between different method types in subclasses. + + For example, if we have: + + class A: + def f(builder: IRBuilder, x: int) -> object: ... + + then it is totally permissible to have a subclass + + class B(A): + def f(builder: IRBuilder, x: object) -> int: ... + + since '(object) -> int' is a subtype of '(int) -> object' by the usual + contra/co-variant function subtyping rules. + + The trickiness here is that int and object have different + runtime representations in mypyc, so A.f and B.f have + different signatures at the native C level. To deal with this, + we need to generate glue methods that mediate between the + different versions by coercing the arguments and return + values. + + If do_pycall is True, then make the call using the C API + instead of a native call. 
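+
+    The generated glue method is registered under a name of the form
+    '<method>__<base class>_glue'; for the example above that would be
+    'f__A_glue' on class B.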
+ """ + check_native_override(builder, base_sig, target.decl.sig, line) + + builder.enter() + builder.ret_types[-1] = base_sig.ret_type + + rt_args = list(base_sig.args) + if target.decl.kind == FUNC_NORMAL: + rt_args[0] = RuntimeArg(base_sig.args[0].name, RInstance(cls)) + + arg_info = get_args(builder, rt_args, line) + args, arg_kinds, arg_names = arg_info.args, arg_info.arg_kinds, arg_info.arg_names + + bitmap_args = None + if base_sig.num_bitmap_args: + args = args[: -base_sig.num_bitmap_args] + arg_kinds = arg_kinds[: -base_sig.num_bitmap_args] + arg_names = arg_names[: -base_sig.num_bitmap_args] + bitmap_args = list(builder.builder.args[-base_sig.num_bitmap_args :]) + + # We can do a passthrough *args/**kwargs with a native call, but if the + # args need to get distributed out to arguments, we just let python handle it + if any(kind.is_star() for kind in arg_kinds) and any( + not arg.kind.is_star() for arg in target.decl.sig.args + ): + do_pycall = True + + if do_pycall: + if target.decl.kind == FUNC_STATICMETHOD: + # FIXME: this won't work if we can do interpreted subclasses + first = builder.builder.get_native_type(cls) + st = 0 + else: + first = args[0] + st = 1 + retval = builder.builder.py_method_call( + first, target.name, args[st:], line, arg_kinds[st:], arg_names[st:] + ) + else: + retval = builder.builder.call( + target.decl, args, arg_kinds, arg_names, line, bitmap_args=bitmap_args + ) + retval = builder.coerce(retval, base_sig.ret_type, line) + builder.add(Return(retval)) + + arg_regs, _, blocks, ret_type, _ = builder.leave() + if base_sig.num_bitmap_args: + rt_args = rt_args[: -base_sig.num_bitmap_args] + return FuncIR( + FuncDecl( + target.name + "__" + base.name + "_glue", + cls.name, + builder.module_name, + FuncSignature(rt_args, ret_type), + target.decl.kind, + ), + arg_regs, + blocks, + ) + + +def check_native_override( + builder: IRBuilder, base_sig: FuncSignature, sub_sig: FuncSignature, line: int +) -> None: + """Report an error if an override changes signature in unsupported ways. + + Glue methods can work around many signature changes but not all of them. + """ + for base_arg, sub_arg in zip(base_sig.real_args(), sub_sig.real_args()): + if base_arg.type.error_overlap: + if not base_arg.optional and sub_arg.optional and base_sig.num_bitmap_args: + # This would change the meanings of bits in the argument defaults + # bitmap, which we don't support. We'd need to do tricky bit + # manipulations to support this generally. + builder.error( + "An argument with type " + + f'"{base_arg.type}" cannot be given a default value in a method override', + line, + ) + if base_arg.type.error_overlap or sub_arg.type.error_overlap: + if not is_same_type(base_arg.type, sub_arg.type): + # This would change from signaling a default via an error value to + # signaling a default via bitmap, which we don't support. + builder.error( + "Incompatible argument type " + + f'"{sub_arg.type}" (base class has type "{base_arg.type}")', + line, + ) + + +def gen_glue_property( + builder: IRBuilder, + sig: FuncSignature, + target: FuncIR, + cls: ClassIR, + base: ClassIR, + line: int, + do_pygetattr: bool, +) -> FuncIR: + """Generate glue methods for properties that mediate between different subclass types. + + Similarly to methods, properties of derived types can be covariantly subtyped. Thus, + properties also require glue. However, this only requires the return type to change. + Further, instead of a method call, an attribute get is performed. 
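+
+    For example, if a base class property is declared to return 'object'
+    and a subclass narrows the return type to 'int', the glue getter
+    reads the subclass attribute and coerces (boxes) the result back to
+    'object' so that callers expecting the base type still work.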
+ + If do_pygetattr is True, then get the attribute using the Python C + API instead of a native call. + """ + builder.enter() + + rt_arg = RuntimeArg(SELF_NAME, RInstance(cls)) + self_target = builder.add_self_to_env(cls) + arg = builder.read(self_target, line) + builder.ret_types[-1] = sig.ret_type + if do_pygetattr: + retval = builder.py_get_attr(arg, target.name, line) + else: + retval = builder.add(GetAttr(arg, target.name, line)) + retbox = builder.coerce(retval, sig.ret_type, line) + builder.add(Return(retbox)) + + args, _, blocks, return_type, _ = builder.leave() + return FuncIR( + FuncDecl( + target.name + "__" + base.name + "_glue", + cls.name, + builder.module_name, + FuncSignature([rt_arg], return_type), + ), + args, + blocks, + ) + + +def get_func_target(builder: IRBuilder, fdef: FuncDef) -> AssignmentTarget: + """Given a FuncDef, return the target for the instance of its callable class. + + If the function was not already defined somewhere, then define it + and add it to the current environment. + """ + if fdef.original_def: + # Get the target associated with the previously defined FuncDef. + return builder.lookup(fdef.original_def) + + if builder.fn_info.is_generator or builder.fn_info.add_nested_funcs_to_env: + return builder.lookup(fdef) + + return builder.add_local_reg(fdef, object_rprimitive) + + +# This function still does not support the following imports. +# import json as _json +# from json import decoder +# Using either _json.JSONDecoder or decoder.JSONDecoder as a type hint for a dataclass field will fail. +# See issue mypyc/mypyc#1099. +def load_type(builder: IRBuilder, typ: TypeInfo, unbounded_type: Type | None, line: int) -> Value: + # typ.fullname contains the module where the class object was defined. However, it is possible + # that the class object's module was not imported in the file currently being compiled. So, we + # use unbounded_type.name (if provided by caller) to load the class object through one of the + # imported modules. + # Example: for `json.JSONDecoder`, typ.fullname is `json.decoder.JSONDecoder` but the Python + # file may import `json` not `json.decoder`. + # Another corner case: The Python file being compiled imports mod1 and has a type hint + # `mod1.OuterClass.InnerClass`. But, mod1/__init__.py might import OuterClass like this: + # `from mod2.mod3 import OuterClass`. In this case, typ.fullname is + # `mod2.mod3.OuterClass.InnerClass` and `unbounded_type.name` is `mod1.OuterClass.InnerClass`. + # So, we must use unbounded_type.name to load the class object. + # See issue mypyc/mypyc#1087. + load_attr_path = ( + unbounded_type.name if isinstance(unbounded_type, UnboundType) else typ.fullname + ).removesuffix(f".{typ.name}") + if typ in builder.mapper.type_to_ir: + class_ir = builder.mapper.type_to_ir[typ] + class_obj = builder.builder.get_native_type(class_ir) + elif typ.fullname in builtin_names: + builtin_addr_type, src = builtin_names[typ.fullname] + class_obj = builder.add(LoadAddress(builtin_addr_type, src, line)) + # This elif-condition finds the longest import that matches the load_attr_path. + elif module_name := max( + (i for i in builder.imports if load_attr_path == i or load_attr_path.startswith(f"{i}.")), + default="", + key=len, + ): + # Load the imported module. + loaded_module = builder.load_module(module_name) + # Recursively load attributes of the imported module. These may be submodules, classes or + # any other object. 
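+        # For example, with load_attr_path "mod1.OuterClass" and module_name
+        # "mod1" (the corner case described above), this walks the single
+        # attribute "OuterClass" before loading the class object itself.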
+ for attr in ( + load_attr_path.removeprefix(f"{module_name}.").split(".") + if load_attr_path != module_name + else [] + ): + loaded_module = builder.py_get_attr(loaded_module, attr, line) + class_obj = builder.builder.get_attr( + loaded_module, typ.name, object_rprimitive, line, borrow=False + ) + else: + class_obj = builder.load_global_str(typ.name, line) + + return class_obj + + +def load_func(builder: IRBuilder, func_name: str, fullname: str | None, line: int) -> Value: + if fullname and not fullname.startswith(builder.current_module): + # we're calling a function in a different module + + # We can't use load_module_attr_by_fullname here because we need to load the function using + # func_name, not the name specified by fullname (which can be different for underscore + # function) + module = fullname.rsplit(".")[0] + loaded_module = builder.load_module(module) + + func = builder.py_get_attr(loaded_module, func_name, line) + else: + func = builder.load_global_str(func_name, line) + return func + + +def generate_singledispatch_dispatch_function( + builder: IRBuilder, main_singledispatch_function_name: str, fitem: FuncDef +) -> None: + line = fitem.line + current_func_decl = builder.mapper.func_to_decl[fitem] + arg_info = get_args(builder, current_func_decl.sig.args, line) + + dispatch_func_obj = builder.self() + + arg_type = builder.builder.get_type_of_obj(arg_info.args[0], line) + dispatch_cache = builder.builder.get_attr( + dispatch_func_obj, "dispatch_cache", dict_rprimitive, line + ) + call_find_impl, use_cache, call_func = BasicBlock(), BasicBlock(), BasicBlock() + get_result = builder.primitive_op(dict_get_method_with_none, [dispatch_cache, arg_type], line) + is_not_none = builder.translate_is_op(get_result, builder.none_object(), "is not", line) + impl_to_use = Register(object_rprimitive) + builder.add_bool_branch(is_not_none, use_cache, call_find_impl) + + builder.activate_block(use_cache) + builder.assign(impl_to_use, get_result, line) + builder.goto(call_func) + + builder.activate_block(call_find_impl) + find_impl = builder.load_module_attr_by_fullname("functools._find_impl", line) + registry = load_singledispatch_registry(builder, dispatch_func_obj, line) + uncached_impl = builder.py_call(find_impl, [arg_type, registry], line) + builder.call_c(exact_dict_set_item_op, [dispatch_cache, arg_type, uncached_impl], line) + builder.assign(impl_to_use, uncached_impl, line) + builder.goto(call_func) + + builder.activate_block(call_func) + gen_calls_to_correct_impl(builder, impl_to_use, arg_info, fitem, line) + + +def gen_calls_to_correct_impl( + builder: IRBuilder, impl_to_use: Value, arg_info: ArgInfo, fitem: FuncDef, line: int +) -> None: + current_func_decl = builder.mapper.func_to_decl[fitem] + + def gen_native_func_call_and_return(fdef: FuncDef) -> None: + func_decl = builder.mapper.func_to_decl[fdef] + ret_val = builder.builder.call( + func_decl, arg_info.args, arg_info.arg_kinds, arg_info.arg_names, line + ) + coerced = builder.coerce(ret_val, current_func_decl.sig.ret_type, line) + builder.add(Return(coerced)) + + typ, src = builtin_names["builtins.int"] + int_type_obj = builder.add(LoadAddress(typ, src, line)) + is_int = builder.builder.type_is_op(impl_to_use, int_type_obj, line) + + native_call, non_native_call = BasicBlock(), BasicBlock() + builder.add_bool_branch(is_int, native_call, non_native_call) + builder.activate_block(native_call) + + passed_id = builder.add(Unbox(impl_to_use, int_rprimitive, line)) + + native_ids = get_native_impl_ids(builder, fitem) + for impl, i 
in native_ids.items(): + call_impl, next_impl = BasicBlock(), BasicBlock() + + current_id = builder.load_int(i) + cond = builder.binary_op(passed_id, current_id, "==", line) + builder.add_bool_branch(cond, call_impl, next_impl) + + # Call the registered implementation + builder.activate_block(call_impl) + + gen_native_func_call_and_return(impl) + builder.activate_block(next_impl) + + # We've already handled all the possible integer IDs, so we should never get here + builder.add(Unreachable()) + + builder.activate_block(non_native_call) + ret_val = builder.py_call( + impl_to_use, arg_info.args, line, arg_info.arg_kinds, arg_info.arg_names + ) + coerced = builder.coerce(ret_val, current_func_decl.sig.ret_type, line) + builder.add(Return(coerced)) + + +def gen_dispatch_func_ir( + builder: IRBuilder, fitem: FuncDef, main_func_name: str, dispatch_name: str, sig: FuncSignature +) -> tuple[FuncIR, Value]: + """Create a dispatch function (a function that checks the first argument type and dispatches + to the correct implementation) + """ + builder.enter(FuncInfo(fitem, dispatch_name)) + setup_callable_class(builder) + builder.fn_info.callable_class.ir.attributes["registry"] = dict_rprimitive + builder.fn_info.callable_class.ir.attributes["dispatch_cache"] = dict_rprimitive + builder.fn_info.callable_class.ir.has_dict = True + builder.fn_info.callable_class.ir.needs_getseters = True + generate_singledispatch_callable_class_ctor(builder) + + generate_singledispatch_dispatch_function(builder, main_func_name, fitem) + args, _, blocks, _, fn_info = builder.leave() + dispatch_callable_class = add_call_to_callable_class(builder, args, blocks, sig, fn_info) + builder.functions.append(dispatch_callable_class) + add_get_to_callable_class(builder, fn_info) + add_register_method_to_callable_class(builder, fn_info) + func_reg = instantiate_callable_class(builder, fn_info) + dispatch_func_ir = generate_dispatch_glue_native_function( + builder, fitem, dispatch_callable_class.decl, dispatch_name + ) + + return dispatch_func_ir, func_reg + + +def generate_dispatch_glue_native_function( + builder: IRBuilder, fitem: FuncDef, callable_class_decl: FuncDecl, dispatch_name: str +) -> FuncIR: + line = fitem.line + builder.enter() + # We store the callable class in the globals dict for this function + callable_class = builder.load_global_str(dispatch_name, line) + decl = builder.mapper.func_to_decl[fitem] + arg_info = get_args(builder, decl.sig.args, line) + args = [callable_class] + arg_info.args + arg_kinds = [ArgKind.ARG_POS] + arg_info.arg_kinds + arg_names = arg_info.arg_names + arg_names.insert(0, "self") + ret_val = builder.builder.call(callable_class_decl, args, arg_kinds, arg_names, line) + builder.add(Return(ret_val)) + arg_regs, _, blocks, _, fn_info = builder.leave() + return FuncIR(decl, arg_regs, blocks) + + +def generate_singledispatch_callable_class_ctor(builder: IRBuilder) -> None: + """Create an __init__ that sets registry and dispatch_cache to empty dicts""" + line = -1 + class_ir = builder.fn_info.callable_class.ir + with builder.enter_method(class_ir, "__init__", bool_rprimitive): + empty_dict = builder.call_c(dict_new_op, [], line) + builder.add(SetAttr(builder.self(), "registry", empty_dict, line)) + cache_dict = builder.call_c(dict_new_op, [], line) + dispatch_cache_str = builder.load_str("dispatch_cache") + # use the py_setattr_op instead of SetAttr so that it also gets added to our __dict__ + builder.primitive_op(py_setattr_op, [builder.self(), dispatch_cache_str, cache_dict], line) + # the 
generated C code seems to expect that __init__ returns a char, so just return 1 + builder.add(Return(Integer(1, bool_rprimitive, line), line)) + + +def add_register_method_to_callable_class(builder: IRBuilder, fn_info: FuncInfo) -> None: + line = -1 + with builder.enter_method(fn_info.callable_class.ir, "register", object_rprimitive): + cls_arg = builder.add_argument("cls", object_rprimitive) + func_arg = builder.add_argument("func", object_rprimitive, ArgKind.ARG_OPT) + ret_val = builder.call_c(register_function, [builder.self(), cls_arg, func_arg], line) + builder.add(Return(ret_val, line)) + + +def load_singledispatch_registry(builder: IRBuilder, dispatch_func_obj: Value, line: int) -> Value: + return builder.builder.get_attr(dispatch_func_obj, "registry", dict_rprimitive, line) + + +def singledispatch_main_func_name(orig_name: str) -> str: + return f"__mypyc_singledispatch_main_function_{orig_name}__" + + +def maybe_insert_into_registry_dict(builder: IRBuilder, fitem: FuncDef) -> None: + line = fitem.line + is_singledispatch_main_func = fitem in builder.singledispatch_impls + # dict of singledispatch_func to list of register_types (fitem is the function to register) + to_register: defaultdict[FuncDef, list[TypeInfo]] = defaultdict(list) + for main_func, impls in builder.singledispatch_impls.items(): + for dispatch_type, impl in impls: + if fitem == impl: + to_register[main_func].append(dispatch_type) + + if not to_register and not is_singledispatch_main_func: + return + + if is_singledispatch_main_func: + main_func_name = singledispatch_main_func_name(fitem.name) + main_func_obj = load_func(builder, main_func_name, fitem.fullname, line) + + loaded_object_type = builder.load_module_attr_by_fullname("builtins.object", line) + registry_dict = builder.builder.make_dict([(loaded_object_type, main_func_obj)], line) + + dispatch_func_obj = builder.load_global_str(fitem.name, line) + builder.primitive_op( + py_setattr_op, [dispatch_func_obj, builder.load_str("registry"), registry_dict], line + ) + + for singledispatch_func, types in to_register.items(): + # TODO: avoid recomputing the native IDs for all the functions every time we find a new + # function + native_ids = get_native_impl_ids(builder, singledispatch_func) + if fitem not in native_ids: + to_insert = load_func(builder, fitem.name, fitem.fullname, line) + else: + current_id = native_ids[fitem] + load_literal = LoadLiteral(current_id, object_rprimitive) + to_insert = builder.add(load_literal) + # TODO: avoid reloading the registry here if we just created it + dispatch_func_obj = load_func( + builder, singledispatch_func.name, singledispatch_func.fullname, line + ) + registry = load_singledispatch_registry(builder, dispatch_func_obj, line) + for typ in types: + loaded_type = load_type(builder, typ, None, line) + builder.call_c(exact_dict_set_item_op, [registry, loaded_type, to_insert], line) + dispatch_cache = builder.builder.get_attr( + dispatch_func_obj, "dispatch_cache", dict_rprimitive, line + ) + builder.gen_method_call(dispatch_cache, "clear", [], None, line) + + +def get_native_impl_ids(builder: IRBuilder, singledispatch_func: FuncDef) -> dict[FuncDef, int]: + """Return a dict of registered implementation to native implementation ID for all + implementations + """ + impls = builder.singledispatch_impls[singledispatch_func] + return {impl: i for i, (typ, impl) in enumerate(impls) if not is_decorated(builder, impl)} + + +def gen_property_getter_ir( + builder: IRBuilder, func_decl: FuncDecl, cdef: ClassDef, is_trait: bool +) -> 
FuncIR: + """Generate an implicit trivial property getter for an attribute. + + These are used if an attribute can also be accessed as a property. + """ + name = func_decl.name + builder.enter(name) + self_reg = builder.add_argument("self", func_decl.sig.args[0].type) + if not is_trait: + value = builder.builder.get_attr(self_reg, name, func_decl.sig.ret_type, -1) + builder.add(Return(value)) + else: + builder.add(Unreachable()) + args, _, blocks, ret_type, fn_info = builder.leave() + return FuncIR(func_decl, args, blocks) + + +def gen_property_setter_ir( + builder: IRBuilder, func_decl: FuncDecl, cdef: ClassDef, is_trait: bool +) -> FuncIR: + """Generate an implicit trivial property setter for an attribute. + + These are used if an attribute can also be accessed as a property. + """ + name = func_decl.name + builder.enter(name) + self_reg = builder.add_argument("self", func_decl.sig.args[0].type) + value_reg = builder.add_argument("value", func_decl.sig.args[1].type) + assert name.startswith(PROPSET_PREFIX) + attr_name = name[len(PROPSET_PREFIX) :] + if not is_trait: + builder.add(SetAttr(self_reg, attr_name, value_reg, -1)) + builder.add(Return(builder.none())) + args, _, blocks, ret_type, fn_info = builder.leave() + return FuncIR(func_decl, args, blocks) diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/generator.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/generator.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..950a692 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/generator.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/generator.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/generator.py new file mode 100644 index 0000000..4dcd748 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/generator.py @@ -0,0 +1,437 @@ +"""Generate IR for generator functions. + +A generator function is represented by a class that implements the +generator protocol and keeps track of the generator state, including +local variables. + +The top-level logic for dealing with generator functions is in +mypyc.irbuild.function. 
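+
+Roughly, a generator function such as
+
+    def gen() -> Iterator[int]:
+        yield 1
+
+becomes a class implementing __iter__, __next__, send, throw and close
+(plus __await__ for coroutines), with its local state and a "next label"
+attribute recording where execution should resume.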
+""" + +from __future__ import annotations + +from typing import Callable + +from mypy.nodes import ARG_OPT, FuncDef, Var +from mypyc.common import ENV_ATTR_NAME, GENERATOR_ATTRIBUTE_PREFIX, NEXT_LABEL_ATTR_NAME +from mypyc.ir.class_ir import ClassIR +from mypyc.ir.func_ir import FuncDecl, FuncIR +from mypyc.ir.ops import ( + NO_TRACEBACK_LINE_NO, + BasicBlock, + Branch, + Call, + Goto, + Integer, + MethodCall, + RaiseStandardError, + Register, + Return, + SetAttr, + TupleSet, + Unreachable, + Value, +) +from mypyc.ir.rtypes import ( + RInstance, + int32_rprimitive, + object_pointer_rprimitive, + object_rprimitive, +) +from mypyc.irbuild.builder import IRBuilder, calculate_arg_defaults, gen_arg_defaults +from mypyc.irbuild.context import FuncInfo +from mypyc.irbuild.env_class import ( + add_args_to_env, + add_vars_to_env, + finalize_env_class, + load_env_registers, + load_outer_env, + load_outer_envs, + setup_func_for_recursive_call, +) +from mypyc.irbuild.nonlocalcontrol import ExceptNonlocalControl +from mypyc.irbuild.prepare import GENERATOR_HELPER_NAME +from mypyc.primitives.exc_ops import ( + error_catch_op, + exc_matches_op, + raise_exception_with_tb_op, + reraise_exception_op, + restore_exc_info_op, +) + + +def gen_generator_func( + builder: IRBuilder, + gen_func_ir: Callable[ + [list[Register], list[BasicBlock], FuncInfo], tuple[FuncIR, Value | None] + ], +) -> tuple[FuncIR, Value | None]: + """Generate IR for generator function that returns generator object.""" + setup_generator_class(builder) + load_env_registers(builder, prefix=GENERATOR_ATTRIBUTE_PREFIX) + gen_arg_defaults(builder) + if builder.fn_info.can_merge_generator_and_env_classes(): + gen = instantiate_generator_class(builder) + builder.fn_info._curr_env_reg = gen + finalize_env_class(builder, prefix=GENERATOR_ATTRIBUTE_PREFIX) + else: + finalize_env_class(builder, prefix=GENERATOR_ATTRIBUTE_PREFIX) + gen = instantiate_generator_class(builder) + builder.add(Return(gen)) + + args, _, blocks, ret_type, fn_info = builder.leave() + func_ir, func_reg = gen_func_ir(args, blocks, fn_info) + return func_ir, func_reg + + +def gen_generator_func_body(builder: IRBuilder, fn_info: FuncInfo, func_reg: Value | None) -> None: + """Generate IR based on the body of a generator function. + + Add "__next__", "__iter__" and other generator methods to the generator + class that implements the function (each function gets a separate class). + + Return the symbol table for the body. + """ + builder.enter(fn_info, ret_type=object_rprimitive) + setup_env_for_generator_class(builder) + + load_outer_envs(builder, builder.fn_info.generator_class) + top_level = builder.top_level_fn_info() + fitem = fn_info.fitem + if ( + builder.fn_info.is_nested + and isinstance(fitem, FuncDef) + and top_level + and top_level.add_nested_funcs_to_env + ): + setup_func_for_recursive_call( + builder, fitem, builder.fn_info.generator_class, prefix=GENERATOR_ATTRIBUTE_PREFIX + ) + create_switch_for_generator_class(builder) + add_raise_exception_blocks_to_generator_class(builder, fitem.line) + + add_vars_to_env(builder, prefix=GENERATOR_ATTRIBUTE_PREFIX) + + builder.accept(fitem.body) + builder.maybe_add_implicit_return() + + populate_switch_for_generator_class(builder) + + # Hang on to the local symbol table, since the caller will use it + # to calculate argument defaults. 
+ symtable = builder.symtables[-1] + + args, _, blocks, ret_type, fn_info = builder.leave() + + add_methods_to_generator_class(builder, fn_info, args, blocks, fitem.is_coroutine) + + # Evaluate argument defaults in the surrounding scope, since we + # calculate them *once* when the function definition is evaluated. + calculate_arg_defaults(builder, fn_info, func_reg, symtable) + + +def instantiate_generator_class(builder: IRBuilder) -> Value: + fitem = builder.fn_info.fitem + generator_reg = builder.add(Call(builder.fn_info.generator_class.ir.ctor, [], fitem.line)) + + if builder.fn_info.can_merge_generator_and_env_classes(): + # Set the generator instance to the initial state (zero). + zero = Integer(0) + builder.add(SetAttr(generator_reg, NEXT_LABEL_ATTR_NAME, zero, fitem.line)) + else: + # Get the current environment register. If the current function is nested, then the + # generator class gets instantiated from the callable class' '__call__' method, and hence + # we use the callable class' environment register. Otherwise, we use the original + # function's environment register. + if builder.fn_info.is_nested: + curr_env_reg = builder.fn_info.callable_class.curr_env_reg + else: + curr_env_reg = builder.fn_info.curr_env_reg + + # Set the generator class' environment attribute to point at the environment class + # defined in the current scope. + builder.add(SetAttr(generator_reg, ENV_ATTR_NAME, curr_env_reg, fitem.line)) + + # Set the generator instance's environment to the initial state (zero). + zero = Integer(0) + builder.add(SetAttr(curr_env_reg, NEXT_LABEL_ATTR_NAME, zero, fitem.line)) + return generator_reg + + +def setup_generator_class(builder: IRBuilder) -> ClassIR: + mapper = builder.mapper + assert isinstance(builder.fn_info.fitem, FuncDef), builder.fn_info.fitem + generator_class_ir = mapper.fdef_to_generator[builder.fn_info.fitem] + if builder.fn_info.can_merge_generator_and_env_classes(): + builder.fn_info.env_class = generator_class_ir + else: + generator_class_ir.attributes[ENV_ATTR_NAME] = RInstance(builder.fn_info.env_class) + + builder.classes.append(generator_class_ir) + return generator_class_ir + + +def create_switch_for_generator_class(builder: IRBuilder) -> None: + builder.add(Goto(builder.fn_info.generator_class.switch_block)) + block = BasicBlock() + builder.fn_info.generator_class.continuation_blocks.append(block) + builder.activate_block(block) + + +def populate_switch_for_generator_class(builder: IRBuilder) -> None: + cls = builder.fn_info.generator_class + line = builder.fn_info.fitem.line + + builder.activate_block(cls.switch_block) + for label, true_block in enumerate(cls.continuation_blocks): + false_block = BasicBlock() + comparison = builder.binary_op(cls.next_label_reg, Integer(label), "==", line) + builder.add_bool_branch(comparison, true_block, false_block) + builder.activate_block(false_block) + + builder.add(RaiseStandardError(RaiseStandardError.STOP_ITERATION, None, line)) + builder.add(Unreachable()) + + +def add_raise_exception_blocks_to_generator_class(builder: IRBuilder, line: int) -> None: + """Add error handling blocks to a generator class. + + Generates blocks to check if error flags are set while calling the + helper method for generator functions, and raises an exception if + those flags are set. + """ + cls = builder.fn_info.generator_class + assert cls.exc_regs is not None + exc_type, exc_val, exc_tb = cls.exc_regs + + # Check to see if an exception was raised. 
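+    # exc_type is Py_None when no error is pending; anything else means an
+    # exception was stored and must be re-raised with its traceback.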
+ error_block = BasicBlock() + ok_block = BasicBlock() + comparison = builder.translate_is_op(exc_type, builder.none_object(), "is not", line) + builder.add_bool_branch(comparison, error_block, ok_block) + + builder.activate_block(error_block) + builder.call_c(raise_exception_with_tb_op, [exc_type, exc_val, exc_tb], line) + builder.add(Unreachable()) + builder.goto_and_activate(ok_block) + + +def add_methods_to_generator_class( + builder: IRBuilder, + fn_info: FuncInfo, + arg_regs: list[Register], + blocks: list[BasicBlock], + is_coroutine: bool, +) -> None: + helper_fn_decl = add_helper_to_generator_class(builder, arg_regs, blocks, fn_info) + add_next_to_generator_class(builder, fn_info, helper_fn_decl) + add_send_to_generator_class(builder, fn_info, helper_fn_decl) + add_iter_to_generator_class(builder, fn_info) + add_throw_to_generator_class(builder, fn_info, helper_fn_decl) + add_close_to_generator_class(builder, fn_info) + if is_coroutine: + add_await_to_generator_class(builder, fn_info) + + +def add_helper_to_generator_class( + builder: IRBuilder, arg_regs: list[Register], blocks: list[BasicBlock], fn_info: FuncInfo +) -> FuncDecl: + """Generates a helper method for a generator class, called by '__next__' and 'throw'.""" + helper_fn_decl = fn_info.generator_class.ir.method_decls[GENERATOR_HELPER_NAME] + helper_fn_ir = FuncIR( + helper_fn_decl, arg_regs, blocks, fn_info.fitem.line, traceback_name=fn_info.fitem.name + ) + fn_info.generator_class.ir.methods[GENERATOR_HELPER_NAME] = helper_fn_ir + builder.functions.append(helper_fn_ir) + fn_info.env_class.env_user_function = helper_fn_ir + + return helper_fn_decl + + +def add_iter_to_generator_class(builder: IRBuilder, fn_info: FuncInfo) -> None: + """Generates the '__iter__' method for a generator class.""" + with builder.enter_method(fn_info.generator_class.ir, "__iter__", object_rprimitive, fn_info): + builder.add(Return(builder.self())) + + +def add_next_to_generator_class(builder: IRBuilder, fn_info: FuncInfo, fn_decl: FuncDecl) -> None: + """Generates the '__next__' method for a generator class.""" + with builder.enter_method(fn_info.generator_class.ir, "__next__", object_rprimitive, fn_info): + none_reg = builder.none_object() + # Call the helper function with error flags set to Py_None, and return that result. + result = builder.add( + Call( + fn_decl, + [ + builder.self(), + none_reg, + none_reg, + none_reg, + none_reg, + Integer(0, object_pointer_rprimitive), + ], + fn_info.fitem.line, + ) + ) + builder.add(Return(result)) + + +def add_send_to_generator_class(builder: IRBuilder, fn_info: FuncInfo, fn_decl: FuncDecl) -> None: + """Generates the 'send' method for a generator class.""" + with builder.enter_method(fn_info.generator_class.ir, "send", object_rprimitive, fn_info): + arg = builder.add_argument("arg", object_rprimitive) + none_reg = builder.none_object() + # Call the helper function with error flags set to Py_None, and return that result. 
+ result = builder.add( + Call( + fn_decl, + [ + builder.self(), + none_reg, + none_reg, + none_reg, + builder.read(arg), + Integer(0, object_pointer_rprimitive), + ], + fn_info.fitem.line, + ) + ) + builder.add(Return(result)) + + +def add_throw_to_generator_class(builder: IRBuilder, fn_info: FuncInfo, fn_decl: FuncDecl) -> None: + """Generates the 'throw' method for a generator class.""" + with builder.enter_method(fn_info.generator_class.ir, "throw", object_rprimitive, fn_info): + typ = builder.add_argument("type", object_rprimitive) + val = builder.add_argument("value", object_rprimitive, ARG_OPT) + tb = builder.add_argument("traceback", object_rprimitive, ARG_OPT) + + # Because the value and traceback arguments are optional and hence + # can be NULL if not passed in, we have to assign them Py_None if + # they are not passed in. + none_reg = builder.none_object() + builder.assign_if_null(val, lambda: none_reg, builder.fn_info.fitem.line) + builder.assign_if_null(tb, lambda: none_reg, builder.fn_info.fitem.line) + + # Call the helper function using the arguments passed in, and return that result. + result = builder.add( + Call( + fn_decl, + [ + builder.self(), + builder.read(typ), + builder.read(val), + builder.read(tb), + none_reg, + Integer(0, object_pointer_rprimitive), + ], + fn_info.fitem.line, + ) + ) + builder.add(Return(result)) + + +def add_close_to_generator_class(builder: IRBuilder, fn_info: FuncInfo) -> None: + """Generates the '__close__' method for a generator class.""" + with builder.enter_method(fn_info.generator_class.ir, "close", object_rprimitive, fn_info): + except_block, else_block = BasicBlock(), BasicBlock() + builder.builder.push_error_handler(except_block) + builder.goto_and_activate(BasicBlock()) + generator_exit = builder.load_module_attr_by_fullname( + "builtins.GeneratorExit", fn_info.fitem.line + ) + builder.add( + MethodCall( + builder.self(), + "throw", + [generator_exit, builder.none_object(), builder.none_object()], + ) + ) + builder.goto(else_block) + builder.builder.pop_error_handler() + + builder.activate_block(except_block) + old_exc = builder.call_c(error_catch_op, [], fn_info.fitem.line) + builder.nonlocal_control.append( + ExceptNonlocalControl(builder.nonlocal_control[-1], old_exc) + ) + stop_iteration = builder.load_module_attr_by_fullname( + "builtins.StopIteration", fn_info.fitem.line + ) + exceptions = builder.add(TupleSet([generator_exit, stop_iteration], fn_info.fitem.line)) + matches = builder.call_c(exc_matches_op, [exceptions], fn_info.fitem.line) + + match_block, non_match_block = BasicBlock(), BasicBlock() + builder.add(Branch(matches, match_block, non_match_block, Branch.BOOL)) + + builder.activate_block(match_block) + builder.call_c(restore_exc_info_op, [builder.read(old_exc)], fn_info.fitem.line) + builder.add(Return(builder.none_object())) + + builder.activate_block(non_match_block) + builder.call_c(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) + builder.add(Unreachable()) + + builder.nonlocal_control.pop() + + builder.activate_block(else_block) + builder.add( + RaiseStandardError( + RaiseStandardError.RUNTIME_ERROR, + "generator ignored GeneratorExit", + fn_info.fitem.line, + ) + ) + builder.add(Unreachable()) + + +def add_await_to_generator_class(builder: IRBuilder, fn_info: FuncInfo) -> None: + """Generates the '__await__' method for a generator class.""" + with builder.enter_method(fn_info.generator_class.ir, "__await__", object_rprimitive, fn_info): + builder.add(Return(builder.self())) + + +def 
setup_env_for_generator_class(builder: IRBuilder) -> None: + """Populates the environment for a generator class.""" + fitem = builder.fn_info.fitem + cls = builder.fn_info.generator_class + self_target = builder.add_self_to_env(cls.ir) + + # Add the type, value, and traceback variables to the environment. + exc_type = builder.add_local(Var("type"), object_rprimitive, is_arg=True) + exc_val = builder.add_local(Var("value"), object_rprimitive, is_arg=True) + exc_tb = builder.add_local(Var("traceback"), object_rprimitive, is_arg=True) + # TODO: Use the right type here instead of object? + exc_arg = builder.add_local(Var("arg"), object_rprimitive, is_arg=True) + + # Parameter that can used to pass a pointer which can used instead of + # raising StopIteration(value). If the value is NULL, this won't be used. + stop_iter_value_arg = builder.add_local( + Var("stop_iter_ptr"), object_pointer_rprimitive, is_arg=True + ) + + cls.exc_regs = (exc_type, exc_val, exc_tb) + cls.send_arg_reg = exc_arg + cls.stop_iter_value_reg = stop_iter_value_arg + + cls.self_reg = builder.read(self_target, fitem.line) + if builder.fn_info.can_merge_generator_and_env_classes(): + cls.curr_env_reg = cls.self_reg + else: + cls.curr_env_reg = load_outer_env(builder, cls.self_reg, builder.symtables[-1]) + + # Define a variable representing the label to go to the next time + # the '__next__' function of the generator is called, and add it + # as an attribute to the environment class. + cls.next_label_target = builder.add_var_to_env_class( + Var(NEXT_LABEL_ATTR_NAME), int32_rprimitive, cls, reassign=False, always_defined=True + ) + + # Add arguments from the original generator function to the + # environment of the generator class. + add_args_to_env( + builder, local=False, base=cls, reassign=False, prefix=GENERATOR_ATTRIBUTE_PREFIX + ) + + # Set the next label register for the generator class. + cls.next_label_reg = builder.read(cls.next_label_target, fitem.line) diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/ll_builder.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/ll_builder.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..394f29c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/ll_builder.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/ll_builder.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/ll_builder.py new file mode 100644 index 0000000..fd66288 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/ll_builder.py @@ -0,0 +1,2778 @@ +"""A "low-level" IR builder class. + +See the docstring of class LowLevelIRBuilder for more information. 
+ +""" + +from __future__ import annotations + +import sys +from collections.abc import Sequence +from typing import Callable, Final, Optional, cast +from typing_extensions import TypeGuard + +from mypy.argmap import map_actuals_to_formals +from mypy.nodes import ARG_POS, ARG_STAR, ARG_STAR2, ArgKind +from mypy.operators import op_methods, unary_op_methods +from mypy.types import AnyType, TypeOfAny +from mypyc.common import ( + BITMAP_BITS, + FAST_ISINSTANCE_MAX_SUBCLASSES, + FAST_PREFIX, + IS_FREE_THREADED, + MAX_LITERAL_SHORT_INT, + MAX_SHORT_INT, + MIN_LITERAL_SHORT_INT, + MIN_SHORT_INT, + PLATFORM_SIZE, +) +from mypyc.errors import Errors +from mypyc.ir.class_ir import ClassIR, all_concrete_classes +from mypyc.ir.func_ir import FuncDecl, FuncSignature +from mypyc.ir.ops import ( + ERR_FALSE, + ERR_NEVER, + NAMESPACE_MODULE, + NAMESPACE_STATIC, + NAMESPACE_TYPE, + Assign, + AssignMulti, + BasicBlock, + Box, + Branch, + Call, + CallC, + Cast, + ComparisonOp, + Extend, + Float, + FloatComparisonOp, + FloatNeg, + FloatOp, + GetAttr, + GetElementPtr, + Goto, + Integer, + IntOp, + KeepAlive, + LoadAddress, + LoadErrorValue, + LoadGlobal, + LoadLiteral, + LoadMem, + LoadStatic, + MethodCall, + Op, + PrimitiveDescription, + PrimitiveOp, + RaiseStandardError, + Register, + Truncate, + TupleGet, + TupleSet, + Unbox, + Unreachable, + Value, + float_comparison_op_to_id, + float_op_to_id, + int_op_to_id, +) +from mypyc.ir.rtypes import ( + PyObject, + PySetObject, + RArray, + RInstance, + RPrimitive, + RTuple, + RType, + RUnion, + bit_rprimitive, + bitmap_rprimitive, + bool_rprimitive, + bytes_rprimitive, + c_int_rprimitive, + c_pointer_rprimitive, + c_pyssize_t_rprimitive, + c_size_t_rprimitive, + check_native_int_range, + float_rprimitive, + int_rprimitive, + is_bool_or_bit_rprimitive, + is_bytes_rprimitive, + is_dict_rprimitive, + is_fixed_width_rtype, + is_float_rprimitive, + is_frozenset_rprimitive, + is_int16_rprimitive, + is_int32_rprimitive, + is_int64_rprimitive, + is_int_rprimitive, + is_list_rprimitive, + is_none_rprimitive, + is_object_rprimitive, + is_optional_type, + is_set_rprimitive, + is_short_int_rprimitive, + is_str_rprimitive, + is_tagged, + is_tuple_rprimitive, + is_uint8_rprimitive, + none_rprimitive, + object_pointer_rprimitive, + object_rprimitive, + optional_value_type, + pointer_rprimitive, + short_int_rprimitive, + str_rprimitive, +) +from mypyc.irbuild.util import concrete_arg_kind +from mypyc.options import CompilerOptions +from mypyc.primitives.bytes_ops import bytes_compare +from mypyc.primitives.dict_ops import ( + dict_build_op, + dict_copy, + dict_copy_op, + dict_new_op, + dict_ssize_t_size_op, + dict_update_in_display_op, +) +from mypyc.primitives.exc_ops import err_occurred_op, keep_propagating_op +from mypyc.primitives.float_ops import copysign_op, int_to_float_op +from mypyc.primitives.generic_ops import ( + generic_len_op, + generic_ssize_t_len_op, + py_call_op, + py_call_with_kwargs_op, + py_call_with_posargs_op, + py_getattr_op, + py_method_call_op, + py_vectorcall_method_op, + py_vectorcall_op, +) +from mypyc.primitives.int_ops import ( + int16_divide_op, + int16_mod_op, + int16_overflow, + int32_divide_op, + int32_mod_op, + int32_overflow, + int64_divide_op, + int64_mod_op, + int64_to_int_op, + int_to_int32_op, + int_to_int64_op, + ssize_t_to_int_op, + uint8_overflow, +) +from mypyc.primitives.list_ops import list_build_op, list_extend_op, list_items, new_list_op +from mypyc.primitives.misc_ops import ( + bool_op, + buf_init_item, + debug_print_op, + 
fast_isinstance_op, + none_object_op, + not_implemented_op, + set_immortal_op, + var_object_size, +) +from mypyc.primitives.registry import ( + ERR_NEG_INT, + CFunctionDescription, + binary_ops, + method_call_ops, + unary_ops, +) +from mypyc.primitives.set_ops import new_set_op +from mypyc.primitives.str_ops import ( + str_check_if_true, + str_eq, + str_eq_literal, + str_ssize_t_size_op, + unicode_compare, +) +from mypyc.primitives.tuple_ops import ( + list_tuple_op, + load_empty_tuple_constant_op, + new_tuple_op, + new_tuple_with_length_op, + sequence_tuple_op, +) +from mypyc.rt_subtype import is_runtime_subtype +from mypyc.sametype import is_same_type +from mypyc.subtype import is_subtype + +DictEntry = tuple[Optional[Value], Value] + +# If the number of items is less than the threshold when initializing +# a list, we would inline the generate IR using SetMem and expanded +# for-loop. Otherwise, we would call `list_build_op` for larger lists. +# TODO: The threshold is a randomly chosen number which needs further +# study on real-world projects for a better balance. +LIST_BUILDING_EXPANSION_THRESHOLD = 10 + +# From CPython +PY_VECTORCALL_ARGUMENTS_OFFSET: Final = 1 << (PLATFORM_SIZE * 8 - 1) + +FIXED_WIDTH_INT_BINARY_OPS: Final = { + "+", + "-", + "*", + "//", + "%", + "&", + "|", + "^", + "<<", + ">>", + "+=", + "-=", + "*=", + "//=", + "%=", + "&=", + "|=", + "^=", + "<<=", + ">>=", +} + +# Binary operations on bools that are specialized and don't just promote operands to int +BOOL_BINARY_OPS: Final = {"&", "&=", "|", "|=", "^", "^=", "==", "!=", "<", "<=", ">", ">="} + + +class LowLevelIRBuilder: + """A "low-level" IR builder class. + + LowLevelIRBuilder provides core abstractions we use for constructing + IR as well as a number of higher-level ones (accessing attributes, + calling functions and methods, and coercing between types, for + example). + + The core principle of the low-level IR builder is that all of its + facilities operate solely on the mypyc IR level and not the mypy AST + level---it has *no knowledge* of mypy types or expressions. + + The mypyc.irbuilder.builder.IRBuilder class wraps an instance of this + class and provides additional functionality to transform mypy AST nodes + to IR. + """ + + def __init__(self, errors: Errors | None, options: CompilerOptions) -> None: + self.errors = errors + self.options = options + self.args: list[Register] = [] + self.blocks: list[BasicBlock] = [] + # Stack of except handler entry blocks + self.error_handlers: list[BasicBlock | None] = [None] + # Values that we need to keep alive as long as we have borrowed + # temporaries. Use flush_keep_alives() to mark the end of the live range. 
+ self.keep_alives: list[Value] = [] + + def set_module(self, module_name: str, module_path: str) -> None: + """Set the name and path of the current module.""" + self.module_name = module_name + self.module_path = module_path + + # Basic operations + + def add(self, op: Op) -> Value: + """Add an op.""" + assert not self.blocks[-1].terminated, "Can't add to finished block" + self.blocks[-1].ops.append(op) + return op + + def goto(self, target: BasicBlock) -> None: + """Add goto to a basic block.""" + if not self.blocks[-1].terminated: + self.add(Goto(target)) + + def activate_block(self, block: BasicBlock) -> None: + """Add a basic block and make it the active one (target of adds).""" + if self.blocks: + assert self.blocks[-1].terminated + + block.error_handler = self.error_handlers[-1] + self.blocks.append(block) + + def goto_and_activate(self, block: BasicBlock) -> None: + """Add goto a block and make it the active block.""" + self.goto(block) + self.activate_block(block) + + def keep_alive(self, values: list[Value], *, steal: bool = False) -> None: + self.add(KeepAlive(values, steal=steal)) + + def load_mem(self, ptr: Value, value_type: RType, *, borrow: bool = False) -> Value: + return self.add(LoadMem(value_type, ptr, borrow=borrow)) + + def push_error_handler(self, handler: BasicBlock | None) -> None: + self.error_handlers.append(handler) + + def pop_error_handler(self) -> BasicBlock | None: + return self.error_handlers.pop() + + def self(self) -> Register: + """Return reference to the 'self' argument. + + This only works in a method. + """ + return self.args[0] + + def flush_keep_alives(self) -> None: + if self.keep_alives: + self.add(KeepAlive(self.keep_alives.copy())) + self.keep_alives = [] + + def debug_print(self, toprint: str | Value) -> None: + if isinstance(toprint, str): + toprint = self.load_str(toprint) + self.primitive_op(debug_print_op, [toprint], -1) + + # Type conversions + + def box(self, src: Value) -> Value: + if src.type.is_unboxed: + if isinstance(src, Integer) and is_tagged(src.type): + return self.add(LoadLiteral(src.value >> 1, rtype=object_rprimitive)) + return self.add(Box(src)) + else: + return src + + def unbox_or_cast( + self, + src: Value, + target_type: RType, + line: int, + *, + can_borrow: bool = False, + unchecked: bool = False, + ) -> Value: + if target_type.is_unboxed: + return self.add(Unbox(src, target_type, line)) + else: + if can_borrow: + self.keep_alives.append(src) + return self.add(Cast(src, target_type, line, borrow=can_borrow, unchecked=unchecked)) + + def coerce( + self, + src: Value, + target_type: RType, + line: int, + force: bool = False, + *, + can_borrow: bool = False, + ) -> Value: + """Generate a coercion/cast from one type to other (only if needed). + + For example, int -> object boxes the source int; int -> int emits nothing; + object -> int unboxes the object. All conversions preserve object value. + + If force is true, always generate an op (even if it is just an assignment) so + that the result will have exactly target_type as the type. + + Returns the register with the converted value (may be same as src). 
+ """ + src_type = src.type + if src_type.is_unboxed and not target_type.is_unboxed: + # Unboxed -> boxed + return self.box(src) + if (src_type.is_unboxed and target_type.is_unboxed) and not is_runtime_subtype( + src_type, target_type + ): + if ( + isinstance(src, Integer) + and is_short_int_rprimitive(src_type) + and is_fixed_width_rtype(target_type) + ): + value = src.numeric_value() + if not check_native_int_range(target_type, value): + self.error(f'Value {value} is out of range for "{target_type}"', line) + return Integer(src.value >> 1, target_type) + elif is_int_rprimitive(src_type) and is_fixed_width_rtype(target_type): + return self.coerce_int_to_fixed_width(src, target_type, line) + elif is_fixed_width_rtype(src_type) and is_int_rprimitive(target_type): + return self.coerce_fixed_width_to_int(src, line) + elif is_short_int_rprimitive(src_type) and is_fixed_width_rtype(target_type): + return self.coerce_short_int_to_fixed_width(src, target_type, line) + elif ( + isinstance(src_type, RPrimitive) + and isinstance(target_type, RPrimitive) + and src_type.is_native_int + and target_type.is_native_int + and src_type.size == target_type.size + and src_type.is_signed == target_type.is_signed + ): + # Equivalent types + return src + elif is_bool_or_bit_rprimitive(src_type) and is_tagged(target_type): + shifted = self.int_op( + bool_rprimitive, src, Integer(1, bool_rprimitive), IntOp.LEFT_SHIFT + ) + return self.add(Extend(shifted, target_type, signed=False)) + elif is_bool_or_bit_rprimitive(src_type) and is_fixed_width_rtype(target_type): + return self.add(Extend(src, target_type, signed=False)) + elif isinstance(src, Integer) and is_float_rprimitive(target_type): + if is_tagged(src_type): + return Float(float(src.value // 2)) + return Float(float(src.value)) + elif is_tagged(src_type) and is_float_rprimitive(target_type): + return self.int_to_float(src, line) + elif ( + isinstance(src_type, RTuple) + and isinstance(target_type, RTuple) + and len(src_type.types) == len(target_type.types) + ): + # Coerce between two tuple types by coercing each item separately + values = [] + for i in range(len(src_type.types)): + v = None + if isinstance(src, TupleSet): + item = src.items[i] + # We can't reuse register values, since they can be modified. + if not isinstance(item, Register): + v = item + if v is None: + v = TupleGet(src, i) + self.add(v) + values.append(v) + return self.add( + TupleSet( + [self.coerce(v, t, line) for v, t in zip(values, target_type.types)], line + ) + ) + # To go between any other unboxed types, we go through a boxed + # in-between value, for simplicity. 
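# Illustrative sketch (editorial; not part of the mypyc source): tagged Python ints
# keep "short" values shifted left by one bit (low bit clear), which is why the
# coercion code above recovers the plain value with `src.value >> 1`.  Hypothetical
# helpers showing the encoding:
def tag_short_int(n: int) -> int:
    return n << 1                     # low bit 0 marks an unboxed short int

def untag_short_int(tagged: int) -> int:
    return tagged >> 1                # inverse shift, as used by box()/coerce() above

assert untag_short_int(tag_short_int(42)) == 42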
+ tmp = self.box(src) + return self.unbox_or_cast(tmp, target_type, line) + if (not src_type.is_unboxed and target_type.is_unboxed) or not is_subtype( + src_type, target_type + ): + return self.unbox_or_cast(src, target_type, line, can_borrow=can_borrow) + elif force: + tmp = Register(target_type) + self.add(Assign(tmp, src)) + return tmp + return src + + def coerce_int_to_fixed_width(self, src: Value, target_type: RType, line: int) -> Value: + assert is_fixed_width_rtype(target_type), target_type + assert isinstance(target_type, RPrimitive), target_type + + res = Register(target_type) + + fast, slow, end = BasicBlock(), BasicBlock(), BasicBlock() + + check = self.check_tagged_short_int(src, line) + self.add(Branch(check, fast, slow, Branch.BOOL)) + + self.activate_block(fast) + + size = target_type.size + if size < int_rprimitive.size: + # Add a range check when the target type is smaller than the source type + fast2, fast3 = BasicBlock(), BasicBlock() + upper_bound = 1 << (size * 8 - 1) + if not target_type.is_signed: + upper_bound *= 2 + check2 = self.add(ComparisonOp(src, Integer(upper_bound, src.type), ComparisonOp.SLT)) + self.add(Branch(check2, fast2, slow, Branch.BOOL)) + self.activate_block(fast2) + if target_type.is_signed: + lower_bound = -upper_bound + else: + lower_bound = 0 + check3 = self.add(ComparisonOp(src, Integer(lower_bound, src.type), ComparisonOp.SGE)) + self.add(Branch(check3, fast3, slow, Branch.BOOL)) + self.activate_block(fast3) + tmp = self.int_op( + c_pyssize_t_rprimitive, + src, + Integer(1, c_pyssize_t_rprimitive), + IntOp.RIGHT_SHIFT, + line, + ) + tmp = self.add(Truncate(tmp, target_type)) + else: + if size > int_rprimitive.size: + tmp = self.add(Extend(src, target_type, signed=True)) + else: + tmp = src + tmp = self.int_op(target_type, tmp, Integer(1, target_type), IntOp.RIGHT_SHIFT, line) + + self.add(Assign(res, tmp)) + self.goto(end) + + self.activate_block(slow) + if is_int64_rprimitive(target_type) or ( + is_int32_rprimitive(target_type) and size == int_rprimitive.size + ): + # Slow path calls a library function that handles more complex logic + ptr = self.int_op( + pointer_rprimitive, src, Integer(1, pointer_rprimitive), IntOp.XOR, line + ) + ptr2 = Register(c_pointer_rprimitive) + self.add(Assign(ptr2, ptr)) + if is_int64_rprimitive(target_type): + conv_op = int_to_int64_op + else: + conv_op = int_to_int32_op + tmp = self.call_c(conv_op, [ptr2], line) + self.add(Assign(res, tmp)) + self.add(KeepAlive([src])) + self.goto(end) + elif is_int32_rprimitive(target_type): + # Slow path just always generates an OverflowError + self.call_c(int32_overflow, [], line) + self.add(Unreachable()) + elif is_int16_rprimitive(target_type): + # Slow path just always generates an OverflowError + self.call_c(int16_overflow, [], line) + self.add(Unreachable()) + elif is_uint8_rprimitive(target_type): + # Slow path just always generates an OverflowError + self.call_c(uint8_overflow, [], line) + self.add(Unreachable()) + else: + assert False, target_type + + self.activate_block(end) + return res + + def coerce_short_int_to_fixed_width(self, src: Value, target_type: RType, line: int) -> Value: + if is_int64_rprimitive(target_type) or ( + PLATFORM_SIZE == 4 and is_int32_rprimitive(target_type) + ): + return self.int_op(target_type, src, Integer(1, target_type), IntOp.RIGHT_SHIFT, line) + # TODO: i32 on 64-bit platform + assert False, (src.type, target_type, PLATFORM_SIZE) + + def coerce_fixed_width_to_int(self, src: Value, line: int) -> Value: + if ( + 
(is_int32_rprimitive(src.type) and PLATFORM_SIZE == 8) + or is_int16_rprimitive(src.type) + or is_uint8_rprimitive(src.type) + ): + # Simple case -- just sign extend and shift. + extended = self.add(Extend(src, c_pyssize_t_rprimitive, signed=src.type.is_signed)) + return self.int_op( + int_rprimitive, + extended, + Integer(1, c_pyssize_t_rprimitive), + IntOp.LEFT_SHIFT, + line, + ) + + src_type = src.type + + assert is_fixed_width_rtype(src_type), src_type + assert isinstance(src_type, RPrimitive), src_type + + res = Register(int_rprimitive) + + fast, fast2, slow, end = BasicBlock(), BasicBlock(), BasicBlock(), BasicBlock() + + c1 = self.add(ComparisonOp(src, Integer(MAX_SHORT_INT, src_type), ComparisonOp.SLE)) + self.add(Branch(c1, fast, slow, Branch.BOOL)) + + self.activate_block(fast) + c2 = self.add(ComparisonOp(src, Integer(MIN_SHORT_INT, src_type), ComparisonOp.SGE)) + self.add(Branch(c2, fast2, slow, Branch.BOOL)) + + self.activate_block(slow) + if is_int64_rprimitive(src_type): + conv_op = int64_to_int_op + elif is_int32_rprimitive(src_type): + assert PLATFORM_SIZE == 4 + conv_op = ssize_t_to_int_op + else: + assert False, src_type + x = self.call_c(conv_op, [src], line) + self.add(Assign(res, x)) + self.goto(end) + + self.activate_block(fast2) + if int_rprimitive.size < src_type.size: + tmp = self.add(Truncate(src, c_pyssize_t_rprimitive)) + else: + tmp = src + s = self.int_op(int_rprimitive, tmp, Integer(1, tmp.type), IntOp.LEFT_SHIFT, line) + self.add(Assign(res, s)) + self.goto(end) + + self.activate_block(end) + return res + + def coerce_nullable(self, src: Value, target_type: RType, line: int) -> Value: + """Generate a coercion from a potentially null value.""" + if src.type.is_unboxed == target_type.is_unboxed and ( + (target_type.is_unboxed and is_runtime_subtype(src.type, target_type)) + or (not target_type.is_unboxed and is_subtype(src.type, target_type)) + ): + return src + + target = Register(target_type) + + valid, invalid, out = BasicBlock(), BasicBlock(), BasicBlock() + self.add(Branch(src, invalid, valid, Branch.IS_ERROR)) + + self.activate_block(valid) + coerced = self.coerce(src, target_type, line) + self.add(Assign(target, coerced, line)) + self.goto(out) + + self.activate_block(invalid) + error = self.add(LoadErrorValue(target_type)) + self.add(Assign(target, error, line)) + + self.goto_and_activate(out) + return target + + # Attribute access + + def get_attr( + self, obj: Value, attr: str, result_type: RType, line: int, *, borrow: bool = False + ) -> Value: + """Get a native or Python attribute of an object.""" + if ( + isinstance(obj.type, RInstance) + and obj.type.class_ir.is_ext_class + and obj.type.class_ir.has_attr(attr) + ): + op = GetAttr(obj, attr, line, borrow=borrow) + # For non-refcounted attribute types, the borrow might be + # disabled even if requested, so don't check 'borrow'. 
+ if op.is_borrowed: + self.keep_alives.append(obj) + return self.add(op) + elif isinstance(obj.type, RUnion): + return self.union_get_attr(obj, obj.type, attr, result_type, line) + else: + return self.py_get_attr(obj, attr, line) + + def union_get_attr( + self, obj: Value, rtype: RUnion, attr: str, result_type: RType, line: int + ) -> Value: + """Get an attribute of an object with a union type.""" + + def get_item_attr(value: Value) -> Value: + return self.get_attr(value, attr, result_type, line) + + return self.decompose_union_helper(obj, rtype, result_type, get_item_attr, line) + + def py_get_attr(self, obj: Value, attr: str, line: int) -> Value: + """Get a Python attribute (slow). + + Prefer get_attr() which generates optimized code for native classes. + """ + key = self.load_str(attr) + return self.primitive_op(py_getattr_op, [obj, key], line) + + # isinstance() checks + + def isinstance_helper(self, obj: Value, class_irs: list[ClassIR], line: int) -> Value: + """Fast path for isinstance() that checks against a list of native classes.""" + if not class_irs: + return self.false() + ret = self.isinstance_native(obj, class_irs[0], line) + for class_ir in class_irs[1:]: + + def other() -> Value: + return self.isinstance_native(obj, class_ir, line) + + ret = self.shortcircuit_helper("or", bool_rprimitive, lambda: ret, other, line) + return ret + + def get_type_of_obj(self, obj: Value, line: int) -> Value: + ob_type_address = self.add(GetElementPtr(obj, PyObject, "ob_type", line)) + ob_type = self.load_mem(ob_type_address, object_rprimitive, borrow=True) + self.add(KeepAlive([obj])) + return ob_type + + def type_is_op(self, obj: Value, type_obj: Value, line: int) -> Value: + typ = self.get_type_of_obj(obj, line) + return self.add(ComparisonOp(typ, type_obj, ComparisonOp.EQ, line)) + + def isinstance_native(self, obj: Value, class_ir: ClassIR, line: int) -> Value: + """Fast isinstance() check for a native class. + + If there are three or fewer concrete (non-trait) classes among the class + and all its children, use even faster type comparison checks `type(obj) + is typ`. + """ + concrete = all_concrete_classes(class_ir) + if concrete is None or len(concrete) > FAST_ISINSTANCE_MAX_SUBCLASSES + 1: + return self.primitive_op( + fast_isinstance_op, [obj, self.get_native_type(class_ir)], line + ) + if not concrete: + # There can't be any concrete instance that matches this. + return self.false() + type_obj = self.get_native_type(concrete[0]) + ret = self.type_is_op(obj, type_obj, line) + for c in concrete[1:]: + + def other() -> Value: + return self.type_is_op(obj, self.get_native_type(c), line) + + ret = self.shortcircuit_helper("or", bool_rprimitive, lambda: ret, other, line) + return ret + + # Calls + + def _construct_varargs( + self, + args: Sequence[tuple[Value, ArgKind, str | None]], + line: int, + *, + has_star: bool, + has_star2: bool, + ) -> tuple[Value | None, Value | None]: + """Construct *args and **kwargs from a collection of arguments + + This is pretty complicated, and almost all of the complication here stems from + one of two things (but mostly the second): + * The handling of ARG_STAR/ARG_STAR2. We want to create as much of the args/kwargs + values in one go as we can, so we collect values until our hand is forced, and + then we emit creation of the list/tuple, and expand it from there if needed. + + * Support potentially nullable argument values. 
This has very narrow applicability, + as this will never be done by our compiled Python code, but is critically used + by gen_glue_method when generating glue methods to mediate between the function + signature of a parent class and its subclasses. + + For named-only arguments, this is quite simple: if it is + null, don't put it in the dict. + + For positional-or-named arguments, things are much more complicated. + * First, anything that was passed as a positional arg + must be forwarded along as a positional arg. It *must + not* be converted to a named arg. This is because mypy + does not enforce that positional-or-named arguments + have the same name in subclasses, and it is not + uncommon for code to have different names in + subclasses (a bunch of mypy's visitors do this, for + example!). This is arguably a bug in both mypy and code doing + this, and they ought to be using positional-only arguments, but + positional-only arguments are new and ugly. + + * On the flip side, we're willing to accept the + infelicity of sometimes turning an argument that was + passed by keyword into a positional argument. It's wrong, + but it's very marginal, and avoiding it would require passing + a bitmask of which arguments were named with every function call, + or something similar. + (See some discussion of this in testComplicatedArgs) + + Thus, our strategy for positional-or-named arguments is to + always pass them as positional, except in the one + situation where we can not, and where we can be absolutely + sure they were passed by name: when an *earlier* + positional argument was missing its value. + + This means that if we have a method `f(self, x: int=..., y: object=...)`: + * x and y present: args=(x, y), kwargs={} + * x present, y missing: args=(x,), kwargs={} + * x missing, y present: args=(), kwargs={'y': y} + + To implement this, when we have multiple optional + positional arguments, we maintain a flag in a register + that tracks whether an argument has been missing, and for + each such optional argument (except the first), we check + the flag to determine whether to append the argument to + the *args list or add it to the **kwargs dict. What a + mess! + + This is what really makes everything here such a tangle; + otherwise the *args and **kwargs code could be separated. + + The arguments has_star and has_star2 indicate whether the target function + takes an ARG_STAR and ARG_STAR2 argument, respectively. + (These will always be true when making a pycall, and be based + on the actual target signature for a native call.) + """ + + star_result: Value | None = None + star2_result: Value | None = None + # We aggregate values that need to go into *args and **kwargs + # in these lists. Once all arguments are processed (in the + # happiest case), or we encounter an ARG_STAR/ARG_STAR2 or a + # nullable arg, then we create the list and/or dict. 
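# Illustrative sketch (editorial; not part of the mypyc source): the table in the
# docstring above for a glue method f(self, x: int = ..., y: object = ...) can be
# modelled in plain Python, with _MISSING standing in for a null register and
# seen_missing playing the role of the "has an argument been missing" flag:
_MISSING = object()

def build_args_kwargs(x=_MISSING, y=_MISSING):
    args, kwargs = [], {}
    seen_missing = False
    for name, value in (("x", x), ("y", y)):
        if value is _MISSING:
            seen_missing = True       # later optional args must now go by name
        elif seen_missing:
            kwargs[name] = value      # an earlier positional was missing: pass by keyword
        else:
            args.append(value)        # safe to forward positionally
    return tuple(args), kwargs

assert build_args_kwargs(1, 2) == ((1, 2), {})
assert build_args_kwargs(1) == ((1,), {})
assert build_args_kwargs(y=2) == ((), {"y": 2})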
+ star_values: list[Value] = [] + star2_keys: list[Value] = [] + star2_values: list[Value] = [] + + seen_empty_reg: Register | None = None + + for value, kind, name in args: + if kind == ARG_STAR: + if star_result is None: + # star args fastpath + if len(args) == 1: + # fn(*args) + if is_list_rprimitive(value.type): + value = self.primitive_op(list_tuple_op, [value], line) + elif not is_tuple_rprimitive(value.type) and not isinstance( + value.type, RTuple + ): + value = self.primitive_op(sequence_tuple_op, [value], line) + return value, None + elif len(args) == 2 and args[1][1] == ARG_STAR2: + # fn(*args, **kwargs) + # TODO: extend to cover(*args, **k, **w, **a, **r, **g, **s) + if is_tuple_rprimitive(value.type) or isinstance(value.type, RTuple): + star_result = value + elif is_list_rprimitive(value.type): + star_result = self.primitive_op(list_tuple_op, [value], line) + else: + star_result = self.primitive_op(sequence_tuple_op, [value], line) + + star2_arg = args[1] + star2_value = star2_arg[0] + if is_dict_rprimitive(star2_value.type): + star2_fastpath_op = dict_copy_op + else: + star2_fastpath_op = dict_copy + return star_result, self.primitive_op( + star2_fastpath_op, [star2_value], line + ) + # elif ...: TODO extend this to optimize fn(*args, k=1, **kwargs) case + # TODO optimize this case using the length utils - currently in review + star_result = self.new_list_op(star_values, line) + self.primitive_op(list_extend_op, [star_result, value], line) + elif kind == ARG_STAR2: + if star2_result is None: + if len(args) == 1: + # early exit with fastpath if the only arg is ARG_STAR2 + # TODO: can we maintain an empty tuple in memory and just reuse it again and again? + if is_dict_rprimitive(value.type): + star2_fastpath_op = dict_copy_op + else: + star2_fastpath_op = dict_copy + return self.new_tuple([], line), self.primitive_op( + star2_fastpath_op, [value], line + ) + + star2_result = self._create_dict(star2_keys, star2_values, line) + + self.call_c(dict_update_in_display_op, [star2_result, value], line=line) + else: + nullable = kind.is_optional() + maybe_pos = kind.is_positional() and has_star + maybe_named = kind.is_named() or (kind.is_optional() and name and has_star2) + + # If the argument is nullable, we need to create the + # relevant args/kwargs objects so that we can + # conditionally modify them. + if nullable: + if maybe_pos and star_result is None: + star_result = self.new_list_op(star_values, line) + if maybe_named and star2_result is None: + star2_result = self._create_dict(star2_keys, star2_values, line) + + # Easy cases: just collect the argument. + if maybe_pos and star_result is None: + star_values.append(value) + continue + + if maybe_named and star2_result is None: + assert name is not None + key = self.load_str(name) + star2_keys.append(key) + star2_values.append(value) + continue + + # OK, anything that is nullable or *after* a nullable arg needs to be here + # TODO: We could try harder to avoid creating basic blocks in the common case + new_seen_empty_reg = seen_empty_reg + + out = BasicBlock() + if nullable: + # If this is the first nullable positional arg we've seen, create + # a register to track whether anything has been null. + # (We won't *check* the register until the next argument, though.) 
+ if maybe_pos and not seen_empty_reg: + new_seen_empty_reg = Register(bool_rprimitive) + self.add(Assign(new_seen_empty_reg, self.false(), line)) + + skip = BasicBlock() if maybe_pos else out + keep = BasicBlock() + self.add(Branch(value, skip, keep, Branch.IS_ERROR)) + self.activate_block(keep) + + # If this could be positional or named and we /might/ have seen a missing + # positional arg, then we need to compile *both* a positional and named + # version! What a pain! + if maybe_pos and maybe_named and seen_empty_reg: + pos_block, named_block = BasicBlock(), BasicBlock() + self.add(Branch(seen_empty_reg, named_block, pos_block, Branch.BOOL)) + else: + pos_block = named_block = BasicBlock() + self.goto(pos_block) + + if maybe_pos: + self.activate_block(pos_block) + assert star_result + self.translate_special_method_call( + star_result, "append", [value], result_type=None, line=line + ) + self.goto(out) + + if maybe_named and (not maybe_pos or seen_empty_reg): + self.activate_block(named_block) + assert name is not None + key = self.load_str(name) + assert star2_result + self.translate_special_method_call( + star2_result, "__setitem__", [key, value], result_type=None, line=line + ) + self.goto(out) + + if nullable and maybe_pos and new_seen_empty_reg: + assert skip is not out + self.activate_block(skip) + self.add(Assign(new_seen_empty_reg, self.true(), line)) + self.goto(out) + + self.activate_block(out) + + seen_empty_reg = new_seen_empty_reg + + assert not (star_result or star_values) or has_star + assert not (star2_result or star2_values) or has_star2 + if has_star: + # If we managed to make it this far without creating a + # *args list, then we can directly create a + # tuple. Otherwise create the tuple from the list. + if star_result is None: + star_result = self.new_tuple(star_values, line) + elif not is_tuple_rprimitive(star_result.type): + # if star_result is a tuple we took the fast path + star_result = self.primitive_op(list_tuple_op, [star_result], line) + if has_star2 and star2_result is None and len(star2_keys) > 0: + # TODO: use dict_copy_op for simple cases of **kwargs + star2_result = self._create_dict(star2_keys, star2_values, line) + + return star_result, star2_result + + def py_call( + self, + function: Value, + arg_values: list[Value], + line: int, + arg_kinds: list[ArgKind] | None = None, + arg_names: Sequence[str | None] | None = None, + ) -> Value: + """Call a Python function (non-native and slow). + + Use py_call_op or py_call_with_kwargs_op for Python function call. + """ + result = self._py_vector_call(function, arg_values, line, arg_kinds, arg_names) + if result is not None: + return result + + # If all arguments are positional, we can use py_call_op. + if arg_kinds is None or all(kind == ARG_POS for kind in arg_kinds): + return self.call_c(py_call_op, [function] + arg_values, line) + + # Otherwise fallback to py_call_with_posargs_op or py_call_with_kwargs_op. 
+ assert arg_names is not None + + pos_args_tuple, kw_args_dict = self._construct_varargs( + list(zip(arg_values, arg_kinds, arg_names)), line, has_star=True, has_star2=True + ) + assert pos_args_tuple + + if kw_args_dict is None: + return self.call_c(py_call_with_posargs_op, [function, pos_args_tuple], line) + + return self.call_c(py_call_with_kwargs_op, [function, pos_args_tuple, kw_args_dict], line) + + def _py_vector_call( + self, + function: Value, + arg_values: list[Value], + line: int, + arg_kinds: list[ArgKind] | None = None, + arg_names: Sequence[str | None] | None = None, + ) -> Value | None: + """Call function using the vectorcall API if possible. + + Return the return value if successful. Return None if a non-vectorcall + API should be used instead. + """ + # We can do this if all args are positional or named (no *args or **kwargs, not optional). + if arg_kinds is None or all( + not kind.is_star() and not kind.is_optional() for kind in arg_kinds + ): + if arg_values: + # Create a C array containing all arguments as boxed values. + coerced_args = [self.coerce(arg, object_rprimitive, line) for arg in arg_values] + arg_ptr = self.setup_rarray(object_rprimitive, coerced_args, object_ptr=True) + else: + arg_ptr = Integer(0, object_pointer_rprimitive) + num_pos = num_positional_args(arg_values, arg_kinds) + keywords = self._vectorcall_keywords(arg_names) + value = self.call_c( + py_vectorcall_op, + [function, arg_ptr, Integer(num_pos, c_size_t_rprimitive), keywords], + line, + ) + if arg_values: + # Make sure arguments won't be freed until after the call. + # We need this because RArray doesn't support automatic + # memory management. + self.add(KeepAlive(coerced_args)) + return value + return None + + def _vectorcall_keywords(self, arg_names: Sequence[str | None] | None) -> Value: + """Return a reference to a tuple literal with keyword argument names. + + Return null pointer if there are no keyword arguments. + """ + if arg_names: + kw_list = [name for name in arg_names if name is not None] + if kw_list: + return self.add(LoadLiteral(tuple(kw_list), object_rprimitive)) + return Integer(0, object_rprimitive) + + def py_method_call( + self, + obj: Value, + method_name: str, + arg_values: list[Value], + line: int, + arg_kinds: list[ArgKind] | None, + arg_names: Sequence[str | None] | None, + ) -> Value: + """Call a Python method (non-native and slow).""" + result = self._py_vector_method_call( + obj, method_name, arg_values, line, arg_kinds, arg_names + ) + if result is not None: + return result + + if arg_kinds is None or all(kind == ARG_POS for kind in arg_kinds): + # Use legacy method call API + method_name_reg = self.load_str(method_name) + return self.call_c(py_method_call_op, [obj, method_name_reg] + arg_values, line) + else: + # Use py_call since it supports keyword arguments (and vectorcalls). + method = self.py_get_attr(obj, method_name, line) + return self.py_call(method, arg_values, line, arg_kinds=arg_kinds, arg_names=arg_names) + + def _py_vector_method_call( + self, + obj: Value, + method_name: str, + arg_values: list[Value], + line: int, + arg_kinds: list[ArgKind] | None, + arg_names: Sequence[str | None] | None, + ) -> Value | None: + """Call method using the vectorcall API if possible. + + Return the return value if successful. Return None if a non-vectorcall + API should be used instead. 
+ """ + if arg_kinds is None or all( + not kind.is_star() and not kind.is_optional() for kind in arg_kinds + ): + method_name_reg = self.load_str(method_name) + coerced_args = [ + self.coerce(arg, object_rprimitive, line) for arg in [obj] + arg_values + ] + arg_ptr = self.setup_rarray(object_rprimitive, coerced_args, object_ptr=True) + num_pos = num_positional_args(arg_values, arg_kinds) + keywords = self._vectorcall_keywords(arg_names) + value = self.call_c( + py_vectorcall_method_op, + [ + method_name_reg, + arg_ptr, + Integer((num_pos + 1) | PY_VECTORCALL_ARGUMENTS_OFFSET, c_size_t_rprimitive), + keywords, + ], + line, + ) + # Make sure arguments won't be freed until after the call. + # We need this because RArray doesn't support automatic + # memory management. + self.add(KeepAlive(coerced_args)) + return value + return None + + def call( + self, + decl: FuncDecl, + args: Sequence[Value], + arg_kinds: list[ArgKind], + arg_names: Sequence[str | None], + line: int, + *, + bitmap_args: list[Register] | None = None, + ) -> Value: + """Call a native function. + + If bitmap_args is given, they override the values of (some) of the bitmap + arguments used to track the presence of values for certain arguments. By + default, the values of the bitmap arguments are inferred from args. + """ + # Normalize args to positionals. + args = self.native_args_to_positional( + args, arg_kinds, arg_names, decl.sig, line, bitmap_args=bitmap_args + ) + return self.add(Call(decl, args, line)) + + def native_args_to_positional( + self, + args: Sequence[Value], + arg_kinds: list[ArgKind], + arg_names: Sequence[str | None], + sig: FuncSignature, + line: int, + *, + bitmap_args: list[Register] | None = None, + ) -> list[Value]: + """Prepare arguments for a native call. + + Given args/kinds/names and a target signature for a native call, map + keyword arguments to their appropriate place in the argument list, + fill in error values for unspecified default arguments, + package arguments that will go into *args/**kwargs into a tuple/dict, + and coerce arguments to the appropriate type. + """ + + sig_args = sig.args + n = sig.num_bitmap_args + if n: + sig_args = sig_args[:-n] + + sig_arg_kinds = [arg.kind for arg in sig_args] + sig_arg_names = [arg.name for arg in sig_args] + + concrete_kinds = [concrete_arg_kind(arg_kind) for arg_kind in arg_kinds] + formal_to_actual = map_actuals_to_formals( + concrete_kinds, + arg_names, + sig_arg_kinds, + sig_arg_names, + lambda n: AnyType(TypeOfAny.special_form), + ) + + # First scan for */** and construct those + has_star = has_star2 = False + star_arg_entries = [] + for lst, arg in zip(formal_to_actual, sig_args): + if arg.kind.is_star(): + star_arg_entries.extend([(args[i], arg_kinds[i], arg_names[i]) for i in lst]) + has_star = has_star or arg.kind == ARG_STAR + has_star2 = has_star2 or arg.kind == ARG_STAR2 + + star_arg, star2_arg = self._construct_varargs( + star_arg_entries, line, has_star=has_star, has_star2=has_star2 + ) + + # Flatten out the arguments, loading error values for default + # arguments, constructing tuples/dicts for star args, and + # coercing everything to the expected type. 
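# Illustrative sketch (editorial; not part of the mypyc source): the vectorcall
# helpers above lay out a call like f(1, 2, z=3) as one flat argument array, a
# positional count, and a tuple of keyword names.  A simplified plain-Python model
# (hypothetical function; positional args are assumed to have name None):
def vectorcall_layout(arg_values, arg_names):
    array = list(arg_values)                                      # all values, positional first
    num_positional = sum(1 for name in arg_names if name is None)
    kwnames = tuple(name for name in arg_names if name is not None)
    return array, num_positional, kwnames

assert vectorcall_layout([1, 2, 3], [None, None, "z"]) == ([1, 2, 3], 2, ("z",))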
+ output_args: list[Value] = [] + for lst, arg in zip(formal_to_actual, sig_args): + if arg.kind == ARG_STAR: + assert star_arg + output_arg = star_arg + elif arg.kind == ARG_STAR2: + output_arg = star2_arg or self._create_dict([], [], line) + elif not lst: + if is_fixed_width_rtype(arg.type): + output_arg = Integer(0, arg.type) + elif is_float_rprimitive(arg.type): + output_arg = Float(0.0) + else: + output_arg = self.add(LoadErrorValue(arg.type, is_borrowed=True)) + else: + base_arg = args[lst[0]] + + if arg_kinds[lst[0]].is_optional(): + output_arg = self.coerce_nullable(base_arg, arg.type, line) + else: + output_arg = self.coerce(base_arg, arg.type, line) + + output_args.append(output_arg) + + for i in reversed(range(n)): + if bitmap_args and i < len(bitmap_args): + # Use override provided by caller + output_args.append(bitmap_args[i]) + continue + # Infer values of bitmap args + bitmap = 0 + c = 0 + for lst, arg in zip(formal_to_actual, sig_args): + if arg.kind.is_optional() and arg.type.error_overlap: + if i * BITMAP_BITS <= c < (i + 1) * BITMAP_BITS: + if lst: + bitmap |= 1 << (c & (BITMAP_BITS - 1)) + c += 1 + output_args.append(Integer(bitmap, bitmap_rprimitive)) + + return output_args + + def gen_method_call( + self, + base: Value, + name: str, + arg_values: list[Value], + result_type: RType | None, + line: int, + arg_kinds: list[ArgKind] | None = None, + arg_names: list[str | None] | None = None, + can_borrow: bool = False, + ) -> Value: + """Generate either a native or Python method call.""" + # If we have *args, then fallback to Python method call. + if arg_kinds is not None and any(kind.is_star() for kind in arg_kinds): + return self.py_method_call(base, name, arg_values, line, arg_kinds, arg_names) + + # If the base type is one of ours, do a MethodCall + fast_name = FAST_PREFIX + name + if ( + isinstance(base.type, RInstance) + and (base.type.class_ir.is_ext_class or base.type.class_ir.has_method(fast_name)) + and not base.type.class_ir.builtin_base + ): + name = name if base.type.class_ir.is_ext_class else fast_name + if base.type.class_ir.has_method(name): + decl = base.type.class_ir.method_decl(name) + if arg_kinds is None: + assert arg_names is None, "arg_kinds not present but arg_names is" + arg_kinds = [ARG_POS for _ in arg_values] + arg_names = [None for _ in arg_values] + else: + assert arg_names is not None, "arg_kinds present but arg_names is not" + + # Normalize args to positionals. 
+ assert decl.bound_sig + arg_values = self.native_args_to_positional( + arg_values, arg_kinds, arg_names, decl.bound_sig, line + ) + return self.add(MethodCall(base, name, arg_values, line)) + elif base.type.class_ir.has_attr(name): + function = self.add(GetAttr(base, name, line)) + return self.py_call( + function, arg_values, line, arg_kinds=arg_kinds, arg_names=arg_names + ) + + elif isinstance(base.type, RUnion): + return self.union_method_call( + base, base.type, name, arg_values, result_type, line, arg_kinds, arg_names + ) + + # Try to do a special-cased method call + if not arg_kinds or arg_kinds == [ARG_POS] * len(arg_values): + target = self.translate_special_method_call( + base, name, arg_values, result_type, line, can_borrow=can_borrow + ) + if target: + return target + + # Fall back to Python method call + return self.py_method_call(base, name, arg_values, line, arg_kinds, arg_names) + + def union_method_call( + self, + base: Value, + obj_type: RUnion, + name: str, + arg_values: list[Value], + return_rtype: RType | None, + line: int, + arg_kinds: list[ArgKind] | None, + arg_names: list[str | None] | None, + ) -> Value: + """Generate a method call with a union type for the object.""" + # Union method call needs a return_rtype for the type of the output register. + # If we don't have one, use object_rprimitive. + return_rtype = return_rtype or object_rprimitive + + def call_union_item(value: Value) -> Value: + return self.gen_method_call( + value, name, arg_values, return_rtype, line, arg_kinds, arg_names + ) + + return self.decompose_union_helper(base, obj_type, return_rtype, call_union_item, line) + + # Loading various values + + def none(self) -> Value: + """Load unboxed None value (type: none_rprimitive).""" + return Integer(1, none_rprimitive) + + def true(self) -> Value: + """Load unboxed True value (type: bool_rprimitive).""" + return Integer(1, bool_rprimitive) + + def false(self) -> Value: + """Load unboxed False value (type: bool_rprimitive).""" + return Integer(0, bool_rprimitive) + + def none_object(self) -> Value: + """Load Python None value (type: object_rprimitive).""" + return self.add(LoadAddress(none_object_op.type, none_object_op.src, line=-1)) + + def true_object(self) -> Value: + """Load Python True object (type: object_rprimitive).""" + return self.add(LoadGlobal(object_rprimitive, "Py_True")) + + def false_object(self) -> Value: + """Load Python False object (type: object_rprimitive).""" + return self.add(LoadGlobal(object_rprimitive, "Py_False")) + + def load_int(self, value: int) -> Value: + """Load a tagged (Python) integer literal value.""" + if value > MAX_LITERAL_SHORT_INT or value < MIN_LITERAL_SHORT_INT: + return self.add(LoadLiteral(value, int_rprimitive)) + else: + return Integer(value) + + def load_float(self, value: float) -> Value: + """Load a float literal value.""" + return Float(value) + + def load_str(self, value: str) -> Value: + """Load a str literal value. + + This is useful for more than just str literals; for example, method calls + also require a PyObject * form for the name of the method. 
+ """ + return self.add(LoadLiteral(value, str_rprimitive)) + + def load_bytes(self, value: bytes) -> Value: + """Load a bytes literal value.""" + return self.add(LoadLiteral(value, bytes_rprimitive)) + + def load_complex(self, value: complex) -> Value: + """Load a complex literal value.""" + return self.add(LoadLiteral(value, object_rprimitive)) + + def load_static_checked( + self, + typ: RType, + identifier: str, + module_name: str | None = None, + namespace: str = NAMESPACE_STATIC, + line: int = -1, + error_msg: str | None = None, + ) -> Value: + if error_msg is None: + error_msg = f'name "{identifier}" is not defined' + ok_block, error_block = BasicBlock(), BasicBlock() + value = self.add(LoadStatic(typ, identifier, module_name, namespace, line=line)) + self.add(Branch(value, error_block, ok_block, Branch.IS_ERROR, rare=True)) + self.activate_block(error_block) + self.add(RaiseStandardError(RaiseStandardError.NAME_ERROR, error_msg, line)) + self.add(Unreachable()) + self.activate_block(ok_block) + return value + + def load_module(self, name: str) -> Value: + return self.add(LoadStatic(object_rprimitive, name, namespace=NAMESPACE_MODULE)) + + def get_native_type(self, cls: ClassIR) -> Value: + """Load native type object.""" + fullname = f"{cls.module_name}.{cls.name}" + return self.load_native_type_object(fullname) + + def load_native_type_object(self, fullname: str) -> Value: + module, name = fullname.rsplit(".", 1) + return self.add(LoadStatic(object_rprimitive, name, module, NAMESPACE_TYPE)) + + # Other primitive operations + + def binary_op(self, lreg: Value, rreg: Value, op: str, line: int) -> Value: + """Perform a binary operation. + + Generate specialized operations based on operand types, with a fallback + to generic operations. + """ + ltype = lreg.type + rtype = rreg.type + + # Special case tuple comparison here so that nested tuples can be supported + if isinstance(ltype, RTuple) and isinstance(rtype, RTuple) and op in ("==", "!="): + return self.compare_tuples(lreg, rreg, op, line) + + # Special case == and != when we can resolve the method call statically + if op in ("==", "!="): + value = self.translate_eq_cmp(lreg, rreg, op, line) + if value is not None: + return value + + # Special case various ops + if op in ("is", "is not"): + return self.translate_is_op(lreg, rreg, op, line) + if ( + is_bool_or_bit_rprimitive(ltype) + and is_bool_or_bit_rprimitive(rtype) + and op in BOOL_BINARY_OPS + ): + if op in ComparisonOp.signed_ops: + return self.bool_comparison_op(lreg, rreg, op, line) + else: + return self.bool_bitwise_op(lreg, rreg, op[0], line) + if isinstance(rtype, RInstance) and op in ("in", "not in"): + return self.translate_instance_contains(rreg, lreg, op, line) + if is_fixed_width_rtype(ltype): + if op in FIXED_WIDTH_INT_BINARY_OPS: + op = op.removesuffix("=") + if op != "//": + op_id = int_op_to_id[op] + else: + op_id = IntOp.DIV + if is_bool_or_bit_rprimitive(rtype): + rreg = self.coerce(rreg, ltype, line) + rtype = ltype + if is_fixed_width_rtype(rtype) or is_tagged(rtype): + return self.fixed_width_int_op(ltype, lreg, rreg, op_id, line) + if isinstance(rreg, Integer): + return self.fixed_width_int_op( + ltype, lreg, self.coerce(rreg, ltype, line), op_id, line + ) + elif op in ComparisonOp.signed_ops: + if is_int_rprimitive(rtype): + rreg = self.coerce_int_to_fixed_width(rreg, ltype, line) + elif is_bool_or_bit_rprimitive(rtype): + rreg = self.coerce(rreg, ltype, line) + op_id = ComparisonOp.signed_ops[op] + if is_fixed_width_rtype(rreg.type): + return 
self.comparison_op(lreg, rreg, op_id, line) + if isinstance(rreg, Integer): + return self.comparison_op(lreg, self.coerce(rreg, ltype, line), op_id, line) + elif is_fixed_width_rtype(rtype): + if op in FIXED_WIDTH_INT_BINARY_OPS: + op = op.removesuffix("=") + if op != "//": + op_id = int_op_to_id[op] + else: + op_id = IntOp.DIV + if isinstance(lreg, Integer): + return self.fixed_width_int_op( + rtype, self.coerce(lreg, rtype, line), rreg, op_id, line + ) + if is_tagged(ltype): + return self.fixed_width_int_op(rtype, lreg, rreg, op_id, line) + if is_bool_or_bit_rprimitive(ltype): + lreg = self.coerce(lreg, rtype, line) + return self.fixed_width_int_op(rtype, lreg, rreg, op_id, line) + elif op in ComparisonOp.signed_ops: + if is_int_rprimitive(ltype): + lreg = self.coerce_int_to_fixed_width(lreg, rtype, line) + elif is_bool_or_bit_rprimitive(ltype): + lreg = self.coerce(lreg, rtype, line) + op_id = ComparisonOp.signed_ops[op] + if isinstance(lreg, Integer): + return self.comparison_op(self.coerce(lreg, rtype, line), rreg, op_id, line) + if is_fixed_width_rtype(lreg.type): + return self.comparison_op(lreg, rreg, op_id, line) + + if is_float_rprimitive(ltype) or is_float_rprimitive(rtype): + if isinstance(lreg, Integer): + lreg = Float(float(lreg.numeric_value())) + elif isinstance(rreg, Integer): + rreg = Float(float(rreg.numeric_value())) + elif is_int_rprimitive(lreg.type): + lreg = self.int_to_float(lreg, line) + elif is_int_rprimitive(rreg.type): + rreg = self.int_to_float(rreg, line) + if is_float_rprimitive(lreg.type) and is_float_rprimitive(rreg.type): + if op in float_comparison_op_to_id: + return self.compare_floats(lreg, rreg, float_comparison_op_to_id[op], line) + if op.endswith("="): + base_op = op[:-1] + else: + base_op = op + if base_op in float_op_to_id: + return self.float_op(lreg, rreg, base_op, line) + + dunder_op = self.dunder_op(lreg, rreg, op, line) + if dunder_op: + return dunder_op + + primitive_ops_candidates = binary_ops.get(op, []) + target = self.matching_primitive_op(primitive_ops_candidates, [lreg, rreg], line) + assert target, "Unsupported binary operation: %s" % op + return target + + def dunder_op(self, lreg: Value, rreg: Value | None, op: str, line: int) -> Value | None: + """ + Dispatch a dunder method if applicable. + + For example for `a + b` it will use `a.__add__(b)` which can lead to higher performance + due to the fact that the method could be already compiled and optimized instead of going + all the way through `PyNumber_Add(a, b)` python api (making a jump into the python DL). + """ + ltype = lreg.type + if not isinstance(ltype, RInstance): + return None + + method_name = op_methods.get(op) if rreg else unary_op_methods.get(op) + if method_name is None: + return None + + if not ltype.class_ir.has_method(method_name): + return None + + decl = ltype.class_ir.method_decl(method_name) + if not rreg and len(decl.sig.args) != 1: + return None + + if rreg and (len(decl.sig.args) != 2 or not is_subtype(rreg.type, decl.sig.args[1].type)): + return None + + if rreg and is_subtype(not_implemented_op.type, decl.sig.ret_type): + # If the method is able to return NotImplemented, we should not optimize it. + # We can just let go so it will be handled through the python api. + return None + + args = [rreg] if rreg else [] + return self.gen_method_call(lreg, method_name, args, decl.sig.ret_type, line) + + def check_tagged_short_int(self, val: Value, line: int, negated: bool = False) -> Value: + """Check if a tagged integer is a short integer. 
+ + Return the result of the check (value of type 'bit'). + """ + int_tag = Integer(1, c_pyssize_t_rprimitive, line) + bitwise_and = self.int_op(c_pyssize_t_rprimitive, val, int_tag, IntOp.AND, line) + zero = Integer(0, c_pyssize_t_rprimitive, line) + op = ComparisonOp.NEQ if negated else ComparisonOp.EQ + check = self.comparison_op(bitwise_and, zero, op, line) + return check + + def compare_strings(self, lhs: Value, rhs: Value, op: str, line: int) -> Value: + """Compare two strings""" + if op == "==": + # We can specialize this case if one or both values are string literals + literal_fastpath = False + + def is_string_literal(value: Value) -> TypeGuard[LoadLiteral]: + return isinstance(value, LoadLiteral) and is_str_rprimitive(value.type) + + if is_string_literal(lhs): + if is_string_literal(rhs): + # we can optimize out the check entirely in some constant-folded cases + return self.true() if lhs.value == rhs.value else self.false() + + # if lhs argument is string literal, switch sides to match specializer C api + lhs, rhs = rhs, lhs + literal_fastpath = True + elif is_string_literal(rhs): + literal_fastpath = True + + if literal_fastpath: + literal_string = cast(str, cast(LoadLiteral, rhs).value) + literal_length = Integer(len(literal_string), c_pyssize_t_rprimitive, line) + return self.primitive_op(str_eq_literal, [lhs, rhs, literal_length], line) + + return self.primitive_op(str_eq, [lhs, rhs], line) + + elif op == "!=": + # perform a standard equality check, then negate + eq = self.compare_strings(lhs, rhs, "==", line) + return self.add(ComparisonOp(eq, self.false(), ComparisonOp.EQ, line)) + + # TODO: modify 'str' to use same interface as 'compare_bytes' as it would avoid + # call to PyErr_Occurred() below + + compare_result = self.call_c(unicode_compare, [lhs, rhs], line) + error_constant = Integer(-1, c_int_rprimitive, line) + compare_error_check = self.add( + ComparisonOp(compare_result, error_constant, ComparisonOp.EQ, line) + ) + exception_check, propagate, final_compare = BasicBlock(), BasicBlock(), BasicBlock() + branch = Branch(compare_error_check, exception_check, final_compare, Branch.BOOL) + branch.negated = False + self.add(branch) + self.activate_block(exception_check) + check_error_result = self.call_c(err_occurred_op, [], line) + null = Integer(0, pointer_rprimitive, line) + compare_error_check = self.add( + ComparisonOp(check_error_result, null, ComparisonOp.NEQ, line) + ) + branch = Branch(compare_error_check, propagate, final_compare, Branch.BOOL) + branch.negated = False + self.add(branch) + self.activate_block(propagate) + self.call_c(keep_propagating_op, [], line) + self.goto(final_compare) + self.activate_block(final_compare) + op_type = ComparisonOp.EQ if op == "==" else ComparisonOp.NEQ + return self.add(ComparisonOp(compare_result, Integer(0, c_int_rprimitive), op_type, line)) + + def compare_bytes(self, lhs: Value, rhs: Value, op: str, line: int) -> Value: + compare_result = self.call_c(bytes_compare, [lhs, rhs], line) + op_type = ComparisonOp.EQ if op == "==" else ComparisonOp.NEQ + return self.add(ComparisonOp(compare_result, Integer(1, c_int_rprimitive), op_type, line)) + + def compare_tuples(self, lhs: Value, rhs: Value, op: str, line: int = -1) -> Value: + """Compare two tuples item by item""" + # type cast to pass mypy check + assert isinstance(lhs.type, RTuple) and isinstance(rhs.type, RTuple), (lhs.type, rhs.type) + equal = True if op == "==" else False + result = Register(bool_rprimitive) + # tuples of different lengths + if len(lhs.type.types) != 
len(rhs.type.types): + self.add(Assign(result, self.false() if equal else self.true(), line)) + return result + # empty tuples + if len(lhs.type.types) == 0 and len(rhs.type.types) == 0: + self.add(Assign(result, self.true() if equal else self.false(), line)) + return result + length = len(lhs.type.types) + false_assign, true_assign, out = BasicBlock(), BasicBlock(), BasicBlock() + check_blocks = [BasicBlock() for _ in range(length)] + lhs_items = [self.add(TupleGet(lhs, i, line)) for i in range(length)] + rhs_items = [self.add(TupleGet(rhs, i, line)) for i in range(length)] + + if equal: + early_stop, final = false_assign, true_assign + else: + early_stop, final = true_assign, false_assign + + for i in range(len(lhs.type.types)): + if i != 0: + self.activate_block(check_blocks[i]) + lhs_item = lhs_items[i] + rhs_item = rhs_items[i] + compare = self.binary_op(lhs_item, rhs_item, op, line) + # Cast to bool if necessary since most types uses comparison returning a object type + # See generic_ops.py for more information + if not is_bool_or_bit_rprimitive(compare.type): + compare = self.primitive_op(bool_op, [compare], line) + if i < len(lhs.type.types) - 1: + branch = Branch(compare, early_stop, check_blocks[i + 1], Branch.BOOL) + else: + branch = Branch(compare, early_stop, final, Branch.BOOL) + # if op is ==, we branch on false, else branch on true + branch.negated = equal + self.add(branch) + self.activate_block(false_assign) + self.add(Assign(result, self.false(), line)) + self.goto(out) + self.activate_block(true_assign) + self.add(Assign(result, self.true(), line)) + self.goto_and_activate(out) + return result + + def translate_instance_contains(self, inst: Value, item: Value, op: str, line: int) -> Value: + res = self.gen_method_call(inst, "__contains__", [item], None, line) + if not is_bool_or_bit_rprimitive(res.type): + res = self.primitive_op(bool_op, [res], line) + if op == "not in": + res = self.bool_bitwise_op(res, Integer(1, rtype=bool_rprimitive), "^", line) + return res + + def bool_bitwise_op(self, lreg: Value, rreg: Value, op: str, line: int) -> Value: + if op == "&": + code = IntOp.AND + elif op == "|": + code = IntOp.OR + elif op == "^": + code = IntOp.XOR + else: + assert False, op + return self.add(IntOp(bool_rprimitive, lreg, rreg, code, line)) + + def bool_comparison_op(self, lreg: Value, rreg: Value, op: str, line: int) -> Value: + op_id = ComparisonOp.signed_ops[op] + return self.comparison_op(lreg, rreg, op_id, line) + + def _non_specialized_unary_op(self, value: Value, op: str, line: int) -> Value: + if isinstance(value.type, RInstance): + result = self.dunder_op(value, None, op, line) + if result is not None: + return result + primitive_ops_candidates = unary_ops.get(op, []) + target = self.matching_primitive_op(primitive_ops_candidates, [value], line) + assert target, "Unsupported unary operation: %s" % op + return target + + def unary_not(self, value: Value, line: int, *, likely_bool: bool = False) -> Value: + """Perform unary 'not'. 
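+        For bool/bit operands this compiles to a single XOR with 1. Known
+        container types (str, list, tuple, dict) and native instances are first
+        reduced to a bool via bool_value() and then negated.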
+ + Args: + likely_bool: The operand is likely a bool value, even if the type is something + more general, so specialize for bool values + """ + typ = value.type + if is_bool_or_bit_rprimitive(typ): + mask = Integer(1, typ, line) + return self.int_op(typ, value, mask, IntOp.XOR, line) + if is_tagged(typ) or is_fixed_width_rtype(typ): + return self.binary_op(value, Integer(0), "==", line) + if ( + is_str_rprimitive(typ) + or is_list_rprimitive(typ) + or is_tuple_rprimitive(typ) + or is_dict_rprimitive(typ) + or isinstance(typ, RInstance) + ): + bool_val = self.bool_value(value) + return self.unary_not(bool_val, line) + if is_optional_type(typ): + value_typ = optional_value_type(typ) + assert value_typ + if ( + is_str_rprimitive(value_typ) + or is_list_rprimitive(value_typ) + or is_tuple_rprimitive(value_typ) + or is_dict_rprimitive(value_typ) + or isinstance(value_typ, RInstance) + ): + # 'X | None' type: Check for None first and then specialize for X. + res = Register(bit_rprimitive) + cmp = self.add(ComparisonOp(value, self.none_object(), ComparisonOp.EQ, line)) + none, not_none, out = BasicBlock(), BasicBlock(), BasicBlock() + self.add(Branch(cmp, none, not_none, Branch.BOOL)) + self.activate_block(none) + self.add(Assign(res, self.true())) + self.goto(out) + self.activate_block(not_none) + val = self.unary_not( + self.unbox_or_cast(value, value_typ, line, can_borrow=True, unchecked=True), + line, + ) + self.add(Assign(res, val)) + self.goto(out) + self.activate_block(out) + return res + if likely_bool and is_object_rprimitive(typ): + # First quickly check if it's a bool, and otherwise fall back to generic op. + res = Register(bit_rprimitive) + false, not_false, true, other = BasicBlock(), BasicBlock(), BasicBlock(), BasicBlock() + out = BasicBlock() + cmp = self.add(ComparisonOp(value, self.true_object(), ComparisonOp.EQ, line)) + self.add(Branch(cmp, false, not_false, Branch.BOOL)) + self.activate_block(false) + self.add(Assign(res, self.false())) + self.goto(out) + self.activate_block(not_false) + cmp = self.add(ComparisonOp(value, self.false_object(), ComparisonOp.EQ, line)) + self.add(Branch(cmp, true, other, Branch.BOOL)) + self.activate_block(true) + self.add(Assign(res, self.true())) + self.goto(out) + self.activate_block(other) + val = self._non_specialized_unary_op(value, "not", line) + self.add(Assign(res, val)) + self.goto(out) + self.activate_block(out) + return res + return self._non_specialized_unary_op(value, "not", line) + + def unary_minus(self, value: Value, line: int) -> Value: + """Perform unary '-'.""" + typ = value.type + if isinstance(value, Integer): + # TODO: Overflow? Unsigned? 
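+            # Negating an Integer literal is constant folded at IR build time.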
+ return Integer(-value.numeric_value(), typ, line) + elif isinstance(value, Float): + return Float(-value.value, line) + elif is_fixed_width_rtype(typ): + # Translate to '0 - x' + return self.int_op(typ, Integer(0, typ), value, IntOp.SUB, line) + elif is_float_rprimitive(typ): + return self.add(FloatNeg(value, line)) + return self._non_specialized_unary_op(value, "-", line) + + def unary_plus(self, value: Value, line: int) -> Value: + """Perform unary '+'.""" + typ = value.type + if ( + is_tagged(typ) + or is_float_rprimitive(typ) + or is_bool_or_bit_rprimitive(typ) + or is_fixed_width_rtype(typ) + ): + return value + return self._non_specialized_unary_op(value, "+", line) + + def unary_invert(self, value: Value, line: int) -> Value: + """Perform unary '~'.""" + typ = value.type + if is_fixed_width_rtype(typ): + if typ.is_signed: + # Translate to 'x ^ -1' + return self.int_op(typ, value, Integer(-1, typ), IntOp.XOR, line) + else: + # Translate to 'x ^ 0xff...' + mask = (1 << (typ.size * 8)) - 1 + return self.int_op(typ, value, Integer(mask, typ), IntOp.XOR, line) + return self._non_specialized_unary_op(value, "~", line) + + def unary_op(self, value: Value, op: str, line: int) -> Value: + """Perform a unary operation.""" + if op == "not": + return self.unary_not(value, line) + elif op == "-": + return self.unary_minus(value, line) + elif op == "+": + return self.unary_plus(value, line) + elif op == "~": + return self.unary_invert(value, line) + raise RuntimeError("Unsupported unary operation: %s" % op) + + def make_dict(self, key_value_pairs: Sequence[DictEntry], line: int) -> Value: + result: Value | None = None + keys: list[Value] = [] + values: list[Value] = [] + for key, value in key_value_pairs: + if key is not None: + # key:value + if result is None: + keys.append(key) + values.append(value) + continue + + self.translate_special_method_call( + result, "__setitem__", [key, value], result_type=None, line=line + ) + else: + # **value + if result is None: + result = self._create_dict(keys, values, line) + + self.call_c(dict_update_in_display_op, [result, value], line=line) + + if result is None: + result = self._create_dict(keys, values, line) + + return result + + def new_list_op_with_length(self, length: Value, line: int) -> Value: + """This function returns an uninitialized list. + + If the length is non-zero, the caller must initialize the list, before + it can be made visible to user code -- otherwise the list object is broken. + You might need further initialization with `new_list_set_item_op` op. + + Args: + length: desired length of the new list. The rtype should be + c_pyssize_t_rprimitive + line: line number + """ + return self.call_c(new_list_op, [length], line) + + def new_list_op(self, values: list[Value], line: int) -> Value: + length: list[Value] = [Integer(len(values), c_pyssize_t_rprimitive, line)] + if len(values) >= LIST_BUILDING_EXPANSION_THRESHOLD: + return self.call_c(list_build_op, length + values, line) + + # If the length of the list is less than the threshold, + # LIST_BUILDING_EXPANSION_THRESHOLD, we directly expand the + # for-loop and inline the SetMem operation, which is faster + # than list_build_op, however generates more code. 
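+        # Inline path: allocate an uninitialized list, box each item, write it
+        # directly into the items buffer, and keep the list alive until done.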
+ result_list = self.call_c(new_list_op, length, line) + if not values: + return result_list + args = [self.coerce(item, object_rprimitive, line) for item in values] + ob_item_base = self.add(PrimitiveOp([result_list], list_items, line)) + for i in range(len(values)): + self.primitive_op( + buf_init_item, [ob_item_base, Integer(i, c_pyssize_t_rprimitive), args[i]], line + ) + self.add(KeepAlive([result_list])) + return result_list + + def new_set_op(self, values: list[Value], line: int) -> Value: + return self.primitive_op(new_set_op, values, line) + + def setup_rarray( + self, item_type: RType, values: Sequence[Value], *, object_ptr: bool = False + ) -> Value: + """Declare and initialize a new RArray, returning its address.""" + array = Register(RArray(item_type, len(values))) + self.add(AssignMulti(array, list(values))) + return self.add( + LoadAddress(object_pointer_rprimitive if object_ptr else c_pointer_rprimitive, array) + ) + + def shortcircuit_helper( + self, + op: str, + expr_type: RType, + left: Callable[[], Value], + right: Callable[[], Value], + line: int, + ) -> Value: + # Having actual Phi nodes would be really nice here! + target = Register(expr_type) + # left_body takes the value of the left side, right_body the right + left_body, right_body, next_block = BasicBlock(), BasicBlock(), BasicBlock() + # true_body is taken if the left is true, false_body if it is false. + # For 'and' the value is the right side if the left is true, and for 'or' + # it is the right side if the left is false. + true_body, false_body = (right_body, left_body) if op == "and" else (left_body, right_body) + + left_value = left() + self.add_bool_branch(left_value, true_body, false_body) + + self.activate_block(left_body) + left_coerced = self.coerce(left_value, expr_type, line) + self.add(Assign(target, left_coerced)) + self.goto(next_block) + + self.activate_block(right_body) + right_value = right() + right_coerced = self.coerce(right_value, expr_type, line) + self.add(Assign(target, right_coerced)) + self.goto(next_block) + + self.activate_block(next_block) + return target + + def bool_value(self, value: Value) -> Value: + """Return bool(value). + + The result type can be bit_rprimitive or bool_rprimitive. + """ + if is_bool_or_bit_rprimitive(value.type): + result = value + elif is_runtime_subtype(value.type, int_rprimitive): + zero = Integer(0, short_int_rprimitive) + result = self.comparison_op(value, zero, ComparisonOp.NEQ, value.line) + elif is_fixed_width_rtype(value.type): + zero = Integer(0, value.type) + result = self.add(ComparisonOp(value, zero, ComparisonOp.NEQ)) + elif is_str_rprimitive(value.type): + result = self.call_c(str_check_if_true, [value], value.line) + elif ( + is_list_rprimitive(value.type) + or is_dict_rprimitive(value.type) + or is_tuple_rprimitive(value.type) + ): + length = self.builtin_len(value, value.line) + zero = Integer(0) + result = self.binary_op(length, zero, "!=", value.line) + elif ( + isinstance(value.type, RInstance) + and value.type.class_ir.is_ext_class + and value.type.class_ir.has_method("__bool__") + ): + # Directly call the __bool__ method on classes that have it. 
+ result = self.gen_method_call(value, "__bool__", [], bool_rprimitive, value.line) + elif is_float_rprimitive(value.type): + result = self.compare_floats(value, Float(0.0), FloatComparisonOp.NEQ, value.line) + else: + value_type = optional_value_type(value.type) + if value_type is not None: + not_none = self.translate_is_op(value, self.none_object(), "is not", value.line) + always_truthy = False + if isinstance(value_type, RInstance): + # check whether X.__bool__ is always just the default (object.__bool__) + if not value_type.class_ir.has_method( + "__bool__" + ) and value_type.class_ir.is_method_final("__bool__"): + always_truthy = True + + if always_truthy: + result = not_none + else: + # "X | None" where X may be falsey and requires a check + result = Register(bit_rprimitive) + true, false, end = BasicBlock(), BasicBlock(), BasicBlock() + branch = Branch(not_none, true, false, Branch.BOOL) + self.add(branch) + self.activate_block(true) + # unbox_or_cast instead of coerce because we want the + # type to change even if it is a subtype. + remaining = self.unbox_or_cast(value, value_type, value.line) + as_bool = self.bool_value(remaining) + self.add(Assign(result, as_bool)) + self.goto(end) + self.activate_block(false) + self.add(Assign(result, Integer(0, bit_rprimitive))) + self.goto(end) + self.activate_block(end) + else: + result = self.primitive_op(bool_op, [value], value.line) + return result + + def add_bool_branch(self, value: Value, true: BasicBlock, false: BasicBlock) -> None: + opt_value_type = optional_value_type(value.type) + if opt_value_type is None: + bool_value = self.bool_value(value) + self.add(Branch(bool_value, true, false, Branch.BOOL)) + else: + # Special-case optional types + is_none = self.translate_is_op(value, self.none_object(), "is not", value.line) + branch = Branch(is_none, true, false, Branch.BOOL) + self.add(branch) + always_truthy = False + if isinstance(opt_value_type, RInstance): + # check whether X.__bool__ is always just the default (object.__bool__) + if not opt_value_type.class_ir.has_method( + "__bool__" + ) and opt_value_type.class_ir.is_method_final("__bool__"): + always_truthy = True + + if not always_truthy: + # Optional[X] where X may be falsey and requires a check + branch.true = BasicBlock() + self.activate_block(branch.true) + # unbox_or_cast instead of coerce because we want the + # type to change even if it is a subtype. 
+ remaining = self.unbox_or_cast(value, opt_value_type, value.line) + self.add_bool_branch(remaining, true, false) + + def call_c( + self, + desc: CFunctionDescription, + args: list[Value], + line: int, + result_type: RType | None = None, + ) -> Value: + """Call function using C/native calling convention (not a Python callable).""" + # Handle void function via singleton RVoid instance + coerced = [] + # Coerce fixed number arguments + for i in range(min(len(args), len(desc.arg_types))): + formal_type = desc.arg_types[i] + arg = args[i] + arg = self.coerce(arg, formal_type, line) + coerced.append(arg) + # Reorder args if necessary + if desc.ordering is not None: + assert desc.var_arg_type is None + coerced = [coerced[i] for i in desc.ordering] + # Coerce any var_arg + var_arg_idx = -1 + if desc.var_arg_type is not None: + var_arg_idx = len(desc.arg_types) + for i in range(len(desc.arg_types), len(args)): + arg = args[i] + arg = self.coerce(arg, desc.var_arg_type, line) + coerced.append(arg) + # Add extra integer constant if any + for item in desc.extra_int_constants: + val, typ = item + extra_int_constant = Integer(val, typ, line) + coerced.append(extra_int_constant) + error_kind = desc.error_kind + if error_kind == ERR_NEG_INT: + # Handled with an explicit comparison + error_kind = ERR_NEVER + target = self.add( + CallC( + desc.c_function_name, + coerced, + desc.return_type, + desc.steals, + desc.is_borrowed, + error_kind, + line, + var_arg_idx, + is_pure=desc.is_pure, + returns_null=desc.returns_null, + capsule=desc.capsule, + ) + ) + if desc.is_borrowed: + # If the result is borrowed, force the arguments to be + # kept alive afterwards, as otherwise the result might be + # immediately freed, at the risk of a dangling pointer. + for arg in coerced: + if not isinstance(arg, (Integer, LoadLiteral)): + self.keep_alives.append(arg) + if desc.error_kind == ERR_NEG_INT: + comp = ComparisonOp(target, Integer(0, desc.return_type, line), ComparisonOp.SGE, line) + comp.error_kind = ERR_FALSE + self.add(comp) + + if desc.truncated_type is None: + result = target + else: + truncate = self.add(Truncate(target, desc.truncated_type)) + result = truncate + if result_type and not is_runtime_subtype(result.type, result_type): + if is_none_rprimitive(result_type): + # Special case None return. The actual result may actually be a bool + # and so we can't just coerce it. + result = self.none() + else: + result = self.coerce(target, result_type, line, can_borrow=desc.is_borrowed) + return result + + def matching_call_c( + self, + candidates: list[CFunctionDescription], + args: list[Value], + line: int, + result_type: RType | None = None, + can_borrow: bool = False, + ) -> Value | None: + matching: CFunctionDescription | None = None + for desc in candidates: + if len(desc.arg_types) != len(args): + continue + if all( + is_subtype(actual.type, formal) for actual, formal in zip(args, desc.arg_types) + ) and (not desc.is_borrowed or can_borrow): + if matching: + assert matching.priority != desc.priority, "Ambiguous:\n1) {}\n2) {}".format( + matching, desc + ) + if desc.priority > matching.priority: + matching = desc + else: + matching = desc + if matching: + target = self.call_c(matching, args, line, result_type) + return target + return None + + def primitive_op( + self, + desc: PrimitiveDescription, + args: list[Value], + line: int, + result_type: RType | None = None, + ) -> Value: + """Add a primitive op.""" + # Does this primitive map into calling a Python C API + # or an internal mypyc C API function? 
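+        # If so, build an equivalent CFunctionDescription and reuse the
+        # call_c() path instead of emitting a PrimitiveOp.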
+ if desc.c_function_name: + # TODO: Generate PrimitiveOps here and transform them into CallC + # ops only later in the lowering pass + c_desc = CFunctionDescription( + desc.name, + desc.arg_types, + desc.return_type, + desc.var_arg_type, + desc.truncated_type, + desc.c_function_name, + desc.error_kind, + desc.steals, + desc.is_borrowed, + desc.ordering, + desc.extra_int_constants, + desc.priority, + is_pure=desc.is_pure, + returns_null=False, + capsule=desc.capsule, + ) + return self.call_c(c_desc, args, line, result_type=result_type) + + # This primitive gets transformed in a lowering pass to + # lower-level IR ops using a custom transform function. + + coerced = [] + # Coerce fixed number arguments + for i in range(min(len(args), len(desc.arg_types))): + formal_type = desc.arg_types[i] + arg = args[i] + assert formal_type is not None # TODO + arg = self.coerce(arg, formal_type, line) + coerced.append(arg) + assert desc.ordering is None + assert desc.var_arg_type is None + assert not desc.extra_int_constants + target = self.add(PrimitiveOp(coerced, desc, line=line)) + if desc.is_borrowed: + # If the result is borrowed, force the arguments to be + # kept alive afterwards, as otherwise the result might be + # immediately freed, at the risk of a dangling pointer. + for arg in coerced: + if not isinstance(arg, (Integer, LoadLiteral)): + self.keep_alives.append(arg) + if desc.error_kind == ERR_NEG_INT: + comp = ComparisonOp(target, Integer(0, desc.return_type, line), ComparisonOp.SGE, line) + comp.error_kind = ERR_FALSE + self.add(comp) + + assert desc.truncated_type is None + result = target + if result_type and not is_runtime_subtype(result.type, result_type): + if is_none_rprimitive(result_type): + # Special case None return. The actual result may actually be a bool + # and so we can't just coerce it. + result = self.none() + else: + result = self.coerce(result, result_type, line, can_borrow=desc.is_borrowed) + return result + + def matching_primitive_op( + self, + candidates: list[PrimitiveDescription], + args: list[Value], + line: int, + result_type: RType | None = None, + can_borrow: bool = False, + ) -> Value | None: + matching: PrimitiveDescription | None = None + for desc in candidates: + if len(desc.arg_types) != len(args): + continue + if desc.experimental and not self.options.experimental_features: + continue + if all( + # formal is not None and # TODO + is_subtype(actual.type, formal) + for actual, formal in zip(args, desc.arg_types) + ) and (not desc.is_borrowed or can_borrow): + if matching: + assert matching.priority != desc.priority, "Ambiguous:\n1) {}\n2) {}".format( + matching, desc + ) + if desc.priority > matching.priority: + matching = desc + else: + matching = desc + if matching: + return self.primitive_op(matching, args, line=line, result_type=result_type) + return None + + def int_op(self, type: RType, lhs: Value, rhs: Value, op: int, line: int = -1) -> Value: + """Generate a native integer binary op. + + Use native/C semantics, which sometimes differ from Python + semantics. + + Args: + type: Either int64_rprimitive or int32_rprimitive + op: IntOp.* constant (e.g. IntOp.ADD) + """ + return self.add(IntOp(type, lhs, rhs, op, line)) + + def float_op(self, lhs: Value, rhs: Value, op: str, line: int) -> Value: + """Generate a native float binary arithmetic operation. + + This follows Python semantics (e.g. raise exception on division by zero). + Add a FloatOp directly if you want low-level semantics. + + Args: + op: Binary operator (e.g. 
'+' or '*') + """ + op_id = float_op_to_id[op] + if op_id in (FloatOp.DIV, FloatOp.MOD): + if not (isinstance(rhs, Float) and rhs.value != 0.0): + c = self.compare_floats(rhs, Float(0.0), FloatComparisonOp.EQ, line) + err, ok = BasicBlock(), BasicBlock() + self.add(Branch(c, err, ok, Branch.BOOL, rare=True)) + self.activate_block(err) + if op_id == FloatOp.DIV: + msg = "float division by zero" + else: + msg = "float modulo" + self.add(RaiseStandardError(RaiseStandardError.ZERO_DIVISION_ERROR, msg, line)) + self.add(Unreachable()) + self.activate_block(ok) + if op_id == FloatOp.MOD: + # Adjust the result to match Python semantics (FloatOp follows C semantics). + return self.float_mod(lhs, rhs, line) + else: + return self.add(FloatOp(lhs, rhs, op_id, line)) + + def float_mod(self, lhs: Value, rhs: Value, line: int) -> Value: + """Perform x % y on floats using Python semantics.""" + mod = self.add(FloatOp(lhs, rhs, FloatOp.MOD, line)) + res = Register(float_rprimitive) + self.add(Assign(res, mod)) + tricky, adjust, copysign, done = BasicBlock(), BasicBlock(), BasicBlock(), BasicBlock() + is_zero = self.add(FloatComparisonOp(res, Float(0.0), FloatComparisonOp.EQ, line)) + self.add(Branch(is_zero, copysign, tricky, Branch.BOOL)) + self.activate_block(tricky) + same_signs = self.is_same_float_signs(lhs, rhs, line) + self.add(Branch(same_signs, done, adjust, Branch.BOOL)) + self.activate_block(adjust) + adj = self.float_op(res, rhs, "+", line) + self.add(Assign(res, adj)) + self.add(Goto(done)) + self.activate_block(copysign) + # If the remainder is zero, CPython ensures the result has the + # same sign as the denominator. + adj = self.primitive_op(copysign_op, [Float(0.0), rhs], line) + self.add(Assign(res, adj)) + self.add(Goto(done)) + self.activate_block(done) + return res + + def compare_floats(self, lhs: Value, rhs: Value, op: int, line: int) -> Value: + return self.add(FloatComparisonOp(lhs, rhs, op, line)) + + def int_add(self, lhs: Value, rhs: Value | int) -> Value: + """Helper to add two native integers. + + The result has the type of lhs. + """ + if isinstance(rhs, int): + rhs = Integer(rhs, lhs.type) + return self.int_op(lhs.type, lhs, rhs, IntOp.ADD, line=-1) + + def int_sub(self, lhs: Value, rhs: Value | int) -> Value: + """Helper to subtract a native integer from another one. + + The result has the type of lhs. + """ + if isinstance(rhs, int): + rhs = Integer(rhs, lhs.type) + return self.int_op(lhs.type, lhs, rhs, IntOp.SUB, line=-1) + + def int_mul(self, lhs: Value, rhs: Value | int) -> Value: + """Helper to multiply two native integers. + + The result has the type of lhs. + """ + if isinstance(rhs, int): + rhs = Integer(rhs, lhs.type) + return self.int_op(lhs.type, lhs, rhs, IntOp.MUL, line=-1) + + def fixed_width_int_op( + self, type: RPrimitive, lhs: Value, rhs: Value, op: int, line: int + ) -> Value: + """Generate a binary op using Python fixed-width integer semantics. + + These may differ in overflow/rounding behavior from native/C ops. + + Args: + type: Either int64_rprimitive or int32_rprimitive + op: IntOp.* constant (e.g. 
IntOp.ADD) + """ + lhs = self.coerce(lhs, type, line) + rhs = self.coerce(rhs, type, line) + if op == IntOp.DIV: + if isinstance(rhs, Integer) and rhs.value not in (-1, 0): + if not type.is_signed: + return self.int_op(type, lhs, rhs, IntOp.DIV, line) + else: + # Inline simple division by a constant, so that C + # compilers can optimize more + return self.inline_fixed_width_divide(type, lhs, rhs, line) + if is_int64_rprimitive(type): + prim = int64_divide_op + elif is_int32_rprimitive(type): + prim = int32_divide_op + elif is_int16_rprimitive(type): + prim = int16_divide_op + elif is_uint8_rprimitive(type): + self.check_for_zero_division(rhs, type, line) + return self.int_op(type, lhs, rhs, op, line) + else: + assert False, type + return self.call_c(prim, [lhs, rhs], line) + if op == IntOp.MOD: + if isinstance(rhs, Integer) and rhs.value not in (-1, 0): + if not type.is_signed: + return self.int_op(type, lhs, rhs, IntOp.MOD, line) + else: + # Inline simple % by a constant, so that C + # compilers can optimize more + return self.inline_fixed_width_mod(type, lhs, rhs, line) + if is_int64_rprimitive(type): + prim = int64_mod_op + elif is_int32_rprimitive(type): + prim = int32_mod_op + elif is_int16_rprimitive(type): + prim = int16_mod_op + elif is_uint8_rprimitive(type): + self.check_for_zero_division(rhs, type, line) + return self.int_op(type, lhs, rhs, op, line) + else: + assert False, type + return self.call_c(prim, [lhs, rhs], line) + return self.int_op(type, lhs, rhs, op, line) + + def check_for_zero_division(self, rhs: Value, type: RType, line: int) -> None: + err, ok = BasicBlock(), BasicBlock() + is_zero = self.binary_op(rhs, Integer(0, type), "==", line) + self.add(Branch(is_zero, err, ok, Branch.BOOL)) + self.activate_block(err) + self.add( + RaiseStandardError( + RaiseStandardError.ZERO_DIVISION_ERROR, "integer division or modulo by zero", line + ) + ) + self.add(Unreachable()) + self.activate_block(ok) + + def inline_fixed_width_divide(self, type: RType, lhs: Value, rhs: Value, line: int) -> Value: + # Perform floor division (native division truncates) + res = Register(type) + div = self.int_op(type, lhs, rhs, IntOp.DIV, line) + self.add(Assign(res, div)) + same_signs = self.is_same_native_int_signs(type, lhs, rhs, line) + tricky, adjust, done = BasicBlock(), BasicBlock(), BasicBlock() + self.add(Branch(same_signs, done, tricky, Branch.BOOL)) + self.activate_block(tricky) + mul = self.int_op(type, res, rhs, IntOp.MUL, line) + mul_eq = self.add(ComparisonOp(mul, lhs, ComparisonOp.EQ, line)) + self.add(Branch(mul_eq, done, adjust, Branch.BOOL)) + self.activate_block(adjust) + adj = self.int_op(type, res, Integer(1, type), IntOp.SUB, line) + self.add(Assign(res, adj)) + self.add(Goto(done)) + self.activate_block(done) + return res + + def inline_fixed_width_mod(self, type: RType, lhs: Value, rhs: Value, line: int) -> Value: + # Perform floor modulus + res = Register(type) + mod = self.int_op(type, lhs, rhs, IntOp.MOD, line) + self.add(Assign(res, mod)) + same_signs = self.is_same_native_int_signs(type, lhs, rhs, line) + tricky, adjust, done = BasicBlock(), BasicBlock(), BasicBlock() + self.add(Branch(same_signs, done, tricky, Branch.BOOL)) + self.activate_block(tricky) + is_zero = self.add(ComparisonOp(res, Integer(0, type), ComparisonOp.EQ, line)) + self.add(Branch(is_zero, done, adjust, Branch.BOOL)) + self.activate_block(adjust) + adj = self.int_op(type, res, rhs, IntOp.ADD, line) + self.add(Assign(res, adj)) + self.add(Goto(done)) + self.activate_block(done) + return res + + def 
is_same_native_int_signs(self, type: RType, a: Value, b: Value, line: int) -> Value: + neg1 = self.add(ComparisonOp(a, Integer(0, type), ComparisonOp.SLT, line)) + neg2 = self.add(ComparisonOp(b, Integer(0, type), ComparisonOp.SLT, line)) + return self.add(ComparisonOp(neg1, neg2, ComparisonOp.EQ, line)) + + def is_same_float_signs(self, a: Value, b: Value, line: int) -> Value: + neg1 = self.add(FloatComparisonOp(a, Float(0.0), FloatComparisonOp.LT, line)) + neg2 = self.add(FloatComparisonOp(b, Float(0.0), FloatComparisonOp.LT, line)) + return self.add(ComparisonOp(neg1, neg2, ComparisonOp.EQ, line)) + + def comparison_op(self, lhs: Value, rhs: Value, op: int, line: int) -> Value: + return self.add(ComparisonOp(lhs, rhs, op, line)) + + def builtin_len(self, val: Value, line: int, use_pyssize_t: bool = False) -> Value: + """Generate len(val). + + Return short_int_rprimitive by default. + Return c_pyssize_t if use_pyssize_t is true (unshifted). + """ + typ = val.type + size_value = None + if is_list_rprimitive(typ) or is_tuple_rprimitive(typ) or is_bytes_rprimitive(typ): + size_value = self.primitive_op(var_object_size, [val], line) + elif is_set_rprimitive(typ) or is_frozenset_rprimitive(typ): + elem_address = self.add(GetElementPtr(val, PySetObject, "used")) + size_value = self.load_mem(elem_address, c_pyssize_t_rprimitive) + self.add(KeepAlive([val])) + elif is_dict_rprimitive(typ): + size_value = self.call_c(dict_ssize_t_size_op, [val], line) + elif is_str_rprimitive(typ): + size_value = self.call_c(str_ssize_t_size_op, [val], line) + + if size_value is not None: + if use_pyssize_t: + return size_value + offset = Integer(1, c_pyssize_t_rprimitive, line) + return self.int_op(short_int_rprimitive, size_value, offset, IntOp.LEFT_SHIFT, line) + + if isinstance(typ, RInstance): + # TODO: Support use_pyssize_t + assert not use_pyssize_t + length = self.gen_method_call(val, "__len__", [], int_rprimitive, line) + length = self.coerce(length, int_rprimitive, line) + ok, fail = BasicBlock(), BasicBlock() + cond = self.binary_op(length, Integer(0), ">=", line) + self.add_bool_branch(cond, ok, fail) + self.activate_block(fail) + self.add( + RaiseStandardError( + RaiseStandardError.VALUE_ERROR, "__len__() should return >= 0", line + ) + ) + self.add(Unreachable()) + self.activate_block(ok) + return length + + # generic case + if use_pyssize_t: + return self.call_c(generic_ssize_t_len_op, [val], line) + else: + return self.call_c(generic_len_op, [val], line) + + def new_tuple(self, items: list[Value], line: int) -> Value: + if items: + size: Value = Integer(len(items), c_pyssize_t_rprimitive) + return self.call_c(new_tuple_op, [size] + items, line) + else: + return self.call_c(load_empty_tuple_constant_op, [], line) + + def new_tuple_with_length(self, length: Value, line: int) -> Value: + """This function returns an uninitialized tuple. + + If the length is non-zero, the caller must initialize the tuple, before + it can be made visible to user code -- otherwise the tuple object is broken. + You might need further initialization with `new_tuple_set_item_op` op. + + Args: + length: desired length of the new tuple. 
The rtype should be + c_pyssize_t_rprimitive + line: line number + """ + return self.call_c(new_tuple_with_length_op, [length], line) + + def int_to_float(self, n: Value, line: int) -> Value: + return self.primitive_op(int_to_float_op, [n], line) + + def set_immortal_if_free_threaded(self, v: Value, line: int) -> None: + """Make an object immortal on free-threaded builds (to avoid contention).""" + if IS_FREE_THREADED and sys.version_info >= (3, 14): + self.primitive_op(set_immortal_op, [v], line) + + # Internal helpers + + def decompose_union_helper( + self, + obj: Value, + rtype: RUnion, + result_type: RType, + process_item: Callable[[Value], Value], + line: int, + ) -> Value: + """Generate isinstance() + specialized operations for union items. + + Say, for Union[A, B] generate ops resembling this (pseudocode): + + if isinstance(obj, A): + result = + else: + result = + + Args: + obj: value with a union type + rtype: the union type + result_type: result of the operation + process_item: callback to generate op for a single union item (arg is coerced + to union item type) + line: line number + """ + # TODO: Optimize cases where a single operation can handle multiple union items + # (say a method is implemented in a common base class) + fast_items = [] + rest_items = [] + for item in rtype.items: + if isinstance(item, RInstance): + fast_items.append(item) + else: + # For everything but RInstance we fall back to C API + rest_items.append(item) + exit_block = BasicBlock() + result = Register(result_type) + for i, item in enumerate(fast_items): + more_types = i < len(fast_items) - 1 or rest_items + if more_types: + # We are not at the final item so we need one more branch + op = self.isinstance_native(obj, item.class_ir, line) + true_block, false_block = BasicBlock(), BasicBlock() + self.add_bool_branch(op, true_block, false_block) + self.activate_block(true_block) + coerced = self.coerce(obj, item, line) + temp = process_item(coerced) + temp2 = self.coerce(temp, result_type, line) + self.add(Assign(result, temp2)) + self.goto(exit_block) + if more_types: + self.activate_block(false_block) + if rest_items: + # For everything else we use generic operation. Use force=True to drop the + # union type. + coerced = self.coerce(obj, object_rprimitive, line, force=True) + temp = process_item(coerced) + temp2 = self.coerce(temp, result_type, line) + self.add(Assign(result, temp2)) + self.goto(exit_block) + self.activate_block(exit_block) + return result + + def translate_special_method_call( + self, + base_reg: Value, + name: str, + args: list[Value], + result_type: RType | None, + line: int, + can_borrow: bool = False, + ) -> Value | None: + """Translate a method call which is handled nongenerically. + + These are special in the sense that we have code generated specifically for them. + They tend to be method calls which have equivalents in C that are more direct + than calling with the PyObject api. + + Return None if no translation found; otherwise return the target register. + """ + primitive_ops_candidates = method_call_ops.get(name, []) + primitive_op = self.matching_primitive_op( + primitive_ops_candidates, [base_reg] + args, line, result_type, can_borrow=can_borrow + ) + return primitive_op + + def translate_eq_cmp(self, lreg: Value, rreg: Value, expr_op: str, line: int) -> Value | None: + """Add an equality comparison operation. + + Note that this doesn't cover all possible types. 
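+        Return None if no specialized comparison applies and the caller should
+        fall back to a generic comparison.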
+ + Args: + expr_op: either '==' or '!=' + """ + ltype = lreg.type + rtype = rreg.type + + if is_str_rprimitive(ltype) and is_str_rprimitive(rtype): + return self.compare_strings(lreg, rreg, expr_op, line) + if is_bytes_rprimitive(ltype) and is_bytes_rprimitive(rtype): + return self.compare_bytes(lreg, rreg, expr_op, line) + + lopt = optional_value_type(ltype) + ropt = optional_value_type(rtype) + + # Can we do a quick comparison of two optional types (special case None values)? + fast_opt_eq = False + if lopt is not None: + if ropt is not None and is_same_type(lopt, ropt) and self._never_equal_to_none(lopt): + fast_opt_eq = True + if is_same_type(lopt, rtype) and self._never_equal_to_none(lopt): + fast_opt_eq = True + elif ropt is not None: + if is_same_type(ropt, ltype) and self._never_equal_to_none(ropt): + fast_opt_eq = True + if fast_opt_eq: + return self._translate_fast_optional_eq_cmp(lreg, rreg, expr_op, line) + + if not (isinstance(ltype, RInstance) and ltype == rtype): + return None + + class_ir = ltype.class_ir + # Check whether any subclasses of the operand redefines __eq__ + # or it might be redefined in a Python parent class or by + # dataclasses + cmp_varies_at_runtime = ( + not class_ir.is_method_final("__eq__") + or not class_ir.is_method_final("__ne__") + or class_ir.inherits_python + or class_ir.is_augmented + ) + + if cmp_varies_at_runtime: + # We might need to call left.__eq__(right) or right.__eq__(left) + # depending on which is the more specific type. + return None + + if not class_ir.has_method("__eq__"): + # There's no __eq__ defined, so just use object identity. + identity_ref_op = "is" if expr_op == "==" else "is not" + return self.translate_is_op(lreg, rreg, identity_ref_op, line) + + return self.gen_method_call(lreg, op_methods[expr_op], [rreg], ltype, line) + + def _never_equal_to_none(self, typ: RType) -> bool: + """Are the values of type never equal to None?""" + # TODO: Support RInstance with no custom __eq__/__ne__ and other primitive types. + return is_str_rprimitive(typ) or is_bytes_rprimitive(typ) + + def _translate_fast_optional_eq_cmp( + self, lreg: Value, rreg: Value, expr_op: str, line: int + ) -> Value: + """Generate eq/ne fast path between 'X | None' and ('X | None' or X). + + Assume 'X' never compares equal to None. + """ + if not isinstance(lreg.type, RUnion): + lreg, rreg = rreg, lreg + value_typ = optional_value_type(lreg.type) + assert value_typ + res = Register(bool_rprimitive) + + # Fast path: left value is None? + cmp = self.add(ComparisonOp(lreg, self.none_object(), ComparisonOp.EQ, line)) + l_none = BasicBlock() + l_not_none = BasicBlock() + out = BasicBlock() + self.add(Branch(cmp, l_none, l_not_none, Branch.BOOL)) + self.activate_block(l_none) + if not isinstance(rreg.type, RUnion): + val = self.false() if expr_op == "==" else self.true() + self.add(Assign(res, val)) + else: + op = ComparisonOp.EQ if expr_op == "==" else ComparisonOp.NEQ + cmp = self.add(ComparisonOp(rreg, self.none_object(), op, line)) + self.add(Assign(res, cmp)) + self.goto(out) + + self.activate_block(l_not_none) + if not isinstance(rreg.type, RUnion): + # Both operands are known to be not None, perform specialized comparison + eq = self.translate_eq_cmp( + self.unbox_or_cast(lreg, value_typ, line, can_borrow=True, unchecked=True), + rreg, + expr_op, + line, + ) + assert eq is not None + self.add(Assign(res, eq)) + else: + r_none = BasicBlock() + r_not_none = BasicBlock() + # Fast path: right value is None? 
+ cmp = self.add(ComparisonOp(rreg, self.none_object(), ComparisonOp.EQ, line)) + self.add(Branch(cmp, r_none, r_not_none, Branch.BOOL)) + self.activate_block(r_none) + # None vs not-None + val = self.false() if expr_op == "==" else self.true() + self.add(Assign(res, val)) + self.goto(out) + self.activate_block(r_not_none) + # Both operands are known to be not None, perform specialized comparison + eq = self.translate_eq_cmp( + self.unbox_or_cast(lreg, value_typ, line, can_borrow=True, unchecked=True), + self.unbox_or_cast(rreg, value_typ, line, can_borrow=True, unchecked=True), + expr_op, + line, + ) + assert eq is not None + self.add(Assign(res, eq)) + self.goto(out) + self.activate_block(out) + return res + + def translate_is_op(self, lreg: Value, rreg: Value, expr_op: str, line: int) -> Value: + """Create equality comparison operation between object identities + + Args: + expr_op: either 'is' or 'is not' + """ + op = ComparisonOp.EQ if expr_op == "is" else ComparisonOp.NEQ + lhs = self.coerce(lreg, object_rprimitive, line) + rhs = self.coerce(rreg, object_rprimitive, line) + return self.add(ComparisonOp(lhs, rhs, op, line)) + + def _create_dict(self, keys: list[Value], values: list[Value], line: int) -> Value: + """Create a dictionary(possibly empty) using keys and values""" + # keys and values should have the same number of items + size = len(keys) + if size > 0: + size_value: Value = Integer(size, c_pyssize_t_rprimitive) + # merge keys and values + items = [i for t in list(zip(keys, values)) for i in t] + return self.call_c(dict_build_op, [size_value] + items, line) + else: + return self.call_c(dict_new_op, [], line) + + def error(self, msg: str, line: int) -> None: + assert self.errors is not None, "cannot generate errors in this compiler phase" + self.errors.error(msg, self.module_path, line) + + +def num_positional_args(arg_values: list[Value], arg_kinds: list[ArgKind] | None) -> int: + if arg_kinds is None: + return len(arg_values) + num_pos = 0 + for kind in arg_kinds: + if kind == ARG_POS: + num_pos += 1 + return num_pos diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/main.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/main.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..bbe1f07 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/main.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/main.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/main.py new file mode 100644 index 0000000..f08911a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/main.py @@ -0,0 +1,172 @@ +"""Transform a mypy AST to the IR form (Intermediate Representation). + +For example, consider a function like this: + + def f(x: int) -> int: + return x * 2 + 1 + +It would be translated to something that conceptually looks like this: + + r0 = 2 + r1 = 1 + r2 = x * r0 :: int + r3 = r2 + r1 :: int + return r3 + +This module deals with the module-level IR transformation logic and +putting it all together. The actual IR is implemented in mypyc.ir. + +For the core of the IR transform implementation, look at build_ir() +below, mypyc.irbuild.builder, and mypyc.irbuild.visitor. 
+""" + +from __future__ import annotations + +from typing import Any, Callable, TypeVar, cast + +from mypy.build import Graph +from mypy.nodes import ClassDef, Expression, FuncDef, MypyFile +from mypy.state import state +from mypy.types import Type +from mypyc.analysis.attrdefined import analyze_always_defined_attrs +from mypyc.common import TOP_LEVEL_NAME +from mypyc.errors import Errors +from mypyc.ir.func_ir import FuncDecl, FuncIR, FuncSignature +from mypyc.ir.module_ir import ModuleIR, ModuleIRs +from mypyc.ir.rtypes import none_rprimitive +from mypyc.irbuild.builder import IRBuilder +from mypyc.irbuild.mapper import Mapper +from mypyc.irbuild.prebuildvisitor import PreBuildVisitor +from mypyc.irbuild.prepare import ( + adjust_generator_classes_of_methods, + build_type_map, + create_generator_class_for_func, + find_singledispatch_register_impls, +) +from mypyc.irbuild.visitor import IRBuilderVisitor +from mypyc.irbuild.vtable import compute_vtable +from mypyc.options import CompilerOptions + +# The stubs for callable contextmanagers are busted so cast it to the +# right type... +F = TypeVar("F", bound=Callable[..., Any]) +strict_optional_dec = cast(Callable[[F], F], state.strict_optional_set(True)) + + +@strict_optional_dec # Turn on strict optional for any type manipulations we do +def build_ir( + modules: list[MypyFile], + graph: Graph, + types: dict[Expression, Type], + mapper: Mapper, + options: CompilerOptions, + errors: Errors, +) -> ModuleIRs: + """Build basic IR for a set of modules that have been type-checked by mypy. + + The returned IR is not complete and requires additional + transformations, such as the insertion of refcount handling. + """ + + build_type_map(mapper, modules, graph, types, options, errors) + adjust_generator_classes_of_methods(mapper) + singledispatch_info = find_singledispatch_register_impls(modules, errors) + + result: ModuleIRs = {} + if errors.num_errors > 0: + return result + + # Generate IR for all modules. + class_irs = [] + + for module in modules: + # First pass to determine free symbols. + pbv = PreBuildVisitor(errors, module, singledispatch_info.decorators_to_remove, types) + module.accept(pbv) + + # Declare generator classes for nested async functions and generators. + for fdef in pbv.nested_funcs: + if isinstance(fdef, FuncDef): + # Make generator class name sufficiently unique. + suffix = f"___{fdef.line}" + if fdef.is_coroutine or fdef.is_generator: + create_generator_class_for_func( + module.fullname, None, fdef, mapper, name_suffix=suffix + ) + + # Construct and configure builder objects (cyclic runtime dependency). + visitor = IRBuilderVisitor() + builder = IRBuilder( + module.fullname, + types, + graph, + errors, + mapper, + pbv, + visitor, + options, + singledispatch_info.singledispatch_impls, + ) + visitor.builder = builder + + # Second pass does the bulk of the work. + transform_mypy_file(builder, module) + module_ir = ModuleIR( + module.fullname, + list(builder.imports), + builder.functions, + builder.classes, + builder.final_names, + builder.type_var_names, + ) + result[module.fullname] = module_ir + class_irs.extend(builder.classes) + + analyze_always_defined_attrs(class_irs) + + # Compute vtables. 
+ for cir in class_irs: + if cir.is_ext_class: + compute_vtable(cir) + + return result + + +def transform_mypy_file(builder: IRBuilder, mypyfile: MypyFile) -> None: + """Generate IR for a single module.""" + + if mypyfile.fullname in ("typing", "abc"): + # These module are special; their contents are currently all + # built-in primitives. + return + + builder.set_module(mypyfile.fullname, mypyfile.path) + + classes = [node for node in mypyfile.defs if isinstance(node, ClassDef)] + + # Collect all classes. + for cls in classes: + ir = builder.mapper.type_to_ir[cls.info] + builder.classes.append(ir) + + builder.enter("") + + # Make sure we have a builtins import + builder.gen_import("builtins", -1) + + # Generate ops. + for node in mypyfile.defs: + builder.accept(node) + + builder.maybe_add_implicit_return() + + # Generate special function representing module top level. + args, _, blocks, ret_type, _ = builder.leave() + sig = FuncSignature([], none_rprimitive) + func_ir = FuncIR( + FuncDecl(TOP_LEVEL_NAME, None, builder.module_name, sig), + args, + blocks, + traceback_name="", + ) + builder.functions.append(func_ir) diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/mapper.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/mapper.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..40d983d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/mapper.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/mapper.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/mapper.py new file mode 100644 index 0000000..c986499 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/mapper.py @@ -0,0 +1,238 @@ +"""Maintain a mapping from mypy concepts to IR/compiled concepts.""" + +from __future__ import annotations + +from mypy.nodes import ARG_STAR, ARG_STAR2, GDEF, ArgKind, FuncDef, RefExpr, SymbolNode, TypeInfo +from mypy.types import ( + AnyType, + CallableType, + Instance, + LiteralType, + NoneTyp, + Overloaded, + PartialType, + TupleType, + Type, + TypedDictType, + TypeType, + TypeVarLikeType, + UnboundType, + UninhabitedType, + UnionType, + find_unpack_in_list, + get_proper_type, +) +from mypyc.ir.class_ir import ClassIR +from mypyc.ir.func_ir import FuncDecl, FuncSignature, RuntimeArg +from mypyc.ir.rtypes import ( + KNOWN_NATIVE_TYPES, + RInstance, + RTuple, + RType, + RUnion, + bool_rprimitive, + bytes_rprimitive, + dict_rprimitive, + float_rprimitive, + frozenset_rprimitive, + int16_rprimitive, + int32_rprimitive, + int64_rprimitive, + int_rprimitive, + list_rprimitive, + none_rprimitive, + object_rprimitive, + range_rprimitive, + set_rprimitive, + str_rprimitive, + tuple_rprimitive, + uint8_rprimitive, +) + + +class Mapper: + """Keep track of mappings from mypy concepts to IR concepts. + + For example, we keep track of how the mypy TypeInfos of compiled + classes map to class IR objects. + + This state is shared across all modules being compiled in all + compilation groups. 
+ """ + + def __init__(self, group_map: dict[str, str | None]) -> None: + self.group_map = group_map + self.type_to_ir: dict[TypeInfo, ClassIR] = {} + self.func_to_decl: dict[SymbolNode, FuncDecl] = {} + self.symbol_fullnames: set[str] = set() + # The corresponding generator class that implements a generator/async function + self.fdef_to_generator: dict[FuncDef, ClassIR] = {} + + def type_to_rtype(self, typ: Type | None) -> RType: + if typ is None: + return object_rprimitive + + typ = get_proper_type(typ) + if isinstance(typ, Instance): + if typ.type.is_newtype: + # Unwrap NewType to its base type for rprimitive mapping + assert len(typ.type.bases) == 1, typ.type.bases + return self.type_to_rtype(typ.type.bases[0]) + if typ.type.fullname == "builtins.int": + return int_rprimitive + elif typ.type.fullname == "builtins.float": + return float_rprimitive + elif typ.type.fullname == "builtins.bool": + return bool_rprimitive + elif typ.type.fullname == "builtins.str": + return str_rprimitive + elif typ.type.fullname == "builtins.bytes": + return bytes_rprimitive + elif typ.type.fullname == "builtins.list": + return list_rprimitive + # Dict subclasses are at least somewhat common and we + # specifically support them, so make sure that dict operations + # get optimized on them. + elif any(cls.fullname == "builtins.dict" for cls in typ.type.mro): + return dict_rprimitive + elif typ.type.fullname == "builtins.set": + return set_rprimitive + elif typ.type.fullname == "builtins.frozenset": + return frozenset_rprimitive + elif typ.type.fullname == "builtins.tuple": + return tuple_rprimitive # Varying-length tuple + elif typ.type.fullname == "builtins.range": + return range_rprimitive + elif typ.type in self.type_to_ir: + inst = RInstance(self.type_to_ir[typ.type]) + # Treat protocols as Union[protocol, object], so that we can do fast + # method calls in the cases where the protocol is explicitly inherited from + # and fall back to generic operations when it isn't. + if typ.type.is_protocol: + return RUnion([inst, object_rprimitive]) + else: + return inst + elif typ.type.fullname == "mypy_extensions.i64": + return int64_rprimitive + elif typ.type.fullname == "mypy_extensions.i32": + return int32_rprimitive + elif typ.type.fullname == "mypy_extensions.i16": + return int16_rprimitive + elif typ.type.fullname == "mypy_extensions.u8": + return uint8_rprimitive + elif typ.type.fullname in KNOWN_NATIVE_TYPES: + return KNOWN_NATIVE_TYPES[typ.type.fullname] + else: + return object_rprimitive + elif isinstance(typ, TupleType): + # Use our unboxed tuples for raw tuples but fall back to + # being boxed for NamedTuple or for variadic tuples. + if ( + typ.partial_fallback.type.fullname == "builtins.tuple" + and find_unpack_in_list(typ.items) is None + ): + return RTuple([self.type_to_rtype(t) for t in typ.items]) + else: + return tuple_rprimitive + elif isinstance(typ, CallableType): + return object_rprimitive + elif isinstance(typ, NoneTyp): + return none_rprimitive + elif isinstance(typ, UnionType): + return RUnion.make_simplified_union([self.type_to_rtype(item) for item in typ.items]) + elif isinstance(typ, AnyType): + return object_rprimitive + elif isinstance(typ, TypeType): + return object_rprimitive + elif isinstance(typ, TypeVarLikeType): + # Erase type variable to upper bound. + # TODO: Erase to union if object has value restriction? 
+ return self.type_to_rtype(typ.upper_bound) + elif isinstance(typ, PartialType): + assert typ.var.type is not None + return self.type_to_rtype(typ.var.type) + elif isinstance(typ, Overloaded): + return object_rprimitive + elif isinstance(typ, TypedDictType): + return dict_rprimitive + elif isinstance(typ, LiteralType): + return self.type_to_rtype(typ.fallback) + elif isinstance(typ, (UninhabitedType, UnboundType)): + # Sure, whatever! + return object_rprimitive + + # I think we've covered everything that is supposed to + # actually show up, so anything else is a bug somewhere. + assert False, "unexpected type %s" % type(typ) + + def get_arg_rtype(self, typ: Type, kind: ArgKind) -> RType: + if kind == ARG_STAR: + return tuple_rprimitive + elif kind == ARG_STAR2: + return dict_rprimitive + else: + return self.type_to_rtype(typ) + + def fdef_to_sig(self, fdef: FuncDef, strict_dunders_typing: bool) -> FuncSignature: + if isinstance(fdef.type, CallableType): + arg_types = [ + self.get_arg_rtype(typ, kind) + for typ, kind in zip(fdef.type.arg_types, fdef.type.arg_kinds) + ] + arg_pos_onlys = [name is None for name in fdef.type.arg_names] + ret = self.type_to_rtype(fdef.type.ret_type) + else: + # Handle unannotated functions + arg_types = [object_rprimitive for _ in fdef.arguments] + arg_pos_onlys = [arg.pos_only for arg in fdef.arguments] + # We at least know the return type for __init__ methods will be None. + is_init_method = fdef.name == "__init__" and bool(fdef.info) + if is_init_method: + ret = none_rprimitive + else: + ret = object_rprimitive + + # mypyc FuncSignatures (unlike mypy types) want to have a name + # present even when the argument is position only, since it is + # the sole way that FuncDecl arguments are tracked. This is + # generally fine except in some cases (like for computing + # init_sig) we need to produce FuncSignatures from a + # deserialized FuncDef that lacks arguments. We won't ever + # need to use those inside of a FuncIR, so we just make up + # some crap. + if hasattr(fdef, "arguments"): + arg_names = [arg.variable.name for arg in fdef.arguments] + else: + arg_names = [name or "" for name in fdef.arg_names] + + args = [ + RuntimeArg(arg_name, arg_type, arg_kind, arg_pos_only) + for arg_name, arg_kind, arg_type, arg_pos_only in zip( + arg_names, fdef.arg_kinds, arg_types, arg_pos_onlys + ) + ] + + if not strict_dunders_typing: + # We force certain dunder methods to return objects to support letting them + # return NotImplemented. It also avoids some pointless boxing and unboxing, + # since tp_richcompare needs an object anyways. + # However, it also prevents some optimizations. + if fdef.name in ("__eq__", "__ne__", "__lt__", "__gt__", "__le__", "__ge__"): + ret = object_rprimitive + + return FuncSignature(args, ret) + + def is_native_module(self, module: str) -> bool: + """Is the given module one compiled by mypyc?""" + return module in self.group_map + + def is_native_ref_expr(self, expr: RefExpr) -> bool: + if expr.node is None: + return False + if "." 
in expr.node.fullname: + name = expr.node.fullname.rpartition(".")[0] + return self.is_native_module(name) or name in self.symbol_fullnames + return True + + def is_native_module_ref_expr(self, expr: RefExpr) -> bool: + return self.is_native_ref_expr(expr) and expr.kind == GDEF diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/match.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/match.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..b9dd5c9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/match.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/match.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/match.py new file mode 100644 index 0000000..c2ca9cf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/match.py @@ -0,0 +1,362 @@ +from __future__ import annotations + +from collections.abc import Generator +from contextlib import contextmanager + +from mypy.nodes import MatchStmt, NameExpr, TypeInfo +from mypy.patterns import ( + AsPattern, + ClassPattern, + MappingPattern, + OrPattern, + Pattern, + SequencePattern, + SingletonPattern, + StarredPattern, + ValuePattern, +) +from mypy.traverser import TraverserVisitor +from mypy.types import Instance, LiteralType, TupleType, get_proper_type +from mypyc.ir.ops import BasicBlock, Value +from mypyc.ir.rtypes import object_rprimitive +from mypyc.irbuild.builder import IRBuilder +from mypyc.primitives.dict_ops import ( + dict_copy, + dict_del_item, + mapping_has_key, + supports_mapping_protocol, +) +from mypyc.primitives.generic_ops import generic_ssize_t_len_op +from mypyc.primitives.list_ops import ( + sequence_get_item, + sequence_get_slice, + supports_sequence_protocol, +) +from mypyc.primitives.misc_ops import fast_isinstance_op, slow_isinstance_op + +# From: https://peps.python.org/pep-0634/#class-patterns +MATCHABLE_BUILTINS = { + "builtins.bool", + "builtins.bytearray", + "builtins.bytes", + "builtins.dict", + "builtins.float", + "builtins.frozenset", + "builtins.int", + "builtins.list", + "builtins.set", + "builtins.str", + "builtins.tuple", +} + + +class MatchVisitor(TraverserVisitor): + builder: IRBuilder + code_block: BasicBlock + next_block: BasicBlock + final_block: BasicBlock + subject: Value + match: MatchStmt + + as_pattern: AsPattern | None = None + + def __init__(self, builder: IRBuilder, match_node: MatchStmt) -> None: + self.builder = builder + + self.code_block = BasicBlock() + self.next_block = BasicBlock() + self.final_block = BasicBlock() + + self.match = match_node + self.subject = builder.accept(match_node.subject) + + def build_match_body(self, index: int) -> None: + self.builder.activate_block(self.code_block) + + guard = self.match.guards[index] + + if guard: + self.code_block = BasicBlock() + + cond = self.builder.accept(guard) + self.builder.add_bool_branch(cond, self.code_block, self.next_block) + + self.builder.activate_block(self.code_block) + + self.builder.accept(self.match.bodies[index]) + self.builder.goto(self.final_block) + + def visit_match_stmt(self, m: MatchStmt) -> None: + for i, pattern in enumerate(m.patterns): + self.code_block = BasicBlock() + self.next_block = BasicBlock() + + pattern.accept(self) + + self.build_match_body(i) + self.builder.activate_block(self.next_block) + + self.builder.goto_and_activate(self.final_block) + + def visit_value_pattern(self, pattern: ValuePattern) -> None: + value = self.builder.accept(pattern.expr) + + 
cond = self.builder.binary_op(self.subject, value, "==", pattern.expr.line) + + self.bind_as_pattern(value) + + self.builder.add_bool_branch(cond, self.code_block, self.next_block) + + def visit_or_pattern(self, pattern: OrPattern) -> None: + backup_block = self.next_block + self.next_block = BasicBlock() + + for p in pattern.patterns: + # Hack to ensure the as pattern is bound to each pattern in the + # "or" pattern, but not every subpattern + backup = self.as_pattern + p.accept(self) + self.as_pattern = backup + + self.builder.activate_block(self.next_block) + self.next_block = BasicBlock() + + self.next_block = backup_block + self.builder.goto(self.next_block) + + def visit_class_pattern(self, pattern: ClassPattern) -> None: + # TODO: use faster instance check for native classes (while still + # making sure to account for inheritance) + isinstance_op = ( + fast_isinstance_op + if self.builder.is_builtin_ref_expr(pattern.class_ref) + else slow_isinstance_op + ) + + cond = self.builder.primitive_op( + isinstance_op, [self.subject, self.builder.accept(pattern.class_ref)], pattern.line + ) + + self.builder.add_bool_branch(cond, self.code_block, self.next_block) + + self.bind_as_pattern(self.subject, new_block=True) + + if pattern.positionals: + if pattern.class_ref.fullname in MATCHABLE_BUILTINS: + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + pattern.positionals[0].accept(self) + + return + + node = pattern.class_ref.node + assert isinstance(node, TypeInfo), node + match_args = extract_dunder_match_args_names(node) + + for i, expr in enumerate(pattern.positionals): + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + # TODO: use faster "get_attr" method instead when calling on native or + # builtin objects + positional = self.builder.py_get_attr(self.subject, match_args[i], expr.line) + + with self.enter_subpattern(positional): + expr.accept(self) + + for key, value in zip(pattern.keyword_keys, pattern.keyword_values): + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + # TODO: same as above "get_attr" comment + attr = self.builder.py_get_attr(self.subject, key, value.line) + + with self.enter_subpattern(attr): + value.accept(self) + + def visit_as_pattern(self, pattern: AsPattern) -> None: + if pattern.pattern: + old_pattern = self.as_pattern + self.as_pattern = pattern + pattern.pattern.accept(self) + self.as_pattern = old_pattern + + elif pattern.name: + target = self.builder.get_assignment_target(pattern.name) + + self.builder.assign(target, self.subject, pattern.line) + + self.builder.goto(self.code_block) + + def visit_singleton_pattern(self, pattern: SingletonPattern) -> None: + if pattern.value is None: + obj = self.builder.none_object() + elif pattern.value is True: + obj = self.builder.true() + else: + obj = self.builder.false() + + cond = self.builder.binary_op(self.subject, obj, "is", pattern.line) + + self.builder.add_bool_branch(cond, self.code_block, self.next_block) + + def visit_mapping_pattern(self, pattern: MappingPattern) -> None: + is_dict = self.builder.call_c(supports_mapping_protocol, [self.subject], pattern.line) + + self.builder.add_bool_branch(is_dict, self.code_block, self.next_block) + + keys: list[Value] = [] + + for key, value in zip(pattern.keys, pattern.values): + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + key_value = self.builder.accept(key) + keys.append(key_value) + + exists = self.builder.call_c(mapping_has_key, 
[self.subject, key_value], pattern.line) + + self.builder.add_bool_branch(exists, self.code_block, self.next_block) + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + item = self.builder.gen_method_call( + self.subject, "__getitem__", [key_value], object_rprimitive, pattern.line + ) + + with self.enter_subpattern(item): + value.accept(self) + + if pattern.rest: + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + rest = self.builder.primitive_op(dict_copy, [self.subject], pattern.rest.line) + + target = self.builder.get_assignment_target(pattern.rest) + + self.builder.assign(target, rest, pattern.rest.line) + + for i, key_name in enumerate(keys): + self.builder.call_c(dict_del_item, [rest, key_name], pattern.keys[i].line) + + self.builder.goto(self.code_block) + + def visit_sequence_pattern(self, seq_pattern: SequencePattern) -> None: + star_index, capture, patterns = prep_sequence_pattern(seq_pattern) + + is_list = self.builder.call_c(supports_sequence_protocol, [self.subject], seq_pattern.line) + + self.builder.add_bool_branch(is_list, self.code_block, self.next_block) + + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + actual_len = self.builder.call_c(generic_ssize_t_len_op, [self.subject], seq_pattern.line) + min_len = len(patterns) + + is_long_enough = self.builder.binary_op( + actual_len, + self.builder.load_int(min_len), + "==" if star_index is None else ">=", + seq_pattern.line, + ) + + self.builder.add_bool_branch(is_long_enough, self.code_block, self.next_block) + + for i, pattern in enumerate(patterns): + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + if star_index is not None and i >= star_index: + current = self.builder.binary_op( + actual_len, self.builder.load_int(min_len - i), "-", pattern.line + ) + + else: + current = self.builder.load_int(i) + + item = self.builder.call_c(sequence_get_item, [self.subject, current], pattern.line) + + with self.enter_subpattern(item): + pattern.accept(self) + + if capture and star_index is not None: + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + capture_end = self.builder.binary_op( + actual_len, self.builder.load_int(min_len - star_index), "-", capture.line + ) + + rest = self.builder.call_c( + sequence_get_slice, + [self.subject, self.builder.load_int(star_index), capture_end], + capture.line, + ) + + target = self.builder.get_assignment_target(capture) + self.builder.assign(target, rest, capture.line) + + self.builder.goto(self.code_block) + + def bind_as_pattern(self, value: Value, new_block: bool = False) -> None: + if self.as_pattern and self.as_pattern.pattern and self.as_pattern.name: + if new_block: + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + target = self.builder.get_assignment_target(self.as_pattern.name) + self.builder.assign(target, value, self.as_pattern.pattern.line) + + self.as_pattern = None + + if new_block: + self.builder.goto(self.code_block) + + @contextmanager + def enter_subpattern(self, subject: Value) -> Generator[None]: + old_subject = self.subject + self.subject = subject + yield + self.subject = old_subject + + +def prep_sequence_pattern( + seq_pattern: SequencePattern, +) -> tuple[int | None, NameExpr | None, list[Pattern]]: + star_index: int | None = None + capture: NameExpr | None = None + patterns: list[Pattern] = [] + + for i, pattern in enumerate(seq_pattern.patterns): + if isinstance(pattern, 
StarredPattern): + star_index = i + capture = pattern.capture + + else: + patterns.append(pattern) + + return star_index, capture, patterns + + +def extract_dunder_match_args_names(info: TypeInfo) -> list[str]: + ty = info.names.get("__match_args__") + assert ty + match_args_type = get_proper_type(ty.type) + assert isinstance(match_args_type, TupleType), match_args_type + + match_args: list[str] = [] + for item in match_args_type.items: + proper_item = get_proper_type(item) + + match_arg = None + if isinstance(proper_item, Instance) and proper_item.last_known_value: + match_arg = proper_item.last_known_value.value + elif isinstance(proper_item, LiteralType): + match_arg = proper_item.value + assert isinstance(match_arg, str), f"Unrecognized __match_args__ item: {item}" + + match_args.append(match_arg) + return match_args diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/missingtypevisitor.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/missingtypevisitor.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..c903e4c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/missingtypevisitor.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/missingtypevisitor.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/missingtypevisitor.py new file mode 100644 index 0000000..e655d27 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/missingtypevisitor.py @@ -0,0 +1,20 @@ +from __future__ import annotations + +from mypy.nodes import Expression, Node +from mypy.traverser import ExtendedTraverserVisitor +from mypy.types import AnyType, Type, TypeOfAny + + +class MissingTypesVisitor(ExtendedTraverserVisitor): + """AST visitor that can be used to add any missing types as a generic AnyType.""" + + def __init__(self, types: dict[Expression, Type]) -> None: + super().__init__() + self.types: dict[Expression, Type] = types + + def visit(self, o: Node) -> bool: + if isinstance(o, Expression) and o not in self.types: + self.types[o] = AnyType(TypeOfAny.special_form) + + # If returns True, will continue to nested nodes. + return True diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/nonlocalcontrol.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/nonlocalcontrol.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..0f277ea Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/nonlocalcontrol.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/nonlocalcontrol.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/nonlocalcontrol.py new file mode 100644 index 0000000..4a7136f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/nonlocalcontrol.py @@ -0,0 +1,216 @@ +"""Helpers for dealing with nonlocal control such as 'break' and 'return'. + +Model how these behave differently in different contexts. 
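As a rough, source-level illustration of the constructs these classes model (hypothetical example code, not part of the vendored module):

```python
def first_even(items: list[int]) -> int | None:
    for item in items:
        if item % 2:          # LoopNonlocalControl: continue jumps to the loop's continue block
            continue
        return item           # return is forwarded through the loop to the function's exit
    return None


def read_text(path: str) -> str:
    try:
        with open(path) as f:
            return f.read()   # TryFinallyNonlocalControl: the finally block must still run
    finally:
        print("done reading")


def countdown(n: int):
    while n:
        yield n
        n -= 1
    return "finished"         # GeneratorNonlocalControl: becomes the StopIteration value
```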
+""" + +from __future__ import annotations + +from abc import abstractmethod +from typing import TYPE_CHECKING + +from mypyc.ir.ops import ( + NO_TRACEBACK_LINE_NO, + BasicBlock, + Branch, + Goto, + Integer, + Register, + Return, + SetMem, + Unreachable, + Value, +) +from mypyc.ir.rtypes import object_rprimitive +from mypyc.irbuild.targets import AssignmentTarget +from mypyc.primitives.exc_ops import restore_exc_info_op, set_stop_iteration_value + +if TYPE_CHECKING: + from mypyc.irbuild.builder import IRBuilder + + +class NonlocalControl: + """ABC representing a stack frame of constructs that modify nonlocal control flow. + + The nonlocal control flow constructs are break, continue, and + return, and their behavior is modified by a number of other + constructs. The most obvious is loop, which override where break + and continue jump to, but also `except` (which needs to clear + exc_info when left) and (eventually) finally blocks (which need to + ensure that the finally block is always executed when leaving the + try/except blocks). + """ + + @abstractmethod + def gen_break(self, builder: IRBuilder, line: int) -> None: + pass + + @abstractmethod + def gen_continue(self, builder: IRBuilder, line: int) -> None: + pass + + @abstractmethod + def gen_return(self, builder: IRBuilder, value: Value, line: int) -> None: + pass + + +class BaseNonlocalControl(NonlocalControl): + """Default nonlocal control outside any statements that affect it.""" + + def gen_break(self, builder: IRBuilder, line: int) -> None: + assert False, "break outside of loop" + + def gen_continue(self, builder: IRBuilder, line: int) -> None: + assert False, "continue outside of loop" + + def gen_return(self, builder: IRBuilder, value: Value, line: int) -> None: + builder.add(Return(value)) + + +class LoopNonlocalControl(NonlocalControl): + """Nonlocal control within a loop.""" + + def __init__( + self, outer: NonlocalControl, continue_block: BasicBlock, break_block: BasicBlock + ) -> None: + self.outer = outer + self.continue_block = continue_block + self.break_block = break_block + + def gen_break(self, builder: IRBuilder, line: int) -> None: + builder.add(Goto(self.break_block)) + + def gen_continue(self, builder: IRBuilder, line: int) -> None: + builder.add(Goto(self.continue_block)) + + def gen_return(self, builder: IRBuilder, value: Value, line: int) -> None: + self.outer.gen_return(builder, value, line) + + +class GeneratorNonlocalControl(BaseNonlocalControl): + """Default nonlocal control in a generator function outside statements.""" + + def gen_return(self, builder: IRBuilder, value: Value, line: int) -> None: + # Assign an invalid next label number so that the next time + # __next__ is called, we jump to the case in which + # StopIteration is raised. + builder.assign(builder.fn_info.generator_class.next_label_target, Integer(-1), line) + + # Raise a StopIteration containing a field for the value that + # should be returned. Before doing so, create a new block + # without an error handler set so that the implicitly thrown + # StopIteration isn't caught by except blocks inside of the + # generator function. + builder.builder.push_error_handler(None) + builder.goto_and_activate(BasicBlock()) + + # Skip creating a traceback frame when we raise here, because + # we don't care about the traceback frame and it is kind of + # expensive since raising StopIteration is an extremely common + # case. 
Also we call a special internal function to set + # StopIteration instead of using RaiseStandardError because + # the obvious thing doesn't work if the value is a tuple + # (???). + + true, false = BasicBlock(), BasicBlock() + stop_iter_reg = builder.fn_info.generator_class.stop_iter_value_reg + assert stop_iter_reg is not None + + builder.add(Branch(stop_iter_reg, true, false, Branch.IS_ERROR)) + + builder.activate_block(true) + # The default/slow path is to raise a StopIteration exception with + # return value. + builder.call_c(set_stop_iteration_value, [value], NO_TRACEBACK_LINE_NO) + builder.add(Unreachable()) + builder.builder.pop_error_handler() + + builder.activate_block(false) + # The fast path is to store return value via caller-provided pointer + # instead of raising an exception. This can only be used when the + # caller is a native function. + builder.add(SetMem(object_rprimitive, stop_iter_reg, value)) + builder.add(Return(Integer(0, object_rprimitive))) + + +class CleanupNonlocalControl(NonlocalControl): + """Abstract nonlocal control that runs some cleanup code.""" + + def __init__(self, outer: NonlocalControl) -> None: + self.outer = outer + + @abstractmethod + def gen_cleanup(self, builder: IRBuilder, line: int) -> None: ... + + def gen_break(self, builder: IRBuilder, line: int) -> None: + self.gen_cleanup(builder, line) + self.outer.gen_break(builder, line) + + def gen_continue(self, builder: IRBuilder, line: int) -> None: + self.gen_cleanup(builder, line) + self.outer.gen_continue(builder, line) + + def gen_return(self, builder: IRBuilder, value: Value, line: int) -> None: + self.gen_cleanup(builder, line) + self.outer.gen_return(builder, value, line) + + +class TryFinallyNonlocalControl(NonlocalControl): + """Nonlocal control within try/finally.""" + + def __init__(self, target: BasicBlock) -> None: + self.target = target + self.ret_reg: None | Register | AssignmentTarget = None + + def gen_break(self, builder: IRBuilder, line: int) -> None: + builder.error("break inside try/finally block is unimplemented", line) + + def gen_continue(self, builder: IRBuilder, line: int) -> None: + builder.error("continue inside try/finally block is unimplemented", line) + + def gen_return(self, builder: IRBuilder, value: Value, line: int) -> None: + if self.ret_reg is None: + if builder.fn_info.is_generator: + self.ret_reg = builder.make_spill_target(builder.ret_types[-1]) + else: + self.ret_reg = Register(builder.ret_types[-1]) + # assert needed because of apparent mypy bug... it loses track of the union + # and infers the type as object + assert isinstance(self.ret_reg, (Register, AssignmentTarget)), self.ret_reg + builder.assign(self.ret_reg, value, line) + + builder.add(Goto(self.target)) + + +class ExceptNonlocalControl(CleanupNonlocalControl): + """Nonlocal control for except blocks. + + Just makes sure that sys.exc_info always gets restored when we leave. + This is super annoying. + """ + + def __init__(self, outer: NonlocalControl, saved: Value | AssignmentTarget) -> None: + super().__init__(outer) + self.saved = saved + + def gen_cleanup(self, builder: IRBuilder, line: int) -> None: + builder.call_c(restore_exc_info_op, [builder.read(self.saved)], line) + + +class FinallyNonlocalControl(CleanupNonlocalControl): + """Nonlocal control for finally blocks. + + Just makes sure that sys.exc_info always gets restored when we + leave and the return register is decrefed if it isn't null. 
+ """ + + def __init__(self, outer: NonlocalControl, saved: Value) -> None: + super().__init__(outer) + self.saved = saved + + def gen_cleanup(self, builder: IRBuilder, line: int) -> None: + # Restore the old exc_info + target, cleanup = BasicBlock(), BasicBlock() + builder.add(Branch(self.saved, target, cleanup, Branch.IS_ERROR)) + builder.activate_block(cleanup) + builder.call_c(restore_exc_info_op, [self.saved], line) + builder.goto_and_activate(target) diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/prebuildvisitor.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/prebuildvisitor.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..5dea4a5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/prebuildvisitor.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/prebuildvisitor.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/prebuildvisitor.py new file mode 100644 index 0000000..e630fed --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/prebuildvisitor.py @@ -0,0 +1,216 @@ +from __future__ import annotations + +from mypy.nodes import ( + AssignmentStmt, + Block, + Decorator, + Expression, + FuncDef, + FuncItem, + Import, + LambdaExpr, + MemberExpr, + MypyFile, + NameExpr, + Node, + SymbolNode, + Var, +) +from mypy.traverser import ExtendedTraverserVisitor +from mypy.types import Type +from mypyc.errors import Errors +from mypyc.irbuild.missingtypevisitor import MissingTypesVisitor + + +class PreBuildVisitor(ExtendedTraverserVisitor): + """Mypy file AST visitor run before building the IR. + + This collects various things, including: + + * Determine relationships between nested functions and functions that + contain nested functions + * Find non-local variables (free variables) + * Find property setters + * Find decorators of functions + * Find module import groups + + The main IR build pass uses this information. + """ + + def __init__( + self, + errors: Errors, + current_file: MypyFile, + decorators_to_remove: dict[FuncDef, list[int]], + types: dict[Expression, Type], + ) -> None: + super().__init__() + # Dict from a function to symbols defined directly in the + # function that are used as non-local (free) variables within a + # nested function. + self.free_variables: dict[FuncItem, set[SymbolNode]] = {} + + # Intermediate data structure used to find the function where + # a SymbolNode is declared. Initially this may point to a + # function nested inside the function with the declaration, + # but we'll eventually update this to refer to the function + # with the declaration. + self.symbols_to_funcs: dict[SymbolNode, FuncItem] = {} + + # Stack representing current function nesting. + self.funcs: list[FuncItem] = [] + + # All property setters encountered so far. + self.prop_setters: set[FuncDef] = set() + + # A map from any function that contains nested functions to + # a set of all the functions that are nested within it. + self.encapsulating_funcs: dict[FuncItem, list[FuncItem]] = {} + + # Map nested function to its parent/encapsulating function. + self.nested_funcs: dict[FuncItem, FuncItem] = {} + + # Map function to its non-special decorators. 
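+ # For example, with
+ #
+ #     @functools.cache
+ #     def fib(n: int) -> int: ...
+ #
+ # fib's FuncDef maps to [<functools.cache expression>]; special-cased
+ # decorators such as @property or @abstractmethod are never stored here.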
+ self.funcs_to_decorators: dict[FuncDef, list[Expression]] = {} + + # Map function to indices of decorators to remove + self.decorators_to_remove: dict[FuncDef, list[int]] = decorators_to_remove + + # A mapping of import groups (a series of Import nodes with + # nothing in between) where each group is keyed by its first + # import node. + self.module_import_groups: dict[Import, list[Import]] = {} + self._current_import_group: Import | None = None + + self.errors: Errors = errors + + self.current_file: MypyFile = current_file + + self.missing_types_visitor = MissingTypesVisitor(types) + + def visit(self, o: Node) -> bool: + if not isinstance(o, Import): + self._current_import_group = None + return True + + def visit_assignment_stmt(self, stmt: AssignmentStmt) -> None: + # These are cases where mypy may not have types for certain expressions, + # but mypyc needs some form type to exist. + if stmt.is_alias_def: + stmt.rvalue.accept(self.missing_types_visitor) + return super().visit_assignment_stmt(stmt) + + def visit_block(self, block: Block) -> None: + self._current_import_group = None + super().visit_block(block) + self._current_import_group = None + + def visit_decorator(self, dec: Decorator) -> None: + if dec.decorators: + # Only add the function being decorated if there exist + # (ordinary) decorators in the decorator list. Certain + # decorators (such as @property, @abstractmethod) are + # special cased and removed from this list by + # mypy. Functions decorated only by special decorators + # (and property setters) are not treated as decorated + # functions by the IR builder. + if isinstance(dec.decorators[0], MemberExpr) and dec.decorators[0].name == "setter": + # Property setters are not treated as decorated methods. + self.prop_setters.add(dec.func) + else: + decorators_to_store = dec.decorators.copy() + if dec.func in self.decorators_to_remove: + to_remove = self.decorators_to_remove[dec.func] + + for i in reversed(to_remove): + del decorators_to_store[i] + # if all of the decorators are removed, we shouldn't treat this as a decorated + # function because there aren't any decorators to apply + if not decorators_to_store: + return + + self.funcs_to_decorators[dec.func] = decorators_to_store + super().visit_decorator(dec) + + def visit_func_def(self, fdef: FuncDef) -> None: + # TODO: What about overloaded functions? + self.visit_func(fdef) + self.visit_symbol_node(fdef) + + def visit_lambda_expr(self, expr: LambdaExpr) -> None: + self.visit_func(expr) + + def visit_func(self, func: FuncItem) -> None: + # If there were already functions or lambda expressions + # defined in the function stack, then note the previous + # FuncItem as containing a nested function and the current + # FuncItem as being a nested function. + if self.funcs: + # Add the new func to the set of nested funcs within the + # func at top of the func stack. + self.encapsulating_funcs.setdefault(self.funcs[-1], []).append(func) + # Add the func at top of the func stack as the parent of + # new func. 
+ self.nested_funcs[func] = self.funcs[-1] + + self.funcs.append(func) + super().visit_func(func) + self.funcs.pop() + + def visit_import(self, imp: Import) -> None: + if self._current_import_group is not None: + self.module_import_groups[self._current_import_group].append(imp) + else: + self.module_import_groups[imp] = [imp] + self._current_import_group = imp + super().visit_import(imp) + + def visit_name_expr(self, expr: NameExpr) -> None: + if isinstance(expr.node, (Var, FuncDef)): + self.visit_symbol_node(expr.node) + + def visit_var(self, var: Var) -> None: + self.visit_symbol_node(var) + + def visit_symbol_node(self, symbol: SymbolNode) -> None: + if not self.funcs: + # We are not inside a function and hence do not need to do + # anything regarding free variables. + return + + if symbol in self.symbols_to_funcs: + orig_func = self.symbols_to_funcs[symbol] + if self.is_parent(self.funcs[-1], orig_func): + # The function in which the symbol was previously seen is + # nested within the function currently being visited. Thus + # the current function is a better candidate to contain the + # declaration. + self.symbols_to_funcs[symbol] = self.funcs[-1] + # TODO: Remove from the orig_func free_variables set? + self.free_variables.setdefault(self.funcs[-1], set()).add(symbol) + + elif self.is_parent(orig_func, self.funcs[-1]): + # The SymbolNode instance has already been visited + # before in a parent function, thus it's a non-local + # symbol. + self.add_free_variable(symbol) + + else: + # This is the first time the SymbolNode is being + # visited. We map the SymbolNode to the current FuncDef + # being visited to note where it was first visited. + self.symbols_to_funcs[symbol] = self.funcs[-1] + + def is_parent(self, fitem: FuncItem, child: FuncItem) -> bool: + # Check if child is nested within fdef (possibly indirectly + # within multiple nested functions). + if child not in self.nested_funcs: + return False + parent = self.nested_funcs[child] + return parent == fitem or self.is_parent(fitem, parent) + + def add_free_variable(self, symbol: SymbolNode) -> None: + # Find the function where the symbol was (likely) first declared, + # and mark is as a non-local symbol within that function. + func = self.symbols_to_funcs[symbol] + self.free_variables.setdefault(func, set()).add(symbol) diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/prepare.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/prepare.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..434d9fd Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/prepare.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/prepare.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/prepare.py new file mode 100644 index 0000000..9f3c7fc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/prepare.py @@ -0,0 +1,897 @@ +"""Prepare for IR transform. + +This needs to run after type checking and before generating IR. + +For example, construct partially initialized FuncIR and ClassIR +objects for all functions and classes. This allows us to bind +references to functions and classes before we've generated full IR for +functions or classes. The actual IR transform will then populate all +the missing bits, such as function bodies (basic blocks). + +Also build a mapping from mypy TypeInfos to ClassIR objects. 
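For instance, preparing declarations before bodies is what lets mutually recursive native functions (or classes) refer to each other; a minimal source-level sketch, not taken from this module:

```python
def is_even(n: int) -> bool:
    return n == 0 or is_odd(n - 1)   # is_odd's FuncDecl already exists when this body is built


def is_odd(n: int) -> bool:
    return n != 0 and is_even(n - 1)
```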
+""" + +from __future__ import annotations + +from collections import defaultdict +from collections.abc import Iterable +from typing import Final, NamedTuple + +from mypy.build import Graph +from mypy.nodes import ( + ARG_STAR, + ARG_STAR2, + CallExpr, + ClassDef, + Decorator, + Expression, + FuncDef, + MemberExpr, + MypyFile, + NameExpr, + OverloadedFuncDef, + RefExpr, + SymbolNode, + TypeInfo, + Var, +) +from mypy.semanal import refers_to_fullname +from mypy.traverser import TraverserVisitor +from mypy.types import Instance, Type, get_proper_type +from mypyc.common import FAST_PREFIX, PROPSET_PREFIX, SELF_NAME, get_id_from_name +from mypyc.crash import catch_errors +from mypyc.errors import Errors +from mypyc.ir.class_ir import ClassIR +from mypyc.ir.func_ir import ( + FUNC_CLASSMETHOD, + FUNC_NORMAL, + FUNC_STATICMETHOD, + FuncDecl, + FuncSignature, + RuntimeArg, +) +from mypyc.ir.ops import DeserMaps +from mypyc.ir.rtypes import ( + RInstance, + RType, + dict_rprimitive, + none_rprimitive, + object_pointer_rprimitive, + object_rprimitive, + tuple_rprimitive, +) +from mypyc.irbuild.mapper import Mapper +from mypyc.irbuild.util import ( + get_func_def, + get_mypyc_attrs, + is_dataclass, + is_extension_class, + is_trait, +) +from mypyc.options import CompilerOptions +from mypyc.sametype import is_same_type + +GENERATOR_HELPER_NAME: Final = "__mypyc_generator_helper__" + + +def build_type_map( + mapper: Mapper, + modules: list[MypyFile], + graph: Graph, + types: dict[Expression, Type], + options: CompilerOptions, + errors: Errors, +) -> None: + # Collect all classes defined in everything we are compiling + classes = [] + for module in modules: + module_classes = [node for node in module.defs if isinstance(node, ClassDef)] + classes.extend([(module, cdef) for cdef in module_classes]) + + # Collect all class mappings so that we can bind arbitrary class name + # references even if there are import cycles. + for module, cdef in classes: + class_ir = ClassIR( + cdef.name, + module.fullname, + is_trait(cdef), + is_abstract=cdef.info.is_abstract, + is_final_class=cdef.info.is_final, + ) + class_ir.is_ext_class = is_extension_class(module.path, cdef, errors) + if class_ir.is_ext_class: + class_ir.deletable = cdef.info.deletable_attributes.copy() + # If global optimizations are disabled, turn of tracking of class children + if not options.global_opts: + class_ir.children = None + mapper.type_to_ir[cdef.info] = class_ir + mapper.symbol_fullnames.add(class_ir.fullname) + class_ir.is_enum = cdef.info.is_enum and len(cdef.info.enum_members) > 0 + + # Populate structural information in class IR for extension classes. + for module, cdef in classes: + with catch_errors(module.path, cdef.line): + if mapper.type_to_ir[cdef.info].is_ext_class: + prepare_class_def(module.path, module.fullname, cdef, errors, mapper, options) + else: + prepare_non_ext_class_def( + module.path, module.fullname, cdef, errors, mapper, options + ) + + # Prepare implicit attribute accessors as needed if an attribute overrides a property. + for module, cdef in classes: + class_ir = mapper.type_to_ir[cdef.info] + if class_ir.is_ext_class: + prepare_implicit_property_accessors(cdef.info, class_ir, module.fullname, mapper) + + # Collect all the functions also. We collect from the symbol table + # so that we can easily pick out the right copy of a function that + # is conditionally defined. This doesn't include nested functions! 
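+ # For example, given
+ #
+ #     if sys.version_info >= (3, 12):
+ #         def helper() -> int: ...
+ #     else:
+ #         def helper() -> int: ...
+ #
+ # only the copy recorded in the module symbol table gets a FuncDecl here.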
+ for module in modules: + for func in get_module_func_defs(module): + prepare_func_def(module.fullname, None, func, mapper, options) + # TODO: what else? + + # Check for incompatible attribute definitions that were not + # flagged by mypy but can't be supported when compiling. + for module, cdef in classes: + class_ir = mapper.type_to_ir[cdef.info] + for attr in class_ir.attributes: + for base_ir in class_ir.mro[1:]: + if attr in base_ir.attributes: + if not is_same_type(class_ir.attributes[attr], base_ir.attributes[attr]): + node = cdef.info.names[attr].node + assert node is not None + kind = "trait" if base_ir.is_trait else "class" + errors.error( + f'Type of "{attr}" is incompatible with ' + f'definition in {kind} "{base_ir.name}"', + module.path, + node.line, + ) + + +def is_from_module(node: SymbolNode, module: MypyFile) -> bool: + return node.fullname == module.fullname + "." + node.name + + +def load_type_map(mapper: Mapper, modules: list[MypyFile], deser_ctx: DeserMaps) -> None: + """Populate a Mapper with deserialized IR from a list of modules.""" + for module in modules: + for node in module.names.values(): + if ( + isinstance(node.node, TypeInfo) + and is_from_module(node.node, module) + and not node.node.is_newtype + and not node.node.is_named_tuple + and node.node.typeddict_type is None + ): + ir = deser_ctx.classes[node.node.fullname] + mapper.type_to_ir[node.node] = ir + mapper.symbol_fullnames.add(node.node.fullname) + mapper.func_to_decl[node.node] = ir.ctor + + for module in modules: + for func in get_module_func_defs(module): + func_id = get_id_from_name(func.name, func.fullname, func.line) + mapper.func_to_decl[func] = deser_ctx.functions[func_id].decl + + +def get_module_func_defs(module: MypyFile) -> Iterable[FuncDef]: + """Collect all of the (non-method) functions declared in a module.""" + for node in module.names.values(): + # We need to filter out functions that are imported or + # aliases. The best way to do this seems to be by + # checking that the fullname matches. + if isinstance(node.node, (FuncDef, Decorator, OverloadedFuncDef)) and is_from_module( + node.node, module + ): + yield get_func_def(node.node) + + +def prepare_func_def( + module_name: str, + class_name: str | None, + fdef: FuncDef, + mapper: Mapper, + options: CompilerOptions, +) -> FuncDecl: + kind = ( + FUNC_CLASSMETHOD + if fdef.is_class + else (FUNC_STATICMETHOD if fdef.is_static else FUNC_NORMAL) + ) + sig = mapper.fdef_to_sig(fdef, options.strict_dunders_typing) + decl = FuncDecl( + fdef.name, + class_name, + module_name, + sig, + kind, + is_generator=fdef.is_generator, + is_coroutine=fdef.is_coroutine, + ) + mapper.func_to_decl[fdef] = decl + return decl + + +def create_generator_class_for_func( + module_name: str, class_name: str | None, fdef: FuncDef, mapper: Mapper, name_suffix: str = "" +) -> ClassIR: + """For a generator/async function, declare a generator class. + + Each generator and async function gets a dedicated class that implements the + generator protocol with generated methods. 
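For example, both of the following get a dedicated class carrying the __mypyc_generator_helper__ declaration set up below (illustrative source code only):

```python
def pairs(xs: list[int]):
    for i in range(len(xs) - 1):
        yield xs[i], xs[i + 1]


async def double(x: int) -> int:
    return x * 2
```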
+ """ + assert fdef.is_coroutine or fdef.is_generator + name = "_".join(x for x in [fdef.name, class_name] if x) + "_gen" + name_suffix + cir = ClassIR(name, module_name, is_generated=True, is_final_class=class_name is None) + cir.reuse_freed_instance = True + mapper.fdef_to_generator[fdef] = cir + + helper_sig = FuncSignature( + ( + RuntimeArg(SELF_NAME, object_rprimitive), + RuntimeArg("type", object_rprimitive), + RuntimeArg("value", object_rprimitive), + RuntimeArg("traceback", object_rprimitive), + RuntimeArg("arg", object_rprimitive), + # If non-NULL, used to store return value instead of raising StopIteration(retv) + RuntimeArg("stop_iter_ptr", object_pointer_rprimitive), + ), + object_rprimitive, + ) + + # The implementation of most generator functionality is behind this magic method. + helper_fn_decl = FuncDecl(GENERATOR_HELPER_NAME, name, module_name, helper_sig, internal=True) + cir.method_decls[helper_fn_decl.name] = helper_fn_decl + return cir + + +def prepare_method_def( + ir: ClassIR, + module_name: str, + cdef: ClassDef, + mapper: Mapper, + node: FuncDef | Decorator, + options: CompilerOptions, +) -> None: + if isinstance(node, FuncDef): + ir.method_decls[node.name] = prepare_func_def( + module_name, cdef.name, node, mapper, options + ) + elif isinstance(node, Decorator): + # TODO: do something about abstract methods here. Currently, they are handled just like + # normal methods. + decl = prepare_func_def(module_name, cdef.name, node.func, mapper, options) + if not node.decorators: + ir.method_decls[node.name] = decl + elif isinstance(node.decorators[0], MemberExpr) and node.decorators[0].name == "setter": + # Make property setter name different than getter name so there are no + # name clashes when generating C code, and property lookup at the IR level + # works correctly. + decl.name = PROPSET_PREFIX + decl.name + decl.is_prop_setter = True + # Making the argument implicitly positional-only avoids unnecessary glue methods + decl.sig.args[1].pos_only = True + ir.method_decls[PROPSET_PREFIX + node.name] = decl + + if node.func.is_property: + assert node.func.type, f"Expected return type annotation for property '{node.name}'" + decl.is_prop_getter = True + ir.property_types[node.name] = decl.sig.ret_type + + +def prepare_fast_path( + ir: ClassIR, + module_name: str, + cdef: ClassDef, + mapper: Mapper, + node: SymbolNode | None, + options: CompilerOptions, +) -> None: + """Add fast (direct) variants of methods in non-extension classes.""" + if ir.is_enum: + # We check that non-empty enums are implicitly final in mypy, so we + # can generate direct calls to enum methods. + if isinstance(node, OverloadedFuncDef): + if node.is_property: + return + node = node.impl + if not isinstance(node, FuncDef): + # TODO: support decorated methods (at least @classmethod and @staticmethod). + return + # The simplest case is a regular or overloaded method without decorators. In this + # case we can generate practically identical IR method body, but with a signature + # suitable for direct calls (usual non-extension class methods are converted to + # callable classes, and thus have an extra __mypyc_self__ argument). 
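+ # For example, for
+ #
+ #     class Color(enum.Enum):
+ #         RED = 1
+ #         def describe(self) -> str: ...
+ #
+ # a direct-call variant of describe (FAST_PREFIX + "describe") is declared
+ # so that calls on a known Color receiver can skip that indirection.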
+ name = FAST_PREFIX + node.name + sig = mapper.fdef_to_sig(node, options.strict_dunders_typing) + decl = FuncDecl(name, cdef.name, module_name, sig, FUNC_NORMAL) + ir.method_decls[name] = decl + return + + +def is_valid_multipart_property_def(prop: OverloadedFuncDef) -> bool: + # Checks to ensure supported property decorator semantics + if len(prop.items) != 2: + return False + + getter = prop.items[0] + setter = prop.items[1] + + return ( + isinstance(getter, Decorator) + and isinstance(setter, Decorator) + and getter.func.is_property + and len(setter.decorators) == 1 + and isinstance(setter.decorators[0], MemberExpr) + and setter.decorators[0].name == "setter" + ) + + +def can_subclass_builtin(builtin_base: str) -> bool: + # BaseException and dict are special cased. + return builtin_base in ( + ( + "builtins.Exception", + "builtins.LookupError", + "builtins.IndexError", + "builtins.Warning", + "builtins.UserWarning", + "builtins.ValueError", + "builtins.object", + ) + ) + + +def prepare_class_def( + path: str, + module_name: str, + cdef: ClassDef, + errors: Errors, + mapper: Mapper, + options: CompilerOptions, +) -> None: + """Populate the interface-level information in a class IR. + + This includes attribute and method declarations, and the MRO, among other things, but + method bodies are generated in a later pass. + """ + + ir = mapper.type_to_ir[cdef.info] + info = cdef.info + + attrs, attrs_lines = get_mypyc_attrs(cdef, path, errors) + if attrs.get("allow_interpreted_subclasses") is True: + ir.allow_interpreted_subclasses = True + if attrs.get("serializable") is True: + # Supports copy.copy and pickle (including subclasses) + ir._serializable = True + + free_list_len = attrs.get("free_list_len") + if free_list_len is not None: + line = attrs_lines["free_list_len"] + if ir.is_trait: + errors.error('"free_list_len" can\'t be used with traits', path, line) + if ir.allow_interpreted_subclasses: + errors.error( + '"free_list_len" can\'t be used in a class that allows interpreted subclasses', + path, + line, + ) + if free_list_len == 1: + ir.reuse_freed_instance = True + else: + errors.error(f'Unsupported value for "free_list_len": {free_list_len}', path, line) + + # Check for subclassing from builtin types + for cls in info.mro: + # Special case exceptions and dicts + # XXX: How do we handle *other* things?? + if cls.fullname == "builtins.BaseException": + ir.builtin_base = "PyBaseExceptionObject" + elif cls.fullname == "builtins.dict": + ir.builtin_base = "PyDictObject" + elif cls.fullname.startswith("builtins."): + if not can_subclass_builtin(cls.fullname): + # Note that if we try to subclass a C extension class that + # isn't in builtins, bad things will happen and we won't + # catch it here! But this should catch a lot of the most + # common pitfalls. + errors.error( + "Inheriting from most builtin types is unimplemented", path, cdef.line + ) + errors.note( + "Potential workaround: @mypy_extensions.mypyc_attr(native_class=False)", + path, + cdef.line, + ) + errors.note( + "https://mypyc.readthedocs.io/en/stable/native_classes.html#defining-non-native-classes", + path, + cdef.line, + ) + + # Set up the parent class + bases = [mapper.type_to_ir[base.type] for base in info.bases if base.type in mapper.type_to_ir] + if len(bases) > 1 and any(not c.is_trait for c in bases) and bases[0].is_trait: + # If the first base is a non-trait, don't ever error here. While it is correct + # to error if a trait comes before the next non-trait base (e.g. 
non-trait, trait, + # non-trait), it's pointless, confusing noise from the bigger issue: multiple + # inheritance is *not* supported. + errors.error("Non-trait base must appear first in parent list", path, cdef.line) + ir.traits = [c for c in bases if c.is_trait] + + mro = [] # All mypyc base classes + base_mro = [] # Non-trait mypyc base classes + for cls in info.mro: + if cls not in mapper.type_to_ir: + if cls.fullname != "builtins.object": + ir.inherits_python = True + continue + base_ir = mapper.type_to_ir[cls] + if not base_ir.is_trait: + base_mro.append(base_ir) + mro.append(base_ir) + + if cls.defn.removed_base_type_exprs or not base_ir.is_ext_class: + ir.inherits_python = True + + base_idx = 1 if not ir.is_trait else 0 + if len(base_mro) > base_idx: + ir.base = base_mro[base_idx] + ir.mro = mro + ir.base_mro = base_mro + + prepare_methods_and_attributes(cdef, ir, path, module_name, errors, mapper, options) + prepare_init_method(cdef, ir, module_name, mapper) + + for base in bases: + if base.children is not None: + base.children.append(ir) + + if is_dataclass(cdef): + ir.is_augmented = True + + +def prepare_methods_and_attributes( + cdef: ClassDef, + ir: ClassIR, + path: str, + module_name: str, + errors: Errors, + mapper: Mapper, + options: CompilerOptions, +) -> None: + """Populate attribute and method declarations.""" + info = cdef.info + for name, node in info.names.items(): + # Currently all plugin generated methods are dummies and not included. + if node.plugin_generated: + continue + + if isinstance(node.node, Var): + assert node.node.type, "Class member %s missing type" % name + if not node.node.is_classvar and name not in ("__slots__", "__deletable__"): + attr_rtype = mapper.type_to_rtype(node.node.type) + if ir.is_trait and attr_rtype.error_overlap: + # Traits don't have attribute definedness bitmaps, so use + # property accessor methods to access attributes that need them. + # We will generate accessor implementations that use the class bitmap + # for any concrete subclasses. 
+ add_getter_declaration(ir, name, attr_rtype, module_name) + add_setter_declaration(ir, name, attr_rtype, module_name) + ir.attributes[name] = attr_rtype + elif isinstance(node.node, (FuncDef, Decorator)): + prepare_method_def(ir, module_name, cdef, mapper, node.node, options) + elif isinstance(node.node, OverloadedFuncDef): + # Handle case for property with both a getter and a setter + if node.node.is_property: + if is_valid_multipart_property_def(node.node): + for item in node.node.items: + prepare_method_def(ir, module_name, cdef, mapper, item, options) + else: + errors.error("Unsupported property decorator semantics", path, cdef.line) + + # Handle case for regular function overload + else: + if not node.node.impl: + errors.error( + "Overloads without implementation are not supported", path, cdef.line + ) + else: + prepare_method_def(ir, module_name, cdef, mapper, node.node.impl, options) + + if ir.builtin_base: + ir.attributes.clear() + + +def prepare_implicit_property_accessors( + info: TypeInfo, ir: ClassIR, module_name: str, mapper: Mapper +) -> None: + concrete_attributes = set() + for base in ir.base_mro: + for name, attr_rtype in base.attributes.items(): + concrete_attributes.add(name) + add_property_methods_for_attribute_if_needed( + info, ir, name, attr_rtype, module_name, mapper + ) + for base in ir.mro[1:]: + if base.is_trait: + for name, attr_rtype in base.attributes.items(): + if name not in concrete_attributes: + add_property_methods_for_attribute_if_needed( + info, ir, name, attr_rtype, module_name, mapper + ) + + +def add_property_methods_for_attribute_if_needed( + info: TypeInfo, + ir: ClassIR, + attr_name: str, + attr_rtype: RType, + module_name: str, + mapper: Mapper, +) -> None: + """Add getter and/or setter for attribute if defined as property in a base class. + + Only add declarations. The body IR will be synthesized later during irbuild. + """ + for base in info.mro[1:]: + if base in mapper.type_to_ir: + base_ir = mapper.type_to_ir[base] + n = base.names.get(attr_name) + if n is None: + continue + node = n.node + if isinstance(node, Decorator) and node.name not in ir.method_decls: + # Defined as a read-only property in base class/trait + add_getter_declaration(ir, attr_name, attr_rtype, module_name) + elif isinstance(node, OverloadedFuncDef) and is_valid_multipart_property_def(node): + # Defined as a read-write property in base class/trait + add_getter_declaration(ir, attr_name, attr_rtype, module_name) + add_setter_declaration(ir, attr_name, attr_rtype, module_name) + elif base_ir.is_trait and attr_rtype.error_overlap: + add_getter_declaration(ir, attr_name, attr_rtype, module_name) + add_setter_declaration(ir, attr_name, attr_rtype, module_name) + + +def add_getter_declaration( + ir: ClassIR, attr_name: str, attr_rtype: RType, module_name: str +) -> None: + self_arg = RuntimeArg("self", RInstance(ir), pos_only=True) + sig = FuncSignature([self_arg], attr_rtype) + decl = FuncDecl(attr_name, ir.name, module_name, sig, FUNC_NORMAL) + decl.is_prop_getter = True + decl.implicit = True # Triggers synthesization + ir.method_decls[attr_name] = decl + ir.property_types[attr_name] = attr_rtype # TODO: Needed?? 
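A source-level shape that leads to these getter/setter declarations might look like the following; this is a sketch that assumes the @trait decorator from mypy_extensions, and the accessor bodies themselves are synthesized later during irbuild:

```python
from mypy_extensions import trait


@trait
class HasCount:
    count: int               # int has error overlap, so the trait declares accessor methods


class Counter(HasCount):     # the concrete subclass stores the attribute and its definedness bit
    def __init__(self) -> None:
        self.count = 0
```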
+ + +def add_setter_declaration( + ir: ClassIR, attr_name: str, attr_rtype: RType, module_name: str +) -> None: + self_arg = RuntimeArg("self", RInstance(ir), pos_only=True) + value_arg = RuntimeArg("value", attr_rtype, pos_only=True) + sig = FuncSignature([self_arg, value_arg], none_rprimitive) + setter_name = PROPSET_PREFIX + attr_name + decl = FuncDecl(setter_name, ir.name, module_name, sig, FUNC_NORMAL) + decl.is_prop_setter = True + decl.implicit = True # Triggers synthesization + ir.method_decls[setter_name] = decl + + +def check_matching_args(init_sig: FuncSignature, new_sig: FuncSignature) -> bool: + num_init_args = len(init_sig.args) - init_sig.num_bitmap_args + num_new_args = len(new_sig.args) - new_sig.num_bitmap_args + if num_init_args != num_new_args: + return False + + for idx in range(1, num_init_args): + init_arg = init_sig.args[idx] + new_arg = new_sig.args[idx] + if init_arg.type != new_arg.type: + return False + + if init_arg.kind != new_arg.kind: + return False + + return True + + +def prepare_init_method(cdef: ClassDef, ir: ClassIR, module_name: str, mapper: Mapper) -> None: + # Set up a constructor decl + init_node = cdef.info["__init__"].node + + new_node: SymbolNode | None = None + new_symbol = cdef.info.get("__new__") + # We are only interested in __new__ method defined in a user-defined class, + # so we ignore it if it comes from a builtin type. It's usually builtins.object + # but could also be builtins.type for metaclasses so we detect the prefix which + # matches both. + if new_symbol and new_symbol.fullname and not new_symbol.fullname.startswith("builtins."): + new_node = new_symbol.node + if isinstance(new_node, (Decorator, OverloadedFuncDef)): + new_node = get_func_def(new_node) + if not ir.is_trait and not ir.builtin_base and isinstance(init_node, FuncDef): + init_sig = mapper.fdef_to_sig(init_node, True) + args_match = True + if isinstance(new_node, FuncDef): + new_sig = mapper.fdef_to_sig(new_node, True) + args_match = check_matching_args(init_sig, new_sig) + + defining_ir = mapper.type_to_ir.get(init_node.info) + # If there is a nontrivial __init__ that wasn't defined in an + # extension class, we need to make the constructor take *args, + # **kwargs so it can call tp_init. 
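+ # (For example, a native class whose nontrivial __init__ is inherited from
+ # an interpreted, non-extension base class ends up taking *args, **kwargs.)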
+ if ( + ( + defining_ir is None + or not defining_ir.is_ext_class + or cdef.info["__init__"].plugin_generated + ) + and init_node.info.fullname != "builtins.object" + ) or not args_match: + init_sig = FuncSignature( + [ + init_sig.args[0], + RuntimeArg("args", tuple_rprimitive, ARG_STAR), + RuntimeArg("kwargs", dict_rprimitive, ARG_STAR2), + ], + init_sig.ret_type, + ) + + last_arg = len(init_sig.args) - init_sig.num_bitmap_args + ctor_sig = FuncSignature(init_sig.args[1:last_arg], RInstance(ir)) + ir.ctor = FuncDecl(cdef.name, None, module_name, ctor_sig) + mapper.func_to_decl[cdef.info] = ir.ctor + + +def prepare_non_ext_class_def( + path: str, + module_name: str, + cdef: ClassDef, + errors: Errors, + mapper: Mapper, + options: CompilerOptions, +) -> None: + ir = mapper.type_to_ir[cdef.info] + info = cdef.info + + for node in info.names.values(): + if isinstance(node.node, (FuncDef, Decorator)): + prepare_method_def(ir, module_name, cdef, mapper, node.node, options) + elif isinstance(node.node, OverloadedFuncDef): + # Handle case for property with both a getter and a setter + if node.node.is_property: + if not is_valid_multipart_property_def(node.node): + errors.error("Unsupported property decorator semantics", path, cdef.line) + for item in node.node.items: + prepare_method_def(ir, module_name, cdef, mapper, item, options) + # Handle case for regular function overload + else: + prepare_method_def(ir, module_name, cdef, mapper, get_func_def(node.node), options) + + prepare_fast_path(ir, module_name, cdef, mapper, node.node, options) + + if any(cls in mapper.type_to_ir and mapper.type_to_ir[cls].is_ext_class for cls in info.mro): + errors.error( + "Non-extension classes may not inherit from extension classes", path, cdef.line + ) + + +RegisterImplInfo = tuple[TypeInfo, FuncDef] + + +class SingledispatchInfo(NamedTuple): + singledispatch_impls: dict[FuncDef, list[RegisterImplInfo]] + decorators_to_remove: dict[FuncDef, list[int]] + + +def find_singledispatch_register_impls( + modules: list[MypyFile], errors: Errors +) -> SingledispatchInfo: + visitor = SingledispatchVisitor(errors) + for module in modules: + visitor.current_path = module.path + module.accept(visitor) + return SingledispatchInfo(visitor.singledispatch_impls, visitor.decorators_to_remove) + + +class SingledispatchVisitor(TraverserVisitor): + current_path: str + + def __init__(self, errors: Errors) -> None: + super().__init__() + + # Map of main singledispatch function to list of registered implementations + self.singledispatch_impls: defaultdict[FuncDef, list[RegisterImplInfo]] = defaultdict(list) + + # Map of decorated function to the indices of any decorators to remove + self.decorators_to_remove: dict[FuncDef, list[int]] = {} + + self.errors: Errors = errors + self.func_stack_depth = 0 + + def visit_func_def(self, o: FuncDef) -> None: + self.func_stack_depth += 1 + super().visit_func_def(o) + self.func_stack_depth -= 1 + + def visit_decorator(self, dec: Decorator) -> None: + if dec.decorators: + decorators_to_store = dec.decorators.copy() + decorators_to_remove: list[int] = [] + # the index of the last non-register decorator before finding a register decorator + # when going through decorators from top to bottom + last_non_register: int | None = None + for i, d in enumerate(decorators_to_store): + impl = get_singledispatch_register_call_info(d, dec.func) + if impl is not None: + if self.func_stack_depth > 0: + self.errors.error( + "Registering nested functions not supported", self.current_path, d.line + ) + 
self.singledispatch_impls[impl.singledispatch_func].append( + (impl.dispatch_type, dec.func) + ) + decorators_to_remove.append(i) + if last_non_register is not None: + # found a register decorator after a non-register decorator, which we + # don't support because we'd have to make a copy of the function before + # calling the decorator so that we can call it later, which complicates + # the implementation for something that is probably not commonly used + self.errors.error( + "Calling decorator after registering function not supported", + self.current_path, + decorators_to_store[last_non_register].line, + ) + else: + if refers_to_fullname(d, "functools.singledispatch"): + if self.func_stack_depth > 0: + self.errors.error( + "Nested singledispatch functions not supported", + self.current_path, + d.line, + ) + decorators_to_remove.append(i) + # make sure that we still treat the function as a singledispatch function + # even if we don't find any registered implementations (which might happen + # if all registered implementations are registered dynamically) + self.singledispatch_impls.setdefault(dec.func, []) + last_non_register = i + + if decorators_to_remove: + # calling register on a function that tries to dispatch based on type annotations + # raises a TypeError because compiled functions don't have an __annotations__ + # attribute + self.decorators_to_remove[dec.func] = decorators_to_remove + + super().visit_decorator(dec) + + +class RegisteredImpl(NamedTuple): + singledispatch_func: FuncDef + dispatch_type: TypeInfo + + +def get_singledispatch_register_call_info( + decorator: Expression, func: FuncDef +) -> RegisteredImpl | None: + # @fun.register(complex) + # def g(arg): ... + if ( + isinstance(decorator, CallExpr) + and len(decorator.args) == 1 + and isinstance(decorator.args[0], RefExpr) + ): + callee = decorator.callee + dispatch_type = decorator.args[0].node + if not isinstance(dispatch_type, TypeInfo): + return None + + if isinstance(callee, MemberExpr): + return registered_impl_from_possible_register_call(callee, dispatch_type) + # @fun.register + # def g(arg: int): ... + elif isinstance(decorator, MemberExpr): + # we don't know if this is a register call yet, so we can't be sure that the function + # actually has arguments + if not func.arguments: + return None + arg_type = get_proper_type(func.arguments[0].variable.type) + if not isinstance(arg_type, Instance): + return None + info = arg_type.type + return registered_impl_from_possible_register_call(decorator, info) + return None + + +def registered_impl_from_possible_register_call( + expr: MemberExpr, dispatch_type: TypeInfo +) -> RegisteredImpl | None: + if expr.name == "register" and isinstance(expr.expr, NameExpr): + node = expr.expr.node + if isinstance(node, Decorator): + return RegisteredImpl(node.func, dispatch_type) + return None + + +def adjust_generator_classes_of_methods(mapper: Mapper) -> None: + """Make optimizations and adjustments to generated generator classes of methods. + + This is a separate pass after type map has been built, since we need all classes + to be processed to analyze class hierarchies. + """ + + generator_methods = [] + + for fdef, fn_ir in mapper.func_to_decl.items(): + if isinstance(fdef, FuncDef) and (fdef.is_coroutine or fdef.is_generator): + gen_ir = create_generator_class_for_func( + fn_ir.module_name, fn_ir.class_name, fdef, mapper + ) + # TODO: We could probably support decorators sometimes (static and class method?) 
+ if not fdef.is_decorated: + name = fn_ir.name + precise_ret_type = True + if fn_ir.class_name is not None: + class_ir = mapper.type_to_ir[fdef.info] + subcls = class_ir.subclasses() + if subcls is None: + # Override could be of a different type, so we can't make assumptions. + precise_ret_type = False + elif class_ir.is_trait: + # Give up on traits. We could possibly have an abstract base class + # for generator return types to make this use precise types. + precise_ret_type = False + else: + for s in subcls: + if name in s.method_decls: + m = s.method_decls[name] + if ( + m.is_generator != fn_ir.is_generator + or m.is_coroutine != fn_ir.is_coroutine + ): + # Override is of a different kind, and the optimization + # to use a precise generator return type doesn't work. + precise_ret_type = False + else: + class_ir = None + + if precise_ret_type: + # Give a more precise type for generators, so that we can optimize + # code that uses them. They return a generator object, which has a + # specific class. Without this, the type would have to be 'object'. + fn_ir.sig.ret_type = RInstance(gen_ir) + if fn_ir.bound_sig: + fn_ir.bound_sig.ret_type = RInstance(gen_ir) + if class_ir is not None: + if class_ir.is_method_final(name): + gen_ir.is_final_class = True + generator_methods.append((name, class_ir, gen_ir)) + + new_bases = {} + + for name, class_ir, gen in generator_methods: + # For generator methods, we need to have subclass generator classes inherit from + # baseclass generator classes when there are overrides to maintain LSP. + base = class_ir.real_base() + if base is not None: + if base.has_method(name): + base_sig = base.method_sig(name) + if isinstance(base_sig.ret_type, RInstance): + base_gen = base_sig.ret_type.class_ir + new_bases[gen] = base_gen + + # Add generator inheritance relationships by adjusting MROs. + for deriv, base in new_bases.items(): + if base.children is not None: + base.children.append(deriv) + while True: + deriv.mro.append(base) + deriv.base_mro.append(base) + if base not in new_bases: + break + base = new_bases[base] diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/specialize.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/specialize.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..b8adbc8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/specialize.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/specialize.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/specialize.py new file mode 100644 index 0000000..b64f510 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/specialize.py @@ -0,0 +1,1142 @@ +"""Special case IR generation of calls to specific builtin functions. + +Most special cases should be handled using the data driven "primitive +ops" system, but certain operations require special handling that has +access to the AST/IR directly and can make decisions/optimizations +based on it. These special cases can be implemented here. + +For example, we use specializers to statically emit the length of a +fixed length tuple and to emit optimized code for any()/all() calls with +generator comprehensions as the argument. + +See comment below for more documentation. 
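Illustrative calls that the specializers registered below can intercept, matching the cases named above (example code only):

```python
def demo(pair: tuple[int, str], xs: list[int]) -> bool:
    n = len(pair)                              # fixed-length tuple: length known statically
    return n == 2 and any(x > 0 for x in xs)   # any() over a generator comprehension
```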
+""" + +from __future__ import annotations + +from typing import Callable, Final, Optional, cast + +from mypy.nodes import ( + ARG_NAMED, + ARG_POS, + CallExpr, + DictExpr, + Expression, + GeneratorExpr, + IntExpr, + ListExpr, + MemberExpr, + NameExpr, + RefExpr, + StrExpr, + SuperExpr, + TupleExpr, + Var, +) +from mypy.types import AnyType, TypeOfAny +from mypyc.ir.ops import ( + BasicBlock, + Call, + Extend, + Integer, + PrimitiveDescription, + RaiseStandardError, + Register, + SetAttr, + Truncate, + Unreachable, + Value, +) +from mypyc.ir.rtypes import ( + RInstance, + RPrimitive, + RTuple, + RType, + bool_rprimitive, + bytes_rprimitive, + c_int_rprimitive, + dict_rprimitive, + int16_rprimitive, + int32_rprimitive, + int64_rprimitive, + int_rprimitive, + is_bool_rprimitive, + is_dict_rprimitive, + is_fixed_width_rtype, + is_float_rprimitive, + is_int16_rprimitive, + is_int32_rprimitive, + is_int64_rprimitive, + is_int_rprimitive, + is_list_rprimitive, + is_uint8_rprimitive, + list_rprimitive, + object_rprimitive, + set_rprimitive, + str_rprimitive, + uint8_rprimitive, +) +from mypyc.irbuild.builder import IRBuilder +from mypyc.irbuild.constant_fold import constant_fold_expr +from mypyc.irbuild.for_helpers import ( + comprehension_helper, + sequence_from_generator_preallocate_helper, + translate_list_comprehension, + translate_set_comprehension, +) +from mypyc.irbuild.format_str_tokenizer import ( + FormatOp, + convert_format_expr_to_str, + join_formatted_strings, + tokenizer_format_call, +) +from mypyc.primitives.bytes_ops import isinstance_bytearray, isinstance_bytes +from mypyc.primitives.dict_ops import ( + dict_items_op, + dict_keys_op, + dict_setdefault_spec_init_op, + dict_values_op, + isinstance_dict, +) +from mypyc.primitives.float_ops import isinstance_float +from mypyc.primitives.generic_ops import generic_setattr, setup_object +from mypyc.primitives.int_ops import isinstance_int +from mypyc.primitives.list_ops import isinstance_list, new_list_set_item_op +from mypyc.primitives.misc_ops import isinstance_bool +from mypyc.primitives.set_ops import isinstance_frozenset, isinstance_set +from mypyc.primitives.str_ops import ( + bytes_decode_ascii_strict, + bytes_decode_latin1_strict, + bytes_decode_utf8_strict, + isinstance_str, + str_encode_ascii_strict, + str_encode_latin1_strict, + str_encode_utf8_strict, +) +from mypyc.primitives.tuple_ops import isinstance_tuple, new_tuple_set_item_op + +# Specializers are attempted before compiling the arguments to the +# function. Specializers can return None to indicate that they failed +# and the call should be compiled normally. Otherwise they should emit +# code for the call and return a Value containing the result. +# +# Specializers take three arguments: the IRBuilder, the CallExpr being +# compiled, and the RefExpr that is the left hand side of the call. +Specializer = Callable[["IRBuilder", CallExpr, RefExpr], Optional[Value]] + +# Dictionary containing all configured specializers. +# +# Specializers can operate on methods as well, and are keyed on the +# name and RType in that case. +specializers: dict[tuple[str, RType | None], list[Specializer]] = {} + + +def _apply_specialization( + builder: IRBuilder, expr: CallExpr, callee: RefExpr, name: str | None, typ: RType | None = None +) -> Value | None: + # TODO: Allow special cases to have default args or named args. Currently they don't since + # they check that everything in arg_kinds is ARG_POS. + + # If there is a specializer for this function, try calling it. 
+ # Return the first successful one. + if name and (name, typ) in specializers: + for specializer in specializers[name, typ]: + val = specializer(builder, expr, callee) + if val is not None: + return val + return None + + +def apply_function_specialization( + builder: IRBuilder, expr: CallExpr, callee: RefExpr +) -> Value | None: + """Invoke the Specializer callback for a function if one has been registered""" + return _apply_specialization(builder, expr, callee, callee.fullname) + + +def apply_method_specialization( + builder: IRBuilder, expr: CallExpr, callee: MemberExpr, typ: RType | None = None +) -> Value | None: + """Invoke the Specializer callback for a method if one has been registered""" + name = callee.fullname if typ is None else callee.name + return _apply_specialization(builder, expr, callee, name, typ) + + +def specialize_function( + name: str, typ: RType | None = None +) -> Callable[[Specializer], Specializer]: + """Decorator to register a function as being a specializer. + + There may exist multiple specializers for one function. When + translating method calls, the earlier appended specializer has + higher priority. + """ + + def wrapper(f: Specializer) -> Specializer: + specializers.setdefault((name, typ), []).append(f) + return f + + return wrapper + + +@specialize_function("builtins.globals") +def translate_globals(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if len(expr.args) == 0: + return builder.load_globals_dict() + return None + + +@specialize_function("builtins.abs") +@specialize_function("builtins.int") +@specialize_function("builtins.float") +@specialize_function("builtins.complex") +@specialize_function("mypy_extensions.i64") +@specialize_function("mypy_extensions.i32") +@specialize_function("mypy_extensions.i16") +@specialize_function("mypy_extensions.u8") +def translate_builtins_with_unary_dunder( + builder: IRBuilder, expr: CallExpr, callee: RefExpr +) -> Value | None: + """Specialize calls on native classes that implement the associated dunder. + + E.g. i64(x) gets specialized to x.__int__() if x is a native instance. + """ + if len(expr.args) == 1 and expr.arg_kinds == [ARG_POS] and isinstance(callee, NameExpr): + arg = expr.args[0] + arg_typ = builder.node_type(arg) + shortname = callee.fullname.split(".")[1] + if shortname in ("i64", "i32", "i16", "u8"): + method = "__int__" + else: + method = f"__{shortname}__" + if isinstance(arg_typ, RInstance) and arg_typ.class_ir.has_method(method): + obj = builder.accept(arg) + return builder.gen_method_call(obj, method, [], None, expr.line) + + return None + + +@specialize_function("builtins.len") +def translate_len(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if len(expr.args) == 1 and expr.arg_kinds == [ARG_POS]: + arg = expr.args[0] + expr_rtype = builder.node_type(arg) + if isinstance(expr_rtype, RTuple): + # len() of fixed-length tuple can be trivially determined + # statically, though we still need to evaluate it. + builder.accept(arg) + return Integer(len(expr_rtype.types)) + else: + if is_list_rprimitive(builder.node_type(arg)): + borrow = True + else: + borrow = False + obj = builder.accept(arg, can_borrow=borrow) + return builder.builtin_len(obj, expr.line) + return None + + +@specialize_function("builtins.list") +def dict_methods_fast_path(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + """Specialize a common case when list() is called on a dictionary + view method call. 
+ + For example: + foo = list(bar.keys()) + """ + if not (len(expr.args) == 1 and expr.arg_kinds == [ARG_POS]): + return None + arg = expr.args[0] + if not (isinstance(arg, CallExpr) and not arg.args and isinstance(arg.callee, MemberExpr)): + return None + base = arg.callee.expr + attr = arg.callee.name + rtype = builder.node_type(base) + if not (is_dict_rprimitive(rtype) and attr in ("keys", "values", "items")): + return None + + obj = builder.accept(base) + # Note that it is not safe to use fast methods on dict subclasses, + # so the corresponding helpers in CPy.h fallback to (inlined) + # generic logic. + if attr == "keys": + return builder.call_c(dict_keys_op, [obj], expr.line) + elif attr == "values": + return builder.call_c(dict_values_op, [obj], expr.line) + else: + return builder.call_c(dict_items_op, [obj], expr.line) + + +@specialize_function("builtins.list") +def translate_list_from_generator_call( + builder: IRBuilder, expr: CallExpr, callee: RefExpr +) -> Value | None: + """Special case for simplest list comprehension. + + For example: + list(f(x) for x in some_list/some_tuple/some_str) + 'translate_list_comprehension()' would take care of other cases + if this fails. + """ + if ( + len(expr.args) == 1 + and expr.arg_kinds[0] == ARG_POS + and isinstance(expr.args[0], GeneratorExpr) + ): + return sequence_from_generator_preallocate_helper( + builder, + expr.args[0], + empty_op_llbuilder=builder.builder.new_list_op_with_length, + set_item_op=new_list_set_item_op, + ) + return None + + +@specialize_function("builtins.tuple") +def translate_tuple_from_generator_call( + builder: IRBuilder, expr: CallExpr, callee: RefExpr +) -> Value | None: + """Special case for simplest tuple creation from a generator. + + For example: + tuple(f(x) for x in some_list/some_tuple/some_str/some_bytes) + 'translate_safe_generator_call()' would take care of other cases + if this fails. + """ + if ( + len(expr.args) == 1 + and expr.arg_kinds[0] == ARG_POS + and isinstance(expr.args[0], GeneratorExpr) + ): + return sequence_from_generator_preallocate_helper( + builder, + expr.args[0], + empty_op_llbuilder=builder.builder.new_tuple_with_length, + set_item_op=new_tuple_set_item_op, + ) + return None + + +@specialize_function("builtins.set") +def translate_set_from_generator_call( + builder: IRBuilder, expr: CallExpr, callee: RefExpr +) -> Value | None: + """Special case for set creation from a generator. + + For example: + set(f(...) for ... in iterator/nested_generators...) + """ + if ( + len(expr.args) == 1 + and expr.arg_kinds[0] == ARG_POS + and isinstance(expr.args[0], GeneratorExpr) + ): + return translate_set_comprehension(builder, expr.args[0]) + return None + + +@specialize_function("builtins.min") +@specialize_function("builtins.max") +def faster_min_max(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if expr.arg_kinds == [ARG_POS, ARG_POS]: + x, y = builder.accept(expr.args[0]), builder.accept(expr.args[1]) + result = Register(builder.node_type(expr)) + # CPython evaluates arguments reversely when calling min(...) or max(...) 
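+        # Note that min(x, y) returns x on ties, so the branches below pick y only when
+        # y < x strictly (and, symmetrically, only when y > x for max).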
+ if callee.fullname == "builtins.min": + comparison = builder.binary_op(y, x, "<", expr.line) + else: + comparison = builder.binary_op(y, x, ">", expr.line) + + true_block, false_block, next_block = BasicBlock(), BasicBlock(), BasicBlock() + builder.add_bool_branch(comparison, true_block, false_block) + + builder.activate_block(true_block) + builder.assign(result, builder.coerce(y, result.type, expr.line), expr.line) + builder.goto(next_block) + + builder.activate_block(false_block) + builder.assign(result, builder.coerce(x, result.type, expr.line), expr.line) + builder.goto(next_block) + + builder.activate_block(next_block) + return result + return None + + +@specialize_function("builtins.tuple") +@specialize_function("builtins.frozenset") +@specialize_function("builtins.dict") +@specialize_function("builtins.min") +@specialize_function("builtins.max") +@specialize_function("builtins.sorted") +@specialize_function("collections.OrderedDict") +@specialize_function("join", str_rprimitive) +@specialize_function("extend", list_rprimitive) +@specialize_function("update", dict_rprimitive) +@specialize_function("update", set_rprimitive) +def translate_safe_generator_call( + builder: IRBuilder, expr: CallExpr, callee: RefExpr +) -> Value | None: + """Special cases for things that consume iterators where we know we + can safely compile a generator into a list. + """ + if ( + len(expr.args) > 0 + and expr.arg_kinds[0] == ARG_POS + and isinstance(expr.args[0], GeneratorExpr) + ): + if isinstance(callee, MemberExpr): + return builder.gen_method_call( + builder.accept(callee.expr), + callee.name, + ( + [translate_list_comprehension(builder, expr.args[0])] + + [builder.accept(arg) for arg in expr.args[1:]] + ), + builder.node_type(expr), + expr.line, + expr.arg_kinds, + expr.arg_names, + ) + else: + return builder.call_refexpr_with_args( + expr, + callee, + ( + [translate_list_comprehension(builder, expr.args[0])] + + [builder.accept(arg) for arg in expr.args[1:]] + ), + ) + return None + + +@specialize_function("builtins.any") +def translate_any_call(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if ( + len(expr.args) == 1 + and expr.arg_kinds == [ARG_POS] + and isinstance(expr.args[0], GeneratorExpr) + ): + return any_all_helper(builder, expr.args[0], builder.false, lambda x: x, builder.true) + return None + + +@specialize_function("builtins.all") +def translate_all_call(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if ( + len(expr.args) == 1 + and expr.arg_kinds == [ARG_POS] + and isinstance(expr.args[0], GeneratorExpr) + ): + return any_all_helper( + builder, + expr.args[0], + builder.true, + lambda x: builder.unary_op(x, "not", expr.line), + builder.false, + ) + return None + + +def any_all_helper( + builder: IRBuilder, + gen: GeneratorExpr, + initial_value: Callable[[], Value], + modify: Callable[[Value], Value], + new_value: Callable[[], Value], +) -> Value: + retval = Register(bool_rprimitive) + builder.assign(retval, initial_value(), -1) + loop_params = list(zip(gen.indices, gen.sequences, gen.condlists, gen.is_async)) + true_block, false_block, exit_block = BasicBlock(), BasicBlock(), BasicBlock() + + def gen_inner_stmts() -> None: + comparison = modify(builder.accept(gen.left_expr)) + builder.add_bool_branch(comparison, true_block, false_block) + builder.activate_block(true_block) + builder.assign(retval, new_value(), -1) + builder.goto(exit_block) + builder.activate_block(false_block) + + comprehension_helper(builder, loop_params, 
gen_inner_stmts, gen.line) + builder.goto_and_activate(exit_block) + + return retval + + +@specialize_function("builtins.sum") +def translate_sum_call(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + # specialized implementation is used if: + # - only one or two arguments given (if not, sum() has been given invalid arguments) + # - first argument is a Generator (there is no benefit to optimizing the performance of eg. + # sum([1, 2, 3]), so non-Generator Iterables are not handled) + if not ( + len(expr.args) in (1, 2) + and expr.arg_kinds[0] == ARG_POS + and isinstance(expr.args[0], GeneratorExpr) + ): + return None + + # handle 'start' argument, if given + if len(expr.args) == 2: + # ensure call to sum() was properly constructed + if expr.arg_kinds[1] not in (ARG_POS, ARG_NAMED): + return None + start_expr = expr.args[1] + else: + start_expr = IntExpr(0) + + gen_expr = expr.args[0] + target_type = builder.node_type(expr) + retval = Register(target_type) + builder.assign(retval, builder.coerce(builder.accept(start_expr), target_type, -1), -1) + + def gen_inner_stmts() -> None: + call_expr = builder.accept(gen_expr.left_expr) + builder.assign(retval, builder.binary_op(retval, call_expr, "+", -1), -1) + + loop_params = list( + zip(gen_expr.indices, gen_expr.sequences, gen_expr.condlists, gen_expr.is_async) + ) + comprehension_helper(builder, loop_params, gen_inner_stmts, gen_expr.line) + + return retval + + +@specialize_function("dataclasses.field") +@specialize_function("attr.ib") +@specialize_function("attr.attrib") +@specialize_function("attr.Factory") +def translate_dataclasses_field_call( + builder: IRBuilder, expr: CallExpr, callee: RefExpr +) -> Value | None: + """Special case for 'dataclasses.field', 'attr.attrib', and 'attr.Factory' + function calls because the results of such calls are type-checked + by mypy using the types of the arguments to their respective + functions, resulting in attempted coercions by mypyc that throw a + runtime error. + """ + builder.types[expr] = AnyType(TypeOfAny.from_error) + return None + + +@specialize_function("builtins.next") +def translate_next_call(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + """Special case for calling next() on a generator expression, an + idiom that shows up some in mypy. + + For example, next(x for x in l if x.id == 12, None) will + generate code that searches l for an element where x.id == 12 + and produce the first such object, or None if no such element + exists. + """ + if not ( + expr.arg_kinds in ([ARG_POS], [ARG_POS, ARG_POS]) + and isinstance(expr.args[0], GeneratorExpr) + ): + return None + + gen = expr.args[0] + retval = Register(builder.node_type(expr)) + default_val = builder.accept(expr.args[1]) if len(expr.args) > 1 else None + exit_block = BasicBlock() + + def gen_inner_stmts() -> None: + # next takes the first element of the generator, so if + # something gets produced, we are done. + builder.assign(retval, builder.accept(gen.left_expr), gen.left_expr.line) + builder.goto(exit_block) + + loop_params = list(zip(gen.indices, gen.sequences, gen.condlists, gen.is_async)) + comprehension_helper(builder, loop_params, gen_inner_stmts, gen.line) + + # Now we need the case for when nothing got hit. If there was + # a default value, we produce it, and otherwise we raise + # StopIteration. 
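+    # e.g. next((x for x in l if x.id == 12), None) reaches this point when no element
+    # matched: the default branch below produces None, and without a default we raise.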
+ if default_val: + builder.assign(retval, default_val, gen.left_expr.line) + builder.goto(exit_block) + else: + builder.add(RaiseStandardError(RaiseStandardError.STOP_ITERATION, None, expr.line)) + builder.add(Unreachable()) + + builder.activate_block(exit_block) + return retval + + +isinstance_primitives: Final = { + "builtins.bool": isinstance_bool, + "builtins.bytearray": isinstance_bytearray, + "builtins.bytes": isinstance_bytes, + "builtins.dict": isinstance_dict, + "builtins.float": isinstance_float, + "builtins.frozenset": isinstance_frozenset, + "builtins.int": isinstance_int, + "builtins.list": isinstance_list, + "builtins.set": isinstance_set, + "builtins.str": isinstance_str, + "builtins.tuple": isinstance_tuple, +} + + +@specialize_function("builtins.isinstance") +def translate_isinstance(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + """Special case for builtins.isinstance. + + Prevent coercions on the thing we are checking the instance of - + there is no need to coerce something to a new type before checking + what type it is, and the coercion could lead to bugs. + """ + if not (len(expr.args) == 2 and expr.arg_kinds == [ARG_POS, ARG_POS]): + return None + + obj_expr = expr.args[0] + type_expr = expr.args[1] + + if isinstance(type_expr, TupleExpr) and not type_expr.items: + # we can compile this case to a noop + return builder.false() + + if isinstance(type_expr, (RefExpr, TupleExpr)): + builder.types[obj_expr] = AnyType(TypeOfAny.from_error) + + irs = builder.flatten_classes(type_expr) + if irs is not None: + can_borrow = all( + ir.is_ext_class and not ir.inherits_python and not ir.allow_interpreted_subclasses + for ir in irs + ) + obj = builder.accept(obj_expr, can_borrow=can_borrow) + return builder.builder.isinstance_helper(obj, irs, expr.line) + + if isinstance(type_expr, RefExpr): + node = type_expr.node + if node: + desc = isinstance_primitives.get(node.fullname) + if desc: + obj = builder.accept(obj_expr) + return builder.primitive_op(desc, [obj], expr.line) + + elif isinstance(type_expr, TupleExpr): + node_names: list[str] = [] + for item in type_expr.items: + if not isinstance(item, RefExpr): + return None + if item.node is None: + return None + if item.node.fullname not in node_names: + node_names.append(item.node.fullname) + + descs = [isinstance_primitives.get(fullname) for fullname in node_names] + if None in descs: + # not all types are primitive types, abort + return None + + obj = builder.accept(obj_expr) + + retval = Register(bool_rprimitive) + pass_block = BasicBlock() + fail_block = BasicBlock() + exit_block = BasicBlock() + + # Chain the checks: if any succeed, jump to pass_block; else, continue + for i, desc in enumerate(descs): + is_last = i == len(descs) - 1 + next_block = fail_block if is_last else BasicBlock() + builder.add_bool_branch( + builder.primitive_op(cast(PrimitiveDescription, desc), [obj], expr.line), + pass_block, + next_block, + ) + if not is_last: + builder.activate_block(next_block) + + # If any check passed + builder.activate_block(pass_block) + builder.assign(retval, builder.true(), expr.line) + builder.goto(exit_block) + + # If all checks failed + builder.activate_block(fail_block) + builder.assign(retval, builder.false(), expr.line) + builder.goto(exit_block) + + # Return the result + builder.activate_block(exit_block) + return retval + + return None + + +@specialize_function("setdefault", dict_rprimitive) +def translate_dict_setdefault(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + 
"""Special case for 'dict.setdefault' which would only construct + default empty collection when needed. + + The dict_setdefault_spec_init_op checks whether the dict contains + the key and would construct the empty collection only once. + + For example, this specializer works for the following cases: + d.setdefault(key, set()).add(value) + d.setdefault(key, []).append(value) + d.setdefault(key, {})[inner_key] = inner_val + """ + if ( + len(expr.args) == 2 + and expr.arg_kinds == [ARG_POS, ARG_POS] + and isinstance(callee, MemberExpr) + ): + arg = expr.args[1] + if isinstance(arg, ListExpr): + if len(arg.items): + return None + data_type = Integer(1, c_int_rprimitive, expr.line) + elif isinstance(arg, DictExpr): + if len(arg.items): + return None + data_type = Integer(2, c_int_rprimitive, expr.line) + elif ( + isinstance(arg, CallExpr) + and isinstance(arg.callee, NameExpr) + and arg.callee.fullname == "builtins.set" + ): + if len(arg.args): + return None + data_type = Integer(3, c_int_rprimitive, expr.line) + else: + return None + + callee_dict = builder.accept(callee.expr) + key_val = builder.accept(expr.args[0]) + return builder.call_c( + dict_setdefault_spec_init_op, [callee_dict, key_val, data_type], expr.line + ) + return None + + +@specialize_function("format", str_rprimitive) +def translate_str_format(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if isinstance(callee, MemberExpr): + folded_callee = constant_fold_expr(builder, callee.expr) + if isinstance(folded_callee, str) and expr.arg_kinds.count(ARG_POS) == len(expr.arg_kinds): + tokens = tokenizer_format_call(folded_callee) + if tokens is None: + return None + literals, format_ops = tokens + # Convert variables to strings + substitutions = convert_format_expr_to_str(builder, format_ops, expr.args, expr.line) + if substitutions is None: + return None + return join_formatted_strings(builder, literals, substitutions, expr.line) + return None + + +@specialize_function("join", str_rprimitive) +def translate_fstring(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + """Special case for f-string, which is translated into str.join() + in mypy AST. + + This specializer optimizes simplest f-strings which don't contain + any format operation. 
+ """ + if ( + isinstance(callee, MemberExpr) + and isinstance(callee.expr, StrExpr) + and callee.expr.value == "" + and expr.arg_kinds == [ARG_POS] + and isinstance(expr.args[0], ListExpr) + ): + for item in expr.args[0].items: + if isinstance(item, StrExpr): + continue + elif isinstance(item, CallExpr): + if not isinstance(item.callee, MemberExpr) or item.callee.name != "format": + return None + elif ( + not isinstance(item.callee.expr, StrExpr) or item.callee.expr.value != "{:{}}" + ): + return None + + if not isinstance(item.args[1], StrExpr) or item.args[1].value != "": + return None + else: + return None + + format_ops = [] + exprs: list[Expression] = [] + + for item in expr.args[0].items: + if isinstance(item, StrExpr) and item.value != "": + format_ops.append(FormatOp.STR) + exprs.append(item) + elif isinstance(item, CallExpr): + format_ops.append(FormatOp.STR) + exprs.append(item.args[0]) + + def get_literal_str(expr: Expression) -> str | None: + if isinstance(expr, StrExpr): + return expr.value + elif isinstance(expr, RefExpr) and isinstance(expr.node, Var) and expr.node.is_final: + final_value = expr.node.final_value + if final_value is not None: + return str(final_value) + return None + + for i in range(len(exprs) - 1): + while ( + len(exprs) >= i + 2 + and (first := get_literal_str(exprs[i])) is not None + and (second := get_literal_str(exprs[i + 1])) is not None + ): + exprs = [*exprs[:i], StrExpr(first + second), *exprs[i + 2 :]] + format_ops = [*format_ops[:i], FormatOp.STR, *format_ops[i + 2 :]] + + substitutions = convert_format_expr_to_str(builder, format_ops, exprs, expr.line) + if substitutions is None: + return None + + return join_formatted_strings(builder, None, substitutions, expr.line) + return None + + +@specialize_function("encode", str_rprimitive) +def str_encode_fast_path(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + """Specialize common cases of str.encode for most used encodings and strict errors.""" + + if not isinstance(callee, MemberExpr): + return None + + # We can only specialize if we have string literals as args + if len(expr.arg_kinds) > 0 and not isinstance(expr.args[0], StrExpr): + return None + if len(expr.arg_kinds) > 1 and not isinstance(expr.args[1], StrExpr): + return None + + encoding = "utf8" + errors = "strict" + if len(expr.arg_kinds) > 0 and isinstance(expr.args[0], StrExpr): + if expr.arg_kinds[0] == ARG_NAMED: + if expr.arg_names[0] == "encoding": + encoding = expr.args[0].value + elif expr.arg_names[0] == "errors": + errors = expr.args[0].value + elif expr.arg_kinds[0] == ARG_POS: + encoding = expr.args[0].value + else: + return None + if len(expr.arg_kinds) > 1 and isinstance(expr.args[1], StrExpr): + if expr.arg_kinds[1] == ARG_NAMED: + if expr.arg_names[1] == "encoding": + encoding = expr.args[1].value + elif expr.arg_names[1] == "errors": + errors = expr.args[1].value + elif expr.arg_kinds[1] == ARG_POS: + errors = expr.args[1].value + else: + return None + + if errors != "strict": + # We can only specialize strict errors + return None + + encoding = encoding.lower().replace("-", "").replace("_", "") # normalize + # Specialized encodings and their accepted aliases + if encoding in ["u8", "utf", "utf8", "cp65001"]: + return builder.call_c(str_encode_utf8_strict, [builder.accept(callee.expr)], expr.line) + elif encoding in ["646", "ascii", "usascii"]: + return builder.call_c(str_encode_ascii_strict, [builder.accept(callee.expr)], expr.line) + elif encoding in ["iso88591", "8859", "cp819", "latin", "latin1", 
"l1"]: + return builder.call_c(str_encode_latin1_strict, [builder.accept(callee.expr)], expr.line) + + return None + + +@specialize_function("decode", bytes_rprimitive) +def bytes_decode_fast_path(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + """Specialize common cases of obj.decode for most used encodings and strict errors.""" + + if not isinstance(callee, MemberExpr): + return None + + # We can only specialize if we have string literals as args + if len(expr.arg_kinds) > 0 and not isinstance(expr.args[0], StrExpr): + return None + if len(expr.arg_kinds) > 1 and not isinstance(expr.args[1], StrExpr): + return None + + encoding = "utf8" + errors = "strict" + if len(expr.arg_kinds) > 0 and isinstance(expr.args[0], StrExpr): + if expr.arg_kinds[0] == ARG_NAMED: + if expr.arg_names[0] == "encoding": + encoding = expr.args[0].value + elif expr.arg_names[0] == "errors": + errors = expr.args[0].value + elif expr.arg_kinds[0] == ARG_POS: + encoding = expr.args[0].value + else: + return None + if len(expr.arg_kinds) > 1 and isinstance(expr.args[1], StrExpr): + if expr.arg_kinds[1] == ARG_NAMED: + if expr.arg_names[1] == "encoding": + encoding = expr.args[1].value + elif expr.arg_names[1] == "errors": + errors = expr.args[1].value + elif expr.arg_kinds[1] == ARG_POS: + errors = expr.args[1].value + else: + return None + + if errors != "strict": + # We can only specialize strict errors + return None + + encoding = encoding.lower().replace("_", "-") # normalize + # Specialized encodings and their accepted aliases + if encoding in ["u8", "utf", "utf8", "utf-8", "cp65001"]: + return builder.call_c(bytes_decode_utf8_strict, [builder.accept(callee.expr)], expr.line) + elif encoding in ["646", "ascii", "usascii", "us-ascii"]: + return builder.call_c(bytes_decode_ascii_strict, [builder.accept(callee.expr)], expr.line) + elif encoding in [ + "iso8859-1", + "iso-8859-1", + "8859", + "cp819", + "latin", + "latin1", + "latin-1", + "l1", + ]: + return builder.call_c(bytes_decode_latin1_strict, [builder.accept(callee.expr)], expr.line) + + return None + + +@specialize_function("mypy_extensions.i64") +def translate_i64(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if len(expr.args) != 1 or expr.arg_kinds[0] != ARG_POS: + return None + arg = expr.args[0] + arg_type = builder.node_type(arg) + if is_int64_rprimitive(arg_type): + return builder.accept(arg) + elif is_int32_rprimitive(arg_type) or is_int16_rprimitive(arg_type): + val = builder.accept(arg) + return builder.add(Extend(val, int64_rprimitive, signed=True, line=expr.line)) + elif is_uint8_rprimitive(arg_type): + val = builder.accept(arg) + return builder.add(Extend(val, int64_rprimitive, signed=False, line=expr.line)) + elif is_int_rprimitive(arg_type) or is_bool_rprimitive(arg_type): + val = builder.accept(arg) + return builder.coerce(val, int64_rprimitive, expr.line) + return None + + +@specialize_function("mypy_extensions.i32") +def translate_i32(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if len(expr.args) != 1 or expr.arg_kinds[0] != ARG_POS: + return None + arg = expr.args[0] + arg_type = builder.node_type(arg) + if is_int32_rprimitive(arg_type): + return builder.accept(arg) + elif is_int64_rprimitive(arg_type): + val = builder.accept(arg) + return builder.add(Truncate(val, int32_rprimitive, line=expr.line)) + elif is_int16_rprimitive(arg_type): + val = builder.accept(arg) + return builder.add(Extend(val, int32_rprimitive, signed=True, line=expr.line)) + elif 
is_uint8_rprimitive(arg_type): + val = builder.accept(arg) + return builder.add(Extend(val, int32_rprimitive, signed=False, line=expr.line)) + elif is_int_rprimitive(arg_type) or is_bool_rprimitive(arg_type): + val = builder.accept(arg) + val = truncate_literal(val, int32_rprimitive) + return builder.coerce(val, int32_rprimitive, expr.line) + return None + + +@specialize_function("mypy_extensions.i16") +def translate_i16(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if len(expr.args) != 1 or expr.arg_kinds[0] != ARG_POS: + return None + arg = expr.args[0] + arg_type = builder.node_type(arg) + if is_int16_rprimitive(arg_type): + return builder.accept(arg) + elif is_int32_rprimitive(arg_type) or is_int64_rprimitive(arg_type): + val = builder.accept(arg) + return builder.add(Truncate(val, int16_rprimitive, line=expr.line)) + elif is_uint8_rprimitive(arg_type): + val = builder.accept(arg) + return builder.add(Extend(val, int16_rprimitive, signed=False, line=expr.line)) + elif is_int_rprimitive(arg_type) or is_bool_rprimitive(arg_type): + val = builder.accept(arg) + val = truncate_literal(val, int16_rprimitive) + return builder.coerce(val, int16_rprimitive, expr.line) + return None + + +@specialize_function("mypy_extensions.u8") +def translate_u8(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if len(expr.args) != 1 or expr.arg_kinds[0] != ARG_POS: + return None + arg = expr.args[0] + arg_type = builder.node_type(arg) + if is_uint8_rprimitive(arg_type): + return builder.accept(arg) + elif ( + is_int16_rprimitive(arg_type) + or is_int32_rprimitive(arg_type) + or is_int64_rprimitive(arg_type) + ): + val = builder.accept(arg) + return builder.add(Truncate(val, uint8_rprimitive, line=expr.line)) + elif is_int_rprimitive(arg_type) or is_bool_rprimitive(arg_type): + val = builder.accept(arg) + val = truncate_literal(val, uint8_rprimitive) + return builder.coerce(val, uint8_rprimitive, expr.line) + return None + + +def truncate_literal(value: Value, rtype: RPrimitive) -> Value: + """If value is an integer literal value, truncate it to given native int rtype. + + For example, truncate 256 into 0 if rtype is u8. 
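+    Similarly, -1 is truncated into 255 if rtype is u8, and 65535 into -1 if rtype
+    is i16: the value is masked to the type's bit width and then shifted back into
+    the signed range for signed types.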
+ """ + if not isinstance(value, Integer): + return value # Not a literal, nothing to do + x = value.numeric_value() + max_unsigned = (1 << (rtype.size * 8)) - 1 + x = x & max_unsigned + if rtype.is_signed and x >= (max_unsigned + 1) // 2: + # Adjust to make it a negative value + x -= max_unsigned + 1 + return Integer(x, rtype) + + +@specialize_function("builtins.int") +def translate_int(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if len(expr.args) != 1 or expr.arg_kinds[0] != ARG_POS: + return None + arg = expr.args[0] + arg_type = builder.node_type(arg) + if ( + is_bool_rprimitive(arg_type) + or is_int_rprimitive(arg_type) + or is_fixed_width_rtype(arg_type) + ): + src = builder.accept(arg) + return builder.coerce(src, int_rprimitive, expr.line) + return None + + +@specialize_function("builtins.bool") +def translate_bool(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if len(expr.args) != 1 or expr.arg_kinds[0] != ARG_POS: + return None + arg = expr.args[0] + src = builder.accept(arg) + return builder.builder.bool_value(src) + + +@specialize_function("builtins.float") +def translate_float(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if len(expr.args) != 1 or expr.arg_kinds[0] != ARG_POS: + return None + arg = expr.args[0] + arg_type = builder.node_type(arg) + if is_float_rprimitive(arg_type): + # No-op float conversion. + return builder.accept(arg) + return None + + +@specialize_function("builtins.ord") +def translate_ord(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if len(expr.args) != 1 or expr.arg_kinds[0] != ARG_POS: + return None + arg = constant_fold_expr(builder, expr.args[0]) + if isinstance(arg, (str, bytes)) and len(arg) == 1: + return Integer(ord(arg)) + return None + + +def is_object(callee: RefExpr) -> bool: + """Returns True for object. calls.""" + return ( + isinstance(callee, MemberExpr) + and isinstance(callee.expr, NameExpr) + and callee.expr.fullname == "builtins.object" + ) + + +def is_super_or_object(expr: CallExpr, callee: RefExpr) -> bool: + """Returns True for super(). or object. calls.""" + return isinstance(expr.callee, SuperExpr) or is_object(callee) + + +@specialize_function("__new__", object_rprimitive) +def translate_object_new(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + fn = builder.fn_info + if fn.name != "__new__" or not is_super_or_object(expr, callee): + return None + + ir = builder.get_current_class_ir() + if ir is None: + return None + + call = '"object.__new__()"' + if not ir.is_ext_class: + builder.error(f"{call} not supported for non-extension classes", expr.line) + return None + if ir.inherits_python: + builder.error( + f"{call} not supported for classes inheriting from non-native classes", expr.line + ) + return None + if len(expr.args) != 1: + builder.error(f"{call} supported only with 1 argument, got {len(expr.args)}", expr.line) + return None + + typ_arg = expr.args[0] + method_args = fn.fitem.arg_names + if isinstance(typ_arg, NameExpr) and len(method_args) > 0 and method_args[0] == typ_arg.name: + subtype = builder.accept(expr.args[0]) + subs = ir.subclasses() + if subs is not None and len(subs) == 0: + return builder.add(Call(ir.setup, [subtype], expr.line)) + # Call a function that dynamically resolves the setup function of extension classes from the type object. 
+ # This is necessary because the setup involves default attribute initialization and setting up + # the vtable which are specific to a given type and will not work if a subtype is created using + # the setup function of its base. + return builder.call_c(setup_object, [subtype], expr.line) + + return None + + +@specialize_function("__setattr__", object_rprimitive) +def translate_object_setattr(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + is_super = isinstance(expr.callee, SuperExpr) + is_object_callee = is_object(callee) + if not ((is_super and len(expr.args) >= 2) or (is_object_callee and len(expr.args) >= 3)): + return None + + self_reg = builder.accept(expr.args[0]) if is_object_callee else builder.self() + ir = builder.get_current_class_ir() + if ir and (not ir.is_ext_class or ir.builtin_base or ir.inherits_python): + return None + # Need to offset by 1 for super().__setattr__ calls because there is no self arg in this case. + name_idx = 0 if is_super else 1 + value_idx = 1 if is_super else 2 + attr_name = expr.args[name_idx] + attr_value = expr.args[value_idx] + value = builder.accept(attr_value) + + if isinstance(attr_name, StrExpr) and ir and ir.has_attr(attr_name.value): + name = attr_name.value + value = builder.coerce(value, ir.attributes[name], expr.line) + return builder.add(SetAttr(self_reg, name, value, expr.line)) + + name_reg = builder.accept(attr_name) + return builder.call_c(generic_setattr, [self_reg, name_reg, value], expr.line) diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/statement.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/statement.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..4866312 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/statement.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/statement.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/statement.py new file mode 100644 index 0000000..fdcf4f7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/statement.py @@ -0,0 +1,1279 @@ +"""Transform mypy statement ASTs to mypyc IR (Intermediate Representation). + +The top-level AST transformation logic is implemented in mypyc.irbuild.visitor +and mypyc.irbuild.builder. + +A few statements are transformed in mypyc.irbuild.function (yield, for example). 
+""" + +from __future__ import annotations + +import importlib.util +from collections.abc import Sequence +from typing import Callable + +import mypy.nodes +from mypy.nodes import ( + ARG_NAMED, + ARG_POS, + AssertStmt, + AssignmentStmt, + AwaitExpr, + Block, + BreakStmt, + ContinueStmt, + DelStmt, + Expression, + ExpressionStmt, + ForStmt, + IfStmt, + Import, + ImportAll, + ImportFrom, + ListExpr, + Lvalue, + MatchStmt, + NameExpr, + OperatorAssignmentStmt, + RaiseStmt, + ReturnStmt, + StarExpr, + StrExpr, + TempNode, + TryStmt, + TupleExpr, + TypeAliasStmt, + WhileStmt, + WithStmt, + YieldExpr, + YieldFromExpr, +) +from mypyc.common import TEMP_ATTR_NAME +from mypyc.ir.ops import ( + ERR_NEVER, + NAMESPACE_MODULE, + NO_TRACEBACK_LINE_NO, + Assign, + BasicBlock, + Branch, + Call, + InitStatic, + Integer, + LoadAddress, + LoadErrorValue, + LoadLiteral, + LoadStatic, + MethodCall, + PrimitiveDescription, + RaiseStandardError, + Register, + Return, + TupleGet, + Unborrow, + Unreachable, + Value, +) +from mypyc.ir.rtypes import ( + RInstance, + RTuple, + c_pyssize_t_rprimitive, + exc_rtuple, + is_tagged, + none_rprimitive, + object_pointer_rprimitive, + object_rprimitive, +) +from mypyc.irbuild.ast_helpers import is_borrow_friendly_expr, process_conditional +from mypyc.irbuild.builder import IRBuilder, create_type_params, int_borrow_friendly_op +from mypyc.irbuild.for_helpers import for_loop_helper +from mypyc.irbuild.generator import add_raise_exception_blocks_to_generator_class +from mypyc.irbuild.nonlocalcontrol import ( + ExceptNonlocalControl, + FinallyNonlocalControl, + TryFinallyNonlocalControl, +) +from mypyc.irbuild.prepare import GENERATOR_HELPER_NAME +from mypyc.irbuild.targets import ( + AssignmentTarget, + AssignmentTargetAttr, + AssignmentTargetIndex, + AssignmentTargetRegister, + AssignmentTargetTuple, +) +from mypyc.primitives.exc_ops import ( + error_catch_op, + exc_matches_op, + get_exc_info_op, + get_exc_value_op, + keep_propagating_op, + no_err_occurred_op, + propagate_if_error_op, + raise_exception_op, + reraise_exception_op, + restore_exc_info_op, +) +from mypyc.primitives.generic_ops import iter_op, next_raw_op, py_delattr_op +from mypyc.primitives.misc_ops import ( + check_stop_op, + coro_op, + import_from_many_op, + import_many_op, + import_op, + send_op, + set_type_alias_compute_function_op, + type_op, + yield_from_except_op, +) + +from .match import MatchVisitor + +GenFunc = Callable[[], None] +ValueGenFunc = Callable[[], Value] + + +def transform_block(builder: IRBuilder, block: Block) -> None: + if not block.is_unreachable: + builder.block_reachable_stack.append(True) + for stmt in block.body: + builder.accept(stmt) + if not builder.block_reachable_stack[-1]: + # The rest of the block is unreachable, so skip it + break + builder.block_reachable_stack.pop() + # Raise a RuntimeError if we hit a non-empty unreachable block. + # Don't complain about empty unreachable blocks, since mypy inserts + # those after `if MYPY`. + elif block.body: + builder.add( + RaiseStandardError( + RaiseStandardError.RUNTIME_ERROR, "Reached allegedly unreachable code!", block.line + ) + ) + builder.add(Unreachable()) + + +def transform_expression_stmt(builder: IRBuilder, stmt: ExpressionStmt) -> None: + if isinstance(stmt.expr, StrExpr): + # Docstring. Ignore + return + # ExpressionStmts do not need to be coerced like other Expressions, so we shouldn't + # call builder.accept here. 
+ stmt.expr.accept(builder.visitor) + builder.flush_keep_alives() + + +def transform_return_stmt(builder: IRBuilder, stmt: ReturnStmt) -> None: + if stmt.expr: + retval = builder.accept(stmt.expr) + else: + retval = builder.builder.none() + retval = builder.coerce(retval, builder.ret_types[-1], stmt.line) + builder.nonlocal_control[-1].gen_return(builder, retval, stmt.line) + + +def check_unsupported_cls_assignment(builder: IRBuilder, stmt: AssignmentStmt) -> None: + fn = builder.fn_info + method_args = fn.fitem.arg_names + if fn.name != "__new__" or len(method_args) == 0: + return + + ir = builder.get_current_class_ir() + if ir is None or ir.inherits_python or not ir.is_ext_class: + return + + cls_arg = method_args[0] + + def flatten(lvalues: list[Expression]) -> list[Expression]: + flat = [] + for lvalue in lvalues: + if isinstance(lvalue, (TupleExpr, ListExpr)): + flat += flatten(lvalue.items) + else: + flat.append(lvalue) + return flat + + lvalues = flatten(stmt.lvalues) + + for lvalue in lvalues: + if isinstance(lvalue, NameExpr) and lvalue.name == cls_arg: + # Disallowed because it could break the transformation of object.__new__ calls + # inside __new__ methods. + builder.error( + f'Assignment to argument "{cls_arg}" in "__new__" method unsupported', stmt.line + ) + + +def transform_assignment_stmt(builder: IRBuilder, stmt: AssignmentStmt) -> None: + lvalues = stmt.lvalues + assert lvalues + builder.disallow_class_assignments(lvalues, stmt.line) + check_unsupported_cls_assignment(builder, stmt) + first_lvalue = lvalues[0] + if stmt.type and isinstance(stmt.rvalue, TempNode): + # This is actually a variable annotation without initializer. Don't generate + # an assignment but we need to call get_assignment_target since it adds a + # name binding as a side effect. + builder.get_assignment_target(first_lvalue, stmt.line) + return + + # Special case multiple assignments like 'x, y = e1, e2'. + if ( + isinstance(first_lvalue, (TupleExpr, ListExpr)) + and isinstance(stmt.rvalue, (TupleExpr, ListExpr)) + and len(first_lvalue.items) == len(stmt.rvalue.items) + and all(is_simple_lvalue(item) for item in first_lvalue.items) + and len(lvalues) == 1 + ): + temps = [] + for right in stmt.rvalue.items: + rvalue_reg = builder.accept(right) + temp = Register(rvalue_reg.type) + builder.assign(temp, rvalue_reg, stmt.line) + temps.append(temp) + for left, temp in zip(first_lvalue.items, temps): + assignment_target = builder.get_assignment_target(left) + builder.assign(assignment_target, temp, stmt.line) + builder.flush_keep_alives() + return + + line = stmt.rvalue.line + rvalue_reg = builder.accept(stmt.rvalue) + + if builder.non_function_scope() and stmt.is_final_def: + builder.init_final_static(first_lvalue, rvalue_reg) + + # Special-case multiple assignments like 'x, y = expr' to reduce refcount ops. 
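+    # Each item is read as a borrowed TupleGet and converted to an owned value with
+    # Unborrow while the tuple register itself is stolen via keep_alive, avoiding a
+    # separate incref/decref pair per unpacked item.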
+ if ( + isinstance(first_lvalue, (TupleExpr, ListExpr)) + and isinstance(rvalue_reg.type, RTuple) + and len(rvalue_reg.type.types) == len(first_lvalue.items) + and len(lvalues) == 1 + and all(is_simple_lvalue(item) for item in first_lvalue.items) + and any(t.is_refcounted for t in rvalue_reg.type.types) + ): + n = len(first_lvalue.items) + borrows = [builder.add(TupleGet(rvalue_reg, i, borrow=True)) for i in range(n)] + builder.builder.keep_alive([rvalue_reg], steal=True) + for lvalue_item, rvalue_item in zip(first_lvalue.items, borrows): + rvalue_item = builder.add(Unborrow(rvalue_item)) + builder.assign(builder.get_assignment_target(lvalue_item), rvalue_item, line) + builder.flush_keep_alives() + return + + for lvalue in lvalues: + target = builder.get_assignment_target(lvalue) + builder.assign(target, rvalue_reg, line) + builder.flush_keep_alives() + + +def is_simple_lvalue(expr: Expression) -> bool: + return not isinstance(expr, (StarExpr, ListExpr, TupleExpr)) + + +def transform_operator_assignment_stmt(builder: IRBuilder, stmt: OperatorAssignmentStmt) -> None: + """Operator assignment statement such as x += 1""" + builder.disallow_class_assignments([stmt.lvalue], stmt.line) + if ( + is_tagged(builder.node_type(stmt.lvalue)) + and is_tagged(builder.node_type(stmt.rvalue)) + and stmt.op in int_borrow_friendly_op + ): + can_borrow = is_borrow_friendly_expr(builder, stmt.rvalue) and is_borrow_friendly_expr( + builder, stmt.lvalue + ) + else: + can_borrow = False + target = builder.get_assignment_target(stmt.lvalue) + target_value = builder.read(target, stmt.line, can_borrow=can_borrow) + rreg = builder.accept(stmt.rvalue, can_borrow=can_borrow) + # the Python parser strips the '=' from operator assignment statements, so re-add it + op = stmt.op + "=" + res = builder.binary_op(target_value, rreg, op, stmt.line) + # usually operator assignments are done in-place + # but when target doesn't support that we need to manually assign + builder.assign(target, res, res.line) + builder.flush_keep_alives() + + +def import_globals_id_and_name(module_id: str, as_name: str | None) -> tuple[str, str]: + """Compute names for updating the globals dict with the appropriate module. + + * For 'import foo.bar as baz' we add 'foo.bar' with the name 'baz' + * For 'import foo.bar' we add 'foo' with the name 'foo' + + Typically we then ignore these entries and access things directly + via the module static, but we will use the globals version for + modules that mypy couldn't find, since it doesn't analyze module + references from those properly.""" + if as_name: + globals_id = module_id + globals_name = as_name + else: + globals_id = globals_name = module_id.split(".")[0] + + return globals_id, globals_name + + +def transform_import(builder: IRBuilder, node: Import) -> None: + if node.is_mypy_only: + return + + # Imports (not from imports!) are processed in an odd way so they can be + # table-driven and compact. Here's how it works: + # + # Import nodes are divided in groups (in the prebuild visitor). Each group + # consists of consecutive Import nodes: + # + # import mod <| group #1 + # import mod2 | + # + # def foo() -> None: + # import mod3 <- group #2 (*) + # + # import mod4 <| group #3 + # import mod5 | + # + # Every time we encounter the first import of a group, build IR to call a + # helper function that will perform all of the group's imports in one go. + if not node.is_top_level: + # (*) Unless the import is within a function. In that case, prioritize + # speed over codesize when generating IR. 
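+        # Import each module with a direct gen_import call and record it in the globals
+        # dict under its as-name (or top-level package name) so later lookups find it.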
+ globals = builder.load_globals_dict() + for mod_id, as_name in node.ids: + builder.gen_import(mod_id, node.line) + globals_id, globals_name = import_globals_id_and_name(mod_id, as_name) + builder.gen_method_call( + globals, + "__setitem__", + [builder.load_str(globals_name), builder.get_module(globals_id, node.line)], + result_type=None, + line=node.line, + ) + return + + if node not in builder.module_import_groups: + return + + modules = [] + static_ptrs = [] + # To show the right line number on failure, we have to add the traceback + # entry within the helper function (which is admittedly ugly). To drive + # this, we need the line number corresponding to each module. + mod_lines = [] + for import_node in builder.module_import_groups[node]: + for mod_id, as_name in import_node.ids: + builder.imports[mod_id] = None + modules.append((mod_id, *import_globals_id_and_name(mod_id, as_name))) + mod_static = LoadStatic(object_rprimitive, mod_id, namespace=NAMESPACE_MODULE) + static_ptrs.append(builder.add(LoadAddress(object_pointer_rprimitive, mod_static))) + mod_lines.append(Integer(import_node.line, c_pyssize_t_rprimitive)) + + static_array_ptr = builder.builder.setup_rarray(object_pointer_rprimitive, static_ptrs) + import_line_ptr = builder.builder.setup_rarray(c_pyssize_t_rprimitive, mod_lines) + builder.call_c( + import_many_op, + [ + builder.add(LoadLiteral(tuple(modules), object_rprimitive)), + static_array_ptr, + builder.load_globals_dict(), + builder.load_str(builder.module_path), + builder.load_str(builder.fn_info.name), + import_line_ptr, + ], + NO_TRACEBACK_LINE_NO, + ) + + +def transform_import_from(builder: IRBuilder, node: ImportFrom) -> None: + if node.is_mypy_only: + return + + module_state = builder.graph[builder.module_name] + if builder.module_path.endswith("__init__.py"): + module_package = builder.module_name + elif module_state.ancestors is not None and module_state.ancestors: + module_package = module_state.ancestors[0] + else: + module_package = "" + + id = importlib.util.resolve_name("." * node.relative + node.id, module_package) + builder.imports[id] = None + + names = [name for name, _ in node.names] + as_names = [as_name or name for name, as_name in node.names] + names_literal = builder.add(LoadLiteral(tuple(names), object_rprimitive)) + if as_names == names: + # Reuse names tuple to reduce verbosity. + as_names_literal = names_literal + else: + as_names_literal = builder.add(LoadLiteral(tuple(as_names), object_rprimitive)) + # Note that we miscompile import from inside of functions here, + # since that case *shouldn't* load everything into the globals dict. + # This probably doesn't matter much and the code runs basically right. 
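+    # import_from_many_op imports the module and binds the requested names via the
+    # globals dict in a single helper call; the returned module is then cached in the
+    # module static below.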
+ module = builder.call_c( + import_from_many_op, + [builder.load_str(id), names_literal, as_names_literal, builder.load_globals_dict()], + node.line, + ) + builder.add(InitStatic(module, id, namespace=NAMESPACE_MODULE)) + + +def transform_import_all(builder: IRBuilder, node: ImportAll) -> None: + if node.is_mypy_only: + return + builder.gen_import(node.id, node.line) + + +def transform_if_stmt(builder: IRBuilder, stmt: IfStmt) -> None: + if_body, next = BasicBlock(), BasicBlock() + else_body = BasicBlock() if stmt.else_body else next + + # If statements are normalized + assert len(stmt.expr) == 1 + + process_conditional(builder, stmt.expr[0], if_body, else_body) + builder.activate_block(if_body) + builder.accept(stmt.body[0]) + builder.goto(next) + if stmt.else_body: + builder.activate_block(else_body) + builder.accept(stmt.else_body) + builder.goto(next) + builder.activate_block(next) + + +def transform_while_stmt(builder: IRBuilder, s: WhileStmt) -> None: + body, next, top, else_block = BasicBlock(), BasicBlock(), BasicBlock(), BasicBlock() + normal_loop_exit = else_block if s.else_body is not None else next + + builder.push_loop_stack(top, next) + + # Split block so that we get a handle to the top of the loop. + builder.goto_and_activate(top) + process_conditional(builder, s.expr, body, normal_loop_exit) + + builder.activate_block(body) + builder.accept(s.body) + # Add branch to the top at the end of the body. + builder.goto(top) + + builder.pop_loop_stack() + + if s.else_body is not None: + builder.activate_block(else_block) + builder.accept(s.else_body) + builder.goto(next) + + builder.activate_block(next) + + +def transform_for_stmt(builder: IRBuilder, s: ForStmt) -> None: + def body() -> None: + builder.accept(s.body) + + def else_block() -> None: + assert s.else_body is not None + builder.accept(s.else_body) + + for_loop_helper( + builder, s.index, s.expr, body, else_block if s.else_body else None, s.is_async, s.line + ) + + +def transform_break_stmt(builder: IRBuilder, node: BreakStmt) -> None: + builder.nonlocal_control[-1].gen_break(builder, node.line) + + +def transform_continue_stmt(builder: IRBuilder, node: ContinueStmt) -> None: + builder.nonlocal_control[-1].gen_continue(builder, node.line) + + +def transform_raise_stmt(builder: IRBuilder, s: RaiseStmt) -> None: + if s.expr is None: + builder.call_c(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) + builder.add(Unreachable()) + return + + exc = builder.accept(s.expr) + builder.call_c(raise_exception_op, [exc], s.line) + builder.add(Unreachable()) + + +def transform_try_except( + builder: IRBuilder, + body: GenFunc, + handlers: Sequence[tuple[tuple[ValueGenFunc, int] | None, Expression | None, GenFunc]], + else_body: GenFunc | None, + line: int, +) -> None: + """Generalized try/except/else handling that takes functions to gen the bodies. + + The point of this is to also be able to support with.""" + assert handlers, "try needs except" + + except_entry, exit_block, cleanup_block = BasicBlock(), BasicBlock(), BasicBlock() + double_except_block = BasicBlock() + # If there is an else block, jump there after the try, otherwise just leave + else_block = BasicBlock() if else_body else exit_block + + # Compile the try block with an error handler + builder.builder.push_error_handler(except_entry) + builder.goto_and_activate(BasicBlock()) + body() + builder.goto(else_block) + builder.builder.pop_error_handler() + + # The error handler catches the error and then checks it + # against the except clauses. 
We compile the error handler + # itself with an error handler so that it can properly restore + # the *old* exc_info if an exception occurs. + # The exception chaining will be done automatically when the + # exception is raised, based on the exception in exc_info. + builder.builder.push_error_handler(double_except_block) + builder.activate_block(except_entry) + old_exc = builder.maybe_spill(builder.call_c(error_catch_op, [], line)) + # Compile the except blocks with the nonlocal control flow overridden to clear exc_info + builder.nonlocal_control.append(ExceptNonlocalControl(builder.nonlocal_control[-1], old_exc)) + + # Process the bodies + for type, var, handler_body in handlers: + next_block = None + if type: + type_f, type_line = type + next_block, body_block = BasicBlock(), BasicBlock() + matches = builder.call_c(exc_matches_op, [type_f()], type_line) + builder.add(Branch(matches, body_block, next_block, Branch.BOOL)) + builder.activate_block(body_block) + if var: + target = builder.get_assignment_target(var) + builder.assign(target, builder.call_c(get_exc_value_op, [], var.line), var.line) + handler_body() + builder.goto(cleanup_block) + if next_block: + builder.activate_block(next_block) + + # Reraise the exception if needed + if next_block: + builder.call_c(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) + builder.add(Unreachable()) + + builder.nonlocal_control.pop() + builder.builder.pop_error_handler() + + # Cleanup for if we leave except through normal control flow: + # restore the saved exc_info information and continue propagating + # the exception if it exists. + builder.activate_block(cleanup_block) + builder.call_c(restore_exc_info_op, [builder.read(old_exc)], line) + builder.goto(exit_block) + + # Cleanup for if we leave except through a raised exception: + # restore the saved exc_info information and continue propagating + # the exception. 
+ builder.activate_block(double_except_block) + builder.call_c(restore_exc_info_op, [builder.read(old_exc)], line) + builder.call_c(keep_propagating_op, [], NO_TRACEBACK_LINE_NO) + builder.add(Unreachable()) + + # If present, compile the else body in the obvious way + if else_body: + builder.activate_block(else_block) + else_body() + builder.goto(exit_block) + + builder.activate_block(exit_block) + + +def transform_try_except_stmt(builder: IRBuilder, t: TryStmt) -> None: + def body() -> None: + builder.accept(t.body) + + # Work around scoping woes + def make_handler(body: Block) -> GenFunc: + return lambda: builder.accept(body) + + def make_entry(type: Expression) -> tuple[ValueGenFunc, int]: + return (lambda: builder.accept(type), type.line) + + handlers = [ + (make_entry(type) if type else None, var, make_handler(body)) + for type, var, body in zip(t.types, t.vars, t.handlers) + ] + else_body = (lambda: builder.accept(t.else_body)) if t.else_body else None + transform_try_except(builder, body, handlers, else_body, t.line) + + +def try_finally_try( + builder: IRBuilder, + err_handler: BasicBlock, + return_entry: BasicBlock, + main_entry: BasicBlock, + try_body: GenFunc, +) -> Register | AssignmentTarget | None: + # Compile the try block with an error handler + control = TryFinallyNonlocalControl(return_entry) + builder.builder.push_error_handler(err_handler) + + builder.nonlocal_control.append(control) + builder.goto_and_activate(BasicBlock()) + try_body() + builder.goto(main_entry) + builder.nonlocal_control.pop() + builder.builder.pop_error_handler() + + return control.ret_reg + + +def try_finally_entry_blocks( + builder: IRBuilder, + err_handler: BasicBlock, + return_entry: BasicBlock, + main_entry: BasicBlock, + finally_block: BasicBlock, + ret_reg: Register | AssignmentTarget | None, +) -> Value: + old_exc = Register(exc_rtuple) + + # Entry block for non-exceptional flow + builder.activate_block(main_entry) + if ret_reg: + builder.assign(ret_reg, builder.add(LoadErrorValue(builder.ret_types[-1])), -1) + builder.goto(return_entry) + + builder.activate_block(return_entry) + builder.add(Assign(old_exc, builder.add(LoadErrorValue(exc_rtuple)))) + builder.goto(finally_block) + + # Entry block for errors + builder.activate_block(err_handler) + if ret_reg: + builder.assign(ret_reg, builder.add(LoadErrorValue(builder.ret_types[-1])), -1) + builder.add(Assign(old_exc, builder.call_c(error_catch_op, [], -1))) + builder.goto(finally_block) + + return old_exc + + +def try_finally_body( + builder: IRBuilder, finally_block: BasicBlock, finally_body: GenFunc, old_exc: Value +) -> tuple[BasicBlock, FinallyNonlocalControl]: + cleanup_block = BasicBlock() + # Compile the finally block with the nonlocal control flow overridden to restore exc_info + builder.builder.push_error_handler(cleanup_block) + finally_control = FinallyNonlocalControl(builder.nonlocal_control[-1], old_exc) + builder.nonlocal_control.append(finally_control) + builder.activate_block(finally_block) + finally_body() + builder.nonlocal_control.pop() + + return cleanup_block, finally_control + + +def try_finally_resolve_control( + builder: IRBuilder, + cleanup_block: BasicBlock, + finally_control: FinallyNonlocalControl, + old_exc: Value, + ret_reg: Register | AssignmentTarget | None, +) -> BasicBlock: + """Resolve the control flow out of a finally block. + + This means returning if there was a return, propagating + exceptions, break/continue (soon), or just continuing on. 
+ """ + reraise, rest = BasicBlock(), BasicBlock() + builder.add(Branch(old_exc, rest, reraise, Branch.IS_ERROR)) + + # Reraise the exception if there was one + builder.activate_block(reraise) + builder.call_c(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) + builder.add(Unreachable()) + builder.builder.pop_error_handler() + + # If there was a return, keep returning + if ret_reg: + builder.activate_block(rest) + return_block, rest = BasicBlock(), BasicBlock() + # For spill targets in try/finally, use nullable read to avoid AttributeError + if isinstance(ret_reg, AssignmentTargetAttr) and ret_reg.attr.startswith(TEMP_ATTR_NAME): + ret_val = builder.read_nullable_attr(ret_reg.obj, ret_reg.attr, -1) + else: + ret_val = builder.read(ret_reg) + builder.add(Branch(ret_val, rest, return_block, Branch.IS_ERROR)) + + builder.activate_block(return_block) + builder.nonlocal_control[-1].gen_return(builder, ret_val, -1) + + # TODO: handle break/continue + builder.activate_block(rest) + out_block = BasicBlock() + builder.goto(out_block) + + # If there was an exception, restore again + builder.activate_block(cleanup_block) + finally_control.gen_cleanup(builder, -1) + builder.call_c(keep_propagating_op, [], NO_TRACEBACK_LINE_NO) + builder.add(Unreachable()) + + return out_block + + +def transform_try_finally_stmt( + builder: IRBuilder, try_body: GenFunc, finally_body: GenFunc, line: int = -1 +) -> None: + """Generalized try/finally handling that takes functions to gen the bodies. + + The point of this is to also be able to support with.""" + # Finally is a big pain, because there are so many ways that + # exits can occur. We emit 10+ basic blocks for every finally! + + err_handler, main_entry, return_entry, finally_block = ( + BasicBlock(), + BasicBlock(), + BasicBlock(), + BasicBlock(), + ) + + # Compile the body of the try + ret_reg = try_finally_try(builder, err_handler, return_entry, main_entry, try_body) + + # Set up the entry blocks for the finally statement + old_exc = try_finally_entry_blocks( + builder, err_handler, return_entry, main_entry, finally_block, ret_reg + ) + + # Compile the body of the finally + cleanup_block, finally_control = try_finally_body( + builder, finally_block, finally_body, old_exc + ) + + # Resolve the control flow out of the finally block + out_block = try_finally_resolve_control( + builder, cleanup_block, finally_control, old_exc, ret_reg + ) + + builder.activate_block(out_block) + + +def transform_try_finally_stmt_async( + builder: IRBuilder, try_body: GenFunc, finally_body: GenFunc, line: int = -1 +) -> None: + """Async-aware try/finally handling for when finally contains await. 
+ + This version uses a modified approach that preserves exceptions across await.""" + + # We need to handle returns properly, so we'll use TryFinallyNonlocalControl + # to track return values, similar to the regular try/finally implementation + + err_handler, main_entry, return_entry, finally_entry = ( + BasicBlock(), + BasicBlock(), + BasicBlock(), + BasicBlock(), + ) + + # Track if we're returning from the try block + control = TryFinallyNonlocalControl(return_entry) + builder.builder.push_error_handler(err_handler) + builder.nonlocal_control.append(control) + builder.goto_and_activate(BasicBlock()) + try_body() + builder.goto(main_entry) + builder.nonlocal_control.pop() + builder.builder.pop_error_handler() + ret_reg = control.ret_reg + + # Normal case - no exception or return + builder.activate_block(main_entry) + builder.goto(finally_entry) + + # Return case + builder.activate_block(return_entry) + builder.goto(finally_entry) + + # Exception case - need to catch to clear the error indicator + builder.activate_block(err_handler) + # Catch the error to clear Python's error indicator + builder.call_c(error_catch_op, [], line) + # We're not going to use old_exc since it won't survive await + # The exception is now in sys.exc_info() + builder.goto(finally_entry) + + # Finally block + builder.activate_block(finally_entry) + + # Execute finally body + finally_body() + + # After finally, we need to handle exceptions carefully: + # 1. If finally raised a new exception, it's in the error indicator - let it propagate + # 2. If finally didn't raise, check if we need to reraise the original from sys.exc_info() + # 3. If there was a return, return that value + # 4. Otherwise, normal exit + + # First, check if there's a current exception in the error indicator + # (this would be from the finally block) + no_current_exc = builder.call_c(no_err_occurred_op, [], line) + finally_raised = BasicBlock() + check_original = BasicBlock() + builder.add(Branch(no_current_exc, check_original, finally_raised, Branch.BOOL)) + + # Finally raised an exception - let it propagate naturally + builder.activate_block(finally_raised) + builder.call_c(keep_propagating_op, [], NO_TRACEBACK_LINE_NO) + builder.add(Unreachable()) + + # No exception from finally, check if we need to handle return or original exception + builder.activate_block(check_original) + + # Check if we have a return value + if ret_reg: + return_block, check_old_exc = BasicBlock(), BasicBlock() + builder.add( + Branch( + builder.read(ret_reg, allow_error_value=True), + check_old_exc, + return_block, + Branch.IS_ERROR, + ) + ) + + builder.activate_block(return_block) + builder.nonlocal_control[-1].gen_return(builder, builder.read(ret_reg), -1) + + builder.activate_block(check_old_exc) + + # Check if we need to reraise the original exception from sys.exc_info + exc_info = builder.call_c(get_exc_info_op, [], line) + exc_type = builder.add(TupleGet(exc_info, 0, line)) + + # Check if exc_type is None + none_obj = builder.none_object() + has_exc = builder.binary_op(exc_type, none_obj, "is not", line) + + reraise_block, exit_block = BasicBlock(), BasicBlock() + builder.add(Branch(has_exc, reraise_block, exit_block, Branch.BOOL)) + + # Reraise the original exception + builder.activate_block(reraise_block) + builder.call_c(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) + builder.add(Unreachable()) + + # Normal exit + builder.activate_block(exit_block) + + +# A simple visitor to detect await expressions +class AwaitDetector(mypy.traverser.TraverserVisitor): + def 
__init__(self) -> None: + super().__init__() + self.has_await = False + + def visit_await_expr(self, o: mypy.nodes.AwaitExpr) -> None: + self.has_await = True + super().visit_await_expr(o) + + +def transform_try_stmt(builder: IRBuilder, t: TryStmt) -> None: + # Our compilation strategy for try/except/else/finally is to + # treat try/except/else and try/finally as separate language + # constructs that we compile separately. When we have a + # try/except/else/finally, we treat the try/except/else as the + # body of a try/finally block. + if t.is_star: + builder.error("Exception groups and except* cannot be compiled yet", t.line) + + # Check if we're in an async function with a finally block that contains await + use_async_version = False + if t.finally_body and builder.fn_info.is_coroutine: + detector = AwaitDetector() + t.finally_body.accept(detector) + + if detector.has_await: + # Use the async version that handles exceptions correctly + use_async_version = True + + if t.finally_body: + + def transform_try_body() -> None: + if t.handlers: + transform_try_except_stmt(builder, t) + else: + builder.accept(t.body) + + body = t.finally_body + + if use_async_version: + transform_try_finally_stmt_async( + builder, transform_try_body, lambda: builder.accept(body), t.line + ) + else: + transform_try_finally_stmt( + builder, transform_try_body, lambda: builder.accept(body), t.line + ) + else: + transform_try_except_stmt(builder, t) + + +def get_sys_exc_info(builder: IRBuilder) -> list[Value]: + exc_info = builder.call_c(get_exc_info_op, [], -1) + return [builder.add(TupleGet(exc_info, i, -1)) for i in range(3)] + + +def transform_with( + builder: IRBuilder, + expr: Expression, + target: Lvalue | None, + body: GenFunc, + is_async: bool, + line: int, +) -> None: + # This is basically a straight transcription of the Python code in PEP 343. + # I don't actually understand why a bunch of it is the way it is. + # We could probably optimize the case where the manager is compiled by us, + # but that is not our common case at all, so. 
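+    #
+    # For reference, this roughly follows the Python-level expansion given in
+    # PEP 343 (EXPR, VAR and BODY below are placeholders, not names used here;
+    # the async form awaits the __aenter__/__aexit__ calls):
+    #
+    #     mgr = (EXPR)
+    #     exit = type(mgr).__exit__
+    #     value = type(mgr).__enter__(mgr)
+    #     exc = True
+    #     try:
+    #         try:
+    #             VAR = value        # only if there is an "as VAR" clause
+    #             BODY
+    #         except:
+    #             exc = False
+    #             if not exit(mgr, *sys.exc_info()):
+    #                 raise
+    #     finally:
+    #         if exc:
+    #             exit(mgr, None, None, None)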
+ + al = "a" if is_async else "" + + mgr_v = builder.accept(expr) + is_native = isinstance(mgr_v.type, RInstance) + if is_native: + value = builder.add(MethodCall(mgr_v, f"__{al}enter__", args=[], line=line)) + exit_ = None + else: + typ = builder.primitive_op(type_op, [mgr_v], line) + exit_ = builder.maybe_spill(builder.py_get_attr(typ, f"__{al}exit__", line)) + value = builder.py_call(builder.py_get_attr(typ, f"__{al}enter__", line), [mgr_v], line) + + mgr = builder.maybe_spill(mgr_v) + exc = builder.maybe_spill_assignable(builder.true()) + if is_async: + value = emit_await(builder, value, line) + + def maybe_natively_call_exit(exc_info: bool) -> Value: + if exc_info: + args = get_sys_exc_info(builder) + else: + none = builder.none_object() + args = [none, none, none] + + if is_native: + assert isinstance(mgr_v.type, RInstance), mgr_v.type + exit_val = builder.gen_method_call( + builder.read(mgr), + f"__{al}exit__", + arg_values=args, + line=line, + result_type=none_rprimitive, + ) + else: + assert exit_ is not None + exit_val = builder.py_call(builder.read(exit_), [builder.read(mgr)] + args, line) + + if is_async: + return emit_await(builder, exit_val, line) + else: + return exit_val + + def try_body() -> None: + if target: + builder.assign(builder.get_assignment_target(target), value, line) + body() + + def except_body() -> None: + builder.assign(exc, builder.false(), line) + out_block, reraise_block = BasicBlock(), BasicBlock() + builder.add_bool_branch(maybe_natively_call_exit(exc_info=True), out_block, reraise_block) + builder.activate_block(reraise_block) + builder.call_c(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) + builder.add(Unreachable()) + builder.activate_block(out_block) + + def finally_body() -> None: + out_block, exit_block = BasicBlock(), BasicBlock() + builder.add(Branch(builder.read(exc), exit_block, out_block, Branch.BOOL)) + builder.activate_block(exit_block) + + maybe_natively_call_exit(exc_info=False) + builder.goto_and_activate(out_block) + + transform_try_finally_stmt( + builder, + lambda: transform_try_except(builder, try_body, [(None, None, except_body)], None, line), + finally_body, + line, + ) + + +def transform_with_stmt(builder: IRBuilder, o: WithStmt) -> None: + # Generate separate logic for each expr in it, left to right + def generate(i: int) -> None: + if i >= len(o.expr): + builder.accept(o.body) + else: + transform_with( + builder, o.expr[i], o.target[i], lambda: generate(i + 1), o.is_async, o.line + ) + + generate(0) + + +def transform_assert_stmt(builder: IRBuilder, a: AssertStmt) -> None: + if builder.options.strip_asserts: + return + cond = builder.accept(a.expr) + ok_block, error_block = BasicBlock(), BasicBlock() + builder.add_bool_branch(cond, ok_block, error_block) + builder.activate_block(error_block) + if a.msg is None: + # Special case (for simpler generated code) + builder.add(RaiseStandardError(RaiseStandardError.ASSERTION_ERROR, None, a.line)) + elif isinstance(a.msg, StrExpr): + # Another special case + builder.add(RaiseStandardError(RaiseStandardError.ASSERTION_ERROR, a.msg.value, a.line)) + else: + # The general case -- explicitly construct an exception instance + message = builder.accept(a.msg) + exc_type = builder.load_module_attr_by_fullname("builtins.AssertionError", a.line) + exc = builder.py_call(exc_type, [message], a.line) + builder.call_c(raise_exception_op, [exc], a.line) + builder.add(Unreachable()) + builder.activate_block(ok_block) + + +def transform_del_stmt(builder: IRBuilder, o: DelStmt) -> None: + 
transform_del_item(builder, builder.get_assignment_target(o.expr), o.line)
+
+
+def transform_del_item(builder: IRBuilder, target: AssignmentTarget, line: int) -> None:
+    if isinstance(target, AssignmentTargetIndex):
+        builder.gen_method_call(
+            target.base, "__delitem__", [target.index], result_type=None, line=line
+        )
+    elif isinstance(target, AssignmentTargetAttr):
+        if isinstance(target.obj_type, RInstance):
+            cl = target.obj_type.class_ir
+            if not cl.is_deletable(target.attr):
+                builder.error(f'"{target.attr}" cannot be deleted', line)
+                builder.note(
+                    'Using "__deletable__ = '
+                    '[\'<attr>\']" in the class body enables "del obj.<attr>"',
+                    line,
+                )
+        key = builder.load_str(target.attr)
+        builder.primitive_op(py_delattr_op, [target.obj, key], line)
+    elif isinstance(target, AssignmentTargetRegister):
+        # Delete a local by assigning an error value to it, which will
+        # prompt the insertion of uninit checks.
+        builder.add(
+            Assign(target.register, builder.add(LoadErrorValue(target.type, undefines=True)))
+        )
+    elif isinstance(target, AssignmentTargetTuple):
+        for subtarget in target.items:
+            transform_del_item(builder, subtarget, line)
+
+
+# yield/yield from/await
+
+# These are really expressions, not statements... but they depend on try/except/finally
+
+
+def emit_yield(builder: IRBuilder, val: Value, line: int) -> Value:
+    retval = builder.coerce(val, builder.ret_types[-1], line)
+
+    cls = builder.fn_info.generator_class
+    # Create a new block for the instructions immediately following the yield expression, and
+    # set the next label so that the next time '__next__' is called on the generator object,
+    # the function continues at the new block.
+    next_block = BasicBlock()
+    next_label = len(cls.continuation_blocks)
+    cls.continuation_blocks.append(next_block)
+    builder.assign(cls.next_label_target, Integer(next_label), line)
+    builder.add(Return(retval, yield_target=next_block))
+    builder.activate_block(next_block)
+
+    add_raise_exception_blocks_to_generator_class(builder, line)
+
+    assert cls.send_arg_reg is not None
+    return cls.send_arg_reg
+
+
+def emit_yield_from_or_await(
+    builder: IRBuilder, val: Value, line: int, *, is_await: bool
+) -> Value:
+    # This is basically an implementation of the code in PEP 380.
+
+    # TODO: do we want to use the right types here?
+    result = Register(object_rprimitive)
+    to_yield_reg = Register(object_rprimitive)
+    received_reg = Register(object_rprimitive)
+
+    helper_method = GENERATOR_HELPER_NAME
+    if (
+        isinstance(val, (Call, MethodCall))
+        and isinstance(val.type, RInstance)
+        and val.type.class_ir.has_method(helper_method)
+    ):
+        # This is a generated native generator class, and we can use a fast path.
+        # This allows two optimizations:
+        # 1) No need to call CPy_GetCoro() or iter() since for native generators
+        #    it just returns the generator object (implemented here).
+        # 2) Instead of calling next(), call generator helper method directly,
+        #    since next() just calls __next__ which calls the helper method.
+ iter_val: Value = val + else: + get_op = coro_op if is_await else iter_op + if isinstance(get_op, PrimitiveDescription): + iter_val = builder.primitive_op(get_op, [val], line) + else: + iter_val = builder.call_c(get_op, [val], line) + + iter_reg = builder.maybe_spill_assignable(iter_val) + + stop_block, main_block, done_block = BasicBlock(), BasicBlock(), BasicBlock() + + if isinstance(iter_reg.type, RInstance) and iter_reg.type.class_ir.has_method(helper_method): + # Second fast path optimization: call helper directly (see also comment above). + # + # Calling a generated generator, so avoid raising StopIteration by passing + # an extra PyObject ** argument to helper where the stop iteration value is stored. + fast_path = True + obj = builder.read(iter_reg) + nn = builder.none_object() + stop_iter_val = Register(object_rprimitive) + err = builder.add(LoadErrorValue(object_rprimitive, undefines=True)) + builder.assign(stop_iter_val, err, line) + ptr = builder.add(LoadAddress(object_pointer_rprimitive, stop_iter_val)) + m = MethodCall(obj, helper_method, [nn, nn, nn, nn, ptr], line) + # Generators have custom error handling, so disable normal error handling. + m.error_kind = ERR_NEVER + _y_init = builder.add(m) + else: + fast_path = False + _y_init = builder.call_c(next_raw_op, [builder.read(iter_reg)], line) + + builder.add(Branch(_y_init, stop_block, main_block, Branch.IS_ERROR)) + + builder.activate_block(stop_block) + if fast_path: + builder.primitive_op(propagate_if_error_op, [stop_iter_val], line) + builder.assign(result, stop_iter_val, line) + else: + # Try extracting a return value from a StopIteration and return it. + # If it wasn't, this reraises the exception. + builder.assign(result, builder.call_c(check_stop_op, [], line), line) + # Clear the spilled iterator/coroutine so that it will be freed. + # Otherwise, the freeing of the spilled register would likely be delayed. + err = builder.add(LoadErrorValue(iter_reg.type)) + builder.assign(iter_reg, err, line) + builder.goto(done_block) + + builder.activate_block(main_block) + builder.assign(to_yield_reg, _y_init, line) + + # OK Now the main loop! + loop_block = BasicBlock() + builder.goto_and_activate(loop_block) + + def try_body() -> None: + builder.assign(received_reg, emit_yield(builder, builder.read(to_yield_reg), line), line) + + def except_body() -> None: + # The body of the except is all implemented in a C function to + # reduce how much code we need to generate. It returns a value + # indicating whether to break or yield (or raise an exception). + val = Register(object_rprimitive) + val_address = builder.add(LoadAddress(object_pointer_rprimitive, val)) + to_stop = builder.call_c(yield_from_except_op, [builder.read(iter_reg), val_address], line) + + ok, stop = BasicBlock(), BasicBlock() + builder.add(Branch(to_stop, stop, ok, Branch.BOOL)) + + # The exception got swallowed. Continue, yielding the returned value + builder.activate_block(ok) + builder.assign(to_yield_reg, val, line) + builder.nonlocal_control[-1].gen_continue(builder, line) + + # The exception was a StopIteration. Stop iterating. + builder.activate_block(stop) + builder.assign(result, val, line) + builder.nonlocal_control[-1].gen_break(builder, line) + + def else_body() -> None: + # Do a next() or a .send(). It will return NULL on exception + # but it won't automatically propagate. 
+        _y = builder.call_c(send_op, [builder.read(iter_reg), builder.read(received_reg)], line)
+        ok, stop = BasicBlock(), BasicBlock()
+        builder.add(Branch(_y, stop, ok, Branch.IS_ERROR))
+
+        # Everything's fine. Yield it.
+        builder.activate_block(ok)
+        builder.assign(to_yield_reg, _y, line)
+        builder.nonlocal_control[-1].gen_continue(builder, line)
+
+        # Try extracting a return value from a StopIteration and return it.
+        # If it wasn't, this reraises the exception.
+        builder.activate_block(stop)
+        builder.assign(result, builder.call_c(check_stop_op, [], line), line)
+        builder.nonlocal_control[-1].gen_break(builder, line)
+
+    builder.push_loop_stack(loop_block, done_block)
+    transform_try_except(builder, try_body, [(None, None, except_body)], else_body, line)
+    builder.pop_loop_stack()
+
+    builder.goto_and_activate(done_block)
+    return builder.read(result)
+
+
+def emit_await(builder: IRBuilder, val: Value, line: int) -> Value:
+    return emit_yield_from_or_await(builder, val, line, is_await=True)
+
+
+def transform_yield_expr(builder: IRBuilder, expr: YieldExpr) -> Value:
+    if builder.fn_info.is_coroutine:
+        builder.error("async generators are unimplemented", expr.line)
+
+    if expr.expr:
+        retval = builder.accept(expr.expr)
+    else:
+        retval = builder.builder.none()
+    return emit_yield(builder, retval, expr.line)
+
+
+def transform_yield_from_expr(builder: IRBuilder, o: YieldFromExpr) -> Value:
+    return emit_yield_from_or_await(builder, builder.accept(o.expr), o.line, is_await=False)
+
+
+def transform_await_expr(builder: IRBuilder, o: AwaitExpr) -> Value:
+    return emit_yield_from_or_await(builder, builder.accept(o.expr), o.line, is_await=True)
+
+
+def transform_match_stmt(builder: IRBuilder, m: MatchStmt) -> None:
+    m.accept(MatchVisitor(builder, m))
+
+
+def transform_type_alias_stmt(builder: IRBuilder, s: TypeAliasStmt) -> None:
+    line = s.line
+    # Use "_typing" to avoid importing "typing", as the latter can be expensive.
+    # "_typing" includes everything we need here.
+    mod = builder.call_c(import_op, [builder.load_str("_typing")], line)
+    type_params = create_type_params(builder, mod, s.type_args, s.line)
+
+    type_alias_type = builder.py_get_attr(mod, "TypeAliasType", line)
+    args = [builder.load_str(s.name.name), builder.none()]
+    arg_names: list[str | None] = [None, None]
+    arg_kinds = [ARG_POS, ARG_POS]
+    if s.type_args:
+        args.append(builder.new_tuple(type_params, line))
+        arg_names.append("type_params")
+        arg_kinds.append(ARG_NAMED)
+    alias = builder.py_call(type_alias_type, args, line, arg_names=arg_names, arg_kinds=arg_kinds)
+
+    # Use primitive to set function used to lazily compute type alias type value.
+    # The value needs to be lazily computed to match Python runtime behavior, but
+    # Python public APIs don't support this, so we use a C primitive.
+ compute_fn = s.value.accept(builder.visitor) + builder.builder.primitive_op(set_type_alias_compute_function_op, [alias, compute_fn], line) + + target = builder.get_assignment_target(s.name) + builder.assign(target, alias, line) diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/targets.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/targets.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..ee8929c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/targets.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/targets.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/targets.py new file mode 100644 index 0000000..8bc9da0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/targets.py @@ -0,0 +1,70 @@ +from __future__ import annotations + +from mypyc.ir.ops import Register, Value +from mypyc.ir.rtypes import RInstance, RType, object_rprimitive + + +class AssignmentTarget: + """Abstract base class for assignment targets during IR building.""" + + type: RType = object_rprimitive + + +class AssignmentTargetRegister(AssignmentTarget): + """Register as an assignment target. + + This is used for local variables and some temporaries. + """ + + def __init__(self, register: Register) -> None: + self.register = register + self.type = register.type + + def __repr__(self) -> str: + return f"AssignmentTargetRegister({self.register.name})" + + +class AssignmentTargetIndex(AssignmentTarget): + """base[index] as assignment target""" + + def __init__(self, base: Value, index: Value) -> None: + self.base = base + self.index = index + # TODO: object_rprimitive won't be right for user-defined classes. Store the + # lvalue type in mypy and use a better type to avoid unneeded boxing. 
+ self.type = object_rprimitive + + def __repr__(self) -> str: + return f"AssignmentTargetIndex({self.base!r}, {self.index!r})" + + +class AssignmentTargetAttr(AssignmentTarget): + """obj.attr as assignment target""" + + def __init__(self, obj: Value, attr: str, can_borrow: bool = False) -> None: + self.obj = obj + self.attr = attr + self.can_borrow = can_borrow + if isinstance(obj.type, RInstance) and obj.type.class_ir.has_attr(attr): + # Native attribute reference + self.obj_type: RType = obj.type + self.type = obj.type.attr_type(attr) + else: + # Python attribute reference + self.obj_type = object_rprimitive + self.type = object_rprimitive + + def __repr__(self) -> str: + can_borrow_str = ", can_borrow=True" if self.can_borrow else "" + return f"AssignmentTargetAttr({self.obj!r}.{self.attr}{can_borrow_str})" + + +class AssignmentTargetTuple(AssignmentTarget): + """x, ..., y as assignment target""" + + def __init__(self, items: list[AssignmentTarget], star_idx: int | None = None) -> None: + self.items = items + self.star_idx = star_idx + + def __repr__(self) -> str: + return f"AssignmentTargetTuple({self.items}, {self.star_idx})" diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/util.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/util.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..271fe73 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/util.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/util.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/util.py new file mode 100644 index 0000000..3028e94 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/util.py @@ -0,0 +1,305 @@ +"""Various utilities that don't depend on other modules in mypyc.irbuild.""" + +from __future__ import annotations + +from typing import Any, Final, Literal, TypedDict, cast +from typing_extensions import NotRequired + +from mypy.nodes import ( + ARG_NAMED, + ARG_NAMED_OPT, + ARG_OPT, + ARG_POS, + GDEF, + ArgKind, + BytesExpr, + CallExpr, + ClassDef, + Decorator, + Expression, + FloatExpr, + FuncDef, + IntExpr, + NameExpr, + OverloadedFuncDef, + RefExpr, + StrExpr, + TupleExpr, + UnaryExpr, + Var, +) +from mypy.semanal import refers_to_fullname +from mypy.types import FINAL_DECORATOR_NAMES +from mypyc.errors import Errors + +MYPYC_ATTRS: Final[frozenset[MypycAttr]] = frozenset( + ["native_class", "allow_interpreted_subclasses", "serializable", "free_list_len"] +) + +DATACLASS_DECORATORS: Final = frozenset(["dataclasses.dataclass", "attr.s", "attr.attrs"]) + + +MypycAttr = Literal[ + "native_class", "allow_interpreted_subclasses", "serializable", "free_list_len" +] + + +class MypycAttrs(TypedDict): + native_class: NotRequired[bool] + allow_interpreted_subclasses: NotRequired[bool] + serializable: NotRequired[bool] + free_list_len: NotRequired[int] + + +def is_final_decorator(d: Expression) -> bool: + return refers_to_fullname(d, FINAL_DECORATOR_NAMES) + + +def is_trait_decorator(d: Expression) -> bool: + return isinstance(d, RefExpr) and d.fullname == "mypy_extensions.trait" + + +def is_trait(cdef: ClassDef) -> bool: + return any(is_trait_decorator(d) for d in cdef.decorators) or cdef.info.is_protocol + + +def dataclass_decorator_type(d: Expression) -> str | None: + if isinstance(d, RefExpr) and d.fullname in DATACLASS_DECORATORS: + return d.fullname.split(".")[0] + elif ( + isinstance(d, CallExpr) + and isinstance(d.callee, RefExpr) + and 
d.callee.fullname in DATACLASS_DECORATORS + ): + name = d.callee.fullname.split(".")[0] + if name == "attr" and "auto_attribs" in d.arg_names: + # Note: the mypy attrs plugin checks that the value of auto_attribs is + # not computed at runtime, so we don't need to perform that check here + auto = d.args[d.arg_names.index("auto_attribs")] + if isinstance(auto, NameExpr) and auto.name == "True": + return "attr-auto" + return name + else: + return None + + +def is_dataclass_decorator(d: Expression) -> bool: + return dataclass_decorator_type(d) is not None + + +def is_dataclass(cdef: ClassDef) -> bool: + return any(is_dataclass_decorator(d) for d in cdef.decorators) + + +# The string values returned by this function are inspected in +# mypyc/lib-rt/misc_ops.c:CPyDataclass_SleightOfHand(...). +def dataclass_type(cdef: ClassDef) -> str | None: + for d in cdef.decorators: + typ = dataclass_decorator_type(d) + if typ is not None: + return typ + return None + + +def get_mypyc_attr_literal(e: Expression) -> Any: + """Convert an expression from a mypyc_attr decorator to a value. + + Supports a pretty limited range.""" + if isinstance(e, (StrExpr, IntExpr, FloatExpr)): + return e.value + elif isinstance(e, RefExpr) and e.fullname == "builtins.True": + return True + elif isinstance(e, RefExpr) and e.fullname == "builtins.False": + return False + elif isinstance(e, RefExpr) and e.fullname == "builtins.None": + return None + elif isinstance(e, IntExpr): + return e.value + return NotImplemented + + +def get_mypyc_attr_call(d: Expression) -> CallExpr | None: + """Check if an expression is a call to mypyc_attr and return it if so.""" + if ( + isinstance(d, CallExpr) + and isinstance(d.callee, RefExpr) + and d.callee.fullname == "mypy_extensions.mypyc_attr" + ): + return d + return None + + +def get_mypyc_attrs( + stmt: ClassDef | Decorator, path: str, errors: Errors +) -> tuple[MypycAttrs, dict[MypycAttr, int]]: + """Collect all the mypyc_attr attributes on a class definition or a function.""" + attrs: MypycAttrs = {} + lines: dict[MypycAttr, int] = {} + + def set_mypyc_attr(key: str, value: Any, line: int) -> None: + if key in MYPYC_ATTRS: + key = cast(MypycAttr, key) + attrs[key] = value + lines[key] = line + else: + errors.error(f'"{key}" is not a supported "mypyc_attr"', path, line) + supported_keys = '", "'.join(sorted(MYPYC_ATTRS)) + errors.note(f'supported keys: "{supported_keys}"', path, line) + + for dec in stmt.decorators: + if d := get_mypyc_attr_call(dec): + line = d.line + for name, arg in zip(d.arg_names, d.args): + if name is None: + if isinstance(arg, StrExpr): + set_mypyc_attr(arg.value, True, line) + else: + errors.error( + 'All "mypyc_attr" positional arguments must be string literals.', + path, + line, + ) + else: + arg_value = get_mypyc_attr_literal(arg) + set_mypyc_attr(name, arg_value, line) + + return attrs, lines + + +def is_extension_class(path: str, cdef: ClassDef, errors: Errors) -> bool: + # Check for @mypyc_attr(native_class=True/False) decorator. + explicit_native_class = get_explicit_native_class(path, cdef, errors) + + # Classes with native_class=False are explicitly marked as non extension. + if explicit_native_class is False: + return False + + implicit_extension_class, reason = is_implicit_extension_class(cdef) + + # Classes with native_class=True should be extension classes, but they might + # not be able to be due to other reasons. Print an error in that case. 
+ if explicit_native_class is True and not implicit_extension_class: + errors.error( + f"Class is marked as native_class=True but it can't be a native class. {reason}", + path, + cdef.line, + ) + + return implicit_extension_class + + +def get_explicit_native_class(path: str, cdef: ClassDef, errors: Errors) -> bool | None: + """Return value of @mypyc_attr(native_class=True/False) decorator. + + Look for a @mypyc_attr decorator with native_class=True/False and return + the value assigned or None if it doesn't exist. Other values are an error. + """ + + for d in cdef.decorators: + mypyc_attr_call = get_mypyc_attr_call(d) + if not mypyc_attr_call: + continue + + for i, name in enumerate(mypyc_attr_call.arg_names): + if name != "native_class": + continue + + arg = mypyc_attr_call.args[i] + if not isinstance(arg, NameExpr): + errors.error("native_class must be used with True or False only", path, cdef.line) + return None + + if arg.name == "False": + return False + elif arg.name == "True": + return True + else: + errors.error("native_class must be used with True or False only", path, cdef.line) + return None + return None + + +def is_implicit_extension_class(cdef: ClassDef) -> tuple[bool, str]: + """Check if class can be extension class and return a user-friendly reason it can't be one.""" + + for d in cdef.decorators: + if ( + not is_trait_decorator(d) + and not is_dataclass_decorator(d) + and not get_mypyc_attr_call(d) + and not is_final_decorator(d) + ): + return ( + False, + "Classes that have decorators other than supported decorators" + " can't be native classes.", + ) + + if cdef.info.typeddict_type: + return False, "TypedDict classes can't be native classes." + if cdef.info.is_named_tuple: + return False, "NamedTuple classes can't be native classes." + if cdef.info.metaclass_type and cdef.info.metaclass_type.type.fullname not in ( + "abc.ABCMeta", + "typing.TypingMeta", + "typing.GenericMeta", + ): + return ( + False, + "Classes with a metaclass other than ABCMeta, TypingMeta or" + " GenericMeta can't be native classes.", + ) + return True, "" + + +def get_func_def(op: FuncDef | Decorator | OverloadedFuncDef) -> FuncDef: + if isinstance(op, OverloadedFuncDef): + assert op.impl + op = op.impl + if isinstance(op, Decorator): + op = op.func + return op + + +def concrete_arg_kind(kind: ArgKind) -> ArgKind: + """Find the concrete version of an arg kind that is being passed.""" + if kind == ARG_OPT: + return ARG_POS + elif kind == ARG_NAMED_OPT: + return ARG_NAMED + else: + return kind + + +def is_constant(e: Expression) -> bool: + """Check whether we allow an expression to appear as a default value. + + We don't currently properly support storing the evaluated + values for default arguments and default attribute values, so + we restrict what expressions we allow. We allow literals of + primitives types, None, and references to Final global + variables. + """ + return ( + isinstance(e, (StrExpr, BytesExpr, IntExpr, FloatExpr)) + or (isinstance(e, UnaryExpr) and e.op == "-" and isinstance(e.expr, (IntExpr, FloatExpr))) + or (isinstance(e, TupleExpr) and all(is_constant(e) for e in e.items)) + or ( + isinstance(e, RefExpr) + and e.kind == GDEF + and ( + e.fullname in ("builtins.True", "builtins.False", "builtins.None") + or (isinstance(e.node, Var) and e.node.is_final) + ) + ) + ) + + +def bytes_from_str(value: str) -> bytes: + """Convert a string representing bytes into actual bytes. 
+ + This is needed because the literal characters of BytesExpr (the + characters inside b'') are stored in BytesExpr.value, whose type is + 'str' not 'bytes'. + """ + return bytes(value, "utf8").decode("unicode-escape").encode("raw-unicode-escape") diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/visitor.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/visitor.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..5e8913a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/visitor.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/visitor.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/visitor.py new file mode 100644 index 0000000..dc81e95 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/visitor.py @@ -0,0 +1,410 @@ +"""Dispatcher used when transforming a mypy AST to the IR form. + +mypyc.irbuild.builder and mypyc.irbuild.main are closely related. +""" + +from __future__ import annotations + +from typing import NoReturn + +from mypy.nodes import ( + AssertStmt, + AssertTypeExpr, + AssignmentExpr, + AssignmentStmt, + AwaitExpr, + Block, + BreakStmt, + BytesExpr, + CallExpr, + CastExpr, + ClassDef, + ComparisonExpr, + ComplexExpr, + ConditionalExpr, + ContinueStmt, + Decorator, + DelStmt, + DictExpr, + DictionaryComprehension, + EllipsisExpr, + EnumCallExpr, + ExpressionStmt, + FloatExpr, + ForStmt, + FuncDef, + GeneratorExpr, + GlobalDecl, + IfStmt, + Import, + ImportAll, + ImportFrom, + IndexExpr, + IntExpr, + LambdaExpr, + ListComprehension, + ListExpr, + MatchStmt, + MemberExpr, + MypyFile, + NamedTupleExpr, + NameExpr, + NewTypeExpr, + NonlocalDecl, + OperatorAssignmentStmt, + OpExpr, + OverloadedFuncDef, + ParamSpecExpr, + PassStmt, + PromoteExpr, + RaiseStmt, + ReturnStmt, + RevealExpr, + SetComprehension, + SetExpr, + SliceExpr, + StarExpr, + StrExpr, + SuperExpr, + TempNode, + TryStmt, + TupleExpr, + TypeAliasExpr, + TypeAliasStmt, + TypeApplication, + TypedDictExpr, + TypeFormExpr, + TypeVarExpr, + TypeVarTupleExpr, + UnaryExpr, + Var, + WhileStmt, + WithStmt, + YieldExpr, + YieldFromExpr, +) +from mypyc.ir.ops import Value +from mypyc.irbuild.builder import IRBuilder, IRVisitor, UnsupportedException +from mypyc.irbuild.classdef import transform_class_def +from mypyc.irbuild.expression import ( + transform_assignment_expr, + transform_bytes_expr, + transform_call_expr, + transform_comparison_expr, + transform_complex_expr, + transform_conditional_expr, + transform_dict_expr, + transform_dictionary_comprehension, + transform_ellipsis, + transform_float_expr, + transform_generator_expr, + transform_index_expr, + transform_int_expr, + transform_list_comprehension, + transform_list_expr, + transform_member_expr, + transform_name_expr, + transform_op_expr, + transform_set_comprehension, + transform_set_expr, + transform_slice_expr, + transform_str_expr, + transform_super_expr, + transform_tuple_expr, + transform_unary_expr, +) +from mypyc.irbuild.function import ( + transform_decorator, + transform_func_def, + transform_lambda_expr, + transform_overloaded_func_def, +) +from mypyc.irbuild.statement import ( + transform_assert_stmt, + transform_assignment_stmt, + transform_await_expr, + transform_block, + transform_break_stmt, + transform_continue_stmt, + transform_del_stmt, + transform_expression_stmt, + transform_for_stmt, + transform_if_stmt, + transform_import, + transform_import_all, + transform_import_from, + 
transform_match_stmt, + transform_operator_assignment_stmt, + transform_raise_stmt, + transform_return_stmt, + transform_try_stmt, + transform_type_alias_stmt, + transform_while_stmt, + transform_with_stmt, + transform_yield_expr, + transform_yield_from_expr, +) + + +class IRBuilderVisitor(IRVisitor): + """Mypy node visitor that dispatches to node transform implementations. + + This class should have no non-trivial logic. + + This visitor is separated from the rest of code to improve modularity and + to avoid import cycles. + + This is based on the visitor pattern + (https://en.wikipedia.org/wiki/Visitor_pattern). + """ + + # This gets passed to all the implementations and contains all the + # state and many helpers. The attribute is initialized outside + # this class since this class and IRBuilder form a reference loop. + builder: IRBuilder + + def visit_mypy_file(self, mypyfile: MypyFile) -> None: + assert False, "use transform_mypy_file instead" + + def visit_class_def(self, cdef: ClassDef) -> None: + transform_class_def(self.builder, cdef) + + def visit_import(self, node: Import) -> None: + transform_import(self.builder, node) + + def visit_import_from(self, node: ImportFrom) -> None: + transform_import_from(self.builder, node) + + def visit_import_all(self, node: ImportAll) -> None: + transform_import_all(self.builder, node) + + def visit_func_def(self, fdef: FuncDef) -> None: + transform_func_def(self.builder, fdef) + + def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None: + transform_overloaded_func_def(self.builder, o) + + def visit_decorator(self, dec: Decorator) -> None: + transform_decorator(self.builder, dec) + + def visit_block(self, block: Block) -> None: + transform_block(self.builder, block) + + # Statements + + def visit_expression_stmt(self, stmt: ExpressionStmt) -> None: + transform_expression_stmt(self.builder, stmt) + + def visit_return_stmt(self, stmt: ReturnStmt) -> None: + transform_return_stmt(self.builder, stmt) + self.builder.mark_block_unreachable() + + def visit_assignment_stmt(self, stmt: AssignmentStmt) -> None: + transform_assignment_stmt(self.builder, stmt) + + def visit_operator_assignment_stmt(self, stmt: OperatorAssignmentStmt) -> None: + transform_operator_assignment_stmt(self.builder, stmt) + + def visit_if_stmt(self, stmt: IfStmt) -> None: + transform_if_stmt(self.builder, stmt) + + def visit_while_stmt(self, stmt: WhileStmt) -> None: + transform_while_stmt(self.builder, stmt) + + def visit_for_stmt(self, stmt: ForStmt) -> None: + transform_for_stmt(self.builder, stmt) + + def visit_break_stmt(self, stmt: BreakStmt) -> None: + transform_break_stmt(self.builder, stmt) + self.builder.mark_block_unreachable() + + def visit_continue_stmt(self, stmt: ContinueStmt) -> None: + transform_continue_stmt(self.builder, stmt) + self.builder.mark_block_unreachable() + + def visit_raise_stmt(self, stmt: RaiseStmt) -> None: + transform_raise_stmt(self.builder, stmt) + self.builder.mark_block_unreachable() + + def visit_try_stmt(self, stmt: TryStmt) -> None: + transform_try_stmt(self.builder, stmt) + + def visit_with_stmt(self, stmt: WithStmt) -> None: + transform_with_stmt(self.builder, stmt) + + def visit_pass_stmt(self, stmt: PassStmt) -> None: + pass + + def visit_assert_stmt(self, stmt: AssertStmt) -> None: + transform_assert_stmt(self.builder, stmt) + + def visit_del_stmt(self, stmt: DelStmt) -> None: + transform_del_stmt(self.builder, stmt) + + def visit_global_decl(self, stmt: GlobalDecl) -> None: + # Pure declaration -- no runtime effect + pass + 
+ def visit_nonlocal_decl(self, stmt: NonlocalDecl) -> None: + # Pure declaration -- no runtime effect + pass + + def visit_match_stmt(self, stmt: MatchStmt) -> None: + transform_match_stmt(self.builder, stmt) + + def visit_type_alias_stmt(self, stmt: TypeAliasStmt) -> None: + transform_type_alias_stmt(self.builder, stmt) + + # Expressions + + def visit_name_expr(self, expr: NameExpr) -> Value: + return transform_name_expr(self.builder, expr) + + def visit_member_expr(self, expr: MemberExpr) -> Value: + return transform_member_expr(self.builder, expr) + + def visit_super_expr(self, expr: SuperExpr) -> Value: + return transform_super_expr(self.builder, expr) + + def visit_call_expr(self, expr: CallExpr) -> Value: + return transform_call_expr(self.builder, expr) + + def visit_unary_expr(self, expr: UnaryExpr) -> Value: + return transform_unary_expr(self.builder, expr) + + def visit_op_expr(self, expr: OpExpr) -> Value: + return transform_op_expr(self.builder, expr) + + def visit_index_expr(self, expr: IndexExpr) -> Value: + return transform_index_expr(self.builder, expr) + + def visit_conditional_expr(self, expr: ConditionalExpr) -> Value: + return transform_conditional_expr(self.builder, expr) + + def visit_comparison_expr(self, expr: ComparisonExpr) -> Value: + return transform_comparison_expr(self.builder, expr) + + def visit_int_expr(self, expr: IntExpr) -> Value: + return transform_int_expr(self.builder, expr) + + def visit_float_expr(self, expr: FloatExpr) -> Value: + return transform_float_expr(self.builder, expr) + + def visit_complex_expr(self, expr: ComplexExpr) -> Value: + return transform_complex_expr(self.builder, expr) + + def visit_str_expr(self, expr: StrExpr) -> Value: + return transform_str_expr(self.builder, expr) + + def visit_bytes_expr(self, expr: BytesExpr) -> Value: + return transform_bytes_expr(self.builder, expr) + + def visit_ellipsis(self, expr: EllipsisExpr) -> Value: + return transform_ellipsis(self.builder, expr) + + def visit_list_expr(self, expr: ListExpr) -> Value: + return transform_list_expr(self.builder, expr) + + def visit_tuple_expr(self, expr: TupleExpr) -> Value: + return transform_tuple_expr(self.builder, expr) + + def visit_dict_expr(self, expr: DictExpr) -> Value: + return transform_dict_expr(self.builder, expr) + + def visit_set_expr(self, expr: SetExpr) -> Value: + return transform_set_expr(self.builder, expr) + + def visit_list_comprehension(self, expr: ListComprehension) -> Value: + return transform_list_comprehension(self.builder, expr) + + def visit_set_comprehension(self, expr: SetComprehension) -> Value: + return transform_set_comprehension(self.builder, expr) + + def visit_dictionary_comprehension(self, expr: DictionaryComprehension) -> Value: + return transform_dictionary_comprehension(self.builder, expr) + + def visit_slice_expr(self, expr: SliceExpr) -> Value: + return transform_slice_expr(self.builder, expr) + + def visit_generator_expr(self, expr: GeneratorExpr) -> Value: + return transform_generator_expr(self.builder, expr) + + def visit_lambda_expr(self, expr: LambdaExpr) -> Value: + return transform_lambda_expr(self.builder, expr) + + def visit_yield_expr(self, expr: YieldExpr) -> Value: + return transform_yield_expr(self.builder, expr) + + def visit_yield_from_expr(self, o: YieldFromExpr) -> Value: + return transform_yield_from_expr(self.builder, o) + + def visit_await_expr(self, o: AwaitExpr) -> Value: + return transform_await_expr(self.builder, o) + + def visit_assignment_expr(self, o: AssignmentExpr) -> Value: + return 
transform_assignment_expr(self.builder, o) + + # Constructs that shouldn't ever show up + + def visit_enum_call_expr(self, o: EnumCallExpr) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit__promote_expr(self, o: PromoteExpr) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit_namedtuple_expr(self, o: NamedTupleExpr) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit_newtype_expr(self, o: NewTypeExpr) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit_temp_node(self, o: TempNode) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit_type_alias_expr(self, o: TypeAliasExpr) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit_type_application(self, o: TypeApplication) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit_type_var_expr(self, o: TypeVarExpr) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit_paramspec_expr(self, o: ParamSpecExpr) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit_type_var_tuple_expr(self, o: TypeVarTupleExpr) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit_typeddict_expr(self, o: TypedDictExpr) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit_reveal_expr(self, o: RevealExpr) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit_var(self, o: Var) -> None: + assert False, "can't compile Var; should have been handled already?" + + def visit_cast_expr(self, o: CastExpr) -> Value: + assert False, "CastExpr should have been handled in CallExpr" + + def visit_type_form_expr(self, o: TypeFormExpr) -> Value: + assert False, "TypeFormExpr should have been handled in CallExpr" + + def visit_assert_type_expr(self, o: AssertTypeExpr) -> Value: + assert False, "AssertTypeExpr should have been handled in CallExpr" + + def visit_star_expr(self, o: StarExpr) -> Value: + assert False, "should have been handled in Tuple/List/Set/DictExpr or CallExpr" + + # Helpers + + def bail(self, msg: str, line: int) -> NoReturn: + """Reports an error and aborts compilation up until the last accept() call + + (accept() catches the UnsupportedException and keeps on + processing. This allows errors to be non-blocking without always + needing to write handling for them. 
+ """ + self.builder.error(msg, line) + raise UnsupportedException() diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/vtable.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/irbuild/vtable.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..c6ef901 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/irbuild/vtable.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/irbuild/vtable.py b/.venv/lib/python3.12/site-packages/mypyc/irbuild/vtable.py new file mode 100644 index 0000000..2d4f726 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/irbuild/vtable.py @@ -0,0 +1,82 @@ +"""Compute vtables of native (extension) classes.""" + +from __future__ import annotations + +import itertools + +from mypyc.ir.class_ir import ClassIR, VTableEntries, VTableMethod +from mypyc.sametype import is_same_method_signature + + +def compute_vtable(cls: ClassIR) -> None: + """Compute the vtable structure for a class.""" + if cls.vtable is not None: + return + + if not cls.is_generated: + cls.has_dict = any(x.inherits_python for x in cls.mro) + + for t in cls.mro[1:]: + # Make sure all ancestors are processed first + compute_vtable(t) + # Merge attributes from traits into the class + if not t.is_trait: + continue + for name, typ in t.attributes.items(): + if not cls.is_trait and not any(name in b.attributes for b in cls.base_mro): + cls.attributes[name] = typ + + cls.vtable = {} + if cls.base: + assert cls.base.vtable is not None + cls.vtable.update(cls.base.vtable) + cls.vtable_entries = specialize_parent_vtable(cls, cls.base) + + # Include the vtable from the parent classes, but handle method overrides. + entries = cls.vtable_entries + + all_traits = [t for t in cls.mro if t.is_trait] + + for t in [cls] + cls.traits: + for fn in itertools.chain(t.methods.values()): + # TODO: don't generate a new entry when we overload without changing the type + if fn == cls.get_method(fn.name, prefer_method=True): + cls.vtable[fn.name] = len(entries) + # If the class contains a glue method referring to itself, that is a + # shadow glue method to support interpreted subclasses. + shadow = cls.glue_methods.get((cls, fn.name)) + entries.append(VTableMethod(t, fn.name, fn, shadow)) + + # Compute vtables for all of the traits that the class implements + if not cls.is_trait: + for trait in all_traits: + compute_vtable(trait) + cls.trait_vtables[trait] = specialize_parent_vtable(cls, trait) + + +def specialize_parent_vtable(cls: ClassIR, parent: ClassIR) -> VTableEntries: + """Generate the part of a vtable corresponding to a parent class or trait""" + updated = [] + for entry in parent.vtable_entries: + # Find the original method corresponding to this vtable entry. + # (This may not be the method in the entry, if it was overridden.) 
+ orig_parent_method = entry.cls.get_method(entry.name, prefer_method=True) + assert orig_parent_method + method_cls = cls.get_method_and_class(entry.name, prefer_method=True) + if method_cls: + child_method, defining_cls = method_cls + # TODO: emit a wrapper for __init__ that raises or something + if ( + is_same_method_signature(orig_parent_method.sig, child_method.sig) + or orig_parent_method.name == "__init__" + ): + entry = VTableMethod(entry.cls, entry.name, child_method, entry.shadow_method) + else: + entry = VTableMethod( + entry.cls, + entry.name, + defining_cls.glue_methods[(entry.cls, entry.name)], + entry.shadow_method, + ) + updated.append(entry) + return updated diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/CPy.h b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/CPy.h new file mode 100644 index 0000000..6d1e750 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/CPy.h @@ -0,0 +1,975 @@ +// Mypyc C API + +#ifndef CPY_CPY_H +#define CPY_CPY_H + +#include +#include +#include +#include +#include +#include +#include "pythonsupport.h" +#include "mypyc_util.h" + +#ifdef __cplusplus +extern "C" { +#endif +#if 0 +} // why isn't emacs smart enough to not indent this +#endif + +#define CPYTHON_LARGE_INT_ERRMSG "Python int too large to convert to C ssize_t" + + +// Naming conventions: +// +// Tagged: tagged int +// Long: tagged long int (pointer) +// Short: tagged short int (unboxed) +// Ssize_t: A Py_ssize_t, which ought to be the same width as pointers +// Object: CPython object (PyObject *) + + +// Tuple type definitions needed for API functions + + +#ifndef MYPYC_DECLARED_tuple_T3OOO +#define MYPYC_DECLARED_tuple_T3OOO +typedef struct tuple_T3OOO { + PyObject *f0; + PyObject *f1; + PyObject *f2; +} tuple_T3OOO; +#endif + +// Our return tuple wrapper for dictionary iteration helper. +#ifndef MYPYC_DECLARED_tuple_T3CIO +#define MYPYC_DECLARED_tuple_T3CIO +typedef struct tuple_T3CIO { + char f0; // Should continue? + CPyTagged f1; // Last dict offset + PyObject *f2; // Next dictionary key or value +} tuple_T3CIO; +#endif + +// Same as above but for both key and value. +#ifndef MYPYC_DECLARED_tuple_T4CIOO +#define MYPYC_DECLARED_tuple_T4CIOO +typedef struct tuple_T4CIOO { + char f0; // Should continue? + CPyTagged f1; // Last dict offset + PyObject *f2; // Next dictionary key + PyObject *f3; // Next dictionary value +} tuple_T4CIOO; +#endif + +// System-wide empty tuple constant +extern PyObject * __mypyc_empty_tuple__; + +static inline PyObject *CPyTuple_LoadEmptyTupleConstant(void) { +#if !CPY_3_12_FEATURES + Py_INCREF(__mypyc_empty_tuple__); +#endif + return __mypyc_empty_tuple__; +} + +// Native object operations + + +// Search backwards through the trait part of a vtable (which sits *before* +// the start of the vtable proper) looking for the subvtable describing a trait +// implementation. We don't do any bounds checking so we'd better be pretty sure +// we know that it is there. +static inline CPyVTableItem *CPy_FindTraitVtable(PyTypeObject *trait, CPyVTableItem *vtable) { + int i; + for (i = -3; ; i -= 3) { + if ((PyTypeObject *)vtable[i] == trait) { + return (CPyVTableItem *)vtable[i + 1]; + } + } +} + +// Use the same logic for offset table. 
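+// (As both lookups assume, each trait record in that prefix is three slots wide:
+// the trait's PyTypeObject *, the trait's method subvtable, and the trait's
+// attribute offset table -- which is why the loops step by -3 and the offset
+// lookup reads vtable[i + 2].)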
+static inline size_t CPy_FindAttrOffset(PyTypeObject *trait, CPyVTableItem *vtable, size_t index) { + int i; + for (i = -3; ; i -= 3) { + if ((PyTypeObject *)vtable[i] == trait) { + return ((size_t *)vtable[i + 2])[index]; + } + } +} + +// Get attribute value using vtable (may return an undefined value) +#define CPY_GET_ATTR(obj, type, vtable_index, object_type, attr_type) \ + ((attr_type (*)(object_type *))((object_type *)obj)->vtable[vtable_index])((object_type *)obj) + +#define CPY_GET_ATTR_TRAIT(obj, trait, vtable_index, object_type, attr_type) \ + ((attr_type (*)(object_type *))(CPy_FindTraitVtable(trait, ((object_type *)obj)->vtable))[vtable_index])((object_type *)obj) + +// Set attribute value using vtable +#define CPY_SET_ATTR(obj, type, vtable_index, value, object_type, attr_type) \ + ((bool (*)(object_type *, attr_type))((object_type *)obj)->vtable[vtable_index])( \ + (object_type *)obj, value) + +#define CPY_SET_ATTR_TRAIT(obj, trait, vtable_index, value, object_type, attr_type) \ + ((bool (*)(object_type *, attr_type))(CPy_FindTraitVtable(trait, ((object_type *)obj)->vtable))[vtable_index])( \ + (object_type *)obj, value) + +#define CPY_GET_METHOD(obj, type, vtable_index, object_type, method_type) \ + ((method_type)(((object_type *)obj)->vtable[vtable_index])) + +#define CPY_GET_METHOD_TRAIT(obj, trait, vtable_index, object_type, method_type) \ + ((method_type)(CPy_FindTraitVtable(trait, ((object_type *)obj)->vtable)[vtable_index])) + + +// Int operations + + +CPyTagged CPyTagged_FromSsize_t(Py_ssize_t value); +CPyTagged CPyTagged_FromVoidPtr(void *ptr); +CPyTagged CPyTagged_FromInt64(int64_t value); +PyObject *CPyTagged_AsObject(CPyTagged x); +PyObject *CPyTagged_StealAsObject(CPyTagged x); +Py_ssize_t CPyTagged_AsSsize_t(CPyTagged x); +void CPyTagged_IncRef(CPyTagged x); +void CPyTagged_DecRef(CPyTagged x); +void CPyTagged_XDecRef(CPyTagged x); + +bool CPyTagged_IsEq_(CPyTagged left, CPyTagged right); +bool CPyTagged_IsLt_(CPyTagged left, CPyTagged right); +CPyTagged CPyTagged_Negate_(CPyTagged num); +CPyTagged CPyTagged_Invert_(CPyTagged num); +CPyTagged CPyTagged_Add_(CPyTagged left, CPyTagged right); +CPyTagged CPyTagged_Subtract_(CPyTagged left, CPyTagged right); +CPyTagged CPyTagged_Multiply_(CPyTagged left, CPyTagged right); +CPyTagged CPyTagged_FloorDivide_(CPyTagged left, CPyTagged right); +CPyTagged CPyTagged_Remainder_(CPyTagged left, CPyTagged right); +CPyTagged CPyTagged_BitwiseLongOp_(CPyTagged a, CPyTagged b, char op); +CPyTagged CPyTagged_Rshift_(CPyTagged left, CPyTagged right); +CPyTagged CPyTagged_Lshift_(CPyTagged left, CPyTagged right); +CPyTagged CPyTagged_BitLength(CPyTagged self); + +PyObject *CPyTagged_Str(CPyTagged n); +CPyTagged CPyTagged_FromFloat(double f); +PyObject *CPyLong_FromStrWithBase(PyObject *o, CPyTagged base); +PyObject *CPyLong_FromStr(PyObject *o); +PyObject *CPyBool_Str(bool b); +int64_t CPyLong_AsInt64_(PyObject *o); +int64_t CPyInt64_Divide(int64_t x, int64_t y); +int64_t CPyInt64_Remainder(int64_t x, int64_t y); +int32_t CPyLong_AsInt32_(PyObject *o); +int32_t CPyInt32_Divide(int32_t x, int32_t y); +int32_t CPyInt32_Remainder(int32_t x, int32_t y); +void CPyInt32_Overflow(void); +int16_t CPyLong_AsInt16_(PyObject *o); +int16_t CPyInt16_Divide(int16_t x, int16_t y); +int16_t CPyInt16_Remainder(int16_t x, int16_t y); +void CPyInt16_Overflow(void); +uint8_t CPyLong_AsUInt8_(PyObject *o); +void CPyUInt8_Overflow(void); +double CPyTagged_TrueDivide(CPyTagged x, CPyTagged y); + +static inline int CPyTagged_CheckLong(CPyTagged x) { + 
return x & CPY_INT_TAG; +} + +static inline int CPyTagged_CheckShort(CPyTagged x) { + return !CPyTagged_CheckLong(x); +} + +static inline void CPyTagged_INCREF(CPyTagged x) { + if (unlikely(CPyTagged_CheckLong(x))) { + CPyTagged_IncRef(x); + } +} + +static inline void CPyTagged_DECREF(CPyTagged x) { + if (unlikely(CPyTagged_CheckLong(x))) { + CPyTagged_DecRef(x); + } +} + +static inline void CPyTagged_XDECREF(CPyTagged x) { + if (unlikely(CPyTagged_CheckLong(x))) { + CPyTagged_XDecRef(x); + } +} + +static inline Py_ssize_t CPyTagged_ShortAsSsize_t(CPyTagged x) { + // NOTE: Assume that we sign extend. + return (Py_ssize_t)x >> 1; +} + +static inline PyObject *CPyTagged_LongAsObject(CPyTagged x) { + // NOTE: Assume target is not a short int. + return (PyObject *)(x & ~CPY_INT_TAG); +} + +static inline CPyTagged CPyTagged_FromObject(PyObject *object) { + int overflow; + // The overflow check knows about CPyTagged's width + Py_ssize_t value = CPyLong_AsSsize_tAndOverflow(object, &overflow); + if (unlikely(overflow != 0)) { + Py_INCREF(object); + return ((CPyTagged)object) | CPY_INT_TAG; + } else { + return value << 1; + } +} + +static inline CPyTagged CPyTagged_StealFromObject(PyObject *object) { + int overflow; + // The overflow check knows about CPyTagged's width + Py_ssize_t value = CPyLong_AsSsize_tAndOverflow(object, &overflow); + if (unlikely(overflow != 0)) { + return ((CPyTagged)object) | CPY_INT_TAG; + } else { + Py_DECREF(object); + return value << 1; + } +} + +static inline CPyTagged CPyTagged_BorrowFromObject(PyObject *object) { + int overflow; + // The overflow check knows about CPyTagged's width + Py_ssize_t value = CPyLong_AsSsize_tAndOverflow(object, &overflow); + if (unlikely(overflow != 0)) { + return ((CPyTagged)object) | CPY_INT_TAG; + } else { + return value << 1; + } +} + +static inline bool CPyTagged_TooBig(Py_ssize_t value) { + // Micro-optimized for the common case where it fits. + return (size_t)value > CPY_TAGGED_MAX + && (value >= 0 || value < CPY_TAGGED_MIN); +} + +static inline bool CPyTagged_TooBigInt64(int64_t value) { + // Micro-optimized for the common case where it fits. + return (uint64_t)value > CPY_TAGGED_MAX + && (value >= 0 || value < CPY_TAGGED_MIN); +} + +static inline bool CPyTagged_IsAddOverflow(CPyTagged sum, CPyTagged left, CPyTagged right) { + // This check was copied from some of my old code I believe that it works :-) + return (Py_ssize_t)(sum ^ left) < 0 && (Py_ssize_t)(sum ^ right) < 0; +} + +static inline bool CPyTagged_IsSubtractOverflow(CPyTagged diff, CPyTagged left, CPyTagged right) { + // This check was copied from some of my old code I believe that it works :-) + return (Py_ssize_t)(diff ^ left) < 0 && (Py_ssize_t)(diff ^ right) >= 0; +} + +static inline bool CPyTagged_IsMultiplyOverflow(CPyTagged left, CPyTagged right) { + // This is conservative -- return false only in a small number of all non-overflow cases + return left >= (1U << (CPY_INT_BITS/2 - 1)) || right >= (1U << (CPY_INT_BITS/2 - 1)); +} + +static inline bool CPyTagged_MaybeFloorDivideFault(CPyTagged left, CPyTagged right) { + return right == 0 || left == -((size_t)1 << (CPY_INT_BITS-1)); +} + +static inline bool CPyTagged_MaybeRemainderFault(CPyTagged left, CPyTagged right) { + // Division/modulus can fault when dividing INT_MIN by -1, but we + // do our mods on still-tagged integers with the low-bit clear, so + // -1 is actually represented as -2 and can't overflow. + // Mod by 0 can still fault though. 
+ return right == 0; +} + +static inline bool CPyTagged_IsEq(CPyTagged left, CPyTagged right) { + if (CPyTagged_CheckShort(left)) { + return left == right; + } else { + return CPyTagged_IsEq_(left, right); + } +} + +static inline bool CPyTagged_IsNe(CPyTagged left, CPyTagged right) { + if (CPyTagged_CheckShort(left)) { + return left != right; + } else { + return !CPyTagged_IsEq_(left, right); + } +} + +static inline bool CPyTagged_IsLt(CPyTagged left, CPyTagged right) { + if (CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right)) { + return (Py_ssize_t)left < (Py_ssize_t)right; + } else { + return CPyTagged_IsLt_(left, right); + } +} + +static inline bool CPyTagged_IsGe(CPyTagged left, CPyTagged right) { + if (CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right)) { + return (Py_ssize_t)left >= (Py_ssize_t)right; + } else { + return !CPyTagged_IsLt_(left, right); + } +} + +static inline bool CPyTagged_IsGt(CPyTagged left, CPyTagged right) { + if (CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right)) { + return (Py_ssize_t)left > (Py_ssize_t)right; + } else { + return CPyTagged_IsLt_(right, left); + } +} + +static inline bool CPyTagged_IsLe(CPyTagged left, CPyTagged right) { + if (CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right)) { + return (Py_ssize_t)left <= (Py_ssize_t)right; + } else { + return !CPyTagged_IsLt_(right, left); + } +} + +static inline int64_t CPyLong_AsInt64(PyObject *o) { + if (likely(PyLong_Check(o))) { + PyLongObject *lobj = (PyLongObject *)o; + Py_ssize_t size = Py_SIZE(lobj); + if (likely(size == 1)) { + // Fast path + return CPY_LONG_DIGIT(lobj, 0); + } else if (likely(size == 0)) { + return 0; + } + } + // Slow path + return CPyLong_AsInt64_(o); +} + +static inline int32_t CPyLong_AsInt32(PyObject *o) { + if (likely(PyLong_Check(o))) { + #if CPY_3_12_FEATURES + PyLongObject *lobj = (PyLongObject *)o; + size_t tag = CPY_LONG_TAG(lobj); + if (likely(tag == (1 << CPY_NON_SIZE_BITS))) { + // Fast path + return CPY_LONG_DIGIT(lobj, 0); + } else if (likely(tag == CPY_SIGN_ZERO)) { + return 0; + } + #else + PyLongObject *lobj = (PyLongObject *)o; + Py_ssize_t size = lobj->ob_base.ob_size; + if (likely(size == 1)) { + // Fast path + return CPY_LONG_DIGIT(lobj, 0); + } else if (likely(size == 0)) { + return 0; + } + #endif + } + // Slow path + return CPyLong_AsInt32_(o); +} + +static inline int16_t CPyLong_AsInt16(PyObject *o) { + if (likely(PyLong_Check(o))) { + #if CPY_3_12_FEATURES + PyLongObject *lobj = (PyLongObject *)o; + size_t tag = CPY_LONG_TAG(lobj); + if (likely(tag == (1 << CPY_NON_SIZE_BITS))) { + // Fast path + digit x = CPY_LONG_DIGIT(lobj, 0); + if (x < 0x8000) + return x; + } else if (likely(tag == CPY_SIGN_ZERO)) { + return 0; + } + #else + PyLongObject *lobj = (PyLongObject *)o; + Py_ssize_t size = lobj->ob_base.ob_size; + if (likely(size == 1)) { + // Fast path + digit x = lobj->ob_digit[0]; + if (x < 0x8000) + return x; + } else if (likely(size == 0)) { + return 0; + } + #endif + } + // Slow path + return CPyLong_AsInt16_(o); +} + +static inline uint8_t CPyLong_AsUInt8(PyObject *o) { + if (likely(PyLong_Check(o))) { + #if CPY_3_12_FEATURES + PyLongObject *lobj = (PyLongObject *)o; + size_t tag = CPY_LONG_TAG(lobj); + if (likely(tag == (1 << CPY_NON_SIZE_BITS))) { + // Fast path + digit x = CPY_LONG_DIGIT(lobj, 0); + if (x < 256) + return x; + } else if (likely(tag == CPY_SIGN_ZERO)) { + return 0; + } + #else + PyLongObject *lobj = (PyLongObject *)o; + Py_ssize_t size = lobj->ob_base.ob_size; + if (likely(size == 1)) { + // Fast path 
+ digit x = lobj->ob_digit[0]; + if (x < 256) + return x; + } else if (likely(size == 0)) { + return 0; + } + #endif + } + // Slow path + return CPyLong_AsUInt8_(o); +} + +static inline CPyTagged CPyTagged_Negate(CPyTagged num) { + if (likely(CPyTagged_CheckShort(num) + && num != (CPyTagged) ((Py_ssize_t)1 << (CPY_INT_BITS - 1)))) { + // The only possibility of an overflow error happening when negating a short is if we + // attempt to negate the most negative number. + return -num; + } + return CPyTagged_Negate_(num); +} + +static inline CPyTagged CPyTagged_Add(CPyTagged left, CPyTagged right) { + // TODO: Use clang/gcc extension __builtin_saddll_overflow instead. + if (likely(CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right))) { + CPyTagged sum = left + right; + if (likely(!CPyTagged_IsAddOverflow(sum, left, right))) { + return sum; + } + } + return CPyTagged_Add_(left, right); +} + +static inline CPyTagged CPyTagged_Subtract(CPyTagged left, CPyTagged right) { + // TODO: Use clang/gcc extension __builtin_saddll_overflow instead. + if (likely(CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right))) { + CPyTagged diff = left - right; + if (likely(!CPyTagged_IsSubtractOverflow(diff, left, right))) { + return diff; + } + } + return CPyTagged_Subtract_(left, right); +} + +static inline CPyTagged CPyTagged_Multiply(CPyTagged left, CPyTagged right) { + // TODO: Consider using some clang/gcc extension to check for overflow + if (CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right)) { + if (!CPyTagged_IsMultiplyOverflow(left, right)) { + return left * CPyTagged_ShortAsSsize_t(right); + } + } + return CPyTagged_Multiply_(left, right); +} + +static inline CPyTagged CPyTagged_FloorDivide(CPyTagged left, CPyTagged right) { + if (CPyTagged_CheckShort(left) + && CPyTagged_CheckShort(right) + && !CPyTagged_MaybeFloorDivideFault(left, right)) { + Py_ssize_t result = CPyTagged_ShortAsSsize_t(left) / CPyTagged_ShortAsSsize_t(right); + if (((Py_ssize_t)left < 0) != (((Py_ssize_t)right) < 0)) { + if (result * right != left) { + // Round down + result--; + } + } + return result << 1; + } + return CPyTagged_FloorDivide_(left, right); +} + +static inline CPyTagged CPyTagged_Remainder(CPyTagged left, CPyTagged right) { + if (CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right) + && !CPyTagged_MaybeRemainderFault(left, right)) { + Py_ssize_t result = (Py_ssize_t)left % (Py_ssize_t)right; + if (((Py_ssize_t)right < 0) != ((Py_ssize_t)left < 0) && result != 0) { + result += right; + } + return result; + } + return CPyTagged_Remainder_(left, right); +} + +// Bitwise '~' +static inline CPyTagged CPyTagged_Invert(CPyTagged num) { + if (likely(CPyTagged_CheckShort(num) && num != CPY_TAGGED_ABS_MIN)) { + return ~num & ~CPY_INT_TAG; + } + return CPyTagged_Invert_(num); +} + +// Bitwise '&' +static inline CPyTagged CPyTagged_And(CPyTagged left, CPyTagged right) { + if (likely(CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right))) { + return left & right; + } + return CPyTagged_BitwiseLongOp_(left, right, '&'); +} + +// Bitwise '|' +static inline CPyTagged CPyTagged_Or(CPyTagged left, CPyTagged right) { + if (likely(CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right))) { + return left | right; + } + return CPyTagged_BitwiseLongOp_(left, right, '|'); +} + +// Bitwise '^' +static inline CPyTagged CPyTagged_Xor(CPyTagged left, CPyTagged right) { + if (likely(CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right))) { + return left ^ right; + } + return CPyTagged_BitwiseLongOp_(left, right, 
'^'); +} + +// Bitwise '>>' +static inline CPyTagged CPyTagged_Rshift(CPyTagged left, CPyTagged right) { + if (likely(CPyTagged_CheckShort(left) + && CPyTagged_CheckShort(right) + && (Py_ssize_t)right >= 0)) { + CPyTagged count = CPyTagged_ShortAsSsize_t(right); + if (unlikely(count >= CPY_INT_BITS)) { + if ((Py_ssize_t)left >= 0) { + return 0; + } else { + return CPyTagged_ShortFromInt(-1); + } + } + return ((Py_ssize_t)left >> count) & ~CPY_INT_TAG; + } + return CPyTagged_Rshift_(left, right); +} + +static inline bool IsShortLshiftOverflow(Py_ssize_t short_int, Py_ssize_t shift) { + return ((Py_ssize_t)(short_int << shift) >> shift) != short_int; +} + +// Bitwise '<<' +static inline CPyTagged CPyTagged_Lshift(CPyTagged left, CPyTagged right) { + if (likely(CPyTagged_CheckShort(left) + && CPyTagged_CheckShort(right) + && (Py_ssize_t)right >= 0 + && right < CPY_INT_BITS * 2)) { + CPyTagged shift = CPyTagged_ShortAsSsize_t(right); + if (!IsShortLshiftOverflow(left, shift)) + // Short integers, no overflow + return left << shift; + } + return CPyTagged_Lshift_(left, right); +} + + +// Float operations + + +double CPyFloat_FloorDivide(double x, double y); +double CPyFloat_Pow(double x, double y); +double CPyFloat_Sin(double x); +double CPyFloat_Cos(double x); +double CPyFloat_Tan(double x); +double CPyFloat_Sqrt(double x); +double CPyFloat_Exp(double x); +double CPyFloat_Log(double x); +CPyTagged CPyFloat_Floor(double x); +CPyTagged CPyFloat_Ceil(double x); +double CPyFloat_FromTagged(CPyTagged x); +bool CPyFloat_IsInf(double x); +bool CPyFloat_IsNaN(double x); + + +// Generic operations (that work with arbitrary types) + + +/* We use intentionally non-inlined decrefs in rarely executed code + * paths since it pretty substantially speeds up compile time. We have + * our own copies both to avoid the null check in Py_DecRef and to avoid + * making an indirect PIC call. */ +CPy_NOINLINE +static void CPy_DecRef(PyObject *p) { + CPy_DECREF(p); +} + +CPy_NOINLINE +static void CPy_XDecRef(PyObject *p) { + CPy_XDECREF(p); +} + +static inline CPyTagged CPyObject_Size(PyObject *obj) { + Py_ssize_t s = PyObject_Size(obj); + if (s < 0) { + return CPY_INT_TAG; + } else { + // Technically __len__ could return a really big number, so we + // should allow this to produce a boxed int. In practice it + // shouldn't ever if the data structure actually contains all + // the elements, but... + return CPyTagged_FromSsize_t(s); + } +} + +#ifdef MYPYC_LOG_GETATTR +static void CPy_LogGetAttr(const char *method, PyObject *obj, PyObject *attr) { + PyObject *module = PyImport_ImportModule("getattr_hook"); + if (module) { + PyObject *res = PyObject_CallMethodObjArgs(module, method, obj, attr, NULL); + Py_XDECREF(res); + Py_DECREF(module); + } + PyErr_Clear(); +} +#else +#define CPy_LogGetAttr(method, obj, attr) (void)0 +#endif + +// Intercept a method call and log it. This needs to be a macro +// because there is no API that accepts va_args for making a +// call. Worse, it needs to use the comma operator to return the right +// value. +#define CPyObject_CallMethodObjArgs(obj, attr, ...) \ + (CPy_LogGetAttr("log_method", (obj), (attr)), \ + PyObject_CallMethodObjArgs((obj), (attr), __VA_ARGS__)) + +// This one is a macro for consistency with the above, I guess. 
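/* ----------------------------------------------------------------------
 * Illustrative aside (not part of CPy.h): a minimal, standalone sketch of
 * the tagged-integer conventions used by the CPyTagged_* helpers above.
 * It assumes the encoding defined earlier in this header: the low bit
 * (CPY_INT_TAG) marks a boxed PyLong pointer, short ints are stored as
 * value << 1, and untagging relies on an arithmetic right shift. Plain
 * intptr_t stands in for CPyTagged so the sketch builds without CPython.
 * -------------------------------------------------------------------- */
#include <stdint.h>
#include <stdio.h>

#define SKETCH_TAG 1  /* plays the role of CPY_INT_TAG */

static intptr_t sketch_tag_short(intptr_t value) {
    return (intptr_t)((uintptr_t)value << 1);   /* tag bit stays clear */
}

static intptr_t sketch_untag_short(intptr_t t) {
    return t >> 1;                              /* assumes sign-extending shift */
}

static int sketch_is_short(intptr_t t) {
    return (t & SKETCH_TAG) == 0;
}

/* Same sign test as CPyTagged_IsAddOverflow: the tagged sum overflowed iff
 * its sign differs from the sign of *both* operands. */
static int sketch_add_overflows(intptr_t sum, intptr_t left, intptr_t right) {
    return (sum ^ left) < 0 && (sum ^ right) < 0;
}

/* Same rounding fix-up as CPyTagged_FloorDivide: C division truncates toward
 * zero while Python floors, so step the quotient down when the signs differ
 * and the division was inexact. */
static intptr_t sketch_floor_div(intptr_t a, intptr_t b) {
    intptr_t q = a / b;
    if ((a < 0) != (b < 0) && q * b != a)
        q--;
    return q;
}

int main(void) {
    intptr_t five = sketch_tag_short(5), neg2 = sketch_tag_short(-2);
    printf("short? %d %d, values %ld %ld\n",
           sketch_is_short(five), sketch_is_short(neg2),
           (long)sketch_untag_short(five), (long)sketch_untag_short(neg2));
    printf("overflowed? %d\n", sketch_add_overflows(five + neg2, five, neg2));
    printf("-7 // 2 == %ld (C's -7 / 2 is -3)\n", (long)sketch_floor_div(-7, 2));
    return 0;
}
/* --------------------------- end of aside ----------------------------- */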
+#define CPyObject_GetAttr(obj, attr) \ + (CPy_LogGetAttr("log", (obj), (attr)), \ + PyObject_GetAttr((obj), (attr))) + +CPyTagged CPyObject_Hash(PyObject *o); +PyObject *CPyObject_GetAttr3(PyObject *v, PyObject *name, PyObject *defl); +PyObject *CPyIter_Next(PyObject *iter); +PyObject *CPyNumber_Power(PyObject *base, PyObject *index); +PyObject *CPyNumber_InPlacePower(PyObject *base, PyObject *index); +PyObject *CPyObject_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end); + + +// List operations + + +PyObject *CPyList_Build(Py_ssize_t len, ...); +PyObject *CPyList_GetItem(PyObject *list, CPyTagged index); +PyObject *CPyList_GetItemShort(PyObject *list, CPyTagged index); +PyObject *CPyList_GetItemBorrow(PyObject *list, CPyTagged index); +PyObject *CPyList_GetItemShortBorrow(PyObject *list, CPyTagged index); +PyObject *CPyList_GetItemInt64(PyObject *list, int64_t index); +PyObject *CPyList_GetItemInt64Borrow(PyObject *list, int64_t index); +bool CPyList_SetItem(PyObject *list, CPyTagged index, PyObject *value); +void CPyList_SetItemUnsafe(PyObject *list, Py_ssize_t index, PyObject *value); +bool CPyList_SetItemInt64(PyObject *list, int64_t index, PyObject *value); +PyObject *CPyList_PopLast(PyObject *obj); +PyObject *CPyList_Pop(PyObject *obj, CPyTagged index); +CPyTagged CPyList_Count(PyObject *obj, PyObject *value); +int CPyList_Insert(PyObject *list, CPyTagged index, PyObject *value); +PyObject *CPyList_Extend(PyObject *o1, PyObject *o2); +int CPyList_Remove(PyObject *list, PyObject *obj); +CPyTagged CPyList_Index(PyObject *list, PyObject *obj); +PyObject *CPySequence_Sort(PyObject *seq); +PyObject *CPySequence_Multiply(PyObject *seq, CPyTagged t_size); +PyObject *CPySequence_RMultiply(CPyTagged t_size, PyObject *seq); +PyObject *CPySequence_InPlaceMultiply(PyObject *seq, CPyTagged t_size); +PyObject *CPyList_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end); +char CPyList_Clear(PyObject *list); +PyObject *CPyList_Copy(PyObject *list); +int CPySequence_Check(PyObject *obj); + + +// Dict operations + + +PyObject *CPyDict_GetItem(PyObject *dict, PyObject *key); +int CPyDict_SetItem(PyObject *dict, PyObject *key, PyObject *value); +PyObject *CPyDict_Get(PyObject *dict, PyObject *key, PyObject *fallback); +PyObject *CPyDict_GetWithNone(PyObject *dict, PyObject *key); +PyObject *CPyDict_SetDefault(PyObject *dict, PyObject *key, PyObject *value); +PyObject *CPyDict_SetDefaultWithNone(PyObject *dict, PyObject *key); +PyObject *CPyDict_SetDefaultWithEmptyDatatype(PyObject *dict, PyObject *key, int data_type); +PyObject *CPyDict_Build(Py_ssize_t size, ...); +int CPyDict_Update(PyObject *dict, PyObject *stuff); +int CPyDict_UpdateInDisplay(PyObject *dict, PyObject *stuff); +int CPyDict_UpdateFromAny(PyObject *dict, PyObject *stuff); +PyObject *CPyDict_FromAny(PyObject *obj); +PyObject *CPyDict_KeysView(PyObject *dict); +PyObject *CPyDict_ValuesView(PyObject *dict); +PyObject *CPyDict_ItemsView(PyObject *dict); +PyObject *CPyDict_Keys(PyObject *dict); +PyObject *CPyDict_Values(PyObject *dict); +PyObject *CPyDict_Items(PyObject *dict); +char CPyDict_Clear(PyObject *dict); +PyObject *CPyDict_Copy(PyObject *dict); +PyObject *CPyDict_GetKeysIter(PyObject *dict); +PyObject *CPyDict_GetItemsIter(PyObject *dict); +PyObject *CPyDict_GetValuesIter(PyObject *dict); +tuple_T3CIO CPyDict_NextKey(PyObject *dict_or_iter, CPyTagged offset); +tuple_T3CIO CPyDict_NextValue(PyObject *dict_or_iter, CPyTagged offset); +tuple_T4CIOO CPyDict_NextItem(PyObject *dict_or_iter, CPyTagged offset); +int 
CPyMapping_Check(PyObject *obj); + +// Check that dictionary didn't change size during iteration. +static inline char CPyDict_CheckSize(PyObject *dict, Py_ssize_t size) { + if (!PyDict_CheckExact(dict)) { + // Dict subclasses will be checked by Python runtime. + return 1; + } + Py_ssize_t dict_size = PyDict_Size(dict); + if (size != dict_size) { + PyErr_SetString(PyExc_RuntimeError, "dictionary changed size during iteration"); + return 0; + } + return 1; +} + + +// Str operations + +// Macros for strip type. These values are copied from CPython. +#define LEFTSTRIP 0 +#define RIGHTSTRIP 1 +#define BOTHSTRIP 2 + +char CPyStr_Equal(PyObject *str1, PyObject *str2); +char CPyStr_EqualLiteral(PyObject *str, PyObject *literal_str, Py_ssize_t literal_length); +PyObject *CPyStr_Build(Py_ssize_t len, ...); +PyObject *CPyStr_GetItem(PyObject *str, CPyTagged index); +PyObject *CPyStr_GetItemUnsafe(PyObject *str, Py_ssize_t index); +CPyTagged CPyStr_Find(PyObject *str, PyObject *substr, CPyTagged start, int direction); +CPyTagged CPyStr_FindWithEnd(PyObject *str, PyObject *substr, CPyTagged start, CPyTagged end, int direction); +PyObject *CPyStr_Split(PyObject *str, PyObject *sep, CPyTagged max_split); +PyObject *CPyStr_RSplit(PyObject *str, PyObject *sep, CPyTagged max_split); +PyObject *_CPyStr_Strip(PyObject *self, int strip_type, PyObject *sep); +static inline PyObject *CPyStr_Strip(PyObject *self, PyObject *sep) { + return _CPyStr_Strip(self, BOTHSTRIP, sep); +} +static inline PyObject *CPyStr_LStrip(PyObject *self, PyObject *sep) { + return _CPyStr_Strip(self, LEFTSTRIP, sep); +} +static inline PyObject *CPyStr_RStrip(PyObject *self, PyObject *sep) { + return _CPyStr_Strip(self, RIGHTSTRIP, sep); +} +PyObject *CPyStr_Replace(PyObject *str, PyObject *old_substr, PyObject *new_substr, CPyTagged max_replace); +PyObject *CPyStr_Append(PyObject *o1, PyObject *o2); +PyObject *CPyStr_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end); +int CPyStr_Startswith(PyObject *self, PyObject *subobj); +int CPyStr_Endswith(PyObject *self, PyObject *subobj); +PyObject *CPyStr_Removeprefix(PyObject *self, PyObject *prefix); +PyObject *CPyStr_Removesuffix(PyObject *self, PyObject *suffix); +bool CPyStr_IsTrue(PyObject *obj); +Py_ssize_t CPyStr_Size_size_t(PyObject *str); +PyObject *CPy_Decode(PyObject *obj, PyObject *encoding, PyObject *errors); +PyObject *CPy_DecodeUTF8(PyObject *bytes); +PyObject *CPy_DecodeASCII(PyObject *bytes); +PyObject *CPy_DecodeLatin1(PyObject *bytes); +PyObject *CPy_Encode(PyObject *obj, PyObject *encoding, PyObject *errors); +Py_ssize_t CPyStr_Count(PyObject *unicode, PyObject *substring, CPyTagged start); +Py_ssize_t CPyStr_CountFull(PyObject *unicode, PyObject *substring, CPyTagged start, CPyTagged end); +CPyTagged CPyStr_Ord(PyObject *obj); + + +// Bytes operations + + +PyObject *CPyBytes_Build(Py_ssize_t len, ...); +PyObject *CPyBytes_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end); +CPyTagged CPyBytes_GetItem(PyObject *o, CPyTagged index); +PyObject *CPyBytes_Concat(PyObject *a, PyObject *b); +PyObject *CPyBytes_Join(PyObject *sep, PyObject *iter); +CPyTagged CPyBytes_Ord(PyObject *obj); + + +int CPyBytes_Compare(PyObject *left, PyObject *right); + + +// Set operations + + +bool CPySet_Remove(PyObject *set, PyObject *key); + + +// Tuple operations + + +PyObject *CPySequenceTuple_GetItem(PyObject *tuple, CPyTagged index); +PyObject *CPySequenceTuple_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end); +PyObject *CPySequenceTuple_GetItemUnsafe(PyObject *tuple, 
Py_ssize_t index); +void CPySequenceTuple_SetItemUnsafe(PyObject *tuple, Py_ssize_t index, PyObject *value); + + +// Exception operations + + +// mypyc is not very good at dealing with refcount management of +// pointers that might be NULL. As a workaround for this, the +// exception APIs that might want to return NULL pointers instead +// return properly refcounted pointers to this dummy object. +struct ExcDummyStruct { PyObject_HEAD }; +extern struct ExcDummyStruct _CPy_ExcDummyStruct; +extern PyObject *_CPy_ExcDummy; + +static inline void _CPy_ToDummy(PyObject **p) { + if (*p == NULL) { + Py_INCREF(_CPy_ExcDummy); + *p = _CPy_ExcDummy; + } +} + +static inline PyObject *_CPy_FromDummy(PyObject *p) { + if (p == _CPy_ExcDummy) return NULL; + Py_INCREF(p); + return p; +} + +static int CPy_NoErrOccurred(void) { + return PyErr_Occurred() == NULL; +} + +static inline bool CPy_KeepPropagating(void) { + return 0; +} +// We want to avoid the public PyErr_GetExcInfo API for these because +// it requires a bunch of spurious refcount traffic on the parts of +// the triple we don't care about. +#define CPy_ExcState() PyThreadState_GET()->exc_info + +void CPy_Raise(PyObject *exc); +void CPy_Reraise(void); +void CPyErr_SetObjectAndTraceback(PyObject *type, PyObject *value, PyObject *traceback); +tuple_T3OOO CPy_CatchError(void); +void CPy_RestoreExcInfo(tuple_T3OOO info); +bool CPy_ExceptionMatches(PyObject *type); +PyObject *CPy_GetExcValue(void); +tuple_T3OOO CPy_GetExcInfo(void); +void _CPy_GetExcInfo(PyObject **p_type, PyObject **p_value, PyObject **p_traceback); +void CPyError_OutOfMemory(void); +void CPy_TypeError(const char *expected, PyObject *value); +void CPy_AddTraceback(const char *filename, const char *funcname, int line, PyObject *globals); +void CPy_TypeErrorTraceback(const char *filename, const char *funcname, int line, + PyObject *globals, const char *expected, PyObject *value); +void CPy_AttributeError(const char *filename, const char *funcname, const char *classname, + const char *attrname, int line, PyObject *globals); + + +// Misc operations + +#define CPy_TRASHCAN_BEGIN(op, dealloc) Py_TRASHCAN_BEGIN(op, dealloc) +#define CPy_TRASHCAN_END(op) Py_TRASHCAN_END + +// Tweaked version of _PyArg_Parser in CPython +typedef struct CPyArg_Parser { + const char *format; + const char * const *keywords; + const char *fname; + const char *custom_msg; + int pos; /* number of positional-only arguments */ + int min; /* minimal number of arguments */ + int max; /* maximal number of positional arguments */ + int has_required_kws; /* are there any keyword-only arguments? */ + int required_kwonly_start; + int varargs; /* does the function accept *args or **kwargs? 
*/ + PyObject *kwtuple; /* tuple of keyword parameter names */ + struct CPyArg_Parser *next; +} CPyArg_Parser; + +// mypy lets ints silently coerce to floats, so a mypyc runtime float +// might be an int also +static inline bool CPyFloat_Check(PyObject *o) { + return PyFloat_Check(o) || PyLong_Check(o); +} + +// TODO: find an unified way to avoid inline functions in non-C back ends that can not +// use inline functions +static inline bool CPy_TypeCheck(PyObject *o, PyObject *type) { + return PyObject_TypeCheck(o, (PyTypeObject *)type); +} + +static inline PyObject *CPy_TYPE(PyObject *obj) { + PyObject *result = (PyObject *)Py_TYPE(obj); + Py_INCREF(result); + return result; +} + +PyObject *CPy_CalculateMetaclass(PyObject *type, PyObject *o); +PyObject *CPy_GetCoro(PyObject *obj); +PyObject *CPyIter_Send(PyObject *iter, PyObject *val); +int CPy_YieldFromErrorHandle(PyObject *iter, PyObject **outp); +PyObject *CPy_FetchStopIterationValue(void); +PyObject *CPyType_FromTemplate(PyObject *template_, + PyObject *orig_bases, + PyObject *modname); +PyObject *CPyType_FromTemplateWrapper(PyObject *template_, + PyObject *orig_bases, + PyObject *modname); +int CPyDataclass_SleightOfHand(PyObject *dataclass_dec, PyObject *tp, + PyObject *dict, PyObject *annotations, + PyObject *dataclass_type); +PyObject *CPyPickle_SetState(PyObject *obj, PyObject *state); +PyObject *CPyPickle_GetState(PyObject *obj); +CPyTagged CPyTagged_Id(PyObject *o); +void CPyDebug_Print(const char *msg); +void CPyDebug_PrintObject(PyObject *obj); +void CPy_Init(void); +int CPyArg_ParseTupleAndKeywords(PyObject *, PyObject *, + const char *, const char *, const char * const *, ...); +int CPyArg_ParseStackAndKeywords(PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames, + CPyArg_Parser *parser, ...); +int CPyArg_ParseStackAndKeywordsNoArgs(PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames, + CPyArg_Parser *parser, ...); +int CPyArg_ParseStackAndKeywordsOneArg(PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames, + CPyArg_Parser *parser, ...); +int CPyArg_ParseStackAndKeywordsSimple(PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames, + CPyArg_Parser *parser, ...); + +int CPySequence_CheckUnpackCount(PyObject *sequence, Py_ssize_t expected); +int CPyStatics_Initialize(PyObject **statics, + const char * const *strings, + const char * const *bytestrings, + const char * const *ints, + const double *floats, + const double *complex_numbers, + const int *tuples, + const int *frozensets); +PyObject *CPy_Super(PyObject *builtins, PyObject *self); +PyObject *CPy_CallReverseOpMethod(PyObject *left, PyObject *right, const char *op, + _Py_Identifier *method); + +bool CPyImport_ImportMany(PyObject *modules, CPyModule **statics[], PyObject *globals, + PyObject *tb_path, PyObject *tb_function, Py_ssize_t *tb_lines); +PyObject *CPyImport_ImportFromMany(PyObject *mod_id, PyObject *names, PyObject *as_names, + PyObject *globals); + +PyObject *CPySingledispatch_RegisterFunction(PyObject *singledispatch_func, PyObject *cls, + PyObject *func); + +PyObject *CPy_GetAIter(PyObject *obj); +PyObject *CPy_GetANext(PyObject *aiter); +void CPy_SetTypeAliasTypeComputeFunction(PyObject *alias, PyObject *compute_value); +void CPyTrace_LogEvent(const char *location, const char *line, const char *op, const char *details); + +static inline PyObject *CPyObject_GenericGetAttr(PyObject *self, PyObject *name) { + return _PyObject_GenericGetAttrWithDict(self, name, NULL, 1); +} +static inline int CPyObject_GenericSetAttr(PyObject *self, 
PyObject *name, PyObject *value) { + return _PyObject_GenericSetAttrWithDict(self, name, value, NULL); +} + +PyObject *CPy_SetupObject(PyObject *type); + +#if CPY_3_11_FEATURES +PyObject *CPy_GetName(PyObject *obj); +#endif + +#if CPY_3_14_FEATURES +void CPy_SetImmortal(PyObject *obj); +#endif + +#ifdef __cplusplus +} +#endif + +#endif // CPY_CPY_H diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx/codec.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx/codec.c new file mode 100644 index 0000000..7a64a94 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx/codec.c @@ -0,0 +1,68 @@ +#include +#include +#include + +#include "libbase64.h" +#include "../../tables/tables.h" +#include "../../codecs.h" +#include "config.h" +#include "../../env.h" + +#if HAVE_AVX +#include + +// Only enable inline assembly on supported compilers and on 64-bit CPUs. +#ifndef BASE64_AVX_USE_ASM +# if (defined(__GNUC__) || defined(__clang__)) && BASE64_WORDSIZE == 64 +# define BASE64_AVX_USE_ASM 1 +# else +# define BASE64_AVX_USE_ASM 0 +# endif +#endif + +#include "../ssse3/dec_reshuffle.c" +#include "../ssse3/dec_loop.c" + +#if BASE64_AVX_USE_ASM +# include "./enc_loop_asm.c" +#else +# include "../ssse3/enc_translate.c" +# include "../ssse3/enc_reshuffle.c" +# include "../ssse3/enc_loop.c" +#endif + +#endif // HAVE_AVX + +void +base64_stream_encode_avx BASE64_ENC_PARAMS +{ +#if HAVE_AVX + #include "../generic/enc_head.c" + + // For supported compilers, use a hand-optimized inline assembly + // encoder. Otherwise fall back on the SSSE3 encoder, but compiled with + // AVX flags to generate better optimized AVX code. + +#if BASE64_AVX_USE_ASM + enc_loop_avx(&s, &slen, &o, &olen); +#else + enc_loop_ssse3(&s, &slen, &o, &olen); +#endif + + #include "../generic/enc_tail.c" +#else + base64_enc_stub(state, src, srclen, out, outlen); +#endif +} + +int +base64_stream_decode_avx BASE64_DEC_PARAMS +{ +#if HAVE_AVX + #include "../generic/dec_head.c" + dec_loop_ssse3(&s, &slen, &o, &olen); + #include "../generic/dec_tail.c" +#else + return base64_dec_stub(state, src, srclen, out, outlen); +#endif +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx/enc_loop_asm.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx/enc_loop_asm.c new file mode 100644 index 0000000..979269a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx/enc_loop_asm.c @@ -0,0 +1,264 @@ +// Apologies in advance for combining the preprocessor with inline assembly, +// two notoriously gnarly parts of C, but it was necessary to avoid a lot of +// code repetition. The preprocessor is used to template large sections of +// inline assembly that differ only in the registers used. If the code was +// written out by hand, it would become very large and hard to audit. + +// Generate a block of inline assembly that loads register R0 from memory. The +// offset at which the register is loaded is set by the given round. +#define LOAD(R0, ROUND) \ + "vlddqu ("#ROUND" * 12)(%[src]), %["R0"] \n\t" + +// Generate a block of inline assembly that deinterleaves and shuffles register +// R0 using preloaded constants. Outputs in R0 and R1. 
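/* ----------------------------------------------------------------------
 * Illustrative aside (not part of the vendored file): a scalar reference
 * for what one encoder round computes. SHUF (below) extracts four 6-bit
 * groups from each 3-byte block and TRAN maps them into the base64
 * alphabet by adding a per-range offset -- the same +65/+71/-4/-19/-16
 * offsets packed into the lut1 constant further down in this file. This
 * standalone sketch does the same work one group at a time.
 * -------------------------------------------------------------------- */
#include <stdint.h>
#include <stdio.h>

static uint8_t sketch_translate6(uint8_t v) {
    int offset;
    if (v < 26)       offset = 65;   /* 0..25  -> 'A'..'Z' */
    else if (v < 52)  offset = 71;   /* 26..51 -> 'a'..'z' */
    else if (v < 62)  offset = -4;   /* 52..61 -> '0'..'9' */
    else if (v == 62) offset = -19;  /* 62     -> '+'      */
    else              offset = -16;  /* 63     -> '/'      */
    return (uint8_t)(v + offset);
}

static void sketch_encode_group(const uint8_t in[3], uint8_t out[4]) {
    out[0] = sketch_translate6(in[0] >> 2);
    out[1] = sketch_translate6((uint8_t)(((in[0] & 0x03) << 4) | (in[1] >> 4)));
    out[2] = sketch_translate6((uint8_t)(((in[1] & 0x0F) << 2) | (in[2] >> 6)));
    out[3] = sketch_translate6(in[2] & 0x3F);
}

int main(void) {
    const uint8_t src[3] = { 'M', 'a', 'n' };
    uint8_t dst[4];
    sketch_encode_group(src, dst);
    printf("%c%c%c%c\n", dst[0], dst[1], dst[2], dst[3]);  /* prints "TWFu" */
    return 0;
}
/* --------------------------- end of aside ----------------------------- */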
+#define SHUF(R0, R1, R2) \ + "vpshufb %[lut0], %["R0"], %["R1"] \n\t" \ + "vpand %["R1"], %[msk0], %["R2"] \n\t" \ + "vpand %["R1"], %[msk2], %["R1"] \n\t" \ + "vpmulhuw %["R2"], %[msk1], %["R2"] \n\t" \ + "vpmullw %["R1"], %[msk3], %["R1"] \n\t" \ + "vpor %["R1"], %["R2"], %["R1"] \n\t" + +// Generate a block of inline assembly that takes R0 and R1 and translates +// their contents to the base64 alphabet, using preloaded constants. +#define TRAN(R0, R1, R2) \ + "vpsubusb %[n51], %["R1"], %["R0"] \n\t" \ + "vpcmpgtb %[n25], %["R1"], %["R2"] \n\t" \ + "vpsubb %["R2"], %["R0"], %["R0"] \n\t" \ + "vpshufb %["R0"], %[lut1], %["R2"] \n\t" \ + "vpaddb %["R1"], %["R2"], %["R0"] \n\t" + +// Generate a block of inline assembly that stores the given register R0 at an +// offset set by the given round. +#define STOR(R0, ROUND) \ + "vmovdqu %["R0"], ("#ROUND" * 16)(%[dst]) \n\t" + +// Generate a block of inline assembly that generates a single self-contained +// encoder round: fetch the data, process it, and store the result. Then update +// the source and destination pointers. +#define ROUND() \ + LOAD("a", 0) \ + SHUF("a", "b", "c") \ + TRAN("a", "b", "c") \ + STOR("a", 0) \ + "add $12, %[src] \n\t" \ + "add $16, %[dst] \n\t" + +// Define a macro that initiates a three-way interleaved encoding round by +// preloading registers a, b and c from memory. +// The register graph shows which registers are in use during each step, and +// is a visual aid for choosing registers for that step. Symbol index: +// +// + indicates that a register is loaded by that step. +// | indicates that a register is in use and must not be touched. +// - indicates that a register is decommissioned by that step. +// x indicates that a register is used as a temporary by that step. +// V indicates that a register is an input or output to the macro. +// +#define ROUND_3_INIT() /* a b c d e f */ \ + LOAD("a", 0) /* + */ \ + SHUF("a", "d", "e") /* | + x */ \ + LOAD("b", 1) /* | + | */ \ + TRAN("a", "d", "e") /* | | - x */ \ + LOAD("c", 2) /* V V V */ + +// Define a macro that translates, shuffles and stores the input registers A, B +// and C, and preloads registers D, E and F for the next round. +// This macro can be arbitrarily daisy-chained by feeding output registers D, E +// and F back into the next round as input registers A, B and C. The macro +// carefully interleaves memory operations with data operations for optimal +// pipelined performance. + +#define ROUND_3(ROUND, A,B,C,D,E,F) /* A B C D E F */ \ + LOAD(D, (ROUND + 3)) /* V V V + */ \ + SHUF(B, E, F) /* | | | | + x */ \ + STOR(A, (ROUND + 0)) /* - | | | | */ \ + TRAN(B, E, F) /* | | | - x */ \ + LOAD(E, (ROUND + 4)) /* | | | + */ \ + SHUF(C, A, F) /* + | | | | x */ \ + STOR(B, (ROUND + 1)) /* | - | | | */ \ + TRAN(C, A, F) /* - | | | x */ \ + LOAD(F, (ROUND + 5)) /* | | | + */ \ + SHUF(D, A, B) /* + x | | | | */ \ + STOR(C, (ROUND + 2)) /* | - | | | */ \ + TRAN(D, A, B) /* - x V V V */ + +// Define a macro that terminates a ROUND_3 macro by taking pre-loaded +// registers D, E and F, and translating, shuffling and storing them. +#define ROUND_3_END(ROUND, A,B,C,D,E,F) /* A B C D E F */ \ + SHUF(E, A, B) /* + x V V V */ \ + STOR(D, (ROUND + 3)) /* | - | | */ \ + TRAN(E, A, B) /* - x | | */ \ + SHUF(F, C, D) /* + x | | */ \ + STOR(E, (ROUND + 4)) /* | - | */ \ + TRAN(F, C, D) /* - x | */ \ + STOR(F, (ROUND + 5)) /* - */ + +// Define a type A round. Inputs are a, b, and c, outputs are d, e, and f. 
+#define ROUND_3_A(ROUND) \ + ROUND_3(ROUND, "a", "b", "c", "d", "e", "f") + +// Define a type B round. Inputs and outputs are swapped with regard to type A. +#define ROUND_3_B(ROUND) \ + ROUND_3(ROUND, "d", "e", "f", "a", "b", "c") + +// Terminating macro for a type A round. +#define ROUND_3_A_LAST(ROUND) \ + ROUND_3_A(ROUND) \ + ROUND_3_END(ROUND, "a", "b", "c", "d", "e", "f") + +// Terminating macro for a type B round. +#define ROUND_3_B_LAST(ROUND) \ + ROUND_3_B(ROUND) \ + ROUND_3_END(ROUND, "d", "e", "f", "a", "b", "c") + +// Suppress clang's warning that the literal string in the asm statement is +// overlong (longer than the ISO-mandated minimum size of 4095 bytes for C99 +// compilers). It may be true, but the goal here is not C99 portability. +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Woverlength-strings" + +static inline void +enc_loop_avx (const uint8_t **s, size_t *slen, uint8_t **o, size_t *olen) +{ + // For a clearer explanation of the algorithm used by this function, + // please refer to the plain (not inline assembly) implementation. This + // function follows the same basic logic. + + if (*slen < 16) { + return; + } + + // Process blocks of 12 bytes at a time. Input is read in blocks of 16 + // bytes, so "reserve" four bytes from the input buffer to ensure that + // we never read beyond the end of the input buffer. + size_t rounds = (*slen - 4) / 12; + + *slen -= rounds * 12; // 12 bytes consumed per round + *olen += rounds * 16; // 16 bytes produced per round + + // Number of times to go through the 36x loop. + size_t loops = rounds / 36; + + // Number of rounds remaining after the 36x loop. + rounds %= 36; + + // Lookup tables. + const __m128i lut0 = _mm_set_epi8( + 10, 11, 9, 10, 7, 8, 6, 7, 4, 5, 3, 4, 1, 2, 0, 1); + + const __m128i lut1 = _mm_setr_epi8( + 65, 71, -4, -4, -4, -4, -4, -4, -4, -4, -4, -4, -19, -16, 0, 0); + + // Temporary registers. + __m128i a, b, c, d, e, f; + + __asm__ volatile ( + + // If there are 36 rounds or more, enter a 36x unrolled loop of + // interleaved encoding rounds. The rounds interleave memory + // operations (load/store) with data operations (table lookups, + // etc) to maximize pipeline throughput. + " test %[loops], %[loops] \n\t" + " jz 18f \n\t" + " jmp 36f \n\t" + " \n\t" + ".balign 64 \n\t" + "36: " ROUND_3_INIT() + " " ROUND_3_A( 0) + " " ROUND_3_B( 3) + " " ROUND_3_A( 6) + " " ROUND_3_B( 9) + " " ROUND_3_A(12) + " " ROUND_3_B(15) + " " ROUND_3_A(18) + " " ROUND_3_B(21) + " " ROUND_3_A(24) + " " ROUND_3_B(27) + " " ROUND_3_A_LAST(30) + " add $(12 * 36), %[src] \n\t" + " add $(16 * 36), %[dst] \n\t" + " dec %[loops] \n\t" + " jnz 36b \n\t" + + // Enter an 18x unrolled loop for rounds of 18 or more. + "18: cmp $18, %[rounds] \n\t" + " jl 9f \n\t" + " " ROUND_3_INIT() + " " ROUND_3_A(0) + " " ROUND_3_B(3) + " " ROUND_3_A(6) + " " ROUND_3_B(9) + " " ROUND_3_A_LAST(12) + " sub $18, %[rounds] \n\t" + " add $(12 * 18), %[src] \n\t" + " add $(16 * 18), %[dst] \n\t" + + // Enter a 9x unrolled loop for rounds of 9 or more. + "9: cmp $9, %[rounds] \n\t" + " jl 6f \n\t" + " " ROUND_3_INIT() + " " ROUND_3_A(0) + " " ROUND_3_B_LAST(3) + " sub $9, %[rounds] \n\t" + " add $(12 * 9), %[src] \n\t" + " add $(16 * 9), %[dst] \n\t" + + // Enter a 6x unrolled loop for rounds of 6 or more. + "6: cmp $6, %[rounds] \n\t" + " jl 55f \n\t" + " " ROUND_3_INIT() + " " ROUND_3_A_LAST(0) + " sub $6, %[rounds] \n\t" + " add $(12 * 6), %[src] \n\t" + " add $(16 * 6), %[dst] \n\t" + + // Dispatch the remaining rounds 0..5. 
+ "55: cmp $3, %[rounds] \n\t" + " jg 45f \n\t" + " je 3f \n\t" + " cmp $1, %[rounds] \n\t" + " jg 2f \n\t" + " je 1f \n\t" + " jmp 0f \n\t" + + "45: cmp $4, %[rounds] \n\t" + " je 4f \n\t" + + // Block of non-interlaced encoding rounds, which can each + // individually be jumped to. Rounds fall through to the next. + "5: " ROUND() + "4: " ROUND() + "3: " ROUND() + "2: " ROUND() + "1: " ROUND() + "0: \n\t" + + // Outputs (modified). + : [rounds] "+r" (rounds), + [loops] "+r" (loops), + [src] "+r" (*s), + [dst] "+r" (*o), + [a] "=&x" (a), + [b] "=&x" (b), + [c] "=&x" (c), + [d] "=&x" (d), + [e] "=&x" (e), + [f] "=&x" (f) + + // Inputs (not modified). + : [lut0] "x" (lut0), + [lut1] "x" (lut1), + [msk0] "x" (_mm_set1_epi32(0x0FC0FC00)), + [msk1] "x" (_mm_set1_epi32(0x04000040)), + [msk2] "x" (_mm_set1_epi32(0x003F03F0)), + [msk3] "x" (_mm_set1_epi32(0x01000010)), + [n51] "x" (_mm_set1_epi8(51)), + [n25] "x" (_mm_set1_epi8(25)) + + // Clobbers. + : "cc", "memory" + ); +} + +#pragma GCC diagnostic pop diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/codec.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/codec.c new file mode 100644 index 0000000..a54385b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/codec.c @@ -0,0 +1,58 @@ +#include +#include +#include + +#include "libbase64.h" +#include "../../tables/tables.h" +#include "../../codecs.h" +#include "config.h" +#include "../../env.h" + +#if HAVE_AVX2 +#include + +// Only enable inline assembly on supported compilers and on 64-bit CPUs. +#ifndef BASE64_AVX2_USE_ASM +# if (defined(__GNUC__) || defined(__clang__)) && BASE64_WORDSIZE == 64 +# define BASE64_AVX2_USE_ASM 1 +# else +# define BASE64_AVX2_USE_ASM 0 +# endif +#endif + +#include "./dec_reshuffle.c" +#include "./dec_loop.c" + +#if BASE64_AVX2_USE_ASM +# include "./enc_loop_asm.c" +#else +# include "./enc_translate.c" +# include "./enc_reshuffle.c" +# include "./enc_loop.c" +#endif + +#endif // HAVE_AVX2 + +void +base64_stream_encode_avx2 BASE64_ENC_PARAMS +{ +#if HAVE_AVX2 + #include "../generic/enc_head.c" + enc_loop_avx2(&s, &slen, &o, &olen); + #include "../generic/enc_tail.c" +#else + base64_enc_stub(state, src, srclen, out, outlen); +#endif +} + +int +base64_stream_decode_avx2 BASE64_DEC_PARAMS +{ +#if HAVE_AVX2 + #include "../generic/dec_head.c" + dec_loop_avx2(&s, &slen, &o, &olen); + #include "../generic/dec_tail.c" +#else + return base64_dec_stub(state, src, srclen, out, outlen); +#endif +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/dec_loop.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/dec_loop.c new file mode 100644 index 0000000..b8a4cca --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/dec_loop.c @@ -0,0 +1,110 @@ +static BASE64_FORCE_INLINE int +dec_loop_avx2_inner (const uint8_t **s, uint8_t **o, size_t *rounds) +{ + const __m256i lut_lo = _mm256_setr_epi8( + 0x15, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, + 0x11, 0x11, 0x13, 0x1A, 0x1B, 0x1B, 0x1B, 0x1A, + 0x15, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, + 0x11, 0x11, 0x13, 0x1A, 0x1B, 0x1B, 0x1B, 0x1A); + + const __m256i lut_hi = _mm256_setr_epi8( + 0x10, 0x10, 0x01, 0x02, 0x04, 0x08, 0x04, 0x08, + 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, + 0x10, 0x10, 0x01, 0x02, 0x04, 0x08, 0x04, 0x08, + 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10); + + const __m256i lut_roll = _mm256_setr_epi8( + 0, 16, 19, 4, -65, -65, -71, -71, + 0, 0, 0, 0, 0, 0, 
0, 0, + 0, 16, 19, 4, -65, -65, -71, -71, + 0, 0, 0, 0, 0, 0, 0, 0); + + const __m256i mask_2F = _mm256_set1_epi8(0x2F); + + // Load input: + __m256i str = _mm256_loadu_si256((__m256i *) *s); + + // See the SSSE3 decoder for an explanation of the algorithm. + const __m256i hi_nibbles = _mm256_and_si256(_mm256_srli_epi32(str, 4), mask_2F); + const __m256i lo_nibbles = _mm256_and_si256(str, mask_2F); + const __m256i hi = _mm256_shuffle_epi8(lut_hi, hi_nibbles); + const __m256i lo = _mm256_shuffle_epi8(lut_lo, lo_nibbles); + + if (!_mm256_testz_si256(lo, hi)) { + return 0; + } + + const __m256i eq_2F = _mm256_cmpeq_epi8(str, mask_2F); + const __m256i roll = _mm256_shuffle_epi8(lut_roll, _mm256_add_epi8(eq_2F, hi_nibbles)); + + // Now simply add the delta values to the input: + str = _mm256_add_epi8(str, roll); + + // Reshuffle the input to packed 12-byte output format: + str = dec_reshuffle(str); + + // Store the output: + _mm256_storeu_si256((__m256i *) *o, str); + + *s += 32; + *o += 24; + *rounds -= 1; + + return 1; +} + +static inline void +dec_loop_avx2 (const uint8_t **s, size_t *slen, uint8_t **o, size_t *olen) +{ + if (*slen < 45) { + return; + } + + // Process blocks of 32 bytes per round. Because 8 extra zero bytes are + // written after the output, ensure that there will be at least 13 + // bytes of input data left to cover the gap. (11 data bytes and up to + // two end-of-string markers.) + size_t rounds = (*slen - 13) / 32; + + *slen -= rounds * 32; // 32 bytes consumed per round + *olen += rounds * 24; // 24 bytes produced per round + + do { + if (rounds >= 8) { + if (dec_loop_avx2_inner(s, o, &rounds) && + dec_loop_avx2_inner(s, o, &rounds) && + dec_loop_avx2_inner(s, o, &rounds) && + dec_loop_avx2_inner(s, o, &rounds) && + dec_loop_avx2_inner(s, o, &rounds) && + dec_loop_avx2_inner(s, o, &rounds) && + dec_loop_avx2_inner(s, o, &rounds) && + dec_loop_avx2_inner(s, o, &rounds)) { + continue; + } + break; + } + if (rounds >= 4) { + if (dec_loop_avx2_inner(s, o, &rounds) && + dec_loop_avx2_inner(s, o, &rounds) && + dec_loop_avx2_inner(s, o, &rounds) && + dec_loop_avx2_inner(s, o, &rounds)) { + continue; + } + break; + } + if (rounds >= 2) { + if (dec_loop_avx2_inner(s, o, &rounds) && + dec_loop_avx2_inner(s, o, &rounds)) { + continue; + } + break; + } + dec_loop_avx2_inner(s, o, &rounds); + break; + + } while (rounds > 0); + + // Adjust for any rounds that were skipped: + *slen += rounds * 32; + *olen -= rounds * 24; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/dec_reshuffle.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/dec_reshuffle.c new file mode 100644 index 0000000..bc875ce --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/dec_reshuffle.c @@ -0,0 +1,34 @@ +static BASE64_FORCE_INLINE __m256i +dec_reshuffle (const __m256i in) +{ + // in, lower lane, bits, upper case are most significant bits, lower + // case are least significant bits: + // 00llllll 00kkkkLL 00jjKKKK 00JJJJJJ + // 00iiiiii 00hhhhII 00ggHHHH 00GGGGGG + // 00ffffff 00eeeeFF 00ddEEEE 00DDDDDD + // 00cccccc 00bbbbCC 00aaBBBB 00AAAAAA + + const __m256i merge_ab_and_bc = _mm256_maddubs_epi16(in, _mm256_set1_epi32(0x01400140)); + // 0000kkkk LLllllll 0000JJJJ JJjjKKKK + // 0000hhhh IIiiiiii 0000GGGG GGggHHHH + // 0000eeee FFffffff 0000DDDD DDddEEEE + // 0000bbbb CCcccccc 0000AAAA AAaaBBBB + + __m256i out = _mm256_madd_epi16(merge_ab_and_bc, _mm256_set1_epi32(0x00011000)); + // 00000000 JJJJJJjj KKKKkkkk LLllllll + // 00000000 
GGGGGGgg HHHHhhhh IIiiiiii + // 00000000 DDDDDDdd EEEEeeee FFffffff + // 00000000 AAAAAAaa BBBBbbbb CCcccccc + + // Pack bytes together in each lane: + out = _mm256_shuffle_epi8(out, _mm256_setr_epi8( + 2, 1, 0, 6, 5, 4, 10, 9, 8, 14, 13, 12, -1, -1, -1, -1, + 2, 1, 0, 6, 5, 4, 10, 9, 8, 14, 13, 12, -1, -1, -1, -1)); + // 00000000 00000000 00000000 00000000 + // LLllllll KKKKkkkk JJJJJJjj IIiiiiii + // HHHHhhhh GGGGGGgg FFffffff EEEEeeee + // DDDDDDdd CCcccccc BBBBbbbb AAAAAAaa + + // Pack lanes: + return _mm256_permutevar8x32_epi32(out, _mm256_setr_epi32(0, 1, 2, 4, 5, 6, -1, -1)); +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/enc_loop.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/enc_loop.c new file mode 100644 index 0000000..6f4aa0a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/enc_loop.c @@ -0,0 +1,89 @@ +static BASE64_FORCE_INLINE void +enc_loop_avx2_inner_first (const uint8_t **s, uint8_t **o) +{ + // First load is done at s - 0 to not get a segfault: + __m256i src = _mm256_loadu_si256((__m256i *) *s); + + // Shift by 4 bytes, as required by enc_reshuffle: + src = _mm256_permutevar8x32_epi32(src, _mm256_setr_epi32(0, 0, 1, 2, 3, 4, 5, 6)); + + // Reshuffle, translate, store: + src = enc_reshuffle(src); + src = enc_translate(src); + _mm256_storeu_si256((__m256i *) *o, src); + + // Subsequent loads will be done at s - 4, set pointer for next round: + *s += 20; + *o += 32; +} + +static BASE64_FORCE_INLINE void +enc_loop_avx2_inner (const uint8_t **s, uint8_t **o) +{ + // Load input: + __m256i src = _mm256_loadu_si256((__m256i *) *s); + + // Reshuffle, translate, store: + src = enc_reshuffle(src); + src = enc_translate(src); + _mm256_storeu_si256((__m256i *) *o, src); + + *s += 24; + *o += 32; +} + +static inline void +enc_loop_avx2 (const uint8_t **s, size_t *slen, uint8_t **o, size_t *olen) +{ + if (*slen < 32) { + return; + } + + // Process blocks of 24 bytes at a time. 
Because blocks are loaded 32 + // bytes at a time an offset of -4, ensure that there will be at least + // 4 remaining bytes after the last round, so that the final read will + // not pass beyond the bounds of the input buffer: + size_t rounds = (*slen - 4) / 24; + + *slen -= rounds * 24; // 24 bytes consumed per round + *olen += rounds * 32; // 32 bytes produced per round + + // The first loop iteration requires special handling to ensure that + // the read, which is done at an offset, does not underflow the buffer: + enc_loop_avx2_inner_first(s, o); + rounds--; + + while (rounds > 0) { + if (rounds >= 8) { + enc_loop_avx2_inner(s, o); + enc_loop_avx2_inner(s, o); + enc_loop_avx2_inner(s, o); + enc_loop_avx2_inner(s, o); + enc_loop_avx2_inner(s, o); + enc_loop_avx2_inner(s, o); + enc_loop_avx2_inner(s, o); + enc_loop_avx2_inner(s, o); + rounds -= 8; + continue; + } + if (rounds >= 4) { + enc_loop_avx2_inner(s, o); + enc_loop_avx2_inner(s, o); + enc_loop_avx2_inner(s, o); + enc_loop_avx2_inner(s, o); + rounds -= 4; + continue; + } + if (rounds >= 2) { + enc_loop_avx2_inner(s, o); + enc_loop_avx2_inner(s, o); + rounds -= 2; + continue; + } + enc_loop_avx2_inner(s, o); + break; + } + + // Add the offset back: + *s += 4; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/enc_loop_asm.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/enc_loop_asm.c new file mode 100644 index 0000000..eb775a1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/enc_loop_asm.c @@ -0,0 +1,291 @@ +// Apologies in advance for combining the preprocessor with inline assembly, +// two notoriously gnarly parts of C, but it was necessary to avoid a lot of +// code repetition. The preprocessor is used to template large sections of +// inline assembly that differ only in the registers used. If the code was +// written out by hand, it would become very large and hard to audit. + +// Generate a block of inline assembly that loads register R0 from memory. The +// offset at which the register is loaded is set by the given round and a +// constant offset. +#define LOAD(R0, ROUND, OFFSET) \ + "vlddqu ("#ROUND" * 24 + "#OFFSET")(%[src]), %["R0"] \n\t" + +// Generate a block of inline assembly that deinterleaves and shuffles register +// R0 using preloaded constants. Outputs in R0 and R1. +#define SHUF(R0, R1, R2) \ + "vpshufb %[lut0], %["R0"], %["R1"] \n\t" \ + "vpand %["R1"], %[msk0], %["R2"] \n\t" \ + "vpand %["R1"], %[msk2], %["R1"] \n\t" \ + "vpmulhuw %["R2"], %[msk1], %["R2"] \n\t" \ + "vpmullw %["R1"], %[msk3], %["R1"] \n\t" \ + "vpor %["R1"], %["R2"], %["R1"] \n\t" + +// Generate a block of inline assembly that takes R0 and R1 and translates +// their contents to the base64 alphabet, using preloaded constants. +#define TRAN(R0, R1, R2) \ + "vpsubusb %[n51], %["R1"], %["R0"] \n\t" \ + "vpcmpgtb %[n25], %["R1"], %["R2"] \n\t" \ + "vpsubb %["R2"], %["R0"], %["R0"] \n\t" \ + "vpshufb %["R0"], %[lut1], %["R2"] \n\t" \ + "vpaddb %["R1"], %["R2"], %["R0"] \n\t" + +// Generate a block of inline assembly that stores the given register R0 at an +// offset set by the given round. +#define STOR(R0, ROUND) \ + "vmovdqu %["R0"], ("#ROUND" * 32)(%[dst]) \n\t" + +// Generate a block of inline assembly that generates a single self-contained +// encoder round: fetch the data, process it, and store the result. Then update +// the source and destination pointers. 
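/* ----------------------------------------------------------------------
 * Illustrative aside (not part of the vendored file): the bounds reasoning
 * behind the driver's "rounds = (*slen - 4) / 24". Each expansion of the
 * ROUND() macro below consumes 24 source bytes and produces 32 output
 * bytes, but its 32-byte load starts at src - 4, so it also reads the 4
 * bytes *after* the current block. Reserving 4 bytes when computing the
 * round count keeps that final load inside the input buffer. A sketch:
 * -------------------------------------------------------------------- */
#include <assert.h>
#include <stddef.h>
#include <stdio.h>

static size_t sketch_avx2_enc_rounds(size_t slen) {
    /* The caller has already returned early for slen < 32. */
    size_t rounds = (slen - 4) / 24;

    /* 24 * rounds <= slen - 4, so after all blocks are consumed at least
     * 4 unread bytes remain to cover offsets [24, 27] of the final block
     * touched by its 32-byte load at offset -4. */
    assert(24 * rounds + 4 <= slen);
    return rounds;
}

int main(void) {
    printf("slen=100 -> rounds=%zu\n", sketch_avx2_enc_rounds(100));  /* 4 rounds; 96 + 4 <= 100 */
    return 0;
}
/* --------------------------- end of aside ----------------------------- */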
+#define ROUND() \ + LOAD("a", 0, -4) \ + SHUF("a", "b", "c") \ + TRAN("a", "b", "c") \ + STOR("a", 0) \ + "add $24, %[src] \n\t" \ + "add $32, %[dst] \n\t" + +// Define a macro that initiates a three-way interleaved encoding round by +// preloading registers a, b and c from memory. +// The register graph shows which registers are in use during each step, and +// is a visual aid for choosing registers for that step. Symbol index: +// +// + indicates that a register is loaded by that step. +// | indicates that a register is in use and must not be touched. +// - indicates that a register is decommissioned by that step. +// x indicates that a register is used as a temporary by that step. +// V indicates that a register is an input or output to the macro. +// +#define ROUND_3_INIT() /* a b c d e f */ \ + LOAD("a", 0, -4) /* + */ \ + SHUF("a", "d", "e") /* | + x */ \ + LOAD("b", 1, -4) /* | + | */ \ + TRAN("a", "d", "e") /* | | - x */ \ + LOAD("c", 2, -4) /* V V V */ + +// Define a macro that translates, shuffles and stores the input registers A, B +// and C, and preloads registers D, E and F for the next round. +// This macro can be arbitrarily daisy-chained by feeding output registers D, E +// and F back into the next round as input registers A, B and C. The macro +// carefully interleaves memory operations with data operations for optimal +// pipelined performance. + +#define ROUND_3(ROUND, A,B,C,D,E,F) /* A B C D E F */ \ + LOAD(D, (ROUND + 3), -4) /* V V V + */ \ + SHUF(B, E, F) /* | | | | + x */ \ + STOR(A, (ROUND + 0)) /* - | | | | */ \ + TRAN(B, E, F) /* | | | - x */ \ + LOAD(E, (ROUND + 4), -4) /* | | | + */ \ + SHUF(C, A, F) /* + | | | | x */ \ + STOR(B, (ROUND + 1)) /* | - | | | */ \ + TRAN(C, A, F) /* - | | | x */ \ + LOAD(F, (ROUND + 5), -4) /* | | | + */ \ + SHUF(D, A, B) /* + x | | | | */ \ + STOR(C, (ROUND + 2)) /* | - | | | */ \ + TRAN(D, A, B) /* - x V V V */ + +// Define a macro that terminates a ROUND_3 macro by taking pre-loaded +// registers D, E and F, and translating, shuffling and storing them. +#define ROUND_3_END(ROUND, A,B,C,D,E,F) /* A B C D E F */ \ + SHUF(E, A, B) /* + x V V V */ \ + STOR(D, (ROUND + 3)) /* | - | | */ \ + TRAN(E, A, B) /* - x | | */ \ + SHUF(F, C, D) /* + x | | */ \ + STOR(E, (ROUND + 4)) /* | - | */ \ + TRAN(F, C, D) /* - x | */ \ + STOR(F, (ROUND + 5)) /* - */ + +// Define a type A round. Inputs are a, b, and c, outputs are d, e, and f. +#define ROUND_3_A(ROUND) \ + ROUND_3(ROUND, "a", "b", "c", "d", "e", "f") + +// Define a type B round. Inputs and outputs are swapped with regard to type A. +#define ROUND_3_B(ROUND) \ + ROUND_3(ROUND, "d", "e", "f", "a", "b", "c") + +// Terminating macro for a type A round. +#define ROUND_3_A_LAST(ROUND) \ + ROUND_3_A(ROUND) \ + ROUND_3_END(ROUND, "a", "b", "c", "d", "e", "f") + +// Terminating macro for a type B round. +#define ROUND_3_B_LAST(ROUND) \ + ROUND_3_B(ROUND) \ + ROUND_3_END(ROUND, "d", "e", "f", "a", "b", "c") + +// Suppress clang's warning that the literal string in the asm statement is +// overlong (longer than the ISO-mandated minimum size of 4095 bytes for C99 +// compilers). It may be true, but the goal here is not C99 portability. +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Woverlength-strings" + +static inline void +enc_loop_avx2 (const uint8_t **s, size_t *slen, uint8_t **o, size_t *olen) +{ + // For a clearer explanation of the algorithm used by this function, + // please refer to the plain (not inline assembly) implementation. 
This + // function follows the same basic logic. + + if (*slen < 32) { + return; + } + + // Process blocks of 24 bytes at a time. Because blocks are loaded 32 + // bytes at a time an offset of -4, ensure that there will be at least + // 4 remaining bytes after the last round, so that the final read will + // not pass beyond the bounds of the input buffer. + size_t rounds = (*slen - 4) / 24; + + *slen -= rounds * 24; // 24 bytes consumed per round + *olen += rounds * 32; // 32 bytes produced per round + + // Pre-decrement the number of rounds to get the number of rounds + // *after* the first round, which is handled as a special case. + rounds--; + + // Number of times to go through the 36x loop. + size_t loops = rounds / 36; + + // Number of rounds remaining after the 36x loop. + rounds %= 36; + + // Lookup tables. + const __m256i lut0 = _mm256_set_epi8( + 10, 11, 9, 10, 7, 8, 6, 7, 4, 5, 3, 4, 1, 2, 0, 1, + 14, 15, 13, 14, 11, 12, 10, 11, 8, 9, 7, 8, 5, 6, 4, 5); + + const __m256i lut1 = _mm256_setr_epi8( + 65, 71, -4, -4, -4, -4, -4, -4, -4, -4, -4, -4, -19, -16, 0, 0, + 65, 71, -4, -4, -4, -4, -4, -4, -4, -4, -4, -4, -19, -16, 0, 0); + + // Temporary registers. + __m256i a, b, c, d, e; + + // Temporary register f doubles as the shift mask for the first round. + __m256i f = _mm256_setr_epi32(0, 0, 1, 2, 3, 4, 5, 6); + + __asm__ volatile ( + + // The first loop iteration requires special handling to ensure + // that the read, which is normally done at an offset of -4, + // does not underflow the buffer. Load the buffer at an offset + // of 0 and permute the input to achieve the same effect. + LOAD("a", 0, 0) + "vpermd %[a], %[f], %[a] \n\t" + + // Perform the standard shuffling and translation steps. + SHUF("a", "b", "c") + TRAN("a", "b", "c") + + // Store the result and increment the source and dest pointers. + "vmovdqu %[a], (%[dst]) \n\t" + "add $24, %[src] \n\t" + "add $32, %[dst] \n\t" + + // If there are 36 rounds or more, enter a 36x unrolled loop of + // interleaved encoding rounds. The rounds interleave memory + // operations (load/store) with data operations (table lookups, + // etc) to maximize pipeline throughput. + " test %[loops], %[loops] \n\t" + " jz 18f \n\t" + " jmp 36f \n\t" + " \n\t" + ".balign 64 \n\t" + "36: " ROUND_3_INIT() + " " ROUND_3_A( 0) + " " ROUND_3_B( 3) + " " ROUND_3_A( 6) + " " ROUND_3_B( 9) + " " ROUND_3_A(12) + " " ROUND_3_B(15) + " " ROUND_3_A(18) + " " ROUND_3_B(21) + " " ROUND_3_A(24) + " " ROUND_3_B(27) + " " ROUND_3_A_LAST(30) + " add $(24 * 36), %[src] \n\t" + " add $(32 * 36), %[dst] \n\t" + " dec %[loops] \n\t" + " jnz 36b \n\t" + + // Enter an 18x unrolled loop for rounds of 18 or more. + "18: cmp $18, %[rounds] \n\t" + " jl 9f \n\t" + " " ROUND_3_INIT() + " " ROUND_3_A(0) + " " ROUND_3_B(3) + " " ROUND_3_A(6) + " " ROUND_3_B(9) + " " ROUND_3_A_LAST(12) + " sub $18, %[rounds] \n\t" + " add $(24 * 18), %[src] \n\t" + " add $(32 * 18), %[dst] \n\t" + + // Enter a 9x unrolled loop for rounds of 9 or more. + "9: cmp $9, %[rounds] \n\t" + " jl 6f \n\t" + " " ROUND_3_INIT() + " " ROUND_3_A(0) + " " ROUND_3_B_LAST(3) + " sub $9, %[rounds] \n\t" + " add $(24 * 9), %[src] \n\t" + " add $(32 * 9), %[dst] \n\t" + + // Enter a 6x unrolled loop for rounds of 6 or more. + "6: cmp $6, %[rounds] \n\t" + " jl 55f \n\t" + " " ROUND_3_INIT() + " " ROUND_3_A_LAST(0) + " sub $6, %[rounds] \n\t" + " add $(24 * 6), %[src] \n\t" + " add $(32 * 6), %[dst] \n\t" + + // Dispatch the remaining rounds 0..5. 
+ "55: cmp $3, %[rounds] \n\t" + " jg 45f \n\t" + " je 3f \n\t" + " cmp $1, %[rounds] \n\t" + " jg 2f \n\t" + " je 1f \n\t" + " jmp 0f \n\t" + + "45: cmp $4, %[rounds] \n\t" + " je 4f \n\t" + + // Block of non-interlaced encoding rounds, which can each + // individually be jumped to. Rounds fall through to the next. + "5: " ROUND() + "4: " ROUND() + "3: " ROUND() + "2: " ROUND() + "1: " ROUND() + "0: \n\t" + + // Outputs (modified). + : [rounds] "+r" (rounds), + [loops] "+r" (loops), + [src] "+r" (*s), + [dst] "+r" (*o), + [a] "=&x" (a), + [b] "=&x" (b), + [c] "=&x" (c), + [d] "=&x" (d), + [e] "=&x" (e), + [f] "+x" (f) + + // Inputs (not modified). + : [lut0] "x" (lut0), + [lut1] "x" (lut1), + [msk0] "x" (_mm256_set1_epi32(0x0FC0FC00)), + [msk1] "x" (_mm256_set1_epi32(0x04000040)), + [msk2] "x" (_mm256_set1_epi32(0x003F03F0)), + [msk3] "x" (_mm256_set1_epi32(0x01000010)), + [n51] "x" (_mm256_set1_epi8(51)), + [n25] "x" (_mm256_set1_epi8(25)) + + // Clobbers. + : "cc", "memory" + ); +} + +#pragma GCC diagnostic pop diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/enc_reshuffle.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/enc_reshuffle.c new file mode 100644 index 0000000..82c659b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/enc_reshuffle.c @@ -0,0 +1,83 @@ +static BASE64_FORCE_INLINE __m256i +enc_reshuffle (const __m256i input) +{ + // Translation of the SSSE3 reshuffling algorithm to AVX2. This one + // works with shifted (4 bytes) input in order to be able to work + // efficiently in the two 128-bit lanes. + + // Input, bytes MSB to LSB: + // 0 0 0 0 x w v u t s r q p o n m + // l k j i h g f e d c b a 0 0 0 0 + + const __m256i in = _mm256_shuffle_epi8(input, _mm256_set_epi8( + 10, 11, 9, 10, + 7, 8, 6, 7, + 4, 5, 3, 4, + 1, 2, 0, 1, + + 14, 15, 13, 14, + 11, 12, 10, 11, + 8, 9, 7, 8, + 5, 6, 4, 5)); + // in, bytes MSB to LSB: + // w x v w + // t u s t + // q r p q + // n o m n + // k l j k + // h i g h + // e f d e + // b c a b + + const __m256i t0 = _mm256_and_si256(in, _mm256_set1_epi32(0x0FC0FC00)); + // bits, upper case are most significant bits, lower case are least + // significant bits. 
+ // 0000wwww XX000000 VVVVVV00 00000000 + // 0000tttt UU000000 SSSSSS00 00000000 + // 0000qqqq RR000000 PPPPPP00 00000000 + // 0000nnnn OO000000 MMMMMM00 00000000 + // 0000kkkk LL000000 JJJJJJ00 00000000 + // 0000hhhh II000000 GGGGGG00 00000000 + // 0000eeee FF000000 DDDDDD00 00000000 + // 0000bbbb CC000000 AAAAAA00 00000000 + + const __m256i t1 = _mm256_mulhi_epu16(t0, _mm256_set1_epi32(0x04000040)); + // 00000000 00wwwwXX 00000000 00VVVVVV + // 00000000 00ttttUU 00000000 00SSSSSS + // 00000000 00qqqqRR 00000000 00PPPPPP + // 00000000 00nnnnOO 00000000 00MMMMMM + // 00000000 00kkkkLL 00000000 00JJJJJJ + // 00000000 00hhhhII 00000000 00GGGGGG + // 00000000 00eeeeFF 00000000 00DDDDDD + // 00000000 00bbbbCC 00000000 00AAAAAA + + const __m256i t2 = _mm256_and_si256(in, _mm256_set1_epi32(0x003F03F0)); + // 00000000 00xxxxxx 000000vv WWWW0000 + // 00000000 00uuuuuu 000000ss TTTT0000 + // 00000000 00rrrrrr 000000pp QQQQ0000 + // 00000000 00oooooo 000000mm NNNN0000 + // 00000000 00llllll 000000jj KKKK0000 + // 00000000 00iiiiii 000000gg HHHH0000 + // 00000000 00ffffff 000000dd EEEE0000 + // 00000000 00cccccc 000000aa BBBB0000 + + const __m256i t3 = _mm256_mullo_epi16(t2, _mm256_set1_epi32(0x01000010)); + // 00xxxxxx 00000000 00vvWWWW 00000000 + // 00uuuuuu 00000000 00ssTTTT 00000000 + // 00rrrrrr 00000000 00ppQQQQ 00000000 + // 00oooooo 00000000 00mmNNNN 00000000 + // 00llllll 00000000 00jjKKKK 00000000 + // 00iiiiii 00000000 00ggHHHH 00000000 + // 00ffffff 00000000 00ddEEEE 00000000 + // 00cccccc 00000000 00aaBBBB 00000000 + + return _mm256_or_si256(t1, t3); + // 00xxxxxx 00wwwwXX 00vvWWWW 00VVVVVV + // 00uuuuuu 00ttttUU 00ssTTTT 00SSSSSS + // 00rrrrrr 00qqqqRR 00ppQQQQ 00PPPPPP + // 00oooooo 00nnnnOO 00mmNNNN 00MMMMMM + // 00llllll 00kkkkLL 00jjKKKK 00JJJJJJ + // 00iiiiii 00hhhhII 00ggHHHH 00GGGGGG + // 00ffffff 00eeeeFF 00ddEEEE 00DDDDDD + // 00cccccc 00bbbbCC 00aaBBBB 00AAAAAA +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/enc_translate.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/enc_translate.c new file mode 100644 index 0000000..370da98 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx2/enc_translate.c @@ -0,0 +1,30 @@ +static BASE64_FORCE_INLINE __m256i +enc_translate (const __m256i in) +{ + // A lookup table containing the absolute offsets for all ranges: + const __m256i lut = _mm256_setr_epi8( + 65, 71, -4, -4, -4, -4, -4, -4, -4, -4, -4, -4, -19, -16, 0, 0, + 65, 71, -4, -4, -4, -4, -4, -4, -4, -4, -4, -4, -19, -16, 0, 0); + + // Translate values 0..63 to the Base64 alphabet. There are five sets: + // # From To Abs Index Characters + // 0 [0..25] [65..90] +65 0 ABCDEFGHIJKLMNOPQRSTUVWXYZ + // 1 [26..51] [97..122] +71 1 abcdefghijklmnopqrstuvwxyz + // 2 [52..61] [48..57] -4 [2..11] 0123456789 + // 3 [62] [43] -19 12 + + // 4 [63] [47] -16 13 / + + // Create LUT indices from the input. The index for range #0 is right, + // others are 1 less than expected: + __m256i indices = _mm256_subs_epu8(in, _mm256_set1_epi8(51)); + + // mask is 0xFF (-1) for range #[1..4] and 0x00 for range #0: + const __m256i mask = _mm256_cmpgt_epi8(in, _mm256_set1_epi8(25)); + + // Subtract -1, so add 1 to indices for range #[1..4]. 
All indices are + // now correct: + indices = _mm256_sub_epi8(indices, mask); + + // Add offsets to input values: + return _mm256_add_epi8(in, _mm256_shuffle_epi8(lut, indices)); +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx512/codec.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx512/codec.c new file mode 100644 index 0000000..9821082 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx512/codec.c @@ -0,0 +1,44 @@ +#include +#include +#include + +#include "libbase64.h" +#include "../../tables/tables.h" +#include "../../codecs.h" +#include "config.h" +#include "../../env.h" + +#if HAVE_AVX512 +#include + +#include "../avx2/dec_reshuffle.c" +#include "../avx2/dec_loop.c" +#include "enc_reshuffle_translate.c" +#include "enc_loop.c" + +#endif // HAVE_AVX512 + +void +base64_stream_encode_avx512 BASE64_ENC_PARAMS +{ +#if HAVE_AVX512 + #include "../generic/enc_head.c" + enc_loop_avx512(&s, &slen, &o, &olen); + #include "../generic/enc_tail.c" +#else + base64_enc_stub(state, src, srclen, out, outlen); +#endif +} + +// Reuse AVX2 decoding. Not supporting AVX512 at present +int +base64_stream_decode_avx512 BASE64_DEC_PARAMS +{ +#if HAVE_AVX512 + #include "../generic/dec_head.c" + dec_loop_avx2(&s, &slen, &o, &olen); + #include "../generic/dec_tail.c" +#else + return base64_dec_stub(state, src, srclen, out, outlen); +#endif +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx512/enc_loop.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx512/enc_loop.c new file mode 100644 index 0000000..cb44696 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx512/enc_loop.c @@ -0,0 +1,61 @@ +static BASE64_FORCE_INLINE void +enc_loop_avx512_inner (const uint8_t **s, uint8_t **o) +{ + // Load input. + __m512i src = _mm512_loadu_si512((__m512i *) *s); + + // Reshuffle, translate, store. + src = enc_reshuffle_translate(src); + _mm512_storeu_si512((__m512i *) *o, src); + + *s += 48; + *o += 64; +} + +static inline void +enc_loop_avx512 (const uint8_t **s, size_t *slen, uint8_t **o, size_t *olen) +{ + if (*slen < 64) { + return; + } + + // Process blocks of 48 bytes at a time. Because blocks are loaded 64 + // bytes at a time, ensure that there will be at least 24 remaining + // bytes after the last round, so that the final read will not pass + // beyond the bounds of the input buffer. 
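+	// (Concretely: each round advances the source pointer by 48 bytes but
+	// loads 64, i.e. 16 bytes past the block it consumes, so keeping at
+	// least 24 bytes in reserve after the last round keeps that final
+	// over-read inside the input buffer.)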
+ size_t rounds = (*slen - 24) / 48; + + *slen -= rounds * 48; // 48 bytes consumed per round + *olen += rounds * 64; // 64 bytes produced per round + + while (rounds > 0) { + if (rounds >= 8) { + enc_loop_avx512_inner(s, o); + enc_loop_avx512_inner(s, o); + enc_loop_avx512_inner(s, o); + enc_loop_avx512_inner(s, o); + enc_loop_avx512_inner(s, o); + enc_loop_avx512_inner(s, o); + enc_loop_avx512_inner(s, o); + enc_loop_avx512_inner(s, o); + rounds -= 8; + continue; + } + if (rounds >= 4) { + enc_loop_avx512_inner(s, o); + enc_loop_avx512_inner(s, o); + enc_loop_avx512_inner(s, o); + enc_loop_avx512_inner(s, o); + rounds -= 4; + continue; + } + if (rounds >= 2) { + enc_loop_avx512_inner(s, o); + enc_loop_avx512_inner(s, o); + rounds -= 2; + continue; + } + enc_loop_avx512_inner(s, o); + break; + } +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx512/enc_reshuffle_translate.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx512/enc_reshuffle_translate.c new file mode 100644 index 0000000..ae12b3a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/avx512/enc_reshuffle_translate.c @@ -0,0 +1,50 @@ +// AVX512 algorithm is based on permutevar and multishift. The code is based on +// https://github.com/WojciechMula/base64simd which is under BSD-2 license. + +static BASE64_FORCE_INLINE __m512i +enc_reshuffle_translate (const __m512i input) +{ + // 32-bit input + // [ 0 0 0 0 0 0 0 0|c1 c0 d5 d4 d3 d2 d1 d0| + // b3 b2 b1 b0 c5 c4 c3 c2|a5 a4 a3 a2 a1 a0 b5 b4] + // output order [1, 2, 0, 1] + // [b3 b2 b1 b0 c5 c4 c3 c2|c1 c0 d5 d4 d3 d2 d1 d0| + // a5 a4 a3 a2 a1 a0 b5 b4|b3 b2 b1 b0 c3 c2 c1 c0] + + const __m512i shuffle_input = _mm512_setr_epi32(0x01020001, + 0x04050304, + 0x07080607, + 0x0a0b090a, + 0x0d0e0c0d, + 0x10110f10, + 0x13141213, + 0x16171516, + 0x191a1819, + 0x1c1d1b1c, + 0x1f201e1f, + 0x22232122, + 0x25262425, + 0x28292728, + 0x2b2c2a2b, + 0x2e2f2d2e); + + // Reorder bytes + // [b3 b2 b1 b0 c5 c4 c3 c2|c1 c0 d5 d4 d3 d2 d1 d0| + // a5 a4 a3 a2 a1 a0 b5 b4|b3 b2 b1 b0 c3 c2 c1 c0] + const __m512i in = _mm512_permutexvar_epi8(shuffle_input, input); + + // After multishift a single 32-bit lane has following layout + // [c1 c0 d5 d4 d3 d2 d1 d0|b1 b0 c5 c4 c3 c2 c1 c0| + // a1 a0 b5 b4 b3 b2 b1 b0|d1 d0 a5 a4 a3 a2 a1 a0] + // (a = [10:17], b = [4:11], c = [22:27], d = [16:21]) + + // 48, 54, 36, 42, 16, 22, 4, 10 + const __m512i shifts = _mm512_set1_epi64(0x3036242a1016040alu); + __m512i shuffled_in = _mm512_multishift_epi64_epi8(shifts, in); + + // Translate immediately after reshuffled. + const __m512i lookup = _mm512_loadu_si512(base64_table_enc_6bit); + + // Translation 6-bit values to ASCII. 
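+	// (With a single 64-byte table, vpermb uses only the low six bits of
+	// each index byte, so the reshuffled 6-bit values select alphabet
+	// characters directly; no range-based offset fixup is needed here,
+	// unlike the AVX2 translate step.)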
+ return _mm512_permutexvar_epi8(shuffled_in, lookup); +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/32/dec_loop.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/32/dec_loop.c new file mode 100644 index 0000000..aa290d7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/32/dec_loop.c @@ -0,0 +1,86 @@ +static BASE64_FORCE_INLINE int +dec_loop_generic_32_inner (const uint8_t **s, uint8_t **o, size_t *rounds) +{ + const uint32_t str + = base64_table_dec_32bit_d0[(*s)[0]] + | base64_table_dec_32bit_d1[(*s)[1]] + | base64_table_dec_32bit_d2[(*s)[2]] + | base64_table_dec_32bit_d3[(*s)[3]]; + +#if BASE64_LITTLE_ENDIAN + + // LUTs for little-endian set MSB in case of invalid character: + if (str & UINT32_C(0x80000000)) { + return 0; + } +#else + // LUTs for big-endian set LSB in case of invalid character: + if (str & UINT32_C(1)) { + return 0; + } +#endif + // Store the output: + memcpy(*o, &str, sizeof (str)); + + *s += 4; + *o += 3; + *rounds -= 1; + + return 1; +} + +static inline void +dec_loop_generic_32 (const uint8_t **s, size_t *slen, uint8_t **o, size_t *olen) +{ + if (*slen < 8) { + return; + } + + // Process blocks of 4 bytes per round. Because one extra zero byte is + // written after the output, ensure that there will be at least 4 bytes + // of input data left to cover the gap. (Two data bytes and up to two + // end-of-string markers.) + size_t rounds = (*slen - 4) / 4; + + *slen -= rounds * 4; // 4 bytes consumed per round + *olen += rounds * 3; // 3 bytes produced per round + + do { + if (rounds >= 8) { + if (dec_loop_generic_32_inner(s, o, &rounds) && + dec_loop_generic_32_inner(s, o, &rounds) && + dec_loop_generic_32_inner(s, o, &rounds) && + dec_loop_generic_32_inner(s, o, &rounds) && + dec_loop_generic_32_inner(s, o, &rounds) && + dec_loop_generic_32_inner(s, o, &rounds) && + dec_loop_generic_32_inner(s, o, &rounds) && + dec_loop_generic_32_inner(s, o, &rounds)) { + continue; + } + break; + } + if (rounds >= 4) { + if (dec_loop_generic_32_inner(s, o, &rounds) && + dec_loop_generic_32_inner(s, o, &rounds) && + dec_loop_generic_32_inner(s, o, &rounds) && + dec_loop_generic_32_inner(s, o, &rounds)) { + continue; + } + break; + } + if (rounds >= 2) { + if (dec_loop_generic_32_inner(s, o, &rounds) && + dec_loop_generic_32_inner(s, o, &rounds)) { + continue; + } + break; + } + dec_loop_generic_32_inner(s, o, &rounds); + break; + + } while (rounds > 0); + + // Adjust for any rounds that were skipped: + *slen += rounds * 4; + *olen -= rounds * 3; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/32/enc_loop.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/32/enc_loop.c new file mode 100644 index 0000000..b5e6eef --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/32/enc_loop.c @@ -0,0 +1,73 @@ +static BASE64_FORCE_INLINE void +enc_loop_generic_32_inner (const uint8_t **s, uint8_t **o) +{ + uint32_t src; + + // Load input: + memcpy(&src, *s, sizeof (src)); + + // Reorder to 32-bit big-endian, if not already in that format. 
The + // workset must be in big-endian, otherwise the shifted bits do not + // carry over properly among adjacent bytes: + src = BASE64_HTOBE32(src); + + // Two indices for the 12-bit lookup table: + const size_t index0 = (src >> 20) & 0xFFFU; + const size_t index1 = (src >> 8) & 0xFFFU; + + // Table lookup and store: + memcpy(*o + 0, base64_table_enc_12bit + index0, 2); + memcpy(*o + 2, base64_table_enc_12bit + index1, 2); + + *s += 3; + *o += 4; +} + +static inline void +enc_loop_generic_32 (const uint8_t **s, size_t *slen, uint8_t **o, size_t *olen) +{ + if (*slen < 4) { + return; + } + + // Process blocks of 3 bytes at a time. Because blocks are loaded 4 + // bytes at a time, ensure that there will be at least one remaining + // byte after the last round, so that the final read will not pass + // beyond the bounds of the input buffer: + size_t rounds = (*slen - 1) / 3; + + *slen -= rounds * 3; // 3 bytes consumed per round + *olen += rounds * 4; // 4 bytes produced per round + + do { + if (rounds >= 8) { + enc_loop_generic_32_inner(s, o); + enc_loop_generic_32_inner(s, o); + enc_loop_generic_32_inner(s, o); + enc_loop_generic_32_inner(s, o); + enc_loop_generic_32_inner(s, o); + enc_loop_generic_32_inner(s, o); + enc_loop_generic_32_inner(s, o); + enc_loop_generic_32_inner(s, o); + rounds -= 8; + continue; + } + if (rounds >= 4) { + enc_loop_generic_32_inner(s, o); + enc_loop_generic_32_inner(s, o); + enc_loop_generic_32_inner(s, o); + enc_loop_generic_32_inner(s, o); + rounds -= 4; + continue; + } + if (rounds >= 2) { + enc_loop_generic_32_inner(s, o); + enc_loop_generic_32_inner(s, o); + rounds -= 2; + continue; + } + enc_loop_generic_32_inner(s, o); + break; + + } while (rounds > 0); +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/64/enc_loop.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/64/enc_loop.c new file mode 100644 index 0000000..e6a29cd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/64/enc_loop.c @@ -0,0 +1,77 @@ +static BASE64_FORCE_INLINE void +enc_loop_generic_64_inner (const uint8_t **s, uint8_t **o) +{ + uint64_t src; + + // Load input: + memcpy(&src, *s, sizeof (src)); + + // Reorder to 64-bit big-endian, if not already in that format. The + // workset must be in big-endian, otherwise the shifted bits do not + // carry over properly among adjacent bytes: + src = BASE64_HTOBE64(src); + + // Four indices for the 12-bit lookup table: + const size_t index0 = (src >> 52) & 0xFFFU; + const size_t index1 = (src >> 40) & 0xFFFU; + const size_t index2 = (src >> 28) & 0xFFFU; + const size_t index3 = (src >> 16) & 0xFFFU; + + // Table lookup and store: + memcpy(*o + 0, base64_table_enc_12bit + index0, 2); + memcpy(*o + 2, base64_table_enc_12bit + index1, 2); + memcpy(*o + 4, base64_table_enc_12bit + index2, 2); + memcpy(*o + 6, base64_table_enc_12bit + index3, 2); + + *s += 6; + *o += 8; +} + +static inline void +enc_loop_generic_64 (const uint8_t **s, size_t *slen, uint8_t **o, size_t *olen) +{ + if (*slen < 8) { + return; + } + + // Process blocks of 6 bytes at a time. 
Because blocks are loaded 8 + // bytes at a time, ensure that there will be at least 2 remaining + // bytes after the last round, so that the final read will not pass + // beyond the bounds of the input buffer: + size_t rounds = (*slen - 2) / 6; + + *slen -= rounds * 6; // 6 bytes consumed per round + *olen += rounds * 8; // 8 bytes produced per round + + do { + if (rounds >= 8) { + enc_loop_generic_64_inner(s, o); + enc_loop_generic_64_inner(s, o); + enc_loop_generic_64_inner(s, o); + enc_loop_generic_64_inner(s, o); + enc_loop_generic_64_inner(s, o); + enc_loop_generic_64_inner(s, o); + enc_loop_generic_64_inner(s, o); + enc_loop_generic_64_inner(s, o); + rounds -= 8; + continue; + } + if (rounds >= 4) { + enc_loop_generic_64_inner(s, o); + enc_loop_generic_64_inner(s, o); + enc_loop_generic_64_inner(s, o); + enc_loop_generic_64_inner(s, o); + rounds -= 4; + continue; + } + if (rounds >= 2) { + enc_loop_generic_64_inner(s, o); + enc_loop_generic_64_inner(s, o); + rounds -= 2; + continue; + } + enc_loop_generic_64_inner(s, o); + break; + + } while (rounds > 0); +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/codec.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/codec.c new file mode 100644 index 0000000..1a29be7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/codec.c @@ -0,0 +1,41 @@ +#include +#include +#include + +#include "libbase64.h" +#include "../../tables/tables.h" +#include "../../codecs.h" +#include "config.h" +#include "../../env.h" + +#if BASE64_WORDSIZE == 32 +# include "32/enc_loop.c" +#elif BASE64_WORDSIZE == 64 +# include "64/enc_loop.c" +#endif + +#if BASE64_WORDSIZE >= 32 +# include "32/dec_loop.c" +#endif + +void +base64_stream_encode_plain BASE64_ENC_PARAMS +{ + #include "enc_head.c" +#if BASE64_WORDSIZE == 32 + enc_loop_generic_32(&s, &slen, &o, &olen); +#elif BASE64_WORDSIZE == 64 + enc_loop_generic_64(&s, &slen, &o, &olen); +#endif + #include "enc_tail.c" +} + +int +base64_stream_decode_plain BASE64_DEC_PARAMS +{ + #include "dec_head.c" +#if BASE64_WORDSIZE >= 32 + dec_loop_generic_32(&s, &slen, &o, &olen); +#endif + #include "dec_tail.c" +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/dec_head.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/dec_head.c new file mode 100644 index 0000000..179a31b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/dec_head.c @@ -0,0 +1,37 @@ +int ret = 0; +const uint8_t *s = (const uint8_t *) src; +uint8_t *o = (uint8_t *) out; +uint8_t q; + +// Use local temporaries to avoid cache thrashing: +size_t olen = 0; +size_t slen = srclen; +struct base64_state st; +st.eof = state->eof; +st.bytes = state->bytes; +st.carry = state->carry; + +// If we previously saw an EOF or an invalid character, bail out: +if (st.eof) { + *outlen = 0; + ret = 0; + // If there was a trailing '=' to check, check it: + if (slen && (st.eof == BASE64_AEOF)) { + state->bytes = 0; + state->eof = BASE64_EOF; + ret = ((base64_table_dec_8bit[*s++] == 254) && (slen == 1)) ? 
1 : 0; + } + return ret; +} + +// Turn four 6-bit numbers into three bytes: +// out[0] = 11111122 +// out[1] = 22223333 +// out[2] = 33444444 + +// Duff's device again: +switch (st.bytes) +{ + for (;;) + { + case 0: diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/dec_tail.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/dec_tail.c new file mode 100644 index 0000000..e64f724 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/dec_tail.c @@ -0,0 +1,91 @@ + if (slen-- == 0) { + ret = 1; + break; + } + if ((q = base64_table_dec_8bit[*s++]) >= 254) { + st.eof = BASE64_EOF; + // Treat character '=' as invalid for byte 0: + break; + } + st.carry = q << 2; + st.bytes++; + + // Deliberate fallthrough: + BASE64_FALLTHROUGH + + case 1: if (slen-- == 0) { + ret = 1; + break; + } + if ((q = base64_table_dec_8bit[*s++]) >= 254) { + st.eof = BASE64_EOF; + // Treat character '=' as invalid for byte 1: + break; + } + *o++ = st.carry | (q >> 4); + st.carry = q << 4; + st.bytes++; + olen++; + + // Deliberate fallthrough: + BASE64_FALLTHROUGH + + case 2: if (slen-- == 0) { + ret = 1; + break; + } + if ((q = base64_table_dec_8bit[*s++]) >= 254) { + st.bytes++; + // When q == 254, the input char is '='. + // Check if next byte is also '=': + if (q == 254) { + if (slen-- != 0) { + st.bytes = 0; + // EOF: + st.eof = BASE64_EOF; + q = base64_table_dec_8bit[*s++]; + ret = ((q == 254) && (slen == 0)) ? 1 : 0; + break; + } + else { + // Almost EOF + st.eof = BASE64_AEOF; + ret = 1; + break; + } + } + // If we get here, there was an error: + break; + } + *o++ = st.carry | (q >> 2); + st.carry = q << 6; + st.bytes++; + olen++; + + // Deliberate fallthrough: + BASE64_FALLTHROUGH + + case 3: if (slen-- == 0) { + ret = 1; + break; + } + if ((q = base64_table_dec_8bit[*s++]) >= 254) { + st.bytes = 0; + st.eof = BASE64_EOF; + // When q == 254, the input char is '='. Return 1 and EOF. + // When q == 255, the input char is invalid. Return 0 and EOF. + ret = ((q == 254) && (slen == 0)) ? 1 : 0; + break; + } + *o++ = st.carry | q; + st.carry = 0; + st.bytes = 0; + olen++; + } +} + +state->eof = st.eof; +state->bytes = st.bytes; +state->carry = st.carry; +*outlen = olen; +return ret; diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/enc_head.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/enc_head.c new file mode 100644 index 0000000..38d60b2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/enc_head.c @@ -0,0 +1,24 @@ +// Assume that *out is large enough to contain the output. +// Theoretically it should be 4/3 the length of src. +const uint8_t *s = (const uint8_t *) src; +uint8_t *o = (uint8_t *) out; + +// Use local temporaries to avoid cache thrashing: +size_t olen = 0; +size_t slen = srclen; +struct base64_state st; +st.bytes = state->bytes; +st.carry = state->carry; + +// Turn three bytes into four 6-bit numbers: +// in[0] = 00111111 +// in[1] = 00112222 +// in[2] = 00222233 +// in[3] = 00333333 + +// Duff's device, a for() loop inside a switch() statement. Legal! 
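The comment above describes how each group of three input bytes is split into four 6-bit values before the alphabet lookup; the Duff's-device switch that follows streams that split across buffer boundaries by parking partial results in st.carry and st.bytes. A minimal scalar sketch of the same bit split, for illustration only (the function and parameter names below are not part of the vendored library):

#include <stdint.h>

// Illustrative only: encode one complete 3-byte group into four Base64
// characters, using the bit layout documented in the comment above.
static void
encode_triple (const uint8_t in[3], char out[4], const char alphabet[64])
{
	out[0] = alphabet[in[0] >> 2];                            // 00111111
	out[1] = alphabet[((in[0] & 0x03) << 4) | (in[1] >> 4)];  // 00112222
	out[2] = alphabet[((in[1] & 0x0F) << 2) | (in[2] >> 6)];  // 00222233
	out[3] = alphabet[in[2] & 0x3F];                          // 00333333
}

The streaming variant below cannot assume whole triples per call, which is why the switch resumes at st.bytes and carries leftover bits between invocations.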
+switch (st.bytes) +{ + for (;;) + { + case 0: diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/enc_tail.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/enc_tail.c new file mode 100644 index 0000000..cbd5733 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/generic/enc_tail.c @@ -0,0 +1,34 @@ + if (slen-- == 0) { + break; + } + *o++ = base64_table_enc_6bit[*s >> 2]; + st.carry = (*s++ << 4) & 0x30; + st.bytes++; + olen += 1; + + // Deliberate fallthrough: + BASE64_FALLTHROUGH + + case 1: if (slen-- == 0) { + break; + } + *o++ = base64_table_enc_6bit[st.carry | (*s >> 4)]; + st.carry = (*s++ << 2) & 0x3C; + st.bytes++; + olen += 1; + + // Deliberate fallthrough: + BASE64_FALLTHROUGH + + case 2: if (slen-- == 0) { + break; + } + *o++ = base64_table_enc_6bit[st.carry | (*s >> 6)]; + *o++ = base64_table_enc_6bit[*s++ & 0x3F]; + st.bytes = 0; + olen += 2; + } +} +state->bytes = st.bytes; +state->carry = st.carry; +*outlen = olen; diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon32/codec.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon32/codec.c new file mode 100644 index 0000000..4a32592 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon32/codec.c @@ -0,0 +1,79 @@ +#include +#include +#include + +#include "libbase64.h" +#include "../../tables/tables.h" +#include "../../codecs.h" +#include "config.h" +#include "../../env.h" + +#ifdef __arm__ +# if (defined(__ARM_NEON__) || defined(__ARM_NEON)) && HAVE_NEON32 +# define BASE64_USE_NEON32 +# endif +#endif + +#ifdef BASE64_USE_NEON32 +#include + +// Only enable inline assembly on supported compilers. +#if defined(__GNUC__) || defined(__clang__) +#define BASE64_NEON32_USE_ASM +#endif + +static BASE64_FORCE_INLINE uint8x16_t +vqtbl1q_u8 (const uint8x16_t lut, const uint8x16_t indices) +{ + // NEON32 only supports 64-bit wide lookups in 128-bit tables. Emulate + // the NEON64 `vqtbl1q_u8` intrinsic to do 128-bit wide lookups. + uint8x8x2_t lut2; + uint8x8x2_t result; + + lut2.val[0] = vget_low_u8(lut); + lut2.val[1] = vget_high_u8(lut); + + result.val[0] = vtbl2_u8(lut2, vget_low_u8(indices)); + result.val[1] = vtbl2_u8(lut2, vget_high_u8(indices)); + + return vcombine_u8(result.val[0], result.val[1]); +} + +#include "../generic/32/dec_loop.c" +#include "../generic/32/enc_loop.c" +#include "dec_loop.c" +#include "enc_reshuffle.c" +#include "enc_translate.c" +#include "enc_loop.c" + +#endif // BASE64_USE_NEON32 + +// Stride size is so large on these NEON 32-bit functions +// (48 bytes encode, 32 bytes decode) that we inline the +// uint32 codec to stay performant on smaller inputs. 
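The encode and decode wrappers below chain three stages: the wide NEON loop, the generic 32-bit loop, and the bytewise head/tail fragments pulled in via #include. As a rough sketch of that layering, with placeholder names that are not the library's API, each stage consumes as many whole blocks as its stride allows and leaves the remainder to the next:

#include <stddef.h>
#include <stdint.h>

// Illustrative only: a bulk loop advances the source/dest cursors and
// adjusts the remaining lengths, leaving any partial block untouched.
typedef void (*bulk_loop_fn) (const uint8_t **src, size_t *srclen,
                              uint8_t **dst, size_t *dstlen);

static size_t
encode_layered (const uint8_t *src, size_t srclen, uint8_t *dst,
                bulk_loop_fn wide_loop, bulk_loop_fn narrow_loop)
{
	size_t dstlen = 0;

	wide_loop(&src, &srclen, &dst, &dstlen);    // e.g. 48-byte stride (NEON)
	narrow_loop(&src, &srclen, &dst, &dstlen);  // e.g. 3-byte stride (generic)

	// A bytewise tail (not shown) would then handle the final 0..2 bytes
	// and the carry state, as the enc_tail.c fragment does.
	return dstlen;
}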
+ +void +base64_stream_encode_neon32 BASE64_ENC_PARAMS +{ +#ifdef BASE64_USE_NEON32 + #include "../generic/enc_head.c" + enc_loop_neon32(&s, &slen, &o, &olen); + enc_loop_generic_32(&s, &slen, &o, &olen); + #include "../generic/enc_tail.c" +#else + base64_enc_stub(state, src, srclen, out, outlen); +#endif +} + +int +base64_stream_decode_neon32 BASE64_DEC_PARAMS +{ +#ifdef BASE64_USE_NEON32 + #include "../generic/dec_head.c" + dec_loop_neon32(&s, &slen, &o, &olen); + dec_loop_generic_32(&s, &slen, &o, &olen); + #include "../generic/dec_tail.c" +#else + return base64_dec_stub(state, src, srclen, out, outlen); +#endif +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon32/dec_loop.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon32/dec_loop.c new file mode 100644 index 0000000..e4caed7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon32/dec_loop.c @@ -0,0 +1,106 @@ +static BASE64_FORCE_INLINE int +is_nonzero (const uint8x16_t v) +{ + uint64_t u64; + const uint64x2_t v64 = vreinterpretq_u64_u8(v); + const uint32x2_t v32 = vqmovn_u64(v64); + + vst1_u64(&u64, vreinterpret_u64_u32(v32)); + return u64 != 0; +} + +static BASE64_FORCE_INLINE uint8x16_t +delta_lookup (const uint8x16_t v) +{ + const uint8x8_t lut = { + 0, 16, 19, 4, (uint8_t) -65, (uint8_t) -65, (uint8_t) -71, (uint8_t) -71, + }; + + return vcombine_u8( + vtbl1_u8(lut, vget_low_u8(v)), + vtbl1_u8(lut, vget_high_u8(v))); +} + +static BASE64_FORCE_INLINE uint8x16_t +dec_loop_neon32_lane (uint8x16_t *lane) +{ + // See the SSSE3 decoder for an explanation of the algorithm. + const uint8x16_t lut_lo = { + 0x15, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, + 0x11, 0x11, 0x13, 0x1A, 0x1B, 0x1B, 0x1B, 0x1A + }; + + const uint8x16_t lut_hi = { + 0x10, 0x10, 0x01, 0x02, 0x04, 0x08, 0x04, 0x08, + 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10 + }; + + const uint8x16_t mask_0F = vdupq_n_u8(0x0F); + const uint8x16_t mask_2F = vdupq_n_u8(0x2F); + + const uint8x16_t hi_nibbles = vshrq_n_u8(*lane, 4); + const uint8x16_t lo_nibbles = vandq_u8(*lane, mask_0F); + const uint8x16_t eq_2F = vceqq_u8(*lane, mask_2F); + + const uint8x16_t hi = vqtbl1q_u8(lut_hi, hi_nibbles); + const uint8x16_t lo = vqtbl1q_u8(lut_lo, lo_nibbles); + + // Now simply add the delta values to the input: + *lane = vaddq_u8(*lane, delta_lookup(vaddq_u8(eq_2F, hi_nibbles))); + + // Return the validity mask: + return vandq_u8(lo, hi); +} + +static inline void +dec_loop_neon32 (const uint8_t **s, size_t *slen, uint8_t **o, size_t *olen) +{ + if (*slen < 64) { + return; + } + + // Process blocks of 64 bytes per round. 
Unlike the SSE codecs, no + // extra trailing zero bytes are written, so it is not necessary to + // reserve extra input bytes: + size_t rounds = *slen / 64; + + *slen -= rounds * 64; // 64 bytes consumed per round + *olen += rounds * 48; // 48 bytes produced per round + + do { + uint8x16x3_t dec; + + // Load 64 bytes and deinterleave: + uint8x16x4_t str = vld4q_u8(*s); + + // Decode each lane, collect a mask of invalid inputs: + const uint8x16_t classified + = dec_loop_neon32_lane(&str.val[0]) + | dec_loop_neon32_lane(&str.val[1]) + | dec_loop_neon32_lane(&str.val[2]) + | dec_loop_neon32_lane(&str.val[3]); + + // Check for invalid input: if any of the delta values are + // zero, fall back on bytewise code to do error checking and + // reporting: + if (is_nonzero(classified)) { + break; + } + + // Compress four bytes into three: + dec.val[0] = vorrq_u8(vshlq_n_u8(str.val[0], 2), vshrq_n_u8(str.val[1], 4)); + dec.val[1] = vorrq_u8(vshlq_n_u8(str.val[1], 4), vshrq_n_u8(str.val[2], 2)); + dec.val[2] = vorrq_u8(vshlq_n_u8(str.val[2], 6), str.val[3]); + + // Interleave and store decoded result: + vst3q_u8(*o, dec); + + *s += 64; + *o += 48; + + } while (--rounds > 0); + + // Adjust for any rounds that were skipped: + *slen += rounds * 64; + *olen -= rounds * 48; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon32/enc_loop.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon32/enc_loop.c new file mode 100644 index 0000000..2adff48 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon32/enc_loop.c @@ -0,0 +1,170 @@ +#ifdef BASE64_NEON32_USE_ASM +static BASE64_FORCE_INLINE void +enc_loop_neon32_inner_asm (const uint8_t **s, uint8_t **o) +{ + // This function duplicates the functionality of enc_loop_neon32_inner, + // but entirely with inline assembly. This gives a significant speedup + // over using NEON intrinsics, which do not always generate very good + // code. The logic of the assembly is directly lifted from the + // intrinsics version, so it can be used as a guide to this code. + + // Temporary registers, used as scratch space. + uint8x16_t tmp0, tmp1, tmp2, tmp3; + uint8x16_t mask0, mask1, mask2, mask3; + + // A lookup table containing the absolute offsets for all ranges. + const uint8x16_t lut = { + 65U, 71U, 252U, 252U, + 252U, 252U, 252U, 252U, + 252U, 252U, 252U, 252U, + 237U, 240U, 0U, 0U + }; + + // Numeric constants. + const uint8x16_t n51 = vdupq_n_u8(51); + const uint8x16_t n25 = vdupq_n_u8(25); + const uint8x16_t n63 = vdupq_n_u8(63); + + __asm__ ( + + // Load 48 bytes and deinterleave. The bytes are loaded to + // hard-coded registers q12, q13 and q14, to ensure that they + // are contiguous. Increment the source pointer. + "vld3.8 {d24, d26, d28}, [%[src]]! \n\t" + "vld3.8 {d25, d27, d29}, [%[src]]! \n\t" + + // Reshuffle the bytes using temporaries. + "vshr.u8 %q[t0], q12, #2 \n\t" + "vshr.u8 %q[t1], q13, #4 \n\t" + "vshr.u8 %q[t2], q14, #6 \n\t" + "vsli.8 %q[t1], q12, #4 \n\t" + "vsli.8 %q[t2], q13, #2 \n\t" + "vand.u8 %q[t1], %q[t1], %q[n63] \n\t" + "vand.u8 %q[t2], %q[t2], %q[n63] \n\t" + "vand.u8 %q[t3], q14, %q[n63] \n\t" + + // t0..t3 are the reshuffled inputs. Create LUT indices. + "vqsub.u8 q12, %q[t0], %q[n51] \n\t" + "vqsub.u8 q13, %q[t1], %q[n51] \n\t" + "vqsub.u8 q14, %q[t2], %q[n51] \n\t" + "vqsub.u8 q15, %q[t3], %q[n51] \n\t" + + // Create the mask for range #0. 
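+	// (The vcgt results are 0xFF for values greater than 25, i.e. for
+	// ranges #1..4, and 0x00 for range #0, mirroring the mask used by
+	// the intrinsics version in enc_translate.c.)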
+ "vcgt.u8 %q[m0], %q[t0], %q[n25] \n\t" + "vcgt.u8 %q[m1], %q[t1], %q[n25] \n\t" + "vcgt.u8 %q[m2], %q[t2], %q[n25] \n\t" + "vcgt.u8 %q[m3], %q[t3], %q[n25] \n\t" + + // Subtract -1 to correct the LUT indices. + "vsub.u8 q12, %q[m0] \n\t" + "vsub.u8 q13, %q[m1] \n\t" + "vsub.u8 q14, %q[m2] \n\t" + "vsub.u8 q15, %q[m3] \n\t" + + // Lookup the delta values. + "vtbl.u8 d24, {%q[lut]}, d24 \n\t" + "vtbl.u8 d25, {%q[lut]}, d25 \n\t" + "vtbl.u8 d26, {%q[lut]}, d26 \n\t" + "vtbl.u8 d27, {%q[lut]}, d27 \n\t" + "vtbl.u8 d28, {%q[lut]}, d28 \n\t" + "vtbl.u8 d29, {%q[lut]}, d29 \n\t" + "vtbl.u8 d30, {%q[lut]}, d30 \n\t" + "vtbl.u8 d31, {%q[lut]}, d31 \n\t" + + // Add the delta values. + "vadd.u8 q12, %q[t0] \n\t" + "vadd.u8 q13, %q[t1] \n\t" + "vadd.u8 q14, %q[t2] \n\t" + "vadd.u8 q15, %q[t3] \n\t" + + // Store 64 bytes and interleave. Increment the dest pointer. + "vst4.8 {d24, d26, d28, d30}, [%[dst]]! \n\t" + "vst4.8 {d25, d27, d29, d31}, [%[dst]]! \n\t" + + // Outputs (modified). + : [src] "+r" (*s), + [dst] "+r" (*o), + [t0] "=&w" (tmp0), + [t1] "=&w" (tmp1), + [t2] "=&w" (tmp2), + [t3] "=&w" (tmp3), + [m0] "=&w" (mask0), + [m1] "=&w" (mask1), + [m2] "=&w" (mask2), + [m3] "=&w" (mask3) + + // Inputs (not modified). + : [lut] "w" (lut), + [n25] "w" (n25), + [n51] "w" (n51), + [n63] "w" (n63) + + // Clobbers. + : "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31", + "cc", "memory" + ); +} +#endif + +static BASE64_FORCE_INLINE void +enc_loop_neon32_inner (const uint8_t **s, uint8_t **o) +{ +#ifdef BASE64_NEON32_USE_ASM + enc_loop_neon32_inner_asm(s, o); +#else + // Load 48 bytes and deinterleave: + uint8x16x3_t src = vld3q_u8(*s); + + // Reshuffle: + uint8x16x4_t out = enc_reshuffle(src); + + // Translate reshuffled bytes to the Base64 alphabet: + out = enc_translate(out); + + // Interleave and store output: + vst4q_u8(*o, out); + + *s += 48; + *o += 64; +#endif +} + +static inline void +enc_loop_neon32 (const uint8_t **s, size_t *slen, uint8_t **o, size_t *olen) +{ + size_t rounds = *slen / 48; + + *slen -= rounds * 48; // 48 bytes consumed per round + *olen += rounds * 64; // 64 bytes produced per round + + while (rounds > 0) { + if (rounds >= 8) { + enc_loop_neon32_inner(s, o); + enc_loop_neon32_inner(s, o); + enc_loop_neon32_inner(s, o); + enc_loop_neon32_inner(s, o); + enc_loop_neon32_inner(s, o); + enc_loop_neon32_inner(s, o); + enc_loop_neon32_inner(s, o); + enc_loop_neon32_inner(s, o); + rounds -= 8; + continue; + } + if (rounds >= 4) { + enc_loop_neon32_inner(s, o); + enc_loop_neon32_inner(s, o); + enc_loop_neon32_inner(s, o); + enc_loop_neon32_inner(s, o); + rounds -= 4; + continue; + } + if (rounds >= 2) { + enc_loop_neon32_inner(s, o); + enc_loop_neon32_inner(s, o); + rounds -= 2; + continue; + } + enc_loop_neon32_inner(s, o); + break; + } +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon32/enc_reshuffle.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon32/enc_reshuffle.c new file mode 100644 index 0000000..fa94d27 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon32/enc_reshuffle.c @@ -0,0 +1,31 @@ +static BASE64_FORCE_INLINE uint8x16x4_t +enc_reshuffle (uint8x16x3_t in) +{ + uint8x16x4_t out; + + // Input: + // in[0] = a7 a6 a5 a4 a3 a2 a1 a0 + // in[1] = b7 b6 b5 b4 b3 b2 b1 b0 + // in[2] = c7 c6 c5 c4 c3 c2 c1 c0 + + // Output: + // out[0] = 00 00 a7 a6 a5 a4 a3 a2 + // out[1] = 00 00 a1 a0 b7 b6 b5 b4 + // out[2] = 00 00 b3 b2 b1 b0 c7 c6 + // out[3] = 00 00 c5 c4 c3 c2 c1 c0 + + // Move the 
input bits to where they need to be in the outputs. Except + // for the first output, the high two bits are not cleared. + out.val[0] = vshrq_n_u8(in.val[0], 2); + out.val[1] = vshrq_n_u8(in.val[1], 4); + out.val[2] = vshrq_n_u8(in.val[2], 6); + out.val[1] = vsliq_n_u8(out.val[1], in.val[0], 4); + out.val[2] = vsliq_n_u8(out.val[2], in.val[1], 2); + + // Clear the high two bits in the second, third and fourth output. + out.val[1] = vandq_u8(out.val[1], vdupq_n_u8(0x3F)); + out.val[2] = vandq_u8(out.val[2], vdupq_n_u8(0x3F)); + out.val[3] = vandq_u8(in.val[2], vdupq_n_u8(0x3F)); + + return out; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon32/enc_translate.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon32/enc_translate.c new file mode 100644 index 0000000..ff3d88d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon32/enc_translate.c @@ -0,0 +1,57 @@ +static BASE64_FORCE_INLINE uint8x16x4_t +enc_translate (const uint8x16x4_t in) +{ + // A lookup table containing the absolute offsets for all ranges: + const uint8x16_t lut = { + 65U, 71U, 252U, 252U, + 252U, 252U, 252U, 252U, + 252U, 252U, 252U, 252U, + 237U, 240U, 0U, 0U + }; + + const uint8x16_t offset = vdupq_n_u8(51); + + uint8x16x4_t indices, mask, delta, out; + + // Translate values 0..63 to the Base64 alphabet. There are five sets: + // # From To Abs Index Characters + // 0 [0..25] [65..90] +65 0 ABCDEFGHIJKLMNOPQRSTUVWXYZ + // 1 [26..51] [97..122] +71 1 abcdefghijklmnopqrstuvwxyz + // 2 [52..61] [48..57] -4 [2..11] 0123456789 + // 3 [62] [43] -19 12 + + // 4 [63] [47] -16 13 / + + // Create LUT indices from input: + // the index for range #0 is right, others are 1 less than expected: + indices.val[0] = vqsubq_u8(in.val[0], offset); + indices.val[1] = vqsubq_u8(in.val[1], offset); + indices.val[2] = vqsubq_u8(in.val[2], offset); + indices.val[3] = vqsubq_u8(in.val[3], offset); + + // mask is 0xFF (-1) for range #[1..4] and 0x00 for range #0: + mask.val[0] = vcgtq_u8(in.val[0], vdupq_n_u8(25)); + mask.val[1] = vcgtq_u8(in.val[1], vdupq_n_u8(25)); + mask.val[2] = vcgtq_u8(in.val[2], vdupq_n_u8(25)); + mask.val[3] = vcgtq_u8(in.val[3], vdupq_n_u8(25)); + + // Subtract -1, so add 1 to indices for range #[1..4], All indices are + // now correct: + indices.val[0] = vsubq_u8(indices.val[0], mask.val[0]); + indices.val[1] = vsubq_u8(indices.val[1], mask.val[1]); + indices.val[2] = vsubq_u8(indices.val[2], mask.val[2]); + indices.val[3] = vsubq_u8(indices.val[3], mask.val[3]); + + // Lookup delta values: + delta.val[0] = vqtbl1q_u8(lut, indices.val[0]); + delta.val[1] = vqtbl1q_u8(lut, indices.val[1]); + delta.val[2] = vqtbl1q_u8(lut, indices.val[2]); + delta.val[3] = vqtbl1q_u8(lut, indices.val[3]); + + // Add delta values: + out.val[0] = vaddq_u8(in.val[0], delta.val[0]); + out.val[1] = vaddq_u8(in.val[1], delta.val[1]); + out.val[2] = vaddq_u8(in.val[2], delta.val[2]); + out.val[3] = vaddq_u8(in.val[3], delta.val[3]); + + return out; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon64/codec.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon64/codec.c new file mode 100644 index 0000000..70dc463 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon64/codec.c @@ -0,0 +1,93 @@ +#include +#include +#include + +#include "libbase64.h" +#include "../../tables/tables.h" +#include "../../codecs.h" +#include "config.h" +#include "../../env.h" + +#if HAVE_NEON64 +#include + +// Only enable 
inline assembly on supported compilers. +#if defined(__GNUC__) || defined(__clang__) +#define BASE64_NEON64_USE_ASM +#endif + +static BASE64_FORCE_INLINE uint8x16x4_t +load_64byte_table (const uint8_t *p) +{ +#ifdef BASE64_NEON64_USE_ASM + + // Force the table to be loaded into contiguous registers. GCC will not + // normally allocate contiguous registers for a `uint8x16x4_t'. These + // registers are chosen to not conflict with the ones in the enc loop. + register uint8x16_t t0 __asm__ ("v8"); + register uint8x16_t t1 __asm__ ("v9"); + register uint8x16_t t2 __asm__ ("v10"); + register uint8x16_t t3 __asm__ ("v11"); + + __asm__ ( + "ld1 {%[t0].16b, %[t1].16b, %[t2].16b, %[t3].16b}, [%[src]], #64 \n\t" + : [src] "+r" (p), + [t0] "=w" (t0), + [t1] "=w" (t1), + [t2] "=w" (t2), + [t3] "=w" (t3) + ); + + return (uint8x16x4_t) { + .val[0] = t0, + .val[1] = t1, + .val[2] = t2, + .val[3] = t3, + }; +#else + return vld1q_u8_x4(p); +#endif +} + +#include "../generic/32/dec_loop.c" +#include "../generic/64/enc_loop.c" +#include "dec_loop.c" + +#ifdef BASE64_NEON64_USE_ASM +# include "enc_loop_asm.c" +#else +# include "enc_reshuffle.c" +# include "enc_loop.c" +#endif + +#endif // HAVE_NEON64 + +// Stride size is so large on these NEON 64-bit functions +// (48 bytes encode, 64 bytes decode) that we inline the +// uint64 codec to stay performant on smaller inputs. + +void +base64_stream_encode_neon64 BASE64_ENC_PARAMS +{ +#if HAVE_NEON64 + #include "../generic/enc_head.c" + enc_loop_neon64(&s, &slen, &o, &olen); + enc_loop_generic_64(&s, &slen, &o, &olen); + #include "../generic/enc_tail.c" +#else + base64_enc_stub(state, src, srclen, out, outlen); +#endif +} + +int +base64_stream_decode_neon64 BASE64_DEC_PARAMS +{ +#if HAVE_NEON64 + #include "../generic/dec_head.c" + dec_loop_neon64(&s, &slen, &o, &olen); + dec_loop_generic_32(&s, &slen, &o, &olen); + #include "../generic/dec_tail.c" +#else + return base64_dec_stub(state, src, srclen, out, outlen); +#endif +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon64/dec_loop.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon64/dec_loop.c new file mode 100644 index 0000000..428e065 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon64/dec_loop.c @@ -0,0 +1,129 @@ +// The input consists of five valid character sets in the Base64 alphabet, +// which we need to map back to the 6-bit values they represent. +// There are three ranges, two singles, and then there's the rest. +// +// # From To LUT Characters +// 1 [0..42] [255] #1 invalid input +// 2 [43] [62] #1 + +// 3 [44..46] [255] #1 invalid input +// 4 [47] [63] #1 / +// 5 [48..57] [52..61] #1 0..9 +// 6 [58..63] [255] #1 invalid input +// 7 [64] [255] #2 invalid input +// 8 [65..90] [0..25] #2 A..Z +// 9 [91..96] [255] #2 invalid input +// 10 [97..122] [26..51] #2 a..z +// 11 [123..126] [255] #2 invalid input +// (12) Everything else => invalid input + +// The first LUT will use the VTBL instruction (out of range indices are set to +// 0 in destination). 
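The mapping that the two split tables below implement can also be written as plain scalar code, which may make the ranges listed above easier to follow. This is an illustrative reference only, not the library's decoder:

#include <stdint.h>

// Illustrative only: map one Base64 character to its 6-bit value, returning
// 255 for anything outside the alphabet. The NEON path only requires that
// invalid bytes end up above 63, with 255 as the usual sentinel.
static uint8_t
decode_char (uint8_t c)
{
	if (c >= 'A' && c <= 'Z') return (uint8_t) (c - 'A');       //  0..25
	if (c >= 'a' && c <= 'z') return (uint8_t) (c - 'a' + 26);  // 26..51
	if (c >= '0' && c <= '9') return (uint8_t) (c - '0' + 52);  // 52..61
	if (c == '+') return 62;
	if (c == '/') return 63;
	return 255;                                                  // invalid
}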
+static const uint8_t dec_lut1[] = { + 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, + 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, + 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 255U, 62U, 255U, 255U, 255U, 63U, + 52U, 53U, 54U, 55U, 56U, 57U, 58U, 59U, 60U, 61U, 255U, 255U, 255U, 255U, 255U, 255U, +}; + +// The second LUT will use the VTBX instruction (out of range indices will be +// unchanged in destination). Input [64..126] will be mapped to index [1..63] +// in this LUT. Index 0 means that value comes from LUT #1. +static const uint8_t dec_lut2[] = { + 0U, 255U, 0U, 1U, 2U, 3U, 4U, 5U, 6U, 7U, 8U, 9U, 10U, 11U, 12U, 13U, + 14U, 15U, 16U, 17U, 18U, 19U, 20U, 21U, 22U, 23U, 24U, 25U, 255U, 255U, 255U, 255U, + 255U, 255U, 26U, 27U, 28U, 29U, 30U, 31U, 32U, 33U, 34U, 35U, 36U, 37U, 38U, 39U, + 40U, 41U, 42U, 43U, 44U, 45U, 46U, 47U, 48U, 49U, 50U, 51U, 255U, 255U, 255U, 255U, +}; + +// All input values in range for the first look-up will be 0U in the second +// look-up result. All input values out of range for the first look-up will be +// 0U in the first look-up result. Thus, the two results can be ORed without +// conflicts. +// +// Invalid characters that are in the valid range for either look-up will be +// set to 255U in the combined result. Other invalid characters will just be +// passed through with the second look-up result (using the VTBX instruction). +// Since the second LUT is 64 bytes, those passed-through values are guaranteed +// to have a value greater than 63U. Therefore, valid characters will be mapped +// to the valid [0..63] range and all invalid characters will be mapped to +// values greater than 63. + +static inline void +dec_loop_neon64 (const uint8_t **s, size_t *slen, uint8_t **o, size_t *olen) +{ + if (*slen < 64) { + return; + } + + // Process blocks of 64 bytes per round. 
Unlike the SSE codecs, no + // extra trailing zero bytes are written, so it is not necessary to + // reserve extra input bytes: + size_t rounds = *slen / 64; + + *slen -= rounds * 64; // 64 bytes consumed per round + *olen += rounds * 48; // 48 bytes produced per round + + const uint8x16x4_t tbl_dec1 = load_64byte_table(dec_lut1); + const uint8x16x4_t tbl_dec2 = load_64byte_table(dec_lut2); + + do { + const uint8x16_t offset = vdupq_n_u8(63U); + uint8x16x4_t dec1, dec2; + uint8x16x3_t dec; + + // Load 64 bytes and deinterleave: + uint8x16x4_t str = vld4q_u8((uint8_t *) *s); + + // Get indices for second LUT: + dec2.val[0] = vqsubq_u8(str.val[0], offset); + dec2.val[1] = vqsubq_u8(str.val[1], offset); + dec2.val[2] = vqsubq_u8(str.val[2], offset); + dec2.val[3] = vqsubq_u8(str.val[3], offset); + + // Get values from first LUT: + dec1.val[0] = vqtbl4q_u8(tbl_dec1, str.val[0]); + dec1.val[1] = vqtbl4q_u8(tbl_dec1, str.val[1]); + dec1.val[2] = vqtbl4q_u8(tbl_dec1, str.val[2]); + dec1.val[3] = vqtbl4q_u8(tbl_dec1, str.val[3]); + + // Get values from second LUT: + dec2.val[0] = vqtbx4q_u8(dec2.val[0], tbl_dec2, dec2.val[0]); + dec2.val[1] = vqtbx4q_u8(dec2.val[1], tbl_dec2, dec2.val[1]); + dec2.val[2] = vqtbx4q_u8(dec2.val[2], tbl_dec2, dec2.val[2]); + dec2.val[3] = vqtbx4q_u8(dec2.val[3], tbl_dec2, dec2.val[3]); + + // Get final values: + str.val[0] = vorrq_u8(dec1.val[0], dec2.val[0]); + str.val[1] = vorrq_u8(dec1.val[1], dec2.val[1]); + str.val[2] = vorrq_u8(dec1.val[2], dec2.val[2]); + str.val[3] = vorrq_u8(dec1.val[3], dec2.val[3]); + + // Check for invalid input, any value larger than 63: + const uint8x16_t classified + = vorrq_u8( + vorrq_u8(vcgtq_u8(str.val[0], vdupq_n_u8(63)), vcgtq_u8(str.val[1], vdupq_n_u8(63))), + vorrq_u8(vcgtq_u8(str.val[2], vdupq_n_u8(63)), vcgtq_u8(str.val[3], vdupq_n_u8(63))) + ); + + // Check that all bits are zero: + if (vmaxvq_u8(classified) != 0U) { + break; + } + + // Compress four bytes into three: + dec.val[0] = vorrq_u8(vshlq_n_u8(str.val[0], 2), vshrq_n_u8(str.val[1], 4)); + dec.val[1] = vorrq_u8(vshlq_n_u8(str.val[1], 4), vshrq_n_u8(str.val[2], 2)); + dec.val[2] = vorrq_u8(vshlq_n_u8(str.val[2], 6), str.val[3]); + + // Interleave and store decoded result: + vst3q_u8((uint8_t *) *o, dec); + + *s += 64; + *o += 48; + + } while (--rounds > 0); + + // Adjust for any rounds that were skipped: + *slen += rounds * 64; + *olen -= rounds * 48; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon64/enc_loop.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon64/enc_loop.c new file mode 100644 index 0000000..8bdd088 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon64/enc_loop.c @@ -0,0 +1,66 @@ +static BASE64_FORCE_INLINE void +enc_loop_neon64_inner (const uint8_t **s, uint8_t **o, const uint8x16x4_t tbl_enc) +{ + // Load 48 bytes and deinterleave: + uint8x16x3_t src = vld3q_u8(*s); + + // Divide bits of three input bytes over four output bytes: + uint8x16x4_t out = enc_reshuffle(src); + + // The bits have now been shifted to the right locations; + // translate their values 0..63 to the Base64 alphabet. 
+ // Use a 64-byte table lookup: + out.val[0] = vqtbl4q_u8(tbl_enc, out.val[0]); + out.val[1] = vqtbl4q_u8(tbl_enc, out.val[1]); + out.val[2] = vqtbl4q_u8(tbl_enc, out.val[2]); + out.val[3] = vqtbl4q_u8(tbl_enc, out.val[3]); + + // Interleave and store output: + vst4q_u8(*o, out); + + *s += 48; + *o += 64; +} + +static inline void +enc_loop_neon64 (const uint8_t **s, size_t *slen, uint8_t **o, size_t *olen) +{ + size_t rounds = *slen / 48; + + *slen -= rounds * 48; // 48 bytes consumed per round + *olen += rounds * 64; // 64 bytes produced per round + + // Load the encoding table: + const uint8x16x4_t tbl_enc = load_64byte_table(base64_table_enc_6bit); + + while (rounds > 0) { + if (rounds >= 8) { + enc_loop_neon64_inner(s, o, tbl_enc); + enc_loop_neon64_inner(s, o, tbl_enc); + enc_loop_neon64_inner(s, o, tbl_enc); + enc_loop_neon64_inner(s, o, tbl_enc); + enc_loop_neon64_inner(s, o, tbl_enc); + enc_loop_neon64_inner(s, o, tbl_enc); + enc_loop_neon64_inner(s, o, tbl_enc); + enc_loop_neon64_inner(s, o, tbl_enc); + rounds -= 8; + continue; + } + if (rounds >= 4) { + enc_loop_neon64_inner(s, o, tbl_enc); + enc_loop_neon64_inner(s, o, tbl_enc); + enc_loop_neon64_inner(s, o, tbl_enc); + enc_loop_neon64_inner(s, o, tbl_enc); + rounds -= 4; + continue; + } + if (rounds >= 2) { + enc_loop_neon64_inner(s, o, tbl_enc); + enc_loop_neon64_inner(s, o, tbl_enc); + rounds -= 2; + continue; + } + enc_loop_neon64_inner(s, o, tbl_enc); + break; + } +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon64/enc_loop_asm.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon64/enc_loop_asm.c new file mode 100644 index 0000000..182e9cd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon64/enc_loop_asm.c @@ -0,0 +1,168 @@ +// Apologies in advance for combining the preprocessor with inline assembly, +// two notoriously gnarly parts of C, but it was necessary to avoid a lot of +// code repetition. The preprocessor is used to template large sections of +// inline assembly that differ only in the registers used. If the code was +// written out by hand, it would become very large and hard to audit. + +// Generate a block of inline assembly that loads three user-defined registers +// A, B, C from memory and deinterleaves them, post-incrementing the src +// pointer. The register set should be sequential. +#define LOAD(A, B, C) \ + "ld3 {"A".16b, "B".16b, "C".16b}, [%[src]], #48 \n\t" + +// Generate a block of inline assembly that takes three deinterleaved registers +// and shuffles the bytes. The output is in temporary registers t0..t3. +#define SHUF(A, B, C) \ + "ushr %[t0].16b, "A".16b, #2 \n\t" \ + "ushr %[t1].16b, "B".16b, #4 \n\t" \ + "ushr %[t2].16b, "C".16b, #6 \n\t" \ + "sli %[t1].16b, "A".16b, #4 \n\t" \ + "sli %[t2].16b, "B".16b, #2 \n\t" \ + "and %[t1].16b, %[t1].16b, %[n63].16b \n\t" \ + "and %[t2].16b, %[t2].16b, %[n63].16b \n\t" \ + "and %[t3].16b, "C".16b, %[n63].16b \n\t" + +// Generate a block of inline assembly that takes temporary registers t0..t3 +// and translates them to the base64 alphabet, using a table loaded into +// v8..v11. The output is in user-defined registers A..D. 
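+// (The four-register tbl form gives a 64-byte table lookup, so each 6-bit
+// value indexes the full Base64 alphabet directly; out-of-range indices
+// would return zero, but the reshuffled values are always 0..63.)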
+#define TRAN(A, B, C, D) \ + "tbl "A".16b, {v8.16b-v11.16b}, %[t0].16b \n\t" \ + "tbl "B".16b, {v8.16b-v11.16b}, %[t1].16b \n\t" \ + "tbl "C".16b, {v8.16b-v11.16b}, %[t2].16b \n\t" \ + "tbl "D".16b, {v8.16b-v11.16b}, %[t3].16b \n\t" + +// Generate a block of inline assembly that interleaves four registers and +// stores them, post-incrementing the destination pointer. +#define STOR(A, B, C, D) \ + "st4 {"A".16b, "B".16b, "C".16b, "D".16b}, [%[dst]], #64 \n\t" + +// Generate a block of inline assembly that generates a single self-contained +// encoder round: fetch the data, process it, and store the result. +#define ROUND() \ + LOAD("v12", "v13", "v14") \ + SHUF("v12", "v13", "v14") \ + TRAN("v12", "v13", "v14", "v15") \ + STOR("v12", "v13", "v14", "v15") + +// Generate a block of assembly that generates a type A interleaved encoder +// round. It uses registers that were loaded by the previous type B round, and +// in turn loads registers for the next type B round. +#define ROUND_A() \ + SHUF("v2", "v3", "v4") \ + LOAD("v12", "v13", "v14") \ + TRAN("v2", "v3", "v4", "v5") \ + STOR("v2", "v3", "v4", "v5") + +// Type B interleaved encoder round. Same as type A, but register sets swapped. +#define ROUND_B() \ + SHUF("v12", "v13", "v14") \ + LOAD("v2", "v3", "v4") \ + TRAN("v12", "v13", "v14", "v15") \ + STOR("v12", "v13", "v14", "v15") + +// The first type A round needs to load its own registers. +#define ROUND_A_FIRST() \ + LOAD("v2", "v3", "v4") \ + ROUND_A() + +// The last type B round omits the load for the next step. +#define ROUND_B_LAST() \ + SHUF("v12", "v13", "v14") \ + TRAN("v12", "v13", "v14", "v15") \ + STOR("v12", "v13", "v14", "v15") + +// Suppress clang's warning that the literal string in the asm statement is +// overlong (longer than the ISO-mandated minimum size of 4095 bytes for C99 +// compilers). It may be true, but the goal here is not C99 portability. +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Woverlength-strings" + +static inline void +enc_loop_neon64 (const uint8_t **s, size_t *slen, uint8_t **o, size_t *olen) +{ + size_t rounds = *slen / 48; + + if (rounds == 0) { + return; + } + + *slen -= rounds * 48; // 48 bytes consumed per round. + *olen += rounds * 64; // 64 bytes produced per round. + + // Number of times to go through the 8x loop. + size_t loops = rounds / 8; + + // Number of rounds remaining after the 8x loop. + rounds %= 8; + + // Temporary registers, used as scratch space. + uint8x16_t tmp0, tmp1, tmp2, tmp3; + + __asm__ volatile ( + + // Load the encoding table into v8..v11. + " ld1 {v8.16b-v11.16b}, [%[tbl]] \n\t" + + // If there are eight rounds or more, enter an 8x unrolled loop + // of interleaved encoding rounds. The rounds interleave memory + // operations (load/store) with data operations to maximize + // pipeline throughput. + " cbz %[loops], 4f \n\t" + + // The SIMD instructions do not touch the flags. + "88: subs %[loops], %[loops], #1 \n\t" + " " ROUND_A_FIRST() + " " ROUND_B() + " " ROUND_A() + " " ROUND_B() + " " ROUND_A() + " " ROUND_B() + " " ROUND_A() + " " ROUND_B_LAST() + " b.ne 88b \n\t" + + // Enter a 4x unrolled loop for rounds of 4 or more. + "4: cmp %[rounds], #4 \n\t" + " b.lt 30f \n\t" + " " ROUND_A_FIRST() + " " ROUND_B() + " " ROUND_A() + " " ROUND_B_LAST() + " sub %[rounds], %[rounds], #4 \n\t" + + // Dispatch the remaining rounds 0..3. 
+ "30: cbz %[rounds], 0f \n\t" + " cmp %[rounds], #2 \n\t" + " b.eq 2f \n\t" + " b.lt 1f \n\t" + + // Block of non-interlaced encoding rounds, which can each + // individually be jumped to. Rounds fall through to the next. + "3: " ROUND() + "2: " ROUND() + "1: " ROUND() + "0: \n\t" + + // Outputs (modified). + : [loops] "+r" (loops), + [src] "+r" (*s), + [dst] "+r" (*o), + [t0] "=&w" (tmp0), + [t1] "=&w" (tmp1), + [t2] "=&w" (tmp2), + [t3] "=&w" (tmp3) + + // Inputs (not modified). + : [rounds] "r" (rounds), + [tbl] "r" (base64_table_enc_6bit), + [n63] "w" (vdupq_n_u8(63)) + + // Clobbers. + : "v2", "v3", "v4", "v5", + "v8", "v9", "v10", "v11", + "v12", "v13", "v14", "v15", + "cc", "memory" + ); +} + +#pragma GCC diagnostic pop diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon64/enc_reshuffle.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon64/enc_reshuffle.c new file mode 100644 index 0000000..2655df1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/neon64/enc_reshuffle.c @@ -0,0 +1,31 @@ +static BASE64_FORCE_INLINE uint8x16x4_t +enc_reshuffle (const uint8x16x3_t in) +{ + uint8x16x4_t out; + + // Input: + // in[0] = a7 a6 a5 a4 a3 a2 a1 a0 + // in[1] = b7 b6 b5 b4 b3 b2 b1 b0 + // in[2] = c7 c6 c5 c4 c3 c2 c1 c0 + + // Output: + // out[0] = 00 00 a7 a6 a5 a4 a3 a2 + // out[1] = 00 00 a1 a0 b7 b6 b5 b4 + // out[2] = 00 00 b3 b2 b1 b0 c7 c6 + // out[3] = 00 00 c5 c4 c3 c2 c1 c0 + + // Move the input bits to where they need to be in the outputs. Except + // for the first output, the high two bits are not cleared. + out.val[0] = vshrq_n_u8(in.val[0], 2); + out.val[1] = vshrq_n_u8(in.val[1], 4); + out.val[2] = vshrq_n_u8(in.val[2], 6); + out.val[1] = vsliq_n_u8(out.val[1], in.val[0], 4); + out.val[2] = vsliq_n_u8(out.val[2], in.val[1], 2); + + // Clear the high two bits in the second, third and fourth output. + out.val[1] = vandq_u8(out.val[1], vdupq_n_u8(0x3F)); + out.val[2] = vandq_u8(out.val[2], vdupq_n_u8(0x3F)); + out.val[3] = vandq_u8(in.val[2], vdupq_n_u8(0x3F)); + + return out; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/sse41/codec.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/sse41/codec.c new file mode 100644 index 0000000..c627db5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/sse41/codec.c @@ -0,0 +1,58 @@ +#include +#include +#include + +#include "libbase64.h" +#include "../../tables/tables.h" +#include "../../codecs.h" +#include "config.h" +#include "../../env.h" + +#if HAVE_SSE41 +#include + +// Only enable inline assembly on supported compilers and on 64-bit CPUs. 
+#ifndef BASE64_SSE41_USE_ASM +# if (defined(__GNUC__) || defined(__clang__)) && BASE64_WORDSIZE == 64 +# define BASE64_SSE41_USE_ASM 1 +# else +# define BASE64_SSE41_USE_ASM 0 +# endif +#endif + +#include "../ssse3/dec_reshuffle.c" +#include "../ssse3/dec_loop.c" + +#if BASE64_SSE41_USE_ASM +# include "../ssse3/enc_loop_asm.c" +#else +# include "../ssse3/enc_translate.c" +# include "../ssse3/enc_reshuffle.c" +# include "../ssse3/enc_loop.c" +#endif + +#endif // HAVE_SSE41 + +void +base64_stream_encode_sse41 BASE64_ENC_PARAMS +{ +#if HAVE_SSE41 + #include "../generic/enc_head.c" + enc_loop_ssse3(&s, &slen, &o, &olen); + #include "../generic/enc_tail.c" +#else + base64_enc_stub(state, src, srclen, out, outlen); +#endif +} + +int +base64_stream_decode_sse41 BASE64_DEC_PARAMS +{ +#if HAVE_SSE41 + #include "../generic/dec_head.c" + dec_loop_ssse3(&s, &slen, &o, &olen); + #include "../generic/dec_tail.c" +#else + return base64_dec_stub(state, src, srclen, out, outlen); +#endif +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/sse42/codec.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/sse42/codec.c new file mode 100644 index 0000000..2fe4e29 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/sse42/codec.c @@ -0,0 +1,58 @@ +#include +#include +#include + +#include "libbase64.h" +#include "../../tables/tables.h" +#include "../../codecs.h" +#include "config.h" +#include "../../env.h" + +#if HAVE_SSE42 +#include + +// Only enable inline assembly on supported compilers and on 64-bit CPUs. +#ifndef BASE64_SSE42_USE_ASM +# if (defined(__GNUC__) || defined(__clang__)) && BASE64_WORDSIZE == 64 +# define BASE64_SSE42_USE_ASM 1 +# else +# define BASE64_SSE42_USE_ASM 0 +# endif +#endif + +#include "../ssse3/dec_reshuffle.c" +#include "../ssse3/dec_loop.c" + +#if BASE64_SSE42_USE_ASM +# include "../ssse3/enc_loop_asm.c" +#else +# include "../ssse3/enc_translate.c" +# include "../ssse3/enc_reshuffle.c" +# include "../ssse3/enc_loop.c" +#endif + +#endif // HAVE_SSE42 + +void +base64_stream_encode_sse42 BASE64_ENC_PARAMS +{ +#if HAVE_SSE42 + #include "../generic/enc_head.c" + enc_loop_ssse3(&s, &slen, &o, &olen); + #include "../generic/enc_tail.c" +#else + base64_enc_stub(state, src, srclen, out, outlen); +#endif +} + +int +base64_stream_decode_sse42 BASE64_DEC_PARAMS +{ +#if HAVE_SSE42 + #include "../generic/dec_head.c" + dec_loop_ssse3(&s, &slen, &o, &olen); + #include "../generic/dec_tail.c" +#else + return base64_dec_stub(state, src, srclen, out, outlen); +#endif +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/codec.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/codec.c new file mode 100644 index 0000000..e51b3df --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/codec.c @@ -0,0 +1,60 @@ +#include +#include +#include + +#include "libbase64.h" +#include "../../tables/tables.h" +#include "../../codecs.h" +#include "config.h" +#include "../../env.h" + +#if HAVE_SSSE3 +#include + +// Only enable inline assembly on supported compilers and on 64-bit CPUs. +// 32-bit CPUs with SSSE3 support, such as low-end Atoms, only have eight XMM +// registers, which is not enough to run the inline assembly. 
+#ifndef BASE64_SSSE3_USE_ASM +# if (defined(__GNUC__) || defined(__clang__)) && BASE64_WORDSIZE == 64 +# define BASE64_SSSE3_USE_ASM 1 +# else +# define BASE64_SSSE3_USE_ASM 0 +# endif +#endif + +#include "dec_reshuffle.c" +#include "dec_loop.c" + +#if BASE64_SSSE3_USE_ASM +# include "enc_loop_asm.c" +#else +# include "enc_reshuffle.c" +# include "enc_translate.c" +# include "enc_loop.c" +#endif + +#endif // HAVE_SSSE3 + +void +base64_stream_encode_ssse3 BASE64_ENC_PARAMS +{ +#if HAVE_SSSE3 + #include "../generic/enc_head.c" + enc_loop_ssse3(&s, &slen, &o, &olen); + #include "../generic/enc_tail.c" +#else + base64_enc_stub(state, src, srclen, out, outlen); +#endif +} + +int +base64_stream_decode_ssse3 BASE64_DEC_PARAMS +{ +#if HAVE_SSSE3 + #include "../generic/dec_head.c" + dec_loop_ssse3(&s, &slen, &o, &olen); + #include "../generic/dec_tail.c" +#else + return base64_dec_stub(state, src, srclen, out, outlen); +#endif +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/dec_loop.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/dec_loop.c new file mode 100644 index 0000000..7ddb73b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/dec_loop.c @@ -0,0 +1,173 @@ +// The input consists of six character sets in the Base64 alphabet, which we +// need to map back to the 6-bit values they represent. There are three ranges, +// two singles, and then there's the rest. +// +// # From To Add Characters +// 1 [43] [62] +19 + +// 2 [47] [63] +16 / +// 3 [48..57] [52..61] +4 0..9 +// 4 [65..90] [0..25] -65 A..Z +// 5 [97..122] [26..51] -71 a..z +// (6) Everything else => invalid input +// +// We will use lookup tables for character validation and offset computation. +// Remember that 0x2X and 0x0X are the same index for _mm_shuffle_epi8, this +// allows to mask with 0x2F instead of 0x0F and thus save one constant +// declaration (register and/or memory access). +// +// For offsets: +// Perfect hash for lut = ((src >> 4) & 0x2F) + ((src == 0x2F) ? 0xFF : 0x00) +// 0000 = garbage +// 0001 = / +// 0010 = + +// 0011 = 0-9 +// 0100 = A-Z +// 0101 = A-Z +// 0110 = a-z +// 0111 = a-z +// 1000 >= garbage +// +// For validation, here's the table. +// A character is valid if and only if the AND of the 2 lookups equals 0: +// +// hi \ lo 0000 0001 0010 0011 0100 0101 0110 0111 1000 1001 1010 1011 1100 1101 1110 1111 +// LUT 0x15 0x11 0x11 0x11 0x11 0x11 0x11 0x11 0x11 0x11 0x13 0x1A 0x1B 0x1B 0x1B 0x1A +// +// 0000 0x10 char NUL SOH STX ETX EOT ENQ ACK BEL BS HT LF VT FF CR SO SI +// andlut 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 +// +// 0001 0x10 char DLE DC1 DC2 DC3 DC4 NAK SYN ETB CAN EM SUB ESC FS GS RS US +// andlut 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 +// +// 0010 0x01 char ! " # $ % & ' ( ) * + , - . / +// andlut 0x01 0x01 0x01 0x01 0x01 0x01 0x01 0x01 0x01 0x01 0x01 0x00 0x01 0x01 0x01 0x00 +// +// 0011 0x02 char 0 1 2 3 4 5 6 7 8 9 : ; < = > ? 
+// andlut 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x02 0x02 0x02 0x02 0x02 0x02 +// +// 0100 0x04 char @ A B C D E F G H I J K L M N O +// andlut 0x04 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 +// +// 0101 0x08 char P Q R S T U V W X Y Z [ \ ] ^ _ +// andlut 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x08 0x08 0x08 0x08 0x08 +// +// 0110 0x04 char ` a b c d e f g h i j k l m n o +// andlut 0x04 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 +// 0111 0x08 char p q r s t u v w x y z { | } ~ +// andlut 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x08 0x08 0x08 0x08 0x08 +// +// 1000 0x10 andlut 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 +// 1001 0x10 andlut 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 +// 1010 0x10 andlut 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 +// 1011 0x10 andlut 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 +// 1100 0x10 andlut 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 +// 1101 0x10 andlut 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 +// 1110 0x10 andlut 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 +// 1111 0x10 andlut 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 0x10 + +static BASE64_FORCE_INLINE int +dec_loop_ssse3_inner (const uint8_t **s, uint8_t **o, size_t *rounds) +{ + const __m128i lut_lo = _mm_setr_epi8( + 0x15, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, + 0x11, 0x11, 0x13, 0x1A, 0x1B, 0x1B, 0x1B, 0x1A); + + const __m128i lut_hi = _mm_setr_epi8( + 0x10, 0x10, 0x01, 0x02, 0x04, 0x08, 0x04, 0x08, + 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10); + + const __m128i lut_roll = _mm_setr_epi8( + 0, 16, 19, 4, -65, -65, -71, -71, + 0, 0, 0, 0, 0, 0, 0, 0); + + const __m128i mask_2F = _mm_set1_epi8(0x2F); + + // Load input: + __m128i str = _mm_loadu_si128((__m128i *) *s); + + // Table lookups: + const __m128i hi_nibbles = _mm_and_si128(_mm_srli_epi32(str, 4), mask_2F); + const __m128i lo_nibbles = _mm_and_si128(str, mask_2F); + const __m128i hi = _mm_shuffle_epi8(lut_hi, hi_nibbles); + const __m128i lo = _mm_shuffle_epi8(lut_lo, lo_nibbles); + + // Check for invalid input: if any "and" values from lo and hi are not + // zero, fall back on bytewise code to do error checking and reporting: + if (_mm_movemask_epi8(_mm_cmpgt_epi8(_mm_and_si128(lo, hi), _mm_setzero_si128())) != 0) { + return 0; + } + + const __m128i eq_2F = _mm_cmpeq_epi8(str, mask_2F); + const __m128i roll = _mm_shuffle_epi8(lut_roll, _mm_add_epi8(eq_2F, hi_nibbles)); + + // Now simply add the delta values to the input: + str = _mm_add_epi8(str, roll); + + // Reshuffle the input to packed 12-byte output format: + str = dec_reshuffle(str); + + // Store the output: + _mm_storeu_si128((__m128i *) *o, str); + + *s += 16; + *o += 12; + *rounds -= 1; + + return 1; +} + +static inline void +dec_loop_ssse3 (const uint8_t **s, size_t *slen, uint8_t **o, size_t *olen) +{ + if (*slen < 24) { + return; + } + + // Process blocks of 16 bytes per round. Because 4 extra zero bytes are + // written after the output, ensure that there will be at least 8 bytes + // of input data left to cover the gap. (6 data bytes and up to two + // end-of-string markers.) 
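+	// Worked example of the sizing rule above (illustrative comment, not part
+	// of the upstream file): for *slen == 100, rounds = (100 - 8) / 16 = 5,
+	// so the SIMD loop consumes 5 * 16 = 80 input bytes and writes
+	// 5 * 12 = 60 output bytes, leaving 20 bytes of input for the scalar
+	// tail, which satisfies the 8-byte reserve described above.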
+ size_t rounds = (*slen - 8) / 16; + + *slen -= rounds * 16; // 16 bytes consumed per round + *olen += rounds * 12; // 12 bytes produced per round + + do { + if (rounds >= 8) { + if (dec_loop_ssse3_inner(s, o, &rounds) && + dec_loop_ssse3_inner(s, o, &rounds) && + dec_loop_ssse3_inner(s, o, &rounds) && + dec_loop_ssse3_inner(s, o, &rounds) && + dec_loop_ssse3_inner(s, o, &rounds) && + dec_loop_ssse3_inner(s, o, &rounds) && + dec_loop_ssse3_inner(s, o, &rounds) && + dec_loop_ssse3_inner(s, o, &rounds)) { + continue; + } + break; + } + if (rounds >= 4) { + if (dec_loop_ssse3_inner(s, o, &rounds) && + dec_loop_ssse3_inner(s, o, &rounds) && + dec_loop_ssse3_inner(s, o, &rounds) && + dec_loop_ssse3_inner(s, o, &rounds)) { + continue; + } + break; + } + if (rounds >= 2) { + if (dec_loop_ssse3_inner(s, o, &rounds) && + dec_loop_ssse3_inner(s, o, &rounds)) { + continue; + } + break; + } + dec_loop_ssse3_inner(s, o, &rounds); + break; + + } while (rounds > 0); + + // Adjust for any rounds that were skipped: + *slen += rounds * 16; + *olen -= rounds * 12; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/dec_reshuffle.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/dec_reshuffle.c new file mode 100644 index 0000000..d3dd395 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/dec_reshuffle.c @@ -0,0 +1,33 @@ +static BASE64_FORCE_INLINE __m128i +dec_reshuffle (const __m128i in) +{ + // in, bits, upper case are most significant bits, lower case are least significant bits + // 00llllll 00kkkkLL 00jjKKKK 00JJJJJJ + // 00iiiiii 00hhhhII 00ggHHHH 00GGGGGG + // 00ffffff 00eeeeFF 00ddEEEE 00DDDDDD + // 00cccccc 00bbbbCC 00aaBBBB 00AAAAAA + + const __m128i merge_ab_and_bc = _mm_maddubs_epi16(in, _mm_set1_epi32(0x01400140)); + // 0000kkkk LLllllll 0000JJJJ JJjjKKKK + // 0000hhhh IIiiiiii 0000GGGG GGggHHHH + // 0000eeee FFffffff 0000DDDD DDddEEEE + // 0000bbbb CCcccccc 0000AAAA AAaaBBBB + + const __m128i out = _mm_madd_epi16(merge_ab_and_bc, _mm_set1_epi32(0x00011000)); + // 00000000 JJJJJJjj KKKKkkkk LLllllll + // 00000000 GGGGGGgg HHHHhhhh IIiiiiii + // 00000000 DDDDDDdd EEEEeeee FFffffff + // 00000000 AAAAAAaa BBBBbbbb CCcccccc + + // Pack bytes together: + return _mm_shuffle_epi8(out, _mm_setr_epi8( + 2, 1, 0, + 6, 5, 4, + 10, 9, 8, + 14, 13, 12, + -1, -1, -1, -1)); + // 00000000 00000000 00000000 00000000 + // LLllllll KKKKkkkk JJJJJJjj IIiiiiii + // HHHHhhhh GGGGGGgg FFffffff EEEEeeee + // DDDDDDdd CCcccccc BBBBbbbb AAAAAAaa +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/enc_loop.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/enc_loop.c new file mode 100644 index 0000000..9b67b70 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/enc_loop.c @@ -0,0 +1,67 @@ +static BASE64_FORCE_INLINE void +enc_loop_ssse3_inner (const uint8_t **s, uint8_t **o) +{ + // Load input: + __m128i str = _mm_loadu_si128((__m128i *) *s); + + // Reshuffle: + str = enc_reshuffle(str); + + // Translate reshuffled bytes to the Base64 alphabet: + str = enc_translate(str); + + // Store: + _mm_storeu_si128((__m128i *) *o, str); + + *s += 12; + *o += 16; +} + +static inline void +enc_loop_ssse3 (const uint8_t **s, size_t *slen, uint8_t **o, size_t *olen) +{ + if (*slen < 16) { + return; + } + + // Process blocks of 12 bytes at a time. 
Because blocks are loaded 16 + // bytes at a time, ensure that there will be at least 4 remaining + // bytes after the last round, so that the final read will not pass + // beyond the bounds of the input buffer: + size_t rounds = (*slen - 4) / 12; + + *slen -= rounds * 12; // 12 bytes consumed per round + *olen += rounds * 16; // 16 bytes produced per round + + do { + if (rounds >= 8) { + enc_loop_ssse3_inner(s, o); + enc_loop_ssse3_inner(s, o); + enc_loop_ssse3_inner(s, o); + enc_loop_ssse3_inner(s, o); + enc_loop_ssse3_inner(s, o); + enc_loop_ssse3_inner(s, o); + enc_loop_ssse3_inner(s, o); + enc_loop_ssse3_inner(s, o); + rounds -= 8; + continue; + } + if (rounds >= 4) { + enc_loop_ssse3_inner(s, o); + enc_loop_ssse3_inner(s, o); + enc_loop_ssse3_inner(s, o); + enc_loop_ssse3_inner(s, o); + rounds -= 4; + continue; + } + if (rounds >= 2) { + enc_loop_ssse3_inner(s, o); + enc_loop_ssse3_inner(s, o); + rounds -= 2; + continue; + } + enc_loop_ssse3_inner(s, o); + break; + + } while (rounds > 0); +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/enc_loop_asm.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/enc_loop_asm.c new file mode 100644 index 0000000..0cdb340 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/enc_loop_asm.c @@ -0,0 +1,268 @@ +// Apologies in advance for combining the preprocessor with inline assembly, +// two notoriously gnarly parts of C, but it was necessary to avoid a lot of +// code repetition. The preprocessor is used to template large sections of +// inline assembly that differ only in the registers used. If the code was +// written out by hand, it would become very large and hard to audit. + +// Generate a block of inline assembly that loads register R0 from memory. The +// offset at which the register is loaded is set by the given round. +#define LOAD(R0, ROUND) \ + "lddqu ("#ROUND" * 12)(%[src]), %["R0"] \n\t" + +// Generate a block of inline assembly that deinterleaves and shuffles register +// R0 using preloaded constants. Outputs in R0 and R1. +#define SHUF(R0, R1) \ + "pshufb %[lut0], %["R0"] \n\t" \ + "movdqa %["R0"], %["R1"] \n\t" \ + "pand %[msk0], %["R0"] \n\t" \ + "pand %[msk2], %["R1"] \n\t" \ + "pmulhuw %[msk1], %["R0"] \n\t" \ + "pmullw %[msk3], %["R1"] \n\t" \ + "por %["R1"], %["R0"] \n\t" + +// Generate a block of inline assembly that takes R0 and R1 and translates +// their contents to the base64 alphabet, using preloaded constants. +#define TRAN(R0, R1, R2) \ + "movdqa %["R0"], %["R1"] \n\t" \ + "movdqa %["R0"], %["R2"] \n\t" \ + "psubusb %[n51], %["R1"] \n\t" \ + "pcmpgtb %[n25], %["R2"] \n\t" \ + "psubb %["R2"], %["R1"] \n\t" \ + "movdqa %[lut1], %["R2"] \n\t" \ + "pshufb %["R1"], %["R2"] \n\t" \ + "paddb %["R2"], %["R0"] \n\t" + +// Generate a block of inline assembly that stores the given register R0 at an +// offset set by the given round. +#define STOR(R0, ROUND) \ + "movdqu %["R0"], ("#ROUND" * 16)(%[dst]) \n\t" + +// Generate a block of inline assembly that generates a single self-contained +// encoder round: fetch the data, process it, and store the result. Then update +// the source and destination pointers. +#define ROUND() \ + LOAD("a", 0) \ + SHUF("a", "b") \ + TRAN("a", "b", "c") \ + STOR("a", 0) \ + "add $12, %[src] \n\t" \ + "add $16, %[dst] \n\t" + +// Define a macro that initiates a three-way interleaved encoding round by +// preloading registers a, b and c from memory. 
+// The register graph shows which registers are in use during each step, and +// is a visual aid for choosing registers for that step. Symbol index: +// +// + indicates that a register is loaded by that step. +// | indicates that a register is in use and must not be touched. +// - indicates that a register is decommissioned by that step. +// x indicates that a register is used as a temporary by that step. +// V indicates that a register is an input or output to the macro. +// +#define ROUND_3_INIT() /* a b c d e f */ \ + LOAD("a", 0) /* + */ \ + SHUF("a", "d") /* | + */ \ + LOAD("b", 1) /* | + | */ \ + TRAN("a", "d", "e") /* | | - x */ \ + LOAD("c", 2) /* V V V */ + +// Define a macro that translates, shuffles and stores the input registers A, B +// and C, and preloads registers D, E and F for the next round. +// This macro can be arbitrarily daisy-chained by feeding output registers D, E +// and F back into the next round as input registers A, B and C. The macro +// carefully interleaves memory operations with data operations for optimal +// pipelined performance. + +#define ROUND_3(ROUND, A,B,C,D,E,F) /* A B C D E F */ \ + LOAD(D, (ROUND + 3)) /* V V V + */ \ + SHUF(B, E) /* | | | | + */ \ + STOR(A, (ROUND + 0)) /* - | | | | */ \ + TRAN(B, E, F) /* | | | - x */ \ + LOAD(E, (ROUND + 4)) /* | | | + */ \ + SHUF(C, A) /* + | | | | */ \ + STOR(B, (ROUND + 1)) /* | - | | | */ \ + TRAN(C, A, F) /* - | | | x */ \ + LOAD(F, (ROUND + 5)) /* | | | + */ \ + SHUF(D, A) /* + | | | | */ \ + STOR(C, (ROUND + 2)) /* | - | | | */ \ + TRAN(D, A, B) /* - x V V V */ + +// Define a macro that terminates a ROUND_3 macro by taking pre-loaded +// registers D, E and F, and translating, shuffling and storing them. +#define ROUND_3_END(ROUND, A,B,C,D,E,F) /* A B C D E F */ \ + SHUF(E, A) /* + V V V */ \ + STOR(D, (ROUND + 3)) /* | - | | */ \ + TRAN(E, A, B) /* - x | | */ \ + SHUF(F, C) /* + | | */ \ + STOR(E, (ROUND + 4)) /* | - | */ \ + TRAN(F, C, D) /* - x | */ \ + STOR(F, (ROUND + 5)) /* - */ + +// Define a type A round. Inputs are a, b, and c, outputs are d, e, and f. +#define ROUND_3_A(ROUND) \ + ROUND_3(ROUND, "a", "b", "c", "d", "e", "f") + +// Define a type B round. Inputs and outputs are swapped with regard to type A. +#define ROUND_3_B(ROUND) \ + ROUND_3(ROUND, "d", "e", "f", "a", "b", "c") + +// Terminating macro for a type A round. +#define ROUND_3_A_LAST(ROUND) \ + ROUND_3_A(ROUND) \ + ROUND_3_END(ROUND, "a", "b", "c", "d", "e", "f") + +// Terminating macro for a type B round. +#define ROUND_3_B_LAST(ROUND) \ + ROUND_3_B(ROUND) \ + ROUND_3_END(ROUND, "d", "e", "f", "a", "b", "c") + +// Suppress clang's warning that the literal string in the asm statement is +// overlong (longer than the ISO-mandated minimum size of 4095 bytes for C99 +// compilers). It may be true, but the goal here is not C99 portability. +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Woverlength-strings" + +static inline void +enc_loop_ssse3 (const uint8_t **s, size_t *slen, uint8_t **o, size_t *olen) +{ + // For a clearer explanation of the algorithm used by this function, + // please refer to the plain (not inline assembly) implementation. This + // function follows the same basic logic. + + if (*slen < 16) { + return; + } + + // Process blocks of 12 bytes at a time. Input is read in blocks of 16 + // bytes, so "reserve" four bytes from the input buffer to ensure that + // we never read beyond the end of the input buffer. 
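+	// Worked example of the four-byte reserve (illustrative comment, not part
+	// of the upstream file): for *slen == 50, rounds = (50 - 4) / 12 = 3; the
+	// furthest 16-byte load starts at offset 2 * 12 = 24 and ends at byte 40,
+	// which stays inside the 50-byte input, and 50 - 3 * 12 = 14 input bytes
+	// remain for the scalar tail.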
+ size_t rounds = (*slen - 4) / 12; + + *slen -= rounds * 12; // 12 bytes consumed per round + *olen += rounds * 16; // 16 bytes produced per round + + // Number of times to go through the 36x loop. + size_t loops = rounds / 36; + + // Number of rounds remaining after the 36x loop. + rounds %= 36; + + // Lookup tables. + const __m128i lut0 = _mm_set_epi8( + 10, 11, 9, 10, 7, 8, 6, 7, 4, 5, 3, 4, 1, 2, 0, 1); + + const __m128i lut1 = _mm_setr_epi8( + 65, 71, -4, -4, -4, -4, -4, -4, -4, -4, -4, -4, -19, -16, 0, 0); + + // Temporary registers. + __m128i a, b, c, d, e, f; + + __asm__ volatile ( + + // If there are 36 rounds or more, enter a 36x unrolled loop of + // interleaved encoding rounds. The rounds interleave memory + // operations (load/store) with data operations (table lookups, + // etc) to maximize pipeline throughput. + " test %[loops], %[loops] \n\t" + " jz 18f \n\t" + " jmp 36f \n\t" + " \n\t" + ".balign 64 \n\t" + "36: " ROUND_3_INIT() + " " ROUND_3_A( 0) + " " ROUND_3_B( 3) + " " ROUND_3_A( 6) + " " ROUND_3_B( 9) + " " ROUND_3_A(12) + " " ROUND_3_B(15) + " " ROUND_3_A(18) + " " ROUND_3_B(21) + " " ROUND_3_A(24) + " " ROUND_3_B(27) + " " ROUND_3_A_LAST(30) + " add $(12 * 36), %[src] \n\t" + " add $(16 * 36), %[dst] \n\t" + " dec %[loops] \n\t" + " jnz 36b \n\t" + + // Enter an 18x unrolled loop for rounds of 18 or more. + "18: cmp $18, %[rounds] \n\t" + " jl 9f \n\t" + " " ROUND_3_INIT() + " " ROUND_3_A(0) + " " ROUND_3_B(3) + " " ROUND_3_A(6) + " " ROUND_3_B(9) + " " ROUND_3_A_LAST(12) + " sub $18, %[rounds] \n\t" + " add $(12 * 18), %[src] \n\t" + " add $(16 * 18), %[dst] \n\t" + + // Enter a 9x unrolled loop for rounds of 9 or more. + "9: cmp $9, %[rounds] \n\t" + " jl 6f \n\t" + " " ROUND_3_INIT() + " " ROUND_3_A(0) + " " ROUND_3_B_LAST(3) + " sub $9, %[rounds] \n\t" + " add $(12 * 9), %[src] \n\t" + " add $(16 * 9), %[dst] \n\t" + + // Enter a 6x unrolled loop for rounds of 6 or more. + "6: cmp $6, %[rounds] \n\t" + " jl 55f \n\t" + " " ROUND_3_INIT() + " " ROUND_3_A_LAST(0) + " sub $6, %[rounds] \n\t" + " add $(12 * 6), %[src] \n\t" + " add $(16 * 6), %[dst] \n\t" + + // Dispatch the remaining rounds 0..5. + "55: cmp $3, %[rounds] \n\t" + " jg 45f \n\t" + " je 3f \n\t" + " cmp $1, %[rounds] \n\t" + " jg 2f \n\t" + " je 1f \n\t" + " jmp 0f \n\t" + + "45: cmp $4, %[rounds] \n\t" + " je 4f \n\t" + + // Block of non-interlaced encoding rounds, which can each + // individually be jumped to. Rounds fall through to the next. + "5: " ROUND() + "4: " ROUND() + "3: " ROUND() + "2: " ROUND() + "1: " ROUND() + "0: \n\t" + + // Outputs (modified). + : [rounds] "+r" (rounds), + [loops] "+r" (loops), + [src] "+r" (*s), + [dst] "+r" (*o), + [a] "=&x" (a), + [b] "=&x" (b), + [c] "=&x" (c), + [d] "=&x" (d), + [e] "=&x" (e), + [f] "=&x" (f) + + // Inputs (not modified). + : [lut0] "x" (lut0), + [lut1] "x" (lut1), + [msk0] "x" (_mm_set1_epi32(0x0FC0FC00)), + [msk1] "x" (_mm_set1_epi32(0x04000040)), + [msk2] "x" (_mm_set1_epi32(0x003F03F0)), + [msk3] "x" (_mm_set1_epi32(0x01000010)), + [n51] "x" (_mm_set1_epi8(51)), + [n25] "x" (_mm_set1_epi8(25)) + + // Clobbers. 
+ : "cc", "memory" + ); +} + +#pragma GCC diagnostic pop diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/enc_reshuffle.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/enc_reshuffle.c new file mode 100644 index 0000000..f9dc949 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/enc_reshuffle.c @@ -0,0 +1,48 @@ +static BASE64_FORCE_INLINE __m128i +enc_reshuffle (__m128i in) +{ + // Input, bytes MSB to LSB: + // 0 0 0 0 l k j i h g f e d c b a + + in = _mm_shuffle_epi8(in, _mm_set_epi8( + 10, 11, 9, 10, + 7, 8, 6, 7, + 4, 5, 3, 4, + 1, 2, 0, 1)); + // in, bytes MSB to LSB: + // k l j k + // h i g h + // e f d e + // b c a b + + const __m128i t0 = _mm_and_si128(in, _mm_set1_epi32(0x0FC0FC00)); + // bits, upper case are most significant bits, lower case are least significant bits + // 0000kkkk LL000000 JJJJJJ00 00000000 + // 0000hhhh II000000 GGGGGG00 00000000 + // 0000eeee FF000000 DDDDDD00 00000000 + // 0000bbbb CC000000 AAAAAA00 00000000 + + const __m128i t1 = _mm_mulhi_epu16(t0, _mm_set1_epi32(0x04000040)); + // 00000000 00kkkkLL 00000000 00JJJJJJ + // 00000000 00hhhhII 00000000 00GGGGGG + // 00000000 00eeeeFF 00000000 00DDDDDD + // 00000000 00bbbbCC 00000000 00AAAAAA + + const __m128i t2 = _mm_and_si128(in, _mm_set1_epi32(0x003F03F0)); + // 00000000 00llllll 000000jj KKKK0000 + // 00000000 00iiiiii 000000gg HHHH0000 + // 00000000 00ffffff 000000dd EEEE0000 + // 00000000 00cccccc 000000aa BBBB0000 + + const __m128i t3 = _mm_mullo_epi16(t2, _mm_set1_epi32(0x01000010)); + // 00llllll 00000000 00jjKKKK 00000000 + // 00iiiiii 00000000 00ggHHHH 00000000 + // 00ffffff 00000000 00ddEEEE 00000000 + // 00cccccc 00000000 00aaBBBB 00000000 + + return _mm_or_si128(t1, t3); + // 00llllll 00kkkkLL 00jjKKKK 00JJJJJJ + // 00iiiiii 00hhhhII 00ggHHHH 00GGGGGG + // 00ffffff 00eeeeFF 00ddEEEE 00DDDDDD + // 00cccccc 00bbbbCC 00aaBBBB 00AAAAAA +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/enc_translate.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/enc_translate.c new file mode 100644 index 0000000..60d9a42 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/arch/ssse3/enc_translate.c @@ -0,0 +1,33 @@ +static BASE64_FORCE_INLINE __m128i +enc_translate (const __m128i in) +{ + // A lookup table containing the absolute offsets for all ranges: + const __m128i lut = _mm_setr_epi8( + 65, 71, -4, -4, + -4, -4, -4, -4, + -4, -4, -4, -4, + -19, -16, 0, 0 + ); + + // Translate values 0..63 to the Base64 alphabet. There are five sets: + // # From To Abs Index Characters + // 0 [0..25] [65..90] +65 0 ABCDEFGHIJKLMNOPQRSTUVWXYZ + // 1 [26..51] [97..122] +71 1 abcdefghijklmnopqrstuvwxyz + // 2 [52..61] [48..57] -4 [2..11] 0123456789 + // 3 [62] [43] -19 12 + + // 4 [63] [47] -16 13 / + + // Create LUT indices from the input. The index for range #0 is right, + // others are 1 less than expected: + __m128i indices = _mm_subs_epu8(in, _mm_set1_epi8(51)); + + // mask is 0xFF (-1) for range #[1..4] and 0x00 for range #0: + __m128i mask = _mm_cmpgt_epi8(in, _mm_set1_epi8(25)); + + // Subtract -1, so add 1 to indices for range #[1..4]. 
All indices are + // now correct: + indices = _mm_sub_epi8(indices, mask); + + // Add offsets to input values: + return _mm_add_epi8(in, _mm_shuffle_epi8(lut, indices)); +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/codec_choose.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/codec_choose.c new file mode 100644 index 0000000..74b0aac --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/codec_choose.c @@ -0,0 +1,314 @@ +#include +#include +#include +#include +#include + +#include "libbase64.h" +#include "codecs.h" +#include "config.h" +#include "env.h" + +#if (__x86_64__ || __i386__ || _M_X86 || _M_X64) + #define BASE64_X86 + #if (HAVE_SSSE3 || HAVE_SSE41 || HAVE_SSE42 || HAVE_AVX || HAVE_AVX2 || HAVE_AVX512) + #define BASE64_X86_SIMD + #endif +#endif + +#ifdef BASE64_X86 +#ifdef _MSC_VER + #include + #define __cpuid_count(__level, __count, __eax, __ebx, __ecx, __edx) \ + { \ + int info[4]; \ + __cpuidex(info, __level, __count); \ + __eax = info[0]; \ + __ebx = info[1]; \ + __ecx = info[2]; \ + __edx = info[3]; \ + } + #define __cpuid(__level, __eax, __ebx, __ecx, __edx) \ + __cpuid_count(__level, 0, __eax, __ebx, __ecx, __edx) +#else + #include + #if HAVE_AVX512 || HAVE_AVX2 || HAVE_AVX + #if ((__GNUC__ > 4 || __GNUC__ == 4 && __GNUC_MINOR__ >= 2) || (__clang_major__ >= 3)) + static inline uint64_t _xgetbv (uint32_t index) + { + uint32_t eax, edx; + __asm__ __volatile__("xgetbv" : "=a"(eax), "=d"(edx) : "c"(index)); + return ((uint64_t)edx << 32) | eax; + } + #else + #error "Platform not supported" + #endif + #endif +#endif + +#ifndef bit_AVX512vl +#define bit_AVX512vl (1 << 31) +#endif +#ifndef bit_AVX512vbmi +#define bit_AVX512vbmi (1 << 1) +#endif +#ifndef bit_AVX2 +#define bit_AVX2 (1 << 5) +#endif +#ifndef bit_SSSE3 +#define bit_SSSE3 (1 << 9) +#endif +#ifndef bit_SSE41 +#define bit_SSE41 (1 << 19) +#endif +#ifndef bit_SSE42 +#define bit_SSE42 (1 << 20) +#endif +#ifndef bit_AVX +#define bit_AVX (1 << 28) +#endif + +#define bit_XSAVE_XRSTORE (1 << 27) + +#ifndef _XCR_XFEATURE_ENABLED_MASK +#define _XCR_XFEATURE_ENABLED_MASK 0 +#endif + +#define bit_XMM (1 << 1) +#define bit_YMM (1 << 2) +#define bit_OPMASK (1 << 5) +#define bit_ZMM (1 << 6) +#define bit_HIGH_ZMM (1 << 7) + +#define _XCR_XMM_AND_YMM_STATE_ENABLED_BY_OS (bit_XMM | bit_YMM) + +#define _AVX_512_ENABLED_BY_OS (bit_XMM | bit_YMM | bit_OPMASK | bit_ZMM | bit_HIGH_ZMM) + +#endif + +// Function declarations: +#define BASE64_CODEC_FUNCS(arch) \ + extern void base64_stream_encode_ ## arch BASE64_ENC_PARAMS; \ + extern int base64_stream_decode_ ## arch BASE64_DEC_PARAMS; + +BASE64_CODEC_FUNCS(avx512) +BASE64_CODEC_FUNCS(avx2) +BASE64_CODEC_FUNCS(neon32) +BASE64_CODEC_FUNCS(neon64) +BASE64_CODEC_FUNCS(plain) +BASE64_CODEC_FUNCS(ssse3) +BASE64_CODEC_FUNCS(sse41) +BASE64_CODEC_FUNCS(sse42) +BASE64_CODEC_FUNCS(avx) + +static bool +codec_choose_forced (struct codec *codec, int flags) +{ + // If the user wants to use a certain codec, + // always allow it, even if the codec is a no-op. + // For testing purposes. 
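+	// Illustrative usage (editorial comment, not part of the library): a test
+	// can force the plain scalar codec regardless of CPU features by passing
+	// the corresponding flag to the public entry points, e.g.
+	//
+	//     base64_encode(src, srclen, out, &outlen, BASE64_FORCE_PLAIN);
+	//
+	// If no force flag is set, the low bits checked below are zero and this
+	// function returns false, so codec_choose() falls through to runtime
+	// feature detection.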
+ + if (!(flags & 0xFFFF)) { + return false; + } + + if (flags & BASE64_FORCE_AVX2) { + codec->enc = base64_stream_encode_avx2; + codec->dec = base64_stream_decode_avx2; + return true; + } + if (flags & BASE64_FORCE_NEON32) { + codec->enc = base64_stream_encode_neon32; + codec->dec = base64_stream_decode_neon32; + return true; + } + if (flags & BASE64_FORCE_NEON64) { + codec->enc = base64_stream_encode_neon64; + codec->dec = base64_stream_decode_neon64; + return true; + } + if (flags & BASE64_FORCE_PLAIN) { + codec->enc = base64_stream_encode_plain; + codec->dec = base64_stream_decode_plain; + return true; + } + if (flags & BASE64_FORCE_SSSE3) { + codec->enc = base64_stream_encode_ssse3; + codec->dec = base64_stream_decode_ssse3; + return true; + } + if (flags & BASE64_FORCE_SSE41) { + codec->enc = base64_stream_encode_sse41; + codec->dec = base64_stream_decode_sse41; + return true; + } + if (flags & BASE64_FORCE_SSE42) { + codec->enc = base64_stream_encode_sse42; + codec->dec = base64_stream_decode_sse42; + return true; + } + if (flags & BASE64_FORCE_AVX) { + codec->enc = base64_stream_encode_avx; + codec->dec = base64_stream_decode_avx; + return true; + } + if (flags & BASE64_FORCE_AVX512) { + codec->enc = base64_stream_encode_avx512; + codec->dec = base64_stream_decode_avx512; + return true; + } + return false; +} + +static bool +codec_choose_arm (struct codec *codec) +{ +#if HAVE_NEON64 || ((defined(__ARM_NEON__) || defined(__ARM_NEON)) && HAVE_NEON32) + + // Unfortunately there is no portable way to check for NEON + // support at runtime from userland in the same way that x86 + // has cpuid, so just stick to the compile-time configuration: + + #if HAVE_NEON64 + codec->enc = base64_stream_encode_neon64; + codec->dec = base64_stream_decode_neon64; + #else + codec->enc = base64_stream_encode_neon32; + codec->dec = base64_stream_decode_neon32; + #endif + + return true; + +#else + (void)codec; + return false; +#endif +} + +static bool +codec_choose_x86 (struct codec *codec) +{ +#ifdef BASE64_X86_SIMD + + unsigned int eax, ebx = 0, ecx = 0, edx; + unsigned int max_level; + + #ifdef _MSC_VER + int info[4]; + __cpuidex(info, 0, 0); + max_level = info[0]; + #else + max_level = __get_cpuid_max(0, NULL); + #endif + + #if HAVE_AVX512 || HAVE_AVX2 || HAVE_AVX + // Check for AVX/AVX2/AVX512 support: + // Checking for AVX requires 3 things: + // 1) CPUID indicates that the OS uses XSAVE and XRSTORE instructions + // (allowing saving YMM registers on context switch) + // 2) CPUID indicates support for AVX + // 3) XGETBV indicates the AVX registers will be saved and restored on + // context switch + // + // Note that XGETBV is only available on 686 or later CPUs, so the + // instruction needs to be conditionally run. 
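+	// Illustrative note (editorial, not part of the upstream file): the OS
+	// support test below reads XCR0 via XGETBV.
+	// _XCR_XMM_AND_YMM_STATE_ENABLED_BY_OS is bit_XMM | bit_YMM = 0x02 | 0x04
+	// = 0x06, so the AVX paths are only taken when the OS saves both XMM and
+	// YMM state on context switch; the AVX-512 path additionally requires the
+	// opmask, ZMM and high-ZMM state bits.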
+ if (max_level >= 1) { + __cpuid_count(1, 0, eax, ebx, ecx, edx); + if (ecx & bit_XSAVE_XRSTORE) { + uint64_t xcr_mask; + xcr_mask = _xgetbv(_XCR_XFEATURE_ENABLED_MASK); + if ((xcr_mask & _XCR_XMM_AND_YMM_STATE_ENABLED_BY_OS) == _XCR_XMM_AND_YMM_STATE_ENABLED_BY_OS) { // check multiple bits at once + #if HAVE_AVX512 + if (max_level >= 7 && ((xcr_mask & _AVX_512_ENABLED_BY_OS) == _AVX_512_ENABLED_BY_OS)) { + __cpuid_count(7, 0, eax, ebx, ecx, edx); + if ((ebx & bit_AVX512vl) && (ecx & bit_AVX512vbmi)) { + codec->enc = base64_stream_encode_avx512; + codec->dec = base64_stream_decode_avx512; + return true; + } + } + #endif + #if HAVE_AVX2 + if (max_level >= 7) { + __cpuid_count(7, 0, eax, ebx, ecx, edx); + if (ebx & bit_AVX2) { + codec->enc = base64_stream_encode_avx2; + codec->dec = base64_stream_decode_avx2; + return true; + } + } + #endif + #if HAVE_AVX + __cpuid_count(1, 0, eax, ebx, ecx, edx); + if (ecx & bit_AVX) { + codec->enc = base64_stream_encode_avx; + codec->dec = base64_stream_decode_avx; + return true; + } + #endif + } + } + } + #endif + + #if HAVE_SSE42 + // Check for SSE42 support: + if (max_level >= 1) { + __cpuid(1, eax, ebx, ecx, edx); + if (ecx & bit_SSE42) { + codec->enc = base64_stream_encode_sse42; + codec->dec = base64_stream_decode_sse42; + return true; + } + } + #endif + + #if HAVE_SSE41 + // Check for SSE41 support: + if (max_level >= 1) { + __cpuid(1, eax, ebx, ecx, edx); + if (ecx & bit_SSE41) { + codec->enc = base64_stream_encode_sse41; + codec->dec = base64_stream_decode_sse41; + return true; + } + } + #endif + + #if HAVE_SSSE3 + // Check for SSSE3 support: + if (max_level >= 1) { + __cpuid(1, eax, ebx, ecx, edx); + if (ecx & bit_SSSE3) { + codec->enc = base64_stream_encode_ssse3; + codec->dec = base64_stream_decode_ssse3; + return true; + } + } + #endif + +#else + (void)codec; +#endif + + return false; +} + +void +codec_choose (struct codec *codec, int flags) +{ + // User forced a codec: + if (codec_choose_forced(codec, flags)) { + return; + } + + // Runtime feature detection: + if (codec_choose_arm(codec)) { + return; + } + if (codec_choose_x86(codec)) { + return; + } + codec->enc = base64_stream_encode_plain; + codec->dec = base64_stream_decode_plain; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/codecs.h b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/codecs.h new file mode 100644 index 0000000..34d54dc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/codecs.h @@ -0,0 +1,57 @@ +#include "libbase64.h" + +// Function parameters for encoding functions: +#define BASE64_ENC_PARAMS \ + ( struct base64_state *state \ + , const char *src \ + , size_t srclen \ + , char *out \ + , size_t *outlen \ + ) + +// Function parameters for decoding functions: +#define BASE64_DEC_PARAMS \ + ( struct base64_state *state \ + , const char *src \ + , size_t srclen \ + , char *out \ + , size_t *outlen \ + ) + +// This function is used as a stub when a certain encoder is not compiled in. +// It discards the inputs and returns zero output bytes. +static inline void +base64_enc_stub BASE64_ENC_PARAMS +{ + (void) state; + (void) src; + (void) srclen; + (void) out; + + *outlen = 0; +} + +// This function is used as a stub when a certain decoder is not compiled in. +// It discards the inputs and returns an invalid decoding result. 
+static inline int +base64_dec_stub BASE64_DEC_PARAMS +{ + (void) state; + (void) src; + (void) srclen; + (void) out; + (void) outlen; + + return -1; +} + +typedef void (* base64_enc_fn) BASE64_ENC_PARAMS; +typedef int (* base64_dec_fn) BASE64_DEC_PARAMS; + +struct codec +{ + base64_enc_fn enc; + base64_dec_fn dec; +}; + +extern void codec_choose (struct codec *, int flags); diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/config.h b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/config.h new file mode 100644 index 0000000..467a722 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/config.h @@ -0,0 +1,24 @@ +#ifndef BASE64_CONFIG_H +#define BASE64_CONFIG_H + +#if !defined(__APPLE__) && ((defined(__x86_64__) && defined(__LP64__)) || defined(_M_X64)) + #define HAVE_SSSE3 1 + #define HAVE_SSE41 1 + #define HAVE_SSE42 1 + #define HAVE_AVX 1 + #define HAVE_AVX2 1 + #define HAVE_AVX512 0 +#endif + +#define BASE64_WITH_NEON32 0 +#define HAVE_NEON32 BASE64_WITH_NEON32 + +#if defined(__APPLE__) && defined(__aarch64__) +#define BASE64_WITH_NEON64 1 +#else +#define BASE64_WITH_NEON64 0 +#endif + +#define HAVE_NEON64 BASE64_WITH_NEON64 + +#endif // BASE64_CONFIG_H diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/env.h b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/env.h new file mode 100644 index 0000000..0837065 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/env.h @@ -0,0 +1,84 @@ +#ifndef BASE64_ENV_H +#define BASE64_ENV_H + +#include + +// This header file contains macro definitions that describe certain aspects of +// the compile-time environment. Compatibility and portability macros go here. + +// Define machine endianness. This is for GCC: +#if (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__) +# define BASE64_LITTLE_ENDIAN 1 +#else +# define BASE64_LITTLE_ENDIAN 0 +#endif + +// This is for Clang: +#ifdef __LITTLE_ENDIAN__ +# define BASE64_LITTLE_ENDIAN 1 +#endif + +#ifdef __BIG_ENDIAN__ +# define BASE64_LITTLE_ENDIAN 0 +#endif + +// MSVC++ needs intrin.h for _byteswap_uint64 (issue #68): +#if BASE64_LITTLE_ENDIAN && defined(_MSC_VER) +# include +#endif + +// Endian conversion functions: +#if BASE64_LITTLE_ENDIAN +# ifdef _MSC_VER +// Microsoft Visual C++: +# define BASE64_HTOBE32(x) _byteswap_ulong(x) +# define BASE64_HTOBE64(x) _byteswap_uint64(x) +# else +// GCC and Clang: +# define BASE64_HTOBE32(x) __builtin_bswap32(x) +# define BASE64_HTOBE64(x) __builtin_bswap64(x) +# endif +#else +// No conversion needed: +# define BASE64_HTOBE32(x) (x) +# define BASE64_HTOBE64(x) (x) +#endif + +// Detect word size: +#if defined (__x86_64__) +// This also works for the x32 ABI, which has a 64-bit word size. +# define BASE64_WORDSIZE 64 +#elif SIZE_MAX == UINT32_MAX +# define BASE64_WORDSIZE 32 +#elif SIZE_MAX == UINT64_MAX +# define BASE64_WORDSIZE 64 +#else +# error BASE64_WORDSIZE_NOT_DEFINED +#endif + +// End-of-file definitions. +// Almost end-of-file when waiting for the last '=' character: +#define BASE64_AEOF 1 +// End-of-file when stream end has been reached or invalid input provided: +#define BASE64_EOF 2 + +// GCC 7 defaults to issuing a warning for fallthrough in switch statements, +// unless the fallthrough cases are marked with an attribute. 
As we use +// fallthrough deliberately, define an alias for the attribute: +#if __GNUC__ >= 7 +# define BASE64_FALLTHROUGH __attribute__((fallthrough)); +#else +# define BASE64_FALLTHROUGH +#endif + +// Declare macros to ensure that functions that are intended to be inlined, are +// actually inlined, even when no optimization is applied. A lot of inner loop +// code is factored into separate functions for reasons of readability, but +// that code should always be inlined (and optimized) in the main loop. +#ifdef _MSC_VER +# define BASE64_FORCE_INLINE __forceinline +#else +# define BASE64_FORCE_INLINE inline __attribute__((always_inline)) +#endif + +#endif // BASE64_ENV_H diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/lib.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/lib.c new file mode 100644 index 0000000..0f24d52 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/lib.c @@ -0,0 +1,164 @@ +#include +#include +#ifdef _OPENMP +#include +#endif + +#include "libbase64.h" +#include "tables/tables.h" +#include "codecs.h" +#include "env.h" + +// These static function pointers are initialized once when the library is +// first used, and remain in use for the remaining lifetime of the program. +// The idea being that CPU features don't change at runtime. +static struct codec codec = { NULL, NULL }; + +void +base64_stream_encode_init (struct base64_state *state, int flags) +{ + // If any of the codec flags are set, redo choice: + if (codec.enc == NULL || flags & 0xFF) { + codec_choose(&codec, flags); + } + state->eof = 0; + state->bytes = 0; + state->carry = 0; + state->flags = flags; +} + +void +base64_stream_encode + ( struct base64_state *state + , const char *src + , size_t srclen + , char *out + , size_t *outlen + ) +{ + codec.enc(state, src, srclen, out, outlen); +} + +void +base64_stream_encode_final + ( struct base64_state *state + , char *out + , size_t *outlen + ) +{ + uint8_t *o = (uint8_t *)out; + + if (state->bytes == 1) { + *o++ = base64_table_enc_6bit[state->carry]; + *o++ = '='; + *o++ = '='; + *outlen = 3; + return; + } + if (state->bytes == 2) { + *o++ = base64_table_enc_6bit[state->carry]; + *o++ = '='; + *outlen = 2; + return; + } + *outlen = 0; +} + +void +base64_stream_decode_init (struct base64_state *state, int flags) +{ + // If any of the codec flags are set, redo choice: + if (codec.dec == NULL || flags & 0xFFFF) { + codec_choose(&codec, flags); + } + state->eof = 0; + state->bytes = 0; + state->carry = 0; + state->flags = flags; +} + +int +base64_stream_decode + ( struct base64_state *state + , const char *src + , size_t srclen + , char *out + , size_t *outlen + ) +{ + return codec.dec(state, src, srclen, out, outlen); +} + +#ifdef _OPENMP + + // Due to the overhead of initializing OpenMP and creating a team of + // threads, we require the data length to be larger than a threshold: + #define OMP_THRESHOLD 20000 + + // Conditionally include OpenMP-accelerated codec implementations: + #include "lib_openmp.c" +#endif + +void +base64_encode + ( const char *src + , size_t srclen + , char *out + , size_t *outlen + , int flags + ) +{ + size_t s; + size_t t; + struct base64_state state; + + #ifdef _OPENMP + if (srclen >= OMP_THRESHOLD) { + base64_encode_openmp(src, srclen, out, outlen, flags); + return; + } + #endif + + // Init the stream reader: + base64_stream_encode_init(&state, flags); + + // Feed the whole string to the stream reader: + base64_stream_encode(&state, src, srclen, out, &s); + + // Finalize the stream by 
writing trailer if any: + base64_stream_encode_final(&state, out + s, &t); + + // Final output length is stream length plus tail: + *outlen = s + t; +} + +int +base64_decode + ( const char *src + , size_t srclen + , char *out + , size_t *outlen + , int flags + ) +{ + int ret; + struct base64_state state; + + #ifdef _OPENMP + if (srclen >= OMP_THRESHOLD) { + return base64_decode_openmp(src, srclen, out, outlen, flags); + } + #endif + + // Init the stream reader: + base64_stream_decode_init(&state, flags); + + // Feed the whole string to the stream reader: + ret = base64_stream_decode(&state, src, srclen, out, outlen); + + // If when decoding a whole block, we're still waiting for input then fail: + if (ret && (state.bytes == 0)) { + return ret; + } + return 0; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/libbase64.h b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/libbase64.h new file mode 100644 index 0000000..c590897 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/libbase64.h @@ -0,0 +1,146 @@ +#ifndef LIBBASE64_H +#define LIBBASE64_H + +#include /* size_t */ + + +#if defined(_WIN32) || defined(__CYGWIN__) +#define BASE64_SYMBOL_IMPORT __declspec(dllimport) +#define BASE64_SYMBOL_EXPORT __declspec(dllexport) +#define BASE64_SYMBOL_PRIVATE + +#elif __GNUC__ >= 4 +#define BASE64_SYMBOL_IMPORT __attribute__ ((visibility ("default"))) +#define BASE64_SYMBOL_EXPORT __attribute__ ((visibility ("default"))) +#define BASE64_SYMBOL_PRIVATE __attribute__ ((visibility ("hidden"))) + +#else +#define BASE64_SYMBOL_IMPORT +#define BASE64_SYMBOL_EXPORT +#define BASE64_SYMBOL_PRIVATE +#endif + +#if defined(BASE64_STATIC_DEFINE) +#define BASE64_EXPORT +#define BASE64_NO_EXPORT + +#else +#if defined(BASE64_EXPORTS) // defined if we are building the shared library +#define BASE64_EXPORT BASE64_SYMBOL_EXPORT + +#else +#define BASE64_EXPORT BASE64_SYMBOL_IMPORT +#endif + +#define BASE64_NO_EXPORT BASE64_SYMBOL_PRIVATE +#endif + + +#ifdef __cplusplus +extern "C" { +#endif + +/* These are the flags that can be passed in the `flags` argument. The values + * below force the use of a given codec, even if that codec is a no-op in the + * current build. Used in testing. Set to 0 for the default behavior, which is + * runtime feature detection on x86, a compile-time fixed codec on ARM, and + * the plain codec on other platforms: */ +#define BASE64_FORCE_AVX2 (1 << 0) +#define BASE64_FORCE_NEON32 (1 << 1) +#define BASE64_FORCE_NEON64 (1 << 2) +#define BASE64_FORCE_PLAIN (1 << 3) +#define BASE64_FORCE_SSSE3 (1 << 4) +#define BASE64_FORCE_SSE41 (1 << 5) +#define BASE64_FORCE_SSE42 (1 << 6) +#define BASE64_FORCE_AVX (1 << 7) +#define BASE64_FORCE_AVX512 (1 << 8) + +struct base64_state { + int eof; + int bytes; + int flags; + unsigned char carry; +}; + +/* Wrapper function to encode a plain string of given length. Output is written + * to *out without trailing zero. Output length in bytes is written to *outlen. + * The buffer in `out` has been allocated by the caller and is at least 4/3 the + * size of the input. See above for `flags`; set to 0 for default operation: */ +void BASE64_EXPORT base64_encode + ( const char *src + , size_t srclen + , char *out + , size_t *outlen + , int flags + ) ; + +/* Call this before calling base64_stream_encode() to init the state. 
See above + * for `flags`; set to 0 for default operation: */ +void BASE64_EXPORT base64_stream_encode_init + ( struct base64_state *state + , int flags + ) ; + +/* Encodes the block of data of given length at `src`, into the buffer at + * `out`. Caller is responsible for allocating a large enough out-buffer; it + * must be at least 4/3 the size of the in-buffer, but take some margin. Places + * the number of new bytes written into `outlen` (which is set to zero when the + * function starts). Does not zero-terminate or finalize the output. */ +void BASE64_EXPORT base64_stream_encode + ( struct base64_state *state + , const char *src + , size_t srclen + , char *out + , size_t *outlen + ) ; + +/* Finalizes the output begun by previous calls to `base64_stream_encode()`. + * Adds the required end-of-stream markers if appropriate. `outlen` is modified + * and will contain the number of new bytes written at `out` (which will quite + * often be zero). */ +void BASE64_EXPORT base64_stream_encode_final + ( struct base64_state *state + , char *out + , size_t *outlen + ) ; + +/* Wrapper function to decode a plain string of given length. Output is written + * to *out without trailing zero. Output length in bytes is written to *outlen. + * The buffer in `out` has been allocated by the caller and is at least 3/4 the + * size of the input. See above for `flags`, set to 0 for default operation: */ +int BASE64_EXPORT base64_decode + ( const char *src + , size_t srclen + , char *out + , size_t *outlen + , int flags + ) ; + +/* Call this before calling base64_stream_decode() to init the state. See above + * for `flags`; set to 0 for default operation: */ +void BASE64_EXPORT base64_stream_decode_init + ( struct base64_state *state + , int flags + ) ; + +/* Decodes the block of data of given length at `src`, into the buffer at + * `out`. Caller is responsible for allocating a large enough out-buffer; it + * must be at least 3/4 the size of the in-buffer, but take some margin. Places + * the number of new bytes written into `outlen` (which is set to zero when the + * function starts). Does not zero-terminate the output. Returns 1 if all is + * well, and 0 if a decoding error was found, such as an invalid character. + * Returns -1 if the chosen codec is not included in the current build. Used by + * the test harness to check whether a codec is available for testing. 
*/ +int BASE64_EXPORT base64_stream_decode + ( struct base64_state *state + , const char *src + , size_t srclen + , char *out + , size_t *outlen + ) ; + +#ifdef __cplusplus +} +#endif + +#endif /* LIBBASE64_H */ diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/tables/table_dec_32bit.h b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/tables/table_dec_32bit.h new file mode 100644 index 0000000..f5d951f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/tables/table_dec_32bit.h @@ -0,0 +1,393 @@ +#include +#define CHAR62 '+' +#define CHAR63 '/' +#define CHARPAD '=' + + +#if BASE64_LITTLE_ENDIAN + + +/* SPECIAL DECODE TABLES FOR LITTLE ENDIAN (INTEL) CPUS */ + +const uint32_t base64_table_dec_32bit_d0[256] = { +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0x000000f8, 0xffffffff, 0xffffffff, 0xffffffff, 0x000000fc, +0x000000d0, 0x000000d4, 0x000000d8, 0x000000dc, 0x000000e0, 0x000000e4, +0x000000e8, 0x000000ec, 0x000000f0, 0x000000f4, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, +0x00000004, 0x00000008, 0x0000000c, 0x00000010, 0x00000014, 0x00000018, +0x0000001c, 0x00000020, 0x00000024, 0x00000028, 0x0000002c, 0x00000030, +0x00000034, 0x00000038, 0x0000003c, 0x00000040, 0x00000044, 0x00000048, +0x0000004c, 0x00000050, 0x00000054, 0x00000058, 0x0000005c, 0x00000060, +0x00000064, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0x00000068, 0x0000006c, 0x00000070, 0x00000074, 0x00000078, +0x0000007c, 0x00000080, 0x00000084, 0x00000088, 0x0000008c, 0x00000090, +0x00000094, 0x00000098, 0x0000009c, 0x000000a0, 0x000000a4, 0x000000a8, +0x000000ac, 0x000000b0, 0x000000b4, 0x000000b8, 0x000000bc, 0x000000c0, +0x000000c4, 0x000000c8, 0x000000cc, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 
0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff +}; + + +const uint32_t base64_table_dec_32bit_d1[256] = { +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0x0000e003, 0xffffffff, 0xffffffff, 0xffffffff, 0x0000f003, +0x00004003, 0x00005003, 0x00006003, 0x00007003, 0x00008003, 0x00009003, +0x0000a003, 0x0000b003, 0x0000c003, 0x0000d003, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, +0x00001000, 0x00002000, 0x00003000, 0x00004000, 0x00005000, 0x00006000, +0x00007000, 0x00008000, 0x00009000, 0x0000a000, 0x0000b000, 0x0000c000, +0x0000d000, 0x0000e000, 0x0000f000, 0x00000001, 0x00001001, 0x00002001, +0x00003001, 0x00004001, 0x00005001, 0x00006001, 0x00007001, 0x00008001, +0x00009001, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0x0000a001, 0x0000b001, 0x0000c001, 0x0000d001, 0x0000e001, +0x0000f001, 0x00000002, 0x00001002, 0x00002002, 0x00003002, 0x00004002, +0x00005002, 0x00006002, 0x00007002, 0x00008002, 0x00009002, 0x0000a002, +0x0000b002, 0x0000c002, 0x0000d002, 0x0000e002, 0x0000f002, 0x00000003, +0x00001003, 0x00002003, 0x00003003, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff +}; + + +const uint32_t base64_table_dec_32bit_d2[256] 
= { +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0x00800f00, 0xffffffff, 0xffffffff, 0xffffffff, 0x00c00f00, +0x00000d00, 0x00400d00, 0x00800d00, 0x00c00d00, 0x00000e00, 0x00400e00, +0x00800e00, 0x00c00e00, 0x00000f00, 0x00400f00, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, +0x00400000, 0x00800000, 0x00c00000, 0x00000100, 0x00400100, 0x00800100, +0x00c00100, 0x00000200, 0x00400200, 0x00800200, 0x00c00200, 0x00000300, +0x00400300, 0x00800300, 0x00c00300, 0x00000400, 0x00400400, 0x00800400, +0x00c00400, 0x00000500, 0x00400500, 0x00800500, 0x00c00500, 0x00000600, +0x00400600, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0x00800600, 0x00c00600, 0x00000700, 0x00400700, 0x00800700, +0x00c00700, 0x00000800, 0x00400800, 0x00800800, 0x00c00800, 0x00000900, +0x00400900, 0x00800900, 0x00c00900, 0x00000a00, 0x00400a00, 0x00800a00, +0x00c00a00, 0x00000b00, 0x00400b00, 0x00800b00, 0x00c00b00, 0x00000c00, +0x00400c00, 0x00800c00, 0x00c00c00, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff +}; + + +const uint32_t base64_table_dec_32bit_d3[256] = { +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 
0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0x003e0000, 0xffffffff, 0xffffffff, 0xffffffff, 0x003f0000, +0x00340000, 0x00350000, 0x00360000, 0x00370000, 0x00380000, 0x00390000, +0x003a0000, 0x003b0000, 0x003c0000, 0x003d0000, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, +0x00010000, 0x00020000, 0x00030000, 0x00040000, 0x00050000, 0x00060000, +0x00070000, 0x00080000, 0x00090000, 0x000a0000, 0x000b0000, 0x000c0000, +0x000d0000, 0x000e0000, 0x000f0000, 0x00100000, 0x00110000, 0x00120000, +0x00130000, 0x00140000, 0x00150000, 0x00160000, 0x00170000, 0x00180000, +0x00190000, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0x001a0000, 0x001b0000, 0x001c0000, 0x001d0000, 0x001e0000, +0x001f0000, 0x00200000, 0x00210000, 0x00220000, 0x00230000, 0x00240000, +0x00250000, 0x00260000, 0x00270000, 0x00280000, 0x00290000, 0x002a0000, +0x002b0000, 0x002c0000, 0x002d0000, 0x002e0000, 0x002f0000, 0x00300000, +0x00310000, 0x00320000, 0x00330000, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff +}; + + +#else + + +/* SPECIAL DECODE TABLES FOR BIG ENDIAN (IBM/MOTOROLA/SUN) CPUS */ + +const uint32_t base64_table_dec_32bit_d0[256] = { +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xf8000000, 0xffffffff, 0xffffffff, 0xffffffff, 0xfc000000, +0xd0000000, 0xd4000000, 0xd8000000, 0xdc000000, 0xe0000000, 0xe4000000, +0xe8000000, 
0xec000000, 0xf0000000, 0xf4000000, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, +0x04000000, 0x08000000, 0x0c000000, 0x10000000, 0x14000000, 0x18000000, +0x1c000000, 0x20000000, 0x24000000, 0x28000000, 0x2c000000, 0x30000000, +0x34000000, 0x38000000, 0x3c000000, 0x40000000, 0x44000000, 0x48000000, +0x4c000000, 0x50000000, 0x54000000, 0x58000000, 0x5c000000, 0x60000000, +0x64000000, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0x68000000, 0x6c000000, 0x70000000, 0x74000000, 0x78000000, +0x7c000000, 0x80000000, 0x84000000, 0x88000000, 0x8c000000, 0x90000000, +0x94000000, 0x98000000, 0x9c000000, 0xa0000000, 0xa4000000, 0xa8000000, +0xac000000, 0xb0000000, 0xb4000000, 0xb8000000, 0xbc000000, 0xc0000000, +0xc4000000, 0xc8000000, 0xcc000000, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff +}; + + +const uint32_t base64_table_dec_32bit_d1[256] = { +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0x03e00000, 0xffffffff, 0xffffffff, 0xffffffff, 0x03f00000, +0x03400000, 0x03500000, 0x03600000, 0x03700000, 0x03800000, 0x03900000, +0x03a00000, 0x03b00000, 0x03c00000, 0x03d00000, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, +0x00100000, 0x00200000, 0x00300000, 0x00400000, 0x00500000, 0x00600000, +0x00700000, 0x00800000, 0x00900000, 0x00a00000, 0x00b00000, 0x00c00000, +0x00d00000, 0x00e00000, 0x00f00000, 0x01000000, 0x01100000, 0x01200000, +0x01300000, 0x01400000, 
0x01500000, 0x01600000, 0x01700000, 0x01800000, +0x01900000, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0x01a00000, 0x01b00000, 0x01c00000, 0x01d00000, 0x01e00000, +0x01f00000, 0x02000000, 0x02100000, 0x02200000, 0x02300000, 0x02400000, +0x02500000, 0x02600000, 0x02700000, 0x02800000, 0x02900000, 0x02a00000, +0x02b00000, 0x02c00000, 0x02d00000, 0x02e00000, 0x02f00000, 0x03000000, +0x03100000, 0x03200000, 0x03300000, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff +}; + + +const uint32_t base64_table_dec_32bit_d2[256] = { +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0x000f8000, 0xffffffff, 0xffffffff, 0xffffffff, 0x000fc000, +0x000d0000, 0x000d4000, 0x000d8000, 0x000dc000, 0x000e0000, 0x000e4000, +0x000e8000, 0x000ec000, 0x000f0000, 0x000f4000, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, +0x00004000, 0x00008000, 0x0000c000, 0x00010000, 0x00014000, 0x00018000, +0x0001c000, 0x00020000, 0x00024000, 0x00028000, 0x0002c000, 0x00030000, +0x00034000, 0x00038000, 0x0003c000, 0x00040000, 0x00044000, 0x00048000, +0x0004c000, 0x00050000, 0x00054000, 0x00058000, 0x0005c000, 0x00060000, +0x00064000, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0x00068000, 0x0006c000, 0x00070000, 0x00074000, 0x00078000, +0x0007c000, 0x00080000, 0x00084000, 0x00088000, 0x0008c000, 0x00090000, +0x00094000, 0x00098000, 0x0009c000, 0x000a0000, 0x000a4000, 0x000a8000, +0x000ac000, 0x000b0000, 0x000b4000, 
0x000b8000, 0x000bc000, 0x000c0000, +0x000c4000, 0x000c8000, 0x000cc000, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff +}; + + +const uint32_t base64_table_dec_32bit_d3[256] = { +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0x00003e00, 0xffffffff, 0xffffffff, 0xffffffff, 0x00003f00, +0x00003400, 0x00003500, 0x00003600, 0x00003700, 0x00003800, 0x00003900, +0x00003a00, 0x00003b00, 0x00003c00, 0x00003d00, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, +0x00000100, 0x00000200, 0x00000300, 0x00000400, 0x00000500, 0x00000600, +0x00000700, 0x00000800, 0x00000900, 0x00000a00, 0x00000b00, 0x00000c00, +0x00000d00, 0x00000e00, 0x00000f00, 0x00001000, 0x00001100, 0x00001200, +0x00001300, 0x00001400, 0x00001500, 0x00001600, 0x00001700, 0x00001800, +0x00001900, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0x00001a00, 0x00001b00, 0x00001c00, 0x00001d00, 0x00001e00, +0x00001f00, 0x00002000, 0x00002100, 0x00002200, 0x00002300, 0x00002400, +0x00002500, 0x00002600, 0x00002700, 0x00002800, 0x00002900, 0x00002a00, +0x00002b00, 0x00002c00, 0x00002d00, 0x00002e00, 0x00002f00, 0x00003000, +0x00003100, 0x00003200, 0x00003300, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 
0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, +0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff +}; + + +#endif diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/tables/table_enc_12bit.h b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/tables/table_enc_12bit.h new file mode 100644 index 0000000..2bc0d23 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/tables/table_enc_12bit.h @@ -0,0 +1,1031 @@ +#include + +const uint16_t base64_table_enc_12bit[] = { +#if BASE64_LITTLE_ENDIAN + 0x4141U, 0x4241U, 0x4341U, 0x4441U, 0x4541U, 0x4641U, 0x4741U, 0x4841U, + 0x4941U, 0x4A41U, 0x4B41U, 0x4C41U, 0x4D41U, 0x4E41U, 0x4F41U, 0x5041U, + 0x5141U, 0x5241U, 0x5341U, 0x5441U, 0x5541U, 0x5641U, 0x5741U, 0x5841U, + 0x5941U, 0x5A41U, 0x6141U, 0x6241U, 0x6341U, 0x6441U, 0x6541U, 0x6641U, + 0x6741U, 0x6841U, 0x6941U, 0x6A41U, 0x6B41U, 0x6C41U, 0x6D41U, 0x6E41U, + 0x6F41U, 0x7041U, 0x7141U, 0x7241U, 0x7341U, 0x7441U, 0x7541U, 0x7641U, + 0x7741U, 0x7841U, 0x7941U, 0x7A41U, 0x3041U, 0x3141U, 0x3241U, 0x3341U, + 0x3441U, 0x3541U, 0x3641U, 0x3741U, 0x3841U, 0x3941U, 0x2B41U, 0x2F41U, + 0x4142U, 0x4242U, 0x4342U, 0x4442U, 0x4542U, 0x4642U, 0x4742U, 0x4842U, + 0x4942U, 0x4A42U, 0x4B42U, 0x4C42U, 0x4D42U, 0x4E42U, 0x4F42U, 0x5042U, + 0x5142U, 0x5242U, 0x5342U, 0x5442U, 0x5542U, 0x5642U, 0x5742U, 0x5842U, + 0x5942U, 0x5A42U, 0x6142U, 0x6242U, 0x6342U, 0x6442U, 0x6542U, 0x6642U, + 0x6742U, 0x6842U, 0x6942U, 0x6A42U, 0x6B42U, 0x6C42U, 0x6D42U, 0x6E42U, + 0x6F42U, 0x7042U, 0x7142U, 0x7242U, 0x7342U, 0x7442U, 0x7542U, 0x7642U, + 0x7742U, 0x7842U, 0x7942U, 0x7A42U, 0x3042U, 0x3142U, 0x3242U, 0x3342U, + 0x3442U, 0x3542U, 0x3642U, 0x3742U, 0x3842U, 0x3942U, 0x2B42U, 0x2F42U, + 0x4143U, 0x4243U, 0x4343U, 0x4443U, 0x4543U, 0x4643U, 0x4743U, 0x4843U, + 0x4943U, 0x4A43U, 0x4B43U, 0x4C43U, 0x4D43U, 0x4E43U, 0x4F43U, 0x5043U, + 0x5143U, 0x5243U, 0x5343U, 0x5443U, 0x5543U, 0x5643U, 0x5743U, 0x5843U, + 0x5943U, 0x5A43U, 0x6143U, 0x6243U, 0x6343U, 0x6443U, 0x6543U, 0x6643U, + 0x6743U, 0x6843U, 0x6943U, 0x6A43U, 0x6B43U, 0x6C43U, 0x6D43U, 0x6E43U, + 0x6F43U, 0x7043U, 0x7143U, 0x7243U, 0x7343U, 0x7443U, 0x7543U, 0x7643U, + 0x7743U, 0x7843U, 0x7943U, 0x7A43U, 0x3043U, 0x3143U, 0x3243U, 0x3343U, + 0x3443U, 0x3543U, 0x3643U, 0x3743U, 0x3843U, 0x3943U, 0x2B43U, 0x2F43U, + 0x4144U, 0x4244U, 0x4344U, 
0x4444U, 0x4544U, 0x4644U, 0x4744U, 0x4844U, + 0x4944U, 0x4A44U, 0x4B44U, 0x4C44U, 0x4D44U, 0x4E44U, 0x4F44U, 0x5044U, + 0x5144U, 0x5244U, 0x5344U, 0x5444U, 0x5544U, 0x5644U, 0x5744U, 0x5844U, + 0x5944U, 0x5A44U, 0x6144U, 0x6244U, 0x6344U, 0x6444U, 0x6544U, 0x6644U, + 0x6744U, 0x6844U, 0x6944U, 0x6A44U, 0x6B44U, 0x6C44U, 0x6D44U, 0x6E44U, + 0x6F44U, 0x7044U, 0x7144U, 0x7244U, 0x7344U, 0x7444U, 0x7544U, 0x7644U, + 0x7744U, 0x7844U, 0x7944U, 0x7A44U, 0x3044U, 0x3144U, 0x3244U, 0x3344U, + 0x3444U, 0x3544U, 0x3644U, 0x3744U, 0x3844U, 0x3944U, 0x2B44U, 0x2F44U, + 0x4145U, 0x4245U, 0x4345U, 0x4445U, 0x4545U, 0x4645U, 0x4745U, 0x4845U, + 0x4945U, 0x4A45U, 0x4B45U, 0x4C45U, 0x4D45U, 0x4E45U, 0x4F45U, 0x5045U, + 0x5145U, 0x5245U, 0x5345U, 0x5445U, 0x5545U, 0x5645U, 0x5745U, 0x5845U, + 0x5945U, 0x5A45U, 0x6145U, 0x6245U, 0x6345U, 0x6445U, 0x6545U, 0x6645U, + 0x6745U, 0x6845U, 0x6945U, 0x6A45U, 0x6B45U, 0x6C45U, 0x6D45U, 0x6E45U, + 0x6F45U, 0x7045U, 0x7145U, 0x7245U, 0x7345U, 0x7445U, 0x7545U, 0x7645U, + 0x7745U, 0x7845U, 0x7945U, 0x7A45U, 0x3045U, 0x3145U, 0x3245U, 0x3345U, + 0x3445U, 0x3545U, 0x3645U, 0x3745U, 0x3845U, 0x3945U, 0x2B45U, 0x2F45U, + 0x4146U, 0x4246U, 0x4346U, 0x4446U, 0x4546U, 0x4646U, 0x4746U, 0x4846U, + 0x4946U, 0x4A46U, 0x4B46U, 0x4C46U, 0x4D46U, 0x4E46U, 0x4F46U, 0x5046U, + 0x5146U, 0x5246U, 0x5346U, 0x5446U, 0x5546U, 0x5646U, 0x5746U, 0x5846U, + 0x5946U, 0x5A46U, 0x6146U, 0x6246U, 0x6346U, 0x6446U, 0x6546U, 0x6646U, + 0x6746U, 0x6846U, 0x6946U, 0x6A46U, 0x6B46U, 0x6C46U, 0x6D46U, 0x6E46U, + 0x6F46U, 0x7046U, 0x7146U, 0x7246U, 0x7346U, 0x7446U, 0x7546U, 0x7646U, + 0x7746U, 0x7846U, 0x7946U, 0x7A46U, 0x3046U, 0x3146U, 0x3246U, 0x3346U, + 0x3446U, 0x3546U, 0x3646U, 0x3746U, 0x3846U, 0x3946U, 0x2B46U, 0x2F46U, + 0x4147U, 0x4247U, 0x4347U, 0x4447U, 0x4547U, 0x4647U, 0x4747U, 0x4847U, + 0x4947U, 0x4A47U, 0x4B47U, 0x4C47U, 0x4D47U, 0x4E47U, 0x4F47U, 0x5047U, + 0x5147U, 0x5247U, 0x5347U, 0x5447U, 0x5547U, 0x5647U, 0x5747U, 0x5847U, + 0x5947U, 0x5A47U, 0x6147U, 0x6247U, 0x6347U, 0x6447U, 0x6547U, 0x6647U, + 0x6747U, 0x6847U, 0x6947U, 0x6A47U, 0x6B47U, 0x6C47U, 0x6D47U, 0x6E47U, + 0x6F47U, 0x7047U, 0x7147U, 0x7247U, 0x7347U, 0x7447U, 0x7547U, 0x7647U, + 0x7747U, 0x7847U, 0x7947U, 0x7A47U, 0x3047U, 0x3147U, 0x3247U, 0x3347U, + 0x3447U, 0x3547U, 0x3647U, 0x3747U, 0x3847U, 0x3947U, 0x2B47U, 0x2F47U, + 0x4148U, 0x4248U, 0x4348U, 0x4448U, 0x4548U, 0x4648U, 0x4748U, 0x4848U, + 0x4948U, 0x4A48U, 0x4B48U, 0x4C48U, 0x4D48U, 0x4E48U, 0x4F48U, 0x5048U, + 0x5148U, 0x5248U, 0x5348U, 0x5448U, 0x5548U, 0x5648U, 0x5748U, 0x5848U, + 0x5948U, 0x5A48U, 0x6148U, 0x6248U, 0x6348U, 0x6448U, 0x6548U, 0x6648U, + 0x6748U, 0x6848U, 0x6948U, 0x6A48U, 0x6B48U, 0x6C48U, 0x6D48U, 0x6E48U, + 0x6F48U, 0x7048U, 0x7148U, 0x7248U, 0x7348U, 0x7448U, 0x7548U, 0x7648U, + 0x7748U, 0x7848U, 0x7948U, 0x7A48U, 0x3048U, 0x3148U, 0x3248U, 0x3348U, + 0x3448U, 0x3548U, 0x3648U, 0x3748U, 0x3848U, 0x3948U, 0x2B48U, 0x2F48U, + 0x4149U, 0x4249U, 0x4349U, 0x4449U, 0x4549U, 0x4649U, 0x4749U, 0x4849U, + 0x4949U, 0x4A49U, 0x4B49U, 0x4C49U, 0x4D49U, 0x4E49U, 0x4F49U, 0x5049U, + 0x5149U, 0x5249U, 0x5349U, 0x5449U, 0x5549U, 0x5649U, 0x5749U, 0x5849U, + 0x5949U, 0x5A49U, 0x6149U, 0x6249U, 0x6349U, 0x6449U, 0x6549U, 0x6649U, + 0x6749U, 0x6849U, 0x6949U, 0x6A49U, 0x6B49U, 0x6C49U, 0x6D49U, 0x6E49U, + 0x6F49U, 0x7049U, 0x7149U, 0x7249U, 0x7349U, 0x7449U, 0x7549U, 0x7649U, + 0x7749U, 0x7849U, 0x7949U, 0x7A49U, 0x3049U, 0x3149U, 0x3249U, 0x3349U, + 0x3449U, 0x3549U, 0x3649U, 0x3749U, 0x3849U, 0x3949U, 0x2B49U, 0x2F49U, + 0x414AU, 0x424AU, 0x434AU, 
0x444AU, 0x454AU, 0x464AU, 0x474AU, 0x484AU, + 0x494AU, 0x4A4AU, 0x4B4AU, 0x4C4AU, 0x4D4AU, 0x4E4AU, 0x4F4AU, 0x504AU, + 0x514AU, 0x524AU, 0x534AU, 0x544AU, 0x554AU, 0x564AU, 0x574AU, 0x584AU, + 0x594AU, 0x5A4AU, 0x614AU, 0x624AU, 0x634AU, 0x644AU, 0x654AU, 0x664AU, + 0x674AU, 0x684AU, 0x694AU, 0x6A4AU, 0x6B4AU, 0x6C4AU, 0x6D4AU, 0x6E4AU, + 0x6F4AU, 0x704AU, 0x714AU, 0x724AU, 0x734AU, 0x744AU, 0x754AU, 0x764AU, + 0x774AU, 0x784AU, 0x794AU, 0x7A4AU, 0x304AU, 0x314AU, 0x324AU, 0x334AU, + 0x344AU, 0x354AU, 0x364AU, 0x374AU, 0x384AU, 0x394AU, 0x2B4AU, 0x2F4AU, + 0x414BU, 0x424BU, 0x434BU, 0x444BU, 0x454BU, 0x464BU, 0x474BU, 0x484BU, + 0x494BU, 0x4A4BU, 0x4B4BU, 0x4C4BU, 0x4D4BU, 0x4E4BU, 0x4F4BU, 0x504BU, + 0x514BU, 0x524BU, 0x534BU, 0x544BU, 0x554BU, 0x564BU, 0x574BU, 0x584BU, + 0x594BU, 0x5A4BU, 0x614BU, 0x624BU, 0x634BU, 0x644BU, 0x654BU, 0x664BU, + 0x674BU, 0x684BU, 0x694BU, 0x6A4BU, 0x6B4BU, 0x6C4BU, 0x6D4BU, 0x6E4BU, + 0x6F4BU, 0x704BU, 0x714BU, 0x724BU, 0x734BU, 0x744BU, 0x754BU, 0x764BU, + 0x774BU, 0x784BU, 0x794BU, 0x7A4BU, 0x304BU, 0x314BU, 0x324BU, 0x334BU, + 0x344BU, 0x354BU, 0x364BU, 0x374BU, 0x384BU, 0x394BU, 0x2B4BU, 0x2F4BU, + 0x414CU, 0x424CU, 0x434CU, 0x444CU, 0x454CU, 0x464CU, 0x474CU, 0x484CU, + 0x494CU, 0x4A4CU, 0x4B4CU, 0x4C4CU, 0x4D4CU, 0x4E4CU, 0x4F4CU, 0x504CU, + 0x514CU, 0x524CU, 0x534CU, 0x544CU, 0x554CU, 0x564CU, 0x574CU, 0x584CU, + 0x594CU, 0x5A4CU, 0x614CU, 0x624CU, 0x634CU, 0x644CU, 0x654CU, 0x664CU, + 0x674CU, 0x684CU, 0x694CU, 0x6A4CU, 0x6B4CU, 0x6C4CU, 0x6D4CU, 0x6E4CU, + 0x6F4CU, 0x704CU, 0x714CU, 0x724CU, 0x734CU, 0x744CU, 0x754CU, 0x764CU, + 0x774CU, 0x784CU, 0x794CU, 0x7A4CU, 0x304CU, 0x314CU, 0x324CU, 0x334CU, + 0x344CU, 0x354CU, 0x364CU, 0x374CU, 0x384CU, 0x394CU, 0x2B4CU, 0x2F4CU, + 0x414DU, 0x424DU, 0x434DU, 0x444DU, 0x454DU, 0x464DU, 0x474DU, 0x484DU, + 0x494DU, 0x4A4DU, 0x4B4DU, 0x4C4DU, 0x4D4DU, 0x4E4DU, 0x4F4DU, 0x504DU, + 0x514DU, 0x524DU, 0x534DU, 0x544DU, 0x554DU, 0x564DU, 0x574DU, 0x584DU, + 0x594DU, 0x5A4DU, 0x614DU, 0x624DU, 0x634DU, 0x644DU, 0x654DU, 0x664DU, + 0x674DU, 0x684DU, 0x694DU, 0x6A4DU, 0x6B4DU, 0x6C4DU, 0x6D4DU, 0x6E4DU, + 0x6F4DU, 0x704DU, 0x714DU, 0x724DU, 0x734DU, 0x744DU, 0x754DU, 0x764DU, + 0x774DU, 0x784DU, 0x794DU, 0x7A4DU, 0x304DU, 0x314DU, 0x324DU, 0x334DU, + 0x344DU, 0x354DU, 0x364DU, 0x374DU, 0x384DU, 0x394DU, 0x2B4DU, 0x2F4DU, + 0x414EU, 0x424EU, 0x434EU, 0x444EU, 0x454EU, 0x464EU, 0x474EU, 0x484EU, + 0x494EU, 0x4A4EU, 0x4B4EU, 0x4C4EU, 0x4D4EU, 0x4E4EU, 0x4F4EU, 0x504EU, + 0x514EU, 0x524EU, 0x534EU, 0x544EU, 0x554EU, 0x564EU, 0x574EU, 0x584EU, + 0x594EU, 0x5A4EU, 0x614EU, 0x624EU, 0x634EU, 0x644EU, 0x654EU, 0x664EU, + 0x674EU, 0x684EU, 0x694EU, 0x6A4EU, 0x6B4EU, 0x6C4EU, 0x6D4EU, 0x6E4EU, + 0x6F4EU, 0x704EU, 0x714EU, 0x724EU, 0x734EU, 0x744EU, 0x754EU, 0x764EU, + 0x774EU, 0x784EU, 0x794EU, 0x7A4EU, 0x304EU, 0x314EU, 0x324EU, 0x334EU, + 0x344EU, 0x354EU, 0x364EU, 0x374EU, 0x384EU, 0x394EU, 0x2B4EU, 0x2F4EU, + 0x414FU, 0x424FU, 0x434FU, 0x444FU, 0x454FU, 0x464FU, 0x474FU, 0x484FU, + 0x494FU, 0x4A4FU, 0x4B4FU, 0x4C4FU, 0x4D4FU, 0x4E4FU, 0x4F4FU, 0x504FU, + 0x514FU, 0x524FU, 0x534FU, 0x544FU, 0x554FU, 0x564FU, 0x574FU, 0x584FU, + 0x594FU, 0x5A4FU, 0x614FU, 0x624FU, 0x634FU, 0x644FU, 0x654FU, 0x664FU, + 0x674FU, 0x684FU, 0x694FU, 0x6A4FU, 0x6B4FU, 0x6C4FU, 0x6D4FU, 0x6E4FU, + 0x6F4FU, 0x704FU, 0x714FU, 0x724FU, 0x734FU, 0x744FU, 0x754FU, 0x764FU, + 0x774FU, 0x784FU, 0x794FU, 0x7A4FU, 0x304FU, 0x314FU, 0x324FU, 0x334FU, + 0x344FU, 0x354FU, 0x364FU, 0x374FU, 0x384FU, 0x394FU, 0x2B4FU, 0x2F4FU, + 0x4150U, 0x4250U, 0x4350U, 
0x4450U, 0x4550U, 0x4650U, 0x4750U, 0x4850U, + 0x4950U, 0x4A50U, 0x4B50U, 0x4C50U, 0x4D50U, 0x4E50U, 0x4F50U, 0x5050U, + 0x5150U, 0x5250U, 0x5350U, 0x5450U, 0x5550U, 0x5650U, 0x5750U, 0x5850U, + 0x5950U, 0x5A50U, 0x6150U, 0x6250U, 0x6350U, 0x6450U, 0x6550U, 0x6650U, + 0x6750U, 0x6850U, 0x6950U, 0x6A50U, 0x6B50U, 0x6C50U, 0x6D50U, 0x6E50U, + 0x6F50U, 0x7050U, 0x7150U, 0x7250U, 0x7350U, 0x7450U, 0x7550U, 0x7650U, + 0x7750U, 0x7850U, 0x7950U, 0x7A50U, 0x3050U, 0x3150U, 0x3250U, 0x3350U, + 0x3450U, 0x3550U, 0x3650U, 0x3750U, 0x3850U, 0x3950U, 0x2B50U, 0x2F50U, + 0x4151U, 0x4251U, 0x4351U, 0x4451U, 0x4551U, 0x4651U, 0x4751U, 0x4851U, + 0x4951U, 0x4A51U, 0x4B51U, 0x4C51U, 0x4D51U, 0x4E51U, 0x4F51U, 0x5051U, + 0x5151U, 0x5251U, 0x5351U, 0x5451U, 0x5551U, 0x5651U, 0x5751U, 0x5851U, + 0x5951U, 0x5A51U, 0x6151U, 0x6251U, 0x6351U, 0x6451U, 0x6551U, 0x6651U, + 0x6751U, 0x6851U, 0x6951U, 0x6A51U, 0x6B51U, 0x6C51U, 0x6D51U, 0x6E51U, + 0x6F51U, 0x7051U, 0x7151U, 0x7251U, 0x7351U, 0x7451U, 0x7551U, 0x7651U, + 0x7751U, 0x7851U, 0x7951U, 0x7A51U, 0x3051U, 0x3151U, 0x3251U, 0x3351U, + 0x3451U, 0x3551U, 0x3651U, 0x3751U, 0x3851U, 0x3951U, 0x2B51U, 0x2F51U, + 0x4152U, 0x4252U, 0x4352U, 0x4452U, 0x4552U, 0x4652U, 0x4752U, 0x4852U, + 0x4952U, 0x4A52U, 0x4B52U, 0x4C52U, 0x4D52U, 0x4E52U, 0x4F52U, 0x5052U, + 0x5152U, 0x5252U, 0x5352U, 0x5452U, 0x5552U, 0x5652U, 0x5752U, 0x5852U, + 0x5952U, 0x5A52U, 0x6152U, 0x6252U, 0x6352U, 0x6452U, 0x6552U, 0x6652U, + 0x6752U, 0x6852U, 0x6952U, 0x6A52U, 0x6B52U, 0x6C52U, 0x6D52U, 0x6E52U, + 0x6F52U, 0x7052U, 0x7152U, 0x7252U, 0x7352U, 0x7452U, 0x7552U, 0x7652U, + 0x7752U, 0x7852U, 0x7952U, 0x7A52U, 0x3052U, 0x3152U, 0x3252U, 0x3352U, + 0x3452U, 0x3552U, 0x3652U, 0x3752U, 0x3852U, 0x3952U, 0x2B52U, 0x2F52U, + 0x4153U, 0x4253U, 0x4353U, 0x4453U, 0x4553U, 0x4653U, 0x4753U, 0x4853U, + 0x4953U, 0x4A53U, 0x4B53U, 0x4C53U, 0x4D53U, 0x4E53U, 0x4F53U, 0x5053U, + 0x5153U, 0x5253U, 0x5353U, 0x5453U, 0x5553U, 0x5653U, 0x5753U, 0x5853U, + 0x5953U, 0x5A53U, 0x6153U, 0x6253U, 0x6353U, 0x6453U, 0x6553U, 0x6653U, + 0x6753U, 0x6853U, 0x6953U, 0x6A53U, 0x6B53U, 0x6C53U, 0x6D53U, 0x6E53U, + 0x6F53U, 0x7053U, 0x7153U, 0x7253U, 0x7353U, 0x7453U, 0x7553U, 0x7653U, + 0x7753U, 0x7853U, 0x7953U, 0x7A53U, 0x3053U, 0x3153U, 0x3253U, 0x3353U, + 0x3453U, 0x3553U, 0x3653U, 0x3753U, 0x3853U, 0x3953U, 0x2B53U, 0x2F53U, + 0x4154U, 0x4254U, 0x4354U, 0x4454U, 0x4554U, 0x4654U, 0x4754U, 0x4854U, + 0x4954U, 0x4A54U, 0x4B54U, 0x4C54U, 0x4D54U, 0x4E54U, 0x4F54U, 0x5054U, + 0x5154U, 0x5254U, 0x5354U, 0x5454U, 0x5554U, 0x5654U, 0x5754U, 0x5854U, + 0x5954U, 0x5A54U, 0x6154U, 0x6254U, 0x6354U, 0x6454U, 0x6554U, 0x6654U, + 0x6754U, 0x6854U, 0x6954U, 0x6A54U, 0x6B54U, 0x6C54U, 0x6D54U, 0x6E54U, + 0x6F54U, 0x7054U, 0x7154U, 0x7254U, 0x7354U, 0x7454U, 0x7554U, 0x7654U, + 0x7754U, 0x7854U, 0x7954U, 0x7A54U, 0x3054U, 0x3154U, 0x3254U, 0x3354U, + 0x3454U, 0x3554U, 0x3654U, 0x3754U, 0x3854U, 0x3954U, 0x2B54U, 0x2F54U, + 0x4155U, 0x4255U, 0x4355U, 0x4455U, 0x4555U, 0x4655U, 0x4755U, 0x4855U, + 0x4955U, 0x4A55U, 0x4B55U, 0x4C55U, 0x4D55U, 0x4E55U, 0x4F55U, 0x5055U, + 0x5155U, 0x5255U, 0x5355U, 0x5455U, 0x5555U, 0x5655U, 0x5755U, 0x5855U, + 0x5955U, 0x5A55U, 0x6155U, 0x6255U, 0x6355U, 0x6455U, 0x6555U, 0x6655U, + 0x6755U, 0x6855U, 0x6955U, 0x6A55U, 0x6B55U, 0x6C55U, 0x6D55U, 0x6E55U, + 0x6F55U, 0x7055U, 0x7155U, 0x7255U, 0x7355U, 0x7455U, 0x7555U, 0x7655U, + 0x7755U, 0x7855U, 0x7955U, 0x7A55U, 0x3055U, 0x3155U, 0x3255U, 0x3355U, + 0x3455U, 0x3555U, 0x3655U, 0x3755U, 0x3855U, 0x3955U, 0x2B55U, 0x2F55U, + 0x4156U, 0x4256U, 0x4356U, 
0x4456U, 0x4556U, 0x4656U, 0x4756U, 0x4856U, + 0x4956U, 0x4A56U, 0x4B56U, 0x4C56U, 0x4D56U, 0x4E56U, 0x4F56U, 0x5056U, + 0x5156U, 0x5256U, 0x5356U, 0x5456U, 0x5556U, 0x5656U, 0x5756U, 0x5856U, + 0x5956U, 0x5A56U, 0x6156U, 0x6256U, 0x6356U, 0x6456U, 0x6556U, 0x6656U, + 0x6756U, 0x6856U, 0x6956U, 0x6A56U, 0x6B56U, 0x6C56U, 0x6D56U, 0x6E56U, + 0x6F56U, 0x7056U, 0x7156U, 0x7256U, 0x7356U, 0x7456U, 0x7556U, 0x7656U, + 0x7756U, 0x7856U, 0x7956U, 0x7A56U, 0x3056U, 0x3156U, 0x3256U, 0x3356U, + 0x3456U, 0x3556U, 0x3656U, 0x3756U, 0x3856U, 0x3956U, 0x2B56U, 0x2F56U, + 0x4157U, 0x4257U, 0x4357U, 0x4457U, 0x4557U, 0x4657U, 0x4757U, 0x4857U, + 0x4957U, 0x4A57U, 0x4B57U, 0x4C57U, 0x4D57U, 0x4E57U, 0x4F57U, 0x5057U, + 0x5157U, 0x5257U, 0x5357U, 0x5457U, 0x5557U, 0x5657U, 0x5757U, 0x5857U, + 0x5957U, 0x5A57U, 0x6157U, 0x6257U, 0x6357U, 0x6457U, 0x6557U, 0x6657U, + 0x6757U, 0x6857U, 0x6957U, 0x6A57U, 0x6B57U, 0x6C57U, 0x6D57U, 0x6E57U, + 0x6F57U, 0x7057U, 0x7157U, 0x7257U, 0x7357U, 0x7457U, 0x7557U, 0x7657U, + 0x7757U, 0x7857U, 0x7957U, 0x7A57U, 0x3057U, 0x3157U, 0x3257U, 0x3357U, + 0x3457U, 0x3557U, 0x3657U, 0x3757U, 0x3857U, 0x3957U, 0x2B57U, 0x2F57U, + 0x4158U, 0x4258U, 0x4358U, 0x4458U, 0x4558U, 0x4658U, 0x4758U, 0x4858U, + 0x4958U, 0x4A58U, 0x4B58U, 0x4C58U, 0x4D58U, 0x4E58U, 0x4F58U, 0x5058U, + 0x5158U, 0x5258U, 0x5358U, 0x5458U, 0x5558U, 0x5658U, 0x5758U, 0x5858U, + 0x5958U, 0x5A58U, 0x6158U, 0x6258U, 0x6358U, 0x6458U, 0x6558U, 0x6658U, + 0x6758U, 0x6858U, 0x6958U, 0x6A58U, 0x6B58U, 0x6C58U, 0x6D58U, 0x6E58U, + 0x6F58U, 0x7058U, 0x7158U, 0x7258U, 0x7358U, 0x7458U, 0x7558U, 0x7658U, + 0x7758U, 0x7858U, 0x7958U, 0x7A58U, 0x3058U, 0x3158U, 0x3258U, 0x3358U, + 0x3458U, 0x3558U, 0x3658U, 0x3758U, 0x3858U, 0x3958U, 0x2B58U, 0x2F58U, + 0x4159U, 0x4259U, 0x4359U, 0x4459U, 0x4559U, 0x4659U, 0x4759U, 0x4859U, + 0x4959U, 0x4A59U, 0x4B59U, 0x4C59U, 0x4D59U, 0x4E59U, 0x4F59U, 0x5059U, + 0x5159U, 0x5259U, 0x5359U, 0x5459U, 0x5559U, 0x5659U, 0x5759U, 0x5859U, + 0x5959U, 0x5A59U, 0x6159U, 0x6259U, 0x6359U, 0x6459U, 0x6559U, 0x6659U, + 0x6759U, 0x6859U, 0x6959U, 0x6A59U, 0x6B59U, 0x6C59U, 0x6D59U, 0x6E59U, + 0x6F59U, 0x7059U, 0x7159U, 0x7259U, 0x7359U, 0x7459U, 0x7559U, 0x7659U, + 0x7759U, 0x7859U, 0x7959U, 0x7A59U, 0x3059U, 0x3159U, 0x3259U, 0x3359U, + 0x3459U, 0x3559U, 0x3659U, 0x3759U, 0x3859U, 0x3959U, 0x2B59U, 0x2F59U, + 0x415AU, 0x425AU, 0x435AU, 0x445AU, 0x455AU, 0x465AU, 0x475AU, 0x485AU, + 0x495AU, 0x4A5AU, 0x4B5AU, 0x4C5AU, 0x4D5AU, 0x4E5AU, 0x4F5AU, 0x505AU, + 0x515AU, 0x525AU, 0x535AU, 0x545AU, 0x555AU, 0x565AU, 0x575AU, 0x585AU, + 0x595AU, 0x5A5AU, 0x615AU, 0x625AU, 0x635AU, 0x645AU, 0x655AU, 0x665AU, + 0x675AU, 0x685AU, 0x695AU, 0x6A5AU, 0x6B5AU, 0x6C5AU, 0x6D5AU, 0x6E5AU, + 0x6F5AU, 0x705AU, 0x715AU, 0x725AU, 0x735AU, 0x745AU, 0x755AU, 0x765AU, + 0x775AU, 0x785AU, 0x795AU, 0x7A5AU, 0x305AU, 0x315AU, 0x325AU, 0x335AU, + 0x345AU, 0x355AU, 0x365AU, 0x375AU, 0x385AU, 0x395AU, 0x2B5AU, 0x2F5AU, + 0x4161U, 0x4261U, 0x4361U, 0x4461U, 0x4561U, 0x4661U, 0x4761U, 0x4861U, + 0x4961U, 0x4A61U, 0x4B61U, 0x4C61U, 0x4D61U, 0x4E61U, 0x4F61U, 0x5061U, + 0x5161U, 0x5261U, 0x5361U, 0x5461U, 0x5561U, 0x5661U, 0x5761U, 0x5861U, + 0x5961U, 0x5A61U, 0x6161U, 0x6261U, 0x6361U, 0x6461U, 0x6561U, 0x6661U, + 0x6761U, 0x6861U, 0x6961U, 0x6A61U, 0x6B61U, 0x6C61U, 0x6D61U, 0x6E61U, + 0x6F61U, 0x7061U, 0x7161U, 0x7261U, 0x7361U, 0x7461U, 0x7561U, 0x7661U, + 0x7761U, 0x7861U, 0x7961U, 0x7A61U, 0x3061U, 0x3161U, 0x3261U, 0x3361U, + 0x3461U, 0x3561U, 0x3661U, 0x3761U, 0x3861U, 0x3961U, 0x2B61U, 0x2F61U, + 0x4162U, 0x4262U, 0x4362U, 
0x4462U, 0x4562U, 0x4662U, 0x4762U, 0x4862U, + 0x4962U, 0x4A62U, 0x4B62U, 0x4C62U, 0x4D62U, 0x4E62U, 0x4F62U, 0x5062U, + 0x5162U, 0x5262U, 0x5362U, 0x5462U, 0x5562U, 0x5662U, 0x5762U, 0x5862U, + 0x5962U, 0x5A62U, 0x6162U, 0x6262U, 0x6362U, 0x6462U, 0x6562U, 0x6662U, + 0x6762U, 0x6862U, 0x6962U, 0x6A62U, 0x6B62U, 0x6C62U, 0x6D62U, 0x6E62U, + 0x6F62U, 0x7062U, 0x7162U, 0x7262U, 0x7362U, 0x7462U, 0x7562U, 0x7662U, + 0x7762U, 0x7862U, 0x7962U, 0x7A62U, 0x3062U, 0x3162U, 0x3262U, 0x3362U, + 0x3462U, 0x3562U, 0x3662U, 0x3762U, 0x3862U, 0x3962U, 0x2B62U, 0x2F62U, + 0x4163U, 0x4263U, 0x4363U, 0x4463U, 0x4563U, 0x4663U, 0x4763U, 0x4863U, + 0x4963U, 0x4A63U, 0x4B63U, 0x4C63U, 0x4D63U, 0x4E63U, 0x4F63U, 0x5063U, + 0x5163U, 0x5263U, 0x5363U, 0x5463U, 0x5563U, 0x5663U, 0x5763U, 0x5863U, + 0x5963U, 0x5A63U, 0x6163U, 0x6263U, 0x6363U, 0x6463U, 0x6563U, 0x6663U, + 0x6763U, 0x6863U, 0x6963U, 0x6A63U, 0x6B63U, 0x6C63U, 0x6D63U, 0x6E63U, + 0x6F63U, 0x7063U, 0x7163U, 0x7263U, 0x7363U, 0x7463U, 0x7563U, 0x7663U, + 0x7763U, 0x7863U, 0x7963U, 0x7A63U, 0x3063U, 0x3163U, 0x3263U, 0x3363U, + 0x3463U, 0x3563U, 0x3663U, 0x3763U, 0x3863U, 0x3963U, 0x2B63U, 0x2F63U, + 0x4164U, 0x4264U, 0x4364U, 0x4464U, 0x4564U, 0x4664U, 0x4764U, 0x4864U, + 0x4964U, 0x4A64U, 0x4B64U, 0x4C64U, 0x4D64U, 0x4E64U, 0x4F64U, 0x5064U, + 0x5164U, 0x5264U, 0x5364U, 0x5464U, 0x5564U, 0x5664U, 0x5764U, 0x5864U, + 0x5964U, 0x5A64U, 0x6164U, 0x6264U, 0x6364U, 0x6464U, 0x6564U, 0x6664U, + 0x6764U, 0x6864U, 0x6964U, 0x6A64U, 0x6B64U, 0x6C64U, 0x6D64U, 0x6E64U, + 0x6F64U, 0x7064U, 0x7164U, 0x7264U, 0x7364U, 0x7464U, 0x7564U, 0x7664U, + 0x7764U, 0x7864U, 0x7964U, 0x7A64U, 0x3064U, 0x3164U, 0x3264U, 0x3364U, + 0x3464U, 0x3564U, 0x3664U, 0x3764U, 0x3864U, 0x3964U, 0x2B64U, 0x2F64U, + 0x4165U, 0x4265U, 0x4365U, 0x4465U, 0x4565U, 0x4665U, 0x4765U, 0x4865U, + 0x4965U, 0x4A65U, 0x4B65U, 0x4C65U, 0x4D65U, 0x4E65U, 0x4F65U, 0x5065U, + 0x5165U, 0x5265U, 0x5365U, 0x5465U, 0x5565U, 0x5665U, 0x5765U, 0x5865U, + 0x5965U, 0x5A65U, 0x6165U, 0x6265U, 0x6365U, 0x6465U, 0x6565U, 0x6665U, + 0x6765U, 0x6865U, 0x6965U, 0x6A65U, 0x6B65U, 0x6C65U, 0x6D65U, 0x6E65U, + 0x6F65U, 0x7065U, 0x7165U, 0x7265U, 0x7365U, 0x7465U, 0x7565U, 0x7665U, + 0x7765U, 0x7865U, 0x7965U, 0x7A65U, 0x3065U, 0x3165U, 0x3265U, 0x3365U, + 0x3465U, 0x3565U, 0x3665U, 0x3765U, 0x3865U, 0x3965U, 0x2B65U, 0x2F65U, + 0x4166U, 0x4266U, 0x4366U, 0x4466U, 0x4566U, 0x4666U, 0x4766U, 0x4866U, + 0x4966U, 0x4A66U, 0x4B66U, 0x4C66U, 0x4D66U, 0x4E66U, 0x4F66U, 0x5066U, + 0x5166U, 0x5266U, 0x5366U, 0x5466U, 0x5566U, 0x5666U, 0x5766U, 0x5866U, + 0x5966U, 0x5A66U, 0x6166U, 0x6266U, 0x6366U, 0x6466U, 0x6566U, 0x6666U, + 0x6766U, 0x6866U, 0x6966U, 0x6A66U, 0x6B66U, 0x6C66U, 0x6D66U, 0x6E66U, + 0x6F66U, 0x7066U, 0x7166U, 0x7266U, 0x7366U, 0x7466U, 0x7566U, 0x7666U, + 0x7766U, 0x7866U, 0x7966U, 0x7A66U, 0x3066U, 0x3166U, 0x3266U, 0x3366U, + 0x3466U, 0x3566U, 0x3666U, 0x3766U, 0x3866U, 0x3966U, 0x2B66U, 0x2F66U, + 0x4167U, 0x4267U, 0x4367U, 0x4467U, 0x4567U, 0x4667U, 0x4767U, 0x4867U, + 0x4967U, 0x4A67U, 0x4B67U, 0x4C67U, 0x4D67U, 0x4E67U, 0x4F67U, 0x5067U, + 0x5167U, 0x5267U, 0x5367U, 0x5467U, 0x5567U, 0x5667U, 0x5767U, 0x5867U, + 0x5967U, 0x5A67U, 0x6167U, 0x6267U, 0x6367U, 0x6467U, 0x6567U, 0x6667U, + 0x6767U, 0x6867U, 0x6967U, 0x6A67U, 0x6B67U, 0x6C67U, 0x6D67U, 0x6E67U, + 0x6F67U, 0x7067U, 0x7167U, 0x7267U, 0x7367U, 0x7467U, 0x7567U, 0x7667U, + 0x7767U, 0x7867U, 0x7967U, 0x7A67U, 0x3067U, 0x3167U, 0x3267U, 0x3367U, + 0x3467U, 0x3567U, 0x3667U, 0x3767U, 0x3867U, 0x3967U, 0x2B67U, 0x2F67U, + 0x4168U, 0x4268U, 0x4368U, 
0x4468U, 0x4568U, 0x4668U, 0x4768U, 0x4868U, + 0x4968U, 0x4A68U, 0x4B68U, 0x4C68U, 0x4D68U, 0x4E68U, 0x4F68U, 0x5068U, + 0x5168U, 0x5268U, 0x5368U, 0x5468U, 0x5568U, 0x5668U, 0x5768U, 0x5868U, + 0x5968U, 0x5A68U, 0x6168U, 0x6268U, 0x6368U, 0x6468U, 0x6568U, 0x6668U, + 0x6768U, 0x6868U, 0x6968U, 0x6A68U, 0x6B68U, 0x6C68U, 0x6D68U, 0x6E68U, + 0x6F68U, 0x7068U, 0x7168U, 0x7268U, 0x7368U, 0x7468U, 0x7568U, 0x7668U, + 0x7768U, 0x7868U, 0x7968U, 0x7A68U, 0x3068U, 0x3168U, 0x3268U, 0x3368U, + 0x3468U, 0x3568U, 0x3668U, 0x3768U, 0x3868U, 0x3968U, 0x2B68U, 0x2F68U, + 0x4169U, 0x4269U, 0x4369U, 0x4469U, 0x4569U, 0x4669U, 0x4769U, 0x4869U, + 0x4969U, 0x4A69U, 0x4B69U, 0x4C69U, 0x4D69U, 0x4E69U, 0x4F69U, 0x5069U, + 0x5169U, 0x5269U, 0x5369U, 0x5469U, 0x5569U, 0x5669U, 0x5769U, 0x5869U, + 0x5969U, 0x5A69U, 0x6169U, 0x6269U, 0x6369U, 0x6469U, 0x6569U, 0x6669U, + 0x6769U, 0x6869U, 0x6969U, 0x6A69U, 0x6B69U, 0x6C69U, 0x6D69U, 0x6E69U, + 0x6F69U, 0x7069U, 0x7169U, 0x7269U, 0x7369U, 0x7469U, 0x7569U, 0x7669U, + 0x7769U, 0x7869U, 0x7969U, 0x7A69U, 0x3069U, 0x3169U, 0x3269U, 0x3369U, + 0x3469U, 0x3569U, 0x3669U, 0x3769U, 0x3869U, 0x3969U, 0x2B69U, 0x2F69U, + 0x416AU, 0x426AU, 0x436AU, 0x446AU, 0x456AU, 0x466AU, 0x476AU, 0x486AU, + 0x496AU, 0x4A6AU, 0x4B6AU, 0x4C6AU, 0x4D6AU, 0x4E6AU, 0x4F6AU, 0x506AU, + 0x516AU, 0x526AU, 0x536AU, 0x546AU, 0x556AU, 0x566AU, 0x576AU, 0x586AU, + 0x596AU, 0x5A6AU, 0x616AU, 0x626AU, 0x636AU, 0x646AU, 0x656AU, 0x666AU, + 0x676AU, 0x686AU, 0x696AU, 0x6A6AU, 0x6B6AU, 0x6C6AU, 0x6D6AU, 0x6E6AU, + 0x6F6AU, 0x706AU, 0x716AU, 0x726AU, 0x736AU, 0x746AU, 0x756AU, 0x766AU, + 0x776AU, 0x786AU, 0x796AU, 0x7A6AU, 0x306AU, 0x316AU, 0x326AU, 0x336AU, + 0x346AU, 0x356AU, 0x366AU, 0x376AU, 0x386AU, 0x396AU, 0x2B6AU, 0x2F6AU, + 0x416BU, 0x426BU, 0x436BU, 0x446BU, 0x456BU, 0x466BU, 0x476BU, 0x486BU, + 0x496BU, 0x4A6BU, 0x4B6BU, 0x4C6BU, 0x4D6BU, 0x4E6BU, 0x4F6BU, 0x506BU, + 0x516BU, 0x526BU, 0x536BU, 0x546BU, 0x556BU, 0x566BU, 0x576BU, 0x586BU, + 0x596BU, 0x5A6BU, 0x616BU, 0x626BU, 0x636BU, 0x646BU, 0x656BU, 0x666BU, + 0x676BU, 0x686BU, 0x696BU, 0x6A6BU, 0x6B6BU, 0x6C6BU, 0x6D6BU, 0x6E6BU, + 0x6F6BU, 0x706BU, 0x716BU, 0x726BU, 0x736BU, 0x746BU, 0x756BU, 0x766BU, + 0x776BU, 0x786BU, 0x796BU, 0x7A6BU, 0x306BU, 0x316BU, 0x326BU, 0x336BU, + 0x346BU, 0x356BU, 0x366BU, 0x376BU, 0x386BU, 0x396BU, 0x2B6BU, 0x2F6BU, + 0x416CU, 0x426CU, 0x436CU, 0x446CU, 0x456CU, 0x466CU, 0x476CU, 0x486CU, + 0x496CU, 0x4A6CU, 0x4B6CU, 0x4C6CU, 0x4D6CU, 0x4E6CU, 0x4F6CU, 0x506CU, + 0x516CU, 0x526CU, 0x536CU, 0x546CU, 0x556CU, 0x566CU, 0x576CU, 0x586CU, + 0x596CU, 0x5A6CU, 0x616CU, 0x626CU, 0x636CU, 0x646CU, 0x656CU, 0x666CU, + 0x676CU, 0x686CU, 0x696CU, 0x6A6CU, 0x6B6CU, 0x6C6CU, 0x6D6CU, 0x6E6CU, + 0x6F6CU, 0x706CU, 0x716CU, 0x726CU, 0x736CU, 0x746CU, 0x756CU, 0x766CU, + 0x776CU, 0x786CU, 0x796CU, 0x7A6CU, 0x306CU, 0x316CU, 0x326CU, 0x336CU, + 0x346CU, 0x356CU, 0x366CU, 0x376CU, 0x386CU, 0x396CU, 0x2B6CU, 0x2F6CU, + 0x416DU, 0x426DU, 0x436DU, 0x446DU, 0x456DU, 0x466DU, 0x476DU, 0x486DU, + 0x496DU, 0x4A6DU, 0x4B6DU, 0x4C6DU, 0x4D6DU, 0x4E6DU, 0x4F6DU, 0x506DU, + 0x516DU, 0x526DU, 0x536DU, 0x546DU, 0x556DU, 0x566DU, 0x576DU, 0x586DU, + 0x596DU, 0x5A6DU, 0x616DU, 0x626DU, 0x636DU, 0x646DU, 0x656DU, 0x666DU, + 0x676DU, 0x686DU, 0x696DU, 0x6A6DU, 0x6B6DU, 0x6C6DU, 0x6D6DU, 0x6E6DU, + 0x6F6DU, 0x706DU, 0x716DU, 0x726DU, 0x736DU, 0x746DU, 0x756DU, 0x766DU, + 0x776DU, 0x786DU, 0x796DU, 0x7A6DU, 0x306DU, 0x316DU, 0x326DU, 0x336DU, + 0x346DU, 0x356DU, 0x366DU, 0x376DU, 0x386DU, 0x396DU, 0x2B6DU, 0x2F6DU, + 0x416EU, 0x426EU, 0x436EU, 
0x446EU, 0x456EU, 0x466EU, 0x476EU, 0x486EU, + 0x496EU, 0x4A6EU, 0x4B6EU, 0x4C6EU, 0x4D6EU, 0x4E6EU, 0x4F6EU, 0x506EU, + 0x516EU, 0x526EU, 0x536EU, 0x546EU, 0x556EU, 0x566EU, 0x576EU, 0x586EU, + 0x596EU, 0x5A6EU, 0x616EU, 0x626EU, 0x636EU, 0x646EU, 0x656EU, 0x666EU, + 0x676EU, 0x686EU, 0x696EU, 0x6A6EU, 0x6B6EU, 0x6C6EU, 0x6D6EU, 0x6E6EU, + 0x6F6EU, 0x706EU, 0x716EU, 0x726EU, 0x736EU, 0x746EU, 0x756EU, 0x766EU, + 0x776EU, 0x786EU, 0x796EU, 0x7A6EU, 0x306EU, 0x316EU, 0x326EU, 0x336EU, + 0x346EU, 0x356EU, 0x366EU, 0x376EU, 0x386EU, 0x396EU, 0x2B6EU, 0x2F6EU, + 0x416FU, 0x426FU, 0x436FU, 0x446FU, 0x456FU, 0x466FU, 0x476FU, 0x486FU, + 0x496FU, 0x4A6FU, 0x4B6FU, 0x4C6FU, 0x4D6FU, 0x4E6FU, 0x4F6FU, 0x506FU, + 0x516FU, 0x526FU, 0x536FU, 0x546FU, 0x556FU, 0x566FU, 0x576FU, 0x586FU, + 0x596FU, 0x5A6FU, 0x616FU, 0x626FU, 0x636FU, 0x646FU, 0x656FU, 0x666FU, + 0x676FU, 0x686FU, 0x696FU, 0x6A6FU, 0x6B6FU, 0x6C6FU, 0x6D6FU, 0x6E6FU, + 0x6F6FU, 0x706FU, 0x716FU, 0x726FU, 0x736FU, 0x746FU, 0x756FU, 0x766FU, + 0x776FU, 0x786FU, 0x796FU, 0x7A6FU, 0x306FU, 0x316FU, 0x326FU, 0x336FU, + 0x346FU, 0x356FU, 0x366FU, 0x376FU, 0x386FU, 0x396FU, 0x2B6FU, 0x2F6FU, + 0x4170U, 0x4270U, 0x4370U, 0x4470U, 0x4570U, 0x4670U, 0x4770U, 0x4870U, + 0x4970U, 0x4A70U, 0x4B70U, 0x4C70U, 0x4D70U, 0x4E70U, 0x4F70U, 0x5070U, + 0x5170U, 0x5270U, 0x5370U, 0x5470U, 0x5570U, 0x5670U, 0x5770U, 0x5870U, + 0x5970U, 0x5A70U, 0x6170U, 0x6270U, 0x6370U, 0x6470U, 0x6570U, 0x6670U, + 0x6770U, 0x6870U, 0x6970U, 0x6A70U, 0x6B70U, 0x6C70U, 0x6D70U, 0x6E70U, + 0x6F70U, 0x7070U, 0x7170U, 0x7270U, 0x7370U, 0x7470U, 0x7570U, 0x7670U, + 0x7770U, 0x7870U, 0x7970U, 0x7A70U, 0x3070U, 0x3170U, 0x3270U, 0x3370U, + 0x3470U, 0x3570U, 0x3670U, 0x3770U, 0x3870U, 0x3970U, 0x2B70U, 0x2F70U, + 0x4171U, 0x4271U, 0x4371U, 0x4471U, 0x4571U, 0x4671U, 0x4771U, 0x4871U, + 0x4971U, 0x4A71U, 0x4B71U, 0x4C71U, 0x4D71U, 0x4E71U, 0x4F71U, 0x5071U, + 0x5171U, 0x5271U, 0x5371U, 0x5471U, 0x5571U, 0x5671U, 0x5771U, 0x5871U, + 0x5971U, 0x5A71U, 0x6171U, 0x6271U, 0x6371U, 0x6471U, 0x6571U, 0x6671U, + 0x6771U, 0x6871U, 0x6971U, 0x6A71U, 0x6B71U, 0x6C71U, 0x6D71U, 0x6E71U, + 0x6F71U, 0x7071U, 0x7171U, 0x7271U, 0x7371U, 0x7471U, 0x7571U, 0x7671U, + 0x7771U, 0x7871U, 0x7971U, 0x7A71U, 0x3071U, 0x3171U, 0x3271U, 0x3371U, + 0x3471U, 0x3571U, 0x3671U, 0x3771U, 0x3871U, 0x3971U, 0x2B71U, 0x2F71U, + 0x4172U, 0x4272U, 0x4372U, 0x4472U, 0x4572U, 0x4672U, 0x4772U, 0x4872U, + 0x4972U, 0x4A72U, 0x4B72U, 0x4C72U, 0x4D72U, 0x4E72U, 0x4F72U, 0x5072U, + 0x5172U, 0x5272U, 0x5372U, 0x5472U, 0x5572U, 0x5672U, 0x5772U, 0x5872U, + 0x5972U, 0x5A72U, 0x6172U, 0x6272U, 0x6372U, 0x6472U, 0x6572U, 0x6672U, + 0x6772U, 0x6872U, 0x6972U, 0x6A72U, 0x6B72U, 0x6C72U, 0x6D72U, 0x6E72U, + 0x6F72U, 0x7072U, 0x7172U, 0x7272U, 0x7372U, 0x7472U, 0x7572U, 0x7672U, + 0x7772U, 0x7872U, 0x7972U, 0x7A72U, 0x3072U, 0x3172U, 0x3272U, 0x3372U, + 0x3472U, 0x3572U, 0x3672U, 0x3772U, 0x3872U, 0x3972U, 0x2B72U, 0x2F72U, + 0x4173U, 0x4273U, 0x4373U, 0x4473U, 0x4573U, 0x4673U, 0x4773U, 0x4873U, + 0x4973U, 0x4A73U, 0x4B73U, 0x4C73U, 0x4D73U, 0x4E73U, 0x4F73U, 0x5073U, + 0x5173U, 0x5273U, 0x5373U, 0x5473U, 0x5573U, 0x5673U, 0x5773U, 0x5873U, + 0x5973U, 0x5A73U, 0x6173U, 0x6273U, 0x6373U, 0x6473U, 0x6573U, 0x6673U, + 0x6773U, 0x6873U, 0x6973U, 0x6A73U, 0x6B73U, 0x6C73U, 0x6D73U, 0x6E73U, + 0x6F73U, 0x7073U, 0x7173U, 0x7273U, 0x7373U, 0x7473U, 0x7573U, 0x7673U, + 0x7773U, 0x7873U, 0x7973U, 0x7A73U, 0x3073U, 0x3173U, 0x3273U, 0x3373U, + 0x3473U, 0x3573U, 0x3673U, 0x3773U, 0x3873U, 0x3973U, 0x2B73U, 0x2F73U, + 0x4174U, 0x4274U, 0x4374U, 
0x4474U, 0x4574U, 0x4674U, 0x4774U, 0x4874U, + 0x4974U, 0x4A74U, 0x4B74U, 0x4C74U, 0x4D74U, 0x4E74U, 0x4F74U, 0x5074U, + 0x5174U, 0x5274U, 0x5374U, 0x5474U, 0x5574U, 0x5674U, 0x5774U, 0x5874U, + 0x5974U, 0x5A74U, 0x6174U, 0x6274U, 0x6374U, 0x6474U, 0x6574U, 0x6674U, + 0x6774U, 0x6874U, 0x6974U, 0x6A74U, 0x6B74U, 0x6C74U, 0x6D74U, 0x6E74U, + 0x6F74U, 0x7074U, 0x7174U, 0x7274U, 0x7374U, 0x7474U, 0x7574U, 0x7674U, + 0x7774U, 0x7874U, 0x7974U, 0x7A74U, 0x3074U, 0x3174U, 0x3274U, 0x3374U, + 0x3474U, 0x3574U, 0x3674U, 0x3774U, 0x3874U, 0x3974U, 0x2B74U, 0x2F74U, + 0x4175U, 0x4275U, 0x4375U, 0x4475U, 0x4575U, 0x4675U, 0x4775U, 0x4875U, + 0x4975U, 0x4A75U, 0x4B75U, 0x4C75U, 0x4D75U, 0x4E75U, 0x4F75U, 0x5075U, + 0x5175U, 0x5275U, 0x5375U, 0x5475U, 0x5575U, 0x5675U, 0x5775U, 0x5875U, + 0x5975U, 0x5A75U, 0x6175U, 0x6275U, 0x6375U, 0x6475U, 0x6575U, 0x6675U, + 0x6775U, 0x6875U, 0x6975U, 0x6A75U, 0x6B75U, 0x6C75U, 0x6D75U, 0x6E75U, + 0x6F75U, 0x7075U, 0x7175U, 0x7275U, 0x7375U, 0x7475U, 0x7575U, 0x7675U, + 0x7775U, 0x7875U, 0x7975U, 0x7A75U, 0x3075U, 0x3175U, 0x3275U, 0x3375U, + 0x3475U, 0x3575U, 0x3675U, 0x3775U, 0x3875U, 0x3975U, 0x2B75U, 0x2F75U, + 0x4176U, 0x4276U, 0x4376U, 0x4476U, 0x4576U, 0x4676U, 0x4776U, 0x4876U, + 0x4976U, 0x4A76U, 0x4B76U, 0x4C76U, 0x4D76U, 0x4E76U, 0x4F76U, 0x5076U, + 0x5176U, 0x5276U, 0x5376U, 0x5476U, 0x5576U, 0x5676U, 0x5776U, 0x5876U, + 0x5976U, 0x5A76U, 0x6176U, 0x6276U, 0x6376U, 0x6476U, 0x6576U, 0x6676U, + 0x6776U, 0x6876U, 0x6976U, 0x6A76U, 0x6B76U, 0x6C76U, 0x6D76U, 0x6E76U, + 0x6F76U, 0x7076U, 0x7176U, 0x7276U, 0x7376U, 0x7476U, 0x7576U, 0x7676U, + 0x7776U, 0x7876U, 0x7976U, 0x7A76U, 0x3076U, 0x3176U, 0x3276U, 0x3376U, + 0x3476U, 0x3576U, 0x3676U, 0x3776U, 0x3876U, 0x3976U, 0x2B76U, 0x2F76U, + 0x4177U, 0x4277U, 0x4377U, 0x4477U, 0x4577U, 0x4677U, 0x4777U, 0x4877U, + 0x4977U, 0x4A77U, 0x4B77U, 0x4C77U, 0x4D77U, 0x4E77U, 0x4F77U, 0x5077U, + 0x5177U, 0x5277U, 0x5377U, 0x5477U, 0x5577U, 0x5677U, 0x5777U, 0x5877U, + 0x5977U, 0x5A77U, 0x6177U, 0x6277U, 0x6377U, 0x6477U, 0x6577U, 0x6677U, + 0x6777U, 0x6877U, 0x6977U, 0x6A77U, 0x6B77U, 0x6C77U, 0x6D77U, 0x6E77U, + 0x6F77U, 0x7077U, 0x7177U, 0x7277U, 0x7377U, 0x7477U, 0x7577U, 0x7677U, + 0x7777U, 0x7877U, 0x7977U, 0x7A77U, 0x3077U, 0x3177U, 0x3277U, 0x3377U, + 0x3477U, 0x3577U, 0x3677U, 0x3777U, 0x3877U, 0x3977U, 0x2B77U, 0x2F77U, + 0x4178U, 0x4278U, 0x4378U, 0x4478U, 0x4578U, 0x4678U, 0x4778U, 0x4878U, + 0x4978U, 0x4A78U, 0x4B78U, 0x4C78U, 0x4D78U, 0x4E78U, 0x4F78U, 0x5078U, + 0x5178U, 0x5278U, 0x5378U, 0x5478U, 0x5578U, 0x5678U, 0x5778U, 0x5878U, + 0x5978U, 0x5A78U, 0x6178U, 0x6278U, 0x6378U, 0x6478U, 0x6578U, 0x6678U, + 0x6778U, 0x6878U, 0x6978U, 0x6A78U, 0x6B78U, 0x6C78U, 0x6D78U, 0x6E78U, + 0x6F78U, 0x7078U, 0x7178U, 0x7278U, 0x7378U, 0x7478U, 0x7578U, 0x7678U, + 0x7778U, 0x7878U, 0x7978U, 0x7A78U, 0x3078U, 0x3178U, 0x3278U, 0x3378U, + 0x3478U, 0x3578U, 0x3678U, 0x3778U, 0x3878U, 0x3978U, 0x2B78U, 0x2F78U, + 0x4179U, 0x4279U, 0x4379U, 0x4479U, 0x4579U, 0x4679U, 0x4779U, 0x4879U, + 0x4979U, 0x4A79U, 0x4B79U, 0x4C79U, 0x4D79U, 0x4E79U, 0x4F79U, 0x5079U, + 0x5179U, 0x5279U, 0x5379U, 0x5479U, 0x5579U, 0x5679U, 0x5779U, 0x5879U, + 0x5979U, 0x5A79U, 0x6179U, 0x6279U, 0x6379U, 0x6479U, 0x6579U, 0x6679U, + 0x6779U, 0x6879U, 0x6979U, 0x6A79U, 0x6B79U, 0x6C79U, 0x6D79U, 0x6E79U, + 0x6F79U, 0x7079U, 0x7179U, 0x7279U, 0x7379U, 0x7479U, 0x7579U, 0x7679U, + 0x7779U, 0x7879U, 0x7979U, 0x7A79U, 0x3079U, 0x3179U, 0x3279U, 0x3379U, + 0x3479U, 0x3579U, 0x3679U, 0x3779U, 0x3879U, 0x3979U, 0x2B79U, 0x2F79U, + 0x417AU, 0x427AU, 0x437AU, 
0x447AU, 0x457AU, 0x467AU, 0x477AU, 0x487AU, + 0x497AU, 0x4A7AU, 0x4B7AU, 0x4C7AU, 0x4D7AU, 0x4E7AU, 0x4F7AU, 0x507AU, + 0x517AU, 0x527AU, 0x537AU, 0x547AU, 0x557AU, 0x567AU, 0x577AU, 0x587AU, + 0x597AU, 0x5A7AU, 0x617AU, 0x627AU, 0x637AU, 0x647AU, 0x657AU, 0x667AU, + 0x677AU, 0x687AU, 0x697AU, 0x6A7AU, 0x6B7AU, 0x6C7AU, 0x6D7AU, 0x6E7AU, + 0x6F7AU, 0x707AU, 0x717AU, 0x727AU, 0x737AU, 0x747AU, 0x757AU, 0x767AU, + 0x777AU, 0x787AU, 0x797AU, 0x7A7AU, 0x307AU, 0x317AU, 0x327AU, 0x337AU, + 0x347AU, 0x357AU, 0x367AU, 0x377AU, 0x387AU, 0x397AU, 0x2B7AU, 0x2F7AU, + 0x4130U, 0x4230U, 0x4330U, 0x4430U, 0x4530U, 0x4630U, 0x4730U, 0x4830U, + 0x4930U, 0x4A30U, 0x4B30U, 0x4C30U, 0x4D30U, 0x4E30U, 0x4F30U, 0x5030U, + 0x5130U, 0x5230U, 0x5330U, 0x5430U, 0x5530U, 0x5630U, 0x5730U, 0x5830U, + 0x5930U, 0x5A30U, 0x6130U, 0x6230U, 0x6330U, 0x6430U, 0x6530U, 0x6630U, + 0x6730U, 0x6830U, 0x6930U, 0x6A30U, 0x6B30U, 0x6C30U, 0x6D30U, 0x6E30U, + 0x6F30U, 0x7030U, 0x7130U, 0x7230U, 0x7330U, 0x7430U, 0x7530U, 0x7630U, + 0x7730U, 0x7830U, 0x7930U, 0x7A30U, 0x3030U, 0x3130U, 0x3230U, 0x3330U, + 0x3430U, 0x3530U, 0x3630U, 0x3730U, 0x3830U, 0x3930U, 0x2B30U, 0x2F30U, + 0x4131U, 0x4231U, 0x4331U, 0x4431U, 0x4531U, 0x4631U, 0x4731U, 0x4831U, + 0x4931U, 0x4A31U, 0x4B31U, 0x4C31U, 0x4D31U, 0x4E31U, 0x4F31U, 0x5031U, + 0x5131U, 0x5231U, 0x5331U, 0x5431U, 0x5531U, 0x5631U, 0x5731U, 0x5831U, + 0x5931U, 0x5A31U, 0x6131U, 0x6231U, 0x6331U, 0x6431U, 0x6531U, 0x6631U, + 0x6731U, 0x6831U, 0x6931U, 0x6A31U, 0x6B31U, 0x6C31U, 0x6D31U, 0x6E31U, + 0x6F31U, 0x7031U, 0x7131U, 0x7231U, 0x7331U, 0x7431U, 0x7531U, 0x7631U, + 0x7731U, 0x7831U, 0x7931U, 0x7A31U, 0x3031U, 0x3131U, 0x3231U, 0x3331U, + 0x3431U, 0x3531U, 0x3631U, 0x3731U, 0x3831U, 0x3931U, 0x2B31U, 0x2F31U, + 0x4132U, 0x4232U, 0x4332U, 0x4432U, 0x4532U, 0x4632U, 0x4732U, 0x4832U, + 0x4932U, 0x4A32U, 0x4B32U, 0x4C32U, 0x4D32U, 0x4E32U, 0x4F32U, 0x5032U, + 0x5132U, 0x5232U, 0x5332U, 0x5432U, 0x5532U, 0x5632U, 0x5732U, 0x5832U, + 0x5932U, 0x5A32U, 0x6132U, 0x6232U, 0x6332U, 0x6432U, 0x6532U, 0x6632U, + 0x6732U, 0x6832U, 0x6932U, 0x6A32U, 0x6B32U, 0x6C32U, 0x6D32U, 0x6E32U, + 0x6F32U, 0x7032U, 0x7132U, 0x7232U, 0x7332U, 0x7432U, 0x7532U, 0x7632U, + 0x7732U, 0x7832U, 0x7932U, 0x7A32U, 0x3032U, 0x3132U, 0x3232U, 0x3332U, + 0x3432U, 0x3532U, 0x3632U, 0x3732U, 0x3832U, 0x3932U, 0x2B32U, 0x2F32U, + 0x4133U, 0x4233U, 0x4333U, 0x4433U, 0x4533U, 0x4633U, 0x4733U, 0x4833U, + 0x4933U, 0x4A33U, 0x4B33U, 0x4C33U, 0x4D33U, 0x4E33U, 0x4F33U, 0x5033U, + 0x5133U, 0x5233U, 0x5333U, 0x5433U, 0x5533U, 0x5633U, 0x5733U, 0x5833U, + 0x5933U, 0x5A33U, 0x6133U, 0x6233U, 0x6333U, 0x6433U, 0x6533U, 0x6633U, + 0x6733U, 0x6833U, 0x6933U, 0x6A33U, 0x6B33U, 0x6C33U, 0x6D33U, 0x6E33U, + 0x6F33U, 0x7033U, 0x7133U, 0x7233U, 0x7333U, 0x7433U, 0x7533U, 0x7633U, + 0x7733U, 0x7833U, 0x7933U, 0x7A33U, 0x3033U, 0x3133U, 0x3233U, 0x3333U, + 0x3433U, 0x3533U, 0x3633U, 0x3733U, 0x3833U, 0x3933U, 0x2B33U, 0x2F33U, + 0x4134U, 0x4234U, 0x4334U, 0x4434U, 0x4534U, 0x4634U, 0x4734U, 0x4834U, + 0x4934U, 0x4A34U, 0x4B34U, 0x4C34U, 0x4D34U, 0x4E34U, 0x4F34U, 0x5034U, + 0x5134U, 0x5234U, 0x5334U, 0x5434U, 0x5534U, 0x5634U, 0x5734U, 0x5834U, + 0x5934U, 0x5A34U, 0x6134U, 0x6234U, 0x6334U, 0x6434U, 0x6534U, 0x6634U, + 0x6734U, 0x6834U, 0x6934U, 0x6A34U, 0x6B34U, 0x6C34U, 0x6D34U, 0x6E34U, + 0x6F34U, 0x7034U, 0x7134U, 0x7234U, 0x7334U, 0x7434U, 0x7534U, 0x7634U, + 0x7734U, 0x7834U, 0x7934U, 0x7A34U, 0x3034U, 0x3134U, 0x3234U, 0x3334U, + 0x3434U, 0x3534U, 0x3634U, 0x3734U, 0x3834U, 0x3934U, 0x2B34U, 0x2F34U, + 0x4135U, 0x4235U, 0x4335U, 
0x4435U, 0x4535U, 0x4635U, 0x4735U, 0x4835U, + 0x4935U, 0x4A35U, 0x4B35U, 0x4C35U, 0x4D35U, 0x4E35U, 0x4F35U, 0x5035U, + 0x5135U, 0x5235U, 0x5335U, 0x5435U, 0x5535U, 0x5635U, 0x5735U, 0x5835U, + 0x5935U, 0x5A35U, 0x6135U, 0x6235U, 0x6335U, 0x6435U, 0x6535U, 0x6635U, + 0x6735U, 0x6835U, 0x6935U, 0x6A35U, 0x6B35U, 0x6C35U, 0x6D35U, 0x6E35U, + 0x6F35U, 0x7035U, 0x7135U, 0x7235U, 0x7335U, 0x7435U, 0x7535U, 0x7635U, + 0x7735U, 0x7835U, 0x7935U, 0x7A35U, 0x3035U, 0x3135U, 0x3235U, 0x3335U, + 0x3435U, 0x3535U, 0x3635U, 0x3735U, 0x3835U, 0x3935U, 0x2B35U, 0x2F35U, + 0x4136U, 0x4236U, 0x4336U, 0x4436U, 0x4536U, 0x4636U, 0x4736U, 0x4836U, + 0x4936U, 0x4A36U, 0x4B36U, 0x4C36U, 0x4D36U, 0x4E36U, 0x4F36U, 0x5036U, + 0x5136U, 0x5236U, 0x5336U, 0x5436U, 0x5536U, 0x5636U, 0x5736U, 0x5836U, + 0x5936U, 0x5A36U, 0x6136U, 0x6236U, 0x6336U, 0x6436U, 0x6536U, 0x6636U, + 0x6736U, 0x6836U, 0x6936U, 0x6A36U, 0x6B36U, 0x6C36U, 0x6D36U, 0x6E36U, + 0x6F36U, 0x7036U, 0x7136U, 0x7236U, 0x7336U, 0x7436U, 0x7536U, 0x7636U, + 0x7736U, 0x7836U, 0x7936U, 0x7A36U, 0x3036U, 0x3136U, 0x3236U, 0x3336U, + 0x3436U, 0x3536U, 0x3636U, 0x3736U, 0x3836U, 0x3936U, 0x2B36U, 0x2F36U, + 0x4137U, 0x4237U, 0x4337U, 0x4437U, 0x4537U, 0x4637U, 0x4737U, 0x4837U, + 0x4937U, 0x4A37U, 0x4B37U, 0x4C37U, 0x4D37U, 0x4E37U, 0x4F37U, 0x5037U, + 0x5137U, 0x5237U, 0x5337U, 0x5437U, 0x5537U, 0x5637U, 0x5737U, 0x5837U, + 0x5937U, 0x5A37U, 0x6137U, 0x6237U, 0x6337U, 0x6437U, 0x6537U, 0x6637U, + 0x6737U, 0x6837U, 0x6937U, 0x6A37U, 0x6B37U, 0x6C37U, 0x6D37U, 0x6E37U, + 0x6F37U, 0x7037U, 0x7137U, 0x7237U, 0x7337U, 0x7437U, 0x7537U, 0x7637U, + 0x7737U, 0x7837U, 0x7937U, 0x7A37U, 0x3037U, 0x3137U, 0x3237U, 0x3337U, + 0x3437U, 0x3537U, 0x3637U, 0x3737U, 0x3837U, 0x3937U, 0x2B37U, 0x2F37U, + 0x4138U, 0x4238U, 0x4338U, 0x4438U, 0x4538U, 0x4638U, 0x4738U, 0x4838U, + 0x4938U, 0x4A38U, 0x4B38U, 0x4C38U, 0x4D38U, 0x4E38U, 0x4F38U, 0x5038U, + 0x5138U, 0x5238U, 0x5338U, 0x5438U, 0x5538U, 0x5638U, 0x5738U, 0x5838U, + 0x5938U, 0x5A38U, 0x6138U, 0x6238U, 0x6338U, 0x6438U, 0x6538U, 0x6638U, + 0x6738U, 0x6838U, 0x6938U, 0x6A38U, 0x6B38U, 0x6C38U, 0x6D38U, 0x6E38U, + 0x6F38U, 0x7038U, 0x7138U, 0x7238U, 0x7338U, 0x7438U, 0x7538U, 0x7638U, + 0x7738U, 0x7838U, 0x7938U, 0x7A38U, 0x3038U, 0x3138U, 0x3238U, 0x3338U, + 0x3438U, 0x3538U, 0x3638U, 0x3738U, 0x3838U, 0x3938U, 0x2B38U, 0x2F38U, + 0x4139U, 0x4239U, 0x4339U, 0x4439U, 0x4539U, 0x4639U, 0x4739U, 0x4839U, + 0x4939U, 0x4A39U, 0x4B39U, 0x4C39U, 0x4D39U, 0x4E39U, 0x4F39U, 0x5039U, + 0x5139U, 0x5239U, 0x5339U, 0x5439U, 0x5539U, 0x5639U, 0x5739U, 0x5839U, + 0x5939U, 0x5A39U, 0x6139U, 0x6239U, 0x6339U, 0x6439U, 0x6539U, 0x6639U, + 0x6739U, 0x6839U, 0x6939U, 0x6A39U, 0x6B39U, 0x6C39U, 0x6D39U, 0x6E39U, + 0x6F39U, 0x7039U, 0x7139U, 0x7239U, 0x7339U, 0x7439U, 0x7539U, 0x7639U, + 0x7739U, 0x7839U, 0x7939U, 0x7A39U, 0x3039U, 0x3139U, 0x3239U, 0x3339U, + 0x3439U, 0x3539U, 0x3639U, 0x3739U, 0x3839U, 0x3939U, 0x2B39U, 0x2F39U, + 0x412BU, 0x422BU, 0x432BU, 0x442BU, 0x452BU, 0x462BU, 0x472BU, 0x482BU, + 0x492BU, 0x4A2BU, 0x4B2BU, 0x4C2BU, 0x4D2BU, 0x4E2BU, 0x4F2BU, 0x502BU, + 0x512BU, 0x522BU, 0x532BU, 0x542BU, 0x552BU, 0x562BU, 0x572BU, 0x582BU, + 0x592BU, 0x5A2BU, 0x612BU, 0x622BU, 0x632BU, 0x642BU, 0x652BU, 0x662BU, + 0x672BU, 0x682BU, 0x692BU, 0x6A2BU, 0x6B2BU, 0x6C2BU, 0x6D2BU, 0x6E2BU, + 0x6F2BU, 0x702BU, 0x712BU, 0x722BU, 0x732BU, 0x742BU, 0x752BU, 0x762BU, + 0x772BU, 0x782BU, 0x792BU, 0x7A2BU, 0x302BU, 0x312BU, 0x322BU, 0x332BU, + 0x342BU, 0x352BU, 0x362BU, 0x372BU, 0x382BU, 0x392BU, 0x2B2BU, 0x2F2BU, + 0x412FU, 0x422FU, 0x432FU, 
0x442FU, 0x452FU, 0x462FU, 0x472FU, 0x482FU, + 0x492FU, 0x4A2FU, 0x4B2FU, 0x4C2FU, 0x4D2FU, 0x4E2FU, 0x4F2FU, 0x502FU, + 0x512FU, 0x522FU, 0x532FU, 0x542FU, 0x552FU, 0x562FU, 0x572FU, 0x582FU, + 0x592FU, 0x5A2FU, 0x612FU, 0x622FU, 0x632FU, 0x642FU, 0x652FU, 0x662FU, + 0x672FU, 0x682FU, 0x692FU, 0x6A2FU, 0x6B2FU, 0x6C2FU, 0x6D2FU, 0x6E2FU, + 0x6F2FU, 0x702FU, 0x712FU, 0x722FU, 0x732FU, 0x742FU, 0x752FU, 0x762FU, + 0x772FU, 0x782FU, 0x792FU, 0x7A2FU, 0x302FU, 0x312FU, 0x322FU, 0x332FU, + 0x342FU, 0x352FU, 0x362FU, 0x372FU, 0x382FU, 0x392FU, 0x2B2FU, 0x2F2FU, +#else + 0x4141U, 0x4142U, 0x4143U, 0x4144U, 0x4145U, 0x4146U, 0x4147U, 0x4148U, + 0x4149U, 0x414AU, 0x414BU, 0x414CU, 0x414DU, 0x414EU, 0x414FU, 0x4150U, + 0x4151U, 0x4152U, 0x4153U, 0x4154U, 0x4155U, 0x4156U, 0x4157U, 0x4158U, + 0x4159U, 0x415AU, 0x4161U, 0x4162U, 0x4163U, 0x4164U, 0x4165U, 0x4166U, + 0x4167U, 0x4168U, 0x4169U, 0x416AU, 0x416BU, 0x416CU, 0x416DU, 0x416EU, + 0x416FU, 0x4170U, 0x4171U, 0x4172U, 0x4173U, 0x4174U, 0x4175U, 0x4176U, + 0x4177U, 0x4178U, 0x4179U, 0x417AU, 0x4130U, 0x4131U, 0x4132U, 0x4133U, + 0x4134U, 0x4135U, 0x4136U, 0x4137U, 0x4138U, 0x4139U, 0x412BU, 0x412FU, + 0x4241U, 0x4242U, 0x4243U, 0x4244U, 0x4245U, 0x4246U, 0x4247U, 0x4248U, + 0x4249U, 0x424AU, 0x424BU, 0x424CU, 0x424DU, 0x424EU, 0x424FU, 0x4250U, + 0x4251U, 0x4252U, 0x4253U, 0x4254U, 0x4255U, 0x4256U, 0x4257U, 0x4258U, + 0x4259U, 0x425AU, 0x4261U, 0x4262U, 0x4263U, 0x4264U, 0x4265U, 0x4266U, + 0x4267U, 0x4268U, 0x4269U, 0x426AU, 0x426BU, 0x426CU, 0x426DU, 0x426EU, + 0x426FU, 0x4270U, 0x4271U, 0x4272U, 0x4273U, 0x4274U, 0x4275U, 0x4276U, + 0x4277U, 0x4278U, 0x4279U, 0x427AU, 0x4230U, 0x4231U, 0x4232U, 0x4233U, + 0x4234U, 0x4235U, 0x4236U, 0x4237U, 0x4238U, 0x4239U, 0x422BU, 0x422FU, + 0x4341U, 0x4342U, 0x4343U, 0x4344U, 0x4345U, 0x4346U, 0x4347U, 0x4348U, + 0x4349U, 0x434AU, 0x434BU, 0x434CU, 0x434DU, 0x434EU, 0x434FU, 0x4350U, + 0x4351U, 0x4352U, 0x4353U, 0x4354U, 0x4355U, 0x4356U, 0x4357U, 0x4358U, + 0x4359U, 0x435AU, 0x4361U, 0x4362U, 0x4363U, 0x4364U, 0x4365U, 0x4366U, + 0x4367U, 0x4368U, 0x4369U, 0x436AU, 0x436BU, 0x436CU, 0x436DU, 0x436EU, + 0x436FU, 0x4370U, 0x4371U, 0x4372U, 0x4373U, 0x4374U, 0x4375U, 0x4376U, + 0x4377U, 0x4378U, 0x4379U, 0x437AU, 0x4330U, 0x4331U, 0x4332U, 0x4333U, + 0x4334U, 0x4335U, 0x4336U, 0x4337U, 0x4338U, 0x4339U, 0x432BU, 0x432FU, + 0x4441U, 0x4442U, 0x4443U, 0x4444U, 0x4445U, 0x4446U, 0x4447U, 0x4448U, + 0x4449U, 0x444AU, 0x444BU, 0x444CU, 0x444DU, 0x444EU, 0x444FU, 0x4450U, + 0x4451U, 0x4452U, 0x4453U, 0x4454U, 0x4455U, 0x4456U, 0x4457U, 0x4458U, + 0x4459U, 0x445AU, 0x4461U, 0x4462U, 0x4463U, 0x4464U, 0x4465U, 0x4466U, + 0x4467U, 0x4468U, 0x4469U, 0x446AU, 0x446BU, 0x446CU, 0x446DU, 0x446EU, + 0x446FU, 0x4470U, 0x4471U, 0x4472U, 0x4473U, 0x4474U, 0x4475U, 0x4476U, + 0x4477U, 0x4478U, 0x4479U, 0x447AU, 0x4430U, 0x4431U, 0x4432U, 0x4433U, + 0x4434U, 0x4435U, 0x4436U, 0x4437U, 0x4438U, 0x4439U, 0x442BU, 0x442FU, + 0x4541U, 0x4542U, 0x4543U, 0x4544U, 0x4545U, 0x4546U, 0x4547U, 0x4548U, + 0x4549U, 0x454AU, 0x454BU, 0x454CU, 0x454DU, 0x454EU, 0x454FU, 0x4550U, + 0x4551U, 0x4552U, 0x4553U, 0x4554U, 0x4555U, 0x4556U, 0x4557U, 0x4558U, + 0x4559U, 0x455AU, 0x4561U, 0x4562U, 0x4563U, 0x4564U, 0x4565U, 0x4566U, + 0x4567U, 0x4568U, 0x4569U, 0x456AU, 0x456BU, 0x456CU, 0x456DU, 0x456EU, + 0x456FU, 0x4570U, 0x4571U, 0x4572U, 0x4573U, 0x4574U, 0x4575U, 0x4576U, + 0x4577U, 0x4578U, 0x4579U, 0x457AU, 0x4530U, 0x4531U, 0x4532U, 0x4533U, + 0x4534U, 0x4535U, 0x4536U, 0x4537U, 0x4538U, 0x4539U, 0x452BU, 0x452FU, + 0x4641U, 0x4642U, 
0x4643U, 0x4644U, 0x4645U, 0x4646U, 0x4647U, 0x4648U, + 0x4649U, 0x464AU, 0x464BU, 0x464CU, 0x464DU, 0x464EU, 0x464FU, 0x4650U, + 0x4651U, 0x4652U, 0x4653U, 0x4654U, 0x4655U, 0x4656U, 0x4657U, 0x4658U, + 0x4659U, 0x465AU, 0x4661U, 0x4662U, 0x4663U, 0x4664U, 0x4665U, 0x4666U, + 0x4667U, 0x4668U, 0x4669U, 0x466AU, 0x466BU, 0x466CU, 0x466DU, 0x466EU, + 0x466FU, 0x4670U, 0x4671U, 0x4672U, 0x4673U, 0x4674U, 0x4675U, 0x4676U, + 0x4677U, 0x4678U, 0x4679U, 0x467AU, 0x4630U, 0x4631U, 0x4632U, 0x4633U, + 0x4634U, 0x4635U, 0x4636U, 0x4637U, 0x4638U, 0x4639U, 0x462BU, 0x462FU, + 0x4741U, 0x4742U, 0x4743U, 0x4744U, 0x4745U, 0x4746U, 0x4747U, 0x4748U, + 0x4749U, 0x474AU, 0x474BU, 0x474CU, 0x474DU, 0x474EU, 0x474FU, 0x4750U, + 0x4751U, 0x4752U, 0x4753U, 0x4754U, 0x4755U, 0x4756U, 0x4757U, 0x4758U, + 0x4759U, 0x475AU, 0x4761U, 0x4762U, 0x4763U, 0x4764U, 0x4765U, 0x4766U, + 0x4767U, 0x4768U, 0x4769U, 0x476AU, 0x476BU, 0x476CU, 0x476DU, 0x476EU, + 0x476FU, 0x4770U, 0x4771U, 0x4772U, 0x4773U, 0x4774U, 0x4775U, 0x4776U, + 0x4777U, 0x4778U, 0x4779U, 0x477AU, 0x4730U, 0x4731U, 0x4732U, 0x4733U, + 0x4734U, 0x4735U, 0x4736U, 0x4737U, 0x4738U, 0x4739U, 0x472BU, 0x472FU, + 0x4841U, 0x4842U, 0x4843U, 0x4844U, 0x4845U, 0x4846U, 0x4847U, 0x4848U, + 0x4849U, 0x484AU, 0x484BU, 0x484CU, 0x484DU, 0x484EU, 0x484FU, 0x4850U, + 0x4851U, 0x4852U, 0x4853U, 0x4854U, 0x4855U, 0x4856U, 0x4857U, 0x4858U, + 0x4859U, 0x485AU, 0x4861U, 0x4862U, 0x4863U, 0x4864U, 0x4865U, 0x4866U, + 0x4867U, 0x4868U, 0x4869U, 0x486AU, 0x486BU, 0x486CU, 0x486DU, 0x486EU, + 0x486FU, 0x4870U, 0x4871U, 0x4872U, 0x4873U, 0x4874U, 0x4875U, 0x4876U, + 0x4877U, 0x4878U, 0x4879U, 0x487AU, 0x4830U, 0x4831U, 0x4832U, 0x4833U, + 0x4834U, 0x4835U, 0x4836U, 0x4837U, 0x4838U, 0x4839U, 0x482BU, 0x482FU, + 0x4941U, 0x4942U, 0x4943U, 0x4944U, 0x4945U, 0x4946U, 0x4947U, 0x4948U, + 0x4949U, 0x494AU, 0x494BU, 0x494CU, 0x494DU, 0x494EU, 0x494FU, 0x4950U, + 0x4951U, 0x4952U, 0x4953U, 0x4954U, 0x4955U, 0x4956U, 0x4957U, 0x4958U, + 0x4959U, 0x495AU, 0x4961U, 0x4962U, 0x4963U, 0x4964U, 0x4965U, 0x4966U, + 0x4967U, 0x4968U, 0x4969U, 0x496AU, 0x496BU, 0x496CU, 0x496DU, 0x496EU, + 0x496FU, 0x4970U, 0x4971U, 0x4972U, 0x4973U, 0x4974U, 0x4975U, 0x4976U, + 0x4977U, 0x4978U, 0x4979U, 0x497AU, 0x4930U, 0x4931U, 0x4932U, 0x4933U, + 0x4934U, 0x4935U, 0x4936U, 0x4937U, 0x4938U, 0x4939U, 0x492BU, 0x492FU, + 0x4A41U, 0x4A42U, 0x4A43U, 0x4A44U, 0x4A45U, 0x4A46U, 0x4A47U, 0x4A48U, + 0x4A49U, 0x4A4AU, 0x4A4BU, 0x4A4CU, 0x4A4DU, 0x4A4EU, 0x4A4FU, 0x4A50U, + 0x4A51U, 0x4A52U, 0x4A53U, 0x4A54U, 0x4A55U, 0x4A56U, 0x4A57U, 0x4A58U, + 0x4A59U, 0x4A5AU, 0x4A61U, 0x4A62U, 0x4A63U, 0x4A64U, 0x4A65U, 0x4A66U, + 0x4A67U, 0x4A68U, 0x4A69U, 0x4A6AU, 0x4A6BU, 0x4A6CU, 0x4A6DU, 0x4A6EU, + 0x4A6FU, 0x4A70U, 0x4A71U, 0x4A72U, 0x4A73U, 0x4A74U, 0x4A75U, 0x4A76U, + 0x4A77U, 0x4A78U, 0x4A79U, 0x4A7AU, 0x4A30U, 0x4A31U, 0x4A32U, 0x4A33U, + 0x4A34U, 0x4A35U, 0x4A36U, 0x4A37U, 0x4A38U, 0x4A39U, 0x4A2BU, 0x4A2FU, + 0x4B41U, 0x4B42U, 0x4B43U, 0x4B44U, 0x4B45U, 0x4B46U, 0x4B47U, 0x4B48U, + 0x4B49U, 0x4B4AU, 0x4B4BU, 0x4B4CU, 0x4B4DU, 0x4B4EU, 0x4B4FU, 0x4B50U, + 0x4B51U, 0x4B52U, 0x4B53U, 0x4B54U, 0x4B55U, 0x4B56U, 0x4B57U, 0x4B58U, + 0x4B59U, 0x4B5AU, 0x4B61U, 0x4B62U, 0x4B63U, 0x4B64U, 0x4B65U, 0x4B66U, + 0x4B67U, 0x4B68U, 0x4B69U, 0x4B6AU, 0x4B6BU, 0x4B6CU, 0x4B6DU, 0x4B6EU, + 0x4B6FU, 0x4B70U, 0x4B71U, 0x4B72U, 0x4B73U, 0x4B74U, 0x4B75U, 0x4B76U, + 0x4B77U, 0x4B78U, 0x4B79U, 0x4B7AU, 0x4B30U, 0x4B31U, 0x4B32U, 0x4B33U, + 0x4B34U, 0x4B35U, 0x4B36U, 0x4B37U, 0x4B38U, 0x4B39U, 0x4B2BU, 0x4B2FU, + 0x4C41U, 0x4C42U, 
0x4C43U, 0x4C44U, 0x4C45U, 0x4C46U, 0x4C47U, 0x4C48U, + 0x4C49U, 0x4C4AU, 0x4C4BU, 0x4C4CU, 0x4C4DU, 0x4C4EU, 0x4C4FU, 0x4C50U, + 0x4C51U, 0x4C52U, 0x4C53U, 0x4C54U, 0x4C55U, 0x4C56U, 0x4C57U, 0x4C58U, + 0x4C59U, 0x4C5AU, 0x4C61U, 0x4C62U, 0x4C63U, 0x4C64U, 0x4C65U, 0x4C66U, + 0x4C67U, 0x4C68U, 0x4C69U, 0x4C6AU, 0x4C6BU, 0x4C6CU, 0x4C6DU, 0x4C6EU, + 0x4C6FU, 0x4C70U, 0x4C71U, 0x4C72U, 0x4C73U, 0x4C74U, 0x4C75U, 0x4C76U, + 0x4C77U, 0x4C78U, 0x4C79U, 0x4C7AU, 0x4C30U, 0x4C31U, 0x4C32U, 0x4C33U, + 0x4C34U, 0x4C35U, 0x4C36U, 0x4C37U, 0x4C38U, 0x4C39U, 0x4C2BU, 0x4C2FU, + 0x4D41U, 0x4D42U, 0x4D43U, 0x4D44U, 0x4D45U, 0x4D46U, 0x4D47U, 0x4D48U, + 0x4D49U, 0x4D4AU, 0x4D4BU, 0x4D4CU, 0x4D4DU, 0x4D4EU, 0x4D4FU, 0x4D50U, + 0x4D51U, 0x4D52U, 0x4D53U, 0x4D54U, 0x4D55U, 0x4D56U, 0x4D57U, 0x4D58U, + 0x4D59U, 0x4D5AU, 0x4D61U, 0x4D62U, 0x4D63U, 0x4D64U, 0x4D65U, 0x4D66U, + 0x4D67U, 0x4D68U, 0x4D69U, 0x4D6AU, 0x4D6BU, 0x4D6CU, 0x4D6DU, 0x4D6EU, + 0x4D6FU, 0x4D70U, 0x4D71U, 0x4D72U, 0x4D73U, 0x4D74U, 0x4D75U, 0x4D76U, + 0x4D77U, 0x4D78U, 0x4D79U, 0x4D7AU, 0x4D30U, 0x4D31U, 0x4D32U, 0x4D33U, + 0x4D34U, 0x4D35U, 0x4D36U, 0x4D37U, 0x4D38U, 0x4D39U, 0x4D2BU, 0x4D2FU, + 0x4E41U, 0x4E42U, 0x4E43U, 0x4E44U, 0x4E45U, 0x4E46U, 0x4E47U, 0x4E48U, + 0x4E49U, 0x4E4AU, 0x4E4BU, 0x4E4CU, 0x4E4DU, 0x4E4EU, 0x4E4FU, 0x4E50U, + 0x4E51U, 0x4E52U, 0x4E53U, 0x4E54U, 0x4E55U, 0x4E56U, 0x4E57U, 0x4E58U, + 0x4E59U, 0x4E5AU, 0x4E61U, 0x4E62U, 0x4E63U, 0x4E64U, 0x4E65U, 0x4E66U, + 0x4E67U, 0x4E68U, 0x4E69U, 0x4E6AU, 0x4E6BU, 0x4E6CU, 0x4E6DU, 0x4E6EU, + 0x4E6FU, 0x4E70U, 0x4E71U, 0x4E72U, 0x4E73U, 0x4E74U, 0x4E75U, 0x4E76U, + 0x4E77U, 0x4E78U, 0x4E79U, 0x4E7AU, 0x4E30U, 0x4E31U, 0x4E32U, 0x4E33U, + 0x4E34U, 0x4E35U, 0x4E36U, 0x4E37U, 0x4E38U, 0x4E39U, 0x4E2BU, 0x4E2FU, + 0x4F41U, 0x4F42U, 0x4F43U, 0x4F44U, 0x4F45U, 0x4F46U, 0x4F47U, 0x4F48U, + 0x4F49U, 0x4F4AU, 0x4F4BU, 0x4F4CU, 0x4F4DU, 0x4F4EU, 0x4F4FU, 0x4F50U, + 0x4F51U, 0x4F52U, 0x4F53U, 0x4F54U, 0x4F55U, 0x4F56U, 0x4F57U, 0x4F58U, + 0x4F59U, 0x4F5AU, 0x4F61U, 0x4F62U, 0x4F63U, 0x4F64U, 0x4F65U, 0x4F66U, + 0x4F67U, 0x4F68U, 0x4F69U, 0x4F6AU, 0x4F6BU, 0x4F6CU, 0x4F6DU, 0x4F6EU, + 0x4F6FU, 0x4F70U, 0x4F71U, 0x4F72U, 0x4F73U, 0x4F74U, 0x4F75U, 0x4F76U, + 0x4F77U, 0x4F78U, 0x4F79U, 0x4F7AU, 0x4F30U, 0x4F31U, 0x4F32U, 0x4F33U, + 0x4F34U, 0x4F35U, 0x4F36U, 0x4F37U, 0x4F38U, 0x4F39U, 0x4F2BU, 0x4F2FU, + 0x5041U, 0x5042U, 0x5043U, 0x5044U, 0x5045U, 0x5046U, 0x5047U, 0x5048U, + 0x5049U, 0x504AU, 0x504BU, 0x504CU, 0x504DU, 0x504EU, 0x504FU, 0x5050U, + 0x5051U, 0x5052U, 0x5053U, 0x5054U, 0x5055U, 0x5056U, 0x5057U, 0x5058U, + 0x5059U, 0x505AU, 0x5061U, 0x5062U, 0x5063U, 0x5064U, 0x5065U, 0x5066U, + 0x5067U, 0x5068U, 0x5069U, 0x506AU, 0x506BU, 0x506CU, 0x506DU, 0x506EU, + 0x506FU, 0x5070U, 0x5071U, 0x5072U, 0x5073U, 0x5074U, 0x5075U, 0x5076U, + 0x5077U, 0x5078U, 0x5079U, 0x507AU, 0x5030U, 0x5031U, 0x5032U, 0x5033U, + 0x5034U, 0x5035U, 0x5036U, 0x5037U, 0x5038U, 0x5039U, 0x502BU, 0x502FU, + 0x5141U, 0x5142U, 0x5143U, 0x5144U, 0x5145U, 0x5146U, 0x5147U, 0x5148U, + 0x5149U, 0x514AU, 0x514BU, 0x514CU, 0x514DU, 0x514EU, 0x514FU, 0x5150U, + 0x5151U, 0x5152U, 0x5153U, 0x5154U, 0x5155U, 0x5156U, 0x5157U, 0x5158U, + 0x5159U, 0x515AU, 0x5161U, 0x5162U, 0x5163U, 0x5164U, 0x5165U, 0x5166U, + 0x5167U, 0x5168U, 0x5169U, 0x516AU, 0x516BU, 0x516CU, 0x516DU, 0x516EU, + 0x516FU, 0x5170U, 0x5171U, 0x5172U, 0x5173U, 0x5174U, 0x5175U, 0x5176U, + 0x5177U, 0x5178U, 0x5179U, 0x517AU, 0x5130U, 0x5131U, 0x5132U, 0x5133U, + 0x5134U, 0x5135U, 0x5136U, 0x5137U, 0x5138U, 0x5139U, 0x512BU, 0x512FU, + 0x5241U, 0x5242U, 
0x5243U, 0x5244U, 0x5245U, 0x5246U, 0x5247U, 0x5248U, + 0x5249U, 0x524AU, 0x524BU, 0x524CU, 0x524DU, 0x524EU, 0x524FU, 0x5250U, + 0x5251U, 0x5252U, 0x5253U, 0x5254U, 0x5255U, 0x5256U, 0x5257U, 0x5258U, + 0x5259U, 0x525AU, 0x5261U, 0x5262U, 0x5263U, 0x5264U, 0x5265U, 0x5266U, + 0x5267U, 0x5268U, 0x5269U, 0x526AU, 0x526BU, 0x526CU, 0x526DU, 0x526EU, + 0x526FU, 0x5270U, 0x5271U, 0x5272U, 0x5273U, 0x5274U, 0x5275U, 0x5276U, + 0x5277U, 0x5278U, 0x5279U, 0x527AU, 0x5230U, 0x5231U, 0x5232U, 0x5233U, + 0x5234U, 0x5235U, 0x5236U, 0x5237U, 0x5238U, 0x5239U, 0x522BU, 0x522FU, + 0x5341U, 0x5342U, 0x5343U, 0x5344U, 0x5345U, 0x5346U, 0x5347U, 0x5348U, + 0x5349U, 0x534AU, 0x534BU, 0x534CU, 0x534DU, 0x534EU, 0x534FU, 0x5350U, + 0x5351U, 0x5352U, 0x5353U, 0x5354U, 0x5355U, 0x5356U, 0x5357U, 0x5358U, + 0x5359U, 0x535AU, 0x5361U, 0x5362U, 0x5363U, 0x5364U, 0x5365U, 0x5366U, + 0x5367U, 0x5368U, 0x5369U, 0x536AU, 0x536BU, 0x536CU, 0x536DU, 0x536EU, + 0x536FU, 0x5370U, 0x5371U, 0x5372U, 0x5373U, 0x5374U, 0x5375U, 0x5376U, + 0x5377U, 0x5378U, 0x5379U, 0x537AU, 0x5330U, 0x5331U, 0x5332U, 0x5333U, + 0x5334U, 0x5335U, 0x5336U, 0x5337U, 0x5338U, 0x5339U, 0x532BU, 0x532FU, + 0x5441U, 0x5442U, 0x5443U, 0x5444U, 0x5445U, 0x5446U, 0x5447U, 0x5448U, + 0x5449U, 0x544AU, 0x544BU, 0x544CU, 0x544DU, 0x544EU, 0x544FU, 0x5450U, + 0x5451U, 0x5452U, 0x5453U, 0x5454U, 0x5455U, 0x5456U, 0x5457U, 0x5458U, + 0x5459U, 0x545AU, 0x5461U, 0x5462U, 0x5463U, 0x5464U, 0x5465U, 0x5466U, + 0x5467U, 0x5468U, 0x5469U, 0x546AU, 0x546BU, 0x546CU, 0x546DU, 0x546EU, + 0x546FU, 0x5470U, 0x5471U, 0x5472U, 0x5473U, 0x5474U, 0x5475U, 0x5476U, + 0x5477U, 0x5478U, 0x5479U, 0x547AU, 0x5430U, 0x5431U, 0x5432U, 0x5433U, + 0x5434U, 0x5435U, 0x5436U, 0x5437U, 0x5438U, 0x5439U, 0x542BU, 0x542FU, + 0x5541U, 0x5542U, 0x5543U, 0x5544U, 0x5545U, 0x5546U, 0x5547U, 0x5548U, + 0x5549U, 0x554AU, 0x554BU, 0x554CU, 0x554DU, 0x554EU, 0x554FU, 0x5550U, + 0x5551U, 0x5552U, 0x5553U, 0x5554U, 0x5555U, 0x5556U, 0x5557U, 0x5558U, + 0x5559U, 0x555AU, 0x5561U, 0x5562U, 0x5563U, 0x5564U, 0x5565U, 0x5566U, + 0x5567U, 0x5568U, 0x5569U, 0x556AU, 0x556BU, 0x556CU, 0x556DU, 0x556EU, + 0x556FU, 0x5570U, 0x5571U, 0x5572U, 0x5573U, 0x5574U, 0x5575U, 0x5576U, + 0x5577U, 0x5578U, 0x5579U, 0x557AU, 0x5530U, 0x5531U, 0x5532U, 0x5533U, + 0x5534U, 0x5535U, 0x5536U, 0x5537U, 0x5538U, 0x5539U, 0x552BU, 0x552FU, + 0x5641U, 0x5642U, 0x5643U, 0x5644U, 0x5645U, 0x5646U, 0x5647U, 0x5648U, + 0x5649U, 0x564AU, 0x564BU, 0x564CU, 0x564DU, 0x564EU, 0x564FU, 0x5650U, + 0x5651U, 0x5652U, 0x5653U, 0x5654U, 0x5655U, 0x5656U, 0x5657U, 0x5658U, + 0x5659U, 0x565AU, 0x5661U, 0x5662U, 0x5663U, 0x5664U, 0x5665U, 0x5666U, + 0x5667U, 0x5668U, 0x5669U, 0x566AU, 0x566BU, 0x566CU, 0x566DU, 0x566EU, + 0x566FU, 0x5670U, 0x5671U, 0x5672U, 0x5673U, 0x5674U, 0x5675U, 0x5676U, + 0x5677U, 0x5678U, 0x5679U, 0x567AU, 0x5630U, 0x5631U, 0x5632U, 0x5633U, + 0x5634U, 0x5635U, 0x5636U, 0x5637U, 0x5638U, 0x5639U, 0x562BU, 0x562FU, + 0x5741U, 0x5742U, 0x5743U, 0x5744U, 0x5745U, 0x5746U, 0x5747U, 0x5748U, + 0x5749U, 0x574AU, 0x574BU, 0x574CU, 0x574DU, 0x574EU, 0x574FU, 0x5750U, + 0x5751U, 0x5752U, 0x5753U, 0x5754U, 0x5755U, 0x5756U, 0x5757U, 0x5758U, + 0x5759U, 0x575AU, 0x5761U, 0x5762U, 0x5763U, 0x5764U, 0x5765U, 0x5766U, + 0x5767U, 0x5768U, 0x5769U, 0x576AU, 0x576BU, 0x576CU, 0x576DU, 0x576EU, + 0x576FU, 0x5770U, 0x5771U, 0x5772U, 0x5773U, 0x5774U, 0x5775U, 0x5776U, + 0x5777U, 0x5778U, 0x5779U, 0x577AU, 0x5730U, 0x5731U, 0x5732U, 0x5733U, + 0x5734U, 0x5735U, 0x5736U, 0x5737U, 0x5738U, 0x5739U, 0x572BU, 0x572FU, + 0x5841U, 0x5842U, 
0x5843U, 0x5844U, 0x5845U, 0x5846U, 0x5847U, 0x5848U, + 0x5849U, 0x584AU, 0x584BU, 0x584CU, 0x584DU, 0x584EU, 0x584FU, 0x5850U, + 0x5851U, 0x5852U, 0x5853U, 0x5854U, 0x5855U, 0x5856U, 0x5857U, 0x5858U, + 0x5859U, 0x585AU, 0x5861U, 0x5862U, 0x5863U, 0x5864U, 0x5865U, 0x5866U, + 0x5867U, 0x5868U, 0x5869U, 0x586AU, 0x586BU, 0x586CU, 0x586DU, 0x586EU, + 0x586FU, 0x5870U, 0x5871U, 0x5872U, 0x5873U, 0x5874U, 0x5875U, 0x5876U, + 0x5877U, 0x5878U, 0x5879U, 0x587AU, 0x5830U, 0x5831U, 0x5832U, 0x5833U, + 0x5834U, 0x5835U, 0x5836U, 0x5837U, 0x5838U, 0x5839U, 0x582BU, 0x582FU, + 0x5941U, 0x5942U, 0x5943U, 0x5944U, 0x5945U, 0x5946U, 0x5947U, 0x5948U, + 0x5949U, 0x594AU, 0x594BU, 0x594CU, 0x594DU, 0x594EU, 0x594FU, 0x5950U, + 0x5951U, 0x5952U, 0x5953U, 0x5954U, 0x5955U, 0x5956U, 0x5957U, 0x5958U, + 0x5959U, 0x595AU, 0x5961U, 0x5962U, 0x5963U, 0x5964U, 0x5965U, 0x5966U, + 0x5967U, 0x5968U, 0x5969U, 0x596AU, 0x596BU, 0x596CU, 0x596DU, 0x596EU, + 0x596FU, 0x5970U, 0x5971U, 0x5972U, 0x5973U, 0x5974U, 0x5975U, 0x5976U, + 0x5977U, 0x5978U, 0x5979U, 0x597AU, 0x5930U, 0x5931U, 0x5932U, 0x5933U, + 0x5934U, 0x5935U, 0x5936U, 0x5937U, 0x5938U, 0x5939U, 0x592BU, 0x592FU, + 0x5A41U, 0x5A42U, 0x5A43U, 0x5A44U, 0x5A45U, 0x5A46U, 0x5A47U, 0x5A48U, + 0x5A49U, 0x5A4AU, 0x5A4BU, 0x5A4CU, 0x5A4DU, 0x5A4EU, 0x5A4FU, 0x5A50U, + 0x5A51U, 0x5A52U, 0x5A53U, 0x5A54U, 0x5A55U, 0x5A56U, 0x5A57U, 0x5A58U, + 0x5A59U, 0x5A5AU, 0x5A61U, 0x5A62U, 0x5A63U, 0x5A64U, 0x5A65U, 0x5A66U, + 0x5A67U, 0x5A68U, 0x5A69U, 0x5A6AU, 0x5A6BU, 0x5A6CU, 0x5A6DU, 0x5A6EU, + 0x5A6FU, 0x5A70U, 0x5A71U, 0x5A72U, 0x5A73U, 0x5A74U, 0x5A75U, 0x5A76U, + 0x5A77U, 0x5A78U, 0x5A79U, 0x5A7AU, 0x5A30U, 0x5A31U, 0x5A32U, 0x5A33U, + 0x5A34U, 0x5A35U, 0x5A36U, 0x5A37U, 0x5A38U, 0x5A39U, 0x5A2BU, 0x5A2FU, + 0x6141U, 0x6142U, 0x6143U, 0x6144U, 0x6145U, 0x6146U, 0x6147U, 0x6148U, + 0x6149U, 0x614AU, 0x614BU, 0x614CU, 0x614DU, 0x614EU, 0x614FU, 0x6150U, + 0x6151U, 0x6152U, 0x6153U, 0x6154U, 0x6155U, 0x6156U, 0x6157U, 0x6158U, + 0x6159U, 0x615AU, 0x6161U, 0x6162U, 0x6163U, 0x6164U, 0x6165U, 0x6166U, + 0x6167U, 0x6168U, 0x6169U, 0x616AU, 0x616BU, 0x616CU, 0x616DU, 0x616EU, + 0x616FU, 0x6170U, 0x6171U, 0x6172U, 0x6173U, 0x6174U, 0x6175U, 0x6176U, + 0x6177U, 0x6178U, 0x6179U, 0x617AU, 0x6130U, 0x6131U, 0x6132U, 0x6133U, + 0x6134U, 0x6135U, 0x6136U, 0x6137U, 0x6138U, 0x6139U, 0x612BU, 0x612FU, + 0x6241U, 0x6242U, 0x6243U, 0x6244U, 0x6245U, 0x6246U, 0x6247U, 0x6248U, + 0x6249U, 0x624AU, 0x624BU, 0x624CU, 0x624DU, 0x624EU, 0x624FU, 0x6250U, + 0x6251U, 0x6252U, 0x6253U, 0x6254U, 0x6255U, 0x6256U, 0x6257U, 0x6258U, + 0x6259U, 0x625AU, 0x6261U, 0x6262U, 0x6263U, 0x6264U, 0x6265U, 0x6266U, + 0x6267U, 0x6268U, 0x6269U, 0x626AU, 0x626BU, 0x626CU, 0x626DU, 0x626EU, + 0x626FU, 0x6270U, 0x6271U, 0x6272U, 0x6273U, 0x6274U, 0x6275U, 0x6276U, + 0x6277U, 0x6278U, 0x6279U, 0x627AU, 0x6230U, 0x6231U, 0x6232U, 0x6233U, + 0x6234U, 0x6235U, 0x6236U, 0x6237U, 0x6238U, 0x6239U, 0x622BU, 0x622FU, + 0x6341U, 0x6342U, 0x6343U, 0x6344U, 0x6345U, 0x6346U, 0x6347U, 0x6348U, + 0x6349U, 0x634AU, 0x634BU, 0x634CU, 0x634DU, 0x634EU, 0x634FU, 0x6350U, + 0x6351U, 0x6352U, 0x6353U, 0x6354U, 0x6355U, 0x6356U, 0x6357U, 0x6358U, + 0x6359U, 0x635AU, 0x6361U, 0x6362U, 0x6363U, 0x6364U, 0x6365U, 0x6366U, + 0x6367U, 0x6368U, 0x6369U, 0x636AU, 0x636BU, 0x636CU, 0x636DU, 0x636EU, + 0x636FU, 0x6370U, 0x6371U, 0x6372U, 0x6373U, 0x6374U, 0x6375U, 0x6376U, + 0x6377U, 0x6378U, 0x6379U, 0x637AU, 0x6330U, 0x6331U, 0x6332U, 0x6333U, + 0x6334U, 0x6335U, 0x6336U, 0x6337U, 0x6338U, 0x6339U, 0x632BU, 0x632FU, + 0x6441U, 0x6442U, 
0x6443U, 0x6444U, 0x6445U, 0x6446U, 0x6447U, 0x6448U, + 0x6449U, 0x644AU, 0x644BU, 0x644CU, 0x644DU, 0x644EU, 0x644FU, 0x6450U, + 0x6451U, 0x6452U, 0x6453U, 0x6454U, 0x6455U, 0x6456U, 0x6457U, 0x6458U, + 0x6459U, 0x645AU, 0x6461U, 0x6462U, 0x6463U, 0x6464U, 0x6465U, 0x6466U, + 0x6467U, 0x6468U, 0x6469U, 0x646AU, 0x646BU, 0x646CU, 0x646DU, 0x646EU, + 0x646FU, 0x6470U, 0x6471U, 0x6472U, 0x6473U, 0x6474U, 0x6475U, 0x6476U, + 0x6477U, 0x6478U, 0x6479U, 0x647AU, 0x6430U, 0x6431U, 0x6432U, 0x6433U, + 0x6434U, 0x6435U, 0x6436U, 0x6437U, 0x6438U, 0x6439U, 0x642BU, 0x642FU, + 0x6541U, 0x6542U, 0x6543U, 0x6544U, 0x6545U, 0x6546U, 0x6547U, 0x6548U, + 0x6549U, 0x654AU, 0x654BU, 0x654CU, 0x654DU, 0x654EU, 0x654FU, 0x6550U, + 0x6551U, 0x6552U, 0x6553U, 0x6554U, 0x6555U, 0x6556U, 0x6557U, 0x6558U, + 0x6559U, 0x655AU, 0x6561U, 0x6562U, 0x6563U, 0x6564U, 0x6565U, 0x6566U, + 0x6567U, 0x6568U, 0x6569U, 0x656AU, 0x656BU, 0x656CU, 0x656DU, 0x656EU, + 0x656FU, 0x6570U, 0x6571U, 0x6572U, 0x6573U, 0x6574U, 0x6575U, 0x6576U, + 0x6577U, 0x6578U, 0x6579U, 0x657AU, 0x6530U, 0x6531U, 0x6532U, 0x6533U, + 0x6534U, 0x6535U, 0x6536U, 0x6537U, 0x6538U, 0x6539U, 0x652BU, 0x652FU, + 0x6641U, 0x6642U, 0x6643U, 0x6644U, 0x6645U, 0x6646U, 0x6647U, 0x6648U, + 0x6649U, 0x664AU, 0x664BU, 0x664CU, 0x664DU, 0x664EU, 0x664FU, 0x6650U, + 0x6651U, 0x6652U, 0x6653U, 0x6654U, 0x6655U, 0x6656U, 0x6657U, 0x6658U, + 0x6659U, 0x665AU, 0x6661U, 0x6662U, 0x6663U, 0x6664U, 0x6665U, 0x6666U, + 0x6667U, 0x6668U, 0x6669U, 0x666AU, 0x666BU, 0x666CU, 0x666DU, 0x666EU, + 0x666FU, 0x6670U, 0x6671U, 0x6672U, 0x6673U, 0x6674U, 0x6675U, 0x6676U, + 0x6677U, 0x6678U, 0x6679U, 0x667AU, 0x6630U, 0x6631U, 0x6632U, 0x6633U, + 0x6634U, 0x6635U, 0x6636U, 0x6637U, 0x6638U, 0x6639U, 0x662BU, 0x662FU, + 0x6741U, 0x6742U, 0x6743U, 0x6744U, 0x6745U, 0x6746U, 0x6747U, 0x6748U, + 0x6749U, 0x674AU, 0x674BU, 0x674CU, 0x674DU, 0x674EU, 0x674FU, 0x6750U, + 0x6751U, 0x6752U, 0x6753U, 0x6754U, 0x6755U, 0x6756U, 0x6757U, 0x6758U, + 0x6759U, 0x675AU, 0x6761U, 0x6762U, 0x6763U, 0x6764U, 0x6765U, 0x6766U, + 0x6767U, 0x6768U, 0x6769U, 0x676AU, 0x676BU, 0x676CU, 0x676DU, 0x676EU, + 0x676FU, 0x6770U, 0x6771U, 0x6772U, 0x6773U, 0x6774U, 0x6775U, 0x6776U, + 0x6777U, 0x6778U, 0x6779U, 0x677AU, 0x6730U, 0x6731U, 0x6732U, 0x6733U, + 0x6734U, 0x6735U, 0x6736U, 0x6737U, 0x6738U, 0x6739U, 0x672BU, 0x672FU, + 0x6841U, 0x6842U, 0x6843U, 0x6844U, 0x6845U, 0x6846U, 0x6847U, 0x6848U, + 0x6849U, 0x684AU, 0x684BU, 0x684CU, 0x684DU, 0x684EU, 0x684FU, 0x6850U, + 0x6851U, 0x6852U, 0x6853U, 0x6854U, 0x6855U, 0x6856U, 0x6857U, 0x6858U, + 0x6859U, 0x685AU, 0x6861U, 0x6862U, 0x6863U, 0x6864U, 0x6865U, 0x6866U, + 0x6867U, 0x6868U, 0x6869U, 0x686AU, 0x686BU, 0x686CU, 0x686DU, 0x686EU, + 0x686FU, 0x6870U, 0x6871U, 0x6872U, 0x6873U, 0x6874U, 0x6875U, 0x6876U, + 0x6877U, 0x6878U, 0x6879U, 0x687AU, 0x6830U, 0x6831U, 0x6832U, 0x6833U, + 0x6834U, 0x6835U, 0x6836U, 0x6837U, 0x6838U, 0x6839U, 0x682BU, 0x682FU, + 0x6941U, 0x6942U, 0x6943U, 0x6944U, 0x6945U, 0x6946U, 0x6947U, 0x6948U, + 0x6949U, 0x694AU, 0x694BU, 0x694CU, 0x694DU, 0x694EU, 0x694FU, 0x6950U, + 0x6951U, 0x6952U, 0x6953U, 0x6954U, 0x6955U, 0x6956U, 0x6957U, 0x6958U, + 0x6959U, 0x695AU, 0x6961U, 0x6962U, 0x6963U, 0x6964U, 0x6965U, 0x6966U, + 0x6967U, 0x6968U, 0x6969U, 0x696AU, 0x696BU, 0x696CU, 0x696DU, 0x696EU, + 0x696FU, 0x6970U, 0x6971U, 0x6972U, 0x6973U, 0x6974U, 0x6975U, 0x6976U, + 0x6977U, 0x6978U, 0x6979U, 0x697AU, 0x6930U, 0x6931U, 0x6932U, 0x6933U, + 0x6934U, 0x6935U, 0x6936U, 0x6937U, 0x6938U, 0x6939U, 0x692BU, 0x692FU, + 0x6A41U, 0x6A42U, 
0x6A43U, 0x6A44U, 0x6A45U, 0x6A46U, 0x6A47U, 0x6A48U, + 0x6A49U, 0x6A4AU, 0x6A4BU, 0x6A4CU, 0x6A4DU, 0x6A4EU, 0x6A4FU, 0x6A50U, + 0x6A51U, 0x6A52U, 0x6A53U, 0x6A54U, 0x6A55U, 0x6A56U, 0x6A57U, 0x6A58U, + 0x6A59U, 0x6A5AU, 0x6A61U, 0x6A62U, 0x6A63U, 0x6A64U, 0x6A65U, 0x6A66U, + 0x6A67U, 0x6A68U, 0x6A69U, 0x6A6AU, 0x6A6BU, 0x6A6CU, 0x6A6DU, 0x6A6EU, + 0x6A6FU, 0x6A70U, 0x6A71U, 0x6A72U, 0x6A73U, 0x6A74U, 0x6A75U, 0x6A76U, + 0x6A77U, 0x6A78U, 0x6A79U, 0x6A7AU, 0x6A30U, 0x6A31U, 0x6A32U, 0x6A33U, + 0x6A34U, 0x6A35U, 0x6A36U, 0x6A37U, 0x6A38U, 0x6A39U, 0x6A2BU, 0x6A2FU, + 0x6B41U, 0x6B42U, 0x6B43U, 0x6B44U, 0x6B45U, 0x6B46U, 0x6B47U, 0x6B48U, + 0x6B49U, 0x6B4AU, 0x6B4BU, 0x6B4CU, 0x6B4DU, 0x6B4EU, 0x6B4FU, 0x6B50U, + 0x6B51U, 0x6B52U, 0x6B53U, 0x6B54U, 0x6B55U, 0x6B56U, 0x6B57U, 0x6B58U, + 0x6B59U, 0x6B5AU, 0x6B61U, 0x6B62U, 0x6B63U, 0x6B64U, 0x6B65U, 0x6B66U, + 0x6B67U, 0x6B68U, 0x6B69U, 0x6B6AU, 0x6B6BU, 0x6B6CU, 0x6B6DU, 0x6B6EU, + 0x6B6FU, 0x6B70U, 0x6B71U, 0x6B72U, 0x6B73U, 0x6B74U, 0x6B75U, 0x6B76U, + 0x6B77U, 0x6B78U, 0x6B79U, 0x6B7AU, 0x6B30U, 0x6B31U, 0x6B32U, 0x6B33U, + 0x6B34U, 0x6B35U, 0x6B36U, 0x6B37U, 0x6B38U, 0x6B39U, 0x6B2BU, 0x6B2FU, + 0x6C41U, 0x6C42U, 0x6C43U, 0x6C44U, 0x6C45U, 0x6C46U, 0x6C47U, 0x6C48U, + 0x6C49U, 0x6C4AU, 0x6C4BU, 0x6C4CU, 0x6C4DU, 0x6C4EU, 0x6C4FU, 0x6C50U, + 0x6C51U, 0x6C52U, 0x6C53U, 0x6C54U, 0x6C55U, 0x6C56U, 0x6C57U, 0x6C58U, + 0x6C59U, 0x6C5AU, 0x6C61U, 0x6C62U, 0x6C63U, 0x6C64U, 0x6C65U, 0x6C66U, + 0x6C67U, 0x6C68U, 0x6C69U, 0x6C6AU, 0x6C6BU, 0x6C6CU, 0x6C6DU, 0x6C6EU, + 0x6C6FU, 0x6C70U, 0x6C71U, 0x6C72U, 0x6C73U, 0x6C74U, 0x6C75U, 0x6C76U, + 0x6C77U, 0x6C78U, 0x6C79U, 0x6C7AU, 0x6C30U, 0x6C31U, 0x6C32U, 0x6C33U, + 0x6C34U, 0x6C35U, 0x6C36U, 0x6C37U, 0x6C38U, 0x6C39U, 0x6C2BU, 0x6C2FU, + 0x6D41U, 0x6D42U, 0x6D43U, 0x6D44U, 0x6D45U, 0x6D46U, 0x6D47U, 0x6D48U, + 0x6D49U, 0x6D4AU, 0x6D4BU, 0x6D4CU, 0x6D4DU, 0x6D4EU, 0x6D4FU, 0x6D50U, + 0x6D51U, 0x6D52U, 0x6D53U, 0x6D54U, 0x6D55U, 0x6D56U, 0x6D57U, 0x6D58U, + 0x6D59U, 0x6D5AU, 0x6D61U, 0x6D62U, 0x6D63U, 0x6D64U, 0x6D65U, 0x6D66U, + 0x6D67U, 0x6D68U, 0x6D69U, 0x6D6AU, 0x6D6BU, 0x6D6CU, 0x6D6DU, 0x6D6EU, + 0x6D6FU, 0x6D70U, 0x6D71U, 0x6D72U, 0x6D73U, 0x6D74U, 0x6D75U, 0x6D76U, + 0x6D77U, 0x6D78U, 0x6D79U, 0x6D7AU, 0x6D30U, 0x6D31U, 0x6D32U, 0x6D33U, + 0x6D34U, 0x6D35U, 0x6D36U, 0x6D37U, 0x6D38U, 0x6D39U, 0x6D2BU, 0x6D2FU, + 0x6E41U, 0x6E42U, 0x6E43U, 0x6E44U, 0x6E45U, 0x6E46U, 0x6E47U, 0x6E48U, + 0x6E49U, 0x6E4AU, 0x6E4BU, 0x6E4CU, 0x6E4DU, 0x6E4EU, 0x6E4FU, 0x6E50U, + 0x6E51U, 0x6E52U, 0x6E53U, 0x6E54U, 0x6E55U, 0x6E56U, 0x6E57U, 0x6E58U, + 0x6E59U, 0x6E5AU, 0x6E61U, 0x6E62U, 0x6E63U, 0x6E64U, 0x6E65U, 0x6E66U, + 0x6E67U, 0x6E68U, 0x6E69U, 0x6E6AU, 0x6E6BU, 0x6E6CU, 0x6E6DU, 0x6E6EU, + 0x6E6FU, 0x6E70U, 0x6E71U, 0x6E72U, 0x6E73U, 0x6E74U, 0x6E75U, 0x6E76U, + 0x6E77U, 0x6E78U, 0x6E79U, 0x6E7AU, 0x6E30U, 0x6E31U, 0x6E32U, 0x6E33U, + 0x6E34U, 0x6E35U, 0x6E36U, 0x6E37U, 0x6E38U, 0x6E39U, 0x6E2BU, 0x6E2FU, + 0x6F41U, 0x6F42U, 0x6F43U, 0x6F44U, 0x6F45U, 0x6F46U, 0x6F47U, 0x6F48U, + 0x6F49U, 0x6F4AU, 0x6F4BU, 0x6F4CU, 0x6F4DU, 0x6F4EU, 0x6F4FU, 0x6F50U, + 0x6F51U, 0x6F52U, 0x6F53U, 0x6F54U, 0x6F55U, 0x6F56U, 0x6F57U, 0x6F58U, + 0x6F59U, 0x6F5AU, 0x6F61U, 0x6F62U, 0x6F63U, 0x6F64U, 0x6F65U, 0x6F66U, + 0x6F67U, 0x6F68U, 0x6F69U, 0x6F6AU, 0x6F6BU, 0x6F6CU, 0x6F6DU, 0x6F6EU, + 0x6F6FU, 0x6F70U, 0x6F71U, 0x6F72U, 0x6F73U, 0x6F74U, 0x6F75U, 0x6F76U, + 0x6F77U, 0x6F78U, 0x6F79U, 0x6F7AU, 0x6F30U, 0x6F31U, 0x6F32U, 0x6F33U, + 0x6F34U, 0x6F35U, 0x6F36U, 0x6F37U, 0x6F38U, 0x6F39U, 0x6F2BU, 0x6F2FU, + 0x7041U, 0x7042U, 
0x7043U, 0x7044U, 0x7045U, 0x7046U, 0x7047U, 0x7048U, + 0x7049U, 0x704AU, 0x704BU, 0x704CU, 0x704DU, 0x704EU, 0x704FU, 0x7050U, + 0x7051U, 0x7052U, 0x7053U, 0x7054U, 0x7055U, 0x7056U, 0x7057U, 0x7058U, + 0x7059U, 0x705AU, 0x7061U, 0x7062U, 0x7063U, 0x7064U, 0x7065U, 0x7066U, + 0x7067U, 0x7068U, 0x7069U, 0x706AU, 0x706BU, 0x706CU, 0x706DU, 0x706EU, + 0x706FU, 0x7070U, 0x7071U, 0x7072U, 0x7073U, 0x7074U, 0x7075U, 0x7076U, + 0x7077U, 0x7078U, 0x7079U, 0x707AU, 0x7030U, 0x7031U, 0x7032U, 0x7033U, + 0x7034U, 0x7035U, 0x7036U, 0x7037U, 0x7038U, 0x7039U, 0x702BU, 0x702FU, + 0x7141U, 0x7142U, 0x7143U, 0x7144U, 0x7145U, 0x7146U, 0x7147U, 0x7148U, + 0x7149U, 0x714AU, 0x714BU, 0x714CU, 0x714DU, 0x714EU, 0x714FU, 0x7150U, + 0x7151U, 0x7152U, 0x7153U, 0x7154U, 0x7155U, 0x7156U, 0x7157U, 0x7158U, + 0x7159U, 0x715AU, 0x7161U, 0x7162U, 0x7163U, 0x7164U, 0x7165U, 0x7166U, + 0x7167U, 0x7168U, 0x7169U, 0x716AU, 0x716BU, 0x716CU, 0x716DU, 0x716EU, + 0x716FU, 0x7170U, 0x7171U, 0x7172U, 0x7173U, 0x7174U, 0x7175U, 0x7176U, + 0x7177U, 0x7178U, 0x7179U, 0x717AU, 0x7130U, 0x7131U, 0x7132U, 0x7133U, + 0x7134U, 0x7135U, 0x7136U, 0x7137U, 0x7138U, 0x7139U, 0x712BU, 0x712FU, + 0x7241U, 0x7242U, 0x7243U, 0x7244U, 0x7245U, 0x7246U, 0x7247U, 0x7248U, + 0x7249U, 0x724AU, 0x724BU, 0x724CU, 0x724DU, 0x724EU, 0x724FU, 0x7250U, + 0x7251U, 0x7252U, 0x7253U, 0x7254U, 0x7255U, 0x7256U, 0x7257U, 0x7258U, + 0x7259U, 0x725AU, 0x7261U, 0x7262U, 0x7263U, 0x7264U, 0x7265U, 0x7266U, + 0x7267U, 0x7268U, 0x7269U, 0x726AU, 0x726BU, 0x726CU, 0x726DU, 0x726EU, + 0x726FU, 0x7270U, 0x7271U, 0x7272U, 0x7273U, 0x7274U, 0x7275U, 0x7276U, + 0x7277U, 0x7278U, 0x7279U, 0x727AU, 0x7230U, 0x7231U, 0x7232U, 0x7233U, + 0x7234U, 0x7235U, 0x7236U, 0x7237U, 0x7238U, 0x7239U, 0x722BU, 0x722FU, + 0x7341U, 0x7342U, 0x7343U, 0x7344U, 0x7345U, 0x7346U, 0x7347U, 0x7348U, + 0x7349U, 0x734AU, 0x734BU, 0x734CU, 0x734DU, 0x734EU, 0x734FU, 0x7350U, + 0x7351U, 0x7352U, 0x7353U, 0x7354U, 0x7355U, 0x7356U, 0x7357U, 0x7358U, + 0x7359U, 0x735AU, 0x7361U, 0x7362U, 0x7363U, 0x7364U, 0x7365U, 0x7366U, + 0x7367U, 0x7368U, 0x7369U, 0x736AU, 0x736BU, 0x736CU, 0x736DU, 0x736EU, + 0x736FU, 0x7370U, 0x7371U, 0x7372U, 0x7373U, 0x7374U, 0x7375U, 0x7376U, + 0x7377U, 0x7378U, 0x7379U, 0x737AU, 0x7330U, 0x7331U, 0x7332U, 0x7333U, + 0x7334U, 0x7335U, 0x7336U, 0x7337U, 0x7338U, 0x7339U, 0x732BU, 0x732FU, + 0x7441U, 0x7442U, 0x7443U, 0x7444U, 0x7445U, 0x7446U, 0x7447U, 0x7448U, + 0x7449U, 0x744AU, 0x744BU, 0x744CU, 0x744DU, 0x744EU, 0x744FU, 0x7450U, + 0x7451U, 0x7452U, 0x7453U, 0x7454U, 0x7455U, 0x7456U, 0x7457U, 0x7458U, + 0x7459U, 0x745AU, 0x7461U, 0x7462U, 0x7463U, 0x7464U, 0x7465U, 0x7466U, + 0x7467U, 0x7468U, 0x7469U, 0x746AU, 0x746BU, 0x746CU, 0x746DU, 0x746EU, + 0x746FU, 0x7470U, 0x7471U, 0x7472U, 0x7473U, 0x7474U, 0x7475U, 0x7476U, + 0x7477U, 0x7478U, 0x7479U, 0x747AU, 0x7430U, 0x7431U, 0x7432U, 0x7433U, + 0x7434U, 0x7435U, 0x7436U, 0x7437U, 0x7438U, 0x7439U, 0x742BU, 0x742FU, + 0x7541U, 0x7542U, 0x7543U, 0x7544U, 0x7545U, 0x7546U, 0x7547U, 0x7548U, + 0x7549U, 0x754AU, 0x754BU, 0x754CU, 0x754DU, 0x754EU, 0x754FU, 0x7550U, + 0x7551U, 0x7552U, 0x7553U, 0x7554U, 0x7555U, 0x7556U, 0x7557U, 0x7558U, + 0x7559U, 0x755AU, 0x7561U, 0x7562U, 0x7563U, 0x7564U, 0x7565U, 0x7566U, + 0x7567U, 0x7568U, 0x7569U, 0x756AU, 0x756BU, 0x756CU, 0x756DU, 0x756EU, + 0x756FU, 0x7570U, 0x7571U, 0x7572U, 0x7573U, 0x7574U, 0x7575U, 0x7576U, + 0x7577U, 0x7578U, 0x7579U, 0x757AU, 0x7530U, 0x7531U, 0x7532U, 0x7533U, + 0x7534U, 0x7535U, 0x7536U, 0x7537U, 0x7538U, 0x7539U, 0x752BU, 0x752FU, + 0x7641U, 0x7642U, 
0x7643U, 0x7644U, 0x7645U, 0x7646U, 0x7647U, 0x7648U, + 0x7649U, 0x764AU, 0x764BU, 0x764CU, 0x764DU, 0x764EU, 0x764FU, 0x7650U, + 0x7651U, 0x7652U, 0x7653U, 0x7654U, 0x7655U, 0x7656U, 0x7657U, 0x7658U, + 0x7659U, 0x765AU, 0x7661U, 0x7662U, 0x7663U, 0x7664U, 0x7665U, 0x7666U, + 0x7667U, 0x7668U, 0x7669U, 0x766AU, 0x766BU, 0x766CU, 0x766DU, 0x766EU, + 0x766FU, 0x7670U, 0x7671U, 0x7672U, 0x7673U, 0x7674U, 0x7675U, 0x7676U, + 0x7677U, 0x7678U, 0x7679U, 0x767AU, 0x7630U, 0x7631U, 0x7632U, 0x7633U, + 0x7634U, 0x7635U, 0x7636U, 0x7637U, 0x7638U, 0x7639U, 0x762BU, 0x762FU, + 0x7741U, 0x7742U, 0x7743U, 0x7744U, 0x7745U, 0x7746U, 0x7747U, 0x7748U, + 0x7749U, 0x774AU, 0x774BU, 0x774CU, 0x774DU, 0x774EU, 0x774FU, 0x7750U, + 0x7751U, 0x7752U, 0x7753U, 0x7754U, 0x7755U, 0x7756U, 0x7757U, 0x7758U, + 0x7759U, 0x775AU, 0x7761U, 0x7762U, 0x7763U, 0x7764U, 0x7765U, 0x7766U, + 0x7767U, 0x7768U, 0x7769U, 0x776AU, 0x776BU, 0x776CU, 0x776DU, 0x776EU, + 0x776FU, 0x7770U, 0x7771U, 0x7772U, 0x7773U, 0x7774U, 0x7775U, 0x7776U, + 0x7777U, 0x7778U, 0x7779U, 0x777AU, 0x7730U, 0x7731U, 0x7732U, 0x7733U, + 0x7734U, 0x7735U, 0x7736U, 0x7737U, 0x7738U, 0x7739U, 0x772BU, 0x772FU, + 0x7841U, 0x7842U, 0x7843U, 0x7844U, 0x7845U, 0x7846U, 0x7847U, 0x7848U, + 0x7849U, 0x784AU, 0x784BU, 0x784CU, 0x784DU, 0x784EU, 0x784FU, 0x7850U, + 0x7851U, 0x7852U, 0x7853U, 0x7854U, 0x7855U, 0x7856U, 0x7857U, 0x7858U, + 0x7859U, 0x785AU, 0x7861U, 0x7862U, 0x7863U, 0x7864U, 0x7865U, 0x7866U, + 0x7867U, 0x7868U, 0x7869U, 0x786AU, 0x786BU, 0x786CU, 0x786DU, 0x786EU, + 0x786FU, 0x7870U, 0x7871U, 0x7872U, 0x7873U, 0x7874U, 0x7875U, 0x7876U, + 0x7877U, 0x7878U, 0x7879U, 0x787AU, 0x7830U, 0x7831U, 0x7832U, 0x7833U, + 0x7834U, 0x7835U, 0x7836U, 0x7837U, 0x7838U, 0x7839U, 0x782BU, 0x782FU, + 0x7941U, 0x7942U, 0x7943U, 0x7944U, 0x7945U, 0x7946U, 0x7947U, 0x7948U, + 0x7949U, 0x794AU, 0x794BU, 0x794CU, 0x794DU, 0x794EU, 0x794FU, 0x7950U, + 0x7951U, 0x7952U, 0x7953U, 0x7954U, 0x7955U, 0x7956U, 0x7957U, 0x7958U, + 0x7959U, 0x795AU, 0x7961U, 0x7962U, 0x7963U, 0x7964U, 0x7965U, 0x7966U, + 0x7967U, 0x7968U, 0x7969U, 0x796AU, 0x796BU, 0x796CU, 0x796DU, 0x796EU, + 0x796FU, 0x7970U, 0x7971U, 0x7972U, 0x7973U, 0x7974U, 0x7975U, 0x7976U, + 0x7977U, 0x7978U, 0x7979U, 0x797AU, 0x7930U, 0x7931U, 0x7932U, 0x7933U, + 0x7934U, 0x7935U, 0x7936U, 0x7937U, 0x7938U, 0x7939U, 0x792BU, 0x792FU, + 0x7A41U, 0x7A42U, 0x7A43U, 0x7A44U, 0x7A45U, 0x7A46U, 0x7A47U, 0x7A48U, + 0x7A49U, 0x7A4AU, 0x7A4BU, 0x7A4CU, 0x7A4DU, 0x7A4EU, 0x7A4FU, 0x7A50U, + 0x7A51U, 0x7A52U, 0x7A53U, 0x7A54U, 0x7A55U, 0x7A56U, 0x7A57U, 0x7A58U, + 0x7A59U, 0x7A5AU, 0x7A61U, 0x7A62U, 0x7A63U, 0x7A64U, 0x7A65U, 0x7A66U, + 0x7A67U, 0x7A68U, 0x7A69U, 0x7A6AU, 0x7A6BU, 0x7A6CU, 0x7A6DU, 0x7A6EU, + 0x7A6FU, 0x7A70U, 0x7A71U, 0x7A72U, 0x7A73U, 0x7A74U, 0x7A75U, 0x7A76U, + 0x7A77U, 0x7A78U, 0x7A79U, 0x7A7AU, 0x7A30U, 0x7A31U, 0x7A32U, 0x7A33U, + 0x7A34U, 0x7A35U, 0x7A36U, 0x7A37U, 0x7A38U, 0x7A39U, 0x7A2BU, 0x7A2FU, + 0x3041U, 0x3042U, 0x3043U, 0x3044U, 0x3045U, 0x3046U, 0x3047U, 0x3048U, + 0x3049U, 0x304AU, 0x304BU, 0x304CU, 0x304DU, 0x304EU, 0x304FU, 0x3050U, + 0x3051U, 0x3052U, 0x3053U, 0x3054U, 0x3055U, 0x3056U, 0x3057U, 0x3058U, + 0x3059U, 0x305AU, 0x3061U, 0x3062U, 0x3063U, 0x3064U, 0x3065U, 0x3066U, + 0x3067U, 0x3068U, 0x3069U, 0x306AU, 0x306BU, 0x306CU, 0x306DU, 0x306EU, + 0x306FU, 0x3070U, 0x3071U, 0x3072U, 0x3073U, 0x3074U, 0x3075U, 0x3076U, + 0x3077U, 0x3078U, 0x3079U, 0x307AU, 0x3030U, 0x3031U, 0x3032U, 0x3033U, + 0x3034U, 0x3035U, 0x3036U, 0x3037U, 0x3038U, 0x3039U, 0x302BU, 0x302FU, + 0x3141U, 0x3142U, 
0x3143U, 0x3144U, 0x3145U, 0x3146U, 0x3147U, 0x3148U, + 0x3149U, 0x314AU, 0x314BU, 0x314CU, 0x314DU, 0x314EU, 0x314FU, 0x3150U, + 0x3151U, 0x3152U, 0x3153U, 0x3154U, 0x3155U, 0x3156U, 0x3157U, 0x3158U, + 0x3159U, 0x315AU, 0x3161U, 0x3162U, 0x3163U, 0x3164U, 0x3165U, 0x3166U, + 0x3167U, 0x3168U, 0x3169U, 0x316AU, 0x316BU, 0x316CU, 0x316DU, 0x316EU, + 0x316FU, 0x3170U, 0x3171U, 0x3172U, 0x3173U, 0x3174U, 0x3175U, 0x3176U, + 0x3177U, 0x3178U, 0x3179U, 0x317AU, 0x3130U, 0x3131U, 0x3132U, 0x3133U, + 0x3134U, 0x3135U, 0x3136U, 0x3137U, 0x3138U, 0x3139U, 0x312BU, 0x312FU, + 0x3241U, 0x3242U, 0x3243U, 0x3244U, 0x3245U, 0x3246U, 0x3247U, 0x3248U, + 0x3249U, 0x324AU, 0x324BU, 0x324CU, 0x324DU, 0x324EU, 0x324FU, 0x3250U, + 0x3251U, 0x3252U, 0x3253U, 0x3254U, 0x3255U, 0x3256U, 0x3257U, 0x3258U, + 0x3259U, 0x325AU, 0x3261U, 0x3262U, 0x3263U, 0x3264U, 0x3265U, 0x3266U, + 0x3267U, 0x3268U, 0x3269U, 0x326AU, 0x326BU, 0x326CU, 0x326DU, 0x326EU, + 0x326FU, 0x3270U, 0x3271U, 0x3272U, 0x3273U, 0x3274U, 0x3275U, 0x3276U, + 0x3277U, 0x3278U, 0x3279U, 0x327AU, 0x3230U, 0x3231U, 0x3232U, 0x3233U, + 0x3234U, 0x3235U, 0x3236U, 0x3237U, 0x3238U, 0x3239U, 0x322BU, 0x322FU, + 0x3341U, 0x3342U, 0x3343U, 0x3344U, 0x3345U, 0x3346U, 0x3347U, 0x3348U, + 0x3349U, 0x334AU, 0x334BU, 0x334CU, 0x334DU, 0x334EU, 0x334FU, 0x3350U, + 0x3351U, 0x3352U, 0x3353U, 0x3354U, 0x3355U, 0x3356U, 0x3357U, 0x3358U, + 0x3359U, 0x335AU, 0x3361U, 0x3362U, 0x3363U, 0x3364U, 0x3365U, 0x3366U, + 0x3367U, 0x3368U, 0x3369U, 0x336AU, 0x336BU, 0x336CU, 0x336DU, 0x336EU, + 0x336FU, 0x3370U, 0x3371U, 0x3372U, 0x3373U, 0x3374U, 0x3375U, 0x3376U, + 0x3377U, 0x3378U, 0x3379U, 0x337AU, 0x3330U, 0x3331U, 0x3332U, 0x3333U, + 0x3334U, 0x3335U, 0x3336U, 0x3337U, 0x3338U, 0x3339U, 0x332BU, 0x332FU, + 0x3441U, 0x3442U, 0x3443U, 0x3444U, 0x3445U, 0x3446U, 0x3447U, 0x3448U, + 0x3449U, 0x344AU, 0x344BU, 0x344CU, 0x344DU, 0x344EU, 0x344FU, 0x3450U, + 0x3451U, 0x3452U, 0x3453U, 0x3454U, 0x3455U, 0x3456U, 0x3457U, 0x3458U, + 0x3459U, 0x345AU, 0x3461U, 0x3462U, 0x3463U, 0x3464U, 0x3465U, 0x3466U, + 0x3467U, 0x3468U, 0x3469U, 0x346AU, 0x346BU, 0x346CU, 0x346DU, 0x346EU, + 0x346FU, 0x3470U, 0x3471U, 0x3472U, 0x3473U, 0x3474U, 0x3475U, 0x3476U, + 0x3477U, 0x3478U, 0x3479U, 0x347AU, 0x3430U, 0x3431U, 0x3432U, 0x3433U, + 0x3434U, 0x3435U, 0x3436U, 0x3437U, 0x3438U, 0x3439U, 0x342BU, 0x342FU, + 0x3541U, 0x3542U, 0x3543U, 0x3544U, 0x3545U, 0x3546U, 0x3547U, 0x3548U, + 0x3549U, 0x354AU, 0x354BU, 0x354CU, 0x354DU, 0x354EU, 0x354FU, 0x3550U, + 0x3551U, 0x3552U, 0x3553U, 0x3554U, 0x3555U, 0x3556U, 0x3557U, 0x3558U, + 0x3559U, 0x355AU, 0x3561U, 0x3562U, 0x3563U, 0x3564U, 0x3565U, 0x3566U, + 0x3567U, 0x3568U, 0x3569U, 0x356AU, 0x356BU, 0x356CU, 0x356DU, 0x356EU, + 0x356FU, 0x3570U, 0x3571U, 0x3572U, 0x3573U, 0x3574U, 0x3575U, 0x3576U, + 0x3577U, 0x3578U, 0x3579U, 0x357AU, 0x3530U, 0x3531U, 0x3532U, 0x3533U, + 0x3534U, 0x3535U, 0x3536U, 0x3537U, 0x3538U, 0x3539U, 0x352BU, 0x352FU, + 0x3641U, 0x3642U, 0x3643U, 0x3644U, 0x3645U, 0x3646U, 0x3647U, 0x3648U, + 0x3649U, 0x364AU, 0x364BU, 0x364CU, 0x364DU, 0x364EU, 0x364FU, 0x3650U, + 0x3651U, 0x3652U, 0x3653U, 0x3654U, 0x3655U, 0x3656U, 0x3657U, 0x3658U, + 0x3659U, 0x365AU, 0x3661U, 0x3662U, 0x3663U, 0x3664U, 0x3665U, 0x3666U, + 0x3667U, 0x3668U, 0x3669U, 0x366AU, 0x366BU, 0x366CU, 0x366DU, 0x366EU, + 0x366FU, 0x3670U, 0x3671U, 0x3672U, 0x3673U, 0x3674U, 0x3675U, 0x3676U, + 0x3677U, 0x3678U, 0x3679U, 0x367AU, 0x3630U, 0x3631U, 0x3632U, 0x3633U, + 0x3634U, 0x3635U, 0x3636U, 0x3637U, 0x3638U, 0x3639U, 0x362BU, 0x362FU, + 0x3741U, 0x3742U, 
0x3743U, 0x3744U, 0x3745U, 0x3746U, 0x3747U, 0x3748U, + 0x3749U, 0x374AU, 0x374BU, 0x374CU, 0x374DU, 0x374EU, 0x374FU, 0x3750U, + 0x3751U, 0x3752U, 0x3753U, 0x3754U, 0x3755U, 0x3756U, 0x3757U, 0x3758U, + 0x3759U, 0x375AU, 0x3761U, 0x3762U, 0x3763U, 0x3764U, 0x3765U, 0x3766U, + 0x3767U, 0x3768U, 0x3769U, 0x376AU, 0x376BU, 0x376CU, 0x376DU, 0x376EU, + 0x376FU, 0x3770U, 0x3771U, 0x3772U, 0x3773U, 0x3774U, 0x3775U, 0x3776U, + 0x3777U, 0x3778U, 0x3779U, 0x377AU, 0x3730U, 0x3731U, 0x3732U, 0x3733U, + 0x3734U, 0x3735U, 0x3736U, 0x3737U, 0x3738U, 0x3739U, 0x372BU, 0x372FU, + 0x3841U, 0x3842U, 0x3843U, 0x3844U, 0x3845U, 0x3846U, 0x3847U, 0x3848U, + 0x3849U, 0x384AU, 0x384BU, 0x384CU, 0x384DU, 0x384EU, 0x384FU, 0x3850U, + 0x3851U, 0x3852U, 0x3853U, 0x3854U, 0x3855U, 0x3856U, 0x3857U, 0x3858U, + 0x3859U, 0x385AU, 0x3861U, 0x3862U, 0x3863U, 0x3864U, 0x3865U, 0x3866U, + 0x3867U, 0x3868U, 0x3869U, 0x386AU, 0x386BU, 0x386CU, 0x386DU, 0x386EU, + 0x386FU, 0x3870U, 0x3871U, 0x3872U, 0x3873U, 0x3874U, 0x3875U, 0x3876U, + 0x3877U, 0x3878U, 0x3879U, 0x387AU, 0x3830U, 0x3831U, 0x3832U, 0x3833U, + 0x3834U, 0x3835U, 0x3836U, 0x3837U, 0x3838U, 0x3839U, 0x382BU, 0x382FU, + 0x3941U, 0x3942U, 0x3943U, 0x3944U, 0x3945U, 0x3946U, 0x3947U, 0x3948U, + 0x3949U, 0x394AU, 0x394BU, 0x394CU, 0x394DU, 0x394EU, 0x394FU, 0x3950U, + 0x3951U, 0x3952U, 0x3953U, 0x3954U, 0x3955U, 0x3956U, 0x3957U, 0x3958U, + 0x3959U, 0x395AU, 0x3961U, 0x3962U, 0x3963U, 0x3964U, 0x3965U, 0x3966U, + 0x3967U, 0x3968U, 0x3969U, 0x396AU, 0x396BU, 0x396CU, 0x396DU, 0x396EU, + 0x396FU, 0x3970U, 0x3971U, 0x3972U, 0x3973U, 0x3974U, 0x3975U, 0x3976U, + 0x3977U, 0x3978U, 0x3979U, 0x397AU, 0x3930U, 0x3931U, 0x3932U, 0x3933U, + 0x3934U, 0x3935U, 0x3936U, 0x3937U, 0x3938U, 0x3939U, 0x392BU, 0x392FU, + 0x2B41U, 0x2B42U, 0x2B43U, 0x2B44U, 0x2B45U, 0x2B46U, 0x2B47U, 0x2B48U, + 0x2B49U, 0x2B4AU, 0x2B4BU, 0x2B4CU, 0x2B4DU, 0x2B4EU, 0x2B4FU, 0x2B50U, + 0x2B51U, 0x2B52U, 0x2B53U, 0x2B54U, 0x2B55U, 0x2B56U, 0x2B57U, 0x2B58U, + 0x2B59U, 0x2B5AU, 0x2B61U, 0x2B62U, 0x2B63U, 0x2B64U, 0x2B65U, 0x2B66U, + 0x2B67U, 0x2B68U, 0x2B69U, 0x2B6AU, 0x2B6BU, 0x2B6CU, 0x2B6DU, 0x2B6EU, + 0x2B6FU, 0x2B70U, 0x2B71U, 0x2B72U, 0x2B73U, 0x2B74U, 0x2B75U, 0x2B76U, + 0x2B77U, 0x2B78U, 0x2B79U, 0x2B7AU, 0x2B30U, 0x2B31U, 0x2B32U, 0x2B33U, + 0x2B34U, 0x2B35U, 0x2B36U, 0x2B37U, 0x2B38U, 0x2B39U, 0x2B2BU, 0x2B2FU, + 0x2F41U, 0x2F42U, 0x2F43U, 0x2F44U, 0x2F45U, 0x2F46U, 0x2F47U, 0x2F48U, + 0x2F49U, 0x2F4AU, 0x2F4BU, 0x2F4CU, 0x2F4DU, 0x2F4EU, 0x2F4FU, 0x2F50U, + 0x2F51U, 0x2F52U, 0x2F53U, 0x2F54U, 0x2F55U, 0x2F56U, 0x2F57U, 0x2F58U, + 0x2F59U, 0x2F5AU, 0x2F61U, 0x2F62U, 0x2F63U, 0x2F64U, 0x2F65U, 0x2F66U, + 0x2F67U, 0x2F68U, 0x2F69U, 0x2F6AU, 0x2F6BU, 0x2F6CU, 0x2F6DU, 0x2F6EU, + 0x2F6FU, 0x2F70U, 0x2F71U, 0x2F72U, 0x2F73U, 0x2F74U, 0x2F75U, 0x2F76U, + 0x2F77U, 0x2F78U, 0x2F79U, 0x2F7AU, 0x2F30U, 0x2F31U, 0x2F32U, 0x2F33U, + 0x2F34U, 0x2F35U, 0x2F36U, 0x2F37U, 0x2F38U, 0x2F39U, 0x2F2BU, 0x2F2FU, +#endif +}; diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/tables/tables.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/tables/tables.c new file mode 100644 index 0000000..45778b6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/tables/tables.c @@ -0,0 +1,40 @@ +#include "tables.h" + +const uint8_t +base64_table_enc_6bit[] = + "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + "abcdefghijklmnopqrstuvwxyz" + "0123456789" + "+/"; + +// In the lookup table below, note that the value for '=' (character 61) is +// 254, not 255. 
This character is used for in-band signaling of the end of +// the datastream, and we will use that later. The characters A-Z, a-z, 0-9 +// and + / are mapped to their "decoded" values. The other bytes all map to +// the value 255, which flags them as "invalid input". + +const uint8_t +base64_table_dec_8bit[] = +{ + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, // 0..15 + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, // 16..31 + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 62, 255, 255, 255, 63, // 32..47 + 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 255, 255, 255, 254, 255, 255, // 48..63 + 255, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, // 64..79 + 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 255, 255, 255, 255, 255, // 80..95 + 255, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, // 96..111 + 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 255, 255, 255, 255, 255, // 112..127 + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, // 128..143 + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, +}; + +#if BASE64_WORDSIZE >= 32 +# include "table_dec_32bit.h" +# include "table_enc_12bit.h" +#endif diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/tables/tables.h b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/tables/tables.h new file mode 100644 index 0000000..cb74268 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/base64/tables/tables.h @@ -0,0 +1,23 @@ +#ifndef BASE64_TABLES_H +#define BASE64_TABLES_H + +#include + +#include "../env.h" + +// These tables are used by all codecs for fallback plain encoding/decoding: +extern const uint8_t base64_table_enc_6bit[]; +extern const uint8_t base64_table_dec_8bit[]; + +// These tables are used for the 32-bit and 64-bit generic decoders: +#if BASE64_WORDSIZE >= 32 +extern const uint32_t base64_table_dec_32bit_d0[]; +extern const uint32_t base64_table_dec_32bit_d1[]; +extern const uint32_t base64_table_dec_32bit_d2[]; +extern const uint32_t base64_table_dec_32bit_d3[]; + +// This table is used by the 32 and 64-bit generic encoders: +extern const uint16_t base64_table_enc_12bit[]; +#endif + +#endif // BASE64_TABLES_H diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/bytes_ops.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/bytes_ops.c new file mode 100644 index 0000000..6ff34b0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/bytes_ops.c @@ -0,0 +1,164 @@ +// Bytes primitive operations +// +// These are registered in mypyc.primitives.bytes_ops. + +#include +#include "CPy.h" + +// Returns -1 on error, 0 on inequality, 1 on equality. +// +// Falls back to PyObject_RichCompareBool. +int CPyBytes_Compare(PyObject *left, PyObject *right) { + if (PyBytes_CheckExact(left) && PyBytes_CheckExact(right)) { + if (left == right) { + return 1; + } + + // Adapted from cpython internal implementation of bytes_compare. 
+ Py_ssize_t len = Py_SIZE(left); + if (Py_SIZE(right) != len) { + return 0; + } + PyBytesObject *left_b = (PyBytesObject *)left; + PyBytesObject *right_b = (PyBytesObject *)right; + if (left_b->ob_sval[0] != right_b->ob_sval[0]) { + return 0; + } + + return memcmp(left_b->ob_sval, right_b->ob_sval, len) == 0; + } + return PyObject_RichCompareBool(left, right, Py_EQ); +} + +CPyTagged CPyBytes_GetItem(PyObject *o, CPyTagged index) { + if (CPyTagged_CheckShort(index)) { + Py_ssize_t n = CPyTagged_ShortAsSsize_t(index); + Py_ssize_t size = ((PyVarObject *)o)->ob_size; + if (n < 0) + n += size; + if (n < 0 || n >= size) { + PyErr_SetString(PyExc_IndexError, "index out of range"); + return CPY_INT_TAG; + } + unsigned char num = PyBytes_Check(o) ? ((PyBytesObject *)o)->ob_sval[n] + : ((PyByteArrayObject *)o)->ob_bytes[n]; + return num << 1; + } else { + PyErr_SetString(PyExc_OverflowError, CPYTHON_LARGE_INT_ERRMSG); + return CPY_INT_TAG; + } +} + +PyObject *CPyBytes_Concat(PyObject *a, PyObject *b) { + if (PyBytes_Check(a) && PyBytes_Check(b)) { + Py_ssize_t a_len = ((PyVarObject *)a)->ob_size; + Py_ssize_t b_len = ((PyVarObject *)b)->ob_size; + PyBytesObject *ret = (PyBytesObject *)PyBytes_FromStringAndSize(NULL, a_len + b_len); + if (ret != NULL) { + memcpy(ret->ob_sval, ((PyBytesObject *)a)->ob_sval, a_len); + memcpy(ret->ob_sval + a_len, ((PyBytesObject *)b)->ob_sval, b_len); + } + return (PyObject *)ret; + } else if (PyByteArray_Check(a)) { + return PyByteArray_Concat(a, b); + } else { + PyBytes_Concat(&a, b); + return a; + } +} + +static inline Py_ssize_t Clamp(Py_ssize_t a, Py_ssize_t b, Py_ssize_t c) { + return a < b ? b : (a >= c ? c : a); +} + +PyObject *CPyBytes_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end) { + if ((PyBytes_Check(obj) || PyByteArray_Check(obj)) + && CPyTagged_CheckShort(start) && CPyTagged_CheckShort(end)) { + Py_ssize_t startn = CPyTagged_ShortAsSsize_t(start); + Py_ssize_t endn = CPyTagged_ShortAsSsize_t(end); + Py_ssize_t len = ((PyVarObject *)obj)->ob_size; + if (startn < 0) { + startn += len; + } + if (endn < 0) { + endn += len; + } + startn = Clamp(startn, 0, len); + endn = Clamp(endn, 0, len); + Py_ssize_t slice_len = endn - startn; + if (PyBytes_Check(obj)) { + return PyBytes_FromStringAndSize(PyBytes_AS_STRING(obj) + startn, slice_len); + } else { + return PyByteArray_FromStringAndSize(PyByteArray_AS_STRING(obj) + startn, slice_len); + } + } + return CPyObject_GetSlice(obj, start, end); +} + +// Like _PyBytes_Join but fallback to dynamic call if 'sep' is not bytes +// (mostly commonly, for bytearrays) +PyObject *CPyBytes_Join(PyObject *sep, PyObject *iter) { + if (PyBytes_CheckExact(sep)) { + return PyBytes_Join(sep, iter); + } else { + _Py_IDENTIFIER(join); + PyObject *name = _PyUnicode_FromId(&PyId_join); /* borrowed */ + if (name == NULL) { + return NULL; + } + return PyObject_CallMethodOneArg(sep, name, iter); + } +} + +PyObject *CPyBytes_Build(Py_ssize_t len, ...) 
{ + Py_ssize_t i; + Py_ssize_t sz = 0; + + va_list args; + va_start(args, len); + for (i = 0; i < len; i++) { + PyObject *item = va_arg(args, PyObject *); + size_t add_sz = ((PyVarObject *)item)->ob_size; + // Using size_t to avoid overflow during arithmetic calculation + if (add_sz > (size_t)(PY_SSIZE_T_MAX - sz)) { + PyErr_SetString(PyExc_OverflowError, + "join() result is too long for a Python bytes"); + return NULL; + } + sz += add_sz; + } + va_end(args); + + PyBytesObject *ret = (PyBytesObject *)PyBytes_FromStringAndSize(NULL, sz); + if (ret != NULL) { + char *res_data = ret->ob_sval; + va_start(args, len); + for (i = 0; i < len; i++) { + PyObject *item = va_arg(args, PyObject *); + Py_ssize_t item_sz = ((PyVarObject *)item)->ob_size; + memcpy(res_data, ((PyBytesObject *)item)->ob_sval, item_sz); + res_data += item_sz; + } + va_end(args); + assert(res_data == ret->ob_sval + ((PyVarObject *)ret)->ob_size); + } + + return (PyObject *)ret; +} + + +CPyTagged CPyBytes_Ord(PyObject *obj) { + if (PyBytes_Check(obj)) { + Py_ssize_t s = PyBytes_GET_SIZE(obj); + if (s == 1) { + return (unsigned char)(PyBytes_AS_STRING(obj)[0]) << 1; + } + } else if (PyByteArray_Check(obj)) { + Py_ssize_t s = PyByteArray_GET_SIZE(obj); + if (s == 1) { + return (unsigned char)(PyByteArray_AS_STRING(obj)[0]) << 1; + } + } + PyErr_SetString(PyExc_TypeError, "ord() expects a character"); + return CPY_INT_TAG; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/dict_ops.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/dict_ops.c new file mode 100644 index 0000000..b102aba --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/dict_ops.c @@ -0,0 +1,491 @@ +// Dict primitive operations +// +// These are registered in mypyc.primitives.dict_ops. + +#include +#include "CPy.h" + +#ifndef Py_TPFLAGS_MAPPING +#define Py_TPFLAGS_MAPPING (1 << 6) +#endif + +// Dict subclasses like defaultdict override things in interesting +// ways, so we don't want to just directly use the dict methods. Not +// sure if it is actually worth doing all this stuff, but it saves +// some indirections. +PyObject *CPyDict_GetItem(PyObject *dict, PyObject *key) { + if (PyDict_CheckExact(dict)) { + PyObject *res = PyDict_GetItemWithError(dict, key); + if (!res) { + if (!PyErr_Occurred()) { + PyErr_SetObject(PyExc_KeyError, key); + } + } else { + Py_INCREF(res); + } + return res; + } else { + return PyObject_GetItem(dict, key); + } +} + +PyObject *CPyDict_Build(Py_ssize_t size, ...) { + Py_ssize_t i; + + PyObject *res = _PyDict_NewPresized(size); + if (res == NULL) { + return NULL; + } + + va_list args; + va_start(args, size); + + for (i = 0; i < size; i++) { + PyObject *key = va_arg(args, PyObject *); + PyObject *value = va_arg(args, PyObject *); + if (PyDict_SetItem(res, key, value)) { + Py_DECREF(res); + return NULL; + } + } + + va_end(args); + return res; +} + +PyObject *CPyDict_Get(PyObject *dict, PyObject *key, PyObject *fallback) { + // We are dodgily assuming that get on a subclass doesn't have + // different behavior. 
+ PyObject *res = PyDict_GetItemWithError(dict, key); + if (!res) { + if (PyErr_Occurred()) { + return NULL; + } + res = fallback; + } + Py_INCREF(res); + return res; +} + +PyObject *CPyDict_GetWithNone(PyObject *dict, PyObject *key) { + return CPyDict_Get(dict, key, Py_None); +} + +PyObject *CPyDict_SetDefault(PyObject *dict, PyObject *key, PyObject *value) { + if (PyDict_CheckExact(dict)) { + PyObject* ret = PyDict_SetDefault(dict, key, value); + Py_XINCREF(ret); + return ret; + } + _Py_IDENTIFIER(setdefault); + PyObject *name = _PyUnicode_FromId(&PyId_setdefault); /* borrowed */ + if (name == NULL) { + return NULL; + } + return PyObject_CallMethodObjArgs(dict, name, key, value, NULL); +} + +PyObject *CPyDict_SetDefaultWithNone(PyObject *dict, PyObject *key) { + return CPyDict_SetDefault(dict, key, Py_None); +} + +PyObject *CPyDict_SetDefaultWithEmptyDatatype(PyObject *dict, PyObject *key, + int data_type) { + PyObject *res = CPyDict_GetItem(dict, key); + if (!res) { + // CPyDict_GetItem() would generates a PyExc_KeyError + // when key is not found. + PyErr_Clear(); + + PyObject *new_obj; + if (data_type == 1) { + new_obj = PyList_New(0); + } else if (data_type == 2) { + new_obj = PyDict_New(); + } else if (data_type == 3) { + new_obj = PySet_New(NULL); + } else { + return NULL; + } + + if (CPyDict_SetItem(dict, key, new_obj) == -1) { + return NULL; + } else { + return new_obj; + } + } else { + return res; + } +} + +int CPyDict_SetItem(PyObject *dict, PyObject *key, PyObject *value) { + if (PyDict_CheckExact(dict)) { + return PyDict_SetItem(dict, key, value); + } else { + return PyObject_SetItem(dict, key, value); + } +} + +static inline int CPy_ObjectToStatus(PyObject *obj) { + if (obj) { + Py_DECREF(obj); + return 0; + } else { + return -1; + } +} + +static int CPyDict_UpdateGeneral(PyObject *dict, PyObject *stuff) { + _Py_IDENTIFIER(update); + PyObject *name = _PyUnicode_FromId(&PyId_update); /* borrowed */ + if (name == NULL) { + return -1; + } + PyObject *res = PyObject_CallMethodOneArg(dict, name, stuff); + return CPy_ObjectToStatus(res); +} + +int CPyDict_UpdateInDisplay(PyObject *dict, PyObject *stuff) { + // from https://github.com/python/cpython/blob/55d035113dfb1bd90495c8571758f504ae8d4802/Python/ceval.c#L2710 + int ret = PyDict_Update(dict, stuff); + if (ret < 0) { + if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Format(PyExc_TypeError, + "'%.200s' object is not a mapping", + Py_TYPE(stuff)->tp_name); + } + } + return ret; +} + +int CPyDict_Update(PyObject *dict, PyObject *stuff) { + if (PyDict_CheckExact(dict)) { + return PyDict_Update(dict, stuff); + } else { + return CPyDict_UpdateGeneral(dict, stuff); + } +} + +int CPyDict_UpdateFromAny(PyObject *dict, PyObject *stuff) { + if (PyDict_CheckExact(dict)) { + // Argh this sucks + _Py_IDENTIFIER(keys); + if (PyDict_Check(stuff) || _CPyObject_HasAttrId(stuff, &PyId_keys)) { + return PyDict_Update(dict, stuff); + } else { + return PyDict_MergeFromSeq2(dict, stuff, 1); + } + } else { + return CPyDict_UpdateGeneral(dict, stuff); + } +} + +PyObject *CPyDict_FromAny(PyObject *obj) { + if (PyDict_Check(obj)) { + return PyDict_Copy(obj); + } else { + int res; + PyObject *dict = PyDict_New(); + if (!dict) { + return NULL; + } + _Py_IDENTIFIER(keys); + if (_CPyObject_HasAttrId(obj, &PyId_keys)) { + res = PyDict_Update(dict, obj); + } else { + res = PyDict_MergeFromSeq2(dict, obj, 1); + } + if (res < 0) { + Py_DECREF(dict); + return NULL; + } + return dict; + } +} + +PyObject *CPyDict_KeysView(PyObject *dict) { + if 
(PyDict_CheckExact(dict)){ + return _CPyDictView_New(dict, &PyDictKeys_Type); + } + _Py_IDENTIFIER(keys); + PyObject *name = _PyUnicode_FromId(&PyId_keys); /* borrowed */ + if (name == NULL) { + return NULL; + } + return PyObject_CallMethodNoArgs(dict, name); +} + +PyObject *CPyDict_ValuesView(PyObject *dict) { + if (PyDict_CheckExact(dict)){ + return _CPyDictView_New(dict, &PyDictValues_Type); + } + _Py_IDENTIFIER(values); + PyObject *name = _PyUnicode_FromId(&PyId_values); /* borrowed */ + if (name == NULL) { + return NULL; + } + return PyObject_CallMethodNoArgs(dict, name); +} + +PyObject *CPyDict_ItemsView(PyObject *dict) { + if (PyDict_CheckExact(dict)){ + return _CPyDictView_New(dict, &PyDictItems_Type); + } + _Py_IDENTIFIER(items); + PyObject *name = _PyUnicode_FromId(&PyId_items); /* borrowed */ + if (name == NULL) { + return NULL; + } + return PyObject_CallMethodNoArgs(dict, name); +} + +PyObject *CPyDict_Keys(PyObject *dict) { + if (PyDict_CheckExact(dict)) { + return PyDict_Keys(dict); + } + // Inline generic fallback logic to also return a list. + PyObject *list = PyList_New(0); + _Py_IDENTIFIER(keys); + PyObject *name = _PyUnicode_FromId(&PyId_keys); /* borrowed */ + if (name == NULL) { + return NULL; + } + PyObject *view = PyObject_CallMethodNoArgs(dict, name); + if (view == NULL) { + return NULL; + } + int res = PyList_Extend(list, view); + Py_DECREF(view); + if (res < 0) { + return NULL; + } + return list; +} + +PyObject *CPyDict_Values(PyObject *dict) { + if (PyDict_CheckExact(dict)) { + return PyDict_Values(dict); + } + // Inline generic fallback logic to also return a list. + PyObject *list = PyList_New(0); + _Py_IDENTIFIER(values); + PyObject *name = _PyUnicode_FromId(&PyId_values); /* borrowed */ + if (name == NULL) { + return NULL; + } + PyObject *view = PyObject_CallMethodNoArgs(dict, name); + if (view == NULL) { + return NULL; + } + int res = PyList_Extend(list, view); + Py_DECREF(view); + if (res < 0) { + return NULL; + } + return list; +} + +PyObject *CPyDict_Items(PyObject *dict) { + if (PyDict_CheckExact(dict)) { + return PyDict_Items(dict); + } + // Inline generic fallback logic to also return a list. + PyObject *list = PyList_New(0); + _Py_IDENTIFIER(items); + PyObject *name = _PyUnicode_FromId(&PyId_items); /* borrowed */ + if (name == NULL) { + return NULL; + } + PyObject *view = PyObject_CallMethodNoArgs(dict, name); + if (view == NULL) { + return NULL; + } + int res = PyList_Extend(list, view); + Py_DECREF(view); + if (res < 0) { + return NULL; + } + return list; +} + +char CPyDict_Clear(PyObject *dict) { + if (PyDict_CheckExact(dict)) { + PyDict_Clear(dict); + } else { + _Py_IDENTIFIER(clear); + PyObject *name = _PyUnicode_FromId(&PyId_clear); /* borrowed */ + if (name == NULL) { + return 0; + } + PyObject *res = PyObject_CallMethodNoArgs(dict, name); + if (res == NULL) { + return 0; + } + } + return 1; +} + +PyObject *CPyDict_Copy(PyObject *dict) { + if (PyDict_CheckExact(dict)) { + return PyDict_Copy(dict); + } + _Py_IDENTIFIER(copy); + PyObject *name = _PyUnicode_FromId(&PyId_copy); /* borrowed */ + if (name == NULL) { + return NULL; + } + return PyObject_CallMethodNoArgs(dict, name); +} + +PyObject *CPyDict_GetKeysIter(PyObject *dict) { + if (PyDict_CheckExact(dict)) { + // Return dict itself to indicate we can use fast path instead. 
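+        // Editorial sketch, added for clarity (not part of the original mypyc
+        // source): the object returned from this fast path is consumed together
+        // with CPyDict_NextKey() further below. Roughly, a driver loop looks
+        // like this (use_key is a made-up placeholder):
+        //
+        //   PyObject *iter = CPyDict_GetKeysIter(d);
+        //   CPyTagged offset = 0;
+        //   for (;;) {
+        //       tuple_T3CIO cur = CPyDict_NextKey(iter, offset);
+        //       if (!cur.f0) break;      // no more items (or iterator error)
+        //       offset = cur.f1;         // feed the updated offset back in
+        //       use_key(cur.f2);         // cur.f2 is a new reference
+        //       Py_DECREF(cur.f2);
+        //   }
+        //   Py_DECREF(iter);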
+ Py_INCREF(dict); + return dict; + } + return PyObject_GetIter(dict); +} + +PyObject *CPyDict_GetItemsIter(PyObject *dict) { + if (PyDict_CheckExact(dict)) { + // Return dict itself to indicate we can use fast path instead. + Py_INCREF(dict); + return dict; + } + _Py_IDENTIFIER(items); + PyObject *name = _PyUnicode_FromId(&PyId_items); /* borrowed */ + if (name == NULL) { + return NULL; + } + PyObject *view = PyObject_CallMethodNoArgs(dict, name); + if (view == NULL) { + return NULL; + } + PyObject *iter = PyObject_GetIter(view); + Py_DECREF(view); + return iter; +} + +PyObject *CPyDict_GetValuesIter(PyObject *dict) { + if (PyDict_CheckExact(dict)) { + // Return dict itself to indicate we can use fast path instead. + Py_INCREF(dict); + return dict; + } + _Py_IDENTIFIER(values); + PyObject *name = _PyUnicode_FromId(&PyId_values); /* borrowed */ + if (name == NULL) { + return NULL; + } + PyObject *view = PyObject_CallMethodNoArgs(dict, name); + if (view == NULL) { + return NULL; + } + PyObject *iter = PyObject_GetIter(view); + Py_DECREF(view); + return iter; +} + +static void _CPyDict_FromNext(tuple_T3CIO *ret, PyObject *dict_iter) { + // Get next item from iterator and set "should continue" flag. + ret->f2 = PyIter_Next(dict_iter); + if (ret->f2 == NULL) { + ret->f0 = 0; + Py_INCREF(Py_None); + ret->f2 = Py_None; + } else { + ret->f0 = 1; + } +} + +// Helpers for fast dictionary iteration, return a single tuple +// instead of writing to multiple registers, for exact dicts use +// the fast path, and fall back to generic iterator logic for subclasses. +tuple_T3CIO CPyDict_NextKey(PyObject *dict_or_iter, CPyTagged offset) { + tuple_T3CIO ret; + Py_ssize_t py_offset = CPyTagged_AsSsize_t(offset); + PyObject *dummy; + + if (PyDict_CheckExact(dict_or_iter)) { + ret.f0 = PyDict_Next(dict_or_iter, &py_offset, &ret.f2, &dummy); + if (ret.f0) { + ret.f1 = CPyTagged_FromSsize_t(py_offset); + } else { + // Set key to None, so mypyc can manage refcounts. + ret.f1 = 0; + ret.f2 = Py_None; + } + // PyDict_Next() returns borrowed references. + Py_INCREF(ret.f2); + } else { + // offset is dummy in this case, just use the old value. + ret.f1 = offset; + _CPyDict_FromNext(&ret, dict_or_iter); + } + return ret; +} + +tuple_T3CIO CPyDict_NextValue(PyObject *dict_or_iter, CPyTagged offset) { + tuple_T3CIO ret; + Py_ssize_t py_offset = CPyTagged_AsSsize_t(offset); + PyObject *dummy; + + if (PyDict_CheckExact(dict_or_iter)) { + ret.f0 = PyDict_Next(dict_or_iter, &py_offset, &dummy, &ret.f2); + if (ret.f0) { + ret.f1 = CPyTagged_FromSsize_t(py_offset); + } else { + // Set value to None, so mypyc can manage refcounts. + ret.f1 = 0; + ret.f2 = Py_None; + } + // PyDict_Next() returns borrowed references. + Py_INCREF(ret.f2); + } else { + // offset is dummy in this case, just use the old value. + ret.f1 = offset; + _CPyDict_FromNext(&ret, dict_or_iter); + } + return ret; +} + +tuple_T4CIOO CPyDict_NextItem(PyObject *dict_or_iter, CPyTagged offset) { + tuple_T4CIOO ret; + Py_ssize_t py_offset = CPyTagged_AsSsize_t(offset); + + if (PyDict_CheckExact(dict_or_iter)) { + ret.f0 = PyDict_Next(dict_or_iter, &py_offset, &ret.f2, &ret.f3); + if (ret.f0) { + ret.f1 = CPyTagged_FromSsize_t(py_offset); + } else { + // Set key and value to None, so mypyc can manage refcounts. 
+ ret.f1 = 0; + ret.f2 = Py_None; + ret.f3 = Py_None; + } + } else { + ret.f1 = offset; + PyObject *item = PyIter_Next(dict_or_iter); + if (item == NULL || !PyTuple_Check(item) || PyTuple_GET_SIZE(item) != 2) { + if (item != NULL) { + PyErr_SetString(PyExc_TypeError, "a tuple of length 2 expected"); + } + ret.f0 = 0; + ret.f2 = Py_None; + ret.f3 = Py_None; + } else { + ret.f0 = 1; + ret.f2 = PyTuple_GET_ITEM(item, 0); + ret.f3 = PyTuple_GET_ITEM(item, 1); + Py_DECREF(item); + } + } + // PyDict_Next() returns borrowed references. + Py_INCREF(ret.f2); + Py_INCREF(ret.f3); + return ret; +} + +int CPyMapping_Check(PyObject *obj) { + return Py_TYPE(obj)->tp_flags & Py_TPFLAGS_MAPPING; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/exc_ops.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/exc_ops.c new file mode 100644 index 0000000..8549842 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/exc_ops.c @@ -0,0 +1,261 @@ +#include "pythoncapi_compat.h" + +// Exception related primitive operations +// +// These are registered in mypyc.primitives.exc_ops. + +#include +#include "CPy.h" + +void CPy_Raise(PyObject *exc) { + if (PyObject_IsInstance(exc, (PyObject *)&PyType_Type)) { + PyObject *obj = PyObject_CallNoArgs(exc); + if (!obj) + return; + PyErr_SetObject(exc, obj); + Py_DECREF(obj); + } else { + PyErr_SetObject((PyObject *)Py_TYPE(exc), exc); + } +} + +void CPy_Reraise(void) { + PyObject *p_type, *p_value, *p_traceback; + PyErr_GetExcInfo(&p_type, &p_value, &p_traceback); + PyErr_Restore(p_type, p_value, p_traceback); +} + +void CPyErr_SetObjectAndTraceback(PyObject *type, PyObject *value, PyObject *traceback) { + if (!PyType_Check(type) && Py_IsNone(value)) { + // The first argument must be an exception instance + value = type; + type = (PyObject *)Py_TYPE(value); + } + + // Set the value and traceback of an error. Because calling + // PyErr_Restore takes away a reference to each object passed in + // as an argument, we manually increase the reference count of + // each argument before calling it. + Py_INCREF(type); + Py_INCREF(value); + Py_INCREF(traceback); + PyErr_Restore(type, value, traceback); +} + +tuple_T3OOO CPy_CatchError(void) { + // We need to return the existing sys.exc_info() information, so + // that it can be restored when we finish handling the error we + // are catching now. Grab that triple and convert NULL values to + // the ExcDummy object in order to simplify refcount handling in + // generated code. + tuple_T3OOO ret; + PyErr_GetExcInfo(&ret.f0, &ret.f1, &ret.f2); + _CPy_ToDummy(&ret.f0); + _CPy_ToDummy(&ret.f1); + _CPy_ToDummy(&ret.f2); + + if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_RuntimeError, "CPy_CatchError called with no error!"); + } + + // Retrieve the error info and normalize it so that it looks like + // what python code needs it to be. + PyObject *type, *value, *traceback; + PyErr_Fetch(&type, &value, &traceback); + // Could we avoid always normalizing? + PyErr_NormalizeException(&type, &value, &traceback); + if (traceback != NULL) { + PyException_SetTraceback(value, traceback); + } + // Indicate that we are now handling this exception by stashing it + // in sys.exc_info(). mypyc routines that need access to the + // exception will read it out of there. + PyErr_SetExcInfo(type, value, traceback); + // Clear the error indicator, since the exception isn't + // propagating anymore. 
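+    // Editorial note, added for clarity (not part of the original source): the
+    // intended pairing of these helpers in a lowered `except` block is roughly
+    //
+    //   tuple_T3OOO saved = CPy_CatchError();   // stash old exc_info, start handling
+    //   ... handler body: CPy_ExceptionMatches(), CPy_GetExcValue(), ...
+    //   CPy_RestoreExcInfo(saved);              // done handling, restore old state
+    //
+    // This is only a sketch; the exact sequencing in generated code differs.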
+ PyErr_Clear(); + + return ret; +} + +void CPy_RestoreExcInfo(tuple_T3OOO info) { + PyErr_SetExcInfo(_CPy_FromDummy(info.f0), _CPy_FromDummy(info.f1), _CPy_FromDummy(info.f2)); +} + +bool CPy_ExceptionMatches(PyObject *type) { + return PyErr_GivenExceptionMatches((PyObject *)Py_TYPE(CPy_ExcState()->exc_value), type); +} + +PyObject *CPy_GetExcValue(void) { + PyObject *exc = CPy_ExcState()->exc_value; + Py_INCREF(exc); + return exc; +} + +static inline void _CPy_ToNone(PyObject **p) { + if (*p == NULL) { + Py_INCREF(Py_None); + *p = Py_None; + } +} + +void _CPy_GetExcInfo(PyObject **p_type, PyObject **p_value, PyObject **p_traceback) { + PyErr_GetExcInfo(p_type, p_value, p_traceback); + _CPy_ToNone(p_type); + _CPy_ToNone(p_value); + _CPy_ToNone(p_traceback); +} + +tuple_T3OOO CPy_GetExcInfo(void) { + tuple_T3OOO ret; + _CPy_GetExcInfo(&ret.f0, &ret.f1, &ret.f2); + return ret; +} + +void CPyError_OutOfMemory(void) { + fprintf(stderr, "fatal: out of memory\n"); + fflush(stderr); + abort(); +} + +// Construct a nicely formatted type name based on __module__ and __name__. +static PyObject *CPy_GetTypeName(PyObject *type) { + PyObject *module = NULL, *name = NULL; + PyObject *full = NULL; + + module = PyObject_GetAttrString(type, "__module__"); + if (!module || !PyUnicode_Check(module)) { + goto out; + } + name = PyObject_GetAttrString(type, "__qualname__"); + if (!name || !PyUnicode_Check(name)) { + goto out; + } + + if (PyUnicode_CompareWithASCIIString(module, "builtins") == 0) { + Py_INCREF(name); + full = name; + } else { + full = PyUnicode_FromFormat("%U.%U", module, name); + } + +out: + Py_XDECREF(module); + Py_XDECREF(name); + return full; +} + +// Get the type of a value as a string, expanding tuples to include +// all the element types. +static PyObject *CPy_FormatTypeName(PyObject *value) { + if (Py_IsNone(value)) { + return PyUnicode_FromString("None"); + } + + if (!PyTuple_CheckExact(value)) { + return CPy_GetTypeName((PyObject *)Py_TYPE(value)); + } + + if (PyTuple_GET_SIZE(value) > 10) { + return PyUnicode_FromFormat("tuple[<%d items>]", PyTuple_GET_SIZE(value)); + } + + // Most of the logic is all for tuples, which is the only interesting case + PyObject *output = PyUnicode_FromString("tuple["); + if (!output) { + return NULL; + } + /* This is quadratic but if that ever matters something is really weird. */ + int i; + for (i = 0; i < PyTuple_GET_SIZE(value); i++) { + PyObject *s = CPy_FormatTypeName(PyTuple_GET_ITEM(value, i)); + if (!s) { + Py_DECREF(output); + return NULL; + } + PyObject *next = PyUnicode_FromFormat("%U%U%s", output, s, + i + 1 == PyTuple_GET_SIZE(value) ? "]" : ", "); + Py_DECREF(output); + Py_DECREF(s); + if (!next) { + return NULL; + } + output = next; + } + return output; +} + +CPy_NOINLINE +void CPy_TypeError(const char *expected, PyObject *value) { + PyObject *out = CPy_FormatTypeName(value); + if (out) { + PyErr_Format(PyExc_TypeError, "%s object expected; got %U", expected, out); + Py_DECREF(out); + } else { + PyErr_Format(PyExc_TypeError, "%s object expected; and errored formatting real type!", + expected); + } +} + +// The PyFrameObject type definition (struct _frame) has been moved +// to the internal C API: to the pycore_frame.h header file. +// https://github.com/python/cpython/pull/31530 +#if PY_VERSION_HEX >= 0x030b00a6 +#include "internal/pycore_frame.h" +#endif + +// This function is basically exactly the same with _PyTraceback_Add +// which is available in all the versions we support. 
+// We're continuing to use this because we'll probably optimize this later. +void CPy_AddTraceback(const char *filename, const char *funcname, int line, PyObject *globals) { + PyObject *exc, *val, *tb; + PyThreadState *thread_state = PyThreadState_GET(); + PyFrameObject *frame_obj; + + // We need to save off the exception state because in 3.8, + // PyFrame_New fails if there is an error set and it fails to look + // up builtins in the globals. (_PyTraceback_Add documents that it + // needs to do it because it decodes the filename according to the + // FS encoding, which could have a decoder in Python. We don't do + // that so *that* doesn't apply to us.) + PyErr_Fetch(&exc, &val, &tb); + PyCodeObject *code_obj = PyCode_NewEmpty(filename, funcname, line); + if (code_obj == NULL) { + goto error; + } + + frame_obj = PyFrame_New(thread_state, code_obj, globals, 0); + if (frame_obj == NULL) { + Py_DECREF(code_obj); + goto error; + } + frame_obj->f_lineno = line; + PyErr_Restore(exc, val, tb); + PyTraceBack_Here(frame_obj); + Py_DECREF(code_obj); + Py_DECREF(frame_obj); + + return; + +error: +#if CPY_3_12_FEATURES + _PyErr_ChainExceptions1(exc); +#else + _PyErr_ChainExceptions(exc, val, tb); +#endif +} + +CPy_NOINLINE +void CPy_TypeErrorTraceback(const char *filename, const char *funcname, int line, + PyObject *globals, const char *expected, PyObject *value) { + CPy_TypeError(expected, value); + CPy_AddTraceback(filename, funcname, line, globals); +} + +void CPy_AttributeError(const char *filename, const char *funcname, const char *classname, + const char *attrname, int line, PyObject *globals) { + char buf[500]; + snprintf(buf, sizeof(buf), "attribute '%.200s' of '%.200s' undefined", attrname, classname); + PyErr_SetString(PyExc_AttributeError, buf); + CPy_AddTraceback(filename, funcname, line, globals); +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/float_ops.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/float_ops.c new file mode 100644 index 0000000..3190657 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/float_ops.c @@ -0,0 +1,239 @@ +// Float primitive operations +// +// These are registered in mypyc.primitives.float_ops. 
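+//
+// Editorial note, added for clarity (not part of the original source): these
+// helpers signal failure by returning CPY_FLOAT_ERROR after setting a Python
+// exception. Since that sentinel is also a legal double value, callers are
+// expected to disambiguate with PyErr_Occurred(), along these lines
+// (illustrative only):
+//
+//   double r = CPyFloat_Log(x);
+//   if (r == CPY_FLOAT_ERROR && PyErr_Occurred()) {
+//       /* propagate the already-set ValueError / OverflowError */
+//   }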
+ +#include +#include "CPy.h" + + +static double CPy_DomainError(void) { + PyErr_SetString(PyExc_ValueError, "math domain error"); + return CPY_FLOAT_ERROR; +} + +static double CPy_MathRangeError(void) { + PyErr_SetString(PyExc_OverflowError, "math range error"); + return CPY_FLOAT_ERROR; +} + +static double CPy_MathExpectedNonNegativeInputError(double x) { + char *buf = PyOS_double_to_string(x, 'r', 0, Py_DTSF_ADD_DOT_0, NULL); + if (buf) { + PyErr_Format(PyExc_ValueError, "expected a nonnegative input, got %s", buf); + PyMem_Free(buf); + } + return CPY_FLOAT_ERROR; +} + +static double CPy_MathExpectedPositiveInputError(double x) { + char *buf = PyOS_double_to_string(x, 'r', 0, Py_DTSF_ADD_DOT_0, NULL); + if (buf) { + PyErr_Format(PyExc_ValueError, "expected a positive input, got %s", buf); + PyMem_Free(buf); + } + return CPY_FLOAT_ERROR; +} + +static double CPy_MathExpectedFiniteInput(double x) { + char *buf = PyOS_double_to_string(x, 'r', 0, Py_DTSF_ADD_DOT_0, NULL); + if (buf) { + PyErr_Format(PyExc_ValueError, "expected a finite input, got %s", buf); + PyMem_Free(buf); + } + return CPY_FLOAT_ERROR; +} + +double CPyFloat_FromTagged(CPyTagged x) { + if (CPyTagged_CheckShort(x)) { + return CPyTagged_ShortAsSsize_t(x); + } + double result = PyFloat_AsDouble(CPyTagged_LongAsObject(x)); + if (unlikely(result == -1.0) && PyErr_Occurred()) { + return CPY_FLOAT_ERROR; + } + return result; +} + +double CPyFloat_Sin(double x) { + double v = sin(x); + if (unlikely(isnan(v)) && !isnan(x)) { +#if CPY_3_14_FEATURES + return CPy_MathExpectedFiniteInput(x); +#else + return CPy_DomainError(); +#endif + } + return v; +} + +double CPyFloat_Cos(double x) { + double v = cos(x); + if (unlikely(isnan(v)) && !isnan(x)) { +#if CPY_3_14_FEATURES + return CPy_MathExpectedFiniteInput(x); +#else + return CPy_DomainError(); +#endif + } + return v; +} + +double CPyFloat_Tan(double x) { + if (unlikely(isinf(x))) { +#if CPY_3_14_FEATURES + return CPy_MathExpectedFiniteInput(x); +#else + return CPy_DomainError(); +#endif + } + return tan(x); +} + +double CPyFloat_Sqrt(double x) { + if (x < 0.0) { +#if CPY_3_14_FEATURES + return CPy_MathExpectedNonNegativeInputError(x); +#else + return CPy_DomainError(); +#endif + } + return sqrt(x); +} + +double CPyFloat_Exp(double x) { + double v = exp(x); + if (unlikely(v == INFINITY) && x != INFINITY) { + return CPy_MathRangeError(); + } + return v; +} + +double CPyFloat_Log(double x) { + if (x <= 0.0) { +#if CPY_3_14_FEATURES + return CPy_MathExpectedPositiveInputError(x); +#else + return CPy_DomainError(); +#endif + } + return log(x); +} + +CPyTagged CPyFloat_Floor(double x) { + double v = floor(x); + return CPyTagged_FromFloat(v); +} + +CPyTagged CPyFloat_Ceil(double x) { + double v = ceil(x); + return CPyTagged_FromFloat(v); +} + +bool CPyFloat_IsInf(double x) { + return isinf(x) != 0; +} + +bool CPyFloat_IsNaN(double x) { + return isnan(x) != 0; +} + +// From CPython 3.10.0, Objects/floatobject.c +static void +_float_div_mod(double vx, double wx, double *floordiv, double *mod) +{ + double div; + *mod = fmod(vx, wx); + /* fmod is typically exact, so vx-mod is *mathematically* an + exact multiple of wx. But this is fp arithmetic, and fp + vx - mod is an approximation; the result is that div may + not be an exact integral value after the division, although + it will always be very close to one. 
+ */ + div = (vx - *mod) / wx; + if (*mod) { + /* ensure the remainder has the same sign as the denominator */ + if ((wx < 0) != (*mod < 0)) { + *mod += wx; + div -= 1.0; + } + } + else { + /* the remainder is zero, and in the presence of signed zeroes + fmod returns different results across platforms; ensure + it has the same sign as the denominator. */ + *mod = copysign(0.0, wx); + } + /* snap quotient to nearest integral value */ + if (div) { + *floordiv = floor(div); + if (div - *floordiv > 0.5) { + *floordiv += 1.0; + } + } + else { + /* div is zero - get the same sign as the true quotient */ + *floordiv = copysign(0.0, vx / wx); /* zero w/ sign of vx/wx */ + } +} + +double CPyFloat_FloorDivide(double x, double y) { + double mod, floordiv; + if (y == 0) { + PyErr_SetString(PyExc_ZeroDivisionError, "float floor division by zero"); + return CPY_FLOAT_ERROR; + } + _float_div_mod(x, y, &floordiv, &mod); + return floordiv; +} + +// Adapted from CPython 3.10.7 +double CPyFloat_Pow(double x, double y) { + if (!isfinite(x) || !isfinite(y)) { + if (isnan(x)) + return y == 0.0 ? 1.0 : x; /* NaN**0 = 1 */ + else if (isnan(y)) + return x == 1.0 ? 1.0 : y; /* 1**NaN = 1 */ + else if (isinf(x)) { + int odd_y = isfinite(y) && fmod(fabs(y), 2.0) == 1.0; + if (y > 0.0) + return odd_y ? x : fabs(x); + else if (y == 0.0) + return 1.0; + else /* y < 0. */ + return odd_y ? copysign(0.0, x) : 0.0; + } + else if (isinf(y)) { + if (fabs(x) == 1.0) + return 1.0; + else if (y > 0.0 && fabs(x) > 1.0) + return y; + else if (y < 0.0 && fabs(x) < 1.0) { + #if PY_VERSION_HEX < 0x030B0000 + if (x == 0.0) { /* 0**-inf: divide-by-zero */ + return CPy_DomainError(); + } + #endif + return -y; /* result is +inf */ + } else + return 0.0; + } + } + double r = pow(x, y); + if (!isfinite(r)) { + if (isnan(r)) { + return CPy_DomainError(); + } + /* + an infinite result here arises either from: + (A) (+/-0.)**negative (-> divide-by-zero) + (B) overflow of x**y with x and y finite + */ + else if (isinf(r)) { + if (x == 0.0) + return CPy_DomainError(); + else + return CPy_MathRangeError(); + } + } + return r; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/generic_ops.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/generic_ops.c new file mode 100644 index 0000000..1e1e184 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/generic_ops.c @@ -0,0 +1,84 @@ +// Generic primitive operations +// +// These are registered in mypyc.primitives.generic_ops. + +#include +#include "CPy.h" + +CPyTagged CPyObject_Hash(PyObject *o) { + Py_hash_t h = PyObject_Hash(o); + if (h == -1) { + return CPY_INT_TAG; + } else { + // This is tragically annoying. The range of hash values in + // 64-bit python covers 64-bits, and our short integers only + // cover 63. This means that half the time we are boxing the + // result for basically no good reason. To add insult to + // injury it is probably about to be immediately unboxed by a + // tp_hash wrapper. 
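        // (Concretely: the shifted short form only holds values in
        // [-2**62, 2**62 - 1], so any hash outside that range makes
        // CPyTagged_FromSsize_t below fall back to allocating a PyLong.)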
+ return CPyTagged_FromSsize_t(h); + } +} + +PyObject *CPyObject_GetAttr3(PyObject *v, PyObject *name, PyObject *defl) +{ + PyObject *result = PyObject_GetAttr(v, name); + if (!result && PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + Py_INCREF(defl); + result = defl; + } + return result; +} + +PyObject *CPyIter_Next(PyObject *iter) +{ + return (*Py_TYPE(iter)->tp_iternext)(iter); +} + +PyObject *CPyNumber_Power(PyObject *base, PyObject *index) +{ + return PyNumber_Power(base, index, Py_None); +} + +PyObject *CPyNumber_InPlacePower(PyObject *base, PyObject *index) +{ + return PyNumber_InPlacePower(base, index, Py_None); +} + +PyObject *CPyObject_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end) { + PyObject *start_obj = CPyTagged_AsObject(start); + PyObject *end_obj = CPyTagged_AsObject(end); + if (unlikely(start_obj == NULL || end_obj == NULL)) { + return NULL; + } + PyObject *slice = PySlice_New(start_obj, end_obj, NULL); + Py_DECREF(start_obj); + Py_DECREF(end_obj); + if (unlikely(slice == NULL)) { + return NULL; + } + PyObject *result = PyObject_GetItem(obj, slice); + Py_DECREF(slice); + return result; +} + +typedef PyObject *(*SetupFunction)(PyObject *); + +PyObject *CPy_SetupObject(PyObject *type) { + PyTypeObject *tp = (PyTypeObject *)type; + PyMethodDef *def = NULL; + for(; tp; tp = tp->tp_base) { + def = tp->tp_methods; + if (!def || !def->ml_name) { + continue; + } + + if (!strcmp(def->ml_name, "__internal_mypyc_setup")) { + return ((SetupFunction)(void(*)(void))def->ml_meth)(type); + } + } + + PyErr_SetString(PyExc_RuntimeError, "Internal mypyc error: Unable to find object setup function"); + return NULL; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/getargs.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/getargs.c new file mode 100644 index 0000000..163b9ac --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/getargs.c @@ -0,0 +1,451 @@ +/* getargs implementation copied from Python 3.8 and stripped down to only include + * the functions we need. + * We also add support for required kwonly args and accepting *args / **kwargs. + * A good idea would be to also vendor in the Fast versions and get our stuff + * working with *that*. + * Another probably good idea is to strip out all the formatting stuff we don't need + * and then add in custom stuff that we do need. + * + * DOCUMENTATION OF THE EXTENSIONS: + * - Arguments given after a @ format specify are required keyword-only arguments. + * The | and $ specifiers must both appear before @. + * - If the first character of a format string is %, then the function can support + * *args and **kwargs. On seeing a %, the parser will consume two arguments, + * which should be pointers to variables to store the *args and **kwargs, respectively. + * Either pointer can be NULL, in which case the function doesn't take that + * variety of vararg. + * Unlike most format specifiers, the caller takes ownership of these objects + * and is responsible for decrefing them. + * - All arguments must use the 'O' format. + * - There's minimal error checking of format strings. They are generated + * programmatically and can be assumed valid. 
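 *
 * A purely hypothetical sketch of a generated call using these extensions,
 * for something shaped like  def f(x, *args, y, **kw)  (the real format
 * strings and variable names are produced by mypyc's code generator, not
 * written by hand):
 *
 *     static const char * const kwlist[] = {"x", "y", NULL};
 *     PyObject *x = NULL, *y = NULL, *rest_args = NULL, *rest_kw = NULL;
 *     if (!CPyArg_ParseTupleAndKeywords(args, kwargs, "%O|$@O", "f", kwlist,
 *                                       &rest_args, &rest_kw, &x, &y))
 *         return NULL;
 *
 * The leading '%' makes the parser fill &rest_args / &rest_kw first (the
 * caller owns those two and must decref them), every value uses the 'O'
 * format, and 'y', appearing after '@' (which itself follows '|' and '$'),
 * is a required keyword-only argument.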
+ */ + +// These macro definitions are copied from pyport.h in Python 3.9 and later +// https://bugs.python.org/issue19569 +#if defined(__clang__) +#define _Py_COMP_DIAG_PUSH _Pragma("clang diagnostic push") +#define _Py_COMP_DIAG_IGNORE_DEPR_DECLS \ + _Pragma("clang diagnostic ignored \"-Wdeprecated-declarations\"") +#define _Py_COMP_DIAG_POP _Pragma("clang diagnostic pop") +#elif defined(__GNUC__) \ + && ((__GNUC__ >= 5) || (__GNUC__ == 4) && (__GNUC_MINOR__ >= 6)) +#define _Py_COMP_DIAG_PUSH _Pragma("GCC diagnostic push") +#define _Py_COMP_DIAG_IGNORE_DEPR_DECLS \ + _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"") +#define _Py_COMP_DIAG_POP _Pragma("GCC diagnostic pop") +#elif defined(_MSC_VER) +#define _Py_COMP_DIAG_PUSH __pragma(warning(push)) +#define _Py_COMP_DIAG_IGNORE_DEPR_DECLS __pragma(warning(disable: 4996)) +#define _Py_COMP_DIAG_POP __pragma(warning(pop)) +#else +#define _Py_COMP_DIAG_PUSH +#define _Py_COMP_DIAG_IGNORE_DEPR_DECLS +#define _Py_COMP_DIAG_POP +#endif + +#include "Python.h" +#include "pythonsupport.h" + +#include +#include + +#ifndef PyDict_GET_SIZE +#define PyDict_GET_SIZE(d) PyDict_Size(d) +#endif + + +#ifdef __cplusplus +extern "C" { +#endif +int CPyArg_ParseTupleAndKeywords(PyObject *, PyObject *, + const char *, const char *, const char * const *, ...); + +/* Forward */ +static int vgetargskeywords(PyObject *, PyObject *, + const char *, const char *, const char * const *, va_list *); +static void skipitem(const char **, va_list *); + +/* Support for keyword arguments donated by + Geoff Philbrick */ + +/* Return false (0) for error, else true. */ +int +CPyArg_ParseTupleAndKeywords(PyObject *args, + PyObject *keywords, + const char *format, + const char *fname, + const char * const *kwlist, ...) +{ + int retval; + va_list va; + + va_start(va, kwlist); + retval = vgetargskeywords(args, keywords, format, fname, kwlist, &va); + va_end(va); + return retval; +} + +#define IS_END_OF_FORMAT(c) (c == '\0' || c == ';' || c == ':') + +static int +vgetargskeywords(PyObject *args, PyObject *kwargs, const char *format, + const char *fname, const char * const *kwlist, va_list *p_va) +{ + int min = INT_MAX; + int max = INT_MAX; + int required_kwonly_start = INT_MAX; + int has_required_kws = 0; + int i, pos, len; + int skip = 0; + Py_ssize_t nargs, nkwargs; + PyObject *current_arg; + int bound_pos_args; + + PyObject **p_args = NULL, **p_kwargs = NULL; + + assert(args != NULL && PyTuple_Check(args)); + assert(kwargs == NULL || PyDict_Check(kwargs)); + assert(format != NULL); + assert(kwlist != NULL); + assert(p_va != NULL); + + /* scan kwlist and count the number of positional-only parameters */ + for (pos = 0; kwlist[pos] && !*kwlist[pos]; pos++) { + } + /* scan kwlist and get greatest possible nbr of args */ + for (len = pos; kwlist[len]; len++) { +#ifdef DEBUG + if (!*kwlist[len]) { + PyErr_SetString(PyExc_SystemError, + "Empty keyword parameter name"); + return 0; + } +#endif + } + + if (*format == '%') { + p_args = va_arg(*p_va, PyObject **); + p_kwargs = va_arg(*p_va, PyObject **); + format++; + } + + nargs = PyTuple_GET_SIZE(args); + nkwargs = (kwargs == NULL) ? 0 : PyDict_GET_SIZE(kwargs); + if (unlikely(nargs + nkwargs > len && !p_args && !p_kwargs)) { + /* Adding "keyword" (when nargs == 0) prevents producing wrong error + messages in some special cases (see bpo-31229). */ + PyErr_Format(PyExc_TypeError, + "%.200s%s takes at most %d %sargument%s (%zd given)", + (fname == NULL) ? "function" : fname, + (fname == NULL) ? 
"" : "()", + len, + (nargs == 0) ? "keyword " : "", + (len == 1) ? "" : "s", + nargs + nkwargs); + return 0; + } + + /* convert tuple args and keyword args in same loop, using kwlist to drive process */ + for (i = 0; i < len; i++) { + if (*format == '|') { +#ifdef DEBUG + if (min != INT_MAX) { + PyErr_SetString(PyExc_SystemError, + "Invalid format string (| specified twice)"); + return 0; + } +#endif + + min = i; + format++; + +#ifdef DEBUG + if (max != INT_MAX) { + PyErr_SetString(PyExc_SystemError, + "Invalid format string ($ before |)"); + return 0; + } +#endif + + /* If there are optional args, figure out whether we have + * required keyword arguments so that we don't bail without + * enforcing them. */ + has_required_kws = strchr(format, '@') != NULL; + } + if (*format == '$') { +#ifdef DEBUG + if (max != INT_MAX) { + PyErr_SetString(PyExc_SystemError, + "Invalid format string ($ specified twice)"); + return 0; + } +#endif + + max = i; + format++; + +#ifdef DEBUG + if (max < pos) { + PyErr_SetString(PyExc_SystemError, + "Empty parameter name after $"); + return 0; + } +#endif + if (skip) { + /* Now we know the minimal and the maximal numbers of + * positional arguments and can raise an exception with + * informative message (see below). */ + break; + } + if (unlikely(max < nargs && !p_args)) { + if (max == 0) { + PyErr_Format(PyExc_TypeError, + "%.200s%s takes no positional arguments", + (fname == NULL) ? "function" : fname, + (fname == NULL) ? "" : "()"); + } + else { + PyErr_Format(PyExc_TypeError, + "%.200s%s takes %s %d positional argument%s" + " (%zd given)", + (fname == NULL) ? "function" : fname, + (fname == NULL) ? "" : "()", + (min < max) ? "at most" : "exactly", + max, + max == 1 ? "" : "s", + nargs); + } + return 0; + } + } + if (*format == '@') { +#ifdef DEBUG + if (min == INT_MAX && max == INT_MAX) { + PyErr_SetString(PyExc_SystemError, + "Invalid format string " + "(@ without preceding | and $)"); + return 0; + } + if (required_kwonly_start != INT_MAX) { + PyErr_SetString(PyExc_SystemError, + "Invalid format string (@ specified twice)"); + return 0; + } +#endif + + required_kwonly_start = i; + format++; + } +#ifdef DEBUG + if (IS_END_OF_FORMAT(*format)) { + PyErr_Format(PyExc_SystemError, + "More keyword list entries (%d) than " + "format specifiers (%d)", len, i); + return 0; + } +#endif + if (!skip) { + if (i < nargs && i < max) { + current_arg = Py_NewRef(PyTuple_GET_ITEM(args, i)); + } + else if (nkwargs && i >= pos) { + if (unlikely(PyDict_GetItemStringRef(kwargs, kwlist[i], ¤t_arg) < 0)) { + return 0; + } + if (current_arg) { + --nkwargs; + } + } + else { + current_arg = NULL; + } + + if (current_arg) { + PyObject **p = va_arg(*p_va, PyObject **); + *p = current_arg; + Py_DECREF(current_arg); + format++; + continue; + } + + if (i < min || i >= required_kwonly_start) { + if (likely(i < pos)) { + assert (min == INT_MAX); + assert (max == INT_MAX); + skip = 1; + /* At that moment we still don't know the minimal and + * the maximal numbers of positional arguments. Raising + * an exception is deferred until we encounter | and $ + * or the end of the format. */ + } + else { + if (i >= max) { + PyErr_Format(PyExc_TypeError, + "%.200s%s missing required " + "keyword-only argument '%s'", + (fname == NULL) ? "function" : fname, + (fname == NULL) ? "" : "()", + kwlist[i]); + } + else { + PyErr_Format(PyExc_TypeError, + "%.200s%s missing required " + "argument '%s' (pos %d)", + (fname == NULL) ? "function" : fname, + (fname == NULL) ? 
"" : "()", + kwlist[i], i+1); + } + return 0; + } + } + /* current code reports success when all required args + * fulfilled and no keyword args left, with no further + * validation. XXX Maybe skip this in debug build ? + */ + if (!nkwargs && !skip && !has_required_kws && + !p_args && !p_kwargs) + { + return 1; + } + } + + /* We are into optional args, skip through to any remaining + * keyword args */ + skipitem(&format, p_va); + } + + if (unlikely(skip)) { + PyErr_Format(PyExc_TypeError, + "%.200s%s takes %s %d positional argument%s" + " (%zd given)", + (fname == NULL) ? "function" : fname, + (fname == NULL) ? "" : "()", + (Py_MIN(pos, min) < i) ? "at least" : "exactly", + Py_MIN(pos, min), + Py_MIN(pos, min) == 1 ? "" : "s", + nargs); + return 0; + } + +#ifdef DEBUG + if (!IS_END_OF_FORMAT(*format) && + (*format != '|') && (*format != '$') && (*format != '@')) + { + PyErr_Format(PyExc_SystemError, + "more argument specifiers than keyword list entries " + "(remaining format:'%s')", format); + return 0; + } +#endif + + bound_pos_args = Py_MIN(nargs, Py_MIN(max, len)); + if (p_args) { + *p_args = PyTuple_GetSlice(args, bound_pos_args, nargs); + if (!*p_args) { + return 0; + } + } + + if (p_kwargs) { + /* This unfortunately needs to be special cased because if len is 0 then we + * never go through the main loop. */ + if (unlikely(nargs > 0 && len == 0 && !p_args)) { + PyErr_Format(PyExc_TypeError, + "%.200s%s takes no positional arguments", + (fname == NULL) ? "function" : fname, + (fname == NULL) ? "" : "()"); + + return 0; + } + + *p_kwargs = PyDict_New(); + if (!*p_kwargs) { + goto latefail; + } + } + + if (nkwargs > 0) { + PyObject *key, *value; + Py_ssize_t j; + /* make sure there are no arguments given by name and position */ + for (i = pos; i < bound_pos_args && i < len; i++) { + PyObject *current_arg; + if (unlikely(PyDict_GetItemStringRef(kwargs, kwlist[i], ¤t_arg) < 0)) { + goto latefail; + } + if (unlikely(current_arg != NULL)) { + Py_DECREF(current_arg); + /* arg present in tuple and in dict */ + PyErr_Format(PyExc_TypeError, + "argument for %.200s%s given by name ('%s') " + "and position (%d)", + (fname == NULL) ? "function" : fname, + (fname == NULL) ? "" : "()", + kwlist[i], i+1); + goto latefail; + } + } + /* make sure there are no extraneous keyword arguments */ + j = 0; + while (PyDict_Next(kwargs, &j, &key, &value)) { + int match = 0; + if (unlikely(!PyUnicode_Check(key))) { + PyErr_SetString(PyExc_TypeError, + "keywords must be strings"); + goto latefail; + } + for (i = pos; i < len; i++) { + if (PyUnicode_EqualToUTF8(key, kwlist[i])) { + match = 1; + break; + } + } + if (!match) { + if (unlikely(!p_kwargs)) { + PyErr_Format(PyExc_TypeError, + "'%U' is an invalid keyword " + "argument for %.200s%s", + key, + (fname == NULL) ? "this function" : fname, + (fname == NULL) ? "" : "()"); + goto latefail; + } else { + if (PyDict_SetItem(*p_kwargs, key, value) < 0) { + goto latefail; + } + } + } + } + } + + return 1; + /* Handle failures that have happened after we have tried to + * create *args and **kwargs, if they exist. 
*/ +latefail: + if (p_args) { + Py_XDECREF(*p_args); + } + if (p_kwargs) { + Py_XDECREF(*p_kwargs); + } + return 0; +} + + +static void +skipitem(const char **p_format, va_list *p_va) +{ + const char *format = *p_format; + char c = *format++; + + if (p_va != NULL) { + (void) va_arg(*p_va, PyObject **); + } + + *p_format = format; +} + +#ifdef __cplusplus +}; +#endif diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/getargsfast.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/getargsfast.c new file mode 100644 index 0000000..e5667e2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/getargsfast.c @@ -0,0 +1,569 @@ +/* getargskeywordsfast implementation copied from Python 3.9 and stripped down to + * only include the functionality we need. + * + * We also add support for required kwonly args and accepting *args / **kwargs. + * + * DOCUMENTATION OF THE EXTENSIONS: + * - Arguments given after a @ format specify required keyword-only arguments. + * The | and $ specifiers must both appear before @. + * - If the first character of a format string is %, then the function can support + * *args and/or **kwargs. In this case the parser will consume two arguments, + * which should be pointers to variables to store the *args and **kwargs, respectively. + * Either pointer can be NULL, in which case the function doesn't take that + * variety of vararg. + * Unlike most format specifiers, the caller takes ownership of these objects + * and is responsible for decrefing them. + */ + +#include +#include "CPy.h" + +#define PARSER_INITED(parser) ((parser)->kwtuple != NULL) + +/* Forward */ +static int +vgetargskeywordsfast_impl(PyObject *const *args, Py_ssize_t nargs, + PyObject *kwargs, PyObject *kwnames, + CPyArg_Parser *parser, + va_list *p_va); +static void skipitem_fast(const char **, va_list *); + +/* Parse args for an arbitrary signature */ +int +CPyArg_ParseStackAndKeywords(PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames, + CPyArg_Parser *parser, ...) +{ + int retval; + va_list va; + + va_start(va, parser); + retval = vgetargskeywordsfast_impl(args, nargs, NULL, kwnames, parser, &va); + va_end(va); + return retval; +} + +/* Parse args for a function that takes no args */ +int +CPyArg_ParseStackAndKeywordsNoArgs(PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames, + CPyArg_Parser *parser, ...) +{ + int retval; + va_list va; + + va_start(va, parser); + if (nargs == 0 && kwnames == NULL) { + // Fast path: no arguments + retval = 1; + } else { + retval = vgetargskeywordsfast_impl(args, nargs, NULL, kwnames, parser, &va); + } + va_end(va); + return retval; +} + +/* Parse args for a function that takes one arg */ +int +CPyArg_ParseStackAndKeywordsOneArg(PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames, + CPyArg_Parser *parser, ...) +{ + int retval; + va_list va; + + va_start(va, parser); + if (kwnames == NULL && nargs == 1) { + // Fast path: one positional argument + PyObject **p; + p = va_arg(va, PyObject **); + *p = args[0]; + retval = 1; + } else { + retval = vgetargskeywordsfast_impl(args, nargs, NULL, kwnames, parser, &va); + } + va_end(va); + return retval; +} + +/* Parse args for a function that takes no keyword-only args, *args or **kwargs */ +int +CPyArg_ParseStackAndKeywordsSimple(PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames, + CPyArg_Parser *parser, ...) 
+{ + int retval; + va_list va; + + va_start(va, parser); + if (kwnames == NULL && PARSER_INITED(parser) && + nargs >= parser->min && nargs <= parser->max) { + // Fast path: correct number of positional arguments only + PyObject **p; + Py_ssize_t i; + for (i = 0; i < nargs; i++) { + p = va_arg(va, PyObject **); + *p = args[i]; + } + retval = 1; + } else { + retval = vgetargskeywordsfast_impl(args, nargs, NULL, kwnames, parser, &va); + } + va_end(va); + return retval; +} + +#define IS_END_OF_FORMAT(c) (c == '\0' || c == ';' || c == ':') + + +/* List of static parsers. */ +static struct CPyArg_Parser *static_arg_parsers = NULL; + +static int +parser_init(CPyArg_Parser *parser) +{ + const char * const *keywords; + const char *format, *msg; + int i, len, min, max, nkw; + PyObject *kwtuple; + + assert(parser->keywords != NULL); + if (PARSER_INITED(parser)) { + return 1; + } + + keywords = parser->keywords; + /* scan keywords and count the number of positional-only parameters */ + for (i = 0; keywords[i] && !*keywords[i]; i++) { + } + parser->pos = i; + /* scan keywords and get greatest possible nbr of args */ + for (; keywords[i]; i++) { + if (!*keywords[i]) { + PyErr_SetString(PyExc_SystemError, + "Empty keyword parameter name"); + return 0; + } + } + len = i; + + parser->required_kwonly_start = INT_MAX; + if (*parser->format == '%') { + parser->format++; + parser->varargs = 1; + } + + format = parser->format; + if (format) { + /* grab the function name or custom error msg first (mutually exclusive) */ + parser->fname = strchr(parser->format, ':'); + if (parser->fname) { + parser->fname++; + parser->custom_msg = NULL; + } + else { + parser->custom_msg = strchr(parser->format,';'); + if (parser->custom_msg) + parser->custom_msg++; + } + + min = max = INT_MAX; + for (i = 0; i < len; i++) { + if (*format == '|') { + if (min != INT_MAX) { + PyErr_SetString(PyExc_SystemError, + "Invalid format string (| specified twice)"); + return 0; + } + if (max != INT_MAX) { + PyErr_SetString(PyExc_SystemError, + "Invalid format string ($ before |)"); + return 0; + } + min = i; + format++; + } + if (*format == '$') { + if (max != INT_MAX) { + PyErr_SetString(PyExc_SystemError, + "Invalid format string ($ specified twice)"); + return 0; + } + if (i < parser->pos) { + PyErr_SetString(PyExc_SystemError, + "Empty parameter name after $"); + return 0; + } + max = i; + format++; + } + if (*format == '@') { + if (parser->required_kwonly_start != INT_MAX) { + PyErr_SetString(PyExc_SystemError, + "Invalid format string (@ specified twice)"); + return 0; + } + if (min == INT_MAX && max == INT_MAX) { + PyErr_SetString(PyExc_SystemError, + "Invalid format string " + "(@ without preceding | and $)"); + return 0; + } + format++; + parser->has_required_kws = 1; + parser->required_kwonly_start = i; + } + if (IS_END_OF_FORMAT(*format)) { + PyErr_Format(PyExc_SystemError, + "More keyword list entries (%d) than " + "format specifiers (%d)", len, i); + return 0; + } + + skipitem_fast(&format, NULL); + } + parser->min = Py_MIN(min, len); + parser->max = Py_MIN(max, len); + + if (!IS_END_OF_FORMAT(*format) && (*format != '|') && (*format != '$')) { + PyErr_Format(PyExc_SystemError, + "more argument specifiers than keyword list entries " + "(remaining format:'%s')", format); + return 0; + } + } + + nkw = len - parser->pos; + kwtuple = PyTuple_New(nkw); + if (kwtuple == NULL) { + return 0; + } + keywords = parser->keywords + parser->pos; + for (i = 0; i < nkw; i++) { + PyObject *str = PyUnicode_FromString(keywords[i]); + if (str == 
NULL) { + Py_DECREF(kwtuple); + return 0; + } + PyUnicode_InternInPlace(&str); + PyTuple_SET_ITEM(kwtuple, i, str); + } + parser->kwtuple = kwtuple; + + assert(parser->next == NULL); + parser->next = static_arg_parsers; + static_arg_parsers = parser; + return 1; +} + +static PyObject* +find_keyword(PyObject *kwnames, PyObject *const *kwstack, PyObject *key) +{ + Py_ssize_t i, nkwargs; + + nkwargs = PyTuple_GET_SIZE(kwnames); + for (i = 0; i < nkwargs; i++) { + PyObject *kwname = PyTuple_GET_ITEM(kwnames, i); + + /* kwname == key will normally find a match in since keyword keys + should be interned strings; if not retry below in a new loop. */ + if (kwname == key) { + return kwstack[i]; + } + } + + for (i = 0; i < nkwargs; i++) { + PyObject *kwname = PyTuple_GET_ITEM(kwnames, i); + assert(PyUnicode_Check(kwname)); + if (PyUnicode_Equal(kwname, key)) { + return kwstack[i]; + } + } + return NULL; +} + +static int +vgetargskeywordsfast_impl(PyObject *const *args, Py_ssize_t nargs, + PyObject *kwargs, PyObject *kwnames, + CPyArg_Parser *parser, + va_list *p_va) +{ + PyObject *kwtuple; + const char *format; + PyObject *keyword; + int i, pos, len; + Py_ssize_t nkwargs; + PyObject *current_arg; + PyObject *const *kwstack = NULL; + int bound_pos_args; + PyObject **p_args = NULL, **p_kwargs = NULL; + + assert(kwargs == NULL || PyDict_Check(kwargs)); + assert(kwargs == NULL || kwnames == NULL); + assert(p_va != NULL); + + if (!parser_init(parser)) { + return 0; + } + + kwtuple = parser->kwtuple; + pos = parser->pos; + len = pos + (int)PyTuple_GET_SIZE(kwtuple); + + if (parser->varargs) { + p_args = va_arg(*p_va, PyObject **); + p_kwargs = va_arg(*p_va, PyObject **); + } + + if (kwargs != NULL) { + nkwargs = PyDict_GET_SIZE(kwargs); + } + else if (kwnames != NULL) { + nkwargs = PyTuple_GET_SIZE(kwnames); + kwstack = args + nargs; + } + else { + nkwargs = 0; + } + if (nargs + nkwargs > len && !p_args && !p_kwargs) { + /* Adding "keyword" (when nargs == 0) prevents producing wrong error + messages in some special cases (see bpo-31229). */ + PyErr_Format(PyExc_TypeError, + "%.200s%s takes at most %d %sargument%s (%zd given)", + (parser->fname == NULL) ? "function" : parser->fname, + (parser->fname == NULL) ? "" : "()", + len, + (nargs == 0) ? "keyword " : "", + (len == 1) ? "" : "s", + nargs + nkwargs); + return 0; + } + if (parser->max < nargs && !p_args) { + if (parser->max == 0) { + PyErr_Format(PyExc_TypeError, + "%.200s%s takes no positional arguments", + (parser->fname == NULL) ? "function" : parser->fname, + (parser->fname == NULL) ? "" : "()"); + } + else { + PyErr_Format(PyExc_TypeError, + "%.200s%s takes %s %d positional argument%s (%zd given)", + (parser->fname == NULL) ? "function" : parser->fname, + (parser->fname == NULL) ? "" : "()", + (parser->min < parser->max) ? "at most" : "exactly", + parser->max, + parser->max == 1 ? 
"" : "s", + nargs); + } + return 0; + } + + format = parser->format; + + /* convert tuple args and keyword args in same loop, using kwtuple to drive process */ + for (i = 0; i < len; i++) { + if (*format == '|') { + format++; + } + if (*format == '$') { + format++; + } + if (*format == '@') { + format++; + } + assert(!IS_END_OF_FORMAT(*format)); + + if (i < nargs && i < parser->max) { + current_arg = args[i]; + } + else if (nkwargs && i >= pos) { + keyword = PyTuple_GET_ITEM(kwtuple, i - pos); + if (kwargs != NULL) { + current_arg = PyDict_GetItemWithError(kwargs, keyword); + if (!current_arg && PyErr_Occurred()) { + return 0; + } + } + else { + current_arg = find_keyword(kwnames, kwstack, keyword); + } + if (current_arg) { + --nkwargs; + } + } + else { + current_arg = NULL; + } + + if (current_arg) { + PyObject **p = va_arg(*p_va, PyObject **); + *p = current_arg; + format++; + continue; + } + + if (i < parser->min || i >= parser->required_kwonly_start) { + /* Less arguments than required */ + if (i < pos) { + Py_ssize_t min = Py_MIN(pos, parser->min); + PyErr_Format(PyExc_TypeError, + "%.200s%s takes %s %d positional argument%s" + " (%zd given)", + (parser->fname == NULL) ? "function" : parser->fname, + (parser->fname == NULL) ? "" : "()", + min < parser->max ? "at least" : "exactly", + min, + min == 1 ? "" : "s", + nargs); + } + else { + keyword = PyTuple_GET_ITEM(kwtuple, i - pos); + if (i >= parser->max) { + PyErr_Format(PyExc_TypeError, "%.200s%s missing required " + "keyword-only argument '%U'", + (parser->fname == NULL) ? "function" : parser->fname, + (parser->fname == NULL) ? "" : "()", + keyword); + } + else { + PyErr_Format(PyExc_TypeError, "%.200s%s missing required " + "argument '%U' (pos %d)", + (parser->fname == NULL) ? "function" : parser->fname, + (parser->fname == NULL) ? "" : "()", + keyword, i+1); + } + } + return 0; + } + /* current code reports success when all required args + * fulfilled and no keyword args left, with no further + * validation. XXX Maybe skip this in debug build ? + */ + if (!nkwargs && !parser->has_required_kws && !p_args && !p_kwargs) { + return 1; + } + + /* We are into optional args, skip through to any remaining + * keyword args */ + skipitem_fast(&format, p_va); + } + + assert(IS_END_OF_FORMAT(*format) || (*format == '|') || (*format == '$')); + + bound_pos_args = Py_MIN(nargs, Py_MIN(parser->max, len)); + if (p_args) { + *p_args = PyTuple_New(nargs - bound_pos_args); + if (!*p_args) { + return 0; + } + for (i = bound_pos_args; i < nargs; i++) { + PyObject *arg = args[i]; + Py_INCREF(arg); + PyTuple_SET_ITEM(*p_args, i - bound_pos_args, arg); + } + } + + if (p_kwargs) { + /* This unfortunately needs to be special cased because if len is 0 then we + * never go through the main loop. */ + if (nargs > 0 && len == 0 && !p_args) { + PyErr_Format(PyExc_TypeError, + "%.200s%s takes no positional arguments", + (parser->fname == NULL) ? "function" : parser->fname, + (parser->fname == NULL) ? 
"" : "()"); + + return 0; + } + + *p_kwargs = PyDict_New(); + if (!*p_kwargs) { + goto latefail; + } + } + + if (nkwargs > 0) { + Py_ssize_t j; + PyObject *value; + /* make sure there are no arguments given by name and position */ + for (i = pos; i < bound_pos_args; i++) { + keyword = PyTuple_GET_ITEM(kwtuple, i - pos); + if (kwargs != NULL) { + current_arg = PyDict_GetItemWithError(kwargs, keyword); + if (!current_arg && PyErr_Occurred()) { + goto latefail; + } + } + else { + current_arg = find_keyword(kwnames, kwstack, keyword); + } + if (current_arg) { + /* arg present in tuple and in dict */ + PyErr_Format(PyExc_TypeError, + "argument for %.200s%s given by name ('%U') " + "and position (%d)", + (parser->fname == NULL) ? "function" : parser->fname, + (parser->fname == NULL) ? "" : "()", + keyword, i+1); + goto latefail; + } + } + /* make sure there are no extraneous keyword arguments */ + j = 0; + while (1) { + int match; + if (kwargs != NULL) { + if (!PyDict_Next(kwargs, &j, &keyword, &value)) + break; + } + else { + if (j >= PyTuple_GET_SIZE(kwnames)) + break; + keyword = PyTuple_GET_ITEM(kwnames, j); + value = kwstack[j]; + j++; + } + + match = PySequence_Contains(kwtuple, keyword); + if (match <= 0) { + if (!match) { + if (!p_kwargs) { + PyErr_Format(PyExc_TypeError, + "'%S' is an invalid keyword " + "argument for %.200s%s", + keyword, + (parser->fname == NULL) ? "this function" : parser->fname, + (parser->fname == NULL) ? "" : "()"); + goto latefail; + } else { + if (PyDict_SetItem(*p_kwargs, keyword, value) < 0) { + goto latefail; + } + } + } else { + goto latefail; + } + } + } + } + + return 1; + /* Handle failures that have happened after we have tried to + * create *args and **kwargs, if they exist. */ +latefail: + if (p_args) { + Py_XDECREF(*p_args); + } + if (p_kwargs) { + Py_XDECREF(*p_kwargs); + } + return 0; +} + +static void +skipitem_fast(const char **p_format, va_list *p_va) +{ + const char *format = *p_format; + char c = *format++; + + if (p_va != NULL) { + (void) va_arg(*p_va, PyObject **); + } + + *p_format = format; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/init.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/init.c new file mode 100644 index 0000000..9215c2d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/init.c @@ -0,0 +1,24 @@ +#include +#include "CPy.h" + +struct ExcDummyStruct _CPy_ExcDummyStruct = { PyObject_HEAD_INIT(NULL) }; +PyObject *_CPy_ExcDummy = (PyObject *)&_CPy_ExcDummyStruct; + +// System-wide empty tuple constant +PyObject * __mypyc_empty_tuple__ = NULL; + +// Because its dynamic linker is more restricted than linux/OS X, +// Windows doesn't allow initializing globals with values from +// other dynamic libraries. This means we need to initialize +// things at load time. +void CPy_Init(void) { + _CPy_ExcDummyStruct.ob_base.ob_type = &PyBaseObject_Type; + + // Initialize system-wide empty tuple constant + if (__mypyc_empty_tuple__ == NULL) { + __mypyc_empty_tuple__ = PyTuple_New(0); + if (!__mypyc_empty_tuple__) { + CPyError_OutOfMemory(); + } + } +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/int_ops.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/int_ops.c new file mode 100644 index 0000000..333783a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/int_ops.c @@ -0,0 +1,647 @@ +// Int primitive operations (tagged arbitrary-precision integers) +// +// These are registered in mypyc.primitives.int_ops. 
+ +#include +#include "CPy.h" + +#ifdef _MSC_VER +#include +#endif + +#ifndef _WIN32 +// On 64-bit Linux and macOS, ssize_t and long are both 64 bits, and +// PyLong_FromLong is faster than PyLong_FromSsize_t, so use the faster one +#define CPyLong_FromSsize_t PyLong_FromLong +#else +// On 64-bit Windows, ssize_t is 64 bits but long is 32 bits, so we +// can't use the above trick +#define CPyLong_FromSsize_t PyLong_FromSsize_t +#endif + +#if defined(__GNUC__) || defined(__clang__) +# if defined(__x86_64__) || defined(_M_X64) || defined(__aarch64__) || (defined(__SIZEOF_POINTER__) && __SIZEOF_POINTER__ == 8) +# define CPY_CLZ(x) __builtin_clzll((unsigned long long)(x)) +# define CPY_BITS 64 +# else +# define CPY_CLZ(x) __builtin_clz((unsigned int)(x)) +# define CPY_BITS 32 +# endif +#endif + + +CPyTagged CPyTagged_FromSsize_t(Py_ssize_t value) { + // We use a Python object if the value shifted left by 1 is too + // large for Py_ssize_t + if (unlikely(CPyTagged_TooBig(value))) { + PyObject *object = PyLong_FromSsize_t(value); + return ((CPyTagged)object) | CPY_INT_TAG; + } else { + return value << 1; + } +} + +CPyTagged CPyTagged_FromVoidPtr(void *ptr) { + if ((uintptr_t)ptr > PY_SSIZE_T_MAX) { + PyObject *object = PyLong_FromVoidPtr(ptr); + return ((CPyTagged)object) | CPY_INT_TAG; + } else { + return CPyTagged_FromSsize_t((Py_ssize_t)ptr); + } +} + +CPyTagged CPyTagged_FromInt64(int64_t value) { + if (unlikely(CPyTagged_TooBigInt64(value))) { + PyObject *object = PyLong_FromLongLong(value); + return ((CPyTagged)object) | CPY_INT_TAG; + } else { + return value << 1; + } +} + +PyObject *CPyTagged_AsObject(CPyTagged x) { + PyObject *value; + if (unlikely(CPyTagged_CheckLong(x))) { + value = CPyTagged_LongAsObject(x); + Py_INCREF(value); + } else { + value = CPyLong_FromSsize_t(CPyTagged_ShortAsSsize_t(x)); + if (value == NULL) { + CPyError_OutOfMemory(); + } + } + return value; +} + +PyObject *CPyTagged_StealAsObject(CPyTagged x) { + PyObject *value; + if (unlikely(CPyTagged_CheckLong(x))) { + value = CPyTagged_LongAsObject(x); + } else { + value = CPyLong_FromSsize_t(CPyTagged_ShortAsSsize_t(x)); + if (value == NULL) { + CPyError_OutOfMemory(); + } + } + return value; +} + +Py_ssize_t CPyTagged_AsSsize_t(CPyTagged x) { + if (likely(CPyTagged_CheckShort(x))) { + return CPyTagged_ShortAsSsize_t(x); + } else { + return PyLong_AsSsize_t(CPyTagged_LongAsObject(x)); + } +} + +CPy_NOINLINE +void CPyTagged_IncRef(CPyTagged x) { + if (unlikely(CPyTagged_CheckLong(x))) { + Py_INCREF(CPyTagged_LongAsObject(x)); + } +} + +CPy_NOINLINE +void CPyTagged_DecRef(CPyTagged x) { + if (unlikely(CPyTagged_CheckLong(x))) { + Py_DECREF(CPyTagged_LongAsObject(x)); + } +} + +CPy_NOINLINE +void CPyTagged_XDecRef(CPyTagged x) { + if (unlikely(CPyTagged_CheckLong(x))) { + Py_XDECREF(CPyTagged_LongAsObject(x)); + } +} + +// Tagged int negation slow path, where the result may be a long integer +CPyTagged CPyTagged_Negate_(CPyTagged num) { + PyObject *num_obj = CPyTagged_AsObject(num); + PyObject *result = PyNumber_Negative(num_obj); + if (result == NULL) { + CPyError_OutOfMemory(); + } + Py_DECREF(num_obj); + return CPyTagged_StealFromObject(result); +} + +// Tagged int addition slow path, where the result may be a long integer +CPyTagged CPyTagged_Add_(CPyTagged left, CPyTagged right) { + PyObject *left_obj = CPyTagged_AsObject(left); + PyObject *right_obj = CPyTagged_AsObject(right); + PyObject *result = PyNumber_Add(left_obj, right_obj); + if (result == NULL) { + CPyError_OutOfMemory(); + } + Py_DECREF(left_obj); + 
Py_DECREF(right_obj); + return CPyTagged_StealFromObject(result); +} + +// Tagged int subtraction slow path, where the result may be a long integer +CPyTagged CPyTagged_Subtract_(CPyTagged left, CPyTagged right) { + PyObject *left_obj = CPyTagged_AsObject(left); + PyObject *right_obj = CPyTagged_AsObject(right); + PyObject *result = PyNumber_Subtract(left_obj, right_obj); + if (result == NULL) { + CPyError_OutOfMemory(); + } + Py_DECREF(left_obj); + Py_DECREF(right_obj); + return CPyTagged_StealFromObject(result); +} + +// Tagged int multiplication slow path, where the result may be a long integer +CPyTagged CPyTagged_Multiply_(CPyTagged left, CPyTagged right) { + PyObject *left_obj = CPyTagged_AsObject(left); + PyObject *right_obj = CPyTagged_AsObject(right); + PyObject *result = PyNumber_Multiply(left_obj, right_obj); + if (result == NULL) { + CPyError_OutOfMemory(); + } + Py_DECREF(left_obj); + Py_DECREF(right_obj); + return CPyTagged_StealFromObject(result); +} + +// Tagged int // slow path, where the result may be a long integer (or raise) +CPyTagged CPyTagged_FloorDivide_(CPyTagged left, CPyTagged right) { + PyObject *left_obj = CPyTagged_AsObject(left); + PyObject *right_obj = CPyTagged_AsObject(right); + PyObject *result = PyNumber_FloorDivide(left_obj, right_obj); + Py_DECREF(left_obj); + Py_DECREF(right_obj); + // Handle exceptions honestly because it could be ZeroDivisionError + if (result == NULL) { + return CPY_INT_TAG; + } else { + return CPyTagged_StealFromObject(result); + } +} + +// Tagged int % slow path, where the result may be a long integer (or raise) +CPyTagged CPyTagged_Remainder_(CPyTagged left, CPyTagged right) { + PyObject *left_obj = CPyTagged_AsObject(left); + PyObject *right_obj = CPyTagged_AsObject(right); + PyObject *result = PyNumber_Remainder(left_obj, right_obj); + Py_DECREF(left_obj); + Py_DECREF(right_obj); + // Handle exceptions honestly because it could be ZeroDivisionError + if (result == NULL) { + return CPY_INT_TAG; + } else { + return CPyTagged_StealFromObject(result); + } +} + +bool CPyTagged_IsEq_(CPyTagged left, CPyTagged right) { + if (CPyTagged_CheckShort(right)) { + return false; + } else { + PyObject *left_obj = CPyTagged_AsObject(left); + PyObject *right_obj = CPyTagged_AsObject(right); + int result = PyObject_RichCompareBool(left_obj, right_obj, Py_EQ); + Py_DECREF(left_obj); + Py_DECREF(right_obj); + if (result == -1) { + CPyError_OutOfMemory(); + } + return result; + } +} + +bool CPyTagged_IsLt_(CPyTagged left, CPyTagged right) { + PyObject *left_obj = CPyTagged_AsObject(left); + PyObject *right_obj = CPyTagged_AsObject(right); + int result = PyObject_RichCompareBool(left_obj, right_obj, Py_LT); + Py_DECREF(left_obj); + Py_DECREF(right_obj); + if (result == -1) { + CPyError_OutOfMemory(); + } + return result; +} + +PyObject *CPyLong_FromStrWithBase(PyObject *o, CPyTagged base) { + Py_ssize_t base_size_t = CPyTagged_AsSsize_t(base); + return PyLong_FromUnicodeObject(o, base_size_t); +} + +PyObject *CPyLong_FromStr(PyObject *o) { + CPyTagged base = CPyTagged_FromSsize_t(10); + return CPyLong_FromStrWithBase(o, base); +} + +CPyTagged CPyTagged_FromFloat(double f) { + if (f < ((double)CPY_TAGGED_MAX + 1.0) && f > (CPY_TAGGED_MIN - 1.0)) { + return (Py_ssize_t)f << 1; + } + PyObject *o = PyLong_FromDouble(f); + if (o == NULL) + return CPY_INT_TAG; + return CPyTagged_StealFromObject(o); +} + +PyObject *CPyBool_Str(bool b) { + return PyObject_Str(b ? 
Py_True : Py_False); +} + +// Bitwise op '&', '|' or '^' using the generic (slow) API +static CPyTagged GenericBitwiseOp(CPyTagged a, CPyTagged b, char op) { + PyObject *aobj = CPyTagged_AsObject(a); + PyObject *bobj = CPyTagged_AsObject(b); + PyObject *r; + if (op == '&') { + r = PyNumber_And(aobj, bobj); + } else if (op == '|') { + r = PyNumber_Or(aobj, bobj); + } else { + r = PyNumber_Xor(aobj, bobj); + } + if (unlikely(r == NULL)) { + CPyError_OutOfMemory(); + } + Py_DECREF(aobj); + Py_DECREF(bobj); + return CPyTagged_StealFromObject(r); +} + +// Return pointer to digits of a PyLong object. If it's a short +// integer, place digits in the buffer buf instead to avoid memory +// allocation (it's assumed to be big enough). Return the number of +// digits in *size. *size is negative if the integer is negative. +static digit *GetIntDigits(CPyTagged n, Py_ssize_t *size, digit *buf) { + if (CPyTagged_CheckShort(n)) { + Py_ssize_t val = CPyTagged_ShortAsSsize_t(n); + bool neg = val < 0; + int len = 1; + if (neg) { + val = -val; + } + buf[0] = val & PyLong_MASK; + if (val > (Py_ssize_t)PyLong_MASK) { + val >>= PyLong_SHIFT; + buf[1] = val & PyLong_MASK; + if (val > (Py_ssize_t)PyLong_MASK) { + buf[2] = val >> PyLong_SHIFT; + len = 3; + } else { + len = 2; + } + } + *size = neg ? -len : len; + return buf; + } else { + PyLongObject *obj = (PyLongObject *)CPyTagged_LongAsObject(n); + *size = CPY_LONG_SIZE_SIGNED(obj); + return &CPY_LONG_DIGIT(obj, 0); + } +} + +// Shared implementation of bitwise '&', '|' and '^' (specified by op) for at least +// one long operand. This is somewhat optimized for performance. +CPyTagged CPyTagged_BitwiseLongOp_(CPyTagged a, CPyTagged b, char op) { + // Directly access the digits, as there is no fast C API function for this. + digit abuf[3]; + digit bbuf[3]; + Py_ssize_t asize; + Py_ssize_t bsize; + digit *adigits = GetIntDigits(a, &asize, abuf); + digit *bdigits = GetIntDigits(b, &bsize, bbuf); + + if (unlikely(asize < 0 || bsize < 0)) { + // Negative operand. This is slower, but bitwise ops on them are pretty rare. + return GenericBitwiseOp(a, b, op); + } + // Optimized implementation for two non-negative integers. + // Swap a and b as needed to ensure a is no longer than b. + if (asize > bsize) { + digit *tmp = adigits; + adigits = bdigits; + bdigits = tmp; + Py_ssize_t tmp_size = asize; + asize = bsize; + bsize = tmp_size; + } + void *digits = NULL; + PyLongWriter *writer = PyLongWriter_Create(0, op == '&' ? 
asize : bsize, &digits); + if (unlikely(writer == NULL)) { + CPyError_OutOfMemory(); + } + Py_ssize_t i; + if (op == '&') { + for (i = 0; i < asize; i++) { + ((digit *)digits)[i] = adigits[i] & bdigits[i]; + } + } else { + if (op == '|') { + for (i = 0; i < asize; i++) { + ((digit *)digits)[i] = adigits[i] | bdigits[i]; + } + } else { + for (i = 0; i < asize; i++) { + ((digit *)digits)[i] = adigits[i] ^ bdigits[i]; + } + } + for (; i < bsize; i++) { + ((digit *)digits)[i] = bdigits[i]; + } + } + return CPyTagged_StealFromObject(PyLongWriter_Finish(writer)); +} + +// Bitwise '~' slow path +CPyTagged CPyTagged_Invert_(CPyTagged num) { + PyObject *obj = CPyTagged_AsObject(num); + PyObject *result = PyNumber_Invert(obj); + if (unlikely(result == NULL)) { + CPyError_OutOfMemory(); + } + Py_DECREF(obj); + return CPyTagged_StealFromObject(result); +} + +// Bitwise '>>' slow path +CPyTagged CPyTagged_Rshift_(CPyTagged left, CPyTagged right) { + // Long integer or negative shift -- use generic op + PyObject *lobj = CPyTagged_AsObject(left); + PyObject *robj = CPyTagged_AsObject(right); + PyObject *result = PyNumber_Rshift(lobj, robj); + Py_DECREF(lobj); + Py_DECREF(robj); + if (result == NULL) { + // Propagate error (could be negative shift count) + return CPY_INT_TAG; + } + return CPyTagged_StealFromObject(result); +} + +// Bitwise '<<' slow path +CPyTagged CPyTagged_Lshift_(CPyTagged left, CPyTagged right) { + // Long integer or out of range shift -- use generic op + PyObject *lobj = CPyTagged_AsObject(left); + PyObject *robj = CPyTagged_AsObject(right); + PyObject *result = PyNumber_Lshift(lobj, robj); + Py_DECREF(lobj); + Py_DECREF(robj); + if (result == NULL) { + // Propagate error (could be negative shift count) + return CPY_INT_TAG; + } + return CPyTagged_StealFromObject(result); +} + +// i64 unboxing slow path +int64_t CPyLong_AsInt64_(PyObject *o) { + int overflow; + int64_t result = PyLong_AsLongLongAndOverflow(o, &overflow); + if (result == -1) { + if (PyErr_Occurred()) { + return CPY_LL_INT_ERROR; + } else if (overflow) { + PyErr_SetString(PyExc_OverflowError, "int too large to convert to i64"); + return CPY_LL_INT_ERROR; + } + } + return result; +} + +int64_t CPyInt64_Divide(int64_t x, int64_t y) { + if (y == 0) { + PyErr_SetString(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + return CPY_LL_INT_ERROR; + } + if (y == -1 && x == INT64_MIN) { + PyErr_SetString(PyExc_OverflowError, "integer division overflow"); + return CPY_LL_INT_ERROR; + } + int64_t d = x / y; + // Adjust for Python semantics + if (((x < 0) != (y < 0)) && d * y != x) { + d--; + } + return d; +} + +int64_t CPyInt64_Remainder(int64_t x, int64_t y) { + if (y == 0) { + PyErr_SetString(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + return CPY_LL_INT_ERROR; + } + // Edge case: avoid core dump + if (y == -1 && x == INT64_MIN) { + return 0; + } + int64_t d = x % y; + // Adjust for Python semantics + if (((x < 0) != (y < 0)) && d != 0) { + d += y; + } + return d; +} + +// i32 unboxing slow path +int32_t CPyLong_AsInt32_(PyObject *o) { + int overflow; + long result = PyLong_AsLongAndOverflow(o, &overflow); + if (result > 0x7fffffffLL || result < -0x80000000LL) { + overflow = 1; + result = -1; + } + if (result == -1) { + if (PyErr_Occurred()) { + return CPY_LL_INT_ERROR; + } else if (overflow) { + PyErr_SetString(PyExc_OverflowError, "int too large to convert to i32"); + return CPY_LL_INT_ERROR; + } + } + return result; +} + +int32_t CPyInt32_Divide(int32_t x, int32_t y) { + if (y == 0) { + 
PyErr_SetString(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + return CPY_LL_INT_ERROR; + } + if (y == -1 && x == INT32_MIN) { + PyErr_SetString(PyExc_OverflowError, "integer division overflow"); + return CPY_LL_INT_ERROR; + } + int32_t d = x / y; + // Adjust for Python semantics + if (((x < 0) != (y < 0)) && d * y != x) { + d--; + } + return d; +} + +int32_t CPyInt32_Remainder(int32_t x, int32_t y) { + if (y == 0) { + PyErr_SetString(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + return CPY_LL_INT_ERROR; + } + // Edge case: avoid core dump + if (y == -1 && x == INT32_MIN) { + return 0; + } + int32_t d = x % y; + // Adjust for Python semantics + if (((x < 0) != (y < 0)) && d != 0) { + d += y; + } + return d; +} + +void CPyInt32_Overflow() { + PyErr_SetString(PyExc_OverflowError, "int too large to convert to i32"); +} + +// i16 unboxing slow path +int16_t CPyLong_AsInt16_(PyObject *o) { + int overflow; + long result = PyLong_AsLongAndOverflow(o, &overflow); + if (result > 0x7fff || result < -0x8000) { + overflow = 1; + result = -1; + } + if (result == -1) { + if (PyErr_Occurred()) { + return CPY_LL_INT_ERROR; + } else if (overflow) { + PyErr_SetString(PyExc_OverflowError, "int too large to convert to i16"); + return CPY_LL_INT_ERROR; + } + } + return result; +} + +int16_t CPyInt16_Divide(int16_t x, int16_t y) { + if (y == 0) { + PyErr_SetString(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + return CPY_LL_INT_ERROR; + } + if (y == -1 && x == INT16_MIN) { + PyErr_SetString(PyExc_OverflowError, "integer division overflow"); + return CPY_LL_INT_ERROR; + } + int16_t d = x / y; + // Adjust for Python semantics + if (((x < 0) != (y < 0)) && d * y != x) { + d--; + } + return d; +} + +int16_t CPyInt16_Remainder(int16_t x, int16_t y) { + if (y == 0) { + PyErr_SetString(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + return CPY_LL_INT_ERROR; + } + // Edge case: avoid core dump + if (y == -1 && x == INT16_MIN) { + return 0; + } + int16_t d = x % y; + // Adjust for Python semantics + if (((x < 0) != (y < 0)) && d != 0) { + d += y; + } + return d; +} + +void CPyInt16_Overflow() { + PyErr_SetString(PyExc_OverflowError, "int too large to convert to i16"); +} + +// u8 unboxing slow path +uint8_t CPyLong_AsUInt8_(PyObject *o) { + int overflow; + long result = PyLong_AsLongAndOverflow(o, &overflow); + if (result < 0 || result >= 256) { + overflow = 1; + result = -1; + } + if (result == -1) { + if (PyErr_Occurred()) { + return CPY_LL_UINT_ERROR; + } else if (overflow) { + PyErr_SetString(PyExc_OverflowError, "int too large or small to convert to u8"); + return CPY_LL_UINT_ERROR; + } + } + return result; +} + +void CPyUInt8_Overflow() { + PyErr_SetString(PyExc_OverflowError, "int too large or small to convert to u8"); +} + +double CPyTagged_TrueDivide(CPyTagged x, CPyTagged y) { + if (unlikely(y == 0)) { + PyErr_SetString(PyExc_ZeroDivisionError, "division by zero"); + return CPY_FLOAT_ERROR; + } + if (likely(!CPyTagged_CheckLong(x) && !CPyTagged_CheckLong(y))) { + return (double)((Py_ssize_t)x >> 1) / (double)((Py_ssize_t)y >> 1); + } else { + PyObject *xo = CPyTagged_AsObject(x); + PyObject *yo = CPyTagged_AsObject(y); + PyObject *result = PyNumber_TrueDivide(xo, yo); + if (result == NULL) { + return CPY_FLOAT_ERROR; + } + return PyFloat_AsDouble(result); + } + return 1.0; +} + +// int.bit_length() +CPyTagged CPyTagged_BitLength(CPyTagged self) { + // Handle zero + if (self == 0) { + return 0; + } + + // Fast path for small (tagged) 
ints + if (CPyTagged_CheckShort(self)) { + Py_ssize_t val = CPyTagged_ShortAsSsize_t(self); + Py_ssize_t absval = val < 0 ? -val : val; + int bits = 0; + if (absval) { +#if defined(_MSC_VER) + #if defined(_WIN64) + unsigned long idx; + if (_BitScanReverse64(&idx, (unsigned __int64)absval)) { + bits = (int)(idx + 1); + } + #else + unsigned long idx; + if (_BitScanReverse(&idx, (unsigned long)absval)) { + bits = (int)(idx + 1); + } + #endif +#elif defined(__GNUC__) || defined(__clang__) + bits = (int)(CPY_BITS - CPY_CLZ(absval)); +#else + // Fallback to loop if no builtin + while (absval) { + absval >>= 1; + bits++; + } +#endif + } + return bits << 1; + } + + // Slow path for big ints + PyObject *pyint = CPyTagged_AsObject(self); + int bits = _PyLong_NumBits(pyint); + Py_DECREF(pyint); + if (bits < 0) { + // _PyLong_NumBits sets an error on failure + return CPY_INT_TAG; + } + return bits << 1; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/librt_base64.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/librt_base64.c new file mode 100644 index 0000000..1720359 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/librt_base64.c @@ -0,0 +1,311 @@ +#define PY_SSIZE_T_CLEAN +#include +#include +#include "librt_base64.h" +#include "libbase64.h" +#include "pythoncapi_compat.h" + +#ifdef MYPYC_EXPERIMENTAL + +static PyObject * +b64decode_handle_invalid_input( + PyObject *out_bytes, char *outbuf, size_t max_out, const char *src, size_t srclen); + +#define BASE64_MAXBIN ((PY_SSIZE_T_MAX - 3) / 2) + +#define STACK_BUFFER_SIZE 1024 + +static PyObject * +b64encode_internal(PyObject *obj) { + unsigned char *ascii_data; + char *bin_data; + int leftbits = 0; + unsigned char this_ch; + unsigned int leftchar = 0; + Py_ssize_t bin_len, out_len; + PyBytesWriter *writer; + int newline = 0; // TODO + + if (!PyBytes_Check(obj)) { + PyErr_SetString(PyExc_TypeError, "base64() expects a bytes object"); + return NULL; + } + + bin_data = PyBytes_AS_STRING(obj); + bin_len = PyBytes_GET_SIZE(obj); + assert(bin_len >= 0); + + if (bin_len > BASE64_MAXBIN) { + PyErr_SetString(PyExc_ValueError, "Too much data for base64 line"); + return NULL; + } + + Py_ssize_t buflen = 4 * bin_len / 3 + 4; + char *buf; + char stack_buf[STACK_BUFFER_SIZE]; + if (buflen <= STACK_BUFFER_SIZE) { + buf = stack_buf; + } else { + buf = PyMem_Malloc(buflen); + if (buf == NULL) { + return PyErr_NoMemory(); + } + } + size_t actual_len; + base64_encode(bin_data, bin_len, buf, &actual_len, 0); + PyObject *res = PyBytes_FromStringAndSize(buf, actual_len); + if (buflen > STACK_BUFFER_SIZE) + PyMem_Free(buf); + return res; +} + +static PyObject* +b64encode(PyObject *self, PyObject *const *args, size_t nargs) { + if (nargs != 1) { + PyErr_SetString(PyExc_TypeError, "b64encode() takes exactly one argument"); + return 0; + } + return b64encode_internal(args[0]); +} + +static inline int +is_valid_base64_char(char c, bool allow_padding) { + return ((c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || + (c >= '0' && c <= '9') || (c == '+') || (c == '/') || (allow_padding && c == '=')); +} + +static PyObject * +b64decode_internal(PyObject *arg) { + const char *src; + Py_ssize_t srclen_ssz; + + // Get input pointer and length + if (PyBytes_Check(arg)) { + src = PyBytes_AS_STRING(arg); + srclen_ssz = PyBytes_GET_SIZE(arg); + } else if (PyUnicode_Check(arg)) { + if (!PyUnicode_IS_ASCII(arg)) { + PyErr_SetString(PyExc_ValueError, + "string argument should contain only ASCII characters"); + return NULL; + } + src = (const char 
*)PyUnicode_1BYTE_DATA(arg); + srclen_ssz = PyUnicode_GET_LENGTH(arg); + } else { + PyErr_SetString(PyExc_TypeError, + "argument should be a bytes-like object or ASCII string"); + return NULL; + } + + // Fast-path: empty input + if (srclen_ssz == 0) { + return PyBytes_FromStringAndSize(NULL, 0); + } + + // Quickly ignore invalid characters at the end. Other invalid characters + // are also accepted, but they need a slow path. + while (srclen_ssz > 0 && !is_valid_base64_char(src[srclen_ssz - 1], true)) { + srclen_ssz--; + } + + // Compute an output capacity that's at least 3/4 of input, without overflow: + // ceil(3/4 * N) == N - floor(N/4) + size_t srclen = (size_t)srclen_ssz; + size_t max_out = srclen - (srclen / 4); + if (max_out == 0) { + max_out = 1; // defensive (srclen > 0 implies >= 1 anyway) + } + if (max_out > (size_t)PY_SSIZE_T_MAX) { + PyErr_SetString(PyExc_OverflowError, "input too large"); + return NULL; + } + + // Allocate output bytes (uninitialized) of the max capacity + PyObject *out_bytes = PyBytes_FromStringAndSize(NULL, (Py_ssize_t)max_out); + if (out_bytes == NULL) { + return NULL; // Propagate memory error + } + + char *outbuf = PyBytes_AS_STRING(out_bytes); + size_t outlen = max_out; + + int ret = base64_decode(src, srclen, outbuf, &outlen, 0); + + if (ret != 1) { + if (ret == 0) { + // Slow path: handle non-base64 input + return b64decode_handle_invalid_input(out_bytes, outbuf, max_out, src, srclen); + } + Py_DECREF(out_bytes); + if (ret == -1) { + PyErr_SetString(PyExc_NotImplementedError, "base64 codec not available in this build"); + } else { + PyErr_SetString(PyExc_RuntimeError, "base64_decode failed"); + } + return NULL; + } + + // Sanity-check contract (decoder must not overflow our buffer) + if (outlen > max_out) { + Py_DECREF(out_bytes); + PyErr_SetString(PyExc_RuntimeError, "decoder wrote past output buffer"); + return NULL; + } + + // Shrink in place to the actual decoded length + if (_PyBytes_Resize(&out_bytes, (Py_ssize_t)outlen) < 0) { + // _PyBytes_Resize sets an exception and may free the old object + return NULL; + } + return out_bytes; +} + +// Process non-base64 input by ignoring non-base64 characters, for compatibility +// with stdlib b64decode. 
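// (Illustrative example of the intent, not an exhaustive specification: an
// input such as "YWJ\njNA==" has the newline dropped here and then decodes
// exactly like "YWJjNA==", mirroring base64.b64decode's default
// non-validating behaviour, while input whose junk-stripped length is not a
// multiple of 4 is rejected with the same "Incorrect padding" ValueError the
// stdlib raises.)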
+static PyObject * +b64decode_handle_invalid_input( + PyObject *out_bytes, char *outbuf, size_t max_out, const char *src, size_t srclen) +{ + // Copy input to a temporary buffer, with non-base64 characters and extra suffix + // characters removed + size_t newbuf_len = 0; + char *newbuf = PyMem_Malloc(srclen); + if (newbuf == NULL) { + Py_DECREF(out_bytes); + return PyErr_NoMemory(); + } + + // Copy base64 characters and some padding to the new buffer + for (size_t i = 0; i < srclen; i++) { + char c = src[i]; + if (is_valid_base64_char(c, false)) { + newbuf[newbuf_len++] = c; + } else if (c == '=') { + // Copy a necessary amount of padding + int remainder = newbuf_len % 4; + if (remainder == 0) { + // No padding needed + break; + } + int numpad = 4 - remainder; + // Check that there is at least the required amount padding (CPython ignores + // extra padding) + while (numpad > 0) { + if (i == srclen || src[i] != '=') { + break; + } + newbuf[newbuf_len++] = '='; + i++; + numpad--; + // Skip non-base64 alphabet characters within padding + while (i < srclen && !is_valid_base64_char(src[i], true)) { + i++; + } + } + break; + } + } + + // Stdlib always performs a non-strict padding check + if (newbuf_len % 4 != 0) { + Py_DECREF(out_bytes); + PyMem_Free(newbuf); + PyErr_SetString(PyExc_ValueError, "Incorrect padding"); + return NULL; + } + + size_t outlen = max_out; + int ret = base64_decode(newbuf, newbuf_len, outbuf, &outlen, 0); + PyMem_Free(newbuf); + + if (ret != 1) { + Py_DECREF(out_bytes); + if (ret == 0) { + PyErr_SetString(PyExc_ValueError, "Only base64 data is allowed"); + } + if (ret == -1) { + PyErr_SetString(PyExc_NotImplementedError, "base64 codec not available in this build"); + } else { + PyErr_SetString(PyExc_RuntimeError, "base64_decode failed"); + } + return NULL; + } + + // Shrink in place to the actual decoded length + if (_PyBytes_Resize(&out_bytes, (Py_ssize_t)outlen) < 0) { + // _PyBytes_Resize sets an exception and may free the old object + return NULL; + } + return out_bytes; +} + + +static PyObject* +b64decode(PyObject *self, PyObject *const *args, size_t nargs) { + if (nargs != 1) { + PyErr_SetString(PyExc_TypeError, "b64decode() takes exactly one argument"); + return 0; + } + return b64decode_internal(args[0]); +} + +#endif + +static PyMethodDef librt_base64_module_methods[] = { +#ifdef MYPYC_EXPERIMENTAL + {"b64encode", (PyCFunction)b64encode, METH_FASTCALL, PyDoc_STR("Encode bytes object using Base64.")}, + {"b64decode", (PyCFunction)b64decode, METH_FASTCALL, PyDoc_STR("Decode a Base64 encoded bytes object or ASCII string.")}, +#endif + {NULL, NULL, 0, NULL} +}; + +static int +base64_abi_version(void) { + return 0; +} + +static int +base64_api_version(void) { + return 0; +} + +static int +librt_base64_module_exec(PyObject *m) +{ +#ifdef MYPYC_EXPERIMENTAL + // Export mypy internal C API, be careful with the order! 
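    // (Order matters because librt_base64.h resolves consumers by casting
    //  fixed slots of this capsule array back to function pointers -- for
    //  example slot 2 is b64encode_internal -- so entries must not be
    //  inserted or reordered without bumping LIBRT_BASE64_ABI_VERSION, which
    //  importers check at load time.)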
+ static void *base64_api[LIBRT_BASE64_API_LEN] = { + (void *)base64_abi_version, + (void *)base64_api_version, + (void *)b64encode_internal, + }; + PyObject *c_api_object = PyCapsule_New((void *)base64_api, "librt.base64._C_API", NULL); + if (PyModule_Add(m, "_C_API", c_api_object) < 0) { + return -1; + } +#endif + return 0; +} + +static PyModuleDef_Slot librt_base64_module_slots[] = { + {Py_mod_exec, librt_base64_module_exec}, +#ifdef Py_MOD_GIL_NOT_USED + {Py_mod_gil, Py_MOD_GIL_NOT_USED}, +#endif + {0, NULL} +}; + +static PyModuleDef librt_base64_module = { + .m_base = PyModuleDef_HEAD_INIT, + .m_name = "base64", + .m_doc = "Fast base64 encoding and decoding optimized for mypyc", + .m_size = 0, + .m_methods = librt_base64_module_methods, + .m_slots = librt_base64_module_slots, +}; + +PyMODINIT_FUNC +PyInit_base64(void) +{ + return PyModuleDef_Init(&librt_base64_module); +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/librt_base64.h b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/librt_base64.h new file mode 100644 index 0000000..cc97e54 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/librt_base64.h @@ -0,0 +1,60 @@ +#ifndef LIBRT_BASE64_H +#define LIBRT_BASE64_H + +#ifndef MYPYC_EXPERIMENTAL + +static int +import_librt_base64(void) +{ + // All librt.base64 features are experimental for now, so don't set up the API here + return 0; +} + +#else // MYPYC_EXPERIMENTAL + +#define LIBRT_BASE64_ABI_VERSION 0 +#define LIBRT_BASE64_API_VERSION 0 +#define LIBRT_BASE64_API_LEN 3 + +static void *LibRTBase64_API[LIBRT_BASE64_API_LEN]; + +#define LibRTBase64_ABIVersion (*(int (*)(void)) LibRTBase64_API[0]) +#define LibRTBase64_APIVersion (*(int (*)(void)) LibRTBase64_API[1]) +#define LibRTBase64_b64encode_internal (*(PyObject* (*)(PyObject *source)) LibRTBase64_API[2]) + +static int +import_librt_base64(void) +{ + PyObject *mod = PyImport_ImportModule("librt.base64"); + if (mod == NULL) + return -1; + Py_DECREF(mod); // we import just for the side effect of making the below work. + void *capsule = PyCapsule_Import("librt.base64._C_API", 0); + if (capsule == NULL) + return -1; + memcpy(LibRTBase64_API, capsule, sizeof(LibRTBase64_API)); + if (LibRTBase64_ABIVersion() != LIBRT_BASE64_ABI_VERSION) { + char err[128]; + snprintf(err, sizeof(err), "ABI version conflict for librt.base64, expected %d, found %d", + LIBRT_BASE64_ABI_VERSION, + LibRTBase64_ABIVersion() + ); + PyErr_SetString(PyExc_ValueError, err); + return -1; + } + if (LibRTBase64_APIVersion() < LIBRT_BASE64_API_VERSION) { + char err[128]; + snprintf(err, sizeof(err), + "API version conflict for librt.base64, expected %d or newer, found %d (hint: upgrade librt)", + LIBRT_BASE64_API_VERSION, + LibRTBase64_APIVersion() + ); + PyErr_SetString(PyExc_ValueError, err); + return -1; + } + return 0; +} + +#endif // MYPYC_EXPERIMENTAL + +#endif // LIBRT_BASE64_H diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/librt_internal.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/librt_internal.c new file mode 100644 index 0000000..fe18c54 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/librt_internal.c @@ -0,0 +1,1037 @@ +#include "pythoncapi_compat.h" + +#define PY_SSIZE_T_CLEAN +#include +#include +#include "CPy.h" +#define LIBRT_INTERNAL_MODULE +#include "librt_internal.h" + +#define START_SIZE 512 + +// See comment in read_int_internal() on motivation for these values. 
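+// Each fixed-size encoding reserves its low bits as a tag (see the int format
+// comment further down), which leaves 7, 14 and 29 payload bits respectively.
+// The 2**7 = 128, 2**14 = 16384 and 2**29 = 536870912 representable values are
+// skewed towards positive numbers, giving the ranges below.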
+#define MIN_ONE_BYTE_INT -10 +#define MAX_ONE_BYTE_INT 117 // 2 ** 7 - 1 - 10 +#define MIN_TWO_BYTES_INT -100 +#define MAX_TWO_BYTES_INT 16283 // 2 ** (8 + 6) - 1 - 100 +#define MIN_FOUR_BYTES_INT -10000 +#define MAX_FOUR_BYTES_INT 536860911 // 2 ** (3 * 8 + 5) - 1 - 10000 + +#define TWO_BYTES_INT_BIT 1 +#define FOUR_BYTES_INT_BIT 2 +#define LONG_INT_BIT 4 + +#define FOUR_BYTES_INT_TRAILER 3 +// We add one reserved bit here so that we can potentially support +// 8 bytes format in the future. +#define LONG_INT_TRAILER 15 + +#define CPY_BOOL_ERROR 2 +#define CPY_NONE_ERROR 2 +#define CPY_NONE 1 + +#define _CHECK_READ_BUFFER(data, err) if (unlikely(_check_read_buffer(data) == CPY_NONE_ERROR)) \ + return err; +#define _CHECK_WRITE_BUFFER(data, err) if (unlikely(_check_write_buffer(data) == CPY_NONE_ERROR)) \ + return err; +#define _CHECK_WRITE(data, need) if (unlikely(_check_size((WriteBufferObject *)data, need) == CPY_NONE_ERROR)) \ + return CPY_NONE_ERROR; +#define _CHECK_READ(data, size, err) if (unlikely(_check_read((ReadBufferObject *)data, size) == CPY_NONE_ERROR)) \ + return err; + +#define _READ(result, data, type) \ + do { \ + *(result) = *(type *)(((ReadBufferObject *)data)->ptr); \ + ((ReadBufferObject *)data)->ptr += sizeof(type); \ + } while (0) + +#define _WRITE(data, type, v) \ + do { \ + *(type *)(((WriteBufferObject *)data)->ptr) = v; \ + ((WriteBufferObject *)data)->ptr += sizeof(type); \ + } while (0) + +// +// ReadBuffer +// + +#if PY_BIG_ENDIAN +uint16_t reverse_16(uint16_t number) { + return (number << 8) | (number >> 8); +} + +uint32_t reverse_32(uint32_t number) { + return ((number & 0xFF) << 24) | ((number & 0xFF00) << 8) | ((number & 0xFF0000) >> 8) | (number >> 24); +} +#endif + +typedef struct { + PyObject_HEAD + char *ptr; // Current read location in the buffer + char *end; // End of the buffer + PyObject *source; // The object that contains the buffer +} ReadBufferObject; + +static PyTypeObject ReadBufferType; + +static PyObject* +ReadBuffer_new(PyTypeObject *type, PyObject *args, PyObject *kwds) +{ + if (type != &ReadBufferType) { + PyErr_SetString(PyExc_TypeError, "ReadBuffer should not be subclassed"); + return NULL; + } + + ReadBufferObject *self = (ReadBufferObject *)type->tp_alloc(type, 0); + if (self != NULL) { + self->source = NULL; + self->ptr = NULL; + self->end = NULL; + } + return (PyObject *) self; +} + +static int +ReadBuffer_init_internal(ReadBufferObject *self, PyObject *source) { + if (!PyBytes_CheckExact(source)) { + PyErr_SetString(PyExc_TypeError, "source must be a bytes object"); + return -1; + } + self->source = Py_NewRef(source); + self->ptr = PyBytes_AS_STRING(source); + self->end = self->ptr + PyBytes_GET_SIZE(source); + return 0; +} + +static PyObject* +ReadBuffer_internal(PyObject *source) { + ReadBufferObject *self = (ReadBufferObject *)ReadBufferType.tp_alloc(&ReadBufferType, 0); + if (self == NULL) + return NULL; + self->ptr = NULL; + self->end = NULL; + self->source = NULL; + if (ReadBuffer_init_internal(self, source) == -1) { + Py_DECREF(self); + return NULL; + } + return (PyObject *)self; +} + +static int +ReadBuffer_init(ReadBufferObject *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"source", NULL}; + PyObject *source = NULL; + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "|O", kwlist, &source)) + return -1; + + return ReadBuffer_init_internal(self, source); +} + +static void +ReadBuffer_dealloc(ReadBufferObject *self) +{ + Py_CLEAR(self->source); + Py_TYPE(self)->tp_free((PyObject *)self); +} + +static 
PyMethodDef ReadBuffer_methods[] = { + {NULL} /* Sentinel */ +}; + +static PyTypeObject ReadBufferType = { + .ob_base = PyVarObject_HEAD_INIT(NULL, 0) + .tp_name = "ReadBuffer", + .tp_doc = PyDoc_STR("Mypy cache buffer objects"), + .tp_basicsize = sizeof(ReadBufferObject), + .tp_itemsize = 0, + .tp_flags = Py_TPFLAGS_DEFAULT, + .tp_new = ReadBuffer_new, + .tp_init = (initproc) ReadBuffer_init, + .tp_dealloc = (destructor) ReadBuffer_dealloc, + .tp_methods = ReadBuffer_methods, +}; + +// +// WriteBuffer +// + +typedef struct { + PyObject_HEAD + char *buf; // Beginning of the buffer + char *ptr; // Current write location in the buffer + char *end; // End of the buffer +} WriteBufferObject; + +static PyTypeObject WriteBufferType; + +static PyObject* +WriteBuffer_new(PyTypeObject *type, PyObject *args, PyObject *kwds) +{ + if (type != &WriteBufferType) { + PyErr_SetString(PyExc_TypeError, "WriteBuffer cannot be subclassed"); + return NULL; + } + + WriteBufferObject *self = (WriteBufferObject *)type->tp_alloc(type, 0); + if (self != NULL) { + self->buf = NULL; + self->ptr = NULL; + self->end = NULL; + } + return (PyObject *)self; +} + +static int +WriteBuffer_init_internal(WriteBufferObject *self) { + Py_ssize_t size = START_SIZE; + self->buf = PyMem_Malloc(size + 1); + if (self->buf == NULL) { + PyErr_NoMemory(); + return -1; + } + self->ptr = self->buf; + self->end = self->buf + size; + return 0; +} + +static PyObject* +WriteBuffer_internal(void) { + WriteBufferObject *self = (WriteBufferObject *)WriteBufferType.tp_alloc(&WriteBufferType, 0); + if (self == NULL) + return NULL; + self->buf = NULL; + self->ptr = NULL; + self->end = NULL; + if (WriteBuffer_init_internal(self) == -1) { + Py_DECREF(self); + return NULL; + } + return (PyObject *)self; +} + +static int +WriteBuffer_init(WriteBufferObject *self, PyObject *args, PyObject *kwds) +{ + if (!PyArg_ParseTuple(args, "")) { + return -1; + } + + if (kwds != NULL && PyDict_Size(kwds) > 0) { + PyErr_SetString(PyExc_TypeError, + "WriteBuffer() takes no keyword arguments"); + return -1; + } + + return WriteBuffer_init_internal(self); +} + +static void +WriteBuffer_dealloc(WriteBufferObject *self) +{ + PyMem_Free(self->buf); + self->buf = NULL; + Py_TYPE(self)->tp_free((PyObject *)self); +} + +static PyObject* +WriteBuffer_getvalue_internal(PyObject *self) +{ + WriteBufferObject *obj = (WriteBufferObject *)self; + return PyBytes_FromStringAndSize(obj->buf, obj->ptr - obj->buf); +} + +static PyObject* +WriteBuffer_getvalue(WriteBufferObject *self, PyObject *Py_UNUSED(ignored)) +{ + return PyBytes_FromStringAndSize(self->buf, self->ptr - self->buf); +} + +static PyMethodDef WriteBuffer_methods[] = { + {"getvalue", (PyCFunction) WriteBuffer_getvalue, METH_NOARGS, + "Return the buffer content as bytes object" + }, + {NULL} /* Sentinel */ +}; + +static PyTypeObject WriteBufferType = { + .ob_base = PyVarObject_HEAD_INIT(NULL, 0) + .tp_name = "WriteBuffer", + .tp_doc = PyDoc_STR("Mypy cache buffer objects"), + .tp_basicsize = sizeof(WriteBufferObject), + .tp_itemsize = 0, + .tp_flags = Py_TPFLAGS_DEFAULT, + .tp_new = WriteBuffer_new, + .tp_init = (initproc) WriteBuffer_init, + .tp_dealloc = (destructor) WriteBuffer_dealloc, + .tp_methods = WriteBuffer_methods, +}; + +// ---------- + +static inline char +_check_read_buffer(PyObject *data) { + if (unlikely(Py_TYPE(data) != &ReadBufferType)) { + PyErr_Format( + PyExc_TypeError, "data must be a ReadBuffer object, got %s", Py_TYPE(data)->tp_name + ); + return CPY_NONE_ERROR; + } + return CPY_NONE; +} + 
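+// Illustrative round trip (a sketch, not exercised in this file): callers are
+// expected to serialize into a WriteBuffer and later read the snapshot back
+// through a ReadBuffer, roughly like this (name and line are placeholder
+// PyObject* values; error handling omitted):
+//
+//     PyObject *wbuf = WriteBuffer_internal();               // growable output buffer
+//     write_str_internal(wbuf, name);                        // [length][UTF-8 bytes]
+//     write_int_internal(wbuf, CPyTagged_BorrowFromObject(line));
+//     PyObject *blob = WriteBuffer_getvalue_internal(wbuf);  // bytes snapshot
+//
+//     PyObject *rbuf = ReadBuffer_internal(blob);            // cursor over the bytes
+//     PyObject *name2 = read_str_internal(rbuf);
+//     CPyTagged line2 = read_int_internal(rbuf);
+//
+// The same operations are exposed to Python as librt.internal.WriteBuffer,
+// write_str(), ReadBuffer(), read_str(), etc. (see the method table below).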
+static inline char +_check_write_buffer(PyObject *data) { + if (unlikely(Py_TYPE(data) != &WriteBufferType)) { + PyErr_Format( + PyExc_TypeError, "data must be a WriteBuffer object, got %s", Py_TYPE(data)->tp_name + ); + return CPY_NONE_ERROR; + } + return CPY_NONE; +} + +static inline char +_check_size(WriteBufferObject *data, Py_ssize_t need) { + if (data->end - data->ptr >= need) + return CPY_NONE; + Py_ssize_t index = data->ptr - data->buf; + Py_ssize_t target = index + need; + Py_ssize_t size = data->end - data->buf; + do { + size *= 2; + } while (target >= size); + data->buf = PyMem_Realloc(data->buf, size); + if (unlikely(data->buf == NULL)) { + PyErr_NoMemory(); + return CPY_NONE_ERROR; + } + data->ptr = data->buf + index; + data->end = data->buf + size; + return CPY_NONE; +} + +static inline char +_check_read(ReadBufferObject *data, Py_ssize_t need) { + if (unlikely((data->end - data->ptr) < need)) { + PyErr_SetString(PyExc_ValueError, "reading past the buffer end"); + return CPY_NONE_ERROR; + } + return CPY_NONE; +} + +/* +bool format: single byte + \x00 - False + \x01 - True +*/ + +static char +read_bool_internal(PyObject *data) { + _CHECK_READ(data, 1, CPY_BOOL_ERROR) + char res; + _READ(&res, data, char); + if (unlikely((res != 0) & (res != 1))) { + PyErr_SetString(PyExc_ValueError, "invalid bool value"); + return CPY_BOOL_ERROR; + } + return res; +} + +static PyObject* +read_bool(PyObject *self, PyObject *const *args, size_t nargs, PyObject *kwnames) { + static const char * const kwlist[] = {"data", 0}; + static CPyArg_Parser parser = {"O:read_bool", kwlist, 0}; + PyObject *data; + if (unlikely(!CPyArg_ParseStackAndKeywordsOneArg(args, nargs, kwnames, &parser, &data))) { + return NULL; + } + _CHECK_READ_BUFFER(data, NULL) + char res = read_bool_internal(data); + if (unlikely(res == CPY_BOOL_ERROR)) + return NULL; + PyObject *retval = res ? Py_True : Py_False; + Py_INCREF(retval); + return retval; +} + +static char +write_bool_internal(PyObject *data, char value) { + _CHECK_WRITE(data, 1) + _WRITE(data, char, value); + return CPY_NONE; +} + +static PyObject* +write_bool(PyObject *self, PyObject *const *args, size_t nargs, PyObject *kwnames) { + static const char * const kwlist[] = {"data", "value", 0}; + static CPyArg_Parser parser = {"OO:write_bool", kwlist, 0}; + PyObject *data; + PyObject *value; + if (unlikely(!CPyArg_ParseStackAndKeywordsSimple(args, nargs, kwnames, &parser, &data, &value))) { + return NULL; + } + _CHECK_WRITE_BUFFER(data, NULL) + if (unlikely(!PyBool_Check(value))) { + PyErr_SetString(PyExc_TypeError, "value must be a bool"); + return NULL; + } + if (unlikely(write_bool_internal(data, Py_IsTrue(value)) == CPY_NONE_ERROR)) { + return NULL; + } + Py_INCREF(Py_None); + return Py_None; +} + +/* +str format: size as int (see below) followed by UTF-8 bytes +*/ + +static inline CPyTagged +_read_short_int(PyObject *data, uint8_t first) { + uint8_t second; + uint16_t two_more; + if ((first & TWO_BYTES_INT_BIT) == 0) { + // Note we use tagged ints since this function can return an error. + return ((Py_ssize_t)(first >> 1) + MIN_ONE_BYTE_INT) << 1; + } + if ((first & FOUR_BYTES_INT_BIT) == 0) { + _CHECK_READ(data, 1, CPY_INT_TAG) + _READ(&second, data, uint8_t); + return ((((Py_ssize_t)second) << 6) + (Py_ssize_t)(first >> 2) + MIN_TWO_BYTES_INT) << 1; + } + // The caller is responsible to verify this is called only for short ints. + _CHECK_READ(data, 3, CPY_INT_TAG) + // TODO: check if compilers emit optimal code for these two reads, and tweak if needed. 
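+    // Worked example of the four-byte format: 20000 is stored as the little-endian
+    // uint32 ((20000 + 10000) << 3) | 3 = 0x0003A983, i.e. bytes 83 A9 03 00.
+    // Here first = 0x83, second = 0xA9 and two_more = 0x0003, which recombine to
+    // (3 << 13) + (0xA9 << 5) + (0x83 >> 3) - 10000 = 20000.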
+ _READ(&second, data, uint8_t); + _READ(&two_more, data, uint16_t); +#if PY_BIG_ENDIAN + two_more = reverse_16(two_more); +#endif + Py_ssize_t higher = (((Py_ssize_t)two_more) << 13) + (((Py_ssize_t)second) << 5); + return (higher + (Py_ssize_t)(first >> 3) + MIN_FOUR_BYTES_INT) << 1; +} + +static PyObject* +read_str_internal(PyObject *data) { + // Read string length. + _CHECK_READ(data, 1, NULL) + uint8_t first; + _READ(&first, data, uint8_t); + if (unlikely(first == LONG_INT_TRAILER)) { + // Fail fast for invalid/tampered data. + PyErr_SetString(PyExc_ValueError, "invalid str size"); + return NULL; + } + CPyTagged tagged_size = _read_short_int(data, first); + if (tagged_size == CPY_INT_TAG) + return NULL; + if ((Py_ssize_t)tagged_size < 0) { + // Fail fast for invalid/tampered data. + PyErr_SetString(PyExc_ValueError, "invalid str size"); + return NULL; + } + Py_ssize_t size = tagged_size >> 1; + // Read string content. + char *ptr = ((ReadBufferObject *)data)->ptr; + _CHECK_READ(data, size, NULL) + PyObject *res = PyUnicode_FromStringAndSize(ptr, (Py_ssize_t)size); + if (unlikely(res == NULL)) + return NULL; + ((ReadBufferObject *)data)->ptr += size; + return res; +} + +static PyObject* +read_str(PyObject *self, PyObject *const *args, size_t nargs, PyObject *kwnames) { + static const char * const kwlist[] = {"data", 0}; + static CPyArg_Parser parser = {"O:read_str", kwlist, 0}; + PyObject *data; + if (unlikely(!CPyArg_ParseStackAndKeywordsOneArg(args, nargs, kwnames, &parser, &data))) { + return NULL; + } + _CHECK_READ_BUFFER(data, NULL) + return read_str_internal(data); +} + +// The caller *must* check that real_value is within allowed range (29 bits). +static inline char +_write_short_int(PyObject *data, Py_ssize_t real_value) { + if (real_value >= MIN_ONE_BYTE_INT && real_value <= MAX_ONE_BYTE_INT) { + _CHECK_WRITE(data, 1) + _WRITE(data, uint8_t, (uint8_t)(real_value - MIN_ONE_BYTE_INT) << 1); + } else if (real_value >= MIN_TWO_BYTES_INT && real_value <= MAX_TWO_BYTES_INT) { + _CHECK_WRITE(data, 2) +#if PY_BIG_ENDIAN + uint16_t to_write = ((uint16_t)(real_value - MIN_TWO_BYTES_INT) << 2) | TWO_BYTES_INT_BIT; + _WRITE(data, uint16_t, reverse_16(to_write)); +#else + _WRITE(data, uint16_t, ((uint16_t)(real_value - MIN_TWO_BYTES_INT) << 2) | TWO_BYTES_INT_BIT); +#endif + } else { + _CHECK_WRITE(data, 4) +#if PY_BIG_ENDIAN + uint32_t to_write = ((uint32_t)(real_value - MIN_FOUR_BYTES_INT) << 3) | FOUR_BYTES_INT_TRAILER; + _WRITE(data, uint32_t, reverse_32(to_write)); +#else + _WRITE(data, uint32_t, ((uint32_t)(real_value - MIN_FOUR_BYTES_INT) << 3) | FOUR_BYTES_INT_TRAILER); +#endif + } + return CPY_NONE; +} + +static char +write_str_internal(PyObject *data, PyObject *value) { + Py_ssize_t size; + const char *chunk = PyUnicode_AsUTF8AndSize(value, &size); + if (unlikely(chunk == NULL)) + return CPY_NONE_ERROR; + + // Write string length. + if (likely(size >= MIN_FOUR_BYTES_INT && size <= MAX_FOUR_BYTES_INT)) { + if (_write_short_int(data, size) == CPY_NONE_ERROR) + return CPY_NONE_ERROR; + } else { + PyErr_SetString(PyExc_ValueError, "str too long to serialize"); + return CPY_NONE_ERROR; + } + // Write string content. 
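+    // Resulting layout is [length as short int][UTF-8 bytes]; e.g. "hi" is written
+    // as the three bytes 18 68 69 (0x18 encodes length 2 in the one-byte format).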
+ _CHECK_WRITE(data, size) + char *ptr = ((WriteBufferObject *)data)->ptr; + memcpy(ptr, chunk, size); + ((WriteBufferObject *)data)->ptr += size; + return CPY_NONE; +} + +static PyObject* +write_str(PyObject *self, PyObject *const *args, size_t nargs, PyObject *kwnames) { + static const char * const kwlist[] = {"data", "value", 0}; + static CPyArg_Parser parser = {"OO:write_str", kwlist, 0}; + PyObject *data; + PyObject *value; + if (unlikely(!CPyArg_ParseStackAndKeywordsSimple(args, nargs, kwnames, &parser, &data, &value))) { + return NULL; + } + _CHECK_WRITE_BUFFER(data, NULL) + if (unlikely(!PyUnicode_Check(value))) { + PyErr_SetString(PyExc_TypeError, "value must be a str"); + return NULL; + } + if (unlikely(write_str_internal(data, value) == CPY_NONE_ERROR)) { + return NULL; + } + Py_INCREF(Py_None); + return Py_None; +} + +/* +bytes format: size as int (see below) followed by bytes +*/ + +static PyObject* +read_bytes_internal(PyObject *data) { + // Read length. + _CHECK_READ(data, 1, NULL) + uint8_t first; + _READ(&first, data, uint8_t); + if (unlikely(first == LONG_INT_TRAILER)) { + // Fail fast for invalid/tampered data. + PyErr_SetString(PyExc_ValueError, "invalid bytes size"); + return NULL; + } + CPyTagged tagged_size = _read_short_int(data, first); + if (tagged_size == CPY_INT_TAG) + return NULL; + if ((Py_ssize_t)tagged_size < 0) { + // Fail fast for invalid/tampered data. + PyErr_SetString(PyExc_ValueError, "invalid bytes size"); + return NULL; + } + Py_ssize_t size = tagged_size >> 1; + // Read bytes content. + char *ptr = ((ReadBufferObject *)data)->ptr; + _CHECK_READ(data, size, NULL) + PyObject *res = PyBytes_FromStringAndSize(ptr, (Py_ssize_t)size); + if (unlikely(res == NULL)) + return NULL; + ((ReadBufferObject *)data)->ptr += size; + return res; +} + +static PyObject* +read_bytes(PyObject *self, PyObject *const *args, size_t nargs, PyObject *kwnames) { + static const char * const kwlist[] = {"data", 0}; + static CPyArg_Parser parser = {"O:read_bytes", kwlist, 0}; + PyObject *data; + if (unlikely(!CPyArg_ParseStackAndKeywordsOneArg(args, nargs, kwnames, &parser, &data))) { + return NULL; + } + _CHECK_READ_BUFFER(data, NULL) + return read_bytes_internal(data); +} + +static char +write_bytes_internal(PyObject *data, PyObject *value) { + const char *chunk = PyBytes_AsString(value); + if (unlikely(chunk == NULL)) + return CPY_NONE_ERROR; + Py_ssize_t size = PyBytes_GET_SIZE(value); + + // Write length. + if (likely(size >= MIN_FOUR_BYTES_INT && size <= MAX_FOUR_BYTES_INT)) { + if (_write_short_int(data, size) == CPY_NONE_ERROR) + return CPY_NONE_ERROR; + } else { + PyErr_SetString(PyExc_ValueError, "bytes too long to serialize"); + return CPY_NONE_ERROR; + } + // Write bytes content. 
+ _CHECK_WRITE(data, size) + char *ptr = ((WriteBufferObject *)data)->ptr; + memcpy(ptr, chunk, size); + ((WriteBufferObject *)data)->ptr += size; + return CPY_NONE; +} + +static PyObject* +write_bytes(PyObject *self, PyObject *const *args, size_t nargs, PyObject *kwnames) { + static const char * const kwlist[] = {"data", "value", 0}; + static CPyArg_Parser parser = {"OO:write_bytes", kwlist, 0}; + PyObject *data; + PyObject *value; + if (unlikely(!CPyArg_ParseStackAndKeywordsSimple(args, nargs, kwnames, &parser, &data, &value))) { + return NULL; + } + _CHECK_WRITE_BUFFER(data, NULL) + if (unlikely(!PyBytes_Check(value))) { + PyErr_SetString(PyExc_TypeError, "value must be a bytes object"); + return NULL; + } + if (unlikely(write_bytes_internal(data, value) == CPY_NONE_ERROR)) { + return NULL; + } + Py_INCREF(Py_None); + return Py_None; +} + +/* +float format: + stored using PyFloat helpers in little-endian format. +*/ + +static double +read_float_internal(PyObject *data) { + _CHECK_READ(data, 8, CPY_FLOAT_ERROR) + char *ptr = ((ReadBufferObject *)data)->ptr; + double res = PyFloat_Unpack8(ptr, 1); + if (unlikely((res == -1.0) && PyErr_Occurred())) + return CPY_FLOAT_ERROR; + ((ReadBufferObject *)data)->ptr += 8; + return res; +} + +static PyObject* +read_float(PyObject *self, PyObject *const *args, size_t nargs, PyObject *kwnames) { + static const char * const kwlist[] = {"data", 0}; + static CPyArg_Parser parser = {"O:read_float", kwlist, 0}; + PyObject *data; + if (unlikely(!CPyArg_ParseStackAndKeywordsOneArg(args, nargs, kwnames, &parser, &data))) { + return NULL; + } + _CHECK_READ_BUFFER(data, NULL) + double retval = read_float_internal(data); + if (unlikely(retval == CPY_FLOAT_ERROR && PyErr_Occurred())) { + return NULL; + } + return PyFloat_FromDouble(retval); +} + +static char +write_float_internal(PyObject *data, double value) { + _CHECK_WRITE(data, 8) + char *ptr = ((WriteBufferObject *)data)->ptr; + int res = PyFloat_Pack8(value, ptr, 1); + if (unlikely(res == -1)) + return CPY_NONE_ERROR; + ((WriteBufferObject *)data)->ptr += 8; + return CPY_NONE; +} + +static PyObject* +write_float(PyObject *self, PyObject *const *args, size_t nargs, PyObject *kwnames) { + static const char * const kwlist[] = {"data", "value", 0}; + static CPyArg_Parser parser = {"OO:write_float", kwlist, 0}; + PyObject *data; + PyObject *value; + if (unlikely(!CPyArg_ParseStackAndKeywordsSimple(args, nargs, kwnames, &parser, &data, &value))) { + return NULL; + } + _CHECK_WRITE_BUFFER(data, NULL) + if (unlikely(!PyFloat_Check(value))) { + PyErr_SetString(PyExc_TypeError, "value must be a float"); + return NULL; + } + if (unlikely(write_float_internal(data, PyFloat_AsDouble(value)) == CPY_NONE_ERROR)) { + return NULL; + } + Py_INCREF(Py_None); + return Py_None; +} + +/* +int format: + one byte: last bit 0, 7 bits used + two bytes: last two bits 01, 14 bits used + four bytes: last three bits 011, 29 bits used + everything else: 00001111 followed by serialized string representation + +Note: for fixed size formats we skew ranges towards more positive values, +since negative integers are much more rare. +*/ + +static CPyTagged +read_int_internal(PyObject *data) { + _CHECK_READ(data, 1, CPY_INT_TAG) + + uint8_t first; + _READ(&first, data, uint8_t); + if (likely(first != LONG_INT_TRAILER)) { + return _read_short_int(data, first); + } + + // Long integer encoding -- byte length and sign, followed by a byte array. + + // Read byte length and sign. 
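+    // The layout after the 0x0F trailer is [(nbytes << 1) | sign, as a short int]
+    // followed by nbytes little-endian bytes of the absolute value. For example,
+    // 2**40 round-trips as 0F 2C 00 00 00 00 00 01: 0x2C is the short-int encoding
+    // of (6 << 1) | 0, i.e. six unsigned bytes follow.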
+ _CHECK_READ(data, 1, CPY_INT_TAG) + _READ(&first, data, uint8_t); + Py_ssize_t size_and_sign = _read_short_int(data, first); + if (size_and_sign == CPY_INT_TAG) + return CPY_INT_TAG; + if ((Py_ssize_t)size_and_sign < 0) { + PyErr_SetString(PyExc_ValueError, "invalid int data"); + return CPY_INT_TAG; + } + bool sign = (size_and_sign >> 1) & 1; + Py_ssize_t size = size_and_sign >> 2; + + // Construct an int object from the byte array. + _CHECK_READ(data, size, CPY_INT_TAG) + char *ptr = ((ReadBufferObject *)data)->ptr; + PyObject *num = _PyLong_FromByteArray((unsigned char *)ptr, size, 1, 0); + if (num == NULL) + return CPY_INT_TAG; + ((ReadBufferObject *)data)->ptr += size; + if (sign) { + PyObject *old = num; + num = PyNumber_Negative(old); + Py_DECREF(old); + if (num == NULL) { + return CPY_INT_TAG; + } + } + return CPyTagged_StealFromObject(num); +} + +static PyObject* +read_int(PyObject *self, PyObject *const *args, size_t nargs, PyObject *kwnames) { + static const char * const kwlist[] = {"data", 0}; + static CPyArg_Parser parser = {"O:read_int", kwlist, 0}; + PyObject *data; + if (unlikely(!CPyArg_ParseStackAndKeywordsOneArg(args, nargs, kwnames, &parser, &data))) { + return NULL; + } + _CHECK_READ_BUFFER(data, NULL) + CPyTagged retval = read_int_internal(data); + if (unlikely(retval == CPY_INT_TAG)) { + return NULL; + } + return CPyTagged_StealAsObject(retval); +} + + +static inline int hex_to_int(char c) { + if (c >= '0' && c <= '9') + return c - '0'; + else if (c >= 'a' && c <= 'f') + return c - 'a' + 10; + else + return c - 'A' + 10; // Assume valid hex digit +} + +static inline char +_write_long_int(PyObject *data, CPyTagged value) { + _CHECK_WRITE(data, 1) + _WRITE(data, uint8_t, LONG_INT_TRAILER); + + PyObject *hex_str = NULL; + PyObject* int_value = CPyTagged_AsObject(value); + if (unlikely(int_value == NULL)) + goto error; + + hex_str = PyNumber_ToBase(int_value, 16); + if (hex_str == NULL) + goto error; + Py_DECREF(int_value); + int_value = NULL; + + const char *str = PyUnicode_AsUTF8(hex_str); + if (str == NULL) + goto error; + Py_ssize_t len = strlen(str); + bool neg; + if (str[0] == '-') { + str++; + len--; + neg = true; + } else { + neg = false; + } + // Skip the 0x hex prefix. + str += 2; + len -= 2; + + // Write bytes encoded length and sign. + Py_ssize_t size = (len + 1) / 2; + Py_ssize_t encoded_size = (size << 1) | neg; + if (encoded_size <= MAX_FOUR_BYTES_INT) { + if (_write_short_int(data, encoded_size) == CPY_NONE_ERROR) + goto error; + } else { + PyErr_SetString(PyExc_ValueError, "int too long to serialize"); + goto error; + } + + // Write absolute integer value as byte array in a variable-length little endian format. + int i; + for (i = len; i > 1; i -= 2) { + if (write_tag_internal( + data, hex_to_int(str[i - 1]) | (hex_to_int(str[i - 2]) << 4)) == CPY_NONE_ERROR) + goto error; + } + // The final byte may correspond to only one hex digit. 
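+    // (e.g. 2**40 == 0x10000000000 has 11 hex digits, so its most significant byte
+    // is produced from the single leading digit '1')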
+ if (i == 1) { + if (write_tag_internal(data, hex_to_int(str[i - 1])) == CPY_NONE_ERROR) + goto error; + } + + Py_DECREF(hex_str); + return CPY_NONE; + + error: + + Py_XDECREF(int_value); + Py_XDECREF(hex_str); + return CPY_NONE_ERROR; +} + +static char +write_int_internal(PyObject *data, CPyTagged value) { + if (likely((value & CPY_INT_TAG) == 0)) { + Py_ssize_t real_value = CPyTagged_ShortAsSsize_t(value); + if (likely(real_value >= MIN_FOUR_BYTES_INT && real_value <= MAX_FOUR_BYTES_INT)) { + return _write_short_int(data, real_value); + } else { + return _write_long_int(data, value); + } + } else { + return _write_long_int(data, value); + } +} + +static PyObject* +write_int(PyObject *self, PyObject *const *args, size_t nargs, PyObject *kwnames) { + static const char * const kwlist[] = {"data", "value", 0}; + static CPyArg_Parser parser = {"OO:write_int", kwlist, 0}; + PyObject *data; + PyObject *value; + if (unlikely(!CPyArg_ParseStackAndKeywordsSimple(args, nargs, kwnames, &parser, &data, &value))) { + return NULL; + } + _CHECK_WRITE_BUFFER(data, NULL) + if (unlikely(!PyLong_Check(value))) { + PyErr_SetString(PyExc_TypeError, "value must be an int"); + return NULL; + } + CPyTagged tagged_value = CPyTagged_BorrowFromObject(value); + if (unlikely(write_int_internal(data, tagged_value) == CPY_NONE_ERROR)) { + return NULL; + } + Py_INCREF(Py_None); + return Py_None; +} + +/* +integer tag format (0 <= t <= 255): + stored as a uint8_t +*/ + +static uint8_t +read_tag_internal(PyObject *data) { + _CHECK_READ(data, 1, CPY_LL_UINT_ERROR) + uint8_t ret; + _READ(&ret, data, uint8_t); + return ret; +} + +static PyObject* +read_tag(PyObject *self, PyObject *const *args, size_t nargs, PyObject *kwnames) { + static const char * const kwlist[] = {"data", 0}; + static CPyArg_Parser parser = {"O:read_tag", kwlist, 0}; + PyObject *data; + if (unlikely(!CPyArg_ParseStackAndKeywordsOneArg(args, nargs, kwnames, &parser, &data))) { + return NULL; + } + _CHECK_READ_BUFFER(data, NULL) + uint8_t retval = read_tag_internal(data); + if (unlikely(retval == CPY_LL_UINT_ERROR && PyErr_Occurred())) { + return NULL; + } + return PyLong_FromLong(retval); +} + +static char +write_tag_internal(PyObject *data, uint8_t value) { + _CHECK_WRITE(data, 1) + _WRITE(data, uint8_t, value); + return CPY_NONE; +} + +static PyObject* +write_tag(PyObject *self, PyObject *const *args, size_t nargs, PyObject *kwnames) { + static const char * const kwlist[] = {"data", "value", 0}; + static CPyArg_Parser parser = {"OO:write_tag", kwlist, 0}; + PyObject *data; + PyObject *value; + if (unlikely(!CPyArg_ParseStackAndKeywordsSimple(args, nargs, kwnames, &parser, &data, &value))) { + return NULL; + } + _CHECK_WRITE_BUFFER(data, NULL) + uint8_t unboxed = CPyLong_AsUInt8(value); + if (unlikely(unboxed == CPY_LL_UINT_ERROR && PyErr_Occurred())) { + CPy_TypeError("u8", value); + return NULL; + } + if (unlikely(write_tag_internal(data, unboxed) == CPY_NONE_ERROR)) { + return NULL; + } + Py_INCREF(Py_None); + return Py_None; +} + +static uint8_t +cache_version_internal(void) { + return 0; +} + +static PyObject* +cache_version(PyObject *self, PyObject *Py_UNUSED(ignored)) { + return PyLong_FromLong(cache_version_internal()); +} + +static PyTypeObject * +ReadBuffer_type_internal(void) { + return &ReadBufferType; // Return borrowed reference +} + +static PyTypeObject * +WriteBuffer_type_internal(void) { + return &WriteBufferType; // Return borrowed reference +}; + +static PyMethodDef librt_internal_module_methods[] = { + {"write_bool", 
(PyCFunction)write_bool, METH_FASTCALL | METH_KEYWORDS, PyDoc_STR("write a bool")}, + {"read_bool", (PyCFunction)read_bool, METH_FASTCALL | METH_KEYWORDS, PyDoc_STR("read a bool")}, + {"write_str", (PyCFunction)write_str, METH_FASTCALL | METH_KEYWORDS, PyDoc_STR("write a string")}, + {"read_str", (PyCFunction)read_str, METH_FASTCALL | METH_KEYWORDS, PyDoc_STR("read a string")}, + {"write_bytes", (PyCFunction)write_bytes, METH_FASTCALL | METH_KEYWORDS, PyDoc_STR("write bytes")}, + {"read_bytes", (PyCFunction)read_bytes, METH_FASTCALL | METH_KEYWORDS, PyDoc_STR("read bytes")}, + {"write_float", (PyCFunction)write_float, METH_FASTCALL | METH_KEYWORDS, PyDoc_STR("write a float")}, + {"read_float", (PyCFunction)read_float, METH_FASTCALL | METH_KEYWORDS, PyDoc_STR("read a float")}, + {"write_int", (PyCFunction)write_int, METH_FASTCALL | METH_KEYWORDS, PyDoc_STR("write an int")}, + {"read_int", (PyCFunction)read_int, METH_FASTCALL | METH_KEYWORDS, PyDoc_STR("read an int")}, + {"write_tag", (PyCFunction)write_tag, METH_FASTCALL | METH_KEYWORDS, PyDoc_STR("write a short int")}, + {"read_tag", (PyCFunction)read_tag, METH_FASTCALL | METH_KEYWORDS, PyDoc_STR("read a short int")}, + {"cache_version", (PyCFunction)cache_version, METH_NOARGS, PyDoc_STR("cache format version")}, + {NULL, NULL, 0, NULL} +}; + +static int +NativeInternal_ABI_Version(void) { + return LIBRT_INTERNAL_ABI_VERSION; +} + +static int +NativeInternal_API_Version(void) { + return LIBRT_INTERNAL_API_VERSION; +} + +static int +librt_internal_module_exec(PyObject *m) +{ + if (PyType_Ready(&ReadBufferType) < 0) { + return -1; + } + if (PyType_Ready(&WriteBufferType) < 0) { + return -1; + } + if (PyModule_AddObjectRef(m, "ReadBuffer", (PyObject *) &ReadBufferType) < 0) { + return -1; + } + if (PyModule_AddObjectRef(m, "WriteBuffer", (PyObject *) &WriteBufferType) < 0) { + return -1; + } + + // Export mypy internal C API, be careful with the order! 
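+    // librt_internal.h reads this capsule by fixed slot index (0..19, matching
+    // LIBRT_INTERNAL_API_LEN == 20), so entries are append-only: add new functions
+    // at the end and bump LIBRT_INTERNAL_API_LEN and LIBRT_INTERNAL_API_VERSION.
+    // That is presumably why write_bytes_internal sits after NativeInternal_ABI_Version
+    // instead of next to the other writers.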
+ static void *NativeInternal_API[LIBRT_INTERNAL_API_LEN] = { + (void *)ReadBuffer_internal, + (void *)WriteBuffer_internal, + (void *)WriteBuffer_getvalue_internal, + (void *)write_bool_internal, + (void *)read_bool_internal, + (void *)write_str_internal, + (void *)read_str_internal, + (void *)write_float_internal, + (void *)read_float_internal, + (void *)write_int_internal, + (void *)read_int_internal, + (void *)write_tag_internal, + (void *)read_tag_internal, + (void *)NativeInternal_ABI_Version, + (void *)write_bytes_internal, + (void *)read_bytes_internal, + (void *)cache_version_internal, + (void *)ReadBuffer_type_internal, + (void *)WriteBuffer_type_internal, + (void *)NativeInternal_API_Version, + }; + PyObject *c_api_object = PyCapsule_New((void *)NativeInternal_API, "librt.internal._C_API", NULL); + if (PyModule_Add(m, "_C_API", c_api_object) < 0) { + return -1; + } + return 0; +} + +static PyModuleDef_Slot librt_internal_module_slots[] = { + {Py_mod_exec, librt_internal_module_exec}, +#ifdef Py_MOD_GIL_NOT_USED + {Py_mod_gil, Py_MOD_GIL_NOT_USED}, +#endif + {0, NULL} +}; + +static PyModuleDef librt_internal_module = { + .m_base = PyModuleDef_HEAD_INIT, + .m_name = "internal", + .m_doc = "Mypy cache serialization utils", + .m_size = 0, + .m_methods = librt_internal_module_methods, + .m_slots = librt_internal_module_slots, +}; + +PyMODINIT_FUNC +PyInit_internal(void) +{ + return PyModuleDef_Init(&librt_internal_module); +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/librt_internal.h b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/librt_internal.h new file mode 100644 index 0000000..501162a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/librt_internal.h @@ -0,0 +1,111 @@ +#ifndef LIBRT_INTERNAL_H +#define LIBRT_INTERNAL_H + +// ABI version -- only an exact match is compatible. This will only be changed in +// very exceptional cases (likely never) due to strict backward compatibility +// requirements. +#define LIBRT_INTERNAL_ABI_VERSION 2 + +// API version -- more recent versions must maintain backward compatibility, i.e. +// we can add new features but not remove or change existing features (unless +// ABI version is changed, but see the comment above). + #define LIBRT_INTERNAL_API_VERSION 0 + +// Number of functions in the capsule API. If you add a new function, also increase +// LIBRT_INTERNAL_API_VERSION. 
+#define LIBRT_INTERNAL_API_LEN 20 + +#ifdef LIBRT_INTERNAL_MODULE + +static PyObject *ReadBuffer_internal(PyObject *source); +static PyObject *WriteBuffer_internal(void); +static PyObject *WriteBuffer_getvalue_internal(PyObject *self); +static PyObject *ReadBuffer_internal(PyObject *source); +static PyObject *ReadBuffer_internal_empty(void); +static char write_bool_internal(PyObject *data, char value); +static char read_bool_internal(PyObject *data); +static char write_str_internal(PyObject *data, PyObject *value); +static PyObject *read_str_internal(PyObject *data); +static char write_float_internal(PyObject *data, double value); +static double read_float_internal(PyObject *data); +static char write_int_internal(PyObject *data, CPyTagged value); +static CPyTagged read_int_internal(PyObject *data); +static char write_tag_internal(PyObject *data, uint8_t value); +static uint8_t read_tag_internal(PyObject *data); +static int NativeInternal_ABI_Version(void); +static char write_bytes_internal(PyObject *data, PyObject *value); +static PyObject *read_bytes_internal(PyObject *data); +static uint8_t cache_version_internal(void); +static PyTypeObject *ReadBuffer_type_internal(void); +static PyTypeObject *WriteBuffer_type_internal(void); +static int NativeInternal_API_Version(void); + +#else + +static void *NativeInternal_API[LIBRT_INTERNAL_API_LEN]; + +#define ReadBuffer_internal (*(PyObject* (*)(PyObject *source)) NativeInternal_API[0]) +#define WriteBuffer_internal (*(PyObject* (*)(void)) NativeInternal_API[1]) +#define WriteBuffer_getvalue_internal (*(PyObject* (*)(PyObject *source)) NativeInternal_API[2]) +#define write_bool_internal (*(char (*)(PyObject *source, char value)) NativeInternal_API[3]) +#define read_bool_internal (*(char (*)(PyObject *source)) NativeInternal_API[4]) +#define write_str_internal (*(char (*)(PyObject *source, PyObject *value)) NativeInternal_API[5]) +#define read_str_internal (*(PyObject* (*)(PyObject *source)) NativeInternal_API[6]) +#define write_float_internal (*(char (*)(PyObject *source, double value)) NativeInternal_API[7]) +#define read_float_internal (*(double (*)(PyObject *source)) NativeInternal_API[8]) +#define write_int_internal (*(char (*)(PyObject *source, CPyTagged value)) NativeInternal_API[9]) +#define read_int_internal (*(CPyTagged (*)(PyObject *source)) NativeInternal_API[10]) +#define write_tag_internal (*(char (*)(PyObject *source, uint8_t value)) NativeInternal_API[11]) +#define read_tag_internal (*(uint8_t (*)(PyObject *source)) NativeInternal_API[12]) +#define NativeInternal_ABI_Version (*(int (*)(void)) NativeInternal_API[13]) +#define write_bytes_internal (*(char (*)(PyObject *source, PyObject *value)) NativeInternal_API[14]) +#define read_bytes_internal (*(PyObject* (*)(PyObject *source)) NativeInternal_API[15]) +#define cache_version_internal (*(uint8_t (*)(void)) NativeInternal_API[16]) +#define ReadBuffer_type_internal (*(PyTypeObject* (*)(void)) NativeInternal_API[17]) +#define WriteBuffer_type_internal (*(PyTypeObject* (*)(void)) NativeInternal_API[18]) +#define NativeInternal_API_Version (*(int (*)(void)) NativeInternal_API[19]) + +static int +import_librt_internal(void) +{ + PyObject *mod = PyImport_ImportModule("librt.internal"); + if (mod == NULL) + return -1; + Py_DECREF(mod); // we import just for the side effect of making the below work. 
+ void *capsule = PyCapsule_Import("librt.internal._C_API", 0); + if (capsule == NULL) + return -1; + memcpy(NativeInternal_API, capsule, sizeof(NativeInternal_API)); + if (NativeInternal_ABI_Version() != LIBRT_INTERNAL_ABI_VERSION) { + char err[128]; + snprintf(err, sizeof(err), "ABI version conflict for librt.internal, expected %d, found %d", + LIBRT_INTERNAL_ABI_VERSION, + NativeInternal_ABI_Version() + ); + PyErr_SetString(PyExc_ValueError, err); + return -1; + } + if (NativeInternal_API_Version() < LIBRT_INTERNAL_API_VERSION) { + char err[128]; + snprintf(err, sizeof(err), + "API version conflict for librt.internal, expected %d or newer, found %d (hint: upgrade librt)", + LIBRT_INTERNAL_API_VERSION, + NativeInternal_API_Version() + ); + PyErr_SetString(PyExc_ValueError, err); + return -1; + } + return 0; +} + +#endif + +static inline bool CPyReadBuffer_Check(PyObject *obj) { + return Py_TYPE(obj) == ReadBuffer_type_internal(); +} + +static inline bool CPyWriteBuffer_Check(PyObject *obj) { + return Py_TYPE(obj) == WriteBuffer_type_internal(); +} + +#endif // LIBRT_INTERNAL_H diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/list_ops.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/list_ops.c new file mode 100644 index 0000000..c611907 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/list_ops.c @@ -0,0 +1,406 @@ +// List primitive operations +// +// These are registered in mypyc.primitives.list_ops. + +#include +#include "CPy.h" + +#ifndef Py_TPFLAGS_SEQUENCE +#define Py_TPFLAGS_SEQUENCE (1 << 5) +#endif + +PyObject *CPyList_Build(Py_ssize_t len, ...) { + Py_ssize_t i; + + PyObject *res = PyList_New(len); + if (res == NULL) { + return NULL; + } + + va_list args; + va_start(args, len); + for (i = 0; i < len; i++) { + // Steals the reference + PyObject *value = va_arg(args, PyObject *); + PyList_SET_ITEM(res, i, value); + } + va_end(args); + + return res; +} + +char CPyList_Clear(PyObject *list) { + if (PyList_CheckExact(list)) { + PyList_Clear(list); + } else { + _Py_IDENTIFIER(clear); + PyObject *name = _PyUnicode_FromId(&PyId_clear); + if (name == NULL) { + return 0; + } + PyObject *res = PyObject_CallMethodNoArgs(list, name); + if (res == NULL) { + return 0; + } + } + return 1; +} + +PyObject *CPyList_Copy(PyObject *list) { + if(PyList_CheckExact(list)) { + return PyList_GetSlice(list, 0, PyList_GET_SIZE(list)); + } + _Py_IDENTIFIER(copy); + + PyObject *name = _PyUnicode_FromId(&PyId_copy); + if (name == NULL) { + return NULL; + } + return PyObject_CallMethodNoArgs(list, name); +} + +PyObject *CPyList_GetItemShort(PyObject *list, CPyTagged index) { + Py_ssize_t n = CPyTagged_ShortAsSsize_t(index); + Py_ssize_t size = PyList_GET_SIZE(list); + if (n >= 0) { + if (n >= size) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + } else { + n += size; + if (n < 0) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + } + PyObject *result = PyList_GET_ITEM(list, n); + Py_INCREF(result); + return result; +} + +PyObject *CPyList_GetItemShortBorrow(PyObject *list, CPyTagged index) { + Py_ssize_t n = CPyTagged_ShortAsSsize_t(index); + Py_ssize_t size = PyList_GET_SIZE(list); + if (n >= 0) { + if (n >= size) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + } else { + n += size; + if (n < 0) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + } + return PyList_GET_ITEM(list, n); +} + +PyObject *CPyList_GetItem(PyObject *list, 
CPyTagged index) { + if (CPyTagged_CheckShort(index)) { + Py_ssize_t n = CPyTagged_ShortAsSsize_t(index); + Py_ssize_t size = PyList_GET_SIZE(list); + if (n >= 0) { + if (n >= size) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + } else { + n += size; + if (n < 0) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + } + PyObject *result = PyList_GET_ITEM(list, n); + Py_INCREF(result); + return result; + } else { + PyErr_SetString(PyExc_OverflowError, CPYTHON_LARGE_INT_ERRMSG); + return NULL; + } +} + +PyObject *CPyList_GetItemBorrow(PyObject *list, CPyTagged index) { + if (CPyTagged_CheckShort(index)) { + Py_ssize_t n = CPyTagged_ShortAsSsize_t(index); + Py_ssize_t size = PyList_GET_SIZE(list); + if (n >= 0) { + if (n >= size) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + } else { + n += size; + if (n < 0) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + } + return PyList_GET_ITEM(list, n); + } else { + PyErr_SetString(PyExc_OverflowError, CPYTHON_LARGE_INT_ERRMSG); + return NULL; + } +} + +PyObject *CPyList_GetItemInt64(PyObject *list, int64_t index) { + size_t size = PyList_GET_SIZE(list); + if (likely((uint64_t)index < size)) { + PyObject *result = PyList_GET_ITEM(list, index); + Py_INCREF(result); + return result; + } + if (index >= 0) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + index += size; + if (index < 0) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + PyObject *result = PyList_GET_ITEM(list, index); + Py_INCREF(result); + return result; +} + +PyObject *CPyList_GetItemInt64Borrow(PyObject *list, int64_t index) { + size_t size = PyList_GET_SIZE(list); + if (likely((uint64_t)index < size)) { + return PyList_GET_ITEM(list, index); + } + if (index >= 0) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + index += size; + if (index < 0) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + return PyList_GET_ITEM(list, index); +} + +bool CPyList_SetItem(PyObject *list, CPyTagged index, PyObject *value) { + if (CPyTagged_CheckShort(index)) { + Py_ssize_t n = CPyTagged_ShortAsSsize_t(index); + Py_ssize_t size = PyList_GET_SIZE(list); + if (n >= 0) { + if (n >= size) { + PyErr_SetString(PyExc_IndexError, "list assignment index out of range"); + return false; + } + } else { + n += size; + if (n < 0) { + PyErr_SetString(PyExc_IndexError, "list assignment index out of range"); + return false; + } + } + // PyList_SET_ITEM doesn't decref the old element, so we do + Py_DECREF(PyList_GET_ITEM(list, n)); + // N.B: Steals reference + PyList_SET_ITEM(list, n, value); + return true; + } else { + PyErr_SetString(PyExc_OverflowError, CPYTHON_LARGE_INT_ERRMSG); + return false; + } +} + +bool CPyList_SetItemInt64(PyObject *list, int64_t index, PyObject *value) { + size_t size = PyList_GET_SIZE(list); + if (unlikely((uint64_t)index >= size)) { + if (index > 0) { + PyErr_SetString(PyExc_IndexError, "list assignment index out of range"); + return false; + } + index += size; + if (index < 0) { + PyErr_SetString(PyExc_IndexError, "list assignment index out of range"); + return false; + } + } + // PyList_SET_ITEM doesn't decref the old element, so we do + Py_DECREF(PyList_GET_ITEM(list, index)); + // N.B: Steals reference + PyList_SET_ITEM(list, index, value); + return true; +} + +// This function should only be 
used to fill in brand new lists. +void CPyList_SetItemUnsafe(PyObject *list, Py_ssize_t index, PyObject *value) { + PyList_SET_ITEM(list, index, value); +} + +#ifdef Py_GIL_DISABLED +// The original optimized list.pop implementation doesn't work on free-threaded +// builds, so provide an alternative that is a bit slower but works. +// +// Note that this implementation isn't intended to be atomic. +static inline PyObject *list_pop_index(PyObject *list, Py_ssize_t index) { + PyObject *item = PyList_GetItemRef(list, index); + if (item == NULL) { + return NULL; + } + if (PySequence_DelItem(list, index) < 0) { + Py_DECREF(item); + return NULL; + } + return item; +} +#endif + +PyObject *CPyList_PopLast(PyObject *list) +{ +#ifdef Py_GIL_DISABLED + // The other implementation causes segfaults on a free-threaded Python 3.14b4 build. + Py_ssize_t index = PyList_GET_SIZE(list) - 1; + return list_pop_index(list, index); +#else + // I tried a specalized version of pop_impl for just removing the + // last element and it wasn't any faster in microbenchmarks than + // the generic one so I ditched it. + return list_pop_impl((PyListObject *)list, -1); +#endif +} + +PyObject *CPyList_Pop(PyObject *obj, CPyTagged index) +{ + if (CPyTagged_CheckShort(index)) { + Py_ssize_t n = CPyTagged_ShortAsSsize_t(index); +#ifdef Py_GIL_DISABLED + // We must use a slower implementation on free-threaded builds. + if (n < 0) { + n += PyList_GET_SIZE(obj); + } + return list_pop_index(obj, n); +#else + return list_pop_impl((PyListObject *)obj, n); +#endif + } else { + PyErr_SetString(PyExc_OverflowError, CPYTHON_LARGE_INT_ERRMSG); + return NULL; + } +} + +CPyTagged CPyList_Count(PyObject *obj, PyObject *value) +{ + return list_count((PyListObject *)obj, value); +} + +int CPyList_Insert(PyObject *list, CPyTagged index, PyObject *value) +{ + if (CPyTagged_CheckShort(index)) { + Py_ssize_t n = CPyTagged_ShortAsSsize_t(index); + return PyList_Insert(list, n, value); + } + // The max range doesn't exactly coincide with ssize_t, but we still + // want to keep the error message compatible with CPython. + PyErr_SetString(PyExc_OverflowError, CPYTHON_LARGE_INT_ERRMSG); + return -1; +} + +PyObject *CPyList_Extend(PyObject *o1, PyObject *o2) { + if (PyList_Extend(o1, o2) < 0) { + return NULL; + } + Py_RETURN_NONE; +} + +// Return -2 or error, -1 if not found, or index of first match otherwise. 
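+// Both CPyList_Remove and CPyList_Index below rely on this convention: -2 means a
+// comparison raised (the error is already set), while -1 only means "not found".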
+static Py_ssize_t _CPyList_Find(PyObject *list, PyObject *obj) { + Py_ssize_t i; + for (i = 0; i < Py_SIZE(list); i++) { + PyObject *item = PyList_GET_ITEM(list, i); + Py_INCREF(item); + int cmp = PyObject_RichCompareBool(item, obj, Py_EQ); + Py_DECREF(item); + if (cmp != 0) { + if (cmp > 0) { + return i; + } else { + return -2; + } + } + } + return -1; +} + +int CPyList_Remove(PyObject *list, PyObject *obj) { + Py_ssize_t index = _CPyList_Find(list, obj); + if (index == -2) { + return -1; + } + if (index == -1) { + PyErr_SetString(PyExc_ValueError, "list.remove(x): x not in list"); + return -1; + } + return PyList_SetSlice(list, index, index + 1, NULL); +} + +CPyTagged CPyList_Index(PyObject *list, PyObject *obj) { + Py_ssize_t index = _CPyList_Find(list, obj); + if (index == -2) { + return CPY_INT_TAG; + } + if (index == -1) { + PyErr_SetString(PyExc_ValueError, "value is not in list"); + return CPY_INT_TAG; + } + return index << 1; +} + +PyObject *CPySequence_Sort(PyObject *seq) { + PyObject *newlist = PySequence_List(seq); + if (newlist == NULL) + return NULL; + int res = PyList_Sort(newlist); + if (res < 0) { + Py_DECREF(newlist); + return NULL; + } + return newlist; +} + +PyObject *CPySequence_Multiply(PyObject *seq, CPyTagged t_size) { + Py_ssize_t size = CPyTagged_AsSsize_t(t_size); + if (size == -1 && PyErr_Occurred()) { + return NULL; + } + return PySequence_Repeat(seq, size); +} + +PyObject *CPySequence_RMultiply(CPyTagged t_size, PyObject *seq) { + return CPySequence_Multiply(seq, t_size); +} + +PyObject *CPySequence_InPlaceMultiply(PyObject *seq, CPyTagged t_size) { + Py_ssize_t size = CPyTagged_AsSsize_t(t_size); + if (size == -1 && PyErr_Occurred()) { + return NULL; + } + return PySequence_InPlaceRepeat(seq, size); +} + +PyObject *CPyList_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end) { + if (likely(PyList_CheckExact(obj) + && CPyTagged_CheckShort(start) && CPyTagged_CheckShort(end))) { + Py_ssize_t startn = CPyTagged_ShortAsSsize_t(start); + Py_ssize_t endn = CPyTagged_ShortAsSsize_t(end); + if (startn < 0) { + startn += PyList_GET_SIZE(obj); + } + if (endn < 0) { + endn += PyList_GET_SIZE(obj); + } + return PyList_GetSlice(obj, startn, endn); + } + return CPyObject_GetSlice(obj, start, end); +} + +int CPySequence_Check(PyObject *obj) { + return Py_TYPE(obj)->tp_flags & Py_TPFLAGS_SEQUENCE; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/misc_ops.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/misc_ops.c new file mode 100644 index 0000000..8e5bfff --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/misc_ops.c @@ -0,0 +1,1142 @@ +#include "pythoncapi_compat.h" + +// Misc primitive operations + C helpers +// +// These are registered in mypyc.primitives.misc_ops. + +#include +#include +#include "CPy.h" + +PyObject *CPy_GetCoro(PyObject *obj) +{ + // If the type has an __await__ method, call it, + // otherwise, fallback to calling __iter__. + PyAsyncMethods* async_struct = Py_TYPE(obj)->tp_as_async; + if (async_struct != NULL && async_struct->am_await != NULL) { + return (async_struct->am_await)(obj); + } else { + // TODO: We should check that the type is a generator decorated with + // asyncio.coroutine + return PyObject_GetIter(obj); + } +} + +PyObject *CPyIter_Send(PyObject *iter, PyObject *val) +{ + // Do a send, or a next if second arg is None. + // (This behavior is to match the PEP 380 spec for yield from.) 
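+    // Per the PEP 380 expansion, a sent value of None means "just call next() on the
+    // subiterator"; only a real value is forwarded via the send() method.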
+ if (Py_IsNone(val)) { + return CPyIter_Next(iter); + } else { + _Py_IDENTIFIER(send); + PyObject *name = _PyUnicode_FromId(&PyId_send); /* borrowed */ + if (name == NULL) { + return NULL; + } + return PyObject_CallMethodOneArg(iter, name, val); + } +} + +// A somewhat hairy implementation of specifically most of the error handling +// in `yield from` error handling. The point here is to reduce code size. +// +// This implements most of the bodies of the `except` blocks in the +// pseudocode in PEP 380. +// +// Returns true (1) if a StopIteration was received and we should return. +// Returns false (0) if a value should be yielded. +// In both cases the value is stored in outp. +// Signals an error (2) if the an exception should be propagated. +int CPy_YieldFromErrorHandle(PyObject *iter, PyObject **outp) +{ + _Py_IDENTIFIER(close); + _Py_IDENTIFIER(throw); + PyObject *exc_type = (PyObject *)Py_TYPE(CPy_ExcState()->exc_value); + PyObject *type, *value, *traceback; + PyObject *_m; + PyObject *res; + *outp = NULL; + + if (PyErr_GivenExceptionMatches(exc_type, PyExc_GeneratorExit)) { + _m = _PyObject_GetAttrId(iter, &PyId_close); + if (_m) { + res = PyObject_CallNoArgs(_m); + Py_DECREF(_m); + if (!res) + return 2; + Py_DECREF(res); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + return 2; + } + } else { + _m = _PyObject_GetAttrId(iter, &PyId_throw); + if (_m) { + _CPy_GetExcInfo(&type, &value, &traceback); + res = PyObject_CallFunctionObjArgs(_m, type, value, traceback, NULL); + Py_DECREF(type); + Py_DECREF(value); + Py_DECREF(traceback); + Py_DECREF(_m); + if (res) { + *outp = res; + return 0; + } else { + res = CPy_FetchStopIterationValue(); + if (res) { + *outp = res; + return 1; + } + } + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + return 2; + } + } + + CPy_Reraise(); + return 2; +} + +PyObject *CPy_FetchStopIterationValue(void) +{ + PyObject *val = NULL; + _PyGen_FetchStopIterationValue(&val); + return val; +} + +static bool _CPy_IsSafeMetaClass(PyTypeObject *metaclass) { + // mypyc classes can't work with metaclasses in + // general. Through some various nasty hacks we *do* + // manage to work with TypingMeta and its friends. + if (metaclass == &PyType_Type) + return true; + PyObject *module = PyObject_GetAttrString((PyObject *)metaclass, "__module__"); + if (!module) { + PyErr_Clear(); + return false; + } + + bool matches = false; + if (PyUnicode_CompareWithASCIIString(module, "typing") == 0 && + (strcmp(metaclass->tp_name, "TypingMeta") == 0 + || strcmp(metaclass->tp_name, "GenericMeta") == 0 + || strcmp(metaclass->tp_name, "_ProtocolMeta") == 0)) { + matches = true; + } else if (PyUnicode_CompareWithASCIIString(module, "typing_extensions") == 0 && + strcmp(metaclass->tp_name, "_ProtocolMeta") == 0) { + matches = true; + } else if (PyUnicode_CompareWithASCIIString(module, "abc") == 0 && + strcmp(metaclass->tp_name, "ABCMeta") == 0) { + matches = true; + } + Py_DECREF(module); + return matches; +} + +#if CPY_3_13_FEATURES + +// Adapted from CPython 3.13.0b3 +/* Determine the most derived metatype. */ +PyObject *CPy_CalculateMetaclass(PyObject *metatype, PyObject *bases) +{ + Py_ssize_t i, nbases; + PyTypeObject *winner; + PyObject *tmp; + PyTypeObject *tmptype; + + /* Determine the proper metatype to deal with this, + and check for metatype conflicts while we're at it. + Note that if some other metatype wins to contract, + it's possible that its instances are not types. 
*/ + + nbases = PyTuple_GET_SIZE(bases); + winner = (PyTypeObject *)metatype; + for (i = 0; i < nbases; i++) { + tmp = PyTuple_GET_ITEM(bases, i); + tmptype = Py_TYPE(tmp); + if (PyType_IsSubtype(winner, tmptype)) + continue; + if (PyType_IsSubtype(tmptype, winner)) { + winner = tmptype; + continue; + } + /* else: */ + PyErr_SetString(PyExc_TypeError, + "metaclass conflict: " + "the metaclass of a derived class " + "must be a (non-strict) subclass " + "of the metaclasses of all its bases"); + return NULL; + } + return (PyObject *)winner; +} + +#else + +PyObject *CPy_CalculateMetaclass(PyObject *metatype, PyObject *bases) { + return (PyObject *)_PyType_CalculateMetaclass((PyTypeObject *)metatype, bases); +} + +#endif + +// Create a heap type based on a template non-heap type. +// This is super hacky and maybe we should suck it up and use PyType_FromSpec instead. +// We allow bases to be NULL to represent just inheriting from object. +// We don't support NULL bases and a non-type metaclass. +PyObject *CPyType_FromTemplate(PyObject *template, + PyObject *orig_bases, + PyObject *modname) { + PyTypeObject *template_ = (PyTypeObject *)template; + PyHeapTypeObject *t = NULL; + PyTypeObject *dummy_class = NULL; + PyObject *name = NULL; + PyObject *bases = NULL; + PyObject *slots; + + // If the type of the class (the metaclass) is NULL, we default it + // to being type. (This allows us to avoid needing to initialize + // it explicitly on windows.) + if (!Py_TYPE(template_)) { + Py_SET_TYPE(template_, &PyType_Type); + } + PyTypeObject *metaclass = Py_TYPE(template_); + + if (orig_bases) { + bases = update_bases(orig_bases); + // update_bases doesn't increment the refcount if nothing changes, + // so we do it to make sure we have distinct "references" to both + if (bases == orig_bases) + Py_INCREF(bases); + + // Find the appropriate metaclass from our base classes. We + // care about this because Generic uses a metaclass prior to + // Python 3.7. + metaclass = (PyTypeObject *)CPy_CalculateMetaclass((PyObject *)metaclass, bases); + if (!metaclass) + goto error; + + if (!_CPy_IsSafeMetaClass(metaclass)) { + PyErr_SetString(PyExc_TypeError, "mypyc classes can't have a metaclass"); + goto error; + } + } + + name = PyUnicode_FromString(template_->tp_name); + if (!name) + goto error; + + if (template_->tp_doc) { + // cpython expects tp_doc to be heap-allocated so convert it here to + // avoid segfaults on deallocation. + Py_ssize_t size = strlen(template_->tp_doc) + 1; + char *doc = (char *)PyMem_Malloc(size); + if (!doc) + goto error; + memcpy(doc, template_->tp_doc, size); + template_->tp_doc = doc; + } + + // Allocate the type and then copy the main stuff in. + t = (PyHeapTypeObject*)PyType_GenericAlloc(&PyType_Type, 0); + if (!t) + goto error; + memcpy((char *)t + sizeof(PyVarObject), + (char *)template_ + sizeof(PyVarObject), + sizeof(PyTypeObject) - sizeof(PyVarObject)); + + if (bases != orig_bases) { + if (PyObject_SetAttrString((PyObject *)t, "__orig_bases__", orig_bases) < 0) + goto error; + } + + // Having tp_base set is I think required for stuff to get + // inherited in PyType_Ready, which we needed for subclassing + // BaseException. XXX: Taking the first element is wrong I think though. 
+ if (bases) { + t->ht_type.tp_base = (PyTypeObject *)PyTuple_GET_ITEM(bases, 0); + Py_INCREF((PyObject *)t->ht_type.tp_base); + } + + t->ht_name = name; + Py_INCREF(name); + t->ht_qualname = name; + t->ht_type.tp_bases = bases; + // references stolen so NULL these out + bases = name = NULL; + + if (PyType_Ready((PyTypeObject *)t) < 0) + goto error; + + assert(t->ht_type.tp_base != NULL); + + // XXX: This is a terrible hack to work around a cpython check on + // the mro. It was needed for mypy.stats. I need to investigate + // what is actually going on here. + Py_INCREF(metaclass); + Py_SET_TYPE(t, metaclass); + + if (dummy_class) { + if (PyDict_Merge(t->ht_type.tp_dict, dummy_class->tp_dict, 0) != 0) + goto error; + // This is the *really* tasteless bit. GenericMeta's __new__ + // in certain versions of typing sets _gorg to point back to + // the class. We need to override it to keep it from pointing + // to the proxy. + if (PyDict_SetItemString(t->ht_type.tp_dict, "_gorg", (PyObject *)t) < 0) + goto error; + } + + // Reject anything that would give us a nontrivial __slots__, + // because the layout will conflict + slots = PyObject_GetAttrString((PyObject *)t, "__slots__"); + if (slots) { + // don't fail on an empty __slots__ + int is_true = PyObject_IsTrue(slots); + Py_DECREF(slots); + if (is_true > 0) + PyErr_SetString(PyExc_TypeError, "mypyc classes can't have __slots__"); + if (is_true != 0) + goto error; + } else { + PyErr_Clear(); + } + + if (PyObject_SetAttrString((PyObject *)t, "__module__", modname) < 0) + goto error; + + if (init_subclass((PyTypeObject *)t, NULL)) + goto error; + + Py_XDECREF(dummy_class); + + // Unlike the tp_doc slots of most other object, a heap type's tp_doc + // must be heap allocated. + if (template_->tp_doc) { + // Silently truncate the docstring if it contains a null byte + Py_ssize_t size = strlen(template_->tp_doc) + 1; + char *tp_doc = (char *)PyMem_Malloc(size); + if (tp_doc == NULL) { + PyErr_NoMemory(); + goto error; + } + + memcpy(tp_doc, template_->tp_doc, size); + t->ht_type.tp_doc = tp_doc; + } + +#if PY_MINOR_VERSION == 11 + // This is a hack. Python 3.11 doesn't include good public APIs to work with managed + // dicts, which are the default for heap types. So we try to opt-out until Python 3.12. + t->ht_type.tp_flags &= ~Py_TPFLAGS_MANAGED_DICT; +#endif + return (PyObject *)t; + +error: + Py_XDECREF(t); + Py_XDECREF(bases); + Py_XDECREF(dummy_class); + Py_XDECREF(name); + return NULL; +} + +static int _CPy_UpdateObjFromDict(PyObject *obj, PyObject *dict) +{ + Py_ssize_t pos = 0; + PyObject *key, *value; + while (PyDict_Next(dict, &pos, &key, &value)) { + if (PyObject_SetAttr(obj, key, value) != 0) { + return -1; + } + } + return 0; +} + +/* Support for our partial built-in support for dataclasses. + * + * Take a class we want to make a dataclass, remove any descriptors + * for annotated attributes, swap in the actual values of the class + * variables invoke dataclass, and then restore all of the + * descriptors. + * + * The purpose of all this is that dataclasses uses the values of + * class variables to drive which attributes are required and what the + * default values/factories are for optional attributes. This means + * that the class dict needs to contain those values instead of getset + * descriptors for the attributes when we invoke dataclass. + * + * We need to remove descriptors for attributes even when there is no + * default value for them, or else dataclass will think the descriptor + * is the default value. 
We remove only the attributes, since we don't + * want dataclasses to try generating functions when they are already + * implemented. + * + * Args: + * dataclass_dec: The decorator to apply + * tp: The class we are making a dataclass + * dict: The dictionary containing values that dataclasses needs + * annotations: The type annotation dictionary + * dataclass_type: A str object with the return value of util.py:dataclass_type() + */ +int +CPyDataclass_SleightOfHand(PyObject *dataclass_dec, PyObject *tp, + PyObject *dict, PyObject *annotations, + PyObject *dataclass_type) { + PyTypeObject *ttp = (PyTypeObject *)tp; + Py_ssize_t pos; + PyObject *res = NULL; + + /* Make a copy of the original class __dict__ */ + PyObject *orig_dict = PyDict_Copy(ttp->tp_dict); + if (!orig_dict) { + goto fail; + } + + /* Delete anything that had an annotation */ + pos = 0; + PyObject *key; + while (PyDict_Next(annotations, &pos, &key, NULL)) { + // Check and delete key. Key may be absent from tp for InitVar variables. + if (PyObject_HasAttr(tp, key) == 1 && PyObject_DelAttr(tp, key) != 0) { + goto fail; + } + } + + /* Copy in all the attributes that we want dataclass to see */ + if (_CPy_UpdateObjFromDict(tp, dict) != 0) { + goto fail; + } + + /* Run the @dataclass descriptor */ + res = PyObject_CallOneArg(dataclass_dec, tp); + if (!res) { + goto fail; + } + const char *dataclass_type_ptr = PyUnicode_AsUTF8(dataclass_type); + if (dataclass_type_ptr == NULL) { + goto fail; + } + if (strcmp(dataclass_type_ptr, "attr") == 0 || + strcmp(dataclass_type_ptr, "attr-auto") == 0) { + // These attributes are added or modified by @attr.s(slots=True). + const char * const keys[] = {"__attrs_attrs__", "__attrs_own_setattr__", "__init__", ""}; + for (const char * const *key_iter = keys; **key_iter != '\0'; key_iter++) { + PyObject *value = NULL; + int rv = PyObject_GetOptionalAttrString(res, *key_iter, &value); + if (rv == 1) { + PyObject_SetAttrString(tp, *key_iter, value); + Py_DECREF(value); + } else if (rv == -1) { + goto fail; + } + } + } + + /* Copy back the original contents of the dict */ + if (_CPy_UpdateObjFromDict(tp, orig_dict) != 0) { + goto fail; + } + + Py_DECREF(res); + Py_DECREF(orig_dict); + return 1; + +fail: + Py_XDECREF(res); + Py_XDECREF(orig_dict); + return 0; +} + +// Support for pickling; reusable getstate and setstate functions +PyObject * +CPyPickle_SetState(PyObject *obj, PyObject *state) +{ + if (_CPy_UpdateObjFromDict(obj, state) != 0) { + return NULL; + } + Py_RETURN_NONE; +} + +PyObject * +CPyPickle_GetState(PyObject *obj) +{ + PyObject *attrs = NULL, *state = NULL; + + attrs = PyObject_GetAttrString((PyObject *)Py_TYPE(obj), "__mypyc_attrs__"); + if (!attrs) { + goto fail; + } + if (!PyTuple_Check(attrs)) { + PyErr_SetString(PyExc_TypeError, "__mypyc_attrs__ is not a tuple"); + goto fail; + } + state = PyDict_New(); + if (!state) { + goto fail; + } + + // Collect all the values of attributes in __mypyc_attrs__ + // Attributes that are missing we just ignore + int i; + for (i = 0; i < PyTuple_GET_SIZE(attrs); i++) { + PyObject *key = PyTuple_GET_ITEM(attrs, i); + PyObject *value = PyObject_GetAttr(obj, key); + if (!value) { + if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + continue; + } + goto fail; + } + int result = PyDict_SetItem(state, key, value); + Py_DECREF(value); + if (result != 0) { + goto fail; + } + } + + Py_DECREF(attrs); + + return state; +fail: + Py_XDECREF(attrs); + Py_XDECREF(state); + return NULL; +} + +CPyTagged CPyTagged_Id(PyObject *o) { + return 
CPyTagged_FromVoidPtr(o); +} + +#define MAX_INT_CHARS 22 +#define _PyUnicode_LENGTH(op) \ + (((PyASCIIObject *)(op))->length) + +// using snprintf or PyUnicode_FromFormat was way slower than +// boxing the int and calling PyObject_Str on it, so we implement our own +static int fmt_ssize_t(char *out, Py_ssize_t n) { + bool neg = n < 0; + if (neg) n = -n; + + // buf gets filled backward and then we copy it forward + char buf[MAX_INT_CHARS]; + int i = 0; + do { + buf[i] = (n % 10) + '0'; + n /= 10; + i++; + } while (n); + + + int len = i; + int j = 0; + if (neg) { + out[j++] = '-'; + len++; + } + + for (; j < len; j++, i--) { + out[j] = buf[i-1]; + } + out[j] = '\0'; + + return len; +} + +static PyObject *CPyTagged_ShortToStr(Py_ssize_t n) { + PyObject *obj = PyUnicode_New(MAX_INT_CHARS, 127); + if (!obj) return NULL; + int len = fmt_ssize_t((char *)PyUnicode_1BYTE_DATA(obj), n); + _PyUnicode_LENGTH(obj) = len; + return obj; +} + +PyObject *CPyTagged_Str(CPyTagged n) { + if (CPyTagged_CheckShort(n)) { + return CPyTagged_ShortToStr(CPyTagged_ShortAsSsize_t(n)); + } else { + return PyObject_Str(CPyTagged_AsObject(n)); + } +} + +void CPyDebug_Print(const char *msg) { + printf("%s\n", msg); + fflush(stdout); +} + +void CPyDebug_PrintObject(PyObject *obj) { + // Printing can cause errors. We don't want this to affect any existing + // state so we'll save any existing error and restore it at the end. + PyObject *exc_type, *exc_value, *exc_traceback; + PyErr_Fetch(&exc_type, &exc_value, &exc_traceback); + + if (PyObject_Print(obj, stderr, 0) == -1) { + PyErr_Print(); + } else { + fprintf(stderr, "\n"); + } + fflush(stderr); + + PyErr_Restore(exc_type, exc_value, exc_traceback); +} + +int CPySequence_CheckUnpackCount(PyObject *sequence, Py_ssize_t expected) { + Py_ssize_t actual = Py_SIZE(sequence); + if (unlikely(actual != expected)) { + if (actual < expected) { + PyErr_Format(PyExc_ValueError, "not enough values to unpack (expected %zd, got %zd)", + expected, actual); + } else { + PyErr_Format(PyExc_ValueError, "too many values to unpack (expected %zd)", expected); + } + return -1; + } + return 0; +} + +// Parse an integer (size_t) encoded as a variable-length binary sequence. 
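+// Each byte holds 7 payload bits, most-significant group first; every byte
+// except the last has the continuation bit 0x80 set. For example, a length
+// of 300 (0b1_0010_1100) is encoded as the two bytes 0x82 0x2c and decoded
+// below as ((0x82 & 0x7f) << 7) | 0x2c == 300.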
+static const char *parse_int(const char *s, size_t *len) { + Py_ssize_t n = 0; + while ((unsigned char)*s >= 0x80) { + n = (n << 7) + (*s & 0x7f); + s++; + } + n = (n << 7) | *s++; + *len = n; + return s; +} + +// Initialize static constant array of literal values +int CPyStatics_Initialize(PyObject **statics, + const char * const *strings, + const char * const *bytestrings, + const char * const *ints, + const double *floats, + const double *complex_numbers, + const int *tuples, + const int *frozensets) { + PyObject **result = statics; + // Start with some hard-coded values + *result++ = Py_None; + Py_INCREF(Py_None); + *result++ = Py_False; + Py_INCREF(Py_False); + *result++ = Py_True; + Py_INCREF(Py_True); + if (strings) { + for (; **strings != '\0'; strings++) { + size_t num; + const char *data = *strings; + data = parse_int(data, &num); + while (num-- > 0) { + size_t len; + data = parse_int(data, &len); + PyObject *obj = PyUnicode_DecodeUTF8(data, len, "surrogatepass"); + if (obj == NULL) { + return -1; + } + PyUnicode_InternInPlace(&obj); + *result++ = obj; + data += len; + } + } + } + if (bytestrings) { + for (; **bytestrings != '\0'; bytestrings++) { + size_t num; + const char *data = *bytestrings; + data = parse_int(data, &num); + while (num-- > 0) { + size_t len; + data = parse_int(data, &len); + PyObject *obj = PyBytes_FromStringAndSize(data, len); + if (obj == NULL) { + return -1; + } + *result++ = obj; + data += len; + } + } + } + if (ints) { + for (; **ints != '\0'; ints++) { + size_t num; + const char *data = *ints; + data = parse_int(data, &num); + while (num-- > 0) { + char *end; + PyObject *obj = PyLong_FromString(data, &end, 10); + if (obj == NULL) { + return -1; + } + data = end; + data++; + *result++ = obj; + } + } + } + if (floats) { + size_t num = (size_t)*floats++; + while (num-- > 0) { + PyObject *obj = PyFloat_FromDouble(*floats++); + if (obj == NULL) { + return -1; + } + *result++ = obj; + } + } + if (complex_numbers) { + size_t num = (size_t)*complex_numbers++; + while (num-- > 0) { + double real = *complex_numbers++; + double imag = *complex_numbers++; + PyObject *obj = PyComplex_FromDoubles(real, imag); + if (obj == NULL) { + return -1; + } + *result++ = obj; + } + } + if (tuples) { + int num = *tuples++; + while (num-- > 0) { + int num_items = *tuples++; + PyObject *obj = PyTuple_New(num_items); + if (obj == NULL) { + return -1; + } + int i; + for (i = 0; i < num_items; i++) { + PyObject *item = statics[*tuples++]; + Py_INCREF(item); + PyTuple_SET_ITEM(obj, i, item); + } + *result++ = obj; + } + } + if (frozensets) { + int num = *frozensets++; + while (num-- > 0) { + int num_items = *frozensets++; + PyObject *obj = PyFrozenSet_New(NULL); + if (obj == NULL) { + return -1; + } + for (int i = 0; i < num_items; i++) { + PyObject *item = statics[*frozensets++]; + Py_INCREF(item); + if (PySet_Add(obj, item) == -1) { + return -1; + } + } + *result++ = obj; + } + } + return 0; +} + +// Call super(type(self), self) +PyObject * +CPy_Super(PyObject *builtins, PyObject *self) { + PyObject *super_type = PyObject_GetAttrString(builtins, "super"); + if (!super_type) + return NULL; + PyObject *result = PyObject_CallFunctionObjArgs( + super_type, (PyObject*)Py_TYPE(self), self, NULL); + Py_DECREF(super_type); + return result; +} + +static bool import_single(PyObject *mod_id, PyObject **mod_static, + PyObject *globals_id, PyObject *globals_name, PyObject *globals) { + if (Py_IsNone(*mod_static)) { + CPyModule *mod = PyImport_Import(mod_id); + if (mod == NULL) { + return false; 
+ } + *mod_static = mod; + } + + PyObject *mod_dict = PyImport_GetModuleDict(); + CPyModule *globals_mod = CPyDict_GetItem(mod_dict, globals_id); + if (globals_mod == NULL) { + return false; + } + int ret = CPyDict_SetItem(globals, globals_name, globals_mod); + Py_DECREF(globals_mod); + if (ret < 0) { + return false; + } + + return true; +} + +// Table-driven import helper. See transform_import() in irbuild for the details. +bool CPyImport_ImportMany(PyObject *modules, CPyModule **statics[], PyObject *globals, + PyObject *tb_path, PyObject *tb_function, Py_ssize_t *tb_lines) { + for (Py_ssize_t i = 0; i < PyTuple_GET_SIZE(modules); i++) { + PyObject *module = PyTuple_GET_ITEM(modules, i); + PyObject *mod_id = PyTuple_GET_ITEM(module, 0); + PyObject *globals_id = PyTuple_GET_ITEM(module, 1); + PyObject *globals_name = PyTuple_GET_ITEM(module, 2); + + if (!import_single(mod_id, statics[i], globals_id, globals_name, globals)) { + assert(PyErr_Occurred() && "error indicator should be set on bad import!"); + PyObject *typ, *val, *tb; + PyErr_Fetch(&typ, &val, &tb); + const char *path = PyUnicode_AsUTF8(tb_path); + if (path == NULL) { + path = ""; + } + const char *function = PyUnicode_AsUTF8(tb_function); + if (function == NULL) { + function = ""; + } + PyErr_Restore(typ, val, tb); + CPy_AddTraceback(path, function, tb_lines[i], globals); + return false; + } + } + return true; +} + +// This helper function is a simplification of cpython/ceval.c/import_from() +static PyObject *CPyImport_ImportFrom(PyObject *module, PyObject *package_name, + PyObject *import_name, PyObject *as_name) { + // check if the imported module has an attribute by that name + PyObject *x = PyObject_GetAttr(module, import_name); + if (x == NULL) { + // if not, attempt to import a submodule with that name + PyObject *fullmodname = PyUnicode_FromFormat("%U.%U", package_name, import_name); + if (fullmodname == NULL) { + goto fail; + } + + // The following code is a simplification of cpython/import.c/PyImport_GetModule() + x = PyObject_GetItem(module, fullmodname); + Py_DECREF(fullmodname); + if (x == NULL) { + goto fail; + } + } + return x; + +fail: + PyErr_Clear(); + PyObject *package_path = PyModule_GetFilenameObject(module); + PyObject *errmsg = PyUnicode_FromFormat("cannot import name %R from %R (%S)", + import_name, package_name, package_path); + // NULL checks for errmsg and package_name done by PyErr_SetImportError. 
+ PyErr_SetImportError(errmsg, package_name, package_path); + Py_DECREF(package_path); + Py_DECREF(errmsg); + return NULL; +} + +PyObject *CPyImport_ImportFromMany(PyObject *mod_id, PyObject *names, PyObject *as_names, + PyObject *globals) { + PyObject *mod = PyImport_ImportModuleLevelObject(mod_id, globals, 0, names, 0); + if (mod == NULL) { + return NULL; + } + + for (Py_ssize_t i = 0; i < PyTuple_GET_SIZE(names); i++) { + PyObject *name = PyTuple_GET_ITEM(names, i); + PyObject *as_name = PyTuple_GET_ITEM(as_names, i); + PyObject *obj = CPyImport_ImportFrom(mod, mod_id, name, as_name); + if (obj == NULL) { + Py_DECREF(mod); + return NULL; + } + int ret = CPyDict_SetItem(globals, as_name, obj); + Py_DECREF(obj); + if (ret < 0) { + Py_DECREF(mod); + return NULL; + } + } + return mod; +} + +// From CPython +static PyObject * +CPy_BinopTypeError(PyObject *left, PyObject *right, const char *op) { + PyErr_Format(PyExc_TypeError, + "unsupported operand type(s) for %.100s: " + "'%.100s' and '%.100s'", + op, + Py_TYPE(left)->tp_name, + Py_TYPE(right)->tp_name); + return NULL; +} + +PyObject * +CPy_CallReverseOpMethod(PyObject *left, + PyObject *right, + const char *op, + _Py_Identifier *method) { + // Look up reverse method + PyObject *m = _PyObject_GetAttrId(right, method); + if (m == NULL) { + // If reverse method not defined, generate TypeError instead AttributeError + if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + CPy_BinopTypeError(left, right, op); + } + return NULL; + } + // Call reverse method + PyObject *result = PyObject_CallOneArg(m, left); + Py_DECREF(m); + return result; +} + +PyObject *CPySingledispatch_RegisterFunction(PyObject *singledispatch_func, + PyObject *cls, + PyObject *func) { + PyObject *registry = PyObject_GetAttrString(singledispatch_func, "registry"); + PyObject *register_func = NULL; + PyObject *typing = NULL; + PyObject *get_type_hints = NULL; + PyObject *type_hints = NULL; + + if (registry == NULL) goto fail; + if (func == NULL) { + // one argument case + if (PyType_Check(cls)) { + // passed a class + // bind cls to the first argument so that register gets called again with both the + // class and the function + register_func = PyObject_GetAttrString(singledispatch_func, "register"); + if (register_func == NULL) goto fail; + return PyMethod_New(register_func, cls); + } + // passed a function + PyObject *annotations = PyFunction_GetAnnotations(cls); + const char *invalid_first_arg_msg = + "Invalid first argument to `register()`: %R. " + "Use either `@register(some_class)` or plain `@register` " + "on an annotated function."; + + if (annotations == NULL) { + PyErr_Format(PyExc_TypeError, invalid_first_arg_msg, cls); + goto fail; + } + + Py_INCREF(annotations); + + func = cls; + typing = PyImport_ImportModule("typing"); + if (typing == NULL) goto fail; + get_type_hints = PyObject_GetAttrString(typing, "get_type_hints"); + + type_hints = PyObject_CallOneArg(get_type_hints, func); + PyObject *argname; + Py_ssize_t pos = 0; + if (!PyDict_Next(type_hints, &pos, &argname, &cls)) { + // the functools implementation raises the same type error if annotations is an empty dict + PyErr_Format(PyExc_TypeError, invalid_first_arg_msg, cls); + goto fail; + } + if (!PyType_Check(cls)) { + const char *invalid_annotation_msg = "Invalid annotation for %R. 
%R is not a class."; + PyErr_Format(PyExc_TypeError, invalid_annotation_msg, argname, cls); + goto fail; + } + } + if (PyDict_SetItem(registry, cls, func) == -1) { + goto fail; + } + + // clear the cache so we consider the newly added function when dispatching + PyObject *dispatch_cache = PyObject_GetAttrString(singledispatch_func, "dispatch_cache"); + if (dispatch_cache == NULL) goto fail; + PyDict_Clear(dispatch_cache); + + Py_INCREF(func); + return func; + +fail: + Py_XDECREF(registry); + Py_XDECREF(register_func); + Py_XDECREF(typing); + Py_XDECREF(get_type_hints); + Py_XDECREF(type_hints); + return NULL; + +} + +// Adapted from ceval.c GET_AITER +PyObject *CPy_GetAIter(PyObject *obj) +{ + unaryfunc getter = NULL; + PyTypeObject *type = Py_TYPE(obj); + + if (type->tp_as_async != NULL) { + getter = type->tp_as_async->am_aiter; + } + + if (getter == NULL) { + PyErr_Format(PyExc_TypeError, + "'async for' requires an object with " + "__aiter__ method, got %.100s", + type->tp_name); + Py_DECREF(obj); + return NULL; + } + + PyObject *iter = (*getter)(obj); + if (!iter) { + return NULL; + } + + if (Py_TYPE(iter)->tp_as_async == NULL || + Py_TYPE(iter)->tp_as_async->am_anext == NULL) { + + PyErr_Format(PyExc_TypeError, + "'async for' received an object from __aiter__ " + "that does not implement __anext__: %.100s", + Py_TYPE(iter)->tp_name); + Py_DECREF(iter); + return NULL; + } + + return iter; +} + +// Adapted from ceval.c GET_ANEXT +PyObject *CPy_GetANext(PyObject *aiter) +{ + unaryfunc getter = NULL; + PyObject *next_iter = NULL; + PyObject *awaitable = NULL; + PyTypeObject *type = Py_TYPE(aiter); + + if (PyAsyncGen_CheckExact(aiter)) { + awaitable = type->tp_as_async->am_anext(aiter); + if (awaitable == NULL) { + goto error; + } + } else { + if (type->tp_as_async != NULL){ + getter = type->tp_as_async->am_anext; + } + + if (getter != NULL) { + next_iter = (*getter)(aiter); + if (next_iter == NULL) { + goto error; + } + } + else { + PyErr_Format(PyExc_TypeError, + "'async for' requires an iterator with " + "__anext__ method, got %.100s", + type->tp_name); + goto error; + } + + awaitable = CPyCoro_GetAwaitableIter(next_iter); + if (awaitable == NULL) { + _PyErr_FormatFromCause( + PyExc_TypeError, + "'async for' received an invalid object " + "from __anext__: %.100s", + Py_TYPE(next_iter)->tp_name); + + Py_DECREF(next_iter); + goto error; + } else { + Py_DECREF(next_iter); + } + } + + return awaitable; +error: + return NULL; +} + +#if CPY_3_11_FEATURES + +// Return obj.__name__ (specialized to type objects, which are the most common target). 
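+// Both branches return a new reference: PyType_GetName() and the generic
+// PyObject_GetAttr() fallback each hand back a reference the caller owns.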
+PyObject *CPy_GetName(PyObject *obj) { + if (PyType_Check(obj)) { + return PyType_GetName((PyTypeObject *)obj); + } + _Py_IDENTIFIER(__name__); + PyObject *name = _PyUnicode_FromId(&PyId___name__); /* borrowed */ + return PyObject_GetAttr(obj, name); +} + +#endif + +#ifdef MYPYC_LOG_TRACE + +// This is only compiled in if trace logging is enabled by user + +static int TraceCounter = 0; +static const int TRACE_EVERY_NTH = 1009; // Should be a prime number +#define TRACE_LOG_FILE_NAME "mypyc_trace.txt" +static FILE *TraceLogFile = NULL; + +// Log a tracing event on every Nth call +void CPyTrace_LogEvent(const char *location, const char *line, const char *op, const char *details) { + if (TraceLogFile == NULL) { + if ((TraceLogFile = fopen(TRACE_LOG_FILE_NAME, "w")) == NULL) { + fprintf(stderr, "error: Could not open trace file %s\n", TRACE_LOG_FILE_NAME); + abort(); + } + } + if (TraceCounter == 0) { + fprintf(TraceLogFile, "%s:%s:%s:%s\n", location, line, op, details); + } + TraceCounter++; + if (TraceCounter == TRACE_EVERY_NTH) { + TraceCounter = 0; + } +} + +#endif + +#if CPY_3_12_FEATURES + +// Copied from Python 3.12.3, since this struct is internal to CPython. It defines +// the structure of typing.TypeAliasType objects. We need it since compute_value is +// not part of the public API, and we need to set it to match Python runtime semantics. +// +// IMPORTANT: This needs to be kept in sync with CPython! +typedef struct { + PyObject_HEAD + PyObject *name; + PyObject *type_params; + PyObject *compute_value; + PyObject *value; + PyObject *module; +} typealiasobject; + +void CPy_SetTypeAliasTypeComputeFunction(PyObject *alias, PyObject *compute_value) { + typealiasobject *obj = (typealiasobject *)alias; + if (obj->value != NULL) { + Py_DECREF(obj->value); + } + obj->value = NULL; + Py_INCREF(compute_value); + if (obj->compute_value != NULL) { + Py_DECREF(obj->compute_value); + } + obj->compute_value = compute_value; +} + +#endif + +#if CPY_3_14_FEATURES + +#include "internal/pycore_object.h" + +void CPy_SetImmortal(PyObject *obj) { + _Py_SetImmortal(obj); +} + +#endif diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/module_shim.tmpl b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/module_shim.tmpl new file mode 100644 index 0000000..28cce94 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/module_shim.tmpl @@ -0,0 +1,21 @@ +#include + +PyMODINIT_FUNC +PyInit_{modname}(void) +{{ + PyObject *tmp; + if (!(tmp = PyImport_ImportModule("{libname}"))) return NULL; + PyObject *capsule = PyObject_GetAttrString(tmp, "init_{full_modname}"); + Py_DECREF(tmp); + if (capsule == NULL) return NULL; + void *init_func = PyCapsule_GetPointer(capsule, "{libname}.init_{full_modname}"); + Py_DECREF(capsule); + if (!init_func) {{ + return NULL; + }} + return ((PyObject *(*)(void))init_func)(); +}} + +// distutils sometimes spuriously tells cl to export CPyInit___init__, +// so provide that so it chills out +PyMODINIT_FUNC PyInit___init__(void) {{ return PyInit_{modname}(); }} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/module_shim_no_gil_multiphase.tmpl b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/module_shim_no_gil_multiphase.tmpl new file mode 100644 index 0000000..b9bfe9c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/module_shim_no_gil_multiphase.tmpl @@ -0,0 +1,41 @@ +#include + +static int {modname}_exec(PyObject *module) +{{ + PyObject *tmp; + if (!(tmp = PyImport_ImportModule("{libname}"))) return -1; + PyObject *capsule = 
PyObject_GetAttrString(tmp, "exec_{full_modname}"); + Py_DECREF(tmp); + if (capsule == NULL) return -1; + void *exec_func = PyCapsule_GetPointer(capsule, "{libname}.exec_{full_modname}"); + Py_DECREF(capsule); + if (!exec_func) return -1; + if (((int (*)(PyObject *))exec_func)(module) != 0) return -1; + return 0; +}} + +static PyModuleDef_Slot {modname}_slots[] = {{ + {{Py_mod_exec, {modname}_exec}}, + {{Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED}}, + {{Py_mod_gil, Py_MOD_GIL_NOT_USED}}, + {{0, NULL}}, +}}; + +static struct PyModuleDef {modname}_module = {{ + PyModuleDef_HEAD_INIT, + .m_name = "{modname}", + .m_doc = NULL, + .m_methods = NULL, + .m_size = 0, + .m_slots = {modname}_slots, +}}; + +PyMODINIT_FUNC +PyInit_{modname}(void) +{{ + return PyModuleDef_Init(&{modname}_module); +}} + +// distutils sometimes spuriously tells cl to export CPyInit___init__, +// so provide that so it chills out +PyMODINIT_FUNC PyInit___init__(void) {{ return PyInit_{modname}(); }} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/mypyc_util.h b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/mypyc_util.h new file mode 100644 index 0000000..4168d3c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/mypyc_util.h @@ -0,0 +1,182 @@ +#ifndef MYPYC_UTIL_H +#define MYPYC_UTIL_H + +#include +#include +#include + +#if defined(__clang__) || defined(__GNUC__) +#define likely(x) __builtin_expect((x),1) +#define unlikely(x) __builtin_expect((x),0) +#define CPy_Unreachable() __builtin_unreachable() +#else +#define likely(x) (x) +#define unlikely(x) (x) +#define CPy_Unreachable() abort() +#endif + +#if defined(__clang__) || defined(__GNUC__) +#define CPy_NOINLINE __attribute__((noinline)) +#elif defined(_MSC_VER) +#define CPy_NOINLINE __declspec(noinline) +#else +#define CPy_NOINLINE +#endif + +#ifndef Py_GIL_DISABLED + +// Everything is running in the same thread, so no need for thread locals +#define CPyThreadLocal + +#else + +// 1. Use C11 standard thread_local storage, if available +#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L && !defined(__STDC_NO_THREADS__) +#define CPyThreadLocal _Thread_local + +// 2. Microsoft Visual Studio fallback +#elif defined(_MSC_VER) +#define CPyThreadLocal __declspec(thread) + +// 3. GNU thread local storage for GCC/Clang targets that still need it +#elif defined(__GNUC__) || defined(__clang__) +#define CPyThreadLocal __thread + +#else +#error "Can't define CPyThreadLocal for this compiler/target (consider using a non-free-threaded Python build)" +#endif + +#endif // Py_GIL_DISABLED + +// INCREF and DECREF that assert the pointer is not NULL. +// asserts are disabled in release builds so there shouldn't be a perf hit. +// I'm honestly kind of surprised that this isn't done by default. +#define CPy_INCREF(p) do { assert(p); Py_INCREF(p); } while (0) +#define CPy_DECREF(p) do { assert(p); Py_DECREF(p); } while (0) +// Here just for consistency +#define CPy_XDECREF(p) Py_XDECREF(p) + +#ifndef Py_GIL_DISABLED + +// The *_NO_IMM operations below perform refcount manipulation for +// non-immortal objects (Python 3.12 and later). +// +// Py_INCREF and other CPython operations check for immortality. This +// can be expensive when we know that an object cannot be immortal. +// +// This optimization cannot be performed in free-threaded mode so we +// fall back to just calling the normal incref/decref operations. 
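+//
+// Illustrative use (a sketch, not taken from the code generator itself): for
+// a reference that is known to never be immortal, generated code can do
+//
+//     CPy_INCREF_NO_IMM(obj);   // plain ob_refcnt++, no immortality check
+//
+// whereas Py_INCREF on 3.12+ also has to check for immortal objects.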
+
+static inline void CPy_INCREF_NO_IMM(PyObject *op)
+{
+    op->ob_refcnt++;
+}
+
+static inline void CPy_DECREF_NO_IMM(PyObject *op)
+{
+    if (--op->ob_refcnt == 0) {
+        _Py_Dealloc(op);
+    }
+}
+
+static inline void CPy_XDECREF_NO_IMM(PyObject *op)
+{
+    if (op != NULL && --op->ob_refcnt == 0) {
+        _Py_Dealloc(op);
+    }
+}
+
+#define CPy_INCREF_NO_IMM(op) CPy_INCREF_NO_IMM((PyObject *)(op))
+#define CPy_DECREF_NO_IMM(op) CPy_DECREF_NO_IMM((PyObject *)(op))
+#define CPy_XDECREF_NO_IMM(op) CPy_XDECREF_NO_IMM((PyObject *)(op))
+
+#else
+
+#define CPy_INCREF_NO_IMM(op) CPy_INCREF(op)
+#define CPy_DECREF_NO_IMM(op) CPy_DECREF(op)
+#define CPy_XDECREF_NO_IMM(op) CPy_XDECREF(op)
+
+#endif
+
+// Tagged integer -- our representation of Python 'int' objects.
+// Small enough integers are represented as unboxed integers (shifted
+// left by 1); larger integers (larger than 63 bits on a 64-bit
+// platform) are stored as a tagged pointer (PyObject *)
+// representing a Python int object, with the lowest bit set.
+// Tagged integers are always normalized. A small integer *must not*
+// have the tag bit set.
+typedef size_t CPyTagged;
+
+typedef size_t CPyPtr;
+
+#define CPY_INT_BITS (CHAR_BIT * sizeof(CPyTagged))
+
+#define CPY_TAGGED_MAX (((Py_ssize_t)1 << (CPY_INT_BITS - 2)) - 1)
+#define CPY_TAGGED_MIN (-((Py_ssize_t)1 << (CPY_INT_BITS - 2)))
+#define CPY_TAGGED_ABS_MIN (0-(size_t)CPY_TAGGED_MIN)
+
+typedef PyObject CPyModule;
+
+// Tag bit used for long integers
+#define CPY_INT_TAG 1
+
+// Error value for signed fixed-width (low-level) integers
+#define CPY_LL_INT_ERROR -113
+
+// Error value for unsigned fixed-width (low-level) integers
+#define CPY_LL_UINT_ERROR 239
+
+// Error value for floats
+#define CPY_FLOAT_ERROR -113.0
+
+typedef void (*CPyVTableItem)(void);
+
+static inline CPyTagged CPyTagged_ShortFromInt(int x) {
+    return x << 1;
+}
+
+static inline CPyTagged CPyTagged_ShortFromSsize_t(Py_ssize_t x) {
+    return x << 1;
+}
+
+// Are we targeting Python 3.X or newer?
+#define CPY_3_11_FEATURES (PY_VERSION_HEX >= 0x030b0000)
+#define CPY_3_12_FEATURES (PY_VERSION_HEX >= 0x030c0000)
+#define CPY_3_14_FEATURES (PY_VERSION_HEX >= 0x030e0000)
+
+#if CPY_3_12_FEATURES
+
+// Same as macros in CPython internal/pycore_long.h, but with a CPY_ prefix
+#define CPY_NON_SIZE_BITS 3
+#define CPY_SIGN_ZERO 1
+#define CPY_SIGN_NEGATIVE 2
+#define CPY_SIGN_MASK 3
+
+#define CPY_LONG_DIGIT(o, n) ((o)->long_value.ob_digit[n])
+
+// Only available on Python 3.12 and later
+#define CPY_LONG_TAG(o) ((o)->long_value.lv_tag)
+#define CPY_LONG_IS_NEGATIVE(o) (((o)->long_value.lv_tag & CPY_SIGN_MASK) == CPY_SIGN_NEGATIVE)
+// Only available on Python 3.12 and later
+#define CPY_LONG_SIZE(o) ((o)->long_value.lv_tag >> CPY_NON_SIZE_BITS)
+// Number of digits; negative for negative ints
+#define CPY_LONG_SIZE_SIGNED(o) (CPY_LONG_IS_NEGATIVE(o) ? -CPY_LONG_SIZE(o) : CPY_LONG_SIZE(o))
+// Number of digits, assuming int is non-negative
+#define CPY_LONG_SIZE_UNSIGNED(o) CPY_LONG_SIZE(o)
+
+#else
+
+#define CPY_LONG_DIGIT(o, n) ((o)->ob_digit[n])
+#define CPY_LONG_IS_NEGATIVE(o) (((o)->ob_base.ob_size < 0))
+#define CPY_LONG_SIZE_SIGNED(o) ((o)->ob_base.ob_size)
+#define CPY_LONG_SIZE_UNSIGNED(o) ((o)->ob_base.ob_size)
+
+#endif
+
+// Are we targeting Python 3.13 or newer?
+#define CPY_3_13_FEATURES (PY_VERSION_HEX >= 0x030d0000)
+
+// Are we targeting Python 3.14 or newer?
+#define CPY_3_14_FEATURES (PY_VERSION_HEX >= 0x030e0000) + +#endif diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/pythoncapi_compat.h b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/pythoncapi_compat.h new file mode 100644 index 0000000..b16075f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/pythoncapi_compat.h @@ -0,0 +1,2594 @@ +// Header file providing new C API functions to old Python versions. +// +// File distributed under the Zero Clause BSD (0BSD) license. +// Copyright Contributors to the pythoncapi_compat project. +// +// Homepage: +// https://github.com/python/pythoncapi_compat +// +// Latest version: +// https://raw.githubusercontent.com/python/pythoncapi-compat/main/pythoncapi_compat.h +// +// SPDX-License-Identifier: 0BSD + +#ifndef PYTHONCAPI_COMPAT +#define PYTHONCAPI_COMPAT + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include // offsetof() + +// Python 3.11.0b4 added PyFrame_Back() to Python.h +#if PY_VERSION_HEX < 0x030b00B4 && !defined(PYPY_VERSION) +# include "frameobject.h" // PyFrameObject, PyFrame_GetBack() +#endif +#if PY_VERSION_HEX < 0x030C00A3 +# include // T_SHORT, READONLY +#endif + + +#ifndef _Py_CAST +# define _Py_CAST(type, expr) ((type)(expr)) +#endif + +// Static inline functions should use _Py_NULL rather than using directly NULL +// to prevent C++ compiler warnings. On C23 and newer and on C++11 and newer, +// _Py_NULL is defined as nullptr. +#ifndef _Py_NULL +# if (defined (__STDC_VERSION__) && __STDC_VERSION__ > 201710L) \ + || (defined(__cplusplus) && __cplusplus >= 201103) +# define _Py_NULL nullptr +# else +# define _Py_NULL NULL +# endif +#endif + +// Cast argument to PyObject* type. +#ifndef _PyObject_CAST +# define _PyObject_CAST(op) _Py_CAST(PyObject*, op) +#endif + +#ifndef Py_BUILD_ASSERT +# define Py_BUILD_ASSERT(cond) \ + do { \ + (void)sizeof(char [1 - 2 * !(cond)]); \ + } while(0) +#endif + + +// bpo-42262 added Py_NewRef() to Python 3.10.0a3 +#if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_NewRef) +static inline PyObject* _Py_NewRef(PyObject *obj) +{ + Py_INCREF(obj); + return obj; +} +#define Py_NewRef(obj) _Py_NewRef(_PyObject_CAST(obj)) +#endif + + +// bpo-42262 added Py_XNewRef() to Python 3.10.0a3 +#if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_XNewRef) +static inline PyObject* _Py_XNewRef(PyObject *obj) +{ + Py_XINCREF(obj); + return obj; +} +#define Py_XNewRef(obj) _Py_XNewRef(_PyObject_CAST(obj)) +#endif + + +// bpo-39573 added Py_SET_REFCNT() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_REFCNT) +static inline void _Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) +{ + ob->ob_refcnt = refcnt; +} +#define Py_SET_REFCNT(ob, refcnt) _Py_SET_REFCNT(_PyObject_CAST(ob), refcnt) +#endif + + +// Py_SETREF() and Py_XSETREF() were added to Python 3.5.2. +// It is excluded from the limited C API. +#if (PY_VERSION_HEX < 0x03050200 && !defined(Py_SETREF)) && !defined(Py_LIMITED_API) +#define Py_SETREF(dst, src) \ + do { \ + PyObject **_tmp_dst_ptr = _Py_CAST(PyObject**, &(dst)); \ + PyObject *_tmp_dst = (*_tmp_dst_ptr); \ + *_tmp_dst_ptr = _PyObject_CAST(src); \ + Py_DECREF(_tmp_dst); \ + } while (0) + +#define Py_XSETREF(dst, src) \ + do { \ + PyObject **_tmp_dst_ptr = _Py_CAST(PyObject**, &(dst)); \ + PyObject *_tmp_dst = (*_tmp_dst_ptr); \ + *_tmp_dst_ptr = _PyObject_CAST(src); \ + Py_XDECREF(_tmp_dst); \ + } while (0) +#endif + + +// bpo-43753 added Py_Is(), Py_IsNone(), Py_IsTrue() and Py_IsFalse() +// to Python 3.10.0b1. 
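+// These are plain pointer comparisons: Py_IsNone(x) expands to (x) == Py_None,
+// so they are safe to use in hot paths on any supported Python version.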
+#if PY_VERSION_HEX < 0x030A00B1 && !defined(Py_Is) +# define Py_Is(x, y) ((x) == (y)) +#endif +#if PY_VERSION_HEX < 0x030A00B1 && !defined(Py_IsNone) +# define Py_IsNone(x) Py_Is(x, Py_None) +#endif +#if (PY_VERSION_HEX < 0x030A00B1 || defined(PYPY_VERSION)) && !defined(Py_IsTrue) +# define Py_IsTrue(x) Py_Is(x, Py_True) +#endif +#if (PY_VERSION_HEX < 0x030A00B1 || defined(PYPY_VERSION)) && !defined(Py_IsFalse) +# define Py_IsFalse(x) Py_Is(x, Py_False) +#endif + + +// bpo-39573 added Py_SET_TYPE() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_TYPE) +static inline void _Py_SET_TYPE(PyObject *ob, PyTypeObject *type) +{ + ob->ob_type = type; +} +#define Py_SET_TYPE(ob, type) _Py_SET_TYPE(_PyObject_CAST(ob), type) +#endif + + +// bpo-39573 added Py_SET_SIZE() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_SIZE) +static inline void _Py_SET_SIZE(PyVarObject *ob, Py_ssize_t size) +{ + ob->ob_size = size; +} +#define Py_SET_SIZE(ob, size) _Py_SET_SIZE((PyVarObject*)(ob), size) +#endif + + +// bpo-40421 added PyFrame_GetCode() to Python 3.9.0b1 +#if PY_VERSION_HEX < 0x030900B1 || defined(PYPY_VERSION) +static inline PyCodeObject* PyFrame_GetCode(PyFrameObject *frame) +{ + assert(frame != _Py_NULL); + assert(frame->f_code != _Py_NULL); + return _Py_CAST(PyCodeObject*, Py_NewRef(frame->f_code)); +} +#endif + +static inline PyCodeObject* _PyFrame_GetCodeBorrow(PyFrameObject *frame) +{ + PyCodeObject *code = PyFrame_GetCode(frame); + Py_DECREF(code); + return code; +} + + +// bpo-40421 added PyFrame_GetBack() to Python 3.9.0b1 +#if PY_VERSION_HEX < 0x030900B1 && !defined(PYPY_VERSION) +static inline PyFrameObject* PyFrame_GetBack(PyFrameObject *frame) +{ + assert(frame != _Py_NULL); + return _Py_CAST(PyFrameObject*, Py_XNewRef(frame->f_back)); +} +#endif + +#if !defined(PYPY_VERSION) +static inline PyFrameObject* _PyFrame_GetBackBorrow(PyFrameObject *frame) +{ + PyFrameObject *back = PyFrame_GetBack(frame); + Py_XDECREF(back); + return back; +} +#endif + + +// bpo-40421 added PyFrame_GetLocals() to Python 3.11.0a7 +#if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION) +static inline PyObject* PyFrame_GetLocals(PyFrameObject *frame) +{ +#if PY_VERSION_HEX >= 0x030400B1 + if (PyFrame_FastToLocalsWithError(frame) < 0) { + return NULL; + } +#else + PyFrame_FastToLocals(frame); +#endif + return Py_NewRef(frame->f_locals); +} +#endif + + +// bpo-40421 added PyFrame_GetGlobals() to Python 3.11.0a7 +#if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION) +static inline PyObject* PyFrame_GetGlobals(PyFrameObject *frame) +{ + return Py_NewRef(frame->f_globals); +} +#endif + + +// bpo-40421 added PyFrame_GetBuiltins() to Python 3.11.0a7 +#if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION) +static inline PyObject* PyFrame_GetBuiltins(PyFrameObject *frame) +{ + return Py_NewRef(frame->f_builtins); +} +#endif + + +// bpo-40421 added PyFrame_GetLasti() to Python 3.11.0b1 +#if PY_VERSION_HEX < 0x030B00B1 && !defined(PYPY_VERSION) +static inline int PyFrame_GetLasti(PyFrameObject *frame) +{ +#if PY_VERSION_HEX >= 0x030A00A7 + // bpo-27129: Since Python 3.10.0a7, f_lasti is an instruction offset, + // not a bytes offset anymore. Python uses 16-bit "wordcode" (2 bytes) + // instructions. 
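+    // The multiplication by 2 below converts that instruction offset back
+    // into a byte offset, which is what f_lasti reported on older versions.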
+ if (frame->f_lasti < 0) { + return -1; + } + return frame->f_lasti * 2; +#else + return frame->f_lasti; +#endif +} +#endif + + +// gh-91248 added PyFrame_GetVar() to Python 3.12.0a2 +#if PY_VERSION_HEX < 0x030C00A2 && !defined(PYPY_VERSION) +static inline PyObject* PyFrame_GetVar(PyFrameObject *frame, PyObject *name) +{ + PyObject *locals, *value; + + locals = PyFrame_GetLocals(frame); + if (locals == NULL) { + return NULL; + } +#if PY_VERSION_HEX >= 0x03000000 + value = PyDict_GetItemWithError(locals, name); +#else + value = _PyDict_GetItemWithError(locals, name); +#endif + Py_DECREF(locals); + + if (value == NULL) { + if (PyErr_Occurred()) { + return NULL; + } +#if PY_VERSION_HEX >= 0x03000000 + PyErr_Format(PyExc_NameError, "variable %R does not exist", name); +#else + PyErr_SetString(PyExc_NameError, "variable does not exist"); +#endif + return NULL; + } + return Py_NewRef(value); +} +#endif + + +// gh-91248 added PyFrame_GetVarString() to Python 3.12.0a2 +#if PY_VERSION_HEX < 0x030C00A2 && !defined(PYPY_VERSION) +static inline PyObject* +PyFrame_GetVarString(PyFrameObject *frame, const char *name) +{ + PyObject *name_obj, *value; +#if PY_VERSION_HEX >= 0x03000000 + name_obj = PyUnicode_FromString(name); +#else + name_obj = PyString_FromString(name); +#endif + if (name_obj == NULL) { + return NULL; + } + value = PyFrame_GetVar(frame, name_obj); + Py_DECREF(name_obj); + return value; +} +#endif + + +// bpo-39947 added PyThreadState_GetInterpreter() to Python 3.9.0a5 +#if PY_VERSION_HEX < 0x030900A5 || (defined(PYPY_VERSION) && PY_VERSION_HEX < 0x030B0000) +static inline PyInterpreterState * +PyThreadState_GetInterpreter(PyThreadState *tstate) +{ + assert(tstate != _Py_NULL); + return tstate->interp; +} +#endif + + +// bpo-40429 added PyThreadState_GetFrame() to Python 3.9.0b1 +#if PY_VERSION_HEX < 0x030900B1 && !defined(PYPY_VERSION) +static inline PyFrameObject* PyThreadState_GetFrame(PyThreadState *tstate) +{ + assert(tstate != _Py_NULL); + return _Py_CAST(PyFrameObject *, Py_XNewRef(tstate->frame)); +} +#endif + +#if !defined(PYPY_VERSION) +static inline PyFrameObject* +_PyThreadState_GetFrameBorrow(PyThreadState *tstate) +{ + PyFrameObject *frame = PyThreadState_GetFrame(tstate); + Py_XDECREF(frame); + return frame; +} +#endif + + +// bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a5 +#if PY_VERSION_HEX < 0x030900A5 || defined(PYPY_VERSION) +static inline PyInterpreterState* PyInterpreterState_Get(void) +{ + PyThreadState *tstate; + PyInterpreterState *interp; + + tstate = PyThreadState_GET(); + if (tstate == _Py_NULL) { + Py_FatalError("GIL released (tstate is NULL)"); + } + interp = tstate->interp; + if (interp == _Py_NULL) { + Py_FatalError("no current interpreter"); + } + return interp; +} +#endif + + +// bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a6 +#if 0x030700A1 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x030900A6 && !defined(PYPY_VERSION) +static inline uint64_t PyThreadState_GetID(PyThreadState *tstate) +{ + assert(tstate != _Py_NULL); + return tstate->id; +} +#endif + +// bpo-43760 added PyThreadState_EnterTracing() to Python 3.11.0a2 +#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION) +static inline void PyThreadState_EnterTracing(PyThreadState *tstate) +{ + tstate->tracing++; +#if PY_VERSION_HEX >= 0x030A00A1 + tstate->cframe->use_tracing = 0; +#else + tstate->use_tracing = 0; +#endif +} +#endif + +// bpo-43760 added PyThreadState_LeaveTracing() to Python 3.11.0a2 +#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION) +static inline void 
PyThreadState_LeaveTracing(PyThreadState *tstate) +{ + int use_tracing = (tstate->c_tracefunc != _Py_NULL + || tstate->c_profilefunc != _Py_NULL); + tstate->tracing--; +#if PY_VERSION_HEX >= 0x030A00A1 + tstate->cframe->use_tracing = use_tracing; +#else + tstate->use_tracing = use_tracing; +#endif +} +#endif + + +// bpo-37194 added PyObject_CallNoArgs() to Python 3.9.0a1 +// PyObject_CallNoArgs() added to PyPy 3.9.16-v7.3.11 +#if !defined(PyObject_CallNoArgs) && PY_VERSION_HEX < 0x030900A1 +static inline PyObject* PyObject_CallNoArgs(PyObject *func) +{ + return PyObject_CallFunctionObjArgs(func, NULL); +} +#endif + + +// bpo-39245 made PyObject_CallOneArg() public (previously called +// _PyObject_CallOneArg) in Python 3.9.0a4 +// PyObject_CallOneArg() added to PyPy 3.9.16-v7.3.11 +#if !defined(PyObject_CallOneArg) && PY_VERSION_HEX < 0x030900A4 +static inline PyObject* PyObject_CallOneArg(PyObject *func, PyObject *arg) +{ + return PyObject_CallFunctionObjArgs(func, arg, NULL); +} +#endif + + +// bpo-1635741 added PyModule_AddObjectRef() to Python 3.10.0a3 +#if PY_VERSION_HEX < 0x030A00A3 +static inline int +PyModule_AddObjectRef(PyObject *module, const char *name, PyObject *value) +{ + int res; + + if (!value && !PyErr_Occurred()) { + // PyModule_AddObject() raises TypeError in this case + PyErr_SetString(PyExc_SystemError, + "PyModule_AddObjectRef() must be called " + "with an exception raised if value is NULL"); + return -1; + } + + Py_XINCREF(value); + res = PyModule_AddObject(module, name, value); + if (res < 0) { + Py_XDECREF(value); + } + return res; +} +#endif + + +// bpo-40024 added PyModule_AddType() to Python 3.9.0a5 +#if PY_VERSION_HEX < 0x030900A5 +static inline int PyModule_AddType(PyObject *module, PyTypeObject *type) +{ + const char *name, *dot; + + if (PyType_Ready(type) < 0) { + return -1; + } + + // inline _PyType_Name() + name = type->tp_name; + assert(name != _Py_NULL); + dot = strrchr(name, '.'); + if (dot != _Py_NULL) { + name = dot + 1; + } + + return PyModule_AddObjectRef(module, name, _PyObject_CAST(type)); +} +#endif + + +// bpo-40241 added PyObject_GC_IsTracked() to Python 3.9.0a6. +// bpo-4688 added _PyObject_GC_IS_TRACKED() to Python 2.7.0a2. +#if PY_VERSION_HEX < 0x030900A6 && !defined(PYPY_VERSION) +static inline int PyObject_GC_IsTracked(PyObject* obj) +{ + return (PyObject_IS_GC(obj) && _PyObject_GC_IS_TRACKED(obj)); +} +#endif + +// bpo-40241 added PyObject_GC_IsFinalized() to Python 3.9.0a6. +// bpo-18112 added _PyGCHead_FINALIZED() to Python 3.4.0 final. +#if PY_VERSION_HEX < 0x030900A6 && PY_VERSION_HEX >= 0x030400F0 && !defined(PYPY_VERSION) +static inline int PyObject_GC_IsFinalized(PyObject *obj) +{ + PyGC_Head *gc = _Py_CAST(PyGC_Head*, obj) - 1; + return (PyObject_IS_GC(obj) && _PyGCHead_FINALIZED(gc)); +} +#endif + + +// bpo-39573 added Py_IS_TYPE() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_IS_TYPE) +static inline int _Py_IS_TYPE(PyObject *ob, PyTypeObject *type) { + return Py_TYPE(ob) == type; +} +#define Py_IS_TYPE(ob, type) _Py_IS_TYPE(_PyObject_CAST(ob), type) +#endif + + +// bpo-46906 added PyFloat_Pack2() and PyFloat_Unpack2() to Python 3.11a7. +// bpo-11734 added _PyFloat_Pack2() and _PyFloat_Unpack2() to Python 3.6.0b1. +// Python 3.11a2 moved _PyFloat_Pack2() and _PyFloat_Unpack2() to the internal +// C API: Python 3.11a2-3.11a6 versions are not supported. 
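+// The wrappers below simply forward to the private _PyFloat_Pack*() /
+// _PyFloat_Unpack*() functions, casting char* to unsigned char* to match
+// their signatures.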
+#if 0x030600B1 <= PY_VERSION_HEX && PY_VERSION_HEX <= 0x030B00A1 && !defined(PYPY_VERSION) +static inline int PyFloat_Pack2(double x, char *p, int le) +{ return _PyFloat_Pack2(x, (unsigned char*)p, le); } + +static inline double PyFloat_Unpack2(const char *p, int le) +{ return _PyFloat_Unpack2((const unsigned char *)p, le); } +#endif + + +// bpo-46906 added PyFloat_Pack4(), PyFloat_Pack8(), PyFloat_Unpack4() and +// PyFloat_Unpack8() to Python 3.11a7. +// Python 3.11a2 moved _PyFloat_Pack4(), _PyFloat_Pack8(), _PyFloat_Unpack4() +// and _PyFloat_Unpack8() to the internal C API: Python 3.11a2-3.11a6 versions +// are not supported. +#if PY_VERSION_HEX <= 0x030B00A1 && !defined(PYPY_VERSION) +static inline int PyFloat_Pack4(double x, char *p, int le) +{ return _PyFloat_Pack4(x, (unsigned char*)p, le); } + +static inline int PyFloat_Pack8(double x, char *p, int le) +{ return _PyFloat_Pack8(x, (unsigned char*)p, le); } + +static inline double PyFloat_Unpack4(const char *p, int le) +{ return _PyFloat_Unpack4((const unsigned char *)p, le); } + +static inline double PyFloat_Unpack8(const char *p, int le) +{ return _PyFloat_Unpack8((const unsigned char *)p, le); } +#endif + + +// gh-92154 added PyCode_GetCode() to Python 3.11.0b1 +#if PY_VERSION_HEX < 0x030B00B1 && !defined(PYPY_VERSION) +static inline PyObject* PyCode_GetCode(PyCodeObject *code) +{ + return Py_NewRef(code->co_code); +} +#endif + + +// gh-95008 added PyCode_GetVarnames() to Python 3.11.0rc1 +#if PY_VERSION_HEX < 0x030B00C1 && !defined(PYPY_VERSION) +static inline PyObject* PyCode_GetVarnames(PyCodeObject *code) +{ + return Py_NewRef(code->co_varnames); +} +#endif + +// gh-95008 added PyCode_GetFreevars() to Python 3.11.0rc1 +#if PY_VERSION_HEX < 0x030B00C1 && !defined(PYPY_VERSION) +static inline PyObject* PyCode_GetFreevars(PyCodeObject *code) +{ + return Py_NewRef(code->co_freevars); +} +#endif + +// gh-95008 added PyCode_GetCellvars() to Python 3.11.0rc1 +#if PY_VERSION_HEX < 0x030B00C1 && !defined(PYPY_VERSION) +static inline PyObject* PyCode_GetCellvars(PyCodeObject *code) +{ + return Py_NewRef(code->co_cellvars); +} +#endif + + +// Py_UNUSED() was added to Python 3.4.0b2. 
+#if PY_VERSION_HEX < 0x030400B2 && !defined(Py_UNUSED) +# if defined(__GNUC__) || defined(__clang__) +# define Py_UNUSED(name) _unused_ ## name __attribute__((unused)) +# else +# define Py_UNUSED(name) _unused_ ## name +# endif +#endif + + +// gh-105922 added PyImport_AddModuleRef() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A0 +static inline PyObject* PyImport_AddModuleRef(const char *name) +{ + return Py_XNewRef(PyImport_AddModule(name)); +} +#endif + + +// gh-105927 added PyWeakref_GetRef() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D0000 +static inline int PyWeakref_GetRef(PyObject *ref, PyObject **pobj) +{ + PyObject *obj; + if (ref != NULL && !PyWeakref_Check(ref)) { + *pobj = NULL; + PyErr_SetString(PyExc_TypeError, "expected a weakref"); + return -1; + } + obj = PyWeakref_GetObject(ref); + if (obj == NULL) { + // SystemError if ref is NULL + *pobj = NULL; + return -1; + } + if (obj == Py_None) { + *pobj = NULL; + return 0; + } + *pobj = Py_NewRef(obj); + return 1; +} +#endif + + +// bpo-36974 added PY_VECTORCALL_ARGUMENTS_OFFSET to Python 3.8b1 +#ifndef PY_VECTORCALL_ARGUMENTS_OFFSET +# define PY_VECTORCALL_ARGUMENTS_OFFSET (_Py_CAST(size_t, 1) << (8 * sizeof(size_t) - 1)) +#endif + +// bpo-36974 added PyVectorcall_NARGS() to Python 3.8b1 +#if PY_VERSION_HEX < 0x030800B1 +static inline Py_ssize_t PyVectorcall_NARGS(size_t n) +{ + return n & ~PY_VECTORCALL_ARGUMENTS_OFFSET; +} +#endif + + +// gh-105922 added PyObject_Vectorcall() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 +static inline PyObject* +PyObject_Vectorcall(PyObject *callable, PyObject *const *args, + size_t nargsf, PyObject *kwnames) +{ +#if PY_VERSION_HEX >= 0x030800B1 && !defined(PYPY_VERSION) + // bpo-36974 added _PyObject_Vectorcall() to Python 3.8.0b1 + return _PyObject_Vectorcall(callable, args, nargsf, kwnames); +#else + PyObject *posargs = NULL, *kwargs = NULL; + PyObject *res; + Py_ssize_t nposargs, nkwargs, i; + + if (nargsf != 0 && args == NULL) { + PyErr_BadInternalCall(); + goto error; + } + if (kwnames != NULL && !PyTuple_Check(kwnames)) { + PyErr_BadInternalCall(); + goto error; + } + + nposargs = (Py_ssize_t)PyVectorcall_NARGS(nargsf); + if (kwnames) { + nkwargs = PyTuple_GET_SIZE(kwnames); + } + else { + nkwargs = 0; + } + + posargs = PyTuple_New(nposargs); + if (posargs == NULL) { + goto error; + } + if (nposargs) { + for (i=0; i < nposargs; i++) { + PyTuple_SET_ITEM(posargs, i, Py_NewRef(*args)); + args++; + } + } + + if (nkwargs) { + kwargs = PyDict_New(); + if (kwargs == NULL) { + goto error; + } + + for (i = 0; i < nkwargs; i++) { + PyObject *key = PyTuple_GET_ITEM(kwnames, i); + PyObject *value = *args; + args++; + if (PyDict_SetItem(kwargs, key, value) < 0) { + goto error; + } + } + } + else { + kwargs = NULL; + } + + res = PyObject_Call(callable, posargs, kwargs); + Py_DECREF(posargs); + Py_XDECREF(kwargs); + return res; + +error: + Py_DECREF(posargs); + Py_XDECREF(kwargs); + return NULL; +#endif +} +#endif + + +// gh-106521 added PyObject_GetOptionalAttr() and +// PyObject_GetOptionalAttrString() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyObject_GetOptionalAttr(PyObject *obj, PyObject *attr_name, PyObject **result) +{ + // bpo-32571 added _PyObject_LookupAttr() to Python 3.7.0b1 +#if PY_VERSION_HEX >= 0x030700B1 && !defined(PYPY_VERSION) + return _PyObject_LookupAttr(obj, attr_name, result); +#else + *result = PyObject_GetAttr(obj, attr_name); + if (*result != NULL) { + return 1; + } + if (!PyErr_Occurred()) { + return 0; + } + if 
(PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + return 0; + } + return -1; +#endif +} + +static inline int +PyObject_GetOptionalAttrString(PyObject *obj, const char *attr_name, PyObject **result) +{ + PyObject *name_obj; + int rc; +#if PY_VERSION_HEX >= 0x03000000 + name_obj = PyUnicode_FromString(attr_name); +#else + name_obj = PyString_FromString(attr_name); +#endif + if (name_obj == NULL) { + *result = NULL; + return -1; + } + rc = PyObject_GetOptionalAttr(obj, name_obj, result); + Py_DECREF(name_obj); + return rc; +} +#endif + + +// gh-106307 added PyObject_GetOptionalAttr() and +// PyMapping_GetOptionalItemString() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyMapping_GetOptionalItem(PyObject *obj, PyObject *key, PyObject **result) +{ + *result = PyObject_GetItem(obj, key); + if (*result) { + return 1; + } + if (!PyErr_ExceptionMatches(PyExc_KeyError)) { + return -1; + } + PyErr_Clear(); + return 0; +} + +static inline int +PyMapping_GetOptionalItemString(PyObject *obj, const char *key, PyObject **result) +{ + PyObject *key_obj; + int rc; +#if PY_VERSION_HEX >= 0x03000000 + key_obj = PyUnicode_FromString(key); +#else + key_obj = PyString_FromString(key); +#endif + if (key_obj == NULL) { + *result = NULL; + return -1; + } + rc = PyMapping_GetOptionalItem(obj, key_obj, result); + Py_DECREF(key_obj); + return rc; +} +#endif + +// gh-108511 added PyMapping_HasKeyWithError() and +// PyMapping_HasKeyStringWithError() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyMapping_HasKeyWithError(PyObject *obj, PyObject *key) +{ + PyObject *res; + int rc = PyMapping_GetOptionalItem(obj, key, &res); + Py_XDECREF(res); + return rc; +} + +static inline int +PyMapping_HasKeyStringWithError(PyObject *obj, const char *key) +{ + PyObject *res; + int rc = PyMapping_GetOptionalItemString(obj, key, &res); + Py_XDECREF(res); + return rc; +} +#endif + + +// gh-108511 added PyObject_HasAttrWithError() and +// PyObject_HasAttrStringWithError() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyObject_HasAttrWithError(PyObject *obj, PyObject *attr) +{ + PyObject *res; + int rc = PyObject_GetOptionalAttr(obj, attr, &res); + Py_XDECREF(res); + return rc; +} + +static inline int +PyObject_HasAttrStringWithError(PyObject *obj, const char *attr) +{ + PyObject *res; + int rc = PyObject_GetOptionalAttrString(obj, attr, &res); + Py_XDECREF(res); + return rc; +} +#endif + + +// gh-106004 added PyDict_GetItemRef() and PyDict_GetItemStringRef() +// to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyDict_GetItemRef(PyObject *mp, PyObject *key, PyObject **result) +{ +#if PY_VERSION_HEX >= 0x03000000 + PyObject *item = PyDict_GetItemWithError(mp, key); +#else + PyObject *item = _PyDict_GetItemWithError(mp, key); +#endif + if (item != NULL) { + *result = Py_NewRef(item); + return 1; // found + } + if (!PyErr_Occurred()) { + *result = NULL; + return 0; // not found + } + *result = NULL; + return -1; +} + +static inline int +PyDict_GetItemStringRef(PyObject *mp, const char *key, PyObject **result) +{ + int res; +#if PY_VERSION_HEX >= 0x03000000 + PyObject *key_obj = PyUnicode_FromString(key); +#else + PyObject *key_obj = PyString_FromString(key); +#endif + if (key_obj == NULL) { + *result = NULL; + return -1; + } + res = PyDict_GetItemRef(mp, key_obj, result); + Py_DECREF(key_obj); + return res; +} +#endif + + +// gh-106307 added PyModule_Add() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline 
int +PyModule_Add(PyObject *mod, const char *name, PyObject *value) +{ + int res = PyModule_AddObjectRef(mod, name, value); + Py_XDECREF(value); + return res; +} +#endif + + +// gh-108014 added Py_IsFinalizing() to Python 3.13.0a1 +// bpo-1856 added _Py_Finalizing to Python 3.2.1b1. +// _Py_IsFinalizing() was added to PyPy 7.3.0. +#if (0x030201B1 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x030D00A1) \ + && (!defined(PYPY_VERSION_NUM) || PYPY_VERSION_NUM >= 0x7030000) +static inline int Py_IsFinalizing(void) +{ +#if PY_VERSION_HEX >= 0x030700A1 + // _Py_IsFinalizing() was added to Python 3.7.0a1. + return _Py_IsFinalizing(); +#else + return (_Py_Finalizing != NULL); +#endif +} +#endif + + +// gh-108323 added PyDict_ContainsString() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int PyDict_ContainsString(PyObject *op, const char *key) +{ + PyObject *key_obj = PyUnicode_FromString(key); + if (key_obj == NULL) { + return -1; + } + int res = PyDict_Contains(op, key_obj); + Py_DECREF(key_obj); + return res; +} +#endif + + +// gh-108445 added PyLong_AsInt() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int PyLong_AsInt(PyObject *obj) +{ +#ifdef PYPY_VERSION + long value = PyLong_AsLong(obj); + if (value == -1 && PyErr_Occurred()) { + return -1; + } + if (value < (long)INT_MIN || (long)INT_MAX < value) { + PyErr_SetString(PyExc_OverflowError, + "Python int too large to convert to C int"); + return -1; + } + return (int)value; +#else + return _PyLong_AsInt(obj); +#endif +} +#endif + + +// gh-107073 added PyObject_VisitManagedDict() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyObject_VisitManagedDict(PyObject *obj, visitproc visit, void *arg) +{ + PyObject **dict = _PyObject_GetDictPtr(obj); + if (dict == NULL || *dict == NULL) { + return -1; + } + Py_VISIT(*dict); + return 0; +} + +static inline void +PyObject_ClearManagedDict(PyObject *obj) +{ + PyObject **dict = _PyObject_GetDictPtr(obj); + if (dict == NULL || *dict == NULL) { + return; + } + Py_CLEAR(*dict); +} +#endif + +// gh-108867 added PyThreadState_GetUnchecked() to Python 3.13.0a1 +// Python 3.5.2 added _PyThreadState_UncheckedGet(). +#if PY_VERSION_HEX >= 0x03050200 && PY_VERSION_HEX < 0x030D00A1 +static inline PyThreadState* +PyThreadState_GetUnchecked(void) +{ + return _PyThreadState_UncheckedGet(); +} +#endif + +// gh-110289 added PyUnicode_EqualToUTF8() and PyUnicode_EqualToUTF8AndSize() +// to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyUnicode_EqualToUTF8AndSize(PyObject *unicode, const char *str, Py_ssize_t str_len) +{ + Py_ssize_t len; + const void *utf8; + PyObject *exc_type, *exc_value, *exc_tb; + int res; + + // API cannot report errors so save/restore the exception + PyErr_Fetch(&exc_type, &exc_value, &exc_tb); + + // Python 3.3.0a1 added PyUnicode_AsUTF8AndSize() +#if PY_VERSION_HEX >= 0x030300A1 + if (PyUnicode_IS_ASCII(unicode)) { + utf8 = PyUnicode_DATA(unicode); + len = PyUnicode_GET_LENGTH(unicode); + } + else { + utf8 = PyUnicode_AsUTF8AndSize(unicode, &len); + if (utf8 == NULL) { + // Memory allocation failure. The API cannot report error, + // so ignore the exception and return 0. + res = 0; + goto done; + } + } + + if (len != str_len) { + res = 0; + goto done; + } + res = (memcmp(utf8, str, (size_t)len) == 0); +#else + PyObject *bytes = PyUnicode_AsUTF8String(unicode); + if (bytes == NULL) { + // Memory allocation failure. The API cannot report error, + // so ignore the exception and return 0. 
+ res = 0; + goto done; + } + +#if PY_VERSION_HEX >= 0x03000000 + len = PyBytes_GET_SIZE(bytes); + utf8 = PyBytes_AS_STRING(bytes); +#else + len = PyString_GET_SIZE(bytes); + utf8 = PyString_AS_STRING(bytes); +#endif + if (len != str_len) { + Py_DECREF(bytes); + res = 0; + goto done; + } + + res = (memcmp(utf8, str, (size_t)len) == 0); + Py_DECREF(bytes); +#endif + +done: + PyErr_Restore(exc_type, exc_value, exc_tb); + return res; +} + +static inline int +PyUnicode_EqualToUTF8(PyObject *unicode, const char *str) +{ + return PyUnicode_EqualToUTF8AndSize(unicode, str, (Py_ssize_t)strlen(str)); +} +#endif + + +// gh-111138 added PyList_Extend() and PyList_Clear() to Python 3.13.0a2 +#if PY_VERSION_HEX < 0x030D00A2 +static inline int +PyList_Extend(PyObject *list, PyObject *iterable) +{ + return PyList_SetSlice(list, PY_SSIZE_T_MAX, PY_SSIZE_T_MAX, iterable); +} + +static inline int +PyList_Clear(PyObject *list) +{ + return PyList_SetSlice(list, 0, PY_SSIZE_T_MAX, NULL); +} +#endif + +// gh-111262 added PyDict_Pop() and PyDict_PopString() to Python 3.13.0a2 +#if PY_VERSION_HEX < 0x030D00A2 +static inline int +PyDict_Pop(PyObject *dict, PyObject *key, PyObject **result) +{ + PyObject *value; + + if (!PyDict_Check(dict)) { + PyErr_BadInternalCall(); + if (result) { + *result = NULL; + } + return -1; + } + + // bpo-16991 added _PyDict_Pop() to Python 3.5.0b2. + // Python 3.6.0b3 changed _PyDict_Pop() first argument type to PyObject*. + // Python 3.13.0a1 removed _PyDict_Pop(). +#if defined(PYPY_VERSION) || PY_VERSION_HEX < 0x030500b2 || PY_VERSION_HEX >= 0x030D0000 + value = PyObject_CallMethod(dict, "pop", "O", key); +#elif PY_VERSION_HEX < 0x030600b3 + value = _PyDict_Pop(_Py_CAST(PyDictObject*, dict), key, NULL); +#else + value = _PyDict_Pop(dict, key, NULL); +#endif + if (value == NULL) { + if (result) { + *result = NULL; + } + if (PyErr_Occurred() && !PyErr_ExceptionMatches(PyExc_KeyError)) { + return -1; + } + PyErr_Clear(); + return 0; + } + if (result) { + *result = value; + } + else { + Py_DECREF(value); + } + return 1; +} + +static inline int +PyDict_PopString(PyObject *dict, const char *key, PyObject **result) +{ + PyObject *key_obj = PyUnicode_FromString(key); + if (key_obj == NULL) { + if (result != NULL) { + *result = NULL; + } + return -1; + } + + int res = PyDict_Pop(dict, key_obj, result); + Py_DECREF(key_obj); + return res; +} +#endif + + +#if PY_VERSION_HEX < 0x030200A4 +// Python 3.2.0a4 added Py_hash_t type +typedef Py_ssize_t Py_hash_t; +#endif + + +// gh-111545 added Py_HashPointer() to Python 3.13.0a3 +#if PY_VERSION_HEX < 0x030D00A3 +static inline Py_hash_t Py_HashPointer(const void *ptr) +{ +#if PY_VERSION_HEX >= 0x030900A4 && !defined(PYPY_VERSION) + return _Py_HashPointer(ptr); +#else + return _Py_HashPointer(_Py_CAST(void*, ptr)); +#endif +} +#endif + + +// Python 3.13a4 added a PyTime API. +// Use the private API added to Python 3.5. 
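+// A minimal usage sketch of the compatibility API defined below (same calling
+// convention as the public PyTime API; on older interpreters these calls
+// resolve to the private _PyTime_* helpers):
+//
+//     PyTime_t t;
+//     if (PyTime_Monotonic(&t) == 0) {
+//         double seconds = PyTime_AsSecondsDouble(t);
+//     }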
+#if PY_VERSION_HEX < 0x030D00A4 && PY_VERSION_HEX >= 0x03050000 +typedef _PyTime_t PyTime_t; +#define PyTime_MIN _PyTime_MIN +#define PyTime_MAX _PyTime_MAX + +static inline double PyTime_AsSecondsDouble(PyTime_t t) +{ return _PyTime_AsSecondsDouble(t); } + +static inline int PyTime_Monotonic(PyTime_t *result) +{ return _PyTime_GetMonotonicClockWithInfo(result, NULL); } + +static inline int PyTime_Time(PyTime_t *result) +{ return _PyTime_GetSystemClockWithInfo(result, NULL); } + +static inline int PyTime_PerfCounter(PyTime_t *result) +{ +#if PY_VERSION_HEX >= 0x03070000 && !defined(PYPY_VERSION) + return _PyTime_GetPerfCounterWithInfo(result, NULL); +#elif PY_VERSION_HEX >= 0x03070000 + // Call time.perf_counter_ns() and convert Python int object to PyTime_t. + // Cache time.perf_counter_ns() function for best performance. + static PyObject *func = NULL; + if (func == NULL) { + PyObject *mod = PyImport_ImportModule("time"); + if (mod == NULL) { + return -1; + } + + func = PyObject_GetAttrString(mod, "perf_counter_ns"); + Py_DECREF(mod); + if (func == NULL) { + return -1; + } + } + + PyObject *res = PyObject_CallNoArgs(func); + if (res == NULL) { + return -1; + } + long long value = PyLong_AsLongLong(res); + Py_DECREF(res); + + if (value == -1 && PyErr_Occurred()) { + return -1; + } + + Py_BUILD_ASSERT(sizeof(value) >= sizeof(PyTime_t)); + *result = (PyTime_t)value; + return 0; +#else + // Call time.perf_counter() and convert C double to PyTime_t. + // Cache time.perf_counter() function for best performance. + static PyObject *func = NULL; + if (func == NULL) { + PyObject *mod = PyImport_ImportModule("time"); + if (mod == NULL) { + return -1; + } + + func = PyObject_GetAttrString(mod, "perf_counter"); + Py_DECREF(mod); + if (func == NULL) { + return -1; + } + } + + PyObject *res = PyObject_CallNoArgs(func); + if (res == NULL) { + return -1; + } + double d = PyFloat_AsDouble(res); + Py_DECREF(res); + + if (d == -1.0 && PyErr_Occurred()) { + return -1; + } + + // Avoid floor() to avoid having to link to libm + *result = (PyTime_t)(d * 1e9); + return 0; +#endif +} + +#endif + +// gh-111389 added hash constants to Python 3.13.0a5. These constants were +// added first as private macros to Python 3.4.0b1 and PyPy 7.3.8. 
+#if (!defined(PyHASH_BITS) \ + && ((!defined(PYPY_VERSION) && PY_VERSION_HEX >= 0x030400B1) \ + || (defined(PYPY_VERSION) && PY_VERSION_HEX >= 0x03070000 \ + && PYPY_VERSION_NUM >= 0x07030800))) +# define PyHASH_BITS _PyHASH_BITS +# define PyHASH_MODULUS _PyHASH_MODULUS +# define PyHASH_INF _PyHASH_INF +# define PyHASH_IMAG _PyHASH_IMAG +#endif + + +// gh-111545 added Py_GetConstant() and Py_GetConstantBorrowed() +// to Python 3.13.0a6 +#if PY_VERSION_HEX < 0x030D00A6 && !defined(Py_CONSTANT_NONE) + +#define Py_CONSTANT_NONE 0 +#define Py_CONSTANT_FALSE 1 +#define Py_CONSTANT_TRUE 2 +#define Py_CONSTANT_ELLIPSIS 3 +#define Py_CONSTANT_NOT_IMPLEMENTED 4 +#define Py_CONSTANT_ZERO 5 +#define Py_CONSTANT_ONE 6 +#define Py_CONSTANT_EMPTY_STR 7 +#define Py_CONSTANT_EMPTY_BYTES 8 +#define Py_CONSTANT_EMPTY_TUPLE 9 + +static inline PyObject* Py_GetConstant(unsigned int constant_id) +{ + static PyObject* constants[Py_CONSTANT_EMPTY_TUPLE + 1] = {NULL}; + + if (constants[Py_CONSTANT_NONE] == NULL) { + constants[Py_CONSTANT_NONE] = Py_None; + constants[Py_CONSTANT_FALSE] = Py_False; + constants[Py_CONSTANT_TRUE] = Py_True; + constants[Py_CONSTANT_ELLIPSIS] = Py_Ellipsis; + constants[Py_CONSTANT_NOT_IMPLEMENTED] = Py_NotImplemented; + + constants[Py_CONSTANT_ZERO] = PyLong_FromLong(0); + if (constants[Py_CONSTANT_ZERO] == NULL) { + goto fatal_error; + } + + constants[Py_CONSTANT_ONE] = PyLong_FromLong(1); + if (constants[Py_CONSTANT_ONE] == NULL) { + goto fatal_error; + } + + constants[Py_CONSTANT_EMPTY_STR] = PyUnicode_FromStringAndSize("", 0); + if (constants[Py_CONSTANT_EMPTY_STR] == NULL) { + goto fatal_error; + } + + constants[Py_CONSTANT_EMPTY_BYTES] = PyBytes_FromStringAndSize("", 0); + if (constants[Py_CONSTANT_EMPTY_BYTES] == NULL) { + goto fatal_error; + } + + constants[Py_CONSTANT_EMPTY_TUPLE] = PyTuple_New(0); + if (constants[Py_CONSTANT_EMPTY_TUPLE] == NULL) { + goto fatal_error; + } + // goto dance to avoid compiler warnings about Py_FatalError() + goto init_done; + +fatal_error: + // This case should never happen + Py_FatalError("Py_GetConstant() failed to get constants"); + } + +init_done: + if (constant_id <= Py_CONSTANT_EMPTY_TUPLE) { + return Py_NewRef(constants[constant_id]); + } + else { + PyErr_BadInternalCall(); + return NULL; + } +} + +static inline PyObject* Py_GetConstantBorrowed(unsigned int constant_id) +{ + PyObject *obj = Py_GetConstant(constant_id); + Py_XDECREF(obj); + return obj; +} +#endif + + +// gh-114329 added PyList_GetItemRef() to Python 3.13.0a4 +#if PY_VERSION_HEX < 0x030D00A4 +static inline PyObject * +PyList_GetItemRef(PyObject *op, Py_ssize_t index) +{ + PyObject *item = PyList_GetItem(op, index); + Py_XINCREF(item); + return item; +} +#endif + + +// gh-114329 added PyList_GetItemRef() to Python 3.13.0a4 +#if PY_VERSION_HEX < 0x030D00A4 +static inline int +PyDict_SetDefaultRef(PyObject *d, PyObject *key, PyObject *default_value, + PyObject **result) +{ + PyObject *value; + if (PyDict_GetItemRef(d, key, &value) < 0) { + // get error + if (result) { + *result = NULL; + } + return -1; + } + if (value != NULL) { + // present + if (result) { + *result = value; + } + else { + Py_DECREF(value); + } + return 1; + } + + // missing: set the item + if (PyDict_SetItem(d, key, default_value) < 0) { + // set error + if (result) { + *result = NULL; + } + return -1; + } + if (result) { + *result = Py_NewRef(default_value); + } + return 0; +} +#endif + +#if PY_VERSION_HEX < 0x030D00B3 +# define Py_BEGIN_CRITICAL_SECTION(op) { +# define Py_END_CRITICAL_SECTION() } +# define 
Py_BEGIN_CRITICAL_SECTION2(a, b) { +# define Py_END_CRITICAL_SECTION2() } +#endif + +#if PY_VERSION_HEX < 0x030E0000 && PY_VERSION_HEX >= 0x03060000 && !defined(PYPY_VERSION) +typedef struct PyUnicodeWriter PyUnicodeWriter; + +static inline void PyUnicodeWriter_Discard(PyUnicodeWriter *writer) +{ + _PyUnicodeWriter_Dealloc((_PyUnicodeWriter*)writer); + PyMem_Free(writer); +} + +static inline PyUnicodeWriter* PyUnicodeWriter_Create(Py_ssize_t length) +{ + if (length < 0) { + PyErr_SetString(PyExc_ValueError, + "length must be positive"); + return NULL; + } + + const size_t size = sizeof(_PyUnicodeWriter); + PyUnicodeWriter *pub_writer = (PyUnicodeWriter *)PyMem_Malloc(size); + if (pub_writer == _Py_NULL) { + PyErr_NoMemory(); + return _Py_NULL; + } + _PyUnicodeWriter *writer = (_PyUnicodeWriter *)pub_writer; + + _PyUnicodeWriter_Init(writer); + if (_PyUnicodeWriter_Prepare(writer, length, 127) < 0) { + PyUnicodeWriter_Discard(pub_writer); + return NULL; + } + writer->overallocate = 1; + return pub_writer; +} + +static inline PyObject* PyUnicodeWriter_Finish(PyUnicodeWriter *writer) +{ + PyObject *str = _PyUnicodeWriter_Finish((_PyUnicodeWriter*)writer); + assert(((_PyUnicodeWriter*)writer)->buffer == NULL); + PyMem_Free(writer); + return str; +} + +static inline int +PyUnicodeWriter_WriteChar(PyUnicodeWriter *writer, Py_UCS4 ch) +{ + if (ch > 0x10ffff) { + PyErr_SetString(PyExc_ValueError, + "character must be in range(0x110000)"); + return -1; + } + + return _PyUnicodeWriter_WriteChar((_PyUnicodeWriter*)writer, ch); +} + +static inline int +PyUnicodeWriter_WriteStr(PyUnicodeWriter *writer, PyObject *obj) +{ + PyObject *str = PyObject_Str(obj); + if (str == NULL) { + return -1; + } + + int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str); + Py_DECREF(str); + return res; +} + +static inline int +PyUnicodeWriter_WriteRepr(PyUnicodeWriter *writer, PyObject *obj) +{ + PyObject *str = PyObject_Repr(obj); + if (str == NULL) { + return -1; + } + + int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str); + Py_DECREF(str); + return res; +} + +static inline int +PyUnicodeWriter_WriteUTF8(PyUnicodeWriter *writer, + const char *str, Py_ssize_t size) +{ + if (size < 0) { + size = (Py_ssize_t)strlen(str); + } + + PyObject *str_obj = PyUnicode_FromStringAndSize(str, size); + if (str_obj == _Py_NULL) { + return -1; + } + + int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str_obj); + Py_DECREF(str_obj); + return res; +} + +static inline int +PyUnicodeWriter_WriteASCII(PyUnicodeWriter *writer, + const char *str, Py_ssize_t size) +{ + if (size < 0) { + size = (Py_ssize_t)strlen(str); + } + + return _PyUnicodeWriter_WriteASCIIString((_PyUnicodeWriter*)writer, + str, size); +} + +static inline int +PyUnicodeWriter_WriteWideChar(PyUnicodeWriter *writer, + const wchar_t *str, Py_ssize_t size) +{ + if (size < 0) { + size = (Py_ssize_t)wcslen(str); + } + + PyObject *str_obj = PyUnicode_FromWideChar(str, size); + if (str_obj == _Py_NULL) { + return -1; + } + + int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str_obj); + Py_DECREF(str_obj); + return res; +} + +static inline int +PyUnicodeWriter_WriteSubstring(PyUnicodeWriter *writer, PyObject *str, + Py_ssize_t start, Py_ssize_t end) +{ + if (!PyUnicode_Check(str)) { + PyErr_Format(PyExc_TypeError, "expect str, not %s", + Py_TYPE(str)->tp_name); + return -1; + } + if (start < 0 || start > end) { + PyErr_Format(PyExc_ValueError, "invalid start argument"); + return -1; + } + if (end > PyUnicode_GET_LENGTH(str)) { + 
PyErr_Format(PyExc_ValueError, "invalid end argument"); + return -1; + } + + return _PyUnicodeWriter_WriteSubstring((_PyUnicodeWriter*)writer, str, + start, end); +} + +static inline int +PyUnicodeWriter_Format(PyUnicodeWriter *writer, const char *format, ...) +{ + va_list vargs; + va_start(vargs, format); + PyObject *str = PyUnicode_FromFormatV(format, vargs); + va_end(vargs); + if (str == _Py_NULL) { + return -1; + } + + int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str); + Py_DECREF(str); + return res; +} +#endif // PY_VERSION_HEX < 0x030E0000 + +// gh-116560 added PyLong_GetSign() to Python 3.14.0a0 +#if PY_VERSION_HEX < 0x030E00A0 +static inline int PyLong_GetSign(PyObject *obj, int *sign) +{ + if (!PyLong_Check(obj)) { + PyErr_Format(PyExc_TypeError, "expect int, got %s", Py_TYPE(obj)->tp_name); + return -1; + } + + *sign = _PyLong_Sign(obj); + return 0; +} +#endif + +// gh-126061 added PyLong_IsPositive/Negative/Zero() to Python in 3.14.0a2 +#if PY_VERSION_HEX < 0x030E00A2 +static inline int PyLong_IsPositive(PyObject *obj) +{ + if (!PyLong_Check(obj)) { + PyErr_Format(PyExc_TypeError, "expected int, got %s", Py_TYPE(obj)->tp_name); + return -1; + } + return _PyLong_Sign(obj) == 1; +} + +static inline int PyLong_IsNegative(PyObject *obj) +{ + if (!PyLong_Check(obj)) { + PyErr_Format(PyExc_TypeError, "expected int, got %s", Py_TYPE(obj)->tp_name); + return -1; + } + return _PyLong_Sign(obj) == -1; +} + +static inline int PyLong_IsZero(PyObject *obj) +{ + if (!PyLong_Check(obj)) { + PyErr_Format(PyExc_TypeError, "expected int, got %s", Py_TYPE(obj)->tp_name); + return -1; + } + return _PyLong_Sign(obj) == 0; +} +#endif + + +// gh-124502 added PyUnicode_Equal() to Python 3.14.0a0 +#if PY_VERSION_HEX < 0x030E00A0 +static inline int PyUnicode_Equal(PyObject *str1, PyObject *str2) +{ + if (!PyUnicode_Check(str1)) { + PyErr_Format(PyExc_TypeError, "first argument must be str, not %s", + Py_TYPE(str1)->tp_name); + return -1; + } + if (!PyUnicode_Check(str2)) { + PyErr_Format(PyExc_TypeError, "second argument must be str, not %s", + Py_TYPE(str2)->tp_name); + return -1; + } + +#if PY_VERSION_HEX >= 0x030d0000 && !defined(PYPY_VERSION) + PyAPI_FUNC(int) _PyUnicode_Equal(PyObject *str1, PyObject *str2); + + return _PyUnicode_Equal(str1, str2); +#elif PY_VERSION_HEX >= 0x03060000 && !defined(PYPY_VERSION) + return _PyUnicode_EQ(str1, str2); +#elif PY_VERSION_HEX >= 0x03090000 && defined(PYPY_VERSION) + return _PyUnicode_EQ(str1, str2); +#else + return (PyUnicode_Compare(str1, str2) == 0); +#endif +} +#endif + + +// gh-121645 added PyBytes_Join() to Python 3.14.0a0 +#if PY_VERSION_HEX < 0x030E00A0 +static inline PyObject* PyBytes_Join(PyObject *sep, PyObject *iterable) +{ + return _PyBytes_Join(sep, iterable); +} +#endif + + +#if PY_VERSION_HEX < 0x030E00A0 +static inline Py_hash_t Py_HashBuffer(const void *ptr, Py_ssize_t len) +{ +#if PY_VERSION_HEX >= 0x03000000 && !defined(PYPY_VERSION) + PyAPI_FUNC(Py_hash_t) _Py_HashBytes(const void *src, Py_ssize_t len); + + return _Py_HashBytes(ptr, len); +#else + Py_hash_t hash; + PyObject *bytes = PyBytes_FromStringAndSize((const char*)ptr, len); + if (bytes == NULL) { + return -1; + } + hash = PyObject_Hash(bytes); + Py_DECREF(bytes); + return hash; +#endif +} +#endif + + +#if PY_VERSION_HEX < 0x030E00A0 +static inline int PyIter_NextItem(PyObject *iter, PyObject **item) +{ + iternextfunc tp_iternext; + + assert(iter != NULL); + assert(item != NULL); + + tp_iternext = Py_TYPE(iter)->tp_iternext; + if (tp_iternext == NULL) { + *item = 
NULL; + PyErr_Format(PyExc_TypeError, "expected an iterator, got '%s'", + Py_TYPE(iter)->tp_name); + return -1; + } + + if ((*item = tp_iternext(iter))) { + return 1; + } + if (!PyErr_Occurred()) { + return 0; + } + if (PyErr_ExceptionMatches(PyExc_StopIteration)) { + PyErr_Clear(); + return 0; + } + return -1; +} +#endif + + +#if PY_VERSION_HEX < 0x030E00A0 +static inline PyObject* PyLong_FromInt32(int32_t value) +{ + Py_BUILD_ASSERT(sizeof(long) >= 4); + return PyLong_FromLong(value); +} + +static inline PyObject* PyLong_FromInt64(int64_t value) +{ + Py_BUILD_ASSERT(sizeof(long long) >= 8); + return PyLong_FromLongLong(value); +} + +static inline PyObject* PyLong_FromUInt32(uint32_t value) +{ + Py_BUILD_ASSERT(sizeof(unsigned long) >= 4); + return PyLong_FromUnsignedLong(value); +} + +static inline PyObject* PyLong_FromUInt64(uint64_t value) +{ + Py_BUILD_ASSERT(sizeof(unsigned long long) >= 8); + return PyLong_FromUnsignedLongLong(value); +} + +static inline int PyLong_AsInt32(PyObject *obj, int32_t *pvalue) +{ + Py_BUILD_ASSERT(sizeof(int) == 4); + int value = PyLong_AsInt(obj); + if (value == -1 && PyErr_Occurred()) { + return -1; + } + *pvalue = (int32_t)value; + return 0; +} + +static inline int PyLong_AsInt64(PyObject *obj, int64_t *pvalue) +{ + Py_BUILD_ASSERT(sizeof(long long) == 8); + long long value = PyLong_AsLongLong(obj); + if (value == -1 && PyErr_Occurred()) { + return -1; + } + *pvalue = (int64_t)value; + return 0; +} + +static inline int PyLong_AsUInt32(PyObject *obj, uint32_t *pvalue) +{ + Py_BUILD_ASSERT(sizeof(long) >= 4); + unsigned long value = PyLong_AsUnsignedLong(obj); + if (value == (unsigned long)-1 && PyErr_Occurred()) { + return -1; + } +#if SIZEOF_LONG > 4 + if ((unsigned long)UINT32_MAX < value) { + PyErr_SetString(PyExc_OverflowError, + "Python int too large to convert to C uint32_t"); + return -1; + } +#endif + *pvalue = (uint32_t)value; + return 0; +} + +static inline int PyLong_AsUInt64(PyObject *obj, uint64_t *pvalue) +{ + Py_BUILD_ASSERT(sizeof(long long) == 8); + unsigned long long value = PyLong_AsUnsignedLongLong(obj); + if (value == (unsigned long long)-1 && PyErr_Occurred()) { + return -1; + } + *pvalue = (uint64_t)value; + return 0; +} +#endif + + +// gh-102471 added import and export API for integers to 3.14.0a2. +#if PY_VERSION_HEX < 0x030E00A2 && PY_VERSION_HEX >= 0x03000000 && !defined(PYPY_VERSION) +// Helpers to access PyLongObject internals. +static inline void +_PyLong_SetSignAndDigitCount(PyLongObject *op, int sign, Py_ssize_t size) +{ +#if PY_VERSION_HEX >= 0x030C0000 + op->long_value.lv_tag = (uintptr_t)(1 - sign) | ((uintptr_t)(size) << 3); +#elif PY_VERSION_HEX >= 0x030900A4 + Py_SET_SIZE(op, sign * size); +#else + Py_SIZE(op) = sign * size; +#endif +} + +static inline Py_ssize_t +_PyLong_DigitCount(const PyLongObject *op) +{ +#if PY_VERSION_HEX >= 0x030C0000 + return (Py_ssize_t)(op->long_value.lv_tag >> 3); +#else + return _PyLong_Sign((PyObject*)op) < 0 ? 
-Py_SIZE(op) : Py_SIZE(op); +#endif +} + +static inline digit* +_PyLong_GetDigits(const PyLongObject *op) +{ +#if PY_VERSION_HEX >= 0x030C0000 + return (digit*)(op->long_value.ob_digit); +#else + return (digit*)(op->ob_digit); +#endif +} + +typedef struct PyLongLayout { + uint8_t bits_per_digit; + uint8_t digit_size; + int8_t digits_order; + int8_t digit_endianness; +} PyLongLayout; + +typedef struct PyLongExport { + int64_t value; + uint8_t negative; + Py_ssize_t ndigits; + const void *digits; + Py_uintptr_t _reserved; +} PyLongExport; + +typedef struct PyLongWriter PyLongWriter; + +static inline const PyLongLayout* +PyLong_GetNativeLayout(void) +{ + static const PyLongLayout PyLong_LAYOUT = { + PyLong_SHIFT, + sizeof(digit), + -1, // least significant first + PY_LITTLE_ENDIAN ? -1 : 1, + }; + + return &PyLong_LAYOUT; +} + +static inline int +PyLong_Export(PyObject *obj, PyLongExport *export_long) +{ + if (!PyLong_Check(obj)) { + memset(export_long, 0, sizeof(*export_long)); + PyErr_Format(PyExc_TypeError, "expected int, got %s", + Py_TYPE(obj)->tp_name); + return -1; + } + + // Fast-path: try to convert to a int64_t + PyLongObject *self = (PyLongObject*)obj; + int overflow; +#if SIZEOF_LONG == 8 + long value = PyLong_AsLongAndOverflow(obj, &overflow); +#else + // Windows has 32-bit long, so use 64-bit long long instead + long long value = PyLong_AsLongLongAndOverflow(obj, &overflow); +#endif + Py_BUILD_ASSERT(sizeof(value) == sizeof(int64_t)); + // the function cannot fail since obj is a PyLongObject + assert(!(value == -1 && PyErr_Occurred())); + + if (!overflow) { + export_long->value = value; + export_long->negative = 0; + export_long->ndigits = 0; + export_long->digits = 0; + export_long->_reserved = 0; + } + else { + export_long->value = 0; + export_long->negative = _PyLong_Sign(obj) < 0; + export_long->ndigits = _PyLong_DigitCount(self); + if (export_long->ndigits == 0) { + export_long->ndigits = 1; + } + export_long->digits = _PyLong_GetDigits(self); + export_long->_reserved = (Py_uintptr_t)Py_NewRef(obj); + } + return 0; +} + +static inline void +PyLong_FreeExport(PyLongExport *export_long) +{ + PyObject *obj = (PyObject*)export_long->_reserved; + + if (obj) { + export_long->_reserved = 0; + Py_DECREF(obj); + } +} + +static inline PyLongWriter* +PyLongWriter_Create(int negative, Py_ssize_t ndigits, void **digits) +{ + if (ndigits <= 0) { + PyErr_SetString(PyExc_ValueError, "ndigits must be positive"); + return NULL; + } + assert(digits != NULL); + + PyLongObject *obj = _PyLong_New(ndigits); + if (obj == NULL) { + return NULL; + } + _PyLong_SetSignAndDigitCount(obj, negative?-1:1, ndigits); + + *digits = _PyLong_GetDigits(obj); + return (PyLongWriter*)obj; +} + +static inline void +PyLongWriter_Discard(PyLongWriter *writer) +{ + PyLongObject *obj = (PyLongObject *)writer; + + assert(Py_REFCNT(obj) == 1); + Py_DECREF(obj); +} + +static inline PyObject* +PyLongWriter_Finish(PyLongWriter *writer) +{ + PyObject *obj = (PyObject *)writer; + PyLongObject *self = (PyLongObject*)obj; + Py_ssize_t j = _PyLong_DigitCount(self); + Py_ssize_t i = j; + int sign = _PyLong_Sign(obj); + + assert(Py_REFCNT(obj) == 1); + + // Normalize and get singleton if possible + while (i > 0 && _PyLong_GetDigits(self)[i-1] == 0) { + --i; + } + if (i != j) { + if (i == 0) { + sign = 0; + } + _PyLong_SetSignAndDigitCount(self, sign, i); + } + if (i <= 1) { + long val = sign * (long)(_PyLong_GetDigits(self)[0]); + Py_DECREF(obj); + return PyLong_FromLong(val); + } + + return obj; +} +#endif + + +#if 
PY_VERSION_HEX < 0x030C00A3 +# define Py_T_SHORT T_SHORT +# define Py_T_INT T_INT +# define Py_T_LONG T_LONG +# define Py_T_FLOAT T_FLOAT +# define Py_T_DOUBLE T_DOUBLE +# define Py_T_STRING T_STRING +# define _Py_T_OBJECT T_OBJECT +# define Py_T_CHAR T_CHAR +# define Py_T_BYTE T_BYTE +# define Py_T_UBYTE T_UBYTE +# define Py_T_USHORT T_USHORT +# define Py_T_UINT T_UINT +# define Py_T_ULONG T_ULONG +# define Py_T_STRING_INPLACE T_STRING_INPLACE +# define Py_T_BOOL T_BOOL +# define Py_T_OBJECT_EX T_OBJECT_EX +# define Py_T_LONGLONG T_LONGLONG +# define Py_T_ULONGLONG T_ULONGLONG +# define Py_T_PYSSIZET T_PYSSIZET + +# if PY_VERSION_HEX >= 0x03000000 && !defined(PYPY_VERSION) +# define _Py_T_NONE T_NONE +# endif + +# define Py_READONLY READONLY +# define Py_AUDIT_READ READ_RESTRICTED +# define _Py_WRITE_RESTRICTED PY_WRITE_RESTRICTED +#endif + + +// gh-127350 added Py_fopen() and Py_fclose() to Python 3.14a4 +#if PY_VERSION_HEX < 0x030E00A4 +static inline FILE* Py_fopen(PyObject *path, const char *mode) +{ +#if 0x030400A2 <= PY_VERSION_HEX && !defined(PYPY_VERSION) + PyAPI_FUNC(FILE*) _Py_fopen_obj(PyObject *path, const char *mode); + + return _Py_fopen_obj(path, mode); +#else + FILE *f; + PyObject *bytes; +#if PY_VERSION_HEX >= 0x03000000 + if (!PyUnicode_FSConverter(path, &bytes)) { + return NULL; + } +#else + if (!PyString_Check(path)) { + PyErr_SetString(PyExc_TypeError, "except str"); + return NULL; + } + bytes = Py_NewRef(path); +#endif + const char *path_bytes = PyBytes_AS_STRING(bytes); + + f = fopen(path_bytes, mode); + Py_DECREF(bytes); + + if (f == NULL) { + PyErr_SetFromErrnoWithFilenameObject(PyExc_OSError, path); + return NULL; + } + return f; +#endif +} + +static inline int Py_fclose(FILE *file) +{ + return fclose(file); +} +#endif + + +#if 0x03080000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x030E0000 && !defined(PYPY_VERSION) +static inline PyObject* +PyConfig_Get(const char *name) +{ + typedef enum { + _PyConfig_MEMBER_INT, + _PyConfig_MEMBER_UINT, + _PyConfig_MEMBER_ULONG, + _PyConfig_MEMBER_BOOL, + _PyConfig_MEMBER_WSTR, + _PyConfig_MEMBER_WSTR_OPT, + _PyConfig_MEMBER_WSTR_LIST, + } PyConfigMemberType; + + typedef struct { + const char *name; + size_t offset; + PyConfigMemberType type; + const char *sys_attr; + } PyConfigSpec; + +#define PYTHONCAPI_COMPAT_SPEC(MEMBER, TYPE, sys_attr) \ + {#MEMBER, offsetof(PyConfig, MEMBER), \ + _PyConfig_MEMBER_##TYPE, sys_attr} + + static const PyConfigSpec config_spec[] = { + PYTHONCAPI_COMPAT_SPEC(argv, WSTR_LIST, "argv"), + PYTHONCAPI_COMPAT_SPEC(base_exec_prefix, WSTR_OPT, "base_exec_prefix"), + PYTHONCAPI_COMPAT_SPEC(base_executable, WSTR_OPT, "_base_executable"), + PYTHONCAPI_COMPAT_SPEC(base_prefix, WSTR_OPT, "base_prefix"), + PYTHONCAPI_COMPAT_SPEC(bytes_warning, UINT, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(exec_prefix, WSTR_OPT, "exec_prefix"), + PYTHONCAPI_COMPAT_SPEC(executable, WSTR_OPT, "executable"), + PYTHONCAPI_COMPAT_SPEC(inspect, BOOL, _Py_NULL), +#if 0x030C0000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(int_max_str_digits, UINT, _Py_NULL), +#endif + PYTHONCAPI_COMPAT_SPEC(interactive, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(module_search_paths, WSTR_LIST, "path"), + PYTHONCAPI_COMPAT_SPEC(optimization_level, UINT, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(parser_debug, BOOL, _Py_NULL), +#if 0x03090000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(platlibdir, WSTR, "platlibdir"), +#endif + PYTHONCAPI_COMPAT_SPEC(prefix, WSTR_OPT, "prefix"), + PYTHONCAPI_COMPAT_SPEC(pycache_prefix, WSTR_OPT, "pycache_prefix"), + 
PYTHONCAPI_COMPAT_SPEC(quiet, BOOL, _Py_NULL), +#if 0x030B0000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(stdlib_dir, WSTR_OPT, "_stdlib_dir"), +#endif + PYTHONCAPI_COMPAT_SPEC(use_environment, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(verbose, UINT, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(warnoptions, WSTR_LIST, "warnoptions"), + PYTHONCAPI_COMPAT_SPEC(write_bytecode, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(xoptions, WSTR_LIST, "_xoptions"), + PYTHONCAPI_COMPAT_SPEC(buffered_stdio, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(check_hash_pycs_mode, WSTR, _Py_NULL), +#if 0x030B0000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(code_debug_ranges, BOOL, _Py_NULL), +#endif + PYTHONCAPI_COMPAT_SPEC(configure_c_stdio, BOOL, _Py_NULL), +#if 0x030D0000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(cpu_count, INT, _Py_NULL), +#endif + PYTHONCAPI_COMPAT_SPEC(dev_mode, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(dump_refs, BOOL, _Py_NULL), +#if 0x030B0000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(dump_refs_file, WSTR_OPT, _Py_NULL), +#endif +#ifdef Py_GIL_DISABLED + PYTHONCAPI_COMPAT_SPEC(enable_gil, INT, _Py_NULL), +#endif + PYTHONCAPI_COMPAT_SPEC(faulthandler, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(filesystem_encoding, WSTR, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(filesystem_errors, WSTR, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(hash_seed, ULONG, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(home, WSTR_OPT, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(import_time, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(install_signal_handlers, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(isolated, BOOL, _Py_NULL), +#ifdef MS_WINDOWS + PYTHONCAPI_COMPAT_SPEC(legacy_windows_stdio, BOOL, _Py_NULL), +#endif + PYTHONCAPI_COMPAT_SPEC(malloc_stats, BOOL, _Py_NULL), +#if 0x030A0000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(orig_argv, WSTR_LIST, "orig_argv"), +#endif + PYTHONCAPI_COMPAT_SPEC(parse_argv, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(pathconfig_warnings, BOOL, _Py_NULL), +#if 0x030C0000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(perf_profiling, UINT, _Py_NULL), +#endif + PYTHONCAPI_COMPAT_SPEC(program_name, WSTR, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(run_command, WSTR_OPT, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(run_filename, WSTR_OPT, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(run_module, WSTR_OPT, _Py_NULL), +#if 0x030B0000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(safe_path, BOOL, _Py_NULL), +#endif + PYTHONCAPI_COMPAT_SPEC(show_ref_count, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(site_import, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(skip_source_first_line, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(stdio_encoding, WSTR, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(stdio_errors, WSTR, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(tracemalloc, UINT, _Py_NULL), +#if 0x030B0000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(use_frozen_modules, BOOL, _Py_NULL), +#endif + PYTHONCAPI_COMPAT_SPEC(use_hash_seed, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(user_site_directory, BOOL, _Py_NULL), +#if 0x030A0000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(warn_default_encoding, BOOL, _Py_NULL), +#endif + }; + +#undef PYTHONCAPI_COMPAT_SPEC + + const PyConfigSpec *spec; + int found = 0; + for (size_t i=0; i < sizeof(config_spec) / sizeof(config_spec[0]); i++) { + spec = &config_spec[i]; + if (strcmp(spec->name, name) == 0) { + found = 1; + break; + } + } + if (found) { + if (spec->sys_attr != NULL) { + PyObject *value = PySys_GetObject(spec->sys_attr); + if (value == NULL) { + PyErr_Format(PyExc_RuntimeError, "lost sys.%s", spec->sys_attr); + return NULL; + } + return Py_NewRef(value); + } + + 
PyAPI_FUNC(const PyConfig*) _Py_GetConfig(void); + + const PyConfig *config = _Py_GetConfig(); + void *member = (char *)config + spec->offset; + switch (spec->type) { + case _PyConfig_MEMBER_INT: + case _PyConfig_MEMBER_UINT: + { + int value = *(int *)member; + return PyLong_FromLong(value); + } + case _PyConfig_MEMBER_BOOL: + { + int value = *(int *)member; + return PyBool_FromLong(value != 0); + } + case _PyConfig_MEMBER_ULONG: + { + unsigned long value = *(unsigned long *)member; + return PyLong_FromUnsignedLong(value); + } + case _PyConfig_MEMBER_WSTR: + case _PyConfig_MEMBER_WSTR_OPT: + { + wchar_t *wstr = *(wchar_t **)member; + if (wstr != NULL) { + return PyUnicode_FromWideChar(wstr, -1); + } + else { + return Py_NewRef(Py_None); + } + } + case _PyConfig_MEMBER_WSTR_LIST: + { + const PyWideStringList *list = (const PyWideStringList *)member; + PyObject *tuple = PyTuple_New(list->length); + if (tuple == NULL) { + return NULL; + } + + for (Py_ssize_t i = 0; i < list->length; i++) { + PyObject *item = PyUnicode_FromWideChar(list->items[i], -1); + if (item == NULL) { + Py_DECREF(tuple); + return NULL; + } + PyTuple_SET_ITEM(tuple, i, item); + } + return tuple; + } + default: + Py_UNREACHABLE(); + } + } + + PyErr_Format(PyExc_ValueError, "unknown config option name: %s", name); + return NULL; +} + +static inline int +PyConfig_GetInt(const char *name, int *value) +{ + PyObject *obj = PyConfig_Get(name); + if (obj == NULL) { + return -1; + } + + if (!PyLong_Check(obj)) { + Py_DECREF(obj); + PyErr_Format(PyExc_TypeError, "config option %s is not an int", name); + return -1; + } + + int as_int = PyLong_AsInt(obj); + Py_DECREF(obj); + if (as_int == -1 && PyErr_Occurred()) { + PyErr_Format(PyExc_OverflowError, + "config option %s value does not fit into a C int", name); + return -1; + } + + *value = as_int; + return 0; +} +#endif // PY_VERSION_HEX > 0x03090000 && !defined(PYPY_VERSION) + +// gh-133144 added PyUnstable_Object_IsUniquelyReferenced() to Python 3.14.0b1. +// Adapted from _PyObject_IsUniquelyReferenced() implementation. +#if PY_VERSION_HEX < 0x030E00B0 +static inline int PyUnstable_Object_IsUniquelyReferenced(PyObject *obj) +{ +#if !defined(Py_GIL_DISABLED) + return Py_REFCNT(obj) == 1; +#else + // NOTE: the entire ob_ref_shared field must be zero, including flags, to + // ensure that other threads cannot concurrently create new references to + // this object. 
+ return (_Py_IsOwnedByCurrentThread(obj) && + _Py_atomic_load_uint32_relaxed(&obj->ob_ref_local) == 1 && + _Py_atomic_load_ssize_relaxed(&obj->ob_ref_shared) == 0); +#endif +} +#endif + + +#if PY_VERSION_HEX < 0x030F0000 +static inline PyObject* +PySys_GetAttrString(const char *name) +{ +#if PY_VERSION_HEX >= 0x03000000 + PyObject *value = Py_XNewRef(PySys_GetObject(name)); +#else + PyObject *value = Py_XNewRef(PySys_GetObject((char*)name)); +#endif + if (value != NULL) { + return value; + } + if (!PyErr_Occurred()) { + PyErr_Format(PyExc_RuntimeError, "lost sys.%s", name); + } + return NULL; +} + +static inline PyObject* +PySys_GetAttr(PyObject *name) +{ +#if PY_VERSION_HEX >= 0x03000000 + const char *name_str = PyUnicode_AsUTF8(name); +#else + const char *name_str = PyString_AsString(name); +#endif + if (name_str == NULL) { + return NULL; + } + + return PySys_GetAttrString(name_str); +} + +static inline int +PySys_GetOptionalAttrString(const char *name, PyObject **value) +{ +#if PY_VERSION_HEX >= 0x03000000 + *value = Py_XNewRef(PySys_GetObject(name)); +#else + *value = Py_XNewRef(PySys_GetObject((char*)name)); +#endif + if (*value != NULL) { + return 1; + } + return 0; +} + +static inline int +PySys_GetOptionalAttr(PyObject *name, PyObject **value) +{ +#if PY_VERSION_HEX >= 0x03000000 + const char *name_str = PyUnicode_AsUTF8(name); +#else + const char *name_str = PyString_AsString(name); +#endif + if (name_str == NULL) { + *value = NULL; + return -1; + } + + return PySys_GetOptionalAttrString(name_str, value); +} +#endif // PY_VERSION_HEX < 0x030F00A1 + + +#if PY_VERSION_HEX < 0x030F00A1 +typedef struct PyBytesWriter { + char small_buffer[256]; + PyObject *obj; + Py_ssize_t size; +} PyBytesWriter; + +static inline Py_ssize_t +_PyBytesWriter_GetAllocated(PyBytesWriter *writer) +{ + if (writer->obj == NULL) { + return sizeof(writer->small_buffer); + } + else { + return PyBytes_GET_SIZE(writer->obj); + } +} + + +static inline int +_PyBytesWriter_Resize_impl(PyBytesWriter *writer, Py_ssize_t size, + int resize) +{ + int overallocate = resize; + assert(size >= 0); + + if (size <= _PyBytesWriter_GetAllocated(writer)) { + return 0; + } + + if (overallocate) { +#ifdef MS_WINDOWS + /* On Windows, overallocate by 50% is the best factor */ + if (size <= (PY_SSIZE_T_MAX - size / 2)) { + size += size / 2; + } +#else + /* On Linux, overallocate by 25% is the best factor */ + if (size <= (PY_SSIZE_T_MAX - size / 4)) { + size += size / 4; + } +#endif + } + + if (writer->obj != NULL) { + if (_PyBytes_Resize(&writer->obj, size)) { + return -1; + } + assert(writer->obj != NULL); + } + else { + writer->obj = PyBytes_FromStringAndSize(NULL, size); + if (writer->obj == NULL) { + return -1; + } + + if (resize) { + assert((size_t)size > sizeof(writer->small_buffer)); + memcpy(PyBytes_AS_STRING(writer->obj), + writer->small_buffer, + sizeof(writer->small_buffer)); + } + } + return 0; +} + +static inline void* +PyBytesWriter_GetData(PyBytesWriter *writer) +{ + if (writer->obj == NULL) { + return writer->small_buffer; + } + else { + return PyBytes_AS_STRING(writer->obj); + } +} + +static inline Py_ssize_t +PyBytesWriter_GetSize(PyBytesWriter *writer) +{ + return writer->size; +} + +static inline void +PyBytesWriter_Discard(PyBytesWriter *writer) +{ + if (writer == NULL) { + return; + } + + Py_XDECREF(writer->obj); + PyMem_Free(writer); +} + +static inline PyBytesWriter* +PyBytesWriter_Create(Py_ssize_t size) +{ + if (size < 0) { + PyErr_SetString(PyExc_ValueError, "size must be >= 0"); + return NULL; + } + + 
PyBytesWriter *writer = (PyBytesWriter*)PyMem_Malloc(sizeof(PyBytesWriter)); + if (writer == NULL) { + PyErr_NoMemory(); + return NULL; + } + + writer->obj = NULL; + writer->size = 0; + + if (size >= 1) { + if (_PyBytesWriter_Resize_impl(writer, size, 0) < 0) { + PyBytesWriter_Discard(writer); + return NULL; + } + writer->size = size; + } + return writer; +} + +static inline PyObject* +PyBytesWriter_FinishWithSize(PyBytesWriter *writer, Py_ssize_t size) +{ + PyObject *result; + if (size == 0) { + result = PyBytes_FromStringAndSize("", 0); + } + else if (writer->obj != NULL) { + if (size != PyBytes_GET_SIZE(writer->obj)) { + if (_PyBytes_Resize(&writer->obj, size)) { + goto error; + } + } + result = writer->obj; + writer->obj = NULL; + } + else { + result = PyBytes_FromStringAndSize(writer->small_buffer, size); + } + PyBytesWriter_Discard(writer); + return result; + +error: + PyBytesWriter_Discard(writer); + return NULL; +} + +static inline PyObject* +PyBytesWriter_Finish(PyBytesWriter *writer) +{ + return PyBytesWriter_FinishWithSize(writer, writer->size); +} + +static inline PyObject* +PyBytesWriter_FinishWithPointer(PyBytesWriter *writer, void *buf) +{ + Py_ssize_t size = (char*)buf - (char*)PyBytesWriter_GetData(writer); + if (size < 0 || size > _PyBytesWriter_GetAllocated(writer)) { + PyBytesWriter_Discard(writer); + PyErr_SetString(PyExc_ValueError, "invalid end pointer"); + return NULL; + } + + return PyBytesWriter_FinishWithSize(writer, size); +} + +static inline int +PyBytesWriter_Resize(PyBytesWriter *writer, Py_ssize_t size) +{ + if (size < 0) { + PyErr_SetString(PyExc_ValueError, "size must be >= 0"); + return -1; + } + if (_PyBytesWriter_Resize_impl(writer, size, 1) < 0) { + return -1; + } + writer->size = size; + return 0; +} + +static inline int +PyBytesWriter_Grow(PyBytesWriter *writer, Py_ssize_t size) +{ + if (size < 0 && writer->size + size < 0) { + PyErr_SetString(PyExc_ValueError, "invalid size"); + return -1; + } + if (size > PY_SSIZE_T_MAX - writer->size) { + PyErr_NoMemory(); + return -1; + } + size = writer->size + size; + + if (_PyBytesWriter_Resize_impl(writer, size, 1) < 0) { + return -1; + } + writer->size = size; + return 0; +} + +static inline void* +PyBytesWriter_GrowAndUpdatePointer(PyBytesWriter *writer, + Py_ssize_t size, void *buf) +{ + Py_ssize_t pos = (char*)buf - (char*)PyBytesWriter_GetData(writer); + if (PyBytesWriter_Grow(writer, size) < 0) { + return NULL; + } + return (char*)PyBytesWriter_GetData(writer) + pos; +} + +static inline int +PyBytesWriter_WriteBytes(PyBytesWriter *writer, + const void *bytes, Py_ssize_t size) +{ + if (size < 0) { + size_t len = strlen((const char*)bytes); + if (len > (size_t)PY_SSIZE_T_MAX) { + PyErr_NoMemory(); + return -1; + } + size = (Py_ssize_t)len; + } + + Py_ssize_t pos = writer->size; + if (PyBytesWriter_Grow(writer, size) < 0) { + return -1; + } + char *buf = (char*)PyBytesWriter_GetData(writer); + memcpy(buf + pos, bytes, (size_t)size); + return 0; +} + +static inline int +PyBytesWriter_Format(PyBytesWriter *writer, const char *format, ...) + Py_GCC_ATTRIBUTE((format(printf, 2, 3))); + +static inline int +PyBytesWriter_Format(PyBytesWriter *writer, const char *format, ...) 
+{ + va_list vargs; + va_start(vargs, format); + PyObject *str = PyBytes_FromFormatV(format, vargs); + va_end(vargs); + + if (str == NULL) { + return -1; + } + int res = PyBytesWriter_WriteBytes(writer, + PyBytes_AS_STRING(str), + PyBytes_GET_SIZE(str)); + Py_DECREF(str); + return res; +} +#endif // PY_VERSION_HEX < 0x030F00A1 + + +#if PY_VERSION_HEX < 0x030F00A1 +static inline PyObject* +PyTuple_FromArray(PyObject *const *array, Py_ssize_t size) +{ + PyObject *tuple = PyTuple_New(size); + if (tuple == NULL) { + return NULL; + } + for (Py_ssize_t i=0; i < size; i++) { + PyObject *item = array[i]; + PyTuple_SET_ITEM(tuple, i, Py_NewRef(item)); + } + return tuple; +} +#endif + + +#if PY_VERSION_HEX < 0x030F00A1 +static inline Py_hash_t +PyUnstable_Unicode_GET_CACHED_HASH(PyObject *op) +{ +#ifdef PYPY_VERSION + (void)op; // unused argument + return -1; +#elif PY_VERSION_HEX >= 0x03000000 + return ((PyASCIIObject*)op)->hash; +#else + return ((PyUnicodeObject*)op)->hash; +#endif +} +#endif + + +#ifdef __cplusplus +} +#endif +#endif // PYTHONCAPI_COMPAT diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/pythonsupport.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/pythonsupport.c new file mode 100644 index 0000000..90fb697 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/pythonsupport.c @@ -0,0 +1,106 @@ +// Collects code that was copied in from cpython, for a couple of different reasons: +// * We wanted to modify it to produce a more efficient version for our uses +// * We needed to call it and it was static :( +// * We wanted to call it and needed to backport it + +#include "pythonsupport.h" + +#if CPY_3_12_FEATURES + +// Slow path of CPyLong_AsSsize_tAndOverflow (non-inlined) +Py_ssize_t +CPyLong_AsSsize_tAndOverflow_(PyObject *vv, int *overflow) +{ + PyLongObject *v = (PyLongObject *)vv; + size_t x, prev; + Py_ssize_t res; + Py_ssize_t i; + int sign; + + *overflow = 0; + + res = -1; + i = CPY_LONG_TAG(v); + + sign = 1; + x = 0; + if (i & CPY_SIGN_NEGATIVE) { + sign = -1; + } + i >>= CPY_NON_SIZE_BITS; + while (--i >= 0) { + prev = x; + x = (x << PyLong_SHIFT) + CPY_LONG_DIGIT(v, i); + if ((x >> PyLong_SHIFT) != prev) { + *overflow = sign; + goto exit; + } + } + /* Haven't lost any bits, but casting to long requires extra + * care. + */ + if (x <= (size_t)CPY_TAGGED_MAX) { + res = (Py_ssize_t)x * sign; + } + else if (sign < 0 && x == CPY_TAGGED_ABS_MIN) { + res = CPY_TAGGED_MIN; + } + else { + *overflow = sign; + /* res is already set to -1 */ + } + exit: + return res; +} + +#else + +// Slow path of CPyLong_AsSsize_tAndOverflow (non-inlined, Python 3.11 and earlier) +Py_ssize_t +CPyLong_AsSsize_tAndOverflow_(PyObject *vv, int *overflow) +{ + /* This version by Tim Peters */ + PyLongObject *v = (PyLongObject *)vv; + size_t x, prev; + Py_ssize_t res; + Py_ssize_t i; + int sign; + + *overflow = 0; + + res = -1; + i = Py_SIZE(v); + + sign = 1; + x = 0; + if (i < 0) { + sign = -1; + i = -(i); + } + while (--i >= 0) { + prev = x; + x = (x << PyLong_SHIFT) + CPY_LONG_DIGIT(v, i); + if ((x >> PyLong_SHIFT) != prev) { + *overflow = sign; + goto exit; + } + } + /* Haven't lost any bits, but casting to long requires extra + * care. 
+ */ + if (x <= (size_t)CPY_TAGGED_MAX) { + res = (Py_ssize_t)x * sign; + } + else if (sign < 0 && x == CPY_TAGGED_ABS_MIN) { + res = CPY_TAGGED_MIN; + } + else { + *overflow = sign; + /* res is already set to -1 */ + } + exit: + return res; +} + + +#endif diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/pythonsupport.h b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/pythonsupport.h new file mode 100644 index 0000000..7019c12 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/pythonsupport.h @@ -0,0 +1,478 @@ +// Collects code that was copied in from cpython, for a couple of different reasons: +// * We wanted to modify it to produce a more efficient version for our uses +// * We needed to call it and it was static :( +// * We wanted to call it and needed to backport it + +#ifndef CPY_PYTHONSUPPORT_H +#define CPY_PYTHONSUPPORT_H + +#include +#include +#include "pythoncapi_compat.h" +#include +#include +#include "mypyc_util.h" + +#if CPY_3_13_FEATURES +#ifndef Py_BUILD_CORE +#define Py_BUILD_CORE +#endif +#include "internal/pycore_genobject.h" // _PyGen_FetchStopIterationValue +#include "internal/pycore_pyerrors.h" // _PyErr_FormatFromCause, _PyErr_SetKeyError +#include "internal/pycore_setobject.h" // _PySet_Update +#endif + +#if CPY_3_12_FEATURES +#include "internal/pycore_frame.h" +#endif + +#ifdef __cplusplus +extern "C" { +#endif +#if 0 +} // why isn't emacs smart enough to not indent this +#endif + +///////////////////////////////////////// +// Adapted from bltinmodule.c in Python 3.7.0 +_Py_IDENTIFIER(__mro_entries__); +static PyObject* +update_bases(PyObject *bases) +{ + Py_ssize_t i, j; + PyObject *base, *meth, *new_base, *result, *new_bases = NULL; + PyObject *stack[1] = {bases}; + assert(PyTuple_Check(bases)); + + Py_ssize_t nargs = PyTuple_GET_SIZE(bases); + for (i = 0; i < nargs; i++) { + base = PyTuple_GET_ITEM(bases, i); + if (PyType_Check(base)) { + if (new_bases) { + /* If we already have made a replacement, then we append every normal base, + otherwise just skip it. */ + if (PyList_Append(new_bases, base) < 0) { + goto error; + } + } + continue; + } + if (PyObject_GetOptionalAttrString(base, PyId___mro_entries__.string, &meth) < 0) { + goto error; + } + if (!meth) { + if (new_bases) { + if (PyList_Append(new_bases, base) < 0) { + goto error; + } + } + continue; + } + new_base = PyObject_Vectorcall(meth, stack, 1, NULL); + Py_DECREF(meth); + if (!new_base) { + goto error; + } + if (!PyTuple_Check(new_base)) { + PyErr_SetString(PyExc_TypeError, + "__mro_entries__ must return a tuple"); + Py_DECREF(new_base); + goto error; + } + if (!new_bases) { + /* If this is a first successful replacement, create new_bases list and + copy previously encountered bases. 
*/ + if (!(new_bases = PyList_New(i))) { + goto error; + } + for (j = 0; j < i; j++) { + base = PyTuple_GET_ITEM(bases, j); + PyList_SET_ITEM(new_bases, j, base); + Py_INCREF(base); + } + } + j = PyList_GET_SIZE(new_bases); + if (PyList_SetSlice(new_bases, j, j, new_base) < 0) { + goto error; + } + Py_DECREF(new_base); + } + if (!new_bases) { + return bases; + } + result = PyList_AsTuple(new_bases); + Py_DECREF(new_bases); + return result; + +error: + Py_XDECREF(new_bases); + return NULL; +} + +// From Python 3.7's typeobject.c +_Py_IDENTIFIER(__init_subclass__); +static int +init_subclass(PyTypeObject *type, PyObject *kwds) +{ + PyObject *super, *func, *result; + PyObject *args[2] = {(PyObject *)type, (PyObject *)type}; + + super = PyObject_Vectorcall((PyObject *)&PySuper_Type, args, 2, NULL); + if (super == NULL) { + return -1; + } + + func = _PyObject_GetAttrId(super, &PyId___init_subclass__); + Py_DECREF(super); + if (func == NULL) { + return -1; + } + + result = _PyObject_FastCallDict(func, NULL, 0, kwds); + Py_DECREF(func); + if (result == NULL) { + return -1; + } + + Py_DECREF(result); + return 0; +} + +Py_ssize_t +CPyLong_AsSsize_tAndOverflow_(PyObject *vv, int *overflow); + +#if CPY_3_12_FEATURES + +static inline Py_ssize_t +CPyLong_AsSsize_tAndOverflow(PyObject *vv, int *overflow) +{ + /* This version by Tim Peters */ + PyLongObject *v = (PyLongObject *)vv; + Py_ssize_t res; + Py_ssize_t i; + + *overflow = 0; + + res = -1; + i = CPY_LONG_TAG(v); + + // TODO: Combine zero and non-zero cases helow? + if (likely(i == (1 << CPY_NON_SIZE_BITS))) { + res = CPY_LONG_DIGIT(v, 0); + } else if (likely(i == CPY_SIGN_ZERO)) { + res = 0; + } else if (i == ((1 << CPY_NON_SIZE_BITS) | CPY_SIGN_NEGATIVE)) { + res = -(sdigit)CPY_LONG_DIGIT(v, 0); + } else { + // Slow path is moved to a non-inline helper function to + // limit size of generated code + int overflow_local; + res = CPyLong_AsSsize_tAndOverflow_(vv, &overflow_local); + *overflow = overflow_local; + } + return res; +} + +#else + +// Adapted from longobject.c in Python 3.7.0 + +/* This function adapted from PyLong_AsLongLongAndOverflow, but with + * some safety checks removed and specialized to only work for objects + * that are already longs. + * About half of the win this provides, though, just comes from being + * able to inline the function, which in addition to saving function call + * overhead allows the out-parameter overflow flag to be collapsed into + * control flow. + * Additionally, we check against the possible range of CPyTagged, not of + * Py_ssize_t. */ +static inline Py_ssize_t +CPyLong_AsSsize_tAndOverflow(PyObject *vv, int *overflow) +{ + /* This version by Tim Peters */ + PyLongObject *v = (PyLongObject *)vv; + Py_ssize_t res; + Py_ssize_t i; + + *overflow = 0; + + res = -1; + i = Py_SIZE(v); + + if (likely(i == 1)) { + res = CPY_LONG_DIGIT(v, 0); + } else if (likely(i == 0)) { + res = 0; + } else if (i == -1) { + res = -(sdigit)CPY_LONG_DIGIT(v, 0); + } else { + // Slow path is moved to a non-inline helper function to + // limit size of generated code + int overflow_local; + res = CPyLong_AsSsize_tAndOverflow_(vv, &overflow_local); + *overflow = overflow_local; + } + return res; +} + +#endif + +// Adapted from listobject.c in Python 3.7.0 +static int +list_resize(PyListObject *self, Py_ssize_t newsize) +{ + PyObject **items; + size_t new_allocated, num_allocated_bytes; + Py_ssize_t allocated = self->allocated; + + /* Bypass realloc() when a previous overallocation is large enough + to accommodate the newsize. 
If the newsize falls lower than half + the allocated size, then proceed with the realloc() to shrink the list. + */ + if (allocated >= newsize && newsize >= (allocated >> 1)) { + assert(self->ob_item != NULL || newsize == 0); + Py_SET_SIZE(self, newsize); + return 0; + } + + /* This over-allocates proportional to the list size, making room + * for additional growth. The over-allocation is mild, but is + * enough to give linear-time amortized behavior over a long + * sequence of appends() in the presence of a poorly-performing + * system realloc(). + * The growth pattern is: 0, 4, 8, 16, 25, 35, 46, 58, 72, 88, ... + * Note: new_allocated won't overflow because the largest possible value + * is PY_SSIZE_T_MAX * (9 / 8) + 6 which always fits in a size_t. + */ + new_allocated = (size_t)newsize + (newsize >> 3) + (newsize < 9 ? 3 : 6); + if (new_allocated > (size_t)PY_SSIZE_T_MAX / sizeof(PyObject *)) { + PyErr_NoMemory(); + return -1; + } + + if (newsize == 0) + new_allocated = 0; + num_allocated_bytes = new_allocated * sizeof(PyObject *); + items = (PyObject **)PyMem_Realloc(self->ob_item, num_allocated_bytes); + if (items == NULL) { + PyErr_NoMemory(); + return -1; + } + self->ob_item = items; + Py_SET_SIZE(self, newsize); + self->allocated = new_allocated; + return 0; +} + +// Changed to use PyList_SetSlice instead of the internal list_ass_slice +static PyObject * +list_pop_impl(PyListObject *self, Py_ssize_t index) +{ + PyObject *v; + int status; + + if (Py_SIZE(self) == 0) { + /* Special-case most common failure cause */ + PyErr_SetString(PyExc_IndexError, "pop from empty list"); + return NULL; + } + if (index < 0) + index += Py_SIZE(self); + if (index < 0 || index >= Py_SIZE(self)) { + PyErr_SetString(PyExc_IndexError, "pop index out of range"); + return NULL; + } + v = self->ob_item[index]; + if (index == Py_SIZE(self) - 1) { + status = list_resize(self, Py_SIZE(self) - 1); + if (status >= 0) + return v; /* and v now owns the reference the list had */ + else + return NULL; + } + Py_INCREF(v); + status = PyList_SetSlice((PyObject *)self, index, index+1, (PyObject *)NULL); + if (status < 0) { + Py_DECREF(v); + return NULL; + } + return v; +} + +// Tweaked to directly use CPyTagged +static CPyTagged +list_count(PyListObject *self, PyObject *value) +{ + Py_ssize_t count = 0; + Py_ssize_t i; + + for (i = 0; i < Py_SIZE(self); i++) { + int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ); + if (cmp > 0) + count++; + else if (cmp < 0) + return CPY_INT_TAG; + } + return CPyTagged_ShortFromSsize_t(count); +} + +// Adapted from genobject.c in Python 3.7.2 +// Copied because it wasn't in 3.5.2 and it is undocumented anyways. +/* + * Set StopIteration with specified value. Value can be arbitrary object + * or NULL. + * + * Returns 0 if StopIteration is set and -1 if any other exception is set. + */ +static int +CPyGen_SetStopIterationValue(PyObject *value) +{ + PyObject *e; + + if (value == NULL || + (!PyTuple_Check(value) && !PyExceptionInstance_Check(value))) + { + /* Delay exception instantiation if we can */ + PyErr_SetObject(PyExc_StopIteration, value); + return 0; + } + /* Construct an exception instance manually with + * PyObject_CallOneArg and pass it to PyErr_SetObject. + * + * We do this to handle a situation when "value" is a tuple, in which + * case PyErr_SetObject would set the value of StopIteration to + * the first element of the tuple. + * + * (See PyErr_SetObject/_PyErr_CreateException code for details.) 
+ */ + e = PyObject_CallOneArg(PyExc_StopIteration, value); + if (e == NULL) { + return -1; + } + PyErr_SetObject(PyExc_StopIteration, e); + Py_DECREF(e); + return 0; +} + +// Copied from dictobject.c and dictobject.h, these are not Public before +// Python 3.8. Also remove some error checks that we do in the callers. +typedef struct { + PyObject_HEAD + PyDictObject *dv_dict; +} _CPyDictViewObject; + +static PyObject * +_CPyDictView_New(PyObject *dict, PyTypeObject *type) +{ + _CPyDictViewObject *dv = PyObject_GC_New(_CPyDictViewObject, type); + if (dv == NULL) + return NULL; + Py_INCREF(dict); + dv->dv_dict = (PyDictObject *)dict; + PyObject_GC_Track(dv); + return (PyObject *)dv; +} + +#ifdef __cplusplus +} +#endif + +#if PY_VERSION_HEX >= 0x030A0000 // 3.10 +static int +_CPyObject_HasAttrId(PyObject *v, _Py_Identifier *name) { + PyObject *tmp = NULL; + int result = PyObject_GetOptionalAttrString(v, name->string, &tmp); + if (tmp) { + Py_DECREF(tmp); + } + return result; +} +#else +#define _CPyObject_HasAttrId _PyObject_HasAttrId +#endif + +#if CPY_3_12_FEATURES + +// These are copied from genobject.c in Python 3.12 + +static int +gen_is_coroutine(PyObject *o) +{ + if (PyGen_CheckExact(o)) { + PyCodeObject *code = PyGen_GetCode((PyGenObject*)o); + if (code->co_flags & CO_ITERABLE_COROUTINE) { + return 1; + } + } + return 0; +} + +#else + +// Copied from genobject.c in Python 3.10 +static int +gen_is_coroutine(PyObject *o) +{ + if (PyGen_CheckExact(o)) { + PyCodeObject *code = (PyCodeObject *)((PyGenObject*)o)->gi_code; + if (code->co_flags & CO_ITERABLE_COROUTINE) { + return 1; + } + } + return 0; +} + +#endif + +/* + * This helper function returns an awaitable for `o`: + * - `o` if `o` is a coroutine-object; + * - `type(o)->tp_as_async->am_await(o)` + * + * Raises a TypeError if it's not possible to return + * an awaitable and returns NULL. + */ +static PyObject * +CPyCoro_GetAwaitableIter(PyObject *o) +{ + unaryfunc getter = NULL; + PyTypeObject *ot; + + if (PyCoro_CheckExact(o) || gen_is_coroutine(o)) { + /* 'o' is a coroutine. */ + Py_INCREF(o); + return o; + } + + ot = Py_TYPE(o); + if (ot->tp_as_async != NULL) { + getter = ot->tp_as_async->am_await; + } + if (getter != NULL) { + PyObject *res = (*getter)(o); + if (res != NULL) { + if (PyCoro_CheckExact(res) || gen_is_coroutine(res)) { + /* __await__ must return an *iterator*, not + a coroutine or another awaitable (see PEP 492) */ + PyErr_SetString(PyExc_TypeError, + "__await__() returned a coroutine"); + Py_CLEAR(res); + } else if (!PyIter_Check(res)) { + PyErr_Format(PyExc_TypeError, + "__await__() returned non-iterator " + "of type '%.100s'", + Py_TYPE(res)->tp_name); + Py_CLEAR(res); + } + } + return res; + } + + PyErr_Format(PyExc_TypeError, + "object %.100s can't be used in 'await' expression", + ot->tp_name); + return NULL; +} + + +#endif diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/set_ops.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/set_ops.c new file mode 100644 index 0000000..7e76967 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/set_ops.c @@ -0,0 +1,17 @@ +// Set primitive operations +// +// These are registered in mypyc.primitives.set_ops. 
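+// Note on the helper below: CPySet_Remove() wraps PySet_Discard(). It returns
+// true when the key was present and removed; when the key is missing it sets
+// KeyError via _PyErr_SetKeyError() and returns false, and it also returns
+// false when PySet_Discard() itself failed with an exception already set.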
+ +#include +#include "CPy.h" + +bool CPySet_Remove(PyObject *set, PyObject *key) { + int success = PySet_Discard(set, key); + if (success == 1) { + return true; + } + if (success == 0) { + _PyErr_SetKeyError(key); + } + return false; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/str_ops.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/str_ops.c new file mode 100644 index 0000000..721a2bb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/str_ops.c @@ -0,0 +1,623 @@ +#include "pythoncapi_compat.h" + +// String primitive operations +// +// These are registered in mypyc.primitives.str_ops. + +#include +#include "CPy.h" + +// The _PyUnicode_CheckConsistency definition has been moved to the internal API +// https://github.com/python/cpython/pull/106398 +#if defined(Py_DEBUG) && defined(CPY_3_13_FEATURES) +#include "internal/pycore_unicodeobject.h" +#endif + +// Copied from cpython.git:Objects/unicodeobject.c@0ef4ffeefd1737c18dc9326133c7894d58108c2e. +#define BLOOM_MASK unsigned long +#define BLOOM(mask, ch) ((mask & (1UL << ((ch) & (BLOOM_WIDTH - 1))))) +#if LONG_BIT >= 128 +#define BLOOM_WIDTH 128 +#elif LONG_BIT >= 64 +#define BLOOM_WIDTH 64 +#elif LONG_BIT >= 32 +#define BLOOM_WIDTH 32 +#else +#error "LONG_BIT is smaller than 32" +#endif + +// Copied from cpython.git:Objects/unicodeobject.c@0ef4ffeefd1737c18dc9326133c7894d58108c2e. +// This is needed for str.strip("..."). +static inline BLOOM_MASK +make_bloom_mask(int kind, const void* ptr, Py_ssize_t len) +{ +#define BLOOM_UPDATE(TYPE, MASK, PTR, LEN) \ + do { \ + TYPE *data = (TYPE *)PTR; \ + TYPE *end = data + LEN; \ + Py_UCS4 ch; \ + for (; data != end; data++) { \ + ch = *data; \ + MASK |= (1UL << (ch & (BLOOM_WIDTH - 1))); \ + } \ + break; \ + } while (0) + + /* calculate simple bloom-style bitmask for a given unicode string */ + + BLOOM_MASK mask; + + mask = 0; + switch (kind) { + case PyUnicode_1BYTE_KIND: + BLOOM_UPDATE(Py_UCS1, mask, ptr, len); + break; + case PyUnicode_2BYTE_KIND: + BLOOM_UPDATE(Py_UCS2, mask, ptr, len); + break; + case PyUnicode_4BYTE_KIND: + BLOOM_UPDATE(Py_UCS4, mask, ptr, len); + break; + default: + Py_UNREACHABLE(); + } + return mask; + +#undef BLOOM_UPDATE +} + +static inline char _CPyStr_Equal_NoIdentCheck(PyObject *str1, PyObject *str2, Py_ssize_t str2_length) { + // This helper function only exists to deduplicate code in CPyStr_Equal and CPyStr_EqualLiteral + Py_ssize_t str1_length = PyUnicode_GET_LENGTH(str1); + if (str1_length != str2_length) + return 0; + int kind = PyUnicode_KIND(str1); + if (PyUnicode_KIND(str2) != kind) + return 0; + const void *data1 = PyUnicode_DATA(str1); + const void *data2 = PyUnicode_DATA(str2); + return memcmp(data1, data2, str1_length * kind) == 0; +} + +// Adapted from CPython 3.13.1 (_PyUnicode_Equal) +char CPyStr_Equal(PyObject *str1, PyObject *str2) { + if (str1 == str2) { + return 1; + } + Py_ssize_t str2_length = PyUnicode_GET_LENGTH(str2); + return _CPyStr_Equal_NoIdentCheck(str1, str2, str2_length); +} + +char CPyStr_EqualLiteral(PyObject *str, PyObject *literal_str, Py_ssize_t literal_length) { + if (str == literal_str) { + return 1; + } + return _CPyStr_Equal_NoIdentCheck(str, literal_str, literal_length); +} + +PyObject *CPyStr_GetItem(PyObject *str, CPyTagged index) { + if (PyUnicode_READY(str) != -1) { + if (CPyTagged_CheckShort(index)) { + Py_ssize_t n = CPyTagged_ShortAsSsize_t(index); + Py_ssize_t size = PyUnicode_GET_LENGTH(str); + if (n < 0) + n += size; + if (n < 0 || n >= size) { + PyErr_SetString(PyExc_IndexError, 
"string index out of range"); + return NULL; + } + enum PyUnicode_Kind kind = (enum PyUnicode_Kind)PyUnicode_KIND(str); + void *data = PyUnicode_DATA(str); + Py_UCS4 ch = PyUnicode_READ(kind, data, n); + PyObject *unicode = PyUnicode_New(1, ch); + if (unicode == NULL) + return NULL; + + if (PyUnicode_KIND(unicode) == PyUnicode_1BYTE_KIND) { + PyUnicode_1BYTE_DATA(unicode)[0] = (Py_UCS1)ch; + } else if (PyUnicode_KIND(unicode) == PyUnicode_2BYTE_KIND) { + PyUnicode_2BYTE_DATA(unicode)[0] = (Py_UCS2)ch; + } else { + assert(PyUnicode_KIND(unicode) == PyUnicode_4BYTE_KIND); + PyUnicode_4BYTE_DATA(unicode)[0] = ch; + } + return unicode; + } else { + PyErr_SetString(PyExc_OverflowError, CPYTHON_LARGE_INT_ERRMSG); + return NULL; + } + } else { + PyObject *index_obj = CPyTagged_AsObject(index); + return PyObject_GetItem(str, index_obj); + } +} + +PyObject *CPyStr_GetItemUnsafe(PyObject *str, Py_ssize_t index) { + // This is unsafe since we don't check for overflow when doing <<. + return CPyStr_GetItem(str, index << 1); +} + +// A simplification of _PyUnicode_JoinArray() from CPython 3.9.6 +PyObject *CPyStr_Build(Py_ssize_t len, ...) { + Py_ssize_t i; + va_list args; + + // Calculate the total amount of space and check + // whether all components have the same kind. + Py_ssize_t sz = 0; + Py_UCS4 maxchar = 0; + int use_memcpy = 1; // Use memcpy by default + PyObject *last_obj = NULL; + + va_start(args, len); + for (i = 0; i < len; i++) { + PyObject *item = va_arg(args, PyObject *); + if (!PyUnicode_Check(item)) { + PyErr_Format(PyExc_TypeError, + "sequence item %zd: expected str instance," + " %.80s found", + i, Py_TYPE(item)->tp_name); + return NULL; + } + if (PyUnicode_READY(item) == -1) + return NULL; + + size_t add_sz = PyUnicode_GET_LENGTH(item); + Py_UCS4 item_maxchar = PyUnicode_MAX_CHAR_VALUE(item); + maxchar = Py_MAX(maxchar, item_maxchar); + + // Using size_t to avoid overflow during arithmetic calculation + if (add_sz > (size_t)(PY_SSIZE_T_MAX - sz)) { + PyErr_SetString(PyExc_OverflowError, + "join() result is too long for a Python string"); + return NULL; + } + sz += add_sz; + + // If these strings have different kind, we would call + // _PyUnicode_FastCopyCharacters() in the following part. 
+ if (use_memcpy && last_obj != NULL) { + if (PyUnicode_KIND(last_obj) != PyUnicode_KIND(item)) + use_memcpy = 0; + } + last_obj = item; + } + va_end(args); + + // Construct the string + PyObject *res = PyUnicode_New(sz, maxchar); + if (res == NULL) + return NULL; + + if (use_memcpy) { + unsigned char *res_data = PyUnicode_1BYTE_DATA(res); + unsigned int kind = PyUnicode_KIND(res); + + va_start(args, len); + for (i = 0; i < len; ++i) { + PyObject *item = va_arg(args, PyObject *); + Py_ssize_t itemlen = PyUnicode_GET_LENGTH(item); + if (itemlen != 0) { + memcpy(res_data, PyUnicode_DATA(item), kind * itemlen); + res_data += kind * itemlen; + } + } + va_end(args); + assert(res_data == PyUnicode_1BYTE_DATA(res) + kind * PyUnicode_GET_LENGTH(res)); + } else { + Py_ssize_t res_offset = 0; + + va_start(args, len); + for (i = 0; i < len; ++i) { + PyObject *item = va_arg(args, PyObject *); + Py_ssize_t itemlen = PyUnicode_GET_LENGTH(item); + if (itemlen != 0) { +#if CPY_3_13_FEATURES + PyUnicode_CopyCharacters(res, res_offset, item, 0, itemlen); +#else + _PyUnicode_FastCopyCharacters(res, res_offset, item, 0, itemlen); +#endif + res_offset += itemlen; + } + } + va_end(args); + assert(res_offset == PyUnicode_GET_LENGTH(res)); + } + +#ifdef Py_DEBUG + assert(_PyUnicode_CheckConsistency(res, 1)); +#endif + return res; +} + +CPyTagged CPyStr_Find(PyObject *str, PyObject *substr, CPyTagged start, int direction) { + CPyTagged end = PyUnicode_GET_LENGTH(str) << 1; + return CPyStr_FindWithEnd(str, substr, start, end, direction); +} + +CPyTagged CPyStr_FindWithEnd(PyObject *str, PyObject *substr, CPyTagged start, CPyTagged end, int direction) { + Py_ssize_t temp_start = CPyTagged_AsSsize_t(start); + if (temp_start == -1 && PyErr_Occurred()) { + PyErr_SetString(PyExc_OverflowError, CPYTHON_LARGE_INT_ERRMSG); + return CPY_INT_TAG; + } + Py_ssize_t temp_end = CPyTagged_AsSsize_t(end); + if (temp_end == -1 && PyErr_Occurred()) { + PyErr_SetString(PyExc_OverflowError, CPYTHON_LARGE_INT_ERRMSG); + return CPY_INT_TAG; + } + Py_ssize_t index = PyUnicode_Find(str, substr, temp_start, temp_end, direction); + if (unlikely(index == -2)) { + return CPY_INT_TAG; + } + return index << 1; +} + +PyObject *CPyStr_Split(PyObject *str, PyObject *sep, CPyTagged max_split) { + Py_ssize_t temp_max_split = CPyTagged_AsSsize_t(max_split); + if (temp_max_split == -1 && PyErr_Occurred()) { + PyErr_SetString(PyExc_OverflowError, CPYTHON_LARGE_INT_ERRMSG); + return NULL; + } + return PyUnicode_Split(str, sep, temp_max_split); +} + +PyObject *CPyStr_RSplit(PyObject *str, PyObject *sep, CPyTagged max_split) { + Py_ssize_t temp_max_split = CPyTagged_AsSsize_t(max_split); + if (temp_max_split == -1 && PyErr_Occurred()) { + PyErr_SetString(PyExc_OverflowError, CPYTHON_LARGE_INT_ERRMSG); + return NULL; + } + return PyUnicode_RSplit(str, sep, temp_max_split); +} + +// This function has been copied from _PyUnicode_XStrip in cpython.git:Objects/unicodeobject.c@0ef4ffeefd1737c18dc9326133c7894d58108c2e. +static PyObject *_PyStr_XStrip(PyObject *self, int striptype, PyObject *sepobj) { + const void *data; + int kind; + Py_ssize_t i, j, len; + BLOOM_MASK sepmask; + Py_ssize_t seplen; + + // This check is needed from Python 3.9 and earlier. 
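+    // (The separator bloom mask built below is a cheap negative filter: a miss
+    // proves a character cannot be in the separator string, while a hit still
+    // has to be confirmed with PyUnicode_FindChar(), since unrelated characters
+    // can map to the same bit.)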
+ if (PyUnicode_READY(self) == -1 || PyUnicode_READY(sepobj) == -1) + return NULL; + + kind = PyUnicode_KIND(self); + data = PyUnicode_DATA(self); + len = PyUnicode_GET_LENGTH(self); + seplen = PyUnicode_GET_LENGTH(sepobj); + sepmask = make_bloom_mask(PyUnicode_KIND(sepobj), + PyUnicode_DATA(sepobj), + seplen); + + i = 0; + if (striptype != RIGHTSTRIP) { + while (i < len) { + Py_UCS4 ch = PyUnicode_READ(kind, data, i); + if (!BLOOM(sepmask, ch)) + break; + if (PyUnicode_FindChar(sepobj, ch, 0, seplen, 1) < 0) + break; + i++; + } + } + + j = len; + if (striptype != LEFTSTRIP) { + j--; + while (j >= i) { + Py_UCS4 ch = PyUnicode_READ(kind, data, j); + if (!BLOOM(sepmask, ch)) + break; + if (PyUnicode_FindChar(sepobj, ch, 0, seplen, 1) < 0) + break; + j--; + } + + j++; + } + + return PyUnicode_Substring(self, i, j); +} + +// Copied from do_strip function in cpython.git/Objects/unicodeobject.c@0ef4ffeefd1737c18dc9326133c7894d58108c2e. +PyObject *_CPyStr_Strip(PyObject *self, int strip_type, PyObject *sep) { + if (sep == NULL || Py_IsNone(sep)) { + Py_ssize_t len, i, j; + + // This check is needed from Python 3.9 and earlier. + if (PyUnicode_READY(self) == -1) + return NULL; + + len = PyUnicode_GET_LENGTH(self); + + if (PyUnicode_IS_ASCII(self)) { + const Py_UCS1 *data = PyUnicode_1BYTE_DATA(self); + + i = 0; + if (strip_type != RIGHTSTRIP) { + while (i < len) { + Py_UCS1 ch = data[i]; + if (!_Py_ascii_whitespace[ch]) + break; + i++; + } + } + + j = len; + if (strip_type != LEFTSTRIP) { + j--; + while (j >= i) { + Py_UCS1 ch = data[j]; + if (!_Py_ascii_whitespace[ch]) + break; + j--; + } + j++; + } + } + else { + int kind = PyUnicode_KIND(self); + const void *data = PyUnicode_DATA(self); + + i = 0; + if (strip_type != RIGHTSTRIP) { + while (i < len) { + Py_UCS4 ch = PyUnicode_READ(kind, data, i); + if (!Py_UNICODE_ISSPACE(ch)) + break; + i++; + } + } + + j = len; + if (strip_type != LEFTSTRIP) { + j--; + while (j >= i) { + Py_UCS4 ch = PyUnicode_READ(kind, data, j); + if (!Py_UNICODE_ISSPACE(ch)) + break; + j--; + } + j++; + } + } + + return PyUnicode_Substring(self, i, j); + } + return _PyStr_XStrip(self, strip_type, sep); +} + +PyObject *CPyStr_Replace(PyObject *str, PyObject *old_substr, + PyObject *new_substr, CPyTagged max_replace) { + Py_ssize_t temp_max_replace = CPyTagged_AsSsize_t(max_replace); + if (temp_max_replace == -1 && PyErr_Occurred()) { + PyErr_SetString(PyExc_OverflowError, CPYTHON_LARGE_INT_ERRMSG); + return NULL; + } + return PyUnicode_Replace(str, old_substr, new_substr, temp_max_replace); +} + +int CPyStr_Startswith(PyObject *self, PyObject *subobj) { + Py_ssize_t start = 0; + Py_ssize_t end = PyUnicode_GET_LENGTH(self); + if (PyTuple_Check(subobj)) { + Py_ssize_t i; + for (i = 0; i < PyTuple_GET_SIZE(subobj); i++) { + PyObject *substring = PyTuple_GET_ITEM(subobj, i); + if (!PyUnicode_Check(substring)) { + PyErr_Format(PyExc_TypeError, + "tuple for startswith must only contain str, " + "not %.100s", + Py_TYPE(substring)->tp_name); + return 2; + } + int result = PyUnicode_Tailmatch(self, substring, start, end, -1); + if (result) { + return 1; + } + } + return 0; + } + return PyUnicode_Tailmatch(self, subobj, start, end, -1); +} + +int CPyStr_Endswith(PyObject *self, PyObject *subobj) { + Py_ssize_t start = 0; + Py_ssize_t end = PyUnicode_GET_LENGTH(self); + if (PyTuple_Check(subobj)) { + Py_ssize_t i; + for (i = 0; i < PyTuple_GET_SIZE(subobj); i++) { + PyObject *substring = PyTuple_GET_ITEM(subobj, i); + if (!PyUnicode_Check(substring)) { + PyErr_Format(PyExc_TypeError, 
+ "tuple for endswith must only contain str, " + "not %.100s", + Py_TYPE(substring)->tp_name); + return 2; + } + int result = PyUnicode_Tailmatch(self, substring, start, end, 1); + if (result) { + return 1; + } + } + return 0; + } + return PyUnicode_Tailmatch(self, subobj, start, end, 1); +} + +PyObject *CPyStr_Removeprefix(PyObject *self, PyObject *prefix) { + Py_ssize_t end = PyUnicode_GET_LENGTH(self); + int match = PyUnicode_Tailmatch(self, prefix, 0, end, -1); + if (match) { + Py_ssize_t prefix_end = PyUnicode_GET_LENGTH(prefix); + return PyUnicode_Substring(self, prefix_end, end); + } + return Py_NewRef(self); +} + +PyObject *CPyStr_Removesuffix(PyObject *self, PyObject *suffix) { + Py_ssize_t end = PyUnicode_GET_LENGTH(self); + int match = PyUnicode_Tailmatch(self, suffix, 0, end, 1); + if (match) { + Py_ssize_t suffix_end = PyUnicode_GET_LENGTH(suffix); + return PyUnicode_Substring(self, 0, end - suffix_end); + } + return Py_NewRef(self); +} + +/* This does a dodgy attempt to append in place */ +PyObject *CPyStr_Append(PyObject *o1, PyObject *o2) { + PyUnicode_Append(&o1, o2); + return o1; +} + +PyObject *CPyStr_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end) { + if (likely(PyUnicode_CheckExact(obj) + && CPyTagged_CheckShort(start) && CPyTagged_CheckShort(end))) { + Py_ssize_t startn = CPyTagged_ShortAsSsize_t(start); + Py_ssize_t endn = CPyTagged_ShortAsSsize_t(end); + if (startn < 0) { + startn += PyUnicode_GET_LENGTH(obj); + if (startn < 0) { + startn = 0; + } + } + if (endn < 0) { + endn += PyUnicode_GET_LENGTH(obj); + if (endn < 0) { + endn = 0; + } + } + return PyUnicode_Substring(obj, startn, endn); + } + return CPyObject_GetSlice(obj, start, end); +} + +/* Check if the given string is true (i.e. its length isn't zero) */ +bool CPyStr_IsTrue(PyObject *obj) { + Py_ssize_t length = PyUnicode_GET_LENGTH(obj); + return length != 0; +} + +Py_ssize_t CPyStr_Size_size_t(PyObject *str) { + if (PyUnicode_READY(str) != -1) { + return PyUnicode_GET_LENGTH(str); + } + return -1; +} + +PyObject *CPy_Decode(PyObject *obj, PyObject *encoding, PyObject *errors) { + const char *enc = NULL; + const char *err = NULL; + if (encoding) { + enc = PyUnicode_AsUTF8AndSize(encoding, NULL); + if (!enc) return NULL; + } + if (errors) { + err = PyUnicode_AsUTF8AndSize(errors, NULL); + if (!err) return NULL; + } + if (PyBytes_Check(obj)) { + return PyUnicode_Decode(((PyBytesObject *)obj)->ob_sval, + ((PyVarObject *)obj)->ob_size, + enc, err); + } else { + return PyUnicode_FromEncodedObject(obj, enc, err); + } +} + +PyObject *CPy_DecodeUTF8(PyObject *bytes) { + if (PyBytes_CheckExact(bytes)) { + char *buffer = PyBytes_AsString(bytes); // Borrowed reference + if (buffer == NULL) { + return NULL; + } + Py_ssize_t size = PyBytes_Size(bytes); + return PyUnicode_DecodeUTF8(buffer, size, "strict"); + } else { + return PyUnicode_FromEncodedObject(bytes, "utf-8", "strict"); + } +} + +PyObject *CPy_DecodeASCII(PyObject *bytes) { + if (PyBytes_CheckExact(bytes)) { + char *buffer = PyBytes_AsString(bytes); // Borrowed reference + if (buffer == NULL) { + return NULL; + } + Py_ssize_t size = PyBytes_Size(bytes); + return PyUnicode_DecodeASCII(buffer, size, "strict");; + } else { + return PyUnicode_FromEncodedObject(bytes, "ascii", "strict"); + } +} + +PyObject *CPy_DecodeLatin1(PyObject *bytes) { + if (PyBytes_CheckExact(bytes)) { + char *buffer = PyBytes_AsString(bytes); // Borrowed reference + if (buffer == NULL) { + return NULL; + } + Py_ssize_t size = PyBytes_Size(bytes); + return 
PyUnicode_DecodeLatin1(buffer, size, "strict"); + } else { + return PyUnicode_FromEncodedObject(bytes, "latin1", "strict"); + } +} + +PyObject *CPy_Encode(PyObject *obj, PyObject *encoding, PyObject *errors) { + const char *enc = NULL; + const char *err = NULL; + if (encoding) { + enc = PyUnicode_AsUTF8AndSize(encoding, NULL); + if (!enc) return NULL; + } + if (errors) { + err = PyUnicode_AsUTF8AndSize(errors, NULL); + if (!err) return NULL; + } + if (PyUnicode_Check(obj)) { + return PyUnicode_AsEncodedString(obj, enc, err); + } else { + PyErr_BadArgument(); + return NULL; + } +} + +Py_ssize_t CPyStr_Count(PyObject *unicode, PyObject *substring, CPyTagged start) { + Py_ssize_t temp_start = CPyTagged_AsSsize_t(start); + if (temp_start == -1 && PyErr_Occurred()) { + PyErr_SetString(PyExc_OverflowError, CPYTHON_LARGE_INT_ERRMSG); + return -1; + } + Py_ssize_t end = PyUnicode_GET_LENGTH(unicode); + return PyUnicode_Count(unicode, substring, temp_start, end); +} + +Py_ssize_t CPyStr_CountFull(PyObject *unicode, PyObject *substring, CPyTagged start, CPyTagged end) { + Py_ssize_t temp_start = CPyTagged_AsSsize_t(start); + if (temp_start == -1 && PyErr_Occurred()) { + PyErr_SetString(PyExc_OverflowError, CPYTHON_LARGE_INT_ERRMSG); + return -1; + } + Py_ssize_t temp_end = CPyTagged_AsSsize_t(end); + if (temp_end == -1 && PyErr_Occurred()) { + PyErr_SetString(PyExc_OverflowError, CPYTHON_LARGE_INT_ERRMSG); + return -1; + } + return PyUnicode_Count(unicode, substring, temp_start, temp_end); +} + + +CPyTagged CPyStr_Ord(PyObject *obj) { + Py_ssize_t s = PyUnicode_GET_LENGTH(obj); + if (s == 1) { + int kind = PyUnicode_KIND(obj); + return PyUnicode_READ(kind, PyUnicode_DATA(obj), 0) << 1; + } + PyErr_Format( + PyExc_TypeError, "ord() expected a character, but a string of length %zd found", s); + return CPY_INT_TAG; +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lib-rt/tuple_ops.c b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/tuple_ops.c new file mode 100644 index 0000000..1df73f1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lib-rt/tuple_ops.c @@ -0,0 +1,62 @@ +// Tuple primitive operations +// +// These are registered in mypyc.primitives.tuple_ops. 
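+//
+// Indices arrive here as CPyTagged values. A minimal sketch of that encoding,
+// illustrative only and inferred from how these helpers use it: a "short" int
+// is the real value shifted left by one with the low bit clear, while a set
+// low bit marks a pointer to a boxed Python int object.
+#if 0
+static Py_ssize_t sketch_untag_short(CPyTagged index) {
+    assert((index & 1) == 0);       /* low bit clear: unboxed short int */
+    return (Py_ssize_t)index >> 1;  /* drop the tag bit to recover the value */
+}
+#endif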
+ +#include +#include "CPy.h" + +PyObject *CPySequenceTuple_GetItem(PyObject *tuple, CPyTagged index) { + if (CPyTagged_CheckShort(index)) { + Py_ssize_t n = CPyTagged_ShortAsSsize_t(index); + Py_ssize_t size = PyTuple_GET_SIZE(tuple); + if (n >= 0) { + if (n >= size) { + PyErr_SetString(PyExc_IndexError, "tuple index out of range"); + return NULL; + } + } else { + n += size; + if (n < 0) { + PyErr_SetString(PyExc_IndexError, "tuple index out of range"); + return NULL; + } + } + PyObject *result = PyTuple_GET_ITEM(tuple, n); + Py_INCREF(result); + return result; + } else { + PyErr_SetString(PyExc_OverflowError, CPYTHON_LARGE_INT_ERRMSG); + return NULL; + } +} + +PyObject *CPySequenceTuple_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end) { + if (likely(PyTuple_CheckExact(obj) + && CPyTagged_CheckShort(start) && CPyTagged_CheckShort(end))) { + Py_ssize_t startn = CPyTagged_ShortAsSsize_t(start); + Py_ssize_t endn = CPyTagged_ShortAsSsize_t(end); + if (startn < 0) { + startn += PyTuple_GET_SIZE(obj); + } + if (endn < 0) { + endn += PyTuple_GET_SIZE(obj); + } + return PyTuple_GetSlice(obj, startn, endn); + } + return CPyObject_GetSlice(obj, start, end); +} + +// No error checking +PyObject *CPySequenceTuple_GetItemUnsafe(PyObject *tuple, Py_ssize_t index) +{ + PyObject *result = PyTuple_GET_ITEM(tuple, index); + Py_INCREF(result); + return result; +} + +// PyTuple_SET_ITEM does no error checking, +// and should only be used to fill in brand new tuples. +void CPySequenceTuple_SetItemUnsafe(PyObject *tuple, Py_ssize_t index, PyObject *value) +{ + PyTuple_SET_ITEM(tuple, index, value); +} diff --git a/.venv/lib/python3.12/site-packages/mypyc/lower/__init__.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/lower/__init__.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..9e77ea0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/lower/__init__.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/lower/__init__.py b/.venv/lib/python3.12/site-packages/mypyc/lower/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypyc/lower/int_ops.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/lower/int_ops.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..72078b8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/lower/int_ops.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/lower/int_ops.py b/.venv/lib/python3.12/site-packages/mypyc/lower/int_ops.py new file mode 100644 index 0000000..adfb4c2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lower/int_ops.py @@ -0,0 +1,113 @@ +"""Convert tagged int primitive ops to lower-level ops.""" + +from __future__ import annotations + +from typing import NamedTuple + +from mypyc.ir.ops import Assign, BasicBlock, Branch, ComparisonOp, Register, Value +from mypyc.ir.rtypes import bool_rprimitive, is_short_int_rprimitive +from mypyc.irbuild.ll_builder import LowLevelIRBuilder +from mypyc.lower.registry import lower_primitive_op +from mypyc.primitives.int_ops import int_equal_, int_less_than_ +from mypyc.primitives.registry import CFunctionDescription + + +# Description for building int comparison ops +# +# Fields: +# binary_op_variant: identify which IntOp to use when operands are short integers +# c_func_description: the C function to call when operands are tagged integers +# c_func_negated: whether 
to negate the C function call's result +# c_func_swap_operands: whether to swap lhs and rhs when call the function +class IntComparisonOpDescription(NamedTuple): + binary_op_variant: int + c_func_description: CFunctionDescription + c_func_negated: bool + c_func_swap_operands: bool + + +# Provide mapping from textual op to short int's op variant and boxed int's description. +# Note that these are not complete implementations and require extra IR. +int_comparison_op_mapping: dict[str, IntComparisonOpDescription] = { + "==": IntComparisonOpDescription(ComparisonOp.EQ, int_equal_, False, False), + "!=": IntComparisonOpDescription(ComparisonOp.NEQ, int_equal_, True, False), + "<": IntComparisonOpDescription(ComparisonOp.SLT, int_less_than_, False, False), + "<=": IntComparisonOpDescription(ComparisonOp.SLE, int_less_than_, True, True), + ">": IntComparisonOpDescription(ComparisonOp.SGT, int_less_than_, False, True), + ">=": IntComparisonOpDescription(ComparisonOp.SGE, int_less_than_, True, False), +} + + +def compare_tagged(self: LowLevelIRBuilder, lhs: Value, rhs: Value, op: str, line: int) -> Value: + """Compare two tagged integers using given operator (value context).""" + # generate fast binary logic ops on short ints + if (is_short_int_rprimitive(lhs.type) or is_short_int_rprimitive(rhs.type)) and op in ( + "==", + "!=", + ): + quick = True + else: + quick = is_short_int_rprimitive(lhs.type) and is_short_int_rprimitive(rhs.type) + if quick: + return self.comparison_op(lhs, rhs, int_comparison_op_mapping[op][0], line) + op_type, c_func_desc, negate_result, swap_op = int_comparison_op_mapping[op] + result = Register(bool_rprimitive) + short_int_block, int_block, out = BasicBlock(), BasicBlock(), BasicBlock() + check_lhs = self.check_tagged_short_int(lhs, line, negated=True) + if op in ("==", "!="): + self.add(Branch(check_lhs, int_block, short_int_block, Branch.BOOL)) + else: + # for non-equality logical ops (less/greater than, etc.), need to check both sides + short_lhs = BasicBlock() + self.add(Branch(check_lhs, int_block, short_lhs, Branch.BOOL)) + self.activate_block(short_lhs) + check_rhs = self.check_tagged_short_int(rhs, line, negated=True) + self.add(Branch(check_rhs, int_block, short_int_block, Branch.BOOL)) + self.activate_block(int_block) + if swap_op: + args = [rhs, lhs] + else: + args = [lhs, rhs] + call = self.call_c(c_func_desc, args, line) + if negate_result: + # TODO: introduce UnaryIntOp? 
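+        # For example, "a <= b" goes through this path as "not (b < a)": the
+        # mapping above both swaps the operands and negates the result of the
+        # boxed less-than call.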
+ call_result = self.unary_op(call, "not", line) + else: + call_result = call + self.add(Assign(result, call_result, line)) + self.goto(out) + self.activate_block(short_int_block) + eq = self.comparison_op(lhs, rhs, op_type, line) + self.add(Assign(result, eq, line)) + self.goto_and_activate(out) + return result + + +@lower_primitive_op("int_eq") +def lower_int_eq(builder: LowLevelIRBuilder, args: list[Value], line: int) -> Value: + return compare_tagged(builder, args[0], args[1], "==", line) + + +@lower_primitive_op("int_ne") +def lower_int_ne(builder: LowLevelIRBuilder, args: list[Value], line: int) -> Value: + return compare_tagged(builder, args[0], args[1], "!=", line) + + +@lower_primitive_op("int_lt") +def lower_int_lt(builder: LowLevelIRBuilder, args: list[Value], line: int) -> Value: + return compare_tagged(builder, args[0], args[1], "<", line) + + +@lower_primitive_op("int_le") +def lower_int_le(builder: LowLevelIRBuilder, args: list[Value], line: int) -> Value: + return compare_tagged(builder, args[0], args[1], "<=", line) + + +@lower_primitive_op("int_gt") +def lower_int_gt(builder: LowLevelIRBuilder, args: list[Value], line: int) -> Value: + return compare_tagged(builder, args[0], args[1], ">", line) + + +@lower_primitive_op("int_ge") +def lower_int_ge(builder: LowLevelIRBuilder, args: list[Value], line: int) -> Value: + return compare_tagged(builder, args[0], args[1], ">=", line) diff --git a/.venv/lib/python3.12/site-packages/mypyc/lower/list_ops.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/lower/list_ops.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..b16fedb Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/lower/list_ops.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/lower/list_ops.py b/.venv/lib/python3.12/site-packages/mypyc/lower/list_ops.py new file mode 100644 index 0000000..631008d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lower/list_ops.py @@ -0,0 +1,71 @@ +from __future__ import annotations + +from mypyc.common import PLATFORM_SIZE +from mypyc.ir.ops import GetElementPtr, Integer, IntOp, SetMem, Value +from mypyc.ir.rtypes import ( + PyListObject, + c_pyssize_t_rprimitive, + object_rprimitive, + pointer_rprimitive, +) +from mypyc.irbuild.ll_builder import LowLevelIRBuilder +from mypyc.lower.registry import lower_primitive_op + + +@lower_primitive_op("buf_init_item") +def buf_init_item(builder: LowLevelIRBuilder, args: list[Value], line: int) -> Value: + """Initialize an item in a buffer of "PyObject *" values at given index. + + This can be used to initialize the data buffer of a freshly allocated list + object. 
+ """ + base = args[0] + index_value = args[1] + value = args[2] + assert isinstance(index_value, Integer), index_value + index = index_value.numeric_value() + if index == 0: + ptr = base + else: + ptr = builder.add( + IntOp( + pointer_rprimitive, + base, + Integer(index * PLATFORM_SIZE, c_pyssize_t_rprimitive), + IntOp.ADD, + line, + ) + ) + return builder.add(SetMem(object_rprimitive, ptr, value, line)) + + +@lower_primitive_op("list_items") +def list_items(builder: LowLevelIRBuilder, args: list[Value], line: int) -> Value: + ob_item_ptr = builder.add(GetElementPtr(args[0], PyListObject, "ob_item", line)) + return builder.load_mem(ob_item_ptr, pointer_rprimitive) + + +def list_item_ptr(builder: LowLevelIRBuilder, obj: Value, index: Value, line: int) -> Value: + """Get a pointer to a list item (index must be valid and non-negative). + + Type of index must be c_pyssize_t_rprimitive, and obj must refer to a list object. + """ + # List items are represented as an array of pointers. Pointer to the item obj[index] is + # + index * . + items = list_items(builder, [obj], line) + delta = builder.add( + IntOp( + c_pyssize_t_rprimitive, + index, + Integer(PLATFORM_SIZE, c_pyssize_t_rprimitive), + IntOp.MUL, + ) + ) + return builder.add(IntOp(pointer_rprimitive, items, delta, IntOp.ADD)) + + +@lower_primitive_op("list_get_item_unsafe") +def list_get_item_unsafe(builder: LowLevelIRBuilder, args: list[Value], line: int) -> Value: + index = builder.coerce(args[1], c_pyssize_t_rprimitive, line) + item_ptr = list_item_ptr(builder, args[0], index, line) + return builder.load_mem(item_ptr, object_rprimitive) diff --git a/.venv/lib/python3.12/site-packages/mypyc/lower/misc_ops.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/lower/misc_ops.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..8cdf91e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/lower/misc_ops.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/lower/misc_ops.py b/.venv/lib/python3.12/site-packages/mypyc/lower/misc_ops.py new file mode 100644 index 0000000..3c42257 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lower/misc_ops.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from mypyc.ir.ops import ComparisonOp, GetElementPtr, Integer, LoadMem, Value +from mypyc.ir.rtypes import PyVarObject, c_pyssize_t_rprimitive, object_rprimitive +from mypyc.irbuild.ll_builder import LowLevelIRBuilder +from mypyc.lower.registry import lower_primitive_op + + +@lower_primitive_op("var_object_size") +def var_object_size(builder: LowLevelIRBuilder, args: list[Value], line: int) -> Value: + elem_address = builder.add(GetElementPtr(args[0], PyVarObject, "ob_size")) + return builder.add(LoadMem(c_pyssize_t_rprimitive, elem_address)) + + +@lower_primitive_op("propagate_if_error") +def propagate_if_error_op(builder: LowLevelIRBuilder, args: list[Value], line: int) -> Value: + # Return False on NULL. The primitive uses ERR_FALSE, so this is an error. 
+ return builder.add(ComparisonOp(args[0], Integer(0, object_rprimitive), ComparisonOp.NEQ)) diff --git a/.venv/lib/python3.12/site-packages/mypyc/lower/registry.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/lower/registry.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..d79c5b2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/lower/registry.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/lower/registry.py b/.venv/lib/python3.12/site-packages/mypyc/lower/registry.py new file mode 100644 index 0000000..a20990f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/lower/registry.py @@ -0,0 +1,28 @@ +from __future__ import annotations + +from typing import Callable, Final, Optional, TypeVar + +from mypyc.ir.ops import Value +from mypyc.irbuild.ll_builder import LowLevelIRBuilder + +LowerFunc = Callable[[LowLevelIRBuilder, list[Value], int], Value] +LowerFuncOpt = Callable[[LowLevelIRBuilder, list[Value], int], Optional[Value]] + +lowering_registry: Final[dict[str, LowerFuncOpt]] = {} + +LF = TypeVar("LF", LowerFunc, LowerFuncOpt) + + +def lower_primitive_op(name: str) -> Callable[[LF], LF]: + """Register a handler that generates low-level IR for a primitive op.""" + + def wrapper(f: LF) -> LF: + assert name not in lowering_registry + lowering_registry[name] = f + return f + + return wrapper + + +# Import various modules that set up global state. +from mypyc.lower import int_ops, list_ops, misc_ops # noqa: F401 diff --git a/.venv/lib/python3.12/site-packages/mypyc/namegen.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/namegen.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..293b125 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/namegen.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/namegen.py b/.venv/lib/python3.12/site-packages/mypyc/namegen.py new file mode 100644 index 0000000..1e05531 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/namegen.py @@ -0,0 +1,124 @@ +from __future__ import annotations + +from collections.abc import Iterable + + +class NameGenerator: + """Utility for generating distinct C names from Python names. + + Since C names can't use '.' (or unicode), some care is required to + make C names generated from Python names unique. Also, we want to + avoid generating overly long C names since they make the generated + code harder to read. + + Note that we don't restrict ourselves to a 32-character distinguishing + prefix guaranteed by the C standard since all the compilers we care + about at the moment support longer names without issues. + + For names that are exported in a shared library (not static) use + exported_name() instead. + + Summary of the approach: + + * Generate a unique name prefix from suffix of fully-qualified + module name used for static names. If only compiling a single + module, this can be empty. For example, if the modules are + 'foo.bar' and 'foo.baz', the prefixes can be 'bar_' and 'baz_', + respectively. If the modules are 'bar.foo' and 'baz.foo', the + prefixes will be 'bar_foo_' and 'baz_foo_'. + + * Replace '.' in the Python name with '___' in the C name. (And + replace the unlikely but possible '___' with '___3_'. This + collides '___' with '.3_', but this is OK because names + may not start with a digit.) + + The generated should be internal to a build and thus the mapping is + arbitrary. 
Just generating names '1', '2', ... would be correct, + though not very usable. The generated names may be visible in CPU + profiles and when debugging using native debuggers. + """ + + def __init__(self, groups: Iterable[list[str]], *, separate: bool = False) -> None: + """Initialize with a list of modules in each compilation group. + + The names of modules are used to shorten names referring to + modules, for convenience. Arbitrary module + names are supported for generated names, but uncompiled modules + will use long names. + + If separate is True, assume separate compilation. This implies + that we don't have knowledge of all sources that will be linked + together. In this case we won't trim module prefixes, since we + don't have enough information to determine common module prefixes. + """ + self.module_map: dict[str, str] = {} + for names in groups: + if not separate: + self.module_map.update(make_module_translation_map(names)) + else: + for name in names: + self.module_map[name] = name + "." + self.translations: dict[tuple[str, str], str] = {} + self.used_names: set[str] = set() + + def private_name(self, module: str, partial_name: str | None = None) -> str: + """Return a C name usable for a static definition. + + Return a distinct result for each (module, partial_name) pair. + + The caller should add a suitable prefix to the name to avoid + conflicts with other C names. Only ensure that the results of + this function are unique, not that they aren't overlapping with + arbitrary names. + + If a name is not specific to any module, the module argument can + be an empty string. + """ + # TODO: Support unicode + if partial_name is None: + return exported_name(self.module_map[module].rstrip(".")) + if (module, partial_name) in self.translations: + return self.translations[module, partial_name] + if module in self.module_map: + module_prefix = self.module_map[module] + elif module: + module_prefix = module + "." + else: + module_prefix = "" + actual = exported_name(f"{module_prefix}{partial_name}") + self.translations[module, partial_name] = actual + return actual + + +def exported_name(fullname: str) -> str: + """Return a C name usable for an exported definition. + + This is like private_name(), but the output only depends on the + 'fullname' argument, so the names are distinct across multiple + builds. 
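+
+    For example, "foo.bar" is expected to come out as "foo___bar"; a name that
+    already contains "___" has that run escaped to "___3_" first so the result
+    stays unambiguous.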
+ """ + # TODO: Support unicode + return fullname.replace("___", "___3_").replace(".", "___") + + +def make_module_translation_map(names: list[str]) -> dict[str, str]: + num_instances: dict[str, int] = {} + for name in names: + for suffix in candidate_suffixes(name): + num_instances[suffix] = num_instances.get(suffix, 0) + 1 + result = {} + for name in names: + for suffix in candidate_suffixes(name): + if num_instances[suffix] == 1: + break + # Takes the last suffix if none are unique + result[name] = suffix + return result + + +def candidate_suffixes(fullname: str) -> list[str]: + components = fullname.split(".") + result = [""] + for i in range(len(components)): + result.append(".".join(components[-i - 1 :]) + ".") + return result diff --git a/.venv/lib/python3.12/site-packages/mypyc/options.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/options.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..8e2ecde Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/options.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/options.py b/.venv/lib/python3.12/site-packages/mypyc/options.py new file mode 100644 index 0000000..9f16c07 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/options.py @@ -0,0 +1,62 @@ +from __future__ import annotations + +import sys + + +class CompilerOptions: + def __init__( + self, + strip_asserts: bool = False, + multi_file: bool = False, + verbose: bool = False, + separate: bool = False, + target_dir: str | None = None, + include_runtime_files: bool | None = None, + capi_version: tuple[int, int] | None = None, + python_version: tuple[int, int] | None = None, + strict_dunder_typing: bool = False, + group_name: str | None = None, + log_trace: bool = False, + depends_on_librt_internal: bool = False, + experimental_features: bool = False, + ) -> None: + self.strip_asserts = strip_asserts + self.multi_file = multi_file + self.verbose = verbose + self.separate = separate + self.global_opts = not separate + self.target_dir = target_dir or "build" + self.include_runtime_files = ( + include_runtime_files if include_runtime_files is not None else not multi_file + ) + # The target Python C API version. Overriding this is mostly + # useful in IR tests, since there's no guarantee that + # binaries are backward compatible even if no recent API + # features are used. + self.capi_version = capi_version or sys.version_info[:2] + self.python_version = python_version + # Make possible to inline dunder methods in the generated code. + # Typically, the convention is the dunder methods can return `NotImplemented` + # even when its return type is just `bool`. + # By enabling this option, this convention is no longer valid and the dunder + # will assume the return type of the method strictly, which can lead to + # more optimization opportunities. + self.strict_dunders_typing = strict_dunder_typing + # Override the automatic group name derived from the hash of module names. + # This affects the names of generated .c, .h and shared library files. + # This is only supported when compiling exactly one group, and a shared + # library is generated (with shims). This can be used to make the output + # file names more predictable. + self.group_name = group_name + # If enabled, write a trace log of events based on executed operations to + # mypyc_trace.txt when compiled module is executed. This is useful for + # performance analysis. 
+ self.log_trace = log_trace + # If enabled, add capsule imports of librt.internal API. This should be used + # only for mypy itself, third-party code compiled with mypyc should not use + # librt.internal. + self.depends_on_librt_internal = depends_on_librt_internal + # Some experimental features are only available when building librt in + # experimental mode (e.g. use _experimental suffix in librt run test). + # These can't be used with a librt wheel installed from PyPI. + self.experimental_features = experimental_features diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/__init__.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/primitives/__init__.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..c5c29e3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/primitives/__init__.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/__init__.py b/.venv/lib/python3.12/site-packages/mypyc/primitives/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/bytes_ops.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/primitives/bytes_ops.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..38b6dd9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/primitives/bytes_ops.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/bytes_ops.py b/.venv/lib/python3.12/site-packages/mypyc/primitives/bytes_ops.py new file mode 100644 index 0000000..c88e89d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/primitives/bytes_ops.py @@ -0,0 +1,128 @@ +"""Primitive bytes ops.""" + +from __future__ import annotations + +from mypyc.ir.ops import ERR_MAGIC, ERR_NEVER +from mypyc.ir.rtypes import ( + RUnion, + bit_rprimitive, + bytes_rprimitive, + c_int_rprimitive, + c_pyssize_t_rprimitive, + dict_rprimitive, + int_rprimitive, + list_rprimitive, + object_rprimitive, + str_rprimitive, +) +from mypyc.primitives.registry import ( + ERR_NEG_INT, + binary_op, + custom_op, + function_op, + load_address_op, + method_op, +) + +# Get the 'bytes' type object. 
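+# (load_address_op simply publishes the address of an existing C global, here
+# CPython's PyBytes_Type, so references to the 'bytes' class compile to a direct
+# address load instead of a runtime name lookup.)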
+load_address_op(name="builtins.bytes", type=object_rprimitive, src="PyBytes_Type") + +# bytes(obj) +function_op( + name="builtins.bytes", + arg_types=[RUnion([list_rprimitive, dict_rprimitive, str_rprimitive])], + return_type=bytes_rprimitive, + c_function_name="PyBytes_FromObject", + error_kind=ERR_MAGIC, +) + +# translate isinstance(obj, bytes) +isinstance_bytes = function_op( + name="builtins.isinstance", + arg_types=[object_rprimitive], + return_type=bit_rprimitive, + c_function_name="PyBytes_Check", + error_kind=ERR_NEVER, +) + +# bytearray(obj) +function_op( + name="builtins.bytearray", + arg_types=[object_rprimitive], + return_type=bytes_rprimitive, + c_function_name="PyByteArray_FromObject", + error_kind=ERR_MAGIC, +) + +# translate isinstance(obj, bytearray) +isinstance_bytearray = function_op( + name="builtins.isinstance", + arg_types=[object_rprimitive], + return_type=bit_rprimitive, + c_function_name="PyByteArray_Check", + error_kind=ERR_NEVER, +) + +# bytes ==/!= (return -1/0/1) +bytes_compare = custom_op( + arg_types=[bytes_rprimitive, bytes_rprimitive], + return_type=c_int_rprimitive, + c_function_name="CPyBytes_Compare", + error_kind=ERR_NEG_INT, +) + +# bytes + bytes +# bytearray + bytearray +binary_op( + name="+", + arg_types=[bytes_rprimitive, bytes_rprimitive], + return_type=bytes_rprimitive, + c_function_name="CPyBytes_Concat", + error_kind=ERR_MAGIC, + steals=[True, False], +) + +# bytes[begin:end] +bytes_slice_op = custom_op( + arg_types=[bytes_rprimitive, int_rprimitive, int_rprimitive], + return_type=bytes_rprimitive, + c_function_name="CPyBytes_GetSlice", + error_kind=ERR_MAGIC, +) + +# bytes[index] +# bytearray[index] +method_op( + name="__getitem__", + arg_types=[bytes_rprimitive, int_rprimitive], + return_type=int_rprimitive, + c_function_name="CPyBytes_GetItem", + error_kind=ERR_MAGIC, +) + +# bytes.join(obj) +method_op( + name="join", + arg_types=[bytes_rprimitive, object_rprimitive], + return_type=bytes_rprimitive, + c_function_name="CPyBytes_Join", + error_kind=ERR_MAGIC, +) + +# Join bytes objects and return a new bytes. +# The first argument is the total number of the following bytes. 
+bytes_build_op = custom_op( + arg_types=[c_pyssize_t_rprimitive], + return_type=bytes_rprimitive, + c_function_name="CPyBytes_Build", + error_kind=ERR_MAGIC, + var_arg_type=bytes_rprimitive, +) + +function_op( + name="builtins.ord", + arg_types=[bytes_rprimitive], + return_type=int_rprimitive, + c_function_name="CPyBytes_Ord", + error_kind=ERR_MAGIC, +) diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/dict_ops.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/primitives/dict_ops.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..ad12616 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/primitives/dict_ops.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/dict_ops.py b/.venv/lib/python3.12/site-packages/mypyc/primitives/dict_ops.py new file mode 100644 index 0000000..f98bcc8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/primitives/dict_ops.py @@ -0,0 +1,343 @@ +"""Primitive dict ops.""" + +from __future__ import annotations + +from mypyc.ir.ops import ERR_FALSE, ERR_MAGIC, ERR_NEVER +from mypyc.ir.rtypes import ( + bit_rprimitive, + bool_rprimitive, + c_int_rprimitive, + c_pyssize_t_rprimitive, + dict_next_rtuple_pair, + dict_next_rtuple_single, + dict_rprimitive, + int_rprimitive, + list_rprimitive, + object_rprimitive, +) +from mypyc.primitives.registry import ( + ERR_NEG_INT, + binary_op, + custom_op, + function_op, + load_address_op, + method_op, +) + +# Get the 'dict' type object. +load_address_op(name="builtins.dict", type=object_rprimitive, src="PyDict_Type") + +# Construct an empty dictionary via dict(). +function_op( + name="builtins.dict", + arg_types=[], + return_type=dict_rprimitive, + c_function_name="PyDict_New", + error_kind=ERR_MAGIC, +) + +# Construct an empty dictionary. +dict_new_op = custom_op( + arg_types=[], return_type=dict_rprimitive, c_function_name="PyDict_New", error_kind=ERR_MAGIC +) + +# Construct a dictionary from keys and values. +# Positional argument is the number of key-value pairs +# Variable arguments are (key1, value1, ..., keyN, valueN). +dict_build_op = custom_op( + arg_types=[c_pyssize_t_rprimitive], + return_type=dict_rprimitive, + c_function_name="CPyDict_Build", + error_kind=ERR_MAGIC, + var_arg_type=object_rprimitive, +) + +# Construct a dictionary from another dictionary. 
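+# (priority=2 lets this exact-dict specialization win over the generic
+# one-argument dict(obj) registration below when the argument is statically
+# known to be a dict.)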
+dict_copy_op = function_op( + name="builtins.dict", + arg_types=[dict_rprimitive], + return_type=dict_rprimitive, + c_function_name="PyDict_Copy", + error_kind=ERR_MAGIC, + priority=2, +) + +# Generic one-argument dict constructor: dict(obj) +dict_copy = function_op( + name="builtins.dict", + arg_types=[object_rprimitive], + return_type=dict_rprimitive, + c_function_name="CPyDict_FromAny", + error_kind=ERR_MAGIC, +) + +# translate isinstance(obj, dict) +isinstance_dict = function_op( + name="builtins.isinstance", + arg_types=[object_rprimitive], + return_type=bit_rprimitive, + c_function_name="PyDict_Check", + error_kind=ERR_NEVER, +) + +# dict[key] +dict_get_item_op = method_op( + name="__getitem__", + arg_types=[dict_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyDict_GetItem", + error_kind=ERR_MAGIC, +) + +# dict[key] = value +dict_set_item_op = method_op( + name="__setitem__", + arg_types=[dict_rprimitive, object_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="CPyDict_SetItem", + error_kind=ERR_NEG_INT, +) + +# dict[key] = value (exact dict only, no subclasses) +# NOTE: this is currently for internal use only, and not used for CallExpr specialization +exact_dict_set_item_op = custom_op( + arg_types=[dict_rprimitive, object_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyDict_SetItem", + error_kind=ERR_NEG_INT, +) + +# key in dict +binary_op( + name="in", + arg_types=[object_rprimitive, dict_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyDict_Contains", + error_kind=ERR_NEG_INT, + truncated_type=bool_rprimitive, + ordering=[1, 0], +) + +# dict1.update(dict2) +dict_update_op = method_op( + name="update", + arg_types=[dict_rprimitive, dict_rprimitive], + return_type=c_int_rprimitive, + c_function_name="CPyDict_Update", + error_kind=ERR_NEG_INT, + priority=2, +) + +# Operation used for **value in dict displays. +# This is mostly like dict.update(obj), but has customized error handling. +dict_update_in_display_op = custom_op( + arg_types=[dict_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="CPyDict_UpdateInDisplay", + error_kind=ERR_NEG_INT, +) + +# dict.update(obj) +method_op( + name="update", + arg_types=[dict_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="CPyDict_UpdateFromAny", + error_kind=ERR_NEG_INT, +) + +# dict.get(key, default) +method_op( + name="get", + arg_types=[dict_rprimitive, object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyDict_Get", + error_kind=ERR_MAGIC, +) + +# dict.get(key) +dict_get_method_with_none = method_op( + name="get", + arg_types=[dict_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyDict_GetWithNone", + error_kind=ERR_MAGIC, +) + +# dict.setdefault(key, default) +dict_setdefault_op = method_op( + name="setdefault", + arg_types=[dict_rprimitive, object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyDict_SetDefault", + error_kind=ERR_MAGIC, +) + +# dict.setdefault(key) +method_op( + name="setdefault", + arg_types=[dict_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyDict_SetDefaultWithNone", + error_kind=ERR_MAGIC, +) + +# dict.setdefault(key, empty tuple/list/set) +# The third argument marks the data type of the second argument. +# 1: list 2: dict 3: set +# Other number would lead to an error. 
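+# (This specialization presumably targets patterns like
+# d.setdefault(key, []).append(x): the empty container is only created inside
+# the C helper when the key turns out to be missing.)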
+dict_setdefault_spec_init_op = custom_op( + arg_types=[dict_rprimitive, object_rprimitive, c_int_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyDict_SetDefaultWithEmptyDatatype", + error_kind=ERR_MAGIC, +) + +# dict.keys() +method_op( + name="keys", + arg_types=[dict_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyDict_KeysView", + error_kind=ERR_MAGIC, +) + +# dict.values() +method_op( + name="values", + arg_types=[dict_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyDict_ValuesView", + error_kind=ERR_MAGIC, +) + +# dict.items() +method_op( + name="items", + arg_types=[dict_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyDict_ItemsView", + error_kind=ERR_MAGIC, +) + +# dict.clear() +method_op( + name="clear", + arg_types=[dict_rprimitive], + return_type=bit_rprimitive, + c_function_name="CPyDict_Clear", + error_kind=ERR_FALSE, +) + +# dict.copy() +method_op( + name="copy", + arg_types=[dict_rprimitive], + return_type=dict_rprimitive, + c_function_name="CPyDict_Copy", + error_kind=ERR_MAGIC, +) + +# list(dict.keys()) +dict_keys_op = custom_op( + arg_types=[dict_rprimitive], + return_type=list_rprimitive, + c_function_name="CPyDict_Keys", + error_kind=ERR_MAGIC, +) + +# list(dict.values()) +dict_values_op = custom_op( + arg_types=[dict_rprimitive], + return_type=list_rprimitive, + c_function_name="CPyDict_Values", + error_kind=ERR_MAGIC, +) + +# list(dict.items()) +dict_items_op = custom_op( + arg_types=[dict_rprimitive], + return_type=list_rprimitive, + c_function_name="CPyDict_Items", + error_kind=ERR_MAGIC, +) + +# PyDict_Next() fast iteration +dict_key_iter_op = custom_op( + arg_types=[dict_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyDict_GetKeysIter", + error_kind=ERR_MAGIC, +) + +dict_value_iter_op = custom_op( + arg_types=[dict_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyDict_GetValuesIter", + error_kind=ERR_MAGIC, +) + +dict_item_iter_op = custom_op( + arg_types=[dict_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyDict_GetItemsIter", + error_kind=ERR_MAGIC, +) + +dict_next_key_op = custom_op( + arg_types=[object_rprimitive, int_rprimitive], + return_type=dict_next_rtuple_single, + c_function_name="CPyDict_NextKey", + error_kind=ERR_NEVER, +) + +dict_next_value_op = custom_op( + arg_types=[object_rprimitive, int_rprimitive], + return_type=dict_next_rtuple_single, + c_function_name="CPyDict_NextValue", + error_kind=ERR_NEVER, +) + +dict_next_item_op = custom_op( + arg_types=[object_rprimitive, int_rprimitive], + return_type=dict_next_rtuple_pair, + c_function_name="CPyDict_NextItem", + error_kind=ERR_NEVER, +) + +# check that len(dict) == const during iteration +dict_check_size_op = custom_op( + arg_types=[dict_rprimitive, c_pyssize_t_rprimitive], + return_type=bit_rprimitive, + c_function_name="CPyDict_CheckSize", + error_kind=ERR_FALSE, +) + +dict_ssize_t_size_op = custom_op( + arg_types=[dict_rprimitive], + return_type=c_pyssize_t_rprimitive, + c_function_name="PyDict_Size", + error_kind=ERR_NEVER, +) + +# Delete an item from a dict +dict_del_item = custom_op( + arg_types=[object_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyDict_DelItem", + error_kind=ERR_NEG_INT, +) + +supports_mapping_protocol = custom_op( + arg_types=[object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="CPyMapping_Check", + error_kind=ERR_NEVER, +) + +mapping_has_key = custom_op( + 
arg_types=[object_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyMapping_HasKey", + error_kind=ERR_NEVER, +) diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/exc_ops.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/primitives/exc_ops.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..8ba4a48 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/primitives/exc_ops.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/exc_ops.py b/.venv/lib/python3.12/site-packages/mypyc/primitives/exc_ops.py new file mode 100644 index 0000000..e1234f8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/primitives/exc_ops.py @@ -0,0 +1,111 @@ +"""Exception-related primitive ops.""" + +from __future__ import annotations + +from mypyc.ir.ops import ERR_ALWAYS, ERR_FALSE, ERR_NEVER +from mypyc.ir.rtypes import bit_rprimitive, exc_rtuple, object_rprimitive, void_rtype +from mypyc.primitives.registry import custom_op, custom_primitive_op + +# If the argument is a class, raise an instance of the class. Otherwise, assume +# that the argument is an exception object, and raise it. +raise_exception_op = custom_op( + arg_types=[object_rprimitive], + return_type=void_rtype, + c_function_name="CPy_Raise", + error_kind=ERR_ALWAYS, +) + +# Raise StopIteration exception with the specified value (which can be NULL). +set_stop_iteration_value = custom_op( + arg_types=[object_rprimitive], + return_type=void_rtype, + c_function_name="CPyGen_SetStopIterationValue", + error_kind=ERR_ALWAYS, +) + +# Raise exception with traceback. +# Arguments are (exception type, exception value, traceback). +raise_exception_with_tb_op = custom_op( + arg_types=[object_rprimitive, object_rprimitive, object_rprimitive], + return_type=void_rtype, + c_function_name="CPyErr_SetObjectAndTraceback", + error_kind=ERR_ALWAYS, +) + +# Reraise the currently raised exception. +reraise_exception_op = custom_op( + arg_types=[], return_type=void_rtype, c_function_name="CPy_Reraise", error_kind=ERR_ALWAYS +) + +# Propagate exception if the CPython error indicator is set (an exception was raised). +no_err_occurred_op = custom_op( + arg_types=[], + return_type=bit_rprimitive, + c_function_name="CPy_NoErrOccurred", + error_kind=ERR_FALSE, +) + +err_occurred_op = custom_op( + arg_types=[], + return_type=object_rprimitive, + c_function_name="PyErr_Occurred", + error_kind=ERR_NEVER, + is_borrowed=True, +) + +# Keep propagating a raised exception by unconditionally giving an error value. +# This doesn't actually raise an exception. +keep_propagating_op = custom_op( + arg_types=[], + return_type=bit_rprimitive, + c_function_name="CPy_KeepPropagating", + error_kind=ERR_FALSE, +) + +# If argument is NULL, propagate currently raised exception (in this case +# an exception must have been raised). If this can be used, it's faster +# than using PyErr_Occurred(). +propagate_if_error_op = custom_primitive_op( + "propagate_if_error", + arg_types=[object_rprimitive], + return_type=bit_rprimitive, + error_kind=ERR_FALSE, +) + +# Catches a propagating exception and makes it the "currently +# handled exception" (by sticking it into sys.exc_info()). Returns the +# exception that was previously being handled, which must be restored +# later. 
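+# (Roughly how a try/except block is expected to use these ops: error_catch_op
+# runs on entry to the handler, exc_matches_op selects the matching except
+# clause, and restore_exc_info_op puts the previously handled exception back
+# afterwards.)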
+error_catch_op = custom_op( + arg_types=[], return_type=exc_rtuple, c_function_name="CPy_CatchError", error_kind=ERR_NEVER +) + +# Restore an old "currently handled exception" returned from. +# error_catch (by sticking it into sys.exc_info()) +restore_exc_info_op = custom_op( + arg_types=[exc_rtuple], + return_type=void_rtype, + c_function_name="CPy_RestoreExcInfo", + error_kind=ERR_NEVER, +) + +# Checks whether the exception currently being handled matches a particular type. +exc_matches_op = custom_op( + arg_types=[object_rprimitive], + return_type=bit_rprimitive, + c_function_name="CPy_ExceptionMatches", + error_kind=ERR_NEVER, +) + +# Get the value of the exception currently being handled. +get_exc_value_op = custom_op( + arg_types=[], + return_type=object_rprimitive, + c_function_name="CPy_GetExcValue", + error_kind=ERR_NEVER, +) + +# Get exception info (exception type, exception instance, traceback object). +get_exc_info_op = custom_op( + arg_types=[], return_type=exc_rtuple, c_function_name="CPy_GetExcInfo", error_kind=ERR_NEVER +) diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/float_ops.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/primitives/float_ops.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..c02bad3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/primitives/float_ops.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/float_ops.py b/.venv/lib/python3.12/site-packages/mypyc/primitives/float_ops.py new file mode 100644 index 0000000..542192a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/primitives/float_ops.py @@ -0,0 +1,178 @@ +"""Primitive float ops.""" + +from __future__ import annotations + +from mypyc.ir.ops import ERR_MAGIC, ERR_MAGIC_OVERLAPPING, ERR_NEVER +from mypyc.ir.rtypes import ( + bit_rprimitive, + bool_rprimitive, + float_rprimitive, + int_rprimitive, + object_rprimitive, + str_rprimitive, +) +from mypyc.primitives.registry import binary_op, function_op, load_address_op + +# Get the 'builtins.float' type object. 
+load_address_op(name="builtins.float", type=object_rprimitive, src="PyFloat_Type") + +binary_op( + name="//", + arg_types=[float_rprimitive, float_rprimitive], + return_type=float_rprimitive, + c_function_name="CPyFloat_FloorDivide", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +# float(int) +int_to_float_op = function_op( + name="builtins.float", + arg_types=[int_rprimitive], + return_type=float_rprimitive, + c_function_name="CPyFloat_FromTagged", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +# float(str) +function_op( + name="builtins.float", + arg_types=[str_rprimitive], + return_type=object_rprimitive, + c_function_name="PyFloat_FromString", + error_kind=ERR_MAGIC, +) + +# abs(float) +function_op( + name="builtins.abs", + arg_types=[float_rprimitive], + return_type=float_rprimitive, + c_function_name="fabs", + error_kind=ERR_NEVER, +) + +# math.sin(float) +function_op( + name="math.sin", + arg_types=[float_rprimitive], + return_type=float_rprimitive, + c_function_name="CPyFloat_Sin", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +# math.cos(float) +function_op( + name="math.cos", + arg_types=[float_rprimitive], + return_type=float_rprimitive, + c_function_name="CPyFloat_Cos", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +# math.tan(float) +function_op( + name="math.tan", + arg_types=[float_rprimitive], + return_type=float_rprimitive, + c_function_name="CPyFloat_Tan", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +# math.sqrt(float) +function_op( + name="math.sqrt", + arg_types=[float_rprimitive], + return_type=float_rprimitive, + c_function_name="CPyFloat_Sqrt", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +# math.exp(float) +function_op( + name="math.exp", + arg_types=[float_rprimitive], + return_type=float_rprimitive, + c_function_name="CPyFloat_Exp", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +# math.log(float) +function_op( + name="math.log", + arg_types=[float_rprimitive], + return_type=float_rprimitive, + c_function_name="CPyFloat_Log", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +# math.floor(float) +function_op( + name="math.floor", + arg_types=[float_rprimitive], + return_type=int_rprimitive, + c_function_name="CPyFloat_Floor", + error_kind=ERR_MAGIC, +) + +# math.ceil(float) +function_op( + name="math.ceil", + arg_types=[float_rprimitive], + return_type=int_rprimitive, + c_function_name="CPyFloat_Ceil", + error_kind=ERR_MAGIC, +) + +# math.fabs(float) +function_op( + name="math.fabs", + arg_types=[float_rprimitive], + return_type=float_rprimitive, + c_function_name="fabs", + error_kind=ERR_NEVER, +) + +# math.pow(float, float) +pow_op = function_op( + name="math.pow", + arg_types=[float_rprimitive, float_rprimitive], + return_type=float_rprimitive, + c_function_name="CPyFloat_Pow", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +# math.copysign(float, float) +copysign_op = function_op( + name="math.copysign", + arg_types=[float_rprimitive, float_rprimitive], + return_type=float_rprimitive, + c_function_name="copysign", + error_kind=ERR_NEVER, +) + +# math.isinf(float) +function_op( + name="math.isinf", + arg_types=[float_rprimitive], + return_type=bool_rprimitive, + c_function_name="CPyFloat_IsInf", + error_kind=ERR_NEVER, +) + +# math.isnan(float) +function_op( + name="math.isnan", + arg_types=[float_rprimitive], + return_type=bool_rprimitive, + c_function_name="CPyFloat_IsNaN", + error_kind=ERR_NEVER, +) + +# translate isinstance(obj, float) +isinstance_float = function_op( + name="builtins.isinstance", + arg_types=[object_rprimitive], + return_type=bit_rprimitive, + c_function_name="PyFloat_Check", + 
error_kind=ERR_NEVER, +) diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/generic_ops.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/primitives/generic_ops.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..87dd27a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/primitives/generic_ops.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/generic_ops.py b/.venv/lib/python3.12/site-packages/mypyc/primitives/generic_ops.py new file mode 100644 index 0000000..1003fda --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/primitives/generic_ops.py @@ -0,0 +1,426 @@ +"""Fallback primitive operations that operate on 'object' operands. + +These just call the relevant Python C API function or a thin wrapper +around an API function. Most of these also have faster, specialized +ops that operate on some more specific types. + +Many of these ops are given a low priority (0) so that specialized ops +will take precedence. If your specialized op doesn't seem to be used, +check that the priorities are configured properly. +""" + +from __future__ import annotations + +from mypyc.ir.ops import ERR_MAGIC, ERR_NEVER +from mypyc.ir.rtypes import ( + bool_rprimitive, + c_int_rprimitive, + c_pyssize_t_rprimitive, + c_size_t_rprimitive, + int_rprimitive, + object_pointer_rprimitive, + object_rprimitive, + pointer_rprimitive, +) +from mypyc.primitives.registry import ( + ERR_NEG_INT, + binary_op, + custom_op, + custom_primitive_op, + function_op, + method_op, + unary_op, +) + +# Binary operations + +for op, opid in [ + ("==", 2), # PY_EQ + ("!=", 3), # PY_NE + ("<", 0), # PY_LT + ("<=", 1), # PY_LE + (">", 4), # PY_GT + (">=", 5), +]: # PY_GE + # The result type is 'object' since that's what PyObject_RichCompare returns. 
+ binary_op( + name=op, + arg_types=[object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="PyObject_RichCompare", + error_kind=ERR_MAGIC, + extra_int_constants=[(opid, c_int_rprimitive)], + priority=0, + ) + +for op, funcname in [ + ("+", "PyNumber_Add"), + ("-", "PyNumber_Subtract"), + ("*", "PyNumber_Multiply"), + ("//", "PyNumber_FloorDivide"), + ("/", "PyNumber_TrueDivide"), + ("%", "PyNumber_Remainder"), + ("<<", "PyNumber_Lshift"), + (">>", "PyNumber_Rshift"), + ("&", "PyNumber_And"), + ("^", "PyNumber_Xor"), + ("|", "PyNumber_Or"), + ("@", "PyNumber_MatrixMultiply"), +]: + binary_op( + name=op, + arg_types=[object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name=funcname, + error_kind=ERR_MAGIC, + priority=0, + ) + + +function_op( + name="builtins.divmod", + arg_types=[object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="PyNumber_Divmod", + error_kind=ERR_MAGIC, + priority=0, +) + + +for op, funcname in [ + ("+=", "PyNumber_InPlaceAdd"), + ("-=", "PyNumber_InPlaceSubtract"), + ("*=", "PyNumber_InPlaceMultiply"), + ("@=", "PyNumber_InPlaceMatrixMultiply"), + ("//=", "PyNumber_InPlaceFloorDivide"), + ("/=", "PyNumber_InPlaceTrueDivide"), + ("%=", "PyNumber_InPlaceRemainder"), + ("<<=", "PyNumber_InPlaceLshift"), + (">>=", "PyNumber_InPlaceRshift"), + ("&=", "PyNumber_InPlaceAnd"), + ("^=", "PyNumber_InPlaceXor"), + ("|=", "PyNumber_InPlaceOr"), +]: + binary_op( + name=op, + arg_types=[object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name=funcname, + error_kind=ERR_MAGIC, + priority=0, + ) + +for op, c_function in (("**", "CPyNumber_Power"), ("**=", "CPyNumber_InPlacePower")): + binary_op( + name=op, + arg_types=[object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + error_kind=ERR_MAGIC, + c_function_name=c_function, + priority=0, + ) + +for arg_count, c_function in ((2, "CPyNumber_Power"), (3, "PyNumber_Power")): + function_op( + name="builtins.pow", + arg_types=[object_rprimitive] * arg_count, + return_type=object_rprimitive, + error_kind=ERR_MAGIC, + c_function_name=c_function, + priority=0, + ) + +binary_op( + name="in", + arg_types=[object_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PySequence_Contains", + error_kind=ERR_NEG_INT, + truncated_type=bool_rprimitive, + ordering=[1, 0], + priority=0, +) + + +# Unary operations + +for op, funcname in [ + ("-", "PyNumber_Negative"), + ("+", "PyNumber_Positive"), + ("~", "PyNumber_Invert"), +]: + unary_op( + name=op, + arg_type=object_rprimitive, + return_type=object_rprimitive, + c_function_name=funcname, + error_kind=ERR_MAGIC, + priority=0, + ) + +unary_op( + name="not", + arg_type=object_rprimitive, + return_type=c_int_rprimitive, + c_function_name="PyObject_Not", + error_kind=ERR_NEG_INT, + truncated_type=bool_rprimitive, + priority=0, +) + +# abs(obj) +function_op( + name="builtins.abs", + arg_types=[object_rprimitive], + return_type=object_rprimitive, + c_function_name="PyNumber_Absolute", + error_kind=ERR_MAGIC, + priority=0, +) + +# obj1[obj2] +py_get_item_op = method_op( + name="__getitem__", + arg_types=[object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="PyObject_GetItem", + error_kind=ERR_MAGIC, + priority=0, +) + +# obj1[obj2] = obj3 +method_op( + name="__setitem__", + arg_types=[object_rprimitive, object_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + 
c_function_name="PyObject_SetItem", + error_kind=ERR_NEG_INT, + priority=0, +) + +# del obj1[obj2] +method_op( + name="__delitem__", + arg_types=[object_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyObject_DelItem", + error_kind=ERR_NEG_INT, + priority=0, +) + +# hash(obj) +function_op( + name="builtins.hash", + arg_types=[object_rprimitive], + return_type=int_rprimitive, + c_function_name="CPyObject_Hash", + error_kind=ERR_MAGIC, +) + +# getattr(obj, attr) +py_getattr_op = function_op( + name="builtins.getattr", + arg_types=[object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyObject_GetAttr", + error_kind=ERR_MAGIC, +) + +# getattr(obj, attr, default) +function_op( + name="builtins.getattr", + arg_types=[object_rprimitive, object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyObject_GetAttr3", + error_kind=ERR_MAGIC, +) + +# setattr(obj, attr, value) +py_setattr_op = function_op( + name="builtins.setattr", + arg_types=[object_rprimitive, object_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyObject_SetAttr", + error_kind=ERR_NEG_INT, +) + +# hasattr(obj, attr) +py_hasattr_op = function_op( + name="builtins.hasattr", + arg_types=[object_rprimitive, object_rprimitive], + return_type=bool_rprimitive, + c_function_name="PyObject_HasAttr", + error_kind=ERR_NEVER, +) + +# del obj.attr +py_delattr_op = function_op( + name="builtins.delattr", + arg_types=[object_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyObject_DelAttr", + error_kind=ERR_NEG_INT, +) + +# Call callable object with N positional arguments: func(arg1, ..., argN) +# Arguments are (func, arg1, ..., argN). +py_call_op = custom_op( + arg_types=[], + return_type=object_rprimitive, + c_function_name="PyObject_CallFunctionObjArgs", + error_kind=ERR_MAGIC, + var_arg_type=object_rprimitive, + extra_int_constants=[(0, pointer_rprimitive)], +) + +# Call callable object using positional and/or keyword arguments (Python 3.8+) +py_vectorcall_op = custom_op( + arg_types=[ + object_rprimitive, # Callable + object_pointer_rprimitive, # Args (PyObject **) + c_size_t_rprimitive, # Number of positional args + object_rprimitive, + ], # Keyword arg names tuple (or NULL) + return_type=object_rprimitive, + c_function_name="PyObject_Vectorcall", + error_kind=ERR_MAGIC, +) + +# Call method using positional and/or keyword arguments (Python 3.9+) +py_vectorcall_method_op = custom_op( + arg_types=[ + object_rprimitive, # Method name + object_pointer_rprimitive, # Args, including self (PyObject **) + c_size_t_rprimitive, # Number of positional args, including self + object_rprimitive, + ], # Keyword arg names tuple (or NULL) + return_type=object_rprimitive, + c_function_name="PyObject_VectorcallMethod", + error_kind=ERR_MAGIC, +) + +# Call callable object with positional + keyword args: func(*args, **kwargs) +# Arguments are (func, *args tuple, **kwargs dict). +py_call_with_kwargs_op = custom_op( + arg_types=[object_rprimitive, object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="PyObject_Call", + error_kind=ERR_MAGIC, +) + +# Call callable object with positional args only: func(*args) +# Arguments are (func, *args tuple). 
+py_call_with_posargs_op = custom_op( + arg_types=[object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="PyObject_CallObject", + error_kind=ERR_MAGIC, +) + +# Call method with positional arguments: obj.method(arg1, ...) +# Arguments are (object, attribute name, arg1, ...). +py_method_call_op = custom_op( + arg_types=[], + return_type=object_rprimitive, + c_function_name="CPyObject_CallMethodObjArgs", + error_kind=ERR_MAGIC, + var_arg_type=object_rprimitive, + extra_int_constants=[(0, pointer_rprimitive)], +) + +# len(obj) +generic_len_op = custom_op( + arg_types=[object_rprimitive], + return_type=int_rprimitive, + c_function_name="CPyObject_Size", + error_kind=ERR_MAGIC, +) + +# len(obj) +# same as generic_len_op, however return py_ssize_t +generic_ssize_t_len_op = custom_op( + arg_types=[object_rprimitive], + return_type=c_pyssize_t_rprimitive, + c_function_name="PyObject_Size", + error_kind=ERR_NEG_INT, +) + +# iter(obj) +iter_op = function_op( + name="builtins.iter", + arg_types=[object_rprimitive], + return_type=object_rprimitive, + c_function_name="PyObject_GetIter", + error_kind=ERR_MAGIC, +) +# next(iterator) +# +# Although the error_kind is set to be ERR_NEVER, this can actually +# return NULL, and thus it must be checked using Branch.IS_ERROR. +next_op = custom_op( + arg_types=[object_rprimitive], + return_type=object_rprimitive, + c_function_name="PyIter_Next", + error_kind=ERR_NEVER, +) +# next(iterator) +# +# Do a next, don't swallow StopIteration, but also don't propagate an +# error. (N.B: This can still return NULL without an error to +# represent an implicit StopIteration, but if StopIteration is +# *explicitly* raised this will not swallow it.) +# Can return NULL: see next_op. +next_raw_op = custom_op( + arg_types=[object_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyIter_Next", + error_kind=ERR_NEVER, +) + +# this would be aiter(obj) if it existed +aiter_op = custom_op( + arg_types=[object_rprimitive], + return_type=object_rprimitive, + c_function_name="CPy_GetAIter", + error_kind=ERR_MAGIC, +) + +# this would be anext(obj) if it existed +anext_op = custom_op( + arg_types=[object_rprimitive], + return_type=object_rprimitive, + c_function_name="CPy_GetANext", + error_kind=ERR_MAGIC, +) + +# x.__name__ (requires Python 3.11+) +name_op = custom_primitive_op( + name="__name__", + arg_types=[object_rprimitive], + return_type=object_rprimitive, + c_function_name="CPy_GetName", + error_kind=ERR_MAGIC, +) + +# look-up name in tp_dict but don't raise AttributeError on failure +generic_getattr = custom_op( + arg_types=[object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyObject_GenericGetAttr", + error_kind=ERR_NEVER, + returns_null=True, +) + +generic_setattr = custom_op( + arg_types=[object_rprimitive, object_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="CPyObject_GenericSetAttr", + error_kind=ERR_NEG_INT, +) + +setup_object = custom_op( + arg_types=[object_rprimitive], + return_type=object_rprimitive, + c_function_name="CPy_SetupObject", + error_kind=ERR_MAGIC, +) diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/int_ops.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/primitives/int_ops.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..b5a1f46 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/primitives/int_ops.cpython-312-x86_64-linux-gnu.so differ diff --git 
a/.venv/lib/python3.12/site-packages/mypyc/primitives/int_ops.py b/.venv/lib/python3.12/site-packages/mypyc/primitives/int_ops.py new file mode 100644 index 0000000..8f43140 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/primitives/int_ops.py @@ -0,0 +1,323 @@ +"""Arbitrary-precision integer primitive ops. + +These mostly operate on (usually) unboxed integers that use a tagged pointer +representation (CPyTagged) and correspond to the Python 'int' type. + +See also the documentation for mypyc.rtypes.int_rprimitive. + +Use mypyc.ir.ops.IntOp for operations on fixed-width/C integers. +""" + +from __future__ import annotations + +from mypyc.ir.ops import ( + ERR_ALWAYS, + ERR_MAGIC, + ERR_MAGIC_OVERLAPPING, + ERR_NEVER, + PrimitiveDescription, +) +from mypyc.ir.rtypes import ( + RType, + bit_rprimitive, + bool_rprimitive, + c_pyssize_t_rprimitive, + float_rprimitive, + int16_rprimitive, + int32_rprimitive, + int64_rprimitive, + int_rprimitive, + object_rprimitive, + str_rprimitive, + void_rtype, +) +from mypyc.primitives.registry import ( + binary_op, + custom_op, + function_op, + load_address_op, + method_op, + unary_op, +) + +# Constructors for builtins.int and native int types have the same behavior. In +# interpreted mode, native int types are just aliases to 'int'. +for int_name in ( + "builtins.int", + "mypy_extensions.i64", + "mypy_extensions.i32", + "mypy_extensions.i16", + "mypy_extensions.u8", +): + # These int constructors produce object_rprimitives that then need to be unboxed + # I guess unboxing ourselves would save a check and branch though? + + # Get the type object for 'builtins.int' or a native int type. + # For ordinary calls to int() we use a load_address to the type. + # Native ints don't have a separate type object -- we just use 'builtins.int'. + load_address_op(name=int_name, type=object_rprimitive, src="PyLong_Type") + + # int(float). We could do a bit better directly. + function_op( + name=int_name, + arg_types=[float_rprimitive], + return_type=int_rprimitive, + c_function_name="CPyTagged_FromFloat", + error_kind=ERR_MAGIC, + ) + + # int(string) + function_op( + name=int_name, + arg_types=[str_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyLong_FromStr", + error_kind=ERR_MAGIC, + ) + + # int(string, base) + function_op( + name=int_name, + arg_types=[str_rprimitive, int_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyLong_FromStrWithBase", + error_kind=ERR_MAGIC, + ) + +for name in ("builtins.str", "builtins.repr"): + # str(int) and repr(int) + int_to_str_op = function_op( + name=name, + arg_types=[int_rprimitive], + return_type=str_rprimitive, + c_function_name="CPyTagged_Str", + error_kind=ERR_MAGIC, + priority=2, + ) + # We need a specialization for str on bools also since the int one is wrong... 
+ function_op( + name=name, + arg_types=[bool_rprimitive], + return_type=str_rprimitive, + c_function_name="CPyBool_Str", + error_kind=ERR_MAGIC, + priority=3, + ) + + +def int_binary_primitive( + op: str, primitive_name: str, return_type: RType = int_rprimitive, error_kind: int = ERR_NEVER +) -> PrimitiveDescription: + return binary_op( + name=op, + arg_types=[int_rprimitive, int_rprimitive], + return_type=return_type, + primitive_name=primitive_name, + error_kind=error_kind, + ) + + +int_eq = int_binary_primitive(op="==", primitive_name="int_eq", return_type=bit_rprimitive) +int_ne = int_binary_primitive(op="!=", primitive_name="int_ne", return_type=bit_rprimitive) +int_lt = int_binary_primitive(op="<", primitive_name="int_lt", return_type=bit_rprimitive) +int_le = int_binary_primitive(op="<=", primitive_name="int_le", return_type=bit_rprimitive) +int_gt = int_binary_primitive(op=">", primitive_name="int_gt", return_type=bit_rprimitive) +int_ge = int_binary_primitive(op=">=", primitive_name="int_ge", return_type=bit_rprimitive) + + +def int_binary_op( + name: str, + c_function_name: str, + return_type: RType = int_rprimitive, + error_kind: int = ERR_NEVER, +) -> None: + binary_op( + name=name, + arg_types=[int_rprimitive, int_rprimitive], + return_type=return_type, + c_function_name=c_function_name, + error_kind=error_kind, + ) + + +# Binary, unary and augmented assignment operations that operate on CPyTagged ints +# are implemented as C functions. + +int_binary_op("+", "CPyTagged_Add") +int_binary_op("-", "CPyTagged_Subtract") +int_binary_op("*", "CPyTagged_Multiply") +int_binary_op("&", "CPyTagged_And") +int_binary_op("|", "CPyTagged_Or") +int_binary_op("^", "CPyTagged_Xor") +# Divide and remainder we honestly propagate errors from because they +# can raise ZeroDivisionError +int_binary_op("//", "CPyTagged_FloorDivide", error_kind=ERR_MAGIC) +int_binary_op("%", "CPyTagged_Remainder", error_kind=ERR_MAGIC) +# Negative shift counts raise an exception +int_binary_op(">>", "CPyTagged_Rshift", error_kind=ERR_MAGIC) +int_binary_op("<<", "CPyTagged_Lshift", error_kind=ERR_MAGIC) + +int_binary_op( + "/", "CPyTagged_TrueDivide", return_type=float_rprimitive, error_kind=ERR_MAGIC_OVERLAPPING +) + +# This should work because assignment operators are parsed differently +# and the code in irbuild that handles it does the assignment +# regardless of whether or not the operator works in place anyway. 
+int_binary_op("+=", "CPyTagged_Add") +int_binary_op("-=", "CPyTagged_Subtract") +int_binary_op("*=", "CPyTagged_Multiply") +int_binary_op("&=", "CPyTagged_And") +int_binary_op("|=", "CPyTagged_Or") +int_binary_op("^=", "CPyTagged_Xor") +int_binary_op("//=", "CPyTagged_FloorDivide", error_kind=ERR_MAGIC) +int_binary_op("%=", "CPyTagged_Remainder", error_kind=ERR_MAGIC) +int_binary_op(">>=", "CPyTagged_Rshift", error_kind=ERR_MAGIC) +int_binary_op("<<=", "CPyTagged_Lshift", error_kind=ERR_MAGIC) + + +def int_unary_op(name: str, c_function_name: str) -> PrimitiveDescription: + return unary_op( + name=name, + arg_type=int_rprimitive, + return_type=int_rprimitive, + c_function_name=c_function_name, + error_kind=ERR_NEVER, + ) + + +int_neg_op = int_unary_op("-", "CPyTagged_Negate") +int_invert_op = int_unary_op("~", "CPyTagged_Invert") + + +# Primitives related to integer comparison operations: + + +# Equals operation on two boxed tagged integers +int_equal_ = custom_op( + arg_types=[int_rprimitive, int_rprimitive], + return_type=bit_rprimitive, + c_function_name="CPyTagged_IsEq_", + error_kind=ERR_NEVER, + is_pure=True, +) + +# Less than operation on two boxed tagged integers +int_less_than_ = custom_op( + arg_types=[int_rprimitive, int_rprimitive], + return_type=bit_rprimitive, + c_function_name="CPyTagged_IsLt_", + error_kind=ERR_NEVER, + is_pure=True, +) + +int64_divide_op = custom_op( + arg_types=[int64_rprimitive, int64_rprimitive], + return_type=int64_rprimitive, + c_function_name="CPyInt64_Divide", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +int64_mod_op = custom_op( + arg_types=[int64_rprimitive, int64_rprimitive], + return_type=int64_rprimitive, + c_function_name="CPyInt64_Remainder", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +int32_divide_op = custom_op( + arg_types=[int32_rprimitive, int32_rprimitive], + return_type=int32_rprimitive, + c_function_name="CPyInt32_Divide", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +int32_mod_op = custom_op( + arg_types=[int32_rprimitive, int32_rprimitive], + return_type=int32_rprimitive, + c_function_name="CPyInt32_Remainder", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +int16_divide_op = custom_op( + arg_types=[int16_rprimitive, int16_rprimitive], + return_type=int16_rprimitive, + c_function_name="CPyInt16_Divide", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +int16_mod_op = custom_op( + arg_types=[int16_rprimitive, int16_rprimitive], + return_type=int16_rprimitive, + c_function_name="CPyInt16_Remainder", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +# Convert tagged int (as PyObject *) to i64 +int_to_int64_op = custom_op( + arg_types=[object_rprimitive], + return_type=int64_rprimitive, + c_function_name="CPyLong_AsInt64", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +ssize_t_to_int_op = custom_op( + arg_types=[c_pyssize_t_rprimitive], + return_type=int_rprimitive, + c_function_name="CPyTagged_FromSsize_t", + error_kind=ERR_MAGIC, +) + +int64_to_int_op = custom_op( + arg_types=[int64_rprimitive], + return_type=int_rprimitive, + c_function_name="CPyTagged_FromInt64", + error_kind=ERR_MAGIC, +) + +# Convert tagged int (as PyObject *) to i32 +int_to_int32_op = custom_op( + arg_types=[object_rprimitive], + return_type=int32_rprimitive, + c_function_name="CPyLong_AsInt32", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +int32_overflow = custom_op( + arg_types=[], + return_type=void_rtype, + c_function_name="CPyInt32_Overflow", + error_kind=ERR_ALWAYS, +) + +int16_overflow = custom_op( + arg_types=[], + return_type=void_rtype, + c_function_name="CPyInt16_Overflow", + 
error_kind=ERR_ALWAYS,
+)
+
+uint8_overflow = custom_op(
+    arg_types=[],
+    return_type=void_rtype,
+    c_function_name="CPyUInt8_Overflow",
+    error_kind=ERR_ALWAYS,
+)
+
+# translate isinstance(obj, int)
+isinstance_int = function_op(
+    name="builtins.isinstance",
+    arg_types=[object_rprimitive],
+    return_type=bit_rprimitive,
+    c_function_name="PyLong_Check",
+    error_kind=ERR_NEVER,
+)
+
+# int.bit_length()
+method_op(
+    name="bit_length",
+    arg_types=[int_rprimitive],
+    return_type=int_rprimitive,
+    c_function_name="CPyTagged_BitLength",
+    error_kind=ERR_MAGIC,
+)
diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/list_ops.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/primitives/list_ops.cpython-312-x86_64-linux-gnu.so
new file mode 100755
index 0000000..640d6a5
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/primitives/list_ops.cpython-312-x86_64-linux-gnu.so differ
diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/list_ops.py b/.venv/lib/python3.12/site-packages/mypyc/primitives/list_ops.py
new file mode 100644
index 0000000..b9d20a2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/mypyc/primitives/list_ops.py
@@ -0,0 +1,374 @@
+"""List primitive ops."""
+
+from __future__ import annotations
+
+from mypyc.ir.ops import ERR_FALSE, ERR_MAGIC, ERR_NEVER
+from mypyc.ir.rtypes import (
+    bit_rprimitive,
+    c_int_rprimitive,
+    c_pyssize_t_rprimitive,
+    int64_rprimitive,
+    int_rprimitive,
+    list_rprimitive,
+    object_rprimitive,
+    pointer_rprimitive,
+    short_int_rprimitive,
+    void_rtype,
+)
+from mypyc.primitives.registry import (
+    ERR_NEG_INT,
+    binary_op,
+    custom_op,
+    custom_primitive_op,
+    function_op,
+    load_address_op,
+    method_op,
+)
+
+# Get the 'builtins.list' type object.
+load_address_op(name="builtins.list", type=object_rprimitive, src="PyList_Type")
+
+# sorted(obj)
+function_op(
+    name="builtins.sorted",
+    arg_types=[object_rprimitive],
+    return_type=list_rprimitive,
+    c_function_name="CPySequence_Sort",
+    error_kind=ERR_MAGIC,
+)
+
+# list(obj)
+to_list = function_op(
+    name="builtins.list",
+    arg_types=[object_rprimitive],
+    return_type=list_rprimitive,
+    c_function_name="PySequence_List",
+    error_kind=ERR_MAGIC,
+)
+
+# Construct an empty list via list(). 
+function_op( + name="builtins.list", + arg_types=[], + return_type=list_rprimitive, + c_function_name="PyList_New", + error_kind=ERR_MAGIC, + extra_int_constants=[(0, int_rprimitive)], +) + +# translate isinstance(obj, list) +isinstance_list = function_op( + name="builtins.isinstance", + arg_types=[object_rprimitive], + return_type=bit_rprimitive, + c_function_name="PyList_Check", + error_kind=ERR_NEVER, +) + +new_list_op = custom_op( + arg_types=[c_pyssize_t_rprimitive], + return_type=list_rprimitive, + c_function_name="PyList_New", + error_kind=ERR_MAGIC, +) + +list_build_op = custom_op( + arg_types=[c_pyssize_t_rprimitive], + return_type=list_rprimitive, + c_function_name="CPyList_Build", + error_kind=ERR_MAGIC, + var_arg_type=object_rprimitive, + steals=True, +) + +# Get pointer to list items (ob_item PyListObject field) +list_items = custom_primitive_op( + name="list_items", + arg_types=[list_rprimitive], + return_type=pointer_rprimitive, + error_kind=ERR_NEVER, +) + +# list[index] (for an integer index) +list_get_item_op = method_op( + name="__getitem__", + arg_types=[list_rprimitive, int_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyList_GetItem", + error_kind=ERR_MAGIC, +) + +# list[index] version with no int tag check for when it is known to be short +method_op( + name="__getitem__", + arg_types=[list_rprimitive, short_int_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyList_GetItemShort", + error_kind=ERR_MAGIC, + priority=2, +) + +# list[index] that produces a borrowed result +method_op( + name="__getitem__", + arg_types=[list_rprimitive, int_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyList_GetItemBorrow", + error_kind=ERR_MAGIC, + is_borrowed=True, + priority=3, +) + +# list[index] that produces a borrowed result and index is known to be short +method_op( + name="__getitem__", + arg_types=[list_rprimitive, short_int_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyList_GetItemShortBorrow", + error_kind=ERR_MAGIC, + is_borrowed=True, + priority=4, +) + +# Version with native int index +method_op( + name="__getitem__", + arg_types=[list_rprimitive, int64_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyList_GetItemInt64", + error_kind=ERR_MAGIC, + priority=5, +) + +# Version with native int index +method_op( + name="__getitem__", + arg_types=[list_rprimitive, int64_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyList_GetItemInt64Borrow", + is_borrowed=True, + error_kind=ERR_MAGIC, + priority=6, +) + +# This is unsafe because it assumes that the index is a non-negative short integer +# that is in-bounds for the list. +list_get_item_unsafe_op = custom_primitive_op( + name="list_get_item_unsafe", + arg_types=[list_rprimitive, c_pyssize_t_rprimitive], + return_type=object_rprimitive, + error_kind=ERR_NEVER, +) + +# list[index] = obj +list_set_item_op = method_op( + name="__setitem__", + arg_types=[list_rprimitive, int_rprimitive, object_rprimitive], + return_type=bit_rprimitive, + c_function_name="CPyList_SetItem", + error_kind=ERR_FALSE, + steals=[False, False, True], +) + +# list[index_i64] = obj +method_op( + name="__setitem__", + arg_types=[list_rprimitive, int64_rprimitive, object_rprimitive], + return_type=bit_rprimitive, + c_function_name="CPyList_SetItemInt64", + error_kind=ERR_FALSE, + steals=[False, False, True], + priority=2, +) + +# PyList_SET_ITEM does no error checking, +# and should only be used to fill in brand new lists. 
+new_list_set_item_op = custom_op( + arg_types=[list_rprimitive, c_pyssize_t_rprimitive, object_rprimitive], + return_type=void_rtype, + c_function_name="CPyList_SetItemUnsafe", + error_kind=ERR_NEVER, + steals=[False, False, True], +) + +# list.append(obj) +list_append_op = method_op( + name="append", + arg_types=[list_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyList_Append", + error_kind=ERR_NEG_INT, +) + +# list.extend(obj) +list_extend_op = method_op( + name="extend", + arg_types=[list_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyList_Extend", + error_kind=ERR_MAGIC, +) + +# list.pop() +list_pop_last = method_op( + name="pop", + arg_types=[list_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyList_PopLast", + error_kind=ERR_MAGIC, +) + +# list.pop(index) +method_op( + name="pop", + arg_types=[list_rprimitive, int_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyList_Pop", + error_kind=ERR_MAGIC, +) + +# list.count(obj) +method_op( + name="count", + arg_types=[list_rprimitive, object_rprimitive], + return_type=short_int_rprimitive, + c_function_name="CPyList_Count", + error_kind=ERR_MAGIC, +) + +# list.insert(index, obj) +method_op( + name="insert", + arg_types=[list_rprimitive, int_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="CPyList_Insert", + error_kind=ERR_NEG_INT, +) + +# list.sort() +method_op( + name="sort", + arg_types=[list_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyList_Sort", + error_kind=ERR_NEG_INT, +) + +# list.reverse() +method_op( + name="reverse", + arg_types=[list_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyList_Reverse", + error_kind=ERR_NEG_INT, +) + +# list.remove(obj) +method_op( + name="remove", + arg_types=[list_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="CPyList_Remove", + error_kind=ERR_NEG_INT, +) + +# list.index(obj) +method_op( + name="index", + arg_types=[list_rprimitive, object_rprimitive], + return_type=int_rprimitive, + c_function_name="CPyList_Index", + error_kind=ERR_MAGIC, +) + +# list.clear() +method_op( + name="clear", + arg_types=[list_rprimitive], + return_type=bit_rprimitive, + c_function_name="CPyList_Clear", + error_kind=ERR_FALSE, +) + +# list.copy() +method_op( + name="copy", + arg_types=[list_rprimitive], + return_type=list_rprimitive, + c_function_name="CPyList_Copy", + error_kind=ERR_MAGIC, +) + +# list + list +binary_op( + name="+", + arg_types=[list_rprimitive, list_rprimitive], + return_type=list_rprimitive, + c_function_name="PySequence_Concat", + error_kind=ERR_MAGIC, +) + +# list += list +binary_op( + name="+=", + arg_types=[list_rprimitive, object_rprimitive], + return_type=list_rprimitive, + c_function_name="PySequence_InPlaceConcat", + error_kind=ERR_MAGIC, +) + +# list * int +binary_op( + name="*", + arg_types=[list_rprimitive, int_rprimitive], + return_type=list_rprimitive, + c_function_name="CPySequence_Multiply", + error_kind=ERR_MAGIC, +) + +# int * list +binary_op( + name="*", + arg_types=[int_rprimitive, list_rprimitive], + return_type=list_rprimitive, + c_function_name="CPySequence_RMultiply", + error_kind=ERR_MAGIC, +) + +# list *= int +binary_op( + name="*=", + arg_types=[list_rprimitive, int_rprimitive], + return_type=list_rprimitive, + c_function_name="CPySequence_InPlaceMultiply", + error_kind=ERR_MAGIC, +) + +# list[begin:end] +list_slice_op = custom_op( + 
arg_types=[list_rprimitive, int_rprimitive, int_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyList_GetSlice", + error_kind=ERR_MAGIC, +) + +supports_sequence_protocol = custom_op( + arg_types=[object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="CPySequence_Check", + error_kind=ERR_NEVER, +) + +sequence_get_item = custom_op( + arg_types=[object_rprimitive, c_pyssize_t_rprimitive], + return_type=object_rprimitive, + c_function_name="PySequence_GetItem", + error_kind=ERR_NEVER, +) + +sequence_get_slice = custom_op( + arg_types=[object_rprimitive, c_pyssize_t_rprimitive, c_pyssize_t_rprimitive], + return_type=object_rprimitive, + c_function_name="PySequence_GetSlice", + error_kind=ERR_MAGIC, +) diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/misc_ops.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/primitives/misc_ops.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..5bb1be5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/primitives/misc_ops.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/misc_ops.py b/.venv/lib/python3.12/site-packages/mypyc/primitives/misc_ops.py new file mode 100644 index 0000000..bb225a7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/primitives/misc_ops.py @@ -0,0 +1,477 @@ +"""Miscellaneous primitive ops.""" + +from __future__ import annotations + +from mypyc.ir.ops import ERR_FALSE, ERR_MAGIC, ERR_MAGIC_OVERLAPPING, ERR_NEVER +from mypyc.ir.rtypes import ( + KNOWN_NATIVE_TYPES, + bit_rprimitive, + bool_rprimitive, + bytes_rprimitive, + c_int_rprimitive, + c_pointer_rprimitive, + c_pyssize_t_rprimitive, + cstring_rprimitive, + dict_rprimitive, + float_rprimitive, + int_rprimitive, + none_rprimitive, + object_pointer_rprimitive, + object_rprimitive, + pointer_rprimitive, + str_rprimitive, + uint8_rprimitive, + void_rtype, +) +from mypyc.primitives.registry import ( + ERR_NEG_INT, + custom_op, + custom_primitive_op, + function_op, + load_address_op, + method_op, +) + +# Get the 'bool' type object. +load_address_op(name="builtins.bool", type=object_rprimitive, src="PyBool_Type") + +# Get the 'range' type object. +load_address_op(name="builtins.range", type=object_rprimitive, src="PyRange_Type") + +# Get the boxed Python 'None' object +none_object_op = load_address_op(name="Py_None", type=object_rprimitive, src="_Py_NoneStruct") + +# Get the boxed object '...' +ellipsis_op = load_address_op(name="...", type=object_rprimitive, src="_Py_EllipsisObject") + +# Get the boxed NotImplemented object +not_implemented_op = load_address_op( + name="builtins.NotImplemented", type=object_rprimitive, src="_Py_NotImplementedStruct" +) + +# Get the boxed StopAsyncIteration object +stop_async_iteration_op = load_address_op( + name="builtins.StopAsyncIteration", type=object_rprimitive, src="PyExc_StopAsyncIteration" +) + +# id(obj) +function_op( + name="builtins.id", + arg_types=[object_rprimitive], + return_type=int_rprimitive, + c_function_name="CPyTagged_Id", + error_kind=ERR_NEVER, +) + +# Return the result of obj.__await()__ or obj.__iter__() (if no __await__ exists) +coro_op = custom_op( + arg_types=[object_rprimitive], + return_type=object_rprimitive, + c_function_name="CPy_GetCoro", + error_kind=ERR_MAGIC, +) + +# Do obj.send(value), or a next(obj) if second arg is None. +# (This behavior is to match the PEP 380 spec for yield from.) 
+# Like next_raw_op, don't swallow StopIteration, +# but also don't propagate an error. +# Can return NULL: see next_op. +send_op = custom_op( + arg_types=[object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyIter_Send", + error_kind=ERR_NEVER, +) + +# This is sort of unfortunate but oh well: yield_from_except performs most of the +# error handling logic in `yield from` operations. It returns a bool and passes +# a value by address. +# If the bool is true, then a StopIteration was received and we should return. +# If the bool is false, then the value should be yielded. +# The normal case is probably that it signals an exception, which gets +# propagated. +# Op used for "yield from" error handling. +# See comment in CPy_YieldFromErrorHandle for more information. +yield_from_except_op = custom_op( + arg_types=[object_rprimitive, object_pointer_rprimitive], + return_type=bool_rprimitive, + c_function_name="CPy_YieldFromErrorHandle", + error_kind=ERR_MAGIC, +) + +# Create method object from a callable object and self. +method_new_op = custom_op( + arg_types=[object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="PyMethod_New", + error_kind=ERR_MAGIC, +) + +# Check if the current exception is a StopIteration and return its value if so. +# Treats "no exception" as StopIteration with a None value. +# If it is a different exception, re-reraise it. +check_stop_op = custom_op( + arg_types=[], + return_type=object_rprimitive, + c_function_name="CPy_FetchStopIterationValue", + error_kind=ERR_MAGIC, +) + +# Determine the most derived metaclass and check for metaclass conflicts. +# Arguments are (metaclass, bases). +py_calc_meta_op = custom_op( + arg_types=[object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="CPy_CalculateMetaclass", + error_kind=ERR_MAGIC, + is_borrowed=True, +) + +# Import a module (plain) +import_op = custom_op( + arg_types=[str_rprimitive], + return_type=object_rprimitive, + c_function_name="PyImport_Import", + error_kind=ERR_MAGIC, +) + +# Table-driven import op. +import_many_op = custom_op( + arg_types=[ + object_rprimitive, + c_pointer_rprimitive, + object_rprimitive, + object_rprimitive, + object_rprimitive, + c_pointer_rprimitive, + ], + return_type=bit_rprimitive, + c_function_name="CPyImport_ImportMany", + error_kind=ERR_FALSE, +) + +# From import helper op +import_from_many_op = custom_op( + arg_types=[object_rprimitive, object_rprimitive, object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyImport_ImportFromMany", + error_kind=ERR_MAGIC, +) + +# Get the sys.modules dictionary +get_module_dict_op = custom_op( + arg_types=[], + return_type=dict_rprimitive, + c_function_name="PyImport_GetModuleDict", + error_kind=ERR_NEVER, + is_borrowed=True, +) + +# isinstance(obj, cls) +slow_isinstance_op = function_op( + name="builtins.isinstance", + arg_types=[object_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyObject_IsInstance", + error_kind=ERR_NEG_INT, + truncated_type=bool_rprimitive, +) + +# Faster isinstance(obj, cls) that only works with native classes and doesn't perform +# type checking of the type argument. 
+fast_isinstance_op = function_op( + "builtins.isinstance", + arg_types=[object_rprimitive, object_rprimitive], + return_type=bool_rprimitive, + c_function_name="CPy_TypeCheck", + error_kind=ERR_NEVER, + priority=0, +) + +# bool(obj) with unboxed result +bool_op = function_op( + name="builtins.bool", + arg_types=[object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyObject_IsTrue", + error_kind=ERR_NEG_INT, + truncated_type=bool_rprimitive, +) + +# isinstance(obj, bool) +isinstance_bool = function_op( + name="builtins.isinstance", + arg_types=[object_rprimitive], + return_type=bit_rprimitive, + c_function_name="PyBool_Check", + error_kind=ERR_NEVER, +) + +# slice(start, stop, step) +new_slice_op = function_op( + name="builtins.slice", + arg_types=[object_rprimitive, object_rprimitive, object_rprimitive], + c_function_name="PySlice_New", + return_type=object_rprimitive, + error_kind=ERR_MAGIC, +) + +# type(obj) +type_op = function_op( + name="builtins.type", + arg_types=[object_rprimitive], + c_function_name="CPy_TYPE", + return_type=object_rprimitive, + error_kind=ERR_NEVER, +) + +# Get 'builtins.type' (base class of all classes) +type_object_op = load_address_op(name="builtins.type", type=object_rprimitive, src="PyType_Type") + +# Create a heap type based on a template non-heap type. +# See CPyType_FromTemplate for more docs. +pytype_from_template_op = custom_op( + arg_types=[object_rprimitive, object_rprimitive, str_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyType_FromTemplate", + error_kind=ERR_MAGIC, +) + +# Create a dataclass from an extension class. See +# CPyDataclass_SleightOfHand for more docs. +dataclass_sleight_of_hand = custom_op( + arg_types=[ + object_rprimitive, + object_rprimitive, + dict_rprimitive, + dict_rprimitive, + str_rprimitive, + ], + return_type=bit_rprimitive, + c_function_name="CPyDataclass_SleightOfHand", + error_kind=ERR_FALSE, +) + +# Raise ValueError if length of first argument is not equal to the second argument. +# The first argument must be a list or a variable-length tuple. +check_unpack_count_op = custom_op( + arg_types=[object_rprimitive, c_pyssize_t_rprimitive], + return_type=c_int_rprimitive, + c_function_name="CPySequence_CheckUnpackCount", + error_kind=ERR_NEG_INT, +) + + +# Register an implementation for a singledispatch function +register_function = custom_op( + arg_types=[object_rprimitive, object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="CPySingledispatch_RegisterFunction", + error_kind=ERR_MAGIC, +) + + +# Initialize a PyObject * item in a memory buffer (steal the value) +buf_init_item = custom_primitive_op( + name="buf_init_item", + arg_types=[pointer_rprimitive, c_pyssize_t_rprimitive, object_rprimitive], + return_type=void_rtype, + error_kind=ERR_NEVER, + steals=[False, False, True], +) + +# Get length of PyVarObject instance (e.g. list or tuple) +var_object_size = custom_primitive_op( + name="var_object_size", + arg_types=[object_rprimitive], + return_type=c_pyssize_t_rprimitive, + error_kind=ERR_NEVER, +) + +# Set the lazy value compute function of an TypeAliasType instance (Python 3.12+). +# This must only be used as part of initializing the object. Any existing value +# will be cleared. 
+set_type_alias_compute_function_op = custom_primitive_op( + name="set_type_alias_compute_function", + c_function_name="CPy_SetTypeAliasTypeComputeFunction", + # (alias object, value compute function) + arg_types=[object_rprimitive, object_rprimitive], + return_type=void_rtype, + error_kind=ERR_NEVER, +) + +debug_print_op = custom_primitive_op( + name="debug_print", + c_function_name="CPyDebug_PrintObject", + arg_types=[object_rprimitive], + return_type=void_rtype, + error_kind=ERR_NEVER, +) + +# Log an event to a trace log, which is written to a file during execution. +log_trace_event = custom_primitive_op( + name="log_trace_event", + c_function_name="CPyTrace_LogEvent", + # (fullname of function/location, line number, operation name, operation details) + arg_types=[cstring_rprimitive, cstring_rprimitive, cstring_rprimitive, cstring_rprimitive], + return_type=void_rtype, + error_kind=ERR_NEVER, +) + +# Mark object as immortal -- it won't be freed via reference counting, as +# the reference count won't be updated any longer. Immortal objects support +# fast concurrent read-only access from multiple threads when using free +# threading, since this eliminates contention from concurrent reference count +# updates. +# +# Needs at least Python 3.14. +set_immortal_op = custom_primitive_op( + name="set_immmortal", + c_function_name="CPy_SetImmortal", + arg_types=[object_rprimitive], + return_type=void_rtype, + error_kind=ERR_NEVER, +) + +write_buffer_rprimitive = KNOWN_NATIVE_TYPES["librt.internal.WriteBuffer"] +read_buffer_rprimitive = KNOWN_NATIVE_TYPES["librt.internal.ReadBuffer"] + +# ReadBuffer(source) +function_op( + name="librt.internal.ReadBuffer", + arg_types=[bytes_rprimitive], + return_type=read_buffer_rprimitive, + c_function_name="ReadBuffer_internal", + error_kind=ERR_MAGIC, +) + +# WriteBuffer() +function_op( + name="librt.internal.WriteBuffer", + arg_types=[], + return_type=write_buffer_rprimitive, + c_function_name="WriteBuffer_internal", + error_kind=ERR_MAGIC, +) + +method_op( + name="getvalue", + arg_types=[write_buffer_rprimitive], + return_type=bytes_rprimitive, + c_function_name="WriteBuffer_getvalue_internal", + error_kind=ERR_MAGIC, +) + +function_op( + name="librt.internal.write_bool", + arg_types=[object_rprimitive, bool_rprimitive], + return_type=none_rprimitive, + c_function_name="write_bool_internal", + error_kind=ERR_MAGIC, +) + +function_op( + name="librt.internal.read_bool", + arg_types=[object_rprimitive], + return_type=bool_rprimitive, + c_function_name="read_bool_internal", + error_kind=ERR_MAGIC, +) + +function_op( + name="librt.internal.write_str", + arg_types=[object_rprimitive, str_rprimitive], + return_type=none_rprimitive, + c_function_name="write_str_internal", + error_kind=ERR_MAGIC, +) + +function_op( + name="librt.internal.read_str", + arg_types=[object_rprimitive], + return_type=str_rprimitive, + c_function_name="read_str_internal", + error_kind=ERR_MAGIC, +) + +function_op( + name="librt.internal.write_bytes", + arg_types=[object_rprimitive, bytes_rprimitive], + return_type=none_rprimitive, + c_function_name="write_bytes_internal", + error_kind=ERR_MAGIC, +) + +function_op( + name="librt.internal.read_bytes", + arg_types=[object_rprimitive], + return_type=bytes_rprimitive, + c_function_name="read_bytes_internal", + error_kind=ERR_MAGIC, +) + +function_op( + name="librt.internal.write_float", + arg_types=[object_rprimitive, float_rprimitive], + return_type=none_rprimitive, + c_function_name="write_float_internal", + error_kind=ERR_MAGIC, +) + 
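The librt.internal read/write ops above all follow the same registration pattern: function_op() maps a fully qualified Python name plus argument types to a C helper, which the IR builder then substitutes for matching calls. As a rough, hypothetical sketch (not part of mypyc and not part of this diff; "mypkg.my_div" and "CPyMyDiv" are invented names), registering a new primitive with the same public helpers could look like this:

# Illustrative sketch only -- not mypyc source. Uses the real registry helpers
# shown in this diff, but with made-up names for the Python-level function and
# its C implementation.
from mypyc.ir.ops import ERR_MAGIC
from mypyc.ir.rtypes import float_rprimitive
from mypyc.primitives.registry import function_op

my_div_op = function_op(
    name="mypkg.my_div",                  # full name matched against call sites in the AST
    arg_types=[float_rprimitive, float_rprimitive],
    return_type=float_rprimitive,
    c_function_name="CPyMyDiv",           # hypothetical C helper that implements the op
    error_kind=ERR_MAGIC,                 # errors are signalled via a magic return value
)
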
+function_op( + name="librt.internal.read_float", + arg_types=[object_rprimitive], + return_type=float_rprimitive, + c_function_name="read_float_internal", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +function_op( + name="librt.internal.write_int", + arg_types=[object_rprimitive, int_rprimitive], + return_type=none_rprimitive, + c_function_name="write_int_internal", + error_kind=ERR_MAGIC, +) + +function_op( + name="librt.internal.read_int", + arg_types=[object_rprimitive], + return_type=int_rprimitive, + c_function_name="read_int_internal", + error_kind=ERR_MAGIC, +) + +function_op( + name="librt.internal.write_tag", + arg_types=[object_rprimitive, uint8_rprimitive], + return_type=none_rprimitive, + c_function_name="write_tag_internal", + error_kind=ERR_MAGIC, +) + +function_op( + name="librt.internal.read_tag", + arg_types=[object_rprimitive], + return_type=uint8_rprimitive, + c_function_name="read_tag_internal", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +function_op( + name="librt.internal.cache_version", + arg_types=[], + return_type=uint8_rprimitive, + c_function_name="cache_version_internal", + error_kind=ERR_NEVER, +) + +function_op( + name="librt.base64.b64encode", + arg_types=[bytes_rprimitive], + return_type=bytes_rprimitive, + c_function_name="LibRTBase64_b64encode_internal", + error_kind=ERR_MAGIC, + experimental=True, + capsule="librt.base64", +) diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/registry.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/primitives/registry.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..e9f9e26 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/primitives/registry.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/registry.py b/.venv/lib/python3.12/site-packages/mypyc/primitives/registry.py new file mode 100644 index 0000000..2f66b19 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/primitives/registry.py @@ -0,0 +1,396 @@ +"""Utilities for defining primitive ops. + +Most of the ops can be automatically generated by matching against AST +nodes and types. For example, a func_op is automatically generated when +a specific function is called with the specific positional argument +count and argument types. + +Example op definition: + +list_len_op = func_op(name='builtins.len', + arg_types=[list_rprimitive], + result_type=short_int_rprimitive, + error_kind=ERR_NEVER, + emit=emit_len) + +This op is automatically generated for calls to len() with a single +list argument. The result type is short_int_rprimitive, and this +never raises an exception (ERR_NEVER). The function emit_len is used +to generate C for this op. The op can also be manually generated using +"list_len_op". Ops that are only generated automatically don't need to +be assigned to a module attribute. + +Ops defined with custom_op are only explicitly generated in +mypyc.irbuild and won't be generated automatically. They are always +assigned to a module attribute, as otherwise they won't be accessible. + +The actual ops are defined in other submodules of this package, grouped +by category. + +Most operations have fallback implementations that apply to all possible +arguments and types. For example, there are generic implementations of +arbitrary function and method calls, and binary operators. These generic +implementations are typically slower than specialized ones, but we tend +to rely on them for infrequently used ops. 
It's impractical to have +optimized implementations of all ops. +""" + +from __future__ import annotations + +from typing import Final, NamedTuple + +from mypyc.ir.ops import PrimitiveDescription, StealsDescription +from mypyc.ir.rtypes import RType + +# Error kind for functions that return negative integer on exception. This +# is only used for primitives. We translate it away during IR building. +ERR_NEG_INT: Final = 10 + + +class CFunctionDescription(NamedTuple): + name: str + arg_types: list[RType] + return_type: RType + var_arg_type: RType | None + truncated_type: RType | None + c_function_name: str + error_kind: int + steals: StealsDescription + is_borrowed: bool + ordering: list[int] | None + extra_int_constants: list[tuple[int, RType]] + priority: int + is_pure: bool + returns_null: bool + capsule: str | None + + +# A description for C load operations including LoadGlobal and LoadAddress +class LoadAddressDescription(NamedTuple): + name: str + type: RType + src: str # name of the target to load + + +# Primitive ops for method call (such as 'str.join') +method_call_ops: dict[str, list[PrimitiveDescription]] = {} + +# Primitive ops for top level function call (such as 'builtins.list') +function_ops: dict[str, list[PrimitiveDescription]] = {} + +# Primitive ops for binary operations +binary_ops: dict[str, list[PrimitiveDescription]] = {} + +# Primitive ops for unary ops +unary_ops: dict[str, list[PrimitiveDescription]] = {} + +builtin_names: dict[str, tuple[RType, str]] = {} + + +def method_op( + name: str, + arg_types: list[RType], + return_type: RType, + c_function_name: str, + error_kind: int, + var_arg_type: RType | None = None, + truncated_type: RType | None = None, + ordering: list[int] | None = None, + extra_int_constants: list[tuple[int, RType]] | None = None, + steals: StealsDescription = False, + is_borrowed: bool = False, + priority: int = 1, + is_pure: bool = False, + capsule: str | None = None, +) -> PrimitiveDescription: + """Define a c function call op that replaces a method call. + + This will be automatically generated by matching against the AST. + + Args: + name: short name of the method (for example, 'append') + arg_types: argument types; the receiver is always the first argument + return_type: type of the return value. Use void_rtype to represent void. + c_function_name: name of the C function to call + error_kind: how errors are represented in the result (one of ERR_*) + var_arg_type: type of all variable arguments + truncated_type: type to truncated to(See Truncate for info) + if it's defined both return_type and it should be non-referenced + integer types or bool type + ordering: optional ordering of the arguments, if defined, + reorders the arguments accordingly. + should never be used together with var_arg_type. 
+ all the other arguments(such as arg_types) are in the order + accepted by the python syntax(before reordering) + extra_int_constants: optional extra integer constants as the last arguments to a C call + steals: description of arguments that this steals (ref count wise) + is_borrowed: if True, returned value is borrowed (no need to decrease refcount) + priority: if multiple ops match, the one with the highest priority is picked + is_pure: if True, declare that the C function has no side effects, takes immutable + arguments, and never raises an exception + """ + if extra_int_constants is None: + extra_int_constants = [] + ops = method_call_ops.setdefault(name, []) + desc = PrimitiveDescription( + name, + arg_types, + return_type, + var_arg_type, + truncated_type, + c_function_name, + error_kind, + steals, + is_borrowed, + ordering, + extra_int_constants, + priority, + is_pure=is_pure, + experimental=False, + capsule=capsule, + ) + ops.append(desc) + return desc + + +def function_op( + name: str, + arg_types: list[RType], + return_type: RType, + c_function_name: str, + error_kind: int, + var_arg_type: RType | None = None, + truncated_type: RType | None = None, + ordering: list[int] | None = None, + extra_int_constants: list[tuple[int, RType]] | None = None, + steals: StealsDescription = False, + is_borrowed: bool = False, + priority: int = 1, + experimental: bool = False, + capsule: str | None = None, +) -> PrimitiveDescription: + """Define a C function call op that replaces a function call. + + This will be automatically generated by matching against the AST. + + Most arguments are similar to method_op(). + + Args: + name: full name of the function + arg_types: positional argument types for which this applies + """ + if extra_int_constants is None: + extra_int_constants = [] + ops = function_ops.setdefault(name, []) + desc = PrimitiveDescription( + name, + arg_types, + return_type, + var_arg_type=var_arg_type, + truncated_type=truncated_type, + c_function_name=c_function_name, + error_kind=error_kind, + steals=steals, + is_borrowed=is_borrowed, + ordering=ordering, + extra_int_constants=extra_int_constants, + priority=priority, + is_pure=False, + experimental=experimental, + capsule=capsule, + ) + ops.append(desc) + return desc + + +def binary_op( + name: str, + arg_types: list[RType], + return_type: RType, + error_kind: int, + c_function_name: str | None = None, + primitive_name: str | None = None, + var_arg_type: RType | None = None, + truncated_type: RType | None = None, + ordering: list[int] | None = None, + extra_int_constants: list[tuple[int, RType]] | None = None, + steals: StealsDescription = False, + is_borrowed: bool = False, + priority: int = 1, + capsule: str | None = None, +) -> PrimitiveDescription: + """Define a c function call op for a binary operation. + + This will be automatically generated by matching against the AST. + + Most arguments are similar to method_op(), but exactly two argument types + are expected. 
+ """ + assert c_function_name is not None or primitive_name is not None + assert not (c_function_name is not None and primitive_name is not None) + if extra_int_constants is None: + extra_int_constants = [] + ops = binary_ops.setdefault(name, []) + desc = PrimitiveDescription( + name=primitive_name or name, + arg_types=arg_types, + return_type=return_type, + var_arg_type=var_arg_type, + truncated_type=truncated_type, + c_function_name=c_function_name, + error_kind=error_kind, + steals=steals, + is_borrowed=is_borrowed, + ordering=ordering, + extra_int_constants=extra_int_constants, + priority=priority, + is_pure=False, + experimental=False, + capsule=capsule, + ) + ops.append(desc) + return desc + + +def custom_op( + arg_types: list[RType], + return_type: RType, + c_function_name: str, + error_kind: int, + var_arg_type: RType | None = None, + truncated_type: RType | None = None, + ordering: list[int] | None = None, + extra_int_constants: list[tuple[int, RType]] | None = None, + steals: StealsDescription = False, + is_borrowed: bool = False, + *, + is_pure: bool = False, + returns_null: bool = False, +) -> CFunctionDescription: + """Create a one-off CallC op that can't be automatically generated from the AST. + + Most arguments are similar to method_op(). + """ + if extra_int_constants is None: + extra_int_constants = [] + return CFunctionDescription( + "", + arg_types, + return_type, + var_arg_type, + truncated_type, + c_function_name, + error_kind, + steals, + is_borrowed, + ordering, + extra_int_constants, + 0, + is_pure=is_pure, + returns_null=returns_null, + capsule=None, + ) + + +def custom_primitive_op( + name: str, + arg_types: list[RType], + return_type: RType, + error_kind: int, + c_function_name: str | None = None, + var_arg_type: RType | None = None, + truncated_type: RType | None = None, + ordering: list[int] | None = None, + extra_int_constants: list[tuple[int, RType]] | None = None, + steals: StealsDescription = False, + is_borrowed: bool = False, + is_pure: bool = False, + capsule: str | None = None, +) -> PrimitiveDescription: + """Define a primitive op that can't be automatically generated based on the AST. + + Most arguments are similar to method_op(). + """ + if extra_int_constants is None: + extra_int_constants = [] + return PrimitiveDescription( + name=name, + arg_types=arg_types, + return_type=return_type, + var_arg_type=var_arg_type, + truncated_type=truncated_type, + c_function_name=c_function_name, + error_kind=error_kind, + steals=steals, + is_borrowed=is_borrowed, + ordering=ordering, + extra_int_constants=extra_int_constants, + priority=0, + is_pure=is_pure, + experimental=False, + capsule=capsule, + ) + + +def unary_op( + name: str, + arg_type: RType, + return_type: RType, + c_function_name: str, + error_kind: int, + truncated_type: RType | None = None, + ordering: list[int] | None = None, + extra_int_constants: list[tuple[int, RType]] | None = None, + steals: StealsDescription = False, + is_borrowed: bool = False, + priority: int = 1, + is_pure: bool = False, + capsule: str | None = None, +) -> PrimitiveDescription: + """Define a primitive op for an unary operation. + + This will be automatically generated by matching against the AST. + + Most arguments are similar to method_op(), but exactly one argument type + is expected. 
+ """ + if extra_int_constants is None: + extra_int_constants = [] + ops = unary_ops.setdefault(name, []) + desc = PrimitiveDescription( + name, + [arg_type], + return_type, + var_arg_type=None, + truncated_type=truncated_type, + c_function_name=c_function_name, + error_kind=error_kind, + steals=steals, + is_borrowed=is_borrowed, + ordering=ordering, + extra_int_constants=extra_int_constants, + priority=priority, + is_pure=is_pure, + experimental=False, + capsule=capsule, + ) + ops.append(desc) + return desc + + +def load_address_op(name: str, type: RType, src: str) -> LoadAddressDescription: + assert name not in builtin_names, "already defined: %s" % name + builtin_names[name] = (type, src) + return LoadAddressDescription(name, type, src) + + +# Import various modules that set up global state. +import mypyc.primitives.bytes_ops +import mypyc.primitives.dict_ops +import mypyc.primitives.float_ops +import mypyc.primitives.int_ops +import mypyc.primitives.list_ops +import mypyc.primitives.misc_ops +import mypyc.primitives.str_ops +import mypyc.primitives.tuple_ops +import mypyc.primitives.weakref_ops # noqa: F401 diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/set_ops.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/primitives/set_ops.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..1ec9aac Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/primitives/set_ops.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/set_ops.py b/.venv/lib/python3.12/site-packages/mypyc/primitives/set_ops.py new file mode 100644 index 0000000..786de00 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/primitives/set_ops.py @@ -0,0 +1,161 @@ +"""Primitive set and frozenset ops.""" + +from __future__ import annotations + +from mypyc.ir.ops import ERR_FALSE, ERR_MAGIC, ERR_NEVER +from mypyc.ir.rtypes import ( + bit_rprimitive, + bool_rprimitive, + c_int_rprimitive, + frozenset_rprimitive, + object_rprimitive, + pointer_rprimitive, + set_rprimitive, +) +from mypyc.primitives.registry import ( + ERR_NEG_INT, + binary_op, + function_op, + load_address_op, + method_op, +) + +# Get the 'builtins.set' type object. +load_address_op(name="builtins.set", type=object_rprimitive, src="PySet_Type") + +# Get the 'builtins.frozenset' type object. +load_address_op(name="builtins.frozenset", type=object_rprimitive, src="PyFrozenSet_Type") + +# Construct an empty set. 
+new_set_op = function_op( + name="builtins.set", + arg_types=[], + return_type=set_rprimitive, + c_function_name="PySet_New", + error_kind=ERR_MAGIC, + extra_int_constants=[(0, pointer_rprimitive)], +) + +# set(obj) +function_op( + name="builtins.set", + arg_types=[object_rprimitive], + return_type=set_rprimitive, + c_function_name="PySet_New", + error_kind=ERR_MAGIC, +) + +# Construct an empty frozenset +function_op( + name="builtins.frozenset", + arg_types=[], + return_type=frozenset_rprimitive, + c_function_name="PyFrozenSet_New", + error_kind=ERR_MAGIC, + extra_int_constants=[(0, pointer_rprimitive)], +) + +# frozenset(obj) +function_op( + name="builtins.frozenset", + arg_types=[object_rprimitive], + return_type=frozenset_rprimitive, + c_function_name="PyFrozenSet_New", + error_kind=ERR_MAGIC, +) + +# translate isinstance(obj, set) +isinstance_set = function_op( + name="builtins.isinstance", + arg_types=[object_rprimitive], + return_type=bit_rprimitive, + c_function_name="PySet_Check", + error_kind=ERR_NEVER, +) + +# translate isinstance(obj, frozenset) +isinstance_frozenset = function_op( + name="builtins.isinstance", + arg_types=[object_rprimitive], + return_type=bit_rprimitive, + c_function_name="PyFrozenSet_Check", + error_kind=ERR_NEVER, +) + +# item in set +set_in_op = binary_op( + name="in", + arg_types=[object_rprimitive, set_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PySet_Contains", + error_kind=ERR_NEG_INT, + truncated_type=bool_rprimitive, + ordering=[1, 0], +) + +# item in frozenset +binary_op( + name="in", + arg_types=[object_rprimitive, frozenset_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PySet_Contains", + error_kind=ERR_NEG_INT, + truncated_type=bool_rprimitive, + ordering=[1, 0], +) + +# set.remove(obj) +method_op( + name="remove", + arg_types=[set_rprimitive, object_rprimitive], + return_type=bit_rprimitive, + c_function_name="CPySet_Remove", + error_kind=ERR_FALSE, +) + +# set.discard(obj) +method_op( + name="discard", + arg_types=[set_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PySet_Discard", + error_kind=ERR_NEG_INT, +) + +# set.add(obj) +set_add_op = method_op( + name="add", + arg_types=[set_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PySet_Add", + error_kind=ERR_NEG_INT, +) + +# set.update(obj) +# +# This is not a public API but looks like it should be fine. 
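+# (_PySet_Update is the internal CPython helper behind set.update(); it returns
+# -1 on error, which matches the ERR_NEG_INT error kind used below.)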
+set_update_op = method_op( + name="update", + arg_types=[set_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="_PySet_Update", + error_kind=ERR_NEG_INT, +) + +# set.clear() +method_op( + name="clear", + arg_types=[set_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PySet_Clear", + error_kind=ERR_NEG_INT, +) + +# set.pop() +method_op( + name="pop", + arg_types=[set_rprimitive], + return_type=object_rprimitive, + c_function_name="PySet_Pop", + error_kind=ERR_MAGIC, +) diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/str_ops.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/primitives/str_ops.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..b669dc2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/primitives/str_ops.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/str_ops.py b/.venv/lib/python3.12/site-packages/mypyc/primitives/str_ops.py new file mode 100644 index 0000000..d39f1f8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/primitives/str_ops.py @@ -0,0 +1,490 @@ +"""Primitive str ops.""" + +from __future__ import annotations + +from mypyc.ir.ops import ERR_MAGIC, ERR_NEVER +from mypyc.ir.rtypes import ( + RType, + bit_rprimitive, + bool_rprimitive, + bytes_rprimitive, + c_int_rprimitive, + c_pyssize_t_rprimitive, + int_rprimitive, + list_rprimitive, + object_rprimitive, + pointer_rprimitive, + str_rprimitive, + tuple_rprimitive, +) +from mypyc.primitives.registry import ( + ERR_NEG_INT, + binary_op, + custom_op, + custom_primitive_op, + function_op, + load_address_op, + method_op, +) + +# Get the 'str' type object. +load_address_op(name="builtins.str", type=object_rprimitive, src="PyUnicode_Type") + +# str(obj) +str_op = function_op( + name="builtins.str", + arg_types=[object_rprimitive], + return_type=str_rprimitive, + c_function_name="PyObject_Str", + error_kind=ERR_MAGIC, +) + +# repr(obj) +function_op( + name="builtins.repr", + arg_types=[object_rprimitive], + return_type=str_rprimitive, + c_function_name="PyObject_Repr", + error_kind=ERR_MAGIC, +) + +# translate isinstance(obj, str) +isinstance_str = function_op( + name="builtins.isinstance", + arg_types=[object_rprimitive], + return_type=bit_rprimitive, + c_function_name="PyUnicode_Check", + error_kind=ERR_NEVER, +) + +# str1 + str2 +binary_op( + name="+", + arg_types=[str_rprimitive, str_rprimitive], + return_type=str_rprimitive, + c_function_name="PyUnicode_Concat", + error_kind=ERR_MAGIC, +) + +# str1 += str2 +# +# PyUnicode_Append makes an effort to reuse the LHS when the refcount +# is 1. This is super dodgy but oh well, the interpreter does it. 
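+# CPyStr_Append (used below) builds on that behaviour; steals=[True, False]
+# records that the call consumes the reference to the left operand.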
+binary_op( + name="+=", + arg_types=[str_rprimitive, str_rprimitive], + return_type=str_rprimitive, + c_function_name="CPyStr_Append", + error_kind=ERR_MAGIC, + steals=[True, False], +) + +# str1 == str2 (very common operation, so we provide our own) +str_eq = custom_primitive_op( + name="str_eq", + c_function_name="CPyStr_Equal", + arg_types=[str_rprimitive, str_rprimitive], + return_type=bool_rprimitive, + error_kind=ERR_NEVER, +) + +str_eq_literal = custom_primitive_op( + name="str_eq_literal", + c_function_name="CPyStr_EqualLiteral", + arg_types=[str_rprimitive, str_rprimitive, c_pyssize_t_rprimitive], + return_type=bool_rprimitive, + error_kind=ERR_NEVER, +) + +unicode_compare = custom_op( + arg_types=[str_rprimitive, str_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyUnicode_Compare", + error_kind=ERR_NEVER, +) + +# str[index] (for an int index) +method_op( + name="__getitem__", + arg_types=[str_rprimitive, int_rprimitive], + return_type=str_rprimitive, + c_function_name="CPyStr_GetItem", + error_kind=ERR_MAGIC, +) + +# This is unsafe since it assumes that the index is within reasonable bounds. +# In the future this might do no bounds checking at all. +str_get_item_unsafe_op = custom_op( + arg_types=[str_rprimitive, c_pyssize_t_rprimitive], + return_type=str_rprimitive, + c_function_name="CPyStr_GetItemUnsafe", + error_kind=ERR_MAGIC, +) + +# str[begin:end] +str_slice_op = custom_op( + arg_types=[str_rprimitive, int_rprimitive, int_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyStr_GetSlice", + error_kind=ERR_MAGIC, +) + +# item in str +binary_op( + name="in", + arg_types=[str_rprimitive, str_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyUnicode_Contains", + error_kind=ERR_NEG_INT, + truncated_type=bool_rprimitive, + ordering=[1, 0], +) + +# str.find(...) and str.rfind(...) 
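+#
+# The loop below registers one candidate per arity: find(sub), find(sub, start)
+# and find(sub, start, end), and likewise for rfind. Omitted start arguments are
+# filled in through extra_int_constants, and the trailing constant (1 for find,
+# -1 for rfind) is passed to the shared C helper, presumably to select the
+# search direction.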
+str_find_types: list[RType] = [str_rprimitive, str_rprimitive, int_rprimitive, int_rprimitive] +str_find_functions = ["CPyStr_Find", "CPyStr_Find", "CPyStr_FindWithEnd"] +str_find_constants: list[list[tuple[int, RType]]] = [[(0, c_int_rprimitive)], [], []] +str_rfind_constants: list[list[tuple[int, RType]]] = [[(0, c_int_rprimitive)], [], []] +for i in range(len(str_find_types) - 1): + method_op( + name="find", + arg_types=str_find_types[0 : i + 2], + return_type=int_rprimitive, + c_function_name=str_find_functions[i], + extra_int_constants=str_find_constants[i] + [(1, c_int_rprimitive)], + error_kind=ERR_MAGIC, + ) + method_op( + name="rfind", + arg_types=str_find_types[0 : i + 2], + return_type=int_rprimitive, + c_function_name=str_find_functions[i], + extra_int_constants=str_rfind_constants[i] + [(-1, c_int_rprimitive)], + error_kind=ERR_MAGIC, + ) + +# str.join(obj) +method_op( + name="join", + arg_types=[str_rprimitive, object_rprimitive], + return_type=str_rprimitive, + c_function_name="PyUnicode_Join", + error_kind=ERR_MAGIC, +) + +str_build_op = custom_op( + arg_types=[c_pyssize_t_rprimitive], + return_type=str_rprimitive, + c_function_name="CPyStr_Build", + error_kind=ERR_MAGIC, + var_arg_type=str_rprimitive, +) + +# str.strip, str.lstrip, str.rstrip +for strip_prefix in ["l", "r", ""]: + method_op( + name=f"{strip_prefix}strip", + arg_types=[str_rprimitive, str_rprimitive], + return_type=str_rprimitive, + c_function_name=f"CPyStr_{strip_prefix.upper()}Strip", + error_kind=ERR_NEVER, + ) + method_op( + name=f"{strip_prefix}strip", + arg_types=[str_rprimitive], + return_type=str_rprimitive, + c_function_name=f"CPyStr_{strip_prefix.upper()}Strip", + # This 0 below is implicitly treated as NULL in C. + extra_int_constants=[(0, c_int_rprimitive)], + error_kind=ERR_NEVER, + ) + +# str.startswith(str) +method_op( + name="startswith", + arg_types=[str_rprimitive, str_rprimitive], + return_type=c_int_rprimitive, + c_function_name="CPyStr_Startswith", + truncated_type=bool_rprimitive, + error_kind=ERR_NEVER, +) + +# str.startswith(tuple) +method_op( + name="startswith", + arg_types=[str_rprimitive, tuple_rprimitive], + return_type=bool_rprimitive, + c_function_name="CPyStr_Startswith", + error_kind=ERR_MAGIC, +) + +# str.endswith(str) +method_op( + name="endswith", + arg_types=[str_rprimitive, str_rprimitive], + return_type=c_int_rprimitive, + c_function_name="CPyStr_Endswith", + truncated_type=bool_rprimitive, + error_kind=ERR_NEVER, +) + +# str.endswith(tuple) +method_op( + name="endswith", + arg_types=[str_rprimitive, tuple_rprimitive], + return_type=bool_rprimitive, + c_function_name="CPyStr_Endswith", + error_kind=ERR_MAGIC, +) + +# str.removeprefix(str) +method_op( + name="removeprefix", + arg_types=[str_rprimitive, str_rprimitive], + return_type=str_rprimitive, + c_function_name="CPyStr_Removeprefix", + error_kind=ERR_NEVER, +) + +# str.removesuffix(str) +method_op( + name="removesuffix", + arg_types=[str_rprimitive, str_rprimitive], + return_type=str_rprimitive, + c_function_name="CPyStr_Removesuffix", + error_kind=ERR_NEVER, +) + +# str.split(...) and str.rsplit(...) 
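+#
+# Same arity trick as find/rfind above: split(), split(sep) and
+# split(sep, maxsplit) each get their own candidate. The (0, pointer_rprimitive)
+# default is passed as a NULL separator (split on whitespace) and
+# (-1, c_int_rprimitive) means "no maxsplit limit".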
+str_split_types: list[RType] = [str_rprimitive, str_rprimitive, int_rprimitive] +str_split_functions = ["PyUnicode_Split", "PyUnicode_Split", "CPyStr_Split"] +str_rsplit_functions = ["PyUnicode_RSplit", "PyUnicode_RSplit", "CPyStr_RSplit"] +str_split_constants: list[list[tuple[int, RType]]] = [ + [(0, pointer_rprimitive), (-1, c_int_rprimitive)], + [(-1, c_int_rprimitive)], + [], +] +for i in range(len(str_split_types)): + method_op( + name="split", + arg_types=str_split_types[0 : i + 1], + return_type=list_rprimitive, + c_function_name=str_split_functions[i], + extra_int_constants=str_split_constants[i], + error_kind=ERR_MAGIC, + ) + method_op( + name="rsplit", + arg_types=str_split_types[0 : i + 1], + return_type=list_rprimitive, + c_function_name=str_rsplit_functions[i], + extra_int_constants=str_split_constants[i], + error_kind=ERR_MAGIC, + ) + +# str.splitlines(...) +str_splitlines_types: list[RType] = [str_rprimitive, bool_rprimitive] +str_splitlines_constants: list[list[tuple[int, RType]]] = [[(0, c_int_rprimitive)], []] +for i in range(2): + method_op( + name="splitlines", + arg_types=str_splitlines_types[0 : i + 1], + return_type=list_rprimitive, + c_function_name="PyUnicode_Splitlines", + extra_int_constants=str_splitlines_constants[i], + error_kind=ERR_NEVER, + ) + +# str.partition(str) +method_op( + name="partition", + arg_types=[str_rprimitive, str_rprimitive], + return_type=tuple_rprimitive, + c_function_name="PyUnicode_Partition", + error_kind=ERR_MAGIC, +) + +# str.rpartition(str) +method_op( + name="rpartition", + arg_types=[str_rprimitive, str_rprimitive], + return_type=tuple_rprimitive, + c_function_name="PyUnicode_RPartition", + error_kind=ERR_MAGIC, +) + +# str.count(substring) +method_op( + name="count", + arg_types=[str_rprimitive, str_rprimitive], + return_type=c_pyssize_t_rprimitive, + c_function_name="CPyStr_Count", + error_kind=ERR_NEG_INT, + extra_int_constants=[(0, c_pyssize_t_rprimitive)], +) + +# str.count(substring, start) +method_op( + name="count", + arg_types=[str_rprimitive, str_rprimitive, int_rprimitive], + return_type=c_pyssize_t_rprimitive, + c_function_name="CPyStr_Count", + error_kind=ERR_NEG_INT, +) + +# str.count(substring, start, end) +method_op( + name="count", + arg_types=[str_rprimitive, str_rprimitive, int_rprimitive, int_rprimitive], + return_type=c_pyssize_t_rprimitive, + c_function_name="CPyStr_CountFull", + error_kind=ERR_NEG_INT, +) + +# str.replace(old, new) +method_op( + name="replace", + arg_types=[str_rprimitive, str_rprimitive, str_rprimitive], + return_type=str_rprimitive, + c_function_name="PyUnicode_Replace", + error_kind=ERR_MAGIC, + extra_int_constants=[(-1, c_int_rprimitive)], +) + +# str.replace(old, new, count) +method_op( + name="replace", + arg_types=[str_rprimitive, str_rprimitive, str_rprimitive, int_rprimitive], + return_type=str_rprimitive, + c_function_name="CPyStr_Replace", + error_kind=ERR_MAGIC, +) + +# check if a string is true (isn't an empty string) +str_check_if_true = custom_op( + arg_types=[str_rprimitive], + return_type=bit_rprimitive, + c_function_name="CPyStr_IsTrue", + error_kind=ERR_NEVER, +) + +str_ssize_t_size_op = custom_op( + arg_types=[str_rprimitive], + return_type=c_pyssize_t_rprimitive, + c_function_name="CPyStr_Size_size_t", + error_kind=ERR_NEG_INT, +) + +# obj.decode() +method_op( + name="decode", + arg_types=[bytes_rprimitive], + return_type=str_rprimitive, + c_function_name="CPy_Decode", + error_kind=ERR_MAGIC, + extra_int_constants=[(0, pointer_rprimitive), (0, pointer_rprimitive)], +) + 
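+# ---------------------------------------------------------------------------
+# Illustrative sketch (not part of mypyc itself): each method_op()/function_op()
+# call in this module records a candidate in the tables defined in
+# mypyc.primitives.registry. Assuming method_call_ops is keyed by method name
+# (as binary_ops is), the C helpers registered for a method can be listed
+# like this:
+
+def _list_registered_method_helpers(method_name: str) -> list[str]:
+    """Return the C function names registered for a given method name."""
+    from mypyc.primitives import registry
+
+    return [
+        desc.c_function_name
+        for desc in registry.method_call_ops.get(method_name, [])
+        if desc.c_function_name is not None
+    ]
+
+# For example, _list_registered_method_helpers("startswith") should include
+# "CPyStr_Startswith" once this module has been imported.
+# ---------------------------------------------------------------------------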
+# obj.decode(encoding) +method_op( + name="decode", + arg_types=[bytes_rprimitive, str_rprimitive], + return_type=str_rprimitive, + c_function_name="CPy_Decode", + error_kind=ERR_MAGIC, + extra_int_constants=[(0, pointer_rprimitive)], +) + +# bytes.decode(encoding, errors) +method_op( + name="decode", + arg_types=[bytes_rprimitive, str_rprimitive, str_rprimitive], + return_type=str_rprimitive, + c_function_name="CPy_Decode", + error_kind=ERR_MAGIC, +) + +# bytes.decode(encoding) - utf8 strict specialization +bytes_decode_utf8_strict = custom_op( + arg_types=[bytes_rprimitive], + return_type=str_rprimitive, + c_function_name="CPy_DecodeUTF8", + error_kind=ERR_MAGIC, +) + +# bytes.decode(encoding) - ascii strict specialization +bytes_decode_ascii_strict = custom_op( + arg_types=[bytes_rprimitive], + return_type=str_rprimitive, + c_function_name="CPy_DecodeASCII", + error_kind=ERR_MAGIC, +) + +# bytes.decode(encoding) - latin1 strict specialization +bytes_decode_latin1_strict = custom_op( + arg_types=[bytes_rprimitive], + return_type=str_rprimitive, + c_function_name="CPy_DecodeLatin1", + error_kind=ERR_MAGIC, +) + +# str.encode() +method_op( + name="encode", + arg_types=[str_rprimitive], + return_type=bytes_rprimitive, + c_function_name="CPy_Encode", + error_kind=ERR_MAGIC, + extra_int_constants=[(0, pointer_rprimitive), (0, pointer_rprimitive)], +) + +# str.encode(encoding) +method_op( + name="encode", + arg_types=[str_rprimitive, str_rprimitive], + return_type=bytes_rprimitive, + c_function_name="CPy_Encode", + error_kind=ERR_MAGIC, + extra_int_constants=[(0, pointer_rprimitive)], +) + +# str.encode(encoding) - utf8 strict specialization +str_encode_utf8_strict = custom_op( + arg_types=[str_rprimitive], + return_type=bytes_rprimitive, + c_function_name="PyUnicode_AsUTF8String", + error_kind=ERR_MAGIC, +) + +# str.encode(encoding) - ascii strict specialization +str_encode_ascii_strict = custom_op( + arg_types=[str_rprimitive], + return_type=bytes_rprimitive, + c_function_name="PyUnicode_AsASCIIString", + error_kind=ERR_MAGIC, +) + +# str.encode(encoding) - latin1 strict specialization +str_encode_latin1_strict = custom_op( + arg_types=[str_rprimitive], + return_type=bytes_rprimitive, + c_function_name="PyUnicode_AsLatin1String", + error_kind=ERR_MAGIC, +) + +# str.encode(encoding, errors) +method_op( + name="encode", + arg_types=[str_rprimitive, str_rprimitive, str_rprimitive], + return_type=bytes_rprimitive, + c_function_name="CPy_Encode", + error_kind=ERR_MAGIC, +) + +function_op( + name="builtins.ord", + arg_types=[str_rprimitive], + return_type=int_rprimitive, + c_function_name="CPyStr_Ord", + error_kind=ERR_MAGIC, +) diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/tuple_ops.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/primitives/tuple_ops.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..37fe4e8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/primitives/tuple_ops.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/tuple_ops.py b/.venv/lib/python3.12/site-packages/mypyc/primitives/tuple_ops.py new file mode 100644 index 0000000..ab23f8c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/primitives/tuple_ops.py @@ -0,0 +1,136 @@ +"""Primitive tuple ops for *variable-length* tuples. + +Note: Varying-length tuples are represented as boxed Python tuple +objects, i.e. tuple_rprimitive (RPrimitive), not RTuple. 
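+
+For example, the result of tuple(x) for an arbitrary iterable x has type
+tuple_rprimitive, whereas a tuple whose length and item types are statically
+known (such as a literal) is normally represented as an RTuple and handled
+elsewhere.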
+""" + +from __future__ import annotations + +from mypyc.ir.ops import ERR_MAGIC, ERR_NEVER +from mypyc.ir.rtypes import ( + bit_rprimitive, + c_pyssize_t_rprimitive, + int_rprimitive, + list_rprimitive, + object_rprimitive, + tuple_rprimitive, + void_rtype, +) +from mypyc.primitives.registry import binary_op, custom_op, function_op, load_address_op, method_op + +# Get the 'builtins.tuple' type object. +load_address_op(name="builtins.tuple", type=object_rprimitive, src="PyTuple_Type") + +# tuple[index] (for an int index) +tuple_get_item_op = method_op( + name="__getitem__", + arg_types=[tuple_rprimitive, int_rprimitive], + return_type=object_rprimitive, + c_function_name="CPySequenceTuple_GetItem", + error_kind=ERR_MAGIC, +) + +# This is unsafe because it assumes that the index is a non-negative integer +# that is in-bounds for the tuple. +tuple_get_item_unsafe_op = custom_op( + arg_types=[tuple_rprimitive, c_pyssize_t_rprimitive], + return_type=object_rprimitive, + c_function_name="CPySequenceTuple_GetItemUnsafe", + error_kind=ERR_NEVER, +) + +# Construct a boxed tuple from items: (item1, item2, ...) +new_tuple_op = custom_op( + arg_types=[c_pyssize_t_rprimitive], + return_type=tuple_rprimitive, + c_function_name="PyTuple_Pack", + error_kind=ERR_MAGIC, + var_arg_type=object_rprimitive, +) + +new_tuple_with_length_op = custom_op( + arg_types=[c_pyssize_t_rprimitive], + return_type=tuple_rprimitive, + c_function_name="PyTuple_New", + error_kind=ERR_MAGIC, +) + +load_empty_tuple_constant_op = custom_op( + arg_types=[], + return_type=tuple_rprimitive, + c_function_name="CPyTuple_LoadEmptyTupleConstant", + error_kind=ERR_NEVER, +) + +# PyTuple_SET_ITEM does no error checking, +# and should only be used to fill in brand new tuples. +new_tuple_set_item_op = custom_op( + arg_types=[tuple_rprimitive, c_pyssize_t_rprimitive, object_rprimitive], + return_type=void_rtype, + c_function_name="CPySequenceTuple_SetItemUnsafe", + error_kind=ERR_NEVER, + steals=[False, False, True], +) + +# Construct tuple from a list. +list_tuple_op = function_op( + name="builtins.tuple", + arg_types=[list_rprimitive], + return_type=tuple_rprimitive, + c_function_name="PyList_AsTuple", + error_kind=ERR_MAGIC, + priority=2, +) + +# Construct tuple from an arbitrary (iterable) object. 
+sequence_tuple_op = function_op( + name="builtins.tuple", + arg_types=[object_rprimitive], + return_type=tuple_rprimitive, + c_function_name="PySequence_Tuple", + error_kind=ERR_MAGIC, +) + +# translate isinstance(obj, tuple) +isinstance_tuple = function_op( + name="builtins.isinstance", + arg_types=[object_rprimitive], + return_type=bit_rprimitive, + c_function_name="PyTuple_Check", + error_kind=ERR_NEVER, +) + +# tuple + tuple +binary_op( + name="+", + arg_types=[tuple_rprimitive, tuple_rprimitive], + return_type=tuple_rprimitive, + c_function_name="PySequence_Concat", + error_kind=ERR_MAGIC, +) + +# tuple * int +binary_op( + name="*", + arg_types=[tuple_rprimitive, int_rprimitive], + return_type=tuple_rprimitive, + c_function_name="CPySequence_Multiply", + error_kind=ERR_MAGIC, +) + +# int * tuple +binary_op( + name="*", + arg_types=[int_rprimitive, tuple_rprimitive], + return_type=tuple_rprimitive, + c_function_name="CPySequence_RMultiply", + error_kind=ERR_MAGIC, +) + +# tuple[begin:end] +tuple_slice_op = custom_op( + arg_types=[tuple_rprimitive, int_rprimitive, int_rprimitive], + return_type=object_rprimitive, + c_function_name="CPySequenceTuple_GetSlice", + error_kind=ERR_MAGIC, +) diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/weakref_ops.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/primitives/weakref_ops.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..db7cb9b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/primitives/weakref_ops.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/primitives/weakref_ops.py b/.venv/lib/python3.12/site-packages/mypyc/primitives/weakref_ops.py new file mode 100644 index 0000000..21379d3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/primitives/weakref_ops.py @@ -0,0 +1,40 @@ +from mypyc.ir.ops import ERR_MAGIC +from mypyc.ir.rtypes import object_rprimitive, pointer_rprimitive +from mypyc.primitives.registry import function_op + +# Weakref operations + +new_ref_op = function_op( + name="weakref.ReferenceType", + arg_types=[object_rprimitive], + return_type=object_rprimitive, + c_function_name="PyWeakref_NewRef", + extra_int_constants=[(0, pointer_rprimitive)], + error_kind=ERR_MAGIC, +) + +new_ref__with_callback_op = function_op( + name="weakref.ReferenceType", + arg_types=[object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="PyWeakref_NewRef", + error_kind=ERR_MAGIC, +) + +new_proxy_op = function_op( + name="_weakref.proxy", + arg_types=[object_rprimitive], + return_type=object_rprimitive, + c_function_name="PyWeakref_NewProxy", + extra_int_constants=[(0, pointer_rprimitive)], + error_kind=ERR_MAGIC, +) + +new_proxy_with_callback_op = function_op( + name="_weakref.proxy", + arg_types=[object_rprimitive, object_rprimitive], + # steals=[True, False], + return_type=object_rprimitive, + c_function_name="PyWeakref_NewProxy", + error_kind=ERR_MAGIC, +) diff --git a/.venv/lib/python3.12/site-packages/mypyc/py.typed b/.venv/lib/python3.12/site-packages/mypyc/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypyc/rt_subtype.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/rt_subtype.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..ef98c3e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/rt_subtype.cpython-312-x86_64-linux-gnu.so differ diff --git 
a/.venv/lib/python3.12/site-packages/mypyc/rt_subtype.py b/.venv/lib/python3.12/site-packages/mypyc/rt_subtype.py new file mode 100644 index 0000000..004e56e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/rt_subtype.py @@ -0,0 +1,77 @@ +"""'Runtime subtype' check for RTypes. + +A type S is a runtime subtype of T if a value of type S can be used at runtime +when a value of type T is expected without requiring any runtime conversions. + +For boxed types, runtime subtyping is the same as regular subtyping. +Unboxed subtypes, on the other hand, are not runtime subtypes of object +(since they require boxing to be used as an object), but short ints +are runtime subtypes of int. + +Subtyping is used to determine whether an object can be in a +particular place and runtime subtyping is used to determine whether a +coercion is necessary first. +""" + +from __future__ import annotations + +from mypyc.ir.rtypes import ( + RArray, + RInstance, + RPrimitive, + RStruct, + RTuple, + RType, + RTypeVisitor, + RUnion, + RVoid, + is_bit_rprimitive, + is_bool_rprimitive, + is_int_rprimitive, + is_short_int_rprimitive, +) +from mypyc.subtype import is_subtype + + +def is_runtime_subtype(left: RType, right: RType) -> bool: + return left.accept(RTSubtypeVisitor(right)) + + +class RTSubtypeVisitor(RTypeVisitor[bool]): + """Is left a runtime subtype of right? + + A few special cases such as right being 'object' are handled in + is_runtime_subtype and don't need to be covered here. + """ + + def __init__(self, right: RType) -> None: + self.right = right + + def visit_rinstance(self, left: RInstance) -> bool: + return is_subtype(left, self.right) + + def visit_runion(self, left: RUnion) -> bool: + return not self.right.is_unboxed and is_subtype(left, self.right) + + def visit_rprimitive(self, left: RPrimitive) -> bool: + if is_short_int_rprimitive(left) and is_int_rprimitive(self.right): + return True + if is_bit_rprimitive(left) and is_bool_rprimitive(self.right): + return True + return left is self.right + + def visit_rtuple(self, left: RTuple) -> bool: + if isinstance(self.right, RTuple): + return len(self.right.types) == len(left.types) and all( + is_runtime_subtype(t1, t2) for t1, t2 in zip(left.types, self.right.types) + ) + return False + + def visit_rstruct(self, left: RStruct) -> bool: + return isinstance(self.right, RStruct) and self.right.name == left.name + + def visit_rarray(self, left: RArray) -> bool: + return left == self.right + + def visit_rvoid(self, left: RVoid) -> bool: + return isinstance(self.right, RVoid) diff --git a/.venv/lib/python3.12/site-packages/mypyc/sametype.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/sametype.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..633d184 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/sametype.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/sametype.py b/.venv/lib/python3.12/site-packages/mypyc/sametype.py new file mode 100644 index 0000000..1b811d4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/sametype.py @@ -0,0 +1,83 @@ +"""Same type check for RTypes.""" + +from __future__ import annotations + +from mypyc.ir.func_ir import FuncSignature +from mypyc.ir.rtypes import ( + RArray, + RInstance, + RPrimitive, + RStruct, + RTuple, + RType, + RTypeVisitor, + RUnion, + RVoid, +) + + +def is_same_type(a: RType, b: RType) -> bool: + return a.accept(SameTypeVisitor(b)) + + +def is_same_signature(a: FuncSignature, b: 
FuncSignature) -> bool: + return ( + len(a.args) == len(b.args) + and is_same_type(a.ret_type, b.ret_type) + and all( + is_same_type(t1.type, t2.type) and t1.name == t2.name for t1, t2 in zip(a.args, b.args) + ) + ) + + +def is_same_method_signature(a: FuncSignature, b: FuncSignature) -> bool: + return ( + len(a.args) == len(b.args) + and is_same_type(a.ret_type, b.ret_type) + and all( + is_same_type(t1.type, t2.type) + and ((t1.pos_only and t2.pos_only) or t1.name == t2.name) + and t1.optional == t2.optional + for t1, t2 in zip(a.args[1:], b.args[1:]) + ) + ) + + +class SameTypeVisitor(RTypeVisitor[bool]): + def __init__(self, right: RType) -> None: + self.right = right + + def visit_rinstance(self, left: RInstance) -> bool: + return isinstance(self.right, RInstance) and left.name == self.right.name + + def visit_runion(self, left: RUnion) -> bool: + if isinstance(self.right, RUnion): + items = list(self.right.items) + for left_item in left.items: + for j, right_item in enumerate(items): + if is_same_type(left_item, right_item): + del items[j] + break + else: + return False + return not items + return False + + def visit_rprimitive(self, left: RPrimitive) -> bool: + return left is self.right + + def visit_rtuple(self, left: RTuple) -> bool: + return ( + isinstance(self.right, RTuple) + and len(self.right.types) == len(left.types) + and all(is_same_type(t1, t2) for t1, t2 in zip(left.types, self.right.types)) + ) + + def visit_rstruct(self, left: RStruct) -> bool: + return isinstance(self.right, RStruct) and self.right.name == left.name + + def visit_rarray(self, left: RArray) -> bool: + return left == self.right + + def visit_rvoid(self, left: RVoid) -> bool: + return isinstance(self.right, RVoid) diff --git a/.venv/lib/python3.12/site-packages/mypyc/subtype.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/subtype.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..8344b26 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/subtype.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/subtype.py b/.venv/lib/python3.12/site-packages/mypyc/subtype.py new file mode 100644 index 0000000..726a48d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/subtype.py @@ -0,0 +1,88 @@ +"""Subtype check for RTypes.""" + +from __future__ import annotations + +from mypyc.ir.rtypes import ( + RArray, + RInstance, + RPrimitive, + RStruct, + RTuple, + RType, + RTypeVisitor, + RUnion, + RVoid, + is_bit_rprimitive, + is_bool_rprimitive, + is_fixed_width_rtype, + is_int_rprimitive, + is_object_rprimitive, + is_short_int_rprimitive, + is_tagged, + is_tuple_rprimitive, +) + + +def is_subtype(left: RType, right: RType) -> bool: + if is_object_rprimitive(right): + return True + elif isinstance(right, RUnion): + if isinstance(left, RUnion): + for left_item in left.items: + if not any(is_subtype(left_item, right_item) for right_item in right.items): + return False + return True + else: + return any(is_subtype(left, item) for item in right.items) + return left.accept(SubtypeVisitor(right)) + + +class SubtypeVisitor(RTypeVisitor[bool]): + """Is left a subtype of right? + + A few special cases such as right being 'object' are handled in + is_subtype and don't need to be covered here. 
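+
+    Examples of the relation implemented below: bit is a subtype of bool;
+    bool and bit are subtypes of the tagged and fixed-width int types;
+    short_int is a subtype of int; and any RTuple is a subtype of the boxed
+    tuple_rprimitive.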
+ """ + + def __init__(self, right: RType) -> None: + self.right = right + + def visit_rinstance(self, left: RInstance) -> bool: + return isinstance(self.right, RInstance) and self.right.class_ir in left.class_ir.mro + + def visit_runion(self, left: RUnion) -> bool: + return all(is_subtype(item, self.right) for item in left.items) + + def visit_rprimitive(self, left: RPrimitive) -> bool: + right = self.right + if is_bool_rprimitive(left): + if is_tagged(right) or is_fixed_width_rtype(right): + return True + elif is_bit_rprimitive(left): + if is_bool_rprimitive(right) or is_tagged(right) or is_fixed_width_rtype(right): + return True + elif is_short_int_rprimitive(left): + if is_int_rprimitive(right): + return True + elif is_fixed_width_rtype(left): + if is_int_rprimitive(right): + return True + return left is right + + def visit_rtuple(self, left: RTuple) -> bool: + if is_tuple_rprimitive(self.right): + return True + if isinstance(self.right, RTuple): + return len(self.right.types) == len(left.types) and all( + is_subtype(t1, t2) for t1, t2 in zip(left.types, self.right.types) + ) + return False + + def visit_rstruct(self, left: RStruct) -> bool: + return isinstance(self.right, RStruct) and self.right.name == left.name + + def visit_rarray(self, left: RArray) -> bool: + return left == self.right + + def visit_rvoid(self, left: RVoid) -> bool: + return isinstance(self.right, RVoid) diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/__init__.py b/.venv/lib/python3.12/site-packages/mypyc/test/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/config.py b/.venv/lib/python3.12/site-packages/mypyc/test/config.py new file mode 100644 index 0000000..8345cd9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/config.py @@ -0,0 +1,13 @@ +from __future__ import annotations + +import os + +provided_prefix = os.getenv("MYPY_TEST_PREFIX", None) +if provided_prefix: + PREFIX = provided_prefix +else: + this_file_dir = os.path.dirname(os.path.realpath(__file__)) + PREFIX = os.path.dirname(os.path.dirname(this_file_dir)) + +# Location of test data files such as test case descriptions. 
+test_data_prefix = os.path.join(PREFIX, "mypyc", "test-data") diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_alwaysdefined.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_alwaysdefined.py new file mode 100644 index 0000000..9f1487a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_alwaysdefined.py @@ -0,0 +1,46 @@ +"""Test cases for inferring always defined attributes in classes.""" + +from __future__ import annotations + +import os.path + +from mypy.errors import CompileError +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase +from mypyc.test.testutil import ( + ICODE_GEN_BUILTINS, + MypycDataSuite, + assert_test_output, + build_ir_for_single_file2, + infer_ir_build_options_from_test_name, + use_custom_builtins, +) + +files = ["alwaysdefined.test"] + + +class TestAlwaysDefined(MypycDataSuite): + files = files + base_path = test_temp_dir + + def run_case(self, testcase: DataDrivenTestCase) -> None: + """Perform a runtime checking transformation test case.""" + options = infer_ir_build_options_from_test_name(testcase.name) + if options is None: + # Skipped test case + return + with use_custom_builtins(os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase): + try: + ir = build_ir_for_single_file2(testcase.input, options)[0] + except CompileError as e: + actual = e.messages + else: + actual = [] + for cl in ir.classes: + if cl.name.startswith("_"): + continue + actual.append( + "{}: [{}]".format(cl.name, ", ".join(sorted(cl._always_initialized_attrs))) + ) + + assert_test_output(testcase, actual, "Invalid test output", testcase.output) diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_analysis.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_analysis.py new file mode 100644 index 0000000..7d297ea --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_analysis.py @@ -0,0 +1,77 @@ +"""Test runner for data-flow analysis test cases.""" + +from __future__ import annotations + +import os.path + +from mypy.errors import CompileError +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase +from mypyc.analysis import dataflow +from mypyc.common import TOP_LEVEL_NAME +from mypyc.ir.func_ir import all_values +from mypyc.ir.ops import Value +from mypyc.ir.pprint import format_func, generate_names_for_ir +from mypyc.test.testutil import ( + ICODE_GEN_BUILTINS, + MypycDataSuite, + assert_test_output, + build_ir_for_single_file, + use_custom_builtins, +) +from mypyc.transform import exceptions + +files = ["analysis.test"] + + +class TestAnalysis(MypycDataSuite): + files = files + base_path = test_temp_dir + optional_out = True + + def run_case(self, testcase: DataDrivenTestCase) -> None: + """Perform a data-flow analysis test case.""" + + with use_custom_builtins(os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase): + try: + ir = build_ir_for_single_file(testcase.input) + except CompileError as e: + actual = e.messages + else: + actual = [] + for fn in ir: + if fn.name == TOP_LEVEL_NAME and not testcase.name.endswith("_toplevel"): + continue + exceptions.insert_exception_handling(fn) + actual.extend(format_func(fn)) + cfg = dataflow.get_cfg(fn.blocks) + args: set[Value] = set(fn.arg_regs) + name = testcase.name + if name.endswith("_MaybeDefined"): + # Forward, maybe + analysis_result = dataflow.analyze_maybe_defined_regs(fn.blocks, cfg, args) + elif name.endswith("_Liveness"): + # Backward, maybe + analysis_result = 
dataflow.analyze_live_regs(fn.blocks, cfg) + elif name.endswith("_MustDefined"): + # Forward, must + analysis_result = dataflow.analyze_must_defined_regs( + fn.blocks, cfg, args, regs=all_values(fn.arg_regs, fn.blocks) + ) + elif name.endswith("_BorrowedArgument"): + # Forward, must + analysis_result = dataflow.analyze_borrowed_arguments(fn.blocks, cfg, args) + else: + assert False, "No recognized _AnalysisName suffix in test case" + + names = generate_names_for_ir(fn.arg_regs, fn.blocks) + + for key in sorted( + analysis_result.before.keys(), key=lambda x: (x[0].label, x[1]) + ): + pre = ", ".join(sorted(names[reg] for reg in analysis_result.before[key])) + post = ", ".join(sorted(names[reg] for reg in analysis_result.after[key])) + actual.append( + "%-8s %-23s %s" % ((key[0].label, key[1]), "{%s}" % pre, "{%s}" % post) + ) + assert_test_output(testcase, actual, "Invalid source code output") diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_annotate.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_annotate.py new file mode 100644 index 0000000..4a9a2c1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_annotate.py @@ -0,0 +1,71 @@ +"""Test cases for annotating source code to highlight inefficiencies.""" + +from __future__ import annotations + +import os.path + +from mypy.errors import CompileError +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase +from mypyc.annotate import generate_annotations, get_max_prio +from mypyc.ir.pprint import format_func +from mypyc.test.testutil import ( + ICODE_GEN_BUILTINS, + MypycDataSuite, + assert_test_output, + build_ir_for_single_file2, + infer_ir_build_options_from_test_name, + remove_comment_lines, + use_custom_builtins, +) + +files = ["annotate-basic.test"] + + +class TestReport(MypycDataSuite): + files = files + base_path = test_temp_dir + optional_out = True + + def run_case(self, testcase: DataDrivenTestCase) -> None: + """Perform a runtime checking transformation test case.""" + options = infer_ir_build_options_from_test_name(testcase.name) + if options is None: + # Skipped test case + return + with use_custom_builtins(os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase): + expected_output = remove_comment_lines(testcase.output) + + # Parse "# A: " comments. 
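+            # A source line annotated with "# A: message" means the report is
+            # expected to contain "main:<line number>: message" for that line.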
+ for i, line in enumerate(testcase.input): + if "# A:" in line: + msg = line.rpartition("# A:")[2].strip() + expected_output.append(f"main:{i + 1}: {msg}") + + ir = None + try: + ir, tree, type_map, mapper = build_ir_for_single_file2(testcase.input, options) + except CompileError as e: + actual = e.messages + else: + annotations = generate_annotations("native.py", tree, ir, type_map, mapper) + actual = [] + for line_num, line_anns in sorted( + annotations.annotations.items(), key=lambda it: it[0] + ): + anns = get_max_prio(line_anns) + str_anns = [a.message for a in anns] + s = " ".join(str_anns) + actual.append(f"main:{line_num}: {s}") + + try: + assert_test_output(testcase, actual, "Invalid source code output", expected_output) + except BaseException: + if ir: + print("Generated IR:\n") + for fn in ir.functions: + if fn.name == "__top_level__": + continue + for s in format_func(fn): + print(s) + raise diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_cheader.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_cheader.py new file mode 100644 index 0000000..ec9e2c4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_cheader.py @@ -0,0 +1,43 @@ +"""Test that C functions used in primitives are declared in a header such as CPy.h.""" + +from __future__ import annotations + +import glob +import os +import re +import unittest + +from mypyc.primitives import registry + + +class TestHeaderInclusion(unittest.TestCase): + def test_primitives_included_in_header(self) -> None: + base_dir = os.path.join(os.path.dirname(__file__), "..", "lib-rt") + with open(os.path.join(base_dir, "CPy.h")) as f: + header = f.read() + with open(os.path.join(base_dir, "pythonsupport.h")) as f: + header += f.read() + + def check_name(name: str) -> None: + if name.startswith("CPy"): + assert re.search( + rf"\b{name}\b", header + ), f'"{name}" is used in mypyc.primitives but not declared in CPy.h' + + for values in [ + registry.method_call_ops.values(), + registry.binary_ops.values(), + registry.unary_ops.values(), + registry.function_ops.values(), + ]: + for ops in values: + for op in ops: + if op.c_function_name is not None: + check_name(op.c_function_name) + + primitives_path = os.path.join(os.path.dirname(__file__), "..", "primitives") + for fnam in glob.glob(f"{primitives_path}/*.py"): + with open(fnam) as f: + content = f.read() + for name in re.findall(r'c_function_name=["\'](CPy[A-Z_a-z0-9]+)', content): + check_name(name) diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_commandline.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_commandline.py new file mode 100644 index 0000000..f66ca2e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_commandline.py @@ -0,0 +1,82 @@ +"""Test cases for invoking mypyc on the command line. + +These are slow -- do not add test cases unless you have a very good reason to do so. 
+""" + +from __future__ import annotations + +import glob +import os +import os.path +import re +import subprocess +import sys + +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase +from mypy.test.helpers import normalize_error_messages +from mypyc.test.testutil import MypycDataSuite, assert_test_output + +files = ["commandline.test"] + + +base_path = os.path.join(os.path.dirname(__file__), "..", "..") + +python3_path = sys.executable + + +class TestCommandLine(MypycDataSuite): + files = files + base_path = test_temp_dir + optional_out = True + + def run_case(self, testcase: DataDrivenTestCase) -> None: + # Parse options from test case description (arguments must not have spaces) + text = "\n".join(testcase.input) + m = re.search(r"# *cmd: *(.*)", text) + assert m is not None, 'Test case missing "# cmd: " section' + args = m.group(1).split() + + # Write main program to run (not compiled) + program = "_%s.py" % testcase.name + program_path = os.path.join(test_temp_dir, program) + with open(program_path, "w") as f: + f.write(text) + + env = os.environ.copy() + env["PYTHONPATH"] = base_path + + out = b"" + try: + # Compile program + cmd = subprocess.run( + [sys.executable, "-m", "mypyc", *args], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + cwd="tmp", + env=env, + ) + if "ErrorOutput" in testcase.name or cmd.returncode != 0: + out += cmd.stdout + elif "WarningOutput" in testcase.name: + # Strip out setuptools build related output since we're only + # interested in the messages emitted during compilation. + messages, _, _ = cmd.stdout.partition(b"running build_ext") + out += messages + + if cmd.returncode == 0: + # Run main program + out += subprocess.check_output([python3_path, program], cwd="tmp") + finally: + suffix = "pyd" if sys.platform == "win32" else "so" + so_paths = glob.glob(f"tmp/**/*.{suffix}", recursive=True) + for path in so_paths: + os.remove(path) + + # Strip out 'tmp/' from error message paths in the testcase output, + # due to a mismatch between this test and mypy's test suite. 
+ expected = [x.replace("tmp/", "") for x in testcase.output] + + # Verify output + actual = normalize_error_messages(out.decode().splitlines()) + assert_test_output(testcase, actual, "Invalid output", expected=expected) diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_emit.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_emit.py new file mode 100644 index 0000000..1baed39 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_emit.py @@ -0,0 +1,170 @@ +from __future__ import annotations + +import unittest + +from mypyc.codegen.emit import Emitter, EmitterContext +from mypyc.common import HAVE_IMMORTAL +from mypyc.ir.class_ir import ClassIR +from mypyc.ir.ops import BasicBlock, Register, Value +from mypyc.ir.rtypes import ( + RInstance, + RTuple, + RUnion, + bool_rprimitive, + int_rprimitive, + list_rprimitive, + none_rprimitive, + object_rprimitive, + str_rprimitive, +) +from mypyc.irbuild.vtable import compute_vtable +from mypyc.namegen import NameGenerator + + +class TestEmitter(unittest.TestCase): + def setUp(self) -> None: + self.n = Register(int_rprimitive, "n") + self.context = EmitterContext(NameGenerator([["mod"]])) + self.emitter = Emitter(self.context, {}) + + ir = ClassIR("A", "mod") + compute_vtable(ir) + ir.mro = [ir] + self.instance_a = RInstance(ir) + + def test_label(self) -> None: + assert self.emitter.label(BasicBlock(4)) == "CPyL4" + + def test_reg(self) -> None: + names: dict[Value, str] = {self.n: "n"} + emitter = Emitter(self.context, names) + assert emitter.reg(self.n) == "cpy_r_n" + + def test_object_annotation(self) -> None: + assert self.emitter.object_annotation("hello, world", "line;") == " /* 'hello, world' */" + assert ( + self.emitter.object_annotation(list(range(30)), "line;") + == """\ + /* [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, + 23, 24, 25, 26, 27, 28, 29] */""" + ) + + def test_emit_line(self) -> None: + emitter = self.emitter + emitter.emit_line("line;") + emitter.emit_line("a {") + emitter.emit_line("f();") + emitter.emit_line("}") + assert emitter.fragments == ["line;\n", "a {\n", " f();\n", "}\n"] + emitter = Emitter(self.context, {}) + emitter.emit_line("CPyStatics[0];", ann="hello, world") + emitter.emit_line("CPyStatics[1];", ann=list(range(30))) + assert emitter.fragments[0] == "CPyStatics[0]; /* 'hello, world' */\n" + assert ( + emitter.fragments[1] + == """\ +CPyStatics[1]; /* [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29] */\n""" + ) + + def test_emit_undefined_value_for_simple_type(self) -> None: + emitter = self.emitter + assert emitter.c_undefined_value(int_rprimitive) == "CPY_INT_TAG" + assert emitter.c_undefined_value(str_rprimitive) == "NULL" + assert emitter.c_undefined_value(bool_rprimitive) == "2" + + def test_emit_undefined_value_for_tuple(self) -> None: + emitter = self.emitter + assert ( + emitter.c_undefined_value(RTuple([str_rprimitive, int_rprimitive, bool_rprimitive])) + == "(tuple_T3OIC) { NULL, CPY_INT_TAG, 2 }" + ) + assert emitter.c_undefined_value(RTuple([str_rprimitive])) == "(tuple_T1O) { NULL }" + assert ( + emitter.c_undefined_value(RTuple([RTuple([str_rprimitive]), bool_rprimitive])) + == "(tuple_T2T1OC) { { NULL }, 2 }" + ) + + def test_emit_inc_ref_object(self) -> None: + self.emitter.emit_inc_ref("x", object_rprimitive) + self.assert_output("CPy_INCREF(x);\n") + + def test_emit_inc_ref_int(self) -> None: + self.emitter.emit_inc_ref("x", int_rprimitive) + 
self.assert_output("CPyTagged_INCREF(x);\n") + + def test_emit_inc_ref_rare(self) -> None: + self.emitter.emit_inc_ref("x", object_rprimitive, rare=True) + self.assert_output("CPy_INCREF(x);\n") + self.emitter.emit_inc_ref("x", int_rprimitive, rare=True) + self.assert_output("CPyTagged_IncRef(x);\n") + + def test_emit_inc_ref_list(self) -> None: + self.emitter.emit_inc_ref("x", list_rprimitive) + if HAVE_IMMORTAL: + self.assert_output("CPy_INCREF_NO_IMM(x);\n") + else: + self.assert_output("CPy_INCREF(x);\n") + + def test_emit_inc_ref_instance(self) -> None: + self.emitter.emit_inc_ref("x", self.instance_a) + if HAVE_IMMORTAL: + self.assert_output("CPy_INCREF_NO_IMM(x);\n") + else: + self.assert_output("CPy_INCREF(x);\n") + + def test_emit_inc_ref_optional(self) -> None: + optional = RUnion([self.instance_a, none_rprimitive]) + self.emitter.emit_inc_ref("o", optional) + self.assert_output("CPy_INCREF(o);\n") + + def test_emit_dec_ref_object(self) -> None: + self.emitter.emit_dec_ref("x", object_rprimitive) + self.assert_output("CPy_DECREF(x);\n") + self.emitter.emit_dec_ref("x", object_rprimitive, is_xdec=True) + self.assert_output("CPy_XDECREF(x);\n") + + def test_emit_dec_ref_int(self) -> None: + self.emitter.emit_dec_ref("x", int_rprimitive) + self.assert_output("CPyTagged_DECREF(x);\n") + self.emitter.emit_dec_ref("x", int_rprimitive, is_xdec=True) + self.assert_output("CPyTagged_XDECREF(x);\n") + + def test_emit_dec_ref_rare(self) -> None: + self.emitter.emit_dec_ref("x", object_rprimitive, rare=True) + self.assert_output("CPy_DecRef(x);\n") + self.emitter.emit_dec_ref("x", int_rprimitive, rare=True) + self.assert_output("CPyTagged_DecRef(x);\n") + + def test_emit_dec_ref_list(self) -> None: + self.emitter.emit_dec_ref("x", list_rprimitive) + if HAVE_IMMORTAL: + self.assert_output("CPy_DECREF_NO_IMM(x);\n") + else: + self.assert_output("CPy_DECREF(x);\n") + self.emitter.emit_dec_ref("x", list_rprimitive, is_xdec=True) + if HAVE_IMMORTAL: + self.assert_output("CPy_XDECREF_NO_IMM(x);\n") + else: + self.assert_output("CPy_XDECREF(x);\n") + + def test_emit_dec_ref_instance(self) -> None: + self.emitter.emit_dec_ref("x", self.instance_a) + if HAVE_IMMORTAL: + self.assert_output("CPy_DECREF_NO_IMM(x);\n") + else: + self.assert_output("CPy_DECREF(x);\n") + self.emitter.emit_dec_ref("x", self.instance_a, is_xdec=True) + if HAVE_IMMORTAL: + self.assert_output("CPy_XDECREF_NO_IMM(x);\n") + else: + self.assert_output("CPy_XDECREF(x);\n") + + def test_emit_dec_ref_optional(self) -> None: + optional = RUnion([self.instance_a, none_rprimitive]) + self.emitter.emit_dec_ref("o", optional) + self.assert_output("CPy_DECREF(o);\n") + + def assert_output(self, expected: str) -> None: + assert "".join(self.emitter.fragments) == expected + self.emitter.fragments = [] diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_emitclass.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_emitclass.py new file mode 100644 index 0000000..eb04b22 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_emitclass.py @@ -0,0 +1,35 @@ +from __future__ import annotations + +import unittest + +from mypyc.codegen.emitclass import getter_name, setter_name, slot_key +from mypyc.ir.class_ir import ClassIR +from mypyc.namegen import NameGenerator + + +class TestEmitClass(unittest.TestCase): + def test_slot_key(self) -> None: + attrs = ["__add__", "__radd__", "__rshift__", "__rrshift__", "__setitem__", "__delitem__"] + s = sorted(attrs, key=lambda x: slot_key(x)) + # __delitem__ and reverse methods 
should come last. + assert s == [ + "__add__", + "__rshift__", + "__setitem__", + "__delitem__", + "__radd__", + "__rrshift__", + ] + + def test_setter_name(self) -> None: + cls = ClassIR(module_name="testing", name="SomeClass") + generator = NameGenerator([["mod"]]) + + # This should never be `setup`, as it will conflict with the class `setup` + assert setter_name(cls, "up", generator) == "testing___SomeClass_set_up" + + def test_getter_name(self) -> None: + cls = ClassIR(module_name="testing", name="SomeClass") + generator = NameGenerator([["mod"]]) + + assert getter_name(cls, "down", generator) == "testing___SomeClass_get_down" diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_emitfunc.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_emitfunc.py new file mode 100644 index 0000000..6382271 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_emitfunc.py @@ -0,0 +1,1010 @@ +from __future__ import annotations + +import unittest + +from mypy.test.helpers import assert_string_arrays_equal +from mypyc.codegen.emit import Emitter, EmitterContext +from mypyc.codegen.emitfunc import FunctionEmitterVisitor, generate_native_function +from mypyc.common import HAVE_IMMORTAL, PLATFORM_SIZE +from mypyc.ir.class_ir import ClassIR +from mypyc.ir.func_ir import FuncDecl, FuncIR, FuncSignature, RuntimeArg +from mypyc.ir.ops import ( + ERR_NEVER, + Assign, + AssignMulti, + BasicBlock, + Box, + Branch, + Call, + CallC, + Cast, + ComparisonOp, + CString, + DecRef, + Extend, + GetAttr, + GetElementPtr, + Goto, + IncRef, + Integer, + IntOp, + LoadAddress, + LoadLiteral, + LoadMem, + Op, + Register, + Return, + SetAttr, + SetElement, + SetMem, + TupleGet, + Unbox, + Undef, + Unreachable, + Value, +) +from mypyc.ir.pprint import generate_names_for_ir +from mypyc.ir.rtypes import ( + RArray, + RInstance, + RStruct, + RTuple, + RType, + bool_rprimitive, + c_int_rprimitive, + cstring_rprimitive, + dict_rprimitive, + int32_rprimitive, + int64_rprimitive, + int_rprimitive, + list_rprimitive, + none_rprimitive, + object_rprimitive, + pointer_rprimitive, + short_int_rprimitive, +) +from mypyc.irbuild.vtable import compute_vtable +from mypyc.namegen import NameGenerator +from mypyc.primitives.dict_ops import ( + dict_get_item_op, + dict_new_op, + dict_set_item_op, + dict_update_op, +) +from mypyc.primitives.int_ops import int_neg_op +from mypyc.primitives.list_ops import list_append_op, list_get_item_op, list_set_item_op +from mypyc.primitives.misc_ops import none_object_op +from mypyc.primitives.registry import binary_ops +from mypyc.subtype import is_subtype + + +class TestFunctionEmitterVisitor(unittest.TestCase): + """Test generation of fragments of C from individual IR ops.""" + + def setUp(self) -> None: + self.registers: list[Register] = [] + + def add_local(name: str, rtype: RType) -> Register: + reg = Register(rtype, name) + self.registers.append(reg) + return reg + + self.n = add_local("n", int_rprimitive) + self.m = add_local("m", int_rprimitive) + self.k = add_local("k", int_rprimitive) + self.l = add_local("l", list_rprimitive) + self.ll = add_local("ll", list_rprimitive) + self.o = add_local("o", object_rprimitive) + self.o2 = add_local("o2", object_rprimitive) + self.d = add_local("d", dict_rprimitive) + self.b = add_local("b", bool_rprimitive) + self.s1 = add_local("s1", short_int_rprimitive) + self.s2 = add_local("s2", short_int_rprimitive) + self.i32 = add_local("i32", int32_rprimitive) + self.i32_1 = add_local("i32_1", int32_rprimitive) + self.i64 = 
add_local("i64", int64_rprimitive) + self.i64_1 = add_local("i64_1", int64_rprimitive) + self.ptr = add_local("ptr", pointer_rprimitive) + self.t = add_local("t", RTuple([int_rprimitive, bool_rprimitive])) + self.tt = add_local( + "tt", RTuple([RTuple([int_rprimitive, bool_rprimitive]), bool_rprimitive]) + ) + ir = ClassIR("A", "mod") + ir.attributes = { + "x": bool_rprimitive, + "y": int_rprimitive, + "i1": int64_rprimitive, + "i2": int32_rprimitive, + "t": RTuple([object_rprimitive, object_rprimitive]), + } + ir.bitmap_attrs = ["i1", "i2"] + compute_vtable(ir) + ir.mro = [ir] + self.r = add_local("r", RInstance(ir)) + self.none = add_local("none", none_rprimitive) + + self.struct_type = RStruct( + "Foo", ["b", "x", "y"], [bool_rprimitive, int32_rprimitive, int64_rprimitive] + ) + self.st = add_local("st", self.struct_type) + + self.context = EmitterContext(NameGenerator([["mod"]])) + + def test_goto(self) -> None: + self.assert_emit(Goto(BasicBlock(2)), "goto CPyL2;") + + def test_goto_next_block(self) -> None: + next_block = BasicBlock(2) + self.assert_emit(Goto(next_block), "", next_block=next_block) + + def test_return(self) -> None: + self.assert_emit(Return(self.m), "return cpy_r_m;") + + def test_integer(self) -> None: + self.assert_emit(Assign(self.n, Integer(5)), "cpy_r_n = 10;") + self.assert_emit(Assign(self.i32, Integer(5, c_int_rprimitive)), "cpy_r_i32 = 5;") + + def test_tuple_get(self) -> None: + self.assert_emit(TupleGet(self.t, 1, 0), "cpy_r_r0 = cpy_r_t.f1;") + + def test_load_None(self) -> None: # noqa: N802 + self.assert_emit( + LoadAddress(none_object_op.type, none_object_op.src, 0), + "cpy_r_r0 = (PyObject *)&_Py_NoneStruct;", + ) + + def test_assign_int(self) -> None: + self.assert_emit(Assign(self.m, self.n), "cpy_r_m = cpy_r_n;") + + def test_int_add(self) -> None: + self.assert_emit_binary_op( + "+", self.n, self.m, self.k, "cpy_r_r0 = CPyTagged_Add(cpy_r_m, cpy_r_k);" + ) + + def test_int_sub(self) -> None: + self.assert_emit_binary_op( + "-", self.n, self.m, self.k, "cpy_r_r0 = CPyTagged_Subtract(cpy_r_m, cpy_r_k);" + ) + + def test_int_neg(self) -> None: + assert int_neg_op.c_function_name is not None + self.assert_emit( + CallC( + int_neg_op.c_function_name, + [self.m], + int_neg_op.return_type, + int_neg_op.steals, + int_neg_op.is_borrowed, + int_neg_op.is_borrowed, + int_neg_op.error_kind, + 55, + ), + "cpy_r_r0 = CPyTagged_Negate(cpy_r_m);", + ) + + def test_branch(self) -> None: + self.assert_emit( + Branch(self.b, BasicBlock(8), BasicBlock(9), Branch.BOOL), + """if (cpy_r_b) { + goto CPyL8; + } else + goto CPyL9; + """, + ) + b = Branch(self.b, BasicBlock(8), BasicBlock(9), Branch.BOOL) + b.negated = True + self.assert_emit( + b, + """if (!cpy_r_b) { + goto CPyL8; + } else + goto CPyL9; + """, + ) + + def test_branch_no_else(self) -> None: + next_block = BasicBlock(9) + b = Branch(self.b, BasicBlock(8), next_block, Branch.BOOL) + self.assert_emit(b, """if (cpy_r_b) goto CPyL8;""", next_block=next_block) + next_block = BasicBlock(9) + b = Branch(self.b, BasicBlock(8), next_block, Branch.BOOL) + b.negated = True + self.assert_emit(b, """if (!cpy_r_b) goto CPyL8;""", next_block=next_block) + + def test_branch_no_else_negated(self) -> None: + next_block = BasicBlock(1) + b = Branch(self.b, next_block, BasicBlock(2), Branch.BOOL) + self.assert_emit(b, """if (!cpy_r_b) goto CPyL2;""", next_block=next_block) + next_block = BasicBlock(1) + b = Branch(self.b, next_block, BasicBlock(2), Branch.BOOL) + b.negated = True + self.assert_emit(b, """if (cpy_r_b) goto 
CPyL2;""", next_block=next_block) + + def test_branch_is_error(self) -> None: + b = Branch(self.b, BasicBlock(8), BasicBlock(9), Branch.IS_ERROR) + self.assert_emit( + b, + """if (cpy_r_b == 2) { + goto CPyL8; + } else + goto CPyL9; + """, + ) + b = Branch(self.b, BasicBlock(8), BasicBlock(9), Branch.IS_ERROR) + b.negated = True + self.assert_emit( + b, + """if (cpy_r_b != 2) { + goto CPyL8; + } else + goto CPyL9; + """, + ) + + def test_branch_is_error_next_block(self) -> None: + next_block = BasicBlock(8) + b = Branch(self.b, next_block, BasicBlock(9), Branch.IS_ERROR) + self.assert_emit(b, """if (cpy_r_b != 2) goto CPyL9;""", next_block=next_block) + b = Branch(self.b, next_block, BasicBlock(9), Branch.IS_ERROR) + b.negated = True + self.assert_emit(b, """if (cpy_r_b == 2) goto CPyL9;""", next_block=next_block) + + def test_branch_rare(self) -> None: + self.assert_emit( + Branch(self.b, BasicBlock(8), BasicBlock(9), Branch.BOOL, rare=True), + """if (unlikely(cpy_r_b)) { + goto CPyL8; + } else + goto CPyL9; + """, + ) + next_block = BasicBlock(9) + self.assert_emit( + Branch(self.b, BasicBlock(8), next_block, Branch.BOOL, rare=True), + """if (unlikely(cpy_r_b)) goto CPyL8;""", + next_block=next_block, + ) + next_block = BasicBlock(8) + b = Branch(self.b, next_block, BasicBlock(9), Branch.BOOL, rare=True) + self.assert_emit(b, """if (likely(!cpy_r_b)) goto CPyL9;""", next_block=next_block) + next_block = BasicBlock(8) + b = Branch(self.b, next_block, BasicBlock(9), Branch.BOOL, rare=True) + b.negated = True + self.assert_emit(b, """if (likely(cpy_r_b)) goto CPyL9;""", next_block=next_block) + + def test_call(self) -> None: + decl = FuncDecl( + "myfn", None, "mod", FuncSignature([RuntimeArg("m", int_rprimitive)], int_rprimitive) + ) + self.assert_emit(Call(decl, [self.m], 55), "cpy_r_r0 = CPyDef_myfn(cpy_r_m);") + + def test_call_two_args(self) -> None: + decl = FuncDecl( + "myfn", + None, + "mod", + FuncSignature( + [RuntimeArg("m", int_rprimitive), RuntimeArg("n", int_rprimitive)], int_rprimitive + ), + ) + self.assert_emit( + Call(decl, [self.m, self.k], 55), "cpy_r_r0 = CPyDef_myfn(cpy_r_m, cpy_r_k);" + ) + + def test_inc_ref(self) -> None: + self.assert_emit(IncRef(self.o), "CPy_INCREF(cpy_r_o);") + self.assert_emit(IncRef(self.o), "CPy_INCREF(cpy_r_o);", rare=True) + + def test_dec_ref(self) -> None: + self.assert_emit(DecRef(self.o), "CPy_DECREF(cpy_r_o);") + self.assert_emit(DecRef(self.o), "CPy_DecRef(cpy_r_o);", rare=True) + + def test_inc_ref_int(self) -> None: + self.assert_emit(IncRef(self.m), "CPyTagged_INCREF(cpy_r_m);") + self.assert_emit(IncRef(self.m), "CPyTagged_IncRef(cpy_r_m);", rare=True) + + def test_dec_ref_int(self) -> None: + self.assert_emit(DecRef(self.m), "CPyTagged_DECREF(cpy_r_m);") + self.assert_emit(DecRef(self.m), "CPyTagged_DecRef(cpy_r_m);", rare=True) + + def test_dec_ref_tuple(self) -> None: + self.assert_emit(DecRef(self.t), "CPyTagged_DECREF(cpy_r_t.f0);") + + def test_dec_ref_tuple_nested(self) -> None: + self.assert_emit(DecRef(self.tt), "CPyTagged_DECREF(cpy_r_tt.f0.f0);") + + def test_list_get_item(self) -> None: + self.assert_emit( + CallC( + str(list_get_item_op.c_function_name), + [self.m, self.k], + list_get_item_op.return_type, + list_get_item_op.steals, + list_get_item_op.is_borrowed, + list_get_item_op.error_kind, + 55, + ), + """cpy_r_r0 = CPyList_GetItem(cpy_r_m, cpy_r_k);""", + ) + + def test_list_set_item(self) -> None: + self.assert_emit( + CallC( + str(list_set_item_op.c_function_name), + [self.l, self.n, self.o], + 
list_set_item_op.return_type, + list_set_item_op.steals, + list_set_item_op.is_borrowed, + list_set_item_op.error_kind, + 55, + ), + """cpy_r_r0 = CPyList_SetItem(cpy_r_l, cpy_r_n, cpy_r_o);""", + ) + + def test_box_int(self) -> None: + self.assert_emit(Box(self.n), """cpy_r_r0 = CPyTagged_StealAsObject(cpy_r_n);""") + + def test_unbox_int(self) -> None: + self.assert_emit( + Unbox(self.m, int_rprimitive, 55), + """if (likely(PyLong_Check(cpy_r_m))) + cpy_r_r0 = CPyTagged_FromObject(cpy_r_m); + else { + CPy_TypeError("int", cpy_r_m); cpy_r_r0 = CPY_INT_TAG; + } + """, + ) + + def test_box_i64(self) -> None: + self.assert_emit(Box(self.i64), """cpy_r_r0 = PyLong_FromLongLong(cpy_r_i64);""") + + def test_unbox_i64(self) -> None: + self.assert_emit( + Unbox(self.o, int64_rprimitive, 55), """cpy_r_r0 = CPyLong_AsInt64(cpy_r_o);""" + ) + + def test_list_append(self) -> None: + self.assert_emit( + CallC( + str(list_append_op.c_function_name), + [self.l, self.o], + list_append_op.return_type, + list_append_op.steals, + list_append_op.is_borrowed, + list_append_op.error_kind, + 1, + ), + """cpy_r_r0 = PyList_Append(cpy_r_l, cpy_r_o);""", + ) + + def test_get_attr(self) -> None: + self.assert_emit( + GetAttr(self.r, "y", 1), + """cpy_r_r0 = ((mod___AObject *)cpy_r_r)->_y; + if (unlikely(cpy_r_r0 == CPY_INT_TAG)) { + PyErr_SetString(PyExc_AttributeError, "attribute 'y' of 'A' undefined"); + } else { + CPyTagged_INCREF(cpy_r_r0); + } + """, + ) + + def test_get_attr_non_refcounted(self) -> None: + self.assert_emit( + GetAttr(self.r, "x", 1), + """cpy_r_r0 = ((mod___AObject *)cpy_r_r)->_x; + if (unlikely(cpy_r_r0 == 2)) { + PyErr_SetString(PyExc_AttributeError, "attribute 'x' of 'A' undefined"); + } + """, + ) + + def test_get_attr_merged(self) -> None: + op = GetAttr(self.r, "y", 1) + branch = Branch(op, BasicBlock(8), BasicBlock(9), Branch.IS_ERROR) + branch.traceback_entry = ("foobar", 123) + self.assert_emit( + op, + """\ + cpy_r_r0 = ((mod___AObject *)cpy_r_r)->_y; + if (unlikely(cpy_r_r0 == CPY_INT_TAG)) { + CPy_AttributeError("prog.py", "foobar", "A", "y", 123, CPyStatic_prog___globals); + goto CPyL8; + } + CPyTagged_INCREF(cpy_r_r0); + goto CPyL9; + """, + next_branch=branch, + skip_next=True, + ) + + def test_get_attr_with_bitmap(self) -> None: + self.assert_emit( + GetAttr(self.r, "i1", 1), + """cpy_r_r0 = ((mod___AObject *)cpy_r_r)->_i1; + if (unlikely(cpy_r_r0 == -113) && !(((mod___AObject *)cpy_r_r)->bitmap & 1)) { + PyErr_SetString(PyExc_AttributeError, "attribute 'i1' of 'A' undefined"); + } + """, + ) + + def test_get_attr_nullable_with_tuple(self) -> None: + self.assert_emit( + GetAttr(self.r, "t", 1, allow_error_value=True), + """cpy_r_r0 = ((mod___AObject *)cpy_r_r)->_t; + if (cpy_r_r0.f0 != NULL) { + CPy_INCREF(cpy_r_r0.f0); + CPy_INCREF(cpy_r_r0.f1); + } + """, + ) + + def test_set_attr(self) -> None: + self.assert_emit( + SetAttr(self.r, "y", self.m, 1), + """if (((mod___AObject *)cpy_r_r)->_y != CPY_INT_TAG) { + CPyTagged_DECREF(((mod___AObject *)cpy_r_r)->_y); + } + ((mod___AObject *)cpy_r_r)->_y = cpy_r_m; + cpy_r_r0 = 1; + """, + ) + + def test_set_attr_non_refcounted(self) -> None: + self.assert_emit( + SetAttr(self.r, "x", self.b, 1), + """((mod___AObject *)cpy_r_r)->_x = cpy_r_b; + cpy_r_r0 = 1; + """, + ) + + def test_set_attr_no_error(self) -> None: + op = SetAttr(self.r, "y", self.m, 1) + op.error_kind = ERR_NEVER + self.assert_emit( + op, + """if (((mod___AObject *)cpy_r_r)->_y != CPY_INT_TAG) { + CPyTagged_DECREF(((mod___AObject *)cpy_r_r)->_y); + } + ((mod___AObject 
*)cpy_r_r)->_y = cpy_r_m; + """, + ) + + def test_set_attr_non_refcounted_no_error(self) -> None: + op = SetAttr(self.r, "x", self.b, 1) + op.error_kind = ERR_NEVER + self.assert_emit( + op, + """((mod___AObject *)cpy_r_r)->_x = cpy_r_b; + """, + ) + + def test_set_attr_with_bitmap(self) -> None: + # For some rtypes the error value overlaps a valid value, so we need + # to use a separate bitmap to track defined attributes. + self.assert_emit( + SetAttr(self.r, "i1", self.i64, 1), + """if (unlikely(cpy_r_i64 == -113)) { + ((mod___AObject *)cpy_r_r)->bitmap |= 1; + } + ((mod___AObject *)cpy_r_r)->_i1 = cpy_r_i64; + cpy_r_r0 = 1; + """, + ) + self.assert_emit( + SetAttr(self.r, "i2", self.i32, 1), + """if (unlikely(cpy_r_i32 == -113)) { + ((mod___AObject *)cpy_r_r)->bitmap |= 2; + } + ((mod___AObject *)cpy_r_r)->_i2 = cpy_r_i32; + cpy_r_r0 = 1; + """, + ) + + def test_set_attr_init_with_bitmap(self) -> None: + op = SetAttr(self.r, "i1", self.i64, 1) + op.is_init = True + self.assert_emit( + op, + """if (unlikely(cpy_r_i64 == -113)) { + ((mod___AObject *)cpy_r_r)->bitmap |= 1; + } + ((mod___AObject *)cpy_r_r)->_i1 = cpy_r_i64; + cpy_r_r0 = 1; + """, + ) + + def test_dict_get_item(self) -> None: + self.assert_emit( + CallC( + str(dict_get_item_op.c_function_name), + [self.d, self.o2], + dict_get_item_op.return_type, + dict_get_item_op.steals, + dict_get_item_op.is_borrowed, + dict_get_item_op.error_kind, + 1, + ), + """cpy_r_r0 = CPyDict_GetItem(cpy_r_d, cpy_r_o2);""", + ) + + def test_dict_set_item(self) -> None: + self.assert_emit( + CallC( + str(dict_set_item_op.c_function_name), + [self.d, self.o, self.o2], + dict_set_item_op.return_type, + dict_set_item_op.steals, + dict_set_item_op.is_borrowed, + dict_set_item_op.error_kind, + 1, + ), + """cpy_r_r0 = CPyDict_SetItem(cpy_r_d, cpy_r_o, cpy_r_o2);""", + ) + + def test_dict_update(self) -> None: + self.assert_emit( + CallC( + str(dict_update_op.c_function_name), + [self.d, self.o], + dict_update_op.return_type, + dict_update_op.steals, + dict_update_op.is_borrowed, + dict_update_op.error_kind, + 1, + ), + """cpy_r_r0 = CPyDict_Update(cpy_r_d, cpy_r_o);""", + ) + + def test_new_dict(self) -> None: + self.assert_emit( + CallC( + dict_new_op.c_function_name, + [], + dict_new_op.return_type, + dict_new_op.steals, + dict_new_op.is_borrowed, + dict_new_op.error_kind, + 1, + ), + """cpy_r_r0 = PyDict_New();""", + ) + + def test_dict_contains(self) -> None: + self.assert_emit_binary_op( + "in", self.b, self.o, self.d, """cpy_r_r0 = PyDict_Contains(cpy_r_d, cpy_r_o);""" + ) + + def test_int_op(self) -> None: + self.assert_emit( + IntOp(short_int_rprimitive, self.s1, self.s2, IntOp.ADD, 1), + """cpy_r_r0 = cpy_r_s1 + cpy_r_s2;""", + ) + self.assert_emit( + IntOp(short_int_rprimitive, self.s1, self.s2, IntOp.SUB, 1), + """cpy_r_r0 = cpy_r_s1 - cpy_r_s2;""", + ) + self.assert_emit( + IntOp(short_int_rprimitive, self.s1, self.s2, IntOp.MUL, 1), + """cpy_r_r0 = cpy_r_s1 * cpy_r_s2;""", + ) + self.assert_emit( + IntOp(short_int_rprimitive, self.s1, self.s2, IntOp.DIV, 1), + """cpy_r_r0 = cpy_r_s1 / cpy_r_s2;""", + ) + self.assert_emit( + IntOp(short_int_rprimitive, self.s1, self.s2, IntOp.MOD, 1), + """cpy_r_r0 = cpy_r_s1 % cpy_r_s2;""", + ) + self.assert_emit( + IntOp(short_int_rprimitive, self.s1, self.s2, IntOp.AND, 1), + """cpy_r_r0 = cpy_r_s1 & cpy_r_s2;""", + ) + self.assert_emit( + IntOp(short_int_rprimitive, self.s1, self.s2, IntOp.OR, 1), + """cpy_r_r0 = cpy_r_s1 | cpy_r_s2;""", + ) + self.assert_emit( + IntOp(short_int_rprimitive, self.s1, 
self.s2, IntOp.XOR, 1), + """cpy_r_r0 = cpy_r_s1 ^ cpy_r_s2;""", + ) + self.assert_emit( + IntOp(short_int_rprimitive, self.s1, self.s2, IntOp.LEFT_SHIFT, 1), + """cpy_r_r0 = cpy_r_s1 << cpy_r_s2;""", + ) + self.assert_emit( + IntOp(short_int_rprimitive, self.s1, self.s2, IntOp.RIGHT_SHIFT, 1), + """cpy_r_r0 = (Py_ssize_t)cpy_r_s1 >> (Py_ssize_t)cpy_r_s2;""", + ) + self.assert_emit( + IntOp(short_int_rprimitive, self.i64, self.i64_1, IntOp.RIGHT_SHIFT, 1), + """cpy_r_r0 = cpy_r_i64 >> cpy_r_i64_1;""", + ) + + def test_comparison_op(self) -> None: + # signed + self.assert_emit( + ComparisonOp(self.s1, self.s2, ComparisonOp.SLT, 1), + """cpy_r_r0 = (Py_ssize_t)cpy_r_s1 < (Py_ssize_t)cpy_r_s2;""", + ) + self.assert_emit( + ComparisonOp(self.i32, self.i32_1, ComparisonOp.SLT, 1), + """cpy_r_r0 = cpy_r_i32 < cpy_r_i32_1;""", + ) + self.assert_emit( + ComparisonOp(self.i64, self.i64_1, ComparisonOp.SLT, 1), + """cpy_r_r0 = cpy_r_i64 < cpy_r_i64_1;""", + ) + # unsigned + self.assert_emit( + ComparisonOp(self.s1, self.s2, ComparisonOp.ULT, 1), + """cpy_r_r0 = cpy_r_s1 < cpy_r_s2;""", + ) + self.assert_emit( + ComparisonOp(self.i32, self.i32_1, ComparisonOp.ULT, 1), + """cpy_r_r0 = (uint32_t)cpy_r_i32 < (uint32_t)cpy_r_i32_1;""", + ) + self.assert_emit( + ComparisonOp(self.i64, self.i64_1, ComparisonOp.ULT, 1), + """cpy_r_r0 = (uint64_t)cpy_r_i64 < (uint64_t)cpy_r_i64_1;""", + ) + + # object type + self.assert_emit( + ComparisonOp(self.o, self.o2, ComparisonOp.EQ, 1), + """cpy_r_r0 = cpy_r_o == cpy_r_o2;""", + ) + self.assert_emit( + ComparisonOp(self.o, self.o2, ComparisonOp.NEQ, 1), + """cpy_r_r0 = cpy_r_o != cpy_r_o2;""", + ) + + def test_load_mem(self) -> None: + self.assert_emit(LoadMem(bool_rprimitive, self.ptr), """cpy_r_r0 = *(char *)cpy_r_ptr;""") + + def test_set_mem(self) -> None: + self.assert_emit( + SetMem(bool_rprimitive, self.ptr, self.b), """*(char *)cpy_r_ptr = cpy_r_b;""" + ) + + def test_get_element_ptr(self) -> None: + r = RStruct( + "Foo", ["b", "i32", "i64"], [bool_rprimitive, int32_rprimitive, int64_rprimitive] + ) + self.assert_emit( + GetElementPtr(self.o, r, "b"), """cpy_r_r0 = (CPyPtr)&((Foo *)cpy_r_o)->b;""" + ) + self.assert_emit( + GetElementPtr(self.o, r, "i32"), """cpy_r_r0 = (CPyPtr)&((Foo *)cpy_r_o)->i32;""" + ) + self.assert_emit( + GetElementPtr(self.o, r, "i64"), """cpy_r_r0 = (CPyPtr)&((Foo *)cpy_r_o)->i64;""" + ) + + def test_set_element(self) -> None: + # Use compact syntax when setting the initial element of an undefined value + self.assert_emit( + SetElement(Undef(self.struct_type), "b", self.b), """cpy_r_r0.b = cpy_r_b;""" + ) + # We propagate the unchanged values in subsequent assignments + self.assert_emit( + SetElement(self.st, "x", self.i32), + """cpy_r_r0 = (Foo) { cpy_r_st.b, cpy_r_i32, cpy_r_st.y };""", + ) + + def test_load_address(self) -> None: + self.assert_emit( + LoadAddress(object_rprimitive, "PyDict_Type"), + """cpy_r_r0 = (PyObject *)&PyDict_Type;""", + ) + + def test_assign_multi(self) -> None: + t = RArray(object_rprimitive, 2) + a = Register(t, "a") + self.registers.append(a) + self.assert_emit( + AssignMulti(a, [self.o, self.o2]), """PyObject *cpy_r_a[2] = {cpy_r_o, cpy_r_o2};""" + ) + + def test_long_unsigned(self) -> None: + a = Register(int64_rprimitive, "a") + self.assert_emit( + Assign(a, Integer(1 << 31, int64_rprimitive)), """cpy_r_a = 2147483648LL;""" + ) + self.assert_emit( + Assign(a, Integer((1 << 31) - 1, int64_rprimitive)), """cpy_r_a = 2147483647;""" + ) + + def test_long_signed(self) -> None: + a = 
Register(int64_rprimitive, "a") + self.assert_emit( + Assign(a, Integer(-(1 << 31) + 1, int64_rprimitive)), """cpy_r_a = -2147483647;""" + ) + self.assert_emit( + Assign(a, Integer(-(1 << 31), int64_rprimitive)), """cpy_r_a = -2147483648LL;""" + ) + + def test_cast_and_branch_merge(self) -> None: + op = Cast(self.r, dict_rprimitive, 1) + next_block = BasicBlock(9) + branch = Branch(op, BasicBlock(8), next_block, Branch.IS_ERROR) + branch.traceback_entry = ("foobar", 123) + self.assert_emit( + op, + """\ +if (likely(PyDict_Check(cpy_r_r))) + cpy_r_r0 = cpy_r_r; +else { + CPy_TypeErrorTraceback("prog.py", "foobar", 123, CPyStatic_prog___globals, "dict", cpy_r_r); + goto CPyL8; +} +""", + next_block=next_block, + next_branch=branch, + skip_next=True, + ) + + def test_cast_and_branch_no_merge_1(self) -> None: + op = Cast(self.r, dict_rprimitive, 1) + branch = Branch(op, BasicBlock(8), BasicBlock(9), Branch.IS_ERROR) + branch.traceback_entry = ("foobar", 123) + self.assert_emit( + op, + """\ + if (likely(PyDict_Check(cpy_r_r))) + cpy_r_r0 = cpy_r_r; + else { + CPy_TypeError("dict", cpy_r_r); + cpy_r_r0 = NULL; + } + """, + next_block=BasicBlock(10), + next_branch=branch, + skip_next=False, + ) + + def test_cast_and_branch_no_merge_2(self) -> None: + op = Cast(self.r, dict_rprimitive, 1) + next_block = BasicBlock(9) + branch = Branch(op, BasicBlock(8), next_block, Branch.IS_ERROR) + branch.negated = True + branch.traceback_entry = ("foobar", 123) + self.assert_emit( + op, + """\ + if (likely(PyDict_Check(cpy_r_r))) + cpy_r_r0 = cpy_r_r; + else { + CPy_TypeError("dict", cpy_r_r); + cpy_r_r0 = NULL; + } + """, + next_block=next_block, + next_branch=branch, + ) + + def test_cast_and_branch_no_merge_3(self) -> None: + op = Cast(self.r, dict_rprimitive, 1) + next_block = BasicBlock(9) + branch = Branch(op, BasicBlock(8), next_block, Branch.BOOL) + branch.traceback_entry = ("foobar", 123) + self.assert_emit( + op, + """\ + if (likely(PyDict_Check(cpy_r_r))) + cpy_r_r0 = cpy_r_r; + else { + CPy_TypeError("dict", cpy_r_r); + cpy_r_r0 = NULL; + } + """, + next_block=next_block, + next_branch=branch, + ) + + def test_cast_and_branch_no_merge_4(self) -> None: + op = Cast(self.r, dict_rprimitive, 1) + next_block = BasicBlock(9) + branch = Branch(op, BasicBlock(8), next_block, Branch.IS_ERROR) + self.assert_emit( + op, + """\ + if (likely(PyDict_Check(cpy_r_r))) + cpy_r_r0 = cpy_r_r; + else { + CPy_TypeError("dict", cpy_r_r); + cpy_r_r0 = NULL; + } + """, + next_block=next_block, + next_branch=branch, + ) + + def test_extend(self) -> None: + a = Register(int32_rprimitive, "a") + self.assert_emit(Extend(a, int64_rprimitive, signed=True), """cpy_r_r0 = cpy_r_a;""") + self.assert_emit( + Extend(a, int64_rprimitive, signed=False), """cpy_r_r0 = (uint32_t)cpy_r_a;""" + ) + if PLATFORM_SIZE == 4: + self.assert_emit( + Extend(self.n, int64_rprimitive, signed=True), + """cpy_r_r0 = (Py_ssize_t)cpy_r_n;""", + ) + self.assert_emit( + Extend(self.n, int64_rprimitive, signed=False), """cpy_r_r0 = cpy_r_n;""" + ) + if PLATFORM_SIZE == 8: + self.assert_emit(Extend(a, int_rprimitive, signed=True), """cpy_r_r0 = cpy_r_a;""") + self.assert_emit( + Extend(a, int_rprimitive, signed=False), """cpy_r_r0 = (uint32_t)cpy_r_a;""" + ) + + def test_inc_ref_none(self) -> None: + b = Box(self.none) + self.assert_emit([b, IncRef(b)], "" if HAVE_IMMORTAL else "CPy_INCREF(cpy_r_r0);") + + def test_inc_ref_bool(self) -> None: + b = Box(self.b) + self.assert_emit([b, IncRef(b)], "" if HAVE_IMMORTAL else "CPy_INCREF(cpy_r_r0);") + + def 
test_inc_ref_int_literal(self) -> None: + for x in -5, 0, 1, 5, 255, 256: + b = LoadLiteral(x, object_rprimitive) + self.assert_emit([b, IncRef(b)], "" if HAVE_IMMORTAL else "CPy_INCREF(cpy_r_r0);") + for x in -1123355, -6, 257, 123235345: + b = LoadLiteral(x, object_rprimitive) + self.assert_emit([b, IncRef(b)], "CPy_INCREF(cpy_r_r0);") + + def test_c_string(self) -> None: + s = Register(cstring_rprimitive, "s") + self.assert_emit(Assign(s, CString(b"foo")), """cpy_r_s = "foo";""") + self.assert_emit(Assign(s, CString(b'foo "o')), r"""cpy_r_s = "foo \"o";""") + self.assert_emit(Assign(s, CString(b"\x00")), r"""cpy_r_s = "\x00";""") + self.assert_emit(Assign(s, CString(b"\\")), r"""cpy_r_s = "\\";""") + for i in range(256): + b = bytes([i]) + if b == b"\n": + target = "\\n" + elif b == b"\r": + target = "\\r" + elif b == b"\t": + target = "\\t" + elif b == b'"': + target = '\\"' + elif b == b"\\": + target = "\\\\" + elif i < 32 or i >= 127: + target = "\\x%.2x" % i + else: + target = b.decode("ascii") + self.assert_emit(Assign(s, CString(b)), f'cpy_r_s = "{target}";') + + def assert_emit( + self, + op: Op | list[Op], + expected: str, + next_block: BasicBlock | None = None, + *, + rare: bool = False, + next_branch: Branch | None = None, + skip_next: bool = False, + ) -> None: + block = BasicBlock(0) + if isinstance(op, Op): + block.ops.append(op) + else: + block.ops.extend(op) + op = op[-1] + value_names = generate_names_for_ir(self.registers, [block]) + emitter = Emitter(self.context, value_names) + declarations = Emitter(self.context, value_names) + emitter.fragments = [] + declarations.fragments = [] + + visitor = FunctionEmitterVisitor(emitter, declarations, "prog.py", "prog") + visitor.next_block = next_block + visitor.rare = rare + if next_branch: + visitor.ops = [op, next_branch] + else: + visitor.ops = [op] + visitor.op_index = 0 + + op.accept(visitor) + frags = declarations.fragments + emitter.fragments + actual_lines = [line.strip(" ") for line in frags] + assert all(line.endswith("\n") for line in actual_lines) + actual_lines = [line.rstrip("\n") for line in actual_lines] + if not expected.strip(): + expected_lines = [] + else: + expected_lines = expected.rstrip().split("\n") + expected_lines = [line.strip(" ") for line in expected_lines] + assert_string_arrays_equal( + expected_lines, actual_lines, msg="Generated code unexpected", traceback=True + ) + if skip_next: + assert visitor.op_index == 1 + else: + assert visitor.op_index == 0 + + def assert_emit_binary_op( + self, op: str, dest: Value, left: Value, right: Value, expected: str + ) -> None: + if op in binary_ops: + ops = binary_ops[op] + for desc in ops: + if is_subtype(left.type, desc.arg_types[0]) and is_subtype( + right.type, desc.arg_types[1] + ): + args = [left, right] + if desc.ordering is not None: + args = [args[i] for i in desc.ordering] + # This only supports primitives that map to C calls + assert desc.c_function_name is not None + self.assert_emit( + CallC( + desc.c_function_name, + args, + desc.return_type, + desc.steals, + desc.is_borrowed, + desc.error_kind, + 55, + ), + expected, + ) + return + else: + assert False, "Could not find matching op" + + +class TestGenerateFunction(unittest.TestCase): + def setUp(self) -> None: + self.arg = RuntimeArg("arg", int_rprimitive) + self.reg = Register(int_rprimitive, "arg") + self.block = BasicBlock(0) + + def test_simple(self) -> None: + self.block.ops.append(Return(self.reg)) + fn = FuncIR( + FuncDecl("myfunc", None, "mod", FuncSignature([self.arg], 
int_rprimitive)), + [self.reg], + [self.block], + ) + value_names = generate_names_for_ir(fn.arg_regs, fn.blocks) + emitter = Emitter(EmitterContext(NameGenerator([["mod"]])), value_names) + generate_native_function(fn, emitter, "prog.py", "prog") + result = emitter.fragments + assert_string_arrays_equal( + ["CPyTagged CPyDef_myfunc(CPyTagged cpy_r_arg) {\n", " return cpy_r_arg;\n", "}\n"], + result, + msg="Generated code invalid", + ) + + def test_register(self) -> None: + reg = Register(int_rprimitive) + op = Assign(reg, Integer(5)) + self.block.ops.append(op) + self.block.ops.append(Unreachable()) + fn = FuncIR( + FuncDecl("myfunc", None, "mod", FuncSignature([self.arg], list_rprimitive)), + [self.reg], + [self.block], + ) + value_names = generate_names_for_ir(fn.arg_regs, fn.blocks) + emitter = Emitter(EmitterContext(NameGenerator([["mod"]])), value_names) + generate_native_function(fn, emitter, "prog.py", "prog") + result = emitter.fragments + assert_string_arrays_equal( + [ + "PyObject *CPyDef_myfunc(CPyTagged cpy_r_arg) {\n", + " CPyTagged cpy_r_r0;\n", + " cpy_r_r0 = 10;\n", + " CPy_Unreachable();\n", + "}\n", + ], + result, + msg="Generated code invalid", + ) diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_emitwrapper.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_emitwrapper.py new file mode 100644 index 0000000..c446565 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_emitwrapper.py @@ -0,0 +1,60 @@ +from __future__ import annotations + +import unittest + +from mypy.test.helpers import assert_string_arrays_equal +from mypyc.codegen.emit import Emitter, EmitterContext, ReturnHandler +from mypyc.codegen.emitwrapper import generate_arg_check +from mypyc.ir.rtypes import int_rprimitive, list_rprimitive +from mypyc.namegen import NameGenerator + + +class TestArgCheck(unittest.TestCase): + def setUp(self) -> None: + self.context = EmitterContext(NameGenerator([["mod"]])) + + def test_check_list(self) -> None: + emitter = Emitter(self.context) + generate_arg_check("x", list_rprimitive, emitter, ReturnHandler("NULL")) + lines = emitter.fragments + self.assert_lines( + [ + "PyObject *arg_x;", + "if (likely(PyList_Check(obj_x)))", + " arg_x = obj_x;", + "else {", + ' CPy_TypeError("list", obj_x);', + " return NULL;", + "}", + ], + lines, + ) + + def test_check_int(self) -> None: + emitter = Emitter(self.context) + generate_arg_check("x", int_rprimitive, emitter, ReturnHandler("NULL")) + generate_arg_check("y", int_rprimitive, emitter, ReturnHandler("NULL"), optional=True) + lines = emitter.fragments + self.assert_lines( + [ + "CPyTagged arg_x;", + "if (likely(PyLong_Check(obj_x)))", + " arg_x = CPyTagged_BorrowFromObject(obj_x);", + "else {", + ' CPy_TypeError("int", obj_x); return NULL;', + "}", + "CPyTagged arg_y;", + "if (obj_y == NULL) {", + " arg_y = CPY_INT_TAG;", + "} else if (likely(PyLong_Check(obj_y)))", + " arg_y = CPyTagged_BorrowFromObject(obj_y);", + "else {", + ' CPy_TypeError("int", obj_y); return NULL;', + "}", + ], + lines, + ) + + def assert_lines(self, expected: list[str], actual: list[str]) -> None: + actual = [line.rstrip("\n") for line in actual] + assert_string_arrays_equal(expected, actual, "Invalid output") diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_exceptions.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_exceptions.py new file mode 100644 index 0000000..71587e6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_exceptions.py @@ -0,0 +1,56 @@ +"""Test runner for 
exception handling transform test cases. + +The transform inserts exception handling branch operations to IR. +""" + +from __future__ import annotations + +import os.path + +from mypy.errors import CompileError +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase +from mypyc.analysis.blockfreq import frequently_executed_blocks +from mypyc.common import TOP_LEVEL_NAME +from mypyc.ir.pprint import format_func +from mypyc.test.testutil import ( + ICODE_GEN_BUILTINS, + MypycDataSuite, + assert_test_output, + build_ir_for_single_file, + remove_comment_lines, + use_custom_builtins, +) +from mypyc.transform.exceptions import insert_exception_handling +from mypyc.transform.refcount import insert_ref_count_opcodes +from mypyc.transform.uninit import insert_uninit_checks + +files = ["exceptions.test", "exceptions-freq.test"] + + +class TestExceptionTransform(MypycDataSuite): + files = files + base_path = test_temp_dir + + def run_case(self, testcase: DataDrivenTestCase) -> None: + """Perform a runtime checking transformation test case.""" + with use_custom_builtins(os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase): + expected_output = remove_comment_lines(testcase.output) + try: + ir = build_ir_for_single_file(testcase.input) + except CompileError as e: + actual = e.messages + else: + actual = [] + for fn in ir: + if fn.name == TOP_LEVEL_NAME and not testcase.name.endswith("_toplevel"): + continue + insert_uninit_checks(fn) + insert_exception_handling(fn) + insert_ref_count_opcodes(fn) + actual.extend(format_func(fn)) + if testcase.name.endswith("_freq"): + common = frequently_executed_blocks(fn.blocks[0]) + actual.append("hot blocks: %s" % sorted(b.label for b in common)) + + assert_test_output(testcase, actual, "Invalid source code output", expected_output) diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_external.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_external.py new file mode 100644 index 0000000..a416cf2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_external.py @@ -0,0 +1,52 @@ +"""Test cases that run tests as subprocesses.""" + +from __future__ import annotations + +import os +import subprocess +import sys +import tempfile +import unittest + +base_dir = os.path.join(os.path.dirname(__file__), "..", "..") + + +class TestExternal(unittest.TestCase): + # TODO: Get this to work on Windows. + # (Or don't. It is probably not a good use of time.) + @unittest.skipIf(sys.platform.startswith("win"), "rt tests don't work on windows") + def test_c_unit_test(self) -> None: + """Run C unit tests in a subprocess.""" + cppflags: list[str] = [] + env = os.environ.copy() + if sys.platform == "darwin": + cppflags += ["-O0", "-mmacosx-version-min=10.10", "-stdlib=libc++"] + elif sys.platform == "linux": + cppflags += ["-O0"] + env["CPPFLAGS"] = " ".join(cppflags) + # Build Python wrapper for C unit tests. + + with tempfile.TemporaryDirectory() as tmpdir: + status = subprocess.check_call( + [ + sys.executable, + "setup.py", + "build_ext", + f"--build-lib={tmpdir}", + f"--build-temp={tmpdir}", + "--run-capi-tests", + ], + env=env, + cwd=os.path.join(base_dir, "mypyc", "lib-rt"), + ) + # Run C unit tests. 
+ env = os.environ.copy() + if "GTEST_COLOR" not in os.environ: + env["GTEST_COLOR"] = "yes" # Use fancy colors + status = subprocess.call( + [sys.executable, "-c", "import sys, test_capi; sys.exit(test_capi.run_tests())"], + env=env, + cwd=tmpdir, + ) + if status != 0: + raise AssertionError("make test: C unit test failure") diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_irbuild.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_irbuild.py new file mode 100644 index 0000000..7c24864 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_irbuild.py @@ -0,0 +1,93 @@ +"""Test cases for IR generation.""" + +from __future__ import annotations + +import os.path +import sys + +from mypy.errors import CompileError +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase +from mypyc.common import IS_FREE_THREADED, TOP_LEVEL_NAME +from mypyc.ir.pprint import format_func +from mypyc.test.testutil import ( + ICODE_GEN_BUILTINS, + MypycDataSuite, + assert_test_output, + build_ir_for_single_file, + infer_ir_build_options_from_test_name, + remove_comment_lines, + replace_word_size, + use_custom_builtins, +) + +files = [ + "irbuild-basic.test", + "irbuild-int.test", + "irbuild-bool.test", + "irbuild-lists.test", + "irbuild-tuple.test", + "irbuild-dict.test", + "irbuild-set.test", + "irbuild-str.test", + "irbuild-bytes.test", + "irbuild-float.test", + "irbuild-frozenset.test", + "irbuild-statements.test", + "irbuild-nested.test", + "irbuild-classes.test", + "irbuild-optional.test", + "irbuild-any.test", + "irbuild-generics.test", + "irbuild-try.test", + "irbuild-strip-asserts.test", + "irbuild-i64.test", + "irbuild-i32.test", + "irbuild-i16.test", + "irbuild-u8.test", + "irbuild-vectorcall.test", + "irbuild-unreachable.test", + "irbuild-isinstance.test", + "irbuild-dunders.test", + "irbuild-singledispatch.test", + "irbuild-constant-fold.test", + "irbuild-glue-methods.test", + "irbuild-math.test", + "irbuild-weakref.test", + "irbuild-base64.test", +] + +if sys.version_info >= (3, 10): + files.append("irbuild-match.test") + + +class TestGenOps(MypycDataSuite): + files = files + base_path = test_temp_dir + optional_out = True + + def run_case(self, testcase: DataDrivenTestCase) -> None: + """Perform a runtime checking transformation test case.""" + options = infer_ir_build_options_from_test_name(testcase.name) + if options is None: + # Skipped test case + return + if "_withgil" in testcase.name and IS_FREE_THREADED: + # Test case should only run on a non-free-threaded build. 
+ return + with use_custom_builtins(os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase): + expected_output = remove_comment_lines(testcase.output) + expected_output = replace_word_size(expected_output) + name = testcase.name + try: + ir = build_ir_for_single_file(testcase.input, options) + except CompileError as e: + actual = e.messages + else: + actual = [] + for fn in ir: + if fn.name == TOP_LEVEL_NAME and not name.endswith("_toplevel"): + continue + actual.extend(format_func(fn)) + + assert_test_output(testcase, actual, "Invalid source code output", expected_output) diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_ircheck.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_ircheck.py new file mode 100644 index 0000000..7f7063c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_ircheck.py @@ -0,0 +1,199 @@ +from __future__ import annotations + +import unittest + +from mypyc.analysis.ircheck import FnError, can_coerce_to, check_func_ir +from mypyc.ir.class_ir import ClassIR +from mypyc.ir.func_ir import FuncDecl, FuncIR, FuncSignature +from mypyc.ir.ops import ( + Assign, + BasicBlock, + Goto, + Integer, + LoadAddress, + LoadLiteral, + Op, + Register, + Return, +) +from mypyc.ir.pprint import format_func +from mypyc.ir.rtypes import ( + RInstance, + RType, + RUnion, + bytes_rprimitive, + int32_rprimitive, + int64_rprimitive, + none_rprimitive, + object_rprimitive, + pointer_rprimitive, + str_rprimitive, +) + + +def assert_has_error(fn: FuncIR, error: FnError) -> None: + errors = check_func_ir(fn) + assert errors == [error] + + +def assert_no_errors(fn: FuncIR) -> None: + assert not check_func_ir(fn) + + +NONE_VALUE = Integer(0, rtype=none_rprimitive) + + +class TestIrcheck(unittest.TestCase): + def setUp(self) -> None: + self.label = 0 + + def basic_block(self, ops: list[Op]) -> BasicBlock: + self.label += 1 + block = BasicBlock(self.label) + block.ops = ops + return block + + def func_decl(self, name: str, ret_type: RType | None = None) -> FuncDecl: + if ret_type is None: + ret_type = none_rprimitive + return FuncDecl( + name=name, + class_name=None, + module_name="module", + sig=FuncSignature(args=[], ret_type=ret_type), + ) + + def test_valid_fn(self) -> None: + assert_no_errors( + FuncIR( + decl=self.func_decl(name="func_1"), + arg_regs=[], + blocks=[self.basic_block(ops=[Return(value=NONE_VALUE)])], + ) + ) + + def test_block_not_terminated_empty_block(self) -> None: + block = self.basic_block([]) + fn = FuncIR(decl=self.func_decl(name="func_1"), arg_regs=[], blocks=[block]) + assert_has_error(fn, FnError(source=block, desc="Block not terminated")) + + def test_valid_goto(self) -> None: + block_1 = self.basic_block([Return(value=NONE_VALUE)]) + block_2 = self.basic_block([Goto(label=block_1)]) + fn = FuncIR(decl=self.func_decl(name="func_1"), arg_regs=[], blocks=[block_1, block_2]) + assert_no_errors(fn) + + def test_invalid_goto(self) -> None: + block_1 = self.basic_block([Return(value=NONE_VALUE)]) + goto = Goto(label=block_1) + block_2 = self.basic_block([goto]) + fn = FuncIR( + decl=self.func_decl(name="func_1"), + arg_regs=[], + # block_1 omitted + blocks=[block_2], + ) + assert_has_error(fn, FnError(source=goto, desc="Invalid control operation target: 1")) + + def test_invalid_register_source(self) -> None: + ret = Return(value=Register(type=none_rprimitive, name="r1")) + block = self.basic_block([ret]) + fn = FuncIR(decl=self.func_decl(name="func_1"), arg_regs=[], blocks=[block]) + assert_has_error(fn, FnError(source=ret, 
desc="Invalid op reference to register 'r1'")) + + def test_invalid_op_source(self) -> None: + ret = Return(value=LoadLiteral(value="foo", rtype=str_rprimitive)) + block = self.basic_block([ret]) + fn = FuncIR(decl=self.func_decl(name="func_1"), arg_regs=[], blocks=[block]) + assert_has_error( + fn, FnError(source=ret, desc="Invalid op reference to op of type LoadLiteral") + ) + + def test_invalid_return_type(self) -> None: + ret = Return(value=Integer(value=5, rtype=int32_rprimitive)) + fn = FuncIR( + decl=self.func_decl(name="func_1", ret_type=int64_rprimitive), + arg_regs=[], + blocks=[self.basic_block([ret])], + ) + assert_has_error( + fn, FnError(source=ret, desc="Cannot coerce source type i32 to dest type i64") + ) + + def test_invalid_assign(self) -> None: + arg_reg = Register(type=int64_rprimitive, name="r1") + assign = Assign(dest=arg_reg, src=Integer(value=5, rtype=int32_rprimitive)) + ret = Return(value=NONE_VALUE) + fn = FuncIR( + decl=self.func_decl(name="func_1"), + arg_regs=[arg_reg], + blocks=[self.basic_block([assign, ret])], + ) + assert_has_error( + fn, FnError(source=assign, desc="Cannot coerce source type i32 to dest type i64") + ) + + def test_can_coerce_to(self) -> None: + cls = ClassIR(name="Cls", module_name="cls") + valid_cases = [ + (int64_rprimitive, int64_rprimitive), + (str_rprimitive, str_rprimitive), + (str_rprimitive, object_rprimitive), + (object_rprimitive, str_rprimitive), + (RUnion([bytes_rprimitive, str_rprimitive]), str_rprimitive), + (str_rprimitive, RUnion([bytes_rprimitive, str_rprimitive])), + (RInstance(cls), object_rprimitive), + ] + + invalid_cases = [ + (int64_rprimitive, int32_rprimitive), + (RInstance(cls), str_rprimitive), + (str_rprimitive, bytes_rprimitive), + ] + + for src, dest in valid_cases: + assert can_coerce_to(src, dest) + for src, dest in invalid_cases: + assert not can_coerce_to(src, dest) + + def test_duplicate_op(self) -> None: + arg_reg = Register(type=int32_rprimitive, name="r1") + assign = Assign(dest=arg_reg, src=Integer(value=5, rtype=int32_rprimitive)) + block = self.basic_block([assign, assign, Return(value=NONE_VALUE)]) + fn = FuncIR(decl=self.func_decl(name="func_1"), arg_regs=[], blocks=[block]) + assert_has_error(fn, FnError(source=assign, desc="Func has a duplicate op")) + + def test_pprint(self) -> None: + block_1 = self.basic_block([Return(value=NONE_VALUE)]) + goto = Goto(label=block_1) + block_2 = self.basic_block([goto]) + fn = FuncIR( + decl=self.func_decl(name="func_1"), + arg_regs=[], + # block_1 omitted + blocks=[block_2], + ) + errors = [(goto, "Invalid control operation target: 1")] + formatted = format_func(fn, errors) + assert formatted == [ + "def func_1():", + "L0:", + " goto L1", + " ERR: Invalid control operation target: 1", + ] + + def test_load_address_declares_register(self) -> None: + rx = Register(str_rprimitive, "x") + ry = Register(pointer_rprimitive, "y") + load_addr = LoadAddress(pointer_rprimitive, rx) + assert_no_errors( + FuncIR( + decl=self.func_decl(name="func_1"), + arg_regs=[], + blocks=[ + self.basic_block( + ops=[load_addr, Assign(ry, load_addr), Return(value=NONE_VALUE)] + ) + ], + ) + ) diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_literals.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_literals.py new file mode 100644 index 0000000..a8c17d1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_literals.py @@ -0,0 +1,90 @@ +"""Test code geneneration for literals.""" + +from __future__ import annotations + +import unittest + +from 
mypyc.codegen.literals import ( + Literals, + _encode_bytes_values, + _encode_int_values, + _encode_str_values, + format_str_literal, +) + + +class TestLiterals(unittest.TestCase): + def test_format_str_literal(self) -> None: + assert format_str_literal("") == b"\x00" + assert format_str_literal("xyz") == b"\x03xyz" + assert format_str_literal("x" * 127) == b"\x7f" + b"x" * 127 + assert format_str_literal("x" * 128) == b"\x81\x00" + b"x" * 128 + assert format_str_literal("x" * 131) == b"\x81\x03" + b"x" * 131 + + def test_encode_str_values(self) -> None: + assert _encode_str_values({}) == [b""] + assert _encode_str_values({"foo": 0}) == [b"\x01\x03foo", b""] + assert _encode_str_values({"foo": 0, "b": 1}) == [b"\x02\x03foo\x01b", b""] + assert _encode_str_values({"foo": 0, "x" * 70: 1}) == [ + b"\x01\x03foo", + bytes([1, 70]) + b"x" * 70, + b"", + ] + assert _encode_str_values({"y" * 100: 0}) == [bytes([1, 100]) + b"y" * 100, b""] + + def test_encode_bytes_values(self) -> None: + assert _encode_bytes_values({}) == [b""] + assert _encode_bytes_values({b"foo": 0}) == [b"\x01\x03foo", b""] + assert _encode_bytes_values({b"foo": 0, b"b": 1}) == [b"\x02\x03foo\x01b", b""] + assert _encode_bytes_values({b"foo": 0, b"x" * 70: 1}) == [ + b"\x01\x03foo", + bytes([1, 70]) + b"x" * 70, + b"", + ] + assert _encode_bytes_values({b"y" * 100: 0}) == [bytes([1, 100]) + b"y" * 100, b""] + + def test_encode_int_values(self) -> None: + assert _encode_int_values({}) == [b""] + assert _encode_int_values({123: 0}) == [b"\x01123", b""] + assert _encode_int_values({123: 0, 9: 1}) == [b"\x02123\x009", b""] + assert _encode_int_values({123: 0, 45: 1, 5 * 10**70: 2}) == [ + b"\x02123\x0045", + b"\x015" + b"0" * 70, + b"", + ] + assert _encode_int_values({6 * 10**100: 0}) == [b"\x016" + b"0" * 100, b""] + + def test_simple_literal_index(self) -> None: + lit = Literals() + lit.record_literal(1) + lit.record_literal("y") + lit.record_literal(True) + lit.record_literal(None) + lit.record_literal(False) + assert lit.literal_index(None) == 0 + assert lit.literal_index(False) == 1 + assert lit.literal_index(True) == 2 + assert lit.literal_index("y") == 3 + assert lit.literal_index(1) == 4 + + def test_tuple_literal(self) -> None: + lit = Literals() + lit.record_literal((1, "y", None, (b"a", "b"))) + lit.record_literal((b"a", "b")) + lit.record_literal(()) + assert lit.literal_index((b"a", "b")) == 7 + assert lit.literal_index((1, "y", None, (b"a", "b"))) == 8 + assert lit.literal_index(()) == 9 + print(lit.encoded_tuple_values()) + assert lit.encoded_tuple_values() == [ + "3", # Number of tuples + "2", + "5", + "4", # First tuple (length=2) + "4", + "6", + "3", + "0", + "7", # Second tuple (length=4) + "0", # Third tuple (length=0) + ] diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_lowering.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_lowering.py new file mode 100644 index 0000000..86745b6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_lowering.py @@ -0,0 +1,61 @@ +"""Runner for lowering transform tests.""" + +from __future__ import annotations + +import os.path + +from mypy.errors import CompileError +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase +from mypyc.common import TOP_LEVEL_NAME +from mypyc.ir.pprint import format_func +from mypyc.options import CompilerOptions +from mypyc.test.testutil import ( + ICODE_GEN_BUILTINS, + MypycDataSuite, + assert_test_output, + build_ir_for_single_file, + 
infer_ir_build_options_from_test_name, + remove_comment_lines, + replace_word_size, + use_custom_builtins, +) +from mypyc.transform.exceptions import insert_exception_handling +from mypyc.transform.flag_elimination import do_flag_elimination +from mypyc.transform.lower import lower_ir +from mypyc.transform.refcount import insert_ref_count_opcodes +from mypyc.transform.uninit import insert_uninit_checks + + +class TestLowering(MypycDataSuite): + files = ["lowering-int.test", "lowering-list.test"] + base_path = test_temp_dir + + def run_case(self, testcase: DataDrivenTestCase) -> None: + options = infer_ir_build_options_from_test_name(testcase.name) + if options is None: + # Skipped test case + return + with use_custom_builtins(os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase): + expected_output = remove_comment_lines(testcase.output) + expected_output = replace_word_size(expected_output) + try: + ir = build_ir_for_single_file(testcase.input, options) + except CompileError as e: + actual = e.messages + else: + actual = [] + for fn in ir: + if fn.name == TOP_LEVEL_NAME and not testcase.name.endswith("_toplevel"): + continue + options = CompilerOptions() + # Lowering happens after exception handling and ref count opcodes have + # been added. Any changes must maintain reference counting semantics. + insert_uninit_checks(fn) + insert_exception_handling(fn) + insert_ref_count_opcodes(fn) + lower_ir(fn, options) + do_flag_elimination(fn, options) + actual.extend(format_func(fn)) + + assert_test_output(testcase, actual, "Invalid source code output", expected_output) diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_misc.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_misc.py new file mode 100644 index 0000000..f92da2c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_misc.py @@ -0,0 +1,20 @@ +from __future__ import annotations + +import unittest + +from mypyc.ir.ops import BasicBlock +from mypyc.ir.pprint import format_blocks, generate_names_for_ir +from mypyc.irbuild.ll_builder import LowLevelIRBuilder +from mypyc.options import CompilerOptions + + +class TestMisc(unittest.TestCase): + def test_debug_op(self) -> None: + block = BasicBlock() + builder = LowLevelIRBuilder(errors=None, options=CompilerOptions()) + builder.activate_block(block) + builder.debug_print("foo") + + names = generate_names_for_ir([], [block]) + code = format_blocks([block], names, {}) + assert code[:-1] == ["L0:", " r0 = 'foo'", " CPyDebug_PrintObject(r0)"] diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_namegen.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_namegen.py new file mode 100644 index 0000000..a468874 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_namegen.py @@ -0,0 +1,68 @@ +from __future__ import annotations + +import unittest + +from mypyc.namegen import ( + NameGenerator, + candidate_suffixes, + exported_name, + make_module_translation_map, +) + + +class TestNameGen(unittest.TestCase): + def test_candidate_suffixes(self) -> None: + assert candidate_suffixes("foo") == ["", "foo."] + assert candidate_suffixes("foo.bar") == ["", "bar.", "foo.bar."] + + def test_exported_name(self) -> None: + assert exported_name("foo") == "foo" + assert exported_name("foo.bar") == "foo___bar" + + def test_make_module_translation_map(self) -> None: + assert make_module_translation_map(["foo", "bar"]) == {"foo": "foo.", "bar": "bar."} + assert make_module_translation_map(["foo.bar", "foo.baz"]) == { + "foo.bar": "bar.", + 
"foo.baz": "baz.", + } + assert make_module_translation_map(["zar", "foo.bar", "foo.baz"]) == { + "foo.bar": "bar.", + "foo.baz": "baz.", + "zar": "zar.", + } + assert make_module_translation_map(["foo.bar", "fu.bar", "foo.baz"]) == { + "foo.bar": "foo.bar.", + "fu.bar": "fu.bar.", + "foo.baz": "baz.", + } + assert make_module_translation_map(["foo", "foo.foo", "bar.foo", "bar.foo.bar.foo"]) == { + "foo": "foo.", + "foo.foo": "foo.foo.", + "bar.foo": "bar.foo.", + "bar.foo.bar.foo": "foo.bar.foo.", + } + + def test_name_generator(self) -> None: + g = NameGenerator([["foo", "foo.zar"]]) + assert g.private_name("foo", "f") == "foo___f" + assert g.private_name("foo", "C.x.y") == "foo___C___x___y" + assert g.private_name("foo", "C.x.y") == "foo___C___x___y" + assert g.private_name("foo.zar", "C.x.y") == "zar___C___x___y" + assert g.private_name("foo", "C.x_y") == "foo___C___x_y" + assert g.private_name("foo", "C_x_y") == "foo___C_x_y" + assert g.private_name("foo", "C_x_y") == "foo___C_x_y" + assert g.private_name("foo", "___") == "foo______3_" + + g = NameGenerator([["foo.zar"]]) + assert g.private_name("foo.zar", "f") == "f" + + def test_name_generator_with_separate(self) -> None: + g = NameGenerator([["foo", "foo.zar"]], separate=True) + assert g.private_name("foo", "f") == "foo___f" + assert g.private_name("foo", "C.x.y") == "foo___C___x___y" + assert g.private_name("foo.zar", "C.x.y") == "foo___zar___C___x___y" + assert g.private_name("foo", "C.x_y") == "foo___C___x_y" + assert g.private_name("foo", "___") == "foo______3_" + + g = NameGenerator([["foo.zar"]], separate=True) + assert g.private_name("foo.zar", "f") == "foo___zar___f" diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_optimizations.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_optimizations.py new file mode 100644 index 0000000..3f1f46a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_optimizations.py @@ -0,0 +1,68 @@ +"""Runner for IR optimization tests.""" + +from __future__ import annotations + +import os.path + +from mypy.errors import CompileError +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase +from mypyc.common import TOP_LEVEL_NAME +from mypyc.ir.func_ir import FuncIR +from mypyc.ir.pprint import format_func +from mypyc.options import CompilerOptions +from mypyc.test.testutil import ( + ICODE_GEN_BUILTINS, + MypycDataSuite, + assert_test_output, + build_ir_for_single_file, + remove_comment_lines, + use_custom_builtins, +) +from mypyc.transform.copy_propagation import do_copy_propagation +from mypyc.transform.flag_elimination import do_flag_elimination +from mypyc.transform.uninit import insert_uninit_checks + + +class OptimizationSuite(MypycDataSuite): + """Base class for IR optimization test suites. + + To use this, add a base class and define "files" and "do_optimizations". 
+ """ + + base_path = test_temp_dir + + def run_case(self, testcase: DataDrivenTestCase) -> None: + with use_custom_builtins(os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase): + expected_output = remove_comment_lines(testcase.output) + try: + ir = build_ir_for_single_file(testcase.input) + except CompileError as e: + actual = e.messages + else: + actual = [] + for fn in ir: + if fn.name == TOP_LEVEL_NAME and not testcase.name.endswith("_toplevel"): + continue + insert_uninit_checks(fn) + self.do_optimizations(fn) + actual.extend(format_func(fn)) + + assert_test_output(testcase, actual, "Invalid source code output", expected_output) + + def do_optimizations(self, fn: FuncIR) -> None: + raise NotImplementedError + + +class TestCopyPropagation(OptimizationSuite): + files = ["opt-copy-propagation.test"] + + def do_optimizations(self, fn: FuncIR) -> None: + do_copy_propagation(fn, CompilerOptions()) + + +class TestFlagElimination(OptimizationSuite): + files = ["opt-flag-elimination.test"] + + def do_optimizations(self, fn: FuncIR) -> None: + do_flag_elimination(fn, CompilerOptions()) diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_pprint.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_pprint.py new file mode 100644 index 0000000..d9e2bdb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_pprint.py @@ -0,0 +1,42 @@ +from __future__ import annotations + +import unittest + +from mypyc.ir.ops import Assign, BasicBlock, Integer, IntOp, Op, Register, Unreachable +from mypyc.ir.pprint import generate_names_for_ir +from mypyc.ir.rtypes import int_rprimitive + + +def register(name: str) -> Register: + return Register(int_rprimitive, "foo", is_arg=True) + + +def make_block(ops: list[Op]) -> BasicBlock: + block = BasicBlock() + block.ops.extend(ops) + return block + + +class TestGenerateNames(unittest.TestCase): + def test_empty(self) -> None: + assert generate_names_for_ir([], []) == {} + + def test_arg(self) -> None: + reg = register("foo") + assert generate_names_for_ir([reg], []) == {reg: "foo"} + + def test_int_op(self) -> None: + n1 = Integer(2) + n2 = Integer(4) + op1 = IntOp(int_rprimitive, n1, n2, IntOp.ADD) + op2 = IntOp(int_rprimitive, op1, n2, IntOp.ADD) + block = make_block([op1, op2, Unreachable()]) + assert generate_names_for_ir([], [block]) == {op1: "r0", op2: "r1"} + + def test_assign(self) -> None: + reg = register("foo") + n = Integer(2) + op1 = Assign(reg, n) + op2 = Assign(reg, n) + block = make_block([op1, op2]) + assert generate_names_for_ir([reg], [block]) == {reg: "foo"} diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_rarray.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_rarray.py new file mode 100644 index 0000000..b8d788b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_rarray.py @@ -0,0 +1,48 @@ +"""Unit tests for RArray types.""" + +from __future__ import annotations + +import unittest + +from mypyc.common import PLATFORM_SIZE +from mypyc.ir.rtypes import ( + RArray, + bool_rprimitive, + compute_rtype_alignment, + compute_rtype_size, + int_rprimitive, +) + + +class TestRArray(unittest.TestCase): + def test_basics(self) -> None: + a = RArray(int_rprimitive, 10) + assert a.item_type == int_rprimitive + assert a.length == 10 + + def test_str_conversion(self) -> None: + a = RArray(int_rprimitive, 10) + assert str(a) == "int[10]" + assert repr(a) == "[10]>" + + def test_eq(self) -> None: + a = RArray(int_rprimitive, 10) + assert a == RArray(int_rprimitive, 10) + assert a != 
RArray(bool_rprimitive, 10) + assert a != RArray(int_rprimitive, 9) + + def test_hash(self) -> None: + assert hash(RArray(int_rprimitive, 10)) == hash(RArray(int_rprimitive, 10)) + assert hash(RArray(bool_rprimitive, 5)) == hash(RArray(bool_rprimitive, 5)) + + def test_alignment(self) -> None: + a = RArray(int_rprimitive, 10) + assert compute_rtype_alignment(a) == PLATFORM_SIZE + b = RArray(bool_rprimitive, 55) + assert compute_rtype_alignment(b) == 1 + + def test_size(self) -> None: + a = RArray(int_rprimitive, 9) + assert compute_rtype_size(a) == 9 * PLATFORM_SIZE + b = RArray(bool_rprimitive, 3) + assert compute_rtype_size(b) == 3 diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_refcount.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_refcount.py new file mode 100644 index 0000000..afeda89 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_refcount.py @@ -0,0 +1,59 @@ +"""Test runner for reference count opcode insertion transform test cases. + +The transform inserts needed reference count increment/decrement +operations to IR. +""" + +from __future__ import annotations + +import os.path + +from mypy.errors import CompileError +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase +from mypyc.common import TOP_LEVEL_NAME +from mypyc.ir.pprint import format_func +from mypyc.test.testutil import ( + ICODE_GEN_BUILTINS, + MypycDataSuite, + assert_test_output, + build_ir_for_single_file, + infer_ir_build_options_from_test_name, + remove_comment_lines, + replace_word_size, + use_custom_builtins, +) +from mypyc.transform.refcount import insert_ref_count_opcodes +from mypyc.transform.uninit import insert_uninit_checks + +files = ["refcount.test"] + + +class TestRefCountTransform(MypycDataSuite): + files = files + base_path = test_temp_dir + optional_out = True + + def run_case(self, testcase: DataDrivenTestCase) -> None: + """Perform a runtime checking transformation test case.""" + options = infer_ir_build_options_from_test_name(testcase.name) + if options is None: + # Skipped test case + return + with use_custom_builtins(os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase): + expected_output = remove_comment_lines(testcase.output) + expected_output = replace_word_size(expected_output) + try: + ir = build_ir_for_single_file(testcase.input, options) + except CompileError as e: + actual = e.messages + else: + actual = [] + for fn in ir: + if fn.name == TOP_LEVEL_NAME and not testcase.name.endswith("_toplevel"): + continue + insert_uninit_checks(fn) + insert_ref_count_opcodes(fn) + actual.extend(format_func(fn)) + + assert_test_output(testcase, actual, "Invalid source code output", expected_output) diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_run.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_run.py new file mode 100644 index 0000000..6b63a4d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_run.py @@ -0,0 +1,499 @@ +"""Test cases for building an C extension and running it.""" + +from __future__ import annotations + +import ast +import contextlib +import glob +import os.path +import re +import shutil +import subprocess +import sys +import time +from collections.abc import Iterator +from typing import Any + +from mypy import build +from mypy.errors import CompileError +from mypy.options import Options +from mypy.test.config import mypyc_output_dir, test_temp_dir +from mypy.test.data import DataDrivenTestCase +from mypy.test.helpers import 
assert_module_equivalence, perform_file_operations +from mypyc.build import construct_groups +from mypyc.codegen import emitmodule +from mypyc.errors import Errors +from mypyc.options import CompilerOptions +from mypyc.test.config import test_data_prefix +from mypyc.test.test_serialization import check_serialization_roundtrip +from mypyc.test.testutil import ( + ICODE_GEN_BUILTINS, + TESTUTIL_PATH, + MypycDataSuite, + assert_test_output, + fudge_dir_mtimes, + show_c, + use_custom_builtins, +) + +files = [ + "run-async.test", + "run-misc.test", + "run-functions.test", + "run-integers.test", + "run-i64.test", + "run-i32.test", + "run-i16.test", + "run-u8.test", + "run-floats.test", + "run-math.test", + "run-bools.test", + "run-strings.test", + "run-bytes.test", + "run-tuples.test", + "run-lists.test", + "run-dicts.test", + "run-sets.test", + "run-primitives.test", + "run-loops.test", + "run-exceptions.test", + "run-imports.test", + "run-classes.test", + "run-traits.test", + "run-generators.test", + "run-generics.test", + "run-multimodule.test", + "run-bench.test", + "run-mypy-sim.test", + "run-dunders.test", + "run-dunders-special.test", + "run-singledispatch.test", + "run-attrs.test", + "run-signatures.test", + "run-weakref.test", + "run-python37.test", + "run-python38.test", + "run-base64.test", +] + +if sys.version_info >= (3, 10): + files.append("run-match.test") +if sys.version_info >= (3, 12): + files.append("run-python312.test") + +setup_format = """\ +from setuptools import setup +from mypyc.build import mypycify + +setup(name='test_run_output', + ext_modules=mypycify({}, separate={}, skip_cgen_input={!r}, strip_asserts=False, + multi_file={}, opt_level='{}', install_librt={}, + experimental_features={}), +) +""" + +WORKDIR = "build" + + +def run_setup(script_name: str, script_args: list[str]) -> bool: + """Run a setup script in a somewhat controlled environment. + + This is adapted from code in distutils and our goal here is that is + faster to not need to spin up a python interpreter to run it. + + We had to fork it because the real run_setup swallows errors + and KeyboardInterrupt with no way to recover them (!). + The real version has some extra features that we removed since + we weren't using them. + + Returns whether the setup succeeded. + """ + save_argv = sys.argv.copy() + g = {"__file__": script_name} + try: + try: + sys.argv[0] = script_name + sys.argv[1:] = script_args + with open(script_name, "rb") as f: + exec(f.read(), g) + finally: + sys.argv = save_argv + except SystemExit as e: + # distutils converts KeyboardInterrupt into a SystemExit with + # "interrupted" as the argument. Convert it back so that + # pytest will exit instead of just failing the test. 
+ if e.code == "interrupted": + raise KeyboardInterrupt from e + + return e.code == 0 or e.code is None + + return True + + +@contextlib.contextmanager +def chdir_manager(target: str) -> Iterator[None]: + dir = os.getcwd() + os.chdir(target) + try: + yield + finally: + os.chdir(dir) + + +class TestRun(MypycDataSuite): + """Test cases that build a C extension and run code.""" + + files = files + base_path = test_temp_dir + optional_out = True + multi_file = False + separate = False # If True, using separate (incremental) compilation + strict_dunder_typing = False + + def run_case(self, testcase: DataDrivenTestCase) -> None: + # setup.py wants to be run from the root directory of the package, which we accommodate + # by chdiring into tmp/ + with ( + use_custom_builtins(os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase), + chdir_manager("tmp"), + ): + self.run_case_inner(testcase) + + def run_case_inner(self, testcase: DataDrivenTestCase) -> None: + if not os.path.isdir(WORKDIR): # (one test puts something in build...) + os.mkdir(WORKDIR) + + text = "\n".join(testcase.input) + + with open("native.py", "w", encoding="utf-8") as f: + f.write(text) + with open("interpreted.py", "w", encoding="utf-8") as f: + f.write(text) + + shutil.copyfile(TESTUTIL_PATH, "testutil.py") + + step = 1 + self.run_case_step(testcase, step) + + steps = testcase.find_steps() + if steps == [[]]: + steps = [] + + for operations in steps: + # To make sure that any new changes get picked up as being + # new by distutils, shift the mtime of all of the + # generated artifacts back by a second. + fudge_dir_mtimes(WORKDIR, -1) + # On some OS, changing the mtime doesn't work reliably. As + # a workaround, sleep. + # TODO: Figure out a better approach, since this slows down tests. + time.sleep(1.0) + + step += 1 + with chdir_manager(".."): + perform_file_operations(operations) + self.run_case_step(testcase, step) + + def run_case_step(self, testcase: DataDrivenTestCase, incremental_step: int) -> None: + bench = testcase.config.getoption("--bench", False) and "Benchmark" in testcase.name + + options = Options() + options.use_builtins_fixtures = True + options.show_traceback = True + options.strict_optional = True + options.python_version = sys.version_info[:2] + options.export_types = True + options.preserve_asts = True + options.allow_empty_bodies = True + options.incremental = self.separate + + # Avoid checking modules/packages named 'unchecked', to provide a way + # to test interacting with code we don't have types for. + options.per_module_options["unchecked.*"] = {"follow_imports": "error"} + + source = build.BuildSource("native.py", "native", None) + sources = [source] + module_names = ["native"] + module_paths = ["native.py"] + + # Hard code another module name to compile in the same compilation unit. 
+ to_delete = [] + for fn, text in testcase.files: + fn = os.path.relpath(fn, test_temp_dir) + + if os.path.basename(fn).startswith("other") and fn.endswith(".py"): + name = fn.split(".")[0].replace(os.sep, ".") + module_names.append(name) + sources.append(build.BuildSource(fn, name, None)) + to_delete.append(fn) + module_paths.append(fn) + + shutil.copyfile(fn, os.path.join(os.path.dirname(fn), name + "_interpreted.py")) + + for source in sources: + options.per_module_options.setdefault(source.module, {})["mypyc"] = True + + separate = ( + self.get_separate("\n".join(testcase.input), incremental_step) + if self.separate + else False + ) + + groups = construct_groups(sources, separate, len(module_names) > 1, None) + + # Use _librt_internal to test mypy-specific parts of librt (they have + # some special-casing in mypyc), for everything else use _librt suffix. + librt_internal = testcase.name.endswith("_librt_internal") + librt = testcase.name.endswith("_librt") or "_librt_" in testcase.name + # Enable experimental features (local librt build also includes experimental features) + experimental_features = testcase.name.endswith("_experimental") + try: + compiler_options = CompilerOptions( + multi_file=self.multi_file, + separate=self.separate, + strict_dunder_typing=self.strict_dunder_typing, + depends_on_librt_internal=librt_internal, + experimental_features=experimental_features, + ) + result = emitmodule.parse_and_typecheck( + sources=sources, + options=options, + compiler_options=compiler_options, + groups=groups, + alt_lib_path=".", + ) + errors = Errors(options) + ir, cfiles, _ = emitmodule.compile_modules_to_c( + result, compiler_options=compiler_options, errors=errors, groups=groups + ) + if errors.num_errors: + errors.flush_errors() + assert False, "Compile error" + except CompileError as e: + for line in e.messages: + print(fix_native_line_number(line, testcase.file, testcase.line)) + assert False, "Compile error" + + # Check that serialization works on this IR. (Only on the first + # step because the returned ir only includes updated code.) + if incremental_step == 1: + check_serialization_roundtrip(ir) + + opt_level = int(os.environ.get("MYPYC_OPT_LEVEL", 0)) + + setup_file = os.path.abspath(os.path.join(WORKDIR, "setup.py")) + # We pass the C file information to the build script via setup.py unfortunately + with open(setup_file, "w", encoding="utf-8") as f: + f.write( + setup_format.format( + module_paths, + separate, + cfiles, + self.multi_file, + opt_level, + librt, + experimental_features, + ) + ) + + if librt: + # This hack forces Python to prefer the local "installation". + os.makedirs("librt", exist_ok=True) + with open(os.path.join("librt", "__init__.py"), "a"): + pass + + if not run_setup(setup_file, ["build_ext", "--inplace"]): + if testcase.config.getoption("--mypyc-showc"): + show_c(cfiles) + copy_output_files(mypyc_output_dir) + assert False, "Compilation failed" + + # Assert that an output file got created + suffix = "pyd" if sys.platform == "win32" else "so" + assert glob.glob(f"native.*.{suffix}") or glob.glob(f"native.{suffix}") + + driver_path = "driver.py" + if not os.path.isfile(driver_path): + # No driver.py provided by test case. Use the default one + # (mypyc/test-data/driver/driver.py) that calls each + # function named test_*. 
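            # Illustrative sketch (not part of the upstream file): a test case may ship its own
            # driver.py; it only needs to exit with status 0 on success. A minimal hand-written
            # driver could look like
            #
            #     import native
            #     assert native.add(1, 2) == 3   # "add" is a hypothetical function from the case body
            #
            # The default driver copied in below instead calls every function named test_*.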
+ default_driver = os.path.join(test_data_prefix, "driver", "driver.py") + shutil.copy(default_driver, driver_path) + env = os.environ.copy() + env["MYPYC_RUN_BENCH"] = "1" if bench else "0" + + debugger = testcase.config.getoption("debugger") + if debugger: + if debugger == "lldb": + subprocess.check_call(["lldb", "--", sys.executable, driver_path], env=env) + elif debugger == "gdb": + subprocess.check_call(["gdb", "--args", sys.executable, driver_path], env=env) + else: + assert False, "Unsupported debugger" + # TODO: find a way to automatically disable capturing + # stdin/stdout when in debugging mode + assert False, ( + "Test can't pass in debugging mode. " + "(Make sure to pass -s to pytest to interact with the debugger)" + ) + proc = subprocess.Popen( + [sys.executable, driver_path], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + env=env, + ) + if sys.version_info >= (3, 12): + # TODO: testDecorators1 hangs on 3.12, remove this once fixed + proc.wait(timeout=30) + output = proc.communicate()[0].decode("utf8") + output = output.replace(f' File "{os.getcwd()}{os.sep}', ' File "') + outlines = output.splitlines() + + if testcase.config.getoption("--mypyc-showc"): + show_c(cfiles) + if proc.returncode != 0: + print() + signal = proc.returncode == -11 + extra = "" + if signal: + extra = " (likely segmentation fault)" + print(f"*** Exit status: {proc.returncode}{extra}") + if signal and not sys.platform.startswith("win"): + print() + if sys.platform == "darwin": + debugger = "lldb" + else: + debugger = "gdb" + print( + f'hint: Use "pytest -n0 -s --mypyc-debug={debugger} -k " to run test in debugger' + ) + print("hint: You may need to build a debug version of Python first and use it") + print('hint: See also "Debugging Segfaults" in mypyc/doc/dev-intro.md') + copy_output_files(mypyc_output_dir) + + # Verify output. + if bench: + print("Test output:") + print(output) + else: + if incremental_step == 1: + msg = "Invalid output" + expected = testcase.output + else: + msg = f"Invalid output (step {incremental_step})" + expected = testcase.output2.get(incremental_step, []) + + if not expected: + # Tweak some line numbers, but only if the expected output is empty, + # as tweaked output might not match expected output. + outlines = [ + fix_native_line_number(line, testcase.file, testcase.line) for line in outlines + ] + assert_test_output(testcase, outlines, msg, expected) + + if incremental_step > 1 and options.incremental: + suffix = "" if incremental_step == 2 else str(incremental_step - 1) + expected_rechecked = testcase.expected_rechecked_modules.get(incremental_step - 1) + if expected_rechecked is not None: + assert_module_equivalence( + "rechecked" + suffix, expected_rechecked, result.manager.rechecked_modules + ) + expected_stale = testcase.expected_stale_modules.get(incremental_step - 1) + if expected_stale is not None: + assert_module_equivalence( + "stale" + suffix, expected_stale, result.manager.stale_modules + ) + + assert proc.returncode == 0 + + def get_separate(self, program_text: str, incremental_step: int) -> Any: + template = r"# separate{}: (\[.*\])$" + m = re.search(template.format(incremental_step), program_text, flags=re.MULTILINE) + if not m: + m = re.search(template.format(""), program_text, flags=re.MULTILINE) + if m: + return ast.literal_eval(m.group(1)) + else: + return True + + +class TestRunMultiFile(TestRun): + """Run the main multi-module tests in multi-file compilation mode. 
+ + In multi-file mode each module gets compiled into a separate C file, + but all modules (C files) are compiled together. + """ + + multi_file = True + test_name_suffix = "_multi" + files = ["run-multimodule.test", "run-mypy-sim.test"] + + +class TestRunSeparate(TestRun): + """Run the main multi-module tests in separate compilation mode. + + In this mode there are multiple compilation groups, which are compiled + incrementally. Each group is compiled to a separate C file, and these C + files are compiled separately. + + Each compiled module is placed into a separate compilation group, unless + overridden by a special comment. Consider this example: + + # separate: [(["other.py", "other_b.py"], "stuff")] + + This puts other.py and other_b.py into a compilation group named "stuff". + Any files not mentioned in the comment will get single-file groups. + """ + + separate = True + test_name_suffix = "_separate" + files = ["run-multimodule.test", "run-mypy-sim.test"] + + +class TestRunStrictDunderTyping(TestRun): + """Run the tests with strict dunder typing.""" + + strict_dunder_typing = True + test_name_suffix = "_dunder_typing" + files = ["run-dunders.test", "run-floats.test"] + + +def fix_native_line_number(message: str, fnam: str, delta: int) -> str: + """Update code locations in test case output to point to the .test file. + + The description of the test case is written to native.py, and line numbers + in test case output often are relative to native.py. This translates the + line numbers to be relative to the .test file that contains the test case + description, and also updates the file name to the .test file name. + + Args: + message: message to update + fnam: path of the .test file + delta: line number of the beginning of the test case in the .test file + + Returns updated message (or original message if we couldn't find anything). + """ + fnam = os.path.basename(fnam) + message = re.sub( + r"native\.py:([0-9]+):", lambda m: "%s:%d:" % (fnam, int(m.group(1)) + delta), message + ) + message = re.sub( + r'"native.py", line ([0-9]+),', + lambda m: '"%s", line %d,' % (fnam, int(m.group(1)) + delta), + message, + ) + return message + + +def copy_output_files(target_dir: str) -> None: + try: + os.mkdir(target_dir) + except OSError: + # Only copy data for the first failure, to avoid excessive output in case + # many tests fail + return + + for fnam in glob.glob("build/*.[ch]"): + shutil.copy(fnam, target_dir) + + sys.stderr.write(f"\nGenerated files: {target_dir} (for first failure only)\n\n") diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_serialization.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_serialization.py new file mode 100644 index 0000000..19de05d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_serialization.py @@ -0,0 +1,108 @@ +"""Functions to check that serialization round-tripped properly.""" + +# This file is named test_serialization.py even though it doesn't +# contain its own tests so that pytest will rewrite the asserts... 
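# Illustrative sketch (not part of the upstream module): the round-trip pattern the helpers
# below implement, shown on a plain dataclass instead of mypyc IR:
#
#     from dataclasses import asdict, dataclass
#
#     @dataclass
#     class Point:
#         x: int
#         y: int
#
#     p = Point(1, 2)
#     blob = asdict(p)       # "serialize" to a plain dict
#     q = Point(**blob)      # "deserialize"
#     assert p == q          # compare field by field
#
# check_serialization_roundtrip() below does the same with ModuleIR.serialize() and
# deserialize_modules(), comparing the results with assert_modules_same().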
+ +from __future__ import annotations + +from collections.abc import Iterable +from typing import Any + +from mypyc.ir.class_ir import ClassIR +from mypyc.ir.func_ir import FuncDecl, FuncIR, FuncSignature +from mypyc.ir.module_ir import ModuleIR, deserialize_modules +from mypyc.ir.ops import DeserMaps +from mypyc.ir.rtypes import RType +from mypyc.sametype import is_same_signature, is_same_type + + +def get_dict(x: Any) -> dict[str, Any]: + if hasattr(x, "__mypyc_attrs__"): + return {k: getattr(x, k) for k in x.__mypyc_attrs__ if hasattr(x, k)} + else: + return dict(x.__dict__) + + +def get_function_dict(x: FuncIR) -> dict[str, Any]: + """Get a dict of function attributes safe to compare across serialization""" + d = get_dict(x) + d.pop("blocks", None) + d.pop("env", None) + return d + + +def assert_blobs_same(x: Any, y: Any, trail: tuple[Any, ...]) -> None: + """Compare two blobs of IR as best we can. + + FuncDecls, FuncIRs, and ClassIRs are compared by fullname to avoid + infinite recursion. + (More detailed comparisons should be done manually.) + + Types and signatures are compared using mypyc.sametype. + + Containers are compared recursively. + + Anything else is compared with ==. + + The `trail` argument is used in error messages. + """ + + assert type(x) is type(y), (f"Type mismatch at {trail}", type(x), type(y)) + if isinstance(x, (FuncDecl, FuncIR, ClassIR)): + assert x.fullname == y.fullname, f"Name mismatch at {trail}" + elif isinstance(x, dict): + assert len(x.keys()) == len(y.keys()), f"Keys mismatch at {trail}" + for (xk, xv), (yk, yv) in zip(x.items(), y.items()): + assert_blobs_same(xk, yk, trail + ("keys",)) + assert_blobs_same(xv, yv, trail + (xk,)) + elif isinstance(x, dict): + assert x.keys() == y.keys(), f"Keys mismatch at {trail}" + for k in x.keys(): + assert_blobs_same(x[k], y[k], trail + (k,)) + elif isinstance(x, Iterable) and not isinstance(x, (str, set)): + # Special case iterables to generate better assert error messages. + # We can't use this for sets since the ordering is unpredictable, + # and strings should be treated as atomic values. + for i, (xv, yv) in enumerate(zip(x, y)): + assert_blobs_same(xv, yv, trail + (i,)) + elif isinstance(x, RType): + assert is_same_type(x, y), f"RType mismatch at {trail}" + elif isinstance(x, FuncSignature): + assert is_same_signature(x, y), f"Signature mismatch at {trail}" + else: + assert x == y, f"Value mismatch at {trail}" + + +def assert_modules_same(ir1: ModuleIR, ir2: ModuleIR) -> None: + """Assert that two module IRs are the same (*). + + * Or rather, as much as we care about preserving across + serialization. We drop the actual IR bodies of functions but try + to preserve everything else. 
+    """
+    assert ir1.fullname == ir2.fullname
+
+    assert ir1.imports == ir2.imports
+
+    for cls1, cls2 in zip(ir1.classes, ir2.classes):
+        assert_blobs_same(get_dict(cls1), get_dict(cls2), (ir1.fullname, cls1.fullname))
+
+    for fn1, fn2 in zip(ir1.functions, ir2.functions):
+        assert_blobs_same(
+            get_function_dict(fn1), get_function_dict(fn2), (ir1.fullname, fn1.fullname)
+        )
+        assert_blobs_same(get_dict(fn1.decl), get_dict(fn2.decl), (ir1.fullname, fn1.fullname))
+
+    assert_blobs_same(ir1.final_names, ir2.final_names, (ir1.fullname, "final_names"))
+
+
+def check_serialization_roundtrip(irs: dict[str, ModuleIR]) -> None:
+    """Check that we can serialize modules out and deserialize them to the same thing."""
+    serialized = {k: ir.serialize() for k, ir in irs.items()}
+
+    ctx = DeserMaps({}, {})
+    irs2 = deserialize_modules(serialized, ctx)
+    assert irs.keys() == irs2.keys()
+
+    for k in irs:
+        assert_modules_same(irs[k], irs2[k])
diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_struct.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_struct.py
new file mode 100644
index 0000000..82990e6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_struct.py
@@ -0,0 +1,112 @@
+from __future__ import annotations
+
+import unittest
+
+from mypyc.ir.rtypes import (
+    RStruct,
+    bool_rprimitive,
+    int32_rprimitive,
+    int64_rprimitive,
+    int_rprimitive,
+    object_rprimitive,
+)
+from mypyc.rt_subtype import is_runtime_subtype
+
+
+class TestStruct(unittest.TestCase):
+    def test_struct_offsets(self) -> None:
+        # test per-member alignment
+        r = RStruct("", [], [bool_rprimitive, int32_rprimitive, int64_rprimitive])
+        assert r.size == 16
+        assert r.offsets == [0, 4, 8]
+
+        # test final alignment
+        r1 = RStruct("", [], [bool_rprimitive, bool_rprimitive])
+        assert r1.size == 2
+        assert r1.offsets == [0, 1]
+        r2 = RStruct("", [], [int32_rprimitive, bool_rprimitive])
+        r3 = RStruct("", [], [int64_rprimitive, bool_rprimitive])
+        assert r2.offsets == [0, 4]
+        assert r3.offsets == [0, 8]
+        assert r2.size == 8
+        assert r3.size == 16
+
+        r4 = RStruct("", [], [bool_rprimitive, bool_rprimitive, bool_rprimitive, int32_rprimitive])
+        assert r4.size == 8
+        assert r4.offsets == [0, 1, 2, 4]
+
+        # test nested struct
+        r5 = RStruct("", [], [bool_rprimitive, r])
+        assert r5.offsets == [0, 8]
+        assert r5.size == 24
+        r6 = RStruct("", [], [int32_rprimitive, r5])
+        assert r6.offsets == [0, 8]
+        assert r6.size == 32
+        # test nested struct with alignment less than 8
+        r7 = RStruct("", [], [bool_rprimitive, r4])
+        assert r7.offsets == [0, 4]
+        assert r7.size == 12
+
+    def test_struct_str(self) -> None:
+        r = RStruct("Foo", ["a", "b"], [bool_rprimitive, object_rprimitive])
+        assert str(r) == "Foo{a:bool, b:object}"
+        assert (
+            repr(r) == "<RStruct Foo{a:<RPrimitive builtins.bool>, "
+            "b:<RPrimitive builtins.object>}>"
+        )
+        r1 = RStruct("Bar", ["c"], [int32_rprimitive])
+        assert str(r1) == "Bar{c:i32}"
+        assert repr(r1) == "<RStruct Bar{c:<RPrimitive i32>}>"
+        r2 = RStruct("Baz", [], [])
+        assert str(r2) == "Baz{}"
+        assert repr(r2) == "<RStruct Baz{}>"
+
+    def test_runtime_subtype(self) -> None:
+        # right type to check with
+        r = RStruct("Foo", ["a", "b"], [bool_rprimitive, int_rprimitive])
+
+        # using the exact same fields
+        r1 = RStruct("Foo", ["a", "b"], [bool_rprimitive, int_rprimitive])
+
+        # names different
+        r2 = RStruct("Bar", ["c", "b"], [bool_rprimitive, int_rprimitive])
+
+        # name different
+        r3 = RStruct("Baz", ["a", "b"], [bool_rprimitive, int_rprimitive])
+
+        # type different
+        r4 = RStruct("FooBar", ["a", "b"], [bool_rprimitive, int32_rprimitive])
+
+        # number of types different
+        r5 =
RStruct( + "FooBarBaz", ["a", "b", "c"], [bool_rprimitive, int_rprimitive, bool_rprimitive] + ) + + assert is_runtime_subtype(r1, r) is True + assert is_runtime_subtype(r2, r) is False + assert is_runtime_subtype(r3, r) is False + assert is_runtime_subtype(r4, r) is False + assert is_runtime_subtype(r5, r) is False + + def test_eq_and_hash(self) -> None: + r = RStruct("Foo", ["a", "b"], [bool_rprimitive, int_rprimitive]) + + # using the exact same fields + r1 = RStruct("Foo", ["a", "b"], [bool_rprimitive, int_rprimitive]) + assert hash(r) == hash(r1) + assert r == r1 + + # different name + r2 = RStruct("Foq", ["a", "b"], [bool_rprimitive, int_rprimitive]) + assert hash(r) != hash(r2) + assert r != r2 + + # different names + r3 = RStruct("Foo", ["a", "c"], [bool_rprimitive, int_rprimitive]) + assert hash(r) != hash(r3) + assert r != r3 + + # different type + r4 = RStruct("Foo", ["a", "b"], [bool_rprimitive, int_rprimitive, bool_rprimitive]) + assert hash(r) != hash(r4) + assert r != r4 diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_tuplename.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_tuplename.py new file mode 100644 index 0000000..5dd51d4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_tuplename.py @@ -0,0 +1,33 @@ +from __future__ import annotations + +import unittest + +from mypyc.ir.class_ir import ClassIR +from mypyc.ir.rtypes import ( + RInstance, + RTuple, + RUnion, + bool_rprimitive, + int_rprimitive, + list_rprimitive, + object_rprimitive, +) + + +class TestTupleNames(unittest.TestCase): + def setUp(self) -> None: + self.inst_a = RInstance(ClassIR("A", "__main__")) + self.inst_b = RInstance(ClassIR("B", "__main__")) + + def test_names(self) -> None: + assert RTuple([int_rprimitive, int_rprimitive]).unique_id == "T2II" + assert RTuple([list_rprimitive, object_rprimitive, self.inst_a]).unique_id == "T3OOO" + assert RTuple([list_rprimitive, object_rprimitive, self.inst_b]).unique_id == "T3OOO" + assert RTuple([]).unique_id == "T0" + assert ( + RTuple([RTuple([]), RTuple([int_rprimitive, int_rprimitive])]).unique_id == "T2T0T2II" + ) + assert ( + RTuple([bool_rprimitive, RUnion([bool_rprimitive, int_rprimitive])]).unique_id + == "T2CO" + ) diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/test_typeops.py b/.venv/lib/python3.12/site-packages/mypyc/test/test_typeops.py new file mode 100644 index 0000000..ff2c05a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/test_typeops.py @@ -0,0 +1,97 @@ +"""Test cases for various RType operations.""" + +from __future__ import annotations + +import unittest + +from mypyc.ir.rtypes import ( + RUnion, + bit_rprimitive, + bool_rprimitive, + int16_rprimitive, + int32_rprimitive, + int64_rprimitive, + int_rprimitive, + object_rprimitive, + short_int_rprimitive, + str_rprimitive, +) +from mypyc.rt_subtype import is_runtime_subtype +from mypyc.subtype import is_subtype + +native_int_types = [int64_rprimitive, int32_rprimitive, int16_rprimitive] + + +class TestSubtype(unittest.TestCase): + def test_bit(self) -> None: + assert is_subtype(bit_rprimitive, bool_rprimitive) + assert is_subtype(bit_rprimitive, int_rprimitive) + assert is_subtype(bit_rprimitive, short_int_rprimitive) + for rt in native_int_types: + assert is_subtype(bit_rprimitive, rt) + + def test_bool(self) -> None: + assert not is_subtype(bool_rprimitive, bit_rprimitive) + assert is_subtype(bool_rprimitive, int_rprimitive) + assert is_subtype(bool_rprimitive, short_int_rprimitive) + for rt in native_int_types: + 
assert is_subtype(bool_rprimitive, rt) + + def test_int64(self) -> None: + assert is_subtype(int64_rprimitive, int64_rprimitive) + assert is_subtype(int64_rprimitive, int_rprimitive) + assert not is_subtype(int64_rprimitive, short_int_rprimitive) + assert not is_subtype(int64_rprimitive, int32_rprimitive) + assert not is_subtype(int64_rprimitive, int16_rprimitive) + + def test_int32(self) -> None: + assert is_subtype(int32_rprimitive, int32_rprimitive) + assert is_subtype(int32_rprimitive, int_rprimitive) + assert not is_subtype(int32_rprimitive, short_int_rprimitive) + assert not is_subtype(int32_rprimitive, int64_rprimitive) + assert not is_subtype(int32_rprimitive, int16_rprimitive) + + def test_int16(self) -> None: + assert is_subtype(int16_rprimitive, int16_rprimitive) + assert is_subtype(int16_rprimitive, int_rprimitive) + assert not is_subtype(int16_rprimitive, short_int_rprimitive) + assert not is_subtype(int16_rprimitive, int64_rprimitive) + assert not is_subtype(int16_rprimitive, int32_rprimitive) + + +class TestRuntimeSubtype(unittest.TestCase): + def test_bit(self) -> None: + assert is_runtime_subtype(bit_rprimitive, bool_rprimitive) + assert not is_runtime_subtype(bit_rprimitive, int_rprimitive) + + def test_bool(self) -> None: + assert not is_runtime_subtype(bool_rprimitive, bit_rprimitive) + assert not is_runtime_subtype(bool_rprimitive, int_rprimitive) + + def test_union(self) -> None: + bool_int_mix = RUnion([bool_rprimitive, int_rprimitive]) + assert not is_runtime_subtype(bool_int_mix, short_int_rprimitive) + assert not is_runtime_subtype(bool_int_mix, int_rprimitive) + assert not is_runtime_subtype(short_int_rprimitive, bool_int_mix) + assert not is_runtime_subtype(int_rprimitive, bool_int_mix) + + +class TestUnionSimplification(unittest.TestCase): + def test_simple_type_result(self) -> None: + assert RUnion.make_simplified_union([int_rprimitive]) == int_rprimitive + + def test_remove_duplicate(self) -> None: + assert RUnion.make_simplified_union([int_rprimitive, int_rprimitive]) == int_rprimitive + + def test_cannot_simplify(self) -> None: + assert RUnion.make_simplified_union( + [int_rprimitive, str_rprimitive, object_rprimitive] + ) == RUnion([int_rprimitive, str_rprimitive, object_rprimitive]) + + def test_nested(self) -> None: + assert RUnion.make_simplified_union( + [int_rprimitive, RUnion([str_rprimitive, int_rprimitive])] + ) == RUnion([int_rprimitive, str_rprimitive]) + assert RUnion.make_simplified_union( + [int_rprimitive, RUnion([str_rprimitive, RUnion([int_rprimitive])])] + ) == RUnion([int_rprimitive, str_rprimitive]) diff --git a/.venv/lib/python3.12/site-packages/mypyc/test/testutil.py b/.venv/lib/python3.12/site-packages/mypyc/test/testutil.py new file mode 100644 index 0000000..3e9abc2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/test/testutil.py @@ -0,0 +1,286 @@ +"""Helpers for writing tests""" + +from __future__ import annotations + +import contextlib +import os +import os.path +import re +import shutil +from collections.abc import Iterator +from typing import Callable + +from mypy import build +from mypy.errors import CompileError +from mypy.nodes import Expression, MypyFile +from mypy.options import Options +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase, DataSuite +from mypy.test.helpers import assert_string_arrays_equal +from mypy.types import Type +from mypyc.analysis.ircheck import assert_func_ir_valid +from mypyc.common import IS_32_BIT_PLATFORM, PLATFORM_SIZE +from mypyc.errors 
import Errors +from mypyc.ir.func_ir import FuncIR +from mypyc.ir.module_ir import ModuleIR +from mypyc.irbuild.main import build_ir +from mypyc.irbuild.mapper import Mapper +from mypyc.options import CompilerOptions +from mypyc.test.config import test_data_prefix + +# The builtins stub used during icode generation test cases. +ICODE_GEN_BUILTINS = os.path.join(test_data_prefix, "fixtures/ir.py") +# The testutil support library +TESTUTIL_PATH = os.path.join(test_data_prefix, "fixtures/testutil.py") + + +class MypycDataSuite(DataSuite): + # Need to list no files, since this will be picked up as a suite of tests + files: list[str] = [] + data_prefix = test_data_prefix + + +def builtins_wrapper( + func: Callable[[DataDrivenTestCase], None], path: str +) -> Callable[[DataDrivenTestCase], None]: + """Decorate a function that implements a data-driven test case to copy an + alternative builtins module implementation in place before performing the + test case. Clean up after executing the test case. + """ + return lambda testcase: perform_test(func, path, testcase) + + +@contextlib.contextmanager +def use_custom_builtins(builtins_path: str, testcase: DataDrivenTestCase) -> Iterator[None]: + for path, _ in testcase.files: + if os.path.basename(path) == "builtins.pyi": + default_builtins = False + break + else: + # Use default builtins. + builtins = os.path.abspath(os.path.join(test_temp_dir, "builtins.pyi")) + shutil.copyfile(builtins_path, builtins) + default_builtins = True + + # Actually perform the test case. + try: + yield None + finally: + if default_builtins: + # Clean up. + os.remove(builtins) + + +def perform_test( + func: Callable[[DataDrivenTestCase], None], builtins_path: str, testcase: DataDrivenTestCase +) -> None: + for path, _ in testcase.files: + if os.path.basename(path) == "builtins.py": + default_builtins = False + break + else: + # Use default builtins. + builtins = os.path.join(test_temp_dir, "builtins.py") + shutil.copyfile(builtins_path, builtins) + default_builtins = True + + # Actually perform the test case. + func(testcase) + + if default_builtins: + # Clean up. + os.remove(builtins) + + +def build_ir_for_single_file( + input_lines: list[str], compiler_options: CompilerOptions | None = None +) -> list[FuncIR]: + return build_ir_for_single_file2(input_lines, compiler_options)[0].functions + + +def build_ir_for_single_file2( + input_lines: list[str], compiler_options: CompilerOptions | None = None +) -> tuple[ModuleIR, MypyFile, dict[Expression, Type], Mapper]: + program_text = "\n".join(input_lines) + + # By default generate IR compatible with the earliest supported Python C API. + # If a test needs more recent API features, this should be overridden. + compiler_options = compiler_options or CompilerOptions(capi_version=(3, 9)) + options = Options() + options.show_traceback = True + options.hide_error_codes = True + options.use_builtins_fixtures = True + options.strict_optional = True + options.python_version = compiler_options.python_version or (3, 9) + options.export_types = True + options.preserve_asts = True + options.allow_empty_bodies = True + options.per_module_options["__main__"] = {"mypyc": True} + + source = build.BuildSource("main", "__main__", program_text) + # Construct input as a single single. + # Parse and type check the input program. 
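    # For example (illustrative), a caller might pass
    #     input_lines = ["def inc(x: int) -> int:", "    return x + 1"]
    # The lines are joined into program_text above and compiled as module "__main__",
    # and the returned ModuleIR contains the generated IR for inc().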
+ result = build.build(sources=[source], options=options, alt_lib_path=test_temp_dir) + if result.errors: + raise CompileError(result.errors) + + errors = Errors(options) + mapper = Mapper({"__main__": None}) + modules = build_ir( + [result.files["__main__"]], result.graph, result.types, mapper, compiler_options, errors + ) + if errors.num_errors: + raise CompileError(errors.new_messages()) + + module = list(modules.values())[0] + for fn in module.functions: + assert_func_ir_valid(fn) + tree = result.graph[module.fullname].tree + assert tree is not None + return module, tree, result.types, mapper + + +def update_testcase_output(testcase: DataDrivenTestCase, output: list[str]) -> None: + # TODO: backport this to mypy + assert testcase.old_cwd is not None, "test was not properly set up" + testcase_path = os.path.join(testcase.old_cwd, testcase.file) + with open(testcase_path) as f: + data_lines = f.read().splitlines() + + # We can't rely on the test line numbers to *find* the test, since + # we might fix multiple tests in a run. So find it by the case + # header. Give up if there are multiple tests with the same name. + test_slug = f"[case {testcase.name}]" + if data_lines.count(test_slug) != 1: + return + start_idx = data_lines.index(test_slug) + stop_idx = start_idx + 11 + while stop_idx < len(data_lines) and not data_lines[stop_idx].startswith("[case "): + stop_idx += 1 + + test = data_lines[start_idx:stop_idx] + out_start = test.index("[out]") + test[out_start + 1 :] = output + data_lines[start_idx:stop_idx] = test + [""] + data = "\n".join(data_lines) + + with open(testcase_path, "w") as f: + print(data, file=f) + + +def assert_test_output( + testcase: DataDrivenTestCase, + actual: list[str], + message: str, + expected: list[str] | None = None, + formatted: list[str] | None = None, +) -> None: + __tracebackhide__ = True + + expected_output = expected if expected is not None else testcase.output + if expected_output != actual and testcase.config.getoption("--update-data", False): + update_testcase_output(testcase, actual) + + assert_string_arrays_equal( + expected_output, actual, f"{message} ({testcase.file}, line {testcase.line})" + ) + + +def get_func_names(expected: list[str]) -> list[str]: + res = [] + for s in expected: + m = re.match(r"def ([_a-zA-Z0-9.*$]+)\(", s) + if m: + res.append(m.group(1)) + return res + + +def remove_comment_lines(a: list[str]) -> list[str]: + """Return a copy of array with comments removed. + + Lines starting with '--' (but not with '---') are removed. 
+ """ + r = [] + for s in a: + if s.strip().startswith("--") and not s.strip().startswith("---"): + pass + else: + r.append(s) + return r + + +def print_with_line_numbers(s: str) -> None: + lines = s.splitlines() + for i, line in enumerate(lines): + print("%-4d %s" % (i + 1, line)) + + +def heading(text: str) -> None: + print("=" * 20 + " " + text + " " + "=" * 20) + + +def show_c(cfiles: list[list[tuple[str, str]]]) -> None: + heading("Generated C") + for group in cfiles: + for cfile, ctext in group: + print(f"== {cfile} ==") + print_with_line_numbers(ctext) + heading("End C") + + +def fudge_dir_mtimes(dir: str, delta: int) -> None: + for dirpath, _, filenames in os.walk(dir): + for name in filenames: + path = os.path.join(dirpath, name) + new_mtime = os.stat(path).st_mtime + delta + os.utime(path, times=(new_mtime, new_mtime)) + + +def replace_word_size(text: list[str]) -> list[str]: + """Replace WORDSIZE with platform specific word sizes""" + result = [] + for line in text: + index = line.find("WORD_SIZE") + if index != -1: + # get 'WORDSIZE*n' token + word_size_token = line[index:].split()[0] + n = int(word_size_token[10:]) + replace_str = str(PLATFORM_SIZE * n) + result.append(line.replace(word_size_token, replace_str)) + else: + result.append(line) + return result + + +def infer_ir_build_options_from_test_name(name: str) -> CompilerOptions | None: + """Look for magic substrings in test case name to set compiler options. + + Return None if the test case should be skipped (always pass). + + Supported naming conventions: + + *_64bit*: + Run test case only on 64-bit platforms + *_32bit*: + Run test caseonly on 32-bit platforms + *_python3_8* (or for any Python version): + Use Python 3.8+ C API features (default: lowest supported version) + *StripAssert*: + Don't generate code for assert statements + """ + # If this is specific to some bit width, always pass if platform doesn't match. + if "_64bit" in name and IS_32_BIT_PLATFORM: + return None + if "_32bit" in name and not IS_32_BIT_PLATFORM: + return None + options = CompilerOptions(strip_asserts="StripAssert" in name, capi_version=(3, 9)) + # A suffix like _python3_9 is used to set the target C API version. 
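    # Examples (illustrative): "testFoo_64bit" returns None (skipped) on 32-bit platforms,
    # "testBar_python3_12" sets capi_version and python_version to (3, 12), a name containing
    # "StripAssert" builds with strip_asserts=True, and an "_experimental" suffix enables
    # experimental_features.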
+ m = re.search(r"_python([3-9]+)_([0-9]+)(_|\b)", name) + if m: + options.capi_version = (int(m.group(1)), int(m.group(2))) + options.python_version = options.capi_version + elif "_py" in name or "_Python" in name: + assert False, f"Invalid _py* suffix (should be _pythonX_Y): {name}" + if re.search("_experimental(_|$)", name): + options.experimental_features = True + return options diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/__init__.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/transform/__init__.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..dac2929 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/transform/__init__.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/__init__.py b/.venv/lib/python3.12/site-packages/mypyc/transform/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/copy_propagation.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/transform/copy_propagation.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..31ba5ef Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/transform/copy_propagation.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/copy_propagation.py b/.venv/lib/python3.12/site-packages/mypyc/transform/copy_propagation.py new file mode 100644 index 0000000..49de616 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/transform/copy_propagation.py @@ -0,0 +1,94 @@ +"""Simple copy propagation optimization. + +Example input: + + x = f() + y = x + +The register x is redundant and we can directly assign its value to y: + + y = f() + +This can optimize away registers that are assigned to once. +""" + +from __future__ import annotations + +from mypyc.ir.func_ir import FuncIR +from mypyc.ir.ops import Assign, AssignMulti, LoadAddress, LoadErrorValue, Register, Value +from mypyc.irbuild.ll_builder import LowLevelIRBuilder +from mypyc.options import CompilerOptions +from mypyc.sametype import is_same_type +from mypyc.transform.ir_transform import IRTransform + + +def do_copy_propagation(fn: FuncIR, options: CompilerOptions) -> None: + """Perform copy propagation optimization for fn.""" + + # Anything with an assignment count >1 will not be optimized + # here, as it would be require data flow analysis and we want to + # keep this simple and fast, at least until we've made data flow + # analysis much faster. + counts: dict[Value, int] = {} + replacements: dict[Value, Value] = {} + for arg in fn.arg_regs: + # Arguments are always assigned to initially + counts[arg] = 1 + + for block in fn.blocks: + for op in block.ops: + if isinstance(op, Assign): + c = counts.get(op.dest, 0) + counts[op.dest] = c + 1 + # Does this look like a supported assignment? + # TODO: Something needs LoadErrorValue assignments to be preserved? 
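                # Worked example (illustrative): given IR like
                #     r0 = f()
                #     y = r0
                # where y is assigned only this once, replacements[y] = r0 is recorded; the
                # transform below then drops the copy and patches later reads of y to use r0
                # directly. A second assignment to y would evict the candidate.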
+ if ( + c == 0 + and is_same_type(op.dest.type, op.src.type) + and not isinstance(op.src, LoadErrorValue) + ): + replacements[op.dest] = op.src + elif c == 1: + # Too many assignments -- don't replace this one + replacements.pop(op.dest, 0) + elif isinstance(op, AssignMulti): + # Copy propagation not supported for AssignMulti destinations + counts[op.dest] = 2 + replacements.pop(op.dest, 0) + elif isinstance(op, LoadAddress): + # We don't support taking the address of an arbitrary Value, + # so we'll need to preserve the operands of LoadAddress. + if isinstance(op.src, Register): + counts[op.src] = 2 + replacements.pop(op.src, 0) + + # Follow chains of propagation with more than one assignment. + for src, dst in list(replacements.items()): + if counts.get(dst, 0) > 1: + # Not supported + del replacements[src] + else: + while dst in replacements: + dst = replacements[dst] + if counts.get(dst, 0) > 1: + # Not supported + del replacements[src] + if src in replacements: + replacements[src] = dst + + builder = LowLevelIRBuilder(None, options) + transform = CopyPropagationTransform(builder, replacements) + transform.transform_blocks(fn.blocks) + fn.blocks = builder.blocks + + +class CopyPropagationTransform(IRTransform): + def __init__(self, builder: LowLevelIRBuilder, map: dict[Value, Value]) -> None: + super().__init__(builder) + self.op_map.update(map) + self.removed = set(map) + + def visit_assign(self, op: Assign) -> Value | None: + if op.dest in self.removed: + return None + return self.add(op) diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/exceptions.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/transform/exceptions.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..86e9567 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/transform/exceptions.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/exceptions.py b/.venv/lib/python3.12/site-packages/mypyc/transform/exceptions.py new file mode 100644 index 0000000..33dfeb6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/transform/exceptions.py @@ -0,0 +1,182 @@ +"""Transform that inserts error checks after opcodes. + +When initially building the IR, the code doesn't perform error checks +for exceptions. This module is used to insert all required error checks +afterwards. Each Op describes how it indicates an error condition (if +at all). + +We need to split basic blocks on each error check since branches can +only be placed at the end of a basic block. +""" + +from __future__ import annotations + +from mypyc.ir.func_ir import FuncIR +from mypyc.ir.ops import ( + ERR_ALWAYS, + ERR_FALSE, + ERR_MAGIC, + ERR_MAGIC_OVERLAPPING, + ERR_NEVER, + NO_TRACEBACK_LINE_NO, + BasicBlock, + Branch, + CallC, + ComparisonOp, + Float, + GetAttr, + Integer, + LoadErrorValue, + Op, + RegisterOp, + Return, + SetAttr, + TupleGet, + Value, +) +from mypyc.ir.rtypes import RTuple, bool_rprimitive, is_float_rprimitive +from mypyc.primitives.exc_ops import err_occurred_op +from mypyc.primitives.registry import CFunctionDescription + + +def insert_exception_handling(ir: FuncIR) -> None: + # Generate error block if any ops may raise an exception. If an op + # fails without its own error handler, we'll branch to this + # block. The block just returns an error value. 
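    # Illustrative shape of the generated handler block (built by add_default_handler_block):
    #     L_err:
    #         r = <error value of ir.ret_type>
    #         return r
    # Ops that can raise get a branch to this block appended after them by
    # split_blocks_at_errors() below.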
+ error_label: BasicBlock | None = None + for block in ir.blocks: + adjust_error_kinds(block) + if error_label is None and any(op.can_raise() for op in block.ops): + error_label = add_default_handler_block(ir) + if error_label: + ir.blocks = split_blocks_at_errors(ir.blocks, error_label, ir.traceback_name) + + +def add_default_handler_block(ir: FuncIR) -> BasicBlock: + block = BasicBlock() + ir.blocks.append(block) + op = LoadErrorValue(ir.ret_type) + block.ops.append(op) + block.ops.append(Return(op)) + return block + + +def split_blocks_at_errors( + blocks: list[BasicBlock], default_error_handler: BasicBlock, func_name: str | None +) -> list[BasicBlock]: + new_blocks: list[BasicBlock] = [] + + # First split blocks on ops that may raise. + for block in blocks: + ops = block.ops + block.ops = [] + cur_block = block + new_blocks.append(cur_block) + + # If the block has an error handler specified, use it. Otherwise + # fall back to the default. + error_label = block.error_handler or default_error_handler + block.error_handler = None + + for op in ops: + target: Value = op + cur_block.ops.append(op) + if isinstance(op, RegisterOp) and op.error_kind != ERR_NEVER: + # Split + new_block = BasicBlock() + new_blocks.append(new_block) + + if op.error_kind == ERR_MAGIC: + # Op returns an error value on error that depends on result RType. + variant = Branch.IS_ERROR + negated = False + elif op.error_kind == ERR_FALSE: + # Op returns a C false value on error. + variant = Branch.BOOL + negated = True + elif op.error_kind == ERR_ALWAYS: + variant = Branch.BOOL + negated = True + # this is a hack to represent the always fail + # semantics, using a temporary bool with value false + target = Integer(0, bool_rprimitive) + elif op.error_kind == ERR_MAGIC_OVERLAPPING: + comp = insert_overlapping_error_value_check(cur_block.ops, target) + new_block2 = BasicBlock() + new_blocks.append(new_block2) + branch = Branch( + comp, + true_label=new_block2, + false_label=new_block, + op=Branch.BOOL, + rare=True, + ) + cur_block.ops.append(branch) + cur_block = new_block2 + target = primitive_call(err_occurred_op, [], target.line) + cur_block.ops.append(target) + variant = Branch.IS_ERROR + negated = True + else: + assert False, "unknown error kind %d" % op.error_kind + + # Void ops can't generate errors since error is always + # indicated by a special value stored in a register. + if op.error_kind != ERR_ALWAYS: + assert not op.is_void, "void op generating errors?" + + branch = Branch( + target, true_label=error_label, false_label=new_block, op=variant, line=op.line + ) + branch.negated = negated + if op.line != NO_TRACEBACK_LINE_NO and func_name is not None: + branch.traceback_entry = (func_name, op.line) + cur_block.ops.append(branch) + cur_block = new_block + + return new_blocks + + +def primitive_call(desc: CFunctionDescription, args: list[Value], line: int) -> CallC: + return CallC( + desc.c_function_name, + [], + desc.return_type, + desc.steals, + desc.is_borrowed, + desc.error_kind, + line, + ) + + +def adjust_error_kinds(block: BasicBlock) -> None: + """Infer more precise error_kind attributes for ops. + + We have access here to more information than what was available + when the IR was initially built. 
+ """ + for op in block.ops: + if isinstance(op, GetAttr): + if op.class_type.class_ir.is_always_defined(op.attr): + op.error_kind = ERR_NEVER + if isinstance(op, SetAttr): + if op.class_type.class_ir.is_always_defined(op.attr): + op.error_kind = ERR_NEVER + + +def insert_overlapping_error_value_check(ops: list[Op], target: Value) -> ComparisonOp: + """Append to ops to check for an overlapping error value.""" + typ = target.type + if isinstance(typ, RTuple): + item = TupleGet(target, 0) + ops.append(item) + return insert_overlapping_error_value_check(ops, item) + else: + errvalue: Value + if is_float_rprimitive(target.type): + errvalue = Float(float(typ.c_undefined)) + else: + errvalue = Integer(int(typ.c_undefined), rtype=typ) + op = ComparisonOp(target, errvalue, ComparisonOp.EQ) + ops.append(op) + return op diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/flag_elimination.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/transform/flag_elimination.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..bda8795 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/transform/flag_elimination.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/flag_elimination.py b/.venv/lib/python3.12/site-packages/mypyc/transform/flag_elimination.py new file mode 100644 index 0000000..c78e60d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/transform/flag_elimination.py @@ -0,0 +1,107 @@ +"""Bool register elimination optimization. + +Example input: + + L1: + r0 = f() + b = r0 + goto L3 + L2: + r1 = g() + b = r1 + goto L3 + L3: + if b goto L4 else goto L5 + +The register b is redundant and we replace the assignments with two copies of +the branch in L3: + + L1: + r0 = f() + if r0 goto L4 else goto L5 + L2: + r1 = g() + if r1 goto L4 else goto L5 + +This helps generate simpler IR for tagged integers comparisons, for example. +""" + +from __future__ import annotations + +from mypyc.ir.func_ir import FuncIR +from mypyc.ir.ops import Assign, BasicBlock, Branch, Goto, Register, Unreachable +from mypyc.irbuild.ll_builder import LowLevelIRBuilder +from mypyc.options import CompilerOptions +from mypyc.transform.ir_transform import IRTransform + + +def do_flag_elimination(fn: FuncIR, options: CompilerOptions) -> None: + # Find registers that are used exactly once as source, and in a branch. + counts: dict[Register, int] = {} + branches: dict[Register, Branch] = {} + labels: dict[Register, BasicBlock] = {} + for block in fn.blocks: + for i, op in enumerate(block.ops): + for src in op.sources(): + if isinstance(src, Register): + counts[src] = counts.get(src, 0) + 1 + if i == 0 and isinstance(op, Branch) and isinstance(op.value, Register): + branches[op.value] = op + labels[op.value] = block + + # Based on these we can find the candidate registers. + candidates: set[Register] = { + r for r in branches if counts.get(r, 0) == 1 and r not in fn.arg_regs + } + + # Remove candidates with invalid assignments. 
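    # A candidate is kept only if every assignment to it is immediately followed by a goto
    # to the block holding the branch, as in the module docstring example:
    #     b = r0
    #     goto L3      # L3 starts with "if b goto L4 else goto L5"
    # Any other shape disqualifies the register here.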
+ for block in fn.blocks: + for i, op in enumerate(block.ops): + if isinstance(op, Assign) and op.dest in candidates: + next_op = block.ops[i + 1] + if not (isinstance(next_op, Goto) and next_op.label is labels[op.dest]): + # Not right + candidates.remove(op.dest) + + builder = LowLevelIRBuilder(None, options) + transform = FlagEliminationTransform( + builder, {x: y for x, y in branches.items() if x in candidates} + ) + transform.transform_blocks(fn.blocks) + fn.blocks = builder.blocks + + +class FlagEliminationTransform(IRTransform): + def __init__(self, builder: LowLevelIRBuilder, branch_map: dict[Register, Branch]) -> None: + super().__init__(builder) + self.branch_map = branch_map + self.branches = set(branch_map.values()) + + def visit_assign(self, op: Assign) -> None: + if old_branch := self.branch_map.get(op.dest): + # Replace assignment with a copy of the old branch, which is in a + # separate basic block. The old branch will be deleted in visit_branch. + new_branch = Branch( + op.src, + old_branch.true, + old_branch.false, + old_branch.op, + old_branch.line, + rare=old_branch.rare, + ) + new_branch.negated = old_branch.negated + new_branch.traceback_entry = old_branch.traceback_entry + self.add(new_branch) + else: + self.add(op) + + def visit_goto(self, op: Goto) -> None: + # This is a no-op if basic block already terminated + self.builder.goto(op.label) + + def visit_branch(self, op: Branch) -> None: + if op in self.branches: + # This branch is optimized away + self.add(Unreachable()) + else: + self.add(op) diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/ir_transform.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/transform/ir_transform.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..169c90d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/transform/ir_transform.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/ir_transform.py b/.venv/lib/python3.12/site-packages/mypyc/transform/ir_transform.py new file mode 100644 index 0000000..bcb6db9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/transform/ir_transform.py @@ -0,0 +1,378 @@ +"""Helpers for implementing generic IR to IR transforms.""" + +from __future__ import annotations + +from typing import Final, Optional + +from mypyc.ir.ops import ( + Assign, + AssignMulti, + BasicBlock, + Box, + Branch, + Call, + CallC, + Cast, + ComparisonOp, + DecRef, + Extend, + FloatComparisonOp, + FloatNeg, + FloatOp, + GetAttr, + GetElementPtr, + Goto, + IncRef, + InitStatic, + IntOp, + KeepAlive, + LoadAddress, + LoadErrorValue, + LoadGlobal, + LoadLiteral, + LoadMem, + LoadStatic, + MethodCall, + Op, + OpVisitor, + PrimitiveOp, + RaiseStandardError, + Return, + SetAttr, + SetElement, + SetMem, + Truncate, + TupleGet, + TupleSet, + Unborrow, + Unbox, + Unreachable, + Value, +) +from mypyc.irbuild.ll_builder import LowLevelIRBuilder + + +class IRTransform(OpVisitor[Optional[Value]]): + """Identity transform. + + Subclass and override to perform changes to IR. + + Subclass IRTransform and override any OpVisitor visit_* methods + that perform any IR changes. The default implementations implement + an identity transform. + + A visit method can return None to remove ops. In this case the + transform must ensure that no op uses the original removed op + as a source after the transform. + + You can retain old BasicBlock and op references in ops. 
The transform + will automatically patch these for you as needed. + """ + + def __init__(self, builder: LowLevelIRBuilder) -> None: + self.builder = builder + # Subclasses add additional op mappings here. A None value indicates + # that the op/register is deleted. + self.op_map: dict[Value, Value | None] = {} + + def transform_blocks(self, blocks: list[BasicBlock]) -> None: + """Transform basic blocks that represent a single function. + + The result of the transform will be collected at self.builder.blocks. + """ + block_map: dict[BasicBlock, BasicBlock] = {} + op_map = self.op_map + empties = set() + for block in blocks: + new_block = BasicBlock() + block_map[block] = new_block + self.builder.activate_block(new_block) + new_block.error_handler = block.error_handler + for op in block.ops: + new_op = op.accept(self) + if new_op is not op: + op_map[op] = new_op + # A transform can produce empty blocks which can be removed. + if is_empty_block(new_block) and not is_empty_block(block): + empties.add(new_block) + self.builder.blocks = [block for block in self.builder.blocks if block not in empties] + # Update all op/block references to point to the transformed ones. + patcher = PatchVisitor(op_map, block_map) + for block in self.builder.blocks: + for op in block.ops: + op.accept(patcher) + if block.error_handler is not None: + block.error_handler = block_map.get(block.error_handler, block.error_handler) + + def add(self, op: Op) -> Value: + return self.builder.add(op) + + def visit_goto(self, op: Goto) -> None: + self.add(op) + + def visit_branch(self, op: Branch) -> None: + self.add(op) + + def visit_return(self, op: Return) -> None: + self.add(op) + + def visit_unreachable(self, op: Unreachable) -> None: + self.add(op) + + def visit_assign(self, op: Assign) -> Value | None: + if op.src in self.op_map and self.op_map[op.src] is None: + # Special case: allow removing register initialization assignments + return None + return self.add(op) + + def visit_assign_multi(self, op: AssignMulti) -> Value | None: + return self.add(op) + + def visit_load_error_value(self, op: LoadErrorValue) -> Value | None: + return self.add(op) + + def visit_load_literal(self, op: LoadLiteral) -> Value | None: + return self.add(op) + + def visit_get_attr(self, op: GetAttr) -> Value | None: + return self.add(op) + + def visit_set_attr(self, op: SetAttr) -> Value | None: + return self.add(op) + + def visit_load_static(self, op: LoadStatic) -> Value | None: + return self.add(op) + + def visit_init_static(self, op: InitStatic) -> Value | None: + return self.add(op) + + def visit_tuple_get(self, op: TupleGet) -> Value | None: + return self.add(op) + + def visit_tuple_set(self, op: TupleSet) -> Value | None: + return self.add(op) + + def visit_inc_ref(self, op: IncRef) -> Value | None: + return self.add(op) + + def visit_dec_ref(self, op: DecRef) -> Value | None: + return self.add(op) + + def visit_call(self, op: Call) -> Value | None: + return self.add(op) + + def visit_method_call(self, op: MethodCall) -> Value | None: + return self.add(op) + + def visit_cast(self, op: Cast) -> Value | None: + return self.add(op) + + def visit_box(self, op: Box) -> Value | None: + return self.add(op) + + def visit_unbox(self, op: Unbox) -> Value | None: + return self.add(op) + + def visit_raise_standard_error(self, op: RaiseStandardError) -> Value | None: + return self.add(op) + + def visit_call_c(self, op: CallC) -> Value | None: + return self.add(op) + + def visit_primitive_op(self, op: PrimitiveOp) -> Value | None: + return self.add(op) 
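    # Example (illustrative): a subclass that deletes every KeepAlive op only needs to
    # override one visitor and return None, relying on the identity methods for the rest:
    #
    #     class DropKeepAlive(IRTransform):
    #         def visit_keep_alive(self, op: KeepAlive) -> Value | None:
    #             return None
    #
    # Returning None removes the op; KeepAlive produces no value, so no other op can use it
    # as a source (see the class docstring above).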
+ + def visit_truncate(self, op: Truncate) -> Value | None: + return self.add(op) + + def visit_extend(self, op: Extend) -> Value | None: + return self.add(op) + + def visit_load_global(self, op: LoadGlobal) -> Value | None: + return self.add(op) + + def visit_int_op(self, op: IntOp) -> Value | None: + return self.add(op) + + def visit_comparison_op(self, op: ComparisonOp) -> Value | None: + return self.add(op) + + def visit_float_op(self, op: FloatOp) -> Value | None: + return self.add(op) + + def visit_float_neg(self, op: FloatNeg) -> Value | None: + return self.add(op) + + def visit_float_comparison_op(self, op: FloatComparisonOp) -> Value | None: + return self.add(op) + + def visit_load_mem(self, op: LoadMem) -> Value | None: + return self.add(op) + + def visit_set_mem(self, op: SetMem) -> Value | None: + return self.add(op) + + def visit_get_element_ptr(self, op: GetElementPtr) -> Value | None: + return self.add(op) + + def visit_set_element(self, op: SetElement) -> Value | None: + return self.add(op) + + def visit_load_address(self, op: LoadAddress) -> Value | None: + return self.add(op) + + def visit_keep_alive(self, op: KeepAlive) -> Value | None: + return self.add(op) + + def visit_unborrow(self, op: Unborrow) -> Value | None: + return self.add(op) + + +class PatchVisitor(OpVisitor[None]): + def __init__( + self, op_map: dict[Value, Value | None], block_map: dict[BasicBlock, BasicBlock] + ) -> None: + self.op_map: Final = op_map + self.block_map: Final = block_map + + def fix_op(self, op: Value) -> Value: + new = self.op_map.get(op, op) + assert new is not None, "use of removed op" + return new + + def fix_block(self, block: BasicBlock) -> BasicBlock: + return self.block_map.get(block, block) + + def visit_goto(self, op: Goto) -> None: + op.label = self.fix_block(op.label) + + def visit_branch(self, op: Branch) -> None: + op.value = self.fix_op(op.value) + op.true = self.fix_block(op.true) + op.false = self.fix_block(op.false) + + def visit_return(self, op: Return) -> None: + op.value = self.fix_op(op.value) + + def visit_unreachable(self, op: Unreachable) -> None: + pass + + def visit_assign(self, op: Assign) -> None: + op.src = self.fix_op(op.src) + + def visit_assign_multi(self, op: AssignMulti) -> None: + op.src = [self.fix_op(s) for s in op.src] + + def visit_load_error_value(self, op: LoadErrorValue) -> None: + pass + + def visit_load_literal(self, op: LoadLiteral) -> None: + pass + + def visit_get_attr(self, op: GetAttr) -> None: + op.obj = self.fix_op(op.obj) + + def visit_set_attr(self, op: SetAttr) -> None: + op.obj = self.fix_op(op.obj) + op.src = self.fix_op(op.src) + + def visit_load_static(self, op: LoadStatic) -> None: + pass + + def visit_init_static(self, op: InitStatic) -> None: + op.value = self.fix_op(op.value) + + def visit_tuple_get(self, op: TupleGet) -> None: + op.src = self.fix_op(op.src) + + def visit_tuple_set(self, op: TupleSet) -> None: + op.items = [self.fix_op(item) for item in op.items] + + def visit_inc_ref(self, op: IncRef) -> None: + op.src = self.fix_op(op.src) + + def visit_dec_ref(self, op: DecRef) -> None: + op.src = self.fix_op(op.src) + + def visit_call(self, op: Call) -> None: + op.args = [self.fix_op(arg) for arg in op.args] + + def visit_method_call(self, op: MethodCall) -> None: + op.obj = self.fix_op(op.obj) + op.args = [self.fix_op(arg) for arg in op.args] + + def visit_cast(self, op: Cast) -> None: + op.src = self.fix_op(op.src) + + def visit_box(self, op: Box) -> None: + op.src = self.fix_op(op.src) + + def visit_unbox(self, op: 
Unbox) -> None: + op.src = self.fix_op(op.src) + + def visit_raise_standard_error(self, op: RaiseStandardError) -> None: + if isinstance(op.value, Value): + op.value = self.fix_op(op.value) + + def visit_call_c(self, op: CallC) -> None: + op.args = [self.fix_op(arg) for arg in op.args] + + def visit_primitive_op(self, op: PrimitiveOp) -> None: + op.args = [self.fix_op(arg) for arg in op.args] + + def visit_truncate(self, op: Truncate) -> None: + op.src = self.fix_op(op.src) + + def visit_extend(self, op: Extend) -> None: + op.src = self.fix_op(op.src) + + def visit_load_global(self, op: LoadGlobal) -> None: + pass + + def visit_int_op(self, op: IntOp) -> None: + op.lhs = self.fix_op(op.lhs) + op.rhs = self.fix_op(op.rhs) + + def visit_comparison_op(self, op: ComparisonOp) -> None: + op.lhs = self.fix_op(op.lhs) + op.rhs = self.fix_op(op.rhs) + + def visit_float_op(self, op: FloatOp) -> None: + op.lhs = self.fix_op(op.lhs) + op.rhs = self.fix_op(op.rhs) + + def visit_float_neg(self, op: FloatNeg) -> None: + op.src = self.fix_op(op.src) + + def visit_float_comparison_op(self, op: FloatComparisonOp) -> None: + op.lhs = self.fix_op(op.lhs) + op.rhs = self.fix_op(op.rhs) + + def visit_load_mem(self, op: LoadMem) -> None: + op.src = self.fix_op(op.src) + + def visit_set_mem(self, op: SetMem) -> None: + op.dest = self.fix_op(op.dest) + op.src = self.fix_op(op.src) + + def visit_get_element_ptr(self, op: GetElementPtr) -> None: + op.src = self.fix_op(op.src) + + def visit_set_element(self, op: SetElement) -> None: + op.src = self.fix_op(op.src) + + def visit_load_address(self, op: LoadAddress) -> None: + if isinstance(op.src, LoadStatic): + new = self.fix_op(op.src) + assert isinstance(new, LoadStatic), new + op.src = new + + def visit_keep_alive(self, op: KeepAlive) -> None: + op.src = [self.fix_op(s) for s in op.src] + + def visit_unborrow(self, op: Unborrow) -> None: + op.src = self.fix_op(op.src) + + +def is_empty_block(block: BasicBlock) -> bool: + return len(block.ops) == 1 and isinstance(block.ops[0], Unreachable) diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/log_trace.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/transform/log_trace.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..3450f72 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/transform/log_trace.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/log_trace.py b/.venv/lib/python3.12/site-packages/mypyc/transform/log_trace.py new file mode 100644 index 0000000..cec76b9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/transform/log_trace.py @@ -0,0 +1,158 @@ +"""This optional pass adds logging of various executed operations. + +Some subset of the executed operations are logged to the mypyc_trace.txt file. + +This is useful for performance analysis. For example, it's possible +to identify how frequently various primitive functions are called, +and in which code locations they are called. 
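+
+Each logged event records the enclosing function's full name, the source line
+(when available), an event name such as "call" or "get_attr", and a details
+string identifying the target.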
+""" + +from __future__ import annotations + +from typing import Final + +from mypyc.ir.func_ir import FuncIR +from mypyc.ir.ops import ( + Box, + Call, + CallC, + Cast, + CString, + DecRef, + GetAttr, + IncRef, + LoadLiteral, + LoadStatic, + Op, + PrimitiveOp, + SetAttr, + Unbox, + Value, +) +from mypyc.ir.rtypes import none_rprimitive +from mypyc.irbuild.ll_builder import LowLevelIRBuilder +from mypyc.options import CompilerOptions +from mypyc.primitives.misc_ops import log_trace_event +from mypyc.transform.ir_transform import IRTransform + + +def insert_event_trace_logging(fn: FuncIR, options: CompilerOptions) -> None: + builder = LowLevelIRBuilder(None, options) + transform = LogTraceEventTransform(builder, fn.decl.fullname) + transform.transform_blocks(fn.blocks) + fn.blocks = builder.blocks + + +def get_load_global_name(op: CallC) -> str | None: + name = op.function_name + if name == "CPyDict_GetItem": + arg = op.args[0] + if ( + isinstance(arg, LoadStatic) + and arg.namespace == "static" + and arg.identifier == "globals" + and isinstance(op.args[1], LoadLiteral) + ): + return str(op.args[1].value) + return None + + +# These primitives perform an implicit IncRef for the return value. Only some of the most common ones +# are included, and mostly ops that could be switched to use borrowing in some contexts. +primitives_that_inc_ref: Final = { + "list_get_item_unsafe", + "CPyList_GetItemShort", + "CPyDict_GetWithNone", + "CPyList_GetItem", + "CPyDict_GetItem", + "CPyList_PopLast", +} + + +class LogTraceEventTransform(IRTransform): + def __init__(self, builder: LowLevelIRBuilder, fullname: str) -> None: + super().__init__(builder) + self.fullname = fullname.encode("utf-8") + + def visit_call(self, op: Call) -> Value: + # TODO: Use different op name when constructing an instance + return self.log(op, "call", op.fn.fullname) + + def visit_primitive_op(self, op: PrimitiveOp) -> Value: + value = self.log(op, "primitive_op", op.desc.name) + if op.desc.name in primitives_that_inc_ref: + self.log_inc_ref(value) + return value + + def visit_call_c(self, op: CallC) -> Value: + if global_name := get_load_global_name(op): + return self.log(op, "globals_dict_get_item", global_name) + + func_name = op.function_name + if func_name == "PyObject_Vectorcall" and isinstance(op.args[0], CallC): + if global_name := get_load_global_name(op.args[0]): + return self.log(op, "python_call_global", global_name) + elif func_name == "CPyObject_GetAttr" and isinstance(op.args[1], LoadLiteral): + return self.log(op, "python_get_attr", str(op.args[1].value)) + elif func_name == "PyObject_VectorcallMethod" and isinstance(op.args[0], LoadLiteral): + return self.log(op, "python_call_method", str(op.args[0].value)) + + value = self.log(op, "call_c", func_name) + if func_name in primitives_that_inc_ref: + self.log_inc_ref(value) + return value + + def visit_get_attr(self, op: GetAttr) -> Value: + value = self.log(op, "get_attr", f"{op.class_type.name}.{op.attr}") + if not op.is_borrowed and op.type.is_refcounted: + self.log_inc_ref(op) + return value + + def visit_set_attr(self, op: SetAttr) -> Value: + name = "set_attr" if not op.is_init else "set_attr_init" + return self.log(op, name, f"{op.class_type.name}.{op.attr}") + + def visit_box(self, op: Box) -> Value: + if op.src.type is none_rprimitive: + # Boxing 'None' is a very quick operation, so we don't log it. 
+ return self.add(op) + else: + return self.log(op, "box", str(op.src.type)) + + def visit_unbox(self, op: Unbox) -> Value: + return self.log(op, "unbox", str(op.type)) + + def visit_cast(self, op: Cast) -> Value | None: + value = self.log(op, "cast", str(op.type)) + if not op.is_borrowed: + self.log_inc_ref(value) + return value + + def visit_inc_ref(self, op: IncRef) -> Value: + return self.log(op, "inc_ref", str(op.src.type)) + + def visit_dec_ref(self, op: DecRef) -> Value: + return self.log(op, "dec_ref", str(op.src.type)) + + def log_inc_ref(self, value: Value) -> None: + self.log_event("inc_ref", str(value.type), value.line) + + def log(self, op: Op, name: str, details: str) -> Value: + self.log_event(name, details, op.line) + return self.add(op) + + def log_event(self, name: str, details: str, line: int) -> None: + if line >= 0: + line_str = str(line) + else: + line_str = "" + self.builder.primitive_op( + log_trace_event, + [ + CString(self.fullname), + CString(line_str.encode("ascii")), + CString(name.encode("utf-8")), + CString(details.encode("utf-8")), + ], + line, + ) diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/lower.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/transform/lower.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..b544081 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/transform/lower.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/lower.py b/.venv/lib/python3.12/site-packages/mypyc/transform/lower.py new file mode 100644 index 0000000..f576824 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/transform/lower.py @@ -0,0 +1,35 @@ +"""Transform IR to lower-level ops. + +Higher-level ops are used in earlier compiler passes, as they make +various analyses, optimizations and transforms easier to implement. +Later passes use lower-level ops, as they are easier to generate code +from, and they help with lower-level optimizations. + +Lowering of various primitive ops is implemented in the mypyc.lower +package. +""" + +from __future__ import annotations + +from mypyc.ir.func_ir import FuncIR +from mypyc.ir.ops import PrimitiveOp, Value +from mypyc.irbuild.ll_builder import LowLevelIRBuilder +from mypyc.lower.registry import lowering_registry +from mypyc.options import CompilerOptions +from mypyc.transform.ir_transform import IRTransform + + +def lower_ir(ir: FuncIR, options: CompilerOptions) -> None: + builder = LowLevelIRBuilder(None, options) + visitor = LoweringVisitor(builder) + visitor.transform_blocks(ir.blocks) + ir.blocks = builder.blocks + + +class LoweringVisitor(IRTransform): + def visit_primitive_op(self, op: PrimitiveOp) -> Value | None: + # The lowering implementation functions of various primitive ops are stored + # in a registry, which is populated using function decorators. The name + # of op (such as "int_eq") is used as the key. 
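+        # Each lowering function takes the low-level builder, the op's
+        # argument values, and the line number, and returns the Value that
+        # replaces the primitive op.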
+ lower_fn = lowering_registry[op.desc.name] + return lower_fn(self.builder, op.args, op.line) diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/refcount.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/transform/refcount.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..3ef7e6b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/transform/refcount.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/refcount.py b/.venv/lib/python3.12/site-packages/mypyc/transform/refcount.py new file mode 100644 index 0000000..beacb40 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/transform/refcount.py @@ -0,0 +1,298 @@ +"""Transformation for inserting refrecence count inc/dec opcodes. + +This transformation happens towards the end of compilation. Before this +transformation, reference count management is not explicitly handled at all. +By postponing this pass, the previous passes are simpler as they don't have +to update reference count opcodes. + +The approach is to decrement reference counts soon after a value is no +longer live, to quickly free memory (and call __del__ methods), though +there are no strict guarantees -- other than that local variables are +freed before return from a function. + +Function arguments are a little special. They are initially considered +'borrowed' from the caller and their reference counts don't need to be +decremented before returning. An assignment to a borrowed value turns it +into a regular, owned reference that needs to freed before return. +""" + +from __future__ import annotations + +from collections.abc import Iterable + +from mypyc.analysis.dataflow import ( + AnalysisDict, + analyze_borrowed_arguments, + analyze_live_regs, + analyze_must_defined_regs, + cleanup_cfg, + get_cfg, +) +from mypyc.ir.func_ir import FuncIR, all_values +from mypyc.ir.ops import ( + Assign, + BasicBlock, + Branch, + CallC, + ControlOp, + DecRef, + Goto, + IncRef, + Integer, + KeepAlive, + LoadAddress, + Op, + Register, + RegisterOp, + Undef, + Value, +) + +Decs = tuple[tuple[Value, bool], ...] +Incs = tuple[Value, ...] + +# A cache of basic blocks that decrement and increment specific values +# and then jump to some target block. This lets us cut down on how +# much code we generate in some circumstances. +BlockCache = dict[tuple[BasicBlock, Decs, Incs], BasicBlock] + + +def insert_ref_count_opcodes(ir: FuncIR) -> None: + """Insert reference count inc/dec opcodes to a function. + + This is the entry point to this module. 
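+
+    The IR is updated in place: inc/dec ref ops are inserted into the blocks,
+    KeepAlive ops are stripped, and small blocks that only adjust reference
+    counts may be added after branches.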
+ """ + cfg = get_cfg(ir.blocks) + values = all_values(ir.arg_regs, ir.blocks) + + borrowed = {value for value in values if value.is_borrowed} + args: set[Value] = set(ir.arg_regs) + live = analyze_live_regs(ir.blocks, cfg) + borrow = analyze_borrowed_arguments(ir.blocks, cfg, borrowed) + defined = analyze_must_defined_regs(ir.blocks, cfg, args, values, strict_errors=True) + ordering = make_value_ordering(ir) + cache: BlockCache = {} + for block in ir.blocks.copy(): + if isinstance(block.ops[-1], (Branch, Goto)): + insert_branch_inc_and_decrefs( + block, + cache, + ir.blocks, + live.before, + borrow.before, + borrow.after, + defined.after, + ordering, + ) + transform_block(block, live.before, live.after, borrow.before, defined.after) + + cleanup_cfg(ir.blocks) + + +def is_maybe_undefined(post_must_defined: set[Value], src: Value) -> bool: + return (isinstance(src, Register) and src not in post_must_defined) or ( + isinstance(src, CallC) and src.returns_null + ) + + +def maybe_append_dec_ref( + ops: list[Op], dest: Value, defined: AnalysisDict[Value], key: tuple[BasicBlock, int] +) -> None: + if dest.type.is_refcounted and not isinstance(dest, (Integer, Undef)): + ops.append(DecRef(dest, is_xdec=is_maybe_undefined(defined[key], dest))) + + +def maybe_append_inc_ref(ops: list[Op], dest: Value) -> None: + if dest.type.is_refcounted: + ops.append(IncRef(dest)) + + +def transform_block( + block: BasicBlock, + pre_live: AnalysisDict[Value], + post_live: AnalysisDict[Value], + pre_borrow: AnalysisDict[Value], + post_must_defined: AnalysisDict[Value], +) -> None: + old_ops = block.ops + ops: list[Op] = [] + for i, op in enumerate(old_ops): + key = (block, i) + + assert op not in pre_live[key] + dest = op.dest if isinstance(op, Assign) else op + stolen = op.stolen() + + # Incref any references that are being stolen that stay live, were borrowed, + # or are stolen more than once by this operation. + for j, src in enumerate(stolen): + if src in post_live[key] or src in pre_borrow[key] or src in stolen[:j]: + maybe_append_inc_ref(ops, src) + # For assignments to registers that were already live, + # decref the old value. + if dest not in pre_borrow[key] and dest in pre_live[key]: + assert isinstance(op, Assign), op + maybe_append_dec_ref(ops, dest, post_must_defined, key) + + # Strip KeepAlive. Its only purpose is to help with this transform. + if not isinstance(op, KeepAlive): + ops.append(op) + + # Control ops don't have any space to insert ops after them, so + # their inc/decrefs get inserted by insert_branch_inc_and_decrefs. + if isinstance(op, ControlOp): + continue + + for src in op.unique_sources(): + # Decrement source that won't be live afterwards. + if src not in post_live[key] and src not in pre_borrow[key] and src not in stolen: + maybe_append_dec_ref(ops, src, post_must_defined, key) + # Decrement the destination if it is dead after the op and + # wasn't a borrowed RegisterOp + if ( + not dest.is_void + and dest not in post_live[key] + and not (isinstance(op, RegisterOp) and dest.is_borrowed) + ): + maybe_append_dec_ref(ops, dest, post_must_defined, key) + block.ops = ops + + +def insert_branch_inc_and_decrefs( + block: BasicBlock, + cache: BlockCache, + blocks: list[BasicBlock], + pre_live: AnalysisDict[Value], + pre_borrow: AnalysisDict[Value], + post_borrow: AnalysisDict[Value], + post_must_defined: AnalysisDict[Value], + ordering: dict[Value, int], +) -> None: + """Insert inc_refs and/or dec_refs after a branch/goto. + + Add dec_refs for registers that become dead after a branch. 
+ Add inc_refs for registers that become unborrowed after a branch or goto. + + Branches are special as the true and false targets may have a different + live and borrowed register sets. Add new blocks before the true/false target + blocks that tweak reference counts. + + Example where we need to add an inc_ref: + + def f(a: int) -> None + if a: + a = 1 + return a # a is borrowed if condition is false and unborrowed if true + """ + prev_key = (block, len(block.ops) - 1) + source_live_regs = pre_live[prev_key] + source_borrowed = post_borrow[prev_key] + source_defined = post_must_defined[prev_key] + + term = block.terminator + for i, target in enumerate(term.targets()): + # HAX: After we've checked against an error value the value we must not touch the + # refcount since it will be a null pointer. The correct way to do this would be + # to perform data flow analysis on whether a value can be null (or is always + # null). + omitted: Iterable[Value] + if isinstance(term, Branch) and term.op == Branch.IS_ERROR and i == 0: + omitted = (term.value,) + else: + omitted = () + + decs = after_branch_decrefs( + target, pre_live, source_defined, source_borrowed, source_live_regs, ordering, omitted + ) + incs = after_branch_increfs(target, pre_live, pre_borrow, source_borrowed, ordering) + term.set_target(i, add_block(decs, incs, cache, blocks, target)) + + +def after_branch_decrefs( + label: BasicBlock, + pre_live: AnalysisDict[Value], + source_defined: set[Value], + source_borrowed: set[Value], + source_live_regs: set[Value], + ordering: dict[Value, int], + omitted: Iterable[Value], +) -> tuple[tuple[Value, bool], ...]: + target_pre_live = pre_live[label, 0] + decref = source_live_regs - target_pre_live - source_borrowed + if decref: + return tuple( + (reg, is_maybe_undefined(source_defined, reg)) + for reg in sorted(decref, key=lambda r: ordering[r]) + if reg.type.is_refcounted and reg not in omitted + ) + return () + + +def after_branch_increfs( + label: BasicBlock, + pre_live: AnalysisDict[Value], + pre_borrow: AnalysisDict[Value], + source_borrowed: set[Value], + ordering: dict[Value, int], +) -> tuple[Value, ...]: + target_pre_live = pre_live[label, 0] + target_borrowed = pre_borrow[label, 0] + incref = (source_borrowed - target_borrowed) & target_pre_live + if incref: + return tuple( + reg for reg in sorted(incref, key=lambda r: ordering[r]) if reg.type.is_refcounted + ) + return () + + +def add_block( + decs: Decs, incs: Incs, cache: BlockCache, blocks: list[BasicBlock], label: BasicBlock +) -> BasicBlock: + if not decs and not incs: + return label + + # TODO: be able to share *partial* results + if (label, decs, incs) in cache: + return cache[label, decs, incs] + + block = BasicBlock() + blocks.append(block) + block.ops.extend(DecRef(reg, is_xdec=xdec) for reg, xdec in decs) + block.ops.extend(IncRef(reg) for reg in incs) + block.ops.append(Goto(label)) + cache[label, decs, incs] = block + return block + + +def make_value_ordering(ir: FuncIR) -> dict[Value, int]: + """Create a ordering of values that allows them to be sorted. + + This omits registers that are only ever read. + """ + # TODO: Never initialized values?? + result: dict[Value, int] = {} + n = 0 + + for arg in ir.arg_regs: + result[arg] = n + n += 1 + + for block in ir.blocks: + for op in block.ops: + if ( + isinstance(op, LoadAddress) + and isinstance(op.src, Register) + and op.src not in result + ): + # Taking the address of a register allows initialization. 
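+                # Include the register in the ordering at the point where its
+                # address is taken.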
+ result[op.src] = n + n += 1 + if isinstance(op, Assign): + if op.dest not in result: + result[op.dest] = n + n += 1 + elif op not in result: + result[op] = n + n += 1 + + return result diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/spill.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/transform/spill.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..2d020ef Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/transform/spill.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/spill.py b/.venv/lib/python3.12/site-packages/mypyc/transform/spill.py new file mode 100644 index 0000000..d92dd66 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/transform/spill.py @@ -0,0 +1,113 @@ +"""Insert spills for values that are live across yields.""" + +from __future__ import annotations + +from mypyc.analysis.dataflow import AnalysisResult, analyze_live_regs, get_cfg +from mypyc.common import TEMP_ATTR_NAME +from mypyc.ir.class_ir import ClassIR +from mypyc.ir.func_ir import FuncIR +from mypyc.ir.ops import ( + BasicBlock, + Branch, + DecRef, + GetAttr, + IncRef, + LoadErrorValue, + Register, + SetAttr, + Value, +) + + +def insert_spills(ir: FuncIR, env: ClassIR) -> None: + cfg = get_cfg(ir.blocks, use_yields=True) + live = analyze_live_regs(ir.blocks, cfg) + entry_live = live.before[ir.blocks[0], 0] + + entry_live = {op for op in entry_live if not (isinstance(op, Register) and op.is_arg)} + # TODO: Actually for now, no Registers at all -- we keep the manual spills + entry_live = {op for op in entry_live if not isinstance(op, Register)} + + ir.blocks = spill_regs(ir.blocks, env, entry_live, live, ir.arg_regs[0]) + + +def spill_regs( + blocks: list[BasicBlock], + env: ClassIR, + to_spill: set[Value], + live: AnalysisResult[Value], + self_reg: Register, +) -> list[BasicBlock]: + env_reg: Value + for op in blocks[0].ops: + if isinstance(op, GetAttr) and op.attr == "__mypyc_env__": + env_reg = op + break + else: + # Environment has been merged into generator object + env_reg = self_reg + + spill_locs = {} + for i, val in enumerate(to_spill): + name = f"{TEMP_ATTR_NAME}2_{i}" + env.attributes[name] = val.type + if val.type.error_overlap: + # We can safely treat as always initialized, since the type has no pointers. + # This way we also don't need to manage the defined attribute bitfield. + env._always_initialized_attrs.add(name) + spill_locs[val] = name + + for block in blocks: + ops = block.ops + block.ops = [] + + for i, op in enumerate(ops): + to_decref = [] + + if isinstance(op, IncRef) and op.src in spill_locs: + raise AssertionError("not sure what to do with an incref of a spill...") + if isinstance(op, DecRef) and op.src in spill_locs: + # When we decref a spilled value, we turn that into + # NULLing out the attribute, but only if the spilled + # value is not live *when we include yields in the + # CFG*. (The original decrefs are computed without that.) + # + # We also skip a decref is the env register is not + # live. That should only happen when an exception is + # being raised, so everything should be handled there. 
+ if op.src not in live.after[block, i] and env_reg in live.after[block, i]: + # Skip the DecRef but null out the spilled location + null = LoadErrorValue(op.src.type) + block.ops.extend([null, SetAttr(env_reg, spill_locs[op.src], null, op.line)]) + continue + + if ( + any(src in spill_locs for src in op.sources()) + # N.B: IS_ERROR should be before a spill happens + # XXX: but could we have a regular branch? + and not (isinstance(op, Branch) and op.op == Branch.IS_ERROR) + ): + new_sources: list[Value] = [] + stolen = op.stolen() + for src in op.sources(): + if src in spill_locs: + read = GetAttr(env_reg, spill_locs[src], op.line) + block.ops.append(read) + new_sources.append(read) + if src.type.is_refcounted and src not in stolen: + to_decref.append(read) + else: + new_sources.append(src) + + op.set_sources(new_sources) + + block.ops.append(op) + + for dec in to_decref: + block.ops.append(DecRef(dec)) + + if op in spill_locs: + # XXX: could we set uninit? + block.ops.append(SetAttr(env_reg, spill_locs[op], op, op.line)) + + return blocks diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/uninit.cpython-312-x86_64-linux-gnu.so b/.venv/lib/python3.12/site-packages/mypyc/transform/uninit.cpython-312-x86_64-linux-gnu.so new file mode 100755 index 0000000..7e97d87 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/mypyc/transform/uninit.cpython-312-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.12/site-packages/mypyc/transform/uninit.py b/.venv/lib/python3.12/site-packages/mypyc/transform/uninit.py new file mode 100644 index 0000000..45b4035 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/mypyc/transform/uninit.py @@ -0,0 +1,195 @@ +"""Insert checks for uninitialized values.""" + +from __future__ import annotations + +from mypyc.analysis.dataflow import AnalysisDict, analyze_must_defined_regs, cleanup_cfg, get_cfg +from mypyc.common import BITMAP_BITS +from mypyc.ir.func_ir import FuncIR, all_values +from mypyc.ir.ops import ( + Assign, + BasicBlock, + Branch, + ComparisonOp, + Integer, + IntOp, + LoadAddress, + LoadErrorValue, + Op, + RaiseStandardError, + Register, + Unreachable, + Value, +) +from mypyc.ir.rtypes import bitmap_rprimitive + + +def insert_uninit_checks(ir: FuncIR) -> None: + # Remove dead blocks from the CFG, which helps avoid spurious + # checks due to unused error handling blocks. + cleanup_cfg(ir.blocks) + + cfg = get_cfg(ir.blocks) + must_defined = analyze_must_defined_regs( + ir.blocks, cfg, set(ir.arg_regs), all_values(ir.arg_regs, ir.blocks) + ) + + ir.blocks = split_blocks_at_uninits(ir.blocks, must_defined.before) + + +def split_blocks_at_uninits( + blocks: list[BasicBlock], pre_must_defined: AnalysisDict[Value] +) -> list[BasicBlock]: + new_blocks: list[BasicBlock] = [] + + init_registers = [] + init_registers_set = set() + bitmap_registers: list[Register] = [] # Init status bitmaps + bitmap_backed: list[Register] = [] # These use bitmaps to track init status + + # First split blocks on ops that may raise. + for block in blocks: + ops = block.ops + block.ops = [] + cur_block = block + new_blocks.append(cur_block) + + for i, op in enumerate(ops): + defined = pre_must_defined[block, i] + for src in op.unique_sources(): + # If a register operand is not guaranteed to be + # initialized is an operand to something other than a + # check that it is defined, insert a check. 
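+                # (I.e. the operand may still be undefined here and this op
+                # actually reads its value, so a runtime check that raises
+                # UnboundLocalError is inserted before the op.)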
+ + # Note that for register operand in a LoadAddress op, + # we should be able to use it without initialization + # as we may need to use its address to update itself + if ( + isinstance(src, Register) + and src not in defined + and not (isinstance(op, Branch) and op.op == Branch.IS_ERROR) + and not isinstance(op, LoadAddress) + ): + if src not in init_registers_set: + init_registers.append(src) + init_registers_set.add(src) + + # XXX: if src.name is empty, it should be a + # temp... and it should be OK?? + if not src.name: + continue + + new_block, error_block = BasicBlock(), BasicBlock() + new_block.error_handler = error_block.error_handler = cur_block.error_handler + new_blocks += [error_block, new_block] + + if not src.type.error_overlap: + cur_block.ops.append( + Branch( + src, + true_label=error_block, + false_label=new_block, + op=Branch.IS_ERROR, + line=op.line, + ) + ) + else: + # We need to use bitmap for this one. + check_for_uninit_using_bitmap( + cur_block.ops, + src, + bitmap_registers, + bitmap_backed, + error_block, + new_block, + op.line, + ) + + raise_std = RaiseStandardError( + RaiseStandardError.UNBOUND_LOCAL_ERROR, + f'local variable "{src.name}" referenced before assignment', + op.line, + ) + error_block.ops.append(raise_std) + error_block.ops.append(Unreachable()) + cur_block = new_block + cur_block.ops.append(op) + + if bitmap_backed: + update_register_assignments_to_set_bitmap(new_blocks, bitmap_registers, bitmap_backed) + + if init_registers: + new_ops: list[Op] = [] + for reg in init_registers: + err = LoadErrorValue(reg.type, undefines=True) + new_ops.append(err) + new_ops.append(Assign(reg, err)) + for reg in bitmap_registers: + new_ops.append(Assign(reg, Integer(0, bitmap_rprimitive))) + new_blocks[0].ops[0:0] = new_ops + + return new_blocks + + +def check_for_uninit_using_bitmap( + ops: list[Op], + src: Register, + bitmap_registers: list[Register], + bitmap_backed: list[Register], + error_block: BasicBlock, + ok_block: BasicBlock, + line: int, +) -> None: + """Check if src is defined using a bitmap. + + Modifies ops, bitmap_registers and bitmap_backed. + """ + if src not in bitmap_backed: + # Set up a new bitmap backed register. + bitmap_backed.append(src) + n = (len(bitmap_backed) - 1) // BITMAP_BITS + if len(bitmap_registers) <= n: + bitmap_registers.append(Register(bitmap_rprimitive, f"__locals_bitmap{n}")) + + index = bitmap_backed.index(src) + masked = IntOp( + bitmap_rprimitive, + bitmap_registers[index // BITMAP_BITS], + Integer(1 << (index & (BITMAP_BITS - 1)), bitmap_rprimitive), + IntOp.AND, + line, + ) + ops.append(masked) + chk = ComparisonOp(masked, Integer(0, bitmap_rprimitive), ComparisonOp.EQ) + ops.append(chk) + ops.append(Branch(chk, error_block, ok_block, Branch.BOOL)) + + +def update_register_assignments_to_set_bitmap( + blocks: list[BasicBlock], bitmap_registers: list[Register], bitmap_backed: list[Register] +) -> None: + """Update some assignments to registers to also set a bit in a bitmap. + + The bitmaps are used to track if a local variable has been assigned to. + + Modifies blocks. 
+ """ + for block in blocks: + if any(isinstance(op, Assign) and op.dest in bitmap_backed for op in block.ops): + new_ops: list[Op] = [] + for op in block.ops: + if isinstance(op, Assign) and op.dest in bitmap_backed: + index = bitmap_backed.index(op.dest) + new_ops.append(op) + reg = bitmap_registers[index // BITMAP_BITS] + new = IntOp( + bitmap_rprimitive, + reg, + Integer(1 << (index & (BITMAP_BITS - 1)), bitmap_rprimitive), + IntOp.OR, + op.line, + ) + new_ops.append(new) + new_ops.append(Assign(reg, new)) + else: + new_ops.append(op) + block.ops = new_ops diff --git a/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/METADATA b/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/METADATA new file mode 100644 index 0000000..3200e60 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/METADATA @@ -0,0 +1,107 @@ +Metadata-Version: 2.4 +Name: packaging +Version: 26.0 +Summary: Core utilities for Python packages +Author-email: Donald Stufft +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-Expression: Apache-2.0 OR BSD-2-Clause +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Typing :: Typed +License-File: LICENSE +License-File: LICENSE.APACHE +License-File: LICENSE.BSD +Project-URL: Documentation, https://packaging.pypa.io/ +Project-URL: Source, https://github.com/pypa/packaging + +packaging +========= + +.. start-intro + +Reusable core utilities for various Python Packaging +`interoperability specifications `_. + +This library provides utilities that implement the interoperability +specifications which have clearly one correct behaviour (eg: :pep:`440`) +or benefit greatly from having a single shared implementation (eg: :pep:`425`). + +.. end-intro + +The ``packaging`` project includes the following: version handling, specifiers, +markers, requirements, tags, metadata, lockfiles, utilities. + +Documentation +------------- + +The `documentation`_ provides information and the API for the following: + +- Version Handling +- Specifiers +- Markers +- Requirements +- Tags +- Metadata +- Lockfiles +- Utilities + +Installation +------------ + +Use ``pip`` to install these utilities:: + + pip install packaging + +The ``packaging`` library uses calendar-based versioning (``YY.N``). + +Discussion +---------- + +If you run into bugs, you can file them in our `issue tracker`_. + +You can also join ``#pypa`` on Freenode to ask questions or get involved. + + +.. _`documentation`: https://packaging.pypa.io/ +.. 
_`issue tracker`: https://github.com/pypa/packaging/issues + + +Code of Conduct +--------------- + +Everyone interacting in the packaging project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. + +.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + +Contributing +------------ + +The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as +well as how to report a potential security issue. The documentation for this +project also covers information about `project development`_ and `security`_. + +.. _`project development`: https://packaging.pypa.io/en/latest/development/ +.. _`security`: https://packaging.pypa.io/en/latest/security/ + +Project History +--------------- + +Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for +recent changes and project history. + +.. _`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/ + diff --git a/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/RECORD b/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/RECORD new file mode 100644 index 0000000..c4d0eb5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/RECORD @@ -0,0 +1,42 @@ +packaging-26.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +packaging-26.0.dist-info/METADATA,sha256=M2K7fWom2iliuo2qpHhc0LrKwhq6kIoRlcyPWVgKJlo,3309 +packaging-26.0.dist-info/RECORD,, +packaging-26.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 +packaging-26.0.dist-info/licenses/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197 +packaging-26.0.dist-info/licenses/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174 +packaging-26.0.dist-info/licenses/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344 +packaging/__init__.py,sha256=y4lVbpeBzCGk-IPDw5BGBZ_b0P3ukEEJZAbGYc6Ey8c,494 +packaging/__pycache__/__init__.cpython-312.pyc,, +packaging/__pycache__/_elffile.cpython-312.pyc,, +packaging/__pycache__/_manylinux.cpython-312.pyc,, +packaging/__pycache__/_musllinux.cpython-312.pyc,, +packaging/__pycache__/_parser.cpython-312.pyc,, +packaging/__pycache__/_structures.cpython-312.pyc,, +packaging/__pycache__/_tokenizer.cpython-312.pyc,, +packaging/__pycache__/markers.cpython-312.pyc,, +packaging/__pycache__/metadata.cpython-312.pyc,, +packaging/__pycache__/pylock.cpython-312.pyc,, +packaging/__pycache__/requirements.cpython-312.pyc,, +packaging/__pycache__/specifiers.cpython-312.pyc,, +packaging/__pycache__/tags.cpython-312.pyc,, +packaging/__pycache__/utils.cpython-312.pyc,, +packaging/__pycache__/version.cpython-312.pyc,, +packaging/_elffile.py,sha256=-sKkptYqzYw2-x3QByJa5mB4rfPWu1pxkZHRx1WAFCY,3211 +packaging/_manylinux.py,sha256=Hf6nB0cOrayEs96-p3oIXAgGnFquv20DO5l-o2_Xnv0,9559 +packaging/_musllinux.py,sha256=Z6swjH3MA7XS3qXnmMN7QPhqP3fnoYI0eQ18e9-HgAE,2707 +packaging/_parser.py,sha256=U_DajsEx2VoC_F46fSVV3hDKNCWoQYkPkasO3dld0ig,10518 +packaging/_structures.py,sha256=Hn49Ta8zV9Wo8GiCL8Nl2ARZY983Un3pruZGVNldPwE,1514 +packaging/_tokenizer.py,sha256=M8EwNIdXeL9NMFuFrQtiOKwjka_xFx8KjRQnfE8O_z8,5421 +packaging/licenses/__init__.py,sha256=TwXLHZCXwSgdFwRLPxW602T6mSieunSFHM6fp8pgW78,5819 +packaging/licenses/__pycache__/__init__.cpython-312.pyc,, +packaging/licenses/__pycache__/_spdx.cpython-312.pyc,, +packaging/licenses/_spdx.py,sha256=WW7DXiyg68up_YND_wpRYlr1SHhiV4FfJLQffghhMxQ,51122 
+packaging/markers.py,sha256=ZX-cLvW1S3cZcEc0fHI4z7zSx5U2T19yMpDP_mE-CYw,12771 +packaging/metadata.py,sha256=CWVZpN_HfoYMSSDuCP7igOvGgqA9AOmpW8f3qTisfnc,39360 +packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +packaging/pylock.py,sha256=-R1uNfJ4PaLto7Mg62YsGOHgvskuiIEqPwxOywl42Jk,22537 +packaging/requirements.py,sha256=PMCAWD8aNMnVD-6uZMedhBuAVX2573eZ4yPBLXmz04I,2870 +packaging/specifiers.py,sha256=EPNPimY_zFivthv1vdjZYz5IqkKGsnKR2yKh-EVyvZw,40797 +packaging/tags.py,sha256=cXLV1pJD3UtJlDg7Wz3zrfdQhRZqr8jumSAKKAAd2xE,22856 +packaging/utils.py,sha256=N4c6oZzFJy6klTZ3AnkNz7sSkJesuFWPp68LA3B5dAo,5040 +packaging/version.py,sha256=7XWlL2IDYLwDYC0ht6cFEhapLwLWbmyo4rb7sEFj0x8,23272 diff --git a/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/WHEEL new file mode 100644 index 0000000..d8b9936 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.12.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/licenses/LICENSE b/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..6f62d44 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/licenses/LICENSE @@ -0,0 +1,3 @@ +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made +under the terms of *both* these licenses. diff --git a/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/licenses/LICENSE.APACHE b/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/licenses/LICENSE.APACHE new file mode 100644 index 0000000..f433b1a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/licenses/LICENSE.APACHE @@ -0,0 +1,177 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS diff --git a/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/licenses/LICENSE.BSD b/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/licenses/LICENSE.BSD new file mode 100644 index 0000000..42ce7b7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging-26.0.dist-info/licenses/LICENSE.BSD @@ -0,0 +1,23 @@ +Copyright (c) Donald Stufft and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/lib/python3.12/site-packages/packaging/__init__.py b/.venv/lib/python3.12/site-packages/packaging/__init__.py new file mode 100644 index 0000000..21695a7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging/__init__.py @@ -0,0 +1,15 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +__title__ = "packaging" +__summary__ = "Core utilities for Python packages" +__uri__ = "https://github.com/pypa/packaging" + +__version__ = "26.0" + +__author__ = "Donald Stufft and individual contributors" +__email__ = "donald@stufft.io" + +__license__ = "BSD-2-Clause or Apache-2.0" +__copyright__ = f"2014 {__author__}" diff --git a/.venv/lib/python3.12/site-packages/packaging/_elffile.py b/.venv/lib/python3.12/site-packages/packaging/_elffile.py new file mode 100644 index 0000000..497b064 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging/_elffile.py @@ -0,0 +1,108 @@ +""" +ELF file parser. + +This provides a class ``ELFFile`` that parses an ELF executable in a similar +interface to ``ZipFile``. Only the read interface is implemented. + +ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html +""" + +from __future__ import annotations + +import enum +import os +import struct +from typing import IO + + +class ELFInvalid(ValueError): + pass + + +class EIClass(enum.IntEnum): + C32 = 1 + C64 = 2 + + +class EIData(enum.IntEnum): + Lsb = 1 + Msb = 2 + + +class EMachine(enum.IntEnum): + I386 = 3 + S390 = 22 + Arm = 40 + X8664 = 62 + AArc64 = 183 + + +class ELFFile: + """ + Representation of an ELF executable. + """ + + def __init__(self, f: IO[bytes]) -> None: + self._f = f + + try: + ident = self._read("16B") + except struct.error as e: + raise ELFInvalid("unable to parse identification") from e + magic = bytes(ident[:4]) + if magic != b"\x7fELF": + raise ELFInvalid(f"invalid magic: {magic!r}") + + self.capacity = ident[4] # Format for program header (bitness). + self.encoding = ident[5] # Data structure encoding (endianness). + + try: + # e_fmt: Format for program header. + # p_fmt: Format for section header. + # p_idx: Indexes to find p_type, p_offset, and p_filesz. + e_fmt, self._p_fmt, self._p_idx = { + (1, 1): ("HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)), # 32-bit MSB. + (2, 1): ("HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)), # 64-bit MSB. + }[(self.capacity, self.encoding)] + except KeyError as e: + raise ELFInvalid( + f"unrecognized capacity ({self.capacity}) or encoding ({self.encoding})" + ) from e + + try: + ( + _, + self.machine, # Architecture type. + _, + _, + self._e_phoff, # Offset of program header. + _, + self.flags, # Processor-specific flags. + _, + self._e_phentsize, # Size of section. + self._e_phnum, # Number of sections. + ) = self._read(e_fmt) + except struct.error as e: + raise ELFInvalid("unable to parse machine and section information") from e + + def _read(self, fmt: str) -> tuple[int, ...]: + return struct.unpack(fmt, self._f.read(struct.calcsize(fmt))) + + @property + def interpreter(self) -> str | None: + """ + The path recorded in the ``PT_INTERP`` section header. + """ + for index in range(self._e_phnum): + self._f.seek(self._e_phoff + self._e_phentsize * index) + try: + data = self._read(self._p_fmt) + except struct.error: + continue + if data[self._p_idx[0]] != 3: # Not PT_INTERP. 
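+                # Segment type 3 is PT_INTERP (path to the program
+                # interpreter) in the ELF program header.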
+ continue + self._f.seek(data[self._p_idx[1]]) + return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0") + return None diff --git a/.venv/lib/python3.12/site-packages/packaging/_manylinux.py b/.venv/lib/python3.12/site-packages/packaging/_manylinux.py new file mode 100644 index 0000000..0e79e8a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging/_manylinux.py @@ -0,0 +1,262 @@ +from __future__ import annotations + +import collections +import contextlib +import functools +import os +import re +import sys +import warnings +from typing import Generator, Iterator, NamedTuple, Sequence + +from ._elffile import EIClass, EIData, ELFFile, EMachine + +EF_ARM_ABIMASK = 0xFF000000 +EF_ARM_ABI_VER5 = 0x05000000 +EF_ARM_ABI_FLOAT_HARD = 0x00000400 + +_ALLOWED_ARCHS = { + "x86_64", + "aarch64", + "ppc64", + "ppc64le", + "s390x", + "loongarch64", + "riscv64", +} + + +# `os.PathLike` not a generic type until Python 3.9, so sticking with `str` +# as the type for `path` until then. +@contextlib.contextmanager +def _parse_elf(path: str) -> Generator[ELFFile | None, None, None]: + try: + with open(path, "rb") as f: + yield ELFFile(f) + except (OSError, TypeError, ValueError): + yield None + + +def _is_linux_armhf(executable: str) -> bool: + # hard-float ABI can be detected from the ELF header of the running + # process + # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf + with _parse_elf(executable) as f: + return ( + f is not None + and f.capacity == EIClass.C32 + and f.encoding == EIData.Lsb + and f.machine == EMachine.Arm + and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5 + and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD + ) + + +def _is_linux_i686(executable: str) -> bool: + with _parse_elf(executable) as f: + return ( + f is not None + and f.capacity == EIClass.C32 + and f.encoding == EIData.Lsb + and f.machine == EMachine.I386 + ) + + +def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool: + if "armv7l" in archs: + return _is_linux_armhf(executable) + if "i686" in archs: + return _is_linux_i686(executable) + return any(arch in _ALLOWED_ARCHS for arch in archs) + + +# If glibc ever changes its major version, we need to know what the last +# minor version was, so we can build the complete list of all versions. +# For now, guess what the highest minor version might be, assume it will +# be 50 for testing. Once this actually happens, update the dictionary +# with the actual value. +_LAST_GLIBC_MINOR: dict[int, int] = collections.defaultdict(lambda: 50) + + +class _GLibCVersion(NamedTuple): + major: int + minor: int + + +def _glibc_version_string_confstr() -> str | None: + """ + Primary implementation of glibc_version_string using os.confstr. + """ + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module. + # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183 + try: + # Should be a string like "glibc 2.17". + version_string: str | None = os.confstr("CS_GNU_LIBC_VERSION") + assert version_string is not None + _, version = version_string.rsplit() + except (AssertionError, AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def _glibc_version_string_ctypes() -> str | None: + """ + Fallback implementation of glibc_version_string using ctypes. 
+ """ + try: + import ctypes # noqa: PLC0415 + except ImportError: + return None + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + # + # We must also handle the special case where the executable is not a + # dynamically linked executable. This can occur when using musl libc, + # for example. In this situation, dlopen() will error, leading to an + # OSError. Interestingly, at least in the case of musl, there is no + # errno set on the OSError. The single string argument used to construct + # OSError comes from libc itself and is therefore not portable to + # hard code here. In any case, failure to call dlopen() means we + # can proceed, so we bail on our attempt. + try: + process_namespace = ctypes.CDLL(None) + except OSError: + return None + + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. + return None + + # Call gnu_get_libc_version, which returns a string like "2.5" + gnu_get_libc_version.restype = ctypes.c_char_p + version_str: str = gnu_get_libc_version() + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + return version_str + + +def _glibc_version_string() -> str | None: + """Returns glibc version string, or None if not using glibc.""" + return _glibc_version_string_confstr() or _glibc_version_string_ctypes() + + +def _parse_glibc_version(version_str: str) -> _GLibCVersion: + """Parse glibc version. + + We use a regexp instead of str.split because we want to discard any + random junk that might come after the minor version -- this might happen + in patched/forked versions of glibc (e.g. Linaro's version of glibc + uses version strings like "2.20-2014.11"). See gh-3588. + """ + m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) + if not m: + warnings.warn( + f"Expected glibc version with 2 components major.minor, got: {version_str}", + RuntimeWarning, + stacklevel=2, + ) + return _GLibCVersion(-1, -1) + return _GLibCVersion(int(m.group("major")), int(m.group("minor"))) + + +@functools.lru_cache +def _get_glibc_version() -> _GLibCVersion: + version_str = _glibc_version_string() + if version_str is None: + return _GLibCVersion(-1, -1) + return _parse_glibc_version(version_str) + + +# From PEP 513, PEP 600 +def _is_compatible(arch: str, version: _GLibCVersion) -> bool: + sys_glibc = _get_glibc_version() + if sys_glibc < version: + return False + # Check for presence of _manylinux module. 
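+    # PEP 513 / PEP 600 allow an installed module named _manylinux to
+    # override the compatibility detection below, e.g. on distros where the
+    # glibc heuristic is not appropriate.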
+ try: + import _manylinux # noqa: PLC0415 + except ImportError: + return True + if hasattr(_manylinux, "manylinux_compatible"): + result = _manylinux.manylinux_compatible(version[0], version[1], arch) + if result is not None: + return bool(result) + return True + if version == _GLibCVersion(2, 5) and hasattr(_manylinux, "manylinux1_compatible"): + return bool(_manylinux.manylinux1_compatible) + if version == _GLibCVersion(2, 12) and hasattr( + _manylinux, "manylinux2010_compatible" + ): + return bool(_manylinux.manylinux2010_compatible) + if version == _GLibCVersion(2, 17) and hasattr( + _manylinux, "manylinux2014_compatible" + ): + return bool(_manylinux.manylinux2014_compatible) + return True + + +_LEGACY_MANYLINUX_MAP: dict[_GLibCVersion, str] = { + # CentOS 7 w/ glibc 2.17 (PEP 599) + _GLibCVersion(2, 17): "manylinux2014", + # CentOS 6 w/ glibc 2.12 (PEP 571) + _GLibCVersion(2, 12): "manylinux2010", + # CentOS 5 w/ glibc 2.5 (PEP 513) + _GLibCVersion(2, 5): "manylinux1", +} + + +def platform_tags(archs: Sequence[str]) -> Iterator[str]: + """Generate manylinux tags compatible to the current platform. + + :param archs: Sequence of compatible architectures. + The first one shall be the closest to the actual architecture and be the part of + platform tag after the ``linux_`` prefix, e.g. ``x86_64``. + The ``linux_`` prefix is assumed as a prerequisite for the current platform to + be manylinux-compatible. + + :returns: An iterator of compatible manylinux tags. + """ + if not _have_compatible_abi(sys.executable, archs): + return + # Oldest glibc to be supported regardless of architecture is (2, 17). + too_old_glibc2 = _GLibCVersion(2, 16) + if set(archs) & {"x86_64", "i686"}: + # On x86/i686 also oldest glibc to be supported is (2, 5). + too_old_glibc2 = _GLibCVersion(2, 4) + current_glibc = _GLibCVersion(*_get_glibc_version()) + glibc_max_list = [current_glibc] + # We can assume compatibility across glibc major versions. + # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 + # + # Build a list of maximum glibc versions so that we can + # output the canonical list of all glibc from current_glibc + # down to too_old_glibc2, including all intermediary versions. + for glibc_major in range(current_glibc.major - 1, 1, -1): + glibc_minor = _LAST_GLIBC_MINOR[glibc_major] + glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor)) + for arch in archs: + for glibc_max in glibc_max_list: + if glibc_max.major == too_old_glibc2.major: + min_minor = too_old_glibc2.minor + else: + # For other glibc major versions oldest supported is (x, 0). + min_minor = -1 + for glibc_minor in range(glibc_max.minor, min_minor, -1): + glibc_version = _GLibCVersion(glibc_max.major, glibc_minor) + if _is_compatible(arch, glibc_version): + yield "manylinux_{}_{}_{}".format(*glibc_version, arch) + + # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. + if legacy_tag := _LEGACY_MANYLINUX_MAP.get(glibc_version): + yield f"{legacy_tag}_{arch}" diff --git a/.venv/lib/python3.12/site-packages/packaging/_musllinux.py b/.venv/lib/python3.12/site-packages/packaging/_musllinux.py new file mode 100644 index 0000000..4e8116a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging/_musllinux.py @@ -0,0 +1,85 @@ +"""PEP 656 support. + +This module implements logic to detect if the currently running Python is +linked against musl, and what musl version is used. 
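+
+Illustrative sketch (results depend on the running interpreter; both helpers
+are defined below)::
+
+    import sys
+    _get_musl_version(sys.executable)   # e.g. _MuslVersion(major=1, minor=2), or None off musl
+    list(platform_tags(["x86_64"]))     # e.g. ['musllinux_1_2_x86_64', ..., 'musllinux_1_0_x86_64']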
+""" + +from __future__ import annotations + +import functools +import re +import subprocess +import sys +from typing import Iterator, NamedTuple, Sequence + +from ._elffile import ELFFile + + +class _MuslVersion(NamedTuple): + major: int + minor: int + + +def _parse_musl_version(output: str) -> _MuslVersion | None: + lines = [n for n in (n.strip() for n in output.splitlines()) if n] + if len(lines) < 2 or lines[0][:4] != "musl": + return None + m = re.match(r"Version (\d+)\.(\d+)", lines[1]) + if not m: + return None + return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) + + +@functools.lru_cache +def _get_musl_version(executable: str) -> _MuslVersion | None: + """Detect currently-running musl runtime version. + + This is done by checking the specified executable's dynamic linking + information, and invoking the loader to parse its output for a version + string. If the loader is musl, the output would be something like:: + + musl libc (x86_64) + Version 1.2.2 + Dynamic Program Loader + """ + try: + with open(executable, "rb") as f: + ld = ELFFile(f).interpreter + except (OSError, TypeError, ValueError): + return None + if ld is None or "musl" not in ld: + return None + proc = subprocess.run([ld], check=False, stderr=subprocess.PIPE, text=True) + return _parse_musl_version(proc.stderr) + + +def platform_tags(archs: Sequence[str]) -> Iterator[str]: + """Generate musllinux tags compatible to the current platform. + + :param archs: Sequence of compatible architectures. + The first one shall be the closest to the actual architecture and be the part of + platform tag after the ``linux_`` prefix, e.g. ``x86_64``. + The ``linux_`` prefix is assumed as a prerequisite for the current platform to + be musllinux-compatible. + + :returns: An iterator of compatible musllinux tags. + """ + sys_musl = _get_musl_version(sys.executable) + if sys_musl is None: # Python not dynamically linked against musl. + return + for arch in archs: + for minor in range(sys_musl.minor, -1, -1): + yield f"musllinux_{sys_musl.major}_{minor}_{arch}" + + +if __name__ == "__main__": # pragma: no cover + import sysconfig + + plat = sysconfig.get_platform() + assert plat.startswith("linux-"), "not linux" + + print("plat:", plat) + print("musl:", _get_musl_version(sys.executable)) + print("tags:", end=" ") + for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])): + print(t, end="\n ") diff --git a/.venv/lib/python3.12/site-packages/packaging/_parser.py b/.venv/lib/python3.12/site-packages/packaging/_parser.py new file mode 100644 index 0000000..f6c1f5c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging/_parser.py @@ -0,0 +1,365 @@ +"""Handwritten parser of dependency specifiers. + +The docstring for each __parse_* function contains EBNF-inspired grammar representing +the implementation. 
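+
+Illustrative sketch of the entry point defined below (marker node reprs
+elided)::
+
+    parse_requirement("name[extra]>=1.0; python_version >= '3.8'")
+    # -> ParsedRequirement(name='name', url='', extras=['extra'],
+    #                      specifier='>=1.0', marker=[...])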
+""" + +from __future__ import annotations + +import ast +from typing import List, Literal, NamedTuple, Sequence, Tuple, Union + +from ._tokenizer import DEFAULT_RULES, Tokenizer + + +class Node: + __slots__ = ("value",) + + def __init__(self, value: str) -> None: + self.value = value + + def __str__(self) -> str: + return self.value + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}({self.value!r})>" + + def serialize(self) -> str: + raise NotImplementedError + + +class Variable(Node): + __slots__ = () + + def serialize(self) -> str: + return str(self) + + +class Value(Node): + __slots__ = () + + def serialize(self) -> str: + return f'"{self}"' + + +class Op(Node): + __slots__ = () + + def serialize(self) -> str: + return str(self) + + +MarkerLogical = Literal["and", "or"] +MarkerVar = Union[Variable, Value] +MarkerItem = Tuple[MarkerVar, Op, MarkerVar] +MarkerAtom = Union[MarkerItem, Sequence["MarkerAtom"]] +MarkerList = List[Union["MarkerList", MarkerAtom, MarkerLogical]] + + +class ParsedRequirement(NamedTuple): + name: str + url: str + extras: list[str] + specifier: str + marker: MarkerList | None + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for dependency specifier +# -------------------------------------------------------------------------------------- +def parse_requirement(source: str) -> ParsedRequirement: + return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement: + """ + requirement = WS? IDENTIFIER WS? extras WS? requirement_details + """ + tokenizer.consume("WS") + + name_token = tokenizer.expect( + "IDENTIFIER", expected="package name at the start of dependency specifier" + ) + name = name_token.text + tokenizer.consume("WS") + + extras = _parse_extras(tokenizer) + tokenizer.consume("WS") + + url, specifier, marker = _parse_requirement_details(tokenizer) + tokenizer.expect("END", expected="end of dependency specifier") + + return ParsedRequirement(name, url, extras, specifier, marker) + + +def _parse_requirement_details( + tokenizer: Tokenizer, +) -> tuple[str, str, MarkerList | None]: + """ + requirement_details = AT URL (WS requirement_marker?)? + | specifier WS? (requirement_marker)? + """ + + specifier = "" + url = "" + marker = None + + if tokenizer.check("AT"): + tokenizer.read() + tokenizer.consume("WS") + + url_start = tokenizer.position + url = tokenizer.expect("URL", expected="URL after @").text + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + tokenizer.expect("WS", expected="whitespace after URL") + + # The input might end after whitespace. 
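+        # (e.g. "pkg @ https://example.com/pkg.whl " -- a trailing space and
+        # nothing else -- is a complete requirement with no marker.)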
+ if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, + span_start=url_start, + expected="semicolon (after URL and whitespace)", + ) + else: + specifier_start = tokenizer.position + specifier = _parse_specifier(tokenizer) + tokenizer.consume("WS") + + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, + span_start=specifier_start, + expected=( + "comma (within version specifier), semicolon (after version specifier)" + if specifier + else "semicolon (after name with no version specifier)" + ), + ) + + return (url, specifier, marker) + + +def _parse_requirement_marker( + tokenizer: Tokenizer, *, span_start: int, expected: str +) -> MarkerList: + """ + requirement_marker = SEMICOLON marker WS? + """ + + if not tokenizer.check("SEMICOLON"): + tokenizer.raise_syntax_error( + f"Expected {expected} or end", + span_start=span_start, + span_end=None, + ) + tokenizer.read() + + marker = _parse_marker(tokenizer) + tokenizer.consume("WS") + + return marker + + +def _parse_extras(tokenizer: Tokenizer) -> list[str]: + """ + extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)? + """ + if not tokenizer.check("LEFT_BRACKET", peek=True): + return [] + + with tokenizer.enclosing_tokens( + "LEFT_BRACKET", + "RIGHT_BRACKET", + around="extras", + ): + tokenizer.consume("WS") + extras = _parse_extras_list(tokenizer) + tokenizer.consume("WS") + + return extras + + +def _parse_extras_list(tokenizer: Tokenizer) -> list[str]: + """ + extras_list = identifier (wsp* ',' wsp* identifier)* + """ + extras: list[str] = [] + + if not tokenizer.check("IDENTIFIER"): + return extras + + extras.append(tokenizer.read().text) + + while True: + tokenizer.consume("WS") + if tokenizer.check("IDENTIFIER", peek=True): + tokenizer.raise_syntax_error("Expected comma between extra names") + elif not tokenizer.check("COMMA"): + break + + tokenizer.read() + tokenizer.consume("WS") + + extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma") + extras.append(extra_token.text) + + return extras + + +def _parse_specifier(tokenizer: Tokenizer) -> str: + """ + specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS + | WS? version_many WS? + """ + with tokenizer.enclosing_tokens( + "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", + around="version specifier", + ): + tokenizer.consume("WS") + parsed_specifiers = _parse_version_many(tokenizer) + tokenizer.consume("WS") + + return parsed_specifiers + + +def _parse_version_many(tokenizer: Tokenizer) -> str: + """ + version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)? 
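+
+    e.g. ">=1.0, <2.0" is read as SPECIFIER COMMA SPECIFIER, with the optional
+    whitespace around the comma consumed.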
+ """ + parsed_specifiers = "" + while tokenizer.check("SPECIFIER"): + span_start = tokenizer.position + parsed_specifiers += tokenizer.read().text + if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True): + tokenizer.raise_syntax_error( + ".* suffix can only be used with `==` or `!=` operators", + span_start=span_start, + span_end=tokenizer.position + 1, + ) + if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True): + tokenizer.raise_syntax_error( + "Local version label can only be used with `==` or `!=` operators", + span_start=span_start, + span_end=tokenizer.position, + ) + tokenizer.consume("WS") + if not tokenizer.check("COMMA"): + break + parsed_specifiers += tokenizer.read().text + tokenizer.consume("WS") + + return parsed_specifiers + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for marker expression +# -------------------------------------------------------------------------------------- +def parse_marker(source: str) -> MarkerList: + return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList: + retval = _parse_marker(tokenizer) + tokenizer.expect("END", expected="end of marker expression") + return retval + + +def _parse_marker(tokenizer: Tokenizer) -> MarkerList: + """ + marker = marker_atom (BOOLOP marker_atom)+ + """ + expression = [_parse_marker_atom(tokenizer)] + while tokenizer.check("BOOLOP"): + token = tokenizer.read() + expr_right = _parse_marker_atom(tokenizer) + expression.extend((token.text, expr_right)) + return expression + + +def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom: + """ + marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS? + | WS? marker_item WS? + """ + + tokenizer.consume("WS") + if tokenizer.check("LEFT_PARENTHESIS", peek=True): + with tokenizer.enclosing_tokens( + "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", + around="marker expression", + ): + tokenizer.consume("WS") + marker: MarkerAtom = _parse_marker(tokenizer) + tokenizer.consume("WS") + else: + marker = _parse_marker_item(tokenizer) + tokenizer.consume("WS") + return marker + + +def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem: + """ + marker_item = WS? marker_var WS? marker_op WS? marker_var WS? 
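+
+    e.g. the marker  python_version >= "3.8"  parses to the triple
+    (Variable('python_version'), Op('>='), Value('3.8')).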
+ """ + tokenizer.consume("WS") + marker_var_left = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + marker_op = _parse_marker_op(tokenizer) + tokenizer.consume("WS") + marker_var_right = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + return (marker_var_left, marker_op, marker_var_right) + + +def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar: # noqa: RET503 + """ + marker_var = VARIABLE | QUOTED_STRING + """ + if tokenizer.check("VARIABLE"): + return process_env_var(tokenizer.read().text.replace(".", "_")) + elif tokenizer.check("QUOTED_STRING"): + return process_python_str(tokenizer.read().text) + else: + tokenizer.raise_syntax_error( + message="Expected a marker variable or quoted string" + ) + + +def process_env_var(env_var: str) -> Variable: + if env_var in ("platform_python_implementation", "python_implementation"): + return Variable("platform_python_implementation") + else: + return Variable(env_var) + + +def process_python_str(python_str: str) -> Value: + value = ast.literal_eval(python_str) + return Value(str(value)) + + +def _parse_marker_op(tokenizer: Tokenizer) -> Op: + """ + marker_op = IN | NOT IN | OP + """ + if tokenizer.check("IN"): + tokenizer.read() + return Op("in") + elif tokenizer.check("NOT"): + tokenizer.read() + tokenizer.expect("WS", expected="whitespace after 'not'") + tokenizer.expect("IN", expected="'in' after 'not'") + return Op("not in") + elif tokenizer.check("OP"): + return Op(tokenizer.read().text) + else: + return tokenizer.raise_syntax_error( + "Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in" + ) diff --git a/.venv/lib/python3.12/site-packages/packaging/_structures.py b/.venv/lib/python3.12/site-packages/packaging/_structures.py new file mode 100644 index 0000000..225e2ee --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging/_structures.py @@ -0,0 +1,69 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
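+
+# Illustrative note: the two singletons defined below compare greater/less than
+# any other object -- e.g. sorted([Infinity, 1, NegativeInfinity]) gives
+# [NegativeInfinity, 1, Infinity] -- and serve as sentinels when building
+# version comparison keys.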
+ +import typing + + +@typing.final +class InfinityType: + __slots__ = () + + def __repr__(self) -> str: + return "Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return False + + def __le__(self, other: object) -> bool: + return False + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return True + + def __ge__(self, other: object) -> bool: + return True + + def __neg__(self: object) -> "NegativeInfinityType": + return NegativeInfinity + + +Infinity = InfinityType() + + +@typing.final +class NegativeInfinityType: + __slots__ = () + + def __repr__(self) -> str: + return "-Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return True + + def __le__(self, other: object) -> bool: + return True + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return False + + def __ge__(self, other: object) -> bool: + return False + + def __neg__(self: object) -> InfinityType: + return Infinity + + +NegativeInfinity = NegativeInfinityType() diff --git a/.venv/lib/python3.12/site-packages/packaging/_tokenizer.py b/.venv/lib/python3.12/site-packages/packaging/_tokenizer.py new file mode 100644 index 0000000..e6d20dd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging/_tokenizer.py @@ -0,0 +1,193 @@ +from __future__ import annotations + +import contextlib +import re +from dataclasses import dataclass +from typing import Generator, Mapping, NoReturn + +from .specifiers import Specifier + + +@dataclass +class Token: + name: str + text: str + position: int + + +class ParserSyntaxError(Exception): + """The provided source text could not be parsed correctly.""" + + def __init__( + self, + message: str, + *, + source: str, + span: tuple[int, int], + ) -> None: + self.span = span + self.message = message + self.source = source + + super().__init__() + + def __str__(self) -> str: + marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^" + return f"{self.message}\n {self.source}\n {marker}" + + +DEFAULT_RULES: dict[str, re.Pattern[str]] = { + "LEFT_PARENTHESIS": re.compile(r"\("), + "RIGHT_PARENTHESIS": re.compile(r"\)"), + "LEFT_BRACKET": re.compile(r"\["), + "RIGHT_BRACKET": re.compile(r"\]"), + "SEMICOLON": re.compile(r";"), + "COMMA": re.compile(r","), + "QUOTED_STRING": re.compile( + r""" + ( + ('[^']*') + | + ("[^"]*") + ) + """, + re.VERBOSE, + ), + "OP": re.compile(r"(===|==|~=|!=|<=|>=|<|>)"), + "BOOLOP": re.compile(r"\b(or|and)\b"), + "IN": re.compile(r"\bin\b"), + "NOT": re.compile(r"\bnot\b"), + "VARIABLE": re.compile( + r""" + \b( + python_version + |python_full_version + |os[._]name + |sys[._]platform + |platform_(release|system) + |platform[._](version|machine|python_implementation) + |python_implementation + |implementation_(name|version) + |extras? 
+ |dependency_groups + )\b + """, + re.VERBOSE, + ), + "SPECIFIER": re.compile( + Specifier._operator_regex_str + Specifier._version_regex_str, + re.VERBOSE | re.IGNORECASE, + ), + "AT": re.compile(r"\@"), + "URL": re.compile(r"[^ \t]+"), + "IDENTIFIER": re.compile(r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b"), + "VERSION_PREFIX_TRAIL": re.compile(r"\.\*"), + "VERSION_LOCAL_LABEL_TRAIL": re.compile(r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*"), + "WS": re.compile(r"[ \t]+"), + "END": re.compile(r"$"), +} + + +class Tokenizer: + """Context-sensitive token parsing. + + Provides methods to examine the input stream to check whether the next token + matches. + """ + + def __init__( + self, + source: str, + *, + rules: Mapping[str, re.Pattern[str]], + ) -> None: + self.source = source + self.rules = rules + self.next_token: Token | None = None + self.position = 0 + + def consume(self, name: str) -> None: + """Move beyond provided token name, if at current position.""" + if self.check(name): + self.read() + + def check(self, name: str, *, peek: bool = False) -> bool: + """Check whether the next token has the provided name. + + By default, if the check succeeds, the token *must* be read before + another check. If `peek` is set to `True`, the token is not loaded and + would need to be checked again. + """ + assert self.next_token is None, ( + f"Cannot check for {name!r}, already have {self.next_token!r}" + ) + assert name in self.rules, f"Unknown token name: {name!r}" + + expression = self.rules[name] + + match = expression.match(self.source, self.position) + if match is None: + return False + if not peek: + self.next_token = Token(name, match[0], self.position) + return True + + def expect(self, name: str, *, expected: str) -> Token: + """Expect a certain token name next, failing with a syntax error otherwise. + + The token is *not* read. 
+ """ + if not self.check(name): + raise self.raise_syntax_error(f"Expected {expected}") + return self.read() + + def read(self) -> Token: + """Consume the next token and return it.""" + token = self.next_token + assert token is not None + + self.position += len(token.text) + self.next_token = None + + return token + + def raise_syntax_error( + self, + message: str, + *, + span_start: int | None = None, + span_end: int | None = None, + ) -> NoReturn: + """Raise ParserSyntaxError at the given position.""" + span = ( + self.position if span_start is None else span_start, + self.position if span_end is None else span_end, + ) + raise ParserSyntaxError( + message, + source=self.source, + span=span, + ) + + @contextlib.contextmanager + def enclosing_tokens( + self, open_token: str, close_token: str, *, around: str + ) -> Generator[None, None, None]: + if self.check(open_token): + open_position = self.position + self.read() + else: + open_position = None + + yield + + if open_position is None: + return + + if not self.check(close_token): + self.raise_syntax_error( + f"Expected matching {close_token} for {open_token}, after {around}", + span_start=open_position, + ) + + self.read() diff --git a/.venv/lib/python3.12/site-packages/packaging/licenses/__init__.py b/.venv/lib/python3.12/site-packages/packaging/licenses/__init__.py new file mode 100644 index 0000000..335b275 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging/licenses/__init__.py @@ -0,0 +1,147 @@ +####################################################################################### +# +# Adapted from: +# https://github.com/pypa/hatch/blob/5352e44/backend/src/hatchling/licenses/parse.py +# +# MIT License +# +# Copyright (c) 2017-present Ofek Lev +# +# Permission is hereby granted, free of charge, to any person obtaining a copy of this +# software and associated documentation files (the "Software"), to deal in the Software +# without restriction, including without limitation the rights to use, copy, modify, +# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be included in all copies +# or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF +# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE +# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# +# +# With additional allowance of arbitrary `LicenseRef-` identifiers, not just +# `LicenseRef-Public-Domain` and `LicenseRef-Proprietary`. 
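+#
+# Illustrative example of the canonicalizer defined below:
+# canonicalize_license_expression("mit or apache-2.0") returns
+# "MIT OR Apache-2.0" (identifiers are case-normalized against the SPDX list
+# and the boolean operators are upper-cased).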
+# +####################################################################################### +from __future__ import annotations + +import re +from typing import NewType, cast + +from ._spdx import EXCEPTIONS, LICENSES + +__all__ = [ + "InvalidLicenseExpression", + "NormalizedLicenseExpression", + "canonicalize_license_expression", +] + +license_ref_allowed = re.compile("^[A-Za-z0-9.-]*$") + +NormalizedLicenseExpression = NewType("NormalizedLicenseExpression", str) + + +class InvalidLicenseExpression(ValueError): + """Raised when a license-expression string is invalid + + >>> canonicalize_license_expression("invalid") + Traceback (most recent call last): + ... + packaging.licenses.InvalidLicenseExpression: Invalid license expression: 'invalid' + """ + + +def canonicalize_license_expression( + raw_license_expression: str, +) -> NormalizedLicenseExpression: + if not raw_license_expression: + message = f"Invalid license expression: {raw_license_expression!r}" + raise InvalidLicenseExpression(message) + + # Pad any parentheses so tokenization can be achieved by merely splitting on + # whitespace. + license_expression = raw_license_expression.replace("(", " ( ").replace(")", " ) ") + licenseref_prefix = "LicenseRef-" + license_refs = { + ref.lower(): "LicenseRef-" + ref[len(licenseref_prefix) :] + for ref in license_expression.split() + if ref.lower().startswith(licenseref_prefix.lower()) + } + + # Normalize to lower case so we can look up licenses/exceptions + # and so boolean operators are Python-compatible. + license_expression = license_expression.lower() + + tokens = license_expression.split() + + # Rather than implementing a parenthesis/boolean logic parser, create an + # expression that Python can parse. Everything that is not involved with the + # grammar itself is replaced with the placeholder `False` and the resultant + # expression should become a valid Python expression. + python_tokens = [] + for token in tokens: + if token not in {"or", "and", "with", "(", ")"}: + python_tokens.append("False") + elif token == "with": + python_tokens.append("or") + elif ( + token == "(" + and python_tokens + and python_tokens[-1] not in {"or", "and", "("} + ) or (token == ")" and python_tokens and python_tokens[-1] == "("): + message = f"Invalid license expression: {raw_license_expression!r}" + raise InvalidLicenseExpression(message) + else: + python_tokens.append(token) + + python_expression = " ".join(python_tokens) + try: + compile(python_expression, "", "eval") + except SyntaxError: + message = f"Invalid license expression: {raw_license_expression!r}" + raise InvalidLicenseExpression(message) from None + + # Take a final pass to check for unknown licenses/exceptions. 
+ normalized_tokens = [] + for token in tokens: + if token in {"or", "and", "with", "(", ")"}: + normalized_tokens.append(token.upper()) + continue + + if normalized_tokens and normalized_tokens[-1] == "WITH": + if token not in EXCEPTIONS: + message = f"Unknown license exception: {token!r}" + raise InvalidLicenseExpression(message) + + normalized_tokens.append(EXCEPTIONS[token]["id"]) + else: + if token.endswith("+"): + final_token = token[:-1] + suffix = "+" + else: + final_token = token + suffix = "" + + if final_token.startswith("licenseref-"): + if not license_ref_allowed.match(final_token): + message = f"Invalid licenseref: {final_token!r}" + raise InvalidLicenseExpression(message) + normalized_tokens.append(license_refs[final_token] + suffix) + else: + if final_token not in LICENSES: + message = f"Unknown license: {final_token!r}" + raise InvalidLicenseExpression(message) + normalized_tokens.append(LICENSES[final_token]["id"] + suffix) + + normalized_expression = " ".join(normalized_tokens) + + return cast( + "NormalizedLicenseExpression", + normalized_expression.replace("( ", "(").replace(" )", ")"), + ) diff --git a/.venv/lib/python3.12/site-packages/packaging/licenses/_spdx.py b/.venv/lib/python3.12/site-packages/packaging/licenses/_spdx.py new file mode 100644 index 0000000..a277af2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging/licenses/_spdx.py @@ -0,0 +1,799 @@ + +from __future__ import annotations + +from typing import TypedDict + +class SPDXLicense(TypedDict): + id: str + deprecated: bool + +class SPDXException(TypedDict): + id: str + deprecated: bool + + +VERSION = '3.27.0' + +LICENSES: dict[str, SPDXLicense] = { + '0bsd': {'id': '0BSD', 'deprecated': False}, + '3d-slicer-1.0': {'id': '3D-Slicer-1.0', 'deprecated': False}, + 'aal': {'id': 'AAL', 'deprecated': False}, + 'abstyles': {'id': 'Abstyles', 'deprecated': False}, + 'adacore-doc': {'id': 'AdaCore-doc', 'deprecated': False}, + 'adobe-2006': {'id': 'Adobe-2006', 'deprecated': False}, + 'adobe-display-postscript': {'id': 'Adobe-Display-PostScript', 'deprecated': False}, + 'adobe-glyph': {'id': 'Adobe-Glyph', 'deprecated': False}, + 'adobe-utopia': {'id': 'Adobe-Utopia', 'deprecated': False}, + 'adsl': {'id': 'ADSL', 'deprecated': False}, + 'afl-1.1': {'id': 'AFL-1.1', 'deprecated': False}, + 'afl-1.2': {'id': 'AFL-1.2', 'deprecated': False}, + 'afl-2.0': {'id': 'AFL-2.0', 'deprecated': False}, + 'afl-2.1': {'id': 'AFL-2.1', 'deprecated': False}, + 'afl-3.0': {'id': 'AFL-3.0', 'deprecated': False}, + 'afmparse': {'id': 'Afmparse', 'deprecated': False}, + 'agpl-1.0': {'id': 'AGPL-1.0', 'deprecated': True}, + 'agpl-1.0-only': {'id': 'AGPL-1.0-only', 'deprecated': False}, + 'agpl-1.0-or-later': {'id': 'AGPL-1.0-or-later', 'deprecated': False}, + 'agpl-3.0': {'id': 'AGPL-3.0', 'deprecated': True}, + 'agpl-3.0-only': {'id': 'AGPL-3.0-only', 'deprecated': False}, + 'agpl-3.0-or-later': {'id': 'AGPL-3.0-or-later', 'deprecated': False}, + 'aladdin': {'id': 'Aladdin', 'deprecated': False}, + 'amd-newlib': {'id': 'AMD-newlib', 'deprecated': False}, + 'amdplpa': {'id': 'AMDPLPA', 'deprecated': False}, + 'aml': {'id': 'AML', 'deprecated': False}, + 'aml-glslang': {'id': 'AML-glslang', 'deprecated': False}, + 'ampas': {'id': 'AMPAS', 'deprecated': False}, + 'antlr-pd': {'id': 'ANTLR-PD', 'deprecated': False}, + 'antlr-pd-fallback': {'id': 'ANTLR-PD-fallback', 'deprecated': False}, + 'any-osi': {'id': 'any-OSI', 'deprecated': False}, + 'any-osi-perl-modules': {'id': 'any-OSI-perl-modules', 'deprecated': False}, + 
'apache-1.0': {'id': 'Apache-1.0', 'deprecated': False}, + 'apache-1.1': {'id': 'Apache-1.1', 'deprecated': False}, + 'apache-2.0': {'id': 'Apache-2.0', 'deprecated': False}, + 'apafml': {'id': 'APAFML', 'deprecated': False}, + 'apl-1.0': {'id': 'APL-1.0', 'deprecated': False}, + 'app-s2p': {'id': 'App-s2p', 'deprecated': False}, + 'apsl-1.0': {'id': 'APSL-1.0', 'deprecated': False}, + 'apsl-1.1': {'id': 'APSL-1.1', 'deprecated': False}, + 'apsl-1.2': {'id': 'APSL-1.2', 'deprecated': False}, + 'apsl-2.0': {'id': 'APSL-2.0', 'deprecated': False}, + 'arphic-1999': {'id': 'Arphic-1999', 'deprecated': False}, + 'artistic-1.0': {'id': 'Artistic-1.0', 'deprecated': False}, + 'artistic-1.0-cl8': {'id': 'Artistic-1.0-cl8', 'deprecated': False}, + 'artistic-1.0-perl': {'id': 'Artistic-1.0-Perl', 'deprecated': False}, + 'artistic-2.0': {'id': 'Artistic-2.0', 'deprecated': False}, + 'artistic-dist': {'id': 'Artistic-dist', 'deprecated': False}, + 'aspell-ru': {'id': 'Aspell-RU', 'deprecated': False}, + 'aswf-digital-assets-1.0': {'id': 'ASWF-Digital-Assets-1.0', 'deprecated': False}, + 'aswf-digital-assets-1.1': {'id': 'ASWF-Digital-Assets-1.1', 'deprecated': False}, + 'baekmuk': {'id': 'Baekmuk', 'deprecated': False}, + 'bahyph': {'id': 'Bahyph', 'deprecated': False}, + 'barr': {'id': 'Barr', 'deprecated': False}, + 'bcrypt-solar-designer': {'id': 'bcrypt-Solar-Designer', 'deprecated': False}, + 'beerware': {'id': 'Beerware', 'deprecated': False}, + 'bitstream-charter': {'id': 'Bitstream-Charter', 'deprecated': False}, + 'bitstream-vera': {'id': 'Bitstream-Vera', 'deprecated': False}, + 'bittorrent-1.0': {'id': 'BitTorrent-1.0', 'deprecated': False}, + 'bittorrent-1.1': {'id': 'BitTorrent-1.1', 'deprecated': False}, + 'blessing': {'id': 'blessing', 'deprecated': False}, + 'blueoak-1.0.0': {'id': 'BlueOak-1.0.0', 'deprecated': False}, + 'boehm-gc': {'id': 'Boehm-GC', 'deprecated': False}, + 'boehm-gc-without-fee': {'id': 'Boehm-GC-without-fee', 'deprecated': False}, + 'borceux': {'id': 'Borceux', 'deprecated': False}, + 'brian-gladman-2-clause': {'id': 'Brian-Gladman-2-Clause', 'deprecated': False}, + 'brian-gladman-3-clause': {'id': 'Brian-Gladman-3-Clause', 'deprecated': False}, + 'bsd-1-clause': {'id': 'BSD-1-Clause', 'deprecated': False}, + 'bsd-2-clause': {'id': 'BSD-2-Clause', 'deprecated': False}, + 'bsd-2-clause-darwin': {'id': 'BSD-2-Clause-Darwin', 'deprecated': False}, + 'bsd-2-clause-first-lines': {'id': 'BSD-2-Clause-first-lines', 'deprecated': False}, + 'bsd-2-clause-freebsd': {'id': 'BSD-2-Clause-FreeBSD', 'deprecated': True}, + 'bsd-2-clause-netbsd': {'id': 'BSD-2-Clause-NetBSD', 'deprecated': True}, + 'bsd-2-clause-patent': {'id': 'BSD-2-Clause-Patent', 'deprecated': False}, + 'bsd-2-clause-pkgconf-disclaimer': {'id': 'BSD-2-Clause-pkgconf-disclaimer', 'deprecated': False}, + 'bsd-2-clause-views': {'id': 'BSD-2-Clause-Views', 'deprecated': False}, + 'bsd-3-clause': {'id': 'BSD-3-Clause', 'deprecated': False}, + 'bsd-3-clause-acpica': {'id': 'BSD-3-Clause-acpica', 'deprecated': False}, + 'bsd-3-clause-attribution': {'id': 'BSD-3-Clause-Attribution', 'deprecated': False}, + 'bsd-3-clause-clear': {'id': 'BSD-3-Clause-Clear', 'deprecated': False}, + 'bsd-3-clause-flex': {'id': 'BSD-3-Clause-flex', 'deprecated': False}, + 'bsd-3-clause-hp': {'id': 'BSD-3-Clause-HP', 'deprecated': False}, + 'bsd-3-clause-lbnl': {'id': 'BSD-3-Clause-LBNL', 'deprecated': False}, + 'bsd-3-clause-modification': {'id': 'BSD-3-Clause-Modification', 'deprecated': False}, + 'bsd-3-clause-no-military-license': 
{'id': 'BSD-3-Clause-No-Military-License', 'deprecated': False}, + 'bsd-3-clause-no-nuclear-license': {'id': 'BSD-3-Clause-No-Nuclear-License', 'deprecated': False}, + 'bsd-3-clause-no-nuclear-license-2014': {'id': 'BSD-3-Clause-No-Nuclear-License-2014', 'deprecated': False}, + 'bsd-3-clause-no-nuclear-warranty': {'id': 'BSD-3-Clause-No-Nuclear-Warranty', 'deprecated': False}, + 'bsd-3-clause-open-mpi': {'id': 'BSD-3-Clause-Open-MPI', 'deprecated': False}, + 'bsd-3-clause-sun': {'id': 'BSD-3-Clause-Sun', 'deprecated': False}, + 'bsd-4-clause': {'id': 'BSD-4-Clause', 'deprecated': False}, + 'bsd-4-clause-shortened': {'id': 'BSD-4-Clause-Shortened', 'deprecated': False}, + 'bsd-4-clause-uc': {'id': 'BSD-4-Clause-UC', 'deprecated': False}, + 'bsd-4.3reno': {'id': 'BSD-4.3RENO', 'deprecated': False}, + 'bsd-4.3tahoe': {'id': 'BSD-4.3TAHOE', 'deprecated': False}, + 'bsd-advertising-acknowledgement': {'id': 'BSD-Advertising-Acknowledgement', 'deprecated': False}, + 'bsd-attribution-hpnd-disclaimer': {'id': 'BSD-Attribution-HPND-disclaimer', 'deprecated': False}, + 'bsd-inferno-nettverk': {'id': 'BSD-Inferno-Nettverk', 'deprecated': False}, + 'bsd-protection': {'id': 'BSD-Protection', 'deprecated': False}, + 'bsd-source-beginning-file': {'id': 'BSD-Source-beginning-file', 'deprecated': False}, + 'bsd-source-code': {'id': 'BSD-Source-Code', 'deprecated': False}, + 'bsd-systemics': {'id': 'BSD-Systemics', 'deprecated': False}, + 'bsd-systemics-w3works': {'id': 'BSD-Systemics-W3Works', 'deprecated': False}, + 'bsl-1.0': {'id': 'BSL-1.0', 'deprecated': False}, + 'busl-1.1': {'id': 'BUSL-1.1', 'deprecated': False}, + 'bzip2-1.0.5': {'id': 'bzip2-1.0.5', 'deprecated': True}, + 'bzip2-1.0.6': {'id': 'bzip2-1.0.6', 'deprecated': False}, + 'c-uda-1.0': {'id': 'C-UDA-1.0', 'deprecated': False}, + 'cal-1.0': {'id': 'CAL-1.0', 'deprecated': False}, + 'cal-1.0-combined-work-exception': {'id': 'CAL-1.0-Combined-Work-Exception', 'deprecated': False}, + 'caldera': {'id': 'Caldera', 'deprecated': False}, + 'caldera-no-preamble': {'id': 'Caldera-no-preamble', 'deprecated': False}, + 'catharon': {'id': 'Catharon', 'deprecated': False}, + 'catosl-1.1': {'id': 'CATOSL-1.1', 'deprecated': False}, + 'cc-by-1.0': {'id': 'CC-BY-1.0', 'deprecated': False}, + 'cc-by-2.0': {'id': 'CC-BY-2.0', 'deprecated': False}, + 'cc-by-2.5': {'id': 'CC-BY-2.5', 'deprecated': False}, + 'cc-by-2.5-au': {'id': 'CC-BY-2.5-AU', 'deprecated': False}, + 'cc-by-3.0': {'id': 'CC-BY-3.0', 'deprecated': False}, + 'cc-by-3.0-at': {'id': 'CC-BY-3.0-AT', 'deprecated': False}, + 'cc-by-3.0-au': {'id': 'CC-BY-3.0-AU', 'deprecated': False}, + 'cc-by-3.0-de': {'id': 'CC-BY-3.0-DE', 'deprecated': False}, + 'cc-by-3.0-igo': {'id': 'CC-BY-3.0-IGO', 'deprecated': False}, + 'cc-by-3.0-nl': {'id': 'CC-BY-3.0-NL', 'deprecated': False}, + 'cc-by-3.0-us': {'id': 'CC-BY-3.0-US', 'deprecated': False}, + 'cc-by-4.0': {'id': 'CC-BY-4.0', 'deprecated': False}, + 'cc-by-nc-1.0': {'id': 'CC-BY-NC-1.0', 'deprecated': False}, + 'cc-by-nc-2.0': {'id': 'CC-BY-NC-2.0', 'deprecated': False}, + 'cc-by-nc-2.5': {'id': 'CC-BY-NC-2.5', 'deprecated': False}, + 'cc-by-nc-3.0': {'id': 'CC-BY-NC-3.0', 'deprecated': False}, + 'cc-by-nc-3.0-de': {'id': 'CC-BY-NC-3.0-DE', 'deprecated': False}, + 'cc-by-nc-4.0': {'id': 'CC-BY-NC-4.0', 'deprecated': False}, + 'cc-by-nc-nd-1.0': {'id': 'CC-BY-NC-ND-1.0', 'deprecated': False}, + 'cc-by-nc-nd-2.0': {'id': 'CC-BY-NC-ND-2.0', 'deprecated': False}, + 'cc-by-nc-nd-2.5': {'id': 'CC-BY-NC-ND-2.5', 'deprecated': False}, + 'cc-by-nc-nd-3.0': {'id': 
'CC-BY-NC-ND-3.0', 'deprecated': False}, + 'cc-by-nc-nd-3.0-de': {'id': 'CC-BY-NC-ND-3.0-DE', 'deprecated': False}, + 'cc-by-nc-nd-3.0-igo': {'id': 'CC-BY-NC-ND-3.0-IGO', 'deprecated': False}, + 'cc-by-nc-nd-4.0': {'id': 'CC-BY-NC-ND-4.0', 'deprecated': False}, + 'cc-by-nc-sa-1.0': {'id': 'CC-BY-NC-SA-1.0', 'deprecated': False}, + 'cc-by-nc-sa-2.0': {'id': 'CC-BY-NC-SA-2.0', 'deprecated': False}, + 'cc-by-nc-sa-2.0-de': {'id': 'CC-BY-NC-SA-2.0-DE', 'deprecated': False}, + 'cc-by-nc-sa-2.0-fr': {'id': 'CC-BY-NC-SA-2.0-FR', 'deprecated': False}, + 'cc-by-nc-sa-2.0-uk': {'id': 'CC-BY-NC-SA-2.0-UK', 'deprecated': False}, + 'cc-by-nc-sa-2.5': {'id': 'CC-BY-NC-SA-2.5', 'deprecated': False}, + 'cc-by-nc-sa-3.0': {'id': 'CC-BY-NC-SA-3.0', 'deprecated': False}, + 'cc-by-nc-sa-3.0-de': {'id': 'CC-BY-NC-SA-3.0-DE', 'deprecated': False}, + 'cc-by-nc-sa-3.0-igo': {'id': 'CC-BY-NC-SA-3.0-IGO', 'deprecated': False}, + 'cc-by-nc-sa-4.0': {'id': 'CC-BY-NC-SA-4.0', 'deprecated': False}, + 'cc-by-nd-1.0': {'id': 'CC-BY-ND-1.0', 'deprecated': False}, + 'cc-by-nd-2.0': {'id': 'CC-BY-ND-2.0', 'deprecated': False}, + 'cc-by-nd-2.5': {'id': 'CC-BY-ND-2.5', 'deprecated': False}, + 'cc-by-nd-3.0': {'id': 'CC-BY-ND-3.0', 'deprecated': False}, + 'cc-by-nd-3.0-de': {'id': 'CC-BY-ND-3.0-DE', 'deprecated': False}, + 'cc-by-nd-4.0': {'id': 'CC-BY-ND-4.0', 'deprecated': False}, + 'cc-by-sa-1.0': {'id': 'CC-BY-SA-1.0', 'deprecated': False}, + 'cc-by-sa-2.0': {'id': 'CC-BY-SA-2.0', 'deprecated': False}, + 'cc-by-sa-2.0-uk': {'id': 'CC-BY-SA-2.0-UK', 'deprecated': False}, + 'cc-by-sa-2.1-jp': {'id': 'CC-BY-SA-2.1-JP', 'deprecated': False}, + 'cc-by-sa-2.5': {'id': 'CC-BY-SA-2.5', 'deprecated': False}, + 'cc-by-sa-3.0': {'id': 'CC-BY-SA-3.0', 'deprecated': False}, + 'cc-by-sa-3.0-at': {'id': 'CC-BY-SA-3.0-AT', 'deprecated': False}, + 'cc-by-sa-3.0-de': {'id': 'CC-BY-SA-3.0-DE', 'deprecated': False}, + 'cc-by-sa-3.0-igo': {'id': 'CC-BY-SA-3.0-IGO', 'deprecated': False}, + 'cc-by-sa-4.0': {'id': 'CC-BY-SA-4.0', 'deprecated': False}, + 'cc-pddc': {'id': 'CC-PDDC', 'deprecated': False}, + 'cc-pdm-1.0': {'id': 'CC-PDM-1.0', 'deprecated': False}, + 'cc-sa-1.0': {'id': 'CC-SA-1.0', 'deprecated': False}, + 'cc0-1.0': {'id': 'CC0-1.0', 'deprecated': False}, + 'cddl-1.0': {'id': 'CDDL-1.0', 'deprecated': False}, + 'cddl-1.1': {'id': 'CDDL-1.1', 'deprecated': False}, + 'cdl-1.0': {'id': 'CDL-1.0', 'deprecated': False}, + 'cdla-permissive-1.0': {'id': 'CDLA-Permissive-1.0', 'deprecated': False}, + 'cdla-permissive-2.0': {'id': 'CDLA-Permissive-2.0', 'deprecated': False}, + 'cdla-sharing-1.0': {'id': 'CDLA-Sharing-1.0', 'deprecated': False}, + 'cecill-1.0': {'id': 'CECILL-1.0', 'deprecated': False}, + 'cecill-1.1': {'id': 'CECILL-1.1', 'deprecated': False}, + 'cecill-2.0': {'id': 'CECILL-2.0', 'deprecated': False}, + 'cecill-2.1': {'id': 'CECILL-2.1', 'deprecated': False}, + 'cecill-b': {'id': 'CECILL-B', 'deprecated': False}, + 'cecill-c': {'id': 'CECILL-C', 'deprecated': False}, + 'cern-ohl-1.1': {'id': 'CERN-OHL-1.1', 'deprecated': False}, + 'cern-ohl-1.2': {'id': 'CERN-OHL-1.2', 'deprecated': False}, + 'cern-ohl-p-2.0': {'id': 'CERN-OHL-P-2.0', 'deprecated': False}, + 'cern-ohl-s-2.0': {'id': 'CERN-OHL-S-2.0', 'deprecated': False}, + 'cern-ohl-w-2.0': {'id': 'CERN-OHL-W-2.0', 'deprecated': False}, + 'cfitsio': {'id': 'CFITSIO', 'deprecated': False}, + 'check-cvs': {'id': 'check-cvs', 'deprecated': False}, + 'checkmk': {'id': 'checkmk', 'deprecated': False}, + 'clartistic': {'id': 'ClArtistic', 'deprecated': False}, + 'clips': {'id': 
'Clips', 'deprecated': False}, + 'cmu-mach': {'id': 'CMU-Mach', 'deprecated': False}, + 'cmu-mach-nodoc': {'id': 'CMU-Mach-nodoc', 'deprecated': False}, + 'cnri-jython': {'id': 'CNRI-Jython', 'deprecated': False}, + 'cnri-python': {'id': 'CNRI-Python', 'deprecated': False}, + 'cnri-python-gpl-compatible': {'id': 'CNRI-Python-GPL-Compatible', 'deprecated': False}, + 'coil-1.0': {'id': 'COIL-1.0', 'deprecated': False}, + 'community-spec-1.0': {'id': 'Community-Spec-1.0', 'deprecated': False}, + 'condor-1.1': {'id': 'Condor-1.1', 'deprecated': False}, + 'copyleft-next-0.3.0': {'id': 'copyleft-next-0.3.0', 'deprecated': False}, + 'copyleft-next-0.3.1': {'id': 'copyleft-next-0.3.1', 'deprecated': False}, + 'cornell-lossless-jpeg': {'id': 'Cornell-Lossless-JPEG', 'deprecated': False}, + 'cpal-1.0': {'id': 'CPAL-1.0', 'deprecated': False}, + 'cpl-1.0': {'id': 'CPL-1.0', 'deprecated': False}, + 'cpol-1.02': {'id': 'CPOL-1.02', 'deprecated': False}, + 'cronyx': {'id': 'Cronyx', 'deprecated': False}, + 'crossword': {'id': 'Crossword', 'deprecated': False}, + 'cryptoswift': {'id': 'CryptoSwift', 'deprecated': False}, + 'crystalstacker': {'id': 'CrystalStacker', 'deprecated': False}, + 'cua-opl-1.0': {'id': 'CUA-OPL-1.0', 'deprecated': False}, + 'cube': {'id': 'Cube', 'deprecated': False}, + 'curl': {'id': 'curl', 'deprecated': False}, + 'cve-tou': {'id': 'cve-tou', 'deprecated': False}, + 'd-fsl-1.0': {'id': 'D-FSL-1.0', 'deprecated': False}, + 'dec-3-clause': {'id': 'DEC-3-Clause', 'deprecated': False}, + 'diffmark': {'id': 'diffmark', 'deprecated': False}, + 'dl-de-by-2.0': {'id': 'DL-DE-BY-2.0', 'deprecated': False}, + 'dl-de-zero-2.0': {'id': 'DL-DE-ZERO-2.0', 'deprecated': False}, + 'doc': {'id': 'DOC', 'deprecated': False}, + 'docbook-dtd': {'id': 'DocBook-DTD', 'deprecated': False}, + 'docbook-schema': {'id': 'DocBook-Schema', 'deprecated': False}, + 'docbook-stylesheet': {'id': 'DocBook-Stylesheet', 'deprecated': False}, + 'docbook-xml': {'id': 'DocBook-XML', 'deprecated': False}, + 'dotseqn': {'id': 'Dotseqn', 'deprecated': False}, + 'drl-1.0': {'id': 'DRL-1.0', 'deprecated': False}, + 'drl-1.1': {'id': 'DRL-1.1', 'deprecated': False}, + 'dsdp': {'id': 'DSDP', 'deprecated': False}, + 'dtoa': {'id': 'dtoa', 'deprecated': False}, + 'dvipdfm': {'id': 'dvipdfm', 'deprecated': False}, + 'ecl-1.0': {'id': 'ECL-1.0', 'deprecated': False}, + 'ecl-2.0': {'id': 'ECL-2.0', 'deprecated': False}, + 'ecos-2.0': {'id': 'eCos-2.0', 'deprecated': True}, + 'efl-1.0': {'id': 'EFL-1.0', 'deprecated': False}, + 'efl-2.0': {'id': 'EFL-2.0', 'deprecated': False}, + 'egenix': {'id': 'eGenix', 'deprecated': False}, + 'elastic-2.0': {'id': 'Elastic-2.0', 'deprecated': False}, + 'entessa': {'id': 'Entessa', 'deprecated': False}, + 'epics': {'id': 'EPICS', 'deprecated': False}, + 'epl-1.0': {'id': 'EPL-1.0', 'deprecated': False}, + 'epl-2.0': {'id': 'EPL-2.0', 'deprecated': False}, + 'erlpl-1.1': {'id': 'ErlPL-1.1', 'deprecated': False}, + 'etalab-2.0': {'id': 'etalab-2.0', 'deprecated': False}, + 'eudatagrid': {'id': 'EUDatagrid', 'deprecated': False}, + 'eupl-1.0': {'id': 'EUPL-1.0', 'deprecated': False}, + 'eupl-1.1': {'id': 'EUPL-1.1', 'deprecated': False}, + 'eupl-1.2': {'id': 'EUPL-1.2', 'deprecated': False}, + 'eurosym': {'id': 'Eurosym', 'deprecated': False}, + 'fair': {'id': 'Fair', 'deprecated': False}, + 'fbm': {'id': 'FBM', 'deprecated': False}, + 'fdk-aac': {'id': 'FDK-AAC', 'deprecated': False}, + 'ferguson-twofish': {'id': 'Ferguson-Twofish', 'deprecated': False}, + 'frameworx-1.0': {'id': 
'Frameworx-1.0', 'deprecated': False}, + 'freebsd-doc': {'id': 'FreeBSD-DOC', 'deprecated': False}, + 'freeimage': {'id': 'FreeImage', 'deprecated': False}, + 'fsfap': {'id': 'FSFAP', 'deprecated': False}, + 'fsfap-no-warranty-disclaimer': {'id': 'FSFAP-no-warranty-disclaimer', 'deprecated': False}, + 'fsful': {'id': 'FSFUL', 'deprecated': False}, + 'fsfullr': {'id': 'FSFULLR', 'deprecated': False}, + 'fsfullrsd': {'id': 'FSFULLRSD', 'deprecated': False}, + 'fsfullrwd': {'id': 'FSFULLRWD', 'deprecated': False}, + 'fsl-1.1-alv2': {'id': 'FSL-1.1-ALv2', 'deprecated': False}, + 'fsl-1.1-mit': {'id': 'FSL-1.1-MIT', 'deprecated': False}, + 'ftl': {'id': 'FTL', 'deprecated': False}, + 'furuseth': {'id': 'Furuseth', 'deprecated': False}, + 'fwlw': {'id': 'fwlw', 'deprecated': False}, + 'game-programming-gems': {'id': 'Game-Programming-Gems', 'deprecated': False}, + 'gcr-docs': {'id': 'GCR-docs', 'deprecated': False}, + 'gd': {'id': 'GD', 'deprecated': False}, + 'generic-xts': {'id': 'generic-xts', 'deprecated': False}, + 'gfdl-1.1': {'id': 'GFDL-1.1', 'deprecated': True}, + 'gfdl-1.1-invariants-only': {'id': 'GFDL-1.1-invariants-only', 'deprecated': False}, + 'gfdl-1.1-invariants-or-later': {'id': 'GFDL-1.1-invariants-or-later', 'deprecated': False}, + 'gfdl-1.1-no-invariants-only': {'id': 'GFDL-1.1-no-invariants-only', 'deprecated': False}, + 'gfdl-1.1-no-invariants-or-later': {'id': 'GFDL-1.1-no-invariants-or-later', 'deprecated': False}, + 'gfdl-1.1-only': {'id': 'GFDL-1.1-only', 'deprecated': False}, + 'gfdl-1.1-or-later': {'id': 'GFDL-1.1-or-later', 'deprecated': False}, + 'gfdl-1.2': {'id': 'GFDL-1.2', 'deprecated': True}, + 'gfdl-1.2-invariants-only': {'id': 'GFDL-1.2-invariants-only', 'deprecated': False}, + 'gfdl-1.2-invariants-or-later': {'id': 'GFDL-1.2-invariants-or-later', 'deprecated': False}, + 'gfdl-1.2-no-invariants-only': {'id': 'GFDL-1.2-no-invariants-only', 'deprecated': False}, + 'gfdl-1.2-no-invariants-or-later': {'id': 'GFDL-1.2-no-invariants-or-later', 'deprecated': False}, + 'gfdl-1.2-only': {'id': 'GFDL-1.2-only', 'deprecated': False}, + 'gfdl-1.2-or-later': {'id': 'GFDL-1.2-or-later', 'deprecated': False}, + 'gfdl-1.3': {'id': 'GFDL-1.3', 'deprecated': True}, + 'gfdl-1.3-invariants-only': {'id': 'GFDL-1.3-invariants-only', 'deprecated': False}, + 'gfdl-1.3-invariants-or-later': {'id': 'GFDL-1.3-invariants-or-later', 'deprecated': False}, + 'gfdl-1.3-no-invariants-only': {'id': 'GFDL-1.3-no-invariants-only', 'deprecated': False}, + 'gfdl-1.3-no-invariants-or-later': {'id': 'GFDL-1.3-no-invariants-or-later', 'deprecated': False}, + 'gfdl-1.3-only': {'id': 'GFDL-1.3-only', 'deprecated': False}, + 'gfdl-1.3-or-later': {'id': 'GFDL-1.3-or-later', 'deprecated': False}, + 'giftware': {'id': 'Giftware', 'deprecated': False}, + 'gl2ps': {'id': 'GL2PS', 'deprecated': False}, + 'glide': {'id': 'Glide', 'deprecated': False}, + 'glulxe': {'id': 'Glulxe', 'deprecated': False}, + 'glwtpl': {'id': 'GLWTPL', 'deprecated': False}, + 'gnuplot': {'id': 'gnuplot', 'deprecated': False}, + 'gpl-1.0': {'id': 'GPL-1.0', 'deprecated': True}, + 'gpl-1.0+': {'id': 'GPL-1.0+', 'deprecated': True}, + 'gpl-1.0-only': {'id': 'GPL-1.0-only', 'deprecated': False}, + 'gpl-1.0-or-later': {'id': 'GPL-1.0-or-later', 'deprecated': False}, + 'gpl-2.0': {'id': 'GPL-2.0', 'deprecated': True}, + 'gpl-2.0+': {'id': 'GPL-2.0+', 'deprecated': True}, + 'gpl-2.0-only': {'id': 'GPL-2.0-only', 'deprecated': False}, + 'gpl-2.0-or-later': {'id': 'GPL-2.0-or-later', 'deprecated': False}, + 
'gpl-2.0-with-autoconf-exception': {'id': 'GPL-2.0-with-autoconf-exception', 'deprecated': True}, + 'gpl-2.0-with-bison-exception': {'id': 'GPL-2.0-with-bison-exception', 'deprecated': True}, + 'gpl-2.0-with-classpath-exception': {'id': 'GPL-2.0-with-classpath-exception', 'deprecated': True}, + 'gpl-2.0-with-font-exception': {'id': 'GPL-2.0-with-font-exception', 'deprecated': True}, + 'gpl-2.0-with-gcc-exception': {'id': 'GPL-2.0-with-GCC-exception', 'deprecated': True}, + 'gpl-3.0': {'id': 'GPL-3.0', 'deprecated': True}, + 'gpl-3.0+': {'id': 'GPL-3.0+', 'deprecated': True}, + 'gpl-3.0-only': {'id': 'GPL-3.0-only', 'deprecated': False}, + 'gpl-3.0-or-later': {'id': 'GPL-3.0-or-later', 'deprecated': False}, + 'gpl-3.0-with-autoconf-exception': {'id': 'GPL-3.0-with-autoconf-exception', 'deprecated': True}, + 'gpl-3.0-with-gcc-exception': {'id': 'GPL-3.0-with-GCC-exception', 'deprecated': True}, + 'graphics-gems': {'id': 'Graphics-Gems', 'deprecated': False}, + 'gsoap-1.3b': {'id': 'gSOAP-1.3b', 'deprecated': False}, + 'gtkbook': {'id': 'gtkbook', 'deprecated': False}, + 'gutmann': {'id': 'Gutmann', 'deprecated': False}, + 'haskellreport': {'id': 'HaskellReport', 'deprecated': False}, + 'hdf5': {'id': 'HDF5', 'deprecated': False}, + 'hdparm': {'id': 'hdparm', 'deprecated': False}, + 'hidapi': {'id': 'HIDAPI', 'deprecated': False}, + 'hippocratic-2.1': {'id': 'Hippocratic-2.1', 'deprecated': False}, + 'hp-1986': {'id': 'HP-1986', 'deprecated': False}, + 'hp-1989': {'id': 'HP-1989', 'deprecated': False}, + 'hpnd': {'id': 'HPND', 'deprecated': False}, + 'hpnd-dec': {'id': 'HPND-DEC', 'deprecated': False}, + 'hpnd-doc': {'id': 'HPND-doc', 'deprecated': False}, + 'hpnd-doc-sell': {'id': 'HPND-doc-sell', 'deprecated': False}, + 'hpnd-export-us': {'id': 'HPND-export-US', 'deprecated': False}, + 'hpnd-export-us-acknowledgement': {'id': 'HPND-export-US-acknowledgement', 'deprecated': False}, + 'hpnd-export-us-modify': {'id': 'HPND-export-US-modify', 'deprecated': False}, + 'hpnd-export2-us': {'id': 'HPND-export2-US', 'deprecated': False}, + 'hpnd-fenneberg-livingston': {'id': 'HPND-Fenneberg-Livingston', 'deprecated': False}, + 'hpnd-inria-imag': {'id': 'HPND-INRIA-IMAG', 'deprecated': False}, + 'hpnd-intel': {'id': 'HPND-Intel', 'deprecated': False}, + 'hpnd-kevlin-henney': {'id': 'HPND-Kevlin-Henney', 'deprecated': False}, + 'hpnd-markus-kuhn': {'id': 'HPND-Markus-Kuhn', 'deprecated': False}, + 'hpnd-merchantability-variant': {'id': 'HPND-merchantability-variant', 'deprecated': False}, + 'hpnd-mit-disclaimer': {'id': 'HPND-MIT-disclaimer', 'deprecated': False}, + 'hpnd-netrek': {'id': 'HPND-Netrek', 'deprecated': False}, + 'hpnd-pbmplus': {'id': 'HPND-Pbmplus', 'deprecated': False}, + 'hpnd-sell-mit-disclaimer-xserver': {'id': 'HPND-sell-MIT-disclaimer-xserver', 'deprecated': False}, + 'hpnd-sell-regexpr': {'id': 'HPND-sell-regexpr', 'deprecated': False}, + 'hpnd-sell-variant': {'id': 'HPND-sell-variant', 'deprecated': False}, + 'hpnd-sell-variant-mit-disclaimer': {'id': 'HPND-sell-variant-MIT-disclaimer', 'deprecated': False}, + 'hpnd-sell-variant-mit-disclaimer-rev': {'id': 'HPND-sell-variant-MIT-disclaimer-rev', 'deprecated': False}, + 'hpnd-uc': {'id': 'HPND-UC', 'deprecated': False}, + 'hpnd-uc-export-us': {'id': 'HPND-UC-export-US', 'deprecated': False}, + 'htmltidy': {'id': 'HTMLTIDY', 'deprecated': False}, + 'ibm-pibs': {'id': 'IBM-pibs', 'deprecated': False}, + 'icu': {'id': 'ICU', 'deprecated': False}, + 'iec-code-components-eula': {'id': 'IEC-Code-Components-EULA', 'deprecated': False}, + 
'ijg': {'id': 'IJG', 'deprecated': False}, + 'ijg-short': {'id': 'IJG-short', 'deprecated': False}, + 'imagemagick': {'id': 'ImageMagick', 'deprecated': False}, + 'imatix': {'id': 'iMatix', 'deprecated': False}, + 'imlib2': {'id': 'Imlib2', 'deprecated': False}, + 'info-zip': {'id': 'Info-ZIP', 'deprecated': False}, + 'inner-net-2.0': {'id': 'Inner-Net-2.0', 'deprecated': False}, + 'innosetup': {'id': 'InnoSetup', 'deprecated': False}, + 'intel': {'id': 'Intel', 'deprecated': False}, + 'intel-acpi': {'id': 'Intel-ACPI', 'deprecated': False}, + 'interbase-1.0': {'id': 'Interbase-1.0', 'deprecated': False}, + 'ipa': {'id': 'IPA', 'deprecated': False}, + 'ipl-1.0': {'id': 'IPL-1.0', 'deprecated': False}, + 'isc': {'id': 'ISC', 'deprecated': False}, + 'isc-veillard': {'id': 'ISC-Veillard', 'deprecated': False}, + 'jam': {'id': 'Jam', 'deprecated': False}, + 'jasper-2.0': {'id': 'JasPer-2.0', 'deprecated': False}, + 'jove': {'id': 'jove', 'deprecated': False}, + 'jpl-image': {'id': 'JPL-image', 'deprecated': False}, + 'jpnic': {'id': 'JPNIC', 'deprecated': False}, + 'json': {'id': 'JSON', 'deprecated': False}, + 'kastrup': {'id': 'Kastrup', 'deprecated': False}, + 'kazlib': {'id': 'Kazlib', 'deprecated': False}, + 'knuth-ctan': {'id': 'Knuth-CTAN', 'deprecated': False}, + 'lal-1.2': {'id': 'LAL-1.2', 'deprecated': False}, + 'lal-1.3': {'id': 'LAL-1.3', 'deprecated': False}, + 'latex2e': {'id': 'Latex2e', 'deprecated': False}, + 'latex2e-translated-notice': {'id': 'Latex2e-translated-notice', 'deprecated': False}, + 'leptonica': {'id': 'Leptonica', 'deprecated': False}, + 'lgpl-2.0': {'id': 'LGPL-2.0', 'deprecated': True}, + 'lgpl-2.0+': {'id': 'LGPL-2.0+', 'deprecated': True}, + 'lgpl-2.0-only': {'id': 'LGPL-2.0-only', 'deprecated': False}, + 'lgpl-2.0-or-later': {'id': 'LGPL-2.0-or-later', 'deprecated': False}, + 'lgpl-2.1': {'id': 'LGPL-2.1', 'deprecated': True}, + 'lgpl-2.1+': {'id': 'LGPL-2.1+', 'deprecated': True}, + 'lgpl-2.1-only': {'id': 'LGPL-2.1-only', 'deprecated': False}, + 'lgpl-2.1-or-later': {'id': 'LGPL-2.1-or-later', 'deprecated': False}, + 'lgpl-3.0': {'id': 'LGPL-3.0', 'deprecated': True}, + 'lgpl-3.0+': {'id': 'LGPL-3.0+', 'deprecated': True}, + 'lgpl-3.0-only': {'id': 'LGPL-3.0-only', 'deprecated': False}, + 'lgpl-3.0-or-later': {'id': 'LGPL-3.0-or-later', 'deprecated': False}, + 'lgpllr': {'id': 'LGPLLR', 'deprecated': False}, + 'libpng': {'id': 'Libpng', 'deprecated': False}, + 'libpng-1.6.35': {'id': 'libpng-1.6.35', 'deprecated': False}, + 'libpng-2.0': {'id': 'libpng-2.0', 'deprecated': False}, + 'libselinux-1.0': {'id': 'libselinux-1.0', 'deprecated': False}, + 'libtiff': {'id': 'libtiff', 'deprecated': False}, + 'libutil-david-nugent': {'id': 'libutil-David-Nugent', 'deprecated': False}, + 'liliq-p-1.1': {'id': 'LiLiQ-P-1.1', 'deprecated': False}, + 'liliq-r-1.1': {'id': 'LiLiQ-R-1.1', 'deprecated': False}, + 'liliq-rplus-1.1': {'id': 'LiLiQ-Rplus-1.1', 'deprecated': False}, + 'linux-man-pages-1-para': {'id': 'Linux-man-pages-1-para', 'deprecated': False}, + 'linux-man-pages-copyleft': {'id': 'Linux-man-pages-copyleft', 'deprecated': False}, + 'linux-man-pages-copyleft-2-para': {'id': 'Linux-man-pages-copyleft-2-para', 'deprecated': False}, + 'linux-man-pages-copyleft-var': {'id': 'Linux-man-pages-copyleft-var', 'deprecated': False}, + 'linux-openib': {'id': 'Linux-OpenIB', 'deprecated': False}, + 'loop': {'id': 'LOOP', 'deprecated': False}, + 'lpd-document': {'id': 'LPD-document', 'deprecated': False}, + 'lpl-1.0': {'id': 'LPL-1.0', 'deprecated': False}, + 
'lpl-1.02': {'id': 'LPL-1.02', 'deprecated': False}, + 'lppl-1.0': {'id': 'LPPL-1.0', 'deprecated': False}, + 'lppl-1.1': {'id': 'LPPL-1.1', 'deprecated': False}, + 'lppl-1.2': {'id': 'LPPL-1.2', 'deprecated': False}, + 'lppl-1.3a': {'id': 'LPPL-1.3a', 'deprecated': False}, + 'lppl-1.3c': {'id': 'LPPL-1.3c', 'deprecated': False}, + 'lsof': {'id': 'lsof', 'deprecated': False}, + 'lucida-bitmap-fonts': {'id': 'Lucida-Bitmap-Fonts', 'deprecated': False}, + 'lzma-sdk-9.11-to-9.20': {'id': 'LZMA-SDK-9.11-to-9.20', 'deprecated': False}, + 'lzma-sdk-9.22': {'id': 'LZMA-SDK-9.22', 'deprecated': False}, + 'mackerras-3-clause': {'id': 'Mackerras-3-Clause', 'deprecated': False}, + 'mackerras-3-clause-acknowledgment': {'id': 'Mackerras-3-Clause-acknowledgment', 'deprecated': False}, + 'magaz': {'id': 'magaz', 'deprecated': False}, + 'mailprio': {'id': 'mailprio', 'deprecated': False}, + 'makeindex': {'id': 'MakeIndex', 'deprecated': False}, + 'man2html': {'id': 'man2html', 'deprecated': False}, + 'martin-birgmeier': {'id': 'Martin-Birgmeier', 'deprecated': False}, + 'mcphee-slideshow': {'id': 'McPhee-slideshow', 'deprecated': False}, + 'metamail': {'id': 'metamail', 'deprecated': False}, + 'minpack': {'id': 'Minpack', 'deprecated': False}, + 'mips': {'id': 'MIPS', 'deprecated': False}, + 'miros': {'id': 'MirOS', 'deprecated': False}, + 'mit': {'id': 'MIT', 'deprecated': False}, + 'mit-0': {'id': 'MIT-0', 'deprecated': False}, + 'mit-advertising': {'id': 'MIT-advertising', 'deprecated': False}, + 'mit-click': {'id': 'MIT-Click', 'deprecated': False}, + 'mit-cmu': {'id': 'MIT-CMU', 'deprecated': False}, + 'mit-enna': {'id': 'MIT-enna', 'deprecated': False}, + 'mit-feh': {'id': 'MIT-feh', 'deprecated': False}, + 'mit-festival': {'id': 'MIT-Festival', 'deprecated': False}, + 'mit-khronos-old': {'id': 'MIT-Khronos-old', 'deprecated': False}, + 'mit-modern-variant': {'id': 'MIT-Modern-Variant', 'deprecated': False}, + 'mit-open-group': {'id': 'MIT-open-group', 'deprecated': False}, + 'mit-testregex': {'id': 'MIT-testregex', 'deprecated': False}, + 'mit-wu': {'id': 'MIT-Wu', 'deprecated': False}, + 'mitnfa': {'id': 'MITNFA', 'deprecated': False}, + 'mmixware': {'id': 'MMIXware', 'deprecated': False}, + 'motosoto': {'id': 'Motosoto', 'deprecated': False}, + 'mpeg-ssg': {'id': 'MPEG-SSG', 'deprecated': False}, + 'mpi-permissive': {'id': 'mpi-permissive', 'deprecated': False}, + 'mpich2': {'id': 'mpich2', 'deprecated': False}, + 'mpl-1.0': {'id': 'MPL-1.0', 'deprecated': False}, + 'mpl-1.1': {'id': 'MPL-1.1', 'deprecated': False}, + 'mpl-2.0': {'id': 'MPL-2.0', 'deprecated': False}, + 'mpl-2.0-no-copyleft-exception': {'id': 'MPL-2.0-no-copyleft-exception', 'deprecated': False}, + 'mplus': {'id': 'mplus', 'deprecated': False}, + 'ms-lpl': {'id': 'MS-LPL', 'deprecated': False}, + 'ms-pl': {'id': 'MS-PL', 'deprecated': False}, + 'ms-rl': {'id': 'MS-RL', 'deprecated': False}, + 'mtll': {'id': 'MTLL', 'deprecated': False}, + 'mulanpsl-1.0': {'id': 'MulanPSL-1.0', 'deprecated': False}, + 'mulanpsl-2.0': {'id': 'MulanPSL-2.0', 'deprecated': False}, + 'multics': {'id': 'Multics', 'deprecated': False}, + 'mup': {'id': 'Mup', 'deprecated': False}, + 'naist-2003': {'id': 'NAIST-2003', 'deprecated': False}, + 'nasa-1.3': {'id': 'NASA-1.3', 'deprecated': False}, + 'naumen': {'id': 'Naumen', 'deprecated': False}, + 'nbpl-1.0': {'id': 'NBPL-1.0', 'deprecated': False}, + 'ncbi-pd': {'id': 'NCBI-PD', 'deprecated': False}, + 'ncgl-uk-2.0': {'id': 'NCGL-UK-2.0', 'deprecated': False}, + 'ncl': {'id': 'NCL', 'deprecated': False}, + 
'ncsa': {'id': 'NCSA', 'deprecated': False}, + 'net-snmp': {'id': 'Net-SNMP', 'deprecated': True}, + 'netcdf': {'id': 'NetCDF', 'deprecated': False}, + 'newsletr': {'id': 'Newsletr', 'deprecated': False}, + 'ngpl': {'id': 'NGPL', 'deprecated': False}, + 'ngrep': {'id': 'ngrep', 'deprecated': False}, + 'nicta-1.0': {'id': 'NICTA-1.0', 'deprecated': False}, + 'nist-pd': {'id': 'NIST-PD', 'deprecated': False}, + 'nist-pd-fallback': {'id': 'NIST-PD-fallback', 'deprecated': False}, + 'nist-software': {'id': 'NIST-Software', 'deprecated': False}, + 'nlod-1.0': {'id': 'NLOD-1.0', 'deprecated': False}, + 'nlod-2.0': {'id': 'NLOD-2.0', 'deprecated': False}, + 'nlpl': {'id': 'NLPL', 'deprecated': False}, + 'nokia': {'id': 'Nokia', 'deprecated': False}, + 'nosl': {'id': 'NOSL', 'deprecated': False}, + 'noweb': {'id': 'Noweb', 'deprecated': False}, + 'npl-1.0': {'id': 'NPL-1.0', 'deprecated': False}, + 'npl-1.1': {'id': 'NPL-1.1', 'deprecated': False}, + 'nposl-3.0': {'id': 'NPOSL-3.0', 'deprecated': False}, + 'nrl': {'id': 'NRL', 'deprecated': False}, + 'ntia-pd': {'id': 'NTIA-PD', 'deprecated': False}, + 'ntp': {'id': 'NTP', 'deprecated': False}, + 'ntp-0': {'id': 'NTP-0', 'deprecated': False}, + 'nunit': {'id': 'Nunit', 'deprecated': True}, + 'o-uda-1.0': {'id': 'O-UDA-1.0', 'deprecated': False}, + 'oar': {'id': 'OAR', 'deprecated': False}, + 'occt-pl': {'id': 'OCCT-PL', 'deprecated': False}, + 'oclc-2.0': {'id': 'OCLC-2.0', 'deprecated': False}, + 'odbl-1.0': {'id': 'ODbL-1.0', 'deprecated': False}, + 'odc-by-1.0': {'id': 'ODC-By-1.0', 'deprecated': False}, + 'offis': {'id': 'OFFIS', 'deprecated': False}, + 'ofl-1.0': {'id': 'OFL-1.0', 'deprecated': False}, + 'ofl-1.0-no-rfn': {'id': 'OFL-1.0-no-RFN', 'deprecated': False}, + 'ofl-1.0-rfn': {'id': 'OFL-1.0-RFN', 'deprecated': False}, + 'ofl-1.1': {'id': 'OFL-1.1', 'deprecated': False}, + 'ofl-1.1-no-rfn': {'id': 'OFL-1.1-no-RFN', 'deprecated': False}, + 'ofl-1.1-rfn': {'id': 'OFL-1.1-RFN', 'deprecated': False}, + 'ogc-1.0': {'id': 'OGC-1.0', 'deprecated': False}, + 'ogdl-taiwan-1.0': {'id': 'OGDL-Taiwan-1.0', 'deprecated': False}, + 'ogl-canada-2.0': {'id': 'OGL-Canada-2.0', 'deprecated': False}, + 'ogl-uk-1.0': {'id': 'OGL-UK-1.0', 'deprecated': False}, + 'ogl-uk-2.0': {'id': 'OGL-UK-2.0', 'deprecated': False}, + 'ogl-uk-3.0': {'id': 'OGL-UK-3.0', 'deprecated': False}, + 'ogtsl': {'id': 'OGTSL', 'deprecated': False}, + 'oldap-1.1': {'id': 'OLDAP-1.1', 'deprecated': False}, + 'oldap-1.2': {'id': 'OLDAP-1.2', 'deprecated': False}, + 'oldap-1.3': {'id': 'OLDAP-1.3', 'deprecated': False}, + 'oldap-1.4': {'id': 'OLDAP-1.4', 'deprecated': False}, + 'oldap-2.0': {'id': 'OLDAP-2.0', 'deprecated': False}, + 'oldap-2.0.1': {'id': 'OLDAP-2.0.1', 'deprecated': False}, + 'oldap-2.1': {'id': 'OLDAP-2.1', 'deprecated': False}, + 'oldap-2.2': {'id': 'OLDAP-2.2', 'deprecated': False}, + 'oldap-2.2.1': {'id': 'OLDAP-2.2.1', 'deprecated': False}, + 'oldap-2.2.2': {'id': 'OLDAP-2.2.2', 'deprecated': False}, + 'oldap-2.3': {'id': 'OLDAP-2.3', 'deprecated': False}, + 'oldap-2.4': {'id': 'OLDAP-2.4', 'deprecated': False}, + 'oldap-2.5': {'id': 'OLDAP-2.5', 'deprecated': False}, + 'oldap-2.6': {'id': 'OLDAP-2.6', 'deprecated': False}, + 'oldap-2.7': {'id': 'OLDAP-2.7', 'deprecated': False}, + 'oldap-2.8': {'id': 'OLDAP-2.8', 'deprecated': False}, + 'olfl-1.3': {'id': 'OLFL-1.3', 'deprecated': False}, + 'oml': {'id': 'OML', 'deprecated': False}, + 'openpbs-2.3': {'id': 'OpenPBS-2.3', 'deprecated': False}, + 'openssl': {'id': 'OpenSSL', 'deprecated': False}, + 
'openssl-standalone': {'id': 'OpenSSL-standalone', 'deprecated': False}, + 'openvision': {'id': 'OpenVision', 'deprecated': False}, + 'opl-1.0': {'id': 'OPL-1.0', 'deprecated': False}, + 'opl-uk-3.0': {'id': 'OPL-UK-3.0', 'deprecated': False}, + 'opubl-1.0': {'id': 'OPUBL-1.0', 'deprecated': False}, + 'oset-pl-2.1': {'id': 'OSET-PL-2.1', 'deprecated': False}, + 'osl-1.0': {'id': 'OSL-1.0', 'deprecated': False}, + 'osl-1.1': {'id': 'OSL-1.1', 'deprecated': False}, + 'osl-2.0': {'id': 'OSL-2.0', 'deprecated': False}, + 'osl-2.1': {'id': 'OSL-2.1', 'deprecated': False}, + 'osl-3.0': {'id': 'OSL-3.0', 'deprecated': False}, + 'padl': {'id': 'PADL', 'deprecated': False}, + 'parity-6.0.0': {'id': 'Parity-6.0.0', 'deprecated': False}, + 'parity-7.0.0': {'id': 'Parity-7.0.0', 'deprecated': False}, + 'pddl-1.0': {'id': 'PDDL-1.0', 'deprecated': False}, + 'php-3.0': {'id': 'PHP-3.0', 'deprecated': False}, + 'php-3.01': {'id': 'PHP-3.01', 'deprecated': False}, + 'pixar': {'id': 'Pixar', 'deprecated': False}, + 'pkgconf': {'id': 'pkgconf', 'deprecated': False}, + 'plexus': {'id': 'Plexus', 'deprecated': False}, + 'pnmstitch': {'id': 'pnmstitch', 'deprecated': False}, + 'polyform-noncommercial-1.0.0': {'id': 'PolyForm-Noncommercial-1.0.0', 'deprecated': False}, + 'polyform-small-business-1.0.0': {'id': 'PolyForm-Small-Business-1.0.0', 'deprecated': False}, + 'postgresql': {'id': 'PostgreSQL', 'deprecated': False}, + 'ppl': {'id': 'PPL', 'deprecated': False}, + 'psf-2.0': {'id': 'PSF-2.0', 'deprecated': False}, + 'psfrag': {'id': 'psfrag', 'deprecated': False}, + 'psutils': {'id': 'psutils', 'deprecated': False}, + 'python-2.0': {'id': 'Python-2.0', 'deprecated': False}, + 'python-2.0.1': {'id': 'Python-2.0.1', 'deprecated': False}, + 'python-ldap': {'id': 'python-ldap', 'deprecated': False}, + 'qhull': {'id': 'Qhull', 'deprecated': False}, + 'qpl-1.0': {'id': 'QPL-1.0', 'deprecated': False}, + 'qpl-1.0-inria-2004': {'id': 'QPL-1.0-INRIA-2004', 'deprecated': False}, + 'radvd': {'id': 'radvd', 'deprecated': False}, + 'rdisc': {'id': 'Rdisc', 'deprecated': False}, + 'rhecos-1.1': {'id': 'RHeCos-1.1', 'deprecated': False}, + 'rpl-1.1': {'id': 'RPL-1.1', 'deprecated': False}, + 'rpl-1.5': {'id': 'RPL-1.5', 'deprecated': False}, + 'rpsl-1.0': {'id': 'RPSL-1.0', 'deprecated': False}, + 'rsa-md': {'id': 'RSA-MD', 'deprecated': False}, + 'rscpl': {'id': 'RSCPL', 'deprecated': False}, + 'ruby': {'id': 'Ruby', 'deprecated': False}, + 'ruby-pty': {'id': 'Ruby-pty', 'deprecated': False}, + 'sax-pd': {'id': 'SAX-PD', 'deprecated': False}, + 'sax-pd-2.0': {'id': 'SAX-PD-2.0', 'deprecated': False}, + 'saxpath': {'id': 'Saxpath', 'deprecated': False}, + 'scea': {'id': 'SCEA', 'deprecated': False}, + 'schemereport': {'id': 'SchemeReport', 'deprecated': False}, + 'sendmail': {'id': 'Sendmail', 'deprecated': False}, + 'sendmail-8.23': {'id': 'Sendmail-8.23', 'deprecated': False}, + 'sendmail-open-source-1.1': {'id': 'Sendmail-Open-Source-1.1', 'deprecated': False}, + 'sgi-b-1.0': {'id': 'SGI-B-1.0', 'deprecated': False}, + 'sgi-b-1.1': {'id': 'SGI-B-1.1', 'deprecated': False}, + 'sgi-b-2.0': {'id': 'SGI-B-2.0', 'deprecated': False}, + 'sgi-opengl': {'id': 'SGI-OpenGL', 'deprecated': False}, + 'sgp4': {'id': 'SGP4', 'deprecated': False}, + 'shl-0.5': {'id': 'SHL-0.5', 'deprecated': False}, + 'shl-0.51': {'id': 'SHL-0.51', 'deprecated': False}, + 'simpl-2.0': {'id': 'SimPL-2.0', 'deprecated': False}, + 'sissl': {'id': 'SISSL', 'deprecated': False}, + 'sissl-1.2': {'id': 'SISSL-1.2', 'deprecated': False}, + 'sl': {'id': 'SL', 
'deprecated': False}, + 'sleepycat': {'id': 'Sleepycat', 'deprecated': False}, + 'smail-gpl': {'id': 'SMAIL-GPL', 'deprecated': False}, + 'smlnj': {'id': 'SMLNJ', 'deprecated': False}, + 'smppl': {'id': 'SMPPL', 'deprecated': False}, + 'snia': {'id': 'SNIA', 'deprecated': False}, + 'snprintf': {'id': 'snprintf', 'deprecated': False}, + 'sofa': {'id': 'SOFA', 'deprecated': False}, + 'softsurfer': {'id': 'softSurfer', 'deprecated': False}, + 'soundex': {'id': 'Soundex', 'deprecated': False}, + 'spencer-86': {'id': 'Spencer-86', 'deprecated': False}, + 'spencer-94': {'id': 'Spencer-94', 'deprecated': False}, + 'spencer-99': {'id': 'Spencer-99', 'deprecated': False}, + 'spl-1.0': {'id': 'SPL-1.0', 'deprecated': False}, + 'ssh-keyscan': {'id': 'ssh-keyscan', 'deprecated': False}, + 'ssh-openssh': {'id': 'SSH-OpenSSH', 'deprecated': False}, + 'ssh-short': {'id': 'SSH-short', 'deprecated': False}, + 'ssleay-standalone': {'id': 'SSLeay-standalone', 'deprecated': False}, + 'sspl-1.0': {'id': 'SSPL-1.0', 'deprecated': False}, + 'standardml-nj': {'id': 'StandardML-NJ', 'deprecated': True}, + 'sugarcrm-1.1.3': {'id': 'SugarCRM-1.1.3', 'deprecated': False}, + 'sul-1.0': {'id': 'SUL-1.0', 'deprecated': False}, + 'sun-ppp': {'id': 'Sun-PPP', 'deprecated': False}, + 'sun-ppp-2000': {'id': 'Sun-PPP-2000', 'deprecated': False}, + 'sunpro': {'id': 'SunPro', 'deprecated': False}, + 'swl': {'id': 'SWL', 'deprecated': False}, + 'swrule': {'id': 'swrule', 'deprecated': False}, + 'symlinks': {'id': 'Symlinks', 'deprecated': False}, + 'tapr-ohl-1.0': {'id': 'TAPR-OHL-1.0', 'deprecated': False}, + 'tcl': {'id': 'TCL', 'deprecated': False}, + 'tcp-wrappers': {'id': 'TCP-wrappers', 'deprecated': False}, + 'termreadkey': {'id': 'TermReadKey', 'deprecated': False}, + 'tgppl-1.0': {'id': 'TGPPL-1.0', 'deprecated': False}, + 'thirdeye': {'id': 'ThirdEye', 'deprecated': False}, + 'threeparttable': {'id': 'threeparttable', 'deprecated': False}, + 'tmate': {'id': 'TMate', 'deprecated': False}, + 'torque-1.1': {'id': 'TORQUE-1.1', 'deprecated': False}, + 'tosl': {'id': 'TOSL', 'deprecated': False}, + 'tpdl': {'id': 'TPDL', 'deprecated': False}, + 'tpl-1.0': {'id': 'TPL-1.0', 'deprecated': False}, + 'trustedqsl': {'id': 'TrustedQSL', 'deprecated': False}, + 'ttwl': {'id': 'TTWL', 'deprecated': False}, + 'ttyp0': {'id': 'TTYP0', 'deprecated': False}, + 'tu-berlin-1.0': {'id': 'TU-Berlin-1.0', 'deprecated': False}, + 'tu-berlin-2.0': {'id': 'TU-Berlin-2.0', 'deprecated': False}, + 'ubuntu-font-1.0': {'id': 'Ubuntu-font-1.0', 'deprecated': False}, + 'ucar': {'id': 'UCAR', 'deprecated': False}, + 'ucl-1.0': {'id': 'UCL-1.0', 'deprecated': False}, + 'ulem': {'id': 'ulem', 'deprecated': False}, + 'umich-merit': {'id': 'UMich-Merit', 'deprecated': False}, + 'unicode-3.0': {'id': 'Unicode-3.0', 'deprecated': False}, + 'unicode-dfs-2015': {'id': 'Unicode-DFS-2015', 'deprecated': False}, + 'unicode-dfs-2016': {'id': 'Unicode-DFS-2016', 'deprecated': False}, + 'unicode-tou': {'id': 'Unicode-TOU', 'deprecated': False}, + 'unixcrypt': {'id': 'UnixCrypt', 'deprecated': False}, + 'unlicense': {'id': 'Unlicense', 'deprecated': False}, + 'unlicense-libtelnet': {'id': 'Unlicense-libtelnet', 'deprecated': False}, + 'unlicense-libwhirlpool': {'id': 'Unlicense-libwhirlpool', 'deprecated': False}, + 'upl-1.0': {'id': 'UPL-1.0', 'deprecated': False}, + 'urt-rle': {'id': 'URT-RLE', 'deprecated': False}, + 'vim': {'id': 'Vim', 'deprecated': False}, + 'vostrom': {'id': 'VOSTROM', 'deprecated': False}, + 'vsl-1.0': {'id': 'VSL-1.0', 'deprecated': 
False}, + 'w3c': {'id': 'W3C', 'deprecated': False}, + 'w3c-19980720': {'id': 'W3C-19980720', 'deprecated': False}, + 'w3c-20150513': {'id': 'W3C-20150513', 'deprecated': False}, + 'w3m': {'id': 'w3m', 'deprecated': False}, + 'watcom-1.0': {'id': 'Watcom-1.0', 'deprecated': False}, + 'widget-workshop': {'id': 'Widget-Workshop', 'deprecated': False}, + 'wsuipa': {'id': 'Wsuipa', 'deprecated': False}, + 'wtfpl': {'id': 'WTFPL', 'deprecated': False}, + 'wwl': {'id': 'wwl', 'deprecated': False}, + 'wxwindows': {'id': 'wxWindows', 'deprecated': True}, + 'x11': {'id': 'X11', 'deprecated': False}, + 'x11-distribute-modifications-variant': {'id': 'X11-distribute-modifications-variant', 'deprecated': False}, + 'x11-swapped': {'id': 'X11-swapped', 'deprecated': False}, + 'xdebug-1.03': {'id': 'Xdebug-1.03', 'deprecated': False}, + 'xerox': {'id': 'Xerox', 'deprecated': False}, + 'xfig': {'id': 'Xfig', 'deprecated': False}, + 'xfree86-1.1': {'id': 'XFree86-1.1', 'deprecated': False}, + 'xinetd': {'id': 'xinetd', 'deprecated': False}, + 'xkeyboard-config-zinoviev': {'id': 'xkeyboard-config-Zinoviev', 'deprecated': False}, + 'xlock': {'id': 'xlock', 'deprecated': False}, + 'xnet': {'id': 'Xnet', 'deprecated': False}, + 'xpp': {'id': 'xpp', 'deprecated': False}, + 'xskat': {'id': 'XSkat', 'deprecated': False}, + 'xzoom': {'id': 'xzoom', 'deprecated': False}, + 'ypl-1.0': {'id': 'YPL-1.0', 'deprecated': False}, + 'ypl-1.1': {'id': 'YPL-1.1', 'deprecated': False}, + 'zed': {'id': 'Zed', 'deprecated': False}, + 'zeeff': {'id': 'Zeeff', 'deprecated': False}, + 'zend-2.0': {'id': 'Zend-2.0', 'deprecated': False}, + 'zimbra-1.3': {'id': 'Zimbra-1.3', 'deprecated': False}, + 'zimbra-1.4': {'id': 'Zimbra-1.4', 'deprecated': False}, + 'zlib': {'id': 'Zlib', 'deprecated': False}, + 'zlib-acknowledgement': {'id': 'zlib-acknowledgement', 'deprecated': False}, + 'zpl-1.1': {'id': 'ZPL-1.1', 'deprecated': False}, + 'zpl-2.0': {'id': 'ZPL-2.0', 'deprecated': False}, + 'zpl-2.1': {'id': 'ZPL-2.1', 'deprecated': False}, +} + +EXCEPTIONS: dict[str, SPDXException] = { + '389-exception': {'id': '389-exception', 'deprecated': False}, + 'asterisk-exception': {'id': 'Asterisk-exception', 'deprecated': False}, + 'asterisk-linking-protocols-exception': {'id': 'Asterisk-linking-protocols-exception', 'deprecated': False}, + 'autoconf-exception-2.0': {'id': 'Autoconf-exception-2.0', 'deprecated': False}, + 'autoconf-exception-3.0': {'id': 'Autoconf-exception-3.0', 'deprecated': False}, + 'autoconf-exception-generic': {'id': 'Autoconf-exception-generic', 'deprecated': False}, + 'autoconf-exception-generic-3.0': {'id': 'Autoconf-exception-generic-3.0', 'deprecated': False}, + 'autoconf-exception-macro': {'id': 'Autoconf-exception-macro', 'deprecated': False}, + 'bison-exception-1.24': {'id': 'Bison-exception-1.24', 'deprecated': False}, + 'bison-exception-2.2': {'id': 'Bison-exception-2.2', 'deprecated': False}, + 'bootloader-exception': {'id': 'Bootloader-exception', 'deprecated': False}, + 'cgal-linking-exception': {'id': 'CGAL-linking-exception', 'deprecated': False}, + 'classpath-exception-2.0': {'id': 'Classpath-exception-2.0', 'deprecated': False}, + 'clisp-exception-2.0': {'id': 'CLISP-exception-2.0', 'deprecated': False}, + 'cryptsetup-openssl-exception': {'id': 'cryptsetup-OpenSSL-exception', 'deprecated': False}, + 'digia-qt-lgpl-exception-1.1': {'id': 'Digia-Qt-LGPL-exception-1.1', 'deprecated': False}, + 'digirule-foss-exception': {'id': 'DigiRule-FOSS-exception', 'deprecated': False}, + 'ecos-exception-2.0': {'id': 
'eCos-exception-2.0', 'deprecated': False}, + 'erlang-otp-linking-exception': {'id': 'erlang-otp-linking-exception', 'deprecated': False}, + 'fawkes-runtime-exception': {'id': 'Fawkes-Runtime-exception', 'deprecated': False}, + 'fltk-exception': {'id': 'FLTK-exception', 'deprecated': False}, + 'fmt-exception': {'id': 'fmt-exception', 'deprecated': False}, + 'font-exception-2.0': {'id': 'Font-exception-2.0', 'deprecated': False}, + 'freertos-exception-2.0': {'id': 'freertos-exception-2.0', 'deprecated': False}, + 'gcc-exception-2.0': {'id': 'GCC-exception-2.0', 'deprecated': False}, + 'gcc-exception-2.0-note': {'id': 'GCC-exception-2.0-note', 'deprecated': False}, + 'gcc-exception-3.1': {'id': 'GCC-exception-3.1', 'deprecated': False}, + 'gmsh-exception': {'id': 'Gmsh-exception', 'deprecated': False}, + 'gnat-exception': {'id': 'GNAT-exception', 'deprecated': False}, + 'gnome-examples-exception': {'id': 'GNOME-examples-exception', 'deprecated': False}, + 'gnu-compiler-exception': {'id': 'GNU-compiler-exception', 'deprecated': False}, + 'gnu-javamail-exception': {'id': 'gnu-javamail-exception', 'deprecated': False}, + 'gpl-3.0-389-ds-base-exception': {'id': 'GPL-3.0-389-ds-base-exception', 'deprecated': False}, + 'gpl-3.0-interface-exception': {'id': 'GPL-3.0-interface-exception', 'deprecated': False}, + 'gpl-3.0-linking-exception': {'id': 'GPL-3.0-linking-exception', 'deprecated': False}, + 'gpl-3.0-linking-source-exception': {'id': 'GPL-3.0-linking-source-exception', 'deprecated': False}, + 'gpl-cc-1.0': {'id': 'GPL-CC-1.0', 'deprecated': False}, + 'gstreamer-exception-2005': {'id': 'GStreamer-exception-2005', 'deprecated': False}, + 'gstreamer-exception-2008': {'id': 'GStreamer-exception-2008', 'deprecated': False}, + 'harbour-exception': {'id': 'harbour-exception', 'deprecated': False}, + 'i2p-gpl-java-exception': {'id': 'i2p-gpl-java-exception', 'deprecated': False}, + 'independent-modules-exception': {'id': 'Independent-modules-exception', 'deprecated': False}, + 'kicad-libraries-exception': {'id': 'KiCad-libraries-exception', 'deprecated': False}, + 'lgpl-3.0-linking-exception': {'id': 'LGPL-3.0-linking-exception', 'deprecated': False}, + 'libpri-openh323-exception': {'id': 'libpri-OpenH323-exception', 'deprecated': False}, + 'libtool-exception': {'id': 'Libtool-exception', 'deprecated': False}, + 'linux-syscall-note': {'id': 'Linux-syscall-note', 'deprecated': False}, + 'llgpl': {'id': 'LLGPL', 'deprecated': False}, + 'llvm-exception': {'id': 'LLVM-exception', 'deprecated': False}, + 'lzma-exception': {'id': 'LZMA-exception', 'deprecated': False}, + 'mif-exception': {'id': 'mif-exception', 'deprecated': False}, + 'mxml-exception': {'id': 'mxml-exception', 'deprecated': False}, + 'nokia-qt-exception-1.1': {'id': 'Nokia-Qt-exception-1.1', 'deprecated': True}, + 'ocaml-lgpl-linking-exception': {'id': 'OCaml-LGPL-linking-exception', 'deprecated': False}, + 'occt-exception-1.0': {'id': 'OCCT-exception-1.0', 'deprecated': False}, + 'openjdk-assembly-exception-1.0': {'id': 'OpenJDK-assembly-exception-1.0', 'deprecated': False}, + 'openvpn-openssl-exception': {'id': 'openvpn-openssl-exception', 'deprecated': False}, + 'pcre2-exception': {'id': 'PCRE2-exception', 'deprecated': False}, + 'polyparse-exception': {'id': 'polyparse-exception', 'deprecated': False}, + 'ps-or-pdf-font-exception-20170817': {'id': 'PS-or-PDF-font-exception-20170817', 'deprecated': False}, + 'qpl-1.0-inria-2004-exception': {'id': 'QPL-1.0-INRIA-2004-exception', 'deprecated': False}, + 'qt-gpl-exception-1.0': {'id': 
'Qt-GPL-exception-1.0', 'deprecated': False}, + 'qt-lgpl-exception-1.1': {'id': 'Qt-LGPL-exception-1.1', 'deprecated': False}, + 'qwt-exception-1.0': {'id': 'Qwt-exception-1.0', 'deprecated': False}, + 'romic-exception': {'id': 'romic-exception', 'deprecated': False}, + 'rrdtool-floss-exception-2.0': {'id': 'RRDtool-FLOSS-exception-2.0', 'deprecated': False}, + 'sane-exception': {'id': 'SANE-exception', 'deprecated': False}, + 'shl-2.0': {'id': 'SHL-2.0', 'deprecated': False}, + 'shl-2.1': {'id': 'SHL-2.1', 'deprecated': False}, + 'stunnel-exception': {'id': 'stunnel-exception', 'deprecated': False}, + 'swi-exception': {'id': 'SWI-exception', 'deprecated': False}, + 'swift-exception': {'id': 'Swift-exception', 'deprecated': False}, + 'texinfo-exception': {'id': 'Texinfo-exception', 'deprecated': False}, + 'u-boot-exception-2.0': {'id': 'u-boot-exception-2.0', 'deprecated': False}, + 'ubdl-exception': {'id': 'UBDL-exception', 'deprecated': False}, + 'universal-foss-exception-1.0': {'id': 'Universal-FOSS-exception-1.0', 'deprecated': False}, + 'vsftpd-openssl-exception': {'id': 'vsftpd-openssl-exception', 'deprecated': False}, + 'wxwindows-exception-3.1': {'id': 'WxWindows-exception-3.1', 'deprecated': False}, + 'x11vnc-openssl-exception': {'id': 'x11vnc-openssl-exception', 'deprecated': False}, +} diff --git a/.venv/lib/python3.12/site-packages/packaging/markers.py b/.venv/lib/python3.12/site-packages/packaging/markers.py new file mode 100644 index 0000000..ca3706f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging/markers.py @@ -0,0 +1,388 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import operator +import os +import platform +import sys +from typing import AbstractSet, Callable, Literal, Mapping, TypedDict, Union, cast + +from ._parser import MarkerAtom, MarkerList, Op, Value, Variable +from ._parser import parse_marker as _parse_marker +from ._tokenizer import ParserSyntaxError +from .specifiers import InvalidSpecifier, Specifier +from .utils import canonicalize_name + +__all__ = [ + "Environment", + "EvaluateContext", + "InvalidMarker", + "Marker", + "UndefinedComparison", + "UndefinedEnvironmentName", + "default_environment", +] + +Operator = Callable[[str, Union[str, AbstractSet[str]]], bool] +EvaluateContext = Literal["metadata", "lock_file", "requirement"] +MARKERS_ALLOWING_SET = {"extras", "dependency_groups"} +MARKERS_REQUIRING_VERSION = { + "implementation_version", + "platform_release", + "python_full_version", + "python_version", +} + + +class InvalidMarker(ValueError): + """ + An invalid marker was found, users should refer to PEP 508. + """ + + +class UndefinedComparison(ValueError): + """ + An invalid operation was attempted on a value that doesn't support it. + """ + + +class UndefinedEnvironmentName(ValueError): + """ + A name was attempted to be used that does not exist inside of the + environment. + """ + + +class Environment(TypedDict): + implementation_name: str + """The implementation's identifier, e.g. ``'cpython'``.""" + + implementation_version: str + """ + The implementation's version, e.g. ``'3.13.0a2'`` for CPython 3.13.0a2, or + ``'7.3.13'`` for PyPy3.10 v7.3.13. + """ + + os_name: str + """ + The value of :py:data:`os.name`. The name of the operating system dependent module + imported, e.g. ``'posix'``. 
+ """ + + platform_machine: str + """ + Returns the machine type, e.g. ``'i386'``. + + An empty string if the value cannot be determined. + """ + + platform_release: str + """ + The system's release, e.g. ``'2.2.0'`` or ``'NT'``. + + An empty string if the value cannot be determined. + """ + + platform_system: str + """ + The system/OS name, e.g. ``'Linux'``, ``'Windows'`` or ``'Java'``. + + An empty string if the value cannot be determined. + """ + + platform_version: str + """ + The system's release version, e.g. ``'#3 on degas'``. + + An empty string if the value cannot be determined. + """ + + python_full_version: str + """ + The Python version as string ``'major.minor.patchlevel'``. + + Note that unlike the Python :py:data:`sys.version`, this value will always include + the patchlevel (it defaults to 0). + """ + + platform_python_implementation: str + """ + A string identifying the Python implementation, e.g. ``'CPython'``. + """ + + python_version: str + """The Python version as string ``'major.minor'``.""" + + sys_platform: str + """ + This string contains a platform identifier that can be used to append + platform-specific components to :py:data:`sys.path`, for instance. + + For Unix systems, except on Linux and AIX, this is the lowercased OS name as + returned by ``uname -s`` with the first part of the version as returned by + ``uname -r`` appended, e.g. ``'sunos5'`` or ``'freebsd8'``, at the time when Python + was built. + """ + + +def _normalize_extras( + result: MarkerList | MarkerAtom | str, +) -> MarkerList | MarkerAtom | str: + if not isinstance(result, tuple): + return result + + lhs, op, rhs = result + if isinstance(lhs, Variable) and lhs.value == "extra": + normalized_extra = canonicalize_name(rhs.value) + rhs = Value(normalized_extra) + elif isinstance(rhs, Variable) and rhs.value == "extra": + normalized_extra = canonicalize_name(lhs.value) + lhs = Value(normalized_extra) + return lhs, op, rhs + + +def _normalize_extra_values(results: MarkerList) -> MarkerList: + """ + Normalize extra values. + """ + + return [_normalize_extras(r) for r in results] + + +def _format_marker( + marker: list[str] | MarkerAtom | str, first: bool | None = True +) -> str: + assert isinstance(marker, (list, tuple, str)) + + # Sometimes we have a structure like [[...]] which is a single item list + # where the single item is itself it's own list. In that case we want skip + # the rest of this function so that we don't get extraneous () on the + # outside. 
+    if (
+        isinstance(marker, list)
+        and len(marker) == 1
+        and isinstance(marker[0], (list, tuple))
+    ):
+        return _format_marker(marker[0])
+
+    if isinstance(marker, list):
+        inner = (_format_marker(m, first=False) for m in marker)
+        if first:
+            return " ".join(inner)
+        else:
+            return "(" + " ".join(inner) + ")"
+    elif isinstance(marker, tuple):
+        return " ".join([m.serialize() for m in marker])
+    else:
+        return marker
+
+
+_operators: dict[str, Operator] = {
+    "in": lambda lhs, rhs: lhs in rhs,
+    "not in": lambda lhs, rhs: lhs not in rhs,
+    "<": operator.lt,
+    "<=": operator.le,
+    "==": operator.eq,
+    "!=": operator.ne,
+    ">=": operator.ge,
+    ">": operator.gt,
+}
+
+
+def _eval_op(lhs: str, op: Op, rhs: str | AbstractSet[str], *, key: str) -> bool:
+    op_str = op.serialize()
+    if key in MARKERS_REQUIRING_VERSION:
+        try:
+            spec = Specifier(f"{op_str}{rhs}")
+        except InvalidSpecifier:
+            pass
+        else:
+            return spec.contains(lhs, prereleases=True)
+
+    oper: Operator | None = _operators.get(op_str)
+    if oper is None:
+        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
+
+    return oper(lhs, rhs)
+
+
+def _normalize(
+    lhs: str, rhs: str | AbstractSet[str], key: str
+) -> tuple[str, str | AbstractSet[str]]:
+    # PEP 685 - Comparison of extra names for optional distribution dependencies
+    # https://peps.python.org/pep-0685/
+    # > When comparing extra names, tools MUST normalize the names being
+    # > compared using the semantics outlined in PEP 503 for names
+    if key == "extra":
+        assert isinstance(rhs, str), "extra value must be a string"
+        # Both sides are normalized at this point already
+        return (lhs, rhs)
+    if key in MARKERS_ALLOWING_SET:
+        if isinstance(rhs, str):  # pragma: no cover
+            return (canonicalize_name(lhs), canonicalize_name(rhs))
+        else:
+            return (canonicalize_name(lhs), {canonicalize_name(v) for v in rhs})
+
+    # other environment markers don't have such standards
+    return lhs, rhs
+
+
+def _evaluate_markers(
+    markers: MarkerList, environment: dict[str, str | AbstractSet[str]]
+) -> bool:
+    groups: list[list[bool]] = [[]]
+
+    for marker in markers:
+        if isinstance(marker, list):
+            groups[-1].append(_evaluate_markers(marker, environment))
+        elif isinstance(marker, tuple):
+            lhs, op, rhs = marker
+
+            if isinstance(lhs, Variable):
+                environment_key = lhs.value
+                lhs_value = environment[environment_key]
+                rhs_value = rhs.value
+            else:
+                lhs_value = lhs.value
+                environment_key = rhs.value
+                rhs_value = environment[environment_key]
+
+            assert isinstance(lhs_value, str), "lhs must be a string"
+            lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
+            groups[-1].append(_eval_op(lhs_value, op, rhs_value, key=environment_key))
+        elif marker == "or":
+            groups.append([])
+        elif marker == "and":
+            pass
+        else:  # pragma: nocover
+            raise TypeError(f"Unexpected marker {marker!r}")
+
+    return any(all(item) for item in groups)
+
+
+def format_full_version(info: sys._version_info) -> str:
+    version = f"{info.major}.{info.minor}.{info.micro}"
+    kind = info.releaselevel
+    if kind != "final":
+        version += kind[0] + str(info.serial)
+    return version
+
+
+def default_environment() -> Environment:
+    iver = format_full_version(sys.implementation.version)
+    implementation_name = sys.implementation.name
+    return {
+        "implementation_name": implementation_name,
+        "implementation_version": iver,
+        "os_name": os.name,
+        "platform_machine": platform.machine(),
+        "platform_release": platform.release(),
+        "platform_system": platform.system(),
+        "platform_version": platform.version(),
+        "python_full_version": platform.python_version(),
+        "platform_python_implementation": platform.python_implementation(),
+        "python_version": ".".join(platform.python_version_tuple()[:2]),
+        "sys_platform": sys.platform,
+    }
+
+
+class Marker:
+    def __init__(self, marker: str) -> None:
+        # Note: We create a Marker object without calling this constructor in
+        #       packaging.requirements.Requirement. If any additional logic is
+        #       added here, make sure to mirror/adapt Requirement.
+
+        # If this fails and throws an error, the repr still expects _markers to
+        # be defined.
+        self._markers: MarkerList = []
+
+        try:
+            self._markers = _normalize_extra_values(_parse_marker(marker))
+            # The attribute `_markers` can be described in terms of a recursive type:
+            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
+            #
+            # For example, the following expression:
+            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
+            #
+            # is parsed into:
+            # [
+            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
+            #     'and',
+            #     [
+            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
+            #         'or',
+            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
+            #     ]
+            # ]
+        except ParserSyntaxError as e:
+            raise InvalidMarker(str(e)) from e
+
+    def __str__(self) -> str:
+        return _format_marker(self._markers)
+
+    def __repr__(self) -> str:
+        return f"<{self.__class__.__name__}('{self}')>"
+
+    def __hash__(self) -> int:
+        return hash(str(self))
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Marker):
+            return NotImplemented
+
+        return str(self) == str(other)
+
+    def evaluate(
+        self,
+        environment: Mapping[str, str | AbstractSet[str]] | None = None,
+        context: EvaluateContext = "metadata",
+    ) -> bool:
+        """Evaluate a marker.
+
+        Return the boolean from evaluating the given marker against the
+        environment. environment is an optional argument to override all or
+        part of the determined environment. The *context* parameter specifies what
+        context the markers are being evaluated for, which influences what markers
+        are considered valid. Acceptable values are "metadata" (for core metadata;
+        default), "lock_file", and "requirement" (i.e. all other situations).
+
+        The environment is determined from the current Python process.
+        """
+        current_environment = cast(
+            "dict[str, str | AbstractSet[str]]", default_environment()
+        )
+        if context == "lock_file":
+            current_environment.update(
+                extras=frozenset(), dependency_groups=frozenset()
+            )
+        elif context == "metadata":
+            current_environment["extra"] = ""
+
+        if environment is not None:
+            current_environment.update(environment)
+            if "extra" in current_environment:
+                # The API used to allow setting extra to None. We need to handle
+                # this case for backwards compatibility. Also skip running
+                # normalize name if extra is empty.
+                extra = cast("str | None", current_environment["extra"])
+                current_environment["extra"] = canonicalize_name(extra) if extra else ""
+
+        return _evaluate_markers(
+            self._markers, _repair_python_full_version(current_environment)
+        )
+
+
+def _repair_python_full_version(
+    env: dict[str, str | AbstractSet[str]],
+) -> dict[str, str | AbstractSet[str]]:
+    """
+    Work around platform.python_version() returning something that is not PEP 440
+    compliant for non-tagged Python builds.
+ """ + python_full_version = cast("str", env["python_full_version"]) + if python_full_version.endswith("+"): + env["python_full_version"] = f"{python_full_version}local" + return env diff --git a/.venv/lib/python3.12/site-packages/packaging/metadata.py b/.venv/lib/python3.12/site-packages/packaging/metadata.py new file mode 100644 index 0000000..253f6b1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging/metadata.py @@ -0,0 +1,978 @@ +from __future__ import annotations + +import email.feedparser +import email.header +import email.message +import email.parser +import email.policy +import keyword +import pathlib +import sys +import typing +from typing import ( + Any, + Callable, + Generic, + Literal, + TypedDict, + cast, +) + +from . import licenses, requirements, specifiers, utils +from . import version as version_module + +if typing.TYPE_CHECKING: + from .licenses import NormalizedLicenseExpression + +T = typing.TypeVar("T") + + +if sys.version_info >= (3, 11): # pragma: no cover + ExceptionGroup = ExceptionGroup # noqa: F821 +else: # pragma: no cover + + class ExceptionGroup(Exception): + """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11. + + If :external:exc:`ExceptionGroup` is already defined by Python itself, + that version is used instead. + """ + + message: str + exceptions: list[Exception] + + def __init__(self, message: str, exceptions: list[Exception]) -> None: + self.message = message + self.exceptions = exceptions + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})" + + +class InvalidMetadata(ValueError): + """A metadata field contains invalid data.""" + + field: str + """The name of the field that contains invalid data.""" + + def __init__(self, field: str, message: str) -> None: + self.field = field + super().__init__(message) + + +# The RawMetadata class attempts to make as few assumptions about the underlying +# serialization formats as possible. The idea is that as long as a serialization +# formats offer some very basic primitives in *some* way then we can support +# serializing to and from that format. +class RawMetadata(TypedDict, total=False): + """A dictionary of raw core metadata. + + Each field in core metadata maps to a key of this dictionary (when data is + provided). The key is lower-case and underscores are used instead of dashes + compared to the equivalent core metadata field. Any core metadata field that + can be specified multiple times or can hold multiple values in a single + field have a key with a plural name. See :class:`Metadata` whose attributes + match the keys of this dictionary. + + Core metadata fields that can be specified multiple times are stored as a + list or dict depending on which is appropriate for the field. Any fields + which hold multiple values in a single field are stored as a list. 
+ + """ + + # Metadata 1.0 - PEP 241 + metadata_version: str + name: str + version: str + platforms: list[str] + summary: str + description: str + keywords: list[str] + home_page: str + author: str + author_email: str + license: str + + # Metadata 1.1 - PEP 314 + supported_platforms: list[str] + download_url: str + classifiers: list[str] + requires: list[str] + provides: list[str] + obsoletes: list[str] + + # Metadata 1.2 - PEP 345 + maintainer: str + maintainer_email: str + requires_dist: list[str] + provides_dist: list[str] + obsoletes_dist: list[str] + requires_python: str + requires_external: list[str] + project_urls: dict[str, str] + + # Metadata 2.0 + # PEP 426 attempted to completely revamp the metadata format + # but got stuck without ever being able to build consensus on + # it and ultimately ended up withdrawn. + # + # However, a number of tools had started emitting METADATA with + # `2.0` Metadata-Version, so for historical reasons, this version + # was skipped. + + # Metadata 2.1 - PEP 566 + description_content_type: str + provides_extra: list[str] + + # Metadata 2.2 - PEP 643 + dynamic: list[str] + + # Metadata 2.3 - PEP 685 + # No new fields were added in PEP 685, just some edge case were + # tightened up to provide better interoperability. + + # Metadata 2.4 - PEP 639 + license_expression: str + license_files: list[str] + + # Metadata 2.5 - PEP 794 + import_names: list[str] + import_namespaces: list[str] + + +# 'keywords' is special as it's a string in the core metadata spec, but we +# represent it as a list. +_STRING_FIELDS = { + "author", + "author_email", + "description", + "description_content_type", + "download_url", + "home_page", + "license", + "license_expression", + "maintainer", + "maintainer_email", + "metadata_version", + "name", + "requires_python", + "summary", + "version", +} + +_LIST_FIELDS = { + "classifiers", + "dynamic", + "license_files", + "obsoletes", + "obsoletes_dist", + "platforms", + "provides", + "provides_dist", + "provides_extra", + "requires", + "requires_dist", + "requires_external", + "supported_platforms", + "import_names", + "import_namespaces", +} + +_DICT_FIELDS = { + "project_urls", +} + + +def _parse_keywords(data: str) -> list[str]: + """Split a string of comma-separated keywords into a list of keywords.""" + return [k.strip() for k in data.split(",")] + + +def _parse_project_urls(data: list[str]) -> dict[str, str]: + """Parse a list of label/URL string pairings separated by a comma.""" + urls = {} + for pair in data: + # Our logic is slightly tricky here as we want to try and do + # *something* reasonable with malformed data. + # + # The main thing that we have to worry about, is data that does + # not have a ',' at all to split the label from the Value. There + # isn't a singular right answer here, and we will fail validation + # later on (if the caller is validating) so it doesn't *really* + # matter, but since the missing value has to be an empty str + # and our return value is dict[str, str], if we let the key + # be the missing value, then they'd have multiple '' values that + # overwrite each other in a accumulating dict. + # + # The other potential issue is that it's possible to have the + # same label multiple times in the metadata, with no solid "right" + # answer with what to do in that case. As such, we'll do the only + # thing we can, which is treat the field as unparsable and add it + # to our list of unparsed fields. 
+ # + # TODO: The spec doesn't say anything about if the keys should be + # considered case sensitive or not... logically they should + # be case-preserving and case-insensitive, but doing that + # would open up more cases where we might have duplicate + # entries. + label, _, url = (s.strip() for s in pair.partition(",")) + + if label in urls: + # The label already exists in our set of urls, so this field + # is unparsable, and we can just add the whole thing to our + # unparsable data and stop processing it. + raise KeyError("duplicate labels in project urls") + urls[label] = url + + return urls + + +def _get_payload(msg: email.message.Message, source: bytes | str) -> str: + """Get the body of the message.""" + # If our source is a str, then our caller has managed encodings for us, + # and we don't need to deal with it. + if isinstance(source, str): + payload = msg.get_payload() + assert isinstance(payload, str) + return payload + # If our source is a bytes, then we're managing the encoding and we need + # to deal with it. + else: + bpayload = msg.get_payload(decode=True) + assert isinstance(bpayload, bytes) + try: + return bpayload.decode("utf8", "strict") + except UnicodeDecodeError as exc: + raise ValueError("payload in an invalid encoding") from exc + + +# The various parse_FORMAT functions here are intended to be as lenient as +# possible in their parsing, while still returning a correctly typed +# RawMetadata. +# +# To aid in this, we also generally want to do as little touching of the +# data as possible, except where there are possibly some historic holdovers +# that make valid data awkward to work with. +# +# While this is a lower level, intermediate format than our ``Metadata`` +# class, some light touch ups can make a massive difference in usability. + +# Map METADATA fields to RawMetadata. +_EMAIL_TO_RAW_MAPPING = { + "author": "author", + "author-email": "author_email", + "classifier": "classifiers", + "description": "description", + "description-content-type": "description_content_type", + "download-url": "download_url", + "dynamic": "dynamic", + "home-page": "home_page", + "import-name": "import_names", + "import-namespace": "import_namespaces", + "keywords": "keywords", + "license": "license", + "license-expression": "license_expression", + "license-file": "license_files", + "maintainer": "maintainer", + "maintainer-email": "maintainer_email", + "metadata-version": "metadata_version", + "name": "name", + "obsoletes": "obsoletes", + "obsoletes-dist": "obsoletes_dist", + "platform": "platforms", + "project-url": "project_urls", + "provides": "provides", + "provides-dist": "provides_dist", + "provides-extra": "provides_extra", + "requires": "requires", + "requires-dist": "requires_dist", + "requires-external": "requires_external", + "requires-python": "requires_python", + "summary": "summary", + "supported-platform": "supported_platforms", + "version": "version", +} +_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()} + + +# This class is for writing RFC822 messages +class RFC822Policy(email.policy.EmailPolicy): + """ + This is :class:`email.policy.EmailPolicy`, but with a simple ``header_store_parse`` + implementation that handles multi-line values, and some nice defaults. 
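The label/URL splitting described in the comments above boils down to a plain str.partition call; a short sketch with made-up strings shows both the normal case and the missing-comma case that the comments worry about.

    pair = "Homepage, https://example.org"
    label, _, url = (s.strip() for s in pair.partition(","))
    # label == "Homepage", url == "https://example.org"

    broken = "Homepage https://example.org"   # no comma at all
    label, _, url = (s.strip() for s in broken.partition(","))
    # label == "Homepage https://example.org", url == ""

A duplicated label makes the whole Project-URL field unparsable, as the KeyError branch above shows.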
+ """ + + utf8 = True + mangle_from_ = False + max_line_length = 0 + + def header_store_parse(self, name: str, value: str) -> tuple[str, str]: + size = len(name) + 2 + value = value.replace("\n", "\n" + " " * size) + return (name, value) + + +# This class is for writing RFC822 messages +class RFC822Message(email.message.EmailMessage): + """ + This is :class:`email.message.EmailMessage` with two small changes: it defaults to + our `RFC822Policy`, and it correctly writes unicode when being called + with `bytes()`. + """ + + def __init__(self) -> None: + super().__init__(policy=RFC822Policy()) + + def as_bytes( + self, unixfrom: bool = False, policy: email.policy.Policy | None = None + ) -> bytes: + """ + Return the bytes representation of the message. + + This handles unicode encoding. + """ + return self.as_string(unixfrom, policy=policy).encode("utf-8") + + +def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]: + """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``). + + This function returns a two-item tuple of dicts. The first dict is of + recognized fields from the core metadata specification. Fields that can be + parsed and translated into Python's built-in types are converted + appropriately. All other fields are left as-is. Fields that are allowed to + appear multiple times are stored as lists. + + The second dict contains all other fields from the metadata. This includes + any unrecognized fields. It also includes any fields which are expected to + be parsed into a built-in type but were not formatted appropriately. Finally, + any fields that are expected to appear only once but are repeated are + included in this dict. + + """ + raw: dict[str, str | list[str] | dict[str, str]] = {} + unparsed: dict[str, list[str]] = {} + + if isinstance(data, str): + parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data) + else: + parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data) + + # We have to wrap parsed.keys() in a set, because in the case of multiple + # values for a key (a list), the key will appear multiple times in the + # list of keys, but we're avoiding that by using get_all(). + for name_with_case in frozenset(parsed.keys()): + # Header names in RFC are case insensitive, so we'll normalize to all + # lower case to make comparisons easier. + name = name_with_case.lower() + + # We use get_all() here, even for fields that aren't multiple use, + # because otherwise someone could have e.g. two Name fields, and we + # would just silently ignore it rather than doing something about it. + headers = parsed.get_all(name) or [] + + # The way the email module works when parsing bytes is that it + # unconditionally decodes the bytes as ascii using the surrogateescape + # handler. When you pull that data back out (such as with get_all() ), + # it looks to see if the str has any surrogate escapes, and if it does + # it wraps it in a Header object instead of returning the string. + # + # As such, we'll look for those Header objects, and fix up the encoding. + value = [] + # Flag if we have run into any issues processing the headers, thus + # signalling that the data belongs in 'unparsed'. + valid_encoding = True + for h in headers: + # It's unclear if this can return more types than just a Header or + # a str, so we'll just assert here to make sure. + assert isinstance(h, (email.header.Header, str)) + + # If it's a header object, we need to do our little dance to get + # the real data out of it. 
In cases where there is invalid data + # we're going to end up with mojibake, but there's no obvious, good + # way around that without reimplementing parts of the Header object + # ourselves. + # + # That should be fine since, if mojibacked happens, this key is + # going into the unparsed dict anyways. + if isinstance(h, email.header.Header): + # The Header object stores it's data as chunks, and each chunk + # can be independently encoded, so we'll need to check each + # of them. + chunks: list[tuple[bytes, str | None]] = [] + for binary, _encoding in email.header.decode_header(h): + try: + binary.decode("utf8", "strict") + except UnicodeDecodeError: + # Enable mojibake. + encoding = "latin1" + valid_encoding = False + else: + encoding = "utf8" + chunks.append((binary, encoding)) + + # Turn our chunks back into a Header object, then let that + # Header object do the right thing to turn them into a + # string for us. + value.append(str(email.header.make_header(chunks))) + # This is already a string, so just add it. + else: + value.append(h) + + # We've processed all of our values to get them into a list of str, + # but we may have mojibake data, in which case this is an unparsed + # field. + if not valid_encoding: + unparsed[name] = value + continue + + raw_name = _EMAIL_TO_RAW_MAPPING.get(name) + if raw_name is None: + # This is a bit of a weird situation, we've encountered a key that + # we don't know what it means, so we don't know whether it's meant + # to be a list or not. + # + # Since we can't really tell one way or another, we'll just leave it + # as a list, even though it may be a single item list, because that's + # what makes the most sense for email headers. + unparsed[name] = value + continue + + # If this is one of our string fields, then we'll check to see if our + # value is a list of a single item. If it is then we'll assume that + # it was emitted as a single string, and unwrap the str from inside + # the list. + # + # If it's any other kind of data, then we haven't the faintest clue + # what we should parse it as, and we have to just add it to our list + # of unparsed stuff. + if raw_name in _STRING_FIELDS and len(value) == 1: + raw[raw_name] = value[0] + # If this is import_names, we need to special case the empty field + # case, which converts to an empty list instead of None. We can't let + # the empty case slip through, as it will fail validation. + elif raw_name == "import_names" and value == [""]: + raw[raw_name] = [] + # If this is one of our list of string fields, then we can just assign + # the value, since email *only* has strings, and our get_all() call + # above ensures that this is a list. + elif raw_name in _LIST_FIELDS: + raw[raw_name] = value + # Special Case: Keywords + # The keywords field is implemented in the metadata spec as a str, + # but it conceptually is a list of strings, and is serialized using + # ", ".join(keywords), so we'll do some light data massaging to turn + # this into what it logically is. + elif raw_name == "keywords" and len(value) == 1: + raw[raw_name] = _parse_keywords(value[0]) + # Special Case: Project-URL + # The project urls is implemented in the metadata spec as a list of + # specially-formatted strings that represent a key and a value, which + # is fundamentally a mapping, however the email format doesn't support + # mappings in a sane way, so it was crammed into a list of strings + # instead. + # + # We will do a little light data massaging to turn this into a map as + # it logically should be. 
+ elif raw_name == "project_urls": + try: + raw[raw_name] = _parse_project_urls(value) + except KeyError: + unparsed[name] = value + # Nothing that we've done has managed to parse this, so it'll just + # throw it in our unparsable data and move on. + else: + unparsed[name] = value + + # We need to support getting the Description from the message payload in + # addition to getting it from the the headers. This does mean, though, there + # is the possibility of it being set both ways, in which case we put both + # in 'unparsed' since we don't know which is right. + try: + payload = _get_payload(parsed, data) + except ValueError: + unparsed.setdefault("description", []).append( + parsed.get_payload(decode=isinstance(data, bytes)) # type: ignore[call-overload] + ) + else: + if payload: + # Check to see if we've already got a description, if so then both + # it, and this body move to unparsable. + if "description" in raw: + description_header = cast("str", raw.pop("description")) + unparsed.setdefault("description", []).extend( + [description_header, payload] + ) + elif "description" in unparsed: + unparsed["description"].append(payload) + else: + raw["description"] = payload + + # We need to cast our `raw` to a metadata, because a TypedDict only support + # literal key names, but we're computing our key names on purpose, but the + # way this function is implemented, our `TypedDict` can only have valid key + # names. + return cast("RawMetadata", raw), unparsed + + +_NOT_FOUND = object() + + +# Keep the two values in sync. +_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4", "2.5"] +_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4", "2.5"] + +_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"]) + + +class _Validator(Generic[T]): + """Validate a metadata field. + + All _process_*() methods correspond to a core metadata field. The method is + called with the field's raw value. If the raw value is valid it is returned + in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field). + If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause + as appropriate). + """ + + name: str + raw_name: str + added: _MetadataVersion + + def __init__( + self, + *, + added: _MetadataVersion = "1.0", + ) -> None: + self.added = added + + def __set_name__(self, _owner: Metadata, name: str) -> None: + self.name = name + self.raw_name = _RAW_TO_EMAIL_MAPPING[name] + + def __get__(self, instance: Metadata, _owner: type[Metadata]) -> T: + # With Python 3.8, the caching can be replaced with functools.cached_property(). + # No need to check the cache as attribute lookup will resolve into the + # instance's __dict__ before __get__ is called. + cache = instance.__dict__ + value = instance._raw.get(self.name) + + # To make the _process_* methods easier, we'll check if the value is None + # and if this field is NOT a required attribute, and if both of those + # things are true, we'll skip the the converter. This will mean that the + # converters never have to deal with the None union. 
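Putting parse_email above to work on a small, made-up METADATA document (assuming the vendored packaging.metadata is importable): recognised fields land in the first dict, anything unrecognised or malformed in the second.

    from packaging.metadata import parse_email

    META = (
        "Metadata-Version: 2.1\n"
        "Name: example-project\n"
        "Version: 1.0.0\n"
        "Keywords: packaging,metadata\n"
        "Project-URL: Homepage, https://example.org\n"
        "\n"
        "A longer description lives in the message body.\n"
    )

    raw, unparsed = parse_email(META)
    raw["name"]          # 'example-project'
    raw["keywords"]      # ['packaging', 'metadata']
    raw["project_urls"]  # {'Homepage': 'https://example.org'}
    raw["description"]   # 'A longer description lives in the message body.\n'
    unparsed             # {} when every header was recognised and well-formed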
+ if self.name in _REQUIRED_ATTRS or value is not None: + try: + converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}") + except AttributeError: + pass + else: + value = converter(value) + + cache[self.name] = value + try: + del instance._raw[self.name] # type: ignore[misc] + except KeyError: + pass + + return cast("T", value) + + def _invalid_metadata( + self, msg: str, cause: Exception | None = None + ) -> InvalidMetadata: + exc = InvalidMetadata( + self.raw_name, msg.format_map({"field": repr(self.raw_name)}) + ) + exc.__cause__ = cause + return exc + + def _process_metadata_version(self, value: str) -> _MetadataVersion: + # Implicitly makes Metadata-Version required. + if value not in _VALID_METADATA_VERSIONS: + raise self._invalid_metadata(f"{value!r} is not a valid metadata version") + return cast("_MetadataVersion", value) + + def _process_name(self, value: str) -> str: + if not value: + raise self._invalid_metadata("{field} is a required field") + # Validate the name as a side-effect. + try: + utils.canonicalize_name(value, validate=True) + except utils.InvalidName as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) from exc + else: + return value + + def _process_version(self, value: str) -> version_module.Version: + if not value: + raise self._invalid_metadata("{field} is a required field") + try: + return version_module.parse(value) + except version_module.InvalidVersion as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) from exc + + def _process_summary(self, value: str) -> str: + """Check the field contains no newlines.""" + if "\n" in value: + raise self._invalid_metadata("{field} must be a single line") + return value + + def _process_description_content_type(self, value: str) -> str: + content_types = {"text/plain", "text/x-rst", "text/markdown"} + message = email.message.EmailMessage() + message["content-type"] = value + + content_type, parameters = ( + # Defaults to `text/plain` if parsing failed. + message.get_content_type().lower(), + message["content-type"].params, + ) + # Check if content-type is valid or defaulted to `text/plain` and thus was + # not parseable. + if content_type not in content_types or content_type not in value.lower(): + raise self._invalid_metadata( + f"{{field}} must be one of {list(content_types)}, not {value!r}" + ) + + charset = parameters.get("charset", "UTF-8") + if charset != "UTF-8": + raise self._invalid_metadata( + f"{{field}} can only specify the UTF-8 charset, not {list(charset)}" + ) + + markdown_variants = {"GFM", "CommonMark"} + variant = parameters.get("variant", "GFM") # Use an acceptable default. 
+ if content_type == "text/markdown" and variant not in markdown_variants: + raise self._invalid_metadata( + f"valid Markdown variants for {{field}} are {list(markdown_variants)}, " + f"not {variant!r}", + ) + return value + + def _process_dynamic(self, value: list[str]) -> list[str]: + for dynamic_field in map(str.lower, value): + if dynamic_field in {"name", "version", "metadata-version"}: + raise self._invalid_metadata( + f"{dynamic_field!r} is not allowed as a dynamic field" + ) + elif dynamic_field not in _EMAIL_TO_RAW_MAPPING: + raise self._invalid_metadata( + f"{dynamic_field!r} is not a valid dynamic field" + ) + return list(map(str.lower, value)) + + def _process_provides_extra( + self, + value: list[str], + ) -> list[utils.NormalizedName]: + normalized_names = [] + try: + for name in value: + normalized_names.append(utils.canonicalize_name(name, validate=True)) + except utils.InvalidName as exc: + raise self._invalid_metadata( + f"{name!r} is invalid for {{field}}", cause=exc + ) from exc + else: + return normalized_names + + def _process_requires_python(self, value: str) -> specifiers.SpecifierSet: + try: + return specifiers.SpecifierSet(value) + except specifiers.InvalidSpecifier as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) from exc + + def _process_requires_dist( + self, + value: list[str], + ) -> list[requirements.Requirement]: + reqs = [] + try: + for req in value: + reqs.append(requirements.Requirement(req)) + except requirements.InvalidRequirement as exc: + raise self._invalid_metadata( + f"{req!r} is invalid for {{field}}", cause=exc + ) from exc + else: + return reqs + + def _process_license_expression(self, value: str) -> NormalizedLicenseExpression: + try: + return licenses.canonicalize_license_expression(value) + except ValueError as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) from exc + + def _process_license_files(self, value: list[str]) -> list[str]: + paths = [] + for path in value: + if ".." in path: + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, " + "parent directory indicators are not allowed" + ) + if "*" in path: + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, paths must be resolved" + ) + if ( + pathlib.PurePosixPath(path).is_absolute() + or pathlib.PureWindowsPath(path).is_absolute() + ): + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, paths must be relative" + ) + if pathlib.PureWindowsPath(path).as_posix() != path: + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, paths must use '/' delimiter" + ) + paths.append(path) + return paths + + def _process_import_names(self, value: list[str]) -> list[str]: + for import_name in value: + name, semicolon, private = import_name.partition(";") + name = name.rstrip() + for identifier in name.split("."): + if not identifier.isidentifier(): + raise self._invalid_metadata( + f"{name!r} is invalid for {{field}}; " + f"{identifier!r} is not a valid identifier" + ) + elif keyword.iskeyword(identifier): + raise self._invalid_metadata( + f"{name!r} is invalid for {{field}}; " + f"{identifier!r} is a keyword" + ) + if semicolon and private.lstrip() != "private": + raise self._invalid_metadata( + f"{import_name!r} is invalid for {{field}}; " + "the only valid option is 'private'" + ) + return value + + _process_import_namespaces = _process_import_names + + +class Metadata: + """Representation of distribution metadata. 
+ + Compared to :class:`RawMetadata`, this class provides objects representing + metadata fields instead of only using built-in types. Any invalid metadata + will cause :exc:`InvalidMetadata` to be raised (with a + :py:attr:`~BaseException.__cause__` attribute as appropriate). + """ + + _raw: RawMetadata + + @classmethod + def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> Metadata: + """Create an instance from :class:`RawMetadata`. + + If *validate* is true, all metadata will be validated. All exceptions + related to validation will be gathered and raised as an :class:`ExceptionGroup`. + """ + ins = cls() + ins._raw = data.copy() # Mutations occur due to caching enriched values. + + if validate: + exceptions: list[Exception] = [] + try: + metadata_version = ins.metadata_version + metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version) + except InvalidMetadata as metadata_version_exc: + exceptions.append(metadata_version_exc) + metadata_version = None + + # Make sure to check for the fields that are present, the required + # fields (so their absence can be reported). + fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS + # Remove fields that have already been checked. + fields_to_check -= {"metadata_version"} + + for key in fields_to_check: + try: + if metadata_version: + # Can't use getattr() as that triggers descriptor protocol which + # will fail due to no value for the instance argument. + try: + field_metadata_version = cls.__dict__[key].added + except KeyError: + exc = InvalidMetadata(key, f"unrecognized field: {key!r}") + exceptions.append(exc) + continue + field_age = _VALID_METADATA_VERSIONS.index( + field_metadata_version + ) + if field_age > metadata_age: + field = _RAW_TO_EMAIL_MAPPING[key] + exc = InvalidMetadata( + field, + f"{field} introduced in metadata version " + f"{field_metadata_version}, not {metadata_version}", + ) + exceptions.append(exc) + continue + getattr(ins, key) + except InvalidMetadata as exc: + exceptions.append(exc) + + if exceptions: + raise ExceptionGroup("invalid metadata", exceptions) + + return ins + + @classmethod + def from_email(cls, data: bytes | str, *, validate: bool = True) -> Metadata: + """Parse metadata from email headers. + + If *validate* is true, the metadata will be validated. All exceptions + related to validation will be gathered and raised as an :class:`ExceptionGroup`. + """ + raw, unparsed = parse_email(data) + + if validate: + exceptions: list[Exception] = [] + for unparsed_key in unparsed: + if unparsed_key in _EMAIL_TO_RAW_MAPPING: + message = f"{unparsed_key!r} has invalid data" + else: + message = f"unrecognized field: {unparsed_key!r}" + exceptions.append(InvalidMetadata(unparsed_key, message)) + + if exceptions: + raise ExceptionGroup("unparsed", exceptions) + + try: + return cls.from_raw(raw, validate=validate) + except ExceptionGroup as exc_group: + raise ExceptionGroup( + "invalid or unparsed metadata", exc_group.exceptions + ) from None + + metadata_version: _Validator[_MetadataVersion] = _Validator() + """:external:ref:`core-metadata-metadata-version` + (required; validated to be a valid metadata version)""" + # `name` is not normalized/typed to NormalizedName so as to provide access to + # the original/raw name. 
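A sketch of the enriched Metadata API defined above, with illustrative input (assumes the vendored packaging.metadata is importable): valid input yields typed attributes, while validation failures are collected into a single ExceptionGroup.

    from packaging.metadata import ExceptionGroup, Metadata

    meta = Metadata.from_email(
        "Metadata-Version: 2.1\n"
        "Name: example-project\n"
        "Version: 1.0.0\n"
    )
    meta.name, meta.version          # ('example-project', <Version('1.0.0')>)

    try:
        Metadata.from_email("Metadata-Version: 2.1\nName: example-project\n")
    except ExceptionGroup as group:
        [str(exc) for exc in group.exceptions]   # e.g. ["'version' is a required field"]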
+ name: _Validator[str] = _Validator() + """:external:ref:`core-metadata-name` + (required; validated using :func:`~packaging.utils.canonicalize_name` and its + *validate* parameter)""" + version: _Validator[version_module.Version] = _Validator() + """:external:ref:`core-metadata-version` (required)""" + dynamic: _Validator[list[str] | None] = _Validator( + added="2.2", + ) + """:external:ref:`core-metadata-dynamic` + (validated against core metadata field names and lowercased)""" + platforms: _Validator[list[str] | None] = _Validator() + """:external:ref:`core-metadata-platform`""" + supported_platforms: _Validator[list[str] | None] = _Validator(added="1.1") + """:external:ref:`core-metadata-supported-platform`""" + summary: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-summary` (validated to contain no newlines)""" + description: _Validator[str | None] = _Validator() # TODO 2.1: can be in body + """:external:ref:`core-metadata-description`""" + description_content_type: _Validator[str | None] = _Validator(added="2.1") + """:external:ref:`core-metadata-description-content-type` (validated)""" + keywords: _Validator[list[str] | None] = _Validator() + """:external:ref:`core-metadata-keywords`""" + home_page: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-home-page`""" + download_url: _Validator[str | None] = _Validator(added="1.1") + """:external:ref:`core-metadata-download-url`""" + author: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-author`""" + author_email: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-author-email`""" + maintainer: _Validator[str | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-maintainer`""" + maintainer_email: _Validator[str | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-maintainer-email`""" + license: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-license`""" + license_expression: _Validator[NormalizedLicenseExpression | None] = _Validator( + added="2.4" + ) + """:external:ref:`core-metadata-license-expression`""" + license_files: _Validator[list[str] | None] = _Validator(added="2.4") + """:external:ref:`core-metadata-license-file`""" + classifiers: _Validator[list[str] | None] = _Validator(added="1.1") + """:external:ref:`core-metadata-classifier`""" + requires_dist: _Validator[list[requirements.Requirement] | None] = _Validator( + added="1.2" + ) + """:external:ref:`core-metadata-requires-dist`""" + requires_python: _Validator[specifiers.SpecifierSet | None] = _Validator( + added="1.2" + ) + """:external:ref:`core-metadata-requires-python`""" + # Because `Requires-External` allows for non-PEP 440 version specifiers, we + # don't do any processing on the values. + requires_external: _Validator[list[str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-requires-external`""" + project_urls: _Validator[dict[str, str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-project-url`""" + # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation + # regardless of metadata version. 
+ provides_extra: _Validator[list[utils.NormalizedName] | None] = _Validator( + added="2.1", + ) + """:external:ref:`core-metadata-provides-extra`""" + provides_dist: _Validator[list[str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-provides-dist`""" + obsoletes_dist: _Validator[list[str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-obsoletes-dist`""" + import_names: _Validator[list[str] | None] = _Validator(added="2.5") + """:external:ref:`core-metadata-import-name`""" + import_namespaces: _Validator[list[str] | None] = _Validator(added="2.5") + """:external:ref:`core-metadata-import-namespace`""" + requires: _Validator[list[str] | None] = _Validator(added="1.1") + """``Requires`` (deprecated)""" + provides: _Validator[list[str] | None] = _Validator(added="1.1") + """``Provides`` (deprecated)""" + obsoletes: _Validator[list[str] | None] = _Validator(added="1.1") + """``Obsoletes`` (deprecated)""" + + def as_rfc822(self) -> RFC822Message: + """ + Return an RFC822 message with the metadata. + """ + message = RFC822Message() + self._write_metadata(message) + return message + + def _write_metadata(self, message: RFC822Message) -> None: + """ + Return an RFC822 message with the metadata. + """ + for name, validator in self.__class__.__dict__.items(): + if isinstance(validator, _Validator) and name != "description": + value = getattr(self, name) + email_name = _RAW_TO_EMAIL_MAPPING[name] + if value is not None: + if email_name == "project-url": + for label, url in value.items(): + message[email_name] = f"{label}, {url}" + elif email_name == "keywords": + message[email_name] = ",".join(value) + elif email_name == "import-name" and value == []: + message[email_name] = "" + elif isinstance(value, list): + for item in value: + message[email_name] = str(item) + else: + message[email_name] = str(value) + + # The description is a special case because it is in the body of the message. + if self.description is not None: + message.set_payload(self.description) diff --git a/.venv/lib/python3.12/site-packages/packaging/py.typed b/.venv/lib/python3.12/site-packages/packaging/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/packaging/pylock.py b/.venv/lib/python3.12/site-packages/packaging/pylock.py new file mode 100644 index 0000000..a564f15 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging/pylock.py @@ -0,0 +1,635 @@ +from __future__ import annotations + +import dataclasses +import logging +import re +from collections.abc import Mapping, Sequence +from dataclasses import dataclass +from datetime import datetime +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Protocol, + TypeVar, +) + +from .markers import Marker +from .specifiers import SpecifierSet +from .utils import NormalizedName, is_normalized_name +from .version import Version + +if TYPE_CHECKING: # pragma: no cover + from pathlib import Path + + from typing_extensions import Self + +_logger = logging.getLogger(__name__) + +__all__ = [ + "Package", + "PackageArchive", + "PackageDirectory", + "PackageSdist", + "PackageVcs", + "PackageWheel", + "Pylock", + "PylockUnsupportedVersionError", + "PylockValidationError", + "is_valid_pylock_path", +] + +_T = TypeVar("_T") +_T2 = TypeVar("_T2") + + +class _FromMappingProtocol(Protocol): # pragma: no cover + @classmethod + def _from_dict(cls, d: Mapping[str, Any]) -> Self: ... 
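+
+# Illustrative sketch (not part of the upstream ``packaging.pylock`` module):
+# one plausible way a consumer could load and validate a pylock file, using only
+# names defined in this file (``is_valid_pylock_path``, ``Pylock.from_dict``,
+# ``PylockValidationError``) plus the standard-library ``tomllib`` parser
+# available on Python 3.11+. The helper name ``_example_load_pylock`` is
+# hypothetical.
+def _example_load_pylock(path: Path) -> Pylock:
+    """Parse a pylock TOML file and validate it into a ``Pylock`` instance."""
+    import tomllib  # local import; stdlib on Python 3.11+
+
+    if not is_valid_pylock_path(path):
+        raise ValueError(f"{path.name!r} is not a valid pylock file name")
+    with path.open("rb") as f:
+        data = tomllib.load(f)
+    # Pylock.from_dict (defined below) raises PylockValidationError if the
+    # input data is not spec-compliant.
+    return Pylock.from_dict(data)
+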
+ + +_FromMappingProtocolT = TypeVar("_FromMappingProtocolT", bound=_FromMappingProtocol) + + +_PYLOCK_FILE_NAME_RE = re.compile(r"^pylock\.([^.]+)\.toml$") + + +def is_valid_pylock_path(path: Path) -> bool: + """Check if the given path is a valid pylock file path.""" + return path.name == "pylock.toml" or bool(_PYLOCK_FILE_NAME_RE.match(path.name)) + + +def _toml_key(key: str) -> str: + return key.replace("_", "-") + + +def _toml_value(key: str, value: Any) -> Any: # noqa: ANN401 + if isinstance(value, (Version, Marker, SpecifierSet)): + return str(value) + if isinstance(value, Sequence) and key == "environments": + return [str(v) for v in value] + return value + + +def _toml_dict_factory(data: list[tuple[str, Any]]) -> dict[str, Any]: + return { + _toml_key(key): _toml_value(key, value) + for key, value in data + if value is not None + } + + +def _get(d: Mapping[str, Any], expected_type: type[_T], key: str) -> _T | None: + """Get a value from the dictionary and verify it's the expected type.""" + if (value := d.get(key)) is None: + return None + if not isinstance(value, expected_type): + raise PylockValidationError( + f"Unexpected type {type(value).__name__} " + f"(expected {expected_type.__name__})", + context=key, + ) + return value + + +def _get_required(d: Mapping[str, Any], expected_type: type[_T], key: str) -> _T: + """Get a required value from the dictionary and verify it's the expected type.""" + if (value := _get(d, expected_type, key)) is None: + raise _PylockRequiredKeyError(key) + return value + + +def _get_sequence( + d: Mapping[str, Any], expected_item_type: type[_T], key: str +) -> Sequence[_T] | None: + """Get a list value from the dictionary and verify it's the expected items type.""" + if (value := _get(d, Sequence, key)) is None: # type: ignore[type-abstract] + return None + if isinstance(value, (str, bytes)): + # special case: str and bytes are Sequences, but we want to reject it + raise PylockValidationError( + f"Unexpected type {type(value).__name__} (expected Sequence)", + context=key, + ) + for i, item in enumerate(value): + if not isinstance(item, expected_item_type): + raise PylockValidationError( + f"Unexpected type {type(item).__name__} " + f"(expected {expected_item_type.__name__})", + context=f"{key}[{i}]", + ) + return value + + +def _get_as( + d: Mapping[str, Any], + expected_type: type[_T], + target_type: Callable[[_T], _T2], + key: str, +) -> _T2 | None: + """Get a value from the dictionary, verify it's the expected type, + and convert to the target type. + + This assumes the target_type constructor accepts the value. 
+ """ + if (value := _get(d, expected_type, key)) is None: + return None + try: + return target_type(value) + except Exception as e: + raise PylockValidationError(e, context=key) from e + + +def _get_required_as( + d: Mapping[str, Any], + expected_type: type[_T], + target_type: Callable[[_T], _T2], + key: str, +) -> _T2: + """Get a required value from the dict, verify it's the expected type, + and convert to the target type.""" + if (value := _get_as(d, expected_type, target_type, key)) is None: + raise _PylockRequiredKeyError(key) + return value + + +def _get_sequence_as( + d: Mapping[str, Any], + expected_item_type: type[_T], + target_item_type: Callable[[_T], _T2], + key: str, +) -> list[_T2] | None: + """Get list value from dictionary and verify expected items type.""" + if (value := _get_sequence(d, expected_item_type, key)) is None: + return None + result = [] + try: + for item in value: + typed_item = target_item_type(item) + result.append(typed_item) + except Exception as e: + raise PylockValidationError(e, context=f"{key}[{len(result)}]") from e + return result + + +def _get_object( + d: Mapping[str, Any], target_type: type[_FromMappingProtocolT], key: str +) -> _FromMappingProtocolT | None: + """Get a dictionary value from the dictionary and convert it to a dataclass.""" + if (value := _get(d, Mapping, key)) is None: # type: ignore[type-abstract] + return None + try: + return target_type._from_dict(value) + except Exception as e: + raise PylockValidationError(e, context=key) from e + + +def _get_sequence_of_objects( + d: Mapping[str, Any], target_item_type: type[_FromMappingProtocolT], key: str +) -> list[_FromMappingProtocolT] | None: + """Get a list value from the dictionary and convert its items to a dataclass.""" + if (value := _get_sequence(d, Mapping, key)) is None: # type: ignore[type-abstract] + return None + result: list[_FromMappingProtocolT] = [] + try: + for item in value: + typed_item = target_item_type._from_dict(item) + result.append(typed_item) + except Exception as e: + raise PylockValidationError(e, context=f"{key}[{len(result)}]") from e + return result + + +def _get_required_sequence_of_objects( + d: Mapping[str, Any], target_item_type: type[_FromMappingProtocolT], key: str +) -> Sequence[_FromMappingProtocolT]: + """Get a required list value from the dictionary and convert its items to a + dataclass.""" + if (result := _get_sequence_of_objects(d, target_item_type, key)) is None: + raise _PylockRequiredKeyError(key) + return result + + +def _validate_normalized_name(name: str) -> NormalizedName: + """Validate that a string is a NormalizedName.""" + if not is_normalized_name(name): + raise PylockValidationError(f"Name {name!r} is not normalized") + return NormalizedName(name) + + +def _validate_path_url(path: str | None, url: str | None) -> None: + if not path and not url: + raise PylockValidationError("path or url must be provided") + + +def _validate_hashes(hashes: Mapping[str, Any]) -> Mapping[str, Any]: + if not hashes: + raise PylockValidationError("At least one hash must be provided") + if not all(isinstance(hash_val, str) for hash_val in hashes.values()): + raise PylockValidationError("Hash values must be strings") + return hashes + + +class PylockValidationError(Exception): + """Raised when when input data is not spec-compliant.""" + + context: str | None = None + message: str + + def __init__( + self, + cause: str | Exception, + *, + context: str | None = None, + ) -> None: + if isinstance(cause, PylockValidationError): + if cause.context: + 
self.context = ( + f"{context}.{cause.context}" if context else cause.context + ) + else: + self.context = context + self.message = cause.message + else: + self.context = context + self.message = str(cause) + + def __str__(self) -> str: + if self.context: + return f"{self.message} in {self.context!r}" + return self.message + + +class _PylockRequiredKeyError(PylockValidationError): + def __init__(self, key: str) -> None: + super().__init__("Missing required value", context=key) + + +class PylockUnsupportedVersionError(PylockValidationError): + """Raised when encountering an unsupported `lock_version`.""" + + +@dataclass(frozen=True, init=False) +class PackageVcs: + type: str + url: str | None = None + path: str | None = None + requested_revision: str | None = None + commit_id: str # type: ignore[misc] + subdirectory: str | None = None + + def __init__( + self, + *, + type: str, + url: str | None = None, + path: str | None = None, + requested_revision: str | None = None, + commit_id: str, + subdirectory: str | None = None, + ) -> None: + # In Python 3.10+ make dataclass kw_only=True and remove __init__ + object.__setattr__(self, "type", type) + object.__setattr__(self, "url", url) + object.__setattr__(self, "path", path) + object.__setattr__(self, "requested_revision", requested_revision) + object.__setattr__(self, "commit_id", commit_id) + object.__setattr__(self, "subdirectory", subdirectory) + + @classmethod + def _from_dict(cls, d: Mapping[str, Any]) -> Self: + package_vcs = cls( + type=_get_required(d, str, "type"), + url=_get(d, str, "url"), + path=_get(d, str, "path"), + requested_revision=_get(d, str, "requested-revision"), + commit_id=_get_required(d, str, "commit-id"), + subdirectory=_get(d, str, "subdirectory"), + ) + _validate_path_url(package_vcs.path, package_vcs.url) + return package_vcs + + +@dataclass(frozen=True, init=False) +class PackageDirectory: + path: str + editable: bool | None = None + subdirectory: str | None = None + + def __init__( + self, + *, + path: str, + editable: bool | None = None, + subdirectory: str | None = None, + ) -> None: + # In Python 3.10+ make dataclass kw_only=True and remove __init__ + object.__setattr__(self, "path", path) + object.__setattr__(self, "editable", editable) + object.__setattr__(self, "subdirectory", subdirectory) + + @classmethod + def _from_dict(cls, d: Mapping[str, Any]) -> Self: + return cls( + path=_get_required(d, str, "path"), + editable=_get(d, bool, "editable"), + subdirectory=_get(d, str, "subdirectory"), + ) + + +@dataclass(frozen=True, init=False) +class PackageArchive: + url: str | None = None + path: str | None = None + size: int | None = None + upload_time: datetime | None = None + hashes: Mapping[str, str] # type: ignore[misc] + subdirectory: str | None = None + + def __init__( + self, + *, + url: str | None = None, + path: str | None = None, + size: int | None = None, + upload_time: datetime | None = None, + hashes: Mapping[str, str], + subdirectory: str | None = None, + ) -> None: + # In Python 3.10+ make dataclass kw_only=True and remove __init__ + object.__setattr__(self, "url", url) + object.__setattr__(self, "path", path) + object.__setattr__(self, "size", size) + object.__setattr__(self, "upload_time", upload_time) + object.__setattr__(self, "hashes", hashes) + object.__setattr__(self, "subdirectory", subdirectory) + + @classmethod + def _from_dict(cls, d: Mapping[str, Any]) -> Self: + package_archive = cls( + url=_get(d, str, "url"), + path=_get(d, str, "path"), + size=_get(d, int, "size"), + 
upload_time=_get(d, datetime, "upload-time"), + hashes=_get_required_as(d, Mapping, _validate_hashes, "hashes"), # type: ignore[type-abstract] + subdirectory=_get(d, str, "subdirectory"), + ) + _validate_path_url(package_archive.path, package_archive.url) + return package_archive + + +@dataclass(frozen=True, init=False) +class PackageSdist: + name: str | None = None + upload_time: datetime | None = None + url: str | None = None + path: str | None = None + size: int | None = None + hashes: Mapping[str, str] # type: ignore[misc] + + def __init__( + self, + *, + name: str | None = None, + upload_time: datetime | None = None, + url: str | None = None, + path: str | None = None, + size: int | None = None, + hashes: Mapping[str, str], + ) -> None: + # In Python 3.10+ make dataclass kw_only=True and remove __init__ + object.__setattr__(self, "name", name) + object.__setattr__(self, "upload_time", upload_time) + object.__setattr__(self, "url", url) + object.__setattr__(self, "path", path) + object.__setattr__(self, "size", size) + object.__setattr__(self, "hashes", hashes) + + @classmethod + def _from_dict(cls, d: Mapping[str, Any]) -> Self: + package_sdist = cls( + name=_get(d, str, "name"), + upload_time=_get(d, datetime, "upload-time"), + url=_get(d, str, "url"), + path=_get(d, str, "path"), + size=_get(d, int, "size"), + hashes=_get_required_as(d, Mapping, _validate_hashes, "hashes"), # type: ignore[type-abstract] + ) + _validate_path_url(package_sdist.path, package_sdist.url) + return package_sdist + + +@dataclass(frozen=True, init=False) +class PackageWheel: + name: str | None = None + upload_time: datetime | None = None + url: str | None = None + path: str | None = None + size: int | None = None + hashes: Mapping[str, str] # type: ignore[misc] + + def __init__( + self, + *, + name: str | None = None, + upload_time: datetime | None = None, + url: str | None = None, + path: str | None = None, + size: int | None = None, + hashes: Mapping[str, str], + ) -> None: + # In Python 3.10+ make dataclass kw_only=True and remove __init__ + object.__setattr__(self, "name", name) + object.__setattr__(self, "upload_time", upload_time) + object.__setattr__(self, "url", url) + object.__setattr__(self, "path", path) + object.__setattr__(self, "size", size) + object.__setattr__(self, "hashes", hashes) + + @classmethod + def _from_dict(cls, d: Mapping[str, Any]) -> Self: + package_wheel = cls( + name=_get(d, str, "name"), + upload_time=_get(d, datetime, "upload-time"), + url=_get(d, str, "url"), + path=_get(d, str, "path"), + size=_get(d, int, "size"), + hashes=_get_required_as(d, Mapping, _validate_hashes, "hashes"), # type: ignore[type-abstract] + ) + _validate_path_url(package_wheel.path, package_wheel.url) + return package_wheel + + +@dataclass(frozen=True, init=False) +class Package: + name: NormalizedName + version: Version | None = None + marker: Marker | None = None + requires_python: SpecifierSet | None = None + dependencies: Sequence[Mapping[str, Any]] | None = None + vcs: PackageVcs | None = None + directory: PackageDirectory | None = None + archive: PackageArchive | None = None + index: str | None = None + sdist: PackageSdist | None = None + wheels: Sequence[PackageWheel] | None = None + attestation_identities: Sequence[Mapping[str, Any]] | None = None + tool: Mapping[str, Any] | None = None + + def __init__( + self, + *, + name: NormalizedName, + version: Version | None = None, + marker: Marker | None = None, + requires_python: SpecifierSet | None = None, + dependencies: Sequence[Mapping[str, Any]] 
| None = None, + vcs: PackageVcs | None = None, + directory: PackageDirectory | None = None, + archive: PackageArchive | None = None, + index: str | None = None, + sdist: PackageSdist | None = None, + wheels: Sequence[PackageWheel] | None = None, + attestation_identities: Sequence[Mapping[str, Any]] | None = None, + tool: Mapping[str, Any] | None = None, + ) -> None: + # In Python 3.10+ make dataclass kw_only=True and remove __init__ + object.__setattr__(self, "name", name) + object.__setattr__(self, "version", version) + object.__setattr__(self, "marker", marker) + object.__setattr__(self, "requires_python", requires_python) + object.__setattr__(self, "dependencies", dependencies) + object.__setattr__(self, "vcs", vcs) + object.__setattr__(self, "directory", directory) + object.__setattr__(self, "archive", archive) + object.__setattr__(self, "index", index) + object.__setattr__(self, "sdist", sdist) + object.__setattr__(self, "wheels", wheels) + object.__setattr__(self, "attestation_identities", attestation_identities) + object.__setattr__(self, "tool", tool) + + @classmethod + def _from_dict(cls, d: Mapping[str, Any]) -> Self: + package = cls( + name=_get_required_as(d, str, _validate_normalized_name, "name"), + version=_get_as(d, str, Version, "version"), + requires_python=_get_as(d, str, SpecifierSet, "requires-python"), + dependencies=_get_sequence(d, Mapping, "dependencies"), # type: ignore[type-abstract] + marker=_get_as(d, str, Marker, "marker"), + vcs=_get_object(d, PackageVcs, "vcs"), + directory=_get_object(d, PackageDirectory, "directory"), + archive=_get_object(d, PackageArchive, "archive"), + index=_get(d, str, "index"), + sdist=_get_object(d, PackageSdist, "sdist"), + wheels=_get_sequence_of_objects(d, PackageWheel, "wheels"), + attestation_identities=_get_sequence(d, Mapping, "attestation-identities"), # type: ignore[type-abstract] + tool=_get(d, Mapping, "tool"), # type: ignore[type-abstract] + ) + distributions = bool(package.sdist) + len(package.wheels or []) + direct_urls = ( + bool(package.vcs) + bool(package.directory) + bool(package.archive) + ) + if distributions > 0 and direct_urls > 0: + raise PylockValidationError( + "None of vcs, directory, archive must be set if sdist or wheels are set" + ) + if distributions == 0 and direct_urls != 1: + raise PylockValidationError( + "Exactly one of vcs, directory, archive must be set " + "if sdist and wheels are not set" + ) + try: + for i, attestation_identity in enumerate( # noqa: B007 + package.attestation_identities or [] + ): + _get_required(attestation_identity, str, "kind") + except Exception as e: + raise PylockValidationError( + e, context=f"attestation-identities[{i}]" + ) from e + return package + + @property + def is_direct(self) -> bool: + return not (self.sdist or self.wheels) + + +@dataclass(frozen=True, init=False) +class Pylock: + """A class representing a pylock file.""" + + lock_version: Version + environments: Sequence[Marker] | None = None + requires_python: SpecifierSet | None = None + extras: Sequence[NormalizedName] | None = None + dependency_groups: Sequence[str] | None = None + default_groups: Sequence[str] | None = None + created_by: str # type: ignore[misc] + packages: Sequence[Package] # type: ignore[misc] + tool: Mapping[str, Any] | None = None + + def __init__( + self, + *, + lock_version: Version, + environments: Sequence[Marker] | None = None, + requires_python: SpecifierSet | None = None, + extras: Sequence[NormalizedName] | None = None, + dependency_groups: Sequence[str] | None = None, + 
default_groups: Sequence[str] | None = None, + created_by: str, + packages: Sequence[Package], + tool: Mapping[str, Any] | None = None, + ) -> None: + # In Python 3.10+ make dataclass kw_only=True and remove __init__ + object.__setattr__(self, "lock_version", lock_version) + object.__setattr__(self, "environments", environments) + object.__setattr__(self, "requires_python", requires_python) + object.__setattr__(self, "extras", extras) + object.__setattr__(self, "dependency_groups", dependency_groups) + object.__setattr__(self, "default_groups", default_groups) + object.__setattr__(self, "created_by", created_by) + object.__setattr__(self, "packages", packages) + object.__setattr__(self, "tool", tool) + + @classmethod + def _from_dict(cls, d: Mapping[str, Any]) -> Self: + pylock = cls( + lock_version=_get_required_as(d, str, Version, "lock-version"), + environments=_get_sequence_as(d, str, Marker, "environments"), + extras=_get_sequence_as(d, str, _validate_normalized_name, "extras"), + dependency_groups=_get_sequence(d, str, "dependency-groups"), + default_groups=_get_sequence(d, str, "default-groups"), + created_by=_get_required(d, str, "created-by"), + requires_python=_get_as(d, str, SpecifierSet, "requires-python"), + packages=_get_required_sequence_of_objects(d, Package, "packages"), + tool=_get(d, Mapping, "tool"), # type: ignore[type-abstract] + ) + if not Version("1") <= pylock.lock_version < Version("2"): + raise PylockUnsupportedVersionError( + f"pylock version {pylock.lock_version} is not supported" + ) + if pylock.lock_version > Version("1.0"): + _logger.warning( + "pylock minor version %s is not supported", pylock.lock_version + ) + return pylock + + @classmethod + def from_dict(cls, d: Mapping[str, Any], /) -> Self: + """Create and validate a Pylock instance from a TOML dictionary. + + Raises :class:`PylockValidationError` if the input data is not + spec-compliant. + """ + return cls._from_dict(d) + + def to_dict(self) -> Mapping[str, Any]: + """Convert the Pylock instance to a TOML dictionary.""" + return dataclasses.asdict(self, dict_factory=_toml_dict_factory) + + def validate(self) -> None: + """Validate the Pylock instance against the specification. + + Raises :class:`PylockValidationError` otherwise.""" + self.from_dict(self.to_dict()) diff --git a/.venv/lib/python3.12/site-packages/packaging/requirements.py b/.venv/lib/python3.12/site-packages/packaging/requirements.py new file mode 100644 index 0000000..3079be6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging/requirements.py @@ -0,0 +1,86 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import annotations + +from typing import Iterator + +from ._parser import parse_requirement as _parse_requirement +from ._tokenizer import ParserSyntaxError +from .markers import Marker, _normalize_extra_values +from .specifiers import SpecifierSet +from .utils import canonicalize_name + + +class InvalidRequirement(ValueError): + """ + An invalid requirement was found, users should refer to PEP 508. + """ + + +class Requirement: + """Parse a requirement. + + Parse a given requirement string into its parts, such as name, specifier, + URL, and extras. Raises InvalidRequirement on a badly-formed requirement + string. + """ + + # TODO: Can we test whether something is contained within a requirement? + # If so how do we do that? 
Do we need to test against the _name_ of + # the thing as well as the version? What about the markers? + # TODO: Can we normalize the name and extra name? + + def __init__(self, requirement_string: str) -> None: + try: + parsed = _parse_requirement(requirement_string) + except ParserSyntaxError as e: + raise InvalidRequirement(str(e)) from e + + self.name: str = parsed.name + self.url: str | None = parsed.url or None + self.extras: set[str] = set(parsed.extras or []) + self.specifier: SpecifierSet = SpecifierSet(parsed.specifier) + self.marker: Marker | None = None + if parsed.marker is not None: + self.marker = Marker.__new__(Marker) + self.marker._markers = _normalize_extra_values(parsed.marker) + + def _iter_parts(self, name: str) -> Iterator[str]: + yield name + + if self.extras: + formatted_extras = ",".join(sorted(self.extras)) + yield f"[{formatted_extras}]" + + if self.specifier: + yield str(self.specifier) + + if self.url: + yield f" @ {self.url}" + if self.marker: + yield " " + + if self.marker: + yield f"; {self.marker}" + + def __str__(self) -> str: + return "".join(self._iter_parts(self.name)) + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}('{self}')>" + + def __hash__(self) -> int: + return hash(tuple(self._iter_parts(canonicalize_name(self.name)))) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Requirement): + return NotImplemented + + return ( + canonicalize_name(self.name) == canonicalize_name(other.name) + and self.extras == other.extras + and self.specifier == other.specifier + and self.url == other.url + and self.marker == other.marker + ) diff --git a/.venv/lib/python3.12/site-packages/packaging/specifiers.py b/.venv/lib/python3.12/site-packages/packaging/specifiers.py new file mode 100644 index 0000000..5d26b0d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging/specifiers.py @@ -0,0 +1,1068 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +""" +.. testsetup:: + + from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier + from packaging.version import Version +""" + +from __future__ import annotations + +import abc +import itertools +import re +from typing import Callable, Final, Iterable, Iterator, TypeVar, Union + +from .utils import canonicalize_version +from .version import InvalidVersion, Version + +UnparsedVersion = Union[Version, str] +UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion) +CallableOperator = Callable[[Version, str], bool] + + +def _coerce_version(version: UnparsedVersion) -> Version | None: + if not isinstance(version, Version): + try: + version = Version(version) + except InvalidVersion: + return None + return version + + +def _public_version(version: Version) -> Version: + return version.__replace__(local=None) + + +def _base_version(version: Version) -> Version: + return version.__replace__(pre=None, post=None, dev=None, local=None) + + +class InvalidSpecifier(ValueError): + """ + Raised when attempting to create a :class:`Specifier` with a specifier + string that is invalid. + + >>> Specifier("lolwat") + Traceback (most recent call last): + ... 
+    packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'
+    """
+
+
+class BaseSpecifier(metaclass=abc.ABCMeta):
+    __slots__ = ()
+    __match_args__ = ("_str",)
+
+    @property
+    def _str(self) -> str:
+        """Internal property for match_args"""
+        return str(self)
+
+    @abc.abstractmethod
+    def __str__(self) -> str:
+        """
+        Returns the str representation of this Specifier-like object. This
+        should be representative of the Specifier itself.
+        """
+
+    @abc.abstractmethod
+    def __hash__(self) -> int:
+        """
+        Returns a hash value for this Specifier-like object.
+        """
+
+    @abc.abstractmethod
+    def __eq__(self, other: object) -> bool:
+        """
+        Returns a boolean representing whether or not the two Specifier-like
+        objects are equal.
+
+        :param other: The other object to check against.
+        """
+
+    @property
+    @abc.abstractmethod
+    def prereleases(self) -> bool | None:
+        """Whether or not pre-releases as a whole are allowed.
+
+        This can be set to either ``True`` or ``False`` to explicitly enable or disable
+        prereleases or it can be set to ``None`` (the default) to use default semantics.
+        """
+
+    @prereleases.setter  # noqa: B027
+    def prereleases(self, value: bool) -> None:
+        """Setter for :attr:`prereleases`.
+
+        :param value: The value to set.
+        """
+
+    @abc.abstractmethod
+    def contains(self, item: str, prereleases: bool | None = None) -> bool:
+        """
+        Determines if the given item is contained within this specifier.
+        """
+
+    @abc.abstractmethod
+    def filter(
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """
+        Takes an iterable of items and filters them so that only items which
+        are contained within this specifier are allowed in it.
+        """
+
+
+class Specifier(BaseSpecifier):
+    """This class abstracts handling of version specifiers.
+
+    .. tip::
+
+        It is generally not required to instantiate this manually. You should instead
+        prefer to work with :class:`SpecifierSet` instead, which can parse
+        comma-separated version specifiers (which is what package metadata contains).
+    """
+
+    __slots__ = ("_prereleases", "_spec", "_spec_version")
+
+    _operator_regex_str = r"""
+        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
+        """
+    _version_regex_str = r"""
+        (?P<version>
+            (?:
+                # The identity operators allow for an escape hatch that will
+                # do an exact string match of the version you wish to install.
+                # This will not be parsed by PEP 440 and we cannot determine
+                # any semantic meaning from it. This operator is discouraged
+                # but included entirely as an escape hatch.
+                (?<====)    # Only match for the identity operator
+                \s*
+                [^\s;)]*    # The arbitrary version can be just about anything,
+                            # we match everything except for whitespace, a
+                            # semi-colon for marker support, and a closing paren
+                            # since versions can be enclosed in them.
+            )
+            |
+            (?:
+                # The (non)equality operators allow for wild card and local
+                # versions to be specified so we have to define these two
+                # operators separately to enable that.
+                (?<===|!=)            # Only match for equals and not equals
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+
+                # You cannot use a wild card and a pre-release, post-release, a dev or
+                # local version together so group them with a | and make them optional.
+                (?:
+                    \.\*  # Wild card syntax of .*
+                    |
+                    (?:                                  # pre release
+                        [-_\.]?
+                        (alpha|beta|preview|pre|a|b|c|rc)
+                        [-_\.]?
+                        [0-9]*
+                    )?
+                    (?:                                  # post release
+                        (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                    )?
+                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
+                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
+                )?
+            )
+            |
+            (?:
+                # The compatible operator requires at least two digits in the
+                # release segment.
+                (?<=~=)               # Only match for the compatible operator
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
+                (?:                   # pre release
+                    [-_\.]?
+                    (alpha|beta|preview|pre|a|b|c|rc)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?  # dev release
+            )
+            |
+            (?:
+                # All other operators only allow a sub set of what the
+                # (non)equality operators do. Specifically they do not allow
+                # local versions to be specified nor do they allow the prefix
+                # matching wild cards.
+                (?<!==|!=|~=)         # We have special cases for these
+                                      # operators so we want to make sure they
+                                      # don't match here.
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+            )
+        )
+        """
+
+    _regex = re.compile(
+        r"^\s*" + _operator_regex_str + _version_regex_str + r"\s*$",
+        re.VERBOSE | re.IGNORECASE,
+    )
+
+    _operators = {
+        "~=": "compatible",
+        "==": "equal",
+        "!=": "not_equal",
+        "<=": "less_than_equal",
+        ">=": "greater_than_equal",
+        "<": "less_than",
+        ">": "greater_than",
+        "===": "arbitrary",
+    }
+
+    def __init__(self, spec: str = "", prereleases: bool | None = None) -> None:
+        """Initialize a Specifier instance.
+
+        :param spec:
+            The string representation of a specifier which will be parsed and
+            normalized before use.
+        :param prereleases:
+            This tells the specifier if it should accept prerelease versions if
+            applicable or not. The default of ``None`` will autodetect it from the
+            given specifiers.
+        :raises InvalidSpecifier:
+            If the given specifier is invalid (i.e. bad syntax).
+        """
+        match = self._regex.fullmatch(spec)
+        if not match:
+            raise InvalidSpecifier(f"Invalid specifier: {spec!r}")
+
+        self._spec: tuple[str, str] = (
+            match.group("operator").strip(),
+            match.group("version").strip(),
+        )
+
+        # Store whether or not this Specifier should accept prereleases
+        self._prereleases = prereleases
+
+        # Specifier version cache
+        self._spec_version: tuple[str, Version] | None = None
+
+    def _get_spec_version(self, version: str) -> Version | None:
+        """One element cache, as only one spec Version is needed per Specifier."""
+        if self._spec_version is not None and self._spec_version[0] == version:
+            return self._spec_version[1]
+
+        version_specifier = _coerce_version(version)
+        if version_specifier is None:
+            return None
+
+        self._spec_version = (version, version_specifier)
+        return version_specifier
+
+    def _require_spec_version(self, version: str) -> Version:
+        """Get spec version, asserting it's valid (not for === operator).
+
+        This method should only be called for operators where version
+        strings are guaranteed to be valid PEP 440 versions (not ===).
+        """
+        spec_version = self._get_spec_version(version)
+        assert spec_version is not None
+        return spec_version
+
+    @property
+    def prereleases(self) -> bool | None:
+        # If there is an explicit prereleases set for this, then we'll just
+        # blindly use that.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # Only the "!=" operator does not imply prereleases when
+        # the version in the specifier is a prerelease.
+        operator, version_str = self._spec
+        if operator != "!=":
+            # The == specifier with trailing .* cannot include prereleases
+            # e.g. "==1.0a1.*" is not valid.
+            if operator == "==" and version_str.endswith(".*"):
+                return False
+
+            # "===" can have arbitrary string versions, so we cannot parse
+            # those, we take prereleases as unknown (None) for those.
+            version = self._get_spec_version(version_str)
+            if version is None:
+                return None
+
+            # For all other operators, use the check if spec Version
+            # object implies pre-releases.
+ if version.is_prerelease: + return True + + return False + + @prereleases.setter + def prereleases(self, value: bool | None) -> None: + self._prereleases = value + + @property + def operator(self) -> str: + """The operator of this specifier. + + >>> Specifier("==1.2.3").operator + '==' + """ + return self._spec[0] + + @property + def version(self) -> str: + """The version of this specifier. + + >>> Specifier("==1.2.3").version + '1.2.3' + """ + return self._spec[1] + + def __repr__(self) -> str: + """A representation of the Specifier that shows all internal state. + + >>> Specifier('>=1.0.0') + =1.0.0')> + >>> Specifier('>=1.0.0', prereleases=False) + =1.0.0', prereleases=False)> + >>> Specifier('>=1.0.0', prereleases=True) + =1.0.0', prereleases=True)> + """ + pre = ( + f", prereleases={self.prereleases!r}" + if self._prereleases is not None + else "" + ) + + return f"<{self.__class__.__name__}({str(self)!r}{pre})>" + + def __str__(self) -> str: + """A string representation of the Specifier that can be round-tripped. + + >>> str(Specifier('>=1.0.0')) + '>=1.0.0' + >>> str(Specifier('>=1.0.0', prereleases=False)) + '>=1.0.0' + """ + return "{}{}".format(*self._spec) + + @property + def _canonical_spec(self) -> tuple[str, str]: + operator, version = self._spec + if operator == "===" or version.endswith(".*"): + return operator, version + + spec_version = self._require_spec_version(version) + + canonical_version = canonicalize_version( + spec_version, strip_trailing_zero=(operator != "~=") + ) + + return operator, canonical_version + + def __hash__(self) -> int: + return hash(self._canonical_spec) + + def __eq__(self, other: object) -> bool: + """Whether or not the two Specifier-like objects are equal. + + :param other: The other object to check against. + + The value of :attr:`prereleases` is ignored. + + >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0") + True + >>> (Specifier("==1.2.3", prereleases=False) == + ... Specifier("==1.2.3", prereleases=True)) + True + >>> Specifier("==1.2.3") == "==1.2.3" + True + >>> Specifier("==1.2.3") == Specifier("==1.2.4") + False + >>> Specifier("==1.2.3") == Specifier("~=1.2.3") + False + """ + if isinstance(other, str): + try: + other = self.__class__(str(other)) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._canonical_spec == other._canonical_spec + + def _get_operator(self, op: str) -> CallableOperator: + operator_callable: CallableOperator = getattr( + self, f"_compare_{self._operators[op]}" + ) + return operator_callable + + def _compare_compatible(self, prospective: Version, spec: str) -> bool: + # Compatible releases have an equivalent combination of >= and ==. That + # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to + # implement this in terms of the other specifiers instead of + # implementing it ourselves. The only thing we need to do is construct + # the other specifiers. + + # We want everything but the last item in the version, but we want to + # ignore suffix segments. 
+ prefix = _version_join( + list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1] + ) + + # Add the prefix notation to the end of our string + prefix += ".*" + + return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( + prospective, prefix + ) + + def _compare_equal(self, prospective: Version, spec: str) -> bool: + # We need special logic to handle prefix matching + if spec.endswith(".*"): + # In the case of prefix matching we want to ignore local segment. + normalized_prospective = canonicalize_version( + _public_version(prospective), strip_trailing_zero=False + ) + # Get the normalized version string ignoring the trailing .* + normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False) + # Split the spec out by bangs and dots, and pretend that there is + # an implicit dot in between a release segment and a pre-release segment. + split_spec = _version_split(normalized_spec) + + # Split the prospective version out by bangs and dots, and pretend + # that there is an implicit dot in between a release segment and + # a pre-release segment. + split_prospective = _version_split(normalized_prospective) + + # 0-pad the prospective version before shortening it to get the correct + # shortened version. + padded_prospective, _ = _pad_version(split_prospective, split_spec) + + # Shorten the prospective version to be the same length as the spec + # so that we can determine if the specifier is a prefix of the + # prospective version or not. + shortened_prospective = padded_prospective[: len(split_spec)] + + return shortened_prospective == split_spec + else: + # Convert our spec string into a Version + spec_version = self._require_spec_version(spec) + + # If the specifier does not have a local segment, then we want to + # act as if the prospective version also does not have a local + # segment. + if not spec_version.local: + prospective = _public_version(prospective) + + return prospective == spec_version + + def _compare_not_equal(self, prospective: Version, spec: str) -> bool: + return not self._compare_equal(prospective, spec) + + def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool: + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. + return _public_version(prospective) <= self._require_spec_version(spec) + + def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool: + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. + return _public_version(prospective) >= self._require_spec_version(spec) + + def _compare_less_than(self, prospective: Version, spec_str: str) -> bool: + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = self._require_spec_version(spec_str) + + # Check to see if the prospective version is less than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective < spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a pre-release version, that we do not accept pre-release + # versions for the version mentioned in the specifier (e.g. <3.1 should + # not match 3.1.dev0, but should match 3.0.dev0). 
+ if ( + not spec.is_prerelease + and prospective.is_prerelease + and _base_version(prospective) == _base_version(spec) + ): + return False + + # If we've gotten to here, it means that prospective version is both + # less than the spec version *and* it's not a pre-release of the same + # version in the spec. + return True + + def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool: + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = self._require_spec_version(spec_str) + + # Check to see if the prospective version is greater than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective > spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a post-release version, that we do not accept + # post-release versions for the version mentioned in the specifier + # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). + if ( + not spec.is_postrelease + and prospective.is_postrelease + and _base_version(prospective) == _base_version(spec) + ): + return False + + # Ensure that we do not allow a local version of the version mentioned + # in the specifier, which is technically greater than, to match. + if prospective.local is not None and _base_version( + prospective + ) == _base_version(spec): + return False + + # If we've gotten to here, it means that prospective version is both + # greater than the spec version *and* it's not a pre-release of the + # same version in the spec. + return True + + def _compare_arbitrary(self, prospective: Version | str, spec: str) -> bool: + return str(prospective).lower() == str(spec).lower() + + def __contains__(self, item: str | Version) -> bool: + """Return whether or not the item is contained in this specifier. + + :param item: The item to check for. + + This is used for the ``in`` operator and behaves the same as + :meth:`contains` with no ``prereleases`` argument passed. + + >>> "1.2.3" in Specifier(">=1.2.3") + True + >>> Version("1.2.3") in Specifier(">=1.2.3") + True + >>> "1.0.0" in Specifier(">=1.2.3") + False + >>> "1.3.0a1" in Specifier(">=1.2.3") + True + >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True) + True + """ + return self.contains(item) + + def contains(self, item: UnparsedVersion, prereleases: bool | None = None) -> bool: + """Return whether or not the item is contained in this specifier. + + :param item: + The item to check for, which can be a version string or a + :class:`Version` instance. + :param prereleases: + Whether or not to match prereleases with this Specifier. If set to + ``None`` (the default), it will follow the recommendation from + :pep:`440` and match prereleases, as there are no other versions. + + >>> Specifier(">=1.2.3").contains("1.2.3") + True + >>> Specifier(">=1.2.3").contains(Version("1.2.3")) + True + >>> Specifier(">=1.2.3").contains("1.0.0") + False + >>> Specifier(">=1.2.3").contains("1.3.0a1") + True + >>> Specifier(">=1.2.3", prereleases=False).contains("1.3.0a1") + False + >>> Specifier(">=1.2.3").contains("1.3.0a1") + True + """ + + return bool(list(self.filter([item], prereleases=prereleases))) + + def filter( + self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None + ) -> Iterator[UnparsedVersionVar]: + """Filter items in the given iterable, that match the specifier. 
+ + :param iterable: + An iterable that can contain version strings and :class:`Version` instances. + The items in the iterable will be filtered according to the specifier. + :param prereleases: + Whether or not to allow prereleases in the returned iterator. If set to + ``None`` (the default), it will follow the recommendation from :pep:`440` + and match prereleases if there are no other versions. + + >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"])) + ['1.3'] + >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")])) + ['1.2.3', '1.3', ] + >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"])) + ['1.5a1'] + >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True)) + ['1.3', '1.5a1'] + >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"])) + ['1.3', '1.5a1'] + """ + prereleases_versions = [] + found_non_prereleases = False + + # Determine if to include prereleases by default + include_prereleases = ( + prereleases if prereleases is not None else self.prereleases + ) + + # Get the matching operator + operator_callable = self._get_operator(self.operator) + + # Filter versions + for version in iterable: + parsed_version = _coerce_version(version) + if parsed_version is None: + # === operator can match arbitrary (non-version) strings + if self.operator == "===" and self._compare_arbitrary( + version, self.version + ): + yield version + elif operator_callable(parsed_version, self.version): + # If it's not a prerelease or prereleases are allowed, yield it directly + if not parsed_version.is_prerelease or include_prereleases: + found_non_prereleases = True + yield version + # Otherwise collect prereleases for potential later use + elif prereleases is None and self._prereleases is not False: + prereleases_versions.append(version) + + # If no non-prereleases were found and prereleases weren't + # explicitly forbidden, yield the collected prereleases + if ( + not found_non_prereleases + and prereleases is None + and self._prereleases is not False + ): + yield from prereleases_versions + + +_prefix_regex = re.compile(r"([0-9]+)((?:a|b|c|rc)[0-9]+)") + + +def _version_split(version: str) -> list[str]: + """Split version into components. + + The split components are intended for version comparison. The logic does + not attempt to retain the original version string, so joining the + components back with :func:`_version_join` may not produce the original + version string. + """ + result: list[str] = [] + + epoch, _, rest = version.rpartition("!") + result.append(epoch or "0") + + for item in rest.split("."): + match = _prefix_regex.fullmatch(item) + if match: + result.extend(match.groups()) + else: + result.append(item) + return result + + +def _version_join(components: list[str]) -> str: + """Join split version components into a version string. + + This function assumes the input came from :func:`_version_split`, where the + first component must be the epoch (either empty or numeric), and all other + components numeric. 
+ """ + epoch, *rest = components + return f"{epoch}!{'.'.join(rest)}" + + +def _is_not_suffix(segment: str) -> bool: + return not any( + segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post") + ) + + +def _pad_version(left: list[str], right: list[str]) -> tuple[list[str], list[str]]: + left_split, right_split = [], [] + + # Get the release segment of our versions + left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) + right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) + + # Get the rest of our versions + left_split.append(left[len(left_split[0]) :]) + right_split.append(right[len(right_split[0]) :]) + + # Insert our padding + left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) + right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) + + return ( + list(itertools.chain.from_iterable(left_split)), + list(itertools.chain.from_iterable(right_split)), + ) + + +class SpecifierSet(BaseSpecifier): + """This class abstracts handling of a set of version specifiers. + + It can be passed a single specifier (``>=3.0``), a comma-separated list of + specifiers (``>=3.0,!=3.1``), or no specifier at all. + """ + + __slots__ = ("_prereleases", "_specs") + + def __init__( + self, + specifiers: str | Iterable[Specifier] = "", + prereleases: bool | None = None, + ) -> None: + """Initialize a SpecifierSet instance. + + :param specifiers: + The string representation of a specifier or a comma-separated list of + specifiers which will be parsed and normalized before use. + May also be an iterable of ``Specifier`` instances, which will be used + as is. + :param prereleases: + This tells the SpecifierSet if it should accept prerelease versions if + applicable or not. The default of ``None`` will autodetect it from the + given specifiers. + + :raises InvalidSpecifier: + If the given ``specifiers`` are not parseable than this exception will be + raised. + """ + + if isinstance(specifiers, str): + # Split on `,` to break each individual specifier into its own item, and + # strip each item to remove leading/trailing whitespace. + split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + + # Make each individual specifier a Specifier and save in a frozen set + # for later. + self._specs = frozenset(map(Specifier, split_specifiers)) + else: + # Save the supplied specifiers in a frozen set. + self._specs = frozenset(specifiers) + + # Store our prereleases value so we can use it later to determine if + # we accept prereleases or not. + self._prereleases = prereleases + + @property + def prereleases(self) -> bool | None: + # If we have been given an explicit prerelease modifier, then we'll + # pass that through here. + if self._prereleases is not None: + return self._prereleases + + # If we don't have any specifiers, and we don't have a forced value, + # then we'll just return None since we don't know if this should have + # pre-releases or not. + if not self._specs: + return None + + # Otherwise we'll see if any of the given specifiers accept + # prereleases, if any of them do we'll return True, otherwise False. + if any(s.prereleases for s in self._specs): + return True + + return None + + @prereleases.setter + def prereleases(self, value: bool | None) -> None: + self._prereleases = value + + def __repr__(self) -> str: + """A representation of the specifier set that shows all internal state. 
+ + Note that the ordering of the individual specifiers within the set may not + match the input string. + + >>> SpecifierSet('>=1.0.0,!=2.0.0') + =1.0.0')> + >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False) + =1.0.0', prereleases=False)> + >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True) + =1.0.0', prereleases=True)> + """ + pre = ( + f", prereleases={self.prereleases!r}" + if self._prereleases is not None + else "" + ) + + return f"" + + def __str__(self) -> str: + """A string representation of the specifier set that can be round-tripped. + + Note that the ordering of the individual specifiers within the set may not + match the input string. + + >>> str(SpecifierSet(">=1.0.0,!=1.0.1")) + '!=1.0.1,>=1.0.0' + >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False)) + '!=1.0.1,>=1.0.0' + """ + return ",".join(sorted(str(s) for s in self._specs)) + + def __hash__(self) -> int: + return hash(self._specs) + + def __and__(self, other: SpecifierSet | str) -> SpecifierSet: + """Return a SpecifierSet which is a combination of the two sets. + + :param other: The other object to combine with. + + >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1' + =1.0.0')> + >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1') + =1.0.0')> + """ + if isinstance(other, str): + other = SpecifierSet(other) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + specifier = SpecifierSet() + specifier._specs = frozenset(self._specs | other._specs) + + if self._prereleases is None and other._prereleases is not None: + specifier._prereleases = other._prereleases + elif ( + self._prereleases is not None and other._prereleases is None + ) or self._prereleases == other._prereleases: + specifier._prereleases = self._prereleases + else: + raise ValueError( + "Cannot combine SpecifierSets with True and False prerelease overrides." + ) + + return specifier + + def __eq__(self, other: object) -> bool: + """Whether or not the two SpecifierSet-like objects are equal. + + :param other: The other object to check against. + + The value of :attr:`prereleases` is ignored. + + >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) == + ... SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)) + True + >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1" + True + >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0") + False + >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2") + False + """ + if isinstance(other, (str, Specifier)): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs == other._specs + + def __len__(self) -> int: + """Returns the number of specifiers in this specifier set.""" + return len(self._specs) + + def __iter__(self) -> Iterator[Specifier]: + """ + Returns an iterator over all the underlying :class:`Specifier` instances + in this specifier set. + + >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str) + [, =1.0.0')>] + """ + return iter(self._specs) + + def __contains__(self, item: UnparsedVersion) -> bool: + """Return whether or not the item is contained in this specifier. + + :param item: The item to check for. + + This is used for the ``in`` operator and behaves the same as + :meth:`contains` with no ``prereleases`` argument passed. 
+ + >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1") + False + >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True) + True + """ + return self.contains(item) + + def contains( + self, + item: UnparsedVersion, + prereleases: bool | None = None, + installed: bool | None = None, + ) -> bool: + """Return whether or not the item is contained in this SpecifierSet. + + :param item: + The item to check for, which can be a version string or a + :class:`Version` instance. + :param prereleases: + Whether or not to match prereleases with this SpecifierSet. If set to + ``None`` (the default), it will follow the recommendation from :pep:`440` + and match prereleases, as there are no other versions. + :param installed: + Whether or not the item is installed. If set to ``True``, it will + accept prerelease versions even if the specifier does not allow them. + + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3") + True + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3")) + True + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1") + False + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1") + True + >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False).contains("1.3.0a1") + False + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True) + True + """ + version = _coerce_version(item) + + if version is not None and installed and version.is_prerelease: + prereleases = True + + check_item = item if version is None else version + return bool(list(self.filter([check_item], prereleases=prereleases))) + + def filter( + self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None + ) -> Iterator[UnparsedVersionVar]: + """Filter items in the given iterable, that match the specifiers in this set. + + :param iterable: + An iterable that can contain version strings and :class:`Version` instances. + The items in the iterable will be filtered according to the specifier. + :param prereleases: + Whether or not to allow prereleases in the returned iterator. If set to + ``None`` (the default), it will follow the recommendation from :pep:`440` + and match prereleases if there are no other versions. + + >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"])) + ['1.3'] + >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")])) + ['1.3', ] + >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"])) + ['1.5a1'] + >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True)) + ['1.3', '1.5a1'] + >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"])) + ['1.3', '1.5a1'] + + An "empty" SpecifierSet will filter items based on the presence of prerelease + versions in the set. + + >>> list(SpecifierSet("").filter(["1.3", "1.5a1"])) + ['1.3'] + >>> list(SpecifierSet("").filter(["1.5a1"])) + ['1.5a1'] + >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"])) + ['1.3', '1.5a1'] + >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True)) + ['1.3', '1.5a1'] + """ + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. 
+ if prereleases is None and self.prereleases is not None: + prereleases = self.prereleases + + # If we have any specifiers, then we want to wrap our iterable in the + # filter method for each one, this will act as a logical AND amongst + # each specifier. + if self._specs: + # When prereleases is None, we need to let all versions through + # the individual filters, then decide about prereleases at the end + # based on whether any non-prereleases matched ALL specs. + for spec in self._specs: + iterable = spec.filter( + iterable, prereleases=True if prereleases is None else prereleases + ) + + if prereleases is not None: + # If we have a forced prereleases value, + # we can immediately return the iterator. + return iter(iterable) + else: + # Handle empty SpecifierSet cases where prereleases is not None. + if prereleases is True: + return iter(iterable) + + if prereleases is False: + return ( + item + for item in iterable + if (version := _coerce_version(item)) is None + or not version.is_prerelease + ) + + # Finally if prereleases is None, apply PEP 440 logic: + # exclude prereleases unless there are no final releases that matched. + filtered_items: list[UnparsedVersionVar] = [] + found_prereleases: list[UnparsedVersionVar] = [] + found_final_release = False + + for item in iterable: + parsed_version = _coerce_version(item) + # Arbitrary strings are always included as it is not + # possible to determine if they are prereleases, + # and they have already passed all specifiers. + if parsed_version is None: + filtered_items.append(item) + found_prereleases.append(item) + elif parsed_version.is_prerelease: + found_prereleases.append(item) + else: + filtered_items.append(item) + found_final_release = True + + return iter(filtered_items if found_final_release else found_prereleases) diff --git a/.venv/lib/python3.12/site-packages/packaging/tags.py b/.venv/lib/python3.12/site-packages/packaging/tags.py new file mode 100644 index 0000000..5ef27c8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging/tags.py @@ -0,0 +1,651 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import logging +import platform +import re +import struct +import subprocess +import sys +import sysconfig +from importlib.machinery import EXTENSION_SUFFIXES +from typing import ( + Any, + Iterable, + Iterator, + Sequence, + Tuple, + cast, +) + +from . import _manylinux, _musllinux + +logger = logging.getLogger(__name__) + +PythonVersion = Sequence[int] +AppleVersion = Tuple[int, int] + +INTERPRETER_SHORT_NAMES: dict[str, str] = { + "python": "py", # Generic. + "cpython": "cp", + "pypy": "pp", + "ironpython": "ip", + "jython": "jy", +} + + +_32_BIT_INTERPRETER = struct.calcsize("P") == 4 + + +class Tag: + """ + A representation of the tag triple for a wheel. + + Instances are considered immutable and thus are hashable. Equality checking + is also supported. 
+ """ + + __slots__ = ["_abi", "_hash", "_interpreter", "_platform"] + + def __init__(self, interpreter: str, abi: str, platform: str) -> None: + self._interpreter = interpreter.lower() + self._abi = abi.lower() + self._platform = platform.lower() + # The __hash__ of every single element in a Set[Tag] will be evaluated each time + # that a set calls its `.disjoint()` method, which may be called hundreds of + # times when scanning a page of links for packages with tags matching that + # Set[Tag]. Pre-computing the value here produces significant speedups for + # downstream consumers. + self._hash = hash((self._interpreter, self._abi, self._platform)) + + @property + def interpreter(self) -> str: + return self._interpreter + + @property + def abi(self) -> str: + return self._abi + + @property + def platform(self) -> str: + return self._platform + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Tag): + return NotImplemented + + return ( + (self._hash == other._hash) # Short-circuit ASAP for perf reasons. + and (self._platform == other._platform) + and (self._abi == other._abi) + and (self._interpreter == other._interpreter) + ) + + def __hash__(self) -> int: + return self._hash + + def __str__(self) -> str: + return f"{self._interpreter}-{self._abi}-{self._platform}" + + def __repr__(self) -> str: + return f"<{self} @ {id(self)}>" + + def __setstate__(self, state: tuple[None, dict[str, Any]]) -> None: + # The cached _hash is wrong when unpickling. + _, slots = state + for k, v in slots.items(): + setattr(self, k, v) + self._hash = hash((self._interpreter, self._abi, self._platform)) + + +def parse_tag(tag: str) -> frozenset[Tag]: + """ + Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. + + Returning a set is required due to the possibility that the tag is a + compressed tag set. + """ + tags = set() + interpreters, abis, platforms = tag.split("-") + for interpreter in interpreters.split("."): + for abi in abis.split("."): + for platform_ in platforms.split("."): + tags.add(Tag(interpreter, abi, platform_)) + return frozenset(tags) + + +def _get_config_var(name: str, warn: bool = False) -> int | str | None: + value: int | str | None = sysconfig.get_config_var(name) + if value is None and warn: + logger.debug( + "Config variable '%s' is unset, Python ABI tag may be incorrect", name + ) + return value + + +def _normalize_string(string: str) -> str: + return string.replace(".", "_").replace("-", "_").replace(" ", "_") + + +def _is_threaded_cpython(abis: list[str]) -> bool: + """ + Determine if the ABI corresponds to a threaded (`--disable-gil`) build. + + The threaded builds are indicated by a "t" in the abiflags. + """ + if len(abis) == 0: + return False + # expect e.g., cp313 + m = re.match(r"cp\d+(.*)", abis[0]) + if not m: + return False + abiflags = m.group(1) + return "t" in abiflags + + +def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool: + """ + Determine if the Python version supports abi3. + + PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`) + builds do not support abi3. + """ + return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading + + +def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> list[str]: + py_version = tuple(py_version) # To allow for version comparison. 
+ abis = [] + version = _version_nodot(py_version[:2]) + threading = debug = pymalloc = ucs4 = "" + with_debug = _get_config_var("Py_DEBUG", warn) + has_refcount = hasattr(sys, "gettotalrefcount") + # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled + # extension modules is the best option. + # https://github.com/pypa/pip/issues/3383#issuecomment-173267692 + has_ext = "_d.pyd" in EXTENSION_SUFFIXES + if with_debug or (with_debug is None and (has_refcount or has_ext)): + debug = "d" + if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn): + threading = "t" + if py_version < (3, 8): + with_pymalloc = _get_config_var("WITH_PYMALLOC", warn) + if with_pymalloc or with_pymalloc is None: + pymalloc = "m" + if py_version < (3, 3): + unicode_size = _get_config_var("Py_UNICODE_SIZE", warn) + if unicode_size == 4 or ( + unicode_size is None and sys.maxunicode == 0x10FFFF + ): + ucs4 = "u" + elif debug: + # Debug builds can also load "normal" extension modules. + # We can also assume no UCS-4 or pymalloc requirement. + abis.append(f"cp{version}{threading}") + abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}") + return abis + + +def cpython_tags( + python_version: PythonVersion | None = None, + abis: Iterable[str] | None = None, + platforms: Iterable[str] | None = None, + *, + warn: bool = False, +) -> Iterator[Tag]: + """ + Yields the tags for a CPython interpreter. + + The tags consist of: + - cp-- + - cp-abi3- + - cp-none- + - cp-abi3- # Older Python versions down to 3.2. + + If python_version only specifies a major version then user-provided ABIs and + the 'none' ABItag will be used. + + If 'abi3' or 'none' are specified in 'abis' then they will be yielded at + their normal position and not at the beginning. + """ + if not python_version: + python_version = sys.version_info[:2] + + interpreter = f"cp{_version_nodot(python_version[:2])}" + + if abis is None: + abis = _cpython_abis(python_version, warn) if len(python_version) > 1 else [] + abis = list(abis) + # 'abi3' and 'none' are explicitly handled later. + for explicit_abi in ("abi3", "none"): + try: + abis.remove(explicit_abi) + except ValueError: # noqa: PERF203 + pass + + platforms = list(platforms or platform_tags()) + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + + threading = _is_threaded_cpython(abis) + use_abi3 = _abi3_applies(python_version, threading) + if use_abi3: + yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms) + yield from (Tag(interpreter, "none", platform_) for platform_ in platforms) + + if use_abi3: + for minor_version in range(python_version[1] - 1, 1, -1): + for platform_ in platforms: + version = _version_nodot((python_version[0], minor_version)) + interpreter = f"cp{version}" + yield Tag(interpreter, "abi3", platform_) + + +def _generic_abi() -> list[str]: + """ + Return the ABI tag based on EXT_SUFFIX. + """ + # The following are examples of `EXT_SUFFIX`. 
+ # We want to keep the parts which are related to the ABI and remove the + # parts which are related to the platform: + # - linux: '.cpython-310-x86_64-linux-gnu.so' => cp310 + # - mac: '.cpython-310-darwin.so' => cp310 + # - win: '.cp310-win_amd64.pyd' => cp310 + # - win: '.pyd' => cp37 (uses _cpython_abis()) + # - pypy: '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73 + # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib' + # => graalpy_38_native + + ext_suffix = _get_config_var("EXT_SUFFIX", warn=True) + if not isinstance(ext_suffix, str) or ext_suffix[0] != ".": + raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')") + parts = ext_suffix.split(".") + if len(parts) < 3: + # CPython3.7 and earlier uses ".pyd" on Windows. + return _cpython_abis(sys.version_info[:2]) + soabi = parts[1] + if soabi.startswith("cpython"): + # non-windows + abi = "cp" + soabi.split("-")[1] + elif soabi.startswith("cp"): + # windows + abi = soabi.split("-")[0] + elif soabi.startswith("pypy"): + abi = "-".join(soabi.split("-")[:2]) + elif soabi.startswith("graalpy"): + abi = "-".join(soabi.split("-")[:3]) + elif soabi: + # pyston, ironpython, others? + abi = soabi + else: + return [] + return [_normalize_string(abi)] + + +def generic_tags( + interpreter: str | None = None, + abis: Iterable[str] | None = None, + platforms: Iterable[str] | None = None, + *, + warn: bool = False, +) -> Iterator[Tag]: + """ + Yields the tags for a generic interpreter. + + The tags consist of: + - -- + + The "none" ABI will be added if it was not explicitly provided. + """ + if not interpreter: + interp_name = interpreter_name() + interp_version = interpreter_version(warn=warn) + interpreter = f"{interp_name}{interp_version}" + abis = _generic_abi() if abis is None else list(abis) + platforms = list(platforms or platform_tags()) + if "none" not in abis: + abis.append("none") + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + + +def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: + """ + Yields Python versions in descending order. + + After the latest version, the major-only version will be yielded, and then + all previous versions of that major version. + """ + if len(py_version) > 1: + yield f"py{_version_nodot(py_version[:2])}" + yield f"py{py_version[0]}" + if len(py_version) > 1: + for minor in range(py_version[1] - 1, -1, -1): + yield f"py{_version_nodot((py_version[0], minor))}" + + +def compatible_tags( + python_version: PythonVersion | None = None, + interpreter: str | None = None, + platforms: Iterable[str] | None = None, +) -> Iterator[Tag]: + """ + Yields the sequence of tags that are compatible with a specific version of Python. + + The tags consist of: + - py*-none- + - -none-any # ... if `interpreter` is provided. 
+ - py*-none-any + """ + if not python_version: + python_version = sys.version_info[:2] + platforms = list(platforms or platform_tags()) + for version in _py_interpreter_range(python_version): + for platform_ in platforms: + yield Tag(version, "none", platform_) + if interpreter: + yield Tag(interpreter, "none", "any") + for version in _py_interpreter_range(python_version): + yield Tag(version, "none", "any") + + +def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str: + if not is_32bit: + return arch + + if arch.startswith("ppc"): + return "ppc" + + return "i386" + + +def _mac_binary_formats(version: AppleVersion, cpu_arch: str) -> list[str]: + formats = [cpu_arch] + if cpu_arch == "x86_64": + if version < (10, 4): + return [] + formats.extend(["intel", "fat64", "fat32"]) + + elif cpu_arch == "i386": + if version < (10, 4): + return [] + formats.extend(["intel", "fat32", "fat"]) + + elif cpu_arch == "ppc64": + # TODO: Need to care about 32-bit PPC for ppc64 through 10.2? + if version > (10, 5) or version < (10, 4): + return [] + formats.append("fat64") + + elif cpu_arch == "ppc": + if version > (10, 6): + return [] + formats.extend(["fat32", "fat"]) + + if cpu_arch in {"arm64", "x86_64"}: + formats.append("universal2") + + if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}: + formats.append("universal") + + return formats + + +def mac_platforms( + version: AppleVersion | None = None, arch: str | None = None +) -> Iterator[str]: + """ + Yields the platform tags for a macOS system. + + The `version` parameter is a two-item tuple specifying the macOS version to + generate platform tags for. The `arch` parameter is the CPU architecture to + generate platform tags for. Both parameters default to the appropriate value + for the current system. + """ + version_str, _, cpu_arch = platform.mac_ver() + if version is None: + version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2]))) + if version == (10, 16): + # When built against an older macOS SDK, Python will report macOS 10.16 + # instead of the real version. + version_str = subprocess.run( + [ + sys.executable, + "-sS", + "-c", + "import platform; print(platform.mac_ver()[0])", + ], + check=True, + env={"SYSTEM_VERSION_COMPAT": "0"}, + stdout=subprocess.PIPE, + text=True, + ).stdout + version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2]))) + + if arch is None: + arch = _mac_arch(cpu_arch) + + if (10, 0) <= version < (11, 0): + # Prior to Mac OS 11, each yearly release of Mac OS bumped the + # "minor" version number. The major version was always 10. + major_version = 10 + for minor_version in range(version[1], -1, -1): + compat_version = major_version, minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield f"macosx_{major_version}_{minor_version}_{binary_format}" + + if version >= (11, 0): + # Starting with Mac OS 11, each yearly release bumps the major version + # number. The minor versions are now the midyear updates. + minor_version = 0 + for major_version in range(version[0], 10, -1): + compat_version = major_version, minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield f"macosx_{major_version}_{minor_version}_{binary_format}" + + if version >= (11, 0): + # Mac OS 11 on x86_64 is compatible with binaries from previous releases. + # Arm64 support was introduced in 11.0, so no Arm binaries from previous + # releases exist. 
+ # + # However, the "universal2" binary format can have a + # macOS version earlier than 11.0 when the x86_64 part of the binary supports + # that version of macOS. + major_version = 10 + if arch == "x86_64": + for minor_version in range(16, 3, -1): + compat_version = major_version, minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield f"macosx_{major_version}_{minor_version}_{binary_format}" + else: + for minor_version in range(16, 3, -1): + compat_version = major_version, minor_version + binary_format = "universal2" + yield f"macosx_{major_version}_{minor_version}_{binary_format}" + + +def ios_platforms( + version: AppleVersion | None = None, multiarch: str | None = None +) -> Iterator[str]: + """ + Yields the platform tags for an iOS system. + + :param version: A two-item tuple specifying the iOS version to generate + platform tags for. Defaults to the current iOS version. + :param multiarch: The CPU architecture+ABI to generate platform tags for - + (the value used by `sys.implementation._multiarch` e.g., + `arm64_iphoneos` or `x84_64_iphonesimulator`). Defaults to the current + multiarch value. + """ + if version is None: + # if iOS is the current platform, ios_ver *must* be defined. However, + # it won't exist for CPython versions before 3.13, which causes a mypy + # error. + _, release, _, _ = platform.ios_ver() # type: ignore[attr-defined, unused-ignore] + version = cast("AppleVersion", tuple(map(int, release.split(".")[:2]))) + + if multiarch is None: + multiarch = sys.implementation._multiarch + multiarch = multiarch.replace("-", "_") + + ios_platform_template = "ios_{major}_{minor}_{multiarch}" + + # Consider any iOS major.minor version from the version requested, down to + # 12.0. 12.0 is the first iOS version that is known to have enough features + # to support CPython. Consider every possible minor release up to X.9. There + # highest the minor has ever gone is 8 (14.8 and 15.8) but having some extra + # candidates that won't ever match doesn't really hurt, and it saves us from + # having to keep an explicit list of known iOS versions in the code. Return + # the results descending order of version number. + + # If the requested major version is less than 12, there won't be any matches. + if version[0] < 12: + return + + # Consider the actual X.Y version that was requested. + yield ios_platform_template.format( + major=version[0], minor=version[1], multiarch=multiarch + ) + + # Consider every minor version from X.0 to the minor version prior to the + # version requested by the platform. + for minor in range(version[1] - 1, -1, -1): + yield ios_platform_template.format( + major=version[0], minor=minor, multiarch=multiarch + ) + + for major in range(version[0] - 1, 11, -1): + for minor in range(9, -1, -1): + yield ios_platform_template.format( + major=major, minor=minor, multiarch=multiarch + ) + + +def android_platforms( + api_level: int | None = None, abi: str | None = None +) -> Iterator[str]: + """ + Yields the :attr:`~Tag.platform` tags for Android. If this function is invoked on + non-Android platforms, the ``api_level`` and ``abi`` arguments are required. + + :param int api_level: The maximum `API level + `__ to return. Defaults + to the current system's version, as returned by ``platform.android_ver``. + :param str abi: The `Android ABI `__, + e.g. ``arm64_v8a``. Defaults to the current system's ABI , as returned by + ``sysconfig.get_platform``. Hyphens and periods will be replaced with + underscores. 
+ """ + if platform.system() != "Android" and (api_level is None or abi is None): + raise TypeError( + "on non-Android platforms, the api_level and abi arguments are required" + ) + + if api_level is None: + # Python 3.13 was the first version to return platform.system() == "Android", + # and also the first version to define platform.android_ver(). + api_level = platform.android_ver().api_level # type: ignore[attr-defined] + + if abi is None: + abi = sysconfig.get_platform().split("-")[-1] + abi = _normalize_string(abi) + + # 16 is the minimum API level known to have enough features to support CPython + # without major patching. Yield every API level from the maximum down to the + # minimum, inclusive. + min_api_level = 16 + for ver in range(api_level, min_api_level - 1, -1): + yield f"android_{ver}_{abi}" + + +def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]: + linux = _normalize_string(sysconfig.get_platform()) + if not linux.startswith("linux_"): + # we should never be here, just yield the sysconfig one and return + yield linux + return + if is_32bit: + if linux == "linux_x86_64": + linux = "linux_i686" + elif linux == "linux_aarch64": + linux = "linux_armv8l" + _, arch = linux.split("_", 1) + archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch]) + yield from _manylinux.platform_tags(archs) + yield from _musllinux.platform_tags(archs) + for arch in archs: + yield f"linux_{arch}" + + +def _generic_platforms() -> Iterator[str]: + yield _normalize_string(sysconfig.get_platform()) + + +def platform_tags() -> Iterator[str]: + """ + Provides the platform tags for this installation. + """ + if platform.system() == "Darwin": + return mac_platforms() + elif platform.system() == "iOS": + return ios_platforms() + elif platform.system() == "Android": + return android_platforms() + elif platform.system() == "Linux": + return _linux_platforms() + else: + return _generic_platforms() + + +def interpreter_name() -> str: + """ + Returns the name of the running interpreter. + + Some implementations have a reserved, two-letter abbreviation which will + be returned when appropriate. + """ + name = sys.implementation.name + return INTERPRETER_SHORT_NAMES.get(name) or name + + +def interpreter_version(*, warn: bool = False) -> str: + """ + Returns the version of the running interpreter. + """ + version = _get_config_var("py_version_nodot", warn=warn) + return str(version) if version else _version_nodot(sys.version_info[:2]) + + +def _version_nodot(version: PythonVersion) -> str: + return "".join(map(str, version)) + + +def sys_tags(*, warn: bool = False) -> Iterator[Tag]: + """ + Returns the sequence of tag triples for the running interpreter. + + The order of the sequence corresponds to priority order for the + interpreter, from most to least important. + """ + + interp_name = interpreter_name() + if interp_name == "cp": + yield from cpython_tags(warn=warn) + else: + yield from generic_tags() + + if interp_name == "pp": + interp = "pp3" + elif interp_name == "cp": + interp = "cp" + interpreter_version(warn=warn) + else: + interp = None + yield from compatible_tags(interpreter=interp) diff --git a/.venv/lib/python3.12/site-packages/packaging/utils.py b/.venv/lib/python3.12/site-packages/packaging/utils.py new file mode 100644 index 0000000..c41c813 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging/utils.py @@ -0,0 +1,158 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import re +from typing import NewType, Tuple, Union, cast + +from .tags import Tag, parse_tag +from .version import InvalidVersion, Version, _TrimmedRelease + +BuildTag = Union[Tuple[()], Tuple[int, str]] +NormalizedName = NewType("NormalizedName", str) + + +class InvalidName(ValueError): + """ + An invalid distribution name; users should refer to the packaging user guide. + """ + + +class InvalidWheelFilename(ValueError): + """ + An invalid wheel filename was found, users should refer to PEP 427. + """ + + +class InvalidSdistFilename(ValueError): + """ + An invalid sdist filename was found, users should refer to the packaging user guide. + """ + + +# Core metadata spec for `Name` +_validate_regex = re.compile(r"[A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9]", re.IGNORECASE) +_normalized_regex = re.compile(r"[a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9]") +# PEP 427: The build number must start with a digit. +_build_tag_regex = re.compile(r"(\d+)(.*)") + + +def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName: + if validate and not _validate_regex.fullmatch(name): + raise InvalidName(f"name is invalid: {name!r}") + # Ensure all ``.`` and ``_`` are ``-`` + # Emulates ``re.sub(r"[-_.]+", "-", name).lower()`` from PEP 503 + # Much faster than re, and even faster than str.translate + value = name.lower().replace("_", "-").replace(".", "-") + # Condense repeats (faster than regex) + while "--" in value: + value = value.replace("--", "-") + return cast("NormalizedName", value) + + +def is_normalized_name(name: str) -> bool: + return _normalized_regex.fullmatch(name) is not None + + +def canonicalize_version( + version: Version | str, *, strip_trailing_zero: bool = True +) -> str: + """ + Return a canonical form of a version as a string. + + >>> canonicalize_version('1.0.1') + '1.0.1' + + Per PEP 625, versions may have multiple canonical forms, differing + only by trailing zeros. + + >>> canonicalize_version('1.0.0') + '1' + >>> canonicalize_version('1.0.0', strip_trailing_zero=False) + '1.0.0' + + Invalid versions are returned unaltered. + + >>> canonicalize_version('foo bar baz') + 'foo bar baz' + """ + if isinstance(version, str): + try: + version = Version(version) + except InvalidVersion: + return str(version) + return str(_TrimmedRelease(version) if strip_trailing_zero else version) + + +def parse_wheel_filename( + filename: str, +) -> tuple[NormalizedName, Version, BuildTag, frozenset[Tag]]: + if not filename.endswith(".whl"): + raise InvalidWheelFilename( + f"Invalid wheel filename (extension must be '.whl'): {filename!r}" + ) + + filename = filename[:-4] + dashes = filename.count("-") + if dashes not in (4, 5): + raise InvalidWheelFilename( + f"Invalid wheel filename (wrong number of parts): {filename!r}" + ) + + parts = filename.split("-", dashes - 2) + name_part = parts[0] + # See PEP 427 for the rules on escaping the project name. 
+ if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None: + raise InvalidWheelFilename(f"Invalid project name: {filename!r}") + name = canonicalize_name(name_part) + + try: + version = Version(parts[1]) + except InvalidVersion as e: + raise InvalidWheelFilename( + f"Invalid wheel filename (invalid version): {filename!r}" + ) from e + + if dashes == 5: + build_part = parts[2] + build_match = _build_tag_regex.match(build_part) + if build_match is None: + raise InvalidWheelFilename( + f"Invalid build number: {build_part} in {filename!r}" + ) + build = cast("BuildTag", (int(build_match.group(1)), build_match.group(2))) + else: + build = () + tags = parse_tag(parts[-1]) + return (name, version, build, tags) + + +def parse_sdist_filename(filename: str) -> tuple[NormalizedName, Version]: + if filename.endswith(".tar.gz"): + file_stem = filename[: -len(".tar.gz")] + elif filename.endswith(".zip"): + file_stem = filename[: -len(".zip")] + else: + raise InvalidSdistFilename( + f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):" + f" {filename!r}" + ) + + # We are requiring a PEP 440 version, which cannot contain dashes, + # so we split on the last dash. + name_part, sep, version_part = file_stem.rpartition("-") + if not sep: + raise InvalidSdistFilename(f"Invalid sdist filename: {filename!r}") + + name = canonicalize_name(name_part) + + try: + version = Version(version_part) + except InvalidVersion as e: + raise InvalidSdistFilename( + f"Invalid sdist filename (invalid version): {filename!r}" + ) from e + + return (name, version) diff --git a/.venv/lib/python3.12/site-packages/packaging/version.py b/.venv/lib/python3.12/site-packages/packaging/version.py new file mode 100644 index 0000000..1206c46 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/packaging/version.py @@ -0,0 +1,792 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +""" +.. testsetup:: + + from packaging.version import parse, Version +""" + +from __future__ import annotations + +import re +import sys +import typing +from typing import ( + Any, + Callable, + Literal, + NamedTuple, + SupportsInt, + Tuple, + TypedDict, + Union, +) + +from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType + +if typing.TYPE_CHECKING: + from typing_extensions import Self, Unpack + +if sys.version_info >= (3, 13): # pragma: no cover + from warnings import deprecated as _deprecated +elif typing.TYPE_CHECKING: + from typing_extensions import deprecated as _deprecated +else: # pragma: no cover + import functools + import warnings + + def _deprecated(message: str) -> object: + def decorator(func: object) -> object: + @functools.wraps(func) + def wrapper(*args: object, **kwargs: object) -> object: + warnings.warn( + message, + category=DeprecationWarning, + stacklevel=2, + ) + return func(*args, **kwargs) + + return wrapper + + return decorator + + +_LETTER_NORMALIZATION = { + "alpha": "a", + "beta": "b", + "c": "rc", + "pre": "rc", + "preview": "rc", + "rev": "post", + "r": "post", +} + +__all__ = ["VERSION_PATTERN", "InvalidVersion", "Version", "parse"] + +LocalType = Tuple[Union[int, str], ...] 
+ +CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]] +CmpLocalType = Union[ + NegativeInfinityType, + Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...], +] +CmpKey = Tuple[ + int, + Tuple[int, ...], + CmpPrePostDevType, + CmpPrePostDevType, + CmpPrePostDevType, + CmpLocalType, +] +VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool] + + +class _VersionReplace(TypedDict, total=False): + epoch: int | None + release: tuple[int, ...] | None + pre: tuple[Literal["a", "b", "rc"], int] | None + post: int | None + dev: int | None + local: str | None + + +def parse(version: str) -> Version: + """Parse the given version string. + + >>> parse('1.0.dev1') + + + :param version: The version string to parse. + :raises InvalidVersion: When the version string is not a valid version. + """ + return Version(version) + + +class InvalidVersion(ValueError): + """Raised when a version string is not a valid version. + + >>> Version("invalid") + Traceback (most recent call last): + ... + packaging.version.InvalidVersion: Invalid version: 'invalid' + """ + + +class _BaseVersion: + __slots__ = () + + # This can also be a normal member (see the packaging_legacy package); + # we are just requiring it to be readable. Actually defining a property + # has runtime effect on subclasses, so it's typing only. + if typing.TYPE_CHECKING: + + @property + def _key(self) -> tuple[Any, ...]: ... + + def __hash__(self) -> int: + return hash(self._key) + + # Please keep the duplicated `isinstance` check + # in the six comparisons hereunder + # unless you find a way to avoid adding overhead function calls. + def __lt__(self, other: _BaseVersion) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key < other._key + + def __le__(self, other: _BaseVersion) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key <= other._key + + def __eq__(self, other: object) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key == other._key + + def __ge__(self, other: _BaseVersion) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key >= other._key + + def __gt__(self, other: _BaseVersion) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key > other._key + + def __ne__(self, other: object) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key != other._key + + +# Deliberately not anchored to the start and end of the string, to make it +# easier for 3rd party code to reuse + +# Note that ++ doesn't behave identically on CPython and PyPy, so not using it here +_VERSION_PATTERN = r""" + v?+ # optional leading v + (?: + (?:(?P[0-9]+)!)?+ # epoch + (?P[0-9]+(?:\.[0-9]+)*+) # release segment + (?P
<pre>                                          # pre-release
+            [._-]?+
+            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
+            [._-]?+
+            (?P<pre_n>[0-9]+)?
+        )?+
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [._-]?
+                (?P<post_l>post|rev|r)
+                [._-]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?+
+        (?P<dev>                                          # dev release
+            [._-]?+
+            (?P<dev_l>dev)
+            [._-]?+
+            (?P<dev_n>[0-9]+)?
+        )?+
+    )
+    (?:\+
+        (?P<local>                                        # local version
+            [a-z0-9]+
+            (?:[._-][a-z0-9]+)*+
+        )
+    )?+
+"""
+
+_VERSION_PATTERN_OLD = _VERSION_PATTERN.replace("*+", "*").replace("?+", "?")
+
+# Possessive qualifiers were added in Python 3.11.
+# CPython 3.11.0-3.11.4 had a bug: https://github.com/python/cpython/pull/107795
+# Older PyPy also had a bug.
+VERSION_PATTERN = (
+    _VERSION_PATTERN_OLD
+    if (sys.implementation.name == "cpython" and sys.version_info < (3, 11, 5))
+    or (sys.implementation.name == "pypy" and sys.version_info < (3, 11, 13))
+    or sys.version_info < (3, 11)
+    else _VERSION_PATTERN
+)
+"""
+A string containing the regular expression used to match a valid version.
+
+The pattern is not anchored at either end, and is intended for embedding in larger
+expressions (for example, matching a version number as part of a file name). The
+regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
+flags set.
+
+:meta hide-value:
+"""
+
+
+# Validation pattern for local version in replace()
+_LOCAL_PATTERN = re.compile(r"[a-z0-9]+(?:[._-][a-z0-9]+)*", re.IGNORECASE)
+
+
+def _validate_epoch(value: object, /) -> int:
+    epoch = value or 0
+    if isinstance(epoch, int) and epoch >= 0:
+        return epoch
+    msg = f"epoch must be non-negative integer, got {epoch}"
+    raise InvalidVersion(msg)
+
+
+def _validate_release(value: object, /) -> tuple[int, ...]:
+    release = (0,) if value is None else value
+    if (
+        isinstance(release, tuple)
+        and len(release) > 0
+        and all(isinstance(i, int) and i >= 0 for i in release)
+    ):
+        return release
+    msg = f"release must be a non-empty tuple of non-negative integers, got {release}"
+    raise InvalidVersion(msg)
+
+
+def _validate_pre(value: object, /) -> tuple[Literal["a", "b", "rc"], int] | None:
+    if value is None:
+        return value
+    if (
+        isinstance(value, tuple)
+        and len(value) == 2
+        and value[0] in ("a", "b", "rc")
+        and isinstance(value[1], int)
+        and value[1] >= 0
+    ):
+        return value
+    msg = f"pre must be a tuple of ('a'|'b'|'rc', non-negative int), got {value}"
+    raise InvalidVersion(msg)
+
+
+def _validate_post(value: object, /) -> tuple[Literal["post"], int] | None:
+    if value is None:
+        return value
+    if isinstance(value, int) and value >= 0:
+        return ("post", value)
+    msg = f"post must be non-negative integer, got {value}"
+    raise InvalidVersion(msg)
+
+
+def _validate_dev(value: object, /) -> tuple[Literal["dev"], int] | None:
+    if value is None:
+        return value
+    if isinstance(value, int) and value >= 0:
+        return ("dev", value)
+    msg = f"dev must be non-negative integer, got {value}"
+    raise InvalidVersion(msg)
+
+
+def _validate_local(value: object, /) -> LocalType | None:
+    if value is None:
+        return value
+    if isinstance(value, str) and _LOCAL_PATTERN.fullmatch(value):
+        return _parse_local_version(value)
+    msg = f"local must be a valid version string, got {value!r}"
+    raise InvalidVersion(msg)
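
These validators back the ``__replace__`` method defined further down in this
file; a small sketch of the error they raise for a malformed replacement value
(the "gamma" label is deliberately invalid):

    from packaging.version import InvalidVersion, Version

    try:
        Version("1.2.3").__replace__(pre=("gamma", 1))
    except InvalidVersion as exc:
        print(exc)
        # pre must be a tuple of ('a'|'b'|'rc', non-negative int), got ('gamma', 1)
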
+
+
+# Backward compatibility for internals before 26.0. Do not use.
+class _Version(NamedTuple):
+    epoch: int
+    release: tuple[int, ...]
+    dev: tuple[str, int] | None
+    pre: tuple[str, int] | None
+    post: tuple[str, int] | None
+    local: LocalType | None
+
+
+class Version(_BaseVersion):
+    """This class abstracts handling of a project's versions.
+
+    A :class:`Version` instance is comparison aware and can be compared and
+    sorted using the standard Python interfaces.
+
+    >>> v1 = Version("1.0a5")
+    >>> v2 = Version("1.0")
+    >>> v1
+    <Version('1.0a5')>
+    >>> v2
+    <Version('1.0')>
+    >>> v1 < v2
+    True
+    >>> v1 == v2
+    False
+    >>> v1 > v2
+    False
+    >>> v1 >= v2
+    False
+    >>> v1 <= v2
+    True
+    """
+
+    __slots__ = ("_dev", "_epoch", "_key_cache", "_local", "_post", "_pre", "_release")
+    __match_args__ = ("_str",)
+
+    _regex = re.compile(r"\s*" + VERSION_PATTERN + r"\s*", re.VERBOSE | re.IGNORECASE)
+
+    _epoch: int
+    _release: tuple[int, ...]
+    _dev: tuple[str, int] | None
+    _pre: tuple[str, int] | None
+    _post: tuple[str, int] | None
+    _local: LocalType | None
+
+    _key_cache: CmpKey | None
+
+    def __init__(self, version: str) -> None:
+        """Initialize a Version object.
+
+        :param version:
+            The string representation of a version which will be parsed and normalized
+            before use.
+        :raises InvalidVersion:
+            If the ``version`` does not conform to PEP 440 in any way then this
+            exception will be raised.
+        """
+        # Validate the version and parse it into pieces
+        match = self._regex.fullmatch(version)
+        if not match:
+            raise InvalidVersion(f"Invalid version: {version!r}")
+        self._epoch = int(match.group("epoch")) if match.group("epoch") else 0
+        self._release = tuple(map(int, match.group("release").split(".")))
+        self._pre = _parse_letter_version(match.group("pre_l"), match.group("pre_n"))
+        self._post = _parse_letter_version(
+            match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+        )
+        self._dev = _parse_letter_version(match.group("dev_l"), match.group("dev_n"))
+        self._local = _parse_local_version(match.group("local"))
+
+        # Key which will be used for sorting
+        self._key_cache = None
+
+    def __replace__(self, **kwargs: Unpack[_VersionReplace]) -> Self:
+        epoch = _validate_epoch(kwargs["epoch"]) if "epoch" in kwargs else self._epoch
+        release = (
+            _validate_release(kwargs["release"])
+            if "release" in kwargs
+            else self._release
+        )
+        pre = _validate_pre(kwargs["pre"]) if "pre" in kwargs else self._pre
+        post = _validate_post(kwargs["post"]) if "post" in kwargs else self._post
+        dev = _validate_dev(kwargs["dev"]) if "dev" in kwargs else self._dev
+        local = _validate_local(kwargs["local"]) if "local" in kwargs else self._local
+
+        if (
+            epoch == self._epoch
+            and release == self._release
+            and pre == self._pre
+            and post == self._post
+            and dev == self._dev
+            and local == self._local
+        ):
+            return self
+
+        new_version = self.__class__.__new__(self.__class__)
+        new_version._key_cache = None
+        new_version._epoch = epoch
+        new_version._release = release
+        new_version._pre = pre
+        new_version._post = post
+        new_version._dev = dev
+        new_version._local = local
+
+        return new_version
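
``__replace__`` is the hook used by ``copy.replace()`` on Python 3.13+; a
minimal sketch of what it enables (the field values are illustrative):

    import copy
    from packaging.version import Version

    v = Version("1.2.3rc1")
    # Swap selected fields; anything not passed is carried over unchanged.
    v2 = copy.replace(v, pre=None, post=0)
    print(v2)        # 1.2.3.post0
    print(v2 is v)   # False -- a new Version instance is returned
    # On older Pythons the method can be called directly, e.g. v.__replace__(post=0).
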
+
+    @property
+    def _key(self) -> CmpKey:
+        if self._key_cache is None:
+            self._key_cache = _cmpkey(
+                self._epoch,
+                self._release,
+                self._pre,
+                self._post,
+                self._dev,
+                self._local,
+            )
+        return self._key_cache
+
+    @property
+    @_deprecated("Version._version is private and will be removed soon")
+    def _version(self) -> _Version:
+        return _Version(
+            self._epoch, self._release, self._dev, self._pre, self._post, self._local
+        )
+
+    @_version.setter
+    @_deprecated("Version._version is private and will be removed soon")
+    def _version(self, value: _Version) -> None:
+        self._epoch = value.epoch
+        self._release = value.release
+        self._dev = value.dev
+        self._pre = value.pre
+        self._post = value.post
+        self._local = value.local
+        self._key_cache = None
+
+    def __repr__(self) -> str:
+        """A representation of the Version that shows all internal state.
+
+        >>> Version('1.0.0')
+        <Version('1.0.0')>
+        """
+        return f"<Version('{self}')>"
+
+    def __str__(self) -> str:
+        """A string representation of the version that can be round-tripped.
+
+        >>> str(Version("1.0a5"))
+        '1.0a5'
+        """
+        # This is a hot function, so not calling self.base_version
+        version = ".".join(map(str, self.release))
+
+        # Epoch
+        if self.epoch:
+            version = f"{self.epoch}!{version}"
+
+        # Pre-release
+        if self.pre is not None:
+            version += "".join(map(str, self.pre))
+
+        # Post-release
+        if self.post is not None:
+            version += f".post{self.post}"
+
+        # Development release
+        if self.dev is not None:
+            version += f".dev{self.dev}"
+
+        # Local version segment
+        if self.local is not None:
+            version += f"+{self.local}"
+
+        return version
+
+    @property
+    def _str(self) -> str:
+        """Internal property for match_args"""
+        return str(self)
+
+    @property
+    def epoch(self) -> int:
+        """The epoch of the version.
+
+        >>> Version("2.0.0").epoch
+        0
+        >>> Version("1!2.0.0").epoch
+        1
+        """
+        return self._epoch
+
+    @property
+    def release(self) -> tuple[int, ...]:
+        """The components of the "release" segment of the version.
+
+        >>> Version("1.2.3").release
+        (1, 2, 3)
+        >>> Version("2.0.0").release
+        (2, 0, 0)
+        >>> Version("1!2.0.0.post0").release
+        (2, 0, 0)
+
+        Includes trailing zeroes but not the epoch or any pre-release / development /
+        post-release suffixes.
+        """
+        return self._release
+
+    @property
+    def pre(self) -> tuple[str, int] | None:
+        """The pre-release segment of the version.
+
+        >>> print(Version("1.2.3").pre)
+        None
+        >>> Version("1.2.3a1").pre
+        ('a', 1)
+        >>> Version("1.2.3b1").pre
+        ('b', 1)
+        >>> Version("1.2.3rc1").pre
+        ('rc', 1)
+        """
+        return self._pre
+
+    @property
+    def post(self) -> int | None:
+        """The post-release number of the version.
+
+        >>> print(Version("1.2.3").post)
+        None
+        >>> Version("1.2.3.post1").post
+        1
+        """
+        return self._post[1] if self._post else None
+
+    @property
+    def dev(self) -> int | None:
+        """The development number of the version.
+
+        >>> print(Version("1.2.3").dev)
+        None
+        >>> Version("1.2.3.dev1").dev
+        1
+        """
+        return self._dev[1] if self._dev else None
+
+    @property
+    def local(self) -> str | None:
+        """The local version segment of the version.
+
+        >>> print(Version("1.2.3").local)
+        None
+        >>> Version("1.2.3+abc").local
+        'abc'
+        """
+        if self._local:
+            return ".".join(str(x) for x in self._local)
+        else:
+            return None
+
+    @property
+    def public(self) -> str:
+        """The public portion of the version.
+
+        >>> Version("1.2.3").public
+        '1.2.3'
+        >>> Version("1.2.3+abc").public
+        '1.2.3'
+        >>> Version("1!1.2.3dev1+abc").public
+        '1!1.2.3.dev1'
+        """
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self) -> str:
+        """The "base version" of the version.
+
+        >>> Version("1.2.3").base_version
+        '1.2.3'
+        >>> Version("1.2.3+abc").base_version
+        '1.2.3'
+        >>> Version("1!1.2.3dev1+abc").base_version
+        '1!1.2.3'
+
+        The "base version" is the public version of the project without any pre or post
+        release markers.
+        """
+        release_segment = ".".join(map(str, self.release))
+        return f"{self.epoch}!{release_segment}" if self.epoch else release_segment
+
+    @property
+    def is_prerelease(self) -> bool:
+        """Whether this version is a pre-release.
+
+        >>> Version("1.2.3").is_prerelease
+        False
+        >>> Version("1.2.3a1").is_prerelease
+        True
+        >>> Version("1.2.3b1").is_prerelease
+        True
+        >>> Version("1.2.3rc1").is_prerelease
+        True
+        >>> Version("1.2.3dev1").is_prerelease
+        True
+        """
+        return self.dev is not None or self.pre is not None
+
+    @property
+    def is_postrelease(self) -> bool:
+        """Whether this version is a post-release.
+
+        >>> Version("1.2.3").is_postrelease
+        False
+        >>> Version("1.2.3.post1").is_postrelease
+        True
+        """
+        return self.post is not None
+
+    @property
+    def is_devrelease(self) -> bool:
+        """Whether this version is a development release.
+
+        >>> Version("1.2.3").is_devrelease
+        False
+        >>> Version("1.2.3.dev1").is_devrelease
+        True
+        """
+        return self.dev is not None
+
+    @property
+    def major(self) -> int:
+        """The first item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").major
+        1
+        """
+        return self.release[0] if len(self.release) >= 1 else 0
+
+    @property
+    def minor(self) -> int:
+        """The second item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").minor
+        2
+        >>> Version("1").minor
+        0
+        """
+        return self.release[1] if len(self.release) >= 2 else 0
+
+    @property
+    def micro(self) -> int:
+        """The third item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").micro
+        3
+        >>> Version("1").micro
+        0
+        """
+        return self.release[2] if len(self.release) >= 3 else 0
+
+
+class _TrimmedRelease(Version):
+    __slots__ = ()
+
+    def __init__(self, version: str | Version) -> None:
+        if isinstance(version, Version):
+            self._epoch = version._epoch
+            self._release = version._release
+            self._dev = version._dev
+            self._pre = version._pre
+            self._post = version._post
+            self._local = version._local
+            self._key_cache = version._key_cache
+            return
+        super().__init__(version)  # pragma: no cover
+
+    @property
+    def release(self) -> tuple[int, ...]:
+        """
+        Release segment without any trailing zeros.
+
+        >>> _TrimmedRelease('1.0.0').release
+        (1,)
+        >>> _TrimmedRelease('0.0').release
+        (0,)
+        """
+        # Strip trailing zeros, but always keep at least one release component
+        # (e.g. '0.0' -> (0,)).
+        rel = super().release
+        len_release = len(rel)
+        i = len_release
+        while i > 1 and rel[i - 1] == 0:
+            i -= 1
+        return rel if i == len_release else rel[:i]
+
+
+def _parse_letter_version(
+    letter: str | None, number: str | bytes | SupportsInt | None
+) -> tuple[str, int] | None:
+    if letter:
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        letter = _LETTER_NORMALIZATION.get(letter, letter)
+
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        return letter, int(number or 0)
+
+    if number:
+        # We assume if we are given a number, but we are not given a letter
+        # then this is using the implicit post release syntax (e.g. 1.0-1)
+        return "post", int(number)
+
+    return None
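
The normalization above is observable through the public ``Version`` API; a
brief sketch (outputs follow PEP 440 normalization):

    from packaging.version import Version

    print(Version("1.0alpha1"))  # 1.0a1     ("alpha" normalized to "a")
    print(Version("1.0-rev2"))   # 1.0.post2 ("rev" normalized to "post")
    print(Version("1.0-1"))      # 1.0.post1 (implicit post-release syntax)
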
+
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local: str | None) -> LocalType | None:
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_separators.split(local)
+        )
+    return None
+
+
+def _cmpkey(
+    epoch: int,
+    release: tuple[int, ...],
+    pre: tuple[str, int] | None,
+    post: tuple[str, int] | None,
+    dev: tuple[str, int] | None,
+    local: LocalType | None,
+) -> CmpKey:
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. We will use this for our sorting key.
+    len_release = len(release)
+    i = len_release
+    while i and release[i - 1] == 0:
+        i -= 1
+    _release = release if i == len_release else release[:i]
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        _pre: CmpPrePostDevType = NegativeInfinity
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        _pre = Infinity
+    else:
+        _pre = pre
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        _post: CmpPrePostDevType = NegativeInfinity
+
+    else:
+        _post = post
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        _dev: CmpPrePostDevType = Infinity
+
+    else:
+        _dev = dev
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        _local: CmpLocalType = NegativeInfinity
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP440.
+        # - Alpha numeric segments sort before numeric segments
+        # - Alpha numeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        _local = tuple(
+            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+        )
+
+    return epoch, _release, _pre, _post, _dev, _local
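
A short sketch of the ordering these rules produce, using the example versions
from the comments above plus a post and a local release:

    from packaging.version import Version

    versions = ["1.0.post1", "1.0+local", "1.0", "1.0a0", "1.0.dev0"]
    print([str(v) for v in sorted(Version(s) for s in versions)])
    # ['1.0.dev0', '1.0a0', '1.0', '1.0+local', '1.0.post1']
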
diff --git a/.venv/lib/python3.12/site-packages/pathspec-1.0.4.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/pathspec-1.0.4.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec-1.0.4.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/.venv/lib/python3.12/site-packages/pathspec-1.0.4.dist-info/METADATA b/.venv/lib/python3.12/site-packages/pathspec-1.0.4.dist-info/METADATA
new file mode 100644
index 0000000..348a68b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec-1.0.4.dist-info/METADATA
@@ -0,0 +1,356 @@
+Metadata-Version: 2.4
+Name: pathspec
+Version: 1.0.4
+Summary: Utility library for gitignore style pattern matching of file paths.
+Author-email: "Caleb P. Burns" <cpburnz@gmail.com>
+Requires-Python: >=3.9
+Description-Content-Type: text/x-rst
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Utilities
+License-File: LICENSE
+Requires-Dist: hyperscan >=0.7 ; extra == "hyperscan"
+Requires-Dist: typing-extensions >=4 ; extra == "optional"
+Requires-Dist: google-re2 >=1.1 ; extra == "re2"
+Requires-Dist: pytest >=9 ; extra == "tests"
+Requires-Dist: typing-extensions >=4.15 ; extra == "tests"
+Project-URL: Documentation, https://python-path-specification.readthedocs.io/en/latest/index.html
+Project-URL: Issue Tracker, https://github.com/cpburnz/python-pathspec/issues
+Project-URL: Source Code, https://github.com/cpburnz/python-pathspec
+Provides-Extra: hyperscan
+Provides-Extra: optional
+Provides-Extra: re2
+Provides-Extra: tests
+
+
+PathSpec
+========
+
+*pathspec* is a utility library for pattern matching of file paths. So far this
+only includes Git's `gitignore`_ pattern matching.
+
+.. _`gitignore`: http://git-scm.com/docs/gitignore
+
+
+Tutorial
+--------
+
+Say you have a "Projects" directory and you want to back it up, but only
+certain files, and ignore others depending on certain conditions::
+
+	>>> from pathspec import PathSpec
+	>>> # The gitignore-style patterns for files to select, but we're including
+	>>> # instead of ignoring.
+	>>> spec_text = """
+	...
+	... # This is a comment because the line begins with a hash: "#"
+	...
+	... # Include several project directories (and all descendants) relative to
+	... # the current directory. To reference only a directory you must end with a
+	... # slash: "/"
+	... /project-a/
+	... /project-b/
+	... /project-c/
+	...
+	... # Patterns can be negated by prefixing with exclamation mark: "!"
+	...
+	... # Ignore temporary files beginning or ending with "~", or ending with
+	... # ".swp".
+	... !~*
+	... !*~
+	... !*.swp
+	...
+	... # These are python projects so ignore compiled python files from
+	... # testing.
+	... !*.pyc
+	...
+	... # Ignore the build directories but only directly under the project
+	... # directories.
+	... !/*/build/
+	...
+	... """
+
+The ``PathSpec`` class provides an abstraction around pattern implementations;
+in effect, it is a wrapper around a list of compiled patterns. Here we compile
+our patterns in the "gitignore" style::
+
+	>>> spec = PathSpec.from_lines('gitignore', spec_text.splitlines())
+
+If we want to compile the patterns manually, we can use the ``GitIgnoreBasicPattern``
+class directly. It is the implementation behind the "gitignore" style, and it
+internally converts patterns to regular expressions::
+
+	>>> from pathspec.patterns.gitignore.basic import GitIgnoreBasicPattern
+	>>> patterns = map(GitIgnoreBasicPattern, spec_text.splitlines())
+	>>> spec = PathSpec(patterns)
+
+``PathSpec.from_lines()`` is a class method which simplifies that.
+
+If you want to load the patterns from file, you can pass the file object
+directly as well::
+
+	>>> with open('patterns.list', 'r') as fh:
+	>>>     spec = PathSpec.from_lines('gitignore', fh)
+
+You can perform matching on a whole directory tree with::
+
+	>>> matches = set(spec.match_tree_files('path/to/directory'))
+
+Or you can perform matching on a specific set of file paths with::
+
+	>>> matches = set(spec.match_files(file_paths))
+
+Or check to see if an individual file matches::
+
+	>>> is_matched = spec.match_file(file_path)
+
+There are actually two implementations of "gitignore". The basic implementation
+is used by ``PathSpec`` and follows the patterns as documented by `gitignore`_.
+However, Git's behavior differs from the documented patterns in some edge
+cases; in particular, Git allows including files from excluded directories,
+which appears to contradict the documentation. ``GitIgnoreSpec`` handles these
+cases to more closely replicate Git's behavior::
+
+	>>> from pathspec import GitIgnoreSpec
+	>>> spec = GitIgnoreSpec.from_lines(spec_text.splitlines())
+
+You do not specify the style of pattern for ``GitIgnoreSpec`` because it should
+always use ``GitIgnoreSpecPattern`` internally.
+
+
+Performance
+-----------
+
+Running lots of regular expression matches against thousands of files in Python
+is slow. Alternate regular expression backends can be used to improve
+performance. ``PathSpec`` and ``GitIgnoreSpec`` both accept a ``backend``
+parameter to control the backend. The default is "best" to automatically choose
+the best available backend. There are currently 3 backends.
+
+The "simple" backend is the default and it simply uses Python's ``re.Pattern``
+objects that are normally created. This can be the fastest when there's only 1
+or 2 patterns.
+
+The "hyperscan" backend uses the `hyperscan`_ library. Hyperscan tends to be at
+least 2 times faster than "simple", and generally slower than "re2". This can be
+faster than "re2" under the right conditions with pattern counts of 1-25.
+
+The "re2" backend uses the `google-re2`_ library (not to be confused with the
+*re2* library on PyPI which is unrelated and abandoned). Google's re2 tends to
+be significantly faster than "simple", and 3 times faster than "hyperscan" at
+high pattern counts.
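+
+As a quick sketch (assuming the corresponding optional library is installed), a
+specific backend can be selected with the ``backend`` argument::
+
+	>>> from pathspec import GitIgnoreSpec
+	>>> spec = GitIgnoreSpec.from_lines(spec_text.splitlines(), backend='re2')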
+
+See `benchmarks_backends.md`_ for comparisons between native Python regular
+expressions and the optional backends.
+
+
+.. _`benchmarks_backends.md`: https://github.com/cpburnz/python-pathspec/blob/master/benchmarks_backends.md
+.. _`google-re2`: https://pypi.org/project/google-re2/
+.. _`hyperscan`: https://pypi.org/project/hyperscan/
+
+
+FAQ
+---
+
+
+1. How do I ignore files like *.gitignore*?
++++++++++++++++++++++++++++++++++++++++++++
+
+``GitIgnoreSpec`` (and ``PathSpec``) positively match files by default. To find
+the files to keep while excluding files like *.gitignore*, set ``negate=True``
+to flip the results::
+
+	>>> from pathspec import GitIgnoreSpec
+	>>> spec = GitIgnoreSpec.from_lines([...])
+	>>> keep_files = set(spec.match_tree_files('path/to/directory', negate=True))
+	>>> ignore_files = set(spec.match_tree_files('path/to/directory'))
+
+
+License
+-------
+
+*pathspec* is licensed under the `Mozilla Public License Version 2.0`_. See
+`LICENSE`_ or the `FAQ`_ for more information.
+
+In summary, you may use *pathspec* with any closed or open source project
+without affecting the license of the larger work so long as you:
+
+- give credit where credit is due,
+
+- and release any custom changes made to *pathspec*.
+
+.. _`Mozilla Public License Version 2.0`: http://www.mozilla.org/MPL/2.0
+.. _`LICENSE`: LICENSE
+.. _`FAQ`: http://www.mozilla.org/MPL/2.0/FAQ.html
+
+
+Source
+------
+
+The source code for *pathspec* is available from the GitHub repo
+`cpburnz/python-pathspec`_.
+
+.. _`cpburnz/python-pathspec`: https://github.com/cpburnz/python-pathspec
+
+
+Installation
+------------
+
+*pathspec* is available for install through `PyPI`_::
+
+	pip install pathspec
+
+*pathspec* can also be built from source. The following packages will be
+required:
+
+- `build`_ (>=0.6.0)
+
+*pathspec* can then be built and installed with::
+
+	python -m build
+	pip install dist/pathspec-*-py3-none-any.whl
+
+The following optional dependencies can be installed:
+
+- `google-re2`_: Enables optional "re2" backend.
+- `hyperscan`_: Enables optional "hyperscan" backend.
+- `typing-extensions`_: Improves some type hints.
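+
+For instance, the optional backends can be installed as extras::
+
+	pip install 'pathspec[re2]'
+	pip install 'pathspec[hyperscan]'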
+
+.. _`PyPI`: http://pypi.python.org/pypi/pathspec
+.. _`build`: https://pypi.org/project/build/
+.. _`typing-extensions`: https://pypi.org/project/typing-extensions/
+
+
+Documentation
+-------------
+
+Documentation for *pathspec* is available on `Read the Docs`_.
+
+The full change history can be found in `CHANGES.rst`_ and `Change History`_.
+
+An upgrade guide is available in `UPGRADING.rst`_ and `Upgrade Guide`_.
+
+.. _`CHANGES.rst`: https://github.com/cpburnz/python-pathspec/blob/master/CHANGES.rst
+.. _`Change History`: https://python-path-specification.readthedocs.io/en/stable/changes.html
+.. _`Read the Docs`: https://python-path-specification.readthedocs.io
+.. _`UPGRADING.rst`: https://github.com/cpburnz/python-pathspec/blob/master/UPGRADING.rst
+.. _`Upgrade Guide`: https://python-path-specification.readthedocs.io/en/stable/upgrading.html
+
+
+Other Languages
+---------------
+
+The related project `pathspec-ruby`_ (by *highb*) provides a similar library as
+a `Ruby gem`_.
+
+.. _`pathspec-ruby`: https://github.com/highb/pathspec-ruby
+.. _`Ruby gem`: https://rubygems.org/gems/pathspec
+
+
+Change History
+==============
+
+
+1.0.4 (2026-01-26)
+------------------
+
+- `Issue #103`_: Using re2 fails if pyre2 is also installed.
+
+.. _`Issue #103`: https://github.com/cpburnz/python-pathspec/issues/103
+
+
+1.0.3 (2026-01-09)
+------------------
+
+Bug fixes:
+
+- `Issue #101`_: pyright strict errors with pathspec >= 1.0.0.
+- `Issue #102`_: No module named 'tomllib'.
+
+
+.. _`Issue #101`: https://github.com/cpburnz/python-pathspec/issues/101
+.. _`Issue #102`: https://github.com/cpburnz/python-pathspec/issues/102
+
+
+1.0.2 (2026-01-07)
+------------------
+
+Bug fixes:
+
+- Type hint `collections.abc.Callable` does not properly replace `typing.Callable` until Python 3.9.2.
+
+
+1.0.1 (2026-01-06)
+------------------
+
+Bug fixes:
+
+- `Issue #100`_: ValueError(f"{patterns=!r} cannot be empty.") when using black.
+
+
+.. _`Issue #100`: https://github.com/cpburnz/python-pathspec/issues/100
+
+
+1.0.0 (2026-01-05)
+------------------
+
+Major changes:
+
+- `Issue #91`_: Dropped support of EoL Python 3.8.
+- Added concept of backends to allow for faster regular expression matching. The backend can be controlled using the `backend` argument to `PathSpec()`, `PathSpec.from_lines()`, `GitIgnoreSpec()`, and `GitIgnoreSpec.from_lines()`.
+- Renamed "gitwildmatch" pattern back to "gitignore". The "gitignore" pattern behaves slightly differently when used with `PathSpec` (*gitignore* as documented) than with `GitIgnoreSpec` (replicates *Git*'s edge cases).
+
+API changes:
+
+- Breaking: protected method `pathspec.pathspec.PathSpec._match_file()` (with a leading underscore) has been removed and replaced by backends. This does not affect normal usage of `PathSpec` or `GitIgnoreSpec`. Only custom subclasses will be affected. If this breaks your usage, let me know by `opening an issue <https://github.com/cpburnz/python-pathspec/issues>`_.
+- Deprecated: "gitwildmatch" is now an alias for "gitignore".
+- Deprecated: `pathspec.patterns.GitWildMatchPattern` is now an alias for `pathspec.patterns.gitignore.spec.GitIgnoreSpecPattern`.
+- Deprecated: `pathspec.patterns.gitwildmatch` module has been replaced by the `pathspec.patterns.gitignore` package.
+- Deprecated: `pathspec.patterns.gitwildmatch.GitWildMatchPattern` is now an alias for `pathspec.patterns.gitignore.spec.GitIgnoreSpecPattern`.
+- Deprecated: `pathspec.patterns.gitwildmatch.GitWildMatchPatternError` is now an alias for `pathspec.patterns.gitignore.GitIgnorePatternError`.
+- Removed: `pathspec.patterns.gitwildmatch.GitIgnorePattern` has been deprecated since v0.4 (2016-07-15).
+- Signature of method `pathspec.pattern.RegexPattern.match_file()` has been changed from `def match_file(self, file: str) -> RegexMatchResult | None` to `def match_file(self, file: AnyStr) -> RegexMatchResult | None` to reflect usage.
+- Signature of class method `pathspec.pattern.RegexPattern.pattern_to_regex()` has been changed from `def pattern_to_regex(cls, pattern: str) -> tuple[str, bool]` to `def pattern_to_regex(cls, pattern: AnyStr) -> tuple[AnyStr | None, bool | None]` to reflect usage and documentation.
+
+New features:
+
+- Added optional "hyperscan" backend using `hyperscan`_ library. It will automatically be used when installed. This dependency can be installed with ``pip install 'pathspec[hyperscan]'``.
+- Added optional "re2" backend using the `google-re2`_ library. It will automatically be used when installed. This dependency can be installed with ``pip install 'pathspec[re2]'``.
+- Added optional dependency on `typing-extensions`_ library to improve some type hints.
+
+Bug fixes:
+
+- `Issue #93`_: Do not remove leading spaces.
+- `Issue #95`_: Matching for files inside folder does not seem to behave like .gitignore's.
+- `Issue #98`_: UnboundLocalError in RegexPattern when initialized with `pattern=None`.
+- Type hint on return value of `pathspec.pattern.RegexPattern.match_file()` to match documentation.
+
+Improvements:
+
+- Mark Python 3.13 and 3.14 as supported.
+- No-op patterns are now filtered out when matching files, slightly improving performance.
+- Fix performance regression in `iter_tree_files()` from v0.10.
+
+
+.. _`Issue #38`: https://github.com/cpburnz/python-pathspec/issues/38
+.. _`Issue #91`: https://github.com/cpburnz/python-pathspec/issues/91
+.. _`Issue #93`: https://github.com/cpburnz/python-pathspec/issues/93
+.. _`Issue #95`: https://github.com/cpburnz/python-pathspec/issues/95
+.. _`Issue #98`: https://github.com/cpburnz/python-pathspec/issues/98
+.. _`google-re2`: https://pypi.org/project/google-re2/
+.. _`hyperscan`: https://pypi.org/project/hyperscan/
+.. _`typing-extensions`: https://pypi.org/project/typing-extensions/
+
diff --git a/.venv/lib/python3.12/site-packages/pathspec-1.0.4.dist-info/RECORD b/.venv/lib/python3.12/site-packages/pathspec-1.0.4.dist-info/RECORD
new file mode 100644
index 0000000..0341868
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec-1.0.4.dist-info/RECORD
@@ -0,0 +1,68 @@
+pathspec-1.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+pathspec-1.0.4.dist-info/METADATA,sha256=pekHVZjpp_VHVlDo7U032-fIhSGEbY_V8jjmYrEgaWM,13755
+pathspec-1.0.4.dist-info/RECORD,,
+pathspec-1.0.4.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+pathspec-1.0.4.dist-info/licenses/LICENSE,sha256=-rPda9qyJvHAhjCx3ZF-Efy07F4eAg4sFvg6ChOGPoU,16726
+pathspec/__init__.py,sha256=0PnZCecVo4UjsfA0EFGsAUikyz1jSDFmQP9gCoKXW_Y,1408
+pathspec/__pycache__/__init__.cpython-312.pyc,,
+pathspec/__pycache__/_meta.cpython-312.pyc,,
+pathspec/__pycache__/_typing.cpython-312.pyc,,
+pathspec/__pycache__/_version.cpython-312.pyc,,
+pathspec/__pycache__/backend.cpython-312.pyc,,
+pathspec/__pycache__/gitignore.cpython-312.pyc,,
+pathspec/__pycache__/pathspec.cpython-312.pyc,,
+pathspec/__pycache__/pattern.cpython-312.pyc,,
+pathspec/__pycache__/util.cpython-312.pyc,,
+pathspec/_backends/__init__.py,sha256=CjgX4uSPMC5UH4iy_IrdFXrcLQ_gwK8MKW5Qbspz_uE,130
+pathspec/_backends/__pycache__/__init__.cpython-312.pyc,,
+pathspec/_backends/__pycache__/_utils.cpython-312.pyc,,
+pathspec/_backends/__pycache__/agg.cpython-312.pyc,,
+pathspec/_backends/_utils.py,sha256=mDjbGpndOyVkt9Fue0WDWKTkk-jVqOejof9Bv9pzArE,1066
+pathspec/_backends/agg.py,sha256=naHFqYXMR53hwtgHtEHrwNJEBFpbUWbdMbF0zguxHlE,2505
+pathspec/_backends/hyperscan/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pathspec/_backends/hyperscan/__pycache__/__init__.cpython-312.pyc,,
+pathspec/_backends/hyperscan/__pycache__/_base.cpython-312.pyc,,
+pathspec/_backends/hyperscan/__pycache__/base.cpython-312.pyc,,
+pathspec/_backends/hyperscan/__pycache__/gitignore.cpython-312.pyc,,
+pathspec/_backends/hyperscan/__pycache__/pathspec.cpython-312.pyc,,
+pathspec/_backends/hyperscan/_base.py,sha256=b8E_kClW6Wtkdserr3qZzMPWVomrI4yhfxSlGVYdT3c,1719
+pathspec/_backends/hyperscan/base.py,sha256=BclDnsbCH6Fvx58YT6wqxGDcfWKNUQAcy_9jV63WkCI,563
+pathspec/_backends/hyperscan/gitignore.py,sha256=OyqtXEoZWrMB3Uh_2xNzY0aGK5UdBBjkFeGAFKQh7Oo,6761
+pathspec/_backends/hyperscan/pathspec.py,sha256=74RsGQt9x3nTxjz5S5grEQI34x8eFew78wluiIzhOpw,6500
+pathspec/_backends/re2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pathspec/_backends/re2/__pycache__/__init__.cpython-312.pyc,,
+pathspec/_backends/re2/__pycache__/_base.cpython-312.pyc,,
+pathspec/_backends/re2/__pycache__/base.cpython-312.pyc,,
+pathspec/_backends/re2/__pycache__/gitignore.cpython-312.pyc,,
+pathspec/_backends/re2/__pycache__/pathspec.cpython-312.pyc,,
+pathspec/_backends/re2/_base.py,sha256=VDThfjwEOnrDOfri_EnPifXH8pOYt71nxq3tUQAScfU,2149
+pathspec/_backends/re2/base.py,sha256=0sCZzhDpvyZLg9imO7BdE9KOmy3L0mgyHuzPhHWNbRU,462
+pathspec/_backends/re2/gitignore.py,sha256=0RPjCzg1vxE_6qDOL29V4qAyi9UnMKT2bb3k2XDimew,5094
+pathspec/_backends/re2/pathspec.py,sha256=aUtY_DdVHQyxHMbMGiovmXTIpuLKgIAeGtZerMVHIhI,4871
+pathspec/_backends/simple/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pathspec/_backends/simple/__pycache__/__init__.cpython-312.pyc,,
+pathspec/_backends/simple/__pycache__/gitignore.cpython-312.pyc,,
+pathspec/_backends/simple/__pycache__/pathspec.cpython-312.pyc,,
+pathspec/_backends/simple/gitignore.py,sha256=45SfH2SM-YF7CppdSrQ15z7A4GUAesFzLWs8QaKdER4,2865
+pathspec/_backends/simple/pathspec.py,sha256=Zzebst2evN8-juZr5w6VBwIox7LToYT4K2zD4Jp3M7U,2095
+pathspec/_meta.py,sha256=3sxdG_ghfAmwhV7AGeJS9VUZptsmaBFVSPhQqVLpiMk,2937
+pathspec/_typing.py,sha256=xega7efBH3B4StmBzxpGvrk-yJWYKnD6Lk5Id0IiHzc,1642
+pathspec/_version.py,sha256=iV7XOjXu_8FpfpC966oeh6PC-5XA35XwWlO7oI-p2ys,64
+pathspec/backend.py,sha256=QXFus8SgZ1hKH8LZ8eOnZcyGNTO1_YQYwRM_kTkvi2M,1161
+pathspec/gitignore.py,sha256=oFWfSgeecaJFSCgI0TwdYxz0jluQxztgf-T779OxIN8,5263
+pathspec/pathspec.py,sha256=5JhgxfZTyzUcG0bEUN91xTdcvF_S9sdhXGK59nIpDOY,15151
+pathspec/pattern.py,sha256=smqkNSWc9LmPZS1MqYBGjXFXZRteiSpwF8iAy9250DY,6695
+pathspec/patterns/__init__.py,sha256=6pfTpyrSIJxN8A12hKWpa9JFvVMTR39FV3QE1HBQbho,404
+pathspec/patterns/__pycache__/__init__.cpython-312.pyc,,
+pathspec/patterns/__pycache__/gitwildmatch.cpython-312.pyc,,
+pathspec/patterns/gitignore/__init__.py,sha256=MaSAZd0DDg0vCH9k1LslaJjBJw5DkX4ty-FuLmB1z_4,422
+pathspec/patterns/gitignore/__pycache__/__init__.cpython-312.pyc,,
+pathspec/patterns/gitignore/__pycache__/base.cpython-312.pyc,,
+pathspec/patterns/gitignore/__pycache__/basic.cpython-312.pyc,,
+pathspec/patterns/gitignore/__pycache__/spec.cpython-312.pyc,,
+pathspec/patterns/gitignore/base.py,sha256=mkLYm-prSD2SXNDpxnFhL0FRV8FRPAsIBVeXyNOWjCI,4688
+pathspec/patterns/gitignore/basic.py,sha256=0pTlzzJt8qMpy-SnGHhozZVWVDH9ErPDy29MV3Q8UOw,9924
+pathspec/patterns/gitignore/spec.py,sha256=8jB3Q7Wbb6fLvtIfNax89tEtw2UZgATbAKnpGQleU8Q,10186
+pathspec/patterns/gitwildmatch.py,sha256=bF2PUtc9gOFHuFwHJ035x91y3R8An5dIY5oRibylsco,1463
+pathspec/py.typed,sha256=wq7wwDeyBungK6DsiV4O-IujgKzARwHz94uQshdpdEU,68
+pathspec/util.py,sha256=KbG9seqfTOBLPoSJ8I4CdeDFVof6rDGCMy69cZb4Du4,24728
diff --git a/.venv/lib/python3.12/site-packages/pathspec-1.0.4.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/pathspec-1.0.4.dist-info/WHEEL
new file mode 100644
index 0000000..d8b9936
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec-1.0.4.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: flit 3.12.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/.venv/lib/python3.12/site-packages/pathspec-1.0.4.dist-info/licenses/LICENSE b/.venv/lib/python3.12/site-packages/pathspec-1.0.4.dist-info/licenses/LICENSE
new file mode 100644
index 0000000..14e2f77
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec-1.0.4.dist-info/licenses/LICENSE
@@ -0,0 +1,373 @@
+Mozilla Public License Version 2.0
+==================================
+
+1. Definitions
+--------------
+
+1.1. "Contributor"
+    means each individual or legal entity that creates, contributes to
+    the creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+    means the combination of the Contributions of others (if any) used
+    by a Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+    means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+    means Source Code Form to which the initial Contributor has attached
+    the notice in Exhibit A, the Executable Form of such Source Code
+    Form, and Modifications of such Source Code Form, in each case
+    including portions thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+    means
+
+    (a) that the initial Contributor has attached the notice described
+        in Exhibit B to the Covered Software; or
+
+    (b) that the Covered Software was made available under the terms of
+        version 1.1 or earlier of the License, but not also under the
+        terms of a Secondary License.
+
+1.6. "Executable Form"
+    means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+    means a work that combines Covered Software with other material, in 
+    a separate file or files, that is not Covered Software.
+
+1.8. "License"
+    means this document.
+
+1.9. "Licensable"
+    means having the right to grant, to the maximum extent possible,
+    whether at the time of the initial grant or subsequently, any and
+    all of the rights conveyed by this License.
+
+1.10. "Modifications"
+    means any of the following:
+
+    (a) any file in Source Code Form that results from an addition to,
+        deletion from, or modification of the contents of Covered
+        Software; or
+
+    (b) any new file in Source Code Form that contains any Covered
+        Software.
+
+1.11. "Patent Claims" of a Contributor
+    means any patent claim(s), including without limitation, method,
+    process, and apparatus claims, in any patent Licensable by such
+    Contributor that would be infringed, but for the grant of the
+    License, by the making, using, selling, offering for sale, having
+    made, import, or transfer of either its Contributions or its
+    Contributor Version.
+
+1.12. "Secondary License"
+    means either the GNU General Public License, Version 2.0, the GNU
+    Lesser General Public License, Version 2.1, the GNU Affero General
+    Public License, Version 3.0, or any later versions of those
+    licenses.
+
+1.13. "Source Code Form"
+    means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+    means an individual or a legal entity exercising rights under this
+    License. For legal entities, "You" includes any entity that
+    controls, is controlled by, or is under common control with You. For
+    purposes of this definition, "control" means (a) the power, direct
+    or indirect, to cause the direction or management of such entity,
+    whether by contract or otherwise, or (b) ownership of more than
+    fifty percent (50%) of the outstanding shares or beneficial
+    ownership of such entity.
+
+2. License Grants and Conditions
+--------------------------------
+
+2.1. Grants
+
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark)
+    Licensable by such Contributor to use, reproduce, make available,
+    modify, display, perform, distribute, and otherwise exploit its
+    Contributions, either on an unmodified basis, with Modifications, or
+    as part of a Larger Work; and
+
+(b) under Patent Claims of such Contributor to make, use, sell, offer
+    for sale, have made, import, and otherwise transfer either its
+    Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+The licenses granted in Section 2.1 with respect to any Contribution
+become effective for each Contribution on the date the Contributor first
+distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+The licenses granted in this Section 2 are the only rights granted under
+this License. No additional rights or licenses will be implied from the
+distribution or licensing of Covered Software under this License.
+Notwithstanding Section 2.1(b) above, no patent license is granted by a
+Contributor:
+
+(a) for any code that a Contributor has removed from Covered Software;
+    or
+
+(b) for infringements caused by: (i) Your and any other third party's
+    modifications of Covered Software, or (ii) the combination of its
+    Contributions with other software (except as part of its Contributor
+    Version); or
+
+(c) under Patent Claims infringed by Covered Software in the absence of
+    its Contributions.
+
+This License does not grant any rights in the trademarks, service marks,
+or logos of any Contributor (except as may be necessary to comply with
+the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+No Contributor makes additional grants as a result of Your choice to
+distribute the Covered Software under a subsequent version of this
+License (see Section 10.2) or under the terms of a Secondary License (if
+permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+Each Contributor represents that the Contributor believes its
+Contributions are its original creation(s) or it has sufficient rights
+to grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+This License is not intended to limit any rights You have under
+applicable copyright doctrines of fair use, fair dealing, or other
+equivalents.
+
+2.7. Conditions
+
+Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
+in Section 2.1.
+
+3. Responsibilities
+-------------------
+
+3.1. Distribution of Source Form
+
+All distribution of Covered Software in Source Code Form, including any
+Modifications that You create or to which You contribute, must be under
+the terms of this License. You must inform recipients that the Source
+Code Form of the Covered Software is governed by the terms of this
+License, and how they can obtain a copy of this License. You may not
+attempt to alter or restrict the recipients' rights in the Source Code
+Form.
+
+3.2. Distribution of Executable Form
+
+If You distribute Covered Software in Executable Form then:
+
+(a) such Covered Software must also be made available in Source Code
+    Form, as described in Section 3.1, and You must inform recipients of
+    the Executable Form how they can obtain a copy of such Source Code
+    Form by reasonable means in a timely manner, at a charge no more
+    than the cost of distribution to the recipient; and
+
+(b) You may distribute such Executable Form under the terms of this
+    License, or sublicense it under different terms, provided that the
+    license for the Executable Form does not attempt to limit or alter
+    the recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+You may create and distribute a Larger Work under terms of Your choice,
+provided that You also comply with the requirements of this License for
+the Covered Software. If the Larger Work is a combination of Covered
+Software with a work governed by one or more Secondary Licenses, and the
+Covered Software is not Incompatible With Secondary Licenses, this
+License permits You to additionally distribute such Covered Software
+under the terms of such Secondary License(s), so that the recipient of
+the Larger Work may, at their option, further distribute the Covered
+Software under the terms of either this License or such Secondary
+License(s).
+
+3.4. Notices
+
+You may not remove or alter the substance of any license notices
+(including copyright notices, patent notices, disclaimers of warranty,
+or limitations of liability) contained within the Source Code Form of
+the Covered Software, except that You may alter any license notices to
+the extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+You may choose to offer, and to charge a fee for, warranty, support,
+indemnity or liability obligations to one or more recipients of Covered
+Software. However, You may do so only on Your own behalf, and not on
+behalf of any Contributor. You must make it absolutely clear that any
+such warranty, support, indemnity, or liability obligation is offered by
+You alone, and You hereby agree to indemnify every Contributor for any
+liability incurred by such Contributor as a result of warranty, support,
+indemnity or liability terms You offer. You may include additional
+disclaimers of warranty and limitations of liability specific to any
+jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+---------------------------------------------------
+
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Software due to
+statute, judicial order, or regulation then You must: (a) comply with
+the terms of this License to the maximum extent possible; and (b)
+describe the limitations and the code they affect. Such description must
+be placed in a text file included with all distributions of the Covered
+Software under this License. Except to the extent prohibited by statute
+or regulation, such description must be sufficiently detailed for a
+recipient of ordinary skill to be able to understand it.
+
+5. Termination
+--------------
+
+5.1. The rights granted under this License will terminate automatically
+if You fail to comply with any of its terms. However, if You become
+compliant, then the rights granted under this License from a particular
+Contributor are reinstated (a) provisionally, unless and until such
+Contributor explicitly and finally terminates Your grants, and (b) on an
+ongoing basis, if such Contributor fails to notify You of the
+non-compliance by some reasonable means prior to 60 days after You have
+come back into compliance. Moreover, Your grants from a particular
+Contributor are reinstated on an ongoing basis if such Contributor
+notifies You of the non-compliance by some reasonable means, this is the
+first time You have received notice of non-compliance with this License
+from such Contributor, and You become compliant prior to 30 days after
+Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+infringement claim (excluding declaratory judgment actions,
+counter-claims, and cross-claims) alleging that a Contributor Version
+directly or indirectly infringes any patent, then the rights granted to
+You by any and all Contributors for the Covered Software under Section
+2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all
+end user license agreements (excluding distributors and resellers) which
+have been validly granted by You or Your distributors under this License
+prior to termination shall survive termination.
+
+************************************************************************
+*                                                                      *
+*  6. Disclaimer of Warranty                                           *
+*  -------------------------                                           *
+*                                                                      *
+*  Covered Software is provided under this License on an "as is"       *
+*  basis, without warranty of any kind, either expressed, implied, or  *
+*  statutory, including, without limitation, warranties that the       *
+*  Covered Software is free of defects, merchantable, fit for a        *
+*  particular purpose or non-infringing. The entire risk as to the     *
+*  quality and performance of the Covered Software is with You.        *
+*  Should any Covered Software prove defective in any respect, You     *
+*  (not any Contributor) assume the cost of any necessary servicing,   *
+*  repair, or correction. This disclaimer of warranty constitutes an   *
+*  essential part of this License. No use of any Covered Software is   *
+*  authorized under this License except under this disclaimer.         *
+*                                                                      *
+************************************************************************
+
+************************************************************************
+*                                                                      *
+*  7. Limitation of Liability                                          *
+*  --------------------------                                          *
+*                                                                      *
+*  Under no circumstances and under no legal theory, whether tort      *
+*  (including negligence), contract, or otherwise, shall any           *
+*  Contributor, or anyone who distributes Covered Software as          *
+*  permitted above, be liable to You for any direct, indirect,         *
+*  special, incidental, or consequential damages of any character      *
+*  including, without limitation, damages for lost profits, loss of    *
+*  goodwill, work stoppage, computer failure or malfunction, or any    *
+*  and all other commercial damages or losses, even if such party      *
+*  shall have been informed of the possibility of such damages. This   *
+*  limitation of liability shall not apply to liability for death or   *
+*  personal injury resulting from such party's negligence to the       *
+*  extent applicable law prohibits such limitation. Some               *
+*  jurisdictions do not allow the exclusion or limitation of           *
+*  incidental or consequential damages, so this exclusion and          *
+*  limitation may not apply to You.                                    *
+*                                                                      *
+************************************************************************
+
+8. Litigation
+-------------
+
+Any litigation relating to this License may be brought only in the
+courts of a jurisdiction where the defendant maintains its principal
+place of business and such litigation shall be governed by laws of that
+jurisdiction, without reference to its conflict-of-law provisions.
+Nothing in this Section shall prevent a party's ability to bring
+cross-claims or counter-claims.
+
+9. Miscellaneous
+----------------
+
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision of this License is held to be
+unenforceable, such provision shall be reformed only to the extent
+necessary to make it enforceable. Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+  This Source Code Form is subject to the terms of the Mozilla Public
+  License, v. 2.0. If a copy of the MPL was not distributed with this
+  file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+---------------------------------------------------------
+
+  This Source Code Form is "Incompatible With Secondary Licenses", as
+  defined by the Mozilla Public License, v. 2.0.
diff --git a/.venv/lib/python3.12/site-packages/pathspec/__init__.py b/.venv/lib/python3.12/site-packages/pathspec/__init__.py
new file mode 100644
index 0000000..f41cfee
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/__init__.py
@@ -0,0 +1,68 @@
+"""
+The *pathspec* package provides pattern matching for file paths. So far this
+only includes Git's *gitignore* patterns.
+
+The following classes are imported and made available from the root of the
+`pathspec` package:
+
+-	:class:`pathspec.gitignore.GitIgnoreSpec`
+
+-	:class:`pathspec.pathspec.PathSpec`
+
+-	:class:`pathspec.pattern.Pattern`
+
+-	:class:`pathspec.pattern.RegexPattern`
+
+-	:class:`pathspec.util.RecursionError`
+
+The following functions are also imported:
+
+-	:func:`pathspec.util.lookup_pattern`
+
+The following deprecated functions are also imported to maintain backward
+compatibility:
+
+-	:func:`pathspec.util.iter_tree`
+
+-	:func:`pathspec.util.match_files`
+"""
+
+from .gitignore import (
+	GitIgnoreSpec)
+from .pathspec import (
+	PathSpec)
+from .pattern import (
+	Pattern,
+	RegexPattern)
+from .util import (
+	RecursionError,
+	iter_tree,  # Deprecated since 0.10.0.
+	lookup_pattern,
+	match_files)  # Deprecated since 0.10.0.
+
+from ._meta import (
+	__author__,
+	__copyright__,
+	__credits__,
+	__license__)
+from ._version import (
+	__version__)
+
+# Load pattern implementations.
+from . import patterns
+
+# Declare private imports as part of the public interface. Deprecated imports
+# are deliberately excluded.
+__all__ = [
+	'GitIgnoreSpec',
+	'PathSpec',
+	'Pattern',
+	'RecursionError',
+	'RegexPattern',
+	'__author__',
+	'__copyright__',
+	'__credits__',
+	'__license__',
+	'__version__',
+	'lookup_pattern',
+]
diff --git a/.venv/lib/python3.12/site-packages/pathspec/_backends/__init__.py b/.venv/lib/python3.12/site-packages/pathspec/_backends/__init__.py
new file mode 100644
index 0000000..72c4949
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/_backends/__init__.py
@@ -0,0 +1,4 @@
+"""
+WARNING: The *pathspec._backends* package is not part of the public API. Its
+contents and structure are likely to change.
+"""
diff --git a/.venv/lib/python3.12/site-packages/pathspec/_backends/_utils.py b/.venv/lib/python3.12/site-packages/pathspec/_backends/_utils.py
new file mode 100644
index 0000000..77c7cd9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/_backends/_utils.py
@@ -0,0 +1,45 @@
+"""
+This module provides private utility functions for backends.
+
+WARNING: The *pathspec._backends* package is not part of the public API. Its
+contents and structure are likely to change.
+"""
+
+from collections.abc import (
+	Iterable)
+from typing import (
+	TypeVar)
+
+from pathspec.pattern import (
+	Pattern)
+
+TPattern = TypeVar("TPattern", bound=Pattern)
+
+
+def enumerate_patterns(
+	patterns: Iterable[TPattern],
+	filter: bool,
+	reverse: bool,
+) -> list[tuple[int, TPattern]]:
+	"""
+	Enumerate the patterns.
+
+	*patterns* (:class:`Iterable` of :class:`.Pattern`) contains the patterns.
+
+	*filter* (:class:`bool`) is whether to remove no-op patterns (:data:`True`),
+	or keep them (:data:`False`).
+
+	*reverse* (:class:`bool`) is whether to reverse the pattern order
+	(:data:`True`), or keep the order (:data:`False`).
+
+	Returns the enumerated patterns (:class:`list` of :class:`tuple`).
+	"""
+	out_patterns = [
+		(__i, __pat)
+		for __i, __pat in enumerate(patterns)
+		if not filter or __pat.include is not None
+	]
+	if reverse:
+		out_patterns.reverse()
+
+	return out_patterns
diff --git a/.venv/lib/python3.12/site-packages/pathspec/_backends/agg.py b/.venv/lib/python3.12/site-packages/pathspec/_backends/agg.py
new file mode 100644
index 0000000..c387146
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/_backends/agg.py
@@ -0,0 +1,104 @@
+"""
+This module provides aggregated private data and utility functions for the
+available backends.
+
+WARNING: The *pathspec._backends* package is not part of the public API. Its
+contents and structure are likely to change.
+"""
+
+from collections.abc import (
+	Sequence)
+from typing import (
+	cast)
+
+from pathspec.backend import (
+	BackendNamesHint,
+	_Backend)
+from pathspec.pattern import (
+	Pattern,
+	RegexPattern)
+
+from .hyperscan.base import (
+	hyperscan_error)
+from .hyperscan.gitignore import (
+	HyperscanGiBackend)
+from .hyperscan.pathspec import (
+	HyperscanPsBackend)
+from .re2.base import (
+	re2_error)
+from .re2.gitignore import (
+	Re2GiBackend)
+from .re2.pathspec import (
+	Re2PsBackend)
+from .simple.gitignore import (
+	SimpleGiBackend)
+from .simple.pathspec import (
+	SimplePsBackend)
+
+_BEST_BACKEND: BackendNamesHint
+"""
+The best available backend.
+"""
+
+if re2_error is None:
+	_BEST_BACKEND = 're2'
+elif hyperscan_error is None:
+	_BEST_BACKEND = 'hyperscan'
+else:
+	_BEST_BACKEND = 'simple'
+
+
+def make_gitignore_backend(
+	name: BackendNamesHint,
+	patterns: Sequence[Pattern],
+) -> _Backend:
+	"""
+	Create the specified backend with the supplied patterns for
+	:class:`~pathspec.gitignore.GitIgnoreSpec`.
+
+	*name* (:class:`str`) is the name of the backend.
+
+	*patterns* (:class:`.Iterable` of :class:`.Pattern`) contains the compiled
+	patterns.
+
+	Returns the backend (:class:`._Backend`).
+	"""
+	if name == 'best':
+		name = _BEST_BACKEND
+
+	if name == 'hyperscan':
+		return HyperscanGiBackend(cast(Sequence[RegexPattern], patterns))
+	elif name == 're2':
+		return Re2GiBackend(cast(Sequence[RegexPattern], patterns))
+	elif name == 'simple':
+		return SimpleGiBackend(cast(Sequence[RegexPattern], patterns))
+	else:
+		raise ValueError(f"Backend {name=!r} is invalid.")
+
+
+def make_pathspec_backend(
+	name: BackendNamesHint,
+	patterns: Sequence[Pattern],
+) -> _Backend:
+	"""
+	Create the specified backend with the supplied patterns for
+	:class:`~pathspec.pathspec.PathSpec`.
+
+	*name* (:class:`str`) is the name of the backend.
+
+	*patterns* (:class:`Iterable` of :class:`Pattern`) contains the compiled
+	patterns.
+
+	Returns the backend (:class:`._Backend`).
+	"""
+	if name == 'best':
+		name = _BEST_BACKEND
+
+	if name == 'hyperscan':
+		return HyperscanPsBackend(cast(Sequence[RegexPattern], patterns))
+	elif name == 're2':
+		return Re2PsBackend(cast(Sequence[RegexPattern], patterns))
+	elif name == 'simple':
+		return SimplePsBackend(patterns)
+	else:
+		raise ValueError(f"Backend {name=!r} is invalid.")
diff --git a/.venv/lib/python3.12/site-packages/pathspec/_backends/hyperscan/__init__.py b/.venv/lib/python3.12/site-packages/pathspec/_backends/hyperscan/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/.venv/lib/python3.12/site-packages/pathspec/_backends/hyperscan/_base.py b/.venv/lib/python3.12/site-packages/pathspec/_backends/hyperscan/_base.py
new file mode 100644
index 0000000..cb58f48
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/_backends/hyperscan/_base.py
@@ -0,0 +1,78 @@
+"""
+This module provides private data for the base implementation for the
+:module:`hyperscan` library.
+
+WARNING: The *pathspec._backends.hyperscan* package is not part of the public
+API. Its contents and structure are likely to change.
+"""
+from __future__ import annotations
+
+from dataclasses import (
+	dataclass)
+from typing import (
+	Union)  # Replaced by `X | Y` in 3.10.
+
+try:
+	import hyperscan
+except ModuleNotFoundError:
+	hyperscan = None
+	HS_FLAGS = 0
+else:
+	HS_FLAGS = hyperscan.HS_FLAG_SINGLEMATCH | hyperscan.HS_FLAG_UTF8
+
+HS_FLAGS: int
+"""
+The hyperscan flags to use:
+
+-	HS_FLAG_SINGLEMATCH is needed to ensure the partial patterns only match once.
+
+-	HS_FLAG_UTF8 is required to support unicode paths.
+"""
+
+
+@dataclass(frozen=True)
+class HyperscanExprDat(object):
+	"""
+	The :class:`HyperscanExprDat` class is used to store data related to an
+	expression.
+	"""
+
+	# The slots argument is not supported until Python 3.10.
+	__slots__ = [
+		'include',
+		'index',
+		'is_dir_pattern',
+	]
+
+	include: bool
+	"""
+	*include* (:class:`bool`) is whether the matched files should be
+	included (:data:`True`), or excluded (:data:`False`).
+	"""
+
+	index: int
+	"""
+	*index* (:class:`int`) is the pattern index.
+	"""
+
+	is_dir_pattern: bool
+	"""
+	*is_dir_pattern* (:class:`bool`) is whether the pattern is a directory
+	pattern for gitignore.
+	"""
+
+
+@dataclass(frozen=True)
+class HyperscanExprDebug(HyperscanExprDat):
+	"""
+	The :class:`HyperscanExprDebug` class stores additional debug information
+	related to an expression.
+	"""
+
+	# The slots argument is not supported until Python 3.10.
+	__slots__ = ['regex']
+
+	regex: Union[str, bytes]
+	"""
+	*regex* (:class:`str` or :class:`bytes`) is the regular expression.
+	"""
diff --git a/.venv/lib/python3.12/site-packages/pathspec/_backends/hyperscan/base.py b/.venv/lib/python3.12/site-packages/pathspec/_backends/hyperscan/base.py
new file mode 100644
index 0000000..ac219b4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/_backends/hyperscan/base.py
@@ -0,0 +1,24 @@
+"""
+This module provides the base implementation for the :module:`hyperscan`
+backend.
+
+WARNING: The *pathspec._backends.hyperscan* package is not part of the public
+API. Its contents and structure are likely to change.
+"""
+from __future__ import annotations
+
+from typing import (
+	Optional)
+
+try:
+	import hyperscan
+	hyperscan_error = None
+except ModuleNotFoundError as e:
+	hyperscan = None
+	hyperscan_error = e
+
+hyperscan_error: Optional[ModuleNotFoundError]
+"""
+*hyperscan_error* (:class:`ModuleNotFoundError` or :data:`None`) is the
+hyperscan import error.
+"""
diff --git a/.venv/lib/python3.12/site-packages/pathspec/_backends/hyperscan/gitignore.py b/.venv/lib/python3.12/site-packages/pathspec/_backends/hyperscan/gitignore.py
new file mode 100644
index 0000000..2428b59
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/_backends/hyperscan/gitignore.py
@@ -0,0 +1,245 @@
+"""
+This module provides the :module:`hyperscan` backend for :class:`~pathspec.gitignore.GitIgnoreSpec`.
+
+WARNING: The *pathspec._backends.hyperscan* package is not part of the public
+API. Its contents and structure are likely to change.
+"""
+from __future__ import annotations
+
+from collections.abc import (
+	Sequence)
+from typing import (
+	Any,
+	Callable,  # Replaced by `collections.abc.Callable` in 3.9.2.
+	Optional,  # Replaced by `X | None` in 3.10.
+	Union)  # Replaced by `X | Y` in 3.10.
+
+try:
+	import hyperscan
+except ModuleNotFoundError:
+	hyperscan = None
+
+from pathspec.pattern import (
+	RegexPattern)
+from pathspec.patterns.gitignore.spec import (
+	GitIgnoreSpecPattern,
+	_BYTES_ENCODING,
+	_DIR_MARK_CG,
+	_DIR_MARK_OPT)
+from pathspec._typing import (
+	override)  # Added in 3.12.
+
+from ._base import (
+	HS_FLAGS,
+	HyperscanExprDat,
+	HyperscanExprDebug)
+from .pathspec import (
+	HyperscanPsBackend)
+
+
+class HyperscanGiBackend(HyperscanPsBackend):
+	"""
+	The :class:`HyperscanGiBackend` class is the :module:`hyperscan`
+	implementation used by :class:`~pathspec.gitignore.GitIgnoreSpec`. The
+	Hyperscan database uses block mode for matching files.
+	"""
+
+	# Change type hint.
+	_out: tuple[Optional[bool], int, int]
+
+	def __init__(
+		self,
+		patterns: Sequence[RegexPattern],
+		*,
+		_debug_exprs: Optional[bool] = None,
+		_test_sort: Optional[Callable[[list], None]] = None,
+	) -> None:
+		"""
+		Initialize the :class:`HyperscanGiBackend` instance.
+
+		*patterns* (:class:`Sequence` of :class:`.RegexPattern`) contains the
+		compiled patterns.
+		"""
+		super().__init__(patterns, _debug_exprs=_debug_exprs, _test_sort=_test_sort)
+
+		self._out = (None, -1, 0)
+		"""
+		*_out* (:class:`tuple`) stores the current match:
+
+		-	*0* (:class:`bool` or :data:`None`) is the match include.
+
+		-	*1* (:class:`int`) is the match index.
+
+		-	*2* (:class:`int`) is the match priority.
+		"""
+
+	@override
+	@staticmethod
+	def _init_db(
+		db: hyperscan.Database,
+		debug: bool,
+		patterns: list[tuple[int, RegexPattern]],
+		sort_ids: Optional[Callable[[list[int]], None]],
+	) -> list[HyperscanExprDat]:
+		"""
+		Create the Hyperscan database from the given patterns.
+
+		*db* (:class:`hyperscan.Database`) is the Hyperscan database.
+
+		*debug* (:class:`bool`) is whether to include additional debugging
+		information for the expressions.
+
+		*patterns* (:class:`~collections.abc.Sequence` of :class:`.RegexPattern`)
+		contains the patterns.
+
+		*sort_ids* (:class:`callable` or :data:`None`) is a function used to sort
+		the compiled expression ids. This is used during testing to ensure the order
+		of expressions is not accidentally relied on.
+
+		Returns a :class:`list` indexed by expression id (:class:`int`) to its data
+		(:class:`HyperscanExprDat`).
+		"""
+		# WARNING: Hyperscan raises a `hyperscan.error` exception when compiled with
+		# zero elements.
+		assert patterns, patterns
+
+		# Prepare patterns.
+		expr_data: list[HyperscanExprDat] = []
+		exprs: list[bytes] = []
+		for pattern_index, pattern in patterns:
+			assert pattern.include is not None, (pattern_index, pattern)
+
+			# Encode regex.
+			assert isinstance(pattern, RegexPattern), pattern
+			regex = pattern.regex.pattern
+
+			use_regexes: list[tuple[Union[str, bytes], bool]] = []
+			if isinstance(pattern, GitIgnoreSpecPattern):
+				# GitIgnoreSpecPattern uses capture groups for its directory marker but
+				# Hyperscan does not support capture groups. Handle this scenario.
+				regex_str: str
+				if isinstance(regex, str):
+					regex_str: str = regex
+				else:
+					assert isinstance(regex, bytes), regex
+					regex_str = regex.decode(_BYTES_ENCODING)
+
+				if _DIR_MARK_CG in regex_str:
+					# Found directory marker.
+					if regex_str.endswith(_DIR_MARK_OPT):
+						# Regex has optional directory marker. Split regex into directory
+						# and file variants.
+						base_regex = regex_str[:-len(_DIR_MARK_OPT)]
+						use_regexes.append((f'{base_regex}/', True))
+						use_regexes.append((f'{base_regex}$', False))
+					else:
+						# Remove capture group.
+						base_regex = regex_str.replace(_DIR_MARK_CG, '/')
+						use_regexes.append((base_regex, True))
+
+			if not use_regexes:
+				# No special case for regex.
+				use_regexes.append((regex, False))
+
+			for regex, is_dir_pattern in use_regexes:
+				if isinstance(regex, bytes):
+					regex_bytes = regex
+				else:
+					assert isinstance(regex, str), regex
+					regex_bytes = regex.encode('utf8')
+
+				if debug:
+					expr_data.append(HyperscanExprDebug(
+						include=pattern.include,
+						index=pattern_index,
+						is_dir_pattern=is_dir_pattern,
+						regex=regex,
+					))
+				else:
+					expr_data.append(HyperscanExprDat(
+						include=pattern.include,
+						index=pattern_index,
+						is_dir_pattern=is_dir_pattern,
+					))
+
+				exprs.append(regex_bytes)
+
+		# Sort expressions.
+		ids = list(range(len(exprs)))
+		if sort_ids is not None:
+			sort_ids(ids)
+			exprs = [exprs[__id] for __id in ids]
+
+		# Compile patterns.
+		db.compile(
+			expressions=exprs,
+			ids=ids,
+			elements=len(exprs),
+			flags=HS_FLAGS,
+		)
+		return expr_data
+
+	@override
+	def match_file(self, file: str) -> tuple[Optional[bool], Optional[int]]:
+		"""
+		Check the file against the patterns.
+
+		*file* (:class:`str`) is the normalized file path to check.
+
+		Returns a :class:`tuple` containing whether to include *file* (:class:`bool`
+		or :data:`None`), and the index of the last matched pattern (:class:`int` or
+		:data:`None`).
+		"""
+		# NOTICE: According to benchmarking, a method callback is 13% faster than
+		# using a closure here.
+		db = self._db
+		if self._db is None:
+			# Database was not initialized because there were no patterns. Return no
+			# match.
+			return (None, None)
+
+		self._out = (None, -1, 0)
+		db.scan(file.encode('utf8'), match_event_handler=self.__on_match)
+
+		out_include, out_index = self._out[:2]
+		if out_index == -1:
+			out_index = None
+
+		return (out_include, out_index)
+
+	@override
+	def __on_match(
+		self,
+		expr_id: int,
+		_from: int,
+		_to: int,
+		_flags: int,
+		_context: Any,
+	) -> Optional[bool]:
+		"""
+		Called on each match.
+
+		*expr_id* (:class:`int`) is the expression id (index) of the matched
+		pattern.
+		"""
+		expr_dat = self._expr_data[expr_id]
+
+		is_dir_pattern = expr_dat.is_dir_pattern
+		if is_dir_pattern:
+			# Pattern matched by a directory pattern.
+			priority = 1
+		else:
+			# Pattern matched by a file pattern.
+			priority = 2
+
+		# WARNING: Hyperscan does not guarantee matches will be produced in order!
+		include = expr_dat.include
+		index = expr_dat.index
+		prev_index = self._out[1]
+		prev_priority = self._out[2]
+		if (
+			(include and is_dir_pattern and index > prev_index)
+			or (priority == prev_priority and index > prev_index)
+			or priority > prev_priority
+		):
+			self._out = (include, expr_dat.index, priority)
diff --git a/.venv/lib/python3.12/site-packages/pathspec/_backends/hyperscan/pathspec.py b/.venv/lib/python3.12/site-packages/pathspec/_backends/hyperscan/pathspec.py
new file mode 100644
index 0000000..d55c314
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/_backends/hyperscan/pathspec.py
@@ -0,0 +1,251 @@
+"""
+This module provides the :module:`hyperscan` backend for :class:`~pathspec.pathspec.PathSpec`.
+
+WARNING: The *pathspec._backends.hyperscan* package is not part of the public
+API. Its contents and structure are likely to change.
+"""
+from __future__ import annotations
+
+from collections.abc import (
+	Sequence)
+from typing import (
+	Any,
+	Callable,  # Replaced by `collections.abc.Callable` in 3.9.2.
+	Optional)  # Replaced by `X | None` in 3.10.
+
+try:
+	import hyperscan
+except ModuleNotFoundError:
+	hyperscan = None
+
+from pathspec.backend import (
+	_Backend)
+from pathspec.pattern import (
+	RegexPattern)
+from pathspec._typing import (
+	override)  # Added in 3.12.
+
+from .._utils import (
+	enumerate_patterns)
+
+from .base import (
+	hyperscan_error)
+from ._base import (
+	HS_FLAGS,
+	HyperscanExprDat,
+	HyperscanExprDebug)
+
+
+class HyperscanPsBackend(_Backend):
+	"""
+	The :class:`HyperscanPsBackend` class is the :module:`hyperscan`
+	implementation used by :class:`~pathspec.pathspec.PathSpec` for matching
+	files. The Hyperscan database uses block mode for matching files.
+	"""
+
+	def __init__(
+		self,
+		patterns: Sequence[RegexPattern],
+		*,
+		_debug_exprs: Optional[bool] = None,
+		_test_sort: Optional[Callable[[list], None]] = None,
+	) -> None:
+		"""
+		Initialize the :class:`HyperscanPsBackend` instance.
+
+		*patterns* (:class:`Sequence` of :class:`.RegexPattern`) contains the
+		compiled patterns.
+		"""
+		if hyperscan is None:
+			raise hyperscan_error
+
+		if patterns and not isinstance(patterns[0], RegexPattern):
+			raise TypeError(f"{patterns[0]=!r} must be a RegexPattern.")
+
+		use_patterns = enumerate_patterns(
+			patterns, filter=True, reverse=False,
+		)
+
+		debug_exprs = bool(_debug_exprs)
+		if use_patterns:
+			db = self._make_db()
+			expr_data = self._init_db(
+				db=db,
+				debug=debug_exprs,
+				patterns=use_patterns,
+				sort_ids=_test_sort,
+			)
+		else:
+			# WARNING: The hyperscan database cannot be initialized with zero
+			# patterns.
+			db = None
+			expr_data = []
+
+		self._db: Optional[hyperscan.Database] = db
+		"""
+		*_db* (:class:`hyperscan.Database`) is the Hyperscan database.
+		"""
+
+		self._debug_exprs = debug_exprs
+		"""
+		*_debug_exprs* (:class:`bool`) is whether to include additional debugging
+		information for the expressions.
+		"""
+
+		self._expr_data: list[HyperscanExprDat] = expr_data
+		"""
+		*_expr_data* (:class:`list`) maps expression index (:class:`int`) to
+		expression data (:class:`HyperscanExprDat`).
+		"""
+
+		self._out: tuple[Optional[bool], int] = (None, -1)
+		"""
+		*_out* (:class:`tuple`) stores the current match:
+
+		-	*0* (:class:`bool` or :data:`None`) is the match include.
+
+		-	*1* (:class:`int`) is the match index.
+		"""
+
+		self._patterns: dict[int, RegexPattern] = dict(use_patterns)
+		"""
+		*_patterns* (:class:`dict`) maps pattern index (:class:`int`) to pattern
+		(:class:`RegexPattern`).
+		"""
+
+	@staticmethod
+	def _init_db(
+		db: hyperscan.Database,
+		debug: bool,
+		patterns: list[tuple[int, RegexPattern]],
+		sort_ids: Optional[Callable[[list[int]], None]],
+	) -> list[HyperscanExprDat]:
+		"""
+		Initialize the Hyperscan database from the given patterns.
+
+		*db* (:class:`hyperscan.Database`) is the Hyperscan database.
+
+		*debug* (:class:`bool`) is whether to include additional debugging
+		information for the expressions.
+
+		*patterns* (:class:`~collections.abc.Sequence` of :class:`.RegexPattern`)
+		contains the patterns.
+
+		*sort_ids* (:class:`callable` or :data:`None`) is a function used to sort
+		the compiled expression ids. This is used during testing to ensure the order
+		of expressions is not accidentally relied on.
+
+		Returns a :class:`list` indexed by expression id (:class:`int`) to its data
+		(:class:`HyperscanExprDat`).
+		"""
+		# WARNING: Hyperscan raises a `hyperscan.error` exception when compiled with
+		# zero elements.
+		assert patterns, patterns
+
+		# Prepare patterns.
+		expr_data: list[HyperscanExprDat] = []
+		exprs: list[bytes] = []
+		for pattern_index, pattern in patterns:
+			assert pattern.include is not None, (pattern_index, pattern)
+
+			# Encode regex.
+			assert isinstance(pattern, RegexPattern), pattern
+			regex = pattern.regex.pattern
+
+			if isinstance(regex, bytes):
+				regex_bytes = regex
+			else:
+				assert isinstance(regex, str), regex
+				regex_bytes = regex.encode('utf8')
+
+			if debug:
+				expr_data.append(HyperscanExprDebug(
+					include=pattern.include,
+					index=pattern_index,
+					is_dir_pattern=False,
+					regex=regex,
+				))
+			else:
+				expr_data.append(HyperscanExprDat(
+					include=pattern.include,
+					index=pattern_index,
+					is_dir_pattern=False,
+				))
+
+			exprs.append(regex_bytes)
+
+		# Sort expressions.
+		ids = list(range(len(exprs)))
+		if sort_ids is not None:
+			sort_ids(ids)
+			exprs = [exprs[__id] for __id in ids]
+
+		# Compile patterns.
+		db.compile(
+			expressions=exprs,
+			ids=ids,
+			elements=len(exprs),
+			flags=HS_FLAGS,
+		)
+
+		return expr_data
+
+	@override
+	def match_file(self, file: str) -> tuple[Optional[bool], Optional[int]]:
+		"""
+		Check the file against the patterns.
+
+		*file* (:class:`str`) is the normalized file path to check.
+
+		Returns a :class:`tuple` containing whether to include *file* (:class:`bool`
+		or :data:`None`), and the index of the last matched pattern (:class:`int` or
+		:data:`None`).
+		"""
+		# NOTICE: According to benchmarking, a method callback is 20% faster than
+		# using a closure here.
+		db = self._db
+		if self._db is None:
+			# Database was not initialized because there were no patterns. Return no
+			# match.
+			return (None, None)
+
+		self._out = (None, -1)
+		db.scan(file.encode('utf8'), match_event_handler=self.__on_match)
+
+		out_include, out_index = self._out
+		if out_index == -1:
+			out_index = None
+
+		return (out_include, out_index)
+
+	@staticmethod
+	def _make_db() -> hyperscan.Database:
+		"""
+		Create the Hyperscan database.
+
+		Returns the database (:class:`hyperscan.Database`).
+		"""
+		return hyperscan.Database(mode=hyperscan.HS_MODE_BLOCK)
+
+	def __on_match(
+		self,
+		expr_id: int,
+		_from: int,
+		_to: int,
+		_flags: int,
+		_context: Any,
+	) -> Optional[bool]:
+		"""
+		Called on each match.
+
+		*expr_id* (:class:`int`) is the expression id (index) of the matched
+		pattern.
+		"""
+		# Store match.
+		# - WARNING: Hyperscan does not guarantee matches will be produced in order!
+		#   Later expressions have higher priority.
+		expr_dat = self._expr_data[expr_id]
+		index = expr_dat.index
+		prev_index = self._out[1]
+		if index > prev_index:
+			self._out = (expr_dat.include, index)
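+
+# Illustrative sketch (not part of upstream pathspec): hyperscan may report
+# matches in any order, so __on_match() above reduces them by keeping only the
+# highest pattern index. The same reduction in plain Python, with hypothetical
+# match tuples:
+#
+#     out = (None, -1)
+#     for include, index in [(True, 0), (False, 3), (True, 1)]:
+#         if index > out[1]:
+#             out = (include, index)
+#     # out == (False, 3): the last-defined matching pattern wins.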
diff --git a/.venv/lib/python3.12/site-packages/pathspec/_backends/re2/__init__.py b/.venv/lib/python3.12/site-packages/pathspec/_backends/re2/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/.venv/lib/python3.12/site-packages/pathspec/_backends/re2/_base.py b/.venv/lib/python3.12/site-packages/pathspec/_backends/re2/_base.py
new file mode 100644
index 0000000..4e6ae9f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/_backends/re2/_base.py
@@ -0,0 +1,95 @@
+"""
+This module provides private data for the base implementation of the
+:module:`re2` backend.
+
+WARNING: The *pathspec._backends.re2* package is not part of the public API. Its
+contents and structure are likely to change.
+"""
+from __future__ import annotations
+
+from dataclasses import (
+	dataclass)
+from typing import (
+	Optional,  # Replaced by `X | None` in 3.10.
+	Union)  # Replaced by `X | Y` in 3.10.
+
+try:
+	import re2
+	re2_error = None
+except ModuleNotFoundError as e:
+	re2 = None
+	re2_error = e
+	RE2_OPTIONS = None
+else:
+	# Both the `google-re2` and `pyre2` libraries use the `re2` namespace.
+	# `google-re2` is the only one currently supported.
+	try:
+		RE2_OPTIONS = re2.Options()
+		RE2_OPTIONS.log_errors = False
+		RE2_OPTIONS.never_capture = True
+	except Exception as e:
+		re2_error = e
+		RE2_OPTIONS = None
+
+RE2_OPTIONS: Optional[re2.Options]
+"""
+The re2 options to use, or :data:`None` if re2 is unavailable:
+
+-	`log_errors=False` disables logging to stderr.
+
+-	`never_capture=True` disables capture groups because they effectively cannot
+	be utilized with :class:`re2.Set`.
+"""
+
+re2_error: Optional[Exception]
+"""
+*re2_error* (:class:`Exception` or :data:`None`) is the re2 import error.
+"""
+
+
+@dataclass(frozen=True)
+class Re2RegexDat(object):
+	"""
+	The :class:`Re2RegexDat` class is used to store data related to a regular
+	expression.
+	"""
+
+	# The slots argument is not supported until Python 3.10.
+	__slots__ = [
+		'include',
+		'index',
+		'is_dir_pattern',
+	]
+
+	include: bool
+	"""
+	*include* (:class:`bool`) is whether the matched files should be
+	included (:data:`True`), or excluded (:data:`False`).
+	"""
+
+	index: int
+	"""
+	*index* (:class:`int`) is the pattern index.
+	"""
+
+	is_dir_pattern: bool
+	"""
+	*is_dir_pattern* (:class:`bool`) is whether the pattern is a directory
+	pattern for gitignore.
+	"""
+
+
+@dataclass(frozen=True)
+class Re2RegexDebug(Re2RegexDat):
+	"""
+	The :class:`Re2RegexDebug` class stores additional debug information related
+	to a regular expression.
+	"""
+
+	# The slots argument is not supported until Python 3.10.
+	__slots__ = ['regex']
+
+	regex: Union[str, bytes]
+	"""
+	*regex* (:class:`str` or :class:`bytes`) is the regular expression.
+	"""
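+
+# Illustrative only: with debugging enabled, each compiled expression is stored
+# roughly as Re2RegexDebug(include=True, index=3, is_dir_pattern=False,
+# regex='^build/'), where the field values shown here are hypothetical.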
diff --git a/.venv/lib/python3.12/site-packages/pathspec/_backends/re2/base.py b/.venv/lib/python3.12/site-packages/pathspec/_backends/re2/base.py
new file mode 100644
index 0000000..fa24f4d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/_backends/re2/base.py
@@ -0,0 +1,18 @@
+"""
+This module provides the base implementation for the :module:`re2` backend.
+
+WARNING: The *pathspec._backends.re2* package is not part of the public API. Its
+contents and structure are likely to change.
+"""
+from __future__ import annotations
+
+from typing import (
+	Optional)  # Replaced by `X | None` in 3.10.
+
+from ._base import (
+	re2_error)
+
+re2_error: Optional[Exception]
+"""
+*re2_error* (:class:`Exception` or :data:`None`) is the re2 import error.
+"""
diff --git a/.venv/lib/python3.12/site-packages/pathspec/_backends/re2/gitignore.py b/.venv/lib/python3.12/site-packages/pathspec/_backends/re2/gitignore.py
new file mode 100644
index 0000000..cb2525f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/_backends/re2/gitignore.py
@@ -0,0 +1,179 @@
+"""
+This module provides the :module:`re2` backend for :class:`~pathspec.gitignore.GitIgnoreSpec`.
+
+WARNING: The *pathspec._backends.re2* package is not part of the public API. Its
+contents and structure are likely to change.
+"""
+from __future__ import annotations
+
+from typing import (
+	Callable,  # Replaced by `collections.abc.Callable` in 3.9.2.
+	Optional,  # Replaced by `X | None` in 3.10.
+	Union)  # Replaced by `X | Y` in 3.10.
+
+try:
+	import re2
+except ModuleNotFoundError:
+	re2 = None
+
+from pathspec.pattern import (
+	RegexPattern)
+from pathspec.patterns.gitignore.spec import (
+	GitIgnoreSpecPattern,
+	_BYTES_ENCODING,
+	_DIR_MARK_CG,
+	_DIR_MARK_OPT)
+from pathspec._typing import (
+	override)  # Added in 3.12.
+
+from ._base import (
+	Re2RegexDat,
+	Re2RegexDebug)
+from .pathspec import (
+	Re2PsBackend)
+
+
+class Re2GiBackend(Re2PsBackend):
+	"""
+	The :class:`Re2GiBackend` class is the :module:`re2` implementation used by
+	:class:`~pathspec.gitignore.GitIgnoreSpec` for matching files.
+	"""
+
+	@override
+	@staticmethod
+	def _init_set(
+		debug: bool,
+		patterns: dict[int, RegexPattern],
+		regex_set: re2.Set,
+		sort_indices: Optional[Callable[[list[int]], None]],
+	) -> list[Re2RegexDat]:
+		"""
+		Create the re2 regex set.
+
+		*debug* (:class:`bool`) is whether to include additional debugging
+		information for the regular expressions.
+
+		*patterns* (:class:`dict`) maps pattern index (:class:`int`) to pattern
+		(:class:`.RegexPattern`).
+
+		*regex_set* (:class:`re2.Set`) is the regex set.
+
+		*sort_indices* (:class:`callable` or :data:`None`) is a function used to
+		sort the patterns by index. This is used during testing to ensure the order
+		of patterns is not accidentally relied on.
+
+		Returns a :class:`list` indexed by regex id (:class:`int`) to its data
+		(:class:`Re2RegexDat`).
+		"""
+		# Sort patterns.
+		indices = list(patterns.keys())
+		if sort_indices is not None:
+			sort_indices(indices)
+
+		# Prepare patterns.
+		regex_data: list[Re2RegexDat] = []
+		for pattern_index in indices:
+			pattern = patterns[pattern_index]
+			if pattern.include is None:
+				continue
+
+			assert isinstance(pattern, RegexPattern), pattern
+			regex = pattern.regex.pattern
+
+			use_regexes: list[tuple[Union[str, bytes], bool]] = []
+			if isinstance(pattern, GitIgnoreSpecPattern):
+				# GitIgnoreSpecPattern uses capture groups for its directory marker. Re2
+				# supports capture groups, but they cannot be utilized when using
+				# `re2.Set`. Handle this scenario.
+				regex_str: str
+				if isinstance(regex, str):
+					regex_str = regex
+				else:
+					assert isinstance(regex, bytes), regex
+					regex_str = regex.decode(_BYTES_ENCODING)
+
+				if _DIR_MARK_CG in regex_str:
+					# Found directory marker.
+					if regex_str.endswith(_DIR_MARK_OPT):
+						# Regex has optional directory marker. Split regex into directory
+						# and file variants.
+						base_regex = regex_str[:-len(_DIR_MARK_OPT)]
+						use_regexes.append((f'{base_regex}/', True))
+						use_regexes.append((f'{base_regex}$', False))
+					else:
+						# Remove capture group.
+						base_regex = regex_str.replace(_DIR_MARK_CG, '/')
+						use_regexes.append((base_regex, True))
+
+			if not use_regexes:
+				# No special case for regex.
+				use_regexes.append((regex, False))
+
+			for regex, is_dir_pattern in use_regexes:
+				if debug:
+					regex_data.append(Re2RegexDebug(
+						include=pattern.include,
+						index=pattern_index,
+						is_dir_pattern=is_dir_pattern,
+						regex=regex,
+					))
+				else:
+					regex_data.append(Re2RegexDat(
+						include=pattern.include,
+						index=pattern_index,
+						is_dir_pattern=is_dir_pattern,
+					))
+
+				regex_set.Add(regex)
+
+		# Compile patterns.
+		regex_set.Compile()
+		return regex_data
+
+	@override
+	def match_file(self, file: str) -> tuple[Optional[bool], Optional[int]]:
+		"""
+		Check the file against the patterns.
+
+		*file* (:class:`str`) is the normalized file path to check.
+
+		Returns a :class:`tuple` containing whether to include *file* (:class:`bool`
+		or :data:`None`), and the index of the last matched pattern (:class:`int` or
+		:data:`None`).
+		"""
+		# Find best match.
+		match_ids: Optional[list[int]] = self._set.Match(file)
+		if not match_ids:
+			return (None, None)
+
+		out_include: Optional[bool] = None
+		out_index: int = -1
+		out_priority = -1
+
+		regex_data = self._regex_data
+		for regex_id in match_ids:
+			regex_dat = regex_data[regex_id]
+
+			is_dir_pattern = regex_dat.is_dir_pattern
+			if is_dir_pattern:
+				# Pattern matched by a directory pattern.
+				priority = 1
+			else:
+				# Pattern matched by a file pattern.
+				priority = 2
+
+			# WARNING: According to the documentation on `RE2::Set::Match()`, there is
+			# no guarantee matches will be produced in order!
+			include = regex_dat.include
+			index = regex_dat.index
+			if (
+				(include and is_dir_pattern and index > out_index)
+				or (priority == out_priority and index > out_index)
+				or priority > out_priority
+			):
+				out_include = include
+				out_index = index
+				out_priority = priority
+
+		assert out_index != -1, (out_index, out_include, out_priority)
+		return (out_include, out_index)
diff --git a/.venv/lib/python3.12/site-packages/pathspec/_backends/re2/pathspec.py b/.venv/lib/python3.12/site-packages/pathspec/_backends/re2/pathspec.py
new file mode 100644
index 0000000..2c58b45
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/_backends/re2/pathspec.py
@@ -0,0 +1,187 @@
+"""
+This module provides the :module:`re2` backend for :class:`~pathspec.pathspec.PathSpec`.
+
+WARNING: The *pathspec._backends.re2* package is not part of the public API. Its
+contents and structure are likely to change.
+"""
+from __future__ import annotations
+
+from collections.abc import (
+	Sequence)
+from typing import (
+	Callable,  # Replaced by `collections.abc.Callable` in 3.9.2.
+	Optional)  # Replaced by `X | None` in 3.10.
+
+try:
+	import re2
+except ModuleNotFoundError:
+	re2 = None
+
+from pathspec.backend import (
+	_Backend)
+from pathspec.pattern import (
+	RegexPattern)
+from pathspec._typing import (
+	override)  # Added in 3.12.
+
+from .._utils import (
+	enumerate_patterns)
+
+from .base import (
+	re2_error)
+from ._base import (
+	RE2_OPTIONS,
+	Re2RegexDat,
+	Re2RegexDebug)
+
+
+class Re2PsBackend(_Backend):
+	"""
+	The :class:`Re2PsBackend` class is the :module:`re2` implementation used by
+	:class:`~pathspec.pathspec.PathSpec` for matching files.
+	"""
+
+	def __init__(
+		self,
+		patterns: Sequence[RegexPattern],
+		*,
+		_debug_regex: Optional[bool] = None,
+		_test_sort: Optional[Callable[[list], None]] = None,
+	) -> None:
+		"""
+		Initialize the :class:`Re2PsBackend` instance.
+
+		*patterns* (:class:`Sequence` of :class:`.RegexPattern`) contains the
+		compiled patterns.
+		"""
+		if re2_error is not None:
+			raise re2_error
+
+		if patterns and not isinstance(patterns[0], RegexPattern):
+			raise TypeError(f"{patterns[0]=!r} must be a RegexPattern.")
+
+		use_patterns = dict(enumerate_patterns(
+			patterns, filter=True, reverse=False,
+		))
+		regex_set = self._make_set()
+
+		self._debug_regex = bool(_debug_regex)
+		"""
+		*_debug_regex* (:class:`bool`) is whether to include additional debugging
+		information for the regular expressions.
+		"""
+
+		self._patterns: dict[int, RegexPattern] = use_patterns
+		"""
+		*_patterns* (:class:`dict`) maps pattern index (:class:`int`) to pattern
+		(:class:`RegexPattern`).
+		"""
+
+		self._regex_data: list[Re2RegexDat] = self._init_set(
+			debug=self._debug_regex,
+			patterns=use_patterns,
+			regex_set=regex_set,
+			sort_indices=_test_sort,
+		)
+		"""
+		*_regex_data* (:class:`list`) maps regex index (:class:`int`) to regex data
+		(:class:`Re2RegexDat`).
+		"""
+
+		self._set: re2.Set = regex_set
+		"""
+		*_set* (:class:`re2.Set`) is the re2 regex set.
+		"""
+
+	@staticmethod
+	def _init_set(
+		debug: bool,
+		patterns: dict[int, RegexPattern],
+		regex_set: re2.Set,
+		sort_indices: Optional[Callable[[list[int]], None]],
+	) -> list[Re2RegexDat]:
+		"""
+		Create the re2 regex set.
+
+		*debug* (:class:`bool`) is whether to include additional debugging
+		information for the regular expressions.
+
+		*patterns* (:class:`dict`) maps pattern index (:class:`int`) to pattern
+		(:class:`.RegexPattern`).
+
+		*regex_set* (:class:`re2.Set`) is the regex set.
+
+		*sort_indices* (:class:`callable` or :data:`None`) is a function used to
+		sort the patterns by index. This is used during testing to ensure the order
+		of patterns is not accidentally relied on.
+
+		Returns a :class:`list` indexed by regex id (:class:`int`) to its data
+		(:class:`Re2RegexDat`).
+		"""
+		# Sort patterns.
+		indices = list(patterns.keys())
+		if sort_indices is not None:
+			sort_indices(indices)
+
+		# Prepare patterns.
+		regex_data: list[Re2RegexDat] = []
+		for pattern_index in indices:
+			pattern = patterns[pattern_index]
+			if pattern.include is None:
+				continue
+
+			assert isinstance(pattern, RegexPattern), pattern
+			regex = pattern.regex.pattern
+
+			if debug:
+				regex_data.append(Re2RegexDebug(
+					include=pattern.include,
+					index=pattern_index,
+					is_dir_pattern=False,
+					regex=regex,
+				))
+			else:
+				regex_data.append(Re2RegexDat(
+					include=pattern.include,
+					index=pattern_index,
+					is_dir_pattern=False,
+				))
+
+			regex_set.Add(regex)
+
+		# Compile patterns.
+		regex_set.Compile()
+		return regex_data
+
+	@staticmethod
+	def _make_set() -> re2.Set:
+		"""
+		Create the re2 regex set.
+
+		Returns the set (:class:`re2.Set`).
+		"""
+		return re2.Set.SearchSet(RE2_OPTIONS)
+
+	@override
+	def match_file(self, file: str) -> tuple[Optional[bool], Optional[int]]:
+		"""
+		Check the file against the patterns.
+
+		*file* (:class:`str`) is the normalized file path to check.
+
+		Returns a :class:`tuple` containing whether to include *file* (:class:`bool`
+		or :data:`None`), and the index of the last matched pattern (:class:`int` or
+		:data:`None`).
+		"""
+		# Find best match.
+		# - WARNING: According to the documentation on `RE2::Set::Match()`, there is
+		#   no guarantee matches will be produced in order! Later expressions have
+		#   higher priority.
+		match_ids: Optional[list[int]] = self._set.Match(file)
+		if not match_ids:
+			return (None, None)
+
+		regex_data = self._regex_data
+		pattern_index = max(regex_data[__id].index for __id in match_ids)
+		pattern = self._patterns[pattern_index]
+		return (pattern.include, pattern_index)
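+
+# Illustrative sketch of the selection in match_file() above, with hypothetical
+# ids and indices: if the set match returns ids [0, 2] and those ids map to
+# pattern indices 4 and 1, the highest pattern index wins:
+#
+#     id_to_index = {0: 4, 2: 1}
+#     pattern_index = max(id_to_index[i] for i in [0, 2])  # == 4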
diff --git a/.venv/lib/python3.12/site-packages/pathspec/_backends/simple/__init__.py b/.venv/lib/python3.12/site-packages/pathspec/_backends/simple/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/.venv/lib/python3.12/site-packages/pathspec/_backends/simple/gitignore.py b/.venv/lib/python3.12/site-packages/pathspec/_backends/simple/gitignore.py
new file mode 100644
index 0000000..bdacc7e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/_backends/simple/gitignore.py
@@ -0,0 +1,104 @@
+"""
+This module provides the simple backend for :class:`~pathspec.gitignore.GitIgnoreSpec`.
+
+WARNING: The *pathspec._backends.simple* package is not part of the public API.
+Its contents and structure are likely to change.
+"""
+
+from collections.abc import (
+	Sequence)
+from typing import (
+	Optional)  # Replaced by `X | None` in 3.10.
+
+from pathspec.pattern import (
+	RegexPattern)
+from pathspec.patterns.gitignore.spec import (
+	_DIR_MARK)
+from pathspec._typing import (
+	override)  # Added in 3.12.
+
+from .pathspec import (
+	SimplePsBackend)
+
+
+class SimpleGiBackend(SimplePsBackend):
+	"""
+	The :class:`SimpleGiBackend` class is the default (or simple) implementation
+	used by :class:`~pathspec.gitignore.GitIgnoreSpec` for matching files.
+	"""
+
+	# Change type hint.
+	_patterns: list[tuple[int, RegexPattern]]
+
+	def __init__(
+		self,
+		patterns: Sequence[RegexPattern],
+		*,
+		no_filter: Optional[bool] = None,
+		no_reverse: Optional[bool] = None,
+	) -> None:
+		"""
+		Initialize the :class:`SimpleGiBackend` instance.
+
+		*patterns* (:class:`Sequence` of :class:`.RegexPattern`) contains the
+		compiled patterns.
+
+		*no_filter* (:class:`bool`) is whether to keep no-op patterns (:data:`True`),
+		or remove them (:data:`False`).
+
+		*no_reverse* (:class:`bool`) is whether to keep the pattern order
+		(:data:`True`), or reverse the order (:data:`False`).
+		"""
+		super().__init__(patterns, no_filter=no_filter, no_reverse=no_reverse)
+
+	@override
+	def match_file(self, file: str) -> tuple[Optional[bool], Optional[int]]:
+		"""
+		Check the file against the patterns.
+
+		*file* (:class:`str`) is the normalized file path to check.
+
+		Returns a :class:`tuple` containing whether to include *file* (:class:`bool`
+		or :data:`None`), and the index of the last matched pattern (:class:`int` or
+		:data:`None`).
+		"""
+		is_reversed = self._is_reversed
+
+		out_include: Optional[bool] = None
+		out_index: Optional[int] = None
+		out_priority = 0
+		for index, pattern in self._patterns:
+			if (
+				(include := pattern.include) is not None
+				and (match := pattern.match_file(file)) is not None
+			):
+				# Pattern matched.
+
+				# Check for directory marker.
+				dir_mark = match.match.groupdict().get(_DIR_MARK)
+
+				if dir_mark:
+					# Pattern matched by a directory pattern.
+					priority = 1
+				else:
+					# Pattern matched by a file pattern.
+					priority = 2
+
+				if is_reversed:
+					if priority > out_priority:
+						out_include = include
+						out_index = index
+						out_priority = priority
+				else:
+					# Forward.
+					if (include and dir_mark) or priority >= out_priority:
+						out_include = include
+						out_index = index
+						out_priority = priority
+
+				if is_reversed and priority == 2:
+					# Patterns are being checked in reverse order. The first pattern that
+					# matches with priority 2 takes precedence.
+					break
+
+		return (out_include, out_index)
diff --git a/.venv/lib/python3.12/site-packages/pathspec/_backends/simple/pathspec.py b/.venv/lib/python3.12/site-packages/pathspec/_backends/simple/pathspec.py
new file mode 100644
index 0000000..2ded1be
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/_backends/simple/pathspec.py
@@ -0,0 +1,76 @@
+"""
+This module provides the simple backend for :class:`~pathspec.pathspec.PathSpec`.
+
+WARNING: The *pathspec._backends.simple* package is not part of the public API.
+Its contents and structure are likely to change.
+"""
+
+from collections.abc import (
+	Sequence)
+from typing import (
+	Optional)  # Replaced by `X | None` in 3.10.
+
+from pathspec.backend import (
+	_Backend)
+from pathspec.pattern import (
+	Pattern)
+from pathspec._typing import (
+	override)  # Added in 3.12.
+from pathspec.util import (
+	check_match_file)
+
+from .._utils import (
+	enumerate_patterns)
+
+
+class SimplePsBackend(_Backend):
+	"""
+	The :class:`SimplePsBackend` class is the default (or simple) implementation
+	used by :class:`~pathspec.pathspec.PathSpec` for matching files.
+	"""
+
+	def __init__(
+		self,
+		patterns: Sequence[Pattern],
+		*,
+		no_filter: Optional[bool] = None,
+		no_reverse: Optional[bool] = None,
+	) -> None:
+		"""
+		Initialize the :class:`SimplePsBackend` instance.
+
+		*patterns* (:class:`Sequence` of :class:`.Pattern`) contains the compiled
+		patterns.
+
+		*no_filter* (:class:`bool`) is whether to keep no-op patterns (:data:`True`),
+		or remove them (:data:`False`).
+
+		*no_reverse* (:class:`bool`) is whether to keep the pattern order
+		(:data:`True`), or reverse the order (:data:`False`).
+		"""
+
+		self._is_reversed: bool = not no_reverse
+		"""
+		*_is_reversed* (:class:`bool`) is whether the pattern order was reversed.
+		"""
+
+		self._patterns: list[tuple[int, Pattern]] = enumerate_patterns(
+			patterns, filter=not no_filter, reverse=not no_reverse,
+		)
+		"""
+		*_patterns* (:class:`list` of :class:`tuple`) contains the enumerated
+		patterns.
+		"""
+
+	@override
+	def match_file(self, file: str) -> tuple[Optional[bool], Optional[int]]:
+		"""
+		Check the file against the patterns.
+
+		*file* (:class:`str`) is the normalized file path to check.
+
+		Returns a :class:`tuple` containing whether to include *file* (:class:`bool`
+		or :data:`None`), and the index of the last matched pattern (:class:`int` or
+		:data:`None`).
+		"""
+		return check_match_file(self._patterns, file, self._is_reversed)
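+
+# Illustrative only: enumerate_patterns() pairs each pattern with its original
+# index, optionally dropping no-op patterns (include is None) and reversing the
+# order. For hypothetical patterns p0 (no-op), p1, p2 with filter=True and
+# reverse=True, the stored list would be roughly [(2, p2), (1, p1)].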
diff --git a/.venv/lib/python3.12/site-packages/pathspec/_meta.py b/.venv/lib/python3.12/site-packages/pathspec/_meta.py
new file mode 100644
index 0000000..4e4c782
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/_meta.py
@@ -0,0 +1,67 @@
+"""
+This module contains the project meta-data.
+"""
+
+__author__ = "Caleb P. Burns"
+__copyright__ = "Copyright © 2013-2026 Caleb P. Burns"
+__credits__ = [
+	"Hong Minhee ",
+	"Brandon High ",
+	"029xue ",
+	"Michael Huynh ",
+	"Nick Humrich ",
+	"David Fraser ",
+	"Charles Samborski ",
+	"George Hickman ",
+	"Vincent Driessen ",
+	"Adrien Vergé ",
+	"Anders Blomdell ",
+	"Xavier Thomas ",
+	"Wim Jeantine-Glenn ",
+	"Hugo van Kemenade ",
+	"Dan Cecile ",
+	"MrOutis ",
+	"Jon Dufresne ",
+	"Greg Roodt ",
+	"Florin T. ",
+	"Ben Felder ",
+	"Nicholas Hollander ",
+	"KOLANICH ",
+	"Jon Hays ",
+	"Isaac0616 ",
+	"Sebastiaan Zeeff ",
+	"Roel Adriaans ",
+	"Ravi Selker ",
+	"Johan Vergeer ",
+	"danjer ",
+	"Jan-Hein Bührman ",
+	"Wim-Peter Dirks ",
+	"Karthikeyan Singaravelan ",
+	"John Vandenberg ",
+	"John T. Wodder II ",
+	"Tomasz Kłoczko ",
+	"Oren ",
+	"SP Mohanty ",
+	"Richard Si ",
+	"Jakub Kuczys ",
+	"Michał Górny ",
+	"Bartłomiej Żak ",
+	"Matthias ",
+	"Avasam ",
+	"Anıl Karagenç ",
+	"Yannic Schröder ",
+	"axesider ",
+	"TomRuk ",
+	"Oleh Prypin ",
+	"Lumina ",
+	"Kurt McKee ",
+	"Dobatymo ",
+	"Tomoki Nakamaru ",
+	"Sebastien Eskenazi ",
+	"Bar Vered ",
+	"Tzach Shabtay ",
+	"Adam Dangoor ",
+	"Marcel Telka ",
+	"Dmytro Kostochko ",
+]
+__license__ = "MPL 2.0"
diff --git a/.venv/lib/python3.12/site-packages/pathspec/_typing.py b/.venv/lib/python3.12/site-packages/pathspec/_typing.py
new file mode 100644
index 0000000..049966c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/_typing.py
@@ -0,0 +1,64 @@
+"""
+This module provides stubs for type hints not supported by all relevant Python
+versions.
+
+NOTICE: This project should have zero required dependencies which means it
+cannot simply require :module:`typing_extensions`, and I do not want to maintain
+a vendored copy of :module:`typing_extensions`.
+"""
+
+import functools
+import warnings
+from typing import (
+	Any,
+	Callable,  # Replaced by `collections.abc.Callable` in 3.9.2.
+	Optional,  # Replaced by `X | None` in 3.10.
+	TypeVar)
+try:
+	from typing import AnyStr  # Removed in 3.18.
+except ImportError:
+	AnyStr = TypeVar('AnyStr', str, bytes)
+try:
+	from typing import Never  # Added in 3.11.
+except ImportError:
+	from typing import NoReturn as Never
+
+F = TypeVar('F', bound=Callable[..., Any])
+
+try:
+	from warnings import deprecated  # Added in 3.13.
+except ImportError:
+	try:
+		from typing_extensions import deprecated
+	except ImportError:
+		def deprecated(
+			message: str,
+			/, *,
+			category: Optional[type[Warning]] = DeprecationWarning,
+			stacklevel: int = 1,
+		) -> Callable[[F], F]:
+			def decorator(f: F) -> F:
+				@functools.wraps(f)
+				def wrapper(*a, **k):
+					warnings.warn(message, category=category, stacklevel=stacklevel+1)
+					return f(*a, **k)
+				return wrapper
+			return decorator
+
+try:
+	from typing import override  # Added in 3.12.
+except ImportError:
+	try:
+		from typing_extensions import override
+	except ImportError:
+		def override(f: F) -> F:
+			return f
+
+
+def assert_unreachable(message: str) -> Never:
+	"""
+	The code path is unreachable. Raises an :class:`AssertionError`.
+
+	*message* (:class:`str`) is the error message.
+	"""
+	raise AssertionError(message)
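+
+# Illustrative usage of assert_unreachable() for exhaustive branching (the
+# describe() helper is hypothetical):
+#
+#     def describe(backend: str) -> str:
+#         if backend == 'simple':
+#             return "pure Python"
+#         elif backend == 're2':
+#             return "Google RE2"
+#         assert_unreachable(f"Unhandled backend: {backend!r}")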
diff --git a/.venv/lib/python3.12/site-packages/pathspec/_version.py b/.venv/lib/python3.12/site-packages/pathspec/_version.py
new file mode 100644
index 0000000..421d8fa
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/_version.py
@@ -0,0 +1,5 @@
+"""
+This module defines the version.
+"""
+
+__version__ = "1.0.4"
diff --git a/.venv/lib/python3.12/site-packages/pathspec/backend.py b/.venv/lib/python3.12/site-packages/pathspec/backend.py
new file mode 100644
index 0000000..f1def28
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/backend.py
@@ -0,0 +1,40 @@
+"""
+This module defines the necessary classes and type hints for exposing the bare
+minimum of the internal implementations for the pattern (regular expression)
+matching backends. The exact structure of the backends is not solidified and is
+subject to change.
+"""
+
+from typing import (
+	Literal,
+	Optional)
+
+BackendNamesHint = Literal['best', 'hyperscan', 're2', 'simple']
+"""
+The supported backend values.
+"""
+
+
+class _Backend(object):
+	"""
+	.. warning:: This class is not part of the public API. It is subject to
+		change.
+
+	The :class:`_Backend` class is the abstract base class defining how to match
+	files against patterns.
+	"""
+
+	def match_file(self, file: str) -> tuple[Optional[bool], Optional[int]]:
+		"""
+		Check the file against the patterns.
+
+		*file* (:class:`str`) is the normalized file path to check.
+
+		Returns a :class:`tuple` containing whether to include *file* (:class:`bool`
+		or :data:`None`), and the index of the last matched pattern (:class:`int` or
+		:data:`None`).
+		"""
+		raise NotImplementedError((
+			f"{self.__class__.__module__}.{self.__class__.__qualname__}.match_file() "
+			f"must be implemented."
+		))  # NotImplementedError
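+
+# Illustrative only (the _Backend class is private API): a minimal subclass
+# just implements match_file(). A hypothetical backend that never matches:
+#
+#     class _NullBackend(_Backend):
+#         def match_file(self, file: str) -> tuple[Optional[bool], Optional[int]]:
+#             return (None, None)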
diff --git a/.venv/lib/python3.12/site-packages/pathspec/gitignore.py b/.venv/lib/python3.12/site-packages/pathspec/gitignore.py
new file mode 100644
index 0000000..93c3d76
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/gitignore.py
@@ -0,0 +1,165 @@
+"""
+This module provides :class:`.GitIgnoreSpec` which replicates *.gitignore*
+behavior, and handles edge-cases where Git's behavior differs from what's
+documented. Git allows including files from excluded directories which directly
+contradicts the documentation. This uses :class:`.GitIgnoreSpecPattern` to fully
+replicate Git's handling.
+"""
+from __future__ import annotations
+
+from collections.abc import (
+	Iterable,
+	Sequence)
+from typing import (
+	Callable,  # Replaced by `collections.abc.Callable` in 3.9.2.
+	Optional,  # Replaced by `X | None` in 3.10.
+	TypeVar,
+	Union,  # Replaced by `X | Y` in 3.10.
+	cast,
+	overload)
+
+from pathspec.backend import (
+	BackendNamesHint,
+	_Backend)
+from pathspec._backends.agg import (
+	make_gitignore_backend)
+from pathspec.pathspec import (
+	PathSpec)
+from pathspec.pattern import (
+	Pattern)
+from pathspec.patterns.gitignore.basic import (
+	GitIgnoreBasicPattern)
+from pathspec.patterns.gitignore.spec import (
+	GitIgnoreSpecPattern)
+from pathspec._typing import (
+	AnyStr,  # Removed in 3.18.
+	override)  # Added in 3.12.
+from pathspec.util import (
+	_is_iterable,
+	lookup_pattern)
+
+Self = TypeVar("Self", bound='GitIgnoreSpec')
+"""
+:class:`.GitIgnoreSpec` self type hint to support Python v<3.11 using PEP 673
+recommendation.
+"""
+
+
+class GitIgnoreSpec(PathSpec):
+	"""
+	The :class:`GitIgnoreSpec` class extends :class:`.PathSpec` to replicate
+	*gitignore* behavior. This uses :class:`.GitIgnoreSpecPattern` to fully
+	replicate Git's handling.
+	"""
+
+	def __eq__(self, other: object) -> bool:
+		"""
+		Tests the equality of this gitignore-spec with *other* (:class:`.GitIgnoreSpec`)
+		by comparing their :attr:`self.patterns <.PathSpec.patterns>` attributes. A
+		non-:class:`GitIgnoreSpec` will not compare equal.
+		"""
+		if isinstance(other, GitIgnoreSpec):
+			return super().__eq__(other)
+		elif isinstance(other, PathSpec):
+			return False
+		else:
+			return NotImplemented
+
+	# Support reversed order of arguments from PathSpec.
+	@overload
+	@classmethod
+	def from_lines(
+		cls: type[Self],
+		pattern_factory: Union[str, Callable[[AnyStr], Pattern], None],
+		lines: Iterable[AnyStr],
+		*,
+		backend: Union[BackendNamesHint, str, None] = None,
+		_test_backend_factory: Optional[Callable[[Sequence[Pattern]], _Backend]] = None,
+	) -> Self:
+		...
+
+	@overload
+	@classmethod
+	def from_lines(
+		cls: type[Self],
+		lines: Iterable[AnyStr],
+		pattern_factory: Union[str, Callable[[AnyStr], Pattern], None] = None,
+		*,
+		backend: Union[BackendNamesHint, str, None] = None,
+		_test_backend_factory: Optional[Callable[[Sequence[Pattern]], _Backend]] = None,
+	) -> Self:
+		...
+
+	@override
+	@classmethod
+	def from_lines(
+		cls: type[Self],
+		lines: Iterable[AnyStr],
+		pattern_factory: Union[str, Callable[[AnyStr], Pattern], None] = None,
+		*,
+		backend: Union[BackendNamesHint, str, None] = None,
+		_test_backend_factory: Optional[Callable[[Sequence[Pattern]], _Backend]] = None,
+	) -> Self:
+		"""
+		Compiles the pattern lines.
+
+		*lines* (:class:`~collections.abc.Iterable`) yields each uncompiled pattern
+		(:class:`str`). This simply has to yield each line, so it can be a
+		:class:`io.TextIOBase` (e.g., from :func:`open` or :class:`io.StringIO`) or
+		the result from :meth:`str.splitlines`.
+
+		*pattern_factory* does not need to be set for :class:`GitIgnoreSpec`. If
+		set, it should be either ``"gitignore"`` or :class:`.GitIgnoreSpecPattern`.
+		There is no guarantee it will work with any other pattern class. Default is
+		:data:`None` for :class:`.GitIgnoreSpecPattern`.
+
+		*backend* (:class:`str` or :data:`None`) is the pattern (regular expression)
+		matching backend to use. Default is :data:`None` for "best" to use the best
+		available backend. Priority of backends is: "re2", "hyperscan", "simple".
+		The "simple" backend is always available.
+
+		Returns the :class:`GitIgnoreSpec` instance.
+		"""
+		if (isinstance(lines, (str, bytes)) or callable(lines)) and _is_iterable(pattern_factory):
+			# Support reversed order of arguments from PathSpec.
+			pattern_factory, lines = lines, pattern_factory
+
+		if pattern_factory is None:
+			pattern_factory = GitIgnoreSpecPattern
+		elif pattern_factory == 'gitignore':
+			# Force use of GitIgnoreSpecPattern for "gitignore" to handle edge-cases.
+			# This makes usage easier.
+			pattern_factory = GitIgnoreSpecPattern
+
+		if isinstance(pattern_factory, str):
+			pattern_factory = lookup_pattern(pattern_factory)
+
+		if issubclass(pattern_factory, GitIgnoreBasicPattern):
+			raise TypeError((
+				f"{pattern_factory=!r} cannot be {GitIgnoreBasicPattern} because it "
+				f"will give unexpected results."
+			))  # TypeError
+
+		self = super().from_lines(pattern_factory, lines, backend=backend, _test_backend_factory=_test_backend_factory)
+		return cast(Self, self)
+
+	@override
+	@staticmethod
+	def _make_backend(
+		name: BackendNamesHint,
+		patterns: Sequence[Pattern],
+	) -> _Backend:
+		"""
+		.. warning:: This method is not part of the public API. It is subject to
+			change.
+
+		Create the backend for the patterns.
+
+		*name* (:class:`str`) is the name of the backend.
+
+		*patterns* (:class:`~collections.abc.Sequence` of :class:`.Pattern`)
+		contains the compiled patterns.
+
+		Returns the backend (:class:`._Backend`).
+		"""
+		return make_gitignore_backend(name, patterns)
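+
+# Example usage (illustrative; the file names are hypothetical):
+#
+#     spec = GitIgnoreSpec.from_lines(["*.pyc", "!keep.pyc", "build/"])
+#     spec.match_file("module.pyc")   # True: matched by "*.pyc"
+#     spec.match_file("keep.pyc")     # False: re-included by "!keep.pyc"
+#     spec.match_file("build/out.o")  # True: under the "build/" directory pattern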
diff --git a/.venv/lib/python3.12/site-packages/pathspec/pathspec.py b/.venv/lib/python3.12/site-packages/pathspec/pathspec.py
new file mode 100644
index 0000000..bb88cbf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/pathspec.py
@@ -0,0 +1,460 @@
+"""
+This module provides :class:`.PathSpec` which is an object-oriented interface
+for pattern matching of files.
+"""
+from __future__ import annotations
+
+from collections.abc import (
+	Collection,
+	Iterable,
+	Iterator,
+	Sequence)
+from itertools import (
+	zip_longest)
+from typing import (
+	Callable,  # Replaced by `collections.abc.Callable` in 3.9.2.
+	Optional,  # Replaced by `X | None` in 3.10.
+	TypeVar,
+	Union,  # Replaced by `X | Y` in 3.10.
+	cast)
+
+Self = TypeVar("Self", bound='PathSpec')
+"""
+:class:`.PathSpec` self type hint to support Python v<3.11 using PEP 673
+recommendation.
+"""
+
+from pathspec import util
+from pathspec.backend import (
+	_Backend,
+	BackendNamesHint)
+from pathspec._backends.agg import (
+	make_pathspec_backend)
+from pathspec.pattern import (
+	Pattern)
+from pathspec._typing import (
+	AnyStr,  # Removed in 3.18.
+	deprecated)  # Added in 3.13.
+from pathspec.util import (
+	CheckResult,
+	StrPath,
+	TStrPath,
+	TreeEntry,
+	_is_iterable,
+	normalize_file)
+
+
+class PathSpec(object):
+	"""
+	The :class:`PathSpec` class is a wrapper around a list of compiled
+	:class:`.Pattern` instances.
+	"""
+
+	def __init__(
+		self,
+		patterns: Union[Sequence[Pattern], Iterable[Pattern]],
+		*,
+		backend: Union[BackendNamesHint, str, None] = None,
+		_test_backend_factory: Optional[Callable[[Sequence[Pattern]], _Backend]] = None,
+	) -> None:
+		"""
+		Initializes the :class:`.PathSpec` instance.
+
+		*patterns* (:class:`~collections.abc.Sequence` or :class:`~collections.abc.Iterable`)
+		contains each compiled pattern (:class:`.Pattern`). If not a sequence, it
+		will be converted to a :class:`list`.
+
+		*backend* (:class:`str` or :data:`None`) is the pattern (regular expression)
+		matching backend to use. Default is :data:`None` for "best" to use the best
+		available backend. Priority of backends is: "re2", "hyperscan", "simple".
+		The "simple" backend is always available.
+		"""
+		if not isinstance(patterns, Sequence):
+			patterns = list(patterns)
+
+		if backend is None:
+			backend = 'best'
+
+		backend = cast(BackendNamesHint, backend)
+		if _test_backend_factory is not None:
+			use_backend = _test_backend_factory(patterns)
+		else:
+			use_backend = self._make_backend(backend, patterns)
+
+		self._backend: _Backend = use_backend
+		"""
+		*_backend* (:class:`._Backend`) is the pattern (regular expression) matching
+		backend.
+		"""
+
+		self._backend_name: BackendNamesHint = backend
+		"""
+		*_backend_name* (:class:`str`) is the name of backend to use.
+		*_backend_name* (:class:`str`) is the name of the backend to use.
+
+		self.patterns: Sequence[Pattern] = patterns
+		"""
+		*patterns* (:class:`~collections.abc.Sequence` of :class:`.Pattern`)
+		contains the compiled patterns.
+		"""
+
+	def __add__(self: Self, other: PathSpec) -> Self:
+		"""
+		Combines the :attr:`self.patterns <.PathSpec.patterns>` patterns from two
+		:class:`PathSpec` instances.
+		"""
+		if isinstance(other, PathSpec):
+			return self.__class__(self.patterns + other.patterns, backend=self._backend_name)
+		else:
+			return NotImplemented
+
+	def __eq__(self, other: object) -> bool:
+		"""
+		Tests the equality of this path-spec with *other* (:class:`PathSpec`) by
+		comparing their :attr:`self.patterns <.PathSpec.patterns>` attributes.
+		"""
+		if isinstance(other, PathSpec):
+			paired_patterns = zip_longest(self.patterns, other.patterns)
+			return all(a == b for a, b in paired_patterns)
+		else:
+			return NotImplemented
+
+	def __iadd__(self: Self, other: PathSpec) -> Self:
+		"""
+		Adds the :attr:`self.patterns <.PathSpec.patterns>` from *other*
+		(:class:`PathSpec`) to this instance.
+		"""
+		if isinstance(other, PathSpec):
+			self.patterns += other.patterns
+			self._backend = self._make_backend(self._backend_name, self.patterns)
+			return self
+		else:
+			return NotImplemented
+
+	def __len__(self) -> int:
+		"""
+		Returns the number of :attr:`self.patterns <.PathSpec.patterns>` this
+		path-spec contains (:class:`int`).
+		"""
+		return len(self.patterns)
+
+	def check_file(
+		self,
+		file: TStrPath,
+		separators: Optional[Collection[str]] = None,
+	) -> CheckResult[TStrPath]:
+		"""
+		Check the files against this path-spec.
+
+		*file* (:class:`str` or :class:`os.PathLike`) is the file path to be matched
+		against :attr:`self.patterns <.PathSpec.patterns>`.
+
+		*separators* (:class:`~collections.abc.Collection` of :class:`str`; or
+		:data:`None`) optionally contains the path separators to normalize. See
+		:func:`.normalize_file` for more information.
+
+		Returns the file check result (:class:`.CheckResult`).
+		"""
+		norm_file = normalize_file(file, separators)
+		include, index = self._backend.match_file(norm_file)
+		return CheckResult(file, include, index)
+
+	def check_files(
+		self,
+		files: Iterable[TStrPath],
+		separators: Optional[Collection[str]] = None,
+	) -> Iterator[CheckResult[TStrPath]]:
+		"""
+		Check the files against this path-spec.
+
+		*files* (:class:`~collections.abc.Iterable` of :class:`str` or
+		:class:`os.PathLike`) contains the file paths to be checked against
+		:attr:`self.patterns <.PathSpec.patterns>`.
+
+		*separators* (:class:`~collections.abc.Collection` of :class:`str`; or
+		:data:`None`) optionally contains the path separators to normalize. See
+		:func:`.normalize_file` for more information.
+
+		Returns an :class:`~collections.abc.Iterator` yielding each file check
+		result (:class:`.CheckResult`).
+		"""
+		if not _is_iterable(files):
+			raise TypeError(f"files:{files!r} is not an iterable.")
+
+		for orig_file in files:
+			norm_file = normalize_file(orig_file, separators)
+			include, index = self._backend.match_file(norm_file)
+			yield CheckResult(orig_file, include, index)
+
+	def check_tree_files(
+		self,
+		root: StrPath,
+		on_error: Optional[Callable[[OSError], None]] = None,
+		follow_links: Optional[bool] = None,
+	) -> Iterator[CheckResult[str]]:
+		"""
+		Walks the specified root path for all files and checks them against this
+		path-spec.
+
+		*root* (:class:`str` or :class:`os.PathLike`) is the root directory to
+		search for files.
+
+		*on_error* (:class:`~collections.abc.Callable` or :data:`None`) optionally
+		is the error handler for file-system exceptions. It will be called with the
+		exception (:exc:`OSError`). Reraise the exception to abort the walk. Default
+		is :data:`None` to ignore file-system exceptions.
+
+		*follow_links* (:class:`bool` or :data:`None`) optionally is whether to walk
+		symbolic links that resolve to directories. Default is :data:`None` for
+		:data:`True`.
+
+		Returns an :class:`~collections.abc.Iterator` yielding each file check
+		result (:class:`.CheckResult`).
+		"""
+		files = util.iter_tree_files(root, on_error=on_error, follow_links=follow_links)
+		yield from self.check_files(files)
+
+	@classmethod
+	def from_lines(
+		cls: type[Self],
+		pattern_factory: Union[str, Callable[[AnyStr], Pattern]],
+		lines: Iterable[AnyStr],
+		*,
+		backend: Union[BackendNamesHint, str, None] = None,
+		_test_backend_factory: Optional[Callable[[Sequence[Pattern]], _Backend]] = None,
+	) -> Self:
+		"""
+		Compiles the pattern lines.
+
+		*pattern_factory* can be either the name of a registered pattern factory
+		(:class:`str`), or a :class:`~collections.abc.Callable` used to compile
+		patterns. It must accept an uncompiled pattern (:class:`str`) and return the
+		compiled pattern (:class:`.Pattern`).
+
+		*lines* (:class:`~collections.abc.Iterable`) yields each uncompiled pattern
+		(:class:`str`). This simply has to yield each line so that it can be a
+		:class:`io.TextIOBase` (e.g., from :func:`open` or :class:`io.StringIO`) or
+		the result from :meth:`str.splitlines`.
+
+		*backend* (:class:`str` or :data:`None`) is the pattern (or regular
+		expression) matching backend to use. Default is :data:`None` for "best" to
+		use the best available backend. Priority of backends is: "re2", "hyperscan",
+		"simple". The "simple" backend is always available.
+
+		Returns the :class:`PathSpec` instance.
+		"""
+		if isinstance(pattern_factory, str):
+			pattern_factory = util.lookup_pattern(pattern_factory)
+
+		if not callable(pattern_factory):
+			raise TypeError(f"pattern_factory:{pattern_factory!r} is not callable.")
+
+		if not _is_iterable(lines):
+			raise TypeError(f"lines:{lines!r} is not an iterable.")
+
+		patterns = [pattern_factory(line) for line in lines if line]
+		return cls(patterns, backend=backend, _test_backend_factory=_test_backend_factory)
+
+	@staticmethod
+	def _make_backend(
+		name: BackendNamesHint,
+		patterns: Sequence[Pattern],
+	) -> _Backend:
+		"""
+		.. warning:: This method is not part of the public API. It is subject to
+			change.
+
+		Create the backend for the patterns.
+
+		*name* (:class:`str`) is the name of the backend.
+
+		*patterns* (:class:`~collections.abc.Sequence` of :class:`.Pattern`)
+		contains the compiled patterns.
+
+		Returns the matcher (:class:`._Backend`).
+		"""
+		return make_pathspec_backend(name, patterns)
+
+	def match_entries(
+		self,
+		entries: Iterable[TreeEntry],
+		separators: Optional[Collection[str]] = None,
+		*,
+		negate: Optional[bool] = None,
+	) -> Iterator[TreeEntry]:
+		"""
+		Matches the entries to this path-spec.
+
+		*entries* (:class:`~collections.abc.Iterable` of :class:`.TreeEntry`)
+		contains the entries to be matched against :attr:`self.patterns <.PathSpec.patterns>`.
+
+		*separators* (:class:`~collections.abc.Collection` of :class:`str`; or
+		:data:`None`) optionally contains the path separators to normalize. See
+		:func:`.normalize_file` for more information.
+
+		*negate* (:class:`bool` or :data:`None`) is whether to negate the match
+		results of the patterns. If :data:`True`, a pattern matching a file will
+		exclude the file rather than include it. Default is :data:`None` for
+		:data:`False`.
+
+		Returns the matched entries (:class:`~collections.abc.Iterator` of
+		:class:`.TreeEntry`).
+		"""
+		if not _is_iterable(entries):
+			raise TypeError(f"entries:{entries!r} is not an iterable.")
+
+		for entry in entries:
+			norm_file = normalize_file(entry.path, separators)
+			include, _index = self._backend.match_file(norm_file)
+
+			if negate:
+				include = not include
+
+			if include:
+				yield entry
+
+	def match_file(
+		self,
+		file: StrPath,
+		separators: Optional[Collection[str]] = None,
+	) -> bool:
+		"""
+		Matches the file to this path-spec.
+
+		*file* (:class:`str` or :class:`os.PathLike`) is the file path to be matched
+		against :attr:`self.patterns <.PathSpec.patterns>`.
+
+		*separators* (:class:`~collections.abc.Collection` of :class:`str`)
+		optionally contains the path separators to normalize. See
+		:func:`.normalize_file` for more information.
+
+		Returns :data:`True` if *file* matched; otherwise, :data:`False`.
+		"""
+		norm_file = normalize_file(file, separators)
+		include, _index = self._backend.match_file(norm_file)
+		return bool(include)
+
+	def match_files(
+		self,
+		files: Iterable[StrPath],
+		separators: Optional[Collection[str]] = None,
+		*,
+		negate: Optional[bool] = None,
+	) -> Iterator[StrPath]:
+		"""
+		Matches the files to this path-spec.
+
+		*files* (:class:`~collections.abc.Iterable` of :class:`str` or
+		:class:`os.PathLike`) contains the file paths to be matched against
+		:attr:`self.patterns <.PathSpec.patterns>`.
+
+		*separators* (:class:`~collections.abc.Collection` of :class:`str`; or
+		:data:`None`) optionally contains the path separators to normalize. See
+		:func:`.normalize_file` for more information.
+
+		*negate* (:class:`bool` or :data:`None`) is whether to negate the match
+		results of the patterns. If :data:`True`, a pattern matching a file will
+		exclude the file rather than include it. Default is :data:`None` for
+		:data:`False`.
+
+		Returns the matched files (:class:`~collections.abc.Iterator` of
+		:class:`str` or :class:`os.PathLike`).
+		"""
+		if not _is_iterable(files):
+			raise TypeError(f"files:{files!r} is not an iterable.")
+
+		for orig_file in files:
+			norm_file = normalize_file(orig_file, separators)
+			include, _index = self._backend.match_file(norm_file)
+
+			if negate:
+				include = not include
+
+			if include:
+				yield orig_file
+
+	def match_tree_entries(
+		self,
+		root: StrPath,
+		on_error: Optional[Callable[[OSError], None]] = None,
+		follow_links: Optional[bool] = None,
+		*,
+		negate: Optional[bool] = None,
+	) -> Iterator[TreeEntry]:
+		"""
+		Walks the specified root path for all files and matches them to this
+		path-spec.
+
+		*root* (:class:`str` or :class:`os.PathLike`) is the root directory to
+		search.
+
+		*on_error* (:class:`~collections.abc.Callable` or :data:`None`) optionally
+		is the error handler for file-system exceptions. It will be called with the
+		exception (:exc:`OSError`). Reraise the exception to abort the walk. Default
+		is :data:`None` to ignore file-system exceptions.
+
+		*follow_links* (:class:`bool` or :data:`None`) optionally is whether to walk
+		symbolic links that resolve to directories. Default is :data:`None` for
+		:data:`True`.
+
+		*negate* (:class:`bool` or :data:`None`) is whether to negate the match
+		results of the patterns. If :data:`True`, a pattern matching a file will
+		exclude the file rather than include it. Default is :data:`None` for
+		:data:`False`.
+
+		Returns the matched files (:class:`~collections.abc.Iterator` of
+		:class:`.TreeEntry`).
+		"""
+		entries = util.iter_tree_entries(root, on_error=on_error, follow_links=follow_links)
+		yield from self.match_entries(entries, negate=negate)
+
+	# NOTICE: The deprecation warning was only added in 1.0.0 (from 2026-01-05).
+	@deprecated((
+		"PathSpec.match_tree() is deprecated. Use .match_tree_files() instead."
+	))
+	def match_tree(self, *args, **kw) -> Iterator[str]:
+		"""
+		.. version-deprecated:: 0.3.2
+			This is an alias for the :meth:`self.match_tree_files <.PathSpec.match_tree_files>`
+			method.
+		"""
+		return self.match_tree_files(*args, **kw)
+
+	def match_tree_files(
+		self,
+		root: StrPath,
+		on_error: Optional[Callable[[OSError], None]] = None,
+		follow_links: Optional[bool] = None,
+		*,
+		negate: Optional[bool] = None,
+	) -> Iterator[str]:
+		"""
+		Walks the specified root path for all files and matches them to this
+		path-spec.
+
+		*root* (:class:`str` or :class:`os.PathLike`) is the root directory to
+		search for files.
+
+		*on_error* (:class:`~collections.abc.Callable` or :data:`None`) optionally
+		is the error handler for file-system exceptions. It will be called with the
+		exception (:exc:`OSError`). Reraise the exception to abort the walk. Default
+		is :data:`None` to ignore file-system exceptions.
+
+		*follow_links* (:class:`bool` or :data:`None`) optionally is whether to walk
+		symbolic links that resolve to directories. Default is :data:`None` for
+		:data:`True`.
+
+		*negate* (:class:`bool` or :data:`None`) is whether to negate the match
+		results of the patterns. If :data:`True`, a pattern matching a file will
+		exclude the file rather than include it. Default is :data:`None` for
+		:data:`False`.
+
+		Returns the matched files (:class:`~collections.abc.Iterator` of :class:`str`).
+		"""
+		files = util.iter_tree_files(root, on_error=on_error, follow_links=follow_links)
+		yield from self.match_files(files, negate=negate)
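+
+# Example usage (illustrative; the file names are hypothetical):
+#
+#     spec = PathSpec.from_lines("gitwildmatch", ["*.log", "!important.log"])
+#     spec.match_file("debug.log")                 # True
+#     spec.match_file("important.log")             # False (negated)
+#     list(spec.match_files(["a.log", "b.txt"]))   # ["a.log"]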
diff --git a/.venv/lib/python3.12/site-packages/pathspec/pattern.py b/.venv/lib/python3.12/site-packages/pathspec/pattern.py
new file mode 100644
index 0000000..a4b8a2c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/pattern.py
@@ -0,0 +1,241 @@
+"""
+This module provides the base definition for patterns.
+"""
+from __future__ import annotations
+
+import re
+from collections.abc import (
+	Iterable,
+	Iterator)
+from dataclasses import (
+	dataclass)
+from typing import (
+	Any,
+	Optional,  # Replaced by `X | None` in 3.10.
+	TypeVar,
+	Union)  # Replaced by `X | Y` in 3.10.
+
+from ._typing import (
+	AnyStr,  # Removed in 3.18.
+	deprecated,  # Added in 3.13.
+	override)  # Added in 3.12.
+
+RegexPatternSelf = TypeVar("RegexPatternSelf", bound='RegexPattern')
+"""
+:class:`.RegexPattern` self type hint to support Python v<3.11 using PEP 673
+recommendation.
+"""
+
+class Pattern(object):
+	"""
+	The :class:`Pattern` class is the abstract definition of a pattern.
+	"""
+
+	# Make the class dict-less.
+	__slots__ = (
+		'include',
+	)
+
+	def __init__(self, include: Optional[bool]) -> None:
+		"""
+		Initializes the :class:`Pattern` instance.
+
+		*include* (:class:`bool` or :data:`None`) is whether the matched files
+		should be included (:data:`True`), excluded (:data:`False`), or is a
+		null-operation (:data:`None`).
+		"""
+
+		self.include = include
+		"""
+		*include* (:class:`bool` or :data:`None`) is whether the matched files
+		should be included (:data:`True`), excluded (:data:`False`), or is a
+		null-operation (:data:`None`).
+		"""
+
+	@deprecated((
+		"Pattern.match() is deprecated. Use Pattern.match_file() with a loop for "
+		"similar results."
+	))
+	def match(self, files: Iterable[str]) -> Iterator[str]:
+		"""
+		.. version-deprecated:: 0.10.0
+			This method is no longer used. Use the :meth:`self.match_file <.Pattern.match_file>`
+			method with a loop for similar results.
+
+		Matches this pattern against the specified files.
+
+		*files* (:class:`~collections.abc.Iterable` of :class:`str`) contains each
+		file relative to the root directory.
+
+		Returns an :class:`~collections.abc.Iterable` yielding each matched file
+		path (:class:`str`).
+		"""
+		for file in files:
+			if self.match_file(file) is not None:
+				yield file
+
+	def match_file(self, file: str) -> Optional[Any]:
+		"""
+		Matches this pattern against the specified file.
+
+		*file* (:class:`str`) is the normalized file path to match against.
+
+		Returns the match result if *file* matched; otherwise, :data:`None`.
+		"""
+		raise NotImplementedError((
+			"{cls.__module__}.{cls.__qualname__} must override match_file()."
+		).format(cls=self.__class__))
+
+
+class RegexPattern(Pattern):
+	"""
+	The :class:`RegexPattern` class is an implementation of a pattern using
+	regular expressions.
+	"""
+
+	# Keep the class dict-less.
+	__slots__ = (
+		'pattern',
+		'regex',
+	)
+
+	def __init__(
+		self,
+		pattern: Union[AnyStr, re.Pattern, None],
+		include: Optional[bool] = None,
+	) -> None:
+		"""
+		Initializes the :class:`RegexPattern` instance.
+
+		*pattern* (:class:`str`, :class:`bytes`, :class:`re.Pattern`, or
+		:data:`None`) is the pattern to compile into a regular expression.
+
+		*include* (:class:`bool` or :data:`None`) must be :data:`None` unless
+		*pattern* is a precompiled regular expression (:class:`re.Pattern`) in which
+		case it is whether matched files should be included (:data:`True`), excluded
+		(:data:`False`), or is a null operation (:data:`None`).
+
+			.. note:: Subclasses do not need to support the *include* parameter.
+		"""
+
+		if isinstance(pattern, (str, bytes)):
+			assert include is None, (
+				f"include:{include!r} must be null when pattern:{pattern!r} is a string."
+			)
+			regex, include = self.pattern_to_regex(pattern)
+			# NOTE: Make sure to allow a null regular expression to be
+			# returned for a null-operation.
+			if include is not None:
+				regex = re.compile(regex)
+
+		elif pattern is not None and hasattr(pattern, 'match'):
+			# Assume pattern is a precompiled regular expression.
+			# - NOTE: Uses the specified *include*.
+			regex = pattern
+
+		elif pattern is None:
+			# NOTE: Make sure to allow a null pattern to be passed for a
+			# null-operation.
+			assert include is None, (
+				f"include:{include!r} must be null when pattern:{pattern!r} is null."
+			)
+			regex = None
+
+		else:
+			raise TypeError(f"pattern:{pattern!r} is not a string, re.Pattern, or None.")
+
+		super(RegexPattern, self).__init__(include)
+
+		self.pattern: Union[AnyStr, re.Pattern, None] = pattern
+		"""
+		*pattern* (:class:`str`, :class:`bytes`, :class:`re.Pattern`, or
+		:data:`None`) is the uncompiled, input pattern. This is for reference.
+		"""
+
+		self.regex: Optional[re.Pattern] = regex
+		"""
+		*regex* (:class:`re.Pattern` or :data:`None`) is the compiled regular
+		expression for the pattern.
+		"""
+
+	def __copy__(self: RegexPatternSelf) -> RegexPatternSelf:
+		"""
+		Performs a shallow copy of the pattern.
+
+		Returns the copy (:class:`RegexPattern`).
+		"""
+		other = self.__class__(self.regex, self.include)
+		other.pattern = self.pattern
+		return other
+
+	def __eq__(self, other: object) -> bool:
+		"""
+		Tests the equality of this regex pattern with *other* (:class:`RegexPattern`)
+		by comparing their :attr:`~Pattern.include` and :attr:`~RegexPattern.regex`
+		attributes.
+		"""
+		if isinstance(other, RegexPattern):
+			return self.include == other.include and self.regex == other.regex
+		else:
+			return NotImplemented
+
+	@override
+	def match_file(self, file: AnyStr) -> Optional[RegexMatchResult]:
+		"""
+		Matches this pattern against the specified file.
+
+		*file* (:class:`str` or :class:`bytes`) is the file path relative to the
+		root directory (e.g., "relative/path/to/file").
+
+		Returns the match result (:class:`.RegexMatchResult`) if *file* matched;
+		otherwise, :data:`None`.
+		"""
+		if self.include is not None:
+			match = self.regex.search(file)
+			if match is not None:
+				return RegexMatchResult(match)
+
+		return None
+
+	@classmethod
+	def pattern_to_regex(
+		cls,
+		pattern: AnyStr,
+	) -> tuple[Optional[AnyStr], Optional[bool]]:
+		"""
+		Convert the pattern into an uncompiled regular expression.
+
+		*pattern* (:class:`str` or :class:`bytes`) is the pattern to convert into a
+		regular expression.
+
+		Returns a :class:`tuple` containing:
+
+			-	*pattern* (:class:`str`, :class:`bytes`, or :data:`None`) is the
+				uncompiled regular expression.
+
+			-	*include* (:class:`bool` or :data:`None`) is whether matched files
+				should be included (:data:`True`), excluded (:data:`False`), or is a
+				null-operation (:data:`None`).
+
+			.. note:: The default implementation simply returns *pattern* and
+			   :data:`True`.
+		"""
+		return pattern, True
+
+
+@dataclass()
+class RegexMatchResult(object):
+	"""
+	The :class:`RegexMatchResult` data class is used to return information about
+	the matched regular expression.
+	"""
+
+	# Keep the class dict-less.
+	__slots__ = (
+		'match',
+	)
+
+	match: re.Match
+	"""
+	*match* (:class:`re.Match`) is the regex match result.
+	"""
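Taken together, the constructor, match_file(), and the default pattern_to_regex() allow RegexPattern to be used directly with a raw regular expression. A minimal sketch, assuming this vendored pathspec tree is importable and that the constructor signature is (pattern, include=None), as the __init__ body above suggests:

    import re
    from pathspec.pattern import RegexPattern

    # The default pattern_to_regex() returns (pattern, True), so a raw regex
    # string is compiled and treated as an include pattern.
    pat = RegexPattern(r'^docs/')
    print(pat.include)                       # True
    print(pat.match_file('docs/index.md'))   # RegexMatchResult(...) -- matched
    print(pat.match_file('src/main.py'))     # None -- not matched

    # A precompiled regex is used as-is, with the caller supplying *include*.
    tmp = RegexPattern(re.compile(r'\.tmp$'), include=False)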
diff --git a/.venv/lib/python3.12/site-packages/pathspec/patterns/__init__.py b/.venv/lib/python3.12/site-packages/pathspec/patterns/__init__.py
new file mode 100644
index 0000000..f1738a5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/patterns/__init__.py
@@ -0,0 +1,12 @@
+"""
+The *pathspec.patterns* package contains the pattern matching implementations.
+"""
+
+# Load pattern implementations.
+from .gitignore import basic as _
+from .gitignore import spec as _
+
+# DEPRECATED: Deprecated since 0.11.0 (from 2023-01-24). Expose the
+# GitWildMatchPattern class in this module for backward compatibility with
+# 0.5.0 (from 2016-08-22).
+from .gitwildmatch import GitWildMatchPattern
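For downstream imports, the practical effect of this shim is that both the new package layout and the legacy location resolve to working classes; a short sketch, assuming this tree is installed:

    # New layout provided by this package:
    from pathspec.patterns.gitignore.basic import GitIgnoreBasicPattern
    from pathspec.patterns.gitignore.spec import GitIgnoreSpecPattern

    # Legacy re-export, kept for backward compatibility with pathspec 0.5.0:
    from pathspec.patterns import GitWildMatchPattern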
diff --git a/.venv/lib/python3.12/site-packages/pathspec/patterns/gitignore/__init__.py b/.venv/lib/python3.12/site-packages/pathspec/patterns/gitignore/__init__.py
new file mode 100644
index 0000000..e440754
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/patterns/gitignore/__init__.py
@@ -0,0 +1,17 @@
+"""
+The *pathspec.patterns.gitignore* package provides the *gitignore*
+implementations.
+
+The following classes are imported and made available from this package:
+
+- :class:`pathspec.patterns.gitignore.base.GitIgnorePatternError`
+"""
+
+# Expose the GitIgnorePatternError for convenience.
+from .base import (
+	GitIgnorePatternError)
+
+# Declare imports as part of the public interface.
+__all__ = [
+	'GitIgnorePatternError',
+]
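GitIgnorePatternError is the exception callers should catch when compiling patterns from untrusted input; a hedged sketch, assuming the GitIgnoreBasicPattern class added further below:

    from pathspec.patterns.gitignore import GitIgnorePatternError
    from pathspec.patterns.gitignore.basic import GitIgnoreBasicPattern

    try:
        # A lone '!' strips down to an empty pattern and normalizes to nothing.
        GitIgnoreBasicPattern.pattern_to_regex('!')
    except GitIgnorePatternError as error:
        print(f"Rejected pattern: {error}")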
diff --git a/.venv/lib/python3.12/site-packages/pathspec/patterns/gitignore/base.py b/.venv/lib/python3.12/site-packages/pathspec/patterns/gitignore/base.py
new file mode 100644
index 0000000..0e1dd3c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/patterns/gitignore/base.py
@@ -0,0 +1,176 @@
+"""
+This module provides common classes for the gitignore patterns.
+"""
+
+import re
+
+from pathspec.pattern import (
+	RegexPattern)
+from pathspec._typing import (
+	AnyStr)  # Removed in 3.18.
+
+_BYTES_ENCODING = 'latin1'
+"""
+The encoding to use when parsing a byte string pattern.
+"""
+
+
+class _GitIgnoreBasePattern(RegexPattern):
+	"""
+	.. warning:: This class is not part of the public API. It is subject to
+		change.
+
+	The :class:`_GitIgnoreBasePattern` class is the base implementation for a
+	compiled gitignore pattern.
+	"""
+
+	# Keep the dict-less class hierarchy.
+	__slots__ = ()
+
+	@staticmethod
+	def escape(s: AnyStr) -> AnyStr:
+		"""
+		Escape special characters in the given string.
+
+		*s* (:class:`str` or :class:`bytes`) a filename or a string that you want to
+		escape, usually before adding it to a ".gitignore".
+
+		Returns the escaped string (:class:`str` or :class:`bytes`).
+		"""
+		if isinstance(s, str):
+			return_type = str
+			string = s
+		elif isinstance(s, bytes):
+			return_type = bytes
+			string = s.decode(_BYTES_ENCODING)
+		else:
+			raise TypeError(f"s:{s!r} is not a unicode or byte string.")
+
+		# Reference: https://git-scm.com/docs/gitignore#_pattern_format
+		out_string = ''.join((f"\\{x}" if x in '[]!*#?' else x) for x in string)
+
+		if return_type is bytes:
+			return out_string.encode(_BYTES_ENCODING)
+		else:
+			return out_string
+
+	@staticmethod
+	def _translate_segment_glob(pattern: str) -> str:
+		"""
+		Translates the glob pattern to a regular expression. This is used in the
+		constructor to translate a path segment glob pattern to its corresponding
+		regular expression.
+
+		*pattern* (:class:`str`) is the glob pattern.
+
+		Returns the regular expression (:class:`str`).
+		"""
+		# NOTE: This is derived from `fnmatch.translate()` and is similar to the
+		# POSIX function `fnmatch()` with the `FNM_PATHNAME` flag set.
+
+		escape = False
+		regex = ''
+		i, end = 0, len(pattern)
+		while i < end:
+			# Get next character.
+			char = pattern[i]
+			i += 1
+
+			if escape:
+				# Escape the character.
+				escape = False
+				regex += re.escape(char)
+
+			elif char == '\\':
+				# Escape character, escape next character.
+				escape = True
+
+			elif char == '*':
+				# Multi-character wildcard. Match any string (except slashes), including
+				# an empty string.
+				regex += '[^/]*'
+
+			elif char == '?':
+				# Single-character wildcard. Match any single character (except a
+				# slash).
+				regex += '[^/]'
+
+			elif char == '[':
+				# Bracket expression wildcard. Except for the beginning exclamation
+				# mark, the whole bracket expression can be used directly as regex, but
+				# we have to find where the expression ends.
+				# - "[][!]" matches ']', '[' and '!'.
+				# - "[]-]" matches ']' and '-'.
+				# - "[!]a-]" matches any character except ']', 'a' and '-'.
+				j = i
+
+				# Pass bracket expression negation.
+				if j < end and (pattern[j] == '!' or pattern[j] == '^'):
+					j += 1
+
+				# Pass first closing bracket if it is at the beginning of the
+				# expression.
+				if j < end and pattern[j] == ']':
+					j += 1
+
+				# Find closing bracket. Stop once we reach the end or find it.
+				while j < end and pattern[j] != ']':
+					j += 1
+
+				if j < end:
+					# Found end of bracket expression. Increment j to be one past the
+					# closing bracket:
+					#
+					#  [...]
+					#   ^   ^
+					#   i   j
+					#
+					j += 1
+					expr = '['
+
+					if pattern[i] == '!':
+						# Bracket expression needs to be negated.
+						expr += '^'
+						i += 1
+					elif pattern[i] == '^':
+						# POSIX declares that the regex bracket expression negation "[^...]"
+						# is undefined in a glob pattern. Python's `fnmatch.translate()`
+						# escapes the caret ('^') as a literal. Git supports using a
+						# caret for negation. Maintain consistency with Git because that is
+						# the expected behavior.
+						expr += '^'
+						i += 1
+
+					# Build regex bracket expression. Escape slashes so they are treated
+					# as literal slashes by regex as defined by POSIX.
+					expr += pattern[i:j].replace('\\', '\\\\')
+
+					# Add regex bracket expression to regex result.
+					regex += expr
+
+					# Set i to one past the closing bracket.
+					i = j
+
+				else:
+					# Failed to find closing bracket, treat opening bracket as a bracket
+					# literal instead of as an expression.
+					regex += '\\['
+
+			else:
+				# Regular character, escape it for regex.
+				regex += re.escape(char)
+
+		if escape:
+			raise ValueError((
+				f"Escape character found with no next character to escape: {pattern!r}"
+			))  # ValueError
+
+		return regex
+
+
+class GitIgnorePatternError(ValueError):
+	"""
+	The :class:`GitIgnorePatternError` class indicates an invalid gitignore
+	pattern.
+	"""
+	pass
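escape() only guards the six gitignore metacharacters ('[', ']', '!', '*', '#', '?'); everything else, including spaces and dots, passes through unchanged. A small sketch, assuming the concrete subclasses added below inherit the helper:

    from pathspec.patterns.gitignore.basic import GitIgnoreBasicPattern

    # Prints: release\[1\]\!.txt
    print(GitIgnoreBasicPattern.escape('release[1]!.txt'))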
diff --git a/.venv/lib/python3.12/site-packages/pathspec/patterns/gitignore/basic.py b/.venv/lib/python3.12/site-packages/pathspec/patterns/gitignore/basic.py
new file mode 100644
index 0000000..95d7915
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/patterns/gitignore/basic.py
@@ -0,0 +1,317 @@
+"""
+This module provides :class:`GitIgnoreBasicPattern` which implements Git's
+`gitignore`_ patterns as documented. This differs from how Git actually behaves
+when including files in excluded directories.
+
+.. _`gitignore`: https://git-scm.com/docs/gitignore
+"""
+
+from typing import (
+	Optional)  # Replaced by `X | None` in 3.10.
+
+from pathspec import util
+from pathspec._typing import (
+	AnyStr,  # Removed in 3.18.
+	assert_unreachable,
+	override)  # Added in 3.12.
+
+from .base import (
+	GitIgnorePatternError,
+	_BYTES_ENCODING,
+	_GitIgnoreBasePattern)
+
+
+class GitIgnoreBasicPattern(_GitIgnoreBasePattern):
+	"""
+	The :class:`GitIgnoreBasicPattern` class represents a compiled gitignore
+	pattern as documented. This is registered as "gitignore".
+	"""
+
+	# Keep the dict-less class hierarchy.
+	__slots__ = ()
+
+	@staticmethod
+	def __normalize_segments(
+		is_dir_pattern: bool,
+		pattern_segs: list[str],
+	) -> tuple[Optional[list[str]], Optional[str]]:
+		"""
+		Normalize the pattern segments to make processing easier.
+
+		*is_dir_pattern* (:class:`bool`) is whether the pattern is a directory
+		pattern (i.e., ends with a slash '/').
+
+		*pattern_segs* (:class:`list` of :class:`str`) contains the pattern
+		segments. This may be modified in place.
+
+		Returns a :class:`tuple` containing either:
+
+		- The normalized segments (:class:`list` of :class:`str`; or :data:`None`).
+
+		- The regular expression override (:class:`str` or :data:`None`).
+		"""
+		if not pattern_segs[0]:
+			# A pattern beginning with a slash ('/') should match relative to the root
+			# directory. Remove the empty first segment to make the pattern relative
+			# to root.
+			del pattern_segs[0]
+
+		elif len(pattern_segs) == 1 or (len(pattern_segs) == 2 and not pattern_segs[1]):
+			# A single segment pattern with or without a trailing slash ('/') will
+			# match any descendant path. This is equivalent to "**/{pattern}". Prepend
+			# double-asterisk segment to make pattern relative to root.
+			if pattern_segs[0] != '**':
+				pattern_segs.insert(0, '**')
+
+		else:
+			# A pattern without a beginning slash ('/') but contains at least one
+			# prepended directory (e.g., "dir/{pattern}") should match relative to the
+			# root directory. No segment modification is needed.
+			pass
+
+		if not pattern_segs:
+			# After normalization, we end up with no pattern at all. This must be
+			# because the pattern is invalid.
+			raise ValueError("Pattern normalized to nothing.")
+
+		if not pattern_segs[-1]:
+			# A pattern ending with a slash ('/') will match all descendant paths if
+			# it is a directory but not if it is a regular file. This is equivalent to
+			# "{pattern}/**". Set empty last segment to a double-asterisk to include
+			# all descendants.
+			pattern_segs[-1] = '**'
+
+		# EDGE CASE: Collapse duplicate double-asterisk sequences (i.e., '**/**').
+		# Iterate over the segments in reverse order and remove the duplicate double
+		# asterisks as we go.
+		for i in range(len(pattern_segs) - 1, 0, -1):
+			prev = pattern_segs[i-1]
+			seg = pattern_segs[i]
+			if prev == '**' and seg == '**':
+				del pattern_segs[i]
+
+		seg_count = len(pattern_segs)
+		if seg_count == 1 and pattern_segs[0] == '**':
+			if is_dir_pattern:
+				# The pattern "**/" will be normalized to "**", but it should match
+				# everything except for files in the root. Special case this pattern.
+				return (None, '/')
+			else:
+				# The pattern "**" will match every path. Special case this pattern.
+				return (None, '.')
+
+		elif (
+			seg_count == 2
+			and pattern_segs[0] == '**'
+			and pattern_segs[1] == '*'
+		):
+			# The pattern "*" will be normalized to "**/*" and will match every
+			# path. Special case this pattern for efficiency.
+			return (None, '.')
+
+		elif (
+			seg_count == 3
+			and pattern_segs[0] == '**'
+			and pattern_segs[1] == '*'
+			and pattern_segs[2] == '**'
+		):
+			# The pattern "*/" will be normalized to "**/*/**" which will match every
+			# file not in the root directory. Special case this pattern for
+			# efficiency.
+			return (None, '/')
+
+		# No regular expression override, return modified pattern segments.
+		return (pattern_segs, None)
+
+	@override
+	@classmethod
+	def pattern_to_regex(
+		cls,
+		pattern: AnyStr,
+	) -> tuple[Optional[AnyStr], Optional[bool]]:
+		"""
+		Convert the pattern into a regular expression.
+
+		*pattern* (:class:`str` or :class:`bytes`) is the pattern to convert into a
+		regular expression.
+
+		Returns a :class:`tuple` containing:
+
+			-	*pattern* (:class:`str`, :class:`bytes` or :data:`None`) is the
+				uncompiled regular expression.
+
+			-	*include* (:class:`bool` or :data:`None`) is whether matched files
+				should be included (:data:`True`), excluded (:data:`False`), or is a
+				null-operation (:data:`None`).
+		"""
+		if isinstance(pattern, str):
+			pattern_str = pattern
+			return_type = str
+		elif isinstance(pattern, bytes):
+			pattern_str = pattern.decode(_BYTES_ENCODING)
+			return_type = bytes
+		else:
+			raise TypeError(f"{pattern=!r} is not a unicode or byte string.")
+
+		original_pattern = pattern_str
+		del pattern
+
+		if pattern_str.endswith('\\ '):
+			# EDGE CASE: Spaces can be escaped with backslash. If a pattern that ends
+			# with a backslash is followed by a space, do not strip from the left.
+			pass
+		else:
+			# EDGE CASE: Leading spaces should be kept (only trailing spaces should be
+			# removed).
+			pattern_str = pattern_str.rstrip()
+
+		regex: Optional[str]
+		include: Optional[bool]
+
+		if not pattern_str:
+			# A blank pattern is a null-operation (neither includes nor excludes
+			# files).
+			return (None, None)
+
+		elif pattern_str.startswith('#'):
+			# A pattern starting with a hash ('#') serves as a comment (neither
+			# includes nor excludes files). Escape the hash with a backslash to match
+			# a literal hash (i.e., '\#').
+			return (None, None)
+
+		if pattern_str.startswith('!'):
+			# A pattern starting with an exclamation mark ('!') negates the pattern
+			# (exclude instead of include). Escape the exclamation mark with a back
+			# slash to match a literal exclamation mark (i.e., '\!').
+			include = False
+			# Remove leading exclamation mark.
+			pattern_str = pattern_str[1:]
+		else:
+			include = True
+
+		# Split pattern into segments.
+		pattern_segs = pattern_str.split('/')
+
+		# Check whether the pattern is specifically a directory pattern before
+		# normalization.
+		is_dir_pattern = not pattern_segs[-1]
+
+		if pattern_str == '/':
+			# EDGE CASE: A single slash ('/') is not addressed by the gitignore
+			# documentation. Git treats it as a no-op (does not match any files). The
+			# straightforward interpretation is to treat it as a directory and match
+			# every descendant path (equivalent to '**'). Remove the directory pattern
+			# flag so that it is treated as '**' instead of '**/'.
+			is_dir_pattern = False
+
+		# Normalize pattern to make processing easier.
+		try:
+			pattern_segs, override_regex = cls.__normalize_segments(
+				is_dir_pattern, pattern_segs,
+			)
+		except ValueError as e:
+			raise GitIgnorePatternError((
+				f"Invalid git pattern: {original_pattern!r}"
+			)) from e  # GitIgnorePatternError
+
+		if override_regex is not None:
+			# Use regex override.
+			regex = override_regex
+
+		elif pattern_segs is not None:
+			# Build regular expression from pattern.
+			try:
+				regex_parts = cls.__translate_segments(pattern_segs)
+			except ValueError as e:
+				raise GitIgnorePatternError((
+					f"Invalid git pattern: {original_pattern!r}"
+				)) from e  # GitIgnorePatternError
+
+			regex = ''.join(regex_parts)
+
+		else:
+			assert_unreachable((
+				f"{override_regex=} and {pattern_segs=} cannot both be null."
+			))  # assert_unreachable
+
+		# Encode regex if needed.
+		out_regex: AnyStr
+		if regex is not None and return_type is bytes:
+			out_regex = regex.encode(_BYTES_ENCODING)
+		else:
+			out_regex = regex
+
+		return (out_regex, include)
+
+	@classmethod
+	def __translate_segments(cls, pattern_segs: list[str]) -> list[str]:
+		"""
+		Translate the pattern segments to regular expressions.
+
+		*pattern_segs* (:class:`list` of :class:`str`) contains the pattern
+		segments.
+
+		Returns the regular expression parts (:class:`list` of :class:`str`).
+		"""
+		# Build regular expression from pattern.
+		out_parts = []
+		need_slash = False
+		end = len(pattern_segs) - 1
+		for i, seg in enumerate(pattern_segs):
+			if seg == '**':
+				if i == 0:
+					# A normalized pattern beginning with double-asterisks ('**') will
+					# match any leading path segments.
+					# - NOTICE: '(?:^|/)' benchmarks slower using p15 (sm=0.9382,
+					#   hs=0.9966, re2=0.9337).
+					out_parts.append('^(?:.+/)?')
+
+				elif i < end:
+					# A pattern with inner double-asterisks ('**') will match multiple (or
+					# zero) inner path segments.
+					out_parts.append('(?:/.+)?')
+					need_slash = True
+
+				else:
+					assert i == end, (i, end)
+					# A normalized pattern ending with double-asterisks ('**') will match
+					# any trailing path segments.
+					out_parts.append('/')
+
+			else:
+				# Match path segment.
+				if i == 0:
+					# Anchor to root directory.
+					out_parts.append('^')
+
+				if need_slash:
+					out_parts.append('/')
+
+				if seg == '*':
+					# Match whole path segment.
+					out_parts.append('[^/]+')
+
+				else:
+					# Match segment glob pattern.
+					out_parts.append(cls._translate_segment_glob(seg))
+
+				if i == end:
+					if seg == '*':
+						# A pattern ending with an asterisk ('*') will match a file or
+						# directory (without matching descendant paths). E.g., "foo/*"
+						# matches "foo/test.json", "foo/bar/", but not "foo/bar/hello.c".
+						out_parts.append('/?$')
+
+					else:
+						# A pattern ending without a slash ('/') will match a file or a
+						# directory (with paths underneath it). E.g., "foo" matches "foo",
+						# "foo/bar", "foo/bar/baz", etc.
+						out_parts.append('(?:/|$)')
+
+				need_slash = True
+
+		return out_parts
+
+
+# Register GitIgnoreBasicPattern as "gitignore".
+util.register_pattern('gitignore', GitIgnoreBasicPattern)
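A short usage sketch for the classmethod above, assuming this vendored tree is importable; the regex noted in the comment follows from the translation rules in __translate_segments():

    from pathspec.patterns.gitignore.basic import GitIgnoreBasicPattern

    # Blank lines and comments are null-operations.
    print(GitIgnoreBasicPattern.pattern_to_regex(''))           # (None, None)
    print(GitIgnoreBasicPattern.pattern_to_regex('# comment'))  # (None, None)

    # A negated pattern compiles with include=False.
    regex, include = GitIgnoreBasicPattern.pattern_to_regex('!keep.log')
    print(include)  # False

    # A bare name is matched at any depth, along with its descendants,
    # e.g. roughly '^(?:.+/)?build(?:/|$)'.
    regex, include = GitIgnoreBasicPattern.pattern_to_regex('build')
    print(regex, include)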
diff --git a/.venv/lib/python3.12/site-packages/pathspec/patterns/gitignore/spec.py b/.venv/lib/python3.12/site-packages/pathspec/patterns/gitignore/spec.py
new file mode 100644
index 0000000..ee77457
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/patterns/gitignore/spec.py
@@ -0,0 +1,335 @@
+"""
+This module provides :class:`GitIgnoreSpecPattern` which implements Git's
+`gitignore`_ patterns, and handles edge-cases where Git's behavior differs from
+what's documented. Git allows including files from excluded directories which
+appears to contradict the documentation. This is used by
+:class:`~pathspec.gitignore.GitIgnoreSpec` to fully replicate Git's handling.
+
+.. _`gitignore`: https://git-scm.com/docs/gitignore
+"""
+
+from typing import (
+	Optional)  # Replaced by `X | None` in 3.10.
+
+from pathspec._typing import (
+	AnyStr,  # Removed in 3.18.
+	assert_unreachable,
+	override)  # Added in 3.12.
+
+from .base import (
+	GitIgnorePatternError,
+	_BYTES_ENCODING,
+	_GitIgnoreBasePattern)
+
+_DIR_MARK = 'ps_d'
+"""
+The regex group name for the directory marker. This is only used by
+:class:`GitIgnoreSpec`.
+"""
+
+_DIR_MARK_CG = f'(?P<{_DIR_MARK}>/)'
+"""
+This regular expression matches the directory marker.
+"""
+
+_DIR_MARK_OPT = f'(?:{_DIR_MARK_CG}|$)'
+"""
+This regular expression matches the optional directory marker and sub-path.
+"""
+
+
+class GitIgnoreSpecPattern(_GitIgnoreBasePattern):
+	"""
+	The :class:`GitIgnoreSpecPattern` class represents a compiled gitignore
+	pattern with special handling for edge-cases to replicate Git's behavior.
+
+	This is registered under the deprecated name "gitwildmatch" for backward
+	compatibility with v0.12. The registered name will be removed in a future
+	version.
+	"""
+
+	# Keep the dict-less class hierarchy.
+	__slots__ = ()
+
+	@staticmethod
+	def __normalize_segments(
+		is_dir_pattern: bool,
+		pattern_segs: list[str],
+	) -> tuple[Optional[list[str]], Optional[str]]:
+		"""
+		Normalize the pattern segments to make processing easier.
+
+		*is_dir_pattern* (:class:`bool`) is whether the pattern is a directory
+		pattern (i.e., ends with a slash '/').
+
+		*pattern_segs* (:class:`list` of :class:`str`) contains the pattern
+		segments. This may be modified in place.
+
+		Returns a :class:`tuple` containing either:
+
+		- The normalized segments (:class:`list` of :class:`str`; or :data:`None`).
+
+		- The regular expression override (:class:`str` or :data:`None`).
+		"""
+		if not pattern_segs[0]:
+			# A pattern beginning with a slash ('/') should match relative to the root
+			# directory. Remove the empty first segment to make the pattern relative
+			# to root.
+			del pattern_segs[0]
+
+		elif len(pattern_segs) == 1 or (len(pattern_segs) == 2 and not pattern_segs[1]):
+			# A single segment pattern with or without a trailing slash ('/') will
+			# match any descendant path. This is equivalent to "**/{pattern}". Prepend
+			# double-asterisk segment to make pattern relative to root.
+			if pattern_segs[0] != '**':
+				pattern_segs.insert(0, '**')
+
+		else:
+			# A pattern without a beginning slash ('/') but contains at least one
+			# prepended directory (e.g., "dir/{pattern}") should match relative to the
+			# root directory. No segment modification is needed.
+			pass
+
+		if not pattern_segs:
+			# After normalization, we end up with no pattern at all. This must be
+			# because the pattern is invalid.
+			raise ValueError("Pattern normalized to nothing.")
+
+		if not pattern_segs[-1]:
+			# A pattern ending with a slash ('/') will match all descendant paths if
+			# it is a directory but not if it is a regular file. This is equivalent to
+			# "{pattern}/**". Set empty last segment to a double-asterisk to include
+			# all descendants.
+			pattern_segs[-1] = '**'
+
+		# EDGE CASE: Collapse duplicate double-asterisk sequences (i.e., '**/**').
+		# Iterate over the segments in reverse order and remove the duplicate double
+		# asterisks as we go.
+		for i in range(len(pattern_segs) - 1, 0, -1):
+			prev = pattern_segs[i-1]
+			seg = pattern_segs[i]
+			if prev == '**' and seg == '**':
+				del pattern_segs[i]
+
+		seg_count = len(pattern_segs)
+		if seg_count == 1 and pattern_segs[0] == '**':
+			if is_dir_pattern:
+				# The pattern "**/" will be normalized to "**", but it should match
+				# everything except for files in the root. Special case this pattern.
+				return (None, _DIR_MARK_CG)
+			else:
+				# The pattern "**" will match every path. Special case this pattern.
+				return (None, '.')
+
+		elif (
+			seg_count == 2
+			and pattern_segs[0] == '**'
+			and pattern_segs[1] == '*'
+		):
+			# The pattern "*" will be normalized to "**/*" and will match every
+			# path. Special case this pattern for efficiency.
+			return (None, '.')
+
+		elif (
+			seg_count == 3
+			and pattern_segs[0] == '**'
+			and pattern_segs[1] == '*'
+			and pattern_segs[2] == '**'
+		):
+			# The pattern "*/" will be normalized to "**/*/**" which will match every
+			# file not in the root directory. Special case this pattern for
+			# efficiency.
+			if is_dir_pattern:
+				return (None, _DIR_MARK_CG)
+			else:
+				return (None, '/')
+
+		# No regular expression override, return modified pattern segments.
+		return (pattern_segs, None)
+
+	@override
+	@classmethod
+	def pattern_to_regex(
+		cls,
+		pattern: AnyStr,
+	) -> tuple[Optional[AnyStr], Optional[bool]]:
+		"""
+		Convert the pattern into a regular expression.
+
+		*pattern* (:class:`str` or :class:`bytes`) is the pattern to convert into a
+		regular expression.
+
+		Returns a :class:`tuple` containing:
+
+			-	*pattern* (:class:`str`, :class:`bytes` or :data:`None`) is the
+				uncompiled regular expression.
+
+			-	*include* (:class:`bool` or :data:`None`) is whether matched files
+				should be included (:data:`True`), excluded (:data:`False`), or is a
+				null-operation (:data:`None`).
+		"""
+		if isinstance(pattern, str):
+			pattern_str = pattern
+			return_type = str
+		elif isinstance(pattern, bytes):
+			pattern_str = pattern.decode(_BYTES_ENCODING)
+			return_type = bytes
+		else:
+			raise TypeError(f"{pattern=!r} is not a unicode or byte string.")
+
+		original_pattern = pattern_str
+		del pattern
+
+		if pattern_str.endswith('\\ '):
+			# EDGE CASE: Spaces can be escaped with backslash. If a pattern that ends
+			# with a backslash is followed by a space, do not strip from the left.
+			pass
+		else:
+			# EDGE CASE: Leading spaces should be kept (only trailing spaces should be
+			# removed). Git does not remove leading spaces.
+			pattern_str = pattern_str.rstrip()
+
+		regex: Optional[str]
+		include: Optional[bool]
+
+		if not pattern_str:
+			# A blank pattern is a null-operation (neither includes nor excludes
+			# files).
+			return (None, None)
+
+		elif pattern_str.startswith('#'):
+			# A pattern starting with a hash ('#') serves as a comment (neither
+			# includes nor excludes files). Escape the hash with a backslash to match
+			# a literal hash (i.e., '\#').
+			return (None, None)
+
+		elif pattern_str == '/':
+			# EDGE CASE: According to `git check-ignore` (v2.4.1), a single '/' does
+			# not match any file.
+			return (None, None)
+
+		if pattern_str.startswith('!'):
+			# A pattern starting with an exclamation mark ('!') negates the pattern
+			# (exclude instead of include). Escape the exclamation mark with a back
+			# slash to match a literal exclamation mark (i.e., '\!').
+			include = False
+			# Remove leading exclamation mark.
+			pattern_str = pattern_str[1:]
+		else:
+			include = True
+
+		# Split pattern into segments.
+		pattern_segs = pattern_str.split('/')
+
+		# Check whether the pattern is specifically a directory pattern before
+		# normalization.
+		is_dir_pattern = not pattern_segs[-1]
+
+		# Normalize pattern to make processing easier.
+		try:
+			pattern_segs, override_regex = cls.__normalize_segments(
+				is_dir_pattern, pattern_segs,
+			)
+		except ValueError as e:
+			raise GitIgnorePatternError((
+				f"Invalid git pattern: {original_pattern!r}"
+			)) from e  # GitIgnorePatternError
+
+		if override_regex is not None:
+			# Use regex override.
+			regex = override_regex
+
+		elif pattern_segs is not None:
+			# Build regular expression from pattern.
+			try:
+				regex_parts = cls.__translate_segments(is_dir_pattern, pattern_segs)
+			except ValueError as e:
+				raise GitIgnorePatternError((
+					f"Invalid git pattern: {original_pattern!r}"
+				)) from e  # GitIgnorePatternError
+
+			regex = ''.join(regex_parts)
+
+		else:
+			assert_unreachable((
+				f"{override_regex=} and {pattern_segs=} cannot both be null."
+			))  # assert_unreachable
+
+		# Encode regex if needed.
+		out_regex: AnyStr
+		if regex is not None and return_type is bytes:
+			out_regex = regex.encode(_BYTES_ENCODING)
+		else:
+			out_regex = regex
+
+		return (out_regex, include)
+
+	@classmethod
+	def __translate_segments(
+		cls,
+		is_dir_pattern: bool,
+		pattern_segs: list[str],
+	) -> list[str]:
+		"""
+		Translate the pattern segments to regular expressions.
+
+		*is_dir_pattern* (:class:`bool`) is whether the pattern is a directory
+		pattern (i.e., ends with a slash '/').
+
+		*pattern_segs* (:class:`list` of :class:`str`) contains the pattern
+		segments.
+
+		Returns the regular expression parts (:class:`list` of :class:`str`).
+		"""
+		# Build regular expression from pattern.
+		out_parts = []
+		need_slash = False
+		end = len(pattern_segs) - 1
+		for i, seg in enumerate(pattern_segs):
+			if seg == '**':
+				if i == 0:
+					# A normalized pattern beginning with double-asterisks ('**') will
+					# match any leading path segments.
+					out_parts.append('^(?:.+/)?')
+
+				elif i < end:
+					# A pattern with inner double-asterisks ('**') will match multiple (or
+					# zero) inner path segments.
+					out_parts.append('(?:/.+)?')
+					need_slash = True
+
+				else:
+					assert i == end, (i, end)
+					# A normalized pattern ending with double-asterisks ('**') will match
+					# any trailing path segments.
+					if is_dir_pattern:
+						out_parts.append(_DIR_MARK_CG)
+					else:
+						out_parts.append('/')
+
+			else:
+				# Match path segment.
+				if i == 0:
+					# Anchor to root directory.
+					out_parts.append('^')
+
+				if need_slash:
+					out_parts.append('/')
+
+				if seg == '*':
+					# Match whole path segment.
+					out_parts.append('[^/]+')
+
+				else:
+					# Match segment glob pattern.
+					out_parts.append(cls._translate_segment_glob(seg))
+
+				if i == end:
+					# A pattern ending without a slash ('/') will match a file or a
+					# directory (with paths underneath it). E.g., "foo" matches "foo",
+					# "foo/bar", "foo/bar/baz", etc.
+					out_parts.append(_DIR_MARK_OPT)
+
+				need_slash = True
+
+		return out_parts
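The observable difference from the basic variant is the named ps_d group: for a directory pattern, the compiled regex records where the directory portion ends so that GitIgnoreSpec can tell directory matches apart from file matches. A hedged sketch, assuming this vendored tree is importable:

    from pathspec.patterns.gitignore.basic import GitIgnoreBasicPattern
    from pathspec.patterns.gitignore.spec import GitIgnoreSpecPattern

    basic_re, _ = GitIgnoreBasicPattern.pattern_to_regex('build/')
    spec_re, _ = GitIgnoreSpecPattern.pattern_to_regex('build/')

    print('ps_d' in basic_re)  # False -- the basic variant emits a plain '/'
    print('ps_d' in spec_re)   # True  -- contains the '(?P<ps_d>/)' marker group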
diff --git a/.venv/lib/python3.12/site-packages/pathspec/patterns/gitwildmatch.py b/.venv/lib/python3.12/site-packages/pathspec/patterns/gitwildmatch.py
new file mode 100644
index 0000000..b44d961
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/patterns/gitwildmatch.py
@@ -0,0 +1,52 @@
+"""
+.. version-deprecated:: 1.0.0
+	This module is superseded by :mod:`pathspec.patterns.gitignore`.
+"""
+
+from pathspec import util
+from pathspec._typing import (
+	deprecated,  # Added in 3.13.
+	override)  # Added in 3.12.
+
+from .gitignore.spec import (
+	GitIgnoreSpecPattern)
+
+# DEPRECATED: Deprecated since version 1.0.0. Expose GitWildMatchPatternError
+# in this module for backward compatibility.
+from .gitignore import (
+	GitIgnorePatternError as GitWildMatchPatternError)
+
+
+class GitWildMatchPattern(GitIgnoreSpecPattern):
+	"""
+	.. version-deprecated:: 1.0.0
+		This class is superseded by :class:`GitIgnoreSpecPattern` and
+		:class:`~pathspec.patterns.gitignore.basic.GitIgnoreBasicPattern`.
+	"""
+
+	@deprecated((
+		"GitWildMatchPattern ('gitwildmatch') is deprecated. Use 'gitignore' for "
+		"GitIgnoreBasicPattern or GitIgnoreSpecPattern instead."
+	))
+	def __init__(self, *args, **kw) -> None:
+		"""
+		Warn about deprecation.
+		"""
+		super().__init__(*args, **kw)
+
+	@override
+	@classmethod
+	@deprecated((
+		"GitWildMatchPattern ('gitwildmatch') is deprecated. Use 'gitignore' for "
+		"GitIgnoreBasicPattern or GitIgnoreSpecPattern instead."
+	))
+	def pattern_to_regex(cls, *args, **kw):
+		"""
+		Warn about deprecation.
+		"""
+		return super().pattern_to_regex(*args, **kw)
+
+
+# DEPRECATED: Deprecated since version 1.0.0. Register GitWildMatchPattern as
+# "gitwildmatch" for backward compatibility.
+util.register_pattern('gitwildmatch', GitWildMatchPattern)
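For code that resolves pattern factories by registered name, the migration suggested by the deprecation notices is a one-word change; a sketch, assuming the registrations above and the factory contract documented in util.register_pattern():

    from pathspec import util

    # Deprecated name; patterns constructed through it emit a deprecation warning.
    legacy_factory = util.lookup_pattern('gitwildmatch')

    # Preferred name going forward.
    factory = util.lookup_pattern('gitignore')
    pattern = factory('*.log')
    print(pattern.include)  # True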
diff --git a/.venv/lib/python3.12/site-packages/pathspec/py.typed b/.venv/lib/python3.12/site-packages/pathspec/py.typed
new file mode 100644
index 0000000..b01eaaf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561.  The pathspec package uses inline types.
diff --git a/.venv/lib/python3.12/site-packages/pathspec/util.py b/.venv/lib/python3.12/site-packages/pathspec/util.py
new file mode 100644
index 0000000..ea2dbee
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pathspec/util.py
@@ -0,0 +1,847 @@
+"""
+This module provides utility methods for dealing with path-specs.
+"""
+
+import os
+import os.path
+import pathlib
+import posixpath
+import stat
+from collections.abc import (
+	Collection,
+	Iterable,
+	Iterator,
+	Sequence)
+from dataclasses import (
+	dataclass)
+from typing import (
+	Any,
+	Callable,  # Replaced by `collections.abc.Callable` in 3.9.2.
+	Generic,
+	Optional,  # Replaced by `X | None` in 3.10.
+	TypeVar,
+	Union)  # Replaced by `X | Y` in 3.10.
+
+from .pattern import (
+	Pattern)
+from ._typing import (
+	AnyStr,  # Removed in 3.18.
+	deprecated)  # Added in 3.13.
+
+StrPath = Union[str, os.PathLike[str]]
+
+TStrPath = TypeVar("TStrPath", bound=StrPath)
+"""
+Type variable for :class:`str` or :class:`os.PathLike`.
+"""
+
+NORMALIZE_PATH_SEPS = [
+	__sep
+	for __sep in [os.sep, os.altsep]
+	if __sep and __sep != posixpath.sep
+]
+"""
+*NORMALIZE_PATH_SEPS* (:class:`list` of :class:`str`) contains the path
+separators that need to be normalized to the POSIX separator for the current
+operating system. The separators are determined by examining :data:`os.sep` and
+:data:`os.altsep`.
+"""
+
+_registered_patterns = {}
+"""
+*_registered_patterns* (:class:`dict`) maps a name (:class:`str`) to the
+registered pattern factory (:class:`~collections.abc.Callable`).
+"""
+
+
+def append_dir_sep(path: pathlib.Path) -> str:
+	"""
+	Appends the path separator to the path if the path is a directory. This can be
+	used to aid in distinguishing between directories and files on the file-system
+	by relying on the presence of a trailing path separator.
+
+	*path* (:class:`pathlib.Path`) is the path to use.
+
+	Returns the path (:class:`str`).
+	"""
+	str_path = str(path)
+	if path.is_dir():
+		str_path += os.sep
+
+	return str_path
+
+
+def check_match_file(
+	patterns: Iterable[tuple[int, Pattern]],
+	file: str,
+	is_reversed: Optional[bool] = None,
+) -> tuple[Optional[bool], Optional[int]]:
+	"""
+	Check the file against the patterns.
+
+	*patterns* (:class:`~collections.abc.Iterable`) yields each indexed pattern
+	(:class:`tuple`) which contains the pattern index (:class:`int`) and the actual
+	pattern (:class:`.Pattern`).
+
+	*file* (:class:`str`) is the normalized file path to be matched against
+	*patterns*.
+
+	*is_reversed* (:class:`bool` or :data:`None`) is whether the order of the
+	patterns has been reversed. Default is :data:`None` for :data:`False`.
+	Reversing the order of the patterns is an optimization.
+
+	Returns a :class:`tuple` containing whether to include *file* (:class:`bool`
+	or :data:`None`), and the index of the last matched pattern (:class:`int` or
+	:data:`None`).
+	"""
+	if is_reversed:
+		# Check patterns in reverse order. The first pattern that matches takes
+		# precedence.
+		for index, pattern in patterns:
+			if pattern.include is not None and pattern.match_file(file) is not None:
+				return pattern.include, index
+
+		return None, None
+
+	else:
+		# Check all patterns. The last pattern that matches takes precedence.
+		out_include: Optional[bool] = None
+		out_index: Optional[int] = None
+		for index, pattern in patterns:
+			if pattern.include is not None and pattern.match_file(file) is not None:
+				out_include = pattern.include
+				out_index = index
+
+		return out_include, out_index
+
+
+def detailed_match_files(
+	patterns: Iterable[Pattern],
+	files: Iterable[str],
+	all_matches: Optional[bool] = None,
+) -> dict[str, 'MatchDetail']:
+	"""
+	Matches the files to the patterns, and returns which patterns matched the
+	files.
+
+	*patterns* (:class:`~collections.abc.Iterable` of :class:`.Pattern`) contains
+	the patterns to use.
+
+	*files* (:class:`~collections.abc.Iterable` of :class:`str`) contains the
+	normalized file paths to be matched against *patterns*.
+
+	*all_matches* (:class:`bool` or :data:`None`) is whether to return all matched
+	patterns (:data:`True`), or only the last matched pattern (:data:`False`).
+	Default is :data:`None` for :data:`False`.
+
+	Returns the matched files (:class:`dict`) which maps each matched file
+	(:class:`str`) to the patterns that matched in order (:class:`.MatchDetail`).
+	"""
+	all_files = files if isinstance(files, Collection) else list(files)
+	return_files = {}
+	for pattern in patterns:
+		if pattern.include is not None:
+			result_files = pattern.match(all_files)  # TODO: Replace with `.match_file()`.
+			if pattern.include:
+				# Add files and record pattern.
+				for result_file in result_files:
+					if result_file in return_files:
+						if all_matches:
+							return_files[result_file].patterns.append(pattern)
+						else:
+							return_files[result_file].patterns[0] = pattern
+					else:
+						return_files[result_file] = MatchDetail([pattern])
+
+			else:
+				# Remove files.
+				for file in result_files:
+					del return_files[file]
+
+	return return_files
+
+
+def _filter_check_patterns(
+	patterns: Iterable[Pattern],
+) -> list[tuple[int, Pattern]]:
+	"""
+	Filters out null-patterns.
+
+	*patterns* (:class:`~collections.abc.Iterable` of :class:`.Pattern`) contains
+	the patterns.
+
+	Returns a :class:`list` containing each indexed pattern (:class:`tuple`) which
+	contains the pattern index (:class:`int`) and the actual pattern
+	(:class:`.Pattern`).
+	"""
+	return [
+		(__index, __pat)
+		for __index, __pat in enumerate(patterns)
+		if __pat.include is not None
+	]
+
+
+def _is_iterable(value: Any) -> bool:
+	"""
+	Check whether the value is an iterable (excludes strings).
+
+	*value* is the value to check.
+
+	Returns whether *value* is an iterable (:class:`bool`).
+	"""
+	return isinstance(value, Iterable) and not isinstance(value, (str, bytes))
+
+
+@deprecated((
+	"pathspec.util.iter_tree() is deprecated. Use iter_tree_files() instead."
+))
+def iter_tree(root, on_error=None, follow_links=None):
+	"""
+	.. version-deprecated:: 0.10.0
+		This is an alias for the :func:`.iter_tree_files` function.
+	"""
+	return iter_tree_files(root, on_error=on_error, follow_links=follow_links)
+
+
+def iter_tree_entries(
+	root: StrPath,
+	on_error: Optional[Callable[[OSError], None]] = None,
+	follow_links: Optional[bool] = None,
+) -> Iterator['TreeEntry']:
+	"""
+	Walks the specified directory for all files and directories.
+
+	*root* (:class:`str` or :class:`os.PathLike`) is the root directory to search.
+
+	*on_error* (:class:`~collections.abc.Callable` or :data:`None`) optionally is
+	the error handler for file-system exceptions. It will be called with the
+	exception (:exc:`OSError`). Reraise the exception to abort the walk. Default
+	is :data:`None` to ignore file-system exceptions.
+
+	*follow_links* (:class:`bool` or :data:`None`) optionally is whether to walk
+	symbolic links that resolve to directories. Default is :data:`None` for
+	:data:`True`.
+
+	Raises :exc:`.RecursionError` if recursion is detected.
+
+	Returns an :class:`~collections.abc.Iterator` yielding each file or directory
+	entry (:class:`.TreeEntry`) relative to *root*.
+	"""
+	if on_error is not None and not callable(on_error):
+		raise TypeError(f"on_error:{on_error!r} is not callable.")
+
+	if follow_links is None:
+		follow_links = True
+
+	yield from _iter_tree_entries_next(os.path.abspath(root), '', {}, on_error, follow_links)
+
+
+def _iter_tree_entries_next(
+	root_full: str,
+	dir_rel: str,
+	memo: dict[str, str],
+	on_error: Callable[[OSError], None],
+	follow_links: bool,
+) -> Iterator['TreeEntry']:
+	"""
+	Scan the directory for all descendant files.
+
+	*root_full* (:class:`str`) the absolute path to the root directory.
+
+	*dir_rel* (:class:`str`) the path to the directory to scan relative to
+	*root_full*.
+
+	*memo* (:class:`dict`) keeps track of ancestor directories encountered. Maps
+	each ancestor real path (:class:`str`) to relative path (:class:`str`).
+
+	*on_error* (:class:`~collections.abc.Callable` or :data:`None`) optionally is
+	the error handler for file-system exceptions.
+
+	*follow_links* (:class:`bool`) is whether to walk symbolic links that resolve
+	to directories.
+
+	Yields each entry (:class:`.TreeEntry`).
+	"""
+	dir_full = os.path.join(root_full, dir_rel)
+	dir_real = os.path.realpath(dir_full)
+
+	# Remember each encountered ancestor directory and its canonical (real) path.
+	# If a canonical path is encountered more than once, recursion has occurred.
+	if dir_real not in memo:
+		memo[dir_real] = dir_rel
+	else:
+		raise RecursionError(real_path=dir_real, first_path=memo[dir_real], second_path=dir_rel)
+
+	with os.scandir(dir_full) as scan_iter:
+		node_ent: os.DirEntry
+		for node_ent in scan_iter:
+			node_rel = os.path.join(dir_rel, node_ent.name)
+
+			# Inspect child node.
+			try:
+				node_lstat = node_ent.stat(follow_symlinks=False)
+			except OSError as e:
+				if on_error is not None:
+					on_error(e)
+				continue
+
+			if node_ent.is_symlink():
+				# Child node is a link, inspect the target node.
+				try:
+					node_stat = node_ent.stat()
+				except OSError as e:
+					if on_error is not None:
+						on_error(e)
+					continue
+			else:
+				node_stat = node_lstat
+
+			if node_ent.is_dir(follow_symlinks=follow_links):
+				# Child node is a directory, recurse into it and yield its descendant
+				# files.
+				yield TreeEntry(node_ent.name, node_rel, node_lstat, node_stat)
+
+				yield from _iter_tree_entries_next(root_full, node_rel, memo, on_error, follow_links)
+
+			elif node_ent.is_file() or node_ent.is_symlink():
+				# Child node is either a file or an unfollowed link, yield it.
+				yield TreeEntry(node_ent.name, node_rel, node_lstat, node_stat)
+
+	# NOTE: Make sure to remove the canonical (real) path of the directory from
+	# the ancestors memo once we are done with it. This allows the same directory
+	# to appear multiple times. If this is not done, the second occurrence of the
+	# directory will be incorrectly interpreted as a recursion.
+	del memo[dir_real]
+
+
+def iter_tree_files(
+	root: StrPath,
+	on_error: Optional[Callable[[OSError], None]] = None,
+	follow_links: Optional[bool] = None,
+) -> Iterator[str]:
+	"""
+	Walks the specified directory for all files.
+
+	*root* (:class:`str` or :class:`os.PathLike`) is the root directory to search
+	for files.
+
+	*on_error* (:class:`~collections.abc.Callable` or :data:`None`) optionally is
+	the error handler for file-system exceptions. It will be called with the
+	exception (:exc:`OSError`). Reraise the exception to abort the walk. Default
+	is :data:`None` to ignore file-system exceptions.
+
+	*follow_links* (:class:`bool` or :data:`None`) optionally is whether to walk
+	symbolic links that resolve to directories. Default is :data:`None` for
+	:data:`True`.
+
+	Raises :exc:`.RecursionError` if recursion is detected.
+
+	Returns an :class:`~collections.abc.Iterator` yielding the path to each file
+	(:class:`str`) relative to *root*.
+	"""
+	if on_error is not None and not callable(on_error):
+		raise TypeError(f"on_error:{on_error!r} is not callable.")
+
+	if follow_links is None:
+		follow_links = True
+
+	yield from _iter_tree_files_next(os.path.abspath(root), '', {}, on_error, follow_links)
+
+
+def _iter_tree_files_next(
+	root_full: str,
+	dir_rel: str,
+	memo: dict[str, str],
+	on_error: Callable[[OSError], None],
+	follow_links: bool,
+) -> Iterator[str]:
+	"""
+	Scan the directory for all descendant files.
+
+	*root_full* (:class:`str`) the absolute path to the root directory.
+
+	*dir_rel* (:class:`str`) the path to the directory to scan relative to
+	*root_full*.
+
+	*memo* (:class:`dict`) keeps track of ancestor directories encountered. Maps
+	each ancestor real path (:class:`str`) to relative path (:class:`str`).
+
+	*on_error* (:class:`~collections.abc.Callable` or :data:`None`) optionally is
+	the error handler for file-system exceptions.
+
+	*follow_links* (:class:`bool`) is whether to walk symbolic links that resolve
+	to directories.
+
+	Yields each file path (:class:`str`).
+	"""
+	dir_full = os.path.join(root_full, dir_rel)
+	dir_real = os.path.realpath(dir_full)
+
+	# Remember each encountered ancestor directory and its canonical (real) path.
+	# If a canonical path is encountered more than once, recursion has occurred.
+	if dir_real not in memo:
+		memo[dir_real] = dir_rel
+	else:
+		raise RecursionError(real_path=dir_real, first_path=memo[dir_real], second_path=dir_rel)
+
+	with os.scandir(dir_full) as scan_iter:
+		node_ent: os.DirEntry
+		for node_ent in scan_iter:
+			node_rel = os.path.join(dir_rel, node_ent.name)
+
+			if node_ent.is_dir(follow_symlinks=follow_links):
+				# Child node is a directory, recurse into it and yield its descendant
+				# files.
+				yield from _iter_tree_files_next(root_full, node_rel, memo, on_error, follow_links)
+
+			elif node_ent.is_file():
+				# Child node is a file, yield it.
+				yield node_rel
+
+			elif not follow_links and node_ent.is_symlink():
+				# Child node is an unfollowed link, yield it.
+				yield node_rel
+
+	# NOTE: Make sure to remove the canonical (real) path of the directory from
+	# the ancestors memo once we are done with it. This allows the same directory
+	# to appear multiple times. If this is not done, the second occurrence of the
+	# directory will be incorrectly interpreted as a recursion.
+	del memo[dir_real]
+
+
+def lookup_pattern(name: str) -> Callable[[AnyStr], Pattern]:
+	"""
+	Looks up a registered pattern factory by name.
+
+	*name* (:class:`str`) is the name of the pattern factory.
+
+	Returns the registered pattern factory (:class:`~collections.abc.Callable`).
+	If no pattern factory is registered, raises :exc:`KeyError`.
+	"""
+	return _registered_patterns[name]
+
+
+def match_file(patterns: Iterable[Pattern], file: str) -> bool:
+	"""
+	Matches the file to the patterns.
+
+	*patterns* (:class:`~collections.abc.Iterable` of :class:`.Pattern`) contains
+	the patterns to use.
+
+	*file* (:class:`str`) is the normalized file path to be matched against
+	*patterns*.
+
+	Returns :data:`True` if *file* matched; otherwise, :data:`False`.
+	"""
+	matched = False
+	for pattern in patterns:
+		if pattern.include is not None and pattern.match_file(file) is not None:
+			matched = pattern.include
+
+	return matched
+
+
+@deprecated((
+	"pathspec.util.match_files() is deprecated. Use match_file() with a loop for "
+	"better results."
+))
+def match_files(
+	patterns: Iterable[Pattern],
+	files: Iterable[str],
+) -> set[str]:
+	"""
+	.. version-deprecated:: 0.10.0
+		This function is no longer used. Use the :func:`.match_file` function with a
+		loop for better results.
+
+	Matches the files to the patterns.
+
+	*patterns* (:class:`~collections.abc.Iterable` of :class:`.Pattern`) contains
+	the patterns to use.
+
+	*files* (:class:`~collections.abc.Iterable` of :class:`str`) contains the
+	normalized file paths to be matched against *patterns*.
+
+	Returns the matched files (:class:`set` of :class:`str`).
+	"""
+	use_patterns = [__pat for __pat in patterns if __pat.include is not None]
+
+	return_files = set()
+	for file in files:
+		if match_file(use_patterns, file):
+			return_files.add(file)
+
+	return return_files
+
+
+def normalize_file(
+	file: StrPath,
+	separators: Optional[Collection[str]] = None,
+) -> str:
+	"""
+	Normalizes the file path to use the POSIX path separator (i.e., ``"/"``), and
+	make the paths relative (remove leading ``"/"``).
+
+	*file* (:class:`str` or :class:`os.PathLike`) is the file path.
+
+	*separators* (:class:`~collections.abc.Collection` of :class:`str`; or
+	:data:`None`) optionally contains the path separators to normalize. This does
+	not need to include the POSIX path separator (``"/"``), but including it will
+	not affect the results. Default is ``None`` for :data:`.NORMALIZE_PATH_SEPS`.
+	To prevent normalization, pass an empty container (e.g., an empty tuple
+	``()``).
+
+	Returns the normalized file path (:class:`str`).
+	"""
+	# Normalize path separators.
+	if separators is None:
+		separators = NORMALIZE_PATH_SEPS
+
+	# Convert path object to string.
+	norm_file: str = os.fspath(file)
+
+	for sep in separators:
+		norm_file = norm_file.replace(sep, posixpath.sep)
+
+	if norm_file.startswith('/'):
+		# Make path relative.
+		norm_file = norm_file[1:]
+
+	elif norm_file.startswith('./'):
+		# Remove current directory prefix.
+		norm_file = norm_file[2:]
+
+	return norm_file
+
+
+@deprecated((
+	"pathspec.util.normalize_files() is deprecated. Use normalize_file() with a "
+	"loop for better results."
+))
+def normalize_files(
+	files: Iterable[StrPath],
+	separators: Optional[Collection[str]] = None,
+) -> dict[str, list[StrPath]]:
+	"""
+	.. version-deprecated:: 0.10.0
+		This function is no longer used. Use the :func:`.normalize_file` function
+		with a loop for better results.
+
+	Normalizes the file paths to use the POSIX path separator.
+
+	*files* (:class:`~collections.abc.Iterable` of :class:`str` or
+	:class:`os.PathLike`) contains the file paths to be normalized.
+
+	*separators* (:class:`~collections.abc.Collection` of :class:`str`; or
+	:data:`None`) optionally contains the path separators to normalize. See
+	:func:`.normalize_file` for more information.
+
+	Returns a :class:`dict` mapping each normalized file path (:class:`str`) to
+	the original file paths (:class:`list` of :class:`str` or
+	:class:`os.PathLike`).
+	"""
+	norm_files = {}
+	for path in files:
+		norm_file = normalize_file(path, separators=separators)
+		if norm_file in norm_files:
+			norm_files[norm_file].append(path)
+		else:
+			norm_files[norm_file] = [path]
+
+	return norm_files
+
+
+def register_pattern(
+	name: str,
+	pattern_factory: Callable[[AnyStr], Pattern],
+	override: Optional[bool] = None,
+) -> None:
+	"""
+	Registers the specified pattern factory.
+
+	*name* (:class:`str`) is the name to register the pattern factory under.
+
+	*pattern_factory* (:class:`~collections.abc.Callable`) is used to compile
+	patterns. It must accept an uncompiled pattern (:class:`str`) and return the
+	compiled pattern (:class:`.Pattern`).
+
+	*override* (:class:`bool` or :data:`None`) optionally is whether to allow
+	overriding an already registered pattern under the same name (:data:`True`),
+	instead of raising an :exc:`.AlreadyRegisteredError` (:data:`False`). Default
+	is :data:`None` for :data:`False`.
+	"""
+	if not isinstance(name, str):
+		raise TypeError(f"name:{name!r} is not a string.")
+
+	if not callable(pattern_factory):
+		raise TypeError(f"pattern_factory:{pattern_factory!r} is not callable.")
+
+	if name in _registered_patterns and not override:
+		raise AlreadyRegisteredError(name, _registered_patterns[name])
+
+	_registered_patterns[name] = pattern_factory
+
+
+class AlreadyRegisteredError(Exception):
+	"""
+	The :exc:`AlreadyRegisteredError` exception is raised when a pattern factory
+	is registered under a name already in use.
+	"""
+
+	def __init__(
+		self,
+		name: str,
+		pattern_factory: Callable[[AnyStr], Pattern],
+	) -> None:
+		"""
+		Initializes the :exc:`AlreadyRegisteredError` instance.
+
+		*name* (:class:`str`) is the name of the registered pattern.
+
+		*pattern_factory* (:class:`~collections.abc.Callable`) is the registered
+		pattern factory.
+		"""
+		super().__init__(name, pattern_factory)
+
+	@property
+	def message(self) -> str:
+		"""
+		*message* (:class:`str`) is the error message.
+		"""
+		return (
+			f"{self.name!r} is already registered for pattern factory="
+			f"{self.pattern_factory!r}."
+		)
+
+	@property
+	def name(self) -> str:
+		"""
+		*name* (:class:`str`) is the name of the registered pattern.
+		"""
+		return self.args[0]
+
+	@property
+	def pattern_factory(self) -> Callable[[AnyStr], Pattern]:
+		"""
+		*pattern_factory* (:class:`~collections.abc.Callable`) is the registered
+		pattern factory.
+		"""
+		return self.args[1]
+
+
+class RecursionError(Exception):
+	"""
+	The :exc:`RecursionError` exception is raised when recursion is detected.
+	"""
+
+	def __init__(
+		self,
+		real_path: str,
+		first_path: str,
+		second_path: str,
+	) -> None:
+		"""
+		Initializes the :exc:`RecursionError` instance.
+
+		*real_path* (:class:`str`) is the real path that recursion was encountered
+		on.
+
+		*first_path* (:class:`str`) is the first path encountered for *real_path*.
+
+		*second_path* (:class:`str`) is the second path encountered for *real_path*.
+		"""
+		super().__init__(real_path, first_path, second_path)
+
+	@property
+	def first_path(self) -> str:
+		"""
+		*first_path* (:class:`str`) is the first path encountered for
+		:attr:`self.real_path <RecursionError.real_path>`.
+		"""
+		return self.args[1]
+
+	@property
+	def message(self) -> str:
+		"""
+		*message* (:class:`str`) is the error message.
+		"""
+		return (
+			f"Real path {self.real_path!r} was encountered at {self.first_path!r} "
+			f"and then {self.second_path!r}."
+		)
+
+	@property
+	def real_path(self) -> str:
+		"""
+		*real_path* (:class:`str`) is the real path that recursion was
+		encountered on.
+		"""
+		return self.args[0]
+
+	@property
+	def second_path(self) -> str:
+		"""
+		*second_path* (:class:`str`) is the second path encountered for
+		:attr:`self.real_path <RecursionError.real_path>`.
+		"""
+		return self.args[2]
+
+
+@dataclass(frozen=True)
+class CheckResult(Generic[TStrPath]):
+	"""
+	The :class:`CheckResult` class contains information about the file and which
+	pattern matched it.
+	"""
+
+	# Make the class dict-less.
+	__slots__ = (
+		'file',
+		'include',
+		'index',
+	)
+
+	file: TStrPath
+	"""
+	*file* (:class:`str` or :class:`os.PathLike`) is the file path.
+	"""
+
+	include: Optional[bool]
+	"""
+	*include* (:class:`bool` or :data:`None`) is whether to include or exclude the
+	file. If :data:`None`, no pattern matched.
+	"""
+
+	index: Optional[int]
+	"""
+	*index* (:class:`int` or :data:`None`) is the index of the last pattern that
+	matched. If :data:`None`, no pattern matched.
+	"""
+
+
+class MatchDetail(object):
+	"""
+	The :class:`.MatchDetail` class contains information about the patterns that
+	matched a file.
+	"""
+
+	# Make the class dict-less.
+	__slots__ = ('patterns',)
+
+	def __init__(self, patterns: Sequence[Pattern]) -> None:
+		"""
+		Initialize the :class:`.MatchDetail` instance.
+
+		*patterns* (:class:`~collections.abc.Sequence` of :class:`.Pattern`)
+		contains the patterns that matched the file in the order they were encountered.
+		"""
+
+		self.patterns = patterns
+		"""
+		*patterns* (:class:`~collections.abc.Sequence` of :class:`.Pattern`)
+		contains the patterns that matched the file in the order they were
+		encountered.
+		"""
+
+
+class TreeEntry(object):
+	"""
+	The :class:`TreeEntry` class contains information about a file-system entry.
+	"""
+
+	# Make the class dict-less.
+	__slots__ = ('_lstat', 'name', 'path', '_stat')
+
+	def __init__(
+		self,
+		name: str,
+		path: str,
+		lstat: os.stat_result,
+		stat: os.stat_result,
+	) -> None:
+		"""
+		Initialize the :class:`TreeEntry` instance.
+
+		*name* (:class:`str`) is the base name of the entry.
+
+		*path* (:class:`str`) is the relative path of the entry.
+
+		*lstat* (:class:`os.stat_result`) is the stat result of the direct entry.
+
+		*stat* (:class:`os.stat_result`) is the stat result of the entry,
+		potentially linked.
+		"""
+
+		self._lstat: os.stat_result = lstat
+		"""
+		*_lstat* (:class:`os.stat_result`) is the stat result of the direct entry.
+		"""
+
+		self.name: str = name
+		"""
+		*name* (:class:`str`) is the base name of the entry.
+		"""
+
+		self.path: str = path
+		"""
+		*path* (:class:`str`) is the path of the entry.
+		"""
+
+		self._stat: os.stat_result = stat
+		"""
+		*_stat* (:class:`os.stat_result`) is the stat result of the linked entry.
+		"""
+
+	def is_dir(self, follow_links: Optional[bool] = None) -> bool:
+		"""
+		Get whether the entry is a directory.
+
+		*follow_links* (:class:`bool` or :data:`None`) is whether to follow symbolic
+		links. If this is :data:`True`, a symlink to a directory will result in
+		:data:`True`. Default is :data:`None` for :data:`True`.
+
+		Returns whether the entry is a directory (:class:`bool`).
+		"""
+		if follow_links is None:
+			follow_links = True
+
+		node_stat = self._stat if follow_links else self._lstat
+		return stat.S_ISDIR(node_stat.st_mode)
+
+	def is_file(self, follow_links: Optional[bool] = None) -> bool:
+		"""
+		Get whether the entry is a regular file.
+
+		*follow_links* (:class:`bool` or :data:`None`) is whether to follow symbolic
+		links. If this is :data:`True`, a symlink to a regular file will result in
+		:data:`True`. Default is :data:`None` for :data:`True`.
+
+		Returns whether the entry is a regular file (:class:`bool`).
+		"""
+		if follow_links is None:
+			follow_links = True
+
+		node_stat = self._stat if follow_links else self._lstat
+		return stat.S_ISREG(node_stat.st_mode)
+
+	def is_symlink(self) -> bool:
+		"""
+		Returns whether the entry is a symbolic link (:class:`bool`).
+		"""
+		return stat.S_ISLNK(self._lstat.st_mode)
+
+	def stat(self, follow_links: Optional[bool] = None) -> os.stat_result:
+		"""
+		Get the cached stat result for the entry.
+
+		*follow_links* (:class:`bool` or :data:`None`) is whether to follow symbolic
+		links. If this is :data:`True`, the stat result of the linked file will be
+		returned. Default is :data:`None` for :data:`True`.
+
+		Returns the stat result (:class:`os.stat_result`).
+		"""
+		if follow_links is None:
+			follow_links = True
+
+		return self._stat if follow_links else self._lstat
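Taken together, the helpers in this module support a small end-to-end flow: look up a registered factory, normalize a native path to POSIX form, and test it against the compiled patterns, with the last matching pattern winning. A hedged sketch, assuming the 'gitignore' factory registered earlier in this tree:

    from pathspec import util

    factory = util.lookup_pattern('gitignore')
    patterns = [factory('*.log'), factory('!keep.log')]

    # Normalize a native path to the POSIX form the patterns expect.
    file = util.normalize_file('logs\\debug.log', separators=['\\'])
    print(file)  # logs/debug.log

    # The last pattern that matches decides the outcome.
    print(util.match_file(patterns, file))        # True  -- matched by '*.log'
    print(util.match_file(patterns, 'keep.log'))  # False -- negated by '!keep.log'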
diff --git a/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/AUTHORS.txt b/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/AUTHORS.txt
new file mode 100644
index 0000000..0e63548
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/AUTHORS.txt
@@ -0,0 +1,760 @@
+@Switch01
+A_Rog
+Aakanksha Agrawal
+Abhinav Sagar
+ABHYUDAY PRATAP SINGH
+abs51295
+AceGentile
+Adam Chainz
+Adam Tse
+Adam Wentz
+admin
+Adrien Morison
+ahayrapetyan
+Ahilya
+AinsworthK
+Akash Srivastava
+Alan Yee
+Albert Tugushev
+Albert-Guan
+albertg
+Alberto Sottile
+Aleks Bunin
+Ales Erjavec
+Alethea Flowers
+Alex Gaynor
+Alex Grönholm
+Alex Hedges
+Alex Loosley
+Alex Morega
+Alex Stachowiak
+Alexander Shtyrov
+Alexandre Conrad
+Alexey Popravka
+Aleš Erjavec
+Alli
+Ami Fischman
+Ananya Maiti
+Anatoly Techtonik
+Anders Kaseorg
+Andre Aguiar
+Andreas Lutro
+Andrei Geacar
+Andrew Gaul
+Andrew Shymanel
+Andrey Bienkowski
+Andrey Bulgakov
+Andrés Delfino
+Andy Freeland
+Andy Kluger
+Ani Hayrapetyan
+Aniruddha Basak
+Anish Tambe
+Anrs Hu
+Anthony Sottile
+Antoine Musso
+Anton Ovchinnikov
+Anton Patrushev
+Antonio Alvarado Hernandez
+Antony Lee
+Antti Kaihola
+Anubhav Patel
+Anudit Nagar
+Anuj Godase
+AQNOUCH Mohammed
+AraHaan
+Arindam Choudhury
+Armin Ronacher
+Artem
+Arun Babu Neelicattu
+Ashley Manton
+Ashwin Ramaswami
+atse
+Atsushi Odagiri
+Avinash Karhana
+Avner Cohen
+Awit (Ah-Wit) Ghirmai
+Baptiste Mispelon
+Barney Gale
+barneygale
+Bartek Ogryczak
+Bastian Venthur
+Ben Bodenmiller
+Ben Darnell
+Ben Hoyt
+Ben Mares
+Ben Rosser
+Bence Nagy
+Benjamin Peterson
+Benjamin VanEvery
+Benoit Pierre
+Berker Peksag
+Bernard
+Bernard Tyers
+Bernardo B. Marques
+Bernhard M. Wiedemann
+Bertil Hatt
+Bhavam Vidyarthi
+Blazej Michalik
+Bogdan Opanchuk
+BorisZZZ
+Brad Erickson
+Bradley Ayers
+Brandon L. Reiss
+Brandt Bucher
+Brett Randall
+Brett Rosen
+Brian Cristante
+Brian Rosner
+briantracy
+BrownTruck
+Bruno Oliveira
+Bruno Renié
+Bruno S
+Bstrdsmkr
+Buck Golemon
+burrows
+Bussonnier Matthias
+bwoodsend
+c22
+Caleb Martinez
+Calvin Smith
+Carl Meyer
+Carlos Liam
+Carol Willing
+Carter Thayer
+Cass
+Chandrasekhar Atina
+Chih-Hsuan Yen
+Chris Brinker
+Chris Hunt
+Chris Jerdonek
+Chris Kuehl
+Chris McDonough
+Chris Pawley
+Chris Pryer
+Chris Wolfe
+Christian Clauss
+Christian Heimes
+Christian Oudard
+Christoph Reiter
+Christopher Hunt
+Christopher Snyder
+cjc7373
+Clark Boylan
+Claudio Jolowicz
+Clay McClure
+Cody
+Cody Soyland
+Colin Watson
+Collin Anderson
+Connor Osborn
+Cooper Lees
+Cooper Ry Lees
+Cory Benfield
+Cory Wright
+Craig Kerstiens
+Cristian Sorinel
+Cristina
+Cristina Muñoz
+Curtis Doty
+cytolentino
+Daan De Meyer
+Dale
+Damian
+Damian Quiroga
+Damian Shaw
+Dan Black
+Dan Savilonis
+Dan Sully
+Dane Hillard
+daniel
+Daniel Collins
+Daniel Hahler
+Daniel Holth
+Daniel Jost
+Daniel Katz
+Daniel Shaulov
+Daniele Esposti
+Daniele Nicolodi
+Daniele Procida
+Daniil Konovalenko
+Danny Hermes
+Danny McClanahan
+Darren Kavanagh
+Dav Clark
+Dave Abrahams
+Dave Jones
+David Aguilar
+David Black
+David Bordeynik
+David Caro
+David D Lowe
+David Evans
+David Hewitt
+David Linke
+David Poggi
+David Pursehouse
+David Runge
+David Tucker
+David Wales
+Davidovich
+ddelange
+Deepak Sharma
+Deepyaman Datta
+Denise Yu
+dependabot[bot]
+derwolfe
+Desetude
+Devesh Kumar Singh
+Diego Caraballo
+Diego Ramirez
+DiegoCaraballo
+Dimitri Merejkowsky
+Dimitri Papadopoulos
+Dirk Stolle
+Dmitry Gladkov
+Dmitry Volodin
+Domen Kožar
+Dominic Davis-Foster
+Donald Stufft
+Dongweiming
+doron zarhi
+Dos Moonen
+Douglas Thor
+DrFeathers
+Dustin Ingram
+Dwayne Bailey
+Ed Morley
+Edgar Ramírez
+Edgar Ramírez Mondragón
+Ee Durbin
+Efflam Lemaillet
+efflamlemaillet
+Eitan Adler
+ekristina
+elainechan
+Eli Schwartz
+Elisha Hollander
+Ellen Marie Dash
+Emil Burzo
+Emil Styrke
+Emmanuel Arias
+Endoh Takanao
+enoch
+Erdinc Mutlu
+Eric Cousineau
+Eric Gillingham
+Eric Hanchrow
+Eric Hopper
+Erik M. Bray
+Erik Rose
+Erwin Janssen
+Eugene Vereshchagin
+everdimension
+Federico
+Felipe Peter
+Felix Yan
+fiber-space
+Filip Kokosiński
+Filipe Laíns
+Finn Womack
+finnagin
+Flavio Amurrio
+Florian Briand
+Florian Rathgeber
+Francesco
+Francesco Montesano
+Frost Ming
+Gabriel Curio
+Gabriel de Perthuis
+Garry Polley
+gavin
+gdanielson
+Geoffrey Sneddon
+George Song
+Georgi Valkov
+Georgy Pchelkin
+ghost
+Giftlin Rajaiah
+gizmoguy1
+gkdoc
+Godefroid Chapelle
+Gopinath M
+GOTO Hayato
+gousaiyang
+gpiks
+Greg Roodt
+Greg Ward
+Guilherme Espada
+Guillaume Seguin
+gutsytechster
+Guy Rozendorn
+Guy Tuval
+gzpan123
+Hanjun Kim
+Hari Charan
+Harsh Vardhan
+harupy
+Harutaka Kawamura
+hauntsaninja
+Henrich Hartzer
+Henry Schreiner
+Herbert Pfennig
+Holly Stotelmyer
+Honnix
+Hsiaoming Yang
+Hugo Lopes Tavares
+Hugo van Kemenade
+Hugues Bruant
+Hynek Schlawack
+Ian Bicking
+Ian Cordasco
+Ian Lee
+Ian Stapleton Cordasco
+Ian Wienand
+Igor Kuzmitshov
+Igor Sobreira
+Ilan Schnell
+Illia Volochii
+Ilya Baryshev
+Inada Naoki
+Ionel Cristian Mărieș
+Ionel Maries Cristian
+Itamar Turner-Trauring
+Ivan Pozdeev
+J. Nick Koston
+Jacob Kim
+Jacob Walls
+Jaime Sanz
+jakirkham
+Jakub Kuczys
+Jakub Stasiak
+Jakub Vysoky
+Jakub Wilk
+James Cleveland
+James Curtin
+James Firth
+James Gerity
+James Polley
+Jan Pokorný
+Jannis Leidel
+Jarek Potiuk
+jarondl
+Jason Curtis
+Jason R. Coombs
+JasonMo
+JasonMo1
+Jay Graves
+Jean Abou Samra
+Jean-Christophe Fillion-Robin
+Jeff Barber
+Jeff Dairiki
+Jeff Widman
+Jelmer Vernooij
+jenix21
+Jeremy Stanley
+Jeremy Zafran
+Jesse Rittner
+Jiashuo Li
+Jim Fisher
+Jim Garrison
+Jiun Bae
+Jivan Amara
+Joe Bylund
+Joe Michelini
+John Paton
+John T. Wodder II
+John-Scott Atlakson
+johnthagen
+Jon Banafato
+Jon Dufresne
+Jon Parise
+Jonas Nockert
+Jonathan Herbert
+Joonatan Partanen
+Joost Molenaar
+Jorge Niedbalski
+Joseph Bylund
+Joseph Long
+Josh Bronson
+Josh Hansen
+Josh Schneier
+Joshua
+Juan Luis Cano Rodríguez
+Juanjo Bazán
+Judah Rand
+Julian Berman
+Julian Gethmann
+Julien Demoor
+Jussi Kukkonen
+jwg4
+Jyrki Pulliainen
+Kai Chen
+Kai Mueller
+Kamal Bin Mustafa
+kasium
+kaustav haldar
+keanemind
+Keith Maxwell
+Kelsey Hightower
+Kenneth Belitzky
+Kenneth Reitz
+Kevin Burke
+Kevin Carter
+Kevin Frommelt
+Kevin R Patterson
+Kexuan Sun
+Kit Randel
+Klaas van Schelven
+KOLANICH
+kpinc
+Krishna Oza
+Kumar McMillan
+Kurt McKee
+Kyle Persohn
+lakshmanaram
+Laszlo Kiss-Kollar
+Laurent Bristiel
+Laurent LAPORTE
+Laurie O
+Laurie Opperman
+layday
+Leon Sasson
+Lev Givon
+Lincoln de Sousa
+Lipis
+lorddavidiii
+Loren Carvalho
+Lucas Cimon
+Ludovic Gasc
+Lukas Geiger
+Lukas Juhrich
+Luke Macken
+Luo Jiebin
+luojiebin
+luz.paz
+László Kiss Kollár
+M00nL1ght
+Marc Abramowitz
+Marc Tamlyn
+Marcus Smith
+Mariatta
+Mark Kohler
+Mark Williams
+Markus Hametner
+Martey Dodoo
+Martin Fischer
+Martin Häcker
+Martin Pavlasek
+Masaki
+Masklinn
+Matej Stuchlik
+Mathew Jennings
+Mathieu Bridon
+Mathieu Kniewallner
+Matt Bacchi
+Matt Good
+Matt Maker
+Matt Robenolt
+matthew
+Matthew Einhorn
+Matthew Feickert
+Matthew Gilliard
+Matthew Iversen
+Matthew Treinish
+Matthew Trumbell
+Matthew Willson
+Matthias Bussonnier
+mattip
+Maurits van Rees
+Max W Chase
+Maxim Kurnikov
+Maxime Rouyrre
+mayeut
+mbaluna
+mdebi
+memoselyk
+meowmeowcat
+Michael
+Michael Aquilina
+Michael E. Karpeles
+Michael Klich
+Michael Mintz
+Michael Williamson
+michaelpacer
+Michał Górny
+Mickaël Schoentgen
+Miguel Araujo Perez
+Mihir Singh
+Mike
+Mike Hendricks
+Min RK
+MinRK
+Miro Hrončok
+Monica Baluna
+montefra
+Monty Taylor
+Muha Ajjan‮
+Nadav Wexler
+Nahuel Ambrosini
+Nate Coraor
+Nate Prewitt
+Nathan Houghton
+Nathaniel J. Smith
+Nehal J Wani
+Neil Botelho
+Nguyễn Gia Phong
+Nicholas Serra
+Nick Coghlan
+Nick Stenning
+Nick Timkovich
+Nicolas Bock
+Nicole Harris
+Nikhil Benesch
+Nikhil Ladha
+Nikita Chepanov
+Nikolay Korolev
+Nipunn Koorapati
+Nitesh Sharma
+Niyas Sait
+Noah
+Noah Gorny
+Nowell Strite
+NtaleGrey
+nvdv
+OBITORASU
+Ofek Lev
+ofrinevo
+Oliver Freund
+Oliver Jeeves
+Oliver Mannion
+Oliver Tonnhofer
+Olivier Girardot
+Olivier Grisel
+Ollie Rutherfurd
+OMOTO Kenji
+Omry Yadan
+onlinejudge95
+Oren Held
+Oscar Benjamin
+Oz N Tiram
+Pachwenko
+Patrick Dubroy
+Patrick Jenkins
+Patrick Lawson
+patricktokeeffe
+Patrik Kopkan
+Paul Ganssle
+Paul Kehrer
+Paul Moore
+Paul Nasrat
+Paul Oswald
+Paul van der Linden
+Paulus Schoutsen
+Pavel Safronov
+Pavithra Eswaramoorthy
+Pawel Jasinski
+Paweł Szramowski
+Pekka Klärck
+Peter Gessler
+Peter Lisák
+Peter Waller
+petr-tik
+Phaneendra Chiruvella
+Phil Elson
+Phil Freo
+Phil Pennock
+Phil Whelan
+Philip Jägenstedt
+Philip Molloy
+Philippe Ombredanne
+Pi Delport
+Pierre-Yves Rofes
+Pieter Degroote
+pip
+Prabakaran Kumaresshan
+Prabhjyotsing Surjit Singh Sodhi
+Prabhu Marappan
+Pradyun Gedam
+Prashant Sharma
+Pratik Mallya
+pre-commit-ci[bot]
+Preet Thakkar
+Preston Holmes
+Przemek Wrzos
+Pulkit Goyal
+q0w
+Qiangning Hong
+Qiming Xu
+Quentin Lee
+Quentin Pradet
+R. David Murray
+Rafael Caricio
+Ralf Schmitt
+Razzi Abuissa
+rdb
+Reece Dunham
+Remi Rampin
+Rene Dudfield
+Riccardo Magliocchetti
+Riccardo Schirone
+Richard Jones
+Richard Si
+Ricky Ng-Adam
+Rishi
+RobberPhex
+Robert Collins
+Robert McGibbon
+Robert Pollak
+Robert T. McGibbon
+robin elisha robinson
+Roey Berman
+Rohan Jain
+Roman Bogorodskiy
+Roman Donchenko
+Romuald Brunet
+ronaudinho
+Ronny Pfannschmidt
+Rory McCann
+Ross Brattain
+Roy Wellington Ⅳ
+Ruairidh MacLeod
+Russell Keith-Magee
+Ryan Shepherd
+Ryan Wooden
+ryneeverett
+Sachi King
+Salvatore Rinchiera
+sandeepkiran-js
+Sander Van Balen
+Savio Jomton
+schlamar
+Scott Kitterman
+Sean
+seanj
+Sebastian Jordan
+Sebastian Schaetz
+Segev Finer
+SeongSoo Cho
+Sergey Vasilyev
+Seth Michael Larson
+Seth Woodworth
+Shahar Epstein
+Shantanu
+shireenrao
+Shivansh-007
+Shlomi Fish
+Shovan Maity
+Simeon Visser
+Simon Cross
+Simon Pichugin
+sinoroc
+sinscary
+snook92
+socketubs
+Sorin Sbarnea
+Srinivas Nyayapati
+Stavros Korokithakis
+Stefan Scherfke
+Stefano Rivera
+Stephan Erb
+Stephen Rosen
+stepshal
+Steve (Gadget) Barnes
+Steve Barnes
+Steve Dower
+Steve Kowalik
+Steven Myint
+Steven Silvester
+stonebig
+studioj
+Stéphane Bidoul
+Stéphane Bidoul (ACSONE)
+Stéphane Klein
+Sumana Harihareswara
+Surbhi Sharma
+Sviatoslav Sydorenko
+Swat009
+Sylvain
+Takayuki SHIMIZUKAWA
+Taneli Hukkinen
+tbeswick
+Thiago
+Thijs Triemstra
+Thomas Fenzl
+Thomas Grainger
+Thomas Guettler
+Thomas Johansson
+Thomas Kluyver
+Thomas Smith
+Thomas VINCENT
+Tim D. Smith
+Tim Gates
+Tim Harder
+Tim Heap
+tim smith
+tinruufu
+Tobias Hermann
+Tom Forbes
+Tom Freudenheim
+Tom V
+Tomas Hrnciar
+Tomas Orsava
+Tomer Chachamu
+Tommi Enenkel | AnB
+Tomáš Hrnčiar
+Tony Beswick
+Tony Narlock
+Tony Zhaocheng Tan
+TonyBeswick
+toonarmycaptain
+Toshio Kuratomi
+toxinu
+Travis Swicegood
+Tushar Sadhwani
+Tzu-ping Chung
+Valentin Haenel
+Victor Stinner
+victorvpaulo
+Vikram - Google
+Viktor Szépe
+Ville Skyttä
+Vinay Sajip
+Vincent Philippon
+Vinicyus Macedo
+Vipul Kumar
+Vitaly Babiy
+Vladimir Fokow
+Vladimir Rutsky
+W. Trevor King
+Wil Tan
+Wilfred Hughes
+William Edwards
+William ML Leslie
+William T Olson
+William Woodruff
+Wilson Mo
+wim glenn
+Winson Luk
+Wolfgang Maier
+Wu Zhenyu
+XAMES3
+Xavier Fernandez
+xoviat
+xtreak
+YAMAMOTO Takashi
+Yen Chi Hsuan
+Yeray Diaz Diaz
+Yoval P
+Yu Jian
+Yuan Jing Vincent Yan
+Yusuke Hayashi
+Zearin
+Zhiping Deng
+ziebam
+Zvezdan Petkovic
+Łukasz Langa
+Роман Донченко
+Семён Марьясин
+‮rekcäH nitraM‮
diff --git a/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/LICENSE.txt b/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/LICENSE.txt
new file mode 100644
index 0000000..8e7b65e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/LICENSE.txt
@@ -0,0 +1,20 @@
+Copyright (c) 2008-present The pip developers (see AUTHORS.txt file)
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/METADATA b/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/METADATA
new file mode 100644
index 0000000..e5b45bd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/METADATA
@@ -0,0 +1,88 @@
+Metadata-Version: 2.1
+Name: pip
+Version: 24.0
+Summary: The PyPA recommended tool for installing Python packages.
+Author-email: The pip developers <distutils-sig@python.org>
+License: MIT
+Project-URL: Homepage, https://pip.pypa.io/
+Project-URL: Documentation, https://pip.pypa.io
+Project-URL: Source, https://github.com/pypa/pip
+Project-URL: Changelog, https://pip.pypa.io/en/stable/news/
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Topic :: Software Development :: Build Tools
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE.txt
+License-File: AUTHORS.txt
+
+pip - The Python Package Installer
+==================================
+
+.. image:: https://img.shields.io/pypi/v/pip.svg
+   :target: https://pypi.org/project/pip/
+   :alt: PyPI
+
+.. image:: https://img.shields.io/pypi/pyversions/pip
+   :target: https://pypi.org/project/pip
+   :alt: PyPI - Python Version
+
+.. image:: https://readthedocs.org/projects/pip/badge/?version=latest
+   :target: https://pip.pypa.io/en/latest
+   :alt: Documentation
+
+pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.
+
+Please take a look at our documentation for how to install and use pip:
+
+* `Installation`_
+* `Usage`_
+
+We release updates regularly, with a new version every 3 months. Find more details in our documentation:
+
+* `Release notes`_
+* `Release process`_
+
+If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms:
+
+* `Issue tracking`_
+* `Discourse channel`_
+* `User IRC`_
+
+If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms:
+
+* `GitHub page`_
+* `Development documentation`_
+* `Development IRC`_
+
+Code of Conduct
+---------------
+
+Everyone interacting in the pip project's codebases, issue trackers, chat
+rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
+
+.. _package installer: https://packaging.python.org/guides/tool-recommendations/
+.. _Python Package Index: https://pypi.org
+.. _Installation: https://pip.pypa.io/en/stable/installation/
+.. _Usage: https://pip.pypa.io/en/stable/
+.. _Release notes: https://pip.pypa.io/en/stable/news.html
+.. _Release process: https://pip.pypa.io/en/latest/development/release-process/
+.. _GitHub page: https://github.com/pypa/pip
+.. _Development documentation: https://pip.pypa.io/en/latest/development
+.. _Issue tracking: https://github.com/pypa/pip/issues
+.. _Discourse channel: https://discuss.python.org/c/packaging
+.. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa
+.. _Development IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev
+.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
diff --git a/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/RECORD b/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/RECORD
new file mode 100644
index 0000000..9a833e5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/RECORD
@@ -0,0 +1,1005 @@
+../../../bin/pip,sha256=UFSplFLjoN4wnIW7H6LNMSTucvUAUaBSNklNcWixBA4,253
+../../../bin/pip3,sha256=UFSplFLjoN4wnIW7H6LNMSTucvUAUaBSNklNcWixBA4,253
+../../../bin/pip3.12,sha256=UFSplFLjoN4wnIW7H6LNMSTucvUAUaBSNklNcWixBA4,253
+pip-24.0.dist-info/AUTHORS.txt,sha256=SwXm4nkwRkmtnO1ZY-dLy7EPeoQNXMNLby5CN3GlNhY,10388
+pip-24.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+pip-24.0.dist-info/LICENSE.txt,sha256=Y0MApmnUmurmWxLGxIySTFGkzfPR_whtw0VtyLyqIQQ,1093
+pip-24.0.dist-info/METADATA,sha256=kNEfJ3_Vho2mee4lfJdlbd5RHIqsfQJSMUB-bOkIOeI,3581
+pip-24.0.dist-info/RECORD,,
+pip-24.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip-24.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+pip-24.0.dist-info/entry_points.txt,sha256=Fa_c0b-xGFaYxagIruvpJD6qqXmNTA02vAVIkmMj-9o,125
+pip-24.0.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+pip/__init__.py,sha256=oAk1nFpLmUVS5Ln7NxvNoGUn5Vkn6FGQjPaNDf8Q8pk,355
+pip/__main__.py,sha256=WzbhHXTbSE6gBY19mNN9m4s5o_365LOvTYSgqgbdBhE,854
+pip/__pip-runner__.py,sha256=EnrfKmKMzWAdqg_JicLCOP9Y95Ux7zHh4ObvqLtQcjo,1444
+pip/__pycache__/__init__.cpython-312.pyc,,
+pip/__pycache__/__main__.cpython-312.pyc,,
+pip/__pycache__/__pip-runner__.cpython-312.pyc,,
+pip/_internal/__init__.py,sha256=iqZ5-YQsQV08tkUc7L806Reop6tguLFWf70ySF6be0Y,515
+pip/_internal/__pycache__/__init__.cpython-312.pyc,,
+pip/_internal/__pycache__/build_env.cpython-312.pyc,,
+pip/_internal/__pycache__/cache.cpython-312.pyc,,
+pip/_internal/__pycache__/configuration.cpython-312.pyc,,
+pip/_internal/__pycache__/exceptions.cpython-312.pyc,,
+pip/_internal/__pycache__/main.cpython-312.pyc,,
+pip/_internal/__pycache__/pyproject.cpython-312.pyc,,
+pip/_internal/__pycache__/self_outdated_check.cpython-312.pyc,,
+pip/_internal/__pycache__/wheel_builder.cpython-312.pyc,,
+pip/_internal/build_env.py,sha256=1ESpqw0iupS_K7phZK5zshVE5Czy9BtGLFU4W6Enva8,10243
+pip/_internal/cache.py,sha256=uiYD-9F0Bv1C8ZyWE85lpzDmQf7hcUkgL99GmI8I41Q,10370
+pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132
+pip/_internal/cli/__pycache__/__init__.cpython-312.pyc,,
+pip/_internal/cli/__pycache__/autocompletion.cpython-312.pyc,,
+pip/_internal/cli/__pycache__/base_command.cpython-312.pyc,,
+pip/_internal/cli/__pycache__/cmdoptions.cpython-312.pyc,,
+pip/_internal/cli/__pycache__/command_context.cpython-312.pyc,,
+pip/_internal/cli/__pycache__/main.cpython-312.pyc,,
+pip/_internal/cli/__pycache__/main_parser.cpython-312.pyc,,
+pip/_internal/cli/__pycache__/parser.cpython-312.pyc,,
+pip/_internal/cli/__pycache__/progress_bars.cpython-312.pyc,,
+pip/_internal/cli/__pycache__/req_command.cpython-312.pyc,,
+pip/_internal/cli/__pycache__/spinners.cpython-312.pyc,,
+pip/_internal/cli/__pycache__/status_codes.cpython-312.pyc,,
+pip/_internal/cli/autocompletion.py,sha256=_br_5NgSxSuvPjMF0MLHzS5s6BpSkQAQHKrLK89VauM,6690
+pip/_internal/cli/base_command.py,sha256=iuVWGa2oTq7gBReo0er3Z0tXJ2oqBIC6QjDHcnDhKXY,8733
+pip/_internal/cli/cmdoptions.py,sha256=V8ggG6AtHpHKkH_6tRU0mhJaZTeqtrFpu75ghvMXXJk,30063
+pip/_internal/cli/command_context.py,sha256=RHgIPwtObh5KhMrd3YZTkl8zbVG-6Okml7YbFX4Ehg0,774
+pip/_internal/cli/main.py,sha256=Uzxt_YD1hIvB1AW5mxt6IVcht5G712AtMqdo51UMhmQ,2816
+pip/_internal/cli/main_parser.py,sha256=laDpsuBDl6kyfywp9eMMA9s84jfH2TJJn-vmL0GG90w,4338
+pip/_internal/cli/parser.py,sha256=KW6C3-7-4ErTNB0TfLTKwOdHcd-qefCeGnrOoE2r0RQ,10781
+pip/_internal/cli/progress_bars.py,sha256=So4mPoSjXkXiSHiTzzquH3VVyVD_njXlHJSExYPXAow,1968
+pip/_internal/cli/req_command.py,sha256=c7_XHABnXmD3_qlK9-r37KqdKBAcgmVKvQ2WcTrNLfc,18369
+pip/_internal/cli/spinners.py,sha256=hIJ83GerdFgFCdobIA23Jggetegl_uC4Sp586nzFbPE,5118
+pip/_internal/cli/status_codes.py,sha256=sEFHUaUJbqv8iArL3HAtcztWZmGOFX01hTesSytDEh0,116
+pip/_internal/commands/__init__.py,sha256=5oRO9O3dM2vGuh0bFw4HOVletryrz5HHMmmPWwJrH9U,3882
+pip/_internal/commands/__pycache__/__init__.cpython-312.pyc,,
+pip/_internal/commands/__pycache__/cache.cpython-312.pyc,,
+pip/_internal/commands/__pycache__/check.cpython-312.pyc,,
+pip/_internal/commands/__pycache__/completion.cpython-312.pyc,,
+pip/_internal/commands/__pycache__/configuration.cpython-312.pyc,,
+pip/_internal/commands/__pycache__/debug.cpython-312.pyc,,
+pip/_internal/commands/__pycache__/download.cpython-312.pyc,,
+pip/_internal/commands/__pycache__/freeze.cpython-312.pyc,,
+pip/_internal/commands/__pycache__/hash.cpython-312.pyc,,
+pip/_internal/commands/__pycache__/help.cpython-312.pyc,,
+pip/_internal/commands/__pycache__/index.cpython-312.pyc,,
+pip/_internal/commands/__pycache__/inspect.cpython-312.pyc,,
+pip/_internal/commands/__pycache__/install.cpython-312.pyc,,
+pip/_internal/commands/__pycache__/list.cpython-312.pyc,,
+pip/_internal/commands/__pycache__/search.cpython-312.pyc,,
+pip/_internal/commands/__pycache__/show.cpython-312.pyc,,
+pip/_internal/commands/__pycache__/uninstall.cpython-312.pyc,,
+pip/_internal/commands/__pycache__/wheel.cpython-312.pyc,,
+pip/_internal/commands/cache.py,sha256=xg76_ZFEBC6zoQ3gXLRfMZJft4z2a0RwH4GEFZC6nnU,7944
+pip/_internal/commands/check.py,sha256=Rb13Q28yoLh0j1gpx5SU0jlResNct21eQCRsnaO9xKA,1782
+pip/_internal/commands/completion.py,sha256=HT4lD0bgsflHq2IDgYfiEdp7IGGtE7s6MgI3xn0VQEw,4287
+pip/_internal/commands/configuration.py,sha256=n98enwp6y0b5G6fiRQjaZo43FlJKYve_daMhN-4BRNc,9766
+pip/_internal/commands/debug.py,sha256=63972uUCeMIGOdMMVeIUGrOjTOqTVWplFC82a-hcKyA,6777
+pip/_internal/commands/download.py,sha256=e4hw088zGo26WmJaMIRvCniLlLmoOjqolGyfHjsCkCQ,5335
+pip/_internal/commands/freeze.py,sha256=qrIHS_-c6JPrQ92hMhAv9kkl0bHgFpRLwYJDdbcYr1o,3243
+pip/_internal/commands/hash.py,sha256=EVVOuvGtoPEdFi8SNnmdqlCQrhCxV-kJsdwtdcCnXGQ,1703
+pip/_internal/commands/help.py,sha256=gcc6QDkcgHMOuAn5UxaZwAStsRBrnGSn_yxjS57JIoM,1132
+pip/_internal/commands/index.py,sha256=CNXQer_PeZKSJooURcCFCBEKGfwyNoUWYP_MWczAcOM,4775
+pip/_internal/commands/inspect.py,sha256=2wSPt9yfr3r6g-s2S5L6PvRtaHNVyb4TuodMStJ39cw,3188
+pip/_internal/commands/install.py,sha256=VxDd-BD3a27ApeE2OK34rfBXS6Zo2wtemK9-HCwPqxM,28782
+pip/_internal/commands/list.py,sha256=-QbpPuGDiGN1SdThsk2ml8beBnepliefbGhMAN8tkzU,12547
+pip/_internal/commands/search.py,sha256=sbBZiARRc050QquOKcCvOr2K3XLsoYebLKZGRi__iUI,5697
+pip/_internal/commands/show.py,sha256=t5jia4zcYJRJZy4U_Von7zMl03hJmmcofj6oDNTnj7Y,6419
+pip/_internal/commands/uninstall.py,sha256=OIqO9tqadY8kM4HwhFf1Q62fUIp7v8KDrTRo8yWMz7Y,3886
+pip/_internal/commands/wheel.py,sha256=CSnX8Pmf1oPCnd7j7bn1_f58G9KHNiAblvVJ5zykN-A,6476
+pip/_internal/configuration.py,sha256=XkAiBS0hpzsM-LF0Qu5hvPWO_Bs67-oQKRYFBuMbESs,14006
+pip/_internal/distributions/__init__.py,sha256=Hq6kt6gXBgjNit5hTTWLAzeCNOKoB-N0pGYSqehrli8,858
+pip/_internal/distributions/__pycache__/__init__.cpython-312.pyc,,
+pip/_internal/distributions/__pycache__/base.cpython-312.pyc,,
+pip/_internal/distributions/__pycache__/installed.cpython-312.pyc,,
+pip/_internal/distributions/__pycache__/sdist.cpython-312.pyc,,
+pip/_internal/distributions/__pycache__/wheel.cpython-312.pyc,,
+pip/_internal/distributions/base.py,sha256=oRSEvnv2ZjBnargamnv2fcJa1n6gUDKaW0g6CWSEpWs,1743
+pip/_internal/distributions/installed.py,sha256=QinHFbWAQ8oE0pbD8MFZWkwlnfU1QYTccA1vnhrlYOU,842
+pip/_internal/distributions/sdist.py,sha256=4K3V0VNMllHbBzCJibjwd_tylUKpmIdu2AQyhplvCQo,6709
+pip/_internal/distributions/wheel.py,sha256=-ma3sOtUQj0AxXCEb6_Fhmjl3nh4k3A0HC2taAb2N-4,1277
+pip/_internal/exceptions.py,sha256=TmF1iNFEneSWaemwlg6a5bpPuq2cMHK7d1-SvjsQHb0,23634
+pip/_internal/index/__init__.py,sha256=vpt-JeTZefh8a-FC22ZeBSXFVbuBcXSGiILhQZJaNpQ,30
+pip/_internal/index/__pycache__/__init__.cpython-312.pyc,,
+pip/_internal/index/__pycache__/collector.cpython-312.pyc,,
+pip/_internal/index/__pycache__/package_finder.cpython-312.pyc,,
+pip/_internal/index/__pycache__/sources.cpython-312.pyc,,
+pip/_internal/index/collector.py,sha256=sH0tL_cOoCk6pLLfCSGVjFM4rPEJtllF-VobvAvLSH4,16590
+pip/_internal/index/package_finder.py,sha256=S_nC8gzVIMY6ikWfKoSOzRtoesUqnfNhAPl_BwSOusA,37843
+pip/_internal/index/sources.py,sha256=dJegiR9f86kslaAHcv9-R5L_XBf5Rzm_FkyPteDuPxI,8688
+pip/_internal/locations/__init__.py,sha256=Dh8LJWG8LRlDK4JIj9sfRF96TREzE--N_AIlx7Tqoe4,15365
+pip/_internal/locations/__pycache__/__init__.cpython-312.pyc,,
+pip/_internal/locations/__pycache__/_distutils.cpython-312.pyc,,
+pip/_internal/locations/__pycache__/_sysconfig.cpython-312.pyc,,
+pip/_internal/locations/__pycache__/base.cpython-312.pyc,,
+pip/_internal/locations/_distutils.py,sha256=H9ZHK_35rdDV1Qsmi4QeaBULjFT4Mbu6QuoVGkJ6QHI,6009
+pip/_internal/locations/_sysconfig.py,sha256=jyNVtUfMIf0mtyY-Xp1m9yQ8iwECozSVVFmjkN9a2yw,7680
+pip/_internal/locations/base.py,sha256=RQiPi1d4FVM2Bxk04dQhXZ2PqkeljEL2fZZ9SYqIQ78,2556
+pip/_internal/main.py,sha256=r-UnUe8HLo5XFJz8inTcOOTiu_sxNhgHb6VwlGUllOI,340
+pip/_internal/metadata/__init__.py,sha256=9pU3W3s-6HtjFuYhWcLTYVmSaziklPv7k2x8p7X1GmA,4339
+pip/_internal/metadata/__pycache__/__init__.cpython-312.pyc,,
+pip/_internal/metadata/__pycache__/_json.cpython-312.pyc,,
+pip/_internal/metadata/__pycache__/base.cpython-312.pyc,,
+pip/_internal/metadata/__pycache__/pkg_resources.cpython-312.pyc,,
+pip/_internal/metadata/_json.py,sha256=Rz5M5ciSNvITwaTQR6NfN8TgKgM5WfTws4D6CFknovE,2627
+pip/_internal/metadata/base.py,sha256=l3Wgku4xlgr8s4p6fS-3qQ4QKOpPbWLRwi5d9omEFG4,25907
+pip/_internal/metadata/importlib/__init__.py,sha256=jUUidoxnHcfITHHaAWG1G2i5fdBYklv_uJcjo2x7VYE,135
+pip/_internal/metadata/importlib/__pycache__/__init__.cpython-312.pyc,,
+pip/_internal/metadata/importlib/__pycache__/_compat.cpython-312.pyc,,
+pip/_internal/metadata/importlib/__pycache__/_dists.cpython-312.pyc,,
+pip/_internal/metadata/importlib/__pycache__/_envs.cpython-312.pyc,,
+pip/_internal/metadata/importlib/_compat.py,sha256=GAe_prIfCE4iUylrnr_2dJRlkkBVRUbOidEoID7LPoE,1882
+pip/_internal/metadata/importlib/_dists.py,sha256=UPl1wUujFqiwiltRJ1tMF42WRINO1sSpNNlYQ2mX0mk,8297
+pip/_internal/metadata/importlib/_envs.py,sha256=XTaFIYERP2JF0QUZuPx2ETiugXbPEcZ8q8ZKeht6Lpc,7456
+pip/_internal/metadata/pkg_resources.py,sha256=opjw4IBSqHvie6sXJ_cbT42meygoPEUfNURJuWZY7sk,10035
+pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63
+pip/_internal/models/__pycache__/__init__.cpython-312.pyc,,
+pip/_internal/models/__pycache__/candidate.cpython-312.pyc,,
+pip/_internal/models/__pycache__/direct_url.cpython-312.pyc,,
+pip/_internal/models/__pycache__/format_control.cpython-312.pyc,,
+pip/_internal/models/__pycache__/index.cpython-312.pyc,,
+pip/_internal/models/__pycache__/installation_report.cpython-312.pyc,,
+pip/_internal/models/__pycache__/link.cpython-312.pyc,,
+pip/_internal/models/__pycache__/scheme.cpython-312.pyc,,
+pip/_internal/models/__pycache__/search_scope.cpython-312.pyc,,
+pip/_internal/models/__pycache__/selection_prefs.cpython-312.pyc,,
+pip/_internal/models/__pycache__/target_python.cpython-312.pyc,,
+pip/_internal/models/__pycache__/wheel.cpython-312.pyc,,
+pip/_internal/models/candidate.py,sha256=hEPu8VdGE5qVASv6vLz-R-Rgh5-7LMbai1jgthMCd8M,931
+pip/_internal/models/direct_url.py,sha256=FwouYBKcqckh7B-k2H3HVgRhhFTukFwqiS3kfvtFLSk,6889
+pip/_internal/models/format_control.py,sha256=wtsQqSK9HaUiNxQEuB-C62eVimw6G4_VQFxV9-_KDBE,2486
+pip/_internal/models/index.py,sha256=tYnL8oxGi4aSNWur0mG8DAP7rC6yuha_MwJO8xw0crI,1030
+pip/_internal/models/installation_report.py,sha256=zRVZoaz-2vsrezj_H3hLOhMZCK9c7TbzWgC-jOalD00,2818
+pip/_internal/models/link.py,sha256=XirOAGv1jgMu7vu87kuPbohGj7VHpwVrd2q3KUgVQNg,20777
+pip/_internal/models/scheme.py,sha256=3EFQp_ICu_shH1-TBqhl0QAusKCPDFOlgHFeN4XowWs,738
+pip/_internal/models/search_scope.py,sha256=ASVyyZxiJILw7bTIVVpJx8J293M3Hk5F33ilGn0e80c,4643
+pip/_internal/models/selection_prefs.py,sha256=KZdi66gsR-_RUXUr9uejssk3rmTHrQVJWeNA2sV-VSY,1907
+pip/_internal/models/target_python.py,sha256=34EkorrMuRvRp-bjqHKJ-bOO71m9xdjN2b8WWFEC2HU,4272
+pip/_internal/models/wheel.py,sha256=YqazoIZyma_Q1ejFa1C7NHKQRRWlvWkdK96VRKmDBeI,3600
+pip/_internal/network/__init__.py,sha256=jf6Tt5nV_7zkARBrKojIXItgejvoegVJVKUbhAa5Ioc,50
+pip/_internal/network/__pycache__/__init__.cpython-312.pyc,,
+pip/_internal/network/__pycache__/auth.cpython-312.pyc,,
+pip/_internal/network/__pycache__/cache.cpython-312.pyc,,
+pip/_internal/network/__pycache__/download.cpython-312.pyc,,
+pip/_internal/network/__pycache__/lazy_wheel.cpython-312.pyc,,
+pip/_internal/network/__pycache__/session.cpython-312.pyc,,
+pip/_internal/network/__pycache__/utils.cpython-312.pyc,,
+pip/_internal/network/__pycache__/xmlrpc.cpython-312.pyc,,
+pip/_internal/network/auth.py,sha256=TC-OcW2KU4W6R1hU4qPgQXvVH54adACpZz6sWq-R9NA,20541
+pip/_internal/network/cache.py,sha256=48A971qCzKNFvkb57uGEk7-0xaqPS0HWj2711QNTxkU,3935
+pip/_internal/network/download.py,sha256=i0Tn55CD5D7XYEFY3TxiYaCf0OaaTQ6SScNgCsSeV14,6086
+pip/_internal/network/lazy_wheel.py,sha256=2PXVduYZPCPZkkQFe1J1GbfHJWeCU--FXonGyIfw9eU,7638
+pip/_internal/network/session.py,sha256=9tqEDD8JiVaFdplOEXJxNo9cjRfBZ6RIa0yQQ_qBNiM,18698
+pip/_internal/network/utils.py,sha256=6A5SrUJEEUHxbGtbscwU2NpCyz-3ztiDlGWHpRRhsJ8,4073
+pip/_internal/network/xmlrpc.py,sha256=sAxzOacJ-N1NXGPvap9jC3zuYWSnnv3GXtgR2-E2APA,1838
+pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_internal/operations/__pycache__/__init__.cpython-312.pyc,,
+pip/_internal/operations/__pycache__/check.cpython-312.pyc,,
+pip/_internal/operations/__pycache__/freeze.cpython-312.pyc,,
+pip/_internal/operations/__pycache__/prepare.cpython-312.pyc,,
+pip/_internal/operations/build/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_internal/operations/build/__pycache__/__init__.cpython-312.pyc,,
+pip/_internal/operations/build/__pycache__/build_tracker.cpython-312.pyc,,
+pip/_internal/operations/build/__pycache__/metadata.cpython-312.pyc,,
+pip/_internal/operations/build/__pycache__/metadata_editable.cpython-312.pyc,,
+pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-312.pyc,,
+pip/_internal/operations/build/__pycache__/wheel.cpython-312.pyc,,
+pip/_internal/operations/build/__pycache__/wheel_editable.cpython-312.pyc,,
+pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-312.pyc,,
+pip/_internal/operations/build/build_tracker.py,sha256=z-H5DOknZdBa3dh2Vq6VBMY5qLYIKmlj2p6CGZK5Lc8,4832
+pip/_internal/operations/build/metadata.py,sha256=9S0CUD8U3QqZeXp-Zyt8HxwU90lE4QrnYDgrqZDzBnc,1422
+pip/_internal/operations/build/metadata_editable.py,sha256=VLL7LvntKE8qxdhUdEJhcotFzUsOSI8NNS043xULKew,1474
+pip/_internal/operations/build/metadata_legacy.py,sha256=o-eU21As175hDC7dluM1fJJ_FqokTIShyWpjKaIpHZw,2198
+pip/_internal/operations/build/wheel.py,sha256=sT12FBLAxDC6wyrDorh8kvcZ1jG5qInCRWzzP-UkJiQ,1075
+pip/_internal/operations/build/wheel_editable.py,sha256=yOtoH6zpAkoKYEUtr8FhzrYnkNHQaQBjWQ2HYae1MQg,1417
+pip/_internal/operations/build/wheel_legacy.py,sha256=C9j6rukgQI1n_JeQLoZGuDdfUwzCXShyIdPTp6edbMQ,3064
+pip/_internal/operations/check.py,sha256=fsqA88iGaqftCr2tlP3sSU202CSkoODRtW0O-JU9M4Y,6806
+pip/_internal/operations/freeze.py,sha256=uqoeTAf6HOYVMR2UgAT8N85UZoGEVEoQdan_Ao6SOfk,9816
+pip/_internal/operations/install/__init__.py,sha256=mX7hyD2GNBO2mFGokDQ30r_GXv7Y_PLdtxcUv144e-s,51
+pip/_internal/operations/install/__pycache__/__init__.cpython-312.pyc,,
+pip/_internal/operations/install/__pycache__/editable_legacy.cpython-312.pyc,,
+pip/_internal/operations/install/__pycache__/wheel.cpython-312.pyc,,
+pip/_internal/operations/install/editable_legacy.py,sha256=YeR0KadWXw_ZheC1NtAG1qVIEkOgRGHc23x-YtGW7NU,1282
+pip/_internal/operations/install/wheel.py,sha256=9hGb1c4bRnPIb2FG7CtUSPfPxqprmHQBtwIAlWPNTtE,27311
+pip/_internal/operations/prepare.py,sha256=57Oq87HfunX3Rbqp47FdaJr9cHbAKUm_3gv7WhBAqbE,28128
+pip/_internal/pyproject.py,sha256=4Xszp11xgr126yzG6BbJA0oaQ9WXuhb0jyUb-y_6lPQ,7152
+pip/_internal/req/__init__.py,sha256=TELFgZOof3lhMmaICVWL9U7PlhXo9OufokbMAJ6J2GI,2738
+pip/_internal/req/__pycache__/__init__.cpython-312.pyc,,
+pip/_internal/req/__pycache__/constructors.cpython-312.pyc,,
+pip/_internal/req/__pycache__/req_file.cpython-312.pyc,,
+pip/_internal/req/__pycache__/req_install.cpython-312.pyc,,
+pip/_internal/req/__pycache__/req_set.cpython-312.pyc,,
+pip/_internal/req/__pycache__/req_uninstall.cpython-312.pyc,,
+pip/_internal/req/constructors.py,sha256=8hlY56imEthLORRwmloyKz3YOyXymIaKsNB6P9ewvNI,19018
+pip/_internal/req/req_file.py,sha256=M8ttOZL-PwAj7scPElhW3ZD2hiD9mm_6FJAGIbwAzEI,17790
+pip/_internal/req/req_install.py,sha256=wtOPxkyRSM8comTks8oL1Gp2oyGqbH7JwIDRci2QiPk,35460
+pip/_internal/req/req_set.py,sha256=iMYDUToSgkxFyrP_OrTtPSgw4dwjRyGRDpGooTqeA4Y,4704
+pip/_internal/req/req_uninstall.py,sha256=nmvTQaRCC0iu-5Tw0djlXJhSj6WmqHRvT3qkkEdC35E,24551
+pip/_internal/resolution/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_internal/resolution/__pycache__/__init__.cpython-312.pyc,,
+pip/_internal/resolution/__pycache__/base.cpython-312.pyc,,
+pip/_internal/resolution/base.py,sha256=qlmh325SBVfvG6Me9gc5Nsh5sdwHBwzHBq6aEXtKsLA,583
+pip/_internal/resolution/legacy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_internal/resolution/legacy/__pycache__/__init__.cpython-312.pyc,,
+pip/_internal/resolution/legacy/__pycache__/resolver.cpython-312.pyc,,
+pip/_internal/resolution/legacy/resolver.py,sha256=Xk24jQ62GvLr4Mc7IjN_qiO88qp0BImzVmPIFz9QLOE,24025
+pip/_internal/resolution/resolvelib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-312.pyc,,
+pip/_internal/resolution/resolvelib/__pycache__/base.cpython-312.pyc,,
+pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-312.pyc,,
+pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-312.pyc,,
+pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-312.pyc,,
+pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-312.pyc,,
+pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-312.pyc,,
+pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-312.pyc,,
+pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-312.pyc,,
+pip/_internal/resolution/resolvelib/base.py,sha256=jg5COmHLhmBIKOR-4spdJD3jyULYa1BdsqiBu2YJnJ4,5173
+pip/_internal/resolution/resolvelib/candidates.py,sha256=19Ki91Po-MSxBknGIfOGkaWkFdOznN0W_nKv7jL28L0,21052
+pip/_internal/resolution/resolvelib/factory.py,sha256=vqqk-hjchdhShwWVdeW2_A-5ZblLhE_nC_v3Mhz4Svc,32292
+pip/_internal/resolution/resolvelib/found_candidates.py,sha256=hvL3Hoa9VaYo-qEOZkBi2Iqw251UDxPz-uMHVaWmLpE,5705
+pip/_internal/resolution/resolvelib/provider.py,sha256=4t23ivjruqM6hKBX1KpGiTt-M4HGhRcZnGLV0c01K7U,9824
+pip/_internal/resolution/resolvelib/reporter.py,sha256=YFm9hQvz4DFCbjZeFTQ56hTz3Ac-mDBnHkeNRVvMHLY,3100
+pip/_internal/resolution/resolvelib/requirements.py,sha256=-kJONP0WjDfdTvBAs2vUXPgAnOyNIBEAXY4b72ogtPE,5696
+pip/_internal/resolution/resolvelib/resolver.py,sha256=nLJOsVMEVi2gQUVJoUFKMZAeu2f7GRMjGMvNSWyz0Bc,12592
+pip/_internal/self_outdated_check.py,sha256=saxQLB8UzIFtMScquytG10TOTsYVFJQ_mkW1NY-46wE,8378
+pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_internal/utils/__pycache__/__init__.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/_jaraco_text.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/_log.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/appdirs.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/compat.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/compatibility_tags.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/datetime.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/deprecation.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/direct_url_helpers.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/egg_link.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/encoding.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/entrypoints.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/filesystem.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/filetypes.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/glibc.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/hashes.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/logging.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/misc.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/models.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/packaging.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/setuptools_build.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/subprocess.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/temp_dir.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/unpacking.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/urls.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/virtualenv.cpython-312.pyc,,
+pip/_internal/utils/__pycache__/wheel.cpython-312.pyc,,
+pip/_internal/utils/_jaraco_text.py,sha256=yvDGelTVugRayPaOF2k4ab0Ky4d3uOkAfuOQjASjImY,3351
+pip/_internal/utils/_log.py,sha256=-jHLOE_THaZz5BFcCnoSL9EYAtJ0nXem49s9of4jvKw,1015
+pip/_internal/utils/appdirs.py,sha256=swgcTKOm3daLeXTW6v5BUS2Ti2RvEnGRQYH_yDXklAo,1665
+pip/_internal/utils/compat.py,sha256=ACyBfLgj3_XG-iA5omEDrXqDM0cQKzi8h8HRBInzG6Q,1884
+pip/_internal/utils/compatibility_tags.py,sha256=ydin8QG8BHqYRsPY4OL6cmb44CbqXl1T0xxS97VhHkk,5377
+pip/_internal/utils/datetime.py,sha256=m21Y3wAtQc-ji6Veb6k_M5g6A0ZyFI4egchTdnwh-pQ,242
+pip/_internal/utils/deprecation.py,sha256=NKo8VqLioJ4nnXXGmW4KdasxF90EFHkZaHeX1fT08C8,3627
+pip/_internal/utils/direct_url_helpers.py,sha256=6F1tc2rcKaCZmgfVwsE6ObIe_Pux23mUVYA-2D9wCFc,3206
+pip/_internal/utils/egg_link.py,sha256=0FePZoUYKv4RGQ2t6x7w5Z427wbA_Uo3WZnAkrgsuqo,2463
+pip/_internal/utils/encoding.py,sha256=qqsXDtiwMIjXMEiIVSaOjwH5YmirCaK-dIzb6-XJsL0,1169
+pip/_internal/utils/entrypoints.py,sha256=YlhLTRl2oHBAuqhc-zmL7USS67TPWVHImjeAQHreZTQ,3064
+pip/_internal/utils/filesystem.py,sha256=RhMIXUaNVMGjc3rhsDahWQ4MavvEQDdqXqgq-F6fpw8,5122
+pip/_internal/utils/filetypes.py,sha256=i8XAQ0eFCog26Fw9yV0Yb1ygAqKYB1w9Cz9n0fj8gZU,716
+pip/_internal/utils/glibc.py,sha256=Mesxxgg3BLxheLZx-dSf30b6gKpOgdVXw6W--uHSszQ,3113
+pip/_internal/utils/hashes.py,sha256=MjOigC75z6qoRMkgHiHqot7eqxfwDZSrEflJMPm-bHE,5118
+pip/_internal/utils/logging.py,sha256=fdtuZJ-AKkqwDTANDvGcBEpssL8el7T1jnwk1CnZl3Y,11603
+pip/_internal/utils/misc.py,sha256=fNXwaeeikvnUt4CPMFIL4-IQbZDxxjj4jDpzCi4ZsOw,23623
+pip/_internal/utils/models.py,sha256=5GoYU586SrxURMvDn_jBMJInitviJg4O5-iOU-6I0WY,1193
+pip/_internal/utils/packaging.py,sha256=5Wm6_x7lKrlqVjPI5MBN_RurcRHwVYoQ7Ksrs84de7s,2108
+pip/_internal/utils/setuptools_build.py,sha256=ouXpud-jeS8xPyTPsXJ-m34NPvK5os45otAzdSV_IJE,4435
+pip/_internal/utils/subprocess.py,sha256=zzdimb75jVLE1GU4WlTZ055gczhD7n1y1xTcNc7vNZQ,9207
+pip/_internal/utils/temp_dir.py,sha256=DUAw22uFruQdK43i2L2K53C-CDjRCPeAsBKJpu-rHQ4,9312
+pip/_internal/utils/unpacking.py,sha256=SBb2iV1crb89MDRTEKY86R4A_UOWApTQn9VQVcMDOlE,8821
+pip/_internal/utils/urls.py,sha256=AhaesUGl-9it6uvG6fsFPOr9ynFpGaTMk4t5XTX7Z_Q,1759
+pip/_internal/utils/virtualenv.py,sha256=S6f7csYorRpiD6cvn3jISZYc3I8PJC43H5iMFpRAEDU,3456
+pip/_internal/utils/wheel.py,sha256=i4BwUNHattzN0ixy3HBAF04tZPRh2CcxaT6t86viwkE,4499
+pip/_internal/vcs/__init__.py,sha256=UAqvzpbi0VbZo3Ub6skEeZAw-ooIZR-zX_WpCbxyCoU,596
+pip/_internal/vcs/__pycache__/__init__.cpython-312.pyc,,
+pip/_internal/vcs/__pycache__/bazaar.cpython-312.pyc,,
+pip/_internal/vcs/__pycache__/git.cpython-312.pyc,,
+pip/_internal/vcs/__pycache__/mercurial.cpython-312.pyc,,
+pip/_internal/vcs/__pycache__/subversion.cpython-312.pyc,,
+pip/_internal/vcs/__pycache__/versioncontrol.cpython-312.pyc,,
+pip/_internal/vcs/bazaar.py,sha256=j0oin0fpGRHcCFCxEcpPCQoFEvA-DMLULKdGP8Nv76o,3519
+pip/_internal/vcs/git.py,sha256=CeKBGJnl6uskvvjkAUXrJVxbHJrpS_B_pyfFdjL3CRc,18121
+pip/_internal/vcs/mercurial.py,sha256=oULOhzJ2Uie-06d1omkL-_Gc6meGaUkyogvqG9ZCyPs,5249
+pip/_internal/vcs/subversion.py,sha256=vhZs8L-TNggXqM1bbhl-FpbxE3TrIB6Tgnx8fh3S2HE,11729
+pip/_internal/vcs/versioncontrol.py,sha256=3eIjtOMYvOY5qP6BMYIYDZ375CSuec6kSEB0bOo1cSs,22787
+pip/_internal/wheel_builder.py,sha256=qTTzQV8F6b1jNsFCda1TRQC8J7gK-m7iuRNgKo7Dj68,11801
+pip/_vendor/__init__.py,sha256=U51NPwXdA-wXOiANIQncYjcMp6txgeOL5nHxksJeyas,4993
+pip/_vendor/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/__pycache__/six.cpython-312.pyc,,
+pip/_vendor/__pycache__/typing_extensions.cpython-312.pyc,,
+pip/_vendor/cachecontrol/__init__.py,sha256=ctHagMhQXuvQDdm4TirZrwDOT5H8oBNAJqzdKI6sovk,676
+pip/_vendor/cachecontrol/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-312.pyc,,
+pip/_vendor/cachecontrol/__pycache__/adapter.cpython-312.pyc,,
+pip/_vendor/cachecontrol/__pycache__/cache.cpython-312.pyc,,
+pip/_vendor/cachecontrol/__pycache__/controller.cpython-312.pyc,,
+pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-312.pyc,,
+pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-312.pyc,,
+pip/_vendor/cachecontrol/__pycache__/serialize.cpython-312.pyc,,
+pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-312.pyc,,
+pip/_vendor/cachecontrol/_cmd.py,sha256=iist2EpzJvDVIhMAxXq8iFnTBsiZAd6iplxfmNboNyk,1737
+pip/_vendor/cachecontrol/adapter.py,sha256=_CcWvUP9048qAZjsNqViaHbdcLs9mmFNixVfpO7oebE,6392
+pip/_vendor/cachecontrol/cache.py,sha256=OTQj72tUf8C1uEgczdl3Gc8vkldSzsTITKtDGKMx4z8,1952
+pip/_vendor/cachecontrol/caches/__init__.py,sha256=dtrrroK5BnADR1GWjCZ19aZ0tFsMfvFBtLQQU1sp_ag,303
+pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-312.pyc,,
+pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-312.pyc,,
+pip/_vendor/cachecontrol/caches/file_cache.py,sha256=3z8AWKD-vfKeiJqIzLmJyIYtR2yd6Tsh3u1TyLRQoIQ,5352
+pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=9rmqwtYu_ljVkW6_oLqbC7EaX_a8YT_yLuna-eS0dgo,1386
+pip/_vendor/cachecontrol/controller.py,sha256=keCFA3ZaNVaWTwHd6F1zqWhb4vyvNx_UvZuo5iIYMfo,18384
+pip/_vendor/cachecontrol/filewrapper.py,sha256=STttGmIPBvZzt2b51dUOwoWX5crcMCpKZOisM3f5BNc,4292
+pip/_vendor/cachecontrol/heuristics.py,sha256=fdFbk9W8IeLrjteIz_fK4mj2HD_Y7COXF2Uc8TgjT1c,4828
+pip/_vendor/cachecontrol/serialize.py,sha256=0dHeMaDwysVAAnGVlhMOP4tDliohgNK0Jxk_zsOiWxw,7173
+pip/_vendor/cachecontrol/wrapper.py,sha256=hsGc7g8QGQTT-4f8tgz3AM5qwScg6FO0BSdLSRdEvpU,1417
+pip/_vendor/certifi/__init__.py,sha256=L_j-d0kYuA_MzA2_2hraF1ovf6KT6DTquRdV3paQwOk,94
+pip/_vendor/certifi/__main__.py,sha256=1k3Cr95vCxxGRGDljrW3wMdpZdL3Nhf0u1n-k2qdsCY,255
+pip/_vendor/certifi/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/certifi/__pycache__/__main__.cpython-312.pyc,,
+pip/_vendor/certifi/__pycache__/core.cpython-312.pyc,,
+pip/_vendor/certifi/cacert.pem,sha256=eU0Dn_3yd8BH4m8sfVj4Glhl2KDrcCSg-sEWT-pNJ88,281617
+pip/_vendor/certifi/core.py,sha256=DNTl8b_B6C4vO3Vc9_q2uvwHpNnBQoy5onDC4McImxc,4531
+pip/_vendor/chardet/__init__.py,sha256=57R-HSxj0PWmILMN0GFmUNqEMfrEVSamXyjD-W6_fbs,4797
+pip/_vendor/chardet/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/big5freq.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/big5prober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/chardistribution.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/charsetprober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/codingstatemachinedict.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/cp949prober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/enums.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/escprober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/escsm.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/eucjpprober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/euckrfreq.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/euckrprober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/euctwfreq.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/euctwprober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/gb2312freq.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/gb2312prober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/hebrewprober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/jisfreq.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/johabfreq.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/johabprober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/jpcntx.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/langthaimodel.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/latin1prober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/macromanprober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/mbcssm.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/resultdict.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/sjisprober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/universaldetector.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/utf1632prober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/utf8prober.cpython-312.pyc,,
+pip/_vendor/chardet/__pycache__/version.cpython-312.pyc,,
+pip/_vendor/chardet/big5freq.py,sha256=ltcfP-3PjlNHCoo5e4a7C4z-2DhBTXRfY6jbMbB7P30,31274
+pip/_vendor/chardet/big5prober.py,sha256=lPMfwCX6v2AaPgvFh_cSWZcgLDbWiFCHLZ_p9RQ9uxE,1763
+pip/_vendor/chardet/chardistribution.py,sha256=13B8XUG4oXDuLdXvfbIWwLFeR-ZU21AqTS1zcdON8bU,10032
+pip/_vendor/chardet/charsetgroupprober.py,sha256=UKK3SaIZB2PCdKSIS0gnvMtLR9JJX62M-fZJu3OlWyg,3915
+pip/_vendor/chardet/charsetprober.py,sha256=L3t8_wIOov8em-vZWOcbkdsrwe43N6_gqNh5pH7WPd4,5420
+pip/_vendor/chardet/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_vendor/chardet/cli/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-312.pyc,,
+pip/_vendor/chardet/cli/chardetect.py,sha256=zibMVg5RpKb-ME9_7EYG4ZM2Sf07NHcQzZ12U-rYJho,3242
+pip/_vendor/chardet/codingstatemachine.py,sha256=K7k69sw3jY5DmTXoSJQVsUtFIQKYPQVOSJJhBuGv_yE,3732
+pip/_vendor/chardet/codingstatemachinedict.py,sha256=0GY3Hi2qIZvDrOOJ3AtqppM1RsYxr_66ER4EHjuMiMc,542
+pip/_vendor/chardet/cp949prober.py,sha256=0jKRV7fECuWI16rNnks0ZECKA1iZYCIEaP8A1ZvjUSI,1860
+pip/_vendor/chardet/enums.py,sha256=TzECiZoCKNMqgwU76cPCeKWFBqaWvAdLMev5_bCkhY8,1683
+pip/_vendor/chardet/escprober.py,sha256=Kho48X65xE0scFylIdeJjM2bcbvRvv0h0WUbMWrJD3A,4006
+pip/_vendor/chardet/escsm.py,sha256=AqyXpA2FQFD7k-buBty_7itGEYkhmVa8X09NLRul3QM,12176
+pip/_vendor/chardet/eucjpprober.py,sha256=5KYaM9fsxkRYzw1b5k0fL-j_-ezIw-ij9r97a9MHxLY,3934
+pip/_vendor/chardet/euckrfreq.py,sha256=3mHuRvXfsq_QcQysDQFb8qSudvTiol71C6Ic2w57tKM,13566
+pip/_vendor/chardet/euckrprober.py,sha256=hiFT6wM174GIwRvqDsIcuOc-dDsq2uPKMKbyV8-1Xnc,1753
+pip/_vendor/chardet/euctwfreq.py,sha256=2alILE1Lh5eqiFJZjzRkMQXolNJRHY5oBQd-vmZYFFM,36913
+pip/_vendor/chardet/euctwprober.py,sha256=NxbpNdBtU0VFI0bKfGfDkpP7S2_8_6FlO87dVH0ogws,1753
+pip/_vendor/chardet/gb2312freq.py,sha256=49OrdXzD-HXqwavkqjo8Z7gvs58hONNzDhAyMENNkvY,20735
+pip/_vendor/chardet/gb2312prober.py,sha256=KPEBueaSLSvBpFeINMu0D6TgHcR90e5PaQawifzF4o0,1759
+pip/_vendor/chardet/hebrewprober.py,sha256=96T_Lj_OmW-fK7JrSHojYjyG3fsGgbzkoTNleZ3kfYE,14537
+pip/_vendor/chardet/jisfreq.py,sha256=mm8tfrwqhpOd3wzZKS4NJqkYBQVcDfTM2JiQ5aW932E,25796
+pip/_vendor/chardet/johabfreq.py,sha256=dBpOYG34GRX6SL8k_LbS9rxZPMjLjoMlgZ03Pz5Hmqc,42498
+pip/_vendor/chardet/johabprober.py,sha256=O1Qw9nVzRnun7vZp4UZM7wvJSv9W941mEU9uDMnY3DU,1752
+pip/_vendor/chardet/jpcntx.py,sha256=uhHrYWkLxE_rF5OkHKInm0HUsrjgKHHVQvtt3UcvotA,27055
+pip/_vendor/chardet/langbulgarianmodel.py,sha256=vmbvYFP8SZkSxoBvLkFqKiH1sjma5ihk3PTpdy71Rr4,104562
+pip/_vendor/chardet/langgreekmodel.py,sha256=JfB7bupjjJH2w3X_mYnQr9cJA_7EuITC2cRW13fUjeI,98484
+pip/_vendor/chardet/langhebrewmodel.py,sha256=3HXHaLQPNAGcXnJjkIJfozNZLTvTJmf4W5Awi6zRRKc,98196
+pip/_vendor/chardet/langhungarianmodel.py,sha256=WxbeQIxkv8YtApiNqxQcvj-tMycsoI4Xy-fwkDHpP_Y,101363
+pip/_vendor/chardet/langrussianmodel.py,sha256=s395bTZ87ESTrZCOdgXbEjZ9P1iGPwCl_8xSsac_DLY,128035
+pip/_vendor/chardet/langthaimodel.py,sha256=7bJlQitRpTnVGABmbSznHnJwOHDy3InkTvtFUx13WQI,102774
+pip/_vendor/chardet/langturkishmodel.py,sha256=XY0eGdTIy4eQ9Xg1LVPZacb-UBhHBR-cq0IpPVHowKc,95372
+pip/_vendor/chardet/latin1prober.py,sha256=p15EEmFbmQUwbKLC7lOJVGHEZwcG45ubEZYTGu01J5g,5380
+pip/_vendor/chardet/macromanprober.py,sha256=9anfzmY6TBfUPDyBDOdY07kqmTHpZ1tK0jL-p1JWcOY,6077
+pip/_vendor/chardet/mbcharsetprober.py,sha256=Wr04WNI4F3X_VxEverNG-H25g7u-MDDKlNt-JGj-_uU,3715
+pip/_vendor/chardet/mbcsgroupprober.py,sha256=iRpaNBjV0DNwYPu_z6TiHgRpwYahiM7ztI_4kZ4Uz9A,2131
+pip/_vendor/chardet/mbcssm.py,sha256=hUtPvDYgWDaA2dWdgLsshbwRfm3Q5YRlRogdmeRUNQw,30391
+pip/_vendor/chardet/metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/chardet/metadata/__pycache__/languages.cpython-312.pyc,,
+pip/_vendor/chardet/metadata/languages.py,sha256=FhvBIdZFxRQ-dTwkb_0madRKgVBCaUMQz9I5xqjE5iQ,13560
+pip/_vendor/chardet/resultdict.py,sha256=ez4FRvN5KaSosJeJ2WzUyKdDdg35HDy_SSLPXKCdt5M,402
+pip/_vendor/chardet/sbcharsetprober.py,sha256=-nd3F90i7GpXLjehLVHqVBE0KlWzGvQUPETLBNn4o6U,6400
+pip/_vendor/chardet/sbcsgroupprober.py,sha256=gcgI0fOfgw_3YTClpbra_MNxwyEyJ3eUXraoLHYb59E,4137
+pip/_vendor/chardet/sjisprober.py,sha256=aqQufMzRw46ZpFlzmYaYeT2-nzmKb-hmcrApppJ862k,4007
+pip/_vendor/chardet/universaldetector.py,sha256=xYBrg4x0dd9WnT8qclfADVD9ondrUNkqPmvte1pa520,14848
+pip/_vendor/chardet/utf1632prober.py,sha256=pw1epGdMj1hDGiCu1AHqqzOEfjX8MVdiW7O1BlT8-eQ,8505
+pip/_vendor/chardet/utf8prober.py,sha256=8m08Ub5490H4jQ6LYXvFysGtgKoKsHUd2zH_i8_TnVw,2812
+pip/_vendor/chardet/version.py,sha256=lGtJcxGM44Qz4Cbk4rbbmrKxnNr1-97U25TameLehZw,244
+pip/_vendor/colorama/__init__.py,sha256=wePQA4U20tKgYARySLEC047ucNX-g8pRLpYBuiHlLb8,266
+pip/_vendor/colorama/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/colorama/__pycache__/ansi.cpython-312.pyc,,
+pip/_vendor/colorama/__pycache__/ansitowin32.cpython-312.pyc,,
+pip/_vendor/colorama/__pycache__/initialise.cpython-312.pyc,,
+pip/_vendor/colorama/__pycache__/win32.cpython-312.pyc,,
+pip/_vendor/colorama/__pycache__/winterm.cpython-312.pyc,,
+pip/_vendor/colorama/ansi.py,sha256=Top4EeEuaQdBWdteKMEcGOTeKeF19Q-Wo_6_Cj5kOzQ,2522
+pip/_vendor/colorama/ansitowin32.py,sha256=vPNYa3OZbxjbuFyaVo0Tmhmy1FZ1lKMWCnT7odXpItk,11128
+pip/_vendor/colorama/initialise.py,sha256=-hIny86ClXo39ixh5iSCfUIa2f_h_bgKRDW7gqs-KLU,3325
+pip/_vendor/colorama/tests/__init__.py,sha256=MkgPAEzGQd-Rq0w0PZXSX2LadRWhUECcisJY8lSrm4Q,75
+pip/_vendor/colorama/tests/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/colorama/tests/__pycache__/ansi_test.cpython-312.pyc,,
+pip/_vendor/colorama/tests/__pycache__/ansitowin32_test.cpython-312.pyc,,
+pip/_vendor/colorama/tests/__pycache__/initialise_test.cpython-312.pyc,,
+pip/_vendor/colorama/tests/__pycache__/isatty_test.cpython-312.pyc,,
+pip/_vendor/colorama/tests/__pycache__/utils.cpython-312.pyc,,
+pip/_vendor/colorama/tests/__pycache__/winterm_test.cpython-312.pyc,,
+pip/_vendor/colorama/tests/ansi_test.py,sha256=FeViDrUINIZcr505PAxvU4AjXz1asEiALs9GXMhwRaE,2839
+pip/_vendor/colorama/tests/ansitowin32_test.py,sha256=RN7AIhMJ5EqDsYaCjVo-o4u8JzDD4ukJbmevWKS70rY,10678
+pip/_vendor/colorama/tests/initialise_test.py,sha256=BbPy-XfyHwJ6zKozuQOvNvQZzsx9vdb_0bYXn7hsBTc,6741
+pip/_vendor/colorama/tests/isatty_test.py,sha256=Pg26LRpv0yQDB5Ac-sxgVXG7hsA1NYvapFgApZfYzZg,1866
+pip/_vendor/colorama/tests/utils.py,sha256=1IIRylG39z5-dzq09R_ngufxyPZxgldNbrxKxUGwGKE,1079
+pip/_vendor/colorama/tests/winterm_test.py,sha256=qoWFPEjym5gm2RuMwpf3pOis3a5r_PJZFCzK254JL8A,3709
+pip/_vendor/colorama/win32.py,sha256=YQOKwMTwtGBbsY4dL5HYTvwTeP9wIQra5MvPNddpxZs,6181
+pip/_vendor/colorama/winterm.py,sha256=XCQFDHjPi6AHYNdZwy0tA02H-Jh48Jp-HvCjeLeLp3U,7134
+pip/_vendor/distlib/__init__.py,sha256=hJKF7FHoqbmGckncDuEINWo_OYkDNiHODtYXSMcvjcc,625
+pip/_vendor/distlib/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/distlib/__pycache__/compat.cpython-312.pyc,,
+pip/_vendor/distlib/__pycache__/database.cpython-312.pyc,,
+pip/_vendor/distlib/__pycache__/index.cpython-312.pyc,,
+pip/_vendor/distlib/__pycache__/locators.cpython-312.pyc,,
+pip/_vendor/distlib/__pycache__/manifest.cpython-312.pyc,,
+pip/_vendor/distlib/__pycache__/markers.cpython-312.pyc,,
+pip/_vendor/distlib/__pycache__/metadata.cpython-312.pyc,,
+pip/_vendor/distlib/__pycache__/resources.cpython-312.pyc,,
+pip/_vendor/distlib/__pycache__/scripts.cpython-312.pyc,,
+pip/_vendor/distlib/__pycache__/util.cpython-312.pyc,,
+pip/_vendor/distlib/__pycache__/version.cpython-312.pyc,,
+pip/_vendor/distlib/__pycache__/wheel.cpython-312.pyc,,
+pip/_vendor/distlib/compat.py,sha256=Un-uIBvy02w-D267OG4VEhuddqWgKj9nNkxVltAb75w,41487
+pip/_vendor/distlib/database.py,sha256=0V9Qvs0Vrxa2F_-hLWitIyVyRifJ0pCxyOI-kEOBwsA,51965
+pip/_vendor/distlib/index.py,sha256=lTbw268rRhj8dw1sib3VZ_0EhSGgoJO3FKJzSFMOaeA,20797
+pip/_vendor/distlib/locators.py,sha256=o1r_M86_bRLafSpetmyfX8KRtFu-_Q58abvQrnOSnbA,51767
+pip/_vendor/distlib/manifest.py,sha256=3qfmAmVwxRqU1o23AlfXrQGZzh6g_GGzTAP_Hb9C5zQ,14168
+pip/_vendor/distlib/markers.py,sha256=n3DfOh1yvZ_8EW7atMyoYeZFXjYla0Nz0itQlojCd0A,5268
+pip/_vendor/distlib/metadata.py,sha256=pB9WZ9mBfmQxc9OVIldLS5CjOoQRvKAvUwwQyKwKQtQ,39693
+pip/_vendor/distlib/resources.py,sha256=LwbPksc0A1JMbi6XnuPdMBUn83X7BPuFNWqPGEKI698,10820
+pip/_vendor/distlib/scripts.py,sha256=nQFXN6G7nOWNDUyxirUep-3WOlJhB7McvCs9zOnkGTI,18315
+pip/_vendor/distlib/util.py,sha256=XSznxEi_i3T20UJuaVc0qXHz5ksGUCW1khYlBprN_QE,67530
+pip/_vendor/distlib/version.py,sha256=9pXkduchve_aN7JG6iL9VTYV_kqNSGoc2Dwl8JuySnQ,23747
+pip/_vendor/distlib/wheel.py,sha256=FVQCve8u-L0QYk5-YTZc7s4WmNQdvjRWTK08KXzZVX4,43958
+pip/_vendor/distro/__init__.py,sha256=2fHjF-SfgPvjyNZ1iHh_wjqWdR_Yo5ODHwZC0jLBPhc,981
+pip/_vendor/distro/__main__.py,sha256=bu9d3TifoKciZFcqRBuygV3GSuThnVD_m2IK4cz96Vs,64
+pip/_vendor/distro/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/distro/__pycache__/__main__.cpython-312.pyc,,
+pip/_vendor/distro/__pycache__/distro.cpython-312.pyc,,
+pip/_vendor/distro/distro.py,sha256=UZO1LjIhtFCMdlbiz39gj3raV-Amf3SBwzGzfApiMHw,49330
+pip/_vendor/idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849
+pip/_vendor/idna/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/idna/__pycache__/codec.cpython-312.pyc,,
+pip/_vendor/idna/__pycache__/compat.cpython-312.pyc,,
+pip/_vendor/idna/__pycache__/core.cpython-312.pyc,,
+pip/_vendor/idna/__pycache__/idnadata.cpython-312.pyc,,
+pip/_vendor/idna/__pycache__/intranges.cpython-312.pyc,,
+pip/_vendor/idna/__pycache__/package_data.cpython-312.pyc,,
+pip/_vendor/idna/__pycache__/uts46data.cpython-312.pyc,,
+pip/_vendor/idna/codec.py,sha256=6ly5odKfqrytKT9_7UrlGklHnf1DSK2r9C6cSM4sa28,3374
+pip/_vendor/idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321
+pip/_vendor/idna/core.py,sha256=kkCFNJOrE6I3WwBTXcGNuc24V_QZQ8AULE6EYe1iHlU,12813
+pip/_vendor/idna/idnadata.py,sha256=9NIhTqC2piUpeIMOGZ9Bu_7eAFQ-Ic8TkP_hOzUpnDc,78344
+pip/_vendor/idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881
+pip/_vendor/idna/package_data.py,sha256=C_jHJzmX8PI4xq0jpzmcTMxpb5lDsq4o5VyxQzlVrZE,21
+pip/_vendor/idna/uts46data.py,sha256=zvjZU24s58_uAS850Mcd0NnD0X7_gCMAMjzWNIeUJdc,206539
+pip/_vendor/msgpack/__init__.py,sha256=hyGhlnmcJkxryJBKC3X5FnEph375kQoL_mG8LZUuXgY,1132
+pip/_vendor/msgpack/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/msgpack/__pycache__/exceptions.cpython-312.pyc,,
+pip/_vendor/msgpack/__pycache__/ext.cpython-312.pyc,,
+pip/_vendor/msgpack/__pycache__/fallback.cpython-312.pyc,,
+pip/_vendor/msgpack/exceptions.py,sha256=dCTWei8dpkrMsQDcjQk74ATl9HsIBH0ybt8zOPNqMYc,1081
+pip/_vendor/msgpack/ext.py,sha256=C5MK8JhVYGYFWPvxsORsqZAnvOXefYQ57m1Ym0luW5M,6079
+pip/_vendor/msgpack/fallback.py,sha256=tvNBHyxxFbuVlC8GZShETClJxjLiDMOja4XwwyvNm2g,34544
+pip/_vendor/packaging/__about__.py,sha256=ugASIO2w1oUyH8_COqQ2X_s0rDhjbhQC3yJocD03h2c,661
+pip/_vendor/packaging/__init__.py,sha256=b9Kk5MF7KxhhLgcDmiUWukN-LatWFxPdNug0joPhHSk,497
+pip/_vendor/packaging/__pycache__/__about__.cpython-312.pyc,,
+pip/_vendor/packaging/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/packaging/__pycache__/_manylinux.cpython-312.pyc,,
+pip/_vendor/packaging/__pycache__/_musllinux.cpython-312.pyc,,
+pip/_vendor/packaging/__pycache__/_structures.cpython-312.pyc,,
+pip/_vendor/packaging/__pycache__/markers.cpython-312.pyc,,
+pip/_vendor/packaging/__pycache__/requirements.cpython-312.pyc,,
+pip/_vendor/packaging/__pycache__/specifiers.cpython-312.pyc,,
+pip/_vendor/packaging/__pycache__/tags.cpython-312.pyc,,
+pip/_vendor/packaging/__pycache__/utils.cpython-312.pyc,,
+pip/_vendor/packaging/__pycache__/version.cpython-312.pyc,,
+pip/_vendor/packaging/_manylinux.py,sha256=XcbiXB-qcjv3bcohp6N98TMpOP4_j3m-iOA8ptK2GWY,11488
+pip/_vendor/packaging/_musllinux.py,sha256=_KGgY_qc7vhMGpoqss25n2hiLCNKRtvz9mCrS7gkqyc,4378
+pip/_vendor/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
+pip/_vendor/packaging/markers.py,sha256=AJBOcY8Oq0kYc570KuuPTkvuqjAlhufaE2c9sCUbm64,8487
+pip/_vendor/packaging/requirements.py,sha256=NtDlPBtojpn1IUC85iMjPNsUmufjpSlwnNA-Xb4m5NA,4676
+pip/_vendor/packaging/specifiers.py,sha256=LRQ0kFsHrl5qfcFNEEJrIFYsnIHQUJXY9fIsakTrrqE,30110
+pip/_vendor/packaging/tags.py,sha256=lmsnGNiJ8C4D_Pf9PbM0qgbZvD9kmB9lpZBQUZa3R_Y,15699
+pip/_vendor/packaging/utils.py,sha256=dJjeat3BS-TYn1RrUFVwufUMasbtzLfYRoy_HXENeFQ,4200
+pip/_vendor/packaging/version.py,sha256=_fLRNrFrxYcHVfyo8vk9j8s6JM8N_xsSxVFr6RJyco8,14665
+pip/_vendor/pkg_resources/__init__.py,sha256=hTAeJCNYb7dJseIDVsYK3mPQep_gphj4tQh-bspX8bg,109364
+pip/_vendor/pkg_resources/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/platformdirs/__init__.py,sha256=SkhEYVyC_HUHC6KX7n4M_6coyRMtEB38QMyOYIAX6Yk,20155
+pip/_vendor/platformdirs/__main__.py,sha256=fVvSiTzr2-RM6IsjWjj4fkaOtDOgDhUWv6sA99do4CQ,1476
+pip/_vendor/platformdirs/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/platformdirs/__pycache__/__main__.cpython-312.pyc,,
+pip/_vendor/platformdirs/__pycache__/android.cpython-312.pyc,,
+pip/_vendor/platformdirs/__pycache__/api.cpython-312.pyc,,
+pip/_vendor/platformdirs/__pycache__/macos.cpython-312.pyc,,
+pip/_vendor/platformdirs/__pycache__/unix.cpython-312.pyc,,
+pip/_vendor/platformdirs/__pycache__/version.cpython-312.pyc,,
+pip/_vendor/platformdirs/__pycache__/windows.cpython-312.pyc,,
+pip/_vendor/platformdirs/android.py,sha256=y_EEMKwYl2-bzYBDovksSn8m76on0Lda8eyJksVQE9U,7211
+pip/_vendor/platformdirs/api.py,sha256=jWtX06jAJytYrkJDOqEls97mCkyHRSZkoqUlbMK5Qew,7132
+pip/_vendor/platformdirs/macos.py,sha256=LueVOoVgGWDBwQb8OFwXkVKfVn33CM1Lkwf1-A86tRQ,3678
+pip/_vendor/platformdirs/unix.py,sha256=22JhR8ZY0aLxSVCFnKrc6f1iz6Gv42K24Daj7aTjfSg,8809
+pip/_vendor/platformdirs/version.py,sha256=mavZTQIJIXfdewEaSTn7EWrNfPZWeRofb-74xqW5f2M,160
+pip/_vendor/platformdirs/windows.py,sha256=4TtbPGoWG2PRgI11uquDa7eRk8TcxvnUNuuMGZItnXc,9573
+pip/_vendor/pygments/__init__.py,sha256=6AuDljQtvf89DTNUyWM7k3oUlP_lq70NU-INKKteOBY,2983
+pip/_vendor/pygments/__main__.py,sha256=es8EKMvXj5yToIfQ-pf3Dv5TnIeeM6sME0LW-n4ecHo,353
+pip/_vendor/pygments/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/pygments/__pycache__/__main__.cpython-312.pyc,,
+pip/_vendor/pygments/__pycache__/cmdline.cpython-312.pyc,,
+pip/_vendor/pygments/__pycache__/console.cpython-312.pyc,,
+pip/_vendor/pygments/__pycache__/filter.cpython-312.pyc,,
+pip/_vendor/pygments/__pycache__/formatter.cpython-312.pyc,,
+pip/_vendor/pygments/__pycache__/lexer.cpython-312.pyc,,
+pip/_vendor/pygments/__pycache__/modeline.cpython-312.pyc,,
+pip/_vendor/pygments/__pycache__/plugin.cpython-312.pyc,,
+pip/_vendor/pygments/__pycache__/regexopt.cpython-312.pyc,,
+pip/_vendor/pygments/__pycache__/scanner.cpython-312.pyc,,
+pip/_vendor/pygments/__pycache__/sphinxext.cpython-312.pyc,,
+pip/_vendor/pygments/__pycache__/style.cpython-312.pyc,,
+pip/_vendor/pygments/__pycache__/token.cpython-312.pyc,,
+pip/_vendor/pygments/__pycache__/unistring.cpython-312.pyc,,
+pip/_vendor/pygments/__pycache__/util.cpython-312.pyc,,
+pip/_vendor/pygments/cmdline.py,sha256=byxYJp9gnjVeyhRlZ3UTMgo_LhkXh1afvN8wJBtAcc8,23685
+pip/_vendor/pygments/console.py,sha256=2wZ5W-U6TudJD1_NLUwjclMpbomFM91lNv11_60sfGY,1697
+pip/_vendor/pygments/filter.py,sha256=j5aLM9a9wSx6eH1oy473oSkJ02hGWNptBlVo4s1g_30,1938
+pip/_vendor/pygments/filters/__init__.py,sha256=h_koYkUFo-FFUxjs564JHUAz7O3yJpVwI6fKN3MYzG0,40386
+pip/_vendor/pygments/filters/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/pygments/formatter.py,sha256=J9OL9hXLJKZk7moUgKwpjW9HNf4WlJFg_o_-Z_S_tTY,4178
+pip/_vendor/pygments/formatters/__init__.py,sha256=_xgAcdFKr0QNYwh_i98AU9hvfP3X2wAkhElFcRRF3Uo,5424
+pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-312.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/bbcode.cpython-312.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/groff.cpython-312.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/html.cpython-312.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/img.cpython-312.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/irc.cpython-312.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/latex.cpython-312.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/other.cpython-312.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/pangomarkup.cpython-312.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/rtf.cpython-312.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/svg.cpython-312.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/terminal.cpython-312.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/terminal256.cpython-312.pyc,,
+pip/_vendor/pygments/formatters/_mapping.py,sha256=1Cw37FuQlNacnxRKmtlPX4nyLoX9_ttko5ZwscNUZZ4,4176
+pip/_vendor/pygments/formatters/bbcode.py,sha256=r1b7wzWTJouADDLh-Z11iRi4iQxD0JKJ1qHl6mOYxsA,3314
+pip/_vendor/pygments/formatters/groff.py,sha256=xy8Zf3tXOo6MWrXh7yPGWx3lVEkg_DhY4CxmsDb0IVo,5094
+pip/_vendor/pygments/formatters/html.py,sha256=PIzAyilNqaTzSSP2slDG2VDLE3qNioWy2rgtSSoviuI,35610
+pip/_vendor/pygments/formatters/img.py,sha256=XKXmg2_XONrR4mtq2jfEU8XCsoln3VSGTw-UYiEokys,21938
+pip/_vendor/pygments/formatters/irc.py,sha256=Ep-m8jd3voFO6Fv57cUGFmz6JVA67IEgyiBOwv0N4a0,4981
+pip/_vendor/pygments/formatters/latex.py,sha256=FGzJ-YqSTE8z_voWPdzvLY5Tq8jE_ygjGjM6dXZJ8-k,19351
+pip/_vendor/pygments/formatters/other.py,sha256=gPxkk5BdAzWTCgbEHg1lpLi-1F6ZPh5A_aotgLXHnzg,5073
+pip/_vendor/pygments/formatters/pangomarkup.py,sha256=6LKnQc8yh49f802bF0sPvbzck4QivMYqqoXAPaYP8uU,2212
+pip/_vendor/pygments/formatters/rtf.py,sha256=aA0v_psW6KZI3N18TKDifxeL6mcF8EDXcPXDWI4vhVQ,5014
+pip/_vendor/pygments/formatters/svg.py,sha256=dQONWypbzfvzGCDtdp3M_NJawScJvM2DiHbx1k-ww7g,7335
+pip/_vendor/pygments/formatters/terminal.py,sha256=FG-rpjRpFmNpiGB4NzIucvxq6sQIXB3HOTo2meTKtrU,4674
+pip/_vendor/pygments/formatters/terminal256.py,sha256=13SJ3D5pFdqZ9zROE6HbWnBDwHvOGE8GlsmqGhprRp4,11753
+pip/_vendor/pygments/lexer.py,sha256=2BpqLlT2ExvOOi7vnjK5nB4Fp-m52ldiPaXMox5uwug,34618
+pip/_vendor/pygments/lexers/__init__.py,sha256=j5KEi5O_VQ5GS59H49l-10gzUOkWKxlwGeVMlGO2MMk,12130
+pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-312.pyc,,
+pip/_vendor/pygments/lexers/__pycache__/python.cpython-312.pyc,,
+pip/_vendor/pygments/lexers/_mapping.py,sha256=Hts4r_ZQ8icftGM7gkBPeED5lyVSv4affFgXYE6Ap04,72281
+pip/_vendor/pygments/lexers/python.py,sha256=c7jnmKFU9DLxTJW0UbwXt6Z9FJqbBlVsWA1Qr9xSA_w,53424
+pip/_vendor/pygments/modeline.py,sha256=eF2vO4LpOGoPvIKKkbPfnyut8hT4UiebZPpb-BYGQdI,986
+pip/_vendor/pygments/plugin.py,sha256=j1Fh310RbV2DQ9nvkmkqvlj38gdyuYKllLnGxbc8sJM,2591
+pip/_vendor/pygments/regexopt.py,sha256=jg1ALogcYGU96TQS9isBl6dCrvw5y5--BP_K-uFk_8s,3072
+pip/_vendor/pygments/scanner.py,sha256=b_nu5_f3HCgSdp5S_aNRBQ1MSCm4ZjDwec2OmTRickw,3092
+pip/_vendor/pygments/sphinxext.py,sha256=wBFYm180qea9JKt__UzhRlNRNhczPDFDaqGD21sbuso,6882
+pip/_vendor/pygments/style.py,sha256=C4qyoJrUTkq-OV3iO-8Vz3UtWYpJwSTdh5_vlGCGdNQ,6257
+pip/_vendor/pygments/styles/__init__.py,sha256=he7HjQx7sC0d2kfTVLjUs0J15mtToJM6M1brwIm9--Q,3700
+pip/_vendor/pygments/styles/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/pygments/token.py,sha256=seNsmcch9OEHXYirh8Ool7w8xDhfNTbLj5rHAC-gc_o,6184
+pip/_vendor/pygments/unistring.py,sha256=FaUfG14NBJEKLQoY9qj6JYeXrpYcLmKulghdxOGFaOc,63223
+pip/_vendor/pygments/util.py,sha256=AEVY0qonyyEMgv4Do2dINrrqUAwUk2XYSqHM650uzek,10230
+pip/_vendor/pyparsing/__init__.py,sha256=9m1JbE2JTLdBG0Mb6B0lEaZj181Wx5cuPXZpsbHEYgE,9116
+pip/_vendor/pyparsing/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/pyparsing/__pycache__/actions.cpython-312.pyc,,
+pip/_vendor/pyparsing/__pycache__/common.cpython-312.pyc,,
+pip/_vendor/pyparsing/__pycache__/core.cpython-312.pyc,,
+pip/_vendor/pyparsing/__pycache__/exceptions.cpython-312.pyc,,
+pip/_vendor/pyparsing/__pycache__/helpers.cpython-312.pyc,,
+pip/_vendor/pyparsing/__pycache__/results.cpython-312.pyc,,
+pip/_vendor/pyparsing/__pycache__/testing.cpython-312.pyc,,
+pip/_vendor/pyparsing/__pycache__/unicode.cpython-312.pyc,,
+pip/_vendor/pyparsing/__pycache__/util.cpython-312.pyc,,
+pip/_vendor/pyparsing/actions.py,sha256=05uaIPOznJPQ7VgRdmGCmG4sDnUPtwgv5qOYIqbL2UY,6567
+pip/_vendor/pyparsing/common.py,sha256=p-3c83E5-DjlkF35G0O9-kjQRpoejP-2_z0hxZ-eol4,13387
+pip/_vendor/pyparsing/core.py,sha256=yvuRlLpXSF8mgk-QhiW3OVLqD9T0rsj9tbibhRH4Yaw,224445
+pip/_vendor/pyparsing/diagram/__init__.py,sha256=nxmDOoYF9NXuLaGYy01tKFjkNReWJlrGFuJNWEiTo84,24215
+pip/_vendor/pyparsing/diagram/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/pyparsing/exceptions.py,sha256=6Jc6W1eDZBzyFu1J0YrcdNFVBC-RINujZmveSnB8Rxw,9523
+pip/_vendor/pyparsing/helpers.py,sha256=BZJHCA8SS0pYio30KGQTc9w2qMOaK4YpZ7hcvHbnTgk,38646
+pip/_vendor/pyparsing/results.py,sha256=9dyqQ-w3MjfmxWbFt8KEPU6IfXeyRdoWp2Og802rUQY,26692
+pip/_vendor/pyparsing/testing.py,sha256=eJncg0p83zm1FTPvM9auNT6oavIvXaibmRFDf1qmwkY,13488
+pip/_vendor/pyparsing/unicode.py,sha256=fAPdsJiARFbkPAih6NkYry0dpj4jPqelGVMlE4wWFW8,10646
+pip/_vendor/pyparsing/util.py,sha256=vTMzTdwSDyV8d_dSgquUTdWgBFoA_W30nfxEJDsshRQ,8670
+pip/_vendor/pyproject_hooks/__init__.py,sha256=kCehmy0UaBa9oVMD7ZIZrnswfnP3LXZ5lvnNJAL5JBM,491
+pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/pyproject_hooks/__pycache__/_compat.cpython-312.pyc,,
+pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-312.pyc,,
+pip/_vendor/pyproject_hooks/_compat.py,sha256=by6evrYnqkisiM-MQcvOKs5bgDMzlOSgZqRHNqf04zE,138
+pip/_vendor/pyproject_hooks/_impl.py,sha256=61GJxzQip0IInhuO69ZI5GbNQ82XEDUB_1Gg5_KtUoc,11920
+pip/_vendor/pyproject_hooks/_in_process/__init__.py,sha256=9gQATptbFkelkIy0OfWFEACzqxXJMQDWCH9rBOAZVwQ,546
+pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/pyproject_hooks/_in_process/__pycache__/_in_process.cpython-312.pyc,,
+pip/_vendor/pyproject_hooks/_in_process/_in_process.py,sha256=m2b34c917IW5o-Q_6TYIHlsK9lSUlNiyrITTUH_zwew,10927
+pip/_vendor/requests/__init__.py,sha256=owujob4dk45Siy4EYtbCKR6wcFph7E04a_v_OuAacBA,5169
+pip/_vendor/requests/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/requests/__pycache__/__version__.cpython-312.pyc,,
+pip/_vendor/requests/__pycache__/_internal_utils.cpython-312.pyc,,
+pip/_vendor/requests/__pycache__/adapters.cpython-312.pyc,,
+pip/_vendor/requests/__pycache__/api.cpython-312.pyc,,
+pip/_vendor/requests/__pycache__/auth.cpython-312.pyc,,
+pip/_vendor/requests/__pycache__/certs.cpython-312.pyc,,
+pip/_vendor/requests/__pycache__/compat.cpython-312.pyc,,
+pip/_vendor/requests/__pycache__/cookies.cpython-312.pyc,,
+pip/_vendor/requests/__pycache__/exceptions.cpython-312.pyc,,
+pip/_vendor/requests/__pycache__/help.cpython-312.pyc,,
+pip/_vendor/requests/__pycache__/hooks.cpython-312.pyc,,
+pip/_vendor/requests/__pycache__/models.cpython-312.pyc,,
+pip/_vendor/requests/__pycache__/packages.cpython-312.pyc,,
+pip/_vendor/requests/__pycache__/sessions.cpython-312.pyc,,
+pip/_vendor/requests/__pycache__/status_codes.cpython-312.pyc,,
+pip/_vendor/requests/__pycache__/structures.cpython-312.pyc,,
+pip/_vendor/requests/__pycache__/utils.cpython-312.pyc,,
+pip/_vendor/requests/__version__.py,sha256=ssI3Ezt7PaxgkOW45GhtwPUclo_SO_ygtIm4A74IOfw,435
+pip/_vendor/requests/_internal_utils.py,sha256=nMQymr4hs32TqVo5AbCrmcJEhvPUh7xXlluyqwslLiQ,1495
+pip/_vendor/requests/adapters.py,sha256=idj6cZcId3L5xNNeJ7ieOLtw3awJk5A64xUfetHwq3M,19697
+pip/_vendor/requests/api.py,sha256=q61xcXq4tmiImrvcSVLTbFyCiD2F-L_-hWKGbz4y8vg,6449
+pip/_vendor/requests/auth.py,sha256=h-HLlVx9j8rKV5hfSAycP2ApOSglTz77R0tz7qCbbEE,10187
+pip/_vendor/requests/certs.py,sha256=PVPooB0jP5hkZEULSCwC074532UFbR2Ptgu0I5zwmCs,575
+pip/_vendor/requests/compat.py,sha256=IhK9quyX0RRuWTNcg6d2JGSAOUbM6mym2p_2XjLTwf4,1286
+pip/_vendor/requests/cookies.py,sha256=kD3kNEcCj-mxbtf5fJsSaT86eGoEYpD3X0CSgpzl7BM,18560
+pip/_vendor/requests/exceptions.py,sha256=FA-_kVwBZ2jhXauRctN_ewHVK25b-fj0Azyz1THQ0Kk,3823
+pip/_vendor/requests/help.py,sha256=FnAAklv8MGm_qb2UilDQgS6l0cUttiCFKUjx0zn2XNA,3879
+pip/_vendor/requests/hooks.py,sha256=CiuysiHA39V5UfcCBXFIx83IrDpuwfN9RcTUgv28ftQ,733
+pip/_vendor/requests/models.py,sha256=dDZ-iThotky-Noq9yy97cUEJhr3wnY6mv-xR_ePg_lk,35288
+pip/_vendor/requests/packages.py,sha256=njJmVifY4aSctuW3PP5EFRCxjEwMRDO6J_feG2dKWsI,695
+pip/_vendor/requests/sessions.py,sha256=-LvTzrPtetSTrR3buxu4XhdgMrJFLB1q5D7P--L2Xhw,30373
+pip/_vendor/requests/status_codes.py,sha256=FvHmT5uH-_uimtRz5hH9VCbt7VV-Nei2J9upbej6j8g,4235
+pip/_vendor/requests/structures.py,sha256=-IbmhVz06S-5aPSZuUthZ6-6D9XOjRuTXHOabY041XM,2912
+pip/_vendor/requests/utils.py,sha256=BvQDKkLuXCSaVfSW_1blUN0IzJSfNC8njNr8vhKj76Y,33189
+pip/_vendor/resolvelib/__init__.py,sha256=h509TdEcpb5-44JonaU3ex2TM15GVBLjM9CNCPwnTTs,537
+pip/_vendor/resolvelib/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/resolvelib/__pycache__/providers.cpython-312.pyc,,
+pip/_vendor/resolvelib/__pycache__/reporters.cpython-312.pyc,,
+pip/_vendor/resolvelib/__pycache__/resolvers.cpython-312.pyc,,
+pip/_vendor/resolvelib/__pycache__/structs.cpython-312.pyc,,
+pip/_vendor/resolvelib/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-312.pyc,,
+pip/_vendor/resolvelib/compat/collections_abc.py,sha256=uy8xUZ-NDEw916tugUXm8HgwCGiMO0f-RcdnpkfXfOs,156
+pip/_vendor/resolvelib/providers.py,sha256=fuuvVrCetu5gsxPB43ERyjfO8aReS3rFQHpDgiItbs4,5871
+pip/_vendor/resolvelib/reporters.py,sha256=TSbRmWzTc26w0ggsV1bxVpeWDB8QNIre6twYl7GIZBE,1601
+pip/_vendor/resolvelib/resolvers.py,sha256=G8rsLZSq64g5VmIq-lB7UcIJ1gjAxIQJmTF4REZleQ0,20511
+pip/_vendor/resolvelib/structs.py,sha256=0_1_XO8z_CLhegP3Vpf9VJ3zJcfLm0NOHRM-i0Ykz3o,4963
+pip/_vendor/rich/__init__.py,sha256=dRxjIL-SbFVY0q3IjSMrfgBTHrm1LZDgLOygVBwiYZc,6090
+pip/_vendor/rich/__main__.py,sha256=TT8sb9PTnsnKhhrGuHkLN0jdN0dtKhtPkEr9CidDbPM,8478
+pip/_vendor/rich/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/__main__.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_cell_widths.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_emoji_codes.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_emoji_replace.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_export_format.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_extension.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_fileno.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_inspect.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_log_render.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_loop.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_null_file.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_palettes.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_pick.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_ratio.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_spinners.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_stack.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_timer.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_win32_console.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_windows.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_windows_renderer.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/_wrap.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/abc.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/align.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/ansi.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/bar.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/box.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/cells.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/color.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/color_triplet.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/columns.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/console.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/constrain.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/containers.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/control.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/default_styles.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/diagnose.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/emoji.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/errors.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/file_proxy.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/filesize.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/highlighter.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/json.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/jupyter.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/layout.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/live.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/live_render.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/logging.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/markup.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/measure.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/padding.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/pager.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/palette.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/panel.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/pretty.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/progress.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/progress_bar.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/prompt.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/protocol.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/region.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/repr.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/rule.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/scope.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/screen.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/segment.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/spinner.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/status.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/style.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/styled.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/syntax.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/table.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/terminal_theme.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/text.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/theme.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/themes.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/traceback.cpython-312.pyc,,
+pip/_vendor/rich/__pycache__/tree.cpython-312.pyc,,
+pip/_vendor/rich/_cell_widths.py,sha256=2n4EiJi3X9sqIq0O16kUZ_zy6UYMd3xFfChlKfnW1Hc,10096
+pip/_vendor/rich/_emoji_codes.py,sha256=hu1VL9nbVdppJrVoijVshRlcRRe_v3dju3Mmd2sKZdY,140235
+pip/_vendor/rich/_emoji_replace.py,sha256=n-kcetsEUx2ZUmhQrfeMNc-teeGhpuSQ5F8VPBsyvDo,1064
+pip/_vendor/rich/_export_format.py,sha256=qxgV3nKnXQu1hfbnRVswPYy-AwIg1X0LSC47cK5s8jk,2100
+pip/_vendor/rich/_extension.py,sha256=Xt47QacCKwYruzjDi-gOBq724JReDj9Cm9xUi5fr-34,265
+pip/_vendor/rich/_fileno.py,sha256=HWZxP5C2ajMbHryvAQZseflVfQoGzsKOHzKGsLD8ynQ,799
+pip/_vendor/rich/_inspect.py,sha256=oZJGw31e64dwXSCmrDnvZbwVb1ZKhWfU8wI3VWohjJk,9695
+pip/_vendor/rich/_log_render.py,sha256=1ByI0PA1ZpxZY3CGJOK54hjlq4X-Bz_boIjIqCd8Kns,3225
+pip/_vendor/rich/_loop.py,sha256=hV_6CLdoPm0va22Wpw4zKqM0RYsz3TZxXj0PoS-9eDQ,1236
+pip/_vendor/rich/_null_file.py,sha256=tGSXk_v-IZmbj1GAzHit8A3kYIQMiCpVsCFfsC-_KJ4,1387
+pip/_vendor/rich/_palettes.py,sha256=cdev1JQKZ0JvlguV9ipHgznTdnvlIzUFDBb0It2PzjI,7063
+pip/_vendor/rich/_pick.py,sha256=evDt8QN4lF5CiwrUIXlOJCntitBCOsI3ZLPEIAVRLJU,423
+pip/_vendor/rich/_ratio.py,sha256=2lLSliL025Y-YMfdfGbutkQDevhcyDqc-DtUYW9mU70,5472
+pip/_vendor/rich/_spinners.py,sha256=U2r1_g_1zSjsjiUdAESc2iAMc3i4ri_S8PYP6kQ5z1I,19919
+pip/_vendor/rich/_stack.py,sha256=-C8OK7rxn3sIUdVwxZBBpeHhIzX0eI-VM3MemYfaXm0,351
+pip/_vendor/rich/_timer.py,sha256=zelxbT6oPFZnNrwWPpc1ktUeAT-Vc4fuFcRZLQGLtMI,417
+pip/_vendor/rich/_win32_console.py,sha256=P0vxI2fcndym1UU1S37XAzQzQnkyY7YqAKmxm24_gug,22820
+pip/_vendor/rich/_windows.py,sha256=dvNl9TmfPzNVxiKk5WDFihErZ5796g2UC9-KGGyfXmk,1926
+pip/_vendor/rich/_windows_renderer.py,sha256=t74ZL3xuDCP3nmTp9pH1L5LiI2cakJuQRQleHCJerlk,2783
+pip/_vendor/rich/_wrap.py,sha256=xfV_9t0Sg6rzimmrDru8fCVmUlalYAcHLDfrJZnbbwQ,1840
+pip/_vendor/rich/abc.py,sha256=ON-E-ZqSSheZ88VrKX2M3PXpFbGEUUZPMa_Af0l-4f0,890
+pip/_vendor/rich/align.py,sha256=Ji-Yokfkhnfe_xMmr4ISjZB07TJXggBCOYoYa-HDAr8,10368
+pip/_vendor/rich/ansi.py,sha256=iD6532QYqnBm6hADulKjrV8l8kFJ-9fEVooHJHH3hMg,6906
+pip/_vendor/rich/bar.py,sha256=a7UD303BccRCrEhGjfMElpv5RFYIinaAhAuqYqhUvmw,3264
+pip/_vendor/rich/box.py,sha256=FJ6nI3jD7h2XNFU138bJUt2HYmWOlRbltoCEuIAZhew,9842
+pip/_vendor/rich/cells.py,sha256=627ztJs9zOL-38HJ7kXBerR-gT8KBfYC8UzEwMJDYYo,4509
+pip/_vendor/rich/color.py,sha256=9Gh958U3f75WVdLTeC0U9nkGTn2n0wnojKpJ6jQEkIE,18224
+pip/_vendor/rich/color_triplet.py,sha256=3lhQkdJbvWPoLDO-AnYImAWmJvV5dlgYNCVZ97ORaN4,1054
+pip/_vendor/rich/columns.py,sha256=HUX0KcMm9dsKNi11fTbiM_h2iDtl8ySCaVcxlalEzq8,7131
+pip/_vendor/rich/console.py,sha256=pDvkbLkvtZIMIwQx_jkZ-seyNl4zGBLviXoWXte9fwg,99218
+pip/_vendor/rich/constrain.py,sha256=1VIPuC8AgtKWrcncQrjBdYqA3JVWysu6jZo1rrh7c7Q,1288
+pip/_vendor/rich/containers.py,sha256=aKgm5UDHn5Nmui6IJaKdsZhbHClh_X7D-_Wg8Ehrr7s,5497
+pip/_vendor/rich/control.py,sha256=DSkHTUQLorfSERAKE_oTAEUFefZnZp4bQb4q8rHbKws,6630
+pip/_vendor/rich/default_styles.py,sha256=-Fe318kMVI_IwciK5POpThcO0-9DYJ67TZAN6DlmlmM,8082
+pip/_vendor/rich/diagnose.py,sha256=an6uouwhKPAlvQhYpNNpGq9EJysfMIOvvCbO3oSoR24,972
+pip/_vendor/rich/emoji.py,sha256=omTF9asaAnsM4yLY94eR_9dgRRSm1lHUszX20D1yYCQ,2501
+pip/_vendor/rich/errors.py,sha256=5pP3Kc5d4QJ_c0KFsxrfyhjiPVe7J1zOqSFbFAzcV-Y,642
+pip/_vendor/rich/file_proxy.py,sha256=Tl9THMDZ-Pk5Wm8sI1gGg_U5DhusmxD-FZ0fUbcU0W0,1683
+pip/_vendor/rich/filesize.py,sha256=9fTLAPCAwHmBXdRv7KZU194jSgNrRb6Wx7RIoBgqeKY,2508
+pip/_vendor/rich/highlighter.py,sha256=p3C1g4QYzezFKdR7NF9EhPbzQDvdPUhGRgSyGGEmPko,9584
+pip/_vendor/rich/json.py,sha256=EYp9ucj-nDjYDkHCV6Mk1ve8nUOpuFLaW76X50Mis2M,5032
+pip/_vendor/rich/jupyter.py,sha256=QyoKoE_8IdCbrtiSHp9TsTSNyTHY0FO5whE7jOTd9UE,3252
+pip/_vendor/rich/layout.py,sha256=RFYL6HdCFsHf9WRpcvi3w-fpj-8O5dMZ8W96VdKNdbI,14007
+pip/_vendor/rich/live.py,sha256=vZzYvu7fqwlv3Gthl2xiw1Dc_O80VlGcCV0DOHwCyDM,14273
+pip/_vendor/rich/live_render.py,sha256=zElm3PrfSIvjOce28zETHMIUf9pFYSUA5o0AflgUP64,3667
+pip/_vendor/rich/logging.py,sha256=uB-cB-3Q4bmXDLLpbOWkmFviw-Fde39zyMV6tKJ2WHQ,11903
+pip/_vendor/rich/markup.py,sha256=xzF4uAafiEeEYDJYt_vUnJOGoTU8RrH-PH7WcWYXjCg,8198
+pip/_vendor/rich/measure.py,sha256=HmrIJX8sWRTHbgh8MxEay_83VkqNW_70s8aKP5ZcYI8,5305
+pip/_vendor/rich/padding.py,sha256=kTFGsdGe0os7tXLnHKpwTI90CXEvrceeZGCshmJy5zw,4970
+pip/_vendor/rich/pager.py,sha256=SO_ETBFKbg3n_AgOzXm41Sv36YxXAyI3_R-KOY2_uSc,828
+pip/_vendor/rich/palette.py,sha256=lInvR1ODDT2f3UZMfL1grq7dY_pDdKHw4bdUgOGaM4Y,3396
+pip/_vendor/rich/panel.py,sha256=wGMe40J8KCGgQoM0LyjRErmGIkv2bsYA71RCXThD0xE,10574
+pip/_vendor/rich/pretty.py,sha256=eLEYN9xVaMNuA6EJVYm4li7HdOHxCqmVKvnOqJpyFt0,35852
+pip/_vendor/rich/progress.py,sha256=n4KF9vky8_5iYeXcyZPEvzyLplWlDvFLkM5JI0Bs08A,59706
+pip/_vendor/rich/progress_bar.py,sha256=cEoBfkc3lLwqba4XKsUpy4vSQKDh2QQ5J2J94-ACFoo,8165
+pip/_vendor/rich/prompt.py,sha256=x0mW-pIPodJM4ry6grgmmLrl8VZp99kqcmdnBe70YYA,11303
+pip/_vendor/rich/protocol.py,sha256=5hHHDDNHckdk8iWH5zEbi-zuIVSF5hbU2jIo47R7lTE,1391
+pip/_vendor/rich/region.py,sha256=rNT9xZrVZTYIXZC0NYn41CJQwYNbR-KecPOxTgQvB8Y,166
+pip/_vendor/rich/repr.py,sha256=9Z8otOmM-tyxnyTodvXlectP60lwahjGiDTrbrxPSTg,4431
+pip/_vendor/rich/rule.py,sha256=0fNaS_aERa3UMRc3T5WMpN_sumtDxfaor2y3of1ftBk,4602
+pip/_vendor/rich/scope.py,sha256=TMUU8qo17thyqQCPqjDLYpg_UU1k5qVd-WwiJvnJVas,2843
+pip/_vendor/rich/screen.py,sha256=YoeReESUhx74grqb0mSSb9lghhysWmFHYhsbMVQjXO8,1591
+pip/_vendor/rich/segment.py,sha256=XLnJEFvcV3bjaVzMNUJiem3n8lvvI9TJ5PTu-IG2uTg,24247
+pip/_vendor/rich/spinner.py,sha256=15koCmF0DQeD8-k28Lpt6X_zJQUlzEhgo_6A6uy47lc,4339
+pip/_vendor/rich/status.py,sha256=gJsIXIZeSo3urOyxRUjs6VrhX5CZrA0NxIQ-dxhCnwo,4425
+pip/_vendor/rich/style.py,sha256=3hiocH_4N8vwRm3-8yFWzM7tSwjjEven69XqWasSQwM,27073
+pip/_vendor/rich/styled.py,sha256=eZNnzGrI4ki_54pgY3Oj0T-x3lxdXTYh4_ryDB24wBU,1258
+pip/_vendor/rich/syntax.py,sha256=jgDiVCK6cpR0NmBOpZmIu-Ud4eaW7fHvjJZkDbjpcSA,35173
+pip/_vendor/rich/table.py,sha256=-WzesL-VJKsaiDU3uyczpJMHy6VCaSewBYJwx8RudI8,39684
+pip/_vendor/rich/terminal_theme.py,sha256=1j5-ufJfnvlAo5Qsi_ACZiXDmwMXzqgmFByObT9-yJY,3370
+pip/_vendor/rich/text.py,sha256=_8JBlSau0c2z8ENOZMi1hJ7M1ZGY408E4-hXjHyyg1A,45525
+pip/_vendor/rich/theme.py,sha256=belFJogzA0W0HysQabKaHOc3RWH2ko3fQAJhoN-AFdo,3777
+pip/_vendor/rich/themes.py,sha256=0xgTLozfabebYtcJtDdC5QkX5IVUEaviqDUJJh4YVFk,102
+pip/_vendor/rich/traceback.py,sha256=yCLVrCtyoFNENd9mkm2xeG3KmqkTwH9xpFOO7p2Bq0A,29604
+pip/_vendor/rich/tree.py,sha256=BMbUYNjS9uodNPfvtY_odmU09GA5QzcMbQ5cJZhllQI,9169
+pip/_vendor/six.py,sha256=TOOfQi7nFGfMrIvtdr6wX4wyHH8M7aknmuLfo2cBBrM,34549
+pip/_vendor/tenacity/__init__.py,sha256=3kvAL6KClq8GFo2KFhmOzskRKSDQI-ubrlfZ8AQEEI0,20493
+pip/_vendor/tenacity/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/tenacity/__pycache__/_asyncio.cpython-312.pyc,,
+pip/_vendor/tenacity/__pycache__/_utils.cpython-312.pyc,,
+pip/_vendor/tenacity/__pycache__/after.cpython-312.pyc,,
+pip/_vendor/tenacity/__pycache__/before.cpython-312.pyc,,
+pip/_vendor/tenacity/__pycache__/before_sleep.cpython-312.pyc,,
+pip/_vendor/tenacity/__pycache__/nap.cpython-312.pyc,,
+pip/_vendor/tenacity/__pycache__/retry.cpython-312.pyc,,
+pip/_vendor/tenacity/__pycache__/stop.cpython-312.pyc,,
+pip/_vendor/tenacity/__pycache__/tornadoweb.cpython-312.pyc,,
+pip/_vendor/tenacity/__pycache__/wait.cpython-312.pyc,,
+pip/_vendor/tenacity/_asyncio.py,sha256=Qi6wgQsGa9MQibYRy3OXqcDQswIZZ00dLOoSUGN-6o8,3551
+pip/_vendor/tenacity/_utils.py,sha256=ubs6a7sxj3JDNRKWCyCU2j5r1CB7rgyONgZzYZq6D_4,2179
+pip/_vendor/tenacity/after.py,sha256=S5NCISScPeIrKwIeXRwdJl3kV9Q4nqZfnNPDx6Hf__g,1682
+pip/_vendor/tenacity/before.py,sha256=dIZE9gmBTffisfwNkK0F1xFwGPV41u5GK70UY4Pi5Kc,1562
+pip/_vendor/tenacity/before_sleep.py,sha256=YmpgN9Y7HGlH97U24vvq_YWb5deaK4_DbiD8ZuFmy-E,2372
+pip/_vendor/tenacity/nap.py,sha256=fRWvnz1aIzbIq9Ap3gAkAZgDH6oo5zxMrU6ZOVByq0I,1383
+pip/_vendor/tenacity/retry.py,sha256=jrzD_mxA5mSTUEdiYB7SHpxltjhPSYZSnSRATb-ggRc,8746
+pip/_vendor/tenacity/stop.py,sha256=YMJs7ZgZfND65PRLqlGB_agpfGXlemx_5Hm4PKnBqpQ,3086
+pip/_vendor/tenacity/tornadoweb.py,sha256=po29_F1Mt8qZpsFjX7EVwAT0ydC_NbVia9gVi7R_wXA,2142
+pip/_vendor/tenacity/wait.py,sha256=3FcBJoCDgym12_dN6xfK8C1gROY0Hn4NSI2u8xv50uE,8024
+pip/_vendor/tomli/__init__.py,sha256=JhUwV66DB1g4Hvt1UQCVMdfCu-IgAV8FXmvDU9onxd4,396
+pip/_vendor/tomli/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/tomli/__pycache__/_parser.cpython-312.pyc,,
+pip/_vendor/tomli/__pycache__/_re.cpython-312.pyc,,
+pip/_vendor/tomli/__pycache__/_types.cpython-312.pyc,,
+pip/_vendor/tomli/_parser.py,sha256=g9-ENaALS-B8dokYpCuzUFalWlog7T-SIYMjLZSWrtM,22633
+pip/_vendor/tomli/_re.py,sha256=dbjg5ChZT23Ka9z9DHOXfdtSpPwUfdgMXnj8NOoly-w,2943
+pip/_vendor/tomli/_types.py,sha256=-GTG2VUqkpxwMqzmVO4F7ybKddIbAnuAHXfmWQcTi3Q,254
+pip/_vendor/truststore/__init__.py,sha256=qzTLSH8PvAkY1fr6QQ2vV-KwE_M83wdXugtpJaP_AbM,403
+pip/_vendor/truststore/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/truststore/__pycache__/_api.cpython-312.pyc,,
+pip/_vendor/truststore/__pycache__/_macos.cpython-312.pyc,,
+pip/_vendor/truststore/__pycache__/_openssl.cpython-312.pyc,,
+pip/_vendor/truststore/__pycache__/_ssl_constants.cpython-312.pyc,,
+pip/_vendor/truststore/__pycache__/_windows.cpython-312.pyc,,
+pip/_vendor/truststore/_api.py,sha256=xjuEu_rlH4hcdJTROImEyOEqdw-F8t5vO2H2BToY0Ro,9893
+pip/_vendor/truststore/_macos.py,sha256=BjvAKoAjXhdIPuxpY124HJIFswDb0pq8DjynzJOVwqc,17694
+pip/_vendor/truststore/_openssl.py,sha256=LLUZ7ZGaio-i5dpKKjKCSeSufmn6T8pi9lDcFnvSyq0,2324
+pip/_vendor/truststore/_ssl_constants.py,sha256=NUD4fVKdSD02ri7-db0tnO0VqLP9aHuzmStcW7tAl08,1130
+pip/_vendor/truststore/_windows.py,sha256=1x_EhROeJ9QK1sMAjfnZC7awYI8UnBJYL-TjACUYI4A,17468
+pip/_vendor/typing_extensions.py,sha256=EWpcpyQnVmc48E9fSyPGs-vXgHcAk9tQABQIxmMsCGk,111130
+pip/_vendor/urllib3/__init__.py,sha256=iXLcYiJySn0GNbWOOZDDApgBL1JgP44EZ8i1760S8Mc,3333
+pip/_vendor/urllib3/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/urllib3/__pycache__/_collections.cpython-312.pyc,,
+pip/_vendor/urllib3/__pycache__/_version.cpython-312.pyc,,
+pip/_vendor/urllib3/__pycache__/connection.cpython-312.pyc,,
+pip/_vendor/urllib3/__pycache__/connectionpool.cpython-312.pyc,,
+pip/_vendor/urllib3/__pycache__/exceptions.cpython-312.pyc,,
+pip/_vendor/urllib3/__pycache__/fields.cpython-312.pyc,,
+pip/_vendor/urllib3/__pycache__/filepost.cpython-312.pyc,,
+pip/_vendor/urllib3/__pycache__/poolmanager.cpython-312.pyc,,
+pip/_vendor/urllib3/__pycache__/request.cpython-312.pyc,,
+pip/_vendor/urllib3/__pycache__/response.cpython-312.pyc,,
+pip/_vendor/urllib3/_collections.py,sha256=pyASJJhW7wdOpqJj9QJA8FyGRfr8E8uUUhqUvhF0728,11372
+pip/_vendor/urllib3/_version.py,sha256=azoM7M7BUADl2kBhMVR6PPf2GhBDI90me1fcnzTwdcw,64
+pip/_vendor/urllib3/connection.py,sha256=92k9td_y4PEiTIjNufCUa1NzMB3J3w0LEdyokYgXnW8,20300
+pip/_vendor/urllib3/connectionpool.py,sha256=Be6q65SR9laoikg-h_jmc_p8OWtEmwgq_Om_Xtig-2M,40285
+pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-312.pyc,,
+pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-312.pyc,,
+pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-312.pyc,,
+pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-312.pyc,,
+pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-312.pyc,,
+pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-312.pyc,,
+pip/_vendor/urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957
+pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-312.pyc,,
+pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-312.pyc,,
+pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=4Xk64qIkPBt09A5q-RIFUuDhNc9mXilVapm7WnYnzRw,17632
+pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=B2JBB2_NRP02xK6DCa1Pa9IuxrPwxzDzZbixQkb7U9M,13922
+pip/_vendor/urllib3/contrib/appengine.py,sha256=VR68eAVE137lxTgjBDwCna5UiBZTOKa01Aj_-5BaCz4,11036
+pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=NlfkW7WMdW8ziqudopjHoW299og1BTWi0IeIibquFwk,4528
+pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=hDJh4MhyY_p-oKlFcYcQaVQRDv6GMmBGuW9yjxyeejM,17081
+pip/_vendor/urllib3/contrib/securetransport.py,sha256=yhZdmVjY6PI6EeFbp7qYOp6-vp1Rkv2NMuOGaEj7pmc,34448
+pip/_vendor/urllib3/contrib/socks.py,sha256=aRi9eWXo9ZEb95XUxef4Z21CFlnnjbEiAo9HOseoMt4,7097
+pip/_vendor/urllib3/exceptions.py,sha256=0Mnno3KHTNfXRfY7638NufOPkUb6mXOm-Lqj-4x2w8A,8217
+pip/_vendor/urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579
+pip/_vendor/urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440
+pip/_vendor/urllib3/packages/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/urllib3/packages/__pycache__/six.cpython-312.pyc,,
+pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-312.pyc,,
+pip/_vendor/urllib3/packages/backports/__pycache__/weakref_finalize.cpython-312.pyc,,
+pip/_vendor/urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417
+pip/_vendor/urllib3/packages/backports/weakref_finalize.py,sha256=tRCal5OAhNSRyb0DhHp-38AtIlCsRP8BxF3NX-6rqIA,5343
+pip/_vendor/urllib3/packages/six.py,sha256=b9LM0wBXv7E7SrbCjAm4wwN-hrH-iNxv18LgWNMMKPo,34665
+pip/_vendor/urllib3/poolmanager.py,sha256=mJmZWy_Mb4-dHbmNCKbDqv3fZS9UF_2bVDuiECHyPaI,20943
+pip/_vendor/urllib3/request.py,sha256=YTWFNr7QIwh7E1W9dde9LM77v2VWTJ5V78XuTTw7D1A,6691
+pip/_vendor/urllib3/response.py,sha256=fmDJAFkG71uFTn-sVSTh2Iw0WmcXQYqkbRjihvwBjU8,30641
+pip/_vendor/urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155
+pip/_vendor/urllib3/util/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/urllib3/util/__pycache__/connection.cpython-312.pyc,,
+pip/_vendor/urllib3/util/__pycache__/proxy.cpython-312.pyc,,
+pip/_vendor/urllib3/util/__pycache__/queue.cpython-312.pyc,,
+pip/_vendor/urllib3/util/__pycache__/request.cpython-312.pyc,,
+pip/_vendor/urllib3/util/__pycache__/response.cpython-312.pyc,,
+pip/_vendor/urllib3/util/__pycache__/retry.cpython-312.pyc,,
+pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-312.pyc,,
+pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-312.pyc,,
+pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-312.pyc,,
+pip/_vendor/urllib3/util/__pycache__/timeout.cpython-312.pyc,,
+pip/_vendor/urllib3/util/__pycache__/url.cpython-312.pyc,,
+pip/_vendor/urllib3/util/__pycache__/wait.cpython-312.pyc,,
+pip/_vendor/urllib3/util/connection.py,sha256=5Lx2B1PW29KxBn2T0xkN1CBgRBa3gGVJBKoQoRogEVk,4901
+pip/_vendor/urllib3/util/proxy.py,sha256=zUvPPCJrp6dOF0N4GAVbOcl6o-4uXKSrGiTkkr5vUS4,1605
+pip/_vendor/urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498
+pip/_vendor/urllib3/util/request.py,sha256=C0OUt2tcU6LRiQJ7YYNP9GvPrSvl7ziIBekQ-5nlBZk,3997
+pip/_vendor/urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510
+pip/_vendor/urllib3/util/retry.py,sha256=6ENvOZ8PBDzh8kgixpql9lIrb2dxH-k7ZmBanJF2Ng4,22050
+pip/_vendor/urllib3/util/ssl_.py,sha256=X4-AqW91aYPhPx6-xbf66yHFQKbqqfC_5Zt4WkLX1Hc,17177
+pip/_vendor/urllib3/util/ssl_match_hostname.py,sha256=Ir4cZVEjmAk8gUAIHWSi7wtOO83UCYABY2xFD1Ql_WA,5758
+pip/_vendor/urllib3/util/ssltransport.py,sha256=NA-u5rMTrDFDFC8QzRKUEKMG0561hOD4qBTr3Z4pv6E,6895
+pip/_vendor/urllib3/util/timeout.py,sha256=cwq4dMk87mJHSBktK1miYJ-85G-3T3RmT20v7SFCpno,10168
+pip/_vendor/urllib3/util/url.py,sha256=lCAE7M5myA8EDdW0sJuyyZhVB9K_j38ljWhHAnFaWoE,14296
+pip/_vendor/urllib3/util/wait.py,sha256=fOX0_faozG2P7iVojQoE1mbydweNyTcm-hXEfFrTtLI,5403
+pip/_vendor/vendor.txt,sha256=4NKk7fQhVsZw0U-0zmm9Q2LgGyaPXacFbnJAaS0Q6EY,493
+pip/_vendor/webencodings/__init__.py,sha256=qOBJIuPy_4ByYH6W_bNgJF-qYQ2DoU-dKsDu5yRWCXg,10579
+pip/_vendor/webencodings/__pycache__/__init__.cpython-312.pyc,,
+pip/_vendor/webencodings/__pycache__/labels.cpython-312.pyc,,
+pip/_vendor/webencodings/__pycache__/mklabels.cpython-312.pyc,,
+pip/_vendor/webencodings/__pycache__/tests.cpython-312.pyc,,
+pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-312.pyc,,
+pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979
+pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305
+pip/_vendor/webencodings/tests.py,sha256=OtGLyjhNY1fvkW1GvLJ_FV9ZoqC9Anyjr7q3kxTbzNs,6563
+pip/_vendor/webencodings/x_user_defined.py,sha256=yOqWSdmpytGfUgh_Z6JYgDNhoc-BAHyyeeT15Fr42tM,4307
+pip/py.typed,sha256=EBVvvPRTn_eIpz5e5QztSCdrMX7Qwd7VP93RSoIlZ2I,286
diff --git a/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/REQUESTED b/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/WHEEL
new file mode 100644
index 0000000..98c0d20
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.42.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/entry_points.txt b/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/entry_points.txt
new file mode 100644
index 0000000..26fa361
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/entry_points.txt
@@ -0,0 +1,4 @@
+[console_scripts]
+pip = pip._internal.cli.main:main
+pip3 = pip._internal.cli.main:main
+pip3.12 = pip._internal.cli.main:main
diff --git a/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/top_level.txt b/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/top_level.txt
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip-24.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+pip
diff --git a/.venv/lib/python3.12/site-packages/pip/__init__.py b/.venv/lib/python3.12/site-packages/pip/__init__.py
new file mode 100644
index 0000000..be0e3ed
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/__init__.py
@@ -0,0 +1,13 @@
+from typing import List, Optional
+
+__version__ = "24.0"
+
+
+def main(args: Optional[List[str]] = None) -> int:
+    """This is an internal API only meant for use by pip's own console scripts.
+
+    For additional details, see https://github.com/pypa/pip/issues/7498.
+    """
+    from pip._internal.utils.entrypoints import _wrapper
+
+    return _wrapper(args)
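The main() wrapper above is flagged as an internal API, so the supported way to drive this copy of pip from other code is a child process. A minimal sketch, assuming only that the current interpreter has this environment active ("requests" is an illustrative package name, not part of this commit):

    import subprocess
    import sys

    # Run pip in a subprocess of the current interpreter instead of calling
    # pip.main() in-process, since the docstring above marks main() as internal.
    subprocess.run([sys.executable, "-m", "pip", "install", "requests"], check=True)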
diff --git a/.venv/lib/python3.12/site-packages/pip/__main__.py b/.venv/lib/python3.12/site-packages/pip/__main__.py
new file mode 100644
index 0000000..5991326
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/__main__.py
@@ -0,0 +1,24 @@
+import os
+import sys
+
+# Remove '' and the current working directory from the first entry
+# of sys.path, if present, to avoid using the current directory
+# in pip commands such as check, freeze, install, list and show
+# when invoked as python -m pip
+if sys.path[0] in ("", os.getcwd()):
+    sys.path.pop(0)
+
+# If we are running from a wheel, add the wheel to sys.path
+# This allows usage like: python pip-*.whl/pip install pip-*.whl
+if __package__ == "":
+    # __file__ is pip-*.whl/pip/__main__.py
+# first dirname call strips off '/__main__.py', second strips off '/pip'
+    # Resulting path is the name of the wheel itself
+    # Add that to sys.path so we can import pip
+    path = os.path.dirname(os.path.dirname(__file__))
+    sys.path.insert(0, path)
+
+if __name__ == "__main__":
+    from pip._internal.cli.main import main as _main
+
+    sys.exit(_main())
diff --git a/.venv/lib/python3.12/site-packages/pip/__pip-runner__.py b/.venv/lib/python3.12/site-packages/pip/__pip-runner__.py
new file mode 100644
index 0000000..49a148a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/__pip-runner__.py
@@ -0,0 +1,50 @@
+"""Execute exactly this copy of pip, within a different environment.
+
+This file is named as it is, to ensure that this module can't be imported via
+an import statement.
+"""
+
+# /!\ This version compatibility check section must be Python 2 compatible. /!\
+
+import sys
+
+# Copied from setup.py
+PYTHON_REQUIRES = (3, 7)
+
+
+def version_str(version):  # type: ignore
+    return ".".join(str(v) for v in version)
+
+
+if sys.version_info[:2] < PYTHON_REQUIRES:
+    raise SystemExit(
+        "This version of pip does not support python {} (requires >={}).".format(
+            version_str(sys.version_info[:2]), version_str(PYTHON_REQUIRES)
+        )
+    )
+
+# From here on, we can use Python 3 features, but the syntax must remain
+# Python 2 compatible.
+
+import runpy  # noqa: E402
+from importlib.machinery import PathFinder  # noqa: E402
+from os.path import dirname  # noqa: E402
+
+PIP_SOURCES_ROOT = dirname(dirname(__file__))
+
+
+class PipImportRedirectingFinder:
+    @classmethod
+    def find_spec(self, fullname, path=None, target=None):  # type: ignore
+        if fullname != "pip":
+            return None
+
+        spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target)
+        assert spec, (PIP_SOURCES_ROOT, fullname)
+        return spec
+
+
+sys.meta_path.insert(0, PipImportRedirectingFinder())
+
+assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module"
+runpy.run_module("pip", run_name="__main__", alter_sys=True)
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/__init__.py b/.venv/lib/python3.12/site-packages/pip/_internal/__init__.py
new file mode 100644
index 0000000..96c6b88
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/__init__.py
@@ -0,0 +1,18 @@
+from typing import List, Optional
+
+from pip._internal.utils import _log
+
+# init_logging() must be called before any call to logging.getLogger()
+# which happens at import of most modules.
+_log.init_logging()
+
+
+def main(args: Optional[List[str]] = None) -> int:
+    """This is preserved for old console scripts that may still be referencing
+    it.
+
+    For additional details, see https://github.com/pypa/pip/issues/7498.
+    """
+    from pip._internal.utils.entrypoints import _wrapper
+
+    return _wrapper(args)
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/build_env.py b/.venv/lib/python3.12/site-packages/pip/_internal/build_env.py
new file mode 100644
index 0000000..4f704a3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/build_env.py
@@ -0,0 +1,311 @@
+"""Build Environment used for isolation during sdist building
+"""
+
+import logging
+import os
+import pathlib
+import site
+import sys
+import textwrap
+from collections import OrderedDict
+from types import TracebackType
+from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type, Union
+
+from pip._vendor.certifi import where
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.version import Version
+
+from pip import __file__ as pip_location
+from pip._internal.cli.spinners import open_spinner
+from pip._internal.locations import get_platlib, get_purelib, get_scheme
+from pip._internal.metadata import get_default_environment, get_environment
+from pip._internal.utils.subprocess import call_subprocess
+from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
+
+if TYPE_CHECKING:
+    from pip._internal.index.package_finder import PackageFinder
+
+logger = logging.getLogger(__name__)
+
+
+def _dedup(a: str, b: str) -> Union[Tuple[str], Tuple[str, str]]:
+    return (a, b) if a != b else (a,)
+
+
+class _Prefix:
+    def __init__(self, path: str) -> None:
+        self.path = path
+        self.setup = False
+        scheme = get_scheme("", prefix=path)
+        self.bin_dir = scheme.scripts
+        self.lib_dirs = _dedup(scheme.purelib, scheme.platlib)
+
+
+def get_runnable_pip() -> str:
+    """Get a file to pass to a Python executable, to run the currently-running pip.
+
+    This is used to run a pip subprocess, for installing requirements into the build
+    environment.
+    """
+    source = pathlib.Path(pip_location).resolve().parent
+
+    if not source.is_dir():
+        # This would happen if someone is using pip from inside a zip file. In that
+        # case, we can use that directly.
+        return str(source)
+
+    return os.fsdecode(source / "__pip-runner__.py")
+
+
+def _get_system_sitepackages() -> Set[str]:
+    """Get system site packages
+
+    Usually from site.getsitepackages,
+    but falling back on `get_purelib()/get_platlib()` if unavailable
+    (e.g. in a virtualenv created by virtualenv<20)
+
+    Returns normalized set of strings.
+    """
+    if hasattr(site, "getsitepackages"):
+        system_sites = site.getsitepackages()
+    else:
+        # virtualenv < 20 overwrites site.py without getsitepackages
+        # fallback on get_purelib/get_platlib.
+        # this is known to miss things, but shouldn't in the cases
+        # where getsitepackages() has been removed (inside a virtualenv)
+        system_sites = [get_purelib(), get_platlib()]
+    return {os.path.normcase(path) for path in system_sites}
+
+
+class BuildEnvironment:
+    """Creates and manages an isolated environment to install build deps"""
+
+    def __init__(self) -> None:
+        temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True)
+
+        self._prefixes = OrderedDict(
+            (name, _Prefix(os.path.join(temp_dir.path, name)))
+            for name in ("normal", "overlay")
+        )
+
+        self._bin_dirs: List[str] = []
+        self._lib_dirs: List[str] = []
+        for prefix in reversed(list(self._prefixes.values())):
+            self._bin_dirs.append(prefix.bin_dir)
+            self._lib_dirs.extend(prefix.lib_dirs)
+
+        # Customize site to:
+        # - ensure .pth files are honored
+        # - prevent access to system site packages
+        system_sites = _get_system_sitepackages()
+
+        self._site_dir = os.path.join(temp_dir.path, "site")
+        if not os.path.exists(self._site_dir):
+            os.mkdir(self._site_dir)
+        with open(
+            os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8"
+        ) as fp:
+            fp.write(
+                textwrap.dedent(
+                    """
+                import os, site, sys
+
+                # First, drop system-sites related paths.
+                original_sys_path = sys.path[:]
+                known_paths = set()
+                for path in {system_sites!r}:
+                    site.addsitedir(path, known_paths=known_paths)
+                system_paths = set(
+                    os.path.normcase(path)
+                    for path in sys.path[len(original_sys_path):]
+                )
+                original_sys_path = [
+                    path for path in original_sys_path
+                    if os.path.normcase(path) not in system_paths
+                ]
+                sys.path = original_sys_path
+
+                # Second, add lib directories.
+                # ensuring .pth files are processed.
+                for path in {lib_dirs!r}:
+                    assert path not in sys.path
+                    site.addsitedir(path)
+                """
+                ).format(system_sites=system_sites, lib_dirs=self._lib_dirs)
+            )
+
+    def __enter__(self) -> None:
+        self._save_env = {
+            name: os.environ.get(name, None)
+            for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH")
+        }
+
+        path = self._bin_dirs[:]
+        old_path = self._save_env["PATH"]
+        if old_path:
+            path.extend(old_path.split(os.pathsep))
+
+        pythonpath = [self._site_dir]
+
+        os.environ.update(
+            {
+                "PATH": os.pathsep.join(path),
+                "PYTHONNOUSERSITE": "1",
+                "PYTHONPATH": os.pathsep.join(pythonpath),
+            }
+        )
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> None:
+        for varname, old_value in self._save_env.items():
+            if old_value is None:
+                os.environ.pop(varname, None)
+            else:
+                os.environ[varname] = old_value
+
+    def check_requirements(
+        self, reqs: Iterable[str]
+    ) -> Tuple[Set[Tuple[str, str]], Set[str]]:
+        """Return 2 sets:
+        - conflicting requirements: set of (installed, wanted) reqs tuples
+        - missing requirements: set of reqs
+        """
+        missing = set()
+        conflicting = set()
+        if reqs:
+            env = (
+                get_environment(self._lib_dirs)
+                if hasattr(self, "_lib_dirs")
+                else get_default_environment()
+            )
+            for req_str in reqs:
+                req = Requirement(req_str)
+                # We're explicitly evaluating with an empty extra value, since build
+                # environments are not provided any mechanism to select specific extras.
+                if req.marker is not None and not req.marker.evaluate({"extra": ""}):
+                    continue
+                dist = env.get_distribution(req.name)
+                if not dist:
+                    missing.add(req_str)
+                    continue
+                if isinstance(dist.version, Version):
+                    installed_req_str = f"{req.name}=={dist.version}"
+                else:
+                    installed_req_str = f"{req.name}==={dist.version}"
+                if not req.specifier.contains(dist.version, prereleases=True):
+                    conflicting.add((installed_req_str, req_str))
+                # FIXME: Consider direct URL?
+        return conflicting, missing
+
+    def install_requirements(
+        self,
+        finder: "PackageFinder",
+        requirements: Iterable[str],
+        prefix_as_string: str,
+        *,
+        kind: str,
+    ) -> None:
+        prefix = self._prefixes[prefix_as_string]
+        assert not prefix.setup
+        prefix.setup = True
+        if not requirements:
+            return
+        self._install_requirements(
+            get_runnable_pip(),
+            finder,
+            requirements,
+            prefix,
+            kind=kind,
+        )
+
+    @staticmethod
+    def _install_requirements(
+        pip_runnable: str,
+        finder: "PackageFinder",
+        requirements: Iterable[str],
+        prefix: _Prefix,
+        *,
+        kind: str,
+    ) -> None:
+        args: List[str] = [
+            sys.executable,
+            pip_runnable,
+            "install",
+            "--ignore-installed",
+            "--no-user",
+            "--prefix",
+            prefix.path,
+            "--no-warn-script-location",
+        ]
+        if logger.getEffectiveLevel() <= logging.DEBUG:
+            args.append("-v")
+        for format_control in ("no_binary", "only_binary"):
+            formats = getattr(finder.format_control, format_control)
+            args.extend(
+                (
+                    "--" + format_control.replace("_", "-"),
+                    ",".join(sorted(formats or {":none:"})),
+                )
+            )
+
+        index_urls = finder.index_urls
+        if index_urls:
+            args.extend(["-i", index_urls[0]])
+            for extra_index in index_urls[1:]:
+                args.extend(["--extra-index-url", extra_index])
+        else:
+            args.append("--no-index")
+        for link in finder.find_links:
+            args.extend(["--find-links", link])
+
+        for host in finder.trusted_hosts:
+            args.extend(["--trusted-host", host])
+        if finder.allow_all_prereleases:
+            args.append("--pre")
+        if finder.prefer_binary:
+            args.append("--prefer-binary")
+        args.append("--")
+        args.extend(requirements)
+        extra_environ = {"_PIP_STANDALONE_CERT": where()}
+        with open_spinner(f"Installing {kind}") as spinner:
+            call_subprocess(
+                args,
+                command_desc=f"pip subprocess to install {kind}",
+                spinner=spinner,
+                extra_environ=extra_environ,
+            )
+
+
+class NoOpBuildEnvironment(BuildEnvironment):
+    """A no-op drop-in replacement for BuildEnvironment"""
+
+    def __init__(self) -> None:
+        pass
+
+    def __enter__(self) -> None:
+        pass
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> None:
+        pass
+
+    def cleanup(self) -> None:
+        pass
+
+    def install_requirements(
+        self,
+        finder: "PackageFinder",
+        requirements: Iterable[str],
+        prefix_as_string: str,
+        *,
+        kind: str,
+    ) -> None:
+        raise NotImplementedError()
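Read together, BuildEnvironment is driven in three steps: install the build requirements into one of the two prefixes, enter the environment so PATH/PYTHONPATH/PYTHONNOUSERSITE point at it, then check or build inside that isolation. A minimal sketch based only on the signatures above, assuming `finder` is an already-configured PackageFinder:

    # `finder` is assumed to be a configured
    # pip._internal.index.package_finder.PackageFinder instance.
    env = BuildEnvironment()
    env.install_requirements(
        finder, ["setuptools>=40.8.0", "wheel"], "normal", kind="build dependencies"
    )
    with env:
        # Environment variables now point at the isolated prefixes.
        conflicting, missing = env.check_requirements(["setuptools>=40.8.0", "wheel"])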
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/cache.py b/.venv/lib/python3.12/site-packages/pip/_internal/cache.py
new file mode 100644
index 0000000..f45ac23
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/cache.py
@@ -0,0 +1,290 @@
+"""Cache Management
+"""
+
+import hashlib
+import json
+import logging
+import os
+from pathlib import Path
+from typing import Any, Dict, List, Optional
+
+from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.exceptions import InvalidWheelFilename
+from pip._internal.models.direct_url import DirectUrl
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
+from pip._internal.utils.urls import path_to_url
+
+logger = logging.getLogger(__name__)
+
+ORIGIN_JSON_NAME = "origin.json"
+
+
+def _hash_dict(d: Dict[str, str]) -> str:
+    """Return a stable sha224 of a dictionary."""
+    s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
+    return hashlib.sha224(s.encode("ascii")).hexdigest()
+
+
+class Cache:
+    """An abstract class - provides cache directories for data from links
+
+    :param cache_dir: The root of the cache.
+    """
+
+    def __init__(self, cache_dir: str) -> None:
+        super().__init__()
+        assert not cache_dir or os.path.isabs(cache_dir)
+        self.cache_dir = cache_dir or None
+
+    def _get_cache_path_parts(self, link: Link) -> List[str]:
+        """Get parts of part that must be os.path.joined with cache_dir"""
+
+        # We want to generate a URL to use as our cache key; we don't want to
+        # just re-use the URL because it might have other items in the fragment
+        # and we don't care about those.
+        key_parts = {"url": link.url_without_fragment}
+        if link.hash_name is not None and link.hash is not None:
+            key_parts[link.hash_name] = link.hash
+        if link.subdirectory_fragment:
+            key_parts["subdirectory"] = link.subdirectory_fragment
+
+        # Include interpreter name, major and minor version in cache key
+        # to cope with ill-behaved sdists that build a different wheel
+        # depending on the python version their setup.py is being run on,
+        # and don't encode the difference in compatibility tags.
+        # https://github.com/pypa/pip/issues/7296
+        key_parts["interpreter_name"] = interpreter_name()
+        key_parts["interpreter_version"] = interpreter_version()
+
+        # Encode our key URL with sha224; we use this because it has similar
+        # security properties to sha256, but a shorter total output (and is
+        # thus less secure). However, the differences don't matter much for
+        # our use case here.
+        hashed = _hash_dict(key_parts)
+
+        # We want to nest the directories some to prevent having a ton of top
+        # level directories where we might run out of sub directories on some
+        # FS.
+        parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
+
+        return parts
+
+    def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]:
+        can_not_cache = not self.cache_dir or not canonical_package_name or not link
+        if can_not_cache:
+            return []
+
+        path = self.get_path_for_link(link)
+        if os.path.isdir(path):
+            return [(candidate, path) for candidate in os.listdir(path)]
+        return []
+
+    def get_path_for_link(self, link: Link) -> str:
+        """Return a directory to store cached items in for link."""
+        raise NotImplementedError()
+
+    def get(
+        self,
+        link: Link,
+        package_name: Optional[str],
+        supported_tags: List[Tag],
+    ) -> Link:
+        """Returns a link to a cached item if it exists, otherwise returns the
+        passed link.
+        """
+        raise NotImplementedError()
+
+
+class SimpleWheelCache(Cache):
+    """A cache of wheels for future installs."""
+
+    def __init__(self, cache_dir: str) -> None:
+        super().__init__(cache_dir)
+
+    def get_path_for_link(self, link: Link) -> str:
+        """Return a directory to store cached wheels for link
+
+        Because there are M wheels for any one sdist, we provide a directory
+        to cache them in, and then consult that directory when looking up
+        cache hits.
+
+        We only insert things into the cache if they have plausible version
+        numbers, so that we don't contaminate the cache with things that were
+        not unique. E.g. ./package might have dozens of installs done for it
+        and build a version of 0.0...and if we built and cached a wheel, we'd
+        end up using the same wheel even if the source has been edited.
+
+        :param link: The link of the sdist for which this will cache wheels.
+        """
+        parts = self._get_cache_path_parts(link)
+        assert self.cache_dir
+        # Store wheels within the root cache_dir
+        return os.path.join(self.cache_dir, "wheels", *parts)
+
+    def get(
+        self,
+        link: Link,
+        package_name: Optional[str],
+        supported_tags: List[Tag],
+    ) -> Link:
+        candidates = []
+
+        if not package_name:
+            return link
+
+        canonical_package_name = canonicalize_name(package_name)
+        for wheel_name, wheel_dir in self._get_candidates(link, canonical_package_name):
+            try:
+                wheel = Wheel(wheel_name)
+            except InvalidWheelFilename:
+                continue
+            if canonicalize_name(wheel.name) != canonical_package_name:
+                logger.debug(
+                    "Ignoring cached wheel %s for %s as it "
+                    "does not match the expected distribution name %s.",
+                    wheel_name,
+                    link,
+                    package_name,
+                )
+                continue
+            if not wheel.supported(supported_tags):
+                # Built for a different python/arch/etc
+                continue
+            candidates.append(
+                (
+                    wheel.support_index_min(supported_tags),
+                    wheel_name,
+                    wheel_dir,
+                )
+            )
+
+        if not candidates:
+            return link
+
+        _, wheel_name, wheel_dir = min(candidates)
+        return Link(path_to_url(os.path.join(wheel_dir, wheel_name)))
+
+
+class EphemWheelCache(SimpleWheelCache):
+    """A SimpleWheelCache that creates it's own temporary cache directory"""
+
+    def __init__(self) -> None:
+        self._temp_dir = TempDirectory(
+            kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
+            globally_managed=True,
+        )
+
+        super().__init__(self._temp_dir.path)
+
+
+class CacheEntry:
+    def __init__(
+        self,
+        link: Link,
+        persistent: bool,
+    ):
+        self.link = link
+        self.persistent = persistent
+        self.origin: Optional[DirectUrl] = None
+        origin_direct_url_path = Path(self.link.file_path).parent / ORIGIN_JSON_NAME
+        if origin_direct_url_path.exists():
+            try:
+                self.origin = DirectUrl.from_json(
+                    origin_direct_url_path.read_text(encoding="utf-8")
+                )
+            except Exception as e:
+                logger.warning(
+                    "Ignoring invalid cache entry origin file %s for %s (%s)",
+                    origin_direct_url_path,
+                    link.filename,
+                    e,
+                )
+
+
+class WheelCache(Cache):
+    """Wraps EphemWheelCache and SimpleWheelCache into a single Cache
+
+    This Cache allows for graceful degradation: it falls back to the ephem
+    wheel cache when a certain link is not found in the simple wheel cache
+    first.
+    """
+
+    def __init__(self, cache_dir: str) -> None:
+        super().__init__(cache_dir)
+        self._wheel_cache = SimpleWheelCache(cache_dir)
+        self._ephem_cache = EphemWheelCache()
+
+    def get_path_for_link(self, link: Link) -> str:
+        return self._wheel_cache.get_path_for_link(link)
+
+    def get_ephem_path_for_link(self, link: Link) -> str:
+        return self._ephem_cache.get_path_for_link(link)
+
+    def get(
+        self,
+        link: Link,
+        package_name: Optional[str],
+        supported_tags: List[Tag],
+    ) -> Link:
+        cache_entry = self.get_cache_entry(link, package_name, supported_tags)
+        if cache_entry is None:
+            return link
+        return cache_entry.link
+
+    def get_cache_entry(
+        self,
+        link: Link,
+        package_name: Optional[str],
+        supported_tags: List[Tag],
+    ) -> Optional[CacheEntry]:
+        """Returns a CacheEntry with a link to a cached item if it exists or
+        None. The cache entry indicates if the item was found in the persistent
+        or ephemeral cache.
+        """
+        retval = self._wheel_cache.get(
+            link=link,
+            package_name=package_name,
+            supported_tags=supported_tags,
+        )
+        if retval is not link:
+            return CacheEntry(retval, persistent=True)
+
+        retval = self._ephem_cache.get(
+            link=link,
+            package_name=package_name,
+            supported_tags=supported_tags,
+        )
+        if retval is not link:
+            return CacheEntry(retval, persistent=False)
+
+        return None
+
+    @staticmethod
+    def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None:
+        origin_path = Path(cache_dir) / ORIGIN_JSON_NAME
+        if origin_path.exists():
+            try:
+                origin = DirectUrl.from_json(origin_path.read_text(encoding="utf-8"))
+            except Exception as e:
+                logger.warning(
+                    "Could not read origin file %s in cache entry (%s). "
+                    "Will attempt to overwrite it.",
+                    origin_path,
+                    e,
+                )
+            else:
+                # TODO: use DirectUrl.equivalent when
+                # https://github.com/pypa/pip/pull/10564 is merged.
+                if origin.url != download_info.url:
+                    logger.warning(
+                        "Origin URL %s in cache entry %s does not match download URL "
+                        "%s. This is likely a pip bug or a cache corruption issue. "
+                        "Will overwrite it with the new value.",
+                        origin.url,
+                        cache_dir,
+                        download_info.url,
+                    )
+        origin_path.write_text(download_info.to_json(), encoding="utf-8")
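+
+
+# --- Editor's sketch (illustrative, not part of upstream pip) ----------------
+# A minimal, hedged demonstration of how WheelCache.get_cache_entry() degrades
+# from the persistent wheel cache to the ephemeral one. The package name and
+# link URL below are hypothetical; with an empty supported_tags list no cached
+# wheel can match, so the lookup simply returns None.
+if __name__ == "__main__":
+    import tempfile
+
+    from pip._internal.utils.temp_dir import global_tempdir_manager
+
+    # The ephemeral cache registers a globally managed temp dir, so the global
+    # tempdir manager must be active (base_command.py enters it the same way).
+    with global_tempdir_manager():
+        cache = WheelCache(tempfile.mkdtemp())
+        sdist_link = Link("https://example.com/packages/demo-1.0.tar.gz")
+        entry = cache.get_cache_entry(sdist_link, "demo", supported_tags=[])
+        print("cache entry:", entry)  # None: nothing has been cached yet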
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/cli/__init__.py b/.venv/lib/python3.12/site-packages/pip/_internal/cli/__init__.py
new file mode 100644
index 0000000..e589bb9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/cli/__init__.py
@@ -0,0 +1,4 @@
+"""Subpackage containing all of pip's command line interface related code
+"""
+
+# This file intentionally does not import submodules
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/cli/autocompletion.py b/.venv/lib/python3.12/site-packages/pip/_internal/cli/autocompletion.py
new file mode 100644
index 0000000..e5950b9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/cli/autocompletion.py
@@ -0,0 +1,172 @@
+"""Logic that powers autocompletion installed by ``pip completion``.
+"""
+
+import optparse
+import os
+import sys
+from itertools import chain
+from typing import Any, Iterable, List, Optional
+
+from pip._internal.cli.main_parser import create_main_parser
+from pip._internal.commands import commands_dict, create_command
+from pip._internal.metadata import get_default_environment
+
+
+def autocomplete() -> None:
+    """Entry Point for completion of main and subcommand options."""
+    # Don't complete if user hasn't sourced bash_completion file.
+    if "PIP_AUTO_COMPLETE" not in os.environ:
+        return
+    cwords = os.environ["COMP_WORDS"].split()[1:]
+    cword = int(os.environ["COMP_CWORD"])
+    try:
+        current = cwords[cword - 1]
+    except IndexError:
+        current = ""
+
+    parser = create_main_parser()
+    subcommands = list(commands_dict)
+    options = []
+
+    # subcommand
+    subcommand_name: Optional[str] = None
+    for word in cwords:
+        if word in subcommands:
+            subcommand_name = word
+            break
+    # subcommand options
+    if subcommand_name is not None:
+        # special case: 'help' subcommand has no options
+        if subcommand_name == "help":
+            sys.exit(1)
+        # special case: list locally installed dists for show and uninstall
+        should_list_installed = not current.startswith("-") and subcommand_name in [
+            "show",
+            "uninstall",
+        ]
+        if should_list_installed:
+            env = get_default_environment()
+            lc = current.lower()
+            installed = [
+                dist.canonical_name
+                for dist in env.iter_installed_distributions(local_only=True)
+                if dist.canonical_name.startswith(lc)
+                and dist.canonical_name not in cwords[1:]
+            ]
+            # if there are no dists installed, fall back to option completion
+            if installed:
+                for dist in installed:
+                    print(dist)
+                sys.exit(1)
+
+        should_list_installables = (
+            not current.startswith("-") and subcommand_name == "install"
+        )
+        if should_list_installables:
+            for path in auto_complete_paths(current, "path"):
+                print(path)
+            sys.exit(1)
+
+        subcommand = create_command(subcommand_name)
+
+        for opt in subcommand.parser.option_list_all:
+            if opt.help != optparse.SUPPRESS_HELP:
+                options += [
+                    (opt_str, opt.nargs) for opt_str in opt._long_opts + opt._short_opts
+                ]
+
+        # filter out previously specified options from available options
+        prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
+        options = [(x, v) for (x, v) in options if x not in prev_opts]
+        # filter options by current input
+        options = [(k, v) for k, v in options if k.startswith(current)]
+        # get completion type given cwords and available subcommand options
+        completion_type = get_path_completion_type(
+            cwords,
+            cword,
+            subcommand.parser.option_list_all,
+        )
+        # get completion files and directories if ``completion_type`` is
+        # ``<file>``, ``<dir>`` or ``<path>``
+        if completion_type:
+            paths = auto_complete_paths(current, completion_type)
+            options = [(path, 0) for path in paths]
+        for option in options:
+            opt_label = option[0]
+            # append '=' to options which require args
+            if option[1] and option[0][:2] == "--":
+                opt_label += "="
+            print(opt_label)
+    else:
+        # show main parser options only when necessary
+
+        opts = [i.option_list for i in parser.option_groups]
+        opts.append(parser.option_list)
+        flattened_opts = chain.from_iterable(opts)
+        if current.startswith("-"):
+            for opt in flattened_opts:
+                if opt.help != optparse.SUPPRESS_HELP:
+                    subcommands += opt._long_opts + opt._short_opts
+        else:
+            # get completion type given cwords and all available options
+            completion_type = get_path_completion_type(cwords, cword, flattened_opts)
+            if completion_type:
+                subcommands = list(auto_complete_paths(current, completion_type))
+
+        print(" ".join([x for x in subcommands if x.startswith(current)]))
+    sys.exit(1)
+
+
+def get_path_completion_type(
+    cwords: List[str], cword: int, opts: Iterable[Any]
+) -> Optional[str]:
+    """Get the type of path completion (``file``, ``dir``, ``path`` or None)
+
+    :param cwords: same as the environmental variable ``COMP_WORDS``
+    :param cword: same as the environmental variable ``COMP_CWORD``
+    :param opts: The available options to check
+    :return: path completion type (``file``, ``dir``, ``path`` or None)
+    """
+    if cword < 2 or not cwords[cword - 2].startswith("-"):
+        return None
+    for opt in opts:
+        if opt.help == optparse.SUPPRESS_HELP:
+            continue
+        for o in str(opt).split("/"):
+            if cwords[cword - 2].split("=")[0] == o:
+                if not opt.metavar or any(
+                    x in ("path", "file", "dir") for x in opt.metavar.split("/")
+                ):
+                    return opt.metavar
+    return None
+
+
+def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
+    """If ``completion_type`` is ``file`` or ``path``, list all regular files
+    and directories starting with ``current``; otherwise only list directories
+    starting with ``current``.
+
+    :param current: The word to be completed
+    :param completion_type: path completion type (``file``, ``path`` or ``dir``)
+    :return: A generator of regular files and/or directories
+    """
+    directory, filename = os.path.split(current)
+    current_path = os.path.abspath(directory)
+    # Don't complete paths if they can't be accessed
+    if not os.access(current_path, os.R_OK):
+        return
+    filename = os.path.normcase(filename)
+    # list all files that start with ``filename``
+    file_list = (
+        x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename)
+    )
+    for f in file_list:
+        opt = os.path.join(current_path, f)
+        comp_file = os.path.normcase(os.path.join(directory, f))
+        # complete regular files when there is not ``<dir>`` after the option;
+        # complete directories when there is ``<file>``, ``<path>`` or
+        # ``<dir>`` after the option
+        if completion_type != "dir" and os.path.isfile(opt):
+            yield comp_file
+        elif os.path.isdir(opt):
+            yield os.path.join(comp_file, "")
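+
+
+# --- Editor's sketch (illustrative, not part of upstream pip) ----------------
+# auto_complete_paths() is a plain generator, so it can be exercised outside a
+# shell completion session. The "./" prefix and "path" completion type below
+# are arbitrary example arguments; the output is whatever exists in the
+# current directory.
+if __name__ == "__main__":
+    for candidate in auto_complete_paths("./", "path"):
+        print(candidate)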
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/cli/base_command.py b/.venv/lib/python3.12/site-packages/pip/_internal/cli/base_command.py
new file mode 100644
index 0000000..db9d5cc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/cli/base_command.py
@@ -0,0 +1,236 @@
+"""Base Command class, and related routines"""
+
+import functools
+import logging
+import logging.config
+import optparse
+import os
+import sys
+import traceback
+from optparse import Values
+from typing import Any, Callable, List, Optional, Tuple
+
+from pip._vendor.rich import traceback as rich_traceback
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.command_context import CommandContextMixIn
+from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
+from pip._internal.cli.status_codes import (
+    ERROR,
+    PREVIOUS_BUILD_DIR_ERROR,
+    UNKNOWN_ERROR,
+    VIRTUALENV_NOT_FOUND,
+)
+from pip._internal.exceptions import (
+    BadCommand,
+    CommandError,
+    DiagnosticPipError,
+    InstallationError,
+    NetworkConnectionError,
+    PreviousBuildDirError,
+    UninstallationError,
+)
+from pip._internal.utils.filesystem import check_path_owner
+from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
+from pip._internal.utils.misc import get_prog, normalize_path
+from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry
+from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+__all__ = ["Command"]
+
+logger = logging.getLogger(__name__)
+
+
+class Command(CommandContextMixIn):
+    usage: str = ""
+    ignore_require_venv: bool = False
+
+    def __init__(self, name: str, summary: str, isolated: bool = False) -> None:
+        super().__init__()
+
+        self.name = name
+        self.summary = summary
+        self.parser = ConfigOptionParser(
+            usage=self.usage,
+            prog=f"{get_prog()} {name}",
+            formatter=UpdatingDefaultsHelpFormatter(),
+            add_help_option=False,
+            name=name,
+            description=self.__doc__,
+            isolated=isolated,
+        )
+
+        self.tempdir_registry: Optional[TempDirRegistry] = None
+
+        # Commands should add options to this option group
+        optgroup_name = f"{self.name.capitalize()} Options"
+        self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
+
+        # Add the general options
+        gen_opts = cmdoptions.make_option_group(
+            cmdoptions.general_group,
+            self.parser,
+        )
+        self.parser.add_option_group(gen_opts)
+
+        self.add_options()
+
+    def add_options(self) -> None:
+        pass
+
+    def handle_pip_version_check(self, options: Values) -> None:
+        """
+        This is a no-op so that commands by default do not do the pip version
+        check.
+        """
+        # Make sure we do the pip version check if the index_group options
+        # are present.
+        assert not hasattr(options, "no_index")
+
+    def run(self, options: Values, args: List[str]) -> int:
+        raise NotImplementedError
+
+    def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]:
+        # factored out for testability
+        return self.parser.parse_args(args)
+
+    def main(self, args: List[str]) -> int:
+        try:
+            with self.main_context():
+                return self._main(args)
+        finally:
+            logging.shutdown()
+
+    def _main(self, args: List[str]) -> int:
+        # We must initialize this before the tempdir manager, otherwise the
+        # configuration would not be accessible by the time we clean up the
+        # tempdir manager.
+        self.tempdir_registry = self.enter_context(tempdir_registry())
+        # Intentionally set as early as possible so globally-managed temporary
+        # directories are available to the rest of the code.
+        self.enter_context(global_tempdir_manager())
+
+        options, args = self.parse_args(args)
+
+        # Set verbosity so that it can be used elsewhere.
+        self.verbosity = options.verbose - options.quiet
+
+        level_number = setup_logging(
+            verbosity=self.verbosity,
+            no_color=options.no_color,
+            user_log_file=options.log,
+        )
+
+        always_enabled_features = set(options.features_enabled) & set(
+            cmdoptions.ALWAYS_ENABLED_FEATURES
+        )
+        if always_enabled_features:
+            logger.warning(
+                "The following features are always enabled: %s. ",
+                ", ".join(sorted(always_enabled_features)),
+            )
+
+        # Make sure that the --python argument isn't specified after the
+        # subcommand. We can tell, because if --python was specified,
+        # we should only reach this point if we're running in the created
+        # subprocess, which has the _PIP_RUNNING_IN_SUBPROCESS environment
+        # variable set.
+        if options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
+            logger.critical(
+                "The --python option must be placed before the pip subcommand name"
+            )
+            sys.exit(ERROR)
+
+        # TODO: Try to get these passing down from the command?
+        #       without resorting to os.environ to hold these.
+        #       This also affects isolated builds and it should.
+
+        if options.no_input:
+            os.environ["PIP_NO_INPUT"] = "1"
+
+        if options.exists_action:
+            os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action)
+
+        if options.require_venv and not self.ignore_require_venv:
+            # If a venv is required check if it can really be found
+            if not running_under_virtualenv():
+                logger.critical("Could not find an activated virtualenv (required).")
+                sys.exit(VIRTUALENV_NOT_FOUND)
+
+        if options.cache_dir:
+            options.cache_dir = normalize_path(options.cache_dir)
+            if not check_path_owner(options.cache_dir):
+                logger.warning(
+                    "The directory '%s' or its parent directory is not owned "
+                    "or is not writable by the current user. The cache "
+                    "has been disabled. Check the permissions and owner of "
+                    "that directory. If executing pip with sudo, you should "
+                    "use sudo's -H flag.",
+                    options.cache_dir,
+                )
+                options.cache_dir = None
+
+        def intercepts_unhandled_exc(
+            run_func: Callable[..., int]
+        ) -> Callable[..., int]:
+            @functools.wraps(run_func)
+            def exc_logging_wrapper(*args: Any) -> int:
+                try:
+                    status = run_func(*args)
+                    assert isinstance(status, int)
+                    return status
+                except DiagnosticPipError as exc:
+                    logger.error("%s", exc, extra={"rich": True})
+                    logger.debug("Exception information:", exc_info=True)
+
+                    return ERROR
+                except PreviousBuildDirError as exc:
+                    logger.critical(str(exc))
+                    logger.debug("Exception information:", exc_info=True)
+
+                    return PREVIOUS_BUILD_DIR_ERROR
+                except (
+                    InstallationError,
+                    UninstallationError,
+                    BadCommand,
+                    NetworkConnectionError,
+                ) as exc:
+                    logger.critical(str(exc))
+                    logger.debug("Exception information:", exc_info=True)
+
+                    return ERROR
+                except CommandError as exc:
+                    logger.critical("%s", exc)
+                    logger.debug("Exception information:", exc_info=True)
+
+                    return ERROR
+                except BrokenStdoutLoggingError:
+                    # Bypass our logger and write any remaining messages to
+                    # stderr because stdout no longer works.
+                    print("ERROR: Pipe to stdout was broken", file=sys.stderr)
+                    if level_number <= logging.DEBUG:
+                        traceback.print_exc(file=sys.stderr)
+
+                    return ERROR
+                except KeyboardInterrupt:
+                    logger.critical("Operation cancelled by user")
+                    logger.debug("Exception information:", exc_info=True)
+
+                    return ERROR
+                except BaseException:
+                    logger.critical("Exception:", exc_info=True)
+
+                    return UNKNOWN_ERROR
+
+            return exc_logging_wrapper
+
+        try:
+            if not options.debug_mode:
+                run = intercepts_unhandled_exc(self.run)
+            else:
+                run = self.run
+                rich_traceback.install(show_locals=True)
+            return run(options, args)
+        finally:
+            self.handle_pip_version_check(options)
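+
+
+# --- Editor's sketch (illustrative, not part of upstream pip) ----------------
+# A minimal illustration of how Command is meant to be subclassed: run()
+# returns a status code, while main() wires up option parsing, logging setup
+# and error interception. The command name, summary and docstring below are
+# hypothetical and exist only for this demo.
+if __name__ == "__main__":
+    from pip._internal.cli.status_codes import SUCCESS
+
+    class _DemoCommand(Command):
+        """Print a greeting and exit (demo only)."""
+
+        ignore_require_venv = True
+
+        def run(self, options: Values, args: List[str]) -> int:
+            logger.info("hello from a demo pip command")
+            return SUCCESS
+
+    sys.exit(_DemoCommand("demo", "Demonstration command").main([]))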
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/cli/cmdoptions.py b/.venv/lib/python3.12/site-packages/pip/_internal/cli/cmdoptions.py
new file mode 100644
index 0000000..d643256
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/cli/cmdoptions.py
@@ -0,0 +1,1074 @@
+"""
+shared options and groups
+
+The principle here is to define options once, but *not* instantiate them
+globally. One reason being that options with action='append' can carry state
+between parses. pip parses general options twice internally, and shouldn't
+pass on state. To be consistent, all options will follow this design.
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import importlib.util
+import logging
+import os
+import textwrap
+from functools import partial
+from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
+from textwrap import dedent
+from typing import Any, Callable, Dict, Optional, Tuple
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli.parser import ConfigOptionParser
+from pip._internal.exceptions import CommandError
+from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
+from pip._internal.models.format_control import FormatControl
+from pip._internal.models.index import PyPI
+from pip._internal.models.target_python import TargetPython
+from pip._internal.utils.hashes import STRONG_HASHES
+from pip._internal.utils.misc import strtobool
+
+logger = logging.getLogger(__name__)
+
+
+def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None:
+    """
+    Raise an option parsing error using parser.error().
+
+    Args:
+      parser: an OptionParser instance.
+      option: an Option instance.
+      msg: the error text.
+    """
+    msg = f"{option} error: {msg}"
+    msg = textwrap.fill(" ".join(msg.split()))
+    parser.error(msg)
+
+
+def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> OptionGroup:
+    """
+    Return an OptionGroup object
+    group  -- assumed to be dict with 'name' and 'options' keys
+    parser -- an optparse Parser
+    """
+    option_group = OptionGroup(parser, group["name"])
+    for option in group["options"]:
+        option_group.add_option(option())
+    return option_group
+
+
+def check_dist_restriction(options: Values, check_target: bool = False) -> None:
+    """Function for determining if custom platform options are allowed.
+
+    :param options: The OptionParser options.
+    :param check_target: Whether or not to check if --target is being used.
+    """
+    dist_restriction_set = any(
+        [
+            options.python_version,
+            options.platforms,
+            options.abis,
+            options.implementation,
+        ]
+    )
+
+    binary_only = FormatControl(set(), {":all:"})
+    sdist_dependencies_allowed = (
+        options.format_control != binary_only and not options.ignore_dependencies
+    )
+
+    # Installations or downloads using dist restrictions must not combine
+    # source distributions and dist-specific wheels, as they are not
+    # guaranteed to be locally compatible.
+    if dist_restriction_set and sdist_dependencies_allowed:
+        raise CommandError(
+            "When restricting platform and interpreter constraints using "
+            "--python-version, --platform, --abi, or --implementation, "
+            "either --no-deps must be set, or --only-binary=:all: must be "
+            "set and --no-binary must not be set (or must be set to "
+            ":none:)."
+        )
+
+    if check_target:
+        if not options.dry_run and dist_restriction_set and not options.target_dir:
+            raise CommandError(
+                "Can not use any platform or abi specific options unless "
+                "installing via '--target' or using '--dry-run'"
+            )
+
+
+def _path_option_check(option: Option, opt: str, value: str) -> str:
+    return os.path.expanduser(value)
+
+
+def _package_name_option_check(option: Option, opt: str, value: str) -> str:
+    return canonicalize_name(value)
+
+
+class PipOption(Option):
+    TYPES = Option.TYPES + ("path", "package_name")
+    TYPE_CHECKER = Option.TYPE_CHECKER.copy()
+    TYPE_CHECKER["package_name"] = _package_name_option_check
+    TYPE_CHECKER["path"] = _path_option_check
+
+
+###########
+# options #
+###########
+
+help_: Callable[..., Option] = partial(
+    Option,
+    "-h",
+    "--help",
+    dest="help",
+    action="help",
+    help="Show help.",
+)
+
+debug_mode: Callable[..., Option] = partial(
+    Option,
+    "--debug",
+    dest="debug_mode",
+    action="store_true",
+    default=False,
+    help=(
+        "Let unhandled exceptions propagate outside the main subroutine, "
+        "instead of logging them to stderr."
+    ),
+)
+
+isolated_mode: Callable[..., Option] = partial(
+    Option,
+    "--isolated",
+    dest="isolated_mode",
+    action="store_true",
+    default=False,
+    help=(
+        "Run pip in an isolated mode, ignoring environment variables and user "
+        "configuration."
+    ),
+)
+
+require_virtualenv: Callable[..., Option] = partial(
+    Option,
+    "--require-virtualenv",
+    "--require-venv",
+    dest="require_venv",
+    action="store_true",
+    default=False,
+    help=(
+        "Allow pip to only run in a virtual environment; "
+        "exit with an error otherwise."
+    ),
+)
+
+override_externally_managed: Callable[..., Option] = partial(
+    Option,
+    "--break-system-packages",
+    dest="override_externally_managed",
+    action="store_true",
+    help="Allow pip to modify an EXTERNALLY-MANAGED Python installation",
+)
+
+python: Callable[..., Option] = partial(
+    Option,
+    "--python",
+    dest="python",
+    help="Run pip with the specified Python interpreter.",
+)
+
+verbose: Callable[..., Option] = partial(
+    Option,
+    "-v",
+    "--verbose",
+    dest="verbose",
+    action="count",
+    default=0,
+    help="Give more output. Option is additive, and can be used up to 3 times.",
+)
+
+no_color: Callable[..., Option] = partial(
+    Option,
+    "--no-color",
+    dest="no_color",
+    action="store_true",
+    default=False,
+    help="Suppress colored output.",
+)
+
+version: Callable[..., Option] = partial(
+    Option,
+    "-V",
+    "--version",
+    dest="version",
+    action="store_true",
+    help="Show version and exit.",
+)
+
+quiet: Callable[..., Option] = partial(
+    Option,
+    "-q",
+    "--quiet",
+    dest="quiet",
+    action="count",
+    default=0,
+    help=(
+        "Give less output. Option is additive, and can be used up to 3"
+        " times (corresponding to WARNING, ERROR, and CRITICAL logging"
+        " levels)."
+    ),
+)
+
+progress_bar: Callable[..., Option] = partial(
+    Option,
+    "--progress-bar",
+    dest="progress_bar",
+    type="choice",
+    choices=["on", "off"],
+    default="on",
+    help="Specify whether the progress bar should be used [on, off] (default: on)",
+)
+
+log: Callable[..., Option] = partial(
+    PipOption,
+    "--log",
+    "--log-file",
+    "--local-log",
+    dest="log",
+    metavar="path",
+    type="path",
+    help="Path to a verbose appending log.",
+)
+
+no_input: Callable[..., Option] = partial(
+    Option,
+    # Don't ask for input
+    "--no-input",
+    dest="no_input",
+    action="store_true",
+    default=False,
+    help="Disable prompting for input.",
+)
+
+keyring_provider: Callable[..., Option] = partial(
+    Option,
+    "--keyring-provider",
+    dest="keyring_provider",
+    choices=["auto", "disabled", "import", "subprocess"],
+    default="auto",
+    help=(
+        "Enable the credential lookup via the keyring library if user input is allowed."
+        " Specify which mechanism to use [disabled, import, subprocess]."
+        " (default: disabled)"
+    ),
+)
+
+proxy: Callable[..., Option] = partial(
+    Option,
+    "--proxy",
+    dest="proxy",
+    type="str",
+    default="",
+    help="Specify a proxy in the form scheme://[user:passwd@]proxy.server:port.",
+)
+
+retries: Callable[..., Option] = partial(
+    Option,
+    "--retries",
+    dest="retries",
+    type="int",
+    default=5,
+    help="Maximum number of retries each connection should attempt "
+    "(default %default times).",
+)
+
+timeout: Callable[..., Option] = partial(
+    Option,
+    "--timeout",
+    "--default-timeout",
+    metavar="sec",
+    dest="timeout",
+    type="float",
+    default=15,
+    help="Set the socket timeout (default %default seconds).",
+)
+
+
+def exists_action() -> Option:
+    return Option(
+        # Option when path already exist
+        "--exists-action",
+        dest="exists_action",
+        type="choice",
+        choices=["s", "i", "w", "b", "a"],
+        default=[],
+        action="append",
+        metavar="action",
+        help="Default action when a path already exists: "
+        "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
+    )
+
+
+cert: Callable[..., Option] = partial(
+    PipOption,
+    "--cert",
+    dest="cert",
+    type="path",
+    metavar="path",
+    help=(
+        "Path to PEM-encoded CA certificate bundle. "
+        "If provided, overrides the default. "
+        "See 'SSL Certificate Verification' in pip documentation "
+        "for more information."
+    ),
+)
+
+client_cert: Callable[..., Option] = partial(
+    PipOption,
+    "--client-cert",
+    dest="client_cert",
+    type="path",
+    default=None,
+    metavar="path",
+    help="Path to SSL client certificate, a single file containing the "
+    "private key and the certificate in PEM format.",
+)
+
+index_url: Callable[..., Option] = partial(
+    Option,
+    "-i",
+    "--index-url",
+    "--pypi-url",
+    dest="index_url",
+    metavar="URL",
+    default=PyPI.simple_url,
+    help="Base URL of the Python Package Index (default %default). "
+    "This should point to a repository compliant with PEP 503 "
+    "(the simple repository API) or a local directory laid out "
+    "in the same format.",
+)
+
+
+def extra_index_url() -> Option:
+    return Option(
+        "--extra-index-url",
+        dest="extra_index_urls",
+        metavar="URL",
+        action="append",
+        default=[],
+        help="Extra URLs of package indexes to use in addition to "
+        "--index-url. Should follow the same rules as "
+        "--index-url.",
+    )
+
+
+no_index: Callable[..., Option] = partial(
+    Option,
+    "--no-index",
+    dest="no_index",
+    action="store_true",
+    default=False,
+    help="Ignore package index (only looking at --find-links URLs instead).",
+)
+
+
+def find_links() -> Option:
+    return Option(
+        "-f",
+        "--find-links",
+        dest="find_links",
+        action="append",
+        default=[],
+        metavar="url",
+        help="If a URL or path to an html file, then parse for links to "
+        "archives such as sdist (.tar.gz) or wheel (.whl) files. "
+        "If a local path or file:// URL that's a directory, "
+        "then look for archives in the directory listing. "
+        "Links to VCS project URLs are not supported.",
+    )
+
+
+def trusted_host() -> Option:
+    return Option(
+        "--trusted-host",
+        dest="trusted_hosts",
+        action="append",
+        metavar="HOSTNAME",
+        default=[],
+        help="Mark this host or host:port pair as trusted, even though it "
+        "does not have valid or any HTTPS.",
+    )
+
+
+def constraints() -> Option:
+    return Option(
+        "-c",
+        "--constraint",
+        dest="constraints",
+        action="append",
+        default=[],
+        metavar="file",
+        help="Constrain versions using the given constraints file. "
+        "This option can be used multiple times.",
+    )
+
+
+def requirements() -> Option:
+    return Option(
+        "-r",
+        "--requirement",
+        dest="requirements",
+        action="append",
+        default=[],
+        metavar="file",
+        help="Install from the given requirements file. "
+        "This option can be used multiple times.",
+    )
+
+
+def editable() -> Option:
+    return Option(
+        "-e",
+        "--editable",
+        dest="editables",
+        action="append",
+        default=[],
+        metavar="path/url",
+        help=(
+            "Install a project in editable mode (i.e. setuptools "
+            '"develop mode") from a local project path or a VCS url.'
+        ),
+    )
+
+
+def _handle_src(option: Option, opt_str: str, value: str, parser: OptionParser) -> None:
+    value = os.path.abspath(value)
+    setattr(parser.values, option.dest, value)
+
+
+src: Callable[..., Option] = partial(
+    PipOption,
+    "--src",
+    "--source",
+    "--source-dir",
+    "--source-directory",
+    dest="src_dir",
+    type="path",
+    metavar="dir",
+    default=get_src_prefix(),
+    action="callback",
+    callback=_handle_src,
+    help="Directory to check out editable projects into. "
+    'The default in a virtualenv is "<venv path>/src". '
+    'The default for global installs is "<current dir>/src".',
+)
+
+
+def _get_format_control(values: Values, option: Option) -> Any:
+    """Get a format_control object."""
+    return getattr(values, option.dest)
+
+
+def _handle_no_binary(
+    option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
+    existing = _get_format_control(parser.values, option)
+    FormatControl.handle_mutual_excludes(
+        value,
+        existing.no_binary,
+        existing.only_binary,
+    )
+
+
+def _handle_only_binary(
+    option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
+    existing = _get_format_control(parser.values, option)
+    FormatControl.handle_mutual_excludes(
+        value,
+        existing.only_binary,
+        existing.no_binary,
+    )
+
+
+def no_binary() -> Option:
+    format_control = FormatControl(set(), set())
+    return Option(
+        "--no-binary",
+        dest="format_control",
+        action="callback",
+        callback=_handle_no_binary,
+        type="str",
+        default=format_control,
+        help="Do not use binary packages. Can be supplied multiple times, and "
+        'each time adds to the existing value. Accepts either ":all:" to '
+        'disable all binary packages, ":none:" to empty the set (notice '
+        "the colons), or one or more package names with commas between "
+        "them (no colons). Note that some packages are tricky to compile "
+        "and may fail to install when this option is used on them.",
+    )
+
+
+def only_binary() -> Option:
+    format_control = FormatControl(set(), set())
+    return Option(
+        "--only-binary",
+        dest="format_control",
+        action="callback",
+        callback=_handle_only_binary,
+        type="str",
+        default=format_control,
+        help="Do not use source packages. Can be supplied multiple times, and "
+        'each time adds to the existing value. Accepts either ":all:" to '
+        'disable all source packages, ":none:" to empty the set, or one '
+        "or more package names with commas between them. Packages "
+        "without binary distributions will fail to install when this "
+        "option is used on them.",
+    )
+
+
+platforms: Callable[..., Option] = partial(
+    Option,
+    "--platform",
+    dest="platforms",
+    metavar="platform",
+    action="append",
+    default=None,
+    help=(
+        "Only use wheels compatible with . Defaults to the "
+        "platform of the running system. Use this option multiple times to "
+        "specify multiple platforms supported by the target interpreter."
+    ),
+)
+
+
+# This was made a separate function for unit-testing purposes.
+def _convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]:
+    """
+    Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.
+
+    :return: A 2-tuple (version_info, error_msg), where `error_msg` is
+        non-None if and only if there was a parsing error.
+    """
+    if not value:
+        # The empty string is the same as not providing a value.
+        return (None, None)
+
+    parts = value.split(".")
+    if len(parts) > 3:
+        return ((), "at most three version parts are allowed")
+
+    if len(parts) == 1:
+        # Then we are in the case of "3" or "37".
+        value = parts[0]
+        if len(value) > 1:
+            parts = [value[0], value[1:]]
+
+    try:
+        version_info = tuple(int(part) for part in parts)
+    except ValueError:
+        return ((), "each version part must be an integer")
+
+    return (version_info, None)
+
+
+def _handle_python_version(
+    option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
+    """
+    Handle a provided --python-version value.
+    """
+    version_info, error_msg = _convert_python_version(value)
+    if error_msg is not None:
+        msg = f"invalid --python-version value: {value!r}: {error_msg}"
+        raise_option_error(parser, option=option, msg=msg)
+
+    parser.values.python_version = version_info
+
+
+python_version: Callable[..., Option] = partial(
+    Option,
+    "--python-version",
+    dest="python_version",
+    metavar="python_version",
+    action="callback",
+    callback=_handle_python_version,
+    type="str",
+    default=None,
+    help=dedent(
+        """\
+    The Python interpreter version to use for wheel and "Requires-Python"
+    compatibility checks. Defaults to a version derived from the running
+    interpreter. The version can be specified using up to three dot-separated
+    integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
+    version can also be given as a string without dots (e.g. "37" for 3.7.0).
+    """
+    ),
+)
+
+
+implementation: Callable[..., Option] = partial(
+    Option,
+    "--implementation",
+    dest="implementation",
+    metavar="implementation",
+    default=None,
+    help=(
+        "Only use wheels compatible with Python "
+        "implementation , e.g. 'pp', 'jy', 'cp', "
+        " or 'ip'. If not specified, then the current "
+        "interpreter implementation is used.  Use 'py' to force "
+        "implementation-agnostic wheels."
+    ),
+)
+
+
+abis: Callable[..., Option] = partial(
+    Option,
+    "--abi",
+    dest="abis",
+    metavar="abi",
+    action="append",
+    default=None,
+    help=(
+        "Only use wheels compatible with Python abi , e.g. 'pypy_41'. "
+        "If not specified, then the current interpreter abi tag is used. "
+        "Use this option multiple times to specify multiple abis supported "
+        "by the target interpreter. Generally you will need to specify "
+        "--implementation, --platform, and --python-version when using this "
+        "option."
+    ),
+)
+
+
+def add_target_python_options(cmd_opts: OptionGroup) -> None:
+    cmd_opts.add_option(platforms())
+    cmd_opts.add_option(python_version())
+    cmd_opts.add_option(implementation())
+    cmd_opts.add_option(abis())
+
+
+def make_target_python(options: Values) -> TargetPython:
+    target_python = TargetPython(
+        platforms=options.platforms,
+        py_version_info=options.python_version,
+        abis=options.abis,
+        implementation=options.implementation,
+    )
+
+    return target_python
+
+
+def prefer_binary() -> Option:
+    return Option(
+        "--prefer-binary",
+        dest="prefer_binary",
+        action="store_true",
+        default=False,
+        help=(
+            "Prefer binary packages over source packages, even if the "
+            "source packages are newer."
+        ),
+    )
+
+
+cache_dir: Callable[..., Option] = partial(
+    PipOption,
+    "--cache-dir",
+    dest="cache_dir",
+    default=USER_CACHE_DIR,
+    metavar="dir",
+    type="path",
+    help="Store the cache data in .",
+)
+
+
+def _handle_no_cache_dir(
+    option: Option, opt: str, value: str, parser: OptionParser
+) -> None:
+    """
+    Process a value provided for the --no-cache-dir option.
+
+    This is an optparse.Option callback for the --no-cache-dir option.
+    """
+    # The value argument will be None if --no-cache-dir is passed via the
+    # command-line, since the option doesn't accept arguments.  However,
+    # the value can be non-None if the option is triggered e.g. by an
+    # environment variable, like PIP_NO_CACHE_DIR=true.
+    if value is not None:
+        # Then parse the string value to get argument error-checking.
+        try:
+            strtobool(value)
+        except ValueError as exc:
+            raise_option_error(parser, option=option, msg=str(exc))
+
+    # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
+    # converted to 0 (like "false" or "no") caused cache_dir to be disabled
+    # rather than enabled (logic would say the latter).  Thus, we disable
+    # the cache directory not just on values that parse to True, but (for
+    # backwards compatibility reasons) also on values that parse to False.
+    # In other words, always set it to False if the option is provided in
+    # some (valid) form.
+    parser.values.cache_dir = False
+
+
+no_cache: Callable[..., Option] = partial(
+    Option,
+    "--no-cache-dir",
+    dest="cache_dir",
+    action="callback",
+    callback=_handle_no_cache_dir,
+    help="Disable the cache.",
+)
+
+no_deps: Callable[..., Option] = partial(
+    Option,
+    "--no-deps",
+    "--no-dependencies",
+    dest="ignore_dependencies",
+    action="store_true",
+    default=False,
+    help="Don't install package dependencies.",
+)
+
+ignore_requires_python: Callable[..., Option] = partial(
+    Option,
+    "--ignore-requires-python",
+    dest="ignore_requires_python",
+    action="store_true",
+    help="Ignore the Requires-Python information.",
+)
+
+no_build_isolation: Callable[..., Option] = partial(
+    Option,
+    "--no-build-isolation",
+    dest="build_isolation",
+    action="store_false",
+    default=True,
+    help="Disable isolation when building a modern source distribution. "
+    "Build dependencies specified by PEP 518 must be already installed "
+    "if this option is used.",
+)
+
+check_build_deps: Callable[..., Option] = partial(
+    Option,
+    "--check-build-dependencies",
+    dest="check_build_deps",
+    action="store_true",
+    default=False,
+    help="Check the build dependencies when PEP517 is used.",
+)
+
+
+def _handle_no_use_pep517(
+    option: Option, opt: str, value: str, parser: OptionParser
+) -> None:
+    """
+    Process a value provided for the --no-use-pep517 option.
+
+    This is an optparse.Option callback for the no_use_pep517 option.
+    """
+    # Since --no-use-pep517 doesn't accept arguments, the value argument
+    # will be None if --no-use-pep517 is passed via the command-line.
+    # However, the value can be non-None if the option is triggered e.g.
+    # by an environment variable, for example "PIP_NO_USE_PEP517=true".
+    if value is not None:
+        msg = """A value was passed for --no-use-pep517,
+        probably using either the PIP_NO_USE_PEP517 environment variable
+        or the "no-use-pep517" config file option. Use an appropriate value
+        of the PIP_USE_PEP517 environment variable or the "use-pep517"
+        config file option instead.
+        """
+        raise_option_error(parser, option=option, msg=msg)
+
+    # If the user doesn't wish to use PEP 517, check that setuptools and wheel
+    # are installed and raise an error if they are not.
+    packages = ("setuptools", "wheel")
+    if not all(importlib.util.find_spec(package) for package in packages):
+        msg = (
+            f"It is not possible to use --no-use-pep517 "
+            f"without {' and '.join(packages)} installed."
+        )
+        raise_option_error(parser, option=option, msg=msg)
+
+    # Otherwise, --no-use-pep517 was passed via the command-line.
+    parser.values.use_pep517 = False
+
+
+use_pep517: Any = partial(
+    Option,
+    "--use-pep517",
+    dest="use_pep517",
+    action="store_true",
+    default=None,
+    help="Use PEP 517 for building source distributions "
+    "(use --no-use-pep517 to force legacy behaviour).",
+)
+
+no_use_pep517: Any = partial(
+    Option,
+    "--no-use-pep517",
+    dest="use_pep517",
+    action="callback",
+    callback=_handle_no_use_pep517,
+    default=None,
+    help=SUPPRESS_HELP,
+)
+
+
+def _handle_config_settings(
+    option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
+    key, sep, val = value.partition("=")
+    if sep != "=":
+        parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL")
+    dest = getattr(parser.values, option.dest)
+    if dest is None:
+        dest = {}
+        setattr(parser.values, option.dest, dest)
+    if key in dest:
+        if isinstance(dest[key], list):
+            dest[key].append(val)
+        else:
+            dest[key] = [dest[key], val]
+    else:
+        dest[key] = val
+
+
+config_settings: Callable[..., Option] = partial(
+    Option,
+    "-C",
+    "--config-settings",
+    dest="config_settings",
+    type=str,
+    action="callback",
+    callback=_handle_config_settings,
+    metavar="settings",
+    help="Configuration settings to be passed to the PEP 517 build backend. "
+    "Settings take the form KEY=VALUE. Use multiple --config-settings options "
+    "to pass multiple keys to the backend.",
+)
+
+build_options: Callable[..., Option] = partial(
+    Option,
+    "--build-option",
+    dest="build_options",
+    metavar="options",
+    action="append",
+    help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
+)
+
+global_options: Callable[..., Option] = partial(
+    Option,
+    "--global-option",
+    dest="global_options",
+    action="append",
+    metavar="options",
+    help="Extra global options to be supplied to the setup.py "
+    "call before the install or bdist_wheel command.",
+)
+
+no_clean: Callable[..., Option] = partial(
+    Option,
+    "--no-clean",
+    action="store_true",
+    default=False,
+    help="Don't clean up build directories.",
+)
+
+pre: Callable[..., Option] = partial(
+    Option,
+    "--pre",
+    action="store_true",
+    default=False,
+    help="Include pre-release and development versions. By default, "
+    "pip only finds stable versions.",
+)
+
+disable_pip_version_check: Callable[..., Option] = partial(
+    Option,
+    "--disable-pip-version-check",
+    dest="disable_pip_version_check",
+    action="store_true",
+    default=True,
+    help="Don't periodically check PyPI to determine whether a new version "
+    "of pip is available for download. Implied with --no-index.",
+)
+
+root_user_action: Callable[..., Option] = partial(
+    Option,
+    "--root-user-action",
+    dest="root_user_action",
+    default="warn",
+    choices=["warn", "ignore"],
+    help="Action if pip is run as a root user. By default, a warning message is shown.",
+)
+
+
+def _handle_merge_hash(
+    option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
+    """Given a value spelled "algo:digest", append the digest to a list
+    pointed to in a dict by the algo name."""
+    if not parser.values.hashes:
+        parser.values.hashes = {}
+    try:
+        algo, digest = value.split(":", 1)
+    except ValueError:
+        parser.error(
+            f"Arguments to {opt_str} must be a hash name "
+            "followed by a value, like --hash=sha256:"
+            "abcde..."
+        )
+    if algo not in STRONG_HASHES:
+        parser.error(
+            "Allowed hash algorithms for {} are {}.".format(
+                opt_str, ", ".join(STRONG_HASHES)
+            )
+        )
+    parser.values.hashes.setdefault(algo, []).append(digest)
+
+
+hash: Callable[..., Option] = partial(
+    Option,
+    "--hash",
+    # Hash values eventually end up in InstallRequirement.hashes due to
+    # __dict__ copying in process_line().
+    dest="hashes",
+    action="callback",
+    callback=_handle_merge_hash,
+    type="string",
+    help="Verify that the package's archive matches this "
+    "hash before installing. Example: --hash=sha256:abcdef...",
+)
+
+
+require_hashes: Callable[..., Option] = partial(
+    Option,
+    "--require-hashes",
+    dest="require_hashes",
+    action="store_true",
+    default=False,
+    help="Require a hash to check each requirement against, for "
+    "repeatable installs. This option is implied when any package in a "
+    "requirements file has a --hash option.",
+)
+
+
+list_path: Callable[..., Option] = partial(
+    PipOption,
+    "--path",
+    dest="path",
+    type="path",
+    action="append",
+    help="Restrict to the specified installation path for listing "
+    "packages (can be used multiple times).",
+)
+
+
+def check_list_path_option(options: Values) -> None:
+    if options.path and (options.user or options.local):
+        raise CommandError("Cannot combine '--path' with '--user' or '--local'")
+
+
+list_exclude: Callable[..., Option] = partial(
+    PipOption,
+    "--exclude",
+    dest="excludes",
+    action="append",
+    metavar="package",
+    type="package_name",
+    help="Exclude specified package from the output",
+)
+
+
+no_python_version_warning: Callable[..., Option] = partial(
+    Option,
+    "--no-python-version-warning",
+    dest="no_python_version_warning",
+    action="store_true",
+    default=False,
+    help="Silence deprecation warnings for upcoming unsupported Pythons.",
+)
+
+
+# Features that are now always on. A warning is printed if they are used.
+ALWAYS_ENABLED_FEATURES = [
+    "no-binary-enable-wheel-cache",  # always on since 23.1
+]
+
+use_new_feature: Callable[..., Option] = partial(
+    Option,
+    "--use-feature",
+    dest="features_enabled",
+    metavar="feature",
+    action="append",
+    default=[],
+    choices=[
+        "fast-deps",
+        "truststore",
+    ]
+    + ALWAYS_ENABLED_FEATURES,
+    help="Enable new functionality, that may be backward incompatible.",
+)
+
+use_deprecated_feature: Callable[..., Option] = partial(
+    Option,
+    "--use-deprecated",
+    dest="deprecated_features_enabled",
+    metavar="feature",
+    action="append",
+    default=[],
+    choices=[
+        "legacy-resolver",
+    ],
+    help=("Enable deprecated functionality, that will be removed in the future."),
+)
+
+
+##########
+# groups #
+##########
+
+general_group: Dict[str, Any] = {
+    "name": "General Options",
+    "options": [
+        help_,
+        debug_mode,
+        isolated_mode,
+        require_virtualenv,
+        python,
+        verbose,
+        version,
+        quiet,
+        log,
+        no_input,
+        keyring_provider,
+        proxy,
+        retries,
+        timeout,
+        exists_action,
+        trusted_host,
+        cert,
+        client_cert,
+        cache_dir,
+        no_cache,
+        disable_pip_version_check,
+        no_color,
+        no_python_version_warning,
+        use_new_feature,
+        use_deprecated_feature,
+    ],
+}
+
+index_group: Dict[str, Any] = {
+    "name": "Package Index Options",
+    "options": [
+        index_url,
+        extra_index_url,
+        no_index,
+        find_links,
+    ],
+}
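+
+
+# --- Editor's sketch (illustrative, not part of upstream pip) ----------------
+# _convert_python_version() is deliberately a standalone helper (see the
+# comment above its definition), so it is easy to sanity-check. The inputs
+# below are arbitrary examples of the (version_info, error_msg) contract
+# described in its docstring.
+if __name__ == "__main__":
+    for raw in ("", "3", "37", "3.7.3", "3.7.3.2", "x.y"):
+        print(repr(raw), "->", _convert_python_version(raw))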
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/cli/command_context.py b/.venv/lib/python3.12/site-packages/pip/_internal/cli/command_context.py
new file mode 100644
index 0000000..139995a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/cli/command_context.py
@@ -0,0 +1,27 @@
+from contextlib import ExitStack, contextmanager
+from typing import ContextManager, Generator, TypeVar
+
+_T = TypeVar("_T", covariant=True)
+
+
+class CommandContextMixIn:
+    def __init__(self) -> None:
+        super().__init__()
+        self._in_main_context = False
+        self._main_context = ExitStack()
+
+    @contextmanager
+    def main_context(self) -> Generator[None, None, None]:
+        assert not self._in_main_context
+
+        self._in_main_context = True
+        try:
+            with self._main_context:
+                yield
+        finally:
+            self._in_main_context = False
+
+    def enter_context(self, context_provider: ContextManager[_T]) -> _T:
+        assert self._in_main_context
+
+        return self._main_context.enter_context(context_provider)
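+
+
+# --- Editor's sketch (illustrative, not part of upstream pip) ----------------
+# Intended usage of CommandContextMixIn: enter_context() is only legal while
+# the single main_context() block is active, and everything registered there
+# is released when that block exits. The _resource() manager below is a
+# hypothetical stand-in for real resources such as temp directories.
+if __name__ == "__main__":
+    @contextmanager
+    def _resource() -> Generator[str, None, None]:
+        print("acquired")
+        try:
+            yield "handle"
+        finally:
+            print("released")
+
+    mixin = CommandContextMixIn()
+    with mixin.main_context():
+        handle = mixin.enter_context(_resource())
+        print("using", handle)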
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/cli/main.py b/.venv/lib/python3.12/site-packages/pip/_internal/cli/main.py
new file mode 100644
index 0000000..7e061f5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/cli/main.py
@@ -0,0 +1,79 @@
+"""Primary application entrypoint.
+"""
+import locale
+import logging
+import os
+import sys
+import warnings
+from typing import List, Optional
+
+from pip._internal.cli.autocompletion import autocomplete
+from pip._internal.cli.main_parser import parse_command
+from pip._internal.commands import create_command
+from pip._internal.exceptions import PipError
+from pip._internal.utils import deprecation
+
+logger = logging.getLogger(__name__)
+
+
+# Do not import and use main() directly! Using it directly is actively
+# discouraged by pip's maintainers. The name, location and behavior of
+# this function is subject to change, so calling it directly is not
+# portable across different pip versions.
+
+# In addition, running pip in-process is unsupported and unsafe. This is
+# elaborated in detail at
+# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program.
+# That document also provides suggestions that should work for nearly
+# all users that are considering importing and using main() directly.
+
+# However, we know that certain users will still want to invoke pip
+# in-process. If you understand and accept the implications of using pip
+# in an unsupported manner, the best approach is to use runpy to avoid
+# depending on the exact location of this entry point.
+
+# The following example shows how to use runpy to invoke pip in that
+# case:
+#
+#     sys.argv = ["pip", your, args, here]
+#     runpy.run_module("pip", run_name="__main__")
+#
+# Note that this will exit the process after running, unlike a direct
+# call to main. As it is not safe to do any processing after calling
+# main, this should not be an issue in practice.
+
+
+def main(args: Optional[List[str]] = None) -> int:
+    if args is None:
+        args = sys.argv[1:]
+
+    # Suppress the pkg_resources deprecation warning
+    # Note - we use a module of .*pkg_resources to cover
+    # the normal case (pip._vendor.pkg_resources) and the
+    # devendored case (a bare pkg_resources)
+    warnings.filterwarnings(
+        action="ignore", category=DeprecationWarning, module=".*pkg_resources"
+    )
+
+    # Configure our deprecation warnings to be sent through loggers
+    deprecation.install_warning_logger()
+
+    autocomplete()
+
+    try:
+        cmd_name, cmd_args = parse_command(args)
+    except PipError as exc:
+        sys.stderr.write(f"ERROR: {exc}")
+        sys.stderr.write(os.linesep)
+        sys.exit(1)
+
+    # Needed for locale.getpreferredencoding(False) to work
+    # in pip._internal.utils.encoding.auto_decode
+    try:
+        locale.setlocale(locale.LC_ALL, "")
+    except locale.Error as e:
+        # setlocale can apparently crash if locales are uninitialized
+        logger.debug("Ignoring error %s when setting locale", e)
+    command = create_command(cmd_name, isolated=("--isolated" in cmd_args))
+
+    return command.main(cmd_args)
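As the comment block above stresses, main() should not be imported and called in-process. A minimal sketch of the supported pattern, running pip as a child process (the requests package name is just a placeholder):

    import subprocess
    import sys

    # Invoke pip in a separate process rather than importing main() directly.
    subprocess.run(
        [sys.executable, "-m", "pip", "install", "requests"],
        check=True,
    )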
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/cli/main_parser.py b/.venv/lib/python3.12/site-packages/pip/_internal/cli/main_parser.py
new file mode 100644
index 0000000..5ade356
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/cli/main_parser.py
@@ -0,0 +1,134 @@
+"""A single place for constructing and exposing the main parser
+"""
+
+import os
+import subprocess
+import sys
+from typing import List, Optional, Tuple
+
+from pip._internal.build_env import get_runnable_pip
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
+from pip._internal.commands import commands_dict, get_similar_commands
+from pip._internal.exceptions import CommandError
+from pip._internal.utils.misc import get_pip_version, get_prog
+
+__all__ = ["create_main_parser", "parse_command"]
+
+
+def create_main_parser() -> ConfigOptionParser:
+    """Creates and returns the main parser for pip's CLI"""
+
+    parser = ConfigOptionParser(
+        usage="\n%prog  [options]",
+        add_help_option=False,
+        formatter=UpdatingDefaultsHelpFormatter(),
+        name="global",
+        prog=get_prog(),
+    )
+    parser.disable_interspersed_args()
+
+    parser.version = get_pip_version()
+
+    # add the general options
+    gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
+    parser.add_option_group(gen_opts)
+
+    # so the help formatter knows
+    parser.main = True  # type: ignore
+
+    # create command listing for description
+    description = [""] + [
+        f"{name:27} {command_info.summary}"
+        for name, command_info in commands_dict.items()
+    ]
+    parser.description = "\n".join(description)
+
+    return parser
+
+
+def identify_python_interpreter(python: str) -> Optional[str]:
+    # If the named file exists, use it.
+    # If it's a directory, assume it's a virtual environment and
+    # look for the environment's Python executable.
+    if os.path.exists(python):
+        if os.path.isdir(python):
+            # bin/python for Unix, Scripts/python.exe for Windows
+            # Try both in case of odd cases like cygwin.
+            for exe in ("bin/python", "Scripts/python.exe"):
+                py = os.path.join(python, exe)
+                if os.path.exists(py):
+                    return py
+        else:
+            return python
+
+    # Could not find the interpreter specified
+    return None
+
+
+def parse_command(args: List[str]) -> Tuple[str, List[str]]:
+    parser = create_main_parser()
+
+    # Note: parser calls disable_interspersed_args(), so the result of this
+    # call is to split the initial args into the general options before the
+    # subcommand and everything else.
+    # For example:
+    #  args: ['--timeout=5', 'install', '--user', 'INITools']
+    #  general_options: ['--timeout=5']
+    #  args_else: ['install', '--user', 'INITools']
+    general_options, args_else = parser.parse_args(args)
+
+    # --python
+    if general_options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
+        # Re-invoke pip using the specified Python interpreter
+        interpreter = identify_python_interpreter(general_options.python)
+        if interpreter is None:
+            raise CommandError(
+                f"Could not locate Python interpreter {general_options.python}"
+            )
+
+        pip_cmd = [
+            interpreter,
+            get_runnable_pip(),
+        ]
+        pip_cmd.extend(args)
+
+        # Set a flag so the child doesn't re-invoke itself, causing
+        # an infinite loop.
+        os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1"
+        returncode = 0
+        try:
+            proc = subprocess.run(pip_cmd)
+            returncode = proc.returncode
+        except (subprocess.SubprocessError, OSError) as exc:
+            raise CommandError(f"Failed to run pip under {interpreter}: {exc}")
+        sys.exit(returncode)
+
+    # --version
+    if general_options.version:
+        sys.stdout.write(parser.version)
+        sys.stdout.write(os.linesep)
+        sys.exit()
+
+    # pip || pip help -> print_help()
+    if not args_else or (args_else[0] == "help" and len(args_else) == 1):
+        parser.print_help()
+        sys.exit()
+
+    # the subcommand name
+    cmd_name = args_else[0]
+
+    if cmd_name not in commands_dict:
+        guess = get_similar_commands(cmd_name)
+
+        msg = [f'unknown command "{cmd_name}"']
+        if guess:
+            msg.append(f'maybe you meant "{guess}"')
+
+        raise CommandError(" - ".join(msg))
+
+    # all the args without the subcommand
+    cmd_args = args[:]
+    cmd_args.remove(cmd_name)
+
+    return cmd_name, cmd_args
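To illustrate the split described in parse_command() above, a small sketch using the (internal, unsupported) parser directly; the argument list mirrors the comment's example:

    from pip._internal.cli.main_parser import parse_command

    cmd_name, cmd_args = parse_command(["--timeout=5", "install", "--user", "INITools"])
    # cmd_name == "install"
    # cmd_args == ["--timeout=5", "--user", "INITools"]  (only the subcommand name is removed)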
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/cli/parser.py b/.venv/lib/python3.12/site-packages/pip/_internal/cli/parser.py
new file mode 100644
index 0000000..ae554b2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/cli/parser.py
@@ -0,0 +1,294 @@
+"""Base option parser setup"""
+
+import logging
+import optparse
+import shutil
+import sys
+import textwrap
+from contextlib import suppress
+from typing import Any, Dict, Generator, List, Tuple
+
+from pip._internal.cli.status_codes import UNKNOWN_ERROR
+from pip._internal.configuration import Configuration, ConfigurationError
+from pip._internal.utils.misc import redact_auth_from_url, strtobool
+
+logger = logging.getLogger(__name__)
+
+
+class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
+    """A prettier/less verbose help formatter for optparse."""
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        # help position must be aligned with __init__.parseopts.description
+        kwargs["max_help_position"] = 30
+        kwargs["indent_increment"] = 1
+        kwargs["width"] = shutil.get_terminal_size()[0] - 2
+        super().__init__(*args, **kwargs)
+
+    def format_option_strings(self, option: optparse.Option) -> str:
+        return self._format_option_strings(option)
+
+    def _format_option_strings(
+        self, option: optparse.Option, mvarfmt: str = " <{}>", optsep: str = ", "
+    ) -> str:
+        """
+        Return a comma-separated list of option strings and metavars.
+
+        :param option:  tuple of (short opt, long opt), e.g: ('-f', '--format')
+        :param mvarfmt: metavar format string
+        :param optsep:  separator
+        """
+        opts = []
+
+        if option._short_opts:
+            opts.append(option._short_opts[0])
+        if option._long_opts:
+            opts.append(option._long_opts[0])
+        if len(opts) > 1:
+            opts.insert(1, optsep)
+
+        if option.takes_value():
+            assert option.dest is not None
+            metavar = option.metavar or option.dest.lower()
+            opts.append(mvarfmt.format(metavar.lower()))
+
+        return "".join(opts)
+
+    def format_heading(self, heading: str) -> str:
+        if heading == "Options":
+            return ""
+        return heading + ":\n"
+
+    def format_usage(self, usage: str) -> str:
+        """
+        Ensure there is only one newline between usage and the first heading
+        if there is no description.
+        """
+        msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), "  "))
+        return msg
+
+    def format_description(self, description: str) -> str:
+        # leave full control over description to us
+        if description:
+            if hasattr(self.parser, "main"):
+                label = "Commands"
+            else:
+                label = "Description"
+            # some doc strings have initial newlines, some don't
+            description = description.lstrip("\n")
+            # some doc strings have final newlines and spaces, some don't
+            description = description.rstrip()
+            # dedent, then reindent
+            description = self.indent_lines(textwrap.dedent(description), "  ")
+            description = f"{label}:\n{description}\n"
+            return description
+        else:
+            return ""
+
+    def format_epilog(self, epilog: str) -> str:
+        # leave full control over epilog to us
+        if epilog:
+            return epilog
+        else:
+            return ""
+
+    def indent_lines(self, text: str, indent: str) -> str:
+        new_lines = [indent + line for line in text.split("\n")]
+        return "\n".join(new_lines)
+
+
+class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
+    """Custom help formatter for use in ConfigOptionParser.
+
+    This updates the defaults before expanding them, allowing
+    them to show up correctly in the help listing.
+
+    Also redacts auth from URL-type options.
+    """
+
+    def expand_default(self, option: optparse.Option) -> str:
+        default_values = None
+        if self.parser is not None:
+            assert isinstance(self.parser, ConfigOptionParser)
+            self.parser._update_defaults(self.parser.defaults)
+            assert option.dest is not None
+            default_values = self.parser.defaults.get(option.dest)
+        help_text = super().expand_default(option)
+
+        if default_values and option.metavar == "URL":
+            if isinstance(default_values, str):
+                default_values = [default_values]
+
+            # If it's not a list, we should abort and just return the help text
+            if not isinstance(default_values, list):
+                default_values = []
+
+            for val in default_values:
+                help_text = help_text.replace(val, redact_auth_from_url(val))
+
+        return help_text
+
+
+class CustomOptionParser(optparse.OptionParser):
+    def insert_option_group(
+        self, idx: int, *args: Any, **kwargs: Any
+    ) -> optparse.OptionGroup:
+        """Insert an OptionGroup at a given position."""
+        group = self.add_option_group(*args, **kwargs)
+
+        self.option_groups.pop()
+        self.option_groups.insert(idx, group)
+
+        return group
+
+    @property
+    def option_list_all(self) -> List[optparse.Option]:
+        """Get a list of all options, including those in option groups."""
+        res = self.option_list[:]
+        for i in self.option_groups:
+            res.extend(i.option_list)
+
+        return res
+
+
+class ConfigOptionParser(CustomOptionParser):
+    """Custom option parser which updates its defaults by checking the
+    configuration files and environmental variables"""
+
+    def __init__(
+        self,
+        *args: Any,
+        name: str,
+        isolated: bool = False,
+        **kwargs: Any,
+    ) -> None:
+        self.name = name
+        self.config = Configuration(isolated)
+
+        assert self.name
+        super().__init__(*args, **kwargs)
+
+    def check_default(self, option: optparse.Option, key: str, val: Any) -> Any:
+        try:
+            return option.check_value(key, val)
+        except optparse.OptionValueError as exc:
+            print(f"An error occurred during configuration: {exc}")
+            sys.exit(3)
+
+    def _get_ordered_configuration_items(
+        self,
+    ) -> Generator[Tuple[str, Any], None, None]:
+        # Configuration gives keys in an unordered manner. Order them.
+        override_order = ["global", self.name, ":env:"]
+
+        # Pool the options into different groups
+        section_items: Dict[str, List[Tuple[str, Any]]] = {
+            name: [] for name in override_order
+        }
+        for section_key, val in self.config.items():
+            # ignore empty values
+            if not val:
+                logger.debug(
+                    "Ignoring configuration key '%s' as it's value is empty.",
+                    section_key,
+                )
+                continue
+
+            section, key = section_key.split(".", 1)
+            if section in override_order:
+                section_items[section].append((key, val))
+
+        # Yield each group in their override order
+        for section in override_order:
+            for key, val in section_items[section]:
+                yield key, val
+
+    def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]:
+        """Updates the given defaults with values from the config files and
+        the environ. Does a little special handling for certain types of
+        options (lists)."""
+
+        # Accumulate complex default state.
+        self.values = optparse.Values(self.defaults)
+        late_eval = set()
+        # Then set the options with those values
+        for key, val in self._get_ordered_configuration_items():
+            # '--' because configuration supports only long names
+            option = self.get_option("--" + key)
+
+            # Ignore options not present in this parser. E.g. non-globals put
+            # in [global] by users that want them to apply to all applicable
+            # commands.
+            if option is None:
+                continue
+
+            assert option.dest is not None
+
+            if option.action in ("store_true", "store_false"):
+                try:
+                    val = strtobool(val)
+                except ValueError:
+                    self.error(
+                        f"{val} is not a valid value for {key} option, "
+                        "please specify a boolean value like yes/no, "
+                        "true/false or 1/0 instead."
+                    )
+            elif option.action == "count":
+                with suppress(ValueError):
+                    val = strtobool(val)
+                with suppress(ValueError):
+                    val = int(val)
+                if not isinstance(val, int) or val < 0:
+                    self.error(
+                        f"{val} is not a valid value for {key} option, "
+                        "please instead specify either a non-negative integer "
+                        "or a boolean value like yes/no or false/true "
+                        "which is equivalent to 1/0."
+                    )
+            elif option.action == "append":
+                val = val.split()
+                val = [self.check_default(option, key, v) for v in val]
+            elif option.action == "callback":
+                assert option.callback is not None
+                late_eval.add(option.dest)
+                opt_str = option.get_opt_string()
+                val = option.convert_value(opt_str, val)
+                # From take_action
+                args = option.callback_args or ()
+                kwargs = option.callback_kwargs or {}
+                option.callback(option, opt_str, val, self, *args, **kwargs)
+            else:
+                val = self.check_default(option, key, val)
+
+            defaults[option.dest] = val
+
+        for key in late_eval:
+            defaults[key] = getattr(self.values, key)
+        self.values = None
+        return defaults
+
+    def get_default_values(self) -> optparse.Values:
+        """Overriding to make updating the defaults after instantiation of
+        the option parser possible, _update_defaults() does the dirty work."""
+        if not self.process_default_values:
+            # Old, pre-Optik 1.5 behaviour.
+            return optparse.Values(self.defaults)
+
+        # Load the configuration, or error out in case of an error
+        try:
+            self.config.load()
+        except ConfigurationError as err:
+            self.exit(UNKNOWN_ERROR, str(err))
+
+        defaults = self._update_defaults(self.defaults.copy())  # ours
+        for option in self._get_all_options():
+            assert option.dest is not None
+            default = defaults.get(option.dest)
+            if isinstance(default, str):
+                opt_str = option.get_opt_string()
+                defaults[option.dest] = option.check_value(opt_str, default)
+        return optparse.Values(defaults)
+
+    def error(self, msg: str) -> None:
+        self.print_usage(sys.stderr)
+        self.exit(UNKNOWN_ERROR, f"{msg}\n")
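One detail worth calling out from _update_defaults(): flag options (store_true/store_false) read from pip.conf or PIP_* environment variables are coerced with strtobool(), so boolean-ish strings are accepted. A tiny sketch of that coercion (values chosen for illustration):

    from pip._internal.utils.misc import strtobool

    assert strtobool("yes") == 1   # accepted for store_true/store_false options
    assert strtobool("false") == 0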
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/cli/progress_bars.py b/.venv/lib/python3.12/site-packages/pip/_internal/cli/progress_bars.py
new file mode 100644
index 0000000..0ad1403
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/cli/progress_bars.py
@@ -0,0 +1,68 @@
+import functools
+from typing import Callable, Generator, Iterable, Iterator, Optional, Tuple
+
+from pip._vendor.rich.progress import (
+    BarColumn,
+    DownloadColumn,
+    FileSizeColumn,
+    Progress,
+    ProgressColumn,
+    SpinnerColumn,
+    TextColumn,
+    TimeElapsedColumn,
+    TimeRemainingColumn,
+    TransferSpeedColumn,
+)
+
+from pip._internal.utils.logging import get_indentation
+
+DownloadProgressRenderer = Callable[[Iterable[bytes]], Iterator[bytes]]
+
+
+def _rich_progress_bar(
+    iterable: Iterable[bytes],
+    *,
+    bar_type: str,
+    size: int,
+) -> Generator[bytes, None, None]:
+    assert bar_type == "on", "This should only be used in the default mode."
+
+    if not size:
+        total = float("inf")
+        columns: Tuple[ProgressColumn, ...] = (
+            TextColumn("[progress.description]{task.description}"),
+            SpinnerColumn("line", speed=1.5),
+            FileSizeColumn(),
+            TransferSpeedColumn(),
+            TimeElapsedColumn(),
+        )
+    else:
+        total = size
+        columns = (
+            TextColumn("[progress.description]{task.description}"),
+            BarColumn(),
+            DownloadColumn(),
+            TransferSpeedColumn(),
+            TextColumn("eta"),
+            TimeRemainingColumn(),
+        )
+
+    progress = Progress(*columns, refresh_per_second=30)
+    task_id = progress.add_task(" " * (get_indentation() + 2), total=total)
+    with progress:
+        for chunk in iterable:
+            yield chunk
+            progress.update(task_id, advance=len(chunk))
+
+
+def get_download_progress_renderer(
+    *, bar_type: str, size: Optional[int] = None
+) -> DownloadProgressRenderer:
+    """Get an object that can be used to render the download progress.
+
+    Returns a callable, that takes an iterable to "wrap".
+    """
+    if bar_type == "on":
+        return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size)
+    else:
+        return iter  # no-op, when passed an iterator
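A short sketch of how the renderer above is meant to be driven: it wraps an iterable of byte chunks and advances the bar as the caller consumes them. The chunk values here are placeholders, and this uses an internal pip API purely for illustration:

    from pip._internal.cli.progress_bars import get_download_progress_renderer

    chunks = [b"spam", b"eggs", b"ham"]
    renderer = get_download_progress_renderer(bar_type="on", size=len(b"".join(chunks)))
    for chunk in renderer(chunks):
        pass  # the bar advances by len(chunk) on each iteration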
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/cli/req_command.py b/.venv/lib/python3.12/site-packages/pip/_internal/cli/req_command.py
new file mode 100644
index 0000000..6f2f79c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/cli/req_command.py
@@ -0,0 +1,505 @@
+"""Contains the Command base classes that depend on PipSession.
+
+The classes in this module are in a separate module so the commands not
+needing download / PackageFinder capability don't unnecessarily import the
+PackageFinder machinery and all its vendored dependencies, etc.
+"""
+
+import logging
+import os
+import sys
+from functools import partial
+from optparse import Values
+from typing import TYPE_CHECKING, Any, List, Optional, Tuple
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.command_context import CommandContextMixIn
+from pip._internal.exceptions import CommandError, PreviousBuildDirError
+from pip._internal.index.collector import LinkCollector
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.models.target_python import TargetPython
+from pip._internal.network.session import PipSession
+from pip._internal.operations.build.build_tracker import BuildTracker
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req.constructors import (
+    install_req_from_editable,
+    install_req_from_line,
+    install_req_from_parsed_requirement,
+    install_req_from_req_string,
+)
+from pip._internal.req.req_file import parse_requirements
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.resolution.base import BaseResolver
+from pip._internal.self_outdated_check import pip_self_version_check
+from pip._internal.utils.temp_dir import (
+    TempDirectory,
+    TempDirectoryTypeRegistry,
+    tempdir_kinds,
+)
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+if TYPE_CHECKING:
+    from ssl import SSLContext
+
+logger = logging.getLogger(__name__)
+
+
+def _create_truststore_ssl_context() -> Optional["SSLContext"]:
+    if sys.version_info < (3, 10):
+        raise CommandError("The truststore feature is only available for Python 3.10+")
+
+    try:
+        import ssl
+    except ImportError:
+        logger.warning("Disabling truststore since ssl support is missing")
+        return None
+
+    try:
+        from pip._vendor import truststore
+    except ImportError as e:
+        raise CommandError(f"The truststore feature is unavailable: {e}")
+
+    return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+
+
+class SessionCommandMixin(CommandContextMixIn):
+
+    """
+    A class mixin for command classes needing _build_session().
+    """
+
+    def __init__(self) -> None:
+        super().__init__()
+        self._session: Optional[PipSession] = None
+
+    @classmethod
+    def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
+        """Return a list of index urls from user-provided options."""
+        index_urls = []
+        if not getattr(options, "no_index", False):
+            url = getattr(options, "index_url", None)
+            if url:
+                index_urls.append(url)
+        urls = getattr(options, "extra_index_urls", None)
+        if urls:
+            index_urls.extend(urls)
+        # Return None rather than an empty list
+        return index_urls or None
+
+    def get_default_session(self, options: Values) -> PipSession:
+        """Get a default-managed session."""
+        if self._session is None:
+            self._session = self.enter_context(self._build_session(options))
+            # there's no type annotation on requests.Session, so it's
+            # automatically ContextManager[Any] and self._session becomes Any,
+            # then https://github.com/python/mypy/issues/7696 kicks in
+            assert self._session is not None
+        return self._session
+
+    def _build_session(
+        self,
+        options: Values,
+        retries: Optional[int] = None,
+        timeout: Optional[int] = None,
+        fallback_to_certifi: bool = False,
+    ) -> PipSession:
+        cache_dir = options.cache_dir
+        assert not cache_dir or os.path.isabs(cache_dir)
+
+        if "truststore" in options.features_enabled:
+            try:
+                ssl_context = _create_truststore_ssl_context()
+            except Exception:
+                if not fallback_to_certifi:
+                    raise
+                ssl_context = None
+        else:
+            ssl_context = None
+
+        session = PipSession(
+            cache=os.path.join(cache_dir, "http-v2") if cache_dir else None,
+            retries=retries if retries is not None else options.retries,
+            trusted_hosts=options.trusted_hosts,
+            index_urls=self._get_index_urls(options),
+            ssl_context=ssl_context,
+        )
+
+        # Handle custom ca-bundles from the user
+        if options.cert:
+            session.verify = options.cert
+
+        # Handle SSL client certificate
+        if options.client_cert:
+            session.cert = options.client_cert
+
+        # Handle timeouts
+        if options.timeout or timeout:
+            session.timeout = timeout if timeout is not None else options.timeout
+
+        # Handle configured proxies
+        if options.proxy:
+            session.proxies = {
+                "http": options.proxy,
+                "https": options.proxy,
+            }
+
+        # Determine if we can prompt the user for authentication or not
+        session.auth.prompting = not options.no_input
+        session.auth.keyring_provider = options.keyring_provider
+
+        return session
+
+
+class IndexGroupCommand(Command, SessionCommandMixin):
+
+    """
+    Abstract base class for commands with the index_group options.
+
+    This also corresponds to the commands that permit the pip version check.
+    """
+
+    def handle_pip_version_check(self, options: Values) -> None:
+        """
+        Do the pip version check if not disabled.
+
+        This overrides the default behavior of not doing the check.
+        """
+        # Make sure the index_group options are present.
+        assert hasattr(options, "no_index")
+
+        if options.disable_pip_version_check or options.no_index:
+            return
+
+        # Otherwise, check if we're using the latest version of pip available.
+        session = self._build_session(
+            options,
+            retries=0,
+            timeout=min(5, options.timeout),
+            # This is set to ensure the function does not fail when truststore is
+            # specified in use-feature but cannot be loaded. This usually raises a
+            # CommandError and shows a nice user-facing error, but this function is not
+            # called in that try-except block.
+            fallback_to_certifi=True,
+        )
+        with session:
+            pip_self_version_check(session, options)
+
+
+KEEPABLE_TEMPDIR_TYPES = [
+    tempdir_kinds.BUILD_ENV,
+    tempdir_kinds.EPHEM_WHEEL_CACHE,
+    tempdir_kinds.REQ_BUILD,
+]
+
+
+def warn_if_run_as_root() -> None:
+    """Output a warning for sudo users on Unix.
+
+    In a virtual environment, sudo pip still writes to virtualenv.
+    On Windows, users may run pip as Administrator without issues.
+    This warning only applies to Unix root users outside of virtualenv.
+    """
+    if running_under_virtualenv():
+        return
+    if not hasattr(os, "getuid"):
+        return
+    # On Windows, there are no "system managed" Python packages. Installing as
+    # Administrator via pip is the correct way of updating system environments.
+    #
+    # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform
+    # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
+    if sys.platform == "win32" or sys.platform == "cygwin":
+        return
+
+    if os.getuid() != 0:
+        return
+
+    logger.warning(
+        "Running pip as the 'root' user can result in broken permissions and "
+        "conflicting behaviour with the system package manager. "
+        "It is recommended to use a virtual environment instead: "
+        "https://pip.pypa.io/warnings/venv"
+    )
+
+
+def with_cleanup(func: Any) -> Any:
+    """Decorator for common logic related to managing temporary
+    directories.
+    """
+
+    def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None:
+        for t in KEEPABLE_TEMPDIR_TYPES:
+            registry.set_delete(t, False)
+
+    def wrapper(
+        self: RequirementCommand, options: Values, args: List[Any]
+    ) -> Optional[int]:
+        assert self.tempdir_registry is not None
+        if options.no_clean:
+            configure_tempdir_registry(self.tempdir_registry)
+
+        try:
+            return func(self, options, args)
+        except PreviousBuildDirError:
+            # This kind of conflict can occur when the user passes an explicit
+            # build directory with a pre-existing folder. In that case we do
+            # not want to accidentally remove it.
+            configure_tempdir_registry(self.tempdir_registry)
+            raise
+
+    return wrapper
+
+
+class RequirementCommand(IndexGroupCommand):
+    def __init__(self, *args: Any, **kw: Any) -> None:
+        super().__init__(*args, **kw)
+
+        self.cmd_opts.add_option(cmdoptions.no_clean())
+
+    @staticmethod
+    def determine_resolver_variant(options: Values) -> str:
+        """Determines which resolver should be used, based on the given options."""
+        if "legacy-resolver" in options.deprecated_features_enabled:
+            return "legacy"
+
+        return "resolvelib"
+
+    @classmethod
+    def make_requirement_preparer(
+        cls,
+        temp_build_dir: TempDirectory,
+        options: Values,
+        build_tracker: BuildTracker,
+        session: PipSession,
+        finder: PackageFinder,
+        use_user_site: bool,
+        download_dir: Optional[str] = None,
+        verbosity: int = 0,
+    ) -> RequirementPreparer:
+        """
+        Create a RequirementPreparer instance for the given parameters.
+        """
+        temp_build_dir_path = temp_build_dir.path
+        assert temp_build_dir_path is not None
+        legacy_resolver = False
+
+        resolver_variant = cls.determine_resolver_variant(options)
+        if resolver_variant == "resolvelib":
+            lazy_wheel = "fast-deps" in options.features_enabled
+            if lazy_wheel:
+                logger.warning(
+                    "pip is using lazily downloaded wheels using HTTP "
+                    "range requests to obtain dependency information. "
+                    "This experimental feature is enabled through "
+                    "--use-feature=fast-deps and it is not ready for "
+                    "production."
+                )
+        else:
+            legacy_resolver = True
+            lazy_wheel = False
+            if "fast-deps" in options.features_enabled:
+                logger.warning(
+                    "fast-deps has no effect when used with the legacy resolver."
+                )
+
+        return RequirementPreparer(
+            build_dir=temp_build_dir_path,
+            src_dir=options.src_dir,
+            download_dir=download_dir,
+            build_isolation=options.build_isolation,
+            check_build_deps=options.check_build_deps,
+            build_tracker=build_tracker,
+            session=session,
+            progress_bar=options.progress_bar,
+            finder=finder,
+            require_hashes=options.require_hashes,
+            use_user_site=use_user_site,
+            lazy_wheel=lazy_wheel,
+            verbosity=verbosity,
+            legacy_resolver=legacy_resolver,
+        )
+
+    @classmethod
+    def make_resolver(
+        cls,
+        preparer: RequirementPreparer,
+        finder: PackageFinder,
+        options: Values,
+        wheel_cache: Optional[WheelCache] = None,
+        use_user_site: bool = False,
+        ignore_installed: bool = True,
+        ignore_requires_python: bool = False,
+        force_reinstall: bool = False,
+        upgrade_strategy: str = "to-satisfy-only",
+        use_pep517: Optional[bool] = None,
+        py_version_info: Optional[Tuple[int, ...]] = None,
+    ) -> BaseResolver:
+        """
+        Create a Resolver instance for the given parameters.
+        """
+        make_install_req = partial(
+            install_req_from_req_string,
+            isolated=options.isolated_mode,
+            use_pep517=use_pep517,
+        )
+        resolver_variant = cls.determine_resolver_variant(options)
+        # The long import name and duplicated invocation is needed to convince
+        # Mypy into correctly typechecking. Otherwise it would complain the
+        # "Resolver" class being redefined.
+        if resolver_variant == "resolvelib":
+            import pip._internal.resolution.resolvelib.resolver
+
+            return pip._internal.resolution.resolvelib.resolver.Resolver(
+                preparer=preparer,
+                finder=finder,
+                wheel_cache=wheel_cache,
+                make_install_req=make_install_req,
+                use_user_site=use_user_site,
+                ignore_dependencies=options.ignore_dependencies,
+                ignore_installed=ignore_installed,
+                ignore_requires_python=ignore_requires_python,
+                force_reinstall=force_reinstall,
+                upgrade_strategy=upgrade_strategy,
+                py_version_info=py_version_info,
+            )
+        import pip._internal.resolution.legacy.resolver
+
+        return pip._internal.resolution.legacy.resolver.Resolver(
+            preparer=preparer,
+            finder=finder,
+            wheel_cache=wheel_cache,
+            make_install_req=make_install_req,
+            use_user_site=use_user_site,
+            ignore_dependencies=options.ignore_dependencies,
+            ignore_installed=ignore_installed,
+            ignore_requires_python=ignore_requires_python,
+            force_reinstall=force_reinstall,
+            upgrade_strategy=upgrade_strategy,
+            py_version_info=py_version_info,
+        )
+
+    def get_requirements(
+        self,
+        args: List[str],
+        options: Values,
+        finder: PackageFinder,
+        session: PipSession,
+    ) -> List[InstallRequirement]:
+        """
+        Parse command-line arguments into the corresponding requirements.
+        """
+        requirements: List[InstallRequirement] = []
+        for filename in options.constraints:
+            for parsed_req in parse_requirements(
+                filename,
+                constraint=True,
+                finder=finder,
+                options=options,
+                session=session,
+            ):
+                req_to_add = install_req_from_parsed_requirement(
+                    parsed_req,
+                    isolated=options.isolated_mode,
+                    user_supplied=False,
+                )
+                requirements.append(req_to_add)
+
+        for req in args:
+            req_to_add = install_req_from_line(
+                req,
+                comes_from=None,
+                isolated=options.isolated_mode,
+                use_pep517=options.use_pep517,
+                user_supplied=True,
+                config_settings=getattr(options, "config_settings", None),
+            )
+            requirements.append(req_to_add)
+
+        for req in options.editables:
+            req_to_add = install_req_from_editable(
+                req,
+                user_supplied=True,
+                isolated=options.isolated_mode,
+                use_pep517=options.use_pep517,
+                config_settings=getattr(options, "config_settings", None),
+            )
+            requirements.append(req_to_add)
+
+        # NOTE: options.require_hashes may be set if --require-hashes is True
+        for filename in options.requirements:
+            for parsed_req in parse_requirements(
+                filename, finder=finder, options=options, session=session
+            ):
+                req_to_add = install_req_from_parsed_requirement(
+                    parsed_req,
+                    isolated=options.isolated_mode,
+                    use_pep517=options.use_pep517,
+                    user_supplied=True,
+                    config_settings=parsed_req.options.get("config_settings")
+                    if parsed_req.options
+                    else None,
+                )
+                requirements.append(req_to_add)
+
+        # If any requirement has hash options, enable hash checking.
+        if any(req.has_hash_options for req in requirements):
+            options.require_hashes = True
+
+        if not (args or options.editables or options.requirements):
+            opts = {"name": self.name}
+            if options.find_links:
+                raise CommandError(
+                    "You must give at least one requirement to {name} "
+                    '(maybe you meant "pip {name} {links}"?)'.format(
+                        **dict(opts, links=" ".join(options.find_links))
+                    )
+                )
+            else:
+                raise CommandError(
+                    "You must give at least one requirement to {name} "
+                    '(see "pip help {name}")'.format(**opts)
+                )
+
+        return requirements
+
+    @staticmethod
+    def trace_basic_info(finder: PackageFinder) -> None:
+        """
+        Trace basic information about the provided objects.
+        """
+        # Display where finder is looking for packages
+        search_scope = finder.search_scope
+        locations = search_scope.get_formatted_locations()
+        if locations:
+            logger.info(locations)
+
+    def _build_package_finder(
+        self,
+        options: Values,
+        session: PipSession,
+        target_python: Optional[TargetPython] = None,
+        ignore_requires_python: Optional[bool] = None,
+    ) -> PackageFinder:
+        """
+        Create a package finder appropriate to this requirement command.
+
+        :param ignore_requires_python: Whether to ignore incompatible
+            "Requires-Python" values in links. Defaults to False.
+        """
+        link_collector = LinkCollector.create(session, options=options)
+        selection_prefs = SelectionPreferences(
+            allow_yanked=True,
+            format_control=options.format_control,
+            allow_all_prereleases=options.pre,
+            prefer_binary=options.prefer_binary,
+            ignore_requires_python=ignore_requires_python,
+        )
+
+        return PackageFinder.create(
+            link_collector=link_collector,
+            selection_prefs=selection_prefs,
+            target_python=target_python,
+        )
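The truststore branch in _build_session() above is only taken when the feature flag is supplied on the command line, and _create_truststore_ssl_context() requires Python 3.10+. A hedged sketch of opting in from a script, again via a subprocess rather than in-process imports; the requests package name is a placeholder:

    import subprocess
    import sys

    subprocess.run(
        [sys.executable, "-m", "pip", "install", "--use-feature=truststore", "requests"],
        check=True,
    )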
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/cli/spinners.py b/.venv/lib/python3.12/site-packages/pip/_internal/cli/spinners.py
new file mode 100644
index 0000000..cf2b976
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/cli/spinners.py
@@ -0,0 +1,159 @@
+import contextlib
+import itertools
+import logging
+import sys
+import time
+from typing import IO, Generator, Optional
+
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.logging import get_indentation
+
+logger = logging.getLogger(__name__)
+
+
+class SpinnerInterface:
+    def spin(self) -> None:
+        raise NotImplementedError()
+
+    def finish(self, final_status: str) -> None:
+        raise NotImplementedError()
+
+
+class InteractiveSpinner(SpinnerInterface):
+    def __init__(
+        self,
+        message: str,
+        file: Optional[IO[str]] = None,
+        spin_chars: str = "-\\|/",
+        # Empirically, 8 updates/second looks nice
+        min_update_interval_seconds: float = 0.125,
+    ):
+        self._message = message
+        if file is None:
+            file = sys.stdout
+        self._file = file
+        self._rate_limiter = RateLimiter(min_update_interval_seconds)
+        self._finished = False
+
+        self._spin_cycle = itertools.cycle(spin_chars)
+
+        self._file.write(" " * get_indentation() + self._message + " ... ")
+        self._width = 0
+
+    def _write(self, status: str) -> None:
+        assert not self._finished
+        # Erase what we wrote before by backspacing to the beginning, writing
+        # spaces to overwrite the old text, and then backspacing again
+        backup = "\b" * self._width
+        self._file.write(backup + " " * self._width + backup)
+        # Now we have a blank slate to add our status
+        self._file.write(status)
+        self._width = len(status)
+        self._file.flush()
+        self._rate_limiter.reset()
+
+    def spin(self) -> None:
+        if self._finished:
+            return
+        if not self._rate_limiter.ready():
+            return
+        self._write(next(self._spin_cycle))
+
+    def finish(self, final_status: str) -> None:
+        if self._finished:
+            return
+        self._write(final_status)
+        self._file.write("\n")
+        self._file.flush()
+        self._finished = True
+
+
+# Used for dumb terminals, non-interactive installs (no tty), etc.
+# We still print updates occasionally (once every 60 seconds by default) to
+# act as a keep-alive for systems like Travis-CI that take lack-of-output as
+# an indication that a task has frozen.
+class NonInteractiveSpinner(SpinnerInterface):
+    def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None:
+        self._message = message
+        self._finished = False
+        self._rate_limiter = RateLimiter(min_update_interval_seconds)
+        self._update("started")
+
+    def _update(self, status: str) -> None:
+        assert not self._finished
+        self._rate_limiter.reset()
+        logger.info("%s: %s", self._message, status)
+
+    def spin(self) -> None:
+        if self._finished:
+            return
+        if not self._rate_limiter.ready():
+            return
+        self._update("still running...")
+
+    def finish(self, final_status: str) -> None:
+        if self._finished:
+            return
+        self._update(f"finished with status '{final_status}'")
+        self._finished = True
+
+
+class RateLimiter:
+    def __init__(self, min_update_interval_seconds: float) -> None:
+        self._min_update_interval_seconds = min_update_interval_seconds
+        self._last_update: float = 0
+
+    def ready(self) -> bool:
+        now = time.time()
+        delta = now - self._last_update
+        return delta >= self._min_update_interval_seconds
+
+    def reset(self) -> None:
+        self._last_update = time.time()
+
+
+@contextlib.contextmanager
+def open_spinner(message: str) -> Generator[SpinnerInterface, None, None]:
+    # Interactive spinner goes directly to sys.stdout rather than being routed
+    # through the logging system, but it acts like it has level INFO,
+    # i.e. it's only displayed if we're at level INFO or better.
+    # Non-interactive spinner goes through the logging system, so it is always
+    # in sync with logging configuration.
+    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
+        spinner: SpinnerInterface = InteractiveSpinner(message)
+    else:
+        spinner = NonInteractiveSpinner(message)
+    try:
+        with hidden_cursor(sys.stdout):
+            yield spinner
+    except KeyboardInterrupt:
+        spinner.finish("canceled")
+        raise
+    except Exception:
+        spinner.finish("error")
+        raise
+    else:
+        spinner.finish("done")
+
+
+HIDE_CURSOR = "\x1b[?25l"
+SHOW_CURSOR = "\x1b[?25h"
+
+
+@contextlib.contextmanager
+def hidden_cursor(file: IO[str]) -> Generator[None, None, None]:
+    # The Windows terminal does not support the hide/show cursor ANSI codes,
+    # even via colorama. So don't even try.
+    if WINDOWS:
+        yield
+    # We don't want to clutter the output with control characters if we're
+    # writing to a file, or if the user is running with --quiet.
+    # See https://github.com/pypa/pip/issues/3418
+    elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
+        yield
+    else:
+        file.write(HIDE_CURSOR)
+        try:
+            yield
+        finally:
+            file.write(SHOW_CURSOR)
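A brief usage sketch of open_spinner() (internal API, shown only for orientation): the helper chooses the interactive or logging-based spinner itself and finishes with "done", "canceled", or "error" depending on how the block exits. The sleep loop below just simulates work:

    import time

    from pip._internal.cli.spinners import open_spinner

    with open_spinner("Crunching numbers") as spinner:
        for _ in range(8):
            time.sleep(0.2)
            spinner.spin()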
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/cli/status_codes.py b/.venv/lib/python3.12/site-packages/pip/_internal/cli/status_codes.py
new file mode 100644
index 0000000..5e29502
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/cli/status_codes.py
@@ -0,0 +1,6 @@
+SUCCESS = 0
+ERROR = 1
+UNKNOWN_ERROR = 2
+VIRTUALENV_NOT_FOUND = 3
+PREVIOUS_BUILD_DIR_ERROR = 4
+NO_MATCHES_FOUND = 23
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/commands/__init__.py b/.venv/lib/python3.12/site-packages/pip/_internal/commands/__init__.py
new file mode 100644
index 0000000..858a410
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/commands/__init__.py
@@ -0,0 +1,132 @@
+"""
+Package containing all pip commands
+"""
+
+import importlib
+from collections import namedtuple
+from typing import Any, Dict, Optional
+
+from pip._internal.cli.base_command import Command
+
+CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary")
+
+# This dictionary does a bunch of heavy lifting for help output:
+# - Enables avoiding additional (costly) imports for presenting `--help`.
+# - The ordering matters for help display.
+#
+# Even though the module path starts with the same "pip._internal.commands"
+# prefix, the full path makes testing easier (specifically when modifying
+# `commands_dict` in test setup / teardown).
+commands_dict: Dict[str, CommandInfo] = {
+    "install": CommandInfo(
+        "pip._internal.commands.install",
+        "InstallCommand",
+        "Install packages.",
+    ),
+    "download": CommandInfo(
+        "pip._internal.commands.download",
+        "DownloadCommand",
+        "Download packages.",
+    ),
+    "uninstall": CommandInfo(
+        "pip._internal.commands.uninstall",
+        "UninstallCommand",
+        "Uninstall packages.",
+    ),
+    "freeze": CommandInfo(
+        "pip._internal.commands.freeze",
+        "FreezeCommand",
+        "Output installed packages in requirements format.",
+    ),
+    "inspect": CommandInfo(
+        "pip._internal.commands.inspect",
+        "InspectCommand",
+        "Inspect the python environment.",
+    ),
+    "list": CommandInfo(
+        "pip._internal.commands.list",
+        "ListCommand",
+        "List installed packages.",
+    ),
+    "show": CommandInfo(
+        "pip._internal.commands.show",
+        "ShowCommand",
+        "Show information about installed packages.",
+    ),
+    "check": CommandInfo(
+        "pip._internal.commands.check",
+        "CheckCommand",
+        "Verify installed packages have compatible dependencies.",
+    ),
+    "config": CommandInfo(
+        "pip._internal.commands.configuration",
+        "ConfigurationCommand",
+        "Manage local and global configuration.",
+    ),
+    "search": CommandInfo(
+        "pip._internal.commands.search",
+        "SearchCommand",
+        "Search PyPI for packages.",
+    ),
+    "cache": CommandInfo(
+        "pip._internal.commands.cache",
+        "CacheCommand",
+        "Inspect and manage pip's wheel cache.",
+    ),
+    "index": CommandInfo(
+        "pip._internal.commands.index",
+        "IndexCommand",
+        "Inspect information available from package indexes.",
+    ),
+    "wheel": CommandInfo(
+        "pip._internal.commands.wheel",
+        "WheelCommand",
+        "Build wheels from your requirements.",
+    ),
+    "hash": CommandInfo(
+        "pip._internal.commands.hash",
+        "HashCommand",
+        "Compute hashes of package archives.",
+    ),
+    "completion": CommandInfo(
+        "pip._internal.commands.completion",
+        "CompletionCommand",
+        "A helper command used for command completion.",
+    ),
+    "debug": CommandInfo(
+        "pip._internal.commands.debug",
+        "DebugCommand",
+        "Show information useful for debugging.",
+    ),
+    "help": CommandInfo(
+        "pip._internal.commands.help",
+        "HelpCommand",
+        "Show help for commands.",
+    ),
+}
+
+
+def create_command(name: str, **kwargs: Any) -> Command:
+    """
+    Create an instance of the Command class with the given name.
+    """
+    module_path, class_name, summary = commands_dict[name]
+    module = importlib.import_module(module_path)
+    command_class = getattr(module, class_name)
+    command = command_class(name=name, summary=summary, **kwargs)
+
+    return command
+
+
+def get_similar_commands(name: str) -> Optional[str]:
+    """Command name auto-correct."""
+    from difflib import get_close_matches
+
+    name = name.lower()
+
+    close_commands = get_close_matches(name, commands_dict.keys())
+
+    if close_commands:
+        return close_commands[0]
+    else:
+        return None
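A small sketch of the registry above in action (illustrative; these are internal entry points): create_command() imports only the module behind the requested name, and get_similar_commands() suggests a correction for a typo:

    from pip._internal.commands import create_command, get_similar_commands

    cmd = create_command("install")
    print(cmd.name, "-", cmd.summary)      # install - Install packages.

    print(get_similar_commands("instal"))  # install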
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/commands/cache.py b/.venv/lib/python3.12/site-packages/pip/_internal/commands/cache.py
new file mode 100644
index 0000000..3283361
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/commands/cache.py
@@ -0,0 +1,225 @@
+import os
+import textwrap
+from optparse import Values
+from typing import Any, List
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.exceptions import CommandError, PipError
+from pip._internal.utils import filesystem
+from pip._internal.utils.logging import getLogger
+
+logger = getLogger(__name__)
+
+
+class CacheCommand(Command):
+    """
+    Inspect and manage pip's wheel cache.
+
+    Subcommands:
+
+    - dir: Show the cache directory.
+    - info: Show information about the cache.
+    - list: List filenames of packages stored in the cache.
+    - remove: Remove one or more packages from the cache.
+    - purge: Remove all items from the cache.
+
+    ``<pattern>`` can be a glob expression or a package name.
+    """
+
+    ignore_require_venv = True
+    usage = """
+        %prog dir
+        %prog info
+        %prog list [<pattern>] [--format=[human, abspath]]
+        %prog remove <pattern>
+        %prog purge
+    """
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "--format",
+            action="store",
+            dest="list_format",
+            default="human",
+            choices=("human", "abspath"),
+            help="Select the output format among: human (default) or abspath",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        handlers = {
+            "dir": self.get_cache_dir,
+            "info": self.get_cache_info,
+            "list": self.list_cache_items,
+            "remove": self.remove_cache_items,
+            "purge": self.purge_cache,
+        }
+
+        if not options.cache_dir:
+            logger.error("pip cache commands can not function since cache is disabled.")
+            return ERROR
+
+        # Determine action
+        if not args or args[0] not in handlers:
+            logger.error(
+                "Need an action (%s) to perform.",
+                ", ".join(sorted(handlers)),
+            )
+            return ERROR
+
+        action = args[0]
+
+        # Error handling happens here, not in the action-handlers.
+        try:
+            handlers[action](options, args[1:])
+        except PipError as e:
+            logger.error(e.args[0])
+            return ERROR
+
+        return SUCCESS
+
+    def get_cache_dir(self, options: Values, args: List[Any]) -> None:
+        if args:
+            raise CommandError("Too many arguments")
+
+        logger.info(options.cache_dir)
+
+    def get_cache_info(self, options: Values, args: List[Any]) -> None:
+        if args:
+            raise CommandError("Too many arguments")
+
+        num_http_files = len(self._find_http_files(options))
+        num_packages = len(self._find_wheels(options, "*"))
+
+        http_cache_location = self._cache_dir(options, "http-v2")
+        old_http_cache_location = self._cache_dir(options, "http")
+        wheels_cache_location = self._cache_dir(options, "wheels")
+        http_cache_size = filesystem.format_size(
+            filesystem.directory_size(http_cache_location)
+            + filesystem.directory_size(old_http_cache_location)
+        )
+        wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)
+
+        message = (
+            textwrap.dedent(
+                """
+                    Package index page cache location (pip v23.3+): {http_cache_location}
+                    Package index page cache location (older pips): {old_http_cache_location}
+                    Package index page cache size: {http_cache_size}
+                    Number of HTTP files: {num_http_files}
+                    Locally built wheels location: {wheels_cache_location}
+                    Locally built wheels size: {wheels_cache_size}
+                    Number of locally built wheels: {package_count}
+                """  # noqa: E501
+            )
+            .format(
+                http_cache_location=http_cache_location,
+                old_http_cache_location=old_http_cache_location,
+                http_cache_size=http_cache_size,
+                num_http_files=num_http_files,
+                wheels_cache_location=wheels_cache_location,
+                package_count=num_packages,
+                wheels_cache_size=wheels_cache_size,
+            )
+            .strip()
+        )
+
+        logger.info(message)
+
+    def list_cache_items(self, options: Values, args: List[Any]) -> None:
+        if len(args) > 1:
+            raise CommandError("Too many arguments")
+
+        if args:
+            pattern = args[0]
+        else:
+            pattern = "*"
+
+        files = self._find_wheels(options, pattern)
+        if options.list_format == "human":
+            self.format_for_human(files)
+        else:
+            self.format_for_abspath(files)
+
+    def format_for_human(self, files: List[str]) -> None:
+        if not files:
+            logger.info("No locally built wheels cached.")
+            return
+
+        results = []
+        for filename in files:
+            wheel = os.path.basename(filename)
+            size = filesystem.format_file_size(filename)
+            results.append(f" - {wheel} ({size})")
+        logger.info("Cache contents:\n")
+        logger.info("\n".join(sorted(results)))
+
+    def format_for_abspath(self, files: List[str]) -> None:
+        if files:
+            logger.info("\n".join(sorted(files)))
+
+    def remove_cache_items(self, options: Values, args: List[Any]) -> None:
+        if len(args) > 1:
+            raise CommandError("Too many arguments")
+
+        if not args:
+            raise CommandError("Please provide a pattern")
+
+        files = self._find_wheels(options, args[0])
+
+        no_matching_msg = "No matching packages"
+        if args[0] == "*":
+            # Only fetch http files if no specific pattern given
+            files += self._find_http_files(options)
+        else:
+            # Add the pattern to the log message
+            no_matching_msg += f' for pattern "{args[0]}"'
+
+        if not files:
+            logger.warning(no_matching_msg)
+
+        for filename in files:
+            os.unlink(filename)
+            logger.verbose("Removed %s", filename)
+        logger.info("Files removed: %s", len(files))
+
+    def purge_cache(self, options: Values, args: List[Any]) -> None:
+        if args:
+            raise CommandError("Too many arguments")
+
+        return self.remove_cache_items(options, ["*"])
+
+    def _cache_dir(self, options: Values, subdir: str) -> str:
+        return os.path.join(options.cache_dir, subdir)
+
+    def _find_http_files(self, options: Values) -> List[str]:
+        old_http_dir = self._cache_dir(options, "http")
+        new_http_dir = self._cache_dir(options, "http-v2")
+        return filesystem.find_files(old_http_dir, "*") + filesystem.find_files(
+            new_http_dir, "*"
+        )
+
+    def _find_wheels(self, options: Values, pattern: str) -> List[str]:
+        wheel_dir = self._cache_dir(options, "wheels")
+
+        # The wheel filename format, as specified in PEP 427, is:
+        #     {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl
+        #
+        # Additionally, non-alphanumeric values in the distribution are
+        # normalized to underscores (_), meaning hyphens can never occur
+        # before `-{version}`.
+        #
+        # Given that information:
+        # - If the pattern we're given contains a hyphen (-), the user is
+        #   providing at least the version. Thus, we can just append `*.whl`
+        #   to match the rest of it.
+        # - If the pattern we're given doesn't contain a hyphen (-), the
+        #   user is only providing the name. Thus, we append `-*.whl` to
+        #   match the hyphen before the version, followed by anything else.
+        #
+        # PEP 427: https://www.python.org/dev/peps/pep-0427/
+        pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl")
+
+        return filesystem.find_files(wheel_dir, pattern)
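The comment block above describes how `_find_wheels` turns a user-supplied pattern into a wheel-filename glob. A minimal standalone sketch of that rewrite, using hypothetical cached filenames and only the standard library:

```python
import fnmatch

cached = [
    "requests-2.31.0-py3-none-any.whl",             # hypothetical cache entries
    "requests_toolbelt-1.0.0-py2.py3-none-any.whl",
]

def to_glob(pattern: str) -> str:
    # Mirrors the rewrite above: a hyphen means a version was already given.
    return pattern + ("*.whl" if "-" in pattern else "-*.whl")

print(fnmatch.filter(cached, to_glob("requests")))
# ['requests-2.31.0-py3-none-any.whl']
print(fnmatch.filter(cached, to_glob("requests-2.31.0")))
# ['requests-2.31.0-py3-none-any.whl']
```

A bare name matches every cached build of that project, while adding a hyphenated version narrows the match to a single wheel.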
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/commands/check.py b/.venv/lib/python3.12/site-packages/pip/_internal/commands/check.py
new file mode 100644
index 0000000..5efd0a3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/commands/check.py
@@ -0,0 +1,54 @@
+import logging
+from optparse import Values
+from typing import List
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.operations.check import (
+    check_package_set,
+    create_package_set_from_installed,
+    warn_legacy_versions_and_specifiers,
+)
+from pip._internal.utils.misc import write_output
+
+logger = logging.getLogger(__name__)
+
+
+class CheckCommand(Command):
+    """Verify installed packages have compatible dependencies."""
+
+    usage = """
+      %prog [options]"""
+
+    def run(self, options: Values, args: List[str]) -> int:
+        package_set, parsing_probs = create_package_set_from_installed()
+        warn_legacy_versions_and_specifiers(package_set)
+        missing, conflicting = check_package_set(package_set)
+
+        for project_name in missing:
+            version = package_set[project_name].version
+            for dependency in missing[project_name]:
+                write_output(
+                    "%s %s requires %s, which is not installed.",
+                    project_name,
+                    version,
+                    dependency[0],
+                )
+
+        for project_name in conflicting:
+            version = package_set[project_name].version
+            for dep_name, dep_version, req in conflicting[project_name]:
+                write_output(
+                    "%s %s has requirement %s, but you have %s %s.",
+                    project_name,
+                    version,
+                    req,
+                    dep_name,
+                    dep_version,
+                )
+
+        if missing or conflicting or parsing_probs:
+            return ERROR
+        else:
+            write_output("No broken requirements found.")
+            return SUCCESS
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/commands/completion.py b/.venv/lib/python3.12/site-packages/pip/_internal/commands/completion.py
new file mode 100644
index 0000000..9e89e27
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/commands/completion.py
@@ -0,0 +1,130 @@
+import sys
+import textwrap
+from optparse import Values
+from typing import List
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.utils.misc import get_prog
+
+BASE_COMPLETION = """
+# pip {shell} completion start{script}# pip {shell} completion end
+"""
+
+COMPLETION_SCRIPTS = {
+    "bash": """
+        _pip_completion()
+        {{
+            COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
+                           COMP_CWORD=$COMP_CWORD \\
+                           PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
+        }}
+        complete -o default -F _pip_completion {prog}
+    """,
+    "zsh": """
+        #compdef -P pip[0-9.]#
+        __pip() {{
+          compadd $( COMP_WORDS="$words[*]" \\
+                     COMP_CWORD=$((CURRENT-1)) \\
+                     PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )
+        }}
+        if [[ $zsh_eval_context[-1] == loadautofunc ]]; then
+          # autoload from fpath, call function directly
+          __pip "$@"
+        else
+          # eval/source/. command, register function for later
+          compdef __pip -P 'pip[0-9.]#'
+        fi
+    """,
+    "fish": """
+        function __fish_complete_pip
+            set -lx COMP_WORDS (commandline -o) ""
+            set -lx COMP_CWORD ( \\
+                math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
+            )
+            set -lx PIP_AUTO_COMPLETE 1
+            string split \\  -- (eval $COMP_WORDS[1])
+        end
+        complete -fa "(__fish_complete_pip)" -c {prog}
+    """,
+    "powershell": """
+        if ((Test-Path Function:\\TabExpansion) -and -not `
+            (Test-Path Function:\\_pip_completeBackup)) {{
+            Rename-Item Function:\\TabExpansion _pip_completeBackup
+        }}
+        function TabExpansion($line, $lastWord) {{
+            $lastBlock = [regex]::Split($line, '[|;]')[-1].TrimStart()
+            if ($lastBlock.StartsWith("{prog} ")) {{
+                $Env:COMP_WORDS=$lastBlock
+                $Env:COMP_CWORD=$lastBlock.Split().Length - 1
+                $Env:PIP_AUTO_COMPLETE=1
+                (& {prog}).Split()
+                Remove-Item Env:COMP_WORDS
+                Remove-Item Env:COMP_CWORD
+                Remove-Item Env:PIP_AUTO_COMPLETE
+            }}
+            elseif (Test-Path Function:\\_pip_completeBackup) {{
+                # Fall back on existing tab expansion
+                _pip_completeBackup $line $lastWord
+            }}
+        }}
+    """,
+}
+
+
+class CompletionCommand(Command):
+    """A helper command to be used for command completion."""
+
+    ignore_require_venv = True
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "--bash",
+            "-b",
+            action="store_const",
+            const="bash",
+            dest="shell",
+            help="Emit completion code for bash",
+        )
+        self.cmd_opts.add_option(
+            "--zsh",
+            "-z",
+            action="store_const",
+            const="zsh",
+            dest="shell",
+            help="Emit completion code for zsh",
+        )
+        self.cmd_opts.add_option(
+            "--fish",
+            "-f",
+            action="store_const",
+            const="fish",
+            dest="shell",
+            help="Emit completion code for fish",
+        )
+        self.cmd_opts.add_option(
+            "--powershell",
+            "-p",
+            action="store_const",
+            const="powershell",
+            dest="shell",
+            help="Emit completion code for powershell",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        """Prints the completion code of the given shell"""
+        shells = COMPLETION_SCRIPTS.keys()
+        shell_options = ["--" + shell for shell in sorted(shells)]
+        if options.shell in shells:
+            script = textwrap.dedent(
+                COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog())
+            )
+            print(BASE_COMPLETION.format(script=script, shell=options.shell))
+            return SUCCESS
+        else:
+            sys.stderr.write(
+                "ERROR: You must pass {}\n".format(" or ".join(shell_options))
+            )
+            return SUCCESS
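One detail worth noting in the completion scripts above: the shell snippets double their braces (`{{`, `}}`) so that `str.format()` leaves them as literal braces and substitutes only `{prog}`. A trimmed, hypothetical template illustrating that behaviour:

```python
template = """\
_pip_completion()
{{
    COMPREPLY=( $( PIP_AUTO_COMPLETE=1 $1 ) )
}}
complete -o default -F _pip_completion {prog}
"""
# {{ and }} render as literal { and }; only {prog} is replaced.
print(template.format(prog="pip"))
# ...last line of output: complete -o default -F _pip_completion pip
```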
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/commands/configuration.py b/.venv/lib/python3.12/site-packages/pip/_internal/commands/configuration.py
new file mode 100644
index 0000000..1a1dc6b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/commands/configuration.py
@@ -0,0 +1,280 @@
+import logging
+import os
+import subprocess
+from optparse import Values
+from typing import Any, List, Optional
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.configuration import (
+    Configuration,
+    Kind,
+    get_configuration_files,
+    kinds,
+)
+from pip._internal.exceptions import PipError
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import get_prog, write_output
+
+logger = logging.getLogger(__name__)
+
+
+class ConfigurationCommand(Command):
+    """
+    Manage local and global configuration.
+
+    Subcommands:
+
+    - list: List the active configuration (or from the file specified)
+    - edit: Edit the configuration file in an editor
+    - get: Get the value associated with command.option
+    - set: Set the command.option=value
+    - unset: Unset the value associated with command.option
+    - debug: List the configuration files and values defined under them
+
+    Configuration keys should be dot separated command and option name,
+    with the special prefix "global" affecting any command. For example,
+    "pip config set global.index-url https://example.org/" would configure
+    the index url for all commands, but "pip config set download.timeout 10"
+    would configure a 10 second timeout only for "pip download" commands.
+
+    If none of --user, --global and --site are passed, a virtual
+    environment configuration file is used if one is active and the file
+    exists. Otherwise, all modifications happen to the user file by
+    default.
+    """
+
+    ignore_require_venv = True
+    usage = """
+        %prog [<file-option>] list
+        %prog [<file-option>] [--editor <editor-path>] edit
+
+        %prog [<file-option>] get command.option
+        %prog [<file-option>] set command.option value
+        %prog [<file-option>] unset command.option
+        %prog [<file-option>] debug
+    """
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "--editor",
+            dest="editor",
+            action="store",
+            default=None,
+            help=(
+                "Editor to use to edit the file. Uses VISUAL or EDITOR "
+                "environment variables if not provided."
+            ),
+        )
+
+        self.cmd_opts.add_option(
+            "--global",
+            dest="global_file",
+            action="store_true",
+            default=False,
+            help="Use the system-wide configuration file only",
+        )
+
+        self.cmd_opts.add_option(
+            "--user",
+            dest="user_file",
+            action="store_true",
+            default=False,
+            help="Use the user configuration file only",
+        )
+
+        self.cmd_opts.add_option(
+            "--site",
+            dest="site_file",
+            action="store_true",
+            default=False,
+            help="Use the current environment configuration file only",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        handlers = {
+            "list": self.list_values,
+            "edit": self.open_in_editor,
+            "get": self.get_name,
+            "set": self.set_name_value,
+            "unset": self.unset_name,
+            "debug": self.list_config_values,
+        }
+
+        # Determine action
+        if not args or args[0] not in handlers:
+            logger.error(
+                "Need an action (%s) to perform.",
+                ", ".join(sorted(handlers)),
+            )
+            return ERROR
+
+        action = args[0]
+
+        # Determine which configuration files are to be loaded
+        #    Depends on whether the command is modifying.
+        try:
+            load_only = self._determine_file(
+                options, need_value=(action in ["get", "set", "unset", "edit"])
+            )
+        except PipError as e:
+            logger.error(e.args[0])
+            return ERROR
+
+        # Load a new configuration
+        self.configuration = Configuration(
+            isolated=options.isolated_mode, load_only=load_only
+        )
+        self.configuration.load()
+
+        # Error handling happens here, not in the action-handlers.
+        try:
+            handlers[action](options, args[1:])
+        except PipError as e:
+            logger.error(e.args[0])
+            return ERROR
+
+        return SUCCESS
+
+    def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]:
+        file_options = [
+            key
+            for key, value in (
+                (kinds.USER, options.user_file),
+                (kinds.GLOBAL, options.global_file),
+                (kinds.SITE, options.site_file),
+            )
+            if value
+        ]
+
+        if not file_options:
+            if not need_value:
+                return None
+            # Default to user, unless there's a site file.
+            elif any(
+                os.path.exists(site_config_file)
+                for site_config_file in get_configuration_files()[kinds.SITE]
+            ):
+                return kinds.SITE
+            else:
+                return kinds.USER
+        elif len(file_options) == 1:
+            return file_options[0]
+
+        raise PipError(
+            "Need exactly one file to operate upon "
+            "(--user, --site, --global) to perform."
+        )
+
+    def list_values(self, options: Values, args: List[str]) -> None:
+        self._get_n_args(args, "list", n=0)
+
+        for key, value in sorted(self.configuration.items()):
+            write_output("%s=%r", key, value)
+
+    def get_name(self, options: Values, args: List[str]) -> None:
+        key = self._get_n_args(args, "get [name]", n=1)
+        value = self.configuration.get_value(key)
+
+        write_output("%s", value)
+
+    def set_name_value(self, options: Values, args: List[str]) -> None:
+        key, value = self._get_n_args(args, "set [name] [value]", n=2)
+        self.configuration.set_value(key, value)
+
+        self._save_configuration()
+
+    def unset_name(self, options: Values, args: List[str]) -> None:
+        key = self._get_n_args(args, "unset [name]", n=1)
+        self.configuration.unset_value(key)
+
+        self._save_configuration()
+
+    def list_config_values(self, options: Values, args: List[str]) -> None:
+        """List config key-value pairs across different config files"""
+        self._get_n_args(args, "debug", n=0)
+
+        self.print_env_var_values()
+        # Iterate over config files and print if they exist, and the
+        # key-value pairs present in them if they do
+        for variant, files in sorted(self.configuration.iter_config_files()):
+            write_output("%s:", variant)
+            for fname in files:
+                with indent_log():
+                    file_exists = os.path.exists(fname)
+                    write_output("%s, exists: %r", fname, file_exists)
+                    if file_exists:
+                        self.print_config_file_values(variant)
+
+    def print_config_file_values(self, variant: Kind) -> None:
+        """Get key-value pairs from the file of a variant"""
+        for name, value in self.configuration.get_values_in_config(variant).items():
+            with indent_log():
+                write_output("%s: %s", name, value)
+
+    def print_env_var_values(self) -> None:
+        """Get key-values pairs present as environment variables"""
+        write_output("%s:", "env_var")
+        with indent_log():
+            for key, value in sorted(self.configuration.get_environ_vars()):
+                env_var = f"PIP_{key.upper()}"
+                write_output("%s=%r", env_var, value)
+
+    def open_in_editor(self, options: Values, args: List[str]) -> None:
+        editor = self._determine_editor(options)
+
+        fname = self.configuration.get_file_to_edit()
+        if fname is None:
+            raise PipError("Could not determine appropriate file.")
+        elif '"' in fname:
+            # This shouldn't happen, unless we see a username like that.
+            # If that happens, we'd appreciate a pull request fixing this.
+            raise PipError(
+                f'Can not open an editor for a file name containing "\n{fname}'
+            )
+
+        try:
+            subprocess.check_call(f'{editor} "{fname}"', shell=True)
+        except FileNotFoundError as e:
+            if not e.filename:
+                e.filename = editor
+            raise
+        except subprocess.CalledProcessError as e:
+            raise PipError(f"Editor Subprocess exited with exit code {e.returncode}")
+
+    def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
+        """Helper to make sure the command got the right number of arguments"""
+        if len(args) != n:
+            msg = (
+                f"Got unexpected number of arguments, expected {n}. "
+                f'(example: "{get_prog()} config {example}")'
+            )
+            raise PipError(msg)
+
+        if n == 1:
+            return args[0]
+        else:
+            return args
+
+    def _save_configuration(self) -> None:
+        # We successfully ran a modifying command. Need to save the
+        # configuration.
+        try:
+            self.configuration.save()
+        except Exception:
+            logger.exception(
+                "Unable to save configuration. Please report this as a bug."
+            )
+            raise PipError("Internal Error.")
+
+    def _determine_editor(self, options: Values) -> str:
+        if options.editor is not None:
+            return options.editor
+        elif "VISUAL" in os.environ:
+            return os.environ["VISUAL"]
+        elif "EDITOR" in os.environ:
+            return os.environ["EDITOR"]
+        else:
+            raise PipError("Could not determine editor to use.")
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/commands/debug.py b/.venv/lib/python3.12/site-packages/pip/_internal/commands/debug.py
new file mode 100644
index 0000000..7e5271c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/commands/debug.py
@@ -0,0 +1,201 @@
+import importlib.resources
+import locale
+import logging
+import os
+import sys
+from optparse import Values
+from types import ModuleType
+from typing import Any, Dict, List, Optional
+
+import pip._vendor
+from pip._vendor.certifi import where
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.configuration import Configuration
+from pip._internal.metadata import get_environment
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import get_pip_version
+
+logger = logging.getLogger(__name__)
+
+
+def show_value(name: str, value: Any) -> None:
+    logger.info("%s: %s", name, value)
+
+
+def show_sys_implementation() -> None:
+    logger.info("sys.implementation:")
+    implementation_name = sys.implementation.name
+    with indent_log():
+        show_value("name", implementation_name)
+
+
+def create_vendor_txt_map() -> Dict[str, str]:
+    with importlib.resources.open_text("pip._vendor", "vendor.txt") as f:
+        # Purge non version specifying lines.
+        # Also, remove any space prefix or suffixes (including comments).
+        lines = [
+            line.strip().split(" ", 1)[0] for line in f.readlines() if "==" in line
+        ]
+
+    # Transform into "module" -> version dict.
+    return dict(line.split("==", 1) for line in lines)
+
+
+def get_module_from_module_name(module_name: str) -> Optional[ModuleType]:
+    # Module name can be uppercase in vendor.txt for some reason...
+    module_name = module_name.lower().replace("-", "_")
+    # PATCH: setuptools is actually only pkg_resources.
+    if module_name == "setuptools":
+        module_name = "pkg_resources"
+
+    try:
+        __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)
+        return getattr(pip._vendor, module_name)
+    except ImportError:
+        # We allow 'truststore' to fail to import due
+        # to being unavailable on Python 3.9 and earlier.
+        if module_name == "truststore" and sys.version_info < (3, 10):
+            return None
+        raise
+
+
+def get_vendor_version_from_module(module_name: str) -> Optional[str]:
+    module = get_module_from_module_name(module_name)
+    version = getattr(module, "__version__", None)
+
+    if module and not version:
+        # Try to find version in debundled module info.
+        assert module.__file__ is not None
+        env = get_environment([os.path.dirname(module.__file__)])
+        dist = env.get_distribution(module_name)
+        if dist:
+            version = str(dist.version)
+
+    return version
+
+
+def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
+    """Log the actual version and print extra info if there is
+    a conflict or if the actual version could not be imported.
+    """
+    for module_name, expected_version in vendor_txt_versions.items():
+        extra_message = ""
+        actual_version = get_vendor_version_from_module(module_name)
+        if not actual_version:
+            extra_message = (
+                " (Unable to locate actual module version, using"
+                " vendor.txt specified version)"
+            )
+            actual_version = expected_version
+        elif parse_version(actual_version) != parse_version(expected_version):
+            extra_message = (
+                " (CONFLICT: vendor.txt suggests version should"
+                f" be {expected_version})"
+            )
+        logger.info("%s==%s%s", module_name, actual_version, extra_message)
+
+
+def show_vendor_versions() -> None:
+    logger.info("vendored library versions:")
+
+    vendor_txt_versions = create_vendor_txt_map()
+    with indent_log():
+        show_actual_vendor_versions(vendor_txt_versions)
+
+
+def show_tags(options: Values) -> None:
+    tag_limit = 10
+
+    target_python = make_target_python(options)
+    tags = target_python.get_sorted_tags()
+
+    # Display the target options that were explicitly provided.
+    formatted_target = target_python.format_given()
+    suffix = ""
+    if formatted_target:
+        suffix = f" (target: {formatted_target})"
+
+    msg = f"Compatible tags: {len(tags)}{suffix}"
+    logger.info(msg)
+
+    if options.verbose < 1 and len(tags) > tag_limit:
+        tags_limited = True
+        tags = tags[:tag_limit]
+    else:
+        tags_limited = False
+
+    with indent_log():
+        for tag in tags:
+            logger.info(str(tag))
+
+        if tags_limited:
+            msg = f"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
+            logger.info(msg)
+
+
+def ca_bundle_info(config: Configuration) -> str:
+    levels = {key.split(".", 1)[0] for key, _ in config.items()}
+    if not levels:
+        return "Not specified"
+
+    levels_that_override_global = ["install", "wheel", "download"]
+    global_overriding_level = [
+        level for level in levels if level in levels_that_override_global
+    ]
+    if not global_overriding_level:
+        return "global"
+
+    if "global" in levels:
+        levels.remove("global")
+    return ", ".join(levels)
+
+
+class DebugCommand(Command):
+    """
+    Display debug information.
+    """
+
+    usage = """
+      %prog <options>"""
+    ignore_require_venv = True
+
+    def add_options(self) -> None:
+        cmdoptions.add_target_python_options(self.cmd_opts)
+        self.parser.insert_option_group(0, self.cmd_opts)
+        self.parser.config.load()
+
+    def run(self, options: Values, args: List[str]) -> int:
+        logger.warning(
+            "This command is only meant for debugging. "
+            "Do not use this with automation for parsing and getting these "
+            "details, since the output and options of this command may "
+            "change without notice."
+        )
+        show_value("pip version", get_pip_version())
+        show_value("sys.version", sys.version)
+        show_value("sys.executable", sys.executable)
+        show_value("sys.getdefaultencoding", sys.getdefaultencoding())
+        show_value("sys.getfilesystemencoding", sys.getfilesystemencoding())
+        show_value(
+            "locale.getpreferredencoding",
+            locale.getpreferredencoding(),
+        )
+        show_value("sys.platform", sys.platform)
+        show_sys_implementation()
+
+        show_value("'cert' config value", ca_bundle_info(self.parser.config))
+        show_value("REQUESTS_CA_BUNDLE", os.environ.get("REQUESTS_CA_BUNDLE"))
+        show_value("CURL_CA_BUNDLE", os.environ.get("CURL_CA_BUNDLE"))
+        show_value("pip._vendor.certifi.where()", where())
+        show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED)
+
+        show_vendor_versions()
+
+        show_tags(options)
+
+        return SUCCESS
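`create_vendor_txt_map` above keeps only lines containing `==`, strips trailing comments, and builds a name-to-version dict. A standalone sketch of that parsing against a made-up `vendor.txt` body (names and versions are illustrative):

```python
vendor_txt = """\
CacheControl==0.13.1  # trailing comment is dropped by the split
colorama==0.4.6
# a comment-only line is skipped because it contains no ==
requests==2.31.0
"""
lines = [
    line.strip().split(" ", 1)[0] for line in vendor_txt.splitlines() if "==" in line
]
print(dict(line.split("==", 1) for line in lines))
# {'CacheControl': '0.13.1', 'colorama': '0.4.6', 'requests': '2.31.0'}
```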
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/commands/download.py b/.venv/lib/python3.12/site-packages/pip/_internal/commands/download.py
new file mode 100644
index 0000000..54247a7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/commands/download.py
@@ -0,0 +1,147 @@
+import logging
+import os
+from optparse import Values
+from typing import List
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.cli.req_command import RequirementCommand, with_cleanup
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.operations.build.build_tracker import get_build_tracker
+from pip._internal.req.req_install import check_legacy_setup_py_options
+from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
+from pip._internal.utils.temp_dir import TempDirectory
+
+logger = logging.getLogger(__name__)
+
+
+class DownloadCommand(RequirementCommand):
+    """
+    Download packages from:
+
+    - PyPI (and other indexes) using requirement specifiers.
+    - VCS project urls.
+    - Local project directories.
+    - Local or remote source archives.
+
+    pip also supports downloading from "requirements files", which provide
+    an easy way to specify a whole environment to be downloaded.
+    """
+
+    usage = """
+      %prog [options] <requirement specifier> [package-index-options] ...
+      %prog [options] -r <requirements file> [package-index-options] ...
+      %prog [options] <vcs project url> ...
+      %prog [options] <local project path> ...
+      %prog [options] <archive url/path> ..."""
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(cmdoptions.constraints())
+        self.cmd_opts.add_option(cmdoptions.requirements())
+        self.cmd_opts.add_option(cmdoptions.no_deps())
+        self.cmd_opts.add_option(cmdoptions.global_options())
+        self.cmd_opts.add_option(cmdoptions.no_binary())
+        self.cmd_opts.add_option(cmdoptions.only_binary())
+        self.cmd_opts.add_option(cmdoptions.prefer_binary())
+        self.cmd_opts.add_option(cmdoptions.src())
+        self.cmd_opts.add_option(cmdoptions.pre())
+        self.cmd_opts.add_option(cmdoptions.require_hashes())
+        self.cmd_opts.add_option(cmdoptions.progress_bar())
+        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
+        self.cmd_opts.add_option(cmdoptions.use_pep517())
+        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+        self.cmd_opts.add_option(cmdoptions.check_build_deps())
+        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
+
+        self.cmd_opts.add_option(
+            "-d",
+            "--dest",
+            "--destination-dir",
+            "--destination-directory",
+            dest="download_dir",
+            metavar="dir",
+            default=os.curdir,
+            help="Download packages into .",
+        )
+
+        cmdoptions.add_target_python_options(self.cmd_opts)
+
+        index_opts = cmdoptions.make_option_group(
+            cmdoptions.index_group,
+            self.parser,
+        )
+
+        self.parser.insert_option_group(0, index_opts)
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    @with_cleanup
+    def run(self, options: Values, args: List[str]) -> int:
+        options.ignore_installed = True
+        # editable doesn't really make sense for `pip download`, but the bowels
+        # of the RequirementSet code require that property.
+        options.editables = []
+
+        cmdoptions.check_dist_restriction(options)
+
+        options.download_dir = normalize_path(options.download_dir)
+        ensure_dir(options.download_dir)
+
+        session = self.get_default_session(options)
+
+        target_python = make_target_python(options)
+        finder = self._build_package_finder(
+            options=options,
+            session=session,
+            target_python=target_python,
+            ignore_requires_python=options.ignore_requires_python,
+        )
+
+        build_tracker = self.enter_context(get_build_tracker())
+
+        directory = TempDirectory(
+            delete=not options.no_clean,
+            kind="download",
+            globally_managed=True,
+        )
+
+        reqs = self.get_requirements(args, options, finder, session)
+        check_legacy_setup_py_options(options, reqs)
+
+        preparer = self.make_requirement_preparer(
+            temp_build_dir=directory,
+            options=options,
+            build_tracker=build_tracker,
+            session=session,
+            finder=finder,
+            download_dir=options.download_dir,
+            use_user_site=False,
+            verbosity=self.verbosity,
+        )
+
+        resolver = self.make_resolver(
+            preparer=preparer,
+            finder=finder,
+            options=options,
+            ignore_requires_python=options.ignore_requires_python,
+            use_pep517=options.use_pep517,
+            py_version_info=options.python_version,
+        )
+
+        self.trace_basic_info(finder)
+
+        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
+
+        downloaded: List[str] = []
+        for req in requirement_set.requirements.values():
+            if req.satisfied_by is None:
+                assert req.name is not None
+                preparer.save_linked_requirement(req)
+                downloaded.append(req.name)
+
+        preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
+        requirement_set.warn_legacy_versions_and_specifiers()
+
+        if downloaded:
+            write_output("Successfully downloaded %s", " ".join(downloaded))
+
+        return SUCCESS
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/commands/freeze.py b/.venv/lib/python3.12/site-packages/pip/_internal/commands/freeze.py
new file mode 100644
index 0000000..e64cb3d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/commands/freeze.py
@@ -0,0 +1,109 @@
+import sys
+from optparse import Values
+from typing import AbstractSet, List
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.operations.freeze import freeze
+from pip._internal.utils.compat import stdlib_pkgs
+
+
+def _should_suppress_build_backends() -> bool:
+    return sys.version_info < (3, 12)
+
+
+def _dev_pkgs() -> AbstractSet[str]:
+    pkgs = {"pip"}
+
+    if _should_suppress_build_backends():
+        pkgs |= {"setuptools", "distribute", "wheel"}
+        pkgs |= {"setuptools", "distribute", "wheel", "pkg-resources"}
+
+    return pkgs
+
+
+class FreezeCommand(Command):
+    """
+    Output installed packages in requirements format.
+
+    packages are listed in a case-insensitive sorted order.
+    """
+
+    usage = """
+      %prog [options]"""
+    log_streams = ("ext://sys.stderr", "ext://sys.stderr")
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-r",
+            "--requirement",
+            dest="requirements",
+            action="append",
+            default=[],
+            metavar="file",
+            help=(
+                "Use the order in the given requirements file and its "
+                "comments when generating output. This option can be "
+                "used multiple times."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "-l",
+            "--local",
+            dest="local",
+            action="store_true",
+            default=False,
+            help=(
+                "If in a virtualenv that has global access, do not output "
+                "globally-installed packages."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "--user",
+            dest="user",
+            action="store_true",
+            default=False,
+            help="Only output packages installed in user-site.",
+        )
+        self.cmd_opts.add_option(cmdoptions.list_path())
+        self.cmd_opts.add_option(
+            "--all",
+            dest="freeze_all",
+            action="store_true",
+            help=(
+                "Do not skip these packages in the output:"
+                " {}".format(", ".join(_dev_pkgs()))
+            ),
+        )
+        self.cmd_opts.add_option(
+            "--exclude-editable",
+            dest="exclude_editable",
+            action="store_true",
+            help="Exclude editable package from output.",
+        )
+        self.cmd_opts.add_option(cmdoptions.list_exclude())
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        skip = set(stdlib_pkgs)
+        if not options.freeze_all:
+            skip.update(_dev_pkgs())
+
+        if options.excludes:
+            skip.update(options.excludes)
+
+        cmdoptions.check_list_path_option(options)
+
+        for line in freeze(
+            requirement=options.requirements,
+            local_only=options.local,
+            user_only=options.user,
+            paths=options.path,
+            isolated=options.isolated_mode,
+            skip=skip,
+            exclude_editable=options.exclude_editable,
+        ):
+            sys.stdout.write(line + "\n")
+        return SUCCESS
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/commands/hash.py b/.venv/lib/python3.12/site-packages/pip/_internal/commands/hash.py
new file mode 100644
index 0000000..042dac8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/commands/hash.py
@@ -0,0 +1,59 @@
+import hashlib
+import logging
+import sys
+from optparse import Values
+from typing import List
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
+from pip._internal.utils.misc import read_chunks, write_output
+
+logger = logging.getLogger(__name__)
+
+
+class HashCommand(Command):
+    """
+    Compute a hash of a local package archive.
+
+    These can be used with --hash in a requirements file to do repeatable
+    installs.
+    """
+
+    usage = "%prog [options]  ..."
+    ignore_require_venv = True
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-a",
+            "--algorithm",
+            dest="algorithm",
+            choices=STRONG_HASHES,
+            action="store",
+            default=FAVORITE_HASH,
+            help="The hash algorithm to use: one of {}".format(
+                ", ".join(STRONG_HASHES)
+            ),
+        )
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        if not args:
+            self.parser.print_usage(sys.stderr)
+            return ERROR
+
+        algorithm = options.algorithm
+        for path in args:
+            write_output(
+                "%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm)
+            )
+        return SUCCESS
+
+
+def _hash_of_file(path: str, algorithm: str) -> str:
+    """Return the hash digest of a file."""
+    with open(path, "rb") as archive:
+        hash = hashlib.new(algorithm)
+        for chunk in read_chunks(archive):
+            hash.update(chunk)
+    return hash.hexdigest()
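`_hash_of_file` above streams the archive in chunks rather than reading it whole. A rough standalone equivalent using only `hashlib` (the chunk size and file name are assumptions, not pip's values), together with the requirements-file line such a digest is meant to feed:

```python
import hashlib

def hash_of_file(path: str, algorithm: str = "sha256", chunk_size: int = 8192) -> str:
    # Stream the file so arbitrarily large archives never sit fully in memory.
    h = hashlib.new(algorithm)
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# Hypothetical usage:
#   digest = hash_of_file("example-1.0.tar.gz")
#   requirements.txt line:  example==1.0 --hash=sha256:<digest>
```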
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/commands/help.py b/.venv/lib/python3.12/site-packages/pip/_internal/commands/help.py
new file mode 100644
index 0000000..6206631
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/commands/help.py
@@ -0,0 +1,41 @@
+from optparse import Values
+from typing import List
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.exceptions import CommandError
+
+
+class HelpCommand(Command):
+    """Show help for commands"""
+
+    usage = """
+      %prog <command>"""
+    ignore_require_venv = True
+
+    def run(self, options: Values, args: List[str]) -> int:
+        from pip._internal.commands import (
+            commands_dict,
+            create_command,
+            get_similar_commands,
+        )
+
+        try:
+            # 'pip help' with no args is handled by pip.__init__.parseopt()
+            cmd_name = args[0]  # the command we need help for
+        except IndexError:
+            return SUCCESS
+
+        if cmd_name not in commands_dict:
+            guess = get_similar_commands(cmd_name)
+
+            msg = [f'unknown command "{cmd_name}"']
+            if guess:
+                msg.append(f'maybe you meant "{guess}"')
+
+            raise CommandError(" - ".join(msg))
+
+        command = create_command(cmd_name)
+        command.parser.print_help()
+
+        return SUCCESS
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/commands/index.py b/.venv/lib/python3.12/site-packages/pip/_internal/commands/index.py
new file mode 100644
index 0000000..f55e9e4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/commands/index.py
@@ -0,0 +1,139 @@
+import logging
+from optparse import Values
+from typing import Any, Iterable, List, Optional, Union
+
+from pip._vendor.packaging.version import LegacyVersion, Version
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.req_command import IndexGroupCommand
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.commands.search import print_dist_installation_info
+from pip._internal.exceptions import CommandError, DistributionNotFound, PipError
+from pip._internal.index.collector import LinkCollector
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.models.target_python import TargetPython
+from pip._internal.network.session import PipSession
+from pip._internal.utils.misc import write_output
+
+logger = logging.getLogger(__name__)
+
+
+class IndexCommand(IndexGroupCommand):
+    """
+    Inspect information available from package indexes.
+    """
+
+    ignore_require_venv = True
+    usage = """
+        %prog versions <package>
+    """
+
+    def add_options(self) -> None:
+        cmdoptions.add_target_python_options(self.cmd_opts)
+
+        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
+        self.cmd_opts.add_option(cmdoptions.pre())
+        self.cmd_opts.add_option(cmdoptions.no_binary())
+        self.cmd_opts.add_option(cmdoptions.only_binary())
+
+        index_opts = cmdoptions.make_option_group(
+            cmdoptions.index_group,
+            self.parser,
+        )
+
+        self.parser.insert_option_group(0, index_opts)
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        handlers = {
+            "versions": self.get_available_package_versions,
+        }
+
+        logger.warning(
+            "pip index is currently an experimental command. "
+            "It may be removed/changed in a future release "
+            "without prior warning."
+        )
+
+        # Determine action
+        if not args or args[0] not in handlers:
+            logger.error(
+                "Need an action (%s) to perform.",
+                ", ".join(sorted(handlers)),
+            )
+            return ERROR
+
+        action = args[0]
+
+        # Error handling happens here, not in the action-handlers.
+        try:
+            handlers[action](options, args[1:])
+        except PipError as e:
+            logger.error(e.args[0])
+            return ERROR
+
+        return SUCCESS
+
+    def _build_package_finder(
+        self,
+        options: Values,
+        session: PipSession,
+        target_python: Optional[TargetPython] = None,
+        ignore_requires_python: Optional[bool] = None,
+    ) -> PackageFinder:
+        """
+        Create a package finder appropriate to the index command.
+        """
+        link_collector = LinkCollector.create(session, options=options)
+
+        # Pass allow_yanked=False to ignore yanked versions.
+        selection_prefs = SelectionPreferences(
+            allow_yanked=False,
+            allow_all_prereleases=options.pre,
+            ignore_requires_python=ignore_requires_python,
+        )
+
+        return PackageFinder.create(
+            link_collector=link_collector,
+            selection_prefs=selection_prefs,
+            target_python=target_python,
+        )
+
+    def get_available_package_versions(self, options: Values, args: List[Any]) -> None:
+        if len(args) != 1:
+            raise CommandError("You need to specify exactly one argument")
+
+        target_python = cmdoptions.make_target_python(options)
+        query = args[0]
+
+        with self._build_session(options) as session:
+            finder = self._build_package_finder(
+                options=options,
+                session=session,
+                target_python=target_python,
+                ignore_requires_python=options.ignore_requires_python,
+            )
+
+            versions: Iterable[Union[LegacyVersion, Version]] = (
+                candidate.version for candidate in finder.find_all_candidates(query)
+            )
+
+            if not options.pre:
+                # Remove prereleases
+                versions = (
+                    version for version in versions if not version.is_prerelease
+                )
+            versions = set(versions)
+
+            if not versions:
+                raise DistributionNotFound(
+                    f"No matching distribution found for {query}"
+                )
+
+            formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
+            latest = formatted_versions[0]
+
+        write_output(f"{query} ({latest})")
+        write_output("Available versions: {}".format(", ".join(formatted_versions)))
+        print_dist_installation_info(query, latest)
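`get_available_package_versions` above filters out prereleases unless `--pre` is given, de-duplicates, and prints versions newest-first. A standalone sketch of that filtering with made-up version strings:

```python
from pip._vendor.packaging.version import parse

raw = ["1.0.0", "2.0.0b1", "1.2.3", "2.0.0"]   # hypothetical candidate versions
versions = {parse(v) for v in raw if not parse(v).is_prerelease}
formatted = [str(v) for v in sorted(versions, reverse=True)]
print(formatted)      # ['2.0.0', '1.2.3', '1.0.0']
print(formatted[0])   # latest: 2.0.0
```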
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/commands/inspect.py b/.venv/lib/python3.12/site-packages/pip/_internal/commands/inspect.py
new file mode 100644
index 0000000..27c8fa3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/commands/inspect.py
@@ -0,0 +1,92 @@
+import logging
+from optparse import Values
+from typing import Any, Dict, List
+
+from pip._vendor.packaging.markers import default_environment
+from pip._vendor.rich import print_json
+
+from pip import __version__
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.req_command import Command
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.metadata import BaseDistribution, get_environment
+from pip._internal.utils.compat import stdlib_pkgs
+from pip._internal.utils.urls import path_to_url
+
+logger = logging.getLogger(__name__)
+
+
+class InspectCommand(Command):
+    """
+    Inspect the content of a Python environment and produce a report in JSON format.
+    """
+
+    ignore_require_venv = True
+    usage = """
+      %prog [options]"""
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "--local",
+            action="store_true",
+            default=False,
+            help=(
+                "If in a virtualenv that has global access, do not list "
+                "globally-installed packages."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "--user",
+            dest="user",
+            action="store_true",
+            default=False,
+            help="Only output packages installed in user-site.",
+        )
+        self.cmd_opts.add_option(cmdoptions.list_path())
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        cmdoptions.check_list_path_option(options)
+        dists = get_environment(options.path).iter_installed_distributions(
+            local_only=options.local,
+            user_only=options.user,
+            skip=set(stdlib_pkgs),
+        )
+        output = {
+            "version": "1",
+            "pip_version": __version__,
+            "installed": [self._dist_to_dict(dist) for dist in dists],
+            "environment": default_environment(),
+            # TODO tags? scheme?
+        }
+        print_json(data=output)
+        return SUCCESS
+
+    def _dist_to_dict(self, dist: BaseDistribution) -> Dict[str, Any]:
+        res: Dict[str, Any] = {
+            "metadata": dist.metadata_dict,
+            "metadata_location": dist.info_location,
+        }
+        # direct_url. Note that we don't have download_info (as in the installation
+        # report) since it is not recorded in installed metadata.
+        direct_url = dist.direct_url
+        if direct_url is not None:
+            res["direct_url"] = direct_url.to_dict()
+        else:
+            # Emulate direct_url for legacy editable installs.
+            editable_project_location = dist.editable_project_location
+            if editable_project_location is not None:
+                res["direct_url"] = {
+                    "url": path_to_url(editable_project_location),
+                    "dir_info": {
+                        "editable": True,
+                    },
+                }
+        # installer
+        installer = dist.installer
+        if dist.installer:
+            res["installer"] = installer
+        # requested
+        if dist.installed_with_dist_info:
+            res["requested"] = dist.requested
+        return res
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/commands/install.py b/.venv/lib/python3.12/site-packages/pip/_internal/commands/install.py
new file mode 100644
index 0000000..e944bb9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/commands/install.py
@@ -0,0 +1,774 @@
+import errno
+import json
+import operator
+import os
+import shutil
+import site
+from optparse import SUPPRESS_HELP, Values
+from typing import List, Optional
+
+from pip._vendor.rich import print_json
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.cli.req_command import (
+    RequirementCommand,
+    warn_if_run_as_root,
+    with_cleanup,
+)
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.exceptions import CommandError, InstallationError
+from pip._internal.locations import get_scheme
+from pip._internal.metadata import get_environment
+from pip._internal.models.installation_report import InstallationReport
+from pip._internal.operations.build.build_tracker import get_build_tracker
+from pip._internal.operations.check import ConflictDetails, check_install_conflicts
+from pip._internal.req import install_given_reqs
+from pip._internal.req.req_install import (
+    InstallRequirement,
+    check_legacy_setup_py_options,
+)
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.filesystem import test_writable_dir
+from pip._internal.utils.logging import getLogger
+from pip._internal.utils.misc import (
+    check_externally_managed,
+    ensure_dir,
+    get_pip_version,
+    protect_pip_from_modification_on_windows,
+    write_output,
+)
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.virtualenv import (
+    running_under_virtualenv,
+    virtualenv_no_global,
+)
+from pip._internal.wheel_builder import build, should_build_for_install_command
+
+logger = getLogger(__name__)
+
+
+class InstallCommand(RequirementCommand):
+    """
+    Install packages from:
+
+    - PyPI (and other indexes) using requirement specifiers.
+    - VCS project urls.
+    - Local project directories.
+    - Local or remote source archives.
+
+    pip also supports installing from "requirements files", which provide
+    an easy way to specify a whole environment to be installed.
+    """
+
+    usage = """
+      %prog [options] <requirement specifier> [package-index-options] ...
+      %prog [options] -r <requirements file> [package-index-options] ...
+      %prog [options] [-e] <vcs project url> ...
+      %prog [options] [-e] <local project path> ...
+      %prog [options] <archive url/path> ..."""
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(cmdoptions.requirements())
+        self.cmd_opts.add_option(cmdoptions.constraints())
+        self.cmd_opts.add_option(cmdoptions.no_deps())
+        self.cmd_opts.add_option(cmdoptions.pre())
+
+        self.cmd_opts.add_option(cmdoptions.editable())
+        self.cmd_opts.add_option(
+            "--dry-run",
+            action="store_true",
+            dest="dry_run",
+            default=False,
+            help=(
+                "Don't actually install anything, just print what would be. "
+                "Can be used in combination with --ignore-installed "
+                "to 'resolve' the requirements."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "-t",
+            "--target",
+            dest="target_dir",
+            metavar="dir",
+            default=None,
+            help=(
+                "Install packages into . "
+                "By default this will not replace existing files/folders in "
+                ". Use --upgrade to replace existing packages in  "
+                "with new versions."
+            ),
+        )
+        cmdoptions.add_target_python_options(self.cmd_opts)
+
+        self.cmd_opts.add_option(
+            "--user",
+            dest="use_user_site",
+            action="store_true",
+            help=(
+                "Install to the Python user install directory for your "
+                "platform. Typically ~/.local/, or %APPDATA%\\Python on "
+                "Windows. (See the Python documentation for site.USER_BASE "
+                "for full details.)"
+            ),
+        )
+        self.cmd_opts.add_option(
+            "--no-user",
+            dest="use_user_site",
+            action="store_false",
+            help=SUPPRESS_HELP,
+        )
+        self.cmd_opts.add_option(
+            "--root",
+            dest="root_path",
+            metavar="dir",
+            default=None,
+            help="Install everything relative to this alternate root directory.",
+        )
+        self.cmd_opts.add_option(
+            "--prefix",
+            dest="prefix_path",
+            metavar="dir",
+            default=None,
+            help=(
+                "Installation prefix where lib, bin and other top-level "
+                "folders are placed. Note that the resulting installation may "
+                "contain scripts and other resources which reference the "
+                "Python interpreter of pip, and not that of ``--prefix``. "
+                "See also the ``--python`` option if the intention is to "
+                "install packages into another (possibly pip-free) "
+                "environment."
+            ),
+        )
+
+        self.cmd_opts.add_option(cmdoptions.src())
+
+        self.cmd_opts.add_option(
+            "-U",
+            "--upgrade",
+            dest="upgrade",
+            action="store_true",
+            help=(
+                "Upgrade all specified packages to the newest available "
+                "version. The handling of dependencies depends on the "
+                "upgrade-strategy used."
+            ),
+        )
+
+        self.cmd_opts.add_option(
+            "--upgrade-strategy",
+            dest="upgrade_strategy",
+            default="only-if-needed",
+            choices=["only-if-needed", "eager"],
+            help=(
+                "Determines how dependency upgrading should be handled "
+                "[default: %default]. "
+                '"eager" - dependencies are upgraded regardless of '
+                "whether the currently installed version satisfies the "
+                "requirements of the upgraded package(s). "
+                '"only-if-needed" -  are upgraded only when they do not '
+                "satisfy the requirements of the upgraded package(s)."
+            ),
+        )
+
+        self.cmd_opts.add_option(
+            "--force-reinstall",
+            dest="force_reinstall",
+            action="store_true",
+            help="Reinstall all packages even if they are already up-to-date.",
+        )
+
+        self.cmd_opts.add_option(
+            "-I",
+            "--ignore-installed",
+            dest="ignore_installed",
+            action="store_true",
+            help=(
+                "Ignore the installed packages, overwriting them. "
+                "This can break your system if the existing package "
+                "is of a different version or was installed "
+                "with a different package manager!"
+            ),
+        )
+
+        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
+        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
+        self.cmd_opts.add_option(cmdoptions.use_pep517())
+        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+        self.cmd_opts.add_option(cmdoptions.check_build_deps())
+        self.cmd_opts.add_option(cmdoptions.override_externally_managed())
+
+        self.cmd_opts.add_option(cmdoptions.config_settings())
+        self.cmd_opts.add_option(cmdoptions.global_options())
+
+        self.cmd_opts.add_option(
+            "--compile",
+            action="store_true",
+            dest="compile",
+            default=True,
+            help="Compile Python source files to bytecode",
+        )
+
+        self.cmd_opts.add_option(
+            "--no-compile",
+            action="store_false",
+            dest="compile",
+            help="Do not compile Python source files to bytecode",
+        )
+
+        self.cmd_opts.add_option(
+            "--no-warn-script-location",
+            action="store_false",
+            dest="warn_script_location",
+            default=True,
+            help="Do not warn when installing scripts outside PATH",
+        )
+        self.cmd_opts.add_option(
+            "--no-warn-conflicts",
+            action="store_false",
+            dest="warn_about_conflicts",
+            default=True,
+            help="Do not warn about broken dependencies",
+        )
+        self.cmd_opts.add_option(cmdoptions.no_binary())
+        self.cmd_opts.add_option(cmdoptions.only_binary())
+        self.cmd_opts.add_option(cmdoptions.prefer_binary())
+        self.cmd_opts.add_option(cmdoptions.require_hashes())
+        self.cmd_opts.add_option(cmdoptions.progress_bar())
+        self.cmd_opts.add_option(cmdoptions.root_user_action())
+
+        index_opts = cmdoptions.make_option_group(
+            cmdoptions.index_group,
+            self.parser,
+        )
+
+        self.parser.insert_option_group(0, index_opts)
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+        self.cmd_opts.add_option(
+            "--report",
+            dest="json_report_file",
+            metavar="file",
+            default=None,
+            help=(
+                "Generate a JSON file describing what pip did to install "
+                "the provided requirements. "
+                "Can be used in combination with --dry-run and --ignore-installed "
+                "to 'resolve' the requirements. "
+                "When - is used as file name it writes to stdout. "
+                "When writing to stdout, please combine with the --quiet option "
+                "to avoid mixing pip logging output with JSON output."
+            ),
+        )
+
+    @with_cleanup
+    def run(self, options: Values, args: List[str]) -> int:
+        if options.use_user_site and options.target_dir is not None:
+            raise CommandError("Can not combine '--user' and '--target'")
+
+        # Check whether the environment we're installing into is externally
+        # managed, as specified in PEP 668. Specifying --root, --target, or
+        # --prefix disables the check, since there's no reliable way to locate
+        # the EXTERNALLY-MANAGED file for those cases. An exception is also
+        # made specifically for "--dry-run --report" for convenience.
+        installing_into_current_environment = (
+            not (options.dry_run and options.json_report_file)
+            and options.root_path is None
+            and options.target_dir is None
+            and options.prefix_path is None
+        )
+        if (
+            installing_into_current_environment
+            and not options.override_externally_managed
+        ):
+            check_externally_managed()
+
+        upgrade_strategy = "to-satisfy-only"
+        if options.upgrade:
+            upgrade_strategy = options.upgrade_strategy
+
+        cmdoptions.check_dist_restriction(options, check_target=True)
+
+        logger.verbose("Using %s", get_pip_version())
+        options.use_user_site = decide_user_install(
+            options.use_user_site,
+            prefix_path=options.prefix_path,
+            target_dir=options.target_dir,
+            root_path=options.root_path,
+            isolated_mode=options.isolated_mode,
+        )
+
+        target_temp_dir: Optional[TempDirectory] = None
+        target_temp_dir_path: Optional[str] = None
+        if options.target_dir:
+            options.ignore_installed = True
+            options.target_dir = os.path.abspath(options.target_dir)
+            if (
+                # fmt: off
+                os.path.exists(options.target_dir) and
+                not os.path.isdir(options.target_dir)
+                # fmt: on
+            ):
+                raise CommandError(
+                    "Target path exists but is not a directory, will not continue."
+                )
+
+            # Create a target directory for using with the target option
+            target_temp_dir = TempDirectory(kind="target")
+            target_temp_dir_path = target_temp_dir.path
+            self.enter_context(target_temp_dir)
+
+        global_options = options.global_options or []
+
+        session = self.get_default_session(options)
+
+        target_python = make_target_python(options)
+        finder = self._build_package_finder(
+            options=options,
+            session=session,
+            target_python=target_python,
+            ignore_requires_python=options.ignore_requires_python,
+        )
+        build_tracker = self.enter_context(get_build_tracker())
+
+        directory = TempDirectory(
+            delete=not options.no_clean,
+            kind="install",
+            globally_managed=True,
+        )
+
+        try:
+            reqs = self.get_requirements(args, options, finder, session)
+            check_legacy_setup_py_options(options, reqs)
+
+            wheel_cache = WheelCache(options.cache_dir)
+
+            # Only when installing is it permitted to use PEP 660.
+            # In other circumstances (pip wheel, pip download) we generate
+            # regular (i.e. non editable) metadata and wheels.
+            for req in reqs:
+                req.permit_editable_wheels = True
+
+            preparer = self.make_requirement_preparer(
+                temp_build_dir=directory,
+                options=options,
+                build_tracker=build_tracker,
+                session=session,
+                finder=finder,
+                use_user_site=options.use_user_site,
+                verbosity=self.verbosity,
+            )
+            resolver = self.make_resolver(
+                preparer=preparer,
+                finder=finder,
+                options=options,
+                wheel_cache=wheel_cache,
+                use_user_site=options.use_user_site,
+                ignore_installed=options.ignore_installed,
+                ignore_requires_python=options.ignore_requires_python,
+                force_reinstall=options.force_reinstall,
+                upgrade_strategy=upgrade_strategy,
+                use_pep517=options.use_pep517,
+            )
+
+            self.trace_basic_info(finder)
+
+            requirement_set = resolver.resolve(
+                reqs, check_supported_wheels=not options.target_dir
+            )
+
+            if options.json_report_file:
+                report = InstallationReport(requirement_set.requirements_to_install)
+                if options.json_report_file == "-":
+                    print_json(data=report.to_dict())
+                else:
+                    with open(options.json_report_file, "w", encoding="utf-8") as f:
+                        json.dump(report.to_dict(), f, indent=2, ensure_ascii=False)
+
+            if options.dry_run:
+                # In non dry-run mode, the legacy versions and specifiers check
+                # will be done as part of conflict detection.
+                requirement_set.warn_legacy_versions_and_specifiers()
+                would_install_items = sorted(
+                    (r.metadata["name"], r.metadata["version"])
+                    for r in requirement_set.requirements_to_install
+                )
+                if would_install_items:
+                    write_output(
+                        "Would install %s",
+                        " ".join("-".join(item) for item in would_install_items),
+                    )
+                return SUCCESS
+
+            try:
+                pip_req = requirement_set.get_requirement("pip")
+            except KeyError:
+                modifying_pip = False
+            else:
+                # If we're not replacing an already installed pip,
+                # we're not modifying it.
+                modifying_pip = pip_req.satisfied_by is None
+            protect_pip_from_modification_on_windows(modifying_pip=modifying_pip)
+
+            reqs_to_build = [
+                r
+                for r in requirement_set.requirements.values()
+                if should_build_for_install_command(r)
+            ]
+
+            _, build_failures = build(
+                reqs_to_build,
+                wheel_cache=wheel_cache,
+                verify=True,
+                build_options=[],
+                global_options=global_options,
+            )
+
+            if build_failures:
+                raise InstallationError(
+                    "Could not build wheels for {}, which is required to "
+                    "install pyproject.toml-based projects".format(
+                        ", ".join(r.name for r in build_failures)  # type: ignore
+                    )
+                )
+
+            to_install = resolver.get_installation_order(requirement_set)
+
+            # Check for conflicts in the package set we're installing.
+            conflicts: Optional[ConflictDetails] = None
+            should_warn_about_conflicts = (
+                not options.ignore_dependencies and options.warn_about_conflicts
+            )
+            if should_warn_about_conflicts:
+                conflicts = self._determine_conflicts(to_install)
+
+            # Don't warn about script install locations if
+            # --target or --prefix has been specified
+            warn_script_location = options.warn_script_location
+            if options.target_dir or options.prefix_path:
+                warn_script_location = False
+
+            installed = install_given_reqs(
+                to_install,
+                global_options,
+                root=options.root_path,
+                home=target_temp_dir_path,
+                prefix=options.prefix_path,
+                warn_script_location=warn_script_location,
+                use_user_site=options.use_user_site,
+                pycompile=options.compile,
+            )
+
+            lib_locations = get_lib_location_guesses(
+                user=options.use_user_site,
+                home=target_temp_dir_path,
+                root=options.root_path,
+                prefix=options.prefix_path,
+                isolated=options.isolated_mode,
+            )
+            env = get_environment(lib_locations)
+
+            installed.sort(key=operator.attrgetter("name"))
+            items = []
+            for result in installed:
+                item = result.name
+                try:
+                    installed_dist = env.get_distribution(item)
+                    if installed_dist is not None:
+                        item = f"{item}-{installed_dist.version}"
+                except Exception:
+                    pass
+                items.append(item)
+
+            if conflicts is not None:
+                self._warn_about_conflicts(
+                    conflicts,
+                    resolver_variant=self.determine_resolver_variant(options),
+                )
+
+            installed_desc = " ".join(items)
+            if installed_desc:
+                write_output(
+                    "Successfully installed %s",
+                    installed_desc,
+                )
+        except OSError as error:
+            show_traceback = self.verbosity >= 1
+
+            message = create_os_error_message(
+                error,
+                show_traceback,
+                options.use_user_site,
+            )
+            logger.error(message, exc_info=show_traceback)
+
+            return ERROR
+
+        if options.target_dir:
+            assert target_temp_dir
+            self._handle_target_dir(
+                options.target_dir, target_temp_dir, options.upgrade
+            )
+        if options.root_user_action == "warn":
+            warn_if_run_as_root()
+        return SUCCESS
+
+    def _handle_target_dir(
+        self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool
+    ) -> None:
+        ensure_dir(target_dir)
+
+        # Checking both purelib and platlib directories for installed
+        # packages to be moved to target directory
+        lib_dir_list = []
+
+        scheme = get_scheme("", home=target_temp_dir.path)
+        purelib_dir = scheme.purelib
+        platlib_dir = scheme.platlib
+        data_dir = scheme.data
+
+        if os.path.exists(purelib_dir):
+            lib_dir_list.append(purelib_dir)
+        if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
+            lib_dir_list.append(platlib_dir)
+        if os.path.exists(data_dir):
+            lib_dir_list.append(data_dir)
+
+        for lib_dir in lib_dir_list:
+            for item in os.listdir(lib_dir):
+                if lib_dir == data_dir:
+                    ddir = os.path.join(data_dir, item)
+                    if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
+                        continue
+                target_item_dir = os.path.join(target_dir, item)
+                if os.path.exists(target_item_dir):
+                    if not upgrade:
+                        logger.warning(
+                            "Target directory %s already exists. Specify "
+                            "--upgrade to force replacement.",
+                            target_item_dir,
+                        )
+                        continue
+                    if os.path.islink(target_item_dir):
+                        logger.warning(
+                            "Target directory %s already exists and is "
+                            "a link. pip will not automatically replace "
+                            "links, please remove if replacement is "
+                            "desired.",
+                            target_item_dir,
+                        )
+                        continue
+                    if os.path.isdir(target_item_dir):
+                        shutil.rmtree(target_item_dir)
+                    else:
+                        os.remove(target_item_dir)
+
+                shutil.move(os.path.join(lib_dir, item), target_item_dir)
+
+    def _determine_conflicts(
+        self, to_install: List[InstallRequirement]
+    ) -> Optional[ConflictDetails]:
+        try:
+            return check_install_conflicts(to_install)
+        except Exception:
+            logger.exception(
+                "Error while checking for conflicts. Please file an issue on "
+                "pip's issue tracker: https://github.com/pypa/pip/issues/new"
+            )
+            return None
+
+    def _warn_about_conflicts(
+        self, conflict_details: ConflictDetails, resolver_variant: str
+    ) -> None:
+        package_set, (missing, conflicting) = conflict_details
+        if not missing and not conflicting:
+            return
+
+        parts: List[str] = []
+        if resolver_variant == "legacy":
+            parts.append(
+                "pip's legacy dependency resolver does not consider dependency "
+                "conflicts when selecting packages. This behaviour is the "
+                "source of the following dependency conflicts."
+            )
+        else:
+            assert resolver_variant == "resolvelib"
+            parts.append(
+                "pip's dependency resolver does not currently take into account "
+                "all the packages that are installed. This behaviour is the "
+                "source of the following dependency conflicts."
+            )
+
+        # NOTE: There is some duplication here, with commands/check.py
+        for project_name in missing:
+            version = package_set[project_name][0]
+            for dependency in missing[project_name]:
+                message = (
+                    f"{project_name} {version} requires {dependency[1]}, "
+                    "which is not installed."
+                )
+                parts.append(message)
+
+        for project_name in conflicting:
+            version = package_set[project_name][0]
+            for dep_name, dep_version, req in conflicting[project_name]:
+                message = (
+                    "{name} {version} requires {requirement}, but {you} have "
+                    "{dep_name} {dep_version} which is incompatible."
+                ).format(
+                    name=project_name,
+                    version=version,
+                    requirement=req,
+                    dep_name=dep_name,
+                    dep_version=dep_version,
+                    you=("you" if resolver_variant == "resolvelib" else "you'll"),
+                )
+                parts.append(message)
+
+        logger.critical("\n".join(parts))
+
+
+def get_lib_location_guesses(
+    user: bool = False,
+    home: Optional[str] = None,
+    root: Optional[str] = None,
+    isolated: bool = False,
+    prefix: Optional[str] = None,
+) -> List[str]:
+    scheme = get_scheme(
+        "",
+        user=user,
+        home=home,
+        root=root,
+        isolated=isolated,
+        prefix=prefix,
+    )
+    return [scheme.purelib, scheme.platlib]
+
+
+def site_packages_writable(root: Optional[str], isolated: bool) -> bool:
+    return all(
+        test_writable_dir(d)
+        for d in set(get_lib_location_guesses(root=root, isolated=isolated))
+    )
+
+
+def decide_user_install(
+    use_user_site: Optional[bool],
+    prefix_path: Optional[str] = None,
+    target_dir: Optional[str] = None,
+    root_path: Optional[str] = None,
+    isolated_mode: bool = False,
+) -> bool:
+    """Determine whether to do a user install based on the input options.
+
+    If use_user_site is False, no additional checks are done.
+    If use_user_site is True, it is checked for compatibility with other
+    options.
+    If use_user_site is None, the default behaviour depends on the environment,
+    which is provided by the other arguments.
+    """
+    # In some cases (config from tox), use_user_site can be set to an integer
+    # rather than a bool, which 'use_user_site is False' wouldn't catch.
+    if (use_user_site is not None) and (not use_user_site):
+        logger.debug("Non-user install by explicit request")
+        return False
+
+    if use_user_site:
+        if prefix_path:
+            raise CommandError(
+                "Can not combine '--user' and '--prefix' as they imply "
+                "different installation locations"
+            )
+        if virtualenv_no_global():
+            raise InstallationError(
+                "Can not perform a '--user' install. User site-packages "
+                "are not visible in this virtualenv."
+            )
+        logger.debug("User install by explicit request")
+        return True
+
+    # If we are here, user installs have not been explicitly requested/avoided
+    assert use_user_site is None
+
+    # user install incompatible with --prefix/--target
+    if prefix_path or target_dir:
+        logger.debug("Non-user install due to --prefix or --target option")
+        return False
+
+    # If user installs are not enabled, choose a non-user install
+    if not site.ENABLE_USER_SITE:
+        logger.debug("Non-user install because user site-packages disabled")
+        return False
+
+    # If we have permission for a non-user install, do that,
+    # otherwise do a user install.
+    if site_packages_writable(root=root_path, isolated=isolated_mode):
+        logger.debug("Non-user install because site-packages writeable")
+        return False
+
+    logger.info(
+        "Defaulting to user installation because normal site-packages "
+        "is not writeable"
+    )
+    return True
+
+
+def create_os_error_message(
+    error: OSError, show_traceback: bool, using_user_site: bool
+) -> str:
+    """Format an error message for an OSError
+
+    It may occur anytime during the execution of the install command.
+    """
+    parts = []
+
+    # Mention the error if we are not going to show a traceback
+    parts.append("Could not install packages due to an OSError")
+    if not show_traceback:
+        parts.append(": ")
+        parts.append(str(error))
+    else:
+        parts.append(".")
+
+    # Split the error indication from a helper message (if any)
+    parts[-1] += "\n"
+
+    # Suggest useful actions to the user:
+    #  (1) using user site-packages or (2) verifying the permissions
+    if error.errno == errno.EACCES:
+        user_option_part = "Consider using the `--user` option"
+        permissions_part = "Check the permissions"
+
+        if not running_under_virtualenv() and not using_user_site:
+            parts.extend(
+                [
+                    user_option_part,
+                    " or ",
+                    permissions_part.lower(),
+                ]
+            )
+        else:
+            parts.append(permissions_part)
+        parts.append(".\n")
+
+    # Suggest the user to enable Long Paths if path length is
+    # more than 260
+    if (
+        WINDOWS
+        and error.errno == errno.ENOENT
+        and error.filename
+        and len(error.filename) > 260
+    ):
+        parts.append(
+            "HINT: This error might have occurred since "
+            "this system does not have Windows Long Path "
+            "support enabled. You can find information on "
+            "how to enable this at "
+            "https://pip.pypa.io/warnings/enable-long-paths\n"
+        )
+
+    return "".join(parts).strip() + "\n"
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/commands/list.py b/.venv/lib/python3.12/site-packages/pip/_internal/commands/list.py
new file mode 100644
index 0000000..32fb19b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/commands/list.py
@@ -0,0 +1,370 @@
+import json
+import logging
+from optparse import Values
+from typing import TYPE_CHECKING, Generator, List, Optional, Sequence, Tuple, cast
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.req_command import IndexGroupCommand
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.exceptions import CommandError
+from pip._internal.index.collector import LinkCollector
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution, get_environment
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.network.session import PipSession
+from pip._internal.utils.compat import stdlib_pkgs
+from pip._internal.utils.misc import tabulate, write_output
+
+if TYPE_CHECKING:
+    from pip._internal.metadata.base import DistributionVersion
+
+    class _DistWithLatestInfo(BaseDistribution):
+        """Give the distribution object a couple of extra fields.
+
+        These will be populated during ``get_outdated()``. This is dirty but
+        makes the rest of the code much cleaner.
+        """
+
+        latest_version: DistributionVersion
+        latest_filetype: str
+
+    _ProcessedDists = Sequence[_DistWithLatestInfo]
+
+
+from pip._vendor.packaging.version import parse
+
+logger = logging.getLogger(__name__)
+
+
+class ListCommand(IndexGroupCommand):
+    """
+    List installed packages, including editables.
+
+    Packages are listed in a case-insensitive sorted order.
+    """
+
+    ignore_require_venv = True
+    usage = """
+      %prog [options]"""
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-o",
+            "--outdated",
+            action="store_true",
+            default=False,
+            help="List outdated packages",
+        )
+        self.cmd_opts.add_option(
+            "-u",
+            "--uptodate",
+            action="store_true",
+            default=False,
+            help="List uptodate packages",
+        )
+        self.cmd_opts.add_option(
+            "-e",
+            "--editable",
+            action="store_true",
+            default=False,
+            help="List editable projects.",
+        )
+        self.cmd_opts.add_option(
+            "-l",
+            "--local",
+            action="store_true",
+            default=False,
+            help=(
+                "If in a virtualenv that has global access, do not list "
+                "globally-installed packages."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "--user",
+            dest="user",
+            action="store_true",
+            default=False,
+            help="Only output packages installed in user-site.",
+        )
+        self.cmd_opts.add_option(cmdoptions.list_path())
+        self.cmd_opts.add_option(
+            "--pre",
+            action="store_true",
+            default=False,
+            help=(
+                "Include pre-release and development versions. By default, "
+                "pip only finds stable versions."
+            ),
+        )
+
+        self.cmd_opts.add_option(
+            "--format",
+            action="store",
+            dest="list_format",
+            default="columns",
+            choices=("columns", "freeze", "json"),
+            help=(
+                "Select the output format among: columns (default), freeze, or json. "
+                "The 'freeze' format cannot be used with the --outdated option."
+            ),
+        )
+
+        self.cmd_opts.add_option(
+            "--not-required",
+            action="store_true",
+            dest="not_required",
+            help="List packages that are not dependencies of installed packages.",
+        )
+
+        self.cmd_opts.add_option(
+            "--exclude-editable",
+            action="store_false",
+            dest="include_editable",
+            help="Exclude editable package from output.",
+        )
+        self.cmd_opts.add_option(
+            "--include-editable",
+            action="store_true",
+            dest="include_editable",
+            help="Include editable package from output.",
+            default=True,
+        )
+        self.cmd_opts.add_option(cmdoptions.list_exclude())
+        index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)
+
+        self.parser.insert_option_group(0, index_opts)
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def _build_package_finder(
+        self, options: Values, session: PipSession
+    ) -> PackageFinder:
+        """
+        Create a package finder appropriate to this list command.
+        """
+        link_collector = LinkCollector.create(session, options=options)
+
+        # Pass allow_yanked=False to ignore yanked versions.
+        selection_prefs = SelectionPreferences(
+            allow_yanked=False,
+            allow_all_prereleases=options.pre,
+        )
+
+        return PackageFinder.create(
+            link_collector=link_collector,
+            selection_prefs=selection_prefs,
+        )
+
+    def run(self, options: Values, args: List[str]) -> int:
+        if options.outdated and options.uptodate:
+            raise CommandError("Options --outdated and --uptodate cannot be combined.")
+
+        if options.outdated and options.list_format == "freeze":
+            raise CommandError(
+                "List format 'freeze' cannot be used with the --outdated option."
+            )
+
+        cmdoptions.check_list_path_option(options)
+
+        skip = set(stdlib_pkgs)
+        if options.excludes:
+            skip.update(canonicalize_name(n) for n in options.excludes)
+
+        packages: "_ProcessedDists" = [
+            cast("_DistWithLatestInfo", d)
+            for d in get_environment(options.path).iter_installed_distributions(
+                local_only=options.local,
+                user_only=options.user,
+                editables_only=options.editable,
+                include_editables=options.include_editable,
+                skip=skip,
+            )
+        ]
+
+        # get_not_required must be called first so that all dependencies are
+        # found and filtered out correctly. Otherwise a package might not be
+        # identified as a requirement because some parent packages could
+        # already have been filtered out.
+        if options.not_required:
+            packages = self.get_not_required(packages, options)
+
+        if options.outdated:
+            packages = self.get_outdated(packages, options)
+        elif options.uptodate:
+            packages = self.get_uptodate(packages, options)
+
+        self.output_package_listing(packages, options)
+        return SUCCESS
+
+    def get_outdated(
+        self, packages: "_ProcessedDists", options: Values
+    ) -> "_ProcessedDists":
+        return [
+            dist
+            for dist in self.iter_packages_latest_infos(packages, options)
+            if parse(str(dist.latest_version)) > parse(str(dist.version))
+        ]
+
+    def get_uptodate(
+        self, packages: "_ProcessedDists", options: Values
+    ) -> "_ProcessedDists":
+        return [
+            dist
+            for dist in self.iter_packages_latest_infos(packages, options)
+            if parse(str(dist.latest_version)) == parse(str(dist.version))
+        ]
+
+    def get_not_required(
+        self, packages: "_ProcessedDists", options: Values
+    ) -> "_ProcessedDists":
+        dep_keys = {
+            canonicalize_name(dep.name)
+            for dist in packages
+            for dep in (dist.iter_dependencies() or ())
+        }
+
+        # Create a set to remove duplicate packages, and cast it to a list
+        # to keep the return type consistent with get_outdated and
+        # get_uptodate
+        return list({pkg for pkg in packages if pkg.canonical_name not in dep_keys})
+
+    def iter_packages_latest_infos(
+        self, packages: "_ProcessedDists", options: Values
+    ) -> Generator["_DistWithLatestInfo", None, None]:
+        with self._build_session(options) as session:
+            finder = self._build_package_finder(options, session)
+
+            def latest_info(
+                dist: "_DistWithLatestInfo",
+            ) -> Optional["_DistWithLatestInfo"]:
+                all_candidates = finder.find_all_candidates(dist.canonical_name)
+                if not options.pre:
+                    # Remove prereleases
+                    all_candidates = [
+                        candidate
+                        for candidate in all_candidates
+                        if not candidate.version.is_prerelease
+                    ]
+
+                evaluator = finder.make_candidate_evaluator(
+                    project_name=dist.canonical_name,
+                )
+                best_candidate = evaluator.sort_best_candidate(all_candidates)
+                if best_candidate is None:
+                    return None
+
+                remote_version = best_candidate.version
+                if best_candidate.link.is_wheel:
+                    typ = "wheel"
+                else:
+                    typ = "sdist"
+                dist.latest_version = remote_version
+                dist.latest_filetype = typ
+                return dist
+
+            for dist in map(latest_info, packages):
+                if dist is not None:
+                    yield dist
+
+    def output_package_listing(
+        self, packages: "_ProcessedDists", options: Values
+    ) -> None:
+        packages = sorted(
+            packages,
+            key=lambda dist: dist.canonical_name,
+        )
+        if options.list_format == "columns" and packages:
+            data, header = format_for_columns(packages, options)
+            self.output_package_listing_columns(data, header)
+        elif options.list_format == "freeze":
+            for dist in packages:
+                if options.verbose >= 1:
+                    write_output(
+                        "%s==%s (%s)", dist.raw_name, dist.version, dist.location
+                    )
+                else:
+                    write_output("%s==%s", dist.raw_name, dist.version)
+        elif options.list_format == "json":
+            write_output(format_for_json(packages, options))
+
+    def output_package_listing_columns(
+        self, data: List[List[str]], header: List[str]
+    ) -> None:
+        # insert the header first: we need to know the size of column names
+        if len(data) > 0:
+            data.insert(0, header)
+
+        pkg_strings, sizes = tabulate(data)
+
+        # Create and add a separator.
+        if len(data) > 0:
+            pkg_strings.insert(1, " ".join("-" * x for x in sizes))
+
+        for val in pkg_strings:
+            write_output(val)
+
+
+def format_for_columns(
+    pkgs: "_ProcessedDists", options: Values
+) -> Tuple[List[List[str]], List[str]]:
+    """
+    Convert the package data into something usable
+    by output_package_listing_columns.
+    """
+    header = ["Package", "Version"]
+
+    running_outdated = options.outdated
+    if running_outdated:
+        header.extend(["Latest", "Type"])
+
+    has_editables = any(x.editable for x in pkgs)
+    if has_editables:
+        header.append("Editable project location")
+
+    if options.verbose >= 1:
+        header.append("Location")
+    if options.verbose >= 1:
+        header.append("Installer")
+
+    data = []
+    for proj in pkgs:
+        # if we're working on the 'outdated' list, separate out the
+        # latest_version and type
+        row = [proj.raw_name, str(proj.version)]
+
+        if running_outdated:
+            row.append(str(proj.latest_version))
+            row.append(proj.latest_filetype)
+
+        if has_editables:
+            row.append(proj.editable_project_location or "")
+
+        if options.verbose >= 1:
+            row.append(proj.location or "")
+        if options.verbose >= 1:
+            row.append(proj.installer)
+
+        data.append(row)
+
+    return data, header
+
+
+def format_for_json(packages: "_ProcessedDists", options: Values) -> str:
+    data = []
+    for dist in packages:
+        info = {
+            "name": dist.raw_name,
+            "version": str(dist.version),
+        }
+        if options.verbose >= 1:
+            info["location"] = dist.location or ""
+            info["installer"] = dist.installer
+        if options.outdated:
+            info["latest_version"] = str(dist.latest_version)
+            info["latest_filetype"] = dist.latest_filetype
+        editable_project_location = dist.editable_project_location
+        if editable_project_location:
+            info["editable_project_location"] = editable_project_location
+        data.append(info)
+    return json.dumps(data)
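The JSON emitted by format_for_json above is what `pip list --format=json` prints, so it can be consumed programmatically. A minimal sketch, assuming pip is available for the current interpreter:

import json
import subprocess
import sys

# Read the machine-readable listing and print pinned name==version pairs.
output = subprocess.run(
    [sys.executable, "-m", "pip", "list", "--format=json"],
    check=True,
    capture_output=True,
    text=True,
).stdout
for pkg in json.loads(output):
    print(f"{pkg['name']}=={pkg['version']}")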
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/commands/search.py b/.venv/lib/python3.12/site-packages/pip/_internal/commands/search.py
new file mode 100644
index 0000000..03ed925
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/commands/search.py
@@ -0,0 +1,174 @@
+import logging
+import shutil
+import sys
+import textwrap
+import xmlrpc.client
+from collections import OrderedDict
+from optparse import Values
+from typing import TYPE_CHECKING, Dict, List, Optional
+
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.req_command import SessionCommandMixin
+from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
+from pip._internal.exceptions import CommandError
+from pip._internal.metadata import get_default_environment
+from pip._internal.models.index import PyPI
+from pip._internal.network.xmlrpc import PipXmlrpcTransport
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import write_output
+
+if TYPE_CHECKING:
+    from typing import TypedDict
+
+    class TransformedHit(TypedDict):
+        name: str
+        summary: str
+        versions: List[str]
+
+
+logger = logging.getLogger(__name__)
+
+
+class SearchCommand(Command, SessionCommandMixin):
+    """Search for PyPI packages whose name or summary contains ."""
+
+    usage = """
+      %prog [options] <query>"""
+    ignore_require_venv = True
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-i",
+            "--index",
+            dest="index",
+            metavar="URL",
+            default=PyPI.pypi_url,
+            help="Base URL of Python Package Index (default %default)",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        if not args:
+            raise CommandError("Missing required argument (search query).")
+        query = args
+        pypi_hits = self.search(query, options)
+        hits = transform_hits(pypi_hits)
+
+        terminal_width = None
+        if sys.stdout.isatty():
+            terminal_width = shutil.get_terminal_size()[0]
+
+        print_results(hits, terminal_width=terminal_width)
+        if pypi_hits:
+            return SUCCESS
+        return NO_MATCHES_FOUND
+
+    def search(self, query: List[str], options: Values) -> List[Dict[str, str]]:
+        index_url = options.index
+
+        session = self.get_default_session(options)
+
+        transport = PipXmlrpcTransport(index_url, session)
+        pypi = xmlrpc.client.ServerProxy(index_url, transport)
+        try:
+            hits = pypi.search({"name": query, "summary": query}, "or")
+        except xmlrpc.client.Fault as fault:
+            message = "XMLRPC request failed [code: {code}]\n{string}".format(
+                code=fault.faultCode,
+                string=fault.faultString,
+            )
+            raise CommandError(message)
+        assert isinstance(hits, list)
+        return hits
+
+
+def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]:
+    """
+    The list from pypi is really a list of versions. We want a list of
+    packages with the list of versions stored inline. This converts the
+    list from pypi into one we can use.
+    """
+    packages: Dict[str, "TransformedHit"] = OrderedDict()
+    for hit in hits:
+        name = hit["name"]
+        summary = hit["summary"]
+        version = hit["version"]
+
+        if name not in packages.keys():
+            packages[name] = {
+                "name": name,
+                "summary": summary,
+                "versions": [version],
+            }
+        else:
+            packages[name]["versions"].append(version)
+
+            # if this is the highest version, replace the stored summary
+            if version == highest_version(packages[name]["versions"]):
+                packages[name]["summary"] = summary
+
+    return list(packages.values())
+
+
+def print_dist_installation_info(name: str, latest: str) -> None:
+    env = get_default_environment()
+    dist = env.get_distribution(name)
+    if dist is not None:
+        with indent_log():
+            if dist.version == latest:
+                write_output("INSTALLED: %s (latest)", dist.version)
+            else:
+                write_output("INSTALLED: %s", dist.version)
+                if parse_version(latest).pre:
+                    write_output(
+                        "LATEST:    %s (pre-release; install"
+                        " with `pip install --pre`)",
+                        latest,
+                    )
+                else:
+                    write_output("LATEST:    %s", latest)
+
+
+def print_results(
+    hits: List["TransformedHit"],
+    name_column_width: Optional[int] = None,
+    terminal_width: Optional[int] = None,
+) -> None:
+    if not hits:
+        return
+    if name_column_width is None:
+        name_column_width = (
+            max(
+                [
+                    len(hit["name"]) + len(highest_version(hit.get("versions", ["-"])))
+                    for hit in hits
+                ]
+            )
+            + 4
+        )
+
+    for hit in hits:
+        name = hit["name"]
+        summary = hit["summary"] or ""
+        latest = highest_version(hit.get("versions", ["-"]))
+        if terminal_width is not None:
+            target_width = terminal_width - name_column_width - 5
+            if target_width > 10:
+                # wrap and indent summary to fit terminal
+                summary_lines = textwrap.wrap(summary, target_width)
+                summary = ("\n" + " " * (name_column_width + 3)).join(summary_lines)
+
+        name_latest = f"{name} ({latest})"
+        line = f"{name_latest:{name_column_width}} - {summary}"
+        try:
+            write_output(line)
+            print_dist_installation_info(name, latest)
+        except UnicodeEncodeError:
+            pass
+
+
+def highest_version(versions: List[str]) -> str:
+    return max(versions, key=parse_version)
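To make the grouping behaviour of transform_hits and highest_version concrete, here is a small sketch with made-up hit data (the package names and versions are invented; the import path is pip's private module, so it may change between releases):

from pip._internal.commands.search import highest_version, transform_hits

# Per-version hits, shaped like the entries the XML-RPC search endpoint returns.
sample_hits = [
    {"name": "example-pkg", "summary": "Old summary", "version": "1.0"},
    {"name": "example-pkg", "summary": "New summary", "version": "2.0"},
    {"name": "other-pkg", "summary": "Another package", "version": "0.3"},
]

# transform_hits collapses these into one entry per package, keeping the
# summary of the highest version seen so far.
for pkg in transform_hits(sample_hits):
    print(pkg["name"], highest_version(pkg["versions"]), "-", pkg["summary"])
# example-pkg 2.0 - New summary
# other-pkg 0.3 - Another package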
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/commands/show.py b/.venv/lib/python3.12/site-packages/pip/_internal/commands/show.py
new file mode 100644
index 0000000..3f10701
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/commands/show.py
@@ -0,0 +1,189 @@
+import logging
+from optparse import Values
+from typing import Generator, Iterable, Iterator, List, NamedTuple, Optional
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.metadata import BaseDistribution, get_default_environment
+from pip._internal.utils.misc import write_output
+
+logger = logging.getLogger(__name__)
+
+
+class ShowCommand(Command):
+    """
+    Show information about one or more installed packages.
+
+    The output is in RFC-compliant mail header format.
+    """
+
+    usage = """
+      %prog [options] <package> ..."""
+    ignore_require_venv = True
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-f",
+            "--files",
+            dest="files",
+            action="store_true",
+            default=False,
+            help="Show the full list of installed files for each package.",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        if not args:
+            logger.warning("ERROR: Please provide a package name or names.")
+            return ERROR
+        query = args
+
+        results = search_packages_info(query)
+        if not print_results(
+            results, list_files=options.files, verbose=options.verbose
+        ):
+            return ERROR
+        return SUCCESS
+
+
+class _PackageInfo(NamedTuple):
+    name: str
+    version: str
+    location: str
+    editable_project_location: Optional[str]
+    requires: List[str]
+    required_by: List[str]
+    installer: str
+    metadata_version: str
+    classifiers: List[str]
+    summary: str
+    homepage: str
+    project_urls: List[str]
+    author: str
+    author_email: str
+    license: str
+    entry_points: List[str]
+    files: Optional[List[str]]
+
+
+def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None]:
+    """
+    Gather details from installed distributions. Print distribution name,
+    version, location, and installed files. Listing installed files requires
+    a pip-generated 'installed-files.txt' in the distribution's '.egg-info'
+    directory.
+    """
+    env = get_default_environment()
+
+    installed = {dist.canonical_name: dist for dist in env.iter_all_distributions()}
+    query_names = [canonicalize_name(name) for name in query]
+    missing = sorted(
+        [name for name, pkg in zip(query, query_names) if pkg not in installed]
+    )
+    if missing:
+        logger.warning("Package(s) not found: %s", ", ".join(missing))
+
+    def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]:
+        return (
+            dist.metadata["Name"] or "UNKNOWN"
+            for dist in installed.values()
+            if current_dist.canonical_name
+            in {canonicalize_name(d.name) for d in dist.iter_dependencies()}
+        )
+
+    for query_name in query_names:
+        try:
+            dist = installed[query_name]
+        except KeyError:
+            continue
+
+        requires = sorted((req.name for req in dist.iter_dependencies()), key=str.lower)
+        required_by = sorted(_get_requiring_packages(dist), key=str.lower)
+
+        try:
+            entry_points_text = dist.read_text("entry_points.txt")
+            entry_points = entry_points_text.splitlines(keepends=False)
+        except FileNotFoundError:
+            entry_points = []
+
+        files_iter = dist.iter_declared_entries()
+        if files_iter is None:
+            files: Optional[List[str]] = None
+        else:
+            files = sorted(files_iter)
+
+        metadata = dist.metadata
+
+        yield _PackageInfo(
+            name=dist.raw_name,
+            version=str(dist.version),
+            location=dist.location or "",
+            editable_project_location=dist.editable_project_location,
+            requires=requires,
+            required_by=required_by,
+            installer=dist.installer,
+            metadata_version=dist.metadata_version or "",
+            classifiers=metadata.get_all("Classifier", []),
+            summary=metadata.get("Summary", ""),
+            homepage=metadata.get("Home-page", ""),
+            project_urls=metadata.get_all("Project-URL", []),
+            author=metadata.get("Author", ""),
+            author_email=metadata.get("Author-email", ""),
+            license=metadata.get("License", ""),
+            entry_points=entry_points,
+            files=files,
+        )
+
+
+def print_results(
+    distributions: Iterable[_PackageInfo],
+    list_files: bool,
+    verbose: bool,
+) -> bool:
+    """
+    Print the information from installed distributions found.
+    """
+    results_printed = False
+    for i, dist in enumerate(distributions):
+        results_printed = True
+        if i > 0:
+            write_output("---")
+
+        write_output("Name: %s", dist.name)
+        write_output("Version: %s", dist.version)
+        write_output("Summary: %s", dist.summary)
+        write_output("Home-page: %s", dist.homepage)
+        write_output("Author: %s", dist.author)
+        write_output("Author-email: %s", dist.author_email)
+        write_output("License: %s", dist.license)
+        write_output("Location: %s", dist.location)
+        if dist.editable_project_location is not None:
+            write_output(
+                "Editable project location: %s", dist.editable_project_location
+            )
+        write_output("Requires: %s", ", ".join(dist.requires))
+        write_output("Required-by: %s", ", ".join(dist.required_by))
+
+        if verbose:
+            write_output("Metadata-Version: %s", dist.metadata_version)
+            write_output("Installer: %s", dist.installer)
+            write_output("Classifiers:")
+            for classifier in dist.classifiers:
+                write_output("  %s", classifier)
+            write_output("Entry-points:")
+            for entry in dist.entry_points:
+                write_output("  %s", entry.strip())
+            write_output("Project-URLs:")
+            for project_url in dist.project_urls:
+                write_output("  %s", project_url)
+        if list_files:
+            write_output("Files:")
+            if dist.files is None:
+                write_output("Cannot locate RECORD or installed-files.txt")
+            else:
+                for line in dist.files:
+                    write_output("  %s", line.strip())
+    return results_printed
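Because print_results above emits header-style "Key: value" lines, the output of `pip show` is easy to post-process. A minimal sketch, assuming pip is available for the current interpreter and using "pip" itself as the queried package; only the single-line fields of the default (non-verbose) output are handled:

import subprocess
import sys

output = subprocess.run(
    [sys.executable, "-m", "pip", "show", "pip"],
    check=True,
    capture_output=True,
    text=True,
).stdout

# Collect top-level "Key: value" pairs from the default output.
info = {}
for line in output.splitlines():
    key, sep, value = line.partition(": ")
    if sep:
        info[key] = value
print(info.get("Name"), info.get("Version"), info.get("Location"))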
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/commands/uninstall.py b/.venv/lib/python3.12/site-packages/pip/_internal/commands/uninstall.py
new file mode 100644
index 0000000..f198fc3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/commands/uninstall.py
@@ -0,0 +1,113 @@
+import logging
+from optparse import Values
+from typing import List
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.exceptions import InstallationError
+from pip._internal.req import parse_requirements
+from pip._internal.req.constructors import (
+    install_req_from_line,
+    install_req_from_parsed_requirement,
+)
+from pip._internal.utils.misc import (
+    check_externally_managed,
+    protect_pip_from_modification_on_windows,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class UninstallCommand(Command, SessionCommandMixin):
+    """
+    Uninstall packages.
+
+    pip is able to uninstall most installed packages. Known exceptions are:
+
+    - Pure distutils packages installed with ``python setup.py install``, which
+      leave behind no metadata to determine what files were installed.
+    - Script wrappers installed by ``python setup.py develop``.
+    """
+
+    usage = """
+      %prog [options] <package> ...
+      %prog [options] -r <requirements file> ..."""
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-r",
+            "--requirement",
+            dest="requirements",
+            action="append",
+            default=[],
+            metavar="file",
+            help=(
+                "Uninstall all the packages listed in the given requirements "
+                "file.  This option can be used multiple times."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "-y",
+            "--yes",
+            dest="yes",
+            action="store_true",
+            help="Don't ask for confirmation of uninstall deletions.",
+        )
+        self.cmd_opts.add_option(cmdoptions.root_user_action())
+        self.cmd_opts.add_option(cmdoptions.override_externally_managed())
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        session = self.get_default_session(options)
+
+        reqs_to_uninstall = {}
+        for name in args:
+            req = install_req_from_line(
+                name,
+                isolated=options.isolated_mode,
+            )
+            if req.name:
+                reqs_to_uninstall[canonicalize_name(req.name)] = req
+            else:
+                logger.warning(
+                    "Invalid requirement: %r ignored -"
+                    " the uninstall command expects named"
+                    " requirements.",
+                    name,
+                )
+        for filename in options.requirements:
+            for parsed_req in parse_requirements(
+                filename, options=options, session=session
+            ):
+                req = install_req_from_parsed_requirement(
+                    parsed_req, isolated=options.isolated_mode
+                )
+                if req.name:
+                    reqs_to_uninstall[canonicalize_name(req.name)] = req
+        if not reqs_to_uninstall:
+            raise InstallationError(
+                f"You must give at least one requirement to {self.name} (see "
+                f'"pip help {self.name}")'
+            )
+
+        if not options.override_externally_managed:
+            check_externally_managed()
+
+        protect_pip_from_modification_on_windows(
+            modifying_pip="pip" in reqs_to_uninstall
+        )
+
+        for req in reqs_to_uninstall.values():
+            uninstall_pathset = req.uninstall(
+                auto_confirm=options.yes,
+                verbose=self.verbosity > 0,
+            )
+            if uninstall_pathset:
+                uninstall_pathset.commit()
+        if options.root_user_action == "warn":
+            warn_if_run_as_root()
+        return SUCCESS
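A corresponding non-interactive invocation of this command, as a minimal sketch: -y maps to the --yes option defined above, so no confirmation prompt is raised. The package name is an arbitrary placeholder, and the call fails if it is not installed.

import subprocess
import sys

subprocess.run(
    [sys.executable, "-m", "pip", "uninstall", "-y", "example-pkg"],
    check=True,  # raises CalledProcessError if the uninstall fails
)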
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/commands/wheel.py b/.venv/lib/python3.12/site-packages/pip/_internal/commands/wheel.py
new file mode 100644
index 0000000..ed578aa
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/commands/wheel.py
@@ -0,0 +1,183 @@
+import logging
+import os
+import shutil
+from optparse import Values
+from typing import List
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.req_command import RequirementCommand, with_cleanup
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.exceptions import CommandError
+from pip._internal.operations.build.build_tracker import get_build_tracker
+from pip._internal.req.req_install import (
+    InstallRequirement,
+    check_legacy_setup_py_options,
+)
+from pip._internal.utils.misc import ensure_dir, normalize_path
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.wheel_builder import build, should_build_for_wheel_command
+
+logger = logging.getLogger(__name__)
+
+
+class WheelCommand(RequirementCommand):
+    """
+    Build Wheel archives for your requirements and dependencies.
+
+    Wheel is a built-package format, and offers the advantage of not
+    recompiling your software during every install. For more details, see the
+    wheel docs: https://wheel.readthedocs.io/en/latest/
+
+    'pip wheel' uses the build system interface as described here:
+    https://pip.pypa.io/en/stable/reference/build-system/
+
+    """
+
+    usage = """
+      %prog [options] <requirement specifier> ...
+      %prog [options] -r <requirements file> ...
+      %prog [options] [-e] <vcs project url> ...
+      %prog [options] [-e] <local project path> ...
+      %prog [options] <archive url/path> ..."""
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-w",
+            "--wheel-dir",
+            dest="wheel_dir",
+            metavar="dir",
+            default=os.curdir,
+            help=(
+                "Build wheels into , where the default is the "
+                "current working directory."
+            ),
+        )
+        self.cmd_opts.add_option(cmdoptions.no_binary())
+        self.cmd_opts.add_option(cmdoptions.only_binary())
+        self.cmd_opts.add_option(cmdoptions.prefer_binary())
+        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
+        self.cmd_opts.add_option(cmdoptions.use_pep517())
+        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+        self.cmd_opts.add_option(cmdoptions.check_build_deps())
+        self.cmd_opts.add_option(cmdoptions.constraints())
+        self.cmd_opts.add_option(cmdoptions.editable())
+        self.cmd_opts.add_option(cmdoptions.requirements())
+        self.cmd_opts.add_option(cmdoptions.src())
+        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
+        self.cmd_opts.add_option(cmdoptions.no_deps())
+        self.cmd_opts.add_option(cmdoptions.progress_bar())
+
+        self.cmd_opts.add_option(
+            "--no-verify",
+            dest="no_verify",
+            action="store_true",
+            default=False,
+            help="Don't verify if built wheel is valid.",
+        )
+
+        self.cmd_opts.add_option(cmdoptions.config_settings())
+        self.cmd_opts.add_option(cmdoptions.build_options())
+        self.cmd_opts.add_option(cmdoptions.global_options())
+
+        self.cmd_opts.add_option(
+            "--pre",
+            action="store_true",
+            default=False,
+            help=(
+                "Include pre-release and development versions. By default, "
+                "pip only finds stable versions."
+            ),
+        )
+
+        self.cmd_opts.add_option(cmdoptions.require_hashes())
+
+        index_opts = cmdoptions.make_option_group(
+            cmdoptions.index_group,
+            self.parser,
+        )
+
+        self.parser.insert_option_group(0, index_opts)
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    @with_cleanup
+    def run(self, options: Values, args: List[str]) -> int:
+        session = self.get_default_session(options)
+
+        finder = self._build_package_finder(options, session)
+
+        options.wheel_dir = normalize_path(options.wheel_dir)
+        ensure_dir(options.wheel_dir)
+
+        build_tracker = self.enter_context(get_build_tracker())
+
+        directory = TempDirectory(
+            delete=not options.no_clean,
+            kind="wheel",
+            globally_managed=True,
+        )
+
+        reqs = self.get_requirements(args, options, finder, session)
+        check_legacy_setup_py_options(options, reqs)
+
+        wheel_cache = WheelCache(options.cache_dir)
+
+        preparer = self.make_requirement_preparer(
+            temp_build_dir=directory,
+            options=options,
+            build_tracker=build_tracker,
+            session=session,
+            finder=finder,
+            download_dir=options.wheel_dir,
+            use_user_site=False,
+            verbosity=self.verbosity,
+        )
+
+        resolver = self.make_resolver(
+            preparer=preparer,
+            finder=finder,
+            options=options,
+            wheel_cache=wheel_cache,
+            ignore_requires_python=options.ignore_requires_python,
+            use_pep517=options.use_pep517,
+        )
+
+        self.trace_basic_info(finder)
+
+        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
+
+        reqs_to_build: List[InstallRequirement] = []
+        for req in requirement_set.requirements.values():
+            if req.is_wheel:
+                preparer.save_linked_requirement(req)
+            elif should_build_for_wheel_command(req):
+                reqs_to_build.append(req)
+
+        preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
+        requirement_set.warn_legacy_versions_and_specifiers()
+
+        # build wheels
+        build_successes, build_failures = build(
+            reqs_to_build,
+            wheel_cache=wheel_cache,
+            verify=(not options.no_verify),
+            build_options=options.build_options or [],
+            global_options=options.global_options or [],
+        )
+        for req in build_successes:
+            assert req.link and req.link.is_wheel
+            assert req.local_file_path
+            # copy from cache to target directory
+            try:
+                shutil.copy(req.local_file_path, options.wheel_dir)
+            except OSError as e:
+                logger.warning(
+                    "Building wheel for %s failed: %s",
+                    req.name,
+                    e,
+                )
+                build_failures.append(req)
+        if len(build_failures) != 0:
+            raise CommandError("Failed to build one or more wheels")
+
+        return SUCCESS
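
For orientation, a minimal sketch of exercising the wheel command implemented by the run() method above, going through pip's public CLI rather than these private internals; the package name and output directory are illustrative only.

import subprocess
import sys
from pathlib import Path

# Illustrative only: drives `pip wheel` (the command implemented above) via
# the public CLI. "requests" and "wheelhouse" are example values.
wheel_dir = Path("wheelhouse")              # maps to --wheel-dir / options.wheel_dir
wheel_dir.mkdir(exist_ok=True)

subprocess.run(
    [sys.executable, "-m", "pip", "wheel",
     "--wheel-dir", str(wheel_dir),         # built wheels are copied here (see run())
     "requests"],                           # example requirement specifier
    check=True,
)
print(sorted(p.name for p in wheel_dir.glob("*.whl")))
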
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/configuration.py b/.venv/lib/python3.12/site-packages/pip/_internal/configuration.py
new file mode 100644
index 0000000..c25273d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/configuration.py
@@ -0,0 +1,383 @@
+"""Configuration management setup
+
+Some terminology:
+- name
+  As written in config files.
+- value
+  Value associated with a name
+- key
+  Name combined with its section (section.name)
+- variant
+  A single word describing where the configuration key-value pair came from
+"""
+
+import configparser
+import locale
+import os
+import sys
+from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple
+
+from pip._internal.exceptions import (
+    ConfigurationError,
+    ConfigurationFileCouldNotBeLoaded,
+)
+from pip._internal.utils import appdirs
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.logging import getLogger
+from pip._internal.utils.misc import ensure_dir, enum
+
+RawConfigParser = configparser.RawConfigParser  # Shorthand
+Kind = NewType("Kind", str)
+
+CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf"
+ENV_NAMES_IGNORED = "version", "help"
+
+# The kinds of configurations there are.
+kinds = enum(
+    USER="user",  # User Specific
+    GLOBAL="global",  # System Wide
+    SITE="site",  # [Virtual] Environment Specific
+    ENV="env",  # from PIP_CONFIG_FILE
+    ENV_VAR="env-var",  # from Environment Variables
+)
+OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
+VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE
+
+logger = getLogger(__name__)
+
+
+# NOTE: Maybe use the optionxform attribute to normalize key names.
+def _normalize_name(name: str) -> str:
+    """Make a name consistent regardless of source (environment or file)"""
+    name = name.lower().replace("_", "-")
+    if name.startswith("--"):
+        name = name[2:]  # only prefer long opts
+    return name
+
+
+def _disassemble_key(name: str) -> List[str]:
+    if "." not in name:
+        error_message = (
+            "Key does not contain dot separated section and key. "
+            f"Perhaps you wanted to use 'global.{name}' instead?"
+        )
+        raise ConfigurationError(error_message)
+    return name.split(".", 1)
+
+
+def get_configuration_files() -> Dict[Kind, List[str]]:
+    global_config_files = [
+        os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs("pip")
+    ]
+
+    site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME)
+    legacy_config_file = os.path.join(
+        os.path.expanduser("~"),
+        "pip" if WINDOWS else ".pip",
+        CONFIG_BASENAME,
+    )
+    new_config_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME)
+    return {
+        kinds.GLOBAL: global_config_files,
+        kinds.SITE: [site_config_file],
+        kinds.USER: [legacy_config_file, new_config_file],
+    }
+
+
+class Configuration:
+    """Handles management of configuration.
+
+    Provides an interface to accessing and managing configuration files.
+
+    This class provides an API that takes "section.key-name" style
+    keys and stores the value associated with it as "key-name" under the
+    section "section".
+
+    This allows for a clean interface wherein both the section and the
+    key-name are preserved in an easy-to-manage form in the configuration
+    files, and the stored values remain simple to work with.
+    """
+
+    def __init__(self, isolated: bool, load_only: Optional[Kind] = None) -> None:
+        super().__init__()
+
+        if load_only is not None and load_only not in VALID_LOAD_ONLY:
+            raise ConfigurationError(
+                "Got invalid value for load_only - should be one of {}".format(
+                    ", ".join(map(repr, VALID_LOAD_ONLY))
+                )
+            )
+        self.isolated = isolated
+        self.load_only = load_only
+
+        # Because we keep track of where we got the data from
+        self._parsers: Dict[Kind, List[Tuple[str, RawConfigParser]]] = {
+            variant: [] for variant in OVERRIDE_ORDER
+        }
+        self._config: Dict[Kind, Dict[str, Any]] = {
+            variant: {} for variant in OVERRIDE_ORDER
+        }
+        self._modified_parsers: List[Tuple[str, RawConfigParser]] = []
+
+    def load(self) -> None:
+        """Loads configuration from configuration files and environment"""
+        self._load_config_files()
+        if not self.isolated:
+            self._load_environment_vars()
+
+    def get_file_to_edit(self) -> Optional[str]:
+        """Returns the file with highest priority in configuration"""
+        assert self.load_only is not None, "A file to edit needs to be specified"
+
+        try:
+            return self._get_parser_to_modify()[0]
+        except IndexError:
+            return None
+
+    def items(self) -> Iterable[Tuple[str, Any]]:
+        """Returns key-value pairs like dict.items() representing the loaded
+        configuration
+        """
+        return self._dictionary.items()
+
+    def get_value(self, key: str) -> Any:
+        """Get a value from the configuration."""
+        orig_key = key
+        key = _normalize_name(key)
+        try:
+            return self._dictionary[key]
+        except KeyError:
+            # disassembling triggers a more useful error message than simply
+            # "No such key" in the case that the key isn't in the form command.option
+            _disassemble_key(key)
+            raise ConfigurationError(f"No such key - {orig_key}")
+
+    def set_value(self, key: str, value: Any) -> None:
+        """Modify a value in the configuration."""
+        key = _normalize_name(key)
+        self._ensure_have_load_only()
+
+        assert self.load_only
+        fname, parser = self._get_parser_to_modify()
+
+        if parser is not None:
+            section, name = _disassemble_key(key)
+
+            # Modify the parser and the configuration
+            if not parser.has_section(section):
+                parser.add_section(section)
+            parser.set(section, name, value)
+
+        self._config[self.load_only][key] = value
+        self._mark_as_modified(fname, parser)
+
+    def unset_value(self, key: str) -> None:
+        """Unset a value in the configuration."""
+        orig_key = key
+        key = _normalize_name(key)
+        self._ensure_have_load_only()
+
+        assert self.load_only
+        if key not in self._config[self.load_only]:
+            raise ConfigurationError(f"No such key - {orig_key}")
+
+        fname, parser = self._get_parser_to_modify()
+
+        if parser is not None:
+            section, name = _disassemble_key(key)
+            if not (
+                parser.has_section(section) and parser.remove_option(section, name)
+            ):
+                # The option was not removed.
+                raise ConfigurationError(
+                    "Fatal Internal error [id=1]. Please report as a bug."
+                )
+
+            # The section may be empty after the option was removed.
+            if not parser.items(section):
+                parser.remove_section(section)
+            self._mark_as_modified(fname, parser)
+
+        del self._config[self.load_only][key]
+
+    def save(self) -> None:
+        """Save the current in-memory state."""
+        self._ensure_have_load_only()
+
+        for fname, parser in self._modified_parsers:
+            logger.info("Writing to %s", fname)
+
+            # Ensure directory exists.
+            ensure_dir(os.path.dirname(fname))
+
+            # Ensure the directory is writable.
+            try:
+                with open(fname, "w") as f:
+                    parser.write(f)
+            except OSError as error:
+                raise ConfigurationError(
+                    f"An error occurred while writing to the configuration file "
+                    f"{fname}: {error}"
+                )
+
+    #
+    # Private routines
+    #
+
+    def _ensure_have_load_only(self) -> None:
+        if self.load_only is None:
+            raise ConfigurationError("Needed a specific file to be modifying.")
+        logger.debug("Will be working with %s variant only", self.load_only)
+
+    @property
+    def _dictionary(self) -> Dict[str, Any]:
+        """A dictionary representing the loaded configuration."""
+        # NOTE: Dictionaries are not populated if not loaded. So, conditionals
+        #       are not needed here.
+        retval = {}
+
+        for variant in OVERRIDE_ORDER:
+            retval.update(self._config[variant])
+
+        return retval
+
+    def _load_config_files(self) -> None:
+        """Loads configuration from configuration files"""
+        config_files = dict(self.iter_config_files())
+        if config_files[kinds.ENV][0:1] == [os.devnull]:
+            logger.debug(
+                "Skipping loading configuration files due to "
+                "environment's PIP_CONFIG_FILE being os.devnull"
+            )
+            return
+
+        for variant, files in config_files.items():
+            for fname in files:
+                # If a specific variant is set in `load_only`, load only
+                # that variant, not the others.
+                if self.load_only is not None and variant != self.load_only:
+                    logger.debug("Skipping file '%s' (variant: %s)", fname, variant)
+                    continue
+
+                parser = self._load_file(variant, fname)
+
+                # Keeping track of the parsers used
+                self._parsers[variant].append((fname, parser))
+
+    def _load_file(self, variant: Kind, fname: str) -> RawConfigParser:
+        logger.verbose("For variant '%s', will try loading '%s'", variant, fname)
+        parser = self._construct_parser(fname)
+
+        for section in parser.sections():
+            items = parser.items(section)
+            self._config[variant].update(self._normalized_keys(section, items))
+
+        return parser
+
+    def _construct_parser(self, fname: str) -> RawConfigParser:
+        parser = configparser.RawConfigParser()
+        # If there is no such file, don't bother reading it but create the
+        # parser anyway, to hold the data.
+        # Doing this is useful when modifying and saving files, where we don't
+        # need to construct a parser.
+        if os.path.exists(fname):
+            locale_encoding = locale.getpreferredencoding(False)
+            try:
+                parser.read(fname, encoding=locale_encoding)
+            except UnicodeDecodeError:
+                # See https://github.com/pypa/pip/issues/4963
+                raise ConfigurationFileCouldNotBeLoaded(
+                    reason=f"contains invalid {locale_encoding} characters",
+                    fname=fname,
+                )
+            except configparser.Error as error:
+                # See https://github.com/pypa/pip/issues/4893
+                raise ConfigurationFileCouldNotBeLoaded(error=error)
+        return parser
+
+    def _load_environment_vars(self) -> None:
+        """Loads configuration from environment variables"""
+        self._config[kinds.ENV_VAR].update(
+            self._normalized_keys(":env:", self.get_environ_vars())
+        )
+
+    def _normalized_keys(
+        self, section: str, items: Iterable[Tuple[str, Any]]
+    ) -> Dict[str, Any]:
+        """Normalizes items to construct a dictionary with normalized keys.
+
+        This routine is where the names become keys and are made the same
+        regardless of source - configuration files or environment.
+        """
+        normalized = {}
+        for name, val in items:
+            key = section + "." + _normalize_name(name)
+            normalized[key] = val
+        return normalized
+
+    def get_environ_vars(self) -> Iterable[Tuple[str, str]]:
+        """Returns a generator with all environmental vars with prefix PIP_"""
+        for key, val in os.environ.items():
+            if key.startswith("PIP_"):
+                name = key[4:].lower()
+                if name not in ENV_NAMES_IGNORED:
+                    yield name, val
+
+    # XXX: This is patched in the tests.
+    def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]:
+        """Yields variant and configuration files associated with it.
+
+        This should be treated like items of a dictionary. The order
+        here doesn't affect what gets overridden. That is controlled
+        by OVERRIDE_ORDER. However this does control the order they are
+        displayed to the user. It's probably most ergonomic to display
+        things in the same order as OVERRIDE_ORDER.
+        """
+        # SMELL: Move the conditions out of this function
+
+        env_config_file = os.environ.get("PIP_CONFIG_FILE", None)
+        config_files = get_configuration_files()
+
+        yield kinds.GLOBAL, config_files[kinds.GLOBAL]
+
+        # per-user config is not loaded when env_config_file exists
+        should_load_user_config = not self.isolated and not (
+            env_config_file and os.path.exists(env_config_file)
+        )
+        if should_load_user_config:
+            # The legacy config file is overridden by the new config file
+            yield kinds.USER, config_files[kinds.USER]
+
+        # virtualenv config
+        yield kinds.SITE, config_files[kinds.SITE]
+
+        if env_config_file is not None:
+            yield kinds.ENV, [env_config_file]
+        else:
+            yield kinds.ENV, []
+
+    def get_values_in_config(self, variant: Kind) -> Dict[str, Any]:
+        """Get values present in a config file"""
+        return self._config[variant]
+
+    def _get_parser_to_modify(self) -> Tuple[str, RawConfigParser]:
+        # Determine which parser to modify
+        assert self.load_only
+        parsers = self._parsers[self.load_only]
+        if not parsers:
+            # This should not happen if everything works correctly.
+            raise ConfigurationError(
+                "Fatal Internal error [id=2]. Please report as a bug."
+            )
+
+        # Use the highest priority parser.
+        return parsers[-1]
+
+    # XXX: This is patched in the tests.
+    def _mark_as_modified(self, fname: str, parser: RawConfigParser) -> None:
+        file_parser_tuple = (fname, parser)
+        if file_parser_tuple not in self._modified_parsers:
+            self._modified_parsers.append(file_parser_tuple)
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({self._dictionary!r})"
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/distributions/__init__.py b/.venv/lib/python3.12/site-packages/pip/_internal/distributions/__init__.py
new file mode 100644
index 0000000..9a89a83
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/distributions/__init__.py
@@ -0,0 +1,21 @@
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.distributions.sdist import SourceDistribution
+from pip._internal.distributions.wheel import WheelDistribution
+from pip._internal.req.req_install import InstallRequirement
+
+
+def make_distribution_for_install_requirement(
+    install_req: InstallRequirement,
+) -> AbstractDistribution:
+    """Returns a Distribution for the given InstallRequirement"""
+    # Editable requirements will always be source distributions. They use the
+    # legacy logic until we create a modern standard for them.
+    if install_req.editable:
+        return SourceDistribution(install_req)
+
+    # If it's a wheel, it's a WheelDistribution
+    if install_req.is_wheel:
+        return WheelDistribution(install_req)
+
+    # Otherwise, a SourceDistribution
+    return SourceDistribution(install_req)
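
The factory above picks a distribution type from the requirement: editable requirements and plain sources go down the sdist path, wheels go down the wheel path. A simplified, standalone illustration of the same decision keyed off a filename (pip itself inspects the InstallRequirement, not a path):

def classify_artifact(filename: str, editable: bool = False) -> str:
    # Simplified stand-in for make_distribution_for_install_requirement.
    if editable:
        return "source"      # editable installs always take the sdist path
    if filename.endswith(".whl"):
        return "wheel"       # wheels can be unpacked directly, no build step
    return "source"          # anything else needs metadata generation / a build

print(classify_artifact("requests-2.31.0-py3-none-any.whl"))  # "wheel"
print(classify_artifact("requests-2.31.0.tar.gz"))            # "source"
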
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/distributions/base.py b/.venv/lib/python3.12/site-packages/pip/_internal/distributions/base.py
new file mode 100644
index 0000000..6fb0d7b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/distributions/base.py
@@ -0,0 +1,51 @@
+import abc
+from typing import Optional
+
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata.base import BaseDistribution
+from pip._internal.req import InstallRequirement
+
+
+class AbstractDistribution(metaclass=abc.ABCMeta):
+    """A base class for handling installable artifacts.
+
+    The requirements for anything installable are as follows:
+
+     - we must be able to determine the requirement name
+       (or we can't correctly handle the non-upgrade case).
+
+     - for packages with setup requirements, we must also be able
+       to determine their requirements without installing additional
+       packages (for the same reason as run-time dependencies)
+
+     - we must be able to create a Distribution object exposing the
+       above metadata.
+
+     - if we need to do work in the build tracker, we must be able to generate a unique
+       string to identify the requirement in the build tracker.
+    """
+
+    def __init__(self, req: InstallRequirement) -> None:
+        super().__init__()
+        self.req = req
+
+    @abc.abstractproperty
+    def build_tracker_id(self) -> Optional[str]:
+        """A string that uniquely identifies this requirement to the build tracker.
+
+        If None, then this dist has no work to do in the build tracker, and
+        ``.prepare_distribution_metadata()`` will not be called."""
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def get_metadata_distribution(self) -> BaseDistribution:
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def prepare_distribution_metadata(
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
+    ) -> None:
+        raise NotImplementedError()
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/distributions/installed.py b/.venv/lib/python3.12/site-packages/pip/_internal/distributions/installed.py
new file mode 100644
index 0000000..ab8d53b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/distributions/installed.py
@@ -0,0 +1,29 @@
+from typing import Optional
+
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution
+
+
+class InstalledDistribution(AbstractDistribution):
+    """Represents an installed package.
+
+    This does not need any preparation as the required information has already
+    been computed.
+    """
+
+    @property
+    def build_tracker_id(self) -> Optional[str]:
+        return None
+
+    def get_metadata_distribution(self) -> BaseDistribution:
+        assert self.req.satisfied_by is not None, "not actually installed"
+        return self.req.satisfied_by
+
+    def prepare_distribution_metadata(
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
+    ) -> None:
+        pass
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/distributions/sdist.py b/.venv/lib/python3.12/site-packages/pip/_internal/distributions/sdist.py
new file mode 100644
index 0000000..15ff42b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/distributions/sdist.py
@@ -0,0 +1,156 @@
+import logging
+from typing import Iterable, Optional, Set, Tuple
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.exceptions import InstallationError
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution
+from pip._internal.utils.subprocess import runner_with_spinner_message
+
+logger = logging.getLogger(__name__)
+
+
+class SourceDistribution(AbstractDistribution):
+    """Represents a source distribution.
+
+    The preparation step for these needs metadata for the packages to be
+    generated, either using PEP 517 or using the legacy `setup.py egg_info`.
+    """
+
+    @property
+    def build_tracker_id(self) -> Optional[str]:
+        """Identify this requirement uniquely by its link."""
+        assert self.req.link
+        return self.req.link.url_without_fragment
+
+    def get_metadata_distribution(self) -> BaseDistribution:
+        return self.req.get_dist()
+
+    def prepare_distribution_metadata(
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
+    ) -> None:
+        # Load pyproject.toml, to determine whether PEP 517 is to be used
+        self.req.load_pyproject_toml()
+
+        # Set up the build isolation, if this requirement should be isolated
+        should_isolate = self.req.use_pep517 and build_isolation
+        if should_isolate:
+            # Setup an isolated environment and install the build backend static
+            # requirements in it.
+            self._prepare_build_backend(finder)
+            # Check that if the requirement is editable, it either supports PEP 660 or
+            # has a setup.py or a setup.cfg. This cannot be done earlier because we need
+            # to setup the build backend to verify it supports build_editable, nor can
+            # it be done later, because we want to avoid installing build requirements
+            # needlessly. Doing it here also works around setuptools generating
+            # UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory
+            # without setup.py nor setup.cfg.
+            self.req.isolated_editable_sanity_check()
+            # Install the dynamic build requirements.
+            self._install_build_reqs(finder)
+        # Check if the current environment provides build dependencies
+        should_check_deps = self.req.use_pep517 and check_build_deps
+        if should_check_deps:
+            pyproject_requires = self.req.pyproject_requires
+            assert pyproject_requires is not None
+            conflicting, missing = self.req.build_env.check_requirements(
+                pyproject_requires
+            )
+            if conflicting:
+                self._raise_conflicts("the backend dependencies", conflicting)
+            if missing:
+                self._raise_missing_reqs(missing)
+        self.req.prepare_metadata()
+
+    def _prepare_build_backend(self, finder: PackageFinder) -> None:
+        # Isolate in a BuildEnvironment and install the build-time
+        # requirements.
+        pyproject_requires = self.req.pyproject_requires
+        assert pyproject_requires is not None
+
+        self.req.build_env = BuildEnvironment()
+        self.req.build_env.install_requirements(
+            finder, pyproject_requires, "overlay", kind="build dependencies"
+        )
+        conflicting, missing = self.req.build_env.check_requirements(
+            self.req.requirements_to_check
+        )
+        if conflicting:
+            self._raise_conflicts("PEP 517/518 supported requirements", conflicting)
+        if missing:
+            logger.warning(
+                "Missing build requirements in pyproject.toml for %s.",
+                self.req,
+            )
+            logger.warning(
+                "The project does not specify a build backend, and "
+                "pip cannot fall back to setuptools without %s.",
+                " and ".join(map(repr, sorted(missing))),
+            )
+
+    def _get_build_requires_wheel(self) -> Iterable[str]:
+        with self.req.build_env:
+            runner = runner_with_spinner_message("Getting requirements to build wheel")
+            backend = self.req.pep517_backend
+            assert backend is not None
+            with backend.subprocess_runner(runner):
+                return backend.get_requires_for_build_wheel()
+
+    def _get_build_requires_editable(self) -> Iterable[str]:
+        with self.req.build_env:
+            runner = runner_with_spinner_message(
+                "Getting requirements to build editable"
+            )
+            backend = self.req.pep517_backend
+            assert backend is not None
+            with backend.subprocess_runner(runner):
+                return backend.get_requires_for_build_editable()
+
+    def _install_build_reqs(self, finder: PackageFinder) -> None:
+        # Install any extra build dependencies that the backend requests.
+        # This must be done in a second pass, as the pyproject.toml
+        # dependencies must be installed before we can call the backend.
+        if (
+            self.req.editable
+            and self.req.permit_editable_wheels
+            and self.req.supports_pyproject_editable()
+        ):
+            build_reqs = self._get_build_requires_editable()
+        else:
+            build_reqs = self._get_build_requires_wheel()
+        conflicting, missing = self.req.build_env.check_requirements(build_reqs)
+        if conflicting:
+            self._raise_conflicts("the backend dependencies", conflicting)
+        self.req.build_env.install_requirements(
+            finder, missing, "normal", kind="backend dependencies"
+        )
+
+    def _raise_conflicts(
+        self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
+    ) -> None:
+        format_string = (
+            "Some build dependencies for {requirement} "
+            "conflict with {conflicting_with}: {description}."
+        )
+        error_message = format_string.format(
+            requirement=self.req,
+            conflicting_with=conflicting_with,
+            description=", ".join(
+                f"{installed} is incompatible with {wanted}"
+                for installed, wanted in sorted(conflicting_reqs)
+            ),
+        )
+        raise InstallationError(error_message)
+
+    def _raise_missing_reqs(self, missing: Set[str]) -> None:
+        format_string = (
+            "Some build dependencies for {requirement} are missing: {missing}."
+        )
+        error_message = format_string.format(
+            requirement=self.req, missing=", ".join(map(repr, sorted(missing)))
+        )
+        raise InstallationError(error_message)
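
prepare_distribution_metadata() above starts by loading pyproject.toml to decide whether PEP 517 applies and which static build requirements to install. A standalone sketch of that first step using only the standard library (Python 3.11+ for tomllib; the path is an example):

import tomllib
from pathlib import Path

pyproject = Path("pyproject.toml")           # example path
with pyproject.open("rb") as f:
    data = tomllib.load(f)

build_system = data.get("build-system", {})
print(build_system.get("requires", []))      # e.g. ["setuptools>=61", "wheel"]
print(build_system.get("build-backend"))     # e.g. "setuptools.build_meta"
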
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/distributions/wheel.py b/.venv/lib/python3.12/site-packages/pip/_internal/distributions/wheel.py
new file mode 100644
index 0000000..eb16e25
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/distributions/wheel.py
@@ -0,0 +1,40 @@
+from typing import Optional
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import (
+    BaseDistribution,
+    FilesystemWheel,
+    get_wheel_distribution,
+)
+
+
+class WheelDistribution(AbstractDistribution):
+    """Represents a wheel distribution.
+
+    This does not need any preparation as wheels can be directly unpacked.
+    """
+
+    @property
+    def build_tracker_id(self) -> Optional[str]:
+        return None
+
+    def get_metadata_distribution(self) -> BaseDistribution:
+        """Loads the metadata from the wheel file into memory and returns a
+        Distribution that uses it, not relying on the wheel file or
+        requirement.
+        """
+        assert self.req.local_file_path, "Set as part of preparation during download"
+        assert self.req.name, "Wheels are never unnamed"
+        wheel = FilesystemWheel(self.req.local_file_path)
+        return get_wheel_distribution(wheel, canonicalize_name(self.req.name))
+
+    def prepare_distribution_metadata(
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
+    ) -> None:
+        pass
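
get_metadata_distribution() above works because a wheel is a zip archive whose metadata can be read without installing it. A standard-library sketch of the same idea; the wheel path is an example:

import zipfile

wheel_path = "requests-2.31.0-py3-none-any.whl"   # example wheel
with zipfile.ZipFile(wheel_path) as whl:
    metadata_name = next(
        name for name in whl.namelist() if name.endswith(".dist-info/METADATA")
    )
    metadata = whl.read(metadata_name).decode("utf-8")
print(metadata.splitlines()[0])                   # e.g. "Metadata-Version: 2.1"
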
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/exceptions.py b/.venv/lib/python3.12/site-packages/pip/_internal/exceptions.py
new file mode 100644
index 0000000..5007a62
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/exceptions.py
@@ -0,0 +1,728 @@
+"""Exceptions used throughout package.
+
+This module MUST NOT try to import from anything within `pip._internal` to
+operate. This is expected to be importable from any/all files within the
+subpackage and, thus, should not depend on them.
+"""
+
+import configparser
+import contextlib
+import locale
+import logging
+import pathlib
+import re
+import sys
+from itertools import chain, groupby, repeat
+from typing import TYPE_CHECKING, Dict, Iterator, List, Optional, Union
+
+from pip._vendor.requests.models import Request, Response
+from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult
+from pip._vendor.rich.markup import escape
+from pip._vendor.rich.text import Text
+
+if TYPE_CHECKING:
+    from hashlib import _Hash
+    from typing import Literal
+
+    from pip._internal.metadata import BaseDistribution
+    from pip._internal.req.req_install import InstallRequirement
+
+logger = logging.getLogger(__name__)
+
+
+#
+# Scaffolding
+#
+def _is_kebab_case(s: str) -> bool:
+    return re.match(r"^[a-z]+(-[a-z]+)*$", s) is not None
+
+
+def _prefix_with_indent(
+    s: Union[Text, str],
+    console: Console,
+    *,
+    prefix: str,
+    indent: str,
+) -> Text:
+    if isinstance(s, Text):
+        text = s
+    else:
+        text = console.render_str(s)
+
+    return console.render_str(prefix, overflow="ignore") + console.render_str(
+        f"\n{indent}", overflow="ignore"
+    ).join(text.split(allow_blank=True))
+
+
+class PipError(Exception):
+    """The base pip error."""
+
+
+class DiagnosticPipError(PipError):
+    """An error, that presents diagnostic information to the user.
+
+    This contains a bunch of logic, to enable pretty presentation of our error
+    messages. Each error gets a unique reference. Each error can also include
+    additional context, a hint and/or a note -- which are presented with the
+    main error message in a consistent style.
+
+    This is adapted from the error output styling in `sphinx-theme-builder`.
+    """
+
+    reference: str
+
+    def __init__(
+        self,
+        *,
+        kind: 'Literal["error", "warning"]' = "error",
+        reference: Optional[str] = None,
+        message: Union[str, Text],
+        context: Optional[Union[str, Text]],
+        hint_stmt: Optional[Union[str, Text]],
+        note_stmt: Optional[Union[str, Text]] = None,
+        link: Optional[str] = None,
+    ) -> None:
+        # Ensure a proper reference is provided.
+        if reference is None:
+            assert hasattr(self, "reference"), "error reference not provided!"
+            reference = self.reference
+        assert _is_kebab_case(reference), "error reference must be kebab-case!"
+
+        self.kind = kind
+        self.reference = reference
+
+        self.message = message
+        self.context = context
+
+        self.note_stmt = note_stmt
+        self.hint_stmt = hint_stmt
+
+        self.link = link
+
+        super().__init__(f"<{self.__class__.__name__}: {self.reference}>")
+
+    def __repr__(self) -> str:
+        return (
+            f"<{self.__class__.__name__}("
+            f"reference={self.reference!r}, "
+            f"message={self.message!r}, "
+            f"context={self.context!r}, "
+            f"note_stmt={self.note_stmt!r}, "
+            f"hint_stmt={self.hint_stmt!r}"
+            ")>"
+        )
+
+    def __rich_console__(
+        self,
+        console: Console,
+        options: ConsoleOptions,
+    ) -> RenderResult:
+        colour = "red" if self.kind == "error" else "yellow"
+
+        yield f"[{colour} bold]{self.kind}[/]: [bold]{self.reference}[/]"
+        yield ""
+
+        if not options.ascii_only:
+            # Present the main message, with relevant context indented.
+            if self.context is not None:
+                yield _prefix_with_indent(
+                    self.message,
+                    console,
+                    prefix=f"[{colour}]×[/] ",
+                    indent=f"[{colour}]│[/] ",
+                )
+                yield _prefix_with_indent(
+                    self.context,
+                    console,
+                    prefix=f"[{colour}]╰─>[/] ",
+                    indent=f"[{colour}]   [/] ",
+                )
+            else:
+                yield _prefix_with_indent(
+                    self.message,
+                    console,
+                    prefix="[red]×[/] ",
+                    indent="  ",
+                )
+        else:
+            yield self.message
+            if self.context is not None:
+                yield ""
+                yield self.context
+
+        if self.note_stmt is not None or self.hint_stmt is not None:
+            yield ""
+
+        if self.note_stmt is not None:
+            yield _prefix_with_indent(
+                self.note_stmt,
+                console,
+                prefix="[magenta bold]note[/]: ",
+                indent="      ",
+            )
+        if self.hint_stmt is not None:
+            yield _prefix_with_indent(
+                self.hint_stmt,
+                console,
+                prefix="[cyan bold]hint[/]: ",
+                indent="      ",
+            )
+
+        if self.link is not None:
+            yield ""
+            yield f"Link: {self.link}"
+
+
+#
+# Actual Errors
+#
+class ConfigurationError(PipError):
+    """General exception in configuration"""
+
+
+class InstallationError(PipError):
+    """General exception during installation"""
+
+
+class UninstallationError(PipError):
+    """General exception during uninstallation"""
+
+
+class MissingPyProjectBuildRequires(DiagnosticPipError):
+    """Raised when pyproject.toml has `build-system`, but no `build-system.requires`."""
+
+    reference = "missing-pyproject-build-system-requires"
+
+    def __init__(self, *, package: str) -> None:
+        super().__init__(
+            message=f"Can not process {escape(package)}",
+            context=Text(
+                "This package has an invalid pyproject.toml file.\n"
+                "The [build-system] table is missing the mandatory `requires` key."
+            ),
+            note_stmt="This is an issue with the package mentioned above, not pip.",
+            hint_stmt=Text("See PEP 518 for the detailed specification."),
+        )
+
+
+class InvalidPyProjectBuildRequires(DiagnosticPipError):
+    """Raised when pyproject.toml an invalid `build-system.requires`."""
+
+    reference = "invalid-pyproject-build-system-requires"
+
+    def __init__(self, *, package: str, reason: str) -> None:
+        super().__init__(
+            message=f"Can not process {escape(package)}",
+            context=Text(
+                "This package has an invalid `build-system.requires` key in "
+                f"pyproject.toml.\n{reason}"
+            ),
+            note_stmt="This is an issue with the package mentioned above, not pip.",
+            hint_stmt=Text("See PEP 518 for the detailed specification."),
+        )
+
+
+class NoneMetadataError(PipError):
+    """Raised when accessing a Distribution's "METADATA" or "PKG-INFO".
+
+    This signifies an inconsistency, when the Distribution claims to have
+    the metadata file (if not, raise ``FileNotFoundError`` instead), but is
+    not actually able to produce its content. This may be due to permission
+    errors.
+    """
+
+    def __init__(
+        self,
+        dist: "BaseDistribution",
+        metadata_name: str,
+    ) -> None:
+        """
+        :param dist: A Distribution object.
+        :param metadata_name: The name of the metadata being accessed
+            (can be "METADATA" or "PKG-INFO").
+        """
+        self.dist = dist
+        self.metadata_name = metadata_name
+
+    def __str__(self) -> str:
+        # Use `dist` in the error message because its stringification
+        # includes more information, like the version and location.
+        return f"None {self.metadata_name} metadata found for distribution: {self.dist}"
+
+
+class UserInstallationInvalid(InstallationError):
+    """A --user install is requested on an environment without user site."""
+
+    def __str__(self) -> str:
+        return "User base directory is not specified"
+
+
+class InvalidSchemeCombination(InstallationError):
+    def __str__(self) -> str:
+        before = ", ".join(str(a) for a in self.args[:-1])
+        return f"Cannot set {before} and {self.args[-1]} together"
+
+
+class DistributionNotFound(InstallationError):
+    """Raised when a distribution cannot be found to satisfy a requirement"""
+
+
+class RequirementsFileParseError(InstallationError):
+    """Raised when a general error occurs parsing a requirements file line."""
+
+
+class BestVersionAlreadyInstalled(PipError):
+    """Raised when the most up-to-date version of a package is already
+    installed."""
+
+
+class BadCommand(PipError):
+    """Raised when virtualenv or a command is not found"""
+
+
+class CommandError(PipError):
+    """Raised when there is an error in command-line arguments"""
+
+
+class PreviousBuildDirError(PipError):
+    """Raised when there's a previous conflicting build directory"""
+
+
+class NetworkConnectionError(PipError):
+    """HTTP connection error"""
+
+    def __init__(
+        self,
+        error_msg: str,
+        response: Optional[Response] = None,
+        request: Optional[Request] = None,
+    ) -> None:
+        """
+        Initialize NetworkConnectionError with `request` and `response`
+        objects.
+        """
+        self.response = response
+        self.request = request
+        self.error_msg = error_msg
+        if (
+            self.response is not None
+            and not self.request
+            and hasattr(response, "request")
+        ):
+            self.request = self.response.request
+        super().__init__(error_msg, response, request)
+
+    def __str__(self) -> str:
+        return str(self.error_msg)
+
+
+class InvalidWheelFilename(InstallationError):
+    """Invalid wheel filename."""
+
+
+class UnsupportedWheel(InstallationError):
+    """Unsupported wheel."""
+
+
+class InvalidWheel(InstallationError):
+    """Invalid (e.g. corrupt) wheel."""
+
+    def __init__(self, location: str, name: str):
+        self.location = location
+        self.name = name
+
+    def __str__(self) -> str:
+        return f"Wheel '{self.name}' located at {self.location} is invalid."
+
+
+class MetadataInconsistent(InstallationError):
+    """Built metadata contains inconsistent information.
+
+    This is raised when the metadata contains values (e.g. name and version)
+    that do not match the information previously obtained from sdist filename,
+    user-supplied ``#egg=`` value, or an install requirement name.
+    """
+
+    def __init__(
+        self, ireq: "InstallRequirement", field: str, f_val: str, m_val: str
+    ) -> None:
+        self.ireq = ireq
+        self.field = field
+        self.f_val = f_val
+        self.m_val = m_val
+
+    def __str__(self) -> str:
+        return (
+            f"Requested {self.ireq} has inconsistent {self.field}: "
+            f"expected {self.f_val!r}, but metadata has {self.m_val!r}"
+        )
+
+
+class InstallationSubprocessError(DiagnosticPipError, InstallationError):
+    """A subprocess call failed."""
+
+    reference = "subprocess-exited-with-error"
+
+    def __init__(
+        self,
+        *,
+        command_description: str,
+        exit_code: int,
+        output_lines: Optional[List[str]],
+    ) -> None:
+        if output_lines is None:
+            output_prompt = Text("See above for output.")
+        else:
+            output_prompt = (
+                Text.from_markup(f"[red][{len(output_lines)} lines of output][/]\n")
+                + Text("".join(output_lines))
+                + Text.from_markup(R"[red]\[end of output][/]")
+            )
+
+        super().__init__(
+            message=(
+                f"[green]{escape(command_description)}[/] did not run successfully.\n"
+                f"exit code: {exit_code}"
+            ),
+            context=output_prompt,
+            hint_stmt=None,
+            note_stmt=(
+                "This error originates from a subprocess, and is likely not a "
+                "problem with pip."
+            ),
+        )
+
+        self.command_description = command_description
+        self.exit_code = exit_code
+
+    def __str__(self) -> str:
+        return f"{self.command_description} exited with {self.exit_code}"
+
+
+class MetadataGenerationFailed(InstallationSubprocessError, InstallationError):
+    reference = "metadata-generation-failed"
+
+    def __init__(
+        self,
+        *,
+        package_details: str,
+    ) -> None:
+        super(InstallationSubprocessError, self).__init__(
+            message="Encountered error while generating package metadata.",
+            context=escape(package_details),
+            hint_stmt="See above for details.",
+            note_stmt="This is an issue with the package mentioned above, not pip.",
+        )
+
+    def __str__(self) -> str:
+        return "metadata generation failed"
+
+
+class HashErrors(InstallationError):
+    """Multiple HashError instances rolled into one for reporting"""
+
+    def __init__(self) -> None:
+        self.errors: List["HashError"] = []
+
+    def append(self, error: "HashError") -> None:
+        self.errors.append(error)
+
+    def __str__(self) -> str:
+        lines = []
+        self.errors.sort(key=lambda e: e.order)
+        for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
+            lines.append(cls.head)
+            lines.extend(e.body() for e in errors_of_cls)
+        if lines:
+            return "\n".join(lines)
+        return ""
+
+    def __bool__(self) -> bool:
+        return bool(self.errors)
+
+
+class HashError(InstallationError):
+    """
+    A failure to verify a package against known-good hashes
+
+    :cvar order: An int sorting hash exception classes by difficulty of
+        recovery (lower being harder), so the user doesn't bother fretting
+        about unpinned packages when he has deeper issues, like VCS
+        dependencies, to deal with. Also keeps error reports in a
+        deterministic order.
+    :cvar head: A section heading for display above potentially many
+        exceptions of this kind
+    :ivar req: The InstallRequirement that triggered this error. This is
+        pasted on after the exception is instantiated, because it's not
+        typically available earlier.
+
+    """
+
+    req: Optional["InstallRequirement"] = None
+    head = ""
+    order: int = -1
+
+    def body(self) -> str:
+        """Return a summary of me for display under the heading.
+
+        This default implementation simply prints a description of the
+        triggering requirement.
+
+        :param req: The InstallRequirement that provoked this error, with
+            its link already populated by the resolver's _populate_link().
+
+        """
+        return f"    {self._requirement_name()}"
+
+    def __str__(self) -> str:
+        return f"{self.head}\n{self.body()}"
+
+    def _requirement_name(self) -> str:
+        """Return a description of the requirement that triggered me.
+
+        This default implementation returns long description of the req, with
+        line numbers
+
+        """
+        return str(self.req) if self.req else "unknown package"
+
+
+class VcsHashUnsupported(HashError):
+    """A hash was provided for a version-control-system-based requirement, but
+    we don't have a method for hashing those."""
+
+    order = 0
+    head = (
+        "Can't verify hashes for these requirements because we don't "
+        "have a way to hash version control repositories:"
+    )
+
+
+class DirectoryUrlHashUnsupported(HashError):
+    """A hash was provided for a version-control-system-based requirement, but
+    we don't have a method for hashing those."""
+
+    order = 1
+    head = (
+        "Can't verify hashes for these file:// requirements because they "
+        "point to directories:"
+    )
+
+
+class HashMissing(HashError):
+    """A hash was needed for a requirement but is absent."""
+
+    order = 2
+    head = (
+        "Hashes are required in --require-hashes mode, but they are "
+        "missing from some requirements. Here is a list of those "
+        "requirements along with the hashes their downloaded archives "
+        "actually had. Add lines like these to your requirements files to "
+        "prevent tampering. (If you did not enable --require-hashes "
+        "manually, note that it turns on automatically when any package "
+        "has a hash.)"
+    )
+
+    def __init__(self, gotten_hash: str) -> None:
+        """
+        :param gotten_hash: The hash of the (possibly malicious) archive we
+            just downloaded
+        """
+        self.gotten_hash = gotten_hash
+
+    def body(self) -> str:
+        # Dodge circular import.
+        from pip._internal.utils.hashes import FAVORITE_HASH
+
+        package = None
+        if self.req:
+            # In the case of URL-based requirements, display the original URL
+            # seen in the requirements file rather than the package name,
+            # so the output can be directly copied into the requirements file.
+            package = (
+                self.req.original_link
+                if self.req.is_direct
+                # In case someone feeds something downright stupid
+                # to InstallRequirement's constructor.
+                else getattr(self.req, "req", None)
+            )
+        return "    {} --hash={}:{}".format(
+            package or "unknown package", FAVORITE_HASH, self.gotten_hash
+        )
+
+
+class HashUnpinned(HashError):
+    """A requirement had a hash specified but was not pinned to a specific
+    version."""
+
+    order = 3
+    head = (
+        "In --require-hashes mode, all requirements must have their "
+        "versions pinned with ==. These do not:"
+    )
+
+
+class HashMismatch(HashError):
+    """
+    Distribution file hash values don't match.
+
+    :ivar package_name: The name of the package that triggered the hash
+        mismatch. Feel free to write to this after the exception is raised to
+        improve its error message.
+
+    """
+
+    order = 4
+    head = (
+        "THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS "
+        "FILE. If you have updated the package versions, please update "
+        "the hashes. Otherwise, examine the package contents carefully; "
+        "someone may have tampered with them."
+    )
+
+    def __init__(self, allowed: Dict[str, List[str]], gots: Dict[str, "_Hash"]) -> None:
+        """
+        :param allowed: A dict of algorithm names pointing to lists of allowed
+            hex digests
+        :param gots: A dict of algorithm names pointing to hashes we
+            actually got from the files under suspicion
+        """
+        self.allowed = allowed
+        self.gots = gots
+
+    def body(self) -> str:
+        return f"    {self._requirement_name()}:\n{self._hash_comparison()}"
+
+    def _hash_comparison(self) -> str:
+        """
+        Return a comparison of actual and expected hash values.
+
+        Example::
+
+               Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
+                            or 123451234512345123451234512345123451234512345
+                    Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
+
+        """
+
+        def hash_then_or(hash_name: str) -> "chain[str]":
+            # For now, all the decent hashes have 6-char names, so we can get
+            # away with hard-coding space literals.
+            return chain([hash_name], repeat("    or"))
+
+        lines: List[str] = []
+        for hash_name, expecteds in self.allowed.items():
+            prefix = hash_then_or(hash_name)
+            lines.extend((f"        Expected {next(prefix)} {e}") for e in expecteds)
+            lines.append(
+                f"             Got        {self.gots[hash_name].hexdigest()}\n"
+            )
+        return "\n".join(lines)
+
+
+class UnsupportedPythonVersion(InstallationError):
+    """Unsupported python version according to Requires-Python package
+    metadata."""
+
+
+class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
+    """When there are errors while loading a configuration file"""
+
+    def __init__(
+        self,
+        reason: str = "could not be loaded",
+        fname: Optional[str] = None,
+        error: Optional[configparser.Error] = None,
+    ) -> None:
+        super().__init__(error)
+        self.reason = reason
+        self.fname = fname
+        self.error = error
+
+    def __str__(self) -> str:
+        if self.fname is not None:
+            message_part = f" in {self.fname}."
+        else:
+            assert self.error is not None
+            message_part = f".\n{self.error}\n"
+        return f"Configuration file {self.reason}{message_part}"
+
+
+_DEFAULT_EXTERNALLY_MANAGED_ERROR = f"""\
+The Python environment under {sys.prefix} is managed externally, and may not be
+manipulated by the user. Please use specific tooling from the distributor of
+the Python installation to interact with this environment instead.
+"""
+
+
+class ExternallyManagedEnvironment(DiagnosticPipError):
+    """The current environment is externally managed.
+
+    This is raised when the current environment is externally managed, as
+    defined by `PEP 668`_. The ``EXTERNALLY-MANAGED`` configuration is checked
+    and displayed when the error is bubbled up to the user.
+
+    :param error: The error message read from ``EXTERNALLY-MANAGED``.
+    """
+
+    reference = "externally-managed-environment"
+
+    def __init__(self, error: Optional[str]) -> None:
+        if error is None:
+            context = Text(_DEFAULT_EXTERNALLY_MANAGED_ERROR)
+        else:
+            context = Text(error)
+        super().__init__(
+            message="This environment is externally managed",
+            context=context,
+            note_stmt=(
+                "If you believe this is a mistake, please contact your "
+                "Python installation or OS distribution provider. "
+                "You can override this, at the risk of breaking your Python "
+                "installation or OS, by passing --break-system-packages."
+            ),
+            hint_stmt=Text("See PEP 668 for the detailed specification."),
+        )
+
+    @staticmethod
+    def _iter_externally_managed_error_keys() -> Iterator[str]:
+        # LC_MESSAGES is in POSIX, but not the C standard. The most common
+        # platform that does not implement this category is Windows, where
+        # using other categories for console message localization is equally
+        # unreliable, so we fall back to the locale-less vendor message. This
+        # can always be re-evaluated when a vendor proposes a new alternative.
+        try:
+            category = locale.LC_MESSAGES
+        except AttributeError:
+            lang: Optional[str] = None
+        else:
+            lang, _ = locale.getlocale(category)
+        if lang is not None:
+            yield f"Error-{lang}"
+            for sep in ("-", "_"):
+                before, found, _ = lang.partition(sep)
+                if not found:
+                    continue
+                yield f"Error-{before}"
+        yield "Error"
+
+    @classmethod
+    def from_config(
+        cls,
+        config: Union[pathlib.Path, str],
+    ) -> "ExternallyManagedEnvironment":
+        parser = configparser.ConfigParser(interpolation=None)
+        try:
+            parser.read(config, encoding="utf-8")
+            section = parser["externally-managed"]
+            for key in cls._iter_externally_managed_error_keys():
+                with contextlib.suppress(KeyError):
+                    return cls(section[key])
+        except KeyError:
+            pass
+        except (OSError, UnicodeDecodeError, configparser.ParsingError):
+            from pip._internal.utils._log import VERBOSE
+
+            exc_info = logger.isEnabledFor(VERBOSE)
+            logger.warning("Failed to read %s", config, exc_info=exc_info)
+        return cls(None)
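
from_config() above parses the EXTERNALLY-MANAGED marker defined by PEP 668, an INI file whose [externally-managed] section carries the distributor's message. A sketch of that file format and of reading it with configparser; the message text is an example:

import configparser

marker_text = """\
[externally-managed]
Error = This Python is managed by your OS package manager.
 Create a virtual environment, or use your distribution's tooling instead.
"""

parser = configparser.ConfigParser(interpolation=None)
parser.read_string(marker_text)
print(parser["externally-managed"]["Error"])
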
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/index/__init__.py b/.venv/lib/python3.12/site-packages/pip/_internal/index/__init__.py
new file mode 100644
index 0000000..7a17b7b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/index/__init__.py
@@ -0,0 +1,2 @@
+"""Index interaction code
+"""
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/index/collector.py b/.venv/lib/python3.12/site-packages/pip/_internal/index/collector.py
new file mode 100644
index 0000000..08c8bdd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/index/collector.py
@@ -0,0 +1,507 @@
+"""
+The main purpose of this module is to expose LinkCollector.collect_sources().
+"""
+
+import collections
+import email.message
+import functools
+import itertools
+import json
+import logging
+import os
+import urllib.parse
+import urllib.request
+from html.parser import HTMLParser
+from optparse import Values
+from typing import (
+    TYPE_CHECKING,
+    Callable,
+    Dict,
+    Iterable,
+    List,
+    MutableMapping,
+    NamedTuple,
+    Optional,
+    Sequence,
+    Tuple,
+    Union,
+)
+
+from pip._vendor import requests
+from pip._vendor.requests import Response
+from pip._vendor.requests.exceptions import RetryError, SSLError
+
+from pip._internal.exceptions import NetworkConnectionError
+from pip._internal.models.link import Link
+from pip._internal.models.search_scope import SearchScope
+from pip._internal.network.session import PipSession
+from pip._internal.network.utils import raise_for_status
+from pip._internal.utils.filetypes import is_archive_file
+from pip._internal.utils.misc import redact_auth_from_url
+from pip._internal.vcs import vcs
+
+from .sources import CandidatesFromPage, LinkSource, build_source
+
+if TYPE_CHECKING:
+    from typing import Protocol
+else:
+    Protocol = object
+
+logger = logging.getLogger(__name__)
+
+ResponseHeaders = MutableMapping[str, str]
+
+
+def _match_vcs_scheme(url: str) -> Optional[str]:
+    """Look for VCS schemes in the URL.
+
+    Returns the matched VCS scheme, or None if there's no match.
+    """
+    for scheme in vcs.schemes:
+        if url.lower().startswith(scheme) and url[len(scheme)] in "+:":
+            return scheme
+    return None
+
+
+class _NotAPIContent(Exception):
+    def __init__(self, content_type: str, request_desc: str) -> None:
+        super().__init__(content_type, request_desc)
+        self.content_type = content_type
+        self.request_desc = request_desc
+
+
+def _ensure_api_header(response: Response) -> None:
+    """
+    Check the Content-Type header to ensure the response contains a Simple
+    API Response.
+
+    Raises `_NotAPIContent` if the content type is not a valid content-type.
+    """
+    content_type = response.headers.get("Content-Type", "Unknown")
+
+    content_type_l = content_type.lower()
+    if content_type_l.startswith(
+        (
+            "text/html",
+            "application/vnd.pypi.simple.v1+html",
+            "application/vnd.pypi.simple.v1+json",
+        )
+    ):
+        return
+
+    raise _NotAPIContent(content_type, response.request.method)
+
+
+class _NotHTTP(Exception):
+    pass
+
+
+def _ensure_api_response(url: str, session: PipSession) -> None:
+    """
+    Send a HEAD request to the URL, and ensure the response contains a Simple
+    API response.
+
+    Raises `_NotHTTP` if the URL is not available for a HEAD request, or
+    `_NotAPIContent` if the content type is not a valid content type.
+    """
+    scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url)
+    if scheme not in {"http", "https"}:
+        raise _NotHTTP()
+
+    resp = session.head(url, allow_redirects=True)
+    raise_for_status(resp)
+
+    _ensure_api_header(resp)
+
+
+def _get_simple_response(url: str, session: PipSession) -> Response:
+    """Access an Simple API response with GET, and return the response.
+
+    This consists of three parts:
+
+    1. If the URL looks suspiciously like an archive, send a HEAD first to
+       check the Content-Type is HTML or Simple API, to avoid downloading a
+       large file. Raise `_NotHTTP` if the content type cannot be determined, or
+       `_NotAPIContent` if it is not HTML or a Simple API.
+    2. Actually perform the request. Raise HTTP exceptions on network failures.
+    3. Check the Content-Type header to make sure we got a Simple API response,
+       and raise `_NotAPIContent` otherwise.
+    """
+    if is_archive_file(Link(url).filename):
+        _ensure_api_response(url, session=session)
+
+    logger.debug("Getting page %s", redact_auth_from_url(url))
+
+    resp = session.get(
+        url,
+        headers={
+            "Accept": ", ".join(
+                [
+                    "application/vnd.pypi.simple.v1+json",
+                    "application/vnd.pypi.simple.v1+html; q=0.1",
+                    "text/html; q=0.01",
+                ]
+            ),
+            # We don't want to blindly return cached data for
+            # /simple/, because authors generally expect that
+            # twine upload && pip install will function, but if
+            # they've done a pip install in the last ~10 minutes
+            # it won't. Thus by setting this to zero we will not
+            # blindly use any cached data. However, the benefit of
+            # using max-age=0 instead of no-cache is that we will
+            # still support conditional requests, so we will still
+            # minimize traffic sent in cases where the page hasn't
+            # changed at all, we will just always incur the round
+            # trip for the conditional GET now instead of only
+            # once per 10 minutes.
+            # For more information, please see pypa/pip#5670.
+            "Cache-Control": "max-age=0",
+        },
+    )
+    raise_for_status(resp)
+
+    # The check for archives above only works if the URL ends with
+    # something that looks like an archive. However, that is not a
+    # requirement of a URL. Unless we issue a HEAD request on every
+    # URL, we cannot know ahead of time for sure whether something is a
+    # Simple API response or not. However, we can check after we've
+    # downloaded it.
+    _ensure_api_header(resp)
+
+    logger.debug(
+        "Fetched page %s as %s",
+        redact_auth_from_url(url),
+        resp.headers.get("Content-Type", "Unknown"),
+    )
+
+    return resp
+
+
+def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]:
+    """Determine if we have any encoding information in our headers."""
+    if headers and "Content-Type" in headers:
+        m = email.message.Message()
+        m["content-type"] = headers["Content-Type"]
+        charset = m.get_param("charset")
+        if charset:
+            return str(charset)
+    return None
+
+
+class CacheablePageContent:
+    def __init__(self, page: "IndexContent") -> None:
+        assert page.cache_link_parsing
+        self.page = page
+
+    def __eq__(self, other: object) -> bool:
+        return isinstance(other, type(self)) and self.page.url == other.page.url
+
+    def __hash__(self) -> int:
+        return hash(self.page.url)
+
+
+class ParseLinks(Protocol):
+    def __call__(self, page: "IndexContent") -> Iterable[Link]:
+        ...
+
+
+def with_cached_index_content(fn: ParseLinks) -> ParseLinks:
+    """
+    Given a function that parses an Iterable[Link] from an IndexContent, cache the
+    function's result (keyed by CacheablePageContent), unless the IndexContent
+    `page` has `page.cache_link_parsing == False`.
+    """
+
+    @functools.lru_cache(maxsize=None)
+    def wrapper(cacheable_page: CacheablePageContent) -> List[Link]:
+        return list(fn(cacheable_page.page))
+
+    @functools.wraps(fn)
+    def wrapper_wrapper(page: "IndexContent") -> List[Link]:
+        if page.cache_link_parsing:
+            return wrapper(CacheablePageContent(page))
+        return list(fn(page))
+
+    return wrapper_wrapper
+
+
+@with_cached_index_content
+def parse_links(page: "IndexContent") -> Iterable[Link]:
+    """
+    Parse a Simple API's Index Content, and yield its anchor elements as Link objects.
+    """
+
+    content_type_l = page.content_type.lower()
+    if content_type_l.startswith("application/vnd.pypi.simple.v1+json"):
+        data = json.loads(page.content)
+        for file in data.get("files", []):
+            link = Link.from_json(file, page.url)
+            if link is None:
+                continue
+            yield link
+        return
+
+    parser = HTMLLinkParser(page.url)
+    encoding = page.encoding or "utf-8"
+    parser.feed(page.content.decode(encoding))
+
+    url = page.url
+    base_url = parser.base_url or url
+    for anchor in parser.anchors:
+        link = Link.from_element(anchor, page_url=url, base_url=base_url)
+        if link is None:
+            continue
+        yield link
+
+
+class IndexContent:
+    """Represents one response (or page), along with its URL"""
+
+    def __init__(
+        self,
+        content: bytes,
+        content_type: str,
+        encoding: Optional[str],
+        url: str,
+        cache_link_parsing: bool = True,
+    ) -> None:
+        """
+        :param encoding: the encoding to decode the given content.
+        :param url: the URL from which the HTML was downloaded.
+        :param cache_link_parsing: whether links parsed from this page's url
+                                   should be cached. PyPI index urls should
+                                   have this set to False, for example.
+        """
+        self.content = content
+        self.content_type = content_type
+        self.encoding = encoding
+        self.url = url
+        self.cache_link_parsing = cache_link_parsing
+
+    def __str__(self) -> str:
+        return redact_auth_from_url(self.url)
+
+
+class HTMLLinkParser(HTMLParser):
+    """
+    HTMLParser that keeps the first base HREF and a list of all anchor
+    elements' attributes.
+    """
+
+    def __init__(self, url: str) -> None:
+        super().__init__(convert_charrefs=True)
+
+        self.url: str = url
+        self.base_url: Optional[str] = None
+        self.anchors: List[Dict[str, Optional[str]]] = []
+
+    def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None:
+        if tag == "base" and self.base_url is None:
+            href = self.get_href(attrs)
+            if href is not None:
+                self.base_url = href
+        elif tag == "a":
+            self.anchors.append(dict(attrs))
+
+    def get_href(self, attrs: List[Tuple[str, Optional[str]]]) -> Optional[str]:
+        for name, value in attrs:
+            if name == "href":
+                return value
+        return None
+
+
+def _handle_get_simple_fail(
+    link: Link,
+    reason: Union[str, Exception],
+    meth: Optional[Callable[..., None]] = None,
+) -> None:
+    if meth is None:
+        meth = logger.debug
+    meth("Could not fetch URL %s: %s - skipping", link, reason)
+
+
+def _make_index_content(
+    response: Response, cache_link_parsing: bool = True
+) -> IndexContent:
+    encoding = _get_encoding_from_headers(response.headers)
+    return IndexContent(
+        response.content,
+        response.headers["Content-Type"],
+        encoding=encoding,
+        url=response.url,
+        cache_link_parsing=cache_link_parsing,
+    )
+
+
+def _get_index_content(link: Link, *, session: PipSession) -> Optional["IndexContent"]:
+    url = link.url.split("#", 1)[0]
+
+    # Check for VCS schemes that do not support lookup as web pages.
+    vcs_scheme = _match_vcs_scheme(url)
+    if vcs_scheme:
+        logger.warning(
+            "Cannot look at %s URL %s because it does not support lookup as web pages.",
+            vcs_scheme,
+            link,
+        )
+        return None
+
+    # Tack index.html onto file:// URLs that point to directories
+    scheme, _, path, _, _, _ = urllib.parse.urlparse(url)
+    if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)):
+        # add trailing slash if not present so urljoin doesn't trim
+        # final segment
+        if not url.endswith("/"):
+            url += "/"
+        # TODO: In the future, it would be nice if pip supported PEP 691
+        #       style responses in the file:// URLs, however there's no
+        #       standard file extension for application/vnd.pypi.simple.v1+json
+        #       so we'll need to come up with something on our own.
+        url = urllib.parse.urljoin(url, "index.html")
+        logger.debug(" file: URL is directory, getting %s", url)
+
+    try:
+        resp = _get_simple_response(url, session=session)
+    except _NotHTTP:
+        logger.warning(
+            "Skipping page %s because it looks like an archive, and cannot "
+            "be checked by a HTTP HEAD request.",
+            link,
+        )
+    except _NotAPIContent as exc:
+        logger.warning(
+            "Skipping page %s because the %s request got Content-Type: %s. "
+            "The only supported Content-Types are application/vnd.pypi.simple.v1+json, "
+            "application/vnd.pypi.simple.v1+html, and text/html",
+            link,
+            exc.request_desc,
+            exc.content_type,
+        )
+    except NetworkConnectionError as exc:
+        _handle_get_simple_fail(link, exc)
+    except RetryError as exc:
+        _handle_get_simple_fail(link, exc)
+    except SSLError as exc:
+        reason = "There was a problem confirming the ssl certificate: "
+        reason += str(exc)
+        _handle_get_simple_fail(link, reason, meth=logger.info)
+    except requests.ConnectionError as exc:
+        _handle_get_simple_fail(link, f"connection error: {exc}")
+    except requests.Timeout:
+        _handle_get_simple_fail(link, "timed out")
+    else:
+        return _make_index_content(resp, cache_link_parsing=link.cache_link_parsing)
+    return None
+
+
+class CollectedSources(NamedTuple):
+    find_links: Sequence[Optional[LinkSource]]
+    index_urls: Sequence[Optional[LinkSource]]
+
+
+class LinkCollector:
+
+    """
+    Responsible for collecting Link objects from all configured locations,
+    making network requests as needed.
+
+    The class's main method is its collect_sources() method.
+    """
+
+    def __init__(
+        self,
+        session: PipSession,
+        search_scope: SearchScope,
+    ) -> None:
+        self.search_scope = search_scope
+        self.session = session
+
+    @classmethod
+    def create(
+        cls,
+        session: PipSession,
+        options: Values,
+        suppress_no_index: bool = False,
+    ) -> "LinkCollector":
+        """
+        :param session: The Session to use to make requests.
+        :param suppress_no_index: Whether to ignore the --no-index option
+            when constructing the SearchScope object.
+        """
+        index_urls = [options.index_url] + options.extra_index_urls
+        if options.no_index and not suppress_no_index:
+            logger.debug(
+                "Ignoring indexes: %s",
+                ",".join(redact_auth_from_url(url) for url in index_urls),
+            )
+            index_urls = []
+
+        # Make sure find_links is a list before passing to create().
+        find_links = options.find_links or []
+
+        search_scope = SearchScope.create(
+            find_links=find_links,
+            index_urls=index_urls,
+            no_index=options.no_index,
+        )
+        link_collector = LinkCollector(
+            session=session,
+            search_scope=search_scope,
+        )
+        return link_collector
+
+    @property
+    def find_links(self) -> List[str]:
+        return self.search_scope.find_links
+
+    def fetch_response(self, location: Link) -> Optional[IndexContent]:
+        """
+        Fetch an HTML page containing package links.
+        """
+        return _get_index_content(location, session=self.session)
+
+    def collect_sources(
+        self,
+        project_name: str,
+        candidates_from_page: CandidatesFromPage,
+    ) -> CollectedSources:
+        # The OrderedDict calls deduplicate sources by URL.
+        index_url_sources = collections.OrderedDict(
+            build_source(
+                loc,
+                candidates_from_page=candidates_from_page,
+                page_validator=self.session.is_secure_origin,
+                expand_dir=False,
+                cache_link_parsing=False,
+                project_name=project_name,
+            )
+            for loc in self.search_scope.get_index_urls_locations(project_name)
+        ).values()
+        find_links_sources = collections.OrderedDict(
+            build_source(
+                loc,
+                candidates_from_page=candidates_from_page,
+                page_validator=self.session.is_secure_origin,
+                expand_dir=True,
+                cache_link_parsing=True,
+                project_name=project_name,
+            )
+            for loc in self.find_links
+        ).values()
+
+        if logger.isEnabledFor(logging.DEBUG):
+            lines = [
+                f"* {s.link}"
+                for s in itertools.chain(find_links_sources, index_url_sources)
+                if s is not None and s.link is not None
+            ]
+            lines = [
+                f"{len(lines)} location(s) to search "
+                f"for versions of {project_name}:"
+            ] + lines
+            logger.debug("\n".join(lines))
+
+        return CollectedSources(
+            find_links=list(find_links_sources),
+            index_urls=list(index_url_sources),
+        )
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/index/package_finder.py b/.venv/lib/python3.12/site-packages/pip/_internal/index/package_finder.py
new file mode 100644
index 0000000..ec9ebc3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/index/package_finder.py
@@ -0,0 +1,1027 @@
+"""Routines related to PyPI, indexes"""
+
+import enum
+import functools
+import itertools
+import logging
+import re
+from typing import TYPE_CHECKING, FrozenSet, Iterable, List, Optional, Set, Tuple, Union
+
+from pip._vendor.packaging import specifiers
+from pip._vendor.packaging.tags import Tag
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.version import _BaseVersion
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.exceptions import (
+    BestVersionAlreadyInstalled,
+    DistributionNotFound,
+    InvalidWheelFilename,
+    UnsupportedWheel,
+)
+from pip._internal.index.collector import LinkCollector, parse_links
+from pip._internal.models.candidate import InstallationCandidate
+from pip._internal.models.format_control import FormatControl
+from pip._internal.models.link import Link
+from pip._internal.models.search_scope import SearchScope
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.models.target_python import TargetPython
+from pip._internal.models.wheel import Wheel
+from pip._internal.req import InstallRequirement
+from pip._internal.utils._log import getLogger
+from pip._internal.utils.filetypes import WHEEL_EXTENSION
+from pip._internal.utils.hashes import Hashes
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import build_netloc
+from pip._internal.utils.packaging import check_requires_python
+from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS
+
+if TYPE_CHECKING:
+    from pip._vendor.typing_extensions import TypeGuard
+
+__all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"]
+
+
+logger = getLogger(__name__)
+
+BuildTag = Union[Tuple[()], Tuple[int, str]]
+CandidateSortingKey = Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag]
+
+
+def _check_link_requires_python(
+    link: Link,
+    version_info: Tuple[int, int, int],
+    ignore_requires_python: bool = False,
+) -> bool:
+    """
+    Return whether the given Python version is compatible with a link's
+    "Requires-Python" value.
+
+    :param version_info: A 3-tuple of ints representing the Python
+        major-minor-micro version to check.
+    :param ignore_requires_python: Whether to ignore the "Requires-Python"
+        value if the given Python version isn't compatible.
+    """
+    try:
+        is_compatible = check_requires_python(
+            link.requires_python,
+            version_info=version_info,
+        )
+    except specifiers.InvalidSpecifier:
+        logger.debug(
+            "Ignoring invalid Requires-Python (%r) for link: %s",
+            link.requires_python,
+            link,
+        )
+    else:
+        if not is_compatible:
+            version = ".".join(map(str, version_info))
+            if not ignore_requires_python:
+                logger.verbose(
+                    "Link requires a different Python (%s not in: %r): %s",
+                    version,
+                    link.requires_python,
+                    link,
+                )
+                return False
+
+            logger.debug(
+                "Ignoring failed Requires-Python check (%s not in: %r) for link: %s",
+                version,
+                link.requires_python,
+                link,
+            )
+
+    return True
+
+
+class LinkType(enum.Enum):
+    candidate = enum.auto()
+    different_project = enum.auto()
+    yanked = enum.auto()
+    format_unsupported = enum.auto()
+    format_invalid = enum.auto()
+    platform_mismatch = enum.auto()
+    requires_python_mismatch = enum.auto()
+
+
+class LinkEvaluator:
+
+    """
+    Responsible for evaluating links for a particular project.
+    """
+
+    _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$")
+
+    # Don't include an allow_yanked default value to make sure each call
+    # site considers whether yanked releases are allowed. This also causes
+    # that decision to be made explicit in the calling code, which helps
+    # people when reading the code.
+    def __init__(
+        self,
+        project_name: str,
+        canonical_name: str,
+        formats: FrozenSet[str],
+        target_python: TargetPython,
+        allow_yanked: bool,
+        ignore_requires_python: Optional[bool] = None,
+    ) -> None:
+        """
+        :param project_name: The user supplied package name.
+        :param canonical_name: The canonical package name.
+        :param formats: The formats allowed for this package. Should be a set
+            with 'binary' or 'source' or both in it.
+        :param target_python: The target Python interpreter to use when
+            evaluating link compatibility. This is used, for example, to
+            check wheel compatibility, as well as when checking the Python
+            version, e.g. the Python version embedded in a link filename
+            (or egg fragment) and against an HTML link's optional PEP 503
+            "data-requires-python" attribute.
+        :param allow_yanked: Whether files marked as yanked (in the sense
+            of PEP 592) are permitted to be candidates for install.
+        :param ignore_requires_python: Whether to ignore incompatible
+            PEP 503 "data-requires-python" values in HTML links. Defaults
+            to False.
+        """
+        if ignore_requires_python is None:
+            ignore_requires_python = False
+
+        self._allow_yanked = allow_yanked
+        self._canonical_name = canonical_name
+        self._ignore_requires_python = ignore_requires_python
+        self._formats = formats
+        self._target_python = target_python
+
+        self.project_name = project_name
+
+    def evaluate_link(self, link: Link) -> Tuple[LinkType, str]:
+        """
+        Determine whether a link is a candidate for installation.
+
+        :return: A tuple (result, detail), where *result* is an enum
+            representing whether the evaluation found a candidate, or the reason
+            why one is not found. If a candidate is found, *detail* will be the
+            candidate's version string; if one is not found, it contains the
+            reason the link fails to qualify.
+        """
+        version = None
+        if link.is_yanked and not self._allow_yanked:
+            reason = link.yanked_reason or ""
+            return (LinkType.yanked, f"yanked for reason: {reason}")
+
+        if link.egg_fragment:
+            egg_info = link.egg_fragment
+            ext = link.ext
+        else:
+            egg_info, ext = link.splitext()
+            if not ext:
+                return (LinkType.format_unsupported, "not a file")
+            if ext not in SUPPORTED_EXTENSIONS:
+                return (
+                    LinkType.format_unsupported,
+                    f"unsupported archive format: {ext}",
+                )
+            if "binary" not in self._formats and ext == WHEEL_EXTENSION:
+                reason = f"No binaries permitted for {self.project_name}"
+                return (LinkType.format_unsupported, reason)
+            if "macosx10" in link.path and ext == ".zip":
+                return (LinkType.format_unsupported, "macosx10 one")
+            if ext == WHEEL_EXTENSION:
+                try:
+                    wheel = Wheel(link.filename)
+                except InvalidWheelFilename:
+                    return (
+                        LinkType.format_invalid,
+                        "invalid wheel filename",
+                    )
+                if canonicalize_name(wheel.name) != self._canonical_name:
+                    reason = f"wrong project name (not {self.project_name})"
+                    return (LinkType.different_project, reason)
+
+                supported_tags = self._target_python.get_unsorted_tags()
+                if not wheel.supported(supported_tags):
+                    # Include the wheel's tags in the reason string to
+                    # simplify troubleshooting compatibility issues.
+                    file_tags = ", ".join(wheel.get_formatted_file_tags())
+                    reason = (
+                        f"none of the wheel's tags ({file_tags}) are compatible "
+                        f"(run pip debug --verbose to show compatible tags)"
+                    )
+                    return (LinkType.platform_mismatch, reason)
+
+                version = wheel.version
+
+        # This should be up by the self.ok_binary check, but see issue 2700.
+        if "source" not in self._formats and ext != WHEEL_EXTENSION:
+            reason = f"No sources permitted for {self.project_name}"
+            return (LinkType.format_unsupported, reason)
+
+        if not version:
+            version = _extract_version_from_fragment(
+                egg_info,
+                self._canonical_name,
+            )
+        if not version:
+            reason = f"Missing project version for {self.project_name}"
+            return (LinkType.format_invalid, reason)
+
+        match = self._py_version_re.search(version)
+        if match:
+            version = version[: match.start()]
+            py_version = match.group(1)
+            if py_version != self._target_python.py_version:
+                return (
+                    LinkType.platform_mismatch,
+                    "Python version is incorrect",
+                )
+
+        supports_python = _check_link_requires_python(
+            link,
+            version_info=self._target_python.py_version_info,
+            ignore_requires_python=self._ignore_requires_python,
+        )
+        if not supports_python:
+            reason = f"{version} Requires-Python {link.requires_python}"
+            return (LinkType.requires_python_mismatch, reason)
+
+        logger.debug("Found link %s, version: %s", link, version)
+
+        return (LinkType.candidate, version)
+
+
+def filter_unallowed_hashes(
+    candidates: List[InstallationCandidate],
+    hashes: Optional[Hashes],
+    project_name: str,
+) -> List[InstallationCandidate]:
+    """
+    Filter out candidates whose hashes aren't allowed, and return a new
+    list of candidates.
+
+    If at least one candidate has an allowed hash, then all candidates with
+    either an allowed hash or no hash specified are returned.  Otherwise,
+    the given candidates are returned.
+
+    Including the candidates with no hash specified when there is a match
+    allows a warning to be logged if there is a more preferred candidate
+    with no hash specified.  Returning all candidates in the case of no
+    matches lets pip report the hash of the candidate that would otherwise
+    have been installed (e.g. permitting the user to more easily update
+    their requirements file with the desired hash).
+    """
+    if not hashes:
+        logger.debug(
+            "Given no hashes to check %s links for project %r: "
+            "discarding no candidates",
+            len(candidates),
+            project_name,
+        )
+        # Make sure we're not returning back the given value.
+        return list(candidates)
+
+    matches_or_no_digest = []
+    # Collect the non-matches for logging purposes.
+    non_matches = []
+    match_count = 0
+    for candidate in candidates:
+        link = candidate.link
+        if not link.has_hash:
+            pass
+        elif link.is_hash_allowed(hashes=hashes):
+            match_count += 1
+        else:
+            non_matches.append(candidate)
+            continue
+
+        matches_or_no_digest.append(candidate)
+
+    if match_count:
+        filtered = matches_or_no_digest
+    else:
+        # Make sure we're not returning back the given value.
+        filtered = list(candidates)
+
+    if len(filtered) == len(candidates):
+        discard_message = "discarding no candidates"
+    else:
+        discard_message = "discarding {} non-matches:\n  {}".format(
+            len(non_matches),
+            "\n  ".join(str(candidate.link) for candidate in non_matches),
+        )
+
+    logger.debug(
+        "Checked %s links for project %r against %s hashes "
+        "(%s matches, %s no digest): %s",
+        len(candidates),
+        project_name,
+        hashes.digest_count,
+        match_count,
+        len(matches_or_no_digest) - match_count,
+        discard_message,
+    )
+
+    return filtered
+
+
+class CandidatePreferences:
+
+    """
+    Encapsulates some of the preferences for filtering and sorting
+    InstallationCandidate objects.
+    """
+
+    def __init__(
+        self,
+        prefer_binary: bool = False,
+        allow_all_prereleases: bool = False,
+    ) -> None:
+        """
+        :param allow_all_prereleases: Whether to allow all pre-releases.
+        """
+        self.allow_all_prereleases = allow_all_prereleases
+        self.prefer_binary = prefer_binary
+
+
+class BestCandidateResult:
+    """A collection of candidates, returned by `PackageFinder.find_best_candidate`.
+
+    This class is only intended to be instantiated by CandidateEvaluator's
+    `compute_best_candidate()` method.
+    """
+
+    def __init__(
+        self,
+        candidates: List[InstallationCandidate],
+        applicable_candidates: List[InstallationCandidate],
+        best_candidate: Optional[InstallationCandidate],
+    ) -> None:
+        """
+        :param candidates: A sequence of all available candidates found.
+        :param applicable_candidates: The applicable candidates.
+        :param best_candidate: The most preferred candidate found, or None
+            if no applicable candidates were found.
+        """
+        assert set(applicable_candidates) <= set(candidates)
+
+        if best_candidate is None:
+            assert not applicable_candidates
+        else:
+            assert best_candidate in applicable_candidates
+
+        self._applicable_candidates = applicable_candidates
+        self._candidates = candidates
+
+        self.best_candidate = best_candidate
+
+    def iter_all(self) -> Iterable[InstallationCandidate]:
+        """Iterate through all candidates."""
+        return iter(self._candidates)
+
+    def iter_applicable(self) -> Iterable[InstallationCandidate]:
+        """Iterate through the applicable candidates."""
+        return iter(self._applicable_candidates)
+
+
+class CandidateEvaluator:
+
+    """
+    Responsible for filtering and sorting candidates for installation based
+    on what tags are valid.
+    """
+
+    @classmethod
+    def create(
+        cls,
+        project_name: str,
+        target_python: Optional[TargetPython] = None,
+        prefer_binary: bool = False,
+        allow_all_prereleases: bool = False,
+        specifier: Optional[specifiers.BaseSpecifier] = None,
+        hashes: Optional[Hashes] = None,
+    ) -> "CandidateEvaluator":
+        """Create a CandidateEvaluator object.
+
+        :param target_python: The target Python interpreter to use when
+            checking compatibility. If None (the default), a TargetPython
+            object will be constructed from the running Python.
+        :param specifier: An optional object implementing `filter`
+            (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
+            versions.
+        :param hashes: An optional collection of allowed hashes.
+        """
+        if target_python is None:
+            target_python = TargetPython()
+        if specifier is None:
+            specifier = specifiers.SpecifierSet()
+
+        supported_tags = target_python.get_sorted_tags()
+
+        return cls(
+            project_name=project_name,
+            supported_tags=supported_tags,
+            specifier=specifier,
+            prefer_binary=prefer_binary,
+            allow_all_prereleases=allow_all_prereleases,
+            hashes=hashes,
+        )
+
+    def __init__(
+        self,
+        project_name: str,
+        supported_tags: List[Tag],
+        specifier: specifiers.BaseSpecifier,
+        prefer_binary: bool = False,
+        allow_all_prereleases: bool = False,
+        hashes: Optional[Hashes] = None,
+    ) -> None:
+        """
+        :param supported_tags: The PEP 425 tags supported by the target
+            Python in order of preference (most preferred first).
+        """
+        self._allow_all_prereleases = allow_all_prereleases
+        self._hashes = hashes
+        self._prefer_binary = prefer_binary
+        self._project_name = project_name
+        self._specifier = specifier
+        self._supported_tags = supported_tags
+        # Since the index of the tag in the _supported_tags list is used
+        # as a priority, precompute a map from tag to index/priority to be
+        # used in wheel.find_most_preferred_tag.
+        self._wheel_tag_preferences = {
+            tag: idx for idx, tag in enumerate(supported_tags)
+        }
+
+    def get_applicable_candidates(
+        self,
+        candidates: List[InstallationCandidate],
+    ) -> List[InstallationCandidate]:
+        """
+        Return the applicable candidates from a list of candidates.
+        """
+        # Using None infers from the specifier instead.
+        allow_prereleases = self._allow_all_prereleases or None
+        specifier = self._specifier
+        versions = {
+            str(v)
+            for v in specifier.filter(
+                # We turn the version object into a str here because otherwise
+                # when we're debundled but setuptools isn't, Python will see
+                # packaging.version.Version and
+                # pkg_resources._vendor.packaging.version.Version as different
+                # types. This way we'll use a str as a common data interchange
+                # format. If we stop using the pkg_resources provided specifier
+                # and start using our own, we can drop the cast to str().
+                (str(c.version) for c in candidates),
+                prereleases=allow_prereleases,
+            )
+        }
+
+        # Again, converting version to str to deal with debundling.
+        applicable_candidates = [c for c in candidates if str(c.version) in versions]
+
+        filtered_applicable_candidates = filter_unallowed_hashes(
+            candidates=applicable_candidates,
+            hashes=self._hashes,
+            project_name=self._project_name,
+        )
+
+        return sorted(filtered_applicable_candidates, key=self._sort_key)
+
+    def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey:
+        """
+        Function to pass as the `key` argument to a call to sorted() to sort
+        InstallationCandidates by preference.
+
+        Returns a tuple such that tuples sorting as greater using Python's
+        default comparison operator are more preferred.
+
+        The preference is as follows:
+
+        First and foremost, candidates with allowed (matching) hashes are
+        always preferred over candidates without matching hashes. This is
+        because e.g. if the only candidate with an allowed hash is yanked,
+        we still want to use that candidate.
+
+        Second, excepting hash considerations, candidates that have been
+        yanked (in the sense of PEP 592) are always less preferred than
+        candidates that haven't been yanked. Then:
+
+        If not finding wheels, they are sorted by version only.
+        If finding wheels, then the sort order is by version, then:
+          1. existing installs
+          2. wheels ordered via Wheel.support_index_min(self._supported_tags)
+          3. source archives
+        If prefer_binary was set, then all wheels are sorted above sources.
+
+        Note: it was considered to embed this logic into the Link
+              comparison operators, but then different sdist links
+              with the same version would have to be considered equal.
+        """
+        valid_tags = self._supported_tags
+        support_num = len(valid_tags)
+        build_tag: BuildTag = ()
+        binary_preference = 0
+        link = candidate.link
+        if link.is_wheel:
+            # can raise InvalidWheelFilename
+            wheel = Wheel(link.filename)
+            try:
+                pri = -(
+                    wheel.find_most_preferred_tag(
+                        valid_tags, self._wheel_tag_preferences
+                    )
+                )
+            except ValueError:
+                raise UnsupportedWheel(
+                    f"{wheel.filename} is not a supported wheel for this platform. It "
+                    "can't be sorted."
+                )
+            if self._prefer_binary:
+                binary_preference = 1
+            if wheel.build_tag is not None:
+                match = re.match(r"^(\d+)(.*)$", wheel.build_tag)
+                assert match is not None, "guaranteed by filename validation"
+                build_tag_groups = match.groups()
+                build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
+        else:  # sdist
+            pri = -(support_num)
+        has_allowed_hash = int(link.is_hash_allowed(self._hashes))
+        yank_value = -1 * int(link.is_yanked)  # -1 for yanked.
+        return (
+            has_allowed_hash,
+            yank_value,
+            binary_preference,
+            candidate.version,
+            pri,
+            build_tag,
+        )
+
+    def sort_best_candidate(
+        self,
+        candidates: List[InstallationCandidate],
+    ) -> Optional[InstallationCandidate]:
+        """
+        Return the best candidate per the instance's sort order, or None if
+        no candidate is acceptable.
+        """
+        if not candidates:
+            return None
+        best_candidate = max(candidates, key=self._sort_key)
+        return best_candidate
+
+    def compute_best_candidate(
+        self,
+        candidates: List[InstallationCandidate],
+    ) -> BestCandidateResult:
+        """
+        Compute and return a `BestCandidateResult` instance.
+        """
+        applicable_candidates = self.get_applicable_candidates(candidates)
+
+        best_candidate = self.sort_best_candidate(applicable_candidates)
+
+        return BestCandidateResult(
+            candidates,
+            applicable_candidates=applicable_candidates,
+            best_candidate=best_candidate,
+        )
+
+
+class PackageFinder:
+    """This finds packages.
+
+    This is meant to match easy_install's technique for looking for
+    packages, by reading pages and looking for appropriate links.
+    """
+
+    def __init__(
+        self,
+        link_collector: LinkCollector,
+        target_python: TargetPython,
+        allow_yanked: bool,
+        format_control: Optional[FormatControl] = None,
+        candidate_prefs: Optional[CandidatePreferences] = None,
+        ignore_requires_python: Optional[bool] = None,
+    ) -> None:
+        """
+        This constructor is primarily meant to be used by the create() class
+        method and from tests.
+
+        :param format_control: A FormatControl object, used to control
+            the selection of source packages / binary packages when consulting
+            the index and links.
+        :param candidate_prefs: Options to use when creating a
+            CandidateEvaluator object.
+        """
+        if candidate_prefs is None:
+            candidate_prefs = CandidatePreferences()
+
+        format_control = format_control or FormatControl(set(), set())
+
+        self._allow_yanked = allow_yanked
+        self._candidate_prefs = candidate_prefs
+        self._ignore_requires_python = ignore_requires_python
+        self._link_collector = link_collector
+        self._target_python = target_python
+
+        self.format_control = format_control
+
+        # These are boring links that have already been logged somehow.
+        self._logged_links: Set[Tuple[Link, LinkType, str]] = set()
+
+    # Don't include an allow_yanked default value to make sure each call
+    # site considers whether yanked releases are allowed. This also causes
+    # that decision to be made explicit in the calling code, which helps
+    # people when reading the code.
+    @classmethod
+    def create(
+        cls,
+        link_collector: LinkCollector,
+        selection_prefs: SelectionPreferences,
+        target_python: Optional[TargetPython] = None,
+    ) -> "PackageFinder":
+        """Create a PackageFinder.
+
+        :param selection_prefs: The candidate selection preferences, as a
+            SelectionPreferences object.
+        :param target_python: The target Python interpreter to use when
+            checking compatibility. If None (the default), a TargetPython
+            object will be constructed from the running Python.
+        """
+        if target_python is None:
+            target_python = TargetPython()
+
+        candidate_prefs = CandidatePreferences(
+            prefer_binary=selection_prefs.prefer_binary,
+            allow_all_prereleases=selection_prefs.allow_all_prereleases,
+        )
+
+        return cls(
+            candidate_prefs=candidate_prefs,
+            link_collector=link_collector,
+            target_python=target_python,
+            allow_yanked=selection_prefs.allow_yanked,
+            format_control=selection_prefs.format_control,
+            ignore_requires_python=selection_prefs.ignore_requires_python,
+        )
+
+    @property
+    def target_python(self) -> TargetPython:
+        return self._target_python
+
+    @property
+    def search_scope(self) -> SearchScope:
+        return self._link_collector.search_scope
+
+    @search_scope.setter
+    def search_scope(self, search_scope: SearchScope) -> None:
+        self._link_collector.search_scope = search_scope
+
+    @property
+    def find_links(self) -> List[str]:
+        return self._link_collector.find_links
+
+    @property
+    def index_urls(self) -> List[str]:
+        return self.search_scope.index_urls
+
+    @property
+    def trusted_hosts(self) -> Iterable[str]:
+        for host_port in self._link_collector.session.pip_trusted_origins:
+            yield build_netloc(*host_port)
+
+    @property
+    def allow_all_prereleases(self) -> bool:
+        return self._candidate_prefs.allow_all_prereleases
+
+    def set_allow_all_prereleases(self) -> None:
+        self._candidate_prefs.allow_all_prereleases = True
+
+    @property
+    def prefer_binary(self) -> bool:
+        return self._candidate_prefs.prefer_binary
+
+    def set_prefer_binary(self) -> None:
+        self._candidate_prefs.prefer_binary = True
+
+    def requires_python_skipped_reasons(self) -> List[str]:
+        reasons = {
+            detail
+            for _, result, detail in self._logged_links
+            if result == LinkType.requires_python_mismatch
+        }
+        return sorted(reasons)
+
+    def make_link_evaluator(self, project_name: str) -> LinkEvaluator:
+        canonical_name = canonicalize_name(project_name)
+        formats = self.format_control.get_allowed_formats(canonical_name)
+
+        return LinkEvaluator(
+            project_name=project_name,
+            canonical_name=canonical_name,
+            formats=formats,
+            target_python=self._target_python,
+            allow_yanked=self._allow_yanked,
+            ignore_requires_python=self._ignore_requires_python,
+        )
+
+    def _sort_links(self, links: Iterable[Link]) -> List[Link]:
+        """
+        Returns elements of links in order, non-egg links first, egg links
+        second, while eliminating duplicates
+        """
+        eggs, no_eggs = [], []
+        seen: Set[Link] = set()
+        for link in links:
+            if link not in seen:
+                seen.add(link)
+                if link.egg_fragment:
+                    eggs.append(link)
+                else:
+                    no_eggs.append(link)
+        return no_eggs + eggs
+
+    def _log_skipped_link(self, link: Link, result: LinkType, detail: str) -> None:
+        entry = (link, result, detail)
+        if entry not in self._logged_links:
+            # Put the link at the end so the reason is more visible and because
+            # the link string is usually very long.
+            logger.debug("Skipping link: %s: %s", detail, link)
+            self._logged_links.add(entry)
+
+    def get_install_candidate(
+        self, link_evaluator: LinkEvaluator, link: Link
+    ) -> Optional[InstallationCandidate]:
+        """
+        If the link is a candidate for install, convert it to an
+        InstallationCandidate and return it. Otherwise, return None.
+        """
+        result, detail = link_evaluator.evaluate_link(link)
+        if result != LinkType.candidate:
+            self._log_skipped_link(link, result, detail)
+            return None
+
+        return InstallationCandidate(
+            name=link_evaluator.project_name,
+            link=link,
+            version=detail,
+        )
+
+    def evaluate_links(
+        self, link_evaluator: LinkEvaluator, links: Iterable[Link]
+    ) -> List[InstallationCandidate]:
+        """
+        Convert links that are candidates to InstallationCandidate objects.
+        """
+        candidates = []
+        for link in self._sort_links(links):
+            candidate = self.get_install_candidate(link_evaluator, link)
+            if candidate is not None:
+                candidates.append(candidate)
+
+        return candidates
+
+    def process_project_url(
+        self, project_url: Link, link_evaluator: LinkEvaluator
+    ) -> List[InstallationCandidate]:
+        logger.debug(
+            "Fetching project page and analyzing links: %s",
+            project_url,
+        )
+        index_response = self._link_collector.fetch_response(project_url)
+        if index_response is None:
+            return []
+
+        page_links = list(parse_links(index_response))
+
+        with indent_log():
+            package_links = self.evaluate_links(
+                link_evaluator,
+                links=page_links,
+            )
+
+        return package_links
+
+    @functools.lru_cache(maxsize=None)
+    def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]:
+        """Find all available InstallationCandidate for project_name
+
+        This checks index_urls and find_links.
+        All versions found are returned as an InstallationCandidate list.
+
+        See LinkEvaluator.evaluate_link() for details on which files
+        are accepted.
+        """
+        link_evaluator = self.make_link_evaluator(project_name)
+
+        collected_sources = self._link_collector.collect_sources(
+            project_name=project_name,
+            candidates_from_page=functools.partial(
+                self.process_project_url,
+                link_evaluator=link_evaluator,
+            ),
+        )
+
+        page_candidates_it = itertools.chain.from_iterable(
+            source.page_candidates()
+            for sources in collected_sources
+            for source in sources
+            if source is not None
+        )
+        page_candidates = list(page_candidates_it)
+
+        file_links_it = itertools.chain.from_iterable(
+            source.file_links()
+            for sources in collected_sources
+            for source in sources
+            if source is not None
+        )
+        file_candidates = self.evaluate_links(
+            link_evaluator,
+            sorted(file_links_it, reverse=True),
+        )
+
+        if logger.isEnabledFor(logging.DEBUG) and file_candidates:
+            paths = []
+            for candidate in file_candidates:
+                assert candidate.link.url  # we need to have a URL
+                try:
+                    paths.append(candidate.link.file_path)
+                except Exception:
+                    paths.append(candidate.link.url)  # it's not a local file
+
+            logger.debug("Local files found: %s", ", ".join(paths))
+
+        # This is an intentional priority ordering
+        return file_candidates + page_candidates
+
+    def make_candidate_evaluator(
+        self,
+        project_name: str,
+        specifier: Optional[specifiers.BaseSpecifier] = None,
+        hashes: Optional[Hashes] = None,
+    ) -> CandidateEvaluator:
+        """Create a CandidateEvaluator object to use."""
+        candidate_prefs = self._candidate_prefs
+        return CandidateEvaluator.create(
+            project_name=project_name,
+            target_python=self._target_python,
+            prefer_binary=candidate_prefs.prefer_binary,
+            allow_all_prereleases=candidate_prefs.allow_all_prereleases,
+            specifier=specifier,
+            hashes=hashes,
+        )
+
+    @functools.lru_cache(maxsize=None)
+    def find_best_candidate(
+        self,
+        project_name: str,
+        specifier: Optional[specifiers.BaseSpecifier] = None,
+        hashes: Optional[Hashes] = None,
+    ) -> BestCandidateResult:
+        """Find matches for the given project and specifier.
+
+        :param specifier: An optional object implementing `filter`
+            (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
+            versions.
+
+        :return: A `BestCandidateResult` instance.
+        """
+        candidates = self.find_all_candidates(project_name)
+        candidate_evaluator = self.make_candidate_evaluator(
+            project_name=project_name,
+            specifier=specifier,
+            hashes=hashes,
+        )
+        return candidate_evaluator.compute_best_candidate(candidates)
+
+    def find_requirement(
+        self, req: InstallRequirement, upgrade: bool
+    ) -> Optional[InstallationCandidate]:
+        """Try to find a Link matching req
+
+        Expects req, an InstallRequirement, and upgrade, a boolean.
+        Returns an InstallationCandidate if found,
+        raises DistributionNotFound or BestVersionAlreadyInstalled otherwise.
+        """
+        hashes = req.hashes(trust_internet=False)
+        best_candidate_result = self.find_best_candidate(
+            req.name,
+            specifier=req.specifier,
+            hashes=hashes,
+        )
+        best_candidate = best_candidate_result.best_candidate
+
+        installed_version: Optional[_BaseVersion] = None
+        if req.satisfied_by is not None:
+            installed_version = req.satisfied_by.version
+
+        def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str:
+            # This repeated parse_version and str() conversion is needed to
+            # handle different vendoring sources from pip and pkg_resources.
+            # If we stop using the pkg_resources provided specifier and start
+            # using our own, we can drop the cast to str().
+            return (
+                ", ".join(
+                    sorted(
+                        {str(c.version) for c in cand_iter},
+                        key=parse_version,
+                    )
+                )
+                or "none"
+            )
+
+        if installed_version is None and best_candidate is None:
+            logger.critical(
+                "Could not find a version that satisfies the requirement %s "
+                "(from versions: %s)",
+                req,
+                _format_versions(best_candidate_result.iter_all()),
+            )
+
+            raise DistributionNotFound(f"No matching distribution found for {req}")
+
+        def _should_install_candidate(
+            candidate: Optional[InstallationCandidate],
+        ) -> "TypeGuard[InstallationCandidate]":
+            if installed_version is None:
+                return True
+            if best_candidate is None:
+                return False
+            return best_candidate.version > installed_version
+
+        if not upgrade and installed_version is not None:
+            if _should_install_candidate(best_candidate):
+                logger.debug(
+                    "Existing installed version (%s) satisfies requirement "
+                    "(most up-to-date version is %s)",
+                    installed_version,
+                    best_candidate.version,
+                )
+            else:
+                logger.debug(
+                    "Existing installed version (%s) is most up-to-date and "
+                    "satisfies requirement",
+                    installed_version,
+                )
+            return None
+
+        if _should_install_candidate(best_candidate):
+            logger.debug(
+                "Using version %s (newest of versions: %s)",
+                best_candidate.version,
+                _format_versions(best_candidate_result.iter_applicable()),
+            )
+            return best_candidate
+
+        # We have an existing version, and it's the best version
+        logger.debug(
+            "Installed version (%s) is most up-to-date (past versions: %s)",
+            installed_version,
+            _format_versions(best_candidate_result.iter_applicable()),
+        )
+        raise BestVersionAlreadyInstalled
+
+
+def _find_name_version_sep(fragment: str, canonical_name: str) -> int:
+    """Find the separator's index based on the package's canonical name.
+
+    :param fragment: A <package>+<version> filename "fragment" (stem) or
+        egg fragment.
+    :param canonical_name: The package's canonical name.
+
+    This function is needed since the canonicalized name does not necessarily
+    have the same length as the egg info's name part. An example::
+
+    >>> fragment = 'foo__bar-1.0'
+    >>> canonical_name = 'foo-bar'
+    >>> _find_name_version_sep(fragment, canonical_name)
+    8
+    """
+    # Project name and version must be separated by one single dash. Find all
+    # occurrences of dashes; if the string in front of it matches the canonical
+    # name, this is the one separating the name and version parts.
+    for i, c in enumerate(fragment):
+        if c != "-":
+            continue
+        if canonicalize_name(fragment[:i]) == canonical_name:
+            return i
+    raise ValueError(f"{fragment} does not match {canonical_name}")
+
+
+def _extract_version_from_fragment(fragment: str, canonical_name: str) -> Optional[str]:
+    """Parse the version string from a + filename
+    "fragment" (stem) or egg fragment.
+
+    :param fragment: The string to parse. E.g. foo-2.1
+    :param canonical_name: The canonicalized name of the package this
+        belongs to.
+    """
+    try:
+        version_start = _find_name_version_sep(fragment, canonical_name) + 1
+    except ValueError:
+        return None
+    version = fragment[version_start:]
+    if not version:
+        return None
+    return version
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/index/sources.py b/.venv/lib/python3.12/site-packages/pip/_internal/index/sources.py
new file mode 100644
index 0000000..f4626d7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/index/sources.py
@@ -0,0 +1,285 @@
+import logging
+import mimetypes
+import os
+from collections import defaultdict
+from typing import Callable, Dict, Iterable, List, Optional, Tuple
+
+from pip._vendor.packaging.utils import (
+    InvalidSdistFilename,
+    InvalidVersion,
+    InvalidWheelFilename,
+    canonicalize_name,
+    parse_sdist_filename,
+    parse_wheel_filename,
+)
+
+from pip._internal.models.candidate import InstallationCandidate
+from pip._internal.models.link import Link
+from pip._internal.utils.urls import path_to_url, url_to_path
+from pip._internal.vcs import is_url
+
+logger = logging.getLogger(__name__)
+
+FoundCandidates = Iterable[InstallationCandidate]
+FoundLinks = Iterable[Link]
+CandidatesFromPage = Callable[[Link], Iterable[InstallationCandidate]]
+PageValidator = Callable[[Link], bool]
+
+
+class LinkSource:
+    @property
+    def link(self) -> Optional[Link]:
+        """Returns the underlying link, if there's one."""
+        raise NotImplementedError()
+
+    def page_candidates(self) -> FoundCandidates:
+        """Candidates found by parsing an archive listing HTML file."""
+        raise NotImplementedError()
+
+    def file_links(self) -> FoundLinks:
+        """Links found by specifying archives directly."""
+        raise NotImplementedError()
+
+
+def _is_html_file(file_url: str) -> bool:
+    return mimetypes.guess_type(file_url, strict=False)[0] == "text/html"
+
+
+class _FlatDirectoryToUrls:
+    """Scans directory and caches results"""
+
+    def __init__(self, path: str) -> None:
+        self._path = path
+        self._page_candidates: List[str] = []
+        self._project_name_to_urls: Dict[str, List[str]] = defaultdict(list)
+        self._scanned_directory = False
+
+    def _scan_directory(self) -> None:
+        """Scans directory once and populates both page_candidates
+        and project_name_to_urls at the same time
+        """
+        for entry in os.scandir(self._path):
+            url = path_to_url(entry.path)
+            if _is_html_file(url):
+                self._page_candidates.append(url)
+                continue
+
+            # File must have a valid wheel or sdist name,
+            # otherwise not worth considering as a package
+            try:
+                project_filename = parse_wheel_filename(entry.name)[0]
+            except (InvalidWheelFilename, InvalidVersion):
+                try:
+                    project_filename = parse_sdist_filename(entry.name)[0]
+                except (InvalidSdistFilename, InvalidVersion):
+                    continue
+
+            self._project_name_to_urls[project_filename].append(url)
+        self._scanned_directory = True
+
+    @property
+    def page_candidates(self) -> List[str]:
+        if not self._scanned_directory:
+            self._scan_directory()
+
+        return self._page_candidates
+
+    @property
+    def project_name_to_urls(self) -> Dict[str, List[str]]:
+        if not self._scanned_directory:
+            self._scan_directory()
+
+        return self._project_name_to_urls
+
+
+class _FlatDirectorySource(LinkSource):
+    """Link source specified by ``--find-links=``.
+
+    This looks at the content of the directory, and returns:
+
+    * ``page_candidates``: Links listed on each HTML file in the directory.
+    * ``file_candidates``: Archives in the directory.
+    """
+
+    _paths_to_urls: Dict[str, _FlatDirectoryToUrls] = {}
+
+    def __init__(
+        self,
+        candidates_from_page: CandidatesFromPage,
+        path: str,
+        project_name: str,
+    ) -> None:
+        self._candidates_from_page = candidates_from_page
+        self._project_name = canonicalize_name(project_name)
+
+        # Get existing instance of _FlatDirectoryToUrls if it exists
+        if path in self._paths_to_urls:
+            self._path_to_urls = self._paths_to_urls[path]
+        else:
+            self._path_to_urls = _FlatDirectoryToUrls(path=path)
+            self._paths_to_urls[path] = self._path_to_urls
+
+    @property
+    def link(self) -> Optional[Link]:
+        return None
+
+    def page_candidates(self) -> FoundCandidates:
+        for url in self._path_to_urls.page_candidates:
+            yield from self._candidates_from_page(Link(url))
+
+    def file_links(self) -> FoundLinks:
+        for url in self._path_to_urls.project_name_to_urls[self._project_name]:
+            yield Link(url)
+
+
+class _LocalFileSource(LinkSource):
+    """``--find-links=`` or ``--[extra-]index-url=``.
+
+    If a URL is supplied, it must be a ``file:`` URL. If a path is supplied to
+    the option, it is converted to a URL first. This returns:
+
+    * ``page_candidates``: Links listed on an HTML file.
+    * ``file_candidates``: The non-HTML file.
+    """
+
+    def __init__(
+        self,
+        candidates_from_page: CandidatesFromPage,
+        link: Link,
+    ) -> None:
+        self._candidates_from_page = candidates_from_page
+        self._link = link
+
+    @property
+    def link(self) -> Optional[Link]:
+        return self._link
+
+    def page_candidates(self) -> FoundCandidates:
+        if not _is_html_file(self._link.url):
+            return
+        yield from self._candidates_from_page(self._link)
+
+    def file_links(self) -> FoundLinks:
+        if _is_html_file(self._link.url):
+            return
+        yield self._link
+
+
+class _RemoteFileSource(LinkSource):
+    """``--find-links=`` or ``--[extra-]index-url=``.
+
+    This returns:
+
+    * ``page_candidates``: Links listed on an HTML file.
+    * ``file_candidates``: The non-HTML file.
+    """
+
+    def __init__(
+        self,
+        candidates_from_page: CandidatesFromPage,
+        page_validator: PageValidator,
+        link: Link,
+    ) -> None:
+        self._candidates_from_page = candidates_from_page
+        self._page_validator = page_validator
+        self._link = link
+
+    @property
+    def link(self) -> Optional[Link]:
+        return self._link
+
+    def page_candidates(self) -> FoundCandidates:
+        if not self._page_validator(self._link):
+            return
+        yield from self._candidates_from_page(self._link)
+
+    def file_links(self) -> FoundLinks:
+        yield self._link
+
+
+class _IndexDirectorySource(LinkSource):
+    """``--[extra-]index-url=``.
+
+    This is treated like a remote URL; ``candidates_from_page`` contains logic
+    for this by appending ``index.html`` to the link.
+    """
+
+    def __init__(
+        self,
+        candidates_from_page: CandidatesFromPage,
+        link: Link,
+    ) -> None:
+        self._candidates_from_page = candidates_from_page
+        self._link = link
+
+    @property
+    def link(self) -> Optional[Link]:
+        return self._link
+
+    def page_candidates(self) -> FoundCandidates:
+        yield from self._candidates_from_page(self._link)
+
+    def file_links(self) -> FoundLinks:
+        return ()
+
+
+def build_source(
+    location: str,
+    *,
+    candidates_from_page: CandidatesFromPage,
+    page_validator: PageValidator,
+    expand_dir: bool,
+    cache_link_parsing: bool,
+    project_name: str,
+) -> Tuple[Optional[str], Optional[LinkSource]]:
+    path: Optional[str] = None
+    url: Optional[str] = None
+    if os.path.exists(location):  # Is a local path.
+        url = path_to_url(location)
+        path = location
+    elif location.startswith("file:"):  # A file: URL.
+        url = location
+        path = url_to_path(location)
+    elif is_url(location):
+        url = location
+
+    if url is None:
+        msg = (
+            "Location '%s' is ignored: "
+            "it is either a non-existing path or lacks a specific scheme."
+        )
+        logger.warning(msg, location)
+        return (None, None)
+
+    if path is None:
+        source: LinkSource = _RemoteFileSource(
+            candidates_from_page=candidates_from_page,
+            page_validator=page_validator,
+            link=Link(url, cache_link_parsing=cache_link_parsing),
+        )
+        return (url, source)
+
+    if os.path.isdir(path):
+        if expand_dir:
+            source = _FlatDirectorySource(
+                candidates_from_page=candidates_from_page,
+                path=path,
+                project_name=project_name,
+            )
+        else:
+            source = _IndexDirectorySource(
+                candidates_from_page=candidates_from_page,
+                link=Link(url, cache_link_parsing=cache_link_parsing),
+            )
+        return (url, source)
+    elif os.path.isfile(path):
+        source = _LocalFileSource(
+            candidates_from_page=candidates_from_page,
+            link=Link(url, cache_link_parsing=cache_link_parsing),
+        )
+        return (url, source)
+    logger.warning(
+        "Location '%s' is ignored: it is neither a file nor a directory.",
+        location,
+    )
+    return (url, None)
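``build_source()`` is the single dispatch point: an existing local path becomes a ``file:`` URL plus path, a ``file:`` URL is converted back to a path, and anything else must already be a URL; the resulting path/URL pair then selects one of the four LinkSource classes above. A rough usage sketch, with placeholder callables standing in for pip's real page parser and validator (those lambdas, the directory, and the project name are assumptions for the example, not pip API guarantees):

from pip._internal.index.sources import build_source

url, source = build_source(
    "/tmp/wheelhouse",                     # e.g. a --find-links directory (assumed to exist)
    candidates_from_page=lambda link: [],  # placeholder: would parse an index page
    page_validator=lambda link: True,      # placeholder: would vet the page's origin
    expand_dir=True,                       # --find-links directories are expanded
    cache_link_parsing=False,
    project_name="example-project",
)
if source is not None:
    print(url)
    print(list(source.file_links()))       # archives found for the project, as Link objects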
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/locations/__init__.py b/.venv/lib/python3.12/site-packages/pip/_internal/locations/__init__.py
new file mode 100644
index 0000000..d54bc63
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/locations/__init__.py
@@ -0,0 +1,467 @@
+import functools
+import logging
+import os
+import pathlib
+import sys
+import sysconfig
+from typing import Any, Dict, Generator, Optional, Tuple
+
+from pip._internal.models.scheme import SCHEME_KEYS, Scheme
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+from . import _sysconfig
+from .base import (
+    USER_CACHE_DIR,
+    get_major_minor_version,
+    get_src_prefix,
+    is_osx_framework,
+    site_packages,
+    user_site,
+)
+
+__all__ = [
+    "USER_CACHE_DIR",
+    "get_bin_prefix",
+    "get_bin_user",
+    "get_major_minor_version",
+    "get_platlib",
+    "get_purelib",
+    "get_scheme",
+    "get_src_prefix",
+    "site_packages",
+    "user_site",
+]
+
+
+logger = logging.getLogger(__name__)
+
+
+_PLATLIBDIR: str = getattr(sys, "platlibdir", "lib")
+
+_USE_SYSCONFIG_DEFAULT = sys.version_info >= (3, 10)
+
+
+def _should_use_sysconfig() -> bool:
+    """This function determines the value of _USE_SYSCONFIG.
+
+    By default, pip uses sysconfig on Python 3.10+.
+    But Python distributors can override this decision by setting:
+        sysconfig._PIP_USE_SYSCONFIG = True / False
+    Rationale in https://github.com/pypa/pip/issues/10647
+
+    This is a function for testability, but should be constant during any one
+    run.
+    """
+    return bool(getattr(sysconfig, "_PIP_USE_SYSCONFIG", _USE_SYSCONFIG_DEFAULT))
+
+
+_USE_SYSCONFIG = _should_use_sysconfig()
+
+if not _USE_SYSCONFIG:
+    # Import distutils lazily to avoid deprecation warnings,
+    # but import it soon enough that it is in memory and available during
+    # a pip reinstall.
+    from . import _distutils
+
+# Be noisy about incompatibilities if this platform "should" be using
+# sysconfig, but is explicitly opting out and using distutils instead.
+if _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG:
+    _MISMATCH_LEVEL = logging.WARNING
+else:
+    _MISMATCH_LEVEL = logging.DEBUG
+
+
+def _looks_like_bpo_44860() -> bool:
+    """The resolution to bpo-44860 will change this incorrect platlib.
+
+    See <https://bugs.python.org/issue44860>.
+    """
+    from distutils.command.install import INSTALL_SCHEMES
+
+    try:
+        unix_user_platlib = INSTALL_SCHEMES["unix_user"]["platlib"]
+    except KeyError:
+        return False
+    return unix_user_platlib == "$usersite"
+
+
+def _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool:
+    platlib = scheme["platlib"]
+    if "/$platlibdir/" in platlib:
+        platlib = platlib.replace("/$platlibdir/", f"/{_PLATLIBDIR}/")
+    if "/lib64/" not in platlib:
+        return False
+    unpatched = platlib.replace("/lib64/", "/lib/")
+    return unpatched.replace("$platbase/", "$base/") == scheme["purelib"]
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_red_hat_lib() -> bool:
+    """Red Hat patches platlib in unix_prefix and unix_home, but not purelib.
+
+    This is the only way I can see to tell a Red Hat-patched Python.
+    """
+    from distutils.command.install import INSTALL_SCHEMES
+
+    return all(
+        k in INSTALL_SCHEMES
+        and _looks_like_red_hat_patched_platlib_purelib(INSTALL_SCHEMES[k])
+        for k in ("unix_prefix", "unix_home")
+    )
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_debian_scheme() -> bool:
+    """Debian adds two additional schemes."""
+    from distutils.command.install import INSTALL_SCHEMES
+
+    return "deb_system" in INSTALL_SCHEMES and "unix_local" in INSTALL_SCHEMES
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_red_hat_scheme() -> bool:
+    """Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``.
+
+    Red Hat's ``00251-change-user-install-location.patch`` changes the install
+    command's ``prefix`` and ``exec_prefix`` to append ``"/local"``. This is
+    (fortunately?) done quite unconditionally, so we create a default command
+    object without any configuration to detect this.
+    """
+    from distutils.command.install import install
+    from distutils.dist import Distribution
+
+    cmd: Any = install(Distribution())
+    cmd.finalize_options()
+    return (
+        cmd.exec_prefix == f"{os.path.normpath(sys.exec_prefix)}/local"
+        and cmd.prefix == f"{os.path.normpath(sys.prefix)}/local"
+    )
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_slackware_scheme() -> bool:
+    """Slackware patches sysconfig but fails to patch distutils and site.
+
+    Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib
+    path, but does not do the same to the site module.
+    """
+    if user_site is None:  # User-site not available.
+        return False
+    try:
+        paths = sysconfig.get_paths(scheme="posix_user", expand=False)
+    except KeyError:  # User-site not available.
+        return False
+    return "/lib64/" in paths["purelib"] and "/lib64/" not in user_site
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_msys2_mingw_scheme() -> bool:
+    """MSYS2 patches distutils and sysconfig to use a UNIX-like scheme.
+
+    However, MSYS2 incorrectly patches the sysconfig ``nt`` scheme. The fix is
+    likely going to be included in their 3.10 release, so we ignore the warning.
+    See msys2/MINGW-packages#9319.
+
+    MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase,
+    and is missing the final ``"site-packages"``.
+    """
+    paths = sysconfig.get_paths("nt", expand=False)
+    return all(
+        "Lib" not in p and "lib" in p and not p.endswith("site-packages")
+        for p in (paths[key] for key in ("platlib", "purelib"))
+    )
+
+
+def _fix_abiflags(parts: Tuple[str]) -> Generator[str, None, None]:
+    ldversion = sysconfig.get_config_var("LDVERSION")
+    abiflags = getattr(sys, "abiflags", None)
+
+    # LDVERSION does not end with sys.abiflags. Just return the path unchanged.
+    if not ldversion or not abiflags or not ldversion.endswith(abiflags):
+        yield from parts
+        return
+
+    # Strip sys.abiflags from LDVERSION-based path components.
+    for part in parts:
+        if part.endswith(ldversion):
+            part = part[: (0 - len(abiflags))]
+        yield part
+
+
+@functools.lru_cache(maxsize=None)
+def _warn_mismatched(old: pathlib.Path, new: pathlib.Path, *, key: str) -> None:
+    issue_url = "https://github.com/pypa/pip/issues/10151"
+    message = (
+        "Value for %s does not match. Please report this to <%s>"
+        "\ndistutils: %s"
+        "\nsysconfig: %s"
+    )
+    logger.log(_MISMATCH_LEVEL, message, key, issue_url, old, new)
+
+
+def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool:
+    if old == new:
+        return False
+    _warn_mismatched(old, new, key=key)
+    return True
+
+
+@functools.lru_cache(maxsize=None)
+def _log_context(
+    *,
+    user: bool = False,
+    home: Optional[str] = None,
+    root: Optional[str] = None,
+    prefix: Optional[str] = None,
+) -> None:
+    parts = [
+        "Additional context:",
+        "user = %r",
+        "home = %r",
+        "root = %r",
+        "prefix = %r",
+    ]
+
+    logger.log(_MISMATCH_LEVEL, "\n".join(parts), user, home, root, prefix)
+
+
+def get_scheme(
+    dist_name: str,
+    user: bool = False,
+    home: Optional[str] = None,
+    root: Optional[str] = None,
+    isolated: bool = False,
+    prefix: Optional[str] = None,
+) -> Scheme:
+    new = _sysconfig.get_scheme(
+        dist_name,
+        user=user,
+        home=home,
+        root=root,
+        isolated=isolated,
+        prefix=prefix,
+    )
+    if _USE_SYSCONFIG:
+        return new
+
+    old = _distutils.get_scheme(
+        dist_name,
+        user=user,
+        home=home,
+        root=root,
+        isolated=isolated,
+        prefix=prefix,
+    )
+
+    warning_contexts = []
+    for k in SCHEME_KEYS:
+        old_v = pathlib.Path(getattr(old, k))
+        new_v = pathlib.Path(getattr(new, k))
+
+        if old_v == new_v:
+            continue
+
+        # distutils incorrectly put PyPy packages under ``site-packages/python``
+        # in the ``posix_home`` scheme, but PyPy devs said they expect the
+        # directory name to be ``pypy`` instead. So we treat this as a bug fix
+        # and not warn about it. See bpo-43307 and python/cpython#24628.
+        skip_pypy_special_case = (
+            sys.implementation.name == "pypy"
+            and home is not None
+            and k in ("platlib", "purelib")
+            and old_v.parent == new_v.parent
+            and old_v.name.startswith("python")
+            and new_v.name.startswith("pypy")
+        )
+        if skip_pypy_special_case:
+            continue
+
+        # sysconfig's ``osx_framework_user`` does not include ``pythonX.Y`` in
+        # the ``include`` value, but distutils's ``headers`` does. We'll let
+        # CPython decide whether this is a bug or feature. See bpo-43948.
+        skip_osx_framework_user_special_case = (
+            user
+            and is_osx_framework()
+            and k == "headers"
+            and old_v.parent.parent == new_v.parent
+            and old_v.parent.name.startswith("python")
+        )
+        if skip_osx_framework_user_special_case:
+            continue
+
+        # On Red Hat and derived Linux distributions, distutils is patched to
+        # use "lib64" instead of "lib" for platlib.
+        if k == "platlib" and _looks_like_red_hat_lib():
+            continue
+
+        # On Python 3.9+, sysconfig's posix_user scheme sets platlib against
+        # sys.platlibdir, but distutils's unix_user incorrectly continues
+        # using the same $usersite for both platlib and purelib. This creates a
+        # mismatch when sys.platlibdir is not "lib".
+        skip_bpo_44860 = (
+            user
+            and k == "platlib"
+            and not WINDOWS
+            and sys.version_info >= (3, 9)
+            and _PLATLIBDIR != "lib"
+            and _looks_like_bpo_44860()
+        )
+        if skip_bpo_44860:
+            continue
+
+        # Slackware incorrectly patches posix_user to use lib64 instead of lib,
+        # but not usersite to match the location.
+        skip_slackware_user_scheme = (
+            user
+            and k in ("platlib", "purelib")
+            and not WINDOWS
+            and _looks_like_slackware_scheme()
+        )
+        if skip_slackware_user_scheme:
+            continue
+
+        # Both Debian and Red Hat patch Python to place the system site under
+        # /usr/local instead of /usr. Debian also places lib in dist-packages
+        # instead of site-packages, but the /usr/local check should cover it.
+        skip_linux_system_special_case = (
+            not (user or home or prefix or running_under_virtualenv())
+            and old_v.parts[1:3] == ("usr", "local")
+            and len(new_v.parts) > 1
+            and new_v.parts[1] == "usr"
+            and (len(new_v.parts) < 3 or new_v.parts[2] != "local")
+            and (_looks_like_red_hat_scheme() or _looks_like_debian_scheme())
+        )
+        if skip_linux_system_special_case:
+            continue
+
+        # On Python 3.7 and earlier, sysconfig does not include sys.abiflags in
+        # the "pythonX.Y" part of the path, but distutils does.
+        skip_sysconfig_abiflag_bug = (
+            sys.version_info < (3, 8)
+            and not WINDOWS
+            and k in ("headers", "platlib", "purelib")
+            and tuple(_fix_abiflags(old_v.parts)) == new_v.parts
+        )
+        if skip_sysconfig_abiflag_bug:
+            continue
+
+        # MSYS2 MINGW's sysconfig patch does not include the "site-packages"
+        # part of the path. This is incorrect and will be fixed in MSYS.
+        skip_msys2_mingw_bug = (
+            WINDOWS and k in ("platlib", "purelib") and _looks_like_msys2_mingw_scheme()
+        )
+        if skip_msys2_mingw_bug:
+            continue
+
+        # CPython's POSIX install script invokes pip (via ensurepip) against the
+        # interpreter located in the source tree, not the install site. This
+        # triggers special logic in sysconfig that's not present in distutils.
+        # https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194
+        skip_cpython_build = (
+            sysconfig.is_python_build(check_home=True)
+            and not WINDOWS
+            and k in ("headers", "include", "platinclude")
+        )
+        if skip_cpython_build:
+            continue
+
+        warning_contexts.append((old_v, new_v, f"scheme.{k}"))
+
+    if not warning_contexts:
+        return old
+
+    # Check if this path mismatch is caused by distutils config files. Those
+    # files will no longer work once we switch to sysconfig, so this raises a
+    # deprecation message for them.
+    default_old = _distutils.distutils_scheme(
+        dist_name,
+        user,
+        home,
+        root,
+        isolated,
+        prefix,
+        ignore_config_files=True,
+    )
+    if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS):
+        deprecated(
+            reason=(
+                "Configuring installation scheme with distutils config files "
+                "is deprecated and will no longer work in the near future. If you "
+                "are using a Homebrew or Linuxbrew Python, please see discussion "
+                "at https://github.com/Homebrew/homebrew-core/issues/76621"
+            ),
+            replacement=None,
+            gone_in=None,
+        )
+        return old
+
+    # Post warnings about this mismatch so user can report them back.
+    for old_v, new_v, key in warning_contexts:
+        _warn_mismatched(old_v, new_v, key=key)
+    _log_context(user=user, home=home, root=root, prefix=prefix)
+
+    return old
+
+
+def get_bin_prefix() -> str:
+    new = _sysconfig.get_bin_prefix()
+    if _USE_SYSCONFIG:
+        return new
+
+    old = _distutils.get_bin_prefix()
+    if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"):
+        _log_context()
+    return old
+
+
+def get_bin_user() -> str:
+    return _sysconfig.get_scheme("", user=True).scripts
+
+
+def _looks_like_deb_system_dist_packages(value: str) -> bool:
+    """Check if the value is Debian's APT-controlled dist-packages.
+
+    Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns the
+    default package path controlled by APT, but does not patch ``sysconfig`` to
+    do the same. This is similar to the bug worked around in ``get_scheme()``,
+    but here the default is ``deb_system`` instead of ``unix_local``. Ultimately
+    we can't do anything about this Debian bug, and this detection allows us to
+    skip the warning when needed.
+    """
+    if not _looks_like_debian_scheme():
+        return False
+    if value == "/usr/lib/python3/dist-packages":
+        return True
+    return False
+
+
+def get_purelib() -> str:
+    """Return the default pure-Python lib location."""
+    new = _sysconfig.get_purelib()
+    if _USE_SYSCONFIG:
+        return new
+
+    old = _distutils.get_purelib()
+    if _looks_like_deb_system_dist_packages(old):
+        return old
+    if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"):
+        _log_context()
+    return old
+
+
+def get_platlib() -> str:
+    """Return the default platform-shared lib location."""
+    new = _sysconfig.get_platlib()
+    if _USE_SYSCONFIG:
+        return new
+
+    from . import _distutils
+
+    old = _distutils.get_platlib()
+    if _looks_like_deb_system_dist_packages(old):
+        return old
+    if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"):
+        _log_context()
+    return old
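In practice this module boils down to: compute the scheme with sysconfig, and on older interpreters cross-check it against distutils, warning about unexplained mismatches. A small sketch of querying it the way pip does internally (pip._internal.locations is not a public API, so treat this as a debugging aid rather than something to rely on; "example-project" is an arbitrary name):

from pip._internal.locations import get_scheme, get_purelib, get_bin_prefix

scheme = get_scheme("example-project")   # default "prefix" scheme for this interpreter
print(scheme.purelib)                    # where pure-Python modules would be installed
print(scheme.scripts)                    # where console scripts would be installed
print(get_purelib())                     # module-level shortcut for the purelib path
print(get_bin_prefix())                  # bin/Scripts directory for the current prefix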
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/locations/_distutils.py b/.venv/lib/python3.12/site-packages/pip/_internal/locations/_distutils.py
new file mode 100644
index 0000000..0e18c6e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/locations/_distutils.py
@@ -0,0 +1,172 @@
+"""Locations where we look for configs, install stuff, etc"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+# If pip's going to use distutils, it should not be using the copy that setuptools
+# might have injected into the environment. This is done by removing the injected
+# shim, if it's injected.
+#
+# See https://github.com/pypa/pip/issues/8761 for the original discussion and
+# rationale for why this is done within pip.
+try:
+    __import__("_distutils_hack").remove_shim()
+except (ImportError, AttributeError):
+    pass
+
+import logging
+import os
+import sys
+from distutils.cmd import Command as DistutilsCommand
+from distutils.command.install import SCHEME_KEYS
+from distutils.command.install import install as distutils_install_command
+from distutils.sysconfig import get_python_lib
+from typing import Dict, List, Optional, Union, cast
+
+from pip._internal.models.scheme import Scheme
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+from .base import get_major_minor_version
+
+logger = logging.getLogger(__name__)
+
+
+def distutils_scheme(
+    dist_name: str,
+    user: bool = False,
+    home: Optional[str] = None,
+    root: Optional[str] = None,
+    isolated: bool = False,
+    prefix: Optional[str] = None,
+    *,
+    ignore_config_files: bool = False,
+) -> Dict[str, str]:
+    """
+    Return a distutils install scheme
+    """
+    from distutils.dist import Distribution
+
+    dist_args: Dict[str, Union[str, List[str]]] = {"name": dist_name}
+    if isolated:
+        dist_args["script_args"] = ["--no-user-cfg"]
+
+    d = Distribution(dist_args)
+    if not ignore_config_files:
+        try:
+            d.parse_config_files()
+        except UnicodeDecodeError:
+            paths = d.find_config_files()
+            logger.warning(
+                "Ignore distutils configs in %s due to encoding errors.",
+                ", ".join(os.path.basename(p) for p in paths),
+            )
+    obj: Optional[DistutilsCommand] = None
+    obj = d.get_command_obj("install", create=True)
+    assert obj is not None
+    i = cast(distutils_install_command, obj)
+    # NOTE: setting user or home has the side-effect of creating the home dir
+    # or user base for installations during finalize_options().
+    # Ideally, we'd prefer a scheme class that has no side-effects.
+    assert not (user and prefix), f"user={user} prefix={prefix}"
+    assert not (home and prefix), f"home={home} prefix={prefix}"
+    i.user = user or i.user
+    if user or home:
+        i.prefix = ""
+    i.prefix = prefix or i.prefix
+    i.home = home or i.home
+    i.root = root or i.root
+    i.finalize_options()
+
+    scheme = {}
+    for key in SCHEME_KEYS:
+        scheme[key] = getattr(i, "install_" + key)
+
+    # install_lib specified in setup.cfg should install *everything*
+    # into there (i.e. it takes precedence over both purelib and
+    # platlib).  Note, i.install_lib is *always* set after
+    # finalize_options(); we only want to override here if the user
+    # has explicitly requested it hence going back to the config
+    if "install_lib" in d.get_option_dict("install"):
+        scheme.update({"purelib": i.install_lib, "platlib": i.install_lib})
+
+    if running_under_virtualenv():
+        if home:
+            prefix = home
+        elif user:
+            prefix = i.install_userbase
+        else:
+            prefix = i.prefix
+        scheme["headers"] = os.path.join(
+            prefix,
+            "include",
+            "site",
+            f"python{get_major_minor_version()}",
+            dist_name,
+        )
+
+        if root is not None:
+            path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1]
+            scheme["headers"] = os.path.join(root, path_no_drive[1:])
+
+    return scheme
+
+
+def get_scheme(
+    dist_name: str,
+    user: bool = False,
+    home: Optional[str] = None,
+    root: Optional[str] = None,
+    isolated: bool = False,
+    prefix: Optional[str] = None,
+) -> Scheme:
+    """
+    Get the "scheme" corresponding to the input parameters. The distutils
+    documentation provides the context for the available schemes:
+    https://docs.python.org/3/install/index.html#alternate-installation
+
+    :param dist_name: the name of the package to retrieve the scheme for, used
+        in the headers scheme path
+    :param user: indicates to use the "user" scheme
+    :param home: indicates to use the "home" scheme and provides the base
+        directory for the same
+    :param root: root under which other directories are re-based
+    :param isolated: equivalent to --no-user-cfg, i.e. do not consider
+        ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for
+        scheme paths
+    :param prefix: indicates to use the "prefix" scheme and provides the
+        base directory for the same
+    """
+    scheme = distutils_scheme(dist_name, user, home, root, isolated, prefix)
+    return Scheme(
+        platlib=scheme["platlib"],
+        purelib=scheme["purelib"],
+        headers=scheme["headers"],
+        scripts=scheme["scripts"],
+        data=scheme["data"],
+    )
+
+
+def get_bin_prefix() -> str:
+    # XXX: In old virtualenv versions, sys.prefix can contain '..' components,
+    # so we need to call normpath to eliminate them.
+    prefix = os.path.normpath(sys.prefix)
+    if WINDOWS:
+        bin_py = os.path.join(prefix, "Scripts")
+        # buildout uses 'bin' on Windows too?
+        if not os.path.exists(bin_py):
+            bin_py = os.path.join(prefix, "bin")
+        return bin_py
+    # Forcing to use /usr/local/bin for standard macOS framework installs
+    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
+    if sys.platform[:6] == "darwin" and prefix[:16] == "/System/Library/":
+        return "/usr/local/bin"
+    return os.path.join(prefix, "bin")
+
+
+def get_purelib() -> str:
+    return get_python_lib(plat_specific=False)
+
+
+def get_platlib() -> str:
+    return get_python_lib(plat_specific=True)
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/locations/_sysconfig.py b/.venv/lib/python3.12/site-packages/pip/_internal/locations/_sysconfig.py
new file mode 100644
index 0000000..97aef1f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/locations/_sysconfig.py
@@ -0,0 +1,213 @@
+import logging
+import os
+import sys
+import sysconfig
+import typing
+
+from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationInvalid
+from pip._internal.models.scheme import SCHEME_KEYS, Scheme
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+from .base import change_root, get_major_minor_version, is_osx_framework
+
+logger = logging.getLogger(__name__)
+
+
+# Notes on _infer_* functions.
+# Unfortunately ``get_default_scheme()`` didn't exist before 3.10, so there's no
+# way to ask things like "what is the '_prefix' scheme on this platform". These
+# functions try to answer that with some heuristics while accounting for ad-hoc
+# platforms not covered by CPython's default sysconfig implementation. If the
+# ad-hoc implementation does not fully implement sysconfig, we'll fall back to
+# a POSIX scheme.
+
+_AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names())
+
+_PREFERRED_SCHEME_API = getattr(sysconfig, "get_preferred_scheme", None)
+
+
+def _should_use_osx_framework_prefix() -> bool:
+    """Check for Apple's ``osx_framework_library`` scheme.
+
+    Python distributed by Apple's Command Line Tools has this special scheme
+    that's used when:
+
+    * This is a framework build.
+    * We are installing into the system prefix.
+
+    This does not account for ``pip install --prefix`` (also means we're not
+    installing to the system prefix), which should use ``posix_prefix``, but
+    logic here means ``_infer_prefix()`` outputs ``osx_framework_library``. But
+    since ``prefix`` is not available for ``sysconfig.get_default_scheme()``,
+    which is the stdlib replacement for ``_infer_prefix()``, presumably Apple
+    wouldn't be able to magically switch between ``osx_framework_library`` and
+    ``posix_prefix``. ``_infer_prefix()`` returning ``osx_framework_library``
+    means its behavior is consistent whether we use the stdlib implementation
+    or our own, and we deal with this special case in ``get_scheme()`` instead.
+    """
+    return (
+        "osx_framework_library" in _AVAILABLE_SCHEMES
+        and not running_under_virtualenv()
+        and is_osx_framework()
+    )
+
+
+def _infer_prefix() -> str:
+    """Try to find a prefix scheme for the current platform.
+
+    This tries:
+
+    * A special ``osx_framework_library`` for Python distributed by Apple's
+      Command Line Tools, when not running in a virtual environment.
+    * Implementation + OS, used by PyPy on Windows (``pypy_nt``).
+    * Implementation without OS, used by PyPy on POSIX (``pypy``).
+    * OS + "prefix", used by CPython on POSIX (``posix_prefix``).
+    * Just the OS name, used by CPython on Windows (``nt``).
+
+    If none of the above works, fall back to ``posix_prefix``.
+    """
+    if _PREFERRED_SCHEME_API:
+        return _PREFERRED_SCHEME_API("prefix")
+    if _should_use_osx_framework_prefix():
+        return "osx_framework_library"
+    implementation_suffixed = f"{sys.implementation.name}_{os.name}"
+    if implementation_suffixed in _AVAILABLE_SCHEMES:
+        return implementation_suffixed
+    if sys.implementation.name in _AVAILABLE_SCHEMES:
+        return sys.implementation.name
+    suffixed = f"{os.name}_prefix"
+    if suffixed in _AVAILABLE_SCHEMES:
+        return suffixed
+    if os.name in _AVAILABLE_SCHEMES:  # On Windows, prefix is just called "nt".
+        return os.name
+    return "posix_prefix"
+
+
+def _infer_user() -> str:
+    """Try to find a user scheme for the current platform."""
+    if _PREFERRED_SCHEME_API:
+        return _PREFERRED_SCHEME_API("user")
+    if is_osx_framework() and not running_under_virtualenv():
+        suffixed = "osx_framework_user"
+    else:
+        suffixed = f"{os.name}_user"
+    if suffixed in _AVAILABLE_SCHEMES:
+        return suffixed
+    if "posix_user" not in _AVAILABLE_SCHEMES:  # User scheme unavailable.
+        raise UserInstallationInvalid()
+    return "posix_user"
+
+
+def _infer_home() -> str:
+    """Try to find a home for the current platform."""
+    if _PREFERRED_SCHEME_API:
+        return _PREFERRED_SCHEME_API("home")
+    suffixed = f"{os.name}_home"
+    if suffixed in _AVAILABLE_SCHEMES:
+        return suffixed
+    return "posix_home"
+
+
+# Update these keys if the user sets a custom home.
+_HOME_KEYS = [
+    "installed_base",
+    "base",
+    "installed_platbase",
+    "platbase",
+    "prefix",
+    "exec_prefix",
+]
+if sysconfig.get_config_var("userbase") is not None:
+    _HOME_KEYS.append("userbase")
+
+
+def get_scheme(
+    dist_name: str,
+    user: bool = False,
+    home: typing.Optional[str] = None,
+    root: typing.Optional[str] = None,
+    isolated: bool = False,
+    prefix: typing.Optional[str] = None,
+) -> Scheme:
+    """
+    Get the "scheme" corresponding to the input parameters.
+
+    :param dist_name: the name of the package to retrieve the scheme for, used
+        in the headers scheme path
+    :param user: indicates to use the "user" scheme
+    :param home: indicates to use the "home" scheme
+    :param root: root under which other directories are re-based
+    :param isolated: ignored, but kept for distutils compatibility (where
+        this controls whether the user-site pydistutils.cfg is honored)
+    :param prefix: indicates to use the "prefix" scheme and provides the
+        base directory for the same
+    """
+    if user and prefix:
+        raise InvalidSchemeCombination("--user", "--prefix")
+    if home and prefix:
+        raise InvalidSchemeCombination("--home", "--prefix")
+
+    if home is not None:
+        scheme_name = _infer_home()
+    elif user:
+        scheme_name = _infer_user()
+    else:
+        scheme_name = _infer_prefix()
+
+    # Special case: When installing into a custom prefix, use posix_prefix
+    # instead of osx_framework_library. See _should_use_osx_framework_prefix()
+    # docstring for details.
+    if prefix is not None and scheme_name == "osx_framework_library":
+        scheme_name = "posix_prefix"
+
+    if home is not None:
+        variables = {k: home for k in _HOME_KEYS}
+    elif prefix is not None:
+        variables = {k: prefix for k in _HOME_KEYS}
+    else:
+        variables = {}
+
+    paths = sysconfig.get_paths(scheme=scheme_name, vars=variables)
+
+    # Logic here is very arbitrary, we're doing it for compatibility, don't ask.
+    # 1. Pip historically uses a special header path in virtual environments.
+    # 2. If the distribution name is not known, distutils uses 'UNKNOWN'. We
+    #    only do the same when not running in a virtual environment because
+    #    pip's historical header path logic (see point 1) did not do this.
+    if running_under_virtualenv():
+        if user:
+            base = variables.get("userbase", sys.prefix)
+        else:
+            base = variables.get("base", sys.prefix)
+        python_xy = f"python{get_major_minor_version()}"
+        paths["include"] = os.path.join(base, "include", "site", python_xy)
+    elif not dist_name:
+        dist_name = "UNKNOWN"
+
+    scheme = Scheme(
+        platlib=paths["platlib"],
+        purelib=paths["purelib"],
+        headers=os.path.join(paths["include"], dist_name),
+        scripts=paths["scripts"],
+        data=paths["data"],
+    )
+    if root is not None:
+        for key in SCHEME_KEYS:
+            value = change_root(root, getattr(scheme, key))
+            setattr(scheme, key, value)
+    return scheme
+
+
+def get_bin_prefix() -> str:
+    # Forcing to use /usr/local/bin for standard macOS framework installs.
+    if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/":
+        return "/usr/local/bin"
+    return sysconfig.get_paths()["scripts"]
+
+
+def get_purelib() -> str:
+    return sysconfig.get_paths()["purelib"]
+
+
+def get_platlib() -> str:
+    return sysconfig.get_paths()["platlib"]
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/locations/base.py b/.venv/lib/python3.12/site-packages/pip/_internal/locations/base.py
new file mode 100644
index 0000000..3f9f896
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/locations/base.py
@@ -0,0 +1,81 @@
+import functools
+import os
+import site
+import sys
+import sysconfig
+import typing
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.utils import appdirs
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+# Application Directories
+USER_CACHE_DIR = appdirs.user_cache_dir("pip")
+
+# FIXME doesn't account for venv linked to global site-packages
+site_packages: str = sysconfig.get_path("purelib")
+
+
+def get_major_minor_version() -> str:
+    """
+    Return the major-minor version of the current Python as a string, e.g.
+    "3.7" or "3.10".
+    """
+    return "{}.{}".format(*sys.version_info)
+
+
+def change_root(new_root: str, pathname: str) -> str:
+    """Return 'pathname' with 'new_root' prepended.
+
+    If 'pathname' is relative, this is equivalent to os.path.join(new_root, pathname).
+    Otherwise, it requires making 'pathname' relative and then joining the
+    two, which is tricky on DOS/Windows and Mac OS.
+
+    This is borrowed from Python's standard library's distutils module.
+    """
+    if os.name == "posix":
+        if not os.path.isabs(pathname):
+            return os.path.join(new_root, pathname)
+        else:
+            return os.path.join(new_root, pathname[1:])
+
+    elif os.name == "nt":
+        (drive, path) = os.path.splitdrive(pathname)
+        if path[0] == "\\":
+            path = path[1:]
+        return os.path.join(new_root, path)
+
+    else:
+        raise InstallationError(
+            f"Unknown platform: {os.name}\n"
+            "Can not change root path prefix on unknown platform."
+        )
+
+
+def get_src_prefix() -> str:
+    if running_under_virtualenv():
+        src_prefix = os.path.join(sys.prefix, "src")
+    else:
+        # FIXME: keep src in cwd for now (it is not a temporary folder)
+        try:
+            src_prefix = os.path.join(os.getcwd(), "src")
+        except OSError:
+            # In case the current working directory has been renamed or deleted
+            sys.exit("The folder you are executing pip from can no longer be found.")
+
+    # under macOS + virtualenv sys.prefix is not properly resolved
+    # it is something like /path/to/python/bin/..
+    return os.path.abspath(src_prefix)
+
+
+try:
+    # Use getusersitepackages if this is present, as it ensures that the
+    # value is initialised properly.
+    user_site: typing.Optional[str] = site.getusersitepackages()
+except AttributeError:
+    user_site = site.USER_SITE
+
+
+@functools.lru_cache(maxsize=None)
+def is_osx_framework() -> bool:
+    return bool(sysconfig.get_config_var("PYTHONFRAMEWORK"))
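``change_root()`` is the piece worth illustrating: it re-roots an install path without resolving symlinks, simply stripping the leading separator (or drive letter on Windows) and joining. A minimal sketch on POSIX; the staging directory is an arbitrary example:

from pip._internal.locations.base import change_root

print(change_root("/tmp/staging", "/usr/lib/python3.12/site-packages"))
# /tmp/staging/usr/lib/python3.12/site-packages
print(change_root("/tmp/staging", "relative/path"))
# /tmp/staging/relative/path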
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/main.py b/.venv/lib/python3.12/site-packages/pip/_internal/main.py
new file mode 100644
index 0000000..33c6d24
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/main.py
@@ -0,0 +1,12 @@
+from typing import List, Optional
+
+
+def main(args: Optional[List[str]] = None) -> int:
+    """This is preserved for old console scripts that may still be referencing
+    it.
+
+    For additional details, see https://github.com/pypa/pip/issues/7498.
+    """
+    from pip._internal.utils.entrypoints import _wrapper
+
+    return _wrapper(args)
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/metadata/__init__.py b/.venv/lib/python3.12/site-packages/pip/_internal/metadata/__init__.py
new file mode 100644
index 0000000..aa232b6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/metadata/__init__.py
@@ -0,0 +1,128 @@
+import contextlib
+import functools
+import os
+import sys
+from typing import TYPE_CHECKING, List, Optional, Type, cast
+
+from pip._internal.utils.misc import strtobool
+
+from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel
+
+if TYPE_CHECKING:
+    from typing import Literal, Protocol
+else:
+    Protocol = object
+
+__all__ = [
+    "BaseDistribution",
+    "BaseEnvironment",
+    "FilesystemWheel",
+    "MemoryWheel",
+    "Wheel",
+    "get_default_environment",
+    "get_environment",
+    "get_wheel_distribution",
+    "select_backend",
+]
+
+
+def _should_use_importlib_metadata() -> bool:
+    """Whether to use the ``importlib.metadata`` or ``pkg_resources`` backend.
+
+    By default, pip uses ``importlib.metadata`` on Python 3.11+, and
+    ``pkg_resources`` otherwise. This can be overridden in a couple of ways:
+
+    * If environment variable ``_PIP_USE_IMPORTLIB_METADATA`` is set, it
+      dictates whether ``importlib.metadata`` is used, regardless of Python
+      version.
+    * On Python 3.11+, Python distributors can patch ``importlib.metadata``
+      to add a global constant ``_PIP_USE_IMPORTLIB_METADATA = False``. This
+      makes pip use ``pkg_resources`` (unless the user set the aforementioned
+      environment variable to *True*).
+    """
+    with contextlib.suppress(KeyError, ValueError):
+        return bool(strtobool(os.environ["_PIP_USE_IMPORTLIB_METADATA"]))
+    if sys.version_info < (3, 11):
+        return False
+    import importlib.metadata
+
+    return bool(getattr(importlib.metadata, "_PIP_USE_IMPORTLIB_METADATA", True))
+
+
+class Backend(Protocol):
+    NAME: 'Literal["importlib", "pkg_resources"]'
+    Distribution: Type[BaseDistribution]
+    Environment: Type[BaseEnvironment]
+
+
+@functools.lru_cache(maxsize=None)
+def select_backend() -> Backend:
+    if _should_use_importlib_metadata():
+        from . import importlib
+
+        return cast(Backend, importlib)
+    from . import pkg_resources
+
+    return cast(Backend, pkg_resources)
+
+
+def get_default_environment() -> BaseEnvironment:
+    """Get the default representation for the current environment.
+
+    This returns an Environment instance from the chosen backend. The default
+    Environment instance should be built from ``sys.path`` and may use caching
+    to share instance state across calls.
+    """
+    return select_backend().Environment.default()
+
+
+def get_environment(paths: Optional[List[str]]) -> BaseEnvironment:
+    """Get a representation of the environment specified by ``paths``.
+
+    This returns an Environment instance from the chosen backend based on the
+    given import paths. The backend must build a fresh instance representing
+    the state of installed distributions when this function is called.
+    """
+    return select_backend().Environment.from_paths(paths)
+
+
+def get_directory_distribution(directory: str) -> BaseDistribution:
+    """Get the distribution metadata representation in the specified directory.
+
+    This returns a Distribution instance from the chosen backend based on
+    the given on-disk ``.dist-info`` directory.
+    """
+    return select_backend().Distribution.from_directory(directory)
+
+
+def get_wheel_distribution(wheel: Wheel, canonical_name: str) -> BaseDistribution:
+    """Get the representation of the specified wheel's distribution metadata.
+
+    This returns a Distribution instance from the chosen backend based on
+    the given wheel's ``.dist-info`` directory.
+
+    :param canonical_name: Normalized project name of the given wheel.
+    """
+    return select_backend().Distribution.from_wheel(wheel, canonical_name)
+
+
+def get_metadata_distribution(
+    metadata_contents: bytes,
+    filename: str,
+    canonical_name: str,
+) -> BaseDistribution:
+    """Get the dist representation of the specified METADATA file contents.
+
+    This returns a Distribution instance from the chosen backend sourced from the data
+    in `metadata_contents`.
+
+    :param metadata_contents: Contents of a METADATA file within a dist, or one served
+                              via PEP 658.
+    :param filename: Filename for the dist this metadata represents.
+    :param canonical_name: Normalized project name of the given dist.
+    """
+    return select_backend().Distribution.from_metadata_file_contents(
+        metadata_contents,
+        filename,
+        canonical_name,
+    )
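The backend switch above can be exercised directly; because ``select_backend()`` is cached with ``lru_cache``, the ``_PIP_USE_IMPORTLIB_METADATA`` override has to be set before the first call. A sketch (forcing the importlib backend with "1" is just one example value):

import os

os.environ["_PIP_USE_IMPORTLIB_METADATA"] = "1"   # force the importlib backend

from pip._internal.metadata import get_default_environment, select_backend

print(select_backend().NAME)     # "importlib" here; "pkg_resources" when forced off
env = get_default_environment()  # environment representing sys.path
print(type(env).__name__)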
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/metadata/_json.py b/.venv/lib/python3.12/site-packages/pip/_internal/metadata/_json.py
new file mode 100644
index 0000000..27362fc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/metadata/_json.py
@@ -0,0 +1,84 @@
+# Extracted from https://github.com/pfmoore/pkg_metadata
+
+from email.header import Header, decode_header, make_header
+from email.message import Message
+from typing import Any, Dict, List, Union
+
+METADATA_FIELDS = [
+    # Name, Multiple-Use
+    ("Metadata-Version", False),
+    ("Name", False),
+    ("Version", False),
+    ("Dynamic", True),
+    ("Platform", True),
+    ("Supported-Platform", True),
+    ("Summary", False),
+    ("Description", False),
+    ("Description-Content-Type", False),
+    ("Keywords", False),
+    ("Home-page", False),
+    ("Download-URL", False),
+    ("Author", False),
+    ("Author-email", False),
+    ("Maintainer", False),
+    ("Maintainer-email", False),
+    ("License", False),
+    ("Classifier", True),
+    ("Requires-Dist", True),
+    ("Requires-Python", False),
+    ("Requires-External", True),
+    ("Project-URL", True),
+    ("Provides-Extra", True),
+    ("Provides-Dist", True),
+    ("Obsoletes-Dist", True),
+]
+
+
+def json_name(field: str) -> str:
+    return field.lower().replace("-", "_")
+
+
+def msg_to_json(msg: Message) -> Dict[str, Any]:
+    """Convert a Message object into a JSON-compatible dictionary."""
+
+    def sanitise_header(h: Union[Header, str]) -> str:
+        if isinstance(h, Header):
+            chunks = []
+            for bytes, encoding in decode_header(h):
+                if encoding == "unknown-8bit":
+                    try:
+                        # See if UTF-8 works
+                        bytes.decode("utf-8")
+                        encoding = "utf-8"
+                    except UnicodeDecodeError:
+                        # If not, latin1 at least won't fail
+                        encoding = "latin1"
+                chunks.append((bytes, encoding))
+            return str(make_header(chunks))
+        return str(h)
+
+    result = {}
+    for field, multi in METADATA_FIELDS:
+        if field not in msg:
+            continue
+        key = json_name(field)
+        if multi:
+            value: Union[str, List[str]] = [
+                sanitise_header(v) for v in msg.get_all(field)  # type: ignore
+            ]
+        else:
+            value = sanitise_header(msg.get(field))  # type: ignore
+            if key == "keywords":
+                # Accept both comma-separated and space-separated
+                # forms, for better compatibility with old data.
+                if "," in value:
+                    value = [v.strip() for v in value.split(",")]
+                else:
+                    value = value.split()
+        result[key] = value
+
+    payload = msg.get_payload()
+    if payload:
+        result["description"] = payload
+
+    return result
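``msg_to_json()`` accepts any email.message.Message parsed from METADATA text, so it is easy to try in isolation. A sketch with made-up metadata (all field values below are invented for the example):

from email.parser import Parser
from pip._internal.metadata._json import msg_to_json

raw = (
    "Metadata-Version: 2.1\n"
    "Name: example-project\n"
    "Version: 1.0\n"
    "Keywords: packaging,metadata\n"
    "Requires-Dist: requests (>=2.0)\n"
    "Requires-Dist: idna\n"
    "\n"
    "A short description body.\n"
)
data = msg_to_json(Parser().parsestr(raw))
print(data["name"], data["version"])   # example-project 1.0
print(data["keywords"])                # ['packaging', 'metadata']
print(data["requires_dist"])           # ['requests (>=2.0)', 'idna']
print(data["description"].strip())     # A short description body.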
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/metadata/base.py b/.venv/lib/python3.12/site-packages/pip/_internal/metadata/base.py
new file mode 100644
index 0000000..9249124
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/metadata/base.py
@@ -0,0 +1,702 @@
+import csv
+import email.message
+import functools
+import json
+import logging
+import pathlib
+import re
+import zipfile
+from typing import (
+    IO,
+    TYPE_CHECKING,
+    Any,
+    Collection,
+    Container,
+    Dict,
+    Iterable,
+    Iterator,
+    List,
+    NamedTuple,
+    Optional,
+    Tuple,
+    Union,
+)
+
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+from pip._vendor.packaging.version import LegacyVersion, Version
+
+from pip._internal.exceptions import NoneMetadataError
+from pip._internal.locations import site_packages, user_site
+from pip._internal.models.direct_url import (
+    DIRECT_URL_METADATA_NAME,
+    DirectUrl,
+    DirectUrlValidationError,
+)
+from pip._internal.utils.compat import stdlib_pkgs  # TODO: Move definition here.
+from pip._internal.utils.egg_link import egg_link_path_from_sys_path
+from pip._internal.utils.misc import is_local, normalize_path
+from pip._internal.utils.urls import url_to_path
+
+from ._json import msg_to_json
+
+if TYPE_CHECKING:
+    from typing import Protocol
+else:
+    Protocol = object
+
+DistributionVersion = Union[LegacyVersion, Version]
+
+InfoPath = Union[str, pathlib.PurePath]
+
+logger = logging.getLogger(__name__)
+
+
+class BaseEntryPoint(Protocol):
+    @property
+    def name(self) -> str:
+        raise NotImplementedError()
+
+    @property
+    def value(self) -> str:
+        raise NotImplementedError()
+
+    @property
+    def group(self) -> str:
+        raise NotImplementedError()
+
+
+def _convert_installed_files_path(
+    entry: Tuple[str, ...],
+    info: Tuple[str, ...],
+) -> str:
+    """Convert a legacy installed-files.txt path into modern RECORD path.
+
+    The legacy format stores paths relative to the info directory, while the
+    modern format stores paths relative to the package root, e.g. the
+    site-packages directory.
+
+    :param entry: Path parts of the installed-files.txt entry.
+    :param info: Path parts of the egg-info directory relative to package root.
+    :returns: The converted entry.
+
+    For best compatibility with symlinks, this does not use ``abspath()`` or
+    ``Path.resolve()``, but tries to work with path parts:
+
+    1. While ``entry`` starts with ``..``, remove the equal amounts of parts
+       from ``info``; if ``info`` is empty, start appending ``..`` instead.
+    2. Join the two directly.
+    """
+    while entry and entry[0] == "..":
+        if not info or info[-1] == "..":
+            info += ("..",)
+        else:
+            info = info[:-1]
+        entry = entry[1:]
+    return str(pathlib.Path(*info, *entry))
+
+
+class RequiresEntry(NamedTuple):
+    requirement: str
+    extra: str
+    marker: str
+
+
+class BaseDistribution(Protocol):
+    @classmethod
+    def from_directory(cls, directory: str) -> "BaseDistribution":
+        """Load the distribution from a metadata directory.
+
+        :param directory: Path to a metadata directory, e.g. ``.dist-info``.
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    def from_metadata_file_contents(
+        cls,
+        metadata_contents: bytes,
+        filename: str,
+        project_name: str,
+    ) -> "BaseDistribution":
+        """Load the distribution from the contents of a METADATA file.
+
+        This is used to implement PEP 658 by generating a "shallow" dist object that can
+        be used for resolution without downloading or building the actual dist yet.
+
+        :param metadata_contents: The contents of a METADATA file.
+        :param filename: File name for the dist with this metadata.
+        :param project_name: Name of the project this dist represents.
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    def from_wheel(cls, wheel: "Wheel", name: str) -> "BaseDistribution":
+        """Load the distribution from a given wheel.
+
+        :param wheel: A concrete wheel definition.
+        :param name: File name of the wheel.
+
+        :raises InvalidWheel: Whenever loading of the wheel causes a
+            :py:exc:`zipfile.BadZipFile` exception to be thrown.
+        :raises UnsupportedWheel: If the wheel is a valid zip, but malformed
+            internally.
+        """
+        raise NotImplementedError()
+
+    def __repr__(self) -> str:
+        return f"{self.raw_name} {self.version} ({self.location})"
+
+    def __str__(self) -> str:
+        return f"{self.raw_name} {self.version}"
+
+    @property
+    def location(self) -> Optional[str]:
+        """Where the distribution is loaded from.
+
+        A string value is not necessarily a filesystem path, since distributions
+        can be loaded from other sources, e.g. arbitrary zip archives. ``None``
+        means the distribution is created in-memory.
+
+        Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
+        this is a symbolic link, we want to preserve the relative path between
+        it and files in the distribution.
+        """
+        raise NotImplementedError()
+
+    @property
+    def editable_project_location(self) -> Optional[str]:
+        """The project location for editable distributions.
+
+        This is the directory where pyproject.toml or setup.py is located.
+        None if the distribution is not installed in editable mode.
+        """
+        # TODO: this property is relatively costly to compute, memoize it ?
+        direct_url = self.direct_url
+        if direct_url:
+            if direct_url.is_local_editable():
+                return url_to_path(direct_url.url)
+        else:
+            # Search for an .egg-link file by walking sys.path, as it was
+            # done before by dist_is_editable().
+            egg_link_path = egg_link_path_from_sys_path(self.raw_name)
+            if egg_link_path:
+                # TODO: get project location from second line of egg_link file
+                #       (https://github.com/pypa/pip/issues/10243)
+                return self.location
+        return None
+
+    @property
+    def installed_location(self) -> Optional[str]:
+        """The distribution's "installed" location.
+
+        This should generally be a ``site-packages`` directory. This is
+        usually ``dist.location``, except for legacy develop-installed packages,
+        where ``dist.location`` is the source code location, and this is where
+        the ``.egg-link`` file is.
+
+        The returned location is normalized (in particular, with symlinks removed).
+        """
+        raise NotImplementedError()
+
+    @property
+    def info_location(self) -> Optional[str]:
+        """Location of the .[egg|dist]-info directory or file.
+
+        Similarly to ``location``, a string value is not necessarily a
+        filesystem path. ``None`` means the distribution is created in-memory.
+
+        For a modern .dist-info installation on disk, this should be something
+        like ``{location}/{raw_name}-{version}.dist-info``.
+
+        Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
+        this is a symbolic link, we want to preserve the relative path between
+        it and other files in the distribution.
+        """
+        raise NotImplementedError()
+
+    @property
+    def installed_by_distutils(self) -> bool:
+        """Whether this distribution is installed with legacy distutils format.
+
+        A distribution installed with "raw" distutils not patched by setuptools
+        uses one single file at ``info_location`` to store metadata. We need to
+        treat this specially on uninstallation.
+        """
+        info_location = self.info_location
+        if not info_location:
+            return False
+        return pathlib.Path(info_location).is_file()
+
+    @property
+    def installed_as_egg(self) -> bool:
+        """Whether this distribution is installed as an egg.
+
+        This usually indicates the distribution was installed by (older versions
+        of) easy_install.
+        """
+        location = self.location
+        if not location:
+            return False
+        return location.endswith(".egg")
+
+    @property
+    def installed_with_setuptools_egg_info(self) -> bool:
+        """Whether this distribution is installed with the ``.egg-info`` format.
+
+        This usually indicates the distribution was installed with setuptools
+        with an old pip version or with ``single-version-externally-managed``.
+
+        Note that this ensures the metadata store is a directory. distutils can
+        also install an ``.egg-info``, but as a file, not a directory. This
+        property is *False* for that case. Also see ``installed_by_distutils``.
+        """
+        info_location = self.info_location
+        if not info_location:
+            return False
+        if not info_location.endswith(".egg-info"):
+            return False
+        return pathlib.Path(info_location).is_dir()
+
+    @property
+    def installed_with_dist_info(self) -> bool:
+        """Whether this distribution is installed with the "modern format".
+
+        This indicates a "modern" installation, e.g. storing metadata in the
+        ``.dist-info`` directory. This applies to installations made by
+        setuptools (but through pip, not directly), or anything using the
+        standardized build backend interface (PEP 517).
+        """
+        info_location = self.info_location
+        if not info_location:
+            return False
+        if not info_location.endswith(".dist-info"):
+            return False
+        return pathlib.Path(info_location).is_dir()
+
+    @property
+    def canonical_name(self) -> NormalizedName:
+        raise NotImplementedError()
+
+    @property
+    def version(self) -> DistributionVersion:
+        raise NotImplementedError()
+
+    @property
+    def setuptools_filename(self) -> str:
+        """Convert a project name to its setuptools-compatible filename.
+
+        This is a copy of ``pkg_resources.to_filename()`` for compatibility.
+        """
+        return self.raw_name.replace("-", "_")
+
+    @property
+    def direct_url(self) -> Optional[DirectUrl]:
+        """Obtain a DirectUrl from this distribution.
+
+        Returns None if the distribution has no `direct_url.json` metadata,
+        or if `direct_url.json` is invalid.
+        """
+        try:
+            content = self.read_text(DIRECT_URL_METADATA_NAME)
+        except FileNotFoundError:
+            return None
+        try:
+            return DirectUrl.from_json(content)
+        except (
+            UnicodeDecodeError,
+            json.JSONDecodeError,
+            DirectUrlValidationError,
+        ) as e:
+            logger.warning(
+                "Error parsing %s for %s: %s",
+                DIRECT_URL_METADATA_NAME,
+                self.canonical_name,
+                e,
+            )
+            return None
+
+    @property
+    def installer(self) -> str:
+        try:
+            installer_text = self.read_text("INSTALLER")
+        except (OSError, ValueError, NoneMetadataError):
+            return ""  # Fail silently if the installer file cannot be read.
+        for line in installer_text.splitlines():
+            cleaned_line = line.strip()
+            if cleaned_line:
+                return cleaned_line
+        return ""
+
+    @property
+    def requested(self) -> bool:
+        return self.is_file("REQUESTED")
+
+    @property
+    def editable(self) -> bool:
+        return bool(self.editable_project_location)
+
+    @property
+    def local(self) -> bool:
+        """If distribution is installed in the current virtual environment.
+
+        Always True if we're not in a virtualenv.
+        """
+        if self.installed_location is None:
+            return False
+        return is_local(self.installed_location)
+
+    @property
+    def in_usersite(self) -> bool:
+        if self.installed_location is None or user_site is None:
+            return False
+        return self.installed_location.startswith(normalize_path(user_site))
+
+    @property
+    def in_site_packages(self) -> bool:
+        if self.installed_location is None or site_packages is None:
+            return False
+        return self.installed_location.startswith(normalize_path(site_packages))
+
+    def is_file(self, path: InfoPath) -> bool:
+        """Check whether an entry in the info directory is a file."""
+        raise NotImplementedError()
+
+    def iter_distutils_script_names(self) -> Iterator[str]:
+        """Find distutils 'scripts' entries metadata.
+
+        If 'scripts' is supplied in ``setup.py``, distutils records those in the
+        installed distribution's ``scripts`` directory, a file for each script.
+        """
+        raise NotImplementedError()
+
+    def read_text(self, path: InfoPath) -> str:
+        """Read a file in the info directory.
+
+        :raise FileNotFoundError: If ``path`` does not exist in the directory.
+        :raise NoneMetadataError: If ``path`` exists in the info directory, but
+            cannot be read.
+        """
+        raise NotImplementedError()
+
+    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
+        raise NotImplementedError()
+
+    def _metadata_impl(self) -> email.message.Message:
+        raise NotImplementedError()
+
+    @functools.lru_cache(maxsize=1)
+    def _metadata_cached(self) -> email.message.Message:
+        # When we drop python 3.7 support, move this to the metadata property and use
+        # functools.cached_property instead of lru_cache.
+        metadata = self._metadata_impl()
+        self._add_egg_info_requires(metadata)
+        return metadata
+
+    @property
+    def metadata(self) -> email.message.Message:
+        """Metadata of distribution parsed from e.g. METADATA or PKG-INFO.
+
+        This should return an empty message if the metadata file is unavailable.
+
+        :raises NoneMetadataError: If the metadata file is available, but does
+            not contain valid metadata.
+        """
+        return self._metadata_cached()
+
+    @property
+    def metadata_dict(self) -> Dict[str, Any]:
+        """PEP 566 compliant JSON-serializable representation of METADATA or PKG-INFO.
+
+        This should return an empty dict if the metadata file is unavailable.
+
+        :raises NoneMetadataError: If the metadata file is available, but does
+            not contain valid metadata.
+        """
+        return msg_to_json(self.metadata)
+
+    @property
+    def metadata_version(self) -> Optional[str]:
+        """Value of "Metadata-Version:" in distribution metadata, if available."""
+        return self.metadata.get("Metadata-Version")
+
+    @property
+    def raw_name(self) -> str:
+        """Value of "Name:" in distribution metadata."""
+        # The metadata should NEVER be missing the Name: key, but if it somehow
+        # does, fall back to the known canonical name.
+        return self.metadata.get("Name", self.canonical_name)
+
+    @property
+    def requires_python(self) -> SpecifierSet:
+        """Value of "Requires-Python:" in distribution metadata.
+
+        If the key does not exist or contains an invalid value, an empty
+        SpecifierSet should be returned.
+        """
+        value = self.metadata.get("Requires-Python")
+        if value is None:
+            return SpecifierSet()
+        try:
+            # Convert to str to satisfy the type checker; this can be a Header object.
+            spec = SpecifierSet(str(value))
+        except InvalidSpecifier as e:
+            message = "Package %r has an invalid Requires-Python: %s"
+            logger.warning(message, self.raw_name, e)
+            return SpecifierSet()
+        return spec
+
+    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
+        """Dependencies of this distribution.
+
+        For modern .dist-info distributions, this is the collection of
+        "Requires-Dist:" entries in distribution metadata.
+        """
+        raise NotImplementedError()
+
+    def iter_provided_extras(self) -> Iterable[str]:
+        """Extras provided by this distribution.
+
+        For modern .dist-info distributions, this is the collection of
+        "Provides-Extra:" entries in distribution metadata.
+
+        The return value of this function is not particularly useful other than
+        display purposes due to backward compatibility issues and the extra
+        names being poorly normalized prior to PEP 685. If you want to perform
+        logic operations on extras, use :func:`is_extra_provided` instead.
+        """
+        raise NotImplementedError()
+
+    def is_extra_provided(self, extra: str) -> bool:
+        """Check whether an extra is provided by this distribution.
+
+        This is needed mostly for compatibility issues with pkg_resources not
+        following the extra normalization rules defined in PEP 685.
+        """
+        raise NotImplementedError()
+
+    def _iter_declared_entries_from_record(self) -> Optional[Iterator[str]]:
+        try:
+            text = self.read_text("RECORD")
+        except FileNotFoundError:
+            return None
+        # This extra Path-str cast normalizes entries.
+        return (str(pathlib.Path(row[0])) for row in csv.reader(text.splitlines()))
+
+    def _iter_declared_entries_from_legacy(self) -> Optional[Iterator[str]]:
+        try:
+            text = self.read_text("installed-files.txt")
+        except FileNotFoundError:
+            return None
+        paths = (p for p in text.splitlines(keepends=False) if p)
+        root = self.location
+        info = self.info_location
+        if root is None or info is None:
+            return paths
+        try:
+            info_rel = pathlib.Path(info).relative_to(root)
+        except ValueError:  # info is not relative to root.
+            return paths
+        if not info_rel.parts:  # info *is* root.
+            return paths
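+        # installed-files.txt entries are relative to the info directory;
+        # convert them so they are relative to ``location``, matching the
+        # convention used by RECORD.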
+        return (
+            _convert_installed_files_path(pathlib.Path(p).parts, info_rel.parts)
+            for p in paths
+        )
+
+    def iter_declared_entries(self) -> Optional[Iterator[str]]:
+        """Iterate through file entries declared in this distribution.
+
+        For modern .dist-info distributions, this is the files listed in the
+        ``RECORD`` metadata file. For legacy setuptools distributions, this
+        comes from ``installed-files.txt``, with entries normalized to be
+        compatible with the format used by ``RECORD``.
+
+        :return: An iterator for listed entries, or None if the distribution
+            contains neither ``RECORD`` nor ``installed-files.txt``.
+        """
+        return (
+            self._iter_declared_entries_from_record()
+            or self._iter_declared_entries_from_legacy()
+        )
+
+    def _iter_requires_txt_entries(self) -> Iterator[RequiresEntry]:
+        """Parse a ``requires.txt`` in an egg-info directory.
+
+        This is an INI-ish format where an egg-info stores dependencies. A
+        section name describes an extra and/or an environment marker, while each
+        entry is an arbitrary string (not a key-value pair) representing a
+        dependency as a requirement string (no markers).
+
+        There is a construct in ``importlib.metadata`` called ``Sectioned`` that
+        does mostly the same, but the format is currently considered private.
+        """
+        try:
+            content = self.read_text("requires.txt")
+        except FileNotFoundError:
+            return
+        extra = marker = ""  # Section-less entries don't have markers.
+        for line in content.splitlines():
+            line = line.strip()
+            if not line or line.startswith("#"):  # Comment; ignored.
+                continue
+            if line.startswith("[") and line.endswith("]"):  # A section header.
+                extra, _, marker = line.strip("[]").partition(":")
+                continue
+            yield RequiresEntry(requirement=line, extra=extra, marker=marker)
+
+    def _iter_egg_info_extras(self) -> Iterable[str]:
+        """Get extras from the egg-info directory."""
+        known_extras = {""}
+        for entry in self._iter_requires_txt_entries():
+            extra = canonicalize_name(entry.extra)
+            if extra in known_extras:
+                continue
+            known_extras.add(extra)
+            yield extra
+
+    def _iter_egg_info_dependencies(self) -> Iterable[str]:
+        """Get distribution dependencies from the egg-info directory.
+
+        To ease parsing, this converts a legacy dependency entry into a PEP 508
+        requirement string. Like ``_iter_requires_txt_entries()``, there is code
+        in ``importlib.metadata`` that does mostly the same, but does not do
+        exactly what we need.
+
+        Namely, ``importlib.metadata`` does not normalize the extra name before
+        putting it into the requirement string, which causes marker comparison
+        to fail because the dist-info format does normalize. This is consistent in
+        all currently available PEP 517 backends, although not standardized.
+        """
+        for entry in self._iter_requires_txt_entries():
+            extra = canonicalize_name(entry.extra)
+            if extra and entry.marker:
+                marker = f'({entry.marker}) and extra == "{extra}"'
+            elif extra:
+                marker = f'extra == "{extra}"'
+            elif entry.marker:
+                marker = entry.marker
+            else:
+                marker = ""
+            if marker:
+                yield f"{entry.requirement} ; {marker}"
+            else:
+                yield entry.requirement
+
+    def _add_egg_info_requires(self, metadata: email.message.Message) -> None:
+        """Add egg-info requires.txt information to the metadata."""
+        if not metadata.get_all("Requires-Dist"):
+            for dep in self._iter_egg_info_dependencies():
+                metadata["Requires-Dist"] = dep
+        if not metadata.get_all("Provides-Extra"):
+            for extra in self._iter_egg_info_extras():
+                metadata["Provides-Extra"] = extra
+
+
+class BaseEnvironment:
+    """An environment containing distributions to introspect."""
+
+    @classmethod
+    def default(cls) -> "BaseEnvironment":
+        raise NotImplementedError()
+
+    @classmethod
+    def from_paths(cls, paths: Optional[List[str]]) -> "BaseEnvironment":
+        raise NotImplementedError()
+
+    def get_distribution(self, name: str) -> Optional["BaseDistribution"]:
+        """Given a requirement name, return the installed distributions.
+
+        The name may not be normalized. The implementation must canonicalize
+        it for lookup.
+        """
+        raise NotImplementedError()
+
+    def _iter_distributions(self) -> Iterator["BaseDistribution"]:
+        """Iterate through installed distributions.
+
+        This function should be implemented by subclasses, but never called
+        directly. Use the public ``iter_all_distributions()`` instead, which
+        implements additional logic to make sure the distributions are valid.
+        """
+        raise NotImplementedError()
+
+    def iter_all_distributions(self) -> Iterator[BaseDistribution]:
+        """Iterate through all installed distributions without any filtering."""
+        for dist in self._iter_distributions():
+            # Make sure the distribution actually comes from a valid Python
+            # packaging distribution. Pip's AdjacentTempDirectory leaves folders
+            # e.g. ``~atplotlib.dist-info`` if cleanup was interrupted. The
+            # valid project name pattern is taken from PEP 508.
+            project_name_valid = re.match(
+                r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$",
+                dist.canonical_name,
+                flags=re.IGNORECASE,
+            )
+            if not project_name_valid:
+                logger.warning(
+                    "Ignoring invalid distribution %s (%s)",
+                    dist.canonical_name,
+                    dist.location,
+                )
+                continue
+            yield dist
+
+    def iter_installed_distributions(
+        self,
+        local_only: bool = True,
+        skip: Container[str] = stdlib_pkgs,
+        include_editables: bool = True,
+        editables_only: bool = False,
+        user_only: bool = False,
+    ) -> Iterator[BaseDistribution]:
+        """Return a list of installed distributions.
+
+        This is based on ``iter_all_distributions()`` with additional filtering
+        options. Note that ``iter_installed_distributions()`` without arguments
+        is *not* equal to ``iter_all_distributions()``, since some of the
+        configurations exclude packages by default.
+
+        :param local_only: If True (default), only return installations
+            local to the current virtualenv, if in a virtualenv.
+        :param skip: An iterable of canonicalized project names to ignore;
+            defaults to ``stdlib_pkgs``.
+        :param include_editables: If False, don't report editables.
+        :param editables_only: If True, only report editables.
+        :param user_only: If True, only report installations in the user
+            site directory.
+        """
+        it = self.iter_all_distributions()
+        if local_only:
+            it = (d for d in it if d.local)
+        if not include_editables:
+            it = (d for d in it if not d.editable)
+        if editables_only:
+            it = (d for d in it if d.editable)
+        if user_only:
+            it = (d for d in it if d.in_usersite)
+        return (d for d in it if d.canonical_name not in skip)
+
+
+class Wheel(Protocol):
+    location: str
+
+    def as_zipfile(self) -> zipfile.ZipFile:
+        raise NotImplementedError()
+
+
+class FilesystemWheel(Wheel):
+    def __init__(self, location: str) -> None:
+        self.location = location
+
+    def as_zipfile(self) -> zipfile.ZipFile:
+        return zipfile.ZipFile(self.location, allowZip64=True)
+
+
+class MemoryWheel(Wheel):
+    def __init__(self, location: str, stream: IO[bytes]) -> None:
+        self.location = location
+        self.stream = stream
+
+    def as_zipfile(self) -> zipfile.ZipFile:
+        return zipfile.ZipFile(self.stream, allowZip64=True)
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__init__.py b/.venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__init__.py
new file mode 100644
index 0000000..a779138
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__init__.py
@@ -0,0 +1,6 @@
+from ._dists import Distribution
+from ._envs import Environment
+
+__all__ = ["NAME", "Distribution", "Environment"]
+
+NAME = "importlib"
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_compat.py b/.venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_compat.py
new file mode 100644
index 0000000..593bff2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_compat.py
@@ -0,0 +1,55 @@
+import importlib.metadata
+from typing import Any, Optional, Protocol, cast
+
+
+class BadMetadata(ValueError):
+    def __init__(self, dist: importlib.metadata.Distribution, *, reason: str) -> None:
+        self.dist = dist
+        self.reason = reason
+
+    def __str__(self) -> str:
+        return f"Bad metadata in {self.dist} ({self.reason})"
+
+
+class BasePath(Protocol):
+    """A protocol that various path objects conform.
+
+    This exists because importlib.metadata uses both ``pathlib.Path`` and
+    ``zipfile.Path``, and we need a common base for type hints (Union does not
+    work well since ``zipfile.Path`` is too new for our linter setup).
+
+    This is not meant to be exhaustive, but only contains things that are
+    present in both classes *that we need*.
+    """
+
+    @property
+    def name(self) -> str:
+        raise NotImplementedError()
+
+    @property
+    def parent(self) -> "BasePath":
+        raise NotImplementedError()
+
+
+def get_info_location(d: importlib.metadata.Distribution) -> Optional[BasePath]:
+    """Find the path to the distribution's metadata directory.
+
+    HACK: This relies on importlib.metadata's private ``_path`` attribute. Not
+    all distributions exist on disk, so importlib.metadata is correct to not
+    expose the attribute as public. But pip's code base is old and not as clean,
+    so we do this to avoid having to rewrite too many things. Hopefully we can
+    eliminate this some day.
+    """
+    return getattr(d, "_path", None)
+
+
+def get_dist_name(dist: importlib.metadata.Distribution) -> str:
+    """Get the distribution's project name.
+
+    The ``name`` attribute is only available in Python 3.10 or later. We are
+    targeting exactly that, but Mypy does not know this.
+    """
+    name = cast(Any, dist).name
+    if not isinstance(name, str):
+        raise BadMetadata(dist, reason="invalid metadata entry 'name'")
+    return name
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_dists.py b/.venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_dists.py
new file mode 100644
index 0000000..26370fa
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_dists.py
@@ -0,0 +1,227 @@
+import email.message
+import importlib.metadata
+import os
+import pathlib
+import zipfile
+from typing import (
+    Collection,
+    Dict,
+    Iterable,
+    Iterator,
+    Mapping,
+    Optional,
+    Sequence,
+    cast,
+)
+
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.exceptions import InvalidWheel, UnsupportedWheel
+from pip._internal.metadata.base import (
+    BaseDistribution,
+    BaseEntryPoint,
+    DistributionVersion,
+    InfoPath,
+    Wheel,
+)
+from pip._internal.utils.misc import normalize_path
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file
+
+from ._compat import BasePath, get_dist_name
+
+
+class WheelDistribution(importlib.metadata.Distribution):
+    """An ``importlib.metadata.Distribution`` read from a wheel.
+
+    Although ``importlib.metadata.PathDistribution`` accepts ``zipfile.Path``,
+    its implementation is too "lazy" for pip's needs (we can't keep the ZipFile
+    handle open for the entire lifetime of the distribution object).
+
+    This implementation eagerly reads the entire metadata directory into
+    memory instead, and operates from that.
+    """
+
+    def __init__(
+        self,
+        files: Mapping[pathlib.PurePosixPath, bytes],
+        info_location: pathlib.PurePosixPath,
+    ) -> None:
+        self._files = files
+        self.info_location = info_location
+
+    @classmethod
+    def from_zipfile(
+        cls,
+        zf: zipfile.ZipFile,
+        name: str,
+        location: str,
+    ) -> "WheelDistribution":
+        info_dir, _ = parse_wheel(zf, name)
+        paths = (
+            (name, pathlib.PurePosixPath(name.split("/", 1)[-1]))
+            for name in zf.namelist()
+            if name.startswith(f"{info_dir}/")
+        )
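+        # Each member such as "pkg-1.0.dist-info/METADATA" (illustrative name)
+        # is keyed by its path relative to the info directory, e.g. "METADATA".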
+        files = {
+            relpath: read_wheel_metadata_file(zf, fullpath)
+            for fullpath, relpath in paths
+        }
+        info_location = pathlib.PurePosixPath(location, info_dir)
+        return cls(files, info_location)
+
+    def iterdir(self, path: InfoPath) -> Iterator[pathlib.PurePosixPath]:
+        # Only allow iterating through the metadata directory.
+        if pathlib.PurePosixPath(str(path)) in self._files:
+            return iter(self._files)
+        raise FileNotFoundError(path)
+
+    def read_text(self, filename: str) -> Optional[str]:
+        try:
+            data = self._files[pathlib.PurePosixPath(filename)]
+        except KeyError:
+            return None
+        try:
+            text = data.decode("utf-8")
+        except UnicodeDecodeError as e:
+            wheel = self.info_location.parent
+            error = f"Error decoding metadata for {wheel}: {e} in {filename} file"
+            raise UnsupportedWheel(error)
+        return text
+
+
+class Distribution(BaseDistribution):
+    def __init__(
+        self,
+        dist: importlib.metadata.Distribution,
+        info_location: Optional[BasePath],
+        installed_location: Optional[BasePath],
+    ) -> None:
+        self._dist = dist
+        self._info_location = info_location
+        self._installed_location = installed_location
+
+    @classmethod
+    def from_directory(cls, directory: str) -> BaseDistribution:
+        info_location = pathlib.Path(directory)
+        dist = importlib.metadata.Distribution.at(info_location)
+        return cls(dist, info_location, info_location.parent)
+
+    @classmethod
+    def from_metadata_file_contents(
+        cls,
+        metadata_contents: bytes,
+        filename: str,
+        project_name: str,
+    ) -> BaseDistribution:
+        # Generate temp dir to contain the metadata file, and write the file contents.
+        temp_dir = pathlib.Path(
+            TempDirectory(kind="metadata", globally_managed=True).path
+        )
+        metadata_path = temp_dir / "METADATA"
+        metadata_path.write_bytes(metadata_contents)
+        # Construct dist pointing to the newly created directory.
+        dist = importlib.metadata.Distribution.at(metadata_path.parent)
+        return cls(dist, metadata_path.parent, None)
+
+    @classmethod
+    def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution:
+        try:
+            with wheel.as_zipfile() as zf:
+                dist = WheelDistribution.from_zipfile(zf, name, wheel.location)
+        except zipfile.BadZipFile as e:
+            raise InvalidWheel(wheel.location, name) from e
+        except UnsupportedWheel as e:
+            raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")
+        return cls(dist, dist.info_location, pathlib.PurePosixPath(wheel.location))
+
+    @property
+    def location(self) -> Optional[str]:
+        if self._info_location is None:
+            return None
+        return str(self._info_location.parent)
+
+    @property
+    def info_location(self) -> Optional[str]:
+        if self._info_location is None:
+            return None
+        return str(self._info_location)
+
+    @property
+    def installed_location(self) -> Optional[str]:
+        if self._installed_location is None:
+            return None
+        return normalize_path(str(self._installed_location))
+
+    def _get_dist_name_from_location(self) -> Optional[str]:
+        """Try to get the name from the metadata directory name.
+
+        This is much faster than reading metadata.
+        """
+        if self._info_location is None:
+            return None
+        stem, suffix = os.path.splitext(self._info_location.name)
+        if suffix not in (".dist-info", ".egg-info"):
+            return None
+        return stem.split("-", 1)[0]
+
+    @property
+    def canonical_name(self) -> NormalizedName:
+        name = self._get_dist_name_from_location() or get_dist_name(self._dist)
+        return canonicalize_name(name)
+
+    @property
+    def version(self) -> DistributionVersion:
+        return parse_version(self._dist.version)
+
+    def is_file(self, path: InfoPath) -> bool:
+        return self._dist.read_text(str(path)) is not None
+
+    def iter_distutils_script_names(self) -> Iterator[str]:
+        # A distutils installation is always "flat" (not in e.g. egg form), so
+        # if this distribution's info location is NOT a pathlib.Path (but e.g.
+        # zipfile.Path), it can never contain any distutils scripts.
+        if not isinstance(self._info_location, pathlib.Path):
+            return
+        for child in self._info_location.joinpath("scripts").iterdir():
+            yield child.name
+
+    def read_text(self, path: InfoPath) -> str:
+        content = self._dist.read_text(str(path))
+        if content is None:
+            raise FileNotFoundError(path)
+        return content
+
+    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
+        # importlib.metadata's EntryPoint structure satisfies BaseEntryPoint.
+        return self._dist.entry_points
+
+    def _metadata_impl(self) -> email.message.Message:
+        # From Python 3.10+, importlib.metadata declares PackageMetadata as the
+        # return type. This protocol is unfortunately a disaster now and misses
+        # a ton of fields that we need, including get() and get_payload(). We
+        # rely on the implementation that the object is actually a Message now,
+        # until upstream can improve the protocol. (python/cpython#94952)
+        return cast(email.message.Message, self._dist.metadata)
+
+    def iter_provided_extras(self) -> Iterable[str]:
+        return self.metadata.get_all("Provides-Extra", [])
+
+    def is_extra_provided(self, extra: str) -> bool:
+        return any(
+            canonicalize_name(provided_extra) == canonicalize_name(extra)
+            for provided_extra in self.metadata.get_all("Provides-Extra", [])
+        )
+
+    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
+        contexts: Sequence[Dict[str, str]] = [{"extra": e} for e in extras]
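+        # A requirement is yielded if it is unconditional, if its marker holds
+        # with no extra requested, or if it holds under any requested extra.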
+        for req_string in self.metadata.get_all("Requires-Dist", []):
+            req = Requirement(req_string)
+            if not req.marker:
+                yield req
+            elif not extras and req.marker.evaluate({"extra": ""}):
+                yield req
+            elif any(req.marker.evaluate(context) for context in contexts):
+                yield req
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_envs.py b/.venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_envs.py
new file mode 100644
index 0000000..048dc55
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_envs.py
@@ -0,0 +1,189 @@
+import functools
+import importlib.metadata
+import logging
+import os
+import pathlib
+import sys
+import zipfile
+import zipimport
+from typing import Iterator, List, Optional, Sequence, Set, Tuple
+
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+
+from pip._internal.metadata.base import BaseDistribution, BaseEnvironment
+from pip._internal.models.wheel import Wheel
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.filetypes import WHEEL_EXTENSION
+
+from ._compat import BadMetadata, BasePath, get_dist_name, get_info_location
+from ._dists import Distribution
+
+logger = logging.getLogger(__name__)
+
+
+def _looks_like_wheel(location: str) -> bool:
+    if not location.endswith(WHEEL_EXTENSION):
+        return False
+    if not os.path.isfile(location):
+        return False
+    if not Wheel.wheel_file_re.match(os.path.basename(location)):
+        return False
+    return zipfile.is_zipfile(location)
+
+
+class _DistributionFinder:
+    """Finder to locate distributions.
+
+    The main purpose of this class is to memoize found distributions' names, so
+    only one distribution is returned for each package name. A lot of pip code
+    assumes this (because it is setuptools's behavior), and not doing the same
+    can potentially cause a distribution in a lower precedence path to override
+    a higher precedence one if the caller is not careful.
+
+    Eventually we probably want to make it possible to see lower precedence
+    installations as well. It's a useful feature, after all.
+    """
+
+    FoundResult = Tuple[importlib.metadata.Distribution, Optional[BasePath]]
+
+    def __init__(self) -> None:
+        self._found_names: Set[NormalizedName] = set()
+
+    def _find_impl(self, location: str) -> Iterator[FoundResult]:
+        """Find distributions in a location."""
+        # Skip looking inside a wheel. Since a package inside a wheel is not
+        # always valid (due to .data directories etc.), its .dist-info entry
+        # should not be considered an installed distribution.
+        if _looks_like_wheel(location):
+            return
+        # To know exactly where we find a distribution, we have to feed in the
+        # paths one by one, instead of dumping the list to importlib.metadata.
+        for dist in importlib.metadata.distributions(path=[location]):
+            info_location = get_info_location(dist)
+            try:
+                raw_name = get_dist_name(dist)
+            except BadMetadata as e:
+                logger.warning("Skipping %s due to %s", info_location, e.reason)
+                continue
+            normalized_name = canonicalize_name(raw_name)
+            if normalized_name in self._found_names:
+                continue
+            self._found_names.add(normalized_name)
+            yield dist, info_location
+
+    def find(self, location: str) -> Iterator[BaseDistribution]:
+        """Find distributions in a location.
+
+        The path can be either a directory, or a ZIP archive.
+        """
+        for dist, info_location in self._find_impl(location):
+            if info_location is None:
+                installed_location: Optional[BasePath] = None
+            else:
+                installed_location = info_location.parent
+            yield Distribution(dist, info_location, installed_location)
+
+    def find_linked(self, location: str) -> Iterator[BaseDistribution]:
+        """Read location in egg-link files and return distributions in there.
+
+        The path should be a directory; otherwise this returns nothing. This
+        follows how setuptools does this for compatibility. The first non-empty
+        line in the egg-link is read as a path (resolved against the egg-link's
+        containing directory if relative). Distributions found at that linked
+        location are returned.
+        """
+        path = pathlib.Path(location)
+        if not path.is_dir():
+            return
+        for child in path.iterdir():
+            if child.suffix != ".egg-link":
+                continue
+            with child.open() as f:
+                lines = (line.strip() for line in f)
+                target_rel = next((line for line in lines if line), "")
+            if not target_rel:
+                continue
+            target_location = str(path.joinpath(target_rel))
+            for dist, info_location in self._find_impl(target_location):
+                yield Distribution(dist, info_location, path)
+
+    def _find_eggs_in_dir(self, location: str) -> Iterator[BaseDistribution]:
+        from pip._vendor.pkg_resources import find_distributions
+
+        from pip._internal.metadata import pkg_resources as legacy
+
+        with os.scandir(location) as it:
+            for entry in it:
+                if not entry.name.endswith(".egg"):
+                    continue
+                for dist in find_distributions(entry.path):
+                    yield legacy.Distribution(dist)
+
+    def _find_eggs_in_zip(self, location: str) -> Iterator[BaseDistribution]:
+        from pip._vendor.pkg_resources import find_eggs_in_zip
+
+        from pip._internal.metadata import pkg_resources as legacy
+
+        try:
+            importer = zipimport.zipimporter(location)
+        except zipimport.ZipImportError:
+            return
+        for dist in find_eggs_in_zip(importer, location):
+            yield legacy.Distribution(dist)
+
+    def find_eggs(self, location: str) -> Iterator[BaseDistribution]:
+        """Find eggs in a location.
+
+        This actually uses the old *pkg_resources* backend. We likely want to
+        deprecate this so we can eventually remove the *pkg_resources*
+        dependency entirely. Before that, this should first emit a deprecation
+        warning for some versions when using the fallback since importing
+        *pkg_resources* is slow for those who don't need it.
+        """
+        if os.path.isdir(location):
+            yield from self._find_eggs_in_dir(location)
+        if zipfile.is_zipfile(location):
+            yield from self._find_eggs_in_zip(location)
+
+
+@functools.lru_cache(maxsize=None)  # Warn a distribution exactly once.
+def _emit_egg_deprecation(location: Optional[str]) -> None:
+    deprecated(
+        reason=f"Loading egg at {location} is deprecated.",
+        replacement="to use pip for package installation.",
+        gone_in="24.3",
+        issue=12330,
+    )
+
+
+class Environment(BaseEnvironment):
+    def __init__(self, paths: Sequence[str]) -> None:
+        self._paths = paths
+
+    @classmethod
+    def default(cls) -> BaseEnvironment:
+        return cls(sys.path)
+
+    @classmethod
+    def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment:
+        if paths is None:
+            return cls(sys.path)
+        return cls(paths)
+
+    def _iter_distributions(self) -> Iterator[BaseDistribution]:
+        finder = _DistributionFinder()
+        for location in self._paths:
+            yield from finder.find(location)
+            for dist in finder.find_eggs(location):
+                _emit_egg_deprecation(dist.location)
+                yield dist
+            # This must go last because that's how pkg_resources tie-breaks.
+            yield from finder.find_linked(location)
+
+    def get_distribution(self, name: str) -> Optional[BaseDistribution]:
+        matches = (
+            distribution
+            for distribution in self.iter_all_distributions()
+            if distribution.canonical_name == canonicalize_name(name)
+        )
+        return next(matches, None)
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/metadata/pkg_resources.py b/.venv/lib/python3.12/site-packages/pip/_internal/metadata/pkg_resources.py
new file mode 100644
index 0000000..bb11e5b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/metadata/pkg_resources.py
@@ -0,0 +1,278 @@
+import email.message
+import email.parser
+import logging
+import os
+import zipfile
+from typing import Collection, Iterable, Iterator, List, Mapping, NamedTuple, Optional
+
+from pip._vendor import pkg_resources
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.exceptions import InvalidWheel, NoneMetadataError, UnsupportedWheel
+from pip._internal.utils.egg_link import egg_link_path_from_location
+from pip._internal.utils.misc import display_path, normalize_path
+from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file
+
+from .base import (
+    BaseDistribution,
+    BaseEntryPoint,
+    BaseEnvironment,
+    DistributionVersion,
+    InfoPath,
+    Wheel,
+)
+
+__all__ = ["NAME", "Distribution", "Environment"]
+
+logger = logging.getLogger(__name__)
+
+NAME = "pkg_resources"
+
+
+class EntryPoint(NamedTuple):
+    name: str
+    value: str
+    group: str
+
+
+class InMemoryMetadata:
+    """IMetadataProvider that reads metadata files from a dictionary.
+
+    This also maps metadata decoding exceptions to our internal exception type.
+    """
+
+    def __init__(self, metadata: Mapping[str, bytes], wheel_name: str) -> None:
+        self._metadata = metadata
+        self._wheel_name = wheel_name
+
+    def has_metadata(self, name: str) -> bool:
+        return name in self._metadata
+
+    def get_metadata(self, name: str) -> str:
+        try:
+            return self._metadata[name].decode()
+        except UnicodeDecodeError as e:
+            # Augment the default error with the origin of the file.
+            raise UnsupportedWheel(
+                f"Error decoding metadata for {self._wheel_name}: {e} in {name} file"
+            )
+
+    def get_metadata_lines(self, name: str) -> Iterable[str]:
+        return pkg_resources.yield_lines(self.get_metadata(name))
+
+    def metadata_isdir(self, name: str) -> bool:
+        return False
+
+    def metadata_listdir(self, name: str) -> List[str]:
+        return []
+
+    def run_script(self, script_name: str, namespace: str) -> None:
+        pass
+
+
+class Distribution(BaseDistribution):
+    def __init__(self, dist: pkg_resources.Distribution) -> None:
+        self._dist = dist
+
+    @classmethod
+    def from_directory(cls, directory: str) -> BaseDistribution:
+        dist_dir = directory.rstrip(os.sep)
+
+        # Build a PathMetadata object, from path to metadata. :wink:
+        base_dir, dist_dir_name = os.path.split(dist_dir)
+        metadata = pkg_resources.PathMetadata(base_dir, dist_dir)
+
+        # Determine the correct Distribution object type.
+        if dist_dir.endswith(".egg-info"):
+            dist_cls = pkg_resources.Distribution
+            dist_name = os.path.splitext(dist_dir_name)[0]
+        else:
+            assert dist_dir.endswith(".dist-info")
+            dist_cls = pkg_resources.DistInfoDistribution
+            dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]
+
+        dist = dist_cls(base_dir, project_name=dist_name, metadata=metadata)
+        return cls(dist)
+
+    @classmethod
+    def from_metadata_file_contents(
+        cls,
+        metadata_contents: bytes,
+        filename: str,
+        project_name: str,
+    ) -> BaseDistribution:
+        metadata_dict = {
+            "METADATA": metadata_contents,
+        }
+        dist = pkg_resources.DistInfoDistribution(
+            location=filename,
+            metadata=InMemoryMetadata(metadata_dict, filename),
+            project_name=project_name,
+        )
+        return cls(dist)
+
+    @classmethod
+    def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution:
+        try:
+            with wheel.as_zipfile() as zf:
+                info_dir, _ = parse_wheel(zf, name)
+                metadata_dict = {
+                    path.split("/", 1)[-1]: read_wheel_metadata_file(zf, path)
+                    for path in zf.namelist()
+                    if path.startswith(f"{info_dir}/")
+                }
+        except zipfile.BadZipFile as e:
+            raise InvalidWheel(wheel.location, name) from e
+        except UnsupportedWheel as e:
+            raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")
+        dist = pkg_resources.DistInfoDistribution(
+            location=wheel.location,
+            metadata=InMemoryMetadata(metadata_dict, wheel.location),
+            project_name=name,
+        )
+        return cls(dist)
+
+    @property
+    def location(self) -> Optional[str]:
+        return self._dist.location
+
+    @property
+    def installed_location(self) -> Optional[str]:
+        egg_link = egg_link_path_from_location(self.raw_name)
+        if egg_link:
+            location = egg_link
+        elif self.location:
+            location = self.location
+        else:
+            return None
+        return normalize_path(location)
+
+    @property
+    def info_location(self) -> Optional[str]:
+        return self._dist.egg_info
+
+    @property
+    def installed_by_distutils(self) -> bool:
+        # A distutils-installed distribution is provided by FileMetadata. This
+        # provider has a "path" attribute not present anywhere else. Not the
+        # best introspection logic, but pip has been doing this for a long time.
+        try:
+            return bool(self._dist._provider.path)
+        except AttributeError:
+            return False
+
+    @property
+    def canonical_name(self) -> NormalizedName:
+        return canonicalize_name(self._dist.project_name)
+
+    @property
+    def version(self) -> DistributionVersion:
+        return parse_version(self._dist.version)
+
+    def is_file(self, path: InfoPath) -> bool:
+        return self._dist.has_metadata(str(path))
+
+    def iter_distutils_script_names(self) -> Iterator[str]:
+        yield from self._dist.metadata_listdir("scripts")
+
+    def read_text(self, path: InfoPath) -> str:
+        name = str(path)
+        if not self._dist.has_metadata(name):
+            raise FileNotFoundError(name)
+        content = self._dist.get_metadata(name)
+        if content is None:
+            raise NoneMetadataError(self, name)
+        return content
+
+    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
+        for group, entries in self._dist.get_entry_map().items():
+            for name, entry_point in entries.items():
+                name, _, value = str(entry_point).partition("=")
+                yield EntryPoint(name=name.strip(), value=value.strip(), group=group)
+
+    def _metadata_impl(self) -> email.message.Message:
+        """
+        :raises NoneMetadataError: if the distribution reports `has_metadata()`
+            True but `get_metadata()` returns None.
+        """
+        if isinstance(self._dist, pkg_resources.DistInfoDistribution):
+            metadata_name = "METADATA"
+        else:
+            metadata_name = "PKG-INFO"
+        try:
+            metadata = self.read_text(metadata_name)
+        except FileNotFoundError:
+            if self.location:
+                displaying_path = display_path(self.location)
+            else:
+                displaying_path = repr(self.location)
+            logger.warning("No metadata found in %s", displaying_path)
+            metadata = ""
+        feed_parser = email.parser.FeedParser()
+        feed_parser.feed(metadata)
+        return feed_parser.close()
+
+    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
+        if extras:  # pkg_resources raises on invalid extras, so we sanitize.
+            extras = frozenset(pkg_resources.safe_extra(e) for e in extras)
+            extras = extras.intersection(self._dist.extras)
+        return self._dist.requires(extras)
+
+    def iter_provided_extras(self) -> Iterable[str]:
+        return self._dist.extras
+
+    def is_extra_provided(self, extra: str) -> bool:
+        return pkg_resources.safe_extra(extra) in self._dist.extras
+
+
+class Environment(BaseEnvironment):
+    def __init__(self, ws: pkg_resources.WorkingSet) -> None:
+        self._ws = ws
+
+    @classmethod
+    def default(cls) -> BaseEnvironment:
+        return cls(pkg_resources.working_set)
+
+    @classmethod
+    def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment:
+        return cls(pkg_resources.WorkingSet(paths))
+
+    def _iter_distributions(self) -> Iterator[BaseDistribution]:
+        for dist in self._ws:
+            yield Distribution(dist)
+
+    def _search_distribution(self, name: str) -> Optional[BaseDistribution]:
+        """Find a distribution matching the ``name`` in the environment.
+
+        This searches from *all* distributions available in the environment, to
+        match the behavior of ``pkg_resources.get_distribution()``.
+        """
+        canonical_name = canonicalize_name(name)
+        for dist in self.iter_all_distributions():
+            if dist.canonical_name == canonical_name:
+                return dist
+        return None
+
+    def get_distribution(self, name: str) -> Optional[BaseDistribution]:
+        # Search the distribution by looking through the working set.
+        dist = self._search_distribution(name)
+        if dist:
+            return dist
+
+        # If distribution could not be found, call working_set.require to
+        # update the working set, and try to find the distribution again.
+        # This might happen, e.g., when you install a package twice, once
+        # using setup.py develop and again using setup.py install. Now when
+        # running pip uninstall twice, the package gets removed from the
+        # working set in the first uninstall, so we have to populate the
+        # working set again so that pip knows about it and the package gets
+        # picked up and is successfully uninstalled the second time too.
+        try:
+            # We didn't pass in any version specifiers, so this can never
+            # raise pkg_resources.VersionConflict.
+            self._ws.require(name)
+        except pkg_resources.DistributionNotFound:
+            return None
+        return self._search_distribution(name)
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/models/__init__.py b/.venv/lib/python3.12/site-packages/pip/_internal/models/__init__.py
new file mode 100644
index 0000000..7855226
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/models/__init__.py
@@ -0,0 +1,2 @@
+"""A package that contains models that represent entities.
+"""
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/models/candidate.py b/.venv/lib/python3.12/site-packages/pip/_internal/models/candidate.py
new file mode 100644
index 0000000..9184a90
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/models/candidate.py
@@ -0,0 +1,30 @@
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.models.link import Link
+from pip._internal.utils.models import KeyBasedCompareMixin
+
+
+class InstallationCandidate(KeyBasedCompareMixin):
+    """Represents a potential "candidate" for installation."""
+
+    __slots__ = ["name", "version", "link"]
+
+    def __init__(self, name: str, version: str, link: Link) -> None:
+        self.name = name
+        self.version = parse_version(version)
+        self.link = link
+
+        super().__init__(
+            key=(self.name, self.version, self.link),
+            defining_class=InstallationCandidate,
+        )
+
+    def __repr__(self) -> str:
+        return "".format(
+            self.name,
+            self.version,
+            self.link,
+        )
+
+    def __str__(self) -> str:
+        return f"{self.name!r} candidate (version {self.version} at {self.link})"
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/models/direct_url.py b/.venv/lib/python3.12/site-packages/pip/_internal/models/direct_url.py
new file mode 100644
index 0000000..0af884b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/models/direct_url.py
@@ -0,0 +1,235 @@
+""" PEP 610 """
+import json
+import re
+import urllib.parse
+from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union
+
+__all__ = [
+    "DirectUrl",
+    "DirectUrlValidationError",
+    "DirInfo",
+    "ArchiveInfo",
+    "VcsInfo",
+]
+
+T = TypeVar("T")
+
+DIRECT_URL_METADATA_NAME = "direct_url.json"
+ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$")
+
+
+class DirectUrlValidationError(Exception):
+    pass
+
+
+def _get(
+    d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
+) -> Optional[T]:
+    """Get value from dictionary and verify expected type."""
+    if key not in d:
+        return default
+    value = d[key]
+    if not isinstance(value, expected_type):
+        raise DirectUrlValidationError(
+            f"{value!r} has unexpected type for {key} (expected {expected_type})"
+        )
+    return value
+
+
+def _get_required(
+    d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
+) -> T:
+    value = _get(d, expected_type, key, default)
+    if value is None:
+        raise DirectUrlValidationError(f"{key} must have a value")
+    return value
+
+
+def _exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType":
+    infos = [info for info in infos if info is not None]
+    if not infos:
+        raise DirectUrlValidationError(
+            "missing one of archive_info, dir_info, vcs_info"
+        )
+    if len(infos) > 1:
+        raise DirectUrlValidationError(
+            "more than one of archive_info, dir_info, vcs_info"
+        )
+    assert infos[0] is not None
+    return infos[0]
+
+
+def _filter_none(**kwargs: Any) -> Dict[str, Any]:
+    """Make dict excluding None values."""
+    return {k: v for k, v in kwargs.items() if v is not None}
+
+
+class VcsInfo:
+    name = "vcs_info"
+
+    def __init__(
+        self,
+        vcs: str,
+        commit_id: str,
+        requested_revision: Optional[str] = None,
+    ) -> None:
+        self.vcs = vcs
+        self.requested_revision = requested_revision
+        self.commit_id = commit_id
+
+    @classmethod
+    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]:
+        if d is None:
+            return None
+        return cls(
+            vcs=_get_required(d, str, "vcs"),
+            commit_id=_get_required(d, str, "commit_id"),
+            requested_revision=_get(d, str, "requested_revision"),
+        )
+
+    def _to_dict(self) -> Dict[str, Any]:
+        return _filter_none(
+            vcs=self.vcs,
+            requested_revision=self.requested_revision,
+            commit_id=self.commit_id,
+        )
+
+
+class ArchiveInfo:
+    name = "archive_info"
+
+    def __init__(
+        self,
+        hash: Optional[str] = None,
+        hashes: Optional[Dict[str, str]] = None,
+    ) -> None:
+        # set hashes before hash, since the hash setter will further populate hashes
+        self.hashes = hashes
+        self.hash = hash
+
+    @property
+    def hash(self) -> Optional[str]:
+        return self._hash
+
+    @hash.setter
+    def hash(self, value: Optional[str]) -> None:
+        if value is not None:
+            # Auto-populate the hashes key to upgrade to the new format automatically.
+            # We don't back-populate the legacy hash key from hashes.
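+            # e.g. (illustrative) a legacy value "sha256=deadbeef" adds
+            # {"sha256": "deadbeef"} to ``hashes`` if not already present.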
+            try:
+                hash_name, hash_value = value.split("=", 1)
+            except ValueError:
+                raise DirectUrlValidationError(
+                    f"invalid archive_info.hash format: {value!r}"
+                )
+            if self.hashes is None:
+                self.hashes = {hash_name: hash_value}
+            elif hash_name not in self.hashes:
+                self.hashes = self.hashes.copy()
+                self.hashes[hash_name] = hash_value
+        self._hash = value
+
+    @classmethod
+    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:
+        if d is None:
+            return None
+        return cls(hash=_get(d, str, "hash"), hashes=_get(d, dict, "hashes"))
+
+    def _to_dict(self) -> Dict[str, Any]:
+        return _filter_none(hash=self.hash, hashes=self.hashes)
+
+
+class DirInfo:
+    name = "dir_info"
+
+    def __init__(
+        self,
+        editable: bool = False,
+    ) -> None:
+        self.editable = editable
+
+    @classmethod
+    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]:
+        if d is None:
+            return None
+        return cls(editable=_get_required(d, bool, "editable", default=False))
+
+    def _to_dict(self) -> Dict[str, Any]:
+        return _filter_none(editable=self.editable or None)
+
+
+InfoType = Union[ArchiveInfo, DirInfo, VcsInfo]
+
+
+class DirectUrl:
+    def __init__(
+        self,
+        url: str,
+        info: InfoType,
+        subdirectory: Optional[str] = None,
+    ) -> None:
+        self.url = url
+        self.info = info
+        self.subdirectory = subdirectory
+
+    def _remove_auth_from_netloc(self, netloc: str) -> str:
+        if "@" not in netloc:
+            return netloc
+        user_pass, netloc_no_user_pass = netloc.split("@", 1)
+        if (
+            isinstance(self.info, VcsInfo)
+            and self.info.vcs == "git"
+            and user_pass == "git"
+        ):
+            return netloc
+        if ENV_VAR_RE.match(user_pass):
+            return netloc
+        return netloc_no_user_pass
+
+    @property
+    def redacted_url(self) -> str:
+        """url with user:password part removed unless it is formed with
+        environment variables as specified in PEP 610, or it is ``git``
+        in the case of a git URL.
+        """
+        purl = urllib.parse.urlsplit(self.url)
+        netloc = self._remove_auth_from_netloc(purl.netloc)
+        surl = urllib.parse.urlunsplit(
+            (purl.scheme, netloc, purl.path, purl.query, purl.fragment)
+        )
+        return surl
+
+    def validate(self) -> None:
+        self.from_dict(self.to_dict())
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl":
+        return DirectUrl(
+            url=_get_required(d, str, "url"),
+            subdirectory=_get(d, str, "subdirectory"),
+            info=_exactly_one_of(
+                [
+                    ArchiveInfo._from_dict(_get(d, dict, "archive_info")),
+                    DirInfo._from_dict(_get(d, dict, "dir_info")),
+                    VcsInfo._from_dict(_get(d, dict, "vcs_info")),
+                ]
+            ),
+        )
+
+    def to_dict(self) -> Dict[str, Any]:
+        res = _filter_none(
+            url=self.redacted_url,
+            subdirectory=self.subdirectory,
+        )
+        res[self.info.name] = self.info._to_dict()
+        return res
+
+    @classmethod
+    def from_json(cls, s: str) -> "DirectUrl":
+        return cls.from_dict(json.loads(s))
+
+    def to_json(self) -> str:
+        return json.dumps(self.to_dict(), sort_keys=True)
+
+    def is_local_editable(self) -> bool:
+        return isinstance(self.info, DirInfo) and self.info.editable
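A minimal usage sketch of the classes above (assuming the vendored module is importable as pip._internal.models.direct_url, as in upstream pip; the URL and hash values are made up for illustration):

from pip._internal.models.direct_url import ArchiveInfo, DirectUrl

# The hash setter auto-populates the newer `hashes` mapping from the legacy value.
info = ArchiveInfo(hash="sha256=0123abcd")
assert info.hashes == {"sha256": "0123abcd"}

direct_url = DirectUrl(url="https://user:secret@example.com/pkg-1.0.tar.gz", info=info)
assert direct_url.redacted_url == "https://example.com/pkg-1.0.tar.gz"  # auth stripped
direct_url.validate()  # round-trips through to_dict()/from_dict() without raising
print(direct_url.to_json())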
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/models/format_control.py b/.venv/lib/python3.12/site-packages/pip/_internal/models/format_control.py
new file mode 100644
index 0000000..ccd1127
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/models/format_control.py
@@ -0,0 +1,78 @@
+from typing import FrozenSet, Optional, Set
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.exceptions import CommandError
+
+
+class FormatControl:
+    """Helper for managing formats from which a package can be installed."""
+
+    __slots__ = ["no_binary", "only_binary"]
+
+    def __init__(
+        self,
+        no_binary: Optional[Set[str]] = None,
+        only_binary: Optional[Set[str]] = None,
+    ) -> None:
+        if no_binary is None:
+            no_binary = set()
+        if only_binary is None:
+            only_binary = set()
+
+        self.no_binary = no_binary
+        self.only_binary = only_binary
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, self.__class__):
+            return NotImplemented
+
+        if self.__slots__ != other.__slots__:
+            return False
+
+        return all(getattr(self, k) == getattr(other, k) for k in self.__slots__)
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({self.no_binary}, {self.only_binary})"
+
+    @staticmethod
+    def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None:
+        if value.startswith("-"):
+            raise CommandError(
+                "--no-binary / --only-binary option requires 1 argument."
+            )
+        new = value.split(",")
+        while ":all:" in new:
+            other.clear()
+            target.clear()
+            target.add(":all:")
+            del new[: new.index(":all:") + 1]
+            # Without a :none:, we want to discard everything, as :all: covers it
+            if ":none:" not in new:
+                return
+        for name in new:
+            if name == ":none:":
+                target.clear()
+                continue
+            name = canonicalize_name(name)
+            other.discard(name)
+            target.add(name)
+
+    def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]:
+        result = {"binary", "source"}
+        if canonical_name in self.only_binary:
+            result.discard("source")
+        elif canonical_name in self.no_binary:
+            result.discard("binary")
+        elif ":all:" in self.only_binary:
+            result.discard("source")
+        elif ":all:" in self.no_binary:
+            result.discard("binary")
+        return frozenset(result)
+
+    def disallow_binaries(self) -> None:
+        self.handle_mutual_excludes(
+            ":all:",
+            self.no_binary,
+            self.only_binary,
+        )
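A small sketch of the mutual-exclusion logic above (module path assumed to be pip._internal.models.format_control, per upstream pip; package names are illustrative):

from pip._internal.models.format_control import FormatControl

fc = FormatControl(no_binary=set(), only_binary=set())
fc.disallow_binaries()                    # equivalent to --no-binary :all:
assert fc.no_binary == {":all:"} and fc.only_binary == set()
assert fc.get_allowed_formats("requests") == frozenset({"source"})

# ":none:" clears the blanket exclusion; "numpy" is then re-added to no_binary.
FormatControl.handle_mutual_excludes(":none:,numpy", fc.no_binary, fc.only_binary)
assert fc.get_allowed_formats("numpy") == frozenset({"source"})
assert fc.get_allowed_formats("requests") == frozenset({"binary", "source"})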
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/models/index.py b/.venv/lib/python3.12/site-packages/pip/_internal/models/index.py
new file mode 100644
index 0000000..b94c325
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/models/index.py
@@ -0,0 +1,28 @@
+import urllib.parse
+
+
+class PackageIndex:
+    """Represents a Package Index and provides easier access to endpoints"""
+
+    __slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"]
+
+    def __init__(self, url: str, file_storage_domain: str) -> None:
+        super().__init__()
+        self.url = url
+        self.netloc = urllib.parse.urlsplit(url).netloc
+        self.simple_url = self._url_for_path("simple")
+        self.pypi_url = self._url_for_path("pypi")
+
+        # This is part of a temporary hack used to block installs of PyPI
+        # packages which depend on external urls only necessary until PyPI can
+        # block such packages themselves
+        self.file_storage_domain = file_storage_domain
+
+    def _url_for_path(self, path: str) -> str:
+        return urllib.parse.urljoin(self.url, path)
+
+
+PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org")
+TestPyPI = PackageIndex(
+    "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org"
+)
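The module-level PyPI/TestPyPI singletons above expose the derived endpoints, e.g. (module path assumed per upstream pip):

from pip._internal.models.index import PyPI, TestPyPI

assert PyPI.netloc == "pypi.org"
assert PyPI.simple_url == "https://pypi.org/simple"      # urljoin of the base URL and "simple"
assert TestPyPI.pypi_url == "https://test.pypi.org/pypi"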
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/models/installation_report.py b/.venv/lib/python3.12/site-packages/pip/_internal/models/installation_report.py
new file mode 100644
index 0000000..b9c6330
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/models/installation_report.py
@@ -0,0 +1,56 @@
+from typing import Any, Dict, Sequence
+
+from pip._vendor.packaging.markers import default_environment
+
+from pip import __version__
+from pip._internal.req.req_install import InstallRequirement
+
+
+class InstallationReport:
+    def __init__(self, install_requirements: Sequence[InstallRequirement]):
+        self._install_requirements = install_requirements
+
+    @classmethod
+    def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]:
+        assert ireq.download_info, f"No download_info for {ireq}"
+        res = {
+            # PEP 610 json for the download URL. download_info.archive_info.hashes may
+            # be absent when the requirement was installed from the wheel cache
+            # and the cache entry was populated by an older pip version that did not
+            # record origin.json.
+            "download_info": ireq.download_info.to_dict(),
+            # is_direct is true if the requirement was a direct URL reference (which
+            # includes editable requirements), and false if the requirement was
+            # downloaded from a PEP 503 index or --find-links.
+            "is_direct": ireq.is_direct,
+            # is_yanked is true if the requirement was yanked from the index, but
+            # was still selected by pip to conform to PEP 592.
+            "is_yanked": ireq.link.is_yanked if ireq.link else False,
+            # requested is true if the requirement was specified by the user (aka
+            # top level requirement), and false if it was installed as a dependency of a
+            # requirement. https://peps.python.org/pep-0376/#requested
+            "requested": ireq.user_supplied,
+            # PEP 566 json encoding for metadata
+            # https://www.python.org/dev/peps/pep-0566/#json-compatible-metadata
+            "metadata": ireq.get_dist().metadata_dict,
+        }
+        if ireq.user_supplied and ireq.extras:
+            # For top level requirements, the list of requested extras, if any.
+            res["requested_extras"] = sorted(ireq.extras)
+        return res
+
+    def to_dict(self) -> Dict[str, Any]:
+        return {
+            "version": "1",
+            "pip_version": __version__,
+            "install": [
+                self._install_req_to_dict(ireq) for ireq in self._install_requirements
+            ],
+            # https://peps.python.org/pep-0508/#environment-markers
+            # TODO: currently, the resolver uses the default environment to evaluate
+            # environment markers, so that is what we report here. In the future, it
+            # should also take into account options such as --python-version or
+            # --platform, perhaps under the form of an environment_override field?
+            # https://github.com/pypa/pip/issues/11198
+            "environment": default_environment(),
+        }
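A hedged sketch of the report shape produced by to_dict() above; the keys follow the code, but the concrete values are made up for illustration and do not come from a real run:

# Illustrative report structure (values are placeholders, not real data).
report = {
    "version": "1",
    "pip_version": "24.0",
    "install": [
        {
            "download_info": {"url": "https://files.example.org/pkg-1.0-py3-none-any.whl",
                              "archive_info": {"hashes": {"sha256": "0123abcd"}}},
            "is_direct": False,   # came from an index, not a direct URL requirement
            "is_yanked": False,
            "requested": True,    # a top-level (user-supplied) requirement
            "metadata": {"name": "pkg", "version": "1.0"},
        }
    ],
    "environment": {"python_version": "3.12"},  # subset of default_environment() markers
}
top_level = [i["metadata"]["name"] for i in report["install"] if i["requested"]]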
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/models/link.py b/.venv/lib/python3.12/site-packages/pip/_internal/models/link.py
new file mode 100644
index 0000000..73041b8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/models/link.py
@@ -0,0 +1,579 @@
+import functools
+import itertools
+import logging
+import os
+import posixpath
+import re
+import urllib.parse
+from dataclasses import dataclass
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Dict,
+    List,
+    Mapping,
+    NamedTuple,
+    Optional,
+    Tuple,
+    Union,
+)
+
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.filetypes import WHEEL_EXTENSION
+from pip._internal.utils.hashes import Hashes
+from pip._internal.utils.misc import (
+    pairwise,
+    redact_auth_from_url,
+    split_auth_from_netloc,
+    splitext,
+)
+from pip._internal.utils.models import KeyBasedCompareMixin
+from pip._internal.utils.urls import path_to_url, url_to_path
+
+if TYPE_CHECKING:
+    from pip._internal.index.collector import IndexContent
+
+logger = logging.getLogger(__name__)
+
+
+# Order matters: earlier hashes take precedence over later hashes when
+# choosing which one to use.
+_SUPPORTED_HASHES = ("sha512", "sha384", "sha256", "sha224", "sha1", "md5")
+
+
+@dataclass(frozen=True)
+class LinkHash:
+    """Links to content may have embedded hash values. This class parses those.
+
+    `name` must be any member of `_SUPPORTED_HASHES`.
+
+    This class can be converted to and from `ArchiveInfo`. While ArchiveInfo intends to
+    be JSON-serializable to conform to PEP 610, this class contains the logic for
+    parsing a hash name and value for correctness, and then checking whether that hash
+    conforms to a schema with `.is_hash_allowed()`."""
+
+    name: str
+    value: str
+
+    _hash_url_fragment_re = re.compile(
+        # NB: we do not validate that the second group (.*) is a valid hex
+        # digest. Instead, we simply keep that string in this class, and then check it
+        # against Hashes when hash-checking is needed. This is easier to debug than
+        # proactively discarding an invalid hex digest, as we handle incorrect hashes
+        # and malformed hashes in the same place.
+        r"[#&]({choices})=([^&]*)".format(
+            choices="|".join(re.escape(hash_name) for hash_name in _SUPPORTED_HASHES)
+        ),
+    )
+
+    def __post_init__(self) -> None:
+        assert self.name in _SUPPORTED_HASHES
+
+    @classmethod
+    @functools.lru_cache(maxsize=None)
+    def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]:
+        """Search a string for a checksum algorithm name and encoded output value."""
+        match = cls._hash_url_fragment_re.search(url)
+        if match is None:
+            return None
+        name, value = match.groups()
+        return cls(name=name, value=value)
+
+    def as_dict(self) -> Dict[str, str]:
+        return {self.name: self.value}
+
+    def as_hashes(self) -> Hashes:
+        """Return a Hashes instance which checks only for the current hash."""
+        return Hashes({self.name: [self.value]})
+
+    def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
+        """
+        Return True if the current hash is allowed by `hashes`.
+        """
+        if hashes is None:
+            return False
+        return hashes.is_hash_allowed(self.name, hex_digest=self.value)
+
+
+@dataclass(frozen=True)
+class MetadataFile:
+    """Information about a core metadata file associated with a distribution."""
+
+    hashes: Optional[Dict[str, str]]
+
+    def __post_init__(self) -> None:
+        if self.hashes is not None:
+            assert all(name in _SUPPORTED_HASHES for name in self.hashes)
+
+
+def supported_hashes(hashes: Optional[Dict[str, str]]) -> Optional[Dict[str, str]]:
+    # Remove any unsupported hash types from the mapping. If this leaves no
+    # supported hashes, return None
+    if hashes is None:
+        return None
+    hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES}
+    if not hashes:
+        return None
+    return hashes
+
+
+def _clean_url_path_part(part: str) -> str:
+    """
+    Clean a "part" of a URL path (i.e. after splitting on "@" characters).
+    """
+    # We unquote prior to quoting to make sure nothing is double quoted.
+    return urllib.parse.quote(urllib.parse.unquote(part))
+
+
+def _clean_file_url_path(part: str) -> str:
+    """
+    Clean the first part of a URL path that corresponds to a local
+    filesystem path (i.e. the first part after splitting on "@" characters).
+    """
+    # We unquote prior to quoting to make sure nothing is double quoted.
+    # Also, on Windows the path part might contain a drive letter which
+    # should not be quoted. On Linux where drive letters do not
+    # exist, the colon should be quoted. We rely on urllib.request
+    # to do the right thing here.
+    return urllib.request.pathname2url(urllib.request.url2pathname(part))
+
+
+# percent-encoded:                   /
+_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE)
+
+
+def _clean_url_path(path: str, is_local_path: bool) -> str:
+    """
+    Clean the path portion of a URL.
+    """
+    if is_local_path:
+        clean_func = _clean_file_url_path
+    else:
+        clean_func = _clean_url_path_part
+
+    # Split on the reserved characters prior to cleaning so that
+    # revision strings in VCS URLs are properly preserved.
+    parts = _reserved_chars_re.split(path)
+
+    cleaned_parts = []
+    for to_clean, reserved in pairwise(itertools.chain(parts, [""])):
+        cleaned_parts.append(clean_func(to_clean))
+        # Normalize %xx escapes (e.g. %2f -> %2F)
+        cleaned_parts.append(reserved.upper())
+
+    return "".join(cleaned_parts)
+
+
+def _ensure_quoted_url(url: str) -> str:
+    """
+    Make sure a link is fully quoted.
+    For example, if ' ' occurs in the URL, it will be replaced with "%20",
+    and without double-quoting other characters.
+    """
+    # Split the URL into parts according to the general structure
+    # `scheme://netloc/path;parameters?query#fragment`.
+    result = urllib.parse.urlparse(url)
+    # If the netloc is empty, then the URL refers to a local filesystem path.
+    is_local_path = not result.netloc
+    path = _clean_url_path(result.path, is_local_path=is_local_path)
+    return urllib.parse.urlunparse(result._replace(path=path))
+
+
+class Link(KeyBasedCompareMixin):
+    """Represents a parsed link from a Package Index's simple URL"""
+
+    __slots__ = [
+        "_parsed_url",
+        "_url",
+        "_hashes",
+        "comes_from",
+        "requires_python",
+        "yanked_reason",
+        "metadata_file_data",
+        "cache_link_parsing",
+        "egg_fragment",
+    ]
+
+    def __init__(
+        self,
+        url: str,
+        comes_from: Optional[Union[str, "IndexContent"]] = None,
+        requires_python: Optional[str] = None,
+        yanked_reason: Optional[str] = None,
+        metadata_file_data: Optional[MetadataFile] = None,
+        cache_link_parsing: bool = True,
+        hashes: Optional[Mapping[str, str]] = None,
+    ) -> None:
+        """
+        :param url: url of the resource pointed to (href of the link)
+        :param comes_from: instance of IndexContent where the link was found,
+            or string.
+        :param requires_python: String containing the `Requires-Python`
+            metadata field, specified in PEP 345. This may be specified by
+            a data-requires-python attribute in the HTML link tag, as
+            described in PEP 503.
+        :param yanked_reason: the reason the file has been yanked, if the
+            file has been yanked, or None if the file hasn't been yanked.
+            This is the value of the "data-yanked" attribute, if present, in
+            a simple repository HTML link. If the file has been yanked but
+            no reason was provided, this should be the empty string. See
+            PEP 592 for more information and the specification.
+        :param metadata_file_data: the metadata attached to the file, or None if
+            no such metadata is provided. This argument, if not None, indicates
+            that a separate metadata file exists, and also optionally supplies
+            hashes for that file.
+        :param cache_link_parsing: A flag that is used elsewhere to determine
+            whether resources retrieved from this link should be cached. PyPI
+            URLs should generally have this set to False, for example.
+        :param hashes: A mapping of hash names to digests to allow us to
+            determine the validity of a download.
+        """
+
+        # The comes_from, requires_python, and metadata_file_data arguments are
+        # only used by classmethods of this class, and are not used in client
+        # code directly.
+
+        # url can be a UNC windows share
+        if url.startswith("\\\\"):
+            url = path_to_url(url)
+
+        self._parsed_url = urllib.parse.urlsplit(url)
+        # Store the url as a private attribute to prevent accidentally
+        # trying to set a new value.
+        self._url = url
+
+        link_hash = LinkHash.find_hash_url_fragment(url)
+        hashes_from_link = {} if link_hash is None else link_hash.as_dict()
+        if hashes is None:
+            self._hashes = hashes_from_link
+        else:
+            self._hashes = {**hashes, **hashes_from_link}
+
+        self.comes_from = comes_from
+        self.requires_python = requires_python if requires_python else None
+        self.yanked_reason = yanked_reason
+        self.metadata_file_data = metadata_file_data
+
+        super().__init__(key=url, defining_class=Link)
+
+        self.cache_link_parsing = cache_link_parsing
+        self.egg_fragment = self._egg_fragment()
+
+    @classmethod
+    def from_json(
+        cls,
+        file_data: Dict[str, Any],
+        page_url: str,
+    ) -> Optional["Link"]:
+        """
+        Convert a PyPI JSON document from a simple repository page into a Link.
+        """
+        file_url = file_data.get("url")
+        if file_url is None:
+            return None
+
+        url = _ensure_quoted_url(urllib.parse.urljoin(page_url, file_url))
+        pyrequire = file_data.get("requires-python")
+        yanked_reason = file_data.get("yanked")
+        hashes = file_data.get("hashes", {})
+
+        # PEP 714: Indexes must use the name core-metadata, but
+        # clients should support the old name as a fallback for compatibility.
+        metadata_info = file_data.get("core-metadata")
+        if metadata_info is None:
+            metadata_info = file_data.get("dist-info-metadata")
+
+        # The metadata info value may be a boolean, or a dict of hashes.
+        if isinstance(metadata_info, dict):
+            # The file exists, and hashes have been supplied
+            metadata_file_data = MetadataFile(supported_hashes(metadata_info))
+        elif metadata_info:
+            # The file exists, but there are no hashes
+            metadata_file_data = MetadataFile(None)
+        else:
+            # False or not present: the file does not exist
+            metadata_file_data = None
+
+        # The Link.yanked_reason expects an empty string instead of a boolean.
+        if yanked_reason and not isinstance(yanked_reason, str):
+            yanked_reason = ""
+        # The Link.yanked_reason expects None instead of False.
+        elif not yanked_reason:
+            yanked_reason = None
+
+        return cls(
+            url,
+            comes_from=page_url,
+            requires_python=pyrequire,
+            yanked_reason=yanked_reason,
+            hashes=hashes,
+            metadata_file_data=metadata_file_data,
+        )
+
+    @classmethod
+    def from_element(
+        cls,
+        anchor_attribs: Dict[str, Optional[str]],
+        page_url: str,
+        base_url: str,
+    ) -> Optional["Link"]:
+        """
+        Convert an anchor element's attributes in a simple repository page to a Link.
+        """
+        href = anchor_attribs.get("href")
+        if not href:
+            return None
+
+        url = _ensure_quoted_url(urllib.parse.urljoin(base_url, href))
+        pyrequire = anchor_attribs.get("data-requires-python")
+        yanked_reason = anchor_attribs.get("data-yanked")
+
+        # PEP 714: Indexes must use the name data-core-metadata, but
+        # clients should support the old name as a fallback for compatibility.
+        metadata_info = anchor_attribs.get("data-core-metadata")
+        if metadata_info is None:
+            metadata_info = anchor_attribs.get("data-dist-info-metadata")
+        # The metadata info value may be the string "true", or a string of
+        # the form "hashname=hashval"
+        if metadata_info == "true":
+            # The file exists, but there are no hashes
+            metadata_file_data = MetadataFile(None)
+        elif metadata_info is None:
+            # The file does not exist
+            metadata_file_data = None
+        else:
+            # The file exists, and hashes have been supplied
+            hashname, sep, hashval = metadata_info.partition("=")
+            if sep == "=":
+                metadata_file_data = MetadataFile(supported_hashes({hashname: hashval}))
+            else:
+                # Error - data is wrong. Treat as no hashes supplied.
+                logger.debug(
+                    "Index returned invalid data-dist-info-metadata value: %s",
+                    metadata_info,
+                )
+                metadata_file_data = MetadataFile(None)
+
+        return cls(
+            url,
+            comes_from=page_url,
+            requires_python=pyrequire,
+            yanked_reason=yanked_reason,
+            metadata_file_data=metadata_file_data,
+        )
+
+    def __str__(self) -> str:
+        if self.requires_python:
+            rp = f" (requires-python:{self.requires_python})"
+        else:
+            rp = ""
+        if self.comes_from:
+            return f"{redact_auth_from_url(self._url)} (from {self.comes_from}){rp}"
+        else:
+            return redact_auth_from_url(str(self._url))
+
+    def __repr__(self) -> str:
+        return f"<Link {self}>"
+
+    @property
+    def url(self) -> str:
+        return self._url
+
+    @property
+    def filename(self) -> str:
+        path = self.path.rstrip("/")
+        name = posixpath.basename(path)
+        if not name:
+            # Make sure we don't leak auth information if the netloc
+            # includes a username and password.
+            netloc, user_pass = split_auth_from_netloc(self.netloc)
+            return netloc
+
+        name = urllib.parse.unquote(name)
+        assert name, f"URL {self._url!r} produced no filename"
+        return name
+
+    @property
+    def file_path(self) -> str:
+        return url_to_path(self.url)
+
+    @property
+    def scheme(self) -> str:
+        return self._parsed_url.scheme
+
+    @property
+    def netloc(self) -> str:
+        """
+        This can contain auth information.
+        """
+        return self._parsed_url.netloc
+
+    @property
+    def path(self) -> str:
+        return urllib.parse.unquote(self._parsed_url.path)
+
+    def splitext(self) -> Tuple[str, str]:
+        return splitext(posixpath.basename(self.path.rstrip("/")))
+
+    @property
+    def ext(self) -> str:
+        return self.splitext()[1]
+
+    @property
+    def url_without_fragment(self) -> str:
+        scheme, netloc, path, query, fragment = self._parsed_url
+        return urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
+
+    _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")
+
+    # Per PEP 508.
+    _project_name_re = re.compile(
+        r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
+    )
+
+    def _egg_fragment(self) -> Optional[str]:
+        match = self._egg_fragment_re.search(self._url)
+        if not match:
+            return None
+
+        # An egg fragment looks like a PEP 508 project name, along with
+        # an optional extras specifier. Anything else is invalid.
+        project_name = match.group(1)
+        if not self._project_name_re.match(project_name):
+            deprecated(
+                reason=f"{self} contains an egg fragment with a non-PEP 508 name",
+                replacement="to use the req @ url syntax, and remove the egg fragment",
+                gone_in="25.0",
+                issue=11617,
+            )
+
+        return project_name
+
+    _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")
+
+    @property
+    def subdirectory_fragment(self) -> Optional[str]:
+        match = self._subdirectory_fragment_re.search(self._url)
+        if not match:
+            return None
+        return match.group(1)
+
+    def metadata_link(self) -> Optional["Link"]:
+        """Return a link to the associated core metadata file (if any)."""
+        if self.metadata_file_data is None:
+            return None
+        metadata_url = f"{self.url_without_fragment}.metadata"
+        if self.metadata_file_data.hashes is None:
+            return Link(metadata_url)
+        return Link(metadata_url, hashes=self.metadata_file_data.hashes)
+
+    def as_hashes(self) -> Hashes:
+        return Hashes({k: [v] for k, v in self._hashes.items()})
+
+    @property
+    def hash(self) -> Optional[str]:
+        return next(iter(self._hashes.values()), None)
+
+    @property
+    def hash_name(self) -> Optional[str]:
+        return next(iter(self._hashes), None)
+
+    @property
+    def show_url(self) -> str:
+        return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0])
+
+    @property
+    def is_file(self) -> bool:
+        return self.scheme == "file"
+
+    def is_existing_dir(self) -> bool:
+        return self.is_file and os.path.isdir(self.file_path)
+
+    @property
+    def is_wheel(self) -> bool:
+        return self.ext == WHEEL_EXTENSION
+
+    @property
+    def is_vcs(self) -> bool:
+        from pip._internal.vcs import vcs
+
+        return self.scheme in vcs.all_schemes
+
+    @property
+    def is_yanked(self) -> bool:
+        return self.yanked_reason is not None
+
+    @property
+    def has_hash(self) -> bool:
+        return bool(self._hashes)
+
+    def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
+        """
+        Return True if the link has a hash and it is allowed by `hashes`.
+        """
+        if hashes is None:
+            return False
+        return any(hashes.is_hash_allowed(k, v) for k, v in self._hashes.items())
+
+
+class _CleanResult(NamedTuple):
+    """Convert link for equivalency check.
+
+    This is used in the resolver to check whether two URL-specified requirements
+    likely point to the same distribution and can be considered equivalent. This
+    equivalency logic avoids comparing URLs literally, which can be too strict
+    (e.g. "a=1&b=2" vs "b=2&a=1") and produce conflicts that users do not expect.
+
+    Currently this does three things:
+
+    1. Drop the basic auth part. This is technically wrong since a server can
+       serve different content based on auth, but if it does that, it is even
+       impossible to guarantee two URLs without auth are equivalent, since
+       the user can input different auth information when prompted. So the
+       practical solution is to assume the auth doesn't affect the response.
+    2. Parse the query to avoid the ordering issue. Note that ordering under the
+       same key in the query is NOT cleaned; i.e. "a=1&a=2" and "a=2&a=1" are
+       still considered different.
+    3. Explicitly drop most of the fragment part, except ``subdirectory=`` and
+       hash values, since it should have no impact on the downloaded content. Note
+       that this drops the "egg=" part historically used to denote the requested
+       project (and extras), which is wrong in the strictest sense, but too many
+       people are supplying it inconsistently to cause superfluous resolution
+       conflicts, so we choose to also ignore them.
+    """
+
+    parsed: urllib.parse.SplitResult
+    query: Dict[str, List[str]]
+    subdirectory: str
+    hashes: Dict[str, str]
+
+
+def _clean_link(link: Link) -> _CleanResult:
+    parsed = link._parsed_url
+    netloc = parsed.netloc.rsplit("@", 1)[-1]
+    # According to RFC 8089, an empty host in file: means localhost.
+    if parsed.scheme == "file" and not netloc:
+        netloc = "localhost"
+    fragment = urllib.parse.parse_qs(parsed.fragment)
+    if "egg" in fragment:
+        logger.debug("Ignoring egg= fragment in %s", link)
+    try:
+        # If there are multiple subdirectory values, use the first one.
+        # This matches the behavior of Link.subdirectory_fragment.
+        subdirectory = fragment["subdirectory"][0]
+    except (IndexError, KeyError):
+        subdirectory = ""
+    # If there are multiple hash values under the same algorithm, use the
+    # first one. This matches the behavior of Link.hash.
+    hashes = {k: fragment[k][0] for k in _SUPPORTED_HASHES if k in fragment}
+    return _CleanResult(
+        parsed=parsed._replace(netloc=netloc, query="", fragment=""),
+        query=urllib.parse.parse_qs(parsed.query),
+        subdirectory=subdirectory,
+        hashes=hashes,
+    )
+
+
+@functools.lru_cache(maxsize=None)
+def links_equivalent(link1: Link, link2: Link) -> bool:
+    return _clean_link(link1) == _clean_link(link2)
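A minimal sketch of constructing a Link and reading the hash parsed from its URL fragment (module path assumed per upstream pip; the URL is made up):

from pip._internal.models.link import Link, links_equivalent

url = (
    "https://files.example.org/packages/example_pkg-1.0-py3-none-any.whl"
    "#sha256=0123abcd"
)
link = Link(url)
assert link.filename == "example_pkg-1.0-py3-none-any.whl"
assert link.is_wheel                      # extension matches WHEEL_EXTENSION
assert link.hash_name == "sha256"         # parsed by LinkHash.find_hash_url_fragment
assert link.hash == "0123abcd"
assert links_equivalent(link, Link(url))  # equivalence ignores auth and fragment ordering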
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/models/scheme.py b/.venv/lib/python3.12/site-packages/pip/_internal/models/scheme.py
new file mode 100644
index 0000000..f51190a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/models/scheme.py
@@ -0,0 +1,31 @@
+"""
+For types associated with installation schemes.
+
+For a general overview of available schemes and their context, see
+https://docs.python.org/3/install/index.html#alternate-installation.
+"""
+
+
+SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"]
+
+
+class Scheme:
+    """A Scheme holds paths which are used as the base directories for
+    artifacts associated with a Python package.
+    """
+
+    __slots__ = SCHEME_KEYS
+
+    def __init__(
+        self,
+        platlib: str,
+        purelib: str,
+        headers: str,
+        scripts: str,
+        data: str,
+    ) -> None:
+        self.platlib = platlib
+        self.purelib = purelib
+        self.headers = headers
+        self.scripts = scripts
+        self.data = data
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/models/search_scope.py b/.venv/lib/python3.12/site-packages/pip/_internal/models/search_scope.py
new file mode 100644
index 0000000..fe61e81
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/models/search_scope.py
@@ -0,0 +1,132 @@
+import itertools
+import logging
+import os
+import posixpath
+import urllib.parse
+from typing import List
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.models.index import PyPI
+from pip._internal.utils.compat import has_tls
+from pip._internal.utils.misc import normalize_path, redact_auth_from_url
+
+logger = logging.getLogger(__name__)
+
+
+class SearchScope:
+
+    """
+    Encapsulates the locations that pip is configured to search.
+    """
+
+    __slots__ = ["find_links", "index_urls", "no_index"]
+
+    @classmethod
+    def create(
+        cls,
+        find_links: List[str],
+        index_urls: List[str],
+        no_index: bool,
+    ) -> "SearchScope":
+        """
+        Create a SearchScope object after normalizing the `find_links`.
+        """
+        # Build find_links. If an argument starts with ~, it may be
+        # a local file relative to a home directory. So try normalizing
+        # it and if it exists, use the normalized version.
+        # This is deliberately conservative - it might be fine just to
+        # blindly normalize anything starting with a ~...
+        built_find_links: List[str] = []
+        for link in find_links:
+            if link.startswith("~"):
+                new_link = normalize_path(link)
+                if os.path.exists(new_link):
+                    link = new_link
+            built_find_links.append(link)
+
+        # If we don't have TLS enabled, then WARN if anyplace we're looking
+        # relies on TLS.
+        if not has_tls():
+            for link in itertools.chain(index_urls, built_find_links):
+                parsed = urllib.parse.urlparse(link)
+                if parsed.scheme == "https":
+                    logger.warning(
+                        "pip is configured with locations that require "
+                        "TLS/SSL, however the ssl module in Python is not "
+                        "available."
+                    )
+                    break
+
+        return cls(
+            find_links=built_find_links,
+            index_urls=index_urls,
+            no_index=no_index,
+        )
+
+    def __init__(
+        self,
+        find_links: List[str],
+        index_urls: List[str],
+        no_index: bool,
+    ) -> None:
+        self.find_links = find_links
+        self.index_urls = index_urls
+        self.no_index = no_index
+
+    def get_formatted_locations(self) -> str:
+        lines = []
+        redacted_index_urls = []
+        if self.index_urls and self.index_urls != [PyPI.simple_url]:
+            for url in self.index_urls:
+                redacted_index_url = redact_auth_from_url(url)
+
+                # Parse the URL
+                purl = urllib.parse.urlsplit(redacted_index_url)
+
+                # URL is generally invalid if scheme and netloc are missing;
+                # there are issues with Python and URL parsing, so this test
+                # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
+                # always parse invalid URLs correctly - it should raise
+                # exceptions for malformed URLs
+                if not purl.scheme and not purl.netloc:
+                    logger.warning(
+                        'The index url "%s" seems invalid, please provide a scheme.',
+                        redacted_index_url,
+                    )
+
+                redacted_index_urls.append(redacted_index_url)
+
+            lines.append(
+                "Looking in indexes: {}".format(", ".join(redacted_index_urls))
+            )
+
+        if self.find_links:
+            lines.append(
+                "Looking in links: {}".format(
+                    ", ".join(redact_auth_from_url(url) for url in self.find_links)
+                )
+            )
+        return "\n".join(lines)
+
+    def get_index_urls_locations(self, project_name: str) -> List[str]:
+        """Returns the locations found via self.index_urls
+
+        Checks the url_name on the main (first in the list) index and
+        use this url_name to produce all locations
+        """
+
+        def mkurl_pypi_url(url: str) -> str:
+            loc = posixpath.join(
+                url, urllib.parse.quote(canonicalize_name(project_name))
+            )
+            # For maximum compatibility with easy_install, ensure the path
+            # ends in a trailing slash.  Although this isn't in the spec
+            # (and PyPI can handle it without the slash) some other index
+            # implementations might break if they relied on easy_install's
+            # behavior.
+            if not loc.endswith("/"):
+                loc = loc + "/"
+            return loc
+
+        return [mkurl_pypi_url(url) for url in self.index_urls]
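A short sketch of how SearchScope turns an index URL and project name into per-project locations (module path assumed per upstream pip):

from pip._internal.models.search_scope import SearchScope

scope = SearchScope.create(
    find_links=[],
    index_urls=["https://pypi.org/simple/"],
    no_index=False,
)
# The project name is canonicalized and a trailing slash is enforced.
assert scope.get_index_urls_locations("Example_Project") == [
    "https://pypi.org/simple/example-project/"
]
print(scope.get_formatted_locations())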
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/models/selection_prefs.py b/.venv/lib/python3.12/site-packages/pip/_internal/models/selection_prefs.py
new file mode 100644
index 0000000..977bc4c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/models/selection_prefs.py
@@ -0,0 +1,51 @@
+from typing import Optional
+
+from pip._internal.models.format_control import FormatControl
+
+
+class SelectionPreferences:
+    """
+    Encapsulates the candidate selection preferences for downloading
+    and installing files.
+    """
+
+    __slots__ = [
+        "allow_yanked",
+        "allow_all_prereleases",
+        "format_control",
+        "prefer_binary",
+        "ignore_requires_python",
+    ]
+
+    # Don't include an allow_yanked default value to make sure each call
+    # site considers whether yanked releases are allowed. This also causes
+    # that decision to be made explicit in the calling code, which helps
+    # people when reading the code.
+    def __init__(
+        self,
+        allow_yanked: bool,
+        allow_all_prereleases: bool = False,
+        format_control: Optional[FormatControl] = None,
+        prefer_binary: bool = False,
+        ignore_requires_python: Optional[bool] = None,
+    ) -> None:
+        """Create a SelectionPreferences object.
+
+        :param allow_yanked: Whether files marked as yanked (in the sense
+            of PEP 592) are permitted to be candidates for install.
+        :param format_control: A FormatControl object or None. Used to control
+            the selection of source packages / binary packages when consulting
+            the index and links.
+        :param prefer_binary: Whether to prefer an old, but valid, binary
+            dist over a new source dist.
+        :param ignore_requires_python: Whether to ignore incompatible
+            "Requires-Python" values in links. Defaults to False.
+        """
+        if ignore_requires_python is None:
+            ignore_requires_python = False
+
+        self.allow_yanked = allow_yanked
+        self.allow_all_prereleases = allow_all_prereleases
+        self.format_control = format_control
+        self.prefer_binary = prefer_binary
+        self.ignore_requires_python = ignore_requires_python
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/models/target_python.py b/.venv/lib/python3.12/site-packages/pip/_internal/models/target_python.py
new file mode 100644
index 0000000..67ea5da
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/models/target_python.py
@@ -0,0 +1,122 @@
+import sys
+from typing import List, Optional, Set, Tuple
+
+from pip._vendor.packaging.tags import Tag
+
+from pip._internal.utils.compatibility_tags import get_supported, version_info_to_nodot
+from pip._internal.utils.misc import normalize_version_info
+
+
+class TargetPython:
+
+    """
+    Encapsulates the properties of a Python interpreter one is targeting
+    for a package install, download, etc.
+    """
+
+    __slots__ = [
+        "_given_py_version_info",
+        "abis",
+        "implementation",
+        "platforms",
+        "py_version",
+        "py_version_info",
+        "_valid_tags",
+        "_valid_tags_set",
+    ]
+
+    def __init__(
+        self,
+        platforms: Optional[List[str]] = None,
+        py_version_info: Optional[Tuple[int, ...]] = None,
+        abis: Optional[List[str]] = None,
+        implementation: Optional[str] = None,
+    ) -> None:
+        """
+        :param platforms: A list of strings or None. If None, searches for
+            packages that are supported by the current system. Otherwise, will
+            find packages that can be built on the platforms passed in. These
+            packages will only be downloaded for distribution: they will
+            not be built locally.
+        :param py_version_info: An optional tuple of ints representing the
+            Python version information to use (e.g. `sys.version_info[:3]`).
+            This can have length 1, 2, or 3 when provided.
+        :param abis: A list of strings or None. This is passed to
+            compatibility_tags.py's get_supported() function as is.
+        :param implementation: A string or None. This is passed to
+            compatibility_tags.py's get_supported() function as is.
+        """
+        # Store the given py_version_info for when we call get_supported().
+        self._given_py_version_info = py_version_info
+
+        if py_version_info is None:
+            py_version_info = sys.version_info[:3]
+        else:
+            py_version_info = normalize_version_info(py_version_info)
+
+        py_version = ".".join(map(str, py_version_info[:2]))
+
+        self.abis = abis
+        self.implementation = implementation
+        self.platforms = platforms
+        self.py_version = py_version
+        self.py_version_info = py_version_info
+
+        # This is used to cache the return value of get_(un)sorted_tags.
+        self._valid_tags: Optional[List[Tag]] = None
+        self._valid_tags_set: Optional[Set[Tag]] = None
+
+    def format_given(self) -> str:
+        """
+        Format the given, non-None attributes for display.
+        """
+        display_version = None
+        if self._given_py_version_info is not None:
+            display_version = ".".join(
+                str(part) for part in self._given_py_version_info
+            )
+
+        key_values = [
+            ("platforms", self.platforms),
+            ("version_info", display_version),
+            ("abis", self.abis),
+            ("implementation", self.implementation),
+        ]
+        return " ".join(
+            f"{key}={value!r}" for key, value in key_values if value is not None
+        )
+
+    def get_sorted_tags(self) -> List[Tag]:
+        """
+        Return the supported PEP 425 tags to check wheel candidates against.
+
+        The tags are returned in order of preference (most preferred first).
+        """
+        if self._valid_tags is None:
+            # Pass versions=None if no py_version_info was given since
+            # versions=None uses special default logic.
+            py_version_info = self._given_py_version_info
+            if py_version_info is None:
+                version = None
+            else:
+                version = version_info_to_nodot(py_version_info)
+
+            tags = get_supported(
+                version=version,
+                platforms=self.platforms,
+                abis=self.abis,
+                impl=self.implementation,
+            )
+            self._valid_tags = tags
+
+        return self._valid_tags
+
+    def get_unsorted_tags(self) -> Set[Tag]:
+        """Exactly the same as get_sorted_tags, but returns a set.
+
+        This is important for performance.
+        """
+        if self._valid_tags_set is None:
+            self._valid_tags_set = set(self.get_sorted_tags())
+
+        return self._valid_tags_set
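A sketch of targeting an interpreter other than the running one (module path assumed per upstream pip; the platform tag is illustrative):

from pip._internal.models.target_python import TargetPython

tp = TargetPython(platforms=["manylinux2014_x86_64"], py_version_info=(3, 11))
assert tp.py_version == "3.11"
assert tp.format_given() == "platforms=['manylinux2014_x86_64'] version_info='3.11'"
# Tags are computed lazily and cached on first access.
tags = tp.get_sorted_tags()
assert set(tags) == tp.get_unsorted_tags()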
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/models/wheel.py b/.venv/lib/python3.12/site-packages/pip/_internal/models/wheel.py
new file mode 100644
index 0000000..a5dc12b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/models/wheel.py
@@ -0,0 +1,92 @@
+"""Represents a wheel file and provides access to the various parts of the
+name that have meaning.
+"""
+import re
+from typing import Dict, Iterable, List
+
+from pip._vendor.packaging.tags import Tag
+
+from pip._internal.exceptions import InvalidWheelFilename
+
+
+class Wheel:
+    """A wheel file"""
+
+    wheel_file_re = re.compile(
+        r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]*?))
+        ((-(?P<build>\d[^-]*?))?-(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>[^\s-]+?)
+        \.whl|\.dist-info)$""",
+        re.VERBOSE,
+    )
+
+    def __init__(self, filename: str) -> None:
+        """
+        :raises InvalidWheelFilename: when the filename is invalid for a wheel
+        """
+        wheel_info = self.wheel_file_re.match(filename)
+        if not wheel_info:
+            raise InvalidWheelFilename(f"{filename} is not a valid wheel filename.")
+        self.filename = filename
+        self.name = wheel_info.group("name").replace("_", "-")
+        # we'll assume "_" means "-" due to wheel naming scheme
+        # (https://github.com/pypa/pip/issues/1150)
+        self.version = wheel_info.group("ver").replace("_", "-")
+        self.build_tag = wheel_info.group("build")
+        self.pyversions = wheel_info.group("pyver").split(".")
+        self.abis = wheel_info.group("abi").split(".")
+        self.plats = wheel_info.group("plat").split(".")
+
+        # All the tag combinations from this file
+        self.file_tags = {
+            Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats
+        }
+
+    def get_formatted_file_tags(self) -> List[str]:
+        """Return the wheel's tags as a sorted list of strings."""
+        return sorted(str(tag) for tag in self.file_tags)
+
+    def support_index_min(self, tags: List[Tag]) -> int:
+        """Return the lowest index that one of the wheel's file_tag combinations
+        achieves in the given list of supported tags.
+
+        For example, if there are 8 supported tags and one of the file tags
+        is first in the list, then return 0.
+
+        :param tags: the PEP 425 tags to check the wheel against, in order
+            with most preferred first.
+
+        :raises ValueError: If none of the wheel's file tags match one of
+            the supported tags.
+        """
+        try:
+            return next(i for i, t in enumerate(tags) if t in self.file_tags)
+        except StopIteration:
+            raise ValueError()
+
+    def find_most_preferred_tag(
+        self, tags: List[Tag], tag_to_priority: Dict[Tag, int]
+    ) -> int:
+        """Return the priority of the most preferred tag that one of the wheel's file
+        tag combinations achieves in the given list of supported tags using the given
+        tag_to_priority mapping, where lower priorities are more-preferred.
+
+        This is used in place of support_index_min in some cases in order to avoid
+        an expensive linear scan of a large list of tags.
+
+        :param tags: the PEP 425 tags to check the wheel against.
+        :param tag_to_priority: a mapping from tag to priority of that tag, where
+            lower is more preferred.
+
+        :raises ValueError: If none of the wheel's file tags match one of
+            the supported tags.
+        """
+        return min(
+            tag_to_priority[tag] for tag in self.file_tags if tag in tag_to_priority
+        )
+
+    def supported(self, tags: Iterable[Tag]) -> bool:
+        """Return whether the wheel is compatible with one of the given tags.
+
+        :param tags: the PEP 425 tags to check the wheel against.
+        """
+        return not self.file_tags.isdisjoint(tags)
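A minimal parsing sketch for the Wheel filename model above (module path assumed per upstream pip; the filename is made up):

from pip._vendor.packaging.tags import Tag
from pip._internal.models.wheel import Wheel

w = Wheel("example_pkg-1.0-py3-none-any.whl")
assert w.name == "example-pkg"            # underscores normalized to dashes
assert w.version == "1.0"
assert w.file_tags == {Tag("py3", "none", "any")}

supported = [Tag("cp312", "cp312", "manylinux_2_17_x86_64"), Tag("py3", "none", "any")]
assert w.supported(supported)
assert w.support_index_min(supported) == 1  # best match is second in the preference list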
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/network/__init__.py b/.venv/lib/python3.12/site-packages/pip/_internal/network/__init__.py
new file mode 100644
index 0000000..b51bde9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/network/__init__.py
@@ -0,0 +1,2 @@
+"""Contains purely network-related utilities.
+"""
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/network/auth.py b/.venv/lib/python3.12/site-packages/pip/_internal/network/auth.py
new file mode 100644
index 0000000..94a82fa
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/network/auth.py
@@ -0,0 +1,561 @@
+"""Network Authentication Helpers
+
+Contains interface (MultiDomainBasicAuth) and associated glue code for
+providing credentials in the context of network requests.
+"""
+import logging
+import os
+import shutil
+import subprocess
+import sysconfig
+import typing
+import urllib.parse
+from abc import ABC, abstractmethod
+from functools import lru_cache
+from os.path import commonprefix
+from pathlib import Path
+from typing import Any, Dict, List, NamedTuple, Optional, Tuple
+
+from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
+from pip._vendor.requests.models import Request, Response
+from pip._vendor.requests.utils import get_netrc_auth
+
+from pip._internal.utils.logging import getLogger
+from pip._internal.utils.misc import (
+    ask,
+    ask_input,
+    ask_password,
+    remove_auth_from_url,
+    split_auth_netloc_from_url,
+)
+from pip._internal.vcs.versioncontrol import AuthInfo
+
+logger = getLogger(__name__)
+
+KEYRING_DISABLED = False
+
+
+class Credentials(NamedTuple):
+    url: str
+    username: str
+    password: str
+
+
+class KeyRingBaseProvider(ABC):
+    """Keyring base provider interface"""
+
+    has_keyring: bool
+
+    @abstractmethod
+    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
+        ...
+
+    @abstractmethod
+    def save_auth_info(self, url: str, username: str, password: str) -> None:
+        ...
+
+
+class KeyRingNullProvider(KeyRingBaseProvider):
+    """Keyring null provider"""
+
+    has_keyring = False
+
+    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
+        return None
+
+    def save_auth_info(self, url: str, username: str, password: str) -> None:
+        return None
+
+
+class KeyRingPythonProvider(KeyRingBaseProvider):
+    """Keyring interface which uses locally imported `keyring`"""
+
+    has_keyring = True
+
+    def __init__(self) -> None:
+        import keyring
+
+        self.keyring = keyring
+
+    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
+        # Support keyring's get_credential interface which supports getting
+        # credentials without a username. This is only available for
+        # keyring>=15.2.0.
+        if hasattr(self.keyring, "get_credential"):
+            logger.debug("Getting credentials from keyring for %s", url)
+            cred = self.keyring.get_credential(url, username)
+            if cred is not None:
+                return cred.username, cred.password
+            return None
+
+        if username is not None:
+            logger.debug("Getting password from keyring for %s", url)
+            password = self.keyring.get_password(url, username)
+            if password:
+                return username, password
+        return None
+
+    def save_auth_info(self, url: str, username: str, password: str) -> None:
+        self.keyring.set_password(url, username, password)
+
+
+class KeyRingCliProvider(KeyRingBaseProvider):
+    """Provider which uses `keyring` cli
+
+    Instead of calling the keyring package installed alongside pip
+    we call keyring on the command line which will enable pip to
+    use whichever installation of keyring is available first in
+    PATH.
+    """
+
+    has_keyring = True
+
+    def __init__(self, cmd: str) -> None:
+        self.keyring = cmd
+
+    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
+        # This is the default implementation of keyring.get_credential
+        # https://github.com/jaraco/keyring/blob/97689324abcf01bd1793d49063e7ca01e03d7d07/keyring/backend.py#L134-L139
+        if username is not None:
+            password = self._get_password(url, username)
+            if password is not None:
+                return username, password
+        return None
+
+    def save_auth_info(self, url: str, username: str, password: str) -> None:
+        return self._set_password(url, username, password)
+
+    def _get_password(self, service_name: str, username: str) -> Optional[str]:
+        """Mirror the implementation of keyring.get_password using cli"""
+        if self.keyring is None:
+            return None
+
+        cmd = [self.keyring, "get", service_name, username]
+        env = os.environ.copy()
+        env["PYTHONIOENCODING"] = "utf-8"
+        res = subprocess.run(
+            cmd,
+            stdin=subprocess.DEVNULL,
+            stdout=subprocess.PIPE,
+            env=env,
+        )
+        if res.returncode:
+            return None
+        return res.stdout.decode("utf-8").strip(os.linesep)
+
+    def _set_password(self, service_name: str, username: str, password: str) -> None:
+        """Mirror the implementation of keyring.set_password using cli"""
+        if self.keyring is None:
+            return None
+        env = os.environ.copy()
+        env["PYTHONIOENCODING"] = "utf-8"
+        subprocess.run(
+            [self.keyring, "set", service_name, username],
+            input=f"{password}{os.linesep}".encode("utf-8"),
+            env=env,
+            check=True,
+        )
+        return None
+
+
+@lru_cache(maxsize=None)
+def get_keyring_provider(provider: str) -> KeyRingBaseProvider:
+    logger.verbose("Keyring provider requested: %s", provider)
+
+    # keyring has previously failed and been disabled
+    if KEYRING_DISABLED:
+        provider = "disabled"
+    if provider in ["import", "auto"]:
+        try:
+            impl = KeyRingPythonProvider()
+            logger.verbose("Keyring provider set: import")
+            return impl
+        except ImportError:
+            pass
+        except Exception as exc:
+            # In the event of an unexpected exception
+            # we should warn the user
+            msg = "Installed copy of keyring fails with exception %s"
+            if provider == "auto":
+                msg = msg + ", trying to find a keyring executable as a fallback"
+            logger.warning(msg, exc, exc_info=logger.isEnabledFor(logging.DEBUG))
+    if provider in ["subprocess", "auto"]:
+        cli = shutil.which("keyring")
+        if cli and cli.startswith(sysconfig.get_path("scripts")):
+            # all code within this function is stolen from shutil.which implementation
+            @typing.no_type_check
+            def PATH_as_shutil_which_determines_it() -> str:
+                path = os.environ.get("PATH", None)
+                if path is None:
+                    try:
+                        path = os.confstr("CS_PATH")
+                    except (AttributeError, ValueError):
+                        # os.confstr() or CS_PATH is not available
+                        path = os.defpath
+                # bpo-35755: Don't use os.defpath if the PATH environment variable is
+                # set to an empty string
+
+                return path
+
+            scripts = Path(sysconfig.get_path("scripts"))
+
+            paths = []
+            for path in PATH_as_shutil_which_determines_it().split(os.pathsep):
+                p = Path(path)
+                try:
+                    if not p.samefile(scripts):
+                        paths.append(path)
+                except FileNotFoundError:
+                    pass
+
+            path = os.pathsep.join(paths)
+
+            cli = shutil.which("keyring", path=path)
+
+        if cli:
+            logger.verbose("Keyring provider set: subprocess with executable %s", cli)
+            return KeyRingCliProvider(cli)
+
+    logger.verbose("Keyring provider set: disabled")
+    return KeyRingNullProvider()
+
+
+class MultiDomainBasicAuth(AuthBase):
+    def __init__(
+        self,
+        prompting: bool = True,
+        index_urls: Optional[List[str]] = None,
+        keyring_provider: str = "auto",
+    ) -> None:
+        self.prompting = prompting
+        self.index_urls = index_urls
+        self.keyring_provider = keyring_provider  # type: ignore[assignment]
+        self.passwords: Dict[str, AuthInfo] = {}
+        # When the user is prompted to enter credentials and keyring is
+        # available, we will offer to save them. If the user accepts,
+        # this value is set to the credentials they entered. After the
+        # request authenticates, the caller should call
+        # ``save_credentials`` to save these.
+        self._credentials_to_save: Optional[Credentials] = None
+
+    @property
+    def keyring_provider(self) -> KeyRingBaseProvider:
+        return get_keyring_provider(self._keyring_provider)
+
+    @keyring_provider.setter
+    def keyring_provider(self, provider: str) -> None:
+        # The free function get_keyring_provider has been decorated with
+        # functools.lru_cache. If an exception occurs in _get_keyring_auth that
+        # cache will be cleared and keyring disabled; take that into account
+        # if you want to remove this indirection.
+        self._keyring_provider = provider
+
+    @property
+    def use_keyring(self) -> bool:
+        # We won't use keyring when --no-input is passed unless
+        # a specific provider is requested because it might require
+        # user interaction
+        return self.prompting or self._keyring_provider not in ["auto", "disabled"]
+
+    def _get_keyring_auth(
+        self,
+        url: Optional[str],
+        username: Optional[str],
+    ) -> Optional[AuthInfo]:
+        """Return the tuple auth for a given url from keyring."""
+        # Do nothing if no url was provided
+        if not url:
+            return None
+
+        try:
+            return self.keyring_provider.get_auth_info(url, username)
+        except Exception as exc:
+            logger.warning(
+                "Keyring is skipped due to an exception: %s",
+                str(exc),
+            )
+            global KEYRING_DISABLED
+            KEYRING_DISABLED = True
+            get_keyring_provider.cache_clear()
+            return None
+
+    def _get_index_url(self, url: str) -> Optional[str]:
+        """Return the original index URL matching the requested URL.
+
+        Cached or dynamically generated credentials may work against
+        the original index URL rather than just the netloc.
+
+        The provided url should have had its username and password
+        removed already. If the original index url had credentials then
+        they will be included in the return value.
+
+        Returns None if no matching index was found, or if --no-index
+        was specified by the user.
+        """
+        if not url or not self.index_urls:
+            return None
+
+        url = remove_auth_from_url(url).rstrip("/") + "/"
+        parsed_url = urllib.parse.urlsplit(url)
+
+        candidates = []
+
+        for index in self.index_urls:
+            index = index.rstrip("/") + "/"
+            parsed_index = urllib.parse.urlsplit(remove_auth_from_url(index))
+            if parsed_url == parsed_index:
+                return index
+
+            if parsed_url.netloc != parsed_index.netloc:
+                continue
+
+            candidate = urllib.parse.urlsplit(index)
+            candidates.append(candidate)
+
+        if not candidates:
+            return None
+
+        candidates.sort(
+            reverse=True,
+            key=lambda candidate: commonprefix(
+                [
+                    parsed_url.path,
+                    candidate.path,
+                ]
+            ).rfind("/"),
+        )
+
+        return urllib.parse.urlunsplit(candidates[0])
+
+    def _get_new_credentials(
+        self,
+        original_url: str,
+        *,
+        allow_netrc: bool = True,
+        allow_keyring: bool = False,
+    ) -> AuthInfo:
+        """Find and return credentials for the specified URL."""
+        # Split the credentials and netloc from the url.
+        url, netloc, url_user_password = split_auth_netloc_from_url(
+            original_url,
+        )
+
+        # Start with the credentials embedded in the url
+        username, password = url_user_password
+        if username is not None and password is not None:
+            logger.debug("Found credentials in url for %s", netloc)
+            return url_user_password
+
+        # Find a matching index url for this request
+        index_url = self._get_index_url(url)
+        if index_url:
+            # Split the credentials from the url.
+            index_info = split_auth_netloc_from_url(index_url)
+            if index_info:
+                index_url, _, index_url_user_password = index_info
+                logger.debug("Found index url %s", index_url)
+
+        # If an index URL was found, try its embedded credentials
+        if index_url and index_url_user_password[0] is not None:
+            username, password = index_url_user_password
+            if username is not None and password is not None:
+                logger.debug("Found credentials in index url for %s", netloc)
+                return index_url_user_password
+
+        # Get creds from netrc if we still don't have them
+        if allow_netrc:
+            netrc_auth = get_netrc_auth(original_url)
+            if netrc_auth:
+                logger.debug("Found credentials in netrc for %s", netloc)
+                return netrc_auth
+
+        # If we don't have a password and keyring is available, use it.
+        if allow_keyring:
+            # The index url is more specific than the netloc, so try it first
+            # fmt: off
+            kr_auth = (
+                self._get_keyring_auth(index_url, username) or
+                self._get_keyring_auth(netloc, username)
+            )
+            # fmt: on
+            if kr_auth:
+                logger.debug("Found credentials in keyring for %s", netloc)
+                return kr_auth
+
+        return username, password
+
+    def _get_url_and_credentials(
+        self, original_url: str
+    ) -> Tuple[str, Optional[str], Optional[str]]:
+        """Return the credentials to use for the provided URL.
+
+        If allowed, netrc and keyring may be used to obtain the
+        correct credentials.
+
+        Returns (url_without_credentials, username, password). Note
+        that even if the original URL contains credentials, this
+        function may return a different username and password.
+        """
+        url, netloc, _ = split_auth_netloc_from_url(original_url)
+
+        # Try to get credentials from original url
+        username, password = self._get_new_credentials(original_url)
+
+        # If credentials not found, use any stored credentials for this netloc.
+        # Do this if either the username or the password is missing.
+        # This accounts for the situation in which the user has specified
+        # the username in the index url, but the password comes from keyring.
+        if (username is None or password is None) and netloc in self.passwords:
+            un, pw = self.passwords[netloc]
+            # It is possible that the cached credentials are for a different username,
+            # in which case the cache should be ignored.
+            if username is None or username == un:
+                username, password = un, pw
+
+        if username is not None or password is not None:
+            # Convert the username and password if they're None, so that
+            # this netloc will show up as "cached" in the conditional above.
+            # Further, HTTPBasicAuth doesn't accept None, so it makes sense to
+            # cache the value that is going to be used.
+            username = username or ""
+            password = password or ""
+
+            # Store any acquired credentials.
+            self.passwords[netloc] = (username, password)
+
+        assert (
+            # Credentials were found
+            (username is not None and password is not None)
+            # Credentials were not found
+            or (username is None and password is None)
+        ), f"Could not load credentials from url: {original_url}"
+
+        return url, username, password
+
+    def __call__(self, req: Request) -> Request:
+        # Get credentials for this request
+        url, username, password = self._get_url_and_credentials(req.url)
+
+        # Set the url of the request to the url without any credentials
+        req.url = url
+
+        if username is not None and password is not None:
+            # Send the basic auth with this request
+            req = HTTPBasicAuth(username, password)(req)
+
+        # Attach a hook to handle 401 responses
+        req.register_hook("response", self.handle_401)
+
+        return req
+
+    # Factored out to allow for easy patching in tests
+    def _prompt_for_password(
+        self, netloc: str
+    ) -> Tuple[Optional[str], Optional[str], bool]:
+        username = ask_input(f"User for {netloc}: ") if self.prompting else None
+        if not username:
+            return None, None, False
+        if self.use_keyring:
+            auth = self._get_keyring_auth(netloc, username)
+            if auth and auth[0] is not None and auth[1] is not None:
+                return auth[0], auth[1], False
+        password = ask_password("Password: ")
+        return username, password, True
+
+    # Factored out to allow for easy patching in tests
+    def _should_save_password_to_keyring(self) -> bool:
+        if (
+            not self.prompting
+            or not self.use_keyring
+            or not self.keyring_provider.has_keyring
+        ):
+            return False
+        return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"
+
+    def handle_401(self, resp: Response, **kwargs: Any) -> Response:
+        # We only care about 401 responses, anything else we want to just
+        #   pass through the actual response
+        if resp.status_code != 401:
+            return resp
+
+        username, password = None, None
+
+        # Query the keyring for credentials:
+        if self.use_keyring:
+            username, password = self._get_new_credentials(
+                resp.url,
+                allow_netrc=False,
+                allow_keyring=True,
+            )
+
+        # We are not able to prompt the user so simply return the response
+        if not self.prompting and not username and not password:
+            return resp
+
+        parsed = urllib.parse.urlparse(resp.url)
+
+        # Prompt the user for a new username and password
+        save = False
+        if not username and not password:
+            username, password, save = self._prompt_for_password(parsed.netloc)
+
+        # Store the new username and password to use for future requests
+        self._credentials_to_save = None
+        if username is not None and password is not None:
+            self.passwords[parsed.netloc] = (username, password)
+
+            # Prompt to save the password to keyring
+            if save and self._should_save_password_to_keyring():
+                self._credentials_to_save = Credentials(
+                    url=parsed.netloc,
+                    username=username,
+                    password=password,
+                )
+
+        # Consume content and release the original connection to allow our new
+        #   request to reuse the same one.
+        # The result of the assignment isn't used, it's just needed to consume
+        # the content.
+        _ = resp.content
+        resp.raw.release_conn()
+
+        # Add our new username and password to the request
+        req = HTTPBasicAuth(username or "", password or "")(resp.request)
+        req.register_hook("response", self.warn_on_401)
+
+        # On successful request, save the credentials that were used to
+        # keyring. (Note that if the user responded "no" above, this member
+        # is not set and nothing will be saved.)
+        if self._credentials_to_save:
+            req.register_hook("response", self.save_credentials)
+
+        # Send our new request
+        new_resp = resp.connection.send(req, **kwargs)
+        new_resp.history.append(resp)
+
+        return new_resp
+
+    def warn_on_401(self, resp: Response, **kwargs: Any) -> None:
+        """Response callback to warn about incorrect credentials."""
+        if resp.status_code == 401:
+            logger.warning(
+                "401 Error, Credentials not correct for %s",
+                resp.request.url,
+            )
+
+    def save_credentials(self, resp: Response, **kwargs: Any) -> None:
+        """Response callback to save credentials on success."""
+        assert (
+            self.keyring_provider.has_keyring
+        ), "should never reach here without keyring"
+
+        creds = self._credentials_to_save
+        self._credentials_to_save = None
+        if creds and resp.status_code < 400:
+            try:
+                logger.info("Saving credentials to keyring")
+                self.keyring_provider.save_auth_info(
+                    creds.url, creds.username, creds.password
+                )
+            except Exception:
+                logger.exception("Failed to save credentials")
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/network/cache.py b/.venv/lib/python3.12/site-packages/pip/_internal/network/cache.py
new file mode 100644
index 0000000..4d0fb54
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/network/cache.py
@@ -0,0 +1,106 @@
+"""HTTP cache implementation.
+"""
+
+import os
+from contextlib import contextmanager
+from datetime import datetime
+from typing import BinaryIO, Generator, Optional, Union
+
+from pip._vendor.cachecontrol.cache import SeparateBodyBaseCache
+from pip._vendor.cachecontrol.caches import SeparateBodyFileCache
+from pip._vendor.requests.models import Response
+
+from pip._internal.utils.filesystem import adjacent_tmp_file, replace
+from pip._internal.utils.misc import ensure_dir
+
+
+def is_from_cache(response: Response) -> bool:
+    return getattr(response, "from_cache", False)
+
+
+@contextmanager
+def suppressed_cache_errors() -> Generator[None, None, None]:
+    """If we can't access the cache then we can just skip caching and process
+    requests as if caching wasn't enabled.
+    """
+    try:
+        yield
+    except OSError:
+        pass
+
+
+class SafeFileCache(SeparateBodyBaseCache):
+    """
+    A file based cache which is safe to use even when the target directory may
+    not be accessible or writable.
+
+    There is a race condition when two processes try to write and/or read the
+    same entry at the same time, since each entry consists of two separate
+    files (https://github.com/psf/cachecontrol/issues/324).  We therefore have
+    additional logic that makes sure that both files are present before
+    returning an entry; this fixes the read side of the race condition.
+
+    For the write side, we assume that the server will only ever return the
+    same data for the same URL, which ought to be the case for files pip is
+    downloading.  PyPI does not have a mechanism to swap out a wheel for
+    another wheel, for example.  If this assumption is not true, the
+    CacheControl issue will need to be fixed.
+    """
+
+    def __init__(self, directory: str) -> None:
+        assert directory is not None, "Cache directory must not be None."
+        super().__init__()
+        self.directory = directory
+
+    def _get_cache_path(self, name: str) -> str:
+        # From cachecontrol.caches.file_cache.FileCache._fn, brought into our
+        # class for backwards-compatibility and to avoid using a non-public
+        # method.
+        hashed = SeparateBodyFileCache.encode(name)
+        parts = list(hashed[:5]) + [hashed]
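+        # e.g. a key hashed to "d1e8a7..." is stored at
+        # <directory>/d/1/e/8/a/d1e8a7...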
+        return os.path.join(self.directory, *parts)
+
+    def get(self, key: str) -> Optional[bytes]:
+        # The cache entry is only valid if both metadata and body exist.
+        metadata_path = self._get_cache_path(key)
+        body_path = metadata_path + ".body"
+        if not (os.path.exists(metadata_path) and os.path.exists(body_path)):
+            return None
+        with suppressed_cache_errors():
+            with open(metadata_path, "rb") as f:
+                return f.read()
+
+    def _write(self, path: str, data: bytes) -> None:
+        with suppressed_cache_errors():
+            ensure_dir(os.path.dirname(path))
+
+            with adjacent_tmp_file(path) as f:
+                f.write(data)
+
+            replace(f.name, path)
+
+    def set(
+        self, key: str, value: bytes, expires: Union[int, datetime, None] = None
+    ) -> None:
+        path = self._get_cache_path(key)
+        self._write(path, value)
+
+    def delete(self, key: str) -> None:
+        path = self._get_cache_path(key)
+        with suppressed_cache_errors():
+            os.remove(path)
+        with suppressed_cache_errors():
+            os.remove(path + ".body")
+
+    def get_body(self, key: str) -> Optional[BinaryIO]:
+        # The cache entry is only valid if both metadata and body exist.
+        metadata_path = self._get_cache_path(key)
+        body_path = metadata_path + ".body"
+        if not (os.path.exists(metadata_path) and os.path.exists(body_path)):
+            return None
+        with suppressed_cache_errors():
+            return open(body_path, "rb")
+
+    def set_body(self, key: str, body: bytes) -> None:
+        path = self._get_cache_path(key) + ".body"
+        self._write(path, body)
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/network/download.py b/.venv/lib/python3.12/site-packages/pip/_internal/network/download.py
new file mode 100644
index 0000000..d1d4354
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/network/download.py
@@ -0,0 +1,186 @@
+"""Download files with progress indicators.
+"""
+import email.message
+import logging
+import mimetypes
+import os
+from typing import Iterable, Optional, Tuple
+
+from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
+
+from pip._internal.cli.progress_bars import get_download_progress_renderer
+from pip._internal.exceptions import NetworkConnectionError
+from pip._internal.models.index import PyPI
+from pip._internal.models.link import Link
+from pip._internal.network.cache import is_from_cache
+from pip._internal.network.session import PipSession
+from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks
+from pip._internal.utils.misc import format_size, redact_auth_from_url, splitext
+
+logger = logging.getLogger(__name__)
+
+
+def _get_http_response_size(resp: Response) -> Optional[int]:
+    try:
+        return int(resp.headers["content-length"])
+    except (ValueError, KeyError, TypeError):
+        return None
+
+
+def _prepare_download(
+    resp: Response,
+    link: Link,
+    progress_bar: str,
+) -> Iterable[bytes]:
+    total_length = _get_http_response_size(resp)
+
+    if link.netloc == PyPI.file_storage_domain:
+        url = link.show_url
+    else:
+        url = link.url_without_fragment
+
+    logged_url = redact_auth_from_url(url)
+
+    if total_length:
+        logged_url = f"{logged_url} ({format_size(total_length)})"
+
+    if is_from_cache(resp):
+        logger.info("Using cached %s", logged_url)
+    else:
+        logger.info("Downloading %s", logged_url)
+
+    if logger.getEffectiveLevel() > logging.INFO:
+        show_progress = False
+    elif is_from_cache(resp):
+        show_progress = False
+    elif not total_length:
+        show_progress = True
+    elif total_length > (40 * 1000):
+        show_progress = True
+    else:
+        show_progress = False
+
+    chunks = response_chunks(resp, CONTENT_CHUNK_SIZE)
+
+    if not show_progress:
+        return chunks
+
+    renderer = get_download_progress_renderer(bar_type=progress_bar, size=total_length)
+    return renderer(chunks)
+
+
+def sanitize_content_filename(filename: str) -> str:
+    """
+    Sanitize the "filename" value from a Content-Disposition header.
+    """
+    return os.path.basename(filename)
+
+
+def parse_content_disposition(content_disposition: str, default_filename: str) -> str:
+    """
+    Parse the "filename" value from a Content-Disposition header, and
+    return the default filename if the result is empty.
+    """
+    m = email.message.Message()
+    m["content-type"] = content_disposition
+    filename = m.get_param("filename")
+    if filename:
+        # We need to sanitize the filename to prevent directory traversal
+        # in case the filename contains ".." path parts.
+        filename = sanitize_content_filename(str(filename))
+    return filename or default_filename
+
+
+def _get_http_response_filename(resp: Response, link: Link) -> str:
+    """Get an ideal filename from the given HTTP response, falling back to
+    the link filename if not provided.
+    """
+    filename = link.filename  # fallback
+    # Have a look at the Content-Disposition header for a better guess
+    content_disposition = resp.headers.get("content-disposition")
+    if content_disposition:
+        filename = parse_content_disposition(content_disposition, filename)
+    ext: Optional[str] = splitext(filename)[1]
+    if not ext:
+        ext = mimetypes.guess_extension(resp.headers.get("content-type", ""))
+        if ext:
+            filename += ext
+    if not ext and link.url != resp.url:
+        ext = os.path.splitext(resp.url)[1]
+        if ext:
+            filename += ext
+    return filename
+
+
+def _http_get_download(session: PipSession, link: Link) -> Response:
+    target_url = link.url.split("#", 1)[0]
+    resp = session.get(target_url, headers=HEADERS, stream=True)
+    raise_for_status(resp)
+    return resp
+
+
+class Downloader:
+    def __init__(
+        self,
+        session: PipSession,
+        progress_bar: str,
+    ) -> None:
+        self._session = session
+        self._progress_bar = progress_bar
+
+    def __call__(self, link: Link, location: str) -> Tuple[str, str]:
+        """Download the file given by link into location."""
+        try:
+            resp = _http_get_download(self._session, link)
+        except NetworkConnectionError as e:
+            assert e.response is not None
+            logger.critical(
+                "HTTP error %s while getting %s", e.response.status_code, link
+            )
+            raise
+
+        filename = _get_http_response_filename(resp, link)
+        filepath = os.path.join(location, filename)
+
+        chunks = _prepare_download(resp, link, self._progress_bar)
+        with open(filepath, "wb") as content_file:
+            for chunk in chunks:
+                content_file.write(chunk)
+        content_type = resp.headers.get("Content-Type", "")
+        return filepath, content_type
+
+
+class BatchDownloader:
+    def __init__(
+        self,
+        session: PipSession,
+        progress_bar: str,
+    ) -> None:
+        self._session = session
+        self._progress_bar = progress_bar
+
+    def __call__(
+        self, links: Iterable[Link], location: str
+    ) -> Iterable[Tuple[Link, Tuple[str, str]]]:
+        """Download the files given by links into location."""
+        for link in links:
+            try:
+                resp = _http_get_download(self._session, link)
+            except NetworkConnectionError as e:
+                assert e.response is not None
+                logger.critical(
+                    "HTTP error %s while getting %s",
+                    e.response.status_code,
+                    link,
+                )
+                raise
+
+            filename = _get_http_response_filename(resp, link)
+            filepath = os.path.join(location, filename)
+
+            chunks = _prepare_download(resp, link, self._progress_bar)
+            with open(filepath, "wb") as content_file:
+                for chunk in chunks:
+                    content_file.write(chunk)
+            content_type = resp.headers.get("Content-Type", "")
+            yield link, (filepath, content_type)
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/network/lazy_wheel.py b/.venv/lib/python3.12/site-packages/pip/_internal/network/lazy_wheel.py
new file mode 100644
index 0000000..82ec50d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/network/lazy_wheel.py
@@ -0,0 +1,210 @@
+"""Lazy ZIP over HTTP"""
+
+__all__ = ["HTTPRangeRequestUnsupported", "dist_from_wheel_url"]
+
+from bisect import bisect_left, bisect_right
+from contextlib import contextmanager
+from tempfile import NamedTemporaryFile
+from typing import Any, Dict, Generator, List, Optional, Tuple
+from zipfile import BadZipFile, ZipFile
+
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
+
+from pip._internal.metadata import BaseDistribution, MemoryWheel, get_wheel_distribution
+from pip._internal.network.session import PipSession
+from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks
+
+
+class HTTPRangeRequestUnsupported(Exception):
+    pass
+
+
+def dist_from_wheel_url(name: str, url: str, session: PipSession) -> BaseDistribution:
+    """Return a distribution object from the given wheel URL.
+
+    This uses HTTP range requests to only fetch the portion of the wheel
+    containing metadata, just enough for the object to be constructed.
+    If such requests are not supported, HTTPRangeRequestUnsupported
+    is raised.
+    """
+    with LazyZipOverHTTP(url, session) as zf:
+        # For read-only ZIP files, ZipFile only needs methods read,
+        # seek, seekable and tell, not the whole IO protocol.
+        wheel = MemoryWheel(zf.name, zf)  # type: ignore
+        # After context manager exit, wheel.name
+        # is an invalid file by intention.
+        return get_wheel_distribution(wheel, canonicalize_name(name))
+
+
+class LazyZipOverHTTP:
+    """File-like object mapped to a ZIP file over HTTP.
+
+    This uses HTTP range requests to lazily fetch the file's content,
+    which is supposed to be fed to ZipFile.  If such requests are not
+    supported by the server, raise HTTPRangeRequestUnsupported
+    during initialization.
+    """
+
+    def __init__(
+        self, url: str, session: PipSession, chunk_size: int = CONTENT_CHUNK_SIZE
+    ) -> None:
+        head = session.head(url, headers=HEADERS)
+        raise_for_status(head)
+        assert head.status_code == 200
+        self._session, self._url, self._chunk_size = session, url, chunk_size
+        self._length = int(head.headers["Content-Length"])
+        self._file = NamedTemporaryFile()
+        self.truncate(self._length)
+        self._left: List[int] = []
+        self._right: List[int] = []
+        if "bytes" not in head.headers.get("Accept-Ranges", "none"):
+            raise HTTPRangeRequestUnsupported("range request is not supported")
+        self._check_zip()
+
+    @property
+    def mode(self) -> str:
+        """Opening mode, which is always rb."""
+        return "rb"
+
+    @property
+    def name(self) -> str:
+        """Path to the underlying file."""
+        return self._file.name
+
+    def seekable(self) -> bool:
+        """Return whether random access is supported, which is True."""
+        return True
+
+    def close(self) -> None:
+        """Close the file."""
+        self._file.close()
+
+    @property
+    def closed(self) -> bool:
+        """Whether the file is closed."""
+        return self._file.closed
+
+    def read(self, size: int = -1) -> bytes:
+        """Read up to size bytes from the object and return them.
+
+        As a convenience, if size is unspecified or -1,
+        all bytes until EOF are returned.  Fewer than
+        size bytes may be returned if EOF is reached.
+        """
+        download_size = max(size, self._chunk_size)
+        start, length = self.tell(), self._length
+        stop = length if size < 0 else min(start + download_size, length)
+        start = max(0, stop - download_size)
+        self._download(start, stop - 1)
+        return self._file.read(size)
+
+    def readable(self) -> bool:
+        """Return whether the file is readable, which is True."""
+        return True
+
+    def seek(self, offset: int, whence: int = 0) -> int:
+        """Change stream position and return the new absolute position.
+
+        Seek to offset, relative to the position indicated by whence:
+        * 0: Start of stream (the default).  pos should be >= 0;
+        * 1: Current position - pos may be negative;
+        * 2: End of stream - pos usually negative.
+        """
+        return self._file.seek(offset, whence)
+
+    def tell(self) -> int:
+        """Return the current position."""
+        return self._file.tell()
+
+    def truncate(self, size: Optional[int] = None) -> int:
+        """Resize the stream to the given size in bytes.
+
+        If size is unspecified resize to the current position.
+        The current stream position isn't changed.
+
+        Return the new file size.
+        """
+        return self._file.truncate(size)
+
+    def writable(self) -> bool:
+        """Return False."""
+        return False
+
+    def __enter__(self) -> "LazyZipOverHTTP":
+        self._file.__enter__()
+        return self
+
+    def __exit__(self, *exc: Any) -> None:
+        self._file.__exit__(*exc)
+
+    @contextmanager
+    def _stay(self) -> Generator[None, None, None]:
+        """Return a context manager keeping the position.
+
+        At the end of the block, seek back to original position.
+        """
+        pos = self.tell()
+        try:
+            yield
+        finally:
+            self.seek(pos)
+
+    def _check_zip(self) -> None:
+        """Check and download until the file is a valid ZIP."""
+        end = self._length - 1
+        for start in reversed(range(0, end, self._chunk_size)):
+            self._download(start, end)
+            with self._stay():
+                try:
+                    # For read-only ZIP files, ZipFile only needs
+                    # methods read, seek, seekable and tell.
+                    ZipFile(self)  # type: ignore
+                except BadZipFile:
+                    pass
+                else:
+                    break
+
+    def _stream_response(
+        self, start: int, end: int, base_headers: Dict[str, str] = HEADERS
+    ) -> Response:
+        """Return HTTP response to a range request from start to end."""
+        headers = base_headers.copy()
+        headers["Range"] = f"bytes={start}-{end}"
+        # TODO: Get range requests to be correctly cached
+        headers["Cache-Control"] = "no-cache"
+        return self._session.get(self._url, headers=headers, stream=True)
+
+    def _merge(
+        self, start: int, end: int, left: int, right: int
+    ) -> Generator[Tuple[int, int], None, None]:
+        """Return a generator of intervals to be fetched.
+
+        Args:
+            start (int): Start of needed interval
+            end (int): End of needed interval
+            left (int): Index of first overlapping downloaded data
+            right (int): Index after last overlapping downloaded data
+        """
+        lslice, rslice = self._left[left:right], self._right[left:right]
+        i = start = min([start] + lslice[:1])
+        end = max([end] + rslice[-1:])
+        for j, k in zip(lslice, rslice):
+            if j > i:
+                yield i, j - 1
+            i = k + 1
+        if i <= end:
+            yield i, end
+        self._left[left:right], self._right[left:right] = [start], [end]
+
+    def _download(self, start: int, end: int) -> None:
+        """Download bytes from start to end inclusively."""
+        with self._stay():
+            left = bisect_left(self._right, start)
+            right = bisect_right(self._left, end)
+            for start, end in self._merge(start, end, left, right):
+                response = self._stream_response(start, end)
+                response.raise_for_status()
+                self.seek(start)
+                for chunk in response_chunks(response, self._chunk_size):
+                    self._file.write(chunk)
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/network/session.py b/.venv/lib/python3.12/site-packages/pip/_internal/network/session.py
new file mode 100644
index 0000000..f17efc5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/network/session.py
@@ -0,0 +1,520 @@
+"""PipSession and supporting code, containing all pip-specific
+network request configuration and behavior.
+"""
+
+import email.utils
+import io
+import ipaddress
+import json
+import logging
+import mimetypes
+import os
+import platform
+import shutil
+import subprocess
+import sys
+import urllib.parse
+import warnings
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Dict,
+    Generator,
+    List,
+    Mapping,
+    Optional,
+    Sequence,
+    Tuple,
+    Union,
+)
+
+from pip._vendor import requests, urllib3
+from pip._vendor.cachecontrol import CacheControlAdapter as _BaseCacheControlAdapter
+from pip._vendor.requests.adapters import DEFAULT_POOLBLOCK, BaseAdapter
+from pip._vendor.requests.adapters import HTTPAdapter as _BaseHTTPAdapter
+from pip._vendor.requests.models import PreparedRequest, Response
+from pip._vendor.requests.structures import CaseInsensitiveDict
+from pip._vendor.urllib3.connectionpool import ConnectionPool
+from pip._vendor.urllib3.exceptions import InsecureRequestWarning
+
+from pip import __version__
+from pip._internal.metadata import get_default_environment
+from pip._internal.models.link import Link
+from pip._internal.network.auth import MultiDomainBasicAuth
+from pip._internal.network.cache import SafeFileCache
+
+# Import ssl from compat so the initial import occurs in only one place.
+from pip._internal.utils.compat import has_tls
+from pip._internal.utils.glibc import libc_ver
+from pip._internal.utils.misc import build_url_from_netloc, parse_netloc
+from pip._internal.utils.urls import url_to_path
+
+if TYPE_CHECKING:
+    from ssl import SSLContext
+
+    from pip._vendor.urllib3.poolmanager import PoolManager
+
+
+logger = logging.getLogger(__name__)
+
+SecureOrigin = Tuple[str, str, Optional[Union[int, str]]]
+
+
+# Ignore warning raised when using --trusted-host.
+warnings.filterwarnings("ignore", category=InsecureRequestWarning)
+
+
+SECURE_ORIGINS: List[SecureOrigin] = [
+    # protocol, hostname, port
+    # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
+    ("https", "*", "*"),
+    ("*", "localhost", "*"),
+    ("*", "127.0.0.0/8", "*"),
+    ("*", "::1/128", "*"),
+    ("file", "*", None),
+    # ssh is always secure.
+    ("ssh", "*", "*"),
+]
+
+
+# These are environment variables present when running under various
+# CI systems.  For each variable, some CI systems that use the variable
+# are indicated.  The collection was chosen so that for each of a number
+# of popular systems, at least one of the environment variables is used.
+# This list is used to provide some indication of and lower bound for
+# CI traffic to PyPI.  Thus, it is okay if the list is not comprehensive.
+# For more background, see: https://github.com/pypa/pip/issues/5499
+CI_ENVIRONMENT_VARIABLES = (
+    # Azure Pipelines
+    "BUILD_BUILDID",
+    # Jenkins
+    "BUILD_ID",
+    # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI
+    "CI",
+    # Explicit environment variable.
+    "PIP_IS_CI",
+)
+
+
+def looks_like_ci() -> bool:
+    """
+    Return whether it looks like pip is running under CI.
+    """
+    # We don't use the method of checking for a tty (e.g. using isatty())
+    # because some CI systems mimic a tty (e.g. Travis CI).  Thus that
+    # method doesn't provide definitive information in either direction.
+    return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES)
+
+
+def user_agent() -> str:
+    """
+    Return a string representing the user agent.
+    """
+    data: Dict[str, Any] = {
+        "installer": {"name": "pip", "version": __version__},
+        "python": platform.python_version(),
+        "implementation": {
+            "name": platform.python_implementation(),
+        },
+    }
+
+    if data["implementation"]["name"] == "CPython":
+        data["implementation"]["version"] = platform.python_version()
+    elif data["implementation"]["name"] == "PyPy":
+        pypy_version_info = sys.pypy_version_info  # type: ignore
+        if pypy_version_info.releaselevel == "final":
+            pypy_version_info = pypy_version_info[:3]
+        data["implementation"]["version"] = ".".join(
+            [str(x) for x in pypy_version_info]
+        )
+    elif data["implementation"]["name"] == "Jython":
+        # Complete Guess
+        data["implementation"]["version"] = platform.python_version()
+    elif data["implementation"]["name"] == "IronPython":
+        # Complete Guess
+        data["implementation"]["version"] = platform.python_version()
+
+    if sys.platform.startswith("linux"):
+        from pip._vendor import distro
+
+        linux_distribution = distro.name(), distro.version(), distro.codename()
+        distro_infos: Dict[str, Any] = dict(
+            filter(
+                lambda x: x[1],
+                zip(["name", "version", "id"], linux_distribution),
+            )
+        )
+        libc = dict(
+            filter(
+                lambda x: x[1],
+                zip(["lib", "version"], libc_ver()),
+            )
+        )
+        if libc:
+            distro_infos["libc"] = libc
+        if distro_infos:
+            data["distro"] = distro_infos
+
+    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
+        data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}
+
+    if platform.system():
+        data.setdefault("system", {})["name"] = platform.system()
+
+    if platform.release():
+        data.setdefault("system", {})["release"] = platform.release()
+
+    if platform.machine():
+        data["cpu"] = platform.machine()
+
+    if has_tls():
+        import _ssl as ssl
+
+        data["openssl_version"] = ssl.OPENSSL_VERSION
+
+    setuptools_dist = get_default_environment().get_distribution("setuptools")
+    if setuptools_dist is not None:
+        data["setuptools_version"] = str(setuptools_dist.version)
+
+    if shutil.which("rustc") is not None:
+        # If for any reason `rustc --version` fails, silently ignore it
+        try:
+            rustc_output = subprocess.check_output(
+                ["rustc", "--version"], stderr=subprocess.STDOUT, timeout=0.5
+            )
+        except Exception:
+            pass
+        else:
+            if rustc_output.startswith(b"rustc "):
+                # The format of `rustc --version` is:
+                # `b'rustc 1.52.1 (9bc8c42bb 2021-05-09)\n'`
+                # We extract just the middle (1.52.1) part
+                data["rustc_version"] = rustc_output.split(b" ")[1].decode()
+
+    # Use None rather than False so as not to give the impression that
+    # pip knows it is not being run under CI.  Rather, it is a null or
+    # inconclusive result.  Also, we include some value rather than no
+    # value to make it easier to know that the check has been run.
+    data["ci"] = True if looks_like_ci() else None
+
+    user_data = os.environ.get("PIP_USER_AGENT_USER_DATA")
+    if user_data is not None:
+        data["user_data"] = user_data
+
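+    # Example result (abridged):
+    #   'pip/24.0 {"ci":null,"cpu":"x86_64","installer":{...},"python":"3.12.1",...}'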
+    return "{data[installer][name]}/{data[installer][version]} {json}".format(
+        data=data,
+        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
+    )
+
+
+class LocalFSAdapter(BaseAdapter):
+    def send(
+        self,
+        request: PreparedRequest,
+        stream: bool = False,
+        timeout: Optional[Union[float, Tuple[float, float]]] = None,
+        verify: Union[bool, str] = True,
+        cert: Optional[Union[str, Tuple[str, str]]] = None,
+        proxies: Optional[Mapping[str, str]] = None,
+    ) -> Response:
+        pathname = url_to_path(request.url)
+
+        resp = Response()
+        resp.status_code = 200
+        resp.url = request.url
+
+        try:
+            stats = os.stat(pathname)
+        except OSError as exc:
+            # format the exception raised as a io.BytesIO object,
+            # to return a better error message:
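+        # Prefer the candidate whose path shares the longest common directory
+        # prefix with the requested URL's path.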
+            resp.status_code = 404
+            resp.reason = type(exc).__name__
+            resp.raw = io.BytesIO(f"{resp.reason}: {exc}".encode("utf8"))
+        else:
+            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
+            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
+            resp.headers = CaseInsensitiveDict(
+                {
+                    "Content-Type": content_type,
+                    "Content-Length": stats.st_size,
+                    "Last-Modified": modified,
+                }
+            )
+
+            resp.raw = open(pathname, "rb")
+            resp.close = resp.raw.close
+
+        return resp
+
+    def close(self) -> None:
+        pass
+
+
+class _SSLContextAdapterMixin:
+    """Mixin to add the ``ssl_context`` constructor argument to HTTP adapters.
+
+    The additional argument is forwarded directly to the pool manager. This allows us
+    to dynamically decide what SSL store to use at runtime, which is used to implement
+    the optional ``truststore`` backend.
+    """
+
+    def __init__(
+        self,
+        *,
+        ssl_context: Optional["SSLContext"] = None,
+        **kwargs: Any,
+    ) -> None:
+        self._ssl_context = ssl_context
+        super().__init__(**kwargs)
+
+    def init_poolmanager(
+        self,
+        connections: int,
+        maxsize: int,
+        block: bool = DEFAULT_POOLBLOCK,
+        **pool_kwargs: Any,
+    ) -> "PoolManager":
+        if self._ssl_context is not None:
+            pool_kwargs.setdefault("ssl_context", self._ssl_context)
+        return super().init_poolmanager(  # type: ignore[misc]
+            connections=connections,
+            maxsize=maxsize,
+            block=block,
+            **pool_kwargs,
+        )
+
+
+class HTTPAdapter(_SSLContextAdapterMixin, _BaseHTTPAdapter):
+    pass
+
+
+class CacheControlAdapter(_SSLContextAdapterMixin, _BaseCacheControlAdapter):
+    pass
+
+
+class InsecureHTTPAdapter(HTTPAdapter):
+    def cert_verify(
+        self,
+        conn: ConnectionPool,
+        url: str,
+        verify: Union[bool, str],
+        cert: Optional[Union[str, Tuple[str, str]]],
+    ) -> None:
+        super().cert_verify(conn=conn, url=url, verify=False, cert=cert)
+
+
+class InsecureCacheControlAdapter(CacheControlAdapter):
+    def cert_verify(
+        self,
+        conn: ConnectionPool,
+        url: str,
+        verify: Union[bool, str],
+        cert: Optional[Union[str, Tuple[str, str]]],
+    ) -> None:
+        super().cert_verify(conn=conn, url=url, verify=False, cert=cert)
+
+
+class PipSession(requests.Session):
+    timeout: Optional[int] = None
+
+    def __init__(
+        self,
+        *args: Any,
+        retries: int = 0,
+        cache: Optional[str] = None,
+        trusted_hosts: Sequence[str] = (),
+        index_urls: Optional[List[str]] = None,
+        ssl_context: Optional["SSLContext"] = None,
+        **kwargs: Any,
+    ) -> None:
+        """
+        :param trusted_hosts: Domains not to emit warnings for when not using
+            HTTPS.
+        """
+        super().__init__(*args, **kwargs)
+
+        # Namespace the attribute with "pip_" just in case to prevent
+        # possible conflicts with the base class.
+        self.pip_trusted_origins: List[Tuple[str, Optional[int]]] = []
+
+        # Attach our User Agent to the request
+        self.headers["User-Agent"] = user_agent()
+
+        # Attach our Authentication handler to the session
+        self.auth = MultiDomainBasicAuth(index_urls=index_urls)
+
+        # Create our urllib3.Retry instance which will allow us to customize
+        # how we handle retries.
+        retries = urllib3.Retry(
+            # Set the total number of retries that a particular request can
+            # have.
+            total=retries,
+            # A 503 error from PyPI typically means that the Fastly -> Origin
+            # connection got interrupted in some way. A 503 error in general
+            # is typically considered a transient error so we'll go ahead and
+            # retry it.
+            # A 500 may indicate transient error in Amazon S3
+            # A 502 may be a transient error from a CDN like CloudFlare or CloudFront
+            # A 520 or 527 - may indicate transient error in CloudFlare
+            status_forcelist=[500, 502, 503, 520, 527],
+            # Add a small amount of back off between failed requests in
+            # order to prevent hammering the service.
+            backoff_factor=0.25,
+        )  # type: ignore
+
+        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
+        # support caching so we'll use it for all http:// URLs.
+        # If caching is disabled, we will also use it for
+        # https:// hosts that we've marked as ignoring
+        # TLS errors for (trusted-hosts).
+        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)
+
+        # We want to _only_ cache responses on securely fetched origins or when
+        # the host is specified as trusted. We do this because
+        # we can't validate the response of an insecurely/untrusted fetched
+        # origin, and we don't want someone to be able to poison the cache and
+        # require manual eviction from the cache to fix it.
+        if cache:
+            secure_adapter = CacheControlAdapter(
+                cache=SafeFileCache(cache),
+                max_retries=retries,
+                ssl_context=ssl_context,
+            )
+            self._trusted_host_adapter = InsecureCacheControlAdapter(
+                cache=SafeFileCache(cache),
+                max_retries=retries,
+            )
+        else:
+            secure_adapter = HTTPAdapter(max_retries=retries, ssl_context=ssl_context)
+            self._trusted_host_adapter = insecure_adapter
+
+        self.mount("https://", secure_adapter)
+        self.mount("http://", insecure_adapter)
+
+        # Enable file:// urls
+        self.mount("file://", LocalFSAdapter())
+
+        for host in trusted_hosts:
+            self.add_trusted_host(host, suppress_logging=True)
+
+    def update_index_urls(self, new_index_urls: List[str]) -> None:
+        """
+        :param new_index_urls: New index urls to update the authentication
+            handler with.
+        """
+        self.auth.index_urls = new_index_urls
+
+    def add_trusted_host(
+        self, host: str, source: Optional[str] = None, suppress_logging: bool = False
+    ) -> None:
+        """
+        :param host: It is okay to provide a host that has previously been
+            added.
+        :param source: An optional source string, for logging where the host
+            string came from.
+        """
+        if not suppress_logging:
+            msg = f"adding trusted host: {host!r}"
+            if source is not None:
+                msg += f" (from {source})"
+            logger.info(msg)
+
+        parsed_host, parsed_port = parse_netloc(host)
+        if parsed_host is None:
+            raise ValueError(f"Trusted host URL must include a host part: {host!r}")
+        if (parsed_host, parsed_port) not in self.pip_trusted_origins:
+            self.pip_trusted_origins.append((parsed_host, parsed_port))
+
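+        # Route both the http:// and https:// forms of this host through the
+        # trusted-host adapter; when no explicit port was given, also mount the
+        # "netloc:" prefixes so any port on this host is covered.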
+        self.mount(
+            build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter
+        )
+        self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter)
+        if not parsed_port:
+            self.mount(
+                build_url_from_netloc(host, scheme="http") + ":",
+                self._trusted_host_adapter,
+            )
+            # Mount wildcard ports for the same host.
+            self.mount(build_url_from_netloc(host) + ":", self._trusted_host_adapter)
+
+    def iter_secure_origins(self) -> Generator[SecureOrigin, None, None]:
+        yield from SECURE_ORIGINS
+        for host, port in self.pip_trusted_origins:
+            yield ("*", host, "*" if port is None else port)
+
+    def is_secure_origin(self, location: Link) -> bool:
+        # Determine if this url used a secure transport mechanism
+        parsed = urllib.parse.urlparse(str(location))
+        origin_protocol, origin_host, origin_port = (
+            parsed.scheme,
+            parsed.hostname,
+            parsed.port,
+        )
+
+        # The protocol to use to see if the protocol matches.
+        # Don't count the repository type as part of the protocol: in
+        # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
+        # the last scheme.)
+        origin_protocol = origin_protocol.rsplit("+", 1)[-1]
+
+        # Determine if our origin is a secure origin by looking through our
+        # hardcoded list of secure origins, as well as any additional ones
+        # configured on this PackageFinder instance.
+        for secure_origin in self.iter_secure_origins():
+            secure_protocol, secure_host, secure_port = secure_origin
+            if origin_protocol != secure_protocol and secure_protocol != "*":
+                continue
+
+            try:
+                addr = ipaddress.ip_address(origin_host or "")
+                network = ipaddress.ip_network(secure_host)
+            except ValueError:
+                # We don't have both a valid address and a valid network, so
+                # we'll check this origin against hostnames.
+                if (
+                    origin_host
+                    and origin_host.lower() != secure_host.lower()
+                    and secure_host != "*"
+                ):
+                    continue
+            else:
+                # We have a valid address and network, so see if the address
+                # is contained within the network.
+                if addr not in network:
+                    continue
+
+            # Check to see if the port matches.
+            if (
+                origin_port != secure_port
+                and secure_port != "*"
+                and secure_port is not None
+            ):
+                continue
+
+            # If we've gotten here, then this origin matches the current
+            # secure origin and we should return True
+            return True
+
+        # If we've gotten to this point, then the origin isn't secure and we
+        # will not accept it as a valid location to search. We will however
+        # log a warning that we are ignoring it.
+        logger.warning(
+            "The repository located at %s is not a trusted or secure host and "
+            "is being ignored. If this repository is available via HTTPS we "
+            "recommend you use HTTPS instead, otherwise you may silence "
+            "this warning and allow it anyway with '--trusted-host %s'.",
+            origin_host,
+            origin_host,
+        )
+
+        return False
+
+    def request(self, method: str, url: str, *args: Any, **kwargs: Any) -> Response:
+        # Allow setting a default timeout on a session
+        kwargs.setdefault("timeout", self.timeout)
+        # Allow setting a default proxies on a session
+        kwargs.setdefault("proxies", self.proxies)
+
+        # Dispatch the actual request
+        return super().request(method, url, *args, **kwargs)
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/network/utils.py b/.venv/lib/python3.12/site-packages/pip/_internal/network/utils.py
new file mode 100644
index 0000000..134848a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/network/utils.py
@@ -0,0 +1,96 @@
+from typing import Dict, Generator
+
+from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
+
+from pip._internal.exceptions import NetworkConnectionError
+
+# The following comments and HTTP headers were originally added by
+# Donald Stufft in git commit 22c562429a61bb77172039e480873fb239dd8c03.
+#
+# We use Accept-Encoding: identity here because requests defaults to
+# accepting compressed responses. This breaks in a variety of ways
+# depending on how the server is configured.
+# - Some servers will notice that the file isn't a compressible file
+#   and will leave the file alone and with an empty Content-Encoding
+# - Some servers will notice that the file is already compressed and
+#   will leave the file alone, adding a Content-Encoding: gzip header
+# - Some servers won't notice anything at all and will take a file
+#   that's already been compressed and compress it again, and set
+#   the Content-Encoding: gzip header
+# By setting this to request only the identity encoding we're hoping
+# to eliminate the third case.  Hopefully there does not exist a server
+# which when given a file will notice it is already compressed and that
+# you're not asking for a compressed file and will then decompress it
+# before sending because if that's the case I don't think it'll ever be
+# possible to make this work.
+HEADERS: Dict[str, str] = {"Accept-Encoding": "identity"}
+
+
+def raise_for_status(resp: Response) -> None:
+    http_error_msg = ""
+    if isinstance(resp.reason, bytes):
+        # We attempt to decode utf-8 first because some servers
+        # choose to localize their reason strings. If the string
+        # isn't utf-8, we fall back to iso-8859-1 for all other
+        # encodings.
+        try:
+            reason = resp.reason.decode("utf-8")
+        except UnicodeDecodeError:
+            reason = resp.reason.decode("iso-8859-1")
+    else:
+        reason = resp.reason
+
+    if 400 <= resp.status_code < 500:
+        http_error_msg = (
+            f"{resp.status_code} Client Error: {reason} for url: {resp.url}"
+        )
+
+    elif 500 <= resp.status_code < 600:
+        http_error_msg = (
+            f"{resp.status_code} Server Error: {reason} for url: {resp.url}"
+        )
+
+    if http_error_msg:
+        raise NetworkConnectionError(http_error_msg, response=resp)
+
+
+def response_chunks(
+    response: Response, chunk_size: int = CONTENT_CHUNK_SIZE
+) -> Generator[bytes, None, None]:
+    """Given a requests Response, provide the data chunks."""
+    try:
+        # Special case for urllib3.
+        for chunk in response.raw.stream(
+            chunk_size,
+            # We use decode_content=False here because we don't
+            # want urllib3 to mess with the raw bytes we get
+            # from the server. If we decompress inside of
+            # urllib3 then we cannot verify the checksum
+            # because the checksum will be of the compressed
+            # file. This breakage will only occur if the
+            # server adds a Content-Encoding header, which
+            # depends on how the server was configured:
+            # - Some servers will notice that the file isn't a
+            #   compressible file and will leave the file alone
+            #   and with an empty Content-Encoding
+            # - Some servers will notice that the file is
+            #   already compressed and will leave the file
+            #   alone and will add a Content-Encoding: gzip
+            #   header
+            # - Some servers won't notice anything at all and
+            #   will take a file that's already been compressed
+            #   and compress it again and set the
+            #   Content-Encoding: gzip header
+            #
+            # By setting this not to decode automatically we
+            # hope to eliminate problems with the second case.
+            decode_content=False,
+        ):
+            yield chunk
+    except AttributeError:
+        # Standard file-like object.
+        while True:
+            chunk = response.raw.read(chunk_size)
+            if not chunk:
+                break
+            yield chunk
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/network/xmlrpc.py b/.venv/lib/python3.12/site-packages/pip/_internal/network/xmlrpc.py
new file mode 100644
index 0000000..22ec8d2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/network/xmlrpc.py
@@ -0,0 +1,62 @@
+"""xmlrpclib.Transport implementation
+"""
+
+import logging
+import urllib.parse
+import xmlrpc.client
+from typing import TYPE_CHECKING, Tuple
+
+from pip._internal.exceptions import NetworkConnectionError
+from pip._internal.network.session import PipSession
+from pip._internal.network.utils import raise_for_status
+
+if TYPE_CHECKING:
+    from xmlrpc.client import _HostType, _Marshallable
+
+    from _typeshed import SizedBuffer
+
+logger = logging.getLogger(__name__)
+
+
+class PipXmlrpcTransport(xmlrpc.client.Transport):
+    """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
+    object.
+    """
+
+    def __init__(
+        self, index_url: str, session: PipSession, use_datetime: bool = False
+    ) -> None:
+        super().__init__(use_datetime)
+        index_parts = urllib.parse.urlparse(index_url)
+        self._scheme = index_parts.scheme
+        self._session = session
+
+    def request(
+        self,
+        host: "_HostType",
+        handler: str,
+        request_body: "SizedBuffer",
+        verbose: bool = False,
+    ) -> Tuple["_Marshallable", ...]:
+        assert isinstance(host, str)
+        parts = (self._scheme, host, handler, None, None, None)
+        url = urllib.parse.urlunparse(parts)
+        try:
+            headers = {"Content-Type": "text/xml"}
+            response = self._session.post(
+                url,
+                data=request_body,
+                headers=headers,
+                stream=True,
+            )
+            raise_for_status(response)
+            self.verbose = verbose
+            return self.parse_response(response.raw)
+        except NetworkConnectionError as exc:
+            assert exc.response
+            logger.critical(
+                "HTTP error %s while getting %s",
+                exc.response.status_code,
+                url,
+            )
+            raise
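
A hedged sketch of how this transport could be plugged into the standard library's `xmlrpc.client.ServerProxy`, so every XML-RPC call is carried over a `PipSession` (retries, proxies, auth) instead of the stdlib transport. The index URL and the `search` call are illustrative and not taken from this file:

    import xmlrpc.client

    from pip._internal.network.session import PipSession
    from pip._internal.network.xmlrpc import PipXmlrpcTransport

    index_url = "https://pypi.org/pypi"  # illustrative endpoint
    session = PipSession()
    transport = PipXmlrpcTransport(index_url, session)

    # ServerProxy hands each call to transport.request(), which posts the XML
    # body through the injected PipSession and parses the response stream.
    proxy = xmlrpc.client.ServerProxy(index_url, transport=transport)
    results = proxy.search({"name": "example"})  # illustrative method name
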
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/operations/__init__.py b/.venv/lib/python3.12/site-packages/pip/_internal/operations/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/operations/check.py b/.venv/lib/python3.12/site-packages/pip/_internal/operations/check.py
new file mode 100644
index 0000000..90c6a58
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/operations/check.py
@@ -0,0 +1,187 @@
+"""Validation of dependencies of packages
+"""
+
+import logging
+from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple
+
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.specifiers import LegacySpecifier
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+from pip._vendor.packaging.version import LegacyVersion
+
+from pip._internal.distributions import make_distribution_for_install_requirement
+from pip._internal.metadata import get_default_environment
+from pip._internal.metadata.base import DistributionVersion
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.deprecation import deprecated
+
+logger = logging.getLogger(__name__)
+
+
+class PackageDetails(NamedTuple):
+    version: DistributionVersion
+    dependencies: List[Requirement]
+
+
+# Shorthands
+PackageSet = Dict[NormalizedName, PackageDetails]
+Missing = Tuple[NormalizedName, Requirement]
+Conflicting = Tuple[NormalizedName, DistributionVersion, Requirement]
+
+MissingDict = Dict[NormalizedName, List[Missing]]
+ConflictingDict = Dict[NormalizedName, List[Conflicting]]
+CheckResult = Tuple[MissingDict, ConflictingDict]
+ConflictDetails = Tuple[PackageSet, CheckResult]
+
+
+def create_package_set_from_installed() -> Tuple[PackageSet, bool]:
+    """Converts a list of distributions into a PackageSet."""
+    package_set = {}
+    problems = False
+    env = get_default_environment()
+    for dist in env.iter_installed_distributions(local_only=False, skip=()):
+        name = dist.canonical_name
+        try:
+            dependencies = list(dist.iter_dependencies())
+            package_set[name] = PackageDetails(dist.version, dependencies)
+        except (OSError, ValueError) as e:
+            # Don't crash on unreadable or broken metadata.
+            logger.warning("Error parsing requirements for %s: %s", name, e)
+            problems = True
+    return package_set, problems
+
+
+def check_package_set(
+    package_set: PackageSet, should_ignore: Optional[Callable[[str], bool]] = None
+) -> CheckResult:
+    """Check if a package set is consistent
+
+    If should_ignore is passed, it should be a callable that takes a
+    package name and returns a boolean.
+    """
+
+    warn_legacy_versions_and_specifiers(package_set)
+
+    missing = {}
+    conflicting = {}
+
+    for package_name, package_detail in package_set.items():
+        # Info about dependencies of package_name
+        missing_deps: Set[Missing] = set()
+        conflicting_deps: Set[Conflicting] = set()
+
+        if should_ignore and should_ignore(package_name):
+            continue
+
+        for req in package_detail.dependencies:
+            name = canonicalize_name(req.name)
+
+            # Check if it's missing
+            if name not in package_set:
+                missed = True
+                if req.marker is not None:
+                    missed = req.marker.evaluate({"extra": ""})
+                if missed:
+                    missing_deps.add((name, req))
+                continue
+
+            # Check if there's a conflict
+            version = package_set[name].version
+            if not req.specifier.contains(version, prereleases=True):
+                conflicting_deps.add((name, version, req))
+
+        if missing_deps:
+            missing[package_name] = sorted(missing_deps, key=str)
+        if conflicting_deps:
+            conflicting[package_name] = sorted(conflicting_deps, key=str)
+
+    return missing, conflicting
+
+
+def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDetails:
+    """For checking if the dependency graph would be consistent after \
+    installing given requirements
+    """
+    # Start from the current state
+    package_set, _ = create_package_set_from_installed()
+    # Install packages
+    would_be_installed = _simulate_installation_of(to_install, package_set)
+
+    # Only warn about directly-dependent packages; create a whitelist of them
+    whitelist = _create_whitelist(would_be_installed, package_set)
+
+    return (
+        package_set,
+        check_package_set(
+            package_set, should_ignore=lambda name: name not in whitelist
+        ),
+    )
+
+
+def _simulate_installation_of(
+    to_install: List[InstallRequirement], package_set: PackageSet
+) -> Set[NormalizedName]:
+    """Computes the version of packages after installing to_install."""
+    # Keep track of packages that were installed
+    installed = set()
+
+    # Modify it as installing requirement_set would (assuming no errors)
+    for inst_req in to_install:
+        abstract_dist = make_distribution_for_install_requirement(inst_req)
+        dist = abstract_dist.get_metadata_distribution()
+        name = dist.canonical_name
+        package_set[name] = PackageDetails(dist.version, list(dist.iter_dependencies()))
+
+        installed.add(name)
+
+    return installed
+
+
+def _create_whitelist(
+    would_be_installed: Set[NormalizedName], package_set: PackageSet
+) -> Set[NormalizedName]:
+    packages_affected = set(would_be_installed)
+
+    for package_name in package_set:
+        if package_name in packages_affected:
+            continue
+
+        for req in package_set[package_name].dependencies:
+            if canonicalize_name(req.name) in packages_affected:
+                packages_affected.add(package_name)
+                break
+
+    return packages_affected
+
+
+def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None:
+    for project_name, package_details in package_set.items():
+        if isinstance(package_details.version, LegacyVersion):
+            deprecated(
+                reason=(
+                    f"{project_name} {package_details.version} "
+                    f"has a non-standard version number."
+                ),
+                replacement=(
+                    f"to upgrade to a newer version of {project_name} "
+                    f"or contact the author to suggest that they "
+                    f"release a version with a conforming version number"
+                ),
+                issue=12063,
+                gone_in="24.1",
+            )
+        for dep in package_details.dependencies:
+            if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier):
+                deprecated(
+                    reason=(
+                        f"{project_name} {package_details.version} "
+                        f"has a non-standard dependency specifier {dep}."
+                    ),
+                    replacement=(
+                        f"to upgrade to a newer version of {project_name} "
+                        f"or contact the author to suggest that they "
+                        f"release a version with a conforming dependency specifier"
+                    ),
+                    issue=12063,
+                    gone_in="24.1",
+                )
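
A minimal sketch of driving these helpers roughly the way `pip check` does; the report formatting and exit-code handling below are illustrative, not part of this module:

    from pip._internal.operations.check import (
        check_package_set,
        create_package_set_from_installed,
    )

    package_set, parse_problems = create_package_set_from_installed()
    missing, conflicting = check_package_set(package_set)

    for project, unmet in missing.items():
        for dep_name, req in unmet:
            print(f"{project} requires {req}, which is not installed")
    for project, clashes in conflicting.items():
        for dep_name, installed_version, req in clashes:
            print(f"{project} requires {req}, but {dep_name} {installed_version} is installed")

    raise SystemExit(1 if (parse_problems or missing or conflicting) else 0)
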
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/operations/freeze.py b/.venv/lib/python3.12/site-packages/pip/_internal/operations/freeze.py
new file mode 100644
index 0000000..3544568
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/operations/freeze.py
@@ -0,0 +1,255 @@
+import collections
+import logging
+import os
+from typing import Container, Dict, Generator, Iterable, List, NamedTuple, Optional, Set
+
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.version import Version
+
+from pip._internal.exceptions import BadCommand, InstallationError
+from pip._internal.metadata import BaseDistribution, get_environment
+from pip._internal.req.constructors import (
+    install_req_from_editable,
+    install_req_from_line,
+)
+from pip._internal.req.req_file import COMMENT_RE
+from pip._internal.utils.direct_url_helpers import direct_url_as_pep440_direct_reference
+
+logger = logging.getLogger(__name__)
+
+
+class _EditableInfo(NamedTuple):
+    requirement: str
+    comments: List[str]
+
+
+def freeze(
+    requirement: Optional[List[str]] = None,
+    local_only: bool = False,
+    user_only: bool = False,
+    paths: Optional[List[str]] = None,
+    isolated: bool = False,
+    exclude_editable: bool = False,
+    skip: Container[str] = (),
+) -> Generator[str, None, None]:
+    installations: Dict[str, FrozenRequirement] = {}
+
+    dists = get_environment(paths).iter_installed_distributions(
+        local_only=local_only,
+        skip=(),
+        user_only=user_only,
+    )
+    for dist in dists:
+        req = FrozenRequirement.from_dist(dist)
+        if exclude_editable and req.editable:
+            continue
+        installations[req.canonical_name] = req
+
+    if requirement:
+        # the options that don't get turned into an InstallRequirement
+        # should only be emitted once, even if the same option is in multiple
+        # requirements files, so we need to keep track of what has been emitted
+        # so that we don't emit it again if it's seen again
+        emitted_options: Set[str] = set()
+        # keep track of which files a requirement is in so that we can
+        # give an accurate warning if a requirement appears multiple times.
+        req_files: Dict[str, List[str]] = collections.defaultdict(list)
+        for req_file_path in requirement:
+            with open(req_file_path) as req_file:
+                for line in req_file:
+                    if (
+                        not line.strip()
+                        or line.strip().startswith("#")
+                        or line.startswith(
+                            (
+                                "-r",
+                                "--requirement",
+                                "-f",
+                                "--find-links",
+                                "-i",
+                                "--index-url",
+                                "--pre",
+                                "--trusted-host",
+                                "--process-dependency-links",
+                                "--extra-index-url",
+                                "--use-feature",
+                            )
+                        )
+                    ):
+                        line = line.rstrip()
+                        if line not in emitted_options:
+                            emitted_options.add(line)
+                            yield line
+                        continue
+
+                    if line.startswith("-e") or line.startswith("--editable"):
+                        if line.startswith("-e"):
+                            line = line[2:].strip()
+                        else:
+                            line = line[len("--editable") :].strip().lstrip("=")
+                        line_req = install_req_from_editable(
+                            line,
+                            isolated=isolated,
+                        )
+                    else:
+                        line_req = install_req_from_line(
+                            COMMENT_RE.sub("", line).strip(),
+                            isolated=isolated,
+                        )
+
+                    if not line_req.name:
+                        logger.info(
+                            "Skipping line in requirement file [%s] because "
+                            "it's not clear what it would install: %s",
+                            req_file_path,
+                            line.strip(),
+                        )
+                        logger.info(
+                            "  (add #egg=PackageName to the URL to avoid"
+                            " this warning)"
+                        )
+                    else:
+                        line_req_canonical_name = canonicalize_name(line_req.name)
+                        if line_req_canonical_name not in installations:
+                            # either it's not installed, or it is installed
+                            # but has been processed already
+                            if not req_files[line_req.name]:
+                                logger.warning(
+                                    "Requirement file [%s] contains %s, but "
+                                    "package %r is not installed",
+                                    req_file_path,
+                                    COMMENT_RE.sub("", line).strip(),
+                                    line_req.name,
+                                )
+                            else:
+                                req_files[line_req.name].append(req_file_path)
+                        else:
+                            yield str(installations[line_req_canonical_name]).rstrip()
+                            del installations[line_req_canonical_name]
+                            req_files[line_req.name].append(req_file_path)
+
+        # Warn about requirements that were included multiple times (in a
+        # single requirements file or in different requirements files).
+        for name, files in req_files.items():
+            if len(files) > 1:
+                logger.warning(
+                    "Requirement %s included multiple times [%s]",
+                    name,
+                    ", ".join(sorted(set(files))),
+                )
+
+        yield ("## The following requirements were added by pip freeze:")
+    for installation in sorted(installations.values(), key=lambda x: x.name.lower()):
+        if installation.canonical_name not in skip:
+            yield str(installation).rstrip()
+
+
+def _format_as_name_version(dist: BaseDistribution) -> str:
+    dist_version = dist.version
+    if isinstance(dist_version, Version):
+        return f"{dist.raw_name}=={dist_version}"
+    return f"{dist.raw_name}==={dist_version}"
+
+
+def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
+    """
+    Compute and return values (req, comments) for use in
+    FrozenRequirement.from_dist().
+    """
+    editable_project_location = dist.editable_project_location
+    assert editable_project_location
+    location = os.path.normcase(os.path.abspath(editable_project_location))
+
+    from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs
+
+    vcs_backend = vcs.get_backend_for_dir(location)
+
+    if vcs_backend is None:
+        display = _format_as_name_version(dist)
+        logger.debug(
+            'No VCS found for editable requirement "%s" in: %r',
+            display,
+            location,
+        )
+        return _EditableInfo(
+            requirement=location,
+            comments=[f"# Editable install with no version control ({display})"],
+        )
+
+    vcs_name = type(vcs_backend).__name__
+
+    try:
+        req = vcs_backend.get_src_requirement(location, dist.raw_name)
+    except RemoteNotFoundError:
+        display = _format_as_name_version(dist)
+        return _EditableInfo(
+            requirement=location,
+            comments=[f"# Editable {vcs_name} install with no remote ({display})"],
+        )
+    except RemoteNotValidError as ex:
+        display = _format_as_name_version(dist)
+        return _EditableInfo(
+            requirement=location,
+            comments=[
+                f"# Editable {vcs_name} install ({display}) with either a deleted "
+                f"local remote or invalid URI:",
+                f"# '{ex.url}'",
+            ],
+        )
+    except BadCommand:
+        logger.warning(
+            "cannot determine version of editable source in %s "
+            "(%s command not found in path)",
+            location,
+            vcs_backend.name,
+        )
+        return _EditableInfo(requirement=location, comments=[])
+    except InstallationError as exc:
+        logger.warning("Error when trying to get requirement for VCS system %s", exc)
+    else:
+        return _EditableInfo(requirement=req, comments=[])
+
+    logger.warning("Could not determine repository location of %s", location)
+
+    return _EditableInfo(
+        requirement=location,
+        comments=["## !! Could not determine repository location"],
+    )
+
+
+class FrozenRequirement:
+    def __init__(
+        self,
+        name: str,
+        req: str,
+        editable: bool,
+        comments: Iterable[str] = (),
+    ) -> None:
+        self.name = name
+        self.canonical_name = canonicalize_name(name)
+        self.req = req
+        self.editable = editable
+        self.comments = comments
+
+    @classmethod
+    def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement":
+        editable = dist.editable
+        if editable:
+            req, comments = _get_editable_info(dist)
+        else:
+            comments = []
+            direct_url = dist.direct_url
+            if direct_url:
+                # if PEP 610 metadata is present, use it
+                req = direct_url_as_pep440_direct_reference(direct_url, dist.raw_name)
+            else:
+                # name==version requirement
+                req = _format_as_name_version(dist)
+
+        return cls(dist.raw_name, req, editable, comments=comments)
+
+    def __str__(self) -> str:
+        req = self.req
+        if self.editable:
+            req = f"-e {req}"
+        return "\n".join(list(self.comments) + [str(req)]) + "\n"
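
`freeze()` is a generator of requirement lines, so a caller simply iterates it; a small sketch (the skip set here is illustrative):

    from pip._internal.operations.freeze import freeze

    # One "name==version" (or "-e <location>") line per installed distribution,
    # leaving out the packaging tooling itself.
    for line in freeze(skip={"pip", "setuptools", "wheel"}):
        print(line)
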
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/operations/install/__init__.py b/.venv/lib/python3.12/site-packages/pip/_internal/operations/install/__init__.py
new file mode 100644
index 0000000..24d6a5d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/operations/install/__init__.py
@@ -0,0 +1,2 @@
+"""For modules related to installing packages.
+"""
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/operations/install/editable_legacy.py b/.venv/lib/python3.12/site-packages/pip/_internal/operations/install/editable_legacy.py
new file mode 100644
index 0000000..bebe24e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/operations/install/editable_legacy.py
@@ -0,0 +1,46 @@
+"""Legacy editable installation process, i.e. `setup.py develop`.
+"""
+import logging
+from typing import Optional, Sequence
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.setuptools_build import make_setuptools_develop_args
+from pip._internal.utils.subprocess import call_subprocess
+
+logger = logging.getLogger(__name__)
+
+
+def install_editable(
+    *,
+    global_options: Sequence[str],
+    prefix: Optional[str],
+    home: Optional[str],
+    use_user_site: bool,
+    name: str,
+    setup_py_path: str,
+    isolated: bool,
+    build_env: BuildEnvironment,
+    unpacked_source_directory: str,
+) -> None:
+    """Install a package in editable mode. Most arguments are pass-through
+    to setuptools.
+    """
+    logger.info("Running setup.py develop for %s", name)
+
+    args = make_setuptools_develop_args(
+        setup_py_path,
+        global_options=global_options,
+        no_user_config=isolated,
+        prefix=prefix,
+        home=home,
+        use_user_site=use_user_site,
+    )
+
+    with indent_log():
+        with build_env:
+            call_subprocess(
+                args,
+                command_desc="python setup.py develop",
+                cwd=unpacked_source_directory,
+            )
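
Most of the work here is building the `setup.py develop` command line via `make_setuptools_develop_args` and running it inside the build environment. A hedged sketch of just the argument-building step; every value below is made up for illustration:

    from pip._internal.utils.setuptools_build import make_setuptools_develop_args

    args = make_setuptools_develop_args(
        "/tmp/build/example/setup.py",  # illustrative path
        global_options=[],
        no_user_config=False,
        prefix=None,
        home=None,
        use_user_site=False,
    )
    # Roughly: the current interpreter invoked with a setup.py shim via -c,
    # followed by "develop" and any prefix/home/user-site flags.
    print(args)
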
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/operations/install/wheel.py b/.venv/lib/python3.12/site-packages/pip/_internal/operations/install/wheel.py
new file mode 100644
index 0000000..f67180c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/operations/install/wheel.py
@@ -0,0 +1,734 @@
+"""Support for installing and building the "wheel" binary package format.
+"""
+
+import collections
+import compileall
+import contextlib
+import csv
+import importlib
+import logging
+import os.path
+import re
+import shutil
+import sys
+import warnings
+from base64 import urlsafe_b64encode
+from email.message import Message
+from itertools import chain, filterfalse, starmap
+from typing import (
+    IO,
+    TYPE_CHECKING,
+    Any,
+    BinaryIO,
+    Callable,
+    Dict,
+    Generator,
+    Iterable,
+    Iterator,
+    List,
+    NewType,
+    Optional,
+    Sequence,
+    Set,
+    Tuple,
+    Union,
+    cast,
+)
+from zipfile import ZipFile, ZipInfo
+
+from pip._vendor.distlib.scripts import ScriptMaker
+from pip._vendor.distlib.util import get_export_entry
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.locations import get_major_minor_version
+from pip._internal.metadata import (
+    BaseDistribution,
+    FilesystemWheel,
+    get_wheel_distribution,
+)
+from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl
+from pip._internal.models.scheme import SCHEME_KEYS, Scheme
+from pip._internal.utils.filesystem import adjacent_tmp_file, replace
+from pip._internal.utils.misc import captured_stdout, ensure_dir, hash_file, partition
+from pip._internal.utils.unpacking import (
+    current_umask,
+    is_within_directory,
+    set_extracted_file_to_default_mode_plus_executable,
+    zip_item_is_executable,
+)
+from pip._internal.utils.wheel import parse_wheel
+
+if TYPE_CHECKING:
+    from typing import Protocol
+
+    class File(Protocol):
+        src_record_path: "RecordPath"
+        dest_path: str
+        changed: bool
+
+        def save(self) -> None:
+            pass
+
+
+logger = logging.getLogger(__name__)
+
+RecordPath = NewType("RecordPath", str)
+InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]]
+
+
+def rehash(path: str, blocksize: int = 1 << 20) -> Tuple[str, str]:
+    """Return (encoded_digest, length) for path using hashlib.sha256()"""
+    h, length = hash_file(path, blocksize)
+    digest = "sha256=" + urlsafe_b64encode(h.digest()).decode("latin1").rstrip("=")
+    return (digest, str(length))
+
+
+def csv_io_kwargs(mode: str) -> Dict[str, Any]:
+    """Return keyword arguments to properly open a CSV file
+    in the given mode.
+    """
+    return {"mode": mode, "newline": "", "encoding": "utf-8"}
+
+
+def fix_script(path: str) -> bool:
+    """Replace #!python with #!/path/to/python
+    Return True if file was changed.
+    """
+    # XXX RECORD hashes will need to be updated
+    assert os.path.isfile(path)
+
+    with open(path, "rb") as script:
+        firstline = script.readline()
+        if not firstline.startswith(b"#!python"):
+            return False
+        exename = sys.executable.encode(sys.getfilesystemencoding())
+        firstline = b"#!" + exename + os.linesep.encode("ascii")
+        rest = script.read()
+    with open(path, "wb") as script:
+        script.write(firstline)
+        script.write(rest)
+    return True
+
+
+def wheel_root_is_purelib(metadata: Message) -> bool:
+    return metadata.get("Root-Is-Purelib", "").lower() == "true"
+
+
+def get_entrypoints(dist: BaseDistribution) -> Tuple[Dict[str, str], Dict[str, str]]:
+    console_scripts = {}
+    gui_scripts = {}
+    for entry_point in dist.iter_entry_points():
+        if entry_point.group == "console_scripts":
+            console_scripts[entry_point.name] = entry_point.value
+        elif entry_point.group == "gui_scripts":
+            gui_scripts[entry_point.name] = entry_point.value
+    return console_scripts, gui_scripts
+
+
+def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]:
+    """Determine if any scripts are not on PATH and format a warning.
+    Returns a warning message if one or more scripts are not on PATH,
+    otherwise None.
+    """
+    if not scripts:
+        return None
+
+    # Group scripts by the path they were installed in
+    grouped_by_dir: Dict[str, Set[str]] = collections.defaultdict(set)
+    for destfile in scripts:
+        parent_dir = os.path.dirname(destfile)
+        script_name = os.path.basename(destfile)
+        grouped_by_dir[parent_dir].add(script_name)
+
+    # We don't want to warn for directories that are on PATH.
+    not_warn_dirs = [
+        os.path.normcase(os.path.normpath(i)).rstrip(os.sep)
+        for i in os.environ.get("PATH", "").split(os.pathsep)
+    ]
+    # If an executable sits with sys.executable, we don't warn for it.
+    #     This covers the case of venv invocations without activating the venv.
+    not_warn_dirs.append(
+        os.path.normcase(os.path.normpath(os.path.dirname(sys.executable)))
+    )
+    warn_for: Dict[str, Set[str]] = {
+        parent_dir: scripts
+        for parent_dir, scripts in grouped_by_dir.items()
+        if os.path.normcase(os.path.normpath(parent_dir)) not in not_warn_dirs
+    }
+    if not warn_for:
+        return None
+
+    # Format a message
+    msg_lines = []
+    for parent_dir, dir_scripts in warn_for.items():
+        sorted_scripts: List[str] = sorted(dir_scripts)
+        if len(sorted_scripts) == 1:
+            start_text = f"script {sorted_scripts[0]} is"
+        else:
+            start_text = "scripts {} are".format(
+                ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
+            )
+
+        msg_lines.append(
+            f"The {start_text} installed in '{parent_dir}' which is not on PATH."
+        )
+
+    last_line_fmt = (
+        "Consider adding {} to PATH or, if you prefer "
+        "to suppress this warning, use --no-warn-script-location."
+    )
+    if len(msg_lines) == 1:
+        msg_lines.append(last_line_fmt.format("this directory"))
+    else:
+        msg_lines.append(last_line_fmt.format("these directories"))
+
+    # Add a note if any directory starts with ~
+    warn_for_tilde = any(
+        i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i
+    )
+    if warn_for_tilde:
+        tilde_warning_msg = (
+            "NOTE: The current PATH contains path(s) starting with `~`, "
+            "which may not be expanded by all applications."
+        )
+        msg_lines.append(tilde_warning_msg)
+
+    # Returns the formatted multiline message
+    return "\n".join(msg_lines)
+
+
+def _normalized_outrows(
+    outrows: Iterable[InstalledCSVRow],
+) -> List[Tuple[str, str, str]]:
+    """Normalize the given rows of a RECORD file.
+
+    Items in each row are converted into str. Rows are then sorted to make
+    the value more predictable for tests.
+
+    Each row is a 3-tuple (path, hash, size) and corresponds to a record of
+    a RECORD file (see PEP 376 and PEP 427 for details).  For the rows
+    passed to this function, the size can be an integer as an int or string,
+    or the empty string.
+    """
+    # Normally, there should only be one row per path, in which case the
+    # second and third elements don't come into play when sorting.
+    # However, in cases in the wild where a path might happen to occur twice,
+    # we don't want the sort operation to trigger an error (but still want
+    # determinism).  Since the third element can be an int or string, we
+    # coerce each element to a string to avoid a TypeError in this case.
+    # For additional background, see--
+    # https://github.com/pypa/pip/issues/5868
+    return sorted(
+        (record_path, hash_, str(size)) for record_path, hash_, size in outrows
+    )
+
+
+def _record_to_fs_path(record_path: RecordPath, lib_dir: str) -> str:
+    return os.path.join(lib_dir, record_path)
+
+
+def _fs_to_record_path(path: str, lib_dir: str) -> RecordPath:
+    # On Windows, do not handle relative paths if they belong to different
+    # logical disks
+    if os.path.splitdrive(path)[0].lower() == os.path.splitdrive(lib_dir)[0].lower():
+        path = os.path.relpath(path, lib_dir)
+
+    path = path.replace(os.path.sep, "/")
+    return cast("RecordPath", path)
+
+
+def get_csv_rows_for_installed(
+    old_csv_rows: List[List[str]],
+    installed: Dict[RecordPath, RecordPath],
+    changed: Set[RecordPath],
+    generated: List[str],
+    lib_dir: str,
+) -> List[InstalledCSVRow]:
+    """
+    :param installed: A map from archive RECORD path to installation RECORD
+        path.
+    """
+    installed_rows: List[InstalledCSVRow] = []
+    for row in old_csv_rows:
+        if len(row) > 3:
+            logger.warning("RECORD line has more than three elements: %s", row)
+        old_record_path = cast("RecordPath", row[0])
+        new_record_path = installed.pop(old_record_path, old_record_path)
+        if new_record_path in changed:
+            digest, length = rehash(_record_to_fs_path(new_record_path, lib_dir))
+        else:
+            digest = row[1] if len(row) > 1 else ""
+            length = row[2] if len(row) > 2 else ""
+        installed_rows.append((new_record_path, digest, length))
+    for f in generated:
+        path = _fs_to_record_path(f, lib_dir)
+        digest, length = rehash(f)
+        installed_rows.append((path, digest, length))
+    return installed_rows + [
+        (installed_record_path, "", "") for installed_record_path in installed.values()
+    ]
+
+
+def get_console_script_specs(console: Dict[str, str]) -> List[str]:
+    """
+    Given the mapping from entrypoint name to callable, return the relevant
+    console script specs.
+    """
+    # Don't mutate caller's version
+    console = console.copy()
+
+    scripts_to_generate = []
+
+    # Special case pip and setuptools to generate versioned wrappers
+    #
+    # The issue is that some projects (specifically, pip and setuptools) use
+    # code in setup.py to create "versioned" entry points - pip2.7 on Python
+    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
+    # the wheel metadata at build time, and so if the wheel is installed with
+    # a *different* version of Python the entry points will be wrong. The
+    # correct fix for this is to enhance the metadata to be able to describe
+    # such versioned entry points, but that won't happen till Metadata 2.0 is
+    # available.
+    # In the meantime, projects using versioned entry points will either have
+    # incorrect versioned entry points, or they will not be able to distribute
+    # "universal" wheels (i.e., they will need a wheel per Python version).
+    #
+    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
+    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
+    # override the versioned entry points in the wheel and generate the
+    # correct ones. This code is purely a short-term measure until Metadata 2.0
+    # is available.
+    #
+    # To add to the level of hack in this section of code: in order to support
+    # ensurepip, this code looks for an ``ENSUREPIP_OPTIONS`` environment
+    # variable which controls which versioned scripts get installed.
+    #
+    # ENSUREPIP_OPTIONS=altinstall
+    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
+    # ENSUREPIP_OPTIONS=install
+    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
+    #     that this option is technically if ENSUREPIP_OPTIONS is set and is
+    #     not altinstall
+    # DEFAULT
+    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
+    #     and easy_install-X.Y.
+    pip_script = console.pop("pip", None)
+    if pip_script:
+        if "ENSUREPIP_OPTIONS" not in os.environ:
+            scripts_to_generate.append("pip = " + pip_script)
+
+        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
+            scripts_to_generate.append(f"pip{sys.version_info[0]} = {pip_script}")
+
+        scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}")
+        # Delete any other versioned pip entry points
+        pip_ep = [k for k in console if re.match(r"pip(\d+(\.\d+)?)?$", k)]
+        for k in pip_ep:
+            del console[k]
+    easy_install_script = console.pop("easy_install", None)
+    if easy_install_script:
+        if "ENSUREPIP_OPTIONS" not in os.environ:
+            scripts_to_generate.append("easy_install = " + easy_install_script)
+
+        scripts_to_generate.append(
+            f"easy_install-{get_major_minor_version()} = {easy_install_script}"
+        )
+        # Delete any other versioned easy_install entry points
+        easy_install_ep = [
+            k for k in console if re.match(r"easy_install(-\d+\.\d+)?$", k)
+        ]
+        for k in easy_install_ep:
+            del console[k]
+
+    # Generate the console entry points specified in the wheel
+    scripts_to_generate.extend(starmap("{} = {}".format, console.items()))
+
+    return scripts_to_generate
+
+
+class ZipBackedFile:
+    def __init__(
+        self, src_record_path: RecordPath, dest_path: str, zip_file: ZipFile
+    ) -> None:
+        self.src_record_path = src_record_path
+        self.dest_path = dest_path
+        self._zip_file = zip_file
+        self.changed = False
+
+    def _getinfo(self) -> ZipInfo:
+        return self._zip_file.getinfo(self.src_record_path)
+
+    def save(self) -> None:
+        # directory creation is lazy and after file filtering
+        # to ensure we don't install empty dirs; empty dirs can't be
+        # uninstalled.
+        parent_dir = os.path.dirname(self.dest_path)
+        ensure_dir(parent_dir)
+
+        # When we open the output file below, any existing file is truncated
+        # before we start writing the new contents. This is fine in most
+        # cases, but can cause a segfault if pip has loaded a shared
+        # object (e.g. from pyopenssl through its vendored urllib3)
+        # Since the shared object is mmap'd an attempt to call a
+        # symbol in it will then cause a segfault. Unlinking the file
+        # allows writing of new contents while allowing the process to
+        # continue to use the old copy.
+        if os.path.exists(self.dest_path):
+            os.unlink(self.dest_path)
+
+        zipinfo = self._getinfo()
+
+        with self._zip_file.open(zipinfo) as f:
+            with open(self.dest_path, "wb") as dest:
+                shutil.copyfileobj(f, dest)
+
+        if zip_item_is_executable(zipinfo):
+            set_extracted_file_to_default_mode_plus_executable(self.dest_path)
+
+
+class ScriptFile:
+    def __init__(self, file: "File") -> None:
+        self._file = file
+        self.src_record_path = self._file.src_record_path
+        self.dest_path = self._file.dest_path
+        self.changed = False
+
+    def save(self) -> None:
+        self._file.save()
+        self.changed = fix_script(self.dest_path)
+
+
+class MissingCallableSuffix(InstallationError):
+    def __init__(self, entry_point: str) -> None:
+        super().__init__(
+            f"Invalid script entry point: {entry_point} - A callable "
+            "suffix is required. Cf https://packaging.python.org/"
+            "specifications/entry-points/#use-for-scripts for more "
+            "information."
+        )
+
+
+def _raise_for_invalid_entrypoint(specification: str) -> None:
+    entry = get_export_entry(specification)
+    if entry is not None and entry.suffix is None:
+        raise MissingCallableSuffix(str(entry))
+
+
+class PipScriptMaker(ScriptMaker):
+    def make(
+        self, specification: str, options: Optional[Dict[str, Any]] = None
+    ) -> List[str]:
+        _raise_for_invalid_entrypoint(specification)
+        return super().make(specification, options)
+
+
+def _install_wheel(
+    name: str,
+    wheel_zip: ZipFile,
+    wheel_path: str,
+    scheme: Scheme,
+    pycompile: bool = True,
+    warn_script_location: bool = True,
+    direct_url: Optional[DirectUrl] = None,
+    requested: bool = False,
+) -> None:
+    """Install a wheel.
+
+    :param name: Name of the project to install
+    :param wheel_zip: open ZipFile for wheel being installed
+    :param scheme: Distutils scheme dictating the install directories
+    :param req_description: String used in place of the requirement, for
+        logging
+    :param pycompile: Whether to byte-compile installed Python files
+    :param warn_script_location: Whether to check that scripts are installed
+        into a directory on PATH
+    :raises UnsupportedWheel:
+        * when the directory holds an unpacked wheel with incompatible
+          Wheel-Version
+        * when the .dist-info dir does not match the wheel
+    """
+    info_dir, metadata = parse_wheel(wheel_zip, name)
+
+    if wheel_root_is_purelib(metadata):
+        lib_dir = scheme.purelib
+    else:
+        lib_dir = scheme.platlib
+
+    # Record details of the files moved
+    #   installed = files copied from the wheel to the destination
+    #   changed = files changed while installing (scripts #! line typically)
+    #   generated = files newly generated during the install (script wrappers)
+    installed: Dict[RecordPath, RecordPath] = {}
+    changed: Set[RecordPath] = set()
+    generated: List[str] = []
+
+    def record_installed(
+        srcfile: RecordPath, destfile: str, modified: bool = False
+    ) -> None:
+        """Map archive RECORD paths to installation RECORD paths."""
+        newpath = _fs_to_record_path(destfile, lib_dir)
+        installed[srcfile] = newpath
+        if modified:
+            changed.add(newpath)
+
+    def is_dir_path(path: RecordPath) -> bool:
+        return path.endswith("/")
+
+    def assert_no_path_traversal(dest_dir_path: str, target_path: str) -> None:
+        if not is_within_directory(dest_dir_path, target_path):
+            message = (
+                "The wheel {!r} has a file {!r} trying to install"
+                " outside the target directory {!r}"
+            )
+            raise InstallationError(
+                message.format(wheel_path, target_path, dest_dir_path)
+            )
+
+    def root_scheme_file_maker(
+        zip_file: ZipFile, dest: str
+    ) -> Callable[[RecordPath], "File"]:
+        def make_root_scheme_file(record_path: RecordPath) -> "File":
+            normed_path = os.path.normpath(record_path)
+            dest_path = os.path.join(dest, normed_path)
+            assert_no_path_traversal(dest, dest_path)
+            return ZipBackedFile(record_path, dest_path, zip_file)
+
+        return make_root_scheme_file
+
+    def data_scheme_file_maker(
+        zip_file: ZipFile, scheme: Scheme
+    ) -> Callable[[RecordPath], "File"]:
+        scheme_paths = {key: getattr(scheme, key) for key in SCHEME_KEYS}
+
+        def make_data_scheme_file(record_path: RecordPath) -> "File":
+            normed_path = os.path.normpath(record_path)
+            try:
+                _, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2)
+            except ValueError:
+                message = (
+                    "Unexpected file in {}: {!r}. .data directory contents"
+                    " should be named like: '<scheme key>/<path>'."
+                ).format(wheel_path, record_path)
+                raise InstallationError(message)
+
+            try:
+                scheme_path = scheme_paths[scheme_key]
+            except KeyError:
+                valid_scheme_keys = ", ".join(sorted(scheme_paths))
+                message = (
+                    "Unknown scheme key used in {}: {} (for file {!r}). .data"
+                    " directory contents should be in subdirectories named"
+                    " with a valid scheme key ({})"
+                ).format(wheel_path, scheme_key, record_path, valid_scheme_keys)
+                raise InstallationError(message)
+
+            dest_path = os.path.join(scheme_path, dest_subpath)
+            assert_no_path_traversal(scheme_path, dest_path)
+            return ZipBackedFile(record_path, dest_path, zip_file)
+
+        return make_data_scheme_file
+
+    def is_data_scheme_path(path: RecordPath) -> bool:
+        return path.split("/", 1)[0].endswith(".data")
+
+    paths = cast(List[RecordPath], wheel_zip.namelist())
+    file_paths = filterfalse(is_dir_path, paths)
+    root_scheme_paths, data_scheme_paths = partition(is_data_scheme_path, file_paths)
+
+    make_root_scheme_file = root_scheme_file_maker(wheel_zip, lib_dir)
+    files: Iterator[File] = map(make_root_scheme_file, root_scheme_paths)
+
+    def is_script_scheme_path(path: RecordPath) -> bool:
+        parts = path.split("/", 2)
+        return len(parts) > 2 and parts[0].endswith(".data") and parts[1] == "scripts"
+
+    other_scheme_paths, script_scheme_paths = partition(
+        is_script_scheme_path, data_scheme_paths
+    )
+
+    make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme)
+    other_scheme_files = map(make_data_scheme_file, other_scheme_paths)
+    files = chain(files, other_scheme_files)
+
+    # Get the defined entry points
+    distribution = get_wheel_distribution(
+        FilesystemWheel(wheel_path),
+        canonicalize_name(name),
+    )
+    console, gui = get_entrypoints(distribution)
+
+    def is_entrypoint_wrapper(file: "File") -> bool:
+        # EP, EP.exe and EP-script.py are scripts generated for
+        # entry point EP by setuptools
+        path = file.dest_path
+        name = os.path.basename(path)
+        if name.lower().endswith(".exe"):
+            matchname = name[:-4]
+        elif name.lower().endswith("-script.py"):
+            matchname = name[:-10]
+        elif name.lower().endswith(".pya"):
+            matchname = name[:-4]
+        else:
+            matchname = name
+        # Ignore setuptools-generated scripts
+        return matchname in console or matchname in gui
+
+    script_scheme_files: Iterator[File] = map(
+        make_data_scheme_file, script_scheme_paths
+    )
+    script_scheme_files = filterfalse(is_entrypoint_wrapper, script_scheme_files)
+    script_scheme_files = map(ScriptFile, script_scheme_files)
+    files = chain(files, script_scheme_files)
+
+    for file in files:
+        file.save()
+        record_installed(file.src_record_path, file.dest_path, file.changed)
+
+    def pyc_source_file_paths() -> Generator[str, None, None]:
+        # We de-duplicate installation paths, since there can be overlap (e.g.
+        # file in .data maps to same location as file in wheel root).
+        # Sorting installation paths makes it easier to reproduce and debug
+        # issues related to permissions on existing files.
+        for installed_path in sorted(set(installed.values())):
+            full_installed_path = os.path.join(lib_dir, installed_path)
+            if not os.path.isfile(full_installed_path):
+                continue
+            if not full_installed_path.endswith(".py"):
+                continue
+            yield full_installed_path
+
+    def pyc_output_path(path: str) -> str:
+        """Return the path the pyc file would have been written to."""
+        return importlib.util.cache_from_source(path)
+
+    # Compile all of the pyc files for the installed files
+    if pycompile:
+        with captured_stdout() as stdout:
+            with warnings.catch_warnings():
+                warnings.filterwarnings("ignore")
+                for path in pyc_source_file_paths():
+                    success = compileall.compile_file(path, force=True, quiet=True)
+                    if success:
+                        pyc_path = pyc_output_path(path)
+                        assert os.path.exists(pyc_path)
+                        pyc_record_path = cast(
+                            "RecordPath", pyc_path.replace(os.path.sep, "/")
+                        )
+                        record_installed(pyc_record_path, pyc_path)
+        logger.debug(stdout.getvalue())
+
+    maker = PipScriptMaker(None, scheme.scripts)
+
+    # Ensure old scripts are overwritten.
+    # See https://github.com/pypa/pip/issues/1800
+    maker.clobber = True
+
+    # Ensure we don't generate any variants for scripts because this is almost
+    # never what somebody wants.
+    # See https://bitbucket.org/pypa/distlib/issue/35/
+    maker.variants = {""}
+
+    # This is required because otherwise distlib creates scripts that are not
+    # executable.
+    # See https://bitbucket.org/pypa/distlib/issue/32/
+    maker.set_mode = True
+
+    # Generate the console and GUI entry points specified in the wheel
+    scripts_to_generate = get_console_script_specs(console)
+
+    gui_scripts_to_generate = list(starmap("{} = {}".format, gui.items()))
+
+    generated_console_scripts = maker.make_multiple(scripts_to_generate)
+    generated.extend(generated_console_scripts)
+
+    generated.extend(maker.make_multiple(gui_scripts_to_generate, {"gui": True}))
+
+    if warn_script_location:
+        msg = message_about_scripts_not_on_PATH(generated_console_scripts)
+        if msg is not None:
+            logger.warning(msg)
+
+    generated_file_mode = 0o666 & ~current_umask()
+
+    @contextlib.contextmanager
+    def _generate_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]:
+        with adjacent_tmp_file(path, **kwargs) as f:
+            yield f
+        os.chmod(f.name, generated_file_mode)
+        replace(f.name, path)
+
+    dest_info_dir = os.path.join(lib_dir, info_dir)
+
+    # Record pip as the installer
+    installer_path = os.path.join(dest_info_dir, "INSTALLER")
+    with _generate_file(installer_path) as installer_file:
+        installer_file.write(b"pip\n")
+    generated.append(installer_path)
+
+    # Record the PEP 610 direct URL reference
+    if direct_url is not None:
+        direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME)
+        with _generate_file(direct_url_path) as direct_url_file:
+            direct_url_file.write(direct_url.to_json().encode("utf-8"))
+        generated.append(direct_url_path)
+
+    # Record the REQUESTED file
+    if requested:
+        requested_path = os.path.join(dest_info_dir, "REQUESTED")
+        with open(requested_path, "wb"):
+            pass
+        generated.append(requested_path)
+
+    record_text = distribution.read_text("RECORD")
+    record_rows = list(csv.reader(record_text.splitlines()))
+
+    rows = get_csv_rows_for_installed(
+        record_rows,
+        installed=installed,
+        changed=changed,
+        generated=generated,
+        lib_dir=lib_dir,
+    )
+
+    # Record details of all files installed
+    record_path = os.path.join(dest_info_dir, "RECORD")
+
+    with _generate_file(record_path, **csv_io_kwargs("w")) as record_file:
+        # Explicitly cast to typing.IO[str] as a workaround for the mypy error:
+        # "writer" has incompatible type "BinaryIO"; expected "_Writer"
+        writer = csv.writer(cast("IO[str]", record_file))
+        writer.writerows(_normalized_outrows(rows))
+
+
+@contextlib.contextmanager
+def req_error_context(req_description: str) -> Generator[None, None, None]:
+    try:
+        yield
+    except InstallationError as e:
+        message = f"For req: {req_description}. {e.args[0]}"
+        raise InstallationError(message) from e
+
+
+def install_wheel(
+    name: str,
+    wheel_path: str,
+    scheme: Scheme,
+    req_description: str,
+    pycompile: bool = True,
+    warn_script_location: bool = True,
+    direct_url: Optional[DirectUrl] = None,
+    requested: bool = False,
+) -> None:
+    with ZipFile(wheel_path, allowZip64=True) as z:
+        with req_error_context(req_description):
+            _install_wheel(
+                name=name,
+                wheel_zip=z,
+                wheel_path=wheel_path,
+                scheme=scheme,
+                pycompile=pycompile,
+                warn_script_location=warn_script_location,
+                direct_url=direct_url,
+                requested=requested,
+            )
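
`install_wheel` is the public entry point that wraps `_install_wheel` with the zip handle and the error context. A hedged sketch of calling it directly; the `Scheme` layout and wheel path are invented for illustration, and real callers derive the scheme from pip's locations logic:

    from pip._internal.models.scheme import Scheme
    from pip._internal.operations.install.wheel import install_wheel

    scheme = Scheme(
        platlib="/tmp/target/lib",
        purelib="/tmp/target/lib",
        headers="/tmp/target/include/example",
        scripts="/tmp/target/bin",
        data="/tmp/target",
    )

    install_wheel(
        name="example",
        wheel_path="/tmp/example-1.0-py3-none-any.whl",
        scheme=scheme,
        req_description="example==1.0",
        requested=True,  # also writes the REQUESTED marker into .dist-info
    )
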
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/operations/prepare.py b/.venv/lib/python3.12/site-packages/pip/_internal/operations/prepare.py
new file mode 100644
index 0000000..956717d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/operations/prepare.py
@@ -0,0 +1,730 @@
+"""Prepares a distribution for installation
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import mimetypes
+import os
+import shutil
+from pathlib import Path
+from typing import Dict, Iterable, List, Optional
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.distributions import make_distribution_for_install_requirement
+from pip._internal.distributions.installed import InstalledDistribution
+from pip._internal.exceptions import (
+    DirectoryUrlHashUnsupported,
+    HashMismatch,
+    HashUnpinned,
+    InstallationError,
+    MetadataInconsistent,
+    NetworkConnectionError,
+    VcsHashUnsupported,
+)
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution, get_metadata_distribution
+from pip._internal.models.direct_url import ArchiveInfo
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.network.download import BatchDownloader, Downloader
+from pip._internal.network.lazy_wheel import (
+    HTTPRangeRequestUnsupported,
+    dist_from_wheel_url,
+)
+from pip._internal.network.session import PipSession
+from pip._internal.operations.build.build_tracker import BuildTracker
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils._log import getLogger
+from pip._internal.utils.direct_url_helpers import (
+    direct_url_for_editable,
+    direct_url_from_link,
+)
+from pip._internal.utils.hashes import Hashes, MissingHashes
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import (
+    display_path,
+    hash_file,
+    hide_url,
+    redact_auth_from_requirement,
+)
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.unpacking import unpack_file
+from pip._internal.vcs import vcs
+
+logger = getLogger(__name__)
+
+
+def _get_prepared_distribution(
+    req: InstallRequirement,
+    build_tracker: BuildTracker,
+    finder: PackageFinder,
+    build_isolation: bool,
+    check_build_deps: bool,
+) -> BaseDistribution:
+    """Prepare a distribution for installation."""
+    abstract_dist = make_distribution_for_install_requirement(req)
+    tracker_id = abstract_dist.build_tracker_id
+    if tracker_id is not None:
+        with build_tracker.track(req, tracker_id):
+            abstract_dist.prepare_distribution_metadata(
+                finder, build_isolation, check_build_deps
+            )
+    return abstract_dist.get_metadata_distribution()
+
+
+def unpack_vcs_link(link: Link, location: str, verbosity: int) -> None:
+    vcs_backend = vcs.get_backend_for_scheme(link.scheme)
+    assert vcs_backend is not None
+    vcs_backend.unpack(location, url=hide_url(link.url), verbosity=verbosity)
+
+
+class File:
+    def __init__(self, path: str, content_type: Optional[str]) -> None:
+        self.path = path
+        if content_type is None:
+            self.content_type = mimetypes.guess_type(path)[0]
+        else:
+            self.content_type = content_type
+
+
+def get_http_url(
+    link: Link,
+    download: Downloader,
+    download_dir: Optional[str] = None,
+    hashes: Optional[Hashes] = None,
+) -> File:
+    temp_dir = TempDirectory(kind="unpack", globally_managed=True)
+    # If a download dir is specified, is the file already downloaded there?
+    already_downloaded_path = None
+    if download_dir:
+        already_downloaded_path = _check_download_dir(link, download_dir, hashes)
+
+    if already_downloaded_path:
+        from_path = already_downloaded_path
+        content_type = None
+    else:
+        # let's download to a tmp dir
+        from_path, content_type = download(link, temp_dir.path)
+        if hashes:
+            hashes.check_against_path(from_path)
+
+    return File(from_path, content_type)
+
+
+def get_file_url(
+    link: Link, download_dir: Optional[str] = None, hashes: Optional[Hashes] = None
+) -> File:
+    """Get file and optionally check its hash."""
+    # If a download dir is specified, is the file already there and valid?
+    already_downloaded_path = None
+    if download_dir:
+        already_downloaded_path = _check_download_dir(link, download_dir, hashes)
+
+    if already_downloaded_path:
+        from_path = already_downloaded_path
+    else:
+        from_path = link.file_path
+
+    # If --require-hashes is off, `hashes` is either empty, the
+    # link's embedded hash, or MissingHashes; it is required to
+    # match. If --require-hashes is on, we are satisfied by any
+    # hash in `hashes` matching: a URL-based or an option-based
+    # one; no internet-sourced hash will be in `hashes`.
+    if hashes:
+        hashes.check_against_path(from_path)
+    return File(from_path, None)
+
+
+def unpack_url(
+    link: Link,
+    location: str,
+    download: Downloader,
+    verbosity: int,
+    download_dir: Optional[str] = None,
+    hashes: Optional[Hashes] = None,
+) -> Optional[File]:
+    """Unpack link into location, downloading if required.
+
+    :param hashes: A Hashes object, one of whose embedded hashes must match,
+        or HashMismatch will be raised. If the Hashes is empty, no matches are
+        required, and unhashable types of requirements (like VCS ones, which
+        would ordinarily raise HashUnsupported) are allowed.
+    """
+    # non-editable vcs urls
+    if link.is_vcs:
+        unpack_vcs_link(link, location, verbosity=verbosity)
+        return None
+
+    assert not link.is_existing_dir()
+
+    # file urls
+    if link.is_file:
+        file = get_file_url(link, download_dir, hashes=hashes)
+
+    # http urls
+    else:
+        file = get_http_url(
+            link,
+            download,
+            download_dir,
+            hashes=hashes,
+        )
+
+    # unpack the archive to the build dir location. even when only downloading
+    # archives, they have to be unpacked to parse dependencies, except wheels
+    if not link.is_wheel:
+        unpack_file(file.path, location, file.content_type)
+
+    return file
+
+
+def _check_download_dir(
+    link: Link,
+    download_dir: str,
+    hashes: Optional[Hashes],
+    warn_on_hash_mismatch: bool = True,
+) -> Optional[str]:
+    """Check download_dir for previously downloaded file with correct hash
+    If a correct file is found return its path else None
+    """
+    download_path = os.path.join(download_dir, link.filename)
+
+    if not os.path.exists(download_path):
+        return None
+
+    # If already downloaded, does its hash match?
+    logger.info("File was already downloaded %s", download_path)
+    if hashes:
+        try:
+            hashes.check_against_path(download_path)
+        except HashMismatch:
+            if warn_on_hash_mismatch:
+                logger.warning(
+                    "Previously-downloaded file %s has bad hash. Re-downloading.",
+                    download_path,
+                )
+            os.unlink(download_path)
+            return None
+    return download_path
+
+
+class RequirementPreparer:
+    """Prepares a Requirement"""
+
+    def __init__(
+        self,
+        build_dir: str,
+        download_dir: Optional[str],
+        src_dir: str,
+        build_isolation: bool,
+        check_build_deps: bool,
+        build_tracker: BuildTracker,
+        session: PipSession,
+        progress_bar: str,
+        finder: PackageFinder,
+        require_hashes: bool,
+        use_user_site: bool,
+        lazy_wheel: bool,
+        verbosity: int,
+        legacy_resolver: bool,
+    ) -> None:
+        super().__init__()
+
+        self.src_dir = src_dir
+        self.build_dir = build_dir
+        self.build_tracker = build_tracker
+        self._session = session
+        self._download = Downloader(session, progress_bar)
+        self._batch_download = BatchDownloader(session, progress_bar)
+        self.finder = finder
+
+        # Where still-packed archives should be written to. If None, they are
+        # not saved, and are deleted immediately after unpacking.
+        self.download_dir = download_dir
+
+        # Is build isolation allowed?
+        self.build_isolation = build_isolation
+
+        # Should check build dependencies?
+        self.check_build_deps = check_build_deps
+
+        # Should hash-checking be required?
+        self.require_hashes = require_hashes
+
+        # Should install in user site-packages?
+        self.use_user_site = use_user_site
+
+        # Should wheels be downloaded lazily?
+        self.use_lazy_wheel = lazy_wheel
+
+        # How verbose should underlying tooling be?
+        self.verbosity = verbosity
+
+        # Are we using the legacy resolver?
+        self.legacy_resolver = legacy_resolver
+
+        # Memoized downloaded files, as mapping of url: path.
+        self._downloaded: Dict[str, str] = {}
+
+        # Previous "header" printed for a link-based InstallRequirement
+        self._previous_requirement_header = ("", "")
+
+    def _log_preparing_link(self, req: InstallRequirement) -> None:
+        """Provide context for the requirement being prepared."""
+        if req.link.is_file and not req.is_wheel_from_cache:
+            message = "Processing %s"
+            information = str(display_path(req.link.file_path))
+        else:
+            message = "Collecting %s"
+            information = redact_auth_from_requirement(req.req) if req.req else str(req)
+
+        # If we used req.req, inject requirement source if available (this
+        # would already be included if we used req directly)
+        if req.req and req.comes_from:
+            if isinstance(req.comes_from, str):
+                comes_from: Optional[str] = req.comes_from
+            else:
+                comes_from = req.comes_from.from_path()
+            if comes_from:
+                information += f" (from {comes_from})"
+
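+        # Only log the header when it differs from the previous one, to avoid
+        # repeating identical "Processing"/"Collecting" lines.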
+        if (message, information) != self._previous_requirement_header:
+            self._previous_requirement_header = (message, information)
+            logger.info(message, information)
+
+        if req.is_wheel_from_cache:
+            with indent_log():
+                logger.info("Using cached %s", req.link.filename)
+
+    def _ensure_link_req_src_dir(
+        self, req: InstallRequirement, parallel_builds: bool
+    ) -> None:
+        """Ensure source_dir of a linked InstallRequirement."""
+        # Since source_dir is only set for editable requirements.
+        if req.link.is_wheel:
+            # We don't need to unpack wheels, so no need for a source
+            # directory.
+            return
+        assert req.source_dir is None
+        if req.link.is_existing_dir():
+            # build local directories in-tree
+            req.source_dir = req.link.file_path
+            return
+
+        # We always delete unpacked sdists after pip runs.
+        req.ensure_has_source_dir(
+            self.build_dir,
+            autodelete=True,
+            parallel_builds=parallel_builds,
+        )
+        req.ensure_pristine_source_checkout()
+
+    def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
+        # By the time this is called, the requirement's link should have
+        # been checked so we can tell what kind of requirements req is
+        # and raise some more informative errors than otherwise.
+        # (For example, we can raise VcsHashUnsupported for a VCS URL
+        # rather than HashMissing.)
+        if not self.require_hashes:
+            return req.hashes(trust_internet=True)
+
+        # We could check these first 2 conditions inside unpack_url
+        # and save repetition of conditions, but then we would
+        # report less-useful error messages for unhashable
+        # requirements, complaining that there's no hash provided.
+        if req.link.is_vcs:
+            raise VcsHashUnsupported()
+        if req.link.is_existing_dir():
+            raise DirectoryUrlHashUnsupported()
+
+        # Unpinned packages are asking for trouble when a new version
+        # is uploaded.  This isn't a security check, but it saves users
+        # a surprising hash mismatch in the future.
+        # file:/// URLs aren't pinnable, so don't complain about them
+        # not being pinned.
+        if not req.is_direct and not req.is_pinned:
+            raise HashUnpinned()
+
+        # If known-good hashes are missing for this requirement,
+        # shim it with a facade object that will provoke hash
+        # computation and then raise a HashMissing exception
+        # showing the user what the hash should be.
+        return req.hashes(trust_internet=False) or MissingHashes()
+
+    def _fetch_metadata_only(
+        self,
+        req: InstallRequirement,
+    ) -> Optional[BaseDistribution]:
+        if self.legacy_resolver:
+            logger.debug(
+                "Metadata-only fetching is not used in the legacy resolver",
+            )
+            return None
+        if self.require_hashes:
+            logger.debug(
+                "Metadata-only fetching is not used as hash checking is required",
+            )
+            return None
+        # Try PEP 658 metadata first, then fall back to lazy wheel if unavailable.
+        return self._fetch_metadata_using_link_data_attr(
+            req
+        ) or self._fetch_metadata_using_lazy_wheel(req.link)
+
+    def _fetch_metadata_using_link_data_attr(
+        self,
+        req: InstallRequirement,
+    ) -> Optional[BaseDistribution]:
+        """Fetch metadata from the data-dist-info-metadata attribute, if possible."""
+        # (1) Get the link to the metadata file, if provided by the backend.
+        metadata_link = req.link.metadata_link()
+        if metadata_link is None:
+            return None
+        assert req.req is not None
+        logger.verbose(
+            "Obtaining dependency information for %s from %s",
+            req.req,
+            metadata_link,
+        )
+        # (2) Download the contents of the METADATA file, separate from the dist itself.
+        metadata_file = get_http_url(
+            metadata_link,
+            self._download,
+            hashes=metadata_link.as_hashes(),
+        )
+        with open(metadata_file.path, "rb") as f:
+            metadata_contents = f.read()
+        # (3) Generate a dist just from those file contents.
+        metadata_dist = get_metadata_distribution(
+            metadata_contents,
+            req.link.filename,
+            req.req.name,
+        )
+        # (4) Ensure the Name: field from the METADATA file matches the name from the
+        #     install requirement.
+        #
+        #     NB: raw_name will fall back to the name from the install requirement if
+        #     the Name: field is not present, but it's noted in the raw_name docstring
+        #     that that should NEVER happen anyway.
+        if canonicalize_name(metadata_dist.raw_name) != canonicalize_name(req.req.name):
+            raise MetadataInconsistent(
+                req, "Name", req.req.name, metadata_dist.raw_name
+            )
+        return metadata_dist
+
+    def _fetch_metadata_using_lazy_wheel(
+        self,
+        link: Link,
+    ) -> Optional[BaseDistribution]:
+        """Fetch metadata using lazy wheel, if possible."""
+        # --use-feature=fast-deps must be provided.
+        if not self.use_lazy_wheel:
+            return None
+        if link.is_file or not link.is_wheel:
+            logger.debug(
+                "Lazy wheel is not used as %r does not point to a remote wheel",
+                link,
+            )
+            return None
+
+        wheel = Wheel(link.filename)
+        name = canonicalize_name(wheel.name)
+        logger.info(
+            "Obtaining dependency information from %s %s",
+            name,
+            wheel.version,
+        )
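+        # Strip the URL fragment (e.g. "#sha256=...") before requesting the remote wheel.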
+        url = link.url.split("#", 1)[0]
+        try:
+            return dist_from_wheel_url(name, url, self._session)
+        except HTTPRangeRequestUnsupported:
+            logger.debug("%s does not support range requests", url)
+            return None
+
+    def _complete_partial_requirements(
+        self,
+        partially_downloaded_reqs: Iterable[InstallRequirement],
+        parallel_builds: bool = False,
+    ) -> None:
+        """Download any requirements which were only fetched by metadata."""
+        # Download to a temporary directory. These will be copied over as
+        # needed for downstream 'download', 'wheel', and 'install' commands.
+        temp_dir = TempDirectory(kind="unpack", globally_managed=True).path
+
+        # Map each link to the requirement that owns it. This allows us to set
+        # `req.local_file_path` on the appropriate requirement after passing
+        # all the links at once into BatchDownloader.
+        links_to_fully_download: Dict[Link, InstallRequirement] = {}
+        for req in partially_downloaded_reqs:
+            assert req.link
+            links_to_fully_download[req.link] = req
+
+        batch_download = self._batch_download(
+            links_to_fully_download.keys(),
+            temp_dir,
+        )
+        for link, (filepath, _) in batch_download:
+            logger.debug("Downloading link %s to %s", link, filepath)
+            req = links_to_fully_download[link]
+            # Record the downloaded file path so wheel reqs can extract a Distribution
+            # in .get_dist().
+            req.local_file_path = filepath
+            # Record that the file is downloaded so we don't do it again in
+            # _prepare_linked_requirement().
+            self._downloaded[req.link.url] = filepath
+
+            # If this is an sdist, we need to unpack it after downloading, but the
+            # .source_dir won't be set up until we are in _prepare_linked_requirement().
+            # Add the downloaded archive to the install requirement to unpack after
+            # preparing the source dir.
+            if not req.is_wheel:
+                req.needs_unpacked_archive(Path(filepath))
+
+        # This step is necessary to ensure all lazy wheels are processed
+        # successfully by the 'download', 'wheel', and 'install' commands.
+        for req in partially_downloaded_reqs:
+            self._prepare_linked_requirement(req, parallel_builds)
+
+    def prepare_linked_requirement(
+        self, req: InstallRequirement, parallel_builds: bool = False
+    ) -> BaseDistribution:
+        """Prepare a requirement to be obtained from req.link."""
+        assert req.link
+        self._log_preparing_link(req)
+        with indent_log():
+            # Check if the relevant file is already available
+            # in the download directory
+            file_path = None
+            if self.download_dir is not None and req.link.is_wheel:
+                hashes = self._get_linked_req_hashes(req)
+                file_path = _check_download_dir(
+                    req.link,
+                    self.download_dir,
+                    hashes,
+                    # When a locally built wheel has been found in cache, we don't warn
+                    # about re-downloading when the already downloaded wheel hash does
+                    # not match. This is because the hash must be checked against the
+                    # original link, not the cached link. In that case the already
+                    # downloaded file will be removed and re-fetched from cache (which
+                    # implies a hash check against the cache entry's origin.json).
+                    warn_on_hash_mismatch=not req.is_wheel_from_cache,
+                )
+
+            if file_path is not None:
+                # The file is already available, so mark it as downloaded
+                self._downloaded[req.link.url] = file_path
+            else:
+                # The file is not available, attempt to fetch only metadata
+                metadata_dist = self._fetch_metadata_only(req)
+                if metadata_dist is not None:
+                    req.needs_more_preparation = True
+                    return metadata_dist
+
+            # None of the optimizations worked, fully prepare the requirement
+            return self._prepare_linked_requirement(req, parallel_builds)
+
+    def prepare_linked_requirements_more(
+        self, reqs: Iterable[InstallRequirement], parallel_builds: bool = False
+    ) -> None:
+        """Prepare linked requirements more, if needed."""
+        reqs = [req for req in reqs if req.needs_more_preparation]
+        for req in reqs:
+            # Determine if any of these requirements were already downloaded.
+            if self.download_dir is not None and req.link.is_wheel:
+                hashes = self._get_linked_req_hashes(req)
+                file_path = _check_download_dir(req.link, self.download_dir, hashes)
+                if file_path is not None:
+                    self._downloaded[req.link.url] = file_path
+                    req.needs_more_preparation = False
+
+        # Prepare requirements we found were already downloaded for some
+        # reason. The other downloads will be completed separately.
+        partially_downloaded_reqs: List[InstallRequirement] = []
+        for req in reqs:
+            if req.needs_more_preparation:
+                partially_downloaded_reqs.append(req)
+            else:
+                self._prepare_linked_requirement(req, parallel_builds)
+
+        # TODO: separate this part out from RequirementPreparer when the v1
+        # resolver can be removed!
+        self._complete_partial_requirements(
+            partially_downloaded_reqs,
+            parallel_builds=parallel_builds,
+        )
+
+    def _prepare_linked_requirement(
+        self, req: InstallRequirement, parallel_builds: bool
+    ) -> BaseDistribution:
+        assert req.link
+        link = req.link
+
+        hashes = self._get_linked_req_hashes(req)
+
+        if hashes and req.is_wheel_from_cache:
+            assert req.download_info is not None
+            assert link.is_wheel
+            assert link.is_file
+            # We need to verify hashes, and we have found the requirement in the cache
+            # of locally built wheels.
+            if (
+                isinstance(req.download_info.info, ArchiveInfo)
+                and req.download_info.info.hashes
+                and hashes.has_one_of(req.download_info.info.hashes)
+            ):
+                # At this point we know the requirement was built from a hashable source
+                # artifact, and we verified that the cache entry's hash of the original
+                # artifact matches one of the hashes we expect. We don't verify hashes
+                # against the cached wheel, because the wheel is not the original.
+                hashes = None
+            else:
+                logger.warning(
+                    "The hashes of the source archive found in cache entry "
+                    "don't match, ignoring cached built wheel "
+                    "and re-downloading source."
+                )
+                req.link = req.cached_wheel_source_link
+                link = req.link
+
+        self._ensure_link_req_src_dir(req, parallel_builds)
+
+        if link.is_existing_dir():
+            local_file = None
+        elif link.url not in self._downloaded:
+            try:
+                local_file = unpack_url(
+                    link,
+                    req.source_dir,
+                    self._download,
+                    self.verbosity,
+                    self.download_dir,
+                    hashes,
+                )
+            except NetworkConnectionError as exc:
+                raise InstallationError(
+                    f"Could not install requirement {req} because of HTTP "
+                    f"error {exc} for URL {link}"
+                )
+        else:
+            file_path = self._downloaded[link.url]
+            if hashes:
+                hashes.check_against_path(file_path)
+            local_file = File(file_path, content_type=None)
+
+        # If download_info is set, we got it from the wheel cache.
+        if req.download_info is None:
+            # Editables don't go through this function (see
+            # prepare_editable_requirement).
+            assert not req.editable
+            req.download_info = direct_url_from_link(link, req.source_dir)
+            # Make sure we have a hash in download_info. If we got it as part of the
+            # URL, it will have been verified and we can rely on it. Otherwise we
+            # compute it from the downloaded file.
+            # FIXME: https://github.com/pypa/pip/issues/11943
+            if (
+                isinstance(req.download_info.info, ArchiveInfo)
+                and not req.download_info.info.hashes
+                and local_file
+            ):
+                hash = hash_file(local_file.path)[0].hexdigest()
+                # We populate info.hash for backward compatibility.
+                # This will automatically populate info.hashes.
+                req.download_info.info.hash = f"sha256={hash}"
+
+        # For use in later processing,
+        # preserve the file path on the requirement.
+        if local_file:
+            req.local_file_path = local_file.path
+
+        dist = _get_prepared_distribution(
+            req,
+            self.build_tracker,
+            self.finder,
+            self.build_isolation,
+            self.check_build_deps,
+        )
+        return dist
+
+    def save_linked_requirement(self, req: InstallRequirement) -> None:
+        assert self.download_dir is not None
+        assert req.link is not None
+        link = req.link
+        if link.is_vcs or (link.is_existing_dir() and req.editable):
+            # Make a .zip of the source_dir we already created.
+            req.archive(self.download_dir)
+            return
+
+        if link.is_existing_dir():
+            logger.debug(
+                "Not copying link to destination directory "
+                "since it is a directory: %s",
+                link,
+            )
+            return
+        if req.local_file_path is None:
+            # No distribution was downloaded for this requirement.
+            return
+
+        download_location = os.path.join(self.download_dir, link.filename)
+        if not os.path.exists(download_location):
+            shutil.copy(req.local_file_path, download_location)
+            download_path = display_path(download_location)
+            logger.info("Saved %s", download_path)
+
+    def prepare_editable_requirement(
+        self,
+        req: InstallRequirement,
+    ) -> BaseDistribution:
+        """Prepare an editable requirement."""
+        assert req.editable, "cannot prepare a non-editable req as editable"
+
+        logger.info("Obtaining %s", req)
+
+        with indent_log():
+            if self.require_hashes:
+                raise InstallationError(
+                    f"The editable requirement {req} cannot be installed when "
+                    "requiring hashes, because there is no single file to "
+                    "hash."
+                )
+            req.ensure_has_source_dir(self.src_dir)
+            req.update_editable()
+            assert req.source_dir
+            req.download_info = direct_url_for_editable(req.unpacked_source_directory)
+
+            dist = _get_prepared_distribution(
+                req,
+                self.build_tracker,
+                self.finder,
+                self.build_isolation,
+                self.check_build_deps,
+            )
+
+            req.check_if_exists(self.use_user_site)
+
+        return dist
+
+    def prepare_installed_requirement(
+        self,
+        req: InstallRequirement,
+        skip_reason: str,
+    ) -> BaseDistribution:
+        """Prepare an already-installed requirement."""
+        assert req.satisfied_by, "req should have been satisfied but isn't"
+        assert skip_reason is not None, (
+            "did not get skip reason skipped but req.satisfied_by "
+            f"is set to {req.satisfied_by}"
+        )
+        logger.info(
+            "Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version
+        )
+        with indent_log():
+            if self.require_hashes:
+                logger.debug(
+                    "Since it is already installed, we are trusting this "
+                    "package without checking its hash. To ensure a "
+                    "completely repeatable environment, install into an "
+                    "empty virtualenv."
+                )
+            return InstalledDistribution(req).get_metadata_distribution()
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/pyproject.py b/.venv/lib/python3.12/site-packages/pip/_internal/pyproject.py
new file mode 100644
index 0000000..8de36b8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/pyproject.py
@@ -0,0 +1,179 @@
+import importlib.util
+import os
+from collections import namedtuple
+from typing import Any, List, Optional
+
+from pip._vendor import tomli
+from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
+
+from pip._internal.exceptions import (
+    InstallationError,
+    InvalidPyProjectBuildRequires,
+    MissingPyProjectBuildRequires,
+)
+
+
+def _is_list_of_str(obj: Any) -> bool:
+    return isinstance(obj, list) and all(isinstance(item, str) for item in obj)
+
+
+def make_pyproject_path(unpacked_source_directory: str) -> str:
+    return os.path.join(unpacked_source_directory, "pyproject.toml")
+
+
+BuildSystemDetails = namedtuple(
+    "BuildSystemDetails", ["requires", "backend", "check", "backend_path"]
+)
+
+
+def load_pyproject_toml(
+    use_pep517: Optional[bool], pyproject_toml: str, setup_py: str, req_name: str
+) -> Optional[BuildSystemDetails]:
+    """Load the pyproject.toml file.
+
+    Parameters:
+        use_pep517 - Has the user requested PEP 517 processing? None
+                     means the user hasn't explicitly specified.
+        pyproject_toml - Location of the project's pyproject.toml file
+        setup_py - Location of the project's setup.py file
+        req_name - The name of the requirement we're processing (for
+                   error reporting)
+
+    Returns:
+        None if we should use the legacy code path, otherwise a tuple
+        (
+            requirements from pyproject.toml,
+            name of PEP 517 backend,
+            requirements we should check are installed after setting
+                up the build environment,
+            directory paths to import the backend from (backend-path),
+                relative to the project root.
+        )
+    """
+    has_pyproject = os.path.isfile(pyproject_toml)
+    has_setup = os.path.isfile(setup_py)
+
+    if not has_pyproject and not has_setup:
+        raise InstallationError(
+            f"{req_name} does not appear to be a Python project: "
+            f"neither 'setup.py' nor 'pyproject.toml' found."
+        )
+
+    if has_pyproject:
+        with open(pyproject_toml, encoding="utf-8") as f:
+            pp_toml = tomli.loads(f.read())
+        build_system = pp_toml.get("build-system")
+    else:
+        build_system = None
+
+    # The following cases must use PEP 517
+    # We check for use_pep517 being non-None and falsey because that means
+    # the user explicitly requested --no-use-pep517.  The value 0 as
+    # opposed to False can occur when the value is provided via an
+    # environment variable or config file option (due to the quirk of
+    # strtobool() returning an integer in pip's configuration code).
+    if has_pyproject and not has_setup:
+        if use_pep517 is not None and not use_pep517:
+            raise InstallationError(
+                "Disabling PEP 517 processing is invalid: "
+                "project does not have a setup.py"
+            )
+        use_pep517 = True
+    elif build_system and "build-backend" in build_system:
+        if use_pep517 is not None and not use_pep517:
+            raise InstallationError(
+                "Disabling PEP 517 processing is invalid: "
+                "project specifies a build backend of {} "
+                "in pyproject.toml".format(build_system["build-backend"])
+            )
+        use_pep517 = True
+
+    # If we haven't worked out whether to use PEP 517 yet,
+    # and the user hasn't explicitly stated a preference,
+    # we do so if the project has a pyproject.toml file
+    # or if we cannot import setuptools or wheel.
+
+    # We fall back to PEP 517 when setuptools or the wheel package is unavailable,
+    # so setuptools can be installed as a default build backend.
+    # For more info see:
+    # https://discuss.python.org/t/pip-without-setuptools-could-the-experience-be-improved/11810/9
+    # https://github.com/pypa/pip/issues/8559
+    elif use_pep517 is None:
+        use_pep517 = (
+            has_pyproject
+            or not importlib.util.find_spec("setuptools")
+            or not importlib.util.find_spec("wheel")
+        )
+
+    # At this point, we know whether we're going to use PEP 517.
+    assert use_pep517 is not None
+
+    # If we're using the legacy code path, there is nothing further
+    # for us to do here.
+    if not use_pep517:
+        return None
+
+    if build_system is None:
+        # Either the user has a pyproject.toml with no build-system
+        # section, or the user has no pyproject.toml, but has opted in
+        # explicitly via --use-pep517.
+        # In the absence of any explicit backend specification, we
+        # assume the setuptools backend that most closely emulates the
+        # traditional direct setup.py execution, and require wheel and
+        # a version of setuptools that supports that backend.
+
+        build_system = {
+            "requires": ["setuptools>=40.8.0"],
+            "build-backend": "setuptools.build_meta:__legacy__",
+        }
+
+    # If we're using PEP 517, we have build system information (either
+    # from pyproject.toml, or defaulted by the code above).
+    # Note that at this point, we do not know if the user has actually
+    # specified a backend, though.
+    assert build_system is not None
+
+    # Ensure that the build-system section in pyproject.toml conforms
+    # to PEP 518.
+
+    # Specifying the build-system table but not the requires key is invalid
+    if "requires" not in build_system:
+        raise MissingPyProjectBuildRequires(package=req_name)
+
+    # Error out if requires is not a list of strings
+    requires = build_system["requires"]
+    if not _is_list_of_str(requires):
+        raise InvalidPyProjectBuildRequires(
+            package=req_name,
+            reason="It is not a list of strings.",
+        )
+
+    # Each requirement must be valid as per PEP 508
+    for requirement in requires:
+        try:
+            Requirement(requirement)
+        except InvalidRequirement as error:
+            raise InvalidPyProjectBuildRequires(
+                package=req_name,
+                reason=f"It contains an invalid requirement: {requirement!r}",
+            ) from error
+
+    backend = build_system.get("build-backend")
+    backend_path = build_system.get("backend-path", [])
+    check: List[str] = []
+    if backend is None:
+        # If the user didn't specify a backend, we assume they want to use
+        # the setuptools backend. But we can't be sure they have included
+        # a version of setuptools which supplies the backend. So we
+        # make a note to check that this requirement is present once
+        # we have set up the environment.
+        # This is quite a lot of work to check for a very specific case. But
+        # the problem is, that case is potentially quite common - projects that
+        # adopted PEP 518 early for the ability to specify requirements to
+        # execute setup.py, but never considered needing to mention the build
+        # tools themselves. The original PEP 518 code had a similar check (but
+        # implemented in a different way).
+        backend = "setuptools.build_meta:__legacy__"
+        check = ["setuptools>=40.8.0"]
+
+    return BuildSystemDetails(requires, backend, check, backend_path)
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/req/__init__.py b/.venv/lib/python3.12/site-packages/pip/_internal/req/__init__.py
new file mode 100644
index 0000000..16de903
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/req/__init__.py
@@ -0,0 +1,92 @@
+import collections
+import logging
+from typing import Generator, List, Optional, Sequence, Tuple
+
+from pip._internal.utils.logging import indent_log
+
+from .req_file import parse_requirements
+from .req_install import InstallRequirement
+from .req_set import RequirementSet
+
+__all__ = [
+    "RequirementSet",
+    "InstallRequirement",
+    "parse_requirements",
+    "install_given_reqs",
+]
+
+logger = logging.getLogger(__name__)
+
+
+class InstallationResult:
+    def __init__(self, name: str) -> None:
+        self.name = name
+
+    def __repr__(self) -> str:
+        return f"InstallationResult(name={self.name!r})"
+
+
+def _validate_requirements(
+    requirements: List[InstallRequirement],
+) -> Generator[Tuple[str, InstallRequirement], None, None]:
+    for req in requirements:
+        assert req.name, f"invalid to-be-installed requirement: {req}"
+        yield req.name, req
+
+
+def install_given_reqs(
+    requirements: List[InstallRequirement],
+    global_options: Sequence[str],
+    root: Optional[str],
+    home: Optional[str],
+    prefix: Optional[str],
+    warn_script_location: bool,
+    use_user_site: bool,
+    pycompile: bool,
+) -> List[InstallationResult]:
+    """
+    Install everything in the given list.
+
+    (to be called after having downloaded and unpacked the packages)
+    """
+    to_install = collections.OrderedDict(_validate_requirements(requirements))
+
+    if to_install:
+        logger.info(
+            "Installing collected packages: %s",
+            ", ".join(to_install.keys()),
+        )
+
+    installed = []
+
+    with indent_log():
+        for req_name, requirement in to_install.items():
+            if requirement.should_reinstall:
+                logger.info("Attempting uninstall: %s", req_name)
+                with indent_log():
+                    uninstalled_pathset = requirement.uninstall(auto_confirm=True)
+            else:
+                uninstalled_pathset = None
+
+            try:
+                requirement.install(
+                    global_options,
+                    root=root,
+                    home=home,
+                    prefix=prefix,
+                    warn_script_location=warn_script_location,
+                    use_user_site=use_user_site,
+                    pycompile=pycompile,
+                )
+            except Exception:
+                # if install did not succeed, rollback previous uninstall
+                if uninstalled_pathset and not requirement.install_succeeded:
+                    uninstalled_pathset.rollback()
+                raise
+            else:
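+                # The install succeeded, so make the earlier uninstall permanent.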
+                if uninstalled_pathset and requirement.install_succeeded:
+                    uninstalled_pathset.commit()
+
+            installed.append(InstallationResult(req_name))
+
+    return installed
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/req/constructors.py b/.venv/lib/python3.12/site-packages/pip/_internal/req/constructors.py
new file mode 100644
index 0000000..7e2d0e5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/req/constructors.py
@@ -0,0 +1,576 @@
+"""Backing implementation for InstallRequirement's various constructors
+
+The idea here is that these formed a major chunk of InstallRequirement's size,
+so moving them and the support code dedicated to them outside of that class
+makes the rest of the code easier to understand.
+
+These are meant to be used elsewhere within pip to create instances of
+InstallRequirement.
+"""
+
+import copy
+import logging
+import os
+import re
+from typing import Collection, Dict, List, Optional, Set, Tuple, Union
+
+from pip._vendor.packaging.markers import Marker
+from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
+from pip._vendor.packaging.specifiers import Specifier
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.models.index import PyPI, TestPyPI
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.req.req_file import ParsedRequirement
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.filetypes import is_archive_file
+from pip._internal.utils.misc import is_installable_dir
+from pip._internal.utils.packaging import get_requirement
+from pip._internal.utils.urls import path_to_url
+from pip._internal.vcs import is_url, vcs
+
+__all__ = [
+    "install_req_from_editable",
+    "install_req_from_line",
+    "parse_editable",
+]
+
+logger = logging.getLogger(__name__)
+operators = Specifier._operators.keys()
+
+
+def _strip_extras(path: str) -> Tuple[str, Optional[str]]:
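+    # e.g. "./pkg[dev,test]" -> ("./pkg", "[dev,test]"); "requests" -> ("requests", None)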
+    m = re.match(r"^(.+)(\[[^\]]+\])$", path)
+    extras = None
+    if m:
+        path_no_extras = m.group(1)
+        extras = m.group(2)
+    else:
+        path_no_extras = path
+
+    return path_no_extras, extras
+
+
+def convert_extras(extras: Optional[str]) -> Set[str]:
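+    # e.g. "[Dev,Test]" -> {"dev", "test"}; None or "" -> set()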
+    if not extras:
+        return set()
+    return get_requirement("placeholder" + extras.lower()).extras
+
+
+def _set_requirement_extras(req: Requirement, new_extras: Set[str]) -> Requirement:
+    """
+    Returns a new requirement based on the given one, with the supplied extras. If the
+    given requirement already has extras those are replaced (or dropped if no new extras
+    are given).
+    """
+    match: Optional[re.Match[str]] = re.fullmatch(
+        # see https://peps.python.org/pep-0508/#complete-grammar
+        r"([\w\t .-]+)(\[[^\]]*\])?(.*)",
+        str(req),
+        flags=re.ASCII,
+    )
+    # ireq.req is a valid requirement so the regex should always match
+    assert (
+        match is not None
+    ), f"regex match on requirement {req} failed, this should never happen"
+    pre: Optional[str] = match.group(1)
+    post: Optional[str] = match.group(3)
+    assert (
+        pre is not None and post is not None
+    ), f"regex group selection for requirement {req} failed, this should never happen"
+    extras: str = "[%s]" % ",".join(sorted(new_extras)) if new_extras else ""
+    return Requirement(f"{pre}{extras}{post}")
+
+
+def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
+    """Parses an editable requirement into:
+        - a requirement name
+        - a URL
+        - extras
+    Accepted requirements:
+        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
+        .[some_extra]
+    """
+
+    url = editable_req
+
+    # If a file path is specified with extras, strip off the extras.
+    url_no_extras, extras = _strip_extras(url)
+
+    if os.path.isdir(url_no_extras):
+        # Treating it as code that has already been checked out
+        url_no_extras = path_to_url(url_no_extras)
+
+    if url_no_extras.lower().startswith("file:"):
+        package_name = Link(url_no_extras).egg_fragment
+        if extras:
+            return (
+                package_name,
+                url_no_extras,
+                get_requirement("placeholder" + extras.lower()).extras,
+            )
+        else:
+            return package_name, url_no_extras, set()
+
+    for version_control in vcs:
+        if url.lower().startswith(f"{version_control}:"):
+            url = f"{version_control}+{url}"
+            break
+
+    link = Link(url)
+
+    if not link.is_vcs:
+        backends = ", ".join(vcs.all_schemes)
+        raise InstallationError(
+            f"{editable_req} is not a valid editable requirement. "
+            f"It should either be a path to a local project or a VCS URL "
+            f"(beginning with {backends})."
+        )
+
+    package_name = link.egg_fragment
+    if not package_name:
+        raise InstallationError(
+            "Could not detect requirement name for '{}', please specify one "
+            "with #egg=your_package_name".format(editable_req)
+        )
+    return package_name, url, set()
+
+
+def check_first_requirement_in_file(filename: str) -> None:
+    """Check if file is parsable as a requirements file.
+
+    This is heavily based on ``pkg_resources.parse_requirements``, but
+    simplified to just check the first meaningful line.
+
+    :raises InvalidRequirement: If the first meaningful line cannot be parsed
+        as a requirement.
+    """
+    with open(filename, encoding="utf-8", errors="ignore") as f:
+        # Create a steppable iterator, so we can handle \-continuations.
+        lines = (
+            line
+            for line in (line.strip() for line in f)
+            if line and not line.startswith("#")  # Skip blank lines/comments.
+        )
+
+        for line in lines:
+            # Drop comments -- a hash without a space may be in a URL.
+            if " #" in line:
+                line = line[: line.find(" #")]
+            # If there is a line continuation, drop it, and append the next line.
+            if line.endswith("\\"):
+                line = line[:-2].strip() + next(lines, "")
+            Requirement(line)
+            return
+
+
+def deduce_helpful_msg(req: str) -> str:
+    """Returns helpful msg in case requirements file does not exist,
+    or cannot be parsed.
+
+    :param req: Requirements file path
+    """
+    if not os.path.exists(req):
+        return f" File '{req}' does not exist."
+    msg = " The path does exist. "
+    # Try to parse and check if it is a requirements file.
+    try:
+        check_first_requirement_in_file(req)
+    except InvalidRequirement:
+        logger.debug("Cannot parse '%s' as requirements file", req)
+    else:
+        msg += (
+            f"The argument you provided "
+            f"({req}) appears to be a"
+            f" requirements file. If that is the"
+            f" case, use the '-r' flag to install"
+            f" the packages specified within it."
+        )
+    return msg
+
+
+class RequirementParts:
+    def __init__(
+        self,
+        requirement: Optional[Requirement],
+        link: Optional[Link],
+        markers: Optional[Marker],
+        extras: Set[str],
+    ):
+        self.requirement = requirement
+        self.link = link
+        self.markers = markers
+        self.extras = extras
+
+
+def parse_req_from_editable(editable_req: str) -> RequirementParts:
+    name, url, extras_override = parse_editable(editable_req)
+
+    if name is not None:
+        try:
+            req: Optional[Requirement] = Requirement(name)
+        except InvalidRequirement:
+            raise InstallationError(f"Invalid requirement: '{name}'")
+    else:
+        req = None
+
+    link = Link(url)
+
+    return RequirementParts(req, link, None, extras_override)
+
+
+# ---- The actual constructors follow ----
+
+
+def install_req_from_editable(
+    editable_req: str,
+    comes_from: Optional[Union[InstallRequirement, str]] = None,
+    *,
+    use_pep517: Optional[bool] = None,
+    isolated: bool = False,
+    global_options: Optional[List[str]] = None,
+    hash_options: Optional[Dict[str, List[str]]] = None,
+    constraint: bool = False,
+    user_supplied: bool = False,
+    permit_editable_wheels: bool = False,
+    config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
+) -> InstallRequirement:
+    parts = parse_req_from_editable(editable_req)
+
+    return InstallRequirement(
+        parts.requirement,
+        comes_from=comes_from,
+        user_supplied=user_supplied,
+        editable=True,
+        permit_editable_wheels=permit_editable_wheels,
+        link=parts.link,
+        constraint=constraint,
+        use_pep517=use_pep517,
+        isolated=isolated,
+        global_options=global_options,
+        hash_options=hash_options,
+        config_settings=config_settings,
+        extras=parts.extras,
+    )
+
+
+def _looks_like_path(name: str) -> bool:
+    """Checks whether the string "looks like" a path on the filesystem.
+
+    This does not check whether the target actually exists; it only judges from
+    the appearance.
+
+    Returns true if any of the following conditions is true:
+    * a path separator is found (either os.path.sep or os.path.altsep);
+    * a dot is found (which represents the current directory).
+    """
+    if os.path.sep in name:
+        return True
+    if os.path.altsep is not None and os.path.altsep in name:
+        return True
+    if name.startswith("."):
+        return True
+    return False
+
+
+def _get_url_from_path(path: str, name: str) -> Optional[str]:
+    """
+    First, check whether the provided path is an installable directory; if it
+    is, return its URL (a directory that is not installable raises
+    InstallationError).
+
+    Otherwise, if the path does not look like an archive file (such as a .whl),
+    return None. If the archive file exists, return its URL. If the name
+    contains an '@' and the part before it does not look like a path, return
+    None so it is treated as a PEP 440 URL requirement; otherwise warn that the
+    file does not exist and return its URL anyway.
+    """
+    if _looks_like_path(name) and os.path.isdir(path):
+        if is_installable_dir(path):
+            return path_to_url(path)
+        # TODO: The is_installable_dir test here might not be necessary
+        #       now that it is done in load_pyproject_toml too.
+        raise InstallationError(
+            f"Directory {name!r} is not installable. Neither 'setup.py' "
+            "nor 'pyproject.toml' found."
+        )
+    if not is_archive_file(path):
+        return None
+    if os.path.isfile(path):
+        return path_to_url(path)
+    urlreq_parts = name.split("@", 1)
+    if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]):
+        # If the path contains '@' and the part before it does not look
+        # like a path, try to treat it as a PEP 440 URL req instead.
+        return None
+    logger.warning(
+        "Requirement %r looks like a filename, but the file does not exist",
+        name,
+    )
+    return path_to_url(path)
+
+
+def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementParts:
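+    # A URL can itself contain ";", so markers on URL lines must be introduced by
+    # "; " (semicolon plus space) rather than a bare ";".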
+    if is_url(name):
+        marker_sep = "; "
+    else:
+        marker_sep = ";"
+    if marker_sep in name:
+        name, markers_as_string = name.split(marker_sep, 1)
+        markers_as_string = markers_as_string.strip()
+        if not markers_as_string:
+            markers = None
+        else:
+            markers = Marker(markers_as_string)
+    else:
+        markers = None
+    name = name.strip()
+    req_as_string = None
+    path = os.path.normpath(os.path.abspath(name))
+    link = None
+    extras_as_string = None
+
+    if is_url(name):
+        link = Link(name)
+    else:
+        p, extras_as_string = _strip_extras(path)
+        url = _get_url_from_path(p, name)
+        if url is not None:
+            link = Link(url)
+
+    # it's a local file, dir, or url
+    if link:
+        # Handle relative file URLs
+        if link.scheme == "file" and re.search(r"\.\./", link.url):
+            link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path))))
+        # wheel file
+        if link.is_wheel:
+            wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
+            req_as_string = f"{wheel.name}=={wheel.version}"
+        else:
+            # set the req to the egg fragment.  when it's not there, this
+            # will become an 'unnamed' requirement
+            req_as_string = link.egg_fragment
+
+    # a requirement specifier
+    else:
+        req_as_string = name
+
+    extras = convert_extras(extras_as_string)
+
+    def with_source(text: str) -> str:
+        if not line_source:
+            return text
+        return f"{text} (from {line_source})"
+
+    def _parse_req_string(req_as_string: str) -> Requirement:
+        try:
+            req = get_requirement(req_as_string)
+        except InvalidRequirement:
+            if os.path.sep in req_as_string:
+                add_msg = "It looks like a path."
+                add_msg += deduce_helpful_msg(req_as_string)
+            elif "=" in req_as_string and not any(
+                op in req_as_string for op in operators
+            ):
+                add_msg = "= is not a valid operator. Did you mean == ?"
+            else:
+                add_msg = ""
+            msg = with_source(f"Invalid requirement: {req_as_string!r}")
+            if add_msg:
+                msg += f"\nHint: {add_msg}"
+            raise InstallationError(msg)
+        else:
+            # Deprecate extras after specifiers: "name>=1.0[extras]"
+            # This currently works by accident because _strip_extras() parses
+            # any extras at the end of the string and those are saved in
+            # RequirementParts
+            for spec in req.specifier:
+                spec_str = str(spec)
+                if spec_str.endswith("]"):
+                    msg = f"Extras after version '{spec_str}'."
+                    raise InstallationError(msg)
+        return req
+
+    if req_as_string is not None:
+        req: Optional[Requirement] = _parse_req_string(req_as_string)
+    else:
+        req = None
+
+    return RequirementParts(req, link, markers, extras)
+
+
+def install_req_from_line(
+    name: str,
+    comes_from: Optional[Union[str, InstallRequirement]] = None,
+    *,
+    use_pep517: Optional[bool] = None,
+    isolated: bool = False,
+    global_options: Optional[List[str]] = None,
+    hash_options: Optional[Dict[str, List[str]]] = None,
+    constraint: bool = False,
+    line_source: Optional[str] = None,
+    user_supplied: bool = False,
+    config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
+) -> InstallRequirement:
+    """Creates an InstallRequirement from a name, which might be a
+    requirement, directory containing 'setup.py', filename, or URL.
+
+    :param line_source: An optional string describing where the line is from,
+        for logging purposes in case of an error.
+    """
+    parts = parse_req_from_line(name, line_source)
+
+    return InstallRequirement(
+        parts.requirement,
+        comes_from,
+        link=parts.link,
+        markers=parts.markers,
+        use_pep517=use_pep517,
+        isolated=isolated,
+        global_options=global_options,
+        hash_options=hash_options,
+        config_settings=config_settings,
+        constraint=constraint,
+        extras=parts.extras,
+        user_supplied=user_supplied,
+    )
+
+
+def install_req_from_req_string(
+    req_string: str,
+    comes_from: Optional[InstallRequirement] = None,
+    isolated: bool = False,
+    use_pep517: Optional[bool] = None,
+    user_supplied: bool = False,
+) -> InstallRequirement:
+    try:
+        req = get_requirement(req_string)
+    except InvalidRequirement:
+        raise InstallationError(f"Invalid requirement: '{req_string}'")
+
+    domains_not_allowed = [
+        PyPI.file_storage_domain,
+        TestPyPI.file_storage_domain,
+    ]
+    if (
+        req.url
+        and comes_from
+        and comes_from.link
+        and comes_from.link.netloc in domains_not_allowed
+    ):
+        # Explicitly disallow pypi packages that depend on external urls
+        raise InstallationError(
+            "Packages installed from PyPI cannot depend on packages "
+            "which are not also hosted on PyPI.\n"
+            f"{comes_from.name} depends on {req} "
+        )
+
+    return InstallRequirement(
+        req,
+        comes_from,
+        isolated=isolated,
+        use_pep517=use_pep517,
+        user_supplied=user_supplied,
+    )
+
+
+def install_req_from_parsed_requirement(
+    parsed_req: ParsedRequirement,
+    isolated: bool = False,
+    use_pep517: Optional[bool] = None,
+    user_supplied: bool = False,
+    config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
+) -> InstallRequirement:
+    if parsed_req.is_editable:
+        req = install_req_from_editable(
+            parsed_req.requirement,
+            comes_from=parsed_req.comes_from,
+            use_pep517=use_pep517,
+            constraint=parsed_req.constraint,
+            isolated=isolated,
+            user_supplied=user_supplied,
+            config_settings=config_settings,
+        )
+
+    else:
+        req = install_req_from_line(
+            parsed_req.requirement,
+            comes_from=parsed_req.comes_from,
+            use_pep517=use_pep517,
+            isolated=isolated,
+            global_options=(
+                parsed_req.options.get("global_options", [])
+                if parsed_req.options
+                else []
+            ),
+            hash_options=(
+                parsed_req.options.get("hashes", {}) if parsed_req.options else {}
+            ),
+            constraint=parsed_req.constraint,
+            line_source=parsed_req.line_source,
+            user_supplied=user_supplied,
+            config_settings=config_settings,
+        )
+    return req
+
+
+def install_req_from_link_and_ireq(
+    link: Link, ireq: InstallRequirement
+) -> InstallRequirement:
+    return InstallRequirement(
+        req=ireq.req,
+        comes_from=ireq.comes_from,
+        editable=ireq.editable,
+        link=link,
+        markers=ireq.markers,
+        use_pep517=ireq.use_pep517,
+        isolated=ireq.isolated,
+        global_options=ireq.global_options,
+        hash_options=ireq.hash_options,
+        config_settings=ireq.config_settings,
+        user_supplied=ireq.user_supplied,
+    )
+
+
+def install_req_drop_extras(ireq: InstallRequirement) -> InstallRequirement:
+    """
+    Creates a new InstallationRequirement using the given template but without
+    any extras. Sets the original requirement as the new one's parent
+    (comes_from).
+    """
+    return InstallRequirement(
+        req=(
+            _set_requirement_extras(ireq.req, set()) if ireq.req is not None else None
+        ),
+        comes_from=ireq,
+        editable=ireq.editable,
+        link=ireq.link,
+        markers=ireq.markers,
+        use_pep517=ireq.use_pep517,
+        isolated=ireq.isolated,
+        global_options=ireq.global_options,
+        hash_options=ireq.hash_options,
+        constraint=ireq.constraint,
+        extras=[],
+        config_settings=ireq.config_settings,
+        user_supplied=ireq.user_supplied,
+        permit_editable_wheels=ireq.permit_editable_wheels,
+    )
+
+
+def install_req_extend_extras(
+    ireq: InstallRequirement,
+    extras: Collection[str],
+) -> InstallRequirement:
+    """
+    Returns a copy of an installation requirement with some additional extras.
+    Makes a shallow copy of the ireq object.
+    """
+    result = copy.copy(ireq)
+    result.extras = {*ireq.extras, *extras}
+    result.req = (
+        _set_requirement_extras(ireq.req, result.extras)
+        if ireq.req is not None
+        else None
+    )
+    return result
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/req/req_file.py b/.venv/lib/python3.12/site-packages/pip/_internal/req/req_file.py
new file mode 100644
index 0000000..1ef3d5e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/req/req_file.py
@@ -0,0 +1,554 @@
+"""
+Requirements file parsing
+"""
+
+import logging
+import optparse
+import os
+import re
+import shlex
+import urllib.parse
+from optparse import Values
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    Generator,
+    Iterable,
+    List,
+    Optional,
+    Tuple,
+)
+
+from pip._internal.cli import cmdoptions
+from pip._internal.exceptions import InstallationError, RequirementsFileParseError
+from pip._internal.models.search_scope import SearchScope
+from pip._internal.network.session import PipSession
+from pip._internal.network.utils import raise_for_status
+from pip._internal.utils.encoding import auto_decode
+from pip._internal.utils.urls import get_url_scheme
+
+if TYPE_CHECKING:
+    # NoReturn introduced in 3.6.2; imported only for type checking to maintain
+    # pip compatibility with older patch versions of Python 3.6
+    from typing import NoReturn
+
+    from pip._internal.index.package_finder import PackageFinder
+
+__all__ = ["parse_requirements"]
+
+ReqFileLines = Iterable[Tuple[int, str]]
+
+LineParser = Callable[[str], Tuple[str, Values]]
+
+SCHEME_RE = re.compile(r"^(http|https|file):", re.I)
+COMMENT_RE = re.compile(r"(^|\s+)#.*$")
+
+# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
+# variable name consisting of only uppercase letters, digits or the '_'
+# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
+# 2013 Edition.
+ENV_VAR_RE = re.compile(r"(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})")
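+# e.g. "${API_TOKEN}" matches with group "var" = "${API_TOKEN}" and "name" = "API_TOKEN".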
+
+SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [
+    cmdoptions.index_url,
+    cmdoptions.extra_index_url,
+    cmdoptions.no_index,
+    cmdoptions.constraints,
+    cmdoptions.requirements,
+    cmdoptions.editable,
+    cmdoptions.find_links,
+    cmdoptions.no_binary,
+    cmdoptions.only_binary,
+    cmdoptions.prefer_binary,
+    cmdoptions.require_hashes,
+    cmdoptions.pre,
+    cmdoptions.trusted_host,
+    cmdoptions.use_new_feature,
+]
+
+# options to be passed to requirements
+SUPPORTED_OPTIONS_REQ: List[Callable[..., optparse.Option]] = [
+    cmdoptions.global_options,
+    cmdoptions.hash,
+    cmdoptions.config_settings,
+]
+
+SUPPORTED_OPTIONS_EDITABLE_REQ: List[Callable[..., optparse.Option]] = [
+    cmdoptions.config_settings,
+]
+
+
+# the 'dest' string values
+SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
+SUPPORTED_OPTIONS_EDITABLE_REQ_DEST = [
+    str(o().dest) for o in SUPPORTED_OPTIONS_EDITABLE_REQ
+]
+
+logger = logging.getLogger(__name__)
+
+
+class ParsedRequirement:
+    def __init__(
+        self,
+        requirement: str,
+        is_editable: bool,
+        comes_from: str,
+        constraint: bool,
+        options: Optional[Dict[str, Any]] = None,
+        line_source: Optional[str] = None,
+    ) -> None:
+        self.requirement = requirement
+        self.is_editable = is_editable
+        self.comes_from = comes_from
+        self.options = options
+        self.constraint = constraint
+        self.line_source = line_source
+
+
+class ParsedLine:
+    def __init__(
+        self,
+        filename: str,
+        lineno: int,
+        args: str,
+        opts: Values,
+        constraint: bool,
+    ) -> None:
+        self.filename = filename
+        self.lineno = lineno
+        self.opts = opts
+        self.constraint = constraint
+
+        if args:
+            self.is_requirement = True
+            self.is_editable = False
+            self.requirement = args
+        elif opts.editables:
+            self.is_requirement = True
+            self.is_editable = True
+            # We don't support multiple -e on one line
+            self.requirement = opts.editables[0]
+        else:
+            self.is_requirement = False
+
+
+def parse_requirements(
+    filename: str,
+    session: PipSession,
+    finder: Optional["PackageFinder"] = None,
+    options: Optional[optparse.Values] = None,
+    constraint: bool = False,
+) -> Generator[ParsedRequirement, None, None]:
+    """Parse a requirements file and yield ParsedRequirement instances.
+
+    :param filename:    Path or url of requirements file.
+    :param session:     PipSession instance.
+    :param finder:      Instance of pip.index.PackageFinder.
+    :param options:     cli options.
+    :param constraint:  If true, parsing a constraint file rather than
+        a requirements file.
+    """
+    line_parser = get_line_parser(finder)
+    parser = RequirementsFileParser(session, line_parser)
+
+    for parsed_line in parser.parse(filename, constraint):
+        parsed_req = handle_line(
+            parsed_line, options=options, finder=finder, session=session
+        )
+        if parsed_req is not None:
+            yield parsed_req
+
+
+def preprocess(content: str) -> ReqFileLines:
+    """Split, filter, and join lines, and return a line iterator
+
+    :param content: the content of the requirements file
+    """
+    lines_enum: ReqFileLines = enumerate(content.splitlines(), start=1)
+    lines_enum = join_lines(lines_enum)
+    lines_enum = ignore_comments(lines_enum)
+    lines_enum = expand_env_variables(lines_enum)
+    return lines_enum
+
+
+def handle_requirement_line(
+    line: ParsedLine,
+    options: Optional[optparse.Values] = None,
+) -> ParsedRequirement:
+    # preserve for the nested code path
+    line_comes_from = "{} {} (line {})".format(
+        "-c" if line.constraint else "-r",
+        line.filename,
+        line.lineno,
+    )
+
+    assert line.is_requirement
+
+    # get the options that apply to requirements
+    if line.is_editable:
+        supported_dest = SUPPORTED_OPTIONS_EDITABLE_REQ_DEST
+    else:
+        supported_dest = SUPPORTED_OPTIONS_REQ_DEST
+    req_options = {}
+    for dest in supported_dest:
+        if dest in line.opts.__dict__ and line.opts.__dict__[dest]:
+            req_options[dest] = line.opts.__dict__[dest]
+
+    line_source = f"line {line.lineno} of {line.filename}"
+    return ParsedRequirement(
+        requirement=line.requirement,
+        is_editable=line.is_editable,
+        comes_from=line_comes_from,
+        constraint=line.constraint,
+        options=req_options,
+        line_source=line_source,
+    )
+
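+# For illustration: a requirement line such as
+#     SomeProject==1.0 --hash=sha256:abcd
+# produces a ParsedRequirement whose options dict carries only the
+# requirement-scoped values, e.g. roughly {"hashes": {"sha256": ["abcd"]}}.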
+
+def handle_option_line(
+    opts: Values,
+    filename: str,
+    lineno: int,
+    finder: Optional["PackageFinder"] = None,
+    options: Optional[optparse.Values] = None,
+    session: Optional[PipSession] = None,
+) -> None:
+    if opts.hashes:
+        logger.warning(
+            "%s line %s has --hash but no requirement, and will be ignored.",
+            filename,
+            lineno,
+        )
+
+    if options:
+        # percolate options upward
+        if opts.require_hashes:
+            options.require_hashes = opts.require_hashes
+        if opts.features_enabled:
+            options.features_enabled.extend(
+                f for f in opts.features_enabled if f not in options.features_enabled
+            )
+
+    # set finder options
+    if finder:
+        find_links = finder.find_links
+        index_urls = finder.index_urls
+        no_index = finder.search_scope.no_index
+        if opts.no_index is True:
+            no_index = True
+            index_urls = []
+        if opts.index_url and not no_index:
+            index_urls = [opts.index_url]
+        if opts.extra_index_urls and not no_index:
+            index_urls.extend(opts.extra_index_urls)
+        if opts.find_links:
+            # FIXME: it would be nice to keep track of the source
+            # of the find_links: support a find-links local path
+            # relative to a requirements file.
+            value = opts.find_links[0]
+            req_dir = os.path.dirname(os.path.abspath(filename))
+            relative_to_reqs_file = os.path.join(req_dir, value)
+            if os.path.exists(relative_to_reqs_file):
+                value = relative_to_reqs_file
+            find_links.append(value)
+
+        if session:
+            # We need to update the auth urls in session
+            session.update_index_urls(index_urls)
+
+        search_scope = SearchScope(
+            find_links=find_links,
+            index_urls=index_urls,
+            no_index=no_index,
+        )
+        finder.search_scope = search_scope
+
+        if opts.pre:
+            finder.set_allow_all_prereleases()
+
+        if opts.prefer_binary:
+            finder.set_prefer_binary()
+
+        if session:
+            for host in opts.trusted_hosts or []:
+                source = f"line {lineno} of {filename}"
+                session.add_trusted_host(host, source=source)
+
+
+def handle_line(
+    line: ParsedLine,
+    options: Optional[optparse.Values] = None,
+    finder: Optional["PackageFinder"] = None,
+    session: Optional[PipSession] = None,
+) -> Optional[ParsedRequirement]:
+    """Handle a single parsed requirements line; This can result in
+    creating/yielding requirements, or updating the finder.
+
+    :param line:        The parsed line to be processed.
+    :param options:     CLI options.
+    :param finder:      The finder - updated by non-requirement lines.
+    :param session:     The session - updated by non-requirement lines.
+
+    Returns a ParsedRequirement object if the line is a requirement line,
+    otherwise returns None.
+
+    For lines that contain requirements, the only options that have an effect
+    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
+    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
+    ignored.
+
+    For lines that do not contain requirements, the only options that have an
+    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
+    be present, but are ignored. These lines may contain multiple options
+    (although our docs imply only one is supported), and all are parsed and
+    affect the finder.
+    """
+
+    if line.is_requirement:
+        parsed_req = handle_requirement_line(line, options)
+        return parsed_req
+    else:
+        handle_option_line(
+            line.opts,
+            line.filename,
+            line.lineno,
+            finder,
+            options,
+            session,
+        )
+        return None
+
+
+class RequirementsFileParser:
+    def __init__(
+        self,
+        session: PipSession,
+        line_parser: LineParser,
+    ) -> None:
+        self._session = session
+        self._line_parser = line_parser
+
+    def parse(
+        self, filename: str, constraint: bool
+    ) -> Generator[ParsedLine, None, None]:
+        """Parse a given file, yielding parsed lines."""
+        yield from self._parse_and_recurse(filename, constraint)
+
+    def _parse_and_recurse(
+        self, filename: str, constraint: bool
+    ) -> Generator[ParsedLine, None, None]:
+        for line in self._parse_file(filename, constraint):
+            if not line.is_requirement and (
+                line.opts.requirements or line.opts.constraints
+            ):
+                # parse a nested requirements file
+                if line.opts.requirements:
+                    req_path = line.opts.requirements[0]
+                    nested_constraint = False
+                else:
+                    req_path = line.opts.constraints[0]
+                    nested_constraint = True
+
+                # original file is over http
+                if SCHEME_RE.search(filename):
+                    # do a url join so relative paths work
+                    req_path = urllib.parse.urljoin(filename, req_path)
+                # original file and nested file are paths
+                elif not SCHEME_RE.search(req_path):
+                    # do a join so relative paths work
+                    req_path = os.path.join(
+                        os.path.dirname(filename),
+                        req_path,
+                    )
+
+                yield from self._parse_and_recurse(req_path, nested_constraint)
+            else:
+                yield line
+
+    def _parse_file(
+        self, filename: str, constraint: bool
+    ) -> Generator[ParsedLine, None, None]:
+        _, content = get_file_content(filename, self._session)
+
+        lines_enum = preprocess(content)
+
+        for line_number, line in lines_enum:
+            try:
+                args_str, opts = self._line_parser(line)
+            except OptionParsingError as e:
+                # add offending line
+                msg = f"Invalid requirement: {line}\n{e.msg}"
+                raise RequirementsFileParseError(msg)
+
+            yield ParsedLine(
+                filename,
+                line_number,
+                args_str,
+                opts,
+                constraint,
+            )
+
+
+def get_line_parser(finder: Optional["PackageFinder"]) -> LineParser:
+    def parse_line(line: str) -> Tuple[str, Values]:
+        # Build new parser for each line since it accumulates appendable
+        # options.
+        parser = build_parser()
+        defaults = parser.get_default_values()
+        defaults.index_url = None
+        if finder:
+            defaults.format_control = finder.format_control
+
+        args_str, options_str = break_args_options(line)
+
+        try:
+            options = shlex.split(options_str)
+        except ValueError as e:
+            raise OptionParsingError(f"Could not split options: {options_str}") from e
+
+        opts, _ = parser.parse_args(options, defaults)
+
+        return args_str, opts
+
+    return parse_line
+
+
+def break_args_options(line: str) -> Tuple[str, str]:
+    """Break up the line into an args and options string.  We only want to shlex
+    (and then optparse) the options, not the args.  args can contain markers
+    which are corrupted by shlex.
+    """
+    tokens = line.split(" ")
+    args = []
+    options = tokens[:]
+    for token in tokens:
+        if token.startswith("-") or token.startswith("--"):
+            break
+        else:
+            args.append(token)
+            options.pop(0)
+    return " ".join(args), " ".join(options)
+
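+# Example (illustrative):
+#
+#     >>> break_args_options("SomeProject>=1.2 --hash=sha256:abcd")
+#     ('SomeProject>=1.2', '--hash=sha256:abcd')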
+
+class OptionParsingError(Exception):
+    def __init__(self, msg: str) -> None:
+        self.msg = msg
+
+
+def build_parser() -> optparse.OptionParser:
+    """
+    Return a parser for parsing requirement lines
+    """
+    parser = optparse.OptionParser(add_help_option=False)
+
+    option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
+    for option_factory in option_factories:
+        option = option_factory()
+        parser.add_option(option)
+
+    # By default optparse sys.exits on parsing errors. We want to wrap
+    # that in our own exception.
+    def parser_exit(self: Any, msg: str) -> "NoReturn":
+        raise OptionParsingError(msg)
+
+    # NOTE: mypy disallows assigning to a method
+    #       https://github.com/python/mypy/issues/2427
+    parser.exit = parser_exit  # type: ignore
+
+    return parser
+
+
+def join_lines(lines_enum: ReqFileLines) -> ReqFileLines:
+    """Joins a line ending in '\' with the previous line (except when following
+    comments).  The joined line takes on the index of the first line.
+    """
+    primary_line_number = None
+    new_line: List[str] = []
+    for line_number, line in lines_enum:
+        if not line.endswith("\\") or COMMENT_RE.match(line):
+            if COMMENT_RE.match(line):
+                # this ensures comments are always matched later
+                line = " " + line
+            if new_line:
+                new_line.append(line)
+                assert primary_line_number is not None
+                yield primary_line_number, "".join(new_line)
+                new_line = []
+            else:
+                yield line_number, line
+        else:
+            if not new_line:
+                primary_line_number = line_number
+            new_line.append(line.strip("\\"))
+
+    # last line contains \
+    if new_line:
+        assert primary_line_number is not None
+        yield primary_line_number, "".join(new_line)
+
+    # TODO: handle space after '\'.
+
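+# Example (illustrative): the pair of input lines
+#     (1, "requests \\") and (2, "  >=2.0")
+# is yielded as the single joined line (1, "requests   >=2.0").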
+
+def ignore_comments(lines_enum: ReqFileLines) -> ReqFileLines:
+    """
+    Strips comments and filter empty lines.
+    """
+    for line_number, line in lines_enum:
+        line = COMMENT_RE.sub("", line)
+        line = line.strip()
+        if line:
+            yield line_number, line
+
+
+def expand_env_variables(lines_enum: ReqFileLines) -> ReqFileLines:
+    """Replace all environment variables that can be retrieved via `os.getenv`.
+
+    The only allowed format for environment variables defined in the
+    requirement file is `${MY_VARIABLE_1}` to ensure two things:
+
+    1. Strings that contain a `$` aren't accidentally (partially) expanded.
+    2. Ensure consistency across platforms for requirement files.
+
+    These points are the result of a discussion on the `github pull
+    request #3514 <https://github.com/pypa/pip/pull/3514>`_.
+
+    Valid characters in variable names follow the POSIX standard and are
+    limited to uppercase letters, digits and the `_` (underscore).
+    """
+    for line_number, line in lines_enum:
+        for env_var, var_name in ENV_VAR_RE.findall(line):
+            value = os.getenv(var_name)
+            if not value:
+                continue
+
+            line = line.replace(env_var, value)
+
+        yield line_number, line
+
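+# Example (illustrative): with MY_TOKEN=abc123 set in the environment, the line
+#     https://${MY_TOKEN}@example.com/simple
+# becomes
+#     https://abc123@example.com/simple
+# while references to unset variables are left in place.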
+
+def get_file_content(url: str, session: PipSession) -> Tuple[str, str]:
+    """Gets the content of a file; it may be a filename, file: URL, or
+    http: URL.  Returns (location, content).  Content is unicode.
+    Respects # -*- coding: declarations on the retrieved files.
+
+    :param url:         File path or url.
+    :param session:     PipSession instance.
+    """
+    scheme = get_url_scheme(url)
+
+    # Pip has special support for file:// URLs (LocalFSAdapter).
+    if scheme in ["http", "https", "file"]:
+        resp = session.get(url)
+        raise_for_status(resp)
+        return resp.url, resp.text
+
+    # Assume this is a bare path.
+    try:
+        with open(url, "rb") as f:
+            content = auto_decode(f.read())
+    except OSError as exc:
+        raise InstallationError(f"Could not open requirements file: {exc}")
+    return url, content
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/req/req_install.py b/.venv/lib/python3.12/site-packages/pip/_internal/req/req_install.py
new file mode 100644
index 0000000..a65611c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/req/req_install.py
@@ -0,0 +1,923 @@
+import functools
+import logging
+import os
+import shutil
+import sys
+import uuid
+import zipfile
+from optparse import Values
+from pathlib import Path
+from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union
+
+from pip._vendor.packaging.markers import Marker
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.specifiers import SpecifierSet
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.version import Version
+from pip._vendor.packaging.version import parse as parse_version
+from pip._vendor.pyproject_hooks import BuildBackendHookCaller
+
+from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
+from pip._internal.exceptions import InstallationError, PreviousBuildDirError
+from pip._internal.locations import get_scheme
+from pip._internal.metadata import (
+    BaseDistribution,
+    get_default_environment,
+    get_directory_distribution,
+    get_wheel_distribution,
+)
+from pip._internal.metadata.base import FilesystemWheel
+from pip._internal.models.direct_url import DirectUrl
+from pip._internal.models.link import Link
+from pip._internal.operations.build.metadata import generate_metadata
+from pip._internal.operations.build.metadata_editable import generate_editable_metadata
+from pip._internal.operations.build.metadata_legacy import (
+    generate_metadata as generate_metadata_legacy,
+)
+from pip._internal.operations.install.editable_legacy import (
+    install_editable as install_editable_legacy,
+)
+from pip._internal.operations.install.wheel import install_wheel
+from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
+from pip._internal.req.req_uninstall import UninstallPathSet
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.hashes import Hashes
+from pip._internal.utils.misc import (
+    ConfiguredBuildBackendHookCaller,
+    ask_path_exists,
+    backup_dir,
+    display_path,
+    hide_url,
+    is_installable_dir,
+    redact_auth_from_requirement,
+    redact_auth_from_url,
+)
+from pip._internal.utils.packaging import safe_extra
+from pip._internal.utils.subprocess import runner_with_spinner_message
+from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
+from pip._internal.utils.unpacking import unpack_file
+from pip._internal.utils.virtualenv import running_under_virtualenv
+from pip._internal.vcs import vcs
+
+logger = logging.getLogger(__name__)
+
+
+class InstallRequirement:
+    """
+    Represents something that may be installed later on. It may have
+    information about where to fetch the relevant requirement and also
+    contains logic for installing the said requirement.
+    """
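+    # A minimal construction for illustration (not the usual entry point;
+    # install requirements are normally built via pip's constructor helpers):
+    #
+    #     from pip._vendor.packaging.requirements import Requirement
+    #     ireq = InstallRequirement(Requirement("requests>=2.0"), comes_from=None)
+    #     ireq.name       # "requests"
+    #     ireq.is_pinned  # False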
+
+    def __init__(
+        self,
+        req: Optional[Requirement],
+        comes_from: Optional[Union[str, "InstallRequirement"]],
+        editable: bool = False,
+        link: Optional[Link] = None,
+        markers: Optional[Marker] = None,
+        use_pep517: Optional[bool] = None,
+        isolated: bool = False,
+        *,
+        global_options: Optional[List[str]] = None,
+        hash_options: Optional[Dict[str, List[str]]] = None,
+        config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
+        constraint: bool = False,
+        extras: Collection[str] = (),
+        user_supplied: bool = False,
+        permit_editable_wheels: bool = False,
+    ) -> None:
+        assert req is None or isinstance(req, Requirement), req
+        self.req = req
+        self.comes_from = comes_from
+        self.constraint = constraint
+        self.editable = editable
+        self.permit_editable_wheels = permit_editable_wheels
+
+        # source_dir is the local directory where the linked requirement is
+        # located, or unpacked. In case unpacking is needed, creating and
+        # populating source_dir is done by the RequirementPreparer. Note this
+        # is not necessarily the directory where pyproject.toml or setup.py is
+        # located - that one is obtained via unpacked_source_directory.
+        self.source_dir: Optional[str] = None
+        if self.editable:
+            assert link
+            if link.is_file:
+                self.source_dir = os.path.normpath(os.path.abspath(link.file_path))
+
+        # original_link is the direct URL that was provided by the user for the
+        # requirement, either directly or via a constraints file.
+        if link is None and req and req.url:
+            # PEP 508 URL requirement
+            link = Link(req.url)
+        self.link = self.original_link = link
+
+        # When this InstallRequirement is a wheel obtained from the cache of locally
+        # built wheels, this is the source link corresponding to the cache entry, which
+        # was used to download and build the cached wheel.
+        self.cached_wheel_source_link: Optional[Link] = None
+
+        # Information about the location of the artifact that was downloaded. This
+        # property is guaranteed to be set in resolver results.
+        self.download_info: Optional[DirectUrl] = None
+
+        # Path to any downloaded or already-existing package.
+        self.local_file_path: Optional[str] = None
+        if self.link and self.link.is_file:
+            self.local_file_path = self.link.file_path
+
+        if extras:
+            self.extras = extras
+        elif req:
+            self.extras = req.extras
+        else:
+            self.extras = set()
+        if markers is None and req:
+            markers = req.marker
+        self.markers = markers
+
+        # This holds the Distribution object if this requirement is already installed.
+        self.satisfied_by: Optional[BaseDistribution] = None
+        # Whether the installation process should try to uninstall an existing
+        # distribution before installing this requirement.
+        self.should_reinstall = False
+        # Temporary build location
+        self._temp_build_dir: Optional[TempDirectory] = None
+        # Set to True after successful installation
+        self.install_succeeded: Optional[bool] = None
+        # Supplied options
+        self.global_options = global_options if global_options else []
+        self.hash_options = hash_options if hash_options else {}
+        self.config_settings = config_settings
+        # Set to True after successful preparation of this requirement
+        self.prepared = False
+        # User supplied requirements are explicitly requested for installation
+        # by the user via CLI arguments or requirements files, as opposed to,
+        # e.g. dependencies, extras or constraints.
+        self.user_supplied = user_supplied
+
+        self.isolated = isolated
+        self.build_env: BuildEnvironment = NoOpBuildEnvironment()
+
+        # For PEP 517, the directory where we request the project metadata
+        # gets stored. We need this to pass to build_wheel, so the backend
+        # can ensure that the wheel matches the metadata (see the PEP for
+        # details).
+        self.metadata_directory: Optional[str] = None
+
+        # The static build requirements (from pyproject.toml)
+        self.pyproject_requires: Optional[List[str]] = None
+
+        # Build requirements that we will check are available
+        self.requirements_to_check: List[str] = []
+
+        # The PEP 517 backend we should use to build the project
+        self.pep517_backend: Optional[BuildBackendHookCaller] = None
+
+        # Are we using PEP 517 for this requirement?
+        # After pyproject.toml has been loaded, the only valid values are True
+        # and False. Before loading, None is valid (meaning "use the default").
+        # Setting an explicit value before loading pyproject.toml is supported,
+        # but after loading this flag should be treated as read only.
+        self.use_pep517 = use_pep517
+
+        # If config settings are provided, enforce PEP 517.
+        if self.config_settings:
+            if self.use_pep517 is False:
+                logger.warning(
+                    "--no-use-pep517 ignored for %s "
+                    "because --config-settings are specified.",
+                    self,
+                )
+            self.use_pep517 = True
+
+        # This requirement needs more preparation before it can be built
+        self.needs_more_preparation = False
+
+        # This requirement needs to be unpacked before it can be installed.
+        self._archive_source: Optional[Path] = None
+
+    def __str__(self) -> str:
+        if self.req:
+            s = redact_auth_from_requirement(self.req)
+            if self.link:
+                s += f" from {redact_auth_from_url(self.link.url)}"
+        elif self.link:
+            s = redact_auth_from_url(self.link.url)
+        else:
+            s = ""
+        if self.satisfied_by is not None:
+            if self.satisfied_by.location is not None:
+                location = display_path(self.satisfied_by.location)
+            else:
+                location = ""
+            s += f" in {location}"
+        if self.comes_from:
+            if isinstance(self.comes_from, str):
+                comes_from: Optional[str] = self.comes_from
+            else:
+                comes_from = self.comes_from.from_path()
+            if comes_from:
+                s += f" (from {comes_from})"
+        return s
+
+    def __repr__(self) -> str:
+        return "<{} object: {} editable={!r}>".format(
+            self.__class__.__name__, str(self), self.editable
+        )
+
+    def format_debug(self) -> str:
+        """An un-tested helper for getting state, for debugging."""
+        attributes = vars(self)
+        names = sorted(attributes)
+
+        state = (f"{attr}={attributes[attr]!r}" for attr in sorted(names))
+        return "<{name} object: {{{state}}}>".format(
+            name=self.__class__.__name__,
+            state=", ".join(state),
+        )
+
+    # Things that are valid for all kinds of requirements?
+    @property
+    def name(self) -> Optional[str]:
+        if self.req is None:
+            return None
+        return self.req.name
+
+    @functools.lru_cache()  # use cached_property in python 3.8+
+    def supports_pyproject_editable(self) -> bool:
+        if not self.use_pep517:
+            return False
+        assert self.pep517_backend
+        with self.build_env:
+            runner = runner_with_spinner_message(
+                "Checking if build backend supports build_editable"
+            )
+            with self.pep517_backend.subprocess_runner(runner):
+                return "build_editable" in self.pep517_backend._supported_features()
+
+    @property
+    def specifier(self) -> SpecifierSet:
+        assert self.req is not None
+        return self.req.specifier
+
+    @property
+    def is_direct(self) -> bool:
+        """Whether this requirement was specified as a direct URL."""
+        return self.original_link is not None
+
+    @property
+    def is_pinned(self) -> bool:
+        """Return whether I am pinned to an exact version.
+
+        For example, some-package==1.2 is pinned; some-package>1.2 is not.
+        """
+        assert self.req is not None
+        specifiers = self.req.specifier
+        return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}
+
+    def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
+        if not extras_requested:
+            # Provide an extra to safely evaluate the markers
+            # without matching any extra
+            extras_requested = ("",)
+        if self.markers is not None:
+            return any(
+                self.markers.evaluate({"extra": extra})
+                # TODO: Remove these two variants when packaging is upgraded to
+                # support the marker comparison logic specified in PEP 685.
+                or self.markers.evaluate({"extra": safe_extra(extra)})
+                or self.markers.evaluate({"extra": canonicalize_name(extra)})
+                for extra in extras_requested
+            )
+        else:
+            return True
+
+    @property
+    def has_hash_options(self) -> bool:
+        """Return whether any known-good hashes are specified as options.
+
+        These activate --require-hashes mode; hashes specified as part of a
+        URL do not.
+
+        """
+        return bool(self.hash_options)
+
+    def hashes(self, trust_internet: bool = True) -> Hashes:
+        """Return a hash-comparer that considers my option- and URL-based
+        hashes to be known-good.
+
+        Hashes in URLs--ones embedded in the requirements file, not ones
+        downloaded from an index server--are almost peers with ones from
+        flags. They satisfy --require-hashes (whether it was implicitly or
+        explicitly activated) but do not activate it. md5 and sha224 are not
+        allowed in flags, which should nudge people toward good algos. We
+        always OR all hashes together, even ones from URLs.
+
+        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
+            downloaded from the internet, as by populate_link()
+
+        """
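+        # For illustration: for a requirement parsed from
+        #     SomeProject==1.0 --hash=sha256:abcd
+        # hash_options is roughly {"sha256": ["abcd"]}, and the returned Hashes
+        # object also ORs in any #sha256=... fragment carried by self.link.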
+        good_hashes = self.hash_options.copy()
+        if trust_internet:
+            link = self.link
+        elif self.is_direct and self.user_supplied:
+            link = self.original_link
+        else:
+            link = None
+        if link and link.hash:
+            assert link.hash_name is not None
+            good_hashes.setdefault(link.hash_name, []).append(link.hash)
+        return Hashes(good_hashes)
+
+    def from_path(self) -> Optional[str]:
+        """Format a nice indicator to show where this "comes from" """
+        if self.req is None:
+            return None
+        s = str(self.req)
+        if self.comes_from:
+            comes_from: Optional[str]
+            if isinstance(self.comes_from, str):
+                comes_from = self.comes_from
+            else:
+                comes_from = self.comes_from.from_path()
+            if comes_from:
+                s += "->" + comes_from
+        return s
+
+    def ensure_build_location(
+        self, build_dir: str, autodelete: bool, parallel_builds: bool
+    ) -> str:
+        assert build_dir is not None
+        if self._temp_build_dir is not None:
+            assert self._temp_build_dir.path
+            return self._temp_build_dir.path
+        if self.req is None:
+            # Some systems have /tmp as a symlink which confuses custom
+            # builds (such as numpy). Thus, we ensure that the real path
+            # is returned.
+            self._temp_build_dir = TempDirectory(
+                kind=tempdir_kinds.REQ_BUILD, globally_managed=True
+            )
+
+            return self._temp_build_dir.path
+
+        # This is the only remaining place where we manually determine the path
+        # for the temporary directory. It is only needed for editables where
+        # it is the value of the --src option.
+
+        # When parallel builds are enabled, add a UUID to the build directory
+        # name so multiple builds do not interfere with each other.
+        dir_name: str = canonicalize_name(self.req.name)
+        if parallel_builds:
+            dir_name = f"{dir_name}_{uuid.uuid4().hex}"
+
+        # FIXME: Is there a better place to create the build_dir? (hg and bzr
+        # need this)
+        if not os.path.exists(build_dir):
+            logger.debug("Creating directory %s", build_dir)
+            os.makedirs(build_dir)
+        actual_build_dir = os.path.join(build_dir, dir_name)
+        # `None` indicates that we respect the globally-configured deletion
+        # settings, which is what we actually want when auto-deleting.
+        delete_arg = None if autodelete else False
+        return TempDirectory(
+            path=actual_build_dir,
+            delete=delete_arg,
+            kind=tempdir_kinds.REQ_BUILD,
+            globally_managed=True,
+        ).path
+
+    def _set_requirement(self) -> None:
+        """Set requirement after generating metadata."""
+        assert self.req is None
+        assert self.metadata is not None
+        assert self.source_dir is not None
+
+        # Construct a Requirement object from the generated metadata
+        if isinstance(parse_version(self.metadata["Version"]), Version):
+            op = "=="
+        else:
+            op = "==="
+
+        self.req = Requirement(
+            "".join(
+                [
+                    self.metadata["Name"],
+                    op,
+                    self.metadata["Version"],
+                ]
+            )
+        )
+
+    def warn_on_mismatching_name(self) -> None:
+        assert self.req is not None
+        metadata_name = canonicalize_name(self.metadata["Name"])
+        if canonicalize_name(self.req.name) == metadata_name:
+            # Everything is fine.
+            return
+
+        # If we're here, there's a mismatch. Log a warning about it.
+        logger.warning(
+            "Generating metadata for package %s "
+            "produced metadata for project name %s. Fix your "
+            "#egg=%s fragments.",
+            self.name,
+            metadata_name,
+            self.name,
+        )
+        self.req = Requirement(metadata_name)
+
+    def check_if_exists(self, use_user_site: bool) -> None:
+        """Find an installed distribution that satisfies or conflicts
+        with this requirement, and set self.satisfied_by or
+        self.should_reinstall appropriately.
+        """
+        if self.req is None:
+            return
+        existing_dist = get_default_environment().get_distribution(self.req.name)
+        if not existing_dist:
+            return
+
+        version_compatible = self.req.specifier.contains(
+            existing_dist.version,
+            prereleases=True,
+        )
+        if not version_compatible:
+            self.satisfied_by = None
+            if use_user_site:
+                if existing_dist.in_usersite:
+                    self.should_reinstall = True
+                elif running_under_virtualenv() and existing_dist.in_site_packages:
+                    raise InstallationError(
+                        f"Will not install to the user site because it will "
+                        f"lack sys.path precedence to {existing_dist.raw_name} "
+                        f"in {existing_dist.location}"
+                    )
+            else:
+                self.should_reinstall = True
+        else:
+            if self.editable:
+                self.should_reinstall = True
+                # when installing editables, nothing pre-existing should ever
+                # satisfy the requirement
+                self.satisfied_by = None
+            else:
+                self.satisfied_by = existing_dist
+
+    # Things valid for wheels
+    @property
+    def is_wheel(self) -> bool:
+        if not self.link:
+            return False
+        return self.link.is_wheel
+
+    @property
+    def is_wheel_from_cache(self) -> bool:
+        # When True, it means that this InstallRequirement is a local wheel file in the
+        # cache of locally built wheels.
+        return self.cached_wheel_source_link is not None
+
+    # Things valid for sdists
+    @property
+    def unpacked_source_directory(self) -> str:
+        assert self.source_dir, f"No source dir for {self}"
+        return os.path.join(
+            self.source_dir, self.link and self.link.subdirectory_fragment or ""
+        )
+
+    @property
+    def setup_py_path(self) -> str:
+        assert self.source_dir, f"No source dir for {self}"
+        setup_py = os.path.join(self.unpacked_source_directory, "setup.py")
+
+        return setup_py
+
+    @property
+    def setup_cfg_path(self) -> str:
+        assert self.source_dir, f"No source dir for {self}"
+        setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg")
+
+        return setup_cfg
+
+    @property
+    def pyproject_toml_path(self) -> str:
+        assert self.source_dir, f"No source dir for {self}"
+        return make_pyproject_path(self.unpacked_source_directory)
+
+    def load_pyproject_toml(self) -> None:
+        """Load the pyproject.toml file.
+
+        After calling this routine, all of the attributes related to PEP 517
+        processing for this requirement have been set. In particular, the
+        use_pep517 attribute can be used to determine whether we should
+        follow the PEP 517 or legacy (setup.py) code path.
+        """
+        pyproject_toml_data = load_pyproject_toml(
+            self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self)
+        )
+
+        if pyproject_toml_data is None:
+            assert not self.config_settings
+            self.use_pep517 = False
+            return
+
+        self.use_pep517 = True
+        requires, backend, check, backend_path = pyproject_toml_data
+        self.requirements_to_check = check
+        self.pyproject_requires = requires
+        self.pep517_backend = ConfiguredBuildBackendHookCaller(
+            self,
+            self.unpacked_source_directory,
+            backend,
+            backend_path=backend_path,
+        )
+
+    def isolated_editable_sanity_check(self) -> None:
+        """Check that an editable requirement if valid for use with PEP 517/518.
+
+        This verifies that an editable that has a pyproject.toml either supports PEP 660
+        or as a setup.py or a setup.cfg
+        """
+        if (
+            self.editable
+            and self.use_pep517
+            and not self.supports_pyproject_editable()
+            and not os.path.isfile(self.setup_py_path)
+            and not os.path.isfile(self.setup_cfg_path)
+        ):
+            raise InstallationError(
+                f"Project {self} has a 'pyproject.toml' and its build "
+                f"backend is missing the 'build_editable' hook. Since it does not "
+                f"have a 'setup.py' nor a 'setup.cfg', "
+                f"it cannot be installed in editable mode. "
+                f"Consider using a build backend that supports PEP 660."
+            )
+
+    def prepare_metadata(self) -> None:
+        """Ensure that project metadata is available.
+
+        Under PEP 517 and PEP 660, call the backend hook to prepare the metadata.
+        Under legacy processing, call setup.py egg-info.
+        """
+        assert self.source_dir, f"No source dir for {self}"
+        details = self.name or f"from {self.link}"
+
+        if self.use_pep517:
+            assert self.pep517_backend is not None
+            if (
+                self.editable
+                and self.permit_editable_wheels
+                and self.supports_pyproject_editable()
+            ):
+                self.metadata_directory = generate_editable_metadata(
+                    build_env=self.build_env,
+                    backend=self.pep517_backend,
+                    details=details,
+                )
+            else:
+                self.metadata_directory = generate_metadata(
+                    build_env=self.build_env,
+                    backend=self.pep517_backend,
+                    details=details,
+                )
+        else:
+            self.metadata_directory = generate_metadata_legacy(
+                build_env=self.build_env,
+                setup_py_path=self.setup_py_path,
+                source_dir=self.unpacked_source_directory,
+                isolated=self.isolated,
+                details=details,
+            )
+
+        # Act on the newly generated metadata, based on the name and version.
+        if not self.name:
+            self._set_requirement()
+        else:
+            self.warn_on_mismatching_name()
+
+        self.assert_source_matches_version()
+
+    @property
+    def metadata(self) -> Any:
+        if not hasattr(self, "_metadata"):
+            self._metadata = self.get_dist().metadata
+
+        return self._metadata
+
+    def get_dist(self) -> BaseDistribution:
+        if self.metadata_directory:
+            return get_directory_distribution(self.metadata_directory)
+        elif self.local_file_path and self.is_wheel:
+            assert self.req is not None
+            return get_wheel_distribution(
+                FilesystemWheel(self.local_file_path),
+                canonicalize_name(self.req.name),
+            )
+        raise AssertionError(
+            f"InstallRequirement {self} has no metadata directory and no wheel: "
+            f"can't make a distribution."
+        )
+
+    def assert_source_matches_version(self) -> None:
+        assert self.source_dir, f"No source dir for {self}"
+        version = self.metadata["version"]
+        if self.req and self.req.specifier and version not in self.req.specifier:
+            logger.warning(
+                "Requested %s, but installing version %s",
+                self,
+                version,
+            )
+        else:
+            logger.debug(
+                "Source in %s has version %s, which satisfies requirement %s",
+                display_path(self.source_dir),
+                version,
+                self,
+            )
+
+    # For both source distributions and editables
+    def ensure_has_source_dir(
+        self,
+        parent_dir: str,
+        autodelete: bool = False,
+        parallel_builds: bool = False,
+    ) -> None:
+        """Ensure that a source_dir is set.
+
+        This will create a temporary build dir if the name of the requirement
+        isn't known yet.
+
+        :param parent_dir: The ideal pip parent_dir for the source_dir.
+            Generally src_dir for editables and build_dir for sdists.
+        :return: self.source_dir
+        """
+        if self.source_dir is None:
+            self.source_dir = self.ensure_build_location(
+                parent_dir,
+                autodelete=autodelete,
+                parallel_builds=parallel_builds,
+            )
+
+    def needs_unpacked_archive(self, archive_source: Path) -> None:
+        assert self._archive_source is None
+        self._archive_source = archive_source
+
+    def ensure_pristine_source_checkout(self) -> None:
+        """Ensure the source directory has not yet been built in."""
+        assert self.source_dir is not None
+        if self._archive_source is not None:
+            unpack_file(str(self._archive_source), self.source_dir)
+        elif is_installable_dir(self.source_dir):
+            # If a checkout exists, it's unwise to keep going.
+            # Version inconsistencies are logged later, but do not fail
+            # the installation.
+            raise PreviousBuildDirError(
+                f"pip can't proceed with requirements '{self}' due to a "
+                f"pre-existing build directory ({self.source_dir}). This is likely "
+                "due to a previous installation that failed . pip is "
+                "being responsible and not assuming it can delete this. "
+                "Please delete it and try again."
+            )
+
+    # For editable installations
+    def update_editable(self) -> None:
+        if not self.link:
+            logger.debug(
+                "Cannot update repository at %s; repository location is unknown",
+                self.source_dir,
+            )
+            return
+        assert self.editable
+        assert self.source_dir
+        if self.link.scheme == "file":
+            # Static paths don't get updated
+            return
+        vcs_backend = vcs.get_backend_for_scheme(self.link.scheme)
+        # Editable requirements are validated in Requirement constructors.
+        # So here, if it's neither a path nor a valid VCS URL, it's a bug.
+        assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
+        hidden_url = hide_url(self.link.url)
+        vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0)
+
+    # Top-level Actions
+    def uninstall(
+        self, auto_confirm: bool = False, verbose: bool = False
+    ) -> Optional[UninstallPathSet]:
+        """
+        Uninstall the distribution currently satisfying this requirement.
+
+        Prompts before removing or modifying files unless
+        ``auto_confirm`` is True.
+
+        Refuses to delete or modify files outside of ``sys.prefix`` -
+        thus uninstallation within a virtual environment can only
+        modify that virtual environment, even if the virtualenv is
+        linked to global site-packages.
+
+        """
+        assert self.req
+        dist = get_default_environment().get_distribution(self.req.name)
+        if not dist:
+            logger.warning("Skipping %s as it is not installed.", self.name)
+            return None
+        logger.info("Found existing installation: %s", dist)
+
+        uninstalled_pathset = UninstallPathSet.from_dist(dist)
+        uninstalled_pathset.remove(auto_confirm, verbose)
+        return uninstalled_pathset
+
+    def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str:
+        def _clean_zip_name(name: str, prefix: str) -> str:
+            assert name.startswith(
+                prefix + os.path.sep
+            ), f"name {name!r} doesn't start with prefix {prefix!r}"
+            name = name[len(prefix) + 1 :]
+            name = name.replace(os.path.sep, "/")
+            return name
+
+        assert self.req is not None
+        path = os.path.join(parentdir, path)
+        name = _clean_zip_name(path, rootdir)
+        return self.req.name + "/" + name
+
+    def archive(self, build_dir: Optional[str]) -> None:
+        """Saves archive to provided build_dir.
+
+        Used for saving downloaded VCS requirements as part of `pip download`.
+        """
+        assert self.source_dir
+        if build_dir is None:
+            return
+
+        create_archive = True
+        archive_name = "{}-{}.zip".format(self.name, self.metadata["version"])
+        archive_path = os.path.join(build_dir, archive_name)
+
+        if os.path.exists(archive_path):
+            response = ask_path_exists(
+                f"The file {display_path(archive_path)} exists. (i)gnore, (w)ipe, "
+                "(b)ackup, (a)bort ",
+                ("i", "w", "b", "a"),
+            )
+            if response == "i":
+                create_archive = False
+            elif response == "w":
+                logger.warning("Deleting %s", display_path(archive_path))
+                os.remove(archive_path)
+            elif response == "b":
+                dest_file = backup_dir(archive_path)
+                logger.warning(
+                    "Backing up %s to %s",
+                    display_path(archive_path),
+                    display_path(dest_file),
+                )
+                shutil.move(archive_path, dest_file)
+            elif response == "a":
+                sys.exit(-1)
+
+        if not create_archive:
+            return
+
+        zip_output = zipfile.ZipFile(
+            archive_path,
+            "w",
+            zipfile.ZIP_DEFLATED,
+            allowZip64=True,
+        )
+        with zip_output:
+            dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory))
+            for dirpath, dirnames, filenames in os.walk(dir):
+                for dirname in dirnames:
+                    dir_arcname = self._get_archive_name(
+                        dirname,
+                        parentdir=dirpath,
+                        rootdir=dir,
+                    )
+                    zipdir = zipfile.ZipInfo(dir_arcname + "/")
+                    zipdir.external_attr = 0x1ED << 16  # 0o755
+                    zip_output.writestr(zipdir, "")
+                for filename in filenames:
+                    file_arcname = self._get_archive_name(
+                        filename,
+                        parentdir=dirpath,
+                        rootdir=dir,
+                    )
+                    filename = os.path.join(dirpath, filename)
+                    zip_output.write(filename, file_arcname)
+
+        logger.info("Saved %s", display_path(archive_path))
+
+    def install(
+        self,
+        global_options: Optional[Sequence[str]] = None,
+        root: Optional[str] = None,
+        home: Optional[str] = None,
+        prefix: Optional[str] = None,
+        warn_script_location: bool = True,
+        use_user_site: bool = False,
+        pycompile: bool = True,
+    ) -> None:
+        assert self.req is not None
+        scheme = get_scheme(
+            self.req.name,
+            user=use_user_site,
+            home=home,
+            root=root,
+            isolated=self.isolated,
+            prefix=prefix,
+        )
+
+        if self.editable and not self.is_wheel:
+            if self.config_settings:
+                logger.warning(
+                    "--config-settings ignored for legacy editable install of %s. "
+                    "Consider upgrading to a version of setuptools "
+                    "that supports PEP 660 (>= 64).",
+                    self,
+                )
+            install_editable_legacy(
+                global_options=global_options if global_options is not None else [],
+                prefix=prefix,
+                home=home,
+                use_user_site=use_user_site,
+                name=self.req.name,
+                setup_py_path=self.setup_py_path,
+                isolated=self.isolated,
+                build_env=self.build_env,
+                unpacked_source_directory=self.unpacked_source_directory,
+            )
+            self.install_succeeded = True
+            return
+
+        assert self.is_wheel
+        assert self.local_file_path
+
+        install_wheel(
+            self.req.name,
+            self.local_file_path,
+            scheme=scheme,
+            req_description=str(self.req),
+            pycompile=pycompile,
+            warn_script_location=warn_script_location,
+            direct_url=self.download_info if self.is_direct else None,
+            requested=self.user_supplied,
+        )
+        self.install_succeeded = True
+
+
+def check_invalid_constraint_type(req: InstallRequirement) -> str:
+    # Check for unsupported forms
+    problem = ""
+    if not req.name:
+        problem = "Unnamed requirements are not allowed as constraints"
+    elif req.editable:
+        problem = "Editable requirements are not allowed as constraints"
+    elif req.extras:
+        problem = "Constraints cannot have extras"
+
+    if problem:
+        deprecated(
+            reason=(
+                "Constraints are only allowed to take the form of a package "
+                "name and a version specifier. Other forms were originally "
+                "permitted as an accident of the implementation, but were "
+                "undocumented. The new implementation of the resolver no "
+                "longer supports these forms."
+            ),
+            replacement="replacing the constraint with a requirement",
+            # No plan yet for when the new resolver becomes default
+            gone_in=None,
+            issue=8210,
+        )
+
+    return problem
+
+
+def _has_option(options: Values, reqs: List[InstallRequirement], option: str) -> bool:
+    if getattr(options, option, None):
+        return True
+    for req in reqs:
+        if getattr(req, option, None):
+            return True
+    return False
+
+
+def check_legacy_setup_py_options(
+    options: Values,
+    reqs: List[InstallRequirement],
+) -> None:
+    has_build_options = _has_option(options, reqs, "build_options")
+    has_global_options = _has_option(options, reqs, "global_options")
+    if has_build_options or has_global_options:
+        deprecated(
+            reason="--build-option and --global-option are deprecated.",
+            issue=11859,
+            replacement="to use --config-settings",
+            gone_in="24.2",
+        )
+        logger.warning(
+            "Implying --no-binary=:all: due to the presence of "
+            "--build-option / --global-option. "
+        )
+        options.format_control.disallow_binaries()
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/req/req_set.py b/.venv/lib/python3.12/site-packages/pip/_internal/req/req_set.py
new file mode 100644
index 0000000..bf36114
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/req/req_set.py
@@ -0,0 +1,119 @@
+import logging
+from collections import OrderedDict
+from typing import Dict, List
+
+from pip._vendor.packaging.specifiers import LegacySpecifier
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.version import LegacyVersion
+
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.deprecation import deprecated
+
+logger = logging.getLogger(__name__)
+
+
+class RequirementSet:
+    def __init__(self, check_supported_wheels: bool = True) -> None:
+        """Create a RequirementSet."""
+
+        self.requirements: Dict[str, InstallRequirement] = OrderedDict()
+        self.check_supported_wheels = check_supported_wheels
+
+        self.unnamed_requirements: List[InstallRequirement] = []
+
+    def __str__(self) -> str:
+        requirements = sorted(
+            (req for req in self.requirements.values() if not req.comes_from),
+            key=lambda req: canonicalize_name(req.name or ""),
+        )
+        return " ".join(str(req.req) for req in requirements)
+
+    def __repr__(self) -> str:
+        requirements = sorted(
+            self.requirements.values(),
+            key=lambda req: canonicalize_name(req.name or ""),
+        )
+
+        format_string = "<{classname} object; {count} requirement(s): {reqs}>"
+        return format_string.format(
+            classname=self.__class__.__name__,
+            count=len(requirements),
+            reqs=", ".join(str(req.req) for req in requirements),
+        )
+
+    def add_unnamed_requirement(self, install_req: InstallRequirement) -> None:
+        assert not install_req.name
+        self.unnamed_requirements.append(install_req)
+
+    def add_named_requirement(self, install_req: InstallRequirement) -> None:
+        assert install_req.name
+
+        project_name = canonicalize_name(install_req.name)
+        self.requirements[project_name] = install_req
+
+    def has_requirement(self, name: str) -> bool:
+        project_name = canonicalize_name(name)
+
+        return (
+            project_name in self.requirements
+            and not self.requirements[project_name].constraint
+        )
+
+    def get_requirement(self, name: str) -> InstallRequirement:
+        project_name = canonicalize_name(name)
+
+        if project_name in self.requirements:
+            return self.requirements[project_name]
+
+        raise KeyError(f"No project with the name {name!r}")
+
+    @property
+    def all_requirements(self) -> List[InstallRequirement]:
+        return self.unnamed_requirements + list(self.requirements.values())
+
+    @property
+    def requirements_to_install(self) -> List[InstallRequirement]:
+        """Return the list of requirements that need to be installed.
+
+        TODO remove this property together with the legacy resolver, since the new
+             resolver only returns requirements that need to be installed.
+        """
+        return [
+            install_req
+            for install_req in self.all_requirements
+            if not install_req.constraint and not install_req.satisfied_by
+        ]
+
+    def warn_legacy_versions_and_specifiers(self) -> None:
+        for req in self.requirements_to_install:
+            version = req.get_dist().version
+            if isinstance(version, LegacyVersion):
+                deprecated(
+                    reason=(
+                        f"pip has selected the non standard version {version} "
+                        f"of {req}. In the future this version will be "
+                        f"ignored as it isn't standard compliant."
+                    ),
+                    replacement=(
+                        "set or update constraints to select another version "
+                        "or contact the package author to fix the version number"
+                    ),
+                    issue=12063,
+                    gone_in="24.1",
+                )
+            for dep in req.get_dist().iter_dependencies():
+                if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier):
+                    deprecated(
+                        reason=(
+                            f"pip has selected {req} {version} which has non "
+                            f"standard dependency specifier {dep}. "
+                            f"In the future this version of {req} will be "
+                            f"ignored as it isn't standard compliant."
+                        ),
+                        replacement=(
+                            "set or update constraints to select another version "
+                            "or contact the package author to fix the version number"
+                        ),
+                        issue=12063,
+                        gone_in="24.1",
+                    )
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/req/req_uninstall.py b/.venv/lib/python3.12/site-packages/pip/_internal/req/req_uninstall.py
new file mode 100644
index 0000000..707fde1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/req/req_uninstall.py
@@ -0,0 +1,649 @@
+import functools
+import os
+import sys
+import sysconfig
+from importlib.util import cache_from_source
+from typing import Any, Callable, Dict, Generator, Iterable, List, Optional, Set, Tuple
+
+from pip._internal.exceptions import UninstallationError
+from pip._internal.locations import get_bin_prefix, get_bin_user
+from pip._internal.metadata import BaseDistribution
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.egg_link import egg_link_path_from_location
+from pip._internal.utils.logging import getLogger, indent_log
+from pip._internal.utils.misc import ask, normalize_path, renames, rmtree
+from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+logger = getLogger(__name__)
+
+
+def _script_names(
+    bin_dir: str, script_name: str, is_gui: bool
+) -> Generator[str, None, None]:
+    """Create the fully qualified name of the files created by
+    {console,gui}_scripts for the given ``dist``.
+    Returns the list of file names
+    """
+    exe_name = os.path.join(bin_dir, script_name)
+    yield exe_name
+    if not WINDOWS:
+        return
+    yield f"{exe_name}.exe"
+    yield f"{exe_name}.exe.manifest"
+    if is_gui:
+        yield f"{exe_name}-script.pyw"
+    else:
+        yield f"{exe_name}-script.py"
+
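+# Example (illustrative): on Windows, _script_names(bin_dir, "mytool", is_gui=False)
+# yields bin_dir/mytool, bin_dir/mytool.exe, bin_dir/mytool.exe.manifest and
+# bin_dir/mytool-script.py; on other platforms only the bare bin_dir/mytool.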
+
+def _unique(
+    fn: Callable[..., Generator[Any, None, None]]
+) -> Callable[..., Generator[Any, None, None]]:
+    @functools.wraps(fn)
+    def unique(*args: Any, **kw: Any) -> Generator[Any, None, None]:
+        seen: Set[Any] = set()
+        for item in fn(*args, **kw):
+            if item not in seen:
+                seen.add(item)
+                yield item
+
+    return unique
+
+
+@_unique
+def uninstallation_paths(dist: BaseDistribution) -> Generator[str, None, None]:
+    """
+    Yield all the uninstallation paths for dist based on RECORD-without-.py[co]
+
+    Yield paths to all the files in RECORD. For each .py file in RECORD, add
+    the .pyc and .pyo in the same directory.
+
+    UninstallPathSet.add() takes care of the __pycache__ .py[co].
+
+    If RECORD is not found, raises UninstallationError,
+    with possible information from the INSTALLER file.
+
+    https://packaging.python.org/specifications/recording-installed-packages/
+    """
+    location = dist.location
+    assert location is not None, "not installed"
+
+    entries = dist.iter_declared_entries()
+    if entries is None:
+        msg = f"Cannot uninstall {dist}, RECORD file not found."
+        installer = dist.installer
+        if not installer or installer == "pip":
+            dep = f"{dist.raw_name}=={dist.version}"
+            msg += (
+                " You might be able to recover from this via: "
+                f"'pip install --force-reinstall --no-deps {dep}'."
+            )
+        else:
+            msg += f" Hint: The package was installed by {installer}."
+        raise UninstallationError(msg)
+
+    for entry in entries:
+        path = os.path.join(location, entry)
+        yield path
+        if path.endswith(".py"):
+            dn, fn = os.path.split(path)
+            base = fn[:-3]
+            path = os.path.join(dn, base + ".pyc")
+            yield path
+            path = os.path.join(dn, base + ".pyo")
+            yield path
+
+
+def compact(paths: Iterable[str]) -> Set[str]:
+    """Compact a path set to contain the minimal number of paths
+    necessary to contain all paths in the set. If /a/path/ and
+    /a/path/to/a/file.txt are both in the set, leave only the
+    shorter path."""
+
+    sep = os.path.sep
+    short_paths: Set[str] = set()
+    for path in sorted(paths, key=len):
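+        # Skip this path if it is nested inside a shorter path we already kept.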
+        should_skip = any(
+            path.startswith(shortpath.rstrip("*"))
+            and path[len(shortpath.rstrip("*").rstrip(sep))] == sep
+            for shortpath in short_paths
+        )
+        if not should_skip:
+            short_paths.add(path)
+    return short_paths
+
+
+def compress_for_rename(paths: Iterable[str]) -> Set[str]:
+    """Returns a set containing the paths that need to be renamed.
+
+    This set may include directories when the original sequence of paths
+    included every file on disk.
+    """
+    case_map = {os.path.normcase(p): p for p in paths}
+    remaining = set(case_map)
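+    # Parent directories of the given paths, shortest first: candidates to
+    # collapse into a single wildcard entry.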
+    unchecked = sorted({os.path.split(p)[0] for p in case_map.values()}, key=len)
+    wildcards: Set[str] = set()
+
+    def norm_join(*a: str) -> str:
+        return os.path.normcase(os.path.join(*a))
+
+    for root in unchecked:
+        if any(os.path.normcase(root).startswith(w) for w in wildcards):
+            # This directory has already been handled.
+            continue
+
+        all_files: Set[str] = set()
+        all_subdirs: Set[str] = set()
+        for dirname, subdirs, files in os.walk(root):
+            all_subdirs.update(norm_join(root, dirname, d) for d in subdirs)
+            all_files.update(norm_join(root, dirname, f) for f in files)
+        # If all the files we found are in our remaining set of files to
+        # remove, then remove them from the latter set and add a wildcard
+        # for the directory.
+        if not (all_files - remaining):
+            remaining.difference_update(all_files)
+            wildcards.add(root + os.sep)
+
+    return set(map(case_map.__getitem__, remaining)) | wildcards
+
+
+def compress_for_output_listing(paths: Iterable[str]) -> Tuple[Set[str], Set[str]]:
+    """Returns a tuple of 2 sets of which paths to display to user
+
+    The first set contains paths that would be deleted. Files of a package
+    are not added and the top-level directory of the package has a '*' added
+    at the end - to signify that all its contents are removed.
+
+    The second set contains files that would have been skipped in the above
+    folders.
+    """
+
+    will_remove = set(paths)
+    will_skip = set()
+
+    # Determine folders and files
+    folders = set()
+    files = set()
+    for path in will_remove:
+        if path.endswith(".pyc"):
+            continue
+        if path.endswith("__init__.py") or ".dist-info" in path:
+            folders.add(os.path.dirname(path))
+        files.add(path)
+
+    _normcased_files = set(map(os.path.normcase, files))
+
+    folders = compact(folders)
+
+    # This walks the tree using os.walk to not miss extra folders
+    # that might get added.
+    for folder in folders:
+        for dirpath, _, dirfiles in os.walk(folder):
+            for fname in dirfiles:
+                if fname.endswith(".pyc"):
+                    continue
+
+                file_ = os.path.join(dirpath, fname)
+                if (
+                    os.path.isfile(file_)
+                    and os.path.normcase(file_) not in _normcased_files
+                ):
+                    # We are skipping this file. Add it to the set.
+                    will_skip.add(file_)
+
+    will_remove = files | {os.path.join(folder, "*") for folder in folders}
+
+    return will_remove, will_skip
+
+
+class StashedUninstallPathSet:
+    """A set of file rename operations to stash files while
+    tentatively uninstalling them."""
+
+    def __init__(self) -> None:
+        # Mapping from source file root to [Adjacent]TempDirectory
+        # for files under that directory.
+        self._save_dirs: Dict[str, TempDirectory] = {}
+        # (old path, new path) tuples for each move that may need
+        # to be undone.
+        self._moves: List[Tuple[str, str]] = []
+
+    def _get_directory_stash(self, path: str) -> str:
+        """Stashes a directory.
+
+        Directories are stashed adjacent to their original location if
+        possible, or else moved/copied into the user's temp dir."""
+
+        try:
+            save_dir: TempDirectory = AdjacentTempDirectory(path)
+        except OSError:
+            save_dir = TempDirectory(kind="uninstall")
+        self._save_dirs[os.path.normcase(path)] = save_dir
+
+        return save_dir.path
+
+    def _get_file_stash(self, path: str) -> str:
+        """Stashes a file.
+
+        If no root has been provided, one will be created for the directory
+        in the user's temp directory."""
+        path = os.path.normcase(path)
+        head, old_head = os.path.dirname(path), None
+        save_dir = None
+
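+        # Walk up through parent directories looking for one that already has
+        # a stash directory.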
+        while head != old_head:
+            try:
+                save_dir = self._save_dirs[head]
+                break
+            except KeyError:
+                pass
+            head, old_head = os.path.dirname(head), head
+        else:
+            # Did not find any suitable root
+            head = os.path.dirname(path)
+            save_dir = TempDirectory(kind="uninstall")
+            self._save_dirs[head] = save_dir
+
+        relpath = os.path.relpath(path, head)
+        if relpath and relpath != os.path.curdir:
+            return os.path.join(save_dir.path, relpath)
+        return save_dir.path
+
+    def stash(self, path: str) -> str:
+        """Stashes the directory or file and returns its new location.
+        Handle symlinks as files to avoid modifying the symlink targets.
+        """
+        path_is_dir = os.path.isdir(path) and not os.path.islink(path)
+        if path_is_dir:
+            new_path = self._get_directory_stash(path)
+        else:
+            new_path = self._get_file_stash(path)
+
+        self._moves.append((path, new_path))
+        if path_is_dir and os.path.isdir(new_path):
+            # If we're moving a directory, we need to
+            # remove the destination first or else it will be
+            # moved to inside the existing directory.
+            # We just created new_path ourselves, so it will
+            # be removable.
+            os.rmdir(new_path)
+        renames(path, new_path)
+        return new_path
+
+    def commit(self) -> None:
+        """Commits the uninstall by removing stashed files."""
+        for save_dir in self._save_dirs.values():
+            save_dir.cleanup()
+        self._moves = []
+        self._save_dirs = {}
+
+    def rollback(self) -> None:
+        """Undoes the uninstall by moving stashed files back."""
+        for p in self._moves:
+            logger.info("Moving to %s\n from %s", *p)
+
+        for new_path, path in self._moves:
+            try:
+                logger.debug("Replacing %s from %s", new_path, path)
+                if os.path.isfile(new_path) or os.path.islink(new_path):
+                    os.unlink(new_path)
+                elif os.path.isdir(new_path):
+                    rmtree(new_path)
+                renames(path, new_path)
+            except OSError as ex:
+                logger.error("Failed to restore %s", new_path)
+                logger.debug("Exception: %s", ex)
+
+        self.commit()
+
+    @property
+    def can_rollback(self) -> bool:
+        return bool(self._moves)
+
+
+class UninstallPathSet:
+    """A set of file paths to be removed in the uninstallation of a
+    requirement."""
+
+    def __init__(self, dist: BaseDistribution) -> None:
+        self._paths: Set[str] = set()
+        self._refuse: Set[str] = set()
+        self._pth: Dict[str, UninstallPthEntries] = {}
+        self._dist = dist
+        self._moved_paths = StashedUninstallPathSet()
+        # Create local cache of normalize_path results. Creating an UninstallPathSet
+        # can result in hundreds/thousands of redundant calls to normalize_path with
+        # the same args, which hurts performance.
+        self._normalize_path_cached = functools.lru_cache()(normalize_path)
+
+    def _permitted(self, path: str) -> bool:
+        """
+        Return True if the given path is one we are permitted to
+        remove/modify, False otherwise.
+
+        """
+        # aka is_local, but caching normalized sys.prefix
+        if not running_under_virtualenv():
+            return True
+        return path.startswith(self._normalize_path_cached(sys.prefix))
+
+    def add(self, path: str) -> None:
+        head, tail = os.path.split(path)
+
+        # we normalize the head to resolve parent directory symlinks, but not
+        # the tail, since we only want to uninstall symlinks, not their targets
+        path = os.path.join(self._normalize_path_cached(head), os.path.normcase(tail))
+
+        if not os.path.exists(path):
+            return
+        if self._permitted(path):
+            self._paths.add(path)
+        else:
+            self._refuse.add(path)
+
+        # __pycache__ files can show up after 'installed-files.txt' is created,
+        # due to imports
+        if os.path.splitext(path)[1] == ".py":
+            self.add(cache_from_source(path))
+
+    def add_pth(self, pth_file: str, entry: str) -> None:
+        pth_file = self._normalize_path_cached(pth_file)
+        if self._permitted(pth_file):
+            if pth_file not in self._pth:
+                self._pth[pth_file] = UninstallPthEntries(pth_file)
+            self._pth[pth_file].add(entry)
+        else:
+            self._refuse.add(pth_file)
+
+    def remove(self, auto_confirm: bool = False, verbose: bool = False) -> None:
+        """Remove paths in ``self._paths`` with confirmation (unless
+        ``auto_confirm`` is True)."""
+
+        if not self._paths:
+            logger.info(
+                "Can't uninstall '%s'. No files were found to uninstall.",
+                self._dist.raw_name,
+            )
+            return
+
+        dist_name_version = f"{self._dist.raw_name}-{self._dist.version}"
+        logger.info("Uninstalling %s:", dist_name_version)
+
+        with indent_log():
+            if auto_confirm or self._allowed_to_proceed(verbose):
+                moved = self._moved_paths
+
+                for_rename = compress_for_rename(self._paths)
+
+                for path in sorted(compact(for_rename)):
+                    moved.stash(path)
+                    logger.verbose("Removing file or directory %s", path)
+
+                for pth in self._pth.values():
+                    pth.remove()
+
+                logger.info("Successfully uninstalled %s", dist_name_version)
+
+    def _allowed_to_proceed(self, verbose: bool) -> bool:
+        """Display which files would be deleted and prompt for confirmation"""
+
+        def _display(msg: str, paths: Iterable[str]) -> None:
+            if not paths:
+                return
+
+            logger.info(msg)
+            with indent_log():
+                for path in sorted(compact(paths)):
+                    logger.info(path)
+
+        if not verbose:
+            will_remove, will_skip = compress_for_output_listing(self._paths)
+        else:
+            # In verbose mode, display all the files that are going to be
+            # deleted.
+            will_remove = set(self._paths)
+            will_skip = set()
+
+        _display("Would remove:", will_remove)
+        _display("Would not remove (might be manually added):", will_skip)
+        _display("Would not remove (outside of prefix):", self._refuse)
+        if verbose:
+            _display("Will actually move:", compress_for_rename(self._paths))
+
+        return ask("Proceed (Y/n)? ", ("y", "n", "")) != "n"
+
+    def rollback(self) -> None:
+        """Rollback the changes previously made by remove()."""
+        if not self._moved_paths.can_rollback:
+            logger.error(
+                "Can't roll back %s; was not uninstalled",
+                self._dist.raw_name,
+            )
+            return
+        logger.info("Rolling back uninstall of %s", self._dist.raw_name)
+        self._moved_paths.rollback()
+        for pth in self._pth.values():
+            pth.rollback()
+
+    def commit(self) -> None:
+        """Remove temporary save dir: rollback will no longer be possible."""
+        self._moved_paths.commit()
+
+    @classmethod
+    def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet":
+        dist_location = dist.location
+        info_location = dist.info_location
+        if dist_location is None:
+            logger.info(
+                "Not uninstalling %s since it is not installed",
+                dist.canonical_name,
+            )
+            return cls(dist)
+
+        normalized_dist_location = normalize_path(dist_location)
+        if not dist.local:
+            logger.info(
+                "Not uninstalling %s at %s, outside environment %s",
+                dist.canonical_name,
+                normalized_dist_location,
+                sys.prefix,
+            )
+            return cls(dist)
+
+        if normalized_dist_location in {
+            p
+            for p in {sysconfig.get_path("stdlib"), sysconfig.get_path("platstdlib")}
+            if p
+        }:
+            logger.info(
+                "Not uninstalling %s at %s, as it is in the standard library.",
+                dist.canonical_name,
+                normalized_dist_location,
+            )
+            return cls(dist)
+
+        paths_to_remove = cls(dist)
+        develop_egg_link = egg_link_path_from_location(dist.raw_name)
+
+        # Distribution is installed with metadata in a "flat" .egg-info
+        # directory. This means it is not a modern .dist-info installation, an
+        # egg, or legacy editable.
+        setuptools_flat_installation = (
+            dist.installed_with_setuptools_egg_info
+            and info_location is not None
+            and os.path.exists(info_location)
+            # If dist is editable and the location points to a ``.egg-info``,
+            # we are in fact in the legacy editable case.
+            and not info_location.endswith(f"{dist.setuptools_filename}.egg-info")
+        )
+
+        # The order of these uninstall cases matters: when two installs of the
+        # same package exist, pip needs to uninstall the currently detected version
+        if setuptools_flat_installation:
+            if info_location is not None:
+                paths_to_remove.add(info_location)
+            installed_files = dist.iter_declared_entries()
+            if installed_files is not None:
+                for installed_file in installed_files:
+                    paths_to_remove.add(os.path.join(dist_location, installed_file))
+            # FIXME: need a test for this elif block
+            # occurs with --single-version-externally-managed/--record outside
+            # of pip
+            elif dist.is_file("top_level.txt"):
+                try:
+                    namespace_packages = dist.read_text("namespace_packages.txt")
+                except FileNotFoundError:
+                    namespaces = []
+                else:
+                    namespaces = namespace_packages.splitlines(keepends=False)
+                for top_level_pkg in [
+                    p
+                    for p in dist.read_text("top_level.txt").splitlines()
+                    if p and p not in namespaces
+                ]:
+                    path = os.path.join(dist_location, top_level_pkg)
+                    paths_to_remove.add(path)
+                    paths_to_remove.add(f"{path}.py")
+                    paths_to_remove.add(f"{path}.pyc")
+                    paths_to_remove.add(f"{path}.pyo")
+
+        elif dist.installed_by_distutils:
+            raise UninstallationError(
+                "Cannot uninstall {!r}. It is a distutils installed project "
+                "and thus we cannot accurately determine which files belong "
+                "to it which would lead to only a partial uninstall.".format(
+                    dist.raw_name,
+                )
+            )
+
+        elif dist.installed_as_egg:
+            # package installed by easy_install
+            # We cannot match on dist.egg_name because it can slightly vary
+            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
+            paths_to_remove.add(dist_location)
+            easy_install_egg = os.path.split(dist_location)[1]
+            easy_install_pth = os.path.join(
+                os.path.dirname(dist_location),
+                "easy-install.pth",
+            )
+            paths_to_remove.add_pth(easy_install_pth, "./" + easy_install_egg)
+
+        elif dist.installed_with_dist_info:
+            for path in uninstallation_paths(dist):
+                paths_to_remove.add(path)
+
+        elif develop_egg_link:
+            # PEP 660 modern editable is handled in the ``.dist-info`` case
+            # above, so this only covers the setuptools-style editable.
+            with open(develop_egg_link) as fh:
+                link_pointer = os.path.normcase(fh.readline().strip())
+                normalized_link_pointer = paths_to_remove._normalize_path_cached(
+                    link_pointer
+                )
+            assert os.path.samefile(
+                normalized_link_pointer, normalized_dist_location
+            ), (
+                f"Egg-link {develop_egg_link} (to {link_pointer}) does not match "
+                f"installed location of {dist.raw_name} (at {dist_location})"
+            )
+            paths_to_remove.add(develop_egg_link)
+            easy_install_pth = os.path.join(
+                os.path.dirname(develop_egg_link), "easy-install.pth"
+            )
+            paths_to_remove.add_pth(easy_install_pth, dist_location)
+
+        else:
+            logger.debug(
+                "Not sure how to uninstall: %s - Check: %s",
+                dist,
+                dist_location,
+            )
+
+        if dist.in_usersite:
+            bin_dir = get_bin_user()
+        else:
+            bin_dir = get_bin_prefix()
+
+        # find scripts installed via the distutils scripts= keyword
+        try:
+            for script in dist.iter_distutils_script_names():
+                paths_to_remove.add(os.path.join(bin_dir, script))
+                if WINDOWS:
+                    paths_to_remove.add(os.path.join(bin_dir, f"{script}.bat"))
+        except (FileNotFoundError, NotADirectoryError):
+            pass
+
+        # find console_scripts and gui_scripts
+        def iter_scripts_to_remove(
+            dist: BaseDistribution,
+            bin_dir: str,
+        ) -> Generator[str, None, None]:
+            for entry_point in dist.iter_entry_points():
+                if entry_point.group == "console_scripts":
+                    yield from _script_names(bin_dir, entry_point.name, False)
+                elif entry_point.group == "gui_scripts":
+                    yield from _script_names(bin_dir, entry_point.name, True)
+
+        for s in iter_scripts_to_remove(dist, bin_dir):
+            paths_to_remove.add(s)
+
+        return paths_to_remove
+
+
+class UninstallPthEntries:
+    def __init__(self, pth_file: str) -> None:
+        self.file = pth_file
+        self.entries: Set[str] = set()
+        self._saved_lines: Optional[List[bytes]] = None
+
+    def add(self, entry: str) -> None:
+        entry = os.path.normcase(entry)
+        # On Windows, os.path.normcase converts the entry to use
+        # backslashes.  This is correct for entries that describe absolute
+        # paths outside of site-packages, but all the others use forward
+        # slashes.
+        # os.path.splitdrive is used instead of os.path.isabs because isabs
+        # treats non-absolute paths with drive letter markings like c:foo\bar
+        # as absolute paths. It also does not recognize UNC paths if they don't
+        # have more than "\\sever\share". Valid examples: "\\server\share\" or
+        # "\\server\share\folder".
+        if WINDOWS and not os.path.splitdrive(entry)[0]:
+            entry = entry.replace("\\", "/")
+        self.entries.add(entry)
+
+    def remove(self) -> None:
+        logger.verbose("Removing pth entries from %s:", self.file)
+
+        # If the file doesn't exist, log a warning and return
+        if not os.path.isfile(self.file):
+            logger.warning("Cannot remove entries from nonexistent file %s", self.file)
+            return
+        with open(self.file, "rb") as fh:
+            # Read in binary mode so we can detect whether the file uses '\r\n'
+            # or '\n' line endings.
+            lines = fh.readlines()
+            self._saved_lines = lines
+        if any(b"\r\n" in line for line in lines):
+            endline = "\r\n"
+        else:
+            endline = "\n"
+        # handle missing trailing newline
+        if lines and not lines[-1].endswith(endline.encode("utf-8")):
+            lines[-1] = lines[-1] + endline.encode("utf-8")
+        for entry in self.entries:
+            try:
+                logger.verbose("Removing entry: %s", entry)
+                lines.remove((entry + endline).encode("utf-8"))
+            except ValueError:
+                pass
+        with open(self.file, "wb") as fh:
+            fh.writelines(lines)
+
+    def rollback(self) -> bool:
+        if self._saved_lines is None:
+            logger.error("Cannot roll back changes to %s, none were made", self.file)
+            return False
+        logger.debug("Rolling %s back to previous state", self.file)
+        with open(self.file, "wb") as fh:
+            fh.writelines(self._saved_lines)
+        return True
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/resolution/__init__.py b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/resolution/base.py b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/base.py
new file mode 100644
index 0000000..42dade1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/base.py
@@ -0,0 +1,20 @@
+from typing import Callable, List, Optional
+
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.req.req_set import RequirementSet
+
+InstallRequirementProvider = Callable[
+    [str, Optional[InstallRequirement]], InstallRequirement
+]
+
+
+class BaseResolver:
+    def resolve(
+        self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
+    ) -> RequirementSet:
+        raise NotImplementedError()
+
+    def get_installation_order(
+        self, req_set: RequirementSet
+    ) -> List[InstallRequirement]:
+        raise NotImplementedError()
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__init__.py b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/resolver.py b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/resolver.py
new file mode 100644
index 0000000..5ddb848
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/resolver.py
@@ -0,0 +1,598 @@
+"""Dependency Resolution
+
+The dependency resolution in pip is performed as follows:
+
+for top-level requirements:
+    a. only one spec allowed per project, regardless of conflicts or not.
+       otherwise a "double requirement" exception is raised
+    b. they override sub-dependency requirements.
+for sub-dependencies
+    a. "first found, wins" (where the order is breadth first)
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import logging
+import sys
+from collections import defaultdict
+from itertools import chain
+from typing import DefaultDict, Iterable, List, Optional, Set, Tuple
+
+from pip._vendor.packaging import specifiers
+from pip._vendor.packaging.requirements import Requirement
+
+from pip._internal.cache import WheelCache
+from pip._internal.exceptions import (
+    BestVersionAlreadyInstalled,
+    DistributionNotFound,
+    HashError,
+    HashErrors,
+    InstallationError,
+    NoneMetadataError,
+    UnsupportedPythonVersion,
+)
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req.req_install import (
+    InstallRequirement,
+    check_invalid_constraint_type,
+)
+from pip._internal.req.req_set import RequirementSet
+from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
+from pip._internal.utils import compatibility_tags
+from pip._internal.utils.compatibility_tags import get_supported
+from pip._internal.utils.direct_url_helpers import direct_url_from_link
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import normalize_version_info
+from pip._internal.utils.packaging import check_requires_python
+
+logger = logging.getLogger(__name__)
+
+DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]]
+
+
+def _check_dist_requires_python(
+    dist: BaseDistribution,
+    version_info: Tuple[int, int, int],
+    ignore_requires_python: bool = False,
+) -> None:
+    """
+    Check whether the given Python version is compatible with a distribution's
+    "Requires-Python" value.
+
+    :param version_info: A 3-tuple of ints representing the Python
+        major-minor-micro version to check.
+    :param ignore_requires_python: Whether to ignore the "Requires-Python"
+        value if the given Python version isn't compatible.
+
+    :raises UnsupportedPythonVersion: When the given Python version isn't
+        compatible.
+    """
+    # This idiosyncratically converts the SpecifierSet to str and lets
+    # check_requires_python then parse it again into SpecifierSet. But this
+    # is the legacy resolver so I'm just not going to bother refactoring.
+    try:
+        requires_python = str(dist.requires_python)
+    except FileNotFoundError as e:
+        raise NoneMetadataError(dist, str(e))
+    try:
+        is_compatible = check_requires_python(
+            requires_python,
+            version_info=version_info,
+        )
+    except specifiers.InvalidSpecifier as exc:
+        logger.warning(
+            "Package %r has an invalid Requires-Python: %s", dist.raw_name, exc
+        )
+        return
+
+    if is_compatible:
+        return
+
+    version = ".".join(map(str, version_info))
+    if ignore_requires_python:
+        logger.debug(
+            "Ignoring failed Requires-Python check for package %r: %s not in %r",
+            dist.raw_name,
+            version,
+            requires_python,
+        )
+        return
+
+    raise UnsupportedPythonVersion(
+        "Package {!r} requires a different Python: {} not in {!r}".format(
+            dist.raw_name, version, requires_python
+        )
+    )
+
+
+class Resolver(BaseResolver):
+    """Resolves which packages need to be installed/uninstalled to perform \
+    the requested operation without breaking the requirements of any package.
+    """
+
+    _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
+
+    def __init__(
+        self,
+        preparer: RequirementPreparer,
+        finder: PackageFinder,
+        wheel_cache: Optional[WheelCache],
+        make_install_req: InstallRequirementProvider,
+        use_user_site: bool,
+        ignore_dependencies: bool,
+        ignore_installed: bool,
+        ignore_requires_python: bool,
+        force_reinstall: bool,
+        upgrade_strategy: str,
+        py_version_info: Optional[Tuple[int, ...]] = None,
+    ) -> None:
+        super().__init__()
+        assert upgrade_strategy in self._allowed_strategies
+
+        if py_version_info is None:
+            py_version_info = sys.version_info[:3]
+        else:
+            py_version_info = normalize_version_info(py_version_info)
+
+        self._py_version_info = py_version_info
+
+        self.preparer = preparer
+        self.finder = finder
+        self.wheel_cache = wheel_cache
+
+        self.upgrade_strategy = upgrade_strategy
+        self.force_reinstall = force_reinstall
+        self.ignore_dependencies = ignore_dependencies
+        self.ignore_installed = ignore_installed
+        self.ignore_requires_python = ignore_requires_python
+        self.use_user_site = use_user_site
+        self._make_install_req = make_install_req
+
+        self._discovered_dependencies: DiscoveredDependencies = defaultdict(list)
+
+    def resolve(
+        self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
+    ) -> RequirementSet:
+        """Resolve what operations need to be done
+
+        As a side-effect of this method, the packages (and their dependencies)
+        are downloaded, unpacked and prepared for installation. This
+        preparation is done by ``pip.operations.prepare``.
+
+        Once PyPI has static dependency metadata available, it would be
+        possible to move the preparation to become a step separated from
+        dependency resolution.
+        """
+        requirement_set = RequirementSet(check_supported_wheels=check_supported_wheels)
+        for req in root_reqs:
+            if req.constraint:
+                check_invalid_constraint_type(req)
+            self._add_requirement_to_set(requirement_set, req)
+
+        # Actually prepare the files, and collect any exceptions. Most hash
+        # exceptions cannot be checked ahead of time, because
+        # _populate_link() needs to be called before we can make decisions
+        # based on link type.
+        discovered_reqs: List[InstallRequirement] = []
+        hash_errors = HashErrors()
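+        # discovered_reqs grows while we iterate it via chain(), so sub-dependencies
+        # found along the way are processed in this same loop (breadth-first).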
+        for req in chain(requirement_set.all_requirements, discovered_reqs):
+            try:
+                discovered_reqs.extend(self._resolve_one(requirement_set, req))
+            except HashError as exc:
+                exc.req = req
+                hash_errors.append(exc)
+
+        if hash_errors:
+            raise hash_errors
+
+        return requirement_set
+
+    def _add_requirement_to_set(
+        self,
+        requirement_set: RequirementSet,
+        install_req: InstallRequirement,
+        parent_req_name: Optional[str] = None,
+        extras_requested: Optional[Iterable[str]] = None,
+    ) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]:
+        """Add install_req as a requirement to install.
+
+        :param parent_req_name: The name of the requirement that needed this
+            added. The name is used because when multiple unnamed requirements
+            resolve to the same name, we could otherwise end up with dependency
+            links that point outside the Requirements set. parent_req must
+            already be added. Note that None implies that this is a user
+            supplied requirement, vs an inferred one.
+        :param extras_requested: an iterable of extras used to evaluate the
+            environment markers.
+        :return: Additional requirements to scan. That is either [] if
+            the requirement is not applicable, or [install_req] if the
+            requirement is applicable and has just been added.
+        """
+        # If the markers do not match, ignore this requirement.
+        if not install_req.match_markers(extras_requested):
+            logger.info(
+                "Ignoring %s: markers '%s' don't match your environment",
+                install_req.name,
+                install_req.markers,
+            )
+            return [], None
+
+        # If the wheel is not supported, raise an error.
+        # Should check this after filtering out based on environment markers to
+        # allow specifying different wheels based on the environment/OS, in a
+        # single requirements file.
+        if install_req.link and install_req.link.is_wheel:
+            wheel = Wheel(install_req.link.filename)
+            tags = compatibility_tags.get_supported()
+            if requirement_set.check_supported_wheels and not wheel.supported(tags):
+                raise InstallationError(
+                    f"{wheel.filename} is not a supported wheel on this platform."
+                )
+
+        # This next bit is really a sanity check.
+        assert (
+            not install_req.user_supplied or parent_req_name is None
+        ), "a user supplied req shouldn't have a parent"
+
+        # Unnamed requirements are scanned again and the requirement won't be
+        # added as a dependency until after scanning.
+        if not install_req.name:
+            requirement_set.add_unnamed_requirement(install_req)
+            return [install_req], None
+
+        try:
+            existing_req: Optional[
+                InstallRequirement
+            ] = requirement_set.get_requirement(install_req.name)
+        except KeyError:
+            existing_req = None
+
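+        # "Double requirement": the same named project was given twice at the top
+        # level with different specifiers, and the existing one is not just a constraint.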
+        has_conflicting_requirement = (
+            parent_req_name is None
+            and existing_req
+            and not existing_req.constraint
+            and existing_req.extras == install_req.extras
+            and existing_req.req
+            and install_req.req
+            and existing_req.req.specifier != install_req.req.specifier
+        )
+        if has_conflicting_requirement:
+            raise InstallationError(
+                "Double requirement given: {} (already in {}, name={!r})".format(
+                    install_req, existing_req, install_req.name
+                )
+            )
+
+        # When no existing requirement exists, add the requirement as a
+        # dependency and it will be scanned again after.
+        if not existing_req:
+            requirement_set.add_named_requirement(install_req)
+            # We'd want to rescan this requirement later
+            return [install_req], install_req
+
+        # Assume there's no need to scan, and that we've already
+        # encountered this for scanning.
+        if install_req.constraint or not existing_req.constraint:
+            return [], existing_req
+
+        does_not_satisfy_constraint = install_req.link and not (
+            existing_req.link and install_req.link.path == existing_req.link.path
+        )
+        if does_not_satisfy_constraint:
+            raise InstallationError(
+                f"Could not satisfy constraints for '{install_req.name}': "
+                "installation from path or url cannot be "
+                "constrained to a version"
+            )
+        # If we're now installing a constraint, mark the existing
+        # object for real installation.
+        existing_req.constraint = False
+        # If we're now installing a user supplied requirement,
+        # mark the existing object as such.
+        if install_req.user_supplied:
+            existing_req.user_supplied = True
+        existing_req.extras = tuple(
+            sorted(set(existing_req.extras) | set(install_req.extras))
+        )
+        logger.debug(
+            "Setting %s extras to: %s",
+            existing_req,
+            existing_req.extras,
+        )
+        # Return the existing requirement for addition to the parent and
+        # scanning again.
+        return [existing_req], existing_req
+
+    def _is_upgrade_allowed(self, req: InstallRequirement) -> bool:
+        if self.upgrade_strategy == "to-satisfy-only":
+            return False
+        elif self.upgrade_strategy == "eager":
+            return True
+        else:
+            assert self.upgrade_strategy == "only-if-needed"
+            return req.user_supplied or req.constraint
+
+    def _set_req_to_reinstall(self, req: InstallRequirement) -> None:
+        """
+        Set a requirement to be installed.
+        """
+        # Don't uninstall the conflict if doing a user install and the
+        # conflict is not a user install.
+        if not self.use_user_site or req.satisfied_by.in_usersite:
+            req.should_reinstall = True
+        req.satisfied_by = None
+
+    def _check_skip_installed(
+        self, req_to_install: InstallRequirement
+    ) -> Optional[str]:
+        """Check if req_to_install should be skipped.
+
+        This will check if the req is installed, and whether we should upgrade
+        or reinstall it, taking into account all the relevant user options.
+
+        After calling this req_to_install will only have satisfied_by set to
+        None if the req_to_install is to be upgraded/reinstalled etc. Any
+        other value will be a dist recording the current thing installed that
+        satisfies the requirement.
+
+        Note that for vcs urls and the like we can't assess skipping in this
+        routine - we simply identify that we need to pull the thing down,
+        then later on it is pulled down and introspected to assess upgrade/
+        reinstalls etc.
+
+        :return: A text reason for why it was skipped, or None.
+        """
+        if self.ignore_installed:
+            return None
+
+        req_to_install.check_if_exists(self.use_user_site)
+        if not req_to_install.satisfied_by:
+            return None
+
+        if self.force_reinstall:
+            self._set_req_to_reinstall(req_to_install)
+            return None
+
+        if not self._is_upgrade_allowed(req_to_install):
+            if self.upgrade_strategy == "only-if-needed":
+                return "already satisfied, skipping upgrade"
+            return "already satisfied"
+
+        # Check for the possibility of an upgrade.  For link-based
+        # requirements we have to pull the tree down and inspect to assess
+        # the version #, so it's handled way down.
+        if not req_to_install.link:
+            try:
+                self.finder.find_requirement(req_to_install, upgrade=True)
+            except BestVersionAlreadyInstalled:
+                # Then the best version is installed.
+                return "already up-to-date"
+            except DistributionNotFound:
+                # No distribution found, so we squash the error.  It will
+                # be raised later when we re-try later to do the install.
+                # Why don't we just raise here?
+                pass
+
+        self._set_req_to_reinstall(req_to_install)
+        return None
+
+    def _find_requirement_link(self, req: InstallRequirement) -> Optional[Link]:
+        upgrade = self._is_upgrade_allowed(req)
+        best_candidate = self.finder.find_requirement(req, upgrade)
+        if not best_candidate:
+            return None
+
+        # Log a warning per PEP 592 if necessary before returning.
+        link = best_candidate.link
+        if link.is_yanked:
+            reason = link.yanked_reason or ""
+            msg = (
+                # Mark this as a unicode string to prevent
+                # "UnicodeEncodeError: 'ascii' codec can't encode character"
+                # in Python 2 when the reason contains non-ascii characters.
+                "The candidate selected for download or install is a "
+                f"yanked version: {best_candidate}\n"
+                f"Reason for being yanked: {reason}"
+            )
+            logger.warning(msg)
+
+        return link
+
+    def _populate_link(self, req: InstallRequirement) -> None:
+        """Ensure that if a link can be found for this, that it is found.
+
+        Note that req.link may still be None - if the requirement is already
+        installed and not needed to be upgraded based on the return value of
+        _is_upgrade_allowed().
+
+        If preparer.require_hashes is True, don't use the wheel cache, because
+        cached wheels, always built locally, have different hashes than the
+        files downloaded from the index server and thus throw false hash
+        mismatches. Furthermore, cached wheels at present have nondeterministic
+        contents due to file modification times.
+        """
+        if req.link is None:
+            req.link = self._find_requirement_link(req)
+
+        if self.wheel_cache is None or self.preparer.require_hashes:
+            return
+        cache_entry = self.wheel_cache.get_cache_entry(
+            link=req.link,
+            package_name=req.name,
+            supported_tags=get_supported(),
+        )
+        if cache_entry is not None:
+            logger.debug("Using cached wheel link: %s", cache_entry.link)
+            if req.link is req.original_link and cache_entry.persistent:
+                req.cached_wheel_source_link = req.link
+            if cache_entry.origin is not None:
+                req.download_info = cache_entry.origin
+            else:
+                # Legacy cache entry that does not have origin.json.
+                # download_info may miss the archive_info.hashes field.
+                req.download_info = direct_url_from_link(
+                    req.link, link_is_in_wheel_cache=cache_entry.persistent
+                )
+            req.link = cache_entry.link
+
+    def _get_dist_for(self, req: InstallRequirement) -> BaseDistribution:
+        """Takes a InstallRequirement and returns a single AbstractDist \
+        representing a prepared variant of the same.
+        """
+        if req.editable:
+            return self.preparer.prepare_editable_requirement(req)
+
+        # satisfied_by is only evaluated by calling _check_skip_installed,
+        # so it must be None here.
+        assert req.satisfied_by is None
+        skip_reason = self._check_skip_installed(req)
+
+        if req.satisfied_by:
+            return self.preparer.prepare_installed_requirement(req, skip_reason)
+
+        # We eagerly populate the link, since that's our "legacy" behavior.
+        self._populate_link(req)
+        dist = self.preparer.prepare_linked_requirement(req)
+
+        # NOTE
+        # The following portion is for determining if a certain package is
+        # going to be re-installed/upgraded or not and reporting to the user.
+        # This should probably get cleaned up in a future refactor.
+
+        # req.req is only available after unpacking for URL-based packages, so
+        # repeat check_if_exists here to support uninstall-on-upgrade (#14).
+        if not self.ignore_installed:
+            req.check_if_exists(self.use_user_site)
+
+        if req.satisfied_by:
+            should_modify = (
+                self.upgrade_strategy != "to-satisfy-only"
+                or self.force_reinstall
+                or self.ignore_installed
+                or req.link.scheme == "file"
+            )
+            if should_modify:
+                self._set_req_to_reinstall(req)
+            else:
+                logger.info(
+                    "Requirement already satisfied (use --upgrade to upgrade): %s",
+                    req,
+                )
+        return dist
+
+    def _resolve_one(
+        self,
+        requirement_set: RequirementSet,
+        req_to_install: InstallRequirement,
+    ) -> List[InstallRequirement]:
+        """Prepare a single requirements file.
+
+        :return: A list of additional InstallRequirements to also install.
+        """
+        # Tell user what we are doing for this requirement:
+        # obtain (editable), skipping, processing (local url), collecting
+        # (remote url or package name)
+        if req_to_install.constraint or req_to_install.prepared:
+            return []
+
+        req_to_install.prepared = True
+
+        # Parse and return dependencies
+        dist = self._get_dist_for(req_to_install)
+        # This will raise UnsupportedPythonVersion if the given Python
+        # version isn't compatible with the distribution's Requires-Python.
+        _check_dist_requires_python(
+            dist,
+            version_info=self._py_version_info,
+            ignore_requires_python=self.ignore_requires_python,
+        )
+
+        more_reqs: List[InstallRequirement] = []
+
+        def add_req(subreq: Requirement, extras_requested: Iterable[str]) -> None:
+            # This idiosyncratically converts the Requirement to str and lets
+            # make_install_req then parse it again into Requirement. But this is
+            # the legacy resolver so I'm just not going to bother refactoring.
+            sub_install_req = self._make_install_req(str(subreq), req_to_install)
+            parent_req_name = req_to_install.name
+            to_scan_again, add_to_parent = self._add_requirement_to_set(
+                requirement_set,
+                sub_install_req,
+                parent_req_name=parent_req_name,
+                extras_requested=extras_requested,
+            )
+            if parent_req_name and add_to_parent:
+                self._discovered_dependencies[parent_req_name].append(add_to_parent)
+            more_reqs.extend(to_scan_again)
+
+        with indent_log():
+            # We add req_to_install before its dependencies, so that we
+            # can refer to it when adding dependencies.
+            if not requirement_set.has_requirement(req_to_install.name):
+                # 'unnamed' requirements will get added here
+                # 'unnamed' requirements can only come from being directly
+                # provided by the user.
+                assert req_to_install.user_supplied
+                self._add_requirement_to_set(
+                    requirement_set, req_to_install, parent_req_name=None
+                )
+
+            if not self.ignore_dependencies:
+                if req_to_install.extras:
+                    logger.debug(
+                        "Installing extra requirements: %r",
+                        ",".join(req_to_install.extras),
+                    )
+                missing_requested = sorted(
+                    set(req_to_install.extras) - set(dist.iter_provided_extras())
+                )
+                for missing in missing_requested:
+                    logger.warning(
+                        "%s %s does not provide the extra '%s'",
+                        dist.raw_name,
+                        dist.version,
+                        missing,
+                    )
+
+                available_requested = sorted(
+                    set(dist.iter_provided_extras()) & set(req_to_install.extras)
+                )
+                for subreq in dist.iter_dependencies(available_requested):
+                    add_req(subreq, extras_requested=available_requested)
+
+        return more_reqs
+
+    def get_installation_order(
+        self, req_set: RequirementSet
+    ) -> List[InstallRequirement]:
+        """Create the installation order.
+
+        The installation order is topological - requirements are installed
+        before the requiring thing. We break cycles at an arbitrary point,
+        and make no other guarantees.
+        """
+        # The current implementation, which we may change at any point
+        # installs the user specified things in the order given, except when
+        # dependencies must come earlier to achieve topological order.
+        order = []
+        ordered_reqs: Set[InstallRequirement] = set()
+
+        def schedule(req: InstallRequirement) -> None:
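+            # Post-order depth-first walk: a requirement's discovered dependencies
+            # are appended to the order before the requirement itself; satisfied,
+            # already-scheduled, and constraint-only reqs are skipped.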
+            if req.satisfied_by or req in ordered_reqs:
+                return
+            if req.constraint:
+                return
+            ordered_reqs.add(req)
+            for dep in self._discovered_dependencies[req.name]:
+                schedule(dep)
+            order.append(req)
+
+        for install_req in req_set.requirements.values():
+            schedule(install_req)
+        return order
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__init__.py b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/base.py b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/base.py
new file mode 100644
index 0000000..9c0ef5c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/base.py
@@ -0,0 +1,141 @@
+from typing import FrozenSet, Iterable, Optional, Tuple, Union
+
+from pip._vendor.packaging.specifiers import SpecifierSet
+from pip._vendor.packaging.utils import NormalizedName
+from pip._vendor.packaging.version import LegacyVersion, Version
+
+from pip._internal.models.link import Link, links_equivalent
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.hashes import Hashes
+
+CandidateLookup = Tuple[Optional["Candidate"], Optional[InstallRequirement]]
+CandidateVersion = Union[LegacyVersion, Version]
+
+
+def format_name(project: NormalizedName, extras: FrozenSet[NormalizedName]) -> str:
+    if not extras:
+        return project
+    extras_expr = ",".join(sorted(extras))
+    return f"{project}[{extras_expr}]"
+
+
+class Constraint:
+    def __init__(
+        self, specifier: SpecifierSet, hashes: Hashes, links: FrozenSet[Link]
+    ) -> None:
+        self.specifier = specifier
+        self.hashes = hashes
+        self.links = links
+
+    @classmethod
+    def empty(cls) -> "Constraint":
+        return Constraint(SpecifierSet(), Hashes(), frozenset())
+
+    @classmethod
+    def from_ireq(cls, ireq: InstallRequirement) -> "Constraint":
+        links = frozenset([ireq.link]) if ireq.link else frozenset()
+        return Constraint(ireq.specifier, ireq.hashes(trust_internet=False), links)
+
+    def __bool__(self) -> bool:
+        return bool(self.specifier) or bool(self.hashes) or bool(self.links)
+
+    def __and__(self, other: InstallRequirement) -> "Constraint":
+        if not isinstance(other, InstallRequirement):
+            return NotImplemented
+        specifier = self.specifier & other.specifier
+        hashes = self.hashes & other.hashes(trust_internet=False)
+        links = self.links
+        if other.link:
+            links = links.union([other.link])
+        return Constraint(specifier, hashes, links)
+
+    def is_satisfied_by(self, candidate: "Candidate") -> bool:
+        # Reject if there are any mismatched URL constraints on this package.
+        if self.links and not all(_match_link(link, candidate) for link in self.links):
+            return False
+        # We can safely always allow prereleases here since PackageFinder
+        # already implements the prerelease logic, and would have filtered out
+        # prerelease candidates if the user does not expect them.
+        return self.specifier.contains(candidate.version, prereleases=True)
+
+
+class Requirement:
+    @property
+    def project_name(self) -> NormalizedName:
+        """The "project name" of a requirement.
+
+        This is different from ``name`` if this requirement contains extras,
+        in which case ``name`` would contain the ``[...]`` part, while this
+        refers to the name of the project.
+        """
+        raise NotImplementedError("Subclass should override")
+
+    @property
+    def name(self) -> str:
+        """The name identifying this requirement in the resolver.
+
+        This is different from ``project_name`` if this requirement contains
+        extras, where ``project_name`` would not contain the ``[...]`` part.
+        """
+        raise NotImplementedError("Subclass should override")
+
+    def is_satisfied_by(self, candidate: "Candidate") -> bool:
+        return False
+
+    def get_candidate_lookup(self) -> CandidateLookup:
+        raise NotImplementedError("Subclass should override")
+
+    def format_for_error(self) -> str:
+        raise NotImplementedError("Subclass should override")
+
+
+def _match_link(link: Link, candidate: "Candidate") -> bool:
+    if candidate.source_link:
+        return links_equivalent(link, candidate.source_link)
+    return False
+
+
+class Candidate:
+    @property
+    def project_name(self) -> NormalizedName:
+        """The "project name" of the candidate.
+
+        This is different from ``name`` if this candidate contains extras,
+        in which case ``name`` would contain the ``[...]`` part, while this
+        refers to the name of the project.
+        """
+        raise NotImplementedError("Override in subclass")
+
+    @property
+    def name(self) -> str:
+        """The name identifying this candidate in the resolver.
+
+        This is different from ``project_name`` if this candidate contains
+        extras, where ``project_name`` would not contain the ``[...]`` part.
+        """
+        raise NotImplementedError("Override in subclass")
+
+    @property
+    def version(self) -> CandidateVersion:
+        raise NotImplementedError("Override in subclass")
+
+    @property
+    def is_installed(self) -> bool:
+        raise NotImplementedError("Override in subclass")
+
+    @property
+    def is_editable(self) -> bool:
+        raise NotImplementedError("Override in subclass")
+
+    @property
+    def source_link(self) -> Optional[Link]:
+        raise NotImplementedError("Override in subclass")
+
+    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
+        raise NotImplementedError("Override in subclass")
+
+    def get_install_requirement(self) -> Optional[InstallRequirement]:
+        raise NotImplementedError("Override in subclass")
+
+    def format_for_error(self) -> str:
+        raise NotImplementedError("Subclass should override")
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/candidates.py b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/candidates.py
new file mode 100644
index 0000000..4125cda
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/candidates.py
@@ -0,0 +1,597 @@
+import logging
+import sys
+from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Union, cast
+
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+from pip._vendor.packaging.version import Version
+
+from pip._internal.exceptions import (
+    HashError,
+    InstallationSubprocessError,
+    MetadataInconsistent,
+)
+from pip._internal.metadata import BaseDistribution
+from pip._internal.models.link import Link, links_equivalent
+from pip._internal.models.wheel import Wheel
+from pip._internal.req.constructors import (
+    install_req_from_editable,
+    install_req_from_line,
+)
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.direct_url_helpers import direct_url_from_link
+from pip._internal.utils.misc import normalize_version_info
+
+from .base import Candidate, CandidateVersion, Requirement, format_name
+
+if TYPE_CHECKING:
+    from .factory import Factory
+
+logger = logging.getLogger(__name__)
+
+BaseCandidate = Union[
+    "AlreadyInstalledCandidate",
+    "EditableCandidate",
+    "LinkCandidate",
+]
+
+# Avoid conflicting with the PyPI package "Python".
+REQUIRES_PYTHON_IDENTIFIER = cast(NormalizedName, "")
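+# The empty string is not a valid normalized project name, so this synthetic
+# identifier cannot collide with any real distribution on an index.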
+
+
+def as_base_candidate(candidate: Candidate) -> Optional[BaseCandidate]:
+    """The runtime version of BaseCandidate."""
+    base_candidate_classes = (
+        AlreadyInstalledCandidate,
+        EditableCandidate,
+        LinkCandidate,
+    )
+    if isinstance(candidate, base_candidate_classes):
+        return candidate
+    return None
+
+
+def make_install_req_from_link(
+    link: Link, template: InstallRequirement
+) -> InstallRequirement:
+    assert not template.editable, "template is editable"
+    if template.req:
+        line = str(template.req)
+    else:
+        line = link.url
+    ireq = install_req_from_line(
+        line,
+        user_supplied=template.user_supplied,
+        comes_from=template.comes_from,
+        use_pep517=template.use_pep517,
+        isolated=template.isolated,
+        constraint=template.constraint,
+        global_options=template.global_options,
+        hash_options=template.hash_options,
+        config_settings=template.config_settings,
+    )
+    ireq.original_link = template.original_link
+    ireq.link = link
+    ireq.extras = template.extras
+    return ireq
+
+
+def make_install_req_from_editable(
+    link: Link, template: InstallRequirement
+) -> InstallRequirement:
+    assert template.editable, "template not editable"
+    ireq = install_req_from_editable(
+        link.url,
+        user_supplied=template.user_supplied,
+        comes_from=template.comes_from,
+        use_pep517=template.use_pep517,
+        isolated=template.isolated,
+        constraint=template.constraint,
+        permit_editable_wheels=template.permit_editable_wheels,
+        global_options=template.global_options,
+        hash_options=template.hash_options,
+        config_settings=template.config_settings,
+    )
+    ireq.extras = template.extras
+    return ireq
+
+
+def _make_install_req_from_dist(
+    dist: BaseDistribution, template: InstallRequirement
+) -> InstallRequirement:
+    if template.req:
+        line = str(template.req)
+    elif template.link:
+        line = f"{dist.canonical_name} @ {template.link.url}"
+    else:
+        line = f"{dist.canonical_name}=={dist.version}"
+    ireq = install_req_from_line(
+        line,
+        user_supplied=template.user_supplied,
+        comes_from=template.comes_from,
+        use_pep517=template.use_pep517,
+        isolated=template.isolated,
+        constraint=template.constraint,
+        global_options=template.global_options,
+        hash_options=template.hash_options,
+        config_settings=template.config_settings,
+    )
+    ireq.satisfied_by = dist
+    return ireq
+
+
+class _InstallRequirementBackedCandidate(Candidate):
+    """A candidate backed by an ``InstallRequirement``.
+
+    This represents a package request with the target not being already
+    in the environment, and needs to be fetched and installed. The backing
+    ``InstallRequirement`` is responsible for most of the leg work; this
+    class exposes appropriate information to the resolver.
+
+    :param link: The link passed to the ``InstallRequirement``. The backing
+        ``InstallRequirement`` will use this link to fetch the distribution.
+    :param source_link: The link this candidate "originates" from. This is
+        different from ``link`` when the link is found in the wheel cache.
+        ``link`` would point to the wheel cache, while this points to the
+        found remote link (e.g. from pypi.org).
+    """
+
+    dist: BaseDistribution
+    is_installed = False
+
+    def __init__(
+        self,
+        link: Link,
+        source_link: Link,
+        ireq: InstallRequirement,
+        factory: "Factory",
+        name: Optional[NormalizedName] = None,
+        version: Optional[CandidateVersion] = None,
+    ) -> None:
+        self._link = link
+        self._source_link = source_link
+        self._factory = factory
+        self._ireq = ireq
+        self._name = name
+        self._version = version
+        self.dist = self._prepare()
+
+    def __str__(self) -> str:
+        return f"{self.name} {self.version}"
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({str(self._link)!r})"
+
+    def __hash__(self) -> int:
+        return hash((self.__class__, self._link))
+
+    def __eq__(self, other: Any) -> bool:
+        if isinstance(other, self.__class__):
+            return links_equivalent(self._link, other._link)
+        return False
+
+    @property
+    def source_link(self) -> Optional[Link]:
+        return self._source_link
+
+    @property
+    def project_name(self) -> NormalizedName:
+        """The normalised name of the project the candidate refers to"""
+        if self._name is None:
+            self._name = self.dist.canonical_name
+        return self._name
+
+    @property
+    def name(self) -> str:
+        return self.project_name
+
+    @property
+    def version(self) -> CandidateVersion:
+        if self._version is None:
+            self._version = self.dist.version
+        return self._version
+
+    def format_for_error(self) -> str:
+        return "{} {} (from {})".format(
+            self.name,
+            self.version,
+            self._link.file_path if self._link.is_file else self._link,
+        )
+
+    def _prepare_distribution(self) -> BaseDistribution:
+        raise NotImplementedError("Override in subclass")
+
+    def _check_metadata_consistency(self, dist: BaseDistribution) -> None:
+        """Check for consistency of project name and version of dist."""
+        if self._name is not None and self._name != dist.canonical_name:
+            raise MetadataInconsistent(
+                self._ireq,
+                "name",
+                self._name,
+                dist.canonical_name,
+            )
+        if self._version is not None and self._version != dist.version:
+            raise MetadataInconsistent(
+                self._ireq,
+                "version",
+                str(self._version),
+                str(dist.version),
+            )
+
+    def _prepare(self) -> BaseDistribution:
+        try:
+            dist = self._prepare_distribution()
+        except HashError as e:
+            # Provide HashError the underlying ireq that caused it. This
+            # provides context for the resulting error message to show the
+            # offending line to the user.
+            e.req = self._ireq
+            raise
+        except InstallationSubprocessError as exc:
+            # The output has been presented already, so don't duplicate it.
+            exc.context = "See above for output."
+            raise
+
+        self._check_metadata_consistency(dist)
+        return dist
+
+    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
+        requires = self.dist.iter_dependencies() if with_requires else ()
+        for r in requires:
+            yield from self._factory.make_requirements_from_spec(str(r), self._ireq)
+        yield self._factory.make_requires_python_requirement(self.dist.requires_python)
+
+    def get_install_requirement(self) -> Optional[InstallRequirement]:
+        return self._ireq
+
+
+class LinkCandidate(_InstallRequirementBackedCandidate):
+    is_editable = False
+
+    def __init__(
+        self,
+        link: Link,
+        template: InstallRequirement,
+        factory: "Factory",
+        name: Optional[NormalizedName] = None,
+        version: Optional[CandidateVersion] = None,
+    ) -> None:
+        source_link = link
+        cache_entry = factory.get_wheel_cache_entry(source_link, name)
+        if cache_entry is not None:
+            logger.debug("Using cached wheel link: %s", cache_entry.link)
+            link = cache_entry.link
+        ireq = make_install_req_from_link(link, template)
+        assert ireq.link == link
+        if ireq.link.is_wheel and not ireq.link.is_file:
+            wheel = Wheel(ireq.link.filename)
+            wheel_name = canonicalize_name(wheel.name)
+            assert name == wheel_name, f"{name!r} != {wheel_name!r} for wheel"
+            # Version may not be present for PEP 508 direct URLs
+            if version is not None:
+                wheel_version = Version(wheel.version)
+                assert version == wheel_version, "{!r} != {!r} for wheel {}".format(
+                    version, wheel_version, name
+                )
+
+        if cache_entry is not None:
+            assert ireq.link.is_wheel
+            assert ireq.link.is_file
+            if cache_entry.persistent and template.link is template.original_link:
+                ireq.cached_wheel_source_link = source_link
+            if cache_entry.origin is not None:
+                ireq.download_info = cache_entry.origin
+            else:
+                # Legacy cache entry that does not have origin.json.
+                # download_info may miss the archive_info.hashes field.
+                ireq.download_info = direct_url_from_link(
+                    source_link, link_is_in_wheel_cache=cache_entry.persistent
+                )
+
+        super().__init__(
+            link=link,
+            source_link=source_link,
+            ireq=ireq,
+            factory=factory,
+            name=name,
+            version=version,
+        )
+
+    def _prepare_distribution(self) -> BaseDistribution:
+        preparer = self._factory.preparer
+        return preparer.prepare_linked_requirement(self._ireq, parallel_builds=True)
+
+
+class EditableCandidate(_InstallRequirementBackedCandidate):
+    is_editable = True
+
+    def __init__(
+        self,
+        link: Link,
+        template: InstallRequirement,
+        factory: "Factory",
+        name: Optional[NormalizedName] = None,
+        version: Optional[CandidateVersion] = None,
+    ) -> None:
+        super().__init__(
+            link=link,
+            source_link=link,
+            ireq=make_install_req_from_editable(link, template),
+            factory=factory,
+            name=name,
+            version=version,
+        )
+
+    def _prepare_distribution(self) -> BaseDistribution:
+        return self._factory.preparer.prepare_editable_requirement(self._ireq)
+
+
+class AlreadyInstalledCandidate(Candidate):
+    is_installed = True
+    source_link = None
+
+    def __init__(
+        self,
+        dist: BaseDistribution,
+        template: InstallRequirement,
+        factory: "Factory",
+    ) -> None:
+        self.dist = dist
+        self._ireq = _make_install_req_from_dist(dist, template)
+        self._factory = factory
+        self._version = None
+
+        # This is just logging some messages, so we can do it eagerly.
+        # The returned dist would be exactly the same as self.dist because we
+        # set satisfied_by in _make_install_req_from_dist.
+        # TODO: Supply reason based on force_reinstall and upgrade_strategy.
+        skip_reason = "already satisfied"
+        factory.preparer.prepare_installed_requirement(self._ireq, skip_reason)
+
+    def __str__(self) -> str:
+        return str(self.dist)
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({self.dist!r})"
+
+    def __hash__(self) -> int:
+        return hash((self.__class__, self.name, self.version))
+
+    def __eq__(self, other: Any) -> bool:
+        if isinstance(other, self.__class__):
+            return self.name == other.name and self.version == other.version
+        return False
+
+    @property
+    def project_name(self) -> NormalizedName:
+        return self.dist.canonical_name
+
+    @property
+    def name(self) -> str:
+        return self.project_name
+
+    @property
+    def version(self) -> CandidateVersion:
+        if self._version is None:
+            self._version = self.dist.version
+        return self._version
+
+    @property
+    def is_editable(self) -> bool:
+        return self.dist.editable
+
+    def format_for_error(self) -> str:
+        return f"{self.name} {self.version} (Installed)"
+
+    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
+        if not with_requires:
+            return
+        for r in self.dist.iter_dependencies():
+            yield from self._factory.make_requirements_from_spec(str(r), self._ireq)
+
+    def get_install_requirement(self) -> Optional[InstallRequirement]:
+        return None
+
+
+class ExtrasCandidate(Candidate):
+    """A candidate that has 'extras', indicating additional dependencies.
+
+    Requirements can be for a project with dependencies, something like
+    foo[extra].  The extras don't affect the project/version being installed
+    directly, but indicate that we need additional dependencies. We model that
+    by having an artificial ExtrasCandidate that wraps the "base" candidate.
+
+    The ExtrasCandidate differs from the base in the following ways:
+
+    1. It has a unique name, of the form foo[extra]. This causes the resolver
+       to treat it as a separate node in the dependency graph.
+    2. When we're getting the candidate's dependencies,
+       a) We specify that we want the extra dependencies as well.
+       b) We add a dependency on the base candidate.
+          See below for why this is needed.
+    3. We return None for the underlying InstallRequirement, as the base
+       candidate will provide it, and we don't want to end up with duplicates.
+
+    The dependency on the base candidate is needed so that the resolver can't
+    decide that it should recommend foo[extra1] version 1.0 and foo[extra2]
+    version 2.0. Having those candidates depend on foo=1.0 and foo=2.0
+    respectively forces the resolver to recognise that this is a conflict.
+    """
+
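+    # Rough sketch of the graph shape this produces (names illustrative only):
+    #
+    #     foo[extra1]==1.0 --> foo==1.0 <-- foo[extra2]==1.0
+    #
+    # Both extras candidates depend on exactly the same base, so the resolver
+    # cannot satisfy two extras with two different versions of foo.
+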
+    def __init__(
+        self,
+        base: BaseCandidate,
+        extras: FrozenSet[str],
+        *,
+        comes_from: Optional[InstallRequirement] = None,
+    ) -> None:
+        """
+        :param comes_from: the InstallRequirement that led to this candidate if it
+            differs from the base's InstallRequirement. This will often be the
+            case in the sense that this candidate's requirement has the extras
+            while the base's does not. Unlike the InstallRequirement backed
+            candidates, this requirement is used solely for reporting purposes,
+            it does not do any leg work.
+        """
+        self.base = base
+        self.extras = frozenset(canonicalize_name(e) for e in extras)
+        # If any extras are requested in their non-normalized forms, keep track
+        # of their raw values. This is needed when we look up dependencies
+        # since PEP 685 has not been implemented for marker-matching, and using
+        # the non-normalized extra for lookup ensures the user can select a
+        # non-normalized extra in a package with its non-normalized form.
+        # TODO: Remove this attribute when packaging is upgraded to support the
+        # marker comparison logic specified in PEP 685.
+        self._unnormalized_extras = extras.difference(self.extras)
+        self._comes_from = comes_from if comes_from is not None else self.base._ireq
+
+    def __str__(self) -> str:
+        name, rest = str(self.base).split(" ", 1)
+        return "{}[{}] {}".format(name, ",".join(self.extras), rest)
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}(base={self.base!r}, extras={self.extras!r})"
+
+    def __hash__(self) -> int:
+        return hash((self.base, self.extras))
+
+    def __eq__(self, other: Any) -> bool:
+        if isinstance(other, self.__class__):
+            return self.base == other.base and self.extras == other.extras
+        return False
+
+    @property
+    def project_name(self) -> NormalizedName:
+        return self.base.project_name
+
+    @property
+    def name(self) -> str:
+        """The normalised name of the project the candidate refers to"""
+        return format_name(self.base.project_name, self.extras)
+
+    @property
+    def version(self) -> CandidateVersion:
+        return self.base.version
+
+    def format_for_error(self) -> str:
+        return "{} [{}]".format(
+            self.base.format_for_error(), ", ".join(sorted(self.extras))
+        )
+
+    @property
+    def is_installed(self) -> bool:
+        return self.base.is_installed
+
+    @property
+    def is_editable(self) -> bool:
+        return self.base.is_editable
+
+    @property
+    def source_link(self) -> Optional[Link]:
+        return self.base.source_link
+
+    def _warn_invalid_extras(
+        self,
+        requested: FrozenSet[str],
+        valid: FrozenSet[str],
+    ) -> None:
+        """Emit warnings for invalid extras being requested.
+
+        This emits a warning for each requested extra that is not in the
+        candidate's ``Provides-Extra`` list.
+        """
+        invalid_extras_to_warn = frozenset(
+            extra
+            for extra in requested
+            if extra not in valid
+            # If an extra is requested in an unnormalized form, skip warning
+            # about the normalized form being missing.
+            and extra in self.extras
+        )
+        if not invalid_extras_to_warn:
+            return
+        for extra in sorted(invalid_extras_to_warn):
+            logger.warning(
+                "%s %s does not provide the extra '%s'",
+                self.base.name,
+                self.version,
+                extra,
+            )
+
+    def _calculate_valid_requested_extras(self) -> FrozenSet[str]:
+        """Get a list of valid extras requested by this candidate.
+
+        The user (or upstream dependant) may have specified extras that the
+        candidate doesn't support. Any unsupported extras are dropped, and each
+        causes a warning to be logged here.
+        """
+        requested_extras = self.extras.union(self._unnormalized_extras)
+        valid_extras = frozenset(
+            extra
+            for extra in requested_extras
+            if self.base.dist.is_extra_provided(extra)
+        )
+        self._warn_invalid_extras(requested_extras, valid_extras)
+        return valid_extras
+
+    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
+        factory = self.base._factory
+
+        # Add a dependency on the exact base
+        # (See note 2b in the class docstring)
+        yield factory.make_requirement_from_candidate(self.base)
+        if not with_requires:
+            return
+
+        valid_extras = self._calculate_valid_requested_extras()
+        for r in self.base.dist.iter_dependencies(valid_extras):
+            yield from factory.make_requirements_from_spec(
+                str(r),
+                self._comes_from,
+                valid_extras,
+            )
+
+    def get_install_requirement(self) -> Optional[InstallRequirement]:
+        # We don't return anything here, because we always
+        # depend on the base candidate, and we'll get the
+        # install requirement from that.
+        return None
+
+
+class RequiresPythonCandidate(Candidate):
+    is_installed = False
+    source_link = None
+
+    def __init__(self, py_version_info: Optional[Tuple[int, ...]]) -> None:
+        if py_version_info is not None:
+            version_info = normalize_version_info(py_version_info)
+        else:
+            version_info = sys.version_info[:3]
+        self._version = Version(".".join(str(c) for c in version_info))
+
+    # We don't need to implement __eq__() and __ne__() since there is always
+    # only one RequiresPythonCandidate in a resolution, i.e. the host Python.
+    # The built-in object.__eq__() and object.__ne__() do exactly what we want.
+
+    def __str__(self) -> str:
+        return f"Python {self._version}"
+
+    @property
+    def project_name(self) -> NormalizedName:
+        return REQUIRES_PYTHON_IDENTIFIER
+
+    @property
+    def name(self) -> str:
+        return REQUIRES_PYTHON_IDENTIFIER
+
+    @property
+    def version(self) -> CandidateVersion:
+        return self._version
+
+    def format_for_error(self) -> str:
+        return f"Python {self.version}"
+
+    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
+        return ()
+
+    def get_install_requirement(self) -> Optional[InstallRequirement]:
+        return None
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/factory.py b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/factory.py
new file mode 100644
index 0000000..4adeb43
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/factory.py
@@ -0,0 +1,812 @@
+import contextlib
+import functools
+import logging
+from typing import (
+    TYPE_CHECKING,
+    Dict,
+    FrozenSet,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    NamedTuple,
+    Optional,
+    Sequence,
+    Set,
+    Tuple,
+    TypeVar,
+    cast,
+)
+
+from pip._vendor.packaging.requirements import InvalidRequirement
+from pip._vendor.packaging.specifiers import SpecifierSet
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+from pip._vendor.resolvelib import ResolutionImpossible
+
+from pip._internal.cache import CacheEntry, WheelCache
+from pip._internal.exceptions import (
+    DistributionNotFound,
+    InstallationError,
+    MetadataInconsistent,
+    UnsupportedPythonVersion,
+    UnsupportedWheel,
+)
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution, get_default_environment
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req.constructors import (
+    install_req_drop_extras,
+    install_req_from_link_and_ireq,
+)
+from pip._internal.req.req_install import (
+    InstallRequirement,
+    check_invalid_constraint_type,
+)
+from pip._internal.resolution.base import InstallRequirementProvider
+from pip._internal.utils.compatibility_tags import get_supported
+from pip._internal.utils.hashes import Hashes
+from pip._internal.utils.packaging import get_requirement
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+from .base import Candidate, CandidateVersion, Constraint, Requirement
+from .candidates import (
+    AlreadyInstalledCandidate,
+    BaseCandidate,
+    EditableCandidate,
+    ExtrasCandidate,
+    LinkCandidate,
+    RequiresPythonCandidate,
+    as_base_candidate,
+)
+from .found_candidates import FoundCandidates, IndexCandidateInfo
+from .requirements import (
+    ExplicitRequirement,
+    RequiresPythonRequirement,
+    SpecifierRequirement,
+    SpecifierWithoutExtrasRequirement,
+    UnsatisfiableRequirement,
+)
+
+if TYPE_CHECKING:
+    from typing import Protocol
+
+    class ConflictCause(Protocol):
+        requirement: RequiresPythonRequirement
+        parent: Candidate
+
+
+logger = logging.getLogger(__name__)
+
+C = TypeVar("C")
+Cache = Dict[Link, C]
+
+
+class CollectedRootRequirements(NamedTuple):
+    requirements: List[Requirement]
+    constraints: Dict[str, Constraint]
+    user_requested: Dict[str, int]
+
+
+class Factory:
+    def __init__(
+        self,
+        finder: PackageFinder,
+        preparer: RequirementPreparer,
+        make_install_req: InstallRequirementProvider,
+        wheel_cache: Optional[WheelCache],
+        use_user_site: bool,
+        force_reinstall: bool,
+        ignore_installed: bool,
+        ignore_requires_python: bool,
+        py_version_info: Optional[Tuple[int, ...]] = None,
+    ) -> None:
+        self._finder = finder
+        self.preparer = preparer
+        self._wheel_cache = wheel_cache
+        self._python_candidate = RequiresPythonCandidate(py_version_info)
+        self._make_install_req_from_spec = make_install_req
+        self._use_user_site = use_user_site
+        self._force_reinstall = force_reinstall
+        self._ignore_requires_python = ignore_requires_python
+
+        self._build_failures: Cache[InstallationError] = {}
+        self._link_candidate_cache: Cache[LinkCandidate] = {}
+        self._editable_candidate_cache: Cache[EditableCandidate] = {}
+        self._installed_candidate_cache: Dict[str, AlreadyInstalledCandidate] = {}
+        self._extras_candidate_cache: Dict[
+            Tuple[int, FrozenSet[NormalizedName]], ExtrasCandidate
+        ] = {}
+
+        if not ignore_installed:
+            env = get_default_environment()
+            self._installed_dists = {
+                dist.canonical_name: dist
+                for dist in env.iter_installed_distributions(local_only=False)
+            }
+        else:
+            self._installed_dists = {}
+
+    @property
+    def force_reinstall(self) -> bool:
+        return self._force_reinstall
+
+    def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None:
+        if not link.is_wheel:
+            return
+        wheel = Wheel(link.filename)
+        if wheel.supported(self._finder.target_python.get_unsorted_tags()):
+            return
+        msg = f"{link.filename} is not a supported wheel on this platform."
+        raise UnsupportedWheel(msg)
+
+    def _make_extras_candidate(
+        self,
+        base: BaseCandidate,
+        extras: FrozenSet[str],
+        *,
+        comes_from: Optional[InstallRequirement] = None,
+    ) -> ExtrasCandidate:
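+        # Base candidates are cached by this factory (per link, or per installed
+        # name), so ``id(base)`` is a stable key for the lifetime of a resolution;
+        # pairing it with the normalized extras deduplicates the wrapper objects.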
+        cache_key = (id(base), frozenset(canonicalize_name(e) for e in extras))
+        try:
+            candidate = self._extras_candidate_cache[cache_key]
+        except KeyError:
+            candidate = ExtrasCandidate(base, extras, comes_from=comes_from)
+            self._extras_candidate_cache[cache_key] = candidate
+        return candidate
+
+    def _make_candidate_from_dist(
+        self,
+        dist: BaseDistribution,
+        extras: FrozenSet[str],
+        template: InstallRequirement,
+    ) -> Candidate:
+        try:
+            base = self._installed_candidate_cache[dist.canonical_name]
+        except KeyError:
+            base = AlreadyInstalledCandidate(dist, template, factory=self)
+            self._installed_candidate_cache[dist.canonical_name] = base
+        if not extras:
+            return base
+        return self._make_extras_candidate(base, extras, comes_from=template)
+
+    def _make_candidate_from_link(
+        self,
+        link: Link,
+        extras: FrozenSet[str],
+        template: InstallRequirement,
+        name: Optional[NormalizedName],
+        version: Optional[CandidateVersion],
+    ) -> Optional[Candidate]:
+        base: Optional[BaseCandidate] = self._make_base_candidate_from_link(
+            link, template, name, version
+        )
+        if not extras or base is None:
+            return base
+        return self._make_extras_candidate(base, extras, comes_from=template)
+
+    def _make_base_candidate_from_link(
+        self,
+        link: Link,
+        template: InstallRequirement,
+        name: Optional[NormalizedName],
+        version: Optional[CandidateVersion],
+    ) -> Optional[BaseCandidate]:
+        # TODO: Check already installed candidate, and use it if the link and
+        # editable flag match.
+
+        if link in self._build_failures:
+            # We already tried this candidate before, and it does not build.
+            # Don't bother trying again.
+            return None
+
+        if template.editable:
+            if link not in self._editable_candidate_cache:
+                try:
+                    self._editable_candidate_cache[link] = EditableCandidate(
+                        link,
+                        template,
+                        factory=self,
+                        name=name,
+                        version=version,
+                    )
+                except MetadataInconsistent as e:
+                    logger.info(
+                        "Discarding [blue underline]%s[/]: [yellow]%s[reset]",
+                        link,
+                        e,
+                        extra={"markup": True},
+                    )
+                    self._build_failures[link] = e
+                    return None
+
+            return self._editable_candidate_cache[link]
+        else:
+            if link not in self._link_candidate_cache:
+                try:
+                    self._link_candidate_cache[link] = LinkCandidate(
+                        link,
+                        template,
+                        factory=self,
+                        name=name,
+                        version=version,
+                    )
+                except MetadataInconsistent as e:
+                    logger.info(
+                        "Discarding [blue underline]%s[/]: [yellow]%s[reset]",
+                        link,
+                        e,
+                        extra={"markup": True},
+                    )
+                    self._build_failures[link] = e
+                    return None
+            return self._link_candidate_cache[link]
+
+    def _iter_found_candidates(
+        self,
+        ireqs: Sequence[InstallRequirement],
+        specifier: SpecifierSet,
+        hashes: Hashes,
+        prefers_installed: bool,
+        incompatible_ids: Set[int],
+    ) -> Iterable[Candidate]:
+        if not ireqs:
+            return ()
+
+        # The InstallRequirement implementation requires us to give it a
+        # "template". Here we just choose the first requirement to represent
+        # all of them.
+        # Hopefully the Project model can correct this mismatch in the future.
+        template = ireqs[0]
+        assert template.req, "Candidates found on index must be PEP 508"
+        name = canonicalize_name(template.req.name)
+
+        extras: FrozenSet[str] = frozenset()
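+        # Combine information from every requirement for this name: specifiers
+        # are intersected and hashes merged, while the requested extras are
+        # unioned across requirements.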
+        for ireq in ireqs:
+            assert ireq.req, "Candidates found on index must be PEP 508"
+            specifier &= ireq.req.specifier
+            hashes &= ireq.hashes(trust_internet=False)
+            extras |= frozenset(ireq.extras)
+
+        def _get_installed_candidate() -> Optional[Candidate]:
+            """Get the candidate for the currently-installed version."""
+            # If --force-reinstall is set, we want the version from the index
+            # instead, so we "pretend" there is nothing installed.
+            if self._force_reinstall:
+                return None
+            try:
+                installed_dist = self._installed_dists[name]
+            except KeyError:
+                return None
+            # Don't use the installed distribution if its version does not fit
+            # the current dependency graph.
+            if not specifier.contains(installed_dist.version, prereleases=True):
+                return None
+            candidate = self._make_candidate_from_dist(
+                dist=installed_dist,
+                extras=extras,
+                template=template,
+            )
+            # The candidate is a known incompatibility. Don't use it.
+            if id(candidate) in incompatible_ids:
+                return None
+            return candidate
+
+        def iter_index_candidate_infos() -> Iterator[IndexCandidateInfo]:
+            result = self._finder.find_best_candidate(
+                project_name=name,
+                specifier=specifier,
+                hashes=hashes,
+            )
+            icans = list(result.iter_applicable())
+
+            # PEP 592: Yanked releases are ignored unless the specifier
+            # explicitly pins a version (via '==' or '===') that can be
+            # solely satisfied by a yanked release.
+            all_yanked = all(ican.link.is_yanked for ican in icans)
+
+            def is_pinned(specifier: SpecifierSet) -> bool:
+                for sp in specifier:
+                    if sp.operator == "===":
+                        return True
+                    if sp.operator != "==":
+                        continue
+                    if sp.version.endswith(".*"):
+                        continue
+                    return True
+                return False
+
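+            # e.g. "==1.2.3" and "===1.2.3" count as pinned, while "==1.2.*"
+            # and ">=1.2" do not.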
+            pinned = is_pinned(specifier)
+
+            # PackageFinder returns earlier versions first, so we reverse.
+            for ican in reversed(icans):
+                if not (all_yanked and pinned) and ican.link.is_yanked:
+                    continue
+                func = functools.partial(
+                    self._make_candidate_from_link,
+                    link=ican.link,
+                    extras=extras,
+                    template=template,
+                    name=name,
+                    version=ican.version,
+                )
+                yield ican.version, func
+
+        return FoundCandidates(
+            iter_index_candidate_infos,
+            _get_installed_candidate(),
+            prefers_installed,
+            incompatible_ids,
+        )
+
+    def _iter_explicit_candidates_from_base(
+        self,
+        base_requirements: Iterable[Requirement],
+        extras: FrozenSet[str],
+    ) -> Iterator[Candidate]:
+        """Produce explicit candidates from the base given an extra-ed package.
+
+        :param base_requirements: Requirements known to the resolver. The
+            requirements are guaranteed to not have extras.
+        :param extras: The extras to inject into the explicit requirements'
+            candidates.
+        """
+        for req in base_requirements:
+            lookup_cand, _ = req.get_candidate_lookup()
+            if lookup_cand is None:  # Not explicit.
+                continue
+            # We've stripped extras from the identifier, and should always
+            # get a BaseCandidate here, unless there's a bug elsewhere.
+            base_cand = as_base_candidate(lookup_cand)
+            assert base_cand is not None, "no extras here"
+            yield self._make_extras_candidate(base_cand, extras)
+
+    def _iter_candidates_from_constraints(
+        self,
+        identifier: str,
+        constraint: Constraint,
+        template: InstallRequirement,
+    ) -> Iterator[Candidate]:
+        """Produce explicit candidates from constraints.
+
+        This creates "fake" InstallRequirement objects that are basically clones
+        of what "should" be the template, but with original_link set to link.
+        """
+        for link in constraint.links:
+            self._fail_if_link_is_unsupported_wheel(link)
+            candidate = self._make_base_candidate_from_link(
+                link,
+                template=install_req_from_link_and_ireq(link, template),
+                name=canonicalize_name(identifier),
+                version=None,
+            )
+            if candidate:
+                yield candidate
+
+    def find_candidates(
+        self,
+        identifier: str,
+        requirements: Mapping[str, Iterable[Requirement]],
+        incompatibilities: Mapping[str, Iterator[Candidate]],
+        constraint: Constraint,
+        prefers_installed: bool,
+    ) -> Iterable[Candidate]:
+        # Collect basic lookup information from the requirements.
+        explicit_candidates: Set[Candidate] = set()
+        ireqs: List[InstallRequirement] = []
+        for req in requirements[identifier]:
+            cand, ireq = req.get_candidate_lookup()
+            if cand is not None:
+                explicit_candidates.add(cand)
+            if ireq is not None:
+                ireqs.append(ireq)
+
+        # If the current identifier contains extras, add requires and explicit
+        # candidates from entries from extra-less identifier.
+        with contextlib.suppress(InvalidRequirement):
+            parsed_requirement = get_requirement(identifier)
+            if parsed_requirement.name != identifier:
+                explicit_candidates.update(
+                    self._iter_explicit_candidates_from_base(
+                        requirements.get(parsed_requirement.name, ()),
+                        frozenset(parsed_requirement.extras),
+                    ),
+                )
+                for req in requirements.get(parsed_requirement.name, []):
+                    _, ireq = req.get_candidate_lookup()
+                    if ireq is not None:
+                        ireqs.append(ireq)
+
+        # Add explicit candidates from constraints. We only do this if there are
+        # known ireqs, which represent requirements not already explicit. If
+        # there are no ireqs, we're constraining already-explicit requirements,
+        # which is handled later when we return the explicit candidates.
+        if ireqs:
+            try:
+                explicit_candidates.update(
+                    self._iter_candidates_from_constraints(
+                        identifier,
+                        constraint,
+                        template=ireqs[0],
+                    ),
+                )
+            except UnsupportedWheel:
+                # If we're constrained to install a wheel incompatible with the
+                # target architecture, no candidates will ever be valid.
+                return ()
+
+        # Since we cache all the candidates, incompatibility identification
+        # can be made quicker by comparing only the id() values.
+        incompat_ids = {id(c) for c in incompatibilities.get(identifier, ())}
+
+        # If none of the requirements want an explicit candidate, we can ask
+        # the finder for candidates.
+        if not explicit_candidates:
+            return self._iter_found_candidates(
+                ireqs,
+                constraint.specifier,
+                constraint.hashes,
+                prefers_installed,
+                incompat_ids,
+            )
+
+        return (
+            c
+            for c in explicit_candidates
+            if id(c) not in incompat_ids
+            and constraint.is_satisfied_by(c)
+            and all(req.is_satisfied_by(c) for req in requirements[identifier])
+        )
+
+    def _make_requirements_from_install_req(
+        self, ireq: InstallRequirement, requested_extras: Iterable[str]
+    ) -> Iterator[Requirement]:
+        """
+        Returns requirement objects associated with the given InstallRequirement. In
+        most cases this will be a single object but the following special cases exist:
+            - the InstallRequirement has markers that do not apply -> result is empty
+            - the InstallRequirement has both a constraint (or link) and extras
+                -> result is split into two requirement objects: one with the constraint
+                (or link) and one with the extra. This allows centralized constraint
+                handling for the base, resulting in fewer candidate rejections.
+        """
+        if not ireq.match_markers(requested_extras):
+            logger.info(
+                "Ignoring %s: markers '%s' don't match your environment",
+                ireq.name,
+                ireq.markers,
+            )
+        elif not ireq.link:
+            if ireq.extras and ireq.req is not None and ireq.req.specifier:
+                yield SpecifierWithoutExtrasRequirement(ireq)
+            yield SpecifierRequirement(ireq)
+        else:
+            self._fail_if_link_is_unsupported_wheel(ireq.link)
+            # Always make the link candidate for the base requirement to make it
+            # available to `find_candidates` for explicit candidate lookup for any
+            # set of extras.
+            # The extras are required separately via a second requirement.
+            cand = self._make_base_candidate_from_link(
+                ireq.link,
+                template=install_req_drop_extras(ireq) if ireq.extras else ireq,
+                name=canonicalize_name(ireq.name) if ireq.name else None,
+                version=None,
+            )
+            if cand is None:
+                # There's no way we can satisfy a URL requirement if the underlying
+                # candidate fails to build. An unnamed URL must be user-supplied, so
+                # we fail eagerly. If the URL is named, an unsatisfiable requirement
+                # can make the resolver do the right thing, either backtrack (and
+                # maybe find some other requirement that's buildable) or raise a
+                # ResolutionImpossible eventually.
+                if not ireq.name:
+                    raise self._build_failures[ireq.link]
+                yield UnsatisfiableRequirement(canonicalize_name(ireq.name))
+            else:
+                # require the base from the link
+                yield self.make_requirement_from_candidate(cand)
+                if ireq.extras:
+                    # require the extras on top of the base candidate
+                    yield self.make_requirement_from_candidate(
+                        self._make_extras_candidate(cand, frozenset(ireq.extras))
+                    )
+
+    def collect_root_requirements(
+        self, root_ireqs: List[InstallRequirement]
+    ) -> CollectedRootRequirements:
+        collected = CollectedRootRequirements([], {}, {})
+        for i, ireq in enumerate(root_ireqs):
+            if ireq.constraint:
+                # Ensure we only accept valid constraints
+                problem = check_invalid_constraint_type(ireq)
+                if problem:
+                    raise InstallationError(problem)
+                if not ireq.match_markers():
+                    continue
+                assert ireq.name, "Constraint must be named"
+                name = canonicalize_name(ireq.name)
+                if name in collected.constraints:
+                    collected.constraints[name] &= ireq
+                else:
+                    collected.constraints[name] = Constraint.from_ireq(ireq)
+            else:
+                reqs = list(
+                    self._make_requirements_from_install_req(
+                        ireq,
+                        requested_extras=(),
+                    )
+                )
+                if not reqs:
+                    continue
+                template = reqs[0]
+                if ireq.user_supplied and template.name not in collected.user_requested:
+                    collected.user_requested[template.name] = i
+                collected.requirements.extend(reqs)
+        # Put requirements with extras at the end of the root requires. This does not
+        # affect resolvelib's picking preference but it does affect its initial criteria
+        # population: by putting extras at the end we enable the candidate finder to
+        # present resolvelib with a smaller set of candidates, already
+        # taking into account any non-transient constraints on the associated base. This
+        # means resolvelib will have fewer candidates to visit and reject.
+        # Python's list sort is stable, meaning relative order is kept for objects with
+        # the same key.
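+        # For example, [foo[extra], foo, bar] sorts to [foo, bar, foo[extra]],
+        # since only the extras entry has name != project_name.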
+        collected.requirements.sort(key=lambda r: r.name != r.project_name)
+        return collected
+
+    def make_requirement_from_candidate(
+        self, candidate: Candidate
+    ) -> ExplicitRequirement:
+        return ExplicitRequirement(candidate)
+
+    def make_requirements_from_spec(
+        self,
+        specifier: str,
+        comes_from: Optional[InstallRequirement],
+        requested_extras: Iterable[str] = (),
+    ) -> Iterator[Requirement]:
+        """
+        Returns requirement objects associated with the given specifier. In most cases
+        this will be a single object but the following special cases exist:
+            - the specifier has markers that do not apply -> result is empty
+            - the specifier has both a constraint and extras -> result is split
+                into two requirement objects: one with the constraint and one with the
+                extra. This allows centralized constraint handling for the base,
+                resulting in fewer candidate rejections.
+        """
+        ireq = self._make_install_req_from_spec(specifier, comes_from)
+        return self._make_requirements_from_install_req(ireq, requested_extras)
+
+    def make_requires_python_requirement(
+        self,
+        specifier: SpecifierSet,
+    ) -> Optional[Requirement]:
+        if self._ignore_requires_python:
+            return None
+        # Don't bother creating a dependency for an empty Requires-Python.
+        if not str(specifier):
+            return None
+        return RequiresPythonRequirement(specifier, self._python_candidate)
+
+    def get_wheel_cache_entry(
+        self, link: Link, name: Optional[str]
+    ) -> Optional[CacheEntry]:
+        """Look up the link in the wheel cache.
+
+        If ``preparer.require_hashes`` is True, don't use the wheel cache,
+        because cached wheels, always built locally, have different hashes
+        than the files downloaded from the index server and thus throw false
+        hash mismatches. Furthermore, cached wheels at present have
+        nondeterministic contents due to file modification times.
+        """
+        if self._wheel_cache is None:
+            return None
+        return self._wheel_cache.get_cache_entry(
+            link=link,
+            package_name=name,
+            supported_tags=get_supported(),
+        )
+
+    def get_dist_to_uninstall(self, candidate: Candidate) -> Optional[BaseDistribution]:
+        # TODO: Are there more cases this needs to return True? Editable?
+        dist = self._installed_dists.get(candidate.project_name)
+        if dist is None:  # Not installed, no uninstallation required.
+            return None
+
+        # We're installing into global site. The current installation must
+        # be uninstalled, no matter it's in global or user site, because the
+        # user site installation has precedence over global.
+        if not self._use_user_site:
+            return dist
+
+        # We're installing into user site. Remove the user site installation.
+        if dist.in_usersite:
+            return dist
+
+        # We're installing into user site, but the installed incompatible
+        # package is in global site. We can't uninstall that, and would let
+        # the new user installation "shadow" it. But shadowing won't work
+        # in virtual environments, so we error out.
+        if running_under_virtualenv() and dist.in_site_packages:
+            message = (
+                f"Will not install to the user site because it will lack "
+                f"sys.path precedence to {dist.raw_name} in {dist.location}"
+            )
+            raise InstallationError(message)
+        return None
+
+    def _report_requires_python_error(
+        self, causes: Sequence["ConflictCause"]
+    ) -> UnsupportedPythonVersion:
+        assert causes, "Requires-Python error reported with no cause"
+
+        version = self._python_candidate.version
+
+        if len(causes) == 1:
+            specifier = str(causes[0].requirement.specifier)
+            message = (
+                f"Package {causes[0].parent.name!r} requires a different "
+                f"Python: {version} not in {specifier!r}"
+            )
+            return UnsupportedPythonVersion(message)
+
+        message = f"Packages require a different Python. {version} not in:"
+        for cause in causes:
+            package = cause.parent.format_for_error()
+            specifier = str(cause.requirement.specifier)
+            message += f"\n{specifier!r} (required by {package})"
+        return UnsupportedPythonVersion(message)
+
+    def _report_single_requirement_conflict(
+        self, req: Requirement, parent: Optional[Candidate]
+    ) -> DistributionNotFound:
+        if parent is None:
+            req_disp = str(req)
+        else:
+            req_disp = f"{req} (from {parent.name})"
+
+        cands = self._finder.find_all_candidates(req.project_name)
+        skipped_by_requires_python = self._finder.requires_python_skipped_reasons()
+
+        versions_set: Set[CandidateVersion] = set()
+        yanked_versions_set: Set[CandidateVersion] = set()
+        for c in cands:
+            is_yanked = c.link.is_yanked if c.link else False
+            if is_yanked:
+                yanked_versions_set.add(c.version)
+            else:
+                versions_set.add(c.version)
+
+        versions = [str(v) for v in sorted(versions_set)]
+        yanked_versions = [str(v) for v in sorted(yanked_versions_set)]
+
+        if yanked_versions:
+            # Saying "version X is yanked" isn't entirely accurate.
+            # https://github.com/pypa/pip/issues/11745#issuecomment-1402805842
+            logger.critical(
+                "Ignored the following yanked versions: %s",
+                ", ".join(yanked_versions) or "none",
+            )
+        if skipped_by_requires_python:
+            logger.critical(
+                "Ignored the following versions that require a different python "
+                "version: %s",
+                "; ".join(skipped_by_requires_python) or "none",
+            )
+        logger.critical(
+            "Could not find a version that satisfies the requirement %s "
+            "(from versions: %s)",
+            req_disp,
+            ", ".join(versions) or "none",
+        )
+        if str(req) == "requirements.txt":
+            logger.info(
+                "HINT: You are attempting to install a package literally "
+                'named "requirements.txt" (which cannot exist). Consider '
+                "using the '-r' flag to install the packages listed in "
+                "requirements.txt"
+            )
+
+        return DistributionNotFound(f"No matching distribution found for {req}")
+
+    def get_installation_error(
+        self,
+        e: "ResolutionImpossible[Requirement, Candidate]",
+        constraints: Dict[str, Constraint],
+    ) -> InstallationError:
+        assert e.causes, "Installation error reported with no cause"
+
+        # If one of the things we can't solve is "we need Python X.Y",
+        # that is what we report.
+        requires_python_causes = [
+            cause
+            for cause in e.causes
+            if isinstance(cause.requirement, RequiresPythonRequirement)
+            and not cause.requirement.is_satisfied_by(self._python_candidate)
+        ]
+        if requires_python_causes:
+            # The comprehension above makes sure all Requirement instances are
+            # RequiresPythonRequirement, so let's cast for convenience.
+            return self._report_requires_python_error(
+                cast("Sequence[ConflictCause]", requires_python_causes),
+            )
+
+        # Otherwise, we have a set of causes which can't all be satisfied
+        # at once.
+
+        # The simplest case is when we have *one* cause that can't be
+        # satisfied. We just report that case.
+        if len(e.causes) == 1:
+            req, parent = e.causes[0]
+            if req.name not in constraints:
+                return self._report_single_requirement_conflict(req, parent)
+
+        # OK, we now have a list of requirements that can't all be
+        # satisfied at once.
+
+        # A couple of formatting helpers
+        def text_join(parts: List[str]) -> str:
+            if len(parts) == 1:
+                return parts[0]
+
+            return ", ".join(parts[:-1]) + " and " + parts[-1]
+
+        def describe_trigger(parent: Candidate) -> str:
+            ireq = parent.get_install_requirement()
+            if not ireq or not ireq.comes_from:
+                return f"{parent.name}=={parent.version}"
+            if isinstance(ireq.comes_from, InstallRequirement):
+                return str(ireq.comes_from.name)
+            return str(ireq.comes_from)
+
+        triggers = set()
+        for req, parent in e.causes:
+            if parent is None:
+                # This is a root requirement, so we can report it directly
+                trigger = req.format_for_error()
+            else:
+                trigger = describe_trigger(parent)
+            triggers.add(trigger)
+
+        if triggers:
+            info = text_join(sorted(triggers))
+        else:
+            info = "the requested packages"
+
+        msg = (
+            f"Cannot install {info} because these package versions "
+            "have conflicting dependencies."
+        )
+        logger.critical(msg)
+        msg = "\nThe conflict is caused by:"
+
+        relevant_constraints = set()
+        for req, parent in e.causes:
+            if req.name in constraints:
+                relevant_constraints.add(req.name)
+            msg = msg + "\n    "
+            if parent:
+                msg = msg + f"{parent.name} {parent.version} depends on "
+            else:
+                msg = msg + "The user requested "
+            msg = msg + req.format_for_error()
+        for key in relevant_constraints:
+            spec = constraints[key].specifier
+            msg += f"\n    The user requested (constraint) {key}{spec}"
+
+        msg = (
+            msg
+            + "\n\n"
+            + "To fix this you could try to:\n"
+            + "1. loosen the range of package versions you've specified\n"
+            + "2. remove package versions to allow pip attempt to solve "
+            + "the dependency conflict\n"
+        )
+
+        logger.info(msg)
+
+        return DistributionNotFound(
+            "ResolutionImpossible: for help visit "
+            "https://pip.pypa.io/en/latest/topics/dependency-resolution/"
+            "#dealing-with-dependency-conflicts"
+        )
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py
new file mode 100644
index 0000000..8663097
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py
@@ -0,0 +1,155 @@
+"""Utilities to lazily create and visit candidates found.
+
+Creating and visiting a candidate is a *very* costly operation. It involves
+fetching, extracting, potentially building modules from source, and verifying
+distribution metadata. It is therefore crucial for performance to keep
+everything here lazy all the way down, so we only touch candidates that we
+absolutely need, and not "download the world" when we only need one version of
+something.
+"""
+
+import functools
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, Set, Tuple
+
+from pip._vendor.packaging.version import _BaseVersion
+
+from .base import Candidate
+
+IndexCandidateInfo = Tuple[_BaseVersion, Callable[[], Optional[Candidate]]]
+
+if TYPE_CHECKING:
+    SequenceCandidate = Sequence[Candidate]
+else:
+    # For compatibility: Python before 3.9 does not support using [] on the
+    # Sequence class.
+    #
+    # >>> from collections.abc import Sequence
+    # >>> Sequence[str]
+    # Traceback (most recent call last):
+    #   File "", line 1, in 
+    # TypeError: 'ABCMeta' object is not subscriptable
+    #
+    # TODO: Remove this block after dropping Python 3.8 support.
+    SequenceCandidate = Sequence
+
+
+def _iter_built(infos: Iterator[IndexCandidateInfo]) -> Iterator[Candidate]:
+    """Iterator for ``FoundCandidates``.
+
+    This iterator is used when the package is not already installed. Candidates
+    from index come later in their normal ordering.
+    """
+    versions_found: Set[_BaseVersion] = set()
+    for version, func in infos:
+        if version in versions_found:
+            continue
+        candidate = func()
+        if candidate is None:
+            continue
+        yield candidate
+        versions_found.add(version)
+
+
+def _iter_built_with_prepended(
+    installed: Candidate, infos: Iterator[IndexCandidateInfo]
+) -> Iterator[Candidate]:
+    """Iterator for ``FoundCandidates``.
+
+    This iterator is used when the resolver prefers the already-installed
+    candidate and NOT to upgrade. The installed candidate is therefore
+    always yielded first, and candidates from index come later in their
+    normal ordering, except skipped when the version is already installed.
+    """
+    yield installed
+    versions_found: Set[_BaseVersion] = {installed.version}
+    for version, func in infos:
+        if version in versions_found:
+            continue
+        candidate = func()
+        if candidate is None:
+            continue
+        yield candidate
+        versions_found.add(version)
+
+
+def _iter_built_with_inserted(
+    installed: Candidate, infos: Iterator[IndexCandidateInfo]
+) -> Iterator[Candidate]:
+    """Iterator for ``FoundCandidates``.
+
+    This iterator is used when the resolver prefers to upgrade an
+    already-installed package. Candidates from index are returned in their
+    normal ordering, except replaced when the version is already installed.
+
+    The implementation iterates through and yields other candidates, inserting
+    the installed candidate exactly once before we start yielding older or
+    equivalent candidates, or after all other candidates if they are all newer.
+    """
+    versions_found: Set[_BaseVersion] = set()
+    for version, func in infos:
+        if version in versions_found:
+            continue
+        # If the installed candidate is better, yield it first.
+        if installed.version >= version:
+            yield installed
+            versions_found.add(installed.version)
+        candidate = func()
+        if candidate is None:
+            continue
+        yield candidate
+        versions_found.add(version)
+
+    # If the installed candidate is older than all other candidates.
+    if installed.version not in versions_found:
+        yield installed
+
+
+class FoundCandidates(SequenceCandidate):
+    """A lazy sequence to provide candidates to the resolver.
+
+    The intended usage is to return this from `find_matches()` so the resolver
+    can iterate through the sequence multiple times, but only access the index
+    page when remote packages are actually needed. This improves performance
+    when suitable candidates are already installed on disk.
+    """
+
+    def __init__(
+        self,
+        get_infos: Callable[[], Iterator[IndexCandidateInfo]],
+        installed: Optional[Candidate],
+        prefers_installed: bool,
+        incompatible_ids: Set[int],
+    ):
+        self._get_infos = get_infos
+        self._installed = installed
+        self._prefers_installed = prefers_installed
+        self._incompatible_ids = incompatible_ids
+
+    def __getitem__(self, index: Any) -> Any:
+        # Implemented to satisfy the ABC check. This is not needed by the
+        # resolver, and should not be used by the provider either (for
+        # performance reasons).
+        raise NotImplementedError("don't do this")
+
+    def __iter__(self) -> Iterator[Candidate]:
+        infos = self._get_infos()
+        if not self._installed:
+            iterator = _iter_built(infos)
+        elif self._prefers_installed:
+            iterator = _iter_built_with_prepended(self._installed, infos)
+        else:
+            iterator = _iter_built_with_inserted(self._installed, infos)
+        return (c for c in iterator if id(c) not in self._incompatible_ids)
+
+    def __len__(self) -> int:
+        # Implemented to satisfy the ABC check. This is not needed by the
+        # resolver, and should not be used by the provider either (for
+        # performance reasons).
+        raise NotImplementedError("don't do this")
+
+    @functools.lru_cache(maxsize=1)
+    def __bool__(self) -> bool:
+        if self._prefers_installed and self._installed:
+            return True
+        return any(self)
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/provider.py b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/provider.py
new file mode 100644
index 0000000..315fb9c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/provider.py
@@ -0,0 +1,255 @@
+import collections
+import math
+from typing import (
+    TYPE_CHECKING,
+    Dict,
+    Iterable,
+    Iterator,
+    Mapping,
+    Sequence,
+    TypeVar,
+    Union,
+)
+
+from pip._vendor.resolvelib.providers import AbstractProvider
+
+from .base import Candidate, Constraint, Requirement
+from .candidates import REQUIRES_PYTHON_IDENTIFIER
+from .factory import Factory
+
+if TYPE_CHECKING:
+    from pip._vendor.resolvelib.providers import Preference
+    from pip._vendor.resolvelib.resolvers import RequirementInformation
+
+    PreferenceInformation = RequirementInformation[Requirement, Candidate]
+
+    _ProviderBase = AbstractProvider[Requirement, Candidate, str]
+else:
+    _ProviderBase = AbstractProvider
+
+# Notes on the relationship between the provider, the factory, and the
+# candidate and requirement classes.
+#
+# The provider is a direct implementation of the resolvelib class. Its role
+# is to deliver the API that resolvelib expects.
+#
+# Rather than work with completely abstract "requirement" and "candidate"
+# concepts as resolvelib does, pip has concrete classes implementing these two
+# ideas. The API of Requirement and Candidate objects are defined in the base
+# classes, but essentially map fairly directly to the equivalent provider
+# methods. In particular, `find_matches` and `is_satisfied_by` are
+# requirement methods, and `get_dependencies` is a candidate method.
+#
+# The factory is the interface to pip's internal mechanisms. It is stateless,
+# and is created by the resolver and held as a property of the provider. It is
+# responsible for creating Requirement and Candidate objects, and provides
+# services to those objects (access to pip's finder and preparer).
+
+
+D = TypeVar("D")
+V = TypeVar("V")
+
+
+def _get_with_identifier(
+    mapping: Mapping[str, V],
+    identifier: str,
+    default: D,
+) -> Union[D, V]:
+    """Get item from a package name lookup mapping with a resolver identifier.
+
+    This extra logic is needed when the target mapping is keyed by package
+    name, which cannot be directly looked up with an identifier (which may
+    contain requested extras). Additional logic is added to also look up a value
+    by "cleaning up" the extras from the identifier.
+    """
+    if identifier in mapping:
+        return mapping[identifier]
+    # HACK: Theoretically we should check whether this identifier is a valid
+    # "NAME[EXTRAS]" format, and parse out the name part with packaging or
+    # some regular expression. But since pip's resolver only spits out three
+    # kinds of identifiers: normalized PEP 503 names, normalized names plus
+    # extras, and Requires-Python, we can cheat a bit here.
+    name, open_bracket, _ = identifier.partition("[")
+    if open_bracket and name in mapping:
+        return mapping[name]
+    return default
+
+
+class PipProvider(_ProviderBase):
+    """Pip's provider implementation for resolvelib.
+
+    :param constraints: A mapping of constraints specified by the user. Keys
+        are canonicalized project names.
+    :param ignore_dependencies: Whether the user specified ``--no-deps``.
+    :param upgrade_strategy: The user-specified upgrade strategy.
+    :param user_requested: A mapping of canonicalized package names that the
+        user supplied for pip to install/upgrade, to their order on the
+        command line.
+    """
+
+    def __init__(
+        self,
+        factory: Factory,
+        constraints: Dict[str, Constraint],
+        ignore_dependencies: bool,
+        upgrade_strategy: str,
+        user_requested: Dict[str, int],
+    ) -> None:
+        self._factory = factory
+        self._constraints = constraints
+        self._ignore_dependencies = ignore_dependencies
+        self._upgrade_strategy = upgrade_strategy
+        self._user_requested = user_requested
+        self._known_depths: Dict[str, float] = collections.defaultdict(lambda: math.inf)
+
+    def identify(self, requirement_or_candidate: Union[Requirement, Candidate]) -> str:
+        return requirement_or_candidate.name
+
+    def get_preference(
+        self,
+        identifier: str,
+        resolutions: Mapping[str, Candidate],
+        candidates: Mapping[str, Iterator[Candidate]],
+        information: Mapping[str, Iterable["PreferenceInformation"]],
+        backtrack_causes: Sequence["PreferenceInformation"],
+    ) -> "Preference":
+        """Produce a sort key for given requirement based on preference.
+
+        The lower the return value is, the more preferred this group of
+        arguments is.
+
+        Currently pip considers the following in order:
+
+        * Prefer if any of the known requirements is "direct", e.g. points to an
+          explicit URL.
+        * If equal, prefer if any requirement is "pinned", i.e. contains
+          operator ``===`` or ``==``.
+        * If equal, calculate an approximate "depth" and resolve requirements
+          closer to the user-specified requirements first. If the depth cannot
+          be determined (e.g. due to no matching parents), it is considered
+          infinite.
+        * Order user-specified requirements by the order they are specified.
+        * If equal, prefers "non-free" requirements, i.e. contains at least one
+          operator, such as ``>=`` or ``<``.
+        * If equal, order alphabetically for consistency (helps debuggability).
+        """
+        try:
+            next(iter(information[identifier]))
+        except StopIteration:
+            # There is no information for this identifier, so there's no known
+            # candidates.
+            has_information = False
+        else:
+            has_information = True
+
+        if has_information:
+            lookups = (r.get_candidate_lookup() for r, _ in information[identifier])
+            candidate, ireqs = zip(*lookups)
+        else:
+            candidate, ireqs = None, ()
+
+        operators = [
+            specifier.operator
+            for specifier_set in (ireq.specifier for ireq in ireqs if ireq)
+            for specifier in specifier_set
+        ]
+
+        direct = candidate is not None
+        pinned = any(op[:2] == "==" for op in operators)
+        unfree = bool(operators)
+
+        try:
+            requested_order: Union[int, float] = self._user_requested[identifier]
+        except KeyError:
+            requested_order = math.inf
+            if has_information:
+                parent_depths = (
+                    self._known_depths[parent.name] if parent is not None else 0.0
+                    for _, parent in information[identifier]
+                )
+                inferred_depth = min(d for d in parent_depths) + 1.0
+            else:
+                inferred_depth = math.inf
+        else:
+            inferred_depth = 1.0
+        self._known_depths[identifier] = inferred_depth
+
+        requested_order = self._user_requested.get(identifier, math.inf)
+
+        # Requires-Python has only one candidate and the check is basically
+        # free, so we always do it first to avoid needless work if it fails.
+        requires_python = identifier == REQUIRES_PYTHON_IDENTIFIER
+
+        # Prefer the causes of backtracking on the assumption that the problem
+        # resolving the dependency tree is related to the failures that caused
+        # the backtracking
+        backtrack_cause = self.is_backtrack_cause(identifier, backtrack_causes)
+
+        return (
+            not requires_python,
+            not direct,
+            not pinned,
+            not backtrack_cause,
+            inferred_depth,
+            requested_order,
+            not unfree,
+            identifier,
+        )
+
+    def find_matches(
+        self,
+        identifier: str,
+        requirements: Mapping[str, Iterator[Requirement]],
+        incompatibilities: Mapping[str, Iterator[Candidate]],
+    ) -> Iterable[Candidate]:
+        def _eligible_for_upgrade(identifier: str) -> bool:
+            """Are upgrades allowed for this project?
+
+            This checks the upgrade strategy, and whether the project was one
+            that the user specified in the command line, in order to decide
+            whether we should upgrade if there's a newer version available.
+
+            (Note that we don't need access to the `--upgrade` flag, because
+            an upgrade strategy of "to-satisfy-only" means that `--upgrade`
+            was not specified).
+            """
+            if self._upgrade_strategy == "eager":
+                return True
+            elif self._upgrade_strategy == "only-if-needed":
+                user_order = _get_with_identifier(
+                    self._user_requested,
+                    identifier,
+                    default=None,
+                )
+                return user_order is not None
+            return False
+
+        constraint = _get_with_identifier(
+            self._constraints,
+            identifier,
+            default=Constraint.empty(),
+        )
+        return self._factory.find_candidates(
+            identifier=identifier,
+            requirements=requirements,
+            constraint=constraint,
+            prefers_installed=(not _eligible_for_upgrade(identifier)),
+            incompatibilities=incompatibilities,
+        )
+
+    def is_satisfied_by(self, requirement: Requirement, candidate: Candidate) -> bool:
+        return requirement.is_satisfied_by(candidate)
+
+    def get_dependencies(self, candidate: Candidate) -> Sequence[Requirement]:
+        with_requires = not self._ignore_dependencies
+        return [r for r in candidate.iter_dependencies(with_requires) if r is not None]
+
+    @staticmethod
+    def is_backtrack_cause(
+        identifier: str, backtrack_causes: Sequence["PreferenceInformation"]
+    ) -> bool:
+        for backtrack_cause in backtrack_causes:
+            if identifier == backtrack_cause.requirement.name:
+                return True
+            if backtrack_cause.parent and identifier == backtrack_cause.parent.name:
+                return True
+        return False
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/reporter.py b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/reporter.py
new file mode 100644
index 0000000..12adeff
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/reporter.py
@@ -0,0 +1,80 @@
+from collections import defaultdict
+from logging import getLogger
+from typing import Any, DefaultDict
+
+from pip._vendor.resolvelib.reporters import BaseReporter
+
+from .base import Candidate, Requirement
+
+logger = getLogger(__name__)
+
+
+class PipReporter(BaseReporter):
+    def __init__(self) -> None:
+        self.reject_count_by_package: DefaultDict[str, int] = defaultdict(int)
+
+        self._messages_at_reject_count = {
+            1: (
+                "pip is looking at multiple versions of {package_name} to "
+                "determine which version is compatible with other "
+                "requirements. This could take a while."
+            ),
+            8: (
+                "pip is still looking at multiple versions of {package_name} to "
+                "determine which version is compatible with other "
+                "requirements. This could take a while."
+            ),
+            13: (
+                "This is taking longer than usual. You might need to provide "
+                "the dependency resolver with stricter constraints to reduce "
+                "runtime. See https://pip.pypa.io/warnings/backtracking for "
+                "guidance. If you want to abort this run, press Ctrl + C."
+            ),
+        }
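+        # The keys above are cumulative rejection counts for a single package;
+        # each message is logged once when that count is reached.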
+
+    def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None:
+        self.reject_count_by_package[candidate.name] += 1
+
+        count = self.reject_count_by_package[candidate.name]
+        if count not in self._messages_at_reject_count:
+            return
+
+        message = self._messages_at_reject_count[count]
+        logger.info("INFO: %s", message.format(package_name=candidate.name))
+
+        msg = "Will try a different candidate, due to conflict:"
+        for req_info in criterion.information:
+            req, parent = req_info.requirement, req_info.parent
+            # Inspired by Factory.get_installation_error
+            msg += "\n    "
+            if parent:
+                msg += f"{parent.name} {parent.version} depends on "
+            else:
+                msg += "The user requested "
+            msg += req.format_for_error()
+        logger.debug(msg)
+
+
+class PipDebuggingReporter(BaseReporter):
+    """A reporter that does an info log for every event it sees."""
+
+    def starting(self) -> None:
+        logger.info("Reporter.starting()")
+
+    def starting_round(self, index: int) -> None:
+        logger.info("Reporter.starting_round(%r)", index)
+
+    def ending_round(self, index: int, state: Any) -> None:
+        logger.info("Reporter.ending_round(%r, state)", index)
+
+    def ending(self, state: Any) -> None:
+        logger.info("Reporter.ending(%r)", state)
+
+    def adding_requirement(self, requirement: Requirement, parent: Candidate) -> None:
+        logger.info("Reporter.adding_requirement(%r, %r)", requirement, parent)
+
+    def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None:
+        logger.info("Reporter.rejecting_candidate(%r, %r)", criterion, candidate)
+
+    def pinning(self, candidate: Candidate) -> None:
+        logger.info("Reporter.pinning(%r)", candidate)
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/requirements.py b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/requirements.py
new file mode 100644
index 0000000..4af4a9f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/requirements.py
@@ -0,0 +1,166 @@
+from pip._vendor.packaging.specifiers import SpecifierSet
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+
+from pip._internal.req.constructors import install_req_drop_extras
+from pip._internal.req.req_install import InstallRequirement
+
+from .base import Candidate, CandidateLookup, Requirement, format_name
+
+
+class ExplicitRequirement(Requirement):
+    def __init__(self, candidate: Candidate) -> None:
+        self.candidate = candidate
+
+    def __str__(self) -> str:
+        return str(self.candidate)
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({self.candidate!r})"
+
+    @property
+    def project_name(self) -> NormalizedName:
+        # No need to canonicalize - the candidate did this
+        return self.candidate.project_name
+
+    @property
+    def name(self) -> str:
+        # No need to canonicalize - the candidate did this
+        return self.candidate.name
+
+    def format_for_error(self) -> str:
+        return self.candidate.format_for_error()
+
+    def get_candidate_lookup(self) -> CandidateLookup:
+        return self.candidate, None
+
+    def is_satisfied_by(self, candidate: Candidate) -> bool:
+        return candidate == self.candidate
+
+
+class SpecifierRequirement(Requirement):
+    def __init__(self, ireq: InstallRequirement) -> None:
+        assert ireq.link is None, "This is a link, not a specifier"
+        self._ireq = ireq
+        self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras)
+
+    def __str__(self) -> str:
+        return str(self._ireq.req)
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({str(self._ireq.req)!r})"
+
+    @property
+    def project_name(self) -> NormalizedName:
+        assert self._ireq.req, "Specifier-backed ireq is always PEP 508"
+        return canonicalize_name(self._ireq.req.name)
+
+    @property
+    def name(self) -> str:
+        return format_name(self.project_name, self._extras)
+
+    def format_for_error(self) -> str:
+        # Convert comma-separated specifiers into "A, B, ..., F and G"
+        # This makes the specifier a bit more "human readable", without
+        # risking a change in meaning. (Hopefully! Not all edge cases have
+        # been checked)
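+        # Illustrative (hypothetical string form): if str(self) were
+        # "foo>=1.0,<2.0,!=1.5", the result would be "foo>=1.0, <2.0 and !=1.5".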
+        parts = [s.strip() for s in str(self).split(",")]
+        if len(parts) == 0:
+            return ""
+        elif len(parts) == 1:
+            return parts[0]
+
+        return ", ".join(parts[:-1]) + " and " + parts[-1]
+
+    def get_candidate_lookup(self) -> CandidateLookup:
+        return None, self._ireq
+
+    def is_satisfied_by(self, candidate: Candidate) -> bool:
+        assert candidate.name == self.name, (
+            f"Internal issue: Candidate is not for this requirement "
+            f"{candidate.name} vs {self.name}"
+        )
+        # We can safely always allow prereleases here since PackageFinder
+        # already implements the prerelease logic, and would have filtered out
+        # prerelease candidates if the user does not expect them.
+        assert self._ireq.req, "Specifier-backed ireq is always PEP 508"
+        spec = self._ireq.req.specifier
+        return spec.contains(candidate.version, prereleases=True)
+
+
+class SpecifierWithoutExtrasRequirement(SpecifierRequirement):
+    """
+    Requirement backed by an install requirement on a base package.
+    Trims extras from its install requirement if there are any.
+    """
+
+    def __init__(self, ireq: InstallRequirement) -> None:
+        assert ireq.link is None, "This is a link, not a specifier"
+        self._ireq = install_req_drop_extras(ireq)
+        self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras)
+
+
+class RequiresPythonRequirement(Requirement):
+    """A requirement representing Requires-Python metadata."""
+
+    def __init__(self, specifier: SpecifierSet, match: Candidate) -> None:
+        self.specifier = specifier
+        self._candidate = match
+
+    def __str__(self) -> str:
+        return f"Python {self.specifier}"
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({str(self.specifier)!r})"
+
+    @property
+    def project_name(self) -> NormalizedName:
+        return self._candidate.project_name
+
+    @property
+    def name(self) -> str:
+        return self._candidate.name
+
+    def format_for_error(self) -> str:
+        return str(self)
+
+    def get_candidate_lookup(self) -> CandidateLookup:
+        if self.specifier.contains(self._candidate.version, prereleases=True):
+            return self._candidate, None
+        return None, None
+
+    def is_satisfied_by(self, candidate: Candidate) -> bool:
+        assert candidate.name == self._candidate.name, "Not Python candidate"
+        # We can safely always allow prereleases here since PackageFinder
+        # already implements the prerelease logic, and would have filtered out
+        # prerelease candidates if the user does not expect them.
+        return self.specifier.contains(candidate.version, prereleases=True)
+
+
+class UnsatisfiableRequirement(Requirement):
+    """A requirement that cannot be satisfied."""
+
+    def __init__(self, name: NormalizedName) -> None:
+        self._name = name
+
+    def __str__(self) -> str:
+        return f"{self._name} (unavailable)"
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({str(self._name)!r})"
+
+    @property
+    def project_name(self) -> NormalizedName:
+        return self._name
+
+    @property
+    def name(self) -> str:
+        return self._name
+
+    def format_for_error(self) -> str:
+        return str(self)
+
+    def get_candidate_lookup(self) -> CandidateLookup:
+        return None, None
+
+    def is_satisfied_by(self, candidate: Candidate) -> bool:
+        return False
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/resolver.py b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/resolver.py
new file mode 100644
index 0000000..c12beef
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/resolver.py
@@ -0,0 +1,317 @@
+import contextlib
+import functools
+import logging
+import os
+from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, cast
+
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible
+from pip._vendor.resolvelib import Resolver as RLResolver
+from pip._vendor.resolvelib.structs import DirectedGraph
+
+from pip._internal.cache import WheelCache
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req.constructors import install_req_extend_extras
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.req.req_set import RequirementSet
+from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
+from pip._internal.resolution.resolvelib.provider import PipProvider
+from pip._internal.resolution.resolvelib.reporter import (
+    PipDebuggingReporter,
+    PipReporter,
+)
+from pip._internal.utils.packaging import get_requirement
+
+from .base import Candidate, Requirement
+from .factory import Factory
+
+if TYPE_CHECKING:
+    from pip._vendor.resolvelib.resolvers import Result as RLResult
+
+    Result = RLResult[Requirement, Candidate, str]
+
+
+logger = logging.getLogger(__name__)
+
+
+class Resolver(BaseResolver):
+    _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
+
+    def __init__(
+        self,
+        preparer: RequirementPreparer,
+        finder: PackageFinder,
+        wheel_cache: Optional[WheelCache],
+        make_install_req: InstallRequirementProvider,
+        use_user_site: bool,
+        ignore_dependencies: bool,
+        ignore_installed: bool,
+        ignore_requires_python: bool,
+        force_reinstall: bool,
+        upgrade_strategy: str,
+        py_version_info: Optional[Tuple[int, ...]] = None,
+    ):
+        super().__init__()
+        assert upgrade_strategy in self._allowed_strategies
+
+        self.factory = Factory(
+            finder=finder,
+            preparer=preparer,
+            make_install_req=make_install_req,
+            wheel_cache=wheel_cache,
+            use_user_site=use_user_site,
+            force_reinstall=force_reinstall,
+            ignore_installed=ignore_installed,
+            ignore_requires_python=ignore_requires_python,
+            py_version_info=py_version_info,
+        )
+        self.ignore_dependencies = ignore_dependencies
+        self.upgrade_strategy = upgrade_strategy
+        self._result: Optional[Result] = None
+
+    def resolve(
+        self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
+    ) -> RequirementSet:
+        collected = self.factory.collect_root_requirements(root_reqs)
+        provider = PipProvider(
+            factory=self.factory,
+            constraints=collected.constraints,
+            ignore_dependencies=self.ignore_dependencies,
+            upgrade_strategy=self.upgrade_strategy,
+            user_requested=collected.user_requested,
+        )
+        if "PIP_RESOLVER_DEBUG" in os.environ:
+            reporter: BaseReporter = PipDebuggingReporter()
+        else:
+            reporter = PipReporter()
+        resolver: RLResolver[Requirement, Candidate, str] = RLResolver(
+            provider,
+            reporter,
+        )
+
+        try:
+            limit_how_complex_resolution_can_be = 200000
+            result = self._result = resolver.resolve(
+                collected.requirements, max_rounds=limit_how_complex_resolution_can_be
+            )
+
+        except ResolutionImpossible as e:
+            error = self.factory.get_installation_error(
+                cast("ResolutionImpossible[Requirement, Candidate]", e),
+                collected.constraints,
+            )
+            raise error from e
+
+        req_set = RequirementSet(check_supported_wheels=check_supported_wheels)
+        # process candidates with extras last to ensure their base equivalent is
+        # already in the req_set if appropriate.
+        # Python's sort is stable so using a binary key function keeps relative order
+        # within both subsets.
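+        # The key is False for base candidates and True for "name[extra]"
+        # candidates, so all base packages come first, each subset keeping its
+        # original relative order.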
+        for candidate in sorted(
+            result.mapping.values(), key=lambda c: c.name != c.project_name
+        ):
+            ireq = candidate.get_install_requirement()
+            if ireq is None:
+                if candidate.name != candidate.project_name:
+                    # extend existing req's extras
+                    with contextlib.suppress(KeyError):
+                        req = req_set.get_requirement(candidate.project_name)
+                        req_set.add_named_requirement(
+                            install_req_extend_extras(
+                                req, get_requirement(candidate.name).extras
+                            )
+                        )
+                continue
+
+            # Check if there is already an installation under the same name,
+            # and set a flag for later stages to uninstall it, if needed.
+            installed_dist = self.factory.get_dist_to_uninstall(candidate)
+            if installed_dist is None:
+                # There is no existing installation -- nothing to uninstall.
+                ireq.should_reinstall = False
+            elif self.factory.force_reinstall:
+                # The --force-reinstall flag is set -- reinstall.
+                ireq.should_reinstall = True
+            elif installed_dist.version != candidate.version:
+                # The installation is different in version -- reinstall.
+                ireq.should_reinstall = True
+            elif candidate.is_editable or installed_dist.editable:
+                # The incoming distribution is editable, or different in
+                # editable-ness to installation -- reinstall.
+                ireq.should_reinstall = True
+            elif candidate.source_link and candidate.source_link.is_file:
+                # The incoming distribution is under file://
+                if candidate.source_link.is_wheel:
+                    # is a local wheel -- do nothing.
+                    logger.info(
+                        "%s is already installed with the same version as the "
+                        "provided wheel. Use --force-reinstall to force an "
+                        "installation of the wheel.",
+                        ireq.name,
+                    )
+                    continue
+
+                # is a local sdist or path -- reinstall
+                ireq.should_reinstall = True
+            else:
+                continue
+
+            link = candidate.source_link
+            if link and link.is_yanked:
+                # The reason can contain non-ASCII characters, Unicode
+                # is required for Python 2.
+                msg = (
+                    "The candidate selected for download or install is a "
+                    "yanked version: {name!r} candidate (version {version} "
+                    "at {link})\nReason for being yanked: {reason}"
+                ).format(
+                    name=candidate.name,
+                    version=candidate.version,
+                    link=link,
+                    reason=link.yanked_reason or "",
+                )
+                logger.warning(msg)
+
+            req_set.add_named_requirement(ireq)
+
+        reqs = req_set.all_requirements
+        self.factory.preparer.prepare_linked_requirements_more(reqs)
+        for req in reqs:
+            req.prepared = True
+            req.needs_more_preparation = False
+        return req_set
+
+    def get_installation_order(
+        self, req_set: RequirementSet
+    ) -> List[InstallRequirement]:
+        """Get order for installation of requirements in RequirementSet.
+
+        The returned list contains a requirement before another that depends on
+        it. This helps ensure that the environment is kept consistent as they
+        get installed one-by-one.
+
+        The current implementation creates a topological ordering of the
+        dependency graph, giving more weight to packages with less
+        or no dependencies, while breaking any cycles in the graph at
+        arbitrary points. We make no guarantees about where the cycle
+        would be broken, other than it *would* be broken.
+        """
+        assert self._result is not None, "must call resolve() first"
+
+        if not req_set.requirements:
+            # Nothing is left to install, so we do not need an order.
+            return []
+
+        graph = self._result.graph
+        weights = get_topological_weights(graph, set(req_set.requirements.keys()))
+
+        sorted_items = sorted(
+            req_set.requirements.items(),
+            key=functools.partial(_req_set_item_sorter, weights=weights),
+            reverse=True,
+        )
+        return [ireq for _, ireq in sorted_items]
+
+
+def get_topological_weights(
+    graph: "DirectedGraph[Optional[str]]", requirement_keys: Set[str]
+) -> Dict[Optional[str], int]:
+    """Assign weights to each node based on how "deep" they are.
+
+    This implementation may change at any point in the future without prior
+    notice.
+
+    We first simplify the dependency graph by pruning any leaves and giving them
+    the highest weight: a package without any dependencies should be installed
+    first. This is done again and again in the same way, giving ever less weight
+    to the newly found leaves. The loop stops when no leaves are left: all
+    remaining packages have at least one dependency left in the graph.
+
+    Then we continue with the remaining graph, taking the length of the
+    longest path from the root to each node, ignoring any paths that contain a
+    single node twice (i.e. cycles). This is done through a depth-first search
+    through the graph, while keeping track of the path to the node.
+
+    Cycles in the graph would result in a node being revisited while it is
+    also on its own path. In this case, take no action. This helps ensure we
+    don't get stuck in a cycle.
+
+    When assigning weight, the longer path (i.e. larger length) is preferred.
+
+    We are only interested in the weights of packages that are in the
+    requirement_keys.
+    """
+    path: Set[Optional[str]] = set()
+    weights: Dict[Optional[str], int] = {}
+
+    def visit(node: Optional[str]) -> None:
+        if node in path:
+            # We hit a cycle, so we'll break it here.
+            return
+
+        # Time to visit the children!
+        path.add(node)
+        for child in graph.iter_children(node):
+            visit(child)
+        path.remove(node)
+
+        if node not in requirement_keys:
+            return
+
+        last_known_parent_count = weights.get(node, 0)
+        weights[node] = max(last_known_parent_count, len(path))
+
+    # Simplify the graph, pruning leaves that have no dependencies.
+    # This is needed for large graphs (say over 200 packages) because
+    # otherwise the `visit` function is exponentially slower, taking minutes.
+    # See https://github.com/pypa/pip/issues/10557
+    # We will loop until we explicitly break the loop.
+    while True:
+        leaves = set()
+        for key in graph:
+            if key is None:
+                continue
+            for _child in graph.iter_children(key):
+                # This means we have at least one child
+                break
+            else:
+                # No child.
+                leaves.add(key)
+        if not leaves:
+            # We are done simplifying.
+            break
+        # Calculate the weight for the leaves.
+        weight = len(graph) - 1
+        for leaf in leaves:
+            if leaf not in requirement_keys:
+                continue
+            weights[leaf] = weight
+        # Remove the leaves from the graph, making it simpler.
+        for leaf in leaves:
+            graph.remove(leaf)
+
+    # Visit the remaining graph.
+    # `None` is guaranteed to be the root node by resolvelib.
+    visit(None)
+
+    # Sanity check: all requirement keys should be in the weights,
+    # and no other keys should be in the weights.
+    difference = set(weights.keys()).difference(requirement_keys)
+    assert not difference, difference
+
+    return weights
+
+
+def _req_set_item_sorter(
+    item: Tuple[str, InstallRequirement],
+    weights: Dict[Optional[str], int],
+) -> Tuple[int, str]:
+    """Key function used to sort install requirements for installation.
+
+    Based on the "weight" mapping calculated in ``get_installation_order()``.
+    The canonical package name is returned as the second member as a tie-
+    breaker to ensure the result is predictable, which is useful in tests.
+    """
+    name = canonicalize_name(item[0])
+    return weights[name], name
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/self_outdated_check.py b/.venv/lib/python3.12/site-packages/pip/_internal/self_outdated_check.py
new file mode 100644
index 0000000..0f64ae0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/self_outdated_check.py
@@ -0,0 +1,248 @@
+import datetime
+import functools
+import hashlib
+import json
+import logging
+import optparse
+import os.path
+import sys
+from dataclasses import dataclass
+from typing import Any, Callable, Dict, Optional
+
+from pip._vendor.packaging.version import parse as parse_version
+from pip._vendor.rich.console import Group
+from pip._vendor.rich.markup import escape
+from pip._vendor.rich.text import Text
+
+from pip._internal.index.collector import LinkCollector
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import get_default_environment
+from pip._internal.metadata.base import DistributionVersion
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.network.session import PipSession
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.entrypoints import (
+    get_best_invocation_for_this_pip,
+    get_best_invocation_for_this_python,
+)
+from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace
+from pip._internal.utils.misc import ensure_dir
+
+_WEEK = datetime.timedelta(days=7)
+
+logger = logging.getLogger(__name__)
+
+
+def _get_statefile_name(key: str) -> str:
+    key_bytes = key.encode()
+    name = hashlib.sha224(key_bytes).hexdigest()
+    return name
+
+
+def _convert_date(isodate: str) -> datetime.datetime:
+    """Convert an ISO format string to a date.
+
+    Handles the format 2020-01-22T14:24:01Z (trailing Z)
+    which is not supported by older versions of fromisoformat.
+    """
+    return datetime.datetime.fromisoformat(isodate.replace("Z", "+00:00"))
+
+
+class SelfCheckState:
+    def __init__(self, cache_dir: str) -> None:
+        self._state: Dict[str, Any] = {}
+        self._statefile_path = None
+
+        # Try to load the existing state
+        if cache_dir:
+            self._statefile_path = os.path.join(
+                cache_dir, "selfcheck", _get_statefile_name(self.key)
+            )
+            try:
+                with open(self._statefile_path, encoding="utf-8") as statefile:
+                    self._state = json.load(statefile)
+            except (OSError, ValueError, KeyError):
+                # Explicitly suppressing exceptions, since we don't want to
+                # error out if the cache file is invalid.
+                pass
+
+    @property
+    def key(self) -> str:
+        return sys.prefix
+
+    def get(self, current_time: datetime.datetime) -> Optional[str]:
+        """Check if we have a not-outdated version loaded already."""
+        if not self._state:
+            return None
+
+        if "last_check" not in self._state:
+            return None
+
+        if "pypi_version" not in self._state:
+            return None
+
+        # Determine if we need to refresh the state
+        last_check = _convert_date(self._state["last_check"])
+        time_since_last_check = current_time - last_check
+        if time_since_last_check > _WEEK:
+            return None
+
+        return self._state["pypi_version"]
+
+    def set(self, pypi_version: str, current_time: datetime.datetime) -> None:
+        # If we do not have a path to cache in, don't bother saving.
+        if not self._statefile_path:
+            return
+
+        # Check to make sure that we own the directory
+        if not check_path_owner(os.path.dirname(self._statefile_path)):
+            return
+
+        # Now that we've ensured the directory is owned by this user, we'll go
+        # ahead and make sure that all our directories are created.
+        ensure_dir(os.path.dirname(self._statefile_path))
+
+        state = {
+            # Include the key so it's easy to tell which pip wrote the
+            # file.
+            "key": self.key,
+            "last_check": current_time.isoformat(),
+            "pypi_version": pypi_version,
+        }
+
+        text = json.dumps(state, sort_keys=True, separators=(",", ":"))
+
+        with adjacent_tmp_file(self._statefile_path) as f:
+            f.write(text.encode())
+
+        try:
+            # Since we have a prefix-specific state file, we can just
+            # overwrite whatever is there, no need to check.
+            replace(f.name, self._statefile_path)
+        except OSError:
+            # Best effort.
+            pass
+
+
+@dataclass
+class UpgradePrompt:
+    old: str
+    new: str
+
+    def __rich__(self) -> Group:
+        if WINDOWS:
+            pip_cmd = f"{get_best_invocation_for_this_python()} -m pip"
+        else:
+            pip_cmd = get_best_invocation_for_this_pip()
+
+        notice = "[bold][[reset][blue]notice[reset][bold]][reset]"
+        return Group(
+            Text(),
+            Text.from_markup(
+                f"{notice} A new release of pip is available: "
+                f"[red]{self.old}[reset] -> [green]{self.new}[reset]"
+            ),
+            Text.from_markup(
+                f"{notice} To update, run: "
+                f"[green]{escape(pip_cmd)} install --upgrade pip"
+            ),
+        )
+
+
+def was_installed_by_pip(pkg: str) -> bool:
+    """Checks whether pkg was installed by pip
+
+    This is used to avoid displaying the upgrade message when pip was in fact
+    installed by the system package manager, such as dnf on Fedora.
+    """
+    dist = get_default_environment().get_distribution(pkg)
+    return dist is not None and "pip" == dist.installer
+
+
+def _get_current_remote_pip_version(
+    session: PipSession, options: optparse.Values
+) -> Optional[str]:
+    # Lets use PackageFinder to see what the latest pip version is
+    link_collector = LinkCollector.create(
+        session,
+        options=options,
+        suppress_no_index=True,
+    )
+
+    # Pass allow_yanked=False so we don't suggest upgrading to a
+    # yanked version.
+    selection_prefs = SelectionPreferences(
+        allow_yanked=False,
+        allow_all_prereleases=False,  # Explicitly set to False
+    )
+
+    finder = PackageFinder.create(
+        link_collector=link_collector,
+        selection_prefs=selection_prefs,
+    )
+    best_candidate = finder.find_best_candidate("pip").best_candidate
+    if best_candidate is None:
+        return None
+
+    return str(best_candidate.version)
+
+
+def _self_version_check_logic(
+    *,
+    state: SelfCheckState,
+    current_time: datetime.datetime,
+    local_version: DistributionVersion,
+    get_remote_version: Callable[[], Optional[str]],
+) -> Optional[UpgradePrompt]:
+    remote_version_str = state.get(current_time)
+    if remote_version_str is None:
+        remote_version_str = get_remote_version()
+        if remote_version_str is None:
+            logger.debug("No remote pip version found")
+            return None
+        state.set(remote_version_str, current_time)
+
+    remote_version = parse_version(remote_version_str)
+    logger.debug("Remote version of pip: %s", remote_version)
+    logger.debug("Local version of pip:  %s", local_version)
+
+    pip_installed_by_pip = was_installed_by_pip("pip")
+    logger.debug("Was pip installed by pip? %s", pip_installed_by_pip)
+    if not pip_installed_by_pip:
+        return None  # Only suggest upgrade if pip is installed by pip.
+
+    local_version_is_older = (
+        local_version < remote_version
+        and local_version.base_version != remote_version.base_version
+    )
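+    # Note: requiring a different base version avoids prompting when the local
+    # pip is only a pre-release or dev build of the same upcoming release.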
+    if local_version_is_older:
+        return UpgradePrompt(old=str(local_version), new=remote_version_str)
+
+    return None
+
+
+def pip_self_version_check(session: PipSession, options: optparse.Values) -> None:
+    """Check for an update for pip.
+
+    Limit the frequency of checks to once per week. State is stored either in
+    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
+    of the pip script path.
+    """
+    installed_dist = get_default_environment().get_distribution("pip")
+    if not installed_dist:
+        return
+
+    try:
+        upgrade_prompt = _self_version_check_logic(
+            state=SelfCheckState(cache_dir=options.cache_dir),
+            current_time=datetime.datetime.now(datetime.timezone.utc),
+            local_version=installed_dist.version,
+            get_remote_version=functools.partial(
+                _get_current_remote_pip_version, session, options
+            ),
+        )
+        if upgrade_prompt is not None:
+            logger.warning("%s", upgrade_prompt, extra={"rich": True})
+    except Exception:
+        logger.warning("There was an error checking the latest version of pip.")
+        logger.debug("See below for error", exc_info=True)
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/__init__.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/_jaraco_text.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/_jaraco_text.py
new file mode 100644
index 0000000..e06947c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/_jaraco_text.py
@@ -0,0 +1,109 @@
+"""Functions brought over from jaraco.text.
+
+These functions are not supposed to be used within `pip._internal`. These are
+helper functions brought over from `jaraco.text` to enable vendoring newer
+copies of `pkg_resources` without having to vendor `jaraco.text` and its entire
+dependency cone; something that our vendoring setup is not currently capable of
+handling.
+
+License reproduced from original source below:
+
+Copyright Jason R. Coombs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+import functools
+import itertools
+
+
+def _nonblank(str):
+    return str and not str.startswith("#")
+
+
+@functools.singledispatch
+def yield_lines(iterable):
+    r"""
+    Yield valid lines of a string or iterable.
+
+    >>> list(yield_lines(''))
+    []
+    >>> list(yield_lines(['foo', 'bar']))
+    ['foo', 'bar']
+    >>> list(yield_lines('foo\nbar'))
+    ['foo', 'bar']
+    >>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))
+    ['foo', 'baz #comment']
+    >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))
+    ['foo', 'bar', 'baz', 'bing']
+    """
+    return itertools.chain.from_iterable(map(yield_lines, iterable))
+
+
+@yield_lines.register(str)
+def _(text):
+    return filter(_nonblank, map(str.strip, text.splitlines()))
+
+
+def drop_comment(line):
+    """
+    Drop comments.
+
+    >>> drop_comment('foo # bar')
+    'foo'
+
+    A hash without a space may be in a URL.
+
+    >>> drop_comment('http://example.com/foo#bar')
+    'http://example.com/foo#bar'
+    """
+    return line.partition(" #")[0]
+
+
+def join_continuation(lines):
+    r"""
+    Join lines continued by a trailing backslash.
+
+    >>> list(join_continuation(['foo \\', 'bar', 'baz']))
+    ['foobar', 'baz']
+    >>> list(join_continuation(['foo \\', 'bar', 'baz']))
+    ['foobar', 'baz']
+    >>> list(join_continuation(['foo \\', 'bar \\', 'baz']))
+    ['foobarbaz']
+
+    Not sure why, but...
+    The character preceding the backslash is also elided.
+
+    >>> list(join_continuation(['goo\\', 'dly']))
+    ['godly']
+
+    A terrible idea, but...
+    If no line is available to continue, suppress the lines.
+
+    >>> list(join_continuation(['foo', 'bar\\', 'baz\\']))
+    ['foo']
+    """
+    lines = iter(lines)
+    for item in lines:
+        while item.endswith("\\"):
+            try:
+                item = item[:-2].strip() + next(lines)
+            except StopIteration:
+                return
+        yield item
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/_log.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/_log.py
new file mode 100644
index 0000000..92c4c6a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/_log.py
@@ -0,0 +1,38 @@
+"""Customize logging
+
+Defines custom logger class for the `logger.verbose(...)` method.
+
+init_logging() must be called before any other module calls logging.getLogger.
+"""
+
+import logging
+from typing import Any, cast
+
+# custom log level for `--verbose` output
+# between DEBUG and INFO
+VERBOSE = 15
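+# (DEBUG is 10 and INFO is 20 in the standard library, so 15 sits in between.)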
+
+
+class VerboseLogger(logging.Logger):
+    """Custom Logger, defining a verbose log-level
+
+    VERBOSE is between INFO and DEBUG.
+    """
+
+    def verbose(self, msg: str, *args: Any, **kwargs: Any) -> None:
+        return self.log(VERBOSE, msg, *args, **kwargs)
+
+
+def getLogger(name: str) -> VerboseLogger:
+    """logging.getLogger, but ensures our VerboseLogger class is returned"""
+    return cast(VerboseLogger, logging.getLogger(name))
+
+
+def init_logging() -> None:
+    """Register our VerboseLogger and VERBOSE log level.
+
+    Should be called before any calls to getLogger(),
+    i.e. in pip._internal.__init__
+    """
+    logging.setLoggerClass(VerboseLogger)
+    logging.addLevelName(VERBOSE, "VERBOSE")
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/appdirs.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/appdirs.py
new file mode 100644
index 0000000..16933bf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/appdirs.py
@@ -0,0 +1,52 @@
+"""
+This code wraps the vendored appdirs module so that the return values are
+compatible with the current pip code base.
+
+The intention is to rewrite current usages gradually, keeping the tests
+passing, and eventually drop this after all usages are changed.
+"""
+
+import os
+import sys
+from typing import List
+
+from pip._vendor import platformdirs as _appdirs
+
+
+def user_cache_dir(appname: str) -> str:
+    return _appdirs.user_cache_dir(appname, appauthor=False)
+
+
+def _macos_user_config_dir(appname: str, roaming: bool = True) -> str:
+    # Use ~/Application Support/pip, if the directory exists.
+    path = _appdirs.user_data_dir(appname, appauthor=False, roaming=roaming)
+    if os.path.isdir(path):
+        return path
+
+    # Use a Linux-like ~/.config/pip, by default.
+    linux_like_path = "~/.config/"
+    if appname:
+        linux_like_path = os.path.join(linux_like_path, appname)
+
+    return os.path.expanduser(linux_like_path)
+
+
+def user_config_dir(appname: str, roaming: bool = True) -> str:
+    if sys.platform == "darwin":
+        return _macos_user_config_dir(appname, roaming)
+
+    return _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming)
+
+
+# for the discussion regarding site_config_dir locations
+# see 
+def site_config_dirs(appname: str) -> List[str]:
+    if sys.platform == "darwin":
+        return [_appdirs.site_data_dir(appname, appauthor=False, multipath=True)]
+
+    dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True)
+    if sys.platform == "win32":
+        return [dirval]
+
+    # Unix-y system. Look in /etc as well.
+    return dirval.split(os.pathsep) + ["/etc"]
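+# Illustrative (assuming platformdirs defaults): on a typical Linux system,
+# site_config_dirs("pip") returns something like ["/etc/xdg/pip", "/etc"].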
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/compat.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/compat.py
new file mode 100644
index 0000000..3f4d300
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/compat.py
@@ -0,0 +1,63 @@
+"""Stuff that differs in different Python versions and platform
+distributions."""
+
+import logging
+import os
+import sys
+
+__all__ = ["get_path_uid", "stdlib_pkgs", "WINDOWS"]
+
+
+logger = logging.getLogger(__name__)
+
+
+def has_tls() -> bool:
+    try:
+        import _ssl  # noqa: F401  # ignore unused
+
+        return True
+    except ImportError:
+        pass
+
+    from pip._vendor.urllib3.util import IS_PYOPENSSL
+
+    return IS_PYOPENSSL
+
+
+def get_path_uid(path: str) -> int:
+    """
+    Return path's uid.
+
+    Does not follow symlinks:
+        https://github.com/pypa/pip/pull/935#discussion_r5307003
+
+    Placed this function in compat due to differences on AIX and
+    Jython, which should eventually go away.
+
+    :raises OSError: When path is a symlink or can't be read.
+    """
+    if hasattr(os, "O_NOFOLLOW"):
+        fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
+        file_uid = os.fstat(fd).st_uid
+        os.close(fd)
+    else:  # AIX and Jython
+        # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
+        if not os.path.islink(path):
+            # older versions of Jython don't have `os.fstat`
+            file_uid = os.stat(path).st_uid
+        else:
+            # raise OSError for parity with os.O_NOFOLLOW above
+            raise OSError(f"{path} is a symlink; Will not return uid for symlinks")
+    return file_uid
+
+
+# packages in the stdlib that may have installation metadata, but should not be
+# considered 'installed'.  this theoretically could be determined based on
+# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
+# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
+# make this ineffective, so hard-coding
+stdlib_pkgs = {"python", "wsgiref", "argparse"}
+
+
+# windows detection, covers cpython and ironpython
+WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt")
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/compatibility_tags.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/compatibility_tags.py
new file mode 100644
index 0000000..b6ed9a7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/compatibility_tags.py
@@ -0,0 +1,165 @@
+"""Generate and work with PEP 425 Compatibility Tags.
+"""
+
+import re
+from typing import List, Optional, Tuple
+
+from pip._vendor.packaging.tags import (
+    PythonVersion,
+    Tag,
+    compatible_tags,
+    cpython_tags,
+    generic_tags,
+    interpreter_name,
+    interpreter_version,
+    mac_platforms,
+)
+
+_osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)")
+
+
+def version_info_to_nodot(version_info: Tuple[int, ...]) -> str:
+    # Only use up to the first two numbers.
+    return "".join(map(str, version_info[:2]))
+
+
+def _mac_platforms(arch: str) -> List[str]:
+    match = _osx_arch_pat.match(arch)
+    if match:
+        name, major, minor, actual_arch = match.groups()
+        mac_version = (int(major), int(minor))
+        arches = [
+            # Since we have always only checked that the platform starts
+            # with "macosx", for backwards-compatibility we extract the
+            # actual prefix provided by the user in case they provided
+            # something like "macosxcustom_". It may be good to remove
+            # this as undocumented or deprecate it in the future.
+            "{}_{}".format(name, arch[len("macosx_") :])
+            for arch in mac_platforms(mac_version, actual_arch)
+        ]
+    else:
+        # arch pattern didn't match (?!)
+        arches = [arch]
+    return arches
+
+
+def _custom_manylinux_platforms(arch: str) -> List[str]:
+    arches = [arch]
+    arch_prefix, arch_sep, arch_suffix = arch.partition("_")
+    if arch_prefix == "manylinux2014":
+        # manylinux1/manylinux2010 wheels run on most manylinux2014 systems
+        # with the exception of wheels depending on ncurses. PEP 599 states
+        # manylinux1/manylinux2010 wheels should be considered
+        # manylinux2014 wheels:
+        # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels
+        if arch_suffix in {"i686", "x86_64"}:
+            arches.append("manylinux2010" + arch_sep + arch_suffix)
+            arches.append("manylinux1" + arch_sep + arch_suffix)
+    elif arch_prefix == "manylinux2010":
+        # manylinux1 wheels run on most manylinux2010 systems with the
+        # exception of wheels depending on ncurses. PEP 571 states
+        # manylinux1 wheels should be considered manylinux2010 wheels:
+        # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels
+        arches.append("manylinux1" + arch_sep + arch_suffix)
+    return arches
+
+
+def _get_custom_platforms(arch: str) -> List[str]:
+    arch_prefix, arch_sep, arch_suffix = arch.partition("_")
+    if arch.startswith("macosx"):
+        arches = _mac_platforms(arch)
+    elif arch_prefix in ["manylinux2014", "manylinux2010"]:
+        arches = _custom_manylinux_platforms(arch)
+    else:
+        arches = [arch]
+    return arches
+
+
+def _expand_allowed_platforms(platforms: Optional[List[str]]) -> Optional[List[str]]:
+    if not platforms:
+        return None
+
+    seen = set()
+    result = []
+
+    for p in platforms:
+        if p in seen:
+            continue
+        additions = [c for c in _get_custom_platforms(p) if c not in seen]
+        seen.update(additions)
+        result.extend(additions)
+
+    return result
+
+
+def _get_python_version(version: str) -> PythonVersion:
+    if len(version) > 1:
+        return int(version[0]), int(version[1:])
+    else:
+        return (int(version[0]),)
+
+
+def _get_custom_interpreter(
+    implementation: Optional[str] = None, version: Optional[str] = None
+) -> str:
+    if implementation is None:
+        implementation = interpreter_name()
+    if version is None:
+        version = interpreter_version()
+    return f"{implementation}{version}"
+
+
+def get_supported(
+    version: Optional[str] = None,
+    platforms: Optional[List[str]] = None,
+    impl: Optional[str] = None,
+    abis: Optional[List[str]] = None,
+) -> List[Tag]:
+    """Return a list of supported tags for each version specified in
+    `versions`.
+
+    :param version: a string version, of the form "33" or "32",
+        or None. The version will be assumed to support our ABI.
+    :param platforms: specify a list of platforms you want valid
+        tags for, or None. If None, use the local system platform.
+    :param impl: specify the exact implementation you want valid
+        tags for, or None. If None, use the local interpreter impl.
+    :param abis: specify a list of abis you want valid
+        tags for, or None. If None, use the local interpreter abi.
+    """
+    supported: List[Tag] = []
+
+    python_version: Optional[PythonVersion] = None
+    if version is not None:
+        python_version = _get_python_version(version)
+
+    interpreter = _get_custom_interpreter(impl, version)
+
+    platforms = _expand_allowed_platforms(platforms)
+
+    is_cpython = (impl or interpreter_name()) == "cp"
+    if is_cpython:
+        supported.extend(
+            cpython_tags(
+                python_version=python_version,
+                abis=abis,
+                platforms=platforms,
+            )
+        )
+    else:
+        supported.extend(
+            generic_tags(
+                interpreter=interpreter,
+                abis=abis,
+                platforms=platforms,
+            )
+        )
+    supported.extend(
+        compatible_tags(
+            python_version=python_version,
+            interpreter=interpreter,
+            platforms=platforms,
+        )
+    )
+
+    return supported
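+
+
+# A minimal usage sketch (illustrative; the exact tags depend on the running
+# interpreter and platform):
+#
+#     tags = get_supported()                            # tags for the current interpreter
+#     py37_tags = get_supported(version="37", impl="cp")  # tags for a hypothetical CPython 3.7 target
+#     assert all(isinstance(tag, Tag) for tag in tags)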
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/datetime.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/datetime.py
new file mode 100644
index 0000000..8668b3b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/datetime.py
@@ -0,0 +1,11 @@
+"""For when pip wants to check the date or time.
+"""
+
+import datetime
+
+
+def today_is_later_than(year: int, month: int, day: int) -> bool:
+    today = datetime.date.today()
+    given = datetime.date(year, month, day)
+
+    return today > given
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/deprecation.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/deprecation.py
new file mode 100644
index 0000000..72bd6f2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/deprecation.py
@@ -0,0 +1,120 @@
+"""
+A module that implements tooling to enable easy warnings about deprecations.
+"""
+
+import logging
+import warnings
+from typing import Any, Optional, TextIO, Type, Union
+
+from pip._vendor.packaging.version import parse
+
+from pip import __version__ as current_version  # NOTE: tests patch this name.
+
+DEPRECATION_MSG_PREFIX = "DEPRECATION: "
+
+
+class PipDeprecationWarning(Warning):
+    pass
+
+
+_original_showwarning: Any = None
+
+
+# Warnings <-> Logging Integration
+def _showwarning(
+    message: Union[Warning, str],
+    category: Type[Warning],
+    filename: str,
+    lineno: int,
+    file: Optional[TextIO] = None,
+    line: Optional[str] = None,
+) -> None:
+    if file is not None:
+        if _original_showwarning is not None:
+            _original_showwarning(message, category, filename, lineno, file, line)
+    elif issubclass(category, PipDeprecationWarning):
+        # We use a specially named logger which will handle all of the
+        # deprecation messages for pip.
+        logger = logging.getLogger("pip._internal.deprecations")
+        logger.warning(message)
+    else:
+        _original_showwarning(message, category, filename, lineno, file, line)
+
+
+def install_warning_logger() -> None:
+    # Enable our Deprecation Warnings
+    warnings.simplefilter("default", PipDeprecationWarning, append=True)
+
+    global _original_showwarning
+
+    if _original_showwarning is None:
+        _original_showwarning = warnings.showwarning
+        warnings.showwarning = _showwarning
+
+
+def deprecated(
+    *,
+    reason: str,
+    replacement: Optional[str],
+    gone_in: Optional[str],
+    feature_flag: Optional[str] = None,
+    issue: Optional[int] = None,
+) -> None:
+    """Helper to deprecate existing functionality.
+
+    reason:
+        Textual reason shown to the user about why this functionality has
+        been deprecated. Should be a complete sentence.
+    replacement:
+        Textual suggestion shown to the user about what alternative
+        functionality they can use.
+    gone_in:
+        The version of pip in which this functionality should be removed.
+        Raises an error if pip's current version is greater than or equal to
+        this.
+    feature_flag:
+        Command-line flag of the form --use-feature={feature_flag} for testing
+        upcoming functionality.
+    issue:
+        Issue number on the tracker that would serve as a useful place for
+        users to find related discussion and provide feedback.
+    """
+
+    # Determine whether or not the feature is already gone in this version.
+    is_gone = gone_in is not None and parse(current_version) >= parse(gone_in)
+
+    message_parts = [
+        (reason, f"{DEPRECATION_MSG_PREFIX}{{}}"),
+        (
+            gone_in,
+            "pip {} will enforce this behaviour change."
+            if not is_gone
+            else "Since pip {}, this is no longer supported.",
+        ),
+        (
+            replacement,
+            "A possible replacement is {}.",
+        ),
+        (
+            feature_flag,
+            "You can use the flag --use-feature={} to test the upcoming behaviour."
+            if not is_gone
+            else None,
+        ),
+        (
+            issue,
+            "Discussion can be found at https://github.com/pypa/pip/issues/{}",
+        ),
+    ]
+
+    message = " ".join(
+        format_str.format(value)
+        for value, format_str in message_parts
+        if format_str is not None and value is not None
+    )
+
+    # Raise as an error if this behaviour is deprecated.
+    if is_gone:
+        raise PipDeprecationWarning(message)
+
+    warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
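+
+
+# A minimal usage sketch (placeholder values only): emits a PipDeprecationWarning
+# now and raises it as an error once the running pip reaches ``gone_in``.
+#
+#     deprecated(
+#         reason="The --example-flag option is deprecated.",
+#         replacement="the --new-example-flag option",
+#         gone_in="99.0",
+#         issue=12345,
+#     )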
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/direct_url_helpers.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/direct_url_helpers.py
new file mode 100644
index 0000000..0e8e5e1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/direct_url_helpers.py
@@ -0,0 +1,87 @@
+from typing import Optional
+
+from pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo
+from pip._internal.models.link import Link
+from pip._internal.utils.urls import path_to_url
+from pip._internal.vcs import vcs
+
+
+def direct_url_as_pep440_direct_reference(direct_url: DirectUrl, name: str) -> str:
+    """Convert a DirectUrl to a pip requirement string."""
+    direct_url.validate()  # if invalid, this is a pip bug
+    requirement = name + " @ "
+    fragments = []
+    if isinstance(direct_url.info, VcsInfo):
+        requirement += "{}+{}@{}".format(
+            direct_url.info.vcs, direct_url.url, direct_url.info.commit_id
+        )
+    elif isinstance(direct_url.info, ArchiveInfo):
+        requirement += direct_url.url
+        if direct_url.info.hash:
+            fragments.append(direct_url.info.hash)
+    else:
+        assert isinstance(direct_url.info, DirInfo)
+        requirement += direct_url.url
+    if direct_url.subdirectory:
+        fragments.append("subdirectory=" + direct_url.subdirectory)
+    if fragments:
+        requirement += "#" + "&".join(fragments)
+    return requirement
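+
+# Illustrative result shapes (placeholder project name, URLs, and digests):
+#
+#     VCS link:     "pkg @ git+https://example.com/repo.git@<commit-id>"
+#     Archive link: "pkg @ https://example.com/pkg-1.0.tar.gz#sha256=<digest>"
+#     Local dir:    "pkg @ file:///path/to/src#subdirectory=sub"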
+
+
+def direct_url_for_editable(source_dir: str) -> DirectUrl:
+    return DirectUrl(
+        url=path_to_url(source_dir),
+        info=DirInfo(editable=True),
+    )
+
+
+def direct_url_from_link(
+    link: Link, source_dir: Optional[str] = None, link_is_in_wheel_cache: bool = False
+) -> DirectUrl:
+    if link.is_vcs:
+        vcs_backend = vcs.get_backend_for_scheme(link.scheme)
+        assert vcs_backend
+        url, requested_revision, _ = vcs_backend.get_url_rev_and_auth(
+            link.url_without_fragment
+        )
+        # For VCS links, we need to find out and add commit_id.
+        if link_is_in_wheel_cache:
+            # If the requested VCS link corresponds to a cached
+            # wheel, it means the requested revision was an
+            # immutable commit hash, otherwise it would not have
+            # been cached. In that case we don't have a source_dir
+            # with the VCS checkout.
+            assert requested_revision
+            commit_id = requested_revision
+        else:
+            # If the wheel was not in cache, it means we have
+            # had to checkout from VCS to build and we have a source_dir
+            # which we can inspect to find out the commit id.
+            assert source_dir
+            commit_id = vcs_backend.get_revision(source_dir)
+        return DirectUrl(
+            url=url,
+            info=VcsInfo(
+                vcs=vcs_backend.name,
+                commit_id=commit_id,
+                requested_revision=requested_revision,
+            ),
+            subdirectory=link.subdirectory_fragment,
+        )
+    elif link.is_existing_dir():
+        return DirectUrl(
+            url=link.url_without_fragment,
+            info=DirInfo(),
+            subdirectory=link.subdirectory_fragment,
+        )
+    else:
+        hash = None
+        hash_name = link.hash_name
+        if hash_name:
+            hash = f"{hash_name}={link.hash}"
+        return DirectUrl(
+            url=link.url_without_fragment,
+            info=ArchiveInfo(hash=hash),
+            subdirectory=link.subdirectory_fragment,
+        )
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/egg_link.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/egg_link.py
new file mode 100644
index 0000000..4a384a6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/egg_link.py
@@ -0,0 +1,80 @@
+import os
+import re
+import sys
+from typing import List, Optional
+
+from pip._internal.locations import site_packages, user_site
+from pip._internal.utils.virtualenv import (
+    running_under_virtualenv,
+    virtualenv_no_global,
+)
+
+__all__ = [
+    "egg_link_path_from_sys_path",
+    "egg_link_path_from_location",
+]
+
+
+def _egg_link_names(raw_name: str) -> List[str]:
+    """
+    Convert a Name metadata value to a .egg-link name, by applying
+    the same substitution as pkg_resources's safe_name function.
+    Note: we cannot use canonicalize_name because it applies different logic.
+
+    We also look for the raw name (without normalization) as setuptools 69 changed
+    the way it names .egg-link files (https://github.com/pypa/setuptools/issues/4167).
+    """
+    return [
+        re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link",
+        f"{raw_name}.egg-link",
+    ]
+
+
+def egg_link_path_from_sys_path(raw_name: str) -> Optional[str]:
+    """
+    Look for a .egg-link file for project name, by walking sys.path.
+    """
+    egg_link_names = _egg_link_names(raw_name)
+    for path_item in sys.path:
+        for egg_link_name in egg_link_names:
+            egg_link = os.path.join(path_item, egg_link_name)
+            if os.path.isfile(egg_link):
+                return egg_link
+    return None
+
+
+def egg_link_path_from_location(raw_name: str) -> Optional[str]:
+    """
+    Return the path for the .egg-link file if it exists, otherwise, None.
+
+    There are three scenarios:
+    1) not in a virtualenv
+       try to find in site.USER_SITE, then site_packages
+    2) in a no-global virtualenv
+       try to find in site_packages
+    3) in a yes-global virtualenv
+       try to find in site_packages, then site.USER_SITE
+       (don't look in global location)
+
+    For #1 and #3, there could be odd cases, where there's an egg-link in 2
+    locations.
+
+    This method will just return the first one found.
+    """
+    sites: List[str] = []
+    if running_under_virtualenv():
+        sites.append(site_packages)
+        if not virtualenv_no_global() and user_site:
+            sites.append(user_site)
+    else:
+        if user_site:
+            sites.append(user_site)
+        sites.append(site_packages)
+
+    egg_link_names = _egg_link_names(raw_name)
+    for site in sites:
+        for egg_link_name in egg_link_names:
+            egglink = os.path.join(site, egg_link_name)
+            if os.path.isfile(egglink):
+                return egglink
+    return None
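+
+
+# An illustrative lookup (hypothetical project name):
+#
+#     _egg_link_names("My_Project")
+#     # -> ["My-Project.egg-link", "My_Project.egg-link"]
+#
+# Both public helpers above return the first matching .egg-link path found, or
+# None when no such file exists.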
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/encoding.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/encoding.py
new file mode 100644
index 0000000..008f06a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/encoding.py
@@ -0,0 +1,36 @@
+import codecs
+import locale
+import re
+import sys
+from typing import List, Tuple
+
+BOMS: List[Tuple[bytes, str]] = [
+    (codecs.BOM_UTF8, "utf-8"),
+    (codecs.BOM_UTF16, "utf-16"),
+    (codecs.BOM_UTF16_BE, "utf-16-be"),
+    (codecs.BOM_UTF16_LE, "utf-16-le"),
+    (codecs.BOM_UTF32, "utf-32"),
+    (codecs.BOM_UTF32_BE, "utf-32-be"),
+    (codecs.BOM_UTF32_LE, "utf-32-le"),
+]
+
+ENCODING_RE = re.compile(rb"coding[:=]\s*([-\w.]+)")
+
+
+def auto_decode(data: bytes) -> str:
+    """Check a bytes string for a BOM to correctly detect the encoding
+
+    Falls back to locale.getpreferredencoding(False), like open() on Python 3."""
+    for bom, encoding in BOMS:
+        if data.startswith(bom):
+            return data[len(bom) :].decode(encoding)
+    # Let's check the first two lines as in PEP 263
+    for line in data.split(b"\n")[:2]:
+        if line[0:1] == b"#" and ENCODING_RE.search(line):
+            result = ENCODING_RE.search(line)
+            assert result is not None
+            encoding = result.groups()[0].decode("ascii")
+            return data.decode(encoding)
+    return data.decode(
+        locale.getpreferredencoding(False) or sys.getdefaultencoding(),
+    )
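+
+
+# A minimal sketch of the decoding order (illustrative byte strings):
+#
+#     auto_decode(codecs.BOM_UTF8 + b"x = 1\n")           # BOM wins: decoded as UTF-8
+#     auto_decode(b"# -*- coding: latin-1 -*-\nx = 1\n")  # PEP 263 cookie wins
+#     auto_decode(b"x = 1\n")                             # falls back to the locale encoding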
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/entrypoints.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/entrypoints.py
new file mode 100644
index 0000000..1501369
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/entrypoints.py
@@ -0,0 +1,84 @@
+import itertools
+import os
+import shutil
+import sys
+from typing import List, Optional
+
+from pip._internal.cli.main import main
+from pip._internal.utils.compat import WINDOWS
+
+_EXECUTABLE_NAMES = [
+    "pip",
+    f"pip{sys.version_info.major}",
+    f"pip{sys.version_info.major}.{sys.version_info.minor}",
+]
+if WINDOWS:
+    _allowed_extensions = {"", ".exe"}
+    _EXECUTABLE_NAMES = [
+        "".join(parts)
+        for parts in itertools.product(_EXECUTABLE_NAMES, _allowed_extensions)
+    ]
+
+
+def _wrapper(args: Optional[List[str]] = None) -> int:
+    """Central wrapper for all old entrypoints.
+
+    Historically pip has had several entrypoints defined. Because of issues
+    arising from PATH, sys.path, multiple Pythons, their interactions, and most
+    of them having a pip installed, users suffer every time an entrypoint gets
+    moved.
+
+    To alleviate this pain, and provide a mechanism for warning users and
+    directing them to an appropriate place for help, we now define all of
+    our old entrypoints as wrappers for the current one.
+    """
+    sys.stderr.write(
+        "WARNING: pip is being invoked by an old script wrapper. This will "
+        "fail in a future version of pip.\n"
+        "Please see https://github.com/pypa/pip/issues/5599 for advice on "
+        "fixing the underlying issue.\n"
+        "To avoid this problem you can invoke Python with '-m pip' instead of "
+        "running pip directly.\n"
+    )
+    return main(args)
+
+
+def get_best_invocation_for_this_pip() -> str:
+    """Try to figure out the best way to invoke pip in the current environment."""
+    binary_directory = "Scripts" if WINDOWS else "bin"
+    binary_prefix = os.path.join(sys.prefix, binary_directory)
+
+    # Try to use pip[X[.Y]] names, if those executables for this environment are
+    # the first on PATH with that name.
+    path_parts = os.path.normcase(os.environ.get("PATH", "")).split(os.pathsep)
+    exe_are_in_PATH = os.path.normcase(binary_prefix) in path_parts
+    if exe_are_in_PATH:
+        for exe_name in _EXECUTABLE_NAMES:
+            found_executable = shutil.which(exe_name)
+            binary_executable = os.path.join(binary_prefix, exe_name)
+            if (
+                found_executable
+                and os.path.exists(binary_executable)
+                and os.path.samefile(
+                    found_executable,
+                    binary_executable,
+                )
+            ):
+                return exe_name
+
+    # Use the `-m` invocation, if there's no "nice" invocation.
+    return f"{get_best_invocation_for_this_python()} -m pip"
+
+
+def get_best_invocation_for_this_python() -> str:
+    """Try to figure out the best way to invoke the current Python."""
+    exe = sys.executable
+    exe_name = os.path.basename(exe)
+
+    # Try to use the basename, if it's the first executable.
+    found_executable = shutil.which(exe_name)
+    if found_executable and os.path.samefile(found_executable, exe):
+        return exe_name
+
+    # Use the full executable name, because we couldn't find something simpler.
+    return exe
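+
+
+# Illustrative results (environment dependent): get_best_invocation_for_this_pip()
+# returns e.g. "pip3.12" when that executable on PATH belongs to this
+# environment, and otherwise falls back to something like
+# "/usr/bin/python3 -m pip".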
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/filesystem.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/filesystem.py
new file mode 100644
index 0000000..83c2df7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/filesystem.py
@@ -0,0 +1,153 @@
+import fnmatch
+import os
+import os.path
+import random
+import sys
+from contextlib import contextmanager
+from tempfile import NamedTemporaryFile
+from typing import Any, BinaryIO, Generator, List, Union, cast
+
+from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed
+
+from pip._internal.utils.compat import get_path_uid
+from pip._internal.utils.misc import format_size
+
+
+def check_path_owner(path: str) -> bool:
+    # If we don't have a way to check the effective uid of this process, then
+    # we'll just assume that we own the directory.
+    if sys.platform == "win32" or not hasattr(os, "geteuid"):
+        return True
+
+    assert os.path.isabs(path)
+
+    previous = None
+    while path != previous:
+        if os.path.lexists(path):
+            # Check if path is writable by current user.
+            if os.geteuid() == 0:
+                # Special handling for root user in order to handle properly
+                # cases where users use sudo without -H flag.
+                try:
+                    path_uid = get_path_uid(path)
+                except OSError:
+                    return False
+                return path_uid == 0
+            else:
+                return os.access(path, os.W_OK)
+        else:
+            previous, path = path, os.path.dirname(path)
+    return False  # assume we don't own the path
+
+
+@contextmanager
+def adjacent_tmp_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]:
+    """Return a file-like object pointing to a tmp file next to path.
+
+    The file is created securely and is ensured to be written to disk
+    after the context reaches its end.
+
+    kwargs will be passed to tempfile.NamedTemporaryFile to control
+    the way the temporary file will be opened.
+    """
+    with NamedTemporaryFile(
+        delete=False,
+        dir=os.path.dirname(path),
+        prefix=os.path.basename(path),
+        suffix=".tmp",
+        **kwargs,
+    ) as f:
+        result = cast(BinaryIO, f)
+        try:
+            yield result
+        finally:
+            result.flush()
+            os.fsync(result.fileno())
+
+
+# Tenacity raises RetryError by default, explicitly raise the original exception
+_replace_retry = retry(reraise=True, stop=stop_after_delay(1), wait=wait_fixed(0.25))
+
+replace = _replace_retry(os.replace)
+
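+# A minimal sketch of the intended write-then-replace pattern (hypothetical
+# target path; the temporary file lands next to it so the rename stays on one
+# filesystem):
+#
+#     with adjacent_tmp_file("/some/cache/index.json") as f:
+#         f.write(b'{"serial": 1}')
+#     replace(f.name, "/some/cache/index.json")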
+
+# test_writable_dir and _test_writable_dir_win are copied from Flit,
+# with the author's agreement to also place them under pip's license.
+def test_writable_dir(path: str) -> bool:
+    """Check if a directory is writable.
+
+    Uses os.access() on POSIX, tries creating files on Windows.
+    """
+    # If the directory doesn't exist, find the closest parent that does.
+    while not os.path.isdir(path):
+        parent = os.path.dirname(path)
+        if parent == path:
+            break  # Should never get here, but infinite loops are bad
+        path = parent
+
+    if os.name == "posix":
+        return os.access(path, os.W_OK)
+
+    return _test_writable_dir_win(path)
+
+
+def _test_writable_dir_win(path: str) -> bool:
+    # os.access doesn't work on Windows: http://bugs.python.org/issue2528
+    # and we can't use tempfile: http://bugs.python.org/issue22107
+    basename = "accesstest_deleteme_fishfingers_custard_"
+    alphabet = "abcdefghijklmnopqrstuvwxyz0123456789"
+    for _ in range(10):
+        name = basename + "".join(random.choice(alphabet) for _ in range(6))
+        file = os.path.join(path, name)
+        try:
+            fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL)
+        except FileExistsError:
+            pass
+        except PermissionError:
+            # This could be because there's a directory with the same name.
+            # But it's highly unlikely there's a directory called that,
+            # so we'll assume it's because the parent dir is not writable.
+            # This could as well be because the parent dir is not readable,
+            # due to non-privileged user access.
+            return False
+        else:
+            os.close(fd)
+            os.unlink(file)
+            return True
+
+    # This should never be reached
+    raise OSError("Unexpected condition testing for writable directory")
+
+
+def find_files(path: str, pattern: str) -> List[str]:
+    """Returns a list of absolute paths of files beneath path, recursively,
+    with filenames which match the UNIX-style shell glob pattern."""
+    result: List[str] = []
+    for root, _, files in os.walk(path):
+        matches = fnmatch.filter(files, pattern)
+        result.extend(os.path.join(root, f) for f in matches)
+    return result
+
+
+def file_size(path: str) -> Union[int, float]:
+    # If it's a symlink, return 0.
+    if os.path.islink(path):
+        return 0
+    return os.path.getsize(path)
+
+
+def format_file_size(path: str) -> str:
+    return format_size(file_size(path))
+
+
+def directory_size(path: str) -> Union[int, float]:
+    size = 0.0
+    for root, _dirs, files in os.walk(path):
+        for filename in files:
+            file_path = os.path.join(root, filename)
+            size += file_size(file_path)
+    return size
+
+
+def format_directory_size(path: str) -> str:
+    return format_size(directory_size(path))
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/filetypes.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/filetypes.py
new file mode 100644
index 0000000..5948570
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/filetypes.py
@@ -0,0 +1,27 @@
+"""Filetype information.
+"""
+
+from typing import Tuple
+
+from pip._internal.utils.misc import splitext
+
+WHEEL_EXTENSION = ".whl"
+BZ2_EXTENSIONS: Tuple[str, ...] = (".tar.bz2", ".tbz")
+XZ_EXTENSIONS: Tuple[str, ...] = (
+    ".tar.xz",
+    ".txz",
+    ".tlz",
+    ".tar.lz",
+    ".tar.lzma",
+)
+ZIP_EXTENSIONS: Tuple[str, ...] = (".zip", WHEEL_EXTENSION)
+TAR_EXTENSIONS: Tuple[str, ...] = (".tar.gz", ".tgz", ".tar")
+ARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS
+
+
+def is_archive_file(name: str) -> bool:
+    """Return True if `name` is considered an archive file."""
+    ext = splitext(name)[1].lower()
+    if ext in ARCHIVE_EXTENSIONS:
+        return True
+    return False
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/glibc.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/glibc.py
new file mode 100644
index 0000000..81342af
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/glibc.py
@@ -0,0 +1,88 @@
+import os
+import sys
+from typing import Optional, Tuple
+
+
+def glibc_version_string() -> Optional[str]:
+    "Returns glibc version string, or None if not using glibc."
+    return glibc_version_string_confstr() or glibc_version_string_ctypes()
+
+
+def glibc_version_string_confstr() -> Optional[str]:
+    "Primary implementation of glibc_version_string using os.confstr."
+    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
+    # to be broken or missing. This strategy is used in the standard library
+    # platform module:
+    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
+    if sys.platform == "win32":
+        return None
+    try:
+        gnu_libc_version = os.confstr("CS_GNU_LIBC_VERSION")
+        if gnu_libc_version is None:
+            return None
+        # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17":
+        _, version = gnu_libc_version.split()
+    except (AttributeError, OSError, ValueError):
+        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
+        return None
+    return version
+
+
+def glibc_version_string_ctypes() -> Optional[str]:
+    "Fallback implementation of glibc_version_string using ctypes."
+
+    try:
+        import ctypes
+    except ImportError:
+        return None
+
+    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+    # manpage says, "If filename is NULL, then the returned handle is for the
+    # main program". This way we can let the linker do the work to figure out
+    # which libc our process is actually using.
+    process_namespace = ctypes.CDLL(None)
+    try:
+        gnu_get_libc_version = process_namespace.gnu_get_libc_version
+    except AttributeError:
+        # Symbol doesn't exist -> therefore, we are not linked to
+        # glibc.
+        return None
+
+    # Call gnu_get_libc_version, which returns a string like "2.5"
+    gnu_get_libc_version.restype = ctypes.c_char_p
+    version_str = gnu_get_libc_version()
+    # py2 / py3 compatibility:
+    if not isinstance(version_str, str):
+        version_str = version_str.decode("ascii")
+
+    return version_str
+
+
+# platform.libc_ver regularly returns completely nonsensical glibc
+# versions. E.g. on my computer, platform says:
+#
+#   ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
+#   ('glibc', '2.7')
+#   ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
+#   ('glibc', '2.9')
+#
+# But the truth is:
+#
+#   ~$ ldd --version
+#   ldd (Debian GLIBC 2.22-11) 2.22
+#
+# This is unfortunate, because it means that the linehaul data on libc
+# versions that was generated by pip 8.1.2 and earlier is useless and
+# misleading. Solution: instead of using platform, use our code that actually
+# works.
+def libc_ver() -> Tuple[str, str]:
+    """Try to determine the glibc version
+
+    Returns a tuple of strings (lib, version) which default to empty strings
+    in case the lookup fails.
+    """
+    glibc_version = glibc_version_string()
+    if glibc_version is None:
+        return ("", "")
+    else:
+        return ("glibc", glibc_version)
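+
+
+# Illustrative results (actual values depend on the system's C library):
+#
+#     libc_ver()  # -> ("glibc", "2.31") on a glibc-based Linux, ("", "") elsewhere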
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/hashes.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/hashes.py
new file mode 100644
index 0000000..843cffc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/hashes.py
@@ -0,0 +1,151 @@
+import hashlib
+from typing import TYPE_CHECKING, BinaryIO, Dict, Iterable, List, Optional
+
+from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError
+from pip._internal.utils.misc import read_chunks
+
+if TYPE_CHECKING:
+    from hashlib import _Hash
+
+    # NoReturn introduced in 3.6.2; imported only for type checking to maintain
+    # pip compatibility with older patch versions of Python 3.6
+    from typing import NoReturn
+
+
+# The recommended hash algo of the moment. Change this whenever the state of
+# the art changes; it won't hurt backward compatibility.
+FAVORITE_HASH = "sha256"
+
+
+# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
+# Currently, those are the ones at least as collision-resistant as sha256.
+STRONG_HASHES = ["sha256", "sha384", "sha512"]
+
+
+class Hashes:
+    """A wrapper that builds multiple hashes at once and checks them against
+    known-good values
+
+    """
+
+    def __init__(self, hashes: Optional[Dict[str, List[str]]] = None) -> None:
+        """
+        :param hashes: A dict of algorithm names pointing to lists of allowed
+            hex digests
+        """
+        allowed = {}
+        if hashes is not None:
+            for alg, keys in hashes.items():
+                # Make sure values are always sorted (to ease equality checks)
+                allowed[alg] = sorted(keys)
+        self._allowed = allowed
+
+    def __and__(self, other: "Hashes") -> "Hashes":
+        if not isinstance(other, Hashes):
+            return NotImplemented
+
+        # If either of the Hashes object is entirely empty (i.e. no hash
+        # specified at all), all hashes from the other object are allowed.
+        if not other:
+            return self
+        if not self:
+            return other
+
+        # Otherwise only hashes that are present in both objects are allowed.
+        new = {}
+        for alg, values in other._allowed.items():
+            if alg not in self._allowed:
+                continue
+            new[alg] = [v for v in values if v in self._allowed[alg]]
+        return Hashes(new)
+
+    @property
+    def digest_count(self) -> int:
+        return sum(len(digests) for digests in self._allowed.values())
+
+    def is_hash_allowed(self, hash_name: str, hex_digest: str) -> bool:
+        """Return whether the given hex digest is allowed."""
+        return hex_digest in self._allowed.get(hash_name, [])
+
+    def check_against_chunks(self, chunks: Iterable[bytes]) -> None:
+        """Check good hashes against ones built from iterable of chunks of
+        data.
+
+        Raise HashMismatch if none match.
+
+        """
+        gots = {}
+        for hash_name in self._allowed.keys():
+            try:
+                gots[hash_name] = hashlib.new(hash_name)
+            except (ValueError, TypeError):
+                raise InstallationError(f"Unknown hash name: {hash_name}")
+
+        for chunk in chunks:
+            for hash in gots.values():
+                hash.update(chunk)
+
+        for hash_name, got in gots.items():
+            if got.hexdigest() in self._allowed[hash_name]:
+                return
+        self._raise(gots)
+
+    def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn":
+        raise HashMismatch(self._allowed, gots)
+
+    def check_against_file(self, file: BinaryIO) -> None:
+        """Check good hashes against a file-like object
+
+        Raise HashMismatch if none match.
+
+        """
+        return self.check_against_chunks(read_chunks(file))
+
+    def check_against_path(self, path: str) -> None:
+        with open(path, "rb") as file:
+            return self.check_against_file(file)
+
+    def has_one_of(self, hashes: Dict[str, str]) -> bool:
+        """Return whether any of the given hashes are allowed."""
+        for hash_name, hex_digest in hashes.items():
+            if self.is_hash_allowed(hash_name, hex_digest):
+                return True
+        return False
+
+    def __bool__(self) -> bool:
+        """Return whether I know any known-good hashes."""
+        return bool(self._allowed)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Hashes):
+            return NotImplemented
+        return self._allowed == other._allowed
+
+    def __hash__(self) -> int:
+        return hash(
+            ",".join(
+                sorted(
+                    ":".join((alg, digest))
+                    for alg, digest_list in self._allowed.items()
+                    for digest in digest_list
+                )
+            )
+        )
+
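+# A minimal usage sketch (placeholder digest and file name):
+#
+#     good = Hashes({"sha256": ["<expected-hex-digest>"]})
+#     good.check_against_path("pkg-1.0-py3-none-any.whl")  # raises HashMismatch if nothing matches
+#     bool(Hashes())  # False: an empty Hashes object knows no good digests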
+
+class MissingHashes(Hashes):
+    """A workalike for Hashes used when we're missing a hash for a requirement
+
+    It computes the actual hash of the requirement and raises a HashMissing
+    exception showing it to the user.
+
+    """
+
+    def __init__(self) -> None:
+        """Don't offer the ``hashes`` kwarg."""
+        # Pass our favorite hash in to generate a "gotten hash". With the
+        # empty list, it will never match, so an error will always raise.
+        super().__init__(hashes={FAVORITE_HASH: []})
+
+    def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn":
+        raise HashMissing(gots[FAVORITE_HASH].hexdigest())
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/logging.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/logging.py
new file mode 100644
index 0000000..95982df
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/logging.py
@@ -0,0 +1,348 @@
+import contextlib
+import errno
+import logging
+import logging.config
+import logging.handlers
+import os
+import sys
+import threading
+from dataclasses import dataclass
+from io import TextIOWrapper
+from logging import Filter
+from typing import Any, ClassVar, Generator, List, Optional, TextIO, Type
+
+from pip._vendor.rich.console import (
+    Console,
+    ConsoleOptions,
+    ConsoleRenderable,
+    RenderableType,
+    RenderResult,
+    RichCast,
+)
+from pip._vendor.rich.highlighter import NullHighlighter
+from pip._vendor.rich.logging import RichHandler
+from pip._vendor.rich.segment import Segment
+from pip._vendor.rich.style import Style
+
+from pip._internal.utils._log import VERBOSE, getLogger
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX
+from pip._internal.utils.misc import ensure_dir
+
+_log_state = threading.local()
+subprocess_logger = getLogger("pip.subprocessor")
+
+
+class BrokenStdoutLoggingError(Exception):
+    """
+    Raised if BrokenPipeError occurs for the stdout stream while logging.
+    """
+
+
+def _is_broken_pipe_error(exc_class: Type[BaseException], exc: BaseException) -> bool:
+    if exc_class is BrokenPipeError:
+        return True
+
+    # On Windows, a broken pipe can show up as EINVAL rather than EPIPE:
+    # https://bugs.python.org/issue19612
+    # https://bugs.python.org/issue30418
+    if not WINDOWS:
+        return False
+
+    return isinstance(exc, OSError) and exc.errno in (errno.EINVAL, errno.EPIPE)
+
+
+@contextlib.contextmanager
+def indent_log(num: int = 2) -> Generator[None, None, None]:
+    """
+    A context manager which will cause the log output to be indented for any
+    log messages emitted inside it.
+    """
+    # For thread-safety
+    _log_state.indentation = get_indentation()
+    _log_state.indentation += num
+    try:
+        yield
+    finally:
+        _log_state.indentation -= num
+
+
+def get_indentation() -> int:
+    return getattr(_log_state, "indentation", 0)
+
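+# A minimal usage sketch (logger name is arbitrary): records emitted inside the
+# block pick up an extra two-space indent via IndentingFormatter below.
+#
+#     logger = getLogger(__name__)
+#     logger.info("Collecting example-package")
+#     with indent_log():
+#         logger.info("Downloading example-package-1.0.tar.gz")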
+
+class IndentingFormatter(logging.Formatter):
+    default_time_format = "%Y-%m-%dT%H:%M:%S"
+
+    def __init__(
+        self,
+        *args: Any,
+        add_timestamp: bool = False,
+        **kwargs: Any,
+    ) -> None:
+        """
+        A logging.Formatter that obeys the indent_log() context manager.
+
+        :param add_timestamp: A bool indicating output lines should be prefixed
+            with their record's timestamp.
+        """
+        self.add_timestamp = add_timestamp
+        super().__init__(*args, **kwargs)
+
+    def get_message_start(self, formatted: str, levelno: int) -> str:
+        """
+        Return the start of the formatted log message (not counting the
+        prefix to add to each line).
+        """
+        if levelno < logging.WARNING:
+            return ""
+        if formatted.startswith(DEPRECATION_MSG_PREFIX):
+            # Then the message already has a prefix.  We don't want it to
+            # look like "WARNING: DEPRECATION: ...."
+            return ""
+        if levelno < logging.ERROR:
+            return "WARNING: "
+
+        return "ERROR: "
+
+    def format(self, record: logging.LogRecord) -> str:
+        """
+        Calls the standard formatter, but will indent all of the log message
+        lines by our current indentation level.
+        """
+        formatted = super().format(record)
+        message_start = self.get_message_start(formatted, record.levelno)
+        formatted = message_start + formatted
+
+        prefix = ""
+        if self.add_timestamp:
+            prefix = f"{self.formatTime(record)} "
+        prefix += " " * get_indentation()
+        formatted = "".join([prefix + line for line in formatted.splitlines(True)])
+        return formatted
+
+
+@dataclass
+class IndentedRenderable:
+    renderable: RenderableType
+    indent: int
+
+    def __rich_console__(
+        self, console: Console, options: ConsoleOptions
+    ) -> RenderResult:
+        segments = console.render(self.renderable, options)
+        lines = Segment.split_lines(segments)
+        for line in lines:
+            yield Segment(" " * self.indent)
+            yield from line
+            yield Segment("\n")
+
+
+class RichPipStreamHandler(RichHandler):
+    KEYWORDS: ClassVar[Optional[List[str]]] = []
+
+    def __init__(self, stream: Optional[TextIO], no_color: bool) -> None:
+        super().__init__(
+            console=Console(file=stream, no_color=no_color, soft_wrap=True),
+            show_time=False,
+            show_level=False,
+            show_path=False,
+            highlighter=NullHighlighter(),
+        )
+
+    # Our custom override on Rich's logger, to make things work as we need them to.
+    def emit(self, record: logging.LogRecord) -> None:
+        style: Optional[Style] = None
+
+        # If we are given a diagnostic error to present, present it with indentation.
+        assert isinstance(record.args, tuple)
+        if getattr(record, "rich", False):
+            (rich_renderable,) = record.args
+            assert isinstance(
+                rich_renderable, (ConsoleRenderable, RichCast, str)
+            ), f"{rich_renderable} is not rich-console-renderable"
+
+            renderable: RenderableType = IndentedRenderable(
+                rich_renderable, indent=get_indentation()
+            )
+        else:
+            message = self.format(record)
+            renderable = self.render_message(record, message)
+            if record.levelno is not None:
+                if record.levelno >= logging.ERROR:
+                    style = Style(color="red")
+                elif record.levelno >= logging.WARNING:
+                    style = Style(color="yellow")
+
+        try:
+            self.console.print(renderable, overflow="ignore", crop=False, style=style)
+        except Exception:
+            self.handleError(record)
+
+    def handleError(self, record: logging.LogRecord) -> None:
+        """Called when logging is unable to log some output."""
+
+        exc_class, exc = sys.exc_info()[:2]
+        # If a broken pipe occurred while calling write() or flush() on the
+        # stdout stream in logging's Handler.emit(), then raise our special
+        # exception so we can handle it in main() instead of logging the
+        # broken pipe error and continuing.
+        if (
+            exc_class
+            and exc
+            and self.console.file is sys.stdout
+            and _is_broken_pipe_error(exc_class, exc)
+        ):
+            raise BrokenStdoutLoggingError()
+
+        return super().handleError(record)
+
+
+class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
+    def _open(self) -> TextIOWrapper:
+        ensure_dir(os.path.dirname(self.baseFilename))
+        return super()._open()
+
+
+class MaxLevelFilter(Filter):
+    def __init__(self, level: int) -> None:
+        self.level = level
+
+    def filter(self, record: logging.LogRecord) -> bool:
+        return record.levelno < self.level
+
+
+class ExcludeLoggerFilter(Filter):
+
+    """
+    A logging Filter that excludes records from a logger (or its children).
+    """
+
+    def filter(self, record: logging.LogRecord) -> bool:
+        # The base Filter class allows only records from a logger (or its
+        # children).
+        return not super().filter(record)
+
+
+def setup_logging(verbosity: int, no_color: bool, user_log_file: Optional[str]) -> int:
+    """Configures and sets up all of the logging
+
+    Returns the requested logging level, as its integer value.
+    """
+
+    # Determine the level to be logging at.
+    if verbosity >= 2:
+        level_number = logging.DEBUG
+    elif verbosity == 1:
+        level_number = VERBOSE
+    elif verbosity == -1:
+        level_number = logging.WARNING
+    elif verbosity == -2:
+        level_number = logging.ERROR
+    elif verbosity <= -3:
+        level_number = logging.CRITICAL
+    else:
+        level_number = logging.INFO
+
+    level = logging.getLevelName(level_number)
+
+    # The "root" logger should match the "console" level *unless* we also need
+    # to log to a user log file.
+    include_user_log = user_log_file is not None
+    if include_user_log:
+        additional_log_file = user_log_file
+        root_level = "DEBUG"
+    else:
+        additional_log_file = "/dev/null"
+        root_level = level
+
+    # For vendored libraries, disable any logging besides WARNING unless
+    # DEBUG-level logging is enabled.
+    vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"
+
+    # Shorthands for clarity
+    log_streams = {
+        "stdout": "ext://sys.stdout",
+        "stderr": "ext://sys.stderr",
+    }
+    handler_classes = {
+        "stream": "pip._internal.utils.logging.RichPipStreamHandler",
+        "file": "pip._internal.utils.logging.BetterRotatingFileHandler",
+    }
+    handlers = ["console", "console_errors", "console_subprocess"] + (
+        ["user_log"] if include_user_log else []
+    )
+
+    logging.config.dictConfig(
+        {
+            "version": 1,
+            "disable_existing_loggers": False,
+            "filters": {
+                "exclude_warnings": {
+                    "()": "pip._internal.utils.logging.MaxLevelFilter",
+                    "level": logging.WARNING,
+                },
+                "restrict_to_subprocess": {
+                    "()": "logging.Filter",
+                    "name": subprocess_logger.name,
+                },
+                "exclude_subprocess": {
+                    "()": "pip._internal.utils.logging.ExcludeLoggerFilter",
+                    "name": subprocess_logger.name,
+                },
+            },
+            "formatters": {
+                "indent": {
+                    "()": IndentingFormatter,
+                    "format": "%(message)s",
+                },
+                "indent_with_timestamp": {
+                    "()": IndentingFormatter,
+                    "format": "%(message)s",
+                    "add_timestamp": True,
+                },
+            },
+            "handlers": {
+                "console": {
+                    "level": level,
+                    "class": handler_classes["stream"],
+                    "no_color": no_color,
+                    "stream": log_streams["stdout"],
+                    "filters": ["exclude_subprocess", "exclude_warnings"],
+                    "formatter": "indent",
+                },
+                "console_errors": {
+                    "level": "WARNING",
+                    "class": handler_classes["stream"],
+                    "no_color": no_color,
+                    "stream": log_streams["stderr"],
+                    "filters": ["exclude_subprocess"],
+                    "formatter": "indent",
+                },
+                # A handler responsible for logging to the console messages
+                # from the "subprocessor" logger.
+                "console_subprocess": {
+                    "level": level,
+                    "class": handler_classes["stream"],
+                    "stream": log_streams["stderr"],
+                    "no_color": no_color,
+                    "filters": ["restrict_to_subprocess"],
+                    "formatter": "indent",
+                },
+                "user_log": {
+                    "level": "DEBUG",
+                    "class": handler_classes["file"],
+                    "filename": additional_log_file,
+                    "encoding": "utf-8",
+                    "delay": True,
+                    "formatter": "indent_with_timestamp",
+                },
+            },
+            "root": {
+                "level": root_level,
+                "handlers": handlers,
+            },
+            "loggers": {"pip._vendor": {"level": vendored_log_level}},
+        }
+    )
+
+    return level_number
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/misc.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/misc.py
new file mode 100644
index 0000000..1ad3f61
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/misc.py
@@ -0,0 +1,783 @@
+import contextlib
+import errno
+import getpass
+import hashlib
+import io
+import logging
+import os
+import posixpath
+import shutil
+import stat
+import sys
+import sysconfig
+import urllib.parse
+from functools import partial
+from io import StringIO
+from itertools import filterfalse, tee, zip_longest
+from pathlib import Path
+from types import FunctionType, TracebackType
+from typing import (
+    Any,
+    BinaryIO,
+    Callable,
+    ContextManager,
+    Dict,
+    Generator,
+    Iterable,
+    Iterator,
+    List,
+    Optional,
+    TextIO,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+)
+
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.pyproject_hooks import BuildBackendHookCaller
+from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed
+
+from pip import __version__
+from pip._internal.exceptions import CommandError, ExternallyManagedEnvironment
+from pip._internal.locations import get_major_minor_version
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+__all__ = [
+    "rmtree",
+    "display_path",
+    "backup_dir",
+    "ask",
+    "splitext",
+    "format_size",
+    "is_installable_dir",
+    "normalize_path",
+    "renames",
+    "get_prog",
+    "captured_stdout",
+    "ensure_dir",
+    "remove_auth_from_url",
+    "check_externally_managed",
+    "ConfiguredBuildBackendHookCaller",
+]
+
+logger = logging.getLogger(__name__)
+
+T = TypeVar("T")
+ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType]
+VersionInfo = Tuple[int, int, int]
+NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]]
+OnExc = Callable[[FunctionType, Path, BaseException], Any]
+OnErr = Callable[[FunctionType, Path, ExcInfo], Any]
+
+
+def get_pip_version() -> str:
+    pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..")
+    pip_pkg_dir = os.path.abspath(pip_pkg_dir)
+
+    return f"pip {__version__} from {pip_pkg_dir} (python {get_major_minor_version()})"
+
+
+def normalize_version_info(py_version_info: Tuple[int, ...]) -> Tuple[int, int, int]:
+    """
+    Convert a tuple of ints representing a Python version to one of length
+    three.
+
+    :param py_version_info: a tuple of ints representing a Python version,
+        or None to specify no version. The tuple can have any length.
+
+    :return: a tuple of length three if `py_version_info` is non-None.
+        Otherwise, return `py_version_info` unchanged (i.e. None).
+    """
+    if len(py_version_info) < 3:
+        py_version_info += (3 - len(py_version_info)) * (0,)
+    elif len(py_version_info) > 3:
+        py_version_info = py_version_info[:3]
+
+    return cast("VersionInfo", py_version_info)
+
+
+def ensure_dir(path: str) -> None:
+    """os.makedirs, but without raising if the directory already exists."""
+    try:
+        os.makedirs(path)
+    except OSError as e:
+        # Windows can raise spurious ENOTEMPTY errors. See #6426.
+        if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY:
+            raise
+
+
+def get_prog() -> str:
+    try:
+        prog = os.path.basename(sys.argv[0])
+        if prog in ("__main__.py", "-c"):
+            return f"{sys.executable} -m pip"
+        else:
+            return prog
+    except (AttributeError, TypeError, IndexError):
+        pass
+    return "pip"
+
+
+# Retry every half second for up to 3 seconds
+# Tenacity raises RetryError by default, explicitly raise the original exception
+@retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5))
+def rmtree(
+    dir: str,
+    ignore_errors: bool = False,
+    onexc: Optional[OnExc] = None,
+) -> None:
+    if ignore_errors:
+        onexc = _onerror_ignore
+    if onexc is None:
+        onexc = _onerror_reraise
+    handler: OnErr = partial(
+        # `[func, path, Union[ExcInfo, BaseException]] -> Any` is equivalent to
+        # `Union[([func, path, ExcInfo] -> Any), ([func, path, BaseException] -> Any)]`.
+        cast(Union[OnExc, OnErr], rmtree_errorhandler),
+        onexc=onexc,
+    )
+    if sys.version_info >= (3, 12):
+        # See https://docs.python.org/3.12/whatsnew/3.12.html#shutil.
+        shutil.rmtree(dir, onexc=handler)  # type: ignore
+    else:
+        shutil.rmtree(dir, onerror=handler)  # type: ignore
+
+
+def _onerror_ignore(*_args: Any) -> None:
+    pass
+
+
+def _onerror_reraise(*_args: Any) -> None:
+    raise
+
+
+def rmtree_errorhandler(
+    func: FunctionType,
+    path: Path,
+    exc_info: Union[ExcInfo, BaseException],
+    *,
+    onexc: OnExc = _onerror_reraise,
+) -> None:
+    """
+    `rmtree` error handler to 'force' a file remove (i.e. like `rm -f`).
+
+    * If a file is readonly then its write flag is set and the operation is
+      retried.
+
+    * `onexc` is the original callback from `rmtree(..., onexc=onexc)` that is
+      chained at the end if the "rm -f" still fails.
+    """
+    try:
+        st_mode = os.stat(path).st_mode
+    except OSError:
+        # it's equivalent to os.path.exists
+        return
+
+    if not st_mode & stat.S_IWRITE:
+        # convert to read/write
+        try:
+            os.chmod(path, st_mode | stat.S_IWRITE)
+        except OSError:
+            pass
+        else:
+            # use the original function to repeat the operation
+            try:
+                func(path)
+                return
+            except OSError:
+                pass
+
+    if not isinstance(exc_info, BaseException):
+        _, exc_info, _ = exc_info
+    onexc(func, path, exc_info)
+
+
+def display_path(path: str) -> str:
+    """Gives the display value for a given path, making it relative to cwd
+    if possible."""
+    path = os.path.normcase(os.path.abspath(path))
+    if path.startswith(os.getcwd() + os.path.sep):
+        path = "." + path[len(os.getcwd()) :]
+    return path
+
+
+def backup_dir(dir: str, ext: str = ".bak") -> str:
+    """Figure out the name of a directory to back up the given dir to
+    (adding .bak, .bak2, etc)"""
+    n = 1
+    extension = ext
+    while os.path.exists(dir + extension):
+        n += 1
+        extension = ext + str(n)
+    return dir + extension
+
+
+def ask_path_exists(message: str, options: Iterable[str]) -> str:
+    for action in os.environ.get("PIP_EXISTS_ACTION", "").split():
+        if action in options:
+            return action
+    return ask(message, options)
+
+
+def _check_no_input(message: str) -> None:
+    """Raise an error if no input is allowed."""
+    if os.environ.get("PIP_NO_INPUT"):
+        raise Exception(
+            f"No input was expected ($PIP_NO_INPUT set); question: {message}"
+        )
+
+
+def ask(message: str, options: Iterable[str]) -> str:
+    """Ask the message interactively, with the given possible responses"""
+    while 1:
+        _check_no_input(message)
+        response = input(message)
+        response = response.strip().lower()
+        if response not in options:
+            print(
+                "Your response ({!r}) was not one of the expected responses: "
+                "{}".format(response, ", ".join(options))
+            )
+        else:
+            return response
+
+
+def ask_input(message: str) -> str:
+    """Ask for input interactively."""
+    _check_no_input(message)
+    return input(message)
+
+
+def ask_password(message: str) -> str:
+    """Ask for a password interactively."""
+    _check_no_input(message)
+    return getpass.getpass(message)
+
+
+def strtobool(val: str) -> int:
+    """Convert a string representation of truth to true (1) or false (0).
+
+    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
+    are 'n', 'no', 'f', 'false', 'off', and '0'.  Raises ValueError if
+    'val' is anything else.
+    """
+    val = val.lower()
+    if val in ("y", "yes", "t", "true", "on", "1"):
+        return 1
+    elif val in ("n", "no", "f", "false", "off", "0"):
+        return 0
+    else:
+        raise ValueError(f"invalid truth value {val!r}")
+
+
+def format_size(bytes: float) -> str:
+    if bytes > 1000 * 1000:
+        return f"{bytes / 1000.0 / 1000:.1f} MB"
+    elif bytes > 10 * 1000:
+        return f"{int(bytes / 1000)} kB"
+    elif bytes > 1000:
+        return f"{bytes / 1000.0:.1f} kB"
+    else:
+        return f"{int(bytes)} bytes"
+
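+# Illustrative values for the thresholds above:
+#
+#     format_size(1_200_000)  # -> "1.2 MB"
+#     format_size(20_000)     # -> "20 kB"
+#     format_size(1_500)      # -> "1.5 kB"
+#     format_size(999)        # -> "999 bytes"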
+
+def tabulate(rows: Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]]:
+    """Return a list of formatted rows and a list of column sizes.
+
+    For example::
+
+    >>> tabulate([['foobar', 2000], [0xdeadbeef]])
+    (['foobar     2000', '3735928559'], [10, 4])
+    """
+    rows = [tuple(map(str, row)) for row in rows]
+    sizes = [max(map(len, col)) for col in zip_longest(*rows, fillvalue="")]
+    table = [" ".join(map(str.ljust, row, sizes)).rstrip() for row in rows]
+    return table, sizes
+
+
+def is_installable_dir(path: str) -> bool:
+    """Is path is a directory containing pyproject.toml or setup.py?
+
+    If pyproject.toml exists, this is a PEP 517 project. Otherwise we look for
+    a legacy setuptools layout by identifying setup.py. We don't check for the
+    setup.cfg because using it without setup.py is only available for PEP 517
+    projects, which are already covered by the pyproject.toml check.
+    """
+    if not os.path.isdir(path):
+        return False
+    if os.path.isfile(os.path.join(path, "pyproject.toml")):
+        return True
+    if os.path.isfile(os.path.join(path, "setup.py")):
+        return True
+    return False
+
+
+def read_chunks(
+    file: BinaryIO, size: int = io.DEFAULT_BUFFER_SIZE
+) -> Generator[bytes, None, None]:
+    """Yield pieces of data from a file-like object until EOF."""
+    while True:
+        chunk = file.read(size)
+        if not chunk:
+            break
+        yield chunk
+
+
+def normalize_path(path: str, resolve_symlinks: bool = True) -> str:
+    """
+    Convert a path to its canonical, case-normalized, absolute version.
+
+    """
+    path = os.path.expanduser(path)
+    if resolve_symlinks:
+        path = os.path.realpath(path)
+    else:
+        path = os.path.abspath(path)
+    return os.path.normcase(path)
+
+
+def splitext(path: str) -> Tuple[str, str]:
+    """Like os.path.splitext, but take off .tar too"""
+    base, ext = posixpath.splitext(path)
+    if base.lower().endswith(".tar"):
+        ext = base[-4:] + ext
+        base = base[:-4]
+    return base, ext
+
+
+def renames(old: str, new: str) -> None:
+    """Like os.renames(), but handles renaming across devices."""
+    # Implementation borrowed from os.renames().
+    head, tail = os.path.split(new)
+    if head and tail and not os.path.exists(head):
+        os.makedirs(head)
+
+    shutil.move(old, new)
+
+    head, tail = os.path.split(old)
+    if head and tail:
+        try:
+            os.removedirs(head)
+        except OSError:
+            pass
+
+
+def is_local(path: str) -> bool:
+    """
+    Return True if path is within sys.prefix, if we're running in a virtualenv.
+
+    If we're not in a virtualenv, all paths are considered "local."
+
+    Caution: this function assumes the head of path has been normalized
+    with normalize_path.
+    """
+    if not running_under_virtualenv():
+        return True
+    return path.startswith(normalize_path(sys.prefix))
+
+
+def write_output(msg: Any, *args: Any) -> None:
+    logger.info(msg, *args)
+
+
+class StreamWrapper(StringIO):
+    orig_stream: TextIO
+
+    @classmethod
+    def from_stream(cls, orig_stream: TextIO) -> "StreamWrapper":
+        ret = cls()
+        ret.orig_stream = orig_stream
+        return ret
+
+    # compileall.compile_dir() needs stdout.encoding to print to stdout
+    # type ignore is because TextIOBase.encoding is writeable
+    @property
+    def encoding(self) -> str:  # type: ignore
+        return self.orig_stream.encoding
+
+
+@contextlib.contextmanager
+def captured_output(stream_name: str) -> Generator[StreamWrapper, None, None]:
+    """Return a context manager used by captured_stdout/stdin/stderr
+    that temporarily replaces the sys stream *stream_name* with a StringIO.
+
+    Taken from Lib/support/__init__.py in the CPython repo.
+    """
+    orig_stdout = getattr(sys, stream_name)
+    setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout))
+    try:
+        yield getattr(sys, stream_name)
+    finally:
+        setattr(sys, stream_name, orig_stdout)
+
+
+def captured_stdout() -> ContextManager[StreamWrapper]:
+    """Capture the output of sys.stdout:
+
+       with captured_stdout() as stdout:
+           print('hello')
+       self.assertEqual(stdout.getvalue(), 'hello\n')
+
+    Taken from Lib/support/__init__.py in the CPython repo.
+    """
+    return captured_output("stdout")
+
+
+def captured_stderr() -> ContextManager[StreamWrapper]:
+    """
+    See captured_stdout().
+    """
+    return captured_output("stderr")
+
+
+# Simulates an enum
+def enum(*sequential: Any, **named: Any) -> Type[Any]:
+    enums = dict(zip(sequential, range(len(sequential))), **named)
+    reverse = {value: key for key, value in enums.items()}
+    enums["reverse_mapping"] = reverse
+    return type("Enum", (), enums)
+
+
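+# Sketch of what the enum() helper above produces: positional names are
+# numbered from zero, keyword names keep their given values, and
+# reverse_mapping goes from value back to name. The "Color" names are made up.
+def _example_enum_usage() -> Tuple[int, str]:
+    Color = enum("RED", "GREEN", BLUE="blue")
+    # Color.RED == 0, Color.GREEN == 1, Color.BLUE == "blue"
+    return Color.GREEN, Color.reverse_mapping[0]  # (1, "RED")
+
+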
+def build_netloc(host: str, port: Optional[int]) -> str:
+    """
+    Build a netloc from a host-port pair
+    """
+    if port is None:
+        return host
+    if ":" in host:
+        # Only wrap host with square brackets when it is IPv6
+        host = f"[{host}]"
+    return f"{host}:{port}"
+
+
+def build_url_from_netloc(netloc: str, scheme: str = "https") -> str:
+    """
+    Build a full URL from a netloc.
+    """
+    if netloc.count(":") >= 2 and "@" not in netloc and "[" not in netloc:
+        # It must be a bare IPv6 address, so wrap it with brackets.
+        netloc = f"[{netloc}]"
+    return f"{scheme}://{netloc}"
+
+
+def parse_netloc(netloc: str) -> Tuple[Optional[str], Optional[int]]:
+    """
+    Return the host-port pair from a netloc.
+    """
+    url = build_url_from_netloc(netloc)
+    parsed = urllib.parse.urlparse(url)
+    return parsed.hostname, parsed.port
+
+
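+# Round-trip sketch for the two netloc helpers above: an IPv6 host is wrapped
+# in brackets by build_netloc() and recovered unbracketed by parse_netloc().
+# The address and port are illustrative.
+def _example_netloc_roundtrip() -> Tuple[Optional[str], Optional[int]]:
+    netloc = build_netloc("2001:db8::1", 8080)  # "[2001:db8::1]:8080"
+    return parse_netloc(netloc)  # ("2001:db8::1", 8080)
+
+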
+def split_auth_from_netloc(netloc: str) -> NetlocTuple:
+    """
+    Parse out and remove the auth information from a netloc.
+
+    Returns: (netloc, (username, password)).
+    """
+    if "@" not in netloc:
+        return netloc, (None, None)
+
+    # Split from the right because that's how urllib.parse.urlsplit()
+    # behaves if more than one @ is present (which can be checked using
+    # the password attribute of urlsplit()'s return value).
+    auth, netloc = netloc.rsplit("@", 1)
+    pw: Optional[str] = None
+    if ":" in auth:
+        # Split from the left because that's how urllib.parse.urlsplit()
+        # behaves if more than one : is present (which again can be checked
+        # using the password attribute of the return value)
+        user, pw = auth.split(":", 1)
+    else:
+        user, pw = auth, None
+
+    user = urllib.parse.unquote(user)
+    if pw is not None:
+        pw = urllib.parse.unquote(pw)
+
+    return netloc, (user, pw)
+
+
+def redact_netloc(netloc: str) -> str:
+    """
+    Replace the sensitive data in a netloc with "****", if it exists.
+
+    For example:
+        - "user:pass@example.com" returns "user:****@example.com"
+        - "accesstoken@example.com" returns "****@example.com"
+    """
+    netloc, (user, password) = split_auth_from_netloc(netloc)
+    if user is None:
+        return netloc
+    if password is None:
+        user = "****"
+        password = ""
+    else:
+        user = urllib.parse.quote(user)
+        password = ":****"
+    return f"{user}{password}@{netloc}"
+
+
+def _transform_url(
+    url: str, transform_netloc: Callable[[str], Tuple[Any, ...]]
+) -> Tuple[str, NetlocTuple]:
+    """Transform and replace netloc in a url.
+
+    transform_netloc is a function taking the netloc and returning a
+    tuple. The first element of this tuple is the new netloc. The
+    entire tuple is returned.
+
+    Returns a tuple containing the transformed url as item 0 and the
+    original tuple returned by transform_netloc as item 1.
+    """
+    purl = urllib.parse.urlsplit(url)
+    netloc_tuple = transform_netloc(purl.netloc)
+    # stripped url
+    url_pieces = (purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment)
+    surl = urllib.parse.urlunsplit(url_pieces)
+    return surl, cast("NetlocTuple", netloc_tuple)
+
+
+def _get_netloc(netloc: str) -> NetlocTuple:
+    return split_auth_from_netloc(netloc)
+
+
+def _redact_netloc(netloc: str) -> Tuple[str]:
+    return (redact_netloc(netloc),)
+
+
+def split_auth_netloc_from_url(
+    url: str,
+) -> Tuple[str, str, Tuple[Optional[str], Optional[str]]]:
+    """
+    Parse a url into separate netloc, auth, and url with no auth.
+
+    Returns: (url_without_auth, netloc, (username, password))
+    """
+    url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc)
+    return url_without_auth, netloc, auth
+
+
+def remove_auth_from_url(url: str) -> str:
+    """Return a copy of url with 'username:password@' removed."""
+    # username/pass params are passed to subversion through flags
+    # and are not recognized in the url.
+    return _transform_url(url, _get_netloc)[0]
+
+
+def redact_auth_from_url(url: str) -> str:
+    """Replace the password in a given url with ****."""
+    return _transform_url(url, _redact_netloc)[0]
+
+
+def redact_auth_from_requirement(req: Requirement) -> str:
+    """Replace the password in a given requirement url with ****."""
+    if not req.url:
+        return str(req)
+    return str(req).replace(req.url, redact_auth_from_url(req.url))
+
+
+class HiddenText:
+    def __init__(self, secret: str, redacted: str) -> None:
+        self.secret = secret
+        self.redacted = redacted
+
+    def __repr__(self) -> str:
+        return f""
+
+    def __str__(self) -> str:
+        return self.redacted
+
+    # This is useful for testing.
+    def __eq__(self, other: Any) -> bool:
+        if type(self) != type(other):
+            return False
+
+        # The string being used for redaction doesn't also have to match,
+        # just the raw, original string.
+        return self.secret == other.secret
+
+
+def hide_value(value: str) -> HiddenText:
+    return HiddenText(value, redacted="****")
+
+
+def hide_url(url: str) -> HiddenText:
+    redacted = redact_auth_from_url(url)
+    return HiddenText(url, redacted=redacted)
+
+
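+# Sketch of how hide_url() pairs with HiddenText: the secret form stays
+# available for real use, while str() (and therefore logging) only ever sees
+# the redacted URL. The credentials in the example URL are made up.
+def _example_hide_url() -> Tuple[str, str]:
+    hidden = hide_url("https://user:s3cret@example.com/simple")
+    # str(hidden) is safe to log: "https://user:****@example.com/simple"
+    return hidden.secret, str(hidden)
+
+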
+def protect_pip_from_modification_on_windows(modifying_pip: bool) -> None:
+    """Protection of pip.exe from modification on Windows
+
+    On Windows, any operation modifying pip should be run as:
+        python -m pip ...
+    """
+    pip_names = [
+        "pip",
+        f"pip{sys.version_info.major}",
+        f"pip{sys.version_info.major}.{sys.version_info.minor}",
+    ]
+
+    # See https://github.com/pypa/pip/issues/1299 for more discussion
+    should_show_use_python_msg = (
+        modifying_pip and WINDOWS and os.path.basename(sys.argv[0]) in pip_names
+    )
+
+    if should_show_use_python_msg:
+        new_command = [sys.executable, "-m", "pip"] + sys.argv[1:]
+        raise CommandError(
+            "To modify pip, please run the following command:\n{}".format(
+                " ".join(new_command)
+            )
+        )
+
+
+def check_externally_managed() -> None:
+    """Check whether the current environment is externally managed.
+
+    If the ``EXTERNALLY-MANAGED`` config file is found, the current environment
+    is considered externally managed, and an ExternallyManagedEnvironment is
+    raised.
+    """
+    if running_under_virtualenv():
+        return
+    marker = os.path.join(sysconfig.get_path("stdlib"), "EXTERNALLY-MANAGED")
+    if not os.path.isfile(marker):
+        return
+    raise ExternallyManagedEnvironment.from_config(marker)
+
+
+def is_console_interactive() -> bool:
+    """Is this console interactive?"""
+    return sys.stdin is not None and sys.stdin.isatty()
+
+
+def hash_file(path: str, blocksize: int = 1 << 20) -> Tuple[Any, int]:
+    """Return (hash, length) for path using hashlib.sha256()"""
+
+    h = hashlib.sha256()
+    length = 0
+    with open(path, "rb") as f:
+        for block in read_chunks(f, size=blocksize):
+            length += len(block)
+            h.update(block)
+    return h, length
+
+
+def pairwise(iterable: Iterable[Any]) -> Iterator[Tuple[Any, Any]]:
+    """
+    Return paired elements.
+
+    For example:
+        s -> (s0, s1), (s2, s3), (s4, s5), ...
+    """
+    iterable = iter(iterable)
+    return zip_longest(iterable, iterable)
+
+
+def partition(
+    pred: Callable[[T], bool],
+    iterable: Iterable[T],
+) -> Tuple[Iterable[T], Iterable[T]]:
+    """
+    Use a predicate to partition entries into false entries and true entries,
+    like
+
+        partition(is_odd, range(10)) --> 0 2 4 6 8   and  1 3 5 7 9
+    """
+    t1, t2 = tee(iterable)
+    return filterfalse(pred, t1), filter(pred, t2)
+
+
+class ConfiguredBuildBackendHookCaller(BuildBackendHookCaller):
+    def __init__(
+        self,
+        config_holder: Any,
+        source_dir: str,
+        build_backend: str,
+        backend_path: Optional[str] = None,
+        runner: Optional[Callable[..., None]] = None,
+        python_executable: Optional[str] = None,
+    ):
+        super().__init__(
+            source_dir, build_backend, backend_path, runner, python_executable
+        )
+        self.config_holder = config_holder
+
+    def build_wheel(
+        self,
+        wheel_directory: str,
+        config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
+        metadata_directory: Optional[str] = None,
+    ) -> str:
+        cs = self.config_holder.config_settings
+        return super().build_wheel(
+            wheel_directory, config_settings=cs, metadata_directory=metadata_directory
+        )
+
+    def build_sdist(
+        self,
+        sdist_directory: str,
+        config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
+    ) -> str:
+        cs = self.config_holder.config_settings
+        return super().build_sdist(sdist_directory, config_settings=cs)
+
+    def build_editable(
+        self,
+        wheel_directory: str,
+        config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
+        metadata_directory: Optional[str] = None,
+    ) -> str:
+        cs = self.config_holder.config_settings
+        return super().build_editable(
+            wheel_directory, config_settings=cs, metadata_directory=metadata_directory
+        )
+
+    def get_requires_for_build_wheel(
+        self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None
+    ) -> List[str]:
+        cs = self.config_holder.config_settings
+        return super().get_requires_for_build_wheel(config_settings=cs)
+
+    def get_requires_for_build_sdist(
+        self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None
+    ) -> List[str]:
+        cs = self.config_holder.config_settings
+        return super().get_requires_for_build_sdist(config_settings=cs)
+
+    def get_requires_for_build_editable(
+        self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None
+    ) -> List[str]:
+        cs = self.config_holder.config_settings
+        return super().get_requires_for_build_editable(config_settings=cs)
+
+    def prepare_metadata_for_build_wheel(
+        self,
+        metadata_directory: str,
+        config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
+        _allow_fallback: bool = True,
+    ) -> str:
+        cs = self.config_holder.config_settings
+        return super().prepare_metadata_for_build_wheel(
+            metadata_directory=metadata_directory,
+            config_settings=cs,
+            _allow_fallback=_allow_fallback,
+        )
+
+    def prepare_metadata_for_build_editable(
+        self,
+        metadata_directory: str,
+        config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
+        _allow_fallback: bool = True,
+    ) -> str:
+        cs = self.config_holder.config_settings
+        return super().prepare_metadata_for_build_editable(
+            metadata_directory=metadata_directory,
+            config_settings=cs,
+            _allow_fallback=_allow_fallback,
+        )
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/models.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/models.py
new file mode 100644
index 0000000..b6bb21a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/models.py
@@ -0,0 +1,39 @@
+"""Utilities for defining models
+"""
+
+import operator
+from typing import Any, Callable, Type
+
+
+class KeyBasedCompareMixin:
+    """Provides comparison capabilities that is based on a key"""
+
+    __slots__ = ["_compare_key", "_defining_class"]
+
+    def __init__(self, key: Any, defining_class: Type["KeyBasedCompareMixin"]) -> None:
+        self._compare_key = key
+        self._defining_class = defining_class
+
+    def __hash__(self) -> int:
+        return hash(self._compare_key)
+
+    def __lt__(self, other: Any) -> bool:
+        return self._compare(other, operator.__lt__)
+
+    def __le__(self, other: Any) -> bool:
+        return self._compare(other, operator.__le__)
+
+    def __gt__(self, other: Any) -> bool:
+        return self._compare(other, operator.__gt__)
+
+    def __ge__(self, other: Any) -> bool:
+        return self._compare(other, operator.__ge__)
+
+    def __eq__(self, other: Any) -> bool:
+        return self._compare(other, operator.__eq__)
+
+    def _compare(self, other: Any, method: Callable[[Any, Any], bool]) -> bool:
+        if not isinstance(other, self._defining_class):
+            return NotImplemented
+
+        return method(self._compare_key, other._compare_key)
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/packaging.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/packaging.py
new file mode 100644
index 0000000..b9f6af4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/packaging.py
@@ -0,0 +1,57 @@
+import functools
+import logging
+import re
+from typing import NewType, Optional, Tuple, cast
+
+from pip._vendor.packaging import specifiers, version
+from pip._vendor.packaging.requirements import Requirement
+
+NormalizedExtra = NewType("NormalizedExtra", str)
+
+logger = logging.getLogger(__name__)
+
+
+def check_requires_python(
+    requires_python: Optional[str], version_info: Tuple[int, ...]
+) -> bool:
+    """
+    Check if the given Python version matches a "Requires-Python" specifier.
+
+    :param version_info: A 3-tuple of ints representing a Python
+        major-minor-micro version to check (e.g. `sys.version_info[:3]`).
+
+    :return: `True` if the given Python version satisfies the requirement.
+        Otherwise, return `False`.
+
+    :raises InvalidSpecifier: If `requires_python` has an invalid format.
+    """
+    if requires_python is None:
+        # The package provides no information
+        return True
+    requires_python_specifier = specifiers.SpecifierSet(requires_python)
+
+    python_version = version.parse(".".join(map(str, version_info)))
+    return python_version in requires_python_specifier
+
+
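+# Sketch of the Requires-Python check above: a 3.12 interpreter satisfies the
+# specifier ">=3.8" while a 2.7 interpreter does not. The version tuples are
+# illustrative.
+def _example_requires_python() -> bool:
+    ok = check_requires_python(">=3.8", (3, 12, 0))
+    too_old = check_requires_python(">=3.8", (2, 7, 18))
+    return ok and not too_old  # True
+
+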
+@functools.lru_cache(maxsize=512)
+def get_requirement(req_string: str) -> Requirement:
+    """Construct a packaging.Requirement object with caching"""
+    # Parsing requirement strings is expensive, and is also expected to happen
+    # with a low diversity of different arguments (at least relative the number
+    # constructed). This method adds a cache to requirement object creation to
+    # minimize repeated parsing of the same string to construct equivalent
+    # Requirement objects.
+    return Requirement(req_string)
+
+
+def safe_extra(extra: str) -> NormalizedExtra:
+    """Convert an arbitrary string to a standard 'extra' name
+
+    Any runs of non-alphanumeric characters are replaced with a single '_',
+    and the result is always lowercased.
+
+    This function is duplicated from ``pkg_resources``. Note that this is not
+    the same as either ``canonicalize_name`` or ``_egg_link_name``.
+    """
+    return cast(NormalizedExtra, re.sub("[^A-Za-z0-9.-]+", "_", extra).lower())
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/setuptools_build.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/setuptools_build.py
new file mode 100644
index 0000000..96d1b24
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/setuptools_build.py
@@ -0,0 +1,146 @@
+import sys
+import textwrap
+from typing import List, Optional, Sequence
+
+# Shim to wrap setup.py invocation with setuptools
+# Note that __file__ is handled via two {!r} *and* %r, to ensure that paths on
+# Windows are correctly handled (it should be "C:\\Users" not "C:\Users").
+_SETUPTOOLS_SHIM = textwrap.dedent(
+    """
+    exec(compile('''
+    # This is <pip-setuptools-caller> -- a caller that pip uses to run setup.py
+    #
+    # - It imports setuptools before invoking setup.py, to enable projects that directly
+    #   import from `distutils.core` to work with newer packaging standards.
+    # - It provides a clear error message when setuptools is not installed.
+    # - It sets `sys.argv[0]` to the underlying `setup.py`, when invoking `setup.py` so
+    #   setuptools doesn't think the script is `-c`. This avoids the following warning:
+    #     manifest_maker: standard file '-c' not found".
+    # - It generates a shim setup.py, for handling setup.cfg-only projects.
+    import os, sys, tokenize
+
+    try:
+        import setuptools
+    except ImportError as error:
+        print(
+            "ERROR: Can not execute `setup.py` since setuptools is not available in "
+            "the build environment.",
+            file=sys.stderr,
+        )
+        sys.exit(1)
+
+    __file__ = %r
+    sys.argv[0] = __file__
+
+    if os.path.exists(__file__):
+        filename = __file__
+        with tokenize.open(__file__) as f:
+            setup_py_code = f.read()
+    else:
+        filename = ""
+        setup_py_code = "from setuptools import setup; setup()"
+
+    exec(compile(setup_py_code, filename, "exec"))
+    ''' % ({!r},), "<pip-setuptools-caller>", "exec"))
+    """
+).rstrip()
+
+
+def make_setuptools_shim_args(
+    setup_py_path: str,
+    global_options: Optional[Sequence[str]] = None,
+    no_user_config: bool = False,
+    unbuffered_output: bool = False,
+) -> List[str]:
+    """
+    Get setuptools command arguments with shim wrapped setup file invocation.
+
+    :param setup_py_path: The path to setup.py to be wrapped.
+    :param global_options: Additional global options.
+    :param no_user_config: If True, disables personal user configuration.
+    :param unbuffered_output: If True, adds the unbuffered switch to the
+     argument list.
+    """
+    args = [sys.executable]
+    if unbuffered_output:
+        args += ["-u"]
+    args += ["-c", _SETUPTOOLS_SHIM.format(setup_py_path)]
+    if global_options:
+        args += global_options
+    if no_user_config:
+        args += ["--no-user-cfg"]
+    return args
+
+
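+# Sketch of the argument list the shim builder above produces; the setup.py
+# path is made up, and the long "-c" payload (the formatted shim source) is
+# only summarized in the comment.
+def _example_shim_args() -> List[str]:
+    args = make_setuptools_shim_args(
+        "/tmp/project/setup.py", no_user_config=True, unbuffered_output=True
+    )
+    # Roughly: [sys.executable, "-u", "-c", <shim source>, "--no-user-cfg"]
+    return args
+
+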
+def make_setuptools_bdist_wheel_args(
+    setup_py_path: str,
+    global_options: Sequence[str],
+    build_options: Sequence[str],
+    destination_dir: str,
+) -> List[str]:
+    # NOTE: Eventually, we'd want to also pass -S to the flags here, when we're
+    # isolating. Currently, it breaks Python in virtualenvs, because it
+    # relies on site.py to find parts of the standard library outside the
+    # virtualenv.
+    args = make_setuptools_shim_args(
+        setup_py_path, global_options=global_options, unbuffered_output=True
+    )
+    args += ["bdist_wheel", "-d", destination_dir]
+    args += build_options
+    return args
+
+
+def make_setuptools_clean_args(
+    setup_py_path: str,
+    global_options: Sequence[str],
+) -> List[str]:
+    args = make_setuptools_shim_args(
+        setup_py_path, global_options=global_options, unbuffered_output=True
+    )
+    args += ["clean", "--all"]
+    return args
+
+
+def make_setuptools_develop_args(
+    setup_py_path: str,
+    *,
+    global_options: Sequence[str],
+    no_user_config: bool,
+    prefix: Optional[str],
+    home: Optional[str],
+    use_user_site: bool,
+) -> List[str]:
+    assert not (use_user_site and prefix)
+
+    args = make_setuptools_shim_args(
+        setup_py_path,
+        global_options=global_options,
+        no_user_config=no_user_config,
+    )
+
+    args += ["develop", "--no-deps"]
+
+    if prefix:
+        args += ["--prefix", prefix]
+    if home is not None:
+        args += ["--install-dir", home]
+
+    if use_user_site:
+        args += ["--user", "--prefix="]
+
+    return args
+
+
+def make_setuptools_egg_info_args(
+    setup_py_path: str,
+    egg_info_dir: Optional[str],
+    no_user_config: bool,
+) -> List[str]:
+    args = make_setuptools_shim_args(setup_py_path, no_user_config=no_user_config)
+
+    args += ["egg_info"]
+
+    if egg_info_dir:
+        args += ["--egg-base", egg_info_dir]
+
+    return args
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/subprocess.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/subprocess.py
new file mode 100644
index 0000000..79580b0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/subprocess.py
@@ -0,0 +1,260 @@
+import logging
+import os
+import shlex
+import subprocess
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Iterable,
+    List,
+    Mapping,
+    Optional,
+    Union,
+)
+
+from pip._vendor.rich.markup import escape
+
+from pip._internal.cli.spinners import SpinnerInterface, open_spinner
+from pip._internal.exceptions import InstallationSubprocessError
+from pip._internal.utils.logging import VERBOSE, subprocess_logger
+from pip._internal.utils.misc import HiddenText
+
+if TYPE_CHECKING:
+    # Literal was introduced in Python 3.8.
+    #
+    # TODO: Remove `if TYPE_CHECKING` when dropping support for Python 3.7.
+    from typing import Literal
+
+CommandArgs = List[Union[str, HiddenText]]
+
+
+def make_command(*args: Union[str, HiddenText, CommandArgs]) -> CommandArgs:
+    """
+    Create a CommandArgs object.
+    """
+    command_args: CommandArgs = []
+    for arg in args:
+        # Check for list instead of CommandArgs since CommandArgs is
+        # only known during type-checking.
+        if isinstance(arg, list):
+            command_args.extend(arg)
+        else:
+            # Otherwise, arg is str or HiddenText.
+            command_args.append(arg)
+
+    return command_args
+
+
+def format_command_args(args: Union[List[str], CommandArgs]) -> str:
+    """
+    Format command arguments for display.
+    """
+    # For HiddenText arguments, display the redacted form by calling str().
+    # Also, we don't apply str() to arguments that aren't HiddenText since
+    # this can trigger a UnicodeDecodeError in Python 2 if the argument
+    # has type unicode and includes a non-ascii character.  (The type
+    # checker doesn't ensure the annotations are correct in all cases.)
+    return " ".join(
+        shlex.quote(str(arg)) if isinstance(arg, HiddenText) else shlex.quote(arg)
+        for arg in args
+    )
+
+
+def reveal_command_args(args: Union[List[str], CommandArgs]) -> List[str]:
+    """
+    Return the arguments in their raw, unredacted form.
+    """
+    return [arg.secret if isinstance(arg, HiddenText) else arg for arg in args]
+
+
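+# Sketch of how HiddenText flows through the helpers above: the formatted
+# (loggable) form shows the redaction, while reveal_command_args() restores
+# the real value for the actual Popen call. The URL and credentials are made up.
+def _example_command_redaction() -> str:
+    token_url = HiddenText(
+        "https://user:s3cret@example.com/repo.git",
+        redacted="https://user:****@example.com/repo.git",
+    )
+    cmd = make_command("git", "clone", token_url)
+    # The redacted form appears here, shell-quoted as needed.
+    return format_command_args(cmd)
+
+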
+def call_subprocess(
+    cmd: Union[List[str], CommandArgs],
+    show_stdout: bool = False,
+    cwd: Optional[str] = None,
+    on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise",
+    extra_ok_returncodes: Optional[Iterable[int]] = None,
+    extra_environ: Optional[Mapping[str, Any]] = None,
+    unset_environ: Optional[Iterable[str]] = None,
+    spinner: Optional[SpinnerInterface] = None,
+    log_failed_cmd: Optional[bool] = True,
+    stdout_only: Optional[bool] = False,
+    *,
+    command_desc: str,
+) -> str:
+    """
+    Args:
+      show_stdout: if true, use INFO to log the subprocess's stderr and
+        stdout streams.  Otherwise, use DEBUG.  Defaults to False.
+      extra_ok_returncodes: an iterable of integer return codes that are
+        acceptable, in addition to 0. Defaults to None, which means [].
+      unset_environ: an iterable of environment variable names to unset
+        prior to calling subprocess.Popen().
+      log_failed_cmd: if false, failed commands are not logged, only raised.
+      stdout_only: if true, return only stdout, else return both. When true,
+        logging of both stdout and stderr occurs when the subprocess has
+        terminated, else logging occurs as subprocess output is produced.
+    """
+    if extra_ok_returncodes is None:
+        extra_ok_returncodes = []
+    if unset_environ is None:
+        unset_environ = []
+    # Most places in pip use show_stdout=False. What this means is--
+    #
+    # - We connect the child's output (combined stderr and stdout) to a
+    #   single pipe, which we read.
+    # - We log this output to stderr at DEBUG level as it is received.
+    # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't
+    #   requested), then we show a spinner so the user can still see the
+    #   subprocess is in progress.
+    # - If the subprocess exits with an error, we log the output to stderr
+    #   at ERROR level if it hasn't already been displayed to the console
+    #   (e.g. if --verbose logging wasn't enabled).  This way we don't log
+    #   the output to the console twice.
+    #
+    # If show_stdout=True, then the above is still done, but with DEBUG
+    # replaced by INFO.
+    if show_stdout:
+        # Then log the subprocess output at INFO level.
+        log_subprocess: Callable[..., None] = subprocess_logger.info
+        used_level = logging.INFO
+    else:
+        # Then log the subprocess output using VERBOSE.  This also ensures
+        # it will be logged to the log file (aka user_log), if enabled.
+        log_subprocess = subprocess_logger.verbose
+        used_level = VERBOSE
+
+    # Whether the subprocess will be visible in the console.
+    showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level
+
+    # Only use the spinner if we're not showing the subprocess output
+    # and we have a spinner.
+    use_spinner = not showing_subprocess and spinner is not None
+
+    log_subprocess("Running command %s", command_desc)
+    env = os.environ.copy()
+    if extra_environ:
+        env.update(extra_environ)
+    for name in unset_environ:
+        env.pop(name, None)
+    try:
+        proc = subprocess.Popen(
+            # Convert HiddenText objects to the underlying str.
+            reveal_command_args(cmd),
+            stdin=subprocess.PIPE,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT if not stdout_only else subprocess.PIPE,
+            cwd=cwd,
+            env=env,
+            errors="backslashreplace",
+        )
+    except Exception as exc:
+        if log_failed_cmd:
+            subprocess_logger.critical(
+                "Error %s while executing command %s",
+                exc,
+                command_desc,
+            )
+        raise
+    all_output = []
+    if not stdout_only:
+        assert proc.stdout
+        assert proc.stdin
+        proc.stdin.close()
+        # In this mode, stdout and stderr are in the same pipe.
+        while True:
+            line: str = proc.stdout.readline()
+            if not line:
+                break
+            line = line.rstrip()
+            all_output.append(line + "\n")
+
+            # Show the line immediately.
+            log_subprocess(line)
+            # Update the spinner.
+            if use_spinner:
+                assert spinner
+                spinner.spin()
+        try:
+            proc.wait()
+        finally:
+            if proc.stdout:
+                proc.stdout.close()
+        output = "".join(all_output)
+    else:
+        # In this mode, stdout and stderr are in different pipes.
+        # We must use communicate() which is the only safe way to read both.
+        out, err = proc.communicate()
+        # log line by line to preserve pip log indenting
+        for out_line in out.splitlines():
+            log_subprocess(out_line)
+        all_output.append(out)
+        for err_line in err.splitlines():
+            log_subprocess(err_line)
+        all_output.append(err)
+        output = out
+
+    proc_had_error = proc.returncode and proc.returncode not in extra_ok_returncodes
+    if use_spinner:
+        assert spinner
+        if proc_had_error:
+            spinner.finish("error")
+        else:
+            spinner.finish("done")
+    if proc_had_error:
+        if on_returncode == "raise":
+            error = InstallationSubprocessError(
+                command_description=command_desc,
+                exit_code=proc.returncode,
+                output_lines=all_output if not showing_subprocess else None,
+            )
+            if log_failed_cmd:
+                subprocess_logger.error("%s", error, extra={"rich": True})
+                subprocess_logger.verbose(
+                    "[bold magenta]full command[/]: [blue]%s[/]",
+                    escape(format_command_args(cmd)),
+                    extra={"markup": True},
+                )
+                subprocess_logger.verbose(
+                    "[bold magenta]cwd[/]: %s",
+                    escape(cwd or "[inherit]"),
+                    extra={"markup": True},
+                )
+
+            raise error
+        elif on_returncode == "warn":
+            subprocess_logger.warning(
+                'Command "%s" had error code %s in %s',
+                command_desc,
+                proc.returncode,
+                cwd,
+            )
+        elif on_returncode == "ignore":
+            pass
+        else:
+            raise ValueError(f"Invalid value: on_returncode={on_returncode!r}")
+    return output
+
+
+def runner_with_spinner_message(message: str) -> Callable[..., None]:
+    """Provide a subprocess_runner that shows a spinner message.
+
+    Intended for use with BuildBackendHookCaller. Thus, the runner has
+    an API that matches what's expected by BuildBackendHookCaller.subprocess_runner.
+    """
+
+    def runner(
+        cmd: List[str],
+        cwd: Optional[str] = None,
+        extra_environ: Optional[Mapping[str, Any]] = None,
+    ) -> None:
+        with open_spinner(message) as spinner:
+            call_subprocess(
+                cmd,
+                command_desc=message,
+                cwd=cwd,
+                extra_environ=extra_environ,
+                spinner=spinner,
+            )
+
+    return runner
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/temp_dir.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/temp_dir.py
new file mode 100644
index 0000000..4eec5f3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/temp_dir.py
@@ -0,0 +1,296 @@
+import errno
+import itertools
+import logging
+import os.path
+import tempfile
+import traceback
+from contextlib import ExitStack, contextmanager
+from pathlib import Path
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    Generator,
+    List,
+    Optional,
+    TypeVar,
+    Union,
+)
+
+from pip._internal.utils.misc import enum, rmtree
+
+logger = logging.getLogger(__name__)
+
+_T = TypeVar("_T", bound="TempDirectory")
+
+
+# Kinds of temporary directories. Only needed for ones that are
+# globally-managed.
+tempdir_kinds = enum(
+    BUILD_ENV="build-env",
+    EPHEM_WHEEL_CACHE="ephem-wheel-cache",
+    REQ_BUILD="req-build",
+)
+
+
+_tempdir_manager: Optional[ExitStack] = None
+
+
+@contextmanager
+def global_tempdir_manager() -> Generator[None, None, None]:
+    global _tempdir_manager
+    with ExitStack() as stack:
+        old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack
+        try:
+            yield
+        finally:
+            _tempdir_manager = old_tempdir_manager
+
+
+class TempDirectoryTypeRegistry:
+    """Manages temp directory behavior"""
+
+    def __init__(self) -> None:
+        self._should_delete: Dict[str, bool] = {}
+
+    def set_delete(self, kind: str, value: bool) -> None:
+        """Indicate whether a TempDirectory of the given kind should be
+        auto-deleted.
+        """
+        self._should_delete[kind] = value
+
+    def get_delete(self, kind: str) -> bool:
+        """Get configured auto-delete flag for a given TempDirectory type,
+        default True.
+        """
+        return self._should_delete.get(kind, True)
+
+
+_tempdir_registry: Optional[TempDirectoryTypeRegistry] = None
+
+
+@contextmanager
+def tempdir_registry() -> Generator[TempDirectoryTypeRegistry, None, None]:
+    """Provides a scoped global tempdir registry that can be used to dictate
+    whether directories should be deleted.
+    """
+    global _tempdir_registry
+    old_tempdir_registry = _tempdir_registry
+    _tempdir_registry = TempDirectoryTypeRegistry()
+    try:
+        yield _tempdir_registry
+    finally:
+        _tempdir_registry = old_tempdir_registry
+
+
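+# Sketch of the scoped registry in use: within the context, "req-build"
+# temporary directories are marked to be kept rather than auto-deleted.
+# The helper itself is illustrative and never called.
+def _example_keep_build_dirs() -> None:
+    with tempdir_registry() as registry:
+        registry.set_delete("req-build", False)
+        assert registry.get_delete("req-build") is False
+
+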
+class _Default:
+    pass
+
+
+_default = _Default()
+
+
+class TempDirectory:
+    """Helper class that owns and cleans up a temporary directory.
+
+    This class can be used as a context manager or as an OO representation of a
+    temporary directory.
+
+    Attributes:
+        path
+            Location to the created temporary directory
+        delete
+            Whether the directory should be deleted when exiting
+            (when used as a contextmanager)
+
+    Methods:
+        cleanup()
+            Deletes the temporary directory
+
+    When used as a context manager, if the delete attribute is True, on
+    exiting the context the temporary directory is deleted.
+    """
+
+    def __init__(
+        self,
+        path: Optional[str] = None,
+        delete: Union[bool, None, _Default] = _default,
+        kind: str = "temp",
+        globally_managed: bool = False,
+        ignore_cleanup_errors: bool = True,
+    ):
+        super().__init__()
+
+        if delete is _default:
+            if path is not None:
+                # If we were given an explicit directory, resolve delete option
+                # now.
+                delete = False
+            else:
+                # Otherwise, we wait until cleanup and see what
+                # tempdir_registry says.
+                delete = None
+
+        # The only time we specify path is for editables, where it
+        # is the value of the --src option.
+        if path is None:
+            path = self._create(kind)
+
+        self._path = path
+        self._deleted = False
+        self.delete = delete
+        self.kind = kind
+        self.ignore_cleanup_errors = ignore_cleanup_errors
+
+        if globally_managed:
+            assert _tempdir_manager is not None
+            _tempdir_manager.enter_context(self)
+
+    @property
+    def path(self) -> str:
+        assert not self._deleted, f"Attempted to access deleted path: {self._path}"
+        return self._path
+
+    def __repr__(self) -> str:
+        return f"<{self.__class__.__name__} {self.path!r}>"
+
+    def __enter__(self: _T) -> _T:
+        return self
+
+    def __exit__(self, exc: Any, value: Any, tb: Any) -> None:
+        if self.delete is not None:
+            delete = self.delete
+        elif _tempdir_registry:
+            delete = _tempdir_registry.get_delete(self.kind)
+        else:
+            delete = True
+
+        if delete:
+            self.cleanup()
+
+    def _create(self, kind: str) -> str:
+        """Create a temporary directory and store its path in self.path"""
+        # We realpath here because some systems have their default tmpdir
+        # symlinked to another directory.  This tends to confuse build
+        # scripts, so we canonicalize the path by traversing potential
+        # symlinks here.
+        path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-"))
+        logger.debug("Created temporary directory: %s", path)
+        return path
+
+    def cleanup(self) -> None:
+        """Remove the temporary directory created and reset state"""
+        self._deleted = True
+        if not os.path.exists(self._path):
+            return
+
+        errors: List[BaseException] = []
+
+        def onerror(
+            func: Callable[..., Any],
+            path: Path,
+            exc_val: BaseException,
+        ) -> None:
+            """Log a warning for a `rmtree` error and continue"""
+            formatted_exc = "\n".join(
+                traceback.format_exception_only(type(exc_val), exc_val)
+            )
+            formatted_exc = formatted_exc.rstrip()  # remove trailing new line
+            if func in (os.unlink, os.remove, os.rmdir):
+                logger.debug(
+                    "Failed to remove a temporary file '%s' due to %s.\n",
+                    path,
+                    formatted_exc,
+                )
+            else:
+                logger.debug("%s failed with %s.", func.__qualname__, formatted_exc)
+            errors.append(exc_val)
+
+        if self.ignore_cleanup_errors:
+            try:
+                # first try with tenacity; retrying to handle ephemeral errors
+                rmtree(self._path, ignore_errors=False)
+            except OSError:
+                # last pass ignore/log all errors
+                rmtree(self._path, onexc=onerror)
+            if errors:
+                logger.warning(
+                    "Failed to remove contents in a temporary directory '%s'.\n"
+                    "You can safely remove it manually.",
+                    self._path,
+                )
+        else:
+            rmtree(self._path)
+
+
+class AdjacentTempDirectory(TempDirectory):
+    """Helper class that creates a temporary directory adjacent to a real one.
+
+    Attributes:
+        original
+            The original directory to create a temp directory for.
+        path
+            After calling create() or entering, contains the full
+            path to the temporary directory.
+        delete
+            Whether the directory should be deleted when exiting
+            (when used as a contextmanager)
+
+    """
+
+    # The characters that may be used to name the temp directory
+    # We always prepend a ~ and then rotate through these until
+    # a usable name is found.
+    # pkg_resources raises a different error for .dist-info folder
+    # with leading '-' and invalid metadata
+    LEADING_CHARS = "-~.=%0123456789"
+
+    def __init__(self, original: str, delete: Optional[bool] = None) -> None:
+        self.original = original.rstrip("/\\")
+        super().__init__(delete=delete)
+
+    @classmethod
+    def _generate_names(cls, name: str) -> Generator[str, None, None]:
+        """Generates a series of temporary names.
+
+        The algorithm replaces the leading characters in the name
+        with ones that are valid filesystem characters, but are not
+        valid package names (for both Python and pip definitions of
+        package).
+        """
+        for i in range(1, len(name)):
+            for candidate in itertools.combinations_with_replacement(
+                cls.LEADING_CHARS, i - 1
+            ):
+                new_name = "~" + "".join(candidate) + name[i:]
+                if new_name != name:
+                    yield new_name
+
+        # If we make it this far, we will have to make a longer name
+        for i in range(len(cls.LEADING_CHARS)):
+            for candidate in itertools.combinations_with_replacement(
+                cls.LEADING_CHARS, i
+            ):
+                new_name = "~" + "".join(candidate) + name
+                if new_name != name:
+                    yield new_name
+
+    def _create(self, kind: str) -> str:
+        root, name = os.path.split(self.original)
+        for candidate in self._generate_names(name):
+            path = os.path.join(root, candidate)
+            try:
+                os.mkdir(path)
+            except OSError as ex:
+                # Continue if the name exists already
+                if ex.errno != errno.EEXIST:
+                    raise
+            else:
+                path = os.path.realpath(path)
+                break
+        else:
+            # Final fallback on the default behavior.
+            path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-"))
+
+        logger.debug("Created temporary directory: %s", path)
+        return path
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/unpacking.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/unpacking.py
new file mode 100644
index 0000000..78b5c13
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/unpacking.py
@@ -0,0 +1,257 @@
+"""Utilities related archives.
+"""
+
+import logging
+import os
+import shutil
+import stat
+import tarfile
+import zipfile
+from typing import Iterable, List, Optional
+from zipfile import ZipInfo
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.utils.filetypes import (
+    BZ2_EXTENSIONS,
+    TAR_EXTENSIONS,
+    XZ_EXTENSIONS,
+    ZIP_EXTENSIONS,
+)
+from pip._internal.utils.misc import ensure_dir
+
+logger = logging.getLogger(__name__)
+
+
+SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
+
+try:
+    import bz2  # noqa
+
+    SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
+except ImportError:
+    logger.debug("bz2 module is not available")
+
+try:
+    # Only for Python 3.3+
+    import lzma  # noqa
+
+    SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
+except ImportError:
+    logger.debug("lzma module is not available")
+
+
+def current_umask() -> int:
+    """Get the current umask which involves having to set it temporarily."""
+    mask = os.umask(0)
+    os.umask(mask)
+    return mask
+
+
+def split_leading_dir(path: str) -> List[str]:
+    path = path.lstrip("/").lstrip("\\")
+    if "/" in path and (
+        ("\\" in path and path.find("/") < path.find("\\")) or "\\" not in path
+    ):
+        return path.split("/", 1)
+    elif "\\" in path:
+        return path.split("\\", 1)
+    else:
+        return [path, ""]
+
+
+def has_leading_dir(paths: Iterable[str]) -> bool:
+    """Returns true if all the paths have the same leading path name
+    (i.e., everything is in one subdirectory in an archive)"""
+    common_prefix = None
+    for path in paths:
+        prefix, rest = split_leading_dir(path)
+        if not prefix:
+            return False
+        elif common_prefix is None:
+            common_prefix = prefix
+        elif prefix != common_prefix:
+            return False
+    return True
+
+
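+# Sketch of the leading-directory check above: both (made-up) members share
+# the "pkg/" prefix, so has_leading_dir() returns True and the prefix would be
+# stripped when flattening the archive.
+def _example_leading_dir() -> bool:
+    return has_leading_dir(["pkg/setup.py", "pkg/pkg/__init__.py"])  # True
+
+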
+def is_within_directory(directory: str, target: str) -> bool:
+    """
+    Return true if the absolute path of target is within the directory
+    """
+    abs_directory = os.path.abspath(directory)
+    abs_target = os.path.abspath(target)
+
+    prefix = os.path.commonprefix([abs_directory, abs_target])
+    return prefix == abs_directory
+
+
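+# Sketch of the directory-traversal guard above: a member that resolves
+# outside the extraction root (via "..") is rejected before anything is
+# written. The paths are made up and assume a POSIX-style layout.
+def _example_traversal_check(location: str = "/tmp/extract") -> bool:
+    inside = is_within_directory(location, os.path.join(location, "pkg", "setup.py"))
+    outside = is_within_directory(location, os.path.join(location, "..", "evil.py"))
+    return inside and not outside  # True
+
+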
+def set_extracted_file_to_default_mode_plus_executable(path: str) -> None:
+    """
+    Make the file present at path executable for user/group/world.
+    The underlying chmod +x is a no-op on Windows per the Python docs.
+    """
+    os.chmod(path, (0o777 & ~current_umask() | 0o111))
+
+
+def zip_item_is_executable(info: ZipInfo) -> bool:
+    mode = info.external_attr >> 16
+    # if mode and regular file and any execute permissions for
+    # user/group/world?
+    return bool(mode and stat.S_ISREG(mode) and mode & 0o111)
+
+
+def unzip_file(filename: str, location: str, flatten: bool = True) -> None:
+    """
+    Unzip the file (with path `filename`) to the destination `location`.  All
+    files are written based on system defaults and umask (i.e. permissions are
+    not preserved), except that regular file members with any execute
+    permissions (user, group, or world) have "chmod +x" applied after being
+    written. Note that for windows, any execute changes using os.chmod are
+    no-ops per the python docs.
+    """
+    ensure_dir(location)
+    zipfp = open(filename, "rb")
+    try:
+        zip = zipfile.ZipFile(zipfp, allowZip64=True)
+        leading = has_leading_dir(zip.namelist()) and flatten
+        for info in zip.infolist():
+            name = info.filename
+            fn = name
+            if leading:
+                fn = split_leading_dir(name)[1]
+            fn = os.path.join(location, fn)
+            dir = os.path.dirname(fn)
+            if not is_within_directory(location, fn):
+                message = (
+                    "The zip file ({}) has a file ({}) trying to install "
+                    "outside target directory ({})"
+                )
+                raise InstallationError(message.format(filename, fn, location))
+            if fn.endswith("/") or fn.endswith("\\"):
+                # A directory
+                ensure_dir(fn)
+            else:
+                ensure_dir(dir)
+                # Don't use read() to avoid allocating an arbitrarily large
+                # chunk of memory for the file's content
+                fp = zip.open(name)
+                try:
+                    with open(fn, "wb") as destfp:
+                        shutil.copyfileobj(fp, destfp)
+                finally:
+                    fp.close()
+                    if zip_item_is_executable(info):
+                        set_extracted_file_to_default_mode_plus_executable(fn)
+    finally:
+        zipfp.close()
+
+
+def untar_file(filename: str, location: str) -> None:
+    """
+    Untar the file (with path `filename`) to the destination `location`.
+    All files are written based on system defaults and umask (i.e. permissions
+    are not preserved), except that regular file members with any execute
+    permissions (user, group, or world) have "chmod +x" applied after being
+    written.  Note that for windows, any execute changes using os.chmod are
+    no-ops per the python docs.
+    """
+    ensure_dir(location)
+    if filename.lower().endswith(".gz") or filename.lower().endswith(".tgz"):
+        mode = "r:gz"
+    elif filename.lower().endswith(BZ2_EXTENSIONS):
+        mode = "r:bz2"
+    elif filename.lower().endswith(XZ_EXTENSIONS):
+        mode = "r:xz"
+    elif filename.lower().endswith(".tar"):
+        mode = "r"
+    else:
+        logger.warning(
+            "Cannot determine compression type for file %s",
+            filename,
+        )
+        mode = "r:*"
+    tar = tarfile.open(filename, mode, encoding="utf-8")
+    try:
+        leading = has_leading_dir([member.name for member in tar.getmembers()])
+        for member in tar.getmembers():
+            fn = member.name
+            if leading:
+                fn = split_leading_dir(fn)[1]
+            path = os.path.join(location, fn)
+            if not is_within_directory(location, path):
+                message = (
+                    "The tar file ({}) has a file ({}) trying to install "
+                    "outside target directory ({})"
+                )
+                raise InstallationError(message.format(filename, path, location))
+            if member.isdir():
+                ensure_dir(path)
+            elif member.issym():
+                try:
+                    tar._extract_member(member, path)
+                except Exception as exc:
+                    # Some corrupt tar files seem to produce this
+                    # (specifically bad symlinks)
+                    logger.warning(
+                        "In the tar file %s the member %s is invalid: %s",
+                        filename,
+                        member.name,
+                        exc,
+                    )
+                    continue
+            else:
+                try:
+                    fp = tar.extractfile(member)
+                except (KeyError, AttributeError) as exc:
+                    # Some corrupt tar files seem to produce this
+                    # (specifically bad symlinks)
+                    logger.warning(
+                        "In the tar file %s the member %s is invalid: %s",
+                        filename,
+                        member.name,
+                        exc,
+                    )
+                    continue
+                ensure_dir(os.path.dirname(path))
+                assert fp is not None
+                with open(path, "wb") as destfp:
+                    shutil.copyfileobj(fp, destfp)
+                fp.close()
+                # Update the timestamp (useful for cython compiled files)
+                tar.utime(member, path)
+                # Does the member have any execute permissions for user/group/world?
+                if member.mode & 0o111:
+                    set_extracted_file_to_default_mode_plus_executable(path)
+    finally:
+        tar.close()
+
+
+def unpack_file(
+    filename: str,
+    location: str,
+    content_type: Optional[str] = None,
+) -> None:
+    filename = os.path.realpath(filename)
+    if (
+        content_type == "application/zip"
+        or filename.lower().endswith(ZIP_EXTENSIONS)
+        or zipfile.is_zipfile(filename)
+    ):
+        unzip_file(filename, location, flatten=not filename.endswith(".whl"))
+    elif (
+        content_type == "application/x-gzip"
+        or tarfile.is_tarfile(filename)
+        or filename.lower().endswith(TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)
+    ):
+        untar_file(filename, location)
+    else:
+        # FIXME: handle?
+        # FIXME: magic signatures?
+        logger.critical(
+            "Cannot unpack file %s (downloaded from %s, content-type: %s); "
+            "cannot detect archive format",
+            filename,
+            location,
+            content_type,
+        )
+        raise InstallationError(f"Cannot determine archive format of {location}")
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/urls.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/urls.py
new file mode 100644
index 0000000..6ba2e04
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/urls.py
@@ -0,0 +1,62 @@
+import os
+import string
+import urllib.parse
+import urllib.request
+from typing import Optional
+
+from .compat import WINDOWS
+
+
+def get_url_scheme(url: str) -> Optional[str]:
+    if ":" not in url:
+        return None
+    return url.split(":", 1)[0].lower()
+
+
+def path_to_url(path: str) -> str:
+    """
+    Convert a path to a file: URL.  The path will be made absolute and have
+    quoted path parts.
+    """
+    path = os.path.normpath(os.path.abspath(path))
+    url = urllib.parse.urljoin("file:", urllib.request.pathname2url(path))
+    return url
+
+
+def url_to_path(url: str) -> str:
+    """
+    Convert a file: URL to a path.
+    """
+    assert url.startswith(
+        "file:"
+    ), f"You can only turn file: urls into filenames (not {url!r})"
+
+    _, netloc, path, _, _ = urllib.parse.urlsplit(url)
+
+    if not netloc or netloc == "localhost":
+        # According to RFC 8089, same as empty authority.
+        netloc = ""
+    elif WINDOWS:
+        # If we have a UNC path, prepend UNC share notation.
+        netloc = "\\\\" + netloc
+    else:
+        raise ValueError(
+            f"non-local file URIs are not supported on this platform: {url!r}"
+        )
+
+    path = urllib.request.url2pathname(netloc + path)
+
+    # On Windows, urlsplit parses the path as something like "/C:/Users/foo".
+    # This creates issues for path-related functions like io.open(), so we try
+    # to detect and strip the leading slash.
+    if (
+        WINDOWS
+        and not netloc  # Not UNC.
+        and len(path) >= 3
+        and path[0] == "/"  # Leading slash to strip.
+        and path[1] in string.ascii_letters  # Drive letter.
+        and path[2:4] in (":", ":/")  # Colon + end of string, or colon + absolute path.
+    ):
+        path = path[1:]
+
+    return path
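+
+
+# Round-trip sketch for the two converters above: a local path becomes a
+# file: URL with quoted components and comes back unchanged. The path is made
+# up and assumes a POSIX filesystem.
+def _example_file_url_roundtrip() -> bool:
+    path = "/tmp/some dir/pkg-1.0.tar.gz"
+    return url_to_path(path_to_url(path)) == path  # True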
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/virtualenv.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/virtualenv.py
new file mode 100644
index 0000000..882e36f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/virtualenv.py
@@ -0,0 +1,104 @@
+import logging
+import os
+import re
+import site
+import sys
+from typing import List, Optional
+
+logger = logging.getLogger(__name__)
+_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile(
+    r"include-system-site-packages\s*=\s*(?Ptrue|false)"
+)
+
+
+def _running_under_venv() -> bool:
+    """Checks if sys.base_prefix and sys.prefix match.
+
+    This handles PEP 405 compliant virtual environments.
+    """
+    return sys.prefix != getattr(sys, "base_prefix", sys.prefix)
+
+
+def _running_under_legacy_virtualenv() -> bool:
+    """Checks if sys.real_prefix is set.
+
+    This handles virtual environments created with pypa's virtualenv.
+    """
+    # pypa/virtualenv case
+    return hasattr(sys, "real_prefix")
+
+
+def running_under_virtualenv() -> bool:
+    """True if we're running inside a virtual environment, False otherwise."""
+    return _running_under_venv() or _running_under_legacy_virtualenv()
+
+
+def _get_pyvenv_cfg_lines() -> Optional[List[str]]:
+    """Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines
+
+    Returns None, if it could not read/access the file.
+    """
+    pyvenv_cfg_file = os.path.join(sys.prefix, "pyvenv.cfg")
+    try:
+        # Although PEP 405 does not specify, the built-in venv module always
+        # writes with UTF-8. (pypa/pip#8717)
+        with open(pyvenv_cfg_file, encoding="utf-8") as f:
+            return f.read().splitlines()  # avoids trailing newlines
+    except OSError:
+        return None
+
+
+def _no_global_under_venv() -> bool:
+    """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion
+
+    PEP 405 specifies that when system site-packages are not supposed to be
+    visible from a virtual environment, `pyvenv.cfg` must contain the following
+    line:
+
+        include-system-site-packages = false
+
+    Additionally, log a warning if accessing the file fails.
+    """
+    cfg_lines = _get_pyvenv_cfg_lines()
+    if cfg_lines is None:
+        # We're not in a "sane" venv, so assume there is no system
+        # site-packages access (since that's PEP 405's default state).
+        logger.warning(
+            "Could not access 'pyvenv.cfg' despite a virtual environment "
+            "being active. Assuming global site-packages is not accessible "
+            "in this environment."
+        )
+        return True
+
+    for line in cfg_lines:
+        match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line)
+        if match is not None and match.group("value") == "false":
+            return True
+    return False
+
+
+def _no_global_under_legacy_virtualenv() -> bool:
+    """Check if "no-global-site-packages.txt" exists beside site.py
+
+    This mirrors logic in pypa/virtualenv for determining whether system
+    site-packages are visible in the virtual environment.
+    """
+    site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
+    no_global_site_packages_file = os.path.join(
+        site_mod_dir,
+        "no-global-site-packages.txt",
+    )
+    return os.path.exists(no_global_site_packages_file)
+
+
+def virtualenv_no_global() -> bool:
+    """Returns a boolean, whether running in venv with no system site-packages."""
+    # PEP 405 compliance needs to be checked first since virtualenv >=20 would
+    # return True for both checks, but is only able to use the PEP 405 config.
+    if _running_under_venv():
+        return _no_global_under_venv()
+
+    if _running_under_legacy_virtualenv():
+        return _no_global_under_legacy_virtualenv()
+
+    return False
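A quick way to exercise these checks from inside a virtual environment (an illustrative
REPL session; actual values depend on the local machine):

    import sys
    from pip._internal.utils import virtualenv

    sys.prefix                                # e.g. /path/to/project/.venv
    getattr(sys, "base_prefix", sys.prefix)   # the base interpreter the venv was made from
    virtualenv.running_under_virtualenv()     # True inside the venv
    virtualenv.virtualenv_no_global()         # True unless pyvenv.cfg contains
                                              # include-system-site-packages = true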
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/utils/wheel.py b/.venv/lib/python3.12/site-packages/pip/_internal/utils/wheel.py
new file mode 100644
index 0000000..3551f8f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/utils/wheel.py
@@ -0,0 +1,134 @@
+"""Support functions for working with wheel files.
+"""
+
+import logging
+from email.message import Message
+from email.parser import Parser
+from typing import Tuple
+from zipfile import BadZipFile, ZipFile
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.exceptions import UnsupportedWheel
+
+VERSION_COMPATIBLE = (1, 0)
+
+
+logger = logging.getLogger(__name__)
+
+
+def parse_wheel(wheel_zip: ZipFile, name: str) -> Tuple[str, Message]:
+    """Extract information from the provided wheel, ensuring it meets basic
+    standards.
+
+    Returns the name of the .dist-info directory and the parsed WHEEL metadata.
+    """
+    try:
+        info_dir = wheel_dist_info_dir(wheel_zip, name)
+        metadata = wheel_metadata(wheel_zip, info_dir)
+        version = wheel_version(metadata)
+    except UnsupportedWheel as e:
+        raise UnsupportedWheel(f"{name} has an invalid wheel, {str(e)}")
+
+    check_compatibility(version, name)
+
+    return info_dir, metadata
+
+
+def wheel_dist_info_dir(source: ZipFile, name: str) -> str:
+    """Returns the name of the contained .dist-info directory.
+
+    Raises AssertionError or UnsupportedWheel if not found, >1 found, or
+    it doesn't match the provided name.
+    """
+    # Zip file path separators must be /
+    subdirs = {p.split("/", 1)[0] for p in source.namelist()}
+
+    info_dirs = [s for s in subdirs if s.endswith(".dist-info")]
+
+    if not info_dirs:
+        raise UnsupportedWheel(".dist-info directory not found")
+
+    if len(info_dirs) > 1:
+        raise UnsupportedWheel(
+            "multiple .dist-info directories found: {}".format(", ".join(info_dirs))
+        )
+
+    info_dir = info_dirs[0]
+
+    info_dir_name = canonicalize_name(info_dir)
+    canonical_name = canonicalize_name(name)
+    if not info_dir_name.startswith(canonical_name):
+        raise UnsupportedWheel(
+            f".dist-info directory {info_dir!r} does not start with {canonical_name!r}"
+        )
+
+    return info_dir
+
+
+def read_wheel_metadata_file(source: ZipFile, path: str) -> bytes:
+    try:
+        return source.read(path)
+        # BadZipFile for general corruption, KeyError for missing entry,
+        # and RuntimeError for password-protected files
+    except (BadZipFile, KeyError, RuntimeError) as e:
+        raise UnsupportedWheel(f"could not read {path!r} file: {e!r}")
+
+
+def wheel_metadata(source: ZipFile, dist_info_dir: str) -> Message:
+    """Return the WHEEL metadata of an extracted wheel, if possible.
+    Otherwise, raise UnsupportedWheel.
+    """
+    path = f"{dist_info_dir}/WHEEL"
+    # Zip file path separators must be /
+    wheel_contents = read_wheel_metadata_file(source, path)
+
+    try:
+        wheel_text = wheel_contents.decode()
+    except UnicodeDecodeError as e:
+        raise UnsupportedWheel(f"error decoding {path!r}: {e!r}")
+
+    # FeedParser (used by Parser) does not raise any exceptions. The returned
+    # message may have .defects populated, but for backwards-compatibility we
+    # currently ignore them.
+    return Parser().parsestr(wheel_text)
+
+
+def wheel_version(wheel_data: Message) -> Tuple[int, ...]:
+    """Given WHEEL metadata, return the parsed Wheel-Version.
+    Otherwise, raise UnsupportedWheel.
+    """
+    version_text = wheel_data["Wheel-Version"]
+    if version_text is None:
+        raise UnsupportedWheel("WHEEL is missing Wheel-Version")
+
+    version = version_text.strip()
+
+    try:
+        return tuple(map(int, version.split(".")))
+    except ValueError:
+        raise UnsupportedWheel(f"invalid Wheel-Version: {version!r}")
+
+
+def check_compatibility(version: Tuple[int, ...], name: str) -> None:
+    """Raises errors or warns if called with an incompatible Wheel-Version.
+
+    pip should refuse to install a Wheel-Version that's a major series
+    ahead of what it's compatible with (e.g. 2.0 > 1.1); and warn when
+    installing a version that is only a minor version ahead (e.g. 1.2 > 1.1).
+
+    version: a 2-tuple representing a Wheel-Version (Major, Minor)
+    name: name of wheel or package to raise exception about
+
+    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
+    """
+    if version[0] > VERSION_COMPATIBLE[0]:
+        raise UnsupportedWheel(
+            "{}'s Wheel-Version ({}) is not compatible with this version "
+            "of pip".format(name, ".".join(map(str, version)))
+        )
+    elif version > VERSION_COMPATIBLE:
+        logger.warning(
+            "Installing from a newer Wheel-Version (%s)",
+            ".".join(map(str, version)),
+        )
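A short sketch of driving these helpers against a wheel on disk (the wheel file name and
project name below are hypothetical):

    from zipfile import ZipFile
    from pip._internal.utils.wheel import parse_wheel, wheel_version

    with ZipFile("example_pkg-1.0-py3-none-any.whl") as zf:
        info_dir, metadata = parse_wheel(zf, "example-pkg")
        print(info_dir)                 # e.g. 'example_pkg-1.0.dist-info'
        print(wheel_version(metadata))  # e.g. (1, 0), which check_compatibility() accepts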
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/vcs/__init__.py b/.venv/lib/python3.12/site-packages/pip/_internal/vcs/__init__.py
new file mode 100644
index 0000000..b6beddb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/vcs/__init__.py
@@ -0,0 +1,15 @@
+# Expose a limited set of classes and functions so callers outside of
+# the vcs package don't need to import deeper than `pip._internal.vcs`.
+# (The test directory may still need to import from a vcs sub-package.)
+# Import all vcs modules to register each VCS in the VcsSupport object.
+import pip._internal.vcs.bazaar
+import pip._internal.vcs.git
+import pip._internal.vcs.mercurial
+import pip._internal.vcs.subversion  # noqa: F401
+from pip._internal.vcs.versioncontrol import (  # noqa: F401
+    RemoteNotFoundError,
+    RemoteNotValidError,
+    is_url,
+    make_vcs_requirement_url,
+    vcs,
+)
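Because importing this package registers every backend, the shared `vcs` object can be
queried directly. An illustrative session (assuming all four backends import cleanly):

    from pip._internal.vcs import vcs

    vcs.get_backend("git")                    # the registered Git backend instance
    vcs.get_backend_for_scheme("git+https")   # the same backend, located via its schemes
    sorted(vcs.dirnames)                      # ['.bzr', '.git', '.hg', '.svn']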
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/vcs/bazaar.py b/.venv/lib/python3.12/site-packages/pip/_internal/vcs/bazaar.py
new file mode 100644
index 0000000..20a17ed
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/vcs/bazaar.py
@@ -0,0 +1,112 @@
+import logging
+from typing import List, Optional, Tuple
+
+from pip._internal.utils.misc import HiddenText, display_path
+from pip._internal.utils.subprocess import make_command
+from pip._internal.utils.urls import path_to_url
+from pip._internal.vcs.versioncontrol import (
+    AuthInfo,
+    RemoteNotFoundError,
+    RevOptions,
+    VersionControl,
+    vcs,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class Bazaar(VersionControl):
+    name = "bzr"
+    dirname = ".bzr"
+    repo_name = "branch"
+    schemes = (
+        "bzr+http",
+        "bzr+https",
+        "bzr+ssh",
+        "bzr+sftp",
+        "bzr+ftp",
+        "bzr+lp",
+        "bzr+file",
+    )
+
+    @staticmethod
+    def get_base_rev_args(rev: str) -> List[str]:
+        return ["-r", rev]
+
+    def fetch_new(
+        self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
+    ) -> None:
+        rev_display = rev_options.to_display()
+        logger.info(
+            "Checking out %s%s to %s",
+            url,
+            rev_display,
+            display_path(dest),
+        )
+        if verbosity <= 0:
+            flag = "--quiet"
+        elif verbosity == 1:
+            flag = ""
+        else:
+            flag = f"-{'v'*verbosity}"
+        cmd_args = make_command(
+            "checkout", "--lightweight", flag, rev_options.to_args(), url, dest
+        )
+        self.run_command(cmd_args)
+
+    def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        self.run_command(make_command("switch", url), cwd=dest)
+
+    def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        output = self.run_command(
+            make_command("info"), show_stdout=False, stdout_only=True, cwd=dest
+        )
+        if output.startswith("Standalone "):
+            # Older versions of pip used to create standalone branches.
+            # Convert the standalone branch to a checkout by calling "bzr bind".
+            cmd_args = make_command("bind", "-q", url)
+            self.run_command(cmd_args, cwd=dest)
+
+        cmd_args = make_command("update", "-q", rev_options.to_args())
+        self.run_command(cmd_args, cwd=dest)
+
+    @classmethod
+    def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
+        # hotfix the URL scheme after removing bzr+ from bzr+ssh://; re-add it
+        url, rev, user_pass = super().get_url_rev_and_auth(url)
+        if url.startswith("ssh://"):
+            url = "bzr+" + url
+        return url, rev, user_pass
+
+    @classmethod
+    def get_remote_url(cls, location: str) -> str:
+        urls = cls.run_command(
+            ["info"], show_stdout=False, stdout_only=True, cwd=location
+        )
+        for line in urls.splitlines():
+            line = line.strip()
+            for x in ("checkout of branch: ", "parent branch: "):
+                if line.startswith(x):
+                    repo = line.split(x)[1]
+                    if cls._is_local_repository(repo):
+                        return path_to_url(repo)
+                    return repo
+        raise RemoteNotFoundError
+
+    @classmethod
+    def get_revision(cls, location: str) -> str:
+        revision = cls.run_command(
+            ["revno"],
+            show_stdout=False,
+            stdout_only=True,
+            cwd=location,
+        )
+        return revision.splitlines()[-1]
+
+    @classmethod
+    def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
+        """Always assume the versions don't match"""
+        return False
+
+
+vcs.register(Bazaar)
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/vcs/git.py b/.venv/lib/python3.12/site-packages/pip/_internal/vcs/git.py
new file mode 100644
index 0000000..8c242cf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/vcs/git.py
@@ -0,0 +1,526 @@
+import logging
+import os.path
+import pathlib
+import re
+import urllib.parse
+import urllib.request
+from typing import List, Optional, Tuple
+
+from pip._internal.exceptions import BadCommand, InstallationError
+from pip._internal.utils.misc import HiddenText, display_path, hide_url
+from pip._internal.utils.subprocess import make_command
+from pip._internal.vcs.versioncontrol import (
+    AuthInfo,
+    RemoteNotFoundError,
+    RemoteNotValidError,
+    RevOptions,
+    VersionControl,
+    find_path_to_project_root_from_repo_root,
+    vcs,
+)
+
+urlsplit = urllib.parse.urlsplit
+urlunsplit = urllib.parse.urlunsplit
+
+
+logger = logging.getLogger(__name__)
+
+
+GIT_VERSION_REGEX = re.compile(
+    r"^git version "  # Prefix.
+    r"(\d+)"  # Major.
+    r"\.(\d+)"  # Dot, minor.
+    r"(?:\.(\d+))?"  # Optional dot, patch.
+    r".*$"  # Suffix, including any pre- and post-release segments we don't care about.
+)
+
+HASH_REGEX = re.compile("^[a-fA-F0-9]{40}$")
+
+# SCP (Secure copy protocol) shorthand. e.g. 'git@example.com:foo/bar.git'
+SCP_REGEX = re.compile(
+    r"""^
+    # Optional user, e.g. 'git@'
+    (\w+@)?
+    # Server, e.g. 'github.com'.
+    ([^/:]+):
+    # The server-side path. e.g. 'user/project.git'. Must start with an
+    # alphanumeric character so as not to be confusable with Windows paths
+    # like 'C:/foo/bar' or 'C:\foo\bar'.
+    (\w[^:]*)
+    $""",
+    re.VERBOSE,
+)
+
+
+def looks_like_hash(sha: str) -> bool:
+    return bool(HASH_REGEX.match(sha))
+
+
+class Git(VersionControl):
+    name = "git"
+    dirname = ".git"
+    repo_name = "clone"
+    schemes = (
+        "git+http",
+        "git+https",
+        "git+ssh",
+        "git+git",
+        "git+file",
+    )
+    # Prevent the user's environment variables from interfering with pip:
+    # https://github.com/pypa/pip/issues/1130
+    unset_environ = ("GIT_DIR", "GIT_WORK_TREE")
+    default_arg_rev = "HEAD"
+
+    @staticmethod
+    def get_base_rev_args(rev: str) -> List[str]:
+        return [rev]
+
+    def is_immutable_rev_checkout(self, url: str, dest: str) -> bool:
+        _, rev_options = self.get_url_rev_options(hide_url(url))
+        if not rev_options.rev:
+            return False
+        if not self.is_commit_id_equal(dest, rev_options.rev):
+            # the current commit is different from rev,
+            # which means rev was something else than a commit hash
+            return False
+        # return False in the rare case rev is both a commit hash
+        # and a tag or a branch; we don't want to cache in that case
+        # because that branch/tag could point to something else in the future
+        is_tag_or_branch = bool(self.get_revision_sha(dest, rev_options.rev)[0])
+        return not is_tag_or_branch
+
+    def get_git_version(self) -> Tuple[int, ...]:
+        version = self.run_command(
+            ["version"],
+            command_desc="git version",
+            show_stdout=False,
+            stdout_only=True,
+        )
+        match = GIT_VERSION_REGEX.match(version)
+        if not match:
+            logger.warning("Can't parse git version: %s", version)
+            return ()
+        return (int(match.group(1)), int(match.group(2)))
+
+    @classmethod
+    def get_current_branch(cls, location: str) -> Optional[str]:
+        """
+        Return the current branch, or None if HEAD isn't at a branch
+        (e.g. detached HEAD).
+        """
+        # git-symbolic-ref exits with empty stdout if "HEAD" is a detached
+        # HEAD rather than a symbolic ref.  In addition, the -q causes the
+        # command to exit with status code 1 instead of 128 in this case
+        # and to suppress the message to stderr.
+        args = ["symbolic-ref", "-q", "HEAD"]
+        output = cls.run_command(
+            args,
+            extra_ok_returncodes=(1,),
+            show_stdout=False,
+            stdout_only=True,
+            cwd=location,
+        )
+        ref = output.strip()
+
+        if ref.startswith("refs/heads/"):
+            return ref[len("refs/heads/") :]
+
+        return None
+
+    @classmethod
+    def get_revision_sha(cls, dest: str, rev: str) -> Tuple[Optional[str], bool]:
+        """
+        Return (sha_or_none, is_branch), where sha_or_none is a commit hash
+        if the revision names a remote branch or tag, otherwise None.
+
+        Args:
+          dest: the repository directory.
+          rev: the revision name.
+        """
+        # Pass rev to pre-filter the list.
+        output = cls.run_command(
+            ["show-ref", rev],
+            cwd=dest,
+            show_stdout=False,
+            stdout_only=True,
+            on_returncode="ignore",
+        )
+        refs = {}
+        # NOTE: We do not use splitlines here since that would split on other
+        #       unicode separators, which can be maliciously used to install a
+        #       different revision.
+        for line in output.strip().split("\n"):
+            line = line.rstrip("\r")
+            if not line:
+                continue
+            try:
+                ref_sha, ref_name = line.split(" ", maxsplit=2)
+            except ValueError:
+                # Include the offending line to simplify troubleshooting if
+                # this error ever occurs.
+                raise ValueError(f"unexpected show-ref line: {line!r}")
+
+            refs[ref_name] = ref_sha
+
+        branch_ref = f"refs/remotes/origin/{rev}"
+        tag_ref = f"refs/tags/{rev}"
+
+        sha = refs.get(branch_ref)
+        if sha is not None:
+            return (sha, True)
+
+        sha = refs.get(tag_ref)
+
+        return (sha, False)
+
+    @classmethod
+    def _should_fetch(cls, dest: str, rev: str) -> bool:
+        """
+        Return true if rev is a ref or is a commit that we don't have locally.
+
+        Branches and tags are not considered in this method because they are
+        assumed to be always available locally (which is a normal outcome of
+        ``git clone`` and ``git fetch --tags``).
+        """
+        if rev.startswith("refs/"):
+            # Always fetch remote refs.
+            return True
+
+        if not looks_like_hash(rev):
+            # Git fetch would fail with abbreviated commits.
+            return False
+
+        if cls.has_commit(dest, rev):
+            # Don't fetch if we have the commit locally.
+            return False
+
+        return True
+
+    @classmethod
+    def resolve_revision(
+        cls, dest: str, url: HiddenText, rev_options: RevOptions
+    ) -> RevOptions:
+        """
+        Resolve a revision to a new RevOptions object with the SHA1 of the
+        branch, tag, or ref if found.
+
+        Args:
+          rev_options: a RevOptions object.
+        """
+        rev = rev_options.arg_rev
+        # The arg_rev property's implementation for Git ensures that the
+        # rev return value is always non-None.
+        assert rev is not None
+
+        sha, is_branch = cls.get_revision_sha(dest, rev)
+
+        if sha is not None:
+            rev_options = rev_options.make_new(sha)
+            rev_options.branch_name = rev if is_branch else None
+
+            return rev_options
+
+        # Do not show a warning for the common case of something that has
+        # the form of a Git commit hash.
+        if not looks_like_hash(rev):
+            logger.warning(
+                "Did not find branch or tag '%s', assuming revision or ref.",
+                rev,
+            )
+
+        if not cls._should_fetch(dest, rev):
+            return rev_options
+
+        # fetch the requested revision
+        cls.run_command(
+            make_command("fetch", "-q", url, rev_options.to_args()),
+            cwd=dest,
+        )
+        # Change the revision to the SHA of the ref we fetched
+        sha = cls.get_revision(dest, rev="FETCH_HEAD")
+        rev_options = rev_options.make_new(sha)
+
+        return rev_options
+
+    @classmethod
+    def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
+        """
+        Return whether the current commit hash equals the given name.
+
+        Args:
+          dest: the repository directory.
+          name: a string name.
+        """
+        if not name:
+            # Then avoid an unnecessary subprocess call.
+            return False
+
+        return cls.get_revision(dest) == name
+
+    def fetch_new(
+        self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
+    ) -> None:
+        rev_display = rev_options.to_display()
+        logger.info("Cloning %s%s to %s", url, rev_display, display_path(dest))
+        if verbosity <= 0:
+            flags: Tuple[str, ...] = ("--quiet",)
+        elif verbosity == 1:
+            flags = ()
+        else:
+            flags = ("--verbose", "--progress")
+        if self.get_git_version() >= (2, 17):
+            # Git added support for partial clone in 2.17
+            # https://git-scm.com/docs/partial-clone
+            # Speeds up cloning by functioning without a complete copy of the repository
+            self.run_command(
+                make_command(
+                    "clone",
+                    "--filter=blob:none",
+                    *flags,
+                    url,
+                    dest,
+                )
+            )
+        else:
+            self.run_command(make_command("clone", *flags, url, dest))
+
+        if rev_options.rev:
+            # Then a specific revision was requested.
+            rev_options = self.resolve_revision(dest, url, rev_options)
+            branch_name = getattr(rev_options, "branch_name", None)
+            logger.debug("Rev options %s, branch_name %s", rev_options, branch_name)
+            if branch_name is None:
+                # Only do a checkout if the current commit id doesn't match
+                # the requested revision.
+                if not self.is_commit_id_equal(dest, rev_options.rev):
+                    cmd_args = make_command(
+                        "checkout",
+                        "-q",
+                        rev_options.to_args(),
+                    )
+                    self.run_command(cmd_args, cwd=dest)
+            elif self.get_current_branch(dest) != branch_name:
+                # Then a specific branch was requested, and that branch
+                # is not yet checked out.
+                track_branch = f"origin/{branch_name}"
+                cmd_args = [
+                    "checkout",
+                    "-b",
+                    branch_name,
+                    "--track",
+                    track_branch,
+                ]
+                self.run_command(cmd_args, cwd=dest)
+        else:
+            sha = self.get_revision(dest)
+            rev_options = rev_options.make_new(sha)
+
+        logger.info("Resolved %s to commit %s", url, rev_options.rev)
+
+        #: repo may contain submodules
+        self.update_submodules(dest)
+
+    def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        self.run_command(
+            make_command("config", "remote.origin.url", url),
+            cwd=dest,
+        )
+        cmd_args = make_command("checkout", "-q", rev_options.to_args())
+        self.run_command(cmd_args, cwd=dest)
+
+        self.update_submodules(dest)
+
+    def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        # First fetch changes from the default remote
+        if self.get_git_version() >= (1, 9):
+            # fetch tags in addition to everything else
+            self.run_command(["fetch", "-q", "--tags"], cwd=dest)
+        else:
+            self.run_command(["fetch", "-q"], cwd=dest)
+        # Then reset to wanted revision (maybe even origin/master)
+        rev_options = self.resolve_revision(dest, url, rev_options)
+        cmd_args = make_command("reset", "--hard", "-q", rev_options.to_args())
+        self.run_command(cmd_args, cwd=dest)
+        #: update submodules
+        self.update_submodules(dest)
+
+    @classmethod
+    def get_remote_url(cls, location: str) -> str:
+        """
+        Return URL of the first remote encountered.
+
+        Raises RemoteNotFoundError if the repository does not have a remote
+        url configured.
+        """
+        # We need to pass 1 for extra_ok_returncodes since the command
+        # exits with return code 1 if there are no matching lines.
+        stdout = cls.run_command(
+            ["config", "--get-regexp", r"remote\..*\.url"],
+            extra_ok_returncodes=(1,),
+            show_stdout=False,
+            stdout_only=True,
+            cwd=location,
+        )
+        remotes = stdout.splitlines()
+        try:
+            found_remote = remotes[0]
+        except IndexError:
+            raise RemoteNotFoundError
+
+        for remote in remotes:
+            if remote.startswith("remote.origin.url "):
+                found_remote = remote
+                break
+        url = found_remote.split(" ")[1]
+        return cls._git_remote_to_pip_url(url.strip())
+
+    @staticmethod
+    def _git_remote_to_pip_url(url: str) -> str:
+        """
+        Convert a remote url from what git uses to what pip accepts.
+
+        There are 3 legal forms **url** may take:
+
+            1. A fully qualified url: ssh://git@example.com/foo/bar.git
+            2. A local project.git folder: /path/to/bare/repository.git
+            3. SCP shorthand for form 1: git@example.com:foo/bar.git
+
+        Form 1 is output as-is. Form 2 must be converted to URI and form 3 must
+        be converted to form 1.
+
+        See the corresponding test test_git_remote_url_to_pip() for examples of
+        sample inputs/outputs.
+        """
+        if re.match(r"\w+://", url):
+            # This is already valid. Pass it through as-is.
+            return url
+        if os.path.exists(url):
+            # A local bare remote (git clone --mirror).
+            # Needs a file:// prefix.
+            return pathlib.PurePath(url).as_uri()
+        scp_match = SCP_REGEX.match(url)
+        if scp_match:
+            # Add an ssh:// prefix and replace the ':' with a '/'.
+            return scp_match.expand(r"ssh://\1\2/\3")
+        # Otherwise, bail out.
+        raise RemoteNotValidError(url)
+
+    @classmethod
+    def has_commit(cls, location: str, rev: str) -> bool:
+        """
+        Check if rev is a commit that is available in the local repository.
+        """
+        try:
+            cls.run_command(
+                ["rev-parse", "-q", "--verify", "sha^" + rev],
+                cwd=location,
+                log_failed_cmd=False,
+            )
+        except InstallationError:
+            return False
+        else:
+            return True
+
+    @classmethod
+    def get_revision(cls, location: str, rev: Optional[str] = None) -> str:
+        if rev is None:
+            rev = "HEAD"
+        current_rev = cls.run_command(
+            ["rev-parse", rev],
+            show_stdout=False,
+            stdout_only=True,
+            cwd=location,
+        )
+        return current_rev.strip()
+
+    @classmethod
+    def get_subdirectory(cls, location: str) -> Optional[str]:
+        """
+        Return the path to Python project root, relative to the repo root.
+        Return None if the project root is in the repo root.
+        """
+        # find the repo root
+        git_dir = cls.run_command(
+            ["rev-parse", "--git-dir"],
+            show_stdout=False,
+            stdout_only=True,
+            cwd=location,
+        ).strip()
+        if not os.path.isabs(git_dir):
+            git_dir = os.path.join(location, git_dir)
+        repo_root = os.path.abspath(os.path.join(git_dir, ".."))
+        return find_path_to_project_root_from_repo_root(location, repo_root)
+
+    @classmethod
+    def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
+        """
+        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
+        That's required because although they use SSH they sometimes don't
+        work with an ssh:// scheme (e.g. GitHub). But we need a scheme for
+        parsing. Hence we remove it again afterwards and return it as a stub.
+        """
+        # Works around an apparent Git bug
+        # (see https://article.gmane.org/gmane.comp.version-control.git/146500)
+        scheme, netloc, path, query, fragment = urlsplit(url)
+        if scheme.endswith("file"):
+            initial_slashes = path[: -len(path.lstrip("/"))]
+            newpath = initial_slashes + urllib.request.url2pathname(path).replace(
+                "\\", "/"
+            ).lstrip("/")
+            after_plus = scheme.find("+") + 1
+            url = scheme[:after_plus] + urlunsplit(
+                (scheme[after_plus:], netloc, newpath, query, fragment),
+            )
+
+        if "://" not in url:
+            assert "file:" not in url
+            url = url.replace("git+", "git+ssh://")
+            url, rev, user_pass = super().get_url_rev_and_auth(url)
+            url = url.replace("ssh://", "")
+        else:
+            url, rev, user_pass = super().get_url_rev_and_auth(url)
+
+        return url, rev, user_pass
+
+    @classmethod
+    def update_submodules(cls, location: str) -> None:
+        if not os.path.exists(os.path.join(location, ".gitmodules")):
+            return
+        cls.run_command(
+            ["submodule", "update", "--init", "--recursive", "-q"],
+            cwd=location,
+        )
+
+    @classmethod
+    def get_repository_root(cls, location: str) -> Optional[str]:
+        loc = super().get_repository_root(location)
+        if loc:
+            return loc
+        try:
+            r = cls.run_command(
+                ["rev-parse", "--show-toplevel"],
+                cwd=location,
+                show_stdout=False,
+                stdout_only=True,
+                on_returncode="raise",
+                log_failed_cmd=False,
+            )
+        except BadCommand:
+            logger.debug(
+                "could not determine if %s is under git control "
+                "because git is not available",
+                location,
+            )
+            return None
+        except InstallationError:
+            return None
+        return os.path.normpath(r.rstrip("\r\n"))
+
+    @staticmethod
+    def should_add_vcs_url_prefix(repo_url: str) -> bool:
+        """In either https or ssh form, requirements must be prefixed with git+."""
+        return True
+
+
+vcs.register(Git)
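The three remote-URL forms described in Git._git_remote_to_pip_url() behave roughly as
follows (illustrative values, not pip's own test data):

    from pip._internal.vcs.git import Git

    Git._git_remote_to_pip_url("ssh://git@example.com/foo/bar.git")
    # -> "ssh://git@example.com/foo/bar.git"   (form 1: already a URL, passed through)

    Git._git_remote_to_pip_url("git@example.com:foo/bar.git")
    # -> "ssh://git@example.com/foo/bar.git"   (form 3: SCP shorthand rewritten)

    # Form 2, a local bare repository path such as /srv/repos/bar.git, is converted to a
    # file:// URI, but only when the path actually exists on disk.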
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/vcs/mercurial.py b/.venv/lib/python3.12/site-packages/pip/_internal/vcs/mercurial.py
new file mode 100644
index 0000000..c183d41
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/vcs/mercurial.py
@@ -0,0 +1,163 @@
+import configparser
+import logging
+import os
+from typing import List, Optional, Tuple
+
+from pip._internal.exceptions import BadCommand, InstallationError
+from pip._internal.utils.misc import HiddenText, display_path
+from pip._internal.utils.subprocess import make_command
+from pip._internal.utils.urls import path_to_url
+from pip._internal.vcs.versioncontrol import (
+    RevOptions,
+    VersionControl,
+    find_path_to_project_root_from_repo_root,
+    vcs,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class Mercurial(VersionControl):
+    name = "hg"
+    dirname = ".hg"
+    repo_name = "clone"
+    schemes = (
+        "hg+file",
+        "hg+http",
+        "hg+https",
+        "hg+ssh",
+        "hg+static-http",
+    )
+
+    @staticmethod
+    def get_base_rev_args(rev: str) -> List[str]:
+        return [f"--rev={rev}"]
+
+    def fetch_new(
+        self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
+    ) -> None:
+        rev_display = rev_options.to_display()
+        logger.info(
+            "Cloning hg %s%s to %s",
+            url,
+            rev_display,
+            display_path(dest),
+        )
+        if verbosity <= 0:
+            flags: Tuple[str, ...] = ("--quiet",)
+        elif verbosity == 1:
+            flags = ()
+        elif verbosity == 2:
+            flags = ("--verbose",)
+        else:
+            flags = ("--verbose", "--debug")
+        self.run_command(make_command("clone", "--noupdate", *flags, url, dest))
+        self.run_command(
+            make_command("update", *flags, rev_options.to_args()),
+            cwd=dest,
+        )
+
+    def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        repo_config = os.path.join(dest, self.dirname, "hgrc")
+        config = configparser.RawConfigParser()
+        try:
+            config.read(repo_config)
+            config.set("paths", "default", url.secret)
+            with open(repo_config, "w") as config_file:
+                config.write(config_file)
+        except (OSError, configparser.NoSectionError) as exc:
+            logger.warning("Could not switch Mercurial repository to %s: %s", url, exc)
+        else:
+            cmd_args = make_command("update", "-q", rev_options.to_args())
+            self.run_command(cmd_args, cwd=dest)
+
+    def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        self.run_command(["pull", "-q"], cwd=dest)
+        cmd_args = make_command("update", "-q", rev_options.to_args())
+        self.run_command(cmd_args, cwd=dest)
+
+    @classmethod
+    def get_remote_url(cls, location: str) -> str:
+        url = cls.run_command(
+            ["showconfig", "paths.default"],
+            show_stdout=False,
+            stdout_only=True,
+            cwd=location,
+        ).strip()
+        if cls._is_local_repository(url):
+            url = path_to_url(url)
+        return url.strip()
+
+    @classmethod
+    def get_revision(cls, location: str) -> str:
+        """
+        Return the repository-local changeset revision number, as a decimal string.
+        """
+        current_revision = cls.run_command(
+            ["parents", "--template={rev}"],
+            show_stdout=False,
+            stdout_only=True,
+            cwd=location,
+        ).strip()
+        return current_revision
+
+    @classmethod
+    def get_requirement_revision(cls, location: str) -> str:
+        """
+        Return the changeset identification hash, as a 40-character
+        hexadecimal string
+        """
+        current_rev_hash = cls.run_command(
+            ["parents", "--template={node}"],
+            show_stdout=False,
+            stdout_only=True,
+            cwd=location,
+        ).strip()
+        return current_rev_hash
+
+    @classmethod
+    def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
+        """Always assume the versions don't match"""
+        return False
+
+    @classmethod
+    def get_subdirectory(cls, location: str) -> Optional[str]:
+        """
+        Return the path to Python project root, relative to the repo root.
+        Return None if the project root is in the repo root.
+        """
+        # find the repo root
+        repo_root = cls.run_command(
+            ["root"], show_stdout=False, stdout_only=True, cwd=location
+        ).strip()
+        if not os.path.isabs(repo_root):
+            repo_root = os.path.abspath(os.path.join(location, repo_root))
+        return find_path_to_project_root_from_repo_root(location, repo_root)
+
+    @classmethod
+    def get_repository_root(cls, location: str) -> Optional[str]:
+        loc = super().get_repository_root(location)
+        if loc:
+            return loc
+        try:
+            r = cls.run_command(
+                ["root"],
+                cwd=location,
+                show_stdout=False,
+                stdout_only=True,
+                on_returncode="raise",
+                log_failed_cmd=False,
+            )
+        except BadCommand:
+            logger.debug(
+                "could not determine if %s is under hg control "
+                "because hg is not available",
+                location,
+            )
+            return None
+        except InstallationError:
+            return None
+        return os.path.normpath(r.rstrip("\r\n"))
+
+
+vcs.register(Mercurial)
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/vcs/subversion.py b/.venv/lib/python3.12/site-packages/pip/_internal/vcs/subversion.py
new file mode 100644
index 0000000..16d93a6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/vcs/subversion.py
@@ -0,0 +1,324 @@
+import logging
+import os
+import re
+from typing import List, Optional, Tuple
+
+from pip._internal.utils.misc import (
+    HiddenText,
+    display_path,
+    is_console_interactive,
+    is_installable_dir,
+    split_auth_from_netloc,
+)
+from pip._internal.utils.subprocess import CommandArgs, make_command
+from pip._internal.vcs.versioncontrol import (
+    AuthInfo,
+    RemoteNotFoundError,
+    RevOptions,
+    VersionControl,
+    vcs,
+)
+
+logger = logging.getLogger(__name__)
+
+_svn_xml_url_re = re.compile('url="([^"]+)"')
+_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
+_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
+_svn_info_xml_url_re = re.compile(r"<url>(.*)</url>")
+
+
+class Subversion(VersionControl):
+    name = "svn"
+    dirname = ".svn"
+    repo_name = "checkout"
+    schemes = ("svn+ssh", "svn+http", "svn+https", "svn+svn", "svn+file")
+
+    @classmethod
+    def should_add_vcs_url_prefix(cls, remote_url: str) -> bool:
+        return True
+
+    @staticmethod
+    def get_base_rev_args(rev: str) -> List[str]:
+        return ["-r", rev]
+
+    @classmethod
+    def get_revision(cls, location: str) -> str:
+        """
+        Return the maximum revision for all files under a given location
+        """
+        # Note: taken from setuptools.command.egg_info
+        revision = 0
+
+        for base, dirs, _ in os.walk(location):
+            if cls.dirname not in dirs:
+                dirs[:] = []
+                continue  # no sense walking uncontrolled subdirs
+            dirs.remove(cls.dirname)
+            entries_fn = os.path.join(base, cls.dirname, "entries")
+            if not os.path.exists(entries_fn):
+                # FIXME: should we warn?
+                continue
+
+            dirurl, localrev = cls._get_svn_url_rev(base)
+
+            if base == location:
+                assert dirurl is not None
+                base = dirurl + "/"  # save the root url
+            elif not dirurl or not dirurl.startswith(base):
+                dirs[:] = []
+                continue  # not part of the same svn tree, skip it
+            revision = max(revision, localrev)
+        return str(revision)
+
+    @classmethod
+    def get_netloc_and_auth(
+        cls, netloc: str, scheme: str
+    ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]:
+        """
+        This override allows the auth information to be passed to svn via the
+        --username and --password options instead of via the URL.
+        """
+        if scheme == "ssh":
+            # The --username and --password options can't be used for
+            # svn+ssh URLs, so keep the auth information in the URL.
+            return super().get_netloc_and_auth(netloc, scheme)
+
+        return split_auth_from_netloc(netloc)
+
+    @classmethod
+    def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
+        # hotfix the URL scheme after removing svn+ from svn+ssh://; re-add it
+        url, rev, user_pass = super().get_url_rev_and_auth(url)
+        if url.startswith("ssh://"):
+            url = "svn+" + url
+        return url, rev, user_pass
+
+    @staticmethod
+    def make_rev_args(
+        username: Optional[str], password: Optional[HiddenText]
+    ) -> CommandArgs:
+        extra_args: CommandArgs = []
+        if username:
+            extra_args += ["--username", username]
+        if password:
+            extra_args += ["--password", password]
+
+        return extra_args
+
+    @classmethod
+    def get_remote_url(cls, location: str) -> str:
+        # In cases where the source is in a subdirectory, we have to look up in
+        # the location until we find a valid project root.
+        orig_location = location
+        while not is_installable_dir(location):
+            last_location = location
+            location = os.path.dirname(location)
+            if location == last_location:
+                # We've traversed up to the root of the filesystem without
+                # finding a Python project.
+                logger.warning(
+                    "Could not find Python project for directory %s (tried all "
+                    "parent directories)",
+                    orig_location,
+                )
+                raise RemoteNotFoundError
+
+        url, _rev = cls._get_svn_url_rev(location)
+        if url is None:
+            raise RemoteNotFoundError
+
+        return url
+
+    @classmethod
+    def _get_svn_url_rev(cls, location: str) -> Tuple[Optional[str], int]:
+        from pip._internal.exceptions import InstallationError
+
+        entries_path = os.path.join(location, cls.dirname, "entries")
+        if os.path.exists(entries_path):
+            with open(entries_path) as f:
+                data = f.read()
+        else:  # subversion >= 1.7 does not have the 'entries' file
+            data = ""
+
+        url = None
+        if data.startswith("8") or data.startswith("9") or data.startswith("10"):
+            entries = list(map(str.splitlines, data.split("\n\x0c\n")))
+            del entries[0][0]  # get rid of the '8'
+            url = entries[0][3]
+            revs = [int(d[9]) for d in entries if len(d) > 9 and d[9]] + [0]
+        elif data.startswith("<?xml"):
+            match = _svn_xml_url_re.search(data)
+            if not match:
+                raise ValueError(f"Badly formatted data: {data!r}")
+            url = match.group(1)  # get repository URL
+            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
+        else:
+            try:
+                # subversion >= 1.7
+                # Note that using get_remote_call_options is not necessary here
+                # because `svn info` is being run against a local directory.
+                # We don't need to worry about making sure interactive mode
+                # is being used to prompt for passwords, because passwords
+                # are only potentially needed for remote server requests.
+                xml = cls.run_command(
+                    ["info", "--xml", location],
+                    show_stdout=False,
+                    stdout_only=True,
+                )
+                match = _svn_info_xml_url_re.search(xml)
+                assert match is not None
+                url = match.group(1)
+                revs = [int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)]
+            except InstallationError:
+                url, revs = None, []
+
+        if revs:
+            rev = max(revs)
+        else:
+            rev = 0
+
+        return url, rev
+
+    @classmethod
+    def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
+        """Always assume the versions don't match"""
+        return False
+
+    def __init__(self, use_interactive: Optional[bool] = None) -> None:
+        if use_interactive is None:
+            use_interactive = is_console_interactive()
+        self.use_interactive = use_interactive
+
+        # This member is used to cache the fetched version of the current
+        # ``svn`` client.
+        # Special value definitions:
+        #   None: Not evaluated yet.
+        #   Empty tuple: Could not parse version.
+        self._vcs_version: Optional[Tuple[int, ...]] = None
+
+        super().__init__()
+
+    def call_vcs_version(self) -> Tuple[int, ...]:
+        """Query the version of the currently installed Subversion client.
+
+        :return: A tuple containing the parts of the version information or
+            ``()`` if the version returned from ``svn`` could not be parsed.
+        :raises: BadCommand: If ``svn`` is not installed.
+        """
+        # Example versions:
+        #   svn, version 1.10.3 (r1842928)
+        #      compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0
+        #   svn, version 1.7.14 (r1542130)
+        #      compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu
+        #   svn, version 1.12.0-SlikSvn (SlikSvn/1.12.0)
+        #      compiled May 28 2019, 13:44:56 on x86_64-microsoft-windows6.2
+        version_prefix = "svn, version "
+        version = self.run_command(["--version"], show_stdout=False, stdout_only=True)
+        if not version.startswith(version_prefix):
+            return ()
+
+        version = version[len(version_prefix) :].split()[0]
+        version_list = version.partition("-")[0].split(".")
+        try:
+            parsed_version = tuple(map(int, version_list))
+        except ValueError:
+            return ()
+
+        return parsed_version
+
+    def get_vcs_version(self) -> Tuple[int, ...]:
+        """Return the version of the currently installed Subversion client.
+
+        If the version of the Subversion client has already been queried,
+        a cached value will be used.
+
+        :return: A tuple containing the parts of the version information or
+            ``()`` if the version returned from ``svn`` could not be parsed.
+        :raises: BadCommand: If ``svn`` is not installed.
+        """
+        if self._vcs_version is not None:
+            # Use cached version, if available.
+            # If parsing the version failed previously (empty tuple),
+            # do not attempt to parse it again.
+            return self._vcs_version
+
+        vcs_version = self.call_vcs_version()
+        self._vcs_version = vcs_version
+        return vcs_version
+
+    def get_remote_call_options(self) -> CommandArgs:
+        """Return options to be used on calls to Subversion that contact the server.
+
+        These options are applicable for the following ``svn`` subcommands used
+        in this class.
+
+            - checkout
+            - switch
+            - update
+
+        :return: A list of command line arguments to pass to ``svn``.
+        """
+        if not self.use_interactive:
+            # --non-interactive switch is available since Subversion 0.14.4.
+            # Subversion < 1.8 runs in interactive mode by default.
+            return ["--non-interactive"]
+
+        svn_version = self.get_vcs_version()
+        # By default, Subversion >= 1.8 runs in non-interactive mode if
+        # stdin is not a TTY. Since that is how pip invokes SVN, in
+        # call_subprocess(), pip must pass --force-interactive to ensure
+        # the user can be prompted for a password, if required.
+        #   SVN added the --force-interactive option in SVN 1.8. Since
+        # e.g. RHEL/CentOS 7, which is supported until 2024, ships with
+        # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip
+        # can't safely add the option if the SVN version is < 1.8 (or unknown).
+        if svn_version >= (1, 8):
+            return ["--force-interactive"]
+
+        return []
+
+    def fetch_new(
+        self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
+    ) -> None:
+        rev_display = rev_options.to_display()
+        logger.info(
+            "Checking out %s%s to %s",
+            url,
+            rev_display,
+            display_path(dest),
+        )
+        if verbosity <= 0:
+            flag = "--quiet"
+        else:
+            flag = ""
+        cmd_args = make_command(
+            "checkout",
+            flag,
+            self.get_remote_call_options(),
+            rev_options.to_args(),
+            url,
+            dest,
+        )
+        self.run_command(cmd_args)
+
+    def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        cmd_args = make_command(
+            "switch",
+            self.get_remote_call_options(),
+            rev_options.to_args(),
+            url,
+            dest,
+        )
+        self.run_command(cmd_args)
+
+    def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        cmd_args = make_command(
+            "update",
+            self.get_remote_call_options(),
+            rev_options.to_args(),
+            dest,
+        )
+        self.run_command(cmd_args)
+
+
+vcs.register(Subversion)
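The credential handling above keeps passwords out of logged command lines. A small sketch
(illustrative values only):

    from pip._internal.utils.misc import hide_value
    from pip._internal.vcs.subversion import Subversion

    Subversion.get_base_rev_args("1234")
    # -> ["-r", "1234"]

    Subversion.make_rev_args("alice", hide_value("s3cret"))
    # -> ["--username", "alice", "--password", <HiddenText>]; the secret is masked when
    #    the command line is logged and only revealed when the subprocess is run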
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/vcs/versioncontrol.py b/.venv/lib/python3.12/site-packages/pip/_internal/vcs/versioncontrol.py
new file mode 100644
index 0000000..46ca279
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/vcs/versioncontrol.py
@@ -0,0 +1,705 @@
+"""Handles all VCS (version control) support"""
+
+import logging
+import os
+import shutil
+import sys
+import urllib.parse
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Dict,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Tuple,
+    Type,
+    Union,
+)
+
+from pip._internal.cli.spinners import SpinnerInterface
+from pip._internal.exceptions import BadCommand, InstallationError
+from pip._internal.utils.misc import (
+    HiddenText,
+    ask_path_exists,
+    backup_dir,
+    display_path,
+    hide_url,
+    hide_value,
+    is_installable_dir,
+    rmtree,
+)
+from pip._internal.utils.subprocess import (
+    CommandArgs,
+    call_subprocess,
+    format_command_args,
+    make_command,
+)
+from pip._internal.utils.urls import get_url_scheme
+
+if TYPE_CHECKING:
+    # Literal was introduced in Python 3.8.
+    #
+    # TODO: Remove `if TYPE_CHECKING` when dropping support for Python 3.7.
+    from typing import Literal
+
+
+__all__ = ["vcs"]
+
+
+logger = logging.getLogger(__name__)
+
+AuthInfo = Tuple[Optional[str], Optional[str]]
+
+
+def is_url(name: str) -> bool:
+    """
+    Return true if the name looks like a URL.
+    """
+    scheme = get_url_scheme(name)
+    if scheme is None:
+        return False
+    return scheme in ["http", "https", "file", "ftp"] + vcs.all_schemes
+
+
+def make_vcs_requirement_url(
+    repo_url: str, rev: str, project_name: str, subdir: Optional[str] = None
+) -> str:
+    """
+    Return the URL for a VCS requirement.
+
+    Args:
+      repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+").
+      project_name: the (unescaped) project name.
+    """
+    egg_project_name = project_name.replace("-", "_")
+    req = f"{repo_url}@{rev}#egg={egg_project_name}"
+    if subdir:
+        req += f"&subdirectory={subdir}"
+
+    return req
+
+
+def find_path_to_project_root_from_repo_root(
+    location: str, repo_root: str
+) -> Optional[str]:
+    """
+    Find the Python project's root by searching up the filesystem from
+    `location`. Return the path to project root relative to `repo_root`.
+    Return None if the project root is `repo_root`, or cannot be found.
+    """
+    # find project root.
+    orig_location = location
+    while not is_installable_dir(location):
+        last_location = location
+        location = os.path.dirname(location)
+        if location == last_location:
+            # We've traversed up to the root of the filesystem without
+            # finding a Python project.
+            logger.warning(
+                "Could not find a Python project for directory %s (tried all "
+                "parent directories)",
+                orig_location,
+            )
+            return None
+
+    if os.path.samefile(repo_root, location):
+        return None
+
+    return os.path.relpath(location, repo_root)
+
+
+class RemoteNotFoundError(Exception):
+    pass
+
+
+class RemoteNotValidError(Exception):
+    def __init__(self, url: str):
+        super().__init__(url)
+        self.url = url
+
+
+class RevOptions:
+
+    """
+    Encapsulates a VCS-specific revision to install, along with any VCS
+    install options.
+
+    Instances of this class should be treated as if immutable.
+    """
+
+    def __init__(
+        self,
+        vc_class: Type["VersionControl"],
+        rev: Optional[str] = None,
+        extra_args: Optional[CommandArgs] = None,
+    ) -> None:
+        """
+        Args:
+          vc_class: a VersionControl subclass.
+          rev: the name of the revision to install.
+          extra_args: a list of extra options.
+        """
+        if extra_args is None:
+            extra_args = []
+
+        self.extra_args = extra_args
+        self.rev = rev
+        self.vc_class = vc_class
+        self.branch_name: Optional[str] = None
+
+    def __repr__(self) -> str:
+        return f""
+
+    @property
+    def arg_rev(self) -> Optional[str]:
+        if self.rev is None:
+            return self.vc_class.default_arg_rev
+
+        return self.rev
+
+    def to_args(self) -> CommandArgs:
+        """
+        Return the VCS-specific command arguments.
+        """
+        args: CommandArgs = []
+        rev = self.arg_rev
+        if rev is not None:
+            args += self.vc_class.get_base_rev_args(rev)
+        args += self.extra_args
+
+        return args
+
+    def to_display(self) -> str:
+        if not self.rev:
+            return ""
+
+        return f" (to revision {self.rev})"
+
+    def make_new(self, rev: str) -> "RevOptions":
+        """
+        Make a copy of the current instance, but with a new rev.
+
+        Args:
+          rev: the name of the revision for the new object.
+        """
+        return self.vc_class.make_rev_options(rev, extra_args=self.extra_args)
+
+
+class VcsSupport:
+    _registry: Dict[str, "VersionControl"] = {}
+    schemes = ["ssh", "git", "hg", "bzr", "sftp", "svn"]
+
+    def __init__(self) -> None:
+        # Register more schemes with urlparse for various version control
+        # systems
+        urllib.parse.uses_netloc.extend(self.schemes)
+        super().__init__()
+
+    def __iter__(self) -> Iterator[str]:
+        return self._registry.__iter__()
+
+    @property
+    def backends(self) -> List["VersionControl"]:
+        return list(self._registry.values())
+
+    @property
+    def dirnames(self) -> List[str]:
+        return [backend.dirname for backend in self.backends]
+
+    @property
+    def all_schemes(self) -> List[str]:
+        schemes: List[str] = []
+        for backend in self.backends:
+            schemes.extend(backend.schemes)
+        return schemes
+
+    def register(self, cls: Type["VersionControl"]) -> None:
+        if not hasattr(cls, "name"):
+            logger.warning("Cannot register VCS %s", cls.__name__)
+            return
+        if cls.name not in self._registry:
+            self._registry[cls.name] = cls()
+            logger.debug("Registered VCS backend: %s", cls.name)
+
+    def unregister(self, name: str) -> None:
+        if name in self._registry:
+            del self._registry[name]
+
+    def get_backend_for_dir(self, location: str) -> Optional["VersionControl"]:
+        """
+        Return a VersionControl object if a repository of that type is found
+        at the given directory.
+        """
+        vcs_backends = {}
+        for vcs_backend in self._registry.values():
+            repo_path = vcs_backend.get_repository_root(location)
+            if not repo_path:
+                continue
+            logger.debug("Determine that %s uses VCS: %s", location, vcs_backend.name)
+            vcs_backends[repo_path] = vcs_backend
+
+        if not vcs_backends:
+            return None
+
+        # Choose the VCS in the inner-most directory. Since all repository
+        # roots found here would be either `location` or one of its
+        # parents, the longest path should have the most path components,
+        # i.e. the backend representing the inner-most repository.
+        inner_most_repo_path = max(vcs_backends, key=len)
+        return vcs_backends[inner_most_repo_path]
+
+    def get_backend_for_scheme(self, scheme: str) -> Optional["VersionControl"]:
+        """
+        Return a VersionControl object or None.
+        """
+        for vcs_backend in self._registry.values():
+            if scheme in vcs_backend.schemes:
+                return vcs_backend
+        return None
+
+    def get_backend(self, name: str) -> Optional["VersionControl"]:
+        """
+        Return a VersionControl object or None.
+        """
+        name = name.lower()
+        return self._registry.get(name)
+
+
+vcs = VcsSupport()
+
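+
+# Illustrative sketch (not part of upstream pip): concrete backends such as Git
+# register themselves on this module-level ``vcs`` instance when their modules
+# are imported; callers then look a backend up by name, URL scheme, or checkout
+# directory. The scheme and the checkout path below are hypothetical.
+def _example_registry_lookup(checkout_dir: str) -> None:
+    by_name = vcs.get_backend("git")  # None until a Git backend has registered
+    by_scheme = vcs.get_backend_for_scheme("git+https")
+    by_dir = vcs.get_backend_for_dir(checkout_dir)
+    print(by_name, by_scheme, by_dir)
+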
+
+class VersionControl:
+    name = ""
+    dirname = ""
+    repo_name = ""
+    # List of supported schemes for this Version Control
+    schemes: Tuple[str, ...] = ()
+    # Iterable of environment variable names to pass to call_subprocess().
+    unset_environ: Tuple[str, ...] = ()
+    default_arg_rev: Optional[str] = None
+
+    @classmethod
+    def should_add_vcs_url_prefix(cls, remote_url: str) -> bool:
+        """
+        Return whether the vcs prefix (e.g. "git+") should be added to a
+        repository's remote url when used in a requirement.
+        """
+        return not remote_url.lower().startswith(f"{cls.name}:")
+
+    @classmethod
+    def get_subdirectory(cls, location: str) -> Optional[str]:
+        """
+        Return the path to Python project root, relative to the repo root.
+        Return None if the project root is in the repo root.
+        """
+        return None
+
+    @classmethod
+    def get_requirement_revision(cls, repo_dir: str) -> str:
+        """
+        Return the revision string that should be used in a requirement.
+        """
+        return cls.get_revision(repo_dir)
+
+    @classmethod
+    def get_src_requirement(cls, repo_dir: str, project_name: str) -> str:
+        """
+        Return the requirement string to use to redownload the files
+        currently at the given repository directory.
+
+        Args:
+          project_name: the (unescaped) project name.
+
+        The return value has a form similar to the following:
+
+            {repository_url}@{revision}#egg={project_name}
+        """
+        repo_url = cls.get_remote_url(repo_dir)
+
+        if cls.should_add_vcs_url_prefix(repo_url):
+            repo_url = f"{cls.name}+{repo_url}"
+
+        revision = cls.get_requirement_revision(repo_dir)
+        subdir = cls.get_subdirectory(repo_dir)
+        req = make_vcs_requirement_url(repo_url, revision, project_name, subdir=subdir)
+
+        return req
+
+    @staticmethod
+    def get_base_rev_args(rev: str) -> List[str]:
+        """
+        Return the base revision arguments for a vcs command.
+
+        Args:
+          rev: the name of a revision to install.  Cannot be None.
+        """
+        raise NotImplementedError
+
+    def is_immutable_rev_checkout(self, url: str, dest: str) -> bool:
+        """
+        Return true if the commit hash checked out at dest matches
+        the revision in url.
+
+        Always return False, if the VCS does not support immutable commit
+        hashes.
+
+        This method does not check if there are local uncommitted changes
+        in dest after checkout, as pip currently has no use case for that.
+        """
+        return False
+
+    @classmethod
+    def make_rev_options(
+        cls, rev: Optional[str] = None, extra_args: Optional[CommandArgs] = None
+    ) -> RevOptions:
+        """
+        Return a RevOptions object.
+
+        Args:
+          rev: the name of a revision to install.
+          extra_args: a list of extra options.
+        """
+        return RevOptions(cls, rev, extra_args=extra_args)
+
+    @classmethod
+    def _is_local_repository(cls, repo: str) -> bool:
+        """
+        posix absolute paths start with os.path.sep,
+        win32 ones start with drive (like c:\\folder)
+        """
+        drive, tail = os.path.splitdrive(repo)
+        return repo.startswith(os.path.sep) or bool(drive)
+
+    @classmethod
+    def get_netloc_and_auth(
+        cls, netloc: str, scheme: str
+    ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]:
+        """
+        Parse the repository URL's netloc, and return the new netloc to use
+        along with auth information.
+
+        Args:
+          netloc: the original repository URL netloc.
+          scheme: the repository URL's scheme without the vcs prefix.
+
+        This is mainly for the Subversion class to override, so that auth
+        information can be provided via the --username and --password options
+        instead of through the URL.  For other subclasses like Git without
+        such an option, auth information must stay in the URL.
+
+        Returns: (netloc, (username, password)).
+        """
+        return netloc, (None, None)
+
+    @classmethod
+    def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
+        """
+        Parse the repository URL to use, and return the URL, revision,
+        and auth info to use.
+
+        Returns: (url, rev, (username, password)).
+        """
+        scheme, netloc, path, query, frag = urllib.parse.urlsplit(url)
+        if "+" not in scheme:
+            raise ValueError(
+                f"Sorry, {url!r} is a malformed VCS url. "
+                "The format is +://, "
+                "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
+            )
+        # Remove the vcs prefix.
+        scheme = scheme.split("+", 1)[1]
+        netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme)
+        rev = None
+        if "@" in path:
+            path, rev = path.rsplit("@", 1)
+            if not rev:
+                raise InstallationError(
+                    f"The URL {url!r} has an empty revision (after @) "
+                    "which is not supported. Include a revision after @ "
+                    "or remove @ from the URL."
+                )
+        url = urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
+        return url, rev, user_pass
+
+    @staticmethod
+    def make_rev_args(
+        username: Optional[str], password: Optional[HiddenText]
+    ) -> CommandArgs:
+        """
+        Return the RevOptions "extra arguments" to use in obtain().
+        """
+        return []
+
+    def get_url_rev_options(self, url: HiddenText) -> Tuple[HiddenText, RevOptions]:
+        """
+        Return the URL and RevOptions object to use in obtain(),
+        as a tuple (url, rev_options).
+        """
+        secret_url, rev, user_pass = self.get_url_rev_and_auth(url.secret)
+        username, secret_password = user_pass
+        password: Optional[HiddenText] = None
+        if secret_password is not None:
+            password = hide_value(secret_password)
+        extra_args = self.make_rev_args(username, password)
+        rev_options = self.make_rev_options(rev, extra_args=extra_args)
+
+        return hide_url(secret_url), rev_options
+
+    @staticmethod
+    def normalize_url(url: str) -> str:
+        """
+        Normalize a URL for comparison by unquoting it and removing any
+        trailing slash.
+        """
+        return urllib.parse.unquote(url).rstrip("/")
+
+    @classmethod
+    def compare_urls(cls, url1: str, url2: str) -> bool:
+        """
+        Compare two repo URLs for identity, ignoring incidental differences.
+        """
+        return cls.normalize_url(url1) == cls.normalize_url(url2)
+
+    def fetch_new(
+        self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
+    ) -> None:
+        """
+        Fetch a revision from a repository, in the case that this is the
+        first fetch from the repository.
+
+        Args:
+          dest: the directory to fetch the repository to.
+          rev_options: a RevOptions object.
+          verbosity: verbosity level.
+        """
+        raise NotImplementedError
+
+    def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        """
+        Switch the repo at ``dest`` to point to ``URL``.
+
+        Args:
+          rev_options: a RevOptions object.
+        """
+        raise NotImplementedError
+
+    def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        """
+        Update an already-existing repo to the given ``rev_options``.
+
+        Args:
+          rev_options: a RevOptions object.
+        """
+        raise NotImplementedError
+
+    @classmethod
+    def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
+        """
+        Return whether the id of the current commit equals the given name.
+
+        Args:
+          dest: the repository directory.
+          name: a string name.
+        """
+        raise NotImplementedError
+
+    def obtain(self, dest: str, url: HiddenText, verbosity: int) -> None:
+        """
+        Install or update in editable mode the package represented by this
+        VersionControl object.
+
+        :param dest: the repository directory in which to install or update.
+        :param url: the repository URL starting with a vcs prefix.
+        :param verbosity: verbosity level.
+        """
+        url, rev_options = self.get_url_rev_options(url)
+
+        if not os.path.exists(dest):
+            self.fetch_new(dest, url, rev_options, verbosity=verbosity)
+            return
+
+        rev_display = rev_options.to_display()
+        if self.is_repository_directory(dest):
+            existing_url = self.get_remote_url(dest)
+            if self.compare_urls(existing_url, url.secret):
+                logger.debug(
+                    "%s in %s exists, and has correct URL (%s)",
+                    self.repo_name.title(),
+                    display_path(dest),
+                    url,
+                )
+                if not self.is_commit_id_equal(dest, rev_options.rev):
+                    logger.info(
+                        "Updating %s %s%s",
+                        display_path(dest),
+                        self.repo_name,
+                        rev_display,
+                    )
+                    self.update(dest, url, rev_options)
+                else:
+                    logger.info("Skipping because already up-to-date.")
+                return
+
+            logger.warning(
+                "%s %s in %s exists with URL %s",
+                self.name,
+                self.repo_name,
+                display_path(dest),
+                existing_url,
+            )
+            prompt = ("(s)witch, (i)gnore, (w)ipe, (b)ackup ", ("s", "i", "w", "b"))
+        else:
+            logger.warning(
+                "Directory %s already exists, and is not a %s %s.",
+                dest,
+                self.name,
+                self.repo_name,
+            )
+            # https://github.com/python/mypy/issues/1174
+            prompt = ("(i)gnore, (w)ipe, (b)ackup ", ("i", "w", "b"))  # type: ignore
+
+        logger.warning(
+            "The plan is to install the %s repository %s",
+            self.name,
+            url,
+        )
+        response = ask_path_exists(f"What to do?  {prompt[0]}", prompt[1])
+
+        if response == "a":
+            sys.exit(-1)
+
+        if response == "w":
+            logger.warning("Deleting %s", display_path(dest))
+            rmtree(dest)
+            self.fetch_new(dest, url, rev_options, verbosity=verbosity)
+            return
+
+        if response == "b":
+            dest_dir = backup_dir(dest)
+            logger.warning("Backing up %s to %s", display_path(dest), dest_dir)
+            shutil.move(dest, dest_dir)
+            self.fetch_new(dest, url, rev_options, verbosity=verbosity)
+            return
+
+        # Do nothing if the response is "i".
+        if response == "s":
+            logger.info(
+                "Switching %s %s to %s%s",
+                self.repo_name,
+                display_path(dest),
+                url,
+                rev_display,
+            )
+            self.switch(dest, url, rev_options)
+
+    def unpack(self, location: str, url: HiddenText, verbosity: int) -> None:
+        """
+        Clean up the current location and download the url repository
+        (and vcs metadata) into location.
+
+        :param url: the repository URL starting with a vcs prefix.
+        :param verbosity: verbosity level.
+        """
+        if os.path.exists(location):
+            rmtree(location)
+        self.obtain(location, url=url, verbosity=verbosity)
+
+    @classmethod
+    def get_remote_url(cls, location: str) -> str:
+        """
+        Return the url used at location
+
+        Raises RemoteNotFoundError if the repository does not have a remote
+        url configured.
+        """
+        raise NotImplementedError
+
+    @classmethod
+    def get_revision(cls, location: str) -> str:
+        """
+        Return the current commit id of the files at the given location.
+        """
+        raise NotImplementedError
+
+    @classmethod
+    def run_command(
+        cls,
+        cmd: Union[List[str], CommandArgs],
+        show_stdout: bool = True,
+        cwd: Optional[str] = None,
+        on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise",
+        extra_ok_returncodes: Optional[Iterable[int]] = None,
+        command_desc: Optional[str] = None,
+        extra_environ: Optional[Mapping[str, Any]] = None,
+        spinner: Optional[SpinnerInterface] = None,
+        log_failed_cmd: bool = True,
+        stdout_only: bool = False,
+    ) -> str:
+        """
+        Run a VCS subcommand.
+
+        This is simply a wrapper around call_subprocess that adds the VCS
+        command name and checks that the VCS is available.
+        """
+        cmd = make_command(cls.name, *cmd)
+        if command_desc is None:
+            command_desc = format_command_args(cmd)
+        try:
+            return call_subprocess(
+                cmd,
+                show_stdout,
+                cwd,
+                on_returncode=on_returncode,
+                extra_ok_returncodes=extra_ok_returncodes,
+                command_desc=command_desc,
+                extra_environ=extra_environ,
+                unset_environ=cls.unset_environ,
+                spinner=spinner,
+                log_failed_cmd=log_failed_cmd,
+                stdout_only=stdout_only,
+            )
+        except FileNotFoundError:
+            # errno.ENOENT = no such file or directory
+            # In other words, the VCS executable isn't available
+            raise BadCommand(
+                f"Cannot find command {cls.name!r} - do you have "
+                f"{cls.name!r} installed and in your PATH?"
+            )
+        except PermissionError:
+            # errno.EACCES = Permission denied
+            # This error occurs, for instance, when the command is installed
+            # only for another user. So, the current user doesn't have
+            # permission to call that other user's command.
+            raise BadCommand(
+                f"No permission to execute {cls.name!r} - install it "
+                f"locally, globally (ask admin), or check your PATH. "
+                f"See possible solutions at "
+                f"https://pip.pypa.io/en/latest/reference/pip_freeze/"
+                f"#fixing-permission-denied."
+            )
+
+    @classmethod
+    def is_repository_directory(cls, path: str) -> bool:
+        """
+        Return whether a directory path is a repository directory.
+        """
+        logger.debug("Checking in %s for %s (%s)...", path, cls.dirname, cls.name)
+        return os.path.exists(os.path.join(path, cls.dirname))
+
+    @classmethod
+    def get_repository_root(cls, location: str) -> Optional[str]:
+        """
+        Return the "root" (top-level) directory controlled by the vcs,
+        or `None` if the directory is not in any.
+
+        It is meant to be overridden to implement smarter detection
+        mechanisms for specific vcs.
+
+        This can do more than is_repository_directory() alone. For
+        example, the Git override checks that Git is actually available.
+        """
+        if cls.is_repository_directory(location):
+            return location
+        return None
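+
+
+# Illustrative sketch (not part of upstream pip): how the generic URL parsing on
+# VersionControl splits a pinned VCS requirement URL. The URL is hypothetical.
+def _example_parse_vcs_url() -> None:
+    url, rev, (user, password) = VersionControl.get_url_rev_and_auth(
+        "git+https://example.com/pkg.git@v1.2.3#egg=pkg"
+    )
+    # url == "https://example.com/pkg.git" (vcs prefix and fragment stripped),
+    # rev == "v1.2.3"; user and password are None because no credentials were
+    # embedded in the URL.
+    print(url, rev, user, password)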
diff --git a/.venv/lib/python3.12/site-packages/pip/_internal/wheel_builder.py b/.venv/lib/python3.12/site-packages/pip/_internal/wheel_builder.py
new file mode 100644
index 0000000..b1debe3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_internal/wheel_builder.py
@@ -0,0 +1,354 @@
+"""Orchestrator for building wheels from InstallRequirements.
+"""
+
+import logging
+import os.path
+import re
+import shutil
+from typing import Iterable, List, Optional, Tuple
+
+from pip._vendor.packaging.utils import canonicalize_name, canonicalize_version
+from pip._vendor.packaging.version import InvalidVersion, Version
+
+from pip._internal.cache import WheelCache
+from pip._internal.exceptions import InvalidWheelFilename, UnsupportedWheel
+from pip._internal.metadata import FilesystemWheel, get_wheel_distribution
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.operations.build.wheel import build_wheel_pep517
+from pip._internal.operations.build.wheel_editable import build_wheel_editable
+from pip._internal.operations.build.wheel_legacy import build_wheel_legacy
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import ensure_dir, hash_file
+from pip._internal.utils.setuptools_build import make_setuptools_clean_args
+from pip._internal.utils.subprocess import call_subprocess
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.urls import path_to_url
+from pip._internal.vcs import vcs
+
+logger = logging.getLogger(__name__)
+
+_egg_info_re = re.compile(r"([a-z0-9_.]+)-([a-z0-9_.!+-]+)", re.IGNORECASE)
+
+BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]]
+
+
+def _contains_egg_info(s: str) -> bool:
+    """Determine whether the string looks like an egg_info.
+
+    :param s: The string to parse. E.g. foo-2.1
+    """
+    return bool(_egg_info_re.search(s))
+
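+
+# Illustrative sketch (not part of upstream pip): hypothetical stems showing
+# what the egg-info pattern accepts; a name-version stem can be cached, a bare
+# branch name cannot.
+def _example_contains_egg_info() -> None:
+    assert _contains_egg_info("pkg-2.1")     # looks like <name>-<version>
+    assert not _contains_egg_info("master")  # no version information
+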
+
+def _should_build(
+    req: InstallRequirement,
+    need_wheel: bool,
+) -> bool:
+    """Return whether an InstallRequirement should be built into a wheel."""
+    if req.constraint:
+        # never build requirements that are merely constraints
+        return False
+    if req.is_wheel:
+        if need_wheel:
+            logger.info(
+                "Skipping %s, due to already being wheel.",
+                req.name,
+            )
+        return False
+
+    if need_wheel:
+        # i.e. pip wheel, not pip install
+        return True
+
+    # From this point, this concerns the pip install command only
+    # (need_wheel=False).
+
+    if not req.source_dir:
+        return False
+
+    if req.editable:
+        # we only build PEP 660 editable requirements
+        return req.supports_pyproject_editable()
+
+    return True
+
+
+def should_build_for_wheel_command(
+    req: InstallRequirement,
+) -> bool:
+    return _should_build(req, need_wheel=True)
+
+
+def should_build_for_install_command(
+    req: InstallRequirement,
+) -> bool:
+    return _should_build(req, need_wheel=False)
+
+
+def _should_cache(
+    req: InstallRequirement,
+) -> Optional[bool]:
+    """
+    Return whether a built InstallRequirement can be stored in the persistent
+    wheel cache, assuming the wheel cache is available, and _should_build()
+    has determined a wheel needs to be built.
+    """
+    if req.editable or not req.source_dir:
+        # never cache editable requirements
+        return False
+
+    if req.link and req.link.is_vcs:
+        # VCS checkout. Do not cache
+        # unless it points to an immutable commit hash.
+        assert not req.editable
+        assert req.source_dir
+        vcs_backend = vcs.get_backend_for_scheme(req.link.scheme)
+        assert vcs_backend
+        if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir):
+            return True
+        return False
+
+    assert req.link
+    base, ext = req.link.splitext()
+    if _contains_egg_info(base):
+        return True
+
+    # Otherwise, do not cache.
+    return False
+
+
+def _get_cache_dir(
+    req: InstallRequirement,
+    wheel_cache: WheelCache,
+) -> str:
+    """Return the persistent or temporary cache directory where the built
+    wheel needs to be stored.
+    """
+    cache_available = bool(wheel_cache.cache_dir)
+    assert req.link
+    if cache_available and _should_cache(req):
+        cache_dir = wheel_cache.get_path_for_link(req.link)
+    else:
+        cache_dir = wheel_cache.get_ephem_path_for_link(req.link)
+    return cache_dir
+
+
+def _verify_one(req: InstallRequirement, wheel_path: str) -> None:
+    canonical_name = canonicalize_name(req.name or "")
+    w = Wheel(os.path.basename(wheel_path))
+    if canonicalize_name(w.name) != canonical_name:
+        raise InvalidWheelFilename(
+            f"Wheel has unexpected file name: expected {canonical_name!r}, "
+            f"got {w.name!r}",
+        )
+    dist = get_wheel_distribution(FilesystemWheel(wheel_path), canonical_name)
+    dist_verstr = str(dist.version)
+    if canonicalize_version(dist_verstr) != canonicalize_version(w.version):
+        raise InvalidWheelFilename(
+            f"Wheel has unexpected file name: expected {dist_verstr!r}, "
+            f"got {w.version!r}",
+        )
+    metadata_version_value = dist.metadata_version
+    if metadata_version_value is None:
+        raise UnsupportedWheel("Missing Metadata-Version")
+    try:
+        metadata_version = Version(metadata_version_value)
+    except InvalidVersion:
+        msg = f"Invalid Metadata-Version: {metadata_version_value}"
+        raise UnsupportedWheel(msg)
+    if metadata_version >= Version("1.2") and not isinstance(dist.version, Version):
+        raise UnsupportedWheel(
+            f"Metadata 1.2 mandates PEP 440 version, but {dist_verstr!r} is not"
+        )
+
+
+def _build_one(
+    req: InstallRequirement,
+    output_dir: str,
+    verify: bool,
+    build_options: List[str],
+    global_options: List[str],
+    editable: bool,
+) -> Optional[str]:
+    """Build one wheel.
+
+    :return: The filename of the built wheel, or None if the build failed.
+    """
+    artifact = "editable" if editable else "wheel"
+    try:
+        ensure_dir(output_dir)
+    except OSError as e:
+        logger.warning(
+            "Building %s for %s failed: %s",
+            artifact,
+            req.name,
+            e,
+        )
+        return None
+
+    # Install build deps into temporary directory (PEP 518)
+    with req.build_env:
+        wheel_path = _build_one_inside_env(
+            req, output_dir, build_options, global_options, editable
+        )
+    if wheel_path and verify:
+        try:
+            _verify_one(req, wheel_path)
+        except (InvalidWheelFilename, UnsupportedWheel) as e:
+            logger.warning("Built %s for %s is invalid: %s", artifact, req.name, e)
+            return None
+    return wheel_path
+
+
+def _build_one_inside_env(
+    req: InstallRequirement,
+    output_dir: str,
+    build_options: List[str],
+    global_options: List[str],
+    editable: bool,
+) -> Optional[str]:
+    with TempDirectory(kind="wheel") as temp_dir:
+        assert req.name
+        if req.use_pep517:
+            assert req.metadata_directory
+            assert req.pep517_backend
+            if global_options:
+                logger.warning(
+                    "Ignoring --global-option when building %s using PEP 517", req.name
+                )
+            if build_options:
+                logger.warning(
+                    "Ignoring --build-option when building %s using PEP 517", req.name
+                )
+            if editable:
+                wheel_path = build_wheel_editable(
+                    name=req.name,
+                    backend=req.pep517_backend,
+                    metadata_directory=req.metadata_directory,
+                    tempd=temp_dir.path,
+                )
+            else:
+                wheel_path = build_wheel_pep517(
+                    name=req.name,
+                    backend=req.pep517_backend,
+                    metadata_directory=req.metadata_directory,
+                    tempd=temp_dir.path,
+                )
+        else:
+            wheel_path = build_wheel_legacy(
+                name=req.name,
+                setup_py_path=req.setup_py_path,
+                source_dir=req.unpacked_source_directory,
+                global_options=global_options,
+                build_options=build_options,
+                tempd=temp_dir.path,
+            )
+
+        if wheel_path is not None:
+            wheel_name = os.path.basename(wheel_path)
+            dest_path = os.path.join(output_dir, wheel_name)
+            try:
+                wheel_hash, length = hash_file(wheel_path)
+                shutil.move(wheel_path, dest_path)
+                logger.info(
+                    "Created wheel for %s: filename=%s size=%d sha256=%s",
+                    req.name,
+                    wheel_name,
+                    length,
+                    wheel_hash.hexdigest(),
+                )
+                logger.info("Stored in directory: %s", output_dir)
+                return dest_path
+            except Exception as e:
+                logger.warning(
+                    "Building wheel for %s failed: %s",
+                    req.name,
+                    e,
+                )
+        # Ignore return, we can't do anything else useful.
+        if not req.use_pep517:
+            _clean_one_legacy(req, global_options)
+        return None
+
+
+def _clean_one_legacy(req: InstallRequirement, global_options: List[str]) -> bool:
+    clean_args = make_setuptools_clean_args(
+        req.setup_py_path,
+        global_options=global_options,
+    )
+
+    logger.info("Running setup.py clean for %s", req.name)
+    try:
+        call_subprocess(
+            clean_args, command_desc="python setup.py clean", cwd=req.source_dir
+        )
+        return True
+    except Exception:
+        logger.error("Failed cleaning build dir for %s", req.name)
+        return False
+
+
+def build(
+    requirements: Iterable[InstallRequirement],
+    wheel_cache: WheelCache,
+    verify: bool,
+    build_options: List[str],
+    global_options: List[str],
+) -> BuildResult:
+    """Build wheels.
+
+    :return: The list of InstallRequirement that were built successfully and
+        the list of InstallRequirement that failed to build.
+    """
+    if not requirements:
+        return [], []
+
+    # Build the wheels.
+    logger.info(
+        "Building wheels for collected packages: %s",
+        ", ".join(req.name for req in requirements),  # type: ignore
+    )
+
+    with indent_log():
+        build_successes, build_failures = [], []
+        for req in requirements:
+            assert req.name
+            cache_dir = _get_cache_dir(req, wheel_cache)
+            wheel_file = _build_one(
+                req,
+                cache_dir,
+                verify,
+                build_options,
+                global_options,
+                req.editable and req.permit_editable_wheels,
+            )
+            if wheel_file:
+                # Record the download origin in the cache
+                if req.download_info is not None:
+                    # download_info is guaranteed to be set because when we build an
+                    # InstallRequirement it has been through the preparer before, but
+                    # let's be cautious.
+                    wheel_cache.record_download_origin(cache_dir, req.download_info)
+                # Update the link for this.
+                req.link = Link(path_to_url(wheel_file))
+                req.local_file_path = req.link.file_path
+                assert req.link.is_wheel
+                build_successes.append(req)
+            else:
+                build_failures.append(req)
+
+    # notify success/failure
+    if build_successes:
+        logger.info(
+            "Successfully built %s",
+            " ".join([req.name for req in build_successes]),  # type: ignore
+        )
+    if build_failures:
+        logger.info(
+            "Failed to build %s",
+            " ".join([req.name for req in build_failures]),  # type: ignore
+        )
+    # Return the requirements that built successfully and those that failed.
+    return build_successes, build_failures
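+
+
+# Illustrative sketch (not part of upstream pip): callers consume the returned
+# BuildResult pair, e.g. (argument values below are hypothetical):
+#
+#     successes, failures = build(
+#         requirements, wheel_cache, verify=True, build_options=[], global_options=[]
+#     )
+#     if failures:
+#         ...  # report the failed requirements and abort the install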
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/__init__.py
new file mode 100644
index 0000000..c1884ba
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/__init__.py
@@ -0,0 +1,121 @@
+"""
+pip._vendor is for vendoring dependencies of pip to prevent needing pip to
+depend on something external.
+
+Files inside of pip._vendor should be considered immutable and should only be
+updated to versions from upstream.
+"""
+from __future__ import absolute_import
+
+import glob
+import os.path
+import sys
+
+# Downstream redistributors which have debundled our dependencies should also
+# patch this value to be true. This will trigger the additional patching
+# to cause things like "six" to be available as pip.
+DEBUNDLED = False
+
+# By default, look in this directory for a bunch of .whl files which we will
+# add to the beginning of sys.path before attempting to import anything. This
+# is done to support downstream re-distributors like Debian and Fedora who
+# wish to create their own Wheels for our dependencies to aid in debundling.
+WHEEL_DIR = os.path.abspath(os.path.dirname(__file__))
+
+
+# Define a small helper function to alias our vendored modules to the real ones
+# if the vendored ones do not exist. The idea for this was taken from
+# https://github.com/kennethreitz/requests/pull/2567.
+def vendored(modulename):
+    vendored_name = "{0}.{1}".format(__name__, modulename)
+
+    try:
+        __import__(modulename, globals(), locals(), level=0)
+    except ImportError:
+        # We can just silently allow import failures to pass here. If we
+        # got to this point it means that ``import pip._vendor.whatever``
+        # failed and so did ``import whatever``. Since we're importing this
+        # upfront in an attempt to alias imports, not erroring here will
+        # just mean we get a regular import error whenever pip *actually*
+        # tries to import one of these modules to use it, which actually
+        # gives us a better error message than we would have otherwise
+        # gotten.
+        pass
+    else:
+        sys.modules[vendored_name] = sys.modules[modulename]
+        base, head = vendored_name.rsplit(".", 1)
+        setattr(sys.modules[base], head, sys.modules[modulename])
+
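+# Illustrative sketch (not part of upstream pip): after ``vendored("packaging")``
+# runs in a debundled setup, ``pip._vendor.packaging`` and the top-level
+# ``packaging`` refer to the same module object:
+#
+#     vendored("packaging")
+#     from pip._vendor import packaging as vendored_packaging
+#     import packaging
+#     assert vendored_packaging is packaging
+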
+
+# If we're operating in a debundled setup, then we want to go ahead and trigger
+# the aliasing of our vendored libraries as well as looking for wheels to add
+# to our sys.path. This will cause all of this code to be a no-op typically
+# however downstream redistributors can enable it in a consistent way across
+# all platforms.
+if DEBUNDLED:
+    # Actually look inside of WHEEL_DIR to find .whl files and add them to the
+    # front of our sys.path.
+    sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path
+
+    # Actually alias all of our vendored dependencies.
+    vendored("cachecontrol")
+    vendored("certifi")
+    vendored("colorama")
+    vendored("distlib")
+    vendored("distro")
+    vendored("six")
+    vendored("six.moves")
+    vendored("six.moves.urllib")
+    vendored("six.moves.urllib.parse")
+    vendored("packaging")
+    vendored("packaging.version")
+    vendored("packaging.specifiers")
+    vendored("pep517")
+    vendored("pkg_resources")
+    vendored("platformdirs")
+    vendored("progress")
+    vendored("requests")
+    vendored("requests.exceptions")
+    vendored("requests.packages")
+    vendored("requests.packages.urllib3")
+    vendored("requests.packages.urllib3._collections")
+    vendored("requests.packages.urllib3.connection")
+    vendored("requests.packages.urllib3.connectionpool")
+    vendored("requests.packages.urllib3.contrib")
+    vendored("requests.packages.urllib3.contrib.ntlmpool")
+    vendored("requests.packages.urllib3.contrib.pyopenssl")
+    vendored("requests.packages.urllib3.exceptions")
+    vendored("requests.packages.urllib3.fields")
+    vendored("requests.packages.urllib3.filepost")
+    vendored("requests.packages.urllib3.packages")
+    vendored("requests.packages.urllib3.packages.ordered_dict")
+    vendored("requests.packages.urllib3.packages.six")
+    vendored("requests.packages.urllib3.packages.ssl_match_hostname")
+    vendored("requests.packages.urllib3.packages.ssl_match_hostname."
+             "_implementation")
+    vendored("requests.packages.urllib3.poolmanager")
+    vendored("requests.packages.urllib3.request")
+    vendored("requests.packages.urllib3.response")
+    vendored("requests.packages.urllib3.util")
+    vendored("requests.packages.urllib3.util.connection")
+    vendored("requests.packages.urllib3.util.request")
+    vendored("requests.packages.urllib3.util.response")
+    vendored("requests.packages.urllib3.util.retry")
+    vendored("requests.packages.urllib3.util.ssl_")
+    vendored("requests.packages.urllib3.util.timeout")
+    vendored("requests.packages.urllib3.util.url")
+    vendored("resolvelib")
+    vendored("rich")
+    vendored("rich.console")
+    vendored("rich.highlighter")
+    vendored("rich.logging")
+    vendored("rich.markup")
+    vendored("rich.progress")
+    vendored("rich.segment")
+    vendored("rich.style")
+    vendored("rich.text")
+    vendored("rich.traceback")
+    vendored("tenacity")
+    vendored("tomli")
+    vendored("truststore")
+    vendored("urllib3")
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__init__.py
new file mode 100644
index 0000000..4d20bc9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__init__.py
@@ -0,0 +1,28 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+"""CacheControl import Interface.
+
+Make it easy to import from cachecontrol without long namespaces.
+"""
+__author__ = "Eric Larson"
+__email__ = "eric@ionrock.org"
+__version__ = "0.13.1"
+
+from pip._vendor.cachecontrol.adapter import CacheControlAdapter
+from pip._vendor.cachecontrol.controller import CacheController
+from pip._vendor.cachecontrol.wrapper import CacheControl
+
+__all__ = [
+    "__author__",
+    "__email__",
+    "__version__",
+    "CacheControlAdapter",
+    "CacheController",
+    "CacheControl",
+]
+
+import logging
+
+logging.getLogger(__name__).addHandler(logging.NullHandler())
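+
+
+# Illustrative sketch (not part of upstream CacheControl): the usual entry point
+# is wrapping a requests session so that GET responses are cached transparently.
+# The URL is hypothetical.
+def _example_cached_session() -> None:
+    from pip._vendor import requests
+
+    sess = CacheControl(requests.Session())
+    sess.get("https://example.com/")  # a repeat GET may be served from the cache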
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/_cmd.py b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/_cmd.py
new file mode 100644
index 0000000..2c84208
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/_cmd.py
@@ -0,0 +1,70 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
+import logging
+from argparse import ArgumentParser
+from typing import TYPE_CHECKING
+
+from pip._vendor import requests
+
+from pip._vendor.cachecontrol.adapter import CacheControlAdapter
+from pip._vendor.cachecontrol.cache import DictCache
+from pip._vendor.cachecontrol.controller import logger
+
+if TYPE_CHECKING:
+    from argparse import Namespace
+
+    from pip._vendor.cachecontrol.controller import CacheController
+
+
+def setup_logging() -> None:
+    logger.setLevel(logging.DEBUG)
+    handler = logging.StreamHandler()
+    logger.addHandler(handler)
+
+
+def get_session() -> requests.Session:
+    adapter = CacheControlAdapter(
+        DictCache(), cache_etags=True, serializer=None, heuristic=None
+    )
+    sess = requests.Session()
+    sess.mount("http://", adapter)
+    sess.mount("https://", adapter)
+
+    sess.cache_controller = adapter.controller  # type: ignore[attr-defined]
+    return sess
+
+
+def get_args() -> Namespace:
+    parser = ArgumentParser()
+    parser.add_argument("url", help="The URL to try and cache")
+    return parser.parse_args()
+
+
+def main() -> None:
+    args = get_args()
+    sess = get_session()
+
+    # Make a request to get a response
+    resp = sess.get(args.url)
+
+    # Turn on logging
+    setup_logging()
+
+    # try setting the cache
+    cache_controller: CacheController = (
+        sess.cache_controller  # type: ignore[attr-defined]
+    )
+    cache_controller.cache_response(resp.request, resp.raw)
+
+    # Now try to get it
+    if cache_controller.cached_request(resp.request):
+        print("Cached!")
+    else:
+        print("Not cached :(")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/adapter.py b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/adapter.py
new file mode 100644
index 0000000..3e83e30
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/adapter.py
@@ -0,0 +1,161 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
+import functools
+import types
+import zlib
+from typing import TYPE_CHECKING, Any, Collection, Mapping
+
+from pip._vendor.requests.adapters import HTTPAdapter
+
+from pip._vendor.cachecontrol.cache import DictCache
+from pip._vendor.cachecontrol.controller import PERMANENT_REDIRECT_STATUSES, CacheController
+from pip._vendor.cachecontrol.filewrapper import CallbackFileWrapper
+
+if TYPE_CHECKING:
+    from pip._vendor.requests import PreparedRequest, Response
+    from pip._vendor.urllib3 import HTTPResponse
+
+    from pip._vendor.cachecontrol.cache import BaseCache
+    from pip._vendor.cachecontrol.heuristics import BaseHeuristic
+    from pip._vendor.cachecontrol.serialize import Serializer
+
+
+class CacheControlAdapter(HTTPAdapter):
+    invalidating_methods = {"PUT", "PATCH", "DELETE"}
+
+    def __init__(
+        self,
+        cache: BaseCache | None = None,
+        cache_etags: bool = True,
+        controller_class: type[CacheController] | None = None,
+        serializer: Serializer | None = None,
+        heuristic: BaseHeuristic | None = None,
+        cacheable_methods: Collection[str] | None = None,
+        *args: Any,
+        **kw: Any,
+    ) -> None:
+        super().__init__(*args, **kw)
+        self.cache = DictCache() if cache is None else cache
+        self.heuristic = heuristic
+        self.cacheable_methods = cacheable_methods or ("GET",)
+
+        controller_factory = controller_class or CacheController
+        self.controller = controller_factory(
+            self.cache, cache_etags=cache_etags, serializer=serializer
+        )
+
+    def send(
+        self,
+        request: PreparedRequest,
+        stream: bool = False,
+        timeout: None | float | tuple[float, float] | tuple[float, None] = None,
+        verify: bool | str = True,
+        cert: (None | bytes | str | tuple[bytes | str, bytes | str]) = None,
+        proxies: Mapping[str, str] | None = None,
+        cacheable_methods: Collection[str] | None = None,
+    ) -> Response:
+        """
+        Send a request. Use the request information to see if it
+        exists in the cache and cache the response if we need to and can.
+        """
+        cacheable = cacheable_methods or self.cacheable_methods
+        if request.method in cacheable:
+            try:
+                cached_response = self.controller.cached_request(request)
+            except zlib.error:
+                cached_response = None
+            if cached_response:
+                return self.build_response(request, cached_response, from_cache=True)
+
+            # check for etags and add headers if appropriate
+            request.headers.update(self.controller.conditional_headers(request))
+
+        resp = super().send(request, stream, timeout, verify, cert, proxies)
+
+        return resp
+
+    def build_response(
+        self,
+        request: PreparedRequest,
+        response: HTTPResponse,
+        from_cache: bool = False,
+        cacheable_methods: Collection[str] | None = None,
+    ) -> Response:
+        """
+        Build a response by making a request or using the cache.
+
+        This will end up calling send and returning a potentially
+        cached response
+        """
+        cacheable = cacheable_methods or self.cacheable_methods
+        if not from_cache and request.method in cacheable:
+            # Check for any heuristics that might update headers
+            # before trying to cache.
+            if self.heuristic:
+                response = self.heuristic.apply(response)
+
+            # apply any expiration heuristics
+            if response.status == 304:
+                # We must have sent an ETag request. This could mean
+                # that we've been expired already or that we simply
+                # have an etag. In either case, we want to try and
+                # update the cache if that is the case.
+                cached_response = self.controller.update_cached_response(
+                    request, response
+                )
+
+                if cached_response is not response:
+                    from_cache = True
+
+                # We are done with the server response, read a
+                # possible response body (compliant servers will
+                # not return one, but we cannot be 100% sure) and
+                # release the connection back to the pool.
+                response.read(decode_content=False)
+                response.release_conn()
+
+                response = cached_response
+
+            # We always cache the 301 responses
+            elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
+                self.controller.cache_response(request, response)
+            else:
+                # Wrap the response file with a wrapper that will cache the
+                #   response when the stream has been consumed.
+                response._fp = CallbackFileWrapper(  # type: ignore[attr-defined]
+                    response._fp,  # type: ignore[attr-defined]
+                    functools.partial(
+                        self.controller.cache_response, request, response
+                    ),
+                )
+                if response.chunked:
+                    super_update_chunk_length = response._update_chunk_length  # type: ignore[attr-defined]
+
+                    def _update_chunk_length(self: HTTPResponse) -> None:
+                        super_update_chunk_length()
+                        if self.chunk_left == 0:
+                            self._fp._close()  # type: ignore[attr-defined]
+
+                    response._update_chunk_length = types.MethodType(  # type: ignore[attr-defined]
+                        _update_chunk_length, response
+                    )
+
+        resp: Response = super().build_response(request, response)  # type: ignore[no-untyped-call]
+
+        # See if we should invalidate the cache.
+        if request.method in self.invalidating_methods and resp.ok:
+            assert request.url is not None
+            cache_url = self.controller.cache_url(request.url)
+            self.cache.delete(cache_url)
+
+        # Give the request a from_cache attr to let people use it
+        resp.from_cache = from_cache  # type: ignore[attr-defined]
+
+        return resp
+
+    def close(self) -> None:
+        self.cache.close()
+        super().close()  # type: ignore[no-untyped-call]
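+
+
+# Illustrative sketch (not part of upstream CacheControl): mounting the adapter
+# directly on a requests session, much as the bundled _cmd helper does. The URL
+# is hypothetical.
+def _example_mount_adapter() -> None:
+    from pip._vendor import requests
+
+    sess = requests.Session()
+    sess.mount("https://", CacheControlAdapter(cache=DictCache(), cache_etags=True))
+    sess.get("https://example.com/")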
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/cache.py b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/cache.py
new file mode 100644
index 0000000..3293b00
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/cache.py
@@ -0,0 +1,74 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+"""
+The cache object API for implementing caches. The default is a thread
+safe in-memory dictionary.
+"""
+from __future__ import annotations
+
+from threading import Lock
+from typing import IO, TYPE_CHECKING, MutableMapping
+
+if TYPE_CHECKING:
+    from datetime import datetime
+
+
+class BaseCache:
+    def get(self, key: str) -> bytes | None:
+        raise NotImplementedError()
+
+    def set(
+        self, key: str, value: bytes, expires: int | datetime | None = None
+    ) -> None:
+        raise NotImplementedError()
+
+    def delete(self, key: str) -> None:
+        raise NotImplementedError()
+
+    def close(self) -> None:
+        pass
+
+
+class DictCache(BaseCache):
+    def __init__(self, init_dict: MutableMapping[str, bytes] | None = None) -> None:
+        self.lock = Lock()
+        self.data = init_dict or {}
+
+    def get(self, key: str) -> bytes | None:
+        return self.data.get(key, None)
+
+    def set(
+        self, key: str, value: bytes, expires: int | datetime | None = None
+    ) -> None:
+        with self.lock:
+            self.data.update({key: value})
+
+    def delete(self, key: str) -> None:
+        with self.lock:
+            if key in self.data:
+                self.data.pop(key)
+
+
+class SeparateBodyBaseCache(BaseCache):
+    """
+    In this variant, the body is not stored mixed in with the metadata, but is
+    passed in (as a bytes-like object) in a separate call to ``set_body()``.
+
+    That is, the expected interaction pattern is::
+
+        cache.set(key, serialized_metadata)
+        cache.set_body(key, body)
+
+    Similarly, the body should be loaded separately via ``get_body()``.
+    """
+
+    def set_body(self, key: str, body: bytes) -> None:
+        raise NotImplementedError()
+
+    def get_body(self, key: str) -> IO[bytes] | None:
+        """
+        Return the body as file-like object.
+        """
+        raise NotImplementedError()
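+
+
+# Illustrative sketch (not part of upstream CacheControl): a minimal in-memory
+# SeparateBodyBaseCache that keeps metadata and bodies in separate dicts, for
+# demonstration only.
+class _ExampleSeparateBodyDictCache(SeparateBodyBaseCache):
+    def __init__(self) -> None:
+        self.metadata: dict[str, bytes] = {}
+        self.bodies: dict[str, bytes] = {}
+
+    def get(self, key: str) -> bytes | None:
+        return self.metadata.get(key)
+
+    def set(
+        self, key: str, value: bytes, expires: int | datetime | None = None
+    ) -> None:
+        self.metadata[key] = value
+
+    def delete(self, key: str) -> None:
+        self.metadata.pop(key, None)
+        self.bodies.pop(key, None)
+
+    def set_body(self, key: str, body: bytes) -> None:
+        self.bodies[key] = body
+
+    def get_body(self, key: str) -> IO[bytes] | None:
+        import io
+
+        data = self.bodies.get(key)
+        return io.BytesIO(data) if data is not None else None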
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__init__.py
new file mode 100644
index 0000000..24ff469
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__init__.py
@@ -0,0 +1,8 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+from pip._vendor.cachecontrol.caches.file_cache import FileCache, SeparateBodyFileCache
+from pip._vendor.cachecontrol.caches.redis_cache import RedisCache
+
+__all__ = ["FileCache", "SeparateBodyFileCache", "RedisCache"]
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py
new file mode 100644
index 0000000..1fd2801
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py
@@ -0,0 +1,181 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
+import hashlib
+import os
+from textwrap import dedent
+from typing import IO, TYPE_CHECKING
+
+from pip._vendor.cachecontrol.cache import BaseCache, SeparateBodyBaseCache
+from pip._vendor.cachecontrol.controller import CacheController
+
+if TYPE_CHECKING:
+    from datetime import datetime
+
+    from filelock import BaseFileLock
+
+
+def _secure_open_write(filename: str, fmode: int) -> IO[bytes]:
+    # We only want to write to this file, so open it in write only mode
+    flags = os.O_WRONLY
+
+    # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only
+    #  will open *new* files.
+    # We specify this because we want to ensure that the mode we pass is the
+    # mode of the file.
+    flags |= os.O_CREAT | os.O_EXCL
+
+    # Do not follow symlinks to prevent someone from making a symlink that
+    # we follow and insecurely open a cache file.
+    if hasattr(os, "O_NOFOLLOW"):
+        flags |= os.O_NOFOLLOW
+
+    # On Windows we'll mark this file as binary
+    if hasattr(os, "O_BINARY"):
+        flags |= os.O_BINARY
+
+    # Before we open our file, we want to delete any existing file that is
+    # there
+    try:
+        os.remove(filename)
+    except OSError:
+        # The file must not exist already, so we can just skip ahead to opening
+        pass
+
+    # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a
+    # race condition happens between the os.remove and this line, that an
+    # error will be raised. Because we utilize a lockfile this should only
+    # happen if someone is attempting to attack us.
+    fd = os.open(filename, flags, fmode)
+    try:
+        return os.fdopen(fd, "wb")
+
+    except:
+        # An error occurred wrapping our FD in a file object
+        os.close(fd)
+        raise
+
+
+class _FileCacheMixin:
+    """Shared implementation for both FileCache variants."""
+
+    def __init__(
+        self,
+        directory: str,
+        forever: bool = False,
+        filemode: int = 0o0600,
+        dirmode: int = 0o0700,
+        lock_class: type[BaseFileLock] | None = None,
+    ) -> None:
+        try:
+            if lock_class is None:
+                from filelock import FileLock
+
+                lock_class = FileLock
+        except ImportError:
+            notice = dedent(
+                """
+            NOTE: In order to use the FileCache you must have
+            filelock installed. You can install it via pip:
+              pip install filelock
+            """
+            )
+            raise ImportError(notice)
+
+        self.directory = directory
+        self.forever = forever
+        self.filemode = filemode
+        self.dirmode = dirmode
+        self.lock_class = lock_class
+
+    @staticmethod
+    def encode(x: str) -> str:
+        return hashlib.sha224(x.encode()).hexdigest()
+
+    def _fn(self, name: str) -> str:
+        # NOTE: This method should not change as some may depend on it.
+        #       See: https://github.com/ionrock/cachecontrol/issues/63
+        hashed = self.encode(name)
+        parts = list(hashed[:5]) + [hashed]
+        return os.path.join(self.directory, *parts)
+
+    def get(self, key: str) -> bytes | None:
+        name = self._fn(key)
+        try:
+            with open(name, "rb") as fh:
+                return fh.read()
+
+        except FileNotFoundError:
+            return None
+
+    def set(
+        self, key: str, value: bytes, expires: int | datetime | None = None
+    ) -> None:
+        name = self._fn(key)
+        self._write(name, value)
+
+    def _write(self, path: str, data: bytes) -> None:
+        """
+        Safely write the data to the given path.
+        """
+        # Make sure the directory exists
+        try:
+            os.makedirs(os.path.dirname(path), self.dirmode)
+        except OSError:
+            pass
+
+        with self.lock_class(path + ".lock"):
+            # Write our actual file
+            with _secure_open_write(path, self.filemode) as fh:
+                fh.write(data)
+
+    def _delete(self, key: str, suffix: str) -> None:
+        name = self._fn(key) + suffix
+        if not self.forever:
+            try:
+                os.remove(name)
+            except FileNotFoundError:
+                pass
+
+
+class FileCache(_FileCacheMixin, BaseCache):
+    """
+    Traditional FileCache: body is stored in memory, so not suitable for large
+    downloads.
+    """
+
+    def delete(self, key: str) -> None:
+        self._delete(key, "")
+
+
+class SeparateBodyFileCache(_FileCacheMixin, SeparateBodyBaseCache):
+    """
+    Memory-efficient FileCache: body is stored in a separate file, reducing
+    peak memory usage.
+    """
+
+    def get_body(self, key: str) -> IO[bytes] | None:
+        name = self._fn(key) + ".body"
+        try:
+            return open(name, "rb")
+        except FileNotFoundError:
+            return None
+
+    def set_body(self, key: str, body: bytes) -> None:
+        name = self._fn(key) + ".body"
+        self._write(name, body)
+
+    def delete(self, key: str) -> None:
+        self._delete(key, "")
+        self._delete(key, ".body")
+
+
+def url_to_file_path(url: str, filecache: FileCache) -> str:
+    """Return the file cache path based on the URL.
+
+    This does not ensure the file exists!
+    """
+    key = CacheController.cache_url(url)
+    return filecache._fn(key)
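+
+
+# Illustrative sketch (not part of upstream CacheControl): persisting cached
+# responses on disk by handing a FileCache to the session wrapper. The cache
+# directory and URL are hypothetical.
+def _example_file_cached_session() -> None:
+    from pip._vendor import requests
+    from pip._vendor.cachecontrol import CacheControl
+
+    sess = CacheControl(requests.Session(), cache=FileCache(".web_cache"))
+    sess.get("https://example.com/")  # response metadata lands under .web_cache/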
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py
new file mode 100644
index 0000000..f4f68c4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py
@@ -0,0 +1,48 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
+
+from datetime import datetime, timezone
+from typing import TYPE_CHECKING
+
+from pip._vendor.cachecontrol.cache import BaseCache
+
+if TYPE_CHECKING:
+    from redis import Redis
+
+
+class RedisCache(BaseCache):
+    def __init__(self, conn: Redis[bytes]) -> None:
+        self.conn = conn
+
+    def get(self, key: str) -> bytes | None:
+        return self.conn.get(key)
+
+    def set(
+        self, key: str, value: bytes, expires: int | datetime | None = None
+    ) -> None:
+        if not expires:
+            self.conn.set(key, value)
+        elif isinstance(expires, datetime):
+            now_utc = datetime.now(timezone.utc)
+            if expires.tzinfo is None:
+                now_utc = now_utc.replace(tzinfo=None)
+            delta = expires - now_utc
+            self.conn.setex(key, int(delta.total_seconds()), value)
+        else:
+            self.conn.setex(key, expires, value)
+
+    def delete(self, key: str) -> None:
+        self.conn.delete(key)
+
+    def clear(self) -> None:
+        """Helper for clearing all the keys in a database. Use with
+        caution!"""
+        for key in self.conn.keys():
+            self.conn.delete(key)
+
+    def close(self) -> None:
+        """Redis uses connection pooling, no need to close the connection."""
+        pass
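+
+
+# Illustrative sketch (not part of upstream CacheControl): backing the HTTP cache
+# with Redis. This assumes the third-party ``redis`` package is installed; the
+# host, port, and URL are hypothetical.
+def _example_redis_cached_session() -> None:
+    import redis
+
+    from pip._vendor import requests
+    from pip._vendor.cachecontrol import CacheControl
+
+    conn = redis.Redis(host="localhost", port=6379)
+    sess = CacheControl(requests.Session(), cache=RedisCache(conn))
+    sess.get("https://example.com/")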
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/controller.py b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/controller.py
new file mode 100644
index 0000000..586b9f9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/controller.py
@@ -0,0 +1,494 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+"""
+The httplib2 algorithms ported for use with requests.
+"""
+from __future__ import annotations
+
+import calendar
+import logging
+import re
+import time
+from email.utils import parsedate_tz
+from typing import TYPE_CHECKING, Collection, Mapping
+
+from pip._vendor.requests.structures import CaseInsensitiveDict
+
+from pip._vendor.cachecontrol.cache import DictCache, SeparateBodyBaseCache
+from pip._vendor.cachecontrol.serialize import Serializer
+
+if TYPE_CHECKING:
+    from typing import Literal
+
+    from pip._vendor.requests import PreparedRequest
+    from pip._vendor.urllib3 import HTTPResponse
+
+    from pip._vendor.cachecontrol.cache import BaseCache
+
+logger = logging.getLogger(__name__)
+
+URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
+
+PERMANENT_REDIRECT_STATUSES = (301, 308)
+
+
+def parse_uri(uri: str) -> tuple[str, str, str, str, str]:
+    """Parses a URI using the regex given in Appendix B of RFC 3986.
+
+    (scheme, authority, path, query, fragment) = parse_uri(uri)
+    """
+    match = URI.match(uri)
+    assert match is not None
+    groups = match.groups()
+    return (groups[1], groups[3], groups[4], groups[6], groups[8])
+
+
+class CacheController:
+    """An interface to see if request should cached or not."""
+
+    def __init__(
+        self,
+        cache: BaseCache | None = None,
+        cache_etags: bool = True,
+        serializer: Serializer | None = None,
+        status_codes: Collection[int] | None = None,
+    ):
+        self.cache = DictCache() if cache is None else cache
+        self.cache_etags = cache_etags
+        self.serializer = serializer or Serializer()
+        self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308)
+
+    @classmethod
+    def _urlnorm(cls, uri: str) -> str:
+        """Normalize the URL to create a safe key for the cache"""
+        (scheme, authority, path, query, fragment) = parse_uri(uri)
+        if not scheme or not authority:
+            raise Exception("Only absolute URIs are allowed. uri = %s" % uri)
+
+        scheme = scheme.lower()
+        authority = authority.lower()
+
+        if not path:
+            path = "/"
+
+        # Could do syntax based normalization of the URI before
+        # computing the digest. See Section 6.2.2 of Std 66.
+        request_uri = query and "?".join([path, query]) or path
+        defrag_uri = scheme + "://" + authority + request_uri
+
+        return defrag_uri
+
+    @classmethod
+    def cache_url(cls, uri: str) -> str:
+        return cls._urlnorm(uri)
+
+    def parse_cache_control(self, headers: Mapping[str, str]) -> dict[str, int | None]:
+        known_directives = {
+            # https://tools.ietf.org/html/rfc7234#section-5.2
+            "max-age": (int, True),
+            "max-stale": (int, False),
+            "min-fresh": (int, True),
+            "no-cache": (None, False),
+            "no-store": (None, False),
+            "no-transform": (None, False),
+            "only-if-cached": (None, False),
+            "must-revalidate": (None, False),
+            "public": (None, False),
+            "private": (None, False),
+            "proxy-revalidate": (None, False),
+            "s-maxage": (int, True),
+        }
+
+        cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))
+
+        retval: dict[str, int | None] = {}
+
+        for cc_directive in cc_headers.split(","):
+            if not cc_directive.strip():
+                continue
+
+            parts = cc_directive.split("=", 1)
+            directive = parts[0].strip()
+
+            try:
+                typ, required = known_directives[directive]
+            except KeyError:
+                logger.debug("Ignoring unknown cache-control directive: %s", directive)
+                continue
+
+            if not typ or not required:
+                retval[directive] = None
+            if typ:
+                try:
+                    retval[directive] = typ(parts[1].strip())
+                except IndexError:
+                    if required:
+                        logger.debug(
+                            "Missing value for cache-control " "directive: %s",
+                            directive,
+                        )
+                except ValueError:
+                    logger.debug(
+                        "Invalid value for cache-control directive " "%s, must be %s",
+                        directive,
+                        typ.__name__,
+                    )
+
+        return retval
+
+    def _load_from_cache(self, request: PreparedRequest) -> HTTPResponse | None:
+        """
+        Load a cached response, or return None if it's not available.
+        """
+        cache_url = request.url
+        assert cache_url is not None
+        cache_data = self.cache.get(cache_url)
+        if cache_data is None:
+            logger.debug("No cache entry available")
+            return None
+
+        if isinstance(self.cache, SeparateBodyBaseCache):
+            body_file = self.cache.get_body(cache_url)
+        else:
+            body_file = None
+
+        result = self.serializer.loads(request, cache_data, body_file)
+        if result is None:
+            logger.warning("Cache entry deserialization failed, entry ignored")
+        return result
+
+    def cached_request(self, request: PreparedRequest) -> HTTPResponse | Literal[False]:
+        """
+        Return a cached response if it exists in the cache, otherwise
+        return False.
+        """
+        assert request.url is not None
+        cache_url = self.cache_url(request.url)
+        logger.debug('Looking up "%s" in the cache', cache_url)
+        cc = self.parse_cache_control(request.headers)
+
+        # Bail out if the request insists on fresh data
+        if "no-cache" in cc:
+            logger.debug('Request header has "no-cache", cache bypassed')
+            return False
+
+        if "max-age" in cc and cc["max-age"] == 0:
+            logger.debug('Request header has "max-age" as 0, cache bypassed')
+            return False
+
+        # Check whether we can load the response from the cache:
+        resp = self._load_from_cache(request)
+        if not resp:
+            return False
+
+        # If we have a cached permanent redirect, return it immediately. We
+        # don't need to test our response for other headers b/c it is
+        # intrinsically "cacheable" as it is Permanent.
+        #
+        # See:
+        #   https://tools.ietf.org/html/rfc7231#section-6.4.2
+        #
+        # Client can try to refresh the value by repeating the request
+        # with cache busting headers as usual (ie no-cache).
+        if int(resp.status) in PERMANENT_REDIRECT_STATUSES:
+            msg = (
+                "Returning cached permanent redirect response "
+                "(ignoring date and etag information)"
+            )
+            logger.debug(msg)
+            return resp
+
+        headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers)
+        if not headers or "date" not in headers:
+            if "etag" not in headers:
+                # Without date or etag, the cached response can never be used
+                # and should be deleted.
+                logger.debug("Purging cached response: no date or etag")
+                self.cache.delete(cache_url)
+            logger.debug("Ignoring cached response: no date")
+            return False
+
+        now = time.time()
+        time_tuple = parsedate_tz(headers["date"])
+        assert time_tuple is not None
+        date = calendar.timegm(time_tuple[:6])
+        current_age = max(0, now - date)
+        logger.debug("Current age based on date: %i", current_age)
+
+        # TODO: There is an assumption that the result will be a
+        #       urllib3 response object. This may not be best since we
+        #       could probably avoid instantiating or constructing the
+        #       response until we know we need it.
+        resp_cc = self.parse_cache_control(headers)
+
+        # determine freshness
+        freshness_lifetime = 0
+
+        # Check the max-age pragma in the cache control header
+        max_age = resp_cc.get("max-age")
+        if max_age is not None:
+            freshness_lifetime = max_age
+            logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)
+
+        # If there isn't a max-age, check for an expires header
+        elif "expires" in headers:
+            expires = parsedate_tz(headers["expires"])
+            if expires is not None:
+                expire_time = calendar.timegm(expires[:6]) - date
+                freshness_lifetime = max(0, expire_time)
+                logger.debug("Freshness lifetime from expires: %i", freshness_lifetime)
+
+        # Determine if we are setting freshness limit in the
+        # request. Note, this overrides what was in the response.
+        max_age = cc.get("max-age")
+        if max_age is not None:
+            freshness_lifetime = max_age
+            logger.debug(
+                "Freshness lifetime from request max-age: %i", freshness_lifetime
+            )
+
+        min_fresh = cc.get("min-fresh")
+        if min_fresh is not None:
+            # adjust our current age by our min fresh
+            current_age += min_fresh
+            logger.debug("Adjusted current age from min-fresh: %i", current_age)
+
+        # Return entry if it is fresh enough
+        if freshness_lifetime > current_age:
+            logger.debug('The response is "fresh", returning cached response')
+            logger.debug("%i > %i", freshness_lifetime, current_age)
+            return resp
+
+        # we're not fresh. If we don't have an Etag, clear it out
+        if "etag" not in headers:
+            logger.debug('The cached response is "stale" with no etag, purging')
+            self.cache.delete(cache_url)
+
+        # return the original handler
+        return False
+
+    def conditional_headers(self, request: PreparedRequest) -> dict[str, str]:
+        resp = self._load_from_cache(request)
+        new_headers = {}
+
+        if resp:
+            headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers)
+
+            if "etag" in headers:
+                new_headers["If-None-Match"] = headers["ETag"]
+
+            if "last-modified" in headers:
+                new_headers["If-Modified-Since"] = headers["Last-Modified"]
+
+        return new_headers
+
+    def _cache_set(
+        self,
+        cache_url: str,
+        request: PreparedRequest,
+        response: HTTPResponse,
+        body: bytes | None = None,
+        expires_time: int | None = None,
+    ) -> None:
+        """
+        Store the data in the cache.
+        """
+        if isinstance(self.cache, SeparateBodyBaseCache):
+            # We pass in the body separately; just put a placeholder empty
+            # string in the metadata.
+            self.cache.set(
+                cache_url,
+                self.serializer.dumps(request, response, b""),
+                expires=expires_time,
+            )
+            # body is None can happen when, for example, we're only updating
+            # headers, as is the case in update_cached_response().
+            if body is not None:
+                self.cache.set_body(cache_url, body)
+        else:
+            self.cache.set(
+                cache_url,
+                self.serializer.dumps(request, response, body),
+                expires=expires_time,
+            )
+
+    def cache_response(
+        self,
+        request: PreparedRequest,
+        response: HTTPResponse,
+        body: bytes | None = None,
+        status_codes: Collection[int] | None = None,
+    ) -> None:
+        """
+        Algorithm for caching requests.
+
+        This assumes a requests Response object.
+        """
+        # From httplib2: Don't cache 206's since we aren't going to
+        #                handle byte range requests
+        cacheable_status_codes = status_codes or self.cacheable_status_codes
+        if response.status not in cacheable_status_codes:
+            logger.debug(
+                "Status code %s not in %s", response.status, cacheable_status_codes
+            )
+            return
+
+        response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(
+            response.headers
+        )
+
+        if "date" in response_headers:
+            time_tuple = parsedate_tz(response_headers["date"])
+            assert time_tuple is not None
+            date = calendar.timegm(time_tuple[:6])
+        else:
+            date = 0
+
+        # If we've been given a body, our response has a Content-Length, and
+        # that Content-Length is valid, then we can check whether the body
+        # we've been given matches the expected size; if it doesn't, we'll
+        # just skip trying to cache it.
+        if (
+            body is not None
+            and "content-length" in response_headers
+            and response_headers["content-length"].isdigit()
+            and int(response_headers["content-length"]) != len(body)
+        ):
+            return
+
+        cc_req = self.parse_cache_control(request.headers)
+        cc = self.parse_cache_control(response_headers)
+
+        assert request.url is not None
+        cache_url = self.cache_url(request.url)
+        logger.debug('Updating cache with response from "%s"', cache_url)
+
+        # Delete it from the cache if we happen to have it stored there
+        no_store = False
+        if "no-store" in cc:
+            no_store = True
+            logger.debug('Response header has "no-store"')
+        if "no-store" in cc_req:
+            no_store = True
+            logger.debug('Request header has "no-store"')
+        if no_store and self.cache.get(cache_url):
+            logger.debug('Purging existing cache entry to honor "no-store"')
+            self.cache.delete(cache_url)
+        if no_store:
+            return
+
+        # https://tools.ietf.org/html/rfc7234#section-4.1:
+        # A Vary header field-value of "*" always fails to match.
+        # Storing such a response leads to a deserialization warning
+        # during cache lookup and is not allowed to ever be served,
+        # so storing it can be avoided.
+        if "*" in response_headers.get("vary", ""):
+            logger.debug('Response header has "Vary: *"')
+            return
+
+        # If we've been given an etag, then keep the response
+        if self.cache_etags and "etag" in response_headers:
+            expires_time = 0
+            if response_headers.get("expires"):
+                expires = parsedate_tz(response_headers["expires"])
+                if expires is not None:
+                    expires_time = calendar.timegm(expires[:6]) - date
+
+            expires_time = max(expires_time, 14 * 86400)
+
+            logger.debug(f"etag object cached for {expires_time} seconds")
+            logger.debug("Caching due to etag")
+            self._cache_set(cache_url, request, response, body, expires_time)
+
+        # Add to the cache any permanent redirects. We do this before looking
+        # at the Date headers.
+        elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
+            logger.debug("Caching permanent redirect")
+            self._cache_set(cache_url, request, response, b"")
+
+        # Add to the cache if the response headers demand it. If there
+        # is no date header then we can't do anything about expiring
+        # the cache.
+        elif "date" in response_headers:
+            time_tuple = parsedate_tz(response_headers["date"])
+            assert time_tuple is not None
+            date = calendar.timegm(time_tuple[:6])
+            # cache when there is a max-age > 0
+            max_age = cc.get("max-age")
+            if max_age is not None and max_age > 0:
+                logger.debug("Caching b/c date exists and max-age > 0")
+                expires_time = max_age
+                self._cache_set(
+                    cache_url,
+                    request,
+                    response,
+                    body,
+                    expires_time,
+                )
+
+            # If the response can expire, it means we should cache it
+            # in the meantime.
+            elif "expires" in response_headers:
+                if response_headers["expires"]:
+                    expires = parsedate_tz(response_headers["expires"])
+                    if expires is not None:
+                        expires_time = calendar.timegm(expires[:6]) - date
+                    else:
+                        expires_time = None
+
+                    logger.debug(
+                        "Caching b/c of expires header. expires in {} seconds".format(
+                            expires_time
+                        )
+                    )
+                    self._cache_set(
+                        cache_url,
+                        request,
+                        response,
+                        body,
+                        expires_time,
+                    )
+
+    def update_cached_response(
+        self, request: PreparedRequest, response: HTTPResponse
+    ) -> HTTPResponse:
+        """On a 304 we will get a new set of headers that we want to
+        update our cached value with, assuming we have one.
+
+        This should only ever be called when we've sent an ETag and
+        gotten a 304 as the response.
+        """
+        assert request.url is not None
+        cache_url = self.cache_url(request.url)
+        cached_response = self._load_from_cache(request)
+
+        if not cached_response:
+            # we didn't have a cached response
+            return response
+
+        # Let's update our headers with the headers from the new response:
+        # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
+        #
+        # The server isn't supposed to send headers that would make
+        # the cached body invalid. But... just in case, we'll be sure
+        # to strip out ones we know might be problematic due to
+        # typical assumptions.
+        excluded_headers = ["content-length"]
+
+        cached_response.headers.update(
+            {
+                k: v
+                for k, v in response.headers.items()  # type: ignore[no-untyped-call]
+                if k.lower() not in excluded_headers
+            }
+        )
+
+        # we want a 200 b/c we have content via the cache
+        cached_response.status = 200
+
+        # update our cache
+        self._cache_set(cache_url, request, cached_response)
+
+        return cached_response
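
A hedged sketch of driving the controller above directly, here just to parse a Cache-Control header; the import path assumes the upstream cachecontrol distribution (pip's copy lives under pip._vendor.cachecontrol):

    from cachecontrol.controller import CacheController

    controller = CacheController()
    directives = controller.parse_cache_control(
        {"cache-control": "max-age=3600, no-store"}
    )
    # Valued directives are coerced to int, bare directives map to None.
    assert directives == {"max-age": 3600, "no-store": None}
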
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/filewrapper.py b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/filewrapper.py
new file mode 100644
index 0000000..2514390
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/filewrapper.py
@@ -0,0 +1,119 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
+import mmap
+from tempfile import NamedTemporaryFile
+from typing import TYPE_CHECKING, Any, Callable
+
+if TYPE_CHECKING:
+    from http.client import HTTPResponse
+
+
+class CallbackFileWrapper:
+    """
+    Small wrapper around a fp object which will tee everything read into a
+    buffer, and when that file is closed it will execute a callback with the
+    contents of that buffer.
+
+    All attributes are proxied to the underlying file object.
+
+    This class uses members with a double underscore (__) leading prefix so as
+    not to accidentally shadow an attribute.
+
+    The data is stored in a temporary file until it is all available.  As long
+    as the temporary files directory is disk-based (sometimes it's a
+    memory-backed ``tmpfs`` on Linux), data will be unloaded to disk if memory
+    pressure is high.  For small files the disk usually won't be used at all;
+    it'll all be in the filesystem memory cache, so there should be no
+    performance impact.
+    """
+
+    def __init__(
+        self, fp: HTTPResponse, callback: Callable[[bytes], None] | None
+    ) -> None:
+        self.__buf = NamedTemporaryFile("rb+", delete=True)
+        self.__fp = fp
+        self.__callback = callback
+
+    def __getattr__(self, name: str) -> Any:
+        # The vagaries of garbage collection mean that self.__fp is
+        # not always set.  Using __getattribute__ with the mangled
+        # private name [0] lets us look up the attribute value and
+        # raise an AttributeError when it doesn't exist. This stops
+        # things from infinitely recursing into getattr in the case
+        # where self.__fp hasn't been set.
+        #
+        # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
+        fp = self.__getattribute__("_CallbackFileWrapper__fp")
+        return getattr(fp, name)
+
+    def __is_fp_closed(self) -> bool:
+        try:
+            return self.__fp.fp is None
+
+        except AttributeError:
+            pass
+
+        try:
+            closed: bool = self.__fp.closed
+            return closed
+
+        except AttributeError:
+            pass
+
+        # We just don't cache it then.
+        # TODO: Add some logging here...
+        return False
+
+    def _close(self) -> None:
+        if self.__callback:
+            if self.__buf.tell() == 0:
+                # Empty file:
+                result = b""
+            else:
+                # Return the data without actually loading it into memory,
+                # relying on Python's buffer API and mmap(). mmap() just gives
+                # a view directly into the filesystem's memory cache, so it
+                # doesn't result in duplicate memory use.
+                self.__buf.seek(0, 0)
+                result = memoryview(
+                    mmap.mmap(self.__buf.fileno(), 0, access=mmap.ACCESS_READ)
+                )
+            self.__callback(result)
+
+        # We assign this to None here, because otherwise we can get into
+        # really tricky problems where the CPython interpreter deadlocks
+        # because the callback is holding a reference to something which
+        # has a __del__ method. Setting this to None breaks the cycle
+        # and allows the garbage collector to do its thing normally.
+        self.__callback = None
+
+        # Closing the temporary file releases memory and frees disk space.
+        # Important when caching big files.
+        self.__buf.close()
+
+    def read(self, amt: int | None = None) -> bytes:
+        data: bytes = self.__fp.read(amt)
+        if data:
+            # We may be dealing with b'', a sign that things are over:
+            # it's passed e.g. after we've already closed self.__buf.
+            self.__buf.write(data)
+        if self.__is_fp_closed():
+            self._close()
+
+        return data
+
+    def _safe_read(self, amt: int) -> bytes:
+        data: bytes = self.__fp._safe_read(amt)  # type: ignore[attr-defined]
+        if amt == 2 and data == b"\r\n":
+            # urllib executes this read to toss the CRLF at the end
+            # of the chunk.
+            return data
+
+        self.__buf.write(data)
+        if self.__is_fp_closed():
+            self._close()
+
+        return data
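
A small sketch of the tee-and-callback behaviour of CallbackFileWrapper above, using a hypothetical stand-in for the raw HTTP response object (the real caller passes an http.client.HTTPResponse); the import path assumes the upstream cachecontrol distribution:

    import io
    from cachecontrol.filewrapper import CallbackFileWrapper

    class FakeRawResponse:
        # Hypothetical stand-in for http.client.HTTPResponse: exposes read()
        # and an `fp` attribute that becomes None at end-of-stream, which is
        # what CallbackFileWrapper checks to decide the body is complete.
        def __init__(self, payload: bytes) -> None:
            self.fp = io.BytesIO(payload)

        def read(self, amt=None):
            data = self.fp.read(amt)
            if not data:
                self.fp = None
            return data

    captured = []
    wrapped = CallbackFileWrapper(FakeRawResponse(b"hello world"), captured.append)
    while wrapped.read(4):
        pass
    assert bytes(captured[0]) == b"hello world"  # callback received the full teed body
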
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/heuristics.py b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/heuristics.py
new file mode 100644
index 0000000..b9d72ca
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/heuristics.py
@@ -0,0 +1,154 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
+import calendar
+import time
+from datetime import datetime, timedelta, timezone
+from email.utils import formatdate, parsedate, parsedate_tz
+from typing import TYPE_CHECKING, Any, Mapping
+
+if TYPE_CHECKING:
+    from pip._vendor.urllib3 import HTTPResponse
+
+TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
+
+
+def expire_after(delta: timedelta, date: datetime | None = None) -> datetime:
+    date = date or datetime.now(timezone.utc)
+    return date + delta
+
+
+def datetime_to_header(dt: datetime) -> str:
+    return formatdate(calendar.timegm(dt.timetuple()))
+
+
+class BaseHeuristic:
+    def warning(self, response: HTTPResponse) -> str | None:
+        """
+        Return a valid 1xx warning header value describing the cache
+        adjustments.
+
+        The response is provided to allow warnings like 113
+        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
+        to explicitly say the response is over 24 hours old.
+        """
+        return '110 - "Response is Stale"'
+
+    def update_headers(self, response: HTTPResponse) -> dict[str, str]:
+        """Update the response headers with any new headers.
+
+        NOTE: This SHOULD always include some Warning header to
+              signify that the response was cached by the client, not
+              by way of the provided headers.
+        """
+        return {}
+
+    def apply(self, response: HTTPResponse) -> HTTPResponse:
+        updated_headers = self.update_headers(response)
+
+        if updated_headers:
+            response.headers.update(updated_headers)
+            warning_header_value = self.warning(response)
+            if warning_header_value is not None:
+                response.headers.update({"Warning": warning_header_value})
+
+        return response
+
+
+class OneDayCache(BaseHeuristic):
+    """
+    Cache the response by providing an Expires header 1 day in the
+    future.
+    """
+
+    def update_headers(self, response: HTTPResponse) -> dict[str, str]:
+        headers = {}
+
+        if "expires" not in response.headers:
+            date = parsedate(response.headers["date"])
+            expires = expire_after(timedelta(days=1), date=datetime(*date[:6], tzinfo=timezone.utc))  # type: ignore[misc]
+            headers["expires"] = datetime_to_header(expires)
+            headers["cache-control"] = "public"
+        return headers
+
+
+class ExpiresAfter(BaseHeuristic):
+    """
+    Cache **all** requests for a defined time period.
+    """
+
+    def __init__(self, **kw: Any) -> None:
+        self.delta = timedelta(**kw)
+
+    def update_headers(self, response: HTTPResponse) -> dict[str, str]:
+        expires = expire_after(self.delta)
+        return {"expires": datetime_to_header(expires), "cache-control": "public"}
+
+    def warning(self, response: HTTPResponse) -> str | None:
+        tmpl = "110 - Automatically cached for %s. Response might be stale"
+        return tmpl % self.delta
+
+
+class LastModified(BaseHeuristic):
+    """
+    If there is no Expires header already, fall back on Last-Modified
+    using the heuristic from
+    http://tools.ietf.org/html/rfc7234#section-4.2.2
+    to calculate a reasonable value.
+
+    Firefox also does something like this per
+    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
+    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
+    Unlike Mozilla, we limit this to 24 hours.
+    """
+
+    cacheable_by_default_statuses = {
+        200,
+        203,
+        204,
+        206,
+        300,
+        301,
+        404,
+        405,
+        410,
+        414,
+        501,
+    }
+
+    def update_headers(self, resp: HTTPResponse) -> dict[str, str]:
+        headers: Mapping[str, str] = resp.headers
+
+        if "expires" in headers:
+            return {}
+
+        if "cache-control" in headers and headers["cache-control"] != "public":
+            return {}
+
+        if resp.status not in self.cacheable_by_default_statuses:
+            return {}
+
+        if "date" not in headers or "last-modified" not in headers:
+            return {}
+
+        time_tuple = parsedate_tz(headers["date"])
+        assert time_tuple is not None
+        date = calendar.timegm(time_tuple[:6])
+        last_modified = parsedate(headers["last-modified"])
+        if last_modified is None:
+            return {}
+
+        now = time.time()
+        current_age = max(0, now - date)
+        delta = date - calendar.timegm(last_modified)
+        freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
+        if freshness_lifetime <= current_age:
+            return {}
+
+        expires = date + freshness_lifetime
+        return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))}
+
+    def warning(self, resp: HTTPResponse) -> str | None:
+        return None
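
A hedged usage sketch for the heuristics above: attach one to a cached session so responses without explicit caching headers get a synthetic lifetime. Import paths assume the upstream requests and cachecontrol distributions:

    import requests
    from cachecontrol import CacheControl
    from cachecontrol.heuristics import ExpiresAfter, LastModified

    # Cache everything for a week...
    sess = CacheControl(requests.Session(), heuristic=ExpiresAfter(days=7))
    # ...or fall back on the Last-Modified heuristic shown above.
    sess = CacheControl(requests.Session(), heuristic=LastModified())
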
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/serialize.py b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/serialize.py
new file mode 100644
index 0000000..f9e967c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/serialize.py
@@ -0,0 +1,206 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
+import io
+from typing import IO, TYPE_CHECKING, Any, Mapping, cast
+
+from pip._vendor import msgpack
+from pip._vendor.requests.structures import CaseInsensitiveDict
+from pip._vendor.urllib3 import HTTPResponse
+
+if TYPE_CHECKING:
+    from pip._vendor.requests import PreparedRequest
+
+
+class Serializer:
+    serde_version = "4"
+
+    def dumps(
+        self,
+        request: PreparedRequest,
+        response: HTTPResponse,
+        body: bytes | None = None,
+    ) -> bytes:
+        response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(
+            response.headers
+        )
+
+        if body is None:
+            # When a body isn't passed in, we'll read the response. We
+            # also update the response with a new file handler to be
+            # sure it acts as though it was never read.
+            body = response.read(decode_content=False)
+            response._fp = io.BytesIO(body)  # type: ignore[attr-defined]
+            response.length_remaining = len(body)
+
+        data = {
+            "response": {
+                "body": body,  # Empty bytestring if body is stored separately
+                "headers": {str(k): str(v) for k, v in response.headers.items()},  # type: ignore[no-untyped-call]
+                "status": response.status,
+                "version": response.version,
+                "reason": str(response.reason),
+                "decode_content": response.decode_content,
+            }
+        }
+
+        # Construct our vary headers
+        data["vary"] = {}
+        if "vary" in response_headers:
+            varied_headers = response_headers["vary"].split(",")
+            for header in varied_headers:
+                header = str(header).strip()
+                header_value = request.headers.get(header, None)
+                if header_value is not None:
+                    header_value = str(header_value)
+                data["vary"][header] = header_value
+
+        return b",".join([f"cc={self.serde_version}".encode(), self.serialize(data)])
+
+    def serialize(self, data: dict[str, Any]) -> bytes:
+        return cast(bytes, msgpack.dumps(data, use_bin_type=True))
+
+    def loads(
+        self,
+        request: PreparedRequest,
+        data: bytes,
+        body_file: IO[bytes] | None = None,
+    ) -> HTTPResponse | None:
+        # Short circuit if we've been given an empty set of data
+        if not data:
+            return None
+
+        # Determine what version of the serializer the data was serialized
+        # with
+        try:
+            ver, data = data.split(b",", 1)
+        except ValueError:
+            ver = b"cc=0"
+
+        # Make sure that our "ver" is actually a version and isn't a false
+        # positive from a comma being in the data stream.
+        if ver[:3] != b"cc=":
+            data = ver + data
+            ver = b"cc=0"
+
+        # Get the version number out of the cc=N
+        verstr = ver.split(b"=", 1)[-1].decode("ascii")
+
+        # Dispatch to the actual load method for the given version
+        try:
+            return getattr(self, f"_loads_v{verstr}")(request, data, body_file)  # type: ignore[no-any-return]
+
+        except AttributeError:
+            # This is a version we don't have a loads function for, so we'll
+            # just treat it as a miss and return None
+            return None
+
+    def prepare_response(
+        self,
+        request: PreparedRequest,
+        cached: Mapping[str, Any],
+        body_file: IO[bytes] | None = None,
+    ) -> HTTPResponse | None:
+        """Verify our vary headers match and construct a real urllib3
+        HTTPResponse object.
+        """
+        # Special case the '*' Vary value as it means we cannot actually
+        # determine if the cached response is suitable for this request.
+        # This case is also handled in the controller code when creating
+        # a cache entry, but is left here for backwards compatibility.
+        if "*" in cached.get("vary", {}):
+            return None
+
+        # Ensure that the Vary headers for the cached response match our
+        # request
+        for header, value in cached.get("vary", {}).items():
+            if request.headers.get(header, None) != value:
+                return None
+
+        body_raw = cached["response"].pop("body")
+
+        headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(
+            data=cached["response"]["headers"]
+        )
+        if headers.get("transfer-encoding", "") == "chunked":
+            headers.pop("transfer-encoding")
+
+        cached["response"]["headers"] = headers
+
+        try:
+            body: IO[bytes]
+            if body_file is None:
+                body = io.BytesIO(body_raw)
+            else:
+                body = body_file
+        except TypeError:
+            # This can happen if cachecontrol serialized to v1 format (pickle)
+            # using Python 2. A Python 2 str(byte string) will be unpickled as
+            # a Python 3 str (unicode string), which will cause the above to
+            # fail with:
+            #
+            #     TypeError: 'str' does not support the buffer interface
+            body = io.BytesIO(body_raw.encode("utf8"))
+
+        # Discard any `strict` parameter serialized by older version of cachecontrol.
+        cached["response"].pop("strict", None)
+
+        return HTTPResponse(body=body, preload_content=False, **cached["response"])
+
+    def _loads_v0(
+        self,
+        request: PreparedRequest,
+        data: bytes,
+        body_file: IO[bytes] | None = None,
+    ) -> None:
+        # The original legacy cache data. This doesn't contain enough
+        # information to construct everything we need, so we'll treat this as
+        # a miss.
+        return None
+
+    def _loads_v1(
+        self,
+        request: PreparedRequest,
+        data: bytes,
+        body_file: IO[bytes] | None = None,
+    ) -> HTTPResponse | None:
+        # The "v1" pickled cache format. This is no longer supported
+        # for security reasons, so we treat it as a miss.
+        return None
+
+    def _loads_v2(
+        self,
+        request: PreparedRequest,
+        data: bytes,
+        body_file: IO[bytes] | None = None,
+    ) -> HTTPResponse | None:
+        # The "v2" compressed base64 cache format.
+        # This has been removed due to age and poor size/performance
+        # characteristics, so we treat it as a miss.
+        return None
+
+    def _loads_v3(
+        self,
+        request: PreparedRequest,
+        data: bytes,
+        body_file: IO[bytes] | None = None,
+    ) -> None:
+        # Due to Python 2 encoding issues, it's impossible to know for sure
+        # exactly how to load v3 entries, thus we'll treat these as a miss so
+        # that they get rewritten out as v4 entries.
+        return None
+
+    def _loads_v4(
+        self,
+        request: PreparedRequest,
+        data: bytes,
+        body_file: IO[bytes] | None = None,
+    ) -> HTTPResponse | None:
+        try:
+            cached = msgpack.loads(data, raw=False)
+        except ValueError:
+            return None
+
+        return self.prepare_response(request, cached, body_file)
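
A hedged sketch of the framing the Serializer uses: a "cc=<version>," prefix followed by a msgpack payload, which loads() splits back apart to pick a _loads_vN method. The import path assumes the upstream cachecontrol distribution:

    from cachecontrol.serialize import Serializer

    serializer = Serializer()
    payload = serializer.serialize({"response": {"status": 200}})  # msgpack bytes
    framed = b",".join([f"cc={Serializer.serde_version}".encode(), payload])
    ver, rest = framed.split(b",", 1)
    assert ver == b"cc=4" and rest == payload
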
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/wrapper.py b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/wrapper.py
new file mode 100644
index 0000000..f618bc3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/wrapper.py
@@ -0,0 +1,43 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Collection
+
+from pip._vendor.cachecontrol.adapter import CacheControlAdapter
+from pip._vendor.cachecontrol.cache import DictCache
+
+if TYPE_CHECKING:
+    from pip._vendor import requests
+
+    from pip._vendor.cachecontrol.cache import BaseCache
+    from pip._vendor.cachecontrol.controller import CacheController
+    from pip._vendor.cachecontrol.heuristics import BaseHeuristic
+    from pip._vendor.cachecontrol.serialize import Serializer
+
+
+def CacheControl(
+    sess: requests.Session,
+    cache: BaseCache | None = None,
+    cache_etags: bool = True,
+    serializer: Serializer | None = None,
+    heuristic: BaseHeuristic | None = None,
+    controller_class: type[CacheController] | None = None,
+    adapter_class: type[CacheControlAdapter] | None = None,
+    cacheable_methods: Collection[str] | None = None,
+) -> requests.Session:
+    cache = DictCache() if cache is None else cache
+    adapter_class = adapter_class or CacheControlAdapter
+    adapter = adapter_class(
+        cache,
+        cache_etags=cache_etags,
+        serializer=serializer,
+        heuristic=heuristic,
+        controller_class=controller_class,
+        cacheable_methods=cacheable_methods,
+    )
+    sess.mount("http://", adapter)
+    sess.mount("https://", adapter)
+
+    return sess
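
The CacheControl() helper above is the usual entry point; a hedged usage sketch against the upstream requests and cachecontrol distributions:

    import requests
    from cachecontrol import CacheControl

    sess = CacheControl(requests.Session())
    sess.get("https://example.com/")   # fetched over the network
    sess.get("https://example.com/")   # may be served from the in-memory DictCache
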
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/certifi/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/certifi/__init__.py
new file mode 100644
index 0000000..8ce89ce
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/certifi/__init__.py
@@ -0,0 +1,4 @@
+from .core import contents, where
+
+__all__ = ["contents", "where"]
+__version__ = "2023.07.22"
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/certifi/__main__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/certifi/__main__.py
new file mode 100644
index 0000000..0037634
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/certifi/__main__.py
@@ -0,0 +1,12 @@
+import argparse
+
+from pip._vendor.certifi import contents, where
+
+parser = argparse.ArgumentParser()
+parser.add_argument("-c", "--contents", action="store_true")
+args = parser.parse_args()
+
+if args.contents:
+    print(contents())
+else:
+    print(where())
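
A hedged sketch of using certifi's two public helpers shown above (upstream certifi; pip imports it as pip._vendor.certifi):

    import ssl
    import certifi

    print(certifi.where())  # filesystem path to the bundled cacert.pem
    ctx = ssl.create_default_context(cafile=certifi.where())
    pem_text = certifi.contents()  # the PEM bundle itself, as a string
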
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/certifi/cacert.pem b/.venv/lib/python3.12/site-packages/pip/_vendor/certifi/cacert.pem
new file mode 100644
index 0000000..0212369
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/certifi/cacert.pem
@@ -0,0 +1,4635 @@
+
+# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Label: "GlobalSign Root CA"
+# Serial: 4835703278459707669005204
+# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
+# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
+# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
+-----BEGIN CERTIFICATE-----
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
+A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
+b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
+MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
+YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
+aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
+jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
+xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
+1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
+snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
+U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
+9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
+AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
+yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
+38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
+AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
+DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
+HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Premium 2048 Secure Server CA"
+# Serial: 946069240
+# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90
+# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31
+# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
+RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
+bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
+IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3
+MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
+LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
+YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
+A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
+K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
+sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
+MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
+XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
+HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
+4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub
+j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo
+U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf
+zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b
+u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+
+bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er
+fF6adulZkMV8gzURZVE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Label: "Baltimore CyberTrust Root"
+# Serial: 33554617
+# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
+# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
+# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
+RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
+VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
+DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
+ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
+VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
+mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
+IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
+mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
+XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
+dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
+jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
+BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
+DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
+9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
+jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
+Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
+R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Label: "Entrust Root Certification Authority"
+# Serial: 1164660820
+# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
+# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
+# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
+-----BEGIN CERTIFICATE-----
+MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
+Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
+KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
+cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
+NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
+NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
+ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
+BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
+KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
+Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
+4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
+KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
+rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
+94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
+sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
+gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
+kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
+vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
+A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
+O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
+9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
+eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
+0vdXcDazv/wor3ElhVsT/h5/WrQ8
+-----END CERTIFICATE-----
+
+# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
+# Subject: CN=AAA Certificate Services O=Comodo CA Limited
+# Label: "Comodo AAA Services root"
+# Serial: 1
+# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
+# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
+# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
+-----BEGIN CERTIFICATE-----
+MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
+YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
+GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
+BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
+3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
+rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
+ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
+oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
+MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
+QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
+b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
+AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
+GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
+Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
+G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
+l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
+smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2"
+# Serial: 1289
+# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b
+# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7
+# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86
+-----BEGIN CERTIFICATE-----
+MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa
+GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg
+Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J
+WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB
+rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp
++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1
+ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i
+Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz
+PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og
+/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH
+oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI
+yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud
+EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2
+A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL
+MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT
+ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f
+BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn
+g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl
+fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K
+WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha
+B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc
+hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR
+TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD
+mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z
+ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y
+4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza
+8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3"
+# Serial: 1478
+# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf
+# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85
+# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35
+-----BEGIN CERTIFICATE-----
+MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM
+V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB
+4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr
+H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd
+8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv
+vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT
+mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe
+btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc
+T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt
+WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ
+c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A
+4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD
+VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG
+CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0
+aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0
+aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu
+dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw
+czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G
+A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg
+Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0
+7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem
+d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd
++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B
+4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN
+t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x
+DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57
+k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s
+zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j
+Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT
+mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
+4SVhM7JZG+Ju1zdXtg2pEto=
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1
+# Subject: O=SECOM Trust.net OU=Security Communication RootCA1
+# Label: "Security Communication Root CA"
+# Serial: 0
+# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a
+# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7
+# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c
+-----BEGIN CERTIFICATE-----
+MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY
+MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t
+dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5
+WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD
+VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8
+9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ
+DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9
+Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N
+QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ
+xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G
+A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG
+kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr
+Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5
+Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU
+JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot
+RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Label: "XRamp Global CA Root"
+# Serial: 107108908803651509692980124233745014957
+# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
+# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
+# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
+gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
+MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
+UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
+dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
+dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
+38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
+KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
+DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
+qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
+JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
+BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
+eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
+ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
+vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
+qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
+IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
+i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
+O+7ETPTsJ3xCwnR8gooJybQDJbw=
+-----END CERTIFICATE-----
+
+# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Label: "Go Daddy Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
+# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
+# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
+MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
+YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
+MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
+ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
+MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
+ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
+PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
+wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
+EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
+avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
+sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
+/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
+IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
+OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
+TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
+HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
+dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
+ReYNnyicsbkqWletNw+vHX/bvZ8=
+-----END CERTIFICATE-----
+
+# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Label: "Starfield Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
+# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
+# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
+MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
+U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
+NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
+ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
+ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
+DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
+8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
+X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
+K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
+1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
+A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
+zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
+YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
+bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
+DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
+L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
+eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
+xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
+VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
+WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root CA"
+# Serial: 17154717934120587862167794914071425081
+# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
+# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
+# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
+-----BEGIN CERTIFICATE-----
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
+H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root CA"
+# Serial: 10944719598952040374951832963794454346
+# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
+# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
+# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
+-----BEGIN CERTIFICATE-----
+MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
+CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
+43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
+T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
+gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
+DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
+hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
+06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
+PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
+YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
+CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert High Assurance EV Root CA"
+# Serial: 3553400076410547919724730734378100087
+# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
+# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
+# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
+ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
+LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
+RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
+PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
+xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
+Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
+hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
+EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
+FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
+nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
+eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
+hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
+Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
+vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
++OkuE6N36B9K
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Label: "SwissSign Gold CA - G2"
+# Serial: 13492815561806991280
+# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93
+# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61
+# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95
+-----BEGIN CERTIFICATE-----
+MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV
+BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln
+biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF
+MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT
+d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8
+76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+
+bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c
+6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE
+emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd
+MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt
+MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y
+MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y
+FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi
+aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM
+gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB
+qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7
+lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn
+8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov
+L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6
+45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO
+UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5
+O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC
+bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv
+GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a
+77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC
+hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3
+92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp
+Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w
+ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
+Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Label: "SwissSign Silver CA - G2"
+# Serial: 5700383053117599563
+# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13
+# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb
+# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5
+-----BEGIN CERTIFICATE-----
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE
+BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu
+IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow
+RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY
+U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A
+MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv
+Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br
+YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF
+nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH
+6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt
+eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/
+c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ
+MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH
+HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf
+jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6
+5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB
+rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c
+wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0
+cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB
+AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp
+WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9
+xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ
+2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ
+IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8
+aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X
+em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR
+dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/
+OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+
+hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy
+tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureTrust CA O=SecureTrust Corporation
+# Subject: CN=SecureTrust CA O=SecureTrust Corporation
+# Label: "SecureTrust CA"
+# Serial: 17199774589125277788362757014266862032
+# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1
+# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11
+# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73
+-----BEGIN CERTIFICATE-----
+MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz
+MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv
+cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz
+Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO
+0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao
+wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj
+7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS
+8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT
+BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/
+3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm
+D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS
+CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR
+3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Global CA O=SecureTrust Corporation
+# Subject: CN=Secure Global CA O=SecureTrust Corporation
+# Label: "Secure Global CA"
+# Serial: 9751836167731051554232119481456978597
+# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de
+# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b
+# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69
+-----BEGIN CERTIFICATE-----
+MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx
+MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg
+Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ
+iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa
+/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ
+jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI
+HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7
+sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w
+gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw
+KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG
+AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L
+URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO
+H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm
+I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY
+iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc
+f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
+# Label: "COMODO Certification Authority"
+# Serial: 104350513648249232941998508985834464573
+# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
+# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
+# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
+-----BEGIN CERTIFICATE-----
+MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
+gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
+BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
+MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
+YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
+RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
+UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
+2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
+nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
+PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
+QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
+SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
+IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
+RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
+zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
+BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
+ZQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Label: "COMODO ECC Certification Authority"
+# Serial: 41578283867086692638256921589707938090
+# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
+# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
+# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
+-----BEGIN CERTIFICATE-----
+MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
+MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
+T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
+biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
+FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
+cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
+BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
+fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
+GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna O=Dhimyotis
+# Subject: CN=Certigna O=Dhimyotis
+# Label: "Certigna"
+# Serial: 18364802974209362175
+# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff
+# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97
+# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d
+-----BEGIN CERTIFICATE-----
+MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV
+BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X
+DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ
+BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4
+QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny
+gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw
+zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q
+130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2
+JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw
+ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT
+AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj
+AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h
+bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc
+fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu
+HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w
+t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
+WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
+-----END CERTIFICATE-----
+
+# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Label: "ePKI Root Certification Authority"
+# Serial: 28956088682735189655030529057352760477
+# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3
+# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0
+# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5
+-----BEGIN CERTIFICATE-----
+MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw
+IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL
+SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH
+SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh
+ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X
+DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1
+TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ
+fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA
+sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU
+WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS
+nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH
+dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip
+NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC
+AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF
+MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH
+ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB
+uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl
+PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP
+JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/
+gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2
+j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6
+5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB
+o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS
+/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z
+Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE
+W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
+hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
+-----END CERTIFICATE-----
+
+# Issuer: O=certSIGN OU=certSIGN ROOT CA
+# Subject: O=certSIGN OU=certSIGN ROOT CA
+# Label: "certSIGN ROOT CA"
+# Serial: 35210227249154
+# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17
+# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b
+# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb
+-----BEGIN CERTIFICATE-----
+MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD
+QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP
+MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC
+ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do
+0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ
+UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d
+RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ
+OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C
+AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O
+BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ
+LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY
+MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ
+44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I
+Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw
+i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN
+9u6wWk5JRFRYX0KD
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny"
+# Serial: 80544274841616
+# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88
+# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91
+# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG
+EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3
+MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl
+cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR
+dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB
+pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM
+b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm
+aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz
+IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT
+lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz
+AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5
+VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG
+ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2
+BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG
+AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M
+U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh
+bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C
++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC
+bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F
+uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
+XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Label: "SecureSign RootCA11"
+# Serial: 1
+# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26
+# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3
+# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12
+-----BEGIN CERTIFICATE-----
+MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr
+MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG
+A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0
+MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp
+Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD
+QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz
+i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8
+h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV
+MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9
+UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni
+8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC
+h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD
+VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm
+KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ
+X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr
+QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5
+pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN
+QSdJQO7e5iNEOdyhIta6A/I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Label: "Microsec e-Szigno Root CA 2009"
+# Serial: 14014712776195784473
+# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1
+# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e
+# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78
+-----BEGIN CERTIFICATE-----
+MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
+VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0
+ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G
+CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y
+OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx
+FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp
+Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o
+dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP
+kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc
+cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U
+fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7
+N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC
+xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1
++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM
+Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG
+SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h
+mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk
+ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775
+tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c
+2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t
+HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Label: "GlobalSign Root CA - R3"
+# Serial: 4835703278459759426209954
+# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
+# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
+# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
+-----BEGIN CERTIFICATE-----
+MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
+MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
+RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
+gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
+QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
+LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
+RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
+jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
+6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
+mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
+Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
+WD9f
+-----END CERTIFICATE-----
+
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
+# Serial: 6047274297262753887
+# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3
+# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa
+# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef
+-----BEGIN CERTIFICATE-----
+MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE
+BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
+cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy
+MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
+Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
+thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
+cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
+L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
+NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
+X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
+m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
+Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
+EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
+KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
+6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
+OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD
+VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD
+VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp
+cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv
+ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl
+AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF
+661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9
+am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1
+ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481
+PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS
+3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k
+SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF
+3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM
+ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g
+StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz
+Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB
+jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V
+-----END CERTIFICATE-----
+
+# Issuer: CN=Izenpe.com O=IZENPE S.A.
+# Subject: CN=Izenpe.com O=IZENPE S.A.
+# Label: "Izenpe.com"
+# Serial: 917563065490389241595536686991402621
+# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73
+# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19
+# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f
+-----BEGIN CERTIFICATE-----
+MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4
+MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6
+ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD
+VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j
+b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq
+scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO
+xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H
+LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX
+uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD
+yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+
+JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q
+rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN
+BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L
+hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB
+QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+
+HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu
+Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg
+QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB
+BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx
+MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA
+A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb
+laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56
+awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo
+JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw
+LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT
+VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk
+LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb
+UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/
+QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+
+naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls
+QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Label: "Go Daddy Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
+# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
+# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
+EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
+ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
+NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
+EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
+AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
+E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
+/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
+DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
+GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
+tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
+AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
+WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
+9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
+gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
+2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
+LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
+4uJEvlz36hz1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
+# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
+# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
+ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
+MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
+b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
+aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
+Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
+nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
+HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
+Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
+dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
+HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
+CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
+sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
+4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
+8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
+pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
+mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Services Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
+# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
+# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
+-----BEGIN CERTIFICATE-----
+MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
+ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
+MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
+VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
+ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
+dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
+OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
+8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
+Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
+hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
+6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
+AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
+bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
+ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
+qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
+iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
+0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
+sSi6
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
+# Subject: CN=AffirmTrust Commercial O=AffirmTrust
+# Label: "AffirmTrust Commercial"
+# Serial: 8608355977964138876
+# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
+# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
+# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
+Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
+ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
+MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
+yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
+VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
+nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
+XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
+vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
+Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
+N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
+nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Networking O=AffirmTrust
+# Subject: CN=AffirmTrust Networking O=AffirmTrust
+# Label: "AffirmTrust Networking"
+# Serial: 8957382827206547757
+# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
+# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
+# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
+YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
+kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
+QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
+6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
+yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
+QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
+tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
+QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
+Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
+olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
+x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium O=AffirmTrust
+# Subject: CN=AffirmTrust Premium O=AffirmTrust
+# Label: "AffirmTrust Premium"
+# Serial: 7893706540734352110
+# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
+# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
+# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
+dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
+A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
+cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
+JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
+s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
+HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
+70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
+V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
+qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
+5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
+C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
+OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
+FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
+KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
+Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
+8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
+MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
+0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
+u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
+YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
+GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
+RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
+KeC2uAloGRwYQw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Label: "AffirmTrust Premium ECC"
+# Serial: 8401224907861490260
+# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
+# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
+# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
+-----BEGIN CERTIFICATE-----
+MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
+VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
+cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
+BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
+VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
+0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
+ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
+A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
+aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
+flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA"
+# Serial: 279744
+# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78
+# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e
+# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e
+-----BEGIN CERTIFICATE-----
+MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM
+MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D
+ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU
+cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3
+WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg
+Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw
+IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH
+UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM
+TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU
+BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM
+kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x
+AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV
+HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y
+sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL
+I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8
+J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY
+VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
+03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Root Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79
+# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48
+# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES
+MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU
+V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz
+WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO
+LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE
+AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH
+K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX
+RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z
+rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx
+3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq
+hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC
+MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls
+XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D
+lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn
+aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ
+YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Label: "Security Communication RootCA2"
+# Serial: 0
+# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43
+# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74
+# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
+DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
+dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
+YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
+OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
+zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
+VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
+hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
+ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
+awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
+OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
+DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
+coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
+okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
+t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
+1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
+SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
+-----END CERTIFICATE-----
+
+# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Label: "Actalis Authentication Root CA"
+# Serial: 6271844772424770508
+# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6
+# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac
+# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66
+-----BEGIN CERTIFICATE-----
+MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
+MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
+IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
+SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
+ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
+UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
+4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
+KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
+gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
+rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
+51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
+be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
+KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
+v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
+fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
+ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
+ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
+e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
+WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
+SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
+pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
+X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
+fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
+K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
+ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
+LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
+LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 2 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29
+# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99
+# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV
+L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91
+1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx
+MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ
+QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB
+arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr
+Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi
+FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS
+P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN
+9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz
+uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h
+9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s
+A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t
+OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo
++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7
+KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2
+DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us
+H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ
+I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7
+5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h
+3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz
+Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 3 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec
+# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57
+# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y
+ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E
+N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9
+tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX
+0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c
+/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X
+KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY
+zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS
+O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D
+34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP
+K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv
+Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj
+QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV
+cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS
+IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
+HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa
+O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u
+dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE
+kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41
+3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD
+u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq
+4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 3"
+# Serial: 1
+# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef
+# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1
+# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN
+8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/
+RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4
+hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5
+ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM
+EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1
+A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy
+WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30
+6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT
+91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml
+e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
+TpPDpFQUWw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 2009"
+# Serial: 623603
+# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f
+# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0
+# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1
+-----BEGIN CERTIFICATE-----
+MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha
+ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM
+HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03
+UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42
+tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R
+ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM
+lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp
+/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G
+A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G
+A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj
+dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy
+MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl
+cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js
+L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL
+BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni
+acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0
+o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K
+zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8
+PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y
+Johw1+qRzT65ysCQblrGXnRl11z+o+I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 EV 2009"
+# Serial: 623604
+# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6
+# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83
+# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw
+NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV
+BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn
+ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0
+3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z
+qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR
+p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8
+HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
+HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
+Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
+c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
+RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
+dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
+Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
+3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
+nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
+CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
+xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
+KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig Root R2 O=Disig a.s.
+# Subject: CN=CA Disig Root R2 O=Disig a.s.
+# Label: "CA Disig Root R2"
+# Serial: 10572350602393338211
+# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
+# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
+# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
+-----BEGIN CERTIFICATE-----
+MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
+BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
+MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
+MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
+EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
+ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
+NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
+PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
+x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
+QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
+yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
+QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
+H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
+QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
+i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
+nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
+rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
+DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
+hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
+tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
+GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
+lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
+TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
+nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
+gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
+G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
+L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
+-----END CERTIFICATE-----
+
+# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Label: "ACCVRAIZ1"
+# Serial: 6828503384748696800
+# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
+# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
+# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
+-----BEGIN CERTIFICATE-----
+MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
+AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
+CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
+BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
+VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
+qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
+HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
+G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
+lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
+IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
+0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
+k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
+m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
+cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
+uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
+KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
+ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
+AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
+VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
+VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
+CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
+cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
+QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
+7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
+cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
+QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
+czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
+aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
+aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
+DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
+BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
+D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
+JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
+AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
+vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
+tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
+7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
+I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
+h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
+d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
+pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Global Root CA"
+# Serial: 3262
+# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
+# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
+# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
+EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5
+NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT
+B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF
+10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz
+0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh
+MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH
+zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc
+46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2
+yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi
+laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP
+oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA
+BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE
+qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm
+4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL
+1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn
+LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF
+H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo
+RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+
+nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh
+15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW
+6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW
+nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j
+wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz
+aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy
+KwbQBM0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Label: "TeliaSonera Root CA v1"
+# Serial: 199041966741090107964904287217786801558
+# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c
+# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37
+# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89
+-----BEGIN CERTIFICATE-----
+MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw
+NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv
+b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD
+VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F
+VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1
+7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X
+Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+
+/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs
+81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm
+dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe
+Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu
+sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4
+pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs
+slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ
+arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD
+VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG
+9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl
+dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj
+TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed
+Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7
+Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI
+OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7
+vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW
+t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn
+HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
+SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 2"
+# Serial: 1
+# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a
+# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9
+# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd
+AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC
+FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi
+1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq
+jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ
+wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/
+WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy
+NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC
+uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw
+IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6
+g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN
+9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP
+BSeOE6Fuwg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot 2011 O=Atos
+# Subject: CN=Atos TrustedRoot 2011 O=Atos
+# Label: "Atos TrustedRoot 2011"
+# Serial: 6643877497813316402
+# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56
+# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21
+# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE
+AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG
+EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM
+FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC
+REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp
+Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM
+VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+
+SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
+cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
+eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
+A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
+DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
+vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
+DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
+maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
+lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
+KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 1 G3"
+# Serial: 687049649626669250736271037606554624078720034195
+# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
+# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
+# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
+MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
+wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
+rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
+68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
+4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
+UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
+abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
+3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
+KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
+hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
+Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
+zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
+ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
+MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
+cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
+qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
+YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
+b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
+8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
+NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
+ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
+q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
+nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2 G3"
+# Serial: 390156079458959257446133169266079962026824725800
+# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
+# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
+# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
+MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
+qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
+n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
+c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
+o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
+IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
+IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
+8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
+vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
+7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
+cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
+ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
+AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
+roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
+lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
+csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
+dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
+KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
+HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
+WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3 G3"
+# Serial: 268090761170461462463995952157327242137089239581
+# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
+# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
+# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
+MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
+/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
+FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
+U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
+ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
+FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
+A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
+eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
+sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
+VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+
+ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD
+ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px
+KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI
+FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv
+oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg
+u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP
+0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf
+3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl
+8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+
+DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN
+PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/
+ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G2"
+# Serial: 15385348160840213938643033620894905419
+# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d
+# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f
+# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85
+-----BEGIN CERTIFICATE-----
+MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA
+n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc
+biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp
+EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA
+bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu
+YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB
+AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW
+BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI
+QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I
+0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni
+lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9
+B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv
+ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo
+IhNzbM8m9Yop5w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G3"
+# Serial: 15459312981008553731928384953135426796
+# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb
+# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89
+# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2
+-----BEGIN CERTIFICATE-----
+MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf
+Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q
+RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD
+AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY
+JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv
+6pZjamVFkpUBtA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G2"
+# Serial: 4293743540046975378534879503202253541
+# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44
+# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4
+# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f
+-----BEGIN CERTIFICATE-----
+MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
+2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
+1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
+q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
+tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
+vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
+5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
+1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
+NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
+Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
+8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
+pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
+MrY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G3"
+# Serial: 7089244469030293291760083333884364146
+# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca
+# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e
+# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0
+-----BEGIN CERTIFICATE-----
+MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe
+Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw
+EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x
+IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG
+fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd
+BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx
+AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/
+oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8
+sycX
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Trusted Root G4"
+# Serial: 7451500558977370777930084869016614236
+# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49
+# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4
+# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88
+-----BEGIN CERTIFICATE-----
+MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
+RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y
+ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If
+xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV
+ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ
+jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/
+CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi
+EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM
+fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY
+uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK
+chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t
+9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD
+ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2
+SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd
++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc
+fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa
+sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N
+cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N
+0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie
+4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI
+r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
+/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm
+gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Label: "COMODO RSA Certification Authority"
+# Serial: 101909084537582093308941363524873193117
+# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18
+# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4
+# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34
+-----BEGIN CERTIFICATE-----
+MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB
+hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV
+BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR
+Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR
+6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X
+pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC
+9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV
+/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf
+Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z
++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w
+qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah
+SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC
+u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf
+Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq
+crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E
+FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB
+/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl
+wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM
+4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV
+2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna
+FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ
+CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK
+boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke
+jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL
+S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb
+QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl
+0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB
+NVOFBkpdn627G190
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Label: "USERTrust RSA Certification Authority"
+# Serial: 2645093764781058787591871645665788717
+# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5
+# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e
+# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2
+-----BEGIN CERTIFICATE-----
+MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB
+iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
+cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
+BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw
+MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV
+BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU
+aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B
+3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY
+tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/
+Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2
+VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT
+79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6
+c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l
+c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee
+UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd
+BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G
+A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF
+Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO
+VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3
+ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs
+8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR
+iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze
+Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ
+XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/
+qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB
+VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB
+L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG
+jjxDah2nGN59PRbxYvnKkKj9
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Label: "USERTrust ECC Certification Authority"
+# Serial: 123013823720199481456569720443997572134
+# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1
+# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0
+# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a
+-----BEGIN CERTIFICATE-----
+MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL
+MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl
+eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT
+JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT
+Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg
+VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo
+I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng
+o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G
+A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB
+zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW
+RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Label: "GlobalSign ECC Root CA - R5"
+# Serial: 32785792099990507226680698011560947931244
+# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08
+# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa
+# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24
+-----BEGIN CERTIFICATE-----
+MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke
+hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI
+KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg
+515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO
+xwy8p2Fp8fc74SrL+SvzZpA3
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Label: "IdenTrust Commercial Root CA 1"
+# Serial: 13298821034946342390520003877796839426
+# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7
+# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25
+# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu
+VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw
+JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT
+3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU
++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp
+S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1
+bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi
+T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL
+vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK
+Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK
+dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT
+c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv
+l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N
+iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD
+ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH
+6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt
+LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93
+nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3
++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK
+W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT
+AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq
+l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG
+4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ
+mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A
+7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Label: "IdenTrust Public Sector Root CA 1"
+# Serial: 13298821034946342390521976156843933698
+# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba
+# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd
+# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu
+VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
+MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
+ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
+RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
+bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
+/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
+3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
+EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
+9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
+GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
+2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
+WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
+W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
+AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
+DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
+TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
+lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
+mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
+WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
+tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
+GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
+8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G2"
+# Serial: 1246989352
+# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
+# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
+# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
+-----BEGIN CERTIFICATE-----
+MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
+cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
+IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
+dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
+NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
+dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
+dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
+aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
+RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
+cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
+wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
+U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
+jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
+BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
+jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
+Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
+1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
+nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
+VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - EC1"
+# Serial: 51543124481930649114116133369
+# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
+# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
+# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
+-----BEGIN CERTIFICATE-----
+MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
+A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
+d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
+dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
+RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
+MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
+VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
+L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
+Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
+A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
+ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
+Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
+R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
+hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
+-----END CERTIFICATE-----
+
+# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Label: "CFCA EV ROOT"
+# Serial: 407555286
+# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
+# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
+# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
+TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
+MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
+aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
+T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
+sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
+TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
+/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
+7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
+EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
+hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
+a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
+aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
+TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
+PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
+cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
+tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
+BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
+ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
+ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
+jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
+ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19
+xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d
+Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN
+5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe
+/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z
+AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
+5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GB CA"
+# Serial: 157768595616588414422159278966750757568
+# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d
+# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed
+# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6
+-----BEGIN CERTIFICATE-----
+MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt
+MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg
+Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i
+YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x
+CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG
+b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh
+bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3
+HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx
+WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX
+1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk
+u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P
+99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r
+M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB
+BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh
+cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5
+gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO
+ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf
+aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic
+Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Label: "SZAFIR ROOT CA2"
+# Serial: 357043034767186914217277344587386743377558296292
+# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99
+# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de
+# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe
+-----BEGIN CERTIFICATE-----
+MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL
+BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6
+ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw
+NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L
+cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg
+Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN
+QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT
+3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw
+3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6
+3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5
+BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN
+XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF
+AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw
+8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG
+nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP
+oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy
+d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg
+LvWpCz/UXeHPhJ/iGcJfitYgHuNztw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA 2"
+# Serial: 44979900017204383099463764357512596969
+# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2
+# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92
+# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04
+-----BEGIN CERTIFICATE-----
+MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB
+gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu
+QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG
+A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz
+OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ
+VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3
+b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA
+DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn
+0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB
+OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE
+fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E
+Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m
+o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i
+sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW
+OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez
+Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS
+adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n
+3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC
+AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ
+F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf
+CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29
+XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm
+djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/
+WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb
+AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq
+P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko
+b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj
+XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P
+5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi
+DrW5viSP
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce
+# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6
+# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36
+-----BEGIN CERTIFICATE-----
+MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix
+DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k
+IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT
+N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v
+dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG
+A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh
+ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx
+QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA
+4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0
+AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10
+4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C
+ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV
+9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD
+gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6
+Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq
+NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko
+LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc
+Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd
+ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I
+XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI
+M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot
+9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V
+Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea
+j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh
+X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ
+l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf
+bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4
+pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK
+e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0
+vm9qp/UsQu0yrbYhnr68
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef
+# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66
+# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33
+-----BEGIN CERTIFICATE-----
+MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN
+BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl
+bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv
+b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ
+BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj
+YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5
+MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0
+dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg
+QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa
+jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC
+MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi
+C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep
+lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof
+TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR
+-----END CERTIFICATE-----
+
+# Issuer: CN=ISRG Root X1 O=Internet Security Research Group
+# Subject: CN=ISRG Root X1 O=Internet Security Research Group
+# Label: "ISRG Root X1"
+# Serial: 172886928669790476064670243504169061120
+# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e
+# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8
+# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6
+-----BEGIN CERTIFICATE-----
+MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
+TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
+cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
+WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
+ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
+h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
+0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
+A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
+T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
+B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
+B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
+KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
+OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
+jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
+qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
+rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
+hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
+ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
+3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
+NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
+ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
+TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
+jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
+4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
+mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
+emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
+-----END CERTIFICATE-----
+
+# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Label: "AC RAIZ FNMT-RCM"
+# Serial: 485876308206448804701554682760554759
+# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d
+# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20
+# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx
+CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ
+WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ
+BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG
+Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/
+yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf
+BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz
+WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF
+tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z
+374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC
+IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL
+mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7
+wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS
+MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2
+ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet
+UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H
+YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3
+LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD
+nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1
+RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM
+LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf
+77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N
+JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm
+fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp
+6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp
+1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B
+9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok
+RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv
+uu8wd+RU4riEmViAqhOLUTpPSPaLtrM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 1 O=Amazon
+# Subject: CN=Amazon Root CA 1 O=Amazon
+# Label: "Amazon Root CA 1"
+# Serial: 143266978916655856878034712317230054538369994
+# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6
+# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16
+# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e
+-----BEGIN CERTIFICATE-----
+MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM
+9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw
+IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6
+VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L
+93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm
+jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA
+A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI
+U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs
+N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv
+o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU
+5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy
+rqXRfboQnoZsG4q5WTP468SQvvG5
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 2 O=Amazon
+# Subject: CN=Amazon Root CA 2 O=Amazon
+# Label: "Amazon Root CA 2"
+# Serial: 143266982885963551818349160658925006970653239
+# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66
+# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a
+# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK
+gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ
+W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg
+1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K
+8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r
+2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me
+z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR
+8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj
+mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz
+7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6
++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI
+0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm
+UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2
+LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY
++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS
+k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl
+7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm
+btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl
+urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+
+fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63
+n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE
+76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H
+9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT
+4PsJYGw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 3 O=Amazon
+# Subject: CN=Amazon Root CA 3 O=Amazon
+# Label: "Amazon Root CA 3"
+# Serial: 143266986699090766294700635381230934788665930
+# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87
+# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e
+# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4
+-----BEGIN CERTIFICATE-----
+MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl
+ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr
+ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr
+BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM
+YyRIHN8wfdVoOw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 4 O=Amazon
+# Subject: CN=Amazon Root CA 4 O=Amazon
+# Label: "Amazon Root CA 4"
+# Serial: 143266989758080763974105200630763877849284878
+# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd
+# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be
+# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92
+-----BEGIN CERTIFICATE-----
+MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi
+9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk
+M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB
+MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw
+CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW
+1KyLa2tJElMzrdfkviT8tQp21KW8EA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1"
+# Serial: 1
+# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49
+# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca
+# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16
+-----BEGIN CERTIFICATE-----
+MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx
+GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp
+bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w
+KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0
+BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy
+dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG
+EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll
+IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU
+QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT
+TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg
+LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7
+a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr
+LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X
+YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/
+iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f
+AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH
+V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh
+AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf
+IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4
+lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c
+8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf
+lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Label: "GDCA TrustAUTH R5 ROOT"
+# Serial: 9009899650740120186
+# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4
+# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4
+# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93
+-----BEGIN CERTIFICATE-----
+MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE
+BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ
+IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0
+MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV
+BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w
+HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj
+Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj
+TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u
+KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj
+qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm
+MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12
+ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP
+zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk
+L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC
+jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA
+HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC
+AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg
+p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm
+DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5
+COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry
+L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf
+JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg
+IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io
+2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV
+09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ
+XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq
+T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe
+MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Label: "SSL.com Root Certification Authority RSA"
+# Serial: 8875640296558310041
+# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29
+# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb
+# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69
+-----BEGIN CERTIFICATE-----
+MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE
+BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK
+DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz
+OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv
+bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R
+xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX
+qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC
+C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3
+6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh
+/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF
+YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E
+JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc
+US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8
+ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi
+M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G
+A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV
+cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc
+Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs
+PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/
+q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0
+cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr
+a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I
+H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y
+K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu
+nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf
+oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY
+Ic2wBlX7Jz9TkHCpBB5XJ7k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com Root Certification Authority ECC"
+# Serial: 8495723813297216424
+# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e
+# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a
+# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65
+-----BEGIN CERTIFICATE-----
+MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0
+aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz
+WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0
+b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS
+b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI
+7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg
+CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD
+VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T
+kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+
+gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority RSA R2"
+# Serial: 6248227494352943350
+# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95
+# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a
+# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c
+-----BEGIN CERTIFICATE-----
+MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV
+BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE
+CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy
+MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G
+A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD
+DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf
+OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa
+4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9
+HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR
+aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA
+b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ
+Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV
+PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO
+pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu
+UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY
+MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV
+HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4
+9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW
+s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5
+Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg
+cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM
+79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz
+/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt
+ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm
+Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK
+QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ
+w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi
+S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07
+mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority ECC"
+# Serial: 3182246526754555285
+# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90
+# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d
+# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8
+-----BEGIN CERTIFICATE-----
+MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx
+NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv
+bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA
+VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku
+WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX
+5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ
+ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg
+h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Label: "GlobalSign Root CA - R6"
+# Serial: 1417766617973444989252670301619537
+# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae
+# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1
+# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg
+MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh
+bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx
+MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET
+MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI
+xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k
+ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD
+aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw
+LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw
+1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX
+k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2
+SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h
+bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n
+WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY
+rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce
+MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu
+bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN
+nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt
+Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61
+55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj
+vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf
+cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz
+oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp
+nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs
+pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v
+JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R
+8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4
+5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GC CA"
+# Serial: 44084345621038548146064804565436152554
+# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23
+# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31
+# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d
+-----BEGIN CERTIFICATE-----
+MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw
+CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91
+bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg
+Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ
+BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu
+ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS
+b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni
+eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W
+p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T
+rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV
+57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg
+Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Global G2 Root O=UniTrust
+# Subject: CN=UCA Global G2 Root O=UniTrust
+# Label: "UCA Global G2 Root"
+# Serial: 124779693093741543919145257850076631279
+# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8
+# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a
+# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH
+bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x
+CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds
+b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr
+b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9
+kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm
+VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R
+VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc
+C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj
+tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY
+D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv
+j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl
+NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6
+iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP
+O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV
+ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj
+L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5
+1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl
+1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU
+b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV
+PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj
+y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb
+EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg
+DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI
++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX
+UB+K+wb1whnw0A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Extended Validation Root O=UniTrust
+# Subject: CN=UCA Extended Validation Root O=UniTrust
+# Label: "UCA Extended Validation Root"
+# Serial: 106100277556486529736699587978573607008
+# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2
+# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a
+# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF
+eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx
+MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV
+BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog
+D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS
+sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop
+O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk
+sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi
+c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj
+VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz
+KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/
+TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G
+sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs
+1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD
+fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T
+AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN
+l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR
+ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ
+VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5
+c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp
+4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s
+t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj
+2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO
+vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C
+xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx
+cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM
+fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Label: "Certigna Root CA"
+# Serial: 269714418870597844693661054334862075617
+# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77
+# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43
+# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68
+-----BEGIN CERTIFICATE-----
+MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw
+WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw
+MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x
+MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD
+VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX
+BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw
+ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M
+CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu
+I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm
+TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh
+C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf
+ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz
+IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT
+Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k
+JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5
+hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB
+GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of
+1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov
+L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo
+dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr
+aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq
+hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L
+6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG
+HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6
+0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB
+lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi
+o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1
+gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v
+faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63
+Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh
+jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw
+3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign Root CA - G1"
+# Serial: 235931866688319308814040
+# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac
+# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c
+# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67
+-----BEGIN CERTIFICATE-----
+MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD
+VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU
+ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH
+MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO
+MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv
+Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz
+f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO
+8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq
+d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM
+tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt
+Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB
+o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD
+AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x
+PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM
+wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d
+GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH
+6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx
+iN66zB+Afko=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign ECC Root CA - G3"
+# Serial: 287880440101571086945156
+# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40
+# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1
+# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b
+-----BEGIN CERTIFICATE-----
+MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG
+EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo
+bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g
+RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ
+TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s
+b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0
+WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS
+fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB
+zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq
+hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB
+CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD
++JbNR6iC8hZVdyR+EhCVBCyj
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign Root CA - C1"
+# Serial: 825510296613316004955058
+# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68
+# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01
+# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f
+-----BEGIN CERTIFICATE-----
+MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG
+A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg
+SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v
+dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ
+BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ
+HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH
+3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH
+GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c
+xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1
+aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq
+TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87
+/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4
+kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG
+YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT
++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo
+WXzhriKi4gp6D/piq1JM4fHfyr6DDUI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign ECC Root CA - C3"
+# Serial: 582948710642506000014504
+# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5
+# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66
+# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3
+-----BEGIN CERTIFICATE-----
+MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG
+EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx
+IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND
+IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci
+MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti
+sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O
+BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB
+Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c
+3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J
+0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Label: "Hongkong Post Root CA 3"
+# Serial: 46170865288971385588281144162979347873371282084
+# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0
+# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02
+# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6
+-----BEGIN CERTIFICATE-----
+MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL
+BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ
+SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n
+a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5
+NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT
+CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u
+Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO
+dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI
+VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV
+9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY
+2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY
+vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt
+bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb
+x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+
+l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK
+TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj
+Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e
+i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw
+DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG
+7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk
+MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr
+gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk
+GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS
+3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm
+Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+
+l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c
+JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP
+L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG
+mpv0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G4"
+# Serial: 289383649854506086828220374796556676440
+# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88
+# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01
+# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88
+-----BEGIN CERTIFICATE-----
+MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw
+gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL
+Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg
+MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw
+BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0
+MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1
+c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ
+bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg
+Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B
+AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ
+2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E
+T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j
+5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM
+C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T
+DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX
+wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A
+2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm
+nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8
+dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl
+N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj
+c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
+VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS
+5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS
+Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr
+hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/
+B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI
+AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw
+H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+
+b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk
+2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol
+IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk
+5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY
+n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation
+# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation
+# Label: "Microsoft ECC Root Certificate Authority 2017"
+# Serial: 136839042543790627607696632466672567020
+# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67
+# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5
+# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02
+-----BEGIN CERTIFICATE-----
+MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD
+VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw
+MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV
+UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy
+b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR
+ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb
+hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3
+FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV
+L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB
+iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation
+# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation
+# Label: "Microsoft RSA Root Certificate Authority 2017"
+# Serial: 40975477897264996090493496164228220339
+# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47
+# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74
+# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0
+-----BEGIN CERTIFICATE-----
+MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl
+MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw
+NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5
+IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG
+EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N
+aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ
+Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0
+ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1
+HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm
+gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ
+jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc
+aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG
+YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6
+W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K
+UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH
++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q
+W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC
+LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC
+gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6
+tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh
+SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2
+TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3
+pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR
+xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp
+GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9
+dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN
+AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB
+RA+GsCyRxj3qrg+E
+-----END CERTIFICATE-----
+
+# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd.
+# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd.
+# Label: "e-Szigno Root CA 2017"
+# Serial: 411379200276854331539784714
+# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98
+# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1
+# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99
+-----BEGIN CERTIFICATE-----
+MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV
+BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk
+LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv
+b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ
+BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg
+THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v
+IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv
+xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H
+Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB
+eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo
+jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ
++efcMQ==
+-----END CERTIFICATE-----
+
+# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2
+# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2
+# Label: "certSIGN Root CA G2"
+# Serial: 313609486401300475190
+# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7
+# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32
+# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05
+-----BEGIN CERTIFICATE-----
+MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV
+BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g
+Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ
+BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ
+R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF
+dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw
+vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ
+uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp
+n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs
+cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW
+xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P
+rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF
+DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx
+DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy
+LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C
+eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ
+d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq
+kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC
+b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl
+qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0
+OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c
+NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk
+ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO
+pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj
+03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk
+PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE
+1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX
+QRBdJ3NghVdJIgc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc.
+# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc.
+# Label: "Trustwave Global Certification Authority"
+# Serial: 1846098327275375458322922162
+# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e
+# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5
+# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8
+-----BEGIN CERTIFICATE-----
+MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw
+CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x
+ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1
+c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx
+OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI
+SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI
+b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
+ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn
+swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu
+7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8
+1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW
+80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP
+JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l
+RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw
+hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10
+coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc
+BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n
+twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud
+EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud
+DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W
+0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe
+uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q
+lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB
+aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE
+sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT
+MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe
+qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh
+VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8
+h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9
+EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK
+yeC2nOnOcXHebD8WpHk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc.
+# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc.
+# Label: "Trustwave Global ECC P256 Certification Authority"
+# Serial: 4151900041497450638097112925
+# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54
+# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf
+# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4
+-----BEGIN CERTIFICATE-----
+MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD
+VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf
+BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3
+YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x
+NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G
+A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0
+d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF
+Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG
+SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN
+FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w
+DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw
+CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh
+DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc.
+# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc.
+# Label: "Trustwave Global ECC P384 Certification Authority"
+# Serial: 2704997926503831671788816187
+# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6
+# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2
+# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97
+-----BEGIN CERTIFICATE-----
+MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD
+VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf
+BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3
+YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x
+NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G
+A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0
+d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF
+Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ
+j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF
+1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G
+A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3
+AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC
+MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu
+Sw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp.
+# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp.
+# Label: "NAVER Global Root Certification Authority"
+# Serial: 9013692873798656336226253319739695165984492813
+# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b
+# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1
+# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65
+-----BEGIN CERTIFICATE-----
+MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM
+BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG
+T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0
+aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx
+CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD
+b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA
+iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH
+38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE
+HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz
+kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP
+szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq
+vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf
+nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG
+YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo
+0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a
+CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K
+AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I
+36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB
+Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN
+qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj
+cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm
++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL
+hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe
+lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7
+p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8
+piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR
+LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX
+5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO
+dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul
+9XXeifdy
+-----END CERTIFICATE-----
+
+# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres
+# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres
+# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS"
+# Serial: 131542671362353147877283741781055151509
+# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb
+# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a
+# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb
+-----BEGIN CERTIFICATE-----
+MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw
+CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw
+FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S
+Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5
+MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL
+DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS
+QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK
+Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
+VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu
+SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC
+MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy
+v+c=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa
+# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa
+# Label: "GlobalSign Root R46"
+# Serial: 1552617688466950547958867513931858518042577
+# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef
+# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90
+# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA
+MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD
+VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy
+MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt
+c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ
+OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG
+vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud
+316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo
+0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE
+y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF
+zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE
++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN
+I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs
+x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa
+ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC
+4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4
+7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg
+JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti
+2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk
+pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF
+FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt
+rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk
+ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5
+u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP
+4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6
+N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3
+vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa
+# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa
+# Label: "GlobalSign Root E46"
+# Serial: 1552617690338932563915843282459653771421763
+# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f
+# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84
+# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58
+-----BEGIN CERTIFICATE-----
+MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx
+CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD
+ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw
+MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq
+R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd
+yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ
+7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8
++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH
+# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH
+# Label: "GLOBALTRUST 2020"
+# Serial: 109160994242082918454945253
+# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8
+# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2
+# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a
+-----BEGIN CERTIFICATE-----
+MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG
+A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw
+FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx
+MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u
+aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq
+hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b
+RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z
+YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3
+QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw
+yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+
+BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ
+SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH
+r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0
+4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me
+dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw
+q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2
+nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu
+H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA
+VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC
+XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd
+6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf
++I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi
+kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7
+wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB
+TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C
+MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn
+4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I
+aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy
+qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
+# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
+# Label: "ANF Secure Server Root CA"
+# Serial: 996390341000653745
+# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96
+# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74
+# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99
+-----BEGIN CERTIFICATE-----
+MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV
+BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk
+YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV
+BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN
+MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF
+UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD
+VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v
+dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj
+cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q
+yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH
+2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX
+H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL
+zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR
+p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz
+W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/
+SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn
+LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3
+n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B
+u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj
+o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC
+AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L
+9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej
+rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK
+pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0
+vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq
+OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ
+/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9
+2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI
++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2
+MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo
+tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Label: "Certum EC-384 CA"
+# Serial: 160250656287871593594747141429395092468
+# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1
+# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed
+# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6
+-----BEGIN CERTIFICATE-----
+MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw
+CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw
+JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT
+EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0
+WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT
+LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX
+BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE
+KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm
+Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8
+EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J
+UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn
+nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Root CA"
+# Serial: 40870380103424195783807378461123655149
+# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29
+# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5
+# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd
+-----BEGIN CERTIFICATE-----
+MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6
+MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu
+MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV
+BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw
+MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg
+U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo
+b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ
+n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q
+p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq
+NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF
+8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3
+HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa
+mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi
+7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF
+ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P
+qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ
+v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6
+Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1
+vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD
+ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4
+WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo
+zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR
+5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ
+GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq
+0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D
+P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM
+qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP
+0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf
+E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb
+-----END CERTIFICATE-----
+
+# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique
+# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique
+# Label: "TunTrust Root CA"
+# Serial: 108534058042236574382096126452369648152337120275
+# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4
+# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb
+# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41
+-----BEGIN CERTIFICATE-----
+MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL
+BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg
+Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv
+b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG
+EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u
+IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ
+n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd
+2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF
+VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ
+GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF
+li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU
+r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2
+eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb
+MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg
+jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB
+7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW
+5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE
+ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0
+90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z
+xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu
+QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4
+FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH
+22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP
+xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn
+dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5
+Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b
+nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ
+CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH
+u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj
+d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o=
+-----END CERTIFICATE-----
+
+# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Label: "HARICA TLS RSA Root CA 2021"
+# Serial: 76817823531813593706434026085292783742
+# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91
+# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d
+# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d
+-----BEGIN CERTIFICATE-----
+MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs
+MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg
+Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL
+MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl
+YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv
+b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l
+mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE
+4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv
+a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M
+pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw
+Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b
+LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY
+AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB
+AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq
+E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr
+W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ
+CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE
+AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU
+X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3
+f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja
+H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP
+JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P
+zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt
+jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0
+/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT
+BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79
+aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW
+xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU
+63ZTGI0RmLo=
+-----END CERTIFICATE-----
+
+# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Label: "HARICA TLS ECC Root CA 2021"
+# Serial: 137515985548005187474074462014555733966
+# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0
+# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48
+# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01
+-----BEGIN CERTIFICATE-----
+MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw
+CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh
+cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v
+dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG
+A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj
+aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg
+Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7
+KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y
+STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD
+AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw
+SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN
+nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps
+-----END CERTIFICATE-----
+
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
+# Serial: 1977337328857672817
+# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3
+# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe
+# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a
+-----BEGIN CERTIFICATE-----
+MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE
+BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
+cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1
+MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
+Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
+thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
+cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
+L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
+NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
+X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
+m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
+Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
+EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
+KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
+6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
+OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc
+tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd
+IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j
+b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC
+AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw
+ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m
+iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF
+Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ
+hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P
+Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE
+EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV
+1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t
+CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR
+5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw
+f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9
+ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK
+GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV
+-----END CERTIFICATE-----
+
+# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd.
+# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd.
+# Label: "vTrus ECC Root CA"
+# Serial: 630369271402956006249506845124680065938238527194
+# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85
+# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1
+# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3
+-----BEGIN CERTIFICATE-----
+MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw
+RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY
+BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz
+MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u
+LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0
+v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd
+e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD
+VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw
+V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA
+AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG
+GJTO
+-----END CERTIFICATE-----
+
+# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd.
+# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd.
+# Label: "vTrus Root CA"
+# Serial: 387574501246983434957692974888460947164905180485
+# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc
+# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7
+# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87
+-----BEGIN CERTIFICATE-----
+MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL
+BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x
+FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx
+MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s
+THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD
+ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc
+IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU
+AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+
+GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9
+8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH
+flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt
+J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim
+0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN
+pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ
+UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW
+OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB
+AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet
+8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd
+nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j
+bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM
+Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv
+TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS
+S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr
+I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9
+b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB
+UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P
+Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven
+sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=ISRG Root X2 O=Internet Security Research Group
+# Subject: CN=ISRG Root X2 O=Internet Security Research Group
+# Label: "ISRG Root X2"
+# Serial: 87493402998870891108772069816698636114
+# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5
+# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af
+# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70
+-----BEGIN CERTIFICATE-----
+MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw
+CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg
+R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00
+MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT
+ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw
+EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW
++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9
+ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI
+zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW
+tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1
+/q4AaOeMSQ+2b1tbFfLn
+-----END CERTIFICATE-----
+
+# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd.
+# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd.
+# Label: "HiPKI Root CA - G1"
+# Serial: 60966262342023497858655262305426234976
+# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3
+# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60
+# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc
+-----BEGIN CERTIFICATE-----
+MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa
+Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3
+YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw
+qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv
+Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6
+lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz
+Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ
+KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK
+FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj
+HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr
+y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ
+/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM
+a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6
+fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG
+SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi
+7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc
+SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza
+ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc
+XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg
+iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho
+L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF
+Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr
+kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+
+vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU
+YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Label: "GlobalSign ECC Root CA - R4"
+# Serial: 159662223612894884239637590694
+# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc
+# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28
+# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2
+-----BEGIN CERTIFICATE-----
+MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD
+VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh
+bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw
+MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g
+UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT
+BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx
+uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV
+HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/
++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147
+bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R1 O=Google Trust Services LLC
+# Subject: CN=GTS Root R1 O=Google Trust Services LLC
+# Label: "GTS Root R1"
+# Serial: 159662320309726417404178440727
+# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40
+# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a
+# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf
+-----BEGIN CERTIFICATE-----
+MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo
+27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w
+Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw
+TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl
+qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH
+szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8
+Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk
+MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92
+wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p
+aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN
+VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID
+AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E
+FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb
+C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe
+QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy
+h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4
+7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J
+ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef
+MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/
+Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT
+6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ
+0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm
+2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb
+bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R2 O=Google Trust Services LLC
+# Subject: CN=GTS Root R2 O=Google Trust Services LLC
+# Label: "GTS Root R2"
+# Serial: 159662449406622349769042896298
+# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc
+# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94
+# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8
+-----BEGIN CERTIFICATE-----
+MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt
+nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY
+6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu
+MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k
+RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg
+f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV
++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo
+dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW
+Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa
+G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq
+gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID
+AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E
+FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H
+vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8
+0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC
+B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u
+NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg
+yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev
+HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6
+xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR
+TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg
+JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV
+7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl
+6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R3 O=Google Trust Services LLC
+# Subject: CN=GTS Root R3 O=Google Trust Services LLC
+# Label: "GTS Root R3"
+# Serial: 159662495401136852707857743206
+# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73
+# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46
+# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48
+-----BEGIN CERTIFICATE-----
+MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD
+VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG
+A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw
+WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz
+IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi
+AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G
+jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2
+4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7
+VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm
+ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R4 O=Google Trust Services LLC
+# Subject: CN=GTS Root R4 O=Google Trust Services LLC
+# Label: "GTS Root R4"
+# Serial: 159662532700760215368942768210
+# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8
+# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47
+# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d
+-----BEGIN CERTIFICATE-----
+MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD
+VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG
+A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw
+WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz
+IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi
+AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi
+QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR
+HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D
+9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8
+p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD
+-----END CERTIFICATE-----
+
+# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj
+# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj
+# Label: "Telia Root CA v2"
+# Serial: 7288924052977061235122729490515358
+# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48
+# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd
+# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx
+CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE
+AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1
+NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ
+MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP
+ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq
+AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9
+vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9
+lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD
+n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT
+7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o
+6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC
+TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6
+WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R
+DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI
+pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj
+YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy
+rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw
+AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ
+8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi
+0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS
+SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K
+TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF
+6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er
+3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt
+Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT
+VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW
+ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA
+rBPuUBQemMc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH
+# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH
+# Label: "D-TRUST BR Root CA 1 2020"
+# Serial: 165870826978392376648679885835942448534
+# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed
+# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67
+# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44
+-----BEGIN CERTIFICATE-----
+MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw
+CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS
+VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5
+NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG
+A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS
+zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0
+QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/
+VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g
+PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf
+Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l
+dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1
+c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO
+PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW
+wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV
+dWNbFJWcHwHP2NVypw87
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH
+# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH
+# Label: "D-TRUST EV Root CA 1 2020"
+# Serial: 126288379621884218666039612629459926992
+# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e
+# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07
+# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db
+-----BEGIN CERTIFICATE-----
+MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw
+CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS
+VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5
+NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG
+A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC
+/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD
+wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3
+OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g
+PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf
+Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l
+dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1
+c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO
+PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA
+y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb
+gfM0agPnIjhQW+0ZT0MW
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc.
+# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc.
+# Label: "DigiCert TLS ECC P384 Root G5"
+# Serial: 13129116028163249804115411775095713523
+# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed
+# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee
+# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05
+-----BEGIN CERTIFICATE-----
+MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp
+Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2
+MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ
+bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG
+ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS
+7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp
+0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS
+B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49
+BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ
+LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4
+DXZDjC5Ty3zfDBeWUA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc.
+# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc.
+# Label: "DigiCert TLS RSA4096 Root G5"
+# Serial: 11930366277458970227240571539258396554
+# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1
+# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35
+# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN
+MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT
+HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN
+NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs
+IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+
+ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0
+2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp
+wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM
+pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD
+nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po
+sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx
+Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd
+Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX
+KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe
+XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL
+tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv
+TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN
+AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw
+GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H
+PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF
+O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ
+REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik
+AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+
+p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw
+MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF
+qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK
+ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certainly Root R1 O=Certainly
+# Subject: CN=Certainly Root R1 O=Certainly
+# Label: "Certainly Root R1"
+# Serial: 188833316161142517227353805653483829216
+# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12
+# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af
+# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0
+-----BEGIN CERTIFICATE-----
+MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw
+PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy
+dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0
+YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2
+1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT
+vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed
+aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0
+1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5
+r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5
+cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ
+wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ
+6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA
+2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH
+Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR
+eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB
+/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u
+d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr
+PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d
+8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi
+1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd
+rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di
+taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7
+lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj
+yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn
+Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy
+yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n
+wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6
+OV+KmalBWQewLK8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certainly Root E1 O=Certainly
+# Subject: CN=Certainly Root E1 O=Certainly
+# Label: "Certainly Root E1"
+# Serial: 8168531406727139161245376702891150584
+# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9
+# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b
+# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2
+-----BEGIN CERTIFICATE-----
+MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw
+CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu
+bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ
+BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s
+eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK
++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2
+QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4
+hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm
+ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG
+BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR
+-----END CERTIFICATE-----
+
+# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD.
+# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD.
+# Label: "Security Communication RootCA3"
+# Serial: 16247922307909811815
+# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26
+# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a
+# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94
+-----BEGIN CERTIFICATE-----
+MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV
+BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw
+JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2
+MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc
+U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg
+Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r
+CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA
+lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG
+TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7
+9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7
+8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4
+g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we
+GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst
++3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M
+0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ
+T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw
+HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS
+YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA
+FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd
+9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI
+UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+
+OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke
+gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf
+iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV
+nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD
+2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI//
+1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad
+TdJ0MN1kURXbg4NR16/9M51NZg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD.
+# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD.
+# Label: "Security Communication ECC RootCA1"
+# Serial: 15446673492073852651
+# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86
+# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41
+# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11
+-----BEGIN CERTIFICATE-----
+MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT
+AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD
+VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx
+NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT
+HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5
+IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi
+AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl
+dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK
+ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu
+9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O
+be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY
+# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY
+# Label: "BJCA Global Root CA1"
+# Serial: 113562791157148395269083148143378328608
+# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90
+# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a
+# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU
+MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI
+T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz
+MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF
+SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh
+bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z
+xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ
+spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5
+58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR
+at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll
+5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq
+nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK
+V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/
+pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO
+z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn
+jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+
+WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF
+7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE
+AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4
+YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli
+awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u
++2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88
+X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN
+SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo
+P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI
++pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz
+znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9
+eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2
+YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy
+r/6zcCwupvI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY
+# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY
+# Label: "BJCA Global Root CA2"
+# Serial: 58605626836079930195615843123109055211
+# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c
+# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6
+# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82
+-----BEGIN CERTIFICATE-----
+MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw
+CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ
+VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy
+MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ
+TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS
+b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B
+IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+
++kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK
+sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA
+94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B
+43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited
+# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited
+# Label: "Sectigo Public Server Authentication Root E46"
+# Serial: 88989738453351742415770396670917916916
+# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01
+# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a
+# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83
+-----BEGIN CERTIFICATE-----
+MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw
+CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T
+ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN
+MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG
+A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT
+ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC
+WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+
+6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B
+Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa
+qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q
+4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited
+# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited
+# Label: "Sectigo Public Server Authentication Root R46"
+# Serial: 156256931880233212765902055439220583700
+# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5
+# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38
+# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06
+-----BEGIN CERTIFICATE-----
+MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD
+Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw
+HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY
+MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp
+YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa
+ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz
+SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf
+iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X
+ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3
+IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS
+VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE
+SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu
++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt
+8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L
+HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt
+zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P
+AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c
+mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ
+YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52
+gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA
+Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB
+JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX
+DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui
+TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5
+dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65
+LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp
+0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY
+QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation
+# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation
+# Label: "SSL.com TLS RSA Root CA 2022"
+# Serial: 148535279242832292258835760425842727825
+# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da
+# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca
+# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed
+-----BEGIN CERTIFICATE-----
+MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO
+MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD
+DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX
+DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw
+b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC
+AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP
+L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY
+t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins
+S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3
+PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO
+L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3
+R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w
+dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS
++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS
+d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG
+AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f
+gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j
+BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z
+NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt
+hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM
+QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf
+R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ
+DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW
+P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy
+lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq
+bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w
+AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q
+r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji
+Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU
+98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation
+# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation
+# Label: "SSL.com TLS ECC Root CA 2022"
+# Serial: 26605119622390491762507526719404364228
+# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5
+# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39
+# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43
+-----BEGIN CERTIFICATE-----
+MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw
+CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT
+U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2
+MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh
+dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG
+ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm
+acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN
+SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME
+GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW
+uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp
+15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN
+b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos
+# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos
+# Label: "Atos TrustedRoot Root CA ECC TLS 2021"
+# Serial: 81873346711060652204712539181482831616
+# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8
+# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd
+# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8
+-----BEGIN CERTIFICATE-----
+MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w
+LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w
+CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0
+MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF
+Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI
+zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X
+tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4
+AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2
+KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD
+aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu
+CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo
+9H1/IISpQuQo
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos
+# Subject: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos
+# Label: "Atos TrustedRoot Root CA RSA TLS 2021"
+# Serial: 111436099570196163832749341232207667876
+# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2
+# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48
+# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f
+-----BEGIN CERTIFICATE-----
+MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM
+MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx
+MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00
+MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD
+QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN
+BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z
+4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv
+Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ
+kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs
+GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln
+nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh
+3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD
+0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy
+geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8
+ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB
+c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI
+pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB
+DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS
+4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs
+o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ
+qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw
+xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM
+rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4
+AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR
+0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY
+o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5
+dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE
+oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ==
+-----END CERTIFICATE-----
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/certifi/core.py b/.venv/lib/python3.12/site-packages/pip/_vendor/certifi/core.py
new file mode 100644
index 0000000..5c67600
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/certifi/core.py
@@ -0,0 +1,119 @@
+"""
+certifi.py
+~~~~~~~~~~
+
+This module returns the installation location of cacert.pem or its contents.
+"""
+import sys
+
+DEBIAN_CA_CERTS_PATH = '/etc/ssl/certs/ca-certificates.crt'
+
+if sys.version_info >= (3, 11):
+
+    from importlib.resources import as_file, files
+
+    _CACERT_CTX = None
+    _CACERT_PATH = None
+
+    def where() -> str:
+        # This is slightly terrible, but we want to delay extracting the file
+        # in cases where we're inside of a zipimport situation until someone
+        # actually calls where(), but we don't want to re-extract the file
+        # on every call of where(), so we'll do it once then store it in a
+        # global variable.
+        global _CACERT_CTX
+        global _CACERT_PATH
+        if _CACERT_PATH is None:
+            # This is slightly janky, the importlib.resources API wants you to
+            # manage the cleanup of this file, so it doesn't actually return a
+            # path, it returns a context manager that will give you the path
+            # when you enter it and will do any cleanup when you leave it. In
+            # the common case of not needing a temporary file, it will just
+            # return the file system location and the __exit__() is a no-op.
+            #
+            # We also have to hold onto the actual context manager, because
+            # it will do the cleanup whenever it gets garbage collected, so
+            # we will also store that at the global level as well.
+            _CACERT_CTX = as_file(files("pip._vendor.certifi").joinpath("cacert.pem"))
+            _CACERT_PATH = str(_CACERT_CTX.__enter__())
+
+        return _CACERT_PATH
+
+    def contents() -> str:
+        return files("pip._vendor.certifi").joinpath("cacert.pem").read_text(encoding="ascii")
+
+elif sys.version_info >= (3, 7):
+
+    from importlib.resources import path as get_path, read_text
+
+    _CACERT_CTX = None
+    _CACERT_PATH = None
+
+    def where() -> str:
+        # This is slightly terrible, but we want to delay extracting the
+        # file in cases where we're inside of a zipimport situation until
+        # someone actually calls where(), but we don't want to re-extract
+        # the file on every call of where(), so we'll do it once then store
+        # it in a global variable.
+        global _CACERT_CTX
+        global _CACERT_PATH
+        if _CACERT_PATH is None:
+            # This is slightly janky, the importlib.resources API wants you
+            # to manage the cleanup of this file, so it doesn't actually
+            # return a path, it returns a context manager that will give
+            # you the path when you enter it and will do any cleanup when
+            # you leave it. In the common case of not needing a temporary
+            # file, it will just return the file system location and the
+            # __exit__() is a no-op.
+            #
+            # We also have to hold onto the actual context manager, because
+            # it will do the cleanup whenever it gets garbage collected, so
+            # we will also store that at the global level as well.
+            _CACERT_CTX = get_path("pip._vendor.certifi", "cacert.pem")
+            _CACERT_PATH = str(_CACERT_CTX.__enter__())
+
+        return _CACERT_PATH
+
+    def contents() -> str:
+        return read_text("pip._vendor.certifi", "cacert.pem", encoding="ascii")
+
+else:
+    import os
+    import types
+    from typing import Union
+
+    Package = Union[types.ModuleType, str]
+    Resource = Union[str, "os.PathLike"]
+
+    # This fallback will work for Python versions prior to 3.7 that lack the
+    # importlib.resources module but relies on the existing `where` function
+    # so won't address issues with environments like PyOxidizer that don't set
+    # __file__ on modules.
+    def read_text(
+        package: Package,
+        resource: Resource,
+        encoding: str = 'utf-8',
+        errors: str = 'strict'
+    ) -> str:
+        with open(where(), encoding=encoding) as data:
+            return data.read()
+
+    # If we don't have importlib.resources, then we will just do the old logic
+    # of assuming we're on the filesystem and munge the path directly.
+    def where() -> str:
+        f = os.path.dirname(__file__)
+
+        return os.path.join(f, "cacert.pem")
+
+    def contents() -> str:
+        return read_text("pip._vendor.certifi", "cacert.pem", encoding="ascii")
+
+
+# Debian: Use system CA certs:
+def where() -> str:
+    return DEBIAN_CA_CERTS_PATH
+
+
+def contents() -> str:
+    with open(where(), "r", encoding="ascii") as data:
+        return data.read()
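Note: the module-level `where()` and `contents()` definitions at the end of this vendored core.py (the Debian patch) shadow the version-gated implementations above, so on this interpreter `where()` always returns DEBIAN_CA_CERTS_PATH. A minimal usage sketch of the API as committed above; illustrative only, not part of the diff, and the ssl call is an assumption about typical calling code:

    # Sketch: how calling code typically consumes the vendored certifi API.
    # Not part of this commit; pip._vendor.certifi.core is the module shown above.
    import ssl
    from pip._vendor.certifi import core

    ca_path = core.where()      # '/etc/ssl/certs/ca-certificates.crt' on this Debian-patched build
    pem_text = core.contents()  # the full PEM bundle as an ASCII string

    # Typical consumption: point an SSL context at the bundle path.
    ctx = ssl.create_default_context(cafile=ca_path)
    print(ca_path, len(pem_text))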
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/__init__.py
new file mode 100644
index 0000000..fe58162
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/__init__.py
@@ -0,0 +1,115 @@
+######################## BEGIN LICENSE BLOCK ########################
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from typing import List, Union
+
+from .charsetgroupprober import CharSetGroupProber
+from .charsetprober import CharSetProber
+from .enums import InputState
+from .resultdict import ResultDict
+from .universaldetector import UniversalDetector
+from .version import VERSION, __version__
+
+__all__ = ["UniversalDetector", "detect", "detect_all", "__version__", "VERSION"]
+
+
+def detect(
+    byte_str: Union[bytes, bytearray], should_rename_legacy: bool = False
+) -> ResultDict:
+    """
+    Detect the encoding of the given byte string.
+
+    :param byte_str:     The byte sequence to examine.
+    :type byte_str:      ``bytes`` or ``bytearray``
+    :param should_rename_legacy:  Should we rename legacy encodings
+                                  to their more modern equivalents?
+    :type should_rename_legacy:   ``bool``
+    """
+    if not isinstance(byte_str, bytearray):
+        if not isinstance(byte_str, bytes):
+            raise TypeError(
+                f"Expected object of type bytes or bytearray, got: {type(byte_str)}"
+            )
+        byte_str = bytearray(byte_str)
+    detector = UniversalDetector(should_rename_legacy=should_rename_legacy)
+    detector.feed(byte_str)
+    return detector.close()
+
+
+def detect_all(
+    byte_str: Union[bytes, bytearray],
+    ignore_threshold: bool = False,
+    should_rename_legacy: bool = False,
+) -> List[ResultDict]:
+    """
+    Detect all the possible encodings of the given byte string.
+
+    :param byte_str:          The byte sequence to examine.
+    :type byte_str:           ``bytes`` or ``bytearray``
+    :param ignore_threshold:  Include encodings that are below
+                              ``UniversalDetector.MINIMUM_THRESHOLD``
+                              in results.
+    :type ignore_threshold:   ``bool``
+    :param should_rename_legacy:  Should we rename legacy encodings
+                                  to their more modern equivalents?
+    :type should_rename_legacy:   ``bool``
+    """
+    if not isinstance(byte_str, bytearray):
+        if not isinstance(byte_str, bytes):
+            raise TypeError(
+                f"Expected object of type bytes or bytearray, got: {type(byte_str)}"
+            )
+        byte_str = bytearray(byte_str)
+
+    detector = UniversalDetector(should_rename_legacy=should_rename_legacy)
+    detector.feed(byte_str)
+    detector.close()
+
+    if detector.input_state == InputState.HIGH_BYTE:
+        results: List[ResultDict] = []
+        probers: List[CharSetProber] = []
+        for prober in detector.charset_probers:
+            if isinstance(prober, CharSetGroupProber):
+                probers.extend(p for p in prober.probers)
+            else:
+                probers.append(prober)
+        for prober in probers:
+            if ignore_threshold or prober.get_confidence() > detector.MINIMUM_THRESHOLD:
+                charset_name = prober.charset_name or ""
+                lower_charset_name = charset_name.lower()
+                # Use Windows encoding name instead of ISO-8859 if we saw any
+                # extra Windows-specific bytes
+                if lower_charset_name.startswith("iso-8859") and detector.has_win_bytes:
+                    charset_name = detector.ISO_WIN_MAP.get(
+                        lower_charset_name, charset_name
+                    )
+                # Rename legacy encodings with superset encodings if asked
+                if should_rename_legacy:
+                    charset_name = detector.LEGACY_MAP.get(
+                        charset_name.lower(), charset_name
+                    )
+                results.append(
+                    {
+                        "encoding": charset_name,
+                        "confidence": prober.get_confidence(),
+                        "language": prober.language,
+                    }
+                )
+        if len(results) > 0:
+            return sorted(results, key=lambda result: -result["confidence"])
+
+    return [detector.result]
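The detect()/detect_all() functions above are the public entry points of the vendored chardet package: both wrap a UniversalDetector, and detect_all() additionally flattens group probers and sorts candidates by descending confidence. A minimal usage sketch, assuming the vendored import path shown in the diff (illustrative only, not part of the commit):

    # Sketch: exercising the vendored chardet API from the file above.
    from pip._vendor.chardet import detect, detect_all

    sample = "エンコーディング検出".encode("euc-jp")

    best = detect(sample)
    # ResultDict with 'encoding', 'confidence', and 'language' keys,
    # e.g. encoding='EUC-JP' for this input (confidence value varies).
    print(best["encoding"], best["confidence"])

    for candidate in detect_all(sample, ignore_threshold=True):
        # Candidates come back sorted by descending confidence, per the code above.
        print(candidate)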
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/big5freq.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/big5freq.py
new file mode 100644
index 0000000..87d9f97
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/big5freq.py
@@ -0,0 +1,386 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+# Big5 frequency table
+# by Taiwan's Mandarin Promotion Council
+# 
+#
+# 128  --> 0.42261
+# 256  --> 0.57851
+# 512  --> 0.74851
+# 1024 --> 0.89384
+# 2048 --> 0.97583
+#
+# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98
+# Random Distribution Ratio = 512/(5401-512) = 0.105
+#
+# The Typical Distribution Ratio is about 25% of the Ideal one, still much higher than the RDR
+
+BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75
+
+# Char to FreqOrder table
+BIG5_TABLE_SIZE = 5376
+# fmt: off
+BIG5_CHAR_TO_FREQ_ORDER = (
+   1,1801,1506, 255,1431, 198,   9,  82,   6,5008, 177, 202,3681,1256,2821, 110, #   16
+3814,  33,3274, 261,  76,  44,2114,  16,2946,2187,1176, 659,3971,  26,3451,2653, #   32
+1198,3972,3350,4202, 410,2215, 302, 590, 361,1964,   8, 204,  58,4510,5009,1932, #   48
+  63,5010,5011, 317,1614,  75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, #   64
+3682,   3,  10,3973,1471,  29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, #   80
+4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947,  34,3556,3204,  64, 604, #   96
+5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337,  72, 406,5017,  80, #  112
+ 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449,  69,2987, 591, #  128
+ 179,2096, 471, 115,2035,1844,  60,  50,2988, 134, 806,1869, 734,2036,3454, 180, #  144
+ 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, #  160
+2502,  90,2716,1338, 663,  11, 906,1099,2553,  20,2441, 182, 532,1716,5019, 732, #  176
+1376,4204,1311,1420,3206,  25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, #  192
+3276, 475,1447,3683,5020, 117,  21, 656, 810,1297,2300,2334,3557,5021, 126,4205, #  208
+ 706, 456, 150, 613,4513,  71,1118,2037,4206, 145,3092,  85, 835, 486,2115,1246, #  224
+1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, #  240
+3558,3135,5023,1956,1153,4207,  83, 296,1199,3093, 192, 624,  93,5024, 822,1898, #  256
+2823,3136, 795,2065, 991,1554,1542,1592,  27,  43,2867, 859, 139,1456, 860,4514, #  272
+ 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, #  288
+3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, #  304
+1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, #  320
+5026,5027,2176,3207,3685,2682, 593, 845,1062,3277,  88,1723,2038,3978,1951, 212, #  336
+ 266, 152, 149, 468,1899,4208,4516,  77, 187,5028,3038,  37,   5,2990,5029,3979, #  352
+5030,5031,  39,2524,4517,2908,3208,2079,  55, 148,  74,4518, 545, 483,1474,1029, #  368
+1665, 217,1870,1531,3138,1104,2655,4209,  24, 172,3562, 900,3980,3563,3564,4519, #  384
+  32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683,   4,3039,3351,1427,1789, #  400
+ 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, #  416
+3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439,  38,5037,1063,5038, 794, #  432
+3982,1435,2301,  46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804,  35, 707, #  448
+ 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, #  464
+2129,1363,3689,1423, 697, 100,3094,  48,  70,1231, 495,3139,2196,5043,1294,5044, #  480
+2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, #  496
+ 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, #  512
+ 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, #  528
+3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, #  544
+1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, #  560
+1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, #  576
+1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381,   7, #  592
+2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, #  608
+ 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, #  624
+4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, #  640
+1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, #  656
+5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, #  672
+2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, #  688
+ 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, #  704
+  98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, #  720
+ 523,2789,2790,2658,5061, 141,2235,1333,  68, 176, 441, 876, 907,4220, 603,2602, #  736
+ 710, 171,3464, 404, 549,  18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, #  752
+5063,2991, 368,5064, 146, 366,  99, 871,3693,1543, 748, 807,1586,1185,  22,2263, #  768
+ 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, #  784
+1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068,  59,5069, #  800
+ 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, #  816
+ 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, #  832
+5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, #  848
+1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, #  864
+ 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, #  880
+3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, #  896
+4224,  57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, #  912
+3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, #  928
+ 279,3145,  51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, #  944
+ 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, #  960
+1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, #  976
+4227,2475,1436, 953,4228,2055,4545, 671,2400,  79,4229,2446,3285, 608, 567,2689, #  992
+3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008
+3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024
+2402,5097,5098,5099,4232,3045,   0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040
+5101, 233,4233,3697,1819,4550,4551,5102,  96,1777,1315,2083,5103, 257,5104,1810, # 1056
+3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072
+5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088
+1484,5110,1712, 127,  67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104
+2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120
+1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136
+  78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152
+1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168
+4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184
+3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200
+ 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216
+ 165, 243,4559,3703,2528, 123, 683,4239, 764,4560,  36,3998,1793, 589,2916, 816, # 1232
+ 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248
+2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264
+5122, 611,1156, 854,2386,1316,2875,   2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280
+1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296
+2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312
+1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328
+1994,5135,4564,5136,5137,2198,  13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344
+5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360
+5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376
+5149, 128,2133,  92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392
+3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408
+4567,2252,  94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424
+4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440
+2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456
+5163,2337,2068,  23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472
+3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488
+ 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504
+5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863,  41, # 1520
+5170,5171,4575,5172,1657,2338,  19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536
+1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552
+2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568
+3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584
+4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600
+5182,2692, 733,  40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616
+3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632
+4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648
+1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664
+1871,2762,3004,5187, 435,5188, 343,1108, 596,  17,1751,4579,2239,3477,3709,5189, # 1680
+4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696
+1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712
+ 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728
+1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744
+1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760
+3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776
+ 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792
+5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808
+2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824
+1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840
+1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551,  30,2268,4266, # 1856
+5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872
+ 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888
+4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904
+ 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920
+2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936
+ 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952
+1041,3005, 293,1168,  87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968
+1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984
+ 730,1515, 184,2840,  66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000
+4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016
+4021,5231,5232,1186,  15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032
+1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048
+3596,1342,1681,1718, 766,3297, 286,  89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064
+5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080
+5240,3298, 310, 313,3482,2304, 770,4278,  54,3054, 189,4611,3105,3848,4025,5241, # 2096
+1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112
+2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128
+1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144
+3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160
+2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176
+3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192
+2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208
+4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224
+4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240
+3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256
+  97,  81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272
+3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288
+ 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304
+3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320
+4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336
+3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352
+1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368
+5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384
+ 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400
+5286, 587,  14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416
+1702,1226, 102,1547,  62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432
+ 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448
+4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294,  86,1494,1730, # 2464
+4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480
+ 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496
+2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512
+2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885,  28,2695, # 2528
+3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544
+1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560
+4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576
+2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592
+1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608
+1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624
+2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640
+3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656
+1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672
+5313,3493,5314,5315,5316,3310,2698,1433,3311, 131,  95,1504,4049, 723,4303,3166, # 2688
+1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704
+4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654,  53,5320,3014,5321, # 2720
+1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736
+ 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752
+1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768
+4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784
+4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800
+2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816
+1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832
+4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848
+ 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864
+5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880
+2322,3316,5346,5347,4308,5348,4309,  84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896
+3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912
+4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928
+ 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944
+5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960
+5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976
+1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992
+4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008
+4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024
+2699,1516,3614,1121,1082,1329,3317,4073,1449,3873,  65,1128,2848,2927,2769,1590, # 3040
+3874,5370,5371,  12,2668,  45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056
+3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072
+2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088
+1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104
+4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120
+3736,1859,  91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136
+3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152
+2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168
+4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771,  61,4079,3738,1823,4080, # 3184
+5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200
+3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216
+2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232
+3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248
+1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264
+2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280
+3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296
+4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063,  56,1396,3113, # 3312
+2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328
+2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344
+5418,1076,  49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360
+1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376
+2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392
+1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408
+3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424
+4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629,  31,2851, # 3440
+2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456
+3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472
+3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488
+2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504
+4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520
+2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536
+3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552
+4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568
+5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584
+3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600
+ 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616
+1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412,  42,3119, 464,5455,2642, # 3632
+4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648
+1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664
+4701,5462,3020, 962, 588,3629, 289,3250,2644,1116,  52,5463,3067,1797,5464,5465, # 3680
+5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696
+ 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712
+5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728
+5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744
+2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760
+3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776
+2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792
+2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808
+ 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824
+1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840
+4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856
+3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872
+3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888
+ 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904
+2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920
+ 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936
+2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952
+4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968
+1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984
+4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000
+1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016
+3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032
+ 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048
+3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064
+5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080
+5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096
+3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112
+3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128
+1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144
+2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160
+5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176
+1561,2674,1452,4113,1375,5549,5550,  47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192
+1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208
+3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224
+ 919,2352,2975,2353,1270,4727,4115,  73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240
+1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256
+4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272
+5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288
+2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304
+3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320
+ 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336
+1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352
+2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368
+2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384
+5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400
+5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416
+5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432
+2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448
+2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464
+1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480
+4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496
+3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512
+3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528
+4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544
+4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560
+2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576
+2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592
+5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608
+4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624
+5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640
+4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656
+ 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672
+ 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688
+1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704
+3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720
+4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736
+1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752
+5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768
+2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784
+2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800
+3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816
+5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832
+1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848
+3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864
+5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880
+1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896
+5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912
+2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928
+3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944
+2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960
+3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976
+3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992
+3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008
+4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024
+ 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040
+2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056
+4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072
+3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088
+5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104
+1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120
+5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136
+ 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152
+1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168
+ 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184
+4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200
+1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216
+4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232
+1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248
+ 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264
+3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280
+4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296
+5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312
+ 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328
+3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344
+ 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360
+2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376
+)
+# fmt: on
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/big5prober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/big5prober.py
new file mode 100644
index 0000000..ef09c60
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/big5prober.py
@@ -0,0 +1,47 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .chardistribution import Big5DistributionAnalysis
+from .codingstatemachine import CodingStateMachine
+from .mbcharsetprober import MultiByteCharSetProber
+from .mbcssm import BIG5_SM_MODEL
+
+
+class Big5Prober(MultiByteCharSetProber):
+    def __init__(self) -> None:
+        super().__init__()
+        self.coding_sm = CodingStateMachine(BIG5_SM_MODEL)
+        self.distribution_analyzer = Big5DistributionAnalysis()
+        self.reset()
+
+    @property
+    def charset_name(self) -> str:
+        return "Big5"
+
+    @property
+    def language(self) -> str:
+        return "Chinese"
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/chardistribution.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/chardistribution.py
new file mode 100644
index 0000000..176cb99
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/chardistribution.py
@@ -0,0 +1,261 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from typing import Tuple, Union
+
+from .big5freq import (
+    BIG5_CHAR_TO_FREQ_ORDER,
+    BIG5_TABLE_SIZE,
+    BIG5_TYPICAL_DISTRIBUTION_RATIO,
+)
+from .euckrfreq import (
+    EUCKR_CHAR_TO_FREQ_ORDER,
+    EUCKR_TABLE_SIZE,
+    EUCKR_TYPICAL_DISTRIBUTION_RATIO,
+)
+from .euctwfreq import (
+    EUCTW_CHAR_TO_FREQ_ORDER,
+    EUCTW_TABLE_SIZE,
+    EUCTW_TYPICAL_DISTRIBUTION_RATIO,
+)
+from .gb2312freq import (
+    GB2312_CHAR_TO_FREQ_ORDER,
+    GB2312_TABLE_SIZE,
+    GB2312_TYPICAL_DISTRIBUTION_RATIO,
+)
+from .jisfreq import (
+    JIS_CHAR_TO_FREQ_ORDER,
+    JIS_TABLE_SIZE,
+    JIS_TYPICAL_DISTRIBUTION_RATIO,
+)
+from .johabfreq import JOHAB_TO_EUCKR_ORDER_TABLE
+
+
+class CharDistributionAnalysis:
+    ENOUGH_DATA_THRESHOLD = 1024
+    SURE_YES = 0.99
+    SURE_NO = 0.01
+    MINIMUM_DATA_THRESHOLD = 3
+
+    def __init__(self) -> None:
+        # Mapping table to get frequency order from char order (get from
+        # GetOrder())
+        self._char_to_freq_order: Tuple[int, ...] = tuple()
+        self._table_size = 0  # Size of above table
+        # This is a constant value which varies from language to language,
+        # used in calculating confidence.  See
+        # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
+        # for further detail.
+        self.typical_distribution_ratio = 0.0
+        self._done = False
+        self._total_chars = 0
+        self._freq_chars = 0
+        self.reset()
+
+    def reset(self) -> None:
+        """reset analyser, clear any state"""
+        # If this flag is set to True, detection is done and a conclusion has
+        # been made.
+        self._done = False
+        self._total_chars = 0  # Total characters encountered
+        # The number of characters whose frequency order is less than 512
+        self._freq_chars = 0
+
+    def feed(self, char: Union[bytes, bytearray], char_len: int) -> None:
+        """feed a character with known length"""
+        if char_len == 2:
+            # we only care about 2-byte characters in our distribution analysis
+            order = self.get_order(char)
+        else:
+            order = -1
+        if order >= 0:
+            self._total_chars += 1
+            # order is valid
+            if order < self._table_size:
+                if 512 > self._char_to_freq_order[order]:
+                    self._freq_chars += 1
+
+    def get_confidence(self) -> float:
+        """return confidence based on existing data"""
+        # if we didn't receive any character in our consideration range,
+        # return negative answer
+        if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD:
+            return self.SURE_NO
+
+        if self._total_chars != self._freq_chars:
+            r = self._freq_chars / (
+                (self._total_chars - self._freq_chars) * self.typical_distribution_ratio
+            )
+            if r < self.SURE_YES:
+                return r
+
+        # normalize confidence (we don't want to be 100% sure)
+        return self.SURE_YES
+
+    def got_enough_data(self) -> bool:
+        # It is not necessary to receive all data to draw a conclusion.
+        # For charset detection, a certain amount of data is enough.
+        return self._total_chars > self.ENOUGH_DATA_THRESHOLD
+
+    def get_order(self, _: Union[bytes, bytearray]) -> int:
+        # We do not handle characters based on the original encoding string,
+        # but convert this encoding string to a number, here called order.
+        # This allows multiple encodings of a language to share one frequency
+        # table.
+        return -1
+
+
+class EUCTWDistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self) -> None:
+        super().__init__()
+        self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER
+        self._table_size = EUCTW_TABLE_SIZE
+        self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str: Union[bytes, bytearray]) -> int:
+        # for euc-TW encoding, we are interested
+        #   first  byte range: 0xc4 -- 0xfe
+        #   second byte range: 0xa1 -- 0xfe
+        # no validation needed here. State machine has done that
+        first_char = byte_str[0]
+        if first_char >= 0xC4:
+            return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1
+        return -1
+
+
+class EUCKRDistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self) -> None:
+        super().__init__()
+        self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER
+        self._table_size = EUCKR_TABLE_SIZE
+        self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str: Union[bytes, bytearray]) -> int:
+        # for euc-KR encoding, we are interested
+        #   first  byte range: 0xb0 -- 0xfe
+        #   second byte range: 0xa1 -- 0xfe
+        # no validation needed here. State machine has done that
+        first_char = byte_str[0]
+        if first_char >= 0xB0:
+            return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1
+        return -1
+
+
+class JOHABDistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self) -> None:
+        super().__init__()
+        self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER
+        self._table_size = EUCKR_TABLE_SIZE
+        self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str: Union[bytes, bytearray]) -> int:
+        first_char = byte_str[0]
+        if 0x88 <= first_char < 0xD4:
+            code = first_char * 256 + byte_str[1]
+            return JOHAB_TO_EUCKR_ORDER_TABLE.get(code, -1)
+        return -1
+
+
+class GB2312DistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self) -> None:
+        super().__init__()
+        self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER
+        self._table_size = GB2312_TABLE_SIZE
+        self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str: Union[bytes, bytearray]) -> int:
+        # for GB2312 encoding, we are interested
+        #  first  byte range: 0xb0 -- 0xfe
+        #  second byte range: 0xa1 -- 0xfe
+        # no validation needed here. State machine has done that
+        first_char, second_char = byte_str[0], byte_str[1]
+        if (first_char >= 0xB0) and (second_char >= 0xA1):
+            return 94 * (first_char - 0xB0) + second_char - 0xA1
+        return -1
+
+
+class Big5DistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self) -> None:
+        super().__init__()
+        self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER
+        self._table_size = BIG5_TABLE_SIZE
+        self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str: Union[bytes, bytearray]) -> int:
+        # for big5 encoding, we are interested
+        #   first  byte range: 0xa4 -- 0xfe
+        #   second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe
+        # no validation needed here. State machine has done that
+        first_char, second_char = byte_str[0], byte_str[1]
+        if first_char >= 0xA4:
+            if second_char >= 0xA1:
+                return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63
+            return 157 * (first_char - 0xA4) + second_char - 0x40
+        return -1
+
+
+class SJISDistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self) -> None:
+        super().__init__()
+        self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
+        self._table_size = JIS_TABLE_SIZE
+        self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str: Union[bytes, bytearray]) -> int:
+        # for sjis encoding, we are interested
+        #   first  byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe
+        #   second byte range: 0x40 -- 0x7e,  0x81 -- 0xfe
+        # no validation needed here. State machine has done that
+        first_char, second_char = byte_str[0], byte_str[1]
+        if 0x81 <= first_char <= 0x9F:
+            order = 188 * (first_char - 0x81)
+        elif 0xE0 <= first_char <= 0xEF:
+            order = 188 * (first_char - 0xE0 + 31)
+        else:
+            return -1
+        order = order + second_char - 0x40
+        if second_char > 0x7F:
+            order = -1
+        return order
+
+
+class EUCJPDistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self) -> None:
+        super().__init__()
+        self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
+        self._table_size = JIS_TABLE_SIZE
+        self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str: Union[bytes, bytearray]) -> int:
+        # for euc-JP encoding, we are interested
+        #   first  byte range: 0xa0 -- 0xfe
+        #   second byte range: 0xa1 -- 0xfe
+        # no validation needed here. State machine has done that
+        char = byte_str[0]
+        if char >= 0xA0:
+            return 94 * (char - 0xA1) + byte_str[1] - 0xA1
+        return -1
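
The order returned by each get_order() above is simply an index into the corresponding
*_CHAR_TO_FREQ_ORDER table (the Big5 one is the large tuple earlier in this diff). A small
worked sketch of the Big5 arithmetic, with the import path assumed as in the other examples:

    from chardet.chardistribution import Big5DistributionAnalysis

    dist = Big5DistributionAnalysis()
    # trail byte in the low range 0x40-0x7e:
    #   157 * (0xA4 - 0xA4) + 0x40 - 0x40 == 0
    assert dist.get_order(b"\xa4\x40") == 0
    # trail byte >= 0xA1 is offset past the 63 low-range slots:
    #   157 * (0xA4 - 0xA4) + 0xA1 - 0xA1 + 63 == 63
    assert dist.get_order(b"\xa4\xa1") == 63

    # feed() counts how many orders fall below frequency rank 512; get_confidence() then
    # computes r = freq_chars / ((total_chars - freq_chars) * typical_distribution_ratio)
    dist.feed(b"\xa4\x40", 2)
    print(dist.get_confidence())
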
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/charsetgroupprober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/charsetgroupprober.py
new file mode 100644
index 0000000..6def56b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/charsetgroupprober.py
@@ -0,0 +1,106 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from typing import List, Optional, Union
+
+from .charsetprober import CharSetProber
+from .enums import LanguageFilter, ProbingState
+
+
+class CharSetGroupProber(CharSetProber):
+    def __init__(self, lang_filter: LanguageFilter = LanguageFilter.NONE) -> None:
+        super().__init__(lang_filter=lang_filter)
+        self._active_num = 0
+        self.probers: List[CharSetProber] = []
+        self._best_guess_prober: Optional[CharSetProber] = None
+
+    def reset(self) -> None:
+        super().reset()
+        self._active_num = 0
+        for prober in self.probers:
+            prober.reset()
+            prober.active = True
+            self._active_num += 1
+        self._best_guess_prober = None
+
+    @property
+    def charset_name(self) -> Optional[str]:
+        if not self._best_guess_prober:
+            self.get_confidence()
+            if not self._best_guess_prober:
+                return None
+        return self._best_guess_prober.charset_name
+
+    @property
+    def language(self) -> Optional[str]:
+        if not self._best_guess_prober:
+            self.get_confidence()
+            if not self._best_guess_prober:
+                return None
+        return self._best_guess_prober.language
+
+    def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState:
+        for prober in self.probers:
+            if not prober.active:
+                continue
+            state = prober.feed(byte_str)
+            if not state:
+                continue
+            if state == ProbingState.FOUND_IT:
+                self._best_guess_prober = prober
+                self._state = ProbingState.FOUND_IT
+                return self.state
+            if state == ProbingState.NOT_ME:
+                prober.active = False
+                self._active_num -= 1
+                if self._active_num <= 0:
+                    self._state = ProbingState.NOT_ME
+                    return self.state
+        return self.state
+
+    def get_confidence(self) -> float:
+        state = self.state
+        if state == ProbingState.FOUND_IT:
+            return 0.99
+        if state == ProbingState.NOT_ME:
+            return 0.01
+        best_conf = 0.0
+        self._best_guess_prober = None
+        for prober in self.probers:
+            if not prober.active:
+                self.logger.debug("%s not active", prober.charset_name)
+                continue
+            conf = prober.get_confidence()
+            self.logger.debug(
+                "%s %s confidence = %s", prober.charset_name, prober.language, conf
+            )
+            if best_conf < conf:
+                best_conf = conf
+                self._best_guess_prober = prober
+        if not self._best_guess_prober:
+            return 0.0
+        return best_conf
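
CharSetGroupProber never fills self.probers itself; concrete subclasses (MBCSGroupProber and
friends elsewhere in this package) do that in __init__. A hedged sketch of the aggregation
pattern, wiring two probers from this diff into a group by hand (import path assumed as above):

    from chardet.charsetgroupprober import CharSetGroupProber
    from chardet.big5prober import Big5Prober
    from chardet.cp949prober import CP949Prober

    group = CharSetGroupProber()
    group.probers = [Big5Prober(), CP949Prober()]   # normally done by a subclass __init__
    group.reset()                                   # marks every child prober active

    group.feed(b"\xa4\x40\xa4\xa1")                 # every active child sees the same bytes
    print(group.charset_name, group.get_confidence())   # best-guess child wins
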
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/charsetprober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/charsetprober.py
new file mode 100644
index 0000000..a103ca1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/charsetprober.py
@@ -0,0 +1,147 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#   Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+import logging
+import re
+from typing import Optional, Union
+
+from .enums import LanguageFilter, ProbingState
+
+INTERNATIONAL_WORDS_PATTERN = re.compile(
+    b"[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?"
+)
+
+
+class CharSetProber:
+
+    SHORTCUT_THRESHOLD = 0.95
+
+    def __init__(self, lang_filter: LanguageFilter = LanguageFilter.NONE) -> None:
+        self._state = ProbingState.DETECTING
+        self.active = True
+        self.lang_filter = lang_filter
+        self.logger = logging.getLogger(__name__)
+
+    def reset(self) -> None:
+        self._state = ProbingState.DETECTING
+
+    @property
+    def charset_name(self) -> Optional[str]:
+        return None
+
+    @property
+    def language(self) -> Optional[str]:
+        raise NotImplementedError
+
+    def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState:
+        raise NotImplementedError
+
+    @property
+    def state(self) -> ProbingState:
+        return self._state
+
+    def get_confidence(self) -> float:
+        return 0.0
+
+    @staticmethod
+    def filter_high_byte_only(buf: Union[bytes, bytearray]) -> bytes:
+        buf = re.sub(b"([\x00-\x7F])+", b" ", buf)
+        return buf
+
+    @staticmethod
+    def filter_international_words(buf: Union[bytes, bytearray]) -> bytearray:
+        """
+        We define three types of bytes:
+        alphabet: English letters [a-zA-Z]
+        international: international characters [\x80-\xFF]
+        marker: everything else [^a-zA-Z\x80-\xFF]
+        The input buffer can be thought to contain a series of words delimited
+        by markers. This function works to filter all words that contain at
+        least one international character. All contiguous sequences of markers
+        are replaced by a single space ascii character.
+        This filter applies to all scripts which do not use English characters.
+        """
+        filtered = bytearray()
+
+        # This regex matches only words that contain at least one
+        # international character. The word may include one marker character at
+        # the end.
+        words = INTERNATIONAL_WORDS_PATTERN.findall(buf)
+
+        for word in words:
+            filtered.extend(word[:-1])
+
+            # If the last character in the word is a marker, replace it with a
+            # space as markers shouldn't affect our analysis (they are used
+            # similarly across all languages and may thus have similar
+            # frequencies).
+            last_char = word[-1:]
+            if not last_char.isalpha() and last_char < b"\x80":
+                last_char = b" "
+            filtered.extend(last_char)
+
+        return filtered
+
+    @staticmethod
+    def remove_xml_tags(buf: Union[bytes, bytearray]) -> bytes:
+        """
+        Returns a copy of ``buf`` that retains only the sequences of English
+        alphabet and high byte characters that are not between <> characters.
+        This filter can be applied to all scripts which contain both English
+        characters and extended ASCII characters, but is currently only used by
+        ``Latin1Prober``.
+        """
+        filtered = bytearray()
+        in_tag = False
+        prev = 0
+        buf = memoryview(buf).cast("c")
+
+        for curr, buf_char in enumerate(buf):
+            # Check if we're coming out of or entering an XML tag
+
+            # https://github.com/python/typeshed/issues/8182
+            if buf_char == b">":  # type: ignore[comparison-overlap]
+                prev = curr + 1
+                in_tag = False
+            # https://github.com/python/typeshed/issues/8182
+            elif buf_char == b"<":  # type: ignore[comparison-overlap]
+                if curr > prev and not in_tag:
+                    # Keep everything after last non-extended-ASCII,
+                    # non-alphabetic character
+                    filtered.extend(buf[prev:curr])
+                    # Output a space to delimit stretch we kept
+                    filtered.extend(b" ")
+                in_tag = True
+
+        # If we're not in a tag...
+        if not in_tag:
+            # Keep everything after last non-extended-ASCII, non-alphabetic
+            # character
+            filtered.extend(buf[prev:])
+
+        return filtered
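
The static filters above are plain byte transforms and can be exercised directly; a small
sketch, with results worked out from the regexes above (import path assumed as before):

    from chardet.charsetprober import CharSetProber

    # filter_high_byte_only collapses every ASCII run to a single space
    assert CharSetProber.filter_high_byte_only(b"abc\xe9def") == b" \xe9 "

    # filter_international_words keeps only words containing a byte >= 0x80 and
    # turns a trailing marker byte into a space
    print(CharSetProber.filter_international_words(b"plain caf\xe9, text"))
    # -> bytearray(b"caf\xe9 ")
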
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/cli/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/cli/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/cli/chardetect.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/cli/chardetect.py
new file mode 100644
index 0000000..43f6e14
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/cli/chardetect.py
@@ -0,0 +1,112 @@
+"""
+Script which takes one or more file paths and reports on their detected
+encodings
+
+Example::
+
+    % chardetect somefile someotherfile
+    somefile: windows-1252 with confidence 0.5
+    someotherfile: ascii with confidence 1.0
+
+If no paths are provided, it takes its input from stdin.
+
+"""
+
+
+import argparse
+import sys
+from typing import Iterable, List, Optional
+
+from .. import __version__
+from ..universaldetector import UniversalDetector
+
+
+def description_of(
+    lines: Iterable[bytes],
+    name: str = "stdin",
+    minimal: bool = False,
+    should_rename_legacy: bool = False,
+) -> Optional[str]:
+    """
+    Return a string describing the probable encoding of a file or
+    list of strings.
+
+    :param lines: The lines to get the encoding of.
+    :type lines: Iterable of bytes
+    :param name: Name of file or collection of lines
+    :type name: str
+    :param should_rename_legacy:  Should we rename legacy encodings to
+                                  their more modern equivalents?
+    :type should_rename_legacy:   ``bool``
+    """
+    u = UniversalDetector(should_rename_legacy=should_rename_legacy)
+    for line in lines:
+        line = bytearray(line)
+        u.feed(line)
+        # shortcut out of the loop to save reading further - particularly useful if we read a BOM.
+        if u.done:
+            break
+    u.close()
+    result = u.result
+    if minimal:
+        return result["encoding"]
+    if result["encoding"]:
+        return f'{name}: {result["encoding"]} with confidence {result["confidence"]}'
+    return f"{name}: no result"
+
+
+def main(argv: Optional[List[str]] = None) -> None:
+    """
+    Handles command line arguments and gets things started.
+
+    :param argv: List of arguments, as if specified on the command-line.
+                 If None, ``sys.argv[1:]`` is used instead.
+    :type argv: list of str
+    """
+    # Get command line arguments
+    parser = argparse.ArgumentParser(
+        description=(
+            "Takes one or more file paths and reports their detected encodings"
+        )
+    )
+    parser.add_argument(
+        "input",
+        help="File whose encoding we would like to determine. (default: stdin)",
+        type=argparse.FileType("rb"),
+        nargs="*",
+        default=[sys.stdin.buffer],
+    )
+    parser.add_argument(
+        "--minimal",
+        help="Print only the encoding to standard output",
+        action="store_true",
+    )
+    parser.add_argument(
+        "-l",
+        "--legacy",
+        help="Rename legacy encodings to more modern ones.",
+        action="store_true",
+    )
+    parser.add_argument(
+        "--version", action="version", version=f"%(prog)s {__version__}"
+    )
+    args = parser.parse_args(argv)
+
+    for f in args.input:
+        if f.isatty():
+            print(
+                "You are running chardetect interactively. Press "
+                "CTRL-D twice at the start of a blank line to signal the "
+                "end of your input. If you want help, run chardetect "
+                "--help\n",
+                file=sys.stderr,
+            )
+        print(
+            description_of(
+                f, f.name, minimal=args.minimal, should_rename_legacy=args.legacy
+            )
+        )
+
+
+if __name__ == "__main__":
+    main()
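
Besides the argparse entry point, description_of() can be called directly; a minimal sketch
(the file name is hypothetical and the reported encoding is only an example):

    from chardet.cli.chardetect import description_of

    with open("somefile.txt", "rb") as fp:          # hypothetical file
        # a binary file handle iterates as byte lines, exactly how main() uses it
        print(description_of(fp, name=fp.name))
    # e.g. "somefile.txt: utf-8 with confidence 0.99"
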
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/codingstatemachine.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/codingstatemachine.py
new file mode 100644
index 0000000..8ed4a87
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/codingstatemachine.py
@@ -0,0 +1,90 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+import logging
+
+from .codingstatemachinedict import CodingStateMachineDict
+from .enums import MachineState
+
+
+class CodingStateMachine:
+    """
+    A state machine to verify a byte sequence for a particular encoding. For
+    each byte the detector receives, it will feed that byte to every active
+    state machine available, one byte at a time. The state machine changes its
+    state based on its previous state and the byte it receives. There are 3
+    states in a state machine that are of interest to an auto-detector:
+
+    START state: This is the state to start with, or a legal byte sequence
+                 (i.e. a valid code point) for a character has been identified.
+
+    ME state:  This indicates that the state machine identified a byte sequence
+               that is specific to the charset it is designed for and that
+               there is no other possible encoding which can contain this byte
+               sequence. This will lead to an immediate positive answer for
+               the detector.
+
+    ERROR state: This indicates the state machine identified an illegal byte
+                 sequence for that encoding. This will lead to an immediate
+                 negative answer for this encoding. Detector will exclude this
+                 encoding from consideration from here on.
+    """
+
+    def __init__(self, sm: CodingStateMachineDict) -> None:
+        self._model = sm
+        self._curr_byte_pos = 0
+        self._curr_char_len = 0
+        self._curr_state = MachineState.START
+        self.active = True
+        self.logger = logging.getLogger(__name__)
+        self.reset()
+
+    def reset(self) -> None:
+        self._curr_state = MachineState.START
+
+    def next_state(self, c: int) -> int:
+        # for each byte we get its class
+        # if it is first byte, we also get byte length
+        byte_class = self._model["class_table"][c]
+        if self._curr_state == MachineState.START:
+            self._curr_byte_pos = 0
+            self._curr_char_len = self._model["char_len_table"][byte_class]
+        # from byte's class and state_table, we get its next state
+        curr_state = self._curr_state * self._model["class_factor"] + byte_class
+        self._curr_state = self._model["state_table"][curr_state]
+        self._curr_byte_pos += 1
+        return self._curr_state
+
+    def get_current_charlen(self) -> int:
+        return self._curr_char_len
+
+    def get_coding_state_machine(self) -> str:
+        return self._model["name"]
+
+    @property
+    def language(self) -> str:
+        return self._model["language"]
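
A hedged sketch of driving one state machine by hand over a byte string, mirroring what
MultiByteCharSetProber.feed() does internally (BIG5_SM_MODEL is the model imported by
big5prober.py above; import path assumed as in the earlier examples):

    from chardet.codingstatemachine import CodingStateMachine
    from chardet.enums import MachineState
    from chardet.mbcssm import BIG5_SM_MODEL

    sm = CodingStateMachine(BIG5_SM_MODEL)
    for byte in b"\xa4\x40\xa4\xa1":                # two plausible Big5 lead/trail pairs
        state = sm.next_state(byte)
        if state == MachineState.ERROR:
            print("not legal Big5")
            break
        if state == MachineState.ITS_ME:
            print("unambiguously Big5")
            break
    else:
        print("still plausible; current char length:", sm.get_current_charlen())
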
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/codingstatemachinedict.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/codingstatemachinedict.py
new file mode 100644
index 0000000..7a3c4c7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/codingstatemachinedict.py
@@ -0,0 +1,19 @@
+from typing import TYPE_CHECKING, Tuple
+
+if TYPE_CHECKING:
+    # TypedDict was introduced in Python 3.8.
+    #
+    # TODO: Remove the else block and TYPE_CHECKING check when dropping support
+    # for Python 3.7.
+    from typing import TypedDict
+
+    class CodingStateMachineDict(TypedDict, total=False):
+        class_table: Tuple[int, ...]
+        class_factor: int
+        state_table: Tuple[int, ...]
+        char_len_table: Tuple[int, ...]
+        name: str
+        language: str  # Optional key
+
+else:
+    CodingStateMachineDict = dict
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/cp949prober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/cp949prober.py
new file mode 100644
index 0000000..fa7307e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/cp949prober.py
@@ -0,0 +1,49 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .chardistribution import EUCKRDistributionAnalysis
+from .codingstatemachine import CodingStateMachine
+from .mbcharsetprober import MultiByteCharSetProber
+from .mbcssm import CP949_SM_MODEL
+
+
+class CP949Prober(MultiByteCharSetProber):
+    def __init__(self) -> None:
+        super().__init__()
+        self.coding_sm = CodingStateMachine(CP949_SM_MODEL)
+        # NOTE: CP949 is a superset of EUC-KR, so the distribution should not
+        #       differ.
+        self.distribution_analyzer = EUCKRDistributionAnalysis()
+        self.reset()
+
+    @property
+    def charset_name(self) -> str:
+        return "CP949"
+
+    @property
+    def language(self) -> str:
+        return "Korean"
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/enums.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/enums.py
new file mode 100644
index 0000000..5e3e198
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/enums.py
@@ -0,0 +1,85 @@
+"""
+All of the Enums that are used throughout the chardet package.
+
+:author: Dan Blanchard (dan.blanchard@gmail.com)
+"""
+
+from enum import Enum, Flag
+
+
+class InputState:
+    """
+    This enum represents the different states a universal detector can be in.
+    """
+
+    PURE_ASCII = 0
+    ESC_ASCII = 1
+    HIGH_BYTE = 2
+
+
+class LanguageFilter(Flag):
+    """
+    This enum represents the different language filters we can apply to a
+    ``UniversalDetector``.
+    """
+
+    NONE = 0x00
+    CHINESE_SIMPLIFIED = 0x01
+    CHINESE_TRADITIONAL = 0x02
+    JAPANESE = 0x04
+    KOREAN = 0x08
+    NON_CJK = 0x10
+    ALL = 0x1F
+    CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL
+    CJK = CHINESE | JAPANESE | KOREAN
+
+
+class ProbingState(Enum):
+    """
+    This enum represents the different states a prober can be in.
+    """
+
+    DETECTING = 0
+    FOUND_IT = 1
+    NOT_ME = 2
+
+
+class MachineState:
+    """
+    This enum represents the different states a state machine can be in.
+    """
+
+    START = 0
+    ERROR = 1
+    ITS_ME = 2
+
+
+class SequenceLikelihood:
+    """
+    This enum represents the likelihood of a character following the previous one.
+    """
+
+    NEGATIVE = 0
+    UNLIKELY = 1
+    LIKELY = 2
+    POSITIVE = 3
+
+    @classmethod
+    def get_num_categories(cls) -> int:
+        """:returns: The number of likelihood categories in the enum."""
+        return 4
+
+
+class CharacterCategory:
+    """
+    This enum represents the different categories that language models for
+    ``SingleByteCharsetProber`` put characters into.
+
+    Anything less than CONTROL is considered a letter.
+    """
+
+    UNDEFINED = 255
+    LINE_BREAK = 254
+    SYMBOL = 253
+    DIGIT = 252
+    CONTROL = 251
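
A quick illustration (editorial note, not part of the vendored file) of how the LanguageFilter flags above compose: members combine with | and are tested with &, and the composite CJK value is simply the union of the three CJK languages. The import path assumes pip's vendored layout.

from pip._vendor.chardet.enums import LanguageFilter

wanted = LanguageFilter.CHINESE | LanguageFilter.JAPANESE   # 0x03 | 0x04
assert wanted & LanguageFilter.CHINESE_SIMPLIFIED           # included via CHINESE
assert not (wanted & LanguageFilter.KOREAN)                 # Korean probers stay off
assert LanguageFilter.CJK == (LanguageFilter.CHINESE
                              | LanguageFilter.JAPANESE
                              | LanguageFilter.KOREAN)
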
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/escprober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/escprober.py
new file mode 100644
index 0000000..fd71383
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/escprober.py
@@ -0,0 +1,102 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from typing import Optional, Union
+
+from .charsetprober import CharSetProber
+from .codingstatemachine import CodingStateMachine
+from .enums import LanguageFilter, MachineState, ProbingState
+from .escsm import (
+    HZ_SM_MODEL,
+    ISO2022CN_SM_MODEL,
+    ISO2022JP_SM_MODEL,
+    ISO2022KR_SM_MODEL,
+)
+
+
+class EscCharSetProber(CharSetProber):
+    """
+    This CharSetProber uses a "code scheme" approach for detecting encodings,
+    whereby easily recognizable escape or shift sequences are relied on to
+    identify these encodings.
+    """
+
+    def __init__(self, lang_filter: LanguageFilter = LanguageFilter.NONE) -> None:
+        super().__init__(lang_filter=lang_filter)
+        self.coding_sm = []
+        if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED:
+            self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL))
+            self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL))
+        if self.lang_filter & LanguageFilter.JAPANESE:
+            self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL))
+        if self.lang_filter & LanguageFilter.KOREAN:
+            self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL))
+        self.active_sm_count = 0
+        self._detected_charset: Optional[str] = None
+        self._detected_language: Optional[str] = None
+        self._state = ProbingState.DETECTING
+        self.reset()
+
+    def reset(self) -> None:
+        super().reset()
+        for coding_sm in self.coding_sm:
+            coding_sm.active = True
+            coding_sm.reset()
+        self.active_sm_count = len(self.coding_sm)
+        self._detected_charset = None
+        self._detected_language = None
+
+    @property
+    def charset_name(self) -> Optional[str]:
+        return self._detected_charset
+
+    @property
+    def language(self) -> Optional[str]:
+        return self._detected_language
+
+    def get_confidence(self) -> float:
+        return 0.99 if self._detected_charset else 0.00
+
+    def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState:
+        for c in byte_str:
+            for coding_sm in self.coding_sm:
+                if not coding_sm.active:
+                    continue
+                coding_state = coding_sm.next_state(c)
+                if coding_state == MachineState.ERROR:
+                    coding_sm.active = False
+                    self.active_sm_count -= 1
+                    if self.active_sm_count <= 0:
+                        self._state = ProbingState.NOT_ME
+                        return self.state
+                elif coding_state == MachineState.ITS_ME:
+                    self._state = ProbingState.FOUND_IT
+                    self._detected_charset = coding_sm.get_coding_state_machine()
+                    self._detected_language = coding_sm.language
+                    return self.state
+
+        return self.state
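
A short usage sketch (editorial note, not part of the vendored sources): ESC $ B is the ISO-2022-JP escape into JIS X 0208, and per the state tables in escsm.py (the next file) those three bytes alone drive the ISO2022JP machine to ITS_ME, so the prober reports a hit. Import paths assume pip's vendored layout.

from pip._vendor.chardet.enums import LanguageFilter, ProbingState
from pip._vendor.chardet.escprober import EscCharSetProber

prober = EscCharSetProber(lang_filter=LanguageFilter.ALL)
if prober.feed(b"\x1b$B") == ProbingState.FOUND_IT:
    # Expected: ISO-2022-JP Japanese 0.99
    print(prober.charset_name, prober.language, prober.get_confidence())
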
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/escsm.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/escsm.py
new file mode 100644
index 0000000..11d4adf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/escsm.py
@@ -0,0 +1,261 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .codingstatemachinedict import CodingStateMachineDict
+from .enums import MachineState
+
+# fmt: off
+HZ_CLS = (
+    1, 0, 0, 0, 0, 0, 0, 0,  # 00 - 07
+    0, 0, 0, 0, 0, 0, 0, 0,  # 08 - 0f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 10 - 17
+    0, 0, 0, 1, 0, 0, 0, 0,  # 18 - 1f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 20 - 27
+    0, 0, 0, 0, 0, 0, 0, 0,  # 28 - 2f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 30 - 37
+    0, 0, 0, 0, 0, 0, 0, 0,  # 38 - 3f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 40 - 47
+    0, 0, 0, 0, 0, 0, 0, 0,  # 48 - 4f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 50 - 57
+    0, 0, 0, 0, 0, 0, 0, 0,  # 58 - 5f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 60 - 67
+    0, 0, 0, 0, 0, 0, 0, 0,  # 68 - 6f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 70 - 77
+    0, 0, 0, 4, 0, 5, 2, 0,  # 78 - 7f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 80 - 87
+    1, 1, 1, 1, 1, 1, 1, 1,  # 88 - 8f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 90 - 97
+    1, 1, 1, 1, 1, 1, 1, 1,  # 98 - 9f
+    1, 1, 1, 1, 1, 1, 1, 1,  # a0 - a7
+    1, 1, 1, 1, 1, 1, 1, 1,  # a8 - af
+    1, 1, 1, 1, 1, 1, 1, 1,  # b0 - b7
+    1, 1, 1, 1, 1, 1, 1, 1,  # b8 - bf
+    1, 1, 1, 1, 1, 1, 1, 1,  # c0 - c7
+    1, 1, 1, 1, 1, 1, 1, 1,  # c8 - cf
+    1, 1, 1, 1, 1, 1, 1, 1,  # d0 - d7
+    1, 1, 1, 1, 1, 1, 1, 1,  # d8 - df
+    1, 1, 1, 1, 1, 1, 1, 1,  # e0 - e7
+    1, 1, 1, 1, 1, 1, 1, 1,  # e8 - ef
+    1, 1, 1, 1, 1, 1, 1, 1,  # f0 - f7
+    1, 1, 1, 1, 1, 1, 1, 1,  # f8 - ff
+)
+
+HZ_ST = (
+MachineState.START, MachineState.ERROR,      3, MachineState.START, MachineState.START, MachineState.START, MachineState.ERROR, MachineState.ERROR, # 00-07
+MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, # 08-0f
+MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.START, MachineState.START,      4, MachineState.ERROR, # 10-17
+     5, MachineState.ERROR,      6, MachineState.ERROR,      5,      5,      4, MachineState.ERROR, # 18-1f
+     4, MachineState.ERROR,      4,      4,      4, MachineState.ERROR,      4, MachineState.ERROR, # 20-27
+     4, MachineState.ITS_ME, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, # 28-2f
+)
+# fmt: on
+
+HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0)
+
+HZ_SM_MODEL: CodingStateMachineDict = {
+    "class_table": HZ_CLS,
+    "class_factor": 6,
+    "state_table": HZ_ST,
+    "char_len_table": HZ_CHAR_LEN_TABLE,
+    "name": "HZ-GB-2312",
+    "language": "Chinese",
+}
+
+# fmt: off
+ISO2022CN_CLS = (
+    2, 0, 0, 0, 0, 0, 0, 0,  # 00 - 07
+    0, 0, 0, 0, 0, 0, 0, 0,  # 08 - 0f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 10 - 17
+    0, 0, 0, 1, 0, 0, 0, 0,  # 18 - 1f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 20 - 27
+    0, 3, 0, 0, 0, 0, 0, 0,  # 28 - 2f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 30 - 37
+    0, 0, 0, 0, 0, 0, 0, 0,  # 38 - 3f
+    0, 0, 0, 4, 0, 0, 0, 0,  # 40 - 47
+    0, 0, 0, 0, 0, 0, 0, 0,  # 48 - 4f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 50 - 57
+    0, 0, 0, 0, 0, 0, 0, 0,  # 58 - 5f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 60 - 67
+    0, 0, 0, 0, 0, 0, 0, 0,  # 68 - 6f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 70 - 77
+    0, 0, 0, 0, 0, 0, 0, 0,  # 78 - 7f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 80 - 87
+    2, 2, 2, 2, 2, 2, 2, 2,  # 88 - 8f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 90 - 97
+    2, 2, 2, 2, 2, 2, 2, 2,  # 98 - 9f
+    2, 2, 2, 2, 2, 2, 2, 2,  # a0 - a7
+    2, 2, 2, 2, 2, 2, 2, 2,  # a8 - af
+    2, 2, 2, 2, 2, 2, 2, 2,  # b0 - b7
+    2, 2, 2, 2, 2, 2, 2, 2,  # b8 - bf
+    2, 2, 2, 2, 2, 2, 2, 2,  # c0 - c7
+    2, 2, 2, 2, 2, 2, 2, 2,  # c8 - cf
+    2, 2, 2, 2, 2, 2, 2, 2,  # d0 - d7
+    2, 2, 2, 2, 2, 2, 2, 2,  # d8 - df
+    2, 2, 2, 2, 2, 2, 2, 2,  # e0 - e7
+    2, 2, 2, 2, 2, 2, 2, 2,  # e8 - ef
+    2, 2, 2, 2, 2, 2, 2, 2,  # f0 - f7
+    2, 2, 2, 2, 2, 2, 2, 2,  # f8 - ff
+)
+
+ISO2022CN_ST = (
+    MachineState.START,      3, MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, # 00-07
+    MachineState.START, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 08-0f
+    MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, # 10-17
+    MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR,      4, MachineState.ERROR, # 18-1f
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 20-27
+        5,      6, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 28-2f
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 30-37
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, MachineState.START, # 38-3f
+)
+# fmt: on
+
+ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0)
+
+ISO2022CN_SM_MODEL: CodingStateMachineDict = {
+    "class_table": ISO2022CN_CLS,
+    "class_factor": 9,
+    "state_table": ISO2022CN_ST,
+    "char_len_table": ISO2022CN_CHAR_LEN_TABLE,
+    "name": "ISO-2022-CN",
+    "language": "Chinese",
+}
+
+# fmt: off
+ISO2022JP_CLS = (
+    2, 0, 0, 0, 0, 0, 0, 0,  # 00 - 07
+    0, 0, 0, 0, 0, 0, 2, 2,  # 08 - 0f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 10 - 17
+    0, 0, 0, 1, 0, 0, 0, 0,  # 18 - 1f
+    0, 0, 0, 0, 7, 0, 0, 0,  # 20 - 27
+    3, 0, 0, 0, 0, 0, 0, 0,  # 28 - 2f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 30 - 37
+    0, 0, 0, 0, 0, 0, 0, 0,  # 38 - 3f
+    6, 0, 4, 0, 8, 0, 0, 0,  # 40 - 47
+    0, 9, 5, 0, 0, 0, 0, 0,  # 48 - 4f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 50 - 57
+    0, 0, 0, 0, 0, 0, 0, 0,  # 58 - 5f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 60 - 67
+    0, 0, 0, 0, 0, 0, 0, 0,  # 68 - 6f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 70 - 77
+    0, 0, 0, 0, 0, 0, 0, 0,  # 78 - 7f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 80 - 87
+    2, 2, 2, 2, 2, 2, 2, 2,  # 88 - 8f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 90 - 97
+    2, 2, 2, 2, 2, 2, 2, 2,  # 98 - 9f
+    2, 2, 2, 2, 2, 2, 2, 2,  # a0 - a7
+    2, 2, 2, 2, 2, 2, 2, 2,  # a8 - af
+    2, 2, 2, 2, 2, 2, 2, 2,  # b0 - b7
+    2, 2, 2, 2, 2, 2, 2, 2,  # b8 - bf
+    2, 2, 2, 2, 2, 2, 2, 2,  # c0 - c7
+    2, 2, 2, 2, 2, 2, 2, 2,  # c8 - cf
+    2, 2, 2, 2, 2, 2, 2, 2,  # d0 - d7
+    2, 2, 2, 2, 2, 2, 2, 2,  # d8 - df
+    2, 2, 2, 2, 2, 2, 2, 2,  # e0 - e7
+    2, 2, 2, 2, 2, 2, 2, 2,  # e8 - ef
+    2, 2, 2, 2, 2, 2, 2, 2,  # f0 - f7
+    2, 2, 2, 2, 2, 2, 2, 2,  # f8 - ff
+)
+
+ISO2022JP_ST = (
+    MachineState.START,      3, MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, # 00-07
+    MachineState.START, MachineState.START, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 08-0f
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, # 10-17
+    MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, # 18-1f
+    MachineState.ERROR,      5, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR,      4, MachineState.ERROR, MachineState.ERROR, # 20-27
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR,      6, MachineState.ITS_ME, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, # 28-2f
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, # 30-37
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 38-3f
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, MachineState.START, MachineState.START, # 40-47
+)
+# fmt: on
+
+ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
+
+ISO2022JP_SM_MODEL: CodingStateMachineDict = {
+    "class_table": ISO2022JP_CLS,
+    "class_factor": 10,
+    "state_table": ISO2022JP_ST,
+    "char_len_table": ISO2022JP_CHAR_LEN_TABLE,
+    "name": "ISO-2022-JP",
+    "language": "Japanese",
+}
+
+# fmt: off
+ISO2022KR_CLS = (
+    2, 0, 0, 0, 0, 0, 0, 0,  # 00 - 07
+    0, 0, 0, 0, 0, 0, 0, 0,  # 08 - 0f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 10 - 17
+    0, 0, 0, 1, 0, 0, 0, 0,  # 18 - 1f
+    0, 0, 0, 0, 3, 0, 0, 0,  # 20 - 27
+    0, 4, 0, 0, 0, 0, 0, 0,  # 28 - 2f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 30 - 37
+    0, 0, 0, 0, 0, 0, 0, 0,  # 38 - 3f
+    0, 0, 0, 5, 0, 0, 0, 0,  # 40 - 47
+    0, 0, 0, 0, 0, 0, 0, 0,  # 48 - 4f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 50 - 57
+    0, 0, 0, 0, 0, 0, 0, 0,  # 58 - 5f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 60 - 67
+    0, 0, 0, 0, 0, 0, 0, 0,  # 68 - 6f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 70 - 77
+    0, 0, 0, 0, 0, 0, 0, 0,  # 78 - 7f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 80 - 87
+    2, 2, 2, 2, 2, 2, 2, 2,  # 88 - 8f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 90 - 97
+    2, 2, 2, 2, 2, 2, 2, 2,  # 98 - 9f
+    2, 2, 2, 2, 2, 2, 2, 2,  # a0 - a7
+    2, 2, 2, 2, 2, 2, 2, 2,  # a8 - af
+    2, 2, 2, 2, 2, 2, 2, 2,  # b0 - b7
+    2, 2, 2, 2, 2, 2, 2, 2,  # b8 - bf
+    2, 2, 2, 2, 2, 2, 2, 2,  # c0 - c7
+    2, 2, 2, 2, 2, 2, 2, 2,  # c8 - cf
+    2, 2, 2, 2, 2, 2, 2, 2,  # d0 - d7
+    2, 2, 2, 2, 2, 2, 2, 2,  # d8 - df
+    2, 2, 2, 2, 2, 2, 2, 2,  # e0 - e7
+    2, 2, 2, 2, 2, 2, 2, 2,  # e8 - ef
+    2, 2, 2, 2, 2, 2, 2, 2,  # f0 - f7
+    2, 2, 2, 2, 2, 2, 2, 2,  # f8 - ff
+)
+
+ISO2022KR_ST = (
+    MachineState.START,      3, MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START, MachineState.ERROR, MachineState.ERROR, # 00-07
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, # 08-0f
+    MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR,      4, MachineState.ERROR, MachineState.ERROR, # 10-17
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR,      5, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 18-1f
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.START, MachineState.START, MachineState.START, MachineState.START, # 20-27
+)
+# fmt: on
+
+ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0)
+
+ISO2022KR_SM_MODEL: CodingStateMachineDict = {
+    "class_table": ISO2022KR_CLS,
+    "class_factor": 6,
+    "state_table": ISO2022KR_ST,
+    "char_len_table": ISO2022KR_CHAR_LEN_TABLE,
+    "name": "ISO-2022-KR",
+    "language": "Korean",
+}
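
To make the table layout above concrete, a small worked illustration (editorial note, not part of the vendored file): CodingStateMachine.next_state() computes state_table[curr_state * class_factor + class_table[byte]]. Reading the HZ tables off, '~' (0x7e) is class 2 and '{' (0x7b) is class 4, so the HZ-GB-2312 shift-in sequence "~{" walks START (0) to state 3 and then to state 4.

from pip._vendor.chardet.enums import MachineState
from pip._vendor.chardet.escsm import HZ_SM_MODEL

state = MachineState.START                          # 0
for byte in b"~{":                                  # HZ shift-in sequence
    byte_class = HZ_SM_MODEL["class_table"][byte]
    index = state * HZ_SM_MODEL["class_factor"] + byte_class
    state = HZ_SM_MODEL["state_table"][index]
print(state)  # 3 after '~' (HZ_ST[0*6+2]), then 4 after '{' (HZ_ST[3*6+4])
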
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/eucjpprober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/eucjpprober.py
new file mode 100644
index 0000000..39487f4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/eucjpprober.py
@@ -0,0 +1,102 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from typing import Union
+
+from .chardistribution import EUCJPDistributionAnalysis
+from .codingstatemachine import CodingStateMachine
+from .enums import MachineState, ProbingState
+from .jpcntx import EUCJPContextAnalysis
+from .mbcharsetprober import MultiByteCharSetProber
+from .mbcssm import EUCJP_SM_MODEL
+
+
+class EUCJPProber(MultiByteCharSetProber):
+    def __init__(self) -> None:
+        super().__init__()
+        self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL)
+        self.distribution_analyzer = EUCJPDistributionAnalysis()
+        self.context_analyzer = EUCJPContextAnalysis()
+        self.reset()
+
+    def reset(self) -> None:
+        super().reset()
+        self.context_analyzer.reset()
+
+    @property
+    def charset_name(self) -> str:
+        return "EUC-JP"
+
+    @property
+    def language(self) -> str:
+        return "Japanese"
+
+    def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState:
+        assert self.coding_sm is not None
+        assert self.distribution_analyzer is not None
+
+        for i, byte in enumerate(byte_str):
+            # PY3K: byte_str is a byte array, so byte is an int, not a byte
+            coding_state = self.coding_sm.next_state(byte)
+            if coding_state == MachineState.ERROR:
+                self.logger.debug(
+                    "%s %s prober hit error at byte %s",
+                    self.charset_name,
+                    self.language,
+                    i,
+                )
+                self._state = ProbingState.NOT_ME
+                break
+            if coding_state == MachineState.ITS_ME:
+                self._state = ProbingState.FOUND_IT
+                break
+            if coding_state == MachineState.START:
+                char_len = self.coding_sm.get_current_charlen()
+                if i == 0:
+                    self._last_char[1] = byte
+                    self.context_analyzer.feed(self._last_char, char_len)
+                    self.distribution_analyzer.feed(self._last_char, char_len)
+                else:
+                    self.context_analyzer.feed(byte_str[i - 1 : i + 1], char_len)
+                    self.distribution_analyzer.feed(byte_str[i - 1 : i + 1], char_len)
+
+        self._last_char[0] = byte_str[-1]
+
+        if self.state == ProbingState.DETECTING:
+            if self.context_analyzer.got_enough_data() and (
+                self.get_confidence() > self.SHORTCUT_THRESHOLD
+            ):
+                self._state = ProbingState.FOUND_IT
+
+        return self.state
+
+    def get_confidence(self) -> float:
+        assert self.distribution_analyzer is not None
+
+        context_conf = self.context_analyzer.get_confidence()
+        distrib_conf = self.distribution_analyzer.get_confidence()
+        return max(context_conf, distrib_conf)
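
A brief usage sketch (editorial note, not part of the vendored sources): EUCJPProber runs the EUC-JP state machine over the input and, for each completed character, updates both the context analyzer and the distribution analyzer; its confidence is the larger of the two scores. Import paths assume pip's vendored layout, and the sample text is only illustrative.

from pip._vendor.chardet.enums import ProbingState
from pip._vendor.chardet.eucjpprober import EUCJPProber

prober = EUCJPProber()
prober.feed("日本語のテキストです。".encode("euc_jp"))
if prober.state != ProbingState.NOT_ME:
    print(prober.charset_name, prober.get_confidence())  # "EUC-JP" and a score between 0 and 1
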
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/euckrfreq.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/euckrfreq.py
new file mode 100644
index 0000000..7dc3b10
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/euckrfreq.py
@@ -0,0 +1,196 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+# Sampled from about 20M of text material, including literature and computer technology.
+
+# 128  --> 0.79
+# 256  --> 0.92
+# 512  --> 0.986
+# 1024 --> 0.99944
+# 2048 --> 0.99999
+#
+# Ideal Distribution Ratio = 0.98653 / (1-0.98653) = 73.24
+# Random Distribution Ratio = 512 / (2350-512) = 0.279
+#
+# Typical Distribution Ratio
+
+EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0
+
+EUCKR_TABLE_SIZE = 2352
+
+# Char to FreqOrder table
+# fmt: off
+EUCKR_CHAR_TO_FREQ_ORDER = (
+  13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722,  87,
+1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,
+1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488,  20,1733,1269,1734,
+ 945,1400,1735,  47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,
+ 116, 987, 813,1401, 683,  75,1204, 145,1740,1741,1742,1743,  16, 847, 667, 622,
+ 708,1744,1745,1746, 966, 787, 304, 129,1747,  60, 820, 123, 676,1748,1749,1750,
+1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,
+ 344,1763,1764,1765,1766,  89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,
+ 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,
+1780, 337, 751,1058,  28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782,  19,
+1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,
+1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,
+1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,
+1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,
+ 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,
+1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,
+1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,
+1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,
+1412,1837,1838,  39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,
+ 544,1023,1081, 869,  91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,
+1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,
+ 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,
+ 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,
+1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871,
+ 282,  96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420,
+1421, 268,1877,1422,1878,1879,1880, 308,1881,   2, 537,1882,1883,1215,1884,1885,
+ 127, 791,1886,1273,1423,1887,  34, 336, 404, 643,1888, 571, 654, 894, 840,1889,
+   0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893,
+1894,1123,  48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317,
+1899, 694,1900, 909, 734,1424, 572, 866,1425, 691,  85, 524,1010, 543, 394, 841,
+1901,1902,1903,1026,1904,1905,1906,1907,1908,1909,  30, 451, 651, 988, 310,1910,
+1911,1426, 810,1216,  93,1912,1913,1277,1217,1914, 858, 759,  45,  58, 181, 610,
+ 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375,
+1919, 359,1920, 687,1921, 822,1922, 293,1923,1924,  40, 662, 118, 692,  29, 939,
+ 887, 640, 482, 174,1925,  69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870,
+ 217, 854,1163, 823,1927,1928,1929,1930, 834,1931,  78,1932, 859,1933,1063,1934,
+1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888,
+1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950,
+1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065,
+1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002,
+1283,1222,1960,1961,1962,1963,  36, 383, 228, 753, 247, 454,1964, 876, 678,1965,
+1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467,
+  50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285,
+ 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971,   7,
+ 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979,
+1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985,
+ 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994,
+1995, 560, 223,1287,  98,   8, 189, 650, 978,1288,1996,1437,1997,  17, 345, 250,
+ 423, 277, 234, 512, 226,  97, 289,  42, 167,1998, 201,1999,2000, 843, 836, 824,
+ 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003,
+2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008,  71,1440, 745,
+ 619, 688,2009, 829,2010,2011, 147,2012,  33, 948,2013,2014,  74, 224,2015,  61,
+ 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023,
+2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591,  52, 724, 246,2031,2032,
+2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912,
+2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224,
+ 719,1170, 959, 440, 437, 534,  84, 388, 480,1131, 159, 220, 198, 679,2044,1012,
+ 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050,
+2051,2052,2053,  59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681,
+ 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414,
+1444,2064,2065,  41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068,
+2069,1292,2070,2071,1445,2072,1446,2073,2074,  55, 588,  66,1447, 271,1092,2075,
+1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850,
+2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606,
+2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449,
+1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452,
+ 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112,
+2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121,
+2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130,
+  22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174,  73,1096, 231, 274,
+ 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139,
+2141,2142,2143,2144,  11, 374, 844,2145, 154,1232,  46,1461,2146, 838, 830, 721,
+1233, 106,2147,  90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298,
+2150,1462, 761, 565,2151, 686,2152, 649,2153,  72, 173,2154, 460, 415,2155,1463,
+2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747,
+2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177,  23, 530, 285,
+2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187,
+2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193,  10,
+2194, 613, 424,2195, 979, 108, 449, 589,  27, 172,  81,1031,  80, 774, 281, 350,
+1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201,
+2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972,
+2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219,
+2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233,
+2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242,
+2243, 521, 486, 548,2244,2245,2246,1473,1300,  53, 549, 137, 875,  76, 158,2247,
+1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178,
+1475,2249,  82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255,
+2256,  18, 450, 206,2257, 290, 292,1142,2258, 511, 162,  99, 346, 164, 735,2259,
+1476,1477,   4, 554, 343, 798,1099,2260,1100,2261,  43, 171,1303, 139, 215,2262,
+2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702,
+1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272,  67,2273,
+ 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541,
+2282,2283,2284,2285,2286,  70, 852,1071,2287,2288,2289,2290,  21,  56, 509, 117,
+ 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187,
+2294,1046,1479,2295, 340,2296,  63,1047, 230,2297,2298,1305, 763,1306, 101, 800,
+ 808, 494,2299,2300,2301, 903,2302,  37,1072,  14,   5,2303,  79, 675,2304, 312,
+2305,2306,2307,2308,2309,1480,   6,1307,2310,2311,2312,   1, 470,  35,  24, 229,
+2313, 695, 210,  86, 778,  15, 784, 592, 779,  32,  77, 855, 964,2314, 259,2315,
+ 501, 380,2316,2317,  83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484,
+2320,2321,2322,2323,2324,2325,1485,2326,2327, 128,  57,  68, 261,1048, 211, 170,
+1240,  31,2328,  51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335,
+ 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601,
+1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395,
+2351,1490,1491,  62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354,
+1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476,
+2361,2362, 332,  12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035,
+ 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498,
+2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310,
+1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389,
+2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504,
+1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505,
+2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145,
+1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624,
+ 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700,
+2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221,
+2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377,
+ 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448,
+ 915, 489,2449,1514,1184,2450,2451, 515,  64, 427, 495,2452, 583,2453, 483, 485,
+1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705,
+1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465,
+ 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471,
+2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997,
+2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486,
+ 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187,  65,2494,
+ 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771,
+ 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323,
+2499,2500,  49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491,
+  95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510,
+ 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519,
+2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532,
+2533,  25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199,
+ 704, 504, 468, 758, 657,1528, 196,  44, 839,1246, 272, 750,2543, 765, 862,2544,
+2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247,
+1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441,
+ 249,1075,2556,2557,2558, 466, 743,2559,2560,2561,  92, 514, 426, 420, 526,2562,
+2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362,
+2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583,
+2584,1532,  54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465,
+   3, 458,   9,  38,2588, 107, 110, 890, 209,  26, 737, 498,2589,1534,2590, 431,
+ 202,  88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151,
+ 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596,
+2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601,  94, 175, 197, 406,
+2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611,
+2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619,
+1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628,
+2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042,
+ 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642,  # 512, 256
+)
+# fmt: on
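
An editorial aside on the header comment of this file: the "distribution ratio" is the frequency mass of the 512 most common characters divided by the mass of everything else. The two reference values quoted there check out, and the typical ratio of 6.0 defined above sits well between them, which is presumably what lets real EUC-KR text be told apart from a near-uniform spread of bytes. A quick check, as a sketch:

# Reproducing the two figures from the header comment (illustrative only).
ideal = 0.98653 / (1 - 0.98653)   # ≈ 73.24: 512 characters carry 98.653% of real Korean text
random = 512 / (2350 - 512)       # ≈ 0.279: 2350 characters assumed equally likely
print(round(ideal, 2), round(random, 3))   # 73.24 0.279
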
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/euckrprober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/euckrprober.py
new file mode 100644
index 0000000..1fc5de0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/euckrprober.py
@@ -0,0 +1,47 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .chardistribution import EUCKRDistributionAnalysis
+from .codingstatemachine import CodingStateMachine
+from .mbcharsetprober import MultiByteCharSetProber
+from .mbcssm import EUCKR_SM_MODEL
+
+
+class EUCKRProber(MultiByteCharSetProber):
+    def __init__(self) -> None:
+        super().__init__()
+        self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL)
+        self.distribution_analyzer = EUCKRDistributionAnalysis()
+        self.reset()
+
+    @property
+    def charset_name(self) -> str:
+        return "EUC-KR"
+
+    @property
+    def language(self) -> str:
+        return "Korean"
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/euctwfreq.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/euctwfreq.py
new file mode 100644
index 0000000..4900ccc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/euctwfreq.py
@@ -0,0 +1,388 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+# EUCTW frequency table
+# Converted from Big5 work by Taiwan's Mandarin Promotion Council
+#
+
+# 128  --> 0.42261
+# 256  --> 0.57851
+# 512  --> 0.74851
+# 1024 --> 0.89384
+# 2048 --> 0.97583
+#
+# Ideal Distribution Ratio = 0.74851 / (1-0.74851) = 2.98
+# Random Distribution Ratio = 512 / (5401-512) = 0.105
+#
+# Typical Distribution Ratio is about 25% of the ideal one, still much higher than RDR
+
+EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75
+
+# Char to FreqOrder table
+EUCTW_TABLE_SIZE = 5376
+
+# fmt: off
+EUCTW_CHAR_TO_FREQ_ORDER = (
+    1, 1800, 1506, 255, 1431, 198, 9, 82, 6, 7310, 177, 202, 3615, 1256, 2808, 110,  # 2742
+    3735, 33, 3241, 261, 76, 44, 2113, 16, 2931, 2184, 1176, 659, 3868, 26, 3404, 2643,  # 2758
+    1198, 3869, 3313, 4060, 410, 2211, 302, 590, 361, 1963, 8, 204, 58, 4296, 7311, 1931,  # 2774
+    63, 7312, 7313, 317, 1614, 75, 222, 159, 4061, 2412, 1480, 7314, 3500, 3068, 224, 2809,  # 2790
+    3616, 3, 10, 3870, 1471, 29, 2774, 1135, 2852, 1939, 873, 130, 3242, 1123, 312, 7315,  # 2806
+    4297, 2051, 507, 252, 682, 7316, 142, 1914, 124, 206, 2932, 34, 3501, 3173, 64, 604,  # 2822
+    7317, 2494, 1976, 1977, 155, 1990, 645, 641, 1606, 7318, 3405, 337, 72, 406, 7319, 80,  # 2838
+    630, 238, 3174, 1509, 263, 939, 1092, 2644, 756, 1440, 1094, 3406, 449, 69, 2969, 591,  # 2854
+    179, 2095, 471, 115, 2034, 1843, 60, 50, 2970, 134, 806, 1868, 734, 2035, 3407, 180,  # 2870
+    995, 1607, 156, 537, 2893, 688, 7320, 319, 1305, 779, 2144, 514, 2374, 298, 4298, 359,  # 2886
+    2495, 90, 2707, 1338, 663, 11, 906, 1099, 2545, 20, 2436, 182, 532, 1716, 7321, 732,  # 2902
+    1376, 4062, 1311, 1420, 3175, 25, 2312, 1056, 113, 399, 382, 1949, 242, 3408, 2467, 529,  # 2918
+    3243, 475, 1447, 3617, 7322, 117, 21, 656, 810, 1297, 2295, 2329, 3502, 7323, 126, 4063,  # 2934
+    706, 456, 150, 613, 4299, 71, 1118, 2036, 4064, 145, 3069, 85, 835, 486, 2114, 1246,  # 2950
+    1426, 428, 727, 1285, 1015, 800, 106, 623, 303, 1281, 7324, 2127, 2354, 347, 3736, 221,  # 2966
+    3503, 3110, 7325, 1955, 1153, 4065, 83, 296, 1199, 3070, 192, 624, 93, 7326, 822, 1897,  # 2982
+    2810, 3111, 795, 2064, 991, 1554, 1542, 1592, 27, 43, 2853, 859, 139, 1456, 860, 4300,  # 2998
+    437, 712, 3871, 164, 2392, 3112, 695, 211, 3017, 2096, 195, 3872, 1608, 3504, 3505, 3618,  # 3014
+    3873, 234, 811, 2971, 2097, 3874, 2229, 1441, 3506, 1615, 2375, 668, 2076, 1638, 305, 228,  # 3030
+    1664, 4301, 467, 415, 7327, 262, 2098, 1593, 239, 108, 300, 200, 1033, 512, 1247, 2077,  # 3046
+    7328, 7329, 2173, 3176, 3619, 2673, 593, 845, 1062, 3244, 88, 1723, 2037, 3875, 1950, 212,  # 3062
+    266, 152, 149, 468, 1898, 4066, 4302, 77, 187, 7330, 3018, 37, 5, 2972, 7331, 3876,  # 3078
+    7332, 7333, 39, 2517, 4303, 2894, 3177, 2078, 55, 148, 74, 4304, 545, 483, 1474, 1029,  # 3094
+    1665, 217, 1869, 1531, 3113, 1104, 2645, 4067, 24, 172, 3507, 900, 3877, 3508, 3509, 4305,  # 3110
+    32, 1408, 2811, 1312, 329, 487, 2355, 2247, 2708, 784, 2674, 4, 3019, 3314, 1427, 1788,  # 3126
+    188, 109, 499, 7334, 3620, 1717, 1789, 888, 1217, 3020, 4306, 7335, 3510, 7336, 3315, 1520,  # 3142
+    3621, 3878, 196, 1034, 775, 7337, 7338, 929, 1815, 249, 439, 38, 7339, 1063, 7340, 794,  # 3158
+    3879, 1435, 2296, 46, 178, 3245, 2065, 7341, 2376, 7342, 214, 1709, 4307, 804, 35, 707,  # 3174
+    324, 3622, 1601, 2546, 140, 459, 4068, 7343, 7344, 1365, 839, 272, 978, 2257, 2572, 3409,  # 3190
+    2128, 1363, 3623, 1423, 697, 100, 3071, 48, 70, 1231, 495, 3114, 2193, 7345, 1294, 7346,  # 3206
+    2079, 462, 586, 1042, 3246, 853, 256, 988, 185, 2377, 3410, 1698, 434, 1084, 7347, 3411,  # 3222
+    314, 2615, 2775, 4308, 2330, 2331, 569, 2280, 637, 1816, 2518, 757, 1162, 1878, 1616, 3412,  # 3238
+    287, 1577, 2115, 768, 4309, 1671, 2854, 3511, 2519, 1321, 3737, 909, 2413, 7348, 4069, 933,  # 3254
+    3738, 7349, 2052, 2356, 1222, 4310, 765, 2414, 1322, 786, 4311, 7350, 1919, 1462, 1677, 2895,  # 3270
+    1699, 7351, 4312, 1424, 2437, 3115, 3624, 2590, 3316, 1774, 1940, 3413, 3880, 4070, 309, 1369,  # 3286
+    1130, 2812, 364, 2230, 1653, 1299, 3881, 3512, 3882, 3883, 2646, 525, 1085, 3021, 902, 2000,  # 3302
+    1475, 964, 4313, 421, 1844, 1415, 1057, 2281, 940, 1364, 3116, 376, 4314, 4315, 1381, 7,  # 3318
+    2520, 983, 2378, 336, 1710, 2675, 1845, 321, 3414, 559, 1131, 3022, 2742, 1808, 1132, 1313,  # 3334
+    265, 1481, 1857, 7352, 352, 1203, 2813, 3247, 167, 1089, 420, 2814, 776, 792, 1724, 3513,  # 3350
+    4071, 2438, 3248, 7353, 4072, 7354, 446, 229, 333, 2743, 901, 3739, 1200, 1557, 4316, 2647,  # 3366
+    1920, 395, 2744, 2676, 3740, 4073, 1835, 125, 916, 3178, 2616, 4317, 7355, 7356, 3741, 7357,  # 3382
+    7358, 7359, 4318, 3117, 3625, 1133, 2547, 1757, 3415, 1510, 2313, 1409, 3514, 7360, 2145, 438,  # 3398
+    2591, 2896, 2379, 3317, 1068, 958, 3023, 461, 311, 2855, 2677, 4074, 1915, 3179, 4075, 1978,  # 3414
+    383, 750, 2745, 2617, 4076, 274, 539, 385, 1278, 1442, 7361, 1154, 1964, 384, 561, 210,  # 3430
+    98, 1295, 2548, 3515, 7362, 1711, 2415, 1482, 3416, 3884, 2897, 1257, 129, 7363, 3742, 642,  # 3446
+    523, 2776, 2777, 2648, 7364, 141, 2231, 1333, 68, 176, 441, 876, 907, 4077, 603, 2592,  # 3462
+    710, 171, 3417, 404, 549, 18, 3118, 2393, 1410, 3626, 1666, 7365, 3516, 4319, 2898, 4320,  # 3478
+    7366, 2973, 368, 7367, 146, 366, 99, 871, 3627, 1543, 748, 807, 1586, 1185, 22, 2258,  # 3494
+    379, 3743, 3180, 7368, 3181, 505, 1941, 2618, 1991, 1382, 2314, 7369, 380, 2357, 218, 702,  # 3510
+    1817, 1248, 3418, 3024, 3517, 3318, 3249, 7370, 2974, 3628, 930, 3250, 3744, 7371, 59, 7372,  # 3526
+    585, 601, 4078, 497, 3419, 1112, 1314, 4321, 1801, 7373, 1223, 1472, 2174, 7374, 749, 1836,  # 3542
+    690, 1899, 3745, 1772, 3885, 1476, 429, 1043, 1790, 2232, 2116, 917, 4079, 447, 1086, 1629,  # 3558
+    7375, 556, 7376, 7377, 2020, 1654, 844, 1090, 105, 550, 966, 1758, 2815, 1008, 1782, 686,  # 3574
+    1095, 7378, 2282, 793, 1602, 7379, 3518, 2593, 4322, 4080, 2933, 2297, 4323, 3746, 980, 2496,  # 3590
+    544, 353, 527, 4324, 908, 2678, 2899, 7380, 381, 2619, 1942, 1348, 7381, 1341, 1252, 560,  # 3606
+    3072, 7382, 3420, 2856, 7383, 2053, 973, 886, 2080, 143, 4325, 7384, 7385, 157, 3886, 496,  # 3622
+    4081, 57, 840, 540, 2038, 4326, 4327, 3421, 2117, 1445, 970, 2259, 1748, 1965, 2081, 4082,  # 3638
+    3119, 1234, 1775, 3251, 2816, 3629, 773, 1206, 2129, 1066, 2039, 1326, 3887, 1738, 1725, 4083,  # 3654
+    279, 3120, 51, 1544, 2594, 423, 1578, 2130, 2066, 173, 4328, 1879, 7386, 7387, 1583, 264,  # 3670
+    610, 3630, 4329, 2439, 280, 154, 7388, 7389, 7390, 1739, 338, 1282, 3073, 693, 2857, 1411,  # 3686
+    1074, 3747, 2440, 7391, 4330, 7392, 7393, 1240, 952, 2394, 7394, 2900, 1538, 2679, 685, 1483,  # 3702
+    4084, 2468, 1436, 953, 4085, 2054, 4331, 671, 2395, 79, 4086, 2441, 3252, 608, 567, 2680,  # 3718
+    3422, 4087, 4088, 1691, 393, 1261, 1791, 2396, 7395, 4332, 7396, 7397, 7398, 7399, 1383, 1672,  # 3734
+    3748, 3182, 1464, 522, 1119, 661, 1150, 216, 675, 4333, 3888, 1432, 3519, 609, 4334, 2681,  # 3750
+    2397, 7400, 7401, 7402, 4089, 3025, 0, 7403, 2469, 315, 231, 2442, 301, 3319, 4335, 2380,  # 3766
+    7404, 233, 4090, 3631, 1818, 4336, 4337, 7405, 96, 1776, 1315, 2082, 7406, 257, 7407, 1809,  # 3782
+    3632, 2709, 1139, 1819, 4091, 2021, 1124, 2163, 2778, 1777, 2649, 7408, 3074, 363, 1655, 3183,  # 3798
+    7409, 2975, 7410, 7411, 7412, 3889, 1567, 3890, 718, 103, 3184, 849, 1443, 341, 3320, 2934,  # 3814
+    1484, 7413, 1712, 127, 67, 339, 4092, 2398, 679, 1412, 821, 7414, 7415, 834, 738, 351,  # 3830
+    2976, 2146, 846, 235, 1497, 1880, 418, 1992, 3749, 2710, 186, 1100, 2147, 2746, 3520, 1545,  # 3846
+    1355, 2935, 2858, 1377, 583, 3891, 4093, 2573, 2977, 7416, 1298, 3633, 1078, 2549, 3634, 2358,  # 3862
+    78, 3750, 3751, 267, 1289, 2099, 2001, 1594, 4094, 348, 369, 1274, 2194, 2175, 1837, 4338,  # 3878
+    1820, 2817, 3635, 2747, 2283, 2002, 4339, 2936, 2748, 144, 3321, 882, 4340, 3892, 2749, 3423,  # 3894
+    4341, 2901, 7417, 4095, 1726, 320, 7418, 3893, 3026, 788, 2978, 7419, 2818, 1773, 1327, 2859,  # 3910
+    3894, 2819, 7420, 1306, 4342, 2003, 1700, 3752, 3521, 2359, 2650, 787, 2022, 506, 824, 3636,  # 3926
+    534, 323, 4343, 1044, 3322, 2023, 1900, 946, 3424, 7421, 1778, 1500, 1678, 7422, 1881, 4344,  # 3942
+    165, 243, 4345, 3637, 2521, 123, 683, 4096, 764, 4346, 36, 3895, 1792, 589, 2902, 816,  # 3958
+    626, 1667, 3027, 2233, 1639, 1555, 1622, 3753, 3896, 7423, 3897, 2860, 1370, 1228, 1932, 891,  # 3974
+    2083, 2903, 304, 4097, 7424, 292, 2979, 2711, 3522, 691, 2100, 4098, 1115, 4347, 118, 662,  # 3990
+    7425, 611, 1156, 854, 2381, 1316, 2861, 2, 386, 515, 2904, 7426, 7427, 3253, 868, 2234,  # 4006
+    1486, 855, 2651, 785, 2212, 3028, 7428, 1040, 3185, 3523, 7429, 3121, 448, 7430, 1525, 7431,  # 4022
+    2164, 4348, 7432, 3754, 7433, 4099, 2820, 3524, 3122, 503, 818, 3898, 3123, 1568, 814, 676,  # 4038
+    1444, 306, 1749, 7434, 3755, 1416, 1030, 197, 1428, 805, 2821, 1501, 4349, 7435, 7436, 7437,  # 4054
+    1993, 7438, 4350, 7439, 7440, 2195, 13, 2779, 3638, 2980, 3124, 1229, 1916, 7441, 3756, 2131,  # 4070
+    7442, 4100, 4351, 2399, 3525, 7443, 2213, 1511, 1727, 1120, 7444, 7445, 646, 3757, 2443, 307,  # 4086
+    7446, 7447, 1595, 3186, 7448, 7449, 7450, 3639, 1113, 1356, 3899, 1465, 2522, 2523, 7451, 519,  # 4102
+    7452, 128, 2132, 92, 2284, 1979, 7453, 3900, 1512, 342, 3125, 2196, 7454, 2780, 2214, 1980,  # 4118
+    3323, 7455, 290, 1656, 1317, 789, 827, 2360, 7456, 3758, 4352, 562, 581, 3901, 7457, 401,  # 4134
+    4353, 2248, 94, 4354, 1399, 2781, 7458, 1463, 2024, 4355, 3187, 1943, 7459, 828, 1105, 4101,  # 4150
+    1262, 1394, 7460, 4102, 605, 4356, 7461, 1783, 2862, 7462, 2822, 819, 2101, 578, 2197, 2937,  # 4166
+    7463, 1502, 436, 3254, 4103, 3255, 2823, 3902, 2905, 3425, 3426, 7464, 2712, 2315, 7465, 7466,  # 4182
+    2332, 2067, 23, 4357, 193, 826, 3759, 2102, 699, 1630, 4104, 3075, 390, 1793, 1064, 3526,  # 4198
+    7467, 1579, 3076, 3077, 1400, 7468, 4105, 1838, 1640, 2863, 7469, 4358, 4359, 137, 4106, 598,  # 4214
+    3078, 1966, 780, 104, 974, 2938, 7470, 278, 899, 253, 402, 572, 504, 493, 1339, 7471,  # 4230
+    3903, 1275, 4360, 2574, 2550, 7472, 3640, 3029, 3079, 2249, 565, 1334, 2713, 863, 41, 7473,  # 4246
+    7474, 4361, 7475, 1657, 2333, 19, 463, 2750, 4107, 606, 7476, 2981, 3256, 1087, 2084, 1323,  # 4262
+    2652, 2982, 7477, 1631, 1623, 1750, 4108, 2682, 7478, 2864, 791, 2714, 2653, 2334, 232, 2416,  # 4278
+    7479, 2983, 1498, 7480, 2654, 2620, 755, 1366, 3641, 3257, 3126, 2025, 1609, 119, 1917, 3427,  # 4294
+    862, 1026, 4109, 7481, 3904, 3760, 4362, 3905, 4363, 2260, 1951, 2470, 7482, 1125, 817, 4110,  # 4310
+    4111, 3906, 1513, 1766, 2040, 1487, 4112, 3030, 3258, 2824, 3761, 3127, 7483, 7484, 1507, 7485,  # 4326
+    2683, 733, 40, 1632, 1106, 2865, 345, 4113, 841, 2524, 230, 4364, 2984, 1846, 3259, 3428,  # 4342
+    7486, 1263, 986, 3429, 7487, 735, 879, 254, 1137, 857, 622, 1300, 1180, 1388, 1562, 3907,  # 4358
+    3908, 2939, 967, 2751, 2655, 1349, 592, 2133, 1692, 3324, 2985, 1994, 4114, 1679, 3909, 1901,  # 4374
+    2185, 7488, 739, 3642, 2715, 1296, 1290, 7489, 4115, 2198, 2199, 1921, 1563, 2595, 2551, 1870,  # 4390
+    2752, 2986, 7490, 435, 7491, 343, 1108, 596, 17, 1751, 4365, 2235, 3430, 3643, 7492, 4366,  # 4406
+    294, 3527, 2940, 1693, 477, 979, 281, 2041, 3528, 643, 2042, 3644, 2621, 2782, 2261, 1031,  # 4422
+    2335, 2134, 2298, 3529, 4367, 367, 1249, 2552, 7493, 3530, 7494, 4368, 1283, 3325, 2004, 240,  # 4438
+    1762, 3326, 4369, 4370, 836, 1069, 3128, 474, 7495, 2148, 2525, 268, 3531, 7496, 3188, 1521,  # 4454
+    1284, 7497, 1658, 1546, 4116, 7498, 3532, 3533, 7499, 4117, 3327, 2684, 1685, 4118, 961, 1673,  # 4470
+    2622, 190, 2005, 2200, 3762, 4371, 4372, 7500, 570, 2497, 3645, 1490, 7501, 4373, 2623, 3260,  # 4486
+    1956, 4374, 584, 1514, 396, 1045, 1944, 7502, 4375, 1967, 2444, 7503, 7504, 4376, 3910, 619,  # 4502
+    7505, 3129, 3261, 215, 2006, 2783, 2553, 3189, 4377, 3190, 4378, 763, 4119, 3763, 4379, 7506,  # 4518
+    7507, 1957, 1767, 2941, 3328, 3646, 1174, 452, 1477, 4380, 3329, 3130, 7508, 2825, 1253, 2382,  # 4534
+    2186, 1091, 2285, 4120, 492, 7509, 638, 1169, 1824, 2135, 1752, 3911, 648, 926, 1021, 1324,  # 4550
+    4381, 520, 4382, 997, 847, 1007, 892, 4383, 3764, 2262, 1871, 3647, 7510, 2400, 1784, 4384,  # 4566
+    1952, 2942, 3080, 3191, 1728, 4121, 2043, 3648, 4385, 2007, 1701, 3131, 1551, 30, 2263, 4122,  # 4582
+    7511, 2026, 4386, 3534, 7512, 501, 7513, 4123, 594, 3431, 2165, 1821, 3535, 3432, 3536, 3192,  # 4598
+    829, 2826, 4124, 7514, 1680, 3132, 1225, 4125, 7515, 3262, 4387, 4126, 3133, 2336, 7516, 4388,  # 4614
+    4127, 7517, 3912, 3913, 7518, 1847, 2383, 2596, 3330, 7519, 4389, 374, 3914, 652, 4128, 4129,  # 4630
+    375, 1140, 798, 7520, 7521, 7522, 2361, 4390, 2264, 546, 1659, 138, 3031, 2445, 4391, 7523,  # 4646
+    2250, 612, 1848, 910, 796, 3765, 1740, 1371, 825, 3766, 3767, 7524, 2906, 2554, 7525, 692,  # 4662
+    444, 3032, 2624, 801, 4392, 4130, 7526, 1491, 244, 1053, 3033, 4131, 4132, 340, 7527, 3915,  # 4678
+    1041, 2987, 293, 1168, 87, 1357, 7528, 1539, 959, 7529, 2236, 721, 694, 4133, 3768, 219,  # 4694
+    1478, 644, 1417, 3331, 2656, 1413, 1401, 1335, 1389, 3916, 7530, 7531, 2988, 2362, 3134, 1825,  # 4710
+    730, 1515, 184, 2827, 66, 4393, 7532, 1660, 2943, 246, 3332, 378, 1457, 226, 3433, 975,  # 4726
+    3917, 2944, 1264, 3537, 674, 696, 7533, 163, 7534, 1141, 2417, 2166, 713, 3538, 3333, 4394,  # 4742
+    3918, 7535, 7536, 1186, 15, 7537, 1079, 1070, 7538, 1522, 3193, 3539, 276, 1050, 2716, 758,  # 4758
+    1126, 653, 2945, 3263, 7539, 2337, 889, 3540, 3919, 3081, 2989, 903, 1250, 4395, 3920, 3434,  # 4774
+    3541, 1342, 1681, 1718, 766, 3264, 286, 89, 2946, 3649, 7540, 1713, 7541, 2597, 3334, 2990,  # 4790
+    7542, 2947, 2215, 3194, 2866, 7543, 4396, 2498, 2526, 181, 387, 1075, 3921, 731, 2187, 3335,  # 4806
+    7544, 3265, 310, 313, 3435, 2299, 770, 4134, 54, 3034, 189, 4397, 3082, 3769, 3922, 7545,  # 4822
+    1230, 1617, 1849, 355, 3542, 4135, 4398, 3336, 111, 4136, 3650, 1350, 3135, 3436, 3035, 4137,  # 4838
+    2149, 3266, 3543, 7546, 2784, 3923, 3924, 2991, 722, 2008, 7547, 1071, 247, 1207, 2338, 2471,  # 4854
+    1378, 4399, 2009, 864, 1437, 1214, 4400, 373, 3770, 1142, 2216, 667, 4401, 442, 2753, 2555,  # 4870
+    3771, 3925, 1968, 4138, 3267, 1839, 837, 170, 1107, 934, 1336, 1882, 7548, 7549, 2118, 4139,  # 4886
+    2828, 743, 1569, 7550, 4402, 4140, 582, 2384, 1418, 3437, 7551, 1802, 7552, 357, 1395, 1729,  # 4902
+    3651, 3268, 2418, 1564, 2237, 7553, 3083, 3772, 1633, 4403, 1114, 2085, 4141, 1532, 7554, 482,  # 4918
+    2446, 4404, 7555, 7556, 1492, 833, 1466, 7557, 2717, 3544, 1641, 2829, 7558, 1526, 1272, 3652,  # 4934
+    4142, 1686, 1794, 416, 2556, 1902, 1953, 1803, 7559, 3773, 2785, 3774, 1159, 2316, 7560, 2867,  # 4950
+    4405, 1610, 1584, 3036, 2419, 2754, 443, 3269, 1163, 3136, 7561, 7562, 3926, 7563, 4143, 2499,  # 4966
+    3037, 4406, 3927, 3137, 2103, 1647, 3545, 2010, 1872, 4144, 7564, 4145, 431, 3438, 7565, 250,  # 4982
+    97, 81, 4146, 7566, 1648, 1850, 1558, 160, 848, 7567, 866, 740, 1694, 7568, 2201, 2830,  # 4998
+    3195, 4147, 4407, 3653, 1687, 950, 2472, 426, 469, 3196, 3654, 3655, 3928, 7569, 7570, 1188,  # 5014
+    424, 1995, 861, 3546, 4148, 3775, 2202, 2685, 168, 1235, 3547, 4149, 7571, 2086, 1674, 4408,  # 5030
+    3337, 3270, 220, 2557, 1009, 7572, 3776, 670, 2992, 332, 1208, 717, 7573, 7574, 3548, 2447,  # 5046
+    3929, 3338, 7575, 513, 7576, 1209, 2868, 3339, 3138, 4409, 1080, 7577, 7578, 7579, 7580, 2527,  # 5062
+    3656, 3549, 815, 1587, 3930, 3931, 7581, 3550, 3439, 3777, 1254, 4410, 1328, 3038, 1390, 3932,  # 5078
+    1741, 3933, 3778, 3934, 7582, 236, 3779, 2448, 3271, 7583, 7584, 3657, 3780, 1273, 3781, 4411,  # 5094
+    7585, 308, 7586, 4412, 245, 4413, 1851, 2473, 1307, 2575, 430, 715, 2136, 2449, 7587, 270,  # 5110
+    199, 2869, 3935, 7588, 3551, 2718, 1753, 761, 1754, 725, 1661, 1840, 4414, 3440, 3658, 7589,  # 5126
+    7590, 587, 14, 3272, 227, 2598, 326, 480, 2265, 943, 2755, 3552, 291, 650, 1883, 7591,  # 5142
+    1702, 1226, 102, 1547, 62, 3441, 904, 4415, 3442, 1164, 4150, 7592, 7593, 1224, 1548, 2756,  # 5158
+    391, 498, 1493, 7594, 1386, 1419, 7595, 2055, 1177, 4416, 813, 880, 1081, 2363, 566, 1145,  # 5174
+    4417, 2286, 1001, 1035, 2558, 2599, 2238, 394, 1286, 7596, 7597, 2068, 7598, 86, 1494, 1730,  # 5190
+    3936, 491, 1588, 745, 897, 2948, 843, 3340, 3937, 2757, 2870, 3273, 1768, 998, 2217, 2069,  # 5206
+    397, 1826, 1195, 1969, 3659, 2993, 3341, 284, 7599, 3782, 2500, 2137, 2119, 1903, 7600, 3938,  # 5222
+    2150, 3939, 4151, 1036, 3443, 1904, 114, 2559, 4152, 209, 1527, 7601, 7602, 2949, 2831, 2625,  # 5238
+    2385, 2719, 3139, 812, 2560, 7603, 3274, 7604, 1559, 737, 1884, 3660, 1210, 885, 28, 2686,  # 5254
+    3553, 3783, 7605, 4153, 1004, 1779, 4418, 7606, 346, 1981, 2218, 2687, 4419, 3784, 1742, 797,  # 5270
+    1642, 3940, 1933, 1072, 1384, 2151, 896, 3941, 3275, 3661, 3197, 2871, 3554, 7607, 2561, 1958,  # 5286
+    4420, 2450, 1785, 7608, 7609, 7610, 3942, 4154, 1005, 1308, 3662, 4155, 2720, 4421, 4422, 1528,  # 5302
+    2600, 161, 1178, 4156, 1982, 987, 4423, 1101, 4157, 631, 3943, 1157, 3198, 2420, 1343, 1241,  # 5318
+    1016, 2239, 2562, 372, 877, 2339, 2501, 1160, 555, 1934, 911, 3944, 7611, 466, 1170, 169,  # 5334
+    1051, 2907, 2688, 3663, 2474, 2994, 1182, 2011, 2563, 1251, 2626, 7612, 992, 2340, 3444, 1540,  # 5350
+    2721, 1201, 2070, 2401, 1996, 2475, 7613, 4424, 528, 1922, 2188, 1503, 1873, 1570, 2364, 3342,  # 5366
+    3276, 7614, 557, 1073, 7615, 1827, 3445, 2087, 2266, 3140, 3039, 3084, 767, 3085, 2786, 4425,  # 5382
+    1006, 4158, 4426, 2341, 1267, 2176, 3664, 3199, 778, 3945, 3200, 2722, 1597, 2657, 7616, 4427,  # 5398
+    7617, 3446, 7618, 7619, 7620, 3277, 2689, 1433, 3278, 131, 95, 1504, 3946, 723, 4159, 3141,  # 5414
+    1841, 3555, 2758, 2189, 3947, 2027, 2104, 3665, 7621, 2995, 3948, 1218, 7622, 3343, 3201, 3949,  # 5430
+    4160, 2576, 248, 1634, 3785, 912, 7623, 2832, 3666, 3040, 3786, 654, 53, 7624, 2996, 7625,  # 5446
+    1688, 4428, 777, 3447, 1032, 3950, 1425, 7626, 191, 820, 2120, 2833, 971, 4429, 931, 3202,  # 5462
+    135, 664, 783, 3787, 1997, 772, 2908, 1935, 3951, 3788, 4430, 2909, 3203, 282, 2723, 640,  # 5478
+    1372, 3448, 1127, 922, 325, 3344, 7627, 7628, 711, 2044, 7629, 7630, 3952, 2219, 2787, 1936,  # 5494
+    3953, 3345, 2220, 2251, 3789, 2300, 7631, 4431, 3790, 1258, 3279, 3954, 3204, 2138, 2950, 3955,  # 5510
+    3956, 7632, 2221, 258, 3205, 4432, 101, 1227, 7633, 3280, 1755, 7634, 1391, 3281, 7635, 2910,  # 5526
+    2056, 893, 7636, 7637, 7638, 1402, 4161, 2342, 7639, 7640, 3206, 3556, 7641, 7642, 878, 1325,  # 5542
+    1780, 2788, 4433, 259, 1385, 2577, 744, 1183, 2267, 4434, 7643, 3957, 2502, 7644, 684, 1024,  # 5558
+    4162, 7645, 472, 3557, 3449, 1165, 3282, 3958, 3959, 322, 2152, 881, 455, 1695, 1152, 1340,  # 5574
+    660, 554, 2153, 4435, 1058, 4436, 4163, 830, 1065, 3346, 3960, 4437, 1923, 7646, 1703, 1918,  # 5590
+    7647, 932, 2268, 122, 7648, 4438, 947, 677, 7649, 3791, 2627, 297, 1905, 1924, 2269, 4439,  # 5606
+    2317, 3283, 7650, 7651, 4164, 7652, 4165, 84, 4166, 112, 989, 7653, 547, 1059, 3961, 701,  # 5622
+    3558, 1019, 7654, 4167, 7655, 3450, 942, 639, 457, 2301, 2451, 993, 2951, 407, 851, 494,  # 5638
+    4440, 3347, 927, 7656, 1237, 7657, 2421, 3348, 573, 4168, 680, 921, 2911, 1279, 1874, 285,  # 5654
+    790, 1448, 1983, 719, 2167, 7658, 7659, 4441, 3962, 3963, 1649, 7660, 1541, 563, 7661, 1077,  # 5670
+    7662, 3349, 3041, 3451, 511, 2997, 3964, 3965, 3667, 3966, 1268, 2564, 3350, 3207, 4442, 4443,  # 5686
+    7663, 535, 1048, 1276, 1189, 2912, 2028, 3142, 1438, 1373, 2834, 2952, 1134, 2012, 7664, 4169,  # 5702
+    1238, 2578, 3086, 1259, 7665, 700, 7666, 2953, 3143, 3668, 4170, 7667, 4171, 1146, 1875, 1906,  # 5718
+    4444, 2601, 3967, 781, 2422, 132, 1589, 203, 147, 273, 2789, 2402, 898, 1786, 2154, 3968,  # 5734
+    3969, 7668, 3792, 2790, 7669, 7670, 4445, 4446, 7671, 3208, 7672, 1635, 3793, 965, 7673, 1804,  # 5750
+    2690, 1516, 3559, 1121, 1082, 1329, 3284, 3970, 1449, 3794, 65, 1128, 2835, 2913, 2759, 1590,  # 5766
+    3795, 7674, 7675, 12, 2658, 45, 976, 2579, 3144, 4447, 517, 2528, 1013, 1037, 3209, 7676,  # 5782
+    3796, 2836, 7677, 3797, 7678, 3452, 7679, 2602, 614, 1998, 2318, 3798, 3087, 2724, 2628, 7680,  # 5798
+    2580, 4172, 599, 1269, 7681, 1810, 3669, 7682, 2691, 3088, 759, 1060, 489, 1805, 3351, 3285,  # 5814
+    1358, 7683, 7684, 2386, 1387, 1215, 2629, 2252, 490, 7685, 7686, 4173, 1759, 2387, 2343, 7687,  # 5830
+    4448, 3799, 1907, 3971, 2630, 1806, 3210, 4449, 3453, 3286, 2760, 2344, 874, 7688, 7689, 3454,  # 5846
+    3670, 1858, 91, 2914, 3671, 3042, 3800, 4450, 7690, 3145, 3972, 2659, 7691, 3455, 1202, 1403,  # 5862
+    3801, 2954, 2529, 1517, 2503, 4451, 3456, 2504, 7692, 4452, 7693, 2692, 1885, 1495, 1731, 3973,  # 5878
+    2365, 4453, 7694, 2029, 7695, 7696, 3974, 2693, 1216, 237, 2581, 4174, 2319, 3975, 3802, 4454,  # 5894
+    4455, 2694, 3560, 3457, 445, 4456, 7697, 7698, 7699, 7700, 2761, 61, 3976, 3672, 1822, 3977,  # 5910
+    7701, 687, 2045, 935, 925, 405, 2660, 703, 1096, 1859, 2725, 4457, 3978, 1876, 1367, 2695,  # 5926
+    3352, 918, 2105, 1781, 2476, 334, 3287, 1611, 1093, 4458, 564, 3146, 3458, 3673, 3353, 945,  # 5942
+    2631, 2057, 4459, 7702, 1925, 872, 4175, 7703, 3459, 2696, 3089, 349, 4176, 3674, 3979, 4460,  # 5958
+    3803, 4177, 3675, 2155, 3980, 4461, 4462, 4178, 4463, 2403, 2046, 782, 3981, 400, 251, 4179,  # 5974
+    1624, 7704, 7705, 277, 3676, 299, 1265, 476, 1191, 3804, 2121, 4180, 4181, 1109, 205, 7706,  # 5990
+    2582, 1000, 2156, 3561, 1860, 7707, 7708, 7709, 4464, 7710, 4465, 2565, 107, 2477, 2157, 3982,  # 6006
+    3460, 3147, 7711, 1533, 541, 1301, 158, 753, 4182, 2872, 3562, 7712, 1696, 370, 1088, 4183,  # 6022
+    4466, 3563, 579, 327, 440, 162, 2240, 269, 1937, 1374, 3461, 968, 3043, 56, 1396, 3090,  # 6038
+    2106, 3288, 3354, 7713, 1926, 2158, 4467, 2998, 7714, 3564, 7715, 7716, 3677, 4468, 2478, 7717,  # 6054
+    2791, 7718, 1650, 4469, 7719, 2603, 7720, 7721, 3983, 2661, 3355, 1149, 3356, 3984, 3805, 3985,  # 6070
+    7722, 1076, 49, 7723, 951, 3211, 3289, 3290, 450, 2837, 920, 7724, 1811, 2792, 2366, 4184,  # 6086
+    1908, 1138, 2367, 3806, 3462, 7725, 3212, 4470, 1909, 1147, 1518, 2423, 4471, 3807, 7726, 4472,  # 6102
+    2388, 2604, 260, 1795, 3213, 7727, 7728, 3808, 3291, 708, 7729, 3565, 1704, 7730, 3566, 1351,  # 6118
+    1618, 3357, 2999, 1886, 944, 4185, 3358, 4186, 3044, 3359, 4187, 7731, 3678, 422, 413, 1714,  # 6134
+    3292, 500, 2058, 2345, 4188, 2479, 7732, 1344, 1910, 954, 7733, 1668, 7734, 7735, 3986, 2404,  # 6150
+    4189, 3567, 3809, 4190, 7736, 2302, 1318, 2505, 3091, 133, 3092, 2873, 4473, 629, 31, 2838,  # 6166
+    2697, 3810, 4474, 850, 949, 4475, 3987, 2955, 1732, 2088, 4191, 1496, 1852, 7737, 3988, 620,  # 6182
+    3214, 981, 1242, 3679, 3360, 1619, 3680, 1643, 3293, 2139, 2452, 1970, 1719, 3463, 2168, 7738,  # 6198
+    3215, 7739, 7740, 3361, 1828, 7741, 1277, 4476, 1565, 2047, 7742, 1636, 3568, 3093, 7743, 869,  # 6214
+    2839, 655, 3811, 3812, 3094, 3989, 3000, 3813, 1310, 3569, 4477, 7744, 7745, 7746, 1733, 558,  # 6230
+    4478, 3681, 335, 1549, 3045, 1756, 4192, 3682, 1945, 3464, 1829, 1291, 1192, 470, 2726, 2107,  # 6246
+    2793, 913, 1054, 3990, 7747, 1027, 7748, 3046, 3991, 4479, 982, 2662, 3362, 3148, 3465, 3216,  # 6262
+    3217, 1946, 2794, 7749, 571, 4480, 7750, 1830, 7751, 3570, 2583, 1523, 2424, 7752, 2089, 984,  # 6278
+    4481, 3683, 1959, 7753, 3684, 852, 923, 2795, 3466, 3685, 969, 1519, 999, 2048, 2320, 1705,  # 6294
+    7754, 3095, 615, 1662, 151, 597, 3992, 2405, 2321, 1049, 275, 4482, 3686, 4193, 568, 3687,  # 6310
+    3571, 2480, 4194, 3688, 7755, 2425, 2270, 409, 3218, 7756, 1566, 2874, 3467, 1002, 769, 2840,  # 6326
+    194, 2090, 3149, 3689, 2222, 3294, 4195, 628, 1505, 7757, 7758, 1763, 2177, 3001, 3993, 521,  # 6342
+    1161, 2584, 1787, 2203, 2406, 4483, 3994, 1625, 4196, 4197, 412, 42, 3096, 464, 7759, 2632,  # 6358
+    4484, 3363, 1760, 1571, 2875, 3468, 2530, 1219, 2204, 3814, 2633, 2140, 2368, 4485, 4486, 3295,  # 6374
+    1651, 3364, 3572, 7760, 7761, 3573, 2481, 3469, 7762, 3690, 7763, 7764, 2271, 2091, 460, 7765,  # 6390
+    4487, 7766, 3002, 962, 588, 3574, 289, 3219, 2634, 1116, 52, 7767, 3047, 1796, 7768, 7769,  # 6406
+    7770, 1467, 7771, 1598, 1143, 3691, 4198, 1984, 1734, 1067, 4488, 1280, 3365, 465, 4489, 1572,  # 6422
+    510, 7772, 1927, 2241, 1812, 1644, 3575, 7773, 4490, 3692, 7774, 7775, 2663, 1573, 1534, 7776,  # 6438
+    7777, 4199, 536, 1807, 1761, 3470, 3815, 3150, 2635, 7778, 7779, 7780, 4491, 3471, 2915, 1911,  # 6454
+    2796, 7781, 3296, 1122, 377, 3220, 7782, 360, 7783, 7784, 4200, 1529, 551, 7785, 2059, 3693,  # 6470
+    1769, 2426, 7786, 2916, 4201, 3297, 3097, 2322, 2108, 2030, 4492, 1404, 136, 1468, 1479, 672,  # 6486
+    1171, 3221, 2303, 271, 3151, 7787, 2762, 7788, 2049, 678, 2727, 865, 1947, 4493, 7789, 2013,  # 6502
+    3995, 2956, 7790, 2728, 2223, 1397, 3048, 3694, 4494, 4495, 1735, 2917, 3366, 3576, 7791, 3816,  # 6518
+    509, 2841, 2453, 2876, 3817, 7792, 7793, 3152, 3153, 4496, 4202, 2531, 4497, 2304, 1166, 1010,  # 6534
+    552, 681, 1887, 7794, 7795, 2957, 2958, 3996, 1287, 1596, 1861, 3154, 358, 453, 736, 175,  # 6550
+    478, 1117, 905, 1167, 1097, 7796, 1853, 1530, 7797, 1706, 7798, 2178, 3472, 2287, 3695, 3473,  # 6566
+    3577, 4203, 2092, 4204, 7799, 3367, 1193, 2482, 4205, 1458, 2190, 2205, 1862, 1888, 1421, 3298,  # 6582
+    2918, 3049, 2179, 3474, 595, 2122, 7800, 3997, 7801, 7802, 4206, 1707, 2636, 223, 3696, 1359,  # 6598
+    751, 3098, 183, 3475, 7803, 2797, 3003, 419, 2369, 633, 704, 3818, 2389, 241, 7804, 7805,  # 6614
+    7806, 838, 3004, 3697, 2272, 2763, 2454, 3819, 1938, 2050, 3998, 1309, 3099, 2242, 1181, 7807,  # 6630
+    1136, 2206, 3820, 2370, 1446, 4207, 2305, 4498, 7808, 7809, 4208, 1055, 2605, 484, 3698, 7810,  # 6646
+    3999, 625, 4209, 2273, 3368, 1499, 4210, 4000, 7811, 4001, 4211, 3222, 2274, 2275, 3476, 7812,  # 6662
+    7813, 2764, 808, 2606, 3699, 3369, 4002, 4212, 3100, 2532, 526, 3370, 3821, 4213, 955, 7814,  # 6678
+    1620, 4214, 2637, 2427, 7815, 1429, 3700, 1669, 1831, 994, 928, 7816, 3578, 1260, 7817, 7818,  # 6694
+    7819, 1948, 2288, 741, 2919, 1626, 4215, 2729, 2455, 867, 1184, 362, 3371, 1392, 7820, 7821,  # 6710
+    4003, 4216, 1770, 1736, 3223, 2920, 4499, 4500, 1928, 2698, 1459, 1158, 7822, 3050, 3372, 2877,  # 6726
+    1292, 1929, 2506, 2842, 3701, 1985, 1187, 2071, 2014, 2607, 4217, 7823, 2566, 2507, 2169, 3702,  # 6742
+    2483, 3299, 7824, 3703, 4501, 7825, 7826, 666, 1003, 3005, 1022, 3579, 4218, 7827, 4502, 1813,  # 6758
+    2253, 574, 3822, 1603, 295, 1535, 705, 3823, 4219, 283, 858, 417, 7828, 7829, 3224, 4503,  # 6774
+    4504, 3051, 1220, 1889, 1046, 2276, 2456, 4004, 1393, 1599, 689, 2567, 388, 4220, 7830, 2484,  # 6790
+    802, 7831, 2798, 3824, 2060, 1405, 2254, 7832, 4505, 3825, 2109, 1052, 1345, 3225, 1585, 7833,  # 6806
+    809, 7834, 7835, 7836, 575, 2730, 3477, 956, 1552, 1469, 1144, 2323, 7837, 2324, 1560, 2457,  # 6822
+    3580, 3226, 4005, 616, 2207, 3155, 2180, 2289, 7838, 1832, 7839, 3478, 4506, 7840, 1319, 3704,  # 6838
+    3705, 1211, 3581, 1023, 3227, 1293, 2799, 7841, 7842, 7843, 3826, 607, 2306, 3827, 762, 2878,  # 6854
+    1439, 4221, 1360, 7844, 1485, 3052, 7845, 4507, 1038, 4222, 1450, 2061, 2638, 4223, 1379, 4508,  # 6870
+    2585, 7846, 7847, 4224, 1352, 1414, 2325, 2921, 1172, 7848, 7849, 3828, 3829, 7850, 1797, 1451,  # 6886
+    7851, 7852, 7853, 7854, 2922, 4006, 4007, 2485, 2346, 411, 4008, 4009, 3582, 3300, 3101, 4509,  # 6902
+    1561, 2664, 1452, 4010, 1375, 7855, 7856, 47, 2959, 316, 7857, 1406, 1591, 2923, 3156, 7858,  # 6918
+    1025, 2141, 3102, 3157, 354, 2731, 884, 2224, 4225, 2407, 508, 3706, 726, 3583, 996, 2428,  # 6934
+    3584, 729, 7859, 392, 2191, 1453, 4011, 4510, 3707, 7860, 7861, 2458, 3585, 2608, 1675, 2800,  # 6950
+    919, 2347, 2960, 2348, 1270, 4511, 4012, 73, 7862, 7863, 647, 7864, 3228, 2843, 2255, 1550,  # 6966
+    1346, 3006, 7865, 1332, 883, 3479, 7866, 7867, 7868, 7869, 3301, 2765, 7870, 1212, 831, 1347,  # 6982
+    4226, 4512, 2326, 3830, 1863, 3053, 720, 3831, 4513, 4514, 3832, 7871, 4227, 7872, 7873, 4515,  # 6998
+    7874, 7875, 1798, 4516, 3708, 2609, 4517, 3586, 1645, 2371, 7876, 7877, 2924, 669, 2208, 2665,  # 7014
+    2429, 7878, 2879, 7879, 7880, 1028, 3229, 7881, 4228, 2408, 7882, 2256, 1353, 7883, 7884, 4518,  # 7030
+    3158, 518, 7885, 4013, 7886, 4229, 1960, 7887, 2142, 4230, 7888, 7889, 3007, 2349, 2350, 3833,  # 7046
+    516, 1833, 1454, 4014, 2699, 4231, 4519, 2225, 2610, 1971, 1129, 3587, 7890, 2766, 7891, 2961,  # 7062
+    1422, 577, 1470, 3008, 1524, 3373, 7892, 7893, 432, 4232, 3054, 3480, 7894, 2586, 1455, 2508,  # 7078
+    2226, 1972, 1175, 7895, 1020, 2732, 4015, 3481, 4520, 7896, 2733, 7897, 1743, 1361, 3055, 3482,  # 7094
+    2639, 4016, 4233, 4521, 2290, 895, 924, 4234, 2170, 331, 2243, 3056, 166, 1627, 3057, 1098,  # 7110
+    7898, 1232, 2880, 2227, 3374, 4522, 657, 403, 1196, 2372, 542, 3709, 3375, 1600, 4235, 3483,  # 7126
+    7899, 4523, 2767, 3230, 576, 530, 1362, 7900, 4524, 2533, 2666, 3710, 4017, 7901, 842, 3834,  # 7142
+    7902, 2801, 2031, 1014, 4018, 213, 2700, 3376, 665, 621, 4236, 7903, 3711, 2925, 2430, 7904,  # 7158
+    2431, 3302, 3588, 3377, 7905, 4237, 2534, 4238, 4525, 3589, 1682, 4239, 3484, 1380, 7906, 724,  # 7174
+    2277, 600, 1670, 7907, 1337, 1233, 4526, 3103, 2244, 7908, 1621, 4527, 7909, 651, 4240, 7910,  # 7190
+    1612, 4241, 2611, 7911, 2844, 7912, 2734, 2307, 3058, 7913, 716, 2459, 3059, 174, 1255, 2701,  # 7206
+    4019, 3590, 548, 1320, 1398, 728, 4020, 1574, 7914, 1890, 1197, 3060, 4021, 7915, 3061, 3062,  # 7222
+    3712, 3591, 3713, 747, 7916, 635, 4242, 4528, 7917, 7918, 7919, 4243, 7920, 7921, 4529, 7922,  # 7238
+    3378, 4530, 2432, 451, 7923, 3714, 2535, 2072, 4244, 2735, 4245, 4022, 7924, 1764, 4531, 7925,  # 7254
+    4246, 350, 7926, 2278, 2390, 2486, 7927, 4247, 4023, 2245, 1434, 4024, 488, 4532, 458, 4248,  # 7270
+    4025, 3715, 771, 1330, 2391, 3835, 2568, 3159, 2159, 2409, 1553, 2667, 3160, 4249, 7928, 2487,  # 7286
+    2881, 2612, 1720, 2702, 4250, 3379, 4533, 7929, 2536, 4251, 7930, 3231, 4252, 2768, 7931, 2015,  # 7302
+    2736, 7932, 1155, 1017, 3716, 3836, 7933, 3303, 2308, 201, 1864, 4253, 1430, 7934, 4026, 7935,  # 7318
+    7936, 7937, 7938, 7939, 4254, 1604, 7940, 414, 1865, 371, 2587, 4534, 4535, 3485, 2016, 3104,  # 7334
+    4536, 1708, 960, 4255, 887, 389, 2171, 1536, 1663, 1721, 7941, 2228, 4027, 2351, 2926, 1580,  # 7350
+    7942, 7943, 7944, 1744, 7945, 2537, 4537, 4538, 7946, 4539, 7947, 2073, 7948, 7949, 3592, 3380,  # 7366
+    2882, 4256, 7950, 4257, 2640, 3381, 2802, 673, 2703, 2460, 709, 3486, 4028, 3593, 4258, 7951,  # 7382
+    1148, 502, 634, 7952, 7953, 1204, 4540, 3594, 1575, 4541, 2613, 3717, 7954, 3718, 3105, 948,  # 7398
+    3232, 121, 1745, 3837, 1110, 7955, 4259, 3063, 2509, 3009, 4029, 3719, 1151, 1771, 3838, 1488,  # 7414
+    4030, 1986, 7956, 2433, 3487, 7957, 7958, 2093, 7959, 4260, 3839, 1213, 1407, 2803, 531, 2737,  # 7430
+    2538, 3233, 1011, 1537, 7960, 2769, 4261, 3106, 1061, 7961, 3720, 3721, 1866, 2883, 7962, 2017,  # 7446
+    120, 4262, 4263, 2062, 3595, 3234, 2309, 3840, 2668, 3382, 1954, 4542, 7963, 7964, 3488, 1047,  # 7462
+    2704, 1266, 7965, 1368, 4543, 2845, 649, 3383, 3841, 2539, 2738, 1102, 2846, 2669, 7966, 7967,  # 7478
+    1999, 7968, 1111, 3596, 2962, 7969, 2488, 3842, 3597, 2804, 1854, 3384, 3722, 7970, 7971, 3385,  # 7494
+    2410, 2884, 3304, 3235, 3598, 7972, 2569, 7973, 3599, 2805, 4031, 1460, 856, 7974, 3600, 7975,  # 7510
+    2885, 2963, 7976, 2886, 3843, 7977, 4264, 632, 2510, 875, 3844, 1697, 3845, 2291, 7978, 7979,  # 7526
+    4544, 3010, 1239, 580, 4545, 4265, 7980, 914, 936, 2074, 1190, 4032, 1039, 2123, 7981, 7982,  # 7542
+    7983, 3386, 1473, 7984, 1354, 4266, 3846, 7985, 2172, 3064, 4033, 915, 3305, 4267, 4268, 3306,  # 7558
+    1605, 1834, 7986, 2739, 398, 3601, 4269, 3847, 4034, 328, 1912, 2847, 4035, 3848, 1331, 4270,  # 7574
+    3011, 937, 4271, 7987, 3602, 4036, 4037, 3387, 2160, 4546, 3388, 524, 742, 538, 3065, 1012,  # 7590
+    7988, 7989, 3849, 2461, 7990, 658, 1103, 225, 3850, 7991, 7992, 4547, 7993, 4548, 7994, 3236,  # 7606
+    1243, 7995, 4038, 963, 2246, 4549, 7996, 2705, 3603, 3161, 7997, 7998, 2588, 2327, 7999, 4550,  # 7622
+    8000, 8001, 8002, 3489, 3307, 957, 3389, 2540, 2032, 1930, 2927, 2462, 870, 2018, 3604, 1746,  # 7638
+    2770, 2771, 2434, 2463, 8003, 3851, 8004, 3723, 3107, 3724, 3490, 3390, 3725, 8005, 1179, 3066,  # 7654
+    8006, 3162, 2373, 4272, 3726, 2541, 3163, 3108, 2740, 4039, 8007, 3391, 1556, 2542, 2292, 977,  # 7670
+    2887, 2033, 4040, 1205, 3392, 8008, 1765, 3393, 3164, 2124, 1271, 1689, 714, 4551, 3491, 8009,  # 7686
+    2328, 3852, 533, 4273, 3605, 2181, 617, 8010, 2464, 3308, 3492, 2310, 8011, 8012, 3165, 8013,  # 7702
+    8014, 3853, 1987, 618, 427, 2641, 3493, 3394, 8015, 8016, 1244, 1690, 8017, 2806, 4274, 4552,  # 7718
+    8018, 3494, 8019, 8020, 2279, 1576, 473, 3606, 4275, 3395, 972, 8021, 3607, 8022, 3067, 8023,  # 7734
+    8024, 4553, 4554, 8025, 3727, 4041, 4042, 8026, 153, 4555, 356, 8027, 1891, 2888, 4276, 2143,  # 7750
+    408, 803, 2352, 8028, 3854, 8029, 4277, 1646, 2570, 2511, 4556, 4557, 3855, 8030, 3856, 4278,  # 7766
+    8031, 2411, 3396, 752, 8032, 8033, 1961, 2964, 8034, 746, 3012, 2465, 8035, 4279, 3728, 698,  # 7782
+    4558, 1892, 4280, 3608, 2543, 4559, 3609, 3857, 8036, 3166, 3397, 8037, 1823, 1302, 4043, 2706,  # 7798
+    3858, 1973, 4281, 8038, 4282, 3167, 823, 1303, 1288, 1236, 2848, 3495, 4044, 3398, 774, 3859,  # 7814
+    8039, 1581, 4560, 1304, 2849, 3860, 4561, 8040, 2435, 2161, 1083, 3237, 4283, 4045, 4284, 344,  # 7830
+    1173, 288, 2311, 454, 1683, 8041, 8042, 1461, 4562, 4046, 2589, 8043, 8044, 4563, 985, 894,  # 7846
+    8045, 3399, 3168, 8046, 1913, 2928, 3729, 1988, 8047, 2110, 1974, 8048, 4047, 8049, 2571, 1194,  # 7862
+    425, 8050, 4564, 3169, 1245, 3730, 4285, 8051, 8052, 2850, 8053, 636, 4565, 1855, 3861, 760,  # 7878
+    1799, 8054, 4286, 2209, 1508, 4566, 4048, 1893, 1684, 2293, 8055, 8056, 8057, 4287, 4288, 2210,  # 7894
+    479, 8058, 8059, 832, 8060, 4049, 2489, 8061, 2965, 2490, 3731, 990, 3109, 627, 1814, 2642,  # 7910
+    4289, 1582, 4290, 2125, 2111, 3496, 4567, 8062, 799, 4291, 3170, 8063, 4568, 2112, 1737, 3013,  # 7926
+    1018, 543, 754, 4292, 3309, 1676, 4569, 4570, 4050, 8064, 1489, 8065, 3497, 8066, 2614, 2889,  # 7942
+    4051, 8067, 8068, 2966, 8069, 8070, 8071, 8072, 3171, 4571, 4572, 2182, 1722, 8073, 3238, 3239,  # 7958
+    1842, 3610, 1715, 481, 365, 1975, 1856, 8074, 8075, 1962, 2491, 4573, 8076, 2126, 3611, 3240,  # 7974
+    433, 1894, 2063, 2075, 8077, 602, 2741, 8078, 8079, 8080, 8081, 8082, 3014, 1628, 3400, 8083,  # 7990
+    3172, 4574, 4052, 2890, 4575, 2512, 8084, 2544, 2772, 8085, 8086, 8087, 3310, 4576, 2891, 8088,  # 8006
+    4577, 8089, 2851, 4578, 4579, 1221, 2967, 4053, 2513, 8090, 8091, 8092, 1867, 1989, 8093, 8094,  # 8022
+    8095, 1895, 8096, 8097, 4580, 1896, 4054, 318, 8098, 2094, 4055, 4293, 8099, 8100, 485, 8101,  # 8038
+    938, 3862, 553, 2670, 116, 8102, 3863, 3612, 8103, 3498, 2671, 2773, 3401, 3311, 2807, 8104,  # 8054
+    3613, 2929, 4056, 1747, 2930, 2968, 8105, 8106, 207, 8107, 8108, 2672, 4581, 2514, 8109, 3015,  # 8070
+    890, 3614, 3864, 8110, 1877, 3732, 3402, 8111, 2183, 2353, 3403, 1652, 8112, 8113, 8114, 941,  # 8086
+    2294, 208, 3499, 4057, 2019, 330, 4294, 3865, 2892, 2492, 3733, 4295, 8115, 8116, 8117, 8118,  # 8102
+)
+# fmt: on
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/euctwprober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/euctwprober.py
new file mode 100644
index 0000000..a37ab18
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/euctwprober.py
@@ -0,0 +1,47 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .chardistribution import EUCTWDistributionAnalysis
+from .codingstatemachine import CodingStateMachine
+from .mbcharsetprober import MultiByteCharSetProber
+from .mbcssm import EUCTW_SM_MODEL
+
+
+class EUCTWProber(MultiByteCharSetProber):
+    def __init__(self) -> None:
+        super().__init__()
+        self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL)
+        self.distribution_analyzer = EUCTWDistributionAnalysis()
+        self.reset()
+
+    @property
+    def charset_name(self) -> str:
+        return "EUC-TW"
+
+    @property
+    def language(self) -> str:
+        return "Taiwan"
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/gb2312freq.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/gb2312freq.py
new file mode 100644
index 0000000..b32bfc7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/gb2312freq.py
@@ -0,0 +1,284 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+# GB2312 most frequently used character table
+#
+# Char to FreqOrder table, from hz6763
+
+# 512  --> 0.79  -- 0.79
+# 1024 --> 0.92  -- 0.13
+# 2048 --> 0.98  -- 0.06
+# 6768 --> 1.00  -- 0.02
+#
+# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79
+# Random Distribution Ratio = 512 / (3755 - 512) = 0.157
+#
+# Typical Distribution Ratio is about 25% of the Ideal one, still much higher than the RDR
+
+GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9
+
+GB2312_TABLE_SIZE = 3760
+
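The header comment above works out an Ideal Distribution Ratio of 0.79135/(1-0.79135) ≈ 3.79 against a random ratio of roughly 0.157; a distribution analyser exploits that gap, together with GB2312_TYPICAL_DISTRIBUTION_RATIO, to score how GB2312-like a byte stream looks. A minimal sketch of that calculation follows; the function name, the minimum-data threshold and the 0.99 cap are illustrative assumptions, not the vendored chardet implementation (which lives in chardistribution.py).

    # Illustrative sketch only: turns character counts plus a "typical
    # distribution ratio" (e.g. 0.9 for GB2312) into a confidence score.
    def distribution_confidence(freq_chars: int, total_chars: int,
                                typical_ratio: float = 0.9,
                                minimum_data: int = 3) -> float:
        """Return a 0.0-0.99 confidence that the text matches the frequency profile.

        freq_chars  - characters falling in the most-frequent bucket of the table
        total_chars - all multi-byte characters seen so far
        """
        if total_chars <= 0 or freq_chars <= minimum_data:
            return 0.01  # not enough evidence either way
        if total_chars == freq_chars:
            return 0.99  # every character was a high-frequency one
        r = freq_chars / ((total_chars - freq_chars) * typical_ratio)
        return min(r, 0.99)

    # Example: 480 of 512 sampled characters land in the frequent bucket.
    print(distribution_confidence(480, 512))  # -> 0.99 (capped)
    print(distribution_confidence(200, 512))  # -> ~0.71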
+# fmt: off
+GB2312_CHAR_TO_FREQ_ORDER = (
+1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205,
+2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842,
+2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409,
+ 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670,
+1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820,
+1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585,
+ 152,1687,1539, 738,1559,  59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566,
+1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850,  70,3285,2729,3534,3575,
+2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853,
+3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061,
+ 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155,
+1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406,
+ 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816,
+2534,1546,2393,2760, 737,2494,  13, 447, 245,2747,  38,2765,2129,2589,1079, 606,
+ 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023,
+2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414,
+1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513,
+3195,4115,5627,2489,2991,  24,2065,2697,1087,2719,  48,1634, 315,  68, 985,2052,
+ 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570,
+1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575,
+ 253,3099,  32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250,
+2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506,
+1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563,  26,
+3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835,
+1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686,
+2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054,
+1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894,
+ 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105,
+3777,3657, 643,2298,1148,1779, 190, 989,3544, 414,  11,2135,2063,2979,1471, 403,
+3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694,
+ 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873,
+3651, 210,  33,1608,2516, 200,1520, 415, 102,   0,3389,1287, 817,  91,3299,2940,
+ 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687,  20,1819, 121,
+1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648,
+3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992,
+2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680,  72, 842,1990, 212,1233,
+1154,1586,  75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157,
+ 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807,
+1910, 534, 529,3309,1721,1660, 274,  39,2827, 661,2670,1578, 925,3248,3815,1094,
+4278,4901,4252,  41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258,
+ 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478,
+3568, 194,5062,  15, 961,3870,1241,1192,2664,  66,5215,3260,2111,1295,1127,2152,
+3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426,  53,2909,
+ 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272,
+1272,2363, 284,1753,3679,4064,1695,  81, 815,2677,2757,2731,1386, 859, 500,4221,
+2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252,
+1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301,
+1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254,
+ 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070,
+3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461,
+3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640,  67,2360,
+4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124,
+ 296,3979,1739,1611,3684,  23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535,
+3116,  17,1074, 467,2692,2201, 387,2922,  45,1326,3055,1645,3659,2817, 958, 243,
+1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713,
+1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071,
+4046,3572,2399,1571,3281,  79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442,
+ 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946,
+ 814,4968,3487,1548,2644,1567,1285,   2, 295,2636,  97, 946,3576, 832, 141,4257,
+3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180,
+1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427,
+ 602,1525,2608,1605,1639,3175, 694,3064,  10, 465,  76,2000,4846,4208, 444,3781,
+1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724,
+2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844,  89, 937,
+ 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943,
+ 432, 445,2811, 206,4136,1472, 730, 349,  73, 397,2802,2547, 998,1637,1167, 789,
+ 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552,
+3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246,
+4996, 371,1575,2436,1621,2210, 984,4033,1734,2638,  16,4529, 663,2755,3255,1451,
+3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310,
+ 750,2058, 165,  80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860,
+2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297,
+2357, 395,3740, 137,2075, 944,4089,2584,1267,3802,  62,1533,2285, 178, 176, 780,
+2440, 201,3707, 590, 478,1560,4354,2117,1075,  30,  74,4643,4004,1635,1441,2745,
+ 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936,
+2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032,
+ 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669,  43,2523,1657,
+ 163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414,
+ 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976,
+3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436,
+2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254,
+2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024,  40,3240,1536,
+1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238,
+  18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059,
+2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741,
+  90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447,
+ 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601,
+1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269,
+1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076,  46,4253,2873,1889,1894,
+ 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173,
+ 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994,
+1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956,
+2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437,
+3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154,
+2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240,
+2269,2246,1446,  36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143,
+2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634,
+3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472,
+1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906,  51, 369, 170,3541,
+1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143,
+2101,2730,2490,  82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312,
+1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414,
+3750,2289,2795, 813,3123,2610,1136,4368,   5,3391,4541,2174, 420, 429,1728, 754,
+1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424,
+1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302,
+3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739,
+ 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004,
+2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484,
+1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739,
+4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535,
+1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641,
+1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307,
+3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573,
+1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533,
+  47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965,
+ 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096,  99,
+1397,1769,2300,4428,1643,3455,1978,1757,3718,1440,  35,4879,3742,1296,4228,2280,
+ 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505,
+1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012,
+1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039,
+ 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982,
+3708, 135,2131,  87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530,
+4314,   9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392,
+3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656,
+2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220,
+2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766,
+1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535,
+3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728,
+2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338,
+1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627,
+1505,1911,1883,3526, 698,3629,3456,1833,1431, 746,  77,1261,2017,2296,1977,1885,
+ 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411,
+2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671,
+2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162,
+3192,2910,2010, 140,2395,2859,  55,1082,2012,2901, 662, 419,2081,1438, 680,2774,
+4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524,
+3399,  98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346,
+ 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040,
+3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188,
+2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280,
+1086,1974,2034, 630, 257,3338,2788,4903,1017,  86,4790, 966,2789,1995,1696,1131,
+ 259,3095,4188,1308, 179,1463,5257, 289,4107,1248,  42,3413,1725,2288, 896,1947,
+ 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970,
+3034,3310, 540,2370,1562,1288,2990, 502,4765,1147,   4,1853,2708, 207, 294,2814,
+4078,2902,2509, 684,  34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557,
+2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997,
+1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972,
+1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369,
+ 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376,
+1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196,  19, 941,3624,3480,
+3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610,
+ 955,1089,3103,1053,  96,  88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128,
+ 642,4006, 903,2539,1877,2082, 596,  29,4066,1790, 722,2157, 130, 995,1569, 769,
+1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445,  50, 625, 487,2207,
+  57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392,
+1783, 362,   8,3433,3422, 610,2793,3277,1390,1284,1654,  21,3823, 734, 367, 623,
+ 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782,
+2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650,
+ 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478,
+2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773,
+2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007,
+1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323,
+1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598,
+2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961,
+ 819,1541, 142,2284,  44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302,
+1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409,
+1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683,
+2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191,
+2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434,  92,1466,4920,2616,
+3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302,
+1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774,
+4462,  64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147,
+ 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731,
+ 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464,
+3264,2855,2722,1952,1029,2839,2467,  84,4383,2215, 820,1391,2015,2448,3672, 377,
+1948,2168, 797,2545,3536,2578,2645,  94,2874,1678, 405,1259,3071, 771, 546,1315,
+ 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928,  14,2594, 557,
+3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903,
+1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060,
+4031,2641,4067,3145,1870,  37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261,
+1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092,
+2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810,
+1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708,
+ 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658,
+1178,2639,2351,  93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871,
+3341,1618,4126,2595,2334, 603, 651,  69, 701, 268,2662,3411,2555,1380,1606, 503,
+ 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229,
+2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112,
+ 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504,
+1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389,
+1281,  52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169,  27,
+1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542,
+3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861,
+2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845,
+3891,2868,3621,2254,  58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700,
+3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469,
+3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582,
+ 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999,
+2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274,
+ 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020,
+2724,1927,2333,4440, 567,  22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601,
+  12, 974,3783,4391, 951,1412,   1,3720, 453,4608,4041, 528,1041,1027,3230,2628,
+1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040,  31,
+ 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668,
+ 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778,
+1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169,
+3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667,
+3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118,  63,2076, 314,1881,
+1348,1061, 172, 978,3515,1747, 532, 511,3970,   6, 601, 905,2699,3300,1751, 276,
+1467,3725,2668,  65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320,
+3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751,
+2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432,
+2754,  95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772,
+1985, 244,2546, 474, 495,1046,2611,1851,2061,  71,2089,1675,2590, 742,3758,2843,
+3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116,
+ 451,   3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904,
+4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652,
+1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664,
+2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078,  49,3770,
+3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283,
+3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626,
+1197,1663,4476,3127,  85,4240,2528,  25,1111,1181,3673, 407,3470,4561,2679,2713,
+ 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333,
+ 391,2963, 187,  61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062,
+2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555,
+ 931, 317,2517,3027, 325, 569, 686,2107,3084,  60,1042,1333,2794, 264,3177,4014,
+1628, 258,3712,   7,4464,1176,1043,1778, 683, 114,1975,  78,1492, 383,1886, 510,
+ 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015,
+1282,1289,4609, 697,1453,3044,2666,3611,1856,2412,  54, 719,1330, 568,3778,2459,
+1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390,
+1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238,
+1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421,  56,1908,1640,2387,2232,
+1917,1874,2477,4921, 148,  83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624,
+ 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189,
+ 852,1221,1400,1486, 882,2299,4036, 351,  28,1122, 700,6479,6480,6481,6482,6483,  #last 512
+)
+# fmt: on
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/gb2312prober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/gb2312prober.py
new file mode 100644
index 0000000..d423e73
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/gb2312prober.py
@@ -0,0 +1,47 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .chardistribution import GB2312DistributionAnalysis
+from .codingstatemachine import CodingStateMachine
+from .mbcharsetprober import MultiByteCharSetProber
+from .mbcssm import GB2312_SM_MODEL
+
+
+class GB2312Prober(MultiByteCharSetProber):
+    def __init__(self) -> None:
+        super().__init__()
+        self.coding_sm = CodingStateMachine(GB2312_SM_MODEL)
+        self.distribution_analyzer = GB2312DistributionAnalysis()
+        self.reset()
+
+    @property
+    def charset_name(self) -> str:
+        return "GB2312"
+
+    @property
+    def language(self) -> str:
+        return "Chinese"
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/hebrewprober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/hebrewprober.py
new file mode 100644
index 0000000..785d005
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/hebrewprober.py
@@ -0,0 +1,316 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+#          Shy Shalom
+# Portions created by the Initial Developer are Copyright (C) 2005
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from typing import Optional, Union
+
+from .charsetprober import CharSetProber
+from .enums import ProbingState
+from .sbcharsetprober import SingleByteCharSetProber
+
+# This prober doesn't actually recognize a language or a charset.
+# It is a helper prober for the use of the Hebrew model probers
+
+### General ideas of the Hebrew charset recognition ###
+#
+# Four main charsets exist in Hebrew:
+# "ISO-8859-8" - Visual Hebrew
+# "windows-1255" - Logical Hebrew
+# "ISO-8859-8-I" - Logical Hebrew
+# "x-mac-hebrew" - ?? Logical Hebrew ??
+#
+# Both "ISO" charsets use a completely identical set of code points, whereas
+# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
+# these code points. windows-1255 defines additional characters in the range
+# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
+# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
+# x-mac-hebrew defines similar additional code points but with a different
+# mapping.
+#
+# As far as an average Hebrew text with no diacritics is concerned, all four
+# charsets are identical with respect to code points. Meaning that for the
+# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
+# (including final letters).
+#
+# The dominant difference between these charsets is their directionality.
+# "Visual" directionality means that the text is ordered as if the renderer is
+# not aware of a BIDI rendering algorithm. The renderer sees the text and
+# draws it from left to right. The text itself when ordered naturally is read
+# backwards. A buffer of Visual Hebrew generally looks like so:
+# "[last word of first line spelled backwards] [whole line ordered backwards
+# and spelled backwards] [first word of first line spelled backwards]
+# [end of line] [last word of second line] ... etc' "
+# Adding punctuation marks, numbers and English text to visual text is
+# naturally also "visual" and from left to right.
+#
+# "Logical" directionality means the text is ordered "naturally" according to
+# the order it is read. It is the responsibility of the renderer to display
+# the text from right to left. A BIDI algorithm is used to place general
+# punctuation marks, numbers and English text in the text.
+#
+# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
+# what little evidence I could find, it seems that its general directionality
+# is Logical.
+#
+# To sum up all of the above, the Hebrew probing mechanism knows about two
+# charsets:
+# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
+#    backwards while line order is natural. For charset recognition purposes
+#    the line order is unimportant (In fact, for this implementation, even
+#    word order is unimportant).
+# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
+#
+# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
+#    specifically identified.
+# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
+#    that contains special punctuation marks or diacritics is displayed with
+#    some unconverted characters showing as question marks. This problem might
+#    be corrected using another model prober for x-mac-hebrew. Due to the fact
+#    that x-mac-hebrew texts are so rare, writing another model prober isn't
+#    worth the effort and performance hit.
+#
+#### The Prober ####
+#
+# The prober is divided between two SBCharSetProbers and a HebrewProber,
+# all of which are managed, created, fed data, inquired and deleted by the
+# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
+# fact some kind of Hebrew, Logical or Visual. The final decision about which
+# one is it is made by the HebrewProber by combining final-letter scores
+# with the scores of the two SBCharSetProbers to produce a final answer.
+#
+# The SBCSGroupProber is responsible for stripping the original text of HTML
+# tags, English characters, numbers, low-ASCII punctuation characters, spaces
+# and new lines. It reduces any sequence of such characters to a single space.
+# The buffer fed to each prober in the SBCS group prober is pure text in
+# high-ASCII.
+# The two SBCharSetProbers (model probers) share the same language model:
+# Win1255Model.
+# The first SBCharSetProber uses the model normally as any other
+# SBCharSetProber does, to recognize windows-1255, upon which this model was
+# built. The second SBCharSetProber is told to make the pair-of-letter
+# lookup in the language model backwards. This in practice exactly simulates
+# a visual Hebrew model using the windows-1255 logical Hebrew model.
+#
+# The HebrewProber is not using any language model. All it does is look for
+# final-letter evidence suggesting the text is either logical Hebrew or visual
+# Hebrew. Disjointed from the model probers, the results of the HebrewProber
+# alone are meaningless. HebrewProber always returns 0.00 as confidence
+# since it never identifies a charset by itself. Instead, the pointer to the
+# HebrewProber is passed to the model probers as a helper "Name Prober".
+# When the Group prober receives a positive identification from any prober,
+# it asks for the name of the charset identified. If the prober queried is a
+# Hebrew model prober, the model prober forwards the call to the
+# HebrewProber to make the final decision. In the HebrewProber, the
+# decision is made according to the final-letter scores maintained and both
+# model probers' scores. The answer is returned in the form of the name of the
+# charset identified, either "windows-1255" or "ISO-8859-8".
+
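Per the comment above, the two model probers and the HebrewProber are only meaningful once the group prober has wired them together. A minimal sketch of that wiring, assuming chardet's module and model names (WINDOWS_1255_HEBREW_MODEL, SingleByteCharSetProber) and treating the exact constructor signature as an assumption:

    # Sketch of the wiring described above: two SingleByteCharSetProbers share
    # one windows-1255 language model (the second reads letter pairs in reverse
    # to emulate visual Hebrew) and a HebrewProber arbitrates between them.
    from chardet.hebrewprober import HebrewProber
    from chardet.langhebrewmodel import WINDOWS_1255_HEBREW_MODEL
    from chardet.sbcharsetprober import SingleByteCharSetProber

    hebrew_prober = HebrewProber()
    logical = SingleByteCharSetProber(WINDOWS_1255_HEBREW_MODEL, False, hebrew_prober)
    visual = SingleByteCharSetProber(WINDOWS_1255_HEBREW_MODEL, True, hebrew_prober)
    hebrew_prober.set_model_probers(logical, visual)

    # Feeding the same buffer to all three and then asking the HebrewProber for
    # charset_name yields "windows-1255" (logical) or "ISO-8859-8" (visual).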
+
+class HebrewProber(CharSetProber):
+    SPACE = 0x20
+    # windows-1255 / ISO-8859-8 code points of interest
+    FINAL_KAF = 0xEA
+    NORMAL_KAF = 0xEB
+    FINAL_MEM = 0xED
+    NORMAL_MEM = 0xEE
+    FINAL_NUN = 0xEF
+    NORMAL_NUN = 0xF0
+    FINAL_PE = 0xF3
+    NORMAL_PE = 0xF4
+    FINAL_TSADI = 0xF5
+    NORMAL_TSADI = 0xF6
+
+    # Minimum Visual vs Logical final letter score difference.
+    # If the difference is below this, don't rely solely on the final letter score
+    # distance.
+    MIN_FINAL_CHAR_DISTANCE = 5
+
+    # Minimum Visual vs Logical model score difference.
+    # If the difference is below this, don't rely at all on the model score
+    # distance.
+    MIN_MODEL_DISTANCE = 0.01
+
+    VISUAL_HEBREW_NAME = "ISO-8859-8"
+    LOGICAL_HEBREW_NAME = "windows-1255"
+
+    def __init__(self) -> None:
+        super().__init__()
+        self._final_char_logical_score = 0
+        self._final_char_visual_score = 0
+        self._prev = self.SPACE
+        self._before_prev = self.SPACE
+        self._logical_prober: Optional[SingleByteCharSetProber] = None
+        self._visual_prober: Optional[SingleByteCharSetProber] = None
+        self.reset()
+
+    def reset(self) -> None:
+        self._final_char_logical_score = 0
+        self._final_char_visual_score = 0
+        # The two last characters seen in the previous buffer,
+        # mPrev and mBeforePrev are initialized to space in order to simulate
+        # a word delimiter at the beginning of the data
+        self._prev = self.SPACE
+        self._before_prev = self.SPACE
+        # These probers are owned by the group prober.
+
+    def set_model_probers(
+        self,
+        logical_prober: SingleByteCharSetProber,
+        visual_prober: SingleByteCharSetProber,
+    ) -> None:
+        self._logical_prober = logical_prober
+        self._visual_prober = visual_prober
+
+    def is_final(self, c: int) -> bool:
+        return c in [
+            self.FINAL_KAF,
+            self.FINAL_MEM,
+            self.FINAL_NUN,
+            self.FINAL_PE,
+            self.FINAL_TSADI,
+        ]
+
+    def is_non_final(self, c: int) -> bool:
+        # The normal Tsadi is not a good Non-Final letter because of words
+        # like 'lechotet' (to chat), which contain an apostrophe after the
+        # tsadi. This apostrophe is converted to a space in
+        # FilterWithoutEnglishLetters, causing the Non-Final tsadi to appear
+        # at the end of a word even though this is not the case in the
+        # original text.
+        # The letters Pe and Kaf occasionally show a related problem: words
+        # like 'Pop', 'Winamp' and 'Mubarak', for example, legitimately end
+        # with a Non-Final Pe or Kaf. However, the benefit of treating these
+        # letters as Non-Final indicators outweighs the damage, since such
+        # words are quite rare.
+        return c in [self.NORMAL_KAF, self.NORMAL_MEM, self.NORMAL_NUN, self.NORMAL_PE]
+
+    def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState:
+        # Final letter analysis for logical-visual decision.
+        # Look for evidence that the received buffer is either logical Hebrew
+        # or visual Hebrew.
+        # The following cases are checked:
+        # 1) A word longer than 1 letter, ending with a final letter. This is
+        #    an indication that the text is laid out "naturally" since the
+        #    final letter really appears at the end. +1 for logical score.
+        # 2) A word longer than 1 letter, ending with a Non-Final letter. In
+        #    normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi,
+        #    should not end with the Non-Final form of that letter. Exceptions
+        #    to this rule are mentioned above in isNonFinal(). This is an
+        #    indication that the text is laid out backwards. +1 for visual
+        #    score
+        # 3) A word longer than 1 letter, starting with a final letter. Final
+        #    letters should not appear at the beginning of a word. This is an
+        #    indication that the text is laid out backwards. +1 for visual
+        #    score.
+        #
+        # The visual score and logical score are accumulated throughout the
+        # text and are finally checked against each other in GetCharSetName().
+        # No checking for final letters in the middle of words is done since
+        # that case is not an indication for either Logical or Visual text.
+        #
+        # We automatically filter out all 7-bit characters (replace them with
+        # spaces) so the word boundary detection works properly. [MAP]
+
+        if self.state == ProbingState.NOT_ME:
+            # Both model probers say it's not them. No reason to continue.
+            return ProbingState.NOT_ME
+
+        byte_str = self.filter_high_byte_only(byte_str)
+
+        for cur in byte_str:
+            if cur == self.SPACE:
+                # We stand on a space - a word just ended
+                if self._before_prev != self.SPACE:
+                    # next-to-last char was not a space so self._prev is not a
+                    # 1 letter word
+                    if self.is_final(self._prev):
+                        # case (1) [-2:not space][-1:final letter][cur:space]
+                        self._final_char_logical_score += 1
+                    elif self.is_non_final(self._prev):
+                        # case (2) [-2:not space][-1:Non-Final letter][cur:space]
+                        self._final_char_visual_score += 1
+            else:
+                # Not standing on a space
+                if (
+                    (self._before_prev == self.SPACE)
+                    and (self.is_final(self._prev))
+                    and (cur != self.SPACE)
+                ):
+                    # case (3) [-2:space][-1:final letter][cur:not space]
+                    self._final_char_visual_score += 1
+            self._before_prev = self._prev
+            self._prev = cur
+
+        # Forever detecting, till the end or until both model probers return
+        # ProbingState.NOT_ME (handled above)
+        return ProbingState.DETECTING
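+
+    # Illustrative trace (editorial note, assuming windows-1255 bytes): feeding
+    # b" \xf9\xec\xe5\xed " (the word "shalom" between spaces) ends a
+    # multi-letter word with FINAL_MEM (0xED), so case (1) above adds one to
+    # self._final_char_logical_score; feeding the visually reversed
+    # b" \xed\xe5\xec\xf9 " instead starts a word with FINAL_MEM, so case (3)
+    # adds one to self._final_char_visual_score.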
+
+    @property
+    def charset_name(self) -> str:
+        assert self._logical_prober is not None
+        assert self._visual_prober is not None
+
+        # Make the decision: is it Logical or Visual?
+        # If the final letter score distance is dominant enough, rely on it.
+        finalsub = self._final_char_logical_score - self._final_char_visual_score
+        if finalsub >= self.MIN_FINAL_CHAR_DISTANCE:
+            return self.LOGICAL_HEBREW_NAME
+        if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE:
+            return self.VISUAL_HEBREW_NAME
+
+        # It's not dominant enough, try to rely on the model scores instead.
+        modelsub = (
+            self._logical_prober.get_confidence() - self._visual_prober.get_confidence()
+        )
+        if modelsub > self.MIN_MODEL_DISTANCE:
+            return self.LOGICAL_HEBREW_NAME
+        if modelsub < -self.MIN_MODEL_DISTANCE:
+            return self.VISUAL_HEBREW_NAME
+
+        # Still no good, back to final letter distance, maybe it'll save the
+        # day.
+        if finalsub < 0.0:
+            return self.VISUAL_HEBREW_NAME
+
+        # (finalsub > 0 - Logical) or (don't know what to do) default to
+        # Logical.
+        return self.LOGICAL_HEBREW_NAME
+
+    @property
+    def language(self) -> str:
+        return "Hebrew"
+
+    @property
+    def state(self) -> ProbingState:
+        assert self._logical_prober is not None
+        assert self._visual_prober is not None
+
+        # Remain active as long as any of the model probers are active.
+        if (self._logical_prober.state == ProbingState.NOT_ME) and (
+            self._visual_prober.state == ProbingState.NOT_ME
+        ):
+            return ProbingState.NOT_ME
+        return ProbingState.DETECTING
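+
+
+# --- Illustrative sketch (editorial note, not part of the vendored module) ---
+# A minimal example of how a group prober typically wires the HebrewProber
+# together with the two Hebrew model probers: the HebrewProber is handed to
+# each SingleByteCharSetProber as its "name prober", and set_model_probers()
+# gives it back references so that charset_name and state can consult their
+# scores. The model name WINDOWS_1255_HEBREW_MODEL and the exact
+# SingleByteCharSetProber signature are assumptions based on chardet's layout.
+def _example_hebrew_wiring() -> HebrewProber:
+    from .langhebrewmodel import WINDOWS_1255_HEBREW_MODEL
+
+    hebrew_prober = HebrewProber()
+    # Logical prober: the model applied to the byte stream as-is.
+    logical = SingleByteCharSetProber(WINDOWS_1255_HEBREW_MODEL, False, hebrew_prober)
+    # Visual prober: the same model with the character order reversed.
+    visual = SingleByteCharSetProber(WINDOWS_1255_HEBREW_MODEL, True, hebrew_prober)
+    hebrew_prober.set_model_probers(logical, visual)
+    # All three probers are then fed the same bytes; the HebrewProber only
+    # accumulates final-letter evidence and picks the charset name.
+    return hebrew_prober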
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/jisfreq.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/jisfreq.py
new file mode 100644
index 0000000..3293576
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/jisfreq.py
@@ -0,0 +1,325 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+# Sampled from about 20M of text material, including literature and computer technology.
+#
+# Japanese frequency table, applied to both S-JIS and EUC-JP
+# They are sorted in order.
+
+# 128  --> 0.77094
+# 256  --> 0.85710
+# 512  --> 0.92635
+# 1024 --> 0.97130
+# 2048 --> 0.99431
+#
+# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58
+# Random Distribution Ratio = 512 / (2965+62+83+86-512) = 0.191
+#
+# Typical Distribution Ratio, 25% of IDR
+
+JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0
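+
+# Illustrative arithmetic (editorial note): the constant above is roughly a
+# quarter of the ideal distribution ratio quoted in the comment:
+#   0.92635 / (1 - 0.92635) = 12.58...
+#   0.25 * 12.58            =  3.15  ~= 3.0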
+
+# Char to FreqOrder table
+JIS_TABLE_SIZE = 4368
+
+# fmt: off
+JIS_CHAR_TO_FREQ_ORDER = (
+  40,   1,   6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, #   16
+3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247,  18, 179,5071, 856,1661, #   32
+1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, #   48
+2042,1061,1062,  48,  49,  44,  45, 433, 434,1040,1041, 996, 787,2997,1255,4305, #   64
+2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, #   80
+5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, #   96
+1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, #  112
+5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, #  128
+5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, #  144
+5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, #  160
+5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, #  176
+5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, #  192
+5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, #  208
+1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, #  224
+1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, #  240
+1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, #  256
+2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, #  272
+3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161,  26,3377,   2,3929,  20, #  288
+3691,  47,4100,  50,  17,  16,  35, 268,  27, 243,  42, 155,  24, 154,  29, 184, #  304
+   4,  91,  14,  92,  53, 396,  33, 289,   9,  37,  64, 620,  21,  39, 321,   5, #  320
+  12,  11,  52,  13,   3, 208, 138,   0,   7,  60, 526, 141, 151,1069, 181, 275, #  336
+1591,  83, 132,1475, 126, 331, 829,  15,  69, 160,  59,  22, 157,  55,1079, 312, #  352
+ 109,  38,  23,  25,  10,  19,  79,5195,  61, 382,1124,   8,  30,5196,5197,5198, #  368
+5199,5200,5201,5202,5203,5204,5205,5206,  89,  62,  74,  34,2416, 112, 139, 196, #  384
+ 271, 149,  84, 607, 131, 765,  46,  88, 153, 683,  76, 874, 101, 258,  57,  80, #  400
+  32, 364, 121,1508, 169,1547,  68, 235, 145,2999,  41, 360,3027,  70,  63,  31, #  416
+  43, 259, 262,1383,  99, 533, 194,  66,  93, 846, 217, 192,  56, 106,  58, 565, #  432
+ 280, 272, 311, 256, 146,  82, 308,  71, 100, 128, 214, 655, 110, 261, 104,1140, #  448
+  54,  51,  36,  87,  67,3070, 185,2618,2936,2020,  28,1066,2390,2059,5207,5208, #  464
+5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, #  480
+5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, #  496
+5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, #  512
+4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, #  528
+5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, #  544
+5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, #  560
+5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, #  576
+5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, #  592
+5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, #  608
+5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, #  624
+5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, #  640
+5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, #  656
+5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, #  672
+3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, #  688
+5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, #  704
+5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, #  720
+5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, #  736
+5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, #  752
+5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, #  768
+5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, #  784
+5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, #  800
+5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, #  816
+5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, #  832
+5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, #  848
+5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, #  864
+5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, #  880
+5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, #  896
+5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, #  912
+5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, #  928
+5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, #  944
+5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, #  960
+5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, #  976
+5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, #  992
+5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008
+5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024
+5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040
+5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056
+5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072
+5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088
+5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104
+5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120
+5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136
+5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152
+5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168
+5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184
+5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200
+5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216
+5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232
+5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248
+5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264
+5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280
+5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296
+6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312
+6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328
+6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344
+6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360
+6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376
+6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392
+6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408
+6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424
+4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440
+ 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456
+ 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472
+1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619,  65,3302,2045, # 1488
+1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504
+ 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520
+3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536
+3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552
+ 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568
+3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584
+3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600
+ 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616
+2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632
+ 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648
+3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664
+1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680
+ 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696
+1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712
+ 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728
+2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744
+2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760
+2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776
+2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792
+1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808
+1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824
+1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840
+1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856
+2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872
+1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888
+2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904
+1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920
+1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936
+1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952
+1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968
+1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984
+1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000
+ 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016
+ 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032
+1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048
+2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064
+2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080
+2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096
+3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112
+3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128
+ 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144
+3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160
+1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876,  78,2287,1482,1277, # 2176
+ 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192
+2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208
+1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224
+ 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240
+3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256
+4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272
+2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288
+1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304
+2601,1919,1078,  75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320
+1075, 292,3818,1756,2602, 317,  98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336
+ 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352
+ 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368
+1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384
+2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400
+2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416
+2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432
+3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448
+1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464
+2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480
+ 359,2291,1676,  73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496
+ 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512
+ 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528
+1209,  96, 587,2166,1032, 260,1072,2153, 173,  94, 226,3244, 819,2006,4642,4114, # 2544
+2203, 231,1744, 782,  97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560
+ 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576
+1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592
+1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608
+ 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624
+1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640
+1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656
+1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672
+ 764,2861,1853, 688,2429,1920,1462,  77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688
+2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704
+ 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720
+2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736
+3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752
+2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768
+1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784
+6147, 441, 762,1771,3447,3607,3608,1904, 840,3037,  86, 939,1385, 572,1370,2445, # 2800
+1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816
+2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832
+1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848
+ 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864
+  72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880
+3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896
+3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912
+1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928
+1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944
+1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960
+1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976
+ 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992
+ 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008
+2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024
+ 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040
+3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056
+2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072
+ 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088
+1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104
+2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120
+ 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136
+1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152
+ 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168
+4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184
+2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200
+1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216
+ 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232
+1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248
+2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264
+ 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280
+6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296
+1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312
+1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328
+2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344
+3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360
+ 914,2550,2587,  81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376
+3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392
+1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408
+ 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424
+1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440
+ 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456
+3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472
+ 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488
+2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504
+ 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520
+4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536
+2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552
+1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568
+1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584
+1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600
+ 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616
+1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632
+3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648
+1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664
+3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680
+ 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696
+ 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712
+ 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728
+2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744
+1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760
+ 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776
+1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792
+ 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808
+1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824
+ 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840
+ 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856
+ 480,2083,1774,3458, 923,2279,1350, 221,3086,  85,2233,2234,3835,1585,3010,2147, # 3872
+1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888
+1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904
+2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920
+4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936
+ 227,1351,1645,2453,2193,1421,2887, 812,2121, 634,  95,2435, 201,2312,4665,1646, # 3952
+1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968
+ 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984
+1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000
+3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016
+1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032
+2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048
+2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064
+1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080
+1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096
+2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112
+ 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128
+2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144
+1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160
+1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176
+1279,2136,1697,2335, 204, 721,2097,3838,  90,6186,2085,2505, 191,3967, 124,2148, # 4192
+1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208
+3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224
+2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240
+2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256
+ 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272
+3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288
+3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304
+1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320
+2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336
+1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352
+2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368  #last 512
+)
+# fmt: on
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/johabfreq.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/johabfreq.py
new file mode 100644
index 0000000..c129699
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/johabfreq.py
@@ -0,0 +1,2382 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+# The frequency data itself is the same as for EUC-KR.
+# This is just a mapping table from Johab code points to the EUC-KR frequency
+# order.
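+#
+# Illustrative lookup (editorial note): a two-byte Johab code point maps to the
+# same frequency-order index that the EUC-KR model uses, so the Korean
+# frequency data can be shared by both encodings. For example:
+#   JOHAB_TO_EUCKR_ORDER_TABLE.get(0x8861)  ->  0      (highest-frequency slot)
+#   JOHAB_TO_EUCKR_ORDER_TABLE.get(0x1234)  ->  None   (unmapped code point)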
+
+JOHAB_TO_EUCKR_ORDER_TABLE = {
+    0x8861: 0,
+    0x8862: 1,
+    0x8865: 2,
+    0x8868: 3,
+    0x8869: 4,
+    0x886A: 5,
+    0x886B: 6,
+    0x8871: 7,
+    0x8873: 8,
+    0x8874: 9,
+    0x8875: 10,
+    0x8876: 11,
+    0x8877: 12,
+    0x8878: 13,
+    0x8879: 14,
+    0x887B: 15,
+    0x887C: 16,
+    0x887D: 17,
+    0x8881: 18,
+    0x8882: 19,
+    0x8885: 20,
+    0x8889: 21,
+    0x8891: 22,
+    0x8893: 23,
+    0x8895: 24,
+    0x8896: 25,
+    0x8897: 26,
+    0x88A1: 27,
+    0x88A2: 28,
+    0x88A5: 29,
+    0x88A9: 30,
+    0x88B5: 31,
+    0x88B7: 32,
+    0x88C1: 33,
+    0x88C5: 34,
+    0x88C9: 35,
+    0x88E1: 36,
+    0x88E2: 37,
+    0x88E5: 38,
+    0x88E8: 39,
+    0x88E9: 40,
+    0x88EB: 41,
+    0x88F1: 42,
+    0x88F3: 43,
+    0x88F5: 44,
+    0x88F6: 45,
+    0x88F7: 46,
+    0x88F8: 47,
+    0x88FB: 48,
+    0x88FC: 49,
+    0x88FD: 50,
+    0x8941: 51,
+    0x8945: 52,
+    0x8949: 53,
+    0x8951: 54,
+    0x8953: 55,
+    0x8955: 56,
+    0x8956: 57,
+    0x8957: 58,
+    0x8961: 59,
+    0x8962: 60,
+    0x8963: 61,
+    0x8965: 62,
+    0x8968: 63,
+    0x8969: 64,
+    0x8971: 65,
+    0x8973: 66,
+    0x8975: 67,
+    0x8976: 68,
+    0x8977: 69,
+    0x897B: 70,
+    0x8981: 71,
+    0x8985: 72,
+    0x8989: 73,
+    0x8993: 74,
+    0x8995: 75,
+    0x89A1: 76,
+    0x89A2: 77,
+    0x89A5: 78,
+    0x89A8: 79,
+    0x89A9: 80,
+    0x89AB: 81,
+    0x89AD: 82,
+    0x89B0: 83,
+    0x89B1: 84,
+    0x89B3: 85,
+    0x89B5: 86,
+    0x89B7: 87,
+    0x89B8: 88,
+    0x89C1: 89,
+    0x89C2: 90,
+    0x89C5: 91,
+    0x89C9: 92,
+    0x89CB: 93,
+    0x89D1: 94,
+    0x89D3: 95,
+    0x89D5: 96,
+    0x89D7: 97,
+    0x89E1: 98,
+    0x89E5: 99,
+    0x89E9: 100,
+    0x89F3: 101,
+    0x89F6: 102,
+    0x89F7: 103,
+    0x8A41: 104,
+    0x8A42: 105,
+    0x8A45: 106,
+    0x8A49: 107,
+    0x8A51: 108,
+    0x8A53: 109,
+    0x8A55: 110,
+    0x8A57: 111,
+    0x8A61: 112,
+    0x8A65: 113,
+    0x8A69: 114,
+    0x8A73: 115,
+    0x8A75: 116,
+    0x8A81: 117,
+    0x8A82: 118,
+    0x8A85: 119,
+    0x8A88: 120,
+    0x8A89: 121,
+    0x8A8A: 122,
+    0x8A8B: 123,
+    0x8A90: 124,
+    0x8A91: 125,
+    0x8A93: 126,
+    0x8A95: 127,
+    0x8A97: 128,
+    0x8A98: 129,
+    0x8AA1: 130,
+    0x8AA2: 131,
+    0x8AA5: 132,
+    0x8AA9: 133,
+    0x8AB6: 134,
+    0x8AB7: 135,
+    0x8AC1: 136,
+    0x8AD5: 137,
+    0x8AE1: 138,
+    0x8AE2: 139,
+    0x8AE5: 140,
+    0x8AE9: 141,
+    0x8AF1: 142,
+    0x8AF3: 143,
+    0x8AF5: 144,
+    0x8B41: 145,
+    0x8B45: 146,
+    0x8B49: 147,
+    0x8B61: 148,
+    0x8B62: 149,
+    0x8B65: 150,
+    0x8B68: 151,
+    0x8B69: 152,
+    0x8B6A: 153,
+    0x8B71: 154,
+    0x8B73: 155,
+    0x8B75: 156,
+    0x8B77: 157,
+    0x8B81: 158,
+    0x8BA1: 159,
+    0x8BA2: 160,
+    0x8BA5: 161,
+    0x8BA8: 162,
+    0x8BA9: 163,
+    0x8BAB: 164,
+    0x8BB1: 165,
+    0x8BB3: 166,
+    0x8BB5: 167,
+    0x8BB7: 168,
+    0x8BB8: 169,
+    0x8BBC: 170,
+    0x8C61: 171,
+    0x8C62: 172,
+    0x8C63: 173,
+    0x8C65: 174,
+    0x8C69: 175,
+    0x8C6B: 176,
+    0x8C71: 177,
+    0x8C73: 178,
+    0x8C75: 179,
+    0x8C76: 180,
+    0x8C77: 181,
+    0x8C7B: 182,
+    0x8C81: 183,
+    0x8C82: 184,
+    0x8C85: 185,
+    0x8C89: 186,
+    0x8C91: 187,
+    0x8C93: 188,
+    0x8C95: 189,
+    0x8C96: 190,
+    0x8C97: 191,
+    0x8CA1: 192,
+    0x8CA2: 193,
+    0x8CA9: 194,
+    0x8CE1: 195,
+    0x8CE2: 196,
+    0x8CE3: 197,
+    0x8CE5: 198,
+    0x8CE9: 199,
+    0x8CF1: 200,
+    0x8CF3: 201,
+    0x8CF5: 202,
+    0x8CF6: 203,
+    0x8CF7: 204,
+    0x8D41: 205,
+    0x8D42: 206,
+    0x8D45: 207,
+    0x8D51: 208,
+    0x8D55: 209,
+    0x8D57: 210,
+    0x8D61: 211,
+    0x8D65: 212,
+    0x8D69: 213,
+    0x8D75: 214,
+    0x8D76: 215,
+    0x8D7B: 216,
+    0x8D81: 217,
+    0x8DA1: 218,
+    0x8DA2: 219,
+    0x8DA5: 220,
+    0x8DA7: 221,
+    0x8DA9: 222,
+    0x8DB1: 223,
+    0x8DB3: 224,
+    0x8DB5: 225,
+    0x8DB7: 226,
+    0x8DB8: 227,
+    0x8DB9: 228,
+    0x8DC1: 229,
+    0x8DC2: 230,
+    0x8DC9: 231,
+    0x8DD6: 232,
+    0x8DD7: 233,
+    0x8DE1: 234,
+    0x8DE2: 235,
+    0x8DF7: 236,
+    0x8E41: 237,
+    0x8E45: 238,
+    0x8E49: 239,
+    0x8E51: 240,
+    0x8E53: 241,
+    0x8E57: 242,
+    0x8E61: 243,
+    0x8E81: 244,
+    0x8E82: 245,
+    0x8E85: 246,
+    0x8E89: 247,
+    0x8E90: 248,
+    0x8E91: 249,
+    0x8E93: 250,
+    0x8E95: 251,
+    0x8E97: 252,
+    0x8E98: 253,
+    0x8EA1: 254,
+    0x8EA9: 255,
+    0x8EB6: 256,
+    0x8EB7: 257,
+    0x8EC1: 258,
+    0x8EC2: 259,
+    0x8EC5: 260,
+    0x8EC9: 261,
+    0x8ED1: 262,
+    0x8ED3: 263,
+    0x8ED6: 264,
+    0x8EE1: 265,
+    0x8EE5: 266,
+    0x8EE9: 267,
+    0x8EF1: 268,
+    0x8EF3: 269,
+    0x8F41: 270,
+    0x8F61: 271,
+    0x8F62: 272,
+    0x8F65: 273,
+    0x8F67: 274,
+    0x8F69: 275,
+    0x8F6B: 276,
+    0x8F70: 277,
+    0x8F71: 278,
+    0x8F73: 279,
+    0x8F75: 280,
+    0x8F77: 281,
+    0x8F7B: 282,
+    0x8FA1: 283,
+    0x8FA2: 284,
+    0x8FA5: 285,
+    0x8FA9: 286,
+    0x8FB1: 287,
+    0x8FB3: 288,
+    0x8FB5: 289,
+    0x8FB7: 290,
+    0x9061: 291,
+    0x9062: 292,
+    0x9063: 293,
+    0x9065: 294,
+    0x9068: 295,
+    0x9069: 296,
+    0x906A: 297,
+    0x906B: 298,
+    0x9071: 299,
+    0x9073: 300,
+    0x9075: 301,
+    0x9076: 302,
+    0x9077: 303,
+    0x9078: 304,
+    0x9079: 305,
+    0x907B: 306,
+    0x907D: 307,
+    0x9081: 308,
+    0x9082: 309,
+    0x9085: 310,
+    0x9089: 311,
+    0x9091: 312,
+    0x9093: 313,
+    0x9095: 314,
+    0x9096: 315,
+    0x9097: 316,
+    0x90A1: 317,
+    0x90A2: 318,
+    0x90A5: 319,
+    0x90A9: 320,
+    0x90B1: 321,
+    0x90B7: 322,
+    0x90E1: 323,
+    0x90E2: 324,
+    0x90E4: 325,
+    0x90E5: 326,
+    0x90E9: 327,
+    0x90EB: 328,
+    0x90EC: 329,
+    0x90F1: 330,
+    0x90F3: 331,
+    0x90F5: 332,
+    0x90F6: 333,
+    0x90F7: 334,
+    0x90FD: 335,
+    0x9141: 336,
+    0x9142: 337,
+    0x9145: 338,
+    0x9149: 339,
+    0x9151: 340,
+    0x9153: 341,
+    0x9155: 342,
+    0x9156: 343,
+    0x9157: 344,
+    0x9161: 345,
+    0x9162: 346,
+    0x9165: 347,
+    0x9169: 348,
+    0x9171: 349,
+    0x9173: 350,
+    0x9176: 351,
+    0x9177: 352,
+    0x917A: 353,
+    0x9181: 354,
+    0x9185: 355,
+    0x91A1: 356,
+    0x91A2: 357,
+    0x91A5: 358,
+    0x91A9: 359,
+    0x91AB: 360,
+    0x91B1: 361,
+    0x91B3: 362,
+    0x91B5: 363,
+    0x91B7: 364,
+    0x91BC: 365,
+    0x91BD: 366,
+    0x91C1: 367,
+    0x91C5: 368,
+    0x91C9: 369,
+    0x91D6: 370,
+    0x9241: 371,
+    0x9245: 372,
+    0x9249: 373,
+    0x9251: 374,
+    0x9253: 375,
+    0x9255: 376,
+    0x9261: 377,
+    0x9262: 378,
+    0x9265: 379,
+    0x9269: 380,
+    0x9273: 381,
+    0x9275: 382,
+    0x9277: 383,
+    0x9281: 384,
+    0x9282: 385,
+    0x9285: 386,
+    0x9288: 387,
+    0x9289: 388,
+    0x9291: 389,
+    0x9293: 390,
+    0x9295: 391,
+    0x9297: 392,
+    0x92A1: 393,
+    0x92B6: 394,
+    0x92C1: 395,
+    0x92E1: 396,
+    0x92E5: 397,
+    0x92E9: 398,
+    0x92F1: 399,
+    0x92F3: 400,
+    0x9341: 401,
+    0x9342: 402,
+    0x9349: 403,
+    0x9351: 404,
+    0x9353: 405,
+    0x9357: 406,
+    0x9361: 407,
+    0x9362: 408,
+    0x9365: 409,
+    0x9369: 410,
+    0x936A: 411,
+    0x936B: 412,
+    0x9371: 413,
+    0x9373: 414,
+    0x9375: 415,
+    0x9377: 416,
+    0x9378: 417,
+    0x937C: 418,
+    0x9381: 419,
+    0x9385: 420,
+    0x9389: 421,
+    0x93A1: 422,
+    0x93A2: 423,
+    0x93A5: 424,
+    0x93A9: 425,
+    0x93AB: 426,
+    0x93B1: 427,
+    0x93B3: 428,
+    0x93B5: 429,
+    0x93B7: 430,
+    0x93BC: 431,
+    0x9461: 432,
+    0x9462: 433,
+    0x9463: 434,
+    0x9465: 435,
+    0x9468: 436,
+    0x9469: 437,
+    0x946A: 438,
+    0x946B: 439,
+    0x946C: 440,
+    0x9470: 441,
+    0x9471: 442,
+    0x9473: 443,
+    0x9475: 444,
+    0x9476: 445,
+    0x9477: 446,
+    0x9478: 447,
+    0x9479: 448,
+    0x947D: 449,
+    0x9481: 450,
+    0x9482: 451,
+    0x9485: 452,
+    0x9489: 453,
+    0x9491: 454,
+    0x9493: 455,
+    0x9495: 456,
+    0x9496: 457,
+    0x9497: 458,
+    0x94A1: 459,
+    0x94E1: 460,
+    0x94E2: 461,
+    0x94E3: 462,
+    0x94E5: 463,
+    0x94E8: 464,
+    0x94E9: 465,
+    0x94EB: 466,
+    0x94EC: 467,
+    0x94F1: 468,
+    0x94F3: 469,
+    0x94F5: 470,
+    0x94F7: 471,
+    0x94F9: 472,
+    0x94FC: 473,
+    0x9541: 474,
+    0x9542: 475,
+    0x9545: 476,
+    0x9549: 477,
+    0x9551: 478,
+    0x9553: 479,
+    0x9555: 480,
+    0x9556: 481,
+    0x9557: 482,
+    0x9561: 483,
+    0x9565: 484,
+    0x9569: 485,
+    0x9576: 486,
+    0x9577: 487,
+    0x9581: 488,
+    0x9585: 489,
+    0x95A1: 490,
+    0x95A2: 491,
+    0x95A5: 492,
+    0x95A8: 493,
+    0x95A9: 494,
+    0x95AB: 495,
+    0x95AD: 496,
+    0x95B1: 497,
+    0x95B3: 498,
+    0x95B5: 499,
+    0x95B7: 500,
+    0x95B9: 501,
+    0x95BB: 502,
+    0x95C1: 503,
+    0x95C5: 504,
+    0x95C9: 505,
+    0x95E1: 506,
+    0x95F6: 507,
+    0x9641: 508,
+    0x9645: 509,
+    0x9649: 510,
+    0x9651: 511,
+    0x9653: 512,
+    0x9655: 513,
+    0x9661: 514,
+    0x9681: 515,
+    0x9682: 516,
+    0x9685: 517,
+    0x9689: 518,
+    0x9691: 519,
+    0x9693: 520,
+    0x9695: 521,
+    0x9697: 522,
+    0x96A1: 523,
+    0x96B6: 524,
+    0x96C1: 525,
+    0x96D7: 526,
+    0x96E1: 527,
+    0x96E5: 528,
+    0x96E9: 529,
+    0x96F3: 530,
+    0x96F5: 531,
+    0x96F7: 532,
+    0x9741: 533,
+    0x9745: 534,
+    0x9749: 535,
+    0x9751: 536,
+    0x9757: 537,
+    0x9761: 538,
+    0x9762: 539,
+    0x9765: 540,
+    0x9768: 541,
+    0x9769: 542,
+    0x976B: 543,
+    0x9771: 544,
+    0x9773: 545,
+    0x9775: 546,
+    0x9777: 547,
+    0x9781: 548,
+    0x97A1: 549,
+    0x97A2: 550,
+    0x97A5: 551,
+    0x97A8: 552,
+    0x97A9: 553,
+    0x97B1: 554,
+    0x97B3: 555,
+    0x97B5: 556,
+    0x97B6: 557,
+    0x97B7: 558,
+    0x97B8: 559,
+    0x9861: 560,
+    0x9862: 561,
+    0x9865: 562,
+    0x9869: 563,
+    0x9871: 564,
+    0x9873: 565,
+    0x9875: 566,
+    0x9876: 567,
+    0x9877: 568,
+    0x987D: 569,
+    0x9881: 570,
+    0x9882: 571,
+    0x9885: 572,
+    0x9889: 573,
+    0x9891: 574,
+    0x9893: 575,
+    0x9895: 576,
+    0x9896: 577,
+    0x9897: 578,
+    0x98E1: 579,
+    0x98E2: 580,
+    0x98E5: 581,
+    0x98E9: 582,
+    0x98EB: 583,
+    0x98EC: 584,
+    0x98F1: 585,
+    0x98F3: 586,
+    0x98F5: 587,
+    0x98F6: 588,
+    0x98F7: 589,
+    0x98FD: 590,
+    0x9941: 591,
+    0x9942: 592,
+    0x9945: 593,
+    0x9949: 594,
+    0x9951: 595,
+    0x9953: 596,
+    0x9955: 597,
+    0x9956: 598,
+    0x9957: 599,
+    0x9961: 600,
+    0x9976: 601,
+    0x99A1: 602,
+    0x99A2: 603,
+    0x99A5: 604,
+    0x99A9: 605,
+    0x99B7: 606,
+    0x99C1: 607,
+    0x99C9: 608,
+    0x99E1: 609,
+    0x9A41: 610,
+    0x9A45: 611,
+    0x9A81: 612,
+    0x9A82: 613,
+    0x9A85: 614,
+    0x9A89: 615,
+    0x9A90: 616,
+    0x9A91: 617,
+    0x9A97: 618,
+    0x9AC1: 619,
+    0x9AE1: 620,
+    0x9AE5: 621,
+    0x9AE9: 622,
+    0x9AF1: 623,
+    0x9AF3: 624,
+    0x9AF7: 625,
+    0x9B61: 626,
+    0x9B62: 627,
+    0x9B65: 628,
+    0x9B68: 629,
+    0x9B69: 630,
+    0x9B71: 631,
+    0x9B73: 632,
+    0x9B75: 633,
+    0x9B81: 634,
+    0x9B85: 635,
+    0x9B89: 636,
+    0x9B91: 637,
+    0x9B93: 638,
+    0x9BA1: 639,
+    0x9BA5: 640,
+    0x9BA9: 641,
+    0x9BB1: 642,
+    0x9BB3: 643,
+    0x9BB5: 644,
+    0x9BB7: 645,
+    0x9C61: 646,
+    0x9C62: 647,
+    0x9C65: 648,
+    0x9C69: 649,
+    0x9C71: 650,
+    0x9C73: 651,
+    0x9C75: 652,
+    0x9C76: 653,
+    0x9C77: 654,
+    0x9C78: 655,
+    0x9C7C: 656,
+    0x9C7D: 657,
+    0x9C81: 658,
+    0x9C82: 659,
+    0x9C85: 660,
+    0x9C89: 661,
+    0x9C91: 662,
+    0x9C93: 663,
+    0x9C95: 664,
+    0x9C96: 665,
+    0x9C97: 666,
+    0x9CA1: 667,
+    0x9CA2: 668,
+    0x9CA5: 669,
+    0x9CB5: 670,
+    0x9CB7: 671,
+    0x9CE1: 672,
+    0x9CE2: 673,
+    0x9CE5: 674,
+    0x9CE9: 675,
+    0x9CF1: 676,
+    0x9CF3: 677,
+    0x9CF5: 678,
+    0x9CF6: 679,
+    0x9CF7: 680,
+    0x9CFD: 681,
+    0x9D41: 682,
+    0x9D42: 683,
+    0x9D45: 684,
+    0x9D49: 685,
+    0x9D51: 686,
+    0x9D53: 687,
+    0x9D55: 688,
+    0x9D57: 689,
+    0x9D61: 690,
+    0x9D62: 691,
+    0x9D65: 692,
+    0x9D69: 693,
+    0x9D71: 694,
+    0x9D73: 695,
+    0x9D75: 696,
+    0x9D76: 697,
+    0x9D77: 698,
+    0x9D81: 699,
+    0x9D85: 700,
+    0x9D93: 701,
+    0x9D95: 702,
+    0x9DA1: 703,
+    0x9DA2: 704,
+    0x9DA5: 705,
+    0x9DA9: 706,
+    0x9DB1: 707,
+    0x9DB3: 708,
+    0x9DB5: 709,
+    0x9DB7: 710,
+    0x9DC1: 711,
+    0x9DC5: 712,
+    0x9DD7: 713,
+    0x9DF6: 714,
+    0x9E41: 715,
+    0x9E45: 716,
+    0x9E49: 717,
+    0x9E51: 718,
+    0x9E53: 719,
+    0x9E55: 720,
+    0x9E57: 721,
+    0x9E61: 722,
+    0x9E65: 723,
+    0x9E69: 724,
+    0x9E73: 725,
+    0x9E75: 726,
+    0x9E77: 727,
+    0x9E81: 728,
+    0x9E82: 729,
+    0x9E85: 730,
+    0x9E89: 731,
+    0x9E91: 732,
+    0x9E93: 733,
+    0x9E95: 734,
+    0x9E97: 735,
+    0x9EA1: 736,
+    0x9EB6: 737,
+    0x9EC1: 738,
+    0x9EE1: 739,
+    0x9EE2: 740,
+    0x9EE5: 741,
+    0x9EE9: 742,
+    0x9EF1: 743,
+    0x9EF5: 744,
+    0x9EF7: 745,
+    0x9F41: 746,
+    0x9F42: 747,
+    0x9F45: 748,
+    0x9F49: 749,
+    0x9F51: 750,
+    0x9F53: 751,
+    0x9F55: 752,
+    0x9F57: 753,
+    0x9F61: 754,
+    0x9F62: 755,
+    0x9F65: 756,
+    0x9F69: 757,
+    0x9F71: 758,
+    0x9F73: 759,
+    0x9F75: 760,
+    0x9F77: 761,
+    0x9F78: 762,
+    0x9F7B: 763,
+    0x9F7C: 764,
+    0x9FA1: 765,
+    0x9FA2: 766,
+    0x9FA5: 767,
+    0x9FA9: 768,
+    0x9FB1: 769,
+    0x9FB3: 770,
+    0x9FB5: 771,
+    0x9FB7: 772,
+    0xA061: 773,
+    0xA062: 774,
+    0xA065: 775,
+    0xA067: 776,
+    0xA068: 777,
+    0xA069: 778,
+    0xA06A: 779,
+    0xA06B: 780,
+    0xA071: 781,
+    0xA073: 782,
+    0xA075: 783,
+    0xA077: 784,
+    0xA078: 785,
+    0xA07B: 786,
+    0xA07D: 787,
+    0xA081: 788,
+    0xA082: 789,
+    0xA085: 790,
+    0xA089: 791,
+    0xA091: 792,
+    0xA093: 793,
+    0xA095: 794,
+    0xA096: 795,
+    0xA097: 796,
+    0xA098: 797,
+    0xA0A1: 798,
+    0xA0A2: 799,
+    0xA0A9: 800,
+    0xA0B7: 801,
+    0xA0E1: 802,
+    0xA0E2: 803,
+    0xA0E5: 804,
+    0xA0E9: 805,
+    0xA0EB: 806,
+    0xA0F1: 807,
+    0xA0F3: 808,
+    0xA0F5: 809,
+    0xA0F7: 810,
+    0xA0F8: 811,
+    0xA0FD: 812,
+    0xA141: 813,
+    0xA142: 814,
+    0xA145: 815,
+    0xA149: 816,
+    0xA151: 817,
+    0xA153: 818,
+    0xA155: 819,
+    0xA156: 820,
+    0xA157: 821,
+    0xA161: 822,
+    0xA162: 823,
+    0xA165: 824,
+    0xA169: 825,
+    0xA175: 826,
+    0xA176: 827,
+    0xA177: 828,
+    0xA179: 829,
+    0xA181: 830,
+    0xA1A1: 831,
+    0xA1A2: 832,
+    0xA1A4: 833,
+    0xA1A5: 834,
+    0xA1A9: 835,
+    0xA1AB: 836,
+    0xA1B1: 837,
+    0xA1B3: 838,
+    0xA1B5: 839,
+    0xA1B7: 840,
+    0xA1C1: 841,
+    0xA1C5: 842,
+    0xA1D6: 843,
+    0xA1D7: 844,
+    0xA241: 845,
+    0xA245: 846,
+    0xA249: 847,
+    0xA253: 848,
+    0xA255: 849,
+    0xA257: 850,
+    0xA261: 851,
+    0xA265: 852,
+    0xA269: 853,
+    0xA273: 854,
+    0xA275: 855,
+    0xA281: 856,
+    0xA282: 857,
+    0xA283: 858,
+    0xA285: 859,
+    0xA288: 860,
+    0xA289: 861,
+    0xA28A: 862,
+    0xA28B: 863,
+    0xA291: 864,
+    0xA293: 865,
+    0xA295: 866,
+    0xA297: 867,
+    0xA29B: 868,
+    0xA29D: 869,
+    0xA2A1: 870,
+    0xA2A5: 871,
+    0xA2A9: 872,
+    0xA2B3: 873,
+    0xA2B5: 874,
+    0xA2C1: 875,
+    0xA2E1: 876,
+    0xA2E5: 877,
+    0xA2E9: 878,
+    0xA341: 879,
+    0xA345: 880,
+    0xA349: 881,
+    0xA351: 882,
+    0xA355: 883,
+    0xA361: 884,
+    0xA365: 885,
+    0xA369: 886,
+    0xA371: 887,
+    0xA375: 888,
+    0xA3A1: 889,
+    0xA3A2: 890,
+    0xA3A5: 891,
+    0xA3A8: 892,
+    0xA3A9: 893,
+    0xA3AB: 894,
+    0xA3B1: 895,
+    0xA3B3: 896,
+    0xA3B5: 897,
+    0xA3B6: 898,
+    0xA3B7: 899,
+    0xA3B9: 900,
+    0xA3BB: 901,
+    0xA461: 902,
+    0xA462: 903,
+    0xA463: 904,
+    0xA464: 905,
+    0xA465: 906,
+    0xA468: 907,
+    0xA469: 908,
+    0xA46A: 909,
+    0xA46B: 910,
+    0xA46C: 911,
+    0xA471: 912,
+    0xA473: 913,
+    0xA475: 914,
+    0xA477: 915,
+    0xA47B: 916,
+    0xA481: 917,
+    0xA482: 918,
+    0xA485: 919,
+    0xA489: 920,
+    0xA491: 921,
+    0xA493: 922,
+    0xA495: 923,
+    0xA496: 924,
+    0xA497: 925,
+    0xA49B: 926,
+    0xA4A1: 927,
+    0xA4A2: 928,
+    0xA4A5: 929,
+    0xA4B3: 930,
+    0xA4E1: 931,
+    0xA4E2: 932,
+    0xA4E5: 933,
+    0xA4E8: 934,
+    0xA4E9: 935,
+    0xA4EB: 936,
+    0xA4F1: 937,
+    0xA4F3: 938,
+    0xA4F5: 939,
+    0xA4F7: 940,
+    0xA4F8: 941,
+    0xA541: 942,
+    0xA542: 943,
+    0xA545: 944,
+    0xA548: 945,
+    0xA549: 946,
+    0xA551: 947,
+    0xA553: 948,
+    0xA555: 949,
+    0xA556: 950,
+    0xA557: 951,
+    0xA561: 952,
+    0xA562: 953,
+    0xA565: 954,
+    0xA569: 955,
+    0xA573: 956,
+    0xA575: 957,
+    0xA576: 958,
+    0xA577: 959,
+    0xA57B: 960,
+    0xA581: 961,
+    0xA585: 962,
+    0xA5A1: 963,
+    0xA5A2: 964,
+    0xA5A3: 965,
+    0xA5A5: 966,
+    0xA5A9: 967,
+    0xA5B1: 968,
+    0xA5B3: 969,
+    0xA5B5: 970,
+    0xA5B7: 971,
+    0xA5C1: 972,
+    0xA5C5: 973,
+    0xA5D6: 974,
+    0xA5E1: 975,
+    0xA5F6: 976,
+    0xA641: 977,
+    0xA642: 978,
+    0xA645: 979,
+    0xA649: 980,
+    0xA651: 981,
+    0xA653: 982,
+    0xA661: 983,
+    0xA665: 984,
+    0xA681: 985,
+    0xA682: 986,
+    0xA685: 987,
+    0xA688: 988,
+    0xA689: 989,
+    0xA68A: 990,
+    0xA68B: 991,
+    0xA691: 992,
+    0xA693: 993,
+    0xA695: 994,
+    0xA697: 995,
+    0xA69B: 996,
+    0xA69C: 997,
+    0xA6A1: 998,
+    0xA6A9: 999,
+    0xA6B6: 1000,
+    0xA6C1: 1001,
+    0xA6E1: 1002,
+    0xA6E2: 1003,
+    0xA6E5: 1004,
+    0xA6E9: 1005,
+    0xA6F7: 1006,
+    0xA741: 1007,
+    0xA745: 1008,
+    0xA749: 1009,
+    0xA751: 1010,
+    0xA755: 1011,
+    0xA757: 1012,
+    0xA761: 1013,
+    0xA762: 1014,
+    0xA765: 1015,
+    0xA769: 1016,
+    0xA771: 1017,
+    0xA773: 1018,
+    0xA775: 1019,
+    0xA7A1: 1020,
+    0xA7A2: 1021,
+    0xA7A5: 1022,
+    0xA7A9: 1023,
+    0xA7AB: 1024,
+    0xA7B1: 1025,
+    0xA7B3: 1026,
+    0xA7B5: 1027,
+    0xA7B7: 1028,
+    0xA7B8: 1029,
+    0xA7B9: 1030,
+    0xA861: 1031,
+    0xA862: 1032,
+    0xA865: 1033,
+    0xA869: 1034,
+    0xA86B: 1035,
+    0xA871: 1036,
+    0xA873: 1037,
+    0xA875: 1038,
+    0xA876: 1039,
+    0xA877: 1040,
+    0xA87D: 1041,
+    0xA881: 1042,
+    0xA882: 1043,
+    0xA885: 1044,
+    0xA889: 1045,
+    0xA891: 1046,
+    0xA893: 1047,
+    0xA895: 1048,
+    0xA896: 1049,
+    0xA897: 1050,
+    0xA8A1: 1051,
+    0xA8A2: 1052,
+    0xA8B1: 1053,
+    0xA8E1: 1054,
+    0xA8E2: 1055,
+    0xA8E5: 1056,
+    0xA8E8: 1057,
+    0xA8E9: 1058,
+    0xA8F1: 1059,
+    0xA8F5: 1060,
+    0xA8F6: 1061,
+    0xA8F7: 1062,
+    0xA941: 1063,
+    0xA957: 1064,
+    0xA961: 1065,
+    0xA962: 1066,
+    0xA971: 1067,
+    0xA973: 1068,
+    0xA975: 1069,
+    0xA976: 1070,
+    0xA977: 1071,
+    0xA9A1: 1072,
+    0xA9A2: 1073,
+    0xA9A5: 1074,
+    0xA9A9: 1075,
+    0xA9B1: 1076,
+    0xA9B3: 1077,
+    0xA9B7: 1078,
+    0xAA41: 1079,
+    0xAA61: 1080,
+    0xAA77: 1081,
+    0xAA81: 1082,
+    0xAA82: 1083,
+    0xAA85: 1084,
+    0xAA89: 1085,
+    0xAA91: 1086,
+    0xAA95: 1087,
+    0xAA97: 1088,
+    0xAB41: 1089,
+    0xAB57: 1090,
+    0xAB61: 1091,
+    0xAB65: 1092,
+    0xAB69: 1093,
+    0xAB71: 1094,
+    0xAB73: 1095,
+    0xABA1: 1096,
+    0xABA2: 1097,
+    0xABA5: 1098,
+    0xABA9: 1099,
+    0xABB1: 1100,
+    0xABB3: 1101,
+    0xABB5: 1102,
+    0xABB7: 1103,
+    0xAC61: 1104,
+    0xAC62: 1105,
+    0xAC64: 1106,
+    0xAC65: 1107,
+    0xAC68: 1108,
+    0xAC69: 1109,
+    0xAC6A: 1110,
+    0xAC6B: 1111,
+    0xAC71: 1112,
+    0xAC73: 1113,
+    0xAC75: 1114,
+    0xAC76: 1115,
+    0xAC77: 1116,
+    0xAC7B: 1117,
+    0xAC81: 1118,
+    0xAC82: 1119,
+    0xAC85: 1120,
+    0xAC89: 1121,
+    0xAC91: 1122,
+    0xAC93: 1123,
+    0xAC95: 1124,
+    0xAC96: 1125,
+    0xAC97: 1126,
+    0xACA1: 1127,
+    0xACA2: 1128,
+    0xACA5: 1129,
+    0xACA9: 1130,
+    0xACB1: 1131,
+    0xACB3: 1132,
+    0xACB5: 1133,
+    0xACB7: 1134,
+    0xACC1: 1135,
+    0xACC5: 1136,
+    0xACC9: 1137,
+    0xACD1: 1138,
+    0xACD7: 1139,
+    0xACE1: 1140,
+    0xACE2: 1141,
+    0xACE3: 1142,
+    0xACE4: 1143,
+    0xACE5: 1144,
+    0xACE8: 1145,
+    0xACE9: 1146,
+    0xACEB: 1147,
+    0xACEC: 1148,
+    0xACF1: 1149,
+    0xACF3: 1150,
+    0xACF5: 1151,
+    0xACF6: 1152,
+    0xACF7: 1153,
+    0xACFC: 1154,
+    0xAD41: 1155,
+    0xAD42: 1156,
+    0xAD45: 1157,
+    0xAD49: 1158,
+    0xAD51: 1159,
+    0xAD53: 1160,
+    0xAD55: 1161,
+    0xAD56: 1162,
+    0xAD57: 1163,
+    0xAD61: 1164,
+    0xAD62: 1165,
+    0xAD65: 1166,
+    0xAD69: 1167,
+    0xAD71: 1168,
+    0xAD73: 1169,
+    0xAD75: 1170,
+    0xAD76: 1171,
+    0xAD77: 1172,
+    0xAD81: 1173,
+    0xAD85: 1174,
+    0xAD89: 1175,
+    0xAD97: 1176,
+    0xADA1: 1177,
+    0xADA2: 1178,
+    0xADA3: 1179,
+    0xADA5: 1180,
+    0xADA9: 1181,
+    0xADAB: 1182,
+    0xADB1: 1183,
+    0xADB3: 1184,
+    0xADB5: 1185,
+    0xADB7: 1186,
+    0xADBB: 1187,
+    0xADC1: 1188,
+    0xADC2: 1189,
+    0xADC5: 1190,
+    0xADC9: 1191,
+    0xADD7: 1192,
+    0xADE1: 1193,
+    0xADE5: 1194,
+    0xADE9: 1195,
+    0xADF1: 1196,
+    0xADF5: 1197,
+    0xADF6: 1198,
+    0xAE41: 1199,
+    0xAE45: 1200,
+    0xAE49: 1201,
+    0xAE51: 1202,
+    0xAE53: 1203,
+    0xAE55: 1204,
+    0xAE61: 1205,
+    0xAE62: 1206,
+    0xAE65: 1207,
+    0xAE69: 1208,
+    0xAE71: 1209,
+    0xAE73: 1210,
+    0xAE75: 1211,
+    0xAE77: 1212,
+    0xAE81: 1213,
+    0xAE82: 1214,
+    0xAE85: 1215,
+    0xAE88: 1216,
+    0xAE89: 1217,
+    0xAE91: 1218,
+    0xAE93: 1219,
+    0xAE95: 1220,
+    0xAE97: 1221,
+    0xAE99: 1222,
+    0xAE9B: 1223,
+    0xAE9C: 1224,
+    0xAEA1: 1225,
+    0xAEB6: 1226,
+    0xAEC1: 1227,
+    0xAEC2: 1228,
+    0xAEC5: 1229,
+    0xAEC9: 1230,
+    0xAED1: 1231,
+    0xAED7: 1232,
+    0xAEE1: 1233,
+    0xAEE2: 1234,
+    0xAEE5: 1235,
+    0xAEE9: 1236,
+    0xAEF1: 1237,
+    0xAEF3: 1238,
+    0xAEF5: 1239,
+    0xAEF7: 1240,
+    0xAF41: 1241,
+    0xAF42: 1242,
+    0xAF49: 1243,
+    0xAF51: 1244,
+    0xAF55: 1245,
+    0xAF57: 1246,
+    0xAF61: 1247,
+    0xAF62: 1248,
+    0xAF65: 1249,
+    0xAF69: 1250,
+    0xAF6A: 1251,
+    0xAF71: 1252,
+    0xAF73: 1253,
+    0xAF75: 1254,
+    0xAF77: 1255,
+    0xAFA1: 1256,
+    0xAFA2: 1257,
+    0xAFA5: 1258,
+    0xAFA8: 1259,
+    0xAFA9: 1260,
+    0xAFB0: 1261,
+    0xAFB1: 1262,
+    0xAFB3: 1263,
+    0xAFB5: 1264,
+    0xAFB7: 1265,
+    0xAFBC: 1266,
+    0xB061: 1267,
+    0xB062: 1268,
+    0xB064: 1269,
+    0xB065: 1270,
+    0xB069: 1271,
+    0xB071: 1272,
+    0xB073: 1273,
+    0xB076: 1274,
+    0xB077: 1275,
+    0xB07D: 1276,
+    0xB081: 1277,
+    0xB082: 1278,
+    0xB085: 1279,
+    0xB089: 1280,
+    0xB091: 1281,
+    0xB093: 1282,
+    0xB096: 1283,
+    0xB097: 1284,
+    0xB0B7: 1285,
+    0xB0E1: 1286,
+    0xB0E2: 1287,
+    0xB0E5: 1288,
+    0xB0E9: 1289,
+    0xB0EB: 1290,
+    0xB0F1: 1291,
+    0xB0F3: 1292,
+    0xB0F6: 1293,
+    0xB0F7: 1294,
+    0xB141: 1295,
+    0xB145: 1296,
+    0xB149: 1297,
+    0xB185: 1298,
+    0xB1A1: 1299,
+    0xB1A2: 1300,
+    0xB1A5: 1301,
+    0xB1A8: 1302,
+    0xB1A9: 1303,
+    0xB1AB: 1304,
+    0xB1B1: 1305,
+    0xB1B3: 1306,
+    0xB1B7: 1307,
+    0xB1C1: 1308,
+    0xB1C2: 1309,
+    0xB1C5: 1310,
+    0xB1D6: 1311,
+    0xB1E1: 1312,
+    0xB1F6: 1313,
+    0xB241: 1314,
+    0xB245: 1315,
+    0xB249: 1316,
+    0xB251: 1317,
+    0xB253: 1318,
+    0xB261: 1319,
+    0xB281: 1320,
+    0xB282: 1321,
+    0xB285: 1322,
+    0xB289: 1323,
+    0xB291: 1324,
+    0xB293: 1325,
+    0xB297: 1326,
+    0xB2A1: 1327,
+    0xB2B6: 1328,
+    0xB2C1: 1329,
+    0xB2E1: 1330,
+    0xB2E5: 1331,
+    0xB357: 1332,
+    0xB361: 1333,
+    0xB362: 1334,
+    0xB365: 1335,
+    0xB369: 1336,
+    0xB36B: 1337,
+    0xB370: 1338,
+    0xB371: 1339,
+    0xB373: 1340,
+    0xB381: 1341,
+    0xB385: 1342,
+    0xB389: 1343,
+    0xB391: 1344,
+    0xB3A1: 1345,
+    0xB3A2: 1346,
+    0xB3A5: 1347,
+    0xB3A9: 1348,
+    0xB3B1: 1349,
+    0xB3B3: 1350,
+    0xB3B5: 1351,
+    0xB3B7: 1352,
+    0xB461: 1353,
+    0xB462: 1354,
+    0xB465: 1355,
+    0xB466: 1356,
+    0xB467: 1357,
+    0xB469: 1358,
+    0xB46A: 1359,
+    0xB46B: 1360,
+    0xB470: 1361,
+    0xB471: 1362,
+    0xB473: 1363,
+    0xB475: 1364,
+    0xB476: 1365,
+    0xB477: 1366,
+    0xB47B: 1367,
+    0xB47C: 1368,
+    0xB481: 1369,
+    0xB482: 1370,
+    0xB485: 1371,
+    0xB489: 1372,
+    0xB491: 1373,
+    0xB493: 1374,
+    0xB495: 1375,
+    0xB496: 1376,
+    0xB497: 1377,
+    0xB4A1: 1378,
+    0xB4A2: 1379,
+    0xB4A5: 1380,
+    0xB4A9: 1381,
+    0xB4AC: 1382,
+    0xB4B1: 1383,
+    0xB4B3: 1384,
+    0xB4B5: 1385,
+    0xB4B7: 1386,
+    0xB4BB: 1387,
+    0xB4BD: 1388,
+    0xB4C1: 1389,
+    0xB4C5: 1390,
+    0xB4C9: 1391,
+    0xB4D3: 1392,
+    0xB4E1: 1393,
+    0xB4E2: 1394,
+    0xB4E5: 1395,
+    0xB4E6: 1396,
+    0xB4E8: 1397,
+    0xB4E9: 1398,
+    0xB4EA: 1399,
+    0xB4EB: 1400,
+    0xB4F1: 1401,
+    0xB4F3: 1402,
+    0xB4F4: 1403,
+    0xB4F5: 1404,
+    0xB4F6: 1405,
+    0xB4F7: 1406,
+    0xB4F8: 1407,
+    0xB4FA: 1408,
+    0xB4FC: 1409,
+    0xB541: 1410,
+    0xB542: 1411,
+    0xB545: 1412,
+    0xB549: 1413,
+    0xB551: 1414,
+    0xB553: 1415,
+    0xB555: 1416,
+    0xB557: 1417,
+    0xB561: 1418,
+    0xB562: 1419,
+    0xB563: 1420,
+    0xB565: 1421,
+    0xB569: 1422,
+    0xB56B: 1423,
+    0xB56C: 1424,
+    0xB571: 1425,
+    0xB573: 1426,
+    0xB574: 1427,
+    0xB575: 1428,
+    0xB576: 1429,
+    0xB577: 1430,
+    0xB57B: 1431,
+    0xB57C: 1432,
+    0xB57D: 1433,
+    0xB581: 1434,
+    0xB585: 1435,
+    0xB589: 1436,
+    0xB591: 1437,
+    0xB593: 1438,
+    0xB595: 1439,
+    0xB596: 1440,
+    0xB5A1: 1441,
+    0xB5A2: 1442,
+    0xB5A5: 1443,
+    0xB5A9: 1444,
+    0xB5AA: 1445,
+    0xB5AB: 1446,
+    0xB5AD: 1447,
+    0xB5B0: 1448,
+    0xB5B1: 1449,
+    0xB5B3: 1450,
+    0xB5B5: 1451,
+    0xB5B7: 1452,
+    0xB5B9: 1453,
+    0xB5C1: 1454,
+    0xB5C2: 1455,
+    0xB5C5: 1456,
+    0xB5C9: 1457,
+    0xB5D1: 1458,
+    0xB5D3: 1459,
+    0xB5D5: 1460,
+    0xB5D6: 1461,
+    0xB5D7: 1462,
+    0xB5E1: 1463,
+    0xB5E2: 1464,
+    0xB5E5: 1465,
+    0xB5F1: 1466,
+    0xB5F5: 1467,
+    0xB5F7: 1468,
+    0xB641: 1469,
+    0xB642: 1470,
+    0xB645: 1471,
+    0xB649: 1472,
+    0xB651: 1473,
+    0xB653: 1474,
+    0xB655: 1475,
+    0xB657: 1476,
+    0xB661: 1477,
+    0xB662: 1478,
+    0xB665: 1479,
+    0xB669: 1480,
+    0xB671: 1481,
+    0xB673: 1482,
+    0xB675: 1483,
+    0xB677: 1484,
+    0xB681: 1485,
+    0xB682: 1486,
+    0xB685: 1487,
+    0xB689: 1488,
+    0xB68A: 1489,
+    0xB68B: 1490,
+    0xB691: 1491,
+    0xB693: 1492,
+    0xB695: 1493,
+    0xB697: 1494,
+    0xB6A1: 1495,
+    0xB6A2: 1496,
+    0xB6A5: 1497,
+    0xB6A9: 1498,
+    0xB6B1: 1499,
+    0xB6B3: 1500,
+    0xB6B6: 1501,
+    0xB6B7: 1502,
+    0xB6C1: 1503,
+    0xB6C2: 1504,
+    0xB6C5: 1505,
+    0xB6C9: 1506,
+    0xB6D1: 1507,
+    0xB6D3: 1508,
+    0xB6D7: 1509,
+    0xB6E1: 1510,
+    0xB6E2: 1511,
+    0xB6E5: 1512,
+    0xB6E9: 1513,
+    0xB6F1: 1514,
+    0xB6F3: 1515,
+    0xB6F5: 1516,
+    0xB6F7: 1517,
+    0xB741: 1518,
+    0xB742: 1519,
+    0xB745: 1520,
+    0xB749: 1521,
+    0xB751: 1522,
+    0xB753: 1523,
+    0xB755: 1524,
+    0xB757: 1525,
+    0xB759: 1526,
+    0xB761: 1527,
+    0xB762: 1528,
+    0xB765: 1529,
+    0xB769: 1530,
+    0xB76F: 1531,
+    0xB771: 1532,
+    0xB773: 1533,
+    0xB775: 1534,
+    0xB777: 1535,
+    0xB778: 1536,
+    0xB779: 1537,
+    0xB77A: 1538,
+    0xB77B: 1539,
+    0xB77C: 1540,
+    0xB77D: 1541,
+    0xB781: 1542,
+    0xB785: 1543,
+    0xB789: 1544,
+    0xB791: 1545,
+    0xB795: 1546,
+    0xB7A1: 1547,
+    0xB7A2: 1548,
+    0xB7A5: 1549,
+    0xB7A9: 1550,
+    0xB7AA: 1551,
+    0xB7AB: 1552,
+    0xB7B0: 1553,
+    0xB7B1: 1554,
+    0xB7B3: 1555,
+    0xB7B5: 1556,
+    0xB7B6: 1557,
+    0xB7B7: 1558,
+    0xB7B8: 1559,
+    0xB7BC: 1560,
+    0xB861: 1561,
+    0xB862: 1562,
+    0xB865: 1563,
+    0xB867: 1564,
+    0xB868: 1565,
+    0xB869: 1566,
+    0xB86B: 1567,
+    0xB871: 1568,
+    0xB873: 1569,
+    0xB875: 1570,
+    0xB876: 1571,
+    0xB877: 1572,
+    0xB878: 1573,
+    0xB881: 1574,
+    0xB882: 1575,
+    0xB885: 1576,
+    0xB889: 1577,
+    0xB891: 1578,
+    0xB893: 1579,
+    0xB895: 1580,
+    0xB896: 1581,
+    0xB897: 1582,
+    0xB8A1: 1583,
+    0xB8A2: 1584,
+    0xB8A5: 1585,
+    0xB8A7: 1586,
+    0xB8A9: 1587,
+    0xB8B1: 1588,
+    0xB8B7: 1589,
+    0xB8C1: 1590,
+    0xB8C5: 1591,
+    0xB8C9: 1592,
+    0xB8E1: 1593,
+    0xB8E2: 1594,
+    0xB8E5: 1595,
+    0xB8E9: 1596,
+    0xB8EB: 1597,
+    0xB8F1: 1598,
+    0xB8F3: 1599,
+    0xB8F5: 1600,
+    0xB8F7: 1601,
+    0xB8F8: 1602,
+    0xB941: 1603,
+    0xB942: 1604,
+    0xB945: 1605,
+    0xB949: 1606,
+    0xB951: 1607,
+    0xB953: 1608,
+    0xB955: 1609,
+    0xB957: 1610,
+    0xB961: 1611,
+    0xB965: 1612,
+    0xB969: 1613,
+    0xB971: 1614,
+    0xB973: 1615,
+    0xB976: 1616,
+    0xB977: 1617,
+    0xB981: 1618,
+    0xB9A1: 1619,
+    0xB9A2: 1620,
+    0xB9A5: 1621,
+    0xB9A9: 1622,
+    0xB9AB: 1623,
+    0xB9B1: 1624,
+    0xB9B3: 1625,
+    0xB9B5: 1626,
+    0xB9B7: 1627,
+    0xB9B8: 1628,
+    0xB9B9: 1629,
+    0xB9BD: 1630,
+    0xB9C1: 1631,
+    0xB9C2: 1632,
+    0xB9C9: 1633,
+    0xB9D3: 1634,
+    0xB9D5: 1635,
+    0xB9D7: 1636,
+    0xB9E1: 1637,
+    0xB9F6: 1638,
+    0xB9F7: 1639,
+    0xBA41: 1640,
+    0xBA45: 1641,
+    0xBA49: 1642,
+    0xBA51: 1643,
+    0xBA53: 1644,
+    0xBA55: 1645,
+    0xBA57: 1646,
+    0xBA61: 1647,
+    0xBA62: 1648,
+    0xBA65: 1649,
+    0xBA77: 1650,
+    0xBA81: 1651,
+    0xBA82: 1652,
+    0xBA85: 1653,
+    0xBA89: 1654,
+    0xBA8A: 1655,
+    0xBA8B: 1656,
+    0xBA91: 1657,
+    0xBA93: 1658,
+    0xBA95: 1659,
+    0xBA97: 1660,
+    0xBAA1: 1661,
+    0xBAB6: 1662,
+    0xBAC1: 1663,
+    0xBAE1: 1664,
+    0xBAE2: 1665,
+    0xBAE5: 1666,
+    0xBAE9: 1667,
+    0xBAF1: 1668,
+    0xBAF3: 1669,
+    0xBAF5: 1670,
+    0xBB41: 1671,
+    0xBB45: 1672,
+    0xBB49: 1673,
+    0xBB51: 1674,
+    0xBB61: 1675,
+    0xBB62: 1676,
+    0xBB65: 1677,
+    0xBB69: 1678,
+    0xBB71: 1679,
+    0xBB73: 1680,
+    0xBB75: 1681,
+    0xBB77: 1682,
+    0xBBA1: 1683,
+    0xBBA2: 1684,
+    0xBBA5: 1685,
+    0xBBA8: 1686,
+    0xBBA9: 1687,
+    0xBBAB: 1688,
+    0xBBB1: 1689,
+    0xBBB3: 1690,
+    0xBBB5: 1691,
+    0xBBB7: 1692,
+    0xBBB8: 1693,
+    0xBBBB: 1694,
+    0xBBBC: 1695,
+    0xBC61: 1696,
+    0xBC62: 1697,
+    0xBC65: 1698,
+    0xBC67: 1699,
+    0xBC69: 1700,
+    0xBC6C: 1701,
+    0xBC71: 1702,
+    0xBC73: 1703,
+    0xBC75: 1704,
+    0xBC76: 1705,
+    0xBC77: 1706,
+    0xBC81: 1707,
+    0xBC82: 1708,
+    0xBC85: 1709,
+    0xBC89: 1710,
+    0xBC91: 1711,
+    0xBC93: 1712,
+    0xBC95: 1713,
+    0xBC96: 1714,
+    0xBC97: 1715,
+    0xBCA1: 1716,
+    0xBCA5: 1717,
+    0xBCB7: 1718,
+    0xBCE1: 1719,
+    0xBCE2: 1720,
+    0xBCE5: 1721,
+    0xBCE9: 1722,
+    0xBCF1: 1723,
+    0xBCF3: 1724,
+    0xBCF5: 1725,
+    0xBCF6: 1726,
+    0xBCF7: 1727,
+    0xBD41: 1728,
+    0xBD57: 1729,
+    0xBD61: 1730,
+    0xBD76: 1731,
+    0xBDA1: 1732,
+    0xBDA2: 1733,
+    0xBDA5: 1734,
+    0xBDA9: 1735,
+    0xBDB1: 1736,
+    0xBDB3: 1737,
+    0xBDB5: 1738,
+    0xBDB7: 1739,
+    0xBDB9: 1740,
+    0xBDC1: 1741,
+    0xBDC2: 1742,
+    0xBDC9: 1743,
+    0xBDD6: 1744,
+    0xBDE1: 1745,
+    0xBDF6: 1746,
+    0xBE41: 1747,
+    0xBE45: 1748,
+    0xBE49: 1749,
+    0xBE51: 1750,
+    0xBE53: 1751,
+    0xBE77: 1752,
+    0xBE81: 1753,
+    0xBE82: 1754,
+    0xBE85: 1755,
+    0xBE89: 1756,
+    0xBE91: 1757,
+    0xBE93: 1758,
+    0xBE97: 1759,
+    0xBEA1: 1760,
+    0xBEB6: 1761,
+    0xBEB7: 1762,
+    0xBEE1: 1763,
+    0xBF41: 1764,
+    0xBF61: 1765,
+    0xBF71: 1766,
+    0xBF75: 1767,
+    0xBF77: 1768,
+    0xBFA1: 1769,
+    0xBFA2: 1770,
+    0xBFA5: 1771,
+    0xBFA9: 1772,
+    0xBFB1: 1773,
+    0xBFB3: 1774,
+    0xBFB7: 1775,
+    0xBFB8: 1776,
+    0xBFBD: 1777,
+    0xC061: 1778,
+    0xC062: 1779,
+    0xC065: 1780,
+    0xC067: 1781,
+    0xC069: 1782,
+    0xC071: 1783,
+    0xC073: 1784,
+    0xC075: 1785,
+    0xC076: 1786,
+    0xC077: 1787,
+    0xC078: 1788,
+    0xC081: 1789,
+    0xC082: 1790,
+    0xC085: 1791,
+    0xC089: 1792,
+    0xC091: 1793,
+    0xC093: 1794,
+    0xC095: 1795,
+    0xC096: 1796,
+    0xC097: 1797,
+    0xC0A1: 1798,
+    0xC0A5: 1799,
+    0xC0A7: 1800,
+    0xC0A9: 1801,
+    0xC0B1: 1802,
+    0xC0B7: 1803,
+    0xC0E1: 1804,
+    0xC0E2: 1805,
+    0xC0E5: 1806,
+    0xC0E9: 1807,
+    0xC0F1: 1808,
+    0xC0F3: 1809,
+    0xC0F5: 1810,
+    0xC0F6: 1811,
+    0xC0F7: 1812,
+    0xC141: 1813,
+    0xC142: 1814,
+    0xC145: 1815,
+    0xC149: 1816,
+    0xC151: 1817,
+    0xC153: 1818,
+    0xC155: 1819,
+    0xC157: 1820,
+    0xC161: 1821,
+    0xC165: 1822,
+    0xC176: 1823,
+    0xC181: 1824,
+    0xC185: 1825,
+    0xC197: 1826,
+    0xC1A1: 1827,
+    0xC1A2: 1828,
+    0xC1A5: 1829,
+    0xC1A9: 1830,
+    0xC1B1: 1831,
+    0xC1B3: 1832,
+    0xC1B5: 1833,
+    0xC1B7: 1834,
+    0xC1C1: 1835,
+    0xC1C5: 1836,
+    0xC1C9: 1837,
+    0xC1D7: 1838,
+    0xC241: 1839,
+    0xC245: 1840,
+    0xC249: 1841,
+    0xC251: 1842,
+    0xC253: 1843,
+    0xC255: 1844,
+    0xC257: 1845,
+    0xC261: 1846,
+    0xC271: 1847,
+    0xC281: 1848,
+    0xC282: 1849,
+    0xC285: 1850,
+    0xC289: 1851,
+    0xC291: 1852,
+    0xC293: 1853,
+    0xC295: 1854,
+    0xC297: 1855,
+    0xC2A1: 1856,
+    0xC2B6: 1857,
+    0xC2C1: 1858,
+    0xC2C5: 1859,
+    0xC2E1: 1860,
+    0xC2E5: 1861,
+    0xC2E9: 1862,
+    0xC2F1: 1863,
+    0xC2F3: 1864,
+    0xC2F5: 1865,
+    0xC2F7: 1866,
+    0xC341: 1867,
+    0xC345: 1868,
+    0xC349: 1869,
+    0xC351: 1870,
+    0xC357: 1871,
+    0xC361: 1872,
+    0xC362: 1873,
+    0xC365: 1874,
+    0xC369: 1875,
+    0xC371: 1876,
+    0xC373: 1877,
+    0xC375: 1878,
+    0xC377: 1879,
+    0xC3A1: 1880,
+    0xC3A2: 1881,
+    0xC3A5: 1882,
+    0xC3A8: 1883,
+    0xC3A9: 1884,
+    0xC3AA: 1885,
+    0xC3B1: 1886,
+    0xC3B3: 1887,
+    0xC3B5: 1888,
+    0xC3B7: 1889,
+    0xC461: 1890,
+    0xC462: 1891,
+    0xC465: 1892,
+    0xC469: 1893,
+    0xC471: 1894,
+    0xC473: 1895,
+    0xC475: 1896,
+    0xC477: 1897,
+    0xC481: 1898,
+    0xC482: 1899,
+    0xC485: 1900,
+    0xC489: 1901,
+    0xC491: 1902,
+    0xC493: 1903,
+    0xC495: 1904,
+    0xC496: 1905,
+    0xC497: 1906,
+    0xC4A1: 1907,
+    0xC4A2: 1908,
+    0xC4B7: 1909,
+    0xC4E1: 1910,
+    0xC4E2: 1911,
+    0xC4E5: 1912,
+    0xC4E8: 1913,
+    0xC4E9: 1914,
+    0xC4F1: 1915,
+    0xC4F3: 1916,
+    0xC4F5: 1917,
+    0xC4F6: 1918,
+    0xC4F7: 1919,
+    0xC541: 1920,
+    0xC542: 1921,
+    0xC545: 1922,
+    0xC549: 1923,
+    0xC551: 1924,
+    0xC553: 1925,
+    0xC555: 1926,
+    0xC557: 1927,
+    0xC561: 1928,
+    0xC565: 1929,
+    0xC569: 1930,
+    0xC571: 1931,
+    0xC573: 1932,
+    0xC575: 1933,
+    0xC576: 1934,
+    0xC577: 1935,
+    0xC581: 1936,
+    0xC5A1: 1937,
+    0xC5A2: 1938,
+    0xC5A5: 1939,
+    0xC5A9: 1940,
+    0xC5B1: 1941,
+    0xC5B3: 1942,
+    0xC5B5: 1943,
+    0xC5B7: 1944,
+    0xC5C1: 1945,
+    0xC5C2: 1946,
+    0xC5C5: 1947,
+    0xC5C9: 1948,
+    0xC5D1: 1949,
+    0xC5D7: 1950,
+    0xC5E1: 1951,
+    0xC5F7: 1952,
+    0xC641: 1953,
+    0xC649: 1954,
+    0xC661: 1955,
+    0xC681: 1956,
+    0xC682: 1957,
+    0xC685: 1958,
+    0xC689: 1959,
+    0xC691: 1960,
+    0xC693: 1961,
+    0xC695: 1962,
+    0xC697: 1963,
+    0xC6A1: 1964,
+    0xC6A5: 1965,
+    0xC6A9: 1966,
+    0xC6B7: 1967,
+    0xC6C1: 1968,
+    0xC6D7: 1969,
+    0xC6E1: 1970,
+    0xC6E2: 1971,
+    0xC6E5: 1972,
+    0xC6E9: 1973,
+    0xC6F1: 1974,
+    0xC6F3: 1975,
+    0xC6F5: 1976,
+    0xC6F7: 1977,
+    0xC741: 1978,
+    0xC745: 1979,
+    0xC749: 1980,
+    0xC751: 1981,
+    0xC761: 1982,
+    0xC762: 1983,
+    0xC765: 1984,
+    0xC769: 1985,
+    0xC771: 1986,
+    0xC773: 1987,
+    0xC777: 1988,
+    0xC7A1: 1989,
+    0xC7A2: 1990,
+    0xC7A5: 1991,
+    0xC7A9: 1992,
+    0xC7B1: 1993,
+    0xC7B3: 1994,
+    0xC7B5: 1995,
+    0xC7B7: 1996,
+    0xC861: 1997,
+    0xC862: 1998,
+    0xC865: 1999,
+    0xC869: 2000,
+    0xC86A: 2001,
+    0xC871: 2002,
+    0xC873: 2003,
+    0xC875: 2004,
+    0xC876: 2005,
+    0xC877: 2006,
+    0xC881: 2007,
+    0xC882: 2008,
+    0xC885: 2009,
+    0xC889: 2010,
+    0xC891: 2011,
+    0xC893: 2012,
+    0xC895: 2013,
+    0xC896: 2014,
+    0xC897: 2015,
+    0xC8A1: 2016,
+    0xC8B7: 2017,
+    0xC8E1: 2018,
+    0xC8E2: 2019,
+    0xC8E5: 2020,
+    0xC8E9: 2021,
+    0xC8EB: 2022,
+    0xC8F1: 2023,
+    0xC8F3: 2024,
+    0xC8F5: 2025,
+    0xC8F6: 2026,
+    0xC8F7: 2027,
+    0xC941: 2028,
+    0xC942: 2029,
+    0xC945: 2030,
+    0xC949: 2031,
+    0xC951: 2032,
+    0xC953: 2033,
+    0xC955: 2034,
+    0xC957: 2035,
+    0xC961: 2036,
+    0xC965: 2037,
+    0xC976: 2038,
+    0xC981: 2039,
+    0xC985: 2040,
+    0xC9A1: 2041,
+    0xC9A2: 2042,
+    0xC9A5: 2043,
+    0xC9A9: 2044,
+    0xC9B1: 2045,
+    0xC9B3: 2046,
+    0xC9B5: 2047,
+    0xC9B7: 2048,
+    0xC9BC: 2049,
+    0xC9C1: 2050,
+    0xC9C5: 2051,
+    0xC9E1: 2052,
+    0xCA41: 2053,
+    0xCA45: 2054,
+    0xCA55: 2055,
+    0xCA57: 2056,
+    0xCA61: 2057,
+    0xCA81: 2058,
+    0xCA82: 2059,
+    0xCA85: 2060,
+    0xCA89: 2061,
+    0xCA91: 2062,
+    0xCA93: 2063,
+    0xCA95: 2064,
+    0xCA97: 2065,
+    0xCAA1: 2066,
+    0xCAB6: 2067,
+    0xCAC1: 2068,
+    0xCAE1: 2069,
+    0xCAE2: 2070,
+    0xCAE5: 2071,
+    0xCAE9: 2072,
+    0xCAF1: 2073,
+    0xCAF3: 2074,
+    0xCAF7: 2075,
+    0xCB41: 2076,
+    0xCB45: 2077,
+    0xCB49: 2078,
+    0xCB51: 2079,
+    0xCB57: 2080,
+    0xCB61: 2081,
+    0xCB62: 2082,
+    0xCB65: 2083,
+    0xCB68: 2084,
+    0xCB69: 2085,
+    0xCB6B: 2086,
+    0xCB71: 2087,
+    0xCB73: 2088,
+    0xCB75: 2089,
+    0xCB81: 2090,
+    0xCB85: 2091,
+    0xCB89: 2092,
+    0xCB91: 2093,
+    0xCB93: 2094,
+    0xCBA1: 2095,
+    0xCBA2: 2096,
+    0xCBA5: 2097,
+    0xCBA9: 2098,
+    0xCBB1: 2099,
+    0xCBB3: 2100,
+    0xCBB5: 2101,
+    0xCBB7: 2102,
+    0xCC61: 2103,
+    0xCC62: 2104,
+    0xCC63: 2105,
+    0xCC65: 2106,
+    0xCC69: 2107,
+    0xCC6B: 2108,
+    0xCC71: 2109,
+    0xCC73: 2110,
+    0xCC75: 2111,
+    0xCC76: 2112,
+    0xCC77: 2113,
+    0xCC7B: 2114,
+    0xCC81: 2115,
+    0xCC82: 2116,
+    0xCC85: 2117,
+    0xCC89: 2118,
+    0xCC91: 2119,
+    0xCC93: 2120,
+    0xCC95: 2121,
+    0xCC96: 2122,
+    0xCC97: 2123,
+    0xCCA1: 2124,
+    0xCCA2: 2125,
+    0xCCE1: 2126,
+    0xCCE2: 2127,
+    0xCCE5: 2128,
+    0xCCE9: 2129,
+    0xCCF1: 2130,
+    0xCCF3: 2131,
+    0xCCF5: 2132,
+    0xCCF6: 2133,
+    0xCCF7: 2134,
+    0xCD41: 2135,
+    0xCD42: 2136,
+    0xCD45: 2137,
+    0xCD49: 2138,
+    0xCD51: 2139,
+    0xCD53: 2140,
+    0xCD55: 2141,
+    0xCD57: 2142,
+    0xCD61: 2143,
+    0xCD65: 2144,
+    0xCD69: 2145,
+    0xCD71: 2146,
+    0xCD73: 2147,
+    0xCD76: 2148,
+    0xCD77: 2149,
+    0xCD81: 2150,
+    0xCD89: 2151,
+    0xCD93: 2152,
+    0xCD95: 2153,
+    0xCDA1: 2154,
+    0xCDA2: 2155,
+    0xCDA5: 2156,
+    0xCDA9: 2157,
+    0xCDB1: 2158,
+    0xCDB3: 2159,
+    0xCDB5: 2160,
+    0xCDB7: 2161,
+    0xCDC1: 2162,
+    0xCDD7: 2163,
+    0xCE41: 2164,
+    0xCE45: 2165,
+    0xCE61: 2166,
+    0xCE65: 2167,
+    0xCE69: 2168,
+    0xCE73: 2169,
+    0xCE75: 2170,
+    0xCE81: 2171,
+    0xCE82: 2172,
+    0xCE85: 2173,
+    0xCE88: 2174,
+    0xCE89: 2175,
+    0xCE8B: 2176,
+    0xCE91: 2177,
+    0xCE93: 2178,
+    0xCE95: 2179,
+    0xCE97: 2180,
+    0xCEA1: 2181,
+    0xCEB7: 2182,
+    0xCEE1: 2183,
+    0xCEE5: 2184,
+    0xCEE9: 2185,
+    0xCEF1: 2186,
+    0xCEF5: 2187,
+    0xCF41: 2188,
+    0xCF45: 2189,
+    0xCF49: 2190,
+    0xCF51: 2191,
+    0xCF55: 2192,
+    0xCF57: 2193,
+    0xCF61: 2194,
+    0xCF65: 2195,
+    0xCF69: 2196,
+    0xCF71: 2197,
+    0xCF73: 2198,
+    0xCF75: 2199,
+    0xCFA1: 2200,
+    0xCFA2: 2201,
+    0xCFA5: 2202,
+    0xCFA9: 2203,
+    0xCFB1: 2204,
+    0xCFB3: 2205,
+    0xCFB5: 2206,
+    0xCFB7: 2207,
+    0xD061: 2208,
+    0xD062: 2209,
+    0xD065: 2210,
+    0xD069: 2211,
+    0xD06E: 2212,
+    0xD071: 2213,
+    0xD073: 2214,
+    0xD075: 2215,
+    0xD077: 2216,
+    0xD081: 2217,
+    0xD082: 2218,
+    0xD085: 2219,
+    0xD089: 2220,
+    0xD091: 2221,
+    0xD093: 2222,
+    0xD095: 2223,
+    0xD096: 2224,
+    0xD097: 2225,
+    0xD0A1: 2226,
+    0xD0B7: 2227,
+    0xD0E1: 2228,
+    0xD0E2: 2229,
+    0xD0E5: 2230,
+    0xD0E9: 2231,
+    0xD0EB: 2232,
+    0xD0F1: 2233,
+    0xD0F3: 2234,
+    0xD0F5: 2235,
+    0xD0F7: 2236,
+    0xD141: 2237,
+    0xD142: 2238,
+    0xD145: 2239,
+    0xD149: 2240,
+    0xD151: 2241,
+    0xD153: 2242,
+    0xD155: 2243,
+    0xD157: 2244,
+    0xD161: 2245,
+    0xD162: 2246,
+    0xD165: 2247,
+    0xD169: 2248,
+    0xD171: 2249,
+    0xD173: 2250,
+    0xD175: 2251,
+    0xD176: 2252,
+    0xD177: 2253,
+    0xD181: 2254,
+    0xD185: 2255,
+    0xD189: 2256,
+    0xD193: 2257,
+    0xD1A1: 2258,
+    0xD1A2: 2259,
+    0xD1A5: 2260,
+    0xD1A9: 2261,
+    0xD1AE: 2262,
+    0xD1B1: 2263,
+    0xD1B3: 2264,
+    0xD1B5: 2265,
+    0xD1B7: 2266,
+    0xD1BB: 2267,
+    0xD1C1: 2268,
+    0xD1C2: 2269,
+    0xD1C5: 2270,
+    0xD1C9: 2271,
+    0xD1D5: 2272,
+    0xD1D7: 2273,
+    0xD1E1: 2274,
+    0xD1E2: 2275,
+    0xD1E5: 2276,
+    0xD1F5: 2277,
+    0xD1F7: 2278,
+    0xD241: 2279,
+    0xD242: 2280,
+    0xD245: 2281,
+    0xD249: 2282,
+    0xD253: 2283,
+    0xD255: 2284,
+    0xD257: 2285,
+    0xD261: 2286,
+    0xD265: 2287,
+    0xD269: 2288,
+    0xD273: 2289,
+    0xD275: 2290,
+    0xD281: 2291,
+    0xD282: 2292,
+    0xD285: 2293,
+    0xD289: 2294,
+    0xD28E: 2295,
+    0xD291: 2296,
+    0xD295: 2297,
+    0xD297: 2298,
+    0xD2A1: 2299,
+    0xD2A5: 2300,
+    0xD2A9: 2301,
+    0xD2B1: 2302,
+    0xD2B7: 2303,
+    0xD2C1: 2304,
+    0xD2C2: 2305,
+    0xD2C5: 2306,
+    0xD2C9: 2307,
+    0xD2D7: 2308,
+    0xD2E1: 2309,
+    0xD2E2: 2310,
+    0xD2E5: 2311,
+    0xD2E9: 2312,
+    0xD2F1: 2313,
+    0xD2F3: 2314,
+    0xD2F5: 2315,
+    0xD2F7: 2316,
+    0xD341: 2317,
+    0xD342: 2318,
+    0xD345: 2319,
+    0xD349: 2320,
+    0xD351: 2321,
+    0xD355: 2322,
+    0xD357: 2323,
+    0xD361: 2324,
+    0xD362: 2325,
+    0xD365: 2326,
+    0xD367: 2327,
+    0xD368: 2328,
+    0xD369: 2329,
+    0xD36A: 2330,
+    0xD371: 2331,
+    0xD373: 2332,
+    0xD375: 2333,
+    0xD377: 2334,
+    0xD37B: 2335,
+    0xD381: 2336,
+    0xD385: 2337,
+    0xD389: 2338,
+    0xD391: 2339,
+    0xD393: 2340,
+    0xD397: 2341,
+    0xD3A1: 2342,
+    0xD3A2: 2343,
+    0xD3A5: 2344,
+    0xD3A9: 2345,
+    0xD3B1: 2346,
+    0xD3B3: 2347,
+    0xD3B5: 2348,
+    0xD3B7: 2349,
+}
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/johabprober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/johabprober.py
new file mode 100644
index 0000000..d7364ba
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/johabprober.py
@@ -0,0 +1,47 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .chardistribution import JOHABDistributionAnalysis
+from .codingstatemachine import CodingStateMachine
+from .mbcharsetprober import MultiByteCharSetProber
+from .mbcssm import JOHAB_SM_MODEL
+
+
+class JOHABProber(MultiByteCharSetProber):
+    def __init__(self) -> None:
+        super().__init__()
+        self.coding_sm = CodingStateMachine(JOHAB_SM_MODEL)
+        self.distribution_analyzer = JOHABDistributionAnalysis()
+        self.reset()
+
+    @property
+    def charset_name(self) -> str:
+        return "Johab"
+
+    @property
+    def language(self) -> str:
+        return "Korean"
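For orientation: JOHABProber is a thin wrapper that wires the Johab coding state machine and distribution analyser into chardet's generic multi-byte prober. A minimal sketch of exercising it directly, assuming the standalone chardet package rather than the pip-vendored import path (the same module ships there; feed and get_confidence are inherited from MultiByteCharSetProber):

    from chardet.johabprober import JOHABProber

    # Korean sample text; Python's "johab" codec (cp1361) produces Johab bytes.
    sample = ("안녕하세요, 만나서 반갑습니다. " * 10).encode("johab")

    prober = JOHABProber()
    prober.feed(sample)                       # run bytes through the state machine and distribution analysis
    print(prober.charset_name)                # "Johab"
    print(prober.language)                    # "Korean"
    print(round(prober.get_confidence(), 3))  # confidence in [0, 1]; grows with more representative input

In normal use this prober is not called directly; chardet's UniversalDetector feeds all registered probers and picks the best-scoring one.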
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/jpcntx.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/jpcntx.py
new file mode 100644
index 0000000..2f53bdd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/jpcntx.py
@@ -0,0 +1,238 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from typing import List, Tuple, Union
+
+# This is the hiragana 2-char sequence table; the number in each cell represents the pair's frequency category
+# fmt: off
+jp2_char_context = (
+    (0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
+    (2, 4, 0, 4, 0, 3, 0, 4, 0, 3, 4, 4, 4, 2, 4, 3, 3, 4, 3, 2, 3, 3, 4, 2, 3, 3, 3, 2, 4, 1, 4, 3, 3, 1, 5, 4, 3, 4, 3, 4, 3, 5, 3, 0, 3, 5, 4, 2, 0, 3, 1, 0, 3, 3, 0, 3, 3, 0, 1, 1, 0, 4, 3, 0, 3, 3, 0, 4, 0, 2, 0, 3, 5, 5, 5, 5, 4, 0, 4, 1, 0, 3, 4),
+    (0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2),
+    (0, 4, 0, 5, 0, 5, 0, 4, 0, 4, 5, 4, 4, 3, 5, 3, 5, 1, 5, 3, 4, 3, 4, 4, 3, 4, 3, 3, 4, 3, 5, 4, 4, 3, 5, 5, 3, 5, 5, 5, 3, 5, 5, 3, 4, 5, 5, 3, 1, 3, 2, 0, 3, 4, 0, 4, 2, 0, 4, 2, 1, 5, 3, 2, 3, 5, 0, 4, 0, 2, 0, 5, 4, 4, 5, 4, 5, 0, 4, 0, 0, 4, 4),
+    (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
+    (0, 3, 0, 4, 0, 3, 0, 3, 0, 4, 5, 4, 3, 3, 3, 3, 4, 3, 5, 4, 4, 3, 5, 4, 4, 3, 4, 3, 4, 4, 4, 4, 5, 3, 4, 4, 3, 4, 5, 5, 4, 5, 5, 1, 4, 5, 4, 3, 0, 3, 3, 1, 3, 3, 0, 4, 4, 0, 3, 3, 1, 5, 3, 3, 3, 5, 0, 4, 0, 3, 0, 4, 4, 3, 4, 3, 3, 0, 4, 1, 1, 3, 4),
+    (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
+    (0, 4, 0, 3, 0, 3, 0, 4, 0, 3, 4, 4, 3, 2, 2, 1, 2, 1, 3, 1, 3, 3, 3, 3, 3, 4, 3, 1, 3, 3, 5, 3, 3, 0, 4, 3, 0, 5, 4, 3, 3, 5, 4, 4, 3, 4, 4, 5, 0, 1, 2, 0, 1, 2, 0, 2, 2, 0, 1, 0, 0, 5, 2, 2, 1, 4, 0, 3, 0, 1, 0, 4, 4, 3, 5, 4, 3, 0, 2, 1, 0, 4, 3),
+    (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
+    (0, 3, 0, 5, 0, 4, 0, 2, 1, 4, 4, 2, 4, 1, 4, 2, 4, 2, 4, 3, 3, 3, 4, 3, 3, 3, 3, 1, 4, 2, 3, 3, 3, 1, 4, 4, 1, 1, 1, 4, 3, 3, 2, 0, 2, 4, 3, 2, 0, 3, 3, 0, 3, 1, 1, 0, 0, 0, 3, 3, 0, 4, 2, 2, 3, 4, 0, 4, 0, 3, 0, 4, 4, 5, 3, 4, 4, 0, 3, 0, 0, 1, 4),
+    (1, 4, 0, 4, 0, 4, 0, 4, 0, 3, 5, 4, 4, 3, 4, 3, 5, 4, 3, 3, 4, 3, 5, 4, 4, 4, 4, 3, 4, 2, 4, 3, 3, 1, 5, 4, 3, 2, 4, 5, 4, 5, 5, 4, 4, 5, 4, 4, 0, 3, 2, 2, 3, 3, 0, 4, 3, 1, 3, 2, 1, 4, 3, 3, 4, 5, 0, 3, 0, 2, 0, 4, 5, 5, 4, 5, 4, 0, 4, 0, 0, 5, 4),
+    (0, 5, 0, 5, 0, 4, 0, 3, 0, 4, 4, 3, 4, 3, 3, 3, 4, 0, 4, 4, 4, 3, 4, 3, 4, 3, 3, 1, 4, 2, 4, 3, 4, 0, 5, 4, 1, 4, 5, 4, 4, 5, 3, 2, 4, 3, 4, 3, 2, 4, 1, 3, 3, 3, 2, 3, 2, 0, 4, 3, 3, 4, 3, 3, 3, 4, 0, 4, 0, 3, 0, 4, 5, 4, 4, 4, 3, 0, 4, 1, 0, 1, 3),
+    (0, 3, 1, 4, 0, 3, 0, 2, 0, 3, 4, 4, 3, 1, 4, 2, 3, 3, 4, 3, 4, 3, 4, 3, 4, 4, 3, 2, 3, 1, 5, 4, 4, 1, 4, 4, 3, 5, 4, 4, 3, 5, 5, 4, 3, 4, 4, 3, 1, 2, 3, 1, 2, 2, 0, 3, 2, 0, 3, 1, 0, 5, 3, 3, 3, 4, 3, 3, 3, 3, 4, 4, 4, 4, 5, 4, 2, 0, 3, 3, 2, 4, 3),
+    (0, 2, 0, 3, 0, 1, 0, 1, 0, 0, 3, 2, 0, 0, 2, 0, 1, 0, 2, 1, 3, 3, 3, 1, 2, 3, 1, 0, 1, 0, 4, 2, 1, 1, 3, 3, 0, 4, 3, 3, 1, 4, 3, 3, 0, 3, 3, 2, 0, 0, 0, 0, 1, 0, 0, 2, 0, 0, 0, 0, 0, 4, 1, 0, 2, 3, 2, 2, 2, 1, 3, 3, 3, 4, 4, 3, 2, 0, 3, 1, 0, 3, 3),
+    (0, 4, 0, 4, 0, 3, 0, 3, 0, 4, 4, 4, 3, 3, 3, 3, 3, 3, 4, 3, 4, 2, 4, 3, 4, 3, 3, 2, 4, 3, 4, 5, 4, 1, 4, 5, 3, 5, 4, 5, 3, 5, 4, 0, 3, 5, 5, 3, 1, 3, 3, 2, 2, 3, 0, 3, 4, 1, 3, 3, 2, 4, 3, 3, 3, 4, 0, 4, 0, 3, 0, 4, 5, 4, 4, 5, 3, 0, 4, 1, 0, 3, 4),
+    (0, 2, 0, 3, 0, 3, 0, 0, 0, 2, 2, 2, 1, 0, 1, 0, 0, 0, 3, 0, 3, 0, 3, 0, 1, 3, 1, 0, 3, 1, 3, 3, 3, 1, 3, 3, 3, 0, 1, 3, 1, 3, 4, 0, 0, 3, 1, 1, 0, 3, 2, 0, 0, 0, 0, 1, 3, 0, 1, 0, 0, 3, 3, 2, 0, 3, 0, 0, 0, 0, 0, 3, 4, 3, 4, 3, 3, 0, 3, 0, 0, 2, 3),
+    (2, 3, 0, 3, 0, 2, 0, 1, 0, 3, 3, 4, 3, 1, 3, 1, 1, 1, 3, 1, 4, 3, 4, 3, 3, 3, 0, 0, 3, 1, 5, 4, 3, 1, 4, 3, 2, 5, 5, 4, 4, 4, 4, 3, 3, 4, 4, 4, 0, 2, 1, 1, 3, 2, 0, 1, 2, 0, 0, 1, 0, 4, 1, 3, 3, 3, 0, 3, 0, 1, 0, 4, 4, 4, 5, 5, 3, 0, 2, 0, 0, 4, 4),
+    (0, 2, 0, 1, 0, 3, 1, 3, 0, 2, 3, 3, 3, 0, 3, 1, 0, 0, 3, 0, 3, 2, 3, 1, 3, 2, 1, 1, 0, 0, 4, 2, 1, 0, 2, 3, 1, 4, 3, 2, 0, 4, 4, 3, 1, 3, 1, 3, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 4, 1, 1, 1, 2, 0, 3, 0, 0, 0, 3, 4, 2, 4, 3, 2, 0, 1, 0, 0, 3, 3),
+    (0, 1, 0, 4, 0, 5, 0, 4, 0, 2, 4, 4, 2, 3, 3, 2, 3, 3, 5, 3, 3, 3, 4, 3, 4, 2, 3, 0, 4, 3, 3, 3, 4, 1, 4, 3, 2, 1, 5, 5, 3, 4, 5, 1, 3, 5, 4, 2, 0, 3, 3, 0, 1, 3, 0, 4, 2, 0, 1, 3, 1, 4, 3, 3, 3, 3, 0, 3, 0, 1, 0, 3, 4, 4, 4, 5, 5, 0, 3, 0, 1, 4, 5),
+    (0, 2, 0, 3, 0, 3, 0, 0, 0, 2, 3, 1, 3, 0, 4, 0, 1, 1, 3, 0, 3, 4, 3, 2, 3, 1, 0, 3, 3, 2, 3, 1, 3, 0, 2, 3, 0, 2, 1, 4, 1, 2, 2, 0, 0, 3, 3, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0, 0, 2, 2, 0, 3, 2, 1, 3, 3, 0, 2, 0, 2, 0, 0, 3, 3, 1, 2, 4, 0, 3, 0, 2, 2, 3),
+    (2, 4, 0, 5, 0, 4, 0, 4, 0, 2, 4, 4, 4, 3, 4, 3, 3, 3, 1, 2, 4, 3, 4, 3, 4, 4, 5, 0, 3, 3, 3, 3, 2, 0, 4, 3, 1, 4, 3, 4, 1, 4, 4, 3, 3, 4, 4, 3, 1, 2, 3, 0, 4, 2, 0, 4, 1, 0, 3, 3, 0, 4, 3, 3, 3, 4, 0, 4, 0, 2, 0, 3, 5, 3, 4, 5, 2, 0, 3, 0, 0, 4, 5),
+    (0, 3, 0, 4, 0, 1, 0, 1, 0, 1, 3, 2, 2, 1, 3, 0, 3, 0, 2, 0, 2, 0, 3, 0, 2, 0, 0, 0, 1, 0, 1, 1, 0, 0, 3, 1, 0, 0, 0, 4, 0, 3, 1, 0, 2, 1, 3, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 4, 2, 2, 3, 1, 0, 3, 0, 0, 0, 1, 4, 4, 4, 3, 0, 0, 4, 0, 0, 1, 4),
+    (1, 4, 1, 5, 0, 3, 0, 3, 0, 4, 5, 4, 4, 3, 5, 3, 3, 4, 4, 3, 4, 1, 3, 3, 3, 3, 2, 1, 4, 1, 5, 4, 3, 1, 4, 4, 3, 5, 4, 4, 3, 5, 4, 3, 3, 4, 4, 4, 0, 3, 3, 1, 2, 3, 0, 3, 1, 0, 3, 3, 0, 5, 4, 4, 4, 4, 4, 4, 3, 3, 5, 4, 4, 3, 3, 5, 4, 0, 3, 2, 0, 4, 4),
+    (0, 2, 0, 3, 0, 1, 0, 0, 0, 1, 3, 3, 3, 2, 4, 1, 3, 0, 3, 1, 3, 0, 2, 2, 1, 1, 0, 0, 2, 0, 4, 3, 1, 0, 4, 3, 0, 4, 4, 4, 1, 4, 3, 1, 1, 3, 3, 1, 0, 2, 0, 0, 1, 3, 0, 0, 0, 0, 2, 0, 0, 4, 3, 2, 4, 3, 5, 4, 3, 3, 3, 4, 3, 3, 4, 3, 3, 0, 2, 1, 0, 3, 3),
+    (0, 2, 0, 4, 0, 3, 0, 2, 0, 2, 5, 5, 3, 4, 4, 4, 4, 1, 4, 3, 3, 0, 4, 3, 4, 3, 1, 3, 3, 2, 4, 3, 0, 3, 4, 3, 0, 3, 4, 4, 2, 4, 4, 0, 4, 5, 3, 3, 2, 2, 1, 1, 1, 2, 0, 1, 5, 0, 3, 3, 2, 4, 3, 3, 3, 4, 0, 3, 0, 2, 0, 4, 4, 3, 5, 5, 0, 0, 3, 0, 2, 3, 3),
+    (0, 3, 0, 4, 0, 3, 0, 1, 0, 3, 4, 3, 3, 1, 3, 3, 3, 0, 3, 1, 3, 0, 4, 3, 3, 1, 1, 0, 3, 0, 3, 3, 0, 0, 4, 4, 0, 1, 5, 4, 3, 3, 5, 0, 3, 3, 4, 3, 0, 2, 0, 1, 1, 1, 0, 1, 3, 0, 1, 2, 1, 3, 3, 2, 3, 3, 0, 3, 0, 1, 0, 1, 3, 3, 4, 4, 1, 0, 1, 2, 2, 1, 3),
+    (0, 1, 0, 4, 0, 4, 0, 3, 0, 1, 3, 3, 3, 2, 3, 1, 1, 0, 3, 0, 3, 3, 4, 3, 2, 4, 2, 0, 1, 0, 4, 3, 2, 0, 4, 3, 0, 5, 3, 3, 2, 4, 4, 4, 3, 3, 3, 4, 0, 1, 3, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 4, 2, 3, 3, 3, 0, 3, 0, 0, 0, 4, 4, 4, 5, 3, 2, 0, 3, 3, 0, 3, 5),
+    (0, 2, 0, 3, 0, 0, 0, 3, 0, 1, 3, 0, 2, 0, 0, 0, 1, 0, 3, 1, 1, 3, 3, 0, 0, 3, 0, 0, 3, 0, 2, 3, 1, 0, 3, 1, 0, 3, 3, 2, 0, 4, 2, 2, 0, 2, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 1, 2, 0, 1, 0, 1, 0, 0, 0, 1, 3, 1, 2, 0, 0, 0, 1, 0, 0, 1, 4),
+    (0, 3, 0, 3, 0, 5, 0, 1, 0, 2, 4, 3, 1, 3, 3, 2, 1, 1, 5, 2, 1, 0, 5, 1, 2, 0, 0, 0, 3, 3, 2, 2, 3, 2, 4, 3, 0, 0, 3, 3, 1, 3, 3, 0, 2, 5, 3, 4, 0, 3, 3, 0, 1, 2, 0, 2, 2, 0, 3, 2, 0, 2, 2, 3, 3, 3, 0, 2, 0, 1, 0, 3, 4, 4, 2, 5, 4, 0, 3, 0, 0, 3, 5),
+    (0, 3, 0, 3, 0, 3, 0, 1, 0, 3, 3, 3, 3, 0, 3, 0, 2, 0, 2, 1, 1, 0, 2, 0, 1, 0, 0, 0, 2, 1, 0, 0, 1, 0, 3, 2, 0, 0, 3, 3, 1, 2, 3, 1, 0, 3, 3, 0, 0, 1, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 2, 3, 1, 2, 3, 0, 3, 0, 1, 0, 3, 2, 1, 0, 4, 3, 0, 1, 1, 0, 3, 3),
+    (0, 4, 0, 5, 0, 3, 0, 3, 0, 4, 5, 5, 4, 3, 5, 3, 4, 3, 5, 3, 3, 2, 5, 3, 4, 4, 4, 3, 4, 3, 4, 5, 5, 3, 4, 4, 3, 4, 4, 5, 4, 4, 4, 3, 4, 5, 5, 4, 2, 3, 4, 2, 3, 4, 0, 3, 3, 1, 4, 3, 2, 4, 3, 3, 5, 5, 0, 3, 0, 3, 0, 5, 5, 5, 5, 4, 4, 0, 4, 0, 1, 4, 4),
+    (0, 4, 0, 4, 0, 3, 0, 3, 0, 3, 5, 4, 4, 2, 3, 2, 5, 1, 3, 2, 5, 1, 4, 2, 3, 2, 3, 3, 4, 3, 3, 3, 3, 2, 5, 4, 1, 3, 3, 5, 3, 4, 4, 0, 4, 4, 3, 1, 1, 3, 1, 0, 2, 3, 0, 2, 3, 0, 3, 0, 0, 4, 3, 1, 3, 4, 0, 3, 0, 2, 0, 4, 4, 4, 3, 4, 5, 0, 4, 0, 0, 3, 4),
+    (0, 3, 0, 3, 0, 3, 1, 2, 0, 3, 4, 4, 3, 3, 3, 0, 2, 2, 4, 3, 3, 1, 3, 3, 3, 1, 1, 0, 3, 1, 4, 3, 2, 3, 4, 4, 2, 4, 4, 4, 3, 4, 4, 3, 2, 4, 4, 3, 1, 3, 3, 1, 3, 3, 0, 4, 1, 0, 2, 2, 1, 4, 3, 2, 3, 3, 5, 4, 3, 3, 5, 4, 4, 3, 3, 0, 4, 0, 3, 2, 2, 4, 4),
+    (0, 2, 0, 1, 0, 0, 0, 0, 0, 1, 2, 1, 3, 0, 0, 0, 0, 0, 2, 0, 1, 2, 1, 0, 0, 1, 0, 0, 0, 0, 3, 0, 0, 1, 0, 1, 1, 3, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 0, 3, 4, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1),
+    (0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 4, 0, 4, 1, 4, 0, 3, 0, 4, 0, 3, 0, 4, 0, 3, 0, 3, 0, 4, 1, 5, 1, 4, 0, 0, 3, 0, 5, 0, 5, 2, 0, 1, 0, 0, 0, 2, 1, 4, 0, 1, 3, 0, 0, 3, 0, 0, 3, 1, 1, 4, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0),
+    (1, 4, 0, 5, 0, 3, 0, 2, 0, 3, 5, 4, 4, 3, 4, 3, 5, 3, 4, 3, 3, 0, 4, 3, 3, 3, 3, 3, 3, 2, 4, 4, 3, 1, 3, 4, 4, 5, 4, 4, 3, 4, 4, 1, 3, 5, 4, 3, 3, 3, 1, 2, 2, 3, 3, 1, 3, 1, 3, 3, 3, 5, 3, 3, 4, 5, 0, 3, 0, 3, 0, 3, 4, 3, 4, 4, 3, 0, 3, 0, 2, 4, 3),
+    (0, 1, 0, 4, 0, 0, 0, 0, 0, 1, 4, 0, 4, 1, 4, 2, 4, 0, 3, 0, 1, 0, 1, 0, 0, 0, 0, 0, 2, 0, 3, 1, 1, 1, 0, 3, 0, 0, 0, 1, 2, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 3, 0, 0, 0, 0, 3, 2, 0, 2, 2, 0, 1, 0, 0, 0, 2, 3, 2, 3, 3, 0, 0, 0, 0, 2, 1, 0),
+    (0, 5, 1, 5, 0, 3, 0, 3, 0, 5, 4, 4, 5, 1, 5, 3, 3, 0, 4, 3, 4, 3, 5, 3, 4, 3, 3, 2, 4, 3, 4, 3, 3, 0, 3, 3, 1, 4, 4, 3, 4, 4, 4, 3, 4, 5, 5, 3, 2, 3, 1, 1, 3, 3, 1, 3, 1, 1, 3, 3, 2, 4, 5, 3, 3, 5, 0, 4, 0, 3, 0, 4, 4, 3, 5, 3, 3, 0, 3, 4, 0, 4, 3),
+    (0, 5, 0, 5, 0, 3, 0, 2, 0, 4, 4, 3, 5, 2, 4, 3, 3, 3, 4, 4, 4, 3, 5, 3, 5, 3, 3, 1, 4, 0, 4, 3, 3, 0, 3, 3, 0, 4, 4, 4, 4, 5, 4, 3, 3, 5, 5, 3, 2, 3, 1, 2, 3, 2, 0, 1, 0, 0, 3, 2, 2, 4, 4, 3, 1, 5, 0, 4, 0, 3, 0, 4, 3, 1, 3, 2, 1, 0, 3, 3, 0, 3, 3),
+    (0, 4, 0, 5, 0, 5, 0, 4, 0, 4, 5, 5, 5, 3, 4, 3, 3, 2, 5, 4, 4, 3, 5, 3, 5, 3, 4, 0, 4, 3, 4, 4, 3, 2, 4, 4, 3, 4, 5, 4, 4, 5, 5, 0, 3, 5, 5, 4, 1, 3, 3, 2, 3, 3, 1, 3, 1, 0, 4, 3, 1, 4, 4, 3, 4, 5, 0, 4, 0, 2, 0, 4, 3, 4, 4, 3, 3, 0, 4, 0, 0, 5, 5),
+    (0, 4, 0, 4, 0, 5, 0, 1, 1, 3, 3, 4, 4, 3, 4, 1, 3, 0, 5, 1, 3, 0, 3, 1, 3, 1, 1, 0, 3, 0, 3, 3, 4, 0, 4, 3, 0, 4, 4, 4, 3, 4, 4, 0, 3, 5, 4, 1, 0, 3, 0, 0, 2, 3, 0, 3, 1, 0, 3, 1, 0, 3, 2, 1, 3, 5, 0, 3, 0, 1, 0, 3, 2, 3, 3, 4, 4, 0, 2, 2, 0, 4, 4),
+    (2, 4, 0, 5, 0, 4, 0, 3, 0, 4, 5, 5, 4, 3, 5, 3, 5, 3, 5, 3, 5, 2, 5, 3, 4, 3, 3, 4, 3, 4, 5, 3, 2, 1, 5, 4, 3, 2, 3, 4, 5, 3, 4, 1, 2, 5, 4, 3, 0, 3, 3, 0, 3, 2, 0, 2, 3, 0, 4, 1, 0, 3, 4, 3, 3, 5, 0, 3, 0, 1, 0, 4, 5, 5, 5, 4, 3, 0, 4, 2, 0, 3, 5),
+    (0, 5, 0, 4, 0, 4, 0, 2, 0, 5, 4, 3, 4, 3, 4, 3, 3, 3, 4, 3, 4, 2, 5, 3, 5, 3, 4, 1, 4, 3, 4, 4, 4, 0, 3, 5, 0, 4, 4, 4, 4, 5, 3, 1, 3, 4, 5, 3, 3, 3, 3, 3, 3, 3, 0, 2, 2, 0, 3, 3, 2, 4, 3, 3, 3, 5, 3, 4, 1, 3, 3, 5, 3, 2, 0, 0, 0, 0, 4, 3, 1, 3, 3),
+    (0, 1, 0, 3, 0, 3, 0, 1, 0, 1, 3, 3, 3, 2, 3, 3, 3, 0, 3, 0, 0, 0, 3, 1, 3, 0, 0, 0, 2, 2, 2, 3, 0, 0, 3, 2, 0, 1, 2, 4, 1, 3, 3, 0, 0, 3, 3, 3, 0, 1, 0, 0, 2, 1, 0, 0, 3, 0, 3, 1, 0, 3, 0, 0, 1, 3, 0, 2, 0, 1, 0, 3, 3, 1, 3, 3, 0, 0, 1, 1, 0, 3, 3),
+    (0, 2, 0, 3, 0, 2, 1, 4, 0, 2, 2, 3, 1, 1, 3, 1, 1, 0, 2, 0, 3, 1, 2, 3, 1, 3, 0, 0, 1, 0, 4, 3, 2, 3, 3, 3, 1, 4, 2, 3, 3, 3, 3, 1, 0, 3, 1, 4, 0, 1, 1, 0, 1, 2, 0, 1, 1, 0, 1, 1, 0, 3, 1, 3, 2, 2, 0, 1, 0, 0, 0, 2, 3, 3, 3, 1, 0, 0, 0, 0, 0, 2, 3),
+    (0, 5, 0, 4, 0, 5, 0, 2, 0, 4, 5, 5, 3, 3, 4, 3, 3, 1, 5, 4, 4, 2, 4, 4, 4, 3, 4, 2, 4, 3, 5, 5, 4, 3, 3, 4, 3, 3, 5, 5, 4, 5, 5, 1, 3, 4, 5, 3, 1, 4, 3, 1, 3, 3, 0, 3, 3, 1, 4, 3, 1, 4, 5, 3, 3, 5, 0, 4, 0, 3, 0, 5, 3, 3, 1, 4, 3, 0, 4, 0, 1, 5, 3),
+    (0, 5, 0, 5, 0, 4, 0, 2, 0, 4, 4, 3, 4, 3, 3, 3, 3, 3, 5, 4, 4, 4, 4, 4, 4, 5, 3, 3, 5, 2, 4, 4, 4, 3, 4, 4, 3, 3, 4, 4, 5, 5, 3, 3, 4, 3, 4, 3, 3, 4, 3, 3, 3, 3, 1, 2, 2, 1, 4, 3, 3, 5, 4, 4, 3, 4, 0, 4, 0, 3, 0, 4, 4, 4, 4, 4, 1, 0, 4, 2, 0, 2, 4),
+    (0, 4, 0, 4, 0, 3, 0, 1, 0, 3, 5, 2, 3, 0, 3, 0, 2, 1, 4, 2, 3, 3, 4, 1, 4, 3, 3, 2, 4, 1, 3, 3, 3, 0, 3, 3, 0, 0, 3, 3, 3, 5, 3, 3, 3, 3, 3, 2, 0, 2, 0, 0, 2, 0, 0, 2, 0, 0, 1, 0, 0, 3, 1, 2, 2, 3, 0, 3, 0, 2, 0, 4, 4, 3, 3, 4, 1, 0, 3, 0, 0, 2, 4),
+    (0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 2, 0, 0, 0, 0, 0, 1, 0, 2, 0, 1, 0, 0, 0, 0, 0, 3, 1, 3, 0, 3, 2, 0, 0, 0, 1, 0, 3, 2, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 4, 0, 2, 0, 0, 0, 0, 0, 0, 2),
+    (0, 2, 1, 3, 0, 2, 0, 2, 0, 3, 3, 3, 3, 1, 3, 1, 3, 3, 3, 3, 3, 3, 4, 2, 2, 1, 2, 1, 4, 0, 4, 3, 1, 3, 3, 3, 2, 4, 3, 5, 4, 3, 3, 3, 3, 3, 3, 3, 0, 1, 3, 0, 2, 0, 0, 1, 0, 0, 1, 0, 0, 4, 2, 0, 2, 3, 0, 3, 3, 0, 3, 3, 4, 2, 3, 1, 4, 0, 1, 2, 0, 2, 3),
+    (0, 3, 0, 3, 0, 1, 0, 3, 0, 2, 3, 3, 3, 0, 3, 1, 2, 0, 3, 3, 2, 3, 3, 2, 3, 2, 3, 1, 3, 0, 4, 3, 2, 0, 3, 3, 1, 4, 3, 3, 2, 3, 4, 3, 1, 3, 3, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 4, 1, 1, 0, 3, 0, 3, 1, 0, 2, 3, 3, 3, 3, 3, 1, 0, 0, 2, 0, 3, 3),
+    (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 2, 0, 3, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 3, 0, 3, 0, 3, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 2, 0, 2, 3, 0, 0, 0, 0, 0, 0, 0, 0, 3),
+    (0, 2, 0, 3, 1, 3, 0, 3, 0, 2, 3, 3, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 1, 3, 0, 2, 3, 1, 1, 4, 3, 3, 2, 3, 3, 1, 2, 2, 4, 1, 3, 3, 0, 1, 4, 2, 3, 0, 1, 3, 0, 3, 0, 0, 1, 3, 0, 2, 0, 0, 3, 3, 2, 1, 3, 0, 3, 0, 2, 0, 3, 4, 4, 4, 3, 1, 0, 3, 0, 0, 3, 3),
+    (0, 2, 0, 1, 0, 2, 0, 0, 0, 1, 3, 2, 2, 1, 3, 0, 1, 1, 3, 0, 3, 2, 3, 1, 2, 0, 2, 0, 1, 1, 3, 3, 3, 0, 3, 3, 1, 1, 2, 3, 2, 3, 3, 1, 2, 3, 2, 0, 0, 1, 0, 0, 0, 0, 0, 0, 3, 0, 1, 0, 0, 2, 1, 2, 1, 3, 0, 3, 0, 0, 0, 3, 4, 4, 4, 3, 2, 0, 2, 0, 0, 2, 4),
+    (0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 3, 1, 0, 0, 0, 0, 0, 0, 0, 3),
+    (0, 3, 0, 3, 0, 2, 0, 3, 0, 3, 3, 3, 2, 3, 2, 2, 2, 0, 3, 1, 3, 3, 3, 2, 3, 3, 0, 0, 3, 0, 3, 2, 2, 0, 2, 3, 1, 4, 3, 4, 3, 3, 2, 3, 1, 5, 4, 4, 0, 3, 1, 2, 1, 3, 0, 3, 1, 1, 2, 0, 2, 3, 1, 3, 1, 3, 0, 3, 0, 1, 0, 3, 3, 4, 4, 2, 1, 0, 2, 1, 0, 2, 4),
+    (0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 4, 2, 5, 1, 4, 0, 2, 0, 2, 1, 3, 1, 4, 0, 2, 1, 0, 0, 2, 1, 4, 1, 1, 0, 3, 3, 0, 5, 1, 3, 2, 3, 3, 1, 0, 3, 2, 3, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 4, 0, 1, 0, 3, 0, 2, 0, 1, 0, 3, 3, 3, 4, 3, 3, 0, 0, 0, 0, 2, 3),
+    (0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 2, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 1, 0, 0, 0, 0, 0, 3),
+    (0, 1, 0, 3, 0, 4, 0, 3, 0, 2, 4, 3, 1, 0, 3, 2, 2, 1, 3, 1, 2, 2, 3, 1, 1, 1, 2, 1, 3, 0, 1, 2, 0, 1, 3, 2, 1, 3, 0, 5, 5, 1, 0, 0, 1, 3, 2, 1, 0, 3, 0, 0, 1, 0, 0, 0, 0, 0, 3, 4, 0, 1, 1, 1, 3, 2, 0, 2, 0, 1, 0, 2, 3, 3, 1, 2, 3, 0, 1, 0, 1, 0, 4),
+    (0, 0, 0, 1, 0, 3, 0, 3, 0, 2, 2, 1, 0, 0, 4, 0, 3, 0, 3, 1, 3, 0, 3, 0, 3, 0, 1, 0, 3, 0, 3, 1, 3, 0, 3, 3, 0, 0, 1, 2, 1, 1, 1, 0, 1, 2, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 1, 2, 0, 0, 2, 0, 0, 0, 0, 2, 3, 3, 3, 3, 0, 0, 0, 0, 1, 4),
+    (0, 0, 0, 3, 0, 3, 0, 0, 0, 0, 3, 1, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 3, 0, 2, 0, 2, 3, 0, 0, 2, 2, 3, 1, 2, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 2, 0, 0, 0, 0, 2, 3),
+    (2, 4, 0, 5, 0, 5, 0, 4, 0, 3, 4, 3, 3, 3, 4, 3, 3, 3, 4, 3, 4, 4, 5, 4, 5, 5, 5, 2, 3, 0, 5, 5, 4, 1, 5, 4, 3, 1, 5, 4, 3, 4, 4, 3, 3, 4, 3, 3, 0, 3, 2, 0, 2, 3, 0, 3, 0, 0, 3, 3, 0, 5, 3, 2, 3, 3, 0, 3, 0, 3, 0, 3, 4, 5, 4, 5, 3, 0, 4, 3, 0, 3, 4),
+    (0, 3, 0, 3, 0, 3, 0, 3, 0, 3, 3, 4, 3, 2, 3, 2, 3, 0, 4, 3, 3, 3, 3, 3, 3, 3, 3, 0, 3, 2, 4, 3, 3, 1, 3, 4, 3, 4, 4, 4, 3, 4, 4, 3, 2, 4, 4, 1, 0, 2, 0, 0, 1, 1, 0, 2, 0, 0, 3, 1, 0, 5, 3, 2, 1, 3, 0, 3, 0, 1, 2, 4, 3, 2, 4, 3, 3, 0, 3, 2, 0, 4, 4),
+    (0, 3, 0, 3, 0, 1, 0, 0, 0, 1, 4, 3, 3, 2, 3, 1, 3, 1, 4, 2, 3, 2, 4, 2, 3, 4, 3, 0, 2, 2, 3, 3, 3, 0, 3, 3, 3, 0, 3, 4, 1, 3, 3, 0, 3, 4, 3, 3, 0, 1, 1, 0, 1, 0, 0, 0, 4, 0, 3, 0, 0, 3, 1, 2, 1, 3, 0, 4, 0, 1, 0, 4, 3, 3, 4, 3, 3, 0, 2, 0, 0, 3, 3),
+    (0, 3, 0, 4, 0, 1, 0, 3, 0, 3, 4, 3, 3, 0, 3, 3, 3, 1, 3, 1, 3, 3, 4, 3, 3, 3, 0, 0, 3, 1, 5, 3, 3, 1, 3, 3, 2, 5, 4, 3, 3, 4, 5, 3, 2, 5, 3, 4, 0, 1, 0, 0, 0, 0, 0, 2, 0, 0, 1, 1, 0, 4, 2, 2, 1, 3, 0, 3, 0, 2, 0, 4, 4, 3, 5, 3, 2, 0, 1, 1, 0, 3, 4),
+    (0, 5, 0, 4, 0, 5, 0, 2, 0, 4, 4, 3, 3, 2, 3, 3, 3, 1, 4, 3, 4, 1, 5, 3, 4, 3, 4, 0, 4, 2, 4, 3, 4, 1, 5, 4, 0, 4, 4, 4, 4, 5, 4, 1, 3, 5, 4, 2, 1, 4, 1, 1, 3, 2, 0, 3, 1, 0, 3, 2, 1, 4, 3, 3, 3, 4, 0, 4, 0, 3, 0, 4, 4, 4, 3, 3, 3, 0, 4, 2, 0, 3, 4),
+    (1, 4, 0, 4, 0, 3, 0, 1, 0, 3, 3, 3, 1, 1, 3, 3, 2, 2, 3, 3, 1, 0, 3, 2, 2, 1, 2, 0, 3, 1, 2, 1, 2, 0, 3, 2, 0, 2, 2, 3, 3, 4, 3, 0, 3, 3, 1, 2, 0, 1, 1, 3, 1, 2, 0, 0, 3, 0, 1, 1, 0, 3, 2, 2, 3, 3, 0, 3, 0, 0, 0, 2, 3, 3, 4, 3, 3, 0, 1, 0, 0, 1, 4),
+    (0, 4, 0, 4, 0, 4, 0, 0, 0, 3, 4, 4, 3, 1, 4, 2, 3, 2, 3, 3, 3, 1, 4, 3, 4, 0, 3, 0, 4, 2, 3, 3, 2, 2, 5, 4, 2, 1, 3, 4, 3, 4, 3, 1, 3, 3, 4, 2, 0, 2, 1, 0, 3, 3, 0, 0, 2, 0, 3, 1, 0, 4, 4, 3, 4, 3, 0, 4, 0, 1, 0, 2, 4, 4, 4, 4, 4, 0, 3, 2, 0, 3, 3),
+    (0, 0, 0, 1, 0, 4, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 3, 2, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 2),
+    (0, 2, 0, 3, 0, 4, 0, 4, 0, 1, 3, 3, 3, 0, 4, 0, 2, 1, 2, 1, 1, 1, 2, 0, 3, 1, 1, 0, 1, 0, 3, 1, 0, 0, 3, 3, 2, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 2, 0, 2, 2, 0, 3, 1, 0, 0, 1, 0, 1, 1, 0, 1, 2, 0, 3, 0, 0, 0, 0, 1, 0, 0, 3, 3, 4, 3, 1, 0, 1, 0, 3, 0, 2),
+    (0, 0, 0, 3, 0, 5, 0, 0, 0, 0, 1, 0, 2, 0, 3, 1, 0, 1, 3, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 4, 0, 0, 0, 2, 3, 0, 1, 4, 1, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 3, 0, 0, 0, 0, 0, 3),
+    (0, 2, 0, 5, 0, 5, 0, 1, 0, 2, 4, 3, 3, 2, 5, 1, 3, 2, 3, 3, 3, 0, 4, 1, 2, 0, 3, 0, 4, 0, 2, 2, 1, 1, 5, 3, 0, 0, 1, 4, 2, 3, 2, 0, 3, 3, 3, 2, 0, 2, 4, 1, 1, 2, 0, 1, 1, 0, 3, 1, 0, 1, 3, 1, 2, 3, 0, 2, 0, 0, 0, 1, 3, 5, 4, 4, 4, 0, 3, 0, 0, 1, 3),
+    (0, 4, 0, 5, 0, 4, 0, 4, 0, 4, 5, 4, 3, 3, 4, 3, 3, 3, 4, 3, 4, 4, 5, 3, 4, 5, 4, 2, 4, 2, 3, 4, 3, 1, 4, 4, 1, 3, 5, 4, 4, 5, 5, 4, 4, 5, 5, 5, 2, 3, 3, 1, 4, 3, 1, 3, 3, 0, 3, 3, 1, 4, 3, 4, 4, 4, 0, 3, 0, 4, 0, 3, 3, 4, 4, 5, 0, 0, 4, 3, 0, 4, 5),
+    (0, 4, 0, 4, 0, 3, 0, 3, 0, 3, 4, 4, 4, 3, 3, 2, 4, 3, 4, 3, 4, 3, 5, 3, 4, 3, 2, 1, 4, 2, 4, 4, 3, 1, 3, 4, 2, 4, 5, 5, 3, 4, 5, 4, 1, 5, 4, 3, 0, 3, 2, 2, 3, 2, 1, 3, 1, 0, 3, 3, 3, 5, 3, 3, 3, 5, 4, 4, 2, 3, 3, 4, 3, 3, 3, 2, 1, 0, 3, 2, 1, 4, 3),
+    (0, 4, 0, 5, 0, 4, 0, 3, 0, 3, 5, 5, 3, 2, 4, 3, 4, 0, 5, 4, 4, 1, 4, 4, 4, 3, 3, 3, 4, 3, 5, 5, 2, 3, 3, 4, 1, 2, 5, 5, 3, 5, 5, 2, 3, 5, 5, 4, 0, 3, 2, 0, 3, 3, 1, 1, 5, 1, 4, 1, 0, 4, 3, 2, 3, 5, 0, 4, 0, 3, 0, 5, 4, 3, 4, 3, 0, 0, 4, 1, 0, 4, 4),
+    (1, 3, 0, 4, 0, 2, 0, 2, 0, 2, 5, 5, 3, 3, 3, 3, 3, 0, 4, 2, 3, 4, 4, 4, 3, 4, 0, 0, 3, 4, 5, 4, 3, 3, 3, 3, 2, 5, 5, 4, 5, 5, 5, 4, 3, 5, 5, 5, 1, 3, 1, 0, 1, 0, 0, 3, 2, 0, 4, 2, 0, 5, 2, 3, 2, 4, 1, 3, 0, 3, 0, 4, 5, 4, 5, 4, 3, 0, 4, 2, 0, 5, 4),
+    (0, 3, 0, 4, 0, 5, 0, 3, 0, 3, 4, 4, 3, 2, 3, 2, 3, 3, 3, 3, 3, 2, 4, 3, 3, 2, 2, 0, 3, 3, 3, 3, 3, 1, 3, 3, 3, 0, 4, 4, 3, 4, 4, 1, 1, 4, 4, 2, 0, 3, 1, 0, 1, 1, 0, 4, 1, 0, 2, 3, 1, 3, 3, 1, 3, 4, 0, 3, 0, 1, 0, 3, 1, 3, 0, 0, 1, 0, 2, 0, 0, 4, 4),
+    (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
+    (0, 3, 0, 3, 0, 2, 0, 3, 0, 1, 5, 4, 3, 3, 3, 1, 4, 2, 1, 2, 3, 4, 4, 2, 4, 4, 5, 0, 3, 1, 4, 3, 4, 0, 4, 3, 3, 3, 2, 3, 2, 5, 3, 4, 3, 2, 2, 3, 0, 0, 3, 0, 2, 1, 0, 1, 2, 0, 0, 0, 0, 2, 1, 1, 3, 1, 0, 2, 0, 4, 0, 3, 4, 4, 4, 5, 2, 0, 2, 0, 0, 1, 3),
+    (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 4, 2, 1, 1, 0, 1, 0, 3, 2, 0, 0, 3, 1, 1, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 1, 0, 0, 0, 2, 0, 0, 0, 1, 4, 0, 4, 2, 1, 0, 0, 0, 0, 0, 1),
+    (0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 2, 0, 2, 1, 0, 0, 1, 2, 1, 0, 1, 1, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 1, 0, 0, 0, 0, 0, 1, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 2),
+    (0, 4, 0, 4, 0, 4, 0, 3, 0, 4, 4, 3, 4, 2, 4, 3, 2, 0, 4, 4, 4, 3, 5, 3, 5, 3, 3, 2, 4, 2, 4, 3, 4, 3, 1, 4, 0, 2, 3, 4, 4, 4, 3, 3, 3, 4, 4, 4, 3, 4, 1, 3, 4, 3, 2, 1, 2, 1, 3, 3, 3, 4, 4, 3, 3, 5, 0, 4, 0, 3, 0, 4, 3, 3, 3, 2, 1, 0, 3, 0, 0, 3, 3),
+    (0, 4, 0, 3, 0, 3, 0, 3, 0, 3, 5, 5, 3, 3, 3, 3, 4, 3, 4, 3, 3, 3, 4, 4, 4, 3, 3, 3, 3, 4, 3, 5, 3, 3, 1, 3, 2, 4, 5, 5, 5, 5, 4, 3, 4, 5, 5, 3, 2, 2, 3, 3, 3, 3, 2, 3, 3, 1, 2, 3, 2, 4, 3, 3, 3, 4, 0, 4, 0, 2, 0, 4, 3, 2, 2, 1, 2, 0, 3, 0, 0, 4, 1),
+)
+# fmt: on
+
+
+class JapaneseContextAnalysis:
+    NUM_OF_CATEGORY = 6
+    DONT_KNOW = -1
+    ENOUGH_REL_THRESHOLD = 100
+    MAX_REL_THRESHOLD = 1000
+    MINIMUM_DATA_THRESHOLD = 4
+
+    def __init__(self) -> None:
+        self._total_rel = 0
+        self._rel_sample: List[int] = []
+        self._need_to_skip_char_num = 0
+        self._last_char_order = -1
+        self._done = False
+        self.reset()
+
+    def reset(self) -> None:
+        self._total_rel = 0  # total number of sequences received
+        # category counters; each integer counts sequences in its category
+        self._rel_sample = [0] * self.NUM_OF_CATEGORY
+        # if the last byte in the current buffer is not the last byte of a
+        # character, we need to know how many bytes to skip in the next buffer
+        self._need_to_skip_char_num = 0
+        self._last_char_order = -1  # the order of the previous char
+        # If this flag is set to True, detection is done and a conclusion has
+        # been made
+        self._done = False
+
+    def feed(self, byte_str: Union[bytes, bytearray], num_bytes: int) -> None:
+        if self._done:
+            return
+
+        # The buffer we receive is byte oriented, and a character may span more
+        # than one buffer. If the last one or two bytes of the previous buffer
+        # did not complete a character, we record how many bytes are needed to
+        # finish it and skip them here. We could instead keep those bytes and
+        # analyse the character once it is complete, but one character makes
+        # little difference, so simply skipping it simplifies the logic and
+        # improves performance.
+        i = self._need_to_skip_char_num
+        while i < num_bytes:
+            order, char_len = self.get_order(byte_str[i : i + 2])
+            i += char_len
+            if i > num_bytes:
+                self._need_to_skip_char_num = i - num_bytes
+                self._last_char_order = -1
+            else:
+                if (order != -1) and (self._last_char_order != -1):
+                    self._total_rel += 1
+                    if self._total_rel > self.MAX_REL_THRESHOLD:
+                        self._done = True
+                        break
+                    self._rel_sample[
+                        jp2_char_context[self._last_char_order][order]
+                    ] += 1
+                self._last_char_order = order
+
+    def got_enough_data(self) -> bool:
+        return self._total_rel > self.ENOUGH_REL_THRESHOLD
+
+    def get_confidence(self) -> float:
+        # This is just one way to calculate confidence. It works well for me.
+        if self._total_rel > self.MINIMUM_DATA_THRESHOLD:
+            return (self._total_rel - self._rel_sample[0]) / self._total_rel
+        return self.DONT_KNOW
+
+    def get_order(self, _: Union[bytes, bytearray]) -> Tuple[int, int]:
+        return -1, 1
+
+
+class SJISContextAnalysis(JapaneseContextAnalysis):
+    def __init__(self) -> None:
+        super().__init__()
+        self._charset_name = "SHIFT_JIS"
+
+    @property
+    def charset_name(self) -> str:
+        return self._charset_name
+
+    def get_order(self, byte_str: Union[bytes, bytearray]) -> Tuple[int, int]:
+        if not byte_str:
+            return -1, 1
+        # find out current char's byte length
+        first_char = byte_str[0]
+        if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC):
+            char_len = 2
+            if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
+                self._charset_name = "CP932"
+        else:
+            char_len = 1
+
+        # return its order if it is hiragana
+        if len(byte_str) > 1:
+            second_char = byte_str[1]
+            if (first_char == 202) and (0x9F <= second_char <= 0xF1):
+                return second_char - 0x9F, char_len
+
+        return -1, char_len
+
+
+class EUCJPContextAnalysis(JapaneseContextAnalysis):
+    def get_order(self, byte_str: Union[bytes, bytearray]) -> Tuple[int, int]:
+        if not byte_str:
+            return -1, 1
+        # find out current char's byte length
+        first_char = byte_str[0]
+        if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE):
+            char_len = 2
+        elif first_char == 0x8F:
+            char_len = 3
+        else:
+            char_len = 1
+
+        # return its order if it is hiragana
+        if len(byte_str) > 1:
+            second_char = byte_str[1]
+            if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3):
+                return second_char - 0xA1, char_len
+
+        return -1, char_len
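The context analysers above share the feed() logic, which tallies how often adjacent hiragana fall into the likely categories of jp2_char_context; each subclass supplies a get_order() for its encoding's byte layout. A minimal sketch of driving the analyser directly, again assuming the standalone chardet package rather than the pip-vendored path (normally SJISProber/EUCJPProber feed it for you):

    from chardet.jpcntx import EUCJPContextAnalysis

    # Hiragana-heavy text, repeated so well over ENOUGH_REL_THRESHOLD (100) pairs are seen.
    text = "これはにほんごのぶんしょうです。" * 20
    data = text.encode("euc_jp")

    ctx = EUCJPContextAnalysis()
    ctx.feed(data, len(data))               # count adjacent-hiragana pairs by frequency category
    print(ctx.got_enough_data())            # True once _total_rel exceeds ENOUGH_REL_THRESHOLD
    print(round(ctx.get_confidence(), 3))   # fraction of observed pairs outside category 0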
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langbulgarianmodel.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langbulgarianmodel.py
new file mode 100644
index 0000000..9946682
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langbulgarianmodel.py
@@ -0,0 +1,4649 @@
+from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
+
+# 3: Positive
+# 2: Likely
+# 1: Unlikely
+# 0: Negative
+
+BULGARIAN_LANG_MODEL = {
+    63: {  # 'e'
+        63: 1,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 0,  # 'а'
+        18: 1,  # 'б'
+        9: 1,  # 'в'
+        20: 1,  # 'г'
+        11: 1,  # 'д'
+        3: 1,  # 'е'
+        23: 1,  # 'ж'
+        15: 1,  # 'з'
+        2: 0,  # 'и'
+        26: 1,  # 'й'
+        12: 1,  # 'к'
+        10: 1,  # 'л'
+        14: 1,  # 'м'
+        6: 1,  # 'н'
+        4: 1,  # 'о'
+        13: 1,  # 'п'
+        7: 1,  # 'р'
+        8: 1,  # 'с'
+        5: 1,  # 'т'
+        19: 0,  # 'у'
+        29: 1,  # 'ф'
+        25: 1,  # 'х'
+        22: 0,  # 'ц'
+        21: 1,  # 'ч'
+        27: 1,  # 'ш'
+        24: 1,  # 'щ'
+        17: 0,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 0,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    45: {  # '\xad'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 1,  # 'Б'
+        35: 1,  # 'В'
+        43: 0,  # 'Г'
+        37: 1,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 1,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 0,  # 'Л'
+        38: 1,  # 'М'
+        36: 0,  # 'Н'
+        41: 1,  # 'О'
+        30: 1,  # 'П'
+        39: 1,  # 'Р'
+        28: 1,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 1,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 0,  # 'а'
+        18: 0,  # 'б'
+        9: 0,  # 'в'
+        20: 0,  # 'г'
+        11: 0,  # 'д'
+        3: 0,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 0,  # 'и'
+        26: 0,  # 'й'
+        12: 0,  # 'к'
+        10: 0,  # 'л'
+        14: 0,  # 'м'
+        6: 0,  # 'н'
+        4: 0,  # 'о'
+        13: 0,  # 'п'
+        7: 0,  # 'р'
+        8: 0,  # 'с'
+        5: 0,  # 'т'
+        19: 0,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 0,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 0,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    31: {  # 'А'
+        63: 0,  # 'e'
+        45: 1,  # '\xad'
+        31: 1,  # 'А'
+        32: 1,  # 'Б'
+        35: 2,  # 'В'
+        43: 1,  # 'Г'
+        37: 2,  # 'Д'
+        44: 2,  # 'Е'
+        55: 1,  # 'Ж'
+        47: 2,  # 'З'
+        40: 1,  # 'И'
+        59: 1,  # 'Й'
+        33: 1,  # 'К'
+        46: 2,  # 'Л'
+        38: 1,  # 'М'
+        36: 2,  # 'Н'
+        41: 1,  # 'О'
+        30: 2,  # 'П'
+        39: 2,  # 'Р'
+        28: 2,  # 'С'
+        34: 2,  # 'Т'
+        51: 1,  # 'У'
+        48: 2,  # 'Ф'
+        49: 1,  # 'Х'
+        53: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        54: 1,  # 'Ш'
+        57: 2,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 1,  # 'Я'
+        1: 1,  # 'а'
+        18: 2,  # 'б'
+        9: 2,  # 'в'
+        20: 2,  # 'г'
+        11: 2,  # 'д'
+        3: 1,  # 'е'
+        23: 1,  # 'ж'
+        15: 2,  # 'з'
+        2: 0,  # 'и'
+        26: 2,  # 'й'
+        12: 2,  # 'к'
+        10: 3,  # 'л'
+        14: 2,  # 'м'
+        6: 3,  # 'н'
+        4: 0,  # 'о'
+        13: 2,  # 'п'
+        7: 2,  # 'р'
+        8: 2,  # 'с'
+        5: 2,  # 'т'
+        19: 1,  # 'у'
+        29: 2,  # 'ф'
+        25: 1,  # 'х'
+        22: 1,  # 'ц'
+        21: 1,  # 'ч'
+        27: 1,  # 'ш'
+        24: 0,  # 'щ'
+        17: 0,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 0,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    32: {  # 'Б'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 2,  # 'А'
+        32: 2,  # 'Б'
+        35: 1,  # 'В'
+        43: 1,  # 'Г'
+        37: 2,  # 'Д'
+        44: 1,  # 'Е'
+        55: 1,  # 'Ж'
+        47: 2,  # 'З'
+        40: 1,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 1,  # 'Л'
+        38: 1,  # 'М'
+        36: 2,  # 'Н'
+        41: 2,  # 'О'
+        30: 1,  # 'П'
+        39: 1,  # 'Р'
+        28: 2,  # 'С'
+        34: 2,  # 'Т'
+        51: 1,  # 'У'
+        48: 2,  # 'Ф'
+        49: 1,  # 'Х'
+        53: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 1,  # 'Щ'
+        61: 2,  # 'Ъ'
+        60: 1,  # 'Ю'
+        56: 1,  # 'Я'
+        1: 3,  # 'а'
+        18: 0,  # 'б'
+        9: 0,  # 'в'
+        20: 0,  # 'г'
+        11: 1,  # 'д'
+        3: 3,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 2,  # 'и'
+        26: 0,  # 'й'
+        12: 0,  # 'к'
+        10: 2,  # 'л'
+        14: 0,  # 'м'
+        6: 0,  # 'н'
+        4: 3,  # 'о'
+        13: 0,  # 'п'
+        7: 2,  # 'р'
+        8: 1,  # 'с'
+        5: 0,  # 'т'
+        19: 2,  # 'у'
+        29: 0,  # 'ф'
+        25: 1,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 3,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 1,  # 'ю'
+        16: 2,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    35: {  # 'В'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 2,  # 'А'
+        32: 1,  # 'Б'
+        35: 1,  # 'В'
+        43: 0,  # 'Г'
+        37: 1,  # 'Д'
+        44: 2,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 2,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 1,  # 'Л'
+        38: 1,  # 'М'
+        36: 1,  # 'Н'
+        41: 1,  # 'О'
+        30: 1,  # 'П'
+        39: 2,  # 'Р'
+        28: 2,  # 'С'
+        34: 1,  # 'Т'
+        51: 1,  # 'У'
+        48: 2,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 1,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 1,  # 'Ъ'
+        60: 1,  # 'Ю'
+        56: 2,  # 'Я'
+        1: 3,  # 'а'
+        18: 1,  # 'б'
+        9: 0,  # 'в'
+        20: 0,  # 'г'
+        11: 1,  # 'д'
+        3: 3,  # 'е'
+        23: 1,  # 'ж'
+        15: 2,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 1,  # 'к'
+        10: 2,  # 'л'
+        14: 1,  # 'м'
+        6: 2,  # 'н'
+        4: 2,  # 'о'
+        13: 1,  # 'п'
+        7: 2,  # 'р'
+        8: 2,  # 'с'
+        5: 2,  # 'т'
+        19: 1,  # 'у'
+        29: 0,  # 'ф'
+        25: 1,  # 'х'
+        22: 0,  # 'ц'
+        21: 2,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 2,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 1,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    43: {  # 'Г'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 2,  # 'А'
+        32: 1,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 1,  # 'Д'
+        44: 2,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 1,  # 'З'
+        40: 1,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 1,  # 'Л'
+        38: 0,  # 'М'
+        36: 1,  # 'Н'
+        41: 1,  # 'О'
+        30: 0,  # 'П'
+        39: 1,  # 'Р'
+        28: 1,  # 'С'
+        34: 0,  # 'Т'
+        51: 1,  # 'У'
+        48: 1,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 1,  # 'Щ'
+        61: 1,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 2,  # 'а'
+        18: 1,  # 'б'
+        9: 1,  # 'в'
+        20: 0,  # 'г'
+        11: 1,  # 'д'
+        3: 3,  # 'е'
+        23: 1,  # 'ж'
+        15: 0,  # 'з'
+        2: 2,  # 'и'
+        26: 0,  # 'й'
+        12: 1,  # 'к'
+        10: 2,  # 'л'
+        14: 1,  # 'м'
+        6: 1,  # 'н'
+        4: 2,  # 'о'
+        13: 0,  # 'п'
+        7: 2,  # 'р'
+        8: 0,  # 'с'
+        5: 0,  # 'т'
+        19: 2,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 1,  # 'щ'
+        17: 2,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 1,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    37: {  # 'Д'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 2,  # 'А'
+        32: 1,  # 'Б'
+        35: 2,  # 'В'
+        43: 1,  # 'Г'
+        37: 2,  # 'Д'
+        44: 2,  # 'Е'
+        55: 2,  # 'Ж'
+        47: 1,  # 'З'
+        40: 2,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 1,  # 'Л'
+        38: 1,  # 'М'
+        36: 1,  # 'Н'
+        41: 2,  # 'О'
+        30: 2,  # 'П'
+        39: 1,  # 'Р'
+        28: 2,  # 'С'
+        34: 1,  # 'Т'
+        51: 1,  # 'У'
+        48: 1,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 1,  # 'Ъ'
+        60: 1,  # 'Ю'
+        56: 1,  # 'Я'
+        1: 3,  # 'а'
+        18: 0,  # 'б'
+        9: 2,  # 'в'
+        20: 0,  # 'г'
+        11: 0,  # 'д'
+        3: 3,  # 'е'
+        23: 3,  # 'ж'
+        15: 1,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 0,  # 'к'
+        10: 1,  # 'л'
+        14: 1,  # 'м'
+        6: 2,  # 'н'
+        4: 3,  # 'о'
+        13: 0,  # 'п'
+        7: 2,  # 'р'
+        8: 0,  # 'с'
+        5: 0,  # 'т'
+        19: 2,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 2,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 2,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    44: {  # 'Е'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 1,  # 'А'
+        32: 1,  # 'Б'
+        35: 2,  # 'В'
+        43: 1,  # 'Г'
+        37: 1,  # 'Д'
+        44: 1,  # 'Е'
+        55: 1,  # 'Ж'
+        47: 1,  # 'З'
+        40: 1,  # 'И'
+        59: 1,  # 'Й'
+        33: 2,  # 'К'
+        46: 2,  # 'Л'
+        38: 1,  # 'М'
+        36: 2,  # 'Н'
+        41: 2,  # 'О'
+        30: 1,  # 'П'
+        39: 2,  # 'Р'
+        28: 2,  # 'С'
+        34: 2,  # 'Т'
+        51: 1,  # 'У'
+        48: 2,  # 'Ф'
+        49: 1,  # 'Х'
+        53: 2,  # 'Ц'
+        50: 1,  # 'Ч'
+        54: 1,  # 'Ш'
+        57: 1,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 1,  # 'Я'
+        1: 0,  # 'а'
+        18: 1,  # 'б'
+        9: 2,  # 'в'
+        20: 1,  # 'г'
+        11: 2,  # 'д'
+        3: 0,  # 'е'
+        23: 1,  # 'ж'
+        15: 1,  # 'з'
+        2: 0,  # 'и'
+        26: 1,  # 'й'
+        12: 2,  # 'к'
+        10: 2,  # 'л'
+        14: 2,  # 'м'
+        6: 2,  # 'н'
+        4: 0,  # 'о'
+        13: 1,  # 'п'
+        7: 2,  # 'р'
+        8: 2,  # 'с'
+        5: 1,  # 'т'
+        19: 1,  # 'у'
+        29: 1,  # 'ф'
+        25: 1,  # 'х'
+        22: 0,  # 'ц'
+        21: 1,  # 'ч'
+        27: 1,  # 'ш'
+        24: 1,  # 'щ'
+        17: 1,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 1,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    55: {  # 'Ж'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 1,  # 'А'
+        32: 0,  # 'Б'
+        35: 1,  # 'В'
+        43: 0,  # 'Г'
+        37: 1,  # 'Д'
+        44: 1,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 1,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 1,  # 'Н'
+        41: 1,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 1,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 2,  # 'а'
+        18: 0,  # 'б'
+        9: 0,  # 'в'
+        20: 0,  # 'г'
+        11: 1,  # 'д'
+        3: 2,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 2,  # 'и'
+        26: 0,  # 'й'
+        12: 0,  # 'к'
+        10: 0,  # 'л'
+        14: 0,  # 'м'
+        6: 0,  # 'н'
+        4: 2,  # 'о'
+        13: 1,  # 'п'
+        7: 1,  # 'р'
+        8: 0,  # 'с'
+        5: 0,  # 'т'
+        19: 1,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 1,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 1,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    47: {  # 'З'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 2,  # 'А'
+        32: 1,  # 'Б'
+        35: 1,  # 'В'
+        43: 1,  # 'Г'
+        37: 1,  # 'Д'
+        44: 1,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 1,  # 'З'
+        40: 1,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 1,  # 'Л'
+        38: 1,  # 'М'
+        36: 2,  # 'Н'
+        41: 1,  # 'О'
+        30: 1,  # 'П'
+        39: 1,  # 'Р'
+        28: 1,  # 'С'
+        34: 1,  # 'Т'
+        51: 1,  # 'У'
+        48: 0,  # 'Ф'
+        49: 1,  # 'Х'
+        53: 1,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 1,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 1,  # 'Я'
+        1: 3,  # 'а'
+        18: 1,  # 'б'
+        9: 2,  # 'в'
+        20: 1,  # 'г'
+        11: 2,  # 'д'
+        3: 2,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 1,  # 'и'
+        26: 0,  # 'й'
+        12: 0,  # 'к'
+        10: 2,  # 'л'
+        14: 1,  # 'м'
+        6: 1,  # 'н'
+        4: 1,  # 'о'
+        13: 0,  # 'п'
+        7: 1,  # 'р'
+        8: 0,  # 'с'
+        5: 0,  # 'т'
+        19: 1,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 1,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 1,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    40: {  # 'И'
+        63: 0,  # 'e'
+        45: 1,  # '\xad'
+        31: 1,  # 'А'
+        32: 1,  # 'Б'
+        35: 1,  # 'В'
+        43: 1,  # 'Г'
+        37: 1,  # 'Д'
+        44: 2,  # 'Е'
+        55: 1,  # 'Ж'
+        47: 2,  # 'З'
+        40: 1,  # 'И'
+        59: 1,  # 'Й'
+        33: 2,  # 'К'
+        46: 2,  # 'Л'
+        38: 2,  # 'М'
+        36: 2,  # 'Н'
+        41: 1,  # 'О'
+        30: 1,  # 'П'
+        39: 2,  # 'Р'
+        28: 2,  # 'С'
+        34: 2,  # 'Т'
+        51: 0,  # 'У'
+        48: 1,  # 'Ф'
+        49: 1,  # 'Х'
+        53: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        54: 1,  # 'Ш'
+        57: 1,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 2,  # 'Я'
+        1: 1,  # 'а'
+        18: 1,  # 'б'
+        9: 3,  # 'в'
+        20: 2,  # 'г'
+        11: 1,  # 'д'
+        3: 1,  # 'е'
+        23: 0,  # 'ж'
+        15: 3,  # 'з'
+        2: 0,  # 'и'
+        26: 1,  # 'й'
+        12: 1,  # 'к'
+        10: 2,  # 'л'
+        14: 2,  # 'м'
+        6: 2,  # 'н'
+        4: 0,  # 'о'
+        13: 1,  # 'п'
+        7: 2,  # 'р'
+        8: 2,  # 'с'
+        5: 2,  # 'т'
+        19: 0,  # 'у'
+        29: 1,  # 'ф'
+        25: 1,  # 'х'
+        22: 1,  # 'ц'
+        21: 1,  # 'ч'
+        27: 1,  # 'ш'
+        24: 1,  # 'щ'
+        17: 0,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 0,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    59: {  # 'Й'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 1,  # 'Д'
+        44: 1,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 1,  # 'Л'
+        38: 1,  # 'М'
+        36: 1,  # 'Н'
+        41: 1,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 1,  # 'С'
+        34: 1,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 1,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 1,  # 'Я'
+        1: 0,  # 'а'
+        18: 0,  # 'б'
+        9: 0,  # 'в'
+        20: 0,  # 'г'
+        11: 0,  # 'д'
+        3: 1,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 0,  # 'и'
+        26: 0,  # 'й'
+        12: 0,  # 'к'
+        10: 0,  # 'л'
+        14: 0,  # 'м'
+        6: 0,  # 'н'
+        4: 2,  # 'о'
+        13: 0,  # 'п'
+        7: 0,  # 'р'
+        8: 0,  # 'с'
+        5: 0,  # 'т'
+        19: 0,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 1,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 0,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    33: {  # 'К'
+        63: 0,  # 'e'
+        45: 1,  # '\xad'
+        31: 2,  # 'А'
+        32: 1,  # 'Б'
+        35: 1,  # 'В'
+        43: 1,  # 'Г'
+        37: 1,  # 'Д'
+        44: 1,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 1,  # 'З'
+        40: 2,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 1,  # 'Л'
+        38: 0,  # 'М'
+        36: 2,  # 'Н'
+        41: 2,  # 'О'
+        30: 2,  # 'П'
+        39: 1,  # 'Р'
+        28: 2,  # 'С'
+        34: 1,  # 'Т'
+        51: 1,  # 'У'
+        48: 1,  # 'Ф'
+        49: 1,  # 'Х'
+        53: 1,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 1,  # 'Ъ'
+        60: 1,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 0,  # 'б'
+        9: 1,  # 'в'
+        20: 0,  # 'г'
+        11: 0,  # 'д'
+        3: 2,  # 'е'
+        23: 1,  # 'ж'
+        15: 0,  # 'з'
+        2: 2,  # 'и'
+        26: 0,  # 'й'
+        12: 0,  # 'к'
+        10: 2,  # 'л'
+        14: 1,  # 'м'
+        6: 2,  # 'н'
+        4: 3,  # 'о'
+        13: 0,  # 'п'
+        7: 3,  # 'р'
+        8: 1,  # 'с'
+        5: 0,  # 'т'
+        19: 2,  # 'у'
+        29: 0,  # 'ф'
+        25: 1,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 1,  # 'ш'
+        24: 0,  # 'щ'
+        17: 2,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 2,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    46: {  # 'Л'
+        63: 1,  # 'e'
+        45: 0,  # '\xad'
+        31: 2,  # 'А'
+        32: 1,  # 'Б'
+        35: 1,  # 'В'
+        43: 2,  # 'Г'
+        37: 1,  # 'Д'
+        44: 2,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 1,  # 'З'
+        40: 2,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 1,  # 'Л'
+        38: 0,  # 'М'
+        36: 1,  # 'Н'
+        41: 2,  # 'О'
+        30: 1,  # 'П'
+        39: 0,  # 'Р'
+        28: 1,  # 'С'
+        34: 1,  # 'Т'
+        51: 1,  # 'У'
+        48: 0,  # 'Ф'
+        49: 1,  # 'Х'
+        53: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 1,  # 'Ъ'
+        60: 1,  # 'Ю'
+        56: 1,  # 'Я'
+        1: 2,  # 'а'
+        18: 0,  # 'б'
+        9: 1,  # 'в'
+        20: 0,  # 'г'
+        11: 0,  # 'д'
+        3: 3,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 2,  # 'и'
+        26: 0,  # 'й'
+        12: 0,  # 'к'
+        10: 0,  # 'л'
+        14: 0,  # 'м'
+        6: 0,  # 'н'
+        4: 2,  # 'о'
+        13: 0,  # 'п'
+        7: 0,  # 'р'
+        8: 0,  # 'с'
+        5: 0,  # 'т'
+        19: 2,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 1,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 2,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    38: {  # 'М'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 2,  # 'А'
+        32: 1,  # 'Б'
+        35: 2,  # 'В'
+        43: 0,  # 'Г'
+        37: 1,  # 'Д'
+        44: 1,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 1,  # 'З'
+        40: 2,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 1,  # 'Л'
+        38: 1,  # 'М'
+        36: 1,  # 'Н'
+        41: 2,  # 'О'
+        30: 1,  # 'П'
+        39: 1,  # 'Р'
+        28: 2,  # 'С'
+        34: 1,  # 'Т'
+        51: 1,  # 'У'
+        48: 1,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 1,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 1,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 1,  # 'Я'
+        1: 3,  # 'а'
+        18: 0,  # 'б'
+        9: 0,  # 'в'
+        20: 0,  # 'г'
+        11: 0,  # 'д'
+        3: 3,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 0,  # 'к'
+        10: 2,  # 'л'
+        14: 0,  # 'м'
+        6: 2,  # 'н'
+        4: 3,  # 'о'
+        13: 0,  # 'п'
+        7: 1,  # 'р'
+        8: 0,  # 'с'
+        5: 0,  # 'т'
+        19: 2,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 2,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 2,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    36: {  # 'Н'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 2,  # 'А'
+        32: 2,  # 'Б'
+        35: 1,  # 'В'
+        43: 1,  # 'Г'
+        37: 2,  # 'Д'
+        44: 2,  # 'Е'
+        55: 1,  # 'Ж'
+        47: 1,  # 'З'
+        40: 2,  # 'И'
+        59: 1,  # 'Й'
+        33: 2,  # 'К'
+        46: 1,  # 'Л'
+        38: 1,  # 'М'
+        36: 1,  # 'Н'
+        41: 2,  # 'О'
+        30: 1,  # 'П'
+        39: 1,  # 'Р'
+        28: 2,  # 'С'
+        34: 2,  # 'Т'
+        51: 1,  # 'У'
+        48: 1,  # 'Ф'
+        49: 1,  # 'Х'
+        53: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        54: 1,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 1,  # 'Ъ'
+        60: 1,  # 'Ю'
+        56: 1,  # 'Я'
+        1: 3,  # 'а'
+        18: 0,  # 'б'
+        9: 0,  # 'в'
+        20: 1,  # 'г'
+        11: 0,  # 'д'
+        3: 3,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 0,  # 'к'
+        10: 0,  # 'л'
+        14: 0,  # 'м'
+        6: 0,  # 'н'
+        4: 3,  # 'о'
+        13: 0,  # 'п'
+        7: 0,  # 'р'
+        8: 0,  # 'с'
+        5: 1,  # 'т'
+        19: 1,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 1,  # 'ш'
+        24: 0,  # 'щ'
+        17: 0,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 2,  # 'ю'
+        16: 2,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    41: {  # 'О'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 1,  # 'А'
+        32: 1,  # 'Б'
+        35: 2,  # 'В'
+        43: 1,  # 'Г'
+        37: 2,  # 'Д'
+        44: 1,  # 'Е'
+        55: 1,  # 'Ж'
+        47: 1,  # 'З'
+        40: 1,  # 'И'
+        59: 1,  # 'Й'
+        33: 2,  # 'К'
+        46: 2,  # 'Л'
+        38: 2,  # 'М'
+        36: 2,  # 'Н'
+        41: 2,  # 'О'
+        30: 1,  # 'П'
+        39: 2,  # 'Р'
+        28: 2,  # 'С'
+        34: 2,  # 'Т'
+        51: 1,  # 'У'
+        48: 1,  # 'Ф'
+        49: 1,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 1,  # 'Ч'
+        54: 1,  # 'Ш'
+        57: 1,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 1,  # 'Я'
+        1: 1,  # 'а'
+        18: 2,  # 'б'
+        9: 2,  # 'в'
+        20: 2,  # 'г'
+        11: 1,  # 'д'
+        3: 1,  # 'е'
+        23: 1,  # 'ж'
+        15: 1,  # 'з'
+        2: 0,  # 'и'
+        26: 1,  # 'й'
+        12: 2,  # 'к'
+        10: 2,  # 'л'
+        14: 1,  # 'м'
+        6: 1,  # 'н'
+        4: 0,  # 'о'
+        13: 2,  # 'п'
+        7: 2,  # 'р'
+        8: 2,  # 'с'
+        5: 3,  # 'т'
+        19: 1,  # 'у'
+        29: 1,  # 'ф'
+        25: 1,  # 'х'
+        22: 1,  # 'ц'
+        21: 2,  # 'ч'
+        27: 0,  # 'ш'
+        24: 2,  # 'щ'
+        17: 0,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 0,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    30: {  # 'П'
+        63: 0,  # 'e'
+        45: 1,  # '\xad'
+        31: 2,  # 'А'
+        32: 1,  # 'Б'
+        35: 1,  # 'В'
+        43: 1,  # 'Г'
+        37: 1,  # 'Д'
+        44: 1,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 1,  # 'З'
+        40: 2,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 1,  # 'Л'
+        38: 1,  # 'М'
+        36: 1,  # 'Н'
+        41: 2,  # 'О'
+        30: 2,  # 'П'
+        39: 2,  # 'Р'
+        28: 2,  # 'С'
+        34: 1,  # 'Т'
+        51: 2,  # 'У'
+        48: 1,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        54: 1,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 1,  # 'Ъ'
+        60: 1,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 0,  # 'б'
+        9: 0,  # 'в'
+        20: 0,  # 'г'
+        11: 2,  # 'д'
+        3: 3,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 2,  # 'и'
+        26: 0,  # 'й'
+        12: 1,  # 'к'
+        10: 3,  # 'л'
+        14: 0,  # 'м'
+        6: 1,  # 'н'
+        4: 3,  # 'о'
+        13: 0,  # 'п'
+        7: 3,  # 'р'
+        8: 1,  # 'с'
+        5: 1,  # 'т'
+        19: 2,  # 'у'
+        29: 1,  # 'ф'
+        25: 1,  # 'х'
+        22: 0,  # 'ц'
+        21: 1,  # 'ч'
+        27: 1,  # 'ш'
+        24: 0,  # 'щ'
+        17: 2,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 1,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    39: {  # 'Р'
+        63: 0,  # 'e'
+        45: 1,  # '\xad'
+        31: 2,  # 'А'
+        32: 1,  # 'Б'
+        35: 1,  # 'В'
+        43: 2,  # 'Г'
+        37: 2,  # 'Д'
+        44: 2,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 1,  # 'З'
+        40: 2,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 0,  # 'Л'
+        38: 1,  # 'М'
+        36: 1,  # 'Н'
+        41: 2,  # 'О'
+        30: 2,  # 'П'
+        39: 1,  # 'Р'
+        28: 1,  # 'С'
+        34: 1,  # 'Т'
+        51: 1,  # 'У'
+        48: 1,  # 'Ф'
+        49: 1,  # 'Х'
+        53: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 1,  # 'Ъ'
+        60: 1,  # 'Ю'
+        56: 1,  # 'Я'
+        1: 3,  # 'а'
+        18: 0,  # 'б'
+        9: 0,  # 'в'
+        20: 0,  # 'г'
+        11: 0,  # 'д'
+        3: 2,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 2,  # 'и'
+        26: 0,  # 'й'
+        12: 0,  # 'к'
+        10: 0,  # 'л'
+        14: 0,  # 'м'
+        6: 1,  # 'н'
+        4: 3,  # 'о'
+        13: 0,  # 'п'
+        7: 0,  # 'р'
+        8: 1,  # 'с'
+        5: 0,  # 'т'
+        19: 3,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 1,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 1,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    28: {  # 'С'
+        63: 1,  # 'e'
+        45: 0,  # '\xad'
+        31: 3,  # 'А'
+        32: 2,  # 'Б'
+        35: 2,  # 'В'
+        43: 1,  # 'Г'
+        37: 2,  # 'Д'
+        44: 2,  # 'Е'
+        55: 1,  # 'Ж'
+        47: 1,  # 'З'
+        40: 2,  # 'И'
+        59: 0,  # 'Й'
+        33: 2,  # 'К'
+        46: 1,  # 'Л'
+        38: 1,  # 'М'
+        36: 1,  # 'Н'
+        41: 2,  # 'О'
+        30: 2,  # 'П'
+        39: 1,  # 'Р'
+        28: 2,  # 'С'
+        34: 2,  # 'Т'
+        51: 1,  # 'У'
+        48: 1,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 1,  # 'Ъ'
+        60: 1,  # 'Ю'
+        56: 1,  # 'Я'
+        1: 3,  # 'а'
+        18: 1,  # 'б'
+        9: 2,  # 'в'
+        20: 1,  # 'г'
+        11: 1,  # 'д'
+        3: 3,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 2,  # 'к'
+        10: 3,  # 'л'
+        14: 2,  # 'м'
+        6: 1,  # 'н'
+        4: 3,  # 'о'
+        13: 3,  # 'п'
+        7: 2,  # 'р'
+        8: 0,  # 'с'
+        5: 3,  # 'т'
+        19: 2,  # 'у'
+        29: 2,  # 'ф'
+        25: 1,  # 'х'
+        22: 1,  # 'ц'
+        21: 1,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 3,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 1,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    34: {  # 'Т'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 2,  # 'А'
+        32: 2,  # 'Б'
+        35: 1,  # 'В'
+        43: 0,  # 'Г'
+        37: 1,  # 'Д'
+        44: 2,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 2,  # 'И'
+        59: 0,  # 'Й'
+        33: 2,  # 'К'
+        46: 1,  # 'Л'
+        38: 1,  # 'М'
+        36: 1,  # 'Н'
+        41: 2,  # 'О'
+        30: 1,  # 'П'
+        39: 2,  # 'Р'
+        28: 2,  # 'С'
+        34: 1,  # 'Т'
+        51: 1,  # 'У'
+        48: 1,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 1,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 1,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 1,  # 'Я'
+        1: 3,  # 'а'
+        18: 1,  # 'б'
+        9: 1,  # 'в'
+        20: 0,  # 'г'
+        11: 0,  # 'д'
+        3: 3,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 2,  # 'и'
+        26: 0,  # 'й'
+        12: 1,  # 'к'
+        10: 1,  # 'л'
+        14: 0,  # 'м'
+        6: 0,  # 'н'
+        4: 3,  # 'о'
+        13: 0,  # 'п'
+        7: 3,  # 'р'
+        8: 0,  # 'с'
+        5: 0,  # 'т'
+        19: 2,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 2,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 1,  # 'ю'
+        16: 2,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    51: {  # 'У'
+        63: 0,  # 'e'
+        45: 1,  # '\xad'
+        31: 1,  # 'А'
+        32: 1,  # 'Б'
+        35: 1,  # 'В'
+        43: 1,  # 'Г'
+        37: 1,  # 'Д'
+        44: 2,  # 'Е'
+        55: 1,  # 'Ж'
+        47: 1,  # 'З'
+        40: 1,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 1,  # 'Л'
+        38: 1,  # 'М'
+        36: 1,  # 'Н'
+        41: 0,  # 'О'
+        30: 1,  # 'П'
+        39: 1,  # 'Р'
+        28: 1,  # 'С'
+        34: 2,  # 'Т'
+        51: 0,  # 'У'
+        48: 1,  # 'Ф'
+        49: 1,  # 'Х'
+        53: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        54: 1,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 1,  # 'а'
+        18: 1,  # 'б'
+        9: 2,  # 'в'
+        20: 1,  # 'г'
+        11: 1,  # 'д'
+        3: 2,  # 'е'
+        23: 1,  # 'ж'
+        15: 1,  # 'з'
+        2: 2,  # 'и'
+        26: 1,  # 'й'
+        12: 2,  # 'к'
+        10: 1,  # 'л'
+        14: 1,  # 'м'
+        6: 2,  # 'н'
+        4: 2,  # 'о'
+        13: 1,  # 'п'
+        7: 1,  # 'р'
+        8: 2,  # 'с'
+        5: 1,  # 'т'
+        19: 1,  # 'у'
+        29: 0,  # 'ф'
+        25: 1,  # 'х'
+        22: 0,  # 'ц'
+        21: 2,  # 'ч'
+        27: 1,  # 'ш'
+        24: 0,  # 'щ'
+        17: 1,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 0,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    48: {  # 'Ф'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 2,  # 'А'
+        32: 1,  # 'Б'
+        35: 1,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 1,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 2,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 1,  # 'Л'
+        38: 0,  # 'М'
+        36: 1,  # 'Н'
+        41: 1,  # 'О'
+        30: 2,  # 'П'
+        39: 1,  # 'Р'
+        28: 2,  # 'С'
+        34: 1,  # 'Т'
+        51: 1,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 2,  # 'а'
+        18: 0,  # 'б'
+        9: 0,  # 'в'
+        20: 0,  # 'г'
+        11: 0,  # 'д'
+        3: 2,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 2,  # 'и'
+        26: 0,  # 'й'
+        12: 0,  # 'к'
+        10: 2,  # 'л'
+        14: 0,  # 'м'
+        6: 0,  # 'н'
+        4: 2,  # 'о'
+        13: 0,  # 'п'
+        7: 2,  # 'р'
+        8: 0,  # 'с'
+        5: 0,  # 'т'
+        19: 1,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 1,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 1,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    49: {  # 'Х'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 1,  # 'А'
+        32: 0,  # 'Б'
+        35: 1,  # 'В'
+        43: 1,  # 'Г'
+        37: 1,  # 'Д'
+        44: 1,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 1,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 1,  # 'Л'
+        38: 1,  # 'М'
+        36: 1,  # 'Н'
+        41: 1,  # 'О'
+        30: 1,  # 'П'
+        39: 1,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 1,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 2,  # 'а'
+        18: 0,  # 'б'
+        9: 1,  # 'в'
+        20: 0,  # 'г'
+        11: 0,  # 'д'
+        3: 2,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 2,  # 'и'
+        26: 0,  # 'й'
+        12: 0,  # 'к'
+        10: 1,  # 'л'
+        14: 1,  # 'м'
+        6: 0,  # 'н'
+        4: 2,  # 'о'
+        13: 0,  # 'п'
+        7: 2,  # 'р'
+        8: 0,  # 'с'
+        5: 0,  # 'т'
+        19: 2,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 2,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 1,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    53: {  # 'Ц'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 1,  # 'А'
+        32: 0,  # 'Б'
+        35: 1,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 1,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 2,  # 'И'
+        59: 0,  # 'Й'
+        33: 2,  # 'К'
+        46: 1,  # 'Л'
+        38: 1,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 1,  # 'Р'
+        28: 2,  # 'С'
+        34: 0,  # 'Т'
+        51: 1,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 2,  # 'а'
+        18: 0,  # 'б'
+        9: 2,  # 'в'
+        20: 0,  # 'г'
+        11: 0,  # 'д'
+        3: 2,  # 'е'
+        23: 0,  # 'ж'
+        15: 1,  # 'з'
+        2: 2,  # 'и'
+        26: 0,  # 'й'
+        12: 0,  # 'к'
+        10: 0,  # 'л'
+        14: 0,  # 'м'
+        6: 0,  # 'н'
+        4: 1,  # 'о'
+        13: 0,  # 'п'
+        7: 1,  # 'р'
+        8: 0,  # 'с'
+        5: 0,  # 'т'
+        19: 1,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 1,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 1,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    50: {  # 'Ч'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 2,  # 'А'
+        32: 1,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 1,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 1,  # 'З'
+        40: 1,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 1,  # 'Л'
+        38: 0,  # 'М'
+        36: 1,  # 'Н'
+        41: 1,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 1,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 2,  # 'а'
+        18: 0,  # 'б'
+        9: 0,  # 'в'
+        20: 0,  # 'г'
+        11: 0,  # 'д'
+        3: 3,  # 'е'
+        23: 1,  # 'ж'
+        15: 0,  # 'з'
+        2: 2,  # 'и'
+        26: 0,  # 'й'
+        12: 0,  # 'к'
+        10: 1,  # 'л'
+        14: 0,  # 'м'
+        6: 0,  # 'н'
+        4: 2,  # 'о'
+        13: 0,  # 'п'
+        7: 1,  # 'р'
+        8: 0,  # 'с'
+        5: 0,  # 'т'
+        19: 2,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 1,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 0,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    54: {  # 'Ш'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 1,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 1,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 1,  # 'З'
+        40: 1,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 1,  # 'Н'
+        41: 1,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 1,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 2,  # 'а'
+        18: 0,  # 'б'
+        9: 2,  # 'в'
+        20: 0,  # 'г'
+        11: 0,  # 'д'
+        3: 2,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 2,  # 'и'
+        26: 0,  # 'й'
+        12: 1,  # 'к'
+        10: 1,  # 'л'
+        14: 1,  # 'м'
+        6: 1,  # 'н'
+        4: 2,  # 'о'
+        13: 1,  # 'п'
+        7: 1,  # 'р'
+        8: 0,  # 'с'
+        5: 0,  # 'т'
+        19: 2,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 1,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 1,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 0,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    57: {  # 'Щ'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 1,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 1,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 1,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 1,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 2,  # 'а'
+        18: 0,  # 'б'
+        9: 0,  # 'в'
+        20: 0,  # 'г'
+        11: 0,  # 'д'
+        3: 2,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 1,  # 'и'
+        26: 0,  # 'й'
+        12: 0,  # 'к'
+        10: 0,  # 'л'
+        14: 0,  # 'м'
+        6: 0,  # 'н'
+        4: 1,  # 'о'
+        13: 0,  # 'п'
+        7: 1,  # 'р'
+        8: 0,  # 'с'
+        5: 0,  # 'т'
+        19: 1,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 1,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 0,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    61: {  # 'Ъ'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 1,  # 'Б'
+        35: 1,  # 'В'
+        43: 0,  # 'Г'
+        37: 1,  # 'Д'
+        44: 0,  # 'Е'
+        55: 1,  # 'Ж'
+        47: 1,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 2,  # 'Л'
+        38: 1,  # 'М'
+        36: 1,  # 'Н'
+        41: 0,  # 'О'
+        30: 1,  # 'П'
+        39: 2,  # 'Р'
+        28: 1,  # 'С'
+        34: 1,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 1,  # 'Х'
+        53: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        54: 1,  # 'Ш'
+        57: 1,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 0,  # 'а'
+        18: 0,  # 'б'
+        9: 0,  # 'в'
+        20: 0,  # 'г'
+        11: 0,  # 'д'
+        3: 0,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 0,  # 'и'
+        26: 0,  # 'й'
+        12: 0,  # 'к'
+        10: 1,  # 'л'
+        14: 0,  # 'м'
+        6: 1,  # 'н'
+        4: 0,  # 'о'
+        13: 0,  # 'п'
+        7: 1,  # 'р'
+        8: 0,  # 'с'
+        5: 0,  # 'т'
+        19: 0,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 0,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 0,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    60: {  # 'Ю'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 1,  # 'А'
+        32: 1,  # 'Б'
+        35: 0,  # 'В'
+        43: 1,  # 'Г'
+        37: 1,  # 'Д'
+        44: 0,  # 'Е'
+        55: 1,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 1,  # 'Л'
+        38: 0,  # 'М'
+        36: 1,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 1,  # 'Р'
+        28: 1,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 0,  # 'а'
+        18: 1,  # 'б'
+        9: 1,  # 'в'
+        20: 2,  # 'г'
+        11: 1,  # 'д'
+        3: 0,  # 'е'
+        23: 2,  # 'ж'
+        15: 1,  # 'з'
+        2: 1,  # 'и'
+        26: 0,  # 'й'
+        12: 1,  # 'к'
+        10: 1,  # 'л'
+        14: 1,  # 'м'
+        6: 1,  # 'н'
+        4: 0,  # 'о'
+        13: 1,  # 'п'
+        7: 1,  # 'р'
+        8: 1,  # 'с'
+        5: 1,  # 'т'
+        19: 0,  # 'у'
+        29: 0,  # 'ф'
+        25: 1,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 0,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 0,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    56: {  # 'Я'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 1,  # 'Б'
+        35: 1,  # 'В'
+        43: 1,  # 'Г'
+        37: 1,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 1,  # 'Л'
+        38: 1,  # 'М'
+        36: 1,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 1,  # 'С'
+        34: 2,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 0,  # 'а'
+        18: 1,  # 'б'
+        9: 1,  # 'в'
+        20: 1,  # 'г'
+        11: 1,  # 'д'
+        3: 0,  # 'е'
+        23: 0,  # 'ж'
+        15: 1,  # 'з'
+        2: 1,  # 'и'
+        26: 1,  # 'й'
+        12: 1,  # 'к'
+        10: 1,  # 'л'
+        14: 2,  # 'м'
+        6: 2,  # 'н'
+        4: 0,  # 'о'
+        13: 2,  # 'п'
+        7: 1,  # 'р'
+        8: 1,  # 'с'
+        5: 1,  # 'т'
+        19: 0,  # 'у'
+        29: 0,  # 'ф'
+        25: 1,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 1,  # 'ш'
+        24: 0,  # 'щ'
+        17: 0,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 1,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    1: {  # 'а'
+        63: 1,  # 'e'
+        45: 1,  # '\xad'
+        31: 1,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 1,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 1,  # 'а'
+        18: 3,  # 'б'
+        9: 3,  # 'в'
+        20: 3,  # 'г'
+        11: 3,  # 'д'
+        3: 3,  # 'е'
+        23: 3,  # 'ж'
+        15: 3,  # 'з'
+        2: 3,  # 'и'
+        26: 3,  # 'й'
+        12: 3,  # 'к'
+        10: 3,  # 'л'
+        14: 3,  # 'м'
+        6: 3,  # 'н'
+        4: 2,  # 'о'
+        13: 3,  # 'п'
+        7: 3,  # 'р'
+        8: 3,  # 'с'
+        5: 3,  # 'т'
+        19: 3,  # 'у'
+        29: 3,  # 'ф'
+        25: 3,  # 'х'
+        22: 3,  # 'ц'
+        21: 3,  # 'ч'
+        27: 3,  # 'ш'
+        24: 3,  # 'щ'
+        17: 0,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 1,  # 'ю'
+        16: 3,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    18: {  # 'б'
+        63: 1,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 0,  # 'б'
+        9: 3,  # 'в'
+        20: 1,  # 'г'
+        11: 2,  # 'д'
+        3: 3,  # 'е'
+        23: 1,  # 'ж'
+        15: 1,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 1,  # 'к'
+        10: 3,  # 'л'
+        14: 2,  # 'м'
+        6: 3,  # 'н'
+        4: 3,  # 'о'
+        13: 1,  # 'п'
+        7: 3,  # 'р'
+        8: 3,  # 'с'
+        5: 0,  # 'т'
+        19: 3,  # 'у'
+        29: 0,  # 'ф'
+        25: 2,  # 'х'
+        22: 1,  # 'ц'
+        21: 1,  # 'ч'
+        27: 1,  # 'ш'
+        24: 3,  # 'щ'
+        17: 3,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 2,  # 'ю'
+        16: 3,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    9: {  # 'в'
+        63: 1,  # 'e'
+        45: 1,  # '\xad'
+        31: 0,  # 'А'
+        32: 1,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 1,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 1,  # 'б'
+        9: 0,  # 'в'
+        20: 2,  # 'г'
+        11: 3,  # 'д'
+        3: 3,  # 'е'
+        23: 1,  # 'ж'
+        15: 3,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 3,  # 'к'
+        10: 3,  # 'л'
+        14: 2,  # 'м'
+        6: 3,  # 'н'
+        4: 3,  # 'о'
+        13: 2,  # 'п'
+        7: 3,  # 'р'
+        8: 3,  # 'с'
+        5: 3,  # 'т'
+        19: 2,  # 'у'
+        29: 0,  # 'ф'
+        25: 2,  # 'х'
+        22: 2,  # 'ц'
+        21: 3,  # 'ч'
+        27: 2,  # 'ш'
+        24: 1,  # 'щ'
+        17: 3,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 2,  # 'ю'
+        16: 3,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    20: {  # 'г'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 1,  # 'б'
+        9: 2,  # 'в'
+        20: 1,  # 'г'
+        11: 2,  # 'д'
+        3: 3,  # 'е'
+        23: 0,  # 'ж'
+        15: 1,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 1,  # 'к'
+        10: 3,  # 'л'
+        14: 1,  # 'м'
+        6: 3,  # 'н'
+        4: 3,  # 'о'
+        13: 1,  # 'п'
+        7: 3,  # 'р'
+        8: 2,  # 'с'
+        5: 2,  # 'т'
+        19: 3,  # 'у'
+        29: 1,  # 'ф'
+        25: 1,  # 'х'
+        22: 0,  # 'ц'
+        21: 1,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 3,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 1,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    11: {  # 'д'
+        63: 1,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 2,  # 'б'
+        9: 3,  # 'в'
+        20: 2,  # 'г'
+        11: 2,  # 'д'
+        3: 3,  # 'е'
+        23: 3,  # 'ж'
+        15: 2,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 3,  # 'к'
+        10: 3,  # 'л'
+        14: 3,  # 'м'
+        6: 3,  # 'н'
+        4: 3,  # 'о'
+        13: 3,  # 'п'
+        7: 3,  # 'р'
+        8: 3,  # 'с'
+        5: 1,  # 'т'
+        19: 3,  # 'у'
+        29: 1,  # 'ф'
+        25: 2,  # 'х'
+        22: 2,  # 'ц'
+        21: 2,  # 'ч'
+        27: 1,  # 'ш'
+        24: 1,  # 'щ'
+        17: 3,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 1,  # 'ю'
+        16: 3,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    3: {  # 'е'
+        63: 0,  # 'e'
+        45: 1,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 2,  # 'а'
+        18: 3,  # 'б'
+        9: 3,  # 'в'
+        20: 3,  # 'г'
+        11: 3,  # 'д'
+        3: 2,  # 'е'
+        23: 3,  # 'ж'
+        15: 3,  # 'з'
+        2: 2,  # 'и'
+        26: 3,  # 'й'
+        12: 3,  # 'к'
+        10: 3,  # 'л'
+        14: 3,  # 'м'
+        6: 3,  # 'н'
+        4: 3,  # 'о'
+        13: 3,  # 'п'
+        7: 3,  # 'р'
+        8: 3,  # 'с'
+        5: 3,  # 'т'
+        19: 2,  # 'у'
+        29: 3,  # 'ф'
+        25: 3,  # 'х'
+        22: 3,  # 'ц'
+        21: 3,  # 'ч'
+        27: 3,  # 'ш'
+        24: 3,  # 'щ'
+        17: 1,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 1,  # 'ю'
+        16: 3,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    23: {  # 'ж'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 3,  # 'б'
+        9: 2,  # 'в'
+        20: 1,  # 'г'
+        11: 3,  # 'д'
+        3: 3,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 2,  # 'к'
+        10: 1,  # 'л'
+        14: 1,  # 'м'
+        6: 3,  # 'н'
+        4: 2,  # 'о'
+        13: 1,  # 'п'
+        7: 1,  # 'р'
+        8: 1,  # 'с'
+        5: 1,  # 'т'
+        19: 2,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 1,  # 'ц'
+        21: 1,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 2,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 0,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    15: {  # 'з'
+        63: 1,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 3,  # 'б'
+        9: 3,  # 'в'
+        20: 3,  # 'г'
+        11: 3,  # 'д'
+        3: 3,  # 'е'
+        23: 1,  # 'ж'
+        15: 1,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 3,  # 'к'
+        10: 3,  # 'л'
+        14: 3,  # 'м'
+        6: 3,  # 'н'
+        4: 3,  # 'о'
+        13: 3,  # 'п'
+        7: 3,  # 'р'
+        8: 3,  # 'с'
+        5: 3,  # 'т'
+        19: 3,  # 'у'
+        29: 1,  # 'ф'
+        25: 2,  # 'х'
+        22: 2,  # 'ц'
+        21: 2,  # 'ч'
+        27: 2,  # 'ш'
+        24: 1,  # 'щ'
+        17: 2,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 1,  # 'ю'
+        16: 2,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    2: {  # 'и'
+        63: 1,  # 'e'
+        45: 1,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 1,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 1,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 1,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 1,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 3,  # 'б'
+        9: 3,  # 'в'
+        20: 3,  # 'г'
+        11: 3,  # 'д'
+        3: 3,  # 'е'
+        23: 3,  # 'ж'
+        15: 3,  # 'з'
+        2: 3,  # 'и'
+        26: 3,  # 'й'
+        12: 3,  # 'к'
+        10: 3,  # 'л'
+        14: 3,  # 'м'
+        6: 3,  # 'н'
+        4: 3,  # 'о'
+        13: 3,  # 'п'
+        7: 3,  # 'р'
+        8: 3,  # 'с'
+        5: 3,  # 'т'
+        19: 2,  # 'у'
+        29: 3,  # 'ф'
+        25: 3,  # 'х'
+        22: 3,  # 'ц'
+        21: 3,  # 'ч'
+        27: 3,  # 'ш'
+        24: 3,  # 'щ'
+        17: 2,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 1,  # 'ю'
+        16: 3,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    26: {  # 'й'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 1,  # 'а'
+        18: 2,  # 'б'
+        9: 2,  # 'в'
+        20: 1,  # 'г'
+        11: 2,  # 'д'
+        3: 2,  # 'е'
+        23: 0,  # 'ж'
+        15: 2,  # 'з'
+        2: 1,  # 'и'
+        26: 0,  # 'й'
+        12: 3,  # 'к'
+        10: 2,  # 'л'
+        14: 2,  # 'м'
+        6: 3,  # 'н'
+        4: 2,  # 'о'
+        13: 1,  # 'п'
+        7: 2,  # 'р'
+        8: 3,  # 'с'
+        5: 3,  # 'т'
+        19: 1,  # 'у'
+        29: 2,  # 'ф'
+        25: 1,  # 'х'
+        22: 2,  # 'ц'
+        21: 2,  # 'ч'
+        27: 1,  # 'ш'
+        24: 1,  # 'щ'
+        17: 1,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 0,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    12: {  # 'к'
+        63: 1,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 1,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 1,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 1,  # 'б'
+        9: 3,  # 'в'
+        20: 2,  # 'г'
+        11: 1,  # 'д'
+        3: 3,  # 'е'
+        23: 0,  # 'ж'
+        15: 2,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 1,  # 'к'
+        10: 3,  # 'л'
+        14: 2,  # 'м'
+        6: 3,  # 'н'
+        4: 3,  # 'о'
+        13: 1,  # 'п'
+        7: 3,  # 'р'
+        8: 3,  # 'с'
+        5: 3,  # 'т'
+        19: 3,  # 'у'
+        29: 1,  # 'ф'
+        25: 1,  # 'х'
+        22: 3,  # 'ц'
+        21: 2,  # 'ч'
+        27: 1,  # 'ш'
+        24: 0,  # 'щ'
+        17: 3,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 2,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    10: {  # 'л'
+        63: 1,  # 'e'
+        45: 1,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 1,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 3,  # 'б'
+        9: 3,  # 'в'
+        20: 3,  # 'г'
+        11: 2,  # 'д'
+        3: 3,  # 'е'
+        23: 3,  # 'ж'
+        15: 2,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 3,  # 'к'
+        10: 1,  # 'л'
+        14: 2,  # 'м'
+        6: 3,  # 'н'
+        4: 3,  # 'о'
+        13: 2,  # 'п'
+        7: 2,  # 'р'
+        8: 3,  # 'с'
+        5: 3,  # 'т'
+        19: 3,  # 'у'
+        29: 2,  # 'ф'
+        25: 2,  # 'х'
+        22: 2,  # 'ц'
+        21: 2,  # 'ч'
+        27: 2,  # 'ш'
+        24: 1,  # 'щ'
+        17: 3,  # 'ъ'
+        52: 2,  # 'ь'
+        42: 3,  # 'ю'
+        16: 3,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    14: {  # 'м'
+        63: 1,  # 'e'
+        45: 0,  # '\xad'
+        31: 1,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 3,  # 'б'
+        9: 3,  # 'в'
+        20: 1,  # 'г'
+        11: 1,  # 'д'
+        3: 3,  # 'е'
+        23: 1,  # 'ж'
+        15: 1,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 2,  # 'к'
+        10: 3,  # 'л'
+        14: 1,  # 'м'
+        6: 3,  # 'н'
+        4: 3,  # 'о'
+        13: 3,  # 'п'
+        7: 2,  # 'р'
+        8: 2,  # 'с'
+        5: 1,  # 'т'
+        19: 3,  # 'у'
+        29: 2,  # 'ф'
+        25: 1,  # 'х'
+        22: 2,  # 'ц'
+        21: 2,  # 'ч'
+        27: 2,  # 'ш'
+        24: 1,  # 'щ'
+        17: 3,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 2,  # 'ю'
+        16: 3,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    6: {  # 'н'
+        63: 1,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 1,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 2,  # 'б'
+        9: 2,  # 'в'
+        20: 3,  # 'г'
+        11: 3,  # 'д'
+        3: 3,  # 'е'
+        23: 2,  # 'ж'
+        15: 2,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 3,  # 'к'
+        10: 2,  # 'л'
+        14: 1,  # 'м'
+        6: 3,  # 'н'
+        4: 3,  # 'о'
+        13: 1,  # 'п'
+        7: 2,  # 'р'
+        8: 3,  # 'с'
+        5: 3,  # 'т'
+        19: 3,  # 'у'
+        29: 3,  # 'ф'
+        25: 2,  # 'х'
+        22: 3,  # 'ц'
+        21: 3,  # 'ч'
+        27: 2,  # 'ш'
+        24: 1,  # 'щ'
+        17: 3,  # 'ъ'
+        52: 2,  # 'ь'
+        42: 2,  # 'ю'
+        16: 3,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    4: {  # 'о'
+        63: 0,  # 'e'
+        45: 1,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 2,  # 'а'
+        18: 3,  # 'б'
+        9: 3,  # 'в'
+        20: 3,  # 'г'
+        11: 3,  # 'д'
+        3: 3,  # 'е'
+        23: 3,  # 'ж'
+        15: 3,  # 'з'
+        2: 3,  # 'и'
+        26: 3,  # 'й'
+        12: 3,  # 'к'
+        10: 3,  # 'л'
+        14: 3,  # 'м'
+        6: 3,  # 'н'
+        4: 2,  # 'о'
+        13: 3,  # 'п'
+        7: 3,  # 'р'
+        8: 3,  # 'с'
+        5: 3,  # 'т'
+        19: 2,  # 'у'
+        29: 3,  # 'ф'
+        25: 3,  # 'х'
+        22: 3,  # 'ц'
+        21: 3,  # 'ч'
+        27: 3,  # 'ш'
+        24: 3,  # 'щ'
+        17: 1,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 1,  # 'ю'
+        16: 3,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    13: {  # 'п'
+        63: 1,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 1,  # 'б'
+        9: 2,  # 'в'
+        20: 1,  # 'г'
+        11: 1,  # 'д'
+        3: 3,  # 'е'
+        23: 0,  # 'ж'
+        15: 1,  # 'з'
+        2: 3,  # 'и'
+        26: 1,  # 'й'
+        12: 2,  # 'к'
+        10: 3,  # 'л'
+        14: 1,  # 'м'
+        6: 2,  # 'н'
+        4: 3,  # 'о'
+        13: 1,  # 'п'
+        7: 3,  # 'р'
+        8: 2,  # 'с'
+        5: 2,  # 'т'
+        19: 3,  # 'у'
+        29: 1,  # 'ф'
+        25: 1,  # 'х'
+        22: 2,  # 'ц'
+        21: 2,  # 'ч'
+        27: 1,  # 'ш'
+        24: 1,  # 'щ'
+        17: 3,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 2,  # 'ю'
+        16: 2,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    7: {  # 'р'
+        63: 1,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 3,  # 'б'
+        9: 3,  # 'в'
+        20: 3,  # 'г'
+        11: 3,  # 'д'
+        3: 3,  # 'е'
+        23: 3,  # 'ж'
+        15: 2,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 3,  # 'к'
+        10: 3,  # 'л'
+        14: 3,  # 'м'
+        6: 3,  # 'н'
+        4: 3,  # 'о'
+        13: 2,  # 'п'
+        7: 1,  # 'р'
+        8: 3,  # 'с'
+        5: 3,  # 'т'
+        19: 3,  # 'у'
+        29: 2,  # 'ф'
+        25: 3,  # 'х'
+        22: 3,  # 'ц'
+        21: 2,  # 'ч'
+        27: 3,  # 'ш'
+        24: 1,  # 'щ'
+        17: 3,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 2,  # 'ю'
+        16: 3,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    8: {  # 'с'
+        63: 1,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 2,  # 'б'
+        9: 3,  # 'в'
+        20: 2,  # 'г'
+        11: 2,  # 'д'
+        3: 3,  # 'е'
+        23: 0,  # 'ж'
+        15: 1,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 3,  # 'к'
+        10: 3,  # 'л'
+        14: 3,  # 'м'
+        6: 3,  # 'н'
+        4: 3,  # 'о'
+        13: 3,  # 'п'
+        7: 3,  # 'р'
+        8: 1,  # 'с'
+        5: 3,  # 'т'
+        19: 3,  # 'у'
+        29: 2,  # 'ф'
+        25: 2,  # 'х'
+        22: 2,  # 'ц'
+        21: 2,  # 'ч'
+        27: 2,  # 'ш'
+        24: 0,  # 'щ'
+        17: 3,  # 'ъ'
+        52: 2,  # 'ь'
+        42: 2,  # 'ю'
+        16: 3,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    5: {  # 'т'
+        63: 1,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 3,  # 'б'
+        9: 3,  # 'в'
+        20: 2,  # 'г'
+        11: 2,  # 'д'
+        3: 3,  # 'е'
+        23: 1,  # 'ж'
+        15: 1,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 3,  # 'к'
+        10: 3,  # 'л'
+        14: 2,  # 'м'
+        6: 3,  # 'н'
+        4: 3,  # 'о'
+        13: 2,  # 'п'
+        7: 3,  # 'р'
+        8: 3,  # 'с'
+        5: 3,  # 'т'
+        19: 3,  # 'у'
+        29: 1,  # 'ф'
+        25: 2,  # 'х'
+        22: 2,  # 'ц'
+        21: 2,  # 'ч'
+        27: 1,  # 'ш'
+        24: 1,  # 'щ'
+        17: 3,  # 'ъ'
+        52: 2,  # 'ь'
+        42: 2,  # 'ю'
+        16: 3,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    19: {  # 'у'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 3,  # 'б'
+        9: 3,  # 'в'
+        20: 3,  # 'г'
+        11: 3,  # 'д'
+        3: 2,  # 'е'
+        23: 3,  # 'ж'
+        15: 3,  # 'з'
+        2: 2,  # 'и'
+        26: 2,  # 'й'
+        12: 3,  # 'к'
+        10: 3,  # 'л'
+        14: 3,  # 'м'
+        6: 3,  # 'н'
+        4: 2,  # 'о'
+        13: 3,  # 'п'
+        7: 3,  # 'р'
+        8: 3,  # 'с'
+        5: 3,  # 'т'
+        19: 1,  # 'у'
+        29: 2,  # 'ф'
+        25: 2,  # 'х'
+        22: 2,  # 'ц'
+        21: 3,  # 'ч'
+        27: 3,  # 'ш'
+        24: 2,  # 'щ'
+        17: 1,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 1,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    29: {  # 'ф'
+        63: 1,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 1,  # 'б'
+        9: 1,  # 'в'
+        20: 1,  # 'г'
+        11: 0,  # 'д'
+        3: 3,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 2,  # 'к'
+        10: 2,  # 'л'
+        14: 1,  # 'м'
+        6: 1,  # 'н'
+        4: 3,  # 'о'
+        13: 0,  # 'п'
+        7: 2,  # 'р'
+        8: 2,  # 'с'
+        5: 2,  # 'т'
+        19: 2,  # 'у'
+        29: 0,  # 'ф'
+        25: 1,  # 'х'
+        22: 0,  # 'ц'
+        21: 1,  # 'ч'
+        27: 1,  # 'ш'
+        24: 0,  # 'щ'
+        17: 2,  # 'ъ'
+        52: 2,  # 'ь'
+        42: 1,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    25: {  # 'х'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 1,  # 'б'
+        9: 3,  # 'в'
+        20: 0,  # 'г'
+        11: 1,  # 'д'
+        3: 2,  # 'е'
+        23: 0,  # 'ж'
+        15: 1,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 1,  # 'к'
+        10: 2,  # 'л'
+        14: 2,  # 'м'
+        6: 3,  # 'н'
+        4: 3,  # 'о'
+        13: 1,  # 'п'
+        7: 3,  # 'р'
+        8: 1,  # 'с'
+        5: 2,  # 'т'
+        19: 3,  # 'у'
+        29: 0,  # 'ф'
+        25: 1,  # 'х'
+        22: 0,  # 'ц'
+        21: 1,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 2,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 1,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    22: {  # 'ц'
+        63: 1,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 1,  # 'б'
+        9: 2,  # 'в'
+        20: 1,  # 'г'
+        11: 1,  # 'д'
+        3: 3,  # 'е'
+        23: 0,  # 'ж'
+        15: 1,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 2,  # 'к'
+        10: 1,  # 'л'
+        14: 1,  # 'м'
+        6: 1,  # 'н'
+        4: 2,  # 'о'
+        13: 1,  # 'п'
+        7: 1,  # 'р'
+        8: 1,  # 'с'
+        5: 1,  # 'т'
+        19: 2,  # 'у'
+        29: 1,  # 'ф'
+        25: 1,  # 'х'
+        22: 1,  # 'ц'
+        21: 1,  # 'ч'
+        27: 1,  # 'ш'
+        24: 1,  # 'щ'
+        17: 2,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 0,  # 'ю'
+        16: 2,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    21: {  # 'ч'
+        63: 1,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 1,  # 'б'
+        9: 3,  # 'в'
+        20: 1,  # 'г'
+        11: 0,  # 'д'
+        3: 3,  # 'е'
+        23: 1,  # 'ж'
+        15: 0,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 3,  # 'к'
+        10: 2,  # 'л'
+        14: 2,  # 'м'
+        6: 3,  # 'н'
+        4: 3,  # 'о'
+        13: 0,  # 'п'
+        7: 2,  # 'р'
+        8: 0,  # 'с'
+        5: 2,  # 'т'
+        19: 3,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 1,  # 'ш'
+        24: 0,  # 'щ'
+        17: 2,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 1,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    27: {  # 'ш'
+        63: 1,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 0,  # 'б'
+        9: 2,  # 'в'
+        20: 0,  # 'г'
+        11: 1,  # 'д'
+        3: 3,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 3,  # 'к'
+        10: 2,  # 'л'
+        14: 1,  # 'м'
+        6: 3,  # 'н'
+        4: 2,  # 'о'
+        13: 2,  # 'п'
+        7: 1,  # 'р'
+        8: 0,  # 'с'
+        5: 1,  # 'т'
+        19: 2,  # 'у'
+        29: 1,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 1,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 2,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 1,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    24: {  # 'щ'
+        63: 1,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 3,  # 'а'
+        18: 0,  # 'б'
+        9: 1,  # 'в'
+        20: 0,  # 'г'
+        11: 0,  # 'д'
+        3: 3,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 3,  # 'и'
+        26: 0,  # 'й'
+        12: 1,  # 'к'
+        10: 0,  # 'л'
+        14: 0,  # 'м'
+        6: 2,  # 'н'
+        4: 3,  # 'о'
+        13: 0,  # 'п'
+        7: 1,  # 'р'
+        8: 0,  # 'с'
+        5: 2,  # 'т'
+        19: 3,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 1,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 1,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 0,  # 'ю'
+        16: 2,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    17: {  # 'ъ'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 1,  # 'а'
+        18: 3,  # 'б'
+        9: 3,  # 'в'
+        20: 3,  # 'г'
+        11: 3,  # 'д'
+        3: 2,  # 'е'
+        23: 3,  # 'ж'
+        15: 3,  # 'з'
+        2: 1,  # 'и'
+        26: 2,  # 'й'
+        12: 3,  # 'к'
+        10: 3,  # 'л'
+        14: 3,  # 'м'
+        6: 3,  # 'н'
+        4: 3,  # 'о'
+        13: 3,  # 'п'
+        7: 3,  # 'р'
+        8: 3,  # 'с'
+        5: 3,  # 'т'
+        19: 1,  # 'у'
+        29: 1,  # 'ф'
+        25: 2,  # 'х'
+        22: 2,  # 'ц'
+        21: 3,  # 'ч'
+        27: 2,  # 'ш'
+        24: 3,  # 'щ'
+        17: 0,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 2,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    52: {  # 'ь'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 0,  # 'а'
+        18: 0,  # 'б'
+        9: 0,  # 'в'
+        20: 0,  # 'г'
+        11: 0,  # 'д'
+        3: 1,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 0,  # 'и'
+        26: 0,  # 'й'
+        12: 1,  # 'к'
+        10: 0,  # 'л'
+        14: 0,  # 'м'
+        6: 1,  # 'н'
+        4: 3,  # 'о'
+        13: 0,  # 'п'
+        7: 0,  # 'р'
+        8: 0,  # 'с'
+        5: 1,  # 'т'
+        19: 0,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 1,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 0,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 1,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    42: {  # 'ю'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 1,  # 'а'
+        18: 2,  # 'б'
+        9: 1,  # 'в'
+        20: 2,  # 'г'
+        11: 2,  # 'д'
+        3: 1,  # 'е'
+        23: 2,  # 'ж'
+        15: 2,  # 'з'
+        2: 1,  # 'и'
+        26: 1,  # 'й'
+        12: 2,  # 'к'
+        10: 2,  # 'л'
+        14: 2,  # 'м'
+        6: 2,  # 'н'
+        4: 1,  # 'о'
+        13: 1,  # 'п'
+        7: 2,  # 'р'
+        8: 2,  # 'с'
+        5: 2,  # 'т'
+        19: 1,  # 'у'
+        29: 1,  # 'ф'
+        25: 1,  # 'х'
+        22: 2,  # 'ц'
+        21: 3,  # 'ч'
+        27: 1,  # 'ш'
+        24: 1,  # 'щ'
+        17: 1,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 0,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    16: {  # 'я'
+        63: 0,  # 'e'
+        45: 1,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 0,  # 'а'
+        18: 3,  # 'б'
+        9: 3,  # 'в'
+        20: 2,  # 'г'
+        11: 3,  # 'д'
+        3: 2,  # 'е'
+        23: 1,  # 'ж'
+        15: 2,  # 'з'
+        2: 1,  # 'и'
+        26: 2,  # 'й'
+        12: 3,  # 'к'
+        10: 3,  # 'л'
+        14: 3,  # 'м'
+        6: 3,  # 'н'
+        4: 1,  # 'о'
+        13: 2,  # 'п'
+        7: 2,  # 'р'
+        8: 3,  # 'с'
+        5: 3,  # 'т'
+        19: 1,  # 'у'
+        29: 1,  # 'ф'
+        25: 3,  # 'х'
+        22: 2,  # 'ц'
+        21: 1,  # 'ч'
+        27: 1,  # 'ш'
+        24: 2,  # 'щ'
+        17: 0,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 0,  # 'ю'
+        16: 1,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    58: {  # 'є'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 0,  # 'а'
+        18: 0,  # 'б'
+        9: 0,  # 'в'
+        20: 0,  # 'г'
+        11: 0,  # 'д'
+        3: 0,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 0,  # 'и'
+        26: 0,  # 'й'
+        12: 0,  # 'к'
+        10: 0,  # 'л'
+        14: 0,  # 'м'
+        6: 0,  # 'н'
+        4: 0,  # 'о'
+        13: 0,  # 'п'
+        7: 0,  # 'р'
+        8: 0,  # 'с'
+        5: 0,  # 'т'
+        19: 0,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 0,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 0,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    62: {  # '№'
+        63: 0,  # 'e'
+        45: 0,  # '\xad'
+        31: 0,  # 'А'
+        32: 0,  # 'Б'
+        35: 0,  # 'В'
+        43: 0,  # 'Г'
+        37: 0,  # 'Д'
+        44: 0,  # 'Е'
+        55: 0,  # 'Ж'
+        47: 0,  # 'З'
+        40: 0,  # 'И'
+        59: 0,  # 'Й'
+        33: 0,  # 'К'
+        46: 0,  # 'Л'
+        38: 0,  # 'М'
+        36: 0,  # 'Н'
+        41: 0,  # 'О'
+        30: 0,  # 'П'
+        39: 0,  # 'Р'
+        28: 0,  # 'С'
+        34: 0,  # 'Т'
+        51: 0,  # 'У'
+        48: 0,  # 'Ф'
+        49: 0,  # 'Х'
+        53: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        54: 0,  # 'Ш'
+        57: 0,  # 'Щ'
+        61: 0,  # 'Ъ'
+        60: 0,  # 'Ю'
+        56: 0,  # 'Я'
+        1: 0,  # 'а'
+        18: 0,  # 'б'
+        9: 0,  # 'в'
+        20: 0,  # 'г'
+        11: 0,  # 'д'
+        3: 0,  # 'е'
+        23: 0,  # 'ж'
+        15: 0,  # 'з'
+        2: 0,  # 'и'
+        26: 0,  # 'й'
+        12: 0,  # 'к'
+        10: 0,  # 'л'
+        14: 0,  # 'м'
+        6: 0,  # 'н'
+        4: 0,  # 'о'
+        13: 0,  # 'п'
+        7: 0,  # 'р'
+        8: 0,  # 'с'
+        5: 0,  # 'т'
+        19: 0,  # 'у'
+        29: 0,  # 'ф'
+        25: 0,  # 'х'
+        22: 0,  # 'ц'
+        21: 0,  # 'ч'
+        27: 0,  # 'ш'
+        24: 0,  # 'щ'
+        17: 0,  # 'ъ'
+        52: 0,  # 'ь'
+        42: 0,  # 'ю'
+        16: 0,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+}
+
+# 255: Undefined characters that did not exist in training text
+# 254: Carriage/Return
+# 253: symbol (punctuation) that does not belong to word
+# 252: 0 - 9
+# 251: Control characters
+
+# Character Mapping Table(s):
+ISO_8859_5_BULGARIAN_CHAR_TO_ORDER = {
+    0: 255,  # '\x00'
+    1: 255,  # '\x01'
+    2: 255,  # '\x02'
+    3: 255,  # '\x03'
+    4: 255,  # '\x04'
+    5: 255,  # '\x05'
+    6: 255,  # '\x06'
+    7: 255,  # '\x07'
+    8: 255,  # '\x08'
+    9: 255,  # '\t'
+    10: 254,  # '\n'
+    11: 255,  # '\x0b'
+    12: 255,  # '\x0c'
+    13: 254,  # '\r'
+    14: 255,  # '\x0e'
+    15: 255,  # '\x0f'
+    16: 255,  # '\x10'
+    17: 255,  # '\x11'
+    18: 255,  # '\x12'
+    19: 255,  # '\x13'
+    20: 255,  # '\x14'
+    21: 255,  # '\x15'
+    22: 255,  # '\x16'
+    23: 255,  # '\x17'
+    24: 255,  # '\x18'
+    25: 255,  # '\x19'
+    26: 255,  # '\x1a'
+    27: 255,  # '\x1b'
+    28: 255,  # '\x1c'
+    29: 255,  # '\x1d'
+    30: 255,  # '\x1e'
+    31: 255,  # '\x1f'
+    32: 253,  # ' '
+    33: 253,  # '!'
+    34: 253,  # '"'
+    35: 253,  # '#'
+    36: 253,  # '$'
+    37: 253,  # '%'
+    38: 253,  # '&'
+    39: 253,  # "'"
+    40: 253,  # '('
+    41: 253,  # ')'
+    42: 253,  # '*'
+    43: 253,  # '+'
+    44: 253,  # ','
+    45: 253,  # '-'
+    46: 253,  # '.'
+    47: 253,  # '/'
+    48: 252,  # '0'
+    49: 252,  # '1'
+    50: 252,  # '2'
+    51: 252,  # '3'
+    52: 252,  # '4'
+    53: 252,  # '5'
+    54: 252,  # '6'
+    55: 252,  # '7'
+    56: 252,  # '8'
+    57: 252,  # '9'
+    58: 253,  # ':'
+    59: 253,  # ';'
+    60: 253,  # '<'
+    61: 253,  # '='
+    62: 253,  # '>'
+    63: 253,  # '?'
+    64: 253,  # '@'
+    65: 77,  # 'A'
+    66: 90,  # 'B'
+    67: 99,  # 'C'
+    68: 100,  # 'D'
+    69: 72,  # 'E'
+    70: 109,  # 'F'
+    71: 107,  # 'G'
+    72: 101,  # 'H'
+    73: 79,  # 'I'
+    74: 185,  # 'J'
+    75: 81,  # 'K'
+    76: 102,  # 'L'
+    77: 76,  # 'M'
+    78: 94,  # 'N'
+    79: 82,  # 'O'
+    80: 110,  # 'P'
+    81: 186,  # 'Q'
+    82: 108,  # 'R'
+    83: 91,  # 'S'
+    84: 74,  # 'T'
+    85: 119,  # 'U'
+    86: 84,  # 'V'
+    87: 96,  # 'W'
+    88: 111,  # 'X'
+    89: 187,  # 'Y'
+    90: 115,  # 'Z'
+    91: 253,  # '['
+    92: 253,  # '\\'
+    93: 253,  # ']'
+    94: 253,  # '^'
+    95: 253,  # '_'
+    96: 253,  # '`'
+    97: 65,  # 'a'
+    98: 69,  # 'b'
+    99: 70,  # 'c'
+    100: 66,  # 'd'
+    101: 63,  # 'e'
+    102: 68,  # 'f'
+    103: 112,  # 'g'
+    104: 103,  # 'h'
+    105: 92,  # 'i'
+    106: 194,  # 'j'
+    107: 104,  # 'k'
+    108: 95,  # 'l'
+    109: 86,  # 'm'
+    110: 87,  # 'n'
+    111: 71,  # 'o'
+    112: 116,  # 'p'
+    113: 195,  # 'q'
+    114: 85,  # 'r'
+    115: 93,  # 's'
+    116: 97,  # 't'
+    117: 113,  # 'u'
+    118: 196,  # 'v'
+    119: 197,  # 'w'
+    120: 198,  # 'x'
+    121: 199,  # 'y'
+    122: 200,  # 'z'
+    123: 253,  # '{'
+    124: 253,  # '|'
+    125: 253,  # '}'
+    126: 253,  # '~'
+    127: 253,  # '\x7f'
+    128: 194,  # '\x80'
+    129: 195,  # '\x81'
+    130: 196,  # '\x82'
+    131: 197,  # '\x83'
+    132: 198,  # '\x84'
+    133: 199,  # '\x85'
+    134: 200,  # '\x86'
+    135: 201,  # '\x87'
+    136: 202,  # '\x88'
+    137: 203,  # '\x89'
+    138: 204,  # '\x8a'
+    139: 205,  # '\x8b'
+    140: 206,  # '\x8c'
+    141: 207,  # '\x8d'
+    142: 208,  # '\x8e'
+    143: 209,  # '\x8f'
+    144: 210,  # '\x90'
+    145: 211,  # '\x91'
+    146: 212,  # '\x92'
+    147: 213,  # '\x93'
+    148: 214,  # '\x94'
+    149: 215,  # '\x95'
+    150: 216,  # '\x96'
+    151: 217,  # '\x97'
+    152: 218,  # '\x98'
+    153: 219,  # '\x99'
+    154: 220,  # '\x9a'
+    155: 221,  # '\x9b'
+    156: 222,  # '\x9c'
+    157: 223,  # '\x9d'
+    158: 224,  # '\x9e'
+    159: 225,  # '\x9f'
+    160: 81,  # '\xa0'
+    161: 226,  # 'Ё'
+    162: 227,  # 'Ђ'
+    163: 228,  # 'Ѓ'
+    164: 229,  # 'Є'
+    165: 230,  # 'Ѕ'
+    166: 105,  # 'І'
+    167: 231,  # 'Ї'
+    168: 232,  # 'Ј'
+    169: 233,  # 'Љ'
+    170: 234,  # 'Њ'
+    171: 235,  # 'Ћ'
+    172: 236,  # 'Ќ'
+    173: 45,  # '\xad'
+    174: 237,  # 'Ў'
+    175: 238,  # 'Џ'
+    176: 31,  # 'А'
+    177: 32,  # 'Б'
+    178: 35,  # 'В'
+    179: 43,  # 'Г'
+    180: 37,  # 'Д'
+    181: 44,  # 'Е'
+    182: 55,  # 'Ж'
+    183: 47,  # 'З'
+    184: 40,  # 'И'
+    185: 59,  # 'Й'
+    186: 33,  # 'К'
+    187: 46,  # 'Л'
+    188: 38,  # 'М'
+    189: 36,  # 'Н'
+    190: 41,  # 'О'
+    191: 30,  # 'П'
+    192: 39,  # 'Р'
+    193: 28,  # 'С'
+    194: 34,  # 'Т'
+    195: 51,  # 'У'
+    196: 48,  # 'Ф'
+    197: 49,  # 'Х'
+    198: 53,  # 'Ц'
+    199: 50,  # 'Ч'
+    200: 54,  # 'Ш'
+    201: 57,  # 'Щ'
+    202: 61,  # 'Ъ'
+    203: 239,  # 'Ы'
+    204: 67,  # 'Ь'
+    205: 240,  # 'Э'
+    206: 60,  # 'Ю'
+    207: 56,  # 'Я'
+    208: 1,  # 'а'
+    209: 18,  # 'б'
+    210: 9,  # 'в'
+    211: 20,  # 'г'
+    212: 11,  # 'д'
+    213: 3,  # 'е'
+    214: 23,  # 'ж'
+    215: 15,  # 'з'
+    216: 2,  # 'и'
+    217: 26,  # 'й'
+    218: 12,  # 'к'
+    219: 10,  # 'л'
+    220: 14,  # 'м'
+    221: 6,  # 'н'
+    222: 4,  # 'о'
+    223: 13,  # 'п'
+    224: 7,  # 'р'
+    225: 8,  # 'с'
+    226: 5,  # 'т'
+    227: 19,  # 'у'
+    228: 29,  # 'ф'
+    229: 25,  # 'х'
+    230: 22,  # 'ц'
+    231: 21,  # 'ч'
+    232: 27,  # 'ш'
+    233: 24,  # 'щ'
+    234: 17,  # 'ъ'
+    235: 75,  # 'ы'
+    236: 52,  # 'ь'
+    237: 241,  # 'э'
+    238: 42,  # 'ю'
+    239: 16,  # 'я'
+    240: 62,  # '№'
+    241: 242,  # 'ё'
+    242: 243,  # 'ђ'
+    243: 244,  # 'ѓ'
+    244: 58,  # 'є'
+    245: 245,  # 'ѕ'
+    246: 98,  # 'і'
+    247: 246,  # 'ї'
+    248: 247,  # 'ј'
+    249: 248,  # 'љ'
+    250: 249,  # 'њ'
+    251: 250,  # 'ћ'
+    252: 251,  # 'ќ'
+    253: 91,  # '§'
+    254: 252,  # 'ў'
+    255: 253,  # 'џ'
+}
+
+ISO_8859_5_BULGARIAN_MODEL = SingleByteCharSetModel(
+    charset_name="ISO-8859-5",
+    language="Bulgarian",
+    char_to_order_map=ISO_8859_5_BULGARIAN_CHAR_TO_ORDER,
+    language_model=BULGARIAN_LANG_MODEL,
+    typical_positive_ratio=0.969392,
+    keep_ascii_letters=False,
+    alphabet="АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯабвгдежзийклмнопрстуфхцчшщъьюя",
+)
+
+WINDOWS_1251_BULGARIAN_CHAR_TO_ORDER = {
+    0: 255,  # '\x00'
+    1: 255,  # '\x01'
+    2: 255,  # '\x02'
+    3: 255,  # '\x03'
+    4: 255,  # '\x04'
+    5: 255,  # '\x05'
+    6: 255,  # '\x06'
+    7: 255,  # '\x07'
+    8: 255,  # '\x08'
+    9: 255,  # '\t'
+    10: 254,  # '\n'
+    11: 255,  # '\x0b'
+    12: 255,  # '\x0c'
+    13: 254,  # '\r'
+    14: 255,  # '\x0e'
+    15: 255,  # '\x0f'
+    16: 255,  # '\x10'
+    17: 255,  # '\x11'
+    18: 255,  # '\x12'
+    19: 255,  # '\x13'
+    20: 255,  # '\x14'
+    21: 255,  # '\x15'
+    22: 255,  # '\x16'
+    23: 255,  # '\x17'
+    24: 255,  # '\x18'
+    25: 255,  # '\x19'
+    26: 255,  # '\x1a'
+    27: 255,  # '\x1b'
+    28: 255,  # '\x1c'
+    29: 255,  # '\x1d'
+    30: 255,  # '\x1e'
+    31: 255,  # '\x1f'
+    32: 253,  # ' '
+    33: 253,  # '!'
+    34: 253,  # '"'
+    35: 253,  # '#'
+    36: 253,  # '$'
+    37: 253,  # '%'
+    38: 253,  # '&'
+    39: 253,  # "'"
+    40: 253,  # '('
+    41: 253,  # ')'
+    42: 253,  # '*'
+    43: 253,  # '+'
+    44: 253,  # ','
+    45: 253,  # '-'
+    46: 253,  # '.'
+    47: 253,  # '/'
+    48: 252,  # '0'
+    49: 252,  # '1'
+    50: 252,  # '2'
+    51: 252,  # '3'
+    52: 252,  # '4'
+    53: 252,  # '5'
+    54: 252,  # '6'
+    55: 252,  # '7'
+    56: 252,  # '8'
+    57: 252,  # '9'
+    58: 253,  # ':'
+    59: 253,  # ';'
+    60: 253,  # '<'
+    61: 253,  # '='
+    62: 253,  # '>'
+    63: 253,  # '?'
+    64: 253,  # '@'
+    65: 77,  # 'A'
+    66: 90,  # 'B'
+    67: 99,  # 'C'
+    68: 100,  # 'D'
+    69: 72,  # 'E'
+    70: 109,  # 'F'
+    71: 107,  # 'G'
+    72: 101,  # 'H'
+    73: 79,  # 'I'
+    74: 185,  # 'J'
+    75: 81,  # 'K'
+    76: 102,  # 'L'
+    77: 76,  # 'M'
+    78: 94,  # 'N'
+    79: 82,  # 'O'
+    80: 110,  # 'P'
+    81: 186,  # 'Q'
+    82: 108,  # 'R'
+    83: 91,  # 'S'
+    84: 74,  # 'T'
+    85: 119,  # 'U'
+    86: 84,  # 'V'
+    87: 96,  # 'W'
+    88: 111,  # 'X'
+    89: 187,  # 'Y'
+    90: 115,  # 'Z'
+    91: 253,  # '['
+    92: 253,  # '\\'
+    93: 253,  # ']'
+    94: 253,  # '^'
+    95: 253,  # '_'
+    96: 253,  # '`'
+    97: 65,  # 'a'
+    98: 69,  # 'b'
+    99: 70,  # 'c'
+    100: 66,  # 'd'
+    101: 63,  # 'e'
+    102: 68,  # 'f'
+    103: 112,  # 'g'
+    104: 103,  # 'h'
+    105: 92,  # 'i'
+    106: 194,  # 'j'
+    107: 104,  # 'k'
+    108: 95,  # 'l'
+    109: 86,  # 'm'
+    110: 87,  # 'n'
+    111: 71,  # 'o'
+    112: 116,  # 'p'
+    113: 195,  # 'q'
+    114: 85,  # 'r'
+    115: 93,  # 's'
+    116: 97,  # 't'
+    117: 113,  # 'u'
+    118: 196,  # 'v'
+    119: 197,  # 'w'
+    120: 198,  # 'x'
+    121: 199,  # 'y'
+    122: 200,  # 'z'
+    123: 253,  # '{'
+    124: 253,  # '|'
+    125: 253,  # '}'
+    126: 253,  # '~'
+    127: 253,  # '\x7f'
+    128: 206,  # 'Ђ'
+    129: 207,  # 'Ѓ'
+    130: 208,  # '‚'
+    131: 209,  # 'ѓ'
+    132: 210,  # '„'
+    133: 211,  # '…'
+    134: 212,  # '†'
+    135: 213,  # '‡'
+    136: 120,  # '€'
+    137: 214,  # '‰'
+    138: 215,  # 'Љ'
+    139: 216,  # '‹'
+    140: 217,  # 'Њ'
+    141: 218,  # 'Ќ'
+    142: 219,  # 'Ћ'
+    143: 220,  # 'Џ'
+    144: 221,  # 'ђ'
+    145: 78,  # '‘'
+    146: 64,  # '’'
+    147: 83,  # '“'
+    148: 121,  # '”'
+    149: 98,  # '•'
+    150: 117,  # '–'
+    151: 105,  # '—'
+    152: 222,  # None
+    153: 223,  # '™'
+    154: 224,  # 'љ'
+    155: 225,  # '›'
+    156: 226,  # 'њ'
+    157: 227,  # 'ќ'
+    158: 228,  # 'ћ'
+    159: 229,  # 'џ'
+    160: 88,  # '\xa0'
+    161: 230,  # 'Ў'
+    162: 231,  # 'ў'
+    163: 232,  # 'Ј'
+    164: 233,  # '¤'
+    165: 122,  # 'Ґ'
+    166: 89,  # '¦'
+    167: 106,  # '§'
+    168: 234,  # 'Ё'
+    169: 235,  # '©'
+    170: 236,  # 'Є'
+    171: 237,  # '«'
+    172: 238,  # '¬'
+    173: 45,  # '\xad'
+    174: 239,  # '®'
+    175: 240,  # 'Ї'
+    176: 73,  # '°'
+    177: 80,  # '±'
+    178: 118,  # 'І'
+    179: 114,  # 'і'
+    180: 241,  # 'ґ'
+    181: 242,  # 'µ'
+    182: 243,  # '¶'
+    183: 244,  # '·'
+    184: 245,  # 'ё'
+    185: 62,  # '№'
+    186: 58,  # 'є'
+    187: 246,  # '»'
+    188: 247,  # 'ј'
+    189: 248,  # 'Ѕ'
+    190: 249,  # 'ѕ'
+    191: 250,  # 'ї'
+    192: 31,  # 'А'
+    193: 32,  # 'Б'
+    194: 35,  # 'В'
+    195: 43,  # 'Г'
+    196: 37,  # 'Д'
+    197: 44,  # 'Е'
+    198: 55,  # 'Ж'
+    199: 47,  # 'З'
+    200: 40,  # 'И'
+    201: 59,  # 'Й'
+    202: 33,  # 'К'
+    203: 46,  # 'Л'
+    204: 38,  # 'М'
+    205: 36,  # 'Н'
+    206: 41,  # 'О'
+    207: 30,  # 'П'
+    208: 39,  # 'Р'
+    209: 28,  # 'С'
+    210: 34,  # 'Т'
+    211: 51,  # 'У'
+    212: 48,  # 'Ф'
+    213: 49,  # 'Х'
+    214: 53,  # 'Ц'
+    215: 50,  # 'Ч'
+    216: 54,  # 'Ш'
+    217: 57,  # 'Щ'
+    218: 61,  # 'Ъ'
+    219: 251,  # 'Ы'
+    220: 67,  # 'Ь'
+    221: 252,  # 'Э'
+    222: 60,  # 'Ю'
+    223: 56,  # 'Я'
+    224: 1,  # 'а'
+    225: 18,  # 'б'
+    226: 9,  # 'в'
+    227: 20,  # 'г'
+    228: 11,  # 'д'
+    229: 3,  # 'е'
+    230: 23,  # 'ж'
+    231: 15,  # 'з'
+    232: 2,  # 'и'
+    233: 26,  # 'й'
+    234: 12,  # 'к'
+    235: 10,  # 'л'
+    236: 14,  # 'м'
+    237: 6,  # 'н'
+    238: 4,  # 'о'
+    239: 13,  # 'п'
+    240: 7,  # 'р'
+    241: 8,  # 'с'
+    242: 5,  # 'т'
+    243: 19,  # 'у'
+    244: 29,  # 'ф'
+    245: 25,  # 'х'
+    246: 22,  # 'ц'
+    247: 21,  # 'ч'
+    248: 27,  # 'ш'
+    249: 24,  # 'щ'
+    250: 17,  # 'ъ'
+    251: 75,  # 'ы'
+    252: 52,  # 'ь'
+    253: 253,  # 'э'
+    254: 42,  # 'ю'
+    255: 16,  # 'я'
+}
+
+WINDOWS_1251_BULGARIAN_MODEL = SingleByteCharSetModel(
+    charset_name="windows-1251",
+    language="Bulgarian",
+    char_to_order_map=WINDOWS_1251_BULGARIAN_CHAR_TO_ORDER,
+    language_model=BULGARIAN_LANG_MODEL,
+    typical_positive_ratio=0.969392,
+    keep_ascii_letters=False,
+    alphabet="АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯабвгдежзийклмнопрстуфхцчшщъьюя",
+)
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langgreekmodel.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langgreekmodel.py
new file mode 100644
index 0000000..cfb8639
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langgreekmodel.py
@@ -0,0 +1,4397 @@
+from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
+
+# 3: Positive
+# 2: Likely
+# 1: Unlikely
+# 0: Negative
+
+GREEK_LANG_MODEL = {
+    60: {  # 'e'
+        60: 2,  # 'e'
+        55: 1,  # 'o'
+        58: 2,  # 't'
+        36: 1,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 1,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 0,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 0,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 0,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 0,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 0,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    55: {  # 'o'
+        60: 0,  # 'e'
+        55: 2,  # 'o'
+        58: 2,  # 't'
+        36: 1,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 0,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 0,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 0,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 1,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 0,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 1,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    58: {  # 't'
+        60: 2,  # 'e'
+        55: 1,  # 'o'
+        58: 1,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 2,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 0,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 0,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 0,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 1,  # 'ο'
+        9: 0,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 0,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    36: {  # '·'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 0,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 0,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 0,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 0,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 0,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    61: {  # 'Ά'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 0,  # 'α'
+        29: 0,  # 'β'
+        20: 1,  # 'γ'
+        21: 2,  # 'δ'
+        3: 0,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 0,  # 'ι'
+        11: 0,  # 'κ'
+        16: 2,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 1,  # 'π'
+        8: 2,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 0,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    46: {  # 'Έ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 0,  # 'α'
+        29: 2,  # 'β'
+        20: 2,  # 'γ'
+        21: 0,  # 'δ'
+        3: 0,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 0,  # 'ι'
+        11: 2,  # 'κ'
+        16: 2,  # 'λ'
+        10: 0,  # 'μ'
+        6: 3,  # 'ν'
+        30: 2,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 2,  # 'π'
+        8: 2,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 1,  # 'σ'
+        2: 2,  # 'τ'
+        12: 0,  # 'υ'
+        28: 2,  # 'φ'
+        23: 3,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    54: {  # 'Ό'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 0,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 0,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 0,  # 'ι'
+        11: 0,  # 'κ'
+        16: 2,  # 'λ'
+        10: 2,  # 'μ'
+        6: 2,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 2,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 2,  # 'σ'
+        2: 3,  # 'τ'
+        12: 0,  # 'υ'
+        28: 0,  # 'φ'
+        23: 2,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    31: {  # 'Α'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 2,  # 'Β'
+        43: 2,  # 'Γ'
+        41: 1,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 2,  # 'Θ'
+        47: 2,  # 'Ι'
+        44: 2,  # 'Κ'
+        53: 2,  # 'Λ'
+        38: 2,  # 'Μ'
+        49: 2,  # 'Ν'
+        59: 1,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 2,  # 'Π'
+        48: 2,  # 'Ρ'
+        37: 2,  # 'Σ'
+        33: 2,  # 'Τ'
+        45: 2,  # 'Υ'
+        56: 2,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 0,  # 'α'
+        29: 0,  # 'β'
+        20: 2,  # 'γ'
+        21: 0,  # 'δ'
+        3: 0,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 1,  # 'θ'
+        5: 0,  # 'ι'
+        11: 2,  # 'κ'
+        16: 3,  # 'λ'
+        10: 2,  # 'μ'
+        6: 3,  # 'ν'
+        30: 2,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 3,  # 'π'
+        8: 3,  # 'ρ'
+        14: 2,  # 'ς'
+        7: 2,  # 'σ'
+        2: 0,  # 'τ'
+        12: 3,  # 'υ'
+        28: 2,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 2,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    51: {  # 'Β'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 2,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 1,  # 'Ε'
+        40: 1,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 1,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 1,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 2,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 2,  # 'ά'
+        18: 2,  # 'έ'
+        22: 2,  # 'ή'
+        15: 0,  # 'ί'
+        1: 2,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 2,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 2,  # 'ι'
+        11: 0,  # 'κ'
+        16: 2,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 2,  # 'ο'
+        9: 0,  # 'π'
+        8: 2,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 0,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    43: {  # 'Γ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 1,  # 'Α'
+        51: 0,  # 'Β'
+        43: 2,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 2,  # 'Ε'
+        40: 1,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 2,  # 'Ι'
+        44: 1,  # 'Κ'
+        53: 1,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 1,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 2,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 2,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 1,  # 'Χ'
+        57: 2,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 2,  # 'ί'
+        1: 2,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 2,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 3,  # 'ι'
+        11: 0,  # 'κ'
+        16: 2,  # 'λ'
+        10: 0,  # 'μ'
+        6: 2,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 0,  # 'π'
+        8: 2,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 0,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    41: {  # 'Δ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 2,  # 'Ε'
+        40: 2,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 2,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 2,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 2,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 2,  # 'ή'
+        15: 2,  # 'ί'
+        1: 0,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 2,  # 'η'
+        25: 0,  # 'θ'
+        5: 3,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 2,  # 'ο'
+        9: 0,  # 'π'
+        8: 2,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 2,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 2,  # 'ω'
+        19: 1,  # 'ό'
+        26: 2,  # 'ύ'
+        27: 2,  # 'ώ'
+    },
+    34: {  # 'Ε'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 2,  # 'Α'
+        51: 0,  # 'Β'
+        43: 2,  # 'Γ'
+        41: 2,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 2,  # 'Ι'
+        44: 2,  # 'Κ'
+        53: 2,  # 'Λ'
+        38: 2,  # 'Μ'
+        49: 2,  # 'Ν'
+        59: 1,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 2,  # 'Π'
+        48: 2,  # 'Ρ'
+        37: 2,  # 'Σ'
+        33: 2,  # 'Τ'
+        45: 2,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 2,  # 'Χ'
+        57: 2,  # 'Ω'
+        17: 3,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 3,  # 'ί'
+        1: 0,  # 'α'
+        29: 0,  # 'β'
+        20: 3,  # 'γ'
+        21: 2,  # 'δ'
+        3: 1,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 1,  # 'θ'
+        5: 2,  # 'ι'
+        11: 3,  # 'κ'
+        16: 3,  # 'λ'
+        10: 2,  # 'μ'
+        6: 3,  # 'ν'
+        30: 2,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 3,  # 'π'
+        8: 2,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 2,  # 'σ'
+        2: 2,  # 'τ'
+        12: 2,  # 'υ'
+        28: 2,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 1,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    40: {  # 'Η'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 1,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 2,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 2,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 2,  # 'Μ'
+        49: 2,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 2,  # 'Π'
+        48: 2,  # 'Ρ'
+        37: 2,  # 'Σ'
+        33: 2,  # 'Τ'
+        45: 1,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 0,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 0,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 0,  # 'ι'
+        11: 0,  # 'κ'
+        16: 2,  # 'λ'
+        10: 0,  # 'μ'
+        6: 1,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 0,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 0,  # 'υ'
+        28: 0,  # 'φ'
+        23: 1,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    52: {  # 'Θ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 2,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 2,  # 'Ε'
+        40: 2,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 2,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 1,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 1,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 2,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 3,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 2,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 0,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 0,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 2,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 2,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    47: {  # 'Ι'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 2,  # 'Α'
+        51: 1,  # 'Β'
+        43: 1,  # 'Γ'
+        41: 2,  # 'Δ'
+        34: 2,  # 'Ε'
+        40: 2,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 2,  # 'Κ'
+        53: 2,  # 'Λ'
+        38: 2,  # 'Μ'
+        49: 2,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 2,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 2,  # 'Ρ'
+        37: 2,  # 'Σ'
+        33: 2,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 2,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 2,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 2,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 2,  # 'δ'
+        3: 0,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 0,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 1,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 2,  # 'ο'
+        9: 0,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 2,  # 'σ'
+        2: 1,  # 'τ'
+        12: 0,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 1,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    44: {  # 'Κ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 2,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 1,  # 'Δ'
+        34: 2,  # 'Ε'
+        40: 2,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 1,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 2,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 2,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 1,  # 'Τ'
+        45: 2,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 1,  # 'Ω'
+        17: 3,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 3,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 2,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 2,  # 'ι'
+        11: 0,  # 'κ'
+        16: 2,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 2,  # 'ο'
+        9: 0,  # 'π'
+        8: 2,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 2,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 2,  # 'ό'
+        26: 2,  # 'ύ'
+        27: 2,  # 'ώ'
+    },
+    53: {  # 'Λ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 2,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 2,  # 'Ε'
+        40: 2,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 2,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 2,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 2,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 2,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 2,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 2,  # 'Ω'
+        17: 2,  # 'ά'
+        18: 2,  # 'έ'
+        22: 0,  # 'ή'
+        15: 2,  # 'ί'
+        1: 2,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 2,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 1,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 2,  # 'ο'
+        9: 0,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 2,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 2,  # 'ό'
+        26: 2,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    38: {  # 'Μ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 2,  # 'Α'
+        51: 2,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 2,  # 'Ε'
+        40: 2,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 2,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 2,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 2,  # 'Ο'
+        35: 2,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 2,  # 'ά'
+        18: 2,  # 'έ'
+        22: 2,  # 'ή'
+        15: 2,  # 'ί'
+        1: 2,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 2,  # 'η'
+        25: 0,  # 'θ'
+        5: 3,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 2,  # 'ο'
+        9: 3,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 2,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 2,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    49: {  # 'Ν'
+        60: 2,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 2,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 2,  # 'Ε'
+        40: 2,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 2,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 2,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 2,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 2,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 2,  # 'έ'
+        22: 0,  # 'ή'
+        15: 2,  # 'ί'
+        1: 2,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 1,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 0,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 2,  # 'ο'
+        9: 0,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 0,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 1,  # 'ω'
+        19: 2,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    59: {  # 'Ξ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 1,  # 'Ε'
+        40: 1,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 1,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 2,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 2,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 2,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 0,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 0,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 0,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    39: {  # 'Ο'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 1,  # 'Β'
+        43: 2,  # 'Γ'
+        41: 2,  # 'Δ'
+        34: 2,  # 'Ε'
+        40: 1,  # 'Η'
+        52: 2,  # 'Θ'
+        47: 2,  # 'Ι'
+        44: 2,  # 'Κ'
+        53: 2,  # 'Λ'
+        38: 2,  # 'Μ'
+        49: 2,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 2,  # 'Π'
+        48: 2,  # 'Ρ'
+        37: 2,  # 'Σ'
+        33: 2,  # 'Τ'
+        45: 2,  # 'Υ'
+        56: 2,  # 'Φ'
+        50: 2,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 0,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 2,  # 'δ'
+        3: 0,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 3,  # 'ι'
+        11: 2,  # 'κ'
+        16: 2,  # 'λ'
+        10: 2,  # 'μ'
+        6: 2,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 2,  # 'π'
+        8: 2,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 2,  # 'τ'
+        12: 2,  # 'υ'
+        28: 1,  # 'φ'
+        23: 1,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 2,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    35: {  # 'Π'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 2,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 2,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 2,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 2,  # 'Λ'
+        38: 1,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 2,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 2,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 1,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 1,  # 'Χ'
+        57: 2,  # 'Ω'
+        17: 2,  # 'ά'
+        18: 1,  # 'έ'
+        22: 1,  # 'ή'
+        15: 2,  # 'ί'
+        1: 3,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 2,  # 'η'
+        25: 0,  # 'θ'
+        5: 2,  # 'ι'
+        11: 0,  # 'κ'
+        16: 2,  # 'λ'
+        10: 0,  # 'μ'
+        6: 2,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 0,  # 'π'
+        8: 3,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 2,  # 'υ'
+        28: 0,  # 'φ'
+        23: 2,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 2,  # 'ω'
+        19: 2,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 3,  # 'ώ'
+    },
+    48: {  # 'Ρ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 2,  # 'Α'
+        51: 0,  # 'Β'
+        43: 1,  # 'Γ'
+        41: 1,  # 'Δ'
+        34: 2,  # 'Ε'
+        40: 2,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 2,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 2,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 2,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 2,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 1,  # 'Τ'
+        45: 1,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 1,  # 'Χ'
+        57: 1,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 2,  # 'ί'
+        1: 0,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 0,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 0,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 1,  # 'ο'
+        9: 0,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 3,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 2,  # 'ω'
+        19: 0,  # 'ό'
+        26: 2,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    37: {  # 'Σ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 2,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 1,  # 'Δ'
+        34: 2,  # 'Ε'
+        40: 2,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 2,  # 'Ι'
+        44: 2,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 2,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 2,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 2,  # 'Σ'
+        33: 2,  # 'Τ'
+        45: 2,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 2,  # 'Χ'
+        57: 2,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 2,  # 'ή'
+        15: 2,  # 'ί'
+        1: 2,  # 'α'
+        29: 2,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 3,  # 'η'
+        25: 0,  # 'θ'
+        5: 2,  # 'ι'
+        11: 2,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 2,  # 'ο'
+        9: 2,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 3,  # 'τ'
+        12: 3,  # 'υ'
+        28: 0,  # 'φ'
+        23: 2,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 2,  # 'ω'
+        19: 0,  # 'ό'
+        26: 2,  # 'ύ'
+        27: 2,  # 'ώ'
+    },
+    33: {  # 'Τ'
+        60: 0,  # 'e'
+        55: 1,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 2,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 2,  # 'Ε'
+        40: 2,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 2,  # 'Ι'
+        44: 2,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 2,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 2,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 1,  # 'Τ'
+        45: 1,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 2,  # 'Ω'
+        17: 2,  # 'ά'
+        18: 2,  # 'έ'
+        22: 0,  # 'ή'
+        15: 2,  # 'ί'
+        1: 3,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 2,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 2,  # 'η'
+        25: 0,  # 'θ'
+        5: 2,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 2,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 0,  # 'π'
+        8: 2,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 2,  # 'σ'
+        2: 0,  # 'τ'
+        12: 2,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 2,  # 'ό'
+        26: 2,  # 'ύ'
+        27: 3,  # 'ώ'
+    },
+    45: {  # 'Υ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 2,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 1,  # 'Ε'
+        40: 2,  # 'Η'
+        52: 2,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 1,  # 'Λ'
+        38: 2,  # 'Μ'
+        49: 2,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 2,  # 'Π'
+        48: 1,  # 'Ρ'
+        37: 2,  # 'Σ'
+        33: 2,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 1,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 0,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 0,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 0,  # 'ι'
+        11: 0,  # 'κ'
+        16: 2,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 3,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 0,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    56: {  # 'Φ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 1,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 1,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 2,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 2,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 2,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 2,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 2,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 2,  # 'ο'
+        9: 0,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 2,  # 'τ'
+        12: 2,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 1,  # 'ύ'
+        27: 1,  # 'ώ'
+    },
+    50: {  # 'Χ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 1,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 2,  # 'Ε'
+        40: 2,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 2,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 1,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 1,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 2,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 1,  # 'Χ'
+        57: 1,  # 'Ω'
+        17: 2,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 2,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 2,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 0,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 2,  # 'ο'
+        9: 0,  # 'π'
+        8: 3,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 2,  # 'τ'
+        12: 0,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 2,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    57: {  # 'Ω'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 1,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 1,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 2,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 2,  # 'Ρ'
+        37: 2,  # 'Σ'
+        33: 2,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 0,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 0,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 0,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 0,  # 'π'
+        8: 2,  # 'ρ'
+        14: 2,  # 'ς'
+        7: 2,  # 'σ'
+        2: 0,  # 'τ'
+        12: 0,  # 'υ'
+        28: 0,  # 'φ'
+        23: 1,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    17: {  # 'ά'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 2,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 0,  # 'α'
+        29: 3,  # 'β'
+        20: 3,  # 'γ'
+        21: 3,  # 'δ'
+        3: 3,  # 'ε'
+        32: 3,  # 'ζ'
+        13: 0,  # 'η'
+        25: 3,  # 'θ'
+        5: 2,  # 'ι'
+        11: 3,  # 'κ'
+        16: 3,  # 'λ'
+        10: 3,  # 'μ'
+        6: 3,  # 'ν'
+        30: 3,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 3,  # 'π'
+        8: 3,  # 'ρ'
+        14: 3,  # 'ς'
+        7: 3,  # 'σ'
+        2: 3,  # 'τ'
+        12: 0,  # 'υ'
+        28: 3,  # 'φ'
+        23: 3,  # 'χ'
+        42: 3,  # 'ψ'
+        24: 2,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    18: {  # 'έ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 3,  # 'α'
+        29: 2,  # 'β'
+        20: 3,  # 'γ'
+        21: 2,  # 'δ'
+        3: 3,  # 'ε'
+        32: 2,  # 'ζ'
+        13: 0,  # 'η'
+        25: 3,  # 'θ'
+        5: 0,  # 'ι'
+        11: 3,  # 'κ'
+        16: 3,  # 'λ'
+        10: 3,  # 'μ'
+        6: 3,  # 'ν'
+        30: 3,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 3,  # 'π'
+        8: 3,  # 'ρ'
+        14: 3,  # 'ς'
+        7: 3,  # 'σ'
+        2: 3,  # 'τ'
+        12: 0,  # 'υ'
+        28: 3,  # 'φ'
+        23: 3,  # 'χ'
+        42: 3,  # 'ψ'
+        24: 2,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    22: {  # 'ή'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 1,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 0,  # 'α'
+        29: 0,  # 'β'
+        20: 3,  # 'γ'
+        21: 3,  # 'δ'
+        3: 0,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 3,  # 'θ'
+        5: 0,  # 'ι'
+        11: 3,  # 'κ'
+        16: 2,  # 'λ'
+        10: 3,  # 'μ'
+        6: 3,  # 'ν'
+        30: 2,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 3,  # 'π'
+        8: 3,  # 'ρ'
+        14: 3,  # 'ς'
+        7: 3,  # 'σ'
+        2: 3,  # 'τ'
+        12: 0,  # 'υ'
+        28: 2,  # 'φ'
+        23: 3,  # 'χ'
+        42: 2,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    15: {  # 'ί'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 3,  # 'α'
+        29: 2,  # 'β'
+        20: 3,  # 'γ'
+        21: 3,  # 'δ'
+        3: 3,  # 'ε'
+        32: 3,  # 'ζ'
+        13: 3,  # 'η'
+        25: 3,  # 'θ'
+        5: 0,  # 'ι'
+        11: 3,  # 'κ'
+        16: 3,  # 'λ'
+        10: 3,  # 'μ'
+        6: 3,  # 'ν'
+        30: 3,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 3,  # 'π'
+        8: 3,  # 'ρ'
+        14: 3,  # 'ς'
+        7: 3,  # 'σ'
+        2: 3,  # 'τ'
+        12: 0,  # 'υ'
+        28: 1,  # 'φ'
+        23: 3,  # 'χ'
+        42: 2,  # 'ψ'
+        24: 3,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    1: {  # 'α'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 2,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 2,  # 'έ'
+        22: 0,  # 'ή'
+        15: 3,  # 'ί'
+        1: 0,  # 'α'
+        29: 3,  # 'β'
+        20: 3,  # 'γ'
+        21: 3,  # 'δ'
+        3: 2,  # 'ε'
+        32: 3,  # 'ζ'
+        13: 1,  # 'η'
+        25: 3,  # 'θ'
+        5: 3,  # 'ι'
+        11: 3,  # 'κ'
+        16: 3,  # 'λ'
+        10: 3,  # 'μ'
+        6: 3,  # 'ν'
+        30: 3,  # 'ξ'
+        4: 2,  # 'ο'
+        9: 3,  # 'π'
+        8: 3,  # 'ρ'
+        14: 3,  # 'ς'
+        7: 3,  # 'σ'
+        2: 3,  # 'τ'
+        12: 3,  # 'υ'
+        28: 3,  # 'φ'
+        23: 3,  # 'χ'
+        42: 2,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 2,  # 'ό'
+        26: 2,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    29: {  # 'β'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 3,  # 'ά'
+        18: 2,  # 'έ'
+        22: 3,  # 'ή'
+        15: 2,  # 'ί'
+        1: 3,  # 'α'
+        29: 0,  # 'β'
+        20: 2,  # 'γ'
+        21: 2,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 2,  # 'η'
+        25: 0,  # 'θ'
+        5: 3,  # 'ι'
+        11: 0,  # 'κ'
+        16: 3,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 0,  # 'π'
+        8: 3,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 0,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 2,  # 'ω'
+        19: 2,  # 'ό'
+        26: 2,  # 'ύ'
+        27: 2,  # 'ώ'
+    },
+    20: {  # 'γ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 3,  # 'ά'
+        18: 3,  # 'έ'
+        22: 3,  # 'ή'
+        15: 3,  # 'ί'
+        1: 3,  # 'α'
+        29: 0,  # 'β'
+        20: 3,  # 'γ'
+        21: 0,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 3,  # 'η'
+        25: 0,  # 'θ'
+        5: 3,  # 'ι'
+        11: 3,  # 'κ'
+        16: 3,  # 'λ'
+        10: 3,  # 'μ'
+        6: 3,  # 'ν'
+        30: 3,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 0,  # 'π'
+        8: 3,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 2,  # 'υ'
+        28: 0,  # 'φ'
+        23: 3,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 3,  # 'ω'
+        19: 3,  # 'ό'
+        26: 2,  # 'ύ'
+        27: 3,  # 'ώ'
+    },
+    21: {  # 'δ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 2,  # 'ά'
+        18: 3,  # 'έ'
+        22: 3,  # 'ή'
+        15: 3,  # 'ί'
+        1: 3,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 3,  # 'η'
+        25: 0,  # 'θ'
+        5: 3,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 0,  # 'π'
+        8: 3,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 3,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 3,  # 'ω'
+        19: 3,  # 'ό'
+        26: 3,  # 'ύ'
+        27: 3,  # 'ώ'
+    },
+    3: {  # 'ε'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 2,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 3,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 3,  # 'ί'
+        1: 2,  # 'α'
+        29: 3,  # 'β'
+        20: 3,  # 'γ'
+        21: 3,  # 'δ'
+        3: 2,  # 'ε'
+        32: 2,  # 'ζ'
+        13: 0,  # 'η'
+        25: 3,  # 'θ'
+        5: 3,  # 'ι'
+        11: 3,  # 'κ'
+        16: 3,  # 'λ'
+        10: 3,  # 'μ'
+        6: 3,  # 'ν'
+        30: 3,  # 'ξ'
+        4: 2,  # 'ο'
+        9: 3,  # 'π'
+        8: 3,  # 'ρ'
+        14: 3,  # 'ς'
+        7: 3,  # 'σ'
+        2: 3,  # 'τ'
+        12: 3,  # 'υ'
+        28: 3,  # 'φ'
+        23: 3,  # 'χ'
+        42: 2,  # 'ψ'
+        24: 3,  # 'ω'
+        19: 2,  # 'ό'
+        26: 3,  # 'ύ'
+        27: 2,  # 'ώ'
+    },
+    32: {  # 'ζ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 2,  # 'ά'
+        18: 2,  # 'έ'
+        22: 2,  # 'ή'
+        15: 2,  # 'ί'
+        1: 2,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 3,  # 'η'
+        25: 0,  # 'θ'
+        5: 2,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 0,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 1,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 3,  # 'ω'
+        19: 2,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 2,  # 'ώ'
+    },
+    13: {  # 'η'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 2,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 0,  # 'α'
+        29: 0,  # 'β'
+        20: 3,  # 'γ'
+        21: 2,  # 'δ'
+        3: 0,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 3,  # 'θ'
+        5: 0,  # 'ι'
+        11: 3,  # 'κ'
+        16: 3,  # 'λ'
+        10: 3,  # 'μ'
+        6: 3,  # 'ν'
+        30: 2,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 2,  # 'π'
+        8: 3,  # 'ρ'
+        14: 3,  # 'ς'
+        7: 3,  # 'σ'
+        2: 3,  # 'τ'
+        12: 0,  # 'υ'
+        28: 2,  # 'φ'
+        23: 3,  # 'χ'
+        42: 2,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    25: {  # 'θ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 2,  # 'ά'
+        18: 3,  # 'έ'
+        22: 3,  # 'ή'
+        15: 2,  # 'ί'
+        1: 3,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 3,  # 'η'
+        25: 0,  # 'θ'
+        5: 3,  # 'ι'
+        11: 0,  # 'κ'
+        16: 1,  # 'λ'
+        10: 3,  # 'μ'
+        6: 2,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 0,  # 'π'
+        8: 3,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 3,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 3,  # 'ω'
+        19: 3,  # 'ό'
+        26: 3,  # 'ύ'
+        27: 3,  # 'ώ'
+    },
+    5: {  # 'ι'
+        60: 0,  # 'e'
+        55: 1,  # 'o'
+        58: 0,  # 't'
+        36: 2,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 1,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 3,  # 'ά'
+        18: 3,  # 'έ'
+        22: 3,  # 'ή'
+        15: 0,  # 'ί'
+        1: 3,  # 'α'
+        29: 3,  # 'β'
+        20: 3,  # 'γ'
+        21: 3,  # 'δ'
+        3: 3,  # 'ε'
+        32: 2,  # 'ζ'
+        13: 3,  # 'η'
+        25: 3,  # 'θ'
+        5: 0,  # 'ι'
+        11: 3,  # 'κ'
+        16: 3,  # 'λ'
+        10: 3,  # 'μ'
+        6: 3,  # 'ν'
+        30: 3,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 3,  # 'π'
+        8: 3,  # 'ρ'
+        14: 3,  # 'ς'
+        7: 3,  # 'σ'
+        2: 3,  # 'τ'
+        12: 0,  # 'υ'
+        28: 2,  # 'φ'
+        23: 3,  # 'χ'
+        42: 2,  # 'ψ'
+        24: 3,  # 'ω'
+        19: 3,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 3,  # 'ώ'
+    },
+    11: {  # 'κ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 3,  # 'ά'
+        18: 3,  # 'έ'
+        22: 3,  # 'ή'
+        15: 3,  # 'ί'
+        1: 3,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 3,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 3,  # 'η'
+        25: 2,  # 'θ'
+        5: 3,  # 'ι'
+        11: 3,  # 'κ'
+        16: 3,  # 'λ'
+        10: 3,  # 'μ'
+        6: 2,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 2,  # 'π'
+        8: 3,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 3,  # 'τ'
+        12: 3,  # 'υ'
+        28: 2,  # 'φ'
+        23: 2,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 3,  # 'ω'
+        19: 3,  # 'ό'
+        26: 3,  # 'ύ'
+        27: 3,  # 'ώ'
+    },
+    16: {  # 'λ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 3,  # 'ά'
+        18: 3,  # 'έ'
+        22: 3,  # 'ή'
+        15: 3,  # 'ί'
+        1: 3,  # 'α'
+        29: 1,  # 'β'
+        20: 2,  # 'γ'
+        21: 1,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 3,  # 'η'
+        25: 2,  # 'θ'
+        5: 3,  # 'ι'
+        11: 2,  # 'κ'
+        16: 3,  # 'λ'
+        10: 2,  # 'μ'
+        6: 2,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 3,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 3,  # 'τ'
+        12: 3,  # 'υ'
+        28: 2,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 3,  # 'ω'
+        19: 3,  # 'ό'
+        26: 3,  # 'ύ'
+        27: 3,  # 'ώ'
+    },
+    10: {  # 'μ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 1,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 3,  # 'ά'
+        18: 3,  # 'έ'
+        22: 3,  # 'ή'
+        15: 3,  # 'ί'
+        1: 3,  # 'α'
+        29: 3,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 3,  # 'η'
+        25: 0,  # 'θ'
+        5: 3,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 3,  # 'μ'
+        6: 3,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 3,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 2,  # 'υ'
+        28: 3,  # 'φ'
+        23: 0,  # 'χ'
+        42: 2,  # 'ψ'
+        24: 3,  # 'ω'
+        19: 3,  # 'ό'
+        26: 2,  # 'ύ'
+        27: 2,  # 'ώ'
+    },
+    6: {  # 'ν'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 2,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 3,  # 'ά'
+        18: 3,  # 'έ'
+        22: 3,  # 'ή'
+        15: 3,  # 'ί'
+        1: 3,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 3,  # 'δ'
+        3: 3,  # 'ε'
+        32: 2,  # 'ζ'
+        13: 3,  # 'η'
+        25: 3,  # 'θ'
+        5: 3,  # 'ι'
+        11: 0,  # 'κ'
+        16: 1,  # 'λ'
+        10: 0,  # 'μ'
+        6: 2,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 0,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 3,  # 'σ'
+        2: 3,  # 'τ'
+        12: 3,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 3,  # 'ω'
+        19: 3,  # 'ό'
+        26: 3,  # 'ύ'
+        27: 3,  # 'ώ'
+    },
+    30: {  # 'ξ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 2,  # 'ά'
+        18: 3,  # 'έ'
+        22: 3,  # 'ή'
+        15: 2,  # 'ί'
+        1: 3,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 3,  # 'η'
+        25: 0,  # 'θ'
+        5: 2,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 0,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 3,  # 'τ'
+        12: 2,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 3,  # 'ω'
+        19: 2,  # 'ό'
+        26: 3,  # 'ύ'
+        27: 1,  # 'ώ'
+    },
+    4: {  # 'ο'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 2,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 2,  # 'έ'
+        22: 3,  # 'ή'
+        15: 3,  # 'ί'
+        1: 2,  # 'α'
+        29: 3,  # 'β'
+        20: 3,  # 'γ'
+        21: 3,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 3,  # 'η'
+        25: 3,  # 'θ'
+        5: 3,  # 'ι'
+        11: 3,  # 'κ'
+        16: 3,  # 'λ'
+        10: 3,  # 'μ'
+        6: 3,  # 'ν'
+        30: 2,  # 'ξ'
+        4: 2,  # 'ο'
+        9: 3,  # 'π'
+        8: 3,  # 'ρ'
+        14: 3,  # 'ς'
+        7: 3,  # 'σ'
+        2: 3,  # 'τ'
+        12: 3,  # 'υ'
+        28: 3,  # 'φ'
+        23: 3,  # 'χ'
+        42: 2,  # 'ψ'
+        24: 2,  # 'ω'
+        19: 1,  # 'ό'
+        26: 3,  # 'ύ'
+        27: 2,  # 'ώ'
+    },
+    9: {  # 'π'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 3,  # 'ά'
+        18: 3,  # 'έ'
+        22: 3,  # 'ή'
+        15: 3,  # 'ί'
+        1: 3,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 3,  # 'η'
+        25: 0,  # 'θ'
+        5: 3,  # 'ι'
+        11: 0,  # 'κ'
+        16: 3,  # 'λ'
+        10: 0,  # 'μ'
+        6: 2,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 0,  # 'π'
+        8: 3,  # 'ρ'
+        14: 2,  # 'ς'
+        7: 0,  # 'σ'
+        2: 3,  # 'τ'
+        12: 3,  # 'υ'
+        28: 0,  # 'φ'
+        23: 2,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 3,  # 'ω'
+        19: 3,  # 'ό'
+        26: 2,  # 'ύ'
+        27: 3,  # 'ώ'
+    },
+    8: {  # 'ρ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 3,  # 'ά'
+        18: 3,  # 'έ'
+        22: 3,  # 'ή'
+        15: 3,  # 'ί'
+        1: 3,  # 'α'
+        29: 2,  # 'β'
+        20: 3,  # 'γ'
+        21: 2,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 3,  # 'η'
+        25: 3,  # 'θ'
+        5: 3,  # 'ι'
+        11: 3,  # 'κ'
+        16: 1,  # 'λ'
+        10: 3,  # 'μ'
+        6: 3,  # 'ν'
+        30: 2,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 2,  # 'π'
+        8: 2,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 2,  # 'σ'
+        2: 3,  # 'τ'
+        12: 3,  # 'υ'
+        28: 3,  # 'φ'
+        23: 3,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 3,  # 'ω'
+        19: 3,  # 'ό'
+        26: 3,  # 'ύ'
+        27: 3,  # 'ώ'
+    },
+    14: {  # 'ς'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 2,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 0,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 0,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 0,  # 'θ'
+        5: 0,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 0,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 0,  # 'τ'
+        12: 0,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    7: {  # 'σ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 2,  # 'ά'
+        18: 2,  # 'έ'
+        22: 3,  # 'ή'
+        15: 3,  # 'ί'
+        1: 3,  # 'α'
+        29: 3,  # 'β'
+        20: 0,  # 'γ'
+        21: 2,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 3,  # 'η'
+        25: 3,  # 'θ'
+        5: 3,  # 'ι'
+        11: 3,  # 'κ'
+        16: 2,  # 'λ'
+        10: 3,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 3,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 3,  # 'σ'
+        2: 3,  # 'τ'
+        12: 3,  # 'υ'
+        28: 3,  # 'φ'
+        23: 3,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 3,  # 'ω'
+        19: 3,  # 'ό'
+        26: 3,  # 'ύ'
+        27: 2,  # 'ώ'
+    },
+    2: {  # 'τ'
+        60: 0,  # 'e'
+        55: 2,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 3,  # 'ά'
+        18: 3,  # 'έ'
+        22: 3,  # 'ή'
+        15: 3,  # 'ί'
+        1: 3,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 3,  # 'ε'
+        32: 2,  # 'ζ'
+        13: 3,  # 'η'
+        25: 0,  # 'θ'
+        5: 3,  # 'ι'
+        11: 2,  # 'κ'
+        16: 2,  # 'λ'
+        10: 3,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 0,  # 'π'
+        8: 3,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 3,  # 'σ'
+        2: 3,  # 'τ'
+        12: 3,  # 'υ'
+        28: 2,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 3,  # 'ω'
+        19: 3,  # 'ό'
+        26: 3,  # 'ύ'
+        27: 3,  # 'ώ'
+    },
+    12: {  # 'υ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 2,  # 'ά'
+        18: 2,  # 'έ'
+        22: 3,  # 'ή'
+        15: 2,  # 'ί'
+        1: 3,  # 'α'
+        29: 2,  # 'β'
+        20: 3,  # 'γ'
+        21: 2,  # 'δ'
+        3: 2,  # 'ε'
+        32: 2,  # 'ζ'
+        13: 2,  # 'η'
+        25: 3,  # 'θ'
+        5: 2,  # 'ι'
+        11: 3,  # 'κ'
+        16: 3,  # 'λ'
+        10: 3,  # 'μ'
+        6: 3,  # 'ν'
+        30: 3,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 3,  # 'π'
+        8: 3,  # 'ρ'
+        14: 3,  # 'ς'
+        7: 3,  # 'σ'
+        2: 3,  # 'τ'
+        12: 0,  # 'υ'
+        28: 2,  # 'φ'
+        23: 3,  # 'χ'
+        42: 2,  # 'ψ'
+        24: 2,  # 'ω'
+        19: 2,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 2,  # 'ώ'
+    },
+    28: {  # 'φ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 3,  # 'ά'
+        18: 3,  # 'έ'
+        22: 3,  # 'ή'
+        15: 3,  # 'ί'
+        1: 3,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 2,  # 'η'
+        25: 2,  # 'θ'
+        5: 3,  # 'ι'
+        11: 0,  # 'κ'
+        16: 2,  # 'λ'
+        10: 0,  # 'μ'
+        6: 1,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 0,  # 'π'
+        8: 3,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 3,  # 'τ'
+        12: 3,  # 'υ'
+        28: 1,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 3,  # 'ω'
+        19: 3,  # 'ό'
+        26: 2,  # 'ύ'
+        27: 2,  # 'ώ'
+    },
+    23: {  # 'χ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 3,  # 'ά'
+        18: 2,  # 'έ'
+        22: 3,  # 'ή'
+        15: 3,  # 'ί'
+        1: 3,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 2,  # 'η'
+        25: 2,  # 'θ'
+        5: 3,  # 'ι'
+        11: 0,  # 'κ'
+        16: 2,  # 'λ'
+        10: 2,  # 'μ'
+        6: 3,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 0,  # 'π'
+        8: 3,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 3,  # 'τ'
+        12: 3,  # 'υ'
+        28: 0,  # 'φ'
+        23: 2,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 3,  # 'ω'
+        19: 3,  # 'ό'
+        26: 3,  # 'ύ'
+        27: 3,  # 'ώ'
+    },
+    42: {  # 'ψ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 2,  # 'ά'
+        18: 2,  # 'έ'
+        22: 1,  # 'ή'
+        15: 2,  # 'ί'
+        1: 2,  # 'α'
+        29: 0,  # 'β'
+        20: 0,  # 'γ'
+        21: 0,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 3,  # 'η'
+        25: 0,  # 'θ'
+        5: 2,  # 'ι'
+        11: 0,  # 'κ'
+        16: 0,  # 'λ'
+        10: 0,  # 'μ'
+        6: 0,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 2,  # 'ο'
+        9: 0,  # 'π'
+        8: 0,  # 'ρ'
+        14: 0,  # 'ς'
+        7: 0,  # 'σ'
+        2: 2,  # 'τ'
+        12: 1,  # 'υ'
+        28: 0,  # 'φ'
+        23: 0,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 2,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    24: {  # 'ω'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 1,  # 'ά'
+        18: 0,  # 'έ'
+        22: 2,  # 'ή'
+        15: 0,  # 'ί'
+        1: 0,  # 'α'
+        29: 2,  # 'β'
+        20: 3,  # 'γ'
+        21: 2,  # 'δ'
+        3: 0,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 0,  # 'η'
+        25: 3,  # 'θ'
+        5: 2,  # 'ι'
+        11: 0,  # 'κ'
+        16: 2,  # 'λ'
+        10: 3,  # 'μ'
+        6: 3,  # 'ν'
+        30: 0,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 3,  # 'π'
+        8: 3,  # 'ρ'
+        14: 3,  # 'ς'
+        7: 3,  # 'σ'
+        2: 3,  # 'τ'
+        12: 0,  # 'υ'
+        28: 2,  # 'φ'
+        23: 2,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    19: {  # 'ό'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 0,  # 'α'
+        29: 3,  # 'β'
+        20: 3,  # 'γ'
+        21: 3,  # 'δ'
+        3: 1,  # 'ε'
+        32: 2,  # 'ζ'
+        13: 2,  # 'η'
+        25: 2,  # 'θ'
+        5: 2,  # 'ι'
+        11: 3,  # 'κ'
+        16: 3,  # 'λ'
+        10: 3,  # 'μ'
+        6: 3,  # 'ν'
+        30: 1,  # 'ξ'
+        4: 2,  # 'ο'
+        9: 3,  # 'π'
+        8: 3,  # 'ρ'
+        14: 3,  # 'ς'
+        7: 3,  # 'σ'
+        2: 3,  # 'τ'
+        12: 0,  # 'υ'
+        28: 2,  # 'φ'
+        23: 3,  # 'χ'
+        42: 2,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    26: {  # 'ύ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 2,  # 'α'
+        29: 2,  # 'β'
+        20: 2,  # 'γ'
+        21: 1,  # 'δ'
+        3: 3,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 2,  # 'η'
+        25: 3,  # 'θ'
+        5: 0,  # 'ι'
+        11: 3,  # 'κ'
+        16: 3,  # 'λ'
+        10: 3,  # 'μ'
+        6: 3,  # 'ν'
+        30: 2,  # 'ξ'
+        4: 3,  # 'ο'
+        9: 3,  # 'π'
+        8: 3,  # 'ρ'
+        14: 3,  # 'ς'
+        7: 3,  # 'σ'
+        2: 3,  # 'τ'
+        12: 0,  # 'υ'
+        28: 2,  # 'φ'
+        23: 2,  # 'χ'
+        42: 2,  # 'ψ'
+        24: 2,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+    27: {  # 'ώ'
+        60: 0,  # 'e'
+        55: 0,  # 'o'
+        58: 0,  # 't'
+        36: 0,  # '·'
+        61: 0,  # 'Ά'
+        46: 0,  # 'Έ'
+        54: 0,  # 'Ό'
+        31: 0,  # 'Α'
+        51: 0,  # 'Β'
+        43: 0,  # 'Γ'
+        41: 0,  # 'Δ'
+        34: 0,  # 'Ε'
+        40: 0,  # 'Η'
+        52: 0,  # 'Θ'
+        47: 0,  # 'Ι'
+        44: 0,  # 'Κ'
+        53: 0,  # 'Λ'
+        38: 0,  # 'Μ'
+        49: 0,  # 'Ν'
+        59: 0,  # 'Ξ'
+        39: 0,  # 'Ο'
+        35: 0,  # 'Π'
+        48: 0,  # 'Ρ'
+        37: 0,  # 'Σ'
+        33: 0,  # 'Τ'
+        45: 0,  # 'Υ'
+        56: 0,  # 'Φ'
+        50: 0,  # 'Χ'
+        57: 0,  # 'Ω'
+        17: 0,  # 'ά'
+        18: 0,  # 'έ'
+        22: 0,  # 'ή'
+        15: 0,  # 'ί'
+        1: 0,  # 'α'
+        29: 1,  # 'β'
+        20: 0,  # 'γ'
+        21: 3,  # 'δ'
+        3: 0,  # 'ε'
+        32: 0,  # 'ζ'
+        13: 1,  # 'η'
+        25: 2,  # 'θ'
+        5: 2,  # 'ι'
+        11: 0,  # 'κ'
+        16: 2,  # 'λ'
+        10: 3,  # 'μ'
+        6: 3,  # 'ν'
+        30: 1,  # 'ξ'
+        4: 0,  # 'ο'
+        9: 2,  # 'π'
+        8: 3,  # 'ρ'
+        14: 3,  # 'ς'
+        7: 3,  # 'σ'
+        2: 3,  # 'τ'
+        12: 0,  # 'υ'
+        28: 1,  # 'φ'
+        23: 1,  # 'χ'
+        42: 0,  # 'ψ'
+        24: 0,  # 'ω'
+        19: 0,  # 'ό'
+        26: 0,  # 'ύ'
+        27: 0,  # 'ώ'
+    },
+}
+
+# 255: Undefined characters that did not exist in training text
+# 254: Carriage/Return
+# 253: symbol (punctuation) that does not belong to word
+# 252: 0 - 9
+# 251: Control characters
+
+# Character Mapping Table(s):
+WINDOWS_1253_GREEK_CHAR_TO_ORDER = {
+    0: 255,  # '\x00'
+    1: 255,  # '\x01'
+    2: 255,  # '\x02'
+    3: 255,  # '\x03'
+    4: 255,  # '\x04'
+    5: 255,  # '\x05'
+    6: 255,  # '\x06'
+    7: 255,  # '\x07'
+    8: 255,  # '\x08'
+    9: 255,  # '\t'
+    10: 254,  # '\n'
+    11: 255,  # '\x0b'
+    12: 255,  # '\x0c'
+    13: 254,  # '\r'
+    14: 255,  # '\x0e'
+    15: 255,  # '\x0f'
+    16: 255,  # '\x10'
+    17: 255,  # '\x11'
+    18: 255,  # '\x12'
+    19: 255,  # '\x13'
+    20: 255,  # '\x14'
+    21: 255,  # '\x15'
+    22: 255,  # '\x16'
+    23: 255,  # '\x17'
+    24: 255,  # '\x18'
+    25: 255,  # '\x19'
+    26: 255,  # '\x1a'
+    27: 255,  # '\x1b'
+    28: 255,  # '\x1c'
+    29: 255,  # '\x1d'
+    30: 255,  # '\x1e'
+    31: 255,  # '\x1f'
+    32: 253,  # ' '
+    33: 253,  # '!'
+    34: 253,  # '"'
+    35: 253,  # '#'
+    36: 253,  # '$'
+    37: 253,  # '%'
+    38: 253,  # '&'
+    39: 253,  # "'"
+    40: 253,  # '('
+    41: 253,  # ')'
+    42: 253,  # '*'
+    43: 253,  # '+'
+    44: 253,  # ','
+    45: 253,  # '-'
+    46: 253,  # '.'
+    47: 253,  # '/'
+    48: 252,  # '0'
+    49: 252,  # '1'
+    50: 252,  # '2'
+    51: 252,  # '3'
+    52: 252,  # '4'
+    53: 252,  # '5'
+    54: 252,  # '6'
+    55: 252,  # '7'
+    56: 252,  # '8'
+    57: 252,  # '9'
+    58: 253,  # ':'
+    59: 253,  # ';'
+    60: 253,  # '<'
+    61: 253,  # '='
+    62: 253,  # '>'
+    63: 253,  # '?'
+    64: 253,  # '@'
+    65: 82,  # 'A'
+    66: 100,  # 'B'
+    67: 104,  # 'C'
+    68: 94,  # 'D'
+    69: 98,  # 'E'
+    70: 101,  # 'F'
+    71: 116,  # 'G'
+    72: 102,  # 'H'
+    73: 111,  # 'I'
+    74: 187,  # 'J'
+    75: 117,  # 'K'
+    76: 92,  # 'L'
+    77: 88,  # 'M'
+    78: 113,  # 'N'
+    79: 85,  # 'O'
+    80: 79,  # 'P'
+    81: 118,  # 'Q'
+    82: 105,  # 'R'
+    83: 83,  # 'S'
+    84: 67,  # 'T'
+    85: 114,  # 'U'
+    86: 119,  # 'V'
+    87: 95,  # 'W'
+    88: 99,  # 'X'
+    89: 109,  # 'Y'
+    90: 188,  # 'Z'
+    91: 253,  # '['
+    92: 253,  # '\\'
+    93: 253,  # ']'
+    94: 253,  # '^'
+    95: 253,  # '_'
+    96: 253,  # '`'
+    97: 72,  # 'a'
+    98: 70,  # 'b'
+    99: 80,  # 'c'
+    100: 81,  # 'd'
+    101: 60,  # 'e'
+    102: 96,  # 'f'
+    103: 93,  # 'g'
+    104: 89,  # 'h'
+    105: 68,  # 'i'
+    106: 120,  # 'j'
+    107: 97,  # 'k'
+    108: 77,  # 'l'
+    109: 86,  # 'm'
+    110: 69,  # 'n'
+    111: 55,  # 'o'
+    112: 78,  # 'p'
+    113: 115,  # 'q'
+    114: 65,  # 'r'
+    115: 66,  # 's'
+    116: 58,  # 't'
+    117: 76,  # 'u'
+    118: 106,  # 'v'
+    119: 103,  # 'w'
+    120: 87,  # 'x'
+    121: 107,  # 'y'
+    122: 112,  # 'z'
+    123: 253,  # '{'
+    124: 253,  # '|'
+    125: 253,  # '}'
+    126: 253,  # '~'
+    127: 253,  # '\x7f'
+    128: 255,  # '€'
+    129: 255,  # None
+    130: 255,  # '‚'
+    131: 255,  # 'ƒ'
+    132: 255,  # '„'
+    133: 255,  # '…'
+    134: 255,  # '†'
+    135: 255,  # '‡'
+    136: 255,  # None
+    137: 255,  # '‰'
+    138: 255,  # None
+    139: 255,  # '‹'
+    140: 255,  # None
+    141: 255,  # None
+    142: 255,  # None
+    143: 255,  # None
+    144: 255,  # None
+    145: 255,  # '‘'
+    146: 255,  # '’'
+    147: 255,  # '“'
+    148: 255,  # '”'
+    149: 255,  # '•'
+    150: 255,  # '–'
+    151: 255,  # '—'
+    152: 255,  # None
+    153: 255,  # '™'
+    154: 255,  # None
+    155: 255,  # '›'
+    156: 255,  # None
+    157: 255,  # None
+    158: 255,  # None
+    159: 255,  # None
+    160: 253,  # '\xa0'
+    161: 233,  # '΅'
+    162: 61,  # 'Ά'
+    163: 253,  # '£'
+    164: 253,  # '¤'
+    165: 253,  # '¥'
+    166: 253,  # '¦'
+    167: 253,  # '§'
+    168: 253,  # '¨'
+    169: 253,  # '©'
+    170: 253,  # None
+    171: 253,  # '«'
+    172: 253,  # '¬'
+    173: 74,  # '\xad'
+    174: 253,  # '®'
+    175: 253,  # '―'
+    176: 253,  # '°'
+    177: 253,  # '±'
+    178: 253,  # '²'
+    179: 253,  # '³'
+    180: 247,  # '΄'
+    181: 253,  # 'µ'
+    182: 253,  # '¶'
+    183: 36,  # '·'
+    184: 46,  # 'Έ'
+    185: 71,  # 'Ή'
+    186: 73,  # 'Ί'
+    187: 253,  # '»'
+    188: 54,  # 'Ό'
+    189: 253,  # '½'
+    190: 108,  # 'Ύ'
+    191: 123,  # 'Ώ'
+    192: 110,  # 'ΐ'
+    193: 31,  # 'Α'
+    194: 51,  # 'Β'
+    195: 43,  # 'Γ'
+    196: 41,  # 'Δ'
+    197: 34,  # 'Ε'
+    198: 91,  # 'Ζ'
+    199: 40,  # 'Η'
+    200: 52,  # 'Θ'
+    201: 47,  # 'Ι'
+    202: 44,  # 'Κ'
+    203: 53,  # 'Λ'
+    204: 38,  # 'Μ'
+    205: 49,  # 'Ν'
+    206: 59,  # 'Ξ'
+    207: 39,  # 'Ο'
+    208: 35,  # 'Π'
+    209: 48,  # 'Ρ'
+    210: 250,  # None
+    211: 37,  # 'Σ'
+    212: 33,  # 'Τ'
+    213: 45,  # 'Υ'
+    214: 56,  # 'Φ'
+    215: 50,  # 'Χ'
+    216: 84,  # 'Ψ'
+    217: 57,  # 'Ω'
+    218: 120,  # 'Ϊ'
+    219: 121,  # 'Ϋ'
+    220: 17,  # 'ά'
+    221: 18,  # 'έ'
+    222: 22,  # 'ή'
+    223: 15,  # 'ί'
+    224: 124,  # 'ΰ'
+    225: 1,  # 'α'
+    226: 29,  # 'β'
+    227: 20,  # 'γ'
+    228: 21,  # 'δ'
+    229: 3,  # 'ε'
+    230: 32,  # 'ζ'
+    231: 13,  # 'η'
+    232: 25,  # 'θ'
+    233: 5,  # 'ι'
+    234: 11,  # 'κ'
+    235: 16,  # 'λ'
+    236: 10,  # 'μ'
+    237: 6,  # 'ν'
+    238: 30,  # 'ξ'
+    239: 4,  # 'ο'
+    240: 9,  # 'π'
+    241: 8,  # 'ρ'
+    242: 14,  # 'ς'
+    243: 7,  # 'σ'
+    244: 2,  # 'τ'
+    245: 12,  # 'υ'
+    246: 28,  # 'φ'
+    247: 23,  # 'χ'
+    248: 42,  # 'ψ'
+    249: 24,  # 'ω'
+    250: 64,  # 'ϊ'
+    251: 75,  # 'ϋ'
+    252: 19,  # 'ό'
+    253: 26,  # 'ύ'
+    254: 27,  # 'ώ'
+    255: 253,  # None
+}
+
+WINDOWS_1253_GREEK_MODEL = SingleByteCharSetModel(
+    charset_name="windows-1253",
+    language="Greek",
+    char_to_order_map=WINDOWS_1253_GREEK_CHAR_TO_ORDER,
+    language_model=GREEK_LANG_MODEL,
+    typical_positive_ratio=0.982851,
+    keep_ascii_letters=False,
+    alphabet="ΆΈΉΊΌΎΏΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩάέήίαβγδεζηθικλμνξοπρςστυφχψωόύώ",
+)
+
+ISO_8859_7_GREEK_CHAR_TO_ORDER = {
+    0: 255,  # '\x00'
+    1: 255,  # '\x01'
+    2: 255,  # '\x02'
+    3: 255,  # '\x03'
+    4: 255,  # '\x04'
+    5: 255,  # '\x05'
+    6: 255,  # '\x06'
+    7: 255,  # '\x07'
+    8: 255,  # '\x08'
+    9: 255,  # '\t'
+    10: 254,  # '\n'
+    11: 255,  # '\x0b'
+    12: 255,  # '\x0c'
+    13: 254,  # '\r'
+    14: 255,  # '\x0e'
+    15: 255,  # '\x0f'
+    16: 255,  # '\x10'
+    17: 255,  # '\x11'
+    18: 255,  # '\x12'
+    19: 255,  # '\x13'
+    20: 255,  # '\x14'
+    21: 255,  # '\x15'
+    22: 255,  # '\x16'
+    23: 255,  # '\x17'
+    24: 255,  # '\x18'
+    25: 255,  # '\x19'
+    26: 255,  # '\x1a'
+    27: 255,  # '\x1b'
+    28: 255,  # '\x1c'
+    29: 255,  # '\x1d'
+    30: 255,  # '\x1e'
+    31: 255,  # '\x1f'
+    32: 253,  # ' '
+    33: 253,  # '!'
+    34: 253,  # '"'
+    35: 253,  # '#'
+    36: 253,  # '$'
+    37: 253,  # '%'
+    38: 253,  # '&'
+    39: 253,  # "'"
+    40: 253,  # '('
+    41: 253,  # ')'
+    42: 253,  # '*'
+    43: 253,  # '+'
+    44: 253,  # ','
+    45: 253,  # '-'
+    46: 253,  # '.'
+    47: 253,  # '/'
+    48: 252,  # '0'
+    49: 252,  # '1'
+    50: 252,  # '2'
+    51: 252,  # '3'
+    52: 252,  # '4'
+    53: 252,  # '5'
+    54: 252,  # '6'
+    55: 252,  # '7'
+    56: 252,  # '8'
+    57: 252,  # '9'
+    58: 253,  # ':'
+    59: 253,  # ';'
+    60: 253,  # '<'
+    61: 253,  # '='
+    62: 253,  # '>'
+    63: 253,  # '?'
+    64: 253,  # '@'
+    65: 82,  # 'A'
+    66: 100,  # 'B'
+    67: 104,  # 'C'
+    68: 94,  # 'D'
+    69: 98,  # 'E'
+    70: 101,  # 'F'
+    71: 116,  # 'G'
+    72: 102,  # 'H'
+    73: 111,  # 'I'
+    74: 187,  # 'J'
+    75: 117,  # 'K'
+    76: 92,  # 'L'
+    77: 88,  # 'M'
+    78: 113,  # 'N'
+    79: 85,  # 'O'
+    80: 79,  # 'P'
+    81: 118,  # 'Q'
+    82: 105,  # 'R'
+    83: 83,  # 'S'
+    84: 67,  # 'T'
+    85: 114,  # 'U'
+    86: 119,  # 'V'
+    87: 95,  # 'W'
+    88: 99,  # 'X'
+    89: 109,  # 'Y'
+    90: 188,  # 'Z'
+    91: 253,  # '['
+    92: 253,  # '\\'
+    93: 253,  # ']'
+    94: 253,  # '^'
+    95: 253,  # '_'
+    96: 253,  # '`'
+    97: 72,  # 'a'
+    98: 70,  # 'b'
+    99: 80,  # 'c'
+    100: 81,  # 'd'
+    101: 60,  # 'e'
+    102: 96,  # 'f'
+    103: 93,  # 'g'
+    104: 89,  # 'h'
+    105: 68,  # 'i'
+    106: 120,  # 'j'
+    107: 97,  # 'k'
+    108: 77,  # 'l'
+    109: 86,  # 'm'
+    110: 69,  # 'n'
+    111: 55,  # 'o'
+    112: 78,  # 'p'
+    113: 115,  # 'q'
+    114: 65,  # 'r'
+    115: 66,  # 's'
+    116: 58,  # 't'
+    117: 76,  # 'u'
+    118: 106,  # 'v'
+    119: 103,  # 'w'
+    120: 87,  # 'x'
+    121: 107,  # 'y'
+    122: 112,  # 'z'
+    123: 253,  # '{'
+    124: 253,  # '|'
+    125: 253,  # '}'
+    126: 253,  # '~'
+    127: 253,  # '\x7f'
+    128: 255,  # '\x80'
+    129: 255,  # '\x81'
+    130: 255,  # '\x82'
+    131: 255,  # '\x83'
+    132: 255,  # '\x84'
+    133: 255,  # '\x85'
+    134: 255,  # '\x86'
+    135: 255,  # '\x87'
+    136: 255,  # '\x88'
+    137: 255,  # '\x89'
+    138: 255,  # '\x8a'
+    139: 255,  # '\x8b'
+    140: 255,  # '\x8c'
+    141: 255,  # '\x8d'
+    142: 255,  # '\x8e'
+    143: 255,  # '\x8f'
+    144: 255,  # '\x90'
+    145: 255,  # '\x91'
+    146: 255,  # '\x92'
+    147: 255,  # '\x93'
+    148: 255,  # '\x94'
+    149: 255,  # '\x95'
+    150: 255,  # '\x96'
+    151: 255,  # '\x97'
+    152: 255,  # '\x98'
+    153: 255,  # '\x99'
+    154: 255,  # '\x9a'
+    155: 255,  # '\x9b'
+    156: 255,  # '\x9c'
+    157: 255,  # '\x9d'
+    158: 255,  # '\x9e'
+    159: 255,  # '\x9f'
+    160: 253,  # '\xa0'
+    161: 233,  # '‘'
+    162: 90,  # '’'
+    163: 253,  # '£'
+    164: 253,  # '€'
+    165: 253,  # '₯'
+    166: 253,  # '¦'
+    167: 253,  # '§'
+    168: 253,  # '¨'
+    169: 253,  # '©'
+    170: 253,  # 'ͺ'
+    171: 253,  # '«'
+    172: 253,  # '¬'
+    173: 74,  # '\xad'
+    174: 253,  # None
+    175: 253,  # '―'
+    176: 253,  # '°'
+    177: 253,  # '±'
+    178: 253,  # '²'
+    179: 253,  # '³'
+    180: 247,  # '΄'
+    181: 248,  # '΅'
+    182: 61,  # 'Ά'
+    183: 36,  # '·'
+    184: 46,  # 'Έ'
+    185: 71,  # 'Ή'
+    186: 73,  # 'Ί'
+    187: 253,  # '»'
+    188: 54,  # 'Ό'
+    189: 253,  # '½'
+    190: 108,  # 'Ύ'
+    191: 123,  # 'Ώ'
+    192: 110,  # 'ΐ'
+    193: 31,  # 'Α'
+    194: 51,  # 'Β'
+    195: 43,  # 'Γ'
+    196: 41,  # 'Δ'
+    197: 34,  # 'Ε'
+    198: 91,  # 'Ζ'
+    199: 40,  # 'Η'
+    200: 52,  # 'Θ'
+    201: 47,  # 'Ι'
+    202: 44,  # 'Κ'
+    203: 53,  # 'Λ'
+    204: 38,  # 'Μ'
+    205: 49,  # 'Ν'
+    206: 59,  # 'Ξ'
+    207: 39,  # 'Ο'
+    208: 35,  # 'Π'
+    209: 48,  # 'Ρ'
+    210: 250,  # None
+    211: 37,  # 'Σ'
+    212: 33,  # 'Τ'
+    213: 45,  # 'Υ'
+    214: 56,  # 'Φ'
+    215: 50,  # 'Χ'
+    216: 84,  # 'Ψ'
+    217: 57,  # 'Ω'
+    218: 120,  # 'Ϊ'
+    219: 121,  # 'Ϋ'
+    220: 17,  # 'ά'
+    221: 18,  # 'έ'
+    222: 22,  # 'ή'
+    223: 15,  # 'ί'
+    224: 124,  # 'ΰ'
+    225: 1,  # 'α'
+    226: 29,  # 'β'
+    227: 20,  # 'γ'
+    228: 21,  # 'δ'
+    229: 3,  # 'ε'
+    230: 32,  # 'ζ'
+    231: 13,  # 'η'
+    232: 25,  # 'θ'
+    233: 5,  # 'ι'
+    234: 11,  # 'κ'
+    235: 16,  # 'λ'
+    236: 10,  # 'μ'
+    237: 6,  # 'ν'
+    238: 30,  # 'ξ'
+    239: 4,  # 'ο'
+    240: 9,  # 'π'
+    241: 8,  # 'ρ'
+    242: 14,  # 'ς'
+    243: 7,  # 'σ'
+    244: 2,  # 'τ'
+    245: 12,  # 'υ'
+    246: 28,  # 'φ'
+    247: 23,  # 'χ'
+    248: 42,  # 'ψ'
+    249: 24,  # 'ω'
+    250: 64,  # 'ϊ'
+    251: 75,  # 'ϋ'
+    252: 19,  # 'ό'
+    253: 26,  # 'ύ'
+    254: 27,  # 'ώ'
+    255: 253,  # None
+}
+
+ISO_8859_7_GREEK_MODEL = SingleByteCharSetModel(
+    charset_name="ISO-8859-7",
+    language="Greek",
+    char_to_order_map=ISO_8859_7_GREEK_CHAR_TO_ORDER,
+    language_model=GREEK_LANG_MODEL,
+    typical_positive_ratio=0.982851,
+    keep_ascii_letters=False,
+    alphabet="ΆΈΉΊΌΎΏΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩάέήίαβγδεζηθικλμνξοπρςστυφχψωόύώ",
+)
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langhebrewmodel.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langhebrewmodel.py
new file mode 100644
index 0000000..56d2975
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langhebrewmodel.py
@@ -0,0 +1,4380 @@
+from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
+
+# 3: Positive
+# 2: Likely
+# 1: Unlikely
+# 0: Negative
+
+HEBREW_LANG_MODEL = {
+    50: {  # 'a'
+        50: 0,  # 'a'
+        60: 1,  # 'c'
+        61: 1,  # 'd'
+        42: 1,  # 'e'
+        53: 1,  # 'i'
+        56: 2,  # 'l'
+        54: 2,  # 'n'
+        49: 0,  # 'o'
+        51: 2,  # 'r'
+        43: 1,  # 's'
+        44: 2,  # 't'
+        63: 1,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 0,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 1,  # 'ה'
+        2: 0,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 0,  # 'ל'
+        11: 0,  # 'ם'
+        6: 1,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 1,  # 'ק'
+        7: 0,  # 'ר'
+        10: 1,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 1,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    60: {  # 'c'
+        50: 1,  # 'a'
+        60: 1,  # 'c'
+        61: 0,  # 'd'
+        42: 1,  # 'e'
+        53: 1,  # 'i'
+        56: 1,  # 'l'
+        54: 0,  # 'n'
+        49: 1,  # 'o'
+        51: 1,  # 'r'
+        43: 1,  # 's'
+        44: 2,  # 't'
+        63: 1,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 1,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 1,  # 'ה'
+        2: 0,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 0,  # 'ל'
+        11: 0,  # 'ם'
+        6: 1,  # 'מ'
+        23: 0,  # 'ן'
+        12: 1,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 0,  # 'ר'
+        10: 0,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    61: {  # 'd'
+        50: 1,  # 'a'
+        60: 0,  # 'c'
+        61: 1,  # 'd'
+        42: 1,  # 'e'
+        53: 1,  # 'i'
+        56: 1,  # 'l'
+        54: 1,  # 'n'
+        49: 2,  # 'o'
+        51: 1,  # 'r'
+        43: 1,  # 's'
+        44: 0,  # 't'
+        63: 1,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 0,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 1,  # 'ה'
+        2: 0,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 0,  # 'ל'
+        11: 0,  # 'ם'
+        6: 0,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 0,  # 'ר'
+        10: 0,  # 'ש'
+        5: 0,  # 'ת'
+        32: 1,  # '–'
+        52: 1,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    42: {  # 'e'
+        50: 1,  # 'a'
+        60: 1,  # 'c'
+        61: 2,  # 'd'
+        42: 1,  # 'e'
+        53: 1,  # 'i'
+        56: 2,  # 'l'
+        54: 2,  # 'n'
+        49: 1,  # 'o'
+        51: 2,  # 'r'
+        43: 2,  # 's'
+        44: 2,  # 't'
+        63: 1,  # 'u'
+        34: 1,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 0,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 0,  # 'ה'
+        2: 0,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 0,  # 'ל'
+        11: 0,  # 'ם'
+        6: 0,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 1,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 0,  # 'ר'
+        10: 0,  # 'ש'
+        5: 0,  # 'ת'
+        32: 1,  # '–'
+        52: 2,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    53: {  # 'i'
+        50: 1,  # 'a'
+        60: 2,  # 'c'
+        61: 1,  # 'd'
+        42: 1,  # 'e'
+        53: 0,  # 'i'
+        56: 1,  # 'l'
+        54: 2,  # 'n'
+        49: 2,  # 'o'
+        51: 1,  # 'r'
+        43: 2,  # 's'
+        44: 2,  # 't'
+        63: 1,  # 'u'
+        34: 0,  # '\xa0'
+        55: 1,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 0,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 0,  # 'ה'
+        2: 0,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 0,  # 'ל'
+        11: 0,  # 'ם'
+        6: 0,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 0,  # 'ר'
+        10: 0,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 1,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    56: {  # 'l'
+        50: 1,  # 'a'
+        60: 1,  # 'c'
+        61: 1,  # 'd'
+        42: 2,  # 'e'
+        53: 2,  # 'i'
+        56: 2,  # 'l'
+        54: 1,  # 'n'
+        49: 1,  # 'o'
+        51: 0,  # 'r'
+        43: 1,  # 's'
+        44: 1,  # 't'
+        63: 1,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 0,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 0,  # 'ה'
+        2: 0,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 0,  # 'ל'
+        11: 0,  # 'ם'
+        6: 0,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 0,  # 'ר'
+        10: 0,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 1,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    54: {  # 'n'
+        50: 1,  # 'a'
+        60: 1,  # 'c'
+        61: 1,  # 'd'
+        42: 1,  # 'e'
+        53: 1,  # 'i'
+        56: 1,  # 'l'
+        54: 1,  # 'n'
+        49: 1,  # 'o'
+        51: 0,  # 'r'
+        43: 1,  # 's'
+        44: 2,  # 't'
+        63: 1,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 0,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 1,  # 'ה'
+        2: 0,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 0,  # 'ל'
+        11: 0,  # 'ם'
+        6: 0,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 0,  # 'ר'
+        10: 0,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 2,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    49: {  # 'o'
+        50: 1,  # 'a'
+        60: 1,  # 'c'
+        61: 1,  # 'd'
+        42: 1,  # 'e'
+        53: 1,  # 'i'
+        56: 1,  # 'l'
+        54: 2,  # 'n'
+        49: 1,  # 'o'
+        51: 2,  # 'r'
+        43: 1,  # 's'
+        44: 1,  # 't'
+        63: 1,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 0,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 0,  # 'ה'
+        2: 0,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 0,  # 'ל'
+        11: 0,  # 'ם'
+        6: 0,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 0,  # 'ר'
+        10: 0,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 1,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    51: {  # 'r'
+        50: 2,  # 'a'
+        60: 1,  # 'c'
+        61: 1,  # 'd'
+        42: 2,  # 'e'
+        53: 1,  # 'i'
+        56: 1,  # 'l'
+        54: 1,  # 'n'
+        49: 2,  # 'o'
+        51: 1,  # 'r'
+        43: 1,  # 's'
+        44: 1,  # 't'
+        63: 1,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 0,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 0,  # 'ה'
+        2: 0,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 0,  # 'ל'
+        11: 0,  # 'ם'
+        6: 0,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 0,  # 'ר'
+        10: 0,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 2,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    43: {  # 's'
+        50: 1,  # 'a'
+        60: 1,  # 'c'
+        61: 0,  # 'd'
+        42: 2,  # 'e'
+        53: 1,  # 'i'
+        56: 1,  # 'l'
+        54: 1,  # 'n'
+        49: 1,  # 'o'
+        51: 1,  # 'r'
+        43: 1,  # 's'
+        44: 2,  # 't'
+        63: 1,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 0,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 0,  # 'ה'
+        2: 0,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 0,  # 'ל'
+        11: 0,  # 'ם'
+        6: 0,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 0,  # 'ר'
+        10: 0,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 1,  # '’'
+        47: 0,  # '“'
+        46: 2,  # '”'
+        58: 0,  # '†'
+        40: 2,  # '…'
+    },
+    44: {  # 't'
+        50: 1,  # 'a'
+        60: 1,  # 'c'
+        61: 0,  # 'd'
+        42: 2,  # 'e'
+        53: 2,  # 'i'
+        56: 1,  # 'l'
+        54: 0,  # 'n'
+        49: 1,  # 'o'
+        51: 1,  # 'r'
+        43: 1,  # 's'
+        44: 1,  # 't'
+        63: 1,  # 'u'
+        34: 1,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 0,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 0,  # 'ה'
+        2: 0,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 0,  # 'ל'
+        11: 0,  # 'ם'
+        6: 0,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 0,  # 'ר'
+        10: 0,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 2,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    63: {  # 'u'
+        50: 1,  # 'a'
+        60: 1,  # 'c'
+        61: 1,  # 'd'
+        42: 1,  # 'e'
+        53: 1,  # 'i'
+        56: 1,  # 'l'
+        54: 1,  # 'n'
+        49: 0,  # 'o'
+        51: 1,  # 'r'
+        43: 2,  # 's'
+        44: 1,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 0,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 0,  # 'ה'
+        2: 0,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 0,  # 'ל'
+        11: 0,  # 'ם'
+        6: 0,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 0,  # 'ר'
+        10: 0,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 1,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    34: {  # '\xa0'
+        50: 1,  # 'a'
+        60: 0,  # 'c'
+        61: 1,  # 'd'
+        42: 0,  # 'e'
+        53: 1,  # 'i'
+        56: 0,  # 'l'
+        54: 1,  # 'n'
+        49: 1,  # 'o'
+        51: 0,  # 'r'
+        43: 1,  # 's'
+        44: 1,  # 't'
+        63: 0,  # 'u'
+        34: 2,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 2,  # 'א'
+        8: 1,  # 'ב'
+        20: 1,  # 'ג'
+        16: 1,  # 'ד'
+        3: 1,  # 'ה'
+        2: 1,  # 'ו'
+        24: 1,  # 'ז'
+        14: 1,  # 'ח'
+        22: 1,  # 'ט'
+        1: 2,  # 'י'
+        25: 0,  # 'ך'
+        15: 1,  # 'כ'
+        4: 1,  # 'ל'
+        11: 0,  # 'ם'
+        6: 2,  # 'מ'
+        23: 0,  # 'ן'
+        12: 1,  # 'נ'
+        19: 1,  # 'ס'
+        13: 1,  # 'ע'
+        26: 0,  # 'ף'
+        18: 1,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 1,  # 'צ'
+        17: 1,  # 'ק'
+        7: 1,  # 'ר'
+        10: 1,  # 'ש'
+        5: 1,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    55: {  # '´'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 1,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 1,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 1,  # 'ה'
+        2: 1,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 2,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 1,  # 'ל'
+        11: 0,  # 'ם'
+        6: 1,  # 'מ'
+        23: 1,  # 'ן'
+        12: 1,  # 'נ'
+        19: 1,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 1,  # 'ר'
+        10: 1,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    48: {  # '¼'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 1,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 0,  # 'ה'
+        2: 1,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 1,  # 'כ'
+        4: 1,  # 'ל'
+        11: 0,  # 'ם'
+        6: 1,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 0,  # 'ר'
+        10: 0,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    39: {  # '½'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 0,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 0,  # 'ה'
+        2: 0,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 1,  # 'כ'
+        4: 1,  # 'ל'
+        11: 0,  # 'ם'
+        6: 0,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 1,  # 'צ'
+        17: 1,  # 'ק'
+        7: 0,  # 'ר'
+        10: 0,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    57: {  # '¾'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 0,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 0,  # 'ה'
+        2: 0,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 0,  # 'ל'
+        11: 0,  # 'ם'
+        6: 0,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 0,  # 'ר'
+        10: 0,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    30: {  # 'ְ'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 1,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 1,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 2,  # 'א'
+        8: 2,  # 'ב'
+        20: 2,  # 'ג'
+        16: 2,  # 'ד'
+        3: 2,  # 'ה'
+        2: 2,  # 'ו'
+        24: 2,  # 'ז'
+        14: 2,  # 'ח'
+        22: 2,  # 'ט'
+        1: 2,  # 'י'
+        25: 2,  # 'ך'
+        15: 2,  # 'כ'
+        4: 2,  # 'ל'
+        11: 1,  # 'ם'
+        6: 2,  # 'מ'
+        23: 0,  # 'ן'
+        12: 2,  # 'נ'
+        19: 2,  # 'ס'
+        13: 2,  # 'ע'
+        26: 0,  # 'ף'
+        18: 2,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 2,  # 'צ'
+        17: 2,  # 'ק'
+        7: 2,  # 'ר'
+        10: 2,  # 'ש'
+        5: 2,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    59: {  # 'ֱ'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 1,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 0,  # 'א'
+        8: 1,  # 'ב'
+        20: 1,  # 'ג'
+        16: 0,  # 'ד'
+        3: 0,  # 'ה'
+        2: 0,  # 'ו'
+        24: 1,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 1,  # 'י'
+        25: 0,  # 'ך'
+        15: 1,  # 'כ'
+        4: 2,  # 'ל'
+        11: 0,  # 'ם'
+        6: 2,  # 'מ'
+        23: 0,  # 'ן'
+        12: 1,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 1,  # 'ר'
+        10: 1,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    41: {  # 'ֲ'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 0,  # 'א'
+        8: 2,  # 'ב'
+        20: 1,  # 'ג'
+        16: 2,  # 'ד'
+        3: 1,  # 'ה'
+        2: 1,  # 'ו'
+        24: 1,  # 'ז'
+        14: 1,  # 'ח'
+        22: 1,  # 'ט'
+        1: 1,  # 'י'
+        25: 1,  # 'ך'
+        15: 1,  # 'כ'
+        4: 2,  # 'ל'
+        11: 0,  # 'ם'
+        6: 2,  # 'מ'
+        23: 0,  # 'ן'
+        12: 2,  # 'נ'
+        19: 1,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 1,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 2,  # 'צ'
+        17: 1,  # 'ק'
+        7: 2,  # 'ר'
+        10: 2,  # 'ש'
+        5: 1,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    33: {  # 'ִ'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 1,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 1,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 1,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 1,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 1,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 1,  # 'א'
+        8: 2,  # 'ב'
+        20: 2,  # 'ג'
+        16: 2,  # 'ד'
+        3: 1,  # 'ה'
+        2: 1,  # 'ו'
+        24: 2,  # 'ז'
+        14: 1,  # 'ח'
+        22: 1,  # 'ט'
+        1: 3,  # 'י'
+        25: 1,  # 'ך'
+        15: 2,  # 'כ'
+        4: 2,  # 'ל'
+        11: 2,  # 'ם'
+        6: 2,  # 'מ'
+        23: 2,  # 'ן'
+        12: 2,  # 'נ'
+        19: 2,  # 'ס'
+        13: 1,  # 'ע'
+        26: 0,  # 'ף'
+        18: 2,  # 'פ'
+        27: 1,  # 'ץ'
+        21: 2,  # 'צ'
+        17: 2,  # 'ק'
+        7: 2,  # 'ר'
+        10: 2,  # 'ש'
+        5: 2,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    37: {  # 'ֵ'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 1,  # 'ֶ'
+        31: 1,  # 'ַ'
+        29: 1,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 2,  # 'א'
+        8: 2,  # 'ב'
+        20: 1,  # 'ג'
+        16: 2,  # 'ד'
+        3: 2,  # 'ה'
+        2: 1,  # 'ו'
+        24: 1,  # 'ז'
+        14: 2,  # 'ח'
+        22: 1,  # 'ט'
+        1: 3,  # 'י'
+        25: 2,  # 'ך'
+        15: 1,  # 'כ'
+        4: 2,  # 'ל'
+        11: 2,  # 'ם'
+        6: 1,  # 'מ'
+        23: 2,  # 'ן'
+        12: 2,  # 'נ'
+        19: 1,  # 'ס'
+        13: 2,  # 'ע'
+        26: 1,  # 'ף'
+        18: 1,  # 'פ'
+        27: 1,  # 'ץ'
+        21: 1,  # 'צ'
+        17: 1,  # 'ק'
+        7: 2,  # 'ר'
+        10: 2,  # 'ש'
+        5: 2,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    36: {  # 'ֶ'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 1,  # 'ֶ'
+        31: 1,  # 'ַ'
+        29: 1,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 2,  # 'א'
+        8: 2,  # 'ב'
+        20: 1,  # 'ג'
+        16: 2,  # 'ד'
+        3: 2,  # 'ה'
+        2: 1,  # 'ו'
+        24: 1,  # 'ז'
+        14: 2,  # 'ח'
+        22: 1,  # 'ט'
+        1: 2,  # 'י'
+        25: 2,  # 'ך'
+        15: 1,  # 'כ'
+        4: 2,  # 'ל'
+        11: 2,  # 'ם'
+        6: 2,  # 'מ'
+        23: 2,  # 'ן'
+        12: 2,  # 'נ'
+        19: 2,  # 'ס'
+        13: 1,  # 'ע'
+        26: 1,  # 'ף'
+        18: 1,  # 'פ'
+        27: 2,  # 'ץ'
+        21: 1,  # 'צ'
+        17: 1,  # 'ק'
+        7: 2,  # 'ר'
+        10: 2,  # 'ש'
+        5: 2,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    31: {  # 'ַ'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 1,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 1,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 2,  # 'א'
+        8: 2,  # 'ב'
+        20: 2,  # 'ג'
+        16: 2,  # 'ד'
+        3: 2,  # 'ה'
+        2: 1,  # 'ו'
+        24: 2,  # 'ז'
+        14: 2,  # 'ח'
+        22: 2,  # 'ט'
+        1: 3,  # 'י'
+        25: 1,  # 'ך'
+        15: 2,  # 'כ'
+        4: 2,  # 'ל'
+        11: 2,  # 'ם'
+        6: 2,  # 'מ'
+        23: 2,  # 'ן'
+        12: 2,  # 'נ'
+        19: 2,  # 'ס'
+        13: 2,  # 'ע'
+        26: 2,  # 'ף'
+        18: 2,  # 'פ'
+        27: 1,  # 'ץ'
+        21: 2,  # 'צ'
+        17: 2,  # 'ק'
+        7: 2,  # 'ר'
+        10: 2,  # 'ש'
+        5: 2,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    29: {  # 'ָ'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 1,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 1,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 2,  # 'א'
+        8: 2,  # 'ב'
+        20: 2,  # 'ג'
+        16: 2,  # 'ד'
+        3: 3,  # 'ה'
+        2: 2,  # 'ו'
+        24: 2,  # 'ז'
+        14: 2,  # 'ח'
+        22: 1,  # 'ט'
+        1: 2,  # 'י'
+        25: 2,  # 'ך'
+        15: 2,  # 'כ'
+        4: 2,  # 'ל'
+        11: 2,  # 'ם'
+        6: 2,  # 'מ'
+        23: 2,  # 'ן'
+        12: 2,  # 'נ'
+        19: 1,  # 'ס'
+        13: 2,  # 'ע'
+        26: 1,  # 'ף'
+        18: 2,  # 'פ'
+        27: 1,  # 'ץ'
+        21: 2,  # 'צ'
+        17: 2,  # 'ק'
+        7: 2,  # 'ר'
+        10: 2,  # 'ש'
+        5: 2,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    35: {  # 'ֹ'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 1,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 2,  # 'א'
+        8: 2,  # 'ב'
+        20: 1,  # 'ג'
+        16: 2,  # 'ד'
+        3: 2,  # 'ה'
+        2: 1,  # 'ו'
+        24: 1,  # 'ז'
+        14: 1,  # 'ח'
+        22: 1,  # 'ט'
+        1: 1,  # 'י'
+        25: 1,  # 'ך'
+        15: 2,  # 'כ'
+        4: 2,  # 'ל'
+        11: 2,  # 'ם'
+        6: 2,  # 'מ'
+        23: 2,  # 'ן'
+        12: 2,  # 'נ'
+        19: 2,  # 'ס'
+        13: 2,  # 'ע'
+        26: 1,  # 'ף'
+        18: 2,  # 'פ'
+        27: 1,  # 'ץ'
+        21: 2,  # 'צ'
+        17: 2,  # 'ק'
+        7: 2,  # 'ר'
+        10: 2,  # 'ש'
+        5: 2,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    62: {  # 'ֻ'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 0,  # 'א'
+        8: 1,  # 'ב'
+        20: 1,  # 'ג'
+        16: 1,  # 'ד'
+        3: 1,  # 'ה'
+        2: 1,  # 'ו'
+        24: 1,  # 'ז'
+        14: 1,  # 'ח'
+        22: 0,  # 'ט'
+        1: 1,  # 'י'
+        25: 0,  # 'ך'
+        15: 1,  # 'כ'
+        4: 2,  # 'ל'
+        11: 1,  # 'ם'
+        6: 1,  # 'מ'
+        23: 1,  # 'ן'
+        12: 1,  # 'נ'
+        19: 1,  # 'ס'
+        13: 1,  # 'ע'
+        26: 0,  # 'ף'
+        18: 1,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 1,  # 'צ'
+        17: 1,  # 'ק'
+        7: 1,  # 'ר'
+        10: 1,  # 'ש'
+        5: 1,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    28: {  # 'ּ'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 3,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 1,  # 'ֲ'
+        33: 3,  # 'ִ'
+        37: 2,  # 'ֵ'
+        36: 2,  # 'ֶ'
+        31: 3,  # 'ַ'
+        29: 3,  # 'ָ'
+        35: 2,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 2,  # 'ׁ'
+        45: 1,  # 'ׂ'
+        9: 2,  # 'א'
+        8: 2,  # 'ב'
+        20: 1,  # 'ג'
+        16: 2,  # 'ד'
+        3: 1,  # 'ה'
+        2: 2,  # 'ו'
+        24: 1,  # 'ז'
+        14: 1,  # 'ח'
+        22: 1,  # 'ט'
+        1: 2,  # 'י'
+        25: 2,  # 'ך'
+        15: 2,  # 'כ'
+        4: 2,  # 'ל'
+        11: 1,  # 'ם'
+        6: 2,  # 'מ'
+        23: 1,  # 'ן'
+        12: 2,  # 'נ'
+        19: 1,  # 'ס'
+        13: 2,  # 'ע'
+        26: 1,  # 'ף'
+        18: 1,  # 'פ'
+        27: 1,  # 'ץ'
+        21: 1,  # 'צ'
+        17: 1,  # 'ק'
+        7: 2,  # 'ר'
+        10: 2,  # 'ש'
+        5: 2,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    38: {  # 'ׁ'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 2,  # 'ֵ'
+        36: 2,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 1,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 0,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 0,  # 'ה'
+        2: 2,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 1,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 0,  # 'ל'
+        11: 0,  # 'ם'
+        6: 0,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 0,  # 'ס'
+        13: 1,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 0,  # 'ר'
+        10: 0,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    45: {  # 'ׂ'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 1,  # 'ֵ'
+        36: 2,  # 'ֶ'
+        31: 1,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 1,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 1,  # 'א'
+        8: 0,  # 'ב'
+        20: 1,  # 'ג'
+        16: 0,  # 'ד'
+        3: 1,  # 'ה'
+        2: 2,  # 'ו'
+        24: 0,  # 'ז'
+        14: 1,  # 'ח'
+        22: 0,  # 'ט'
+        1: 1,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 0,  # 'ל'
+        11: 1,  # 'ם'
+        6: 1,  # 'מ'
+        23: 0,  # 'ן'
+        12: 1,  # 'נ'
+        19: 0,  # 'ס'
+        13: 1,  # 'ע'
+        26: 0,  # 'ף'
+        18: 1,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 1,  # 'ר'
+        10: 0,  # 'ש'
+        5: 1,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    9: {  # 'א'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 1,  # '\xa0'
+        55: 1,  # '´'
+        48: 1,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 2,  # 'ֱ'
+        41: 2,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 2,  # 'ֵ'
+        36: 2,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 2,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 2,  # 'א'
+        8: 3,  # 'ב'
+        20: 3,  # 'ג'
+        16: 3,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 3,  # 'ז'
+        14: 3,  # 'ח'
+        22: 3,  # 'ט'
+        1: 3,  # 'י'
+        25: 3,  # 'ך'
+        15: 3,  # 'כ'
+        4: 3,  # 'ל'
+        11: 3,  # 'ם'
+        6: 3,  # 'מ'
+        23: 3,  # 'ן'
+        12: 3,  # 'נ'
+        19: 3,  # 'ס'
+        13: 2,  # 'ע'
+        26: 3,  # 'ף'
+        18: 3,  # 'פ'
+        27: 1,  # 'ץ'
+        21: 3,  # 'צ'
+        17: 3,  # 'ק'
+        7: 3,  # 'ר'
+        10: 3,  # 'ש'
+        5: 3,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    8: {  # 'ב'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 1,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 1,  # '\xa0'
+        55: 1,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 2,  # 'ֵ'
+        36: 2,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 2,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 3,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 3,  # 'א'
+        8: 3,  # 'ב'
+        20: 3,  # 'ג'
+        16: 3,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 3,  # 'ז'
+        14: 3,  # 'ח'
+        22: 3,  # 'ט'
+        1: 3,  # 'י'
+        25: 2,  # 'ך'
+        15: 3,  # 'כ'
+        4: 3,  # 'ל'
+        11: 2,  # 'ם'
+        6: 3,  # 'מ'
+        23: 3,  # 'ן'
+        12: 3,  # 'נ'
+        19: 3,  # 'ס'
+        13: 3,  # 'ע'
+        26: 1,  # 'ף'
+        18: 3,  # 'פ'
+        27: 2,  # 'ץ'
+        21: 3,  # 'צ'
+        17: 3,  # 'ק'
+        7: 3,  # 'ר'
+        10: 3,  # 'ש'
+        5: 3,  # 'ת'
+        32: 1,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    20: {  # 'ג'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 1,  # '\xa0'
+        55: 2,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 1,  # 'ִ'
+        37: 1,  # 'ֵ'
+        36: 1,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 1,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 2,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 2,  # 'א'
+        8: 3,  # 'ב'
+        20: 2,  # 'ג'
+        16: 3,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 3,  # 'ז'
+        14: 2,  # 'ח'
+        22: 2,  # 'ט'
+        1: 3,  # 'י'
+        25: 1,  # 'ך'
+        15: 1,  # 'כ'
+        4: 3,  # 'ל'
+        11: 3,  # 'ם'
+        6: 3,  # 'מ'
+        23: 3,  # 'ן'
+        12: 3,  # 'נ'
+        19: 2,  # 'ס'
+        13: 3,  # 'ע'
+        26: 2,  # 'ף'
+        18: 2,  # 'פ'
+        27: 1,  # 'ץ'
+        21: 1,  # 'צ'
+        17: 1,  # 'ק'
+        7: 3,  # 'ר'
+        10: 3,  # 'ש'
+        5: 3,  # 'ת'
+        32: 0,  # '–'
+        52: 1,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    16: {  # 'ד'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 2,  # 'ֵ'
+        36: 2,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 2,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 2,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 3,  # 'א'
+        8: 3,  # 'ב'
+        20: 3,  # 'ג'
+        16: 3,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 1,  # 'ז'
+        14: 2,  # 'ח'
+        22: 2,  # 'ט'
+        1: 3,  # 'י'
+        25: 2,  # 'ך'
+        15: 2,  # 'כ'
+        4: 3,  # 'ל'
+        11: 3,  # 'ם'
+        6: 3,  # 'מ'
+        23: 2,  # 'ן'
+        12: 3,  # 'נ'
+        19: 2,  # 'ס'
+        13: 3,  # 'ע'
+        26: 2,  # 'ף'
+        18: 3,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 2,  # 'צ'
+        17: 3,  # 'ק'
+        7: 3,  # 'ר'
+        10: 3,  # 'ש'
+        5: 3,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    3: {  # 'ה'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 1,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 1,  # '\xa0'
+        55: 0,  # '´'
+        48: 1,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 1,  # 'ְ'
+        59: 1,  # 'ֱ'
+        41: 2,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 2,  # 'ֵ'
+        36: 2,  # 'ֶ'
+        31: 3,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 1,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 2,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 3,  # 'א'
+        8: 3,  # 'ב'
+        20: 3,  # 'ג'
+        16: 3,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 3,  # 'ז'
+        14: 3,  # 'ח'
+        22: 3,  # 'ט'
+        1: 3,  # 'י'
+        25: 1,  # 'ך'
+        15: 3,  # 'כ'
+        4: 3,  # 'ל'
+        11: 3,  # 'ם'
+        6: 3,  # 'מ'
+        23: 3,  # 'ן'
+        12: 3,  # 'נ'
+        19: 3,  # 'ס'
+        13: 3,  # 'ע'
+        26: 0,  # 'ף'
+        18: 3,  # 'פ'
+        27: 1,  # 'ץ'
+        21: 3,  # 'צ'
+        17: 3,  # 'ק'
+        7: 3,  # 'ר'
+        10: 3,  # 'ש'
+        5: 3,  # 'ת'
+        32: 1,  # '–'
+        52: 1,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 2,  # '…'
+    },
+    2: {  # 'ו'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 1,  # 't'
+        63: 0,  # 'u'
+        34: 1,  # '\xa0'
+        55: 1,  # '´'
+        48: 1,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 1,  # 'ֵ'
+        36: 1,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 3,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 3,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 3,  # 'א'
+        8: 3,  # 'ב'
+        20: 3,  # 'ג'
+        16: 3,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 3,  # 'ז'
+        14: 3,  # 'ח'
+        22: 3,  # 'ט'
+        1: 3,  # 'י'
+        25: 3,  # 'ך'
+        15: 3,  # 'כ'
+        4: 3,  # 'ל'
+        11: 3,  # 'ם'
+        6: 3,  # 'מ'
+        23: 3,  # 'ן'
+        12: 3,  # 'נ'
+        19: 3,  # 'ס'
+        13: 3,  # 'ע'
+        26: 3,  # 'ף'
+        18: 3,  # 'פ'
+        27: 3,  # 'ץ'
+        21: 3,  # 'צ'
+        17: 3,  # 'ק'
+        7: 3,  # 'ר'
+        10: 3,  # 'ש'
+        5: 3,  # 'ת'
+        32: 1,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 2,  # '…'
+    },
+    24: {  # 'ז'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 1,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 1,  # 'ֲ'
+        33: 1,  # 'ִ'
+        37: 2,  # 'ֵ'
+        36: 2,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 1,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 2,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 3,  # 'א'
+        8: 2,  # 'ב'
+        20: 2,  # 'ג'
+        16: 2,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 2,  # 'ז'
+        14: 2,  # 'ח'
+        22: 1,  # 'ט'
+        1: 3,  # 'י'
+        25: 1,  # 'ך'
+        15: 3,  # 'כ'
+        4: 3,  # 'ל'
+        11: 2,  # 'ם'
+        6: 3,  # 'מ'
+        23: 2,  # 'ן'
+        12: 2,  # 'נ'
+        19: 1,  # 'ס'
+        13: 2,  # 'ע'
+        26: 1,  # 'ף'
+        18: 1,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 2,  # 'צ'
+        17: 3,  # 'ק'
+        7: 3,  # 'ר'
+        10: 1,  # 'ש'
+        5: 2,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    14: {  # 'ח'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 1,  # '\xa0'
+        55: 1,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 1,  # 'ֱ'
+        41: 2,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 2,  # 'ֵ'
+        36: 2,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 2,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 2,  # 'א'
+        8: 3,  # 'ב'
+        20: 2,  # 'ג'
+        16: 3,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 3,  # 'ז'
+        14: 2,  # 'ח'
+        22: 2,  # 'ט'
+        1: 3,  # 'י'
+        25: 1,  # 'ך'
+        15: 2,  # 'כ'
+        4: 3,  # 'ל'
+        11: 3,  # 'ם'
+        6: 3,  # 'מ'
+        23: 2,  # 'ן'
+        12: 3,  # 'נ'
+        19: 3,  # 'ס'
+        13: 1,  # 'ע'
+        26: 2,  # 'ף'
+        18: 2,  # 'פ'
+        27: 2,  # 'ץ'
+        21: 3,  # 'צ'
+        17: 3,  # 'ק'
+        7: 3,  # 'ר'
+        10: 3,  # 'ש'
+        5: 3,  # 'ת'
+        32: 0,  # '–'
+        52: 1,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    22: {  # 'ט'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 1,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 1,  # 'ֵ'
+        36: 1,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 1,  # 'ָ'
+        35: 1,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 1,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 3,  # 'א'
+        8: 3,  # 'ב'
+        20: 3,  # 'ג'
+        16: 1,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 2,  # 'ז'
+        14: 3,  # 'ח'
+        22: 2,  # 'ט'
+        1: 3,  # 'י'
+        25: 1,  # 'ך'
+        15: 2,  # 'כ'
+        4: 3,  # 'ל'
+        11: 2,  # 'ם'
+        6: 2,  # 'מ'
+        23: 2,  # 'ן'
+        12: 3,  # 'נ'
+        19: 2,  # 'ס'
+        13: 3,  # 'ע'
+        26: 2,  # 'ף'
+        18: 3,  # 'פ'
+        27: 1,  # 'ץ'
+        21: 2,  # 'צ'
+        17: 2,  # 'ק'
+        7: 3,  # 'ר'
+        10: 2,  # 'ש'
+        5: 3,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    1: {  # 'י'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 1,  # '\xa0'
+        55: 1,  # '´'
+        48: 1,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 2,  # 'ֵ'
+        36: 1,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 2,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 2,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 3,  # 'א'
+        8: 3,  # 'ב'
+        20: 3,  # 'ג'
+        16: 3,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 3,  # 'ז'
+        14: 3,  # 'ח'
+        22: 3,  # 'ט'
+        1: 3,  # 'י'
+        25: 3,  # 'ך'
+        15: 3,  # 'כ'
+        4: 3,  # 'ל'
+        11: 3,  # 'ם'
+        6: 3,  # 'מ'
+        23: 3,  # 'ן'
+        12: 3,  # 'נ'
+        19: 3,  # 'ס'
+        13: 3,  # 'ע'
+        26: 3,  # 'ף'
+        18: 3,  # 'פ'
+        27: 3,  # 'ץ'
+        21: 3,  # 'צ'
+        17: 3,  # 'ק'
+        7: 3,  # 'ר'
+        10: 3,  # 'ש'
+        5: 3,  # 'ת'
+        32: 1,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 2,  # '…'
+    },
+    25: {  # 'ך'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 1,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 1,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 1,  # 'ה'
+        2: 0,  # 'ו'
+        24: 0,  # 'ז'
+        14: 1,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 1,  # 'ל'
+        11: 0,  # 'ם'
+        6: 1,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 0,  # 'ר'
+        10: 1,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    15: {  # 'כ'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 2,  # 'ֵ'
+        36: 2,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 1,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 3,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 3,  # 'א'
+        8: 3,  # 'ב'
+        20: 2,  # 'ג'
+        16: 3,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 3,  # 'ז'
+        14: 3,  # 'ח'
+        22: 2,  # 'ט'
+        1: 3,  # 'י'
+        25: 3,  # 'ך'
+        15: 3,  # 'כ'
+        4: 3,  # 'ל'
+        11: 3,  # 'ם'
+        6: 3,  # 'מ'
+        23: 3,  # 'ן'
+        12: 3,  # 'נ'
+        19: 3,  # 'ס'
+        13: 2,  # 'ע'
+        26: 3,  # 'ף'
+        18: 3,  # 'פ'
+        27: 1,  # 'ץ'
+        21: 2,  # 'צ'
+        17: 2,  # 'ק'
+        7: 3,  # 'ר'
+        10: 3,  # 'ש'
+        5: 3,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    4: {  # 'ל'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 1,  # '\xa0'
+        55: 1,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 3,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 2,  # 'ֵ'
+        36: 2,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 2,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 2,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 3,  # 'א'
+        8: 3,  # 'ב'
+        20: 3,  # 'ג'
+        16: 3,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 3,  # 'ז'
+        14: 3,  # 'ח'
+        22: 3,  # 'ט'
+        1: 3,  # 'י'
+        25: 3,  # 'ך'
+        15: 3,  # 'כ'
+        4: 3,  # 'ל'
+        11: 3,  # 'ם'
+        6: 3,  # 'מ'
+        23: 2,  # 'ן'
+        12: 3,  # 'נ'
+        19: 3,  # 'ס'
+        13: 3,  # 'ע'
+        26: 2,  # 'ף'
+        18: 3,  # 'פ'
+        27: 2,  # 'ץ'
+        21: 3,  # 'צ'
+        17: 3,  # 'ק'
+        7: 3,  # 'ר'
+        10: 3,  # 'ש'
+        5: 3,  # 'ת'
+        32: 1,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    11: {  # 'ם'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 1,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 1,  # 'א'
+        8: 1,  # 'ב'
+        20: 1,  # 'ג'
+        16: 0,  # 'ד'
+        3: 1,  # 'ה'
+        2: 1,  # 'ו'
+        24: 1,  # 'ז'
+        14: 1,  # 'ח'
+        22: 0,  # 'ט'
+        1: 1,  # 'י'
+        25: 0,  # 'ך'
+        15: 1,  # 'כ'
+        4: 1,  # 'ל'
+        11: 1,  # 'ם'
+        6: 1,  # 'מ'
+        23: 0,  # 'ן'
+        12: 1,  # 'נ'
+        19: 0,  # 'ס'
+        13: 1,  # 'ע'
+        26: 0,  # 'ף'
+        18: 1,  # 'פ'
+        27: 1,  # 'ץ'
+        21: 1,  # 'צ'
+        17: 1,  # 'ק'
+        7: 1,  # 'ר'
+        10: 1,  # 'ש'
+        5: 1,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 2,  # '…'
+    },
+    6: {  # 'מ'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 1,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 2,  # 'ֵ'
+        36: 2,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 2,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 2,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 3,  # 'א'
+        8: 3,  # 'ב'
+        20: 3,  # 'ג'
+        16: 3,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 3,  # 'ז'
+        14: 3,  # 'ח'
+        22: 3,  # 'ט'
+        1: 3,  # 'י'
+        25: 2,  # 'ך'
+        15: 3,  # 'כ'
+        4: 3,  # 'ל'
+        11: 3,  # 'ם'
+        6: 3,  # 'מ'
+        23: 3,  # 'ן'
+        12: 3,  # 'נ'
+        19: 3,  # 'ס'
+        13: 3,  # 'ע'
+        26: 0,  # 'ף'
+        18: 3,  # 'פ'
+        27: 2,  # 'ץ'
+        21: 3,  # 'צ'
+        17: 3,  # 'ק'
+        7: 3,  # 'ר'
+        10: 3,  # 'ש'
+        5: 3,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    23: {  # 'ן'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 1,  # '\xa0'
+        55: 0,  # '´'
+        48: 1,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 1,  # 'א'
+        8: 1,  # 'ב'
+        20: 1,  # 'ג'
+        16: 1,  # 'ד'
+        3: 1,  # 'ה'
+        2: 1,  # 'ו'
+        24: 0,  # 'ז'
+        14: 1,  # 'ח'
+        22: 1,  # 'ט'
+        1: 1,  # 'י'
+        25: 0,  # 'ך'
+        15: 1,  # 'כ'
+        4: 1,  # 'ל'
+        11: 1,  # 'ם'
+        6: 1,  # 'מ'
+        23: 0,  # 'ן'
+        12: 1,  # 'נ'
+        19: 1,  # 'ס'
+        13: 1,  # 'ע'
+        26: 1,  # 'ף'
+        18: 1,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 1,  # 'ק'
+        7: 1,  # 'ר'
+        10: 1,  # 'ש'
+        5: 1,  # 'ת'
+        32: 1,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 2,  # '…'
+    },
+    12: {  # 'נ'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 2,  # 'ֵ'
+        36: 2,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 1,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 2,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 3,  # 'א'
+        8: 3,  # 'ב'
+        20: 3,  # 'ג'
+        16: 3,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 3,  # 'ז'
+        14: 3,  # 'ח'
+        22: 3,  # 'ט'
+        1: 3,  # 'י'
+        25: 2,  # 'ך'
+        15: 3,  # 'כ'
+        4: 3,  # 'ל'
+        11: 3,  # 'ם'
+        6: 3,  # 'מ'
+        23: 3,  # 'ן'
+        12: 3,  # 'נ'
+        19: 3,  # 'ס'
+        13: 3,  # 'ע'
+        26: 2,  # 'ף'
+        18: 3,  # 'פ'
+        27: 2,  # 'ץ'
+        21: 3,  # 'צ'
+        17: 3,  # 'ק'
+        7: 3,  # 'ר'
+        10: 3,  # 'ש'
+        5: 3,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    19: {  # 'ס'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 1,  # '\xa0'
+        55: 1,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 1,  # 'ֵ'
+        36: 2,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 1,  # 'ָ'
+        35: 1,  # 'ֹ'
+        62: 2,  # 'ֻ'
+        28: 2,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 2,  # 'א'
+        8: 3,  # 'ב'
+        20: 3,  # 'ג'
+        16: 3,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 1,  # 'ז'
+        14: 3,  # 'ח'
+        22: 3,  # 'ט'
+        1: 3,  # 'י'
+        25: 2,  # 'ך'
+        15: 3,  # 'כ'
+        4: 3,  # 'ל'
+        11: 2,  # 'ם'
+        6: 3,  # 'מ'
+        23: 2,  # 'ן'
+        12: 3,  # 'נ'
+        19: 2,  # 'ס'
+        13: 3,  # 'ע'
+        26: 3,  # 'ף'
+        18: 3,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 2,  # 'צ'
+        17: 3,  # 'ק'
+        7: 3,  # 'ר'
+        10: 1,  # 'ש'
+        5: 3,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    13: {  # 'ע'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 1,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 1,  # 'ְ'
+        59: 1,  # 'ֱ'
+        41: 2,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 2,  # 'ֵ'
+        36: 2,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 2,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 2,  # 'א'
+        8: 3,  # 'ב'
+        20: 3,  # 'ג'
+        16: 3,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 3,  # 'ז'
+        14: 1,  # 'ח'
+        22: 3,  # 'ט'
+        1: 3,  # 'י'
+        25: 2,  # 'ך'
+        15: 2,  # 'כ'
+        4: 3,  # 'ל'
+        11: 3,  # 'ם'
+        6: 3,  # 'מ'
+        23: 2,  # 'ן'
+        12: 3,  # 'נ'
+        19: 3,  # 'ס'
+        13: 2,  # 'ע'
+        26: 1,  # 'ף'
+        18: 2,  # 'פ'
+        27: 2,  # 'ץ'
+        21: 3,  # 'צ'
+        17: 3,  # 'ק'
+        7: 3,  # 'ר'
+        10: 3,  # 'ש'
+        5: 3,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    26: {  # 'ף'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 1,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 0,  # 'ה'
+        2: 1,  # 'ו'
+        24: 0,  # 'ז'
+        14: 1,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 1,  # 'כ'
+        4: 1,  # 'ל'
+        11: 0,  # 'ם'
+        6: 1,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 1,  # 'ס'
+        13: 0,  # 'ע'
+        26: 1,  # 'ף'
+        18: 1,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 1,  # 'ק'
+        7: 1,  # 'ר'
+        10: 1,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    18: {  # 'פ'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 1,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 1,  # 'ֵ'
+        36: 2,  # 'ֶ'
+        31: 1,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 1,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 2,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 3,  # 'א'
+        8: 2,  # 'ב'
+        20: 3,  # 'ג'
+        16: 2,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 2,  # 'ז'
+        14: 3,  # 'ח'
+        22: 3,  # 'ט'
+        1: 3,  # 'י'
+        25: 2,  # 'ך'
+        15: 3,  # 'כ'
+        4: 3,  # 'ל'
+        11: 2,  # 'ם'
+        6: 2,  # 'מ'
+        23: 3,  # 'ן'
+        12: 3,  # 'נ'
+        19: 3,  # 'ס'
+        13: 3,  # 'ע'
+        26: 2,  # 'ף'
+        18: 2,  # 'פ'
+        27: 2,  # 'ץ'
+        21: 3,  # 'צ'
+        17: 3,  # 'ק'
+        7: 3,  # 'ר'
+        10: 3,  # 'ש'
+        5: 3,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    27: {  # 'ץ'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 1,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 1,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 0,  # 'ה'
+        2: 0,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 1,  # 'ל'
+        11: 0,  # 'ם'
+        6: 0,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 1,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 1,  # 'ר'
+        10: 0,  # 'ש'
+        5: 1,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    21: {  # 'צ'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 1,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 2,  # 'ֵ'
+        36: 1,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 1,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 2,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 3,  # 'א'
+        8: 3,  # 'ב'
+        20: 2,  # 'ג'
+        16: 3,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 1,  # 'ז'
+        14: 3,  # 'ח'
+        22: 2,  # 'ט'
+        1: 3,  # 'י'
+        25: 1,  # 'ך'
+        15: 1,  # 'כ'
+        4: 3,  # 'ל'
+        11: 2,  # 'ם'
+        6: 3,  # 'מ'
+        23: 2,  # 'ן'
+        12: 3,  # 'נ'
+        19: 1,  # 'ס'
+        13: 3,  # 'ע'
+        26: 2,  # 'ף'
+        18: 3,  # 'פ'
+        27: 2,  # 'ץ'
+        21: 2,  # 'צ'
+        17: 3,  # 'ק'
+        7: 3,  # 'ר'
+        10: 0,  # 'ש'
+        5: 3,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    17: {  # 'ק'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 1,  # '\xa0'
+        55: 1,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 2,  # 'ֵ'
+        36: 1,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 2,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 2,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 3,  # 'א'
+        8: 3,  # 'ב'
+        20: 2,  # 'ג'
+        16: 3,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 2,  # 'ז'
+        14: 3,  # 'ח'
+        22: 3,  # 'ט'
+        1: 3,  # 'י'
+        25: 1,  # 'ך'
+        15: 1,  # 'כ'
+        4: 3,  # 'ל'
+        11: 2,  # 'ם'
+        6: 3,  # 'מ'
+        23: 2,  # 'ן'
+        12: 3,  # 'נ'
+        19: 3,  # 'ס'
+        13: 3,  # 'ע'
+        26: 2,  # 'ף'
+        18: 3,  # 'פ'
+        27: 2,  # 'ץ'
+        21: 3,  # 'צ'
+        17: 2,  # 'ק'
+        7: 3,  # 'ר'
+        10: 3,  # 'ש'
+        5: 3,  # 'ת'
+        32: 0,  # '–'
+        52: 1,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    7: {  # 'ר'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 1,  # '\xa0'
+        55: 2,  # '´'
+        48: 1,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 1,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 2,  # 'ֵ'
+        36: 2,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 2,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 3,  # 'א'
+        8: 3,  # 'ב'
+        20: 3,  # 'ג'
+        16: 3,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 3,  # 'ז'
+        14: 3,  # 'ח'
+        22: 3,  # 'ט'
+        1: 3,  # 'י'
+        25: 3,  # 'ך'
+        15: 3,  # 'כ'
+        4: 3,  # 'ל'
+        11: 3,  # 'ם'
+        6: 3,  # 'מ'
+        23: 3,  # 'ן'
+        12: 3,  # 'נ'
+        19: 3,  # 'ס'
+        13: 3,  # 'ע'
+        26: 2,  # 'ף'
+        18: 3,  # 'פ'
+        27: 3,  # 'ץ'
+        21: 3,  # 'צ'
+        17: 3,  # 'ק'
+        7: 3,  # 'ר'
+        10: 3,  # 'ש'
+        5: 3,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 2,  # '…'
+    },
+    10: {  # 'ש'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 1,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 1,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 1,  # 'ִ'
+        37: 1,  # 'ֵ'
+        36: 1,  # 'ֶ'
+        31: 1,  # 'ַ'
+        29: 1,  # 'ָ'
+        35: 1,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 2,  # 'ּ'
+        38: 3,  # 'ׁ'
+        45: 2,  # 'ׂ'
+        9: 3,  # 'א'
+        8: 3,  # 'ב'
+        20: 3,  # 'ג'
+        16: 3,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 2,  # 'ז'
+        14: 3,  # 'ח'
+        22: 3,  # 'ט'
+        1: 3,  # 'י'
+        25: 3,  # 'ך'
+        15: 3,  # 'כ'
+        4: 3,  # 'ל'
+        11: 3,  # 'ם'
+        6: 3,  # 'מ'
+        23: 2,  # 'ן'
+        12: 3,  # 'נ'
+        19: 2,  # 'ס'
+        13: 3,  # 'ע'
+        26: 2,  # 'ף'
+        18: 3,  # 'פ'
+        27: 1,  # 'ץ'
+        21: 2,  # 'צ'
+        17: 3,  # 'ק'
+        7: 3,  # 'ר'
+        10: 3,  # 'ש'
+        5: 3,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 1,  # '…'
+    },
+    5: {  # 'ת'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 1,  # '\xa0'
+        55: 0,  # '´'
+        48: 1,  # '¼'
+        39: 1,  # '½'
+        57: 0,  # '¾'
+        30: 2,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 2,  # 'ִ'
+        37: 2,  # 'ֵ'
+        36: 2,  # 'ֶ'
+        31: 2,  # 'ַ'
+        29: 2,  # 'ָ'
+        35: 1,  # 'ֹ'
+        62: 1,  # 'ֻ'
+        28: 2,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 3,  # 'א'
+        8: 3,  # 'ב'
+        20: 3,  # 'ג'
+        16: 2,  # 'ד'
+        3: 3,  # 'ה'
+        2: 3,  # 'ו'
+        24: 2,  # 'ז'
+        14: 3,  # 'ח'
+        22: 2,  # 'ט'
+        1: 3,  # 'י'
+        25: 2,  # 'ך'
+        15: 3,  # 'כ'
+        4: 3,  # 'ל'
+        11: 3,  # 'ם'
+        6: 3,  # 'מ'
+        23: 3,  # 'ן'
+        12: 3,  # 'נ'
+        19: 2,  # 'ס'
+        13: 3,  # 'ע'
+        26: 2,  # 'ף'
+        18: 3,  # 'פ'
+        27: 1,  # 'ץ'
+        21: 2,  # 'צ'
+        17: 3,  # 'ק'
+        7: 3,  # 'ר'
+        10: 3,  # 'ש'
+        5: 3,  # 'ת'
+        32: 1,  # '–'
+        52: 1,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 2,  # '…'
+    },
+    32: {  # '–'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 1,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 1,  # 'א'
+        8: 1,  # 'ב'
+        20: 1,  # 'ג'
+        16: 1,  # 'ד'
+        3: 1,  # 'ה'
+        2: 1,  # 'ו'
+        24: 0,  # 'ז'
+        14: 1,  # 'ח'
+        22: 0,  # 'ט'
+        1: 1,  # 'י'
+        25: 0,  # 'ך'
+        15: 1,  # 'כ'
+        4: 1,  # 'ל'
+        11: 0,  # 'ם'
+        6: 1,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 1,  # 'ס'
+        13: 1,  # 'ע'
+        26: 0,  # 'ף'
+        18: 1,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 1,  # 'צ'
+        17: 0,  # 'ק'
+        7: 1,  # 'ר'
+        10: 1,  # 'ש'
+        5: 1,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    52: {  # '’'
+        50: 1,  # 'a'
+        60: 0,  # 'c'
+        61: 1,  # 'd'
+        42: 1,  # 'e'
+        53: 1,  # 'i'
+        56: 1,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 1,  # 'r'
+        43: 2,  # 's'
+        44: 2,  # 't'
+        63: 1,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 0,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 0,  # 'ה'
+        2: 1,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 0,  # 'ל'
+        11: 0,  # 'ם'
+        6: 1,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 0,  # 'ר'
+        10: 0,  # 'ש'
+        5: 1,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    47: {  # '“'
+        50: 1,  # 'a'
+        60: 1,  # 'c'
+        61: 1,  # 'd'
+        42: 1,  # 'e'
+        53: 1,  # 'i'
+        56: 1,  # 'l'
+        54: 1,  # 'n'
+        49: 1,  # 'o'
+        51: 1,  # 'r'
+        43: 1,  # 's'
+        44: 1,  # 't'
+        63: 1,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 2,  # 'א'
+        8: 1,  # 'ב'
+        20: 1,  # 'ג'
+        16: 1,  # 'ד'
+        3: 1,  # 'ה'
+        2: 1,  # 'ו'
+        24: 1,  # 'ז'
+        14: 1,  # 'ח'
+        22: 1,  # 'ט'
+        1: 1,  # 'י'
+        25: 0,  # 'ך'
+        15: 1,  # 'כ'
+        4: 1,  # 'ל'
+        11: 0,  # 'ם'
+        6: 1,  # 'מ'
+        23: 0,  # 'ן'
+        12: 1,  # 'נ'
+        19: 1,  # 'ס'
+        13: 1,  # 'ע'
+        26: 0,  # 'ף'
+        18: 1,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 1,  # 'צ'
+        17: 1,  # 'ק'
+        7: 1,  # 'ר'
+        10: 1,  # 'ש'
+        5: 1,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    46: {  # '”'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 1,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 1,  # 'א'
+        8: 1,  # 'ב'
+        20: 1,  # 'ג'
+        16: 0,  # 'ד'
+        3: 0,  # 'ה'
+        2: 0,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 1,  # 'י'
+        25: 0,  # 'ך'
+        15: 1,  # 'כ'
+        4: 1,  # 'ל'
+        11: 0,  # 'ם'
+        6: 1,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 1,  # 'צ'
+        17: 0,  # 'ק'
+        7: 1,  # 'ר'
+        10: 0,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 0,  # '†'
+        40: 0,  # '…'
+    },
+    58: {  # '†'
+        50: 0,  # 'a'
+        60: 0,  # 'c'
+        61: 0,  # 'd'
+        42: 0,  # 'e'
+        53: 0,  # 'i'
+        56: 0,  # 'l'
+        54: 0,  # 'n'
+        49: 0,  # 'o'
+        51: 0,  # 'r'
+        43: 0,  # 's'
+        44: 0,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 0,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 0,  # 'ה'
+        2: 0,  # 'ו'
+        24: 0,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 0,  # 'י'
+        25: 0,  # 'ך'
+        15: 0,  # 'כ'
+        4: 0,  # 'ל'
+        11: 0,  # 'ם'
+        6: 0,  # 'מ'
+        23: 0,  # 'ן'
+        12: 0,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 0,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 0,  # 'ר'
+        10: 0,  # 'ש'
+        5: 0,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 0,  # '”'
+        58: 2,  # '†'
+        40: 0,  # '…'
+    },
+    40: {  # '…'
+        50: 1,  # 'a'
+        60: 1,  # 'c'
+        61: 1,  # 'd'
+        42: 1,  # 'e'
+        53: 1,  # 'i'
+        56: 0,  # 'l'
+        54: 1,  # 'n'
+        49: 0,  # 'o'
+        51: 1,  # 'r'
+        43: 1,  # 's'
+        44: 1,  # 't'
+        63: 0,  # 'u'
+        34: 0,  # '\xa0'
+        55: 0,  # '´'
+        48: 0,  # '¼'
+        39: 0,  # '½'
+        57: 0,  # '¾'
+        30: 0,  # 'ְ'
+        59: 0,  # 'ֱ'
+        41: 0,  # 'ֲ'
+        33: 0,  # 'ִ'
+        37: 0,  # 'ֵ'
+        36: 0,  # 'ֶ'
+        31: 0,  # 'ַ'
+        29: 0,  # 'ָ'
+        35: 0,  # 'ֹ'
+        62: 0,  # 'ֻ'
+        28: 0,  # 'ּ'
+        38: 0,  # 'ׁ'
+        45: 0,  # 'ׂ'
+        9: 1,  # 'א'
+        8: 0,  # 'ב'
+        20: 0,  # 'ג'
+        16: 0,  # 'ד'
+        3: 1,  # 'ה'
+        2: 1,  # 'ו'
+        24: 1,  # 'ז'
+        14: 0,  # 'ח'
+        22: 0,  # 'ט'
+        1: 1,  # 'י'
+        25: 0,  # 'ך'
+        15: 1,  # 'כ'
+        4: 1,  # 'ל'
+        11: 0,  # 'ם'
+        6: 1,  # 'מ'
+        23: 0,  # 'ן'
+        12: 1,  # 'נ'
+        19: 0,  # 'ס'
+        13: 0,  # 'ע'
+        26: 0,  # 'ף'
+        18: 1,  # 'פ'
+        27: 0,  # 'ץ'
+        21: 0,  # 'צ'
+        17: 0,  # 'ק'
+        7: 1,  # 'ר'
+        10: 1,  # 'ש'
+        5: 1,  # 'ת'
+        32: 0,  # '–'
+        52: 0,  # '’'
+        47: 0,  # '“'
+        46: 1,  # '”'
+        58: 0,  # '†'
+        40: 2,  # '…'
+    },
+}
+
+# 255: Undefined characters that did not exist in training text
+# 254: Carriage Return / Line Feed
+# 253: symbols (punctuation) that do not belong to a word
+# 252: 0 - 9
+# 251: Control characters
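+# Lower order values correspond to more frequent characters in the training
+# text; for example, byte 0xE9 ('י') maps to order 1 and byte 0xE5 ('ו') to
+# order 2 in the table below.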
+
+# Character Mapping Table(s):
+WINDOWS_1255_HEBREW_CHAR_TO_ORDER = {
+    0: 255,  # '\x00'
+    1: 255,  # '\x01'
+    2: 255,  # '\x02'
+    3: 255,  # '\x03'
+    4: 255,  # '\x04'
+    5: 255,  # '\x05'
+    6: 255,  # '\x06'
+    7: 255,  # '\x07'
+    8: 255,  # '\x08'
+    9: 255,  # '\t'
+    10: 254,  # '\n'
+    11: 255,  # '\x0b'
+    12: 255,  # '\x0c'
+    13: 254,  # '\r'
+    14: 255,  # '\x0e'
+    15: 255,  # '\x0f'
+    16: 255,  # '\x10'
+    17: 255,  # '\x11'
+    18: 255,  # '\x12'
+    19: 255,  # '\x13'
+    20: 255,  # '\x14'
+    21: 255,  # '\x15'
+    22: 255,  # '\x16'
+    23: 255,  # '\x17'
+    24: 255,  # '\x18'
+    25: 255,  # '\x19'
+    26: 255,  # '\x1a'
+    27: 255,  # '\x1b'
+    28: 255,  # '\x1c'
+    29: 255,  # '\x1d'
+    30: 255,  # '\x1e'
+    31: 255,  # '\x1f'
+    32: 253,  # ' '
+    33: 253,  # '!'
+    34: 253,  # '"'
+    35: 253,  # '#'
+    36: 253,  # '$'
+    37: 253,  # '%'
+    38: 253,  # '&'
+    39: 253,  # "'"
+    40: 253,  # '('
+    41: 253,  # ')'
+    42: 253,  # '*'
+    43: 253,  # '+'
+    44: 253,  # ','
+    45: 253,  # '-'
+    46: 253,  # '.'
+    47: 253,  # '/'
+    48: 252,  # '0'
+    49: 252,  # '1'
+    50: 252,  # '2'
+    51: 252,  # '3'
+    52: 252,  # '4'
+    53: 252,  # '5'
+    54: 252,  # '6'
+    55: 252,  # '7'
+    56: 252,  # '8'
+    57: 252,  # '9'
+    58: 253,  # ':'
+    59: 253,  # ';'
+    60: 253,  # '<'
+    61: 253,  # '='
+    62: 253,  # '>'
+    63: 253,  # '?'
+    64: 253,  # '@'
+    65: 69,  # 'A'
+    66: 91,  # 'B'
+    67: 79,  # 'C'
+    68: 80,  # 'D'
+    69: 92,  # 'E'
+    70: 89,  # 'F'
+    71: 97,  # 'G'
+    72: 90,  # 'H'
+    73: 68,  # 'I'
+    74: 111,  # 'J'
+    75: 112,  # 'K'
+    76: 82,  # 'L'
+    77: 73,  # 'M'
+    78: 95,  # 'N'
+    79: 85,  # 'O'
+    80: 78,  # 'P'
+    81: 121,  # 'Q'
+    82: 86,  # 'R'
+    83: 71,  # 'S'
+    84: 67,  # 'T'
+    85: 102,  # 'U'
+    86: 107,  # 'V'
+    87: 84,  # 'W'
+    88: 114,  # 'X'
+    89: 103,  # 'Y'
+    90: 115,  # 'Z'
+    91: 253,  # '['
+    92: 253,  # '\\'
+    93: 253,  # ']'
+    94: 253,  # '^'
+    95: 253,  # '_'
+    96: 253,  # '`'
+    97: 50,  # 'a'
+    98: 74,  # 'b'
+    99: 60,  # 'c'
+    100: 61,  # 'd'
+    101: 42,  # 'e'
+    102: 76,  # 'f'
+    103: 70,  # 'g'
+    104: 64,  # 'h'
+    105: 53,  # 'i'
+    106: 105,  # 'j'
+    107: 93,  # 'k'
+    108: 56,  # 'l'
+    109: 65,  # 'm'
+    110: 54,  # 'n'
+    111: 49,  # 'o'
+    112: 66,  # 'p'
+    113: 110,  # 'q'
+    114: 51,  # 'r'
+    115: 43,  # 's'
+    116: 44,  # 't'
+    117: 63,  # 'u'
+    118: 81,  # 'v'
+    119: 77,  # 'w'
+    120: 98,  # 'x'
+    121: 75,  # 'y'
+    122: 108,  # 'z'
+    123: 253,  # '{'
+    124: 253,  # '|'
+    125: 253,  # '}'
+    126: 253,  # '~'
+    127: 253,  # '\x7f'
+    128: 124,  # '€'
+    129: 202,  # None
+    130: 203,  # '‚'
+    131: 204,  # 'ƒ'
+    132: 205,  # '„'
+    133: 40,  # '…'
+    134: 58,  # '†'
+    135: 206,  # '‡'
+    136: 207,  # 'ˆ'
+    137: 208,  # '‰'
+    138: 209,  # None
+    139: 210,  # '‹'
+    140: 211,  # None
+    141: 212,  # None
+    142: 213,  # None
+    143: 214,  # None
+    144: 215,  # None
+    145: 83,  # '‘'
+    146: 52,  # '’'
+    147: 47,  # '“'
+    148: 46,  # '”'
+    149: 72,  # '•'
+    150: 32,  # '–'
+    151: 94,  # '—'
+    152: 216,  # '˜'
+    153: 113,  # '™'
+    154: 217,  # None
+    155: 109,  # '›'
+    156: 218,  # None
+    157: 219,  # None
+    158: 220,  # None
+    159: 221,  # None
+    160: 34,  # '\xa0'
+    161: 116,  # '¡'
+    162: 222,  # '¢'
+    163: 118,  # '£'
+    164: 100,  # '₪'
+    165: 223,  # '¥'
+    166: 224,  # '¦'
+    167: 117,  # '§'
+    168: 119,  # '¨'
+    169: 104,  # '©'
+    170: 125,  # '×'
+    171: 225,  # '«'
+    172: 226,  # '¬'
+    173: 87,  # '\xad'
+    174: 99,  # '®'
+    175: 227,  # '¯'
+    176: 106,  # '°'
+    177: 122,  # '±'
+    178: 123,  # '²'
+    179: 228,  # '³'
+    180: 55,  # '´'
+    181: 229,  # 'µ'
+    182: 230,  # '¶'
+    183: 101,  # '·'
+    184: 231,  # '¸'
+    185: 232,  # '¹'
+    186: 120,  # '÷'
+    187: 233,  # '»'
+    188: 48,  # '¼'
+    189: 39,  # '½'
+    190: 57,  # '¾'
+    191: 234,  # '¿'
+    192: 30,  # 'ְ'
+    193: 59,  # 'ֱ'
+    194: 41,  # 'ֲ'
+    195: 88,  # 'ֳ'
+    196: 33,  # 'ִ'
+    197: 37,  # 'ֵ'
+    198: 36,  # 'ֶ'
+    199: 31,  # 'ַ'
+    200: 29,  # 'ָ'
+    201: 35,  # 'ֹ'
+    202: 235,  # None
+    203: 62,  # 'ֻ'
+    204: 28,  # 'ּ'
+    205: 236,  # 'ֽ'
+    206: 126,  # '־'
+    207: 237,  # 'ֿ'
+    208: 238,  # '׀'
+    209: 38,  # 'ׁ'
+    210: 45,  # 'ׂ'
+    211: 239,  # '׃'
+    212: 240,  # 'װ'
+    213: 241,  # 'ױ'
+    214: 242,  # 'ײ'
+    215: 243,  # '׳'
+    216: 127,  # '״'
+    217: 244,  # None
+    218: 245,  # None
+    219: 246,  # None
+    220: 247,  # None
+    221: 248,  # None
+    222: 249,  # None
+    223: 250,  # None
+    224: 9,  # 'א'
+    225: 8,  # 'ב'
+    226: 20,  # 'ג'
+    227: 16,  # 'ד'
+    228: 3,  # 'ה'
+    229: 2,  # 'ו'
+    230: 24,  # 'ז'
+    231: 14,  # 'ח'
+    232: 22,  # 'ט'
+    233: 1,  # 'י'
+    234: 25,  # 'ך'
+    235: 15,  # 'כ'
+    236: 4,  # 'ל'
+    237: 11,  # 'ם'
+    238: 6,  # 'מ'
+    239: 23,  # 'ן'
+    240: 12,  # 'נ'
+    241: 19,  # 'ס'
+    242: 13,  # 'ע'
+    243: 26,  # 'ף'
+    244: 18,  # 'פ'
+    245: 27,  # 'ץ'
+    246: 21,  # 'צ'
+    247: 17,  # 'ק'
+    248: 7,  # 'ר'
+    249: 10,  # 'ש'
+    250: 5,  # 'ת'
+    251: 251,  # None
+    252: 252,  # None
+    253: 128,  # '\u200e'
+    254: 96,  # '\u200f'
+    255: 253,  # None
+}
+
+WINDOWS_1255_HEBREW_MODEL = SingleByteCharSetModel(
+    charset_name="windows-1255",
+    language="Hebrew",
+    char_to_order_map=WINDOWS_1255_HEBREW_CHAR_TO_ORDER,
+    language_model=HEBREW_LANG_MODEL,
+    typical_positive_ratio=0.984004,
+    keep_ascii_letters=False,
+    alphabet="אבגדהוזחטיךכלםמןנסעףפץצקרשתװױײ",
+)
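+
+# A minimal usage sketch (illustrative only): chardet consumes a
+# SingleByteCharSetModel through SingleByteCharSetProber, which is fed raw
+# bytes and reports a confidence for its charset/language pair. The sample
+# bytes below are assumed to spell a short Hebrew word in windows-1255;
+# confidence stays low for such a tiny input.
+if __name__ == "__main__":
+    from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetProber
+
+    prober = SingleByteCharSetProber(model=WINDOWS_1255_HEBREW_MODEL)
+    prober.feed(b"\xf9\xec\xe5\xed")  # assumed windows-1255 Hebrew sample
+    print(prober.charset_name, prober.get_confidence())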
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langhungarianmodel.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langhungarianmodel.py
new file mode 100644
index 0000000..09a0d32
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langhungarianmodel.py
@@ -0,0 +1,4649 @@
+from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
+
+# 3: Positive
+# 2: Likely
+# 1: Unlikely
+# 0: Negative
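+#
+# For example, HUNGARIAN_LANG_MODEL[28][43] == 2 below reads as: 'R' (order
+# 43) is a "Likely" follower of 'A' (order 28) in the training text.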
+
+HUNGARIAN_LANG_MODEL = {
+    28: {  # 'A'
+        28: 0,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 2,  # 'D'
+        32: 1,  # 'E'
+        50: 1,  # 'F'
+        49: 2,  # 'G'
+        38: 1,  # 'H'
+        39: 2,  # 'I'
+        53: 1,  # 'J'
+        36: 2,  # 'K'
+        41: 2,  # 'L'
+        34: 1,  # 'M'
+        35: 2,  # 'N'
+        47: 1,  # 'O'
+        46: 2,  # 'P'
+        43: 2,  # 'R'
+        33: 2,  # 'S'
+        37: 2,  # 'T'
+        57: 1,  # 'U'
+        48: 1,  # 'V'
+        55: 1,  # 'Y'
+        52: 2,  # 'Z'
+        2: 0,  # 'a'
+        18: 1,  # 'b'
+        26: 1,  # 'c'
+        17: 2,  # 'd'
+        1: 1,  # 'e'
+        27: 1,  # 'f'
+        12: 1,  # 'g'
+        20: 1,  # 'h'
+        9: 1,  # 'i'
+        22: 1,  # 'j'
+        7: 2,  # 'k'
+        6: 2,  # 'l'
+        13: 2,  # 'm'
+        4: 2,  # 'n'
+        8: 0,  # 'o'
+        23: 2,  # 'p'
+        10: 2,  # 'r'
+        5: 1,  # 's'
+        3: 1,  # 't'
+        21: 1,  # 'u'
+        19: 1,  # 'v'
+        62: 1,  # 'x'
+        16: 0,  # 'y'
+        11: 3,  # 'z'
+        51: 1,  # 'Á'
+        44: 0,  # 'É'
+        61: 1,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 0,  # 'á'
+        15: 0,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    40: {  # 'B'
+        28: 2,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 2,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 1,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 1,  # 'L'
+        34: 0,  # 'M'
+        35: 1,  # 'N'
+        47: 2,  # 'O'
+        46: 0,  # 'P'
+        43: 1,  # 'R'
+        33: 1,  # 'S'
+        37: 1,  # 'T'
+        57: 1,  # 'U'
+        48: 1,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 2,  # 'a'
+        18: 0,  # 'b'
+        26: 0,  # 'c'
+        17: 0,  # 'd'
+        1: 3,  # 'e'
+        27: 0,  # 'f'
+        12: 0,  # 'g'
+        20: 0,  # 'h'
+        9: 2,  # 'i'
+        22: 1,  # 'j'
+        7: 0,  # 'k'
+        6: 1,  # 'l'
+        13: 0,  # 'm'
+        4: 0,  # 'n'
+        8: 2,  # 'o'
+        23: 1,  # 'p'
+        10: 2,  # 'r'
+        5: 0,  # 's'
+        3: 0,  # 't'
+        21: 3,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 0,  # 'z'
+        51: 1,  # 'Á'
+        44: 1,  # 'É'
+        61: 1,  # 'Í'
+        58: 1,  # 'Ó'
+        59: 1,  # 'Ö'
+        60: 1,  # 'Ú'
+        63: 1,  # 'Ü'
+        14: 2,  # 'á'
+        15: 2,  # 'é'
+        30: 1,  # 'í'
+        25: 1,  # 'ó'
+        24: 1,  # 'ö'
+        31: 1,  # 'ú'
+        29: 1,  # 'ü'
+        42: 1,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    54: {  # 'C'
+        28: 1,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 1,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 1,  # 'H'
+        39: 2,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 1,  # 'L'
+        34: 1,  # 'M'
+        35: 0,  # 'N'
+        47: 1,  # 'O'
+        46: 1,  # 'P'
+        43: 1,  # 'R'
+        33: 2,  # 'S'
+        37: 1,  # 'T'
+        57: 1,  # 'U'
+        48: 0,  # 'V'
+        55: 1,  # 'Y'
+        52: 1,  # 'Z'
+        2: 2,  # 'a'
+        18: 0,  # 'b'
+        26: 0,  # 'c'
+        17: 0,  # 'd'
+        1: 1,  # 'e'
+        27: 0,  # 'f'
+        12: 0,  # 'g'
+        20: 1,  # 'h'
+        9: 1,  # 'i'
+        22: 0,  # 'j'
+        7: 0,  # 'k'
+        6: 1,  # 'l'
+        13: 0,  # 'm'
+        4: 0,  # 'n'
+        8: 2,  # 'o'
+        23: 0,  # 'p'
+        10: 1,  # 'r'
+        5: 3,  # 's'
+        3: 0,  # 't'
+        21: 1,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 1,  # 'z'
+        51: 1,  # 'Á'
+        44: 1,  # 'É'
+        61: 1,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 1,  # 'á'
+        15: 1,  # 'é'
+        30: 1,  # 'í'
+        25: 1,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    45: {  # 'D'
+        28: 2,  # 'A'
+        40: 1,  # 'B'
+        54: 0,  # 'C'
+        45: 1,  # 'D'
+        32: 2,  # 'E'
+        50: 1,  # 'F'
+        49: 1,  # 'G'
+        38: 1,  # 'H'
+        39: 2,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 0,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 2,  # 'O'
+        46: 0,  # 'P'
+        43: 1,  # 'R'
+        33: 1,  # 'S'
+        37: 1,  # 'T'
+        57: 1,  # 'U'
+        48: 1,  # 'V'
+        55: 1,  # 'Y'
+        52: 1,  # 'Z'
+        2: 2,  # 'a'
+        18: 0,  # 'b'
+        26: 0,  # 'c'
+        17: 0,  # 'd'
+        1: 3,  # 'e'
+        27: 0,  # 'f'
+        12: 0,  # 'g'
+        20: 0,  # 'h'
+        9: 1,  # 'i'
+        22: 0,  # 'j'
+        7: 0,  # 'k'
+        6: 0,  # 'l'
+        13: 0,  # 'm'
+        4: 0,  # 'n'
+        8: 1,  # 'o'
+        23: 0,  # 'p'
+        10: 2,  # 'r'
+        5: 0,  # 's'
+        3: 0,  # 't'
+        21: 2,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 1,  # 'z'
+        51: 1,  # 'Á'
+        44: 1,  # 'É'
+        61: 1,  # 'Í'
+        58: 1,  # 'Ó'
+        59: 1,  # 'Ö'
+        60: 1,  # 'Ú'
+        63: 1,  # 'Ü'
+        14: 1,  # 'á'
+        15: 1,  # 'é'
+        30: 1,  # 'í'
+        25: 1,  # 'ó'
+        24: 1,  # 'ö'
+        31: 1,  # 'ú'
+        29: 1,  # 'ü'
+        42: 1,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    32: {  # 'E'
+        28: 1,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 1,  # 'E'
+        50: 1,  # 'F'
+        49: 2,  # 'G'
+        38: 1,  # 'H'
+        39: 1,  # 'I'
+        53: 1,  # 'J'
+        36: 2,  # 'K'
+        41: 2,  # 'L'
+        34: 2,  # 'M'
+        35: 2,  # 'N'
+        47: 1,  # 'O'
+        46: 1,  # 'P'
+        43: 2,  # 'R'
+        33: 2,  # 'S'
+        37: 2,  # 'T'
+        57: 1,  # 'U'
+        48: 1,  # 'V'
+        55: 1,  # 'Y'
+        52: 1,  # 'Z'
+        2: 1,  # 'a'
+        18: 1,  # 'b'
+        26: 1,  # 'c'
+        17: 2,  # 'd'
+        1: 1,  # 'e'
+        27: 1,  # 'f'
+        12: 3,  # 'g'
+        20: 1,  # 'h'
+        9: 1,  # 'i'
+        22: 1,  # 'j'
+        7: 1,  # 'k'
+        6: 2,  # 'l'
+        13: 2,  # 'm'
+        4: 2,  # 'n'
+        8: 0,  # 'o'
+        23: 1,  # 'p'
+        10: 2,  # 'r'
+        5: 2,  # 's'
+        3: 1,  # 't'
+        21: 2,  # 'u'
+        19: 1,  # 'v'
+        62: 1,  # 'x'
+        16: 0,  # 'y'
+        11: 3,  # 'z'
+        51: 1,  # 'Á'
+        44: 1,  # 'É'
+        61: 0,  # 'Í'
+        58: 1,  # 'Ó'
+        59: 1,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 1,  # 'Ü'
+        14: 0,  # 'á'
+        15: 0,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 1,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    50: {  # 'F'
+        28: 1,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 1,  # 'E'
+        50: 1,  # 'F'
+        49: 0,  # 'G'
+        38: 1,  # 'H'
+        39: 1,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 1,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 1,  # 'O'
+        46: 0,  # 'P'
+        43: 1,  # 'R'
+        33: 0,  # 'S'
+        37: 1,  # 'T'
+        57: 1,  # 'U'
+        48: 0,  # 'V'
+        55: 1,  # 'Y'
+        52: 0,  # 'Z'
+        2: 2,  # 'a'
+        18: 0,  # 'b'
+        26: 0,  # 'c'
+        17: 0,  # 'd'
+        1: 2,  # 'e'
+        27: 1,  # 'f'
+        12: 0,  # 'g'
+        20: 0,  # 'h'
+        9: 2,  # 'i'
+        22: 1,  # 'j'
+        7: 0,  # 'k'
+        6: 1,  # 'l'
+        13: 0,  # 'm'
+        4: 0,  # 'n'
+        8: 2,  # 'o'
+        23: 0,  # 'p'
+        10: 2,  # 'r'
+        5: 0,  # 's'
+        3: 0,  # 't'
+        21: 1,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 0,  # 'z'
+        51: 1,  # 'Á'
+        44: 1,  # 'É'
+        61: 0,  # 'Í'
+        58: 1,  # 'Ó'
+        59: 1,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 1,  # 'Ü'
+        14: 1,  # 'á'
+        15: 1,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 2,  # 'ö'
+        31: 1,  # 'ú'
+        29: 1,  # 'ü'
+        42: 1,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    49: {  # 'G'
+        28: 2,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 2,  # 'E'
+        50: 1,  # 'F'
+        49: 1,  # 'G'
+        38: 1,  # 'H'
+        39: 1,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 1,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 1,  # 'O'
+        46: 1,  # 'P'
+        43: 1,  # 'R'
+        33: 1,  # 'S'
+        37: 1,  # 'T'
+        57: 1,  # 'U'
+        48: 1,  # 'V'
+        55: 2,  # 'Y'
+        52: 1,  # 'Z'
+        2: 2,  # 'a'
+        18: 0,  # 'b'
+        26: 0,  # 'c'
+        17: 0,  # 'd'
+        1: 2,  # 'e'
+        27: 0,  # 'f'
+        12: 0,  # 'g'
+        20: 0,  # 'h'
+        9: 1,  # 'i'
+        22: 0,  # 'j'
+        7: 0,  # 'k'
+        6: 1,  # 'l'
+        13: 0,  # 'm'
+        4: 0,  # 'n'
+        8: 2,  # 'o'
+        23: 0,  # 'p'
+        10: 2,  # 'r'
+        5: 0,  # 's'
+        3: 0,  # 't'
+        21: 1,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 2,  # 'y'
+        11: 0,  # 'z'
+        51: 1,  # 'Á'
+        44: 1,  # 'É'
+        61: 1,  # 'Í'
+        58: 1,  # 'Ó'
+        59: 1,  # 'Ö'
+        60: 1,  # 'Ú'
+        63: 1,  # 'Ü'
+        14: 1,  # 'á'
+        15: 1,  # 'é'
+        30: 0,  # 'í'
+        25: 1,  # 'ó'
+        24: 1,  # 'ö'
+        31: 1,  # 'ú'
+        29: 1,  # 'ü'
+        42: 1,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    38: {  # 'H'
+        28: 2,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 0,  # 'D'
+        32: 1,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 1,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 1,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 1,  # 'O'
+        46: 0,  # 'P'
+        43: 1,  # 'R'
+        33: 1,  # 'S'
+        37: 1,  # 'T'
+        57: 1,  # 'U'
+        48: 0,  # 'V'
+        55: 1,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 0,  # 'b'
+        26: 0,  # 'c'
+        17: 0,  # 'd'
+        1: 2,  # 'e'
+        27: 0,  # 'f'
+        12: 0,  # 'g'
+        20: 0,  # 'h'
+        9: 2,  # 'i'
+        22: 1,  # 'j'
+        7: 0,  # 'k'
+        6: 1,  # 'l'
+        13: 1,  # 'm'
+        4: 0,  # 'n'
+        8: 3,  # 'o'
+        23: 0,  # 'p'
+        10: 1,  # 'r'
+        5: 0,  # 's'
+        3: 0,  # 't'
+        21: 2,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 0,  # 'z'
+        51: 2,  # 'Á'
+        44: 2,  # 'É'
+        61: 1,  # 'Í'
+        58: 1,  # 'Ó'
+        59: 1,  # 'Ö'
+        60: 1,  # 'Ú'
+        63: 1,  # 'Ü'
+        14: 2,  # 'á'
+        15: 1,  # 'é'
+        30: 2,  # 'í'
+        25: 1,  # 'ó'
+        24: 1,  # 'ö'
+        31: 1,  # 'ú'
+        29: 1,  # 'ü'
+        42: 1,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    39: {  # 'I'
+        28: 2,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 1,  # 'E'
+        50: 1,  # 'F'
+        49: 1,  # 'G'
+        38: 1,  # 'H'
+        39: 2,  # 'I'
+        53: 1,  # 'J'
+        36: 2,  # 'K'
+        41: 2,  # 'L'
+        34: 1,  # 'M'
+        35: 2,  # 'N'
+        47: 1,  # 'O'
+        46: 1,  # 'P'
+        43: 1,  # 'R'
+        33: 2,  # 'S'
+        37: 1,  # 'T'
+        57: 1,  # 'U'
+        48: 1,  # 'V'
+        55: 0,  # 'Y'
+        52: 2,  # 'Z'
+        2: 0,  # 'a'
+        18: 1,  # 'b'
+        26: 1,  # 'c'
+        17: 2,  # 'd'
+        1: 0,  # 'e'
+        27: 1,  # 'f'
+        12: 2,  # 'g'
+        20: 1,  # 'h'
+        9: 0,  # 'i'
+        22: 1,  # 'j'
+        7: 1,  # 'k'
+        6: 2,  # 'l'
+        13: 2,  # 'm'
+        4: 1,  # 'n'
+        8: 0,  # 'o'
+        23: 1,  # 'p'
+        10: 2,  # 'r'
+        5: 2,  # 's'
+        3: 2,  # 't'
+        21: 0,  # 'u'
+        19: 1,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 1,  # 'z'
+        51: 1,  # 'Á'
+        44: 1,  # 'É'
+        61: 0,  # 'Í'
+        58: 1,  # 'Ó'
+        59: 1,  # 'Ö'
+        60: 1,  # 'Ú'
+        63: 1,  # 'Ü'
+        14: 0,  # 'á'
+        15: 0,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    53: {  # 'J'
+        28: 2,  # 'A'
+        40: 0,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 2,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 1,  # 'H'
+        39: 1,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 1,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 1,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 1,  # 'S'
+        37: 1,  # 'T'
+        57: 1,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 1,  # 'Z'
+        2: 2,  # 'a'
+        18: 0,  # 'b'
+        26: 0,  # 'c'
+        17: 0,  # 'd'
+        1: 2,  # 'e'
+        27: 0,  # 'f'
+        12: 0,  # 'g'
+        20: 0,  # 'h'
+        9: 1,  # 'i'
+        22: 0,  # 'j'
+        7: 0,  # 'k'
+        6: 0,  # 'l'
+        13: 0,  # 'm'
+        4: 0,  # 'n'
+        8: 1,  # 'o'
+        23: 0,  # 'p'
+        10: 0,  # 'r'
+        5: 0,  # 's'
+        3: 0,  # 't'
+        21: 2,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 0,  # 'z'
+        51: 1,  # 'Á'
+        44: 1,  # 'É'
+        61: 0,  # 'Í'
+        58: 1,  # 'Ó'
+        59: 1,  # 'Ö'
+        60: 1,  # 'Ú'
+        63: 1,  # 'Ü'
+        14: 2,  # 'á'
+        15: 1,  # 'é'
+        30: 0,  # 'í'
+        25: 2,  # 'ó'
+        24: 2,  # 'ö'
+        31: 1,  # 'ú'
+        29: 0,  # 'ü'
+        42: 1,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    36: {  # 'K'
+        28: 2,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 2,  # 'E'
+        50: 1,  # 'F'
+        49: 0,  # 'G'
+        38: 1,  # 'H'
+        39: 2,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 1,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 2,  # 'O'
+        46: 0,  # 'P'
+        43: 1,  # 'R'
+        33: 1,  # 'S'
+        37: 1,  # 'T'
+        57: 1,  # 'U'
+        48: 1,  # 'V'
+        55: 1,  # 'Y'
+        52: 0,  # 'Z'
+        2: 2,  # 'a'
+        18: 0,  # 'b'
+        26: 0,  # 'c'
+        17: 0,  # 'd'
+        1: 2,  # 'e'
+        27: 1,  # 'f'
+        12: 0,  # 'g'
+        20: 1,  # 'h'
+        9: 3,  # 'i'
+        22: 0,  # 'j'
+        7: 0,  # 'k'
+        6: 1,  # 'l'
+        13: 1,  # 'm'
+        4: 1,  # 'n'
+        8: 2,  # 'o'
+        23: 0,  # 'p'
+        10: 2,  # 'r'
+        5: 0,  # 's'
+        3: 0,  # 't'
+        21: 1,  # 'u'
+        19: 1,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 0,  # 'z'
+        51: 1,  # 'Á'
+        44: 1,  # 'É'
+        61: 1,  # 'Í'
+        58: 1,  # 'Ó'
+        59: 2,  # 'Ö'
+        60: 1,  # 'Ú'
+        63: 1,  # 'Ü'
+        14: 2,  # 'á'
+        15: 2,  # 'é'
+        30: 1,  # 'í'
+        25: 1,  # 'ó'
+        24: 2,  # 'ö'
+        31: 1,  # 'ú'
+        29: 2,  # 'ü'
+        42: 1,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    41: {  # 'L'
+        28: 2,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 2,  # 'E'
+        50: 1,  # 'F'
+        49: 1,  # 'G'
+        38: 1,  # 'H'
+        39: 2,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 2,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 2,  # 'O'
+        46: 0,  # 'P'
+        43: 1,  # 'R'
+        33: 1,  # 'S'
+        37: 2,  # 'T'
+        57: 1,  # 'U'
+        48: 1,  # 'V'
+        55: 1,  # 'Y'
+        52: 1,  # 'Z'
+        2: 2,  # 'a'
+        18: 0,  # 'b'
+        26: 0,  # 'c'
+        17: 0,  # 'd'
+        1: 3,  # 'e'
+        27: 0,  # 'f'
+        12: 0,  # 'g'
+        20: 0,  # 'h'
+        9: 2,  # 'i'
+        22: 1,  # 'j'
+        7: 0,  # 'k'
+        6: 1,  # 'l'
+        13: 0,  # 'm'
+        4: 0,  # 'n'
+        8: 2,  # 'o'
+        23: 0,  # 'p'
+        10: 0,  # 'r'
+        5: 0,  # 's'
+        3: 0,  # 't'
+        21: 2,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 0,  # 'z'
+        51: 2,  # 'Á'
+        44: 1,  # 'É'
+        61: 1,  # 'Í'
+        58: 1,  # 'Ó'
+        59: 1,  # 'Ö'
+        60: 1,  # 'Ú'
+        63: 1,  # 'Ü'
+        14: 2,  # 'á'
+        15: 1,  # 'é'
+        30: 1,  # 'í'
+        25: 1,  # 'ó'
+        24: 1,  # 'ö'
+        31: 0,  # 'ú'
+        29: 1,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    34: {  # 'M'
+        28: 2,  # 'A'
+        40: 1,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 2,  # 'E'
+        50: 1,  # 'F'
+        49: 0,  # 'G'
+        38: 1,  # 'H'
+        39: 2,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 1,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 1,  # 'O'
+        46: 1,  # 'P'
+        43: 1,  # 'R'
+        33: 1,  # 'S'
+        37: 1,  # 'T'
+        57: 1,  # 'U'
+        48: 1,  # 'V'
+        55: 1,  # 'Y'
+        52: 1,  # 'Z'
+        2: 3,  # 'a'
+        18: 0,  # 'b'
+        26: 1,  # 'c'
+        17: 0,  # 'd'
+        1: 3,  # 'e'
+        27: 0,  # 'f'
+        12: 0,  # 'g'
+        20: 0,  # 'h'
+        9: 3,  # 'i'
+        22: 0,  # 'j'
+        7: 0,  # 'k'
+        6: 0,  # 'l'
+        13: 1,  # 'm'
+        4: 1,  # 'n'
+        8: 3,  # 'o'
+        23: 0,  # 'p'
+        10: 1,  # 'r'
+        5: 0,  # 's'
+        3: 0,  # 't'
+        21: 2,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 0,  # 'z'
+        51: 2,  # 'Á'
+        44: 1,  # 'É'
+        61: 1,  # 'Í'
+        58: 1,  # 'Ó'
+        59: 1,  # 'Ö'
+        60: 1,  # 'Ú'
+        63: 1,  # 'Ü'
+        14: 2,  # 'á'
+        15: 2,  # 'é'
+        30: 1,  # 'í'
+        25: 1,  # 'ó'
+        24: 1,  # 'ö'
+        31: 1,  # 'ú'
+        29: 1,  # 'ü'
+        42: 0,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    35: {  # 'N'
+        28: 2,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 2,  # 'D'
+        32: 2,  # 'E'
+        50: 1,  # 'F'
+        49: 1,  # 'G'
+        38: 1,  # 'H'
+        39: 1,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 1,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 1,  # 'O'
+        46: 1,  # 'P'
+        43: 1,  # 'R'
+        33: 1,  # 'S'
+        37: 2,  # 'T'
+        57: 1,  # 'U'
+        48: 1,  # 'V'
+        55: 2,  # 'Y'
+        52: 1,  # 'Z'
+        2: 3,  # 'a'
+        18: 0,  # 'b'
+        26: 0,  # 'c'
+        17: 0,  # 'd'
+        1: 3,  # 'e'
+        27: 0,  # 'f'
+        12: 0,  # 'g'
+        20: 0,  # 'h'
+        9: 2,  # 'i'
+        22: 0,  # 'j'
+        7: 0,  # 'k'
+        6: 0,  # 'l'
+        13: 0,  # 'm'
+        4: 1,  # 'n'
+        8: 2,  # 'o'
+        23: 0,  # 'p'
+        10: 0,  # 'r'
+        5: 0,  # 's'
+        3: 0,  # 't'
+        21: 1,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 2,  # 'y'
+        11: 0,  # 'z'
+        51: 1,  # 'Á'
+        44: 1,  # 'É'
+        61: 1,  # 'Í'
+        58: 1,  # 'Ó'
+        59: 1,  # 'Ö'
+        60: 1,  # 'Ú'
+        63: 1,  # 'Ü'
+        14: 1,  # 'á'
+        15: 2,  # 'é'
+        30: 1,  # 'í'
+        25: 1,  # 'ó'
+        24: 1,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 1,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    47: {  # 'O'
+        28: 1,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 1,  # 'E'
+        50: 1,  # 'F'
+        49: 1,  # 'G'
+        38: 1,  # 'H'
+        39: 1,  # 'I'
+        53: 1,  # 'J'
+        36: 2,  # 'K'
+        41: 2,  # 'L'
+        34: 2,  # 'M'
+        35: 2,  # 'N'
+        47: 1,  # 'O'
+        46: 1,  # 'P'
+        43: 2,  # 'R'
+        33: 2,  # 'S'
+        37: 2,  # 'T'
+        57: 1,  # 'U'
+        48: 1,  # 'V'
+        55: 1,  # 'Y'
+        52: 1,  # 'Z'
+        2: 0,  # 'a'
+        18: 1,  # 'b'
+        26: 1,  # 'c'
+        17: 1,  # 'd'
+        1: 1,  # 'e'
+        27: 1,  # 'f'
+        12: 1,  # 'g'
+        20: 1,  # 'h'
+        9: 1,  # 'i'
+        22: 1,  # 'j'
+        7: 2,  # 'k'
+        6: 2,  # 'l'
+        13: 1,  # 'm'
+        4: 1,  # 'n'
+        8: 1,  # 'o'
+        23: 1,  # 'p'
+        10: 2,  # 'r'
+        5: 1,  # 's'
+        3: 2,  # 't'
+        21: 1,  # 'u'
+        19: 0,  # 'v'
+        62: 1,  # 'x'
+        16: 0,  # 'y'
+        11: 1,  # 'z'
+        51: 1,  # 'Á'
+        44: 1,  # 'É'
+        61: 0,  # 'Í'
+        58: 1,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 0,  # 'á'
+        15: 0,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    46: {  # 'P'
+        28: 1,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 1,  # 'E'
+        50: 1,  # 'F'
+        49: 1,  # 'G'
+        38: 1,  # 'H'
+        39: 1,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 1,  # 'L'
+        34: 0,  # 'M'
+        35: 1,  # 'N'
+        47: 1,  # 'O'
+        46: 1,  # 'P'
+        43: 2,  # 'R'
+        33: 1,  # 'S'
+        37: 1,  # 'T'
+        57: 1,  # 'U'
+        48: 1,  # 'V'
+        55: 0,  # 'Y'
+        52: 1,  # 'Z'
+        2: 2,  # 'a'
+        18: 0,  # 'b'
+        26: 0,  # 'c'
+        17: 0,  # 'd'
+        1: 2,  # 'e'
+        27: 1,  # 'f'
+        12: 0,  # 'g'
+        20: 1,  # 'h'
+        9: 2,  # 'i'
+        22: 0,  # 'j'
+        7: 0,  # 'k'
+        6: 1,  # 'l'
+        13: 0,  # 'm'
+        4: 1,  # 'n'
+        8: 2,  # 'o'
+        23: 0,  # 'p'
+        10: 2,  # 'r'
+        5: 1,  # 's'
+        3: 0,  # 't'
+        21: 1,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 0,  # 'z'
+        51: 2,  # 'Á'
+        44: 1,  # 'É'
+        61: 1,  # 'Í'
+        58: 1,  # 'Ó'
+        59: 1,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 1,  # 'Ü'
+        14: 3,  # 'á'
+        15: 2,  # 'é'
+        30: 0,  # 'í'
+        25: 1,  # 'ó'
+        24: 1,  # 'ö'
+        31: 0,  # 'ú'
+        29: 1,  # 'ü'
+        42: 1,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    43: {  # 'R'
+        28: 2,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 2,  # 'E'
+        50: 1,  # 'F'
+        49: 1,  # 'G'
+        38: 1,  # 'H'
+        39: 2,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 1,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 2,  # 'O'
+        46: 1,  # 'P'
+        43: 1,  # 'R'
+        33: 2,  # 'S'
+        37: 2,  # 'T'
+        57: 1,  # 'U'
+        48: 1,  # 'V'
+        55: 1,  # 'Y'
+        52: 1,  # 'Z'
+        2: 2,  # 'a'
+        18: 0,  # 'b'
+        26: 0,  # 'c'
+        17: 0,  # 'd'
+        1: 2,  # 'e'
+        27: 0,  # 'f'
+        12: 0,  # 'g'
+        20: 1,  # 'h'
+        9: 2,  # 'i'
+        22: 0,  # 'j'
+        7: 0,  # 'k'
+        6: 0,  # 'l'
+        13: 0,  # 'm'
+        4: 0,  # 'n'
+        8: 2,  # 'o'
+        23: 0,  # 'p'
+        10: 0,  # 'r'
+        5: 0,  # 's'
+        3: 0,  # 't'
+        21: 1,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 0,  # 'z'
+        51: 2,  # 'Á'
+        44: 1,  # 'É'
+        61: 1,  # 'Í'
+        58: 2,  # 'Ó'
+        59: 1,  # 'Ö'
+        60: 1,  # 'Ú'
+        63: 1,  # 'Ü'
+        14: 2,  # 'á'
+        15: 2,  # 'é'
+        30: 1,  # 'í'
+        25: 2,  # 'ó'
+        24: 1,  # 'ö'
+        31: 1,  # 'ú'
+        29: 1,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    33: {  # 'S'
+        28: 2,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 2,  # 'E'
+        50: 1,  # 'F'
+        49: 1,  # 'G'
+        38: 1,  # 'H'
+        39: 2,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 1,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 2,  # 'O'
+        46: 1,  # 'P'
+        43: 1,  # 'R'
+        33: 2,  # 'S'
+        37: 2,  # 'T'
+        57: 1,  # 'U'
+        48: 1,  # 'V'
+        55: 1,  # 'Y'
+        52: 3,  # 'Z'
+        2: 2,  # 'a'
+        18: 0,  # 'b'
+        26: 1,  # 'c'
+        17: 0,  # 'd'
+        1: 2,  # 'e'
+        27: 0,  # 'f'
+        12: 0,  # 'g'
+        20: 1,  # 'h'
+        9: 2,  # 'i'
+        22: 0,  # 'j'
+        7: 1,  # 'k'
+        6: 1,  # 'l'
+        13: 1,  # 'm'
+        4: 0,  # 'n'
+        8: 2,  # 'o'
+        23: 1,  # 'p'
+        10: 0,  # 'r'
+        5: 0,  # 's'
+        3: 1,  # 't'
+        21: 1,  # 'u'
+        19: 1,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 3,  # 'z'
+        51: 2,  # 'Á'
+        44: 1,  # 'É'
+        61: 1,  # 'Í'
+        58: 1,  # 'Ó'
+        59: 1,  # 'Ö'
+        60: 1,  # 'Ú'
+        63: 1,  # 'Ü'
+        14: 2,  # 'á'
+        15: 1,  # 'é'
+        30: 1,  # 'í'
+        25: 1,  # 'ó'
+        24: 1,  # 'ö'
+        31: 1,  # 'ú'
+        29: 1,  # 'ü'
+        42: 1,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    37: {  # 'T'
+        28: 2,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 2,  # 'E'
+        50: 1,  # 'F'
+        49: 1,  # 'G'
+        38: 1,  # 'H'
+        39: 2,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 1,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 2,  # 'O'
+        46: 1,  # 'P'
+        43: 2,  # 'R'
+        33: 1,  # 'S'
+        37: 2,  # 'T'
+        57: 1,  # 'U'
+        48: 1,  # 'V'
+        55: 1,  # 'Y'
+        52: 1,  # 'Z'
+        2: 2,  # 'a'
+        18: 0,  # 'b'
+        26: 0,  # 'c'
+        17: 0,  # 'd'
+        1: 2,  # 'e'
+        27: 0,  # 'f'
+        12: 0,  # 'g'
+        20: 1,  # 'h'
+        9: 2,  # 'i'
+        22: 0,  # 'j'
+        7: 0,  # 'k'
+        6: 0,  # 'l'
+        13: 0,  # 'm'
+        4: 0,  # 'n'
+        8: 2,  # 'o'
+        23: 0,  # 'p'
+        10: 1,  # 'r'
+        5: 1,  # 's'
+        3: 0,  # 't'
+        21: 2,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 1,  # 'z'
+        51: 2,  # 'Á'
+        44: 2,  # 'É'
+        61: 1,  # 'Í'
+        58: 1,  # 'Ó'
+        59: 1,  # 'Ö'
+        60: 1,  # 'Ú'
+        63: 1,  # 'Ü'
+        14: 2,  # 'á'
+        15: 1,  # 'é'
+        30: 1,  # 'í'
+        25: 1,  # 'ó'
+        24: 2,  # 'ö'
+        31: 1,  # 'ú'
+        29: 1,  # 'ü'
+        42: 1,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    57: {  # 'U'
+        28: 1,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 1,  # 'E'
+        50: 1,  # 'F'
+        49: 1,  # 'G'
+        38: 1,  # 'H'
+        39: 1,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 1,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 1,  # 'O'
+        46: 1,  # 'P'
+        43: 1,  # 'R'
+        33: 2,  # 'S'
+        37: 1,  # 'T'
+        57: 0,  # 'U'
+        48: 1,  # 'V'
+        55: 0,  # 'Y'
+        52: 1,  # 'Z'
+        2: 0,  # 'a'
+        18: 1,  # 'b'
+        26: 1,  # 'c'
+        17: 1,  # 'd'
+        1: 1,  # 'e'
+        27: 0,  # 'f'
+        12: 2,  # 'g'
+        20: 0,  # 'h'
+        9: 0,  # 'i'
+        22: 1,  # 'j'
+        7: 1,  # 'k'
+        6: 1,  # 'l'
+        13: 1,  # 'm'
+        4: 1,  # 'n'
+        8: 0,  # 'o'
+        23: 1,  # 'p'
+        10: 1,  # 'r'
+        5: 1,  # 's'
+        3: 1,  # 't'
+        21: 0,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 1,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 1,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 0,  # 'á'
+        15: 0,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    48: {  # 'V'
+        28: 2,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 1,  # 'D'
+        32: 2,  # 'E'
+        50: 1,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 2,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 0,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 1,  # 'O'
+        46: 1,  # 'P'
+        43: 1,  # 'R'
+        33: 1,  # 'S'
+        37: 1,  # 'T'
+        57: 1,  # 'U'
+        48: 1,  # 'V'
+        55: 1,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 0,  # 'b'
+        26: 0,  # 'c'
+        17: 0,  # 'd'
+        1: 2,  # 'e'
+        27: 0,  # 'f'
+        12: 0,  # 'g'
+        20: 0,  # 'h'
+        9: 2,  # 'i'
+        22: 0,  # 'j'
+        7: 0,  # 'k'
+        6: 1,  # 'l'
+        13: 0,  # 'm'
+        4: 0,  # 'n'
+        8: 2,  # 'o'
+        23: 0,  # 'p'
+        10: 0,  # 'r'
+        5: 0,  # 's'
+        3: 0,  # 't'
+        21: 1,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 0,  # 'z'
+        51: 2,  # 'Á'
+        44: 2,  # 'É'
+        61: 1,  # 'Í'
+        58: 1,  # 'Ó'
+        59: 1,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 1,  # 'Ü'
+        14: 2,  # 'á'
+        15: 2,  # 'é'
+        30: 1,  # 'í'
+        25: 0,  # 'ó'
+        24: 1,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    55: {  # 'Y'
+        28: 2,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 2,  # 'E'
+        50: 1,  # 'F'
+        49: 1,  # 'G'
+        38: 1,  # 'H'
+        39: 1,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 1,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 1,  # 'O'
+        46: 1,  # 'P'
+        43: 1,  # 'R'
+        33: 1,  # 'S'
+        37: 1,  # 'T'
+        57: 1,  # 'U'
+        48: 1,  # 'V'
+        55: 0,  # 'Y'
+        52: 2,  # 'Z'
+        2: 1,  # 'a'
+        18: 0,  # 'b'
+        26: 0,  # 'c'
+        17: 1,  # 'd'
+        1: 1,  # 'e'
+        27: 0,  # 'f'
+        12: 0,  # 'g'
+        20: 0,  # 'h'
+        9: 0,  # 'i'
+        22: 0,  # 'j'
+        7: 0,  # 'k'
+        6: 0,  # 'l'
+        13: 0,  # 'm'
+        4: 0,  # 'n'
+        8: 1,  # 'o'
+        23: 1,  # 'p'
+        10: 0,  # 'r'
+        5: 0,  # 's'
+        3: 0,  # 't'
+        21: 0,  # 'u'
+        19: 1,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 0,  # 'z'
+        51: 1,  # 'Á'
+        44: 1,  # 'É'
+        61: 1,  # 'Í'
+        58: 1,  # 'Ó'
+        59: 1,  # 'Ö'
+        60: 1,  # 'Ú'
+        63: 1,  # 'Ü'
+        14: 0,  # 'á'
+        15: 0,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    52: {  # 'Z'
+        28: 2,  # 'A'
+        40: 1,  # 'B'
+        54: 0,  # 'C'
+        45: 1,  # 'D'
+        32: 2,  # 'E'
+        50: 1,  # 'F'
+        49: 1,  # 'G'
+        38: 1,  # 'H'
+        39: 2,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 1,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 2,  # 'O'
+        46: 1,  # 'P'
+        43: 1,  # 'R'
+        33: 2,  # 'S'
+        37: 1,  # 'T'
+        57: 1,  # 'U'
+        48: 1,  # 'V'
+        55: 1,  # 'Y'
+        52: 1,  # 'Z'
+        2: 1,  # 'a'
+        18: 0,  # 'b'
+        26: 0,  # 'c'
+        17: 0,  # 'd'
+        1: 1,  # 'e'
+        27: 0,  # 'f'
+        12: 0,  # 'g'
+        20: 0,  # 'h'
+        9: 1,  # 'i'
+        22: 0,  # 'j'
+        7: 0,  # 'k'
+        6: 0,  # 'l'
+        13: 0,  # 'm'
+        4: 1,  # 'n'
+        8: 1,  # 'o'
+        23: 0,  # 'p'
+        10: 1,  # 'r'
+        5: 2,  # 's'
+        3: 0,  # 't'
+        21: 1,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 0,  # 'z'
+        51: 2,  # 'Á'
+        44: 1,  # 'É'
+        61: 1,  # 'Í'
+        58: 1,  # 'Ó'
+        59: 1,  # 'Ö'
+        60: 1,  # 'Ú'
+        63: 1,  # 'Ü'
+        14: 1,  # 'á'
+        15: 1,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 1,  # 'ö'
+        31: 1,  # 'ú'
+        29: 1,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    2: {  # 'a'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 1,  # 'a'
+        18: 3,  # 'b'
+        26: 3,  # 'c'
+        17: 3,  # 'd'
+        1: 2,  # 'e'
+        27: 2,  # 'f'
+        12: 3,  # 'g'
+        20: 3,  # 'h'
+        9: 3,  # 'i'
+        22: 3,  # 'j'
+        7: 3,  # 'k'
+        6: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        8: 2,  # 'o'
+        23: 3,  # 'p'
+        10: 3,  # 'r'
+        5: 3,  # 's'
+        3: 3,  # 't'
+        21: 3,  # 'u'
+        19: 3,  # 'v'
+        62: 1,  # 'x'
+        16: 2,  # 'y'
+        11: 3,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 1,  # 'á'
+        15: 1,  # 'é'
+        30: 1,  # 'í'
+        25: 1,  # 'ó'
+        24: 1,  # 'ö'
+        31: 1,  # 'ú'
+        29: 1,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    18: {  # 'b'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 3,  # 'b'
+        26: 1,  # 'c'
+        17: 1,  # 'd'
+        1: 3,  # 'e'
+        27: 1,  # 'f'
+        12: 1,  # 'g'
+        20: 1,  # 'h'
+        9: 3,  # 'i'
+        22: 2,  # 'j'
+        7: 2,  # 'k'
+        6: 2,  # 'l'
+        13: 1,  # 'm'
+        4: 2,  # 'n'
+        8: 3,  # 'o'
+        23: 1,  # 'p'
+        10: 3,  # 'r'
+        5: 2,  # 's'
+        3: 1,  # 't'
+        21: 3,  # 'u'
+        19: 1,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 1,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 3,  # 'á'
+        15: 3,  # 'é'
+        30: 2,  # 'í'
+        25: 3,  # 'ó'
+        24: 2,  # 'ö'
+        31: 2,  # 'ú'
+        29: 2,  # 'ü'
+        42: 2,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    26: {  # 'c'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 1,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 1,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 2,  # 'a'
+        18: 1,  # 'b'
+        26: 2,  # 'c'
+        17: 1,  # 'd'
+        1: 3,  # 'e'
+        27: 1,  # 'f'
+        12: 1,  # 'g'
+        20: 3,  # 'h'
+        9: 3,  # 'i'
+        22: 1,  # 'j'
+        7: 2,  # 'k'
+        6: 1,  # 'l'
+        13: 1,  # 'm'
+        4: 1,  # 'n'
+        8: 3,  # 'o'
+        23: 1,  # 'p'
+        10: 2,  # 'r'
+        5: 3,  # 's'
+        3: 2,  # 't'
+        21: 2,  # 'u'
+        19: 1,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 2,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 2,  # 'á'
+        15: 2,  # 'é'
+        30: 2,  # 'í'
+        25: 1,  # 'ó'
+        24: 1,  # 'ö'
+        31: 1,  # 'ú'
+        29: 1,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    17: {  # 'd'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 2,  # 'b'
+        26: 1,  # 'c'
+        17: 2,  # 'd'
+        1: 3,  # 'e'
+        27: 1,  # 'f'
+        12: 1,  # 'g'
+        20: 2,  # 'h'
+        9: 3,  # 'i'
+        22: 3,  # 'j'
+        7: 2,  # 'k'
+        6: 1,  # 'l'
+        13: 2,  # 'm'
+        4: 3,  # 'n'
+        8: 3,  # 'o'
+        23: 1,  # 'p'
+        10: 3,  # 'r'
+        5: 3,  # 's'
+        3: 3,  # 't'
+        21: 3,  # 'u'
+        19: 3,  # 'v'
+        62: 0,  # 'x'
+        16: 2,  # 'y'
+        11: 2,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 3,  # 'á'
+        15: 3,  # 'é'
+        30: 3,  # 'í'
+        25: 3,  # 'ó'
+        24: 3,  # 'ö'
+        31: 2,  # 'ú'
+        29: 2,  # 'ü'
+        42: 2,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    1: {  # 'e'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 2,  # 'a'
+        18: 3,  # 'b'
+        26: 3,  # 'c'
+        17: 3,  # 'd'
+        1: 2,  # 'e'
+        27: 3,  # 'f'
+        12: 3,  # 'g'
+        20: 3,  # 'h'
+        9: 3,  # 'i'
+        22: 3,  # 'j'
+        7: 3,  # 'k'
+        6: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        8: 2,  # 'o'
+        23: 3,  # 'p'
+        10: 3,  # 'r'
+        5: 3,  # 's'
+        3: 3,  # 't'
+        21: 2,  # 'u'
+        19: 3,  # 'v'
+        62: 2,  # 'x'
+        16: 2,  # 'y'
+        11: 3,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 3,  # 'á'
+        15: 1,  # 'é'
+        30: 1,  # 'í'
+        25: 1,  # 'ó'
+        24: 1,  # 'ö'
+        31: 1,  # 'ú'
+        29: 1,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    27: {  # 'f'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 1,  # 'b'
+        26: 1,  # 'c'
+        17: 1,  # 'd'
+        1: 3,  # 'e'
+        27: 2,  # 'f'
+        12: 1,  # 'g'
+        20: 1,  # 'h'
+        9: 3,  # 'i'
+        22: 2,  # 'j'
+        7: 1,  # 'k'
+        6: 1,  # 'l'
+        13: 1,  # 'm'
+        4: 1,  # 'n'
+        8: 3,  # 'o'
+        23: 0,  # 'p'
+        10: 3,  # 'r'
+        5: 1,  # 's'
+        3: 1,  # 't'
+        21: 2,  # 'u'
+        19: 1,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 0,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 3,  # 'á'
+        15: 3,  # 'é'
+        30: 1,  # 'í'
+        25: 1,  # 'ó'
+        24: 3,  # 'ö'
+        31: 1,  # 'ú'
+        29: 2,  # 'ü'
+        42: 1,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    12: {  # 'g'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 3,  # 'b'
+        26: 2,  # 'c'
+        17: 2,  # 'd'
+        1: 3,  # 'e'
+        27: 2,  # 'f'
+        12: 3,  # 'g'
+        20: 3,  # 'h'
+        9: 3,  # 'i'
+        22: 3,  # 'j'
+        7: 2,  # 'k'
+        6: 3,  # 'l'
+        13: 2,  # 'm'
+        4: 3,  # 'n'
+        8: 3,  # 'o'
+        23: 1,  # 'p'
+        10: 3,  # 'r'
+        5: 3,  # 's'
+        3: 3,  # 't'
+        21: 3,  # 'u'
+        19: 3,  # 'v'
+        62: 0,  # 'x'
+        16: 3,  # 'y'
+        11: 2,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 3,  # 'á'
+        15: 3,  # 'é'
+        30: 2,  # 'í'
+        25: 3,  # 'ó'
+        24: 2,  # 'ö'
+        31: 2,  # 'ú'
+        29: 2,  # 'ü'
+        42: 2,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    20: {  # 'h'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 1,  # 'b'
+        26: 1,  # 'c'
+        17: 0,  # 'd'
+        1: 3,  # 'e'
+        27: 0,  # 'f'
+        12: 1,  # 'g'
+        20: 2,  # 'h'
+        9: 3,  # 'i'
+        22: 1,  # 'j'
+        7: 1,  # 'k'
+        6: 1,  # 'l'
+        13: 1,  # 'm'
+        4: 1,  # 'n'
+        8: 3,  # 'o'
+        23: 0,  # 'p'
+        10: 1,  # 'r'
+        5: 2,  # 's'
+        3: 1,  # 't'
+        21: 3,  # 'u'
+        19: 1,  # 'v'
+        62: 0,  # 'x'
+        16: 2,  # 'y'
+        11: 0,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 3,  # 'á'
+        15: 3,  # 'é'
+        30: 3,  # 'í'
+        25: 2,  # 'ó'
+        24: 2,  # 'ö'
+        31: 2,  # 'ú'
+        29: 1,  # 'ü'
+        42: 1,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    9: {  # 'i'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 3,  # 'b'
+        26: 3,  # 'c'
+        17: 3,  # 'd'
+        1: 3,  # 'e'
+        27: 3,  # 'f'
+        12: 3,  # 'g'
+        20: 3,  # 'h'
+        9: 2,  # 'i'
+        22: 2,  # 'j'
+        7: 3,  # 'k'
+        6: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        8: 2,  # 'o'
+        23: 2,  # 'p'
+        10: 3,  # 'r'
+        5: 3,  # 's'
+        3: 3,  # 't'
+        21: 3,  # 'u'
+        19: 3,  # 'v'
+        62: 1,  # 'x'
+        16: 1,  # 'y'
+        11: 3,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 3,  # 'á'
+        15: 2,  # 'é'
+        30: 1,  # 'í'
+        25: 3,  # 'ó'
+        24: 1,  # 'ö'
+        31: 2,  # 'ú'
+        29: 1,  # 'ü'
+        42: 0,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    22: {  # 'j'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 2,  # 'b'
+        26: 1,  # 'c'
+        17: 3,  # 'd'
+        1: 3,  # 'e'
+        27: 1,  # 'f'
+        12: 1,  # 'g'
+        20: 2,  # 'h'
+        9: 1,  # 'i'
+        22: 2,  # 'j'
+        7: 2,  # 'k'
+        6: 2,  # 'l'
+        13: 1,  # 'm'
+        4: 2,  # 'n'
+        8: 3,  # 'o'
+        23: 1,  # 'p'
+        10: 2,  # 'r'
+        5: 2,  # 's'
+        3: 3,  # 't'
+        21: 3,  # 'u'
+        19: 1,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 2,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 3,  # 'á'
+        15: 3,  # 'é'
+        30: 1,  # 'í'
+        25: 3,  # 'ó'
+        24: 3,  # 'ö'
+        31: 3,  # 'ú'
+        29: 2,  # 'ü'
+        42: 1,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    7: {  # 'k'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 3,  # 'b'
+        26: 2,  # 'c'
+        17: 1,  # 'd'
+        1: 3,  # 'e'
+        27: 1,  # 'f'
+        12: 1,  # 'g'
+        20: 2,  # 'h'
+        9: 3,  # 'i'
+        22: 2,  # 'j'
+        7: 3,  # 'k'
+        6: 3,  # 'l'
+        13: 1,  # 'm'
+        4: 3,  # 'n'
+        8: 3,  # 'o'
+        23: 1,  # 'p'
+        10: 3,  # 'r'
+        5: 3,  # 's'
+        3: 3,  # 't'
+        21: 3,  # 'u'
+        19: 2,  # 'v'
+        62: 0,  # 'x'
+        16: 2,  # 'y'
+        11: 1,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 3,  # 'á'
+        15: 3,  # 'é'
+        30: 3,  # 'í'
+        25: 2,  # 'ó'
+        24: 3,  # 'ö'
+        31: 1,  # 'ú'
+        29: 3,  # 'ü'
+        42: 1,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    6: {  # 'l'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 1,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 1,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 2,  # 'b'
+        26: 3,  # 'c'
+        17: 3,  # 'd'
+        1: 3,  # 'e'
+        27: 3,  # 'f'
+        12: 3,  # 'g'
+        20: 3,  # 'h'
+        9: 3,  # 'i'
+        22: 3,  # 'j'
+        7: 3,  # 'k'
+        6: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        8: 3,  # 'o'
+        23: 2,  # 'p'
+        10: 2,  # 'r'
+        5: 3,  # 's'
+        3: 3,  # 't'
+        21: 3,  # 'u'
+        19: 3,  # 'v'
+        62: 0,  # 'x'
+        16: 3,  # 'y'
+        11: 2,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 3,  # 'á'
+        15: 3,  # 'é'
+        30: 3,  # 'í'
+        25: 3,  # 'ó'
+        24: 3,  # 'ö'
+        31: 2,  # 'ú'
+        29: 2,  # 'ü'
+        42: 3,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    13: {  # 'm'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 3,  # 'b'
+        26: 2,  # 'c'
+        17: 1,  # 'd'
+        1: 3,  # 'e'
+        27: 1,  # 'f'
+        12: 1,  # 'g'
+        20: 2,  # 'h'
+        9: 3,  # 'i'
+        22: 2,  # 'j'
+        7: 1,  # 'k'
+        6: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 2,  # 'n'
+        8: 3,  # 'o'
+        23: 3,  # 'p'
+        10: 2,  # 'r'
+        5: 2,  # 's'
+        3: 2,  # 't'
+        21: 3,  # 'u'
+        19: 1,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 2,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 3,  # 'á'
+        15: 3,  # 'é'
+        30: 2,  # 'í'
+        25: 2,  # 'ó'
+        24: 2,  # 'ö'
+        31: 2,  # 'ú'
+        29: 2,  # 'ü'
+        42: 1,  # 'ő'
+        56: 2,  # 'ű'
+    },
+    4: {  # 'n'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 3,  # 'b'
+        26: 3,  # 'c'
+        17: 3,  # 'd'
+        1: 3,  # 'e'
+        27: 2,  # 'f'
+        12: 3,  # 'g'
+        20: 3,  # 'h'
+        9: 3,  # 'i'
+        22: 2,  # 'j'
+        7: 3,  # 'k'
+        6: 2,  # 'l'
+        13: 2,  # 'm'
+        4: 3,  # 'n'
+        8: 3,  # 'o'
+        23: 2,  # 'p'
+        10: 2,  # 'r'
+        5: 3,  # 's'
+        3: 3,  # 't'
+        21: 3,  # 'u'
+        19: 2,  # 'v'
+        62: 1,  # 'x'
+        16: 3,  # 'y'
+        11: 3,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 3,  # 'á'
+        15: 3,  # 'é'
+        30: 2,  # 'í'
+        25: 2,  # 'ó'
+        24: 3,  # 'ö'
+        31: 2,  # 'ú'
+        29: 3,  # 'ü'
+        42: 2,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    8: {  # 'o'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 1,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 2,  # 'a'
+        18: 3,  # 'b'
+        26: 3,  # 'c'
+        17: 3,  # 'd'
+        1: 2,  # 'e'
+        27: 2,  # 'f'
+        12: 3,  # 'g'
+        20: 3,  # 'h'
+        9: 2,  # 'i'
+        22: 2,  # 'j'
+        7: 3,  # 'k'
+        6: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        8: 1,  # 'o'
+        23: 3,  # 'p'
+        10: 3,  # 'r'
+        5: 3,  # 's'
+        3: 3,  # 't'
+        21: 2,  # 'u'
+        19: 3,  # 'v'
+        62: 1,  # 'x'
+        16: 1,  # 'y'
+        11: 3,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 1,  # 'á'
+        15: 2,  # 'é'
+        30: 1,  # 'í'
+        25: 1,  # 'ó'
+        24: 1,  # 'ö'
+        31: 1,  # 'ú'
+        29: 1,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    23: {  # 'p'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 1,  # 'b'
+        26: 2,  # 'c'
+        17: 1,  # 'd'
+        1: 3,  # 'e'
+        27: 1,  # 'f'
+        12: 1,  # 'g'
+        20: 2,  # 'h'
+        9: 3,  # 'i'
+        22: 2,  # 'j'
+        7: 2,  # 'k'
+        6: 3,  # 'l'
+        13: 1,  # 'm'
+        4: 2,  # 'n'
+        8: 3,  # 'o'
+        23: 3,  # 'p'
+        10: 3,  # 'r'
+        5: 2,  # 's'
+        3: 2,  # 't'
+        21: 3,  # 'u'
+        19: 2,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 2,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 3,  # 'á'
+        15: 3,  # 'é'
+        30: 2,  # 'í'
+        25: 2,  # 'ó'
+        24: 2,  # 'ö'
+        31: 1,  # 'ú'
+        29: 2,  # 'ü'
+        42: 1,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    10: {  # 'r'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 3,  # 'b'
+        26: 3,  # 'c'
+        17: 3,  # 'd'
+        1: 3,  # 'e'
+        27: 2,  # 'f'
+        12: 3,  # 'g'
+        20: 2,  # 'h'
+        9: 3,  # 'i'
+        22: 3,  # 'j'
+        7: 3,  # 'k'
+        6: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        8: 3,  # 'o'
+        23: 2,  # 'p'
+        10: 3,  # 'r'
+        5: 3,  # 's'
+        3: 3,  # 't'
+        21: 3,  # 'u'
+        19: 3,  # 'v'
+        62: 1,  # 'x'
+        16: 2,  # 'y'
+        11: 3,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 3,  # 'á'
+        15: 3,  # 'é'
+        30: 2,  # 'í'
+        25: 3,  # 'ó'
+        24: 3,  # 'ö'
+        31: 3,  # 'ú'
+        29: 3,  # 'ü'
+        42: 2,  # 'ő'
+        56: 2,  # 'ű'
+    },
+    5: {  # 's'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 3,  # 'b'
+        26: 2,  # 'c'
+        17: 2,  # 'd'
+        1: 3,  # 'e'
+        27: 2,  # 'f'
+        12: 2,  # 'g'
+        20: 2,  # 'h'
+        9: 3,  # 'i'
+        22: 1,  # 'j'
+        7: 3,  # 'k'
+        6: 2,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        8: 3,  # 'o'
+        23: 2,  # 'p'
+        10: 3,  # 'r'
+        5: 3,  # 's'
+        3: 3,  # 't'
+        21: 3,  # 'u'
+        19: 2,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 3,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 3,  # 'á'
+        15: 3,  # 'é'
+        30: 3,  # 'í'
+        25: 3,  # 'ó'
+        24: 3,  # 'ö'
+        31: 3,  # 'ú'
+        29: 3,  # 'ü'
+        42: 2,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    3: {  # 't'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 3,  # 'b'
+        26: 2,  # 'c'
+        17: 1,  # 'd'
+        1: 3,  # 'e'
+        27: 2,  # 'f'
+        12: 1,  # 'g'
+        20: 3,  # 'h'
+        9: 3,  # 'i'
+        22: 3,  # 'j'
+        7: 3,  # 'k'
+        6: 3,  # 'l'
+        13: 2,  # 'm'
+        4: 3,  # 'n'
+        8: 3,  # 'o'
+        23: 1,  # 'p'
+        10: 3,  # 'r'
+        5: 3,  # 's'
+        3: 3,  # 't'
+        21: 3,  # 'u'
+        19: 3,  # 'v'
+        62: 0,  # 'x'
+        16: 3,  # 'y'
+        11: 1,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 3,  # 'á'
+        15: 3,  # 'é'
+        30: 2,  # 'í'
+        25: 3,  # 'ó'
+        24: 3,  # 'ö'
+        31: 3,  # 'ú'
+        29: 3,  # 'ü'
+        42: 3,  # 'ő'
+        56: 2,  # 'ű'
+    },
+    21: {  # 'u'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 1,  # 'a'
+        18: 2,  # 'b'
+        26: 2,  # 'c'
+        17: 3,  # 'd'
+        1: 2,  # 'e'
+        27: 1,  # 'f'
+        12: 3,  # 'g'
+        20: 2,  # 'h'
+        9: 2,  # 'i'
+        22: 2,  # 'j'
+        7: 3,  # 'k'
+        6: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        8: 1,  # 'o'
+        23: 2,  # 'p'
+        10: 3,  # 'r'
+        5: 3,  # 's'
+        3: 3,  # 't'
+        21: 1,  # 'u'
+        19: 3,  # 'v'
+        62: 1,  # 'x'
+        16: 1,  # 'y'
+        11: 2,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 2,  # 'á'
+        15: 1,  # 'é'
+        30: 1,  # 'í'
+        25: 1,  # 'ó'
+        24: 0,  # 'ö'
+        31: 1,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    19: {  # 'v'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 2,  # 'b'
+        26: 1,  # 'c'
+        17: 1,  # 'd'
+        1: 3,  # 'e'
+        27: 1,  # 'f'
+        12: 1,  # 'g'
+        20: 1,  # 'h'
+        9: 3,  # 'i'
+        22: 1,  # 'j'
+        7: 1,  # 'k'
+        6: 1,  # 'l'
+        13: 1,  # 'm'
+        4: 1,  # 'n'
+        8: 3,  # 'o'
+        23: 1,  # 'p'
+        10: 1,  # 'r'
+        5: 2,  # 's'
+        3: 2,  # 't'
+        21: 2,  # 'u'
+        19: 2,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 1,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 3,  # 'á'
+        15: 3,  # 'é'
+        30: 2,  # 'í'
+        25: 2,  # 'ó'
+        24: 2,  # 'ö'
+        31: 1,  # 'ú'
+        29: 2,  # 'ü'
+        42: 1,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    62: {  # 'x'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 1,  # 'a'
+        18: 1,  # 'b'
+        26: 1,  # 'c'
+        17: 0,  # 'd'
+        1: 1,  # 'e'
+        27: 1,  # 'f'
+        12: 0,  # 'g'
+        20: 0,  # 'h'
+        9: 1,  # 'i'
+        22: 0,  # 'j'
+        7: 1,  # 'k'
+        6: 1,  # 'l'
+        13: 1,  # 'm'
+        4: 1,  # 'n'
+        8: 1,  # 'o'
+        23: 1,  # 'p'
+        10: 1,  # 'r'
+        5: 1,  # 's'
+        3: 1,  # 't'
+        21: 1,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 0,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 1,  # 'á'
+        15: 1,  # 'é'
+        30: 1,  # 'í'
+        25: 1,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    16: {  # 'y'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 2,  # 'b'
+        26: 1,  # 'c'
+        17: 1,  # 'd'
+        1: 3,  # 'e'
+        27: 2,  # 'f'
+        12: 2,  # 'g'
+        20: 2,  # 'h'
+        9: 3,  # 'i'
+        22: 2,  # 'j'
+        7: 2,  # 'k'
+        6: 2,  # 'l'
+        13: 2,  # 'm'
+        4: 3,  # 'n'
+        8: 3,  # 'o'
+        23: 2,  # 'p'
+        10: 2,  # 'r'
+        5: 3,  # 's'
+        3: 3,  # 't'
+        21: 3,  # 'u'
+        19: 3,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 2,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 3,  # 'á'
+        15: 3,  # 'é'
+        30: 2,  # 'í'
+        25: 2,  # 'ó'
+        24: 3,  # 'ö'
+        31: 2,  # 'ú'
+        29: 2,  # 'ü'
+        42: 1,  # 'ő'
+        56: 2,  # 'ű'
+    },
+    11: {  # 'z'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 3,  # 'a'
+        18: 2,  # 'b'
+        26: 1,  # 'c'
+        17: 3,  # 'd'
+        1: 3,  # 'e'
+        27: 1,  # 'f'
+        12: 2,  # 'g'
+        20: 2,  # 'h'
+        9: 3,  # 'i'
+        22: 1,  # 'j'
+        7: 3,  # 'k'
+        6: 2,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        8: 3,  # 'o'
+        23: 1,  # 'p'
+        10: 2,  # 'r'
+        5: 3,  # 's'
+        3: 3,  # 't'
+        21: 3,  # 'u'
+        19: 2,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 3,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 3,  # 'á'
+        15: 3,  # 'é'
+        30: 3,  # 'í'
+        25: 3,  # 'ó'
+        24: 3,  # 'ö'
+        31: 2,  # 'ú'
+        29: 3,  # 'ü'
+        42: 2,  # 'ő'
+        56: 1,  # 'ű'
+    },
+    51: {  # 'Á'
+        28: 0,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 0,  # 'E'
+        50: 1,  # 'F'
+        49: 2,  # 'G'
+        38: 1,  # 'H'
+        39: 1,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 2,  # 'L'
+        34: 1,  # 'M'
+        35: 2,  # 'N'
+        47: 0,  # 'O'
+        46: 1,  # 'P'
+        43: 2,  # 'R'
+        33: 2,  # 'S'
+        37: 1,  # 'T'
+        57: 0,  # 'U'
+        48: 1,  # 'V'
+        55: 0,  # 'Y'
+        52: 1,  # 'Z'
+        2: 0,  # 'a'
+        18: 1,  # 'b'
+        26: 1,  # 'c'
+        17: 1,  # 'd'
+        1: 0,  # 'e'
+        27: 0,  # 'f'
+        12: 1,  # 'g'
+        20: 1,  # 'h'
+        9: 0,  # 'i'
+        22: 1,  # 'j'
+        7: 1,  # 'k'
+        6: 2,  # 'l'
+        13: 2,  # 'm'
+        4: 0,  # 'n'
+        8: 0,  # 'o'
+        23: 1,  # 'p'
+        10: 1,  # 'r'
+        5: 1,  # 's'
+        3: 1,  # 't'
+        21: 0,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 1,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 1,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 0,  # 'á'
+        15: 0,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    44: {  # 'É'
+        28: 0,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 1,  # 'E'
+        50: 0,  # 'F'
+        49: 2,  # 'G'
+        38: 1,  # 'H'
+        39: 1,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 2,  # 'L'
+        34: 1,  # 'M'
+        35: 2,  # 'N'
+        47: 0,  # 'O'
+        46: 1,  # 'P'
+        43: 2,  # 'R'
+        33: 2,  # 'S'
+        37: 2,  # 'T'
+        57: 0,  # 'U'
+        48: 1,  # 'V'
+        55: 0,  # 'Y'
+        52: 1,  # 'Z'
+        2: 0,  # 'a'
+        18: 1,  # 'b'
+        26: 1,  # 'c'
+        17: 1,  # 'd'
+        1: 0,  # 'e'
+        27: 0,  # 'f'
+        12: 1,  # 'g'
+        20: 1,  # 'h'
+        9: 0,  # 'i'
+        22: 1,  # 'j'
+        7: 1,  # 'k'
+        6: 2,  # 'l'
+        13: 1,  # 'm'
+        4: 2,  # 'n'
+        8: 0,  # 'o'
+        23: 1,  # 'p'
+        10: 2,  # 'r'
+        5: 3,  # 's'
+        3: 1,  # 't'
+        21: 0,  # 'u'
+        19: 1,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 0,  # 'z'
+        51: 0,  # 'Á'
+        44: 1,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 0,  # 'á'
+        15: 0,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    61: {  # 'Í'
+        28: 0,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 0,  # 'E'
+        50: 1,  # 'F'
+        49: 1,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 1,  # 'J'
+        36: 0,  # 'K'
+        41: 1,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 0,  # 'O'
+        46: 1,  # 'P'
+        43: 1,  # 'R'
+        33: 1,  # 'S'
+        37: 1,  # 'T'
+        57: 0,  # 'U'
+        48: 1,  # 'V'
+        55: 0,  # 'Y'
+        52: 1,  # 'Z'
+        2: 0,  # 'a'
+        18: 0,  # 'b'
+        26: 0,  # 'c'
+        17: 0,  # 'd'
+        1: 0,  # 'e'
+        27: 0,  # 'f'
+        12: 2,  # 'g'
+        20: 0,  # 'h'
+        9: 0,  # 'i'
+        22: 0,  # 'j'
+        7: 0,  # 'k'
+        6: 0,  # 'l'
+        13: 1,  # 'm'
+        4: 0,  # 'n'
+        8: 0,  # 'o'
+        23: 0,  # 'p'
+        10: 1,  # 'r'
+        5: 0,  # 's'
+        3: 1,  # 't'
+        21: 0,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 1,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 0,  # 'á'
+        15: 0,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    58: {  # 'Ó'
+        28: 1,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 0,  # 'E'
+        50: 1,  # 'F'
+        49: 1,  # 'G'
+        38: 1,  # 'H'
+        39: 1,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 2,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 0,  # 'O'
+        46: 1,  # 'P'
+        43: 1,  # 'R'
+        33: 1,  # 'S'
+        37: 1,  # 'T'
+        57: 0,  # 'U'
+        48: 1,  # 'V'
+        55: 0,  # 'Y'
+        52: 1,  # 'Z'
+        2: 0,  # 'a'
+        18: 1,  # 'b'
+        26: 1,  # 'c'
+        17: 1,  # 'd'
+        1: 0,  # 'e'
+        27: 0,  # 'f'
+        12: 0,  # 'g'
+        20: 2,  # 'h'
+        9: 0,  # 'i'
+        22: 0,  # 'j'
+        7: 1,  # 'k'
+        6: 1,  # 'l'
+        13: 0,  # 'm'
+        4: 1,  # 'n'
+        8: 0,  # 'o'
+        23: 1,  # 'p'
+        10: 1,  # 'r'
+        5: 1,  # 's'
+        3: 0,  # 't'
+        21: 0,  # 'u'
+        19: 1,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 1,  # 'z'
+        51: 0,  # 'Á'
+        44: 1,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 0,  # 'á'
+        15: 0,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    59: {  # 'Ö'
+        28: 0,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 1,  # 'G'
+        38: 1,  # 'H'
+        39: 0,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 1,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 0,  # 'O'
+        46: 1,  # 'P'
+        43: 1,  # 'R'
+        33: 1,  # 'S'
+        37: 1,  # 'T'
+        57: 0,  # 'U'
+        48: 1,  # 'V'
+        55: 0,  # 'Y'
+        52: 1,  # 'Z'
+        2: 0,  # 'a'
+        18: 0,  # 'b'
+        26: 1,  # 'c'
+        17: 1,  # 'd'
+        1: 0,  # 'e'
+        27: 0,  # 'f'
+        12: 0,  # 'g'
+        20: 0,  # 'h'
+        9: 0,  # 'i'
+        22: 0,  # 'j'
+        7: 1,  # 'k'
+        6: 1,  # 'l'
+        13: 1,  # 'm'
+        4: 1,  # 'n'
+        8: 0,  # 'o'
+        23: 0,  # 'p'
+        10: 2,  # 'r'
+        5: 1,  # 's'
+        3: 1,  # 't'
+        21: 0,  # 'u'
+        19: 1,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 1,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 0,  # 'á'
+        15: 0,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    60: {  # 'Ú'
+        28: 0,  # 'A'
+        40: 1,  # 'B'
+        54: 1,  # 'C'
+        45: 1,  # 'D'
+        32: 0,  # 'E'
+        50: 1,  # 'F'
+        49: 1,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 1,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 1,  # 'R'
+        33: 1,  # 'S'
+        37: 1,  # 'T'
+        57: 0,  # 'U'
+        48: 1,  # 'V'
+        55: 0,  # 'Y'
+        52: 1,  # 'Z'
+        2: 0,  # 'a'
+        18: 0,  # 'b'
+        26: 0,  # 'c'
+        17: 0,  # 'd'
+        1: 0,  # 'e'
+        27: 0,  # 'f'
+        12: 2,  # 'g'
+        20: 0,  # 'h'
+        9: 0,  # 'i'
+        22: 2,  # 'j'
+        7: 0,  # 'k'
+        6: 0,  # 'l'
+        13: 0,  # 'm'
+        4: 1,  # 'n'
+        8: 0,  # 'o'
+        23: 0,  # 'p'
+        10: 1,  # 'r'
+        5: 1,  # 's'
+        3: 1,  # 't'
+        21: 0,  # 'u'
+        19: 0,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 0,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 0,  # 'á'
+        15: 0,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    63: {  # 'Ü'
+        28: 0,  # 'A'
+        40: 1,  # 'B'
+        54: 0,  # 'C'
+        45: 1,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 1,  # 'G'
+        38: 1,  # 'H'
+        39: 0,  # 'I'
+        53: 1,  # 'J'
+        36: 1,  # 'K'
+        41: 1,  # 'L'
+        34: 1,  # 'M'
+        35: 1,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 1,  # 'R'
+        33: 1,  # 'S'
+        37: 1,  # 'T'
+        57: 0,  # 'U'
+        48: 1,  # 'V'
+        55: 0,  # 'Y'
+        52: 1,  # 'Z'
+        2: 0,  # 'a'
+        18: 1,  # 'b'
+        26: 0,  # 'c'
+        17: 1,  # 'd'
+        1: 0,  # 'e'
+        27: 0,  # 'f'
+        12: 1,  # 'g'
+        20: 0,  # 'h'
+        9: 0,  # 'i'
+        22: 0,  # 'j'
+        7: 0,  # 'k'
+        6: 1,  # 'l'
+        13: 0,  # 'm'
+        4: 1,  # 'n'
+        8: 0,  # 'o'
+        23: 0,  # 'p'
+        10: 1,  # 'r'
+        5: 1,  # 's'
+        3: 1,  # 't'
+        21: 0,  # 'u'
+        19: 1,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 1,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 0,  # 'á'
+        15: 0,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    14: {  # 'á'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 1,  # 'a'
+        18: 3,  # 'b'
+        26: 3,  # 'c'
+        17: 3,  # 'd'
+        1: 1,  # 'e'
+        27: 2,  # 'f'
+        12: 3,  # 'g'
+        20: 2,  # 'h'
+        9: 2,  # 'i'
+        22: 3,  # 'j'
+        7: 3,  # 'k'
+        6: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        8: 1,  # 'o'
+        23: 2,  # 'p'
+        10: 3,  # 'r'
+        5: 3,  # 's'
+        3: 3,  # 't'
+        21: 2,  # 'u'
+        19: 3,  # 'v'
+        62: 0,  # 'x'
+        16: 1,  # 'y'
+        11: 3,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 1,  # 'á'
+        15: 2,  # 'é'
+        30: 1,  # 'í'
+        25: 0,  # 'ó'
+        24: 1,  # 'ö'
+        31: 0,  # 'ú'
+        29: 1,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    15: {  # 'é'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 1,  # 'a'
+        18: 3,  # 'b'
+        26: 2,  # 'c'
+        17: 3,  # 'd'
+        1: 1,  # 'e'
+        27: 1,  # 'f'
+        12: 3,  # 'g'
+        20: 3,  # 'h'
+        9: 2,  # 'i'
+        22: 2,  # 'j'
+        7: 3,  # 'k'
+        6: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        8: 1,  # 'o'
+        23: 3,  # 'p'
+        10: 3,  # 'r'
+        5: 3,  # 's'
+        3: 3,  # 't'
+        21: 0,  # 'u'
+        19: 3,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 3,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 1,  # 'á'
+        15: 1,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 1,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    30: {  # 'í'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 0,  # 'a'
+        18: 1,  # 'b'
+        26: 2,  # 'c'
+        17: 1,  # 'd'
+        1: 0,  # 'e'
+        27: 1,  # 'f'
+        12: 3,  # 'g'
+        20: 0,  # 'h'
+        9: 0,  # 'i'
+        22: 1,  # 'j'
+        7: 1,  # 'k'
+        6: 2,  # 'l'
+        13: 2,  # 'm'
+        4: 3,  # 'n'
+        8: 0,  # 'o'
+        23: 1,  # 'p'
+        10: 3,  # 'r'
+        5: 2,  # 's'
+        3: 3,  # 't'
+        21: 0,  # 'u'
+        19: 3,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 2,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 0,  # 'á'
+        15: 0,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    25: {  # 'ó'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 2,  # 'a'
+        18: 3,  # 'b'
+        26: 2,  # 'c'
+        17: 3,  # 'd'
+        1: 1,  # 'e'
+        27: 2,  # 'f'
+        12: 2,  # 'g'
+        20: 2,  # 'h'
+        9: 2,  # 'i'
+        22: 2,  # 'j'
+        7: 3,  # 'k'
+        6: 3,  # 'l'
+        13: 2,  # 'm'
+        4: 3,  # 'n'
+        8: 1,  # 'o'
+        23: 2,  # 'p'
+        10: 3,  # 'r'
+        5: 3,  # 's'
+        3: 3,  # 't'
+        21: 1,  # 'u'
+        19: 2,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 3,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 1,  # 'á'
+        15: 1,  # 'é'
+        30: 1,  # 'í'
+        25: 0,  # 'ó'
+        24: 1,  # 'ö'
+        31: 1,  # 'ú'
+        29: 1,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    24: {  # 'ö'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 0,  # 'a'
+        18: 3,  # 'b'
+        26: 1,  # 'c'
+        17: 2,  # 'd'
+        1: 0,  # 'e'
+        27: 1,  # 'f'
+        12: 2,  # 'g'
+        20: 1,  # 'h'
+        9: 0,  # 'i'
+        22: 1,  # 'j'
+        7: 3,  # 'k'
+        6: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        8: 0,  # 'o'
+        23: 2,  # 'p'
+        10: 3,  # 'r'
+        5: 3,  # 's'
+        3: 3,  # 't'
+        21: 0,  # 'u'
+        19: 3,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 3,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 0,  # 'á'
+        15: 0,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    31: {  # 'ú'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 1,  # 'a'
+        18: 1,  # 'b'
+        26: 2,  # 'c'
+        17: 1,  # 'd'
+        1: 1,  # 'e'
+        27: 2,  # 'f'
+        12: 3,  # 'g'
+        20: 1,  # 'h'
+        9: 1,  # 'i'
+        22: 3,  # 'j'
+        7: 1,  # 'k'
+        6: 3,  # 'l'
+        13: 1,  # 'm'
+        4: 2,  # 'n'
+        8: 0,  # 'o'
+        23: 1,  # 'p'
+        10: 3,  # 'r'
+        5: 3,  # 's'
+        3: 2,  # 't'
+        21: 1,  # 'u'
+        19: 1,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 2,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 1,  # 'á'
+        15: 1,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    29: {  # 'ü'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 1,  # 'a'
+        18: 1,  # 'b'
+        26: 1,  # 'c'
+        17: 2,  # 'd'
+        1: 1,  # 'e'
+        27: 1,  # 'f'
+        12: 3,  # 'g'
+        20: 2,  # 'h'
+        9: 1,  # 'i'
+        22: 1,  # 'j'
+        7: 3,  # 'k'
+        6: 3,  # 'l'
+        13: 1,  # 'm'
+        4: 3,  # 'n'
+        8: 0,  # 'o'
+        23: 1,  # 'p'
+        10: 2,  # 'r'
+        5: 2,  # 's'
+        3: 2,  # 't'
+        21: 0,  # 'u'
+        19: 2,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 2,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 0,  # 'á'
+        15: 1,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    42: {  # 'ő'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 1,  # 'a'
+        18: 2,  # 'b'
+        26: 1,  # 'c'
+        17: 2,  # 'd'
+        1: 1,  # 'e'
+        27: 1,  # 'f'
+        12: 1,  # 'g'
+        20: 1,  # 'h'
+        9: 1,  # 'i'
+        22: 1,  # 'j'
+        7: 2,  # 'k'
+        6: 3,  # 'l'
+        13: 1,  # 'm'
+        4: 2,  # 'n'
+        8: 1,  # 'o'
+        23: 1,  # 'p'
+        10: 2,  # 'r'
+        5: 2,  # 's'
+        3: 2,  # 't'
+        21: 1,  # 'u'
+        19: 1,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 2,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 0,  # 'á'
+        15: 1,  # 'é'
+        30: 1,  # 'í'
+        25: 0,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 1,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+    56: {  # 'ű'
+        28: 0,  # 'A'
+        40: 0,  # 'B'
+        54: 0,  # 'C'
+        45: 0,  # 'D'
+        32: 0,  # 'E'
+        50: 0,  # 'F'
+        49: 0,  # 'G'
+        38: 0,  # 'H'
+        39: 0,  # 'I'
+        53: 0,  # 'J'
+        36: 0,  # 'K'
+        41: 0,  # 'L'
+        34: 0,  # 'M'
+        35: 0,  # 'N'
+        47: 0,  # 'O'
+        46: 0,  # 'P'
+        43: 0,  # 'R'
+        33: 0,  # 'S'
+        37: 0,  # 'T'
+        57: 0,  # 'U'
+        48: 0,  # 'V'
+        55: 0,  # 'Y'
+        52: 0,  # 'Z'
+        2: 1,  # 'a'
+        18: 1,  # 'b'
+        26: 0,  # 'c'
+        17: 1,  # 'd'
+        1: 1,  # 'e'
+        27: 1,  # 'f'
+        12: 1,  # 'g'
+        20: 1,  # 'h'
+        9: 1,  # 'i'
+        22: 1,  # 'j'
+        7: 1,  # 'k'
+        6: 1,  # 'l'
+        13: 0,  # 'm'
+        4: 2,  # 'n'
+        8: 0,  # 'o'
+        23: 0,  # 'p'
+        10: 1,  # 'r'
+        5: 1,  # 's'
+        3: 1,  # 't'
+        21: 0,  # 'u'
+        19: 1,  # 'v'
+        62: 0,  # 'x'
+        16: 0,  # 'y'
+        11: 2,  # 'z'
+        51: 0,  # 'Á'
+        44: 0,  # 'É'
+        61: 0,  # 'Í'
+        58: 0,  # 'Ó'
+        59: 0,  # 'Ö'
+        60: 0,  # 'Ú'
+        63: 0,  # 'Ü'
+        14: 0,  # 'á'
+        15: 0,  # 'é'
+        30: 0,  # 'í'
+        25: 0,  # 'ó'
+        24: 0,  # 'ö'
+        31: 0,  # 'ú'
+        29: 0,  # 'ü'
+        42: 0,  # 'ő'
+        56: 0,  # 'ű'
+    },
+}
+
+# 255: Undefined characters that did not exist in training text
+# 254: Carriage Return / Line Feed (the '\r' and '\n' bytes)
+# 253: symbol (punctuation) that does not belong to word
+# 252: 0 - 9
+# 251: Control characters
+
+# Character Mapping Table(s):
+WINDOWS_1250_HUNGARIAN_CHAR_TO_ORDER = {
+    0: 255,  # '\x00'
+    1: 255,  # '\x01'
+    2: 255,  # '\x02'
+    3: 255,  # '\x03'
+    4: 255,  # '\x04'
+    5: 255,  # '\x05'
+    6: 255,  # '\x06'
+    7: 255,  # '\x07'
+    8: 255,  # '\x08'
+    9: 255,  # '\t'
+    10: 254,  # '\n'
+    11: 255,  # '\x0b'
+    12: 255,  # '\x0c'
+    13: 254,  # '\r'
+    14: 255,  # '\x0e'
+    15: 255,  # '\x0f'
+    16: 255,  # '\x10'
+    17: 255,  # '\x11'
+    18: 255,  # '\x12'
+    19: 255,  # '\x13'
+    20: 255,  # '\x14'
+    21: 255,  # '\x15'
+    22: 255,  # '\x16'
+    23: 255,  # '\x17'
+    24: 255,  # '\x18'
+    25: 255,  # '\x19'
+    26: 255,  # '\x1a'
+    27: 255,  # '\x1b'
+    28: 255,  # '\x1c'
+    29: 255,  # '\x1d'
+    30: 255,  # '\x1e'
+    31: 255,  # '\x1f'
+    32: 253,  # ' '
+    33: 253,  # '!'
+    34: 253,  # '"'
+    35: 253,  # '#'
+    36: 253,  # '$'
+    37: 253,  # '%'
+    38: 253,  # '&'
+    39: 253,  # "'"
+    40: 253,  # '('
+    41: 253,  # ')'
+    42: 253,  # '*'
+    43: 253,  # '+'
+    44: 253,  # ','
+    45: 253,  # '-'
+    46: 253,  # '.'
+    47: 253,  # '/'
+    48: 252,  # '0'
+    49: 252,  # '1'
+    50: 252,  # '2'
+    51: 252,  # '3'
+    52: 252,  # '4'
+    53: 252,  # '5'
+    54: 252,  # '6'
+    55: 252,  # '7'
+    56: 252,  # '8'
+    57: 252,  # '9'
+    58: 253,  # ':'
+    59: 253,  # ';'
+    60: 253,  # '<'
+    61: 253,  # '='
+    62: 253,  # '>'
+    63: 253,  # '?'
+    64: 253,  # '@'
+    65: 28,  # 'A'
+    66: 40,  # 'B'
+    67: 54,  # 'C'
+    68: 45,  # 'D'
+    69: 32,  # 'E'
+    70: 50,  # 'F'
+    71: 49,  # 'G'
+    72: 38,  # 'H'
+    73: 39,  # 'I'
+    74: 53,  # 'J'
+    75: 36,  # 'K'
+    76: 41,  # 'L'
+    77: 34,  # 'M'
+    78: 35,  # 'N'
+    79: 47,  # 'O'
+    80: 46,  # 'P'
+    81: 72,  # 'Q'
+    82: 43,  # 'R'
+    83: 33,  # 'S'
+    84: 37,  # 'T'
+    85: 57,  # 'U'
+    86: 48,  # 'V'
+    87: 64,  # 'W'
+    88: 68,  # 'X'
+    89: 55,  # 'Y'
+    90: 52,  # 'Z'
+    91: 253,  # '['
+    92: 253,  # '\\'
+    93: 253,  # ']'
+    94: 253,  # '^'
+    95: 253,  # '_'
+    96: 253,  # '`'
+    97: 2,  # 'a'
+    98: 18,  # 'b'
+    99: 26,  # 'c'
+    100: 17,  # 'd'
+    101: 1,  # 'e'
+    102: 27,  # 'f'
+    103: 12,  # 'g'
+    104: 20,  # 'h'
+    105: 9,  # 'i'
+    106: 22,  # 'j'
+    107: 7,  # 'k'
+    108: 6,  # 'l'
+    109: 13,  # 'm'
+    110: 4,  # 'n'
+    111: 8,  # 'o'
+    112: 23,  # 'p'
+    113: 67,  # 'q'
+    114: 10,  # 'r'
+    115: 5,  # 's'
+    116: 3,  # 't'
+    117: 21,  # 'u'
+    118: 19,  # 'v'
+    119: 65,  # 'w'
+    120: 62,  # 'x'
+    121: 16,  # 'y'
+    122: 11,  # 'z'
+    123: 253,  # '{'
+    124: 253,  # '|'
+    125: 253,  # '}'
+    126: 253,  # '~'
+    127: 253,  # '\x7f'
+    128: 161,  # '€'
+    129: 162,  # None
+    130: 163,  # '‚'
+    131: 164,  # None
+    132: 165,  # '„'
+    133: 166,  # '…'
+    134: 167,  # '†'
+    135: 168,  # '‡'
+    136: 169,  # None
+    137: 170,  # '‰'
+    138: 171,  # 'Š'
+    139: 172,  # '‹'
+    140: 173,  # 'Ś'
+    141: 174,  # 'Ť'
+    142: 175,  # 'Ž'
+    143: 176,  # 'Ź'
+    144: 177,  # None
+    145: 178,  # '‘'
+    146: 179,  # '’'
+    147: 180,  # '“'
+    148: 78,  # '”'
+    149: 181,  # '•'
+    150: 69,  # '–'
+    151: 182,  # '—'
+    152: 183,  # None
+    153: 184,  # '™'
+    154: 185,  # 'š'
+    155: 186,  # '›'
+    156: 187,  # 'ś'
+    157: 188,  # 'ť'
+    158: 189,  # 'ž'
+    159: 190,  # 'ź'
+    160: 191,  # '\xa0'
+    161: 192,  # 'ˇ'
+    162: 193,  # '˘'
+    163: 194,  # 'Ł'
+    164: 195,  # '¤'
+    165: 196,  # 'Ą'
+    166: 197,  # '¦'
+    167: 76,  # '§'
+    168: 198,  # '¨'
+    169: 199,  # '©'
+    170: 200,  # 'Ş'
+    171: 201,  # '«'
+    172: 202,  # '¬'
+    173: 203,  # '\xad'
+    174: 204,  # '®'
+    175: 205,  # 'Ż'
+    176: 81,  # '°'
+    177: 206,  # '±'
+    178: 207,  # '˛'
+    179: 208,  # 'ł'
+    180: 209,  # '´'
+    181: 210,  # 'µ'
+    182: 211,  # '¶'
+    183: 212,  # '·'
+    184: 213,  # '¸'
+    185: 214,  # 'ą'
+    186: 215,  # 'ş'
+    187: 216,  # '»'
+    188: 217,  # 'Ľ'
+    189: 218,  # '˝'
+    190: 219,  # 'ľ'
+    191: 220,  # 'ż'
+    192: 221,  # 'Ŕ'
+    193: 51,  # 'Á'
+    194: 83,  # 'Â'
+    195: 222,  # 'Ă'
+    196: 80,  # 'Ä'
+    197: 223,  # 'Ĺ'
+    198: 224,  # 'Ć'
+    199: 225,  # 'Ç'
+    200: 226,  # 'Č'
+    201: 44,  # 'É'
+    202: 227,  # 'Ę'
+    203: 228,  # 'Ë'
+    204: 229,  # 'Ě'
+    205: 61,  # 'Í'
+    206: 230,  # 'Î'
+    207: 231,  # 'Ď'
+    208: 232,  # 'Đ'
+    209: 233,  # 'Ń'
+    210: 234,  # 'Ň'
+    211: 58,  # 'Ó'
+    212: 235,  # 'Ô'
+    213: 66,  # 'Ő'
+    214: 59,  # 'Ö'
+    215: 236,  # '×'
+    216: 237,  # 'Ř'
+    217: 238,  # 'Ů'
+    218: 60,  # 'Ú'
+    219: 70,  # 'Ű'
+    220: 63,  # 'Ü'
+    221: 239,  # 'Ý'
+    222: 240,  # 'Ţ'
+    223: 241,  # 'ß'
+    224: 84,  # 'ŕ'
+    225: 14,  # 'á'
+    226: 75,  # 'â'
+    227: 242,  # 'ă'
+    228: 71,  # 'ä'
+    229: 82,  # 'ĺ'
+    230: 243,  # 'ć'
+    231: 73,  # 'ç'
+    232: 244,  # 'č'
+    233: 15,  # 'é'
+    234: 85,  # 'ę'
+    235: 79,  # 'ë'
+    236: 86,  # 'ě'
+    237: 30,  # 'í'
+    238: 77,  # 'î'
+    239: 87,  # 'ď'
+    240: 245,  # 'đ'
+    241: 246,  # 'ń'
+    242: 247,  # 'ň'
+    243: 25,  # 'ó'
+    244: 74,  # 'ô'
+    245: 42,  # 'ő'
+    246: 24,  # 'ö'
+    247: 248,  # '÷'
+    248: 249,  # 'ř'
+    249: 250,  # 'ů'
+    250: 31,  # 'ú'
+    251: 56,  # 'ű'
+    252: 29,  # 'ü'
+    253: 251,  # 'ý'
+    254: 252,  # 'ţ'
+    255: 253,  # '˙'
+}
+
+WINDOWS_1250_HUNGARIAN_MODEL = SingleByteCharSetModel(
+    charset_name="windows-1250",
+    language="Hungarian",
+    char_to_order_map=WINDOWS_1250_HUNGARIAN_CHAR_TO_ORDER,
+    language_model=HUNGARIAN_LANG_MODEL,
+    typical_positive_ratio=0.947368,
+    keep_ascii_letters=True,
+    alphabet="ABCDEFGHIJKLMNOPRSTUVZabcdefghijklmnoprstuvzÁÉÍÓÖÚÜáéíóöúüŐőŰű",
+)
+
+ISO_8859_2_HUNGARIAN_CHAR_TO_ORDER = {
+    0: 255,  # '\x00'
+    1: 255,  # '\x01'
+    2: 255,  # '\x02'
+    3: 255,  # '\x03'
+    4: 255,  # '\x04'
+    5: 255,  # '\x05'
+    6: 255,  # '\x06'
+    7: 255,  # '\x07'
+    8: 255,  # '\x08'
+    9: 255,  # '\t'
+    10: 254,  # '\n'
+    11: 255,  # '\x0b'
+    12: 255,  # '\x0c'
+    13: 254,  # '\r'
+    14: 255,  # '\x0e'
+    15: 255,  # '\x0f'
+    16: 255,  # '\x10'
+    17: 255,  # '\x11'
+    18: 255,  # '\x12'
+    19: 255,  # '\x13'
+    20: 255,  # '\x14'
+    21: 255,  # '\x15'
+    22: 255,  # '\x16'
+    23: 255,  # '\x17'
+    24: 255,  # '\x18'
+    25: 255,  # '\x19'
+    26: 255,  # '\x1a'
+    27: 255,  # '\x1b'
+    28: 255,  # '\x1c'
+    29: 255,  # '\x1d'
+    30: 255,  # '\x1e'
+    31: 255,  # '\x1f'
+    32: 253,  # ' '
+    33: 253,  # '!'
+    34: 253,  # '"'
+    35: 253,  # '#'
+    36: 253,  # '$'
+    37: 253,  # '%'
+    38: 253,  # '&'
+    39: 253,  # "'"
+    40: 253,  # '('
+    41: 253,  # ')'
+    42: 253,  # '*'
+    43: 253,  # '+'
+    44: 253,  # ','
+    45: 253,  # '-'
+    46: 253,  # '.'
+    47: 253,  # '/'
+    48: 252,  # '0'
+    49: 252,  # '1'
+    50: 252,  # '2'
+    51: 252,  # '3'
+    52: 252,  # '4'
+    53: 252,  # '5'
+    54: 252,  # '6'
+    55: 252,  # '7'
+    56: 252,  # '8'
+    57: 252,  # '9'
+    58: 253,  # ':'
+    59: 253,  # ';'
+    60: 253,  # '<'
+    61: 253,  # '='
+    62: 253,  # '>'
+    63: 253,  # '?'
+    64: 253,  # '@'
+    65: 28,  # 'A'
+    66: 40,  # 'B'
+    67: 54,  # 'C'
+    68: 45,  # 'D'
+    69: 32,  # 'E'
+    70: 50,  # 'F'
+    71: 49,  # 'G'
+    72: 38,  # 'H'
+    73: 39,  # 'I'
+    74: 53,  # 'J'
+    75: 36,  # 'K'
+    76: 41,  # 'L'
+    77: 34,  # 'M'
+    78: 35,  # 'N'
+    79: 47,  # 'O'
+    80: 46,  # 'P'
+    81: 71,  # 'Q'
+    82: 43,  # 'R'
+    83: 33,  # 'S'
+    84: 37,  # 'T'
+    85: 57,  # 'U'
+    86: 48,  # 'V'
+    87: 64,  # 'W'
+    88: 68,  # 'X'
+    89: 55,  # 'Y'
+    90: 52,  # 'Z'
+    91: 253,  # '['
+    92: 253,  # '\\'
+    93: 253,  # ']'
+    94: 253,  # '^'
+    95: 253,  # '_'
+    96: 253,  # '`'
+    97: 2,  # 'a'
+    98: 18,  # 'b'
+    99: 26,  # 'c'
+    100: 17,  # 'd'
+    101: 1,  # 'e'
+    102: 27,  # 'f'
+    103: 12,  # 'g'
+    104: 20,  # 'h'
+    105: 9,  # 'i'
+    106: 22,  # 'j'
+    107: 7,  # 'k'
+    108: 6,  # 'l'
+    109: 13,  # 'm'
+    110: 4,  # 'n'
+    111: 8,  # 'o'
+    112: 23,  # 'p'
+    113: 67,  # 'q'
+    114: 10,  # 'r'
+    115: 5,  # 's'
+    116: 3,  # 't'
+    117: 21,  # 'u'
+    118: 19,  # 'v'
+    119: 65,  # 'w'
+    120: 62,  # 'x'
+    121: 16,  # 'y'
+    122: 11,  # 'z'
+    123: 253,  # '{'
+    124: 253,  # '|'
+    125: 253,  # '}'
+    126: 253,  # '~'
+    127: 253,  # '\x7f'
+    128: 159,  # '\x80'
+    129: 160,  # '\x81'
+    130: 161,  # '\x82'
+    131: 162,  # '\x83'
+    132: 163,  # '\x84'
+    133: 164,  # '\x85'
+    134: 165,  # '\x86'
+    135: 166,  # '\x87'
+    136: 167,  # '\x88'
+    137: 168,  # '\x89'
+    138: 169,  # '\x8a'
+    139: 170,  # '\x8b'
+    140: 171,  # '\x8c'
+    141: 172,  # '\x8d'
+    142: 173,  # '\x8e'
+    143: 174,  # '\x8f'
+    144: 175,  # '\x90'
+    145: 176,  # '\x91'
+    146: 177,  # '\x92'
+    147: 178,  # '\x93'
+    148: 179,  # '\x94'
+    149: 180,  # '\x95'
+    150: 181,  # '\x96'
+    151: 182,  # '\x97'
+    152: 183,  # '\x98'
+    153: 184,  # '\x99'
+    154: 185,  # '\x9a'
+    155: 186,  # '\x9b'
+    156: 187,  # '\x9c'
+    157: 188,  # '\x9d'
+    158: 189,  # '\x9e'
+    159: 190,  # '\x9f'
+    160: 191,  # '\xa0'
+    161: 192,  # 'Ą'
+    162: 193,  # '˘'
+    163: 194,  # 'Ł'
+    164: 195,  # '¤'
+    165: 196,  # 'Ľ'
+    166: 197,  # 'Ś'
+    167: 75,  # '§'
+    168: 198,  # '¨'
+    169: 199,  # 'Š'
+    170: 200,  # 'Ş'
+    171: 201,  # 'Ť'
+    172: 202,  # 'Ź'
+    173: 203,  # '\xad'
+    174: 204,  # 'Ž'
+    175: 205,  # 'Ż'
+    176: 79,  # '°'
+    177: 206,  # 'ą'
+    178: 207,  # '˛'
+    179: 208,  # 'ł'
+    180: 209,  # '´'
+    181: 210,  # 'ľ'
+    182: 211,  # 'ś'
+    183: 212,  # 'ˇ'
+    184: 213,  # '¸'
+    185: 214,  # 'š'
+    186: 215,  # 'ş'
+    187: 216,  # 'ť'
+    188: 217,  # 'ź'
+    189: 218,  # '˝'
+    190: 219,  # 'ž'
+    191: 220,  # 'ż'
+    192: 221,  # 'Ŕ'
+    193: 51,  # 'Á'
+    194: 81,  # 'Â'
+    195: 222,  # 'Ă'
+    196: 78,  # 'Ä'
+    197: 223,  # 'Ĺ'
+    198: 224,  # 'Ć'
+    199: 225,  # 'Ç'
+    200: 226,  # 'Č'
+    201: 44,  # 'É'
+    202: 227,  # 'Ę'
+    203: 228,  # 'Ë'
+    204: 229,  # 'Ě'
+    205: 61,  # 'Í'
+    206: 230,  # 'Î'
+    207: 231,  # 'Ď'
+    208: 232,  # 'Đ'
+    209: 233,  # 'Ń'
+    210: 234,  # 'Ň'
+    211: 58,  # 'Ó'
+    212: 235,  # 'Ô'
+    213: 66,  # 'Ő'
+    214: 59,  # 'Ö'
+    215: 236,  # '×'
+    216: 237,  # 'Ř'
+    217: 238,  # 'Ů'
+    218: 60,  # 'Ú'
+    219: 69,  # 'Ű'
+    220: 63,  # 'Ü'
+    221: 239,  # 'Ý'
+    222: 240,  # 'Ţ'
+    223: 241,  # 'ß'
+    224: 82,  # 'ŕ'
+    225: 14,  # 'á'
+    226: 74,  # 'â'
+    227: 242,  # 'ă'
+    228: 70,  # 'ä'
+    229: 80,  # 'ĺ'
+    230: 243,  # 'ć'
+    231: 72,  # 'ç'
+    232: 244,  # 'č'
+    233: 15,  # 'é'
+    234: 83,  # 'ę'
+    235: 77,  # 'ë'
+    236: 84,  # 'ě'
+    237: 30,  # 'í'
+    238: 76,  # 'î'
+    239: 85,  # 'ď'
+    240: 245,  # 'đ'
+    241: 246,  # 'ń'
+    242: 247,  # 'ň'
+    243: 25,  # 'ó'
+    244: 73,  # 'ô'
+    245: 42,  # 'ő'
+    246: 24,  # 'ö'
+    247: 248,  # '÷'
+    248: 249,  # 'ř'
+    249: 250,  # 'ů'
+    250: 31,  # 'ú'
+    251: 56,  # 'ű'
+    252: 29,  # 'ü'
+    253: 251,  # 'ý'
+    254: 252,  # 'ţ'
+    255: 253,  # '˙'
+}
+
+ISO_8859_2_HUNGARIAN_MODEL = SingleByteCharSetModel(
+    charset_name="ISO-8859-2",
+    language="Hungarian",
+    char_to_order_map=ISO_8859_2_HUNGARIAN_CHAR_TO_ORDER,
+    language_model=HUNGARIAN_LANG_MODEL,
+    typical_positive_ratio=0.947368,
+    keep_ascii_letters=True,
+    alphabet="ABCDEFGHIJKLMNOPRSTUVZabcdefghijklmnoprstuvzÁÉÍÓÖÚÜáéíóöúüŐőŰű",
+)
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langrussianmodel.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langrussianmodel.py
new file mode 100644
index 0000000..39a5388
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langrussianmodel.py
@@ -0,0 +1,5725 @@
+from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
+
+# 3: Positive
+# 2: Likely
+# 1: Unlikely
+# 0: Negative
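+# Illustrative note, assuming chardet's usual single-byte prober indexing
+# (outer key = order of the previous character, inner key = order of the
+# following character): RUSSIAN_LANG_MODEL[37][44] == 1 reads as "'А'
+# followed by 'Б' is Unlikely" under the legend above; the order values
+# themselves come from the corresponding *_CHAR_TO_ORDER maps.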
+
+RUSSIAN_LANG_MODEL = {
+    37: {  # 'А'
+        37: 0,  # 'А'
+        44: 1,  # 'Б'
+        33: 1,  # 'В'
+        46: 1,  # 'Г'
+        41: 1,  # 'Д'
+        48: 1,  # 'Е'
+        56: 1,  # 'Ж'
+        51: 1,  # 'З'
+        42: 1,  # 'И'
+        60: 1,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 2,  # 'Н'
+        34: 1,  # 'О'
+        35: 1,  # 'П'
+        45: 1,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 1,  # 'У'
+        53: 1,  # 'Ф'
+        55: 1,  # 'Х'
+        58: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        57: 1,  # 'Ш'
+        63: 1,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 1,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 1,  # 'а'
+        21: 2,  # 'б'
+        10: 2,  # 'в'
+        19: 2,  # 'г'
+        13: 2,  # 'д'
+        2: 0,  # 'е'
+        24: 1,  # 'ж'
+        20: 1,  # 'з'
+        4: 0,  # 'и'
+        23: 1,  # 'й'
+        11: 2,  # 'к'
+        8: 3,  # 'л'
+        12: 2,  # 'м'
+        5: 2,  # 'н'
+        1: 0,  # 'о'
+        15: 2,  # 'п'
+        9: 2,  # 'р'
+        7: 2,  # 'с'
+        6: 2,  # 'т'
+        14: 2,  # 'у'
+        39: 2,  # 'ф'
+        26: 2,  # 'х'
+        28: 0,  # 'ц'
+        22: 1,  # 'ч'
+        25: 2,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 1,  # 'э'
+        27: 0,  # 'ю'
+        16: 0,  # 'я'
+    },
+    44: {  # 'Б'
+        37: 1,  # 'А'
+        44: 0,  # 'Б'
+        33: 1,  # 'В'
+        46: 1,  # 'Г'
+        41: 0,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 0,  # 'П'
+        45: 1,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 1,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 2,  # 'а'
+        21: 0,  # 'б'
+        10: 0,  # 'в'
+        19: 0,  # 'г'
+        13: 1,  # 'д'
+        2: 3,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 2,  # 'л'
+        12: 0,  # 'м'
+        5: 0,  # 'н'
+        1: 3,  # 'о'
+        15: 0,  # 'п'
+        9: 2,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 2,  # 'ы'
+        17: 1,  # 'ь'
+        30: 2,  # 'э'
+        27: 1,  # 'ю'
+        16: 1,  # 'я'
+    },
+    33: {  # 'В'
+        37: 2,  # 'А'
+        44: 0,  # 'Б'
+        33: 1,  # 'В'
+        46: 0,  # 'Г'
+        41: 1,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 1,  # 'П'
+        45: 1,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 1,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 1,  # 'Ы'
+        61: 1,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 2,  # 'а'
+        21: 1,  # 'б'
+        10: 1,  # 'в'
+        19: 1,  # 'г'
+        13: 2,  # 'д'
+        2: 3,  # 'е'
+        24: 0,  # 'ж'
+        20: 2,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 1,  # 'к'
+        8: 2,  # 'л'
+        12: 2,  # 'м'
+        5: 2,  # 'н'
+        1: 3,  # 'о'
+        15: 2,  # 'п'
+        9: 2,  # 'р'
+        7: 3,  # 'с'
+        6: 2,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 1,  # 'х'
+        28: 1,  # 'ц'
+        22: 2,  # 'ч'
+        25: 1,  # 'ш'
+        29: 0,  # 'щ'
+        54: 1,  # 'ъ'
+        18: 3,  # 'ы'
+        17: 1,  # 'ь'
+        30: 2,  # 'э'
+        27: 0,  # 'ю'
+        16: 1,  # 'я'
+    },
+    46: {  # 'Г'
+        37: 1,  # 'А'
+        44: 1,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 1,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 1,  # 'П'
+        45: 1,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 2,  # 'а'
+        21: 0,  # 'б'
+        10: 1,  # 'в'
+        19: 0,  # 'г'
+        13: 2,  # 'д'
+        2: 2,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 2,  # 'л'
+        12: 1,  # 'м'
+        5: 1,  # 'н'
+        1: 3,  # 'о'
+        15: 0,  # 'п'
+        9: 2,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 1,  # 'ь'
+        30: 1,  # 'э'
+        27: 1,  # 'ю'
+        16: 0,  # 'я'
+    },
+    41: {  # 'Д'
+        37: 1,  # 'А'
+        44: 0,  # 'Б'
+        33: 1,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 2,  # 'Е'
+        56: 1,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 0,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 0,  # 'П'
+        45: 1,  # 'Р'
+        32: 1,  # 'С'
+        40: 0,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 1,  # 'Ы'
+        61: 1,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 3,  # 'а'
+        21: 0,  # 'б'
+        10: 2,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 2,  # 'е'
+        24: 3,  # 'ж'
+        20: 1,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 2,  # 'л'
+        12: 1,  # 'м'
+        5: 1,  # 'н'
+        1: 3,  # 'о'
+        15: 0,  # 'п'
+        9: 2,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 1,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 1,  # 'ы'
+        17: 1,  # 'ь'
+        30: 2,  # 'э'
+        27: 1,  # 'ю'
+        16: 1,  # 'я'
+    },
+    48: {  # 'Е'
+        37: 1,  # 'А'
+        44: 1,  # 'Б'
+        33: 1,  # 'В'
+        46: 1,  # 'Г'
+        41: 1,  # 'Д'
+        48: 1,  # 'Е'
+        56: 1,  # 'Ж'
+        51: 1,  # 'З'
+        42: 1,  # 'И'
+        60: 1,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 2,  # 'Н'
+        34: 1,  # 'О'
+        35: 1,  # 'П'
+        45: 2,  # 'Р'
+        32: 2,  # 'С'
+        40: 1,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 1,  # 'Х'
+        58: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        57: 1,  # 'Ш'
+        63: 1,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 0,  # 'а'
+        21: 0,  # 'б'
+        10: 2,  # 'в'
+        19: 2,  # 'г'
+        13: 2,  # 'д'
+        2: 2,  # 'е'
+        24: 1,  # 'ж'
+        20: 1,  # 'з'
+        4: 0,  # 'и'
+        23: 2,  # 'й'
+        11: 1,  # 'к'
+        8: 2,  # 'л'
+        12: 2,  # 'м'
+        5: 1,  # 'н'
+        1: 0,  # 'о'
+        15: 1,  # 'п'
+        9: 1,  # 'р'
+        7: 3,  # 'с'
+        6: 0,  # 'т'
+        14: 0,  # 'у'
+        39: 1,  # 'ф'
+        26: 1,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 1,  # 'ш'
+        29: 2,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 1,  # 'ю'
+        16: 0,  # 'я'
+    },
+    56: {  # 'Ж'
+        37: 1,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 1,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 1,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 2,  # 'а'
+        21: 1,  # 'б'
+        10: 0,  # 'в'
+        19: 1,  # 'г'
+        13: 1,  # 'д'
+        2: 2,  # 'е'
+        24: 1,  # 'ж'
+        20: 0,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 0,  # 'л'
+        12: 1,  # 'м'
+        5: 0,  # 'н'
+        1: 2,  # 'о'
+        15: 0,  # 'п'
+        9: 1,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 2,  # 'ю'
+        16: 0,  # 'я'
+    },
+    51: {  # 'З'
+        37: 1,  # 'А'
+        44: 0,  # 'Б'
+        33: 1,  # 'В'
+        46: 1,  # 'Г'
+        41: 1,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 0,  # 'П'
+        45: 1,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 1,  # 'Ы'
+        61: 1,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 1,  # 'б'
+        10: 2,  # 'в'
+        19: 0,  # 'г'
+        13: 2,  # 'д'
+        2: 2,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 1,  # 'л'
+        12: 1,  # 'м'
+        5: 2,  # 'н'
+        1: 2,  # 'о'
+        15: 0,  # 'п'
+        9: 1,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 1,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 1,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 0,  # 'ю'
+        16: 1,  # 'я'
+    },
+    42: {  # 'И'
+        37: 1,  # 'А'
+        44: 1,  # 'Б'
+        33: 1,  # 'В'
+        46: 1,  # 'Г'
+        41: 1,  # 'Д'
+        48: 2,  # 'Е'
+        56: 1,  # 'Ж'
+        51: 1,  # 'З'
+        42: 1,  # 'И'
+        60: 1,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 1,  # 'П'
+        45: 1,  # 'Р'
+        32: 2,  # 'С'
+        40: 1,  # 'Т'
+        52: 0,  # 'У'
+        53: 1,  # 'Ф'
+        55: 1,  # 'Х'
+        58: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 1,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 1,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 1,  # 'а'
+        21: 2,  # 'б'
+        10: 2,  # 'в'
+        19: 2,  # 'г'
+        13: 2,  # 'д'
+        2: 2,  # 'е'
+        24: 0,  # 'ж'
+        20: 2,  # 'з'
+        4: 1,  # 'и'
+        23: 0,  # 'й'
+        11: 1,  # 'к'
+        8: 2,  # 'л'
+        12: 2,  # 'м'
+        5: 2,  # 'н'
+        1: 1,  # 'о'
+        15: 1,  # 'п'
+        9: 2,  # 'р'
+        7: 2,  # 'с'
+        6: 2,  # 'т'
+        14: 1,  # 'у'
+        39: 1,  # 'ф'
+        26: 2,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 1,  # 'ш'
+        29: 1,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 1,  # 'ю'
+        16: 0,  # 'я'
+    },
+    60: {  # 'Й'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 1,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 0,  # 'М'
+        31: 1,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 1,  # 'Х'
+        58: 1,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 0,  # 'а'
+        21: 0,  # 'б'
+        10: 0,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 1,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 0,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 0,  # 'л'
+        12: 0,  # 'м'
+        5: 0,  # 'н'
+        1: 2,  # 'о'
+        15: 0,  # 'п'
+        9: 0,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 0,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 0,  # 'ю'
+        16: 0,  # 'я'
+    },
+    36: {  # 'К'
+        37: 2,  # 'А'
+        44: 0,  # 'Б'
+        33: 1,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 1,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 1,  # 'Л'
+        38: 0,  # 'М'
+        31: 1,  # 'Н'
+        34: 2,  # 'О'
+        35: 1,  # 'П'
+        45: 1,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 1,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 0,  # 'б'
+        10: 1,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 2,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 2,  # 'л'
+        12: 0,  # 'м'
+        5: 1,  # 'н'
+        1: 3,  # 'о'
+        15: 0,  # 'п'
+        9: 2,  # 'р'
+        7: 2,  # 'с'
+        6: 2,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 1,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 1,  # 'ы'
+        17: 1,  # 'ь'
+        30: 2,  # 'э'
+        27: 1,  # 'ю'
+        16: 0,  # 'я'
+    },
+    49: {  # 'Л'
+        37: 2,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 1,  # 'Г'
+        41: 0,  # 'Д'
+        48: 1,  # 'Е'
+        56: 1,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 0,  # 'Н'
+        34: 1,  # 'О'
+        35: 1,  # 'П'
+        45: 0,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 1,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 1,  # 'Ы'
+        61: 1,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 1,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 2,  # 'а'
+        21: 0,  # 'б'
+        10: 0,  # 'в'
+        19: 1,  # 'г'
+        13: 0,  # 'д'
+        2: 2,  # 'е'
+        24: 1,  # 'ж'
+        20: 0,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 1,  # 'л'
+        12: 0,  # 'м'
+        5: 1,  # 'н'
+        1: 2,  # 'о'
+        15: 0,  # 'п'
+        9: 0,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 1,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 1,  # 'ы'
+        17: 1,  # 'ь'
+        30: 2,  # 'э'
+        27: 2,  # 'ю'
+        16: 1,  # 'я'
+    },
+    38: {  # 'М'
+        37: 1,  # 'А'
+        44: 1,  # 'Б'
+        33: 1,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 1,  # 'П'
+        45: 1,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 1,  # 'У'
+        53: 1,  # 'Ф'
+        55: 1,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 1,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 1,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 3,  # 'а'
+        21: 0,  # 'б'
+        10: 0,  # 'в'
+        19: 1,  # 'г'
+        13: 0,  # 'д'
+        2: 2,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 1,  # 'л'
+        12: 1,  # 'м'
+        5: 2,  # 'н'
+        1: 3,  # 'о'
+        15: 0,  # 'п'
+        9: 1,  # 'р'
+        7: 1,  # 'с'
+        6: 0,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 3,  # 'ы'
+        17: 1,  # 'ь'
+        30: 2,  # 'э'
+        27: 1,  # 'ю'
+        16: 1,  # 'я'
+    },
+    31: {  # 'Н'
+        37: 2,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 1,  # 'Г'
+        41: 1,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 1,  # 'З'
+        42: 2,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 0,  # 'П'
+        45: 1,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 1,  # 'У'
+        53: 1,  # 'Ф'
+        55: 1,  # 'Х'
+        58: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 1,  # 'Ы'
+        61: 1,  # 'Ь'
+        47: 1,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 3,  # 'а'
+        21: 0,  # 'б'
+        10: 0,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 3,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 0,  # 'л'
+        12: 0,  # 'м'
+        5: 0,  # 'н'
+        1: 3,  # 'о'
+        15: 0,  # 'п'
+        9: 1,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 3,  # 'у'
+        39: 0,  # 'ф'
+        26: 1,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 1,  # 'ы'
+        17: 2,  # 'ь'
+        30: 1,  # 'э'
+        27: 1,  # 'ю'
+        16: 1,  # 'я'
+    },
+    34: {  # 'О'
+        37: 0,  # 'А'
+        44: 1,  # 'Б'
+        33: 1,  # 'В'
+        46: 1,  # 'Г'
+        41: 2,  # 'Д'
+        48: 1,  # 'Е'
+        56: 1,  # 'Ж'
+        51: 1,  # 'З'
+        42: 1,  # 'И'
+        60: 1,  # 'Й'
+        36: 1,  # 'К'
+        49: 2,  # 'Л'
+        38: 1,  # 'М'
+        31: 2,  # 'Н'
+        34: 1,  # 'О'
+        35: 1,  # 'П'
+        45: 2,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 1,  # 'У'
+        53: 1,  # 'Ф'
+        55: 1,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 1,  # 'Ч'
+        57: 1,  # 'Ш'
+        63: 1,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 1,  # 'а'
+        21: 2,  # 'б'
+        10: 1,  # 'в'
+        19: 2,  # 'г'
+        13: 2,  # 'д'
+        2: 0,  # 'е'
+        24: 1,  # 'ж'
+        20: 1,  # 'з'
+        4: 0,  # 'и'
+        23: 1,  # 'й'
+        11: 2,  # 'к'
+        8: 2,  # 'л'
+        12: 1,  # 'м'
+        5: 3,  # 'н'
+        1: 0,  # 'о'
+        15: 2,  # 'п'
+        9: 2,  # 'р'
+        7: 2,  # 'с'
+        6: 2,  # 'т'
+        14: 1,  # 'у'
+        39: 1,  # 'ф'
+        26: 2,  # 'х'
+        28: 1,  # 'ц'
+        22: 2,  # 'ч'
+        25: 2,  # 'ш'
+        29: 1,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 0,  # 'ю'
+        16: 0,  # 'я'
+    },
+    35: {  # 'П'
+        37: 1,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 1,  # 'Л'
+        38: 0,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 1,  # 'П'
+        45: 2,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 1,  # 'Ы'
+        61: 1,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 2,  # 'а'
+        21: 0,  # 'б'
+        10: 0,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 2,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 2,  # 'л'
+        12: 0,  # 'м'
+        5: 1,  # 'н'
+        1: 3,  # 'о'
+        15: 0,  # 'п'
+        9: 3,  # 'р'
+        7: 1,  # 'с'
+        6: 1,  # 'т'
+        14: 2,  # 'у'
+        39: 1,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 1,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 1,  # 'ы'
+        17: 2,  # 'ь'
+        30: 1,  # 'э'
+        27: 0,  # 'ю'
+        16: 2,  # 'я'
+    },
+    45: {  # 'Р'
+        37: 2,  # 'А'
+        44: 1,  # 'Б'
+        33: 1,  # 'В'
+        46: 1,  # 'Г'
+        41: 1,  # 'Д'
+        48: 2,  # 'Е'
+        56: 1,  # 'Ж'
+        51: 0,  # 'З'
+        42: 2,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 2,  # 'О'
+        35: 0,  # 'П'
+        45: 1,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 1,  # 'Х'
+        58: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        57: 1,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 1,  # 'Ы'
+        61: 1,  # 'Ь'
+        47: 1,  # 'Э'
+        59: 1,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 3,  # 'а'
+        21: 0,  # 'б'
+        10: 1,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 2,  # 'е'
+        24: 1,  # 'ж'
+        20: 0,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 0,  # 'л'
+        12: 0,  # 'м'
+        5: 0,  # 'н'
+        1: 3,  # 'о'
+        15: 0,  # 'п'
+        9: 1,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 2,  # 'ы'
+        17: 0,  # 'ь'
+        30: 1,  # 'э'
+        27: 1,  # 'ю'
+        16: 2,  # 'я'
+    },
+    32: {  # 'С'
+        37: 1,  # 'А'
+        44: 1,  # 'Б'
+        33: 1,  # 'В'
+        46: 1,  # 'Г'
+        41: 1,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 1,  # 'П'
+        45: 1,  # 'Р'
+        32: 1,  # 'С'
+        40: 2,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 1,  # 'Х'
+        58: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        57: 1,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 1,  # 'Ы'
+        61: 1,  # 'Ь'
+        47: 1,  # 'Э'
+        59: 1,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 2,  # 'а'
+        21: 1,  # 'б'
+        10: 2,  # 'в'
+        19: 1,  # 'г'
+        13: 2,  # 'д'
+        2: 3,  # 'е'
+        24: 1,  # 'ж'
+        20: 1,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 2,  # 'к'
+        8: 2,  # 'л'
+        12: 2,  # 'м'
+        5: 2,  # 'н'
+        1: 2,  # 'о'
+        15: 2,  # 'п'
+        9: 2,  # 'р'
+        7: 1,  # 'с'
+        6: 3,  # 'т'
+        14: 2,  # 'у'
+        39: 1,  # 'ф'
+        26: 1,  # 'х'
+        28: 1,  # 'ц'
+        22: 1,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 1,  # 'ъ'
+        18: 1,  # 'ы'
+        17: 1,  # 'ь'
+        30: 2,  # 'э'
+        27: 1,  # 'ю'
+        16: 1,  # 'я'
+    },
+    40: {  # 'Т'
+        37: 1,  # 'А'
+        44: 0,  # 'Б'
+        33: 1,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 2,  # 'О'
+        35: 0,  # 'П'
+        45: 1,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 1,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 1,  # 'Ы'
+        61: 1,  # 'Ь'
+        47: 1,  # 'Э'
+        59: 1,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 3,  # 'а'
+        21: 1,  # 'б'
+        10: 2,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 3,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 1,  # 'к'
+        8: 1,  # 'л'
+        12: 0,  # 'м'
+        5: 0,  # 'н'
+        1: 3,  # 'о'
+        15: 0,  # 'п'
+        9: 2,  # 'р'
+        7: 1,  # 'с'
+        6: 0,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 1,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 3,  # 'ы'
+        17: 1,  # 'ь'
+        30: 2,  # 'э'
+        27: 1,  # 'ю'
+        16: 1,  # 'я'
+    },
+    52: {  # 'У'
+        37: 1,  # 'А'
+        44: 1,  # 'Б'
+        33: 1,  # 'В'
+        46: 1,  # 'Г'
+        41: 1,  # 'Д'
+        48: 1,  # 'Е'
+        56: 1,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 1,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 1,  # 'П'
+        45: 1,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 1,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 1,  # 'Ч'
+        57: 1,  # 'Ш'
+        63: 1,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 1,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 1,  # 'а'
+        21: 2,  # 'б'
+        10: 2,  # 'в'
+        19: 1,  # 'г'
+        13: 2,  # 'д'
+        2: 1,  # 'е'
+        24: 2,  # 'ж'
+        20: 2,  # 'з'
+        4: 2,  # 'и'
+        23: 1,  # 'й'
+        11: 1,  # 'к'
+        8: 2,  # 'л'
+        12: 2,  # 'м'
+        5: 1,  # 'н'
+        1: 2,  # 'о'
+        15: 1,  # 'п'
+        9: 2,  # 'р'
+        7: 2,  # 'с'
+        6: 2,  # 'т'
+        14: 0,  # 'у'
+        39: 1,  # 'ф'
+        26: 1,  # 'х'
+        28: 1,  # 'ц'
+        22: 2,  # 'ч'
+        25: 1,  # 'ш'
+        29: 1,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 2,  # 'э'
+        27: 1,  # 'ю'
+        16: 0,  # 'я'
+    },
+    53: {  # 'Ф'
+        37: 1,  # 'А'
+        44: 1,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 1,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 1,  # 'О'
+        35: 0,  # 'П'
+        45: 1,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 2,  # 'а'
+        21: 0,  # 'б'
+        10: 0,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 2,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 2,  # 'л'
+        12: 0,  # 'м'
+        5: 0,  # 'н'
+        1: 2,  # 'о'
+        15: 0,  # 'п'
+        9: 2,  # 'р'
+        7: 0,  # 'с'
+        6: 1,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 1,  # 'ь'
+        30: 2,  # 'э'
+        27: 0,  # 'ю'
+        16: 0,  # 'я'
+    },
+    55: {  # 'Х'
+        37: 1,  # 'А'
+        44: 0,  # 'Б'
+        33: 1,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 2,  # 'а'
+        21: 0,  # 'б'
+        10: 2,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 2,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 2,  # 'л'
+        12: 1,  # 'м'
+        5: 0,  # 'н'
+        1: 2,  # 'о'
+        15: 0,  # 'п'
+        9: 2,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 1,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 1,  # 'ь'
+        30: 1,  # 'э'
+        27: 0,  # 'ю'
+        16: 0,  # 'я'
+    },
+    58: {  # 'Ц'
+        37: 1,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 1,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 1,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 1,  # 'а'
+        21: 0,  # 'б'
+        10: 1,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 2,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 0,  # 'л'
+        12: 0,  # 'м'
+        5: 0,  # 'н'
+        1: 0,  # 'о'
+        15: 0,  # 'п'
+        9: 0,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 1,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 1,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 1,  # 'ю'
+        16: 0,  # 'я'
+    },
+    50: {  # 'Ч'
+        37: 1,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 1,  # 'Н'
+        34: 0,  # 'О'
+        35: 1,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 1,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 1,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 2,  # 'а'
+        21: 0,  # 'б'
+        10: 0,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 2,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 1,  # 'л'
+        12: 0,  # 'м'
+        5: 0,  # 'н'
+        1: 1,  # 'о'
+        15: 0,  # 'п'
+        9: 1,  # 'р'
+        7: 0,  # 'с'
+        6: 3,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 1,  # 'ь'
+        30: 0,  # 'э'
+        27: 0,  # 'ю'
+        16: 0,  # 'я'
+    },
+    57: {  # 'Ш'
+        37: 1,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 0,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 2,  # 'а'
+        21: 0,  # 'б'
+        10: 1,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 2,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 1,  # 'и'
+        23: 0,  # 'й'
+        11: 1,  # 'к'
+        8: 2,  # 'л'
+        12: 1,  # 'м'
+        5: 1,  # 'н'
+        1: 2,  # 'о'
+        15: 2,  # 'п'
+        9: 1,  # 'р'
+        7: 0,  # 'с'
+        6: 2,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 1,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 1,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 1,  # 'э'
+        27: 0,  # 'ю'
+        16: 0,  # 'я'
+    },
+    63: {  # 'Щ'
+        37: 1,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 1,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 1,  # 'а'
+        21: 0,  # 'б'
+        10: 0,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 1,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 1,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 0,  # 'л'
+        12: 0,  # 'м'
+        5: 0,  # 'н'
+        1: 1,  # 'о'
+        15: 0,  # 'п'
+        9: 0,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 1,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 0,  # 'ю'
+        16: 0,  # 'я'
+    },
+    62: {  # 'Ы'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 1,  # 'В'
+        46: 1,  # 'Г'
+        41: 0,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 1,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 0,  # 'О'
+        35: 1,  # 'П'
+        45: 1,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 1,  # 'Х'
+        58: 1,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 1,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 0,  # 'а'
+        21: 0,  # 'б'
+        10: 0,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 0,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 0,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 0,  # 'л'
+        12: 0,  # 'м'
+        5: 0,  # 'н'
+        1: 0,  # 'о'
+        15: 0,  # 'п'
+        9: 0,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 0,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 0,  # 'ю'
+        16: 0,  # 'я'
+    },
+    61: {  # 'Ь'
+        37: 0,  # 'А'
+        44: 1,  # 'Б'
+        33: 1,  # 'В'
+        46: 0,  # 'Г'
+        41: 1,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 0,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 1,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 1,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 1,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 1,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 0,  # 'а'
+        21: 0,  # 'б'
+        10: 0,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 0,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 0,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 0,  # 'л'
+        12: 0,  # 'м'
+        5: 0,  # 'н'
+        1: 0,  # 'о'
+        15: 0,  # 'п'
+        9: 0,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 0,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 0,  # 'ю'
+        16: 0,  # 'я'
+    },
+    47: {  # 'Э'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 1,  # 'В'
+        46: 0,  # 'Г'
+        41: 1,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 1,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 0,  # 'О'
+        35: 1,  # 'П'
+        45: 1,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 1,  # 'а'
+        21: 1,  # 'б'
+        10: 2,  # 'в'
+        19: 1,  # 'г'
+        13: 2,  # 'д'
+        2: 0,  # 'е'
+        24: 1,  # 'ж'
+        20: 0,  # 'з'
+        4: 0,  # 'и'
+        23: 2,  # 'й'
+        11: 2,  # 'к'
+        8: 2,  # 'л'
+        12: 2,  # 'м'
+        5: 2,  # 'н'
+        1: 0,  # 'о'
+        15: 1,  # 'п'
+        9: 2,  # 'р'
+        7: 1,  # 'с'
+        6: 3,  # 'т'
+        14: 1,  # 'у'
+        39: 1,  # 'ф'
+        26: 1,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 1,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 0,  # 'ю'
+        16: 0,  # 'я'
+    },
+    59: {  # 'Ю'
+        37: 1,  # 'А'
+        44: 1,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 1,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 1,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 1,  # 'Р'
+        32: 0,  # 'С'
+        40: 1,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 1,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 1,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 0,  # 'а'
+        21: 1,  # 'б'
+        10: 0,  # 'в'
+        19: 1,  # 'г'
+        13: 1,  # 'д'
+        2: 0,  # 'е'
+        24: 1,  # 'ж'
+        20: 0,  # 'з'
+        4: 0,  # 'и'
+        23: 0,  # 'й'
+        11: 1,  # 'к'
+        8: 2,  # 'л'
+        12: 1,  # 'м'
+        5: 2,  # 'н'
+        1: 0,  # 'о'
+        15: 1,  # 'п'
+        9: 1,  # 'р'
+        7: 1,  # 'с'
+        6: 0,  # 'т'
+        14: 0,  # 'у'
+        39: 0,  # 'ф'
+        26: 1,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 0,  # 'ю'
+        16: 0,  # 'я'
+    },
+    43: {  # 'Я'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 1,  # 'В'
+        46: 1,  # 'Г'
+        41: 0,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 1,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 1,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 1,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 1,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 1,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 0,  # 'а'
+        21: 1,  # 'б'
+        10: 1,  # 'в'
+        19: 1,  # 'г'
+        13: 1,  # 'д'
+        2: 0,  # 'е'
+        24: 0,  # 'ж'
+        20: 1,  # 'з'
+        4: 0,  # 'и'
+        23: 1,  # 'й'
+        11: 1,  # 'к'
+        8: 1,  # 'л'
+        12: 1,  # 'м'
+        5: 2,  # 'н'
+        1: 0,  # 'о'
+        15: 1,  # 'п'
+        9: 1,  # 'р'
+        7: 1,  # 'с'
+        6: 0,  # 'т'
+        14: 0,  # 'у'
+        39: 0,  # 'ф'
+        26: 1,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 1,  # 'ш'
+        29: 1,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 0,  # 'ю'
+        16: 0,  # 'я'
+    },
+    3: {  # 'а'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 1,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 2,  # 'а'
+        21: 3,  # 'б'
+        10: 3,  # 'в'
+        19: 3,  # 'г'
+        13: 3,  # 'д'
+        2: 3,  # 'е'
+        24: 3,  # 'ж'
+        20: 3,  # 'з'
+        4: 3,  # 'и'
+        23: 3,  # 'й'
+        11: 3,  # 'к'
+        8: 3,  # 'л'
+        12: 3,  # 'м'
+        5: 3,  # 'н'
+        1: 2,  # 'о'
+        15: 3,  # 'п'
+        9: 3,  # 'р'
+        7: 3,  # 'с'
+        6: 3,  # 'т'
+        14: 3,  # 'у'
+        39: 2,  # 'ф'
+        26: 3,  # 'х'
+        28: 3,  # 'ц'
+        22: 3,  # 'ч'
+        25: 3,  # 'ш'
+        29: 3,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 2,  # 'э'
+        27: 3,  # 'ю'
+        16: 3,  # 'я'
+    },
+    21: {  # 'б'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 2,  # 'б'
+        10: 2,  # 'в'
+        19: 1,  # 'г'
+        13: 2,  # 'д'
+        2: 3,  # 'е'
+        24: 2,  # 'ж'
+        20: 1,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 2,  # 'к'
+        8: 3,  # 'л'
+        12: 2,  # 'м'
+        5: 3,  # 'н'
+        1: 3,  # 'о'
+        15: 1,  # 'п'
+        9: 3,  # 'р'
+        7: 3,  # 'с'
+        6: 2,  # 'т'
+        14: 3,  # 'у'
+        39: 0,  # 'ф'
+        26: 2,  # 'х'
+        28: 1,  # 'ц'
+        22: 1,  # 'ч'
+        25: 2,  # 'ш'
+        29: 3,  # 'щ'
+        54: 2,  # 'ъ'
+        18: 3,  # 'ы'
+        17: 2,  # 'ь'
+        30: 1,  # 'э'
+        27: 2,  # 'ю'
+        16: 3,  # 'я'
+    },
+    10: {  # 'в'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 2,  # 'б'
+        10: 2,  # 'в'
+        19: 2,  # 'г'
+        13: 3,  # 'д'
+        2: 3,  # 'е'
+        24: 1,  # 'ж'
+        20: 3,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 3,  # 'к'
+        8: 3,  # 'л'
+        12: 2,  # 'м'
+        5: 3,  # 'н'
+        1: 3,  # 'о'
+        15: 3,  # 'п'
+        9: 3,  # 'р'
+        7: 3,  # 'с'
+        6: 3,  # 'т'
+        14: 3,  # 'у'
+        39: 1,  # 'ф'
+        26: 2,  # 'х'
+        28: 2,  # 'ц'
+        22: 2,  # 'ч'
+        25: 3,  # 'ш'
+        29: 2,  # 'щ'
+        54: 2,  # 'ъ'
+        18: 3,  # 'ы'
+        17: 3,  # 'ь'
+        30: 1,  # 'э'
+        27: 1,  # 'ю'
+        16: 3,  # 'я'
+    },
+    19: {  # 'г'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 1,  # 'б'
+        10: 2,  # 'в'
+        19: 1,  # 'г'
+        13: 3,  # 'д'
+        2: 3,  # 'е'
+        24: 0,  # 'ж'
+        20: 1,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 2,  # 'к'
+        8: 3,  # 'л'
+        12: 2,  # 'м'
+        5: 3,  # 'н'
+        1: 3,  # 'о'
+        15: 0,  # 'п'
+        9: 3,  # 'р'
+        7: 2,  # 'с'
+        6: 2,  # 'т'
+        14: 3,  # 'у'
+        39: 1,  # 'ф'
+        26: 1,  # 'х'
+        28: 1,  # 'ц'
+        22: 2,  # 'ч'
+        25: 1,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 1,  # 'ы'
+        17: 1,  # 'ь'
+        30: 1,  # 'э'
+        27: 1,  # 'ю'
+        16: 0,  # 'я'
+    },
+    13: {  # 'д'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 2,  # 'б'
+        10: 3,  # 'в'
+        19: 2,  # 'г'
+        13: 2,  # 'д'
+        2: 3,  # 'е'
+        24: 2,  # 'ж'
+        20: 2,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 3,  # 'к'
+        8: 3,  # 'л'
+        12: 2,  # 'м'
+        5: 3,  # 'н'
+        1: 3,  # 'о'
+        15: 2,  # 'п'
+        9: 3,  # 'р'
+        7: 3,  # 'с'
+        6: 3,  # 'т'
+        14: 3,  # 'у'
+        39: 1,  # 'ф'
+        26: 2,  # 'х'
+        28: 3,  # 'ц'
+        22: 2,  # 'ч'
+        25: 2,  # 'ш'
+        29: 1,  # 'щ'
+        54: 2,  # 'ъ'
+        18: 3,  # 'ы'
+        17: 3,  # 'ь'
+        30: 1,  # 'э'
+        27: 2,  # 'ю'
+        16: 3,  # 'я'
+    },
+    2: {  # 'е'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 2,  # 'а'
+        21: 3,  # 'б'
+        10: 3,  # 'в'
+        19: 3,  # 'г'
+        13: 3,  # 'д'
+        2: 3,  # 'е'
+        24: 3,  # 'ж'
+        20: 3,  # 'з'
+        4: 2,  # 'и'
+        23: 3,  # 'й'
+        11: 3,  # 'к'
+        8: 3,  # 'л'
+        12: 3,  # 'м'
+        5: 3,  # 'н'
+        1: 3,  # 'о'
+        15: 3,  # 'п'
+        9: 3,  # 'р'
+        7: 3,  # 'с'
+        6: 3,  # 'т'
+        14: 2,  # 'у'
+        39: 2,  # 'ф'
+        26: 3,  # 'х'
+        28: 3,  # 'ц'
+        22: 3,  # 'ч'
+        25: 3,  # 'ш'
+        29: 3,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 1,  # 'э'
+        27: 2,  # 'ю'
+        16: 3,  # 'я'
+    },
+    24: {  # 'ж'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 2,  # 'б'
+        10: 1,  # 'в'
+        19: 2,  # 'г'
+        13: 3,  # 'д'
+        2: 3,  # 'е'
+        24: 2,  # 'ж'
+        20: 1,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 2,  # 'к'
+        8: 2,  # 'л'
+        12: 1,  # 'м'
+        5: 3,  # 'н'
+        1: 2,  # 'о'
+        15: 1,  # 'п'
+        9: 2,  # 'р'
+        7: 2,  # 'с'
+        6: 1,  # 'т'
+        14: 3,  # 'у'
+        39: 1,  # 'ф'
+        26: 0,  # 'х'
+        28: 1,  # 'ц'
+        22: 2,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 1,  # 'ы'
+        17: 2,  # 'ь'
+        30: 1,  # 'э'
+        27: 1,  # 'ю'
+        16: 1,  # 'я'
+    },
+    20: {  # 'з'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 3,  # 'б'
+        10: 3,  # 'в'
+        19: 3,  # 'г'
+        13: 3,  # 'д'
+        2: 3,  # 'е'
+        24: 2,  # 'ж'
+        20: 2,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 3,  # 'к'
+        8: 3,  # 'л'
+        12: 3,  # 'м'
+        5: 3,  # 'н'
+        1: 3,  # 'о'
+        15: 0,  # 'п'
+        9: 3,  # 'р'
+        7: 2,  # 'с'
+        6: 2,  # 'т'
+        14: 3,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 1,  # 'ц'
+        22: 2,  # 'ч'
+        25: 1,  # 'ш'
+        29: 0,  # 'щ'
+        54: 2,  # 'ъ'
+        18: 3,  # 'ы'
+        17: 2,  # 'ь'
+        30: 1,  # 'э'
+        27: 1,  # 'ю'
+        16: 3,  # 'я'
+    },
+    4: {  # 'и'
+        37: 1,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 1,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 3,  # 'б'
+        10: 3,  # 'в'
+        19: 3,  # 'г'
+        13: 3,  # 'д'
+        2: 3,  # 'е'
+        24: 3,  # 'ж'
+        20: 3,  # 'з'
+        4: 3,  # 'и'
+        23: 3,  # 'й'
+        11: 3,  # 'к'
+        8: 3,  # 'л'
+        12: 3,  # 'м'
+        5: 3,  # 'н'
+        1: 3,  # 'о'
+        15: 3,  # 'п'
+        9: 3,  # 'р'
+        7: 3,  # 'с'
+        6: 3,  # 'т'
+        14: 2,  # 'у'
+        39: 2,  # 'ф'
+        26: 3,  # 'х'
+        28: 3,  # 'ц'
+        22: 3,  # 'ч'
+        25: 3,  # 'ш'
+        29: 3,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 2,  # 'э'
+        27: 3,  # 'ю'
+        16: 3,  # 'я'
+    },
+    23: {  # 'й'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 1,  # 'а'
+        21: 1,  # 'б'
+        10: 1,  # 'в'
+        19: 2,  # 'г'
+        13: 3,  # 'д'
+        2: 2,  # 'е'
+        24: 0,  # 'ж'
+        20: 2,  # 'з'
+        4: 1,  # 'и'
+        23: 0,  # 'й'
+        11: 2,  # 'к'
+        8: 2,  # 'л'
+        12: 2,  # 'м'
+        5: 3,  # 'н'
+        1: 2,  # 'о'
+        15: 1,  # 'п'
+        9: 2,  # 'р'
+        7: 3,  # 'с'
+        6: 3,  # 'т'
+        14: 1,  # 'у'
+        39: 2,  # 'ф'
+        26: 1,  # 'х'
+        28: 2,  # 'ц'
+        22: 3,  # 'ч'
+        25: 2,  # 'ш'
+        29: 1,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 1,  # 'э'
+        27: 1,  # 'ю'
+        16: 2,  # 'я'
+    },
+    11: {  # 'к'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 1,  # 'б'
+        10: 3,  # 'в'
+        19: 1,  # 'г'
+        13: 1,  # 'д'
+        2: 3,  # 'е'
+        24: 2,  # 'ж'
+        20: 2,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 2,  # 'к'
+        8: 3,  # 'л'
+        12: 1,  # 'м'
+        5: 3,  # 'н'
+        1: 3,  # 'о'
+        15: 0,  # 'п'
+        9: 3,  # 'р'
+        7: 3,  # 'с'
+        6: 3,  # 'т'
+        14: 3,  # 'у'
+        39: 1,  # 'ф'
+        26: 2,  # 'х'
+        28: 2,  # 'ц'
+        22: 1,  # 'ч'
+        25: 2,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 1,  # 'ы'
+        17: 1,  # 'ь'
+        30: 1,  # 'э'
+        27: 1,  # 'ю'
+        16: 1,  # 'я'
+    },
+    8: {  # 'л'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 2,  # 'б'
+        10: 2,  # 'в'
+        19: 3,  # 'г'
+        13: 2,  # 'д'
+        2: 3,  # 'е'
+        24: 3,  # 'ж'
+        20: 2,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 3,  # 'к'
+        8: 3,  # 'л'
+        12: 2,  # 'м'
+        5: 3,  # 'н'
+        1: 3,  # 'о'
+        15: 2,  # 'п'
+        9: 1,  # 'р'
+        7: 3,  # 'с'
+        6: 2,  # 'т'
+        14: 3,  # 'у'
+        39: 2,  # 'ф'
+        26: 2,  # 'х'
+        28: 1,  # 'ц'
+        22: 3,  # 'ч'
+        25: 2,  # 'ш'
+        29: 1,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 3,  # 'ы'
+        17: 3,  # 'ь'
+        30: 1,  # 'э'
+        27: 3,  # 'ю'
+        16: 3,  # 'я'
+    },
+    12: {  # 'м'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 2,  # 'б'
+        10: 2,  # 'в'
+        19: 2,  # 'г'
+        13: 1,  # 'д'
+        2: 3,  # 'е'
+        24: 1,  # 'ж'
+        20: 1,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 2,  # 'к'
+        8: 3,  # 'л'
+        12: 2,  # 'м'
+        5: 3,  # 'н'
+        1: 3,  # 'о'
+        15: 2,  # 'п'
+        9: 2,  # 'р'
+        7: 3,  # 'с'
+        6: 2,  # 'т'
+        14: 3,  # 'у'
+        39: 2,  # 'ф'
+        26: 2,  # 'х'
+        28: 2,  # 'ц'
+        22: 2,  # 'ч'
+        25: 1,  # 'ш'
+        29: 1,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 3,  # 'ы'
+        17: 2,  # 'ь'
+        30: 2,  # 'э'
+        27: 1,  # 'ю'
+        16: 3,  # 'я'
+    },
+    5: {  # 'н'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 2,  # 'б'
+        10: 2,  # 'в'
+        19: 3,  # 'г'
+        13: 3,  # 'д'
+        2: 3,  # 'е'
+        24: 2,  # 'ж'
+        20: 2,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 3,  # 'к'
+        8: 2,  # 'л'
+        12: 1,  # 'м'
+        5: 3,  # 'н'
+        1: 3,  # 'о'
+        15: 1,  # 'п'
+        9: 2,  # 'р'
+        7: 3,  # 'с'
+        6: 3,  # 'т'
+        14: 3,  # 'у'
+        39: 2,  # 'ф'
+        26: 2,  # 'х'
+        28: 3,  # 'ц'
+        22: 3,  # 'ч'
+        25: 2,  # 'ш'
+        29: 2,  # 'щ'
+        54: 1,  # 'ъ'
+        18: 3,  # 'ы'
+        17: 3,  # 'ь'
+        30: 1,  # 'э'
+        27: 3,  # 'ю'
+        16: 3,  # 'я'
+    },
+    1: {  # 'о'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 2,  # 'а'
+        21: 3,  # 'б'
+        10: 3,  # 'в'
+        19: 3,  # 'г'
+        13: 3,  # 'д'
+        2: 3,  # 'е'
+        24: 3,  # 'ж'
+        20: 3,  # 'з'
+        4: 3,  # 'и'
+        23: 3,  # 'й'
+        11: 3,  # 'к'
+        8: 3,  # 'л'
+        12: 3,  # 'м'
+        5: 3,  # 'н'
+        1: 3,  # 'о'
+        15: 3,  # 'п'
+        9: 3,  # 'р'
+        7: 3,  # 'с'
+        6: 3,  # 'т'
+        14: 2,  # 'у'
+        39: 2,  # 'ф'
+        26: 3,  # 'х'
+        28: 2,  # 'ц'
+        22: 3,  # 'ч'
+        25: 3,  # 'ш'
+        29: 3,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 2,  # 'э'
+        27: 3,  # 'ю'
+        16: 3,  # 'я'
+    },
+    15: {  # 'п'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 1,  # 'б'
+        10: 0,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 3,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 2,  # 'к'
+        8: 3,  # 'л'
+        12: 1,  # 'м'
+        5: 3,  # 'н'
+        1: 3,  # 'о'
+        15: 2,  # 'п'
+        9: 3,  # 'р'
+        7: 2,  # 'с'
+        6: 2,  # 'т'
+        14: 3,  # 'у'
+        39: 1,  # 'ф'
+        26: 0,  # 'х'
+        28: 2,  # 'ц'
+        22: 2,  # 'ч'
+        25: 1,  # 'ш'
+        29: 1,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 3,  # 'ы'
+        17: 2,  # 'ь'
+        30: 1,  # 'э'
+        27: 1,  # 'ю'
+        16: 3,  # 'я'
+    },
+    9: {  # 'р'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 2,  # 'б'
+        10: 3,  # 'в'
+        19: 3,  # 'г'
+        13: 3,  # 'д'
+        2: 3,  # 'е'
+        24: 3,  # 'ж'
+        20: 2,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 3,  # 'к'
+        8: 2,  # 'л'
+        12: 3,  # 'м'
+        5: 3,  # 'н'
+        1: 3,  # 'о'
+        15: 2,  # 'п'
+        9: 2,  # 'р'
+        7: 3,  # 'с'
+        6: 3,  # 'т'
+        14: 3,  # 'у'
+        39: 2,  # 'ф'
+        26: 3,  # 'х'
+        28: 2,  # 'ц'
+        22: 2,  # 'ч'
+        25: 3,  # 'ш'
+        29: 2,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 3,  # 'ы'
+        17: 3,  # 'ь'
+        30: 2,  # 'э'
+        27: 2,  # 'ю'
+        16: 3,  # 'я'
+    },
+    7: {  # 'с'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 1,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 2,  # 'б'
+        10: 3,  # 'в'
+        19: 2,  # 'г'
+        13: 3,  # 'д'
+        2: 3,  # 'е'
+        24: 2,  # 'ж'
+        20: 2,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 3,  # 'к'
+        8: 3,  # 'л'
+        12: 3,  # 'м'
+        5: 3,  # 'н'
+        1: 3,  # 'о'
+        15: 3,  # 'п'
+        9: 3,  # 'р'
+        7: 3,  # 'с'
+        6: 3,  # 'т'
+        14: 3,  # 'у'
+        39: 2,  # 'ф'
+        26: 3,  # 'х'
+        28: 2,  # 'ц'
+        22: 3,  # 'ч'
+        25: 2,  # 'ш'
+        29: 1,  # 'щ'
+        54: 2,  # 'ъ'
+        18: 3,  # 'ы'
+        17: 3,  # 'ь'
+        30: 2,  # 'э'
+        27: 3,  # 'ю'
+        16: 3,  # 'я'
+    },
+    6: {  # 'т'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 2,  # 'б'
+        10: 3,  # 'в'
+        19: 2,  # 'г'
+        13: 2,  # 'д'
+        2: 3,  # 'е'
+        24: 1,  # 'ж'
+        20: 1,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 3,  # 'к'
+        8: 3,  # 'л'
+        12: 2,  # 'м'
+        5: 3,  # 'н'
+        1: 3,  # 'о'
+        15: 2,  # 'п'
+        9: 3,  # 'р'
+        7: 3,  # 'с'
+        6: 2,  # 'т'
+        14: 3,  # 'у'
+        39: 2,  # 'ф'
+        26: 2,  # 'х'
+        28: 2,  # 'ц'
+        22: 2,  # 'ч'
+        25: 2,  # 'ш'
+        29: 2,  # 'щ'
+        54: 2,  # 'ъ'
+        18: 3,  # 'ы'
+        17: 3,  # 'ь'
+        30: 2,  # 'э'
+        27: 2,  # 'ю'
+        16: 3,  # 'я'
+    },
+    14: {  # 'у'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 2,  # 'а'
+        21: 3,  # 'б'
+        10: 3,  # 'в'
+        19: 3,  # 'г'
+        13: 3,  # 'д'
+        2: 3,  # 'е'
+        24: 3,  # 'ж'
+        20: 3,  # 'з'
+        4: 2,  # 'и'
+        23: 2,  # 'й'
+        11: 3,  # 'к'
+        8: 3,  # 'л'
+        12: 3,  # 'м'
+        5: 3,  # 'н'
+        1: 2,  # 'о'
+        15: 3,  # 'п'
+        9: 3,  # 'р'
+        7: 3,  # 'с'
+        6: 3,  # 'т'
+        14: 1,  # 'у'
+        39: 2,  # 'ф'
+        26: 3,  # 'х'
+        28: 2,  # 'ц'
+        22: 3,  # 'ч'
+        25: 3,  # 'ш'
+        29: 3,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 2,  # 'э'
+        27: 3,  # 'ю'
+        16: 2,  # 'я'
+    },
+    39: {  # 'ф'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 1,  # 'б'
+        10: 0,  # 'в'
+        19: 1,  # 'г'
+        13: 0,  # 'д'
+        2: 3,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 1,  # 'к'
+        8: 2,  # 'л'
+        12: 1,  # 'м'
+        5: 1,  # 'н'
+        1: 3,  # 'о'
+        15: 1,  # 'п'
+        9: 2,  # 'р'
+        7: 2,  # 'с'
+        6: 2,  # 'т'
+        14: 2,  # 'у'
+        39: 2,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 1,  # 'ч'
+        25: 1,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 2,  # 'ы'
+        17: 1,  # 'ь'
+        30: 2,  # 'э'
+        27: 1,  # 'ю'
+        16: 1,  # 'я'
+    },
+    26: {  # 'х'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 0,  # 'б'
+        10: 3,  # 'в'
+        19: 1,  # 'г'
+        13: 1,  # 'д'
+        2: 2,  # 'е'
+        24: 0,  # 'ж'
+        20: 1,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 1,  # 'к'
+        8: 2,  # 'л'
+        12: 2,  # 'м'
+        5: 3,  # 'н'
+        1: 3,  # 'о'
+        15: 1,  # 'п'
+        9: 3,  # 'р'
+        7: 2,  # 'с'
+        6: 2,  # 'т'
+        14: 2,  # 'у'
+        39: 1,  # 'ф'
+        26: 1,  # 'х'
+        28: 1,  # 'ц'
+        22: 1,  # 'ч'
+        25: 2,  # 'ш'
+        29: 0,  # 'щ'
+        54: 1,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 1,  # 'ь'
+        30: 1,  # 'э'
+        27: 1,  # 'ю'
+        16: 0,  # 'я'
+    },
+    28: {  # 'ц'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 1,  # 'б'
+        10: 2,  # 'в'
+        19: 1,  # 'г'
+        13: 1,  # 'д'
+        2: 3,  # 'е'
+        24: 0,  # 'ж'
+        20: 1,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 2,  # 'к'
+        8: 1,  # 'л'
+        12: 1,  # 'м'
+        5: 1,  # 'н'
+        1: 3,  # 'о'
+        15: 0,  # 'п'
+        9: 1,  # 'р'
+        7: 0,  # 'с'
+        6: 1,  # 'т'
+        14: 3,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 1,  # 'ц'
+        22: 0,  # 'ч'
+        25: 1,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 3,  # 'ы'
+        17: 1,  # 'ь'
+        30: 0,  # 'э'
+        27: 1,  # 'ю'
+        16: 0,  # 'я'
+    },
+    22: {  # 'ч'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 1,  # 'б'
+        10: 1,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 3,  # 'е'
+        24: 1,  # 'ж'
+        20: 0,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 3,  # 'к'
+        8: 2,  # 'л'
+        12: 1,  # 'м'
+        5: 3,  # 'н'
+        1: 2,  # 'о'
+        15: 0,  # 'п'
+        9: 2,  # 'р'
+        7: 1,  # 'с'
+        6: 3,  # 'т'
+        14: 3,  # 'у'
+        39: 1,  # 'ф'
+        26: 1,  # 'х'
+        28: 0,  # 'ц'
+        22: 1,  # 'ч'
+        25: 2,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 3,  # 'ь'
+        30: 0,  # 'э'
+        27: 0,  # 'ю'
+        16: 0,  # 'я'
+    },
+    25: {  # 'ш'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 1,  # 'б'
+        10: 2,  # 'в'
+        19: 1,  # 'г'
+        13: 0,  # 'д'
+        2: 3,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 3,  # 'к'
+        8: 3,  # 'л'
+        12: 2,  # 'м'
+        5: 3,  # 'н'
+        1: 3,  # 'о'
+        15: 2,  # 'п'
+        9: 2,  # 'р'
+        7: 1,  # 'с'
+        6: 2,  # 'т'
+        14: 3,  # 'у'
+        39: 2,  # 'ф'
+        26: 1,  # 'х'
+        28: 1,  # 'ц'
+        22: 1,  # 'ч'
+        25: 1,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 3,  # 'ь'
+        30: 1,  # 'э'
+        27: 1,  # 'ю'
+        16: 0,  # 'я'
+    },
+    29: {  # 'щ'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 0,  # 'б'
+        10: 1,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 3,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 3,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 0,  # 'л'
+        12: 1,  # 'м'
+        5: 2,  # 'н'
+        1: 1,  # 'о'
+        15: 0,  # 'п'
+        9: 2,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 2,  # 'ь'
+        30: 0,  # 'э'
+        27: 0,  # 'ю'
+        16: 0,  # 'я'
+    },
+    54: {  # 'ъ'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 0,  # 'а'
+        21: 0,  # 'б'
+        10: 0,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 2,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 0,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 0,  # 'л'
+        12: 0,  # 'м'
+        5: 0,  # 'н'
+        1: 0,  # 'о'
+        15: 0,  # 'п'
+        9: 0,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 0,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 1,  # 'ю'
+        16: 2,  # 'я'
+    },
+    18: {  # 'ы'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 0,  # 'а'
+        21: 3,  # 'б'
+        10: 3,  # 'в'
+        19: 2,  # 'г'
+        13: 2,  # 'д'
+        2: 3,  # 'е'
+        24: 2,  # 'ж'
+        20: 2,  # 'з'
+        4: 2,  # 'и'
+        23: 3,  # 'й'
+        11: 3,  # 'к'
+        8: 3,  # 'л'
+        12: 3,  # 'м'
+        5: 3,  # 'н'
+        1: 1,  # 'о'
+        15: 3,  # 'п'
+        9: 3,  # 'р'
+        7: 3,  # 'с'
+        6: 3,  # 'т'
+        14: 1,  # 'у'
+        39: 0,  # 'ф'
+        26: 3,  # 'х'
+        28: 2,  # 'ц'
+        22: 3,  # 'ч'
+        25: 3,  # 'ш'
+        29: 2,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 0,  # 'ю'
+        16: 2,  # 'я'
+    },
+    17: {  # 'ь'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 0,  # 'а'
+        21: 2,  # 'б'
+        10: 2,  # 'в'
+        19: 2,  # 'г'
+        13: 2,  # 'д'
+        2: 3,  # 'е'
+        24: 1,  # 'ж'
+        20: 3,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 3,  # 'к'
+        8: 0,  # 'л'
+        12: 3,  # 'м'
+        5: 3,  # 'н'
+        1: 2,  # 'о'
+        15: 2,  # 'п'
+        9: 1,  # 'р'
+        7: 3,  # 'с'
+        6: 2,  # 'т'
+        14: 0,  # 'у'
+        39: 2,  # 'ф'
+        26: 1,  # 'х'
+        28: 2,  # 'ц'
+        22: 2,  # 'ч'
+        25: 3,  # 'ш'
+        29: 2,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 1,  # 'э'
+        27: 3,  # 'ю'
+        16: 3,  # 'я'
+    },
+    30: {  # 'э'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 1,  # 'Р'
+        32: 1,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 1,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 0,  # 'а'
+        21: 1,  # 'б'
+        10: 1,  # 'в'
+        19: 1,  # 'г'
+        13: 2,  # 'д'
+        2: 1,  # 'е'
+        24: 0,  # 'ж'
+        20: 1,  # 'з'
+        4: 0,  # 'и'
+        23: 2,  # 'й'
+        11: 2,  # 'к'
+        8: 2,  # 'л'
+        12: 2,  # 'м'
+        5: 2,  # 'н'
+        1: 0,  # 'о'
+        15: 2,  # 'п'
+        9: 2,  # 'р'
+        7: 2,  # 'с'
+        6: 3,  # 'т'
+        14: 1,  # 'у'
+        39: 2,  # 'ф'
+        26: 1,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 1,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 1,  # 'э'
+        27: 1,  # 'ю'
+        16: 1,  # 'я'
+    },
+    27: {  # 'ю'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 2,  # 'а'
+        21: 3,  # 'б'
+        10: 1,  # 'в'
+        19: 2,  # 'г'
+        13: 3,  # 'д'
+        2: 1,  # 'е'
+        24: 2,  # 'ж'
+        20: 2,  # 'з'
+        4: 1,  # 'и'
+        23: 1,  # 'й'
+        11: 2,  # 'к'
+        8: 2,  # 'л'
+        12: 2,  # 'м'
+        5: 2,  # 'н'
+        1: 1,  # 'о'
+        15: 2,  # 'п'
+        9: 2,  # 'р'
+        7: 3,  # 'с'
+        6: 3,  # 'т'
+        14: 0,  # 'у'
+        39: 1,  # 'ф'
+        26: 2,  # 'х'
+        28: 2,  # 'ц'
+        22: 2,  # 'ч'
+        25: 2,  # 'ш'
+        29: 3,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 1,  # 'э'
+        27: 2,  # 'ю'
+        16: 1,  # 'я'
+    },
+    16: {  # 'я'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 0,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 0,  # 'а'
+        21: 2,  # 'б'
+        10: 3,  # 'в'
+        19: 2,  # 'г'
+        13: 3,  # 'д'
+        2: 3,  # 'е'
+        24: 3,  # 'ж'
+        20: 3,  # 'з'
+        4: 2,  # 'и'
+        23: 2,  # 'й'
+        11: 3,  # 'к'
+        8: 3,  # 'л'
+        12: 3,  # 'м'
+        5: 3,  # 'н'
+        1: 0,  # 'о'
+        15: 2,  # 'п'
+        9: 2,  # 'р'
+        7: 3,  # 'с'
+        6: 3,  # 'т'
+        14: 1,  # 'у'
+        39: 1,  # 'ф'
+        26: 3,  # 'х'
+        28: 2,  # 'ц'
+        22: 2,  # 'ч'
+        25: 2,  # 'ш'
+        29: 3,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 2,  # 'ю'
+        16: 2,  # 'я'
+    },
+}
+
+# 255: Undefined characters that did not exist in training text
+# 254: Carriage Return / Line Feed ('\r', '\n')
+# 253: symbols (punctuation) that do not belong to a word
+# 252: 0 - 9
+# 251: Control characters
+
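+# --- Illustrative sketch (not part of the upstream chardet code) ----------
+# The tables below map every byte of a given legacy encoding to a frequency
+# "order" (lower order = more frequent Russian letter), while the
+# RUSSIAN_LANG_MODEL above rates adjacent orders on a 0-3 scale
+# (3 = very common bigram, 0 = unseen in training). The helper below is a
+# hypothetical toy scorer over those two structures; the real prober in
+# chardet is more elaborate, so treat this purely as documentation.
+def _toy_bigram_score(data: bytes, char_to_order: dict, lang_model: dict) -> float:
+    """Return the fraction of adjacent letter pairs rated as common (>= 2)."""
+    orders = [char_to_order.get(byte, 255) for byte in data]
+    # Only the 64 most frequent letters participate in the bigram model;
+    # control bytes, digits and punctuation (orders >= 64) are skipped.
+    pairs = [(a, b) for a, b in zip(orders, orders[1:]) if a < 64 and b < 64]
+    if not pairs:
+        return 0.0
+    common = sum(1 for a, b in pairs if lang_model.get(a, {}).get(b, 0) >= 2)
+    return common / len(pairs)
+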
+# Character Mapping Table(s):
+IBM866_RUSSIAN_CHAR_TO_ORDER = {
+    0: 255,  # '\x00'
+    1: 255,  # '\x01'
+    2: 255,  # '\x02'
+    3: 255,  # '\x03'
+    4: 255,  # '\x04'
+    5: 255,  # '\x05'
+    6: 255,  # '\x06'
+    7: 255,  # '\x07'
+    8: 255,  # '\x08'
+    9: 255,  # '\t'
+    10: 254,  # '\n'
+    11: 255,  # '\x0b'
+    12: 255,  # '\x0c'
+    13: 254,  # '\r'
+    14: 255,  # '\x0e'
+    15: 255,  # '\x0f'
+    16: 255,  # '\x10'
+    17: 255,  # '\x11'
+    18: 255,  # '\x12'
+    19: 255,  # '\x13'
+    20: 255,  # '\x14'
+    21: 255,  # '\x15'
+    22: 255,  # '\x16'
+    23: 255,  # '\x17'
+    24: 255,  # '\x18'
+    25: 255,  # '\x19'
+    26: 255,  # '\x1a'
+    27: 255,  # '\x1b'
+    28: 255,  # '\x1c'
+    29: 255,  # '\x1d'
+    30: 255,  # '\x1e'
+    31: 255,  # '\x1f'
+    32: 253,  # ' '
+    33: 253,  # '!'
+    34: 253,  # '"'
+    35: 253,  # '#'
+    36: 253,  # '$'
+    37: 253,  # '%'
+    38: 253,  # '&'
+    39: 253,  # "'"
+    40: 253,  # '('
+    41: 253,  # ')'
+    42: 253,  # '*'
+    43: 253,  # '+'
+    44: 253,  # ','
+    45: 253,  # '-'
+    46: 253,  # '.'
+    47: 253,  # '/'
+    48: 252,  # '0'
+    49: 252,  # '1'
+    50: 252,  # '2'
+    51: 252,  # '3'
+    52: 252,  # '4'
+    53: 252,  # '5'
+    54: 252,  # '6'
+    55: 252,  # '7'
+    56: 252,  # '8'
+    57: 252,  # '9'
+    58: 253,  # ':'
+    59: 253,  # ';'
+    60: 253,  # '<'
+    61: 253,  # '='
+    62: 253,  # '>'
+    63: 253,  # '?'
+    64: 253,  # '@'
+    65: 142,  # 'A'
+    66: 143,  # 'B'
+    67: 144,  # 'C'
+    68: 145,  # 'D'
+    69: 146,  # 'E'
+    70: 147,  # 'F'
+    71: 148,  # 'G'
+    72: 149,  # 'H'
+    73: 150,  # 'I'
+    74: 151,  # 'J'
+    75: 152,  # 'K'
+    76: 74,  # 'L'
+    77: 153,  # 'M'
+    78: 75,  # 'N'
+    79: 154,  # 'O'
+    80: 155,  # 'P'
+    81: 156,  # 'Q'
+    82: 157,  # 'R'
+    83: 158,  # 'S'
+    84: 159,  # 'T'
+    85: 160,  # 'U'
+    86: 161,  # 'V'
+    87: 162,  # 'W'
+    88: 163,  # 'X'
+    89: 164,  # 'Y'
+    90: 165,  # 'Z'
+    91: 253,  # '['
+    92: 253,  # '\\'
+    93: 253,  # ']'
+    94: 253,  # '^'
+    95: 253,  # '_'
+    96: 253,  # '`'
+    97: 71,  # 'a'
+    98: 172,  # 'b'
+    99: 66,  # 'c'
+    100: 173,  # 'd'
+    101: 65,  # 'e'
+    102: 174,  # 'f'
+    103: 76,  # 'g'
+    104: 175,  # 'h'
+    105: 64,  # 'i'
+    106: 176,  # 'j'
+    107: 177,  # 'k'
+    108: 77,  # 'l'
+    109: 72,  # 'm'
+    110: 178,  # 'n'
+    111: 69,  # 'o'
+    112: 67,  # 'p'
+    113: 179,  # 'q'
+    114: 78,  # 'r'
+    115: 73,  # 's'
+    116: 180,  # 't'
+    117: 181,  # 'u'
+    118: 79,  # 'v'
+    119: 182,  # 'w'
+    120: 183,  # 'x'
+    121: 184,  # 'y'
+    122: 185,  # 'z'
+    123: 253,  # '{'
+    124: 253,  # '|'
+    125: 253,  # '}'
+    126: 253,  # '~'
+    127: 253,  # '\x7f'
+    128: 37,  # 'А'
+    129: 44,  # 'Б'
+    130: 33,  # 'В'
+    131: 46,  # 'Г'
+    132: 41,  # 'Д'
+    133: 48,  # 'Е'
+    134: 56,  # 'Ж'
+    135: 51,  # 'З'
+    136: 42,  # 'И'
+    137: 60,  # 'Й'
+    138: 36,  # 'К'
+    139: 49,  # 'Л'
+    140: 38,  # 'М'
+    141: 31,  # 'Н'
+    142: 34,  # 'О'
+    143: 35,  # 'П'
+    144: 45,  # 'Р'
+    145: 32,  # 'С'
+    146: 40,  # 'Т'
+    147: 52,  # 'У'
+    148: 53,  # 'Ф'
+    149: 55,  # 'Х'
+    150: 58,  # 'Ц'
+    151: 50,  # 'Ч'
+    152: 57,  # 'Ш'
+    153: 63,  # 'Щ'
+    154: 70,  # 'Ъ'
+    155: 62,  # 'Ы'
+    156: 61,  # 'Ь'
+    157: 47,  # 'Э'
+    158: 59,  # 'Ю'
+    159: 43,  # 'Я'
+    160: 3,  # 'а'
+    161: 21,  # 'б'
+    162: 10,  # 'в'
+    163: 19,  # 'г'
+    164: 13,  # 'д'
+    165: 2,  # 'е'
+    166: 24,  # 'ж'
+    167: 20,  # 'з'
+    168: 4,  # 'и'
+    169: 23,  # 'й'
+    170: 11,  # 'к'
+    171: 8,  # 'л'
+    172: 12,  # 'м'
+    173: 5,  # 'н'
+    174: 1,  # 'о'
+    175: 15,  # 'п'
+    176: 191,  # '░'
+    177: 192,  # '▒'
+    178: 193,  # '▓'
+    179: 194,  # '│'
+    180: 195,  # '┤'
+    181: 196,  # '╡'
+    182: 197,  # '╢'
+    183: 198,  # '╖'
+    184: 199,  # '╕'
+    185: 200,  # '╣'
+    186: 201,  # '║'
+    187: 202,  # '╗'
+    188: 203,  # '╝'
+    189: 204,  # '╜'
+    190: 205,  # '╛'
+    191: 206,  # '┐'
+    192: 207,  # '└'
+    193: 208,  # '┴'
+    194: 209,  # '┬'
+    195: 210,  # '├'
+    196: 211,  # '─'
+    197: 212,  # '┼'
+    198: 213,  # '╞'
+    199: 214,  # '╟'
+    200: 215,  # '╚'
+    201: 216,  # '╔'
+    202: 217,  # '╩'
+    203: 218,  # '╦'
+    204: 219,  # '╠'
+    205: 220,  # '═'
+    206: 221,  # '╬'
+    207: 222,  # '╧'
+    208: 223,  # '╨'
+    209: 224,  # '╤'
+    210: 225,  # '╥'
+    211: 226,  # '╙'
+    212: 227,  # '╘'
+    213: 228,  # '╒'
+    214: 229,  # '╓'
+    215: 230,  # '╫'
+    216: 231,  # '╪'
+    217: 232,  # '┘'
+    218: 233,  # '┌'
+    219: 234,  # '█'
+    220: 235,  # '▄'
+    221: 236,  # '▌'
+    222: 237,  # '▐'
+    223: 238,  # '▀'
+    224: 9,  # 'р'
+    225: 7,  # 'с'
+    226: 6,  # 'т'
+    227: 14,  # 'у'
+    228: 39,  # 'ф'
+    229: 26,  # 'х'
+    230: 28,  # 'ц'
+    231: 22,  # 'ч'
+    232: 25,  # 'ш'
+    233: 29,  # 'щ'
+    234: 54,  # 'ъ'
+    235: 18,  # 'ы'
+    236: 17,  # 'ь'
+    237: 30,  # 'э'
+    238: 27,  # 'ю'
+    239: 16,  # 'я'
+    240: 239,  # 'Ё'
+    241: 68,  # 'ё'
+    242: 240,  # 'Є'
+    243: 241,  # 'є'
+    244: 242,  # 'Ї'
+    245: 243,  # 'ї'
+    246: 244,  # 'Ў'
+    247: 245,  # 'ў'
+    248: 246,  # '°'
+    249: 247,  # '∙'
+    250: 248,  # '·'
+    251: 249,  # '√'
+    252: 250,  # '№'
+    253: 251,  # '¤'
+    254: 252,  # '■'
+    255: 255,  # '\xa0'
+}
+
+IBM866_RUSSIAN_MODEL = SingleByteCharSetModel(
+    charset_name="IBM866",
+    language="Russian",
+    char_to_order_map=IBM866_RUSSIAN_CHAR_TO_ORDER,
+    language_model=RUSSIAN_LANG_MODEL,
+    typical_positive_ratio=0.976601,
+    keep_ascii_letters=False,
+    alphabet="ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё",
+)
+
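+# For context, a caller normally never uses IBM866_RUSSIAN_MODEL directly;
+# detection goes through chardet's public API, which scores the input against
+# every registered model and reports the winner, roughly like:
+#
+#     import chardet
+#     result = chardet.detect("пример текста".encode("cp866"))
+#     # -> a dict with 'encoding', 'language' and 'confidence' keys
+#
+# The exact result depends on the chardet version and on the input length,
+# so the snippet is only meant to show where this model plugs in.
+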
+WINDOWS_1251_RUSSIAN_CHAR_TO_ORDER = {
+    0: 255,  # '\x00'
+    1: 255,  # '\x01'
+    2: 255,  # '\x02'
+    3: 255,  # '\x03'
+    4: 255,  # '\x04'
+    5: 255,  # '\x05'
+    6: 255,  # '\x06'
+    7: 255,  # '\x07'
+    8: 255,  # '\x08'
+    9: 255,  # '\t'
+    10: 254,  # '\n'
+    11: 255,  # '\x0b'
+    12: 255,  # '\x0c'
+    13: 254,  # '\r'
+    14: 255,  # '\x0e'
+    15: 255,  # '\x0f'
+    16: 255,  # '\x10'
+    17: 255,  # '\x11'
+    18: 255,  # '\x12'
+    19: 255,  # '\x13'
+    20: 255,  # '\x14'
+    21: 255,  # '\x15'
+    22: 255,  # '\x16'
+    23: 255,  # '\x17'
+    24: 255,  # '\x18'
+    25: 255,  # '\x19'
+    26: 255,  # '\x1a'
+    27: 255,  # '\x1b'
+    28: 255,  # '\x1c'
+    29: 255,  # '\x1d'
+    30: 255,  # '\x1e'
+    31: 255,  # '\x1f'
+    32: 253,  # ' '
+    33: 253,  # '!'
+    34: 253,  # '"'
+    35: 253,  # '#'
+    36: 253,  # '$'
+    37: 253,  # '%'
+    38: 253,  # '&'
+    39: 253,  # "'"
+    40: 253,  # '('
+    41: 253,  # ')'
+    42: 253,  # '*'
+    43: 253,  # '+'
+    44: 253,  # ','
+    45: 253,  # '-'
+    46: 253,  # '.'
+    47: 253,  # '/'
+    48: 252,  # '0'
+    49: 252,  # '1'
+    50: 252,  # '2'
+    51: 252,  # '3'
+    52: 252,  # '4'
+    53: 252,  # '5'
+    54: 252,  # '6'
+    55: 252,  # '7'
+    56: 252,  # '8'
+    57: 252,  # '9'
+    58: 253,  # ':'
+    59: 253,  # ';'
+    60: 253,  # '<'
+    61: 253,  # '='
+    62: 253,  # '>'
+    63: 253,  # '?'
+    64: 253,  # '@'
+    65: 142,  # 'A'
+    66: 143,  # 'B'
+    67: 144,  # 'C'
+    68: 145,  # 'D'
+    69: 146,  # 'E'
+    70: 147,  # 'F'
+    71: 148,  # 'G'
+    72: 149,  # 'H'
+    73: 150,  # 'I'
+    74: 151,  # 'J'
+    75: 152,  # 'K'
+    76: 74,  # 'L'
+    77: 153,  # 'M'
+    78: 75,  # 'N'
+    79: 154,  # 'O'
+    80: 155,  # 'P'
+    81: 156,  # 'Q'
+    82: 157,  # 'R'
+    83: 158,  # 'S'
+    84: 159,  # 'T'
+    85: 160,  # 'U'
+    86: 161,  # 'V'
+    87: 162,  # 'W'
+    88: 163,  # 'X'
+    89: 164,  # 'Y'
+    90: 165,  # 'Z'
+    91: 253,  # '['
+    92: 253,  # '\\'
+    93: 253,  # ']'
+    94: 253,  # '^'
+    95: 253,  # '_'
+    96: 253,  # '`'
+    97: 71,  # 'a'
+    98: 172,  # 'b'
+    99: 66,  # 'c'
+    100: 173,  # 'd'
+    101: 65,  # 'e'
+    102: 174,  # 'f'
+    103: 76,  # 'g'
+    104: 175,  # 'h'
+    105: 64,  # 'i'
+    106: 176,  # 'j'
+    107: 177,  # 'k'
+    108: 77,  # 'l'
+    109: 72,  # 'm'
+    110: 178,  # 'n'
+    111: 69,  # 'o'
+    112: 67,  # 'p'
+    113: 179,  # 'q'
+    114: 78,  # 'r'
+    115: 73,  # 's'
+    116: 180,  # 't'
+    117: 181,  # 'u'
+    118: 79,  # 'v'
+    119: 182,  # 'w'
+    120: 183,  # 'x'
+    121: 184,  # 'y'
+    122: 185,  # 'z'
+    123: 253,  # '{'
+    124: 253,  # '|'
+    125: 253,  # '}'
+    126: 253,  # '~'
+    127: 253,  # '\x7f'
+    128: 191,  # 'Ђ'
+    129: 192,  # 'Ѓ'
+    130: 193,  # '‚'
+    131: 194,  # 'ѓ'
+    132: 195,  # '„'
+    133: 196,  # '…'
+    134: 197,  # '†'
+    135: 198,  # '‡'
+    136: 199,  # '€'
+    137: 200,  # '‰'
+    138: 201,  # 'Љ'
+    139: 202,  # '‹'
+    140: 203,  # 'Њ'
+    141: 204,  # 'Ќ'
+    142: 205,  # 'Ћ'
+    143: 206,  # 'Џ'
+    144: 207,  # 'ђ'
+    145: 208,  # '‘'
+    146: 209,  # '’'
+    147: 210,  # '“'
+    148: 211,  # '”'
+    149: 212,  # '•'
+    150: 213,  # '–'
+    151: 214,  # '—'
+    152: 215,  # None
+    153: 216,  # '™'
+    154: 217,  # 'љ'
+    155: 218,  # '›'
+    156: 219,  # 'њ'
+    157: 220,  # 'ќ'
+    158: 221,  # 'ћ'
+    159: 222,  # 'џ'
+    160: 223,  # '\xa0'
+    161: 224,  # 'Ў'
+    162: 225,  # 'ў'
+    163: 226,  # 'Ј'
+    164: 227,  # '¤'
+    165: 228,  # 'Ґ'
+    166: 229,  # '¦'
+    167: 230,  # '§'
+    168: 231,  # 'Ё'
+    169: 232,  # '©'
+    170: 233,  # 'Є'
+    171: 234,  # '«'
+    172: 235,  # '¬'
+    173: 236,  # '\xad'
+    174: 237,  # '®'
+    175: 238,  # 'Ї'
+    176: 239,  # '°'
+    177: 240,  # '±'
+    178: 241,  # 'І'
+    179: 242,  # 'і'
+    180: 243,  # 'ґ'
+    181: 244,  # 'µ'
+    182: 245,  # '¶'
+    183: 246,  # '·'
+    184: 68,  # 'ё'
+    185: 247,  # '№'
+    186: 248,  # 'є'
+    187: 249,  # '»'
+    188: 250,  # 'ј'
+    189: 251,  # 'Ѕ'
+    190: 252,  # 'ѕ'
+    191: 253,  # 'ї'
+    192: 37,  # 'А'
+    193: 44,  # 'Б'
+    194: 33,  # 'В'
+    195: 46,  # 'Г'
+    196: 41,  # 'Д'
+    197: 48,  # 'Е'
+    198: 56,  # 'Ж'
+    199: 51,  # 'З'
+    200: 42,  # 'И'
+    201: 60,  # 'Й'
+    202: 36,  # 'К'
+    203: 49,  # 'Л'
+    204: 38,  # 'М'
+    205: 31,  # 'Н'
+    206: 34,  # 'О'
+    207: 35,  # 'П'
+    208: 45,  # 'Р'
+    209: 32,  # 'С'
+    210: 40,  # 'Т'
+    211: 52,  # 'У'
+    212: 53,  # 'Ф'
+    213: 55,  # 'Х'
+    214: 58,  # 'Ц'
+    215: 50,  # 'Ч'
+    216: 57,  # 'Ш'
+    217: 63,  # 'Щ'
+    218: 70,  # 'Ъ'
+    219: 62,  # 'Ы'
+    220: 61,  # 'Ь'
+    221: 47,  # 'Э'
+    222: 59,  # 'Ю'
+    223: 43,  # 'Я'
+    224: 3,  # 'а'
+    225: 21,  # 'б'
+    226: 10,  # 'в'
+    227: 19,  # 'г'
+    228: 13,  # 'д'
+    229: 2,  # 'е'
+    230: 24,  # 'ж'
+    231: 20,  # 'з'
+    232: 4,  # 'и'
+    233: 23,  # 'й'
+    234: 11,  # 'к'
+    235: 8,  # 'л'
+    236: 12,  # 'м'
+    237: 5,  # 'н'
+    238: 1,  # 'о'
+    239: 15,  # 'п'
+    240: 9,  # 'р'
+    241: 7,  # 'с'
+    242: 6,  # 'т'
+    243: 14,  # 'у'
+    244: 39,  # 'ф'
+    245: 26,  # 'х'
+    246: 28,  # 'ц'
+    247: 22,  # 'ч'
+    248: 25,  # 'ш'
+    249: 29,  # 'щ'
+    250: 54,  # 'ъ'
+    251: 18,  # 'ы'
+    252: 17,  # 'ь'
+    253: 30,  # 'э'
+    254: 27,  # 'ю'
+    255: 16,  # 'я'
+}
+
+WINDOWS_1251_RUSSIAN_MODEL = SingleByteCharSetModel(
+    charset_name="windows-1251",
+    language="Russian",
+    char_to_order_map=WINDOWS_1251_RUSSIAN_CHAR_TO_ORDER,
+    language_model=RUSSIAN_LANG_MODEL,
+    typical_positive_ratio=0.976601,
+    keep_ascii_letters=False,
+    alphabet="ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё",
+)
+
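+# Note: each Russian model defined in this module reuses the same
+# RUSSIAN_LANG_MODEL and typical_positive_ratio; the models differ only in
+# their char_to_order_map, i.e. in how raw byte values of a particular
+# legacy encoding are translated into the shared frequency orders.
+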
+IBM855_RUSSIAN_CHAR_TO_ORDER = {
+    0: 255,  # '\x00'
+    1: 255,  # '\x01'
+    2: 255,  # '\x02'
+    3: 255,  # '\x03'
+    4: 255,  # '\x04'
+    5: 255,  # '\x05'
+    6: 255,  # '\x06'
+    7: 255,  # '\x07'
+    8: 255,  # '\x08'
+    9: 255,  # '\t'
+    10: 254,  # '\n'
+    11: 255,  # '\x0b'
+    12: 255,  # '\x0c'
+    13: 254,  # '\r'
+    14: 255,  # '\x0e'
+    15: 255,  # '\x0f'
+    16: 255,  # '\x10'
+    17: 255,  # '\x11'
+    18: 255,  # '\x12'
+    19: 255,  # '\x13'
+    20: 255,  # '\x14'
+    21: 255,  # '\x15'
+    22: 255,  # '\x16'
+    23: 255,  # '\x17'
+    24: 255,  # '\x18'
+    25: 255,  # '\x19'
+    26: 255,  # '\x1a'
+    27: 255,  # '\x1b'
+    28: 255,  # '\x1c'
+    29: 255,  # '\x1d'
+    30: 255,  # '\x1e'
+    31: 255,  # '\x1f'
+    32: 253,  # ' '
+    33: 253,  # '!'
+    34: 253,  # '"'
+    35: 253,  # '#'
+    36: 253,  # '$'
+    37: 253,  # '%'
+    38: 253,  # '&'
+    39: 253,  # "'"
+    40: 253,  # '('
+    41: 253,  # ')'
+    42: 253,  # '*'
+    43: 253,  # '+'
+    44: 253,  # ','
+    45: 253,  # '-'
+    46: 253,  # '.'
+    47: 253,  # '/'
+    48: 252,  # '0'
+    49: 252,  # '1'
+    50: 252,  # '2'
+    51: 252,  # '3'
+    52: 252,  # '4'
+    53: 252,  # '5'
+    54: 252,  # '6'
+    55: 252,  # '7'
+    56: 252,  # '8'
+    57: 252,  # '9'
+    58: 253,  # ':'
+    59: 253,  # ';'
+    60: 253,  # '<'
+    61: 253,  # '='
+    62: 253,  # '>'
+    63: 253,  # '?'
+    64: 253,  # '@'
+    65: 142,  # 'A'
+    66: 143,  # 'B'
+    67: 144,  # 'C'
+    68: 145,  # 'D'
+    69: 146,  # 'E'
+    70: 147,  # 'F'
+    71: 148,  # 'G'
+    72: 149,  # 'H'
+    73: 150,  # 'I'
+    74: 151,  # 'J'
+    75: 152,  # 'K'
+    76: 74,  # 'L'
+    77: 153,  # 'M'
+    78: 75,  # 'N'
+    79: 154,  # 'O'
+    80: 155,  # 'P'
+    81: 156,  # 'Q'
+    82: 157,  # 'R'
+    83: 158,  # 'S'
+    84: 159,  # 'T'
+    85: 160,  # 'U'
+    86: 161,  # 'V'
+    87: 162,  # 'W'
+    88: 163,  # 'X'
+    89: 164,  # 'Y'
+    90: 165,  # 'Z'
+    91: 253,  # '['
+    92: 253,  # '\\'
+    93: 253,  # ']'
+    94: 253,  # '^'
+    95: 253,  # '_'
+    96: 253,  # '`'
+    97: 71,  # 'a'
+    98: 172,  # 'b'
+    99: 66,  # 'c'
+    100: 173,  # 'd'
+    101: 65,  # 'e'
+    102: 174,  # 'f'
+    103: 76,  # 'g'
+    104: 175,  # 'h'
+    105: 64,  # 'i'
+    106: 176,  # 'j'
+    107: 177,  # 'k'
+    108: 77,  # 'l'
+    109: 72,  # 'm'
+    110: 178,  # 'n'
+    111: 69,  # 'o'
+    112: 67,  # 'p'
+    113: 179,  # 'q'
+    114: 78,  # 'r'
+    115: 73,  # 's'
+    116: 180,  # 't'
+    117: 181,  # 'u'
+    118: 79,  # 'v'
+    119: 182,  # 'w'
+    120: 183,  # 'x'
+    121: 184,  # 'y'
+    122: 185,  # 'z'
+    123: 253,  # '{'
+    124: 253,  # '|'
+    125: 253,  # '}'
+    126: 253,  # '~'
+    127: 253,  # '\x7f'
+    128: 191,  # 'ђ'
+    129: 192,  # 'Ђ'
+    130: 193,  # 'ѓ'
+    131: 194,  # 'Ѓ'
+    132: 68,  # 'ё'
+    133: 195,  # 'Ё'
+    134: 196,  # 'є'
+    135: 197,  # 'Є'
+    136: 198,  # 'ѕ'
+    137: 199,  # 'Ѕ'
+    138: 200,  # 'і'
+    139: 201,  # 'І'
+    140: 202,  # 'ї'
+    141: 203,  # 'Ї'
+    142: 204,  # 'ј'
+    143: 205,  # 'Ј'
+    144: 206,  # 'љ'
+    145: 207,  # 'Љ'
+    146: 208,  # 'њ'
+    147: 209,  # 'Њ'
+    148: 210,  # 'ћ'
+    149: 211,  # 'Ћ'
+    150: 212,  # 'ќ'
+    151: 213,  # 'Ќ'
+    152: 214,  # 'ў'
+    153: 215,  # 'Ў'
+    154: 216,  # 'џ'
+    155: 217,  # 'Џ'
+    156: 27,  # 'ю'
+    157: 59,  # 'Ю'
+    158: 54,  # 'ъ'
+    159: 70,  # 'Ъ'
+    160: 3,  # 'а'
+    161: 37,  # 'А'
+    162: 21,  # 'б'
+    163: 44,  # 'Б'
+    164: 28,  # 'ц'
+    165: 58,  # 'Ц'
+    166: 13,  # 'д'
+    167: 41,  # 'Д'
+    168: 2,  # 'е'
+    169: 48,  # 'Е'
+    170: 39,  # 'ф'
+    171: 53,  # 'Ф'
+    172: 19,  # 'г'
+    173: 46,  # 'Г'
+    174: 218,  # '«'
+    175: 219,  # '»'
+    176: 220,  # '░'
+    177: 221,  # '▒'
+    178: 222,  # '▓'
+    179: 223,  # '│'
+    180: 224,  # '┤'
+    181: 26,  # 'х'
+    182: 55,  # 'Х'
+    183: 4,  # 'и'
+    184: 42,  # 'И'
+    185: 225,  # '╣'
+    186: 226,  # '║'
+    187: 227,  # '╗'
+    188: 228,  # '╝'
+    189: 23,  # 'й'
+    190: 60,  # 'Й'
+    191: 229,  # '┐'
+    192: 230,  # '└'
+    193: 231,  # '┴'
+    194: 232,  # '┬'
+    195: 233,  # '├'
+    196: 234,  # '─'
+    197: 235,  # '┼'
+    198: 11,  # 'к'
+    199: 36,  # 'К'
+    200: 236,  # '╚'
+    201: 237,  # '╔'
+    202: 238,  # '╩'
+    203: 239,  # '╦'
+    204: 240,  # '╠'
+    205: 241,  # '═'
+    206: 242,  # '╬'
+    207: 243,  # '¤'
+    208: 8,  # 'л'
+    209: 49,  # 'Л'
+    210: 12,  # 'м'
+    211: 38,  # 'М'
+    212: 5,  # 'н'
+    213: 31,  # 'Н'
+    214: 1,  # 'о'
+    215: 34,  # 'О'
+    216: 15,  # 'п'
+    217: 244,  # '┘'
+    218: 245,  # '┌'
+    219: 246,  # '█'
+    220: 247,  # '▄'
+    221: 35,  # 'П'
+    222: 16,  # 'я'
+    223: 248,  # '▀'
+    224: 43,  # 'Я'
+    225: 9,  # 'р'
+    226: 45,  # 'Р'
+    227: 7,  # 'с'
+    228: 32,  # 'С'
+    229: 6,  # 'т'
+    230: 40,  # 'Т'
+    231: 14,  # 'у'
+    232: 52,  # 'У'
+    233: 24,  # 'ж'
+    234: 56,  # 'Ж'
+    235: 10,  # 'в'
+    236: 33,  # 'В'
+    237: 17,  # 'ь'
+    238: 61,  # 'Ь'
+    239: 249,  # '№'
+    240: 250,  # '\xad'
+    241: 18,  # 'ы'
+    242: 62,  # 'Ы'
+    243: 20,  # 'з'
+    244: 51,  # 'З'
+    245: 25,  # 'ш'
+    246: 57,  # 'Ш'
+    247: 30,  # 'э'
+    248: 47,  # 'Э'
+    249: 29,  # 'щ'
+    250: 63,  # 'Щ'
+    251: 22,  # 'ч'
+    252: 50,  # 'Ч'
+    253: 251,  # '§'
+    254: 252,  # '■'
+    255: 255,  # '\xa0'
+}
+
+IBM855_RUSSIAN_MODEL = SingleByteCharSetModel(
+    charset_name="IBM855",
+    language="Russian",
+    char_to_order_map=IBM855_RUSSIAN_CHAR_TO_ORDER,
+    language_model=RUSSIAN_LANG_MODEL,
+    typical_positive_ratio=0.976601,
+    keep_ascii_letters=False,
+    alphabet="ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё",
+)
+
+KOI8_R_RUSSIAN_CHAR_TO_ORDER = {
+    0: 255,  # '\x00'
+    1: 255,  # '\x01'
+    2: 255,  # '\x02'
+    3: 255,  # '\x03'
+    4: 255,  # '\x04'
+    5: 255,  # '\x05'
+    6: 255,  # '\x06'
+    7: 255,  # '\x07'
+    8: 255,  # '\x08'
+    9: 255,  # '\t'
+    10: 254,  # '\n'
+    11: 255,  # '\x0b'
+    12: 255,  # '\x0c'
+    13: 254,  # '\r'
+    14: 255,  # '\x0e'
+    15: 255,  # '\x0f'
+    16: 255,  # '\x10'
+    17: 255,  # '\x11'
+    18: 255,  # '\x12'
+    19: 255,  # '\x13'
+    20: 255,  # '\x14'
+    21: 255,  # '\x15'
+    22: 255,  # '\x16'
+    23: 255,  # '\x17'
+    24: 255,  # '\x18'
+    25: 255,  # '\x19'
+    26: 255,  # '\x1a'
+    27: 255,  # '\x1b'
+    28: 255,  # '\x1c'
+    29: 255,  # '\x1d'
+    30: 255,  # '\x1e'
+    31: 255,  # '\x1f'
+    32: 253,  # ' '
+    33: 253,  # '!'
+    34: 253,  # '"'
+    35: 253,  # '#'
+    36: 253,  # '$'
+    37: 253,  # '%'
+    38: 253,  # '&'
+    39: 253,  # "'"
+    40: 253,  # '('
+    41: 253,  # ')'
+    42: 253,  # '*'
+    43: 253,  # '+'
+    44: 253,  # ','
+    45: 253,  # '-'
+    46: 253,  # '.'
+    47: 253,  # '/'
+    48: 252,  # '0'
+    49: 252,  # '1'
+    50: 252,  # '2'
+    51: 252,  # '3'
+    52: 252,  # '4'
+    53: 252,  # '5'
+    54: 252,  # '6'
+    55: 252,  # '7'
+    56: 252,  # '8'
+    57: 252,  # '9'
+    58: 253,  # ':'
+    59: 253,  # ';'
+    60: 253,  # '<'
+    61: 253,  # '='
+    62: 253,  # '>'
+    63: 253,  # '?'
+    64: 253,  # '@'
+    65: 142,  # 'A'
+    66: 143,  # 'B'
+    67: 144,  # 'C'
+    68: 145,  # 'D'
+    69: 146,  # 'E'
+    70: 147,  # 'F'
+    71: 148,  # 'G'
+    72: 149,  # 'H'
+    73: 150,  # 'I'
+    74: 151,  # 'J'
+    75: 152,  # 'K'
+    76: 74,  # 'L'
+    77: 153,  # 'M'
+    78: 75,  # 'N'
+    79: 154,  # 'O'
+    80: 155,  # 'P'
+    81: 156,  # 'Q'
+    82: 157,  # 'R'
+    83: 158,  # 'S'
+    84: 159,  # 'T'
+    85: 160,  # 'U'
+    86: 161,  # 'V'
+    87: 162,  # 'W'
+    88: 163,  # 'X'
+    89: 164,  # 'Y'
+    90: 165,  # 'Z'
+    91: 253,  # '['
+    92: 253,  # '\\'
+    93: 253,  # ']'
+    94: 253,  # '^'
+    95: 253,  # '_'
+    96: 253,  # '`'
+    97: 71,  # 'a'
+    98: 172,  # 'b'
+    99: 66,  # 'c'
+    100: 173,  # 'd'
+    101: 65,  # 'e'
+    102: 174,  # 'f'
+    103: 76,  # 'g'
+    104: 175,  # 'h'
+    105: 64,  # 'i'
+    106: 176,  # 'j'
+    107: 177,  # 'k'
+    108: 77,  # 'l'
+    109: 72,  # 'm'
+    110: 178,  # 'n'
+    111: 69,  # 'o'
+    112: 67,  # 'p'
+    113: 179,  # 'q'
+    114: 78,  # 'r'
+    115: 73,  # 's'
+    116: 180,  # 't'
+    117: 181,  # 'u'
+    118: 79,  # 'v'
+    119: 182,  # 'w'
+    120: 183,  # 'x'
+    121: 184,  # 'y'
+    122: 185,  # 'z'
+    123: 253,  # '{'
+    124: 253,  # '|'
+    125: 253,  # '}'
+    126: 253,  # '~'
+    127: 253,  # '\x7f'
+    128: 191,  # '─'
+    129: 192,  # '│'
+    130: 193,  # '┌'
+    131: 194,  # '┐'
+    132: 195,  # '└'
+    133: 196,  # '┘'
+    134: 197,  # '├'
+    135: 198,  # '┤'
+    136: 199,  # '┬'
+    137: 200,  # '┴'
+    138: 201,  # '┼'
+    139: 202,  # '▀'
+    140: 203,  # '▄'
+    141: 204,  # '█'
+    142: 205,  # '▌'
+    143: 206,  # '▐'
+    144: 207,  # '░'
+    145: 208,  # '▒'
+    146: 209,  # '▓'
+    147: 210,  # '⌠'
+    148: 211,  # '■'
+    149: 212,  # '∙'
+    150: 213,  # '√'
+    151: 214,  # '≈'
+    152: 215,  # '≤'
+    153: 216,  # '≥'
+    154: 217,  # '\xa0'
+    155: 218,  # '⌡'
+    156: 219,  # '°'
+    157: 220,  # '²'
+    158: 221,  # '·'
+    159: 222,  # '÷'
+    160: 223,  # '═'
+    161: 224,  # '║'
+    162: 225,  # '╒'
+    163: 68,  # 'ё'
+    164: 226,  # '╓'
+    165: 227,  # '╔'
+    166: 228,  # '╕'
+    167: 229,  # '╖'
+    168: 230,  # '╗'
+    169: 231,  # '╘'
+    170: 232,  # '╙'
+    171: 233,  # '╚'
+    172: 234,  # '╛'
+    173: 235,  # '╜'
+    174: 236,  # '╝'
+    175: 237,  # '╞'
+    176: 238,  # '╟'
+    177: 239,  # '╠'
+    178: 240,  # '╡'
+    179: 241,  # 'Ё'
+    180: 242,  # '╢'
+    181: 243,  # '╣'
+    182: 244,  # '╤'
+    183: 245,  # '╥'
+    184: 246,  # '╦'
+    185: 247,  # '╧'
+    186: 248,  # '╨'
+    187: 249,  # '╩'
+    188: 250,  # '╪'
+    189: 251,  # '╫'
+    190: 252,  # '╬'
+    191: 253,  # '©'
+    192: 27,  # 'ю'
+    193: 3,  # 'а'
+    194: 21,  # 'б'
+    195: 28,  # 'ц'
+    196: 13,  # 'д'
+    197: 2,  # 'е'
+    198: 39,  # 'ф'
+    199: 19,  # 'г'
+    200: 26,  # 'х'
+    201: 4,  # 'и'
+    202: 23,  # 'й'
+    203: 11,  # 'к'
+    204: 8,  # 'л'
+    205: 12,  # 'м'
+    206: 5,  # 'н'
+    207: 1,  # 'о'
+    208: 15,  # 'п'
+    209: 16,  # 'я'
+    210: 9,  # 'р'
+    211: 7,  # 'с'
+    212: 6,  # 'т'
+    213: 14,  # 'у'
+    214: 24,  # 'ж'
+    215: 10,  # 'в'
+    216: 17,  # 'ь'
+    217: 18,  # 'ы'
+    218: 20,  # 'з'
+    219: 25,  # 'ш'
+    220: 30,  # 'э'
+    221: 29,  # 'щ'
+    222: 22,  # 'ч'
+    223: 54,  # 'ъ'
+    224: 59,  # 'Ю'
+    225: 37,  # 'А'
+    226: 44,  # 'Б'
+    227: 58,  # 'Ц'
+    228: 41,  # 'Д'
+    229: 48,  # 'Е'
+    230: 53,  # 'Ф'
+    231: 46,  # 'Г'
+    232: 55,  # 'Х'
+    233: 42,  # 'И'
+    234: 60,  # 'Й'
+    235: 36,  # 'К'
+    236: 49,  # 'Л'
+    237: 38,  # 'М'
+    238: 31,  # 'Н'
+    239: 34,  # 'О'
+    240: 35,  # 'П'
+    241: 43,  # 'Я'
+    242: 45,  # 'Р'
+    243: 32,  # 'С'
+    244: 40,  # 'Т'
+    245: 52,  # 'У'
+    246: 56,  # 'Ж'
+    247: 33,  # 'В'
+    248: 61,  # 'Ь'
+    249: 62,  # 'Ы'
+    250: 51,  # 'З'
+    251: 57,  # 'Ш'
+    252: 47,  # 'Э'
+    253: 63,  # 'Щ'
+    254: 50,  # 'Ч'
+    255: 70,  # 'Ъ'
+}
+
+KOI8_R_RUSSIAN_MODEL = SingleByteCharSetModel(
+    charset_name="KOI8-R",
+    language="Russian",
+    char_to_order_map=KOI8_R_RUSSIAN_CHAR_TO_ORDER,
+    language_model=RUSSIAN_LANG_MODEL,
+    typical_positive_ratio=0.976601,
+    keep_ascii_letters=False,
+    alphabet="ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё",
+)
+
+MACCYRILLIC_RUSSIAN_CHAR_TO_ORDER = {
+    0: 255,  # '\x00'
+    1: 255,  # '\x01'
+    2: 255,  # '\x02'
+    3: 255,  # '\x03'
+    4: 255,  # '\x04'
+    5: 255,  # '\x05'
+    6: 255,  # '\x06'
+    7: 255,  # '\x07'
+    8: 255,  # '\x08'
+    9: 255,  # '\t'
+    10: 254,  # '\n'
+    11: 255,  # '\x0b'
+    12: 255,  # '\x0c'
+    13: 254,  # '\r'
+    14: 255,  # '\x0e'
+    15: 255,  # '\x0f'
+    16: 255,  # '\x10'
+    17: 255,  # '\x11'
+    18: 255,  # '\x12'
+    19: 255,  # '\x13'
+    20: 255,  # '\x14'
+    21: 255,  # '\x15'
+    22: 255,  # '\x16'
+    23: 255,  # '\x17'
+    24: 255,  # '\x18'
+    25: 255,  # '\x19'
+    26: 255,  # '\x1a'
+    27: 255,  # '\x1b'
+    28: 255,  # '\x1c'
+    29: 255,  # '\x1d'
+    30: 255,  # '\x1e'
+    31: 255,  # '\x1f'
+    32: 253,  # ' '
+    33: 253,  # '!'
+    34: 253,  # '"'
+    35: 253,  # '#'
+    36: 253,  # '$'
+    37: 253,  # '%'
+    38: 253,  # '&'
+    39: 253,  # "'"
+    40: 253,  # '('
+    41: 253,  # ')'
+    42: 253,  # '*'
+    43: 253,  # '+'
+    44: 253,  # ','
+    45: 253,  # '-'
+    46: 253,  # '.'
+    47: 253,  # '/'
+    48: 252,  # '0'
+    49: 252,  # '1'
+    50: 252,  # '2'
+    51: 252,  # '3'
+    52: 252,  # '4'
+    53: 252,  # '5'
+    54: 252,  # '6'
+    55: 252,  # '7'
+    56: 252,  # '8'
+    57: 252,  # '9'
+    58: 253,  # ':'
+    59: 253,  # ';'
+    60: 253,  # '<'
+    61: 253,  # '='
+    62: 253,  # '>'
+    63: 253,  # '?'
+    64: 253,  # '@'
+    65: 142,  # 'A'
+    66: 143,  # 'B'
+    67: 144,  # 'C'
+    68: 145,  # 'D'
+    69: 146,  # 'E'
+    70: 147,  # 'F'
+    71: 148,  # 'G'
+    72: 149,  # 'H'
+    73: 150,  # 'I'
+    74: 151,  # 'J'
+    75: 152,  # 'K'
+    76: 74,  # 'L'
+    77: 153,  # 'M'
+    78: 75,  # 'N'
+    79: 154,  # 'O'
+    80: 155,  # 'P'
+    81: 156,  # 'Q'
+    82: 157,  # 'R'
+    83: 158,  # 'S'
+    84: 159,  # 'T'
+    85: 160,  # 'U'
+    86: 161,  # 'V'
+    87: 162,  # 'W'
+    88: 163,  # 'X'
+    89: 164,  # 'Y'
+    90: 165,  # 'Z'
+    91: 253,  # '['
+    92: 253,  # '\\'
+    93: 253,  # ']'
+    94: 253,  # '^'
+    95: 253,  # '_'
+    96: 253,  # '`'
+    97: 71,  # 'a'
+    98: 172,  # 'b'
+    99: 66,  # 'c'
+    100: 173,  # 'd'
+    101: 65,  # 'e'
+    102: 174,  # 'f'
+    103: 76,  # 'g'
+    104: 175,  # 'h'
+    105: 64,  # 'i'
+    106: 176,  # 'j'
+    107: 177,  # 'k'
+    108: 77,  # 'l'
+    109: 72,  # 'm'
+    110: 178,  # 'n'
+    111: 69,  # 'o'
+    112: 67,  # 'p'
+    113: 179,  # 'q'
+    114: 78,  # 'r'
+    115: 73,  # 's'
+    116: 180,  # 't'
+    117: 181,  # 'u'
+    118: 79,  # 'v'
+    119: 182,  # 'w'
+    120: 183,  # 'x'
+    121: 184,  # 'y'
+    122: 185,  # 'z'
+    123: 253,  # '{'
+    124: 253,  # '|'
+    125: 253,  # '}'
+    126: 253,  # '~'
+    127: 253,  # '\x7f'
+    128: 37,  # 'А'
+    129: 44,  # 'Б'
+    130: 33,  # 'В'
+    131: 46,  # 'Г'
+    132: 41,  # 'Д'
+    133: 48,  # 'Е'
+    134: 56,  # 'Ж'
+    135: 51,  # 'З'
+    136: 42,  # 'И'
+    137: 60,  # 'Й'
+    138: 36,  # 'К'
+    139: 49,  # 'Л'
+    140: 38,  # 'М'
+    141: 31,  # 'Н'
+    142: 34,  # 'О'
+    143: 35,  # 'П'
+    144: 45,  # 'Р'
+    145: 32,  # 'С'
+    146: 40,  # 'Т'
+    147: 52,  # 'У'
+    148: 53,  # 'Ф'
+    149: 55,  # 'Х'
+    150: 58,  # 'Ц'
+    151: 50,  # 'Ч'
+    152: 57,  # 'Ш'
+    153: 63,  # 'Щ'
+    154: 70,  # 'Ъ'
+    155: 62,  # 'Ы'
+    156: 61,  # 'Ь'
+    157: 47,  # 'Э'
+    158: 59,  # 'Ю'
+    159: 43,  # 'Я'
+    160: 191,  # '†'
+    161: 192,  # '°'
+    162: 193,  # 'Ґ'
+    163: 194,  # '£'
+    164: 195,  # '§'
+    165: 196,  # '•'
+    166: 197,  # '¶'
+    167: 198,  # 'І'
+    168: 199,  # '®'
+    169: 200,  # '©'
+    170: 201,  # '™'
+    171: 202,  # 'Ђ'
+    172: 203,  # 'ђ'
+    173: 204,  # '≠'
+    174: 205,  # 'Ѓ'
+    175: 206,  # 'ѓ'
+    176: 207,  # '∞'
+    177: 208,  # '±'
+    178: 209,  # '≤'
+    179: 210,  # '≥'
+    180: 211,  # 'і'
+    181: 212,  # 'µ'
+    182: 213,  # 'ґ'
+    183: 214,  # 'Ј'
+    184: 215,  # 'Є'
+    185: 216,  # 'є'
+    186: 217,  # 'Ї'
+    187: 218,  # 'ї'
+    188: 219,  # 'Љ'
+    189: 220,  # 'љ'
+    190: 221,  # 'Њ'
+    191: 222,  # 'њ'
+    192: 223,  # 'ј'
+    193: 224,  # 'Ѕ'
+    194: 225,  # '¬'
+    195: 226,  # '√'
+    196: 227,  # 'ƒ'
+    197: 228,  # '≈'
+    198: 229,  # '∆'
+    199: 230,  # '«'
+    200: 231,  # '»'
+    201: 232,  # '…'
+    202: 233,  # '\xa0'
+    203: 234,  # 'Ћ'
+    204: 235,  # 'ћ'
+    205: 236,  # 'Ќ'
+    206: 237,  # 'ќ'
+    207: 238,  # 'ѕ'
+    208: 239,  # '–'
+    209: 240,  # '—'
+    210: 241,  # '“'
+    211: 242,  # '”'
+    212: 243,  # '‘'
+    213: 244,  # '’'
+    214: 245,  # '÷'
+    215: 246,  # '„'
+    216: 247,  # 'Ў'
+    217: 248,  # 'ў'
+    218: 249,  # 'Џ'
+    219: 250,  # 'џ'
+    220: 251,  # '№'
+    221: 252,  # 'Ё'
+    222: 68,  # 'ё'
+    223: 16,  # 'я'
+    224: 3,  # 'а'
+    225: 21,  # 'б'
+    226: 10,  # 'в'
+    227: 19,  # 'г'
+    228: 13,  # 'д'
+    229: 2,  # 'е'
+    230: 24,  # 'ж'
+    231: 20,  # 'з'
+    232: 4,  # 'и'
+    233: 23,  # 'й'
+    234: 11,  # 'к'
+    235: 8,  # 'л'
+    236: 12,  # 'м'
+    237: 5,  # 'н'
+    238: 1,  # 'о'
+    239: 15,  # 'п'
+    240: 9,  # 'р'
+    241: 7,  # 'с'
+    242: 6,  # 'т'
+    243: 14,  # 'у'
+    244: 39,  # 'ф'
+    245: 26,  # 'х'
+    246: 28,  # 'ц'
+    247: 22,  # 'ч'
+    248: 25,  # 'ш'
+    249: 29,  # 'щ'
+    250: 54,  # 'ъ'
+    251: 18,  # 'ы'
+    252: 17,  # 'ь'
+    253: 30,  # 'э'
+    254: 27,  # 'ю'
+    255: 255,  # '€'
+}
+
+MACCYRILLIC_RUSSIAN_MODEL = SingleByteCharSetModel(
+    charset_name="MacCyrillic",
+    language="Russian",
+    char_to_order_map=MACCYRILLIC_RUSSIAN_CHAR_TO_ORDER,
+    language_model=RUSSIAN_LANG_MODEL,
+    typical_positive_ratio=0.976601,
+    keep_ascii_letters=False,
+    alphabet="ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё",
+)
+
+ISO_8859_5_RUSSIAN_CHAR_TO_ORDER = {
+    0: 255,  # '\x00'
+    1: 255,  # '\x01'
+    2: 255,  # '\x02'
+    3: 255,  # '\x03'
+    4: 255,  # '\x04'
+    5: 255,  # '\x05'
+    6: 255,  # '\x06'
+    7: 255,  # '\x07'
+    8: 255,  # '\x08'
+    9: 255,  # '\t'
+    10: 254,  # '\n'
+    11: 255,  # '\x0b'
+    12: 255,  # '\x0c'
+    13: 254,  # '\r'
+    14: 255,  # '\x0e'
+    15: 255,  # '\x0f'
+    16: 255,  # '\x10'
+    17: 255,  # '\x11'
+    18: 255,  # '\x12'
+    19: 255,  # '\x13'
+    20: 255,  # '\x14'
+    21: 255,  # '\x15'
+    22: 255,  # '\x16'
+    23: 255,  # '\x17'
+    24: 255,  # '\x18'
+    25: 255,  # '\x19'
+    26: 255,  # '\x1a'
+    27: 255,  # '\x1b'
+    28: 255,  # '\x1c'
+    29: 255,  # '\x1d'
+    30: 255,  # '\x1e'
+    31: 255,  # '\x1f'
+    32: 253,  # ' '
+    33: 253,  # '!'
+    34: 253,  # '"'
+    35: 253,  # '#'
+    36: 253,  # '$'
+    37: 253,  # '%'
+    38: 253,  # '&'
+    39: 253,  # "'"
+    40: 253,  # '('
+    41: 253,  # ')'
+    42: 253,  # '*'
+    43: 253,  # '+'
+    44: 253,  # ','
+    45: 253,  # '-'
+    46: 253,  # '.'
+    47: 253,  # '/'
+    48: 252,  # '0'
+    49: 252,  # '1'
+    50: 252,  # '2'
+    51: 252,  # '3'
+    52: 252,  # '4'
+    53: 252,  # '5'
+    54: 252,  # '6'
+    55: 252,  # '7'
+    56: 252,  # '8'
+    57: 252,  # '9'
+    58: 253,  # ':'
+    59: 253,  # ';'
+    60: 253,  # '<'
+    61: 253,  # '='
+    62: 253,  # '>'
+    63: 253,  # '?'
+    64: 253,  # '@'
+    65: 142,  # 'A'
+    66: 143,  # 'B'
+    67: 144,  # 'C'
+    68: 145,  # 'D'
+    69: 146,  # 'E'
+    70: 147,  # 'F'
+    71: 148,  # 'G'
+    72: 149,  # 'H'
+    73: 150,  # 'I'
+    74: 151,  # 'J'
+    75: 152,  # 'K'
+    76: 74,  # 'L'
+    77: 153,  # 'M'
+    78: 75,  # 'N'
+    79: 154,  # 'O'
+    80: 155,  # 'P'
+    81: 156,  # 'Q'
+    82: 157,  # 'R'
+    83: 158,  # 'S'
+    84: 159,  # 'T'
+    85: 160,  # 'U'
+    86: 161,  # 'V'
+    87: 162,  # 'W'
+    88: 163,  # 'X'
+    89: 164,  # 'Y'
+    90: 165,  # 'Z'
+    91: 253,  # '['
+    92: 253,  # '\\'
+    93: 253,  # ']'
+    94: 253,  # '^'
+    95: 253,  # '_'
+    96: 253,  # '`'
+    97: 71,  # 'a'
+    98: 172,  # 'b'
+    99: 66,  # 'c'
+    100: 173,  # 'd'
+    101: 65,  # 'e'
+    102: 174,  # 'f'
+    103: 76,  # 'g'
+    104: 175,  # 'h'
+    105: 64,  # 'i'
+    106: 176,  # 'j'
+    107: 177,  # 'k'
+    108: 77,  # 'l'
+    109: 72,  # 'm'
+    110: 178,  # 'n'
+    111: 69,  # 'o'
+    112: 67,  # 'p'
+    113: 179,  # 'q'
+    114: 78,  # 'r'
+    115: 73,  # 's'
+    116: 180,  # 't'
+    117: 181,  # 'u'
+    118: 79,  # 'v'
+    119: 182,  # 'w'
+    120: 183,  # 'x'
+    121: 184,  # 'y'
+    122: 185,  # 'z'
+    123: 253,  # '{'
+    124: 253,  # '|'
+    125: 253,  # '}'
+    126: 253,  # '~'
+    127: 253,  # '\x7f'
+    128: 191,  # '\x80'
+    129: 192,  # '\x81'
+    130: 193,  # '\x82'
+    131: 194,  # '\x83'
+    132: 195,  # '\x84'
+    133: 196,  # '\x85'
+    134: 197,  # '\x86'
+    135: 198,  # '\x87'
+    136: 199,  # '\x88'
+    137: 200,  # '\x89'
+    138: 201,  # '\x8a'
+    139: 202,  # '\x8b'
+    140: 203,  # '\x8c'
+    141: 204,  # '\x8d'
+    142: 205,  # '\x8e'
+    143: 206,  # '\x8f'
+    144: 207,  # '\x90'
+    145: 208,  # '\x91'
+    146: 209,  # '\x92'
+    147: 210,  # '\x93'
+    148: 211,  # '\x94'
+    149: 212,  # '\x95'
+    150: 213,  # '\x96'
+    151: 214,  # '\x97'
+    152: 215,  # '\x98'
+    153: 216,  # '\x99'
+    154: 217,  # '\x9a'
+    155: 218,  # '\x9b'
+    156: 219,  # '\x9c'
+    157: 220,  # '\x9d'
+    158: 221,  # '\x9e'
+    159: 222,  # '\x9f'
+    160: 223,  # '\xa0'
+    161: 224,  # 'Ё'
+    162: 225,  # 'Ђ'
+    163: 226,  # 'Ѓ'
+    164: 227,  # 'Є'
+    165: 228,  # 'Ѕ'
+    166: 229,  # 'І'
+    167: 230,  # 'Ї'
+    168: 231,  # 'Ј'
+    169: 232,  # 'Љ'
+    170: 233,  # 'Њ'
+    171: 234,  # 'Ћ'
+    172: 235,  # 'Ќ'
+    173: 236,  # '\xad'
+    174: 237,  # 'Ў'
+    175: 238,  # 'Џ'
+    176: 37,  # 'А'
+    177: 44,  # 'Б'
+    178: 33,  # 'В'
+    179: 46,  # 'Г'
+    180: 41,  # 'Д'
+    181: 48,  # 'Е'
+    182: 56,  # 'Ж'
+    183: 51,  # 'З'
+    184: 42,  # 'И'
+    185: 60,  # 'Й'
+    186: 36,  # 'К'
+    187: 49,  # 'Л'
+    188: 38,  # 'М'
+    189: 31,  # 'Н'
+    190: 34,  # 'О'
+    191: 35,  # 'П'
+    192: 45,  # 'Р'
+    193: 32,  # 'С'
+    194: 40,  # 'Т'
+    195: 52,  # 'У'
+    196: 53,  # 'Ф'
+    197: 55,  # 'Х'
+    198: 58,  # 'Ц'
+    199: 50,  # 'Ч'
+    200: 57,  # 'Ш'
+    201: 63,  # 'Щ'
+    202: 70,  # 'Ъ'
+    203: 62,  # 'Ы'
+    204: 61,  # 'Ь'
+    205: 47,  # 'Э'
+    206: 59,  # 'Ю'
+    207: 43,  # 'Я'
+    208: 3,  # 'а'
+    209: 21,  # 'б'
+    210: 10,  # 'в'
+    211: 19,  # 'г'
+    212: 13,  # 'д'
+    213: 2,  # 'е'
+    214: 24,  # 'ж'
+    215: 20,  # 'з'
+    216: 4,  # 'и'
+    217: 23,  # 'й'
+    218: 11,  # 'к'
+    219: 8,  # 'л'
+    220: 12,  # 'м'
+    221: 5,  # 'н'
+    222: 1,  # 'о'
+    223: 15,  # 'п'
+    224: 9,  # 'р'
+    225: 7,  # 'с'
+    226: 6,  # 'т'
+    227: 14,  # 'у'
+    228: 39,  # 'ф'
+    229: 26,  # 'х'
+    230: 28,  # 'ц'
+    231: 22,  # 'ч'
+    232: 25,  # 'ш'
+    233: 29,  # 'щ'
+    234: 54,  # 'ъ'
+    235: 18,  # 'ы'
+    236: 17,  # 'ь'
+    237: 30,  # 'э'
+    238: 27,  # 'ю'
+    239: 16,  # 'я'
+    240: 239,  # '№'
+    241: 68,  # 'ё'
+    242: 240,  # 'ђ'
+    243: 241,  # 'ѓ'
+    244: 242,  # 'є'
+    245: 243,  # 'ѕ'
+    246: 244,  # 'і'
+    247: 245,  # 'ї'
+    248: 246,  # 'ј'
+    249: 247,  # 'љ'
+    250: 248,  # 'њ'
+    251: 249,  # 'ћ'
+    252: 250,  # 'ќ'
+    253: 251,  # '§'
+    254: 252,  # 'ў'
+    255: 255,  # 'џ'
+}
+
+ISO_8859_5_RUSSIAN_MODEL = SingleByteCharSetModel(
+    charset_name="ISO-8859-5",
+    language="Russian",
+    char_to_order_map=ISO_8859_5_RUSSIAN_CHAR_TO_ORDER,
+    language_model=RUSSIAN_LANG_MODEL,
+    typical_positive_ratio=0.976601,
+    keep_ascii_letters=False,
+    alphabet="ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё",
+)
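+# A minimal usage sketch (an assumption, not part of the vendored file: it relies
+# on chardet's standard single-byte prober API, which lives in a sibling module):
+# each SingleByteCharSetModel defined above can be wrapped by a
+# SingleByteCharSetProber and fed raw bytes to score how plausibly they decode
+# as Russian text in that encoding.
+#
+#     from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetProber
+#
+#     prober = SingleByteCharSetProber(KOI8_R_RUSSIAN_MODEL)
+#     prober.feed("привет мир".encode("koi8-r"))
+#     print(prober.charset_name, prober.get_confidence())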
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langthaimodel.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langthaimodel.py
new file mode 100644
index 0000000..489cad9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langthaimodel.py
@@ -0,0 +1,4380 @@
+from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
+
+# 3: Positive
+# 2: Likely
+# 1: Unlikely
+# 0: Negative
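+# Assumed reading of the table below (inferred from the data layout, not stated
+# in this file): the outer key is the order index of the current character, the
+# inner key is the order index of the character that follows it, and the value
+# is the bigram likelihood category listed above (3 = Positive ... 0 = Negative).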
+
+THAI_LANG_MODEL = {
+    5: {  # 'ก'
+        5: 2,  # 'ก'
+        30: 2,  # 'ข'
+        24: 2,  # 'ค'
+        8: 2,  # 'ง'
+        26: 2,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 1,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 3,  # 'ฎ'
+        57: 2,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 2,  # 'ณ'
+        20: 2,  # 'ด'
+        19: 3,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 2,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 2,  # 'น'
+        17: 1,  # 'บ'
+        25: 2,  # 'ป'
+        39: 1,  # 'ผ'
+        62: 1,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 1,  # 'ภ'
+        9: 2,  # 'ม'
+        16: 1,  # 'ย'
+        2: 3,  # 'ร'
+        61: 2,  # 'ฤ'
+        15: 3,  # 'ล'
+        12: 3,  # 'ว'
+        42: 2,  # 'ศ'
+        46: 3,  # 'ษ'
+        18: 2,  # 'ส'
+        21: 2,  # 'ห'
+        4: 3,  # 'อ'
+        63: 1,  # 'ฯ'
+        22: 2,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 3,  # 'า'
+        36: 3,  # 'ำ'
+        23: 3,  # 'ิ'
+        13: 3,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 2,  # 'ื'
+        32: 2,  # 'ุ'
+        35: 1,  # 'ู'
+        11: 2,  # 'เ'
+        28: 2,  # 'แ'
+        41: 1,  # 'โ'
+        29: 1,  # 'ใ'
+        33: 2,  # 'ไ'
+        50: 1,  # 'ๆ'
+        37: 3,  # '็'
+        6: 3,  # '่'
+        7: 3,  # '้'
+        38: 2,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    30: {  # 'ข'
+        5: 1,  # 'ก'
+        30: 0,  # 'ข'
+        24: 1,  # 'ค'
+        8: 1,  # 'ง'
+        26: 1,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 2,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 2,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 1,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 2,  # 'น'
+        17: 1,  # 'บ'
+        25: 1,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 0,  # 'ม'
+        16: 2,  # 'ย'
+        2: 1,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 2,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 1,  # 'ส'
+        21: 1,  # 'ห'
+        4: 3,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 3,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 2,  # 'ี'
+        40: 3,  # 'ึ'
+        27: 1,  # 'ื'
+        32: 1,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 1,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 1,  # '็'
+        6: 2,  # '่'
+        7: 3,  # '้'
+        38: 1,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    24: {  # 'ค'
+        5: 0,  # 'ก'
+        30: 0,  # 'ข'
+        24: 2,  # 'ค'
+        8: 2,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 2,  # 'ณ'
+        20: 2,  # 'ด'
+        19: 2,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 1,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 3,  # 'น'
+        17: 0,  # 'บ'
+        25: 1,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 2,  # 'ม'
+        16: 2,  # 'ย'
+        2: 3,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 3,  # 'ล'
+        12: 3,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 1,  # 'ส'
+        21: 0,  # 'ห'
+        4: 2,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 2,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 2,  # 'า'
+        36: 3,  # 'ำ'
+        23: 3,  # 'ิ'
+        13: 2,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 3,  # 'ื'
+        32: 3,  # 'ุ'
+        35: 2,  # 'ู'
+        11: 1,  # 'เ'
+        28: 0,  # 'แ'
+        41: 3,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 1,  # '็'
+        6: 3,  # '่'
+        7: 3,  # '้'
+        38: 3,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    8: {  # 'ง'
+        5: 3,  # 'ก'
+        30: 2,  # 'ข'
+        24: 3,  # 'ค'
+        8: 2,  # 'ง'
+        26: 2,  # 'จ'
+        52: 1,  # 'ฉ'
+        34: 2,  # 'ช'
+        51: 1,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 2,  # 'ด'
+        19: 2,  # 'ต'
+        44: 1,  # 'ถ'
+        14: 3,  # 'ท'
+        48: 1,  # 'ธ'
+        3: 3,  # 'น'
+        17: 2,  # 'บ'
+        25: 2,  # 'ป'
+        39: 2,  # 'ผ'
+        62: 1,  # 'ฝ'
+        31: 2,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 1,  # 'ภ'
+        9: 2,  # 'ม'
+        16: 1,  # 'ย'
+        2: 2,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 2,  # 'ล'
+        12: 2,  # 'ว'
+        42: 2,  # 'ศ'
+        46: 1,  # 'ษ'
+        18: 3,  # 'ส'
+        21: 3,  # 'ห'
+        4: 2,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 1,  # 'ั'
+        1: 3,  # 'า'
+        36: 0,  # 'ำ'
+        23: 2,  # 'ิ'
+        13: 1,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 1,  # 'ื'
+        32: 1,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 3,  # 'เ'
+        28: 2,  # 'แ'
+        41: 1,  # 'โ'
+        29: 2,  # 'ใ'
+        33: 2,  # 'ไ'
+        50: 3,  # 'ๆ'
+        37: 0,  # '็'
+        6: 2,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    26: {  # 'จ'
+        5: 2,  # 'ก'
+        30: 1,  # 'ข'
+        24: 0,  # 'ค'
+        8: 2,  # 'ง'
+        26: 3,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 2,  # 'ด'
+        19: 1,  # 'ต'
+        44: 1,  # 'ถ'
+        14: 2,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 3,  # 'น'
+        17: 1,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 1,  # 'ม'
+        16: 1,  # 'ย'
+        2: 3,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 1,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 2,  # 'ส'
+        21: 1,  # 'ห'
+        4: 2,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 3,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 3,  # 'า'
+        36: 3,  # 'ำ'
+        23: 2,  # 'ิ'
+        13: 1,  # 'ี'
+        40: 3,  # 'ึ'
+        27: 1,  # 'ื'
+        32: 3,  # 'ุ'
+        35: 2,  # 'ู'
+        11: 1,  # 'เ'
+        28: 1,  # 'แ'
+        41: 0,  # 'โ'
+        29: 1,  # 'ใ'
+        33: 1,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 2,  # '่'
+        7: 2,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    52: {  # 'ฉ'
+        5: 0,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 0,  # 'น'
+        17: 3,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 3,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 1,  # 'ม'
+        16: 1,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 2,  # 'ล'
+        12: 1,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 1,  # 'ะ'
+        10: 1,  # 'ั'
+        1: 1,  # 'า'
+        36: 0,  # 'ำ'
+        23: 1,  # 'ิ'
+        13: 1,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 1,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    34: {  # 'ช'
+        5: 1,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 1,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 1,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 1,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 3,  # 'น'
+        17: 2,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 2,  # 'ม'
+        16: 1,  # 'ย'
+        2: 1,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 1,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 2,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 2,  # 'ั'
+        1: 3,  # 'า'
+        36: 1,  # 'ำ'
+        23: 3,  # 'ิ'
+        13: 2,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 3,  # 'ื'
+        32: 3,  # 'ุ'
+        35: 1,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 1,  # '็'
+        6: 3,  # '่'
+        7: 3,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    51: {  # 'ซ'
+        5: 0,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 1,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 0,  # 'ม'
+        16: 0,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 1,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 1,  # 'ส'
+        21: 0,  # 'ห'
+        4: 2,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 1,  # 'ั'
+        1: 1,  # 'า'
+        36: 0,  # 'ำ'
+        23: 1,  # 'ิ'
+        13: 2,  # 'ี'
+        40: 3,  # 'ึ'
+        27: 2,  # 'ื'
+        32: 1,  # 'ุ'
+        35: 1,  # 'ู'
+        11: 1,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 1,  # '็'
+        6: 1,  # '่'
+        7: 2,  # '้'
+        38: 1,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    47: {  # 'ญ'
+        5: 1,  # 'ก'
+        30: 1,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 3,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 1,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 0,  # 'น'
+        17: 1,  # 'บ'
+        25: 1,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 1,  # 'ม'
+        16: 0,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 1,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 1,  # 'ส'
+        21: 2,  # 'ห'
+        4: 1,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 1,  # 'ะ'
+        10: 2,  # 'ั'
+        1: 3,  # 'า'
+        36: 0,  # 'ำ'
+        23: 1,  # 'ิ'
+        13: 1,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 1,  # 'เ'
+        28: 1,  # 'แ'
+        41: 0,  # 'โ'
+        29: 1,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 1,  # 'ๆ'
+        37: 0,  # '็'
+        6: 2,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    58: {  # 'ฎ'
+        5: 2,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 0,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 0,  # 'ม'
+        16: 0,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 1,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 1,  # 'ิ'
+        13: 2,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    57: {  # 'ฏ'
+        5: 0,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 0,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 0,  # 'ม'
+        16: 0,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 3,  # 'ิ'
+        13: 1,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    49: {  # 'ฐ'
+        5: 1,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 0,  # 'น'
+        17: 2,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 2,  # 'ม'
+        16: 0,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 0,  # 'ว'
+        42: 1,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 1,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 3,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 1,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    53: {  # 'ฑ'
+        5: 0,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 0,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 0,  # 'ม'
+        16: 0,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 2,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 3,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    55: {  # 'ฒ'
+        5: 0,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 3,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 0,  # 'ม'
+        16: 0,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 1,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    43: {  # 'ณ'
+        5: 1,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 3,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 0,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 3,  # 'ภ'
+        9: 0,  # 'ม'
+        16: 0,  # 'ย'
+        2: 1,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 1,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 1,  # 'ส'
+        21: 1,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 3,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 3,  # 'า'
+        36: 0,  # 'ำ'
+        23: 1,  # 'ิ'
+        13: 2,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 1,  # 'เ'
+        28: 1,  # 'แ'
+        41: 0,  # 'โ'
+        29: 1,  # 'ใ'
+        33: 1,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 3,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    20: {  # 'ด'
+        5: 2,  # 'ก'
+        30: 2,  # 'ข'
+        24: 2,  # 'ค'
+        8: 3,  # 'ง'
+        26: 2,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 1,  # 'ด'
+        19: 2,  # 'ต'
+        44: 1,  # 'ถ'
+        14: 2,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 1,  # 'น'
+        17: 1,  # 'บ'
+        25: 1,  # 'ป'
+        39: 1,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 1,  # 'ภ'
+        9: 2,  # 'ม'
+        16: 3,  # 'ย'
+        2: 2,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 2,  # 'ล'
+        12: 2,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 2,  # 'ส'
+        21: 2,  # 'ห'
+        4: 1,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 2,  # 'า'
+        36: 2,  # 'ำ'
+        23: 3,  # 'ิ'
+        13: 3,  # 'ี'
+        40: 1,  # 'ึ'
+        27: 2,  # 'ื'
+        32: 3,  # 'ุ'
+        35: 2,  # 'ู'
+        11: 2,  # 'เ'
+        28: 2,  # 'แ'
+        41: 1,  # 'โ'
+        29: 2,  # 'ใ'
+        33: 2,  # 'ไ'
+        50: 2,  # 'ๆ'
+        37: 2,  # '็'
+        6: 1,  # '่'
+        7: 3,  # '้'
+        38: 1,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    19: {  # 'ต'
+        5: 2,  # 'ก'
+        30: 1,  # 'ข'
+        24: 1,  # 'ค'
+        8: 0,  # 'ง'
+        26: 1,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 1,  # 'ด'
+        19: 1,  # 'ต'
+        44: 2,  # 'ถ'
+        14: 1,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 2,  # 'น'
+        17: 1,  # 'บ'
+        25: 1,  # 'ป'
+        39: 1,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 2,  # 'ภ'
+        9: 1,  # 'ม'
+        16: 1,  # 'ย'
+        2: 3,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 2,  # 'ล'
+        12: 1,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 3,  # 'ส'
+        21: 0,  # 'ห'
+        4: 3,  # 'อ'
+        63: 1,  # 'ฯ'
+        22: 2,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 3,  # 'า'
+        36: 2,  # 'ำ'
+        23: 3,  # 'ิ'
+        13: 2,  # 'ี'
+        40: 1,  # 'ึ'
+        27: 1,  # 'ื'
+        32: 3,  # 'ุ'
+        35: 2,  # 'ู'
+        11: 1,  # 'เ'
+        28: 1,  # 'แ'
+        41: 1,  # 'โ'
+        29: 1,  # 'ใ'
+        33: 1,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 2,  # '็'
+        6: 3,  # '่'
+        7: 3,  # '้'
+        38: 2,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    44: {  # 'ถ'
+        5: 1,  # 'ก'
+        30: 0,  # 'ข'
+        24: 1,  # 'ค'
+        8: 0,  # 'ง'
+        26: 1,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 1,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 1,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 1,  # 'น'
+        17: 2,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 0,  # 'ม'
+        16: 0,  # 'ย'
+        2: 1,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 1,  # 'ล'
+        12: 1,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 1,  # 'ส'
+        21: 0,  # 'ห'
+        4: 1,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 2,  # 'ั'
+        1: 3,  # 'า'
+        36: 0,  # 'ำ'
+        23: 2,  # 'ิ'
+        13: 1,  # 'ี'
+        40: 3,  # 'ึ'
+        27: 2,  # 'ื'
+        32: 2,  # 'ุ'
+        35: 3,  # 'ู'
+        11: 1,  # 'เ'
+        28: 1,  # 'แ'
+        41: 0,  # 'โ'
+        29: 1,  # 'ใ'
+        33: 1,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 2,  # '่'
+        7: 3,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    14: {  # 'ท'
+        5: 1,  # 'ก'
+        30: 1,  # 'ข'
+        24: 3,  # 'ค'
+        8: 1,  # 'ง'
+        26: 1,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 2,  # 'ด'
+        19: 1,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 1,  # 'ท'
+        48: 3,  # 'ธ'
+        3: 3,  # 'น'
+        17: 2,  # 'บ'
+        25: 2,  # 'ป'
+        39: 1,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 2,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 1,  # 'ม'
+        16: 3,  # 'ย'
+        2: 3,  # 'ร'
+        61: 1,  # 'ฤ'
+        15: 1,  # 'ล'
+        12: 2,  # 'ว'
+        42: 3,  # 'ศ'
+        46: 1,  # 'ษ'
+        18: 1,  # 'ส'
+        21: 0,  # 'ห'
+        4: 2,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 2,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 3,  # 'า'
+        36: 3,  # 'ำ'
+        23: 2,  # 'ิ'
+        13: 3,  # 'ี'
+        40: 2,  # 'ึ'
+        27: 1,  # 'ื'
+        32: 3,  # 'ุ'
+        35: 1,  # 'ู'
+        11: 0,  # 'เ'
+        28: 1,  # 'แ'
+        41: 0,  # 'โ'
+        29: 1,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 1,  # '็'
+        6: 3,  # '่'
+        7: 3,  # '้'
+        38: 2,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    48: {  # 'ธ'
+        5: 0,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 1,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 1,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 0,  # 'ม'
+        16: 0,  # 'ย'
+        2: 2,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 2,  # 'า'
+        36: 0,  # 'ำ'
+        23: 3,  # 'ิ'
+        13: 3,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 2,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 3,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    3: {  # 'น'
+        5: 3,  # 'ก'
+        30: 2,  # 'ข'
+        24: 3,  # 'ค'
+        8: 1,  # 'ง'
+        26: 2,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 1,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 1,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 3,  # 'ด'
+        19: 3,  # 'ต'
+        44: 2,  # 'ถ'
+        14: 3,  # 'ท'
+        48: 3,  # 'ธ'
+        3: 2,  # 'น'
+        17: 2,  # 'บ'
+        25: 2,  # 'ป'
+        39: 2,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 2,  # 'พ'
+        54: 1,  # 'ฟ'
+        45: 1,  # 'ภ'
+        9: 2,  # 'ม'
+        16: 2,  # 'ย'
+        2: 2,  # 'ร'
+        61: 1,  # 'ฤ'
+        15: 2,  # 'ล'
+        12: 3,  # 'ว'
+        42: 1,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 2,  # 'ส'
+        21: 2,  # 'ห'
+        4: 3,  # 'อ'
+        63: 1,  # 'ฯ'
+        22: 2,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 3,  # 'า'
+        36: 3,  # 'ำ'
+        23: 3,  # 'ิ'
+        13: 3,  # 'ี'
+        40: 3,  # 'ึ'
+        27: 3,  # 'ื'
+        32: 3,  # 'ุ'
+        35: 2,  # 'ู'
+        11: 3,  # 'เ'
+        28: 2,  # 'แ'
+        41: 3,  # 'โ'
+        29: 3,  # 'ใ'
+        33: 3,  # 'ไ'
+        50: 2,  # 'ๆ'
+        37: 1,  # '็'
+        6: 3,  # '่'
+        7: 3,  # '้'
+        38: 2,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    17: {  # 'บ'
+        5: 3,  # 'ก'
+        30: 2,  # 'ข'
+        24: 2,  # 'ค'
+        8: 1,  # 'ง'
+        26: 1,  # 'จ'
+        52: 1,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 1,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 1,  # 'ด'
+        19: 2,  # 'ต'
+        44: 1,  # 'ถ'
+        14: 3,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 3,  # 'น'
+        17: 3,  # 'บ'
+        25: 2,  # 'ป'
+        39: 2,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 1,  # 'ฟ'
+        45: 1,  # 'ภ'
+        9: 1,  # 'ม'
+        16: 0,  # 'ย'
+        2: 3,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 2,  # 'ล'
+        12: 3,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 2,  # 'ส'
+        21: 2,  # 'ห'
+        4: 2,  # 'อ'
+        63: 1,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 3,  # 'า'
+        36: 2,  # 'ำ'
+        23: 2,  # 'ิ'
+        13: 2,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 2,  # 'ื'
+        32: 3,  # 'ุ'
+        35: 2,  # 'ู'
+        11: 2,  # 'เ'
+        28: 2,  # 'แ'
+        41: 1,  # 'โ'
+        29: 2,  # 'ใ'
+        33: 2,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 1,  # '็'
+        6: 2,  # '่'
+        7: 2,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    25: {  # 'ป'
+        5: 2,  # 'ก'
+        30: 0,  # 'ข'
+        24: 1,  # 'ค'
+        8: 0,  # 'ง'
+        26: 1,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 1,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 1,  # 'ฎ'
+        57: 3,  # 'ฏ'
+        49: 1,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 1,  # 'ด'
+        19: 1,  # 'ต'
+        44: 1,  # 'ถ'
+        14: 1,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 2,  # 'น'
+        17: 0,  # 'บ'
+        25: 1,  # 'ป'
+        39: 1,  # 'ผ'
+        62: 1,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 1,  # 'ม'
+        16: 0,  # 'ย'
+        2: 3,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 3,  # 'ล'
+        12: 1,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 1,  # 'ษ'
+        18: 2,  # 'ส'
+        21: 1,  # 'ห'
+        4: 2,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 1,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 1,  # 'า'
+        36: 0,  # 'ำ'
+        23: 2,  # 'ิ'
+        13: 3,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 1,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 1,  # 'เ'
+        28: 2,  # 'แ'
+        41: 0,  # 'โ'
+        29: 1,  # 'ใ'
+        33: 2,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 3,  # '็'
+        6: 1,  # '่'
+        7: 2,  # '้'
+        38: 1,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    39: {  # 'ผ'
+        5: 1,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 1,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 2,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 1,  # 'ม'
+        16: 2,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 3,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 1,  # 'ส'
+        21: 0,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 1,  # 'ะ'
+        10: 1,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 2,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 1,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 3,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 3,  # '่'
+        7: 1,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    62: {  # 'ฝ'
+        5: 0,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 1,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 0,  # 'ม'
+        16: 0,  # 'ย'
+        2: 1,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 1,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 1,  # 'ี'
+        40: 2,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 2,  # '่'
+        7: 1,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    31: {  # 'พ'
+        5: 1,  # 'ก'
+        30: 1,  # 'ข'
+        24: 1,  # 'ค'
+        8: 1,  # 'ง'
+        26: 1,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 1,  # 'ณ'
+        20: 1,  # 'ด'
+        19: 1,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 2,  # 'ท'
+        48: 1,  # 'ธ'
+        3: 3,  # 'น'
+        17: 2,  # 'บ'
+        25: 0,  # 'ป'
+        39: 1,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 1,  # 'ม'
+        16: 2,  # 'ย'
+        2: 3,  # 'ร'
+        61: 2,  # 'ฤ'
+        15: 2,  # 'ล'
+        12: 2,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 1,  # 'ส'
+        21: 1,  # 'ห'
+        4: 2,  # 'อ'
+        63: 1,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 3,  # 'า'
+        36: 0,  # 'ำ'
+        23: 3,  # 'ิ'
+        13: 2,  # 'ี'
+        40: 1,  # 'ึ'
+        27: 3,  # 'ื'
+        32: 1,  # 'ุ'
+        35: 2,  # 'ู'
+        11: 1,  # 'เ'
+        28: 1,  # 'แ'
+        41: 0,  # 'โ'
+        29: 1,  # 'ใ'
+        33: 1,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 1,  # '็'
+        6: 0,  # '่'
+        7: 1,  # '้'
+        38: 3,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    54: {  # 'ฟ'
+        5: 0,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 1,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 1,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 0,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 2,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 0,  # 'ม'
+        16: 0,  # 'ย'
+        2: 1,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 2,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 1,  # 'ส'
+        21: 0,  # 'ห'
+        4: 1,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 2,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 1,  # 'ิ'
+        13: 1,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 1,  # 'ื'
+        32: 1,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 1,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 2,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    45: {  # 'ภ'
+        5: 0,  # 'ก'
+        30: 0,  # 'ข'
+        24: 1,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 3,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 0,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 0,  # 'ม'
+        16: 0,  # 'ย'
+        2: 1,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 3,  # 'า'
+        36: 0,  # 'ำ'
+        23: 1,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 2,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 1,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    9: {  # 'ม'
+        5: 2,  # 'ก'
+        30: 2,  # 'ข'
+        24: 2,  # 'ค'
+        8: 2,  # 'ง'
+        26: 2,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 1,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 1,  # 'ณ'
+        20: 2,  # 'ด'
+        19: 2,  # 'ต'
+        44: 1,  # 'ถ'
+        14: 2,  # 'ท'
+        48: 1,  # 'ธ'
+        3: 3,  # 'น'
+        17: 2,  # 'บ'
+        25: 2,  # 'ป'
+        39: 1,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 3,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 1,  # 'ภ'
+        9: 2,  # 'ม'
+        16: 1,  # 'ย'
+        2: 2,  # 'ร'
+        61: 2,  # 'ฤ'
+        15: 2,  # 'ล'
+        12: 2,  # 'ว'
+        42: 1,  # 'ศ'
+        46: 1,  # 'ษ'
+        18: 3,  # 'ส'
+        21: 3,  # 'ห'
+        4: 3,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 1,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 3,  # 'า'
+        36: 0,  # 'ำ'
+        23: 3,  # 'ิ'
+        13: 3,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 3,  # 'ื'
+        32: 3,  # 'ุ'
+        35: 3,  # 'ู'
+        11: 2,  # 'เ'
+        28: 2,  # 'แ'
+        41: 2,  # 'โ'
+        29: 2,  # 'ใ'
+        33: 2,  # 'ไ'
+        50: 1,  # 'ๆ'
+        37: 1,  # '็'
+        6: 3,  # '่'
+        7: 2,  # '้'
+        38: 1,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    16: {  # 'ย'
+        5: 3,  # 'ก'
+        30: 1,  # 'ข'
+        24: 2,  # 'ค'
+        8: 3,  # 'ง'
+        26: 2,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 2,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 2,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 2,  # 'ด'
+        19: 2,  # 'ต'
+        44: 1,  # 'ถ'
+        14: 2,  # 'ท'
+        48: 1,  # 'ธ'
+        3: 3,  # 'น'
+        17: 3,  # 'บ'
+        25: 1,  # 'ป'
+        39: 1,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 1,  # 'ภ'
+        9: 2,  # 'ม'
+        16: 0,  # 'ย'
+        2: 2,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 1,  # 'ล'
+        12: 3,  # 'ว'
+        42: 1,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 2,  # 'ส'
+        21: 1,  # 'ห'
+        4: 2,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 2,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 3,  # 'า'
+        36: 0,  # 'ำ'
+        23: 2,  # 'ิ'
+        13: 3,  # 'ี'
+        40: 1,  # 'ึ'
+        27: 2,  # 'ื'
+        32: 2,  # 'ุ'
+        35: 3,  # 'ู'
+        11: 2,  # 'เ'
+        28: 1,  # 'แ'
+        41: 1,  # 'โ'
+        29: 2,  # 'ใ'
+        33: 2,  # 'ไ'
+        50: 2,  # 'ๆ'
+        37: 1,  # '็'
+        6: 3,  # '่'
+        7: 2,  # '้'
+        38: 3,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    2: {  # 'ร'
+        5: 3,  # 'ก'
+        30: 2,  # 'ข'
+        24: 2,  # 'ค'
+        8: 3,  # 'ง'
+        26: 2,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 2,  # 'ช'
+        51: 1,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 3,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 3,  # 'ณ'
+        20: 2,  # 'ด'
+        19: 2,  # 'ต'
+        44: 3,  # 'ถ'
+        14: 3,  # 'ท'
+        48: 1,  # 'ธ'
+        3: 2,  # 'น'
+        17: 2,  # 'บ'
+        25: 3,  # 'ป'
+        39: 2,  # 'ผ'
+        62: 1,  # 'ฝ'
+        31: 2,  # 'พ'
+        54: 1,  # 'ฟ'
+        45: 1,  # 'ภ'
+        9: 3,  # 'ม'
+        16: 2,  # 'ย'
+        2: 3,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 2,  # 'ล'
+        12: 3,  # 'ว'
+        42: 2,  # 'ศ'
+        46: 2,  # 'ษ'
+        18: 2,  # 'ส'
+        21: 2,  # 'ห'
+        4: 3,  # 'อ'
+        63: 1,  # 'ฯ'
+        22: 3,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 3,  # 'า'
+        36: 0,  # 'ำ'
+        23: 3,  # 'ิ'
+        13: 3,  # 'ี'
+        40: 2,  # 'ึ'
+        27: 3,  # 'ื'
+        32: 3,  # 'ุ'
+        35: 3,  # 'ู'
+        11: 3,  # 'เ'
+        28: 3,  # 'แ'
+        41: 1,  # 'โ'
+        29: 2,  # 'ใ'
+        33: 1,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 3,  # '็'
+        6: 3,  # '่'
+        7: 3,  # '้'
+        38: 3,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    61: {  # 'ฤ'
+        5: 0,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 2,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 2,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 0,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 1,  # 'ม'
+        16: 0,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 2,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    15: {  # 'ล'
+        5: 2,  # 'ก'
+        30: 3,  # 'ข'
+        24: 1,  # 'ค'
+        8: 3,  # 'ง'
+        26: 1,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 2,  # 'ด'
+        19: 2,  # 'ต'
+        44: 1,  # 'ถ'
+        14: 2,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 1,  # 'น'
+        17: 2,  # 'บ'
+        25: 2,  # 'ป'
+        39: 1,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 1,  # 'ภ'
+        9: 1,  # 'ม'
+        16: 3,  # 'ย'
+        2: 1,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 1,  # 'ล'
+        12: 1,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 2,  # 'ส'
+        21: 1,  # 'ห'
+        4: 3,  # 'อ'
+        63: 2,  # 'ฯ'
+        22: 3,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 3,  # 'า'
+        36: 2,  # 'ำ'
+        23: 3,  # 'ิ'
+        13: 3,  # 'ี'
+        40: 2,  # 'ึ'
+        27: 3,  # 'ื'
+        32: 2,  # 'ุ'
+        35: 3,  # 'ู'
+        11: 2,  # 'เ'
+        28: 1,  # 'แ'
+        41: 1,  # 'โ'
+        29: 2,  # 'ใ'
+        33: 1,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 2,  # '็'
+        6: 3,  # '่'
+        7: 3,  # '้'
+        38: 2,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    12: {  # 'ว'
+        5: 3,  # 'ก'
+        30: 2,  # 'ข'
+        24: 1,  # 'ค'
+        8: 3,  # 'ง'
+        26: 2,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 1,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 1,  # 'ณ'
+        20: 2,  # 'ด'
+        19: 1,  # 'ต'
+        44: 1,  # 'ถ'
+        14: 1,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 3,  # 'น'
+        17: 2,  # 'บ'
+        25: 1,  # 'ป'
+        39: 1,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 1,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 3,  # 'ม'
+        16: 3,  # 'ย'
+        2: 3,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 3,  # 'ล'
+        12: 1,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 2,  # 'ส'
+        21: 2,  # 'ห'
+        4: 2,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 2,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 3,  # 'า'
+        36: 0,  # 'ำ'
+        23: 3,  # 'ิ'
+        13: 2,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 2,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 3,  # 'เ'
+        28: 2,  # 'แ'
+        41: 1,  # 'โ'
+        29: 1,  # 'ใ'
+        33: 2,  # 'ไ'
+        50: 1,  # 'ๆ'
+        37: 0,  # '็'
+        6: 3,  # '่'
+        7: 3,  # '้'
+        38: 1,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    42: {  # 'ศ'
+        5: 1,  # 'ก'
+        30: 0,  # 'ข'
+        24: 1,  # 'ค'
+        8: 0,  # 'ง'
+        26: 1,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 1,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 1,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 1,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 2,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 0,  # 'ม'
+        16: 0,  # 'ย'
+        2: 2,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 2,  # 'ว'
+        42: 1,  # 'ศ'
+        46: 2,  # 'ษ'
+        18: 1,  # 'ส'
+        21: 0,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 2,  # 'ั'
+        1: 3,  # 'า'
+        36: 0,  # 'ำ'
+        23: 2,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 3,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 2,  # 'ู'
+        11: 0,  # 'เ'
+        28: 1,  # 'แ'
+        41: 0,  # 'โ'
+        29: 1,  # 'ใ'
+        33: 1,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 1,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    46: {  # 'ษ'
+        5: 0,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 2,  # 'ฎ'
+        57: 1,  # 'ฏ'
+        49: 2,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 3,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 1,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 1,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 0,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 1,  # 'ภ'
+        9: 1,  # 'ม'
+        16: 2,  # 'ย'
+        2: 2,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 0,  # 'ว'
+        42: 1,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 2,  # 'ะ'
+        10: 2,  # 'ั'
+        1: 3,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 1,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 1,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 2,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    18: {  # 'ส'
+        5: 2,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 2,  # 'ง'
+        26: 1,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 3,  # 'ด'
+        19: 3,  # 'ต'
+        44: 3,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 3,  # 'น'
+        17: 2,  # 'บ'
+        25: 1,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 2,  # 'ภ'
+        9: 3,  # 'ม'
+        16: 1,  # 'ย'
+        2: 3,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 1,  # 'ล'
+        12: 2,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 2,  # 'ห'
+        4: 3,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 2,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 3,  # 'า'
+        36: 3,  # 'ำ'
+        23: 3,  # 'ิ'
+        13: 3,  # 'ี'
+        40: 2,  # 'ึ'
+        27: 3,  # 'ื'
+        32: 3,  # 'ุ'
+        35: 3,  # 'ู'
+        11: 2,  # 'เ'
+        28: 0,  # 'แ'
+        41: 1,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 1,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 3,  # '่'
+        7: 1,  # '้'
+        38: 2,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    21: {  # 'ห'
+        5: 3,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 1,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 2,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 1,  # 'ด'
+        19: 3,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 3,  # 'น'
+        17: 0,  # 'บ'
+        25: 1,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 3,  # 'ม'
+        16: 2,  # 'ย'
+        2: 3,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 3,  # 'ล'
+        12: 2,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 3,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 1,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 3,  # 'า'
+        36: 0,  # 'ำ'
+        23: 1,  # 'ิ'
+        13: 1,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 1,  # 'ุ'
+        35: 1,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 3,  # '็'
+        6: 3,  # '่'
+        7: 3,  # '้'
+        38: 2,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    4: {  # 'อ'
+        5: 3,  # 'ก'
+        30: 1,  # 'ข'
+        24: 2,  # 'ค'
+        8: 3,  # 'ง'
+        26: 1,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 3,  # 'ด'
+        19: 2,  # 'ต'
+        44: 1,  # 'ถ'
+        14: 2,  # 'ท'
+        48: 1,  # 'ธ'
+        3: 3,  # 'น'
+        17: 3,  # 'บ'
+        25: 1,  # 'ป'
+        39: 1,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 1,  # 'ฟ'
+        45: 1,  # 'ภ'
+        9: 3,  # 'ม'
+        16: 3,  # 'ย'
+        2: 3,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 2,  # 'ล'
+        12: 2,  # 'ว'
+        42: 1,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 2,  # 'ส'
+        21: 2,  # 'ห'
+        4: 3,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 2,  # 'ะ'
+        10: 3,  # 'ั'
+        1: 3,  # 'า'
+        36: 2,  # 'ำ'
+        23: 2,  # 'ิ'
+        13: 3,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 3,  # 'ื'
+        32: 3,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 3,  # 'เ'
+        28: 1,  # 'แ'
+        41: 1,  # 'โ'
+        29: 2,  # 'ใ'
+        33: 2,  # 'ไ'
+        50: 1,  # 'ๆ'
+        37: 1,  # '็'
+        6: 2,  # '่'
+        7: 2,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    63: {  # 'ฯ'
+        5: 0,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 0,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 0,  # 'ม'
+        16: 0,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 2,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    22: {  # 'ะ'
+        5: 3,  # 'ก'
+        30: 1,  # 'ข'
+        24: 2,  # 'ค'
+        8: 1,  # 'ง'
+        26: 2,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 3,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 3,  # 'ด'
+        19: 3,  # 'ต'
+        44: 1,  # 'ถ'
+        14: 3,  # 'ท'
+        48: 1,  # 'ธ'
+        3: 2,  # 'น'
+        17: 3,  # 'บ'
+        25: 2,  # 'ป'
+        39: 1,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 2,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 1,  # 'ภ'
+        9: 3,  # 'ม'
+        16: 2,  # 'ย'
+        2: 2,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 2,  # 'ล'
+        12: 2,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 3,  # 'ส'
+        21: 3,  # 'ห'
+        4: 2,  # 'อ'
+        63: 1,  # 'ฯ'
+        22: 1,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 3,  # 'เ'
+        28: 2,  # 'แ'
+        41: 1,  # 'โ'
+        29: 2,  # 'ใ'
+        33: 2,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    10: {  # 'ั'
+        5: 3,  # 'ก'
+        30: 0,  # 'ข'
+        24: 1,  # 'ค'
+        8: 3,  # 'ง'
+        26: 3,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 3,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 2,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 3,  # 'ฒ'
+        43: 3,  # 'ณ'
+        20: 3,  # 'ด'
+        19: 3,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 2,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 3,  # 'น'
+        17: 3,  # 'บ'
+        25: 1,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 2,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 3,  # 'ม'
+        16: 3,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 2,  # 'ล'
+        12: 3,  # 'ว'
+        42: 2,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 3,  # 'ส'
+        21: 0,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 3,  # '่'
+        7: 3,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    1: {  # 'า'
+        5: 3,  # 'ก'
+        30: 2,  # 'ข'
+        24: 3,  # 'ค'
+        8: 3,  # 'ง'
+        26: 3,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 3,  # 'ช'
+        51: 1,  # 'ซ'
+        47: 2,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 3,  # 'ณ'
+        20: 3,  # 'ด'
+        19: 3,  # 'ต'
+        44: 1,  # 'ถ'
+        14: 3,  # 'ท'
+        48: 2,  # 'ธ'
+        3: 3,  # 'น'
+        17: 3,  # 'บ'
+        25: 2,  # 'ป'
+        39: 1,  # 'ผ'
+        62: 1,  # 'ฝ'
+        31: 3,  # 'พ'
+        54: 1,  # 'ฟ'
+        45: 1,  # 'ภ'
+        9: 3,  # 'ม'
+        16: 3,  # 'ย'
+        2: 3,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 3,  # 'ล'
+        12: 3,  # 'ว'
+        42: 2,  # 'ศ'
+        46: 3,  # 'ษ'
+        18: 3,  # 'ส'
+        21: 3,  # 'ห'
+        4: 2,  # 'อ'
+        63: 1,  # 'ฯ'
+        22: 3,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 3,  # 'เ'
+        28: 2,  # 'แ'
+        41: 1,  # 'โ'
+        29: 2,  # 'ใ'
+        33: 2,  # 'ไ'
+        50: 1,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    36: {  # 'ำ'
+        5: 2,  # 'ก'
+        30: 1,  # 'ข'
+        24: 3,  # 'ค'
+        8: 2,  # 'ง'
+        26: 1,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 1,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 1,  # 'ด'
+        19: 1,  # 'ต'
+        44: 1,  # 'ถ'
+        14: 1,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 3,  # 'น'
+        17: 1,  # 'บ'
+        25: 1,  # 'ป'
+        39: 1,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 1,  # 'ภ'
+        9: 1,  # 'ม'
+        16: 0,  # 'ย'
+        2: 2,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 2,  # 'ล'
+        12: 1,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 1,  # 'ส'
+        21: 3,  # 'ห'
+        4: 1,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 3,  # 'เ'
+        28: 2,  # 'แ'
+        41: 1,  # 'โ'
+        29: 2,  # 'ใ'
+        33: 2,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    23: {  # 'ิ'
+        5: 3,  # 'ก'
+        30: 1,  # 'ข'
+        24: 2,  # 'ค'
+        8: 3,  # 'ง'
+        26: 3,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 3,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 2,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 3,  # 'ด'
+        19: 3,  # 'ต'
+        44: 1,  # 'ถ'
+        14: 3,  # 'ท'
+        48: 3,  # 'ธ'
+        3: 3,  # 'น'
+        17: 3,  # 'บ'
+        25: 2,  # 'ป'
+        39: 2,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 3,  # 'พ'
+        54: 1,  # 'ฟ'
+        45: 2,  # 'ภ'
+        9: 3,  # 'ม'
+        16: 2,  # 'ย'
+        2: 2,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 2,  # 'ล'
+        12: 3,  # 'ว'
+        42: 3,  # 'ศ'
+        46: 2,  # 'ษ'
+        18: 2,  # 'ส'
+        21: 3,  # 'ห'
+        4: 1,  # 'อ'
+        63: 1,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 3,  # 'เ'
+        28: 1,  # 'แ'
+        41: 1,  # 'โ'
+        29: 1,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 3,  # '่'
+        7: 2,  # '้'
+        38: 2,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    13: {  # 'ี'
+        5: 3,  # 'ก'
+        30: 2,  # 'ข'
+        24: 2,  # 'ค'
+        8: 0,  # 'ง'
+        26: 1,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 2,  # 'ด'
+        19: 1,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 2,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 1,  # 'น'
+        17: 2,  # 'บ'
+        25: 2,  # 'ป'
+        39: 1,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 2,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 2,  # 'ม'
+        16: 3,  # 'ย'
+        2: 2,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 1,  # 'ล'
+        12: 2,  # 'ว'
+        42: 1,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 2,  # 'ส'
+        21: 1,  # 'ห'
+        4: 2,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 2,  # 'เ'
+        28: 2,  # 'แ'
+        41: 1,  # 'โ'
+        29: 1,  # 'ใ'
+        33: 1,  # 'ไ'
+        50: 1,  # 'ๆ'
+        37: 0,  # '็'
+        6: 3,  # '่'
+        7: 3,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    40: {  # 'ึ'
+        5: 3,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 3,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 1,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 0,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 1,  # 'ม'
+        16: 0,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 3,  # '่'
+        7: 3,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    27: {  # 'ื'
+        5: 0,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 1,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 2,  # 'น'
+        17: 3,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 2,  # 'ม'
+        16: 0,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 3,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 3,  # '่'
+        7: 3,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    32: {  # 'ุ'
+        5: 3,  # 'ก'
+        30: 2,  # 'ข'
+        24: 3,  # 'ค'
+        8: 3,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 2,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 1,  # 'ฒ'
+        43: 3,  # 'ณ'
+        20: 3,  # 'ด'
+        19: 3,  # 'ต'
+        44: 1,  # 'ถ'
+        14: 2,  # 'ท'
+        48: 1,  # 'ธ'
+        3: 2,  # 'น'
+        17: 2,  # 'บ'
+        25: 2,  # 'ป'
+        39: 2,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 1,  # 'ภ'
+        9: 3,  # 'ม'
+        16: 1,  # 'ย'
+        2: 2,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 2,  # 'ล'
+        12: 1,  # 'ว'
+        42: 1,  # 'ศ'
+        46: 2,  # 'ษ'
+        18: 1,  # 'ส'
+        21: 1,  # 'ห'
+        4: 1,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 1,  # 'เ'
+        28: 0,  # 'แ'
+        41: 1,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 1,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 3,  # '่'
+        7: 2,  # '้'
+        38: 1,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    35: {  # 'ู'
+        5: 3,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 2,  # 'ง'
+        26: 1,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 2,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 1,  # 'ณ'
+        20: 2,  # 'ด'
+        19: 2,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 1,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 2,  # 'น'
+        17: 0,  # 'บ'
+        25: 3,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 2,  # 'ม'
+        16: 0,  # 'ย'
+        2: 1,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 3,  # 'ล'
+        12: 1,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 1,  # 'เ'
+        28: 1,  # 'แ'
+        41: 1,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 3,  # '่'
+        7: 3,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    11: {  # 'เ'
+        5: 3,  # 'ก'
+        30: 3,  # 'ข'
+        24: 3,  # 'ค'
+        8: 2,  # 'ง'
+        26: 3,  # 'จ'
+        52: 3,  # 'ฉ'
+        34: 3,  # 'ช'
+        51: 2,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 1,  # 'ณ'
+        20: 3,  # 'ด'
+        19: 3,  # 'ต'
+        44: 1,  # 'ถ'
+        14: 3,  # 'ท'
+        48: 1,  # 'ธ'
+        3: 3,  # 'น'
+        17: 3,  # 'บ'
+        25: 3,  # 'ป'
+        39: 2,  # 'ผ'
+        62: 1,  # 'ฝ'
+        31: 3,  # 'พ'
+        54: 1,  # 'ฟ'
+        45: 3,  # 'ภ'
+        9: 3,  # 'ม'
+        16: 2,  # 'ย'
+        2: 3,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 3,  # 'ล'
+        12: 3,  # 'ว'
+        42: 2,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 3,  # 'ส'
+        21: 3,  # 'ห'
+        4: 3,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    28: {  # 'แ'
+        5: 3,  # 'ก'
+        30: 2,  # 'ข'
+        24: 2,  # 'ค'
+        8: 1,  # 'ง'
+        26: 2,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 2,  # 'ด'
+        19: 3,  # 'ต'
+        44: 2,  # 'ถ'
+        14: 3,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 3,  # 'น'
+        17: 3,  # 'บ'
+        25: 2,  # 'ป'
+        39: 3,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 2,  # 'พ'
+        54: 2,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 2,  # 'ม'
+        16: 2,  # 'ย'
+        2: 2,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 3,  # 'ล'
+        12: 2,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 3,  # 'ส'
+        21: 3,  # 'ห'
+        4: 1,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    41: {  # 'โ'
+        5: 2,  # 'ก'
+        30: 1,  # 'ข'
+        24: 2,  # 'ค'
+        8: 0,  # 'ง'
+        26: 1,  # 'จ'
+        52: 1,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 1,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 3,  # 'ด'
+        19: 2,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 2,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 3,  # 'น'
+        17: 1,  # 'บ'
+        25: 3,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 1,  # 'ฟ'
+        45: 1,  # 'ภ'
+        9: 1,  # 'ม'
+        16: 2,  # 'ย'
+        2: 2,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 3,  # 'ล'
+        12: 0,  # 'ว'
+        42: 1,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 2,  # 'ส'
+        21: 0,  # 'ห'
+        4: 2,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    29: {  # 'ใ'
+        5: 2,  # 'ก'
+        30: 0,  # 'ข'
+        24: 1,  # 'ค'
+        8: 0,  # 'ง'
+        26: 3,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 3,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 3,  # 'ด'
+        19: 1,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 3,  # 'น'
+        17: 2,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 0,  # 'ม'
+        16: 1,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 3,  # 'ส'
+        21: 3,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    33: {  # 'ไ'
+        5: 1,  # 'ก'
+        30: 2,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 1,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 3,  # 'ด'
+        19: 1,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 3,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 0,  # 'น'
+        17: 1,  # 'บ'
+        25: 3,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 2,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 3,  # 'ม'
+        16: 0,  # 'ย'
+        2: 3,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 1,  # 'ล'
+        12: 3,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 1,  # 'ส'
+        21: 2,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    50: {  # 'ๆ'
+        5: 0,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 0,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 0,  # 'ม'
+        16: 0,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    37: {  # '็'
+        5: 2,  # 'ก'
+        30: 1,  # 'ข'
+        24: 2,  # 'ค'
+        8: 2,  # 'ง'
+        26: 3,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 1,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 1,  # 'ด'
+        19: 2,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 1,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 3,  # 'น'
+        17: 3,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 2,  # 'ม'
+        16: 1,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 2,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 1,  # 'ส'
+        21: 0,  # 'ห'
+        4: 1,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 1,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 1,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    6: {  # '่'
+        5: 2,  # 'ก'
+        30: 1,  # 'ข'
+        24: 2,  # 'ค'
+        8: 3,  # 'ง'
+        26: 2,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 1,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 1,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 1,  # 'ด'
+        19: 2,  # 'ต'
+        44: 1,  # 'ถ'
+        14: 2,  # 'ท'
+        48: 1,  # 'ธ'
+        3: 3,  # 'น'
+        17: 1,  # 'บ'
+        25: 2,  # 'ป'
+        39: 2,  # 'ผ'
+        62: 1,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 3,  # 'ม'
+        16: 3,  # 'ย'
+        2: 2,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 2,  # 'ล'
+        12: 3,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 2,  # 'ส'
+        21: 1,  # 'ห'
+        4: 3,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 1,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 3,  # 'า'
+        36: 2,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 3,  # 'เ'
+        28: 2,  # 'แ'
+        41: 1,  # 'โ'
+        29: 2,  # 'ใ'
+        33: 2,  # 'ไ'
+        50: 1,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    7: {  # '้'
+        5: 2,  # 'ก'
+        30: 1,  # 'ข'
+        24: 2,  # 'ค'
+        8: 3,  # 'ง'
+        26: 2,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 1,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 1,  # 'ด'
+        19: 2,  # 'ต'
+        44: 1,  # 'ถ'
+        14: 2,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 3,  # 'น'
+        17: 2,  # 'บ'
+        25: 2,  # 'ป'
+        39: 2,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 1,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 3,  # 'ม'
+        16: 2,  # 'ย'
+        2: 2,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 1,  # 'ล'
+        12: 3,  # 'ว'
+        42: 1,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 2,  # 'ส'
+        21: 2,  # 'ห'
+        4: 3,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 3,  # 'า'
+        36: 2,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 2,  # 'เ'
+        28: 2,  # 'แ'
+        41: 1,  # 'โ'
+        29: 2,  # 'ใ'
+        33: 2,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    38: {  # '์'
+        5: 2,  # 'ก'
+        30: 1,  # 'ข'
+        24: 1,  # 'ค'
+        8: 0,  # 'ง'
+        26: 1,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 1,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 2,  # 'ด'
+        19: 1,  # 'ต'
+        44: 1,  # 'ถ'
+        14: 1,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 1,  # 'น'
+        17: 1,  # 'บ'
+        25: 1,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 1,  # 'พ'
+        54: 1,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 2,  # 'ม'
+        16: 0,  # 'ย'
+        2: 1,  # 'ร'
+        61: 1,  # 'ฤ'
+        15: 1,  # 'ล'
+        12: 1,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 1,  # 'ส'
+        21: 1,  # 'ห'
+        4: 2,  # 'อ'
+        63: 1,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 2,  # 'เ'
+        28: 2,  # 'แ'
+        41: 1,  # 'โ'
+        29: 1,  # 'ใ'
+        33: 1,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 0,  # '๑'
+        59: 0,  # '๒'
+        60: 0,  # '๕'
+    },
+    56: {  # '๑'
+        5: 0,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 0,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 0,  # 'ม'
+        16: 0,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 2,  # '๑'
+        59: 1,  # '๒'
+        60: 1,  # '๕'
+    },
+    59: {  # '๒'
+        5: 0,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 0,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 0,  # 'ม'
+        16: 0,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 1,  # '๑'
+        59: 1,  # '๒'
+        60: 3,  # '๕'
+    },
+    60: {  # '๕'
+        5: 0,  # 'ก'
+        30: 0,  # 'ข'
+        24: 0,  # 'ค'
+        8: 0,  # 'ง'
+        26: 0,  # 'จ'
+        52: 0,  # 'ฉ'
+        34: 0,  # 'ช'
+        51: 0,  # 'ซ'
+        47: 0,  # 'ญ'
+        58: 0,  # 'ฎ'
+        57: 0,  # 'ฏ'
+        49: 0,  # 'ฐ'
+        53: 0,  # 'ฑ'
+        55: 0,  # 'ฒ'
+        43: 0,  # 'ณ'
+        20: 0,  # 'ด'
+        19: 0,  # 'ต'
+        44: 0,  # 'ถ'
+        14: 0,  # 'ท'
+        48: 0,  # 'ธ'
+        3: 0,  # 'น'
+        17: 0,  # 'บ'
+        25: 0,  # 'ป'
+        39: 0,  # 'ผ'
+        62: 0,  # 'ฝ'
+        31: 0,  # 'พ'
+        54: 0,  # 'ฟ'
+        45: 0,  # 'ภ'
+        9: 0,  # 'ม'
+        16: 0,  # 'ย'
+        2: 0,  # 'ร'
+        61: 0,  # 'ฤ'
+        15: 0,  # 'ล'
+        12: 0,  # 'ว'
+        42: 0,  # 'ศ'
+        46: 0,  # 'ษ'
+        18: 0,  # 'ส'
+        21: 0,  # 'ห'
+        4: 0,  # 'อ'
+        63: 0,  # 'ฯ'
+        22: 0,  # 'ะ'
+        10: 0,  # 'ั'
+        1: 0,  # 'า'
+        36: 0,  # 'ำ'
+        23: 0,  # 'ิ'
+        13: 0,  # 'ี'
+        40: 0,  # 'ึ'
+        27: 0,  # 'ื'
+        32: 0,  # 'ุ'
+        35: 0,  # 'ู'
+        11: 0,  # 'เ'
+        28: 0,  # 'แ'
+        41: 0,  # 'โ'
+        29: 0,  # 'ใ'
+        33: 0,  # 'ไ'
+        50: 0,  # 'ๆ'
+        37: 0,  # '็'
+        6: 0,  # '่'
+        7: 0,  # '้'
+        38: 0,  # '์'
+        56: 2,  # '๑'
+        59: 1,  # '๒'
+        60: 0,  # '๕'
+    },
+}
+
+# 255: Undefined characters that did not exist in training text
+# 254: Carriage/Return
+# 253: symbol (punctuation) that does not belong to word
+# 252: 0 - 9
+# 251: Control characters
+
+# Character Mapping Table(s):
+TIS_620_THAI_CHAR_TO_ORDER = {
+    0: 255,  # '\x00'
+    1: 255,  # '\x01'
+    2: 255,  # '\x02'
+    3: 255,  # '\x03'
+    4: 255,  # '\x04'
+    5: 255,  # '\x05'
+    6: 255,  # '\x06'
+    7: 255,  # '\x07'
+    8: 255,  # '\x08'
+    9: 255,  # '\t'
+    10: 254,  # '\n'
+    11: 255,  # '\x0b'
+    12: 255,  # '\x0c'
+    13: 254,  # '\r'
+    14: 255,  # '\x0e'
+    15: 255,  # '\x0f'
+    16: 255,  # '\x10'
+    17: 255,  # '\x11'
+    18: 255,  # '\x12'
+    19: 255,  # '\x13'
+    20: 255,  # '\x14'
+    21: 255,  # '\x15'
+    22: 255,  # '\x16'
+    23: 255,  # '\x17'
+    24: 255,  # '\x18'
+    25: 255,  # '\x19'
+    26: 255,  # '\x1a'
+    27: 255,  # '\x1b'
+    28: 255,  # '\x1c'
+    29: 255,  # '\x1d'
+    30: 255,  # '\x1e'
+    31: 255,  # '\x1f'
+    32: 253,  # ' '
+    33: 253,  # '!'
+    34: 253,  # '"'
+    35: 253,  # '#'
+    36: 253,  # '$'
+    37: 253,  # '%'
+    38: 253,  # '&'
+    39: 253,  # "'"
+    40: 253,  # '('
+    41: 253,  # ')'
+    42: 253,  # '*'
+    43: 253,  # '+'
+    44: 253,  # ','
+    45: 253,  # '-'
+    46: 253,  # '.'
+    47: 253,  # '/'
+    48: 252,  # '0'
+    49: 252,  # '1'
+    50: 252,  # '2'
+    51: 252,  # '3'
+    52: 252,  # '4'
+    53: 252,  # '5'
+    54: 252,  # '6'
+    55: 252,  # '7'
+    56: 252,  # '8'
+    57: 252,  # '9'
+    58: 253,  # ':'
+    59: 253,  # ';'
+    60: 253,  # '<'
+    61: 253,  # '='
+    62: 253,  # '>'
+    63: 253,  # '?'
+    64: 253,  # '@'
+    65: 182,  # 'A'
+    66: 106,  # 'B'
+    67: 107,  # 'C'
+    68: 100,  # 'D'
+    69: 183,  # 'E'
+    70: 184,  # 'F'
+    71: 185,  # 'G'
+    72: 101,  # 'H'
+    73: 94,  # 'I'
+    74: 186,  # 'J'
+    75: 187,  # 'K'
+    76: 108,  # 'L'
+    77: 109,  # 'M'
+    78: 110,  # 'N'
+    79: 111,  # 'O'
+    80: 188,  # 'P'
+    81: 189,  # 'Q'
+    82: 190,  # 'R'
+    83: 89,  # 'S'
+    84: 95,  # 'T'
+    85: 112,  # 'U'
+    86: 113,  # 'V'
+    87: 191,  # 'W'
+    88: 192,  # 'X'
+    89: 193,  # 'Y'
+    90: 194,  # 'Z'
+    91: 253,  # '['
+    92: 253,  # '\\'
+    93: 253,  # ']'
+    94: 253,  # '^'
+    95: 253,  # '_'
+    96: 253,  # '`'
+    97: 64,  # 'a'
+    98: 72,  # 'b'
+    99: 73,  # 'c'
+    100: 114,  # 'd'
+    101: 74,  # 'e'
+    102: 115,  # 'f'
+    103: 116,  # 'g'
+    104: 102,  # 'h'
+    105: 81,  # 'i'
+    106: 201,  # 'j'
+    107: 117,  # 'k'
+    108: 90,  # 'l'
+    109: 103,  # 'm'
+    110: 78,  # 'n'
+    111: 82,  # 'o'
+    112: 96,  # 'p'
+    113: 202,  # 'q'
+    114: 91,  # 'r'
+    115: 79,  # 's'
+    116: 84,  # 't'
+    117: 104,  # 'u'
+    118: 105,  # 'v'
+    119: 97,  # 'w'
+    120: 98,  # 'x'
+    121: 92,  # 'y'
+    122: 203,  # 'z'
+    123: 253,  # '{'
+    124: 253,  # '|'
+    125: 253,  # '}'
+    126: 253,  # '~'
+    127: 253,  # '\x7f'
+    128: 209,  # '\x80'
+    129: 210,  # '\x81'
+    130: 211,  # '\x82'
+    131: 212,  # '\x83'
+    132: 213,  # '\x84'
+    133: 88,  # '\x85'
+    134: 214,  # '\x86'
+    135: 215,  # '\x87'
+    136: 216,  # '\x88'
+    137: 217,  # '\x89'
+    138: 218,  # '\x8a'
+    139: 219,  # '\x8b'
+    140: 220,  # '\x8c'
+    141: 118,  # '\x8d'
+    142: 221,  # '\x8e'
+    143: 222,  # '\x8f'
+    144: 223,  # '\x90'
+    145: 224,  # '\x91'
+    146: 99,  # '\x92'
+    147: 85,  # '\x93'
+    148: 83,  # '\x94'
+    149: 225,  # '\x95'
+    150: 226,  # '\x96'
+    151: 227,  # '\x97'
+    152: 228,  # '\x98'
+    153: 229,  # '\x99'
+    154: 230,  # '\x9a'
+    155: 231,  # '\x9b'
+    156: 232,  # '\x9c'
+    157: 233,  # '\x9d'
+    158: 234,  # '\x9e'
+    159: 235,  # '\x9f'
+    160: 236,  # None
+    161: 5,  # 'ก'
+    162: 30,  # 'ข'
+    163: 237,  # 'ฃ'
+    164: 24,  # 'ค'
+    165: 238,  # 'ฅ'
+    166: 75,  # 'ฆ'
+    167: 8,  # 'ง'
+    168: 26,  # 'จ'
+    169: 52,  # 'ฉ'
+    170: 34,  # 'ช'
+    171: 51,  # 'ซ'
+    172: 119,  # 'ฌ'
+    173: 47,  # 'ญ'
+    174: 58,  # 'ฎ'
+    175: 57,  # 'ฏ'
+    176: 49,  # 'ฐ'
+    177: 53,  # 'ฑ'
+    178: 55,  # 'ฒ'
+    179: 43,  # 'ณ'
+    180: 20,  # 'ด'
+    181: 19,  # 'ต'
+    182: 44,  # 'ถ'
+    183: 14,  # 'ท'
+    184: 48,  # 'ธ'
+    185: 3,  # 'น'
+    186: 17,  # 'บ'
+    187: 25,  # 'ป'
+    188: 39,  # 'ผ'
+    189: 62,  # 'ฝ'
+    190: 31,  # 'พ'
+    191: 54,  # 'ฟ'
+    192: 45,  # 'ภ'
+    193: 9,  # 'ม'
+    194: 16,  # 'ย'
+    195: 2,  # 'ร'
+    196: 61,  # 'ฤ'
+    197: 15,  # 'ล'
+    198: 239,  # 'ฦ'
+    199: 12,  # 'ว'
+    200: 42,  # 'ศ'
+    201: 46,  # 'ษ'
+    202: 18,  # 'ส'
+    203: 21,  # 'ห'
+    204: 76,  # 'ฬ'
+    205: 4,  # 'อ'
+    206: 66,  # 'ฮ'
+    207: 63,  # 'ฯ'
+    208: 22,  # 'ะ'
+    209: 10,  # 'ั'
+    210: 1,  # 'า'
+    211: 36,  # 'ำ'
+    212: 23,  # 'ิ'
+    213: 13,  # 'ี'
+    214: 40,  # 'ึ'
+    215: 27,  # 'ื'
+    216: 32,  # 'ุ'
+    217: 35,  # 'ู'
+    218: 86,  # 'ฺ'
+    219: 240,  # None
+    220: 241,  # None
+    221: 242,  # None
+    222: 243,  # None
+    223: 244,  # '฿'
+    224: 11,  # 'เ'
+    225: 28,  # 'แ'
+    226: 41,  # 'โ'
+    227: 29,  # 'ใ'
+    228: 33,  # 'ไ'
+    229: 245,  # 'ๅ'
+    230: 50,  # 'ๆ'
+    231: 37,  # '็'
+    232: 6,  # '่'
+    233: 7,  # '้'
+    234: 67,  # '๊'
+    235: 77,  # '๋'
+    236: 38,  # '์'
+    237: 93,  # 'ํ'
+    238: 246,  # '๎'
+    239: 247,  # '๏'
+    240: 68,  # '๐'
+    241: 56,  # '๑'
+    242: 59,  # '๒'
+    243: 65,  # '๓'
+    244: 69,  # '๔'
+    245: 60,  # '๕'
+    246: 70,  # '๖'
+    247: 80,  # '๗'
+    248: 71,  # '๘'
+    249: 87,  # '๙'
+    250: 248,  # '๚'
+    251: 249,  # '๛'
+    252: 250,  # None
+    253: 251,  # None
+    254: 252,  # None
+    255: 253,  # None
+}
+
+TIS_620_THAI_MODEL = SingleByteCharSetModel(
+    charset_name="TIS-620",
+    language="Thai",
+    char_to_order_map=TIS_620_THAI_CHAR_TO_ORDER,
+    language_model=THAI_LANG_MODEL,
+    typical_positive_ratio=0.926386,
+    keep_ascii_letters=False,
+    alphabet="กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛",
+)
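
Editorial note (not part of the committed diff): the table above closes the vendored TIS-620 Thai model, and the sketch below illustrates, under stated assumptions, how such a SingleByteCharSetModel is normally driven. It assumes the vendored chardet modules are importable via pip._vendor (the standalone chardet package exposes the same modules), that "tis-620" is available as a Python codec, and the Thai sample string is purely hypothetical. The char_to_order_map converts each byte to a frequency rank, and consecutive rank pairs are scored against the language model, whose weights mean 3 = positive, 2 = likely, 1 = unlikely, 0 = negative, as noted in these files.

# Hedged sketch, not from the commit: exercising the TIS-620 Thai model.
from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetProber
from pip._vendor.chardet.langthaimodel import TIS_620_THAI_MODEL

prober = SingleByteCharSetProber(TIS_620_THAI_MODEL)
# char_to_order_map ranks each TIS-620 byte; the prober scores adjacent
# rank pairs against THAI_LANG_MODEL to build its confidence estimate.
sample = "ภาษาไทยเป็นภาษาราชการของประเทศไทย".encode("tis-620")  # hypothetical sample
prober.feed(sample)
print(prober.charset_name, prober.language, prober.get_confidence())

The same pattern applies to the Turkish model defined in the next file of this diff; only the charset name, char-to-order table, and bigram weights change.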
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langturkishmodel.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langturkishmodel.py
new file mode 100644
index 0000000..291857c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/langturkishmodel.py
@@ -0,0 +1,4380 @@
+from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
+
+# 3: Positive
+# 2: Likely
+# 1: Unlikely
+# 0: Negative
+
+TURKISH_LANG_MODEL = {
+    23: {  # 'A'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 0,  # 'b'
+        28: 0,  # 'c'
+        12: 2,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 1,  # 'g'
+        25: 1,  # 'h'
+        3: 1,  # 'i'
+        24: 0,  # 'j'
+        10: 2,  # 'k'
+        5: 1,  # 'l'
+        13: 1,  # 'm'
+        4: 1,  # 'n'
+        15: 0,  # 'o'
+        26: 0,  # 'p'
+        7: 1,  # 'r'
+        8: 1,  # 's'
+        9: 1,  # 't'
+        14: 1,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 3,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 1,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 0,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 0,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    37: {  # 'B'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 2,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 2,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 1,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 1,  # 'P'
+        44: 0,  # 'R'
+        35: 1,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 1,  # 'Y'
+        56: 0,  # 'Z'
+        1: 2,  # 'a'
+        21: 0,  # 'b'
+        28: 2,  # 'c'
+        12: 0,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 0,  # 'j'
+        10: 0,  # 'k'
+        5: 0,  # 'l'
+        13: 1,  # 'm'
+        4: 1,  # 'n'
+        15: 0,  # 'o'
+        26: 0,  # 'p'
+        7: 0,  # 'r'
+        8: 0,  # 's'
+        9: 0,  # 't'
+        14: 2,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 1,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 1,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 1,  # 'ö'
+        17: 0,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 0,  # 'ı'
+        40: 1,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    47: {  # 'C'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 1,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 1,  # 'L'
+        20: 0,  # 'M'
+        46: 1,  # 'N'
+        42: 0,  # 'O'
+        48: 1,  # 'P'
+        44: 1,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 1,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 0,  # 'b'
+        28: 2,  # 'c'
+        12: 0,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 2,  # 'j'
+        10: 1,  # 'k'
+        5: 2,  # 'l'
+        13: 2,  # 'm'
+        4: 2,  # 'n'
+        15: 1,  # 'o'
+        26: 0,  # 'p'
+        7: 2,  # 'r'
+        8: 0,  # 's'
+        9: 0,  # 't'
+        14: 3,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 2,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 1,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 1,  # 'ç'
+        61: 0,  # 'î'
+        34: 1,  # 'ö'
+        17: 0,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 1,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    39: {  # 'D'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 1,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 1,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 1,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 2,  # 'a'
+        21: 0,  # 'b'
+        28: 2,  # 'c'
+        12: 0,  # 'd'
+        2: 2,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 0,  # 'j'
+        10: 0,  # 'k'
+        5: 1,  # 'l'
+        13: 3,  # 'm'
+        4: 0,  # 'n'
+        15: 1,  # 'o'
+        26: 0,  # 'p'
+        7: 0,  # 'r'
+        8: 0,  # 's'
+        9: 0,  # 't'
+        14: 1,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 1,  # 'z'
+        63: 0,  # '·'
+        54: 1,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 1,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 0,  # 'ü'
+        30: 1,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 1,  # 'ı'
+        40: 1,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    29: {  # 'E'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 1,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 3,  # 'K'
+        49: 0,  # 'L'
+        20: 1,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 0,  # 'b'
+        28: 0,  # 'c'
+        12: 2,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 1,  # 'g'
+        25: 0,  # 'h'
+        3: 1,  # 'i'
+        24: 1,  # 'j'
+        10: 0,  # 'k'
+        5: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        15: 0,  # 'o'
+        26: 0,  # 'p'
+        7: 0,  # 'r'
+        8: 1,  # 's'
+        9: 1,  # 't'
+        14: 1,  # 'u'
+        32: 1,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 2,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 0,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    52: {  # 'F'
+        23: 0,  # 'A'
+        37: 1,  # 'B'
+        47: 1,  # 'C'
+        39: 1,  # 'D'
+        29: 1,  # 'E'
+        52: 2,  # 'F'
+        36: 0,  # 'G'
+        45: 2,  # 'H'
+        53: 1,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 1,  # 'M'
+        46: 1,  # 'N'
+        42: 1,  # 'O'
+        48: 2,  # 'P'
+        44: 1,  # 'R'
+        35: 1,  # 'S'
+        31: 1,  # 'T'
+        51: 1,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 2,  # 'Y'
+        56: 0,  # 'Z'
+        1: 0,  # 'a'
+        21: 1,  # 'b'
+        28: 1,  # 'c'
+        12: 1,  # 'd'
+        2: 0,  # 'e'
+        18: 1,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 2,  # 'i'
+        24: 1,  # 'j'
+        10: 0,  # 'k'
+        5: 0,  # 'l'
+        13: 1,  # 'm'
+        4: 2,  # 'n'
+        15: 1,  # 'o'
+        26: 0,  # 'p'
+        7: 2,  # 'r'
+        8: 1,  # 's'
+        9: 1,  # 't'
+        14: 1,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 1,  # 'y'
+        22: 1,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 1,  # 'Ö'
+        55: 2,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 2,  # 'ö'
+        17: 0,  # 'ü'
+        30: 1,  # 'ğ'
+        41: 1,  # 'İ'
+        6: 2,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 2,  # 'ş'
+    },
+    36: {  # 'G'
+        23: 1,  # 'A'
+        37: 0,  # 'B'
+        47: 1,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 1,  # 'F'
+        36: 2,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 2,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 2,  # 'N'
+        42: 1,  # 'O'
+        48: 1,  # 'P'
+        44: 1,  # 'R'
+        35: 1,  # 'S'
+        31: 0,  # 'T'
+        51: 1,  # 'U'
+        38: 2,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 0,  # 'b'
+        28: 1,  # 'c'
+        12: 0,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 1,  # 'j'
+        10: 1,  # 'k'
+        5: 0,  # 'l'
+        13: 3,  # 'm'
+        4: 2,  # 'n'
+        15: 0,  # 'o'
+        26: 1,  # 'p'
+        7: 0,  # 'r'
+        8: 1,  # 's'
+        9: 1,  # 't'
+        14: 3,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 1,  # 'x'
+        11: 0,  # 'y'
+        22: 2,  # 'z'
+        63: 0,  # '·'
+        54: 1,  # 'Ç'
+        50: 2,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 1,  # 'â'
+        33: 2,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 0,  # 'ü'
+        30: 1,  # 'ğ'
+        41: 1,  # 'İ'
+        6: 2,  # 'ı'
+        40: 2,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    45: {  # 'H'
+        23: 0,  # 'A'
+        37: 1,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 2,  # 'F'
+        36: 2,  # 'G'
+        45: 1,  # 'H'
+        53: 1,  # 'I'
+        60: 0,  # 'J'
+        16: 2,  # 'K'
+        49: 1,  # 'L'
+        20: 0,  # 'M'
+        46: 1,  # 'N'
+        42: 1,  # 'O'
+        48: 1,  # 'P'
+        44: 0,  # 'R'
+        35: 2,  # 'S'
+        31: 0,  # 'T'
+        51: 1,  # 'U'
+        38: 2,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 0,  # 'b'
+        28: 2,  # 'c'
+        12: 0,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 2,  # 'i'
+        24: 0,  # 'j'
+        10: 1,  # 'k'
+        5: 0,  # 'l'
+        13: 2,  # 'm'
+        4: 0,  # 'n'
+        15: 1,  # 'o'
+        26: 1,  # 'p'
+        7: 1,  # 'r'
+        8: 0,  # 's'
+        9: 0,  # 't'
+        14: 3,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 2,  # 'z'
+        63: 0,  # '·'
+        54: 1,  # 'Ç'
+        50: 1,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 1,  # 'ç'
+        61: 0,  # 'î'
+        34: 1,  # 'ö'
+        17: 0,  # 'ü'
+        30: 2,  # 'ğ'
+        41: 1,  # 'İ'
+        6: 0,  # 'ı'
+        40: 2,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    53: {  # 'I'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 1,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 2,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 1,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 2,  # 'a'
+        21: 0,  # 'b'
+        28: 2,  # 'c'
+        12: 0,  # 'd'
+        2: 2,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 0,  # 'j'
+        10: 0,  # 'k'
+        5: 2,  # 'l'
+        13: 2,  # 'm'
+        4: 0,  # 'n'
+        15: 0,  # 'o'
+        26: 0,  # 'p'
+        7: 0,  # 'r'
+        8: 0,  # 's'
+        9: 0,  # 't'
+        14: 2,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 2,  # 'z'
+        63: 0,  # '·'
+        54: 1,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 2,  # 'ç'
+        61: 0,  # 'î'
+        34: 1,  # 'ö'
+        17: 0,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 0,  # 'ı'
+        40: 1,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    60: {  # 'J'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 1,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 0,  # 'a'
+        21: 1,  # 'b'
+        28: 0,  # 'c'
+        12: 1,  # 'd'
+        2: 0,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 1,  # 'i'
+        24: 0,  # 'j'
+        10: 0,  # 'k'
+        5: 0,  # 'l'
+        13: 0,  # 'm'
+        4: 1,  # 'n'
+        15: 0,  # 'o'
+        26: 0,  # 'p'
+        7: 0,  # 'r'
+        8: 1,  # 's'
+        9: 0,  # 't'
+        14: 0,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 0,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 0,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    16: {  # 'K'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 3,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 2,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 2,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 2,  # 'a'
+        21: 3,  # 'b'
+        28: 0,  # 'c'
+        12: 3,  # 'd'
+        2: 1,  # 'e'
+        18: 3,  # 'f'
+        27: 3,  # 'g'
+        25: 3,  # 'h'
+        3: 3,  # 'i'
+        24: 2,  # 'j'
+        10: 3,  # 'k'
+        5: 0,  # 'l'
+        13: 0,  # 'm'
+        4: 3,  # 'n'
+        15: 0,  # 'o'
+        26: 1,  # 'p'
+        7: 3,  # 'r'
+        8: 3,  # 's'
+        9: 3,  # 't'
+        14: 0,  # 'u'
+        32: 3,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 2,  # 'y'
+        22: 1,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 2,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 1,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    49: {  # 'L'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 2,  # 'E'
+        52: 0,  # 'F'
+        36: 1,  # 'G'
+        45: 1,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 1,  # 'M'
+        46: 0,  # 'N'
+        42: 2,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 1,  # 'Y'
+        56: 0,  # 'Z'
+        1: 0,  # 'a'
+        21: 3,  # 'b'
+        28: 0,  # 'c'
+        12: 2,  # 'd'
+        2: 0,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 2,  # 'i'
+        24: 0,  # 'j'
+        10: 1,  # 'k'
+        5: 0,  # 'l'
+        13: 0,  # 'm'
+        4: 2,  # 'n'
+        15: 1,  # 'o'
+        26: 1,  # 'p'
+        7: 1,  # 'r'
+        8: 1,  # 's'
+        9: 1,  # 't'
+        14: 0,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 2,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 2,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 1,  # 'ö'
+        17: 1,  # 'ü'
+        30: 1,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 2,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    20: {  # 'M'
+        23: 1,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 1,  # 'J'
+        16: 3,  # 'K'
+        49: 0,  # 'L'
+        20: 2,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 1,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 2,  # 'b'
+        28: 0,  # 'c'
+        12: 3,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 1,  # 'g'
+        25: 1,  # 'h'
+        3: 2,  # 'i'
+        24: 2,  # 'j'
+        10: 2,  # 'k'
+        5: 2,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        15: 0,  # 'o'
+        26: 1,  # 'p'
+        7: 3,  # 'r'
+        8: 0,  # 's'
+        9: 2,  # 't'
+        14: 3,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 2,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 3,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 0,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    46: {  # 'N'
+        23: 0,  # 'A'
+        37: 1,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 1,  # 'F'
+        36: 1,  # 'G'
+        45: 1,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 2,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 1,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 1,  # 'R'
+        35: 1,  # 'S'
+        31: 0,  # 'T'
+        51: 1,  # 'U'
+        38: 2,  # 'V'
+        62: 0,  # 'W'
+        43: 1,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 0,  # 'b'
+        28: 2,  # 'c'
+        12: 0,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 1,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 2,  # 'j'
+        10: 1,  # 'k'
+        5: 1,  # 'l'
+        13: 3,  # 'm'
+        4: 2,  # 'n'
+        15: 1,  # 'o'
+        26: 1,  # 'p'
+        7: 1,  # 'r'
+        8: 0,  # 's'
+        9: 0,  # 't'
+        14: 3,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 1,  # 'x'
+        11: 1,  # 'y'
+        22: 2,  # 'z'
+        63: 0,  # '·'
+        54: 1,  # 'Ç'
+        50: 1,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 1,  # 'ö'
+        17: 0,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 1,  # 'İ'
+        6: 2,  # 'ı'
+        40: 1,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    42: {  # 'O'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 1,  # 'F'
+        36: 0,  # 'G'
+        45: 1,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 2,  # 'K'
+        49: 1,  # 'L'
+        20: 0,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 2,  # 'P'
+        44: 1,  # 'R'
+        35: 1,  # 'S'
+        31: 0,  # 'T'
+        51: 1,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 0,  # 'b'
+        28: 2,  # 'c'
+        12: 0,  # 'd'
+        2: 2,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 0,  # 'j'
+        10: 0,  # 'k'
+        5: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 0,  # 'n'
+        15: 1,  # 'o'
+        26: 0,  # 'p'
+        7: 0,  # 'r'
+        8: 0,  # 's'
+        9: 0,  # 't'
+        14: 2,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 2,  # 'z'
+        63: 0,  # '·'
+        54: 2,  # 'Ç'
+        50: 1,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 2,  # 'ç'
+        61: 0,  # 'î'
+        34: 1,  # 'ö'
+        17: 0,  # 'ü'
+        30: 1,  # 'ğ'
+        41: 2,  # 'İ'
+        6: 1,  # 'ı'
+        40: 1,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    48: {  # 'P'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 2,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 2,  # 'F'
+        36: 1,  # 'G'
+        45: 1,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 2,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 1,  # 'N'
+        42: 1,  # 'O'
+        48: 1,  # 'P'
+        44: 0,  # 'R'
+        35: 1,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 2,  # 'a'
+        21: 0,  # 'b'
+        28: 2,  # 'c'
+        12: 0,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 0,  # 'j'
+        10: 1,  # 'k'
+        5: 0,  # 'l'
+        13: 2,  # 'm'
+        4: 0,  # 'n'
+        15: 2,  # 'o'
+        26: 0,  # 'p'
+        7: 0,  # 'r'
+        8: 0,  # 's'
+        9: 0,  # 't'
+        14: 2,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 2,  # 'x'
+        11: 0,  # 'y'
+        22: 2,  # 'z'
+        63: 0,  # '·'
+        54: 1,  # 'Ç'
+        50: 2,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 2,  # 'ö'
+        17: 0,  # 'ü'
+        30: 1,  # 'ğ'
+        41: 1,  # 'İ'
+        6: 0,  # 'ı'
+        40: 2,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    44: {  # 'R'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 1,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 1,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 3,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 1,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 1,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 1,  # 'b'
+        28: 1,  # 'c'
+        12: 0,  # 'd'
+        2: 2,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 0,  # 'j'
+        10: 1,  # 'k'
+        5: 2,  # 'l'
+        13: 2,  # 'm'
+        4: 0,  # 'n'
+        15: 1,  # 'o'
+        26: 0,  # 'p'
+        7: 0,  # 'r'
+        8: 0,  # 's'
+        9: 0,  # 't'
+        14: 2,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 1,  # 'y'
+        22: 2,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 1,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 1,  # 'ç'
+        61: 0,  # 'î'
+        34: 1,  # 'ö'
+        17: 1,  # 'ü'
+        30: 1,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 2,  # 'ı'
+        40: 1,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    35: {  # 'S'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 1,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 1,  # 'F'
+        36: 1,  # 'G'
+        45: 1,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 3,  # 'K'
+        49: 1,  # 'L'
+        20: 1,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 1,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 1,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 1,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 0,  # 'b'
+        28: 2,  # 'c'
+        12: 0,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 0,  # 'j'
+        10: 1,  # 'k'
+        5: 1,  # 'l'
+        13: 2,  # 'm'
+        4: 1,  # 'n'
+        15: 0,  # 'o'
+        26: 0,  # 'p'
+        7: 0,  # 'r'
+        8: 0,  # 's'
+        9: 1,  # 't'
+        14: 2,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 1,  # 'z'
+        63: 0,  # '·'
+        54: 2,  # 'Ç'
+        50: 2,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 3,  # 'ç'
+        61: 0,  # 'î'
+        34: 1,  # 'ö'
+        17: 0,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 3,  # 'ı'
+        40: 2,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    31: {  # 'T'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 1,  # 'J'
+        16: 2,  # 'K'
+        49: 0,  # 'L'
+        20: 1,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 2,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 2,  # 'b'
+        28: 0,  # 'c'
+        12: 1,  # 'd'
+        2: 3,  # 'e'
+        18: 2,  # 'f'
+        27: 2,  # 'g'
+        25: 0,  # 'h'
+        3: 1,  # 'i'
+        24: 1,  # 'j'
+        10: 2,  # 'k'
+        5: 2,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        15: 0,  # 'o'
+        26: 2,  # 'p'
+        7: 2,  # 'r'
+        8: 0,  # 's'
+        9: 2,  # 't'
+        14: 2,  # 'u'
+        32: 1,  # 'v'
+        57: 1,  # 'w'
+        58: 1,  # 'x'
+        11: 2,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 1,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    51: {  # 'U'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 1,  # 'F'
+        36: 1,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 1,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 1,  # 'N'
+        42: 0,  # 'O'
+        48: 1,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 1,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 0,  # 'b'
+        28: 1,  # 'c'
+        12: 0,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 2,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 0,  # 'j'
+        10: 1,  # 'k'
+        5: 1,  # 'l'
+        13: 3,  # 'm'
+        4: 2,  # 'n'
+        15: 0,  # 'o'
+        26: 1,  # 'p'
+        7: 0,  # 'r'
+        8: 0,  # 's'
+        9: 0,  # 't'
+        14: 2,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 2,  # 'z'
+        63: 0,  # '·'
+        54: 1,  # 'Ç'
+        50: 1,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 0,  # 'ü'
+        30: 1,  # 'ğ'
+        41: 1,  # 'İ'
+        6: 2,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    38: {  # 'V'
+        23: 1,  # 'A'
+        37: 1,  # 'B'
+        47: 1,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 2,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 3,  # 'K'
+        49: 0,  # 'L'
+        20: 3,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 1,  # 'P'
+        44: 1,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 1,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 0,  # 'b'
+        28: 2,  # 'c'
+        12: 0,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 0,  # 'j'
+        10: 0,  # 'k'
+        5: 2,  # 'l'
+        13: 2,  # 'm'
+        4: 0,  # 'n'
+        15: 2,  # 'o'
+        26: 0,  # 'p'
+        7: 0,  # 'r'
+        8: 0,  # 's'
+        9: 1,  # 't'
+        14: 3,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 1,  # 'y'
+        22: 2,  # 'z'
+        63: 0,  # '·'
+        54: 1,  # 'Ç'
+        50: 1,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 1,  # 'â'
+        33: 2,  # 'ç'
+        61: 0,  # 'î'
+        34: 1,  # 'ö'
+        17: 0,  # 'ü'
+        30: 1,  # 'ğ'
+        41: 1,  # 'İ'
+        6: 3,  # 'ı'
+        40: 2,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    62: {  # 'W'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 0,  # 'a'
+        21: 0,  # 'b'
+        28: 0,  # 'c'
+        12: 0,  # 'd'
+        2: 0,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 0,  # 'j'
+        10: 0,  # 'k'
+        5: 0,  # 'l'
+        13: 0,  # 'm'
+        4: 0,  # 'n'
+        15: 0,  # 'o'
+        26: 0,  # 'p'
+        7: 0,  # 'r'
+        8: 0,  # 's'
+        9: 0,  # 't'
+        14: 0,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 0,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 0,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    43: {  # 'Y'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 1,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 2,  # 'F'
+        36: 0,  # 'G'
+        45: 1,  # 'H'
+        53: 1,  # 'I'
+        60: 0,  # 'J'
+        16: 2,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 2,  # 'N'
+        42: 0,  # 'O'
+        48: 2,  # 'P'
+        44: 1,  # 'R'
+        35: 1,  # 'S'
+        31: 0,  # 'T'
+        51: 1,  # 'U'
+        38: 2,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 0,  # 'b'
+        28: 2,  # 'c'
+        12: 0,  # 'd'
+        2: 2,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 1,  # 'j'
+        10: 1,  # 'k'
+        5: 1,  # 'l'
+        13: 3,  # 'm'
+        4: 0,  # 'n'
+        15: 2,  # 'o'
+        26: 0,  # 'p'
+        7: 0,  # 'r'
+        8: 0,  # 's'
+        9: 0,  # 't'
+        14: 3,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 1,  # 'x'
+        11: 0,  # 'y'
+        22: 2,  # 'z'
+        63: 0,  # '·'
+        54: 1,  # 'Ç'
+        50: 2,  # 'Ö'
+        55: 1,  # 'Ü'
+        59: 1,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 1,  # 'ö'
+        17: 0,  # 'ü'
+        30: 1,  # 'ğ'
+        41: 1,  # 'İ'
+        6: 0,  # 'ı'
+        40: 2,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    56: {  # 'Z'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 2,  # 'Z'
+        1: 2,  # 'a'
+        21: 1,  # 'b'
+        28: 0,  # 'c'
+        12: 0,  # 'd'
+        2: 2,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 2,  # 'i'
+        24: 1,  # 'j'
+        10: 0,  # 'k'
+        5: 0,  # 'l'
+        13: 1,  # 'm'
+        4: 1,  # 'n'
+        15: 0,  # 'o'
+        26: 0,  # 'p'
+        7: 1,  # 'r'
+        8: 1,  # 's'
+        9: 0,  # 't'
+        14: 2,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 1,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 1,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    1: {  # 'a'
+        23: 3,  # 'A'
+        37: 0,  # 'B'
+        47: 1,  # 'C'
+        39: 0,  # 'D'
+        29: 3,  # 'E'
+        52: 0,  # 'F'
+        36: 1,  # 'G'
+        45: 1,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 3,  # 'M'
+        46: 1,  # 'N'
+        42: 0,  # 'O'
+        48: 1,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 3,  # 'T'
+        51: 0,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 2,  # 'Z'
+        1: 2,  # 'a'
+        21: 3,  # 'b'
+        28: 0,  # 'c'
+        12: 3,  # 'd'
+        2: 2,  # 'e'
+        18: 3,  # 'f'
+        27: 3,  # 'g'
+        25: 3,  # 'h'
+        3: 3,  # 'i'
+        24: 3,  # 'j'
+        10: 3,  # 'k'
+        5: 0,  # 'l'
+        13: 2,  # 'm'
+        4: 3,  # 'n'
+        15: 1,  # 'o'
+        26: 3,  # 'p'
+        7: 3,  # 'r'
+        8: 3,  # 's'
+        9: 3,  # 't'
+        14: 3,  # 'u'
+        32: 3,  # 'v'
+        57: 2,  # 'w'
+        58: 0,  # 'x'
+        11: 3,  # 'y'
+        22: 0,  # 'z'
+        63: 1,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 1,  # 'ç'
+        61: 1,  # 'î'
+        34: 1,  # 'ö'
+        17: 3,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    21: {  # 'b'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 1,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 1,  # 'J'
+        16: 2,  # 'K'
+        49: 0,  # 'L'
+        20: 2,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 1,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 1,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 2,  # 'b'
+        28: 0,  # 'c'
+        12: 3,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 3,  # 'g'
+        25: 1,  # 'h'
+        3: 3,  # 'i'
+        24: 2,  # 'j'
+        10: 3,  # 'k'
+        5: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        15: 0,  # 'o'
+        26: 3,  # 'p'
+        7: 1,  # 'r'
+        8: 2,  # 's'
+        9: 2,  # 't'
+        14: 2,  # 'u'
+        32: 1,  # 'v'
+        57: 0,  # 'w'
+        58: 1,  # 'x'
+        11: 3,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 1,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 0,  # 'ü'
+        30: 1,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 2,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    28: {  # 'c'
+        23: 0,  # 'A'
+        37: 1,  # 'B'
+        47: 1,  # 'C'
+        39: 1,  # 'D'
+        29: 2,  # 'E'
+        52: 0,  # 'F'
+        36: 2,  # 'G'
+        45: 2,  # 'H'
+        53: 1,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 2,  # 'M'
+        46: 1,  # 'N'
+        42: 1,  # 'O'
+        48: 2,  # 'P'
+        44: 1,  # 'R'
+        35: 1,  # 'S'
+        31: 2,  # 'T'
+        51: 2,  # 'U'
+        38: 2,  # 'V'
+        62: 0,  # 'W'
+        43: 3,  # 'Y'
+        56: 0,  # 'Z'
+        1: 1,  # 'a'
+        21: 1,  # 'b'
+        28: 2,  # 'c'
+        12: 2,  # 'd'
+        2: 1,  # 'e'
+        18: 1,  # 'f'
+        27: 2,  # 'g'
+        25: 2,  # 'h'
+        3: 3,  # 'i'
+        24: 1,  # 'j'
+        10: 3,  # 'k'
+        5: 0,  # 'l'
+        13: 2,  # 'm'
+        4: 3,  # 'n'
+        15: 2,  # 'o'
+        26: 2,  # 'p'
+        7: 3,  # 'r'
+        8: 3,  # 's'
+        9: 3,  # 't'
+        14: 1,  # 'u'
+        32: 0,  # 'v'
+        57: 1,  # 'w'
+        58: 0,  # 'x'
+        11: 2,  # 'y'
+        22: 1,  # 'z'
+        63: 1,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 1,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 1,  # 'î'
+        34: 2,  # 'ö'
+        17: 2,  # 'ü'
+        30: 2,  # 'ğ'
+        41: 1,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 2,  # 'ş'
+    },
+    12: {  # 'd'
+        23: 1,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 2,  # 'J'
+        16: 3,  # 'K'
+        49: 0,  # 'L'
+        20: 3,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 1,  # 'S'
+        31: 1,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 2,  # 'b'
+        28: 1,  # 'c'
+        12: 3,  # 'd'
+        2: 3,  # 'e'
+        18: 1,  # 'f'
+        27: 3,  # 'g'
+        25: 3,  # 'h'
+        3: 2,  # 'i'
+        24: 3,  # 'j'
+        10: 2,  # 'k'
+        5: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        15: 1,  # 'o'
+        26: 2,  # 'p'
+        7: 3,  # 'r'
+        8: 2,  # 's'
+        9: 2,  # 't'
+        14: 3,  # 'u'
+        32: 1,  # 'v'
+        57: 0,  # 'w'
+        58: 1,  # 'x'
+        11: 3,  # 'y'
+        22: 1,  # 'z'
+        63: 1,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 1,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 2,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    2: {  # 'e'
+        23: 2,  # 'A'
+        37: 0,  # 'B'
+        47: 2,  # 'C'
+        39: 0,  # 'D'
+        29: 3,  # 'E'
+        52: 1,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 1,  # 'K'
+        49: 0,  # 'L'
+        20: 3,  # 'M'
+        46: 1,  # 'N'
+        42: 0,  # 'O'
+        48: 1,  # 'P'
+        44: 1,  # 'R'
+        35: 0,  # 'S'
+        31: 3,  # 'T'
+        51: 0,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 1,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 3,  # 'b'
+        28: 0,  # 'c'
+        12: 3,  # 'd'
+        2: 2,  # 'e'
+        18: 3,  # 'f'
+        27: 3,  # 'g'
+        25: 3,  # 'h'
+        3: 3,  # 'i'
+        24: 3,  # 'j'
+        10: 3,  # 'k'
+        5: 0,  # 'l'
+        13: 2,  # 'm'
+        4: 3,  # 'n'
+        15: 1,  # 'o'
+        26: 3,  # 'p'
+        7: 3,  # 'r'
+        8: 3,  # 's'
+        9: 3,  # 't'
+        14: 3,  # 'u'
+        32: 3,  # 'v'
+        57: 2,  # 'w'
+        58: 0,  # 'x'
+        11: 3,  # 'y'
+        22: 1,  # 'z'
+        63: 1,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 1,  # 'ç'
+        61: 0,  # 'î'
+        34: 1,  # 'ö'
+        17: 3,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    18: {  # 'f'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 2,  # 'K'
+        49: 0,  # 'L'
+        20: 2,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 2,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 1,  # 'b'
+        28: 0,  # 'c'
+        12: 3,  # 'd'
+        2: 3,  # 'e'
+        18: 2,  # 'f'
+        27: 1,  # 'g'
+        25: 1,  # 'h'
+        3: 1,  # 'i'
+        24: 1,  # 'j'
+        10: 1,  # 'k'
+        5: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        15: 0,  # 'o'
+        26: 2,  # 'p'
+        7: 1,  # 'r'
+        8: 3,  # 's'
+        9: 3,  # 't'
+        14: 1,  # 'u'
+        32: 2,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 1,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 1,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 1,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 1,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    27: {  # 'g'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 3,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 1,  # 'S'
+        31: 1,  # 'T'
+        51: 0,  # 'U'
+        38: 2,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 1,  # 'b'
+        28: 0,  # 'c'
+        12: 1,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 2,  # 'g'
+        25: 1,  # 'h'
+        3: 2,  # 'i'
+        24: 3,  # 'j'
+        10: 2,  # 'k'
+        5: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 2,  # 'n'
+        15: 0,  # 'o'
+        26: 1,  # 'p'
+        7: 2,  # 'r'
+        8: 2,  # 's'
+        9: 3,  # 't'
+        14: 3,  # 'u'
+        32: 1,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 1,  # 'y'
+        22: 0,  # 'z'
+        63: 1,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 0,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 2,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    25: {  # 'h'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 2,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 0,  # 'b'
+        28: 0,  # 'c'
+        12: 2,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 1,  # 'g'
+        25: 2,  # 'h'
+        3: 2,  # 'i'
+        24: 3,  # 'j'
+        10: 3,  # 'k'
+        5: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        15: 1,  # 'o'
+        26: 1,  # 'p'
+        7: 3,  # 'r'
+        8: 3,  # 's'
+        9: 2,  # 't'
+        14: 3,  # 'u'
+        32: 2,  # 'v'
+        57: 1,  # 'w'
+        58: 0,  # 'x'
+        11: 1,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 0,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    3: {  # 'i'
+        23: 2,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 1,  # 'J'
+        16: 3,  # 'K'
+        49: 0,  # 'L'
+        20: 3,  # 'M'
+        46: 0,  # 'N'
+        42: 1,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 1,  # 'S'
+        31: 2,  # 'T'
+        51: 0,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 2,  # 'b'
+        28: 0,  # 'c'
+        12: 3,  # 'd'
+        2: 3,  # 'e'
+        18: 2,  # 'f'
+        27: 3,  # 'g'
+        25: 1,  # 'h'
+        3: 3,  # 'i'
+        24: 2,  # 'j'
+        10: 3,  # 'k'
+        5: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        15: 1,  # 'o'
+        26: 3,  # 'p'
+        7: 3,  # 'r'
+        8: 3,  # 's'
+        9: 3,  # 't'
+        14: 3,  # 'u'
+        32: 2,  # 'v'
+        57: 1,  # 'w'
+        58: 1,  # 'x'
+        11: 3,  # 'y'
+        22: 1,  # 'z'
+        63: 1,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 1,  # 'Ü'
+        59: 0,  # 'â'
+        33: 2,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 3,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 1,  # 'İ'
+        6: 2,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    24: {  # 'j'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 1,  # 'J'
+        16: 2,  # 'K'
+        49: 0,  # 'L'
+        20: 2,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 1,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 1,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 1,  # 'Z'
+        1: 3,  # 'a'
+        21: 1,  # 'b'
+        28: 1,  # 'c'
+        12: 3,  # 'd'
+        2: 3,  # 'e'
+        18: 2,  # 'f'
+        27: 1,  # 'g'
+        25: 1,  # 'h'
+        3: 2,  # 'i'
+        24: 1,  # 'j'
+        10: 2,  # 'k'
+        5: 2,  # 'l'
+        13: 3,  # 'm'
+        4: 2,  # 'n'
+        15: 0,  # 'o'
+        26: 1,  # 'p'
+        7: 2,  # 'r'
+        8: 3,  # 's'
+        9: 2,  # 't'
+        14: 3,  # 'u'
+        32: 2,  # 'v'
+        57: 0,  # 'w'
+        58: 2,  # 'x'
+        11: 1,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 1,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 1,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    10: {  # 'k'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 3,  # 'K'
+        49: 0,  # 'L'
+        20: 2,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 3,  # 'T'
+        51: 0,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 1,  # 'Z'
+        1: 3,  # 'a'
+        21: 2,  # 'b'
+        28: 0,  # 'c'
+        12: 2,  # 'd'
+        2: 3,  # 'e'
+        18: 1,  # 'f'
+        27: 2,  # 'g'
+        25: 2,  # 'h'
+        3: 3,  # 'i'
+        24: 2,  # 'j'
+        10: 2,  # 'k'
+        5: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        15: 0,  # 'o'
+        26: 3,  # 'p'
+        7: 2,  # 'r'
+        8: 2,  # 's'
+        9: 2,  # 't'
+        14: 3,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 1,  # 'x'
+        11: 3,  # 'y'
+        22: 0,  # 'z'
+        63: 1,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 3,  # 'ç'
+        61: 0,  # 'î'
+        34: 1,  # 'ö'
+        17: 3,  # 'ü'
+        30: 1,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    5: {  # 'l'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 3,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 2,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 1,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 0,  # 'a'
+        21: 3,  # 'b'
+        28: 0,  # 'c'
+        12: 3,  # 'd'
+        2: 1,  # 'e'
+        18: 3,  # 'f'
+        27: 3,  # 'g'
+        25: 2,  # 'h'
+        3: 3,  # 'i'
+        24: 2,  # 'j'
+        10: 3,  # 'k'
+        5: 1,  # 'l'
+        13: 1,  # 'm'
+        4: 3,  # 'n'
+        15: 0,  # 'o'
+        26: 2,  # 'p'
+        7: 3,  # 'r'
+        8: 3,  # 's'
+        9: 3,  # 't'
+        14: 2,  # 'u'
+        32: 2,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 3,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 1,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 2,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    13: {  # 'm'
+        23: 1,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 3,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 3,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 3,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 1,  # 'Y'
+        56: 0,  # 'Z'
+        1: 2,  # 'a'
+        21: 3,  # 'b'
+        28: 0,  # 'c'
+        12: 3,  # 'd'
+        2: 2,  # 'e'
+        18: 3,  # 'f'
+        27: 3,  # 'g'
+        25: 3,  # 'h'
+        3: 3,  # 'i'
+        24: 3,  # 'j'
+        10: 3,  # 'k'
+        5: 0,  # 'l'
+        13: 2,  # 'm'
+        4: 3,  # 'n'
+        15: 1,  # 'o'
+        26: 2,  # 'p'
+        7: 3,  # 'r'
+        8: 3,  # 's'
+        9: 3,  # 't'
+        14: 2,  # 'u'
+        32: 2,  # 'v'
+        57: 1,  # 'w'
+        58: 0,  # 'x'
+        11: 3,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 3,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    4: {  # 'n'
+        23: 1,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 1,  # 'H'
+        53: 0,  # 'I'
+        60: 2,  # 'J'
+        16: 3,  # 'K'
+        49: 0,  # 'L'
+        20: 3,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 2,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 2,  # 'b'
+        28: 1,  # 'c'
+        12: 3,  # 'd'
+        2: 3,  # 'e'
+        18: 1,  # 'f'
+        27: 2,  # 'g'
+        25: 3,  # 'h'
+        3: 2,  # 'i'
+        24: 2,  # 'j'
+        10: 3,  # 'k'
+        5: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        15: 1,  # 'o'
+        26: 3,  # 'p'
+        7: 2,  # 'r'
+        8: 3,  # 's'
+        9: 3,  # 't'
+        14: 3,  # 'u'
+        32: 2,  # 'v'
+        57: 0,  # 'w'
+        58: 2,  # 'x'
+        11: 3,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 1,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 2,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 1,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    15: {  # 'o'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 1,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 2,  # 'F'
+        36: 1,  # 'G'
+        45: 1,  # 'H'
+        53: 1,  # 'I'
+        60: 0,  # 'J'
+        16: 3,  # 'K'
+        49: 2,  # 'L'
+        20: 0,  # 'M'
+        46: 2,  # 'N'
+        42: 1,  # 'O'
+        48: 2,  # 'P'
+        44: 1,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 0,  # 'b'
+        28: 2,  # 'c'
+        12: 0,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 1,  # 'i'
+        24: 2,  # 'j'
+        10: 1,  # 'k'
+        5: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 2,  # 'n'
+        15: 2,  # 'o'
+        26: 0,  # 'p'
+        7: 1,  # 'r'
+        8: 0,  # 's'
+        9: 0,  # 't'
+        14: 3,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 2,  # 'x'
+        11: 0,  # 'y'
+        22: 2,  # 'z'
+        63: 0,  # '·'
+        54: 1,  # 'Ç'
+        50: 2,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 3,  # 'ç'
+        61: 0,  # 'î'
+        34: 1,  # 'ö'
+        17: 0,  # 'ü'
+        30: 2,  # 'ğ'
+        41: 2,  # 'İ'
+        6: 3,  # 'ı'
+        40: 2,  # 'Ş'
+        19: 2,  # 'ş'
+    },
+    26: {  # 'p'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 3,  # 'K'
+        49: 0,  # 'L'
+        20: 1,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 1,  # 'b'
+        28: 0,  # 'c'
+        12: 1,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 1,  # 'g'
+        25: 1,  # 'h'
+        3: 2,  # 'i'
+        24: 3,  # 'j'
+        10: 1,  # 'k'
+        5: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 2,  # 'n'
+        15: 0,  # 'o'
+        26: 2,  # 'p'
+        7: 2,  # 'r'
+        8: 1,  # 's'
+        9: 1,  # 't'
+        14: 3,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 1,  # 'x'
+        11: 1,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 3,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 1,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    7: {  # 'r'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 1,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 2,  # 'J'
+        16: 3,  # 'K'
+        49: 0,  # 'L'
+        20: 2,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 2,  # 'T'
+        51: 1,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 1,  # 'Z'
+        1: 3,  # 'a'
+        21: 1,  # 'b'
+        28: 0,  # 'c'
+        12: 3,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 2,  # 'g'
+        25: 3,  # 'h'
+        3: 2,  # 'i'
+        24: 2,  # 'j'
+        10: 3,  # 'k'
+        5: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        15: 0,  # 'o'
+        26: 2,  # 'p'
+        7: 3,  # 'r'
+        8: 3,  # 's'
+        9: 3,  # 't'
+        14: 3,  # 'u'
+        32: 2,  # 'v'
+        57: 0,  # 'w'
+        58: 1,  # 'x'
+        11: 2,  # 'y'
+        22: 0,  # 'z'
+        63: 1,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 2,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 3,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 2,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    8: {  # 's'
+        23: 1,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 1,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 3,  # 'K'
+        49: 0,  # 'L'
+        20: 3,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 2,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 1,  # 'Z'
+        1: 3,  # 'a'
+        21: 2,  # 'b'
+        28: 1,  # 'c'
+        12: 3,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 2,  # 'g'
+        25: 2,  # 'h'
+        3: 2,  # 'i'
+        24: 3,  # 'j'
+        10: 3,  # 'k'
+        5: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        15: 0,  # 'o'
+        26: 3,  # 'p'
+        7: 3,  # 'r'
+        8: 3,  # 's'
+        9: 3,  # 't'
+        14: 3,  # 'u'
+        32: 2,  # 'v'
+        57: 0,  # 'w'
+        58: 1,  # 'x'
+        11: 2,  # 'y'
+        22: 1,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 2,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 2,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    9: {  # 't'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 1,  # 'J'
+        16: 3,  # 'K'
+        49: 0,  # 'L'
+        20: 2,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 2,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 1,  # 'Z'
+        1: 3,  # 'a'
+        21: 3,  # 'b'
+        28: 0,  # 'c'
+        12: 3,  # 'd'
+        2: 3,  # 'e'
+        18: 2,  # 'f'
+        27: 2,  # 'g'
+        25: 2,  # 'h'
+        3: 2,  # 'i'
+        24: 2,  # 'j'
+        10: 3,  # 'k'
+        5: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        15: 0,  # 'o'
+        26: 2,  # 'p'
+        7: 3,  # 'r'
+        8: 3,  # 's'
+        9: 3,  # 't'
+        14: 3,  # 'u'
+        32: 3,  # 'v'
+        57: 0,  # 'w'
+        58: 2,  # 'x'
+        11: 2,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 3,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 2,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    14: {  # 'u'
+        23: 3,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 3,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 1,  # 'H'
+        53: 0,  # 'I'
+        60: 1,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 3,  # 'M'
+        46: 2,  # 'N'
+        42: 0,  # 'O'
+        48: 1,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 3,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 1,  # 'Y'
+        56: 2,  # 'Z'
+        1: 2,  # 'a'
+        21: 3,  # 'b'
+        28: 0,  # 'c'
+        12: 3,  # 'd'
+        2: 2,  # 'e'
+        18: 2,  # 'f'
+        27: 3,  # 'g'
+        25: 3,  # 'h'
+        3: 3,  # 'i'
+        24: 2,  # 'j'
+        10: 3,  # 'k'
+        5: 0,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        15: 0,  # 'o'
+        26: 3,  # 'p'
+        7: 3,  # 'r'
+        8: 3,  # 's'
+        9: 3,  # 't'
+        14: 3,  # 'u'
+        32: 2,  # 'v'
+        57: 2,  # 'w'
+        58: 0,  # 'x'
+        11: 3,  # 'y'
+        22: 0,  # 'z'
+        63: 1,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 3,  # 'ü'
+        30: 1,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    32: {  # 'v'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 3,  # 'K'
+        49: 0,  # 'L'
+        20: 1,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 0,  # 'b'
+        28: 0,  # 'c'
+        12: 3,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 1,  # 'j'
+        10: 1,  # 'k'
+        5: 3,  # 'l'
+        13: 2,  # 'm'
+        4: 3,  # 'n'
+        15: 0,  # 'o'
+        26: 1,  # 'p'
+        7: 1,  # 'r'
+        8: 2,  # 's'
+        9: 3,  # 't'
+        14: 3,  # 'u'
+        32: 1,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 2,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 0,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 1,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    57: {  # 'w'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 1,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 1,  # 'a'
+        21: 0,  # 'b'
+        28: 0,  # 'c'
+        12: 0,  # 'd'
+        2: 2,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 1,  # 'h'
+        3: 0,  # 'i'
+        24: 0,  # 'j'
+        10: 1,  # 'k'
+        5: 0,  # 'l'
+        13: 0,  # 'm'
+        4: 1,  # 'n'
+        15: 0,  # 'o'
+        26: 0,  # 'p'
+        7: 0,  # 'r'
+        8: 1,  # 's'
+        9: 0,  # 't'
+        14: 1,  # 'u'
+        32: 0,  # 'v'
+        57: 2,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 0,  # 'z'
+        63: 1,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 1,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 0,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    58: {  # 'x'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 1,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 1,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 1,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 0,  # 'a'
+        21: 1,  # 'b'
+        28: 0,  # 'c'
+        12: 2,  # 'd'
+        2: 1,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 2,  # 'i'
+        24: 2,  # 'j'
+        10: 1,  # 'k'
+        5: 0,  # 'l'
+        13: 0,  # 'm'
+        4: 2,  # 'n'
+        15: 0,  # 'o'
+        26: 0,  # 'p'
+        7: 1,  # 'r'
+        8: 2,  # 's'
+        9: 1,  # 't'
+        14: 0,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 2,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 1,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 2,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    11: {  # 'y'
+        23: 1,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 1,  # 'J'
+        16: 3,  # 'K'
+        49: 0,  # 'L'
+        20: 1,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 1,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 1,  # 'Y'
+        56: 1,  # 'Z'
+        1: 3,  # 'a'
+        21: 1,  # 'b'
+        28: 0,  # 'c'
+        12: 2,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 2,  # 'g'
+        25: 2,  # 'h'
+        3: 2,  # 'i'
+        24: 1,  # 'j'
+        10: 2,  # 'k'
+        5: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        15: 0,  # 'o'
+        26: 1,  # 'p'
+        7: 2,  # 'r'
+        8: 1,  # 's'
+        9: 2,  # 't'
+        14: 3,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 1,  # 'x'
+        11: 3,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 3,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 2,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    22: {  # 'z'
+        23: 2,  # 'A'
+        37: 2,  # 'B'
+        47: 1,  # 'C'
+        39: 2,  # 'D'
+        29: 3,  # 'E'
+        52: 1,  # 'F'
+        36: 2,  # 'G'
+        45: 2,  # 'H'
+        53: 1,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 3,  # 'M'
+        46: 2,  # 'N'
+        42: 2,  # 'O'
+        48: 2,  # 'P'
+        44: 1,  # 'R'
+        35: 1,  # 'S'
+        31: 3,  # 'T'
+        51: 2,  # 'U'
+        38: 2,  # 'V'
+        62: 0,  # 'W'
+        43: 2,  # 'Y'
+        56: 1,  # 'Z'
+        1: 1,  # 'a'
+        21: 2,  # 'b'
+        28: 1,  # 'c'
+        12: 2,  # 'd'
+        2: 2,  # 'e'
+        18: 3,  # 'f'
+        27: 2,  # 'g'
+        25: 2,  # 'h'
+        3: 3,  # 'i'
+        24: 2,  # 'j'
+        10: 3,  # 'k'
+        5: 0,  # 'l'
+        13: 2,  # 'm'
+        4: 3,  # 'n'
+        15: 2,  # 'o'
+        26: 2,  # 'p'
+        7: 3,  # 'r'
+        8: 3,  # 's'
+        9: 3,  # 't'
+        14: 0,  # 'u'
+        32: 2,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 3,  # 'y'
+        22: 2,  # 'z'
+        63: 1,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 2,  # 'Ü'
+        59: 1,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 2,  # 'ö'
+        17: 2,  # 'ü'
+        30: 2,  # 'ğ'
+        41: 1,  # 'İ'
+        6: 3,  # 'ı'
+        40: 1,  # 'Ş'
+        19: 2,  # 'ş'
+    },
+    63: {  # '·'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 0,  # 'a'
+        21: 0,  # 'b'
+        28: 0,  # 'c'
+        12: 0,  # 'd'
+        2: 1,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 0,  # 'j'
+        10: 0,  # 'k'
+        5: 0,  # 'l'
+        13: 2,  # 'm'
+        4: 0,  # 'n'
+        15: 0,  # 'o'
+        26: 0,  # 'p'
+        7: 0,  # 'r'
+        8: 0,  # 's'
+        9: 0,  # 't'
+        14: 2,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 0,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 0,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    54: {  # 'Ç'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 1,  # 'C'
+        39: 1,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 1,  # 'G'
+        45: 1,  # 'H'
+        53: 1,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 0,  # 'N'
+        42: 1,  # 'O'
+        48: 1,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 1,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 2,  # 'Y'
+        56: 0,  # 'Z'
+        1: 0,  # 'a'
+        21: 1,  # 'b'
+        28: 0,  # 'c'
+        12: 1,  # 'd'
+        2: 0,  # 'e'
+        18: 0,  # 'f'
+        27: 1,  # 'g'
+        25: 0,  # 'h'
+        3: 3,  # 'i'
+        24: 0,  # 'j'
+        10: 1,  # 'k'
+        5: 0,  # 'l'
+        13: 0,  # 'm'
+        4: 2,  # 'n'
+        15: 1,  # 'o'
+        26: 0,  # 'p'
+        7: 2,  # 'r'
+        8: 0,  # 's'
+        9: 1,  # 't'
+        14: 0,  # 'u'
+        32: 2,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 2,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 1,  # 'ö'
+        17: 0,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 2,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    50: {  # 'Ö'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 1,  # 'C'
+        39: 1,  # 'D'
+        29: 2,  # 'E'
+        52: 0,  # 'F'
+        36: 1,  # 'G'
+        45: 2,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 1,  # 'M'
+        46: 1,  # 'N'
+        42: 2,  # 'O'
+        48: 2,  # 'P'
+        44: 1,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 1,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 2,  # 'Y'
+        56: 0,  # 'Z'
+        1: 0,  # 'a'
+        21: 2,  # 'b'
+        28: 1,  # 'c'
+        12: 2,  # 'd'
+        2: 0,  # 'e'
+        18: 1,  # 'f'
+        27: 1,  # 'g'
+        25: 1,  # 'h'
+        3: 2,  # 'i'
+        24: 0,  # 'j'
+        10: 2,  # 'k'
+        5: 0,  # 'l'
+        13: 0,  # 'm'
+        4: 3,  # 'n'
+        15: 2,  # 'o'
+        26: 2,  # 'p'
+        7: 3,  # 'r'
+        8: 1,  # 's'
+        9: 2,  # 't'
+        14: 0,  # 'u'
+        32: 1,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 1,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 2,  # 'ö'
+        17: 2,  # 'ü'
+        30: 1,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 2,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    55: {  # 'Ü'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 2,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 1,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 1,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 2,  # 'a'
+        21: 0,  # 'b'
+        28: 2,  # 'c'
+        12: 0,  # 'd'
+        2: 2,  # 'e'
+        18: 0,  # 'f'
+        27: 1,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 0,  # 'j'
+        10: 0,  # 'k'
+        5: 1,  # 'l'
+        13: 1,  # 'm'
+        4: 1,  # 'n'
+        15: 0,  # 'o'
+        26: 0,  # 'p'
+        7: 0,  # 'r'
+        8: 0,  # 's'
+        9: 1,  # 't'
+        14: 2,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 1,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 1,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 1,  # 'ö'
+        17: 0,  # 'ü'
+        30: 1,  # 'ğ'
+        41: 1,  # 'İ'
+        6: 0,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    59: {  # 'â'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 1,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 1,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 2,  # 'a'
+        21: 0,  # 'b'
+        28: 0,  # 'c'
+        12: 0,  # 'd'
+        2: 2,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 0,  # 'j'
+        10: 0,  # 'k'
+        5: 0,  # 'l'
+        13: 2,  # 'm'
+        4: 0,  # 'n'
+        15: 1,  # 'o'
+        26: 0,  # 'p'
+        7: 0,  # 'r'
+        8: 0,  # 's'
+        9: 0,  # 't'
+        14: 2,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 1,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 0,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 1,  # 'ı'
+        40: 1,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    33: {  # 'ç'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 3,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 1,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 2,  # 'T'
+        51: 0,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 0,  # 'Z'
+        1: 0,  # 'a'
+        21: 3,  # 'b'
+        28: 0,  # 'c'
+        12: 2,  # 'd'
+        2: 0,  # 'e'
+        18: 2,  # 'f'
+        27: 1,  # 'g'
+        25: 3,  # 'h'
+        3: 3,  # 'i'
+        24: 0,  # 'j'
+        10: 3,  # 'k'
+        5: 0,  # 'l'
+        13: 0,  # 'm'
+        4: 3,  # 'n'
+        15: 0,  # 'o'
+        26: 1,  # 'p'
+        7: 3,  # 'r'
+        8: 2,  # 's'
+        9: 3,  # 't'
+        14: 0,  # 'u'
+        32: 2,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 2,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 1,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    61: {  # 'î'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 1,  # 'Z'
+        1: 2,  # 'a'
+        21: 0,  # 'b'
+        28: 0,  # 'c'
+        12: 0,  # 'd'
+        2: 2,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 1,  # 'j'
+        10: 0,  # 'k'
+        5: 0,  # 'l'
+        13: 1,  # 'm'
+        4: 1,  # 'n'
+        15: 0,  # 'o'
+        26: 0,  # 'p'
+        7: 0,  # 'r'
+        8: 0,  # 's'
+        9: 0,  # 't'
+        14: 1,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 1,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 1,  # 'î'
+        34: 0,  # 'ö'
+        17: 0,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 1,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    34: {  # 'ö'
+        23: 0,  # 'A'
+        37: 1,  # 'B'
+        47: 1,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 2,  # 'F'
+        36: 1,  # 'G'
+        45: 1,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 3,  # 'K'
+        49: 1,  # 'L'
+        20: 0,  # 'M'
+        46: 1,  # 'N'
+        42: 1,  # 'O'
+        48: 2,  # 'P'
+        44: 1,  # 'R'
+        35: 1,  # 'S'
+        31: 1,  # 'T'
+        51: 1,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 1,  # 'Z'
+        1: 3,  # 'a'
+        21: 1,  # 'b'
+        28: 2,  # 'c'
+        12: 1,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 2,  # 'g'
+        25: 2,  # 'h'
+        3: 1,  # 'i'
+        24: 2,  # 'j'
+        10: 1,  # 'k'
+        5: 2,  # 'l'
+        13: 3,  # 'm'
+        4: 2,  # 'n'
+        15: 2,  # 'o'
+        26: 0,  # 'p'
+        7: 0,  # 'r'
+        8: 3,  # 's'
+        9: 1,  # 't'
+        14: 3,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 1,  # 'y'
+        22: 2,  # 'z'
+        63: 0,  # '·'
+        54: 1,  # 'Ç'
+        50: 2,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 2,  # 'ç'
+        61: 0,  # 'î'
+        34: 2,  # 'ö'
+        17: 0,  # 'ü'
+        30: 2,  # 'ğ'
+        41: 1,  # 'İ'
+        6: 1,  # 'ı'
+        40: 2,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    17: {  # 'ü'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 1,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 1,  # 'J'
+        16: 1,  # 'K'
+        49: 0,  # 'L'
+        20: 1,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 1,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 0,  # 'Y'
+        56: 1,  # 'Z'
+        1: 3,  # 'a'
+        21: 0,  # 'b'
+        28: 0,  # 'c'
+        12: 1,  # 'd'
+        2: 3,  # 'e'
+        18: 1,  # 'f'
+        27: 2,  # 'g'
+        25: 0,  # 'h'
+        3: 1,  # 'i'
+        24: 1,  # 'j'
+        10: 2,  # 'k'
+        5: 3,  # 'l'
+        13: 2,  # 'm'
+        4: 3,  # 'n'
+        15: 0,  # 'o'
+        26: 2,  # 'p'
+        7: 2,  # 'r'
+        8: 3,  # 's'
+        9: 2,  # 't'
+        14: 3,  # 'u'
+        32: 1,  # 'v'
+        57: 1,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 1,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 2,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 2,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    30: {  # 'ğ'
+        23: 0,  # 'A'
+        37: 2,  # 'B'
+        47: 1,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 2,  # 'F'
+        36: 1,  # 'G'
+        45: 0,  # 'H'
+        53: 1,  # 'I'
+        60: 0,  # 'J'
+        16: 3,  # 'K'
+        49: 0,  # 'L'
+        20: 1,  # 'M'
+        46: 2,  # 'N'
+        42: 2,  # 'O'
+        48: 1,  # 'P'
+        44: 1,  # 'R'
+        35: 0,  # 'S'
+        31: 1,  # 'T'
+        51: 0,  # 'U'
+        38: 2,  # 'V'
+        62: 0,  # 'W'
+        43: 2,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 0,  # 'b'
+        28: 2,  # 'c'
+        12: 0,  # 'd'
+        2: 2,  # 'e'
+        18: 0,  # 'f'
+        27: 0,  # 'g'
+        25: 0,  # 'h'
+        3: 0,  # 'i'
+        24: 3,  # 'j'
+        10: 1,  # 'k'
+        5: 2,  # 'l'
+        13: 3,  # 'm'
+        4: 0,  # 'n'
+        15: 1,  # 'o'
+        26: 0,  # 'p'
+        7: 1,  # 'r'
+        8: 0,  # 's'
+        9: 0,  # 't'
+        14: 3,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 2,  # 'z'
+        63: 0,  # '·'
+        54: 2,  # 'Ç'
+        50: 2,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 1,  # 'ç'
+        61: 0,  # 'î'
+        34: 2,  # 'ö'
+        17: 0,  # 'ü'
+        30: 1,  # 'ğ'
+        41: 2,  # 'İ'
+        6: 2,  # 'ı'
+        40: 2,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    41: {  # 'İ'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 1,  # 'C'
+        39: 1,  # 'D'
+        29: 1,  # 'E'
+        52: 0,  # 'F'
+        36: 2,  # 'G'
+        45: 2,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 2,  # 'M'
+        46: 1,  # 'N'
+        42: 1,  # 'O'
+        48: 2,  # 'P'
+        44: 0,  # 'R'
+        35: 1,  # 'S'
+        31: 1,  # 'T'
+        51: 1,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 2,  # 'Y'
+        56: 0,  # 'Z'
+        1: 1,  # 'a'
+        21: 2,  # 'b'
+        28: 1,  # 'c'
+        12: 2,  # 'd'
+        2: 1,  # 'e'
+        18: 0,  # 'f'
+        27: 3,  # 'g'
+        25: 2,  # 'h'
+        3: 2,  # 'i'
+        24: 2,  # 'j'
+        10: 2,  # 'k'
+        5: 0,  # 'l'
+        13: 1,  # 'm'
+        4: 3,  # 'n'
+        15: 1,  # 'o'
+        26: 1,  # 'p'
+        7: 3,  # 'r'
+        8: 3,  # 's'
+        9: 2,  # 't'
+        14: 0,  # 'u'
+        32: 0,  # 'v'
+        57: 1,  # 'w'
+        58: 0,  # 'x'
+        11: 2,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 1,  # 'Ü'
+        59: 1,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 1,  # 'ö'
+        17: 1,  # 'ü'
+        30: 2,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+    6: {  # 'ı'
+        23: 2,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 1,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 2,  # 'J'
+        16: 3,  # 'K'
+        49: 0,  # 'L'
+        20: 3,  # 'M'
+        46: 1,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 2,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  # 'W'
+        43: 2,  # 'Y'
+        56: 1,  # 'Z'
+        1: 3,  # 'a'
+        21: 2,  # 'b'
+        28: 1,  # 'c'
+        12: 3,  # 'd'
+        2: 3,  # 'e'
+        18: 3,  # 'f'
+        27: 3,  # 'g'
+        25: 2,  # 'h'
+        3: 3,  # 'i'
+        24: 3,  # 'j'
+        10: 3,  # 'k'
+        5: 3,  # 'l'
+        13: 3,  # 'm'
+        4: 3,  # 'n'
+        15: 0,  # 'o'
+        26: 3,  # 'p'
+        7: 3,  # 'r'
+        8: 3,  # 's'
+        9: 3,  # 't'
+        14: 3,  # 'u'
+        32: 3,  # 'v'
+        57: 1,  # 'w'
+        58: 1,  # 'x'
+        11: 3,  # 'y'
+        22: 0,  # 'z'
+        63: 1,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 2,  # 'ç'
+        61: 0,  # 'î'
+        34: 0,  # 'ö'
+        17: 3,  # 'ü'
+        30: 0,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 3,  # 'ı'
+        40: 0,  # 'Ş'
+        19: 0,  # 'ş'
+    },
+    40: {  # 'Ş'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 1,  # 'C'
+        39: 1,  # 'D'
+        29: 1,  # 'E'
+        52: 0,  # 'F'
+        36: 1,  # 'G'
+        45: 2,  # 'H'
+        53: 1,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 2,  # 'M'
+        46: 1,  # 'N'
+        42: 1,  # 'O'
+        48: 2,  # 'P'
+        44: 2,  # 'R'
+        35: 1,  # 'S'
+        31: 1,  # 'T'
+        51: 0,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 2,  # 'Y'
+        56: 1,  # 'Z'
+        1: 0,  # 'a'
+        21: 2,  # 'b'
+        28: 0,  # 'c'
+        12: 2,  # 'd'
+        2: 0,  # 'e'
+        18: 3,  # 'f'
+        27: 0,  # 'g'
+        25: 2,  # 'h'
+        3: 3,  # 'i'
+        24: 2,  # 'j'
+        10: 1,  # 'k'
+        5: 0,  # 'l'
+        13: 1,  # 'm'
+        4: 3,  # 'n'
+        15: 2,  # 'o'
+        26: 0,  # 'p'
+        7: 3,  # 'r'
+        8: 2,  # 's'
+        9: 2,  # 't'
+        14: 1,  # 'u'
+        32: 3,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 2,  # 'y'
+        22: 0,  # 'z'
+        63: 0,  # '·'
+        54: 0,  # 'Ç'
+        50: 0,  # 'Ö'
+        55: 1,  # 'Ü'
+        59: 0,  # 'â'
+        33: 0,  # 'ç'
+        61: 0,  # 'î'
+        34: 2,  # 'ö'
+        17: 1,  # 'ü'
+        30: 2,  # 'ğ'
+        41: 0,  # 'İ'
+        6: 2,  # 'ı'
+        40: 1,  # 'Ş'
+        19: 2,  # 'ş'
+    },
+    19: {  # 'ş'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 1,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 2,  # 'F'
+        36: 1,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 3,  # 'K'
+        49: 2,  # 'L'
+        20: 0,  # 'M'
+        46: 1,  # 'N'
+        42: 1,  # 'O'
+        48: 1,  # 'P'
+        44: 1,  # 'R'
+        35: 1,  # 'S'
+        31: 0,  # 'T'
+        51: 1,  # 'U'
+        38: 1,  # 'V'
+        62: 0,  # 'W'
+        43: 1,  # 'Y'
+        56: 0,  # 'Z'
+        1: 3,  # 'a'
+        21: 1,  # 'b'
+        28: 2,  # 'c'
+        12: 0,  # 'd'
+        2: 3,  # 'e'
+        18: 0,  # 'f'
+        27: 2,  # 'g'
+        25: 1,  # 'h'
+        3: 1,  # 'i'
+        24: 0,  # 'j'
+        10: 2,  # 'k'
+        5: 2,  # 'l'
+        13: 3,  # 'm'
+        4: 0,  # 'n'
+        15: 0,  # 'o'
+        26: 1,  # 'p'
+        7: 3,  # 'r'
+        8: 0,  # 's'
+        9: 0,  # 't'
+        14: 3,  # 'u'
+        32: 0,  # 'v'
+        57: 0,  # 'w'
+        58: 0,  # 'x'
+        11: 0,  # 'y'
+        22: 2,  # 'z'
+        63: 0,  # '·'
+        54: 1,  # 'Ç'
+        50: 2,  # 'Ö'
+        55: 0,  # 'Ü'
+        59: 0,  # 'â'
+        33: 1,  # 'ç'
+        61: 1,  # 'î'
+        34: 2,  # 'ö'
+        17: 0,  # 'ü'
+        30: 1,  # 'ğ'
+        41: 1,  # 'İ'
+        6: 1,  # 'ı'
+        40: 1,  # 'Ş'
+        19: 1,  # 'ş'
+    },
+}
+
+# 255: Undefined characters that did not exist in training text
+# 254: Carriage/Return
+# 253: symbol (punctuation) that does not belong to word
+# 252: 0 - 9
+# 251: Control characters
+
+# Character Mapping Table(s):
+ISO_8859_9_TURKISH_CHAR_TO_ORDER = {
+    0: 255,  # '\x00'
+    1: 255,  # '\x01'
+    2: 255,  # '\x02'
+    3: 255,  # '\x03'
+    4: 255,  # '\x04'
+    5: 255,  # '\x05'
+    6: 255,  # '\x06'
+    7: 255,  # '\x07'
+    8: 255,  # '\x08'
+    9: 255,  # '\t'
+    10: 255,  # '\n'
+    11: 255,  # '\x0b'
+    12: 255,  # '\x0c'
+    13: 255,  # '\r'
+    14: 255,  # '\x0e'
+    15: 255,  # '\x0f'
+    16: 255,  # '\x10'
+    17: 255,  # '\x11'
+    18: 255,  # '\x12'
+    19: 255,  # '\x13'
+    20: 255,  # '\x14'
+    21: 255,  # '\x15'
+    22: 255,  # '\x16'
+    23: 255,  # '\x17'
+    24: 255,  # '\x18'
+    25: 255,  # '\x19'
+    26: 255,  # '\x1a'
+    27: 255,  # '\x1b'
+    28: 255,  # '\x1c'
+    29: 255,  # '\x1d'
+    30: 255,  # '\x1e'
+    31: 255,  # '\x1f'
+    32: 255,  # ' '
+    33: 255,  # '!'
+    34: 255,  # '"'
+    35: 255,  # '#'
+    36: 255,  # '$'
+    37: 255,  # '%'
+    38: 255,  # '&'
+    39: 255,  # "'"
+    40: 255,  # '('
+    41: 255,  # ')'
+    42: 255,  # '*'
+    43: 255,  # '+'
+    44: 255,  # ','
+    45: 255,  # '-'
+    46: 255,  # '.'
+    47: 255,  # '/'
+    48: 255,  # '0'
+    49: 255,  # '1'
+    50: 255,  # '2'
+    51: 255,  # '3'
+    52: 255,  # '4'
+    53: 255,  # '5'
+    54: 255,  # '6'
+    55: 255,  # '7'
+    56: 255,  # '8'
+    57: 255,  # '9'
+    58: 255,  # ':'
+    59: 255,  # ';'
+    60: 255,  # '<'
+    61: 255,  # '='
+    62: 255,  # '>'
+    63: 255,  # '?'
+    64: 255,  # '@'
+    65: 23,  # 'A'
+    66: 37,  # 'B'
+    67: 47,  # 'C'
+    68: 39,  # 'D'
+    69: 29,  # 'E'
+    70: 52,  # 'F'
+    71: 36,  # 'G'
+    72: 45,  # 'H'
+    73: 53,  # 'I'
+    74: 60,  # 'J'
+    75: 16,  # 'K'
+    76: 49,  # 'L'
+    77: 20,  # 'M'
+    78: 46,  # 'N'
+    79: 42,  # 'O'
+    80: 48,  # 'P'
+    81: 69,  # 'Q'
+    82: 44,  # 'R'
+    83: 35,  # 'S'
+    84: 31,  # 'T'
+    85: 51,  # 'U'
+    86: 38,  # 'V'
+    87: 62,  # 'W'
+    88: 65,  # 'X'
+    89: 43,  # 'Y'
+    90: 56,  # 'Z'
+    91: 255,  # '['
+    92: 255,  # '\\'
+    93: 255,  # ']'
+    94: 255,  # '^'
+    95: 255,  # '_'
+    96: 255,  # '`'
+    97: 1,  # 'a'
+    98: 21,  # 'b'
+    99: 28,  # 'c'
+    100: 12,  # 'd'
+    101: 2,  # 'e'
+    102: 18,  # 'f'
+    103: 27,  # 'g'
+    104: 25,  # 'h'
+    105: 3,  # 'i'
+    106: 24,  # 'j'
+    107: 10,  # 'k'
+    108: 5,  # 'l'
+    109: 13,  # 'm'
+    110: 4,  # 'n'
+    111: 15,  # 'o'
+    112: 26,  # 'p'
+    113: 64,  # 'q'
+    114: 7,  # 'r'
+    115: 8,  # 's'
+    116: 9,  # 't'
+    117: 14,  # 'u'
+    118: 32,  # 'v'
+    119: 57,  # 'w'
+    120: 58,  # 'x'
+    121: 11,  # 'y'
+    122: 22,  # 'z'
+    123: 255,  # '{'
+    124: 255,  # '|'
+    125: 255,  # '}'
+    126: 255,  # '~'
+    127: 255,  # '\x7f'
+    128: 180,  # '\x80'
+    129: 179,  # '\x81'
+    130: 178,  # '\x82'
+    131: 177,  # '\x83'
+    132: 176,  # '\x84'
+    133: 175,  # '\x85'
+    134: 174,  # '\x86'
+    135: 173,  # '\x87'
+    136: 172,  # '\x88'
+    137: 171,  # '\x89'
+    138: 170,  # '\x8a'
+    139: 169,  # '\x8b'
+    140: 168,  # '\x8c'
+    141: 167,  # '\x8d'
+    142: 166,  # '\x8e'
+    143: 165,  # '\x8f'
+    144: 164,  # '\x90'
+    145: 163,  # '\x91'
+    146: 162,  # '\x92'
+    147: 161,  # '\x93'
+    148: 160,  # '\x94'
+    149: 159,  # '\x95'
+    150: 101,  # '\x96'
+    151: 158,  # '\x97'
+    152: 157,  # '\x98'
+    153: 156,  # '\x99'
+    154: 155,  # '\x9a'
+    155: 154,  # '\x9b'
+    156: 153,  # '\x9c'
+    157: 152,  # '\x9d'
+    158: 151,  # '\x9e'
+    159: 106,  # '\x9f'
+    160: 150,  # '\xa0'
+    161: 149,  # '¡'
+    162: 148,  # '¢'
+    163: 147,  # '£'
+    164: 146,  # '¤'
+    165: 145,  # '¥'
+    166: 144,  # '¦'
+    167: 100,  # '§'
+    168: 143,  # '¨'
+    169: 142,  # '©'
+    170: 141,  # 'ª'
+    171: 140,  # '«'
+    172: 139,  # '¬'
+    173: 138,  # '\xad'
+    174: 137,  # '®'
+    175: 136,  # '¯'
+    176: 94,  # '°'
+    177: 80,  # '±'
+    178: 93,  # '²'
+    179: 135,  # '³'
+    180: 105,  # '´'
+    181: 134,  # 'µ'
+    182: 133,  # '¶'
+    183: 63,  # '·'
+    184: 132,  # '¸'
+    185: 131,  # '¹'
+    186: 130,  # 'º'
+    187: 129,  # '»'
+    188: 128,  # '¼'
+    189: 127,  # '½'
+    190: 126,  # '¾'
+    191: 125,  # '¿'
+    192: 124,  # 'À'
+    193: 104,  # 'Á'
+    194: 73,  # 'Â'
+    195: 99,  # 'Ã'
+    196: 79,  # 'Ä'
+    197: 85,  # 'Å'
+    198: 123,  # 'Æ'
+    199: 54,  # 'Ç'
+    200: 122,  # 'È'
+    201: 98,  # 'É'
+    202: 92,  # 'Ê'
+    203: 121,  # 'Ë'
+    204: 120,  # 'Ì'
+    205: 91,  # 'Í'
+    206: 103,  # 'Î'
+    207: 119,  # 'Ï'
+    208: 68,  # 'Ğ'
+    209: 118,  # 'Ñ'
+    210: 117,  # 'Ò'
+    211: 97,  # 'Ó'
+    212: 116,  # 'Ô'
+    213: 115,  # 'Õ'
+    214: 50,  # 'Ö'
+    215: 90,  # '×'
+    216: 114,  # 'Ø'
+    217: 113,  # 'Ù'
+    218: 112,  # 'Ú'
+    219: 111,  # 'Û'
+    220: 55,  # 'Ü'
+    221: 41,  # 'İ'
+    222: 40,  # 'Ş'
+    223: 86,  # 'ß'
+    224: 89,  # 'à'
+    225: 70,  # 'á'
+    226: 59,  # 'â'
+    227: 78,  # 'ã'
+    228: 71,  # 'ä'
+    229: 82,  # 'å'
+    230: 88,  # 'æ'
+    231: 33,  # 'ç'
+    232: 77,  # 'è'
+    233: 66,  # 'é'
+    234: 84,  # 'ê'
+    235: 83,  # 'ë'
+    236: 110,  # 'ì'
+    237: 75,  # 'í'
+    238: 61,  # 'î'
+    239: 96,  # 'ï'
+    240: 30,  # 'ğ'
+    241: 67,  # 'ñ'
+    242: 109,  # 'ò'
+    243: 74,  # 'ó'
+    244: 87,  # 'ô'
+    245: 102,  # 'õ'
+    246: 34,  # 'ö'
+    247: 95,  # '÷'
+    248: 81,  # 'ø'
+    249: 108,  # 'ù'
+    250: 76,  # 'ú'
+    251: 72,  # 'û'
+    252: 17,  # 'ü'
+    253: 6,  # 'ı'
+    254: 19,  # 'ş'
+    255: 107,  # 'ÿ'
+}
+
+ISO_8859_9_TURKISH_MODEL = SingleByteCharSetModel(
+    charset_name="ISO-8859-9",
+    language="Turkish",
+    char_to_order_map=ISO_8859_9_TURKISH_CHAR_TO_ORDER,
+    language_model=TURKISH_LANG_MODEL,
+    typical_positive_ratio=0.97029,
+    keep_ascii_letters=True,
+    alphabet="ABCDEFGHIJKLMNOPRSTUVYZabcdefghijklmnoprstuvyzÂÇÎÖÛÜâçîöûüĞğİıŞş",
+)
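
The char-to-order map and the nested bigram table above are what the single-byte prober machinery consults when scoring candidate text. A minimal sketch of that consumption, assuming a simplified scoring loop (this is an illustration only, not the vendored SingleByteCharSetProber; the helper name, the 64-order letter window, and the ratio-based score are assumptions):

# Hypothetical helper: rate a byte string against the Turkish tables above.
# Each byte is mapped to its frequency order; adjacent pairs of frequent
# letters (order < 64) are looked up in the language model, whose values
# run from 0 (bigram never seen in training) to 3 (very common bigram).
def score_turkish(data: bytes) -> float:
    counts = [0, 0, 0, 0]  # tally per likelihood category 0..3
    last_order = 255       # 255 marks bytes absent from the training text
    for byte in data:
        order = ISO_8859_9_TURKISH_CHAR_TO_ORDER.get(byte, 255)
        if order < 64 and last_order < 64:
            likelihood = TURKISH_LANG_MODEL.get(last_order, {}).get(order, 0)
            counts[likelihood] += 1
        last_order = order
    total = sum(counts)
    return counts[3] / total if total else 0.0

A share of category-3 bigrams approaching the model's typical_positive_ratio (0.97029 here) suggests the input decodes plausibly as ISO-8859-9 Turkish.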
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/latin1prober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/latin1prober.py
new file mode 100644
index 0000000..59a01d9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/latin1prober.py
@@ -0,0 +1,147 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#   Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from typing import List, Union
+
+from .charsetprober import CharSetProber
+from .enums import ProbingState
+
+FREQ_CAT_NUM = 4
+
+UDF = 0  # undefined
+OTH = 1  # other
+ASC = 2  # ascii capital letter
+ASS = 3  # ascii small letter
+ACV = 4  # accent capital vowel
+ACO = 5  # accent capital other
+ASV = 6  # accent small vowel
+ASO = 7  # accent small other
+CLASS_NUM = 8  # total classes
+
+# fmt: off
+Latin1_CharToClass = (
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 00 - 07
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 08 - 0F
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 10 - 17
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 18 - 1F
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 20 - 27
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 28 - 2F
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 30 - 37
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 38 - 3F
+    OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC,   # 40 - 47
+    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,   # 48 - 4F
+    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,   # 50 - 57
+    ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH,   # 58 - 5F
+    OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS,   # 60 - 67
+    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,   # 68 - 6F
+    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,   # 70 - 77
+    ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH,   # 78 - 7F
+    OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH,   # 80 - 87
+    OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF,   # 88 - 8F
+    UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 90 - 97
+    OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO,   # 98 - 9F
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # A0 - A7
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # A8 - AF
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # B0 - B7
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # B8 - BF
+    ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO,   # C0 - C7
+    ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV,   # C8 - CF
+    ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH,   # D0 - D7
+    ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO,   # D8 - DF
+    ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO,   # E0 - E7
+    ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV,   # E8 - EF
+    ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH,   # F0 - F7
+    ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO,   # F8 - FF
+)
+
+# 0 : illegal
+# 1 : very unlikely
+# 2 : normal
+# 3 : very likely
+Latin1ClassModel = (
+# UDF OTH ASC ASS ACV ACO ASV ASO
+    0,  0,  0,  0,  0,  0,  0,  0,  # UDF
+    0,  3,  3,  3,  3,  3,  3,  3,  # OTH
+    0,  3,  3,  3,  3,  3,  3,  3,  # ASC
+    0,  3,  3,  3,  1,  1,  3,  3,  # ASS
+    0,  3,  3,  3,  1,  2,  1,  2,  # ACV
+    0,  3,  3,  3,  3,  3,  3,  3,  # ACO
+    0,  3,  1,  3,  1,  1,  1,  3,  # ASV
+    0,  3,  1,  3,  1,  1,  3,  3,  # ASO
+)
+# fmt: on
+
+
+class Latin1Prober(CharSetProber):
+    def __init__(self) -> None:
+        super().__init__()
+        self._last_char_class = OTH
+        self._freq_counter: List[int] = []
+        self.reset()
+
+    def reset(self) -> None:
+        self._last_char_class = OTH
+        self._freq_counter = [0] * FREQ_CAT_NUM
+        super().reset()
+
+    @property
+    def charset_name(self) -> str:
+        return "ISO-8859-1"
+
+    @property
+    def language(self) -> str:
+        return ""
+
+    def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState:
+        byte_str = self.remove_xml_tags(byte_str)
+        for c in byte_str:
+            char_class = Latin1_CharToClass[c]
+            freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM) + char_class]
+            if freq == 0:
+                self._state = ProbingState.NOT_ME
+                break
+            self._freq_counter[freq] += 1
+            self._last_char_class = char_class
+
+        return self.state
+
+    def get_confidence(self) -> float:
+        if self.state == ProbingState.NOT_ME:
+            return 0.01
+
+        total = sum(self._freq_counter)
+        confidence = (
+            0.0
+            if total < 0.01
+            else (self._freq_counter[3] - self._freq_counter[1] * 20.0) / total
+        )
+        confidence = max(confidence, 0.0)
+        # lower the confidence of latin1 so that other more accurate
+        # detector can take priority.
+        confidence *= 0.73
+        return confidence
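
Taken on its own, the prober above follows the usual chardet feed/confidence protocol. A minimal usage sketch (the import path assumes the standalone chardet distribution, which mirrors this vendored layout; the sample text is illustrative):

from chardet.latin1prober import Latin1Prober

# Feed Latin-1 encoded text and read back the deliberately damped confidence.
prober = Latin1Prober()
prober.feed("déjà vu, naïve café".encode("latin-1"))
print(prober.charset_name)                 # ISO-8859-1
print(round(prober.get_confidence(), 3))   # score already scaled by 0.73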
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/macromanprober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/macromanprober.py
new file mode 100644
index 0000000..1425d10
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/macromanprober.py
@@ -0,0 +1,162 @@
+######################## BEGIN LICENSE BLOCK ########################
+# This code was modified from latin1prober.py by Rob Speer.
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Rob Speer - adapt to MacRoman encoding
+#   Mark Pilgrim - port to Python
+#   Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from typing import List, Union
+
+from .charsetprober import CharSetProber
+from .enums import ProbingState
+
+FREQ_CAT_NUM = 4
+
+UDF = 0  # undefined
+OTH = 1  # other
+ASC = 2  # ascii capital letter
+ASS = 3  # ascii small letter
+ACV = 4  # accent capital vowel
+ACO = 5  # accent capital other
+ASV = 6  # accent small vowel
+ASO = 7  # accent small other
+ODD = 8  # character that is unlikely to appear
+CLASS_NUM = 9  # total classes
+
+# The change from Latin1 is that we explicitly look for extended characters
+# that are infrequently-occurring symbols, and consider them to always be
+# improbable. This should let MacRoman get out of the way of more likely
+# encodings in most situations.
+
+# fmt: off
+MacRoman_CharToClass = (
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 00 - 07
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 08 - 0F
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 10 - 17
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 18 - 1F
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 20 - 27
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 28 - 2F
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 30 - 37
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 38 - 3F
+    OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC,  # 40 - 47
+    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,  # 48 - 4F
+    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,  # 50 - 57
+    ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH,  # 58 - 5F
+    OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS,  # 60 - 67
+    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,  # 68 - 6F
+    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,  # 70 - 77
+    ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH,  # 78 - 7F
+    ACV, ACV, ACO, ACV, ACO, ACV, ACV, ASV,  # 80 - 87
+    ASV, ASV, ASV, ASV, ASV, ASO, ASV, ASV,  # 88 - 8F
+    ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASV,  # 90 - 97
+    ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV,  # 98 - 9F
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, ASO,  # A0 - A7
+    OTH, OTH, ODD, ODD, OTH, OTH, ACV, ACV,  # A8 - AF
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # B0 - B7
+    OTH, OTH, OTH, OTH, OTH, OTH, ASV, ASV,  # B8 - BF
+    OTH, OTH, ODD, OTH, ODD, OTH, OTH, OTH,  # C0 - C7
+    OTH, OTH, OTH, ACV, ACV, ACV, ACV, ASV,  # C8 - CF
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, ODD,  # D0 - D7
+    ASV, ACV, ODD, OTH, OTH, OTH, OTH, OTH,  # D8 - DF
+    OTH, OTH, OTH, OTH, OTH, ACV, ACV, ACV,  # E0 - E7
+    ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV,  # E8 - EF
+    ODD, ACV, ACV, ACV, ACV, ASV, ODD, ODD,  # F0 - F7
+    ODD, ODD, ODD, ODD, ODD, ODD, ODD, ODD,  # F8 - FF
+)
+
+# 0 : illegal
+# 1 : very unlikely
+# 2 : normal
+# 3 : very likely
+MacRomanClassModel = (
+# UDF OTH ASC ASS ACV ACO ASV ASO ODD
+    0,  0,  0,  0,  0,  0,  0,  0,  0,  # UDF
+    0,  3,  3,  3,  3,  3,  3,  3,  1,  # OTH
+    0,  3,  3,  3,  3,  3,  3,  3,  1,  # ASC
+    0,  3,  3,  3,  1,  1,  3,  3,  1,  # ASS
+    0,  3,  3,  3,  1,  2,  1,  2,  1,  # ACV
+    0,  3,  3,  3,  3,  3,  3,  3,  1,  # ACO
+    0,  3,  1,  3,  1,  1,  1,  3,  1,  # ASV
+    0,  3,  1,  3,  1,  1,  3,  3,  1,  # ASO
+    0,  1,  1,  1,  1,  1,  1,  1,  1,  # ODD
+)
+# fmt: on
+
+
+class MacRomanProber(CharSetProber):
+    def __init__(self) -> None:
+        super().__init__()
+        self._last_char_class = OTH
+        self._freq_counter: List[int] = []
+        self.reset()
+
+    def reset(self) -> None:
+        self._last_char_class = OTH
+        self._freq_counter = [0] * FREQ_CAT_NUM
+
+        # express the prior that MacRoman is a somewhat rare encoding;
+        # this can be done by starting out in a slightly improbable state
+        # that must be overcome
+        self._freq_counter[2] = 10
+
+        super().reset()
+
+    @property
+    def charset_name(self) -> str:
+        return "MacRoman"
+
+    @property
+    def language(self) -> str:
+        return ""
+
+    def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState:
+        byte_str = self.remove_xml_tags(byte_str)
+        for c in byte_str:
+            char_class = MacRoman_CharToClass[c]
+            freq = MacRomanClassModel[(self._last_char_class * CLASS_NUM) + char_class]
+            if freq == 0:
+                self._state = ProbingState.NOT_ME
+                break
+            self._freq_counter[freq] += 1
+            self._last_char_class = char_class
+
+        return self.state
+
+    def get_confidence(self) -> float:
+        if self.state == ProbingState.NOT_ME:
+            return 0.01
+
+        total = sum(self._freq_counter)
+        confidence = (
+            0.0
+            if total < 0.01
+            else (self._freq_counter[3] - self._freq_counter[1] * 20.0) / total
+        )
+        confidence = max(confidence, 0.0)
+        # Lower the confidence of MacRoman so that other, more accurate
+        # detectors can take priority.
+        confidence *= 0.73
+        return confidence
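+
+# Example usage (illustrative sketch only; in practice this prober is driven
+# by chardet's higher-level detection machinery rather than called directly):
+#
+#     prober = MacRomanProber()
+#     prober.feed(b"caf\x8e latte")  # 0x8E is "e with acute accent" in MacRoman
+#     print(prober.charset_name, prober.get_confidence())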
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/mbcharsetprober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/mbcharsetprober.py
new file mode 100644
index 0000000..666307e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/mbcharsetprober.py
@@ -0,0 +1,95 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#   Shy Shalom - original C code
+#   Proofpoint, Inc.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from typing import Optional, Union
+
+from .chardistribution import CharDistributionAnalysis
+from .charsetprober import CharSetProber
+from .codingstatemachine import CodingStateMachine
+from .enums import LanguageFilter, MachineState, ProbingState
+
+
+class MultiByteCharSetProber(CharSetProber):
+    """
+    Base class for multi-byte charset probers that combine a coding state
+    machine with a character distribution analyzer.
+    """
+
+    def __init__(self, lang_filter: LanguageFilter = LanguageFilter.NONE) -> None:
+        super().__init__(lang_filter=lang_filter)
+        self.distribution_analyzer: Optional[CharDistributionAnalysis] = None
+        self.coding_sm: Optional[CodingStateMachine] = None
+        self._last_char = bytearray(b"\0\0")
+
+    def reset(self) -> None:
+        super().reset()
+        if self.coding_sm:
+            self.coding_sm.reset()
+        if self.distribution_analyzer:
+            self.distribution_analyzer.reset()
+        self._last_char = bytearray(b"\0\0")
+
+    def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState:
+        assert self.coding_sm is not None
+        assert self.distribution_analyzer is not None
+
+        for i, byte in enumerate(byte_str):
+            coding_state = self.coding_sm.next_state(byte)
+            if coding_state == MachineState.ERROR:
+                self.logger.debug(
+                    "%s %s prober hit error at byte %s",
+                    self.charset_name,
+                    self.language,
+                    i,
+                )
+                self._state = ProbingState.NOT_ME
+                break
+            if coding_state == MachineState.ITS_ME:
+                self._state = ProbingState.FOUND_IT
+                break
+            if coding_state == MachineState.START:
+                char_len = self.coding_sm.get_current_charlen()
+                if i == 0:
+                    self._last_char[1] = byte
+                    self.distribution_analyzer.feed(self._last_char, char_len)
+                else:
+                    self.distribution_analyzer.feed(byte_str[i - 1 : i + 1], char_len)
+
+        self._last_char[0] = byte_str[-1]
+
+        if self.state == ProbingState.DETECTING:
+            if self.distribution_analyzer.got_enough_data() and (
+                self.get_confidence() > self.SHORTCUT_THRESHOLD
+            ):
+                self._state = ProbingState.FOUND_IT
+
+        return self.state
+
+    def get_confidence(self) -> float:
+        assert self.distribution_analyzer is not None
+        return self.distribution_analyzer.get_confidence()
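+
+# Concrete multi-byte probers typically just wire in a coding state machine
+# and a matching distribution analyzer; roughly (illustrative sketch with
+# hypothetical names, not the verbatim subclass code):
+#
+#     class SomeMBCSProber(MultiByteCharSetProber):
+#         def __init__(self) -> None:
+#             super().__init__()
+#             self.coding_sm = CodingStateMachine(SOME_SM_MODEL)
+#             self.distribution_analyzer = SomeDistributionAnalysis()
+#             self.reset()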
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/mbcsgroupprober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/mbcsgroupprober.py
new file mode 100644
index 0000000..6cb9cc7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/mbcsgroupprober.py
@@ -0,0 +1,57 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#   Shy Shalom - original C code
+#   Proofpoint, Inc.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .big5prober import Big5Prober
+from .charsetgroupprober import CharSetGroupProber
+from .cp949prober import CP949Prober
+from .enums import LanguageFilter
+from .eucjpprober import EUCJPProber
+from .euckrprober import EUCKRProber
+from .euctwprober import EUCTWProber
+from .gb2312prober import GB2312Prober
+from .johabprober import JOHABProber
+from .sjisprober import SJISProber
+from .utf8prober import UTF8Prober
+
+
+class MBCSGroupProber(CharSetGroupProber):
+    def __init__(self, lang_filter: LanguageFilter = LanguageFilter.NONE) -> None:
+        super().__init__(lang_filter=lang_filter)
+        self.probers = [
+            UTF8Prober(),
+            SJISProber(),
+            EUCJPProber(),
+            GB2312Prober(),
+            EUCKRProber(),
+            CP949Prober(),
+            Big5Prober(),
+            EUCTWProber(),
+            JOHABProber(),
+        ]
+        self.reset()
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/mbcssm.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/mbcssm.py
new file mode 100644
index 0000000..7bbe97e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/mbcssm.py
@@ -0,0 +1,661 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .codingstatemachinedict import CodingStateMachineDict
+from .enums import MachineState
+
+# BIG5
+
+# fmt: off
+BIG5_CLS = (
+    1, 1, 1, 1, 1, 1, 1, 1,  # 00 - 07    # allow 0x00 as a legal value
+    1, 1, 1, 1, 1, 1, 0, 0,  # 08 - 0f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 10 - 17
+    1, 1, 1, 0, 1, 1, 1, 1,  # 18 - 1f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 20 - 27
+    1, 1, 1, 1, 1, 1, 1, 1,  # 28 - 2f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 30 - 37
+    1, 1, 1, 1, 1, 1, 1, 1,  # 38 - 3f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 40 - 47
+    2, 2, 2, 2, 2, 2, 2, 2,  # 48 - 4f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 50 - 57
+    2, 2, 2, 2, 2, 2, 2, 2,  # 58 - 5f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 60 - 67
+    2, 2, 2, 2, 2, 2, 2, 2,  # 68 - 6f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 70 - 77
+    2, 2, 2, 2, 2, 2, 2, 1,  # 78 - 7f
+    4, 4, 4, 4, 4, 4, 4, 4,  # 80 - 87
+    4, 4, 4, 4, 4, 4, 4, 4,  # 88 - 8f
+    4, 4, 4, 4, 4, 4, 4, 4,  # 90 - 97
+    4, 4, 4, 4, 4, 4, 4, 4,  # 98 - 9f
+    4, 3, 3, 3, 3, 3, 3, 3,  # a0 - a7
+    3, 3, 3, 3, 3, 3, 3, 3,  # a8 - af
+    3, 3, 3, 3, 3, 3, 3, 3,  # b0 - b7
+    3, 3, 3, 3, 3, 3, 3, 3,  # b8 - bf
+    3, 3, 3, 3, 3, 3, 3, 3,  # c0 - c7
+    3, 3, 3, 3, 3, 3, 3, 3,  # c8 - cf
+    3, 3, 3, 3, 3, 3, 3, 3,  # d0 - d7
+    3, 3, 3, 3, 3, 3, 3, 3,  # d8 - df
+    3, 3, 3, 3, 3, 3, 3, 3,  # e0 - e7
+    3, 3, 3, 3, 3, 3, 3, 3,  # e8 - ef
+    3, 3, 3, 3, 3, 3, 3, 3,  # f0 - f7
+    3, 3, 3, 3, 3, 3, 3, 0  # f8 - ff
+)
+
+BIG5_ST = (
+    MachineState.ERROR,MachineState.START,MachineState.START,     3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
+    MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f
+    MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17
+)
+# fmt: on
+
+BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0)
+
+BIG5_SM_MODEL: CodingStateMachineDict = {
+    "class_table": BIG5_CLS,
+    "class_factor": 5,
+    "state_table": BIG5_ST,
+    "char_len_table": BIG5_CHAR_LEN_TABLE,
+    "name": "Big5",
+}
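+
+# How these model dicts are consumed (rough sketch of the lookup performed by
+# CodingStateMachine.next_state for each input byte):
+#
+#     byte_class = model["class_table"][byte]
+#     curr_state = model["state_table"][curr_state * model["class_factor"] + byte_class]
+#
+# char_len_table maps the class of a character's first byte to the expected
+# total byte length of that character.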
+
+# CP949
+# fmt: off
+CP949_CLS  = (
+    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,  # 00 - 0f
+    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1,  # 10 - 1f
+    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,  # 20 - 2f
+    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,  # 30 - 3f
+    1, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,  # 40 - 4f
+    4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 1, 1, 1, 1, 1,  # 50 - 5f
+    1, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,  # 60 - 6f
+    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 1, 1, 1, 1, 1,  # 70 - 7f
+    0, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,  # 80 - 8f
+    6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,  # 90 - 9f
+    6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 8, 8, 8,  # a0 - af
+    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,  # b0 - bf
+    7, 7, 7, 7, 7, 7, 9, 2, 2, 3, 2, 2, 2, 2, 2, 2,  # c0 - cf
+    2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,  # d0 - df
+    2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,  # e0 - ef
+    2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0,  # f0 - ff
+)
+
+CP949_ST = (
+#cls=    0      1      2      3      4      5      6      7      8      9  # previous state =
+    MachineState.ERROR,MachineState.START,     3,MachineState.ERROR,MachineState.START,MachineState.START,     4,     5,MachineState.ERROR,     6, # MachineState.START
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR
+    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME
+    MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3
+    MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4
+    MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5
+    MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6
+)
+# fmt: on
+
+CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2)
+
+CP949_SM_MODEL: CodingStateMachineDict = {
+    "class_table": CP949_CLS,
+    "class_factor": 10,
+    "state_table": CP949_ST,
+    "char_len_table": CP949_CHAR_LEN_TABLE,
+    "name": "CP949",
+}
+
+# EUC-JP
+# fmt: off
+EUCJP_CLS = (
+    4, 4, 4, 4, 4, 4, 4, 4,  # 00 - 07
+    4, 4, 4, 4, 4, 4, 5, 5,  # 08 - 0f
+    4, 4, 4, 4, 4, 4, 4, 4,  # 10 - 17
+    4, 4, 4, 5, 4, 4, 4, 4,  # 18 - 1f
+    4, 4, 4, 4, 4, 4, 4, 4,  # 20 - 27
+    4, 4, 4, 4, 4, 4, 4, 4,  # 28 - 2f
+    4, 4, 4, 4, 4, 4, 4, 4,  # 30 - 37
+    4, 4, 4, 4, 4, 4, 4, 4,  # 38 - 3f
+    4, 4, 4, 4, 4, 4, 4, 4,  # 40 - 47
+    4, 4, 4, 4, 4, 4, 4, 4,  # 48 - 4f
+    4, 4, 4, 4, 4, 4, 4, 4,  # 50 - 57
+    4, 4, 4, 4, 4, 4, 4, 4,  # 58 - 5f
+    4, 4, 4, 4, 4, 4, 4, 4,  # 60 - 67
+    4, 4, 4, 4, 4, 4, 4, 4,  # 68 - 6f
+    4, 4, 4, 4, 4, 4, 4, 4,  # 70 - 77
+    4, 4, 4, 4, 4, 4, 4, 4,  # 78 - 7f
+    5, 5, 5, 5, 5, 5, 5, 5,  # 80 - 87
+    5, 5, 5, 5, 5, 5, 1, 3,  # 88 - 8f
+    5, 5, 5, 5, 5, 5, 5, 5,  # 90 - 97
+    5, 5, 5, 5, 5, 5, 5, 5,  # 98 - 9f
+    5, 2, 2, 2, 2, 2, 2, 2,  # a0 - a7
+    2, 2, 2, 2, 2, 2, 2, 2,  # a8 - af
+    2, 2, 2, 2, 2, 2, 2, 2,  # b0 - b7
+    2, 2, 2, 2, 2, 2, 2, 2,  # b8 - bf
+    2, 2, 2, 2, 2, 2, 2, 2,  # c0 - c7
+    2, 2, 2, 2, 2, 2, 2, 2,  # c8 - cf
+    2, 2, 2, 2, 2, 2, 2, 2,  # d0 - d7
+    2, 2, 2, 2, 2, 2, 2, 2,  # d8 - df
+    0, 0, 0, 0, 0, 0, 0, 0,  # e0 - e7
+    0, 0, 0, 0, 0, 0, 0, 0,  # e8 - ef
+    0, 0, 0, 0, 0, 0, 0, 0,  # f0 - f7
+    0, 0, 0, 0, 0, 0, 0, 5  # f8 - ff
+)
+
+EUCJP_ST = (
+          3,     4,     3,     5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
+     MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
+     MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17
+     MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     3,MachineState.ERROR,#18-1f
+          3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27
+)
+# fmt: on
+
+EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0)
+
+EUCJP_SM_MODEL: CodingStateMachineDict = {
+    "class_table": EUCJP_CLS,
+    "class_factor": 6,
+    "state_table": EUCJP_ST,
+    "char_len_table": EUCJP_CHAR_LEN_TABLE,
+    "name": "EUC-JP",
+}
+
+# EUC-KR
+# fmt: off
+EUCKR_CLS  = (
+    1, 1, 1, 1, 1, 1, 1, 1,  # 00 - 07
+    1, 1, 1, 1, 1, 1, 0, 0,  # 08 - 0f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 10 - 17
+    1, 1, 1, 0, 1, 1, 1, 1,  # 18 - 1f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 20 - 27
+    1, 1, 1, 1, 1, 1, 1, 1,  # 28 - 2f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 30 - 37
+    1, 1, 1, 1, 1, 1, 1, 1,  # 38 - 3f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 40 - 47
+    1, 1, 1, 1, 1, 1, 1, 1,  # 48 - 4f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 50 - 57
+    1, 1, 1, 1, 1, 1, 1, 1,  # 58 - 5f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 60 - 67
+    1, 1, 1, 1, 1, 1, 1, 1,  # 68 - 6f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 70 - 77
+    1, 1, 1, 1, 1, 1, 1, 1,  # 78 - 7f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 80 - 87
+    0, 0, 0, 0, 0, 0, 0, 0,  # 88 - 8f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 90 - 97
+    0, 0, 0, 0, 0, 0, 0, 0,  # 98 - 9f
+    0, 2, 2, 2, 2, 2, 2, 2,  # a0 - a7
+    2, 2, 2, 2, 2, 3, 3, 3,  # a8 - af
+    2, 2, 2, 2, 2, 2, 2, 2,  # b0 - b7
+    2, 2, 2, 2, 2, 2, 2, 2,  # b8 - bf
+    2, 2, 2, 2, 2, 2, 2, 2,  # c0 - c7
+    2, 3, 2, 2, 2, 2, 2, 2,  # c8 - cf
+    2, 2, 2, 2, 2, 2, 2, 2,  # d0 - d7
+    2, 2, 2, 2, 2, 2, 2, 2,  # d8 - df
+    2, 2, 2, 2, 2, 2, 2, 2,  # e0 - e7
+    2, 2, 2, 2, 2, 2, 2, 2,  # e8 - ef
+    2, 2, 2, 2, 2, 2, 2, 2,  # f0 - f7
+    2, 2, 2, 2, 2, 2, 2, 0   # f8 - ff
+)
+
+EUCKR_ST = (
+    MachineState.ERROR,MachineState.START,     3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
+    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f
+)
+# fmt: on
+
+EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0)
+
+EUCKR_SM_MODEL: CodingStateMachineDict = {
+    "class_table": EUCKR_CLS,
+    "class_factor": 4,
+    "state_table": EUCKR_ST,
+    "char_len_table": EUCKR_CHAR_LEN_TABLE,
+    "name": "EUC-KR",
+}
+
+# JOHAB
+# fmt: off
+JOHAB_CLS = (
+    4,4,4,4,4,4,4,4,  # 00 - 07
+    4,4,4,4,4,4,0,0,  # 08 - 0f
+    4,4,4,4,4,4,4,4,  # 10 - 17
+    4,4,4,0,4,4,4,4,  # 18 - 1f
+    4,4,4,4,4,4,4,4,  # 20 - 27
+    4,4,4,4,4,4,4,4,  # 28 - 2f
+    4,3,3,3,3,3,3,3,  # 30 - 37
+    3,3,3,3,3,3,3,3,  # 38 - 3f
+    3,1,1,1,1,1,1,1,  # 40 - 47
+    1,1,1,1,1,1,1,1,  # 48 - 4f
+    1,1,1,1,1,1,1,1,  # 50 - 57
+    1,1,1,1,1,1,1,1,  # 58 - 5f
+    1,1,1,1,1,1,1,1,  # 60 - 67
+    1,1,1,1,1,1,1,1,  # 68 - 6f
+    1,1,1,1,1,1,1,1,  # 70 - 77
+    1,1,1,1,1,1,1,2,  # 78 - 7f
+    6,6,6,6,8,8,8,8,  # 80 - 87
+    8,8,8,8,8,8,8,8,  # 88 - 8f
+    8,7,7,7,7,7,7,7,  # 90 - 97
+    7,7,7,7,7,7,7,7,  # 98 - 9f
+    7,7,7,7,7,7,7,7,  # a0 - a7
+    7,7,7,7,7,7,7,7,  # a8 - af
+    7,7,7,7,7,7,7,7,  # b0 - b7
+    7,7,7,7,7,7,7,7,  # b8 - bf
+    7,7,7,7,7,7,7,7,  # c0 - c7
+    7,7,7,7,7,7,7,7,  # c8 - cf
+    7,7,7,7,5,5,5,5,  # d0 - d7
+    5,9,9,9,9,9,9,5,  # d8 - df
+    9,9,9,9,9,9,9,9,  # e0 - e7
+    9,9,9,9,9,9,9,9,  # e8 - ef
+    9,9,9,9,9,9,9,9,  # f0 - f7
+    9,9,5,5,5,5,5,0   # f8 - ff
+)
+
+JOHAB_ST = (
+# cls = 0                   1                   2                   3                   4                   5                   6                   7                   8                   9
+    MachineState.ERROR ,MachineState.START ,MachineState.START ,MachineState.START ,MachineState.START ,MachineState.ERROR ,MachineState.ERROR ,3                  ,3                  ,4                  ,  # MachineState.START
+    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,  # MachineState.ITS_ME
+    MachineState.ERROR ,MachineState.ERROR ,MachineState.ERROR ,MachineState.ERROR ,MachineState.ERROR ,MachineState.ERROR ,MachineState.ERROR ,MachineState.ERROR ,MachineState.ERROR ,MachineState.ERROR ,  # MachineState.ERROR
+    MachineState.ERROR ,MachineState.START ,MachineState.START ,MachineState.ERROR ,MachineState.ERROR ,MachineState.START ,MachineState.START ,MachineState.START ,MachineState.START ,MachineState.START ,  # 3
+    MachineState.ERROR ,MachineState.START ,MachineState.ERROR ,MachineState.START ,MachineState.ERROR ,MachineState.START ,MachineState.ERROR ,MachineState.START ,MachineState.ERROR ,MachineState.START ,  # 4
+)
+# fmt: on
+
+JOHAB_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 0, 0, 2, 2, 2)
+
+JOHAB_SM_MODEL: CodingStateMachineDict = {
+    "class_table": JOHAB_CLS,
+    "class_factor": 10,
+    "state_table": JOHAB_ST,
+    "char_len_table": JOHAB_CHAR_LEN_TABLE,
+    "name": "Johab",
+}
+
+# EUC-TW
+# fmt: off
+EUCTW_CLS = (
+    2, 2, 2, 2, 2, 2, 2, 2,  # 00 - 07
+    2, 2, 2, 2, 2, 2, 0, 0,  # 08 - 0f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 10 - 17
+    2, 2, 2, 0, 2, 2, 2, 2,  # 18 - 1f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 20 - 27
+    2, 2, 2, 2, 2, 2, 2, 2,  # 28 - 2f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 30 - 37
+    2, 2, 2, 2, 2, 2, 2, 2,  # 38 - 3f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 40 - 47
+    2, 2, 2, 2, 2, 2, 2, 2,  # 48 - 4f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 50 - 57
+    2, 2, 2, 2, 2, 2, 2, 2,  # 58 - 5f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 60 - 67
+    2, 2, 2, 2, 2, 2, 2, 2,  # 68 - 6f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 70 - 77
+    2, 2, 2, 2, 2, 2, 2, 2,  # 78 - 7f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 80 - 87
+    0, 0, 0, 0, 0, 0, 6, 0,  # 88 - 8f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 90 - 97
+    0, 0, 0, 0, 0, 0, 0, 0,  # 98 - 9f
+    0, 3, 4, 4, 4, 4, 4, 4,  # a0 - a7
+    5, 5, 1, 1, 1, 1, 1, 1,  # a8 - af
+    1, 1, 1, 1, 1, 1, 1, 1,  # b0 - b7
+    1, 1, 1, 1, 1, 1, 1, 1,  # b8 - bf
+    1, 1, 3, 1, 3, 3, 3, 3,  # c0 - c7
+    3, 3, 3, 3, 3, 3, 3, 3,  # c8 - cf
+    3, 3, 3, 3, 3, 3, 3, 3,  # d0 - d7
+    3, 3, 3, 3, 3, 3, 3, 3,  # d8 - df
+    3, 3, 3, 3, 3, 3, 3, 3,  # e0 - e7
+    3, 3, 3, 3, 3, 3, 3, 3,  # e8 - ef
+    3, 3, 3, 3, 3, 3, 3, 3,  # f0 - f7
+    3, 3, 3, 3, 3, 3, 3, 0   # f8 - ff
+)
+
+EUCTW_ST = (
+    MachineState.ERROR,MachineState.ERROR,MachineState.START,     3,     3,     3,     4,MachineState.ERROR,#00-07
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
+    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17
+    MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f
+         5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27
+    MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f
+)
+# fmt: on
+
+EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3)
+
+EUCTW_SM_MODEL: CodingStateMachineDict = {
+    "class_table": EUCTW_CLS,
+    "class_factor": 7,
+    "state_table": EUCTW_ST,
+    "char_len_table": EUCTW_CHAR_LEN_TABLE,
+    "name": "x-euc-tw",
+}
+
+# GB2312
+# fmt: off
+GB2312_CLS = (
+    1, 1, 1, 1, 1, 1, 1, 1,  # 00 - 07
+    1, 1, 1, 1, 1, 1, 0, 0,  # 08 - 0f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 10 - 17
+    1, 1, 1, 0, 1, 1, 1, 1,  # 18 - 1f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 20 - 27
+    1, 1, 1, 1, 1, 1, 1, 1,  # 28 - 2f
+    3, 3, 3, 3, 3, 3, 3, 3,  # 30 - 37
+    3, 3, 1, 1, 1, 1, 1, 1,  # 38 - 3f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 40 - 47
+    2, 2, 2, 2, 2, 2, 2, 2,  # 48 - 4f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 50 - 57
+    2, 2, 2, 2, 2, 2, 2, 2,  # 58 - 5f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 60 - 67
+    2, 2, 2, 2, 2, 2, 2, 2,  # 68 - 6f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 70 - 77
+    2, 2, 2, 2, 2, 2, 2, 4,  # 78 - 7f
+    5, 6, 6, 6, 6, 6, 6, 6,  # 80 - 87
+    6, 6, 6, 6, 6, 6, 6, 6,  # 88 - 8f
+    6, 6, 6, 6, 6, 6, 6, 6,  # 90 - 97
+    6, 6, 6, 6, 6, 6, 6, 6,  # 98 - 9f
+    6, 6, 6, 6, 6, 6, 6, 6,  # a0 - a7
+    6, 6, 6, 6, 6, 6, 6, 6,  # a8 - af
+    6, 6, 6, 6, 6, 6, 6, 6,  # b0 - b7
+    6, 6, 6, 6, 6, 6, 6, 6,  # b8 - bf
+    6, 6, 6, 6, 6, 6, 6, 6,  # c0 - c7
+    6, 6, 6, 6, 6, 6, 6, 6,  # c8 - cf
+    6, 6, 6, 6, 6, 6, 6, 6,  # d0 - d7
+    6, 6, 6, 6, 6, 6, 6, 6,  # d8 - df
+    6, 6, 6, 6, 6, 6, 6, 6,  # e0 - e7
+    6, 6, 6, 6, 6, 6, 6, 6,  # e8 - ef
+    6, 6, 6, 6, 6, 6, 6, 6,  # f0 - f7
+    6, 6, 6, 6, 6, 6, 6, 0   # f8 - ff
+)
+
+GB2312_ST = (
+    MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,     3,MachineState.ERROR,#00-07
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
+    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17
+         4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f
+    MachineState.ERROR,MachineState.ERROR,     5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27
+    MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f
+)
+# fmt: on
+
+# Strictly speaking, the length of class 6 can be either 2 or 4, but it is
+# not necessary to discriminate between the two: the value is used for
+# frequency analysis only, and each code range is validated there as well,
+# so it is safe to set it to 2 here.
+GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2)
+
+GB2312_SM_MODEL: CodingStateMachineDict = {
+    "class_table": GB2312_CLS,
+    "class_factor": 7,
+    "state_table": GB2312_ST,
+    "char_len_table": GB2312_CHAR_LEN_TABLE,
+    "name": "GB2312",
+}
+
+# Shift_JIS
+# fmt: off
+SJIS_CLS = (
+    1, 1, 1, 1, 1, 1, 1, 1,  # 00 - 07
+    1, 1, 1, 1, 1, 1, 0, 0,  # 08 - 0f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 10 - 17
+    1, 1, 1, 0, 1, 1, 1, 1,  # 18 - 1f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 20 - 27
+    1, 1, 1, 1, 1, 1, 1, 1,  # 28 - 2f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 30 - 37
+    1, 1, 1, 1, 1, 1, 1, 1,  # 38 - 3f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 40 - 47
+    2, 2, 2, 2, 2, 2, 2, 2,  # 48 - 4f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 50 - 57
+    2, 2, 2, 2, 2, 2, 2, 2,  # 58 - 5f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 60 - 67
+    2, 2, 2, 2, 2, 2, 2, 2,  # 68 - 6f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 70 - 77
+    2, 2, 2, 2, 2, 2, 2, 1,  # 78 - 7f
+    3, 3, 3, 3, 3, 2, 2, 3,  # 80 - 87
+    3, 3, 3, 3, 3, 3, 3, 3,  # 88 - 8f
+    3, 3, 3, 3, 3, 3, 3, 3,  # 90 - 97
+    3, 3, 3, 3, 3, 3, 3, 3,  # 98 - 9f
+    # 0xa0 is illegal in Shift_JIS encoding, but some pages do contain
+    # such bytes, so we need to be more forgiving of errors.
+    2, 2, 2, 2, 2, 2, 2, 2,  # a0 - a7
+    2, 2, 2, 2, 2, 2, 2, 2,  # a8 - af
+    2, 2, 2, 2, 2, 2, 2, 2,  # b0 - b7
+    2, 2, 2, 2, 2, 2, 2, 2,  # b8 - bf
+    2, 2, 2, 2, 2, 2, 2, 2,  # c0 - c7
+    2, 2, 2, 2, 2, 2, 2, 2,  # c8 - cf
+    2, 2, 2, 2, 2, 2, 2, 2,  # d0 - d7
+    2, 2, 2, 2, 2, 2, 2, 2,  # d8 - df
+    3, 3, 3, 3, 3, 3, 3, 3,  # e0 - e7
+    3, 3, 3, 3, 3, 4, 4, 4,  # e8 - ef
+    3, 3, 3, 3, 3, 3, 3, 3,  # f0 - f7
+    3, 3, 3, 3, 3, 0, 0, 0,  # f8 - ff
+)
+
+SJIS_ST = (
+    MachineState.ERROR,MachineState.START,MachineState.START,     3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
+    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17
+)
+# fmt: on
+
+SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0)
+
+SJIS_SM_MODEL: CodingStateMachineDict = {
+    "class_table": SJIS_CLS,
+    "class_factor": 6,
+    "state_table": SJIS_ST,
+    "char_len_table": SJIS_CHAR_LEN_TABLE,
+    "name": "Shift_JIS",
+}
+
+# UCS2-BE
+# fmt: off
+UCS2BE_CLS = (
+    0, 0, 0, 0, 0, 0, 0, 0,  # 00 - 07
+    0, 0, 1, 0, 0, 2, 0, 0,  # 08 - 0f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 10 - 17
+    0, 0, 0, 3, 0, 0, 0, 0,  # 18 - 1f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 20 - 27
+    0, 3, 3, 3, 3, 3, 0, 0,  # 28 - 2f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 30 - 37
+    0, 0, 0, 0, 0, 0, 0, 0,  # 38 - 3f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 40 - 47
+    0, 0, 0, 0, 0, 0, 0, 0,  # 48 - 4f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 50 - 57
+    0, 0, 0, 0, 0, 0, 0, 0,  # 58 - 5f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 60 - 67
+    0, 0, 0, 0, 0, 0, 0, 0,  # 68 - 6f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 70 - 77
+    0, 0, 0, 0, 0, 0, 0, 0,  # 78 - 7f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 80 - 87
+    0, 0, 0, 0, 0, 0, 0, 0,  # 88 - 8f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 90 - 97
+    0, 0, 0, 0, 0, 0, 0, 0,  # 98 - 9f
+    0, 0, 0, 0, 0, 0, 0, 0,  # a0 - a7
+    0, 0, 0, 0, 0, 0, 0, 0,  # a8 - af
+    0, 0, 0, 0, 0, 0, 0, 0,  # b0 - b7
+    0, 0, 0, 0, 0, 0, 0, 0,  # b8 - bf
+    0, 0, 0, 0, 0, 0, 0, 0,  # c0 - c7
+    0, 0, 0, 0, 0, 0, 0, 0,  # c8 - cf
+    0, 0, 0, 0, 0, 0, 0, 0,  # d0 - d7
+    0, 0, 0, 0, 0, 0, 0, 0,  # d8 - df
+    0, 0, 0, 0, 0, 0, 0, 0,  # e0 - e7
+    0, 0, 0, 0, 0, 0, 0, 0,  # e8 - ef
+    0, 0, 0, 0, 0, 0, 0, 0,  # f0 - f7
+    0, 0, 0, 0, 0, 0, 4, 5   # f8 - ff
+)
+
+UCS2BE_ST  = (
+          5,     7,     7,MachineState.ERROR,     4,     3,MachineState.ERROR,MachineState.ERROR,#00-07
+     MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
+     MachineState.ITS_ME,MachineState.ITS_ME,     6,     6,     6,     6,MachineState.ERROR,MachineState.ERROR,#10-17
+          6,     6,     6,     6,     6,MachineState.ITS_ME,     6,     6,#18-1f
+          6,     6,     6,     6,     5,     7,     7,MachineState.ERROR,#20-27
+          5,     8,     6,     6,MachineState.ERROR,     6,     6,     6,#28-2f
+          6,     6,     6,     6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37
+)
+# fmt: on
+
+UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2)
+
+UCS2BE_SM_MODEL: CodingStateMachineDict = {
+    "class_table": UCS2BE_CLS,
+    "class_factor": 6,
+    "state_table": UCS2BE_ST,
+    "char_len_table": UCS2BE_CHAR_LEN_TABLE,
+    "name": "UTF-16BE",
+}
+
+# UCS2-LE
+# fmt: off
+UCS2LE_CLS = (
+    0, 0, 0, 0, 0, 0, 0, 0,  # 00 - 07
+    0, 0, 1, 0, 0, 2, 0, 0,  # 08 - 0f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 10 - 17
+    0, 0, 0, 3, 0, 0, 0, 0,  # 18 - 1f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 20 - 27
+    0, 3, 3, 3, 3, 3, 0, 0,  # 28 - 2f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 30 - 37
+    0, 0, 0, 0, 0, 0, 0, 0,  # 38 - 3f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 40 - 47
+    0, 0, 0, 0, 0, 0, 0, 0,  # 48 - 4f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 50 - 57
+    0, 0, 0, 0, 0, 0, 0, 0,  # 58 - 5f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 60 - 67
+    0, 0, 0, 0, 0, 0, 0, 0,  # 68 - 6f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 70 - 77
+    0, 0, 0, 0, 0, 0, 0, 0,  # 78 - 7f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 80 - 87
+    0, 0, 0, 0, 0, 0, 0, 0,  # 88 - 8f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 90 - 97
+    0, 0, 0, 0, 0, 0, 0, 0,  # 98 - 9f
+    0, 0, 0, 0, 0, 0, 0, 0,  # a0 - a7
+    0, 0, 0, 0, 0, 0, 0, 0,  # a8 - af
+    0, 0, 0, 0, 0, 0, 0, 0,  # b0 - b7
+    0, 0, 0, 0, 0, 0, 0, 0,  # b8 - bf
+    0, 0, 0, 0, 0, 0, 0, 0,  # c0 - c7
+    0, 0, 0, 0, 0, 0, 0, 0,  # c8 - cf
+    0, 0, 0, 0, 0, 0, 0, 0,  # d0 - d7
+    0, 0, 0, 0, 0, 0, 0, 0,  # d8 - df
+    0, 0, 0, 0, 0, 0, 0, 0,  # e0 - e7
+    0, 0, 0, 0, 0, 0, 0, 0,  # e8 - ef
+    0, 0, 0, 0, 0, 0, 0, 0,  # f0 - f7
+    0, 0, 0, 0, 0, 0, 4, 5   # f8 - ff
+)
+
+UCS2LE_ST = (
+          6,     6,     7,     6,     4,     3,MachineState.ERROR,MachineState.ERROR,#00-07
+     MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
+     MachineState.ITS_ME,MachineState.ITS_ME,     5,     5,     5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17
+          5,     5,     5,MachineState.ERROR,     5,MachineState.ERROR,     6,     6,#18-1f
+          7,     6,     8,     8,     5,     5,     5,MachineState.ERROR,#20-27
+          5,     5,     5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     5,     5,#28-2f
+          5,     5,     5,MachineState.ERROR,     5,MachineState.ERROR,MachineState.START,MachineState.START #30-37
+)
+# fmt: on
+
+UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2)
+
+UCS2LE_SM_MODEL: CodingStateMachineDict = {
+    "class_table": UCS2LE_CLS,
+    "class_factor": 6,
+    "state_table": UCS2LE_ST,
+    "char_len_table": UCS2LE_CHAR_LEN_TABLE,
+    "name": "UTF-16LE",
+}
+
+# UTF-8
+# fmt: off
+UTF8_CLS = (
+    1, 1, 1, 1, 1, 1, 1, 1,  # 00 - 07  #allow 0x00 as a legal value
+    1, 1, 1, 1, 1, 1, 0, 0,  # 08 - 0f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 10 - 17
+    1, 1, 1, 0, 1, 1, 1, 1,  # 18 - 1f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 20 - 27
+    1, 1, 1, 1, 1, 1, 1, 1,  # 28 - 2f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 30 - 37
+    1, 1, 1, 1, 1, 1, 1, 1,  # 38 - 3f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 40 - 47
+    1, 1, 1, 1, 1, 1, 1, 1,  # 48 - 4f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 50 - 57
+    1, 1, 1, 1, 1, 1, 1, 1,  # 58 - 5f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 60 - 67
+    1, 1, 1, 1, 1, 1, 1, 1,  # 68 - 6f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 70 - 77
+    1, 1, 1, 1, 1, 1, 1, 1,  # 78 - 7f
+    2, 2, 2, 2, 3, 3, 3, 3,  # 80 - 87
+    4, 4, 4, 4, 4, 4, 4, 4,  # 88 - 8f
+    4, 4, 4, 4, 4, 4, 4, 4,  # 90 - 97
+    4, 4, 4, 4, 4, 4, 4, 4,  # 98 - 9f
+    5, 5, 5, 5, 5, 5, 5, 5,  # a0 - a7
+    5, 5, 5, 5, 5, 5, 5, 5,  # a8 - af
+    5, 5, 5, 5, 5, 5, 5, 5,  # b0 - b7
+    5, 5, 5, 5, 5, 5, 5, 5,  # b8 - bf
+    0, 0, 6, 6, 6, 6, 6, 6,  # c0 - c7
+    6, 6, 6, 6, 6, 6, 6, 6,  # c8 - cf
+    6, 6, 6, 6, 6, 6, 6, 6,  # d0 - d7
+    6, 6, 6, 6, 6, 6, 6, 6,  # d8 - df
+    7, 8, 8, 8, 8, 8, 8, 8,  # e0 - e7
+    8, 8, 8, 8, 8, 9, 8, 8,  # e8 - ef
+    10, 11, 11, 11, 11, 11, 11, 11,  # f0 - f7
+    12, 13, 13, 13, 14, 15, 0, 0    # f8 - ff
+)
+
+UTF8_ST = (
+    MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     12,   10,#00-07
+         9,     11,     8,     7,     6,     5,     4,    3,#08-0f
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f
+    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27
+    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f
+    MachineState.ERROR,MachineState.ERROR,     5,     5,     5,     5,MachineState.ERROR,MachineState.ERROR,#30-37
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     5,     5,     5,MachineState.ERROR,MachineState.ERROR,#40-47
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f
+    MachineState.ERROR,MachineState.ERROR,     7,     7,     7,     7,MachineState.ERROR,MachineState.ERROR,#50-57
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     7,     7,MachineState.ERROR,MachineState.ERROR,#60-67
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f
+    MachineState.ERROR,MachineState.ERROR,     9,     9,     9,     9,MachineState.ERROR,MachineState.ERROR,#70-77
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     9,MachineState.ERROR,MachineState.ERROR,#80-87
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f
+    MachineState.ERROR,MachineState.ERROR,    12,    12,    12,    12,MachineState.ERROR,MachineState.ERROR,#90-97
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,    12,MachineState.ERROR,MachineState.ERROR,#a0-a7
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af
+    MachineState.ERROR,MachineState.ERROR,    12,    12,    12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf
+    MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7
+    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf
+)
+# fmt: on
+
+UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6)
+
+UTF8_SM_MODEL: CodingStateMachineDict = {
+    "class_table": UTF8_CLS,
+    "class_factor": 16,
+    "state_table": UTF8_ST,
+    "char_len_table": UTF8_CHAR_LEN_TABLE,
+    "name": "UTF-8",
+}
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/metadata/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/metadata/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/metadata/languages.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/metadata/languages.py
new file mode 100644
index 0000000..eb40c5f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/metadata/languages.py
@@ -0,0 +1,352 @@
+"""
+Metadata about languages used by our model training code for our
+SingleByteCharSetProbers.  Could be used for other things in the future.
+
+This code is based on the language metadata from the uchardet project.
+"""
+
+from string import ascii_letters
+from typing import List, Optional
+
+# TODO: Add Ukrainian (KOI8-U)
+
+
+class Language:
+    """Metadata about a language useful for training models
+
+    :ivar name: The human name for the language, in English.
+    :type name: str
+    :ivar iso_code: 2-letter ISO 639-1 if possible, 3-letter ISO code otherwise,
+                    or use another catalog as a last resort.
+    :type iso_code: str
+    :ivar use_ascii: Whether or not ASCII letters should be included in trained
+                     models.
+    :type use_ascii: bool
+    :ivar charsets: The charsets we want to support and create data for.
+    :type charsets: list of str
+    :ivar alphabet: The characters in the language's alphabet. If `use_ascii` is
+                    `True`, you only need to add those not in the ASCII set.
+    :type alphabet: str
+    :ivar wiki_start_pages: The Wikipedia pages to start from if we're crawling
+                            Wikipedia for training data.
+    :type wiki_start_pages: list of str
+    """
+
+    def __init__(
+        self,
+        name: Optional[str] = None,
+        iso_code: Optional[str] = None,
+        use_ascii: bool = True,
+        charsets: Optional[List[str]] = None,
+        alphabet: Optional[str] = None,
+        wiki_start_pages: Optional[List[str]] = None,
+    ) -> None:
+        super().__init__()
+        self.name = name
+        self.iso_code = iso_code
+        self.use_ascii = use_ascii
+        self.charsets = charsets
+        if self.use_ascii:
+            if alphabet:
+                alphabet += ascii_letters
+            else:
+                alphabet = ascii_letters
+        elif not alphabet:
+            raise ValueError("Must supply alphabet if use_ascii is False")
+        self.alphabet = "".join(sorted(set(alphabet))) if alphabet else None
+        self.wiki_start_pages = wiki_start_pages
+
+    def __repr__(self) -> str:
+        param_str = ", ".join(
+            f"{k}={v!r}" for k, v in self.__dict__.items() if not k.startswith("_")
+        )
+        return f"{self.__class__.__name__}({param_str})"
+
+
+LANGUAGES = {
+    "Arabic": Language(
+        name="Arabic",
+        iso_code="ar",
+        use_ascii=False,
+        # We only support encodings that use isolated
+        # forms, because the current recommendation is
+        # that the rendering system handles presentation
+        # forms. This means we purposefully skip IBM864.
+        charsets=["ISO-8859-6", "WINDOWS-1256", "CP720", "CP864"],
+        alphabet="ءآأؤإئابةتثجحخدذرزسشصضطظعغػؼؽؾؿـفقكلمنهوىيًٌٍَُِّ",
+        wiki_start_pages=["الصفحة_الرئيسية"],
+    ),
+    "Belarusian": Language(
+        name="Belarusian",
+        iso_code="be",
+        use_ascii=False,
+        charsets=["ISO-8859-5", "WINDOWS-1251", "IBM866", "MacCyrillic"],
+        alphabet="АБВГДЕЁЖЗІЙКЛМНОПРСТУЎФХЦЧШЫЬЭЮЯабвгдеёжзійклмнопрстуўфхцчшыьэюяʼ",
+        wiki_start_pages=["Галоўная_старонка"],
+    ),
+    "Bulgarian": Language(
+        name="Bulgarian",
+        iso_code="bg",
+        use_ascii=False,
+        charsets=["ISO-8859-5", "WINDOWS-1251", "IBM855"],
+        alphabet="АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯабвгдежзийклмнопрстуфхцчшщъьюя",
+        wiki_start_pages=["Начална_страница"],
+    ),
+    "Czech": Language(
+        name="Czech",
+        iso_code="cz",
+        use_ascii=True,
+        charsets=["ISO-8859-2", "WINDOWS-1250"],
+        alphabet="áčďéěíňóřšťúůýžÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ",
+        wiki_start_pages=["Hlavní_strana"],
+    ),
+    "Danish": Language(
+        name="Danish",
+        iso_code="da",
+        use_ascii=True,
+        charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"],
+        alphabet="æøåÆØÅ",
+        wiki_start_pages=["Forside"],
+    ),
+    "German": Language(
+        name="German",
+        iso_code="de",
+        use_ascii=True,
+        charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"],
+        alphabet="äöüßẞÄÖÜ",
+        wiki_start_pages=["Wikipedia:Hauptseite"],
+    ),
+    "Greek": Language(
+        name="Greek",
+        iso_code="el",
+        use_ascii=False,
+        charsets=["ISO-8859-7", "WINDOWS-1253"],
+        alphabet="αβγδεζηθικλμνξοπρσςτυφχψωάέήίόύώΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΣΤΥΦΧΨΩΆΈΉΊΌΎΏ",
+        wiki_start_pages=["Πύλη:Κύρια"],
+    ),
+    "English": Language(
+        name="English",
+        iso_code="en",
+        use_ascii=True,
+        charsets=["ISO-8859-1", "WINDOWS-1252", "MacRoman"],
+        wiki_start_pages=["Main_Page"],
+    ),
+    "Esperanto": Language(
+        name="Esperanto",
+        iso_code="eo",
+        # Q, W, X, and Y not used at all
+        use_ascii=False,
+        charsets=["ISO-8859-3"],
+        alphabet="abcĉdefgĝhĥijĵklmnoprsŝtuŭvzABCĈDEFGĜHĤIJĴKLMNOPRSŜTUŬVZ",
+        wiki_start_pages=["Vikipedio:Ĉefpaĝo"],
+    ),
+    "Spanish": Language(
+        name="Spanish",
+        iso_code="es",
+        use_ascii=True,
+        charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"],
+        alphabet="ñáéíóúüÑÁÉÍÓÚÜ",
+        wiki_start_pages=["Wikipedia:Portada"],
+    ),
+    "Estonian": Language(
+        name="Estonian",
+        iso_code="et",
+        use_ascii=False,
+        charsets=["ISO-8859-4", "ISO-8859-13", "WINDOWS-1257"],
+        # C, F, Š, Q, W, X, Y, Z, Ž are only for
+        # loanwords
+        alphabet="ABDEGHIJKLMNOPRSTUVÕÄÖÜabdeghijklmnoprstuvõäöü",
+        wiki_start_pages=["Esileht"],
+    ),
+    "Finnish": Language(
+        name="Finnish",
+        iso_code="fi",
+        use_ascii=True,
+        charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"],
+        alphabet="ÅÄÖŠŽåäöšž",
+        wiki_start_pages=["Wikipedia:Etusivu"],
+    ),
+    "French": Language(
+        name="French",
+        iso_code="fr",
+        use_ascii=True,
+        charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"],
+        alphabet="œàâçèéîïùûêŒÀÂÇÈÉÎÏÙÛÊ",
+        wiki_start_pages=["Wikipédia:Accueil_principal", "Bœuf (animal)"],
+    ),
+    "Hebrew": Language(
+        name="Hebrew",
+        iso_code="he",
+        use_ascii=False,
+        charsets=["ISO-8859-8", "WINDOWS-1255"],
+        alphabet="אבגדהוזחטיךכלםמןנסעףפץצקרשתװױײ",
+        wiki_start_pages=["עמוד_ראשי"],
+    ),
+    "Croatian": Language(
+        name="Croatian",
+        iso_code="hr",
+        # Q, W, X, Y are only used for foreign words.
+        use_ascii=False,
+        charsets=["ISO-8859-2", "WINDOWS-1250"],
+        alphabet="abcčćdđefghijklmnoprsštuvzžABCČĆDĐEFGHIJKLMNOPRSŠTUVZŽ",
+        wiki_start_pages=["Glavna_stranica"],
+    ),
+    "Hungarian": Language(
+        name="Hungarian",
+        iso_code="hu",
+        # Q, W, X, Y are only used for foreign words.
+        use_ascii=False,
+        charsets=["ISO-8859-2", "WINDOWS-1250"],
+        alphabet="abcdefghijklmnoprstuvzáéíóöőúüűABCDEFGHIJKLMNOPRSTUVZÁÉÍÓÖŐÚÜŰ",
+        wiki_start_pages=["Kezdőlap"],
+    ),
+    "Italian": Language(
+        name="Italian",
+        iso_code="it",
+        use_ascii=True,
+        charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"],
+        alphabet="ÀÈÉÌÒÓÙàèéìòóù",
+        wiki_start_pages=["Pagina_principale"],
+    ),
+    "Lithuanian": Language(
+        name="Lithuanian",
+        iso_code="lt",
+        use_ascii=False,
+        charsets=["ISO-8859-13", "WINDOWS-1257", "ISO-8859-4"],
+        # Q, W, and X not used at all
+        alphabet="AĄBCČDEĘĖFGHIĮYJKLMNOPRSŠTUŲŪVZŽaąbcčdeęėfghiįyjklmnoprsštuųūvzž",
+        wiki_start_pages=["Pagrindinis_puslapis"],
+    ),
+    "Latvian": Language(
+        name="Latvian",
+        iso_code="lv",
+        use_ascii=False,
+        charsets=["ISO-8859-13", "WINDOWS-1257", "ISO-8859-4"],
+        # Q, W, X, Y are only for loanwords
+        alphabet="AĀBCČDEĒFGĢHIĪJKĶLĻMNŅOPRSŠTUŪVZŽaābcčdeēfgģhiījkķlļmnņoprsštuūvzž",
+        wiki_start_pages=["Sākumlapa"],
+    ),
+    "Macedonian": Language(
+        name="Macedonian",
+        iso_code="mk",
+        use_ascii=False,
+        charsets=["ISO-8859-5", "WINDOWS-1251", "MacCyrillic", "IBM855"],
+        alphabet="АБВГДЃЕЖЗЅИЈКЛЉМНЊОПРСТЌУФХЦЧЏШабвгдѓежзѕијклљмнњопрстќуфхцчџш",
+        wiki_start_pages=["Главна_страница"],
+    ),
+    "Dutch": Language(
+        name="Dutch",
+        iso_code="nl",
+        use_ascii=True,
+        charsets=["ISO-8859-1", "WINDOWS-1252", "MacRoman"],
+        wiki_start_pages=["Hoofdpagina"],
+    ),
+    "Polish": Language(
+        name="Polish",
+        iso_code="pl",
+        # Q and X are only used for foreign words.
+        use_ascii=False,
+        charsets=["ISO-8859-2", "WINDOWS-1250"],
+        alphabet="AĄBCĆDEĘFGHIJKLŁMNŃOÓPRSŚTUWYZŹŻaąbcćdeęfghijklłmnńoóprsśtuwyzźż",
+        wiki_start_pages=["Wikipedia:Strona_główna"],
+    ),
+    "Portuguese": Language(
+        name="Portuguese",
+        iso_code="pt",
+        use_ascii=True,
+        charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"],
+        alphabet="ÁÂÃÀÇÉÊÍÓÔÕÚáâãàçéêíóôõú",
+        wiki_start_pages=["Wikipédia:Página_principal"],
+    ),
+    "Romanian": Language(
+        name="Romanian",
+        iso_code="ro",
+        use_ascii=True,
+        charsets=["ISO-8859-2", "WINDOWS-1250"],
+        alphabet="ăâîșțĂÂÎȘȚ",
+        wiki_start_pages=["Pagina_principală"],
+    ),
+    "Russian": Language(
+        name="Russian",
+        iso_code="ru",
+        use_ascii=False,
+        charsets=[
+            "ISO-8859-5",
+            "WINDOWS-1251",
+            "KOI8-R",
+            "MacCyrillic",
+            "IBM866",
+            "IBM855",
+        ],
+        alphabet="абвгдеёжзийклмнопрстуфхцчшщъыьэюяАБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ",
+        wiki_start_pages=["Заглавная_страница"],
+    ),
+    "Slovak": Language(
+        name="Slovak",
+        iso_code="sk",
+        use_ascii=True,
+        charsets=["ISO-8859-2", "WINDOWS-1250"],
+        alphabet="áäčďéíĺľňóôŕšťúýžÁÄČĎÉÍĹĽŇÓÔŔŠŤÚÝŽ",
+        wiki_start_pages=["Hlavná_stránka"],
+    ),
+    "Slovene": Language(
+        name="Slovene",
+        iso_code="sl",
+        # Q, W, X, Y are only used for foreign words.
+        use_ascii=False,
+        charsets=["ISO-8859-2", "WINDOWS-1250"],
+        alphabet="abcčdefghijklmnoprsštuvzžABCČDEFGHIJKLMNOPRSŠTUVZŽ",
+        wiki_start_pages=["Glavna_stran"],
+    ),
+    # Serbian can be written in both Latin and Cyrillic, but there's no
+    # simple way to get the Latin alphabet pages from Wikipedia through
+    # the API, so for now we just support Cyrillic.
+    "Serbian": Language(
+        name="Serbian",
+        iso_code="sr",
+        alphabet="АБВГДЂЕЖЗИЈКЛЉМНЊОПРСТЋУФХЦЧЏШабвгдђежзијклљмнњопрстћуфхцчџш",
+        charsets=["ISO-8859-5", "WINDOWS-1251", "MacCyrillic", "IBM855"],
+        wiki_start_pages=["Главна_страна"],
+    ),
+    "Thai": Language(
+        name="Thai",
+        iso_code="th",
+        use_ascii=False,
+        charsets=["ISO-8859-11", "TIS-620", "CP874"],
+        alphabet="กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛",
+        wiki_start_pages=["หน้าหลัก"],
+    ),
+    "Turkish": Language(
+        name="Turkish",
+        iso_code="tr",
+        # Q, W, and X are not used by Turkish
+        use_ascii=False,
+        charsets=["ISO-8859-3", "ISO-8859-9", "WINDOWS-1254"],
+        alphabet="abcçdefgğhıijklmnoöprsştuüvyzâîûABCÇDEFGĞHIİJKLMNOÖPRSŞTUÜVYZÂÎÛ",
+        wiki_start_pages=["Ana_Sayfa"],
+    ),
+    "Vietnamese": Language(
+        name="Vietnamese",
+        iso_code="vi",
+        use_ascii=False,
+        # Windows-1258 is the only common 8-bit
+        # Vietnamese encoding supported by Python.
+        # From Wikipedia:
+        # For systems that lack support for Unicode,
+        # dozens of 8-bit Vietnamese code pages are
+        # available.[1] The most common are VISCII
+        # (TCVN 5712:1993), VPS, and Windows-1258.[3]
+        # Where ASCII is required, such as when
+        # ensuring readability in plain text e-mail,
+        # Vietnamese letters are often encoded
+        # according to Vietnamese Quoted-Readable
+        # (VIQR) or VSCII Mnemonic (VSCII-MNEM),[4]
+        # though usage of either variable-width
+        # scheme has declined dramatically following
+        # the adoption of Unicode on the World Wide
+        # Web.
+        charsets=["WINDOWS-1258"],
+        alphabet="aăâbcdđeêghiklmnoôơpqrstuưvxyAĂÂBCDĐEÊGHIKLMNOÔƠPQRSTUƯVXY",
+        wiki_start_pages=["Chữ_Quốc_ngữ"],
+    ),
+}
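+
+# Example lookup (illustrative): LANGUAGES["Danish"].charsets evaluates to
+# ["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"].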
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/resultdict.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/resultdict.py
new file mode 100644
index 0000000..7d36e64
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/resultdict.py
@@ -0,0 +1,16 @@
+from typing import TYPE_CHECKING, Optional
+
+if TYPE_CHECKING:
+    # TypedDict was introduced in Python 3.8.
+    #
+    # TODO: Remove the else block and TYPE_CHECKING check when dropping support
+    # for Python 3.7.
+    from typing import TypedDict
+
+    class ResultDict(TypedDict):
+        encoding: Optional[str]
+        confidence: float
+        language: Optional[str]
+
+else:
+    ResultDict = dict
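+
+# Example shape (illustrative): a caller such as chardet.detect() returns a
+# ResultDict along the lines of
+#
+#     {"encoding": "utf-8", "confidence": 0.99, "language": ""}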
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/sbcharsetprober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/sbcharsetprober.py
new file mode 100644
index 0000000..0ffbcdd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/sbcharsetprober.py
@@ -0,0 +1,162 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#   Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from typing import Dict, List, NamedTuple, Optional, Union
+
+from .charsetprober import CharSetProber
+from .enums import CharacterCategory, ProbingState, SequenceLikelihood
+
+
+class SingleByteCharSetModel(NamedTuple):
+    charset_name: str
+    language: str
+    char_to_order_map: Dict[int, int]
+    language_model: Dict[int, Dict[int, int]]
+    typical_positive_ratio: float
+    keep_ascii_letters: bool
+    alphabet: str
+
+
+class SingleByteCharSetProber(CharSetProber):
+    SAMPLE_SIZE = 64
+    SB_ENOUGH_REL_THRESHOLD = 1024  # 0.25 * SAMPLE_SIZE^2
+    POSITIVE_SHORTCUT_THRESHOLD = 0.95
+    NEGATIVE_SHORTCUT_THRESHOLD = 0.05
+
+    def __init__(
+        self,
+        model: SingleByteCharSetModel,
+        is_reversed: bool = False,
+        name_prober: Optional[CharSetProber] = None,
+    ) -> None:
+        super().__init__()
+        self._model = model
+        # TRUE if we need to reverse every pair in the model lookup
+        self._reversed = is_reversed
+        # Optional auxiliary prober for name decision
+        self._name_prober = name_prober
+        self._last_order = 255
+        self._seq_counters: List[int] = []
+        self._total_seqs = 0
+        self._total_char = 0
+        self._control_char = 0
+        self._freq_char = 0
+        self.reset()
+
+    def reset(self) -> None:
+        super().reset()
+        # char order of last character
+        self._last_order = 255
+        self._seq_counters = [0] * SequenceLikelihood.get_num_categories()
+        self._total_seqs = 0
+        self._total_char = 0
+        self._control_char = 0
+        # characters that fall in our sampling range
+        self._freq_char = 0
+
+    @property
+    def charset_name(self) -> Optional[str]:
+        if self._name_prober:
+            return self._name_prober.charset_name
+        return self._model.charset_name
+
+    @property
+    def language(self) -> Optional[str]:
+        if self._name_prober:
+            return self._name_prober.language
+        return self._model.language
+
+    def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState:
+        # TODO: Make filter_international_words keep things in self.alphabet
+        if not self._model.keep_ascii_letters:
+            byte_str = self.filter_international_words(byte_str)
+        else:
+            byte_str = self.remove_xml_tags(byte_str)
+        if not byte_str:
+            return self.state
+        char_to_order_map = self._model.char_to_order_map
+        language_model = self._model.language_model
+        for char in byte_str:
+            order = char_to_order_map.get(char, CharacterCategory.UNDEFINED)
+            # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but
+            #      CharacterCategory.SYMBOL is actually 253, so we use CONTROL
+            #      to make it closer to the original intent. The only difference
+            #      is whether or not we count digits and control characters for
+            #      _total_char purposes.
+            if order < CharacterCategory.CONTROL:
+                self._total_char += 1
+            if order < self.SAMPLE_SIZE:
+                self._freq_char += 1
+                if self._last_order < self.SAMPLE_SIZE:
+                    self._total_seqs += 1
+                    if not self._reversed:
+                        lm_cat = language_model[self._last_order][order]
+                    else:
+                        lm_cat = language_model[order][self._last_order]
+                    self._seq_counters[lm_cat] += 1
+            self._last_order = order
+
+        charset_name = self._model.charset_name
+        if self.state == ProbingState.DETECTING:
+            if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD:
+                confidence = self.get_confidence()
+                if confidence > self.POSITIVE_SHORTCUT_THRESHOLD:
+                    self.logger.debug(
+                        "%s confidence = %s, we have a winner", charset_name, confidence
+                    )
+                    self._state = ProbingState.FOUND_IT
+                elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD:
+                    self.logger.debug(
+                        "%s confidence = %s, below negative shortcut threshold %s",
+                        charset_name,
+                        confidence,
+                        self.NEGATIVE_SHORTCUT_THRESHOLD,
+                    )
+                    self._state = ProbingState.NOT_ME
+
+        return self.state
+
+    def get_confidence(self) -> float:
+        r = 0.01
+        if self._total_seqs > 0:
+            r = (
+                (
+                    self._seq_counters[SequenceLikelihood.POSITIVE]
+                    + 0.25 * self._seq_counters[SequenceLikelihood.LIKELY]
+                )
+                / self._total_seqs
+                / self._model.typical_positive_ratio
+            )
+            # The more control characters (proportionally to the size
+            # of the text), the less confident we become in the current
+            # charset.
+            r = r * (self._total_char - self._control_char) / self._total_char
+            r = r * self._freq_char / self._total_char
+            if r >= 1.0:
+                r = 0.99
+        return r
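For orientation, a rough sketch of how the confidence formula in get_confidence() above behaves. The counts below are hypothetical (not taken from any real prober run) and only illustrate the arithmetic:

    # Hypothetical counts for illustration only; real values come from feed().
    positive, likely, total_seqs = 700, 200, 1024          # assumed sequence tallies
    typical_positive_ratio = 0.96                          # assumed model constant
    total_char, control_char, freq_char = 1100, 10, 1000   # assumed character tallies

    r = (positive + 0.25 * likely) / total_seqs / typical_positive_ratio
    r = r * (total_char - control_char) / total_char
    r = r * freq_char / total_char
    confidence = 0.99 if r >= 1.0 else r
    print(confidence)                                      # roughly 0.687 with these numbers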
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/sbcsgroupprober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/sbcsgroupprober.py
new file mode 100644
index 0000000..890ae84
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/sbcsgroupprober.py
@@ -0,0 +1,88 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#   Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetgroupprober import CharSetGroupProber
+from .hebrewprober import HebrewProber
+from .langbulgarianmodel import ISO_8859_5_BULGARIAN_MODEL, WINDOWS_1251_BULGARIAN_MODEL
+from .langgreekmodel import ISO_8859_7_GREEK_MODEL, WINDOWS_1253_GREEK_MODEL
+from .langhebrewmodel import WINDOWS_1255_HEBREW_MODEL
+
+# from .langhungarianmodel import (ISO_8859_2_HUNGARIAN_MODEL,
+#                                  WINDOWS_1250_HUNGARIAN_MODEL)
+from .langrussianmodel import (
+    IBM855_RUSSIAN_MODEL,
+    IBM866_RUSSIAN_MODEL,
+    ISO_8859_5_RUSSIAN_MODEL,
+    KOI8_R_RUSSIAN_MODEL,
+    MACCYRILLIC_RUSSIAN_MODEL,
+    WINDOWS_1251_RUSSIAN_MODEL,
+)
+from .langthaimodel import TIS_620_THAI_MODEL
+from .langturkishmodel import ISO_8859_9_TURKISH_MODEL
+from .sbcharsetprober import SingleByteCharSetProber
+
+
+class SBCSGroupProber(CharSetGroupProber):
+    def __init__(self) -> None:
+        super().__init__()
+        hebrew_prober = HebrewProber()
+        logical_hebrew_prober = SingleByteCharSetProber(
+            WINDOWS_1255_HEBREW_MODEL, is_reversed=False, name_prober=hebrew_prober
+        )
+        # TODO: See if using ISO-8859-8 Hebrew model works better here, since
+        #       it's actually the visual one
+        visual_hebrew_prober = SingleByteCharSetProber(
+            WINDOWS_1255_HEBREW_MODEL, is_reversed=True, name_prober=hebrew_prober
+        )
+        hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober)
+        # TODO: ORDER MATTERS HERE. I changed the order vs what was in master
+        #       and several tests failed that did not before. Some thought
+        #       should be put into the ordering, and we should consider making
+        #       order not matter here, because that is very counter-intuitive.
+        self.probers = [
+            SingleByteCharSetProber(WINDOWS_1251_RUSSIAN_MODEL),
+            SingleByteCharSetProber(KOI8_R_RUSSIAN_MODEL),
+            SingleByteCharSetProber(ISO_8859_5_RUSSIAN_MODEL),
+            SingleByteCharSetProber(MACCYRILLIC_RUSSIAN_MODEL),
+            SingleByteCharSetProber(IBM866_RUSSIAN_MODEL),
+            SingleByteCharSetProber(IBM855_RUSSIAN_MODEL),
+            SingleByteCharSetProber(ISO_8859_7_GREEK_MODEL),
+            SingleByteCharSetProber(WINDOWS_1253_GREEK_MODEL),
+            SingleByteCharSetProber(ISO_8859_5_BULGARIAN_MODEL),
+            SingleByteCharSetProber(WINDOWS_1251_BULGARIAN_MODEL),
+            # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250)
+            #       after we retrain model.
+            # SingleByteCharSetProber(ISO_8859_2_HUNGARIAN_MODEL),
+            # SingleByteCharSetProber(WINDOWS_1250_HUNGARIAN_MODEL),
+            SingleByteCharSetProber(TIS_620_THAI_MODEL),
+            SingleByteCharSetProber(ISO_8859_9_TURKISH_MODEL),
+            hebrew_prober,
+            logical_hebrew_prober,
+            visual_hebrew_prober,
+        ]
+        self.reset()
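A minimal sketch of exercising the group prober directly, assuming pip's vendored copy is importable; SBCSGroupProber is an internal class and chardet.detect() is the usual entry point, so this is illustration only. With a sample this short the confidence stays low; the point is the call pattern:

    from pip._vendor.chardet.sbcsgroupprober import SBCSGroupProber

    prober = SBCSGroupProber()
    sample = "Привет, мир! Это проверка кодировки.".encode("windows-1251")
    prober.feed(sample)                      # runs every single-byte prober over the bytes
    print(prober.charset_name, prober.get_confidence())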
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/sjisprober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/sjisprober.py
new file mode 100644
index 0000000..91df077
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/sjisprober.py
@@ -0,0 +1,105 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from typing import Union
+
+from .chardistribution import SJISDistributionAnalysis
+from .codingstatemachine import CodingStateMachine
+from .enums import MachineState, ProbingState
+from .jpcntx import SJISContextAnalysis
+from .mbcharsetprober import MultiByteCharSetProber
+from .mbcssm import SJIS_SM_MODEL
+
+
+class SJISProber(MultiByteCharSetProber):
+    def __init__(self) -> None:
+        super().__init__()
+        self.coding_sm = CodingStateMachine(SJIS_SM_MODEL)
+        self.distribution_analyzer = SJISDistributionAnalysis()
+        self.context_analyzer = SJISContextAnalysis()
+        self.reset()
+
+    def reset(self) -> None:
+        super().reset()
+        self.context_analyzer.reset()
+
+    @property
+    def charset_name(self) -> str:
+        return self.context_analyzer.charset_name
+
+    @property
+    def language(self) -> str:
+        return "Japanese"
+
+    def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState:
+        assert self.coding_sm is not None
+        assert self.distribution_analyzer is not None
+
+        for i, byte in enumerate(byte_str):
+            coding_state = self.coding_sm.next_state(byte)
+            if coding_state == MachineState.ERROR:
+                self.logger.debug(
+                    "%s %s prober hit error at byte %s",
+                    self.charset_name,
+                    self.language,
+                    i,
+                )
+                self._state = ProbingState.NOT_ME
+                break
+            if coding_state == MachineState.ITS_ME:
+                self._state = ProbingState.FOUND_IT
+                break
+            if coding_state == MachineState.START:
+                char_len = self.coding_sm.get_current_charlen()
+                if i == 0:
+                    self._last_char[1] = byte
+                    self.context_analyzer.feed(
+                        self._last_char[2 - char_len :], char_len
+                    )
+                    self.distribution_analyzer.feed(self._last_char, char_len)
+                else:
+                    self.context_analyzer.feed(
+                        byte_str[i + 1 - char_len : i + 3 - char_len], char_len
+                    )
+                    self.distribution_analyzer.feed(byte_str[i - 1 : i + 1], char_len)
+
+        self._last_char[0] = byte_str[-1]
+
+        if self.state == ProbingState.DETECTING:
+            if self.context_analyzer.got_enough_data() and (
+                self.get_confidence() > self.SHORTCUT_THRESHOLD
+            ):
+                self._state = ProbingState.FOUND_IT
+
+        return self.state
+
+    def get_confidence(self) -> float:
+        assert self.distribution_analyzer is not None
+
+        context_conf = self.context_analyzer.get_confidence()
+        distrib_conf = self.distribution_analyzer.get_confidence()
+        return max(context_conf, distrib_conf)
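Likewise, a hedged sketch of feeding Shift-JIS bytes straight into this prober (illustration only; chardet.detect() is the public API). The sample text and repetition factor are arbitrary, just enough data for the context and distribution analyzers to see repeated sequences:

    from pip._vendor.chardet.sjisprober import SJISProber

    prober = SJISProber()
    sample = ("これは文字コード判定のための日本語サンプルです。" * 10).encode("shift_jis")
    state = prober.feed(sample)
    print(prober.charset_name, prober.language, prober.get_confidence(), state)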
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/universaldetector.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/universaldetector.py
new file mode 100644
index 0000000..30c441d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/universaldetector.py
@@ -0,0 +1,362 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#   Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+"""
+Module containing the UniversalDetector detector class, which is the primary
+class a user of ``chardet`` should use.
+
+:author: Mark Pilgrim (initial port to Python)
+:author: Shy Shalom (original C code)
+:author: Dan Blanchard (major refactoring for 3.0)
+:author: Ian Cordasco
+"""
+
+
+import codecs
+import logging
+import re
+from typing import List, Optional, Union
+
+from .charsetgroupprober import CharSetGroupProber
+from .charsetprober import CharSetProber
+from .enums import InputState, LanguageFilter, ProbingState
+from .escprober import EscCharSetProber
+from .latin1prober import Latin1Prober
+from .macromanprober import MacRomanProber
+from .mbcsgroupprober import MBCSGroupProber
+from .resultdict import ResultDict
+from .sbcsgroupprober import SBCSGroupProber
+from .utf1632prober import UTF1632Prober
+
+
+class UniversalDetector:
+    """
+    The ``UniversalDetector`` class underlies the ``chardet.detect`` function
+    and coordinates all of the different charset probers.
+
+    To get a ``dict`` containing an encoding and its confidence, you can simply
+    run:
+
+    .. code::
+
+            u = UniversalDetector()
+            u.feed(some_bytes)
+            u.close()
+            detected = u.result
+
+    """
+
+    MINIMUM_THRESHOLD = 0.20
+    HIGH_BYTE_DETECTOR = re.compile(b"[\x80-\xFF]")
+    ESC_DETECTOR = re.compile(b"(\033|~{)")
+    WIN_BYTE_DETECTOR = re.compile(b"[\x80-\x9F]")
+    ISO_WIN_MAP = {
+        "iso-8859-1": "Windows-1252",
+        "iso-8859-2": "Windows-1250",
+        "iso-8859-5": "Windows-1251",
+        "iso-8859-6": "Windows-1256",
+        "iso-8859-7": "Windows-1253",
+        "iso-8859-8": "Windows-1255",
+        "iso-8859-9": "Windows-1254",
+        "iso-8859-13": "Windows-1257",
+    }
+    # Based on https://encoding.spec.whatwg.org/#names-and-labels
+    # but altered to match Python names for encodings and remove mappings
+    # that break tests.
+    LEGACY_MAP = {
+        "ascii": "Windows-1252",
+        "iso-8859-1": "Windows-1252",
+        "tis-620": "ISO-8859-11",
+        "iso-8859-9": "Windows-1254",
+        "gb2312": "GB18030",
+        "euc-kr": "CP949",
+        "utf-16le": "UTF-16",
+    }
+
+    def __init__(
+        self,
+        lang_filter: LanguageFilter = LanguageFilter.ALL,
+        should_rename_legacy: bool = False,
+    ) -> None:
+        self._esc_charset_prober: Optional[EscCharSetProber] = None
+        self._utf1632_prober: Optional[UTF1632Prober] = None
+        self._charset_probers: List[CharSetProber] = []
+        self.result: ResultDict = {
+            "encoding": None,
+            "confidence": 0.0,
+            "language": None,
+        }
+        self.done = False
+        self._got_data = False
+        self._input_state = InputState.PURE_ASCII
+        self._last_char = b""
+        self.lang_filter = lang_filter
+        self.logger = logging.getLogger(__name__)
+        self._has_win_bytes = False
+        self.should_rename_legacy = should_rename_legacy
+        self.reset()
+
+    @property
+    def input_state(self) -> int:
+        return self._input_state
+
+    @property
+    def has_win_bytes(self) -> bool:
+        return self._has_win_bytes
+
+    @property
+    def charset_probers(self) -> List[CharSetProber]:
+        return self._charset_probers
+
+    def reset(self) -> None:
+        """
+        Reset the UniversalDetector and all of its probers back to their
+        initial states.  This is called by ``__init__``, so you only need to
+        call this directly in between analyses of different documents.
+        """
+        self.result = {"encoding": None, "confidence": 0.0, "language": None}
+        self.done = False
+        self._got_data = False
+        self._has_win_bytes = False
+        self._input_state = InputState.PURE_ASCII
+        self._last_char = b""
+        if self._esc_charset_prober:
+            self._esc_charset_prober.reset()
+        if self._utf1632_prober:
+            self._utf1632_prober.reset()
+        for prober in self._charset_probers:
+            prober.reset()
+
+    def feed(self, byte_str: Union[bytes, bytearray]) -> None:
+        """
+        Takes a chunk of a document and feeds it through all of the relevant
+        charset probers.
+
+        After calling ``feed``, you can check the value of the ``done``
+        attribute to see if you need to continue feeding the
+        ``UniversalDetector`` more data, or if it has made a prediction
+        (in the ``result`` attribute).
+
+        .. note::
+           You should always call ``close`` when you're done feeding in your
+           document if ``done`` is not already ``True``.
+        """
+        if self.done:
+            return
+
+        if not byte_str:
+            return
+
+        if not isinstance(byte_str, bytearray):
+            byte_str = bytearray(byte_str)
+
+        # First check for known BOMs, since these are guaranteed to be correct
+        if not self._got_data:
+            # If the data starts with BOM, we know it is UTF
+            if byte_str.startswith(codecs.BOM_UTF8):
+                # EF BB BF  UTF-8 with BOM
+                self.result = {
+                    "encoding": "UTF-8-SIG",
+                    "confidence": 1.0,
+                    "language": "",
+                }
+            elif byte_str.startswith((codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE)):
+                # FF FE 00 00  UTF-32, little-endian BOM
+                # 00 00 FE FF  UTF-32, big-endian BOM
+                self.result = {"encoding": "UTF-32", "confidence": 1.0, "language": ""}
+            elif byte_str.startswith(b"\xFE\xFF\x00\x00"):
+                # FE FF 00 00  UCS-4, unusual octet order BOM (3412)
+                self.result = {
+                    # TODO: This encoding is not supported by Python. Should remove?
+                    "encoding": "X-ISO-10646-UCS-4-3412",
+                    "confidence": 1.0,
+                    "language": "",
+                }
+            elif byte_str.startswith(b"\x00\x00\xFF\xFE"):
+                # 00 00 FF FE  UCS-4, unusual octet order BOM (2143)
+                self.result = {
+                    # TODO: This encoding is not supported by Python. Should remove?
+                    "encoding": "X-ISO-10646-UCS-4-2143",
+                    "confidence": 1.0,
+                    "language": "",
+                }
+            elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)):
+                # FF FE  UTF-16, little endian BOM
+                # FE FF  UTF-16, big endian BOM
+                self.result = {"encoding": "UTF-16", "confidence": 1.0, "language": ""}
+
+            self._got_data = True
+            if self.result["encoding"] is not None:
+                self.done = True
+                return
+
+        # If none of those matched and we've only seen ASCII so far, check
+        # for high bytes and escape sequences
+        if self._input_state == InputState.PURE_ASCII:
+            if self.HIGH_BYTE_DETECTOR.search(byte_str):
+                self._input_state = InputState.HIGH_BYTE
+            elif (
+                self._input_state == InputState.PURE_ASCII
+                and self.ESC_DETECTOR.search(self._last_char + byte_str)
+            ):
+                self._input_state = InputState.ESC_ASCII
+
+        self._last_char = byte_str[-1:]
+
+        # next we will look to see if it appears to be either a UTF-16 or
+        # UTF-32 encoding
+        if not self._utf1632_prober:
+            self._utf1632_prober = UTF1632Prober()
+
+        if self._utf1632_prober.state == ProbingState.DETECTING:
+            if self._utf1632_prober.feed(byte_str) == ProbingState.FOUND_IT:
+                self.result = {
+                    "encoding": self._utf1632_prober.charset_name,
+                    "confidence": self._utf1632_prober.get_confidence(),
+                    "language": "",
+                }
+                self.done = True
+                return
+
+        # If we've seen escape sequences, use the EscCharSetProber, which
+        # uses a simple state machine to check for known escape sequences in
+        # HZ and ISO-2022 encodings, since those are the only encodings that
+        # use such sequences.
+        if self._input_state == InputState.ESC_ASCII:
+            if not self._esc_charset_prober:
+                self._esc_charset_prober = EscCharSetProber(self.lang_filter)
+            if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT:
+                self.result = {
+                    "encoding": self._esc_charset_prober.charset_name,
+                    "confidence": self._esc_charset_prober.get_confidence(),
+                    "language": self._esc_charset_prober.language,
+                }
+                self.done = True
+        # If we've seen high bytes (i.e., those with values greater than 127),
+        # we need to do more complicated checks using all our multi-byte and
+        # single-byte probers that are left.  The single-byte probers
+        # use character bigram distributions to determine the encoding, whereas
+        # the multi-byte probers use a combination of character unigram and
+        # bigram distributions.
+        elif self._input_state == InputState.HIGH_BYTE:
+            if not self._charset_probers:
+                self._charset_probers = [MBCSGroupProber(self.lang_filter)]
+                # If we're checking non-CJK encodings, use single-byte prober
+                if self.lang_filter & LanguageFilter.NON_CJK:
+                    self._charset_probers.append(SBCSGroupProber())
+                self._charset_probers.append(Latin1Prober())
+                self._charset_probers.append(MacRomanProber())
+            for prober in self._charset_probers:
+                if prober.feed(byte_str) == ProbingState.FOUND_IT:
+                    self.result = {
+                        "encoding": prober.charset_name,
+                        "confidence": prober.get_confidence(),
+                        "language": prober.language,
+                    }
+                    self.done = True
+                    break
+            if self.WIN_BYTE_DETECTOR.search(byte_str):
+                self._has_win_bytes = True
+
+    def close(self) -> ResultDict:
+        """
+        Stop analyzing the current document and come up with a final
+        prediction.
+
+        :returns:  The ``result`` attribute, a ``dict`` with the keys
+                   `encoding`, `confidence`, and `language`.
+        """
+        # Don't bother with checks if we're already done
+        if self.done:
+            return self.result
+        self.done = True
+
+        if not self._got_data:
+            self.logger.debug("no data received!")
+
+        # Default to ASCII if it is all we've seen so far
+        elif self._input_state == InputState.PURE_ASCII:
+            self.result = {"encoding": "ascii", "confidence": 1.0, "language": ""}
+
+        # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD
+        elif self._input_state == InputState.HIGH_BYTE:
+            prober_confidence = None
+            max_prober_confidence = 0.0
+            max_prober = None
+            for prober in self._charset_probers:
+                if not prober:
+                    continue
+                prober_confidence = prober.get_confidence()
+                if prober_confidence > max_prober_confidence:
+                    max_prober_confidence = prober_confidence
+                    max_prober = prober
+            if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD):
+                charset_name = max_prober.charset_name
+                assert charset_name is not None
+                lower_charset_name = charset_name.lower()
+                confidence = max_prober.get_confidence()
+                # Use Windows encoding name instead of ISO-8859 if we saw any
+                # extra Windows-specific bytes
+                if lower_charset_name.startswith("iso-8859"):
+                    if self._has_win_bytes:
+                        charset_name = self.ISO_WIN_MAP.get(
+                            lower_charset_name, charset_name
+                        )
+                # Rename legacy encodings with superset encodings if asked
+                if self.should_rename_legacy:
+                    charset_name = self.LEGACY_MAP.get(
+                        (charset_name or "").lower(), charset_name
+                    )
+                self.result = {
+                    "encoding": charset_name,
+                    "confidence": confidence,
+                    "language": max_prober.language,
+                }
+
+        # Log all prober confidences if none met MINIMUM_THRESHOLD
+        if self.logger.getEffectiveLevel() <= logging.DEBUG:
+            if self.result["encoding"] is None:
+                self.logger.debug("no probers hit minimum threshold")
+                for group_prober in self._charset_probers:
+                    if not group_prober:
+                        continue
+                    if isinstance(group_prober, CharSetGroupProber):
+                        for prober in group_prober.probers:
+                            self.logger.debug(
+                                "%s %s confidence = %s",
+                                prober.charset_name,
+                                prober.language,
+                                prober.get_confidence(),
+                            )
+                    else:
+                        self.logger.debug(
+                            "%s %s confidence = %s",
+                            group_prober.charset_name,
+                            group_prober.language,
+                            group_prober.get_confidence(),
+                        )
+        return self.result
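Building on the class docstring above, a sketch of the usual incremental pattern: feed the detector chunk by chunk and stop early once it reports done. The file name and chunk size are placeholders:

    from pip._vendor.chardet.universaldetector import UniversalDetector

    detector = UniversalDetector()
    with open("some_file.txt", "rb") as handle:          # placeholder path
        for chunk in iter(lambda: handle.read(4096), b""):
            detector.feed(chunk)
            if detector.done:                            # BOM hit or a prober shortcut fired
                break
    result = detector.close()                            # {'encoding': ..., 'confidence': ..., 'language': ...}
    print(result)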
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/utf1632prober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/utf1632prober.py
new file mode 100644
index 0000000..6bdec63
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/utf1632prober.py
@@ -0,0 +1,225 @@
+######################## BEGIN LICENSE BLOCK ########################
+#
+# Contributor(s):
+#   Jason Zavaglia
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+from typing import List, Union
+
+from .charsetprober import CharSetProber
+from .enums import ProbingState
+
+
+class UTF1632Prober(CharSetProber):
+    """
+    This class simply looks for occurrences of zero bytes, and infers
+    whether the file is UTF16 or UTF32 (little-endian or big-endian).
+    For instance, files looking like ( \0 \0 \0 [nonzero] )+
+    have a good probability to be UTF32BE.  Files looking like ( \0 [nonzero] )+
+    may be guessed to be UTF16BE, and inversely for little-endian varieties.
+    """
+
+    # how many logical characters to scan before feeling confident of prediction
+    MIN_CHARS_FOR_DETECTION = 20
+    # a fixed constant ratio of expected zeros or non-zeros in modulo-position.
+    EXPECTED_RATIO = 0.94
+
+    def __init__(self) -> None:
+        super().__init__()
+        self.position = 0
+        self.zeros_at_mod = [0] * 4
+        self.nonzeros_at_mod = [0] * 4
+        self._state = ProbingState.DETECTING
+        self.quad = [0, 0, 0, 0]
+        self.invalid_utf16be = False
+        self.invalid_utf16le = False
+        self.invalid_utf32be = False
+        self.invalid_utf32le = False
+        self.first_half_surrogate_pair_detected_16be = False
+        self.first_half_surrogate_pair_detected_16le = False
+        self.reset()
+
+    def reset(self) -> None:
+        super().reset()
+        self.position = 0
+        self.zeros_at_mod = [0] * 4
+        self.nonzeros_at_mod = [0] * 4
+        self._state = ProbingState.DETECTING
+        self.invalid_utf16be = False
+        self.invalid_utf16le = False
+        self.invalid_utf32be = False
+        self.invalid_utf32le = False
+        self.first_half_surrogate_pair_detected_16be = False
+        self.first_half_surrogate_pair_detected_16le = False
+        self.quad = [0, 0, 0, 0]
+
+    @property
+    def charset_name(self) -> str:
+        if self.is_likely_utf32be():
+            return "utf-32be"
+        if self.is_likely_utf32le():
+            return "utf-32le"
+        if self.is_likely_utf16be():
+            return "utf-16be"
+        if self.is_likely_utf16le():
+            return "utf-16le"
+        # default to something valid
+        return "utf-16"
+
+    @property
+    def language(self) -> str:
+        return ""
+
+    def approx_32bit_chars(self) -> float:
+        return max(1.0, self.position / 4.0)
+
+    def approx_16bit_chars(self) -> float:
+        return max(1.0, self.position / 2.0)
+
+    def is_likely_utf32be(self) -> bool:
+        approx_chars = self.approx_32bit_chars()
+        return approx_chars >= self.MIN_CHARS_FOR_DETECTION and (
+            self.zeros_at_mod[0] / approx_chars > self.EXPECTED_RATIO
+            and self.zeros_at_mod[1] / approx_chars > self.EXPECTED_RATIO
+            and self.zeros_at_mod[2] / approx_chars > self.EXPECTED_RATIO
+            and self.nonzeros_at_mod[3] / approx_chars > self.EXPECTED_RATIO
+            and not self.invalid_utf32be
+        )
+
+    def is_likely_utf32le(self) -> bool:
+        approx_chars = self.approx_32bit_chars()
+        return approx_chars >= self.MIN_CHARS_FOR_DETECTION and (
+            self.nonzeros_at_mod[0] / approx_chars > self.EXPECTED_RATIO
+            and self.zeros_at_mod[1] / approx_chars > self.EXPECTED_RATIO
+            and self.zeros_at_mod[2] / approx_chars > self.EXPECTED_RATIO
+            and self.zeros_at_mod[3] / approx_chars > self.EXPECTED_RATIO
+            and not self.invalid_utf32le
+        )
+
+    def is_likely_utf16be(self) -> bool:
+        approx_chars = self.approx_16bit_chars()
+        return approx_chars >= self.MIN_CHARS_FOR_DETECTION and (
+            (self.nonzeros_at_mod[1] + self.nonzeros_at_mod[3]) / approx_chars
+            > self.EXPECTED_RATIO
+            and (self.zeros_at_mod[0] + self.zeros_at_mod[2]) / approx_chars
+            > self.EXPECTED_RATIO
+            and not self.invalid_utf16be
+        )
+
+    def is_likely_utf16le(self) -> bool:
+        approx_chars = self.approx_16bit_chars()
+        return approx_chars >= self.MIN_CHARS_FOR_DETECTION and (
+            (self.nonzeros_at_mod[0] + self.nonzeros_at_mod[2]) / approx_chars
+            > self.EXPECTED_RATIO
+            and (self.zeros_at_mod[1] + self.zeros_at_mod[3]) / approx_chars
+            > self.EXPECTED_RATIO
+            and not self.invalid_utf16le
+        )
+
+    def validate_utf32_characters(self, quad: List[int]) -> None:
+        """
+        Validate if the quad of bytes is valid UTF-32.
+
+        UTF-32 is valid in the range 0x00000000 - 0x0010FFFF
+        excluding 0x0000D800 - 0x0000DFFF
+
+        https://en.wikipedia.org/wiki/UTF-32
+        """
+        if (
+            quad[0] != 0
+            or quad[1] > 0x10
+            or (quad[0] == 0 and quad[1] == 0 and 0xD8 <= quad[2] <= 0xDF)
+        ):
+            self.invalid_utf32be = True
+        if (
+            quad[3] != 0
+            or quad[2] > 0x10
+            or (quad[3] == 0 and quad[2] == 0 and 0xD8 <= quad[1] <= 0xDF)
+        ):
+            self.invalid_utf32le = True
+
+    def validate_utf16_characters(self, pair: List[int]) -> None:
+        """
+        Validate if the pair of bytes is valid UTF-16.
+
+        UTF-16 is valid in the range 0x0000 - 0xFFFF excluding 0xD800 - 0xDFFF,
+        with an exception for surrogate pairs, which must be in the range
+        0xD800-0xDBFF followed by 0xDC00-0xDFFF.
+
+        https://en.wikipedia.org/wiki/UTF-16
+        """
+        if not self.first_half_surrogate_pair_detected_16be:
+            if 0xD8 <= pair[0] <= 0xDB:
+                self.first_half_surrogate_pair_detected_16be = True
+            elif 0xDC <= pair[0] <= 0xDF:
+                self.invalid_utf16be = True
+        else:
+            if 0xDC <= pair[0] <= 0xDF:
+                self.first_half_surrogate_pair_detected_16be = False
+            else:
+                self.invalid_utf16be = True
+
+        if not self.first_half_surrogate_pair_detected_16le:
+            if 0xD8 <= pair[1] <= 0xDB:
+                self.first_half_surrogate_pair_detected_16le = True
+            elif 0xDC <= pair[1] <= 0xDF:
+                self.invalid_utf16le = True
+        else:
+            if 0xDC <= pair[1] <= 0xDF:
+                self.first_half_surrogate_pair_detected_16le = False
+            else:
+                self.invalid_utf16le = True
+
+    def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState:
+        for c in byte_str:
+            mod4 = self.position % 4
+            self.quad[mod4] = c
+            if mod4 == 3:
+                self.validate_utf32_characters(self.quad)
+                self.validate_utf16_characters(self.quad[0:2])
+                self.validate_utf16_characters(self.quad[2:4])
+            if c == 0:
+                self.zeros_at_mod[mod4] += 1
+            else:
+                self.nonzeros_at_mod[mod4] += 1
+            self.position += 1
+        return self.state
+
+    @property
+    def state(self) -> ProbingState:
+        if self._state in {ProbingState.NOT_ME, ProbingState.FOUND_IT}:
+            # terminal, decided states
+            return self._state
+        if self.get_confidence() > 0.80:
+            self._state = ProbingState.FOUND_IT
+        elif self.position > 4 * 1024:
+            # if we get to 4kb into the file, and we can't conclude it's UTF,
+            # let's give up
+            self._state = ProbingState.NOT_ME
+        return self._state
+
+    def get_confidence(self) -> float:
+        return (
+            0.85
+            if (
+                self.is_likely_utf16le()
+                or self.is_likely_utf16be()
+                or self.is_likely_utf32le()
+                or self.is_likely_utf32be()
+            )
+            else 0.00
+        )
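The zero-byte pattern the prober counts can be seen directly from Python's codecs; a small sketch (the sample text is arbitrary, long enough to clear MIN_CHARS_FOR_DETECTION):

    from pip._vendor.chardet.utf1632prober import UTF1632Prober

    print("hi".encode("utf-32-be"))   # b'\x00\x00\x00h\x00\x00\x00i' -- zeros at offsets 0, 1, 2 of each quad
    print("hi".encode("utf-16-le"))   # b'h\x00i\x00'                 -- zeros at odd offsets

    prober = UTF1632Prober()
    prober.feed("charset detection example".encode("utf-32-be"))
    print(prober.charset_name, prober.get_confidence())   # expected: utf-32be 0.85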
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/utf8prober.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/utf8prober.py
new file mode 100644
index 0000000..d96354d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/utf8prober.py
@@ -0,0 +1,82 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from typing import Union
+
+from .charsetprober import CharSetProber
+from .codingstatemachine import CodingStateMachine
+from .enums import MachineState, ProbingState
+from .mbcssm import UTF8_SM_MODEL
+
+
+class UTF8Prober(CharSetProber):
+    ONE_CHAR_PROB = 0.5
+
+    def __init__(self) -> None:
+        super().__init__()
+        self.coding_sm = CodingStateMachine(UTF8_SM_MODEL)
+        self._num_mb_chars = 0
+        self.reset()
+
+    def reset(self) -> None:
+        super().reset()
+        self.coding_sm.reset()
+        self._num_mb_chars = 0
+
+    @property
+    def charset_name(self) -> str:
+        return "utf-8"
+
+    @property
+    def language(self) -> str:
+        return ""
+
+    def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState:
+        for c in byte_str:
+            coding_state = self.coding_sm.next_state(c)
+            if coding_state == MachineState.ERROR:
+                self._state = ProbingState.NOT_ME
+                break
+            if coding_state == MachineState.ITS_ME:
+                self._state = ProbingState.FOUND_IT
+                break
+            if coding_state == MachineState.START:
+                if self.coding_sm.get_current_charlen() >= 2:
+                    self._num_mb_chars += 1
+
+        if self.state == ProbingState.DETECTING:
+            if self.get_confidence() > self.SHORTCUT_THRESHOLD:
+                self._state = ProbingState.FOUND_IT
+
+        return self.state
+
+    def get_confidence(self) -> float:
+        unlike = 0.99
+        if self._num_mb_chars < 6:
+            unlike *= self.ONE_CHAR_PROB**self._num_mb_chars
+            return 1.0 - unlike
+        return unlike
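A worked view of the confidence curve implied by ONE_CHAR_PROB above: each additional multi-byte sequence halves the remaining doubt, capped at 0.99 after six. Plain arithmetic, no assumptions beyond the constants in the class:

    ONE_CHAR_PROB = 0.5
    for num_mb_chars in range(7):
        if num_mb_chars < 6:
            confidence = 1.0 - 0.99 * ONE_CHAR_PROB ** num_mb_chars
        else:
            confidence = 0.99
        print(num_mb_chars, confidence)
    # approx: 0 -> 0.01, 1 -> 0.505, 2 -> 0.7525, 3 -> 0.876, 4 -> 0.938, 5 -> 0.969, 6 -> 0.99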
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/version.py b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/version.py
new file mode 100644
index 0000000..c5e9d85
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/chardet/version.py
@@ -0,0 +1,9 @@
+"""
+This module exists only to simplify retrieving the version number of chardet
+from within setuptools and from chardet subpackages.
+
+:author: Dan Blanchard (dan.blanchard@gmail.com)
+"""
+
+__version__ = "5.1.0"
+VERSION = __version__.split(".")
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/__init__.py
new file mode 100644
index 0000000..383101c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/__init__.py
@@ -0,0 +1,7 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+from .initialise import init, deinit, reinit, colorama_text, just_fix_windows_console
+from .ansi import Fore, Back, Style, Cursor
+from .ansitowin32 import AnsiToWin32
+
+__version__ = '0.4.6'
+
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/ansi.py b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/ansi.py
new file mode 100644
index 0000000..11ec695
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/ansi.py
@@ -0,0 +1,102 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+'''
+This module generates ANSI character codes for printing colors to terminals.
+See: http://en.wikipedia.org/wiki/ANSI_escape_code
+'''
+
+CSI = '\033['
+OSC = '\033]'
+BEL = '\a'
+
+
+def code_to_chars(code):
+    return CSI + str(code) + 'm'
+
+def set_title(title):
+    return OSC + '2;' + title + BEL
+
+def clear_screen(mode=2):
+    return CSI + str(mode) + 'J'
+
+def clear_line(mode=2):
+    return CSI + str(mode) + 'K'
+
+
+class AnsiCodes(object):
+    def __init__(self):
+        # the subclasses declare class attributes which are numbers.
+        # Upon instantiation we define instance attributes, which are the same
+        # as the class attributes but wrapped with the ANSI escape sequence
+        for name in dir(self):
+            if not name.startswith('_'):
+                value = getattr(self, name)
+                setattr(self, name, code_to_chars(value))
+
+
+class AnsiCursor(object):
+    def UP(self, n=1):
+        return CSI + str(n) + 'A'
+    def DOWN(self, n=1):
+        return CSI + str(n) + 'B'
+    def FORWARD(self, n=1):
+        return CSI + str(n) + 'C'
+    def BACK(self, n=1):
+        return CSI + str(n) + 'D'
+    def POS(self, x=1, y=1):
+        return CSI + str(y) + ';' + str(x) + 'H'
+
+
+class AnsiFore(AnsiCodes):
+    BLACK           = 30
+    RED             = 31
+    GREEN           = 32
+    YELLOW          = 33
+    BLUE            = 34
+    MAGENTA         = 35
+    CYAN            = 36
+    WHITE           = 37
+    RESET           = 39
+
+    # These are fairly well supported, but not part of the standard.
+    LIGHTBLACK_EX   = 90
+    LIGHTRED_EX     = 91
+    LIGHTGREEN_EX   = 92
+    LIGHTYELLOW_EX  = 93
+    LIGHTBLUE_EX    = 94
+    LIGHTMAGENTA_EX = 95
+    LIGHTCYAN_EX    = 96
+    LIGHTWHITE_EX   = 97
+
+
+class AnsiBack(AnsiCodes):
+    BLACK           = 40
+    RED             = 41
+    GREEN           = 42
+    YELLOW          = 43
+    BLUE            = 44
+    MAGENTA         = 45
+    CYAN            = 46
+    WHITE           = 47
+    RESET           = 49
+
+    # These are fairly well supported, but not part of the standard.
+    LIGHTBLACK_EX   = 100
+    LIGHTRED_EX     = 101
+    LIGHTGREEN_EX   = 102
+    LIGHTYELLOW_EX  = 103
+    LIGHTBLUE_EX    = 104
+    LIGHTMAGENTA_EX = 105
+    LIGHTCYAN_EX    = 106
+    LIGHTWHITE_EX   = 107
+
+
+class AnsiStyle(AnsiCodes):
+    BRIGHT    = 1
+    DIM       = 2
+    NORMAL    = 22
+    RESET_ALL = 0
+
+Fore   = AnsiFore()
+Back   = AnsiBack()
+Style  = AnsiStyle()
+Cursor = AnsiCursor()
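A quick sketch of what the singletons above expand to: plain ANSI escape strings that most terminals understand (importing pip's vendored copy here is only for illustration; the standalone colorama package behaves the same):

    from pip._vendor.colorama import Fore, Back, Style

    print(repr(Fore.RED))          # '\x1b[31m'
    print(repr(Back.GREEN))        # '\x1b[42m'
    print(repr(Style.RESET_ALL))   # '\x1b[0m'
    print(Fore.RED + "error text" + Style.RESET_ALL)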
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/ansitowin32.py b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/ansitowin32.py
new file mode 100644
index 0000000..abf209e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/ansitowin32.py
@@ -0,0 +1,277 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import re
+import sys
+import os
+
+from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style, BEL
+from .winterm import enable_vt_processing, WinTerm, WinColor, WinStyle
+from .win32 import windll, winapi_test
+
+
+winterm = None
+if windll is not None:
+    winterm = WinTerm()
+
+
+class StreamWrapper(object):
+    '''
+    Wraps a stream (such as stdout), acting as a transparent proxy for all
+    attribute access apart from method 'write()', which is delegated to our
+    Converter instance.
+    '''
+    def __init__(self, wrapped, converter):
+        # double-underscore everything to prevent clashes with names of
+        # attributes on the wrapped stream object.
+        self.__wrapped = wrapped
+        self.__convertor = converter
+
+    def __getattr__(self, name):
+        return getattr(self.__wrapped, name)
+
+    def __enter__(self, *args, **kwargs):
+        # special method lookup bypasses __getattr__/__getattribute__, see
+        # https://stackoverflow.com/questions/12632894/why-doesnt-getattr-work-with-exit
+        # thus, contextlib magic methods are not proxied via __getattr__
+        return self.__wrapped.__enter__(*args, **kwargs)
+
+    def __exit__(self, *args, **kwargs):
+        return self.__wrapped.__exit__(*args, **kwargs)
+
+    def __setstate__(self, state):
+        self.__dict__ = state
+
+    def __getstate__(self):
+        return self.__dict__
+
+    def write(self, text):
+        self.__convertor.write(text)
+
+    def isatty(self):
+        stream = self.__wrapped
+        if 'PYCHARM_HOSTED' in os.environ:
+            if stream is not None and (stream is sys.__stdout__ or stream is sys.__stderr__):
+                return True
+        try:
+            stream_isatty = stream.isatty
+        except AttributeError:
+            return False
+        else:
+            return stream_isatty()
+
+    @property
+    def closed(self):
+        stream = self.__wrapped
+        try:
+            return stream.closed
+        # AttributeError in the case that the stream doesn't support being closed
+        # ValueError for the case that the stream has already been detached when atexit runs
+        except (AttributeError, ValueError):
+            return True
+
+
+class AnsiToWin32(object):
+    '''
+    Implements a 'write()' method which, on Windows, will strip ANSI character
+    sequences from the text, and if outputting to a tty, will convert them into
+    win32 function calls.
+    '''
+    ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?')   # Control Sequence Introducer
+    ANSI_OSC_RE = re.compile('\001?\033\\]([^\a]*)(\a)\002?')             # Operating System Command
+
+    def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
+        # The wrapped stream (normally sys.stdout or sys.stderr)
+        self.wrapped = wrapped
+
+        # should we reset colors to defaults after every .write()
+        self.autoreset = autoreset
+
+        # create the proxy wrapping our output stream
+        self.stream = StreamWrapper(wrapped, self)
+
+        on_windows = os.name == 'nt'
+        # We test if the WinAPI works, because even if we are on Windows
+        # we may be using a terminal that doesn't support the WinAPI
+        # (e.g. Cygwin Terminal). In this case it's up to the terminal
+        # to support the ANSI codes.
+        conversion_supported = on_windows and winapi_test()
+        try:
+            fd = wrapped.fileno()
+        except Exception:
+            fd = -1
+        system_has_native_ansi = not on_windows or enable_vt_processing(fd)
+        have_tty = not self.stream.closed and self.stream.isatty()
+        need_conversion = conversion_supported and not system_has_native_ansi
+
+        # should we strip ANSI sequences from our output?
+        if strip is None:
+            strip = need_conversion or not have_tty
+        self.strip = strip
+
+        # should we convert ANSI sequences into win32 calls?
+        if convert is None:
+            convert = need_conversion and have_tty
+        self.convert = convert
+
+        # dict of ansi codes to win32 functions and parameters
+        self.win32_calls = self.get_win32_calls()
+
+        # are we wrapping stderr?
+        self.on_stderr = self.wrapped is sys.stderr
+
+    def should_wrap(self):
+        '''
+        True if this class is actually needed. If false, then the output
+        stream will not be affected, nor will win32 calls be issued, so
+        wrapping stdout is not actually required. This will generally be
+        False on non-Windows platforms, unless optional functionality like
+        autoreset has been requested using kwargs to init()
+        '''
+        return self.convert or self.strip or self.autoreset
+
+    def get_win32_calls(self):
+        if self.convert and winterm:
+            return {
+                AnsiStyle.RESET_ALL: (winterm.reset_all, ),
+                AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT),
+                AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL),
+                AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL),
+                AnsiFore.BLACK: (winterm.fore, WinColor.BLACK),
+                AnsiFore.RED: (winterm.fore, WinColor.RED),
+                AnsiFore.GREEN: (winterm.fore, WinColor.GREEN),
+                AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW),
+                AnsiFore.BLUE: (winterm.fore, WinColor.BLUE),
+                AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA),
+                AnsiFore.CYAN: (winterm.fore, WinColor.CYAN),
+                AnsiFore.WHITE: (winterm.fore, WinColor.GREY),
+                AnsiFore.RESET: (winterm.fore, ),
+                AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True),
+                AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True),
+                AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True),
+                AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True),
+                AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True),
+                AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True),
+                AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True),
+                AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True),
+                AnsiBack.BLACK: (winterm.back, WinColor.BLACK),
+                AnsiBack.RED: (winterm.back, WinColor.RED),
+                AnsiBack.GREEN: (winterm.back, WinColor.GREEN),
+                AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW),
+                AnsiBack.BLUE: (winterm.back, WinColor.BLUE),
+                AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA),
+                AnsiBack.CYAN: (winterm.back, WinColor.CYAN),
+                AnsiBack.WHITE: (winterm.back, WinColor.GREY),
+                AnsiBack.RESET: (winterm.back, ),
+                AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True),
+                AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True),
+                AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True),
+                AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True),
+                AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True),
+                AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True),
+                AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True),
+                AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True),
+            }
+        return dict()
+
+    def write(self, text):
+        if self.strip or self.convert:
+            self.write_and_convert(text)
+        else:
+            self.wrapped.write(text)
+            self.wrapped.flush()
+        if self.autoreset:
+            self.reset_all()
+
+
+    def reset_all(self):
+        if self.convert:
+            self.call_win32('m', (0,))
+        elif not self.strip and not self.stream.closed:
+            self.wrapped.write(Style.RESET_ALL)
+
+
+    def write_and_convert(self, text):
+        '''
+        Write the given text to our wrapped stream, stripping any ANSI
+        sequences from the text, and optionally converting them into win32
+        calls.
+        '''
+        cursor = 0
+        text = self.convert_osc(text)
+        for match in self.ANSI_CSI_RE.finditer(text):
+            start, end = match.span()
+            self.write_plain_text(text, cursor, start)
+            self.convert_ansi(*match.groups())
+            cursor = end
+        self.write_plain_text(text, cursor, len(text))
+
+
+    def write_plain_text(self, text, start, end):
+        if start < end:
+            self.wrapped.write(text[start:end])
+            self.wrapped.flush()
+
+
+    def convert_ansi(self, paramstring, command):
+        if self.convert:
+            params = self.extract_params(command, paramstring)
+            self.call_win32(command, params)
+
+
+    def extract_params(self, command, paramstring):
+        if command in 'Hf':
+            params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';'))
+            while len(params) < 2:
+                # defaults:
+                params = params + (1,)
+        else:
+            params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0)
+            if len(params) == 0:
+                # defaults:
+                if command in 'JKm':
+                    params = (0,)
+                elif command in 'ABCD':
+                    params = (1,)
+
+        return params
+
+
+    def call_win32(self, command, params):
+        if command == 'm':
+            for param in params:
+                if param in self.win32_calls:
+                    func_args = self.win32_calls[param]
+                    func = func_args[0]
+                    args = func_args[1:]
+                    kwargs = dict(on_stderr=self.on_stderr)
+                    func(*args, **kwargs)
+        elif command in 'J':
+            winterm.erase_screen(params[0], on_stderr=self.on_stderr)
+        elif command in 'K':
+            winterm.erase_line(params[0], on_stderr=self.on_stderr)
+        elif command in 'Hf':     # cursor position - absolute
+            winterm.set_cursor_position(params, on_stderr=self.on_stderr)
+        elif command in 'ABCD':   # cursor position - relative
+            n = params[0]
+            # A - up, B - down, C - forward, D - back
+            x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command]
+            winterm.cursor_adjust(x, y, on_stderr=self.on_stderr)
+
+
+    def convert_osc(self, text):
+        for match in self.ANSI_OSC_RE.finditer(text):
+            start, end = match.span()
+            text = text[:start] + text[end:]
+            paramstring, command = match.groups()
+            if command == BEL:
+                if paramstring.count(";") == 1:
+                    params = paramstring.split(";")
+                    # 0 - change title and icon (we will only change title)
+                    # 1 - change icon (we don't support this)
+                    # 2 - change title
+                    if params[0] in '02':
+                        winterm.set_title(params[1])
+        return text
+
+
+    def flush(self):
+        self.wrapped.flush()
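A minimal sketch of the strip path described above: wrapping a non-tty stream makes the converter remove ANSI sequences rather than translate them (io.StringIO is just a stand-in for any non-tty stream):

    import io
    from pip._vendor.colorama import Fore, Style
    from pip._vendor.colorama.ansitowin32 import AnsiToWin32

    buffer = io.StringIO()
    wrapper = AnsiToWin32(buffer)                 # strip=True, convert=False for a non-tty stream
    wrapper.stream.write(Fore.RED + "plain text" + Style.RESET_ALL)
    print(repr(buffer.getvalue()))                # expected: 'plain text', escapes stripped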
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/initialise.py b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/initialise.py
new file mode 100644
index 0000000..d5fd4b7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/initialise.py
@@ -0,0 +1,121 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import atexit
+import contextlib
+import sys
+
+from .ansitowin32 import AnsiToWin32
+
+
+def _wipe_internal_state_for_tests():
+    global orig_stdout, orig_stderr
+    orig_stdout = None
+    orig_stderr = None
+
+    global wrapped_stdout, wrapped_stderr
+    wrapped_stdout = None
+    wrapped_stderr = None
+
+    global atexit_done
+    atexit_done = False
+
+    global fixed_windows_console
+    fixed_windows_console = False
+
+    try:
+        # no-op if it wasn't registered
+        atexit.unregister(reset_all)
+    except AttributeError:
+        # python 2: no atexit.unregister. Oh well, we did our best.
+        pass
+
+
+def reset_all():
+    if AnsiToWin32 is not None:    # Issue #74: objects might become None at exit
+        AnsiToWin32(orig_stdout).reset_all()
+
+
+def init(autoreset=False, convert=None, strip=None, wrap=True):
+
+    if not wrap and any([autoreset, convert, strip]):
+        raise ValueError('wrap=False conflicts with any other arg=True')
+
+    global wrapped_stdout, wrapped_stderr
+    global orig_stdout, orig_stderr
+
+    orig_stdout = sys.stdout
+    orig_stderr = sys.stderr
+
+    if sys.stdout is None:
+        wrapped_stdout = None
+    else:
+        sys.stdout = wrapped_stdout = \
+            wrap_stream(orig_stdout, convert, strip, autoreset, wrap)
+    if sys.stderr is None:
+        wrapped_stderr = None
+    else:
+        sys.stderr = wrapped_stderr = \
+            wrap_stream(orig_stderr, convert, strip, autoreset, wrap)
+
+    global atexit_done
+    if not atexit_done:
+        atexit.register(reset_all)
+        atexit_done = True
+
+
+def deinit():
+    if orig_stdout is not None:
+        sys.stdout = orig_stdout
+    if orig_stderr is not None:
+        sys.stderr = orig_stderr
+
+
+def just_fix_windows_console():
+    global fixed_windows_console
+
+    if sys.platform != "win32":
+        return
+    if fixed_windows_console:
+        return
+    if wrapped_stdout is not None or wrapped_stderr is not None:
+        # Someone already ran init() and it did stuff, so we won't second-guess them
+        return
+
+    # On newer versions of Windows, AnsiToWin32.__init__ will implicitly enable the
+    # native ANSI support in the console as a side-effect. We only need to actually
+    # replace sys.stdout/stderr if we're in the old-style conversion mode.
+    new_stdout = AnsiToWin32(sys.stdout, convert=None, strip=None, autoreset=False)
+    if new_stdout.convert:
+        sys.stdout = new_stdout
+    new_stderr = AnsiToWin32(sys.stderr, convert=None, strip=None, autoreset=False)
+    if new_stderr.convert:
+        sys.stderr = new_stderr
+
+    fixed_windows_console = True
+
+@contextlib.contextmanager
+def colorama_text(*args, **kwargs):
+    init(*args, **kwargs)
+    try:
+        yield
+    finally:
+        deinit()
+
+
+def reinit():
+    if wrapped_stdout is not None:
+        sys.stdout = wrapped_stdout
+    if wrapped_stderr is not None:
+        sys.stderr = wrapped_stderr
+
+
+def wrap_stream(stream, convert, strip, autoreset, wrap):
+    if wrap:
+        wrapper = AnsiToWin32(stream,
+            convert=convert, strip=strip, autoreset=autoreset)
+        if wrapper.should_wrap():
+            stream = wrapper.stream
+    return stream
+
+
+# Use this for initial setup as well, to reduce code duplication
+_wipe_internal_state_for_tests()
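As a quick orientation for the module above, a minimal usage sketch of the init()/deinit() pair. It assumes the standalone colorama distribution, where init, deinit and the colour constants are re-exported from the package root; in this tree the module lives under pip._vendor and is not meant to be imported directly.

    import colorama
    from colorama import Fore, Style

    colorama.init(autoreset=True)          # wraps sys.stdout/stderr via wrap_stream()
    print(Fore.GREEN + 'wrapped output')   # autoreset resets the style after each write
    colorama.deinit()                      # restores the original streams captured by init()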
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__init__.py
new file mode 100644
index 0000000..8c5661e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__init__.py
@@ -0,0 +1 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/ansi_test.py b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/ansi_test.py
new file mode 100644
index 0000000..0a20c80
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/ansi_test.py
@@ -0,0 +1,76 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import sys
+from unittest import TestCase, main
+
+from ..ansi import Back, Fore, Style
+from ..ansitowin32 import AnsiToWin32
+
+stdout_orig = sys.stdout
+stderr_orig = sys.stderr
+
+
+class AnsiTest(TestCase):
+
+    def setUp(self):
+        # sanity check: stdout should be a file or StringIO object.
+        # It will only be AnsiToWin32 if init() has previously wrapped it
+        self.assertNotEqual(type(sys.stdout), AnsiToWin32)
+        self.assertNotEqual(type(sys.stderr), AnsiToWin32)
+
+    def tearDown(self):
+        sys.stdout = stdout_orig
+        sys.stderr = stderr_orig
+
+
+    def testForeAttributes(self):
+        self.assertEqual(Fore.BLACK, '\033[30m')
+        self.assertEqual(Fore.RED, '\033[31m')
+        self.assertEqual(Fore.GREEN, '\033[32m')
+        self.assertEqual(Fore.YELLOW, '\033[33m')
+        self.assertEqual(Fore.BLUE, '\033[34m')
+        self.assertEqual(Fore.MAGENTA, '\033[35m')
+        self.assertEqual(Fore.CYAN, '\033[36m')
+        self.assertEqual(Fore.WHITE, '\033[37m')
+        self.assertEqual(Fore.RESET, '\033[39m')
+
+        # Check the light, extended versions.
+        self.assertEqual(Fore.LIGHTBLACK_EX, '\033[90m')
+        self.assertEqual(Fore.LIGHTRED_EX, '\033[91m')
+        self.assertEqual(Fore.LIGHTGREEN_EX, '\033[92m')
+        self.assertEqual(Fore.LIGHTYELLOW_EX, '\033[93m')
+        self.assertEqual(Fore.LIGHTBLUE_EX, '\033[94m')
+        self.assertEqual(Fore.LIGHTMAGENTA_EX, '\033[95m')
+        self.assertEqual(Fore.LIGHTCYAN_EX, '\033[96m')
+        self.assertEqual(Fore.LIGHTWHITE_EX, '\033[97m')
+
+
+    def testBackAttributes(self):
+        self.assertEqual(Back.BLACK, '\033[40m')
+        self.assertEqual(Back.RED, '\033[41m')
+        self.assertEqual(Back.GREEN, '\033[42m')
+        self.assertEqual(Back.YELLOW, '\033[43m')
+        self.assertEqual(Back.BLUE, '\033[44m')
+        self.assertEqual(Back.MAGENTA, '\033[45m')
+        self.assertEqual(Back.CYAN, '\033[46m')
+        self.assertEqual(Back.WHITE, '\033[47m')
+        self.assertEqual(Back.RESET, '\033[49m')
+
+        # Check the light, extended versions.
+        self.assertEqual(Back.LIGHTBLACK_EX, '\033[100m')
+        self.assertEqual(Back.LIGHTRED_EX, '\033[101m')
+        self.assertEqual(Back.LIGHTGREEN_EX, '\033[102m')
+        self.assertEqual(Back.LIGHTYELLOW_EX, '\033[103m')
+        self.assertEqual(Back.LIGHTBLUE_EX, '\033[104m')
+        self.assertEqual(Back.LIGHTMAGENTA_EX, '\033[105m')
+        self.assertEqual(Back.LIGHTCYAN_EX, '\033[106m')
+        self.assertEqual(Back.LIGHTWHITE_EX, '\033[107m')
+
+
+    def testStyleAttributes(self):
+        self.assertEqual(Style.DIM, '\033[2m')
+        self.assertEqual(Style.NORMAL, '\033[22m')
+        self.assertEqual(Style.BRIGHT, '\033[1m')
+
+
+if __name__ == '__main__':
+    main()
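Since the constants asserted above are plain SGR escape strings, they compose by simple string concatenation. A tiny sketch (import path assumes the standalone package; Style.RESET_ALL is '\033[0m', which this particular test file does not assert):

    from colorama.ansi import Fore, Style

    msg = Style.BRIGHT + Fore.RED + 'error' + Style.RESET_ALL
    assert msg == '\033[1m' + '\033[31m' + 'error' + '\033[0m'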
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/ansitowin32_test.py b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/ansitowin32_test.py
new file mode 100644
index 0000000..91ca551
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/ansitowin32_test.py
@@ -0,0 +1,294 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+from io import StringIO, TextIOWrapper
+from unittest import TestCase, main
+try:
+    from contextlib import ExitStack
+except ImportError:
+    # python 2
+    from contextlib2 import ExitStack
+
+try:
+    from unittest.mock import MagicMock, Mock, patch
+except ImportError:
+    from mock import MagicMock, Mock, patch
+
+from ..ansitowin32 import AnsiToWin32, StreamWrapper
+from ..win32 import ENABLE_VIRTUAL_TERMINAL_PROCESSING
+from .utils import osname
+
+
+class StreamWrapperTest(TestCase):
+
+    def testIsAProxy(self):
+        mockStream = Mock()
+        wrapper = StreamWrapper(mockStream, None)
+        self.assertTrue( wrapper.random_attr is mockStream.random_attr )
+
+    def testDelegatesWrite(self):
+        mockStream = Mock()
+        mockConverter = Mock()
+        wrapper = StreamWrapper(mockStream, mockConverter)
+        wrapper.write('hello')
+        self.assertEqual(mockConverter.write.call_args, (('hello',), {}))
+
+    def testDelegatesContext(self):
+        mockConverter = Mock()
+        s = StringIO()
+        with StreamWrapper(s, mockConverter) as fp:
+            fp.write(u'hello')
+        self.assertTrue(s.closed)
+
+    def testProxyNoContextManager(self):
+        mockStream = MagicMock()
+        mockStream.__enter__.side_effect = AttributeError()
+        mockConverter = Mock()
+        with self.assertRaises(AttributeError) as excinfo:
+            with StreamWrapper(mockStream, mockConverter) as wrapper:
+                wrapper.write('hello')
+
+    def test_closed_shouldnt_raise_on_closed_stream(self):
+        stream = StringIO()
+        stream.close()
+        wrapper = StreamWrapper(stream, None)
+        self.assertEqual(wrapper.closed, True)
+
+    def test_closed_shouldnt_raise_on_detached_stream(self):
+        stream = TextIOWrapper(StringIO())
+        stream.detach()
+        wrapper = StreamWrapper(stream, None)
+        self.assertEqual(wrapper.closed, True)
+
+class AnsiToWin32Test(TestCase):
+
+    def testInit(self):
+        mockStdout = Mock()
+        auto = Mock()
+        stream = AnsiToWin32(mockStdout, autoreset=auto)
+        self.assertEqual(stream.wrapped, mockStdout)
+        self.assertEqual(stream.autoreset, auto)
+
+    @patch('colorama.ansitowin32.winterm', None)
+    @patch('colorama.ansitowin32.winapi_test', lambda *_: True)
+    def testStripIsTrueOnWindows(self):
+        with osname('nt'):
+            mockStdout = Mock()
+            stream = AnsiToWin32(mockStdout)
+            self.assertTrue(stream.strip)
+
+    def testStripIsFalseOffWindows(self):
+        with osname('posix'):
+            mockStdout = Mock(closed=False)
+            stream = AnsiToWin32(mockStdout)
+            self.assertFalse(stream.strip)
+
+    def testWriteStripsAnsi(self):
+        mockStdout = Mock()
+        stream = AnsiToWin32(mockStdout)
+        stream.wrapped = Mock()
+        stream.write_and_convert = Mock()
+        stream.strip = True
+
+        stream.write('abc')
+
+        self.assertFalse(stream.wrapped.write.called)
+        self.assertEqual(stream.write_and_convert.call_args, (('abc',), {}))
+
+    def testWriteDoesNotStripAnsi(self):
+        mockStdout = Mock()
+        stream = AnsiToWin32(mockStdout)
+        stream.wrapped = Mock()
+        stream.write_and_convert = Mock()
+        stream.strip = False
+        stream.convert = False
+
+        stream.write('abc')
+
+        self.assertFalse(stream.write_and_convert.called)
+        self.assertEqual(stream.wrapped.write.call_args, (('abc',), {}))
+
+    def assert_autoresets(self, convert, autoreset=True):
+        stream = AnsiToWin32(Mock())
+        stream.convert = convert
+        stream.reset_all = Mock()
+        stream.autoreset = autoreset
+        stream.winterm = Mock()
+
+        stream.write('abc')
+
+        self.assertEqual(stream.reset_all.called, autoreset)
+
+    def testWriteAutoresets(self):
+        self.assert_autoresets(convert=True)
+        self.assert_autoresets(convert=False)
+        self.assert_autoresets(convert=True, autoreset=False)
+        self.assert_autoresets(convert=False, autoreset=False)
+
+    def testWriteAndConvertWritesPlainText(self):
+        stream = AnsiToWin32(Mock())
+        stream.write_and_convert( 'abc' )
+        self.assertEqual( stream.wrapped.write.call_args, (('abc',), {}) )
+
+    def testWriteAndConvertStripsAllValidAnsi(self):
+        stream = AnsiToWin32(Mock())
+        stream.call_win32 = Mock()
+        data = [
+            'abc\033[mdef',
+            'abc\033[0mdef',
+            'abc\033[2mdef',
+            'abc\033[02mdef',
+            'abc\033[002mdef',
+            'abc\033[40mdef',
+            'abc\033[040mdef',
+            'abc\033[0;1mdef',
+            'abc\033[40;50mdef',
+            'abc\033[50;30;40mdef',
+            'abc\033[Adef',
+            'abc\033[0Gdef',
+            'abc\033[1;20;128Hdef',
+        ]
+        for datum in data:
+            stream.wrapped.write.reset_mock()
+            stream.write_and_convert( datum )
+            self.assertEqual(
+               [args[0] for args in stream.wrapped.write.call_args_list],
+               [ ('abc',), ('def',) ]
+            )
+
+    def testWriteAndConvertSkipsEmptySnippets(self):
+        stream = AnsiToWin32(Mock())
+        stream.call_win32 = Mock()
+        stream.write_and_convert( '\033[40m\033[41m' )
+        self.assertFalse( stream.wrapped.write.called )
+
+    def testWriteAndConvertCallsWin32WithParamsAndCommand(self):
+        stream = AnsiToWin32(Mock())
+        stream.convert = True
+        stream.call_win32 = Mock()
+        stream.extract_params = Mock(return_value='params')
+        data = {
+            'abc\033[adef':         ('a', 'params'),
+            'abc\033[;;bdef':       ('b', 'params'),
+            'abc\033[0cdef':        ('c', 'params'),
+            'abc\033[;;0;;Gdef':    ('G', 'params'),
+            'abc\033[1;20;128Hdef': ('H', 'params'),
+        }
+        for datum, expected in data.items():
+            stream.call_win32.reset_mock()
+            stream.write_and_convert( datum )
+            self.assertEqual( stream.call_win32.call_args[0], expected )
+
+    def test_reset_all_shouldnt_raise_on_closed_orig_stdout(self):
+        stream = StringIO()
+        converter = AnsiToWin32(stream)
+        stream.close()
+
+        converter.reset_all()
+
+    def test_wrap_shouldnt_raise_on_closed_orig_stdout(self):
+        stream = StringIO()
+        stream.close()
+        with \
+            patch("colorama.ansitowin32.os.name", "nt"), \
+            patch("colorama.ansitowin32.winapi_test", lambda: True):
+                converter = AnsiToWin32(stream)
+        self.assertTrue(converter.strip)
+        self.assertFalse(converter.convert)
+
+    def test_wrap_shouldnt_raise_on_missing_closed_attr(self):
+        with \
+            patch("colorama.ansitowin32.os.name", "nt"), \
+            patch("colorama.ansitowin32.winapi_test", lambda: True):
+                converter = AnsiToWin32(object())
+        self.assertTrue(converter.strip)
+        self.assertFalse(converter.convert)
+
+    def testExtractParams(self):
+        stream = AnsiToWin32(Mock())
+        data = {
+            '':               (0,),
+            ';;':             (0,),
+            '2':              (2,),
+            ';;002;;':        (2,),
+            '0;1':            (0, 1),
+            ';;003;;456;;':   (3, 456),
+            '11;22;33;44;55': (11, 22, 33, 44, 55),
+        }
+        for datum, expected in data.items():
+            self.assertEqual(stream.extract_params('m', datum), expected)
+
+    def testCallWin32UsesLookup(self):
+        listener = Mock()
+        stream = AnsiToWin32(listener)
+        stream.win32_calls = {
+            1: (lambda *_, **__: listener(11),),
+            2: (lambda *_, **__: listener(22),),
+            3: (lambda *_, **__: listener(33),),
+        }
+        stream.call_win32('m', (3, 1, 99, 2))
+        self.assertEqual(
+            [a[0][0] for a in listener.call_args_list],
+            [33, 11, 22] )
+
+    def test_osc_codes(self):
+        mockStdout = Mock()
+        stream = AnsiToWin32(mockStdout, convert=True)
+        with patch('colorama.ansitowin32.winterm') as winterm:
+            data = [
+                '\033]0\x07',                      # missing arguments
+                '\033]0;foo\x08',                  # wrong OSC command
+                '\033]0;colorama_test_title\x07',  # should work
+                '\033]1;colorama_test_title\x07',  # wrong set command
+                '\033]2;colorama_test_title\x07',  # should work
+                '\033]' + ';' * 64 + '\x08',       # see issue #247
+            ]
+            for code in data:
+                stream.write(code)
+            self.assertEqual(winterm.set_title.call_count, 2)
+
+    def test_native_windows_ansi(self):
+        with ExitStack() as stack:
+            def p(a, b):
+                stack.enter_context(patch(a, b, create=True))
+            # Pretend to be on Windows
+            p("colorama.ansitowin32.os.name", "nt")
+            p("colorama.ansitowin32.winapi_test", lambda: True)
+            p("colorama.win32.winapi_test", lambda: True)
+            p("colorama.winterm.win32.windll", "non-None")
+            p("colorama.winterm.get_osfhandle", lambda _: 1234)
+
+            # Pretend that our mock stream has native ANSI support
+            p(
+                "colorama.winterm.win32.GetConsoleMode",
+                lambda _: ENABLE_VIRTUAL_TERMINAL_PROCESSING,
+            )
+            SetConsoleMode = Mock()
+            p("colorama.winterm.win32.SetConsoleMode", SetConsoleMode)
+
+            stdout = Mock()
+            stdout.closed = False
+            stdout.isatty.return_value = True
+            stdout.fileno.return_value = 1
+
+            # Our fake console says it has native vt support, so AnsiToWin32 should
+            # enable that support and do nothing else.
+            stream = AnsiToWin32(stdout)
+            SetConsoleMode.assert_called_with(1234, ENABLE_VIRTUAL_TERMINAL_PROCESSING)
+            self.assertFalse(stream.strip)
+            self.assertFalse(stream.convert)
+            self.assertFalse(stream.should_wrap())
+
+            # Now let's pretend we're on an old Windows console, that doesn't have
+            # native ANSI support.
+            p("colorama.winterm.win32.GetConsoleMode", lambda _: 0)
+            SetConsoleMode = Mock()
+            p("colorama.winterm.win32.SetConsoleMode", SetConsoleMode)
+
+            stream = AnsiToWin32(stdout)
+            SetConsoleMode.assert_called_with(1234, ENABLE_VIRTUAL_TERMINAL_PROCESSING)
+            self.assertTrue(stream.strip)
+            self.assertTrue(stream.convert)
+            self.assertTrue(stream.should_wrap())
+
+
+if __name__ == '__main__':
+    main()
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/initialise_test.py b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/initialise_test.py
new file mode 100644
index 0000000..89f9b07
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/initialise_test.py
@@ -0,0 +1,189 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import sys
+from unittest import TestCase, main, skipUnless
+
+try:
+    from unittest.mock import patch, Mock
+except ImportError:
+    from mock import patch, Mock
+
+from ..ansitowin32 import StreamWrapper
+from ..initialise import init, just_fix_windows_console, _wipe_internal_state_for_tests
+from .utils import osname, replace_by
+
+orig_stdout = sys.stdout
+orig_stderr = sys.stderr
+
+
+class InitTest(TestCase):
+
+    @skipUnless(sys.stdout.isatty(), "sys.stdout is not a tty")
+    def setUp(self):
+        # sanity check
+        self.assertNotWrapped()
+
+    def tearDown(self):
+        _wipe_internal_state_for_tests()
+        sys.stdout = orig_stdout
+        sys.stderr = orig_stderr
+
+    def assertWrapped(self):
+        self.assertIsNot(sys.stdout, orig_stdout, 'stdout should be wrapped')
+        self.assertIsNot(sys.stderr, orig_stderr, 'stderr should be wrapped')
+        self.assertTrue(isinstance(sys.stdout, StreamWrapper),
+            'bad stdout wrapper')
+        self.assertTrue(isinstance(sys.stderr, StreamWrapper),
+            'bad stderr wrapper')
+
+    def assertNotWrapped(self):
+        self.assertIs(sys.stdout, orig_stdout, 'stdout should not be wrapped')
+        self.assertIs(sys.stderr, orig_stderr, 'stderr should not be wrapped')
+
+    @patch('colorama.initialise.reset_all')
+    @patch('colorama.ansitowin32.winapi_test', lambda *_: True)
+    @patch('colorama.ansitowin32.enable_vt_processing', lambda *_: False)
+    def testInitWrapsOnWindows(self, _):
+        with osname("nt"):
+            init()
+            self.assertWrapped()
+
+    @patch('colorama.initialise.reset_all')
+    @patch('colorama.ansitowin32.winapi_test', lambda *_: False)
+    def testInitDoesntWrapOnEmulatedWindows(self, _):
+        with osname("nt"):
+            init()
+            self.assertNotWrapped()
+
+    def testInitDoesntWrapOnNonWindows(self):
+        with osname("posix"):
+            init()
+            self.assertNotWrapped()
+
+    def testInitDoesntWrapIfNone(self):
+        with replace_by(None):
+            init()
+            # We can't use assertNotWrapped here because replace_by(None)
+            # changes stdout/stderr already.
+            self.assertIsNone(sys.stdout)
+            self.assertIsNone(sys.stderr)
+
+    def testInitAutoresetOnWrapsOnAllPlatforms(self):
+        with osname("posix"):
+            init(autoreset=True)
+            self.assertWrapped()
+
+    def testInitWrapOffDoesntWrapOnWindows(self):
+        with osname("nt"):
+            init(wrap=False)
+            self.assertNotWrapped()
+
+    def testInitWrapOffIncompatibleWithAutoresetOn(self):
+        self.assertRaises(ValueError, lambda: init(autoreset=True, wrap=False))
+
+    @patch('colorama.win32.SetConsoleTextAttribute')
+    @patch('colorama.initialise.AnsiToWin32')
+    def testAutoResetPassedOn(self, mockATW32, _):
+        with osname("nt"):
+            init(autoreset=True)
+            self.assertEqual(len(mockATW32.call_args_list), 2)
+            self.assertEqual(mockATW32.call_args_list[1][1]['autoreset'], True)
+            self.assertEqual(mockATW32.call_args_list[0][1]['autoreset'], True)
+
+    @patch('colorama.initialise.AnsiToWin32')
+    def testAutoResetChangeable(self, mockATW32):
+        with osname("nt"):
+            init()
+
+            init(autoreset=True)
+            self.assertEqual(len(mockATW32.call_args_list), 4)
+            self.assertEqual(mockATW32.call_args_list[2][1]['autoreset'], True)
+            self.assertEqual(mockATW32.call_args_list[3][1]['autoreset'], True)
+
+            init()
+            self.assertEqual(len(mockATW32.call_args_list), 6)
+            self.assertEqual(
+                mockATW32.call_args_list[4][1]['autoreset'], False)
+            self.assertEqual(
+                mockATW32.call_args_list[5][1]['autoreset'], False)
+
+
+    @patch('colorama.initialise.atexit.register')
+    def testAtexitRegisteredOnlyOnce(self, mockRegister):
+        init()
+        self.assertTrue(mockRegister.called)
+        mockRegister.reset_mock()
+        init()
+        self.assertFalse(mockRegister.called)
+
+
+class JustFixWindowsConsoleTest(TestCase):
+    def _reset(self):
+        _wipe_internal_state_for_tests()
+        sys.stdout = orig_stdout
+        sys.stderr = orig_stderr
+
+    def tearDown(self):
+        self._reset()
+
+    @patch("colorama.ansitowin32.winapi_test", lambda: True)
+    def testJustFixWindowsConsole(self):
+        if sys.platform != "win32":
+            # just_fix_windows_console should be a no-op
+            just_fix_windows_console()
+            self.assertIs(sys.stdout, orig_stdout)
+            self.assertIs(sys.stderr, orig_stderr)
+        else:
+            def fake_std():
+                # Emulate stdout=not a tty, stderr=tty
+                # to check that we handle both cases correctly
+                stdout = Mock()
+                stdout.closed = False
+                stdout.isatty.return_value = False
+                stdout.fileno.return_value = 1
+                sys.stdout = stdout
+
+                stderr = Mock()
+                stderr.closed = False
+                stderr.isatty.return_value = True
+                stderr.fileno.return_value = 2
+                sys.stderr = stderr
+
+            for native_ansi in [False, True]:
+                with patch(
+                    'colorama.ansitowin32.enable_vt_processing',
+                    lambda *_: native_ansi
+                ):
+                    self._reset()
+                    fake_std()
+
+                    # Regular single-call test
+                    prev_stdout = sys.stdout
+                    prev_stderr = sys.stderr
+                    just_fix_windows_console()
+                    self.assertIs(sys.stdout, prev_stdout)
+                    if native_ansi:
+                        self.assertIs(sys.stderr, prev_stderr)
+                    else:
+                        self.assertIsNot(sys.stderr, prev_stderr)
+
+                    # second call without resetting is always a no-op
+                    prev_stdout = sys.stdout
+                    prev_stderr = sys.stderr
+                    just_fix_windows_console()
+                    self.assertIs(sys.stdout, prev_stdout)
+                    self.assertIs(sys.stderr, prev_stderr)
+
+                    self._reset()
+                    fake_std()
+
+                    # If init() runs first, just_fix_windows_console should be a no-op
+                    init()
+                    prev_stdout = sys.stdout
+                    prev_stderr = sys.stderr
+                    just_fix_windows_console()
+                    self.assertIs(prev_stdout, sys.stdout)
+                    self.assertIs(prev_stderr, sys.stderr)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/isatty_test.py b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/isatty_test.py
new file mode 100644
index 0000000..0f84e4b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/isatty_test.py
@@ -0,0 +1,57 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import sys
+from unittest import TestCase, main
+
+from ..ansitowin32 import StreamWrapper, AnsiToWin32
+from .utils import pycharm, replace_by, replace_original_by, StreamTTY, StreamNonTTY
+
+
+def is_a_tty(stream):
+    return StreamWrapper(stream, None).isatty()
+
+class IsattyTest(TestCase):
+
+    def test_TTY(self):
+        tty = StreamTTY()
+        self.assertTrue(is_a_tty(tty))
+        with pycharm():
+            self.assertTrue(is_a_tty(tty))
+
+    def test_nonTTY(self):
+        non_tty = StreamNonTTY()
+        self.assertFalse(is_a_tty(non_tty))
+        with pycharm():
+            self.assertFalse(is_a_tty(non_tty))
+
+    def test_withPycharm(self):
+        with pycharm():
+            self.assertTrue(is_a_tty(sys.stderr))
+            self.assertTrue(is_a_tty(sys.stdout))
+
+    def test_withPycharmTTYOverride(self):
+        tty = StreamTTY()
+        with pycharm(), replace_by(tty):
+            self.assertTrue(is_a_tty(tty))
+
+    def test_withPycharmNonTTYOverride(self):
+        non_tty = StreamNonTTY()
+        with pycharm(), replace_by(non_tty):
+            self.assertFalse(is_a_tty(non_tty))
+
+    def test_withPycharmNoneOverride(self):
+        with pycharm():
+            with replace_by(None), replace_original_by(None):
+                self.assertFalse(is_a_tty(None))
+                self.assertFalse(is_a_tty(StreamNonTTY()))
+                self.assertTrue(is_a_tty(StreamTTY()))
+
+    def test_withPycharmStreamWrapped(self):
+        with pycharm():
+            self.assertTrue(AnsiToWin32(StreamTTY()).stream.isatty())
+            self.assertFalse(AnsiToWin32(StreamNonTTY()).stream.isatty())
+            self.assertTrue(AnsiToWin32(sys.stdout).stream.isatty())
+            self.assertTrue(AnsiToWin32(sys.stderr).stream.isatty())
+
+
+if __name__ == '__main__':
+    main()
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/utils.py b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/utils.py
new file mode 100644
index 0000000..472fafb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/utils.py
@@ -0,0 +1,49 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+from contextlib import contextmanager
+from io import StringIO
+import sys
+import os
+
+
+class StreamTTY(StringIO):
+    def isatty(self):
+        return True
+
+class StreamNonTTY(StringIO):
+    def isatty(self):
+        return False
+
+@contextmanager
+def osname(name):
+    orig = os.name
+    os.name = name
+    yield
+    os.name = orig
+
+@contextmanager
+def replace_by(stream):
+    orig_stdout = sys.stdout
+    orig_stderr = sys.stderr
+    sys.stdout = stream
+    sys.stderr = stream
+    yield
+    sys.stdout = orig_stdout
+    sys.stderr = orig_stderr
+
+@contextmanager
+def replace_original_by(stream):
+    orig_stdout = sys.__stdout__
+    orig_stderr = sys.__stderr__
+    sys.__stdout__ = stream
+    sys.__stderr__ = stream
+    yield
+    sys.__stdout__ = orig_stdout
+    sys.__stderr__ = orig_stderr
+
+@contextmanager
+def pycharm():
+    os.environ["PYCHARM_HOSTED"] = "1"
+    non_tty = StreamNonTTY()
+    with replace_by(non_tty), replace_original_by(non_tty):
+        yield
+    del os.environ["PYCHARM_HOSTED"]
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/winterm_test.py b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/winterm_test.py
new file mode 100644
index 0000000..d0955f9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/winterm_test.py
@@ -0,0 +1,131 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import sys
+from unittest import TestCase, main, skipUnless
+
+try:
+    from unittest.mock import Mock, patch
+except ImportError:
+    from mock import Mock, patch
+
+from ..winterm import WinColor, WinStyle, WinTerm
+
+
+class WinTermTest(TestCase):
+
+    @patch('colorama.winterm.win32')
+    def testInit(self, mockWin32):
+        mockAttr = Mock()
+        mockAttr.wAttributes = 7 + 6 * 16 + 8
+        mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr
+        term = WinTerm()
+        self.assertEqual(term._fore, 7)
+        self.assertEqual(term._back, 6)
+        self.assertEqual(term._style, 8)
+
+    @skipUnless(sys.platform.startswith("win"), "requires Windows")
+    def testGetAttrs(self):
+        term = WinTerm()
+
+        term._fore = 0
+        term._back = 0
+        term._style = 0
+        self.assertEqual(term.get_attrs(), 0)
+
+        term._fore = WinColor.YELLOW
+        self.assertEqual(term.get_attrs(), WinColor.YELLOW)
+
+        term._back = WinColor.MAGENTA
+        self.assertEqual(
+            term.get_attrs(),
+            WinColor.YELLOW + WinColor.MAGENTA * 16)
+
+        term._style = WinStyle.BRIGHT
+        self.assertEqual(
+            term.get_attrs(),
+            WinColor.YELLOW + WinColor.MAGENTA * 16 + WinStyle.BRIGHT)
+
+    @patch('colorama.winterm.win32')
+    def testResetAll(self, mockWin32):
+        mockAttr = Mock()
+        mockAttr.wAttributes = 1 + 2 * 16 + 8
+        mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr
+        term = WinTerm()
+
+        term.set_console = Mock()
+        term._fore = -1
+        term._back = -1
+        term._style = -1
+
+        term.reset_all()
+
+        self.assertEqual(term._fore, 1)
+        self.assertEqual(term._back, 2)
+        self.assertEqual(term._style, 8)
+        self.assertEqual(term.set_console.called, True)
+
+    @skipUnless(sys.platform.startswith("win"), "requires Windows")
+    def testFore(self):
+        term = WinTerm()
+        term.set_console = Mock()
+        term._fore = 0
+
+        term.fore(5)
+
+        self.assertEqual(term._fore, 5)
+        self.assertEqual(term.set_console.called, True)
+
+    @skipUnless(sys.platform.startswith("win"), "requires Windows")
+    def testBack(self):
+        term = WinTerm()
+        term.set_console = Mock()
+        term._back = 0
+
+        term.back(5)
+
+        self.assertEqual(term._back, 5)
+        self.assertEqual(term.set_console.called, True)
+
+    @skipUnless(sys.platform.startswith("win"), "requires Windows")
+    def testStyle(self):
+        term = WinTerm()
+        term.set_console = Mock()
+        term._style = 0
+
+        term.style(22)
+
+        self.assertEqual(term._style, 22)
+        self.assertEqual(term.set_console.called, True)
+
+    @patch('colorama.winterm.win32')
+    def testSetConsole(self, mockWin32):
+        mockAttr = Mock()
+        mockAttr.wAttributes = 0
+        mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr
+        term = WinTerm()
+        term.windll = Mock()
+
+        term.set_console()
+
+        self.assertEqual(
+            mockWin32.SetConsoleTextAttribute.call_args,
+            ((mockWin32.STDOUT, term.get_attrs()), {})
+        )
+
+    @patch('colorama.winterm.win32')
+    def testSetConsoleOnStderr(self, mockWin32):
+        mockAttr = Mock()
+        mockAttr.wAttributes = 0
+        mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr
+        term = WinTerm()
+        term.windll = Mock()
+
+        term.set_console(on_stderr=True)
+
+        self.assertEqual(
+            mockWin32.SetConsoleTextAttribute.call_args,
+            ((mockWin32.STDERR, term.get_attrs()), {})
+        )
+
+
+if __name__ == '__main__':
+    main()
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/win32.py b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/win32.py
new file mode 100644
index 0000000..841b0e2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/win32.py
@@ -0,0 +1,180 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+
+# from winbase.h
+STDOUT = -11
+STDERR = -12
+
+ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004
+
+try:
+    import ctypes
+    from ctypes import LibraryLoader
+    windll = LibraryLoader(ctypes.WinDLL)
+    from ctypes import wintypes
+except (AttributeError, ImportError):
+    windll = None
+    SetConsoleTextAttribute = lambda *_: None
+    winapi_test = lambda *_: None
+else:
+    from ctypes import byref, Structure, c_char, POINTER
+
+    COORD = wintypes._COORD
+
+    class CONSOLE_SCREEN_BUFFER_INFO(Structure):
+        """struct in wincon.h."""
+        _fields_ = [
+            ("dwSize", COORD),
+            ("dwCursorPosition", COORD),
+            ("wAttributes", wintypes.WORD),
+            ("srWindow", wintypes.SMALL_RECT),
+            ("dwMaximumWindowSize", COORD),
+        ]
+        def __str__(self):
+            return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % (
+                self.dwSize.Y, self.dwSize.X
+                , self.dwCursorPosition.Y, self.dwCursorPosition.X
+                , self.wAttributes
+                , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right
+                , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X
+            )
+
+    _GetStdHandle = windll.kernel32.GetStdHandle
+    _GetStdHandle.argtypes = [
+        wintypes.DWORD,
+    ]
+    _GetStdHandle.restype = wintypes.HANDLE
+
+    _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo
+    _GetConsoleScreenBufferInfo.argtypes = [
+        wintypes.HANDLE,
+        POINTER(CONSOLE_SCREEN_BUFFER_INFO),
+    ]
+    _GetConsoleScreenBufferInfo.restype = wintypes.BOOL
+
+    _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute
+    _SetConsoleTextAttribute.argtypes = [
+        wintypes.HANDLE,
+        wintypes.WORD,
+    ]
+    _SetConsoleTextAttribute.restype = wintypes.BOOL
+
+    _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition
+    _SetConsoleCursorPosition.argtypes = [
+        wintypes.HANDLE,
+        COORD,
+    ]
+    _SetConsoleCursorPosition.restype = wintypes.BOOL
+
+    _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA
+    _FillConsoleOutputCharacterA.argtypes = [
+        wintypes.HANDLE,
+        c_char,
+        wintypes.DWORD,
+        COORD,
+        POINTER(wintypes.DWORD),
+    ]
+    _FillConsoleOutputCharacterA.restype = wintypes.BOOL
+
+    _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute
+    _FillConsoleOutputAttribute.argtypes = [
+        wintypes.HANDLE,
+        wintypes.WORD,
+        wintypes.DWORD,
+        COORD,
+        POINTER(wintypes.DWORD),
+    ]
+    _FillConsoleOutputAttribute.restype = wintypes.BOOL
+
+    _SetConsoleTitleW = windll.kernel32.SetConsoleTitleW
+    _SetConsoleTitleW.argtypes = [
+        wintypes.LPCWSTR
+    ]
+    _SetConsoleTitleW.restype = wintypes.BOOL
+
+    _GetConsoleMode = windll.kernel32.GetConsoleMode
+    _GetConsoleMode.argtypes = [
+        wintypes.HANDLE,
+        POINTER(wintypes.DWORD)
+    ]
+    _GetConsoleMode.restype = wintypes.BOOL
+
+    _SetConsoleMode = windll.kernel32.SetConsoleMode
+    _SetConsoleMode.argtypes = [
+        wintypes.HANDLE,
+        wintypes.DWORD
+    ]
+    _SetConsoleMode.restype = wintypes.BOOL
+
+    def _winapi_test(handle):
+        csbi = CONSOLE_SCREEN_BUFFER_INFO()
+        success = _GetConsoleScreenBufferInfo(
+            handle, byref(csbi))
+        return bool(success)
+
+    def winapi_test():
+        return any(_winapi_test(h) for h in
+                   (_GetStdHandle(STDOUT), _GetStdHandle(STDERR)))
+
+    def GetConsoleScreenBufferInfo(stream_id=STDOUT):
+        handle = _GetStdHandle(stream_id)
+        csbi = CONSOLE_SCREEN_BUFFER_INFO()
+        success = _GetConsoleScreenBufferInfo(
+            handle, byref(csbi))
+        return csbi
+
+    def SetConsoleTextAttribute(stream_id, attrs):
+        handle = _GetStdHandle(stream_id)
+        return _SetConsoleTextAttribute(handle, attrs)
+
+    def SetConsoleCursorPosition(stream_id, position, adjust=True):
+        position = COORD(*position)
+        # If the position is out of range, do nothing.
+        if position.Y <= 0 or position.X <= 0:
+            return
+        # Adjust for Windows' SetConsoleCursorPosition:
+        #    1. being 0-based, while ANSI is 1-based.
+        #    2. expecting (x,y), while ANSI uses (y,x).
+        adjusted_position = COORD(position.Y - 1, position.X - 1)
+        if adjust:
+            # Adjust for viewport's scroll position
+            sr = GetConsoleScreenBufferInfo(STDOUT).srWindow
+            adjusted_position.Y += sr.Top
+            adjusted_position.X += sr.Left
+        # Resume normal processing
+        handle = _GetStdHandle(stream_id)
+        return _SetConsoleCursorPosition(handle, adjusted_position)
+
+    def FillConsoleOutputCharacter(stream_id, char, length, start):
+        handle = _GetStdHandle(stream_id)
+        char = c_char(char.encode())
+        length = wintypes.DWORD(length)
+        num_written = wintypes.DWORD(0)
+        # Note that this is hard-coded for ANSI (vs wide) bytes.
+        success = _FillConsoleOutputCharacterA(
+            handle, char, length, start, byref(num_written))
+        return num_written.value
+
+    def FillConsoleOutputAttribute(stream_id, attr, length, start):
+        ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )'''
+        handle = _GetStdHandle(stream_id)
+        attribute = wintypes.WORD(attr)
+        length = wintypes.DWORD(length)
+        num_written = wintypes.DWORD(0)
+        # Note that this is hard-coded for ANSI (vs wide) bytes.
+        return _FillConsoleOutputAttribute(
+            handle, attribute, length, start, byref(num_written))
+
+    def SetConsoleTitle(title):
+        return _SetConsoleTitleW(title)
+
+    def GetConsoleMode(handle):
+        mode = wintypes.DWORD()
+        success = _GetConsoleMode(handle, byref(mode))
+        if not success:
+            raise ctypes.WinError()
+        return mode.value
+
+    def SetConsoleMode(handle, mode):
+        success = _SetConsoleMode(handle, mode)
+        if not success:
+            raise ctypes.WinError()
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/winterm.py b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/winterm.py
new file mode 100644
index 0000000..aad867e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/colorama/winterm.py
@@ -0,0 +1,195 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+try:
+    from msvcrt import get_osfhandle
+except ImportError:
+    def get_osfhandle(_):
+        raise OSError("This isn't windows!")
+
+
+from . import win32
+
+# from wincon.h
+class WinColor(object):
+    BLACK   = 0
+    BLUE    = 1
+    GREEN   = 2
+    CYAN    = 3
+    RED     = 4
+    MAGENTA = 5
+    YELLOW  = 6
+    GREY    = 7
+
+# from wincon.h
+class WinStyle(object):
+    NORMAL              = 0x00 # dim text, dim background
+    BRIGHT              = 0x08 # bright text, dim background
+    BRIGHT_BACKGROUND   = 0x80 # dim text, bright background
+
+class WinTerm(object):
+
+    def __init__(self):
+        self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes
+        self.set_attrs(self._default)
+        self._default_fore = self._fore
+        self._default_back = self._back
+        self._default_style = self._style
+        # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style.
+        # So that LIGHT_EX colors and BRIGHT style do not clobber each other,
+        # we track them separately, since LIGHT_EX is overwritten by Fore/Back
+        # and BRIGHT is overwritten by Style codes.
+        self._light = 0
+
+    def get_attrs(self):
+        return self._fore + self._back * 16 + (self._style | self._light)
+
+    def set_attrs(self, value):
+        self._fore = value & 7
+        self._back = (value >> 4) & 7
+        self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND)
+
+    def reset_all(self, on_stderr=None):
+        self.set_attrs(self._default)
+        self.set_console(attrs=self._default)
+        self._light = 0
+
+    def fore(self, fore=None, light=False, on_stderr=False):
+        if fore is None:
+            fore = self._default_fore
+        self._fore = fore
+        # Emulate LIGHT_EX with BRIGHT Style
+        if light:
+            self._light |= WinStyle.BRIGHT
+        else:
+            self._light &= ~WinStyle.BRIGHT
+        self.set_console(on_stderr=on_stderr)
+
+    def back(self, back=None, light=False, on_stderr=False):
+        if back is None:
+            back = self._default_back
+        self._back = back
+        # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style
+        if light:
+            self._light |= WinStyle.BRIGHT_BACKGROUND
+        else:
+            self._light &= ~WinStyle.BRIGHT_BACKGROUND
+        self.set_console(on_stderr=on_stderr)
+
+    def style(self, style=None, on_stderr=False):
+        if style is None:
+            style = self._default_style
+        self._style = style
+        self.set_console(on_stderr=on_stderr)
+
+    def set_console(self, attrs=None, on_stderr=False):
+        if attrs is None:
+            attrs = self.get_attrs()
+        handle = win32.STDOUT
+        if on_stderr:
+            handle = win32.STDERR
+        win32.SetConsoleTextAttribute(handle, attrs)
+
+    def get_position(self, handle):
+        position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition
+        # Because Windows coordinates are 0-based,
+        # and win32.SetConsoleCursorPosition expects 1-based.
+        position.X += 1
+        position.Y += 1
+        return position
+
+    def set_cursor_position(self, position=None, on_stderr=False):
+        if position is None:
+            # I'm not currently tracking the position, so there is no default.
+            # position = self.get_position()
+            return
+        handle = win32.STDOUT
+        if on_stderr:
+            handle = win32.STDERR
+        win32.SetConsoleCursorPosition(handle, position)
+
+    def cursor_adjust(self, x, y, on_stderr=False):
+        handle = win32.STDOUT
+        if on_stderr:
+            handle = win32.STDERR
+        position = self.get_position(handle)
+        adjusted_position = (position.Y + y, position.X + x)
+        win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False)
+
+    def erase_screen(self, mode=0, on_stderr=False):
+        # 0 should clear from the cursor to the end of the screen.
+        # 1 should clear from the cursor to the beginning of the screen.
+        # 2 should clear the entire screen, and move cursor to (1,1)
+        handle = win32.STDOUT
+        if on_stderr:
+            handle = win32.STDERR
+        csbi = win32.GetConsoleScreenBufferInfo(handle)
+        # get the number of character cells in the current buffer
+        cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y
+        # get number of character cells before current cursor position
+        cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X
+        if mode == 0:
+            from_coord = csbi.dwCursorPosition
+            cells_to_erase = cells_in_screen - cells_before_cursor
+        elif mode == 1:
+            from_coord = win32.COORD(0, 0)
+            cells_to_erase = cells_before_cursor
+        elif mode == 2:
+            from_coord = win32.COORD(0, 0)
+            cells_to_erase = cells_in_screen
+        else:
+            # invalid mode
+            return
+        # fill the entire screen with blanks
+        win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
+        # now set the buffer's attributes accordingly
+        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
+        if mode == 2:
+            # put the cursor where needed
+            win32.SetConsoleCursorPosition(handle, (1, 1))
+
+    def erase_line(self, mode=0, on_stderr=False):
+        # 0 should clear from the cursor to the end of the line.
+        # 1 should clear from the cursor to the beginning of the line.
+        # 2 should clear the entire line.
+        handle = win32.STDOUT
+        if on_stderr:
+            handle = win32.STDERR
+        csbi = win32.GetConsoleScreenBufferInfo(handle)
+        if mode == 0:
+            from_coord = csbi.dwCursorPosition
+            cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X
+        elif mode == 1:
+            from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
+            cells_to_erase = csbi.dwCursorPosition.X
+        elif mode == 2:
+            from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
+            cells_to_erase = csbi.dwSize.X
+        else:
+            # invalid mode
+            return
+        # fill the entire screen with blanks
+        win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
+        # now set the buffer's attributes accordingly
+        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
+
+    def set_title(self, title):
+        win32.SetConsoleTitle(title)
+
+
+def enable_vt_processing(fd):
+    if win32.windll is None or not win32.winapi_test():
+        return False
+
+    try:
+        handle = get_osfhandle(fd)
+        mode = win32.GetConsoleMode(handle)
+        win32.SetConsoleMode(
+            handle,
+            mode | win32.ENABLE_VIRTUAL_TERMINAL_PROCESSING,
+        )
+
+        mode = win32.GetConsoleMode(handle)
+        if mode & win32.ENABLE_VIRTUAL_TERMINAL_PROCESSING:
+            return True
+    # Can get TypeError in testsuite where 'fd' is a Mock()
+    except (OSError, TypeError):
+        return False
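The console attribute word handled by get_attrs()/set_attrs() above packs the foreground colour in the low three bits, BRIGHT at 0x08, the background colour in bits 4-6 and BRIGHT_BACKGROUND at 0x80. A worked check, consistent with testGetAttrs in winterm_test.py earlier in this diff (import path assumes the standalone package):

    from colorama.winterm import WinColor, WinStyle

    attrs = WinColor.YELLOW + WinColor.MAGENTA * 16 + WinStyle.BRIGHT
    assert attrs == 6 + 5 * 16 + 8 == 0x5E
    # set_attrs() recovers the individual pieces:
    assert attrs & 7 == WinColor.YELLOW           # foreground, bits 0-2
    assert (attrs >> 4) & 7 == WinColor.MAGENTA   # background, bits 4-6
    assert attrs & 0x88 == WinStyle.BRIGHT        # style bits (BRIGHT | BRIGHT_BACKGROUND)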
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/__init__.py
new file mode 100644
index 0000000..e999438
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/__init__.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2023 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+import logging
+
+__version__ = '0.3.8'
+
+
+class DistlibException(Exception):
+    pass
+
+
+try:
+    from logging import NullHandler
+except ImportError:  # pragma: no cover
+
+    class NullHandler(logging.Handler):
+
+        def handle(self, record):
+            pass
+
+        def emit(self, record):
+            pass
+
+        def createLock(self):
+            self.lock = None
+
+
+logger = logging.getLogger(__name__)
+logger.addHandler(NullHandler())
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/compat.py b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/compat.py
new file mode 100644
index 0000000..e93dc27
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/compat.py
@@ -0,0 +1,1138 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013-2017 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+from __future__ import absolute_import
+
+import os
+import re
+import shutil
+import sys
+
+try:
+    import ssl
+except ImportError:  # pragma: no cover
+    ssl = None
+
+if sys.version_info[0] < 3:  # pragma: no cover
+    from StringIO import StringIO
+    string_types = basestring,
+    text_type = unicode
+    from types import FileType as file_type
+    import __builtin__ as builtins
+    import ConfigParser as configparser
+    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit
+    from urllib import (urlretrieve, quote as _quote, unquote, url2pathname,
+                        pathname2url, ContentTooShortError, splittype)
+
+    def quote(s):
+        if isinstance(s, unicode):
+            s = s.encode('utf-8')
+        return _quote(s)
+
+    import urllib2
+    from urllib2 import (Request, urlopen, URLError, HTTPError,
+                         HTTPBasicAuthHandler, HTTPPasswordMgr, HTTPHandler,
+                         HTTPRedirectHandler, build_opener)
+    if ssl:
+        from urllib2 import HTTPSHandler
+    import httplib
+    import xmlrpclib
+    import Queue as queue
+    from HTMLParser import HTMLParser
+    import htmlentitydefs
+    raw_input = raw_input
+    from itertools import ifilter as filter
+    from itertools import ifilterfalse as filterfalse
+
+    # Leaving this around for now, in case it needs resurrecting in some way
+    # _userprog = None
+    # def splituser(host):
+    # """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
+    # global _userprog
+    # if _userprog is None:
+    # import re
+    # _userprog = re.compile('^(.*)@(.*)$')
+
+    # match = _userprog.match(host)
+    # if match: return match.group(1, 2)
+    # return None, host
+
+else:  # pragma: no cover
+    from io import StringIO
+    string_types = str,
+    text_type = str
+    from io import TextIOWrapper as file_type
+    import builtins
+    import configparser
+    from urllib.parse import (urlparse, urlunparse, urljoin, quote, unquote,
+                              urlsplit, urlunsplit, splittype)
+    from urllib.request import (urlopen, urlretrieve, Request, url2pathname,
+                                pathname2url, HTTPBasicAuthHandler,
+                                HTTPPasswordMgr, HTTPHandler,
+                                HTTPRedirectHandler, build_opener)
+    if ssl:
+        from urllib.request import HTTPSHandler
+    from urllib.error import HTTPError, URLError, ContentTooShortError
+    import http.client as httplib
+    import urllib.request as urllib2
+    import xmlrpc.client as xmlrpclib
+    import queue
+    from html.parser import HTMLParser
+    import html.entities as htmlentitydefs
+    raw_input = input
+    from itertools import filterfalse
+    filter = filter
+
+try:
+    from ssl import match_hostname, CertificateError
+except ImportError:  # pragma: no cover
+
+    class CertificateError(ValueError):
+        pass
+
+    def _dnsname_match(dn, hostname, max_wildcards=1):
+        """Matching according to RFC 6125, section 6.4.3
+
+        http://tools.ietf.org/html/rfc6125#section-6.4.3
+        """
+        pats = []
+        if not dn:
+            return False
+
+        parts = dn.split('.')
+        leftmost, remainder = parts[0], parts[1:]
+
+        wildcards = leftmost.count('*')
+        if wildcards > max_wildcards:
+            # Issue #17980: avoid denials of service by refusing more
+            # than one wildcard per fragment.  A survey of established
+            # policy among SSL implementations showed it to be a
+            # reasonable choice.
+            raise CertificateError(
+                "too many wildcards in certificate DNS name: " + repr(dn))
+
+        # speed up common case w/o wildcards
+        if not wildcards:
+            return dn.lower() == hostname.lower()
+
+        # RFC 6125, section 6.4.3, subitem 1.
+        # The client SHOULD NOT attempt to match a presented identifier in which
+        # the wildcard character comprises a label other than the left-most label.
+        if leftmost == '*':
+            # When '*' is a fragment by itself, it matches a non-empty dotless
+            # fragment.
+            pats.append('[^.]+')
+        elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
+            # RFC 6125, section 6.4.3, subitem 3.
+            # The client SHOULD NOT attempt to match a presented identifier
+            # where the wildcard character is embedded within an A-label or
+            # U-label of an internationalized domain name.
+            pats.append(re.escape(leftmost))
+        else:
+            # Otherwise, '*' matches any dotless string, e.g. www*
+            pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
+
+        # add the remaining fragments, ignore any wildcards
+        for frag in remainder:
+            pats.append(re.escape(frag))
+
+        pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
+        return pat.match(hostname)
+
+    def match_hostname(cert, hostname):
+        """Verify that *cert* (in decoded format as returned by
+        SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
+        rules are followed, but IP addresses are not accepted for *hostname*.
+
+        CertificateError is raised on failure. On success, the function
+        returns nothing.
+        """
+        if not cert:
+            raise ValueError("empty or no certificate, match_hostname needs a "
+                             "SSL socket or SSL context with either "
+                             "CERT_OPTIONAL or CERT_REQUIRED")
+        dnsnames = []
+        san = cert.get('subjectAltName', ())
+        for key, value in san:
+            if key == 'DNS':
+                if _dnsname_match(value, hostname):
+                    return
+                dnsnames.append(value)
+        if not dnsnames:
+            # The subject is only checked when there is no dNSName entry
+            # in subjectAltName
+            for sub in cert.get('subject', ()):
+                for key, value in sub:
+                    # XXX according to RFC 2818, the most specific Common Name
+                    # must be used.
+                    if key == 'commonName':
+                        if _dnsname_match(value, hostname):
+                            return
+                        dnsnames.append(value)
+        if len(dnsnames) > 1:
+            raise CertificateError("hostname %r "
+                                   "doesn't match either of %s" %
+                                   (hostname, ', '.join(map(repr, dnsnames))))
+        elif len(dnsnames) == 1:
+            raise CertificateError("hostname %r "
+                                   "doesn't match %r" %
+                                   (hostname, dnsnames[0]))
+        else:
+            raise CertificateError("no appropriate commonName or "
+                                   "subjectAltName fields were found")
+
+
+try:
+    from types import SimpleNamespace as Container
+except ImportError:  # pragma: no cover
+
+    class Container(object):
+        """
+        A generic container for when multiple values need to be returned
+        """
+
+        def __init__(self, **kwargs):
+            self.__dict__.update(kwargs)
+
+
+try:
+    from shutil import which
+except ImportError:  # pragma: no cover
+    # Implementation from Python 3.3
+    def which(cmd, mode=os.F_OK | os.X_OK, path=None):
+        """Given a command, mode, and a PATH string, return the path which
+        conforms to the given mode on the PATH, or None if there is no such
+        file.
+
+        `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
+        of os.environ.get("PATH"), or can be overridden with a custom search
+        path.
+
+        """
+
+        # Check that a given file can be accessed with the correct mode.
+        # Additionally check that `file` is not a directory, as on Windows
+        # directories pass the os.access check.
+        def _access_check(fn, mode):
+            return (os.path.exists(fn) and os.access(fn, mode)
+                    and not os.path.isdir(fn))
+
+        # If we're given a path with a directory part, look it up directly rather
+        # than referring to PATH directories. This includes checking relative to the
+        # current directory, e.g. ./script
+        if os.path.dirname(cmd):
+            if _access_check(cmd, mode):
+                return cmd
+            return None
+
+        if path is None:
+            path = os.environ.get("PATH", os.defpath)
+        if not path:
+            return None
+        path = path.split(os.pathsep)
+
+        if sys.platform == "win32":
+            # The current directory takes precedence on Windows.
+            if os.curdir not in path:
+                path.insert(0, os.curdir)
+
+            # PATHEXT is necessary to check on Windows.
+            pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
+            # See if the given file matches any of the expected path extensions.
+            # This will allow us to short circuit when given "python.exe".
+            # If it does match, only test that one, otherwise we have to try
+            # others.
+            if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
+                files = [cmd]
+            else:
+                files = [cmd + ext for ext in pathext]
+        else:
+            # On other platforms you don't have things like PATHEXT to tell you
+            # what file suffixes are executable, so just pass on cmd as-is.
+            files = [cmd]
+
+        seen = set()
+        for dir in path:
+            normdir = os.path.normcase(dir)
+            if normdir not in seen:
+                seen.add(normdir)
+                for thefile in files:
+                    name = os.path.join(dir, thefile)
+                    if _access_check(name, mode):
+                        return name
+        return None
+
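+# Illustrative usage of the which() backport above (results depend on the
+# system; 'python3' and './script.sh' are placeholder command names):
+#
+#     which('python3')      # e.g. '/usr/bin/python3', or None if not found
+#     which('./script.sh')  # has a directory part, so checked directly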
+
+# ZipFile is a context manager in 2.7, but not in 2.6
+
+from zipfile import ZipFile as BaseZipFile
+
+if hasattr(BaseZipFile, '__enter__'):  # pragma: no cover
+    ZipFile = BaseZipFile
+else:  # pragma: no cover
+    from zipfile import ZipExtFile as BaseZipExtFile
+
+    class ZipExtFile(BaseZipExtFile):
+
+        def __init__(self, base):
+            self.__dict__.update(base.__dict__)
+
+        def __enter__(self):
+            return self
+
+        def __exit__(self, *exc_info):
+            self.close()
+            # return None, so if an exception occurred, it will propagate
+
+    class ZipFile(BaseZipFile):
+
+        def __enter__(self):
+            return self
+
+        def __exit__(self, *exc_info):
+            self.close()
+            # return None, so if an exception occurred, it will propagate
+
+        def open(self, *args, **kwargs):
+            base = BaseZipFile.open(self, *args, **kwargs)
+            return ZipExtFile(base)
+
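+# With the shim above, ZipFile is usable as a context manager on either
+# Python version (sketch; 'archive.zip' is a placeholder path):
+#
+#     with ZipFile('archive.zip') as zf:
+#         names = zf.namelist()
+#         data = zf.read(names[0]) if names else b''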
+
+try:
+    from platform import python_implementation
+except ImportError:  # pragma: no cover
+
+    def python_implementation():
+        """Return a string identifying the Python implementation."""
+        if 'PyPy' in sys.version:
+            return 'PyPy'
+        if os.name == 'java':
+            return 'Jython'
+        if sys.version.startswith('IronPython'):
+            return 'IronPython'
+        return 'CPython'
+
+
+import sysconfig
+
+try:
+    callable = callable
+except NameError:  # pragma: no cover
+    from collections.abc import Callable
+
+    def callable(obj):
+        return isinstance(obj, Callable)
+
+
+try:
+    fsencode = os.fsencode
+    fsdecode = os.fsdecode
+except AttributeError:  # pragma: no cover
+    # Issue #99: on some systems (e.g. containerised),
+    # sys.getfilesystemencoding() returns None, and we need a real value,
+    # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and
+    # sys.getfilesystemencoding(): the return value is "the user’s preference
+    # according to the result of nl_langinfo(CODESET), or None if the
+    # nl_langinfo(CODESET) failed."
+    _fsencoding = sys.getfilesystemencoding() or 'utf-8'
+    if _fsencoding == 'mbcs':
+        _fserrors = 'strict'
+    else:
+        _fserrors = 'surrogateescape'
+
+    def fsencode(filename):
+        if isinstance(filename, bytes):
+            return filename
+        elif isinstance(filename, text_type):
+            return filename.encode(_fsencoding, _fserrors)
+        else:
+            raise TypeError("expect bytes or str, not %s" %
+                            type(filename).__name__)
+
+    def fsdecode(filename):
+        if isinstance(filename, text_type):
+            return filename
+        elif isinstance(filename, bytes):
+            return filename.decode(_fsencoding, _fserrors)
+        else:
+            raise TypeError("expect bytes or str, not %s" %
+                            type(filename).__name__)
+
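+# Round-trip sketch for the fallbacks above, assuming the utf-8 fallback
+# encoding applies (illustrative values):
+#
+#     fsencode(u'caf\xe9.txt')        # -> b'caf\xc3\xa9.txt'
+#     fsdecode(b'caf\xc3\xa9.txt')    # -> u'caf\xe9.txt'
+#     fsencode(b'already-bytes')      # bytes are returned unchanged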
+
+try:
+    from tokenize import detect_encoding
+except ImportError:  # pragma: no cover
+    from codecs import BOM_UTF8, lookup
+
+    cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)")
+
+    def _get_normal_name(orig_enc):
+        """Imitates get_normal_name in tokenizer.c."""
+        # Only care about the first 12 characters.
+        enc = orig_enc[:12].lower().replace("_", "-")
+        if enc == "utf-8" or enc.startswith("utf-8-"):
+            return "utf-8"
+        if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
+           enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
+            return "iso-8859-1"
+        return orig_enc
+
+    def detect_encoding(readline):
+        """
+        The detect_encoding() function is used to detect the encoding that should
+        be used to decode a Python source file.  It requires one argument, readline,
+        in the same way as the tokenize() generator.
+
+        It will call readline a maximum of twice, and return the encoding used
+        (as a string) and a list of any lines (left as bytes) it has read in.
+
+        It detects the encoding from the presence of a utf-8 bom or an encoding
+        cookie as specified in pep-0263.  If both a bom and a cookie are present,
+        but disagree, a SyntaxError will be raised.  If the encoding cookie is an
+        invalid charset, raise a SyntaxError.  Note that if a utf-8 bom is found,
+        'utf-8-sig' is returned.
+
+        If no encoding is specified, then the default of 'utf-8' will be returned.
+        """
+        try:
+            filename = readline.__self__.name
+        except AttributeError:
+            filename = None
+        bom_found = False
+        encoding = None
+        default = 'utf-8'
+
+        def read_or_stop():
+            try:
+                return readline()
+            except StopIteration:
+                return b''
+
+        def find_cookie(line):
+            try:
+                # Decode as UTF-8. Either the line is an encoding declaration,
+                # in which case it should be pure ASCII, or it must be UTF-8
+                # per default encoding.
+                line_string = line.decode('utf-8')
+            except UnicodeDecodeError:
+                msg = "invalid or missing encoding declaration"
+                if filename is not None:
+                    msg = '{} for {!r}'.format(msg, filename)
+                raise SyntaxError(msg)
+
+            matches = cookie_re.findall(line_string)
+            if not matches:
+                return None
+            encoding = _get_normal_name(matches[0])
+            try:
+                codec = lookup(encoding)
+            except LookupError:
+                # This behaviour mimics the Python interpreter
+                if filename is None:
+                    msg = "unknown encoding: " + encoding
+                else:
+                    msg = "unknown encoding for {!r}: {}".format(
+                        filename, encoding)
+                raise SyntaxError(msg)
+
+            if bom_found:
+                if codec.name != 'utf-8':
+                    # This behaviour mimics the Python interpreter
+                    if filename is None:
+                        msg = 'encoding problem: utf-8'
+                    else:
+                        msg = 'encoding problem for {!r}: utf-8'.format(
+                            filename)
+                    raise SyntaxError(msg)
+                encoding += '-sig'
+            return encoding
+
+        first = read_or_stop()
+        if first.startswith(BOM_UTF8):
+            bom_found = True
+            first = first[3:]
+            default = 'utf-8-sig'
+        if not first:
+            return default, []
+
+        encoding = find_cookie(first)
+        if encoding:
+            return encoding, [first]
+
+        second = read_or_stop()
+        if not second:
+            return default, [first]
+
+        encoding = find_cookie(second)
+        if encoding:
+            return encoding, [first, second]
+
+        return default, [first, second]
+
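+# Illustrative use of the detect_encoding() fallback ('some_module.py' is a
+# placeholder): pass a readline callable over the raw bytes of a source file.
+#
+#     with open('some_module.py', 'rb') as f:
+#         encoding, lines_read = detect_encoding(f.readline)
+#     # encoding is e.g. 'utf-8', 'utf-8-sig', or the declared cookie charset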
+
+# For converting & <-> &amp; etc.
+try:
+    from html import escape
+except ImportError:
+    from cgi import escape
+if sys.version_info[:2] < (3, 4):
+    unescape = HTMLParser().unescape
+else:
+    from html import unescape
+
+try:
+    from collections import ChainMap
+except ImportError:  # pragma: no cover
+    from collections import MutableMapping
+
+    try:
+        from reprlib import recursive_repr as _recursive_repr
+    except ImportError:
+
+        def _recursive_repr(fillvalue='...'):
+            '''
+            Decorator to make a repr function return fillvalue for a recursive
+            call
+            '''
+
+            def decorating_function(user_function):
+                repr_running = set()
+
+                def wrapper(self):
+                    key = id(self), get_ident()
+                    if key in repr_running:
+                        return fillvalue
+                    repr_running.add(key)
+                    try:
+                        result = user_function(self)
+                    finally:
+                        repr_running.discard(key)
+                    return result
+
+                # Can't use functools.wraps() here because of bootstrap issues
+                wrapper.__module__ = getattr(user_function, '__module__')
+                wrapper.__doc__ = getattr(user_function, '__doc__')
+                wrapper.__name__ = getattr(user_function, '__name__')
+                wrapper.__annotations__ = getattr(user_function,
+                                                  '__annotations__', {})
+                return wrapper
+
+            return decorating_function
+
+    class ChainMap(MutableMapping):
+        '''
+        A ChainMap groups multiple dicts (or other mappings) together
+        to create a single, updateable view.
+
+        The underlying mappings are stored in a list.  That list is public and can
+        accessed or updated using the *maps* attribute.  There is no other state.
+
+        Lookups search the underlying mappings successively until a key is found.
+        In contrast, writes, updates, and deletions only operate on the first
+        mapping.
+        '''
+
+        def __init__(self, *maps):
+            '''Initialize a ChainMap by setting *maps* to the given mappings.
+            If no mappings are provided, a single empty dictionary is used.
+
+            '''
+            self.maps = list(maps) or [{}]  # always at least one map
+
+        def __missing__(self, key):
+            raise KeyError(key)
+
+        def __getitem__(self, key):
+            for mapping in self.maps:
+                try:
+                    return mapping[
+                        key]  # can't use 'key in mapping' with defaultdict
+                except KeyError:
+                    pass
+            return self.__missing__(
+                key)  # support subclasses that define __missing__
+
+        def get(self, key, default=None):
+            return self[key] if key in self else default
+
+        def __len__(self):
+            return len(set().union(
+                *self.maps))  # reuses stored hash values if possible
+
+        def __iter__(self):
+            return iter(set().union(*self.maps))
+
+        def __contains__(self, key):
+            return any(key in m for m in self.maps)
+
+        def __bool__(self):
+            return any(self.maps)
+
+        @_recursive_repr()
+        def __repr__(self):
+            return '{0.__class__.__name__}({1})'.format(
+                self, ', '.join(map(repr, self.maps)))
+
+        @classmethod
+        def fromkeys(cls, iterable, *args):
+            'Create a ChainMap with a single dict created from the iterable.'
+            return cls(dict.fromkeys(iterable, *args))
+
+        def copy(self):
+            'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
+            return self.__class__(self.maps[0].copy(), *self.maps[1:])
+
+        __copy__ = copy
+
+        def new_child(self):  # like Django's Context.push()
+            'New ChainMap with a new dict followed by all previous maps.'
+            return self.__class__({}, *self.maps)
+
+        @property
+        def parents(self):  # like Django's Context.pop()
+            'New ChainMap from maps[1:].'
+            return self.__class__(*self.maps[1:])
+
+        def __setitem__(self, key, value):
+            self.maps[0][key] = value
+
+        def __delitem__(self, key):
+            try:
+                del self.maps[0][key]
+            except KeyError:
+                raise KeyError(
+                    'Key not found in the first mapping: {!r}'.format(key))
+
+        def popitem(self):
+            'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
+            try:
+                return self.maps[0].popitem()
+            except KeyError:
+                raise KeyError('No keys found in the first mapping.')
+
+        def pop(self, key, *args):
+            'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
+            try:
+                return self.maps[0].pop(key, *args)
+            except KeyError:
+                raise KeyError(
+                    'Key not found in the first mapping: {!r}'.format(key))
+
+        def clear(self):
+            'Clear maps[0], leaving maps[1:] intact.'
+            self.maps[0].clear()
+
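+# Behaviour sketch for the ChainMap backport above (illustrative values):
+#
+#     cm = ChainMap({'a': 1}, {'a': 2, 'b': 3})
+#     cm['a']        # -> 1   (first mapping wins on lookup)
+#     cm['b']        # -> 3   (found in a later mapping)
+#     cm['c'] = 9    # writes, updates and deletes only touch maps[0]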
+
+try:
+    from importlib.util import cache_from_source  # Python >= 3.4
+except ImportError:  # pragma: no cover
+
+    def cache_from_source(path, debug_override=None):
+        assert path.endswith('.py')
+        if debug_override is None:
+            debug_override = __debug__
+        if debug_override:
+            suffix = 'c'
+        else:
+            suffix = 'o'
+        return path + suffix
+
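+# The fallback above produces pre-PEP 3147 cache names (illustrative paths):
+#
+#     cache_from_source('pkg/mod.py')                        # -> 'pkg/mod.pyc'
+#     cache_from_source('pkg/mod.py', debug_override=False)  # -> 'pkg/mod.pyo'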
+
+try:
+    from collections import OrderedDict
+except ImportError:  # pragma: no cover
+    # {{{ http://code.activestate.com/recipes/576693/ (r9)
+    # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
+    # Passes Python2.7's test suite and incorporates all the latest updates.
+    try:
+        from thread import get_ident as _get_ident
+    except ImportError:
+        from dummy_thread import get_ident as _get_ident
+
+    try:
+        from _abcoll import KeysView, ValuesView, ItemsView
+    except ImportError:
+        pass
+
+    class OrderedDict(dict):
+        'Dictionary that remembers insertion order'
+
+        # An inherited dict maps keys to values.
+        # The inherited dict provides __getitem__, __len__, __contains__, and get.
+        # The remaining methods are order-aware.
+        # Big-O running times for all methods are the same as for regular dictionaries.
+
+        # The internal self.__map dictionary maps keys to links in a doubly linked list.
+        # The circular doubly linked list starts and ends with a sentinel element.
+        # The sentinel element never gets deleted (this simplifies the algorithm).
+        # Each link is stored as a list of length three:  [PREV, NEXT, KEY].
+
+        def __init__(self, *args, **kwds):
+            '''Initialize an ordered dictionary.  Signature is the same as for
+            regular dictionaries, but keyword arguments are not recommended
+            because their insertion order is arbitrary.
+
+            '''
+            if len(args) > 1:
+                raise TypeError('expected at most 1 arguments, got %d' %
+                                len(args))
+            try:
+                self.__root
+            except AttributeError:
+                self.__root = root = []  # sentinel node
+                root[:] = [root, root, None]
+                self.__map = {}
+            self.__update(*args, **kwds)
+
+        def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
+            'od.__setitem__(i, y) <==> od[i]=y'
+            # Setting a new item creates a new link which goes at the end of the linked
+            # list, and the inherited dictionary is updated with the new key/value pair.
+            if key not in self:
+                root = self.__root
+                last = root[0]
+                last[1] = root[0] = self.__map[key] = [last, root, key]
+            dict_setitem(self, key, value)
+
+        def __delitem__(self, key, dict_delitem=dict.__delitem__):
+            'od.__delitem__(y) <==> del od[y]'
+            # Deleting an existing item uses self.__map to find the link which is
+            # then removed by updating the links in the predecessor and successor nodes.
+            dict_delitem(self, key)
+            link_prev, link_next, key = self.__map.pop(key)
+            link_prev[1] = link_next
+            link_next[0] = link_prev
+
+        def __iter__(self):
+            'od.__iter__() <==> iter(od)'
+            root = self.__root
+            curr = root[1]
+            while curr is not root:
+                yield curr[2]
+                curr = curr[1]
+
+        def __reversed__(self):
+            'od.__reversed__() <==> reversed(od)'
+            root = self.__root
+            curr = root[0]
+            while curr is not root:
+                yield curr[2]
+                curr = curr[0]
+
+        def clear(self):
+            'od.clear() -> None.  Remove all items from od.'
+            try:
+                for node in self.__map.itervalues():
+                    del node[:]
+                root = self.__root
+                root[:] = [root, root, None]
+                self.__map.clear()
+            except AttributeError:
+                pass
+            dict.clear(self)
+
+        def popitem(self, last=True):
+            '''od.popitem() -> (k, v), return and remove a (key, value) pair.
+            Pairs are returned in LIFO order if last is true or FIFO order if false.
+
+            '''
+            if not self:
+                raise KeyError('dictionary is empty')
+            root = self.__root
+            if last:
+                link = root[0]
+                link_prev = link[0]
+                link_prev[1] = root
+                root[0] = link_prev
+            else:
+                link = root[1]
+                link_next = link[1]
+                root[1] = link_next
+                link_next[0] = root
+            key = link[2]
+            del self.__map[key]
+            value = dict.pop(self, key)
+            return key, value
+
+        # -- the following methods do not depend on the internal structure --
+
+        def keys(self):
+            'od.keys() -> list of keys in od'
+            return list(self)
+
+        def values(self):
+            'od.values() -> list of values in od'
+            return [self[key] for key in self]
+
+        def items(self):
+            'od.items() -> list of (key, value) pairs in od'
+            return [(key, self[key]) for key in self]
+
+        def iterkeys(self):
+            'od.iterkeys() -> an iterator over the keys in od'
+            return iter(self)
+
+        def itervalues(self):
+            'od.itervalues() -> an iterator over the values in od'
+            for k in self:
+                yield self[k]
+
+        def iteritems(self):
+            'od.iteritems() -> an iterator over the (key, value) items in od'
+            for k in self:
+                yield (k, self[k])
+
+        def update(*args, **kwds):
+            '''od.update(E, **F) -> None.  Update od from dict/iterable E and F.
+
+            If E is a dict instance, does:           for k in E: od[k] = E[k]
+            If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
+            Or if E is an iterable of items, does:   for k, v in E: od[k] = v
+            In either case, this is followed by:     for k, v in F.items(): od[k] = v
+
+            '''
+            if len(args) > 2:
+                raise TypeError('update() takes at most 2 positional '
+                                'arguments (%d given)' % (len(args), ))
+            elif not args:
+                raise TypeError('update() takes at least 1 argument (0 given)')
+            self = args[0]
+            # Make progressively weaker assumptions about "other"
+            other = ()
+            if len(args) == 2:
+                other = args[1]
+            if isinstance(other, dict):
+                for key in other:
+                    self[key] = other[key]
+            elif hasattr(other, 'keys'):
+                for key in other.keys():
+                    self[key] = other[key]
+            else:
+                for key, value in other:
+                    self[key] = value
+            for key, value in kwds.items():
+                self[key] = value
+
+        __update = update  # let subclasses override update without breaking __init__
+
+        __marker = object()
+
+        def pop(self, key, default=__marker):
+            '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+            If key is not found, d is returned if given, otherwise KeyError is raised.
+
+            '''
+            if key in self:
+                result = self[key]
+                del self[key]
+                return result
+            if default is self.__marker:
+                raise KeyError(key)
+            return default
+
+        def setdefault(self, key, default=None):
+            'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
+            if key in self:
+                return self[key]
+            self[key] = default
+            return default
+
+        def __repr__(self, _repr_running=None):
+            'od.__repr__() <==> repr(od)'
+            if not _repr_running:
+                _repr_running = {}
+            call_key = id(self), _get_ident()
+            if call_key in _repr_running:
+                return '...'
+            _repr_running[call_key] = 1
+            try:
+                if not self:
+                    return '%s()' % (self.__class__.__name__, )
+                return '%s(%r)' % (self.__class__.__name__, self.items())
+            finally:
+                del _repr_running[call_key]
+
+        def __reduce__(self):
+            'Return state information for pickling'
+            items = [[k, self[k]] for k in self]
+            inst_dict = vars(self).copy()
+            for k in vars(OrderedDict()):
+                inst_dict.pop(k, None)
+            if inst_dict:
+                return (self.__class__, (items, ), inst_dict)
+            return self.__class__, (items, )
+
+        def copy(self):
+            'od.copy() -> a shallow copy of od'
+            return self.__class__(self)
+
+        @classmethod
+        def fromkeys(cls, iterable, value=None):
+            '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
+            and values equal to v (which defaults to None).
+
+            '''
+            d = cls()
+            for key in iterable:
+                d[key] = value
+            return d
+
+        def __eq__(self, other):
+            '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
+            while comparison to a regular mapping is order-insensitive.
+
+            '''
+            if isinstance(other, OrderedDict):
+                return len(self) == len(
+                    other) and self.items() == other.items()
+            return dict.__eq__(self, other)
+
+        def __ne__(self, other):
+            return not self == other
+
+        # -- the following methods are only used in Python 2.7 --
+
+        def viewkeys(self):
+            "od.viewkeys() -> a set-like object providing a view on od's keys"
+            return KeysView(self)
+
+        def viewvalues(self):
+            "od.viewvalues() -> an object providing a view on od's values"
+            return ValuesView(self)
+
+        def viewitems(self):
+            "od.viewitems() -> a set-like object providing a view on od's items"
+            return ItemsView(self)
+
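+# Quick behaviour sketch for the backport above (illustrative values):
+#
+#     od = OrderedDict()
+#     od['b'] = 1
+#     od['a'] = 2
+#     od.keys()               # -> ['b', 'a']  (insertion order preserved)
+#     od.popitem()            # -> ('a', 2)    (LIFO by default)
+#     od.popitem(last=False)  # -> ('b', 1)    (FIFO when last is false)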
+
+try:
+    from logging.config import BaseConfigurator, valid_ident
+except ImportError:  # pragma: no cover
+    IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)
+
+    def valid_ident(s):
+        m = IDENTIFIER.match(s)
+        if not m:
+            raise ValueError('Not a valid Python identifier: %r' % s)
+        return True
+
+    # The ConvertingXXX classes are wrappers around standard Python containers,
+    # and they serve to convert any suitable values in the container. The
+    # conversion converts base dicts, lists and tuples to their wrapped
+    # equivalents, whereas strings which match a conversion format are converted
+    # appropriately.
+    #
+    # Each wrapper should have a configurator attribute holding the actual
+    # configurator to use for conversion.
+
+    class ConvertingDict(dict):
+        """A converting dictionary wrapper."""
+
+        def __getitem__(self, key):
+            value = dict.__getitem__(self, key)
+            result = self.configurator.convert(value)
+            # If the converted value is different, save for next time
+            if value is not result:
+                self[key] = result
+                if type(result) in (ConvertingDict, ConvertingList,
+                                    ConvertingTuple):
+                    result.parent = self
+                    result.key = key
+            return result
+
+        def get(self, key, default=None):
+            value = dict.get(self, key, default)
+            result = self.configurator.convert(value)
+            # If the converted value is different, save for next time
+            if value is not result:
+                self[key] = result
+                if type(result) in (ConvertingDict, ConvertingList,
+                                    ConvertingTuple):
+                    result.parent = self
+                    result.key = key
+            return result
+
+        def pop(self, key, default=None):
+            value = dict.pop(self, key, default)
+            result = self.configurator.convert(value)
+            if value is not result:
+                if type(result) in (ConvertingDict, ConvertingList,
+                                    ConvertingTuple):
+                    result.parent = self
+                    result.key = key
+            return result
+
+    class ConvertingList(list):
+        """A converting list wrapper."""
+
+        def __getitem__(self, key):
+            value = list.__getitem__(self, key)
+            result = self.configurator.convert(value)
+            # If the converted value is different, save for next time
+            if value is not result:
+                self[key] = result
+                if type(result) in (ConvertingDict, ConvertingList,
+                                    ConvertingTuple):
+                    result.parent = self
+                    result.key = key
+            return result
+
+        def pop(self, idx=-1):
+            value = list.pop(self, idx)
+            result = self.configurator.convert(value)
+            if value is not result:
+                if type(result) in (ConvertingDict, ConvertingList,
+                                    ConvertingTuple):
+                    result.parent = self
+            return result
+
+    class ConvertingTuple(tuple):
+        """A converting tuple wrapper."""
+
+        def __getitem__(self, key):
+            value = tuple.__getitem__(self, key)
+            result = self.configurator.convert(value)
+            if value is not result:
+                if type(result) in (ConvertingDict, ConvertingList,
+                                    ConvertingTuple):
+                    result.parent = self
+                    result.key = key
+            return result
+
+    class BaseConfigurator(object):
+        """
+        The configurator base class which defines some useful defaults.
+        """
+
+        CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')
+
+        WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
+        DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
+        INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
+        DIGIT_PATTERN = re.compile(r'^\d+$')
+
+        value_converters = {
+            'ext': 'ext_convert',
+            'cfg': 'cfg_convert',
+        }
+
+        # We might want to use a different one, e.g. importlib
+        importer = staticmethod(__import__)
+
+        def __init__(self, config):
+            self.config = ConvertingDict(config)
+            self.config.configurator = self
+
+        def resolve(self, s):
+            """
+            Resolve strings to objects using standard import and attribute
+            syntax.
+            """
+            name = s.split('.')
+            used = name.pop(0)
+            try:
+                found = self.importer(used)
+                for frag in name:
+                    used += '.' + frag
+                    try:
+                        found = getattr(found, frag)
+                    except AttributeError:
+                        self.importer(used)
+                        found = getattr(found, frag)
+                return found
+            except ImportError:
+                e, tb = sys.exc_info()[1:]
+                v = ValueError('Cannot resolve %r: %s' % (s, e))
+                v.__cause__, v.__traceback__ = e, tb
+                raise v
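+
+        # Illustrative example of resolve() (standard-library names used only
+        # for illustration):
+        #
+        #     self.resolve('logging.handlers.RotatingFileHandler')
+        #
+        # imports 'logging', hits AttributeError on 'handlers', imports
+        # 'logging.handlers', and then walks attributes to return the class.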
+
+        def ext_convert(self, value):
+            """Default converter for the ext:// protocol."""
+            return self.resolve(value)
+
+        def cfg_convert(self, value):
+            """Default converter for the cfg:// protocol."""
+            rest = value
+            m = self.WORD_PATTERN.match(rest)
+            if m is None:
+                raise ValueError("Unable to convert %r" % value)
+            else:
+                rest = rest[m.end():]
+                d = self.config[m.groups()[0]]
+                while rest:
+                    m = self.DOT_PATTERN.match(rest)
+                    if m:
+                        d = d[m.groups()[0]]
+                    else:
+                        m = self.INDEX_PATTERN.match(rest)
+                        if m:
+                            idx = m.groups()[0]
+                            if not self.DIGIT_PATTERN.match(idx):
+                                d = d[idx]
+                            else:
+                                try:
+                                    n = int(
+                                        idx
+                                    )  # try as number first (most likely)
+                                    d = d[n]
+                                except TypeError:
+                                    d = d[idx]
+                    if m:
+                        rest = rest[m.end():]
+                    else:
+                        raise ValueError('Unable to convert '
+                                         '%r at %r' % (value, rest))
+            # rest should be empty
+            return d
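+
+        # Sketch of cfg:// resolution against this configurator's own config
+        # (hypothetical config values):
+        #
+        #     config = {'handlers': {'console': {'level': 'DEBUG'}}}
+        #     BaseConfigurator(config).cfg_convert('handlers.console.level')
+        #     # -> 'DEBUG'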
+
+        def convert(self, value):
+            """
+            Convert values to an appropriate type. dicts, lists and tuples are
+            replaced by their converting alternatives. Strings are checked to
+            see if they have a conversion format and are converted if they do.
+            """
+            if not isinstance(value, ConvertingDict) and isinstance(
+                    value, dict):
+                value = ConvertingDict(value)
+                value.configurator = self
+            elif not isinstance(value, ConvertingList) and isinstance(
+                    value, list):
+                value = ConvertingList(value)
+                value.configurator = self
+            elif not isinstance(value, ConvertingTuple) and isinstance(value, tuple):
+                value = ConvertingTuple(value)
+                value.configurator = self
+            elif isinstance(value, string_types):
+                m = self.CONVERT_PATTERN.match(value)
+                if m:
+                    d = m.groupdict()
+                    prefix = d['prefix']
+                    converter = self.value_converters.get(prefix, None)
+                    if converter:
+                        suffix = d['suffix']
+                        converter = getattr(self, converter)
+                        value = converter(suffix)
+            return value
+
+        def configure_custom(self, config):
+            """Configure an object with a user-supplied factory."""
+            c = config.pop('()')
+            if not callable(c):
+                c = self.resolve(c)
+            props = config.pop('.', None)
+            # Check for valid identifiers
+            kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
+            result = c(**kwargs)
+            if props:
+                for name, value in props.items():
+                    setattr(result, name, value)
+            return result
+
+        def as_tuple(self, value):
+            """Utility function which converts lists to tuples."""
+            if isinstance(value, list):
+                value = tuple(value)
+            return value
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/database.py b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/database.py
new file mode 100644
index 0000000..eb3765f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/database.py
@@ -0,0 +1,1359 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2023 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""PEP 376 implementation."""
+
+from __future__ import unicode_literals
+
+import base64
+import codecs
+import contextlib
+import hashlib
+import logging
+import os
+import posixpath
+import sys
+import zipimport
+
+from . import DistlibException, resources
+from .compat import StringIO
+from .version import get_scheme, UnsupportedVersionError
+from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
+                       LEGACY_METADATA_FILENAME)
+from .util import (parse_requirement, cached_property, parse_name_and_version,
+                   read_exports, write_exports, CSVReader, CSVWriter)
+
+__all__ = [
+    'Distribution', 'BaseInstalledDistribution', 'InstalledDistribution',
+    'EggInfoDistribution', 'DistributionPath'
+]
+
+logger = logging.getLogger(__name__)
+
+EXPORTS_FILENAME = 'pydist-exports.json'
+COMMANDS_FILENAME = 'pydist-commands.json'
+
+DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED',
+              'RESOURCES', EXPORTS_FILENAME, 'SHARED')
+
+DISTINFO_EXT = '.dist-info'
+
+
+class _Cache(object):
+    """
+    A simple cache mapping names and .dist-info paths to distributions
+    """
+
+    def __init__(self):
+        """
+        Initialise an instance. There is normally one for each DistributionPath.
+        """
+        self.name = {}
+        self.path = {}
+        self.generated = False
+
+    def clear(self):
+        """
+        Clear the cache, setting it to its initial state.
+        """
+        self.name.clear()
+        self.path.clear()
+        self.generated = False
+
+    def add(self, dist):
+        """
+        Add a distribution to the cache.
+        :param dist: The distribution to add.
+        """
+        if dist.path not in self.path:
+            self.path[dist.path] = dist
+            self.name.setdefault(dist.key, []).append(dist)
+
+
+class DistributionPath(object):
+    """
+    Represents a set of distributions installed on a path (typically sys.path).
+    """
+
+    def __init__(self, path=None, include_egg=False):
+        """
+        Create an instance from a path, optionally including legacy (distutils/
+        setuptools/distribute) distributions.
+        :param path: The path to use, as a list of directories. If not specified,
+                     sys.path is used.
+        :param include_egg: If True, this instance will look for and return legacy
+                            distributions as well as those based on PEP 376.
+        """
+        if path is None:
+            path = sys.path
+        self.path = path
+        self._include_dist = True
+        self._include_egg = include_egg
+
+        self._cache = _Cache()
+        self._cache_egg = _Cache()
+        self._cache_enabled = True
+        self._scheme = get_scheme('default')
+
+    def _get_cache_enabled(self):
+        return self._cache_enabled
+
+    def _set_cache_enabled(self, value):
+        self._cache_enabled = value
+
+    cache_enabled = property(_get_cache_enabled, _set_cache_enabled)
+
+    def clear_cache(self):
+        """
+        Clears the internal cache.
+        """
+        self._cache.clear()
+        self._cache_egg.clear()
+
+    def _yield_distributions(self):
+        """
+        Yield .dist-info and/or .egg(-info) distributions.
+        """
+        # We need to check if we've seen some resources already, because on
+        # some Linux systems (e.g. some Debian/Ubuntu variants) there are
+        # symlinks which alias other files in the environment.
+        seen = set()
+        for path in self.path:
+            finder = resources.finder_for_path(path)
+            if finder is None:
+                continue
+            r = finder.find('')
+            if not r or not r.is_container:
+                continue
+            rset = sorted(r.resources)
+            for entry in rset:
+                r = finder.find(entry)
+                if not r or r.path in seen:
+                    continue
+                try:
+                    if self._include_dist and entry.endswith(DISTINFO_EXT):
+                        possible_filenames = [
+                            METADATA_FILENAME, WHEEL_METADATA_FILENAME,
+                            LEGACY_METADATA_FILENAME
+                        ]
+                        for metadata_filename in possible_filenames:
+                            metadata_path = posixpath.join(
+                                entry, metadata_filename)
+                            pydist = finder.find(metadata_path)
+                            if pydist:
+                                break
+                        else:
+                            continue
+
+                        with contextlib.closing(pydist.as_stream()) as stream:
+                            metadata = Metadata(fileobj=stream,
+                                                scheme='legacy')
+                        logger.debug('Found %s', r.path)
+                        seen.add(r.path)
+                        yield new_dist_class(r.path,
+                                             metadata=metadata,
+                                             env=self)
+                    elif self._include_egg and entry.endswith(
+                            ('.egg-info', '.egg')):
+                        logger.debug('Found %s', r.path)
+                        seen.add(r.path)
+                        yield old_dist_class(r.path, self)
+                except Exception as e:
+                    msg = 'Unable to read distribution at %s, perhaps due to bad metadata: %s'
+                    logger.warning(msg, r.path, e)
+                    import warnings
+                    warnings.warn(msg % (r.path, e), stacklevel=2)
+
+    def _generate_cache(self):
+        """
+        Scan the path for distributions and populate the cache with
+        those that are found.
+        """
+        gen_dist = not self._cache.generated
+        gen_egg = self._include_egg and not self._cache_egg.generated
+        if gen_dist or gen_egg:
+            for dist in self._yield_distributions():
+                if isinstance(dist, InstalledDistribution):
+                    self._cache.add(dist)
+                else:
+                    self._cache_egg.add(dist)
+
+            if gen_dist:
+                self._cache.generated = True
+            if gen_egg:
+                self._cache_egg.generated = True
+
+    @classmethod
+    def distinfo_dirname(cls, name, version):
+        """
+        The *name* and *version* parameters are converted into their
+        filename-escaped form, i.e. any ``'-'`` characters are replaced
+        with ``'_'`` other than the one in ``'dist-info'`` and the one
+        separating the name from the version number.
+
+        :parameter name: is converted to a standard distribution name by replacing
+                         any runs of non-alphanumeric characters with a single
+                         ``'-'``.
+        :type name: string
+        :parameter version: is converted to a standard version string. Spaces
+                            become dots, and all other non-alphanumeric characters
+                            (except dots) become dashes, with runs of multiple
+                            dashes condensed to a single dash.
+        :type version: string
+        :returns: directory name
+        :rtype: string"""
+        name = name.replace('-', '_')
+        return '-'.join([name, version]) + DISTINFO_EXT
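+
+    # Worked example of the transformation above (hypothetical project name):
+    #
+    #     DistributionPath.distinfo_dirname('my-package', '1.0.0')
+    #     # -> 'my_package-1.0.0.dist-info'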
+
+    def get_distributions(self):
+        """
+        Provides an iterator that looks for distributions and returns
+        :class:`InstalledDistribution` or
+        :class:`EggInfoDistribution` instances for each one of them.
+
+        :rtype: iterator of :class:`InstalledDistribution` and
+                :class:`EggInfoDistribution` instances
+        """
+        if not self._cache_enabled:
+            for dist in self._yield_distributions():
+                yield dist
+        else:
+            self._generate_cache()
+
+            for dist in self._cache.path.values():
+                yield dist
+
+            if self._include_egg:
+                for dist in self._cache_egg.path.values():
+                    yield dist
+
+    def get_distribution(self, name):
+        """
+        Looks for a named distribution on the path.
+
+        This function only returns the first result found, as no more than one
+        value is expected. If nothing is found, ``None`` is returned.
+
+        :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution`
+                or ``None``
+        """
+        result = None
+        name = name.lower()
+        if not self._cache_enabled:
+            for dist in self._yield_distributions():
+                if dist.key == name:
+                    result = dist
+                    break
+        else:
+            self._generate_cache()
+
+            if name in self._cache.name:
+                result = self._cache.name[name][0]
+            elif self._include_egg and name in self._cache_egg.name:
+                result = self._cache_egg.name[name][0]
+        return result
+
+    def provides_distribution(self, name, version=None):
+        """
+        Iterates over all distributions to find which distributions provide *name*.
+        If a *version* is provided, it will be used to filter the results.
+
+        This function only returns the first result found, since no more than
+        one value is expected. If the directory is not found, returns ``None``.
+
+        :parameter version: a version specifier that indicates the version
+                            required, conforming to the format in ``PEP-345``
+
+        :type name: string
+        :type version: string
+        """
+        matcher = None
+        if version is not None:
+            try:
+                matcher = self._scheme.matcher('%s (%s)' % (name, version))
+            except ValueError:
+                raise DistlibException('invalid name or version: %r, %r' %
+                                       (name, version))
+
+        for dist in self.get_distributions():
+            # We hit a problem on Travis where enum34 was installed and doesn't
+            # have a provides attribute ...
+            if not hasattr(dist, 'provides'):
+                logger.debug('No "provides": %s', dist)
+            else:
+                provided = dist.provides
+
+                for p in provided:
+                    p_name, p_ver = parse_name_and_version(p)
+                    if matcher is None:
+                        if p_name == name:
+                            yield dist
+                            break
+                    else:
+                        if p_name == name and matcher.match(p_ver):
+                            yield dist
+                            break
+
+    def get_file_path(self, name, relative_path):
+        """
+        Return the path to a resource file.
+        """
+        dist = self.get_distribution(name)
+        if dist is None:
+            raise LookupError('no distribution named %r found' % name)
+        return dist.get_resource_path(relative_path)
+
+    def get_exported_entries(self, category, name=None):
+        """
+        Return all of the exported entries in a particular category.
+
+        :param category: The category to search for entries.
+        :param name: If specified, only entries with that name are returned.
+        """
+        for dist in self.get_distributions():
+            r = dist.exports
+            if category in r:
+                d = r[category]
+                if name is not None:
+                    if name in d:
+                        yield d[name]
+                else:
+                    for v in d.values():
+                        yield v
+
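+# Typical use of DistributionPath (illustrative; 'pip' is just an example
+# name and may not be present on a given path):
+#
+#     dp = DistributionPath()              # scans sys.path by default
+#     dist = dp.get_distribution('pip')
+#     if dist is not None:
+#         print(dist.name_and_version)     # e.g. 'pip (24.0)'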
+
+class Distribution(object):
+    """
+    A base class for distributions, whether installed or from indexes.
+    Either way, it must have some metadata, so that's all that's needed
+    for construction.
+    """
+
+    build_time_dependency = False
+    """
+    Set to True if it's known to be only a build-time dependency (i.e.
+    not needed after installation).
+    """
+
+    requested = False
+    """A boolean that indicates whether the ``REQUESTED`` metadata file is
+    present (in other words, whether the package was installed by user
+    request or it was installed as a dependency)."""
+
+    def __init__(self, metadata):
+        """
+        Initialise an instance.
+        :param metadata: The instance of :class:`Metadata` describing this
+        distribution.
+        """
+        self.metadata = metadata
+        self.name = metadata.name
+        self.key = self.name.lower()  # for case-insensitive comparisons
+        self.version = metadata.version
+        self.locator = None
+        self.digest = None
+        self.extras = None  # additional features requested
+        self.context = None  # environment marker overrides
+        self.download_urls = set()
+        self.digests = {}
+
+    @property
+    def source_url(self):
+        """
+        The source archive download URL for this distribution.
+        """
+        return self.metadata.source_url
+
+    download_url = source_url  # Backward compatibility
+
+    @property
+    def name_and_version(self):
+        """
+        A utility property which displays the name and version in parentheses.
+        """
+        return '%s (%s)' % (self.name, self.version)
+
+    @property
+    def provides(self):
+        """
+        A set of distribution names and versions provided by this distribution.
+        :return: A set of "name (version)" strings.
+        """
+        plist = self.metadata.provides
+        s = '%s (%s)' % (self.name, self.version)
+        if s not in plist:
+            plist.append(s)
+        return plist
+
+    def _get_requirements(self, req_attr):
+        md = self.metadata
+        reqts = getattr(md, req_attr)
+        logger.debug('%s: got requirements %r from metadata: %r', self.name,
+                     req_attr, reqts)
+        return set(
+            md.get_requirements(reqts, extras=self.extras, env=self.context))
+
+    @property
+    def run_requires(self):
+        return self._get_requirements('run_requires')
+
+    @property
+    def meta_requires(self):
+        return self._get_requirements('meta_requires')
+
+    @property
+    def build_requires(self):
+        return self._get_requirements('build_requires')
+
+    @property
+    def test_requires(self):
+        return self._get_requirements('test_requires')
+
+    @property
+    def dev_requires(self):
+        return self._get_requirements('dev_requires')
+
+    def matches_requirement(self, req):
+        """
+        Say if this instance matches (fulfills) a requirement.
+        :param req: The requirement to match.
+        :rtype req: str
+        :return: True if it matches, else False.
+        """
+        # Requirement may contain extras - parse to lose those
+        # from what's passed to the matcher
+        r = parse_requirement(req)
+        scheme = get_scheme(self.metadata.scheme)
+        try:
+            matcher = scheme.matcher(r.requirement)
+        except UnsupportedVersionError:
+            # XXX compat-mode if cannot read the version
+            logger.warning('could not read version %r - using name only', req)
+            name = req.split()[0]
+            matcher = scheme.matcher(name)
+
+        name = matcher.key  # case-insensitive
+
+        result = False
+        for p in self.provides:
+            p_name, p_ver = parse_name_and_version(p)
+            if p_name != name:
+                continue
+            try:
+                result = matcher.match(p_ver)
+                break
+            except UnsupportedVersionError:
+                pass
+        return result
+
+    def __repr__(self):
+        """
+        Return a textual representation of this instance.
+        """
+        if self.source_url:
+            suffix = ' [%s]' % self.source_url
+        else:
+            suffix = ''
+        return '<Distribution %s (%s)%s>' % (self.name, self.version, suffix)
+
+    def __eq__(self, other):
+        """
+        See if this distribution is the same as another.
+        :param other: The distribution to compare with. To be equal to one
+                      another, distributions must have the same type, name,
+                      version and source_url.
+        :return: True if it is the same, else False.
+        """
+        if type(other) is not type(self):
+            result = False
+        else:
+            result = (self.name == other.name and self.version == other.version
+                      and self.source_url == other.source_url)
+        return result
+
+    def __hash__(self):
+        """
+        Compute hash in a way which matches the equality test.
+        """
+        return hash(self.name) + hash(self.version) + hash(self.source_url)
+
+
+class BaseInstalledDistribution(Distribution):
+    """
+    This is the base class for installed distributions (whether PEP 376 or
+    legacy).
+    """
+
+    hasher = None
+
+    def __init__(self, metadata, path, env=None):
+        """
+        Initialise an instance.
+        :param metadata: An instance of :class:`Metadata` which describes the
+                         distribution. This will normally have been initialised
+                         from a metadata file in the ``path``.
+        :param path:     The path of the ``.dist-info`` or ``.egg-info``
+                         directory for the distribution.
+        :param env:      This is normally the :class:`DistributionPath`
+                         instance where this distribution was found.
+        """
+        super(BaseInstalledDistribution, self).__init__(metadata)
+        self.path = path
+        self.dist_path = env
+
+    def get_hash(self, data, hasher=None):
+        """
+        Get the hash of some data, using a particular hash algorithm, if
+        specified.
+
+        :param data: The data to be hashed.
+        :type data: bytes
+        :param hasher: The name of a hash implementation, supported by hashlib,
+                       or ``None``. Examples of valid values are ``'sha1'``,
+                       ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and
+                       ``'sha224'``, ``'sha384'``, ``'sha256'``, ``'md5'`` and
+                       attribute of the :class:`InstalledDistribution` instance
+                       is used. If the hasher is determined to be ``None``, MD5
+                       is used as the hashing algorithm.
+        :returns: The hash of the data. If a hasher was explicitly specified,
+                  the returned hash will be prefixed with the specified hasher
+                  followed by '='.
+        :rtype: str
+        """
+        if hasher is None:
+            hasher = self.hasher
+        if hasher is None:
+            hasher = hashlib.md5
+            prefix = ''
+        else:
+            hasher = getattr(hashlib, hasher)
+            prefix = '%s=' % self.hasher
+        digest = hasher(data).digest()
+        digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')
+        return '%s%s' % (prefix, digest)
+
+
+class InstalledDistribution(BaseInstalledDistribution):
+    """
+    Created with the *path* of the ``.dist-info`` directory provided to the
+    constructor. It reads the metadata contained in ``pydist.json`` when it is
+    instantiated, or uses a passed-in Metadata instance (useful when
+    dry-run mode is being used).
+    """
+
+    hasher = 'sha256'
+
+    def __init__(self, path, metadata=None, env=None):
+        self.modules = []
+        self.finder = finder = resources.finder_for_path(path)
+        if finder is None:
+            raise ValueError('finder unavailable for %s' % path)
+        if env and env._cache_enabled and path in env._cache.path:
+            metadata = env._cache.path[path].metadata
+        elif metadata is None:
+            r = finder.find(METADATA_FILENAME)
+            # Temporary - for Wheel 0.23 support
+            if r is None:
+                r = finder.find(WHEEL_METADATA_FILENAME)
+            # Temporary - for legacy support
+            if r is None:
+                r = finder.find(LEGACY_METADATA_FILENAME)
+            if r is None:
+                raise ValueError('no %s found in %s' %
+                                 (METADATA_FILENAME, path))
+            with contextlib.closing(r.as_stream()) as stream:
+                metadata = Metadata(fileobj=stream, scheme='legacy')
+
+        super(InstalledDistribution, self).__init__(metadata, path, env)
+
+        if env and env._cache_enabled:
+            env._cache.add(self)
+
+        r = finder.find('REQUESTED')
+        self.requested = r is not None
+        p = os.path.join(path, 'top_level.txt')
+        if os.path.exists(p):
+            with open(p, 'rb') as f:
+                data = f.read().decode('utf-8')
+            self.modules = data.splitlines()
+
+    def __repr__(self):
+        return '<InstalledDistribution %r %s at %r>' % (
+            self.name, self.version, self.path)
+
+    def __str__(self):
+        return "%s %s" % (self.name, self.version)
+
+    def _get_records(self):
+        """
+        Get the list of installed files for the distribution
+        :return: A list of tuples of path, hash and size. Note that hash and
+                 size might be ``None`` for some entries. The path is exactly
+                 as stored in the file (which is as in PEP 376).
+        """
+        results = []
+        r = self.get_distinfo_resource('RECORD')
+        with contextlib.closing(r.as_stream()) as stream:
+            with CSVReader(stream=stream) as record_reader:
+                # Base location is parent dir of .dist-info dir
+                # base_location = os.path.dirname(self.path)
+                # base_location = os.path.abspath(base_location)
+                for row in record_reader:
+                    missing = [None for i in range(len(row), 3)]
+                    path, checksum, size = row + missing
+                    # if not os.path.isabs(path):
+                    #     path = path.replace('/', os.sep)
+                    #     path = os.path.join(base_location, path)
+                    results.append((path, checksum, size))
+        return results
+
+    @cached_property
+    def exports(self):
+        """
+        Return the information exported by this distribution.
+        :return: A dictionary of exports, mapping an export category to a dict
+                 of :class:`ExportEntry` instances describing the individual
+                 export entries, and keyed by name.
+        """
+        result = {}
+        r = self.get_distinfo_resource(EXPORTS_FILENAME)
+        if r:
+            result = self.read_exports()
+        return result
+
+    def read_exports(self):
+        """
+        Read exports data from a file in .ini format.
+
+        :return: A dictionary of exports, mapping an export category to a list
+                 of :class:`ExportEntry` instances describing the individual
+                 export entries.
+        """
+        result = {}
+        r = self.get_distinfo_resource(EXPORTS_FILENAME)
+        if r:
+            with contextlib.closing(r.as_stream()) as stream:
+                result = read_exports(stream)
+        return result
+
+    def write_exports(self, exports):
+        """
+        Write a dictionary of exports to a file in .ini format.
+        :param exports: A dictionary of exports, mapping an export category to
+                        a list of :class:`ExportEntry` instances describing the
+                        individual export entries.
+        """
+        rf = self.get_distinfo_file(EXPORTS_FILENAME)
+        with open(rf, 'w') as f:
+            write_exports(exports, f)
+
+    def get_resource_path(self, relative_path):
+        """
+        NOTE: This API may change in the future.
+
+        Return the absolute path to a resource file with the given relative
+        path.
+
+        :param relative_path: The path, relative to .dist-info, of the resource
+                              of interest.
+        :return: The absolute path where the resource is to be found.
+        """
+        r = self.get_distinfo_resource('RESOURCES')
+        with contextlib.closing(r.as_stream()) as stream:
+            with CSVReader(stream=stream) as resources_reader:
+                for relative, destination in resources_reader:
+                    if relative == relative_path:
+                        return destination
+        raise KeyError('no resource file with relative path %r '
+                       'is installed' % relative_path)
+
+    def list_installed_files(self):
+        """
+        Iterates over the ``RECORD`` entries and returns a tuple
+        ``(path, hash, size)`` for each line.
+
+        :returns: iterator of (path, hash, size)
+        """
+        for result in self._get_records():
+            yield result
+
+    def write_installed_files(self, paths, prefix, dry_run=False):
+        """
+        Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any
+        existing ``RECORD`` file is silently overwritten.
+
+        prefix is used to determine when to write absolute paths.
+        """
+        prefix = os.path.join(prefix, '')
+        base = os.path.dirname(self.path)
+        base_under_prefix = base.startswith(prefix)
+        base = os.path.join(base, '')
+        record_path = self.get_distinfo_file('RECORD')
+        logger.info('creating %s', record_path)
+        if dry_run:
+            return None
+        with CSVWriter(record_path) as writer:
+            for path in paths:
+                if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')):
+                    # do not put size and hash, as in PEP-376
+                    hash_value = size = ''
+                else:
+                    size = '%d' % os.path.getsize(path)
+                    with open(path, 'rb') as fp:
+                        hash_value = self.get_hash(fp.read())
+                if path.startswith(base) or (base_under_prefix
+                                             and path.startswith(prefix)):
+                    path = os.path.relpath(path, base)
+                writer.writerow((path, hash_value, size))
+
+            # add the RECORD file itself
+            if record_path.startswith(base):
+                record_path = os.path.relpath(record_path, base)
+            writer.writerow((record_path, '', ''))
+        return record_path
+
+    def check_installed_files(self):
+        """
+        Checks that the hashes and sizes of the files in ``RECORD`` are
+        matched by the files themselves. Returns a (possibly empty) list of
+        mismatches. Each entry in the mismatch list will be a tuple consisting
+        of the path, 'exists', 'size' or 'hash' according to what didn't match
+        (existence is checked first, then size, then hash), the expected
+        value and the actual value.
+        """
+        mismatches = []
+        base = os.path.dirname(self.path)
+        record_path = self.get_distinfo_file('RECORD')
+        for path, hash_value, size in self.list_installed_files():
+            if not os.path.isabs(path):
+                path = os.path.join(base, path)
+            if path == record_path:
+                continue
+            if not os.path.exists(path):
+                mismatches.append((path, 'exists', True, False))
+            elif os.path.isfile(path):
+                actual_size = str(os.path.getsize(path))
+                if size and actual_size != size:
+                    mismatches.append((path, 'size', size, actual_size))
+                elif hash_value:
+                    if '=' in hash_value:
+                        hasher = hash_value.split('=', 1)[0]
+                    else:
+                        hasher = None
+
+                    with open(path, 'rb') as f:
+                        actual_hash = self.get_hash(f.read(), hasher)
+                        if actual_hash != hash_value:
+                            mismatches.append(
+                                (path, 'hash', hash_value, actual_hash))
+        return mismatches
+
+    @cached_property
+    def shared_locations(self):
+        """
+        A dictionary of shared locations whose keys are in the set 'prefix',
+        'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'.
+        The corresponding value is the absolute path of that category for
+        this distribution, and takes into account any paths selected by the
+        user at installation time (e.g. via command-line arguments). In the
+        case of the 'namespace' key, this would be a list of absolute paths
+        for the roots of namespace packages in this distribution.
+
+        The first time this property is accessed, the relevant information is
+        read from the SHARED file in the .dist-info directory.
+        """
+        result = {}
+        shared_path = os.path.join(self.path, 'SHARED')
+        if os.path.isfile(shared_path):
+            with codecs.open(shared_path, 'r', encoding='utf-8') as f:
+                lines = f.read().splitlines()
+            for line in lines:
+                key, value = line.split('=', 1)
+                if key == 'namespace':
+                    result.setdefault(key, []).append(value)
+                else:
+                    result[key] = value
+        return result
+
+    def write_shared_locations(self, paths, dry_run=False):
+        """
+        Write shared location information to the SHARED file in .dist-info.
+        :param paths: A dictionary as described in the documentation for
+        :meth:`shared_locations`.
+        :param dry_run: If True, the action is logged but no file is actually
+                        written.
+        :return: The path of the file written to.
+        """
+        shared_path = os.path.join(self.path, 'SHARED')
+        logger.info('creating %s', shared_path)
+        if dry_run:
+            return None
+        lines = []
+        for key in ('prefix', 'lib', 'headers', 'scripts', 'data'):
+            path = paths[key]
+            if os.path.isdir(paths[key]):
+                lines.append('%s=%s' % (key, path))
+        for ns in paths.get('namespace', ()):
+            lines.append('namespace=%s' % ns)
+
+        with codecs.open(shared_path, 'w', encoding='utf-8') as f:
+            f.write('\n'.join(lines))
+        return shared_path
+
+    def get_distinfo_resource(self, path):
+        if path not in DIST_FILES:
+            raise DistlibException('invalid path for a dist-info file: '
+                                   '%r at %r' % (path, self.path))
+        finder = resources.finder_for_path(self.path)
+        if finder is None:
+            raise DistlibException('Unable to get a finder for %s' % self.path)
+        return finder.find(path)
+
+    def get_distinfo_file(self, path):
+        """
+        Returns a path located under the ``.dist-info`` directory. Returns a
+        string representing the path.
+
+        :parameter path: a ``'/'``-separated path relative to the
+                         ``.dist-info`` directory or an absolute path;
+                         If *path* is an absolute path and doesn't start
+                         with the ``.dist-info`` directory path,
+                         a :class:`DistlibException` is raised
+        :type path: str
+        :rtype: str
+        """
+        # Check if it is an absolute path  # XXX use relpath, add tests
+        if path.find(os.sep) >= 0:
+            # it's an absolute path?
+            distinfo_dirname, path = path.split(os.sep)[-2:]
+            if distinfo_dirname != self.path.split(os.sep)[-1]:
+                raise DistlibException(
+                    'dist-info file %r does not belong to the %r %s '
+                    'distribution' % (path, self.name, self.version))
+
+        # The file must be relative
+        if path not in DIST_FILES:
+            raise DistlibException('invalid path for a dist-info file: '
+                                   '%r at %r' % (path, self.path))
+
+        return os.path.join(self.path, path)
+
+    def list_distinfo_files(self):
+        """
+        Iterates over the ``RECORD`` entries and returns paths for each line if
+        the path is pointing to a file located in the ``.dist-info`` directory
+        or one of its subdirectories.
+
+        :returns: iterator of paths
+        """
+        base = os.path.dirname(self.path)
+        for path, checksum, size in self._get_records():
+            # XXX add separator or use real relpath algo
+            if not os.path.isabs(path):
+                path = os.path.join(base, path)
+            if path.startswith(self.path):
+                yield path
+
+    def __eq__(self, other):
+        return (isinstance(other, InstalledDistribution)
+                and self.path == other.path)
+
+    # See http://docs.python.org/reference/datamodel#object.__hash__
+    __hash__ = object.__hash__
+
+
+class EggInfoDistribution(BaseInstalledDistribution):
+    """Created with the *path* of the ``.egg-info`` directory or file provided
+    to the constructor. It reads the metadata contained in the file itself, or
+    if the given path happens to be a directory, the metadata is read from the
+    file ``PKG-INFO`` under that directory."""
+
+    requested = True  # as we have no way of knowing, assume it was
+    shared_locations = {}
+
+    def __init__(self, path, env=None):
+
+        def set_name_and_version(s, n, v):
+            s.name = n
+            s.key = n.lower()  # for case-insensitive comparisons
+            s.version = v
+
+        self.path = path
+        self.dist_path = env
+        if env and env._cache_enabled and path in env._cache_egg.path:
+            metadata = env._cache_egg.path[path].metadata
+            set_name_and_version(self, metadata.name, metadata.version)
+        else:
+            metadata = self._get_metadata(path)
+
+            # Need to be set before caching
+            set_name_and_version(self, metadata.name, metadata.version)
+
+            if env and env._cache_enabled:
+                env._cache_egg.add(self)
+        super(EggInfoDistribution, self).__init__(metadata, path, env)
+
+    def _get_metadata(self, path):
+        requires = None
+
+        def parse_requires_data(data):
+            """Create a list of dependencies from a requires.txt file.
+
+            *data*: the contents of a setuptools-produced requires.txt file.
+            """
+            reqs = []
+            lines = data.splitlines()
+            for line in lines:
+                line = line.strip()
+                # sectioned files have bare newlines (separating sections)
+                if not line:  # pragma: no cover
+                    continue
+                if line.startswith('['):  # pragma: no cover
+                    logger.warning(
+                        'Unexpected line: quitting requirement scan: %r', line)
+                    break
+                r = parse_requirement(line)
+                if not r:  # pragma: no cover
+                    logger.warning('Not recognised as a requirement: %r', line)
+                    continue
+                if r.extras:  # pragma: no cover
+                    logger.warning('extra requirements in requires.txt are '
+                                   'not supported')
+                if not r.constraints:
+                    reqs.append(r.name)
+                else:
+                    cons = ', '.join('%s%s' % c for c in r.constraints)
+                    reqs.append('%s (%s)' % (r.name, cons))
+            return reqs
+
+        def parse_requires_path(req_path):
+            """Create a list of dependencies from a requires.txt file.
+
+            *req_path*: the path to a setuptools-produced requires.txt file.
+            """
+
+            reqs = []
+            try:
+                with codecs.open(req_path, 'r', 'utf-8') as fp:
+                    reqs = parse_requires_data(fp.read())
+            except IOError:
+                pass
+            return reqs
+
+        tl_path = tl_data = None
+        if path.endswith('.egg'):
+            if os.path.isdir(path):
+                p = os.path.join(path, 'EGG-INFO')
+                meta_path = os.path.join(p, 'PKG-INFO')
+                metadata = Metadata(path=meta_path, scheme='legacy')
+                req_path = os.path.join(p, 'requires.txt')
+                tl_path = os.path.join(p, 'top_level.txt')
+                requires = parse_requires_path(req_path)
+            else:
+                # FIXME handle the case where zipfile is not available
+                zipf = zipimport.zipimporter(path)
+                fileobj = StringIO(
+                    zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8'))
+                metadata = Metadata(fileobj=fileobj, scheme='legacy')
+                try:
+                    data = zipf.get_data('EGG-INFO/requires.txt')
+                    tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode(
+                        'utf-8')
+                    requires = parse_requires_data(data.decode('utf-8'))
+                except IOError:
+                    requires = None
+        elif path.endswith('.egg-info'):
+            if os.path.isdir(path):
+                req_path = os.path.join(path, 'requires.txt')
+                requires = parse_requires_path(req_path)
+                # compute the top_level.txt path before path is repointed at PKG-INFO
+                tl_path = os.path.join(path, 'top_level.txt')
+                path = os.path.join(path, 'PKG-INFO')
+            metadata = Metadata(path=path, scheme='legacy')
+        else:
+            raise DistlibException('path must end with .egg-info or .egg, '
+                                   'got %r' % path)
+
+        if requires:
+            metadata.add_requirements(requires)
+        # look for top-level modules in top_level.txt, if present
+        if tl_data is None:
+            if tl_path is not None and os.path.exists(tl_path):
+                with open(tl_path, 'rb') as f:
+                    tl_data = f.read().decode('utf-8')
+        if not tl_data:
+            tl_data = []
+        else:
+            tl_data = tl_data.splitlines()
+        self.modules = tl_data
+        return metadata
+
+    def __repr__(self):
+        return '<EggInfoDistribution %r %s at %r>' % (self.name, self.version,
+                                                      self.path)
+
+    def __str__(self):
+        return "%s %s" % (self.name, self.version)
+
+    def check_installed_files(self):
+        """
+        Checks that the hashes and sizes of the files in ``RECORD`` are
+        matched by the files themselves. Returns a (possibly empty) list of
+        mismatches. Each entry in the mismatch list will be a tuple consisting
+        of the path, 'exists', 'size' or 'hash' according to what didn't match
+        (existence is checked first, then size, then hash), the expected
+        value and the actual value.
+        """
+        mismatches = []
+        record_path = os.path.join(self.path, 'installed-files.txt')
+        if os.path.exists(record_path):
+            for path, _, _ in self.list_installed_files():
+                if path == record_path:
+                    continue
+                if not os.path.exists(path):
+                    mismatches.append((path, 'exists', True, False))
+        return mismatches
+
+    def list_installed_files(self):
+        """
+        Iterates over the ``installed-files.txt`` entries and returns a tuple
+        ``(path, hash, size)`` for each line.
+
+        :returns: a list of (path, hash, size)
+        """
+
+        def _md5(path):
+            f = open(path, 'rb')
+            try:
+                content = f.read()
+            finally:
+                f.close()
+            return hashlib.md5(content).hexdigest()
+
+        def _size(path):
+            return os.stat(path).st_size
+
+        record_path = os.path.join(self.path, 'installed-files.txt')
+        result = []
+        if os.path.exists(record_path):
+            with codecs.open(record_path, 'r', encoding='utf-8') as f:
+                for line in f:
+                    line = line.strip()
+                    p = os.path.normpath(os.path.join(self.path, line))
+                    # "./" is present as a marker between installed files
+                    # and installation metadata files
+                    if not os.path.exists(p):
+                        logger.warning('Non-existent file: %s', p)
+                        if p.endswith(('.pyc', '.pyo')):
+                            continue
+                        # otherwise fall through and fail
+                    if not os.path.isdir(p):
+                        result.append((p, _md5(p), _size(p)))
+            result.append((record_path, None, None))
+        return result
+
+    def list_distinfo_files(self, absolute=False):
+        """
+        Iterates over the ``installed-files.txt`` entries and returns paths for
+        each line if the path is pointing to a file located in the
+        ``.egg-info`` directory or one of its subdirectories.
+
+        :parameter absolute: If *absolute* is ``True``, each returned path is
+                          transformed into a local absolute path. Otherwise the
+                          raw value from ``installed-files.txt`` is returned.
+        :type absolute: boolean
+        :returns: iterator of paths
+        """
+        record_path = os.path.join(self.path, 'installed-files.txt')
+        if os.path.exists(record_path):
+            skip = True
+            with codecs.open(record_path, 'r', encoding='utf-8') as f:
+                for line in f:
+                    line = line.strip()
+                    if line == './':
+                        skip = False
+                        continue
+                    if not skip:
+                        p = os.path.normpath(os.path.join(self.path, line))
+                        if p.startswith(self.path):
+                            if absolute:
+                                yield p
+                            else:
+                                yield line
+
+    def __eq__(self, other):
+        return (isinstance(other, EggInfoDistribution)
+                and self.path == other.path)
+
+    # See http://docs.python.org/reference/datamodel#object.__hash__
+    __hash__ = object.__hash__
+
+
+new_dist_class = InstalledDistribution
+old_dist_class = EggInfoDistribution
+
+
+class DependencyGraph(object):
+    """
+    Represents a dependency graph between distributions.
+
+    The dependency relationships are stored in an ``adjacency_list`` that maps
+    distributions to a list of ``(other, label)`` tuples where  ``other``
+    is a distribution and the edge is labeled with ``label`` (i.e. the version
+    specifier, if such was provided). Also, for more efficient traversal, for
+    every distribution ``x``, a list of predecessors is kept in
+    ``reverse_list[x]``. An edge from distribution ``a`` to
+    distribution ``b`` means that ``a`` depends on ``b``. If any missing
+    dependencies are found, they are stored in ``missing``, which is a
+    dictionary that maps distributions to a list of requirements that were not
+    provided by any other distributions.
+    """
+
+    def __init__(self):
+        self.adjacency_list = {}
+        self.reverse_list = {}
+        self.missing = {}
+
+    def add_distribution(self, distribution):
+        """Add the *distribution* to the graph.
+
+        :type distribution: :class:`distutils2.database.InstalledDistribution`
+                            or :class:`distutils2.database.EggInfoDistribution`
+        """
+        self.adjacency_list[distribution] = []
+        self.reverse_list[distribution] = []
+        # self.missing[distribution] = []
+
+    def add_edge(self, x, y, label=None):
+        """Add an edge from distribution *x* to distribution *y* with the given
+        *label*.
+
+        :type x: :class:`distutils2.database.InstalledDistribution` or
+                 :class:`distutils2.database.EggInfoDistribution`
+        :type y: :class:`distutils2.database.InstalledDistribution` or
+                 :class:`distutils2.database.EggInfoDistribution`
+        :type label: ``str`` or ``None``
+        """
+        self.adjacency_list[x].append((y, label))
+        # multiple edges are allowed, so be careful
+        if x not in self.reverse_list[y]:
+            self.reverse_list[y].append(x)
+
+    def add_missing(self, distribution, requirement):
+        """
+        Add a missing *requirement* for the given *distribution*.
+
+        :type distribution: :class:`distutils2.database.InstalledDistribution`
+                            or :class:`distutils2.database.EggInfoDistribution`
+        :type requirement: ``str``
+        """
+        logger.debug('%s missing %r', distribution, requirement)
+        self.missing.setdefault(distribution, []).append(requirement)
+
+    def _repr_dist(self, dist):
+        return '%s %s' % (dist.name, dist.version)
+
+    def repr_node(self, dist, level=1):
+        """Prints only a subgraph"""
+        output = [self._repr_dist(dist)]
+        for other, label in self.adjacency_list[dist]:
+            dist = self._repr_dist(other)
+            if label is not None:
+                dist = '%s [%s]' % (dist, label)
+            output.append('    ' * level + str(dist))
+            suboutput = self.repr_node(other, level + 1)
+            subs = suboutput.split('\n')
+            output.extend(subs[1:])
+        return '\n'.join(output)
+
+    def to_dot(self, f, skip_disconnected=True):
+        """Writes a DOT output for the graph to the provided file *f*.
+
+        If *skip_disconnected* is set to ``True``, then all distributions
+        that are not dependent on any other distribution are skipped.
+
+        :type f: has to support ``file``-like operations
+        :type skip_disconnected: ``bool``
+        """
+        disconnected = []
+
+        f.write("digraph dependencies {\n")
+        for dist, adjs in self.adjacency_list.items():
+            if len(adjs) == 0 and not skip_disconnected:
+                disconnected.append(dist)
+            for other, label in adjs:
+                if label is not None:
+                    f.write('"%s" -> "%s" [label="%s"]\n' %
+                            (dist.name, other.name, label))
+                else:
+                    f.write('"%s" -> "%s"\n' % (dist.name, other.name))
+        if not skip_disconnected and len(disconnected) > 0:
+            f.write('subgraph disconnected {\n')
+            f.write('label = "Disconnected"\n')
+            f.write('bgcolor = red\n')
+
+            for dist in disconnected:
+                f.write('"%s"' % dist.name)
+                f.write('\n')
+            f.write('}\n')
+        f.write('}\n')
+
+    def topological_sort(self):
+        """
+        Perform a topological sort of the graph.
+        :return: A tuple, the first element of which is a topologically sorted
+                 list of distributions, and the second element of which is a
+                 list of distributions that cannot be sorted because they have
+                 circular dependencies and so form a cycle.
+        """
+        result = []
+        # Make a shallow copy of the adjacency list
+        alist = {}
+        for k, v in self.adjacency_list.items():
+            alist[k] = v[:]
+        while True:
+            # See what we can remove in this run
+            to_remove = []
+            for k, v in list(alist.items())[:]:
+                if not v:
+                    to_remove.append(k)
+                    del alist[k]
+            if not to_remove:
+                # What's left in alist (if anything) is a cycle.
+                break
+            # Remove from the adjacency list of others
+            for k, v in alist.items():
+                alist[k] = [(d, r) for d, r in v if d not in to_remove]
+            logger.debug('Moving to result: %s',
+                         ['%s (%s)' % (d.name, d.version) for d in to_remove])
+            result.extend(to_remove)
+        return result, list(alist.keys())
+
+    def __repr__(self):
+        """Representation of the graph"""
+        output = []
+        for dist, adjs in self.adjacency_list.items():
+            output.append(self.repr_node(dist))
+        return '\n'.join(output)
+
+
+def make_graph(dists, scheme='default'):
+    """Makes a dependency graph from the given distributions.
+
+    :parameter dists: a list of distributions
+    :type dists: list of :class:`distutils2.database.InstalledDistribution` and
+                 :class:`distutils2.database.EggInfoDistribution` instances
+    :rtype: a :class:`DependencyGraph` instance
+    """
+    scheme = get_scheme(scheme)
+    graph = DependencyGraph()
+    provided = {}  # maps names to lists of (version, dist) tuples
+
+    # first, build the graph and find out what's provided
+    for dist in dists:
+        graph.add_distribution(dist)
+
+        for p in dist.provides:
+            name, version = parse_name_and_version(p)
+            logger.debug('Add to provided: %s, %s, %s', name, version, dist)
+            provided.setdefault(name, []).append((version, dist))
+
+    # now make the edges
+    for dist in dists:
+        requires = (dist.run_requires | dist.meta_requires
+                    | dist.build_requires | dist.dev_requires)
+        for req in requires:
+            try:
+                matcher = scheme.matcher(req)
+            except UnsupportedVersionError:
+                # XXX compat-mode if cannot read the version
+                logger.warning('could not read version %r - using name only',
+                               req)
+                name = req.split()[0]
+                matcher = scheme.matcher(name)
+
+            name = matcher.key  # case-insensitive
+
+            matched = False
+            if name in provided:
+                for version, provider in provided[name]:
+                    try:
+                        match = matcher.match(version)
+                    except UnsupportedVersionError:
+                        match = False
+
+                    if match:
+                        graph.add_edge(dist, provider, req)
+                        matched = True
+                        break
+            if not matched:
+                graph.add_missing(dist, req)
+    return graph
+
+
+def get_dependent_dists(dists, dist):
+    """Recursively generate a list of distributions from *dists* that are
+    dependent on *dist*.
+
+    :param dists: a list of distributions
+    :param dist: a distribution, member of *dists* for which we are interested
+    """
+    if dist not in dists:
+        raise DistlibException('given distribution %r is not a member '
+                               'of the list' % dist.name)
+    graph = make_graph(dists)
+
+    dep = [dist]  # dependent distributions
+    todo = graph.reverse_list[dist]  # list of nodes we should inspect
+
+    while todo:
+        d = todo.pop()
+        dep.append(d)
+        for succ in graph.reverse_list[d]:
+            if succ not in dep:
+                todo.append(succ)
+
+    dep.pop(0)  # remove dist from dep, was there to prevent infinite loops
+    return dep
+
+
+def get_required_dists(dists, dist):
+    """Recursively generate a list of distributions from *dists* that are
+    required by *dist*.
+
+    :param dists: a list of distributions
+    :param dist: a distribution, member of *dists* for which we are interested
+                 in finding the dependencies.
+    """
+    if dist not in dists:
+        raise DistlibException('given distribution %r is not a member '
+                               'of the list' % dist.name)
+    graph = make_graph(dists)
+
+    req = set()  # required distributions
+    todo = graph.adjacency_list[dist]  # list of nodes we should inspect
+    seen = set(t[0] for t in todo)  # already added to todo
+
+    while todo:
+        d = todo.pop()[0]
+        req.add(d)
+        pred_list = graph.adjacency_list[d]
+        for pred in pred_list:
+            d = pred[0]
+            if d not in req and d not in seen:
+                seen.add(d)
+                todo.append(pred)
+    return req
+
+
+def make_dist(name, version, **kwargs):
+    """
+    A convenience method for making a dist given just a name and version.
+    """
+    summary = kwargs.pop('summary', 'Placeholder for summary')
+    md = Metadata(**kwargs)
+    md.name = name
+    md.version = version
+    md.summary = summary or 'Placeholder for summary'
+    return Distribution(md)
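The distlib ``database`` module above ends with the dependency-graph helpers
(``DependencyGraph``, ``make_graph``, ``topological_sort``). A minimal usage
sketch, assuming the upstream ``distlib`` package is importable (rather than
this vendored ``pip._vendor.distlib`` copy) and using ``make_dist``
placeholders that carry no requirements, so the resulting graph has no edges:

    # Sketch only: build a tiny graph from placeholder distributions and
    # topologically sort it. Real graphs are normally built from installed
    # distributions discovered via DistributionPath.
    from distlib.database import make_dist, make_graph

    dists = [make_dist('alpha', '1.0'), make_dist('beta', '2.0')]
    graph = make_graph(dists)
    ordered, cyclic = graph.topological_sort()
    print([d.name for d in ordered])  # both dists; order is arbitrary here
    print(cyclic)                     # [] - no circular dependencies in this toy graph
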
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/index.py b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/index.py
new file mode 100644
index 0000000..56cd286
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/index.py
@@ -0,0 +1,508 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013-2023 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+import hashlib
+import logging
+import os
+import shutil
+import subprocess
+import tempfile
+try:
+    from threading import Thread
+except ImportError:  # pragma: no cover
+    from dummy_threading import Thread
+
+from . import DistlibException
+from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr,
+                     urlparse, build_opener, string_types)
+from .util import zip_dir, ServerProxy
+
+logger = logging.getLogger(__name__)
+
+DEFAULT_INDEX = 'https://pypi.org/pypi'
+DEFAULT_REALM = 'pypi'
+
+
+class PackageIndex(object):
+    """
+    This class represents a package index compatible with PyPI, the Python
+    Package Index.
+    """
+
+    boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$'
+
+    def __init__(self, url=None):
+        """
+        Initialise an instance.
+
+        :param url: The URL of the index. If not specified, the URL for PyPI is
+                    used.
+        """
+        self.url = url or DEFAULT_INDEX
+        self.read_configuration()
+        scheme, netloc, path, params, query, frag = urlparse(self.url)
+        if params or query or frag or scheme not in ('http', 'https'):
+            raise DistlibException('invalid repository: %s' % self.url)
+        self.password_handler = None
+        self.ssl_verifier = None
+        self.gpg = None
+        self.gpg_home = None
+        with open(os.devnull, 'w') as sink:
+            # Use gpg by default rather than gpg2, as gpg2 insists on
+            # prompting for passwords
+            for s in ('gpg', 'gpg2'):
+                try:
+                    rc = subprocess.check_call([s, '--version'], stdout=sink,
+                                               stderr=sink)
+                    if rc == 0:
+                        self.gpg = s
+                        break
+                except OSError:
+                    pass
+
+    def _get_pypirc_command(self):
+        """
+        Get the distutils command for interacting with PyPI configurations.
+        :return: the command.
+        """
+        from .util import _get_pypirc_command as cmd
+        return cmd()
+
+    def read_configuration(self):
+        """
+        Read the PyPI access configuration as supported by distutils. This populates
+        ``username``, ``password``, ``realm`` and ``url`` attributes from the
+        configuration.
+        """
+        from .util import _load_pypirc
+        cfg = _load_pypirc(self)
+        self.username = cfg.get('username')
+        self.password = cfg.get('password')
+        self.realm = cfg.get('realm', 'pypi')
+        self.url = cfg.get('repository', self.url)
+
+    def save_configuration(self):
+        """
+        Save the PyPI access configuration. You must have set ``username`` and
+        ``password`` attributes before calling this method.
+        """
+        self.check_credentials()
+        from .util import _store_pypirc
+        _store_pypirc(self)
+
+    def check_credentials(self):
+        """
+        Check that ``username`` and ``password`` have been set, and raise an
+        exception if not.
+        """
+        if self.username is None or self.password is None:
+            raise DistlibException('username and password must be set')
+        pm = HTTPPasswordMgr()
+        _, netloc, _, _, _, _ = urlparse(self.url)
+        pm.add_password(self.realm, netloc, self.username, self.password)
+        self.password_handler = HTTPBasicAuthHandler(pm)
+
+    def register(self, metadata):  # pragma: no cover
+        """
+        Register a distribution on PyPI, using the provided metadata.
+
+        :param metadata: A :class:`Metadata` instance defining at least a name
+                         and version number for the distribution to be
+                         registered.
+        :return: The HTTP response received from PyPI upon submission of the
+                request.
+        """
+        self.check_credentials()
+        metadata.validate()
+        d = metadata.todict()
+        d[':action'] = 'verify'
+        request = self.encode_request(d.items(), [])
+        self.send_request(request)
+        d[':action'] = 'submit'
+        request = self.encode_request(d.items(), [])
+        return self.send_request(request)
+
+    def _reader(self, name, stream, outbuf):
+        """
+        Thread runner for reading lines from a subprocess into a buffer.
+
+        :param name: The logical name of the stream (used for logging only).
+        :param stream: The stream to read from. This will typically be a pipe
+                       connected to the output stream of a subprocess.
+        :param outbuf: The list to append the read lines to.
+        """
+        while True:
+            s = stream.readline()
+            if not s:
+                break
+            s = s.decode('utf-8').rstrip()
+            outbuf.append(s)
+            logger.debug('%s: %s' % (name, s))
+        stream.close()
+
+    def get_sign_command(self, filename, signer, sign_password, keystore=None):  # pragma: no cover
+        """
+        Return a suitable command for signing a file.
+
+        :param filename: The pathname to the file to be signed.
+        :param signer: The identifier of the signer of the file.
+        :param sign_password: The passphrase for the signer's
+                              private key used for signing.
+        :param keystore: The path to a directory which contains the keys
+                         used in verification. If not specified, the
+                         instance's ``gpg_home`` attribute is used instead.
+        :return: The signing command as a list suitable to be
+                 passed to :class:`subprocess.Popen`.
+        """
+        cmd = [self.gpg, '--status-fd', '2', '--no-tty']
+        if keystore is None:
+            keystore = self.gpg_home
+        if keystore:
+            cmd.extend(['--homedir', keystore])
+        if sign_password is not None:
+            cmd.extend(['--batch', '--passphrase-fd', '0'])
+        td = tempfile.mkdtemp()
+        sf = os.path.join(td, os.path.basename(filename) + '.asc')
+        cmd.extend(['--detach-sign', '--armor', '--local-user',
+                    signer, '--output', sf, filename])
+        logger.debug('invoking: %s', ' '.join(cmd))
+        return cmd, sf
+
+    def run_command(self, cmd, input_data=None):
+        """
+        Run a command in a child process, passing it any input data specified.
+
+        :param cmd: The command to run.
+        :param input_data: If specified, this must be a byte string containing
+                           data to be sent to the child process.
+        :return: A tuple consisting of the subprocess' exit code, a list of
+                 lines read from the subprocess' ``stdout``, and a list of
+                 lines read from the subprocess' ``stderr``.
+        """
+        kwargs = {
+            'stdout': subprocess.PIPE,
+            'stderr': subprocess.PIPE,
+        }
+        if input_data is not None:
+            kwargs['stdin'] = subprocess.PIPE
+        stdout = []
+        stderr = []
+        p = subprocess.Popen(cmd, **kwargs)
+        # We don't use communicate() here because we may need to
+        # get clever with interacting with the command
+        t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout))
+        t1.start()
+        t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr))
+        t2.start()
+        if input_data is not None:
+            p.stdin.write(input_data)
+            p.stdin.close()
+
+        p.wait()
+        t1.join()
+        t2.join()
+        return p.returncode, stdout, stderr
+
+    def sign_file(self, filename, signer, sign_password, keystore=None):  # pragma: no cover
+        """
+        Sign a file.
+
+        :param filename: The pathname to the file to be signed.
+        :param signer: The identifier of the signer of the file.
+        :param sign_password: The passphrase for the signer's
+                              private key used for signing.
+        :param keystore: The path to a directory which contains the keys
+                         used in signing. If not specified, the instance's
+                         ``gpg_home`` attribute is used instead.
+        :return: The absolute pathname of the file where the signature is
+                 stored.
+        """
+        cmd, sig_file = self.get_sign_command(filename, signer, sign_password,
+                                              keystore)
+        rc, stdout, stderr = self.run_command(cmd,
+                                              sign_password.encode('utf-8'))
+        if rc != 0:
+            raise DistlibException('sign command failed with error '
+                                   'code %s' % rc)
+        return sig_file
+
+    def upload_file(self, metadata, filename, signer=None, sign_password=None,
+                    filetype='sdist', pyversion='source', keystore=None):
+        """
+        Upload a release file to the index.
+
+        :param metadata: A :class:`Metadata` instance defining at least a name
+                         and version number for the file to be uploaded.
+        :param filename: The pathname of the file to be uploaded.
+        :param signer: The identifier of the signer of the file.
+        :param sign_password: The passphrase for the signer's
+                              private key used for signing.
+        :param filetype: The type of the file being uploaded. This is the
+                        distutils command which produced that file, e.g.
+                        ``sdist`` or ``bdist_wheel``.
+        :param pyversion: The version of Python which the release relates
+                          to. For code compatible with any Python, this would
+                          be ``source``, otherwise it would be e.g. ``3.2``.
+        :param keystore: The path to a directory which contains the keys
+                         used in signing. If not specified, the instance's
+                         ``gpg_home`` attribute is used instead.
+        :return: The HTTP response received from PyPI upon submission of the
+                request.
+        """
+        self.check_credentials()
+        if not os.path.exists(filename):
+            raise DistlibException('not found: %s' % filename)
+        metadata.validate()
+        d = metadata.todict()
+        sig_file = None
+        if signer:
+            if not self.gpg:
+                logger.warning('no signing program available - not signed')
+            else:
+                sig_file = self.sign_file(filename, signer, sign_password,
+                                          keystore)
+        with open(filename, 'rb') as f:
+            file_data = f.read()
+        md5_digest = hashlib.md5(file_data).hexdigest()
+        sha256_digest = hashlib.sha256(file_data).hexdigest()
+        d.update({
+            ':action': 'file_upload',
+            'protocol_version': '1',
+            'filetype': filetype,
+            'pyversion': pyversion,
+            'md5_digest': md5_digest,
+            'sha256_digest': sha256_digest,
+        })
+        files = [('content', os.path.basename(filename), file_data)]
+        if sig_file:
+            with open(sig_file, 'rb') as f:
+                sig_data = f.read()
+            files.append(('gpg_signature', os.path.basename(sig_file),
+                         sig_data))
+            shutil.rmtree(os.path.dirname(sig_file))
+        request = self.encode_request(d.items(), files)
+        return self.send_request(request)
+
+    def upload_documentation(self, metadata, doc_dir):  # pragma: no cover
+        """
+        Upload documentation to the index.
+
+        :param metadata: A :class:`Metadata` instance defining at least a name
+                         and version number for the documentation to be
+                         uploaded.
+        :param doc_dir: The pathname of the directory which contains the
+                        documentation. This should be the directory that
+                        contains the ``index.html`` for the documentation.
+        :return: The HTTP response received from PyPI upon submission of the
+                request.
+        """
+        self.check_credentials()
+        if not os.path.isdir(doc_dir):
+            raise DistlibException('not a directory: %r' % doc_dir)
+        fn = os.path.join(doc_dir, 'index.html')
+        if not os.path.exists(fn):
+            raise DistlibException('not found: %r' % fn)
+        metadata.validate()
+        name, version = metadata.name, metadata.version
+        zip_data = zip_dir(doc_dir).getvalue()
+        fields = [(':action', 'doc_upload'),
+                  ('name', name), ('version', version)]
+        files = [('content', name, zip_data)]
+        request = self.encode_request(fields, files)
+        return self.send_request(request)
+
+    def get_verify_command(self, signature_filename, data_filename,
+                           keystore=None):
+        """
+        Return a suitable command for verifying a file.
+
+        :param signature_filename: The pathname to the file containing the
+                                   signature.
+        :param data_filename: The pathname to the file containing the
+                              signed data.
+        :param keystore: The path to a directory which contains the keys
+                         used in verification. If not specified, the
+                         instance's ``gpg_home`` attribute is used instead.
+        :return: The verifying command as a list suitable to be
+                 passed to :class:`subprocess.Popen`.
+        """
+        cmd = [self.gpg, '--status-fd', '2', '--no-tty']
+        if keystore is None:
+            keystore = self.gpg_home
+        if keystore:
+            cmd.extend(['--homedir', keystore])
+        cmd.extend(['--verify', signature_filename, data_filename])
+        logger.debug('invoking: %s', ' '.join(cmd))
+        return cmd
+
+    def verify_signature(self, signature_filename, data_filename,
+                         keystore=None):
+        """
+        Verify a signature for a file.
+
+        :param signature_filename: The pathname to the file containing the
+                                   signature.
+        :param data_filename: The pathname to the file containing the
+                              signed data.
+        :param keystore: The path to a directory which contains the keys
+                         used in verification. If not specified, the
+                         instance's ``gpg_home`` attribute is used instead.
+        :return: True if the signature was verified, else False.
+        """
+        if not self.gpg:
+            raise DistlibException('verification unavailable because gpg '
+                                   'unavailable')
+        cmd = self.get_verify_command(signature_filename, data_filename,
+                                      keystore)
+        rc, stdout, stderr = self.run_command(cmd)
+        if rc not in (0, 1):
+            raise DistlibException('verify command failed with error code %s' % rc)
+        return rc == 0
+
+    def download_file(self, url, destfile, digest=None, reporthook=None):
+        """
+        This is a convenience method for downloading a file from a URL.
+        Normally, this will be a file from the index, though currently
+        no check is made for this (i.e. a file can be downloaded from
+        anywhere).
+
+        The method is just like the :func:`urlretrieve` function in the
+        standard library, except that it allows a digest to be computed
+        during the download and checks that the downloaded data matches
+        any expected value.
+
+        :param url: The URL of the file to be downloaded (assumed to be
+                    available via an HTTP GET request).
+        :param destfile: The pathname where the downloaded file is to be
+                         saved.
+        :param digest: If specified, this must be a (hasher, value)
+                       tuple, where hasher is the algorithm used (e.g.
+                       ``'md5'``) and ``value`` is the expected value.
+        :param reporthook: The same as for :func:`urlretrieve` in the
+                           standard library.
+        """
+        if digest is None:
+            digester = None
+            logger.debug('No digest specified')
+        else:
+            if isinstance(digest, (list, tuple)):
+                hasher, digest = digest
+            else:
+                hasher = 'md5'
+            digester = getattr(hashlib, hasher)()
+            logger.debug('Digest specified: %s' % digest)
+        # The following code is equivalent to urlretrieve.
+        # We need to do it this way so that we can compute the
+        # digest of the file as we go.
+        with open(destfile, 'wb') as dfp:
+            # addinfourl is not a context manager on 2.x
+            # so we have to use try/finally
+            sfp = self.send_request(Request(url))
+            try:
+                headers = sfp.info()
+                blocksize = 8192
+                size = -1
+                read = 0
+                blocknum = 0
+                if "content-length" in headers:
+                    size = int(headers["Content-Length"])
+                if reporthook:
+                    reporthook(blocknum, blocksize, size)
+                while True:
+                    block = sfp.read(blocksize)
+                    if not block:
+                        break
+                    read += len(block)
+                    dfp.write(block)
+                    if digester:
+                        digester.update(block)
+                    blocknum += 1
+                    if reporthook:
+                        reporthook(blocknum, blocksize, size)
+            finally:
+                sfp.close()
+
+        # check that we got the whole file, if we can
+        if size >= 0 and read < size:
+            raise DistlibException(
+                'retrieval incomplete: got only %d out of %d bytes'
+                % (read, size))
+        # if we have a digest, it must match.
+        if digester:
+            actual = digester.hexdigest()
+            if digest != actual:
+                raise DistlibException('%s digest mismatch for %s: expected '
+                                       '%s, got %s' % (hasher, destfile,
+                                                       digest, actual))
+            logger.debug('Digest verified: %s', digest)
+
+    def send_request(self, req):
+        """
+        Send a standard library :class:`Request` to PyPI and return its
+        response.
+
+        :param req: The request to send.
+        :return: The HTTP response from PyPI (a standard library HTTPResponse).
+        """
+        handlers = []
+        if self.password_handler:
+            handlers.append(self.password_handler)
+        if self.ssl_verifier:
+            handlers.append(self.ssl_verifier)
+        opener = build_opener(*handlers)
+        return opener.open(req)
+
+    def encode_request(self, fields, files):
+        """
+        Encode fields and files for posting to an HTTP server.
+
+        :param fields: The fields to send as a list of (fieldname, value)
+                       tuples.
+        :param files: The files to send as a list of (fieldname, filename,
+                      file_bytes) tuple.
+        """
+        # Adapted from packaging, which in turn was adapted from
+        # http://code.activestate.com/recipes/146306
+
+        parts = []
+        boundary = self.boundary
+        for k, values in fields:
+            if not isinstance(values, (list, tuple)):
+                values = [values]
+
+            for v in values:
+                parts.extend((
+                    b'--' + boundary,
+                    ('Content-Disposition: form-data; name="%s"' %
+                     k).encode('utf-8'),
+                    b'',
+                    v.encode('utf-8')))
+        for key, filename, value in files:
+            parts.extend((
+                b'--' + boundary,
+                ('Content-Disposition: form-data; name="%s"; filename="%s"' %
+                 (key, filename)).encode('utf-8'),
+                b'',
+                value))
+
+        parts.extend((b'--' + boundary + b'--', b''))
+
+        body = b'\r\n'.join(parts)
+        ct = b'multipart/form-data; boundary=' + boundary
+        headers = {
+            'Content-type': ct,
+            'Content-length': str(len(body))
+        }
+        return Request(self.url, body, headers)
+
+    def search(self, terms, operator=None):  # pragma: no cover
+        if isinstance(terms, string_types):
+            terms = {'name': terms}
+        rpc_proxy = ServerProxy(self.url, timeout=3.0)
+        try:
+            return rpc_proxy.search(terms, operator or 'and')
+        finally:
+            rpc_proxy('close')()
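``PackageIndex.download_file`` above streams a file to disk and can verify a
digest as it reads. A minimal sketch, again assuming upstream ``distlib`` is
importable; ``fetch_verified`` is a hypothetical helper name and its arguments
are placeholders, not real artefacts:

    # Sketch only: download *url* to *dest* and let download_file() verify the
    # SHA-256 digest as the bytes arrive; DistlibException is raised on a
    # short read or a digest mismatch.
    from distlib.index import PackageIndex

    def fetch_verified(url, dest, sha256_hex):
        index = PackageIndex()  # defaults to https://pypi.org/pypi
        index.download_file(url, dest, digest=('sha256', sha256_hex))
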
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/locators.py b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/locators.py
new file mode 100644
index 0000000..f9f0788
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/locators.py
@@ -0,0 +1,1303 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2023 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+
+import gzip
+from io import BytesIO
+import json
+import logging
+import os
+import posixpath
+import re
+try:
+    import threading
+except ImportError:  # pragma: no cover
+    import dummy_threading as threading
+import zlib
+
+from . import DistlibException
+from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url,
+                     queue, quote, unescape, build_opener,
+                     HTTPRedirectHandler as BaseRedirectHandler, text_type,
+                     Request, HTTPError, URLError)
+from .database import Distribution, DistributionPath, make_dist
+from .metadata import Metadata, MetadataInvalidError
+from .util import (cached_property, ensure_slash, split_filename, get_project_data,
+                   parse_requirement, parse_name_and_version, ServerProxy,
+                   normalize_name)
+from .version import get_scheme, UnsupportedVersionError
+from .wheel import Wheel, is_compatible
+
+logger = logging.getLogger(__name__)
+
+HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)')
+CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I)
+HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml')
+DEFAULT_INDEX = 'https://pypi.org/pypi'
+
+
+def get_all_distribution_names(url=None):
+    """
+    Return all distribution names known by an index.
+    :param url: The URL of the index.
+    :return: A list of all known distribution names.
+    """
+    if url is None:
+        url = DEFAULT_INDEX
+    client = ServerProxy(url, timeout=3.0)
+    try:
+        return client.list_packages()
+    finally:
+        client('close')()
+
+
+class RedirectHandler(BaseRedirectHandler):
+    """
+    A class to work around a bug in some Python 3.2.x releases.
+    """
+    # There's a bug in the base version for some 3.2.x
+    # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header
+    # returns e.g. /abc, it bails because it says the scheme ''
+    # is bogus, when actually it should use the request's
+    # URL for the scheme. See Python issue #13696.
+    def http_error_302(self, req, fp, code, msg, headers):
+        # Some servers (incorrectly) return multiple Location headers
+        # (so probably same goes for URI).  Use first header.
+        newurl = None
+        for key in ('location', 'uri'):
+            if key in headers:
+                newurl = headers[key]
+                break
+        if newurl is None:  # pragma: no cover
+            return
+        urlparts = urlparse(newurl)
+        if urlparts.scheme == '':
+            newurl = urljoin(req.get_full_url(), newurl)
+            if hasattr(headers, 'replace_header'):
+                headers.replace_header(key, newurl)
+            else:
+                headers[key] = newurl
+        return BaseRedirectHandler.http_error_302(self, req, fp, code, msg,
+                                                  headers)
+
+    http_error_301 = http_error_303 = http_error_307 = http_error_302
+
+
+class Locator(object):
+    """
+    A base class for locators - things that locate distributions.
+    """
+    source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz')
+    binary_extensions = ('.egg', '.exe', '.whl')
+    excluded_extensions = ('.pdf',)
+
+    # A list of tags indicating which wheels you want to match. The default
+    # value of None matches against the tags compatible with the running
+    # Python. If you want to match other values, set wheel_tags on a locator
+    # instance to a list of tuples (pyver, abi, arch) which you want to match.
+    wheel_tags = None
+
+    downloadable_extensions = source_extensions + ('.whl',)
+
+    def __init__(self, scheme='default'):
+        """
+        Initialise an instance.
+        :param scheme: Because locators look for most recent versions, they
+                       need to know the version scheme to use. This specifies
+                       the current PEP-recommended scheme - use ``'legacy'``
+                       if you need to support existing distributions on PyPI.
+        """
+        self._cache = {}
+        self.scheme = scheme
+        # Because of bugs in some of the handlers on some of the platforms,
+        # we use our own opener rather than just using urlopen.
+        self.opener = build_opener(RedirectHandler())
+        # If get_project() is called from locate(), the matcher instance
+        # is set from the requirement passed to locate(). See issue #18 for
+        # why this can be useful to know.
+        self.matcher = None
+        self.errors = queue.Queue()
+
+    def get_errors(self):
+        """
+        Return any errors which have occurred.
+        """
+        result = []
+        while not self.errors.empty():  # pragma: no cover
+            try:
+                e = self.errors.get(False)
+                result.append(e)
+            except self.errors.Empty:
+                continue
+            self.errors.task_done()
+        return result
+
+    def clear_errors(self):
+        """
+        Clear any errors which may have been logged.
+        """
+        # Just get the errors and throw them away
+        self.get_errors()
+
+    def clear_cache(self):
+        self._cache.clear()
+
+    def _get_scheme(self):
+        return self._scheme
+
+    def _set_scheme(self, value):
+        self._scheme = value
+
+    scheme = property(_get_scheme, _set_scheme)
+
+    def _get_project(self, name):
+        """
+        For a given project, get a dictionary mapping available versions to Distribution
+        instances.
+
+        This should be implemented in subclasses.
+
+        If called from a locate() request, self.matcher will be set to a
+        matcher for the requirement to satisfy, otherwise it will be None.
+        """
+        raise NotImplementedError('Please implement in the subclass')
+
+    def get_distribution_names(self):
+        """
+        Return all the distribution names known to this locator.
+        """
+        raise NotImplementedError('Please implement in the subclass')
+
+    def get_project(self, name):
+        """
+        For a given project, get a dictionary mapping available versions to Distribution
+        instances.
+
+        This calls _get_project to do all the work, and just implements a caching layer on top.
+        """
+        if self._cache is None:  # pragma: no cover
+            result = self._get_project(name)
+        elif name in self._cache:
+            result = self._cache[name]
+        else:
+            self.clear_errors()
+            result = self._get_project(name)
+            self._cache[name] = result
+        return result
+
+    def score_url(self, url):
+        """
+        Give an url a score which can be used to choose preferred URLs
+        for a given project release.
+        """
+        t = urlparse(url)
+        basename = posixpath.basename(t.path)
+        compatible = True
+        is_wheel = basename.endswith('.whl')
+        is_downloadable = basename.endswith(self.downloadable_extensions)
+        if is_wheel:
+            compatible = is_compatible(Wheel(basename), self.wheel_tags)
+        return (t.scheme == 'https', 'pypi.org' in t.netloc,
+                is_downloadable, is_wheel, compatible, basename)
+
+    def prefer_url(self, url1, url2):
+        """
+        Choose one of two URLs where both are candidates for distribution
+        archives for the same version of a distribution (for example,
+        .tar.gz vs. zip).
+
+        The current implementation favours https:// URLs over http://, archives
+        from PyPI over those from other locations, wheel compatibility (if a
+        wheel) and then the archive name.
+        """
+        result = url2
+        if url1:
+            s1 = self.score_url(url1)
+            s2 = self.score_url(url2)
+            if s1 > s2:
+                result = url1
+            if result != url2:
+                logger.debug('Not replacing %r with %r', url1, url2)
+            else:
+                logger.debug('Replacing %r with %r', url1, url2)
+        return result
+
+    def split_filename(self, filename, project_name):
+        """
+        Attempt to split a filename in project name, version and Python version.
+        """
+        return split_filename(filename, project_name)
+
+    def convert_url_to_download_info(self, url, project_name):
+        """
+        See if a URL is a candidate for a download URL for a project (the URL
+        has typically been scraped from an HTML page).
+
+        If it is, a dictionary is returned with keys "name", "version",
+        "filename" and "url"; otherwise, None is returned.
+        """
+        def same_project(name1, name2):
+            return normalize_name(name1) == normalize_name(name2)
+
+        result = None
+        scheme, netloc, path, params, query, frag = urlparse(url)
+        if frag.lower().startswith('egg='):  # pragma: no cover
+            logger.debug('%s: version hint in fragment: %r',
+                         project_name, frag)
+        m = HASHER_HASH.match(frag)
+        if m:
+            algo, digest = m.groups()
+        else:
+            algo, digest = None, None
+        origpath = path
+        if path and path[-1] == '/':  # pragma: no cover
+            path = path[:-1]
+        if path.endswith('.whl'):
+            try:
+                wheel = Wheel(path)
+                if not is_compatible(wheel, self.wheel_tags):
+                    logger.debug('Wheel not compatible: %s', path)
+                else:
+                    if project_name is None:
+                        include = True
+                    else:
+                        include = same_project(wheel.name, project_name)
+                    if include:
+                        result = {
+                            'name': wheel.name,
+                            'version': wheel.version,
+                            'filename': wheel.filename,
+                            'url': urlunparse((scheme, netloc, origpath,
+                                               params, query, '')),
+                            'python-version': ', '.join(
+                                ['.'.join(list(v[2:])) for v in wheel.pyver]),
+                        }
+            except Exception:  # pragma: no cover
+                logger.warning('invalid path for wheel: %s', path)
+        elif not path.endswith(self.downloadable_extensions):  # pragma: no cover
+            logger.debug('Not downloadable: %s', path)
+        else:  # downloadable extension
+            path = filename = posixpath.basename(path)
+            for ext in self.downloadable_extensions:
+                if path.endswith(ext):
+                    path = path[:-len(ext)]
+                    t = self.split_filename(path, project_name)
+                    if not t:  # pragma: no cover
+                        logger.debug('No match for project/version: %s', path)
+                    else:
+                        name, version, pyver = t
+                        if not project_name or same_project(project_name, name):
+                            result = {
+                                'name': name,
+                                'version': version,
+                                'filename': filename,
+                                'url': urlunparse((scheme, netloc, origpath,
+                                                   params, query, '')),
+                            }
+                            if pyver:  # pragma: no cover
+                                result['python-version'] = pyver
+                    break
+        if result and algo:
+            result['%s_digest' % algo] = digest
+        return result
+
+    def _get_digest(self, info):
+        """
+        Get a digest from a dictionary by looking at a "digests" dictionary
+        or keys of the form 'algo_digest'.
+
+        Returns a 2-tuple (algo, digest) if found, else None. Currently
+        looks only for SHA256, then MD5.
+        """
+        result = None
+        if 'digests' in info:
+            digests = info['digests']
+            for algo in ('sha256', 'md5'):
+                if algo in digests:
+                    result = (algo, digests[algo])
+                    break
+        if not result:
+            for algo in ('sha256', 'md5'):
+                key = '%s_digest' % algo
+                if key in info:
+                    result = (algo, info[key])
+                    break
+        return result
+
+    def _update_version_data(self, result, info):
+        """
+        Update a result dictionary (the final result from _get_project) with a
+        dictionary for a specific version, which typically holds information
+        gleaned from a filename or URL for an archive for the distribution.
+        """
+        name = info.pop('name')
+        version = info.pop('version')
+        if version in result:
+            dist = result[version]
+            md = dist.metadata
+        else:
+            dist = make_dist(name, version, scheme=self.scheme)
+            md = dist.metadata
+        dist.digest = digest = self._get_digest(info)
+        url = info['url']
+        result['digests'][url] = digest
+        if md.source_url != info['url']:
+            md.source_url = self.prefer_url(md.source_url, url)
+            result['urls'].setdefault(version, set()).add(url)
+        dist.locator = self
+        result[version] = dist
+
+    def locate(self, requirement, prereleases=False):
+        """
+        Find the most recent distribution which matches the given
+        requirement.
+
+        :param requirement: A requirement of the form 'foo (1.0)' or perhaps
+                            'foo (>= 1.0, < 2.0, != 1.3)'
+        :param prereleases: If ``True``, allow pre-release versions
+                            to be located. Otherwise, pre-release versions
+                            are not returned.
+        :return: A :class:`Distribution` instance, or ``None`` if no such
+                 distribution could be located.
+        """
+        result = None
+        r = parse_requirement(requirement)
+        if r is None:  # pragma: no cover
+            raise DistlibException('Not a valid requirement: %r' % requirement)
+        scheme = get_scheme(self.scheme)
+        self.matcher = matcher = scheme.matcher(r.requirement)
+        logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__)
+        versions = self.get_project(r.name)
+        if len(versions) > 2:   # urls and digests keys are present
+            # sometimes, versions are invalid
+            slist = []
+            vcls = matcher.version_class
+            for k in versions:
+                if k in ('urls', 'digests'):
+                    continue
+                try:
+                    if not matcher.match(k):
+                        pass  # logger.debug('%s did not match %r', matcher, k)
+                    else:
+                        if prereleases or not vcls(k).is_prerelease:
+                            slist.append(k)
+                except Exception:  # pragma: no cover
+                    logger.warning('error matching %s with %r', matcher, k)
+                    pass  # slist.append(k)
+            if len(slist) > 1:
+                slist = sorted(slist, key=scheme.key)
+            if slist:
+                logger.debug('sorted list: %s', slist)
+                version = slist[-1]
+                result = versions[version]
+        if result:
+            if r.extras:
+                result.extras = r.extras
+            result.download_urls = versions.get('urls', {}).get(version, set())
+            d = {}
+            sd = versions.get('digests', {})
+            for url in result.download_urls:
+                if url in sd:  # pragma: no cover
+                    d[url] = sd[url]
+            result.digests = d
+        self.matcher = None
+        return result
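+
+
+# --- Illustrative usage sketch (editor's addition, not part of distlib) ------
+# A small, locally runnable example of the URL-scoring helpers defined on
+# ``Locator``. It only exercises ``score_url``/``prefer_url``, which need no
+# network access; the URLs below are made up for illustration.
+def _example_url_preference():  # pragma: no cover
+    loc = Locator()
+    sdist = 'https://pypi.org/packages/source/f/foo/foo-1.0.tar.gz'
+    mirror = 'http://mirror.example.com/foo-1.0.zip'
+    # ``prefer_url`` compares the ``score_url`` tuples and keeps the better
+    # candidate; here that is the https PyPI URL.
+    return loc.prefer_url(sdist, mirror)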
+
+
+class PyPIRPCLocator(Locator):
+    """
+    This locator uses XML-RPC to locate distributions. It therefore
+    cannot be used with simple mirrors (that only mirror file content).
+    """
+    def __init__(self, url, **kwargs):
+        """
+        Initialise an instance.
+
+        :param url: The URL to use for XML-RPC.
+        :param kwargs: Passed to the superclass constructor.
+        """
+        super(PyPIRPCLocator, self).__init__(**kwargs)
+        self.base_url = url
+        self.client = ServerProxy(url, timeout=3.0)
+
+    def get_distribution_names(self):
+        """
+        Return all the distribution names known to this locator.
+        """
+        return set(self.client.list_packages())
+
+    def _get_project(self, name):
+        result = {'urls': {}, 'digests': {}}
+        versions = self.client.package_releases(name, True)
+        for v in versions:
+            urls = self.client.release_urls(name, v)
+            data = self.client.release_data(name, v)
+            metadata = Metadata(scheme=self.scheme)
+            metadata.name = data['name']
+            metadata.version = data['version']
+            metadata.license = data.get('license')
+            metadata.keywords = data.get('keywords', [])
+            metadata.summary = data.get('summary')
+            dist = Distribution(metadata)
+            if urls:
+                info = urls[0]
+                metadata.source_url = info['url']
+                dist.digest = self._get_digest(info)
+                dist.locator = self
+                result[v] = dist
+                for info in urls:
+                    url = info['url']
+                    digest = self._get_digest(info)
+                    result['urls'].setdefault(v, set()).add(url)
+                    result['digests'][url] = digest
+        return result
+
+
+class PyPIJSONLocator(Locator):
+    """
+    This locator uses PyPI's JSON interface. It's very limited in functionality
+    and probably not worth using.
+    """
+    def __init__(self, url, **kwargs):
+        super(PyPIJSONLocator, self).__init__(**kwargs)
+        self.base_url = ensure_slash(url)
+
+    def get_distribution_names(self):
+        """
+        Return all the distribution names known to this locator.
+        """
+        raise NotImplementedError('Not available from this locator')
+
+    def _get_project(self, name):
+        result = {'urls': {}, 'digests': {}}
+        url = urljoin(self.base_url, '%s/json' % quote(name))
+        try:
+            resp = self.opener.open(url)
+            data = resp.read().decode()  # for now
+            d = json.loads(data)
+            md = Metadata(scheme=self.scheme)
+            data = d['info']
+            md.name = data['name']
+            md.version = data['version']
+            md.license = data.get('license')
+            md.keywords = data.get('keywords', [])
+            md.summary = data.get('summary')
+            dist = Distribution(md)
+            dist.locator = self
+            # urls = d['urls']
+            result[md.version] = dist
+            for info in d['urls']:
+                url = info['url']
+                dist.download_urls.add(url)
+                dist.digests[url] = self._get_digest(info)
+                result['urls'].setdefault(md.version, set()).add(url)
+                result['digests'][url] = self._get_digest(info)
+            # Now get other releases
+            for version, infos in d['releases'].items():
+                if version == md.version:
+                    continue    # already done
+                omd = Metadata(scheme=self.scheme)
+                omd.name = md.name
+                omd.version = version
+                odist = Distribution(omd)
+                odist.locator = self
+                result[version] = odist
+                for info in infos:
+                    url = info['url']
+                    odist.download_urls.add(url)
+                    odist.digests[url] = self._get_digest(info)
+                    result['urls'].setdefault(version, set()).add(url)
+                    result['digests'][url] = self._get_digest(info)
+#            for info in urls:
+#                md.source_url = info['url']
+#                dist.digest = self._get_digest(info)
+#                dist.locator = self
+#                for info in urls:
+#                    url = info['url']
+#                    result['urls'].setdefault(md.version, set()).add(url)
+#                    result['digests'][url] = self._get_digest(info)
+        except Exception as e:
+            self.errors.put(text_type(e))
+            logger.exception('JSON fetch failed: %s', e)
+        return result
+
+
+class Page(object):
+    """
+    This class represents a scraped HTML page.
+    """
+    # The following slightly hairy-looking regex just looks for the contents of
+    # an anchor link, which has an attribute "href" either immediately preceded
+    # or immediately followed by a "rel" attribute. The attribute values can be
+    # declared with double quotes, single quotes or no quotes - which leads to
+    # the length of the expression.
+    _href = re.compile("""
+(rel\\s*=\\s*(?:"(?P<rel1>[^"]*)"|'(?P<rel2>[^']*)'|(?P<rel3>[^>\\s\n]*))\\s+)?
+href\\s*=\\s*(?:"(?P<url1>[^"]*)"|'(?P<url2>[^']*)'|(?P<url3>[^>\\s\n]*))
+(\\s+rel\\s*=\\s*(?:"(?P<rel4>[^"]*)"|'(?P<rel5>[^']*)'|(?P<rel6>[^>\\s\n]*)))?
+""", re.I | re.S | re.X)
+    _base = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I | re.S)
+
+    def __init__(self, data, url):
+        """
+        Initialise an instance with the Unicode page contents and the URL they
+        came from.
+        """
+        self.data = data
+        self.base_url = self.url = url
+        m = self._base.search(self.data)
+        if m:
+            self.base_url = m.group(1)
+
+    _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
+
+    @cached_property
+    def links(self):
+        """
+        Return the URLs of all the links on a page together with information
+        about their "rel" attribute, for determining which ones to treat as
+        downloads and which ones to queue for further scraping.
+        """
+        def clean(url):
+            "Tidy up an URL."
+            scheme, netloc, path, params, query, frag = urlparse(url)
+            return urlunparse((scheme, netloc, quote(path),
+                               params, query, frag))
+
+        result = set()
+        for match in self._href.finditer(self.data):
+            d = match.groupdict('')
+            rel = (d['rel1'] or d['rel2'] or d['rel3'] or
+                   d['rel4'] or d['rel5'] or d['rel6'])
+            url = d['url1'] or d['url2'] or d['url3']
+            url = urljoin(self.base_url, url)
+            url = unescape(url)
+            url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url)
+            result.add((url, rel))
+        # We sort the result, hoping to bring the most recent versions
+        # to the front
+        result = sorted(result, key=lambda t: t[0], reverse=True)
+        return result
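+
+
+# --- Illustrative usage sketch (editor's addition, not part of distlib) ------
+# ``Page`` only needs a string of HTML and the URL it came from, so it can be
+# exercised without any network I/O. The HTML snippet below is made up.
+def _example_page_links():  # pragma: no cover
+    html = ('<a href="https://example.com/foo-1.0.tar.gz" '
+            'rel="download">foo-1.0</a>')
+    page = Page(html, 'https://example.com/simple/foo/')
+    # Expected: a single (absolute_url, 'download') pair.
+    return page.links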
+
+
+class SimpleScrapingLocator(Locator):
+    """
+    A locator which scrapes HTML pages to locate downloads for a distribution.
+    This runs multiple threads to do the I/O; performance is at least as good
+    as pip's PackageFinder, which works in an analogous fashion.
+    """
+
+    # These are used to deal with various Content-Encoding schemes.
+    decoders = {
+        'deflate': zlib.decompress,
+        'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(b)).read(),
+        'none': lambda b: b,
+    }
+
+    def __init__(self, url, timeout=None, num_workers=10, **kwargs):
+        """
+        Initialise an instance.
+        :param url: The root URL to use for scraping.
+        :param timeout: The timeout, in seconds, to be applied to requests.
+                        This defaults to ``None`` (no timeout specified).
+        :param num_workers: The number of worker threads you want to do I/O,
+                            This defaults to 10.
+        :param kwargs: Passed to the superclass.
+        """
+        super(SimpleScrapingLocator, self).__init__(**kwargs)
+        self.base_url = ensure_slash(url)
+        self.timeout = timeout
+        self._page_cache = {}
+        self._seen = set()
+        self._to_fetch = queue.Queue()
+        self._bad_hosts = set()
+        self.skip_externals = False
+        self.num_workers = num_workers
+        self._lock = threading.RLock()
+        # See issue #45: we need to be resilient when the locator is used
+        # in a thread, e.g. with concurrent.futures. We can't use self._lock
+        # as it is for coordinating our internal threads - the ones created
+        # in _prepare_threads.
+        self._gplock = threading.RLock()
+        self.platform_check = False  # See issue #112
+
+    def _prepare_threads(self):
+        """
+        Threads are created only when get_project is called, and terminate
+        before it returns. They are there primarily to parallelise I/O (i.e.
+        fetching web pages).
+        """
+        self._threads = []
+        for i in range(self.num_workers):
+            t = threading.Thread(target=self._fetch)
+            t.daemon = True
+            t.start()
+            self._threads.append(t)
+
+    def _wait_threads(self):
+        """
+        Tell all the threads to terminate (by sending a sentinel value) and
+        wait for them to do so.
+        """
+        # Note that you need two loops, since you can't say which
+        # thread will get each sentinel
+        for t in self._threads:
+            self._to_fetch.put(None)    # sentinel
+        for t in self._threads:
+            t.join()
+        self._threads = []
+
+    def _get_project(self, name):
+        result = {'urls': {}, 'digests': {}}
+        with self._gplock:
+            self.result = result
+            self.project_name = name
+            url = urljoin(self.base_url, '%s/' % quote(name))
+            self._seen.clear()
+            self._page_cache.clear()
+            self._prepare_threads()
+            try:
+                logger.debug('Queueing %s', url)
+                self._to_fetch.put(url)
+                self._to_fetch.join()
+            finally:
+                self._wait_threads()
+            del self.result
+        return result
+
+    platform_dependent = re.compile(r'\b(linux_(i\d86|x86_64|arm\w+)|'
+                                    r'win(32|_amd64)|macosx_?\d+)\b', re.I)
+
+    def _is_platform_dependent(self, url):
+        """
+        Does an URL refer to a platform-specific download?
+        """
+        return self.platform_dependent.search(url)
+
+    def _process_download(self, url):
+        """
+        See if an URL is a suitable download for a project.
+
+        If it is, register information in the result dictionary (for
+        _get_project) about the specific version it's for.
+
+        Note that the return value isn't actually used other than as a boolean
+        value.
+        """
+        if self.platform_check and self._is_platform_dependent(url):
+            info = None
+        else:
+            info = self.convert_url_to_download_info(url, self.project_name)
+        logger.debug('process_download: %s -> %s', url, info)
+        if info:
+            with self._lock:    # needed because self.result is shared
+                self._update_version_data(self.result, info)
+        return info
+
+    def _should_queue(self, link, referrer, rel):
+        """
+        Determine whether a link URL from a referring page and with a
+        particular "rel" attribute should be queued for scraping.
+        """
+        scheme, netloc, path, _, _, _ = urlparse(link)
+        if path.endswith(self.source_extensions + self.binary_extensions +
+                         self.excluded_extensions):
+            result = False
+        elif self.skip_externals and not link.startswith(self.base_url):
+            result = False
+        elif not referrer.startswith(self.base_url):
+            result = False
+        elif rel not in ('homepage', 'download'):
+            result = False
+        elif scheme not in ('http', 'https', 'ftp'):
+            result = False
+        elif self._is_platform_dependent(link):
+            result = False
+        else:
+            host = netloc.split(':', 1)[0]
+            if host.lower() == 'localhost':
+                result = False
+            else:
+                result = True
+        logger.debug('should_queue: %s (%s) from %s -> %s', link, rel,
+                     referrer, result)
+        return result
+
+    def _fetch(self):
+        """
+        Get a URL to fetch from the work queue, get the HTML page, examine its
+        links for download candidates and candidates for further scraping.
+
+        This is a handy method to run in a thread.
+        """
+        while True:
+            url = self._to_fetch.get()
+            try:
+                if url:
+                    page = self.get_page(url)
+                    if page is None:    # e.g. after an error
+                        continue
+                    for link, rel in page.links:
+                        if link not in self._seen:
+                            try:
+                                self._seen.add(link)
+                                if (not self._process_download(link) and
+                                        self._should_queue(link, url, rel)):
+                                    logger.debug('Queueing %s from %s', link, url)
+                                    self._to_fetch.put(link)
+                            except MetadataInvalidError:  # e.g. invalid versions
+                                pass
+            except Exception as e:  # pragma: no cover
+                self.errors.put(text_type(e))
+            finally:
+                # always do this, to avoid hangs :-)
+                self._to_fetch.task_done()
+            if not url:
+                # logger.debug('Sentinel seen, quitting.')
+                break
+
+    def get_page(self, url):
+        """
+        Get the HTML for an URL, possibly from an in-memory cache.
+
+        XXX TODO Note: this cache is never actually cleared. It's assumed that
+        the data won't get stale over the lifetime of a locator instance (not
+        necessarily true for the default_locator).
+        """
+        # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api
+        scheme, netloc, path, _, _, _ = urlparse(url)
+        if scheme == 'file' and os.path.isdir(url2pathname(path)):
+            url = urljoin(ensure_slash(url), 'index.html')
+
+        if url in self._page_cache:
+            result = self._page_cache[url]
+            logger.debug('Returning %s from cache: %s', url, result)
+        else:
+            host = netloc.split(':', 1)[0]
+            result = None
+            if host in self._bad_hosts:
+                logger.debug('Skipping %s due to bad host %s', url, host)
+            else:
+                req = Request(url, headers={'Accept-encoding': 'identity'})
+                try:
+                    logger.debug('Fetching %s', url)
+                    resp = self.opener.open(req, timeout=self.timeout)
+                    logger.debug('Fetched %s', url)
+                    headers = resp.info()
+                    content_type = headers.get('Content-Type', '')
+                    if HTML_CONTENT_TYPE.match(content_type):
+                        final_url = resp.geturl()
+                        data = resp.read()
+                        encoding = headers.get('Content-Encoding')
+                        if encoding:
+                            decoder = self.decoders[encoding]   # fail if not found
+                            data = decoder(data)
+                        encoding = 'utf-8'
+                        m = CHARSET.search(content_type)
+                        if m:
+                            encoding = m.group(1)
+                        try:
+                            data = data.decode(encoding)
+                        except UnicodeError:  # pragma: no cover
+                            data = data.decode('latin-1')    # fallback
+                        result = Page(data, final_url)
+                        self._page_cache[final_url] = result
+                except HTTPError as e:
+                    if e.code != 404:
+                        logger.exception('Fetch failed: %s: %s', url, e)
+                except URLError as e:  # pragma: no cover
+                    logger.exception('Fetch failed: %s: %s', url, e)
+                    with self._lock:
+                        self._bad_hosts.add(host)
+                except Exception as e:  # pragma: no cover
+                    logger.exception('Fetch failed: %s: %s', url, e)
+                finally:
+                    self._page_cache[url] = result   # even if None (failure)
+        return result
+
+    _distname_re = re.compile('<a href=[^>]*>([^<]+)<')
+
+    def get_distribution_names(self):
+        """
+        Return all the distribution names known to this locator.
+        """
+        result = set()
+        page = self.get_page(self.base_url)
+        if not page:
+            raise DistlibException('Unable to get %s' % self.base_url)
+        for match in self._distname_re.finditer(page.data):
+            result.add(match.group(1))
+        return result
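+
+
+# --- Illustrative usage sketch (editor's addition, not part of distlib) ------
+# Typical construction of the scraping locator against a simple index. The
+# call below performs real HTTP requests, so the function is defined but never
+# invoked here; the project name is only an example.
+def _example_scraping_locator():  # pragma: no cover
+    loc = SimpleScrapingLocator('https://pypi.org/simple/', timeout=3.0)
+    versions = loc.get_project('pip')
+    # Besides the per-version Distribution entries, the mapping carries the
+    # bookkeeping keys 'urls' and 'digests' added by _get_project above.
+    return sorted(k for k in versions if k not in ('urls', 'digests'))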
+
+
+class DirectoryLocator(Locator):
+    """
+    This class locates distributions in a directory tree.
+    """
+
+    def __init__(self, path, **kwargs):
+        """
+        Initialise an instance.
+        :param path: The root of the directory tree to search.
+        :param kwargs: Passed to the superclass constructor,
+                       except for:
+                       * recursive - if True (the default), subdirectories are
+                         recursed into. If False, only the top-level directory
+                         is searched,
+        """
+        self.recursive = kwargs.pop('recursive', True)
+        super(DirectoryLocator, self).__init__(**kwargs)
+        path = os.path.abspath(path)
+        if not os.path.isdir(path):  # pragma: no cover
+            raise DistlibException('Not a directory: %r' % path)
+        self.base_dir = path
+
+    def should_include(self, filename, parent):
+        """
+        Should a filename be considered as a candidate for a distribution
+        archive? As well as the filename, the directory which contains it
+        is provided, though not used by the current implementation.
+        """
+        return filename.endswith(self.downloadable_extensions)
+
+    def _get_project(self, name):
+        result = {'urls': {}, 'digests': {}}
+        for root, dirs, files in os.walk(self.base_dir):
+            for fn in files:
+                if self.should_include(fn, root):
+                    fn = os.path.join(root, fn)
+                    url = urlunparse(('file', '',
+                                      pathname2url(os.path.abspath(fn)),
+                                      '', '', ''))
+                    info = self.convert_url_to_download_info(url, name)
+                    if info:
+                        self._update_version_data(result, info)
+            if not self.recursive:
+                break
+        return result
+
+    def get_distribution_names(self):
+        """
+        Return all the distribution names known to this locator.
+        """
+        result = set()
+        for root, dirs, files in os.walk(self.base_dir):
+            for fn in files:
+                if self.should_include(fn, root):
+                    fn = os.path.join(root, fn)
+                    url = urlunparse(('file', '',
+                                      pathname2url(os.path.abspath(fn)),
+                                      '', '', ''))
+                    info = self.convert_url_to_download_info(url, None)
+                    if info:
+                        result.add(info['name'])
+            if not self.recursive:
+                break
+        return result
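+
+
+# --- Illustrative usage sketch (editor's addition, not part of distlib) ------
+# DirectoryLocator works entirely against the local filesystem. The path below
+# is hypothetical (the constructor requires an existing directory), so the
+# function is defined but never invoked here.
+def _example_directory_locator():  # pragma: no cover
+    loc = DirectoryLocator('/path/to/wheelhouse', recursive=False)
+    dist = loc.locate('foo (>= 1.0)')
+    # ``dist`` is a Distribution located from a file:// URL, or None if no
+    # matching archive was found under the directory.
+    return dist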
+
+
+class JSONLocator(Locator):
+    """
+    This locator uses special extended metadata (not available on PyPI) and is
+    the basis of performant dependency resolution in distlib. Other locators
+    require archive downloads before dependencies can be determined! As you
+    might imagine, that can be slow.
+    """
+    def get_distribution_names(self):
+        """
+        Return all the distribution names known to this locator.
+        """
+        raise NotImplementedError('Not available from this locator')
+
+    def _get_project(self, name):
+        result = {'urls': {}, 'digests': {}}
+        data = get_project_data(name)
+        if data:
+            for info in data.get('files', []):
+                if info['ptype'] != 'sdist' or info['pyversion'] != 'source':
+                    continue
+                # We don't store summary in project metadata as it makes
+                # the data bigger for no benefit during dependency
+                # resolution
+                dist = make_dist(data['name'], info['version'],
+                                 summary=data.get('summary',
+                                                  'Placeholder for summary'),
+                                 scheme=self.scheme)
+                md = dist.metadata
+                md.source_url = info['url']
+                # TODO SHA256 digest
+                if 'digest' in info and info['digest']:
+                    dist.digest = ('md5', info['digest'])
+                md.dependencies = info.get('requirements', {})
+                dist.exports = info.get('exports', {})
+                result[dist.version] = dist
+                result['urls'].setdefault(dist.version, set()).add(info['url'])
+        return result
+
+
+class DistPathLocator(Locator):
+    """
+    This locator finds installed distributions in a path. It can be useful for
+    adding to an :class:`AggregatingLocator`.
+    """
+    def __init__(self, distpath, **kwargs):
+        """
+        Initialise an instance.
+
+        :param distpath: A :class:`DistributionPath` instance to search.
+        """
+        super(DistPathLocator, self).__init__(**kwargs)
+        assert isinstance(distpath, DistributionPath)
+        self.distpath = distpath
+
+    def _get_project(self, name):
+        dist = self.distpath.get_distribution(name)
+        if dist is None:
+            result = {'urls': {}, 'digests': {}}
+        else:
+            result = {
+                dist.version: dist,
+                'urls': {dist.version: set([dist.source_url])},
+                'digests': {dist.version: set([None])}
+            }
+        return result
+
+
+class AggregatingLocator(Locator):
+    """
+    This class allows you to chain and/or merge a list of locators.
+    """
+    def __init__(self, *locators, **kwargs):
+        """
+        Initialise an instance.
+
+        :param locators: The list of locators to search.
+        :param kwargs: Passed to the superclass constructor,
+                       except for:
+                       * merge - if False (the default), the first successful
+                         search from any of the locators is returned. If True,
+                         the results from all locators are merged (this can be
+                         slow).
+        """
+        self.merge = kwargs.pop('merge', False)
+        self.locators = locators
+        super(AggregatingLocator, self).__init__(**kwargs)
+
+    def clear_cache(self):
+        super(AggregatingLocator, self).clear_cache()
+        for locator in self.locators:
+            locator.clear_cache()
+
+    def _set_scheme(self, value):
+        self._scheme = value
+        for locator in self.locators:
+            locator.scheme = value
+
+    scheme = property(Locator.scheme.fget, _set_scheme)
+
+    def _get_project(self, name):
+        result = {}
+        for locator in self.locators:
+            d = locator.get_project(name)
+            if d:
+                if self.merge:
+                    files = result.get('urls', {})
+                    digests = result.get('digests', {})
+                    # next line could overwrite result['urls'], result['digests']
+                    result.update(d)
+                    df = result.get('urls')
+                    if files and df:
+                        for k, v in files.items():
+                            if k in df:
+                                df[k] |= v
+                            else:
+                                df[k] = v
+                    dd = result.get('digests')
+                    if digests and dd:
+                        dd.update(digests)
+                else:
+                    # See issue #18. If any dists are found and we're looking
+                    # for specific constraints, we only return something if
+                    # a match is found. For example, if a DirectoryLocator
+                    # returns just foo (1.0) while we're looking for
+                    # foo (>= 2.0), we'll pretend there was nothing there so
+                    # that subsequent locators can be queried. Otherwise we
+                    # would just return foo (1.0) which would then lead to a
+                    # failure to find foo (>= 2.0), because other locators
+                    # weren't searched. Note that this only matters when
+                    # merge=False.
+                    if self.matcher is None:
+                        found = True
+                    else:
+                        found = False
+                        for k in d:
+                            if self.matcher.match(k):
+                                found = True
+                                break
+                    if found:
+                        result = d
+                        break
+        return result
+
+    def get_distribution_names(self):
+        """
+        Return all the distribution names known to this locator.
+        """
+        result = set()
+        for locator in self.locators:
+            try:
+                result |= locator.get_distribution_names()
+            except NotImplementedError:
+                pass
+        return result
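+
+
+# --- Illustrative usage sketch (editor's addition, not part of distlib) ------
+# Chaining a local directory ahead of the network locator, mirroring how
+# ``default_locator`` below is assembled. The directory path is hypothetical,
+# so the function is defined but never invoked here.
+def _example_aggregating_locator():  # pragma: no cover
+    loc = AggregatingLocator(
+        DirectoryLocator('/path/to/local/cache'),
+        SimpleScrapingLocator('https://pypi.org/simple/', timeout=3.0),
+        scheme='legacy')
+    # With merge=False (the default) the first locator that yields a match
+    # for the requirement wins, so local archives shadow the index.
+    return loc.locate('foo (>= 1.0)')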
+
+
+# We use a legacy scheme simply because most of the dists on PyPI use legacy
+# versions which don't conform to PEP 440.
+default_locator = AggregatingLocator(
+                    # JSONLocator(), # don't use as PEP 426 is withdrawn
+                    SimpleScrapingLocator('https://pypi.org/simple/',
+                                          timeout=3.0),
+                    scheme='legacy')
+
+locate = default_locator.locate
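+
+
+# --- Illustrative usage sketch (editor's addition, not part of distlib) ------
+# The module-level ``locate`` convenience wraps default_locator.locate. It
+# hits the network, so the call is shown in a function that is never invoked
+# here; the requirement string is only an example.
+def _example_locate():  # pragma: no cover
+    dist = locate('requests (>= 2.0)')
+    if dist is not None:
+        return dist.name_and_version, dist.download_urls
+    return None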
+
+
+class DependencyFinder(object):
+    """
+    Locate dependencies for distributions.
+    """
+
+    def __init__(self, locator=None):
+        """
+        Initialise an instance, using the specified locator
+        to locate distributions.
+        """
+        self.locator = locator or default_locator
+        self.scheme = get_scheme(self.locator.scheme)
+
+    def add_distribution(self, dist):
+        """
+        Add a distribution to the finder. This will update internal information
+        about who provides what.
+        :param dist: The distribution to add.
+        """
+        logger.debug('adding distribution %s', dist)
+        name = dist.key
+        self.dists_by_name[name] = dist
+        self.dists[(name, dist.version)] = dist
+        for p in dist.provides:
+            name, version = parse_name_and_version(p)
+            logger.debug('Add to provided: %s, %s, %s', name, version, dist)
+            self.provided.setdefault(name, set()).add((version, dist))
+
+    def remove_distribution(self, dist):
+        """
+        Remove a distribution from the finder. This will update internal
+        information about who provides what.
+        :param dist: The distribution to remove.
+        """
+        logger.debug('removing distribution %s', dist)
+        name = dist.key
+        del self.dists_by_name[name]
+        del self.dists[(name, dist.version)]
+        for p in dist.provides:
+            name, version = parse_name_and_version(p)
+            logger.debug('Remove from provided: %s, %s, %s', name, version, dist)
+            s = self.provided[name]
+            s.remove((version, dist))
+            if not s:
+                del self.provided[name]
+
+    def get_matcher(self, reqt):
+        """
+        Get a version matcher for a requirement.
+        :param reqt: The requirement
+        :type reqt: str
+        :return: A version matcher (an instance of
+                 :class:`distlib.version.Matcher`).
+        """
+        try:
+            matcher = self.scheme.matcher(reqt)
+        except UnsupportedVersionError:  # pragma: no cover
+            # XXX compat-mode if cannot read the version
+            name = reqt.split()[0]
+            matcher = self.scheme.matcher(name)
+        return matcher
+
+    def find_providers(self, reqt):
+        """
+        Find the distributions which can fulfill a requirement.
+
+        :param reqt: The requirement.
+         :type reqt: str
+        :return: A set of distributions which can fulfill the requirement.
+        """
+        matcher = self.get_matcher(reqt)
+        name = matcher.key   # case-insensitive
+        result = set()
+        provided = self.provided
+        if name in provided:
+            for version, provider in provided[name]:
+                try:
+                    match = matcher.match(version)
+                except UnsupportedVersionError:
+                    match = False
+
+                if match:
+                    result.add(provider)
+                    break
+        return result
+
+    def try_to_replace(self, provider, other, problems):
+        """
+        Attempt to replace one provider with another. This is typically used
+        when resolving dependencies from multiple sources, e.g. A requires
+        (B >= 1.0) while C requires (B >= 1.1).
+
+        For successful replacement, ``provider`` must meet all the requirements
+        which ``other`` fulfills.
+
+        :param provider: The provider we are trying to replace with.
+        :param other: The provider we're trying to replace.
+        :param problems: If False is returned, this will contain what
+                         problems prevented replacement. This is currently
+                         a tuple of the literal string 'cantreplace',
+                         ``provider``, ``other``  and the set of requirements
+                         that ``provider`` couldn't fulfill.
+        :return: True if we can replace ``other`` with ``provider``, else
+                 False.
+        """
+        rlist = self.reqts[other]
+        unmatched = set()
+        for s in rlist:
+            matcher = self.get_matcher(s)
+            if not matcher.match(provider.version):
+                unmatched.add(s)
+        if unmatched:
+            # can't replace other with provider
+            problems.add(('cantreplace', provider, other,
+                          frozenset(unmatched)))
+            result = False
+        else:
+            # can replace other with provider
+            self.remove_distribution(other)
+            del self.reqts[other]
+            for s in rlist:
+                self.reqts.setdefault(provider, set()).add(s)
+            self.add_distribution(provider)
+            result = True
+        return result
+
+    def find(self, requirement, meta_extras=None, prereleases=False):
+        """
+        Find a distribution and all distributions it depends on.
+
+        :param requirement: The requirement specifying the distribution to
+                            find, or a Distribution instance.
+        :param meta_extras: A list of meta extras such as :test:, :build: and
+                            so on.
+        :param prereleases: If ``True``, allow pre-release versions to be
+                            returned - otherwise, don't return prereleases
+                            unless they're all that's available.
+
+        Return a set of :class:`Distribution` instances and a set of
+        problems.
+
+        The distributions returned should be such that they have the
+        :attr:`required` attribute set to ``True`` if they were
+        from the ``requirement`` passed to ``find()``, and they have the
+        :attr:`build_time_dependency` attribute set to ``True`` unless they
+        are post-installation dependencies of the ``requirement``.
+
+        The problems should be a tuple consisting of the string
+        ``'unsatisfied'`` and the requirement which couldn't be satisfied
+        by any distribution known to the locator.
+        """
+
+        self.provided = {}
+        self.dists = {}
+        self.dists_by_name = {}
+        self.reqts = {}
+
+        meta_extras = set(meta_extras or [])
+        if ':*:' in meta_extras:
+            meta_extras.remove(':*:')
+            # :meta: and :run: are implicitly included
+            meta_extras |= set([':test:', ':build:', ':dev:'])
+
+        if isinstance(requirement, Distribution):
+            dist = odist = requirement
+            logger.debug('passed %s as requirement', odist)
+        else:
+            dist = odist = self.locator.locate(requirement,
+                                               prereleases=prereleases)
+            if dist is None:
+                raise DistlibException('Unable to locate %r' % requirement)
+            logger.debug('located %s', odist)
+        dist.requested = True
+        problems = set()
+        todo = set([dist])
+        install_dists = set([odist])
+        while todo:
+            dist = todo.pop()
+            name = dist.key     # case-insensitive
+            if name not in self.dists_by_name:
+                self.add_distribution(dist)
+            else:
+                # import pdb; pdb.set_trace()
+                other = self.dists_by_name[name]
+                if other != dist:
+                    self.try_to_replace(dist, other, problems)
+
+            ireqts = dist.run_requires | dist.meta_requires
+            sreqts = dist.build_requires
+            ereqts = set()
+            if meta_extras and dist in install_dists:
+                for key in ('test', 'build', 'dev'):
+                    e = ':%s:' % key
+                    if e in meta_extras:
+                        ereqts |= getattr(dist, '%s_requires' % key)
+            all_reqts = ireqts | sreqts | ereqts
+            for r in all_reqts:
+                providers = self.find_providers(r)
+                if not providers:
+                    logger.debug('No providers found for %r', r)
+                    provider = self.locator.locate(r, prereleases=prereleases)
+                    # If no provider is found and we didn't consider
+                    # prereleases, consider them now.
+                    if provider is None and not prereleases:
+                        provider = self.locator.locate(r, prereleases=True)
+                    if provider is None:
+                        logger.debug('Cannot satisfy %r', r)
+                        problems.add(('unsatisfied', r))
+                    else:
+                        n, v = provider.key, provider.version
+                        if (n, v) not in self.dists:
+                            todo.add(provider)
+                        providers.add(provider)
+                        if r in ireqts and dist in install_dists:
+                            install_dists.add(provider)
+                            logger.debug('Adding %s to install_dists',
+                                         provider.name_and_version)
+                for p in providers:
+                    name = p.key
+                    if name not in self.dists_by_name:
+                        self.reqts.setdefault(p, set()).add(r)
+                    else:
+                        other = self.dists_by_name[name]
+                        if other != p:
+                            # see if other can be replaced by p
+                            self.try_to_replace(p, other, problems)
+
+        dists = set(self.dists.values())
+        for dist in dists:
+            dist.build_time_dependency = dist not in install_dists
+            if dist.build_time_dependency:
+                logger.debug('%s is a build-time dependency only.',
+                             dist.name_and_version)
+        logger.debug('find done for %s', odist)
+        return dists, problems
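+
+
+# --- Illustrative usage sketch (editor's addition, not part of distlib) ------
+# End-to-end dependency resolution with the default locator. This performs
+# network I/O, so the function is defined but never called here; the
+# requirement string is only an example.
+def _example_dependency_finder():  # pragma: no cover
+    finder = DependencyFinder()
+    dists, problems = finder.find('requests (>= 2.0)')
+    # ``dists`` is a set of Distribution instances (the target plus its
+    # dependencies); ``problems`` holds ('unsatisfied', requirement) tuples.
+    return sorted(d.name_and_version for d in dists), problems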
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/manifest.py b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/manifest.py
new file mode 100644
index 0000000..420dcf1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/manifest.py
@@ -0,0 +1,384 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2023 Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""
+Class representing the list of files in a distribution.
+
+Equivalent to distutils.filelist, but fixes some problems.
+"""
+import fnmatch
+import logging
+import os
+import re
+import sys
+
+from . import DistlibException
+from .compat import fsdecode
+from .util import convert_path
+
+
+__all__ = ['Manifest']
+
+logger = logging.getLogger(__name__)
+
+# a \ followed by some spaces + EOL
+_COLLAPSE_PATTERN = re.compile('\\\\w*\n', re.M)
+_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)
+
+#
+# Due to the different results returned by fnmatch.translate, we need
+# to do slightly different processing for Python 2.7 and 3.2 ... this needed
+# to be brought in for Python 3.6 onwards.
+#
+_PYTHON_VERSION = sys.version_info[:2]
+
+
+class Manifest(object):
+    """
+    A list of files built by exploring the filesystem and filtered by applying various
+    patterns to what we find there.
+    """
+
+    def __init__(self, base=None):
+        """
+        Initialise an instance.
+
+        :param base: The base directory to explore under.
+        """
+        self.base = os.path.abspath(os.path.normpath(base or os.getcwd()))
+        self.prefix = self.base + os.sep
+        self.allfiles = None
+        self.files = set()
+
+    #
+    # Public API
+    #
+
+    def findall(self):
+        """Find all files under the base and set ``allfiles`` to the absolute
+        pathnames of files found.
+        """
+        from stat import S_ISREG, S_ISDIR, S_ISLNK
+
+        self.allfiles = allfiles = []
+        root = self.base
+        stack = [root]
+        pop = stack.pop
+        push = stack.append
+
+        while stack:
+            root = pop()
+            names = os.listdir(root)
+
+            for name in names:
+                fullname = os.path.join(root, name)
+
+                # Avoid excess stat calls -- just one will do, thank you!
+                stat = os.stat(fullname)
+                mode = stat.st_mode
+                if S_ISREG(mode):
+                    allfiles.append(fsdecode(fullname))
+                elif S_ISDIR(mode) and not S_ISLNK(mode):
+                    push(fullname)
+
+    def add(self, item):
+        """
+        Add a file to the manifest.
+
+        :param item: The pathname to add. This can be relative to the base.
+        """
+        if not item.startswith(self.prefix):
+            item = os.path.join(self.base, item)
+        self.files.add(os.path.normpath(item))
+
+    def add_many(self, items):
+        """
+        Add a list of files to the manifest.
+
+        :param items: The pathnames to add. These can be relative to the base.
+        """
+        for item in items:
+            self.add(item)
+
+    def sorted(self, wantdirs=False):
+        """
+        Return sorted files in directory order
+        """
+
+        def add_dir(dirs, d):
+            dirs.add(d)
+            logger.debug('add_dir added %s', d)
+            if d != self.base:
+                parent, _ = os.path.split(d)
+                assert parent not in ('', '/')
+                add_dir(dirs, parent)
+
+        result = set(self.files)    # make a copy!
+        if wantdirs:
+            dirs = set()
+            for f in result:
+                add_dir(dirs, os.path.dirname(f))
+            result |= dirs
+        return [os.path.join(*path_tuple) for path_tuple in
+                sorted(os.path.split(path) for path in result)]
+
+    def clear(self):
+        """Clear all collected files."""
+        self.files = set()
+        self.allfiles = []
+
+    def process_directive(self, directive):
+        """
+        Process a directive which either adds some files from ``allfiles`` to
+        ``files``, or removes some files from ``files``.
+
+        :param directive: The directive to process. This should be in a format
+                     compatible with distutils ``MANIFEST.in`` files:
+
+                     http://docs.python.org/distutils/sourcedist.html#commands
+        """
+        # Parse the line: split it up, make sure the right number of words
+        # is there, and return the relevant words.  'action' is always
+        # defined: it's the first word of the line.  Which of the other
+        # three are defined depends on the action; it'll be either
+        # patterns, (dir and patterns), or (dirpattern).
+        action, patterns, thedir, dirpattern = self._parse_directive(directive)
+
+        # OK, now we know that the action is valid and we have the
+        # right number of words on the line for that action -- so we
+        # can proceed with minimal error-checking.
+        if action == 'include':
+            for pattern in patterns:
+                if not self._include_pattern(pattern, anchor=True):
+                    logger.warning('no files found matching %r', pattern)
+
+        elif action == 'exclude':
+            for pattern in patterns:
+                self._exclude_pattern(pattern, anchor=True)
+
+        elif action == 'global-include':
+            for pattern in patterns:
+                if not self._include_pattern(pattern, anchor=False):
+                    logger.warning('no files found matching %r '
+                                   'anywhere in distribution', pattern)
+
+        elif action == 'global-exclude':
+            for pattern in patterns:
+                self._exclude_pattern(pattern, anchor=False)
+
+        elif action == 'recursive-include':
+            for pattern in patterns:
+                if not self._include_pattern(pattern, prefix=thedir):
+                    logger.warning('no files found matching %r '
+                                   'under directory %r', pattern, thedir)
+
+        elif action == 'recursive-exclude':
+            for pattern in patterns:
+                self._exclude_pattern(pattern, prefix=thedir)
+
+        elif action == 'graft':
+            if not self._include_pattern(None, prefix=dirpattern):
+                logger.warning('no directories found matching %r',
+                               dirpattern)
+
+        elif action == 'prune':
+            if not self._exclude_pattern(None, prefix=dirpattern):
+                logger.warning('no previously-included directories found '
+                               'matching %r', dirpattern)
+        else:   # pragma: no cover
+            # This should never happen, as it should be caught in
+            # _parse_template_line
+            raise DistlibException(
+                'invalid action %r' % action)
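+
+    # --- Illustrative usage sketch (editor's addition, not part of distlib) --
+    # A typical driver loop, shown as a comment so this vendored module's
+    # behaviour is unchanged; the directives below are ordinary MANIFEST.in
+    # lines and the base path is hypothetical:
+    #
+    #     manifest = Manifest('/path/to/project')
+    #     manifest.findall()
+    #     for line in ('include *.py', 'recursive-include docs *.rst',
+    #                  'prune build'):
+    #         manifest.process_directive(line)
+    #     files = manifest.sorted()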
+
+    #
+    # Private API
+    #
+
+    def _parse_directive(self, directive):
+        """
+        Validate a directive.
+        :param directive: The directive to validate.
+        :return: A tuple of action, patterns, thedir, dir_patterns
+        """
+        words = directive.split()
+        if len(words) == 1 and words[0] not in ('include', 'exclude',
+                                                'global-include',
+                                                'global-exclude',
+                                                'recursive-include',
+                                                'recursive-exclude',
+                                                'graft', 'prune'):
+            # no action given, let's use the default 'include'
+            words.insert(0, 'include')
+
+        action = words[0]
+        patterns = thedir = dir_pattern = None
+
+        if action in ('include', 'exclude',
+                      'global-include', 'global-exclude'):
+            if len(words) < 2:
+                raise DistlibException(
+                    '%r expects <pattern1> <pattern2> ...' % action)
+
+            patterns = [convert_path(word) for word in words[1:]]
+
+        elif action in ('recursive-include', 'recursive-exclude'):
+            if len(words) < 3:
+                raise DistlibException(
+                    '%r expects <dir> <pattern1> <pattern2> ...' % action)
+
+            thedir = convert_path(words[1])
+            patterns = [convert_path(word) for word in words[2:]]
+
+        elif action in ('graft', 'prune'):
+            if len(words) != 2:
+                raise DistlibException(
+                    '%r expects a single <dir_pattern>' % action)
+
+            dir_pattern = convert_path(words[1])
+
+        else:
+            raise DistlibException('unknown action %r' % action)
+
+        return action, patterns, thedir, dir_pattern
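+
+    # Illustrative sketch (not part of the upstream distlib source): assuming a
+    # Manifest instance `m` on a POSIX system (where convert_path is a no-op),
+    # directives are expected to parse like this:
+    #
+    #     m._parse_directive('include README.md LICENSE')
+    #     # -> ('include', ['README.md', 'LICENSE'], None, None)
+    #     m._parse_directive('recursive-include docs *.rst *.md')
+    #     # -> ('recursive-include', ['*.rst', '*.md'], 'docs', None)
+    #     m._parse_directive('graft examples')
+    #     # -> ('graft', None, None, 'examples')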
+
+    def _include_pattern(self, pattern, anchor=True, prefix=None,
+                         is_regex=False):
+        """Select strings (presumably filenames) from 'self.files' that
+        match 'pattern', a Unix-style wildcard (glob) pattern.
+
+        Patterns are not quite the same as implemented by the 'fnmatch'
+        module: '*' and '?'  match non-special characters, where "special"
+        is platform-dependent: slash on Unix; colon, slash, and backslash on
+        DOS/Windows; and colon on Mac OS.
+
+        If 'anchor' is true (the default), then the pattern match is more
+        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
+        'anchor' is false, both of these will match.
+
+        If 'prefix' is supplied, then only filenames starting with 'prefix'
+        (itself a pattern) and ending with 'pattern', with anything in between
+        them, will match.  'anchor' is ignored in this case.
+
+        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
+        'pattern' is assumed to be either a string containing a regex or a
+        regex object -- no translation is done, the regex is just compiled
+        and used as-is.
+
+        Selected strings will be added to self.files.
+
+        Return True if files are found.
+        """
+        # XXX docstring lying about what the special chars are?
+        found = False
+        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
+
+        # delayed loading of allfiles list
+        if self.allfiles is None:
+            self.findall()
+
+        for name in self.allfiles:
+            if pattern_re.search(name):
+                self.files.add(name)
+                found = True
+        return found
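+
+    # Illustrative sketch (not upstream code): with anchor=True, '*.py' selects
+    # 'setup.py' but not 'pkg/mod.py', because '*' does not cross os.sep; with
+    # anchor=False both are selected; with prefix='pkg' only files below 'pkg/'
+    # are considered, e.g.
+    #
+    #     m._include_pattern('*.py', anchor=True)    # top-level .py files only
+    #     m._include_pattern('*.py', anchor=False)   # .py files at any depth
+    #     m._include_pattern('*.py', prefix='pkg')   # .py files under pkg/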
+
+    def _exclude_pattern(self, pattern, anchor=True, prefix=None,
+                         is_regex=False):
+        """Remove strings (presumably filenames) from 'files' that match
+        'pattern'.
+
+        Other parameters are the same as for 'include_pattern()', above.
+        The list 'self.files' is modified in place. Return True if files are
+        found.
+
+        This API is public to allow, for example, exclusion of SCM
+        subdirectories when packaging source distributions.
+        """
+        found = False
+        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
+        for f in list(self.files):
+            if pattern_re.search(f):
+                self.files.remove(f)
+                found = True
+        return found
+
+    def _translate_pattern(self, pattern, anchor=True, prefix=None,
+                           is_regex=False):
+        """Translate a shell-like wildcard pattern to a compiled regular
+        expression.
+
+        Return the compiled regex.  If 'is_regex' true,
+        then 'pattern' is directly compiled to a regex (if it's a string)
+        or just returned as-is (assumes it's a regex object).
+        """
+        if is_regex:
+            if isinstance(pattern, str):
+                return re.compile(pattern)
+            else:
+                return pattern
+
+        if _PYTHON_VERSION > (3, 2):
+            # ditch start and end characters
+            start, _, end = self._glob_to_re('_').partition('_')
+
+        if pattern:
+            pattern_re = self._glob_to_re(pattern)
+            if _PYTHON_VERSION > (3, 2):
+                assert pattern_re.startswith(start) and pattern_re.endswith(end)
+        else:
+            pattern_re = ''
+
+        base = re.escape(os.path.join(self.base, ''))
+        if prefix is not None:
+            # ditch end of pattern character
+            if _PYTHON_VERSION <= (3, 2):
+                empty_pattern = self._glob_to_re('')
+                prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)]
+            else:
+                prefix_re = self._glob_to_re(prefix)
+                assert prefix_re.startswith(start) and prefix_re.endswith(end)
+                prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
+            sep = os.sep
+            if os.sep == '\\':
+                sep = r'\\'
+            if _PYTHON_VERSION <= (3, 2):
+                pattern_re = '^' + base + sep.join((prefix_re,
+                                                    '.*' + pattern_re))
+            else:
+                pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
+                pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep,
+                                                  pattern_re, end)
+        else:  # no prefix -- respect anchor flag
+            if anchor:
+                if _PYTHON_VERSION <= (3, 2):
+                    pattern_re = '^' + base + pattern_re
+                else:
+                    pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):])
+
+        return re.compile(pattern_re)
+
+    def _glob_to_re(self, pattern):
+        """Translate a shell-like glob pattern to a regular expression.
+
+        Return a string containing the regex.  Differs from
+        'fnmatch.translate()' in that '*' does not match "special characters"
+        (which are platform-specific).
+        """
+        pattern_re = fnmatch.translate(pattern)
+
+        # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
+        # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
+        # and by extension they shouldn't match such "special characters" under
+        # any OS.  So change all non-escaped dots in the RE to match any
+        # character except the special characters (currently: just os.sep).
+        sep = os.sep
+        if os.sep == '\\':
+            # we're using a regex to manipulate a regex, so we need
+            # to escape the backslash twice
+            sep = r'\\\\'
+        escaped = r'\1[^%s]' % sep
+        pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
+        return pattern_re
+
+
+class Evaluator(object):
+    """
+    This class is used to evaluate marker expressions.
+    """
+
+    operations = {
+        '==': lambda x, y: x == y,
+        '===': lambda x, y: x == y,
+        '~=': lambda x, y: x == y or x > y,
+        '!=': lambda x, y: x != y,
+        '<': lambda x, y: x < y,
+        '<=': lambda x, y: x == y or x < y,
+        '>': lambda x, y: x > y,
+        '>=': lambda x, y: x == y or x > y,
+        'and': lambda x, y: x and y,
+        'or': lambda x, y: x or y,
+        'in': lambda x, y: x in y,
+        'not in': lambda x, y: x not in y,
+    }
+
+    def evaluate(self, expr, context):
+        """
+        Evaluate a marker expression returned by the :func:`parse_requirement`
+        function in the specified context.
+        """
+        if isinstance(expr, string_types):
+            if expr[0] in '\'"':
+                result = expr[1:-1]
+            else:
+                if expr not in context:
+                    raise SyntaxError('unknown variable: %s' % expr)
+                result = context[expr]
+        else:
+            assert isinstance(expr, dict)
+            op = expr['op']
+            if op not in self.operations:
+                raise NotImplementedError('op not implemented: %s' % op)
+            elhs = expr['lhs']
+            erhs = expr['rhs']
+            if _is_literal(expr['lhs']) and _is_literal(expr['rhs']):
+                raise SyntaxError('invalid comparison: %s %s %s' %
+                                  (elhs, op, erhs))
+
+            lhs = self.evaluate(elhs, context)
+            rhs = self.evaluate(erhs, context)
+            if ((_is_version_marker(elhs) or _is_version_marker(erhs))
+                    and op in ('<', '<=', '>', '>=', '===', '==', '!=', '~=')):
+                lhs = LV(lhs)
+                rhs = LV(rhs)
+            elif _is_version_marker(elhs) and op in ('in', 'not in'):
+                lhs = LV(lhs)
+                rhs = _get_versions(rhs)
+            result = self.operations[op](lhs, rhs)
+        return result
+
+
+_DIGITS = re.compile(r'\d+\.\d+')
+
+
+def default_context():
+
+    def format_full_version(info):
+        version = '%s.%s.%s' % (info.major, info.minor, info.micro)
+        kind = info.releaselevel
+        if kind != 'final':
+            version += kind[0] + str(info.serial)
+        return version
+
+    if hasattr(sys, 'implementation'):
+        implementation_version = format_full_version(
+            sys.implementation.version)
+        implementation_name = sys.implementation.name
+    else:
+        implementation_version = '0'
+        implementation_name = ''
+
+    ppv = platform.python_version()
+    m = _DIGITS.match(ppv)
+    pv = m.group(0)
+    result = {
+        'implementation_name': implementation_name,
+        'implementation_version': implementation_version,
+        'os_name': os.name,
+        'platform_machine': platform.machine(),
+        'platform_python_implementation': platform.python_implementation(),
+        'platform_release': platform.release(),
+        'platform_system': platform.system(),
+        'platform_version': platform.version(),
+        'platform_in_venv': str(in_venv()),
+        'python_full_version': ppv,
+        'python_version': pv,
+        'sys_platform': sys.platform,
+    }
+    return result
+
+
+DEFAULT_CONTEXT = default_context()
+del default_context
+
+evaluator = Evaluator()
+
+
+def interpret(marker, execution_context=None):
+    """
+    Interpret a marker and return a result depending on environment.
+
+    :param marker: The marker to interpret.
+    :type marker: str
+    :param execution_context: The context used for name lookup.
+    :type execution_context: mapping
+    """
+    try:
+        expr, rest = parse_marker(marker)
+    except Exception as e:
+        raise SyntaxError('Unable to interpret marker syntax: %s: %s' %
+                          (marker, e))
+    if rest and rest[0] != '#':
+        raise SyntaxError('unexpected trailing data in marker: %s: %s' %
+                          (marker, rest))
+    context = dict(DEFAULT_CONTEXT)
+    if execution_context:
+        context.update(execution_context)
+    return evaluator.evaluate(expr, context)
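+
+
+# Illustrative usage sketch (not part of the upstream distlib module):
+# interpret() evaluates a PEP 508 environment marker against DEFAULT_CONTEXT,
+# optionally overridden by an execution context supplied by the caller.
+#
+#     from pip._vendor.distlib.markers import interpret
+#     interpret('python_version >= "3.8"')                 # True/False for this interpreter
+#     interpret('os_name == "posix"', {'os_name': 'nt'})   # False: the override wins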
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/metadata.py b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/metadata.py
new file mode 100644
index 0000000..7189aee
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/metadata.py
@@ -0,0 +1,1068 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""Implementation of the Metadata for Python packages PEPs.
+
+Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and 2.2).
+"""
+from __future__ import unicode_literals
+
+import codecs
+from email import message_from_file
+import json
+import logging
+import re
+
+
+from . import DistlibException, __version__
+from .compat import StringIO, string_types, text_type
+from .markers import interpret
+from .util import extract_by_key, get_extras
+from .version import get_scheme, PEP440_VERSION_RE
+
+logger = logging.getLogger(__name__)
+
+
+class MetadataMissingError(DistlibException):
+    """A required metadata is missing"""
+
+
+class MetadataConflictError(DistlibException):
+    """Attempt to read or write metadata fields that are conflictual."""
+
+
+class MetadataUnrecognizedVersionError(DistlibException):
+    """Unknown metadata version number."""
+
+
+class MetadataInvalidError(DistlibException):
+    """A metadata value is invalid"""
+
+# public API of this module
+__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION']
+
+# Encoding used for the PKG-INFO files
+PKG_INFO_ENCODING = 'utf-8'
+
+# preferred version. Hopefully will be changed
+# to 1.2 once PEP 345 is supported everywhere
+PKG_INFO_PREFERRED_VERSION = '1.1'
+
+_LINE_PREFIX_1_2 = re.compile('\n       \\|')
+_LINE_PREFIX_PRE_1_2 = re.compile('\n        ')
+_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+               'Summary', 'Description',
+               'Keywords', 'Home-page', 'Author', 'Author-email',
+               'License')
+
+_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+               'Supported-Platform', 'Summary', 'Description',
+               'Keywords', 'Home-page', 'Author', 'Author-email',
+               'License', 'Classifier', 'Download-URL', 'Obsoletes',
+               'Provides', 'Requires')
+
+_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier',
+                'Download-URL')
+
+_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+               'Supported-Platform', 'Summary', 'Description',
+               'Keywords', 'Home-page', 'Author', 'Author-email',
+               'Maintainer', 'Maintainer-email', 'License',
+               'Classifier', 'Download-URL', 'Obsoletes-Dist',
+               'Project-URL', 'Provides-Dist', 'Requires-Dist',
+               'Requires-Python', 'Requires-External')
+
+_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python',
+                'Obsoletes-Dist', 'Requires-External', 'Maintainer',
+                'Maintainer-email', 'Project-URL')
+
+_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+               'Supported-Platform', 'Summary', 'Description',
+               'Keywords', 'Home-page', 'Author', 'Author-email',
+               'Maintainer', 'Maintainer-email', 'License',
+               'Classifier', 'Download-URL', 'Obsoletes-Dist',
+               'Project-URL', 'Provides-Dist', 'Requires-Dist',
+               'Requires-Python', 'Requires-External', 'Private-Version',
+               'Obsoleted-By', 'Setup-Requires-Dist', 'Extension',
+               'Provides-Extra')
+
+_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By',
+                'Setup-Requires-Dist', 'Extension')
+
+# See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in
+# the metadata. Include them in the tuple literal below to allow them
+# (for now).
+# Ditto for Obsoletes - see issue #140.
+_566_FIELDS = _426_FIELDS + ('Description-Content-Type',
+                             'Requires', 'Provides', 'Obsoletes')
+
+_566_MARKERS = ('Description-Content-Type',)
+
+_643_MARKERS = ('Dynamic', 'License-File')
+
+_643_FIELDS = _566_FIELDS + _643_MARKERS
+
+_ALL_FIELDS = set()
+_ALL_FIELDS.update(_241_FIELDS)
+_ALL_FIELDS.update(_314_FIELDS)
+_ALL_FIELDS.update(_345_FIELDS)
+_ALL_FIELDS.update(_426_FIELDS)
+_ALL_FIELDS.update(_566_FIELDS)
+_ALL_FIELDS.update(_643_FIELDS)
+
+EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''')
+
+
+def _version2fieldlist(version):
+    if version == '1.0':
+        return _241_FIELDS
+    elif version == '1.1':
+        return _314_FIELDS
+    elif version == '1.2':
+        return _345_FIELDS
+    elif version in ('1.3', '2.1'):
+        # avoid adding field names if already there
+        return _345_FIELDS + tuple(f for f in _566_FIELDS if f not in _345_FIELDS)
+    elif version == '2.0':
+        raise ValueError('Metadata 2.0 is withdrawn and not supported')
+        # return _426_FIELDS
+    elif version == '2.2':
+        return _643_FIELDS
+    raise MetadataUnrecognizedVersionError(version)
+
+
+def _best_version(fields):
+    """Detect the best version depending on the fields used."""
+    def _has_marker(keys, markers):
+        return any(marker in keys for marker in markers)
+
+    keys = [key for key, value in fields.items() if value not in ([], 'UNKNOWN', None)]
+    possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.1', '2.2']  # 2.0 removed
+
+    # first let's try to see if a field is not part of one of the version
+    for key in keys:
+        if key not in _241_FIELDS and '1.0' in possible_versions:
+            possible_versions.remove('1.0')
+            logger.debug('Removed 1.0 due to %s', key)
+        if key not in _314_FIELDS and '1.1' in possible_versions:
+            possible_versions.remove('1.1')
+            logger.debug('Removed 1.1 due to %s', key)
+        if key not in _345_FIELDS and '1.2' in possible_versions:
+            possible_versions.remove('1.2')
+            logger.debug('Removed 1.2 due to %s', key)
+        if key not in _566_FIELDS and '1.3' in possible_versions:
+            possible_versions.remove('1.3')
+            logger.debug('Removed 1.3 due to %s', key)
+        if key not in _566_FIELDS and '2.1' in possible_versions:
+            if key != 'Description':  # In 2.1, description allowed after headers
+                possible_versions.remove('2.1')
+                logger.debug('Removed 2.1 due to %s', key)
+        if key not in _643_FIELDS and '2.2' in possible_versions:
+            possible_versions.remove('2.2')
+            logger.debug('Removed 2.2 due to %s', key)
+        # if key not in _426_FIELDS and '2.0' in possible_versions:
+            # possible_versions.remove('2.0')
+            # logger.debug('Removed 2.0 due to %s', key)
+
+    # possible_version contains qualified versions
+    if len(possible_versions) == 1:
+        return possible_versions[0]   # found !
+    elif len(possible_versions) == 0:
+        logger.debug('Out of options - unknown metadata set: %s', fields)
+        raise MetadataConflictError('Unknown metadata set')
+
+    # let's see if one unique marker is found
+    is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS)
+    is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS)
+    is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS)
+    # is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS)
+    is_2_2 = '2.2' in possible_versions and _has_marker(keys, _643_MARKERS)
+    if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_2) > 1:
+        raise MetadataConflictError('You used incompatible 1.1/1.2/2.1/2.2 fields')
+
+    # we have the choice, 1.0, or 1.2, 2.1 or 2.2
+    #   - 1.0 has a broken Summary field but works with all tools
+    #   - 1.1 is to avoid
+    #   - 1.2 fixes Summary but has little adoption
+    #   - 2.1 adds more features
+    #   - 2.2 is the latest
+    if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_2:
+        # we couldn't find any specific marker
+        if PKG_INFO_PREFERRED_VERSION in possible_versions:
+            return PKG_INFO_PREFERRED_VERSION
+    if is_1_1:
+        return '1.1'
+    if is_1_2:
+        return '1.2'
+    if is_2_1:
+        return '2.1'
+    # if is_2_2:
+        # return '2.2'
+
+    return '2.2'
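+
+# Illustrative examples (not upstream code) of how the field set drives the
+# chosen metadata version:
+#
+#     _best_version({'Name': 'demo', 'Version': '1.0', 'Summary': 'demo'})
+#     # -> '1.1' (no version-specific marker field; the preferred version wins)
+#     _best_version({'Name': 'demo', 'Version': '1.0',
+#                    'Description-Content-Type': 'text/markdown'})
+#     # -> '2.1' (Description-Content-Type is a 2.1 marker field)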
+
+# This follows the rules about transforming keys as described in
+# https://www.python.org/dev/peps/pep-0566/#id17
+_ATTR2FIELD = {
+    name.lower().replace("-", "_"): name for name in _ALL_FIELDS
+}
+_FIELD2ATTR = {field: attr for attr, field in _ATTR2FIELD.items()}
+
+_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist')
+_VERSIONS_FIELDS = ('Requires-Python',)
+_VERSION_FIELDS = ('Version',)
+_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes',
+               'Requires', 'Provides', 'Obsoletes-Dist',
+               'Provides-Dist', 'Requires-Dist', 'Requires-External',
+               'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist',
+               'Provides-Extra', 'Extension', 'License-File')
+_LISTTUPLEFIELDS = ('Project-URL',)
+
+_ELEMENTSFIELD = ('Keywords',)
+
+_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description')
+
+_MISSING = object()
+
+_FILESAFE = re.compile('[^A-Za-z0-9.]+')
+
+
+def _get_name_and_version(name, version, for_filename=False):
+    """Return the distribution name with version.
+
+    If for_filename is true, return a filename-escaped form."""
+    if for_filename:
+        # For both name and version any runs of non-alphanumeric or '.'
+        # characters are replaced with a single '-'.  Additionally any
+        # spaces in the version string become '.'
+        name = _FILESAFE.sub('-', name)
+        version = _FILESAFE.sub('-', version.replace(' ', '.'))
+    return '%s-%s' % (name, version)
+
+
+class LegacyMetadata(object):
+    """The legacy metadata of a release.
+
+    Supports versions 1.0, 1.1, 1.2, 2.0 and 1.3/2.1 (auto-detected). You can
+    instantiate the class with one of these arguments (or none):
+    - *path*, the path to a metadata file
+    - *fileobj* gives a file-like object with metadata as content
+    - *mapping* is a dict-like object
+    - *scheme* is a version scheme name
+    """
+    # TODO document the mapping API and UNKNOWN default key
+
+    def __init__(self, path=None, fileobj=None, mapping=None,
+                 scheme='default'):
+        if [path, fileobj, mapping].count(None) < 2:
+            raise TypeError('path, fileobj and mapping are exclusive')
+        self._fields = {}
+        self.requires_files = []
+        self._dependencies = None
+        self.scheme = scheme
+        if path is not None:
+            self.read(path)
+        elif fileobj is not None:
+            self.read_file(fileobj)
+        elif mapping is not None:
+            self.update(mapping)
+            self.set_metadata_version()
+
+    def set_metadata_version(self):
+        self._fields['Metadata-Version'] = _best_version(self._fields)
+
+    def _write_field(self, fileobj, name, value):
+        fileobj.write('%s: %s\n' % (name, value))
+
+    def __getitem__(self, name):
+        return self.get(name)
+
+    def __setitem__(self, name, value):
+        return self.set(name, value)
+
+    def __delitem__(self, name):
+        field_name = self._convert_name(name)
+        try:
+            del self._fields[field_name]
+        except KeyError:
+            raise KeyError(name)
+
+    def __contains__(self, name):
+        return (name in self._fields or
+                self._convert_name(name) in self._fields)
+
+    def _convert_name(self, name):
+        if name in _ALL_FIELDS:
+            return name
+        name = name.replace('-', '_').lower()
+        return _ATTR2FIELD.get(name, name)
+
+    def _default_value(self, name):
+        if name in _LISTFIELDS or name in _ELEMENTSFIELD:
+            return []
+        return 'UNKNOWN'
+
+    def _remove_line_prefix(self, value):
+        if self.metadata_version in ('1.0', '1.1'):
+            return _LINE_PREFIX_PRE_1_2.sub('\n', value)
+        else:
+            return _LINE_PREFIX_1_2.sub('\n', value)
+
+    def __getattr__(self, name):
+        if name in _ATTR2FIELD:
+            return self[name]
+        raise AttributeError(name)
+
+    #
+    # Public API
+    #
+
+#    dependencies = property(_get_dependencies, _set_dependencies)
+
+    def get_fullname(self, filesafe=False):
+        """Return the distribution name with version.
+
+        If filesafe is true, return a filename-escaped form."""
+        return _get_name_and_version(self['Name'], self['Version'], filesafe)
+
+    def is_field(self, name):
+        """return True if name is a valid metadata key"""
+        name = self._convert_name(name)
+        return name in _ALL_FIELDS
+
+    def is_multi_field(self, name):
+        name = self._convert_name(name)
+        return name in _LISTFIELDS
+
+    def read(self, filepath):
+        """Read the metadata values from a file path."""
+        fp = codecs.open(filepath, 'r', encoding='utf-8')
+        try:
+            self.read_file(fp)
+        finally:
+            fp.close()
+
+    def read_file(self, fileob):
+        """Read the metadata values from a file object."""
+        msg = message_from_file(fileob)
+        self._fields['Metadata-Version'] = msg['metadata-version']
+
+        # When reading, get all the fields we can
+        for field in _ALL_FIELDS:
+            if field not in msg:
+                continue
+            if field in _LISTFIELDS:
+                # we can have multiple lines
+                values = msg.get_all(field)
+                if field in _LISTTUPLEFIELDS and values is not None:
+                    values = [tuple(value.split(',')) for value in values]
+                self.set(field, values)
+            else:
+                # single line
+                value = msg[field]
+                if value is not None and value != 'UNKNOWN':
+                    self.set(field, value)
+
+        # PEP 566 specifies that the body be used for the description, if
+        # available
+        body = msg.get_payload()
+        self["Description"] = body if body else self["Description"]
+        # logger.debug('Attempting to set metadata for %s', self)
+        # self.set_metadata_version()
+
+    def write(self, filepath, skip_unknown=False):
+        """Write the metadata fields to filepath."""
+        fp = codecs.open(filepath, 'w', encoding='utf-8')
+        try:
+            self.write_file(fp, skip_unknown)
+        finally:
+            fp.close()
+
+    def write_file(self, fileobject, skip_unknown=False):
+        """Write the PKG-INFO format data to a file object."""
+        self.set_metadata_version()
+
+        for field in _version2fieldlist(self['Metadata-Version']):
+            values = self.get(field)
+            if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']):
+                continue
+            if field in _ELEMENTSFIELD:
+                self._write_field(fileobject, field, ','.join(values))
+                continue
+            if field not in _LISTFIELDS:
+                if field == 'Description':
+                    if self.metadata_version in ('1.0', '1.1'):
+                        values = values.replace('\n', '\n        ')
+                    else:
+                        values = values.replace('\n', '\n       |')
+                values = [values]
+
+            if field in _LISTTUPLEFIELDS:
+                values = [','.join(value) for value in values]
+
+            for value in values:
+                self._write_field(fileobject, field, value)
+
+    def update(self, other=None, **kwargs):
+        """Set metadata values from the given iterable `other` and kwargs.
+
+        Behavior is like `dict.update`: If `other` has a ``keys`` method,
+        they are looped over and ``self[key]`` is assigned ``other[key]``.
+        Else, ``other`` is an iterable of ``(key, value)`` iterables.
+
+        Keys that don't match a metadata field or that have an empty value are
+        dropped.
+        """
+        def _set(key, value):
+            if key in _ATTR2FIELD and value:
+                self.set(self._convert_name(key), value)
+
+        if not other:
+            # other is None or empty container
+            pass
+        elif hasattr(other, 'keys'):
+            for k in other.keys():
+                _set(k, other[k])
+        else:
+            for k, v in other:
+                _set(k, v)
+
+        if kwargs:
+            for k, v in kwargs.items():
+                _set(k, v)
+
+    def set(self, name, value):
+        """Control then set a metadata field."""
+        name = self._convert_name(name)
+
+        if ((name in _ELEMENTSFIELD or name == 'Platform') and
+            not isinstance(value, (list, tuple))):
+            if isinstance(value, string_types):
+                value = [v.strip() for v in value.split(',')]
+            else:
+                value = []
+        elif (name in _LISTFIELDS and
+              not isinstance(value, (list, tuple))):
+            if isinstance(value, string_types):
+                value = [value]
+            else:
+                value = []
+
+        if logger.isEnabledFor(logging.WARNING):
+            project_name = self['Name']
+
+            scheme = get_scheme(self.scheme)
+            if name in _PREDICATE_FIELDS and value is not None:
+                for v in value:
+                    # check that the values are valid
+                    if not scheme.is_valid_matcher(v.split(';')[0]):
+                        logger.warning(
+                            "'%s': '%s' is not valid (field '%s')",
+                            project_name, v, name)
+            # FIXME this rejects UNKNOWN, is that right?
+            elif name in _VERSIONS_FIELDS and value is not None:
+                if not scheme.is_valid_constraint_list(value):
+                    logger.warning("'%s': '%s' is not a valid version (field '%s')",
+                                   project_name, value, name)
+            elif name in _VERSION_FIELDS and value is not None:
+                if not scheme.is_valid_version(value):
+                    logger.warning("'%s': '%s' is not a valid version (field '%s')",
+                                   project_name, value, name)
+
+        if name in _UNICODEFIELDS:
+            if name == 'Description':
+                value = self._remove_line_prefix(value)
+
+        self._fields[name] = value
+
+    def get(self, name, default=_MISSING):
+        """Get a metadata field."""
+        name = self._convert_name(name)
+        if name not in self._fields:
+            if default is _MISSING:
+                default = self._default_value(name)
+            return default
+        if name in _UNICODEFIELDS:
+            value = self._fields[name]
+            return value
+        elif name in _LISTFIELDS:
+            value = self._fields[name]
+            if value is None:
+                return []
+            res = []
+            for val in value:
+                if name not in _LISTTUPLEFIELDS:
+                    res.append(val)
+                else:
+                    # That's for Project-URL
+                    res.append((val[0], val[1]))
+            return res
+
+        elif name in _ELEMENTSFIELD:
+            value = self._fields[name]
+            if isinstance(value, string_types):
+                return value.split(',')
+        return self._fields[name]
+
+    def check(self, strict=False):
+        """Check if the metadata is compliant. If strict is True then raise if
+        no Name or Version are provided"""
+        self.set_metadata_version()
+
+        # XXX should check the versions (if the file was loaded)
+        missing, warnings = [], []
+
+        for attr in ('Name', 'Version'):  # required by PEP 345
+            if attr not in self:
+                missing.append(attr)
+
+        if strict and missing != []:
+            msg = 'missing required metadata: %s' % ', '.join(missing)
+            raise MetadataMissingError(msg)
+
+        for attr in ('Home-page', 'Author'):
+            if attr not in self:
+                missing.append(attr)
+
+        # checking metadata 1.2 (XXX needs to check 1.1, 1.0)
+        if self['Metadata-Version'] != '1.2':
+            return missing, warnings
+
+        scheme = get_scheme(self.scheme)
+
+        def are_valid_constraints(value):
+            for v in value:
+                if not scheme.is_valid_matcher(v.split(';')[0]):
+                    return False
+            return True
+
+        for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints),
+                                   (_VERSIONS_FIELDS,
+                                    scheme.is_valid_constraint_list),
+                                   (_VERSION_FIELDS,
+                                    scheme.is_valid_version)):
+            for field in fields:
+                value = self.get(field, None)
+                if value is not None and not controller(value):
+                    warnings.append("Wrong value for '%s': %s" % (field, value))
+
+        return missing, warnings
+
+    def todict(self, skip_missing=False):
+        """Return fields as a dict.
+
+        Field names will be converted to use the underscore-lowercase style
+        instead of hyphen-mixed case (i.e. home_page instead of Home-page).
+        This is as per https://www.python.org/dev/peps/pep-0566/#id17.
+        """
+        self.set_metadata_version()
+
+        fields = _version2fieldlist(self['Metadata-Version'])
+
+        data = {}
+
+        for field_name in fields:
+            if not skip_missing or field_name in self._fields:
+                key = _FIELD2ATTR[field_name]
+                if key != 'project_url':
+                    data[key] = self[field_name]
+                else:
+                    data[key] = [','.join(u) for u in self[field_name]]
+
+        return data
+
+    def add_requirements(self, requirements):
+        if self['Metadata-Version'] == '1.1':
+            # we can't have 1.1 metadata *and* Setuptools requires
+            for field in ('Obsoletes', 'Requires', 'Provides'):
+                if field in self:
+                    del self[field]
+        self['Requires-Dist'] += requirements
+
+    # Mapping API
+    # TODO could add iter* variants
+
+    def keys(self):
+        return list(_version2fieldlist(self['Metadata-Version']))
+
+    def __iter__(self):
+        for key in self.keys():
+            yield key
+
+    def values(self):
+        return [self[key] for key in self.keys()]
+
+    def items(self):
+        return [(key, self[key]) for key in self.keys()]
+
+    def __repr__(self):
+        return '<%s %s %s>' % (self.__class__.__name__, self.name,
+                               self.version)
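+
+    # Illustrative usage sketch (not upstream code): building legacy key-value
+    # metadata from a mapping and writing it out in PKG-INFO format. Keys use
+    # the lowercase/underscore attribute style and are mapped to field names.
+    #
+    #     md = LegacyMetadata(mapping={'name': 'demo', 'version': '1.0',
+    #                                  'summary': 'A demo package'})
+    #     md['Metadata-Version']      # '1.1', chosen by _best_version()
+    #     md.write('PKG-INFO')        # hypothetical output path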
+
+
+METADATA_FILENAME = 'pydist.json'
+WHEEL_METADATA_FILENAME = 'metadata.json'
+LEGACY_METADATA_FILENAME = 'METADATA'
+
+
+class Metadata(object):
+    """
+    The metadata of a release. This implementation uses 2.1
+    metadata where possible. If not possible, it wraps a LegacyMetadata
+    instance which handles the key-value metadata format.
+    """
+
+    METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$')
+
+    NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I)
+
+    FIELDNAME_MATCHER = re.compile('^[A-Z]([0-9A-Z-]*[0-9A-Z])?$', re.I)
+
+    VERSION_MATCHER = PEP440_VERSION_RE
+
+    SUMMARY_MATCHER = re.compile('.{1,2047}')
+
+    METADATA_VERSION = '2.0'
+
+    GENERATOR = 'distlib (%s)' % __version__
+
+    MANDATORY_KEYS = {
+        'name': (),
+        'version': (),
+        'summary': ('legacy',),
+    }
+
+    INDEX_KEYS = ('name version license summary description author '
+                  'author_email keywords platform home_page classifiers '
+                  'download_url')
+
+    DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires '
+                       'dev_requires provides meta_requires obsoleted_by '
+                       'supports_environments')
+
+    SYNTAX_VALIDATORS = {
+        'metadata_version': (METADATA_VERSION_MATCHER, ()),
+        'name': (NAME_MATCHER, ('legacy',)),
+        'version': (VERSION_MATCHER, ('legacy',)),
+        'summary': (SUMMARY_MATCHER, ('legacy',)),
+        'dynamic': (FIELDNAME_MATCHER, ('legacy',)),
+    }
+
+    __slots__ = ('_legacy', '_data', 'scheme')
+
+    def __init__(self, path=None, fileobj=None, mapping=None,
+                 scheme='default'):
+        if [path, fileobj, mapping].count(None) < 2:
+            raise TypeError('path, fileobj and mapping are exclusive')
+        self._legacy = None
+        self._data = None
+        self.scheme = scheme
+        #import pdb; pdb.set_trace()
+        if mapping is not None:
+            try:
+                self._validate_mapping(mapping, scheme)
+                self._data = mapping
+            except MetadataUnrecognizedVersionError:
+                self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme)
+                self.validate()
+        else:
+            data = None
+            if path:
+                with open(path, 'rb') as f:
+                    data = f.read()
+            elif fileobj:
+                data = fileobj.read()
+            if data is None:
+                # Initialised with no args - to be added
+                self._data = {
+                    'metadata_version': self.METADATA_VERSION,
+                    'generator': self.GENERATOR,
+                }
+            else:
+                if not isinstance(data, text_type):
+                    data = data.decode('utf-8')
+                try:
+                    self._data = json.loads(data)
+                    self._validate_mapping(self._data, scheme)
+                except ValueError:
+                    # Note: MetadataUnrecognizedVersionError does not
+                    # inherit from ValueError (it's a DistlibException,
+                    # which should not inherit from ValueError).
+                    # The ValueError comes from the json.load - if that
+                    # succeeds and we get a validation error, we want
+                    # that to propagate
+                    self._legacy = LegacyMetadata(fileobj=StringIO(data),
+                                                  scheme=scheme)
+                    self.validate()
+
+    common_keys = set(('name', 'version', 'license', 'keywords', 'summary'))
+
+    none_list = (None, list)
+    none_dict = (None, dict)
+
+    mapped_keys = {
+        'run_requires': ('Requires-Dist', list),
+        'build_requires': ('Setup-Requires-Dist', list),
+        'dev_requires': none_list,
+        'test_requires': none_list,
+        'meta_requires': none_list,
+        'extras': ('Provides-Extra', list),
+        'modules': none_list,
+        'namespaces': none_list,
+        'exports': none_dict,
+        'commands': none_dict,
+        'classifiers': ('Classifier', list),
+        'source_url': ('Download-URL', None),
+        'metadata_version': ('Metadata-Version', None),
+    }
+
+    del none_list, none_dict
+
+    def __getattribute__(self, key):
+        common = object.__getattribute__(self, 'common_keys')
+        mapped = object.__getattribute__(self, 'mapped_keys')
+        if key in mapped:
+            lk, maker = mapped[key]
+            if self._legacy:
+                if lk is None:
+                    result = None if maker is None else maker()
+                else:
+                    result = self._legacy.get(lk)
+            else:
+                value = None if maker is None else maker()
+                if key not in ('commands', 'exports', 'modules', 'namespaces',
+                               'classifiers'):
+                    result = self._data.get(key, value)
+                else:
+                    # special cases for PEP 459
+                    sentinel = object()
+                    result = sentinel
+                    d = self._data.get('extensions')
+                    if d:
+                        if key == 'commands':
+                            result = d.get('python.commands', value)
+                        elif key == 'classifiers':
+                            d = d.get('python.details')
+                            if d:
+                                result = d.get(key, value)
+                        else:
+                            d = d.get('python.exports')
+                            if not d:
+                                d = self._data.get('python.exports')
+                            if d:
+                                result = d.get(key, value)
+                    if result is sentinel:
+                        result = value
+        elif key not in common:
+            result = object.__getattribute__(self, key)
+        elif self._legacy:
+            result = self._legacy.get(key)
+        else:
+            result = self._data.get(key)
+        return result
+
+    def _validate_value(self, key, value, scheme=None):
+        if key in self.SYNTAX_VALIDATORS:
+            pattern, exclusions = self.SYNTAX_VALIDATORS[key]
+            if (scheme or self.scheme) not in exclusions:
+                m = pattern.match(value)
+                if not m:
+                    raise MetadataInvalidError("'%s' is an invalid value for "
+                                               "the '%s' property" % (value,
+                                                                    key))
+
+    def __setattr__(self, key, value):
+        self._validate_value(key, value)
+        common = object.__getattribute__(self, 'common_keys')
+        mapped = object.__getattribute__(self, 'mapped_keys')
+        if key in mapped:
+            lk, _ = mapped[key]
+            if self._legacy:
+                if lk is None:
+                    raise NotImplementedError
+                self._legacy[lk] = value
+            elif key not in ('commands', 'exports', 'modules', 'namespaces',
+                             'classifiers'):
+                self._data[key] = value
+            else:
+                # special cases for PEP 459
+                d = self._data.setdefault('extensions', {})
+                if key == 'commands':
+                    d['python.commands'] = value
+                elif key == 'classifiers':
+                    d = d.setdefault('python.details', {})
+                    d[key] = value
+                else:
+                    d = d.setdefault('python.exports', {})
+                    d[key] = value
+        elif key not in common:
+            object.__setattr__(self, key, value)
+        else:
+            if key == 'keywords':
+                if isinstance(value, string_types):
+                    value = value.strip()
+                    if value:
+                        value = value.split()
+                    else:
+                        value = []
+            if self._legacy:
+                self._legacy[key] = value
+            else:
+                self._data[key] = value
+
+    @property
+    def name_and_version(self):
+        return _get_name_and_version(self.name, self.version, True)
+
+    @property
+    def provides(self):
+        if self._legacy:
+            result = self._legacy['Provides-Dist']
+        else:
+            result = self._data.setdefault('provides', [])
+        s = '%s (%s)' % (self.name, self.version)
+        if s not in result:
+            result.append(s)
+        return result
+
+    @provides.setter
+    def provides(self, value):
+        if self._legacy:
+            self._legacy['Provides-Dist'] = value
+        else:
+            self._data['provides'] = value
+
+    def get_requirements(self, reqts, extras=None, env=None):
+        """
+        Base method to get dependencies, given a set of extras
+        to satisfy and an optional environment context.
+        :param reqts: A list of sometimes-wanted dependencies,
+                      perhaps dependent on extras and environment.
+        :param extras: A list of optional components being requested.
+        :param env: An optional environment for marker evaluation.
+        """
+        if self._legacy:
+            result = reqts
+        else:
+            result = []
+            extras = get_extras(extras or [], self.extras)
+            for d in reqts:
+                if 'extra' not in d and 'environment' not in d:
+                    # unconditional
+                    include = True
+                else:
+                    if 'extra' not in d:
+                        # Not extra-dependent - only environment-dependent
+                        include = True
+                    else:
+                        include = d.get('extra') in extras
+                    if include:
+                        # Not excluded because of extras, check environment
+                        marker = d.get('environment')
+                        if marker:
+                            include = interpret(marker, env)
+                if include:
+                    result.extend(d['requires'])
+            for key in ('build', 'dev', 'test'):
+                e = ':%s:' % key
+                if e in extras:
+                    extras.remove(e)
+                    # A recursive call, but it should terminate since 'test'
+                    # has been removed from the extras
+                    reqts = self._data.get('%s_requires' % key, [])
+                    result.extend(self.get_requirements(reqts, extras=extras,
+                                                        env=env))
+        return result
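+
+    # Illustrative sketch (hypothetical data, not upstream code), assuming a
+    # JSON-backed instance `md` (i.e. self._legacy is None):
+    #
+    #     reqts = [
+    #         {'requires': ['requests >= 2.0']},                      # unconditional
+    #         {'extra': 'test', 'requires': ['pytest']},              # extra-gated
+    #         {'environment': 'os_name == "nt"', 'requires': ['pywin32']},
+    #     ]
+    #     md.get_requirements(reqts, extras=['test'])
+    #     # -> ['requests >= 2.0', 'pytest'] on a non-Windows interpreter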
+
+    @property
+    def dictionary(self):
+        if self._legacy:
+            return self._from_legacy()
+        return self._data
+
+    @property
+    def dependencies(self):
+        if self._legacy:
+            raise NotImplementedError
+        else:
+            return extract_by_key(self._data, self.DEPENDENCY_KEYS)
+
+    @dependencies.setter
+    def dependencies(self, value):
+        if self._legacy:
+            raise NotImplementedError
+        else:
+            self._data.update(value)
+
+    def _validate_mapping(self, mapping, scheme):
+        if mapping.get('metadata_version') != self.METADATA_VERSION:
+            raise MetadataUnrecognizedVersionError()
+        missing = []
+        for key, exclusions in self.MANDATORY_KEYS.items():
+            if key not in mapping:
+                if scheme not in exclusions:
+                    missing.append(key)
+        if missing:
+            msg = 'Missing metadata items: %s' % ', '.join(missing)
+            raise MetadataMissingError(msg)
+        for k, v in mapping.items():
+            self._validate_value(k, v, scheme)
+
+    def validate(self):
+        if self._legacy:
+            missing, warnings = self._legacy.check(True)
+            if missing or warnings:
+                logger.warning('Metadata: missing: %s, warnings: %s',
+                               missing, warnings)
+        else:
+            self._validate_mapping(self._data, self.scheme)
+
+    def todict(self):
+        if self._legacy:
+            return self._legacy.todict(True)
+        else:
+            result = extract_by_key(self._data, self.INDEX_KEYS)
+            return result
+
+    def _from_legacy(self):
+        assert self._legacy and not self._data
+        result = {
+            'metadata_version': self.METADATA_VERSION,
+            'generator': self.GENERATOR,
+        }
+        lmd = self._legacy.todict(True)     # skip missing ones
+        for k in ('name', 'version', 'license', 'summary', 'description',
+                  'classifier'):
+            if k in lmd:
+                if k == 'classifier':
+                    nk = 'classifiers'
+                else:
+                    nk = k
+                result[nk] = lmd[k]
+        kw = lmd.get('Keywords', [])
+        if kw == ['']:
+            kw = []
+        result['keywords'] = kw
+        keys = (('requires_dist', 'run_requires'),
+                ('setup_requires_dist', 'build_requires'))
+        for ok, nk in keys:
+            if ok in lmd and lmd[ok]:
+                result[nk] = [{'requires': lmd[ok]}]
+        result['provides'] = self.provides
+        author = {}
+        maintainer = {}
+        return result
+
+    LEGACY_MAPPING = {
+        'name': 'Name',
+        'version': 'Version',
+        ('extensions', 'python.details', 'license'): 'License',
+        'summary': 'Summary',
+        'description': 'Description',
+        ('extensions', 'python.project', 'project_urls', 'Home'): 'Home-page',
+        ('extensions', 'python.project', 'contacts', 0, 'name'): 'Author',
+        ('extensions', 'python.project', 'contacts', 0, 'email'): 'Author-email',
+        'source_url': 'Download-URL',
+        ('extensions', 'python.details', 'classifiers'): 'Classifier',
+    }
+
+    def _to_legacy(self):
+        def process_entries(entries):
+            reqts = set()
+            for e in entries:
+                extra = e.get('extra')
+                env = e.get('environment')
+                rlist = e['requires']
+                for r in rlist:
+                    if not env and not extra:
+                        reqts.add(r)
+                    else:
+                        marker = ''
+                        if extra:
+                            marker = 'extra == "%s"' % extra
+                        if env:
+                            if marker:
+                                marker = '(%s) and %s' % (env, marker)
+                            else:
+                                marker = env
+                        reqts.add(';'.join((r, marker)))
+            return reqts
+
+        assert self._data and not self._legacy
+        result = LegacyMetadata()
+        nmd = self._data
+        # import pdb; pdb.set_trace()
+        for nk, ok in self.LEGACY_MAPPING.items():
+            if not isinstance(nk, tuple):
+                if nk in nmd:
+                    result[ok] = nmd[nk]
+            else:
+                d = nmd
+                found = True
+                for k in nk:
+                    try:
+                        d = d[k]
+                    except (KeyError, IndexError):
+                        found = False
+                        break
+                if found:
+                    result[ok] = d
+        r1 = process_entries(self.run_requires + self.meta_requires)
+        r2 = process_entries(self.build_requires + self.dev_requires)
+        if self.extras:
+            result['Provides-Extra'] = sorted(self.extras)
+        result['Requires-Dist'] = sorted(r1)
+        result['Setup-Requires-Dist'] = sorted(r2)
+        # TODO: any other fields wanted
+        return result
+
+    def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True):
+        if [path, fileobj].count(None) != 1:
+            raise ValueError('Exactly one of path and fileobj is needed')
+        self.validate()
+        if legacy:
+            if self._legacy:
+                legacy_md = self._legacy
+            else:
+                legacy_md = self._to_legacy()
+            if path:
+                legacy_md.write(path, skip_unknown=skip_unknown)
+            else:
+                legacy_md.write_file(fileobj, skip_unknown=skip_unknown)
+        else:
+            if self._legacy:
+                d = self._from_legacy()
+            else:
+                d = self._data
+            if fileobj:
+                json.dump(d, fileobj, ensure_ascii=True, indent=2,
+                          sort_keys=True)
+            else:
+                with codecs.open(path, 'w', 'utf-8') as f:
+                    json.dump(d, f, ensure_ascii=True, indent=2,
+                              sort_keys=True)
+
+    def add_requirements(self, requirements):
+        if self._legacy:
+            self._legacy.add_requirements(requirements)
+        else:
+            run_requires = self._data.setdefault('run_requires', [])
+            always = None
+            for entry in run_requires:
+                if 'environment' not in entry and 'extra' not in entry:
+                    always = entry
+                    break
+            if always is None:
+                always = { 'requires': requirements }
+                run_requires.insert(0, always)
+            else:
+                rset = set(always['requires']) | set(requirements)
+                always['requires'] = sorted(rset)
+
+    def __repr__(self):
+        name = self.name or '(no name)'
+        version = self.version or 'no version'
+        return '<%s %s %s (%s)>' % (self.__class__.__name__,
+                                    self.metadata_version, name, version)
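+
+
+# Illustrative usage sketch (hypothetical paths, not part of the upstream
+# module): Metadata auto-detects its input format, wrapping a LegacyMetadata
+# instance for key-value (PKG-INFO/METADATA) input and using a JSON mapping
+# otherwise.
+#
+#     md = Metadata(path='demo-1.0.dist-info/METADATA')   # legacy key-value input
+#     md.name, md.version                                  # attribute-style access
+#     md.write(path='pydist.json')                         # re-serialize as JSON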
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/resources.py b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/resources.py
new file mode 100644
index 0000000..fef52aa
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/resources.py
@@ -0,0 +1,358 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013-2017 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+from __future__ import unicode_literals
+
+import bisect
+import io
+import logging
+import os
+import pkgutil
+import sys
+import types
+import zipimport
+
+from . import DistlibException
+from .util import cached_property, get_cache_base, Cache
+
+logger = logging.getLogger(__name__)
+
+
+cache = None    # created when needed
+
+
+class ResourceCache(Cache):
+    def __init__(self, base=None):
+        if base is None:
+            # Use native string to avoid issues on 2.x: see Python #20140.
+            base = os.path.join(get_cache_base(), str('resource-cache'))
+        super(ResourceCache, self).__init__(base)
+
+    def is_stale(self, resource, path):
+        """
+        Is the cache stale for the given resource?
+
+        :param resource: The :class:`Resource` being cached.
+        :param path: The path of the resource in the cache.
+        :return: True if the cache is stale.
+        """
+        # Cache invalidation is a hard problem :-)
+        return True
+
+    def get(self, resource):
+        """
+        Get a resource into the cache.
+
+        :param resource: A :class:`Resource` instance.
+        :return: The pathname of the resource in the cache.
+        """
+        prefix, path = resource.finder.get_cache_info(resource)
+        if prefix is None:
+            result = path
+        else:
+            result = os.path.join(self.base, self.prefix_to_dir(prefix), path)
+            dirname = os.path.dirname(result)
+            if not os.path.isdir(dirname):
+                os.makedirs(dirname)
+            if not os.path.exists(result):
+                stale = True
+            else:
+                stale = self.is_stale(resource, path)
+            if stale:
+                # write the bytes of the resource to the cache location
+                with open(result, 'wb') as f:
+                    f.write(resource.bytes)
+        return result
+
+
+class ResourceBase(object):
+    def __init__(self, finder, name):
+        self.finder = finder
+        self.name = name
+
+
+class Resource(ResourceBase):
+    """
+    A class representing an in-package resource, such as a data file. This is
+    not normally instantiated by user code, but rather by a
+    :class:`ResourceFinder` which manages the resource.
+    """
+    is_container = False        # Backwards compatibility
+
+    def as_stream(self):
+        """
+        Get the resource as a stream.
+
+        This is not a property to make it obvious that it returns a new stream
+        each time.
+        """
+        return self.finder.get_stream(self)
+
+    @cached_property
+    def file_path(self):
+        global cache
+        if cache is None:
+            cache = ResourceCache()
+        return cache.get(self)
+
+    @cached_property
+    def bytes(self):
+        return self.finder.get_bytes(self)
+
+    @cached_property
+    def size(self):
+        return self.finder.get_size(self)
+
+
+class ResourceContainer(ResourceBase):
+    is_container = True     # Backwards compatibility
+
+    @cached_property
+    def resources(self):
+        return self.finder.get_resources(self)
+
+
+class ResourceFinder(object):
+    """
+    Resource finder for file system resources.
+    """
+
+    if sys.platform.startswith('java'):
+        skipped_extensions = ('.pyc', '.pyo', '.class')
+    else:
+        skipped_extensions = ('.pyc', '.pyo')
+
+    def __init__(self, module):
+        self.module = module
+        self.loader = getattr(module, '__loader__', None)
+        self.base = os.path.dirname(getattr(module, '__file__', ''))
+
+    def _adjust_path(self, path):
+        return os.path.realpath(path)
+
+    def _make_path(self, resource_name):
+        # Issue #50: need to preserve type of path on Python 2.x
+        # like os.path._get_sep
+        if isinstance(resource_name, bytes):    # should only happen on 2.x
+            sep = b'/'
+        else:
+            sep = '/'
+        parts = resource_name.split(sep)
+        parts.insert(0, self.base)
+        result = os.path.join(*parts)
+        return self._adjust_path(result)
+
+    def _find(self, path):
+        return os.path.exists(path)
+
+    def get_cache_info(self, resource):
+        return None, resource.path
+
+    def find(self, resource_name):
+        path = self._make_path(resource_name)
+        if not self._find(path):
+            result = None
+        else:
+            if self._is_directory(path):
+                result = ResourceContainer(self, resource_name)
+            else:
+                result = Resource(self, resource_name)
+            result.path = path
+        return result
+
+    def get_stream(self, resource):
+        return open(resource.path, 'rb')
+
+    def get_bytes(self, resource):
+        with open(resource.path, 'rb') as f:
+            return f.read()
+
+    def get_size(self, resource):
+        return os.path.getsize(resource.path)
+
+    def get_resources(self, resource):
+        def allowed(f):
+            return (f != '__pycache__' and not
+                    f.endswith(self.skipped_extensions))
+        return set([f for f in os.listdir(resource.path) if allowed(f)])
+
+    def is_container(self, resource):
+        return self._is_directory(resource.path)
+
+    _is_directory = staticmethod(os.path.isdir)
+
+    def iterator(self, resource_name):
+        resource = self.find(resource_name)
+        if resource is not None:
+            todo = [resource]
+            while todo:
+                resource = todo.pop(0)
+                yield resource
+                if resource.is_container:
+                    rname = resource.name
+                    for name in resource.resources:
+                        if not rname:
+                            new_name = name
+                        else:
+                            new_name = '/'.join([rname, name])
+                        child = self.find(new_name)
+                        if child.is_container:
+                            todo.append(child)
+                        else:
+                            yield child
+
+
+class ZipResourceFinder(ResourceFinder):
+    """
+    Resource finder for resources in .zip files.
+    """
+    def __init__(self, module):
+        super(ZipResourceFinder, self).__init__(module)
+        archive = self.loader.archive
+        self.prefix_len = 1 + len(archive)
+        # PyPy doesn't have a _files attr on zipimporter, and you can't set one
+        if hasattr(self.loader, '_files'):
+            self._files = self.loader._files
+        else:
+            self._files = zipimport._zip_directory_cache[archive]
+        self.index = sorted(self._files)
+
+    def _adjust_path(self, path):
+        return path
+
+    def _find(self, path):
+        path = path[self.prefix_len:]
+        if path in self._files:
+            result = True
+        else:
+            if path and path[-1] != os.sep:
+                path = path + os.sep
+            i = bisect.bisect(self.index, path)
+            try:
+                result = self.index[i].startswith(path)
+            except IndexError:
+                result = False
+        if not result:
+            logger.debug('_find failed: %r %r', path, self.loader.prefix)
+        else:
+            logger.debug('_find worked: %r %r', path, self.loader.prefix)
+        return result
+
+    def get_cache_info(self, resource):
+        prefix = self.loader.archive
+        path = resource.path[1 + len(prefix):]
+        return prefix, path
+
+    def get_bytes(self, resource):
+        return self.loader.get_data(resource.path)
+
+    def get_stream(self, resource):
+        return io.BytesIO(self.get_bytes(resource))
+
+    def get_size(self, resource):
+        path = resource.path[self.prefix_len:]
+        return self._files[path][3]
+
+    def get_resources(self, resource):
+        path = resource.path[self.prefix_len:]
+        if path and path[-1] != os.sep:
+            path += os.sep
+        plen = len(path)
+        result = set()
+        i = bisect.bisect(self.index, path)
+        while i < len(self.index):
+            if not self.index[i].startswith(path):
+                break
+            s = self.index[i][plen:]
+            result.add(s.split(os.sep, 1)[0])   # only immediate children
+            i += 1
+        return result
+
+    def _is_directory(self, path):
+        path = path[self.prefix_len:]
+        if path and path[-1] != os.sep:
+            path += os.sep
+        i = bisect.bisect(self.index, path)
+        try:
+            result = self.index[i].startswith(path)
+        except IndexError:
+            result = False
+        return result
+
+
+_finder_registry = {
+    type(None): ResourceFinder,
+    zipimport.zipimporter: ZipResourceFinder
+}
+
+try:
+    # In Python 3.6, _frozen_importlib -> _frozen_importlib_external
+    try:
+        import _frozen_importlib_external as _fi
+    except ImportError:
+        import _frozen_importlib as _fi
+    _finder_registry[_fi.SourceFileLoader] = ResourceFinder
+    _finder_registry[_fi.FileFinder] = ResourceFinder
+    # See issue #146
+    _finder_registry[_fi.SourcelessFileLoader] = ResourceFinder
+    del _fi
+except (ImportError, AttributeError):
+    pass
+
+
+def register_finder(loader, finder_maker):
+    _finder_registry[type(loader)] = finder_maker
+
+
+_finder_cache = {}
+
+
+def finder(package):
+    """
+    Return a resource finder for a package.
+    :param package: The name of the package.
+    :return: A :class:`ResourceFinder` instance for the package.
+    """
+    if package in _finder_cache:
+        result = _finder_cache[package]
+    else:
+        if package not in sys.modules:
+            __import__(package)
+        module = sys.modules[package]
+        path = getattr(module, '__path__', None)
+        if path is None:
+            raise DistlibException('You cannot get a finder for a module, '
+                                   'only for a package')
+        loader = getattr(module, '__loader__', None)
+        finder_maker = _finder_registry.get(type(loader))
+        if finder_maker is None:
+            raise DistlibException('Unable to locate finder for %r' % package)
+        result = finder_maker(module)
+        _finder_cache[package] = result
+    return result
+
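+
+# Illustrative usage sketch (not part of upstream distlib). The package name
+# 'mypkg' and the resource path 'templates/base.txt' are hypothetical.
+def _finder_usage_sketch():
+    f = finder('mypkg')                     # ResourceFinder or ZipResourceFinder
+    res = f.find('templates/base.txt')      # Resource, ResourceContainer or None
+    if res is not None and not res.is_container:
+        data = res.bytes                    # raw bytes of the resource
+        path = res.file_path                # on-disk path (cache-extracted if zipped)
+        return data, path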
+
+_dummy_module = types.ModuleType(str('__dummy__'))
+
+
+def finder_for_path(path):
+    """
+    Return a resource finder for a path, which should represent a container.
+
+    :param path: The path.
+    :return: A :class:`ResourceFinder` instance for the path.
+    """
+    result = None
+    # calls any path hooks, gets importer into cache
+    pkgutil.get_importer(path)
+    loader = sys.path_importer_cache.get(path)
+    finder = _finder_registry.get(type(loader))
+    if finder:
+        module = _dummy_module
+        module.__file__ = os.path.join(path, '')
+        module.__loader__ = loader
+        result = finder(module)
+    return result
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/scripts.py b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/scripts.py
new file mode 100644
index 0000000..cfa45d2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/scripts.py
@@ -0,0 +1,452 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013-2023 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+from io import BytesIO
+import logging
+import os
+import re
+import struct
+import sys
+import time
+from zipfile import ZipInfo
+
+from .compat import sysconfig, detect_encoding, ZipFile
+from .resources import finder
+from .util import (FileOperator, get_export_entry, convert_path,
+                   get_executable, get_platform, in_venv)
+
+logger = logging.getLogger(__name__)
+
+_DEFAULT_MANIFEST = '''
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+ <assemblyIdentity version="1.0.0.0"
+ processorArchitecture="X86"
+ name="%s"
+ type="win32"/>
+
+ <!-- Identify the application security requirements. -->
+ <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+  <security>
+   <requestedPrivileges>
+    <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
+   </requestedPrivileges>
+  </security>
+ </trustInfo>
+</assembly>'''.strip()
+
+# check if Python is called on the first line with this expression
+FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
+SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*-
+import re
+import sys
+from %(module)s import %(import_name)s
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(%(func)s())
+'''
+
+
+def enquote_executable(executable):
+    if ' ' in executable:
+        # make sure we quote only the executable in case of env
+        # for example /usr/bin/env "/dir with spaces/bin/jython"
+        # instead of "/usr/bin/env /dir with spaces/bin/jython"
+        # otherwise whole
+        if executable.startswith('/usr/bin/env '):
+            env, _executable = executable.split(' ', 1)
+            if ' ' in _executable and not _executable.startswith('"'):
+                executable = '%s "%s"' % (env, _executable)
+        else:
+            if not executable.startswith('"'):
+                executable = '"%s"' % executable
+    return executable
+
+
+# Keep the old name around (for now), as there is at least one project using it!
+_enquote_executable = enquote_executable
+
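+
+# Illustrative sketch (not part of upstream distlib): how enquote_executable
+# behaves for interpreter paths containing spaces.
+def _enquote_executable_sketch():
+    assert (enquote_executable('/opt/my python/bin/python')
+            == '"/opt/my python/bin/python"')
+    # Only the interpreter after /usr/bin/env is quoted, not the env call itself.
+    assert (enquote_executable('/usr/bin/env /dir with spaces/bin/jython')
+            == '/usr/bin/env "/dir with spaces/bin/jython"')
+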
+
+class ScriptMaker(object):
+    """
+    A class to copy or create scripts from source scripts or callable
+    specifications.
+    """
+    script_template = SCRIPT_TEMPLATE
+
+    executable = None  # for shebangs
+
+    def __init__(self,
+                 source_dir,
+                 target_dir,
+                 add_launchers=True,
+                 dry_run=False,
+                 fileop=None):
+        self.source_dir = source_dir
+        self.target_dir = target_dir
+        self.add_launchers = add_launchers
+        self.force = False
+        self.clobber = False
+        # It only makes sense to set mode bits on POSIX.
+        self.set_mode = (os.name == 'posix') or (os.name == 'java'
+                                                 and os._name == 'posix')
+        self.variants = set(('', 'X.Y'))
+        self._fileop = fileop or FileOperator(dry_run)
+
+        self._is_nt = os.name == 'nt' or (os.name == 'java'
+                                          and os._name == 'nt')
+        self.version_info = sys.version_info
+
+    def _get_alternate_executable(self, executable, options):
+        if options.get('gui', False) and self._is_nt:  # pragma: no cover
+            dn, fn = os.path.split(executable)
+            fn = fn.replace('python', 'pythonw')
+            executable = os.path.join(dn, fn)
+        return executable
+
+    if sys.platform.startswith('java'):  # pragma: no cover
+
+        def _is_shell(self, executable):
+            """
+            Determine if the specified executable is a script
+            (contains a #! line)
+            """
+            try:
+                with open(executable) as fp:
+                    return fp.read(2) == '#!'
+            except (OSError, IOError):
+                logger.warning('Failed to open %s', executable)
+                return False
+
+        def _fix_jython_executable(self, executable):
+            if self._is_shell(executable):
+                # Workaround for Jython is not needed on Linux systems.
+                import java
+
+                if java.lang.System.getProperty('os.name') == 'Linux':
+                    return executable
+            elif executable.lower().endswith('jython.exe'):
+                # Use wrapper exe for Jython on Windows
+                return executable
+            return '/usr/bin/env %s' % executable
+
+    def _build_shebang(self, executable, post_interp):
+        """
+        Build a shebang line. In the simple case (on Windows, or a shebang line
+        which is not too long and contains no spaces) use a simple formulation for
+        the shebang. Otherwise, use /bin/sh as the executable, with a contrived
+        shebang which allows the script to run either under Python or sh, using
+        suitable quoting. Thanks to Harald Nordgren for his input.
+
+        See also: http://www.in-ulm.de/~mascheck/various/shebang/#length
+                  https://hg.mozilla.org/mozilla-central/file/tip/mach
+        """
+        if os.name != 'posix':
+            simple_shebang = True
+        else:
+            # Add 3 for '#!' prefix and newline suffix.
+            shebang_length = len(executable) + len(post_interp) + 3
+            if sys.platform == 'darwin':
+                max_shebang_length = 512
+            else:
+                max_shebang_length = 127
+            simple_shebang = ((b' ' not in executable)
+                              and (shebang_length <= max_shebang_length))
+
+        if simple_shebang:
+            result = b'#!' + executable + post_interp + b'\n'
+        else:
+            result = b'#!/bin/sh\n'
+            result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n'
+            result += b"' '''"
+        return result
+
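+    # Illustrative note (not part of upstream distlib): for a long POSIX path
+    # such as /very/long/virtualenv/path/bin/python, the contrived form is
+    #
+    #     #!/bin/sh
+    #     '''exec' /very/long/virtualenv/path/bin/python "$0" "$@"
+    #     ' '''
+    #
+    # sh executes the quoted 'exec' line and never reads further, while Python
+    # sees the same two lines as a harmless string literal before the script body.
+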
+    def _get_shebang(self, encoding, post_interp=b'', options=None):
+        enquote = True
+        if self.executable:
+            executable = self.executable
+            enquote = False  # assume this will be taken care of
+        elif not sysconfig.is_python_build():
+            executable = get_executable()
+        elif in_venv():  # pragma: no cover
+            executable = os.path.join(
+                sysconfig.get_path('scripts'),
+                'python%s' % sysconfig.get_config_var('EXE'))
+        else:  # pragma: no cover
+            if os.name == 'nt':
+                # for Python builds from source on Windows, no Python executables with
+                # a version suffix are created, so we use python.exe
+                executable = os.path.join(
+                    sysconfig.get_config_var('BINDIR'),
+                    'python%s' % (sysconfig.get_config_var('EXE')))
+            else:
+                executable = os.path.join(
+                    sysconfig.get_config_var('BINDIR'),
+                    'python%s%s' % (sysconfig.get_config_var('VERSION'),
+                                    sysconfig.get_config_var('EXE')))
+        if options:
+            executable = self._get_alternate_executable(executable, options)
+
+        if sys.platform.startswith('java'):  # pragma: no cover
+            executable = self._fix_jython_executable(executable)
+
+        # Normalise case for Windows - COMMENTED OUT
+        # executable = os.path.normcase(executable)
+        # N.B. The normalising operation above has been commented out: See
+        # issue #124. Although paths in Windows are generally case-insensitive,
+        # they aren't always. For example, a path containing a ẞ (which is a
+        # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a
+        # LATIN SMALL LETTER SHARP S - U+00DF). The two are not considered by
+        # Windows as equivalent in path names.
+
+        # If the user didn't specify an executable, it may be necessary to
+        # cater for executable paths with spaces (not uncommon on Windows)
+        if enquote:
+            executable = enquote_executable(executable)
+        # Issue #51: don't use fsencode, since we later try to
+        # check that the shebang is decodable using utf-8.
+        executable = executable.encode('utf-8')
+        # in case of IronPython, play safe and enable frames support
+        if (sys.platform == 'cli' and '-X:Frames' not in post_interp
+                and '-X:FullFrames' not in post_interp):  # pragma: no cover
+            post_interp += b' -X:Frames'
+        shebang = self._build_shebang(executable, post_interp)
+        # Python parser starts to read a script using UTF-8 until
+        # it gets a #coding:xxx cookie. The shebang has to be the
+        # first line of a file, the #coding:xxx cookie cannot be
+        # written before. So the shebang has to be decodable from
+        # UTF-8.
+        try:
+            shebang.decode('utf-8')
+        except UnicodeDecodeError:  # pragma: no cover
+            raise ValueError('The shebang (%r) is not decodable from utf-8' %
+                             shebang)
+        # If the script is encoded to a custom encoding (use a
+        # #coding:xxx cookie), the shebang has to be decodable from
+        # the script encoding too.
+        if encoding != 'utf-8':
+            try:
+                shebang.decode(encoding)
+            except UnicodeDecodeError:  # pragma: no cover
+                raise ValueError('The shebang (%r) is not decodable '
+                                 'from the script encoding (%r)' %
+                                 (shebang, encoding))
+        return shebang
+
+    def _get_script_text(self, entry):
+        return self.script_template % dict(
+            module=entry.prefix,
+            import_name=entry.suffix.split('.')[0],
+            func=entry.suffix)
+
+    manifest = _DEFAULT_MANIFEST
+
+    def get_manifest(self, exename):
+        base = os.path.basename(exename)
+        return self.manifest % base
+
+    def _write_script(self, names, shebang, script_bytes, filenames, ext):
+        use_launcher = self.add_launchers and self._is_nt
+        linesep = os.linesep.encode('utf-8')
+        if not shebang.endswith(linesep):
+            shebang += linesep
+        if not use_launcher:
+            script_bytes = shebang + script_bytes
+        else:  # pragma: no cover
+            if ext == 'py':
+                launcher = self._get_launcher('t')
+            else:
+                launcher = self._get_launcher('w')
+            stream = BytesIO()
+            with ZipFile(stream, 'w') as zf:
+                source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH')
+                if source_date_epoch:
+                    date_time = time.gmtime(int(source_date_epoch))[:6]
+                    zinfo = ZipInfo(filename='__main__.py',
+                                    date_time=date_time)
+                    zf.writestr(zinfo, script_bytes)
+                else:
+                    zf.writestr('__main__.py', script_bytes)
+            zip_data = stream.getvalue()
+            script_bytes = launcher + shebang + zip_data
+        for name in names:
+            outname = os.path.join(self.target_dir, name)
+            if use_launcher:  # pragma: no cover
+                n, e = os.path.splitext(outname)
+                if e.startswith('.py'):
+                    outname = n
+                outname = '%s.exe' % outname
+                try:
+                    self._fileop.write_binary_file(outname, script_bytes)
+                except Exception:
+                    # Failed writing an executable - it might be in use.
+                    logger.warning('Failed to write executable - trying to '
+                                   'use .deleteme logic')
+                    dfname = '%s.deleteme' % outname
+                    if os.path.exists(dfname):
+                        os.remove(dfname)  # Not allowed to fail here
+                    os.rename(outname, dfname)  # nor here
+                    self._fileop.write_binary_file(outname, script_bytes)
+                    logger.debug('Able to replace executable using '
+                                 '.deleteme logic')
+                    try:
+                        os.remove(dfname)
+                    except Exception:
+                        pass  # still in use - ignore error
+            else:
+                if self._is_nt and not outname.endswith(
+                        '.' + ext):  # pragma: no cover
+                    outname = '%s.%s' % (outname, ext)
+                if os.path.exists(outname) and not self.clobber:
+                    logger.warning('Skipping existing file %s', outname)
+                    continue
+                self._fileop.write_binary_file(outname, script_bytes)
+                if self.set_mode:
+                    self._fileop.set_executable_mode([outname])
+            filenames.append(outname)
+
+    variant_separator = '-'
+
+    def get_script_filenames(self, name):
+        result = set()
+        if '' in self.variants:
+            result.add(name)
+        if 'X' in self.variants:
+            result.add('%s%s' % (name, self.version_info[0]))
+        if 'X.Y' in self.variants:
+            result.add('%s%s%s.%s' %
+                       (name, self.variant_separator, self.version_info[0],
+                        self.version_info[1]))
+        return result
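+
+    # Illustrative sketch (not part of upstream distlib): with the default
+    # variants {'', 'X.Y'} and version_info (3, 12), get_script_filenames('foo')
+    # yields {'foo', 'foo-3.12'}; adding the 'X' variant would also yield 'foo3'
+    # (no separator is used for the bare major-version variant).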
+
+    def _make_script(self, entry, filenames, options=None):
+        post_interp = b''
+        if options:
+            args = options.get('interpreter_args', [])
+            if args:
+                args = ' %s' % ' '.join(args)
+                post_interp = args.encode('utf-8')
+        shebang = self._get_shebang('utf-8', post_interp, options=options)
+        script = self._get_script_text(entry).encode('utf-8')
+        scriptnames = self.get_script_filenames(entry.name)
+        if options and options.get('gui', False):
+            ext = 'pyw'
+        else:
+            ext = 'py'
+        self._write_script(scriptnames, shebang, script, filenames, ext)
+
+    def _copy_script(self, script, filenames):
+        adjust = False
+        script = os.path.join(self.source_dir, convert_path(script))
+        outname = os.path.join(self.target_dir, os.path.basename(script))
+        if not self.force and not self._fileop.newer(script, outname):
+            logger.debug('not copying %s (up-to-date)', script)
+            return
+
+        # Always open the file, but ignore failures in dry-run mode --
+        # that way, we'll get accurate feedback if we can read the
+        # script.
+        try:
+            f = open(script, 'rb')
+        except IOError:  # pragma: no cover
+            if not self.dry_run:
+                raise
+            f = None
+        else:
+            first_line = f.readline()
+            if not first_line:  # pragma: no cover
+                logger.warning('%s is an empty file (skipping)', script)
+                return
+
+            match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
+            if match:
+                adjust = True
+                post_interp = match.group(1) or b''
+
+        if not adjust:
+            if f:
+                f.close()
+            self._fileop.copy_file(script, outname)
+            if self.set_mode:
+                self._fileop.set_executable_mode([outname])
+            filenames.append(outname)
+        else:
+            logger.info('copying and adjusting %s -> %s', script,
+                        self.target_dir)
+            if not self._fileop.dry_run:
+                encoding, lines = detect_encoding(f.readline)
+                f.seek(0)
+                shebang = self._get_shebang(encoding, post_interp)
+                if b'pythonw' in first_line:  # pragma: no cover
+                    ext = 'pyw'
+                else:
+                    ext = 'py'
+                n = os.path.basename(outname)
+                self._write_script([n], shebang, f.read(), filenames, ext)
+            if f:
+                f.close()
+
+    @property
+    def dry_run(self):
+        return self._fileop.dry_run
+
+    @dry_run.setter
+    def dry_run(self, value):
+        self._fileop.dry_run = value
+
+    if os.name == 'nt' or (os.name == 'java'
+                           and os._name == 'nt'):  # pragma: no cover
+        # Executable launcher support.
+        # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/
+
+        def _get_launcher(self, kind):
+            if struct.calcsize('P') == 8:  # 64-bit
+                bits = '64'
+            else:
+                bits = '32'
+            platform_suffix = '-arm' if get_platform() == 'win-arm64' else ''
+            name = '%s%s%s.exe' % (kind, bits, platform_suffix)
+            # Issue 31: don't hardcode an absolute package name, but
+            # determine it relative to the current package
+            distlib_package = __name__.rsplit('.', 1)[0]
+            resource = finder(distlib_package).find(name)
+            if not resource:
+                msg = ('Unable to find resource %s in package %s' %
+                       (name, distlib_package))
+                raise ValueError(msg)
+            return resource.bytes
+
+    # Public API follows
+
+    def make(self, specification, options=None):
+        """
+        Make a script.
+
+        :param specification: The specification, which is either a valid export
+                              entry specification (to make a script from a
+                              callable) or a filename (to make a script by
+                              copying from a source location).
+        :param options: A dictionary of options controlling script generation.
+        :return: A list of all absolute pathnames written to.
+        """
+        filenames = []
+        entry = get_export_entry(specification)
+        if entry is None:
+            self._copy_script(specification, filenames)
+        else:
+            self._make_script(entry, filenames, options=options)
+        return filenames
+
+    def make_multiple(self, specifications, options=None):
+        """
+        Take a list of specifications and make scripts from them.
+
+        :param specifications: A list of specifications.
+        :param options: As for the ``make`` method.
+        :return: A list of all absolute pathnames written to.
+        """
+        filenames = []
+        for specification in specifications:
+            filenames.extend(self.make(specification, options))
+        return filenames
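+
+
+# Illustrative usage sketch (not part of upstream distlib). The export entry
+# 'foo = mypkg.cli:main' and the target directory are hypothetical.
+def _script_maker_usage_sketch():
+    maker = ScriptMaker(source_dir=None, target_dir='/tmp/bin')
+    # Writes a 'foo' wrapper script (plus a versioned 'foo-X.Y' variant by
+    # default) into target_dir and returns the absolute paths written.
+    return maker.make('foo = mypkg.cli:main')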
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/util.py b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/util.py
new file mode 100644
index 0000000..ba58858
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/util.py
@@ -0,0 +1,2025 @@
+#
+# Copyright (C) 2012-2023 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+import codecs
+from collections import deque
+import contextlib
+import csv
+from glob import iglob as std_iglob
+import io
+import json
+import logging
+import os
+import py_compile
+import re
+import socket
+try:
+    import ssl
+except ImportError:  # pragma: no cover
+    ssl = None
+import subprocess
+import sys
+import tarfile
+import tempfile
+import textwrap
+
+try:
+    import threading
+except ImportError:  # pragma: no cover
+    import dummy_threading as threading
+import time
+
+from . import DistlibException
+from .compat import (string_types, text_type, shutil, raw_input, StringIO,
+                     cache_from_source, urlopen, urljoin, httplib, xmlrpclib,
+                     HTTPHandler, BaseConfigurator, valid_ident,
+                     Container, configparser, URLError, ZipFile, fsdecode,
+                     unquote, urlparse)
+
+logger = logging.getLogger(__name__)
+
+#
+# Requirement parsing code as per PEP 508
+#
+
+IDENTIFIER = re.compile(r'^([\w\.-]+)\s*')
+VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*')
+COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*')
+MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*')
+OR = re.compile(r'^or\b\s*')
+AND = re.compile(r'^and\b\s*')
+NON_SPACE = re.compile(r'(\S+)\s*')
+STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)')
+
+
+def parse_marker(marker_string):
+    """
+    Parse a marker string and return a dictionary containing a marker expression.
+
+    The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in
+    the expression grammar, or strings. A string contained in quotes is to be
+    interpreted as a literal string, and a string not contained in quotes is a
+    variable (such as os_name).
+    """
+
+    def marker_var(remaining):
+        # either identifier, or literal string
+        m = IDENTIFIER.match(remaining)
+        if m:
+            result = m.groups()[0]
+            remaining = remaining[m.end():]
+        elif not remaining:
+            raise SyntaxError('unexpected end of input')
+        else:
+            q = remaining[0]
+            if q not in '\'"':
+                raise SyntaxError('invalid expression: %s' % remaining)
+            oq = '\'"'.replace(q, '')
+            remaining = remaining[1:]
+            parts = [q]
+            while remaining:
+                # either a string chunk, or oq, or q to terminate
+                if remaining[0] == q:
+                    break
+                elif remaining[0] == oq:
+                    parts.append(oq)
+                    remaining = remaining[1:]
+                else:
+                    m = STRING_CHUNK.match(remaining)
+                    if not m:
+                        raise SyntaxError('error in string literal: %s' %
+                                          remaining)
+                    parts.append(m.groups()[0])
+                    remaining = remaining[m.end():]
+            else:
+                s = ''.join(parts)
+                raise SyntaxError('unterminated string: %s' % s)
+            parts.append(q)
+            result = ''.join(parts)
+            remaining = remaining[1:].lstrip()  # skip past closing quote
+        return result, remaining
+
+    def marker_expr(remaining):
+        if remaining and remaining[0] == '(':
+            result, remaining = marker(remaining[1:].lstrip())
+            if remaining[0] != ')':
+                raise SyntaxError('unterminated parenthesis: %s' % remaining)
+            remaining = remaining[1:].lstrip()
+        else:
+            lhs, remaining = marker_var(remaining)
+            while remaining:
+                m = MARKER_OP.match(remaining)
+                if not m:
+                    break
+                op = m.groups()[0]
+                remaining = remaining[m.end():]
+                rhs, remaining = marker_var(remaining)
+                lhs = {'op': op, 'lhs': lhs, 'rhs': rhs}
+            result = lhs
+        return result, remaining
+
+    def marker_and(remaining):
+        lhs, remaining = marker_expr(remaining)
+        while remaining:
+            m = AND.match(remaining)
+            if not m:
+                break
+            remaining = remaining[m.end():]
+            rhs, remaining = marker_expr(remaining)
+            lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs}
+        return lhs, remaining
+
+    def marker(remaining):
+        lhs, remaining = marker_and(remaining)
+        while remaining:
+            m = OR.match(remaining)
+            if not m:
+                break
+            remaining = remaining[m.end():]
+            rhs, remaining = marker_and(remaining)
+            lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs}
+        return lhs, remaining
+
+    return marker(marker_string)
+
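+
+# Illustrative sketch (not part of upstream distlib); the expected structure
+# follows directly from the parsing code above.
+def _parse_marker_sketch():
+    expr, rest = parse_marker('python_version >= "3.8" and os_name == "posix"')
+    assert rest == ''
+    assert expr == {
+        'op': 'and',
+        'lhs': {'op': '>=', 'lhs': 'python_version', 'rhs': '"3.8"'},
+        'rhs': {'op': '==', 'lhs': 'os_name', 'rhs': '"posix"'},
+    }
+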
+
+def parse_requirement(req):
+    """
+    Parse a requirement passed in as a string. Return a Container
+    whose attributes contain the various parts of the requirement.
+    """
+    remaining = req.strip()
+    if not remaining or remaining.startswith('#'):
+        return None
+    m = IDENTIFIER.match(remaining)
+    if not m:
+        raise SyntaxError('name expected: %s' % remaining)
+    distname = m.groups()[0]
+    remaining = remaining[m.end():]
+    extras = mark_expr = versions = uri = None
+    if remaining and remaining[0] == '[':
+        i = remaining.find(']', 1)
+        if i < 0:
+            raise SyntaxError('unterminated extra: %s' % remaining)
+        s = remaining[1:i]
+        remaining = remaining[i + 1:].lstrip()
+        extras = []
+        while s:
+            m = IDENTIFIER.match(s)
+            if not m:
+                raise SyntaxError('malformed extra: %s' % s)
+            extras.append(m.groups()[0])
+            s = s[m.end():]
+            if not s:
+                break
+            if s[0] != ',':
+                raise SyntaxError('comma expected in extras: %s' % s)
+            s = s[1:].lstrip()
+        if not extras:
+            extras = None
+    if remaining:
+        if remaining[0] == '@':
+            # it's a URI
+            remaining = remaining[1:].lstrip()
+            m = NON_SPACE.match(remaining)
+            if not m:
+                raise SyntaxError('invalid URI: %s' % remaining)
+            uri = m.groups()[0]
+            t = urlparse(uri)
+            # there are issues with Python and URL parsing, so this test
+            # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
+            # always parse invalid URLs correctly - it should raise
+            # exceptions for malformed URLs
+            if not (t.scheme and t.netloc):
+                raise SyntaxError('Invalid URL: %s' % uri)
+            remaining = remaining[m.end():].lstrip()
+        else:
+
+            def get_versions(ver_remaining):
+                """
+                Return a list of operator, version tuples if any are
+                specified, else None.
+                """
+                m = COMPARE_OP.match(ver_remaining)
+                versions = None
+                if m:
+                    versions = []
+                    while True:
+                        op = m.groups()[0]
+                        ver_remaining = ver_remaining[m.end():]
+                        m = VERSION_IDENTIFIER.match(ver_remaining)
+                        if not m:
+                            raise SyntaxError('invalid version: %s' %
+                                              ver_remaining)
+                        v = m.groups()[0]
+                        versions.append((op, v))
+                        ver_remaining = ver_remaining[m.end():]
+                        if not ver_remaining or ver_remaining[0] != ',':
+                            break
+                        ver_remaining = ver_remaining[1:].lstrip()
+                        # Some packages have a trailing comma which would break things
+                        # See issue #148
+                        if not ver_remaining:
+                            break
+                        m = COMPARE_OP.match(ver_remaining)
+                        if not m:
+                            raise SyntaxError('invalid constraint: %s' %
+                                              ver_remaining)
+                    if not versions:
+                        versions = None
+                return versions, ver_remaining
+
+            if remaining[0] != '(':
+                versions, remaining = get_versions(remaining)
+            else:
+                i = remaining.find(')', 1)
+                if i < 0:
+                    raise SyntaxError('unterminated parenthesis: %s' %
+                                      remaining)
+                s = remaining[1:i]
+                remaining = remaining[i + 1:].lstrip()
+                # As a special diversion from PEP 508, allow a version number
+                # a.b.c in parentheses as a synonym for ~= a.b.c (because this
+                # is allowed in earlier PEPs)
+                if COMPARE_OP.match(s):
+                    versions, _ = get_versions(s)
+                else:
+                    m = VERSION_IDENTIFIER.match(s)
+                    if not m:
+                        raise SyntaxError('invalid constraint: %s' % s)
+                    v = m.groups()[0]
+                    s = s[m.end():].lstrip()
+                    if s:
+                        raise SyntaxError('invalid constraint: %s' % s)
+                    versions = [('~=', v)]
+
+    if remaining:
+        if remaining[0] != ';':
+            raise SyntaxError('invalid requirement: %s' % remaining)
+        remaining = remaining[1:].lstrip()
+
+        mark_expr, remaining = parse_marker(remaining)
+
+    if remaining and remaining[0] != '#':
+        raise SyntaxError('unexpected trailing data: %s' % remaining)
+
+    if not versions:
+        rs = distname
+    else:
+        rs = '%s %s' % (distname, ', '.join(
+            ['%s %s' % con for con in versions]))
+    return Container(name=distname,
+                     extras=extras,
+                     constraints=versions,
+                     marker=mark_expr,
+                     url=uri,
+                     requirement=rs)
+
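+
+# Illustrative sketch (not part of upstream distlib); the attribute values
+# follow from the parsing code above.
+def _parse_requirement_sketch():
+    r = parse_requirement('requests[security] (>= 2.0, < 3.0); os_name == "posix"')
+    assert r.name == 'requests'
+    assert r.extras == ['security']
+    assert r.constraints == [('>=', '2.0'), ('<', '3.0')]
+    assert r.marker == {'op': '==', 'lhs': 'os_name', 'rhs': '"posix"'}
+    assert r.requirement == 'requests >= 2.0, < 3.0'
+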
+
+def get_resources_dests(resources_root, rules):
+    """Find destinations for resources files"""
+
+    def get_rel_path(root, path):
+        # normalizes and returns a lstripped-/-separated path
+        root = root.replace(os.path.sep, '/')
+        path = path.replace(os.path.sep, '/')
+        assert path.startswith(root)
+        return path[len(root):].lstrip('/')
+
+    destinations = {}
+    for base, suffix, dest in rules:
+        prefix = os.path.join(resources_root, base)
+        for abs_base in iglob(prefix):
+            abs_glob = os.path.join(abs_base, suffix)
+            for abs_path in iglob(abs_glob):
+                resource_file = get_rel_path(resources_root, abs_path)
+                if dest is None:  # remove the entry if it was here
+                    destinations.pop(resource_file, None)
+                else:
+                    rel_path = get_rel_path(abs_base, abs_path)
+                    rel_dest = dest.replace(os.path.sep, '/').rstrip('/')
+                    destinations[resource_file] = rel_dest + '/' + rel_path
+    return destinations
+
+
+def in_venv():
+    if hasattr(sys, 'real_prefix'):
+        # virtualenv venvs
+        result = True
+    else:
+        # PEP 405 venvs
+        result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix)
+    return result
+
+
+def get_executable():
+    # The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as
+    # changes to the stub launcher mean that sys.executable always points
+    # to the stub on OS X
+    #    if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__'
+    #                                     in os.environ):
+    #        result =  os.environ['__PYVENV_LAUNCHER__']
+    #    else:
+    #        result = sys.executable
+    #    return result
+    # Avoid normcasing: see issue #143
+    # result = os.path.normcase(sys.executable)
+    result = sys.executable
+    if not isinstance(result, text_type):
+        result = fsdecode(result)
+    return result
+
+
+def proceed(prompt, allowed_chars, error_prompt=None, default=None):
+    p = prompt
+    while True:
+        s = raw_input(p)
+        p = prompt
+        if not s and default:
+            s = default
+        if s:
+            c = s[0].lower()
+            if c in allowed_chars:
+                break
+            if error_prompt:
+                p = '%c: %s\n%s' % (c, error_prompt, prompt)
+    return c
+
+
+def extract_by_key(d, keys):
+    if isinstance(keys, string_types):
+        keys = keys.split()
+    result = {}
+    for key in keys:
+        if key in d:
+            result[key] = d[key]
+    return result
+
+
+def read_exports(stream):
+    if sys.version_info[0] >= 3:
+        # needs to be a text stream
+        stream = codecs.getreader('utf-8')(stream)
+    # Try to load as JSON, falling back on legacy format
+    data = stream.read()
+    stream = StringIO(data)
+    try:
+        jdata = json.load(stream)
+        result = jdata['extensions']['python.exports']['exports']
+        for group, entries in result.items():
+            for k, v in entries.items():
+                s = '%s = %s' % (k, v)
+                entry = get_export_entry(s)
+                assert entry is not None
+                entries[k] = entry
+        return result
+    except Exception:
+        stream.seek(0, 0)
+
+    def read_stream(cp, stream):
+        if hasattr(cp, 'read_file'):
+            cp.read_file(stream)
+        else:
+            cp.readfp(stream)
+
+    cp = configparser.ConfigParser()
+    try:
+        read_stream(cp, stream)
+    except configparser.MissingSectionHeaderError:
+        stream.close()
+        data = textwrap.dedent(data)
+        stream = StringIO(data)
+        read_stream(cp, stream)
+
+    result = {}
+    for key in cp.sections():
+        result[key] = entries = {}
+        for name, value in cp.items(key):
+            s = '%s = %s' % (name, value)
+            entry = get_export_entry(s)
+            assert entry is not None
+            # entry.dist = self
+            entries[name] = entry
+    return result
+
+
+def write_exports(exports, stream):
+    if sys.version_info[0] >= 3:
+        # needs to be a text stream
+        stream = codecs.getwriter('utf-8')(stream)
+    cp = configparser.ConfigParser()
+    for k, v in exports.items():
+        # TODO check k, v for valid values
+        cp.add_section(k)
+        for entry in v.values():
+            if entry.suffix is None:
+                s = entry.prefix
+            else:
+                s = '%s:%s' % (entry.prefix, entry.suffix)
+            if entry.flags:
+                s = '%s [%s]' % (s, ', '.join(entry.flags))
+            cp.set(k, entry.name, s)
+    cp.write(stream)
+
+
+@contextlib.contextmanager
+def tempdir():
+    td = tempfile.mkdtemp()
+    try:
+        yield td
+    finally:
+        shutil.rmtree(td)
+
+
+@contextlib.contextmanager
+def chdir(d):
+    cwd = os.getcwd()
+    try:
+        os.chdir(d)
+        yield
+    finally:
+        os.chdir(cwd)
+
+
+@contextlib.contextmanager
+def socket_timeout(seconds=15):
+    cto = socket.getdefaulttimeout()
+    try:
+        socket.setdefaulttimeout(seconds)
+        yield
+    finally:
+        socket.setdefaulttimeout(cto)
+
+
+class cached_property(object):
+
+    def __init__(self, func):
+        self.func = func
+        # for attr in ('__name__', '__module__', '__doc__'):
+        #     setattr(self, attr, getattr(func, attr, None))
+
+    def __get__(self, obj, cls=None):
+        if obj is None:
+            return self
+        value = self.func(obj)
+        object.__setattr__(obj, self.func.__name__, value)
+        # obj.__dict__[self.func.__name__] = value = self.func(obj)
+        return value
+
+
+def convert_path(pathname):
+    """Return 'pathname' as a name that will work on the native filesystem.
+
+    The path is split on '/' and put back together again using the current
+    directory separator.  Needed because filenames in the setup script are
+    always supplied in Unix style, and have to be converted to the local
+    convention before we can actually use them in the filesystem.  Raises
+    ValueError on non-Unix-ish systems if 'pathname' either starts or
+    ends with a slash.
+    """
+    if os.sep == '/':
+        return pathname
+    if not pathname:
+        return pathname
+    if pathname[0] == '/':
+        raise ValueError("path '%s' cannot be absolute" % pathname)
+    if pathname[-1] == '/':
+        raise ValueError("path '%s' cannot end with '/'" % pathname)
+
+    paths = pathname.split('/')
+    while os.curdir in paths:
+        paths.remove(os.curdir)
+    if not paths:
+        return os.curdir
+    return os.path.join(*paths)
+
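+
+# Illustrative sketch (not part of upstream distlib): setup-style '/' paths are
+# rewritten with the native separator, so the result matches os.path.join on
+# the same components.
+def _convert_path_sketch():
+    assert convert_path('pkg/data/file.txt') == os.path.join('pkg', 'data', 'file.txt')
+    # On non-POSIX systems, '/pkg' or 'pkg/' would raise ValueError.
+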
+
+class FileOperator(object):
+
+    def __init__(self, dry_run=False):
+        self.dry_run = dry_run
+        self.ensured = set()
+        self._init_record()
+
+    def _init_record(self):
+        self.record = False
+        self.files_written = set()
+        self.dirs_created = set()
+
+    def record_as_written(self, path):
+        if self.record:
+            self.files_written.add(path)
+
+    def newer(self, source, target):
+        """Tell if the target is newer than the source.
+
+        Returns true if 'source' exists and is more recently modified than
+        'target', or if 'source' exists and 'target' doesn't.
+
+        Returns false if both exist and 'target' is the same age or younger
+        than 'source'. Raises DistlibException if 'source' does not exist.
+
+        Note that this test is not very accurate: files created in the same
+        second will have the same "age".
+        """
+        if not os.path.exists(source):
+            raise DistlibException("file '%r' does not exist" %
+                                   os.path.abspath(source))
+        if not os.path.exists(target):
+            return True
+
+        return os.stat(source).st_mtime > os.stat(target).st_mtime
+
+    def copy_file(self, infile, outfile, check=True):
+        """Copy a file respecting dry-run and force flags.
+        """
+        self.ensure_dir(os.path.dirname(outfile))
+        logger.info('Copying %s to %s', infile, outfile)
+        if not self.dry_run:
+            msg = None
+            if check:
+                if os.path.islink(outfile):
+                    msg = '%s is a symlink' % outfile
+                elif os.path.exists(outfile) and not os.path.isfile(outfile):
+                    msg = '%s is a non-regular file' % outfile
+            if msg:
+                raise ValueError(msg + ' which would be overwritten')
+            shutil.copyfile(infile, outfile)
+        self.record_as_written(outfile)
+
+    def copy_stream(self, instream, outfile, encoding=None):
+        assert not os.path.isdir(outfile)
+        self.ensure_dir(os.path.dirname(outfile))
+        logger.info('Copying stream %s to %s', instream, outfile)
+        if not self.dry_run:
+            if encoding is None:
+                outstream = open(outfile, 'wb')
+            else:
+                outstream = codecs.open(outfile, 'w', encoding=encoding)
+            try:
+                shutil.copyfileobj(instream, outstream)
+            finally:
+                outstream.close()
+        self.record_as_written(outfile)
+
+    def write_binary_file(self, path, data):
+        self.ensure_dir(os.path.dirname(path))
+        if not self.dry_run:
+            if os.path.exists(path):
+                os.remove(path)
+            with open(path, 'wb') as f:
+                f.write(data)
+        self.record_as_written(path)
+
+    def write_text_file(self, path, data, encoding):
+        self.write_binary_file(path, data.encode(encoding))
+
+    def set_mode(self, bits, mask, files):
+        if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'):
+            # Set the executable bits (owner, group, and world) on
+            # all the files specified.
+            for f in files:
+                if self.dry_run:
+                    logger.info("changing mode of %s", f)
+                else:
+                    mode = (os.stat(f).st_mode | bits) & mask
+                    logger.info("changing mode of %s to %o", f, mode)
+                    os.chmod(f, mode)
+
+    set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f)
+
+    def ensure_dir(self, path):
+        path = os.path.abspath(path)
+        if path not in self.ensured and not os.path.exists(path):
+            self.ensured.add(path)
+            d, f = os.path.split(path)
+            self.ensure_dir(d)
+            logger.info('Creating %s' % path)
+            if not self.dry_run:
+                os.mkdir(path)
+            if self.record:
+                self.dirs_created.add(path)
+
+    def byte_compile(self,
+                     path,
+                     optimize=False,
+                     force=False,
+                     prefix=None,
+                     hashed_invalidation=False):
+        dpath = cache_from_source(path, not optimize)
+        logger.info('Byte-compiling %s to %s', path, dpath)
+        if not self.dry_run:
+            if force or self.newer(path, dpath):
+                if not prefix:
+                    diagpath = None
+                else:
+                    assert path.startswith(prefix)
+                    diagpath = path[len(prefix):]
+                # Only compile (and only reference diagpath) when the source is
+                # newer than the cached bytecode or compilation is forced.
+                compile_kwargs = {}
+                if hashed_invalidation and hasattr(py_compile,
+                                                   'PycInvalidationMode'):
+                    compile_kwargs[
+                        'invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH
+                py_compile.compile(path, dpath, diagpath, True,
+                                   **compile_kwargs)  # raise error
+        self.record_as_written(dpath)
+        return dpath
+
+    def ensure_removed(self, path):
+        if os.path.exists(path):
+            if os.path.isdir(path) and not os.path.islink(path):
+                logger.debug('Removing directory tree at %s', path)
+                if not self.dry_run:
+                    shutil.rmtree(path)
+                if self.record:
+                    if path in self.dirs_created:
+                        self.dirs_created.remove(path)
+            else:
+                if os.path.islink(path):
+                    s = 'link'
+                else:
+                    s = 'file'
+                logger.debug('Removing %s %s', s, path)
+                if not self.dry_run:
+                    os.remove(path)
+                if self.record:
+                    if path in self.files_written:
+                        self.files_written.remove(path)
+
+    def is_writable(self, path):
+        result = False
+        while not result:
+            if os.path.exists(path):
+                result = os.access(path, os.W_OK)
+                break
+            parent = os.path.dirname(path)
+            if parent == path:
+                break
+            path = parent
+        return result
+
+    def commit(self):
+        """
+        Commit recorded changes, turn off recording, return
+        changes.
+        """
+        assert self.record
+        result = self.files_written, self.dirs_created
+        self._init_record()
+        return result
+
+    def rollback(self):
+        if not self.dry_run:
+            for f in list(self.files_written):
+                if os.path.exists(f):
+                    os.remove(f)
+            # dirs should all be empty now, except perhaps for
+            # __pycache__ subdirs
+            # reverse so that subdirs appear before their parents
+            dirs = sorted(self.dirs_created, reverse=True)
+            for d in dirs:
+                flist = os.listdir(d)
+                if flist:
+                    assert flist == ['__pycache__']
+                    sd = os.path.join(d, flist[0])
+                    os.rmdir(sd)
+                os.rmdir(d)  # should fail if non-empty
+        self._init_record()
+
+
+def resolve(module_name, dotted_path):
+    if module_name in sys.modules:
+        mod = sys.modules[module_name]
+    else:
+        mod = __import__(module_name)
+    if dotted_path is None:
+        result = mod
+    else:
+        parts = dotted_path.split('.')
+        result = getattr(mod, parts.pop(0))
+        for p in parts:
+            result = getattr(result, p)
+    return result
+
+
+class ExportEntry(object):
+
+    def __init__(self, name, prefix, suffix, flags):
+        self.name = name
+        self.prefix = prefix
+        self.suffix = suffix
+        self.flags = flags
+
+    @cached_property
+    def value(self):
+        return resolve(self.prefix, self.suffix)
+
+    def __repr__(self):  # pragma: no cover
+        return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,
+                                                self.suffix, self.flags)
+
+    def __eq__(self, other):
+        if not isinstance(other, ExportEntry):
+            result = False
+        else:
+            result = (self.name == other.name and self.prefix == other.prefix
+                      and self.suffix == other.suffix
+                      and self.flags == other.flags)
+        return result
+
+    __hash__ = object.__hash__
+
+
+ENTRY_RE = re.compile(
+    r'''(?P<name>([^\[]\S*))
+                      \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
+                      \s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
+                      ''', re.VERBOSE)
+
+
+def get_export_entry(specification):
+    m = ENTRY_RE.search(specification)
+    if not m:
+        result = None
+        if '[' in specification or ']' in specification:
+            raise DistlibException("Invalid specification "
+                                   "'%s'" % specification)
+    else:
+        d = m.groupdict()
+        name = d['name']
+        path = d['callable']
+        colons = path.count(':')
+        if colons == 0:
+            prefix, suffix = path, None
+        else:
+            if colons != 1:
+                raise DistlibException("Invalid specification "
+                                       "'%s'" % specification)
+            prefix, suffix = path.split(':')
+        flags = d['flags']
+        if flags is None:
+            if '[' in specification or ']' in specification:
+                raise DistlibException("Invalid specification "
+                                       "'%s'" % specification)
+            flags = []
+        else:
+            flags = [f.strip() for f in flags.split(',')]
+        result = ExportEntry(name, prefix, suffix, flags)
+    return result
+
+
+def get_cache_base(suffix=None):
+    """
+    Return the default base location for distlib caches. If the directory does
+    not exist, it is created. Use the suffix provided for the base directory,
+    and default to '.distlib' if it isn't provided.
+
+    On Windows, if LOCALAPPDATA is defined in the environment, then it is
+    assumed to be a directory, and will be the parent directory of the result.
+    On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
+    directory - using os.expanduser('~') - will be the parent directory of
+    the result.
+
+    The result is just the directory '.distlib' in the parent directory as
+    determined above, or with the name specified with ``suffix``.
+    """
+    if suffix is None:
+        suffix = '.distlib'
+    if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:
+        result = os.path.expandvars('$localappdata')
+    else:
+        # Assume posix, or old Windows
+        result = os.path.expanduser('~')
+    # we use 'isdir' instead of 'exists', because we want to
+    # fail if there's a file with that name
+    if os.path.isdir(result):
+        usable = os.access(result, os.W_OK)
+        if not usable:
+            logger.warning('Directory exists but is not writable: %s', result)
+    else:
+        try:
+            os.makedirs(result)
+            usable = True
+        except OSError:
+            logger.warning('Unable to create %s', result, exc_info=True)
+            usable = False
+    if not usable:
+        result = tempfile.mkdtemp()
+        logger.warning('Default location unusable, using %s', result)
+    return os.path.join(result, suffix)
+
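+
+# Illustrative note (not part of upstream distlib): with no suffix argument,
+# get_cache_base() resolves to something like '~/.distlib' on POSIX, or
+# '%LOCALAPPDATA%\.distlib' on Windows, falling back to a temporary directory
+# if neither parent location is writable.
+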
+
+def path_to_cache_dir(path):
+    """
+    Convert an absolute path to a directory name for use in a cache.
+
+    The algorithm used is:
+
+    #. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
+    #. Any occurrence of ``os.sep`` is replaced with ``'--'``.
+    #. ``'.cache'`` is appended.
+    """
+    d, p = os.path.splitdrive(os.path.abspath(path))
+    if d:
+        d = d.replace(':', '---')
+    p = p.replace(os.sep, '--')
+    return d + p + '.cache'
+
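+
+# Illustrative sketch (not part of upstream distlib); the expected value follows
+# from the substitution rules documented above (POSIX form shown).
+def _path_to_cache_dir_sketch():
+    if os.sep == '/':
+        assert path_to_cache_dir('/srv/data/wheels') == '--srv--data--wheels.cache'
+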
+
+def ensure_slash(s):
+    if not s.endswith('/'):
+        return s + '/'
+    return s
+
+
+def parse_credentials(netloc):
+    username = password = None
+    if '@' in netloc:
+        prefix, netloc = netloc.rsplit('@', 1)
+        if ':' not in prefix:
+            username = prefix
+        else:
+            username, password = prefix.split(':', 1)
+    if username:
+        username = unquote(username)
+    if password:
+        password = unquote(password)
+    return username, password, netloc
+
+
+def get_process_umask():
+    result = os.umask(0o22)
+    os.umask(result)
+    return result
+
+
+def is_string_sequence(seq):
+    result = True
+    i = None
+    for i, s in enumerate(seq):
+        if not isinstance(s, string_types):
+            result = False
+            break
+    assert i is not None
+    return result
+
+
+PROJECT_NAME_AND_VERSION = re.compile(
+    '([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
+    '([a-z0-9_.+-]+)', re.I)
+PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')
+
+
+def split_filename(filename, project_name=None):
+    """
+    Extract name, version, python version from a filename (no extension)
+
+    Return name, version, pyver or None
+    """
+    result = None
+    pyver = None
+    filename = unquote(filename).replace(' ', '-')
+    m = PYTHON_VERSION.search(filename)
+    if m:
+        pyver = m.group(1)
+        filename = filename[:m.start()]
+    if project_name and len(filename) > len(project_name) + 1:
+        m = re.match(re.escape(project_name) + r'\b', filename)
+        if m:
+            n = m.end()
+            result = filename[:n], filename[n + 1:], pyver
+    if result is None:
+        m = PROJECT_NAME_AND_VERSION.match(filename)
+        if m:
+            result = m.group(1), m.group(3), pyver
+    return result
+
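+
+# Illustrative sketch (not part of upstream distlib); the values follow from
+# the PROJECT_NAME_AND_VERSION and PYTHON_VERSION regexes above.
+def _split_filename_sketch():
+    assert split_filename('nose-1.3.7-py2') == ('nose', '1.3.7', '2')
+    assert split_filename('nose-1.3.7') == ('nose', '1.3.7', None)
+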
+
+# Allow spaces in name because of legacy dists like "Twisted Core"
+NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'
+                             r'\(\s*(?P<ver>[^\s)]+)\)$')
+
+
+def parse_name_and_version(p):
+    """
+    A utility method used to get name and version from a string.
+
+    From e.g. a Provides-Dist value.
+
+    :param p: A value in a form 'foo (1.0)'
+    :return: The name and version as a tuple.
+    """
+    m = NAME_VERSION_RE.match(p)
+    if not m:
+        raise DistlibException('Ill-formed name/version string: \'%s\'' % p)
+    d = m.groupdict()
+    return d['name'].strip().lower(), d['ver']
+
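+
+# Illustrative sketch (not part of upstream distlib): the name is lower-cased
+# and stripped; the version is taken verbatim from the parentheses.
+def _parse_name_and_version_sketch():
+    assert parse_name_and_version('Twisted Core (11.0.0)') == ('twisted core', '11.0.0')
+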
+
+def get_extras(requested, available):
+    result = set()
+    requested = set(requested or [])
+    available = set(available or [])
+    if '*' in requested:
+        requested.remove('*')
+        result |= available
+    for r in requested:
+        if r == '-':
+            result.add(r)
+        elif r.startswith('-'):
+            unwanted = r[1:]
+            if unwanted not in available:
+                logger.warning('undeclared extra: %s' % unwanted)
+            if unwanted in result:
+                result.remove(unwanted)
+        else:
+            if r not in available:
+                logger.warning('undeclared extra: %s' % r)
+            result.add(r)
+    return result
+
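+# Illustrative usage sketch (hypothetical extras): '*' pulls in every
+# available extra, and a '-' prefix removes one again:
+#   >>> get_extras(['*', '-tests'], ['tests', 'docs'])
+#   {'docs'}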
+
+#
+# Extended metadata functionality
+#
+
+
+def _get_external_data(url):
+    result = {}
+    try:
+        # urlopen might fail if it runs into redirections,
+        # because of Python issue #13696. Fixed in locators
+        # using a custom redirect handler.
+        resp = urlopen(url)
+        headers = resp.info()
+        ct = headers.get('Content-Type')
+        if not ct.startswith('application/json'):
+            logger.debug('Unexpected response for JSON request: %s', ct)
+        else:
+            reader = codecs.getreader('utf-8')(resp)
+            # data = reader.read().decode('utf-8')
+            # result = json.loads(data)
+            result = json.load(reader)
+    except Exception as e:
+        logger.exception('Failed to get external data for %s: %s', url, e)
+    return result
+
+
+_external_data_base_url = 'https://www.red-dove.com/pypi/projects/'
+
+
+def get_project_data(name):
+    url = '%s/%s/project.json' % (name[0].upper(), name)
+    url = urljoin(_external_data_base_url, url)
+    result = _get_external_data(url)
+    return result
+
+
+def get_package_data(name, version):
+    url = '%s/%s/package-%s.json' % (name[0].upper(), name, version)
+    url = urljoin(_external_data_base_url, url)
+    return _get_external_data(url)
+
+
+class Cache(object):
+    """
+    A class implementing a cache for resources that need to live in the file
+    system, e.g. shared libraries. This class was moved here from resources
+    because it could be used by other modules, e.g. the wheel module.
+    """
+
+    def __init__(self, base):
+        """
+        Initialise an instance.
+
+        :param base: The base directory where the cache should be located.
+        """
+        # we use 'isdir' instead of 'exists', because we want to
+        # fail if there's a file with that name
+        if not os.path.isdir(base):  # pragma: no cover
+            os.makedirs(base)
+        if (os.stat(base).st_mode & 0o77) != 0:
+            logger.warning('Directory \'%s\' is not private', base)
+        self.base = os.path.abspath(os.path.normpath(base))
+
+    def prefix_to_dir(self, prefix):
+        """
+        Converts a resource prefix to a directory name in the cache.
+        """
+        return path_to_cache_dir(prefix)
+
+    def clear(self):
+        """
+        Clear the cache.
+        """
+        not_removed = []
+        for fn in os.listdir(self.base):
+            fn = os.path.join(self.base, fn)
+            try:
+                if os.path.islink(fn) or os.path.isfile(fn):
+                    os.remove(fn)
+                elif os.path.isdir(fn):
+                    shutil.rmtree(fn)
+            except Exception:
+                not_removed.append(fn)
+        return not_removed
+
+
+class EventMixin(object):
+    """
+    A very simple publish/subscribe system.
+    """
+
+    def __init__(self):
+        self._subscribers = {}
+
+    def add(self, event, subscriber, append=True):
+        """
+        Add a subscriber for an event.
+
+        :param event: The name of an event.
+        :param subscriber: The subscriber to be added (and called when the
+                           event is published).
+        :param append: Whether to append or prepend the subscriber to an
+                       existing subscriber list for the event.
+        """
+        subs = self._subscribers
+        if event not in subs:
+            subs[event] = deque([subscriber])
+        else:
+            sq = subs[event]
+            if append:
+                sq.append(subscriber)
+            else:
+                sq.appendleft(subscriber)
+
+    def remove(self, event, subscriber):
+        """
+        Remove a subscriber for an event.
+
+        :param event: The name of an event.
+        :param subscriber: The subscriber to be removed.
+        """
+        subs = self._subscribers
+        if event not in subs:
+            raise ValueError('No subscribers: %r' % event)
+        subs[event].remove(subscriber)
+
+    def get_subscribers(self, event):
+        """
+        Return an iterator for the subscribers for an event.
+        :param event: The event to return subscribers for.
+        """
+        return iter(self._subscribers.get(event, ()))
+
+    def publish(self, event, *args, **kwargs):
+        """
+        Publish an event and return a list of the values returned by its
+        subscribers.
+
+        :param event: The event to publish.
+        :param args: The positional arguments to pass to the event's
+                     subscribers.
+        :param kwargs: The keyword arguments to pass to the event's
+                       subscribers.
+        """
+        result = []
+        for subscriber in self.get_subscribers(event):
+            try:
+                value = subscriber(event, *args, **kwargs)
+            except Exception:
+                logger.exception('Exception during event publication')
+                value = None
+            result.append(value)
+        logger.debug('publish %s: args = %s, kwargs = %s, result = %s', event,
+                     args, kwargs, result)
+        return result
+
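+# Illustrative usage sketch (hypothetical event name): subscribers are called
+# as subscriber(event, *args, **kwargs):
+#   >>> bus = EventMixin()
+#   >>> bus.add('ping', lambda event, payload: payload * 2)
+#   >>> bus.publish('ping', 21)
+#   [42]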
+
+#
+# Simple sequencing
+#
+class Sequencer(object):
+
+    def __init__(self):
+        self._preds = {}
+        self._succs = {}
+        self._nodes = set()  # nodes with no preds/succs
+
+    def add_node(self, node):
+        self._nodes.add(node)
+
+    def remove_node(self, node, edges=False):
+        if node in self._nodes:
+            self._nodes.remove(node)
+        if edges:
+            for p in set(self._preds.get(node, ())):
+                self.remove(p, node)
+            for s in set(self._succs.get(node, ())):
+                self.remove(node, s)
+            # Remove empties
+            for k, v in list(self._preds.items()):
+                if not v:
+                    del self._preds[k]
+            for k, v in list(self._succs.items()):
+                if not v:
+                    del self._succs[k]
+
+    def add(self, pred, succ):
+        assert pred != succ
+        self._preds.setdefault(succ, set()).add(pred)
+        self._succs.setdefault(pred, set()).add(succ)
+
+    def remove(self, pred, succ):
+        assert pred != succ
+        try:
+            preds = self._preds[succ]
+            succs = self._succs[pred]
+        except KeyError:  # pragma: no cover
+            raise ValueError('%r not a successor of anything' % succ)
+        try:
+            preds.remove(pred)
+            succs.remove(succ)
+        except KeyError:  # pragma: no cover
+            raise ValueError('%r not a successor of %r' % (succ, pred))
+
+    def is_step(self, step):
+        return (step in self._preds or step in self._succs
+                or step in self._nodes)
+
+    def get_steps(self, final):
+        if not self.is_step(final):
+            raise ValueError('Unknown: %r' % final)
+        result = []
+        todo = []
+        seen = set()
+        todo.append(final)
+        while todo:
+            step = todo.pop(0)
+            if step in seen:
+                # if a step was already seen,
+                # move it to the end (so it will appear earlier
+                # when reversed on return) ... but not for the
+                # final step, as that would be confusing for
+                # users
+                if step != final:
+                    result.remove(step)
+                    result.append(step)
+            else:
+                seen.add(step)
+                result.append(step)
+                preds = self._preds.get(step, ())
+                todo.extend(preds)
+        return reversed(result)
+
+    @property
+    def strong_connections(self):
+        # http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
+        index_counter = [0]
+        stack = []
+        lowlinks = {}
+        index = {}
+        result = []
+
+        graph = self._succs
+
+        def strongconnect(node):
+            # set the depth index for this node to the smallest unused index
+            index[node] = index_counter[0]
+            lowlinks[node] = index_counter[0]
+            index_counter[0] += 1
+            stack.append(node)
+
+            # Consider successors
+            try:
+                successors = graph[node]
+            except Exception:
+                successors = []
+            for successor in successors:
+                if successor not in lowlinks:
+                    # Successor has not yet been visited
+                    strongconnect(successor)
+                    lowlinks[node] = min(lowlinks[node], lowlinks[successor])
+                elif successor in stack:
+                    # the successor is in the stack and hence in the current
+                    # strongly connected component (SCC)
+                    lowlinks[node] = min(lowlinks[node], index[successor])
+
+            # If `node` is a root node, pop the stack and generate an SCC
+            if lowlinks[node] == index[node]:
+                connected_component = []
+
+                while True:
+                    successor = stack.pop()
+                    connected_component.append(successor)
+                    if successor == node:
+                        break
+                component = tuple(connected_component)
+                # storing the result
+                result.append(component)
+
+        for node in graph:
+            if node not in lowlinks:
+                strongconnect(node)
+
+        return result
+
+    @property
+    def dot(self):
+        result = ['digraph G {']
+        for succ in self._preds:
+            preds = self._preds[succ]
+            for pred in preds:
+                result.append('  %s -> %s;' % (pred, succ))
+        for node in self._nodes:
+            result.append('  %s;' % node)
+        result.append('}')
+        return '\n'.join(result)
+
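+# Illustrative usage sketch (hypothetical step names): get_steps() yields
+# predecessors before successors:
+#   >>> seq = Sequencer()
+#   >>> seq.add('build', 'test')
+#   >>> seq.add('test', 'release')
+#   >>> list(seq.get_steps('release'))
+#   ['build', 'test', 'release']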
+
+#
+# Unarchiving functionality for zip, tar, tgz, tbz, whl
+#
+
+ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz',
+                      '.whl')
+
+
+def unarchive(archive_filename, dest_dir, format=None, check=True):
+
+    def check_path(path):
+        if not isinstance(path, text_type):
+            path = path.decode('utf-8')
+        p = os.path.abspath(os.path.join(dest_dir, path))
+        if not p.startswith(dest_dir) or p[plen] != os.sep:
+            raise ValueError('path outside destination: %r' % p)
+
+    dest_dir = os.path.abspath(dest_dir)
+    plen = len(dest_dir)
+    archive = None
+    if format is None:
+        if archive_filename.endswith(('.zip', '.whl')):
+            format = 'zip'
+        elif archive_filename.endswith(('.tar.gz', '.tgz')):
+            format = 'tgz'
+            mode = 'r:gz'
+        elif archive_filename.endswith(('.tar.bz2', '.tbz')):
+            format = 'tbz'
+            mode = 'r:bz2'
+        elif archive_filename.endswith('.tar'):
+            format = 'tar'
+            mode = 'r'
+        else:  # pragma: no cover
+            raise ValueError('Unknown format for %r' % archive_filename)
+    try:
+        if format == 'zip':
+            archive = ZipFile(archive_filename, 'r')
+            if check:
+                names = archive.namelist()
+                for name in names:
+                    check_path(name)
+        else:
+            archive = tarfile.open(archive_filename, mode)
+            if check:
+                names = archive.getnames()
+                for name in names:
+                    check_path(name)
+        if format != 'zip' and sys.version_info[0] < 3:
+            # See Python issue 17153. If the dest path contains Unicode,
+            # tarfile extraction fails on Python 2.x if a member path name
+            # contains non-ASCII characters - it leads to an implicit
+            # bytes -> unicode conversion using ASCII to decode.
+            for tarinfo in archive.getmembers():
+                if not isinstance(tarinfo.name, text_type):
+                    tarinfo.name = tarinfo.name.decode('utf-8')
+
+        # Limit extraction of dangerous items, if this Python
+        # allows it easily. If not, just trust the input.
+        # See: https://docs.python.org/3/library/tarfile.html#extraction-filters
+        def extraction_filter(member, path):
+            """Run tarfile.tar_filter, but raise the expected ValueError"""
+            # This is only called if the current Python has tarfile filters
+            try:
+                return tarfile.tar_filter(member, path)
+            except tarfile.FilterError as exc:
+                raise ValueError(str(exc))
+
+        archive.extraction_filter = extraction_filter
+
+        archive.extractall(dest_dir)
+
+    finally:
+        if archive:
+            archive.close()
+
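+# Note: with check=True, any archive member that would resolve outside
+# dest_dir (e.g. a hypothetical tar entry named '../evil.txt') makes
+# check_path() raise ValueError before anything is extracted.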
+
+def zip_dir(directory):
+    """zip a directory tree into a BytesIO object"""
+    result = io.BytesIO()
+    dlen = len(directory)
+    with ZipFile(result, "w") as zf:
+        for root, dirs, files in os.walk(directory):
+            for name in files:
+                full = os.path.join(root, name)
+                rel = root[dlen:]
+                dest = os.path.join(rel, name)
+                zf.write(full, dest)
+    return result
+
+
+#
+# Simple progress bar
+#
+
+UNITS = ('', 'K', 'M', 'G', 'T', 'P')
+
+
+class Progress(object):
+    unknown = 'UNKNOWN'
+
+    def __init__(self, minval=0, maxval=100):
+        assert maxval is None or maxval >= minval
+        self.min = self.cur = minval
+        self.max = maxval
+        self.started = None
+        self.elapsed = 0
+        self.done = False
+
+    def update(self, curval):
+        assert self.min <= curval
+        assert self.max is None or curval <= self.max
+        self.cur = curval
+        now = time.time()
+        if self.started is None:
+            self.started = now
+        else:
+            self.elapsed = now - self.started
+
+    def increment(self, incr):
+        assert incr >= 0
+        self.update(self.cur + incr)
+
+    def start(self):
+        self.update(self.min)
+        return self
+
+    def stop(self):
+        if self.max is not None:
+            self.update(self.max)
+        self.done = True
+
+    @property
+    def maximum(self):
+        return self.unknown if self.max is None else self.max
+
+    @property
+    def percentage(self):
+        if self.done:
+            result = '100 %'
+        elif self.max is None:
+            result = ' ?? %'
+        else:
+            v = 100.0 * (self.cur - self.min) / (self.max - self.min)
+            result = '%3d %%' % v
+        return result
+
+    def format_duration(self, duration):
+        if (duration <= 0) and self.max is None or self.cur == self.min:
+            result = '??:??:??'
+        # elif duration < 1:
+        #     result = '--:--:--'
+        else:
+            result = time.strftime('%H:%M:%S', time.gmtime(duration))
+        return result
+
+    @property
+    def ETA(self):
+        if self.done:
+            prefix = 'Done'
+            t = self.elapsed
+            # import pdb; pdb.set_trace()
+        else:
+            prefix = 'ETA '
+            if self.max is None:
+                t = -1
+            elif self.elapsed == 0 or (self.cur == self.min):
+                t = 0
+            else:
+                # import pdb; pdb.set_trace()
+                t = float(self.max - self.min)
+                t /= self.cur - self.min
+                t = (t - 1) * self.elapsed
+        return '%s: %s' % (prefix, self.format_duration(t))
+
+    @property
+    def speed(self):
+        if self.elapsed == 0:
+            result = 0.0
+        else:
+            result = (self.cur - self.min) / self.elapsed
+        for unit in UNITS:
+            if result < 1000:
+                break
+            result /= 1000.0
+        return '%d %sB/s' % (result, unit)
+
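+# Illustrative usage sketch (hypothetical values):
+#   >>> p = Progress(maxval=200).start()
+#   >>> p.update(50)
+#   >>> p.percentage
+#   ' 25 %'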
+
+#
+# Glob functionality
+#
+
+RICH_GLOB = re.compile(r'\{([^}]*)\}')
+_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
+_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')
+
+
+def iglob(path_glob):
+    """Extended globbing function that supports ** and {opt1,opt2,opt3}."""
+    if _CHECK_RECURSIVE_GLOB.search(path_glob):
+        msg = """invalid glob %r: recursive glob "**" must be used alone"""
+        raise ValueError(msg % path_glob)
+    if _CHECK_MISMATCH_SET.search(path_glob):
+        msg = """invalid glob %r: mismatching set marker '{' or '}'"""
+        raise ValueError(msg % path_glob)
+    return _iglob(path_glob)
+
+
+def _iglob(path_glob):
+    rich_path_glob = RICH_GLOB.split(path_glob, 1)
+    if len(rich_path_glob) > 1:
+        assert len(rich_path_glob) == 3, rich_path_glob
+        prefix, set, suffix = rich_path_glob
+        for item in set.split(','):
+            for path in _iglob(''.join((prefix, item, suffix))):
+                yield path
+    else:
+        if '**' not in path_glob:
+            for item in std_iglob(path_glob):
+                yield item
+        else:
+            prefix, radical = path_glob.split('**', 1)
+            if prefix == '':
+                prefix = '.'
+            if radical == '':
+                radical = '*'
+            else:
+                # we support both
+                radical = radical.lstrip('/')
+                radical = radical.lstrip('\\')
+            for path, dir, files in os.walk(prefix):
+                path = os.path.normpath(path)
+                for fn in _iglob(os.path.join(path, radical)):
+                    yield fn
+
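+# Note (hypothetical patterns): iglob('{src,tests}/*.py') expands the brace
+# set before globbing, iglob('docs/**/*.rst') walks 'docs' recursively, and a
+# '**' that is not alone in its path segment raises ValueError.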
+
+if ssl:
+    from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname,
+                         CertificateError)
+
+    #
+    # HTTPSConnection which verifies certificates/matches domains
+    #
+
+    class HTTPSConnection(httplib.HTTPSConnection):
+        ca_certs = None  # set this to the path to the certs file (.pem)
+        check_domain = True  # only used if ca_certs is not None
+
+        # noinspection PyPropertyAccess
+        def connect(self):
+            sock = socket.create_connection((self.host, self.port),
+                                            self.timeout)
+            if getattr(self, '_tunnel_host', False):
+                self.sock = sock
+                self._tunnel()
+
+            context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+            if hasattr(ssl, 'OP_NO_SSLv2'):
+                context.options |= ssl.OP_NO_SSLv2
+            if getattr(self, 'cert_file', None):
+                context.load_cert_chain(self.cert_file, self.key_file)
+            kwargs = {}
+            if self.ca_certs:
+                context.verify_mode = ssl.CERT_REQUIRED
+                context.load_verify_locations(cafile=self.ca_certs)
+                if getattr(ssl, 'HAS_SNI', False):
+                    kwargs['server_hostname'] = self.host
+
+            self.sock = context.wrap_socket(sock, **kwargs)
+            if self.ca_certs and self.check_domain:
+                try:
+                    match_hostname(self.sock.getpeercert(), self.host)
+                    logger.debug('Host verified: %s', self.host)
+                except CertificateError:  # pragma: no cover
+                    self.sock.shutdown(socket.SHUT_RDWR)
+                    self.sock.close()
+                    raise
+
+    class HTTPSHandler(BaseHTTPSHandler):
+
+        def __init__(self, ca_certs, check_domain=True):
+            BaseHTTPSHandler.__init__(self)
+            self.ca_certs = ca_certs
+            self.check_domain = check_domain
+
+        def _conn_maker(self, *args, **kwargs):
+            """
+            This is called to create a connection instance. Normally you'd
+            pass a connection class to do_open, but it doesn't actually check for
+            a class, and just expects a callable. As long as we behave just as a
+            constructor would have, we should be OK. If it ever changes so that
+            we *must* pass a class, we'll create an UnsafeHTTPSConnection class
+            which just sets check_domain to False in the class definition, and
+            choose which one to pass to do_open.
+            """
+            result = HTTPSConnection(*args, **kwargs)
+            if self.ca_certs:
+                result.ca_certs = self.ca_certs
+                result.check_domain = self.check_domain
+            return result
+
+        def https_open(self, req):
+            try:
+                return self.do_open(self._conn_maker, req)
+            except URLError as e:
+                if 'certificate verify failed' in str(e.reason):
+                    raise CertificateError(
+                        'Unable to verify server certificate '
+                        'for %s' % req.host)
+                else:
+                    raise
+
+    #
+    # To guard against mixing HTTP traffic with HTTPS (examples: a man-in-the-
+    # middle proxy using HTTP listens on port 443, or an index mistakenly serves
+    # HTML containing an http://xyz link when it should be https://xyz),
+    # you can use the following handler class, which does not allow HTTP traffic.
+    #
+    # It works by inheriting from HTTPHandler - so build_opener won't add a
+    # handler for HTTP itself.
+    #
+    class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler):
+
+        def http_open(self, req):
+            raise URLError(
+                'Unexpected HTTP request on what should be a secure '
+                'connection: %s' % req)
+
+
+#
+# XML-RPC with timeouts
+#
+class Transport(xmlrpclib.Transport):
+
+    def __init__(self, timeout, use_datetime=0):
+        self.timeout = timeout
+        xmlrpclib.Transport.__init__(self, use_datetime)
+
+    def make_connection(self, host):
+        h, eh, x509 = self.get_host_info(host)
+        if not self._connection or host != self._connection[0]:
+            self._extra_headers = eh
+            self._connection = host, httplib.HTTPConnection(h)
+        return self._connection[1]
+
+
+if ssl:
+
+    class SafeTransport(xmlrpclib.SafeTransport):
+
+        def __init__(self, timeout, use_datetime=0):
+            self.timeout = timeout
+            xmlrpclib.SafeTransport.__init__(self, use_datetime)
+
+        def make_connection(self, host):
+            h, eh, kwargs = self.get_host_info(host)
+            if not kwargs:
+                kwargs = {}
+            kwargs['timeout'] = self.timeout
+            if not self._connection or host != self._connection[0]:
+                self._extra_headers = eh
+                self._connection = host, httplib.HTTPSConnection(
+                    h, None, **kwargs)
+            return self._connection[1]
+
+
+class ServerProxy(xmlrpclib.ServerProxy):
+
+    def __init__(self, uri, **kwargs):
+        self.timeout = timeout = kwargs.pop('timeout', None)
+        # The above classes only come into play if a timeout
+        # is specified
+        if timeout is not None:
+            # scheme = splittype(uri)  # deprecated as of Python 3.8
+            scheme = urlparse(uri)[0]
+            use_datetime = kwargs.get('use_datetime', 0)
+            if scheme == 'https':
+                tcls = SafeTransport
+            else:
+                tcls = Transport
+            kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime)
+            self.transport = t
+        xmlrpclib.ServerProxy.__init__(self, uri, **kwargs)
+
+
+#
+# CSV functionality. This is provided because on 2.x, the csv module can't
+# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files.
+#
+
+
+def _csv_open(fn, mode, **kwargs):
+    if sys.version_info[0] < 3:
+        mode += 'b'
+    else:
+        kwargs['newline'] = ''
+        # Python 3 determines encoding from locale. Force 'utf-8'
+        # file encoding to match other forced utf-8 encoding
+        kwargs['encoding'] = 'utf-8'
+    return open(fn, mode, **kwargs)
+
+
+class CSVBase(object):
+    defaults = {
+        'delimiter': str(','),  # The strs are used because we need native
+        'quotechar': str('"'),  # str in the csv API (2.x won't take
+        'lineterminator': str('\n')  # Unicode)
+    }
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *exc_info):
+        self.stream.close()
+
+
+class CSVReader(CSVBase):
+
+    def __init__(self, **kwargs):
+        if 'stream' in kwargs:
+            stream = kwargs['stream']
+            if sys.version_info[0] >= 3:
+                # needs to be a text stream
+                stream = codecs.getreader('utf-8')(stream)
+            self.stream = stream
+        else:
+            self.stream = _csv_open(kwargs['path'], 'r')
+        self.reader = csv.reader(self.stream, **self.defaults)
+
+    def __iter__(self):
+        return self
+
+    def next(self):
+        result = next(self.reader)
+        if sys.version_info[0] < 3:
+            for i, item in enumerate(result):
+                if not isinstance(item, text_type):
+                    result[i] = item.decode('utf-8')
+        return result
+
+    __next__ = next
+
+
+class CSVWriter(CSVBase):
+
+    def __init__(self, fn, **kwargs):
+        self.stream = _csv_open(fn, 'w')
+        self.writer = csv.writer(self.stream, **self.defaults)
+
+    def writerow(self, row):
+        if sys.version_info[0] < 3:
+            r = []
+            for item in row:
+                if isinstance(item, text_type):
+                    item = item.encode('utf-8')
+                r.append(item)
+            row = r
+        self.writer.writerow(row)
+
+
+#
+#   Configurator functionality
+#
+
+
+class Configurator(BaseConfigurator):
+
+    value_converters = dict(BaseConfigurator.value_converters)
+    value_converters['inc'] = 'inc_convert'
+
+    def __init__(self, config, base=None):
+        super(Configurator, self).__init__(config)
+        self.base = base or os.getcwd()
+
+    def configure_custom(self, config):
+
+        def convert(o):
+            if isinstance(o, (list, tuple)):
+                result = type(o)([convert(i) for i in o])
+            elif isinstance(o, dict):
+                if '()' in o:
+                    result = self.configure_custom(o)
+                else:
+                    result = {}
+                    for k in o:
+                        result[k] = convert(o[k])
+            else:
+                result = self.convert(o)
+            return result
+
+        c = config.pop('()')
+        if not callable(c):
+            c = self.resolve(c)
+        props = config.pop('.', None)
+        # Check for valid identifiers
+        args = config.pop('[]', ())
+        if args:
+            args = tuple([convert(o) for o in args])
+        items = [(k, convert(config[k])) for k in config if valid_ident(k)]
+        kwargs = dict(items)
+        result = c(*args, **kwargs)
+        if props:
+            for n, v in props.items():
+                setattr(result, n, convert(v))
+        return result
+
+    def __getitem__(self, key):
+        result = self.config[key]
+        if isinstance(result, dict) and '()' in result:
+            self.config[key] = result = self.configure_custom(result)
+        return result
+
+    def inc_convert(self, value):
+        """Default converter for the inc:// protocol."""
+        if not os.path.isabs(value):
+            value = os.path.join(self.base, value)
+        with codecs.open(value, 'r', encoding='utf-8') as f:
+            result = json.load(f)
+        return result
+
+
+class SubprocessMixin(object):
+    """
+    Mixin for running subprocesses and capturing their output
+    """
+
+    def __init__(self, verbose=False, progress=None):
+        self.verbose = verbose
+        self.progress = progress
+
+    def reader(self, stream, context):
+        """
+        Read lines from a subprocess' output stream and either pass to a progress
+        callable (if specified) or write progress information to sys.stderr.
+        """
+        progress = self.progress
+        verbose = self.verbose
+        while True:
+            s = stream.readline()
+            if not s:
+                break
+            if progress is not None:
+                progress(s, context)
+            else:
+                if not verbose:
+                    sys.stderr.write('.')
+                else:
+                    sys.stderr.write(s.decode('utf-8'))
+                sys.stderr.flush()
+        stream.close()
+
+    def run_command(self, cmd, **kwargs):
+        p = subprocess.Popen(cmd,
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE,
+                             **kwargs)
+        t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout'))
+        t1.start()
+        t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr'))
+        t2.start()
+        p.wait()
+        t1.join()
+        t2.join()
+        if self.progress is not None:
+            self.progress('done.', 'main')
+        elif self.verbose:
+            sys.stderr.write('done.\n')
+        return p
+
+
+def normalize_name(name):
+    """Normalize a python package name a la PEP 503"""
+    # https://www.python.org/dev/peps/pep-0503/#normalized-names
+    return re.sub('[-_.]+', '-', name).lower()
+
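+# Illustrative usage sketch (hypothetical name):
+#   >>> normalize_name('Foo__Bar.baz')
+#   'foo-bar-baz'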
+
+# def _get_pypirc_command():
+# """
+# Get the distutils command for interacting with PyPI configurations.
+# :return: the command.
+# """
+# from distutils.core import Distribution
+# from distutils.config import PyPIRCCommand
+# d = Distribution()
+# return PyPIRCCommand(d)
+
+
+class PyPIRCFile(object):
+
+    DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/'
+    DEFAULT_REALM = 'pypi'
+
+    def __init__(self, fn=None, url=None):
+        if fn is None:
+            fn = os.path.join(os.path.expanduser('~'), '.pypirc')
+        self.filename = fn
+        self.url = url
+
+    def read(self):
+        result = {}
+
+        if os.path.exists(self.filename):
+            repository = self.url or self.DEFAULT_REPOSITORY
+
+            config = configparser.RawConfigParser()
+            config.read(self.filename)
+            sections = config.sections()
+            if 'distutils' in sections:
+                # let's get the list of servers
+                index_servers = config.get('distutils', 'index-servers')
+                _servers = [
+                    server.strip() for server in index_servers.split('\n')
+                    if server.strip() != ''
+                ]
+                if _servers == []:
+                    # nothing set, let's try to get the default pypi
+                    if 'pypi' in sections:
+                        _servers = ['pypi']
+                else:
+                    for server in _servers:
+                        result = {'server': server}
+                        result['username'] = config.get(server, 'username')
+
+                        # optional params
+                        for key, default in (('repository',
+                                              self.DEFAULT_REPOSITORY),
+                                             ('realm', self.DEFAULT_REALM),
+                                             ('password', None)):
+                            if config.has_option(server, key):
+                                result[key] = config.get(server, key)
+                            else:
+                                result[key] = default
+
+                        # work around people having "repository" for the "pypi"
+                        # section of their config set to the HTTP (rather than
+                        # HTTPS) URL
+                        if (server == 'pypi' and repository
+                                in (self.DEFAULT_REPOSITORY, 'pypi')):
+                            result['repository'] = self.DEFAULT_REPOSITORY
+                        elif (result['server'] != repository
+                              and result['repository'] != repository):
+                            result = {}
+            elif 'server-login' in sections:
+                # old format
+                server = 'server-login'
+                if config.has_option(server, 'repository'):
+                    repository = config.get(server, 'repository')
+                else:
+                    repository = self.DEFAULT_REPOSITORY
+                result = {
+                    'username': config.get(server, 'username'),
+                    'password': config.get(server, 'password'),
+                    'repository': repository,
+                    'server': server,
+                    'realm': self.DEFAULT_REALM
+                }
+        return result
+
+    def update(self, username, password):
+        # import pdb; pdb.set_trace()
+        config = configparser.RawConfigParser()
+        fn = self.filename
+        config.read(fn)
+        if not config.has_section('pypi'):
+            config.add_section('pypi')
+        config.set('pypi', 'username', username)
+        config.set('pypi', 'password', password)
+        with open(fn, 'w') as f:
+            config.write(f)
+
+
+def _load_pypirc(index):
+    """
+    Read the PyPI access configuration as supported by distutils.
+    """
+    return PyPIRCFile(url=index.url).read()
+
+
+def _store_pypirc(index):
+    PyPIRCFile().update(index.username, index.password)
+
+
+#
+# get_platform()/get_host_platform() copied from Python 3.10.a0 source, with some minor
+# tweaks
+#
+
+
+def get_host_platform():
+    """Return a string that identifies the current platform.  This is used mainly to
+    distinguish platform-specific build directories and platform-specific built
+    distributions.  Typically includes the OS name and version and the
+    architecture (as supplied by 'os.uname()'), although the exact information
+    included depends on the OS; e.g. on Linux, the kernel version isn't
+    particularly important.
+
+    Examples of returned values:
+       linux-i586
+       linux-alpha (?)
+       solaris-2.6-sun4u
+
+    Windows will return one of:
+       win-amd64 (64-bit Windows on AMD64, aka x86_64, Intel64, EM64T, etc.)
+       win32 (all others - specifically, sys.platform is returned)
+
+    For other non-POSIX platforms, currently just returns 'sys.platform'.
+
+    """
+    if os.name == 'nt':
+        if 'amd64' in sys.version.lower():
+            return 'win-amd64'
+        if '(arm)' in sys.version.lower():
+            return 'win-arm32'
+        if '(arm64)' in sys.version.lower():
+            return 'win-arm64'
+        return sys.platform
+
+    # Set for cross builds explicitly
+    if "_PYTHON_HOST_PLATFORM" in os.environ:
+        return os.environ["_PYTHON_HOST_PLATFORM"]
+
+    if os.name != 'posix' or not hasattr(os, 'uname'):
+        # XXX what about the architecture? NT is Intel or Alpha,
+        # Mac OS is M68k or PPC, etc.
+        return sys.platform
+
+    # Try to distinguish various flavours of Unix
+
+    (osname, host, release, version, machine) = os.uname()
+
+    # Convert the OS name to lowercase, remove '/' characters, and translate
+    # spaces (for "Power Macintosh")
+    osname = osname.lower().replace('/', '')
+    machine = machine.replace(' ', '_').replace('/', '-')
+
+    if osname[:5] == 'linux':
+        # At least on Linux/Intel, 'machine' is the processor --
+        # i386, etc.
+        # XXX what about Alpha, SPARC, etc?
+        return "%s-%s" % (osname, machine)
+
+    elif osname[:5] == 'sunos':
+        if release[0] >= '5':  # SunOS 5 == Solaris 2
+            osname = 'solaris'
+            release = '%d.%s' % (int(release[0]) - 3, release[2:])
+            # We can't use 'platform.architecture()[0]' because of a
+            # bootstrap problem. We use a dict to get an error
+            # if something suspicious happens.
+            bitness = {2147483647: '32bit', 9223372036854775807: '64bit'}
+            machine += '.%s' % bitness[sys.maxsize]
+        # fall through to standard osname-release-machine representation
+    elif osname[:3] == 'aix':
+        from _aix_support import aix_platform
+        return aix_platform()
+    elif osname[:6] == 'cygwin':
+        osname = 'cygwin'
+        rel_re = re.compile(r'[\d.]+', re.ASCII)
+        m = rel_re.match(release)
+        if m:
+            release = m.group()
+    elif osname[:6] == 'darwin':
+        import _osx_support
+        try:
+            from distutils import sysconfig
+        except ImportError:
+            import sysconfig
+        osname, release, machine = _osx_support.get_platform_osx(
+            sysconfig.get_config_vars(), osname, release, machine)
+
+    return '%s-%s-%s' % (osname, release, machine)
+
+
+_TARGET_TO_PLAT = {
+    'x86': 'win32',
+    'x64': 'win-amd64',
+    'arm': 'win-arm32',
+}
+
+
+def get_platform():
+    if os.name != 'nt':
+        return get_host_platform()
+    cross_compilation_target = os.environ.get('VSCMD_ARG_TGT_ARCH')
+    if cross_compilation_target not in _TARGET_TO_PLAT:
+        return get_host_platform()
+    return _TARGET_TO_PLAT[cross_compilation_target]
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/version.py b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/version.py
new file mode 100644
index 0000000..14171ac
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/version.py
@@ -0,0 +1,751 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2023 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""
+Implementation of a flexible versioning scheme providing support for PEP-440,
+setuptools-compatible and semantic versioning.
+"""
+
+import logging
+import re
+
+from .compat import string_types
+from .util import parse_requirement
+
+__all__ = ['NormalizedVersion', 'NormalizedMatcher',
+           'LegacyVersion', 'LegacyMatcher',
+           'SemanticVersion', 'SemanticMatcher',
+           'UnsupportedVersionError', 'get_scheme']
+
+logger = logging.getLogger(__name__)
+
+
+class UnsupportedVersionError(ValueError):
+    """This is an unsupported version."""
+    pass
+
+
+class Version(object):
+    def __init__(self, s):
+        self._string = s = s.strip()
+        self._parts = parts = self.parse(s)
+        assert isinstance(parts, tuple)
+        assert len(parts) > 0
+
+    def parse(self, s):
+        raise NotImplementedError('please implement in a subclass')
+
+    def _check_compatible(self, other):
+        if type(self) != type(other):
+            raise TypeError('cannot compare %r and %r' % (self, other))
+
+    def __eq__(self, other):
+        self._check_compatible(other)
+        return self._parts == other._parts
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __lt__(self, other):
+        self._check_compatible(other)
+        return self._parts < other._parts
+
+    def __gt__(self, other):
+        return not (self.__lt__(other) or self.__eq__(other))
+
+    def __le__(self, other):
+        return self.__lt__(other) or self.__eq__(other)
+
+    def __ge__(self, other):
+        return self.__gt__(other) or self.__eq__(other)
+
+    # See http://docs.python.org/reference/datamodel#object.__hash__
+    def __hash__(self):
+        return hash(self._parts)
+
+    def __repr__(self):
+        return "%s('%s')" % (self.__class__.__name__, self._string)
+
+    def __str__(self):
+        return self._string
+
+    @property
+    def is_prerelease(self):
+        raise NotImplementedError('Please implement in subclasses.')
+
+
+class Matcher(object):
+    version_class = None
+
+    # value is either a callable or the name of a method
+    _operators = {
+        '<': lambda v, c, p: v < c,
+        '>': lambda v, c, p: v > c,
+        '<=': lambda v, c, p: v == c or v < c,
+        '>=': lambda v, c, p: v == c or v > c,
+        '==': lambda v, c, p: v == c,
+        '===': lambda v, c, p: v == c,
+        # by default, compatible => >=.
+        '~=': lambda v, c, p: v == c or v > c,
+        '!=': lambda v, c, p: v != c,
+    }
+
+    # this is a method only to support alternative implementations
+    # via overriding
+    def parse_requirement(self, s):
+        return parse_requirement(s)
+
+    def __init__(self, s):
+        if self.version_class is None:
+            raise ValueError('Please specify a version class')
+        self._string = s = s.strip()
+        r = self.parse_requirement(s)
+        if not r:
+            raise ValueError('Not valid: %r' % s)
+        self.name = r.name
+        self.key = self.name.lower()    # for case-insensitive comparisons
+        clist = []
+        if r.constraints:
+            # import pdb; pdb.set_trace()
+            for op, s in r.constraints:
+                if s.endswith('.*'):
+                    if op not in ('==', '!='):
+                        raise ValueError('\'.*\' not allowed for '
+                                         '%r constraints' % op)
+                    # Could be a partial version (e.g. for '2.*') which
+                    # won't parse as a version, so keep it as a string
+                    vn, prefix = s[:-2], True
+                    # Just to check that vn is a valid version
+                    self.version_class(vn)
+                else:
+                    # Should parse as a version, so we can create an
+                    # instance for the comparison
+                    vn, prefix = self.version_class(s), False
+                clist.append((op, vn, prefix))
+        self._parts = tuple(clist)
+
+    def match(self, version):
+        """
+        Check if the provided version matches the constraints.
+
+        :param version: The version to match against this instance.
+        :type version: String or :class:`Version` instance.
+        """
+        if isinstance(version, string_types):
+            version = self.version_class(version)
+        for operator, constraint, prefix in self._parts:
+            f = self._operators.get(operator)
+            if isinstance(f, string_types):
+                f = getattr(self, f)
+            if not f:
+                msg = ('%r not implemented '
+                       'for %s' % (operator, self.__class__.__name__))
+                raise NotImplementedError(msg)
+            if not f(version, constraint, prefix):
+                return False
+        return True
+
+    @property
+    def exact_version(self):
+        result = None
+        if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='):
+            result = self._parts[0][1]
+        return result
+
+    def _check_compatible(self, other):
+        if type(self) != type(other) or self.name != other.name:
+            raise TypeError('cannot compare %s and %s' % (self, other))
+
+    def __eq__(self, other):
+        self._check_compatible(other)
+        return self.key == other.key and self._parts == other._parts
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    # See http://docs.python.org/reference/datamodel#object.__hash__
+    def __hash__(self):
+        return hash(self.key) + hash(self._parts)
+
+    def __repr__(self):
+        return "%s(%r)" % (self.__class__.__name__, self._string)
+
+    def __str__(self):
+        return self._string
+
+
+PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|alpha|b|beta|c|rc|pre|preview)(\d+)?)?'
+                               r'(\.(post|r|rev)(\d+)?)?([._-]?(dev)(\d+)?)?'
+                               r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$', re.I)
+
+
+def _pep_440_key(s):
+    s = s.strip()
+    m = PEP440_VERSION_RE.match(s)
+    if not m:
+        raise UnsupportedVersionError('Not a valid version: %s' % s)
+    groups = m.groups()
+    nums = tuple(int(v) for v in groups[1].split('.'))
+    while len(nums) > 1 and nums[-1] == 0:
+        nums = nums[:-1]
+
+    if not groups[0]:
+        epoch = 0
+    else:
+        epoch = int(groups[0][:-1])
+    pre = groups[4:6]
+    post = groups[7:9]
+    dev = groups[10:12]
+    local = groups[13]
+    if pre == (None, None):
+        pre = ()
+    else:
+        if pre[1] is None:
+            pre = pre[0], 0
+        else:
+            pre = pre[0], int(pre[1])
+    if post == (None, None):
+        post = ()
+    else:
+        if post[1] is None:
+            post = post[0], 0
+        else:
+            post = post[0], int(post[1])
+    if dev == (None, None):
+        dev = ()
+    else:
+        if dev[1] is None:
+            dev = dev[0], 0
+        else:
+            dev = dev[0], int(dev[1])
+    if local is None:
+        local = ()
+    else:
+        parts = []
+        for part in local.split('.'):
+            # to ensure that numeric compares as > lexicographic, avoid
+            # comparing them directly, but encode a tuple which ensures
+            # correct sorting
+            if part.isdigit():
+                part = (1, int(part))
+            else:
+                part = (0, part)
+            parts.append(part)
+        local = tuple(parts)
+    if not pre:
+        # either before pre-release, or final release and after
+        if not post and dev:
+            # before pre-release
+            pre = ('a', -1)     # to sort before a0
+        else:
+            pre = ('z',)        # to sort after all pre-releases
+    # now look at the state of post and dev.
+    if not post:
+        post = ('_',)   # sort before 'a'
+    if not dev:
+        dev = ('final',)
+
+    return epoch, nums, pre, post, dev, local
+
+
+_normalized_key = _pep_440_key
+
+
+class NormalizedVersion(Version):
+    """A rational version.
+
+    Good:
+        1.2         # equivalent to "1.2.0"
+        1.2.0
+        1.2a1
+        1.2.3a2
+        1.2.3b1
+        1.2.3c1
+        1.2.3.4
+        TODO: fill this out
+
+    Bad:
+        1           # minimum two numbers
+        1.2a        # release level must have a release serial
+        1.2.3b
+    """
+    def parse(self, s):
+        result = _normalized_key(s)
+        # _normalized_key loses trailing zeroes in the release
+        # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0
+        # However, PEP 440 prefix matching needs it: for example,
+        # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0).
+        m = PEP440_VERSION_RE.match(s)      # must succeed
+        groups = m.groups()
+        self._release_clause = tuple(int(v) for v in groups[1].split('.'))
+        return result
+
+    PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev'])
+
+    @property
+    def is_prerelease(self):
+        return any(t[0] in self.PREREL_TAGS for t in self._parts if t)
+
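+# Illustrative usage sketch (hypothetical versions): pre-releases sort before
+# the corresponding final release:
+#   >>> NormalizedVersion('1.2.3b1') < NormalizedVersion('1.2.3')
+#   True
+#   >>> NormalizedVersion('1.2.3b1').is_prerelease
+#   True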
+
+def _match_prefix(x, y):
+    x = str(x)
+    y = str(y)
+    if x == y:
+        return True
+    if not x.startswith(y):
+        return False
+    n = len(y)
+    return x[n] == '.'
+
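+# Illustrative usage sketch (hypothetical values): a prefix only matches on a
+# release-segment boundary:
+#   >>> _match_prefix('1.4.5', '1.4')
+#   True
+#   >>> _match_prefix('1.45', '1.4')
+#   False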
+
+class NormalizedMatcher(Matcher):
+    version_class = NormalizedVersion
+
+    # value is either a callable or the name of a method
+    _operators = {
+        '~=': '_match_compatible',
+        '<': '_match_lt',
+        '>': '_match_gt',
+        '<=': '_match_le',
+        '>=': '_match_ge',
+        '==': '_match_eq',
+        '===': '_match_arbitrary',
+        '!=': '_match_ne',
+    }
+
+    def _adjust_local(self, version, constraint, prefix):
+        if prefix:
+            strip_local = '+' not in constraint and version._parts[-1]
+        else:
+            # both constraint and version are
+            # NormalizedVersion instances.
+            # If constraint does not have a local component,
+            # ensure the version doesn't, either.
+            strip_local = not constraint._parts[-1] and version._parts[-1]
+        if strip_local:
+            s = version._string.split('+', 1)[0]
+            version = self.version_class(s)
+        return version, constraint
+
+    def _match_lt(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if version >= constraint:
+            return False
+        release_clause = constraint._release_clause
+        pfx = '.'.join([str(i) for i in release_clause])
+        return not _match_prefix(version, pfx)
+
+    def _match_gt(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if version <= constraint:
+            return False
+        release_clause = constraint._release_clause
+        pfx = '.'.join([str(i) for i in release_clause])
+        return not _match_prefix(version, pfx)
+
+    def _match_le(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        return version <= constraint
+
+    def _match_ge(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        return version >= constraint
+
+    def _match_eq(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if not prefix:
+            result = (version == constraint)
+        else:
+            result = _match_prefix(version, constraint)
+        return result
+
+    def _match_arbitrary(self, version, constraint, prefix):
+        return str(version) == str(constraint)
+
+    def _match_ne(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if not prefix:
+            result = (version != constraint)
+        else:
+            result = not _match_prefix(version, constraint)
+        return result
+
+    def _match_compatible(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if version == constraint:
+            return True
+        if version < constraint:
+            return False
+#        if not prefix:
+#            return True
+        release_clause = constraint._release_clause
+        if len(release_clause) > 1:
+            release_clause = release_clause[:-1]
+        pfx = '.'.join([str(i) for i in release_clause])
+        return _match_prefix(version, pfx)
+
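+# Illustrative usage sketch (hypothetical requirement, assuming
+# parse_requirement() accepts the parenthesized constraint form):
+#   >>> m = NormalizedMatcher('foo (~= 1.4.5)')
+#   >>> m.match('1.4.9'), m.match('1.5.0')
+#   (True, False)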
+
+_REPLACEMENTS = (
+    (re.compile('[.+-]$'), ''),                     # remove trailing puncts
+    (re.compile(r'^[.](\d)'), r'0.\1'),             # .N -> 0.N at start
+    (re.compile('^[.-]'), ''),                      # remove leading puncts
+    (re.compile(r'^\((.*)\)$'), r'\1'),             # remove parentheses
+    (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'),    # remove leading v(ersion)
+    (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'),        # remove leading r(ev)
+    (re.compile('[.]{2,}'), '.'),                   # multiple runs of '.'
+    (re.compile(r'\b(alfa|apha)\b'), 'alpha'),      # misspelt alpha
+    (re.compile(r'\b(pre-alpha|prealpha)\b'),
+        'pre.alpha'),                               # standardise
+    (re.compile(r'\(beta\)$'), 'beta'),             # remove parentheses
+)
+
+_SUFFIX_REPLACEMENTS = (
+    (re.compile('^[:~._+-]+'), ''),                   # remove leading puncts
+    (re.compile('[,*")([\\]]'), ''),                  # remove unwanted chars
+    (re.compile('[~:+_ -]'), '.'),                    # replace illegal chars
+    (re.compile('[.]{2,}'), '.'),                   # multiple runs of '.'
+    (re.compile(r'\.$'), ''),                       # trailing '.'
+)
+
+_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)')
+
+
+def _suggest_semantic_version(s):
+    """
+    Try to suggest a semantic form for a version for which
+    _suggest_normalized_version couldn't come up with anything.
+    """
+    result = s.strip().lower()
+    for pat, repl in _REPLACEMENTS:
+        result = pat.sub(repl, result)
+    if not result:
+        result = '0.0.0'
+
+    # Now look for numeric prefix, and separate it out from
+    # the rest.
+    # import pdb; pdb.set_trace()
+    m = _NUMERIC_PREFIX.match(result)
+    if not m:
+        prefix = '0.0.0'
+        suffix = result
+    else:
+        prefix = m.groups()[0].split('.')
+        prefix = [int(i) for i in prefix]
+        while len(prefix) < 3:
+            prefix.append(0)
+        if len(prefix) == 3:
+            suffix = result[m.end():]
+        else:
+            suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():]
+            prefix = prefix[:3]
+        prefix = '.'.join([str(i) for i in prefix])
+        suffix = suffix.strip()
+    if suffix:
+        # import pdb; pdb.set_trace()
+        # massage the suffix.
+        for pat, repl in _SUFFIX_REPLACEMENTS:
+            suffix = pat.sub(repl, suffix)
+
+    if not suffix:
+        result = prefix
+    else:
+        sep = '-' if 'dev' in suffix else '+'
+        result = prefix + sep + suffix
+    if not is_semver(result):
+        result = None
+    return result
+
+
+def _suggest_normalized_version(s):
+    """Suggest a normalized version close to the given version string.
+
+    If you have a version string that isn't rational (i.e. NormalizedVersion
+    doesn't like it) then you might be able to get an equivalent (or close)
+    rational version from this function.
+
+    This does a number of simple normalizations to the given string, based
+    on observation of versions currently in use on PyPI. Given a dump of
+    those versions during PyCon 2009, out of 4287 of them:
+    - 2312 (53.93%) match NormalizedVersion without change
+    - 3474 (81.04%) match when using the automatic suggestion from this method
+
+    @param s {str} An irrational version string.
+    @returns A rational version string, or None, if couldn't determine one.
+    """
+    try:
+        _normalized_key(s)
+        return s   # already rational
+    except UnsupportedVersionError:
+        pass
+
+    rs = s.lower()
+
+    # part of this could use maketrans
+    for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
+                       ('beta', 'b'), ('rc', 'c'), ('-final', ''),
+                       ('-pre', 'c'),
+                       ('-release', ''), ('.release', ''), ('-stable', ''),
+                       ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
+                       ('final', '')):
+        rs = rs.replace(orig, repl)
+
+    # if something ends with dev or pre, we add a 0
+    rs = re.sub(r"pre$", r"pre0", rs)
+    rs = re.sub(r"dev$", r"dev0", rs)
+
+    # if we have something like "b-2" or "a.2" at the end of the
+    # version, that is probably beta, alpha, etc
+    # let's remove the dash or dot
+    rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)
+
+    # 1.0-dev-r371 -> 1.0.dev371
+    # 0.1-dev-r79 -> 0.1.dev79
+    rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)
+
+    # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
+    rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)
+
+    # Clean: v0.3, v1.0
+    if rs.startswith('v'):
+        rs = rs[1:]
+
+    # Clean leading '0's on numbers.
+    # TODO: unintended side-effect on, e.g., "2003.05.09"
+    # PyPI stats: 77 (~2%) better
+    rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)
+
+    # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
+    # zero.
+    # PyPI stats: 245 (7.56%) better
+    rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)
+
+    # the 'dev-rNNN' tag is a dev tag
+    rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)
+
+    # clean the - when used as a pre delimiter
+    rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)
+
+    # a terminal "dev" or "devel" can be changed into ".dev0"
+    rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)
+
+    # a terminal "dev" can be changed into ".dev0"
+    rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)
+
+    # a terminal "final" or "stable" can be removed
+    rs = re.sub(r"(final|stable)$", "", rs)
+
+    # The 'r' and the '-' tags are post release tags
+    #   0.4a1.r10       ->  0.4a1.post10
+    #   0.9.33-17222    ->  0.9.33.post17222
+    #   0.9.33-r17222   ->  0.9.33.post17222
+    rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)
+
+    # Clean 'r' instead of 'dev' usage:
+    #   0.9.33+r17222   ->  0.9.33.dev17222
+    #   1.0dev123       ->  1.0.dev123
+    #   1.0.git123      ->  1.0.dev123
+    #   1.0.bzr123      ->  1.0.dev123
+    #   0.1a0dev.123    ->  0.1a0.dev123
+    # PyPI stats:  ~150 (~4%) better
+    rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)
+
+    # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
+    #   0.2.pre1        ->  0.2c1
+    #   0.2-c1         ->  0.2c1
+    #   1.0preview123   ->  1.0c123
+    # PyPI stats: ~21 (0.62%) better
+    rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)
+
+    # Tcl/Tk uses "px" for their post release markers
+    rs = re.sub(r"p(\d+)$", r".post\1", rs)
+
+    try:
+        _normalized_key(rs)
+    except UnsupportedVersionError:
+        rs = None
+    return rs
+
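+
+# Illustrative usage sketch added for exposition; not part of upstream
+# distlib. The expected results follow the mappings documented in the
+# comments inside _suggest_normalized_version above; they are hedged, not
+# authoritative.
+def _example_suggest_normalized_version():
+    # A '-dev-rNNN' tag and a plain '-rNNN' post-release marker are both
+    # rewritten into the normalized scheme.
+    return (_suggest_normalized_version('1.0-dev-r371'),    # '1.0.dev371' expected
+            _suggest_normalized_version('0.9.33-r17222'))   # '0.9.33.post17222' expected
+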
+#
+#   Legacy version processing (distribute-compatible)
+#
+
+
+_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I)
+_VERSION_REPLACE = {
+    'pre': 'c',
+    'preview': 'c',
+    '-': 'final-',
+    'rc': 'c',
+    'dev': '@',
+    '': None,
+    '.': None,
+}
+
+
+def _legacy_key(s):
+    def get_parts(s):
+        result = []
+        for p in _VERSION_PART.split(s.lower()):
+            p = _VERSION_REPLACE.get(p, p)
+            if p:
+                if '0' <= p[:1] <= '9':
+                    p = p.zfill(8)
+                else:
+                    p = '*' + p
+                result.append(p)
+        result.append('*final')
+        return result
+
+    result = []
+    for p in get_parts(s):
+        if p.startswith('*'):
+            if p < '*final':
+                while result and result[-1] == '*final-':
+                    result.pop()
+            while result and result[-1] == '00000000':
+                result.pop()
+        result.append(p)
+    return tuple(result)
+
+
+class LegacyVersion(Version):
+    def parse(self, s):
+        return _legacy_key(s)
+
+    @property
+    def is_prerelease(self):
+        result = False
+        for x in self._parts:
+            if (isinstance(x, string_types) and x.startswith('*') and
+                    x < '*final'):
+                result = True
+                break
+        return result
+
+
+class LegacyMatcher(Matcher):
+    version_class = LegacyVersion
+
+    _operators = dict(Matcher._operators)
+    _operators['~='] = '_match_compatible'
+
+    numeric_re = re.compile(r'^(\d+(\.\d+)*)')
+
+    def _match_compatible(self, version, constraint, prefix):
+        if version < constraint:
+            return False
+        m = self.numeric_re.match(str(constraint))
+        if not m:
+            logger.warning('Cannot compute compatible match for version %s '
+                           ' and constraint %s', version, constraint)
+            return True
+        s = m.groups()[0]
+        if '.' in s:
+            s = s.rsplit('.', 1)[0]
+        return _match_prefix(version, s)
+
+#
+#   Semantic versioning
+#
+
+
+_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)'
+                        r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?'
+                        r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I)
+
+
+def is_semver(s):
+    return _SEMVER_RE.match(s)
+
+
+def _semantic_key(s):
+    def make_tuple(s, absent):
+        if s is None:
+            result = (absent,)
+        else:
+            parts = s[1:].split('.')
+            # We can't compare ints and strings on Python 3, so fudge it
+            # by zero-filling numeric values to simulate a numeric comparison
+            result = tuple([p.zfill(8) if p.isdigit() else p for p in parts])
+        return result
+
+    m = is_semver(s)
+    if not m:
+        raise UnsupportedVersionError(s)
+    groups = m.groups()
+    major, minor, patch = [int(i) for i in groups[:3]]
+    # choose the '|' and '*' so that versions sort correctly
+    pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*')
+    return (major, minor, patch), pre, build
+
+
+class SemanticVersion(Version):
+    def parse(self, s):
+        return _semantic_key(s)
+
+    @property
+    def is_prerelease(self):
+        return self._parts[1][0] != '|'
+
+
+class SemanticMatcher(Matcher):
+    version_class = SemanticVersion
+
+
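+# Illustrative usage sketch added for exposition; not part of upstream
+# distlib. It relies only on SemanticVersion as defined above; the expected
+# results are hedged, not authoritative.
+def _example_semantic_prerelease():
+    # A '-' pre-release segment marks the version as a pre-release, while
+    # '+' build metadata does not.
+    return (SemanticVersion('1.0.0-alpha.1').is_prerelease,   # True expected
+            SemanticVersion('1.0.0+build.5').is_prerelease)   # False expected
+
+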
+class VersionScheme(object):
+    def __init__(self, key, matcher, suggester=None):
+        self.key = key
+        self.matcher = matcher
+        self.suggester = suggester
+
+    def is_valid_version(self, s):
+        try:
+            self.matcher.version_class(s)
+            result = True
+        except UnsupportedVersionError:
+            result = False
+        return result
+
+    def is_valid_matcher(self, s):
+        try:
+            self.matcher(s)
+            result = True
+        except UnsupportedVersionError:
+            result = False
+        return result
+
+    def is_valid_constraint_list(self, s):
+        """
+        Used for processing some metadata fields
+        """
+        # See issue #140. Be tolerant of a single trailing comma.
+        if s.endswith(','):
+            s = s[:-1]
+        return self.is_valid_matcher('dummy_name (%s)' % s)
+
+    def suggest(self, s):
+        if self.suggester is None:
+            result = None
+        else:
+            result = self.suggester(s)
+        return result
+
+
+_SCHEMES = {
+    'normalized': VersionScheme(_normalized_key, NormalizedMatcher,
+                                _suggest_normalized_version),
+    'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda self, s: s),
+    'semantic': VersionScheme(_semantic_key, SemanticMatcher,
+                              _suggest_semantic_version),
+}
+
+_SCHEMES['default'] = _SCHEMES['normalized']
+
+
+def get_scheme(name):
+    if name not in _SCHEMES:
+        raise ValueError('unknown scheme name: %r' % name)
+    return _SCHEMES[name]
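+
+
+# Illustrative usage sketch added for exposition; not part of upstream
+# distlib. It exercises the public get_scheme() entry point defined above;
+# expected outputs are hedged, not authoritative.
+def _example_get_scheme():
+    scheme = get_scheme('normalized')
+    return (scheme.is_valid_version('1.0.post1'),        # True expected
+            scheme.suggest('0.9.33-r17222'),             # '0.9.33.post17222' expected
+            scheme.is_valid_constraint_list('>= 1.0,'))  # trailing comma tolerated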
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/wheel.py b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/wheel.py
new file mode 100644
index 0000000..4a5a30e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/distlib/wheel.py
@@ -0,0 +1,1099 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013-2023 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+from __future__ import unicode_literals
+
+import base64
+import codecs
+import datetime
+from email import message_from_file
+import hashlib
+import json
+import logging
+import os
+import posixpath
+import re
+import shutil
+import sys
+import tempfile
+import zipfile
+
+from . import __version__, DistlibException
+from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
+from .database import InstalledDistribution
+from .metadata import Metadata, WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME
+from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache,
+                   cached_property, get_cache_base, read_exports, tempdir,
+                   get_platform)
+from .version import NormalizedVersion, UnsupportedVersionError
+
+logger = logging.getLogger(__name__)
+
+cache = None  # created when needed
+
+if hasattr(sys, 'pypy_version_info'):  # pragma: no cover
+    IMP_PREFIX = 'pp'
+elif sys.platform.startswith('java'):  # pragma: no cover
+    IMP_PREFIX = 'jy'
+elif sys.platform == 'cli':  # pragma: no cover
+    IMP_PREFIX = 'ip'
+else:
+    IMP_PREFIX = 'cp'
+
+VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
+if not VER_SUFFIX:  # pragma: no cover
+    VER_SUFFIX = '%s%s' % sys.version_info[:2]
+PYVER = 'py' + VER_SUFFIX
+IMPVER = IMP_PREFIX + VER_SUFFIX
+
+ARCH = get_platform().replace('-', '_').replace('.', '_')
+
+ABI = sysconfig.get_config_var('SOABI')
+if ABI and ABI.startswith('cpython-'):
+    ABI = ABI.replace('cpython-', 'cp').split('-')[0]
+else:
+
+    def _derive_abi():
+        parts = ['cp', VER_SUFFIX]
+        if sysconfig.get_config_var('Py_DEBUG'):
+            parts.append('d')
+        if IMP_PREFIX == 'cp':
+            vi = sys.version_info[:2]
+            if vi < (3, 8):
+                wpm = sysconfig.get_config_var('WITH_PYMALLOC')
+                if wpm is None:
+                    wpm = True
+                if wpm:
+                    parts.append('m')
+                if vi < (3, 3):
+                    us = sysconfig.get_config_var('Py_UNICODE_SIZE')
+                    if us == 4 or (us is None and sys.maxunicode == 0x10FFFF):
+                        parts.append('u')
+        return ''.join(parts)
+
+    ABI = _derive_abi()
+    del _derive_abi
+
+FILENAME_RE = re.compile(
+    r'''
+(?P<nm>[^-]+)
+-(?P<vn>\d+[^-]*)
+(-(?P<bn>\d+[^-]*))?
+-(?P<py>\w+\d+(\.\w+\d+)*)
+-(?P<bi>\w+)
+-(?P<ar>\w+(\.\w+)*)
+\.whl$
+''', re.IGNORECASE | re.VERBOSE)
+
+NAME_VERSION_RE = re.compile(
+    r'''
+(?P<nm>[^-]+)
+-(?P<vn>\d+[^-]*)
+(-(?P<bn>\d+[^-]*))?$
+''', re.IGNORECASE | re.VERBOSE)
+
+SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
+SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
+SHEBANG_PYTHON = b'#!python'
+SHEBANG_PYTHONW = b'#!pythonw'
+
+if os.sep == '/':
+    to_posix = lambda o: o
+else:
+    to_posix = lambda o: o.replace(os.sep, '/')
+
+if sys.version_info[0] < 3:
+    import imp
+else:
+    imp = None
+    import importlib.machinery
+    import importlib.util
+
+
+def _get_suffixes():
+    if imp:
+        return [s[0] for s in imp.get_suffixes()]
+    else:
+        return importlib.machinery.EXTENSION_SUFFIXES
+
+
+def _load_dynamic(name, path):
+    # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly
+    if imp:
+        return imp.load_dynamic(name, path)
+    else:
+        spec = importlib.util.spec_from_file_location(name, path)
+        module = importlib.util.module_from_spec(spec)
+        sys.modules[name] = module
+        spec.loader.exec_module(module)
+        return module
+
+
+class Mounter(object):
+
+    def __init__(self):
+        self.impure_wheels = {}
+        self.libs = {}
+
+    def add(self, pathname, extensions):
+        self.impure_wheels[pathname] = extensions
+        self.libs.update(extensions)
+
+    def remove(self, pathname):
+        extensions = self.impure_wheels.pop(pathname)
+        for k, v in extensions:
+            if k in self.libs:
+                del self.libs[k]
+
+    def find_module(self, fullname, path=None):
+        if fullname in self.libs:
+            result = self
+        else:
+            result = None
+        return result
+
+    def load_module(self, fullname):
+        if fullname in sys.modules:
+            result = sys.modules[fullname]
+        else:
+            if fullname not in self.libs:
+                raise ImportError('unable to find extension for %s' % fullname)
+            result = _load_dynamic(fullname, self.libs[fullname])
+            result.__loader__ = self
+            parts = fullname.rsplit('.', 1)
+            if len(parts) > 1:
+                result.__package__ = parts[0]
+        return result
+
+
+_hook = Mounter()
+
+
+class Wheel(object):
+    """
+    Class to build and install from Wheel files (PEP 427).
+    """
+
+    wheel_version = (1, 1)
+    hash_kind = 'sha256'
+
+    def __init__(self, filename=None, sign=False, verify=False):
+        """
+        Initialise an instance using a (valid) filename.
+        """
+        self.sign = sign
+        self.should_verify = verify
+        self.buildver = ''
+        self.pyver = [PYVER]
+        self.abi = ['none']
+        self.arch = ['any']
+        self.dirname = os.getcwd()
+        if filename is None:
+            self.name = 'dummy'
+            self.version = '0.1'
+            self._filename = self.filename
+        else:
+            m = NAME_VERSION_RE.match(filename)
+            if m:
+                info = m.groupdict('')
+                self.name = info['nm']
+                # Reinstate the local version separator
+                self.version = info['vn'].replace('_', '-')
+                self.buildver = info['bn']
+                self._filename = self.filename
+            else:
+                dirname, filename = os.path.split(filename)
+                m = FILENAME_RE.match(filename)
+                if not m:
+                    raise DistlibException('Invalid name or '
+                                           'filename: %r' % filename)
+                if dirname:
+                    self.dirname = os.path.abspath(dirname)
+                self._filename = filename
+                info = m.groupdict('')
+                self.name = info['nm']
+                self.version = info['vn']
+                self.buildver = info['bn']
+                self.pyver = info['py'].split('.')
+                self.abi = info['bi'].split('.')
+                self.arch = info['ar'].split('.')
+
+    @property
+    def filename(self):
+        """
+        Build and return a filename from the various components.
+        """
+        if self.buildver:
+            buildver = '-' + self.buildver
+        else:
+            buildver = ''
+        pyver = '.'.join(self.pyver)
+        abi = '.'.join(self.abi)
+        arch = '.'.join(self.arch)
+        # replace - with _ as a local version separator
+        version = self.version.replace('-', '_')
+        return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, pyver,
+                                         abi, arch)
+
+    @property
+    def exists(self):
+        path = os.path.join(self.dirname, self.filename)
+        return os.path.isfile(path)
+
+    @property
+    def tags(self):
+        for pyver in self.pyver:
+            for abi in self.abi:
+                for arch in self.arch:
+                    yield pyver, abi, arch
+
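+    # Illustrative helper added for exposition; not part of upstream distlib.
+    # It shows how a wheel filename is split into components on construction;
+    # the filename used here is hypothetical.
+    def _example_components(self):
+        w = Wheel('mypkg-1.0-py3-none-any.whl')
+        return w.name, w.version, list(w.tags)   # ('mypkg', '1.0', [('py3', 'none', 'any')]) expected
+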
+    @cached_property
+    def metadata(self):
+        pathname = os.path.join(self.dirname, self.filename)
+        name_ver = '%s-%s' % (self.name, self.version)
+        info_dir = '%s.dist-info' % name_ver
+        wrapper = codecs.getreader('utf-8')
+        with ZipFile(pathname, 'r') as zf:
+            self.get_wheel_metadata(zf)
+            # wv = wheel_metadata['Wheel-Version'].split('.', 1)
+            # file_version = tuple([int(i) for i in wv])
+            # if file_version < (1, 1):
+            # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME,
+            # LEGACY_METADATA_FILENAME]
+            # else:
+            # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME]
+            fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME]
+            result = None
+            for fn in fns:
+                try:
+                    metadata_filename = posixpath.join(info_dir, fn)
+                    with zf.open(metadata_filename) as bf:
+                        wf = wrapper(bf)
+                        result = Metadata(fileobj=wf)
+                        if result:
+                            break
+                except KeyError:
+                    pass
+            if not result:
+                raise ValueError('Invalid wheel, because metadata is '
+                                 'missing: looked in %s' % ', '.join(fns))
+        return result
+
+    def get_wheel_metadata(self, zf):
+        name_ver = '%s-%s' % (self.name, self.version)
+        info_dir = '%s.dist-info' % name_ver
+        metadata_filename = posixpath.join(info_dir, 'WHEEL')
+        with zf.open(metadata_filename) as bf:
+            wf = codecs.getreader('utf-8')(bf)
+            message = message_from_file(wf)
+        return dict(message)
+
+    @cached_property
+    def info(self):
+        pathname = os.path.join(self.dirname, self.filename)
+        with ZipFile(pathname, 'r') as zf:
+            result = self.get_wheel_metadata(zf)
+        return result
+
+    def process_shebang(self, data):
+        m = SHEBANG_RE.match(data)
+        if m:
+            end = m.end()
+            shebang, data_after_shebang = data[:end], data[end:]
+            # Preserve any arguments after the interpreter
+            if b'pythonw' in shebang.lower():
+                shebang_python = SHEBANG_PYTHONW
+            else:
+                shebang_python = SHEBANG_PYTHON
+            m = SHEBANG_DETAIL_RE.match(shebang)
+            if m:
+                args = b' ' + m.groups()[-1]
+            else:
+                args = b''
+            shebang = shebang_python + args
+            data = shebang + data_after_shebang
+        else:
+            cr = data.find(b'\r')
+            lf = data.find(b'\n')
+            if cr < 0 or cr > lf:
+                term = b'\n'
+            else:
+                if data[cr:cr + 2] == b'\r\n':
+                    term = b'\r\n'
+                else:
+                    term = b'\r'
+            data = SHEBANG_PYTHON + term + data
+        return data
+
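+    # Illustrative helper added for exposition; not part of upstream distlib.
+    # A concrete interpreter shebang is rewritten to the generic '#!python'
+    # marker that installers later replace with the target interpreter.
+    def _example_shebang(self):
+        return self.process_shebang(b'#!/usr/bin/python\nprint("hi")\n')
+        # b'#!python\nprint("hi")\n' expected
+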
+    def get_hash(self, data, hash_kind=None):
+        if hash_kind is None:
+            hash_kind = self.hash_kind
+        try:
+            hasher = getattr(hashlib, hash_kind)
+        except AttributeError:
+            raise DistlibException('Unsupported hash algorithm: %r' %
+                                   hash_kind)
+        result = hasher(data).digest()
+        result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii')
+        return hash_kind, result
+
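+    # Illustrative helper added for exposition; not part of upstream distlib.
+    # It shows the digest format used in RECORD rows: '<kind>=<value>', where
+    # value is the urlsafe base64 digest with '=' padding stripped, as
+    # produced by get_hash() above.
+    def _example_record_digest(self):
+        kind, value = self.get_hash(b'example data')
+        return '%s=%s' % (kind, value)   # e.g. 'sha256=...'
+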
+    def write_record(self, records, record_path, archive_record_path):
+        records = list(records)  # make a copy, as mutated
+        records.append((archive_record_path, '', ''))
+        with CSVWriter(record_path) as writer:
+            for row in records:
+                writer.writerow(row)
+
+    def write_records(self, info, libdir, archive_paths):
+        records = []
+        distinfo, info_dir = info
+        # hasher = getattr(hashlib, self.hash_kind)
+        for ap, p in archive_paths:
+            with open(p, 'rb') as f:
+                data = f.read()
+            digest = '%s=%s' % self.get_hash(data)
+            size = os.path.getsize(p)
+            records.append((ap, digest, size))
+
+        p = os.path.join(distinfo, 'RECORD')
+        ap = to_posix(os.path.join(info_dir, 'RECORD'))
+        self.write_record(records, p, ap)
+        archive_paths.append((ap, p))
+
+    def build_zip(self, pathname, archive_paths):
+        with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf:
+            for ap, p in archive_paths:
+                logger.debug('Wrote %s to %s in wheel', p, ap)
+                zf.write(p, ap)
+
+    def build(self, paths, tags=None, wheel_version=None):
+        """
+        Build a wheel from files in specified paths, and use any specified tags
+        when determining the name of the wheel.
+        """
+        if tags is None:
+            tags = {}
+
+        libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]
+        if libkey == 'platlib':
+            is_pure = 'false'
+            default_pyver = [IMPVER]
+            default_abi = [ABI]
+            default_arch = [ARCH]
+        else:
+            is_pure = 'true'
+            default_pyver = [PYVER]
+            default_abi = ['none']
+            default_arch = ['any']
+
+        self.pyver = tags.get('pyver', default_pyver)
+        self.abi = tags.get('abi', default_abi)
+        self.arch = tags.get('arch', default_arch)
+
+        libdir = paths[libkey]
+
+        name_ver = '%s-%s' % (self.name, self.version)
+        data_dir = '%s.data' % name_ver
+        info_dir = '%s.dist-info' % name_ver
+
+        archive_paths = []
+
+        # First, stuff which is not in site-packages
+        for key in ('data', 'headers', 'scripts'):
+            if key not in paths:
+                continue
+            path = paths[key]
+            if os.path.isdir(path):
+                for root, dirs, files in os.walk(path):
+                    for fn in files:
+                        p = fsdecode(os.path.join(root, fn))
+                        rp = os.path.relpath(p, path)
+                        ap = to_posix(os.path.join(data_dir, key, rp))
+                        archive_paths.append((ap, p))
+                        if key == 'scripts' and not p.endswith('.exe'):
+                            with open(p, 'rb') as f:
+                                data = f.read()
+                            data = self.process_shebang(data)
+                            with open(p, 'wb') as f:
+                                f.write(data)
+
+        # Now, stuff which is in site-packages, other than the
+        # distinfo stuff.
+        path = libdir
+        distinfo = None
+        for root, dirs, files in os.walk(path):
+            if root == path:
+                # At the top level only, save distinfo for later
+                # and skip it for now
+                for i, dn in enumerate(dirs):
+                    dn = fsdecode(dn)
+                    if dn.endswith('.dist-info'):
+                        distinfo = os.path.join(root, dn)
+                        del dirs[i]
+                        break
+                assert distinfo, '.dist-info directory expected, not found'
+
+            for fn in files:
+                # comment out next suite to leave .pyc files in
+                if fsdecode(fn).endswith(('.pyc', '.pyo')):
+                    continue
+                p = os.path.join(root, fn)
+                rp = to_posix(os.path.relpath(p, path))
+                archive_paths.append((rp, p))
+
+        # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
+        files = os.listdir(distinfo)
+        for fn in files:
+            if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
+                p = fsdecode(os.path.join(distinfo, fn))
+                ap = to_posix(os.path.join(info_dir, fn))
+                archive_paths.append((ap, p))
+
+        wheel_metadata = [
+            'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
+            'Generator: distlib %s' % __version__,
+            'Root-Is-Purelib: %s' % is_pure,
+        ]
+        for pyver, abi, arch in self.tags:
+            wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
+        p = os.path.join(distinfo, 'WHEEL')
+        with open(p, 'w') as f:
+            f.write('\n'.join(wheel_metadata))
+        ap = to_posix(os.path.join(info_dir, 'WHEEL'))
+        archive_paths.append((ap, p))
+
+        # sort the entries by archive path. Not needed by any spec, but it
+        # keeps the archive listing and RECORD tidier than they would otherwise
+        # be. Use the number of path segments to keep directory entries together,
+        # and keep the dist-info stuff at the end.
+        def sorter(t):
+            ap = t[0]
+            n = ap.count('/')
+            if '.dist-info' in ap:
+                n += 10000
+            return (n, ap)
+
+        archive_paths = sorted(archive_paths, key=sorter)
+
+        # Now, at last, RECORD.
+        # Paths in here are archive paths - nothing else makes sense.
+        self.write_records((distinfo, info_dir), libdir, archive_paths)
+        # Now, ready to build the zip file
+        pathname = os.path.join(self.dirname, self.filename)
+        self.build_zip(pathname, archive_paths)
+        return pathname
+
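+    # Illustrative sketch added for exposition; not part of upstream distlib.
+    # The project name, output directory and 'build/lib' path are all
+    # hypothetical, and build() requires that the purelib directory already
+    # contains a populated '<name>-<version>.dist-info' directory.
+    def _example_build(self):
+        w = Wheel('mypkg-0.1.0')
+        w.dirname = '/tmp/dist'
+        return w.build({'purelib': 'build/lib'})   # path of the generated .whl
+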
+    def skip_entry(self, arcname):
+        """
+        Determine whether an archive entry should be skipped when verifying
+        or installing.
+        """
+        # The signature file won't be in RECORD,
+        # and we don't currently do anything with it
+        # We also skip directories, as they won't be in RECORD
+        # either. See:
+        #
+        # https://github.com/pypa/wheel/issues/294
+        # https://github.com/pypa/wheel/issues/287
+        # https://github.com/pypa/wheel/pull/289
+        #
+        return arcname.endswith(('/', '/RECORD.jws'))
+
+    def install(self, paths, maker, **kwargs):
+        """
+        Install a wheel to the specified paths. If kwarg ``warner`` is
+        specified, it should be a callable, which will be called with two
+        tuples indicating the wheel version of this software and the wheel
+        version in the file, if there is a discrepancy in the versions.
+        This can be used to issue any warnings or raise any exceptions.
+        If kwarg ``lib_only`` is True, only the purelib/platlib files are
+        installed, and the headers, scripts, data and dist-info metadata are
+        not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
+        bytecode will try to use file-hash based invalidation (PEP-552) on
+        supported interpreter versions (CPython 3.7+).
+
+        The return value is a :class:`InstalledDistribution` instance unless
+        ``lib_only`` is True, in which case the return value is ``None``.
+        """
+
+        dry_run = maker.dry_run
+        warner = kwargs.get('warner')
+        lib_only = kwargs.get('lib_only', False)
+        bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation',
+                                            False)
+
+        pathname = os.path.join(self.dirname, self.filename)
+        name_ver = '%s-%s' % (self.name, self.version)
+        data_dir = '%s.data' % name_ver
+        info_dir = '%s.dist-info' % name_ver
+
+        metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
+        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
+        record_name = posixpath.join(info_dir, 'RECORD')
+
+        wrapper = codecs.getreader('utf-8')
+
+        with ZipFile(pathname, 'r') as zf:
+            with zf.open(wheel_metadata_name) as bwf:
+                wf = wrapper(bwf)
+                message = message_from_file(wf)
+            wv = message['Wheel-Version'].split('.', 1)
+            file_version = tuple([int(i) for i in wv])
+            if (file_version != self.wheel_version) and warner:
+                warner(self.wheel_version, file_version)
+
+            if message['Root-Is-Purelib'] == 'true':
+                libdir = paths['purelib']
+            else:
+                libdir = paths['platlib']
+
+            records = {}
+            with zf.open(record_name) as bf:
+                with CSVReader(stream=bf) as reader:
+                    for row in reader:
+                        p = row[0]
+                        records[p] = row
+
+            data_pfx = posixpath.join(data_dir, '')
+            info_pfx = posixpath.join(info_dir, '')
+            script_pfx = posixpath.join(data_dir, 'scripts', '')
+
+            # make a new instance rather than a copy of maker's,
+            # as we mutate it
+            fileop = FileOperator(dry_run=dry_run)
+            fileop.record = True  # so we can rollback if needed
+
+            bc = not sys.dont_write_bytecode  # Double negatives. Lovely!
+
+            outfiles = []  # for RECORD writing
+
+            # for script copying/shebang processing
+            workdir = tempfile.mkdtemp()
+            # set target dir later
+            # we default add_launchers to False, as the
+            # Python Launcher should be used instead
+            maker.source_dir = workdir
+            maker.target_dir = None
+            try:
+                for zinfo in zf.infolist():
+                    arcname = zinfo.filename
+                    if isinstance(arcname, text_type):
+                        u_arcname = arcname
+                    else:
+                        u_arcname = arcname.decode('utf-8')
+                    if self.skip_entry(u_arcname):
+                        continue
+                    row = records[u_arcname]
+                    if row[2] and str(zinfo.file_size) != row[2]:
+                        raise DistlibException('size mismatch for '
+                                               '%s' % u_arcname)
+                    if row[1]:
+                        kind, value = row[1].split('=', 1)
+                        with zf.open(arcname) as bf:
+                            data = bf.read()
+                        _, digest = self.get_hash(data, kind)
+                        if digest != value:
+                            raise DistlibException('digest mismatch for '
+                                                   '%s' % arcname)
+
+                    if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
+                        logger.debug('lib_only: skipping %s', u_arcname)
+                        continue
+                    is_script = (u_arcname.startswith(script_pfx)
+                                 and not u_arcname.endswith('.exe'))
+
+                    if u_arcname.startswith(data_pfx):
+                        _, where, rp = u_arcname.split('/', 2)
+                        outfile = os.path.join(paths[where], convert_path(rp))
+                    else:
+                        # meant for site-packages.
+                        if u_arcname in (wheel_metadata_name, record_name):
+                            continue
+                        outfile = os.path.join(libdir, convert_path(u_arcname))
+                    if not is_script:
+                        with zf.open(arcname) as bf:
+                            fileop.copy_stream(bf, outfile)
+                        # Issue #147: permission bits aren't preserved. Using
+                        # zf.extract(zinfo, libdir) should have worked, but didn't,
+                        # see https://www.thetopsites.net/article/53834422.shtml
+                        # So ... manually preserve permission bits as given in zinfo
+                        if os.name == 'posix':
+                            # just set the normal permission bits
+                            os.chmod(outfile,
+                                     (zinfo.external_attr >> 16) & 0x1FF)
+                        outfiles.append(outfile)
+                        # Double check the digest of the written file
+                        if not dry_run and row[1]:
+                            with open(outfile, 'rb') as bf:
+                                data = bf.read()
+                                _, newdigest = self.get_hash(data, kind)
+                                if newdigest != digest:
+                                    raise DistlibException('digest mismatch '
+                                                           'on write for '
+                                                           '%s' % outfile)
+                        if bc and outfile.endswith('.py'):
+                            try:
+                                pyc = fileop.byte_compile(
+                                    outfile,
+                                    hashed_invalidation=bc_hashed_invalidation)
+                                outfiles.append(pyc)
+                            except Exception:
+                                # Don't give up if byte-compilation fails,
+                                # but log it and perhaps warn the user
+                                logger.warning('Byte-compilation failed',
+                                               exc_info=True)
+                    else:
+                        fn = os.path.basename(convert_path(arcname))
+                        workname = os.path.join(workdir, fn)
+                        with zf.open(arcname) as bf:
+                            fileop.copy_stream(bf, workname)
+
+                        dn, fn = os.path.split(outfile)
+                        maker.target_dir = dn
+                        filenames = maker.make(fn)
+                        fileop.set_executable_mode(filenames)
+                        outfiles.extend(filenames)
+
+                if lib_only:
+                    logger.debug('lib_only: returning None')
+                    dist = None
+                else:
+                    # Generate scripts
+
+                    # Try to get pydist.json so we can see if there are
+                    # any commands to generate. If this fails (e.g. because
+                    # of a legacy wheel), log a warning but don't give up.
+                    commands = None
+                    file_version = self.info['Wheel-Version']
+                    if file_version == '1.0':
+                        # Use legacy info
+                        ep = posixpath.join(info_dir, 'entry_points.txt')
+                        try:
+                            with zf.open(ep) as bwf:
+                                epdata = read_exports(bwf)
+                            commands = {}
+                            for key in ('console', 'gui'):
+                                k = '%s_scripts' % key
+                                if k in epdata:
+                                    commands['wrap_%s' % key] = d = {}
+                                    for v in epdata[k].values():
+                                        s = '%s:%s' % (v.prefix, v.suffix)
+                                        if v.flags:
+                                            s += ' [%s]' % ','.join(v.flags)
+                                        d[v.name] = s
+                        except Exception:
+                            logger.warning('Unable to read legacy script '
+                                           'metadata, so cannot generate '
+                                           'scripts')
+                    else:
+                        try:
+                            with zf.open(metadata_name) as bwf:
+                                wf = wrapper(bwf)
+                                commands = json.load(wf).get('extensions')
+                                if commands:
+                                    commands = commands.get('python.commands')
+                        except Exception:
+                            logger.warning('Unable to read JSON metadata, so '
+                                           'cannot generate scripts')
+                    if commands:
+                        console_scripts = commands.get('wrap_console', {})
+                        gui_scripts = commands.get('wrap_gui', {})
+                        if console_scripts or gui_scripts:
+                            script_dir = paths.get('scripts', '')
+                            if not os.path.isdir(script_dir):
+                                raise ValueError('Valid script path not '
+                                                 'specified')
+                            maker.target_dir = script_dir
+                            for k, v in console_scripts.items():
+                                script = '%s = %s' % (k, v)
+                                filenames = maker.make(script)
+                                fileop.set_executable_mode(filenames)
+
+                            if gui_scripts:
+                                options = {'gui': True}
+                                for k, v in gui_scripts.items():
+                                    script = '%s = %s' % (k, v)
+                                    filenames = maker.make(script, options)
+                                    fileop.set_executable_mode(filenames)
+
+                    p = os.path.join(libdir, info_dir)
+                    dist = InstalledDistribution(p)
+
+                    # Write SHARED
+                    paths = dict(paths)  # don't change passed in dict
+                    del paths['purelib']
+                    del paths['platlib']
+                    paths['lib'] = libdir
+                    p = dist.write_shared_locations(paths, dry_run)
+                    if p:
+                        outfiles.append(p)
+
+                    # Write RECORD
+                    dist.write_installed_files(outfiles, paths['prefix'],
+                                               dry_run)
+                return dist
+            except Exception:  # pragma: no cover
+                logger.exception('installation failed.')
+                fileop.rollback()
+                raise
+            finally:
+                shutil.rmtree(workdir)
+
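+    # Illustrative sketch added for exposition; not part of upstream distlib.
+    # ScriptMaker comes from the sibling distlib 'scripts' module; the paths
+    # keys mirror the ones install() reads above, and every directory value
+    # here is hypothetical.
+    def _example_install(self, wheel_path):
+        from .scripts import ScriptMaker
+        paths = {'prefix': '/tmp/target',
+                 'purelib': '/tmp/target/lib',
+                 'platlib': '/tmp/target/lib',
+                 'scripts': '/tmp/target/bin',
+                 'headers': '/tmp/target/include',
+                 'data': '/tmp/target/data'}
+        maker = ScriptMaker(None, None)   # source/target dirs are set by install()
+        return Wheel(wheel_path).install(paths, maker)
+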
+    def _get_dylib_cache(self):
+        global cache
+        if cache is None:
+            # Use native string to avoid issues on 2.x: see Python #20140.
+            base = os.path.join(get_cache_base(), str('dylib-cache'),
+                                '%s.%s' % sys.version_info[:2])
+            cache = Cache(base)
+        return cache
+
+    def _get_extensions(self):
+        pathname = os.path.join(self.dirname, self.filename)
+        name_ver = '%s-%s' % (self.name, self.version)
+        info_dir = '%s.dist-info' % name_ver
+        arcname = posixpath.join(info_dir, 'EXTENSIONS')
+        wrapper = codecs.getreader('utf-8')
+        result = []
+        with ZipFile(pathname, 'r') as zf:
+            try:
+                with zf.open(arcname) as bf:
+                    wf = wrapper(bf)
+                    extensions = json.load(wf)
+                    cache = self._get_dylib_cache()
+                    prefix = cache.prefix_to_dir(pathname)
+                    cache_base = os.path.join(cache.base, prefix)
+                    if not os.path.isdir(cache_base):
+                        os.makedirs(cache_base)
+                    for name, relpath in extensions.items():
+                        dest = os.path.join(cache_base, convert_path(relpath))
+                        if not os.path.exists(dest):
+                            extract = True
+                        else:
+                            file_time = os.stat(dest).st_mtime
+                            file_time = datetime.datetime.fromtimestamp(
+                                file_time)
+                            info = zf.getinfo(relpath)
+                            wheel_time = datetime.datetime(*info.date_time)
+                            extract = wheel_time > file_time
+                        if extract:
+                            zf.extract(relpath, cache_base)
+                        result.append((name, dest))
+            except KeyError:
+                pass
+        return result
+
+    def is_compatible(self):
+        """
+        Determine if a wheel is compatible with the running system.
+        """
+        return is_compatible(self)
+
+    def is_mountable(self):
+        """
+        Determine if a wheel is asserted as mountable by its metadata.
+        """
+        return True  # for now - metadata details TBD
+
+    def mount(self, append=False):
+        pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
+        if not self.is_compatible():
+            msg = 'Wheel %s not compatible with this Python.' % pathname
+            raise DistlibException(msg)
+        if not self.is_mountable():
+            msg = 'Wheel %s is marked as not mountable.' % pathname
+            raise DistlibException(msg)
+        if pathname in sys.path:
+            logger.debug('%s already in path', pathname)
+        else:
+            if append:
+                sys.path.append(pathname)
+            else:
+                sys.path.insert(0, pathname)
+            extensions = self._get_extensions()
+            if extensions:
+                if _hook not in sys.meta_path:
+                    sys.meta_path.append(_hook)
+                _hook.add(pathname, extensions)
+
+    def unmount(self):
+        pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
+        if pathname not in sys.path:
+            logger.debug('%s not in path', pathname)
+        else:
+            sys.path.remove(pathname)
+            if pathname in _hook.impure_wheels:
+                _hook.remove(pathname)
+            if not _hook.impure_wheels:
+                if _hook in sys.meta_path:
+                    sys.meta_path.remove(_hook)
+
+    def verify(self):
+        pathname = os.path.join(self.dirname, self.filename)
+        name_ver = '%s-%s' % (self.name, self.version)
+        # data_dir = '%s.data' % name_ver
+        info_dir = '%s.dist-info' % name_ver
+
+        # metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
+        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
+        record_name = posixpath.join(info_dir, 'RECORD')
+
+        wrapper = codecs.getreader('utf-8')
+
+        with ZipFile(pathname, 'r') as zf:
+            with zf.open(wheel_metadata_name) as bwf:
+                wf = wrapper(bwf)
+                message_from_file(wf)
+            # wv = message['Wheel-Version'].split('.', 1)
+            # file_version = tuple([int(i) for i in wv])
+            # TODO version verification
+
+            records = {}
+            with zf.open(record_name) as bf:
+                with CSVReader(stream=bf) as reader:
+                    for row in reader:
+                        p = row[0]
+                        records[p] = row
+
+            for zinfo in zf.infolist():
+                arcname = zinfo.filename
+                if isinstance(arcname, text_type):
+                    u_arcname = arcname
+                else:
+                    u_arcname = arcname.decode('utf-8')
+                # See issue #115: some wheels have .. in their entries, but
+                # in the filename ... e.g. __main__..py ! So the check is
+                # updated to look for .. in the directory portions
+                p = u_arcname.split('/')
+                if '..' in p:
+                    raise DistlibException('invalid entry in '
+                                           'wheel: %r' % u_arcname)
+
+                if self.skip_entry(u_arcname):
+                    continue
+                row = records[u_arcname]
+                if row[2] and str(zinfo.file_size) != row[2]:
+                    raise DistlibException('size mismatch for '
+                                           '%s' % u_arcname)
+                if row[1]:
+                    kind, value = row[1].split('=', 1)
+                    with zf.open(arcname) as bf:
+                        data = bf.read()
+                    _, digest = self.get_hash(data, kind)
+                    if digest != value:
+                        raise DistlibException('digest mismatch for '
+                                               '%s' % arcname)
+
+    def update(self, modifier, dest_dir=None, **kwargs):
+        """
+        Update the contents of a wheel in a generic way. The modifier should
+        be a callable which expects a dictionary argument: its keys are
+        archive-entry paths, and its values are absolute filesystem paths
+        where the contents the corresponding archive entries can be found. The
+        modifier is free to change the contents of the files pointed to, add
+        new entries and remove entries, before returning. This method will
+        extract the entire contents of the wheel to a temporary location, call
+        the modifier, and then use the passed (and possibly updated)
+        dictionary to write a new wheel. If ``dest_dir`` is specified, the new
+        wheel is written there -- otherwise, the original wheel is overwritten.
+
+        The modifier should return True if it updated the wheel, else False.
+        This method returns the same value the modifier returns.
+        """
+
+        def get_version(path_map, info_dir):
+            version = path = None
+            key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME)
+            if key not in path_map:
+                key = '%s/PKG-INFO' % info_dir
+            if key in path_map:
+                path = path_map[key]
+                version = Metadata(path=path).version
+            return version, path
+
+        def update_version(version, path):
+            updated = None
+            try:
+                NormalizedVersion(version)
+                i = version.find('-')
+                if i < 0:
+                    updated = '%s+1' % version
+                else:
+                    parts = [int(s) for s in version[i + 1:].split('.')]
+                    parts[-1] += 1
+                    updated = '%s+%s' % (version[:i], '.'.join(
+                        str(i) for i in parts))
+            except UnsupportedVersionError:
+                logger.debug(
+                    'Cannot update non-compliant (PEP-440) '
+                    'version %r', version)
+            if updated:
+                md = Metadata(path=path)
+                md.version = updated
+                legacy = path.endswith(LEGACY_METADATA_FILENAME)
+                md.write(path=path, legacy=legacy)
+                logger.debug('Version updated from %r to %r', version, updated)
+
+        pathname = os.path.join(self.dirname, self.filename)
+        name_ver = '%s-%s' % (self.name, self.version)
+        info_dir = '%s.dist-info' % name_ver
+        record_name = posixpath.join(info_dir, 'RECORD')
+        with tempdir() as workdir:
+            with ZipFile(pathname, 'r') as zf:
+                path_map = {}
+                for zinfo in zf.infolist():
+                    arcname = zinfo.filename
+                    if isinstance(arcname, text_type):
+                        u_arcname = arcname
+                    else:
+                        u_arcname = arcname.decode('utf-8')
+                    if u_arcname == record_name:
+                        continue
+                    if '..' in u_arcname:
+                        raise DistlibException('invalid entry in '
+                                               'wheel: %r' % u_arcname)
+                    zf.extract(zinfo, workdir)
+                    path = os.path.join(workdir, convert_path(u_arcname))
+                    path_map[u_arcname] = path
+
+            # Remember the version.
+            original_version, _ = get_version(path_map, info_dir)
+            # Files extracted. Call the modifier.
+            modified = modifier(path_map, **kwargs)
+            if modified:
+                # Something changed - need to build a new wheel.
+                current_version, path = get_version(path_map, info_dir)
+                if current_version and (current_version == original_version):
+                    # Add or update local version to signify changes.
+                    update_version(current_version, path)
+                # Decide where the new wheel goes.
+                if dest_dir is None:
+                    fd, newpath = tempfile.mkstemp(suffix='.whl',
+                                                   prefix='wheel-update-',
+                                                   dir=workdir)
+                    os.close(fd)
+                else:
+                    if not os.path.isdir(dest_dir):
+                        raise DistlibException('Not a directory: %r' %
+                                               dest_dir)
+                    newpath = os.path.join(dest_dir, self.filename)
+                archive_paths = list(path_map.items())
+                distinfo = os.path.join(workdir, info_dir)
+                info = distinfo, info_dir
+                self.write_records(info, workdir, archive_paths)
+                self.build_zip(newpath, archive_paths)
+                if dest_dir is None:
+                    shutil.copyfile(newpath, pathname)
+        return modified
+
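+
+# Illustrative sketch added for exposition; not part of upstream distlib.
+# The modifier contract is the one described in Wheel.update() above: it
+# receives {archive entry path: extracted filesystem path} and returns True
+# only if it changed something. The file name matched here is hypothetical.
+def _example_update(wheel_path):
+    def modifier(path_map, **kwargs):
+        changed = False
+        for arcname, path in path_map.items():
+            if arcname.endswith('data.txt'):
+                with open(path, 'a') as f:
+                    f.write('updated\n')
+                changed = True
+        return changed
+    return Wheel(wheel_path).update(modifier)
+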
+
+def _get_glibc_version():
+    import platform
+    ver = platform.libc_ver()
+    result = []
+    if ver[0] == 'glibc':
+        for s in ver[1].split('.'):
+            result.append(int(s) if s.isdigit() else 0)
+        result = tuple(result)
+    return result
+
+
+def compatible_tags():
+    """
+    Return (pyver, abi, arch) tuples compatible with this Python.
+    """
+    versions = [VER_SUFFIX]
+    major = VER_SUFFIX[0]
+    for minor in range(sys.version_info[1] - 1, -1, -1):
+        versions.append(''.join([major, str(minor)]))
+
+    abis = []
+    for suffix in _get_suffixes():
+        if suffix.startswith('.abi'):
+            abis.append(suffix.split('.', 2)[1])
+    abis.sort()
+    if ABI != 'none':
+        abis.insert(0, ABI)
+    abis.append('none')
+    result = []
+
+    arches = [ARCH]
+    if sys.platform == 'darwin':
+        m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
+        if m:
+            name, major, minor, arch = m.groups()
+            minor = int(minor)
+            matches = [arch]
+            if arch in ('i386', 'ppc'):
+                matches.append('fat')
+            if arch in ('i386', 'ppc', 'x86_64'):
+                matches.append('fat3')
+            if arch in ('ppc64', 'x86_64'):
+                matches.append('fat64')
+            if arch in ('i386', 'x86_64'):
+                matches.append('intel')
+            if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
+                matches.append('universal')
+            while minor >= 0:
+                for match in matches:
+                    s = '%s_%s_%s_%s' % (name, major, minor, match)
+                    if s != ARCH:  # already there
+                        arches.append(s)
+                minor -= 1
+
+    # Most specific - our Python version, ABI and arch
+    for abi in abis:
+        for arch in arches:
+            result.append((''.join((IMP_PREFIX, versions[0])), abi, arch))
+            # manylinux
+            if abi != 'none' and sys.platform.startswith('linux'):
+                arch = arch.replace('linux_', '')
+                parts = _get_glibc_version()
+                if len(parts) == 2:
+                    if parts >= (2, 5):
+                        result.append((''.join((IMP_PREFIX, versions[0])), abi,
+                                       'manylinux1_%s' % arch))
+                    if parts >= (2, 12):
+                        result.append((''.join((IMP_PREFIX, versions[0])), abi,
+                                       'manylinux2010_%s' % arch))
+                    if parts >= (2, 17):
+                        result.append((''.join((IMP_PREFIX, versions[0])), abi,
+                                       'manylinux2014_%s' % arch))
+                    result.append(
+                        (''.join((IMP_PREFIX, versions[0])), abi,
+                         'manylinux_%s_%s_%s' % (parts[0], parts[1], arch)))
+
+    # where no ABI / arch dependency, but IMP_PREFIX dependency
+    for i, version in enumerate(versions):
+        result.append((''.join((IMP_PREFIX, version)), 'none', 'any'))
+        if i == 0:
+            result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any'))
+
+    # no IMP_PREFIX, ABI or arch dependency
+    for i, version in enumerate(versions):
+        result.append((''.join(('py', version)), 'none', 'any'))
+        if i == 0:
+            result.append((''.join(('py', version[0])), 'none', 'any'))
+
+    return set(result)
+
+
+COMPATIBLE_TAGS = compatible_tags()
+
+del compatible_tags
+
+
+def is_compatible(wheel, tags=None):
+    if not isinstance(wheel, Wheel):
+        wheel = Wheel(wheel)  # assume it's a filename
+    result = False
+    if tags is None:
+        tags = COMPATIBLE_TAGS
+    for ver, abi, arch in tags:
+        if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch:
+            result = True
+            break
+    return result
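+
+
+# Illustrative usage sketch added for exposition; not part of upstream
+# distlib. The wheel filename is hypothetical; a pure-Python 'py3-none-any'
+# wheel is expected to be compatible with any CPython 3 interpreter.
+def _example_is_compatible():
+    return is_compatible('mypkg-1.0-py3-none-any.whl')   # True expected on CPython 3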
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/distro/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/distro/__init__.py
new file mode 100644
index 0000000..7686fe8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/distro/__init__.py
@@ -0,0 +1,54 @@
+from .distro import (
+    NORMALIZED_DISTRO_ID,
+    NORMALIZED_LSB_ID,
+    NORMALIZED_OS_ID,
+    LinuxDistribution,
+    __version__,
+    build_number,
+    codename,
+    distro_release_attr,
+    distro_release_info,
+    id,
+    info,
+    like,
+    linux_distribution,
+    lsb_release_attr,
+    lsb_release_info,
+    major_version,
+    minor_version,
+    name,
+    os_release_attr,
+    os_release_info,
+    uname_attr,
+    uname_info,
+    version,
+    version_parts,
+)
+
+__all__ = [
+    "NORMALIZED_DISTRO_ID",
+    "NORMALIZED_LSB_ID",
+    "NORMALIZED_OS_ID",
+    "LinuxDistribution",
+    "build_number",
+    "codename",
+    "distro_release_attr",
+    "distro_release_info",
+    "id",
+    "info",
+    "like",
+    "linux_distribution",
+    "lsb_release_attr",
+    "lsb_release_info",
+    "major_version",
+    "minor_version",
+    "name",
+    "os_release_attr",
+    "os_release_info",
+    "uname_attr",
+    "uname_info",
+    "version",
+    "version_parts",
+]
+
+__version__ = __version__
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/distro/__main__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/distro/__main__.py
new file mode 100644
index 0000000..0c01d5b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/distro/__main__.py
@@ -0,0 +1,4 @@
+from .distro import main
+
+if __name__ == "__main__":
+    main()
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/distro/distro.py b/.venv/lib/python3.12/site-packages/pip/_vendor/distro/distro.py
new file mode 100644
index 0000000..89e1868
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/distro/distro.py
@@ -0,0 +1,1399 @@
+#!/usr/bin/env python
+# Copyright 2015,2016,2017 Nir Cohen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+The ``distro`` package (``distro`` stands for Linux Distribution) provides
+information about the Linux distribution it runs on, such as a reliable
+machine-readable distro ID, or version information.
+
+It is the recommended replacement for Python's original
+:py:func:`platform.linux_distribution` function, but it provides much more
+functionality. An alternative implementation became necessary because Python
+3.5 deprecated this function, and Python 3.8 removed it altogether. Its
+predecessor function :py:func:`platform.dist` was already deprecated since
+Python 2.6 and removed in Python 3.8. Still, there are many cases in which
+access to OS distribution information is needed. See `Python issue 1322
+<https://bugs.python.org/issue1322>`_ for more information.
+"""
+
+import argparse
+import json
+import logging
+import os
+import re
+import shlex
+import subprocess
+import sys
+import warnings
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    Iterable,
+    Optional,
+    Sequence,
+    TextIO,
+    Tuple,
+    Type,
+)
+
+try:
+    from typing import TypedDict
+except ImportError:
+    # Python 3.7
+    TypedDict = dict
+
+__version__ = "1.8.0"
+
+
+class VersionDict(TypedDict):
+    major: str
+    minor: str
+    build_number: str
+
+
+class InfoDict(TypedDict):
+    id: str
+    version: str
+    version_parts: VersionDict
+    like: str
+    codename: str
+
+
+_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc")
+_UNIXUSRLIBDIR = os.environ.get("UNIXUSRLIBDIR", "/usr/lib")
+_OS_RELEASE_BASENAME = "os-release"
+
+#: Translation table for normalizing the "ID" attribute defined in os-release
+#: files, for use by the :func:`distro.id` method.
+#:
+#: * Key: Value as defined in the os-release file, translated to lower case,
+#:   with blanks translated to underscores.
+#:
+#: * Value: Normalized value.
+NORMALIZED_OS_ID = {
+    "ol": "oracle",  # Oracle Linux
+    "opensuse-leap": "opensuse",  # Newer versions of OpenSuSE report as opensuse-leap
+}
+
+#: Translation table for normalizing the "Distributor ID" attribute returned by
+#: the lsb_release command, for use by the :func:`distro.id` method.
+#:
+#: * Key: Value as returned by the lsb_release command, translated to lower
+#:   case, with blanks translated to underscores.
+#:
+#: * Value: Normalized value.
+NORMALIZED_LSB_ID = {
+    "enterpriseenterpriseas": "oracle",  # Oracle Enterprise Linux 4
+    "enterpriseenterpriseserver": "oracle",  # Oracle Linux 5
+    "redhatenterpriseworkstation": "rhel",  # RHEL 6, 7 Workstation
+    "redhatenterpriseserver": "rhel",  # RHEL 6, 7 Server
+    "redhatenterprisecomputenode": "rhel",  # RHEL 6 ComputeNode
+}
+
+#: Translation table for normalizing the distro ID derived from the file name
+#: of distro release files, for use by the :func:`distro.id` method.
+#:
+#: * Key: Value as derived from the file name of a distro release file,
+#:   translated to lower case, with blanks translated to underscores.
+#:
+#: * Value: Normalized value.
+NORMALIZED_DISTRO_ID = {
+    "redhat": "rhel",  # RHEL 6.x, 7.x
+}
+
+# Pattern for content of distro release file (reversed)
+_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile(
+    r"(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)"
+)
+
+# Pattern for base file name of distro release file
+_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$")
+
+# Base file names to be looked up for if _UNIXCONFDIR is not readable.
+_DISTRO_RELEASE_BASENAMES = [
+    "SuSE-release",
+    "arch-release",
+    "base-release",
+    "centos-release",
+    "fedora-release",
+    "gentoo-release",
+    "mageia-release",
+    "mandrake-release",
+    "mandriva-release",
+    "mandrivalinux-release",
+    "manjaro-release",
+    "oracle-release",
+    "redhat-release",
+    "rocky-release",
+    "sl-release",
+    "slackware-version",
+]
+
+# Base file names to be ignored when searching for distro release file
+_DISTRO_RELEASE_IGNORE_BASENAMES = (
+    "debian_version",
+    "lsb-release",
+    "oem-release",
+    _OS_RELEASE_BASENAME,
+    "system-release",
+    "plesk-release",
+    "iredmail-release",
+)
+
+
+def linux_distribution(full_distribution_name: bool = True) -> Tuple[str, str, str]:
+    """
+    .. deprecated:: 1.6.0
+
+        :func:`distro.linux_distribution()` is deprecated. It should only be
+        used as a compatibility shim with Python's
+        :py:func:`platform.linux_distribution()`. Please use :func:`distro.id`,
+        :func:`distro.version` and :func:`distro.name` instead.
+
+    Return information about the current OS distribution as a tuple
+    ``(id_name, version, codename)`` with items as follows:
+
+    * ``id_name``:  If *full_distribution_name* is false, the result of
+      :func:`distro.id`. Otherwise, the result of :func:`distro.name`.
+
+    * ``version``:  The result of :func:`distro.version`.
+
+    * ``codename``:  The extra item (usually in parentheses) after the
+      os-release version number, or the result of :func:`distro.codename`.
+
+    The interface of this function is compatible with the original
+    :py:func:`platform.linux_distribution` function, supporting a subset of
+    its parameters.
+
+    The data it returns may not exactly be the same, because it uses more data
+    sources than the original function, and that may lead to different data if
+    the OS distribution is not consistent across multiple data sources it
+    provides (there are indeed such distributions ...).
+
+    Another reason for differences is the fact that the :func:`distro.id`
+    method normalizes the distro ID string to a reliable machine-readable value
+    for a number of popular OS distributions.
+    """
+    warnings.warn(
+        "distro.linux_distribution() is deprecated. It should only be used as a "
+        "compatibility shim with Python's platform.linux_distribution(). Please use "
+        "distro.id(), distro.version() and distro.name() instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return _distro.linux_distribution(full_distribution_name)
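# Illustrative sketch (editorial example, not part of the committed file): the
# deprecated shim versus the recommended accessors. Assumes this module is
# importable as `distro`; printed values depend on the host system.
import distro

name_, version_, codename_ = distro.linux_distribution(full_distribution_name=False)
print(name_, version_, codename_)                     # e.g. "ubuntu 22.04 jammy" (emits DeprecationWarning)
print(distro.id(), distro.version(), distro.name())   # preferred replacements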
+
+
+def id() -> str:
+    """
+    Return the distro ID of the current distribution, as a
+    machine-readable string.
+
+    For a number of OS distributions, the returned distro ID value is
+    *reliable*, in the sense that it is documented and that it does not change
+    across releases of the distribution.
+
+    This package maintains the following reliable distro ID values:
+
+    ==============  =========================================
+    Distro ID       Distribution
+    ==============  =========================================
+    "ubuntu"        Ubuntu
+    "debian"        Debian
+    "rhel"          RedHat Enterprise Linux
+    "centos"        CentOS
+    "fedora"        Fedora
+    "sles"          SUSE Linux Enterprise Server
+    "opensuse"      openSUSE
+    "amzn"          Amazon Linux
+    "arch"          Arch Linux
+    "buildroot"     Buildroot
+    "cloudlinux"    CloudLinux OS
+    "exherbo"       Exherbo Linux
+    "gentoo"        GenToo Linux
+    "ibm_powerkvm"  IBM PowerKVM
+    "kvmibm"        KVM for IBM z Systems
+    "linuxmint"     Linux Mint
+    "mageia"        Mageia
+    "mandriva"      Mandriva Linux
+    "parallels"     Parallels
+    "pidora"        Pidora
+    "raspbian"      Raspbian
+    "oracle"        Oracle Linux (and Oracle Enterprise Linux)
+    "scientific"    Scientific Linux
+    "slackware"     Slackware
+    "xenserver"     XenServer
+    "openbsd"       OpenBSD
+    "netbsd"        NetBSD
+    "freebsd"       FreeBSD
+    "midnightbsd"   MidnightBSD
+    "rocky"         Rocky Linux
+    "aix"           AIX
+    "guix"          Guix System
+    ==============  =========================================
+
+    If you have a need to get distros for reliable IDs added into this set,
+    or if you find that the :func:`distro.id` function returns a different
+    distro ID for one of the listed distros, please create an issue in the
+    `distro issue tracker`_.
+
+    **Lookup hierarchy and transformations:**
+
+    First, the ID is obtained from the following sources, in the specified
+    order. The first available and non-empty value is used:
+
+    * the value of the "ID" attribute of the os-release file,
+
+    * the value of the "Distributor ID" attribute returned by the lsb_release
+      command,
+
+    * the first part of the file name of the distro release file,
+
+    The so determined ID value then passes the following transformations,
+    before it is returned by this method:
+
+    * it is translated to lower case,
+
+    * blanks (which should not be there anyway) are translated to underscores,
+
+    * a normalization of the ID is performed, based upon
+      `normalization tables`_. The purpose of this normalization is to ensure
+      that the ID is as reliable as possible, even across incompatible changes
+      in the OS distributions. A common reason for an incompatible change is
+      the addition of an os-release file, or the addition of the lsb_release
+      command, with ID values that differ from what was previously determined
+      from the distro release file name.
+    """
+    return _distro.id()
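# Illustrative sketch (editorial example, not part of the committed file): how
# the normalization tables above shape distro.id(). Assumes the module is
# importable as `distro`; Oracle Linux is just one concrete case.
import distro

# On Oracle Linux, os-release reports ID="ol", which NORMALIZED_OS_ID maps to "oracle".
if distro.id() == "oracle":
    print("Running on Oracle Linux")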
+
+
+def name(pretty: bool = False) -> str:
+    """
+    Return the name of the current OS distribution, as a human-readable
+    string.
+
+    If *pretty* is false, the name is returned without version or codename.
+    (e.g. "CentOS Linux")
+
+    If *pretty* is true, the version and codename are appended.
+    (e.g. "CentOS Linux 7.1.1503 (Core)")
+
+    **Lookup hierarchy:**
+
+    The name is obtained from the following sources, in the specified order.
+    The first available and non-empty value is used:
+
+    * If *pretty* is false:
+
+      - the value of the "NAME" attribute of the os-release file,
+
+      - the value of the "Distributor ID" attribute returned by the lsb_release
+        command,
+
+      - the value of the "" field of the distro release file.
+
+    * If *pretty* is true:
+
+      - the value of the "PRETTY_NAME" attribute of the os-release file,
+
+      - the value of the "Description" attribute returned by the lsb_release
+        command,
+
+      - the value of the "<name>" field of the distro release file, appended
+        with the value of the pretty version ("<version_id>" and "<codename>"
+        fields) of the distro release file, if available.
+    """
+    return _distro.name(pretty)
+
+
+def version(pretty: bool = False, best: bool = False) -> str:
+    """
+    Return the version of the current OS distribution, as a human-readable
+    string.
+
+    If *pretty* is false, the version is returned without codename (e.g.
+    "7.0").
+
+    If *pretty* is true, the codename in parenthesis is appended, if the
+    codename is non-empty (e.g. "7.0 (Maipo)").
+
+    Some distributions provide version numbers with different precisions in
+    the different sources of distribution information. Examining the different
+    sources in a fixed priority order does not always yield the most precise
+    version (e.g. for Debian 8.2, or CentOS 7.1).
+
+    Some other distributions may not provide this kind of information. In these
+    cases, an empty string would be returned. This behavior can be observed
+    with rolling releases distributions (e.g. Arch Linux).
+
+    The *best* parameter can be used to control the approach for the returned
+    version:
+
+    If *best* is false, the first non-empty version number in priority order of
+    the examined sources is returned.
+
+    If *best* is true, the most precise version number out of all examined
+    sources is returned.
+
+    **Lookup hierarchy:**
+
+    In all cases, the version number is obtained from the following sources.
+    If *best* is false, this order represents the priority order:
+
+    * the value of the "VERSION_ID" attribute of the os-release file,
+    * the value of the "Release" attribute returned by the lsb_release
+      command,
+    * the version number parsed from the "<version_id>" field of the first line
+      of the distro release file,
+    * the version number parsed from the "PRETTY_NAME" attribute of the
+      os-release file, if it follows the format of the distro release files.
+    * the version number parsed from the "Description" attribute returned by
+      the lsb_release command, if it follows the format of the distro release
+      files.
+    """
+    return _distro.version(pretty, best)
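# Illustrative sketch (editorial example, not part of the committed file): the
# effect of the *best* parameter. Assumes the module is importable as `distro`;
# on CentOS 7.1, for instance, os-release only carries VERSION_ID="7" while the
# distro release file carries the full "7.1.1503".
import distro

print(distro.version())           # first non-empty value in priority order, e.g. "7"
print(distro.version(best=True))  # most precise value across all sources, e.g. "7.1.1503"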
+
+
+def version_parts(best: bool = False) -> Tuple[str, str, str]:
+    """
+    Return the version of the current OS distribution as a tuple
+    ``(major, minor, build_number)`` with items as follows:
+
+    * ``major``:  The result of :func:`distro.major_version`.
+
+    * ``minor``:  The result of :func:`distro.minor_version`.
+
+    * ``build_number``:  The result of :func:`distro.build_number`.
+
+    For a description of the *best* parameter, see the :func:`distro.version`
+    method.
+    """
+    return _distro.version_parts(best)
+
+
+def major_version(best: bool = False) -> str:
+    """
+    Return the major version of the current OS distribution, as a string,
+    if provided.
+    Otherwise, the empty string is returned. The major version is the first
+    part of the dot-separated version string.
+
+    For a description of the *best* parameter, see the :func:`distro.version`
+    method.
+    """
+    return _distro.major_version(best)
+
+
+def minor_version(best: bool = False) -> str:
+    """
+    Return the minor version of the current OS distribution, as a string,
+    if provided.
+    Otherwise, the empty string is returned. The minor version is the second
+    part of the dot-separated version string.
+
+    For a description of the *best* parameter, see the :func:`distro.version`
+    method.
+    """
+    return _distro.minor_version(best)
+
+
+def build_number(best: bool = False) -> str:
+    """
+    Return the build number of the current OS distribution, as a string,
+    if provided.
+    Otherwise, the empty string is returned. The build number is the third part
+    of the dot-separated version string.
+
+    For a description of the *best* parameter, see the :func:`distro.version`
+    method.
+    """
+    return _distro.build_number(best)
+
+
+def like() -> str:
+    """
+    Return a space-separated list of distro IDs of distributions that are
+    closely related to the current OS distribution in regards to packaging
+    and programming interfaces, for example distributions the current
+    distribution is a derivative from.
+
+    **Lookup hierarchy:**
+
+    This information item is only provided by the os-release file.
+    For details, see the description of the "ID_LIKE" attribute in the
+    `os-release man page
+    <https://www.freedesktop.org/software/systemd/man/os-release.html>`_.
+    """
+    return _distro.like()
+
+
+def codename() -> str:
+    """
+    Return the codename for the release of the current OS distribution,
+    as a string.
+
+    If the distribution does not have a codename, an empty string is returned.
+
+    Note that the returned codename is not always really a codename. For
+    example, openSUSE returns "x86_64". This function does not handle such
+    cases in any special way and just returns the string it finds, if any.
+
+    **Lookup hierarchy:**
+
+    * the codename within the "VERSION" attribute of the os-release file, if
+      provided,
+
+    * the value of the "Codename" attribute returned by the lsb_release
+      command,
+
+    * the value of the "" field of the distro release file.
+    """
+    return _distro.codename()
+
+
+def info(pretty: bool = False, best: bool = False) -> InfoDict:
+    """
+    Return certain machine-readable information items about the current OS
+    distribution in a dictionary, as shown in the following example:
+
+    .. sourcecode:: python
+
+        {
+            'id': 'rhel',
+            'version': '7.0',
+            'version_parts': {
+                'major': '7',
+                'minor': '0',
+                'build_number': ''
+            },
+            'like': 'fedora',
+            'codename': 'Maipo'
+        }
+
+    The dictionary structure and keys are always the same, regardless of which
+    information items are available in the underlying data sources. The values
+    for the various keys are as follows:
+
+    * ``id``:  The result of :func:`distro.id`.
+
+    * ``version``:  The result of :func:`distro.version`.
+
+    * ``version_parts -> major``:  The result of :func:`distro.major_version`.
+
+    * ``version_parts -> minor``:  The result of :func:`distro.minor_version`.
+
+    * ``version_parts -> build_number``:  The result of
+      :func:`distro.build_number`.
+
+    * ``like``:  The result of :func:`distro.like`.
+
+    * ``codename``:  The result of :func:`distro.codename`.
+
+    For a description of the *pretty* and *best* parameters, see the
+    :func:`distro.version` method.
+    """
+    return _distro.info(pretty, best)
+
+
+def os_release_info() -> Dict[str, str]:
+    """
+    Return a dictionary containing key-value pairs for the information items
+    from the os-release file data source of the current OS distribution.
+
+    See `os-release file`_ for details about these information items.
+    """
+    return _distro.os_release_info()
+
+
+def lsb_release_info() -> Dict[str, str]:
+    """
+    Return a dictionary containing key-value pairs for the information items
+    from the lsb_release command data source of the current OS distribution.
+
+    See `lsb_release command output`_ for details about these information
+    items.
+    """
+    return _distro.lsb_release_info()
+
+
+def distro_release_info() -> Dict[str, str]:
+    """
+    Return a dictionary containing key-value pairs for the information items
+    from the distro release file data source of the current OS distribution.
+
+    See `distro release file`_ for details about these information items.
+    """
+    return _distro.distro_release_info()
+
+
+def uname_info() -> Dict[str, str]:
+    """
+    Return a dictionary containing key-value pairs for the information items
+    from the uname command data source of the current OS distribution.
+    """
+    return _distro.uname_info()
+
+
+def os_release_attr(attribute: str) -> str:
+    """
+    Return a single named information item from the os-release file data source
+    of the current OS distribution.
+
+    Parameters:
+
+    * ``attribute`` (string): Key of the information item.
+
+    Returns:
+
+    * (string): Value of the information item, if the item exists.
+      The empty string, if the item does not exist.
+
+    See `os-release file`_ for details about these information items.
+    """
+    return _distro.os_release_attr(attribute)
+
+
+def lsb_release_attr(attribute: str) -> str:
+    """
+    Return a single named information item from the lsb_release command output
+    data source of the current OS distribution.
+
+    Parameters:
+
+    * ``attribute`` (string): Key of the information item.
+
+    Returns:
+
+    * (string): Value of the information item, if the item exists.
+      The empty string, if the item does not exist.
+
+    See `lsb_release command output`_ for details about these information
+    items.
+    """
+    return _distro.lsb_release_attr(attribute)
+
+
+def distro_release_attr(attribute: str) -> str:
+    """
+    Return a single named information item from the distro release file
+    data source of the current OS distribution.
+
+    Parameters:
+
+    * ``attribute`` (string): Key of the information item.
+
+    Returns:
+
+    * (string): Value of the information item, if the item exists.
+      The empty string, if the item does not exist.
+
+    See `distro release file`_ for details about these information items.
+    """
+    return _distro.distro_release_attr(attribute)
+
+
+def uname_attr(attribute: str) -> str:
+    """
+    Return a single named information item from the uname command output
+    data source of the current OS distribution.
+
+    Parameters:
+
+    * ``attribute`` (string): Key of the information item.
+
+    Returns:
+
+    * (string): Value of the information item, if the item exists.
+                The empty string, if the item does not exist.
+    """
+    return _distro.uname_attr(attribute)
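# Illustrative sketch (editorial example, not part of the committed file): the
# single-source accessors return raw values from one data source only, unlike
# id()/name()/version(), which consolidate all sources. Assumes the module is
# importable as `distro`.
import distro

print(distro.os_release_attr("pretty_name"))   # "" if os-release has no such key
print(distro.lsb_release_attr("description"))  # "" if lsb_release is unavailable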
+
+
+try:
+    from functools import cached_property
+except ImportError:
+    # Python < 3.8
+    class cached_property:  # type: ignore
+        """A version of @property which caches the value.  On access, it calls the
+        underlying function and sets the value in `__dict__` so future accesses
+        will not re-call the property.
+        """
+
+        def __init__(self, f: Callable[[Any], Any]) -> None:
+            self._fname = f.__name__
+            self._f = f
+
+        def __get__(self, obj: Any, owner: Type[Any]) -> Any:
+            assert obj is not None, f"call {self._fname} on an instance"
+            ret = obj.__dict__[self._fname] = self._f(obj)
+            return ret
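# Illustrative sketch (editorial example, not part of the committed file): both
# functools.cached_property (Python 3.8+) and the fallback above cache by storing
# the computed value in the instance __dict__ under the property name, so later
# lookups bypass the descriptor. Uses whichever cached_property is in scope here.
class _Demo:
    @cached_property
    def value(self) -> int:
        print("computed once")
        return 42

d = _Demo()
print(d.value)   # prints "computed once", then 42; result now cached in d.__dict__
print(d.value)   # 42 again, without recomputation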
+
+
+class LinuxDistribution:
+    """
+    Provides information about an OS distribution.
+
+    This package creates a private module-global instance of this class with
+    default initialization arguments, that is used by the
+    `consolidated accessor functions`_ and `single source accessor functions`_.
+    By using default initialization arguments, that module-global instance
+    returns data about the current OS distribution (i.e. the distro this
+    package runs on).
+
+    Normally, it is not necessary to create additional instances of this class.
+    However, in situations where control is needed over the exact data sources
+    that are used, instances of this class can be created with a specific
+    distro release file, or a specific os-release file, or without invoking the
+    lsb_release command.
+    """
+
+    def __init__(
+        self,
+        include_lsb: Optional[bool] = None,
+        os_release_file: str = "",
+        distro_release_file: str = "",
+        include_uname: Optional[bool] = None,
+        root_dir: Optional[str] = None,
+        include_oslevel: Optional[bool] = None,
+    ) -> None:
+        """
+        The initialization method of this class gathers information from the
+        available data sources, and stores that in private instance attributes.
+        Subsequent access to the information items uses these private instance
+        attributes, so that the data sources are read only once.
+
+        Parameters:
+
+        * ``include_lsb`` (bool): Controls whether the
+          `lsb_release command output`_ is included as a data source.
+
+          If the lsb_release command is not available in the program execution
+          path, the data source for the lsb_release command will be empty.
+
+        * ``os_release_file`` (string): The path name of the
+          `os-release file`_ that is to be used as a data source.
+
+          An empty string (the default) will cause the default path name to
+          be used (see `os-release file`_ for details).
+
+          If the specified or defaulted os-release file does not exist, the
+          data source for the os-release file will be empty.
+
+        * ``distro_release_file`` (string): The path name of the
+          `distro release file`_ that is to be used as a data source.
+
+          An empty string (the default) will cause a default search algorithm
+          to be used (see `distro release file`_ for details).
+
+          If the specified distro release file does not exist, or if no default
+          distro release file can be found, the data source for the distro
+          release file will be empty.
+
+        * ``include_uname`` (bool): Controls whether uname command output is
+          included as a data source. If the uname command is not available in
+          the program execution path the data source for the uname command will
+          be empty.
+
+        * ``root_dir`` (string): The absolute path to the root directory to use
+          to find distro-related information files. Note that ``include_*``
+          parameters must not be enabled in combination with ``root_dir``.
+
+        * ``include_oslevel`` (bool): Controls whether (AIX) oslevel command
+          output is included as a data source. If the oslevel command is not
+          available in the program execution path the data source will be
+          empty.
+
+        Public instance attributes:
+
+        * ``os_release_file`` (string): The path name of the
+          `os-release file`_ that is actually used as a data source. The
+          empty string if no distro release file is used as a data source.
+
+        * ``distro_release_file`` (string): The path name of the
+          `distro release file`_ that is actually used as a data source. The
+          empty string if no distro release file is used as a data source.
+
+        * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter.
+          This controls whether the lsb information will be loaded.
+
+        * ``include_uname`` (bool): The result of the ``include_uname``
+          parameter. This controls whether the uname information will
+          be loaded.
+
+        * ``include_oslevel`` (bool): The result of the ``include_oslevel``
+          parameter. This controls whether (AIX) oslevel information will be
+          loaded.
+
+        * ``root_dir`` (string): The result of the ``root_dir`` parameter.
+          The absolute path to the root directory to use to find distro-related
+          information files.
+
+        Raises:
+
+        * :py:exc:`ValueError`: Initialization parameters combination is not
+           supported.
+
+        * :py:exc:`OSError`: Some I/O issue with an os-release file or distro
+          release file.
+
+        * :py:exc:`UnicodeError`: A data source has unexpected characters or
+          uses an unexpected encoding.
+        """
+        self.root_dir = root_dir
+        self.etc_dir = os.path.join(root_dir, "etc") if root_dir else _UNIXCONFDIR
+        self.usr_lib_dir = (
+            os.path.join(root_dir, "usr/lib") if root_dir else _UNIXUSRLIBDIR
+        )
+
+        if os_release_file:
+            self.os_release_file = os_release_file
+        else:
+            etc_dir_os_release_file = os.path.join(self.etc_dir, _OS_RELEASE_BASENAME)
+            usr_lib_os_release_file = os.path.join(
+                self.usr_lib_dir, _OS_RELEASE_BASENAME
+            )
+
+            # NOTE: The idea is to respect order **and** have it set
+            #       at all times for API backwards compatibility.
+            if os.path.isfile(etc_dir_os_release_file) or not os.path.isfile(
+                usr_lib_os_release_file
+            ):
+                self.os_release_file = etc_dir_os_release_file
+            else:
+                self.os_release_file = usr_lib_os_release_file
+
+        self.distro_release_file = distro_release_file or ""  # updated later
+
+        is_root_dir_defined = root_dir is not None
+        if is_root_dir_defined and (include_lsb or include_uname or include_oslevel):
+            raise ValueError(
+                "Including subprocess data sources from specific root_dir is disallowed"
+                " to prevent false information"
+            )
+        self.include_lsb = (
+            include_lsb if include_lsb is not None else not is_root_dir_defined
+        )
+        self.include_uname = (
+            include_uname if include_uname is not None else not is_root_dir_defined
+        )
+        self.include_oslevel = (
+            include_oslevel if include_oslevel is not None else not is_root_dir_defined
+        )
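# Illustrative sketch (editorial example, not part of the committed file):
# inspecting a mounted filesystem image instead of the running system. Per the
# constructor above, the subprocess-backed sources (lsb_release, uname, oslevel)
# stay disabled when root_dir is given; "/mnt/guest" is a hypothetical mount point.
dist = LinuxDistribution(root_dir="/mnt/guest")
print(dist.id(), dist.version())   # read from /mnt/guest/etc and /mnt/guest/usr/lib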
+
+    def __repr__(self) -> str:
+        """Return repr of all info"""
+        return (
+            "LinuxDistribution("
+            "os_release_file={self.os_release_file!r}, "
+            "distro_release_file={self.distro_release_file!r}, "
+            "include_lsb={self.include_lsb!r}, "
+            "include_uname={self.include_uname!r}, "
+            "include_oslevel={self.include_oslevel!r}, "
+            "root_dir={self.root_dir!r}, "
+            "_os_release_info={self._os_release_info!r}, "
+            "_lsb_release_info={self._lsb_release_info!r}, "
+            "_distro_release_info={self._distro_release_info!r}, "
+            "_uname_info={self._uname_info!r}, "
+            "_oslevel_info={self._oslevel_info!r})".format(self=self)
+        )
+
+    def linux_distribution(
+        self, full_distribution_name: bool = True
+    ) -> Tuple[str, str, str]:
+        """
+        Return information about the OS distribution that is compatible
+        with Python's :func:`platform.linux_distribution`, supporting a subset
+        of its parameters.
+
+        For details, see :func:`distro.linux_distribution`.
+        """
+        return (
+            self.name() if full_distribution_name else self.id(),
+            self.version(),
+            self._os_release_info.get("release_codename") or self.codename(),
+        )
+
+    def id(self) -> str:
+        """Return the distro ID of the OS distribution, as a string.
+
+        For details, see :func:`distro.id`.
+        """
+
+        def normalize(distro_id: str, table: Dict[str, str]) -> str:
+            distro_id = distro_id.lower().replace(" ", "_")
+            return table.get(distro_id, distro_id)
+
+        distro_id = self.os_release_attr("id")
+        if distro_id:
+            return normalize(distro_id, NORMALIZED_OS_ID)
+
+        distro_id = self.lsb_release_attr("distributor_id")
+        if distro_id:
+            return normalize(distro_id, NORMALIZED_LSB_ID)
+
+        distro_id = self.distro_release_attr("id")
+        if distro_id:
+            return normalize(distro_id, NORMALIZED_DISTRO_ID)
+
+        distro_id = self.uname_attr("id")
+        if distro_id:
+            return normalize(distro_id, NORMALIZED_DISTRO_ID)
+
+        return ""
+
+    def name(self, pretty: bool = False) -> str:
+        """
+        Return the name of the OS distribution, as a string.
+
+        For details, see :func:`distro.name`.
+        """
+        name = (
+            self.os_release_attr("name")
+            or self.lsb_release_attr("distributor_id")
+            or self.distro_release_attr("name")
+            or self.uname_attr("name")
+        )
+        if pretty:
+            name = self.os_release_attr("pretty_name") or self.lsb_release_attr(
+                "description"
+            )
+            if not name:
+                name = self.distro_release_attr("name") or self.uname_attr("name")
+                version = self.version(pretty=True)
+                if version:
+                    name = f"{name} {version}"
+        return name or ""
+
+    def version(self, pretty: bool = False, best: bool = False) -> str:
+        """
+        Return the version of the OS distribution, as a string.
+
+        For details, see :func:`distro.version`.
+        """
+        versions = [
+            self.os_release_attr("version_id"),
+            self.lsb_release_attr("release"),
+            self.distro_release_attr("version_id"),
+            self._parse_distro_release_content(self.os_release_attr("pretty_name")).get(
+                "version_id", ""
+            ),
+            self._parse_distro_release_content(
+                self.lsb_release_attr("description")
+            ).get("version_id", ""),
+            self.uname_attr("release"),
+        ]
+        if self.uname_attr("id").startswith("aix"):
+            # On AIX platforms, prefer oslevel command output.
+            versions.insert(0, self.oslevel_info())
+        elif self.id() == "debian" or "debian" in self.like().split():
+            # On Debian-like, add debian_version file content to candidates list.
+            versions.append(self._debian_version)
+        version = ""
+        if best:
+            # This algorithm uses the last version in priority order that has
+            # the best precision. If the versions are not in conflict, that
+            # does not matter; otherwise, using the last one instead of the
+            # first one might be considered a surprise.
+            for v in versions:
+                if v.count(".") > version.count(".") or version == "":
+                    version = v
+        else:
+            for v in versions:
+                if v != "":
+                    version = v
+                    break
+        if pretty and version and self.codename():
+            version = f"{version} ({self.codename()})"
+        return version
+
+    def version_parts(self, best: bool = False) -> Tuple[str, str, str]:
+        """
+        Return the version of the OS distribution, as a tuple of version
+        numbers.
+
+        For details, see :func:`distro.version_parts`.
+        """
+        version_str = self.version(best=best)
+        if version_str:
+            version_regex = re.compile(r"(\d+)\.?(\d+)?\.?(\d+)?")
+            matches = version_regex.match(version_str)
+            if matches:
+                major, minor, build_number = matches.groups()
+                return major, minor or "", build_number or ""
+        return "", "", ""
+
+    def major_version(self, best: bool = False) -> str:
+        """
+        Return the major version number of the current distribution.
+
+        For details, see :func:`distro.major_version`.
+        """
+        return self.version_parts(best)[0]
+
+    def minor_version(self, best: bool = False) -> str:
+        """
+        Return the minor version number of the current distribution.
+
+        For details, see :func:`distro.minor_version`.
+        """
+        return self.version_parts(best)[1]
+
+    def build_number(self, best: bool = False) -> str:
+        """
+        Return the build number of the current distribution.
+
+        For details, see :func:`distro.build_number`.
+        """
+        return self.version_parts(best)[2]
+
+    def like(self) -> str:
+        """
+        Return the IDs of distributions that are like the OS distribution.
+
+        For details, see :func:`distro.like`.
+        """
+        return self.os_release_attr("id_like") or ""
+
+    def codename(self) -> str:
+        """
+        Return the codename of the OS distribution.
+
+        For details, see :func:`distro.codename`.
+        """
+        try:
+            # Handle os_release specially since distros might purposefully set
+            # this to empty string to have no codename
+            return self._os_release_info["codename"]
+        except KeyError:
+            return (
+                self.lsb_release_attr("codename")
+                or self.distro_release_attr("codename")
+                or ""
+            )
+
+    def info(self, pretty: bool = False, best: bool = False) -> InfoDict:
+        """
+        Return certain machine-readable information about the OS
+        distribution.
+
+        For details, see :func:`distro.info`.
+        """
+        return dict(
+            id=self.id(),
+            version=self.version(pretty, best),
+            version_parts=dict(
+                major=self.major_version(best),
+                minor=self.minor_version(best),
+                build_number=self.build_number(best),
+            ),
+            like=self.like(),
+            codename=self.codename(),
+        )
+
+    def os_release_info(self) -> Dict[str, str]:
+        """
+        Return a dictionary containing key-value pairs for the information
+        items from the os-release file data source of the OS distribution.
+
+        For details, see :func:`distro.os_release_info`.
+        """
+        return self._os_release_info
+
+    def lsb_release_info(self) -> Dict[str, str]:
+        """
+        Return a dictionary containing key-value pairs for the information
+        items from the lsb_release command data source of the OS
+        distribution.
+
+        For details, see :func:`distro.lsb_release_info`.
+        """
+        return self._lsb_release_info
+
+    def distro_release_info(self) -> Dict[str, str]:
+        """
+        Return a dictionary containing key-value pairs for the information
+        items from the distro release file data source of the OS
+        distribution.
+
+        For details, see :func:`distro.distro_release_info`.
+        """
+        return self._distro_release_info
+
+    def uname_info(self) -> Dict[str, str]:
+        """
+        Return a dictionary containing key-value pairs for the information
+        items from the uname command data source of the OS distribution.
+
+        For details, see :func:`distro.uname_info`.
+        """
+        return self._uname_info
+
+    def oslevel_info(self) -> str:
+        """
+        Return AIX' oslevel command output.
+        """
+        return self._oslevel_info
+
+    def os_release_attr(self, attribute: str) -> str:
+        """
+        Return a single named information item from the os-release file data
+        source of the OS distribution.
+
+        For details, see :func:`distro.os_release_attr`.
+        """
+        return self._os_release_info.get(attribute, "")
+
+    def lsb_release_attr(self, attribute: str) -> str:
+        """
+        Return a single named information item from the lsb_release command
+        output data source of the OS distribution.
+
+        For details, see :func:`distro.lsb_release_attr`.
+        """
+        return self._lsb_release_info.get(attribute, "")
+
+    def distro_release_attr(self, attribute: str) -> str:
+        """
+        Return a single named information item from the distro release file
+        data source of the OS distribution.
+
+        For details, see :func:`distro.distro_release_attr`.
+        """
+        return self._distro_release_info.get(attribute, "")
+
+    def uname_attr(self, attribute: str) -> str:
+        """
+        Return a single named information item from the uname command
+        output data source of the OS distribution.
+
+        For details, see :func:`distro.uname_attr`.
+        """
+        return self._uname_info.get(attribute, "")
+
+    @cached_property
+    def _os_release_info(self) -> Dict[str, str]:
+        """
+        Get the information items from the specified os-release file.
+
+        Returns:
+            A dictionary containing all information items.
+        """
+        if os.path.isfile(self.os_release_file):
+            with open(self.os_release_file, encoding="utf-8") as release_file:
+                return self._parse_os_release_content(release_file)
+        return {}
+
+    @staticmethod
+    def _parse_os_release_content(lines: TextIO) -> Dict[str, str]:
+        """
+        Parse the lines of an os-release file.
+
+        Parameters:
+
+        * lines: Iterable through the lines in the os-release file.
+                 Each line must be a unicode string or a UTF-8 encoded byte
+                 string.
+
+        Returns:
+            A dictionary containing all information items.
+        """
+        props = {}
+        lexer = shlex.shlex(lines, posix=True)
+        lexer.whitespace_split = True
+
+        tokens = list(lexer)
+        for token in tokens:
+            # At this point, all shell-like parsing has been done (i.e.
+            # comments processed, quotes and backslash escape sequences
+            # processed, multi-line values assembled, trailing newlines
+            # stripped, etc.), so the tokens are now either:
+            # * variable assignments: var=value
+            # * commands or their arguments (not allowed in os-release)
+            # Ignore any tokens that are not variable assignments
+            if "=" in token:
+                k, v = token.split("=", 1)
+                props[k.lower()] = v
+
+        if "version" in props:
+            # extract release codename (if any) from version attribute
+            match = re.search(r"\((\D+)\)|,\s*(\D+)", props["version"])
+            if match:
+                release_codename = match.group(1) or match.group(2)
+                props["codename"] = props["release_codename"] = release_codename
+
+        if "version_codename" in props:
+            # os-release added a version_codename field.  Use that in
+            # preference to anything else Note that some distros purposefully
+            # do not have code names.  They should be setting
+            # version_codename=""
+            props["codename"] = props["version_codename"]
+        elif "ubuntu_codename" in props:
+            # Same as above but a non-standard field name used on older Ubuntus
+            props["codename"] = props["ubuntu_codename"]
+
+        return props
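# Illustrative sketch (editorial example, not part of the committed file):
# feeding a made-up os-release snippet through the parser above. shlex handles
# the quoting, and the VERSION value yields "codename"/"release_codename" keys.
import io

sample = io.StringIO('NAME="Ubuntu"\nVERSION="22.04.3 LTS (Jammy Jellyfish)"\nID=ubuntu\n')
props = LinuxDistribution._parse_os_release_content(sample)
print(props["id"], props["codename"])   # ubuntu Jammy Jellyfish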
+
+    @cached_property
+    def _lsb_release_info(self) -> Dict[str, str]:
+        """
+        Get the information items from the lsb_release command output.
+
+        Returns:
+            A dictionary containing all information items.
+        """
+        if not self.include_lsb:
+            return {}
+        try:
+            cmd = ("lsb_release", "-a")
+            stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
+        # Command not found or lsb_release returned error
+        except (OSError, subprocess.CalledProcessError):
+            return {}
+        content = self._to_str(stdout).splitlines()
+        return self._parse_lsb_release_content(content)
+
+    @staticmethod
+    def _parse_lsb_release_content(lines: Iterable[str]) -> Dict[str, str]:
+        """
+        Parse the output of the lsb_release command.
+
+        Parameters:
+
+        * lines: Iterable through the lines of the lsb_release output.
+                 Each line must be a unicode string or a UTF-8 encoded byte
+                 string.
+
+        Returns:
+            A dictionary containing all information items.
+        """
+        props = {}
+        for line in lines:
+            kv = line.strip("\n").split(":", 1)
+            if len(kv) != 2:
+                # Ignore lines without colon.
+                continue
+            k, v = kv
+            props.update({k.replace(" ", "_").lower(): v.strip()})
+        return props
+
+    @cached_property
+    def _uname_info(self) -> Dict[str, str]:
+        if not self.include_uname:
+            return {}
+        try:
+            cmd = ("uname", "-rs")
+            stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
+        except OSError:
+            return {}
+        content = self._to_str(stdout).splitlines()
+        return self._parse_uname_content(content)
+
+    @cached_property
+    def _oslevel_info(self) -> str:
+        if not self.include_oslevel:
+            return ""
+        try:
+            stdout = subprocess.check_output("oslevel", stderr=subprocess.DEVNULL)
+        except (OSError, subprocess.CalledProcessError):
+            return ""
+        return self._to_str(stdout).strip()
+
+    @cached_property
+    def _debian_version(self) -> str:
+        try:
+            with open(
+                os.path.join(self.etc_dir, "debian_version"), encoding="ascii"
+            ) as fp:
+                return fp.readline().rstrip()
+        except FileNotFoundError:
+            return ""
+
+    @staticmethod
+    def _parse_uname_content(lines: Sequence[str]) -> Dict[str, str]:
+        if not lines:
+            return {}
+        props = {}
+        match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip())
+        if match:
+            name, version = match.groups()
+
+            # This is to prevent the Linux kernel version from
+            # appearing as the 'best' version on otherwise
+            # identifiable distributions.
+            if name == "Linux":
+                return {}
+            props["id"] = name.lower()
+            props["name"] = name
+            props["release"] = version
+        return props
+
+    @staticmethod
+    def _to_str(bytestring: bytes) -> str:
+        encoding = sys.getfilesystemencoding()
+        return bytestring.decode(encoding)
+
+    @cached_property
+    def _distro_release_info(self) -> Dict[str, str]:
+        """
+        Get the information items from the specified distro release file.
+
+        Returns:
+            A dictionary containing all information items.
+        """
+        if self.distro_release_file:
+            # If it was specified, we use it and parse what we can, even if
+            # its file name or content does not match the expected pattern.
+            distro_info = self._parse_distro_release_file(self.distro_release_file)
+            basename = os.path.basename(self.distro_release_file)
+            # The file name pattern for user-specified distro release files
+            # is somewhat more tolerant (compared to when searching for the
+            # file), because we want to use what was specified as best as
+            # possible.
+            match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
+        else:
+            try:
+                basenames = [
+                    basename
+                    for basename in os.listdir(self.etc_dir)
+                    if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
+                    and os.path.isfile(os.path.join(self.etc_dir, basename))
+                ]
+                # We sort for repeatability in cases where there are multiple
+                # distro specific files; e.g. CentOS, Oracle, Enterprise all
+                # containing `redhat-release` on top of their own.
+                basenames.sort()
+            except OSError:
+                # This may occur when /etc is not readable but we can't be
+                # sure about the *-release files. Check common entries of
+                # /etc for information. If they turn out to not be there the
+                # error is handled in `_parse_distro_release_file()`.
+                basenames = _DISTRO_RELEASE_BASENAMES
+            for basename in basenames:
+                match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
+                if match is None:
+                    continue
+                filepath = os.path.join(self.etc_dir, basename)
+                distro_info = self._parse_distro_release_file(filepath)
+                # The name is always present if the pattern matches.
+                if "name" not in distro_info:
+                    continue
+                self.distro_release_file = filepath
+                break
+            else:  # the loop didn't "break": no candidate.
+                return {}
+
+        if match is not None:
+            distro_info["id"] = match.group(1)
+
+        # CloudLinux < 7: manually enrich info with proper id.
+        if "cloudlinux" in distro_info.get("name", "").lower():
+            distro_info["id"] = "cloudlinux"
+
+        return distro_info
+
+    def _parse_distro_release_file(self, filepath: str) -> Dict[str, str]:
+        """
+        Parse a distro release file.
+
+        Parameters:
+
+        * filepath: Path name of the distro release file.
+
+        Returns:
+            A dictionary containing all information items.
+        """
+        try:
+            with open(filepath, encoding="utf-8") as fp:
+                # Only parse the first line. For instance, on SLES there
+                # are multiple lines. We don't want them...
+                return self._parse_distro_release_content(fp.readline())
+        except OSError:
+            # Ignore not being able to read a specific, seemingly version
+            # related file.
+            # See https://github.com/python-distro/distro/issues/162
+            return {}
+
+    @staticmethod
+    def _parse_distro_release_content(line: str) -> Dict[str, str]:
+        """
+        Parse a line from a distro release file.
+
+        Parameters:
+        * line: Line from the distro release file. Must be a unicode string
+                or a UTF-8 encoded byte string.
+
+        Returns:
+            A dictionary containing all information items.
+        """
+        matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1])
+        distro_info = {}
+        if matches:
+            # regexp ensures non-None
+            distro_info["name"] = matches.group(3)[::-1]
+            if matches.group(2):
+                distro_info["version_id"] = matches.group(2)[::-1]
+            if matches.group(1):
+                distro_info["codename"] = matches.group(1)[::-1]
+        elif line:
+            distro_info["name"] = line.strip()
+        return distro_info
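# Illustrative sketch (editorial example, not part of the committed file): the
# reversed-pattern trick above parses a typical distro release line from right
# to left, so version and codename are found even though the name contains spaces.
line = "CentOS Linux release 7.1.1503 (Core)"
print(LinuxDistribution._parse_distro_release_content(line))
# {'name': 'CentOS Linux', 'version_id': '7.1.1503', 'codename': 'Core'}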
+
+
+_distro = LinuxDistribution()
+
+
+def main() -> None:
+    logger = logging.getLogger(__name__)
+    logger.setLevel(logging.DEBUG)
+    logger.addHandler(logging.StreamHandler(sys.stdout))
+
+    parser = argparse.ArgumentParser(description="OS distro info tool")
+    parser.add_argument(
+        "--json", "-j", help="Output in machine readable format", action="store_true"
+    )
+
+    parser.add_argument(
+        "--root-dir",
+        "-r",
+        type=str,
+        dest="root_dir",
+        help="Path to the root filesystem directory (defaults to /)",
+    )
+
+    args = parser.parse_args()
+
+    if args.root_dir:
+        dist = LinuxDistribution(
+            include_lsb=False,
+            include_uname=False,
+            include_oslevel=False,
+            root_dir=args.root_dir,
+        )
+    else:
+        dist = _distro
+
+    if args.json:
+        logger.info(json.dumps(dist.info(), indent=4, sort_keys=True))
+    else:
+        logger.info("Name: %s", dist.name(pretty=True))
+        distribution_version = dist.version(pretty=True)
+        logger.info("Version: %s", distribution_version)
+        distribution_codename = dist.codename()
+        logger.info("Codename: %s", distribution_codename)
+
+
+if __name__ == "__main__":
+    main()
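# Illustrative sketch (editorial note, not part of the committed file): the
# command-line entry point above can be exercised directly; output is
# system-dependent.
#
#     python distro.py                         # human-readable Name / Version / Codename
#     python distro.py --json                  # machine-readable info() dictionary
#     python distro.py -r /mnt/guest --json    # inspect a mounted root filesystem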
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/idna/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/idna/__init__.py
new file mode 100644
index 0000000..a40eeaf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/idna/__init__.py
@@ -0,0 +1,44 @@
+from .package_data import __version__
+from .core import (
+    IDNABidiError,
+    IDNAError,
+    InvalidCodepoint,
+    InvalidCodepointContext,
+    alabel,
+    check_bidi,
+    check_hyphen_ok,
+    check_initial_combiner,
+    check_label,
+    check_nfc,
+    decode,
+    encode,
+    ulabel,
+    uts46_remap,
+    valid_contextj,
+    valid_contexto,
+    valid_label_length,
+    valid_string_length,
+)
+from .intranges import intranges_contain
+
+__all__ = [
+    "IDNABidiError",
+    "IDNAError",
+    "InvalidCodepoint",
+    "InvalidCodepointContext",
+    "alabel",
+    "check_bidi",
+    "check_hyphen_ok",
+    "check_initial_combiner",
+    "check_label",
+    "check_nfc",
+    "decode",
+    "encode",
+    "intranges_contain",
+    "ulabel",
+    "uts46_remap",
+    "valid_contextj",
+    "valid_contexto",
+    "valid_label_length",
+    "valid_string_length",
+]
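# Illustrative sketch (editorial example, not part of the committed file): a
# basic IDNA 2008 encode/decode round trip. Inside pip the package lives under
# pip._vendor; the standalone `idna` distribution exposes the same functions.
from pip._vendor import idna

print(idna.encode("ドメイン.テスト"))            # b'xn--eckwd4c7c.xn--zckzah'
print(idna.decode(b"xn--eckwd4c7c.xn--zckzah"))  # 'ドメイン.テスト'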
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/idna/codec.py b/.venv/lib/python3.12/site-packages/pip/_vendor/idna/codec.py
new file mode 100644
index 0000000..1ca9ba6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/idna/codec.py
@@ -0,0 +1,112 @@
+from .core import encode, decode, alabel, ulabel, IDNAError
+import codecs
+import re
+from typing import Tuple, Optional
+
+_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]')
+
+class Codec(codecs.Codec):
+
+    def encode(self, data: str, errors: str = 'strict') -> Tuple[bytes, int]:
+        if errors != 'strict':
+            raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
+
+        if not data:
+            return b"", 0
+
+        return encode(data), len(data)
+
+    def decode(self, data: bytes, errors: str = 'strict') -> Tuple[str, int]:
+        if errors != 'strict':
+            raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
+
+        if not data:
+            return '', 0
+
+        return decode(data), len(data)
+
+class IncrementalEncoder(codecs.BufferedIncrementalEncoder):
+    def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[str, int]:  # type: ignore
+        if errors != 'strict':
+            raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
+
+        if not data:
+            return "", 0
+
+        labels = _unicode_dots_re.split(data)
+        trailing_dot = ''
+        if labels:
+            if not labels[-1]:
+                trailing_dot = '.'
+                del labels[-1]
+            elif not final:
+                # Keep potentially unfinished label until the next call
+                del labels[-1]
+                if labels:
+                    trailing_dot = '.'
+
+        result = []
+        size = 0
+        for label in labels:
+            result.append(alabel(label))
+            if size:
+                size += 1
+            size += len(label)
+
+        # Join with U+002E
+        result_str = '.'.join(result) + trailing_dot  # type: ignore
+        size += len(trailing_dot)
+        return result_str, size
+
+class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
+    def _buffer_decode(self, data: str, errors: str, final: bool) -> Tuple[str, int]:  # type: ignore
+        if errors != 'strict':
+            raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
+
+        if not data:
+            return ('', 0)
+
+        labels = _unicode_dots_re.split(data)
+        trailing_dot = ''
+        if labels:
+            if not labels[-1]:
+                trailing_dot = '.'
+                del labels[-1]
+            elif not final:
+                # Keep potentially unfinished label until the next call
+                del labels[-1]
+                if labels:
+                    trailing_dot = '.'
+
+        result = []
+        size = 0
+        for label in labels:
+            result.append(ulabel(label))
+            if size:
+                size += 1
+            size += len(label)
+
+        result_str = '.'.join(result) + trailing_dot
+        size += len(trailing_dot)
+        return (result_str, size)
+
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    pass
+
+
+class StreamReader(Codec, codecs.StreamReader):
+    pass
+
+
+def getregentry() -> codecs.CodecInfo:
+    # Compatibility as a search_function for codecs.register()
+    return codecs.CodecInfo(
+        name='idna',
+        encode=Codec().encode,  # type: ignore
+        decode=Codec().decode,  # type: ignore
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamwriter=StreamWriter,
+        streamreader=StreamReader,
+    )
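# Illustrative sketch (editorial example, not part of the committed file): the
# Codec class above wraps encode()/decode() in the (value, length-consumed)
# protocol that the codecs machinery expects. Import path assumes the vendored
# copy inside pip.
from pip._vendor.idna.codec import Codec

print(Codec().encode("python.org"))    # (b'python.org', 10)
print(Codec().decode(b"python.org"))   # ('python.org', 10)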
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/idna/compat.py b/.venv/lib/python3.12/site-packages/pip/_vendor/idna/compat.py
new file mode 100644
index 0000000..786e6bd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/idna/compat.py
@@ -0,0 +1,13 @@
+from .core import *
+from .codec import *
+from typing import Any, Union
+
+def ToASCII(label: str) -> bytes:
+    return encode(label)
+
+def ToUnicode(label: Union[bytes, bytearray]) -> str:
+    return decode(label)
+
+def nameprep(s: Any) -> None:
+    raise NotImplementedError('IDNA 2008 does not utilise nameprep protocol')
+
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/idna/core.py b/.venv/lib/python3.12/site-packages/pip/_vendor/idna/core.py
new file mode 100644
index 0000000..aea17ac
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/idna/core.py
@@ -0,0 +1,400 @@
+from . import idnadata
+import bisect
+import unicodedata
+import re
+from typing import Union, Optional
+from .intranges import intranges_contain
+
+_virama_combining_class = 9
+_alabel_prefix = b'xn--'
+_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]')
+
+class IDNAError(UnicodeError):
+    """ Base exception for all IDNA-encoding related problems """
+    pass
+
+
+class IDNABidiError(IDNAError):
+    """ Exception when bidirectional requirements are not satisfied """
+    pass
+
+
+class InvalidCodepoint(IDNAError):
+    """ Exception when a disallowed or unallocated codepoint is used """
+    pass
+
+
+class InvalidCodepointContext(IDNAError):
+    """ Exception when the codepoint is not valid in the context it is used """
+    pass
+
+
+def _combining_class(cp: int) -> int:
+    v = unicodedata.combining(chr(cp))
+    if v == 0:
+        if not unicodedata.name(chr(cp)):
+            raise ValueError('Unknown character in unicodedata')
+    return v
+
+def _is_script(cp: str, script: str) -> bool:
+    return intranges_contain(ord(cp), idnadata.scripts[script])
+
+def _punycode(s: str) -> bytes:
+    return s.encode('punycode')
+
+def _unot(s: int) -> str:
+    return 'U+{:04X}'.format(s)
+
+
+def valid_label_length(label: Union[bytes, str]) -> bool:
+    if len(label) > 63:
+        return False
+    return True
+
+
+def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool:
+    if len(label) > (254 if trailing_dot else 253):
+        return False
+    return True
+
+
+def check_bidi(label: str, check_ltr: bool = False) -> bool:
+    # Bidi rules should only be applied if string contains RTL characters
+    bidi_label = False
+    for (idx, cp) in enumerate(label, 1):
+        direction = unicodedata.bidirectional(cp)
+        if direction == '':
+            # String likely comes from a newer version of Unicode
+            raise IDNABidiError('Unknown directionality in label {} at position {}'.format(repr(label), idx))
+        if direction in ['R', 'AL', 'AN']:
+            bidi_label = True
+    if not bidi_label and not check_ltr:
+        return True
+
+    # Bidi rule 1
+    direction = unicodedata.bidirectional(label[0])
+    if direction in ['R', 'AL']:
+        rtl = True
+    elif direction == 'L':
+        rtl = False
+    else:
+        raise IDNABidiError('First codepoint in label {} must be directionality L, R or AL'.format(repr(label)))
+
+    valid_ending = False
+    number_type = None  # type: Optional[str]
+    for (idx, cp) in enumerate(label, 1):
+        direction = unicodedata.bidirectional(cp)
+
+        if rtl:
+            # Bidi rule 2
+            if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
+                raise IDNABidiError('Invalid direction for codepoint at position {} in a right-to-left label'.format(idx))
+            # Bidi rule 3
+            if direction in ['R', 'AL', 'EN', 'AN']:
+                valid_ending = True
+            elif direction != 'NSM':
+                valid_ending = False
+            # Bidi rule 4
+            if direction in ['AN', 'EN']:
+                if not number_type:
+                    number_type = direction
+                else:
+                    if number_type != direction:
+                        raise IDNABidiError('Can not mix numeral types in a right-to-left label')
+        else:
+            # Bidi rule 5
+            if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
+                raise IDNABidiError('Invalid direction for codepoint at position {} in a left-to-right label'.format(idx))
+            # Bidi rule 6
+            if direction in ['L', 'EN']:
+                valid_ending = True
+            elif direction != 'NSM':
+                valid_ending = False
+
+    if not valid_ending:
+        raise IDNABidiError('Label ends with illegal codepoint directionality')
+
+    return True
+
+
+def check_initial_combiner(label: str) -> bool:
+    if unicodedata.category(label[0])[0] == 'M':
+        raise IDNAError('Label begins with an illegal combining character')
+    return True
+
+
+def check_hyphen_ok(label: str) -> bool:
+    if label[2:4] == '--':
+        raise IDNAError('Label has disallowed hyphens in 3rd and 4th position')
+    if label[0] == '-' or label[-1] == '-':
+        raise IDNAError('Label must not start or end with a hyphen')
+    return True
+
+
+def check_nfc(label: str) -> None:
+    if unicodedata.normalize('NFC', label) != label:
+        raise IDNAError('Label must be in Normalization Form C')
+
+
+def valid_contextj(label: str, pos: int) -> bool:
+    cp_value = ord(label[pos])
+
+    if cp_value == 0x200c:
+
+        if pos > 0:
+            if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
+                return True
+
+        ok = False
+        for i in range(pos-1, -1, -1):
+            joining_type = idnadata.joining_types.get(ord(label[i]))
+            if joining_type == ord('T'):
+                continue
+            elif joining_type in [ord('L'), ord('D')]:
+                ok = True
+                break
+            else:
+                break
+
+        if not ok:
+            return False
+
+        ok = False
+        for i in range(pos+1, len(label)):
+            joining_type = idnadata.joining_types.get(ord(label[i]))
+            if joining_type == ord('T'):
+                continue
+            elif joining_type in [ord('R'), ord('D')]:
+                ok = True
+                break
+            else:
+                break
+        return ok
+
+    if cp_value == 0x200d:
+
+        if pos > 0:
+            if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
+                return True
+        return False
+
+    else:
+
+        return False
+
+
+def valid_contexto(label: str, pos: int, exception: bool = False) -> bool:
+    cp_value = ord(label[pos])
+
+    if cp_value == 0x00b7:
+        if 0 < pos < len(label)-1:
+            if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c:
+                return True
+        return False
+
+    elif cp_value == 0x0375:
+        if pos < len(label)-1 and len(label) > 1:
+            return _is_script(label[pos + 1], 'Greek')
+        return False
+
+    elif cp_value == 0x05f3 or cp_value == 0x05f4:
+        if pos > 0:
+            return _is_script(label[pos - 1], 'Hebrew')
+        return False
+
+    elif cp_value == 0x30fb:
+        for cp in label:
+            if cp == '\u30fb':
+                continue
+            if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'):
+                return True
+        return False
+
+    elif 0x660 <= cp_value <= 0x669:
+        for cp in label:
+            if 0x6f0 <= ord(cp) <= 0x06f9:
+                return False
+        return True
+
+    elif 0x6f0 <= cp_value <= 0x6f9:
+        for cp in label:
+            if 0x660 <= ord(cp) <= 0x0669:
+                return False
+        return True
+
+    return False
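+# Added note: valid_contextj and valid_contexto above implement the CONTEXTJ
+# and CONTEXTO rules of RFC 5892 Appendix A (ZERO WIDTH NON-JOINER/JOINER,
+# MIDDLE DOT, GREEK LOWER NUMERAL SIGN, Hebrew GERESH/GERSHAYIM, KATAKANA
+# MIDDLE DOT, and the Arabic-Indic digit sets); not part of the upstream
+# source.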
+
+
+def check_label(label: Union[str, bytes, bytearray]) -> None:
+    if isinstance(label, (bytes, bytearray)):
+        label = label.decode('utf-8')
+    if len(label) == 0:
+        raise IDNAError('Empty Label')
+
+    check_nfc(label)
+    check_hyphen_ok(label)
+    check_initial_combiner(label)
+
+    for (pos, cp) in enumerate(label):
+        cp_value = ord(cp)
+        if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']):
+            continue
+        elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']):
+            if not valid_contextj(label, pos):
+                raise InvalidCodepointContext('Joiner {} not allowed at position {} in {}'.format(
+                    _unot(cp_value), pos+1, repr(label)))
+        elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']):
+            if not valid_contexto(label, pos):
+                raise InvalidCodepointContext('Codepoint {} not allowed at position {} in {}'.format(_unot(cp_value), pos+1, repr(label)))
+        else:
+            raise InvalidCodepoint('Codepoint {} at position {} of {} not allowed'.format(_unot(cp_value), pos+1, repr(label)))
+
+    check_bidi(label)
+
+
+def alabel(label: str) -> bytes:
+    try:
+        label_bytes = label.encode('ascii')
+        ulabel(label_bytes)
+        if not valid_label_length(label_bytes):
+            raise IDNAError('Label too long')
+        return label_bytes
+    except UnicodeEncodeError:
+        pass
+
+    if not label:
+        raise IDNAError('No Input')
+
+    label = str(label)
+    check_label(label)
+    label_bytes = _punycode(label)
+    label_bytes = _alabel_prefix + label_bytes
+
+    if not valid_label_length(label_bytes):
+        raise IDNAError('Label too long')
+
+    return label_bytes
+
+
+def ulabel(label: Union[str, bytes, bytearray]) -> str:
+    if not isinstance(label, (bytes, bytearray)):
+        try:
+            label_bytes = label.encode('ascii')
+        except UnicodeEncodeError:
+            check_label(label)
+            return label
+    else:
+        label_bytes = label
+
+    label_bytes = label_bytes.lower()
+    if label_bytes.startswith(_alabel_prefix):
+        label_bytes = label_bytes[len(_alabel_prefix):]
+        if not label_bytes:
+            raise IDNAError('Malformed A-label, no Punycode eligible content found')
+        if label_bytes.decode('ascii')[-1] == '-':
+            raise IDNAError('A-label must not end with a hyphen')
+    else:
+        check_label(label_bytes)
+        return label_bytes.decode('ascii')
+
+    try:
+        label = label_bytes.decode('punycode')
+    except UnicodeError:
+        raise IDNAError('Invalid A-label')
+    check_label(label)
+    return label
+
+
+def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str:
+    """Re-map the characters in the string according to UTS46 processing."""
+    from .uts46data import uts46data
+    output = ''
+
+    for pos, char in enumerate(domain):
+        code_point = ord(char)
+        try:
+            uts46row = uts46data[code_point if code_point < 256 else
+                bisect.bisect_left(uts46data, (code_point, 'Z')) - 1]
+            status = uts46row[1]
+            replacement = None  # type: Optional[str]
+            if len(uts46row) == 3:
+                replacement = uts46row[2]  # type: ignore
+            if (status == 'V' or
+                    (status == 'D' and not transitional) or
+                    (status == '3' and not std3_rules and replacement is None)):
+                output += char
+            elif replacement is not None and (status == 'M' or
+                    (status == '3' and not std3_rules) or
+                    (status == 'D' and transitional)):
+                output += replacement
+            elif status != 'I':
+                raise IndexError()
+        except IndexError:
+            raise InvalidCodepoint(
+                'Codepoint {} not allowed at position {} in {}'.format(
+                _unot(code_point), pos + 1, repr(domain)))
+
+    return unicodedata.normalize('NFC', output)
+
+
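+# Added note on uts46_remap above: the single-letter statuses follow UTS #46 --
+# 'V' valid, 'M' mapped, 'D' deviation, 'I' ignored, '3' disallowed_STD3
+# (valid or mapped); any other status falls through and is reported via
+# InvalidCodepoint. This comment is a reading aid, not part of the upstream
+# source.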
+def encode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False, transitional: bool = False) -> bytes:
+    if isinstance(s, (bytes, bytearray)):
+        try:
+            s = s.decode('ascii')
+        except UnicodeDecodeError:
+            raise IDNAError('should pass a unicode string to the function rather than a byte string.')
+    if uts46:
+        s = uts46_remap(s, std3_rules, transitional)
+    trailing_dot = False
+    result = []
+    if strict:
+        labels = s.split('.')
+    else:
+        labels = _unicode_dots_re.split(s)
+    if not labels or labels == ['']:
+        raise IDNAError('Empty domain')
+    if labels[-1] == '':
+        del labels[-1]
+        trailing_dot = True
+    for label in labels:
+        s = alabel(label)
+        if s:
+            result.append(s)
+        else:
+            raise IDNAError('Empty label')
+    if trailing_dot:
+        result.append(b'')
+    s = b'.'.join(result)
+    if not valid_string_length(s, trailing_dot):
+        raise IDNAError('Domain too long')
+    return s
+
+
+def decode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False) -> str:
+    try:
+        if isinstance(s, (bytes, bytearray)):
+            s = s.decode('ascii')
+    except UnicodeDecodeError:
+        raise IDNAError('Invalid ASCII in A-label')
+    if uts46:
+        s = uts46_remap(s, std3_rules, False)
+    trailing_dot = False
+    result = []
+    if not strict:
+        labels = _unicode_dots_re.split(s)
+    else:
+        labels = s.split('.')
+    if not labels or labels == ['']:
+        raise IDNAError('Empty domain')
+    if not labels[-1]:
+        del labels[-1]
+        trailing_dot = True
+    for label in labels:
+        s = ulabel(label)
+        if s:
+            result.append(s)
+        else:
+            raise IDNAError('Empty label')
+    if trailing_dot:
+        result.append('')
+    return '.'.join(result)
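+
+
+# Illustrative usage sketch (added note, assuming this vendored copy behaves
+# like the public `idna` package; values are the package's well-known example
+# domain):
+#
+#     encode('ドメイン.テスト')           -> b'xn--eckwd4c7c.xn--zckzah'
+#     decode('xn--eckwd4c7c.xn--zckzah')  -> 'ドメイン.テスト'
+#     alabel('テスト')                    -> b'xn--zckzah'
+#     ulabel(b'xn--zckzah')               -> 'テスト'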
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/idna/idnadata.py b/.venv/lib/python3.12/site-packages/pip/_vendor/idna/idnadata.py
new file mode 100644
index 0000000..5b5e02a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/idna/idnadata.py
@@ -0,0 +1,4246 @@
+# This file is automatically generated by tools/idna-data
+
+__version__ = '15.0.0'
+scripts = {
+    'Greek': (
+        0x37000000374,
+        0x37500000378,
+        0x37a0000037e,
+        0x37f00000380,
+        0x38400000385,
+        0x38600000387,
+        0x3880000038b,
+        0x38c0000038d,
+        0x38e000003a2,
+        0x3a3000003e2,
+        0x3f000000400,
+        0x1d2600001d2b,
+        0x1d5d00001d62,
+        0x1d6600001d6b,
+        0x1dbf00001dc0,
+        0x1f0000001f16,
+        0x1f1800001f1e,
+        0x1f2000001f46,
+        0x1f4800001f4e,
+        0x1f5000001f58,
+        0x1f5900001f5a,
+        0x1f5b00001f5c,
+        0x1f5d00001f5e,
+        0x1f5f00001f7e,
+        0x1f8000001fb5,
+        0x1fb600001fc5,
+        0x1fc600001fd4,
+        0x1fd600001fdc,
+        0x1fdd00001ff0,
+        0x1ff200001ff5,
+        0x1ff600001fff,
+        0x212600002127,
+        0xab650000ab66,
+        0x101400001018f,
+        0x101a0000101a1,
+        0x1d2000001d246,
+    ),
+    'Han': (
+        0x2e8000002e9a,
+        0x2e9b00002ef4,
+        0x2f0000002fd6,
+        0x300500003006,
+        0x300700003008,
+        0x30210000302a,
+        0x30380000303c,
+        0x340000004dc0,
+        0x4e000000a000,
+        0xf9000000fa6e,
+        0xfa700000fada,
+        0x16fe200016fe4,
+        0x16ff000016ff2,
+        0x200000002a6e0,
+        0x2a7000002b73a,
+        0x2b7400002b81e,
+        0x2b8200002cea2,
+        0x2ceb00002ebe1,
+        0x2f8000002fa1e,
+        0x300000003134b,
+        0x31350000323b0,
+    ),
+    'Hebrew': (
+        0x591000005c8,
+        0x5d0000005eb,
+        0x5ef000005f5,
+        0xfb1d0000fb37,
+        0xfb380000fb3d,
+        0xfb3e0000fb3f,
+        0xfb400000fb42,
+        0xfb430000fb45,
+        0xfb460000fb50,
+    ),
+    'Hiragana': (
+        0x304100003097,
+        0x309d000030a0,
+        0x1b0010001b120,
+        0x1b1320001b133,
+        0x1b1500001b153,
+        0x1f2000001f201,
+    ),
+    'Katakana': (
+        0x30a1000030fb,
+        0x30fd00003100,
+        0x31f000003200,
+        0x32d0000032ff,
+        0x330000003358,
+        0xff660000ff70,
+        0xff710000ff9e,
+        0x1aff00001aff4,
+        0x1aff50001affc,
+        0x1affd0001afff,
+        0x1b0000001b001,
+        0x1b1200001b123,
+        0x1b1550001b156,
+        0x1b1640001b168,
+    ),
+}
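+# Added note (reading aid, not part of the generated data): each script entry
+# above appears to pack a half-open codepoint range into one integer as
+# (start << 32) | end, the convention used by the idna intranges helpers, so
+# 0x37000000374 covers U+0370 through U+0373. A minimal decoder sketch under
+# that assumption:
+#
+#     def unpack_range(packed: int) -> tuple[int, int]:
+#         """Return (start, end_exclusive) for a packed codepoint range."""
+#         return packed >> 32, packed & 0xFFFFFFFF
+#
+# The joining_types values below are the ASCII codes of the joining classes
+# that valid_contextj compares against: 68 = ord('D'), 82 = ord('R'),
+# 76 = ord('L'), 84 = ord('T'), and 67 = ord('C') for join-causing characters.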
+joining_types = {
+    0xad: 84,
+    0x300: 84,
+    0x301: 84,
+    0x302: 84,
+    0x303: 84,
+    0x304: 84,
+    0x305: 84,
+    0x306: 84,
+    0x307: 84,
+    0x308: 84,
+    0x309: 84,
+    0x30a: 84,
+    0x30b: 84,
+    0x30c: 84,
+    0x30d: 84,
+    0x30e: 84,
+    0x30f: 84,
+    0x310: 84,
+    0x311: 84,
+    0x312: 84,
+    0x313: 84,
+    0x314: 84,
+    0x315: 84,
+    0x316: 84,
+    0x317: 84,
+    0x318: 84,
+    0x319: 84,
+    0x31a: 84,
+    0x31b: 84,
+    0x31c: 84,
+    0x31d: 84,
+    0x31e: 84,
+    0x31f: 84,
+    0x320: 84,
+    0x321: 84,
+    0x322: 84,
+    0x323: 84,
+    0x324: 84,
+    0x325: 84,
+    0x326: 84,
+    0x327: 84,
+    0x328: 84,
+    0x329: 84,
+    0x32a: 84,
+    0x32b: 84,
+    0x32c: 84,
+    0x32d: 84,
+    0x32e: 84,
+    0x32f: 84,
+    0x330: 84,
+    0x331: 84,
+    0x332: 84,
+    0x333: 84,
+    0x334: 84,
+    0x335: 84,
+    0x336: 84,
+    0x337: 84,
+    0x338: 84,
+    0x339: 84,
+    0x33a: 84,
+    0x33b: 84,
+    0x33c: 84,
+    0x33d: 84,
+    0x33e: 84,
+    0x33f: 84,
+    0x340: 84,
+    0x341: 84,
+    0x342: 84,
+    0x343: 84,
+    0x344: 84,
+    0x345: 84,
+    0x346: 84,
+    0x347: 84,
+    0x348: 84,
+    0x349: 84,
+    0x34a: 84,
+    0x34b: 84,
+    0x34c: 84,
+    0x34d: 84,
+    0x34e: 84,
+    0x34f: 84,
+    0x350: 84,
+    0x351: 84,
+    0x352: 84,
+    0x353: 84,
+    0x354: 84,
+    0x355: 84,
+    0x356: 84,
+    0x357: 84,
+    0x358: 84,
+    0x359: 84,
+    0x35a: 84,
+    0x35b: 84,
+    0x35c: 84,
+    0x35d: 84,
+    0x35e: 84,
+    0x35f: 84,
+    0x360: 84,
+    0x361: 84,
+    0x362: 84,
+    0x363: 84,
+    0x364: 84,
+    0x365: 84,
+    0x366: 84,
+    0x367: 84,
+    0x368: 84,
+    0x369: 84,
+    0x36a: 84,
+    0x36b: 84,
+    0x36c: 84,
+    0x36d: 84,
+    0x36e: 84,
+    0x36f: 84,
+    0x483: 84,
+    0x484: 84,
+    0x485: 84,
+    0x486: 84,
+    0x487: 84,
+    0x488: 84,
+    0x489: 84,
+    0x591: 84,
+    0x592: 84,
+    0x593: 84,
+    0x594: 84,
+    0x595: 84,
+    0x596: 84,
+    0x597: 84,
+    0x598: 84,
+    0x599: 84,
+    0x59a: 84,
+    0x59b: 84,
+    0x59c: 84,
+    0x59d: 84,
+    0x59e: 84,
+    0x59f: 84,
+    0x5a0: 84,
+    0x5a1: 84,
+    0x5a2: 84,
+    0x5a3: 84,
+    0x5a4: 84,
+    0x5a5: 84,
+    0x5a6: 84,
+    0x5a7: 84,
+    0x5a8: 84,
+    0x5a9: 84,
+    0x5aa: 84,
+    0x5ab: 84,
+    0x5ac: 84,
+    0x5ad: 84,
+    0x5ae: 84,
+    0x5af: 84,
+    0x5b0: 84,
+    0x5b1: 84,
+    0x5b2: 84,
+    0x5b3: 84,
+    0x5b4: 84,
+    0x5b5: 84,
+    0x5b6: 84,
+    0x5b7: 84,
+    0x5b8: 84,
+    0x5b9: 84,
+    0x5ba: 84,
+    0x5bb: 84,
+    0x5bc: 84,
+    0x5bd: 84,
+    0x5bf: 84,
+    0x5c1: 84,
+    0x5c2: 84,
+    0x5c4: 84,
+    0x5c5: 84,
+    0x5c7: 84,
+    0x610: 84,
+    0x611: 84,
+    0x612: 84,
+    0x613: 84,
+    0x614: 84,
+    0x615: 84,
+    0x616: 84,
+    0x617: 84,
+    0x618: 84,
+    0x619: 84,
+    0x61a: 84,
+    0x61c: 84,
+    0x620: 68,
+    0x622: 82,
+    0x623: 82,
+    0x624: 82,
+    0x625: 82,
+    0x626: 68,
+    0x627: 82,
+    0x628: 68,
+    0x629: 82,
+    0x62a: 68,
+    0x62b: 68,
+    0x62c: 68,
+    0x62d: 68,
+    0x62e: 68,
+    0x62f: 82,
+    0x630: 82,
+    0x631: 82,
+    0x632: 82,
+    0x633: 68,
+    0x634: 68,
+    0x635: 68,
+    0x636: 68,
+    0x637: 68,
+    0x638: 68,
+    0x639: 68,
+    0x63a: 68,
+    0x63b: 68,
+    0x63c: 68,
+    0x63d: 68,
+    0x63e: 68,
+    0x63f: 68,
+    0x640: 67,
+    0x641: 68,
+    0x642: 68,
+    0x643: 68,
+    0x644: 68,
+    0x645: 68,
+    0x646: 68,
+    0x647: 68,
+    0x648: 82,
+    0x649: 68,
+    0x64a: 68,
+    0x64b: 84,
+    0x64c: 84,
+    0x64d: 84,
+    0x64e: 84,
+    0x64f: 84,
+    0x650: 84,
+    0x651: 84,
+    0x652: 84,
+    0x653: 84,
+    0x654: 84,
+    0x655: 84,
+    0x656: 84,
+    0x657: 84,
+    0x658: 84,
+    0x659: 84,
+    0x65a: 84,
+    0x65b: 84,
+    0x65c: 84,
+    0x65d: 84,
+    0x65e: 84,
+    0x65f: 84,
+    0x66e: 68,
+    0x66f: 68,
+    0x670: 84,
+    0x671: 82,
+    0x672: 82,
+    0x673: 82,
+    0x675: 82,
+    0x676: 82,
+    0x677: 82,
+    0x678: 68,
+    0x679: 68,
+    0x67a: 68,
+    0x67b: 68,
+    0x67c: 68,
+    0x67d: 68,
+    0x67e: 68,
+    0x67f: 68,
+    0x680: 68,
+    0x681: 68,
+    0x682: 68,
+    0x683: 68,
+    0x684: 68,
+    0x685: 68,
+    0x686: 68,
+    0x687: 68,
+    0x688: 82,
+    0x689: 82,
+    0x68a: 82,
+    0x68b: 82,
+    0x68c: 82,
+    0x68d: 82,
+    0x68e: 82,
+    0x68f: 82,
+    0x690: 82,
+    0x691: 82,
+    0x692: 82,
+    0x693: 82,
+    0x694: 82,
+    0x695: 82,
+    0x696: 82,
+    0x697: 82,
+    0x698: 82,
+    0x699: 82,
+    0x69a: 68,
+    0x69b: 68,
+    0x69c: 68,
+    0x69d: 68,
+    0x69e: 68,
+    0x69f: 68,
+    0x6a0: 68,
+    0x6a1: 68,
+    0x6a2: 68,
+    0x6a3: 68,
+    0x6a4: 68,
+    0x6a5: 68,
+    0x6a6: 68,
+    0x6a7: 68,
+    0x6a8: 68,
+    0x6a9: 68,
+    0x6aa: 68,
+    0x6ab: 68,
+    0x6ac: 68,
+    0x6ad: 68,
+    0x6ae: 68,
+    0x6af: 68,
+    0x6b0: 68,
+    0x6b1: 68,
+    0x6b2: 68,
+    0x6b3: 68,
+    0x6b4: 68,
+    0x6b5: 68,
+    0x6b6: 68,
+    0x6b7: 68,
+    0x6b8: 68,
+    0x6b9: 68,
+    0x6ba: 68,
+    0x6bb: 68,
+    0x6bc: 68,
+    0x6bd: 68,
+    0x6be: 68,
+    0x6bf: 68,
+    0x6c0: 82,
+    0x6c1: 68,
+    0x6c2: 68,
+    0x6c3: 82,
+    0x6c4: 82,
+    0x6c5: 82,
+    0x6c6: 82,
+    0x6c7: 82,
+    0x6c8: 82,
+    0x6c9: 82,
+    0x6ca: 82,
+    0x6cb: 82,
+    0x6cc: 68,
+    0x6cd: 82,
+    0x6ce: 68,
+    0x6cf: 82,
+    0x6d0: 68,
+    0x6d1: 68,
+    0x6d2: 82,
+    0x6d3: 82,
+    0x6d5: 82,
+    0x6d6: 84,
+    0x6d7: 84,
+    0x6d8: 84,
+    0x6d9: 84,
+    0x6da: 84,
+    0x6db: 84,
+    0x6dc: 84,
+    0x6df: 84,
+    0x6e0: 84,
+    0x6e1: 84,
+    0x6e2: 84,
+    0x6e3: 84,
+    0x6e4: 84,
+    0x6e7: 84,
+    0x6e8: 84,
+    0x6ea: 84,
+    0x6eb: 84,
+    0x6ec: 84,
+    0x6ed: 84,
+    0x6ee: 82,
+    0x6ef: 82,
+    0x6fa: 68,
+    0x6fb: 68,
+    0x6fc: 68,
+    0x6ff: 68,
+    0x70f: 84,
+    0x710: 82,
+    0x711: 84,
+    0x712: 68,
+    0x713: 68,
+    0x714: 68,
+    0x715: 82,
+    0x716: 82,
+    0x717: 82,
+    0x718: 82,
+    0x719: 82,
+    0x71a: 68,
+    0x71b: 68,
+    0x71c: 68,
+    0x71d: 68,
+    0x71e: 82,
+    0x71f: 68,
+    0x720: 68,
+    0x721: 68,
+    0x722: 68,
+    0x723: 68,
+    0x724: 68,
+    0x725: 68,
+    0x726: 68,
+    0x727: 68,
+    0x728: 82,
+    0x729: 68,
+    0x72a: 82,
+    0x72b: 68,
+    0x72c: 82,
+    0x72d: 68,
+    0x72e: 68,
+    0x72f: 82,
+    0x730: 84,
+    0x731: 84,
+    0x732: 84,
+    0x733: 84,
+    0x734: 84,
+    0x735: 84,
+    0x736: 84,
+    0x737: 84,
+    0x738: 84,
+    0x739: 84,
+    0x73a: 84,
+    0x73b: 84,
+    0x73c: 84,
+    0x73d: 84,
+    0x73e: 84,
+    0x73f: 84,
+    0x740: 84,
+    0x741: 84,
+    0x742: 84,
+    0x743: 84,
+    0x744: 84,
+    0x745: 84,
+    0x746: 84,
+    0x747: 84,
+    0x748: 84,
+    0x749: 84,
+    0x74a: 84,
+    0x74d: 82,
+    0x74e: 68,
+    0x74f: 68,
+    0x750: 68,
+    0x751: 68,
+    0x752: 68,
+    0x753: 68,
+    0x754: 68,
+    0x755: 68,
+    0x756: 68,
+    0x757: 68,
+    0x758: 68,
+    0x759: 82,
+    0x75a: 82,
+    0x75b: 82,
+    0x75c: 68,
+    0x75d: 68,
+    0x75e: 68,
+    0x75f: 68,
+    0x760: 68,
+    0x761: 68,
+    0x762: 68,
+    0x763: 68,
+    0x764: 68,
+    0x765: 68,
+    0x766: 68,
+    0x767: 68,
+    0x768: 68,
+    0x769: 68,
+    0x76a: 68,
+    0x76b: 82,
+    0x76c: 82,
+    0x76d: 68,
+    0x76e: 68,
+    0x76f: 68,
+    0x770: 68,
+    0x771: 82,
+    0x772: 68,
+    0x773: 82,
+    0x774: 82,
+    0x775: 68,
+    0x776: 68,
+    0x777: 68,
+    0x778: 82,
+    0x779: 82,
+    0x77a: 68,
+    0x77b: 68,
+    0x77c: 68,
+    0x77d: 68,
+    0x77e: 68,
+    0x77f: 68,
+    0x7a6: 84,
+    0x7a7: 84,
+    0x7a8: 84,
+    0x7a9: 84,
+    0x7aa: 84,
+    0x7ab: 84,
+    0x7ac: 84,
+    0x7ad: 84,
+    0x7ae: 84,
+    0x7af: 84,
+    0x7b0: 84,
+    0x7ca: 68,
+    0x7cb: 68,
+    0x7cc: 68,
+    0x7cd: 68,
+    0x7ce: 68,
+    0x7cf: 68,
+    0x7d0: 68,
+    0x7d1: 68,
+    0x7d2: 68,
+    0x7d3: 68,
+    0x7d4: 68,
+    0x7d5: 68,
+    0x7d6: 68,
+    0x7d7: 68,
+    0x7d8: 68,
+    0x7d9: 68,
+    0x7da: 68,
+    0x7db: 68,
+    0x7dc: 68,
+    0x7dd: 68,
+    0x7de: 68,
+    0x7df: 68,
+    0x7e0: 68,
+    0x7e1: 68,
+    0x7e2: 68,
+    0x7e3: 68,
+    0x7e4: 68,
+    0x7e5: 68,
+    0x7e6: 68,
+    0x7e7: 68,
+    0x7e8: 68,
+    0x7e9: 68,
+    0x7ea: 68,
+    0x7eb: 84,
+    0x7ec: 84,
+    0x7ed: 84,
+    0x7ee: 84,
+    0x7ef: 84,
+    0x7f0: 84,
+    0x7f1: 84,
+    0x7f2: 84,
+    0x7f3: 84,
+    0x7fa: 67,
+    0x7fd: 84,
+    0x816: 84,
+    0x817: 84,
+    0x818: 84,
+    0x819: 84,
+    0x81b: 84,
+    0x81c: 84,
+    0x81d: 84,
+    0x81e: 84,
+    0x81f: 84,
+    0x820: 84,
+    0x821: 84,
+    0x822: 84,
+    0x823: 84,
+    0x825: 84,
+    0x826: 84,
+    0x827: 84,
+    0x829: 84,
+    0x82a: 84,
+    0x82b: 84,
+    0x82c: 84,
+    0x82d: 84,
+    0x840: 82,
+    0x841: 68,
+    0x842: 68,
+    0x843: 68,
+    0x844: 68,
+    0x845: 68,
+    0x846: 82,
+    0x847: 82,
+    0x848: 68,
+    0x849: 82,
+    0x84a: 68,
+    0x84b: 68,
+    0x84c: 68,
+    0x84d: 68,
+    0x84e: 68,
+    0x84f: 68,
+    0x850: 68,
+    0x851: 68,
+    0x852: 68,
+    0x853: 68,
+    0x854: 82,
+    0x855: 68,
+    0x856: 82,
+    0x857: 82,
+    0x858: 82,
+    0x859: 84,
+    0x85a: 84,
+    0x85b: 84,
+    0x860: 68,
+    0x862: 68,
+    0x863: 68,
+    0x864: 68,
+    0x865: 68,
+    0x867: 82,
+    0x868: 68,
+    0x869: 82,
+    0x86a: 82,
+    0x870: 82,
+    0x871: 82,
+    0x872: 82,
+    0x873: 82,
+    0x874: 82,
+    0x875: 82,
+    0x876: 82,
+    0x877: 82,
+    0x878: 82,
+    0x879: 82,
+    0x87a: 82,
+    0x87b: 82,
+    0x87c: 82,
+    0x87d: 82,
+    0x87e: 82,
+    0x87f: 82,
+    0x880: 82,
+    0x881: 82,
+    0x882: 82,
+    0x883: 67,
+    0x884: 67,
+    0x885: 67,
+    0x886: 68,
+    0x889: 68,
+    0x88a: 68,
+    0x88b: 68,
+    0x88c: 68,
+    0x88d: 68,
+    0x88e: 82,
+    0x898: 84,
+    0x899: 84,
+    0x89a: 84,
+    0x89b: 84,
+    0x89c: 84,
+    0x89d: 84,
+    0x89e: 84,
+    0x89f: 84,
+    0x8a0: 68,
+    0x8a1: 68,
+    0x8a2: 68,
+    0x8a3: 68,
+    0x8a4: 68,
+    0x8a5: 68,
+    0x8a6: 68,
+    0x8a7: 68,
+    0x8a8: 68,
+    0x8a9: 68,
+    0x8aa: 82,
+    0x8ab: 82,
+    0x8ac: 82,
+    0x8ae: 82,
+    0x8af: 68,
+    0x8b0: 68,
+    0x8b1: 82,
+    0x8b2: 82,
+    0x8b3: 68,
+    0x8b4: 68,
+    0x8b5: 68,
+    0x8b6: 68,
+    0x8b7: 68,
+    0x8b8: 68,
+    0x8b9: 82,
+    0x8ba: 68,
+    0x8bb: 68,
+    0x8bc: 68,
+    0x8bd: 68,
+    0x8be: 68,
+    0x8bf: 68,
+    0x8c0: 68,
+    0x8c1: 68,
+    0x8c2: 68,
+    0x8c3: 68,
+    0x8c4: 68,
+    0x8c5: 68,
+    0x8c6: 68,
+    0x8c7: 68,
+    0x8c8: 68,
+    0x8ca: 84,
+    0x8cb: 84,
+    0x8cc: 84,
+    0x8cd: 84,
+    0x8ce: 84,
+    0x8cf: 84,
+    0x8d0: 84,
+    0x8d1: 84,
+    0x8d2: 84,
+    0x8d3: 84,
+    0x8d4: 84,
+    0x8d5: 84,
+    0x8d6: 84,
+    0x8d7: 84,
+    0x8d8: 84,
+    0x8d9: 84,
+    0x8da: 84,
+    0x8db: 84,
+    0x8dc: 84,
+    0x8dd: 84,
+    0x8de: 84,
+    0x8df: 84,
+    0x8e0: 84,
+    0x8e1: 84,
+    0x8e3: 84,
+    0x8e4: 84,
+    0x8e5: 84,
+    0x8e6: 84,
+    0x8e7: 84,
+    0x8e8: 84,
+    0x8e9: 84,
+    0x8ea: 84,
+    0x8eb: 84,
+    0x8ec: 84,
+    0x8ed: 84,
+    0x8ee: 84,
+    0x8ef: 84,
+    0x8f0: 84,
+    0x8f1: 84,
+    0x8f2: 84,
+    0x8f3: 84,
+    0x8f4: 84,
+    0x8f5: 84,
+    0x8f6: 84,
+    0x8f7: 84,
+    0x8f8: 84,
+    0x8f9: 84,
+    0x8fa: 84,
+    0x8fb: 84,
+    0x8fc: 84,
+    0x8fd: 84,
+    0x8fe: 84,
+    0x8ff: 84,
+    0x900: 84,
+    0x901: 84,
+    0x902: 84,
+    0x93a: 84,
+    0x93c: 84,
+    0x941: 84,
+    0x942: 84,
+    0x943: 84,
+    0x944: 84,
+    0x945: 84,
+    0x946: 84,
+    0x947: 84,
+    0x948: 84,
+    0x94d: 84,
+    0x951: 84,
+    0x952: 84,
+    0x953: 84,
+    0x954: 84,
+    0x955: 84,
+    0x956: 84,
+    0x957: 84,
+    0x962: 84,
+    0x963: 84,
+    0x981: 84,
+    0x9bc: 84,
+    0x9c1: 84,
+    0x9c2: 84,
+    0x9c3: 84,
+    0x9c4: 84,
+    0x9cd: 84,
+    0x9e2: 84,
+    0x9e3: 84,
+    0x9fe: 84,
+    0xa01: 84,
+    0xa02: 84,
+    0xa3c: 84,
+    0xa41: 84,
+    0xa42: 84,
+    0xa47: 84,
+    0xa48: 84,
+    0xa4b: 84,
+    0xa4c: 84,
+    0xa4d: 84,
+    0xa51: 84,
+    0xa70: 84,
+    0xa71: 84,
+    0xa75: 84,
+    0xa81: 84,
+    0xa82: 84,
+    0xabc: 84,
+    0xac1: 84,
+    0xac2: 84,
+    0xac3: 84,
+    0xac4: 84,
+    0xac5: 84,
+    0xac7: 84,
+    0xac8: 84,
+    0xacd: 84,
+    0xae2: 84,
+    0xae3: 84,
+    0xafa: 84,
+    0xafb: 84,
+    0xafc: 84,
+    0xafd: 84,
+    0xafe: 84,
+    0xaff: 84,
+    0xb01: 84,
+    0xb3c: 84,
+    0xb3f: 84,
+    0xb41: 84,
+    0xb42: 84,
+    0xb43: 84,
+    0xb44: 84,
+    0xb4d: 84,
+    0xb55: 84,
+    0xb56: 84,
+    0xb62: 84,
+    0xb63: 84,
+    0xb82: 84,
+    0xbc0: 84,
+    0xbcd: 84,
+    0xc00: 84,
+    0xc04: 84,
+    0xc3c: 84,
+    0xc3e: 84,
+    0xc3f: 84,
+    0xc40: 84,
+    0xc46: 84,
+    0xc47: 84,
+    0xc48: 84,
+    0xc4a: 84,
+    0xc4b: 84,
+    0xc4c: 84,
+    0xc4d: 84,
+    0xc55: 84,
+    0xc56: 84,
+    0xc62: 84,
+    0xc63: 84,
+    0xc81: 84,
+    0xcbc: 84,
+    0xcbf: 84,
+    0xcc6: 84,
+    0xccc: 84,
+    0xccd: 84,
+    0xce2: 84,
+    0xce3: 84,
+    0xd00: 84,
+    0xd01: 84,
+    0xd3b: 84,
+    0xd3c: 84,
+    0xd41: 84,
+    0xd42: 84,
+    0xd43: 84,
+    0xd44: 84,
+    0xd4d: 84,
+    0xd62: 84,
+    0xd63: 84,
+    0xd81: 84,
+    0xdca: 84,
+    0xdd2: 84,
+    0xdd3: 84,
+    0xdd4: 84,
+    0xdd6: 84,
+    0xe31: 84,
+    0xe34: 84,
+    0xe35: 84,
+    0xe36: 84,
+    0xe37: 84,
+    0xe38: 84,
+    0xe39: 84,
+    0xe3a: 84,
+    0xe47: 84,
+    0xe48: 84,
+    0xe49: 84,
+    0xe4a: 84,
+    0xe4b: 84,
+    0xe4c: 84,
+    0xe4d: 84,
+    0xe4e: 84,
+    0xeb1: 84,
+    0xeb4: 84,
+    0xeb5: 84,
+    0xeb6: 84,
+    0xeb7: 84,
+    0xeb8: 84,
+    0xeb9: 84,
+    0xeba: 84,
+    0xebb: 84,
+    0xebc: 84,
+    0xec8: 84,
+    0xec9: 84,
+    0xeca: 84,
+    0xecb: 84,
+    0xecc: 84,
+    0xecd: 84,
+    0xece: 84,
+    0xf18: 84,
+    0xf19: 84,
+    0xf35: 84,
+    0xf37: 84,
+    0xf39: 84,
+    0xf71: 84,
+    0xf72: 84,
+    0xf73: 84,
+    0xf74: 84,
+    0xf75: 84,
+    0xf76: 84,
+    0xf77: 84,
+    0xf78: 84,
+    0xf79: 84,
+    0xf7a: 84,
+    0xf7b: 84,
+    0xf7c: 84,
+    0xf7d: 84,
+    0xf7e: 84,
+    0xf80: 84,
+    0xf81: 84,
+    0xf82: 84,
+    0xf83: 84,
+    0xf84: 84,
+    0xf86: 84,
+    0xf87: 84,
+    0xf8d: 84,
+    0xf8e: 84,
+    0xf8f: 84,
+    0xf90: 84,
+    0xf91: 84,
+    0xf92: 84,
+    0xf93: 84,
+    0xf94: 84,
+    0xf95: 84,
+    0xf96: 84,
+    0xf97: 84,
+    0xf99: 84,
+    0xf9a: 84,
+    0xf9b: 84,
+    0xf9c: 84,
+    0xf9d: 84,
+    0xf9e: 84,
+    0xf9f: 84,
+    0xfa0: 84,
+    0xfa1: 84,
+    0xfa2: 84,
+    0xfa3: 84,
+    0xfa4: 84,
+    0xfa5: 84,
+    0xfa6: 84,
+    0xfa7: 84,
+    0xfa8: 84,
+    0xfa9: 84,
+    0xfaa: 84,
+    0xfab: 84,
+    0xfac: 84,
+    0xfad: 84,
+    0xfae: 84,
+    0xfaf: 84,
+    0xfb0: 84,
+    0xfb1: 84,
+    0xfb2: 84,
+    0xfb3: 84,
+    0xfb4: 84,
+    0xfb5: 84,
+    0xfb6: 84,
+    0xfb7: 84,
+    0xfb8: 84,
+    0xfb9: 84,
+    0xfba: 84,
+    0xfbb: 84,
+    0xfbc: 84,
+    0xfc6: 84,
+    0x102d: 84,
+    0x102e: 84,
+    0x102f: 84,
+    0x1030: 84,
+    0x1032: 84,
+    0x1033: 84,
+    0x1034: 84,
+    0x1035: 84,
+    0x1036: 84,
+    0x1037: 84,
+    0x1039: 84,
+    0x103a: 84,
+    0x103d: 84,
+    0x103e: 84,
+    0x1058: 84,
+    0x1059: 84,
+    0x105e: 84,
+    0x105f: 84,
+    0x1060: 84,
+    0x1071: 84,
+    0x1072: 84,
+    0x1073: 84,
+    0x1074: 84,
+    0x1082: 84,
+    0x1085: 84,
+    0x1086: 84,
+    0x108d: 84,
+    0x109d: 84,
+    0x135d: 84,
+    0x135e: 84,
+    0x135f: 84,
+    0x1712: 84,
+    0x1713: 84,
+    0x1714: 84,
+    0x1732: 84,
+    0x1733: 84,
+    0x1752: 84,
+    0x1753: 84,
+    0x1772: 84,
+    0x1773: 84,
+    0x17b4: 84,
+    0x17b5: 84,
+    0x17b7: 84,
+    0x17b8: 84,
+    0x17b9: 84,
+    0x17ba: 84,
+    0x17bb: 84,
+    0x17bc: 84,
+    0x17bd: 84,
+    0x17c6: 84,
+    0x17c9: 84,
+    0x17ca: 84,
+    0x17cb: 84,
+    0x17cc: 84,
+    0x17cd: 84,
+    0x17ce: 84,
+    0x17cf: 84,
+    0x17d0: 84,
+    0x17d1: 84,
+    0x17d2: 84,
+    0x17d3: 84,
+    0x17dd: 84,
+    0x1807: 68,
+    0x180a: 67,
+    0x180b: 84,
+    0x180c: 84,
+    0x180d: 84,
+    0x180f: 84,
+    0x1820: 68,
+    0x1821: 68,
+    0x1822: 68,
+    0x1823: 68,
+    0x1824: 68,
+    0x1825: 68,
+    0x1826: 68,
+    0x1827: 68,
+    0x1828: 68,
+    0x1829: 68,
+    0x182a: 68,
+    0x182b: 68,
+    0x182c: 68,
+    0x182d: 68,
+    0x182e: 68,
+    0x182f: 68,
+    0x1830: 68,
+    0x1831: 68,
+    0x1832: 68,
+    0x1833: 68,
+    0x1834: 68,
+    0x1835: 68,
+    0x1836: 68,
+    0x1837: 68,
+    0x1838: 68,
+    0x1839: 68,
+    0x183a: 68,
+    0x183b: 68,
+    0x183c: 68,
+    0x183d: 68,
+    0x183e: 68,
+    0x183f: 68,
+    0x1840: 68,
+    0x1841: 68,
+    0x1842: 68,
+    0x1843: 68,
+    0x1844: 68,
+    0x1845: 68,
+    0x1846: 68,
+    0x1847: 68,
+    0x1848: 68,
+    0x1849: 68,
+    0x184a: 68,
+    0x184b: 68,
+    0x184c: 68,
+    0x184d: 68,
+    0x184e: 68,
+    0x184f: 68,
+    0x1850: 68,
+    0x1851: 68,
+    0x1852: 68,
+    0x1853: 68,
+    0x1854: 68,
+    0x1855: 68,
+    0x1856: 68,
+    0x1857: 68,
+    0x1858: 68,
+    0x1859: 68,
+    0x185a: 68,
+    0x185b: 68,
+    0x185c: 68,
+    0x185d: 68,
+    0x185e: 68,
+    0x185f: 68,
+    0x1860: 68,
+    0x1861: 68,
+    0x1862: 68,
+    0x1863: 68,
+    0x1864: 68,
+    0x1865: 68,
+    0x1866: 68,
+    0x1867: 68,
+    0x1868: 68,
+    0x1869: 68,
+    0x186a: 68,
+    0x186b: 68,
+    0x186c: 68,
+    0x186d: 68,
+    0x186e: 68,
+    0x186f: 68,
+    0x1870: 68,
+    0x1871: 68,
+    0x1872: 68,
+    0x1873: 68,
+    0x1874: 68,
+    0x1875: 68,
+    0x1876: 68,
+    0x1877: 68,
+    0x1878: 68,
+    0x1885: 84,
+    0x1886: 84,
+    0x1887: 68,
+    0x1888: 68,
+    0x1889: 68,
+    0x188a: 68,
+    0x188b: 68,
+    0x188c: 68,
+    0x188d: 68,
+    0x188e: 68,
+    0x188f: 68,
+    0x1890: 68,
+    0x1891: 68,
+    0x1892: 68,
+    0x1893: 68,
+    0x1894: 68,
+    0x1895: 68,
+    0x1896: 68,
+    0x1897: 68,
+    0x1898: 68,
+    0x1899: 68,
+    0x189a: 68,
+    0x189b: 68,
+    0x189c: 68,
+    0x189d: 68,
+    0x189e: 68,
+    0x189f: 68,
+    0x18a0: 68,
+    0x18a1: 68,
+    0x18a2: 68,
+    0x18a3: 68,
+    0x18a4: 68,
+    0x18a5: 68,
+    0x18a6: 68,
+    0x18a7: 68,
+    0x18a8: 68,
+    0x18a9: 84,
+    0x18aa: 68,
+    0x1920: 84,
+    0x1921: 84,
+    0x1922: 84,
+    0x1927: 84,
+    0x1928: 84,
+    0x1932: 84,
+    0x1939: 84,
+    0x193a: 84,
+    0x193b: 84,
+    0x1a17: 84,
+    0x1a18: 84,
+    0x1a1b: 84,
+    0x1a56: 84,
+    0x1a58: 84,
+    0x1a59: 84,
+    0x1a5a: 84,
+    0x1a5b: 84,
+    0x1a5c: 84,
+    0x1a5d: 84,
+    0x1a5e: 84,
+    0x1a60: 84,
+    0x1a62: 84,
+    0x1a65: 84,
+    0x1a66: 84,
+    0x1a67: 84,
+    0x1a68: 84,
+    0x1a69: 84,
+    0x1a6a: 84,
+    0x1a6b: 84,
+    0x1a6c: 84,
+    0x1a73: 84,
+    0x1a74: 84,
+    0x1a75: 84,
+    0x1a76: 84,
+    0x1a77: 84,
+    0x1a78: 84,
+    0x1a79: 84,
+    0x1a7a: 84,
+    0x1a7b: 84,
+    0x1a7c: 84,
+    0x1a7f: 84,
+    0x1ab0: 84,
+    0x1ab1: 84,
+    0x1ab2: 84,
+    0x1ab3: 84,
+    0x1ab4: 84,
+    0x1ab5: 84,
+    0x1ab6: 84,
+    0x1ab7: 84,
+    0x1ab8: 84,
+    0x1ab9: 84,
+    0x1aba: 84,
+    0x1abb: 84,
+    0x1abc: 84,
+    0x1abd: 84,
+    0x1abe: 84,
+    0x1abf: 84,
+    0x1ac0: 84,
+    0x1ac1: 84,
+    0x1ac2: 84,
+    0x1ac3: 84,
+    0x1ac4: 84,
+    0x1ac5: 84,
+    0x1ac6: 84,
+    0x1ac7: 84,
+    0x1ac8: 84,
+    0x1ac9: 84,
+    0x1aca: 84,
+    0x1acb: 84,
+    0x1acc: 84,
+    0x1acd: 84,
+    0x1ace: 84,
+    0x1b00: 84,
+    0x1b01: 84,
+    0x1b02: 84,
+    0x1b03: 84,
+    0x1b34: 84,
+    0x1b36: 84,
+    0x1b37: 84,
+    0x1b38: 84,
+    0x1b39: 84,
+    0x1b3a: 84,
+    0x1b3c: 84,
+    0x1b42: 84,
+    0x1b6b: 84,
+    0x1b6c: 84,
+    0x1b6d: 84,
+    0x1b6e: 84,
+    0x1b6f: 84,
+    0x1b70: 84,
+    0x1b71: 84,
+    0x1b72: 84,
+    0x1b73: 84,
+    0x1b80: 84,
+    0x1b81: 84,
+    0x1ba2: 84,
+    0x1ba3: 84,
+    0x1ba4: 84,
+    0x1ba5: 84,
+    0x1ba8: 84,
+    0x1ba9: 84,
+    0x1bab: 84,
+    0x1bac: 84,
+    0x1bad: 84,
+    0x1be6: 84,
+    0x1be8: 84,
+    0x1be9: 84,
+    0x1bed: 84,
+    0x1bef: 84,
+    0x1bf0: 84,
+    0x1bf1: 84,
+    0x1c2c: 84,
+    0x1c2d: 84,
+    0x1c2e: 84,
+    0x1c2f: 84,
+    0x1c30: 84,
+    0x1c31: 84,
+    0x1c32: 84,
+    0x1c33: 84,
+    0x1c36: 84,
+    0x1c37: 84,
+    0x1cd0: 84,
+    0x1cd1: 84,
+    0x1cd2: 84,
+    0x1cd4: 84,
+    0x1cd5: 84,
+    0x1cd6: 84,
+    0x1cd7: 84,
+    0x1cd8: 84,
+    0x1cd9: 84,
+    0x1cda: 84,
+    0x1cdb: 84,
+    0x1cdc: 84,
+    0x1cdd: 84,
+    0x1cde: 84,
+    0x1cdf: 84,
+    0x1ce0: 84,
+    0x1ce2: 84,
+    0x1ce3: 84,
+    0x1ce4: 84,
+    0x1ce5: 84,
+    0x1ce6: 84,
+    0x1ce7: 84,
+    0x1ce8: 84,
+    0x1ced: 84,
+    0x1cf4: 84,
+    0x1cf8: 84,
+    0x1cf9: 84,
+    0x1dc0: 84,
+    0x1dc1: 84,
+    0x1dc2: 84,
+    0x1dc3: 84,
+    0x1dc4: 84,
+    0x1dc5: 84,
+    0x1dc6: 84,
+    0x1dc7: 84,
+    0x1dc8: 84,
+    0x1dc9: 84,
+    0x1dca: 84,
+    0x1dcb: 84,
+    0x1dcc: 84,
+    0x1dcd: 84,
+    0x1dce: 84,
+    0x1dcf: 84,
+    0x1dd0: 84,
+    0x1dd1: 84,
+    0x1dd2: 84,
+    0x1dd3: 84,
+    0x1dd4: 84,
+    0x1dd5: 84,
+    0x1dd6: 84,
+    0x1dd7: 84,
+    0x1dd8: 84,
+    0x1dd9: 84,
+    0x1dda: 84,
+    0x1ddb: 84,
+    0x1ddc: 84,
+    0x1ddd: 84,
+    0x1dde: 84,
+    0x1ddf: 84,
+    0x1de0: 84,
+    0x1de1: 84,
+    0x1de2: 84,
+    0x1de3: 84,
+    0x1de4: 84,
+    0x1de5: 84,
+    0x1de6: 84,
+    0x1de7: 84,
+    0x1de8: 84,
+    0x1de9: 84,
+    0x1dea: 84,
+    0x1deb: 84,
+    0x1dec: 84,
+    0x1ded: 84,
+    0x1dee: 84,
+    0x1def: 84,
+    0x1df0: 84,
+    0x1df1: 84,
+    0x1df2: 84,
+    0x1df3: 84,
+    0x1df4: 84,
+    0x1df5: 84,
+    0x1df6: 84,
+    0x1df7: 84,
+    0x1df8: 84,
+    0x1df9: 84,
+    0x1dfa: 84,
+    0x1dfb: 84,
+    0x1dfc: 84,
+    0x1dfd: 84,
+    0x1dfe: 84,
+    0x1dff: 84,
+    0x200b: 84,
+    0x200d: 67,
+    0x200e: 84,
+    0x200f: 84,
+    0x202a: 84,
+    0x202b: 84,
+    0x202c: 84,
+    0x202d: 84,
+    0x202e: 84,
+    0x2060: 84,
+    0x2061: 84,
+    0x2062: 84,
+    0x2063: 84,
+    0x2064: 84,
+    0x206a: 84,
+    0x206b: 84,
+    0x206c: 84,
+    0x206d: 84,
+    0x206e: 84,
+    0x206f: 84,
+    0x20d0: 84,
+    0x20d1: 84,
+    0x20d2: 84,
+    0x20d3: 84,
+    0x20d4: 84,
+    0x20d5: 84,
+    0x20d6: 84,
+    0x20d7: 84,
+    0x20d8: 84,
+    0x20d9: 84,
+    0x20da: 84,
+    0x20db: 84,
+    0x20dc: 84,
+    0x20dd: 84,
+    0x20de: 84,
+    0x20df: 84,
+    0x20e0: 84,
+    0x20e1: 84,
+    0x20e2: 84,
+    0x20e3: 84,
+    0x20e4: 84,
+    0x20e5: 84,
+    0x20e6: 84,
+    0x20e7: 84,
+    0x20e8: 84,
+    0x20e9: 84,
+    0x20ea: 84,
+    0x20eb: 84,
+    0x20ec: 84,
+    0x20ed: 84,
+    0x20ee: 84,
+    0x20ef: 84,
+    0x20f0: 84,
+    0x2cef: 84,
+    0x2cf0: 84,
+    0x2cf1: 84,
+    0x2d7f: 84,
+    0x2de0: 84,
+    0x2de1: 84,
+    0x2de2: 84,
+    0x2de3: 84,
+    0x2de4: 84,
+    0x2de5: 84,
+    0x2de6: 84,
+    0x2de7: 84,
+    0x2de8: 84,
+    0x2de9: 84,
+    0x2dea: 84,
+    0x2deb: 84,
+    0x2dec: 84,
+    0x2ded: 84,
+    0x2dee: 84,
+    0x2def: 84,
+    0x2df0: 84,
+    0x2df1: 84,
+    0x2df2: 84,
+    0x2df3: 84,
+    0x2df4: 84,
+    0x2df5: 84,
+    0x2df6: 84,
+    0x2df7: 84,
+    0x2df8: 84,
+    0x2df9: 84,
+    0x2dfa: 84,
+    0x2dfb: 84,
+    0x2dfc: 84,
+    0x2dfd: 84,
+    0x2dfe: 84,
+    0x2dff: 84,
+    0x302a: 84,
+    0x302b: 84,
+    0x302c: 84,
+    0x302d: 84,
+    0x3099: 84,
+    0x309a: 84,
+    0xa66f: 84,
+    0xa670: 84,
+    0xa671: 84,
+    0xa672: 84,
+    0xa674: 84,
+    0xa675: 84,
+    0xa676: 84,
+    0xa677: 84,
+    0xa678: 84,
+    0xa679: 84,
+    0xa67a: 84,
+    0xa67b: 84,
+    0xa67c: 84,
+    0xa67d: 84,
+    0xa69e: 84,
+    0xa69f: 84,
+    0xa6f0: 84,
+    0xa6f1: 84,
+    0xa802: 84,
+    0xa806: 84,
+    0xa80b: 84,
+    0xa825: 84,
+    0xa826: 84,
+    0xa82c: 84,
+    0xa840: 68,
+    0xa841: 68,
+    0xa842: 68,
+    0xa843: 68,
+    0xa844: 68,
+    0xa845: 68,
+    0xa846: 68,
+    0xa847: 68,
+    0xa848: 68,
+    0xa849: 68,
+    0xa84a: 68,
+    0xa84b: 68,
+    0xa84c: 68,
+    0xa84d: 68,
+    0xa84e: 68,
+    0xa84f: 68,
+    0xa850: 68,
+    0xa851: 68,
+    0xa852: 68,
+    0xa853: 68,
+    0xa854: 68,
+    0xa855: 68,
+    0xa856: 68,
+    0xa857: 68,
+    0xa858: 68,
+    0xa859: 68,
+    0xa85a: 68,
+    0xa85b: 68,
+    0xa85c: 68,
+    0xa85d: 68,
+    0xa85e: 68,
+    0xa85f: 68,
+    0xa860: 68,
+    0xa861: 68,
+    0xa862: 68,
+    0xa863: 68,
+    0xa864: 68,
+    0xa865: 68,
+    0xa866: 68,
+    0xa867: 68,
+    0xa868: 68,
+    0xa869: 68,
+    0xa86a: 68,
+    0xa86b: 68,
+    0xa86c: 68,
+    0xa86d: 68,
+    0xa86e: 68,
+    0xa86f: 68,
+    0xa870: 68,
+    0xa871: 68,
+    0xa872: 76,
+    0xa8c4: 84,
+    0xa8c5: 84,
+    0xa8e0: 84,
+    0xa8e1: 84,
+    0xa8e2: 84,
+    0xa8e3: 84,
+    0xa8e4: 84,
+    0xa8e5: 84,
+    0xa8e6: 84,
+    0xa8e7: 84,
+    0xa8e8: 84,
+    0xa8e9: 84,
+    0xa8ea: 84,
+    0xa8eb: 84,
+    0xa8ec: 84,
+    0xa8ed: 84,
+    0xa8ee: 84,
+    0xa8ef: 84,
+    0xa8f0: 84,
+    0xa8f1: 84,
+    0xa8ff: 84,
+    0xa926: 84,
+    0xa927: 84,
+    0xa928: 84,
+    0xa929: 84,
+    0xa92a: 84,
+    0xa92b: 84,
+    0xa92c: 84,
+    0xa92d: 84,
+    0xa947: 84,
+    0xa948: 84,
+    0xa949: 84,
+    0xa94a: 84,
+    0xa94b: 84,
+    0xa94c: 84,
+    0xa94d: 84,
+    0xa94e: 84,
+    0xa94f: 84,
+    0xa950: 84,
+    0xa951: 84,
+    0xa980: 84,
+    0xa981: 84,
+    0xa982: 84,
+    0xa9b3: 84,
+    0xa9b6: 84,
+    0xa9b7: 84,
+    0xa9b8: 84,
+    0xa9b9: 84,
+    0xa9bc: 84,
+    0xa9bd: 84,
+    0xa9e5: 84,
+    0xaa29: 84,
+    0xaa2a: 84,
+    0xaa2b: 84,
+    0xaa2c: 84,
+    0xaa2d: 84,
+    0xaa2e: 84,
+    0xaa31: 84,
+    0xaa32: 84,
+    0xaa35: 84,
+    0xaa36: 84,
+    0xaa43: 84,
+    0xaa4c: 84,
+    0xaa7c: 84,
+    0xaab0: 84,
+    0xaab2: 84,
+    0xaab3: 84,
+    0xaab4: 84,
+    0xaab7: 84,
+    0xaab8: 84,
+    0xaabe: 84,
+    0xaabf: 84,
+    0xaac1: 84,
+    0xaaec: 84,
+    0xaaed: 84,
+    0xaaf6: 84,
+    0xabe5: 84,
+    0xabe8: 84,
+    0xabed: 84,
+    0xfb1e: 84,
+    0xfe00: 84,
+    0xfe01: 84,
+    0xfe02: 84,
+    0xfe03: 84,
+    0xfe04: 84,
+    0xfe05: 84,
+    0xfe06: 84,
+    0xfe07: 84,
+    0xfe08: 84,
+    0xfe09: 84,
+    0xfe0a: 84,
+    0xfe0b: 84,
+    0xfe0c: 84,
+    0xfe0d: 84,
+    0xfe0e: 84,
+    0xfe0f: 84,
+    0xfe20: 84,
+    0xfe21: 84,
+    0xfe22: 84,
+    0xfe23: 84,
+    0xfe24: 84,
+    0xfe25: 84,
+    0xfe26: 84,
+    0xfe27: 84,
+    0xfe28: 84,
+    0xfe29: 84,
+    0xfe2a: 84,
+    0xfe2b: 84,
+    0xfe2c: 84,
+    0xfe2d: 84,
+    0xfe2e: 84,
+    0xfe2f: 84,
+    0xfeff: 84,
+    0xfff9: 84,
+    0xfffa: 84,
+    0xfffb: 84,
+    0x101fd: 84,
+    0x102e0: 84,
+    0x10376: 84,
+    0x10377: 84,
+    0x10378: 84,
+    0x10379: 84,
+    0x1037a: 84,
+    0x10a01: 84,
+    0x10a02: 84,
+    0x10a03: 84,
+    0x10a05: 84,
+    0x10a06: 84,
+    0x10a0c: 84,
+    0x10a0d: 84,
+    0x10a0e: 84,
+    0x10a0f: 84,
+    0x10a38: 84,
+    0x10a39: 84,
+    0x10a3a: 84,
+    0x10a3f: 84,
+    0x10ac0: 68,
+    0x10ac1: 68,
+    0x10ac2: 68,
+    0x10ac3: 68,
+    0x10ac4: 68,
+    0x10ac5: 82,
+    0x10ac7: 82,
+    0x10ac9: 82,
+    0x10aca: 82,
+    0x10acd: 76,
+    0x10ace: 82,
+    0x10acf: 82,
+    0x10ad0: 82,
+    0x10ad1: 82,
+    0x10ad2: 82,
+    0x10ad3: 68,
+    0x10ad4: 68,
+    0x10ad5: 68,
+    0x10ad6: 68,
+    0x10ad7: 76,
+    0x10ad8: 68,
+    0x10ad9: 68,
+    0x10ada: 68,
+    0x10adb: 68,
+    0x10adc: 68,
+    0x10add: 82,
+    0x10ade: 68,
+    0x10adf: 68,
+    0x10ae0: 68,
+    0x10ae1: 82,
+    0x10ae4: 82,
+    0x10ae5: 84,
+    0x10ae6: 84,
+    0x10aeb: 68,
+    0x10aec: 68,
+    0x10aed: 68,
+    0x10aee: 68,
+    0x10aef: 82,
+    0x10b80: 68,
+    0x10b81: 82,
+    0x10b82: 68,
+    0x10b83: 82,
+    0x10b84: 82,
+    0x10b85: 82,
+    0x10b86: 68,
+    0x10b87: 68,
+    0x10b88: 68,
+    0x10b89: 82,
+    0x10b8a: 68,
+    0x10b8b: 68,
+    0x10b8c: 82,
+    0x10b8d: 68,
+    0x10b8e: 82,
+    0x10b8f: 82,
+    0x10b90: 68,
+    0x10b91: 82,
+    0x10ba9: 82,
+    0x10baa: 82,
+    0x10bab: 82,
+    0x10bac: 82,
+    0x10bad: 68,
+    0x10bae: 68,
+    0x10d00: 76,
+    0x10d01: 68,
+    0x10d02: 68,
+    0x10d03: 68,
+    0x10d04: 68,
+    0x10d05: 68,
+    0x10d06: 68,
+    0x10d07: 68,
+    0x10d08: 68,
+    0x10d09: 68,
+    0x10d0a: 68,
+    0x10d0b: 68,
+    0x10d0c: 68,
+    0x10d0d: 68,
+    0x10d0e: 68,
+    0x10d0f: 68,
+    0x10d10: 68,
+    0x10d11: 68,
+    0x10d12: 68,
+    0x10d13: 68,
+    0x10d14: 68,
+    0x10d15: 68,
+    0x10d16: 68,
+    0x10d17: 68,
+    0x10d18: 68,
+    0x10d19: 68,
+    0x10d1a: 68,
+    0x10d1b: 68,
+    0x10d1c: 68,
+    0x10d1d: 68,
+    0x10d1e: 68,
+    0x10d1f: 68,
+    0x10d20: 68,
+    0x10d21: 68,
+    0x10d22: 82,
+    0x10d23: 68,
+    0x10d24: 84,
+    0x10d25: 84,
+    0x10d26: 84,
+    0x10d27: 84,
+    0x10eab: 84,
+    0x10eac: 84,
+    0x10efd: 84,
+    0x10efe: 84,
+    0x10eff: 84,
+    0x10f30: 68,
+    0x10f31: 68,
+    0x10f32: 68,
+    0x10f33: 82,
+    0x10f34: 68,
+    0x10f35: 68,
+    0x10f36: 68,
+    0x10f37: 68,
+    0x10f38: 68,
+    0x10f39: 68,
+    0x10f3a: 68,
+    0x10f3b: 68,
+    0x10f3c: 68,
+    0x10f3d: 68,
+    0x10f3e: 68,
+    0x10f3f: 68,
+    0x10f40: 68,
+    0x10f41: 68,
+    0x10f42: 68,
+    0x10f43: 68,
+    0x10f44: 68,
+    0x10f46: 84,
+    0x10f47: 84,
+    0x10f48: 84,
+    0x10f49: 84,
+    0x10f4a: 84,
+    0x10f4b: 84,
+    0x10f4c: 84,
+    0x10f4d: 84,
+    0x10f4e: 84,
+    0x10f4f: 84,
+    0x10f50: 84,
+    0x10f51: 68,
+    0x10f52: 68,
+    0x10f53: 68,
+    0x10f54: 82,
+    0x10f70: 68,
+    0x10f71: 68,
+    0x10f72: 68,
+    0x10f73: 68,
+    0x10f74: 82,
+    0x10f75: 82,
+    0x10f76: 68,
+    0x10f77: 68,
+    0x10f78: 68,
+    0x10f79: 68,
+    0x10f7a: 68,
+    0x10f7b: 68,
+    0x10f7c: 68,
+    0x10f7d: 68,
+    0x10f7e: 68,
+    0x10f7f: 68,
+    0x10f80: 68,
+    0x10f81: 68,
+    0x10f82: 84,
+    0x10f83: 84,
+    0x10f84: 84,
+    0x10f85: 84,
+    0x10fb0: 68,
+    0x10fb2: 68,
+    0x10fb3: 68,
+    0x10fb4: 82,
+    0x10fb5: 82,
+    0x10fb6: 82,
+    0x10fb8: 68,
+    0x10fb9: 82,
+    0x10fba: 82,
+    0x10fbb: 68,
+    0x10fbc: 68,
+    0x10fbd: 82,
+    0x10fbe: 68,
+    0x10fbf: 68,
+    0x10fc1: 68,
+    0x10fc2: 82,
+    0x10fc3: 82,
+    0x10fc4: 68,
+    0x10fc9: 82,
+    0x10fca: 68,
+    0x10fcb: 76,
+    0x11001: 84,
+    0x11038: 84,
+    0x11039: 84,
+    0x1103a: 84,
+    0x1103b: 84,
+    0x1103c: 84,
+    0x1103d: 84,
+    0x1103e: 84,
+    0x1103f: 84,
+    0x11040: 84,
+    0x11041: 84,
+    0x11042: 84,
+    0x11043: 84,
+    0x11044: 84,
+    0x11045: 84,
+    0x11046: 84,
+    0x11070: 84,
+    0x11073: 84,
+    0x11074: 84,
+    0x1107f: 84,
+    0x11080: 84,
+    0x11081: 84,
+    0x110b3: 84,
+    0x110b4: 84,
+    0x110b5: 84,
+    0x110b6: 84,
+    0x110b9: 84,
+    0x110ba: 84,
+    0x110c2: 84,
+    0x11100: 84,
+    0x11101: 84,
+    0x11102: 84,
+    0x11127: 84,
+    0x11128: 84,
+    0x11129: 84,
+    0x1112a: 84,
+    0x1112b: 84,
+    0x1112d: 84,
+    0x1112e: 84,
+    0x1112f: 84,
+    0x11130: 84,
+    0x11131: 84,
+    0x11132: 84,
+    0x11133: 84,
+    0x11134: 84,
+    0x11173: 84,
+    0x11180: 84,
+    0x11181: 84,
+    0x111b6: 84,
+    0x111b7: 84,
+    0x111b8: 84,
+    0x111b9: 84,
+    0x111ba: 84,
+    0x111bb: 84,
+    0x111bc: 84,
+    0x111bd: 84,
+    0x111be: 84,
+    0x111c9: 84,
+    0x111ca: 84,
+    0x111cb: 84,
+    0x111cc: 84,
+    0x111cf: 84,
+    0x1122f: 84,
+    0x11230: 84,
+    0x11231: 84,
+    0x11234: 84,
+    0x11236: 84,
+    0x11237: 84,
+    0x1123e: 84,
+    0x11241: 84,
+    0x112df: 84,
+    0x112e3: 84,
+    0x112e4: 84,
+    0x112e5: 84,
+    0x112e6: 84,
+    0x112e7: 84,
+    0x112e8: 84,
+    0x112e9: 84,
+    0x112ea: 84,
+    0x11300: 84,
+    0x11301: 84,
+    0x1133b: 84,
+    0x1133c: 84,
+    0x11340: 84,
+    0x11366: 84,
+    0x11367: 84,
+    0x11368: 84,
+    0x11369: 84,
+    0x1136a: 84,
+    0x1136b: 84,
+    0x1136c: 84,
+    0x11370: 84,
+    0x11371: 84,
+    0x11372: 84,
+    0x11373: 84,
+    0x11374: 84,
+    0x11438: 84,
+    0x11439: 84,
+    0x1143a: 84,
+    0x1143b: 84,
+    0x1143c: 84,
+    0x1143d: 84,
+    0x1143e: 84,
+    0x1143f: 84,
+    0x11442: 84,
+    0x11443: 84,
+    0x11444: 84,
+    0x11446: 84,
+    0x1145e: 84,
+    0x114b3: 84,
+    0x114b4: 84,
+    0x114b5: 84,
+    0x114b6: 84,
+    0x114b7: 84,
+    0x114b8: 84,
+    0x114ba: 84,
+    0x114bf: 84,
+    0x114c0: 84,
+    0x114c2: 84,
+    0x114c3: 84,
+    0x115b2: 84,
+    0x115b3: 84,
+    0x115b4: 84,
+    0x115b5: 84,
+    0x115bc: 84,
+    0x115bd: 84,
+    0x115bf: 84,
+    0x115c0: 84,
+    0x115dc: 84,
+    0x115dd: 84,
+    0x11633: 84,
+    0x11634: 84,
+    0x11635: 84,
+    0x11636: 84,
+    0x11637: 84,
+    0x11638: 84,
+    0x11639: 84,
+    0x1163a: 84,
+    0x1163d: 84,
+    0x1163f: 84,
+    0x11640: 84,
+    0x116ab: 84,
+    0x116ad: 84,
+    0x116b0: 84,
+    0x116b1: 84,
+    0x116b2: 84,
+    0x116b3: 84,
+    0x116b4: 84,
+    0x116b5: 84,
+    0x116b7: 84,
+    0x1171d: 84,
+    0x1171e: 84,
+    0x1171f: 84,
+    0x11722: 84,
+    0x11723: 84,
+    0x11724: 84,
+    0x11725: 84,
+    0x11727: 84,
+    0x11728: 84,
+    0x11729: 84,
+    0x1172a: 84,
+    0x1172b: 84,
+    0x1182f: 84,
+    0x11830: 84,
+    0x11831: 84,
+    0x11832: 84,
+    0x11833: 84,
+    0x11834: 84,
+    0x11835: 84,
+    0x11836: 84,
+    0x11837: 84,
+    0x11839: 84,
+    0x1183a: 84,
+    0x1193b: 84,
+    0x1193c: 84,
+    0x1193e: 84,
+    0x11943: 84,
+    0x119d4: 84,
+    0x119d5: 84,
+    0x119d6: 84,
+    0x119d7: 84,
+    0x119da: 84,
+    0x119db: 84,
+    0x119e0: 84,
+    0x11a01: 84,
+    0x11a02: 84,
+    0x11a03: 84,
+    0x11a04: 84,
+    0x11a05: 84,
+    0x11a06: 84,
+    0x11a07: 84,
+    0x11a08: 84,
+    0x11a09: 84,
+    0x11a0a: 84,
+    0x11a33: 84,
+    0x11a34: 84,
+    0x11a35: 84,
+    0x11a36: 84,
+    0x11a37: 84,
+    0x11a38: 84,
+    0x11a3b: 84,
+    0x11a3c: 84,
+    0x11a3d: 84,
+    0x11a3e: 84,
+    0x11a47: 84,
+    0x11a51: 84,
+    0x11a52: 84,
+    0x11a53: 84,
+    0x11a54: 84,
+    0x11a55: 84,
+    0x11a56: 84,
+    0x11a59: 84,
+    0x11a5a: 84,
+    0x11a5b: 84,
+    0x11a8a: 84,
+    0x11a8b: 84,
+    0x11a8c: 84,
+    0x11a8d: 84,
+    0x11a8e: 84,
+    0x11a8f: 84,
+    0x11a90: 84,
+    0x11a91: 84,
+    0x11a92: 84,
+    0x11a93: 84,
+    0x11a94: 84,
+    0x11a95: 84,
+    0x11a96: 84,
+    0x11a98: 84,
+    0x11a99: 84,
+    0x11c30: 84,
+    0x11c31: 84,
+    0x11c32: 84,
+    0x11c33: 84,
+    0x11c34: 84,
+    0x11c35: 84,
+    0x11c36: 84,
+    0x11c38: 84,
+    0x11c39: 84,
+    0x11c3a: 84,
+    0x11c3b: 84,
+    0x11c3c: 84,
+    0x11c3d: 84,
+    0x11c3f: 84,
+    0x11c92: 84,
+    0x11c93: 84,
+    0x11c94: 84,
+    0x11c95: 84,
+    0x11c96: 84,
+    0x11c97: 84,
+    0x11c98: 84,
+    0x11c99: 84,
+    0x11c9a: 84,
+    0x11c9b: 84,
+    0x11c9c: 84,
+    0x11c9d: 84,
+    0x11c9e: 84,
+    0x11c9f: 84,
+    0x11ca0: 84,
+    0x11ca1: 84,
+    0x11ca2: 84,
+    0x11ca3: 84,
+    0x11ca4: 84,
+    0x11ca5: 84,
+    0x11ca6: 84,
+    0x11ca7: 84,
+    0x11caa: 84,
+    0x11cab: 84,
+    0x11cac: 84,
+    0x11cad: 84,
+    0x11cae: 84,
+    0x11caf: 84,
+    0x11cb0: 84,
+    0x11cb2: 84,
+    0x11cb3: 84,
+    0x11cb5: 84,
+    0x11cb6: 84,
+    0x11d31: 84,
+    0x11d32: 84,
+    0x11d33: 84,
+    0x11d34: 84,
+    0x11d35: 84,
+    0x11d36: 84,
+    0x11d3a: 84,
+    0x11d3c: 84,
+    0x11d3d: 84,
+    0x11d3f: 84,
+    0x11d40: 84,
+    0x11d41: 84,
+    0x11d42: 84,
+    0x11d43: 84,
+    0x11d44: 84,
+    0x11d45: 84,
+    0x11d47: 84,
+    0x11d90: 84,
+    0x11d91: 84,
+    0x11d95: 84,
+    0x11d97: 84,
+    0x11ef3: 84,
+    0x11ef4: 84,
+    0x11f00: 84,
+    0x11f01: 84,
+    0x11f36: 84,
+    0x11f37: 84,
+    0x11f38: 84,
+    0x11f39: 84,
+    0x11f3a: 84,
+    0x11f40: 84,
+    0x11f42: 84,
+    0x13430: 84,
+    0x13431: 84,
+    0x13432: 84,
+    0x13433: 84,
+    0x13434: 84,
+    0x13435: 84,
+    0x13436: 84,
+    0x13437: 84,
+    0x13438: 84,
+    0x13439: 84,
+    0x1343a: 84,
+    0x1343b: 84,
+    0x1343c: 84,
+    0x1343d: 84,
+    0x1343e: 84,
+    0x1343f: 84,
+    0x13440: 84,
+    0x13447: 84,
+    0x13448: 84,
+    0x13449: 84,
+    0x1344a: 84,
+    0x1344b: 84,
+    0x1344c: 84,
+    0x1344d: 84,
+    0x1344e: 84,
+    0x1344f: 84,
+    0x13450: 84,
+    0x13451: 84,
+    0x13452: 84,
+    0x13453: 84,
+    0x13454: 84,
+    0x13455: 84,
+    0x16af0: 84,
+    0x16af1: 84,
+    0x16af2: 84,
+    0x16af3: 84,
+    0x16af4: 84,
+    0x16b30: 84,
+    0x16b31: 84,
+    0x16b32: 84,
+    0x16b33: 84,
+    0x16b34: 84,
+    0x16b35: 84,
+    0x16b36: 84,
+    0x16f4f: 84,
+    0x16f8f: 84,
+    0x16f90: 84,
+    0x16f91: 84,
+    0x16f92: 84,
+    0x16fe4: 84,
+    0x1bc9d: 84,
+    0x1bc9e: 84,
+    0x1bca0: 84,
+    0x1bca1: 84,
+    0x1bca2: 84,
+    0x1bca3: 84,
+    0x1cf00: 84,
+    0x1cf01: 84,
+    0x1cf02: 84,
+    0x1cf03: 84,
+    0x1cf04: 84,
+    0x1cf05: 84,
+    0x1cf06: 84,
+    0x1cf07: 84,
+    0x1cf08: 84,
+    0x1cf09: 84,
+    0x1cf0a: 84,
+    0x1cf0b: 84,
+    0x1cf0c: 84,
+    0x1cf0d: 84,
+    0x1cf0e: 84,
+    0x1cf0f: 84,
+    0x1cf10: 84,
+    0x1cf11: 84,
+    0x1cf12: 84,
+    0x1cf13: 84,
+    0x1cf14: 84,
+    0x1cf15: 84,
+    0x1cf16: 84,
+    0x1cf17: 84,
+    0x1cf18: 84,
+    0x1cf19: 84,
+    0x1cf1a: 84,
+    0x1cf1b: 84,
+    0x1cf1c: 84,
+    0x1cf1d: 84,
+    0x1cf1e: 84,
+    0x1cf1f: 84,
+    0x1cf20: 84,
+    0x1cf21: 84,
+    0x1cf22: 84,
+    0x1cf23: 84,
+    0x1cf24: 84,
+    0x1cf25: 84,
+    0x1cf26: 84,
+    0x1cf27: 84,
+    0x1cf28: 84,
+    0x1cf29: 84,
+    0x1cf2a: 84,
+    0x1cf2b: 84,
+    0x1cf2c: 84,
+    0x1cf2d: 84,
+    0x1cf30: 84,
+    0x1cf31: 84,
+    0x1cf32: 84,
+    0x1cf33: 84,
+    0x1cf34: 84,
+    0x1cf35: 84,
+    0x1cf36: 84,
+    0x1cf37: 84,
+    0x1cf38: 84,
+    0x1cf39: 84,
+    0x1cf3a: 84,
+    0x1cf3b: 84,
+    0x1cf3c: 84,
+    0x1cf3d: 84,
+    0x1cf3e: 84,
+    0x1cf3f: 84,
+    0x1cf40: 84,
+    0x1cf41: 84,
+    0x1cf42: 84,
+    0x1cf43: 84,
+    0x1cf44: 84,
+    0x1cf45: 84,
+    0x1cf46: 84,
+    0x1d167: 84,
+    0x1d168: 84,
+    0x1d169: 84,
+    0x1d173: 84,
+    0x1d174: 84,
+    0x1d175: 84,
+    0x1d176: 84,
+    0x1d177: 84,
+    0x1d178: 84,
+    0x1d179: 84,
+    0x1d17a: 84,
+    0x1d17b: 84,
+    0x1d17c: 84,
+    0x1d17d: 84,
+    0x1d17e: 84,
+    0x1d17f: 84,
+    0x1d180: 84,
+    0x1d181: 84,
+    0x1d182: 84,
+    0x1d185: 84,
+    0x1d186: 84,
+    0x1d187: 84,
+    0x1d188: 84,
+    0x1d189: 84,
+    0x1d18a: 84,
+    0x1d18b: 84,
+    0x1d1aa: 84,
+    0x1d1ab: 84,
+    0x1d1ac: 84,
+    0x1d1ad: 84,
+    0x1d242: 84,
+    0x1d243: 84,
+    0x1d244: 84,
+    0x1da00: 84,
+    0x1da01: 84,
+    0x1da02: 84,
+    0x1da03: 84,
+    0x1da04: 84,
+    0x1da05: 84,
+    0x1da06: 84,
+    0x1da07: 84,
+    0x1da08: 84,
+    0x1da09: 84,
+    0x1da0a: 84,
+    0x1da0b: 84,
+    0x1da0c: 84,
+    0x1da0d: 84,
+    0x1da0e: 84,
+    0x1da0f: 84,
+    0x1da10: 84,
+    0x1da11: 84,
+    0x1da12: 84,
+    0x1da13: 84,
+    0x1da14: 84,
+    0x1da15: 84,
+    0x1da16: 84,
+    0x1da17: 84,
+    0x1da18: 84,
+    0x1da19: 84,
+    0x1da1a: 84,
+    0x1da1b: 84,
+    0x1da1c: 84,
+    0x1da1d: 84,
+    0x1da1e: 84,
+    0x1da1f: 84,
+    0x1da20: 84,
+    0x1da21: 84,
+    0x1da22: 84,
+    0x1da23: 84,
+    0x1da24: 84,
+    0x1da25: 84,
+    0x1da26: 84,
+    0x1da27: 84,
+    0x1da28: 84,
+    0x1da29: 84,
+    0x1da2a: 84,
+    0x1da2b: 84,
+    0x1da2c: 84,
+    0x1da2d: 84,
+    0x1da2e: 84,
+    0x1da2f: 84,
+    0x1da30: 84,
+    0x1da31: 84,
+    0x1da32: 84,
+    0x1da33: 84,
+    0x1da34: 84,
+    0x1da35: 84,
+    0x1da36: 84,
+    0x1da3b: 84,
+    0x1da3c: 84,
+    0x1da3d: 84,
+    0x1da3e: 84,
+    0x1da3f: 84,
+    0x1da40: 84,
+    0x1da41: 84,
+    0x1da42: 84,
+    0x1da43: 84,
+    0x1da44: 84,
+    0x1da45: 84,
+    0x1da46: 84,
+    0x1da47: 84,
+    0x1da48: 84,
+    0x1da49: 84,
+    0x1da4a: 84,
+    0x1da4b: 84,
+    0x1da4c: 84,
+    0x1da4d: 84,
+    0x1da4e: 84,
+    0x1da4f: 84,
+    0x1da50: 84,
+    0x1da51: 84,
+    0x1da52: 84,
+    0x1da53: 84,
+    0x1da54: 84,
+    0x1da55: 84,
+    0x1da56: 84,
+    0x1da57: 84,
+    0x1da58: 84,
+    0x1da59: 84,
+    0x1da5a: 84,
+    0x1da5b: 84,
+    0x1da5c: 84,
+    0x1da5d: 84,
+    0x1da5e: 84,
+    0x1da5f: 84,
+    0x1da60: 84,
+    0x1da61: 84,
+    0x1da62: 84,
+    0x1da63: 84,
+    0x1da64: 84,
+    0x1da65: 84,
+    0x1da66: 84,
+    0x1da67: 84,
+    0x1da68: 84,
+    0x1da69: 84,
+    0x1da6a: 84,
+    0x1da6b: 84,
+    0x1da6c: 84,
+    0x1da75: 84,
+    0x1da84: 84,
+    0x1da9b: 84,
+    0x1da9c: 84,
+    0x1da9d: 84,
+    0x1da9e: 84,
+    0x1da9f: 84,
+    0x1daa1: 84,
+    0x1daa2: 84,
+    0x1daa3: 84,
+    0x1daa4: 84,
+    0x1daa5: 84,
+    0x1daa6: 84,
+    0x1daa7: 84,
+    0x1daa8: 84,
+    0x1daa9: 84,
+    0x1daaa: 84,
+    0x1daab: 84,
+    0x1daac: 84,
+    0x1daad: 84,
+    0x1daae: 84,
+    0x1daaf: 84,
+    0x1e000: 84,
+    0x1e001: 84,
+    0x1e002: 84,
+    0x1e003: 84,
+    0x1e004: 84,
+    0x1e005: 84,
+    0x1e006: 84,
+    0x1e008: 84,
+    0x1e009: 84,
+    0x1e00a: 84,
+    0x1e00b: 84,
+    0x1e00c: 84,
+    0x1e00d: 84,
+    0x1e00e: 84,
+    0x1e00f: 84,
+    0x1e010: 84,
+    0x1e011: 84,
+    0x1e012: 84,
+    0x1e013: 84,
+    0x1e014: 84,
+    0x1e015: 84,
+    0x1e016: 84,
+    0x1e017: 84,
+    0x1e018: 84,
+    0x1e01b: 84,
+    0x1e01c: 84,
+    0x1e01d: 84,
+    0x1e01e: 84,
+    0x1e01f: 84,
+    0x1e020: 84,
+    0x1e021: 84,
+    0x1e023: 84,
+    0x1e024: 84,
+    0x1e026: 84,
+    0x1e027: 84,
+    0x1e028: 84,
+    0x1e029: 84,
+    0x1e02a: 84,
+    0x1e08f: 84,
+    0x1e130: 84,
+    0x1e131: 84,
+    0x1e132: 84,
+    0x1e133: 84,
+    0x1e134: 84,
+    0x1e135: 84,
+    0x1e136: 84,
+    0x1e2ae: 84,
+    0x1e2ec: 84,
+    0x1e2ed: 84,
+    0x1e2ee: 84,
+    0x1e2ef: 84,
+    0x1e4ec: 84,
+    0x1e4ed: 84,
+    0x1e4ee: 84,
+    0x1e4ef: 84,
+    0x1e8d0: 84,
+    0x1e8d1: 84,
+    0x1e8d2: 84,
+    0x1e8d3: 84,
+    0x1e8d4: 84,
+    0x1e8d5: 84,
+    0x1e8d6: 84,
+    0x1e900: 68,
+    0x1e901: 68,
+    0x1e902: 68,
+    0x1e903: 68,
+    0x1e904: 68,
+    0x1e905: 68,
+    0x1e906: 68,
+    0x1e907: 68,
+    0x1e908: 68,
+    0x1e909: 68,
+    0x1e90a: 68,
+    0x1e90b: 68,
+    0x1e90c: 68,
+    0x1e90d: 68,
+    0x1e90e: 68,
+    0x1e90f: 68,
+    0x1e910: 68,
+    0x1e911: 68,
+    0x1e912: 68,
+    0x1e913: 68,
+    0x1e914: 68,
+    0x1e915: 68,
+    0x1e916: 68,
+    0x1e917: 68,
+    0x1e918: 68,
+    0x1e919: 68,
+    0x1e91a: 68,
+    0x1e91b: 68,
+    0x1e91c: 68,
+    0x1e91d: 68,
+    0x1e91e: 68,
+    0x1e91f: 68,
+    0x1e920: 68,
+    0x1e921: 68,
+    0x1e922: 68,
+    0x1e923: 68,
+    0x1e924: 68,
+    0x1e925: 68,
+    0x1e926: 68,
+    0x1e927: 68,
+    0x1e928: 68,
+    0x1e929: 68,
+    0x1e92a: 68,
+    0x1e92b: 68,
+    0x1e92c: 68,
+    0x1e92d: 68,
+    0x1e92e: 68,
+    0x1e92f: 68,
+    0x1e930: 68,
+    0x1e931: 68,
+    0x1e932: 68,
+    0x1e933: 68,
+    0x1e934: 68,
+    0x1e935: 68,
+    0x1e936: 68,
+    0x1e937: 68,
+    0x1e938: 68,
+    0x1e939: 68,
+    0x1e93a: 68,
+    0x1e93b: 68,
+    0x1e93c: 68,
+    0x1e93d: 68,
+    0x1e93e: 68,
+    0x1e93f: 68,
+    0x1e940: 68,
+    0x1e941: 68,
+    0x1e942: 68,
+    0x1e943: 68,
+    0x1e944: 84,
+    0x1e945: 84,
+    0x1e946: 84,
+    0x1e947: 84,
+    0x1e948: 84,
+    0x1e949: 84,
+    0x1e94a: 84,
+    0x1e94b: 84,
+    0xe0001: 84,
+    0xe0020: 84,
+    0xe0021: 84,
+    0xe0022: 84,
+    0xe0023: 84,
+    0xe0024: 84,
+    0xe0025: 84,
+    0xe0026: 84,
+    0xe0027: 84,
+    0xe0028: 84,
+    0xe0029: 84,
+    0xe002a: 84,
+    0xe002b: 84,
+    0xe002c: 84,
+    0xe002d: 84,
+    0xe002e: 84,
+    0xe002f: 84,
+    0xe0030: 84,
+    0xe0031: 84,
+    0xe0032: 84,
+    0xe0033: 84,
+    0xe0034: 84,
+    0xe0035: 84,
+    0xe0036: 84,
+    0xe0037: 84,
+    0xe0038: 84,
+    0xe0039: 84,
+    0xe003a: 84,
+    0xe003b: 84,
+    0xe003c: 84,
+    0xe003d: 84,
+    0xe003e: 84,
+    0xe003f: 84,
+    0xe0040: 84,
+    0xe0041: 84,
+    0xe0042: 84,
+    0xe0043: 84,
+    0xe0044: 84,
+    0xe0045: 84,
+    0xe0046: 84,
+    0xe0047: 84,
+    0xe0048: 84,
+    0xe0049: 84,
+    0xe004a: 84,
+    0xe004b: 84,
+    0xe004c: 84,
+    0xe004d: 84,
+    0xe004e: 84,
+    0xe004f: 84,
+    0xe0050: 84,
+    0xe0051: 84,
+    0xe0052: 84,
+    0xe0053: 84,
+    0xe0054: 84,
+    0xe0055: 84,
+    0xe0056: 84,
+    0xe0057: 84,
+    0xe0058: 84,
+    0xe0059: 84,
+    0xe005a: 84,
+    0xe005b: 84,
+    0xe005c: 84,
+    0xe005d: 84,
+    0xe005e: 84,
+    0xe005f: 84,
+    0xe0060: 84,
+    0xe0061: 84,
+    0xe0062: 84,
+    0xe0063: 84,
+    0xe0064: 84,
+    0xe0065: 84,
+    0xe0066: 84,
+    0xe0067: 84,
+    0xe0068: 84,
+    0xe0069: 84,
+    0xe006a: 84,
+    0xe006b: 84,
+    0xe006c: 84,
+    0xe006d: 84,
+    0xe006e: 84,
+    0xe006f: 84,
+    0xe0070: 84,
+    0xe0071: 84,
+    0xe0072: 84,
+    0xe0073: 84,
+    0xe0074: 84,
+    0xe0075: 84,
+    0xe0076: 84,
+    0xe0077: 84,
+    0xe0078: 84,
+    0xe0079: 84,
+    0xe007a: 84,
+    0xe007b: 84,
+    0xe007c: 84,
+    0xe007d: 84,
+    0xe007e: 84,
+    0xe007f: 84,
+    0xe0100: 84,
+    0xe0101: 84,
+    0xe0102: 84,
+    0xe0103: 84,
+    0xe0104: 84,
+    0xe0105: 84,
+    0xe0106: 84,
+    0xe0107: 84,
+    0xe0108: 84,
+    0xe0109: 84,
+    0xe010a: 84,
+    0xe010b: 84,
+    0xe010c: 84,
+    0xe010d: 84,
+    0xe010e: 84,
+    0xe010f: 84,
+    0xe0110: 84,
+    0xe0111: 84,
+    0xe0112: 84,
+    0xe0113: 84,
+    0xe0114: 84,
+    0xe0115: 84,
+    0xe0116: 84,
+    0xe0117: 84,
+    0xe0118: 84,
+    0xe0119: 84,
+    0xe011a: 84,
+    0xe011b: 84,
+    0xe011c: 84,
+    0xe011d: 84,
+    0xe011e: 84,
+    0xe011f: 84,
+    0xe0120: 84,
+    0xe0121: 84,
+    0xe0122: 84,
+    0xe0123: 84,
+    0xe0124: 84,
+    0xe0125: 84,
+    0xe0126: 84,
+    0xe0127: 84,
+    0xe0128: 84,
+    0xe0129: 84,
+    0xe012a: 84,
+    0xe012b: 84,
+    0xe012c: 84,
+    0xe012d: 84,
+    0xe012e: 84,
+    0xe012f: 84,
+    0xe0130: 84,
+    0xe0131: 84,
+    0xe0132: 84,
+    0xe0133: 84,
+    0xe0134: 84,
+    0xe0135: 84,
+    0xe0136: 84,
+    0xe0137: 84,
+    0xe0138: 84,
+    0xe0139: 84,
+    0xe013a: 84,
+    0xe013b: 84,
+    0xe013c: 84,
+    0xe013d: 84,
+    0xe013e: 84,
+    0xe013f: 84,
+    0xe0140: 84,
+    0xe0141: 84,
+    0xe0142: 84,
+    0xe0143: 84,
+    0xe0144: 84,
+    0xe0145: 84,
+    0xe0146: 84,
+    0xe0147: 84,
+    0xe0148: 84,
+    0xe0149: 84,
+    0xe014a: 84,
+    0xe014b: 84,
+    0xe014c: 84,
+    0xe014d: 84,
+    0xe014e: 84,
+    0xe014f: 84,
+    0xe0150: 84,
+    0xe0151: 84,
+    0xe0152: 84,
+    0xe0153: 84,
+    0xe0154: 84,
+    0xe0155: 84,
+    0xe0156: 84,
+    0xe0157: 84,
+    0xe0158: 84,
+    0xe0159: 84,
+    0xe015a: 84,
+    0xe015b: 84,
+    0xe015c: 84,
+    0xe015d: 84,
+    0xe015e: 84,
+    0xe015f: 84,
+    0xe0160: 84,
+    0xe0161: 84,
+    0xe0162: 84,
+    0xe0163: 84,
+    0xe0164: 84,
+    0xe0165: 84,
+    0xe0166: 84,
+    0xe0167: 84,
+    0xe0168: 84,
+    0xe0169: 84,
+    0xe016a: 84,
+    0xe016b: 84,
+    0xe016c: 84,
+    0xe016d: 84,
+    0xe016e: 84,
+    0xe016f: 84,
+    0xe0170: 84,
+    0xe0171: 84,
+    0xe0172: 84,
+    0xe0173: 84,
+    0xe0174: 84,
+    0xe0175: 84,
+    0xe0176: 84,
+    0xe0177: 84,
+    0xe0178: 84,
+    0xe0179: 84,
+    0xe017a: 84,
+    0xe017b: 84,
+    0xe017c: 84,
+    0xe017d: 84,
+    0xe017e: 84,
+    0xe017f: 84,
+    0xe0180: 84,
+    0xe0181: 84,
+    0xe0182: 84,
+    0xe0183: 84,
+    0xe0184: 84,
+    0xe0185: 84,
+    0xe0186: 84,
+    0xe0187: 84,
+    0xe0188: 84,
+    0xe0189: 84,
+    0xe018a: 84,
+    0xe018b: 84,
+    0xe018c: 84,
+    0xe018d: 84,
+    0xe018e: 84,
+    0xe018f: 84,
+    0xe0190: 84,
+    0xe0191: 84,
+    0xe0192: 84,
+    0xe0193: 84,
+    0xe0194: 84,
+    0xe0195: 84,
+    0xe0196: 84,
+    0xe0197: 84,
+    0xe0198: 84,
+    0xe0199: 84,
+    0xe019a: 84,
+    0xe019b: 84,
+    0xe019c: 84,
+    0xe019d: 84,
+    0xe019e: 84,
+    0xe019f: 84,
+    0xe01a0: 84,
+    0xe01a1: 84,
+    0xe01a2: 84,
+    0xe01a3: 84,
+    0xe01a4: 84,
+    0xe01a5: 84,
+    0xe01a6: 84,
+    0xe01a7: 84,
+    0xe01a8: 84,
+    0xe01a9: 84,
+    0xe01aa: 84,
+    0xe01ab: 84,
+    0xe01ac: 84,
+    0xe01ad: 84,
+    0xe01ae: 84,
+    0xe01af: 84,
+    0xe01b0: 84,
+    0xe01b1: 84,
+    0xe01b2: 84,
+    0xe01b3: 84,
+    0xe01b4: 84,
+    0xe01b5: 84,
+    0xe01b6: 84,
+    0xe01b7: 84,
+    0xe01b8: 84,
+    0xe01b9: 84,
+    0xe01ba: 84,
+    0xe01bb: 84,
+    0xe01bc: 84,
+    0xe01bd: 84,
+    0xe01be: 84,
+    0xe01bf: 84,
+    0xe01c0: 84,
+    0xe01c1: 84,
+    0xe01c2: 84,
+    0xe01c3: 84,
+    0xe01c4: 84,
+    0xe01c5: 84,
+    0xe01c6: 84,
+    0xe01c7: 84,
+    0xe01c8: 84,
+    0xe01c9: 84,
+    0xe01ca: 84,
+    0xe01cb: 84,
+    0xe01cc: 84,
+    0xe01cd: 84,
+    0xe01ce: 84,
+    0xe01cf: 84,
+    0xe01d0: 84,
+    0xe01d1: 84,
+    0xe01d2: 84,
+    0xe01d3: 84,
+    0xe01d4: 84,
+    0xe01d5: 84,
+    0xe01d6: 84,
+    0xe01d7: 84,
+    0xe01d8: 84,
+    0xe01d9: 84,
+    0xe01da: 84,
+    0xe01db: 84,
+    0xe01dc: 84,
+    0xe01dd: 84,
+    0xe01de: 84,
+    0xe01df: 84,
+    0xe01e0: 84,
+    0xe01e1: 84,
+    0xe01e2: 84,
+    0xe01e3: 84,
+    0xe01e4: 84,
+    0xe01e5: 84,
+    0xe01e6: 84,
+    0xe01e7: 84,
+    0xe01e8: 84,
+    0xe01e9: 84,
+    0xe01ea: 84,
+    0xe01eb: 84,
+    0xe01ec: 84,
+    0xe01ed: 84,
+    0xe01ee: 84,
+    0xe01ef: 84,
+}
+codepoint_classes = {
+    'PVALID': (
+        0x2d0000002e,
+        0x300000003a,
+        0x610000007b,
+        0xdf000000f7,
+        0xf800000100,
+        0x10100000102,
+        0x10300000104,
+        0x10500000106,
+        0x10700000108,
+        0x1090000010a,
+        0x10b0000010c,
+        0x10d0000010e,
+        0x10f00000110,
+        0x11100000112,
+        0x11300000114,
+        0x11500000116,
+        0x11700000118,
+        0x1190000011a,
+        0x11b0000011c,
+        0x11d0000011e,
+        0x11f00000120,
+        0x12100000122,
+        0x12300000124,
+        0x12500000126,
+        0x12700000128,
+        0x1290000012a,
+        0x12b0000012c,
+        0x12d0000012e,
+        0x12f00000130,
+        0x13100000132,
+        0x13500000136,
+        0x13700000139,
+        0x13a0000013b,
+        0x13c0000013d,
+        0x13e0000013f,
+        0x14200000143,
+        0x14400000145,
+        0x14600000147,
+        0x14800000149,
+        0x14b0000014c,
+        0x14d0000014e,
+        0x14f00000150,
+        0x15100000152,
+        0x15300000154,
+        0x15500000156,
+        0x15700000158,
+        0x1590000015a,
+        0x15b0000015c,
+        0x15d0000015e,
+        0x15f00000160,
+        0x16100000162,
+        0x16300000164,
+        0x16500000166,
+        0x16700000168,
+        0x1690000016a,
+        0x16b0000016c,
+        0x16d0000016e,
+        0x16f00000170,
+        0x17100000172,
+        0x17300000174,
+        0x17500000176,
+        0x17700000178,
+        0x17a0000017b,
+        0x17c0000017d,
+        0x17e0000017f,
+        0x18000000181,
+        0x18300000184,
+        0x18500000186,
+        0x18800000189,
+        0x18c0000018e,
+        0x19200000193,
+        0x19500000196,
+        0x1990000019c,
+        0x19e0000019f,
+        0x1a1000001a2,
+        0x1a3000001a4,
+        0x1a5000001a6,
+        0x1a8000001a9,
+        0x1aa000001ac,
+        0x1ad000001ae,
+        0x1b0000001b1,
+        0x1b4000001b5,
+        0x1b6000001b7,
+        0x1b9000001bc,
+        0x1bd000001c4,
+        0x1ce000001cf,
+        0x1d0000001d1,
+        0x1d2000001d3,
+        0x1d4000001d5,
+        0x1d6000001d7,
+        0x1d8000001d9,
+        0x1da000001db,
+        0x1dc000001de,
+        0x1df000001e0,
+        0x1e1000001e2,
+        0x1e3000001e4,
+        0x1e5000001e6,
+        0x1e7000001e8,
+        0x1e9000001ea,
+        0x1eb000001ec,
+        0x1ed000001ee,
+        0x1ef000001f1,
+        0x1f5000001f6,
+        0x1f9000001fa,
+        0x1fb000001fc,
+        0x1fd000001fe,
+        0x1ff00000200,
+        0x20100000202,
+        0x20300000204,
+        0x20500000206,
+        0x20700000208,
+        0x2090000020a,
+        0x20b0000020c,
+        0x20d0000020e,
+        0x20f00000210,
+        0x21100000212,
+        0x21300000214,
+        0x21500000216,
+        0x21700000218,
+        0x2190000021a,
+        0x21b0000021c,
+        0x21d0000021e,
+        0x21f00000220,
+        0x22100000222,
+        0x22300000224,
+        0x22500000226,
+        0x22700000228,
+        0x2290000022a,
+        0x22b0000022c,
+        0x22d0000022e,
+        0x22f00000230,
+        0x23100000232,
+        0x2330000023a,
+        0x23c0000023d,
+        0x23f00000241,
+        0x24200000243,
+        0x24700000248,
+        0x2490000024a,
+        0x24b0000024c,
+        0x24d0000024e,
+        0x24f000002b0,
+        0x2b9000002c2,
+        0x2c6000002d2,
+        0x2ec000002ed,
+        0x2ee000002ef,
+        0x30000000340,
+        0x34200000343,
+        0x3460000034f,
+        0x35000000370,
+        0x37100000372,
+        0x37300000374,
+        0x37700000378,
+        0x37b0000037e,
+        0x39000000391,
+        0x3ac000003cf,
+        0x3d7000003d8,
+        0x3d9000003da,
+        0x3db000003dc,
+        0x3dd000003de,
+        0x3df000003e0,
+        0x3e1000003e2,
+        0x3e3000003e4,
+        0x3e5000003e6,
+        0x3e7000003e8,
+        0x3e9000003ea,
+        0x3eb000003ec,
+        0x3ed000003ee,
+        0x3ef000003f0,
+        0x3f3000003f4,
+        0x3f8000003f9,
+        0x3fb000003fd,
+        0x43000000460,
+        0x46100000462,
+        0x46300000464,
+        0x46500000466,
+        0x46700000468,
+        0x4690000046a,
+        0x46b0000046c,
+        0x46d0000046e,
+        0x46f00000470,
+        0x47100000472,
+        0x47300000474,
+        0x47500000476,
+        0x47700000478,
+        0x4790000047a,
+        0x47b0000047c,
+        0x47d0000047e,
+        0x47f00000480,
+        0x48100000482,
+        0x48300000488,
+        0x48b0000048c,
+        0x48d0000048e,
+        0x48f00000490,
+        0x49100000492,
+        0x49300000494,
+        0x49500000496,
+        0x49700000498,
+        0x4990000049a,
+        0x49b0000049c,
+        0x49d0000049e,
+        0x49f000004a0,
+        0x4a1000004a2,
+        0x4a3000004a4,
+        0x4a5000004a6,
+        0x4a7000004a8,
+        0x4a9000004aa,
+        0x4ab000004ac,
+        0x4ad000004ae,
+        0x4af000004b0,
+        0x4b1000004b2,
+        0x4b3000004b4,
+        0x4b5000004b6,
+        0x4b7000004b8,
+        0x4b9000004ba,
+        0x4bb000004bc,
+        0x4bd000004be,
+        0x4bf000004c0,
+        0x4c2000004c3,
+        0x4c4000004c5,
+        0x4c6000004c7,
+        0x4c8000004c9,
+        0x4ca000004cb,
+        0x4cc000004cd,
+        0x4ce000004d0,
+        0x4d1000004d2,
+        0x4d3000004d4,
+        0x4d5000004d6,
+        0x4d7000004d8,
+        0x4d9000004da,
+        0x4db000004dc,
+        0x4dd000004de,
+        0x4df000004e0,
+        0x4e1000004e2,
+        0x4e3000004e4,
+        0x4e5000004e6,
+        0x4e7000004e8,
+        0x4e9000004ea,
+        0x4eb000004ec,
+        0x4ed000004ee,
+        0x4ef000004f0,
+        0x4f1000004f2,
+        0x4f3000004f4,
+        0x4f5000004f6,
+        0x4f7000004f8,
+        0x4f9000004fa,
+        0x4fb000004fc,
+        0x4fd000004fe,
+        0x4ff00000500,
+        0x50100000502,
+        0x50300000504,
+        0x50500000506,
+        0x50700000508,
+        0x5090000050a,
+        0x50b0000050c,
+        0x50d0000050e,
+        0x50f00000510,
+        0x51100000512,
+        0x51300000514,
+        0x51500000516,
+        0x51700000518,
+        0x5190000051a,
+        0x51b0000051c,
+        0x51d0000051e,
+        0x51f00000520,
+        0x52100000522,
+        0x52300000524,
+        0x52500000526,
+        0x52700000528,
+        0x5290000052a,
+        0x52b0000052c,
+        0x52d0000052e,
+        0x52f00000530,
+        0x5590000055a,
+        0x56000000587,
+        0x58800000589,
+        0x591000005be,
+        0x5bf000005c0,
+        0x5c1000005c3,
+        0x5c4000005c6,
+        0x5c7000005c8,
+        0x5d0000005eb,
+        0x5ef000005f3,
+        0x6100000061b,
+        0x62000000640,
+        0x64100000660,
+        0x66e00000675,
+        0x679000006d4,
+        0x6d5000006dd,
+        0x6df000006e9,
+        0x6ea000006f0,
+        0x6fa00000700,
+        0x7100000074b,
+        0x74d000007b2,
+        0x7c0000007f6,
+        0x7fd000007fe,
+        0x8000000082e,
+        0x8400000085c,
+        0x8600000086b,
+        0x87000000888,
+        0x8890000088f,
+        0x898000008e2,
+        0x8e300000958,
+        0x96000000964,
+        0x96600000970,
+        0x97100000984,
+        0x9850000098d,
+        0x98f00000991,
+        0x993000009a9,
+        0x9aa000009b1,
+        0x9b2000009b3,
+        0x9b6000009ba,
+        0x9bc000009c5,
+        0x9c7000009c9,
+        0x9cb000009cf,
+        0x9d7000009d8,
+        0x9e0000009e4,
+        0x9e6000009f2,
+        0x9fc000009fd,
+        0x9fe000009ff,
+        0xa0100000a04,
+        0xa0500000a0b,
+        0xa0f00000a11,
+        0xa1300000a29,
+        0xa2a00000a31,
+        0xa3200000a33,
+        0xa3500000a36,
+        0xa3800000a3a,
+        0xa3c00000a3d,
+        0xa3e00000a43,
+        0xa4700000a49,
+        0xa4b00000a4e,
+        0xa5100000a52,
+        0xa5c00000a5d,
+        0xa6600000a76,
+        0xa8100000a84,
+        0xa8500000a8e,
+        0xa8f00000a92,
+        0xa9300000aa9,
+        0xaaa00000ab1,
+        0xab200000ab4,
+        0xab500000aba,
+        0xabc00000ac6,
+        0xac700000aca,
+        0xacb00000ace,
+        0xad000000ad1,
+        0xae000000ae4,
+        0xae600000af0,
+        0xaf900000b00,
+        0xb0100000b04,
+        0xb0500000b0d,
+        0xb0f00000b11,
+        0xb1300000b29,
+        0xb2a00000b31,
+        0xb3200000b34,
+        0xb3500000b3a,
+        0xb3c00000b45,
+        0xb4700000b49,
+        0xb4b00000b4e,
+        0xb5500000b58,
+        0xb5f00000b64,
+        0xb6600000b70,
+        0xb7100000b72,
+        0xb8200000b84,
+        0xb8500000b8b,
+        0xb8e00000b91,
+        0xb9200000b96,
+        0xb9900000b9b,
+        0xb9c00000b9d,
+        0xb9e00000ba0,
+        0xba300000ba5,
+        0xba800000bab,
+        0xbae00000bba,
+        0xbbe00000bc3,
+        0xbc600000bc9,
+        0xbca00000bce,
+        0xbd000000bd1,
+        0xbd700000bd8,
+        0xbe600000bf0,
+        0xc0000000c0d,
+        0xc0e00000c11,
+        0xc1200000c29,
+        0xc2a00000c3a,
+        0xc3c00000c45,
+        0xc4600000c49,
+        0xc4a00000c4e,
+        0xc5500000c57,
+        0xc5800000c5b,
+        0xc5d00000c5e,
+        0xc6000000c64,
+        0xc6600000c70,
+        0xc8000000c84,
+        0xc8500000c8d,
+        0xc8e00000c91,
+        0xc9200000ca9,
+        0xcaa00000cb4,
+        0xcb500000cba,
+        0xcbc00000cc5,
+        0xcc600000cc9,
+        0xcca00000cce,
+        0xcd500000cd7,
+        0xcdd00000cdf,
+        0xce000000ce4,
+        0xce600000cf0,
+        0xcf100000cf4,
+        0xd0000000d0d,
+        0xd0e00000d11,
+        0xd1200000d45,
+        0xd4600000d49,
+        0xd4a00000d4f,
+        0xd5400000d58,
+        0xd5f00000d64,
+        0xd6600000d70,
+        0xd7a00000d80,
+        0xd8100000d84,
+        0xd8500000d97,
+        0xd9a00000db2,
+        0xdb300000dbc,
+        0xdbd00000dbe,
+        0xdc000000dc7,
+        0xdca00000dcb,
+        0xdcf00000dd5,
+        0xdd600000dd7,
+        0xdd800000de0,
+        0xde600000df0,
+        0xdf200000df4,
+        0xe0100000e33,
+        0xe3400000e3b,
+        0xe4000000e4f,
+        0xe5000000e5a,
+        0xe8100000e83,
+        0xe8400000e85,
+        0xe8600000e8b,
+        0xe8c00000ea4,
+        0xea500000ea6,
+        0xea700000eb3,
+        0xeb400000ebe,
+        0xec000000ec5,
+        0xec600000ec7,
+        0xec800000ecf,
+        0xed000000eda,
+        0xede00000ee0,
+        0xf0000000f01,
+        0xf0b00000f0c,
+        0xf1800000f1a,
+        0xf2000000f2a,
+        0xf3500000f36,
+        0xf3700000f38,
+        0xf3900000f3a,
+        0xf3e00000f43,
+        0xf4400000f48,
+        0xf4900000f4d,
+        0xf4e00000f52,
+        0xf5300000f57,
+        0xf5800000f5c,
+        0xf5d00000f69,
+        0xf6a00000f6d,
+        0xf7100000f73,
+        0xf7400000f75,
+        0xf7a00000f81,
+        0xf8200000f85,
+        0xf8600000f93,
+        0xf9400000f98,
+        0xf9900000f9d,
+        0xf9e00000fa2,
+        0xfa300000fa7,
+        0xfa800000fac,
+        0xfad00000fb9,
+        0xfba00000fbd,
+        0xfc600000fc7,
+        0x10000000104a,
+        0x10500000109e,
+        0x10d0000010fb,
+        0x10fd00001100,
+        0x120000001249,
+        0x124a0000124e,
+        0x125000001257,
+        0x125800001259,
+        0x125a0000125e,
+        0x126000001289,
+        0x128a0000128e,
+        0x1290000012b1,
+        0x12b2000012b6,
+        0x12b8000012bf,
+        0x12c0000012c1,
+        0x12c2000012c6,
+        0x12c8000012d7,
+        0x12d800001311,
+        0x131200001316,
+        0x13180000135b,
+        0x135d00001360,
+        0x138000001390,
+        0x13a0000013f6,
+        0x14010000166d,
+        0x166f00001680,
+        0x16810000169b,
+        0x16a0000016eb,
+        0x16f1000016f9,
+        0x170000001716,
+        0x171f00001735,
+        0x174000001754,
+        0x17600000176d,
+        0x176e00001771,
+        0x177200001774,
+        0x1780000017b4,
+        0x17b6000017d4,
+        0x17d7000017d8,
+        0x17dc000017de,
+        0x17e0000017ea,
+        0x18100000181a,
+        0x182000001879,
+        0x1880000018ab,
+        0x18b0000018f6,
+        0x19000000191f,
+        0x19200000192c,
+        0x19300000193c,
+        0x19460000196e,
+        0x197000001975,
+        0x1980000019ac,
+        0x19b0000019ca,
+        0x19d0000019da,
+        0x1a0000001a1c,
+        0x1a2000001a5f,
+        0x1a6000001a7d,
+        0x1a7f00001a8a,
+        0x1a9000001a9a,
+        0x1aa700001aa8,
+        0x1ab000001abe,
+        0x1abf00001acf,
+        0x1b0000001b4d,
+        0x1b5000001b5a,
+        0x1b6b00001b74,
+        0x1b8000001bf4,
+        0x1c0000001c38,
+        0x1c4000001c4a,
+        0x1c4d00001c7e,
+        0x1cd000001cd3,
+        0x1cd400001cfb,
+        0x1d0000001d2c,
+        0x1d2f00001d30,
+        0x1d3b00001d3c,
+        0x1d4e00001d4f,
+        0x1d6b00001d78,
+        0x1d7900001d9b,
+        0x1dc000001e00,
+        0x1e0100001e02,
+        0x1e0300001e04,
+        0x1e0500001e06,
+        0x1e0700001e08,
+        0x1e0900001e0a,
+        0x1e0b00001e0c,
+        0x1e0d00001e0e,
+        0x1e0f00001e10,
+        0x1e1100001e12,
+        0x1e1300001e14,
+        0x1e1500001e16,
+        0x1e1700001e18,
+        0x1e1900001e1a,
+        0x1e1b00001e1c,
+        0x1e1d00001e1e,
+        0x1e1f00001e20,
+        0x1e2100001e22,
+        0x1e2300001e24,
+        0x1e2500001e26,
+        0x1e2700001e28,
+        0x1e2900001e2a,
+        0x1e2b00001e2c,
+        0x1e2d00001e2e,
+        0x1e2f00001e30,
+        0x1e3100001e32,
+        0x1e3300001e34,
+        0x1e3500001e36,
+        0x1e3700001e38,
+        0x1e3900001e3a,
+        0x1e3b00001e3c,
+        0x1e3d00001e3e,
+        0x1e3f00001e40,
+        0x1e4100001e42,
+        0x1e4300001e44,
+        0x1e4500001e46,
+        0x1e4700001e48,
+        0x1e4900001e4a,
+        0x1e4b00001e4c,
+        0x1e4d00001e4e,
+        0x1e4f00001e50,
+        0x1e5100001e52,
+        0x1e5300001e54,
+        0x1e5500001e56,
+        0x1e5700001e58,
+        0x1e5900001e5a,
+        0x1e5b00001e5c,
+        0x1e5d00001e5e,
+        0x1e5f00001e60,
+        0x1e6100001e62,
+        0x1e6300001e64,
+        0x1e6500001e66,
+        0x1e6700001e68,
+        0x1e6900001e6a,
+        0x1e6b00001e6c,
+        0x1e6d00001e6e,
+        0x1e6f00001e70,
+        0x1e7100001e72,
+        0x1e7300001e74,
+        0x1e7500001e76,
+        0x1e7700001e78,
+        0x1e7900001e7a,
+        0x1e7b00001e7c,
+        0x1e7d00001e7e,
+        0x1e7f00001e80,
+        0x1e8100001e82,
+        0x1e8300001e84,
+        0x1e8500001e86,
+        0x1e8700001e88,
+        0x1e8900001e8a,
+        0x1e8b00001e8c,
+        0x1e8d00001e8e,
+        0x1e8f00001e90,
+        0x1e9100001e92,
+        0x1e9300001e94,
+        0x1e9500001e9a,
+        0x1e9c00001e9e,
+        0x1e9f00001ea0,
+        0x1ea100001ea2,
+        0x1ea300001ea4,
+        0x1ea500001ea6,
+        0x1ea700001ea8,
+        0x1ea900001eaa,
+        0x1eab00001eac,
+        0x1ead00001eae,
+        0x1eaf00001eb0,
+        0x1eb100001eb2,
+        0x1eb300001eb4,
+        0x1eb500001eb6,
+        0x1eb700001eb8,
+        0x1eb900001eba,
+        0x1ebb00001ebc,
+        0x1ebd00001ebe,
+        0x1ebf00001ec0,
+        0x1ec100001ec2,
+        0x1ec300001ec4,
+        0x1ec500001ec6,
+        0x1ec700001ec8,
+        0x1ec900001eca,
+        0x1ecb00001ecc,
+        0x1ecd00001ece,
+        0x1ecf00001ed0,
+        0x1ed100001ed2,
+        0x1ed300001ed4,
+        0x1ed500001ed6,
+        0x1ed700001ed8,
+        0x1ed900001eda,
+        0x1edb00001edc,
+        0x1edd00001ede,
+        0x1edf00001ee0,
+        0x1ee100001ee2,
+        0x1ee300001ee4,
+        0x1ee500001ee6,
+        0x1ee700001ee8,
+        0x1ee900001eea,
+        0x1eeb00001eec,
+        0x1eed00001eee,
+        0x1eef00001ef0,
+        0x1ef100001ef2,
+        0x1ef300001ef4,
+        0x1ef500001ef6,
+        0x1ef700001ef8,
+        0x1ef900001efa,
+        0x1efb00001efc,
+        0x1efd00001efe,
+        0x1eff00001f08,
+        0x1f1000001f16,
+        0x1f2000001f28,
+        0x1f3000001f38,
+        0x1f4000001f46,
+        0x1f5000001f58,
+        0x1f6000001f68,
+        0x1f7000001f71,
+        0x1f7200001f73,
+        0x1f7400001f75,
+        0x1f7600001f77,
+        0x1f7800001f79,
+        0x1f7a00001f7b,
+        0x1f7c00001f7d,
+        0x1fb000001fb2,
+        0x1fb600001fb7,
+        0x1fc600001fc7,
+        0x1fd000001fd3,
+        0x1fd600001fd8,
+        0x1fe000001fe3,
+        0x1fe400001fe8,
+        0x1ff600001ff7,
+        0x214e0000214f,
+        0x218400002185,
+        0x2c3000002c60,
+        0x2c6100002c62,
+        0x2c6500002c67,
+        0x2c6800002c69,
+        0x2c6a00002c6b,
+        0x2c6c00002c6d,
+        0x2c7100002c72,
+        0x2c7300002c75,
+        0x2c7600002c7c,
+        0x2c8100002c82,
+        0x2c8300002c84,
+        0x2c8500002c86,
+        0x2c8700002c88,
+        0x2c8900002c8a,
+        0x2c8b00002c8c,
+        0x2c8d00002c8e,
+        0x2c8f00002c90,
+        0x2c9100002c92,
+        0x2c9300002c94,
+        0x2c9500002c96,
+        0x2c9700002c98,
+        0x2c9900002c9a,
+        0x2c9b00002c9c,
+        0x2c9d00002c9e,
+        0x2c9f00002ca0,
+        0x2ca100002ca2,
+        0x2ca300002ca4,
+        0x2ca500002ca6,
+        0x2ca700002ca8,
+        0x2ca900002caa,
+        0x2cab00002cac,
+        0x2cad00002cae,
+        0x2caf00002cb0,
+        0x2cb100002cb2,
+        0x2cb300002cb4,
+        0x2cb500002cb6,
+        0x2cb700002cb8,
+        0x2cb900002cba,
+        0x2cbb00002cbc,
+        0x2cbd00002cbe,
+        0x2cbf00002cc0,
+        0x2cc100002cc2,
+        0x2cc300002cc4,
+        0x2cc500002cc6,
+        0x2cc700002cc8,
+        0x2cc900002cca,
+        0x2ccb00002ccc,
+        0x2ccd00002cce,
+        0x2ccf00002cd0,
+        0x2cd100002cd2,
+        0x2cd300002cd4,
+        0x2cd500002cd6,
+        0x2cd700002cd8,
+        0x2cd900002cda,
+        0x2cdb00002cdc,
+        0x2cdd00002cde,
+        0x2cdf00002ce0,
+        0x2ce100002ce2,
+        0x2ce300002ce5,
+        0x2cec00002ced,
+        0x2cee00002cf2,
+        0x2cf300002cf4,
+        0x2d0000002d26,
+        0x2d2700002d28,
+        0x2d2d00002d2e,
+        0x2d3000002d68,
+        0x2d7f00002d97,
+        0x2da000002da7,
+        0x2da800002daf,
+        0x2db000002db7,
+        0x2db800002dbf,
+        0x2dc000002dc7,
+        0x2dc800002dcf,
+        0x2dd000002dd7,
+        0x2dd800002ddf,
+        0x2de000002e00,
+        0x2e2f00002e30,
+        0x300500003008,
+        0x302a0000302e,
+        0x303c0000303d,
+        0x304100003097,
+        0x30990000309b,
+        0x309d0000309f,
+        0x30a1000030fb,
+        0x30fc000030ff,
+        0x310500003130,
+        0x31a0000031c0,
+        0x31f000003200,
+        0x340000004dc0,
+        0x4e000000a48d,
+        0xa4d00000a4fe,
+        0xa5000000a60d,
+        0xa6100000a62c,
+        0xa6410000a642,
+        0xa6430000a644,
+        0xa6450000a646,
+        0xa6470000a648,
+        0xa6490000a64a,
+        0xa64b0000a64c,
+        0xa64d0000a64e,
+        0xa64f0000a650,
+        0xa6510000a652,
+        0xa6530000a654,
+        0xa6550000a656,
+        0xa6570000a658,
+        0xa6590000a65a,
+        0xa65b0000a65c,
+        0xa65d0000a65e,
+        0xa65f0000a660,
+        0xa6610000a662,
+        0xa6630000a664,
+        0xa6650000a666,
+        0xa6670000a668,
+        0xa6690000a66a,
+        0xa66b0000a66c,
+        0xa66d0000a670,
+        0xa6740000a67e,
+        0xa67f0000a680,
+        0xa6810000a682,
+        0xa6830000a684,
+        0xa6850000a686,
+        0xa6870000a688,
+        0xa6890000a68a,
+        0xa68b0000a68c,
+        0xa68d0000a68e,
+        0xa68f0000a690,
+        0xa6910000a692,
+        0xa6930000a694,
+        0xa6950000a696,
+        0xa6970000a698,
+        0xa6990000a69a,
+        0xa69b0000a69c,
+        0xa69e0000a6e6,
+        0xa6f00000a6f2,
+        0xa7170000a720,
+        0xa7230000a724,
+        0xa7250000a726,
+        0xa7270000a728,
+        0xa7290000a72a,
+        0xa72b0000a72c,
+        0xa72d0000a72e,
+        0xa72f0000a732,
+        0xa7330000a734,
+        0xa7350000a736,
+        0xa7370000a738,
+        0xa7390000a73a,
+        0xa73b0000a73c,
+        0xa73d0000a73e,
+        0xa73f0000a740,
+        0xa7410000a742,
+        0xa7430000a744,
+        0xa7450000a746,
+        0xa7470000a748,
+        0xa7490000a74a,
+        0xa74b0000a74c,
+        0xa74d0000a74e,
+        0xa74f0000a750,
+        0xa7510000a752,
+        0xa7530000a754,
+        0xa7550000a756,
+        0xa7570000a758,
+        0xa7590000a75a,
+        0xa75b0000a75c,
+        0xa75d0000a75e,
+        0xa75f0000a760,
+        0xa7610000a762,
+        0xa7630000a764,
+        0xa7650000a766,
+        0xa7670000a768,
+        0xa7690000a76a,
+        0xa76b0000a76c,
+        0xa76d0000a76e,
+        0xa76f0000a770,
+        0xa7710000a779,
+        0xa77a0000a77b,
+        0xa77c0000a77d,
+        0xa77f0000a780,
+        0xa7810000a782,
+        0xa7830000a784,
+        0xa7850000a786,
+        0xa7870000a789,
+        0xa78c0000a78d,
+        0xa78e0000a790,
+        0xa7910000a792,
+        0xa7930000a796,
+        0xa7970000a798,
+        0xa7990000a79a,
+        0xa79b0000a79c,
+        0xa79d0000a79e,
+        0xa79f0000a7a0,
+        0xa7a10000a7a2,
+        0xa7a30000a7a4,
+        0xa7a50000a7a6,
+        0xa7a70000a7a8,
+        0xa7a90000a7aa,
+        0xa7af0000a7b0,
+        0xa7b50000a7b6,
+        0xa7b70000a7b8,
+        0xa7b90000a7ba,
+        0xa7bb0000a7bc,
+        0xa7bd0000a7be,
+        0xa7bf0000a7c0,
+        0xa7c10000a7c2,
+        0xa7c30000a7c4,
+        0xa7c80000a7c9,
+        0xa7ca0000a7cb,
+        0xa7d10000a7d2,
+        0xa7d30000a7d4,
+        0xa7d50000a7d6,
+        0xa7d70000a7d8,
+        0xa7d90000a7da,
+        0xa7f20000a7f5,
+        0xa7f60000a7f8,
+        0xa7fa0000a828,
+        0xa82c0000a82d,
+        0xa8400000a874,
+        0xa8800000a8c6,
+        0xa8d00000a8da,
+        0xa8e00000a8f8,
+        0xa8fb0000a8fc,
+        0xa8fd0000a92e,
+        0xa9300000a954,
+        0xa9800000a9c1,
+        0xa9cf0000a9da,
+        0xa9e00000a9ff,
+        0xaa000000aa37,
+        0xaa400000aa4e,
+        0xaa500000aa5a,
+        0xaa600000aa77,
+        0xaa7a0000aac3,
+        0xaadb0000aade,
+        0xaae00000aaf0,
+        0xaaf20000aaf7,
+        0xab010000ab07,
+        0xab090000ab0f,
+        0xab110000ab17,
+        0xab200000ab27,
+        0xab280000ab2f,
+        0xab300000ab5b,
+        0xab600000ab69,
+        0xabc00000abeb,
+        0xabec0000abee,
+        0xabf00000abfa,
+        0xac000000d7a4,
+        0xfa0e0000fa10,
+        0xfa110000fa12,
+        0xfa130000fa15,
+        0xfa1f0000fa20,
+        0xfa210000fa22,
+        0xfa230000fa25,
+        0xfa270000fa2a,
+        0xfb1e0000fb1f,
+        0xfe200000fe30,
+        0xfe730000fe74,
+        0x100000001000c,
+        0x1000d00010027,
+        0x100280001003b,
+        0x1003c0001003e,
+        0x1003f0001004e,
+        0x100500001005e,
+        0x10080000100fb,
+        0x101fd000101fe,
+        0x102800001029d,
+        0x102a0000102d1,
+        0x102e0000102e1,
+        0x1030000010320,
+        0x1032d00010341,
+        0x103420001034a,
+        0x103500001037b,
+        0x103800001039e,
+        0x103a0000103c4,
+        0x103c8000103d0,
+        0x104280001049e,
+        0x104a0000104aa,
+        0x104d8000104fc,
+        0x1050000010528,
+        0x1053000010564,
+        0x10597000105a2,
+        0x105a3000105b2,
+        0x105b3000105ba,
+        0x105bb000105bd,
+        0x1060000010737,
+        0x1074000010756,
+        0x1076000010768,
+        0x1078000010786,
+        0x10787000107b1,
+        0x107b2000107bb,
+        0x1080000010806,
+        0x1080800010809,
+        0x1080a00010836,
+        0x1083700010839,
+        0x1083c0001083d,
+        0x1083f00010856,
+        0x1086000010877,
+        0x108800001089f,
+        0x108e0000108f3,
+        0x108f4000108f6,
+        0x1090000010916,
+        0x109200001093a,
+        0x10980000109b8,
+        0x109be000109c0,
+        0x10a0000010a04,
+        0x10a0500010a07,
+        0x10a0c00010a14,
+        0x10a1500010a18,
+        0x10a1900010a36,
+        0x10a3800010a3b,
+        0x10a3f00010a40,
+        0x10a6000010a7d,
+        0x10a8000010a9d,
+        0x10ac000010ac8,
+        0x10ac900010ae7,
+        0x10b0000010b36,
+        0x10b4000010b56,
+        0x10b6000010b73,
+        0x10b8000010b92,
+        0x10c0000010c49,
+        0x10cc000010cf3,
+        0x10d0000010d28,
+        0x10d3000010d3a,
+        0x10e8000010eaa,
+        0x10eab00010ead,
+        0x10eb000010eb2,
+        0x10efd00010f1d,
+        0x10f2700010f28,
+        0x10f3000010f51,
+        0x10f7000010f86,
+        0x10fb000010fc5,
+        0x10fe000010ff7,
+        0x1100000011047,
+        0x1106600011076,
+        0x1107f000110bb,
+        0x110c2000110c3,
+        0x110d0000110e9,
+        0x110f0000110fa,
+        0x1110000011135,
+        0x1113600011140,
+        0x1114400011148,
+        0x1115000011174,
+        0x1117600011177,
+        0x11180000111c5,
+        0x111c9000111cd,
+        0x111ce000111db,
+        0x111dc000111dd,
+        0x1120000011212,
+        0x1121300011238,
+        0x1123e00011242,
+        0x1128000011287,
+        0x1128800011289,
+        0x1128a0001128e,
+        0x1128f0001129e,
+        0x1129f000112a9,
+        0x112b0000112eb,
+        0x112f0000112fa,
+        0x1130000011304,
+        0x113050001130d,
+        0x1130f00011311,
+        0x1131300011329,
+        0x1132a00011331,
+        0x1133200011334,
+        0x113350001133a,
+        0x1133b00011345,
+        0x1134700011349,
+        0x1134b0001134e,
+        0x1135000011351,
+        0x1135700011358,
+        0x1135d00011364,
+        0x113660001136d,
+        0x1137000011375,
+        0x114000001144b,
+        0x114500001145a,
+        0x1145e00011462,
+        0x11480000114c6,
+        0x114c7000114c8,
+        0x114d0000114da,
+        0x11580000115b6,
+        0x115b8000115c1,
+        0x115d8000115de,
+        0x1160000011641,
+        0x1164400011645,
+        0x116500001165a,
+        0x11680000116b9,
+        0x116c0000116ca,
+        0x117000001171b,
+        0x1171d0001172c,
+        0x117300001173a,
+        0x1174000011747,
+        0x118000001183b,
+        0x118c0000118ea,
+        0x118ff00011907,
+        0x119090001190a,
+        0x1190c00011914,
+        0x1191500011917,
+        0x1191800011936,
+        0x1193700011939,
+        0x1193b00011944,
+        0x119500001195a,
+        0x119a0000119a8,
+        0x119aa000119d8,
+        0x119da000119e2,
+        0x119e3000119e5,
+        0x11a0000011a3f,
+        0x11a4700011a48,
+        0x11a5000011a9a,
+        0x11a9d00011a9e,
+        0x11ab000011af9,
+        0x11c0000011c09,
+        0x11c0a00011c37,
+        0x11c3800011c41,
+        0x11c5000011c5a,
+        0x11c7200011c90,
+        0x11c9200011ca8,
+        0x11ca900011cb7,
+        0x11d0000011d07,
+        0x11d0800011d0a,
+        0x11d0b00011d37,
+        0x11d3a00011d3b,
+        0x11d3c00011d3e,
+        0x11d3f00011d48,
+        0x11d5000011d5a,
+        0x11d6000011d66,
+        0x11d6700011d69,
+        0x11d6a00011d8f,
+        0x11d9000011d92,
+        0x11d9300011d99,
+        0x11da000011daa,
+        0x11ee000011ef7,
+        0x11f0000011f11,
+        0x11f1200011f3b,
+        0x11f3e00011f43,
+        0x11f5000011f5a,
+        0x11fb000011fb1,
+        0x120000001239a,
+        0x1248000012544,
+        0x12f9000012ff1,
+        0x1300000013430,
+        0x1344000013456,
+        0x1440000014647,
+        0x1680000016a39,
+        0x16a4000016a5f,
+        0x16a6000016a6a,
+        0x16a7000016abf,
+        0x16ac000016aca,
+        0x16ad000016aee,
+        0x16af000016af5,
+        0x16b0000016b37,
+        0x16b4000016b44,
+        0x16b5000016b5a,
+        0x16b6300016b78,
+        0x16b7d00016b90,
+        0x16e6000016e80,
+        0x16f0000016f4b,
+        0x16f4f00016f88,
+        0x16f8f00016fa0,
+        0x16fe000016fe2,
+        0x16fe300016fe5,
+        0x16ff000016ff2,
+        0x17000000187f8,
+        0x1880000018cd6,
+        0x18d0000018d09,
+        0x1aff00001aff4,
+        0x1aff50001affc,
+        0x1affd0001afff,
+        0x1b0000001b123,
+        0x1b1320001b133,
+        0x1b1500001b153,
+        0x1b1550001b156,
+        0x1b1640001b168,
+        0x1b1700001b2fc,
+        0x1bc000001bc6b,
+        0x1bc700001bc7d,
+        0x1bc800001bc89,
+        0x1bc900001bc9a,
+        0x1bc9d0001bc9f,
+        0x1cf000001cf2e,
+        0x1cf300001cf47,
+        0x1da000001da37,
+        0x1da3b0001da6d,
+        0x1da750001da76,
+        0x1da840001da85,
+        0x1da9b0001daa0,
+        0x1daa10001dab0,
+        0x1df000001df1f,
+        0x1df250001df2b,
+        0x1e0000001e007,
+        0x1e0080001e019,
+        0x1e01b0001e022,
+        0x1e0230001e025,
+        0x1e0260001e02b,
+        0x1e08f0001e090,
+        0x1e1000001e12d,
+        0x1e1300001e13e,
+        0x1e1400001e14a,
+        0x1e14e0001e14f,
+        0x1e2900001e2af,
+        0x1e2c00001e2fa,
+        0x1e4d00001e4fa,
+        0x1e7e00001e7e7,
+        0x1e7e80001e7ec,
+        0x1e7ed0001e7ef,
+        0x1e7f00001e7ff,
+        0x1e8000001e8c5,
+        0x1e8d00001e8d7,
+        0x1e9220001e94c,
+        0x1e9500001e95a,
+        0x200000002a6e0,
+        0x2a7000002b73a,
+        0x2b7400002b81e,
+        0x2b8200002cea2,
+        0x2ceb00002ebe1,
+        0x300000003134b,
+        0x31350000323b0,
+    ),
+    'CONTEXTJ': (
+        0x200c0000200e,
+    ),
+    'CONTEXTO': (
+        0xb7000000b8,
+        0x37500000376,
+        0x5f3000005f5,
+        0x6600000066a,
+        0x6f0000006fa,
+        0x30fb000030fc,
+    ),
+}
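
For readers scanning the table above: each integer in `codepoint_classes` packs a half-open code point range into a single value, `(start << 32) | end`, the same encoding implemented by `intranges.py` in the next hunk. A minimal decoding sketch (editorial, not part of the vendored file; the helper name is illustrative):

    def _decode_packed_range(packed: int) -> tuple[int, int]:
        # Upper 32 bits hold the inclusive start, lower 32 bits the exclusive end.
        return packed >> 32, packed & 0xFFFFFFFF

    # 0x2d0000002e -> (0x2D, 0x2E): only '-' (U+002D) is PVALID in that run.
    assert _decode_packed_range(0x2D0000002E) == (0x2D, 0x2E)
    # 0x300000003a -> (0x30, 0x3A): the ASCII digits '0'..'9'.
    assert _decode_packed_range(0x300000003A) == (0x30, 0x3A)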
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/idna/intranges.py b/.venv/lib/python3.12/site-packages/pip/_vendor/idna/intranges.py
new file mode 100644
index 0000000..6a43b04
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/idna/intranges.py
@@ -0,0 +1,54 @@
+"""
+Given a list of integers, made up of (hopefully) a small number of long runs
+of consecutive integers, compute a representation of the form
+((start1, end1), (start2, end2) ...). Then answer the question "was x present
+in the original list?" in time O(log(# runs)).
+"""
+
+import bisect
+from typing import List, Tuple
+
+def intranges_from_list(list_: List[int]) -> Tuple[int, ...]:
+    """Represent a list of integers as a sequence of ranges:
+    ((start_0, end_0), (start_1, end_1), ...), such that the original
+    integers are exactly those x such that start_i <= x < end_i for some i.
+
+    Ranges are encoded as single integers (start << 32 | end), not as tuples.
+    """
+
+    sorted_list = sorted(list_)
+    ranges = []
+    last_write = -1
+    for i in range(len(sorted_list)):
+        if i+1 < len(sorted_list):
+            if sorted_list[i] == sorted_list[i+1]-1:
+                continue
+        current_range = sorted_list[last_write+1:i+1]
+        ranges.append(_encode_range(current_range[0], current_range[-1] + 1))
+        last_write = i
+
+    return tuple(ranges)
+
+def _encode_range(start: int, end: int) -> int:
+    return (start << 32) | end
+
+def _decode_range(r: int) -> Tuple[int, int]:
+    return (r >> 32), (r & ((1 << 32) - 1))
+
+
+def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool:
+    """Determine if `int_` falls into one of the ranges in `ranges`."""
+    tuple_ = _encode_range(int_, 0)
+    pos = bisect.bisect_left(ranges, tuple_)
+    # we could be immediately ahead of a tuple (start, end)
+    # with start < int_ <= end
+    if pos > 0:
+        left, right = _decode_range(ranges[pos-1])
+        if left <= int_ < right:
+            return True
+    # or we could be immediately behind a tuple (int_, end)
+    if pos < len(ranges):
+        left, _ = _decode_range(ranges[pos])
+        if left == int_:
+            return True
+    return False
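
A short usage sketch for the two helpers above (editorial example; the import path assumes the standalone `idna` distribution rather than this vendored copy under `pip._vendor`):

    from idna.intranges import intranges_from_list, intranges_contain

    # Three runs of consecutive integers collapse into three packed ranges:
    # [1, 4), [7, 9) and [20, 21), each stored as (start << 32) | end.
    ranges = intranges_from_list([1, 2, 3, 7, 8, 20])

    assert intranges_contain(2, ranges)       # inside [1, 4)
    assert intranges_contain(8, ranges)       # inside [7, 9)
    assert not intranges_contain(5, ranges)   # falls in a gap between runs
    assert not intranges_contain(21, ranges)  # end bounds are exclusive

This is how the idna package answers "is this code point PVALID?" against the packed tuples in `idnadata.codepoint_classes` without materialising the full set of code points.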
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/idna/package_data.py b/.venv/lib/python3.12/site-packages/pip/_vendor/idna/package_data.py
new file mode 100644
index 0000000..8501893
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/idna/package_data.py
@@ -0,0 +1,2 @@
+__version__ = '3.4'
+
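The `uts46data.py` table introduced next encodes the UTS #46 mapping as sorted rows of `(codepoint, status)` or `(codepoint, status, mapping)`; each row governs every code point from its own start up to the next row's start. A sketch of how such a row can be located, modelled on the lookup in `idna.core.uts46_remap` and assuming the module assembles the `_seg_*` segments into one sorted tuple named `uts46data`, as the upstream package does (the `uts46_row` helper name is illustrative):

    import bisect
    from idna.uts46data import uts46data

    def uts46_row(code_point: int):
        # The first 256 rows are one per code point, so they can be indexed directly.
        if code_point < 256:
            return uts46data[code_point]
        # Otherwise, find the last row whose starting code point is <= code_point.
        # 'Z' sorts after every real status letter ('3', 'D', 'I', 'M', 'V', 'X').
        return uts46data[bisect.bisect_left(uts46data, (code_point, 'Z')) - 1]

    # From the rows below: U+0041 'A' is status 'M' (mapped) to 'a'; U+0061 'a' is 'V' (valid).
    assert uts46_row(0x41) == (0x41, 'M', 'a')
    assert uts46_row(0x61) == (0x61, 'V')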
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/idna/uts46data.py b/.venv/lib/python3.12/site-packages/pip/_vendor/idna/uts46data.py
new file mode 100644
index 0000000..186796c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/idna/uts46data.py
@@ -0,0 +1,8600 @@
+# This file is automatically generated by tools/idna-data
+# vim: set fileencoding=utf-8 :
+
+from typing import List, Tuple, Union
+
+
+"""IDNA Mapping Table from UTS46."""
+
+
+__version__ = '15.0.0'
+def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x0, '3'),
+    (0x1, '3'),
+    (0x2, '3'),
+    (0x3, '3'),
+    (0x4, '3'),
+    (0x5, '3'),
+    (0x6, '3'),
+    (0x7, '3'),
+    (0x8, '3'),
+    (0x9, '3'),
+    (0xA, '3'),
+    (0xB, '3'),
+    (0xC, '3'),
+    (0xD, '3'),
+    (0xE, '3'),
+    (0xF, '3'),
+    (0x10, '3'),
+    (0x11, '3'),
+    (0x12, '3'),
+    (0x13, '3'),
+    (0x14, '3'),
+    (0x15, '3'),
+    (0x16, '3'),
+    (0x17, '3'),
+    (0x18, '3'),
+    (0x19, '3'),
+    (0x1A, '3'),
+    (0x1B, '3'),
+    (0x1C, '3'),
+    (0x1D, '3'),
+    (0x1E, '3'),
+    (0x1F, '3'),
+    (0x20, '3'),
+    (0x21, '3'),
+    (0x22, '3'),
+    (0x23, '3'),
+    (0x24, '3'),
+    (0x25, '3'),
+    (0x26, '3'),
+    (0x27, '3'),
+    (0x28, '3'),
+    (0x29, '3'),
+    (0x2A, '3'),
+    (0x2B, '3'),
+    (0x2C, '3'),
+    (0x2D, 'V'),
+    (0x2E, 'V'),
+    (0x2F, '3'),
+    (0x30, 'V'),
+    (0x31, 'V'),
+    (0x32, 'V'),
+    (0x33, 'V'),
+    (0x34, 'V'),
+    (0x35, 'V'),
+    (0x36, 'V'),
+    (0x37, 'V'),
+    (0x38, 'V'),
+    (0x39, 'V'),
+    (0x3A, '3'),
+    (0x3B, '3'),
+    (0x3C, '3'),
+    (0x3D, '3'),
+    (0x3E, '3'),
+    (0x3F, '3'),
+    (0x40, '3'),
+    (0x41, 'M', 'a'),
+    (0x42, 'M', 'b'),
+    (0x43, 'M', 'c'),
+    (0x44, 'M', 'd'),
+    (0x45, 'M', 'e'),
+    (0x46, 'M', 'f'),
+    (0x47, 'M', 'g'),
+    (0x48, 'M', 'h'),
+    (0x49, 'M', 'i'),
+    (0x4A, 'M', 'j'),
+    (0x4B, 'M', 'k'),
+    (0x4C, 'M', 'l'),
+    (0x4D, 'M', 'm'),
+    (0x4E, 'M', 'n'),
+    (0x4F, 'M', 'o'),
+    (0x50, 'M', 'p'),
+    (0x51, 'M', 'q'),
+    (0x52, 'M', 'r'),
+    (0x53, 'M', 's'),
+    (0x54, 'M', 't'),
+    (0x55, 'M', 'u'),
+    (0x56, 'M', 'v'),
+    (0x57, 'M', 'w'),
+    (0x58, 'M', 'x'),
+    (0x59, 'M', 'y'),
+    (0x5A, 'M', 'z'),
+    (0x5B, '3'),
+    (0x5C, '3'),
+    (0x5D, '3'),
+    (0x5E, '3'),
+    (0x5F, '3'),
+    (0x60, '3'),
+    (0x61, 'V'),
+    (0x62, 'V'),
+    (0x63, 'V'),
+    ]
+
+def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x64, 'V'),
+    (0x65, 'V'),
+    (0x66, 'V'),
+    (0x67, 'V'),
+    (0x68, 'V'),
+    (0x69, 'V'),
+    (0x6A, 'V'),
+    (0x6B, 'V'),
+    (0x6C, 'V'),
+    (0x6D, 'V'),
+    (0x6E, 'V'),
+    (0x6F, 'V'),
+    (0x70, 'V'),
+    (0x71, 'V'),
+    (0x72, 'V'),
+    (0x73, 'V'),
+    (0x74, 'V'),
+    (0x75, 'V'),
+    (0x76, 'V'),
+    (0x77, 'V'),
+    (0x78, 'V'),
+    (0x79, 'V'),
+    (0x7A, 'V'),
+    (0x7B, '3'),
+    (0x7C, '3'),
+    (0x7D, '3'),
+    (0x7E, '3'),
+    (0x7F, '3'),
+    (0x80, 'X'),
+    (0x81, 'X'),
+    (0x82, 'X'),
+    (0x83, 'X'),
+    (0x84, 'X'),
+    (0x85, 'X'),
+    (0x86, 'X'),
+    (0x87, 'X'),
+    (0x88, 'X'),
+    (0x89, 'X'),
+    (0x8A, 'X'),
+    (0x8B, 'X'),
+    (0x8C, 'X'),
+    (0x8D, 'X'),
+    (0x8E, 'X'),
+    (0x8F, 'X'),
+    (0x90, 'X'),
+    (0x91, 'X'),
+    (0x92, 'X'),
+    (0x93, 'X'),
+    (0x94, 'X'),
+    (0x95, 'X'),
+    (0x96, 'X'),
+    (0x97, 'X'),
+    (0x98, 'X'),
+    (0x99, 'X'),
+    (0x9A, 'X'),
+    (0x9B, 'X'),
+    (0x9C, 'X'),
+    (0x9D, 'X'),
+    (0x9E, 'X'),
+    (0x9F, 'X'),
+    (0xA0, '3', ' '),
+    (0xA1, 'V'),
+    (0xA2, 'V'),
+    (0xA3, 'V'),
+    (0xA4, 'V'),
+    (0xA5, 'V'),
+    (0xA6, 'V'),
+    (0xA7, 'V'),
+    (0xA8, '3', ' ̈'),
+    (0xA9, 'V'),
+    (0xAA, 'M', 'a'),
+    (0xAB, 'V'),
+    (0xAC, 'V'),
+    (0xAD, 'I'),
+    (0xAE, 'V'),
+    (0xAF, '3', ' ̄'),
+    (0xB0, 'V'),
+    (0xB1, 'V'),
+    (0xB2, 'M', '2'),
+    (0xB3, 'M', '3'),
+    (0xB4, '3', ' ́'),
+    (0xB5, 'M', 'μ'),
+    (0xB6, 'V'),
+    (0xB7, 'V'),
+    (0xB8, '3', ' ̧'),
+    (0xB9, 'M', '1'),
+    (0xBA, 'M', 'o'),
+    (0xBB, 'V'),
+    (0xBC, 'M', '1⁄4'),
+    (0xBD, 'M', '1⁄2'),
+    (0xBE, 'M', '3⁄4'),
+    (0xBF, 'V'),
+    (0xC0, 'M', 'à'),
+    (0xC1, 'M', 'á'),
+    (0xC2, 'M', 'â'),
+    (0xC3, 'M', 'ã'),
+    (0xC4, 'M', 'ä'),
+    (0xC5, 'M', 'å'),
+    (0xC6, 'M', 'æ'),
+    (0xC7, 'M', 'ç'),
+    ]
+
+def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xC8, 'M', 'è'),
+    (0xC9, 'M', 'é'),
+    (0xCA, 'M', 'ê'),
+    (0xCB, 'M', 'ë'),
+    (0xCC, 'M', 'ì'),
+    (0xCD, 'M', 'í'),
+    (0xCE, 'M', 'î'),
+    (0xCF, 'M', 'ï'),
+    (0xD0, 'M', 'ð'),
+    (0xD1, 'M', 'ñ'),
+    (0xD2, 'M', 'ò'),
+    (0xD3, 'M', 'ó'),
+    (0xD4, 'M', 'ô'),
+    (0xD5, 'M', 'õ'),
+    (0xD6, 'M', 'ö'),
+    (0xD7, 'V'),
+    (0xD8, 'M', 'ø'),
+    (0xD9, 'M', 'ù'),
+    (0xDA, 'M', 'ú'),
+    (0xDB, 'M', 'û'),
+    (0xDC, 'M', 'ü'),
+    (0xDD, 'M', 'ý'),
+    (0xDE, 'M', 'þ'),
+    (0xDF, 'D', 'ss'),
+    (0xE0, 'V'),
+    (0xE1, 'V'),
+    (0xE2, 'V'),
+    (0xE3, 'V'),
+    (0xE4, 'V'),
+    (0xE5, 'V'),
+    (0xE6, 'V'),
+    (0xE7, 'V'),
+    (0xE8, 'V'),
+    (0xE9, 'V'),
+    (0xEA, 'V'),
+    (0xEB, 'V'),
+    (0xEC, 'V'),
+    (0xED, 'V'),
+    (0xEE, 'V'),
+    (0xEF, 'V'),
+    (0xF0, 'V'),
+    (0xF1, 'V'),
+    (0xF2, 'V'),
+    (0xF3, 'V'),
+    (0xF4, 'V'),
+    (0xF5, 'V'),
+    (0xF6, 'V'),
+    (0xF7, 'V'),
+    (0xF8, 'V'),
+    (0xF9, 'V'),
+    (0xFA, 'V'),
+    (0xFB, 'V'),
+    (0xFC, 'V'),
+    (0xFD, 'V'),
+    (0xFE, 'V'),
+    (0xFF, 'V'),
+    (0x100, 'M', 'ā'),
+    (0x101, 'V'),
+    (0x102, 'M', 'ă'),
+    (0x103, 'V'),
+    (0x104, 'M', 'ą'),
+    (0x105, 'V'),
+    (0x106, 'M', 'ć'),
+    (0x107, 'V'),
+    (0x108, 'M', 'ĉ'),
+    (0x109, 'V'),
+    (0x10A, 'M', 'ċ'),
+    (0x10B, 'V'),
+    (0x10C, 'M', 'č'),
+    (0x10D, 'V'),
+    (0x10E, 'M', 'ď'),
+    (0x10F, 'V'),
+    (0x110, 'M', 'đ'),
+    (0x111, 'V'),
+    (0x112, 'M', 'ē'),
+    (0x113, 'V'),
+    (0x114, 'M', 'ĕ'),
+    (0x115, 'V'),
+    (0x116, 'M', 'ė'),
+    (0x117, 'V'),
+    (0x118, 'M', 'ę'),
+    (0x119, 'V'),
+    (0x11A, 'M', 'ě'),
+    (0x11B, 'V'),
+    (0x11C, 'M', 'ĝ'),
+    (0x11D, 'V'),
+    (0x11E, 'M', 'ğ'),
+    (0x11F, 'V'),
+    (0x120, 'M', 'ġ'),
+    (0x121, 'V'),
+    (0x122, 'M', 'ģ'),
+    (0x123, 'V'),
+    (0x124, 'M', 'ĥ'),
+    (0x125, 'V'),
+    (0x126, 'M', 'ħ'),
+    (0x127, 'V'),
+    (0x128, 'M', 'ĩ'),
+    (0x129, 'V'),
+    (0x12A, 'M', 'ī'),
+    (0x12B, 'V'),
+    ]
+
+def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x12C, 'M', 'ĭ'),
+    (0x12D, 'V'),
+    (0x12E, 'M', 'į'),
+    (0x12F, 'V'),
+    (0x130, 'M', 'i̇'),
+    (0x131, 'V'),
+    (0x132, 'M', 'ij'),
+    (0x134, 'M', 'ĵ'),
+    (0x135, 'V'),
+    (0x136, 'M', 'ķ'),
+    (0x137, 'V'),
+    (0x139, 'M', 'ĺ'),
+    (0x13A, 'V'),
+    (0x13B, 'M', 'ļ'),
+    (0x13C, 'V'),
+    (0x13D, 'M', 'ľ'),
+    (0x13E, 'V'),
+    (0x13F, 'M', 'l·'),
+    (0x141, 'M', 'ł'),
+    (0x142, 'V'),
+    (0x143, 'M', 'ń'),
+    (0x144, 'V'),
+    (0x145, 'M', 'ņ'),
+    (0x146, 'V'),
+    (0x147, 'M', 'ň'),
+    (0x148, 'V'),
+    (0x149, 'M', 'ʼn'),
+    (0x14A, 'M', 'ŋ'),
+    (0x14B, 'V'),
+    (0x14C, 'M', 'ō'),
+    (0x14D, 'V'),
+    (0x14E, 'M', 'ŏ'),
+    (0x14F, 'V'),
+    (0x150, 'M', 'ő'),
+    (0x151, 'V'),
+    (0x152, 'M', 'œ'),
+    (0x153, 'V'),
+    (0x154, 'M', 'ŕ'),
+    (0x155, 'V'),
+    (0x156, 'M', 'ŗ'),
+    (0x157, 'V'),
+    (0x158, 'M', 'ř'),
+    (0x159, 'V'),
+    (0x15A, 'M', 'ś'),
+    (0x15B, 'V'),
+    (0x15C, 'M', 'ŝ'),
+    (0x15D, 'V'),
+    (0x15E, 'M', 'ş'),
+    (0x15F, 'V'),
+    (0x160, 'M', 'š'),
+    (0x161, 'V'),
+    (0x162, 'M', 'ţ'),
+    (0x163, 'V'),
+    (0x164, 'M', 'ť'),
+    (0x165, 'V'),
+    (0x166, 'M', 'ŧ'),
+    (0x167, 'V'),
+    (0x168, 'M', 'ũ'),
+    (0x169, 'V'),
+    (0x16A, 'M', 'ū'),
+    (0x16B, 'V'),
+    (0x16C, 'M', 'ŭ'),
+    (0x16D, 'V'),
+    (0x16E, 'M', 'ů'),
+    (0x16F, 'V'),
+    (0x170, 'M', 'ű'),
+    (0x171, 'V'),
+    (0x172, 'M', 'ų'),
+    (0x173, 'V'),
+    (0x174, 'M', 'ŵ'),
+    (0x175, 'V'),
+    (0x176, 'M', 'ŷ'),
+    (0x177, 'V'),
+    (0x178, 'M', 'ÿ'),
+    (0x179, 'M', 'ź'),
+    (0x17A, 'V'),
+    (0x17B, 'M', 'ż'),
+    (0x17C, 'V'),
+    (0x17D, 'M', 'ž'),
+    (0x17E, 'V'),
+    (0x17F, 'M', 's'),
+    (0x180, 'V'),
+    (0x181, 'M', 'ɓ'),
+    (0x182, 'M', 'ƃ'),
+    (0x183, 'V'),
+    (0x184, 'M', 'ƅ'),
+    (0x185, 'V'),
+    (0x186, 'M', 'ɔ'),
+    (0x187, 'M', 'ƈ'),
+    (0x188, 'V'),
+    (0x189, 'M', 'ɖ'),
+    (0x18A, 'M', 'ɗ'),
+    (0x18B, 'M', 'ƌ'),
+    (0x18C, 'V'),
+    (0x18E, 'M', 'ǝ'),
+    (0x18F, 'M', 'ə'),
+    (0x190, 'M', 'ɛ'),
+    (0x191, 'M', 'ƒ'),
+    (0x192, 'V'),
+    (0x193, 'M', 'ɠ'),
+    ]
+
+def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x194, 'M', 'ɣ'),
+    (0x195, 'V'),
+    (0x196, 'M', 'ɩ'),
+    (0x197, 'M', 'ɨ'),
+    (0x198, 'M', 'ƙ'),
+    (0x199, 'V'),
+    (0x19C, 'M', 'ɯ'),
+    (0x19D, 'M', 'ɲ'),
+    (0x19E, 'V'),
+    (0x19F, 'M', 'ɵ'),
+    (0x1A0, 'M', 'ơ'),
+    (0x1A1, 'V'),
+    (0x1A2, 'M', 'ƣ'),
+    (0x1A3, 'V'),
+    (0x1A4, 'M', 'ƥ'),
+    (0x1A5, 'V'),
+    (0x1A6, 'M', 'ʀ'),
+    (0x1A7, 'M', 'ƨ'),
+    (0x1A8, 'V'),
+    (0x1A9, 'M', 'ʃ'),
+    (0x1AA, 'V'),
+    (0x1AC, 'M', 'ƭ'),
+    (0x1AD, 'V'),
+    (0x1AE, 'M', 'ʈ'),
+    (0x1AF, 'M', 'ư'),
+    (0x1B0, 'V'),
+    (0x1B1, 'M', 'ʊ'),
+    (0x1B2, 'M', 'ʋ'),
+    (0x1B3, 'M', 'ƴ'),
+    (0x1B4, 'V'),
+    (0x1B5, 'M', 'ƶ'),
+    (0x1B6, 'V'),
+    (0x1B7, 'M', 'ʒ'),
+    (0x1B8, 'M', 'ƹ'),
+    (0x1B9, 'V'),
+    (0x1BC, 'M', 'ƽ'),
+    (0x1BD, 'V'),
+    (0x1C4, 'M', 'dž'),
+    (0x1C7, 'M', 'lj'),
+    (0x1CA, 'M', 'nj'),
+    (0x1CD, 'M', 'ǎ'),
+    (0x1CE, 'V'),
+    (0x1CF, 'M', 'ǐ'),
+    (0x1D0, 'V'),
+    (0x1D1, 'M', 'ǒ'),
+    (0x1D2, 'V'),
+    (0x1D3, 'M', 'ǔ'),
+    (0x1D4, 'V'),
+    (0x1D5, 'M', 'ǖ'),
+    (0x1D6, 'V'),
+    (0x1D7, 'M', 'ǘ'),
+    (0x1D8, 'V'),
+    (0x1D9, 'M', 'ǚ'),
+    (0x1DA, 'V'),
+    (0x1DB, 'M', 'ǜ'),
+    (0x1DC, 'V'),
+    (0x1DE, 'M', 'ǟ'),
+    (0x1DF, 'V'),
+    (0x1E0, 'M', 'ǡ'),
+    (0x1E1, 'V'),
+    (0x1E2, 'M', 'ǣ'),
+    (0x1E3, 'V'),
+    (0x1E4, 'M', 'ǥ'),
+    (0x1E5, 'V'),
+    (0x1E6, 'M', 'ǧ'),
+    (0x1E7, 'V'),
+    (0x1E8, 'M', 'ǩ'),
+    (0x1E9, 'V'),
+    (0x1EA, 'M', 'ǫ'),
+    (0x1EB, 'V'),
+    (0x1EC, 'M', 'ǭ'),
+    (0x1ED, 'V'),
+    (0x1EE, 'M', 'ǯ'),
+    (0x1EF, 'V'),
+    (0x1F1, 'M', 'dz'),
+    (0x1F4, 'M', 'ǵ'),
+    (0x1F5, 'V'),
+    (0x1F6, 'M', 'ƕ'),
+    (0x1F7, 'M', 'ƿ'),
+    (0x1F8, 'M', 'ǹ'),
+    (0x1F9, 'V'),
+    (0x1FA, 'M', 'ǻ'),
+    (0x1FB, 'V'),
+    (0x1FC, 'M', 'ǽ'),
+    (0x1FD, 'V'),
+    (0x1FE, 'M', 'ǿ'),
+    (0x1FF, 'V'),
+    (0x200, 'M', 'ȁ'),
+    (0x201, 'V'),
+    (0x202, 'M', 'ȃ'),
+    (0x203, 'V'),
+    (0x204, 'M', 'ȅ'),
+    (0x205, 'V'),
+    (0x206, 'M', 'ȇ'),
+    (0x207, 'V'),
+    (0x208, 'M', 'ȉ'),
+    (0x209, 'V'),
+    (0x20A, 'M', 'ȋ'),
+    (0x20B, 'V'),
+    (0x20C, 'M', 'ȍ'),
+    ]
+
+def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x20D, 'V'),
+    (0x20E, 'M', 'ȏ'),
+    (0x20F, 'V'),
+    (0x210, 'M', 'ȑ'),
+    (0x211, 'V'),
+    (0x212, 'M', 'ȓ'),
+    (0x213, 'V'),
+    (0x214, 'M', 'ȕ'),
+    (0x215, 'V'),
+    (0x216, 'M', 'ȗ'),
+    (0x217, 'V'),
+    (0x218, 'M', 'ș'),
+    (0x219, 'V'),
+    (0x21A, 'M', 'ț'),
+    (0x21B, 'V'),
+    (0x21C, 'M', 'ȝ'),
+    (0x21D, 'V'),
+    (0x21E, 'M', 'ȟ'),
+    (0x21F, 'V'),
+    (0x220, 'M', 'ƞ'),
+    (0x221, 'V'),
+    (0x222, 'M', 'ȣ'),
+    (0x223, 'V'),
+    (0x224, 'M', 'ȥ'),
+    (0x225, 'V'),
+    (0x226, 'M', 'ȧ'),
+    (0x227, 'V'),
+    (0x228, 'M', 'ȩ'),
+    (0x229, 'V'),
+    (0x22A, 'M', 'ȫ'),
+    (0x22B, 'V'),
+    (0x22C, 'M', 'ȭ'),
+    (0x22D, 'V'),
+    (0x22E, 'M', 'ȯ'),
+    (0x22F, 'V'),
+    (0x230, 'M', 'ȱ'),
+    (0x231, 'V'),
+    (0x232, 'M', 'ȳ'),
+    (0x233, 'V'),
+    (0x23A, 'M', 'ⱥ'),
+    (0x23B, 'M', 'ȼ'),
+    (0x23C, 'V'),
+    (0x23D, 'M', 'ƚ'),
+    (0x23E, 'M', 'ⱦ'),
+    (0x23F, 'V'),
+    (0x241, 'M', 'ɂ'),
+    (0x242, 'V'),
+    (0x243, 'M', 'ƀ'),
+    (0x244, 'M', 'ʉ'),
+    (0x245, 'M', 'ʌ'),
+    (0x246, 'M', 'ɇ'),
+    (0x247, 'V'),
+    (0x248, 'M', 'ɉ'),
+    (0x249, 'V'),
+    (0x24A, 'M', 'ɋ'),
+    (0x24B, 'V'),
+    (0x24C, 'M', 'ɍ'),
+    (0x24D, 'V'),
+    (0x24E, 'M', 'ɏ'),
+    (0x24F, 'V'),
+    (0x2B0, 'M', 'h'),
+    (0x2B1, 'M', 'ɦ'),
+    (0x2B2, 'M', 'j'),
+    (0x2B3, 'M', 'r'),
+    (0x2B4, 'M', 'ɹ'),
+    (0x2B5, 'M', 'ɻ'),
+    (0x2B6, 'M', 'ʁ'),
+    (0x2B7, 'M', 'w'),
+    (0x2B8, 'M', 'y'),
+    (0x2B9, 'V'),
+    (0x2D8, '3', ' ̆'),
+    (0x2D9, '3', ' ̇'),
+    (0x2DA, '3', ' ̊'),
+    (0x2DB, '3', ' ̨'),
+    (0x2DC, '3', ' ̃'),
+    (0x2DD, '3', ' ̋'),
+    (0x2DE, 'V'),
+    (0x2E0, 'M', 'ɣ'),
+    (0x2E1, 'M', 'l'),
+    (0x2E2, 'M', 's'),
+    (0x2E3, 'M', 'x'),
+    (0x2E4, 'M', 'ʕ'),
+    (0x2E5, 'V'),
+    (0x340, 'M', '̀'),
+    (0x341, 'M', '́'),
+    (0x342, 'V'),
+    (0x343, 'M', '̓'),
+    (0x344, 'M', '̈́'),
+    (0x345, 'M', 'ι'),
+    (0x346, 'V'),
+    (0x34F, 'I'),
+    (0x350, 'V'),
+    (0x370, 'M', 'ͱ'),
+    (0x371, 'V'),
+    (0x372, 'M', 'ͳ'),
+    (0x373, 'V'),
+    (0x374, 'M', 'ʹ'),
+    (0x375, 'V'),
+    (0x376, 'M', 'ͷ'),
+    (0x377, 'V'),
+    ]
+
+def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x378, 'X'),
+    (0x37A, '3', ' ι'),
+    (0x37B, 'V'),
+    (0x37E, '3', ';'),
+    (0x37F, 'M', 'ϳ'),
+    (0x380, 'X'),
+    (0x384, '3', ' ́'),
+    (0x385, '3', ' ̈́'),
+    (0x386, 'M', 'ά'),
+    (0x387, 'M', '·'),
+    (0x388, 'M', 'έ'),
+    (0x389, 'M', 'ή'),
+    (0x38A, 'M', 'ί'),
+    (0x38B, 'X'),
+    (0x38C, 'M', 'ό'),
+    (0x38D, 'X'),
+    (0x38E, 'M', 'ύ'),
+    (0x38F, 'M', 'ώ'),
+    (0x390, 'V'),
+    (0x391, 'M', 'α'),
+    (0x392, 'M', 'β'),
+    (0x393, 'M', 'γ'),
+    (0x394, 'M', 'δ'),
+    (0x395, 'M', 'ε'),
+    (0x396, 'M', 'ζ'),
+    (0x397, 'M', 'η'),
+    (0x398, 'M', 'θ'),
+    (0x399, 'M', 'ι'),
+    (0x39A, 'M', 'κ'),
+    (0x39B, 'M', 'λ'),
+    (0x39C, 'M', 'μ'),
+    (0x39D, 'M', 'ν'),
+    (0x39E, 'M', 'ξ'),
+    (0x39F, 'M', 'ο'),
+    (0x3A0, 'M', 'π'),
+    (0x3A1, 'M', 'ρ'),
+    (0x3A2, 'X'),
+    (0x3A3, 'M', 'σ'),
+    (0x3A4, 'M', 'τ'),
+    (0x3A5, 'M', 'υ'),
+    (0x3A6, 'M', 'φ'),
+    (0x3A7, 'M', 'χ'),
+    (0x3A8, 'M', 'ψ'),
+    (0x3A9, 'M', 'ω'),
+    (0x3AA, 'M', 'ϊ'),
+    (0x3AB, 'M', 'ϋ'),
+    (0x3AC, 'V'),
+    (0x3C2, 'D', 'σ'),
+    (0x3C3, 'V'),
+    (0x3CF, 'M', 'ϗ'),
+    (0x3D0, 'M', 'β'),
+    (0x3D1, 'M', 'θ'),
+    (0x3D2, 'M', 'υ'),
+    (0x3D3, 'M', 'ύ'),
+    (0x3D4, 'M', 'ϋ'),
+    (0x3D5, 'M', 'φ'),
+    (0x3D6, 'M', 'π'),
+    (0x3D7, 'V'),
+    (0x3D8, 'M', 'ϙ'),
+    (0x3D9, 'V'),
+    (0x3DA, 'M', 'ϛ'),
+    (0x3DB, 'V'),
+    (0x3DC, 'M', 'ϝ'),
+    (0x3DD, 'V'),
+    (0x3DE, 'M', 'ϟ'),
+    (0x3DF, 'V'),
+    (0x3E0, 'M', 'ϡ'),
+    (0x3E1, 'V'),
+    (0x3E2, 'M', 'ϣ'),
+    (0x3E3, 'V'),
+    (0x3E4, 'M', 'ϥ'),
+    (0x3E5, 'V'),
+    (0x3E6, 'M', 'ϧ'),
+    (0x3E7, 'V'),
+    (0x3E8, 'M', 'ϩ'),
+    (0x3E9, 'V'),
+    (0x3EA, 'M', 'ϫ'),
+    (0x3EB, 'V'),
+    (0x3EC, 'M', 'ϭ'),
+    (0x3ED, 'V'),
+    (0x3EE, 'M', 'ϯ'),
+    (0x3EF, 'V'),
+    (0x3F0, 'M', 'κ'),
+    (0x3F1, 'M', 'ρ'),
+    (0x3F2, 'M', 'σ'),
+    (0x3F3, 'V'),
+    (0x3F4, 'M', 'θ'),
+    (0x3F5, 'M', 'ε'),
+    (0x3F6, 'V'),
+    (0x3F7, 'M', 'ϸ'),
+    (0x3F8, 'V'),
+    (0x3F9, 'M', 'σ'),
+    (0x3FA, 'M', 'ϻ'),
+    (0x3FB, 'V'),
+    (0x3FD, 'M', 'ͻ'),
+    (0x3FE, 'M', 'ͼ'),
+    (0x3FF, 'M', 'ͽ'),
+    (0x400, 'M', 'ѐ'),
+    (0x401, 'M', 'ё'),
+    (0x402, 'M', 'ђ'),
+    ]
+
+def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x403, 'M', 'ѓ'),
+    (0x404, 'M', 'є'),
+    (0x405, 'M', 'ѕ'),
+    (0x406, 'M', 'і'),
+    (0x407, 'M', 'ї'),
+    (0x408, 'M', 'ј'),
+    (0x409, 'M', 'љ'),
+    (0x40A, 'M', 'њ'),
+    (0x40B, 'M', 'ћ'),
+    (0x40C, 'M', 'ќ'),
+    (0x40D, 'M', 'ѝ'),
+    (0x40E, 'M', 'ў'),
+    (0x40F, 'M', 'џ'),
+    (0x410, 'M', 'а'),
+    (0x411, 'M', 'б'),
+    (0x412, 'M', 'в'),
+    (0x413, 'M', 'г'),
+    (0x414, 'M', 'д'),
+    (0x415, 'M', 'е'),
+    (0x416, 'M', 'ж'),
+    (0x417, 'M', 'з'),
+    (0x418, 'M', 'и'),
+    (0x419, 'M', 'й'),
+    (0x41A, 'M', 'к'),
+    (0x41B, 'M', 'л'),
+    (0x41C, 'M', 'м'),
+    (0x41D, 'M', 'н'),
+    (0x41E, 'M', 'о'),
+    (0x41F, 'M', 'п'),
+    (0x420, 'M', 'р'),
+    (0x421, 'M', 'с'),
+    (0x422, 'M', 'т'),
+    (0x423, 'M', 'у'),
+    (0x424, 'M', 'ф'),
+    (0x425, 'M', 'х'),
+    (0x426, 'M', 'ц'),
+    (0x427, 'M', 'ч'),
+    (0x428, 'M', 'ш'),
+    (0x429, 'M', 'щ'),
+    (0x42A, 'M', 'ъ'),
+    (0x42B, 'M', 'ы'),
+    (0x42C, 'M', 'ь'),
+    (0x42D, 'M', 'э'),
+    (0x42E, 'M', 'ю'),
+    (0x42F, 'M', 'я'),
+    (0x430, 'V'),
+    (0x460, 'M', 'ѡ'),
+    (0x461, 'V'),
+    (0x462, 'M', 'ѣ'),
+    (0x463, 'V'),
+    (0x464, 'M', 'ѥ'),
+    (0x465, 'V'),
+    (0x466, 'M', 'ѧ'),
+    (0x467, 'V'),
+    (0x468, 'M', 'ѩ'),
+    (0x469, 'V'),
+    (0x46A, 'M', 'ѫ'),
+    (0x46B, 'V'),
+    (0x46C, 'M', 'ѭ'),
+    (0x46D, 'V'),
+    (0x46E, 'M', 'ѯ'),
+    (0x46F, 'V'),
+    (0x470, 'M', 'ѱ'),
+    (0x471, 'V'),
+    (0x472, 'M', 'ѳ'),
+    (0x473, 'V'),
+    (0x474, 'M', 'ѵ'),
+    (0x475, 'V'),
+    (0x476, 'M', 'ѷ'),
+    (0x477, 'V'),
+    (0x478, 'M', 'ѹ'),
+    (0x479, 'V'),
+    (0x47A, 'M', 'ѻ'),
+    (0x47B, 'V'),
+    (0x47C, 'M', 'ѽ'),
+    (0x47D, 'V'),
+    (0x47E, 'M', 'ѿ'),
+    (0x47F, 'V'),
+    (0x480, 'M', 'ҁ'),
+    (0x481, 'V'),
+    (0x48A, 'M', 'ҋ'),
+    (0x48B, 'V'),
+    (0x48C, 'M', 'ҍ'),
+    (0x48D, 'V'),
+    (0x48E, 'M', 'ҏ'),
+    (0x48F, 'V'),
+    (0x490, 'M', 'ґ'),
+    (0x491, 'V'),
+    (0x492, 'M', 'ғ'),
+    (0x493, 'V'),
+    (0x494, 'M', 'ҕ'),
+    (0x495, 'V'),
+    (0x496, 'M', 'җ'),
+    (0x497, 'V'),
+    (0x498, 'M', 'ҙ'),
+    (0x499, 'V'),
+    (0x49A, 'M', 'қ'),
+    (0x49B, 'V'),
+    (0x49C, 'M', 'ҝ'),
+    (0x49D, 'V'),
+    ]
+
+def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x49E, 'M', 'ҟ'),
+    (0x49F, 'V'),
+    (0x4A0, 'M', 'ҡ'),
+    (0x4A1, 'V'),
+    (0x4A2, 'M', 'ң'),
+    (0x4A3, 'V'),
+    (0x4A4, 'M', 'ҥ'),
+    (0x4A5, 'V'),
+    (0x4A6, 'M', 'ҧ'),
+    (0x4A7, 'V'),
+    (0x4A8, 'M', 'ҩ'),
+    (0x4A9, 'V'),
+    (0x4AA, 'M', 'ҫ'),
+    (0x4AB, 'V'),
+    (0x4AC, 'M', 'ҭ'),
+    (0x4AD, 'V'),
+    (0x4AE, 'M', 'ү'),
+    (0x4AF, 'V'),
+    (0x4B0, 'M', 'ұ'),
+    (0x4B1, 'V'),
+    (0x4B2, 'M', 'ҳ'),
+    (0x4B3, 'V'),
+    (0x4B4, 'M', 'ҵ'),
+    (0x4B5, 'V'),
+    (0x4B6, 'M', 'ҷ'),
+    (0x4B7, 'V'),
+    (0x4B8, 'M', 'ҹ'),
+    (0x4B9, 'V'),
+    (0x4BA, 'M', 'һ'),
+    (0x4BB, 'V'),
+    (0x4BC, 'M', 'ҽ'),
+    (0x4BD, 'V'),
+    (0x4BE, 'M', 'ҿ'),
+    (0x4BF, 'V'),
+    (0x4C0, 'X'),
+    (0x4C1, 'M', 'ӂ'),
+    (0x4C2, 'V'),
+    (0x4C3, 'M', 'ӄ'),
+    (0x4C4, 'V'),
+    (0x4C5, 'M', 'ӆ'),
+    (0x4C6, 'V'),
+    (0x4C7, 'M', 'ӈ'),
+    (0x4C8, 'V'),
+    (0x4C9, 'M', 'ӊ'),
+    (0x4CA, 'V'),
+    (0x4CB, 'M', 'ӌ'),
+    (0x4CC, 'V'),
+    (0x4CD, 'M', 'ӎ'),
+    (0x4CE, 'V'),
+    (0x4D0, 'M', 'ӑ'),
+    (0x4D1, 'V'),
+    (0x4D2, 'M', 'ӓ'),
+    (0x4D3, 'V'),
+    (0x4D4, 'M', 'ӕ'),
+    (0x4D5, 'V'),
+    (0x4D6, 'M', 'ӗ'),
+    (0x4D7, 'V'),
+    (0x4D8, 'M', 'ә'),
+    (0x4D9, 'V'),
+    (0x4DA, 'M', 'ӛ'),
+    (0x4DB, 'V'),
+    (0x4DC, 'M', 'ӝ'),
+    (0x4DD, 'V'),
+    (0x4DE, 'M', 'ӟ'),
+    (0x4DF, 'V'),
+    (0x4E0, 'M', 'ӡ'),
+    (0x4E1, 'V'),
+    (0x4E2, 'M', 'ӣ'),
+    (0x4E3, 'V'),
+    (0x4E4, 'M', 'ӥ'),
+    (0x4E5, 'V'),
+    (0x4E6, 'M', 'ӧ'),
+    (0x4E7, 'V'),
+    (0x4E8, 'M', 'ө'),
+    (0x4E9, 'V'),
+    (0x4EA, 'M', 'ӫ'),
+    (0x4EB, 'V'),
+    (0x4EC, 'M', 'ӭ'),
+    (0x4ED, 'V'),
+    (0x4EE, 'M', 'ӯ'),
+    (0x4EF, 'V'),
+    (0x4F0, 'M', 'ӱ'),
+    (0x4F1, 'V'),
+    (0x4F2, 'M', 'ӳ'),
+    (0x4F3, 'V'),
+    (0x4F4, 'M', 'ӵ'),
+    (0x4F5, 'V'),
+    (0x4F6, 'M', 'ӷ'),
+    (0x4F7, 'V'),
+    (0x4F8, 'M', 'ӹ'),
+    (0x4F9, 'V'),
+    (0x4FA, 'M', 'ӻ'),
+    (0x4FB, 'V'),
+    (0x4FC, 'M', 'ӽ'),
+    (0x4FD, 'V'),
+    (0x4FE, 'M', 'ӿ'),
+    (0x4FF, 'V'),
+    (0x500, 'M', 'ԁ'),
+    (0x501, 'V'),
+    (0x502, 'M', 'ԃ'),
+    ]
+
+def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x503, 'V'),
+    (0x504, 'M', 'ԅ'),
+    (0x505, 'V'),
+    (0x506, 'M', 'ԇ'),
+    (0x507, 'V'),
+    (0x508, 'M', 'ԉ'),
+    (0x509, 'V'),
+    (0x50A, 'M', 'ԋ'),
+    (0x50B, 'V'),
+    (0x50C, 'M', 'ԍ'),
+    (0x50D, 'V'),
+    (0x50E, 'M', 'ԏ'),
+    (0x50F, 'V'),
+    (0x510, 'M', 'ԑ'),
+    (0x511, 'V'),
+    (0x512, 'M', 'ԓ'),
+    (0x513, 'V'),
+    (0x514, 'M', 'ԕ'),
+    (0x515, 'V'),
+    (0x516, 'M', 'ԗ'),
+    (0x517, 'V'),
+    (0x518, 'M', 'ԙ'),
+    (0x519, 'V'),
+    (0x51A, 'M', 'ԛ'),
+    (0x51B, 'V'),
+    (0x51C, 'M', 'ԝ'),
+    (0x51D, 'V'),
+    (0x51E, 'M', 'ԟ'),
+    (0x51F, 'V'),
+    (0x520, 'M', 'ԡ'),
+    (0x521, 'V'),
+    (0x522, 'M', 'ԣ'),
+    (0x523, 'V'),
+    (0x524, 'M', 'ԥ'),
+    (0x525, 'V'),
+    (0x526, 'M', 'ԧ'),
+    (0x527, 'V'),
+    (0x528, 'M', 'ԩ'),
+    (0x529, 'V'),
+    (0x52A, 'M', 'ԫ'),
+    (0x52B, 'V'),
+    (0x52C, 'M', 'ԭ'),
+    (0x52D, 'V'),
+    (0x52E, 'M', 'ԯ'),
+    (0x52F, 'V'),
+    (0x530, 'X'),
+    (0x531, 'M', 'ա'),
+    (0x532, 'M', 'բ'),
+    (0x533, 'M', 'գ'),
+    (0x534, 'M', 'դ'),
+    (0x535, 'M', 'ե'),
+    (0x536, 'M', 'զ'),
+    (0x537, 'M', 'է'),
+    (0x538, 'M', 'ը'),
+    (0x539, 'M', 'թ'),
+    (0x53A, 'M', 'ժ'),
+    (0x53B, 'M', 'ի'),
+    (0x53C, 'M', 'լ'),
+    (0x53D, 'M', 'խ'),
+    (0x53E, 'M', 'ծ'),
+    (0x53F, 'M', 'կ'),
+    (0x540, 'M', 'հ'),
+    (0x541, 'M', 'ձ'),
+    (0x542, 'M', 'ղ'),
+    (0x543, 'M', 'ճ'),
+    (0x544, 'M', 'մ'),
+    (0x545, 'M', 'յ'),
+    (0x546, 'M', 'ն'),
+    (0x547, 'M', 'շ'),
+    (0x548, 'M', 'ո'),
+    (0x549, 'M', 'չ'),
+    (0x54A, 'M', 'պ'),
+    (0x54B, 'M', 'ջ'),
+    (0x54C, 'M', 'ռ'),
+    (0x54D, 'M', 'ս'),
+    (0x54E, 'M', 'վ'),
+    (0x54F, 'M', 'տ'),
+    (0x550, 'M', 'ր'),
+    (0x551, 'M', 'ց'),
+    (0x552, 'M', 'ւ'),
+    (0x553, 'M', 'փ'),
+    (0x554, 'M', 'ք'),
+    (0x555, 'M', 'օ'),
+    (0x556, 'M', 'ֆ'),
+    (0x557, 'X'),
+    (0x559, 'V'),
+    (0x587, 'M', 'եւ'),
+    (0x588, 'V'),
+    (0x58B, 'X'),
+    (0x58D, 'V'),
+    (0x590, 'X'),
+    (0x591, 'V'),
+    (0x5C8, 'X'),
+    (0x5D0, 'V'),
+    (0x5EB, 'X'),
+    (0x5EF, 'V'),
+    (0x5F5, 'X'),
+    (0x606, 'V'),
+    (0x61C, 'X'),
+    (0x61D, 'V'),
+    ]
+
+def _seg_10() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x675, 'M', 'اٴ'),
+    (0x676, 'M', 'وٴ'),
+    (0x677, 'M', 'ۇٴ'),
+    (0x678, 'M', 'يٴ'),
+    (0x679, 'V'),
+    (0x6DD, 'X'),
+    (0x6DE, 'V'),
+    (0x70E, 'X'),
+    (0x710, 'V'),
+    (0x74B, 'X'),
+    (0x74D, 'V'),
+    (0x7B2, 'X'),
+    (0x7C0, 'V'),
+    (0x7FB, 'X'),
+    (0x7FD, 'V'),
+    (0x82E, 'X'),
+    (0x830, 'V'),
+    (0x83F, 'X'),
+    (0x840, 'V'),
+    (0x85C, 'X'),
+    (0x85E, 'V'),
+    (0x85F, 'X'),
+    (0x860, 'V'),
+    (0x86B, 'X'),
+    (0x870, 'V'),
+    (0x88F, 'X'),
+    (0x898, 'V'),
+    (0x8E2, 'X'),
+    (0x8E3, 'V'),
+    (0x958, 'M', 'क़'),
+    (0x959, 'M', 'ख़'),
+    (0x95A, 'M', 'ग़'),
+    (0x95B, 'M', 'ज़'),
+    (0x95C, 'M', 'ड़'),
+    (0x95D, 'M', 'ढ़'),
+    (0x95E, 'M', 'फ़'),
+    (0x95F, 'M', 'य़'),
+    (0x960, 'V'),
+    (0x984, 'X'),
+    (0x985, 'V'),
+    (0x98D, 'X'),
+    (0x98F, 'V'),
+    (0x991, 'X'),
+    (0x993, 'V'),
+    (0x9A9, 'X'),
+    (0x9AA, 'V'),
+    (0x9B1, 'X'),
+    (0x9B2, 'V'),
+    (0x9B3, 'X'),
+    (0x9B6, 'V'),
+    (0x9BA, 'X'),
+    (0x9BC, 'V'),
+    (0x9C5, 'X'),
+    (0x9C7, 'V'),
+    (0x9C9, 'X'),
+    (0x9CB, 'V'),
+    (0x9CF, 'X'),
+    (0x9D7, 'V'),
+    (0x9D8, 'X'),
+    (0x9DC, 'M', 'ড়'),
+    (0x9DD, 'M', 'ঢ়'),
+    (0x9DE, 'X'),
+    (0x9DF, 'M', 'য়'),
+    (0x9E0, 'V'),
+    (0x9E4, 'X'),
+    (0x9E6, 'V'),
+    (0x9FF, 'X'),
+    (0xA01, 'V'),
+    (0xA04, 'X'),
+    (0xA05, 'V'),
+    (0xA0B, 'X'),
+    (0xA0F, 'V'),
+    (0xA11, 'X'),
+    (0xA13, 'V'),
+    (0xA29, 'X'),
+    (0xA2A, 'V'),
+    (0xA31, 'X'),
+    (0xA32, 'V'),
+    (0xA33, 'M', 'ਲ਼'),
+    (0xA34, 'X'),
+    (0xA35, 'V'),
+    (0xA36, 'M', 'ਸ਼'),
+    (0xA37, 'X'),
+    (0xA38, 'V'),
+    (0xA3A, 'X'),
+    (0xA3C, 'V'),
+    (0xA3D, 'X'),
+    (0xA3E, 'V'),
+    (0xA43, 'X'),
+    (0xA47, 'V'),
+    (0xA49, 'X'),
+    (0xA4B, 'V'),
+    (0xA4E, 'X'),
+    (0xA51, 'V'),
+    (0xA52, 'X'),
+    (0xA59, 'M', 'ਖ਼'),
+    (0xA5A, 'M', 'ਗ਼'),
+    (0xA5B, 'M', 'ਜ਼'),
+    (0xA5C, 'V'),
+    (0xA5D, 'X'),
+    ]
+
+def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xA5E, 'M', 'ਫ਼'),
+    (0xA5F, 'X'),
+    (0xA66, 'V'),
+    (0xA77, 'X'),
+    (0xA81, 'V'),
+    (0xA84, 'X'),
+    (0xA85, 'V'),
+    (0xA8E, 'X'),
+    (0xA8F, 'V'),
+    (0xA92, 'X'),
+    (0xA93, 'V'),
+    (0xAA9, 'X'),
+    (0xAAA, 'V'),
+    (0xAB1, 'X'),
+    (0xAB2, 'V'),
+    (0xAB4, 'X'),
+    (0xAB5, 'V'),
+    (0xABA, 'X'),
+    (0xABC, 'V'),
+    (0xAC6, 'X'),
+    (0xAC7, 'V'),
+    (0xACA, 'X'),
+    (0xACB, 'V'),
+    (0xACE, 'X'),
+    (0xAD0, 'V'),
+    (0xAD1, 'X'),
+    (0xAE0, 'V'),
+    (0xAE4, 'X'),
+    (0xAE6, 'V'),
+    (0xAF2, 'X'),
+    (0xAF9, 'V'),
+    (0xB00, 'X'),
+    (0xB01, 'V'),
+    (0xB04, 'X'),
+    (0xB05, 'V'),
+    (0xB0D, 'X'),
+    (0xB0F, 'V'),
+    (0xB11, 'X'),
+    (0xB13, 'V'),
+    (0xB29, 'X'),
+    (0xB2A, 'V'),
+    (0xB31, 'X'),
+    (0xB32, 'V'),
+    (0xB34, 'X'),
+    (0xB35, 'V'),
+    (0xB3A, 'X'),
+    (0xB3C, 'V'),
+    (0xB45, 'X'),
+    (0xB47, 'V'),
+    (0xB49, 'X'),
+    (0xB4B, 'V'),
+    (0xB4E, 'X'),
+    (0xB55, 'V'),
+    (0xB58, 'X'),
+    (0xB5C, 'M', 'ଡ଼'),
+    (0xB5D, 'M', 'ଢ଼'),
+    (0xB5E, 'X'),
+    (0xB5F, 'V'),
+    (0xB64, 'X'),
+    (0xB66, 'V'),
+    (0xB78, 'X'),
+    (0xB82, 'V'),
+    (0xB84, 'X'),
+    (0xB85, 'V'),
+    (0xB8B, 'X'),
+    (0xB8E, 'V'),
+    (0xB91, 'X'),
+    (0xB92, 'V'),
+    (0xB96, 'X'),
+    (0xB99, 'V'),
+    (0xB9B, 'X'),
+    (0xB9C, 'V'),
+    (0xB9D, 'X'),
+    (0xB9E, 'V'),
+    (0xBA0, 'X'),
+    (0xBA3, 'V'),
+    (0xBA5, 'X'),
+    (0xBA8, 'V'),
+    (0xBAB, 'X'),
+    (0xBAE, 'V'),
+    (0xBBA, 'X'),
+    (0xBBE, 'V'),
+    (0xBC3, 'X'),
+    (0xBC6, 'V'),
+    (0xBC9, 'X'),
+    (0xBCA, 'V'),
+    (0xBCE, 'X'),
+    (0xBD0, 'V'),
+    (0xBD1, 'X'),
+    (0xBD7, 'V'),
+    (0xBD8, 'X'),
+    (0xBE6, 'V'),
+    (0xBFB, 'X'),
+    (0xC00, 'V'),
+    (0xC0D, 'X'),
+    (0xC0E, 'V'),
+    (0xC11, 'X'),
+    (0xC12, 'V'),
+    (0xC29, 'X'),
+    (0xC2A, 'V'),
+    ]
+
+def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xC3A, 'X'),
+    (0xC3C, 'V'),
+    (0xC45, 'X'),
+    (0xC46, 'V'),
+    (0xC49, 'X'),
+    (0xC4A, 'V'),
+    (0xC4E, 'X'),
+    (0xC55, 'V'),
+    (0xC57, 'X'),
+    (0xC58, 'V'),
+    (0xC5B, 'X'),
+    (0xC5D, 'V'),
+    (0xC5E, 'X'),
+    (0xC60, 'V'),
+    (0xC64, 'X'),
+    (0xC66, 'V'),
+    (0xC70, 'X'),
+    (0xC77, 'V'),
+    (0xC8D, 'X'),
+    (0xC8E, 'V'),
+    (0xC91, 'X'),
+    (0xC92, 'V'),
+    (0xCA9, 'X'),
+    (0xCAA, 'V'),
+    (0xCB4, 'X'),
+    (0xCB5, 'V'),
+    (0xCBA, 'X'),
+    (0xCBC, 'V'),
+    (0xCC5, 'X'),
+    (0xCC6, 'V'),
+    (0xCC9, 'X'),
+    (0xCCA, 'V'),
+    (0xCCE, 'X'),
+    (0xCD5, 'V'),
+    (0xCD7, 'X'),
+    (0xCDD, 'V'),
+    (0xCDF, 'X'),
+    (0xCE0, 'V'),
+    (0xCE4, 'X'),
+    (0xCE6, 'V'),
+    (0xCF0, 'X'),
+    (0xCF1, 'V'),
+    (0xCF4, 'X'),
+    (0xD00, 'V'),
+    (0xD0D, 'X'),
+    (0xD0E, 'V'),
+    (0xD11, 'X'),
+    (0xD12, 'V'),
+    (0xD45, 'X'),
+    (0xD46, 'V'),
+    (0xD49, 'X'),
+    (0xD4A, 'V'),
+    (0xD50, 'X'),
+    (0xD54, 'V'),
+    (0xD64, 'X'),
+    (0xD66, 'V'),
+    (0xD80, 'X'),
+    (0xD81, 'V'),
+    (0xD84, 'X'),
+    (0xD85, 'V'),
+    (0xD97, 'X'),
+    (0xD9A, 'V'),
+    (0xDB2, 'X'),
+    (0xDB3, 'V'),
+    (0xDBC, 'X'),
+    (0xDBD, 'V'),
+    (0xDBE, 'X'),
+    (0xDC0, 'V'),
+    (0xDC7, 'X'),
+    (0xDCA, 'V'),
+    (0xDCB, 'X'),
+    (0xDCF, 'V'),
+    (0xDD5, 'X'),
+    (0xDD6, 'V'),
+    (0xDD7, 'X'),
+    (0xDD8, 'V'),
+    (0xDE0, 'X'),
+    (0xDE6, 'V'),
+    (0xDF0, 'X'),
+    (0xDF2, 'V'),
+    (0xDF5, 'X'),
+    (0xE01, 'V'),
+    (0xE33, 'M', 'ํา'),
+    (0xE34, 'V'),
+    (0xE3B, 'X'),
+    (0xE3F, 'V'),
+    (0xE5C, 'X'),
+    (0xE81, 'V'),
+    (0xE83, 'X'),
+    (0xE84, 'V'),
+    (0xE85, 'X'),
+    (0xE86, 'V'),
+    (0xE8B, 'X'),
+    (0xE8C, 'V'),
+    (0xEA4, 'X'),
+    (0xEA5, 'V'),
+    (0xEA6, 'X'),
+    (0xEA7, 'V'),
+    (0xEB3, 'M', 'ໍາ'),
+    (0xEB4, 'V'),
+    ]
+
+def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xEBE, 'X'),
+    (0xEC0, 'V'),
+    (0xEC5, 'X'),
+    (0xEC6, 'V'),
+    (0xEC7, 'X'),
+    (0xEC8, 'V'),
+    (0xECF, 'X'),
+    (0xED0, 'V'),
+    (0xEDA, 'X'),
+    (0xEDC, 'M', 'ຫນ'),
+    (0xEDD, 'M', 'ຫມ'),
+    (0xEDE, 'V'),
+    (0xEE0, 'X'),
+    (0xF00, 'V'),
+    (0xF0C, 'M', '་'),
+    (0xF0D, 'V'),
+    (0xF43, 'M', 'གྷ'),
+    (0xF44, 'V'),
+    (0xF48, 'X'),
+    (0xF49, 'V'),
+    (0xF4D, 'M', 'ཌྷ'),
+    (0xF4E, 'V'),
+    (0xF52, 'M', 'དྷ'),
+    (0xF53, 'V'),
+    (0xF57, 'M', 'བྷ'),
+    (0xF58, 'V'),
+    (0xF5C, 'M', 'ཛྷ'),
+    (0xF5D, 'V'),
+    (0xF69, 'M', 'ཀྵ'),
+    (0xF6A, 'V'),
+    (0xF6D, 'X'),
+    (0xF71, 'V'),
+    (0xF73, 'M', 'ཱི'),
+    (0xF74, 'V'),
+    (0xF75, 'M', 'ཱུ'),
+    (0xF76, 'M', 'ྲྀ'),
+    (0xF77, 'M', 'ྲཱྀ'),
+    (0xF78, 'M', 'ླྀ'),
+    (0xF79, 'M', 'ླཱྀ'),
+    (0xF7A, 'V'),
+    (0xF81, 'M', 'ཱྀ'),
+    (0xF82, 'V'),
+    (0xF93, 'M', 'ྒྷ'),
+    (0xF94, 'V'),
+    (0xF98, 'X'),
+    (0xF99, 'V'),
+    (0xF9D, 'M', 'ྜྷ'),
+    (0xF9E, 'V'),
+    (0xFA2, 'M', 'ྡྷ'),
+    (0xFA3, 'V'),
+    (0xFA7, 'M', 'ྦྷ'),
+    (0xFA8, 'V'),
+    (0xFAC, 'M', 'ྫྷ'),
+    (0xFAD, 'V'),
+    (0xFB9, 'M', 'ྐྵ'),
+    (0xFBA, 'V'),
+    (0xFBD, 'X'),
+    (0xFBE, 'V'),
+    (0xFCD, 'X'),
+    (0xFCE, 'V'),
+    (0xFDB, 'X'),
+    (0x1000, 'V'),
+    (0x10A0, 'X'),
+    (0x10C7, 'M', 'ⴧ'),
+    (0x10C8, 'X'),
+    (0x10CD, 'M', 'ⴭ'),
+    (0x10CE, 'X'),
+    (0x10D0, 'V'),
+    (0x10FC, 'M', 'ნ'),
+    (0x10FD, 'V'),
+    (0x115F, 'X'),
+    (0x1161, 'V'),
+    (0x1249, 'X'),
+    (0x124A, 'V'),
+    (0x124E, 'X'),
+    (0x1250, 'V'),
+    (0x1257, 'X'),
+    (0x1258, 'V'),
+    (0x1259, 'X'),
+    (0x125A, 'V'),
+    (0x125E, 'X'),
+    (0x1260, 'V'),
+    (0x1289, 'X'),
+    (0x128A, 'V'),
+    (0x128E, 'X'),
+    (0x1290, 'V'),
+    (0x12B1, 'X'),
+    (0x12B2, 'V'),
+    (0x12B6, 'X'),
+    (0x12B8, 'V'),
+    (0x12BF, 'X'),
+    (0x12C0, 'V'),
+    (0x12C1, 'X'),
+    (0x12C2, 'V'),
+    (0x12C6, 'X'),
+    (0x12C8, 'V'),
+    (0x12D7, 'X'),
+    (0x12D8, 'V'),
+    (0x1311, 'X'),
+    (0x1312, 'V'),
+    ]
+
+def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1316, 'X'),
+    (0x1318, 'V'),
+    (0x135B, 'X'),
+    (0x135D, 'V'),
+    (0x137D, 'X'),
+    (0x1380, 'V'),
+    (0x139A, 'X'),
+    (0x13A0, 'V'),
+    (0x13F6, 'X'),
+    (0x13F8, 'M', 'Ᏸ'),
+    (0x13F9, 'M', 'Ᏹ'),
+    (0x13FA, 'M', 'Ᏺ'),
+    (0x13FB, 'M', 'Ᏻ'),
+    (0x13FC, 'M', 'Ᏼ'),
+    (0x13FD, 'M', 'Ᏽ'),
+    (0x13FE, 'X'),
+    (0x1400, 'V'),
+    (0x1680, 'X'),
+    (0x1681, 'V'),
+    (0x169D, 'X'),
+    (0x16A0, 'V'),
+    (0x16F9, 'X'),
+    (0x1700, 'V'),
+    (0x1716, 'X'),
+    (0x171F, 'V'),
+    (0x1737, 'X'),
+    (0x1740, 'V'),
+    (0x1754, 'X'),
+    (0x1760, 'V'),
+    (0x176D, 'X'),
+    (0x176E, 'V'),
+    (0x1771, 'X'),
+    (0x1772, 'V'),
+    (0x1774, 'X'),
+    (0x1780, 'V'),
+    (0x17B4, 'X'),
+    (0x17B6, 'V'),
+    (0x17DE, 'X'),
+    (0x17E0, 'V'),
+    (0x17EA, 'X'),
+    (0x17F0, 'V'),
+    (0x17FA, 'X'),
+    (0x1800, 'V'),
+    (0x1806, 'X'),
+    (0x1807, 'V'),
+    (0x180B, 'I'),
+    (0x180E, 'X'),
+    (0x180F, 'I'),
+    (0x1810, 'V'),
+    (0x181A, 'X'),
+    (0x1820, 'V'),
+    (0x1879, 'X'),
+    (0x1880, 'V'),
+    (0x18AB, 'X'),
+    (0x18B0, 'V'),
+    (0x18F6, 'X'),
+    (0x1900, 'V'),
+    (0x191F, 'X'),
+    (0x1920, 'V'),
+    (0x192C, 'X'),
+    (0x1930, 'V'),
+    (0x193C, 'X'),
+    (0x1940, 'V'),
+    (0x1941, 'X'),
+    (0x1944, 'V'),
+    (0x196E, 'X'),
+    (0x1970, 'V'),
+    (0x1975, 'X'),
+    (0x1980, 'V'),
+    (0x19AC, 'X'),
+    (0x19B0, 'V'),
+    (0x19CA, 'X'),
+    (0x19D0, 'V'),
+    (0x19DB, 'X'),
+    (0x19DE, 'V'),
+    (0x1A1C, 'X'),
+    (0x1A1E, 'V'),
+    (0x1A5F, 'X'),
+    (0x1A60, 'V'),
+    (0x1A7D, 'X'),
+    (0x1A7F, 'V'),
+    (0x1A8A, 'X'),
+    (0x1A90, 'V'),
+    (0x1A9A, 'X'),
+    (0x1AA0, 'V'),
+    (0x1AAE, 'X'),
+    (0x1AB0, 'V'),
+    (0x1ACF, 'X'),
+    (0x1B00, 'V'),
+    (0x1B4D, 'X'),
+    (0x1B50, 'V'),
+    (0x1B7F, 'X'),
+    (0x1B80, 'V'),
+    (0x1BF4, 'X'),
+    (0x1BFC, 'V'),
+    (0x1C38, 'X'),
+    (0x1C3B, 'V'),
+    (0x1C4A, 'X'),
+    (0x1C4D, 'V'),
+    (0x1C80, 'M', 'в'),
+    ]
+
+def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1C81, 'M', 'д'),
+    (0x1C82, 'M', 'о'),
+    (0x1C83, 'M', 'с'),
+    (0x1C84, 'M', 'т'),
+    (0x1C86, 'M', 'ъ'),
+    (0x1C87, 'M', 'ѣ'),
+    (0x1C88, 'M', 'ꙋ'),
+    (0x1C89, 'X'),
+    (0x1C90, 'M', 'ა'),
+    (0x1C91, 'M', 'ბ'),
+    (0x1C92, 'M', 'გ'),
+    (0x1C93, 'M', 'დ'),
+    (0x1C94, 'M', 'ე'),
+    (0x1C95, 'M', 'ვ'),
+    (0x1C96, 'M', 'ზ'),
+    (0x1C97, 'M', 'თ'),
+    (0x1C98, 'M', 'ი'),
+    (0x1C99, 'M', 'კ'),
+    (0x1C9A, 'M', 'ლ'),
+    (0x1C9B, 'M', 'მ'),
+    (0x1C9C, 'M', 'ნ'),
+    (0x1C9D, 'M', 'ო'),
+    (0x1C9E, 'M', 'პ'),
+    (0x1C9F, 'M', 'ჟ'),
+    (0x1CA0, 'M', 'რ'),
+    (0x1CA1, 'M', 'ს'),
+    (0x1CA2, 'M', 'ტ'),
+    (0x1CA3, 'M', 'უ'),
+    (0x1CA4, 'M', 'ფ'),
+    (0x1CA5, 'M', 'ქ'),
+    (0x1CA6, 'M', 'ღ'),
+    (0x1CA7, 'M', 'ყ'),
+    (0x1CA8, 'M', 'შ'),
+    (0x1CA9, 'M', 'ჩ'),
+    (0x1CAA, 'M', 'ც'),
+    (0x1CAB, 'M', 'ძ'),
+    (0x1CAC, 'M', 'წ'),
+    (0x1CAD, 'M', 'ჭ'),
+    (0x1CAE, 'M', 'ხ'),
+    (0x1CAF, 'M', 'ჯ'),
+    (0x1CB0, 'M', 'ჰ'),
+    (0x1CB1, 'M', 'ჱ'),
+    (0x1CB2, 'M', 'ჲ'),
+    (0x1CB3, 'M', 'ჳ'),
+    (0x1CB4, 'M', 'ჴ'),
+    (0x1CB5, 'M', 'ჵ'),
+    (0x1CB6, 'M', 'ჶ'),
+    (0x1CB7, 'M', 'ჷ'),
+    (0x1CB8, 'M', 'ჸ'),
+    (0x1CB9, 'M', 'ჹ'),
+    (0x1CBA, 'M', 'ჺ'),
+    (0x1CBB, 'X'),
+    (0x1CBD, 'M', 'ჽ'),
+    (0x1CBE, 'M', 'ჾ'),
+    (0x1CBF, 'M', 'ჿ'),
+    (0x1CC0, 'V'),
+    (0x1CC8, 'X'),
+    (0x1CD0, 'V'),
+    (0x1CFB, 'X'),
+    (0x1D00, 'V'),
+    (0x1D2C, 'M', 'a'),
+    (0x1D2D, 'M', 'æ'),
+    (0x1D2E, 'M', 'b'),
+    (0x1D2F, 'V'),
+    (0x1D30, 'M', 'd'),
+    (0x1D31, 'M', 'e'),
+    (0x1D32, 'M', 'ǝ'),
+    (0x1D33, 'M', 'g'),
+    (0x1D34, 'M', 'h'),
+    (0x1D35, 'M', 'i'),
+    (0x1D36, 'M', 'j'),
+    (0x1D37, 'M', 'k'),
+    (0x1D38, 'M', 'l'),
+    (0x1D39, 'M', 'm'),
+    (0x1D3A, 'M', 'n'),
+    (0x1D3B, 'V'),
+    (0x1D3C, 'M', 'o'),
+    (0x1D3D, 'M', 'ȣ'),
+    (0x1D3E, 'M', 'p'),
+    (0x1D3F, 'M', 'r'),
+    (0x1D40, 'M', 't'),
+    (0x1D41, 'M', 'u'),
+    (0x1D42, 'M', 'w'),
+    (0x1D43, 'M', 'a'),
+    (0x1D44, 'M', 'ɐ'),
+    (0x1D45, 'M', 'ɑ'),
+    (0x1D46, 'M', 'ᴂ'),
+    (0x1D47, 'M', 'b'),
+    (0x1D48, 'M', 'd'),
+    (0x1D49, 'M', 'e'),
+    (0x1D4A, 'M', 'ə'),
+    (0x1D4B, 'M', 'ɛ'),
+    (0x1D4C, 'M', 'ɜ'),
+    (0x1D4D, 'M', 'g'),
+    (0x1D4E, 'V'),
+    (0x1D4F, 'M', 'k'),
+    (0x1D50, 'M', 'm'),
+    (0x1D51, 'M', 'ŋ'),
+    (0x1D52, 'M', 'o'),
+    (0x1D53, 'M', 'ɔ'),
+    ]
+
+def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1D54, 'M', 'ᴖ'),
+    (0x1D55, 'M', 'ᴗ'),
+    (0x1D56, 'M', 'p'),
+    (0x1D57, 'M', 't'),
+    (0x1D58, 'M', 'u'),
+    (0x1D59, 'M', 'ᴝ'),
+    (0x1D5A, 'M', 'ɯ'),
+    (0x1D5B, 'M', 'v'),
+    (0x1D5C, 'M', 'ᴥ'),
+    (0x1D5D, 'M', 'β'),
+    (0x1D5E, 'M', 'γ'),
+    (0x1D5F, 'M', 'δ'),
+    (0x1D60, 'M', 'φ'),
+    (0x1D61, 'M', 'χ'),
+    (0x1D62, 'M', 'i'),
+    (0x1D63, 'M', 'r'),
+    (0x1D64, 'M', 'u'),
+    (0x1D65, 'M', 'v'),
+    (0x1D66, 'M', 'β'),
+    (0x1D67, 'M', 'γ'),
+    (0x1D68, 'M', 'ρ'),
+    (0x1D69, 'M', 'φ'),
+    (0x1D6A, 'M', 'χ'),
+    (0x1D6B, 'V'),
+    (0x1D78, 'M', 'н'),
+    (0x1D79, 'V'),
+    (0x1D9B, 'M', 'ɒ'),
+    (0x1D9C, 'M', 'c'),
+    (0x1D9D, 'M', 'ɕ'),
+    (0x1D9E, 'M', 'ð'),
+    (0x1D9F, 'M', 'ɜ'),
+    (0x1DA0, 'M', 'f'),
+    (0x1DA1, 'M', 'ɟ'),
+    (0x1DA2, 'M', 'ɡ'),
+    (0x1DA3, 'M', 'ɥ'),
+    (0x1DA4, 'M', 'ɨ'),
+    (0x1DA5, 'M', 'ɩ'),
+    (0x1DA6, 'M', 'ɪ'),
+    (0x1DA7, 'M', 'ᵻ'),
+    (0x1DA8, 'M', 'ʝ'),
+    (0x1DA9, 'M', 'ɭ'),
+    (0x1DAA, 'M', 'ᶅ'),
+    (0x1DAB, 'M', 'ʟ'),
+    (0x1DAC, 'M', 'ɱ'),
+    (0x1DAD, 'M', 'ɰ'),
+    (0x1DAE, 'M', 'ɲ'),
+    (0x1DAF, 'M', 'ɳ'),
+    (0x1DB0, 'M', 'ɴ'),
+    (0x1DB1, 'M', 'ɵ'),
+    (0x1DB2, 'M', 'ɸ'),
+    (0x1DB3, 'M', 'ʂ'),
+    (0x1DB4, 'M', 'ʃ'),
+    (0x1DB5, 'M', 'ƫ'),
+    (0x1DB6, 'M', 'ʉ'),
+    (0x1DB7, 'M', 'ʊ'),
+    (0x1DB8, 'M', 'ᴜ'),
+    (0x1DB9, 'M', 'ʋ'),
+    (0x1DBA, 'M', 'ʌ'),
+    (0x1DBB, 'M', 'z'),
+    (0x1DBC, 'M', 'ʐ'),
+    (0x1DBD, 'M', 'ʑ'),
+    (0x1DBE, 'M', 'ʒ'),
+    (0x1DBF, 'M', 'θ'),
+    (0x1DC0, 'V'),
+    (0x1E00, 'M', 'ḁ'),
+    (0x1E01, 'V'),
+    (0x1E02, 'M', 'ḃ'),
+    (0x1E03, 'V'),
+    (0x1E04, 'M', 'ḅ'),
+    (0x1E05, 'V'),
+    (0x1E06, 'M', 'ḇ'),
+    (0x1E07, 'V'),
+    (0x1E08, 'M', 'ḉ'),
+    (0x1E09, 'V'),
+    (0x1E0A, 'M', 'ḋ'),
+    (0x1E0B, 'V'),
+    (0x1E0C, 'M', 'ḍ'),
+    (0x1E0D, 'V'),
+    (0x1E0E, 'M', 'ḏ'),
+    (0x1E0F, 'V'),
+    (0x1E10, 'M', 'ḑ'),
+    (0x1E11, 'V'),
+    (0x1E12, 'M', 'ḓ'),
+    (0x1E13, 'V'),
+    (0x1E14, 'M', 'ḕ'),
+    (0x1E15, 'V'),
+    (0x1E16, 'M', 'ḗ'),
+    (0x1E17, 'V'),
+    (0x1E18, 'M', 'ḙ'),
+    (0x1E19, 'V'),
+    (0x1E1A, 'M', 'ḛ'),
+    (0x1E1B, 'V'),
+    (0x1E1C, 'M', 'ḝ'),
+    (0x1E1D, 'V'),
+    (0x1E1E, 'M', 'ḟ'),
+    (0x1E1F, 'V'),
+    (0x1E20, 'M', 'ḡ'),
+    (0x1E21, 'V'),
+    (0x1E22, 'M', 'ḣ'),
+    (0x1E23, 'V'),
+    ]
+
+def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1E24, 'M', 'ḥ'),
+    (0x1E25, 'V'),
+    (0x1E26, 'M', 'ḧ'),
+    (0x1E27, 'V'),
+    (0x1E28, 'M', 'ḩ'),
+    (0x1E29, 'V'),
+    (0x1E2A, 'M', 'ḫ'),
+    (0x1E2B, 'V'),
+    (0x1E2C, 'M', 'ḭ'),
+    (0x1E2D, 'V'),
+    (0x1E2E, 'M', 'ḯ'),
+    (0x1E2F, 'V'),
+    (0x1E30, 'M', 'ḱ'),
+    (0x1E31, 'V'),
+    (0x1E32, 'M', 'ḳ'),
+    (0x1E33, 'V'),
+    (0x1E34, 'M', 'ḵ'),
+    (0x1E35, 'V'),
+    (0x1E36, 'M', 'ḷ'),
+    (0x1E37, 'V'),
+    (0x1E38, 'M', 'ḹ'),
+    (0x1E39, 'V'),
+    (0x1E3A, 'M', 'ḻ'),
+    (0x1E3B, 'V'),
+    (0x1E3C, 'M', 'ḽ'),
+    (0x1E3D, 'V'),
+    (0x1E3E, 'M', 'ḿ'),
+    (0x1E3F, 'V'),
+    (0x1E40, 'M', 'ṁ'),
+    (0x1E41, 'V'),
+    (0x1E42, 'M', 'ṃ'),
+    (0x1E43, 'V'),
+    (0x1E44, 'M', 'ṅ'),
+    (0x1E45, 'V'),
+    (0x1E46, 'M', 'ṇ'),
+    (0x1E47, 'V'),
+    (0x1E48, 'M', 'ṉ'),
+    (0x1E49, 'V'),
+    (0x1E4A, 'M', 'ṋ'),
+    (0x1E4B, 'V'),
+    (0x1E4C, 'M', 'ṍ'),
+    (0x1E4D, 'V'),
+    (0x1E4E, 'M', 'ṏ'),
+    (0x1E4F, 'V'),
+    (0x1E50, 'M', 'ṑ'),
+    (0x1E51, 'V'),
+    (0x1E52, 'M', 'ṓ'),
+    (0x1E53, 'V'),
+    (0x1E54, 'M', 'ṕ'),
+    (0x1E55, 'V'),
+    (0x1E56, 'M', 'ṗ'),
+    (0x1E57, 'V'),
+    (0x1E58, 'M', 'ṙ'),
+    (0x1E59, 'V'),
+    (0x1E5A, 'M', 'ṛ'),
+    (0x1E5B, 'V'),
+    (0x1E5C, 'M', 'ṝ'),
+    (0x1E5D, 'V'),
+    (0x1E5E, 'M', 'ṟ'),
+    (0x1E5F, 'V'),
+    (0x1E60, 'M', 'ṡ'),
+    (0x1E61, 'V'),
+    (0x1E62, 'M', 'ṣ'),
+    (0x1E63, 'V'),
+    (0x1E64, 'M', 'ṥ'),
+    (0x1E65, 'V'),
+    (0x1E66, 'M', 'ṧ'),
+    (0x1E67, 'V'),
+    (0x1E68, 'M', 'ṩ'),
+    (0x1E69, 'V'),
+    (0x1E6A, 'M', 'ṫ'),
+    (0x1E6B, 'V'),
+    (0x1E6C, 'M', 'ṭ'),
+    (0x1E6D, 'V'),
+    (0x1E6E, 'M', 'ṯ'),
+    (0x1E6F, 'V'),
+    (0x1E70, 'M', 'ṱ'),
+    (0x1E71, 'V'),
+    (0x1E72, 'M', 'ṳ'),
+    (0x1E73, 'V'),
+    (0x1E74, 'M', 'ṵ'),
+    (0x1E75, 'V'),
+    (0x1E76, 'M', 'ṷ'),
+    (0x1E77, 'V'),
+    (0x1E78, 'M', 'ṹ'),
+    (0x1E79, 'V'),
+    (0x1E7A, 'M', 'ṻ'),
+    (0x1E7B, 'V'),
+    (0x1E7C, 'M', 'ṽ'),
+    (0x1E7D, 'V'),
+    (0x1E7E, 'M', 'ṿ'),
+    (0x1E7F, 'V'),
+    (0x1E80, 'M', 'ẁ'),
+    (0x1E81, 'V'),
+    (0x1E82, 'M', 'ẃ'),
+    (0x1E83, 'V'),
+    (0x1E84, 'M', 'ẅ'),
+    (0x1E85, 'V'),
+    (0x1E86, 'M', 'ẇ'),
+    (0x1E87, 'V'),
+    ]
+
+def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1E88, 'M', 'ẉ'),
+    (0x1E89, 'V'),
+    (0x1E8A, 'M', 'ẋ'),
+    (0x1E8B, 'V'),
+    (0x1E8C, 'M', 'ẍ'),
+    (0x1E8D, 'V'),
+    (0x1E8E, 'M', 'ẏ'),
+    (0x1E8F, 'V'),
+    (0x1E90, 'M', 'ẑ'),
+    (0x1E91, 'V'),
+    (0x1E92, 'M', 'ẓ'),
+    (0x1E93, 'V'),
+    (0x1E94, 'M', 'ẕ'),
+    (0x1E95, 'V'),
+    (0x1E9A, 'M', 'aʾ'),
+    (0x1E9B, 'M', 'ṡ'),
+    (0x1E9C, 'V'),
+    (0x1E9E, 'M', 'ss'),
+    (0x1E9F, 'V'),
+    (0x1EA0, 'M', 'ạ'),
+    (0x1EA1, 'V'),
+    (0x1EA2, 'M', 'ả'),
+    (0x1EA3, 'V'),
+    (0x1EA4, 'M', 'ấ'),
+    (0x1EA5, 'V'),
+    (0x1EA6, 'M', 'ầ'),
+    (0x1EA7, 'V'),
+    (0x1EA8, 'M', 'ẩ'),
+    (0x1EA9, 'V'),
+    (0x1EAA, 'M', 'ẫ'),
+    (0x1EAB, 'V'),
+    (0x1EAC, 'M', 'ậ'),
+    (0x1EAD, 'V'),
+    (0x1EAE, 'M', 'ắ'),
+    (0x1EAF, 'V'),
+    (0x1EB0, 'M', 'ằ'),
+    (0x1EB1, 'V'),
+    (0x1EB2, 'M', 'ẳ'),
+    (0x1EB3, 'V'),
+    (0x1EB4, 'M', 'ẵ'),
+    (0x1EB5, 'V'),
+    (0x1EB6, 'M', 'ặ'),
+    (0x1EB7, 'V'),
+    (0x1EB8, 'M', 'ẹ'),
+    (0x1EB9, 'V'),
+    (0x1EBA, 'M', 'ẻ'),
+    (0x1EBB, 'V'),
+    (0x1EBC, 'M', 'ẽ'),
+    (0x1EBD, 'V'),
+    (0x1EBE, 'M', 'ế'),
+    (0x1EBF, 'V'),
+    (0x1EC0, 'M', 'ề'),
+    (0x1EC1, 'V'),
+    (0x1EC2, 'M', 'ể'),
+    (0x1EC3, 'V'),
+    (0x1EC4, 'M', 'ễ'),
+    (0x1EC5, 'V'),
+    (0x1EC6, 'M', 'ệ'),
+    (0x1EC7, 'V'),
+    (0x1EC8, 'M', 'ỉ'),
+    (0x1EC9, 'V'),
+    (0x1ECA, 'M', 'ị'),
+    (0x1ECB, 'V'),
+    (0x1ECC, 'M', 'ọ'),
+    (0x1ECD, 'V'),
+    (0x1ECE, 'M', 'ỏ'),
+    (0x1ECF, 'V'),
+    (0x1ED0, 'M', 'ố'),
+    (0x1ED1, 'V'),
+    (0x1ED2, 'M', 'ồ'),
+    (0x1ED3, 'V'),
+    (0x1ED4, 'M', 'ổ'),
+    (0x1ED5, 'V'),
+    (0x1ED6, 'M', 'ỗ'),
+    (0x1ED7, 'V'),
+    (0x1ED8, 'M', 'ộ'),
+    (0x1ED9, 'V'),
+    (0x1EDA, 'M', 'ớ'),
+    (0x1EDB, 'V'),
+    (0x1EDC, 'M', 'ờ'),
+    (0x1EDD, 'V'),
+    (0x1EDE, 'M', 'ở'),
+    (0x1EDF, 'V'),
+    (0x1EE0, 'M', 'ỡ'),
+    (0x1EE1, 'V'),
+    (0x1EE2, 'M', 'ợ'),
+    (0x1EE3, 'V'),
+    (0x1EE4, 'M', 'ụ'),
+    (0x1EE5, 'V'),
+    (0x1EE6, 'M', 'ủ'),
+    (0x1EE7, 'V'),
+    (0x1EE8, 'M', 'ứ'),
+    (0x1EE9, 'V'),
+    (0x1EEA, 'M', 'ừ'),
+    (0x1EEB, 'V'),
+    (0x1EEC, 'M', 'ử'),
+    (0x1EED, 'V'),
+    (0x1EEE, 'M', 'ữ'),
+    (0x1EEF, 'V'),
+    (0x1EF0, 'M', 'ự'),
+    ]
+
+def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1EF1, 'V'),
+    (0x1EF2, 'M', 'ỳ'),
+    (0x1EF3, 'V'),
+    (0x1EF4, 'M', 'ỵ'),
+    (0x1EF5, 'V'),
+    (0x1EF6, 'M', 'ỷ'),
+    (0x1EF7, 'V'),
+    (0x1EF8, 'M', 'ỹ'),
+    (0x1EF9, 'V'),
+    (0x1EFA, 'M', 'ỻ'),
+    (0x1EFB, 'V'),
+    (0x1EFC, 'M', 'ỽ'),
+    (0x1EFD, 'V'),
+    (0x1EFE, 'M', 'ỿ'),
+    (0x1EFF, 'V'),
+    (0x1F08, 'M', 'ἀ'),
+    (0x1F09, 'M', 'ἁ'),
+    (0x1F0A, 'M', 'ἂ'),
+    (0x1F0B, 'M', 'ἃ'),
+    (0x1F0C, 'M', 'ἄ'),
+    (0x1F0D, 'M', 'ἅ'),
+    (0x1F0E, 'M', 'ἆ'),
+    (0x1F0F, 'M', 'ἇ'),
+    (0x1F10, 'V'),
+    (0x1F16, 'X'),
+    (0x1F18, 'M', 'ἐ'),
+    (0x1F19, 'M', 'ἑ'),
+    (0x1F1A, 'M', 'ἒ'),
+    (0x1F1B, 'M', 'ἓ'),
+    (0x1F1C, 'M', 'ἔ'),
+    (0x1F1D, 'M', 'ἕ'),
+    (0x1F1E, 'X'),
+    (0x1F20, 'V'),
+    (0x1F28, 'M', 'ἠ'),
+    (0x1F29, 'M', 'ἡ'),
+    (0x1F2A, 'M', 'ἢ'),
+    (0x1F2B, 'M', 'ἣ'),
+    (0x1F2C, 'M', 'ἤ'),
+    (0x1F2D, 'M', 'ἥ'),
+    (0x1F2E, 'M', 'ἦ'),
+    (0x1F2F, 'M', 'ἧ'),
+    (0x1F30, 'V'),
+    (0x1F38, 'M', 'ἰ'),
+    (0x1F39, 'M', 'ἱ'),
+    (0x1F3A, 'M', 'ἲ'),
+    (0x1F3B, 'M', 'ἳ'),
+    (0x1F3C, 'M', 'ἴ'),
+    (0x1F3D, 'M', 'ἵ'),
+    (0x1F3E, 'M', 'ἶ'),
+    (0x1F3F, 'M', 'ἷ'),
+    (0x1F40, 'V'),
+    (0x1F46, 'X'),
+    (0x1F48, 'M', 'ὀ'),
+    (0x1F49, 'M', 'ὁ'),
+    (0x1F4A, 'M', 'ὂ'),
+    (0x1F4B, 'M', 'ὃ'),
+    (0x1F4C, 'M', 'ὄ'),
+    (0x1F4D, 'M', 'ὅ'),
+    (0x1F4E, 'X'),
+    (0x1F50, 'V'),
+    (0x1F58, 'X'),
+    (0x1F59, 'M', 'ὑ'),
+    (0x1F5A, 'X'),
+    (0x1F5B, 'M', 'ὓ'),
+    (0x1F5C, 'X'),
+    (0x1F5D, 'M', 'ὕ'),
+    (0x1F5E, 'X'),
+    (0x1F5F, 'M', 'ὗ'),
+    (0x1F60, 'V'),
+    (0x1F68, 'M', 'ὠ'),
+    (0x1F69, 'M', 'ὡ'),
+    (0x1F6A, 'M', 'ὢ'),
+    (0x1F6B, 'M', 'ὣ'),
+    (0x1F6C, 'M', 'ὤ'),
+    (0x1F6D, 'M', 'ὥ'),
+    (0x1F6E, 'M', 'ὦ'),
+    (0x1F6F, 'M', 'ὧ'),
+    (0x1F70, 'V'),
+    (0x1F71, 'M', 'ά'),
+    (0x1F72, 'V'),
+    (0x1F73, 'M', 'έ'),
+    (0x1F74, 'V'),
+    (0x1F75, 'M', 'ή'),
+    (0x1F76, 'V'),
+    (0x1F77, 'M', 'ί'),
+    (0x1F78, 'V'),
+    (0x1F79, 'M', 'ό'),
+    (0x1F7A, 'V'),
+    (0x1F7B, 'M', 'ύ'),
+    (0x1F7C, 'V'),
+    (0x1F7D, 'M', 'ώ'),
+    (0x1F7E, 'X'),
+    (0x1F80, 'M', 'ἀι'),
+    (0x1F81, 'M', 'ἁι'),
+    (0x1F82, 'M', 'ἂι'),
+    (0x1F83, 'M', 'ἃι'),
+    (0x1F84, 'M', 'ἄι'),
+    (0x1F85, 'M', 'ἅι'),
+    (0x1F86, 'M', 'ἆι'),
+    (0x1F87, 'M', 'ἇι'),
+    ]
+
+def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1F88, 'M', 'ἀι'),
+    (0x1F89, 'M', 'ἁι'),
+    (0x1F8A, 'M', 'ἂι'),
+    (0x1F8B, 'M', 'ἃι'),
+    (0x1F8C, 'M', 'ἄι'),
+    (0x1F8D, 'M', 'ἅι'),
+    (0x1F8E, 'M', 'ἆι'),
+    (0x1F8F, 'M', 'ἇι'),
+    (0x1F90, 'M', 'ἠι'),
+    (0x1F91, 'M', 'ἡι'),
+    (0x1F92, 'M', 'ἢι'),
+    (0x1F93, 'M', 'ἣι'),
+    (0x1F94, 'M', 'ἤι'),
+    (0x1F95, 'M', 'ἥι'),
+    (0x1F96, 'M', 'ἦι'),
+    (0x1F97, 'M', 'ἧι'),
+    (0x1F98, 'M', 'ἠι'),
+    (0x1F99, 'M', 'ἡι'),
+    (0x1F9A, 'M', 'ἢι'),
+    (0x1F9B, 'M', 'ἣι'),
+    (0x1F9C, 'M', 'ἤι'),
+    (0x1F9D, 'M', 'ἥι'),
+    (0x1F9E, 'M', 'ἦι'),
+    (0x1F9F, 'M', 'ἧι'),
+    (0x1FA0, 'M', 'ὠι'),
+    (0x1FA1, 'M', 'ὡι'),
+    (0x1FA2, 'M', 'ὢι'),
+    (0x1FA3, 'M', 'ὣι'),
+    (0x1FA4, 'M', 'ὤι'),
+    (0x1FA5, 'M', 'ὥι'),
+    (0x1FA6, 'M', 'ὦι'),
+    (0x1FA7, 'M', 'ὧι'),
+    (0x1FA8, 'M', 'ὠι'),
+    (0x1FA9, 'M', 'ὡι'),
+    (0x1FAA, 'M', 'ὢι'),
+    (0x1FAB, 'M', 'ὣι'),
+    (0x1FAC, 'M', 'ὤι'),
+    (0x1FAD, 'M', 'ὥι'),
+    (0x1FAE, 'M', 'ὦι'),
+    (0x1FAF, 'M', 'ὧι'),
+    (0x1FB0, 'V'),
+    (0x1FB2, 'M', 'ὰι'),
+    (0x1FB3, 'M', 'αι'),
+    (0x1FB4, 'M', 'άι'),
+    (0x1FB5, 'X'),
+    (0x1FB6, 'V'),
+    (0x1FB7, 'M', 'ᾶι'),
+    (0x1FB8, 'M', 'ᾰ'),
+    (0x1FB9, 'M', 'ᾱ'),
+    (0x1FBA, 'M', 'ὰ'),
+    (0x1FBB, 'M', 'ά'),
+    (0x1FBC, 'M', 'αι'),
+    (0x1FBD, '3', ' ̓'),
+    (0x1FBE, 'M', 'ι'),
+    (0x1FBF, '3', ' ̓'),
+    (0x1FC0, '3', ' ͂'),
+    (0x1FC1, '3', ' ̈͂'),
+    (0x1FC2, 'M', 'ὴι'),
+    (0x1FC3, 'M', 'ηι'),
+    (0x1FC4, 'M', 'ήι'),
+    (0x1FC5, 'X'),
+    (0x1FC6, 'V'),
+    (0x1FC7, 'M', 'ῆι'),
+    (0x1FC8, 'M', 'ὲ'),
+    (0x1FC9, 'M', 'έ'),
+    (0x1FCA, 'M', 'ὴ'),
+    (0x1FCB, 'M', 'ή'),
+    (0x1FCC, 'M', 'ηι'),
+    (0x1FCD, '3', ' ̓̀'),
+    (0x1FCE, '3', ' ̓́'),
+    (0x1FCF, '3', ' ̓͂'),
+    (0x1FD0, 'V'),
+    (0x1FD3, 'M', 'ΐ'),
+    (0x1FD4, 'X'),
+    (0x1FD6, 'V'),
+    (0x1FD8, 'M', 'ῐ'),
+    (0x1FD9, 'M', 'ῑ'),
+    (0x1FDA, 'M', 'ὶ'),
+    (0x1FDB, 'M', 'ί'),
+    (0x1FDC, 'X'),
+    (0x1FDD, '3', ' ̔̀'),
+    (0x1FDE, '3', ' ̔́'),
+    (0x1FDF, '3', ' ̔͂'),
+    (0x1FE0, 'V'),
+    (0x1FE3, 'M', 'ΰ'),
+    (0x1FE4, 'V'),
+    (0x1FE8, 'M', 'ῠ'),
+    (0x1FE9, 'M', 'ῡ'),
+    (0x1FEA, 'M', 'ὺ'),
+    (0x1FEB, 'M', 'ύ'),
+    (0x1FEC, 'M', 'ῥ'),
+    (0x1FED, '3', ' ̈̀'),
+    (0x1FEE, '3', ' ̈́'),
+    (0x1FEF, '3', '`'),
+    (0x1FF0, 'X'),
+    (0x1FF2, 'M', 'ὼι'),
+    (0x1FF3, 'M', 'ωι'),
+    (0x1FF4, 'M', 'ώι'),
+    (0x1FF5, 'X'),
+    (0x1FF6, 'V'),
+    ]
+
+def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1FF7, 'M', 'ῶι'),
+    (0x1FF8, 'M', 'ὸ'),
+    (0x1FF9, 'M', 'ό'),
+    (0x1FFA, 'M', 'ὼ'),
+    (0x1FFB, 'M', 'ώ'),
+    (0x1FFC, 'M', 'ωι'),
+    (0x1FFD, '3', ' ́'),
+    (0x1FFE, '3', ' ̔'),
+    (0x1FFF, 'X'),
+    (0x2000, '3', ' '),
+    (0x200B, 'I'),
+    (0x200C, 'D', ''),
+    (0x200E, 'X'),
+    (0x2010, 'V'),
+    (0x2011, 'M', '‐'),
+    (0x2012, 'V'),
+    (0x2017, '3', ' ̳'),
+    (0x2018, 'V'),
+    (0x2024, 'X'),
+    (0x2027, 'V'),
+    (0x2028, 'X'),
+    (0x202F, '3', ' '),
+    (0x2030, 'V'),
+    (0x2033, 'M', '′′'),
+    (0x2034, 'M', '′′′'),
+    (0x2035, 'V'),
+    (0x2036, 'M', '‵‵'),
+    (0x2037, 'M', '‵‵‵'),
+    (0x2038, 'V'),
+    (0x203C, '3', '!!'),
+    (0x203D, 'V'),
+    (0x203E, '3', ' ̅'),
+    (0x203F, 'V'),
+    (0x2047, '3', '??'),
+    (0x2048, '3', '?!'),
+    (0x2049, '3', '!?'),
+    (0x204A, 'V'),
+    (0x2057, 'M', '′′′′'),
+    (0x2058, 'V'),
+    (0x205F, '3', ' '),
+    (0x2060, 'I'),
+    (0x2061, 'X'),
+    (0x2064, 'I'),
+    (0x2065, 'X'),
+    (0x2070, 'M', '0'),
+    (0x2071, 'M', 'i'),
+    (0x2072, 'X'),
+    (0x2074, 'M', '4'),
+    (0x2075, 'M', '5'),
+    (0x2076, 'M', '6'),
+    (0x2077, 'M', '7'),
+    (0x2078, 'M', '8'),
+    (0x2079, 'M', '9'),
+    (0x207A, '3', '+'),
+    (0x207B, 'M', '−'),
+    (0x207C, '3', '='),
+    (0x207D, '3', '('),
+    (0x207E, '3', ')'),
+    (0x207F, 'M', 'n'),
+    (0x2080, 'M', '0'),
+    (0x2081, 'M', '1'),
+    (0x2082, 'M', '2'),
+    (0x2083, 'M', '3'),
+    (0x2084, 'M', '4'),
+    (0x2085, 'M', '5'),
+    (0x2086, 'M', '6'),
+    (0x2087, 'M', '7'),
+    (0x2088, 'M', '8'),
+    (0x2089, 'M', '9'),
+    (0x208A, '3', '+'),
+    (0x208B, 'M', '−'),
+    (0x208C, '3', '='),
+    (0x208D, '3', '('),
+    (0x208E, '3', ')'),
+    (0x208F, 'X'),
+    (0x2090, 'M', 'a'),
+    (0x2091, 'M', 'e'),
+    (0x2092, 'M', 'o'),
+    (0x2093, 'M', 'x'),
+    (0x2094, 'M', 'ə'),
+    (0x2095, 'M', 'h'),
+    (0x2096, 'M', 'k'),
+    (0x2097, 'M', 'l'),
+    (0x2098, 'M', 'm'),
+    (0x2099, 'M', 'n'),
+    (0x209A, 'M', 'p'),
+    (0x209B, 'M', 's'),
+    (0x209C, 'M', 't'),
+    (0x209D, 'X'),
+    (0x20A0, 'V'),
+    (0x20A8, 'M', 'rs'),
+    (0x20A9, 'V'),
+    (0x20C1, 'X'),
+    (0x20D0, 'V'),
+    (0x20F1, 'X'),
+    (0x2100, '3', 'a/c'),
+    (0x2101, '3', 'a/s'),
+    (0x2102, 'M', 'c'),
+    (0x2103, 'M', '°c'),
+    (0x2104, 'V'),
+    ]
+
+def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x2105, '3', 'c/o'),
+    (0x2106, '3', 'c/u'),
+    (0x2107, 'M', 'ɛ'),
+    (0x2108, 'V'),
+    (0x2109, 'M', '°f'),
+    (0x210A, 'M', 'g'),
+    (0x210B, 'M', 'h'),
+    (0x210F, 'M', 'ħ'),
+    (0x2110, 'M', 'i'),
+    (0x2112, 'M', 'l'),
+    (0x2114, 'V'),
+    (0x2115, 'M', 'n'),
+    (0x2116, 'M', 'no'),
+    (0x2117, 'V'),
+    (0x2119, 'M', 'p'),
+    (0x211A, 'M', 'q'),
+    (0x211B, 'M', 'r'),
+    (0x211E, 'V'),
+    (0x2120, 'M', 'sm'),
+    (0x2121, 'M', 'tel'),
+    (0x2122, 'M', 'tm'),
+    (0x2123, 'V'),
+    (0x2124, 'M', 'z'),
+    (0x2125, 'V'),
+    (0x2126, 'M', 'ω'),
+    (0x2127, 'V'),
+    (0x2128, 'M', 'z'),
+    (0x2129, 'V'),
+    (0x212A, 'M', 'k'),
+    (0x212B, 'M', 'å'),
+    (0x212C, 'M', 'b'),
+    (0x212D, 'M', 'c'),
+    (0x212E, 'V'),
+    (0x212F, 'M', 'e'),
+    (0x2131, 'M', 'f'),
+    (0x2132, 'X'),
+    (0x2133, 'M', 'm'),
+    (0x2134, 'M', 'o'),
+    (0x2135, 'M', 'א'),
+    (0x2136, 'M', 'ב'),
+    (0x2137, 'M', 'ג'),
+    (0x2138, 'M', 'ד'),
+    (0x2139, 'M', 'i'),
+    (0x213A, 'V'),
+    (0x213B, 'M', 'fax'),
+    (0x213C, 'M', 'π'),
+    (0x213D, 'M', 'γ'),
+    (0x213F, 'M', 'π'),
+    (0x2140, 'M', '∑'),
+    (0x2141, 'V'),
+    (0x2145, 'M', 'd'),
+    (0x2147, 'M', 'e'),
+    (0x2148, 'M', 'i'),
+    (0x2149, 'M', 'j'),
+    (0x214A, 'V'),
+    (0x2150, 'M', '1⁄7'),
+    (0x2151, 'M', '1⁄9'),
+    (0x2152, 'M', '1⁄10'),
+    (0x2153, 'M', '1⁄3'),
+    (0x2154, 'M', '2⁄3'),
+    (0x2155, 'M', '1⁄5'),
+    (0x2156, 'M', '2⁄5'),
+    (0x2157, 'M', '3⁄5'),
+    (0x2158, 'M', '4⁄5'),
+    (0x2159, 'M', '1⁄6'),
+    (0x215A, 'M', '5⁄6'),
+    (0x215B, 'M', '1⁄8'),
+    (0x215C, 'M', '3⁄8'),
+    (0x215D, 'M', '5⁄8'),
+    (0x215E, 'M', '7⁄8'),
+    (0x215F, 'M', '1⁄'),
+    (0x2160, 'M', 'i'),
+    (0x2161, 'M', 'ii'),
+    (0x2162, 'M', 'iii'),
+    (0x2163, 'M', 'iv'),
+    (0x2164, 'M', 'v'),
+    (0x2165, 'M', 'vi'),
+    (0x2166, 'M', 'vii'),
+    (0x2167, 'M', 'viii'),
+    (0x2168, 'M', 'ix'),
+    (0x2169, 'M', 'x'),
+    (0x216A, 'M', 'xi'),
+    (0x216B, 'M', 'xii'),
+    (0x216C, 'M', 'l'),
+    (0x216D, 'M', 'c'),
+    (0x216E, 'M', 'd'),
+    (0x216F, 'M', 'm'),
+    (0x2170, 'M', 'i'),
+    (0x2171, 'M', 'ii'),
+    (0x2172, 'M', 'iii'),
+    (0x2173, 'M', 'iv'),
+    (0x2174, 'M', 'v'),
+    (0x2175, 'M', 'vi'),
+    (0x2176, 'M', 'vii'),
+    (0x2177, 'M', 'viii'),
+    (0x2178, 'M', 'ix'),
+    (0x2179, 'M', 'x'),
+    (0x217A, 'M', 'xi'),
+    (0x217B, 'M', 'xii'),
+    (0x217C, 'M', 'l'),
+    ]
+
+def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x217D, 'M', 'c'),
+    (0x217E, 'M', 'd'),
+    (0x217F, 'M', 'm'),
+    (0x2180, 'V'),
+    (0x2183, 'X'),
+    (0x2184, 'V'),
+    (0x2189, 'M', '0⁄3'),
+    (0x218A, 'V'),
+    (0x218C, 'X'),
+    (0x2190, 'V'),
+    (0x222C, 'M', '∫∫'),
+    (0x222D, 'M', '∫∫∫'),
+    (0x222E, 'V'),
+    (0x222F, 'M', '∮∮'),
+    (0x2230, 'M', '∮∮∮'),
+    (0x2231, 'V'),
+    (0x2260, '3'),
+    (0x2261, 'V'),
+    (0x226E, '3'),
+    (0x2270, 'V'),
+    (0x2329, 'M', '〈'),
+    (0x232A, 'M', '〉'),
+    (0x232B, 'V'),
+    (0x2427, 'X'),
+    (0x2440, 'V'),
+    (0x244B, 'X'),
+    (0x2460, 'M', '1'),
+    (0x2461, 'M', '2'),
+    (0x2462, 'M', '3'),
+    (0x2463, 'M', '4'),
+    (0x2464, 'M', '5'),
+    (0x2465, 'M', '6'),
+    (0x2466, 'M', '7'),
+    (0x2467, 'M', '8'),
+    (0x2468, 'M', '9'),
+    (0x2469, 'M', '10'),
+    (0x246A, 'M', '11'),
+    (0x246B, 'M', '12'),
+    (0x246C, 'M', '13'),
+    (0x246D, 'M', '14'),
+    (0x246E, 'M', '15'),
+    (0x246F, 'M', '16'),
+    (0x2470, 'M', '17'),
+    (0x2471, 'M', '18'),
+    (0x2472, 'M', '19'),
+    (0x2473, 'M', '20'),
+    (0x2474, '3', '(1)'),
+    (0x2475, '3', '(2)'),
+    (0x2476, '3', '(3)'),
+    (0x2477, '3', '(4)'),
+    (0x2478, '3', '(5)'),
+    (0x2479, '3', '(6)'),
+    (0x247A, '3', '(7)'),
+    (0x247B, '3', '(8)'),
+    (0x247C, '3', '(9)'),
+    (0x247D, '3', '(10)'),
+    (0x247E, '3', '(11)'),
+    (0x247F, '3', '(12)'),
+    (0x2480, '3', '(13)'),
+    (0x2481, '3', '(14)'),
+    (0x2482, '3', '(15)'),
+    (0x2483, '3', '(16)'),
+    (0x2484, '3', '(17)'),
+    (0x2485, '3', '(18)'),
+    (0x2486, '3', '(19)'),
+    (0x2487, '3', '(20)'),
+    (0x2488, 'X'),
+    (0x249C, '3', '(a)'),
+    (0x249D, '3', '(b)'),
+    (0x249E, '3', '(c)'),
+    (0x249F, '3', '(d)'),
+    (0x24A0, '3', '(e)'),
+    (0x24A1, '3', '(f)'),
+    (0x24A2, '3', '(g)'),
+    (0x24A3, '3', '(h)'),
+    (0x24A4, '3', '(i)'),
+    (0x24A5, '3', '(j)'),
+    (0x24A6, '3', '(k)'),
+    (0x24A7, '3', '(l)'),
+    (0x24A8, '3', '(m)'),
+    (0x24A9, '3', '(n)'),
+    (0x24AA, '3', '(o)'),
+    (0x24AB, '3', '(p)'),
+    (0x24AC, '3', '(q)'),
+    (0x24AD, '3', '(r)'),
+    (0x24AE, '3', '(s)'),
+    (0x24AF, '3', '(t)'),
+    (0x24B0, '3', '(u)'),
+    (0x24B1, '3', '(v)'),
+    (0x24B2, '3', '(w)'),
+    (0x24B3, '3', '(x)'),
+    (0x24B4, '3', '(y)'),
+    (0x24B5, '3', '(z)'),
+    (0x24B6, 'M', 'a'),
+    (0x24B7, 'M', 'b'),
+    (0x24B8, 'M', 'c'),
+    (0x24B9, 'M', 'd'),
+    (0x24BA, 'M', 'e'),
+    (0x24BB, 'M', 'f'),
+    (0x24BC, 'M', 'g'),
+    ]
+
+def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x24BD, 'M', 'h'),
+    (0x24BE, 'M', 'i'),
+    (0x24BF, 'M', 'j'),
+    (0x24C0, 'M', 'k'),
+    (0x24C1, 'M', 'l'),
+    (0x24C2, 'M', 'm'),
+    (0x24C3, 'M', 'n'),
+    (0x24C4, 'M', 'o'),
+    (0x24C5, 'M', 'p'),
+    (0x24C6, 'M', 'q'),
+    (0x24C7, 'M', 'r'),
+    (0x24C8, 'M', 's'),
+    (0x24C9, 'M', 't'),
+    (0x24CA, 'M', 'u'),
+    (0x24CB, 'M', 'v'),
+    (0x24CC, 'M', 'w'),
+    (0x24CD, 'M', 'x'),
+    (0x24CE, 'M', 'y'),
+    (0x24CF, 'M', 'z'),
+    (0x24D0, 'M', 'a'),
+    (0x24D1, 'M', 'b'),
+    (0x24D2, 'M', 'c'),
+    (0x24D3, 'M', 'd'),
+    (0x24D4, 'M', 'e'),
+    (0x24D5, 'M', 'f'),
+    (0x24D6, 'M', 'g'),
+    (0x24D7, 'M', 'h'),
+    (0x24D8, 'M', 'i'),
+    (0x24D9, 'M', 'j'),
+    (0x24DA, 'M', 'k'),
+    (0x24DB, 'M', 'l'),
+    (0x24DC, 'M', 'm'),
+    (0x24DD, 'M', 'n'),
+    (0x24DE, 'M', 'o'),
+    (0x24DF, 'M', 'p'),
+    (0x24E0, 'M', 'q'),
+    (0x24E1, 'M', 'r'),
+    (0x24E2, 'M', 's'),
+    (0x24E3, 'M', 't'),
+    (0x24E4, 'M', 'u'),
+    (0x24E5, 'M', 'v'),
+    (0x24E6, 'M', 'w'),
+    (0x24E7, 'M', 'x'),
+    (0x24E8, 'M', 'y'),
+    (0x24E9, 'M', 'z'),
+    (0x24EA, 'M', '0'),
+    (0x24EB, 'V'),
+    (0x2A0C, 'M', '∫∫∫∫'),
+    (0x2A0D, 'V'),
+    (0x2A74, '3', '::='),
+    (0x2A75, '3', '=='),
+    (0x2A76, '3', '==='),
+    (0x2A77, 'V'),
+    (0x2ADC, 'M', '⫝̸'),
+    (0x2ADD, 'V'),
+    (0x2B74, 'X'),
+    (0x2B76, 'V'),
+    (0x2B96, 'X'),
+    (0x2B97, 'V'),
+    (0x2C00, 'M', 'ⰰ'),
+    (0x2C01, 'M', 'ⰱ'),
+    (0x2C02, 'M', 'ⰲ'),
+    (0x2C03, 'M', 'ⰳ'),
+    (0x2C04, 'M', 'ⰴ'),
+    (0x2C05, 'M', 'ⰵ'),
+    (0x2C06, 'M', 'ⰶ'),
+    (0x2C07, 'M', 'ⰷ'),
+    (0x2C08, 'M', 'ⰸ'),
+    (0x2C09, 'M', 'ⰹ'),
+    (0x2C0A, 'M', 'ⰺ'),
+    (0x2C0B, 'M', 'ⰻ'),
+    (0x2C0C, 'M', 'ⰼ'),
+    (0x2C0D, 'M', 'ⰽ'),
+    (0x2C0E, 'M', 'ⰾ'),
+    (0x2C0F, 'M', 'ⰿ'),
+    (0x2C10, 'M', 'ⱀ'),
+    (0x2C11, 'M', 'ⱁ'),
+    (0x2C12, 'M', 'ⱂ'),
+    (0x2C13, 'M', 'ⱃ'),
+    (0x2C14, 'M', 'ⱄ'),
+    (0x2C15, 'M', 'ⱅ'),
+    (0x2C16, 'M', 'ⱆ'),
+    (0x2C17, 'M', 'ⱇ'),
+    (0x2C18, 'M', 'ⱈ'),
+    (0x2C19, 'M', 'ⱉ'),
+    (0x2C1A, 'M', 'ⱊ'),
+    (0x2C1B, 'M', 'ⱋ'),
+    (0x2C1C, 'M', 'ⱌ'),
+    (0x2C1D, 'M', 'ⱍ'),
+    (0x2C1E, 'M', 'ⱎ'),
+    (0x2C1F, 'M', 'ⱏ'),
+    (0x2C20, 'M', 'ⱐ'),
+    (0x2C21, 'M', 'ⱑ'),
+    (0x2C22, 'M', 'ⱒ'),
+    (0x2C23, 'M', 'ⱓ'),
+    (0x2C24, 'M', 'ⱔ'),
+    (0x2C25, 'M', 'ⱕ'),
+    (0x2C26, 'M', 'ⱖ'),
+    (0x2C27, 'M', 'ⱗ'),
+    (0x2C28, 'M', 'ⱘ'),
+    ]
+
+def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x2C29, 'M', 'ⱙ'),
+    (0x2C2A, 'M', 'ⱚ'),
+    (0x2C2B, 'M', 'ⱛ'),
+    (0x2C2C, 'M', 'ⱜ'),
+    (0x2C2D, 'M', 'ⱝ'),
+    (0x2C2E, 'M', 'ⱞ'),
+    (0x2C2F, 'M', 'ⱟ'),
+    (0x2C30, 'V'),
+    (0x2C60, 'M', 'ⱡ'),
+    (0x2C61, 'V'),
+    (0x2C62, 'M', 'ɫ'),
+    (0x2C63, 'M', 'ᵽ'),
+    (0x2C64, 'M', 'ɽ'),
+    (0x2C65, 'V'),
+    (0x2C67, 'M', 'ⱨ'),
+    (0x2C68, 'V'),
+    (0x2C69, 'M', 'ⱪ'),
+    (0x2C6A, 'V'),
+    (0x2C6B, 'M', 'ⱬ'),
+    (0x2C6C, 'V'),
+    (0x2C6D, 'M', 'ɑ'),
+    (0x2C6E, 'M', 'ɱ'),
+    (0x2C6F, 'M', 'ɐ'),
+    (0x2C70, 'M', 'ɒ'),
+    (0x2C71, 'V'),
+    (0x2C72, 'M', 'ⱳ'),
+    (0x2C73, 'V'),
+    (0x2C75, 'M', 'ⱶ'),
+    (0x2C76, 'V'),
+    (0x2C7C, 'M', 'j'),
+    (0x2C7D, 'M', 'v'),
+    (0x2C7E, 'M', 'ȿ'),
+    (0x2C7F, 'M', 'ɀ'),
+    (0x2C80, 'M', 'ⲁ'),
+    (0x2C81, 'V'),
+    (0x2C82, 'M', 'ⲃ'),
+    (0x2C83, 'V'),
+    (0x2C84, 'M', 'ⲅ'),
+    (0x2C85, 'V'),
+    (0x2C86, 'M', 'ⲇ'),
+    (0x2C87, 'V'),
+    (0x2C88, 'M', 'ⲉ'),
+    (0x2C89, 'V'),
+    (0x2C8A, 'M', 'ⲋ'),
+    (0x2C8B, 'V'),
+    (0x2C8C, 'M', 'ⲍ'),
+    (0x2C8D, 'V'),
+    (0x2C8E, 'M', 'ⲏ'),
+    (0x2C8F, 'V'),
+    (0x2C90, 'M', 'ⲑ'),
+    (0x2C91, 'V'),
+    (0x2C92, 'M', 'ⲓ'),
+    (0x2C93, 'V'),
+    (0x2C94, 'M', 'ⲕ'),
+    (0x2C95, 'V'),
+    (0x2C96, 'M', 'ⲗ'),
+    (0x2C97, 'V'),
+    (0x2C98, 'M', 'ⲙ'),
+    (0x2C99, 'V'),
+    (0x2C9A, 'M', 'ⲛ'),
+    (0x2C9B, 'V'),
+    (0x2C9C, 'M', 'ⲝ'),
+    (0x2C9D, 'V'),
+    (0x2C9E, 'M', 'ⲟ'),
+    (0x2C9F, 'V'),
+    (0x2CA0, 'M', 'ⲡ'),
+    (0x2CA1, 'V'),
+    (0x2CA2, 'M', 'ⲣ'),
+    (0x2CA3, 'V'),
+    (0x2CA4, 'M', 'ⲥ'),
+    (0x2CA5, 'V'),
+    (0x2CA6, 'M', 'ⲧ'),
+    (0x2CA7, 'V'),
+    (0x2CA8, 'M', 'ⲩ'),
+    (0x2CA9, 'V'),
+    (0x2CAA, 'M', 'ⲫ'),
+    (0x2CAB, 'V'),
+    (0x2CAC, 'M', 'ⲭ'),
+    (0x2CAD, 'V'),
+    (0x2CAE, 'M', 'ⲯ'),
+    (0x2CAF, 'V'),
+    (0x2CB0, 'M', 'ⲱ'),
+    (0x2CB1, 'V'),
+    (0x2CB2, 'M', 'ⲳ'),
+    (0x2CB3, 'V'),
+    (0x2CB4, 'M', 'ⲵ'),
+    (0x2CB5, 'V'),
+    (0x2CB6, 'M', 'ⲷ'),
+    (0x2CB7, 'V'),
+    (0x2CB8, 'M', 'ⲹ'),
+    (0x2CB9, 'V'),
+    (0x2CBA, 'M', 'ⲻ'),
+    (0x2CBB, 'V'),
+    (0x2CBC, 'M', 'ⲽ'),
+    (0x2CBD, 'V'),
+    (0x2CBE, 'M', 'ⲿ'),
+    (0x2CBF, 'V'),
+    (0x2CC0, 'M', 'ⳁ'),
+    (0x2CC1, 'V'),
+    (0x2CC2, 'M', 'ⳃ'),
+    ]
+
+def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x2CC3, 'V'),
+    (0x2CC4, 'M', 'ⳅ'),
+    (0x2CC5, 'V'),
+    (0x2CC6, 'M', 'ⳇ'),
+    (0x2CC7, 'V'),
+    (0x2CC8, 'M', 'ⳉ'),
+    (0x2CC9, 'V'),
+    (0x2CCA, 'M', 'ⳋ'),
+    (0x2CCB, 'V'),
+    (0x2CCC, 'M', 'ⳍ'),
+    (0x2CCD, 'V'),
+    (0x2CCE, 'M', 'ⳏ'),
+    (0x2CCF, 'V'),
+    (0x2CD0, 'M', 'ⳑ'),
+    (0x2CD1, 'V'),
+    (0x2CD2, 'M', 'ⳓ'),
+    (0x2CD3, 'V'),
+    (0x2CD4, 'M', 'ⳕ'),
+    (0x2CD5, 'V'),
+    (0x2CD6, 'M', 'ⳗ'),
+    (0x2CD7, 'V'),
+    (0x2CD8, 'M', 'ⳙ'),
+    (0x2CD9, 'V'),
+    (0x2CDA, 'M', 'ⳛ'),
+    (0x2CDB, 'V'),
+    (0x2CDC, 'M', 'ⳝ'),
+    (0x2CDD, 'V'),
+    (0x2CDE, 'M', 'ⳟ'),
+    (0x2CDF, 'V'),
+    (0x2CE0, 'M', 'ⳡ'),
+    (0x2CE1, 'V'),
+    (0x2CE2, 'M', 'ⳣ'),
+    (0x2CE3, 'V'),
+    (0x2CEB, 'M', 'ⳬ'),
+    (0x2CEC, 'V'),
+    (0x2CED, 'M', 'ⳮ'),
+    (0x2CEE, 'V'),
+    (0x2CF2, 'M', 'ⳳ'),
+    (0x2CF3, 'V'),
+    (0x2CF4, 'X'),
+    (0x2CF9, 'V'),
+    (0x2D26, 'X'),
+    (0x2D27, 'V'),
+    (0x2D28, 'X'),
+    (0x2D2D, 'V'),
+    (0x2D2E, 'X'),
+    (0x2D30, 'V'),
+    (0x2D68, 'X'),
+    (0x2D6F, 'M', 'ⵡ'),
+    (0x2D70, 'V'),
+    (0x2D71, 'X'),
+    (0x2D7F, 'V'),
+    (0x2D97, 'X'),
+    (0x2DA0, 'V'),
+    (0x2DA7, 'X'),
+    (0x2DA8, 'V'),
+    (0x2DAF, 'X'),
+    (0x2DB0, 'V'),
+    (0x2DB7, 'X'),
+    (0x2DB8, 'V'),
+    (0x2DBF, 'X'),
+    (0x2DC0, 'V'),
+    (0x2DC7, 'X'),
+    (0x2DC8, 'V'),
+    (0x2DCF, 'X'),
+    (0x2DD0, 'V'),
+    (0x2DD7, 'X'),
+    (0x2DD8, 'V'),
+    (0x2DDF, 'X'),
+    (0x2DE0, 'V'),
+    (0x2E5E, 'X'),
+    (0x2E80, 'V'),
+    (0x2E9A, 'X'),
+    (0x2E9B, 'V'),
+    (0x2E9F, 'M', '母'),
+    (0x2EA0, 'V'),
+    (0x2EF3, 'M', '龟'),
+    (0x2EF4, 'X'),
+    (0x2F00, 'M', '一'),
+    (0x2F01, 'M', '丨'),
+    (0x2F02, 'M', '丶'),
+    (0x2F03, 'M', '丿'),
+    (0x2F04, 'M', '乙'),
+    (0x2F05, 'M', '亅'),
+    (0x2F06, 'M', '二'),
+    (0x2F07, 'M', '亠'),
+    (0x2F08, 'M', '人'),
+    (0x2F09, 'M', '儿'),
+    (0x2F0A, 'M', '入'),
+    (0x2F0B, 'M', '八'),
+    (0x2F0C, 'M', '冂'),
+    (0x2F0D, 'M', '冖'),
+    (0x2F0E, 'M', '冫'),
+    (0x2F0F, 'M', '几'),
+    (0x2F10, 'M', '凵'),
+    (0x2F11, 'M', '刀'),
+    (0x2F12, 'M', '力'),
+    (0x2F13, 'M', '勹'),
+    (0x2F14, 'M', '匕'),
+    (0x2F15, 'M', '匚'),
+    ]
+
+def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x2F16, 'M', '匸'),
+    (0x2F17, 'M', '十'),
+    (0x2F18, 'M', '卜'),
+    (0x2F19, 'M', '卩'),
+    (0x2F1A, 'M', '厂'),
+    (0x2F1B, 'M', '厶'),
+    (0x2F1C, 'M', '又'),
+    (0x2F1D, 'M', '口'),
+    (0x2F1E, 'M', '囗'),
+    (0x2F1F, 'M', '土'),
+    (0x2F20, 'M', '士'),
+    (0x2F21, 'M', '夂'),
+    (0x2F22, 'M', '夊'),
+    (0x2F23, 'M', '夕'),
+    (0x2F24, 'M', '大'),
+    (0x2F25, 'M', '女'),
+    (0x2F26, 'M', '子'),
+    (0x2F27, 'M', '宀'),
+    (0x2F28, 'M', '寸'),
+    (0x2F29, 'M', '小'),
+    (0x2F2A, 'M', '尢'),
+    (0x2F2B, 'M', '尸'),
+    (0x2F2C, 'M', '屮'),
+    (0x2F2D, 'M', '山'),
+    (0x2F2E, 'M', '巛'),
+    (0x2F2F, 'M', '工'),
+    (0x2F30, 'M', '己'),
+    (0x2F31, 'M', '巾'),
+    (0x2F32, 'M', '干'),
+    (0x2F33, 'M', '幺'),
+    (0x2F34, 'M', '广'),
+    (0x2F35, 'M', '廴'),
+    (0x2F36, 'M', '廾'),
+    (0x2F37, 'M', '弋'),
+    (0x2F38, 'M', '弓'),
+    (0x2F39, 'M', '彐'),
+    (0x2F3A, 'M', '彡'),
+    (0x2F3B, 'M', '彳'),
+    (0x2F3C, 'M', '心'),
+    (0x2F3D, 'M', '戈'),
+    (0x2F3E, 'M', '戶'),
+    (0x2F3F, 'M', '手'),
+    (0x2F40, 'M', '支'),
+    (0x2F41, 'M', '攴'),
+    (0x2F42, 'M', '文'),
+    (0x2F43, 'M', '斗'),
+    (0x2F44, 'M', '斤'),
+    (0x2F45, 'M', '方'),
+    (0x2F46, 'M', '无'),
+    (0x2F47, 'M', '日'),
+    (0x2F48, 'M', '曰'),
+    (0x2F49, 'M', '月'),
+    (0x2F4A, 'M', '木'),
+    (0x2F4B, 'M', '欠'),
+    (0x2F4C, 'M', '止'),
+    (0x2F4D, 'M', '歹'),
+    (0x2F4E, 'M', '殳'),
+    (0x2F4F, 'M', '毋'),
+    (0x2F50, 'M', '比'),
+    (0x2F51, 'M', '毛'),
+    (0x2F52, 'M', '氏'),
+    (0x2F53, 'M', '气'),
+    (0x2F54, 'M', '水'),
+    (0x2F55, 'M', '火'),
+    (0x2F56, 'M', '爪'),
+    (0x2F57, 'M', '父'),
+    (0x2F58, 'M', '爻'),
+    (0x2F59, 'M', '爿'),
+    (0x2F5A, 'M', '片'),
+    (0x2F5B, 'M', '牙'),
+    (0x2F5C, 'M', '牛'),
+    (0x2F5D, 'M', '犬'),
+    (0x2F5E, 'M', '玄'),
+    (0x2F5F, 'M', '玉'),
+    (0x2F60, 'M', '瓜'),
+    (0x2F61, 'M', '瓦'),
+    (0x2F62, 'M', '甘'),
+    (0x2F63, 'M', '生'),
+    (0x2F64, 'M', '用'),
+    (0x2F65, 'M', '田'),
+    (0x2F66, 'M', '疋'),
+    (0x2F67, 'M', '疒'),
+    (0x2F68, 'M', '癶'),
+    (0x2F69, 'M', '白'),
+    (0x2F6A, 'M', '皮'),
+    (0x2F6B, 'M', '皿'),
+    (0x2F6C, 'M', '目'),
+    (0x2F6D, 'M', '矛'),
+    (0x2F6E, 'M', '矢'),
+    (0x2F6F, 'M', '石'),
+    (0x2F70, 'M', '示'),
+    (0x2F71, 'M', '禸'),
+    (0x2F72, 'M', '禾'),
+    (0x2F73, 'M', '穴'),
+    (0x2F74, 'M', '立'),
+    (0x2F75, 'M', '竹'),
+    (0x2F76, 'M', '米'),
+    (0x2F77, 'M', '糸'),
+    (0x2F78, 'M', '缶'),
+    (0x2F79, 'M', '网'),
+    ]
+
+def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x2F7A, 'M', '羊'),
+    (0x2F7B, 'M', '羽'),
+    (0x2F7C, 'M', '老'),
+    (0x2F7D, 'M', '而'),
+    (0x2F7E, 'M', '耒'),
+    (0x2F7F, 'M', '耳'),
+    (0x2F80, 'M', '聿'),
+    (0x2F81, 'M', '肉'),
+    (0x2F82, 'M', '臣'),
+    (0x2F83, 'M', '自'),
+    (0x2F84, 'M', '至'),
+    (0x2F85, 'M', '臼'),
+    (0x2F86, 'M', '舌'),
+    (0x2F87, 'M', '舛'),
+    (0x2F88, 'M', '舟'),
+    (0x2F89, 'M', '艮'),
+    (0x2F8A, 'M', '色'),
+    (0x2F8B, 'M', '艸'),
+    (0x2F8C, 'M', '虍'),
+    (0x2F8D, 'M', '虫'),
+    (0x2F8E, 'M', '血'),
+    (0x2F8F, 'M', '行'),
+    (0x2F90, 'M', '衣'),
+    (0x2F91, 'M', '襾'),
+    (0x2F92, 'M', '見'),
+    (0x2F93, 'M', '角'),
+    (0x2F94, 'M', '言'),
+    (0x2F95, 'M', '谷'),
+    (0x2F96, 'M', '豆'),
+    (0x2F97, 'M', '豕'),
+    (0x2F98, 'M', '豸'),
+    (0x2F99, 'M', '貝'),
+    (0x2F9A, 'M', '赤'),
+    (0x2F9B, 'M', '走'),
+    (0x2F9C, 'M', '足'),
+    (0x2F9D, 'M', '身'),
+    (0x2F9E, 'M', '車'),
+    (0x2F9F, 'M', '辛'),
+    (0x2FA0, 'M', '辰'),
+    (0x2FA1, 'M', '辵'),
+    (0x2FA2, 'M', '邑'),
+    (0x2FA3, 'M', '酉'),
+    (0x2FA4, 'M', '釆'),
+    (0x2FA5, 'M', '里'),
+    (0x2FA6, 'M', '金'),
+    (0x2FA7, 'M', '長'),
+    (0x2FA8, 'M', '門'),
+    (0x2FA9, 'M', '阜'),
+    (0x2FAA, 'M', '隶'),
+    (0x2FAB, 'M', '隹'),
+    (0x2FAC, 'M', '雨'),
+    (0x2FAD, 'M', '靑'),
+    (0x2FAE, 'M', '非'),
+    (0x2FAF, 'M', '面'),
+    (0x2FB0, 'M', '革'),
+    (0x2FB1, 'M', '韋'),
+    (0x2FB2, 'M', '韭'),
+    (0x2FB3, 'M', '音'),
+    (0x2FB4, 'M', '頁'),
+    (0x2FB5, 'M', '風'),
+    (0x2FB6, 'M', '飛'),
+    (0x2FB7, 'M', '食'),
+    (0x2FB8, 'M', '首'),
+    (0x2FB9, 'M', '香'),
+    (0x2FBA, 'M', '馬'),
+    (0x2FBB, 'M', '骨'),
+    (0x2FBC, 'M', '高'),
+    (0x2FBD, 'M', '髟'),
+    (0x2FBE, 'M', '鬥'),
+    (0x2FBF, 'M', '鬯'),
+    (0x2FC0, 'M', '鬲'),
+    (0x2FC1, 'M', '鬼'),
+    (0x2FC2, 'M', '魚'),
+    (0x2FC3, 'M', '鳥'),
+    (0x2FC4, 'M', '鹵'),
+    (0x2FC5, 'M', '鹿'),
+    (0x2FC6, 'M', '麥'),
+    (0x2FC7, 'M', '麻'),
+    (0x2FC8, 'M', '黃'),
+    (0x2FC9, 'M', '黍'),
+    (0x2FCA, 'M', '黑'),
+    (0x2FCB, 'M', '黹'),
+    (0x2FCC, 'M', '黽'),
+    (0x2FCD, 'M', '鼎'),
+    (0x2FCE, 'M', '鼓'),
+    (0x2FCF, 'M', '鼠'),
+    (0x2FD0, 'M', '鼻'),
+    (0x2FD1, 'M', '齊'),
+    (0x2FD2, 'M', '齒'),
+    (0x2FD3, 'M', '龍'),
+    (0x2FD4, 'M', '龜'),
+    (0x2FD5, 'M', '龠'),
+    (0x2FD6, 'X'),
+    (0x3000, '3', ' '),
+    (0x3001, 'V'),
+    (0x3002, 'M', '.'),
+    (0x3003, 'V'),
+    (0x3036, 'M', '〒'),
+    (0x3037, 'V'),
+    (0x3038, 'M', '十'),
+    ]
+
+def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x3039, 'M', '卄'),
+    (0x303A, 'M', '卅'),
+    (0x303B, 'V'),
+    (0x3040, 'X'),
+    (0x3041, 'V'),
+    (0x3097, 'X'),
+    (0x3099, 'V'),
+    (0x309B, '3', ' ゙'),
+    (0x309C, '3', ' ゚'),
+    (0x309D, 'V'),
+    (0x309F, 'M', 'より'),
+    (0x30A0, 'V'),
+    (0x30FF, 'M', 'コト'),
+    (0x3100, 'X'),
+    (0x3105, 'V'),
+    (0x3130, 'X'),
+    (0x3131, 'M', 'ᄀ'),
+    (0x3132, 'M', 'ᄁ'),
+    (0x3133, 'M', 'ᆪ'),
+    (0x3134, 'M', 'ᄂ'),
+    (0x3135, 'M', 'ᆬ'),
+    (0x3136, 'M', 'ᆭ'),
+    (0x3137, 'M', 'ᄃ'),
+    (0x3138, 'M', 'ᄄ'),
+    (0x3139, 'M', 'ᄅ'),
+    (0x313A, 'M', 'ᆰ'),
+    (0x313B, 'M', 'ᆱ'),
+    (0x313C, 'M', 'ᆲ'),
+    (0x313D, 'M', 'ᆳ'),
+    (0x313E, 'M', 'ᆴ'),
+    (0x313F, 'M', 'ᆵ'),
+    (0x3140, 'M', 'ᄚ'),
+    (0x3141, 'M', 'ᄆ'),
+    (0x3142, 'M', 'ᄇ'),
+    (0x3143, 'M', 'ᄈ'),
+    (0x3144, 'M', 'ᄡ'),
+    (0x3145, 'M', 'ᄉ'),
+    (0x3146, 'M', 'ᄊ'),
+    (0x3147, 'M', 'ᄋ'),
+    (0x3148, 'M', 'ᄌ'),
+    (0x3149, 'M', 'ᄍ'),
+    (0x314A, 'M', 'ᄎ'),
+    (0x314B, 'M', 'ᄏ'),
+    (0x314C, 'M', 'ᄐ'),
+    (0x314D, 'M', 'ᄑ'),
+    (0x314E, 'M', 'ᄒ'),
+    (0x314F, 'M', 'ᅡ'),
+    (0x3150, 'M', 'ᅢ'),
+    (0x3151, 'M', 'ᅣ'),
+    (0x3152, 'M', 'ᅤ'),
+    (0x3153, 'M', 'ᅥ'),
+    (0x3154, 'M', 'ᅦ'),
+    (0x3155, 'M', 'ᅧ'),
+    (0x3156, 'M', 'ᅨ'),
+    (0x3157, 'M', 'ᅩ'),
+    (0x3158, 'M', 'ᅪ'),
+    (0x3159, 'M', 'ᅫ'),
+    (0x315A, 'M', 'ᅬ'),
+    (0x315B, 'M', 'ᅭ'),
+    (0x315C, 'M', 'ᅮ'),
+    (0x315D, 'M', 'ᅯ'),
+    (0x315E, 'M', 'ᅰ'),
+    (0x315F, 'M', 'ᅱ'),
+    (0x3160, 'M', 'ᅲ'),
+    (0x3161, 'M', 'ᅳ'),
+    (0x3162, 'M', 'ᅴ'),
+    (0x3163, 'M', 'ᅵ'),
+    (0x3164, 'X'),
+    (0x3165, 'M', 'ᄔ'),
+    (0x3166, 'M', 'ᄕ'),
+    (0x3167, 'M', 'ᇇ'),
+    (0x3168, 'M', 'ᇈ'),
+    (0x3169, 'M', 'ᇌ'),
+    (0x316A, 'M', 'ᇎ'),
+    (0x316B, 'M', 'ᇓ'),
+    (0x316C, 'M', 'ᇗ'),
+    (0x316D, 'M', 'ᇙ'),
+    (0x316E, 'M', 'ᄜ'),
+    (0x316F, 'M', 'ᇝ'),
+    (0x3170, 'M', 'ᇟ'),
+    (0x3171, 'M', 'ᄝ'),
+    (0x3172, 'M', 'ᄞ'),
+    (0x3173, 'M', 'ᄠ'),
+    (0x3174, 'M', 'ᄢ'),
+    (0x3175, 'M', 'ᄣ'),
+    (0x3176, 'M', 'ᄧ'),
+    (0x3177, 'M', 'ᄩ'),
+    (0x3178, 'M', 'ᄫ'),
+    (0x3179, 'M', 'ᄬ'),
+    (0x317A, 'M', 'ᄭ'),
+    (0x317B, 'M', 'ᄮ'),
+    (0x317C, 'M', 'ᄯ'),
+    (0x317D, 'M', 'ᄲ'),
+    (0x317E, 'M', 'ᄶ'),
+    (0x317F, 'M', 'ᅀ'),
+    (0x3180, 'M', 'ᅇ'),
+    (0x3181, 'M', 'ᅌ'),
+    (0x3182, 'M', 'ᇱ'),
+    (0x3183, 'M', 'ᇲ'),
+    (0x3184, 'M', 'ᅗ'),
+    ]
+
+def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x3185, 'M', 'ᅘ'),
+    (0x3186, 'M', 'ᅙ'),
+    (0x3187, 'M', 'ᆄ'),
+    (0x3188, 'M', 'ᆅ'),
+    (0x3189, 'M', 'ᆈ'),
+    (0x318A, 'M', 'ᆑ'),
+    (0x318B, 'M', 'ᆒ'),
+    (0x318C, 'M', 'ᆔ'),
+    (0x318D, 'M', 'ᆞ'),
+    (0x318E, 'M', 'ᆡ'),
+    (0x318F, 'X'),
+    (0x3190, 'V'),
+    (0x3192, 'M', '一'),
+    (0x3193, 'M', '二'),
+    (0x3194, 'M', '三'),
+    (0x3195, 'M', '四'),
+    (0x3196, 'M', '上'),
+    (0x3197, 'M', '中'),
+    (0x3198, 'M', '下'),
+    (0x3199, 'M', '甲'),
+    (0x319A, 'M', '乙'),
+    (0x319B, 'M', '丙'),
+    (0x319C, 'M', '丁'),
+    (0x319D, 'M', '天'),
+    (0x319E, 'M', '地'),
+    (0x319F, 'M', '人'),
+    (0x31A0, 'V'),
+    (0x31E4, 'X'),
+    (0x31F0, 'V'),
+    (0x3200, '3', '(ᄀ)'),
+    (0x3201, '3', '(ᄂ)'),
+    (0x3202, '3', '(ᄃ)'),
+    (0x3203, '3', '(ᄅ)'),
+    (0x3204, '3', '(ᄆ)'),
+    (0x3205, '3', '(ᄇ)'),
+    (0x3206, '3', '(ᄉ)'),
+    (0x3207, '3', '(ᄋ)'),
+    (0x3208, '3', '(ᄌ)'),
+    (0x3209, '3', '(ᄎ)'),
+    (0x320A, '3', '(ᄏ)'),
+    (0x320B, '3', '(ᄐ)'),
+    (0x320C, '3', '(ᄑ)'),
+    (0x320D, '3', '(ᄒ)'),
+    (0x320E, '3', '(가)'),
+    (0x320F, '3', '(나)'),
+    (0x3210, '3', '(다)'),
+    (0x3211, '3', '(라)'),
+    (0x3212, '3', '(마)'),
+    (0x3213, '3', '(바)'),
+    (0x3214, '3', '(사)'),
+    (0x3215, '3', '(아)'),
+    (0x3216, '3', '(자)'),
+    (0x3217, '3', '(차)'),
+    (0x3218, '3', '(카)'),
+    (0x3219, '3', '(타)'),
+    (0x321A, '3', '(파)'),
+    (0x321B, '3', '(하)'),
+    (0x321C, '3', '(주)'),
+    (0x321D, '3', '(오전)'),
+    (0x321E, '3', '(오후)'),
+    (0x321F, 'X'),
+    (0x3220, '3', '(一)'),
+    (0x3221, '3', '(二)'),
+    (0x3222, '3', '(三)'),
+    (0x3223, '3', '(四)'),
+    (0x3224, '3', '(五)'),
+    (0x3225, '3', '(六)'),
+    (0x3226, '3', '(七)'),
+    (0x3227, '3', '(八)'),
+    (0x3228, '3', '(九)'),
+    (0x3229, '3', '(十)'),
+    (0x322A, '3', '(月)'),
+    (0x322B, '3', '(火)'),
+    (0x322C, '3', '(水)'),
+    (0x322D, '3', '(木)'),
+    (0x322E, '3', '(金)'),
+    (0x322F, '3', '(土)'),
+    (0x3230, '3', '(日)'),
+    (0x3231, '3', '(株)'),
+    (0x3232, '3', '(有)'),
+    (0x3233, '3', '(社)'),
+    (0x3234, '3', '(名)'),
+    (0x3235, '3', '(特)'),
+    (0x3236, '3', '(財)'),
+    (0x3237, '3', '(祝)'),
+    (0x3238, '3', '(労)'),
+    (0x3239, '3', '(代)'),
+    (0x323A, '3', '(呼)'),
+    (0x323B, '3', '(学)'),
+    (0x323C, '3', '(監)'),
+    (0x323D, '3', '(企)'),
+    (0x323E, '3', '(資)'),
+    (0x323F, '3', '(協)'),
+    (0x3240, '3', '(祭)'),
+    (0x3241, '3', '(休)'),
+    (0x3242, '3', '(自)'),
+    (0x3243, '3', '(至)'),
+    (0x3244, 'M', '問'),
+    (0x3245, 'M', '幼'),
+    (0x3246, 'M', '文'),
+    ]
+
+def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x3247, 'M', '箏'),
+    (0x3248, 'V'),
+    (0x3250, 'M', 'pte'),
+    (0x3251, 'M', '21'),
+    (0x3252, 'M', '22'),
+    (0x3253, 'M', '23'),
+    (0x3254, 'M', '24'),
+    (0x3255, 'M', '25'),
+    (0x3256, 'M', '26'),
+    (0x3257, 'M', '27'),
+    (0x3258, 'M', '28'),
+    (0x3259, 'M', '29'),
+    (0x325A, 'M', '30'),
+    (0x325B, 'M', '31'),
+    (0x325C, 'M', '32'),
+    (0x325D, 'M', '33'),
+    (0x325E, 'M', '34'),
+    (0x325F, 'M', '35'),
+    (0x3260, 'M', 'ᄀ'),
+    (0x3261, 'M', 'ᄂ'),
+    (0x3262, 'M', 'ᄃ'),
+    (0x3263, 'M', 'ᄅ'),
+    (0x3264, 'M', 'ᄆ'),
+    (0x3265, 'M', 'ᄇ'),
+    (0x3266, 'M', 'ᄉ'),
+    (0x3267, 'M', 'ᄋ'),
+    (0x3268, 'M', 'ᄌ'),
+    (0x3269, 'M', 'ᄎ'),
+    (0x326A, 'M', 'ᄏ'),
+    (0x326B, 'M', 'ᄐ'),
+    (0x326C, 'M', 'ᄑ'),
+    (0x326D, 'M', 'ᄒ'),
+    (0x326E, 'M', '가'),
+    (0x326F, 'M', '나'),
+    (0x3270, 'M', '다'),
+    (0x3271, 'M', '라'),
+    (0x3272, 'M', '마'),
+    (0x3273, 'M', '바'),
+    (0x3274, 'M', '사'),
+    (0x3275, 'M', '아'),
+    (0x3276, 'M', '자'),
+    (0x3277, 'M', '차'),
+    (0x3278, 'M', '카'),
+    (0x3279, 'M', '타'),
+    (0x327A, 'M', '파'),
+    (0x327B, 'M', '하'),
+    (0x327C, 'M', '참고'),
+    (0x327D, 'M', '주의'),
+    (0x327E, 'M', '우'),
+    (0x327F, 'V'),
+    (0x3280, 'M', '一'),
+    (0x3281, 'M', '二'),
+    (0x3282, 'M', '三'),
+    (0x3283, 'M', '四'),
+    (0x3284, 'M', '五'),
+    (0x3285, 'M', '六'),
+    (0x3286, 'M', '七'),
+    (0x3287, 'M', '八'),
+    (0x3288, 'M', '九'),
+    (0x3289, 'M', '十'),
+    (0x328A, 'M', '月'),
+    (0x328B, 'M', '火'),
+    (0x328C, 'M', '水'),
+    (0x328D, 'M', '木'),
+    (0x328E, 'M', '金'),
+    (0x328F, 'M', '土'),
+    (0x3290, 'M', '日'),
+    (0x3291, 'M', '株'),
+    (0x3292, 'M', '有'),
+    (0x3293, 'M', '社'),
+    (0x3294, 'M', '名'),
+    (0x3295, 'M', '特'),
+    (0x3296, 'M', '財'),
+    (0x3297, 'M', '祝'),
+    (0x3298, 'M', '労'),
+    (0x3299, 'M', '秘'),
+    (0x329A, 'M', '男'),
+    (0x329B, 'M', '女'),
+    (0x329C, 'M', '適'),
+    (0x329D, 'M', '優'),
+    (0x329E, 'M', '印'),
+    (0x329F, 'M', '注'),
+    (0x32A0, 'M', '項'),
+    (0x32A1, 'M', '休'),
+    (0x32A2, 'M', '写'),
+    (0x32A3, 'M', '正'),
+    (0x32A4, 'M', '上'),
+    (0x32A5, 'M', '中'),
+    (0x32A6, 'M', '下'),
+    (0x32A7, 'M', '左'),
+    (0x32A8, 'M', '右'),
+    (0x32A9, 'M', '医'),
+    (0x32AA, 'M', '宗'),
+    (0x32AB, 'M', '学'),
+    (0x32AC, 'M', '監'),
+    (0x32AD, 'M', '企'),
+    (0x32AE, 'M', '資'),
+    (0x32AF, 'M', '協'),
+    (0x32B0, 'M', '夜'),
+    (0x32B1, 'M', '36'),
+    ]
+
+def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x32B2, 'M', '37'),
+    (0x32B3, 'M', '38'),
+    (0x32B4, 'M', '39'),
+    (0x32B5, 'M', '40'),
+    (0x32B6, 'M', '41'),
+    (0x32B7, 'M', '42'),
+    (0x32B8, 'M', '43'),
+    (0x32B9, 'M', '44'),
+    (0x32BA, 'M', '45'),
+    (0x32BB, 'M', '46'),
+    (0x32BC, 'M', '47'),
+    (0x32BD, 'M', '48'),
+    (0x32BE, 'M', '49'),
+    (0x32BF, 'M', '50'),
+    (0x32C0, 'M', '1月'),
+    (0x32C1, 'M', '2月'),
+    (0x32C2, 'M', '3月'),
+    (0x32C3, 'M', '4月'),
+    (0x32C4, 'M', '5月'),
+    (0x32C5, 'M', '6月'),
+    (0x32C6, 'M', '7月'),
+    (0x32C7, 'M', '8月'),
+    (0x32C8, 'M', '9月'),
+    (0x32C9, 'M', '10月'),
+    (0x32CA, 'M', '11月'),
+    (0x32CB, 'M', '12月'),
+    (0x32CC, 'M', 'hg'),
+    (0x32CD, 'M', 'erg'),
+    (0x32CE, 'M', 'ev'),
+    (0x32CF, 'M', 'ltd'),
+    (0x32D0, 'M', 'ア'),
+    (0x32D1, 'M', 'イ'),
+    (0x32D2, 'M', 'ウ'),
+    (0x32D3, 'M', 'エ'),
+    (0x32D4, 'M', 'オ'),
+    (0x32D5, 'M', 'カ'),
+    (0x32D6, 'M', 'キ'),
+    (0x32D7, 'M', 'ク'),
+    (0x32D8, 'M', 'ケ'),
+    (0x32D9, 'M', 'コ'),
+    (0x32DA, 'M', 'サ'),
+    (0x32DB, 'M', 'シ'),
+    (0x32DC, 'M', 'ス'),
+    (0x32DD, 'M', 'セ'),
+    (0x32DE, 'M', 'ソ'),
+    (0x32DF, 'M', 'タ'),
+    (0x32E0, 'M', 'チ'),
+    (0x32E1, 'M', 'ツ'),
+    (0x32E2, 'M', 'テ'),
+    (0x32E3, 'M', 'ト'),
+    (0x32E4, 'M', 'ナ'),
+    (0x32E5, 'M', 'ニ'),
+    (0x32E6, 'M', 'ヌ'),
+    (0x32E7, 'M', 'ネ'),
+    (0x32E8, 'M', 'ノ'),
+    (0x32E9, 'M', 'ハ'),
+    (0x32EA, 'M', 'ヒ'),
+    (0x32EB, 'M', 'フ'),
+    (0x32EC, 'M', 'ヘ'),
+    (0x32ED, 'M', 'ホ'),
+    (0x32EE, 'M', 'マ'),
+    (0x32EF, 'M', 'ミ'),
+    (0x32F0, 'M', 'ム'),
+    (0x32F1, 'M', 'メ'),
+    (0x32F2, 'M', 'モ'),
+    (0x32F3, 'M', 'ヤ'),
+    (0x32F4, 'M', 'ユ'),
+    (0x32F5, 'M', 'ヨ'),
+    (0x32F6, 'M', 'ラ'),
+    (0x32F7, 'M', 'リ'),
+    (0x32F8, 'M', 'ル'),
+    (0x32F9, 'M', 'レ'),
+    (0x32FA, 'M', 'ロ'),
+    (0x32FB, 'M', 'ワ'),
+    (0x32FC, 'M', 'ヰ'),
+    (0x32FD, 'M', 'ヱ'),
+    (0x32FE, 'M', 'ヲ'),
+    (0x32FF, 'M', '令和'),
+    (0x3300, 'M', 'アパート'),
+    (0x3301, 'M', 'アルファ'),
+    (0x3302, 'M', 'アンペア'),
+    (0x3303, 'M', 'アール'),
+    (0x3304, 'M', 'イニング'),
+    (0x3305, 'M', 'インチ'),
+    (0x3306, 'M', 'ウォン'),
+    (0x3307, 'M', 'エスクード'),
+    (0x3308, 'M', 'エーカー'),
+    (0x3309, 'M', 'オンス'),
+    (0x330A, 'M', 'オーム'),
+    (0x330B, 'M', 'カイリ'),
+    (0x330C, 'M', 'カラット'),
+    (0x330D, 'M', 'カロリー'),
+    (0x330E, 'M', 'ガロン'),
+    (0x330F, 'M', 'ガンマ'),
+    (0x3310, 'M', 'ギガ'),
+    (0x3311, 'M', 'ギニー'),
+    (0x3312, 'M', 'キュリー'),
+    (0x3313, 'M', 'ギルダー'),
+    (0x3314, 'M', 'キロ'),
+    (0x3315, 'M', 'キログラム'),
+    ]
+
+def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x3316, 'M', 'キロメートル'),
+    (0x3317, 'M', 'キロワット'),
+    (0x3318, 'M', 'グラム'),
+    (0x3319, 'M', 'グラムトン'),
+    (0x331A, 'M', 'クルゼイロ'),
+    (0x331B, 'M', 'クローネ'),
+    (0x331C, 'M', 'ケース'),
+    (0x331D, 'M', 'コルナ'),
+    (0x331E, 'M', 'コーポ'),
+    (0x331F, 'M', 'サイクル'),
+    (0x3320, 'M', 'サンチーム'),
+    (0x3321, 'M', 'シリング'),
+    (0x3322, 'M', 'センチ'),
+    (0x3323, 'M', 'セント'),
+    (0x3324, 'M', 'ダース'),
+    (0x3325, 'M', 'デシ'),
+    (0x3326, 'M', 'ドル'),
+    (0x3327, 'M', 'トン'),
+    (0x3328, 'M', 'ナノ'),
+    (0x3329, 'M', 'ノット'),
+    (0x332A, 'M', 'ハイツ'),
+    (0x332B, 'M', 'パーセント'),
+    (0x332C, 'M', 'パーツ'),
+    (0x332D, 'M', 'バーレル'),
+    (0x332E, 'M', 'ピアストル'),
+    (0x332F, 'M', 'ピクル'),
+    (0x3330, 'M', 'ピコ'),
+    (0x3331, 'M', 'ビル'),
+    (0x3332, 'M', 'ファラッド'),
+    (0x3333, 'M', 'フィート'),
+    (0x3334, 'M', 'ブッシェル'),
+    (0x3335, 'M', 'フラン'),
+    (0x3336, 'M', 'ヘクタール'),
+    (0x3337, 'M', 'ペソ'),
+    (0x3338, 'M', 'ペニヒ'),
+    (0x3339, 'M', 'ヘルツ'),
+    (0x333A, 'M', 'ペンス'),
+    (0x333B, 'M', 'ページ'),
+    (0x333C, 'M', 'ベータ'),
+    (0x333D, 'M', 'ポイント'),
+    (0x333E, 'M', 'ボルト'),
+    (0x333F, 'M', 'ホン'),
+    (0x3340, 'M', 'ポンド'),
+    (0x3341, 'M', 'ホール'),
+    (0x3342, 'M', 'ホーン'),
+    (0x3343, 'M', 'マイクロ'),
+    (0x3344, 'M', 'マイル'),
+    (0x3345, 'M', 'マッハ'),
+    (0x3346, 'M', 'マルク'),
+    (0x3347, 'M', 'マンション'),
+    (0x3348, 'M', 'ミクロン'),
+    (0x3349, 'M', 'ミリ'),
+    (0x334A, 'M', 'ミリバール'),
+    (0x334B, 'M', 'メガ'),
+    (0x334C, 'M', 'メガトン'),
+    (0x334D, 'M', 'メートル'),
+    (0x334E, 'M', 'ヤード'),
+    (0x334F, 'M', 'ヤール'),
+    (0x3350, 'M', 'ユアン'),
+    (0x3351, 'M', 'リットル'),
+    (0x3352, 'M', 'リラ'),
+    (0x3353, 'M', 'ルピー'),
+    (0x3354, 'M', 'ルーブル'),
+    (0x3355, 'M', 'レム'),
+    (0x3356, 'M', 'レントゲン'),
+    (0x3357, 'M', 'ワット'),
+    (0x3358, 'M', '0点'),
+    (0x3359, 'M', '1点'),
+    (0x335A, 'M', '2点'),
+    (0x335B, 'M', '3点'),
+    (0x335C, 'M', '4点'),
+    (0x335D, 'M', '5点'),
+    (0x335E, 'M', '6点'),
+    (0x335F, 'M', '7点'),
+    (0x3360, 'M', '8点'),
+    (0x3361, 'M', '9点'),
+    (0x3362, 'M', '10点'),
+    (0x3363, 'M', '11点'),
+    (0x3364, 'M', '12点'),
+    (0x3365, 'M', '13点'),
+    (0x3366, 'M', '14点'),
+    (0x3367, 'M', '15点'),
+    (0x3368, 'M', '16点'),
+    (0x3369, 'M', '17点'),
+    (0x336A, 'M', '18点'),
+    (0x336B, 'M', '19点'),
+    (0x336C, 'M', '20点'),
+    (0x336D, 'M', '21点'),
+    (0x336E, 'M', '22点'),
+    (0x336F, 'M', '23点'),
+    (0x3370, 'M', '24点'),
+    (0x3371, 'M', 'hpa'),
+    (0x3372, 'M', 'da'),
+    (0x3373, 'M', 'au'),
+    (0x3374, 'M', 'bar'),
+    (0x3375, 'M', 'ov'),
+    (0x3376, 'M', 'pc'),
+    (0x3377, 'M', 'dm'),
+    (0x3378, 'M', 'dm2'),
+    (0x3379, 'M', 'dm3'),
+    ]
+
+def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x337A, 'M', 'iu'),
+    (0x337B, 'M', '平成'),
+    (0x337C, 'M', '昭和'),
+    (0x337D, 'M', '大正'),
+    (0x337E, 'M', '明治'),
+    (0x337F, 'M', '株式会社'),
+    (0x3380, 'M', 'pa'),
+    (0x3381, 'M', 'na'),
+    (0x3382, 'M', 'μa'),
+    (0x3383, 'M', 'ma'),
+    (0x3384, 'M', 'ka'),
+    (0x3385, 'M', 'kb'),
+    (0x3386, 'M', 'mb'),
+    (0x3387, 'M', 'gb'),
+    (0x3388, 'M', 'cal'),
+    (0x3389, 'M', 'kcal'),
+    (0x338A, 'M', 'pf'),
+    (0x338B, 'M', 'nf'),
+    (0x338C, 'M', 'μf'),
+    (0x338D, 'M', 'μg'),
+    (0x338E, 'M', 'mg'),
+    (0x338F, 'M', 'kg'),
+    (0x3390, 'M', 'hz'),
+    (0x3391, 'M', 'khz'),
+    (0x3392, 'M', 'mhz'),
+    (0x3393, 'M', 'ghz'),
+    (0x3394, 'M', 'thz'),
+    (0x3395, 'M', 'μl'),
+    (0x3396, 'M', 'ml'),
+    (0x3397, 'M', 'dl'),
+    (0x3398, 'M', 'kl'),
+    (0x3399, 'M', 'fm'),
+    (0x339A, 'M', 'nm'),
+    (0x339B, 'M', 'μm'),
+    (0x339C, 'M', 'mm'),
+    (0x339D, 'M', 'cm'),
+    (0x339E, 'M', 'km'),
+    (0x339F, 'M', 'mm2'),
+    (0x33A0, 'M', 'cm2'),
+    (0x33A1, 'M', 'm2'),
+    (0x33A2, 'M', 'km2'),
+    (0x33A3, 'M', 'mm3'),
+    (0x33A4, 'M', 'cm3'),
+    (0x33A5, 'M', 'm3'),
+    (0x33A6, 'M', 'km3'),
+    (0x33A7, 'M', 'm∕s'),
+    (0x33A8, 'M', 'm∕s2'),
+    (0x33A9, 'M', 'pa'),
+    (0x33AA, 'M', 'kpa'),
+    (0x33AB, 'M', 'mpa'),
+    (0x33AC, 'M', 'gpa'),
+    (0x33AD, 'M', 'rad'),
+    (0x33AE, 'M', 'rad∕s'),
+    (0x33AF, 'M', 'rad∕s2'),
+    (0x33B0, 'M', 'ps'),
+    (0x33B1, 'M', 'ns'),
+    (0x33B2, 'M', 'μs'),
+    (0x33B3, 'M', 'ms'),
+    (0x33B4, 'M', 'pv'),
+    (0x33B5, 'M', 'nv'),
+    (0x33B6, 'M', 'μv'),
+    (0x33B7, 'M', 'mv'),
+    (0x33B8, 'M', 'kv'),
+    (0x33B9, 'M', 'mv'),
+    (0x33BA, 'M', 'pw'),
+    (0x33BB, 'M', 'nw'),
+    (0x33BC, 'M', 'μw'),
+    (0x33BD, 'M', 'mw'),
+    (0x33BE, 'M', 'kw'),
+    (0x33BF, 'M', 'mw'),
+    (0x33C0, 'M', 'kω'),
+    (0x33C1, 'M', 'mω'),
+    (0x33C2, 'X'),
+    (0x33C3, 'M', 'bq'),
+    (0x33C4, 'M', 'cc'),
+    (0x33C5, 'M', 'cd'),
+    (0x33C6, 'M', 'c∕kg'),
+    (0x33C7, 'X'),
+    (0x33C8, 'M', 'db'),
+    (0x33C9, 'M', 'gy'),
+    (0x33CA, 'M', 'ha'),
+    (0x33CB, 'M', 'hp'),
+    (0x33CC, 'M', 'in'),
+    (0x33CD, 'M', 'kk'),
+    (0x33CE, 'M', 'km'),
+    (0x33CF, 'M', 'kt'),
+    (0x33D0, 'M', 'lm'),
+    (0x33D1, 'M', 'ln'),
+    (0x33D2, 'M', 'log'),
+    (0x33D3, 'M', 'lx'),
+    (0x33D4, 'M', 'mb'),
+    (0x33D5, 'M', 'mil'),
+    (0x33D6, 'M', 'mol'),
+    (0x33D7, 'M', 'ph'),
+    (0x33D8, 'X'),
+    (0x33D9, 'M', 'ppm'),
+    (0x33DA, 'M', 'pr'),
+    (0x33DB, 'M', 'sr'),
+    (0x33DC, 'M', 'sv'),
+    (0x33DD, 'M', 'wb'),
+    ]
+
+def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x33DE, 'M', 'v∕m'),
+    (0x33DF, 'M', 'a∕m'),
+    (0x33E0, 'M', '1日'),
+    (0x33E1, 'M', '2日'),
+    (0x33E2, 'M', '3日'),
+    (0x33E3, 'M', '4日'),
+    (0x33E4, 'M', '5日'),
+    (0x33E5, 'M', '6日'),
+    (0x33E6, 'M', '7日'),
+    (0x33E7, 'M', '8日'),
+    (0x33E8, 'M', '9日'),
+    (0x33E9, 'M', '10日'),
+    (0x33EA, 'M', '11日'),
+    (0x33EB, 'M', '12日'),
+    (0x33EC, 'M', '13日'),
+    (0x33ED, 'M', '14日'),
+    (0x33EE, 'M', '15日'),
+    (0x33EF, 'M', '16日'),
+    (0x33F0, 'M', '17日'),
+    (0x33F1, 'M', '18日'),
+    (0x33F2, 'M', '19日'),
+    (0x33F3, 'M', '20日'),
+    (0x33F4, 'M', '21日'),
+    (0x33F5, 'M', '22日'),
+    (0x33F6, 'M', '23日'),
+    (0x33F7, 'M', '24日'),
+    (0x33F8, 'M', '25日'),
+    (0x33F9, 'M', '26日'),
+    (0x33FA, 'M', '27日'),
+    (0x33FB, 'M', '28日'),
+    (0x33FC, 'M', '29日'),
+    (0x33FD, 'M', '30日'),
+    (0x33FE, 'M', '31日'),
+    (0x33FF, 'M', 'gal'),
+    (0x3400, 'V'),
+    (0xA48D, 'X'),
+    (0xA490, 'V'),
+    (0xA4C7, 'X'),
+    (0xA4D0, 'V'),
+    (0xA62C, 'X'),
+    (0xA640, 'M', 'ꙁ'),
+    (0xA641, 'V'),
+    (0xA642, 'M', 'ꙃ'),
+    (0xA643, 'V'),
+    (0xA644, 'M', 'ꙅ'),
+    (0xA645, 'V'),
+    (0xA646, 'M', 'ꙇ'),
+    (0xA647, 'V'),
+    (0xA648, 'M', 'ꙉ'),
+    (0xA649, 'V'),
+    (0xA64A, 'M', 'ꙋ'),
+    (0xA64B, 'V'),
+    (0xA64C, 'M', 'ꙍ'),
+    (0xA64D, 'V'),
+    (0xA64E, 'M', 'ꙏ'),
+    (0xA64F, 'V'),
+    (0xA650, 'M', 'ꙑ'),
+    (0xA651, 'V'),
+    (0xA652, 'M', 'ꙓ'),
+    (0xA653, 'V'),
+    (0xA654, 'M', 'ꙕ'),
+    (0xA655, 'V'),
+    (0xA656, 'M', 'ꙗ'),
+    (0xA657, 'V'),
+    (0xA658, 'M', 'ꙙ'),
+    (0xA659, 'V'),
+    (0xA65A, 'M', 'ꙛ'),
+    (0xA65B, 'V'),
+    (0xA65C, 'M', 'ꙝ'),
+    (0xA65D, 'V'),
+    (0xA65E, 'M', 'ꙟ'),
+    (0xA65F, 'V'),
+    (0xA660, 'M', 'ꙡ'),
+    (0xA661, 'V'),
+    (0xA662, 'M', 'ꙣ'),
+    (0xA663, 'V'),
+    (0xA664, 'M', 'ꙥ'),
+    (0xA665, 'V'),
+    (0xA666, 'M', 'ꙧ'),
+    (0xA667, 'V'),
+    (0xA668, 'M', 'ꙩ'),
+    (0xA669, 'V'),
+    (0xA66A, 'M', 'ꙫ'),
+    (0xA66B, 'V'),
+    (0xA66C, 'M', 'ꙭ'),
+    (0xA66D, 'V'),
+    (0xA680, 'M', 'ꚁ'),
+    (0xA681, 'V'),
+    (0xA682, 'M', 'ꚃ'),
+    (0xA683, 'V'),
+    (0xA684, 'M', 'ꚅ'),
+    (0xA685, 'V'),
+    (0xA686, 'M', 'ꚇ'),
+    (0xA687, 'V'),
+    (0xA688, 'M', 'ꚉ'),
+    (0xA689, 'V'),
+    (0xA68A, 'M', 'ꚋ'),
+    (0xA68B, 'V'),
+    (0xA68C, 'M', 'ꚍ'),
+    (0xA68D, 'V'),
+    ]
+
+def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xA68E, 'M', 'ꚏ'),
+    (0xA68F, 'V'),
+    (0xA690, 'M', 'ꚑ'),
+    (0xA691, 'V'),
+    (0xA692, 'M', 'ꚓ'),
+    (0xA693, 'V'),
+    (0xA694, 'M', 'ꚕ'),
+    (0xA695, 'V'),
+    (0xA696, 'M', 'ꚗ'),
+    (0xA697, 'V'),
+    (0xA698, 'M', 'ꚙ'),
+    (0xA699, 'V'),
+    (0xA69A, 'M', 'ꚛ'),
+    (0xA69B, 'V'),
+    (0xA69C, 'M', 'ъ'),
+    (0xA69D, 'M', 'ь'),
+    (0xA69E, 'V'),
+    (0xA6F8, 'X'),
+    (0xA700, 'V'),
+    (0xA722, 'M', 'ꜣ'),
+    (0xA723, 'V'),
+    (0xA724, 'M', 'ꜥ'),
+    (0xA725, 'V'),
+    (0xA726, 'M', 'ꜧ'),
+    (0xA727, 'V'),
+    (0xA728, 'M', 'ꜩ'),
+    (0xA729, 'V'),
+    (0xA72A, 'M', 'ꜫ'),
+    (0xA72B, 'V'),
+    (0xA72C, 'M', 'ꜭ'),
+    (0xA72D, 'V'),
+    (0xA72E, 'M', 'ꜯ'),
+    (0xA72F, 'V'),
+    (0xA732, 'M', 'ꜳ'),
+    (0xA733, 'V'),
+    (0xA734, 'M', 'ꜵ'),
+    (0xA735, 'V'),
+    (0xA736, 'M', 'ꜷ'),
+    (0xA737, 'V'),
+    (0xA738, 'M', 'ꜹ'),
+    (0xA739, 'V'),
+    (0xA73A, 'M', 'ꜻ'),
+    (0xA73B, 'V'),
+    (0xA73C, 'M', 'ꜽ'),
+    (0xA73D, 'V'),
+    (0xA73E, 'M', 'ꜿ'),
+    (0xA73F, 'V'),
+    (0xA740, 'M', 'ꝁ'),
+    (0xA741, 'V'),
+    (0xA742, 'M', 'ꝃ'),
+    (0xA743, 'V'),
+    (0xA744, 'M', 'ꝅ'),
+    (0xA745, 'V'),
+    (0xA746, 'M', 'ꝇ'),
+    (0xA747, 'V'),
+    (0xA748, 'M', 'ꝉ'),
+    (0xA749, 'V'),
+    (0xA74A, 'M', 'ꝋ'),
+    (0xA74B, 'V'),
+    (0xA74C, 'M', 'ꝍ'),
+    (0xA74D, 'V'),
+    (0xA74E, 'M', 'ꝏ'),
+    (0xA74F, 'V'),
+    (0xA750, 'M', 'ꝑ'),
+    (0xA751, 'V'),
+    (0xA752, 'M', 'ꝓ'),
+    (0xA753, 'V'),
+    (0xA754, 'M', 'ꝕ'),
+    (0xA755, 'V'),
+    (0xA756, 'M', 'ꝗ'),
+    (0xA757, 'V'),
+    (0xA758, 'M', 'ꝙ'),
+    (0xA759, 'V'),
+    (0xA75A, 'M', 'ꝛ'),
+    (0xA75B, 'V'),
+    (0xA75C, 'M', 'ꝝ'),
+    (0xA75D, 'V'),
+    (0xA75E, 'M', 'ꝟ'),
+    (0xA75F, 'V'),
+    (0xA760, 'M', 'ꝡ'),
+    (0xA761, 'V'),
+    (0xA762, 'M', 'ꝣ'),
+    (0xA763, 'V'),
+    (0xA764, 'M', 'ꝥ'),
+    (0xA765, 'V'),
+    (0xA766, 'M', 'ꝧ'),
+    (0xA767, 'V'),
+    (0xA768, 'M', 'ꝩ'),
+    (0xA769, 'V'),
+    (0xA76A, 'M', 'ꝫ'),
+    (0xA76B, 'V'),
+    (0xA76C, 'M', 'ꝭ'),
+    (0xA76D, 'V'),
+    (0xA76E, 'M', 'ꝯ'),
+    (0xA76F, 'V'),
+    (0xA770, 'M', 'ꝯ'),
+    (0xA771, 'V'),
+    (0xA779, 'M', 'ꝺ'),
+    (0xA77A, 'V'),
+    (0xA77B, 'M', 'ꝼ'),
+    ]
+
+def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xA77C, 'V'),
+    (0xA77D, 'M', 'ᵹ'),
+    (0xA77E, 'M', 'ꝿ'),
+    (0xA77F, 'V'),
+    (0xA780, 'M', 'ꞁ'),
+    (0xA781, 'V'),
+    (0xA782, 'M', 'ꞃ'),
+    (0xA783, 'V'),
+    (0xA784, 'M', 'ꞅ'),
+    (0xA785, 'V'),
+    (0xA786, 'M', 'ꞇ'),
+    (0xA787, 'V'),
+    (0xA78B, 'M', 'ꞌ'),
+    (0xA78C, 'V'),
+    (0xA78D, 'M', 'ɥ'),
+    (0xA78E, 'V'),
+    (0xA790, 'M', 'ꞑ'),
+    (0xA791, 'V'),
+    (0xA792, 'M', 'ꞓ'),
+    (0xA793, 'V'),
+    (0xA796, 'M', 'ꞗ'),
+    (0xA797, 'V'),
+    (0xA798, 'M', 'ꞙ'),
+    (0xA799, 'V'),
+    (0xA79A, 'M', 'ꞛ'),
+    (0xA79B, 'V'),
+    (0xA79C, 'M', 'ꞝ'),
+    (0xA79D, 'V'),
+    (0xA79E, 'M', 'ꞟ'),
+    (0xA79F, 'V'),
+    (0xA7A0, 'M', 'ꞡ'),
+    (0xA7A1, 'V'),
+    (0xA7A2, 'M', 'ꞣ'),
+    (0xA7A3, 'V'),
+    (0xA7A4, 'M', 'ꞥ'),
+    (0xA7A5, 'V'),
+    (0xA7A6, 'M', 'ꞧ'),
+    (0xA7A7, 'V'),
+    (0xA7A8, 'M', 'ꞩ'),
+    (0xA7A9, 'V'),
+    (0xA7AA, 'M', 'ɦ'),
+    (0xA7AB, 'M', 'ɜ'),
+    (0xA7AC, 'M', 'ɡ'),
+    (0xA7AD, 'M', 'ɬ'),
+    (0xA7AE, 'M', 'ɪ'),
+    (0xA7AF, 'V'),
+    (0xA7B0, 'M', 'ʞ'),
+    (0xA7B1, 'M', 'ʇ'),
+    (0xA7B2, 'M', 'ʝ'),
+    (0xA7B3, 'M', 'ꭓ'),
+    (0xA7B4, 'M', 'ꞵ'),
+    (0xA7B5, 'V'),
+    (0xA7B6, 'M', 'ꞷ'),
+    (0xA7B7, 'V'),
+    (0xA7B8, 'M', 'ꞹ'),
+    (0xA7B9, 'V'),
+    (0xA7BA, 'M', 'ꞻ'),
+    (0xA7BB, 'V'),
+    (0xA7BC, 'M', 'ꞽ'),
+    (0xA7BD, 'V'),
+    (0xA7BE, 'M', 'ꞿ'),
+    (0xA7BF, 'V'),
+    (0xA7C0, 'M', 'ꟁ'),
+    (0xA7C1, 'V'),
+    (0xA7C2, 'M', 'ꟃ'),
+    (0xA7C3, 'V'),
+    (0xA7C4, 'M', 'ꞔ'),
+    (0xA7C5, 'M', 'ʂ'),
+    (0xA7C6, 'M', 'ᶎ'),
+    (0xA7C7, 'M', 'ꟈ'),
+    (0xA7C8, 'V'),
+    (0xA7C9, 'M', 'ꟊ'),
+    (0xA7CA, 'V'),
+    (0xA7CB, 'X'),
+    (0xA7D0, 'M', 'ꟑ'),
+    (0xA7D1, 'V'),
+    (0xA7D2, 'X'),
+    (0xA7D3, 'V'),
+    (0xA7D4, 'X'),
+    (0xA7D5, 'V'),
+    (0xA7D6, 'M', 'ꟗ'),
+    (0xA7D7, 'V'),
+    (0xA7D8, 'M', 'ꟙ'),
+    (0xA7D9, 'V'),
+    (0xA7DA, 'X'),
+    (0xA7F2, 'M', 'c'),
+    (0xA7F3, 'M', 'f'),
+    (0xA7F4, 'M', 'q'),
+    (0xA7F5, 'M', 'ꟶ'),
+    (0xA7F6, 'V'),
+    (0xA7F8, 'M', 'ħ'),
+    (0xA7F9, 'M', 'œ'),
+    (0xA7FA, 'V'),
+    (0xA82D, 'X'),
+    (0xA830, 'V'),
+    (0xA83A, 'X'),
+    (0xA840, 'V'),
+    (0xA878, 'X'),
+    (0xA880, 'V'),
+    (0xA8C6, 'X'),
+    ]
+
+def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xA8CE, 'V'),
+    (0xA8DA, 'X'),
+    (0xA8E0, 'V'),
+    (0xA954, 'X'),
+    (0xA95F, 'V'),
+    (0xA97D, 'X'),
+    (0xA980, 'V'),
+    (0xA9CE, 'X'),
+    (0xA9CF, 'V'),
+    (0xA9DA, 'X'),
+    (0xA9DE, 'V'),
+    (0xA9FF, 'X'),
+    (0xAA00, 'V'),
+    (0xAA37, 'X'),
+    (0xAA40, 'V'),
+    (0xAA4E, 'X'),
+    (0xAA50, 'V'),
+    (0xAA5A, 'X'),
+    (0xAA5C, 'V'),
+    (0xAAC3, 'X'),
+    (0xAADB, 'V'),
+    (0xAAF7, 'X'),
+    (0xAB01, 'V'),
+    (0xAB07, 'X'),
+    (0xAB09, 'V'),
+    (0xAB0F, 'X'),
+    (0xAB11, 'V'),
+    (0xAB17, 'X'),
+    (0xAB20, 'V'),
+    (0xAB27, 'X'),
+    (0xAB28, 'V'),
+    (0xAB2F, 'X'),
+    (0xAB30, 'V'),
+    (0xAB5C, 'M', 'ꜧ'),
+    (0xAB5D, 'M', 'ꬷ'),
+    (0xAB5E, 'M', 'ɫ'),
+    (0xAB5F, 'M', 'ꭒ'),
+    (0xAB60, 'V'),
+    (0xAB69, 'M', 'ʍ'),
+    (0xAB6A, 'V'),
+    (0xAB6C, 'X'),
+    (0xAB70, 'M', 'Ꭰ'),
+    (0xAB71, 'M', 'Ꭱ'),
+    (0xAB72, 'M', 'Ꭲ'),
+    (0xAB73, 'M', 'Ꭳ'),
+    (0xAB74, 'M', 'Ꭴ'),
+    (0xAB75, 'M', 'Ꭵ'),
+    (0xAB76, 'M', 'Ꭶ'),
+    (0xAB77, 'M', 'Ꭷ'),
+    (0xAB78, 'M', 'Ꭸ'),
+    (0xAB79, 'M', 'Ꭹ'),
+    (0xAB7A, 'M', 'Ꭺ'),
+    (0xAB7B, 'M', 'Ꭻ'),
+    (0xAB7C, 'M', 'Ꭼ'),
+    (0xAB7D, 'M', 'Ꭽ'),
+    (0xAB7E, 'M', 'Ꭾ'),
+    (0xAB7F, 'M', 'Ꭿ'),
+    (0xAB80, 'M', 'Ꮀ'),
+    (0xAB81, 'M', 'Ꮁ'),
+    (0xAB82, 'M', 'Ꮂ'),
+    (0xAB83, 'M', 'Ꮃ'),
+    (0xAB84, 'M', 'Ꮄ'),
+    (0xAB85, 'M', 'Ꮅ'),
+    (0xAB86, 'M', 'Ꮆ'),
+    (0xAB87, 'M', 'Ꮇ'),
+    (0xAB88, 'M', 'Ꮈ'),
+    (0xAB89, 'M', 'Ꮉ'),
+    (0xAB8A, 'M', 'Ꮊ'),
+    (0xAB8B, 'M', 'Ꮋ'),
+    (0xAB8C, 'M', 'Ꮌ'),
+    (0xAB8D, 'M', 'Ꮍ'),
+    (0xAB8E, 'M', 'Ꮎ'),
+    (0xAB8F, 'M', 'Ꮏ'),
+    (0xAB90, 'M', 'Ꮐ'),
+    (0xAB91, 'M', 'Ꮑ'),
+    (0xAB92, 'M', 'Ꮒ'),
+    (0xAB93, 'M', 'Ꮓ'),
+    (0xAB94, 'M', 'Ꮔ'),
+    (0xAB95, 'M', 'Ꮕ'),
+    (0xAB96, 'M', 'Ꮖ'),
+    (0xAB97, 'M', 'Ꮗ'),
+    (0xAB98, 'M', 'Ꮘ'),
+    (0xAB99, 'M', 'Ꮙ'),
+    (0xAB9A, 'M', 'Ꮚ'),
+    (0xAB9B, 'M', 'Ꮛ'),
+    (0xAB9C, 'M', 'Ꮜ'),
+    (0xAB9D, 'M', 'Ꮝ'),
+    (0xAB9E, 'M', 'Ꮞ'),
+    (0xAB9F, 'M', 'Ꮟ'),
+    (0xABA0, 'M', 'Ꮠ'),
+    (0xABA1, 'M', 'Ꮡ'),
+    (0xABA2, 'M', 'Ꮢ'),
+    (0xABA3, 'M', 'Ꮣ'),
+    (0xABA4, 'M', 'Ꮤ'),
+    (0xABA5, 'M', 'Ꮥ'),
+    (0xABA6, 'M', 'Ꮦ'),
+    (0xABA7, 'M', 'Ꮧ'),
+    (0xABA8, 'M', 'Ꮨ'),
+    (0xABA9, 'M', 'Ꮩ'),
+    (0xABAA, 'M', 'Ꮪ'),
+    ]
+
+def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xABAB, 'M', 'Ꮫ'),
+    (0xABAC, 'M', 'Ꮬ'),
+    (0xABAD, 'M', 'Ꮭ'),
+    (0xABAE, 'M', 'Ꮮ'),
+    (0xABAF, 'M', 'Ꮯ'),
+    (0xABB0, 'M', 'Ꮰ'),
+    (0xABB1, 'M', 'Ꮱ'),
+    (0xABB2, 'M', 'Ꮲ'),
+    (0xABB3, 'M', 'Ꮳ'),
+    (0xABB4, 'M', 'Ꮴ'),
+    (0xABB5, 'M', 'Ꮵ'),
+    (0xABB6, 'M', 'Ꮶ'),
+    (0xABB7, 'M', 'Ꮷ'),
+    (0xABB8, 'M', 'Ꮸ'),
+    (0xABB9, 'M', 'Ꮹ'),
+    (0xABBA, 'M', 'Ꮺ'),
+    (0xABBB, 'M', 'Ꮻ'),
+    (0xABBC, 'M', 'Ꮼ'),
+    (0xABBD, 'M', 'Ꮽ'),
+    (0xABBE, 'M', 'Ꮾ'),
+    (0xABBF, 'M', 'Ꮿ'),
+    (0xABC0, 'V'),
+    (0xABEE, 'X'),
+    (0xABF0, 'V'),
+    (0xABFA, 'X'),
+    (0xAC00, 'V'),
+    (0xD7A4, 'X'),
+    (0xD7B0, 'V'),
+    (0xD7C7, 'X'),
+    (0xD7CB, 'V'),
+    (0xD7FC, 'X'),
+    (0xF900, 'M', '豈'),
+    (0xF901, 'M', '更'),
+    (0xF902, 'M', '車'),
+    (0xF903, 'M', '賈'),
+    (0xF904, 'M', '滑'),
+    (0xF905, 'M', '串'),
+    (0xF906, 'M', '句'),
+    (0xF907, 'M', '龜'),
+    (0xF909, 'M', '契'),
+    (0xF90A, 'M', '金'),
+    (0xF90B, 'M', '喇'),
+    (0xF90C, 'M', '奈'),
+    (0xF90D, 'M', '懶'),
+    (0xF90E, 'M', '癩'),
+    (0xF90F, 'M', '羅'),
+    (0xF910, 'M', '蘿'),
+    (0xF911, 'M', '螺'),
+    (0xF912, 'M', '裸'),
+    (0xF913, 'M', '邏'),
+    (0xF914, 'M', '樂'),
+    (0xF915, 'M', '洛'),
+    (0xF916, 'M', '烙'),
+    (0xF917, 'M', '珞'),
+    (0xF918, 'M', '落'),
+    (0xF919, 'M', '酪'),
+    (0xF91A, 'M', '駱'),
+    (0xF91B, 'M', '亂'),
+    (0xF91C, 'M', '卵'),
+    (0xF91D, 'M', '欄'),
+    (0xF91E, 'M', '爛'),
+    (0xF91F, 'M', '蘭'),
+    (0xF920, 'M', '鸞'),
+    (0xF921, 'M', '嵐'),
+    (0xF922, 'M', '濫'),
+    (0xF923, 'M', '藍'),
+    (0xF924, 'M', '襤'),
+    (0xF925, 'M', '拉'),
+    (0xF926, 'M', '臘'),
+    (0xF927, 'M', '蠟'),
+    (0xF928, 'M', '廊'),
+    (0xF929, 'M', '朗'),
+    (0xF92A, 'M', '浪'),
+    (0xF92B, 'M', '狼'),
+    (0xF92C, 'M', '郎'),
+    (0xF92D, 'M', '來'),
+    (0xF92E, 'M', '冷'),
+    (0xF92F, 'M', '勞'),
+    (0xF930, 'M', '擄'),
+    (0xF931, 'M', '櫓'),
+    (0xF932, 'M', '爐'),
+    (0xF933, 'M', '盧'),
+    (0xF934, 'M', '老'),
+    (0xF935, 'M', '蘆'),
+    (0xF936, 'M', '虜'),
+    (0xF937, 'M', '路'),
+    (0xF938, 'M', '露'),
+    (0xF939, 'M', '魯'),
+    (0xF93A, 'M', '鷺'),
+    (0xF93B, 'M', '碌'),
+    (0xF93C, 'M', '祿'),
+    (0xF93D, 'M', '綠'),
+    (0xF93E, 'M', '菉'),
+    (0xF93F, 'M', '錄'),
+    (0xF940, 'M', '鹿'),
+    (0xF941, 'M', '論'),
+    (0xF942, 'M', '壟'),
+    (0xF943, 'M', '弄'),
+    (0xF944, 'M', '籠'),
+    (0xF945, 'M', '聾'),
+    ]
+
+def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xF946, 'M', '牢'),
+    (0xF947, 'M', '磊'),
+    (0xF948, 'M', '賂'),
+    (0xF949, 'M', '雷'),
+    (0xF94A, 'M', '壘'),
+    (0xF94B, 'M', '屢'),
+    (0xF94C, 'M', '樓'),
+    (0xF94D, 'M', '淚'),
+    (0xF94E, 'M', '漏'),
+    (0xF94F, 'M', '累'),
+    (0xF950, 'M', '縷'),
+    (0xF951, 'M', '陋'),
+    (0xF952, 'M', '勒'),
+    (0xF953, 'M', '肋'),
+    (0xF954, 'M', '凜'),
+    (0xF955, 'M', '凌'),
+    (0xF956, 'M', '稜'),
+    (0xF957, 'M', '綾'),
+    (0xF958, 'M', '菱'),
+    (0xF959, 'M', '陵'),
+    (0xF95A, 'M', '讀'),
+    (0xF95B, 'M', '拏'),
+    (0xF95C, 'M', '樂'),
+    (0xF95D, 'M', '諾'),
+    (0xF95E, 'M', '丹'),
+    (0xF95F, 'M', '寧'),
+    (0xF960, 'M', '怒'),
+    (0xF961, 'M', '率'),
+    (0xF962, 'M', '異'),
+    (0xF963, 'M', '北'),
+    (0xF964, 'M', '磻'),
+    (0xF965, 'M', '便'),
+    (0xF966, 'M', '復'),
+    (0xF967, 'M', '不'),
+    (0xF968, 'M', '泌'),
+    (0xF969, 'M', '數'),
+    (0xF96A, 'M', '索'),
+    (0xF96B, 'M', '參'),
+    (0xF96C, 'M', '塞'),
+    (0xF96D, 'M', '省'),
+    (0xF96E, 'M', '葉'),
+    (0xF96F, 'M', '說'),
+    (0xF970, 'M', '殺'),
+    (0xF971, 'M', '辰'),
+    (0xF972, 'M', '沈'),
+    (0xF973, 'M', '拾'),
+    (0xF974, 'M', '若'),
+    (0xF975, 'M', '掠'),
+    (0xF976, 'M', '略'),
+    (0xF977, 'M', '亮'),
+    (0xF978, 'M', '兩'),
+    (0xF979, 'M', '凉'),
+    (0xF97A, 'M', '梁'),
+    (0xF97B, 'M', '糧'),
+    (0xF97C, 'M', '良'),
+    (0xF97D, 'M', '諒'),
+    (0xF97E, 'M', '量'),
+    (0xF97F, 'M', '勵'),
+    (0xF980, 'M', '呂'),
+    (0xF981, 'M', '女'),
+    (0xF982, 'M', '廬'),
+    (0xF983, 'M', '旅'),
+    (0xF984, 'M', '濾'),
+    (0xF985, 'M', '礪'),
+    (0xF986, 'M', '閭'),
+    (0xF987, 'M', '驪'),
+    (0xF988, 'M', '麗'),
+    (0xF989, 'M', '黎'),
+    (0xF98A, 'M', '力'),
+    (0xF98B, 'M', '曆'),
+    (0xF98C, 'M', '歷'),
+    (0xF98D, 'M', '轢'),
+    (0xF98E, 'M', '年'),
+    (0xF98F, 'M', '憐'),
+    (0xF990, 'M', '戀'),
+    (0xF991, 'M', '撚'),
+    (0xF992, 'M', '漣'),
+    (0xF993, 'M', '煉'),
+    (0xF994, 'M', '璉'),
+    (0xF995, 'M', '秊'),
+    (0xF996, 'M', '練'),
+    (0xF997, 'M', '聯'),
+    (0xF998, 'M', '輦'),
+    (0xF999, 'M', '蓮'),
+    (0xF99A, 'M', '連'),
+    (0xF99B, 'M', '鍊'),
+    (0xF99C, 'M', '列'),
+    (0xF99D, 'M', '劣'),
+    (0xF99E, 'M', '咽'),
+    (0xF99F, 'M', '烈'),
+    (0xF9A0, 'M', '裂'),
+    (0xF9A1, 'M', '說'),
+    (0xF9A2, 'M', '廉'),
+    (0xF9A3, 'M', '念'),
+    (0xF9A4, 'M', '捻'),
+    (0xF9A5, 'M', '殮'),
+    (0xF9A6, 'M', '簾'),
+    (0xF9A7, 'M', '獵'),
+    (0xF9A8, 'M', '令'),
+    (0xF9A9, 'M', '囹'),
+    ]
+
+def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xF9AA, 'M', '寧'),
+    (0xF9AB, 'M', '嶺'),
+    (0xF9AC, 'M', '怜'),
+    (0xF9AD, 'M', '玲'),
+    (0xF9AE, 'M', '瑩'),
+    (0xF9AF, 'M', '羚'),
+    (0xF9B0, 'M', '聆'),
+    (0xF9B1, 'M', '鈴'),
+    (0xF9B2, 'M', '零'),
+    (0xF9B3, 'M', '靈'),
+    (0xF9B4, 'M', '領'),
+    (0xF9B5, 'M', '例'),
+    (0xF9B6, 'M', '禮'),
+    (0xF9B7, 'M', '醴'),
+    (0xF9B8, 'M', '隸'),
+    (0xF9B9, 'M', '惡'),
+    (0xF9BA, 'M', '了'),
+    (0xF9BB, 'M', '僚'),
+    (0xF9BC, 'M', '寮'),
+    (0xF9BD, 'M', '尿'),
+    (0xF9BE, 'M', '料'),
+    (0xF9BF, 'M', '樂'),
+    (0xF9C0, 'M', '燎'),
+    (0xF9C1, 'M', '療'),
+    (0xF9C2, 'M', '蓼'),
+    (0xF9C3, 'M', '遼'),
+    (0xF9C4, 'M', '龍'),
+    (0xF9C5, 'M', '暈'),
+    (0xF9C6, 'M', '阮'),
+    (0xF9C7, 'M', '劉'),
+    (0xF9C8, 'M', '杻'),
+    (0xF9C9, 'M', '柳'),
+    (0xF9CA, 'M', '流'),
+    (0xF9CB, 'M', '溜'),
+    (0xF9CC, 'M', '琉'),
+    (0xF9CD, 'M', '留'),
+    (0xF9CE, 'M', '硫'),
+    (0xF9CF, 'M', '紐'),
+    (0xF9D0, 'M', '類'),
+    (0xF9D1, 'M', '六'),
+    (0xF9D2, 'M', '戮'),
+    (0xF9D3, 'M', '陸'),
+    (0xF9D4, 'M', '倫'),
+    (0xF9D5, 'M', '崙'),
+    (0xF9D6, 'M', '淪'),
+    (0xF9D7, 'M', '輪'),
+    (0xF9D8, 'M', '律'),
+    (0xF9D9, 'M', '慄'),
+    (0xF9DA, 'M', '栗'),
+    (0xF9DB, 'M', '率'),
+    (0xF9DC, 'M', '隆'),
+    (0xF9DD, 'M', '利'),
+    (0xF9DE, 'M', '吏'),
+    (0xF9DF, 'M', '履'),
+    (0xF9E0, 'M', '易'),
+    (0xF9E1, 'M', '李'),
+    (0xF9E2, 'M', '梨'),
+    (0xF9E3, 'M', '泥'),
+    (0xF9E4, 'M', '理'),
+    (0xF9E5, 'M', '痢'),
+    (0xF9E6, 'M', '罹'),
+    (0xF9E7, 'M', '裏'),
+    (0xF9E8, 'M', '裡'),
+    (0xF9E9, 'M', '里'),
+    (0xF9EA, 'M', '離'),
+    (0xF9EB, 'M', '匿'),
+    (0xF9EC, 'M', '溺'),
+    (0xF9ED, 'M', '吝'),
+    (0xF9EE, 'M', '燐'),
+    (0xF9EF, 'M', '璘'),
+    (0xF9F0, 'M', '藺'),
+    (0xF9F1, 'M', '隣'),
+    (0xF9F2, 'M', '鱗'),
+    (0xF9F3, 'M', '麟'),
+    (0xF9F4, 'M', '林'),
+    (0xF9F5, 'M', '淋'),
+    (0xF9F6, 'M', '臨'),
+    (0xF9F7, 'M', '立'),
+    (0xF9F8, 'M', '笠'),
+    (0xF9F9, 'M', '粒'),
+    (0xF9FA, 'M', '狀'),
+    (0xF9FB, 'M', '炙'),
+    (0xF9FC, 'M', '識'),
+    (0xF9FD, 'M', '什'),
+    (0xF9FE, 'M', '茶'),
+    (0xF9FF, 'M', '刺'),
+    (0xFA00, 'M', '切'),
+    (0xFA01, 'M', '度'),
+    (0xFA02, 'M', '拓'),
+    (0xFA03, 'M', '糖'),
+    (0xFA04, 'M', '宅'),
+    (0xFA05, 'M', '洞'),
+    (0xFA06, 'M', '暴'),
+    (0xFA07, 'M', '輻'),
+    (0xFA08, 'M', '行'),
+    (0xFA09, 'M', '降'),
+    (0xFA0A, 'M', '見'),
+    (0xFA0B, 'M', '廓'),
+    (0xFA0C, 'M', '兀'),
+    (0xFA0D, 'M', '嗀'),
+    ]
+
+def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xFA0E, 'V'),
+    (0xFA10, 'M', '塚'),
+    (0xFA11, 'V'),
+    (0xFA12, 'M', '晴'),
+    (0xFA13, 'V'),
+    (0xFA15, 'M', '凞'),
+    (0xFA16, 'M', '猪'),
+    (0xFA17, 'M', '益'),
+    (0xFA18, 'M', '礼'),
+    (0xFA19, 'M', '神'),
+    (0xFA1A, 'M', '祥'),
+    (0xFA1B, 'M', '福'),
+    (0xFA1C, 'M', '靖'),
+    (0xFA1D, 'M', '精'),
+    (0xFA1E, 'M', '羽'),
+    (0xFA1F, 'V'),
+    (0xFA20, 'M', '蘒'),
+    (0xFA21, 'V'),
+    (0xFA22, 'M', '諸'),
+    (0xFA23, 'V'),
+    (0xFA25, 'M', '逸'),
+    (0xFA26, 'M', '都'),
+    (0xFA27, 'V'),
+    (0xFA2A, 'M', '飯'),
+    (0xFA2B, 'M', '飼'),
+    (0xFA2C, 'M', '館'),
+    (0xFA2D, 'M', '鶴'),
+    (0xFA2E, 'M', '郞'),
+    (0xFA2F, 'M', '隷'),
+    (0xFA30, 'M', '侮'),
+    (0xFA31, 'M', '僧'),
+    (0xFA32, 'M', '免'),
+    (0xFA33, 'M', '勉'),
+    (0xFA34, 'M', '勤'),
+    (0xFA35, 'M', '卑'),
+    (0xFA36, 'M', '喝'),
+    (0xFA37, 'M', '嘆'),
+    (0xFA38, 'M', '器'),
+    (0xFA39, 'M', '塀'),
+    (0xFA3A, 'M', '墨'),
+    (0xFA3B, 'M', '層'),
+    (0xFA3C, 'M', '屮'),
+    (0xFA3D, 'M', '悔'),
+    (0xFA3E, 'M', '慨'),
+    (0xFA3F, 'M', '憎'),
+    (0xFA40, 'M', '懲'),
+    (0xFA41, 'M', '敏'),
+    (0xFA42, 'M', '既'),
+    (0xFA43, 'M', '暑'),
+    (0xFA44, 'M', '梅'),
+    (0xFA45, 'M', '海'),
+    (0xFA46, 'M', '渚'),
+    (0xFA47, 'M', '漢'),
+    (0xFA48, 'M', '煮'),
+    (0xFA49, 'M', '爫'),
+    (0xFA4A, 'M', '琢'),
+    (0xFA4B, 'M', '碑'),
+    (0xFA4C, 'M', '社'),
+    (0xFA4D, 'M', '祉'),
+    (0xFA4E, 'M', '祈'),
+    (0xFA4F, 'M', '祐'),
+    (0xFA50, 'M', '祖'),
+    (0xFA51, 'M', '祝'),
+    (0xFA52, 'M', '禍'),
+    (0xFA53, 'M', '禎'),
+    (0xFA54, 'M', '穀'),
+    (0xFA55, 'M', '突'),
+    (0xFA56, 'M', '節'),
+    (0xFA57, 'M', '練'),
+    (0xFA58, 'M', '縉'),
+    (0xFA59, 'M', '繁'),
+    (0xFA5A, 'M', '署'),
+    (0xFA5B, 'M', '者'),
+    (0xFA5C, 'M', '臭'),
+    (0xFA5D, 'M', '艹'),
+    (0xFA5F, 'M', '著'),
+    (0xFA60, 'M', '褐'),
+    (0xFA61, 'M', '視'),
+    (0xFA62, 'M', '謁'),
+    (0xFA63, 'M', '謹'),
+    (0xFA64, 'M', '賓'),
+    (0xFA65, 'M', '贈'),
+    (0xFA66, 'M', '辶'),
+    (0xFA67, 'M', '逸'),
+    (0xFA68, 'M', '難'),
+    (0xFA69, 'M', '響'),
+    (0xFA6A, 'M', '頻'),
+    (0xFA6B, 'M', '恵'),
+    (0xFA6C, 'M', '𤋮'),
+    (0xFA6D, 'M', '舘'),
+    (0xFA6E, 'X'),
+    (0xFA70, 'M', '並'),
+    (0xFA71, 'M', '况'),
+    (0xFA72, 'M', '全'),
+    (0xFA73, 'M', '侀'),
+    (0xFA74, 'M', '充'),
+    (0xFA75, 'M', '冀'),
+    (0xFA76, 'M', '勇'),
+    (0xFA77, 'M', '勺'),
+    (0xFA78, 'M', '喝'),
+    ]
+
+def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xFA79, 'M', '啕'),
+    (0xFA7A, 'M', '喙'),
+    (0xFA7B, 'M', '嗢'),
+    (0xFA7C, 'M', '塚'),
+    (0xFA7D, 'M', '墳'),
+    (0xFA7E, 'M', '奄'),
+    (0xFA7F, 'M', '奔'),
+    (0xFA80, 'M', '婢'),
+    (0xFA81, 'M', '嬨'),
+    (0xFA82, 'M', '廒'),
+    (0xFA83, 'M', '廙'),
+    (0xFA84, 'M', '彩'),
+    (0xFA85, 'M', '徭'),
+    (0xFA86, 'M', '惘'),
+    (0xFA87, 'M', '慎'),
+    (0xFA88, 'M', '愈'),
+    (0xFA89, 'M', '憎'),
+    (0xFA8A, 'M', '慠'),
+    (0xFA8B, 'M', '懲'),
+    (0xFA8C, 'M', '戴'),
+    (0xFA8D, 'M', '揄'),
+    (0xFA8E, 'M', '搜'),
+    (0xFA8F, 'M', '摒'),
+    (0xFA90, 'M', '敖'),
+    (0xFA91, 'M', '晴'),
+    (0xFA92, 'M', '朗'),
+    (0xFA93, 'M', '望'),
+    (0xFA94, 'M', '杖'),
+    (0xFA95, 'M', '歹'),
+    (0xFA96, 'M', '殺'),
+    (0xFA97, 'M', '流'),
+    (0xFA98, 'M', '滛'),
+    (0xFA99, 'M', '滋'),
+    (0xFA9A, 'M', '漢'),
+    (0xFA9B, 'M', '瀞'),
+    (0xFA9C, 'M', '煮'),
+    (0xFA9D, 'M', '瞧'),
+    (0xFA9E, 'M', '爵'),
+    (0xFA9F, 'M', '犯'),
+    (0xFAA0, 'M', '猪'),
+    (0xFAA1, 'M', '瑱'),
+    (0xFAA2, 'M', '甆'),
+    (0xFAA3, 'M', '画'),
+    (0xFAA4, 'M', '瘝'),
+    (0xFAA5, 'M', '瘟'),
+    (0xFAA6, 'M', '益'),
+    (0xFAA7, 'M', '盛'),
+    (0xFAA8, 'M', '直'),
+    (0xFAA9, 'M', '睊'),
+    (0xFAAA, 'M', '着'),
+    (0xFAAB, 'M', '磌'),
+    (0xFAAC, 'M', '窱'),
+    (0xFAAD, 'M', '節'),
+    (0xFAAE, 'M', '类'),
+    (0xFAAF, 'M', '絛'),
+    (0xFAB0, 'M', '練'),
+    (0xFAB1, 'M', '缾'),
+    (0xFAB2, 'M', '者'),
+    (0xFAB3, 'M', '荒'),
+    (0xFAB4, 'M', '華'),
+    (0xFAB5, 'M', '蝹'),
+    (0xFAB6, 'M', '襁'),
+    (0xFAB7, 'M', '覆'),
+    (0xFAB8, 'M', '視'),
+    (0xFAB9, 'M', '調'),
+    (0xFABA, 'M', '諸'),
+    (0xFABB, 'M', '請'),
+    (0xFABC, 'M', '謁'),
+    (0xFABD, 'M', '諾'),
+    (0xFABE, 'M', '諭'),
+    (0xFABF, 'M', '謹'),
+    (0xFAC0, 'M', '變'),
+    (0xFAC1, 'M', '贈'),
+    (0xFAC2, 'M', '輸'),
+    (0xFAC3, 'M', '遲'),
+    (0xFAC4, 'M', '醙'),
+    (0xFAC5, 'M', '鉶'),
+    (0xFAC6, 'M', '陼'),
+    (0xFAC7, 'M', '難'),
+    (0xFAC8, 'M', '靖'),
+    (0xFAC9, 'M', '韛'),
+    (0xFACA, 'M', '響'),
+    (0xFACB, 'M', '頋'),
+    (0xFACC, 'M', '頻'),
+    (0xFACD, 'M', '鬒'),
+    (0xFACE, 'M', '龜'),
+    (0xFACF, 'M', '𢡊'),
+    (0xFAD0, 'M', '𢡄'),
+    (0xFAD1, 'M', '𣏕'),
+    (0xFAD2, 'M', '㮝'),
+    (0xFAD3, 'M', '䀘'),
+    (0xFAD4, 'M', '䀹'),
+    (0xFAD5, 'M', '𥉉'),
+    (0xFAD6, 'M', '𥳐'),
+    (0xFAD7, 'M', '𧻓'),
+    (0xFAD8, 'M', '齃'),
+    (0xFAD9, 'M', '龎'),
+    (0xFADA, 'X'),
+    (0xFB00, 'M', 'ff'),
+    (0xFB01, 'M', 'fi'),
+    ]
+
+def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xFB02, 'M', 'fl'),
+    (0xFB03, 'M', 'ffi'),
+    (0xFB04, 'M', 'ffl'),
+    (0xFB05, 'M', 'st'),
+    (0xFB07, 'X'),
+    (0xFB13, 'M', 'մն'),
+    (0xFB14, 'M', 'մե'),
+    (0xFB15, 'M', 'մի'),
+    (0xFB16, 'M', 'վն'),
+    (0xFB17, 'M', 'մխ'),
+    (0xFB18, 'X'),
+    (0xFB1D, 'M', 'יִ'),
+    (0xFB1E, 'V'),
+    (0xFB1F, 'M', 'ײַ'),
+    (0xFB20, 'M', 'ע'),
+    (0xFB21, 'M', 'א'),
+    (0xFB22, 'M', 'ד'),
+    (0xFB23, 'M', 'ה'),
+    (0xFB24, 'M', 'כ'),
+    (0xFB25, 'M', 'ל'),
+    (0xFB26, 'M', 'ם'),
+    (0xFB27, 'M', 'ר'),
+    (0xFB28, 'M', 'ת'),
+    (0xFB29, '3', '+'),
+    (0xFB2A, 'M', 'שׁ'),
+    (0xFB2B, 'M', 'שׂ'),
+    (0xFB2C, 'M', 'שּׁ'),
+    (0xFB2D, 'M', 'שּׂ'),
+    (0xFB2E, 'M', 'אַ'),
+    (0xFB2F, 'M', 'אָ'),
+    (0xFB30, 'M', 'אּ'),
+    (0xFB31, 'M', 'בּ'),
+    (0xFB32, 'M', 'גּ'),
+    (0xFB33, 'M', 'דּ'),
+    (0xFB34, 'M', 'הּ'),
+    (0xFB35, 'M', 'וּ'),
+    (0xFB36, 'M', 'זּ'),
+    (0xFB37, 'X'),
+    (0xFB38, 'M', 'טּ'),
+    (0xFB39, 'M', 'יּ'),
+    (0xFB3A, 'M', 'ךּ'),
+    (0xFB3B, 'M', 'כּ'),
+    (0xFB3C, 'M', 'לּ'),
+    (0xFB3D, 'X'),
+    (0xFB3E, 'M', 'מּ'),
+    (0xFB3F, 'X'),
+    (0xFB40, 'M', 'נּ'),
+    (0xFB41, 'M', 'סּ'),
+    (0xFB42, 'X'),
+    (0xFB43, 'M', 'ףּ'),
+    (0xFB44, 'M', 'פּ'),
+    (0xFB45, 'X'),
+    (0xFB46, 'M', 'צּ'),
+    (0xFB47, 'M', 'קּ'),
+    (0xFB48, 'M', 'רּ'),
+    (0xFB49, 'M', 'שּ'),
+    (0xFB4A, 'M', 'תּ'),
+    (0xFB4B, 'M', 'וֹ'),
+    (0xFB4C, 'M', 'בֿ'),
+    (0xFB4D, 'M', 'כֿ'),
+    (0xFB4E, 'M', 'פֿ'),
+    (0xFB4F, 'M', 'אל'),
+    (0xFB50, 'M', 'ٱ'),
+    (0xFB52, 'M', 'ٻ'),
+    (0xFB56, 'M', 'پ'),
+    (0xFB5A, 'M', 'ڀ'),
+    (0xFB5E, 'M', 'ٺ'),
+    (0xFB62, 'M', 'ٿ'),
+    (0xFB66, 'M', 'ٹ'),
+    (0xFB6A, 'M', 'ڤ'),
+    (0xFB6E, 'M', 'ڦ'),
+    (0xFB72, 'M', 'ڄ'),
+    (0xFB76, 'M', 'ڃ'),
+    (0xFB7A, 'M', 'چ'),
+    (0xFB7E, 'M', 'ڇ'),
+    (0xFB82, 'M', 'ڍ'),
+    (0xFB84, 'M', 'ڌ'),
+    (0xFB86, 'M', 'ڎ'),
+    (0xFB88, 'M', 'ڈ'),
+    (0xFB8A, 'M', 'ژ'),
+    (0xFB8C, 'M', 'ڑ'),
+    (0xFB8E, 'M', 'ک'),
+    (0xFB92, 'M', 'گ'),
+    (0xFB96, 'M', 'ڳ'),
+    (0xFB9A, 'M', 'ڱ'),
+    (0xFB9E, 'M', 'ں'),
+    (0xFBA0, 'M', 'ڻ'),
+    (0xFBA4, 'M', 'ۀ'),
+    (0xFBA6, 'M', 'ہ'),
+    (0xFBAA, 'M', 'ھ'),
+    (0xFBAE, 'M', 'ے'),
+    (0xFBB0, 'M', 'ۓ'),
+    (0xFBB2, 'V'),
+    (0xFBC3, 'X'),
+    (0xFBD3, 'M', 'ڭ'),
+    (0xFBD7, 'M', 'ۇ'),
+    (0xFBD9, 'M', 'ۆ'),
+    (0xFBDB, 'M', 'ۈ'),
+    (0xFBDD, 'M', 'ۇٴ'),
+    (0xFBDE, 'M', 'ۋ'),
+    ]
+
+def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xFBE0, 'M', 'ۅ'),
+    (0xFBE2, 'M', 'ۉ'),
+    (0xFBE4, 'M', 'ې'),
+    (0xFBE8, 'M', 'ى'),
+    (0xFBEA, 'M', 'ئا'),
+    (0xFBEC, 'M', 'ئە'),
+    (0xFBEE, 'M', 'ئو'),
+    (0xFBF0, 'M', 'ئۇ'),
+    (0xFBF2, 'M', 'ئۆ'),
+    (0xFBF4, 'M', 'ئۈ'),
+    (0xFBF6, 'M', 'ئې'),
+    (0xFBF9, 'M', 'ئى'),
+    (0xFBFC, 'M', 'ی'),
+    (0xFC00, 'M', 'ئج'),
+    (0xFC01, 'M', 'ئح'),
+    (0xFC02, 'M', 'ئم'),
+    (0xFC03, 'M', 'ئى'),
+    (0xFC04, 'M', 'ئي'),
+    (0xFC05, 'M', 'بج'),
+    (0xFC06, 'M', 'بح'),
+    (0xFC07, 'M', 'بخ'),
+    (0xFC08, 'M', 'بم'),
+    (0xFC09, 'M', 'بى'),
+    (0xFC0A, 'M', 'بي'),
+    (0xFC0B, 'M', 'تج'),
+    (0xFC0C, 'M', 'تح'),
+    (0xFC0D, 'M', 'تخ'),
+    (0xFC0E, 'M', 'تم'),
+    (0xFC0F, 'M', 'تى'),
+    (0xFC10, 'M', 'تي'),
+    (0xFC11, 'M', 'ثج'),
+    (0xFC12, 'M', 'ثم'),
+    (0xFC13, 'M', 'ثى'),
+    (0xFC14, 'M', 'ثي'),
+    (0xFC15, 'M', 'جح'),
+    (0xFC16, 'M', 'جم'),
+    (0xFC17, 'M', 'حج'),
+    (0xFC18, 'M', 'حم'),
+    (0xFC19, 'M', 'خج'),
+    (0xFC1A, 'M', 'خح'),
+    (0xFC1B, 'M', 'خم'),
+    (0xFC1C, 'M', 'سج'),
+    (0xFC1D, 'M', 'سح'),
+    (0xFC1E, 'M', 'سخ'),
+    (0xFC1F, 'M', 'سم'),
+    (0xFC20, 'M', 'صح'),
+    (0xFC21, 'M', 'صم'),
+    (0xFC22, 'M', 'ضج'),
+    (0xFC23, 'M', 'ضح'),
+    (0xFC24, 'M', 'ضخ'),
+    (0xFC25, 'M', 'ضم'),
+    (0xFC26, 'M', 'طح'),
+    (0xFC27, 'M', 'طم'),
+    (0xFC28, 'M', 'ظم'),
+    (0xFC29, 'M', 'عج'),
+    (0xFC2A, 'M', 'عم'),
+    (0xFC2B, 'M', 'غج'),
+    (0xFC2C, 'M', 'غم'),
+    (0xFC2D, 'M', 'فج'),
+    (0xFC2E, 'M', 'فح'),
+    (0xFC2F, 'M', 'فخ'),
+    (0xFC30, 'M', 'فم'),
+    (0xFC31, 'M', 'فى'),
+    (0xFC32, 'M', 'في'),
+    (0xFC33, 'M', 'قح'),
+    (0xFC34, 'M', 'قم'),
+    (0xFC35, 'M', 'قى'),
+    (0xFC36, 'M', 'قي'),
+    (0xFC37, 'M', 'كا'),
+    (0xFC38, 'M', 'كج'),
+    (0xFC39, 'M', 'كح'),
+    (0xFC3A, 'M', 'كخ'),
+    (0xFC3B, 'M', 'كل'),
+    (0xFC3C, 'M', 'كم'),
+    (0xFC3D, 'M', 'كى'),
+    (0xFC3E, 'M', 'كي'),
+    (0xFC3F, 'M', 'لج'),
+    (0xFC40, 'M', 'لح'),
+    (0xFC41, 'M', 'لخ'),
+    (0xFC42, 'M', 'لم'),
+    (0xFC43, 'M', 'لى'),
+    (0xFC44, 'M', 'لي'),
+    (0xFC45, 'M', 'مج'),
+    (0xFC46, 'M', 'مح'),
+    (0xFC47, 'M', 'مخ'),
+    (0xFC48, 'M', 'مم'),
+    (0xFC49, 'M', 'مى'),
+    (0xFC4A, 'M', 'مي'),
+    (0xFC4B, 'M', 'نج'),
+    (0xFC4C, 'M', 'نح'),
+    (0xFC4D, 'M', 'نخ'),
+    (0xFC4E, 'M', 'نم'),
+    (0xFC4F, 'M', 'نى'),
+    (0xFC50, 'M', 'ني'),
+    (0xFC51, 'M', 'هج'),
+    (0xFC52, 'M', 'هم'),
+    (0xFC53, 'M', 'هى'),
+    (0xFC54, 'M', 'هي'),
+    (0xFC55, 'M', 'يج'),
+    (0xFC56, 'M', 'يح'),
+    ]
+
+def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xFC57, 'M', 'يخ'),
+    (0xFC58, 'M', 'يم'),
+    (0xFC59, 'M', 'يى'),
+    (0xFC5A, 'M', 'يي'),
+    (0xFC5B, 'M', 'ذٰ'),
+    (0xFC5C, 'M', 'رٰ'),
+    (0xFC5D, 'M', 'ىٰ'),
+    (0xFC5E, '3', ' ٌّ'),
+    (0xFC5F, '3', ' ٍّ'),
+    (0xFC60, '3', ' َّ'),
+    (0xFC61, '3', ' ُّ'),
+    (0xFC62, '3', ' ِّ'),
+    (0xFC63, '3', ' ّٰ'),
+    (0xFC64, 'M', 'ئر'),
+    (0xFC65, 'M', 'ئز'),
+    (0xFC66, 'M', 'ئم'),
+    (0xFC67, 'M', 'ئن'),
+    (0xFC68, 'M', 'ئى'),
+    (0xFC69, 'M', 'ئي'),
+    (0xFC6A, 'M', 'بر'),
+    (0xFC6B, 'M', 'بز'),
+    (0xFC6C, 'M', 'بم'),
+    (0xFC6D, 'M', 'بن'),
+    (0xFC6E, 'M', 'بى'),
+    (0xFC6F, 'M', 'بي'),
+    (0xFC70, 'M', 'تر'),
+    (0xFC71, 'M', 'تز'),
+    (0xFC72, 'M', 'تم'),
+    (0xFC73, 'M', 'تن'),
+    (0xFC74, 'M', 'تى'),
+    (0xFC75, 'M', 'تي'),
+    (0xFC76, 'M', 'ثر'),
+    (0xFC77, 'M', 'ثز'),
+    (0xFC78, 'M', 'ثم'),
+    (0xFC79, 'M', 'ثن'),
+    (0xFC7A, 'M', 'ثى'),
+    (0xFC7B, 'M', 'ثي'),
+    (0xFC7C, 'M', 'فى'),
+    (0xFC7D, 'M', 'في'),
+    (0xFC7E, 'M', 'قى'),
+    (0xFC7F, 'M', 'قي'),
+    (0xFC80, 'M', 'كا'),
+    (0xFC81, 'M', 'كل'),
+    (0xFC82, 'M', 'كم'),
+    (0xFC83, 'M', 'كى'),
+    (0xFC84, 'M', 'كي'),
+    (0xFC85, 'M', 'لم'),
+    (0xFC86, 'M', 'لى'),
+    (0xFC87, 'M', 'لي'),
+    (0xFC88, 'M', 'ما'),
+    (0xFC89, 'M', 'مم'),
+    (0xFC8A, 'M', 'نر'),
+    (0xFC8B, 'M', 'نز'),
+    (0xFC8C, 'M', 'نم'),
+    (0xFC8D, 'M', 'نن'),
+    (0xFC8E, 'M', 'نى'),
+    (0xFC8F, 'M', 'ني'),
+    (0xFC90, 'M', 'ىٰ'),
+    (0xFC91, 'M', 'ير'),
+    (0xFC92, 'M', 'يز'),
+    (0xFC93, 'M', 'يم'),
+    (0xFC94, 'M', 'ين'),
+    (0xFC95, 'M', 'يى'),
+    (0xFC96, 'M', 'يي'),
+    (0xFC97, 'M', 'ئج'),
+    (0xFC98, 'M', 'ئح'),
+    (0xFC99, 'M', 'ئخ'),
+    (0xFC9A, 'M', 'ئم'),
+    (0xFC9B, 'M', 'ئه'),
+    (0xFC9C, 'M', 'بج'),
+    (0xFC9D, 'M', 'بح'),
+    (0xFC9E, 'M', 'بخ'),
+    (0xFC9F, 'M', 'بم'),
+    (0xFCA0, 'M', 'به'),
+    (0xFCA1, 'M', 'تج'),
+    (0xFCA2, 'M', 'تح'),
+    (0xFCA3, 'M', 'تخ'),
+    (0xFCA4, 'M', 'تم'),
+    (0xFCA5, 'M', 'ته'),
+    (0xFCA6, 'M', 'ثم'),
+    (0xFCA7, 'M', 'جح'),
+    (0xFCA8, 'M', 'جم'),
+    (0xFCA9, 'M', 'حج'),
+    (0xFCAA, 'M', 'حم'),
+    (0xFCAB, 'M', 'خج'),
+    (0xFCAC, 'M', 'خم'),
+    (0xFCAD, 'M', 'سج'),
+    (0xFCAE, 'M', 'سح'),
+    (0xFCAF, 'M', 'سخ'),
+    (0xFCB0, 'M', 'سم'),
+    (0xFCB1, 'M', 'صح'),
+    (0xFCB2, 'M', 'صخ'),
+    (0xFCB3, 'M', 'صم'),
+    (0xFCB4, 'M', 'ضج'),
+    (0xFCB5, 'M', 'ضح'),
+    (0xFCB6, 'M', 'ضخ'),
+    (0xFCB7, 'M', 'ضم'),
+    (0xFCB8, 'M', 'طح'),
+    (0xFCB9, 'M', 'ظم'),
+    (0xFCBA, 'M', 'عج'),
+    ]
+
+def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xFCBB, 'M', 'عم'),
+    (0xFCBC, 'M', 'غج'),
+    (0xFCBD, 'M', 'غم'),
+    (0xFCBE, 'M', 'فج'),
+    (0xFCBF, 'M', 'فح'),
+    (0xFCC0, 'M', 'فخ'),
+    (0xFCC1, 'M', 'فم'),
+    (0xFCC2, 'M', 'قح'),
+    (0xFCC3, 'M', 'قم'),
+    (0xFCC4, 'M', 'كج'),
+    (0xFCC5, 'M', 'كح'),
+    (0xFCC6, 'M', 'كخ'),
+    (0xFCC7, 'M', 'كل'),
+    (0xFCC8, 'M', 'كم'),
+    (0xFCC9, 'M', 'لج'),
+    (0xFCCA, 'M', 'لح'),
+    (0xFCCB, 'M', 'لخ'),
+    (0xFCCC, 'M', 'لم'),
+    (0xFCCD, 'M', 'له'),
+    (0xFCCE, 'M', 'مج'),
+    (0xFCCF, 'M', 'مح'),
+    (0xFCD0, 'M', 'مخ'),
+    (0xFCD1, 'M', 'مم'),
+    (0xFCD2, 'M', 'نج'),
+    (0xFCD3, 'M', 'نح'),
+    (0xFCD4, 'M', 'نخ'),
+    (0xFCD5, 'M', 'نم'),
+    (0xFCD6, 'M', 'نه'),
+    (0xFCD7, 'M', 'هج'),
+    (0xFCD8, 'M', 'هم'),
+    (0xFCD9, 'M', 'هٰ'),
+    (0xFCDA, 'M', 'يج'),
+    (0xFCDB, 'M', 'يح'),
+    (0xFCDC, 'M', 'يخ'),
+    (0xFCDD, 'M', 'يم'),
+    (0xFCDE, 'M', 'يه'),
+    (0xFCDF, 'M', 'ئم'),
+    (0xFCE0, 'M', 'ئه'),
+    (0xFCE1, 'M', 'بم'),
+    (0xFCE2, 'M', 'به'),
+    (0xFCE3, 'M', 'تم'),
+    (0xFCE4, 'M', 'ته'),
+    (0xFCE5, 'M', 'ثم'),
+    (0xFCE6, 'M', 'ثه'),
+    (0xFCE7, 'M', 'سم'),
+    (0xFCE8, 'M', 'سه'),
+    (0xFCE9, 'M', 'شم'),
+    (0xFCEA, 'M', 'شه'),
+    (0xFCEB, 'M', 'كل'),
+    (0xFCEC, 'M', 'كم'),
+    (0xFCED, 'M', 'لم'),
+    (0xFCEE, 'M', 'نم'),
+    (0xFCEF, 'M', 'نه'),
+    (0xFCF0, 'M', 'يم'),
+    (0xFCF1, 'M', 'يه'),
+    (0xFCF2, 'M', 'ـَّ'),
+    (0xFCF3, 'M', 'ـُّ'),
+    (0xFCF4, 'M', 'ـِّ'),
+    (0xFCF5, 'M', 'طى'),
+    (0xFCF6, 'M', 'طي'),
+    (0xFCF7, 'M', 'عى'),
+    (0xFCF8, 'M', 'عي'),
+    (0xFCF9, 'M', 'غى'),
+    (0xFCFA, 'M', 'غي'),
+    (0xFCFB, 'M', 'سى'),
+    (0xFCFC, 'M', 'سي'),
+    (0xFCFD, 'M', 'شى'),
+    (0xFCFE, 'M', 'شي'),
+    (0xFCFF, 'M', 'حى'),
+    (0xFD00, 'M', 'حي'),
+    (0xFD01, 'M', 'جى'),
+    (0xFD02, 'M', 'جي'),
+    (0xFD03, 'M', 'خى'),
+    (0xFD04, 'M', 'خي'),
+    (0xFD05, 'M', 'صى'),
+    (0xFD06, 'M', 'صي'),
+    (0xFD07, 'M', 'ضى'),
+    (0xFD08, 'M', 'ضي'),
+    (0xFD09, 'M', 'شج'),
+    (0xFD0A, 'M', 'شح'),
+    (0xFD0B, 'M', 'شخ'),
+    (0xFD0C, 'M', 'شم'),
+    (0xFD0D, 'M', 'شر'),
+    (0xFD0E, 'M', 'سر'),
+    (0xFD0F, 'M', 'صر'),
+    (0xFD10, 'M', 'ضر'),
+    (0xFD11, 'M', 'طى'),
+    (0xFD12, 'M', 'طي'),
+    (0xFD13, 'M', 'عى'),
+    (0xFD14, 'M', 'عي'),
+    (0xFD15, 'M', 'غى'),
+    (0xFD16, 'M', 'غي'),
+    (0xFD17, 'M', 'سى'),
+    (0xFD18, 'M', 'سي'),
+    (0xFD19, 'M', 'شى'),
+    (0xFD1A, 'M', 'شي'),
+    (0xFD1B, 'M', 'حى'),
+    (0xFD1C, 'M', 'حي'),
+    (0xFD1D, 'M', 'جى'),
+    (0xFD1E, 'M', 'جي'),
+    ]
+
+def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xFD1F, 'M', 'خى'),
+    (0xFD20, 'M', 'خي'),
+    (0xFD21, 'M', 'صى'),
+    (0xFD22, 'M', 'صي'),
+    (0xFD23, 'M', 'ضى'),
+    (0xFD24, 'M', 'ضي'),
+    (0xFD25, 'M', 'شج'),
+    (0xFD26, 'M', 'شح'),
+    (0xFD27, 'M', 'شخ'),
+    (0xFD28, 'M', 'شم'),
+    (0xFD29, 'M', 'شر'),
+    (0xFD2A, 'M', 'سر'),
+    (0xFD2B, 'M', 'صر'),
+    (0xFD2C, 'M', 'ضر'),
+    (0xFD2D, 'M', 'شج'),
+    (0xFD2E, 'M', 'شح'),
+    (0xFD2F, 'M', 'شخ'),
+    (0xFD30, 'M', 'شم'),
+    (0xFD31, 'M', 'سه'),
+    (0xFD32, 'M', 'شه'),
+    (0xFD33, 'M', 'طم'),
+    (0xFD34, 'M', 'سج'),
+    (0xFD35, 'M', 'سح'),
+    (0xFD36, 'M', 'سخ'),
+    (0xFD37, 'M', 'شج'),
+    (0xFD38, 'M', 'شح'),
+    (0xFD39, 'M', 'شخ'),
+    (0xFD3A, 'M', 'طم'),
+    (0xFD3B, 'M', 'ظم'),
+    (0xFD3C, 'M', 'اً'),
+    (0xFD3E, 'V'),
+    (0xFD50, 'M', 'تجم'),
+    (0xFD51, 'M', 'تحج'),
+    (0xFD53, 'M', 'تحم'),
+    (0xFD54, 'M', 'تخم'),
+    (0xFD55, 'M', 'تمج'),
+    (0xFD56, 'M', 'تمح'),
+    (0xFD57, 'M', 'تمخ'),
+    (0xFD58, 'M', 'جمح'),
+    (0xFD5A, 'M', 'حمي'),
+    (0xFD5B, 'M', 'حمى'),
+    (0xFD5C, 'M', 'سحج'),
+    (0xFD5D, 'M', 'سجح'),
+    (0xFD5E, 'M', 'سجى'),
+    (0xFD5F, 'M', 'سمح'),
+    (0xFD61, 'M', 'سمج'),
+    (0xFD62, 'M', 'سمم'),
+    (0xFD64, 'M', 'صحح'),
+    (0xFD66, 'M', 'صمم'),
+    (0xFD67, 'M', 'شحم'),
+    (0xFD69, 'M', 'شجي'),
+    (0xFD6A, 'M', 'شمخ'),
+    (0xFD6C, 'M', 'شمم'),
+    (0xFD6E, 'M', 'ضحى'),
+    (0xFD6F, 'M', 'ضخم'),
+    (0xFD71, 'M', 'طمح'),
+    (0xFD73, 'M', 'طمم'),
+    (0xFD74, 'M', 'طمي'),
+    (0xFD75, 'M', 'عجم'),
+    (0xFD76, 'M', 'عمم'),
+    (0xFD78, 'M', 'عمى'),
+    (0xFD79, 'M', 'غمم'),
+    (0xFD7A, 'M', 'غمي'),
+    (0xFD7B, 'M', 'غمى'),
+    (0xFD7C, 'M', 'فخم'),
+    (0xFD7E, 'M', 'قمح'),
+    (0xFD7F, 'M', 'قمم'),
+    (0xFD80, 'M', 'لحم'),
+    (0xFD81, 'M', 'لحي'),
+    (0xFD82, 'M', 'لحى'),
+    (0xFD83, 'M', 'لجج'),
+    (0xFD85, 'M', 'لخم'),
+    (0xFD87, 'M', 'لمح'),
+    (0xFD89, 'M', 'محج'),
+    (0xFD8A, 'M', 'محم'),
+    (0xFD8B, 'M', 'محي'),
+    (0xFD8C, 'M', 'مجح'),
+    (0xFD8D, 'M', 'مجم'),
+    (0xFD8E, 'M', 'مخج'),
+    (0xFD8F, 'M', 'مخم'),
+    (0xFD90, 'X'),
+    (0xFD92, 'M', 'مجخ'),
+    (0xFD93, 'M', 'همج'),
+    (0xFD94, 'M', 'همم'),
+    (0xFD95, 'M', 'نحم'),
+    (0xFD96, 'M', 'نحى'),
+    (0xFD97, 'M', 'نجم'),
+    (0xFD99, 'M', 'نجى'),
+    (0xFD9A, 'M', 'نمي'),
+    (0xFD9B, 'M', 'نمى'),
+    (0xFD9C, 'M', 'يمم'),
+    (0xFD9E, 'M', 'بخي'),
+    (0xFD9F, 'M', 'تجي'),
+    (0xFDA0, 'M', 'تجى'),
+    (0xFDA1, 'M', 'تخي'),
+    (0xFDA2, 'M', 'تخى'),
+    (0xFDA3, 'M', 'تمي'),
+    (0xFDA4, 'M', 'تمى'),
+    (0xFDA5, 'M', 'جمي'),
+    (0xFDA6, 'M', 'جحى'),
+    ]
+
+def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xFDA7, 'M', 'جمى'),
+    (0xFDA8, 'M', 'سخى'),
+    (0xFDA9, 'M', 'صحي'),
+    (0xFDAA, 'M', 'شحي'),
+    (0xFDAB, 'M', 'ضحي'),
+    (0xFDAC, 'M', 'لجي'),
+    (0xFDAD, 'M', 'لمي'),
+    (0xFDAE, 'M', 'يحي'),
+    (0xFDAF, 'M', 'يجي'),
+    (0xFDB0, 'M', 'يمي'),
+    (0xFDB1, 'M', 'ممي'),
+    (0xFDB2, 'M', 'قمي'),
+    (0xFDB3, 'M', 'نحي'),
+    (0xFDB4, 'M', 'قمح'),
+    (0xFDB5, 'M', 'لحم'),
+    (0xFDB6, 'M', 'عمي'),
+    (0xFDB7, 'M', 'كمي'),
+    (0xFDB8, 'M', 'نجح'),
+    (0xFDB9, 'M', 'مخي'),
+    (0xFDBA, 'M', 'لجم'),
+    (0xFDBB, 'M', 'كمم'),
+    (0xFDBC, 'M', 'لجم'),
+    (0xFDBD, 'M', 'نجح'),
+    (0xFDBE, 'M', 'جحي'),
+    (0xFDBF, 'M', 'حجي'),
+    (0xFDC0, 'M', 'مجي'),
+    (0xFDC1, 'M', 'فمي'),
+    (0xFDC2, 'M', 'بحي'),
+    (0xFDC3, 'M', 'كمم'),
+    (0xFDC4, 'M', 'عجم'),
+    (0xFDC5, 'M', 'صمم'),
+    (0xFDC6, 'M', 'سخي'),
+    (0xFDC7, 'M', 'نجي'),
+    (0xFDC8, 'X'),
+    (0xFDCF, 'V'),
+    (0xFDD0, 'X'),
+    (0xFDF0, 'M', 'صلے'),
+    (0xFDF1, 'M', 'قلے'),
+    (0xFDF2, 'M', 'الله'),
+    (0xFDF3, 'M', 'اكبر'),
+    (0xFDF4, 'M', 'محمد'),
+    (0xFDF5, 'M', 'صلعم'),
+    (0xFDF6, 'M', 'رسول'),
+    (0xFDF7, 'M', 'عليه'),
+    (0xFDF8, 'M', 'وسلم'),
+    (0xFDF9, 'M', 'صلى'),
+    (0xFDFA, '3', 'صلى الله عليه وسلم'),
+    (0xFDFB, '3', 'جل جلاله'),
+    (0xFDFC, 'M', 'ریال'),
+    (0xFDFD, 'V'),
+    (0xFE00, 'I'),
+    (0xFE10, '3', ','),
+    (0xFE11, 'M', '、'),
+    (0xFE12, 'X'),
+    (0xFE13, '3', ':'),
+    (0xFE14, '3', ';'),
+    (0xFE15, '3', '!'),
+    (0xFE16, '3', '?'),
+    (0xFE17, 'M', '〖'),
+    (0xFE18, 'M', '〗'),
+    (0xFE19, 'X'),
+    (0xFE20, 'V'),
+    (0xFE30, 'X'),
+    (0xFE31, 'M', '—'),
+    (0xFE32, 'M', '–'),
+    (0xFE33, '3', '_'),
+    (0xFE35, '3', '('),
+    (0xFE36, '3', ')'),
+    (0xFE37, '3', '{'),
+    (0xFE38, '3', '}'),
+    (0xFE39, 'M', '〔'),
+    (0xFE3A, 'M', '〕'),
+    (0xFE3B, 'M', '【'),
+    (0xFE3C, 'M', '】'),
+    (0xFE3D, 'M', '《'),
+    (0xFE3E, 'M', '》'),
+    (0xFE3F, 'M', '〈'),
+    (0xFE40, 'M', '〉'),
+    (0xFE41, 'M', '「'),
+    (0xFE42, 'M', '」'),
+    (0xFE43, 'M', '『'),
+    (0xFE44, 'M', '』'),
+    (0xFE45, 'V'),
+    (0xFE47, '3', '['),
+    (0xFE48, '3', ']'),
+    (0xFE49, '3', ' ̅'),
+    (0xFE4D, '3', '_'),
+    (0xFE50, '3', ','),
+    (0xFE51, 'M', '、'),
+    (0xFE52, 'X'),
+    (0xFE54, '3', ';'),
+    (0xFE55, '3', ':'),
+    (0xFE56, '3', '?'),
+    (0xFE57, '3', '!'),
+    (0xFE58, 'M', '—'),
+    (0xFE59, '3', '('),
+    (0xFE5A, '3', ')'),
+    (0xFE5B, '3', '{'),
+    (0xFE5C, '3', '}'),
+    (0xFE5D, 'M', '〔'),
+    ]
+
+def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xFE5E, 'M', '〕'),
+    (0xFE5F, '3', '#'),
+    (0xFE60, '3', '&'),
+    (0xFE61, '3', '*'),
+    (0xFE62, '3', '+'),
+    (0xFE63, 'M', '-'),
+    (0xFE64, '3', '<'),
+    (0xFE65, '3', '>'),
+    (0xFE66, '3', '='),
+    (0xFE67, 'X'),
+    (0xFE68, '3', '\\'),
+    (0xFE69, '3', '$'),
+    (0xFE6A, '3', '%'),
+    (0xFE6B, '3', '@'),
+    (0xFE6C, 'X'),
+    (0xFE70, '3', ' ً'),
+    (0xFE71, 'M', 'ـً'),
+    (0xFE72, '3', ' ٌ'),
+    (0xFE73, 'V'),
+    (0xFE74, '3', ' ٍ'),
+    (0xFE75, 'X'),
+    (0xFE76, '3', ' َ'),
+    (0xFE77, 'M', 'ـَ'),
+    (0xFE78, '3', ' ُ'),
+    (0xFE79, 'M', 'ـُ'),
+    (0xFE7A, '3', ' ِ'),
+    (0xFE7B, 'M', 'ـِ'),
+    (0xFE7C, '3', ' ّ'),
+    (0xFE7D, 'M', 'ـّ'),
+    (0xFE7E, '3', ' ْ'),
+    (0xFE7F, 'M', 'ـْ'),
+    (0xFE80, 'M', 'ء'),
+    (0xFE81, 'M', 'آ'),
+    (0xFE83, 'M', 'أ'),
+    (0xFE85, 'M', 'ؤ'),
+    (0xFE87, 'M', 'إ'),
+    (0xFE89, 'M', 'ئ'),
+    (0xFE8D, 'M', 'ا'),
+    (0xFE8F, 'M', 'ب'),
+    (0xFE93, 'M', 'ة'),
+    (0xFE95, 'M', 'ت'),
+    (0xFE99, 'M', 'ث'),
+    (0xFE9D, 'M', 'ج'),
+    (0xFEA1, 'M', 'ح'),
+    (0xFEA5, 'M', 'خ'),
+    (0xFEA9, 'M', 'د'),
+    (0xFEAB, 'M', 'ذ'),
+    (0xFEAD, 'M', 'ر'),
+    (0xFEAF, 'M', 'ز'),
+    (0xFEB1, 'M', 'س'),
+    (0xFEB5, 'M', 'ش'),
+    (0xFEB9, 'M', 'ص'),
+    (0xFEBD, 'M', 'ض'),
+    (0xFEC1, 'M', 'ط'),
+    (0xFEC5, 'M', 'ظ'),
+    (0xFEC9, 'M', 'ع'),
+    (0xFECD, 'M', 'غ'),
+    (0xFED1, 'M', 'ف'),
+    (0xFED5, 'M', 'ق'),
+    (0xFED9, 'M', 'ك'),
+    (0xFEDD, 'M', 'ل'),
+    (0xFEE1, 'M', 'م'),
+    (0xFEE5, 'M', 'ن'),
+    (0xFEE9, 'M', 'ه'),
+    (0xFEED, 'M', 'و'),
+    (0xFEEF, 'M', 'ى'),
+    (0xFEF1, 'M', 'ي'),
+    (0xFEF5, 'M', 'لآ'),
+    (0xFEF7, 'M', 'لأ'),
+    (0xFEF9, 'M', 'لإ'),
+    (0xFEFB, 'M', 'لا'),
+    (0xFEFD, 'X'),
+    (0xFEFF, 'I'),
+    (0xFF00, 'X'),
+    (0xFF01, '3', '!'),
+    (0xFF02, '3', '"'),
+    (0xFF03, '3', '#'),
+    (0xFF04, '3', '$'),
+    (0xFF05, '3', '%'),
+    (0xFF06, '3', '&'),
+    (0xFF07, '3', '\''),
+    (0xFF08, '3', '('),
+    (0xFF09, '3', ')'),
+    (0xFF0A, '3', '*'),
+    (0xFF0B, '3', '+'),
+    (0xFF0C, '3', ','),
+    (0xFF0D, 'M', '-'),
+    (0xFF0E, 'M', '.'),
+    (0xFF0F, '3', '/'),
+    (0xFF10, 'M', '0'),
+    (0xFF11, 'M', '1'),
+    (0xFF12, 'M', '2'),
+    (0xFF13, 'M', '3'),
+    (0xFF14, 'M', '4'),
+    (0xFF15, 'M', '5'),
+    (0xFF16, 'M', '6'),
+    (0xFF17, 'M', '7'),
+    (0xFF18, 'M', '8'),
+    (0xFF19, 'M', '9'),
+    (0xFF1A, '3', ':'),
+    ]
+
+def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xFF1B, '3', ';'),
+    (0xFF1C, '3', '<'),
+    (0xFF1D, '3', '='),
+    (0xFF1E, '3', '>'),
+    (0xFF1F, '3', '?'),
+    (0xFF20, '3', '@'),
+    (0xFF21, 'M', 'a'),
+    (0xFF22, 'M', 'b'),
+    (0xFF23, 'M', 'c'),
+    (0xFF24, 'M', 'd'),
+    (0xFF25, 'M', 'e'),
+    (0xFF26, 'M', 'f'),
+    (0xFF27, 'M', 'g'),
+    (0xFF28, 'M', 'h'),
+    (0xFF29, 'M', 'i'),
+    (0xFF2A, 'M', 'j'),
+    (0xFF2B, 'M', 'k'),
+    (0xFF2C, 'M', 'l'),
+    (0xFF2D, 'M', 'm'),
+    (0xFF2E, 'M', 'n'),
+    (0xFF2F, 'M', 'o'),
+    (0xFF30, 'M', 'p'),
+    (0xFF31, 'M', 'q'),
+    (0xFF32, 'M', 'r'),
+    (0xFF33, 'M', 's'),
+    (0xFF34, 'M', 't'),
+    (0xFF35, 'M', 'u'),
+    (0xFF36, 'M', 'v'),
+    (0xFF37, 'M', 'w'),
+    (0xFF38, 'M', 'x'),
+    (0xFF39, 'M', 'y'),
+    (0xFF3A, 'M', 'z'),
+    (0xFF3B, '3', '['),
+    (0xFF3C, '3', '\\'),
+    (0xFF3D, '3', ']'),
+    (0xFF3E, '3', '^'),
+    (0xFF3F, '3', '_'),
+    (0xFF40, '3', '`'),
+    (0xFF41, 'M', 'a'),
+    (0xFF42, 'M', 'b'),
+    (0xFF43, 'M', 'c'),
+    (0xFF44, 'M', 'd'),
+    (0xFF45, 'M', 'e'),
+    (0xFF46, 'M', 'f'),
+    (0xFF47, 'M', 'g'),
+    (0xFF48, 'M', 'h'),
+    (0xFF49, 'M', 'i'),
+    (0xFF4A, 'M', 'j'),
+    (0xFF4B, 'M', 'k'),
+    (0xFF4C, 'M', 'l'),
+    (0xFF4D, 'M', 'm'),
+    (0xFF4E, 'M', 'n'),
+    (0xFF4F, 'M', 'o'),
+    (0xFF50, 'M', 'p'),
+    (0xFF51, 'M', 'q'),
+    (0xFF52, 'M', 'r'),
+    (0xFF53, 'M', 's'),
+    (0xFF54, 'M', 't'),
+    (0xFF55, 'M', 'u'),
+    (0xFF56, 'M', 'v'),
+    (0xFF57, 'M', 'w'),
+    (0xFF58, 'M', 'x'),
+    (0xFF59, 'M', 'y'),
+    (0xFF5A, 'M', 'z'),
+    (0xFF5B, '3', '{'),
+    (0xFF5C, '3', '|'),
+    (0xFF5D, '3', '}'),
+    (0xFF5E, '3', '~'),
+    (0xFF5F, 'M', '⦅'),
+    (0xFF60, 'M', '⦆'),
+    (0xFF61, 'M', '.'),
+    (0xFF62, 'M', '「'),
+    (0xFF63, 'M', '」'),
+    (0xFF64, 'M', '、'),
+    (0xFF65, 'M', '・'),
+    (0xFF66, 'M', 'ヲ'),
+    (0xFF67, 'M', 'ァ'),
+    (0xFF68, 'M', 'ィ'),
+    (0xFF69, 'M', 'ゥ'),
+    (0xFF6A, 'M', 'ェ'),
+    (0xFF6B, 'M', 'ォ'),
+    (0xFF6C, 'M', 'ャ'),
+    (0xFF6D, 'M', 'ュ'),
+    (0xFF6E, 'M', 'ョ'),
+    (0xFF6F, 'M', 'ッ'),
+    (0xFF70, 'M', 'ー'),
+    (0xFF71, 'M', 'ア'),
+    (0xFF72, 'M', 'イ'),
+    (0xFF73, 'M', 'ウ'),
+    (0xFF74, 'M', 'エ'),
+    (0xFF75, 'M', 'オ'),
+    (0xFF76, 'M', 'カ'),
+    (0xFF77, 'M', 'キ'),
+    (0xFF78, 'M', 'ク'),
+    (0xFF79, 'M', 'ケ'),
+    (0xFF7A, 'M', 'コ'),
+    (0xFF7B, 'M', 'サ'),
+    (0xFF7C, 'M', 'シ'),
+    (0xFF7D, 'M', 'ス'),
+    (0xFF7E, 'M', 'セ'),
+    ]
+
+def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xFF7F, 'M', 'ソ'),
+    (0xFF80, 'M', 'タ'),
+    (0xFF81, 'M', 'チ'),
+    (0xFF82, 'M', 'ツ'),
+    (0xFF83, 'M', 'テ'),
+    (0xFF84, 'M', 'ト'),
+    (0xFF85, 'M', 'ナ'),
+    (0xFF86, 'M', 'ニ'),
+    (0xFF87, 'M', 'ヌ'),
+    (0xFF88, 'M', 'ネ'),
+    (0xFF89, 'M', 'ノ'),
+    (0xFF8A, 'M', 'ハ'),
+    (0xFF8B, 'M', 'ヒ'),
+    (0xFF8C, 'M', 'フ'),
+    (0xFF8D, 'M', 'ヘ'),
+    (0xFF8E, 'M', 'ホ'),
+    (0xFF8F, 'M', 'マ'),
+    (0xFF90, 'M', 'ミ'),
+    (0xFF91, 'M', 'ム'),
+    (0xFF92, 'M', 'メ'),
+    (0xFF93, 'M', 'モ'),
+    (0xFF94, 'M', 'ヤ'),
+    (0xFF95, 'M', 'ユ'),
+    (0xFF96, 'M', 'ヨ'),
+    (0xFF97, 'M', 'ラ'),
+    (0xFF98, 'M', 'リ'),
+    (0xFF99, 'M', 'ル'),
+    (0xFF9A, 'M', 'レ'),
+    (0xFF9B, 'M', 'ロ'),
+    (0xFF9C, 'M', 'ワ'),
+    (0xFF9D, 'M', 'ン'),
+    (0xFF9E, 'M', '゙'),
+    (0xFF9F, 'M', '゚'),
+    (0xFFA0, 'X'),
+    (0xFFA1, 'M', 'ᄀ'),
+    (0xFFA2, 'M', 'ᄁ'),
+    (0xFFA3, 'M', 'ᆪ'),
+    (0xFFA4, 'M', 'ᄂ'),
+    (0xFFA5, 'M', 'ᆬ'),
+    (0xFFA6, 'M', 'ᆭ'),
+    (0xFFA7, 'M', 'ᄃ'),
+    (0xFFA8, 'M', 'ᄄ'),
+    (0xFFA9, 'M', 'ᄅ'),
+    (0xFFAA, 'M', 'ᆰ'),
+    (0xFFAB, 'M', 'ᆱ'),
+    (0xFFAC, 'M', 'ᆲ'),
+    (0xFFAD, 'M', 'ᆳ'),
+    (0xFFAE, 'M', 'ᆴ'),
+    (0xFFAF, 'M', 'ᆵ'),
+    (0xFFB0, 'M', 'ᄚ'),
+    (0xFFB1, 'M', 'ᄆ'),
+    (0xFFB2, 'M', 'ᄇ'),
+    (0xFFB3, 'M', 'ᄈ'),
+    (0xFFB4, 'M', 'ᄡ'),
+    (0xFFB5, 'M', 'ᄉ'),
+    (0xFFB6, 'M', 'ᄊ'),
+    (0xFFB7, 'M', 'ᄋ'),
+    (0xFFB8, 'M', 'ᄌ'),
+    (0xFFB9, 'M', 'ᄍ'),
+    (0xFFBA, 'M', 'ᄎ'),
+    (0xFFBB, 'M', 'ᄏ'),
+    (0xFFBC, 'M', 'ᄐ'),
+    (0xFFBD, 'M', 'ᄑ'),
+    (0xFFBE, 'M', 'ᄒ'),
+    (0xFFBF, 'X'),
+    (0xFFC2, 'M', 'ᅡ'),
+    (0xFFC3, 'M', 'ᅢ'),
+    (0xFFC4, 'M', 'ᅣ'),
+    (0xFFC5, 'M', 'ᅤ'),
+    (0xFFC6, 'M', 'ᅥ'),
+    (0xFFC7, 'M', 'ᅦ'),
+    (0xFFC8, 'X'),
+    (0xFFCA, 'M', 'ᅧ'),
+    (0xFFCB, 'M', 'ᅨ'),
+    (0xFFCC, 'M', 'ᅩ'),
+    (0xFFCD, 'M', 'ᅪ'),
+    (0xFFCE, 'M', 'ᅫ'),
+    (0xFFCF, 'M', 'ᅬ'),
+    (0xFFD0, 'X'),
+    (0xFFD2, 'M', 'ᅭ'),
+    (0xFFD3, 'M', 'ᅮ'),
+    (0xFFD4, 'M', 'ᅯ'),
+    (0xFFD5, 'M', 'ᅰ'),
+    (0xFFD6, 'M', 'ᅱ'),
+    (0xFFD7, 'M', 'ᅲ'),
+    (0xFFD8, 'X'),
+    (0xFFDA, 'M', 'ᅳ'),
+    (0xFFDB, 'M', 'ᅴ'),
+    (0xFFDC, 'M', 'ᅵ'),
+    (0xFFDD, 'X'),
+    (0xFFE0, 'M', '¢'),
+    (0xFFE1, 'M', '£'),
+    (0xFFE2, 'M', '¬'),
+    (0xFFE3, '3', ' ̄'),
+    (0xFFE4, 'M', '¦'),
+    (0xFFE5, 'M', '¥'),
+    (0xFFE6, 'M', '₩'),
+    (0xFFE7, 'X'),
+    (0xFFE8, 'M', '│'),
+    (0xFFE9, 'M', '←'),
+    ]
+
+def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0xFFEA, 'M', '↑'),
+    (0xFFEB, 'M', '→'),
+    (0xFFEC, 'M', '↓'),
+    (0xFFED, 'M', '■'),
+    (0xFFEE, 'M', '○'),
+    (0xFFEF, 'X'),
+    (0x10000, 'V'),
+    (0x1000C, 'X'),
+    (0x1000D, 'V'),
+    (0x10027, 'X'),
+    (0x10028, 'V'),
+    (0x1003B, 'X'),
+    (0x1003C, 'V'),
+    (0x1003E, 'X'),
+    (0x1003F, 'V'),
+    (0x1004E, 'X'),
+    (0x10050, 'V'),
+    (0x1005E, 'X'),
+    (0x10080, 'V'),
+    (0x100FB, 'X'),
+    (0x10100, 'V'),
+    (0x10103, 'X'),
+    (0x10107, 'V'),
+    (0x10134, 'X'),
+    (0x10137, 'V'),
+    (0x1018F, 'X'),
+    (0x10190, 'V'),
+    (0x1019D, 'X'),
+    (0x101A0, 'V'),
+    (0x101A1, 'X'),
+    (0x101D0, 'V'),
+    (0x101FE, 'X'),
+    (0x10280, 'V'),
+    (0x1029D, 'X'),
+    (0x102A0, 'V'),
+    (0x102D1, 'X'),
+    (0x102E0, 'V'),
+    (0x102FC, 'X'),
+    (0x10300, 'V'),
+    (0x10324, 'X'),
+    (0x1032D, 'V'),
+    (0x1034B, 'X'),
+    (0x10350, 'V'),
+    (0x1037B, 'X'),
+    (0x10380, 'V'),
+    (0x1039E, 'X'),
+    (0x1039F, 'V'),
+    (0x103C4, 'X'),
+    (0x103C8, 'V'),
+    (0x103D6, 'X'),
+    (0x10400, 'M', '𐐨'),
+    (0x10401, 'M', '𐐩'),
+    (0x10402, 'M', '𐐪'),
+    (0x10403, 'M', '𐐫'),
+    (0x10404, 'M', '𐐬'),
+    (0x10405, 'M', '𐐭'),
+    (0x10406, 'M', '𐐮'),
+    (0x10407, 'M', '𐐯'),
+    (0x10408, 'M', '𐐰'),
+    (0x10409, 'M', '𐐱'),
+    (0x1040A, 'M', '𐐲'),
+    (0x1040B, 'M', '𐐳'),
+    (0x1040C, 'M', '𐐴'),
+    (0x1040D, 'M', '𐐵'),
+    (0x1040E, 'M', '𐐶'),
+    (0x1040F, 'M', '𐐷'),
+    (0x10410, 'M', '𐐸'),
+    (0x10411, 'M', '𐐹'),
+    (0x10412, 'M', '𐐺'),
+    (0x10413, 'M', '𐐻'),
+    (0x10414, 'M', '𐐼'),
+    (0x10415, 'M', '𐐽'),
+    (0x10416, 'M', '𐐾'),
+    (0x10417, 'M', '𐐿'),
+    (0x10418, 'M', '𐑀'),
+    (0x10419, 'M', '𐑁'),
+    (0x1041A, 'M', '𐑂'),
+    (0x1041B, 'M', '𐑃'),
+    (0x1041C, 'M', '𐑄'),
+    (0x1041D, 'M', '𐑅'),
+    (0x1041E, 'M', '𐑆'),
+    (0x1041F, 'M', '𐑇'),
+    (0x10420, 'M', '𐑈'),
+    (0x10421, 'M', '𐑉'),
+    (0x10422, 'M', '𐑊'),
+    (0x10423, 'M', '𐑋'),
+    (0x10424, 'M', '𐑌'),
+    (0x10425, 'M', '𐑍'),
+    (0x10426, 'M', '𐑎'),
+    (0x10427, 'M', '𐑏'),
+    (0x10428, 'V'),
+    (0x1049E, 'X'),
+    (0x104A0, 'V'),
+    (0x104AA, 'X'),
+    (0x104B0, 'M', '𐓘'),
+    (0x104B1, 'M', '𐓙'),
+    (0x104B2, 'M', '𐓚'),
+    (0x104B3, 'M', '𐓛'),
+    (0x104B4, 'M', '𐓜'),
+    (0x104B5, 'M', '𐓝'),
+    ]
+
+def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x104B6, 'M', '𐓞'),
+    (0x104B7, 'M', '𐓟'),
+    (0x104B8, 'M', '𐓠'),
+    (0x104B9, 'M', '𐓡'),
+    (0x104BA, 'M', '𐓢'),
+    (0x104BB, 'M', '𐓣'),
+    (0x104BC, 'M', '𐓤'),
+    (0x104BD, 'M', '𐓥'),
+    (0x104BE, 'M', '𐓦'),
+    (0x104BF, 'M', '𐓧'),
+    (0x104C0, 'M', '𐓨'),
+    (0x104C1, 'M', '𐓩'),
+    (0x104C2, 'M', '𐓪'),
+    (0x104C3, 'M', '𐓫'),
+    (0x104C4, 'M', '𐓬'),
+    (0x104C5, 'M', '𐓭'),
+    (0x104C6, 'M', '𐓮'),
+    (0x104C7, 'M', '𐓯'),
+    (0x104C8, 'M', '𐓰'),
+    (0x104C9, 'M', '𐓱'),
+    (0x104CA, 'M', '𐓲'),
+    (0x104CB, 'M', '𐓳'),
+    (0x104CC, 'M', '𐓴'),
+    (0x104CD, 'M', '𐓵'),
+    (0x104CE, 'M', '𐓶'),
+    (0x104CF, 'M', '𐓷'),
+    (0x104D0, 'M', '𐓸'),
+    (0x104D1, 'M', '𐓹'),
+    (0x104D2, 'M', '𐓺'),
+    (0x104D3, 'M', '𐓻'),
+    (0x104D4, 'X'),
+    (0x104D8, 'V'),
+    (0x104FC, 'X'),
+    (0x10500, 'V'),
+    (0x10528, 'X'),
+    (0x10530, 'V'),
+    (0x10564, 'X'),
+    (0x1056F, 'V'),
+    (0x10570, 'M', '𐖗'),
+    (0x10571, 'M', '𐖘'),
+    (0x10572, 'M', '𐖙'),
+    (0x10573, 'M', '𐖚'),
+    (0x10574, 'M', '𐖛'),
+    (0x10575, 'M', '𐖜'),
+    (0x10576, 'M', '𐖝'),
+    (0x10577, 'M', '𐖞'),
+    (0x10578, 'M', '𐖟'),
+    (0x10579, 'M', '𐖠'),
+    (0x1057A, 'M', '𐖡'),
+    (0x1057B, 'X'),
+    (0x1057C, 'M', '𐖣'),
+    (0x1057D, 'M', '𐖤'),
+    (0x1057E, 'M', '𐖥'),
+    (0x1057F, 'M', '𐖦'),
+    (0x10580, 'M', '𐖧'),
+    (0x10581, 'M', '𐖨'),
+    (0x10582, 'M', '𐖩'),
+    (0x10583, 'M', '𐖪'),
+    (0x10584, 'M', '𐖫'),
+    (0x10585, 'M', '𐖬'),
+    (0x10586, 'M', '𐖭'),
+    (0x10587, 'M', '𐖮'),
+    (0x10588, 'M', '𐖯'),
+    (0x10589, 'M', '𐖰'),
+    (0x1058A, 'M', '𐖱'),
+    (0x1058B, 'X'),
+    (0x1058C, 'M', '𐖳'),
+    (0x1058D, 'M', '𐖴'),
+    (0x1058E, 'M', '𐖵'),
+    (0x1058F, 'M', '𐖶'),
+    (0x10590, 'M', '𐖷'),
+    (0x10591, 'M', '𐖸'),
+    (0x10592, 'M', '𐖹'),
+    (0x10593, 'X'),
+    (0x10594, 'M', '𐖻'),
+    (0x10595, 'M', '𐖼'),
+    (0x10596, 'X'),
+    (0x10597, 'V'),
+    (0x105A2, 'X'),
+    (0x105A3, 'V'),
+    (0x105B2, 'X'),
+    (0x105B3, 'V'),
+    (0x105BA, 'X'),
+    (0x105BB, 'V'),
+    (0x105BD, 'X'),
+    (0x10600, 'V'),
+    (0x10737, 'X'),
+    (0x10740, 'V'),
+    (0x10756, 'X'),
+    (0x10760, 'V'),
+    (0x10768, 'X'),
+    (0x10780, 'V'),
+    (0x10781, 'M', 'ː'),
+    (0x10782, 'M', 'ˑ'),
+    (0x10783, 'M', 'æ'),
+    (0x10784, 'M', 'ʙ'),
+    (0x10785, 'M', 'ɓ'),
+    (0x10786, 'X'),
+    (0x10787, 'M', 'ʣ'),
+    (0x10788, 'M', 'ꭦ'),
+    ]
+
+def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x10789, 'M', 'ʥ'),
+    (0x1078A, 'M', 'ʤ'),
+    (0x1078B, 'M', 'ɖ'),
+    (0x1078C, 'M', 'ɗ'),
+    (0x1078D, 'M', 'ᶑ'),
+    (0x1078E, 'M', 'ɘ'),
+    (0x1078F, 'M', 'ɞ'),
+    (0x10790, 'M', 'ʩ'),
+    (0x10791, 'M', 'ɤ'),
+    (0x10792, 'M', 'ɢ'),
+    (0x10793, 'M', 'ɠ'),
+    (0x10794, 'M', 'ʛ'),
+    (0x10795, 'M', 'ħ'),
+    (0x10796, 'M', 'ʜ'),
+    (0x10797, 'M', 'ɧ'),
+    (0x10798, 'M', 'ʄ'),
+    (0x10799, 'M', 'ʪ'),
+    (0x1079A, 'M', 'ʫ'),
+    (0x1079B, 'M', 'ɬ'),
+    (0x1079C, 'M', '𝼄'),
+    (0x1079D, 'M', 'ꞎ'),
+    (0x1079E, 'M', 'ɮ'),
+    (0x1079F, 'M', '𝼅'),
+    (0x107A0, 'M', 'ʎ'),
+    (0x107A1, 'M', '𝼆'),
+    (0x107A2, 'M', 'ø'),
+    (0x107A3, 'M', 'ɶ'),
+    (0x107A4, 'M', 'ɷ'),
+    (0x107A5, 'M', 'q'),
+    (0x107A6, 'M', 'ɺ'),
+    (0x107A7, 'M', '𝼈'),
+    (0x107A8, 'M', 'ɽ'),
+    (0x107A9, 'M', 'ɾ'),
+    (0x107AA, 'M', 'ʀ'),
+    (0x107AB, 'M', 'ʨ'),
+    (0x107AC, 'M', 'ʦ'),
+    (0x107AD, 'M', 'ꭧ'),
+    (0x107AE, 'M', 'ʧ'),
+    (0x107AF, 'M', 'ʈ'),
+    (0x107B0, 'M', 'ⱱ'),
+    (0x107B1, 'X'),
+    (0x107B2, 'M', 'ʏ'),
+    (0x107B3, 'M', 'ʡ'),
+    (0x107B4, 'M', 'ʢ'),
+    (0x107B5, 'M', 'ʘ'),
+    (0x107B6, 'M', 'ǀ'),
+    (0x107B7, 'M', 'ǁ'),
+    (0x107B8, 'M', 'ǂ'),
+    (0x107B9, 'M', '𝼊'),
+    (0x107BA, 'M', '𝼞'),
+    (0x107BB, 'X'),
+    (0x10800, 'V'),
+    (0x10806, 'X'),
+    (0x10808, 'V'),
+    (0x10809, 'X'),
+    (0x1080A, 'V'),
+    (0x10836, 'X'),
+    (0x10837, 'V'),
+    (0x10839, 'X'),
+    (0x1083C, 'V'),
+    (0x1083D, 'X'),
+    (0x1083F, 'V'),
+    (0x10856, 'X'),
+    (0x10857, 'V'),
+    (0x1089F, 'X'),
+    (0x108A7, 'V'),
+    (0x108B0, 'X'),
+    (0x108E0, 'V'),
+    (0x108F3, 'X'),
+    (0x108F4, 'V'),
+    (0x108F6, 'X'),
+    (0x108FB, 'V'),
+    (0x1091C, 'X'),
+    (0x1091F, 'V'),
+    (0x1093A, 'X'),
+    (0x1093F, 'V'),
+    (0x10940, 'X'),
+    (0x10980, 'V'),
+    (0x109B8, 'X'),
+    (0x109BC, 'V'),
+    (0x109D0, 'X'),
+    (0x109D2, 'V'),
+    (0x10A04, 'X'),
+    (0x10A05, 'V'),
+    (0x10A07, 'X'),
+    (0x10A0C, 'V'),
+    (0x10A14, 'X'),
+    (0x10A15, 'V'),
+    (0x10A18, 'X'),
+    (0x10A19, 'V'),
+    (0x10A36, 'X'),
+    (0x10A38, 'V'),
+    (0x10A3B, 'X'),
+    (0x10A3F, 'V'),
+    (0x10A49, 'X'),
+    (0x10A50, 'V'),
+    (0x10A59, 'X'),
+    (0x10A60, 'V'),
+    (0x10AA0, 'X'),
+    (0x10AC0, 'V'),
+    ]
+
+def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x10AE7, 'X'),
+    (0x10AEB, 'V'),
+    (0x10AF7, 'X'),
+    (0x10B00, 'V'),
+    (0x10B36, 'X'),
+    (0x10B39, 'V'),
+    (0x10B56, 'X'),
+    (0x10B58, 'V'),
+    (0x10B73, 'X'),
+    (0x10B78, 'V'),
+    (0x10B92, 'X'),
+    (0x10B99, 'V'),
+    (0x10B9D, 'X'),
+    (0x10BA9, 'V'),
+    (0x10BB0, 'X'),
+    (0x10C00, 'V'),
+    (0x10C49, 'X'),
+    (0x10C80, 'M', '𐳀'),
+    (0x10C81, 'M', '𐳁'),
+    (0x10C82, 'M', '𐳂'),
+    (0x10C83, 'M', '𐳃'),
+    (0x10C84, 'M', '𐳄'),
+    (0x10C85, 'M', '𐳅'),
+    (0x10C86, 'M', '𐳆'),
+    (0x10C87, 'M', '𐳇'),
+    (0x10C88, 'M', '𐳈'),
+    (0x10C89, 'M', '𐳉'),
+    (0x10C8A, 'M', '𐳊'),
+    (0x10C8B, 'M', '𐳋'),
+    (0x10C8C, 'M', '𐳌'),
+    (0x10C8D, 'M', '𐳍'),
+    (0x10C8E, 'M', '𐳎'),
+    (0x10C8F, 'M', '𐳏'),
+    (0x10C90, 'M', '𐳐'),
+    (0x10C91, 'M', '𐳑'),
+    (0x10C92, 'M', '𐳒'),
+    (0x10C93, 'M', '𐳓'),
+    (0x10C94, 'M', '𐳔'),
+    (0x10C95, 'M', '𐳕'),
+    (0x10C96, 'M', '𐳖'),
+    (0x10C97, 'M', '𐳗'),
+    (0x10C98, 'M', '𐳘'),
+    (0x10C99, 'M', '𐳙'),
+    (0x10C9A, 'M', '𐳚'),
+    (0x10C9B, 'M', '𐳛'),
+    (0x10C9C, 'M', '𐳜'),
+    (0x10C9D, 'M', '𐳝'),
+    (0x10C9E, 'M', '𐳞'),
+    (0x10C9F, 'M', '𐳟'),
+    (0x10CA0, 'M', '𐳠'),
+    (0x10CA1, 'M', '𐳡'),
+    (0x10CA2, 'M', '𐳢'),
+    (0x10CA3, 'M', '𐳣'),
+    (0x10CA4, 'M', '𐳤'),
+    (0x10CA5, 'M', '𐳥'),
+    (0x10CA6, 'M', '𐳦'),
+    (0x10CA7, 'M', '𐳧'),
+    (0x10CA8, 'M', '𐳨'),
+    (0x10CA9, 'M', '𐳩'),
+    (0x10CAA, 'M', '𐳪'),
+    (0x10CAB, 'M', '𐳫'),
+    (0x10CAC, 'M', '𐳬'),
+    (0x10CAD, 'M', '𐳭'),
+    (0x10CAE, 'M', '𐳮'),
+    (0x10CAF, 'M', '𐳯'),
+    (0x10CB0, 'M', '𐳰'),
+    (0x10CB1, 'M', '𐳱'),
+    (0x10CB2, 'M', '𐳲'),
+    (0x10CB3, 'X'),
+    (0x10CC0, 'V'),
+    (0x10CF3, 'X'),
+    (0x10CFA, 'V'),
+    (0x10D28, 'X'),
+    (0x10D30, 'V'),
+    (0x10D3A, 'X'),
+    (0x10E60, 'V'),
+    (0x10E7F, 'X'),
+    (0x10E80, 'V'),
+    (0x10EAA, 'X'),
+    (0x10EAB, 'V'),
+    (0x10EAE, 'X'),
+    (0x10EB0, 'V'),
+    (0x10EB2, 'X'),
+    (0x10EFD, 'V'),
+    (0x10F28, 'X'),
+    (0x10F30, 'V'),
+    (0x10F5A, 'X'),
+    (0x10F70, 'V'),
+    (0x10F8A, 'X'),
+    (0x10FB0, 'V'),
+    (0x10FCC, 'X'),
+    (0x10FE0, 'V'),
+    (0x10FF7, 'X'),
+    (0x11000, 'V'),
+    (0x1104E, 'X'),
+    (0x11052, 'V'),
+    (0x11076, 'X'),
+    (0x1107F, 'V'),
+    (0x110BD, 'X'),
+    (0x110BE, 'V'),
+    ]
+
+def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x110C3, 'X'),
+    (0x110D0, 'V'),
+    (0x110E9, 'X'),
+    (0x110F0, 'V'),
+    (0x110FA, 'X'),
+    (0x11100, 'V'),
+    (0x11135, 'X'),
+    (0x11136, 'V'),
+    (0x11148, 'X'),
+    (0x11150, 'V'),
+    (0x11177, 'X'),
+    (0x11180, 'V'),
+    (0x111E0, 'X'),
+    (0x111E1, 'V'),
+    (0x111F5, 'X'),
+    (0x11200, 'V'),
+    (0x11212, 'X'),
+    (0x11213, 'V'),
+    (0x11242, 'X'),
+    (0x11280, 'V'),
+    (0x11287, 'X'),
+    (0x11288, 'V'),
+    (0x11289, 'X'),
+    (0x1128A, 'V'),
+    (0x1128E, 'X'),
+    (0x1128F, 'V'),
+    (0x1129E, 'X'),
+    (0x1129F, 'V'),
+    (0x112AA, 'X'),
+    (0x112B0, 'V'),
+    (0x112EB, 'X'),
+    (0x112F0, 'V'),
+    (0x112FA, 'X'),
+    (0x11300, 'V'),
+    (0x11304, 'X'),
+    (0x11305, 'V'),
+    (0x1130D, 'X'),
+    (0x1130F, 'V'),
+    (0x11311, 'X'),
+    (0x11313, 'V'),
+    (0x11329, 'X'),
+    (0x1132A, 'V'),
+    (0x11331, 'X'),
+    (0x11332, 'V'),
+    (0x11334, 'X'),
+    (0x11335, 'V'),
+    (0x1133A, 'X'),
+    (0x1133B, 'V'),
+    (0x11345, 'X'),
+    (0x11347, 'V'),
+    (0x11349, 'X'),
+    (0x1134B, 'V'),
+    (0x1134E, 'X'),
+    (0x11350, 'V'),
+    (0x11351, 'X'),
+    (0x11357, 'V'),
+    (0x11358, 'X'),
+    (0x1135D, 'V'),
+    (0x11364, 'X'),
+    (0x11366, 'V'),
+    (0x1136D, 'X'),
+    (0x11370, 'V'),
+    (0x11375, 'X'),
+    (0x11400, 'V'),
+    (0x1145C, 'X'),
+    (0x1145D, 'V'),
+    (0x11462, 'X'),
+    (0x11480, 'V'),
+    (0x114C8, 'X'),
+    (0x114D0, 'V'),
+    (0x114DA, 'X'),
+    (0x11580, 'V'),
+    (0x115B6, 'X'),
+    (0x115B8, 'V'),
+    (0x115DE, 'X'),
+    (0x11600, 'V'),
+    (0x11645, 'X'),
+    (0x11650, 'V'),
+    (0x1165A, 'X'),
+    (0x11660, 'V'),
+    (0x1166D, 'X'),
+    (0x11680, 'V'),
+    (0x116BA, 'X'),
+    (0x116C0, 'V'),
+    (0x116CA, 'X'),
+    (0x11700, 'V'),
+    (0x1171B, 'X'),
+    (0x1171D, 'V'),
+    (0x1172C, 'X'),
+    (0x11730, 'V'),
+    (0x11747, 'X'),
+    (0x11800, 'V'),
+    (0x1183C, 'X'),
+    (0x118A0, 'M', '𑣀'),
+    (0x118A1, 'M', '𑣁'),
+    (0x118A2, 'M', '𑣂'),
+    (0x118A3, 'M', '𑣃'),
+    (0x118A4, 'M', '𑣄'),
+    (0x118A5, 'M', '𑣅'),
+    (0x118A6, 'M', '𑣆'),
+    ]
+
+def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x118A7, 'M', '𑣇'),
+    (0x118A8, 'M', '𑣈'),
+    (0x118A9, 'M', '𑣉'),
+    (0x118AA, 'M', '𑣊'),
+    (0x118AB, 'M', '𑣋'),
+    (0x118AC, 'M', '𑣌'),
+    (0x118AD, 'M', '𑣍'),
+    (0x118AE, 'M', '𑣎'),
+    (0x118AF, 'M', '𑣏'),
+    (0x118B0, 'M', '𑣐'),
+    (0x118B1, 'M', '𑣑'),
+    (0x118B2, 'M', '𑣒'),
+    (0x118B3, 'M', '𑣓'),
+    (0x118B4, 'M', '𑣔'),
+    (0x118B5, 'M', '𑣕'),
+    (0x118B6, 'M', '𑣖'),
+    (0x118B7, 'M', '𑣗'),
+    (0x118B8, 'M', '𑣘'),
+    (0x118B9, 'M', '𑣙'),
+    (0x118BA, 'M', '𑣚'),
+    (0x118BB, 'M', '𑣛'),
+    (0x118BC, 'M', '𑣜'),
+    (0x118BD, 'M', '𑣝'),
+    (0x118BE, 'M', '𑣞'),
+    (0x118BF, 'M', '𑣟'),
+    (0x118C0, 'V'),
+    (0x118F3, 'X'),
+    (0x118FF, 'V'),
+    (0x11907, 'X'),
+    (0x11909, 'V'),
+    (0x1190A, 'X'),
+    (0x1190C, 'V'),
+    (0x11914, 'X'),
+    (0x11915, 'V'),
+    (0x11917, 'X'),
+    (0x11918, 'V'),
+    (0x11936, 'X'),
+    (0x11937, 'V'),
+    (0x11939, 'X'),
+    (0x1193B, 'V'),
+    (0x11947, 'X'),
+    (0x11950, 'V'),
+    (0x1195A, 'X'),
+    (0x119A0, 'V'),
+    (0x119A8, 'X'),
+    (0x119AA, 'V'),
+    (0x119D8, 'X'),
+    (0x119DA, 'V'),
+    (0x119E5, 'X'),
+    (0x11A00, 'V'),
+    (0x11A48, 'X'),
+    (0x11A50, 'V'),
+    (0x11AA3, 'X'),
+    (0x11AB0, 'V'),
+    (0x11AF9, 'X'),
+    (0x11B00, 'V'),
+    (0x11B0A, 'X'),
+    (0x11C00, 'V'),
+    (0x11C09, 'X'),
+    (0x11C0A, 'V'),
+    (0x11C37, 'X'),
+    (0x11C38, 'V'),
+    (0x11C46, 'X'),
+    (0x11C50, 'V'),
+    (0x11C6D, 'X'),
+    (0x11C70, 'V'),
+    (0x11C90, 'X'),
+    (0x11C92, 'V'),
+    (0x11CA8, 'X'),
+    (0x11CA9, 'V'),
+    (0x11CB7, 'X'),
+    (0x11D00, 'V'),
+    (0x11D07, 'X'),
+    (0x11D08, 'V'),
+    (0x11D0A, 'X'),
+    (0x11D0B, 'V'),
+    (0x11D37, 'X'),
+    (0x11D3A, 'V'),
+    (0x11D3B, 'X'),
+    (0x11D3C, 'V'),
+    (0x11D3E, 'X'),
+    (0x11D3F, 'V'),
+    (0x11D48, 'X'),
+    (0x11D50, 'V'),
+    (0x11D5A, 'X'),
+    (0x11D60, 'V'),
+    (0x11D66, 'X'),
+    (0x11D67, 'V'),
+    (0x11D69, 'X'),
+    (0x11D6A, 'V'),
+    (0x11D8F, 'X'),
+    (0x11D90, 'V'),
+    (0x11D92, 'X'),
+    (0x11D93, 'V'),
+    (0x11D99, 'X'),
+    (0x11DA0, 'V'),
+    (0x11DAA, 'X'),
+    (0x11EE0, 'V'),
+    (0x11EF9, 'X'),
+    (0x11F00, 'V'),
+    ]
+
+def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x11F11, 'X'),
+    (0x11F12, 'V'),
+    (0x11F3B, 'X'),
+    (0x11F3E, 'V'),
+    (0x11F5A, 'X'),
+    (0x11FB0, 'V'),
+    (0x11FB1, 'X'),
+    (0x11FC0, 'V'),
+    (0x11FF2, 'X'),
+    (0x11FFF, 'V'),
+    (0x1239A, 'X'),
+    (0x12400, 'V'),
+    (0x1246F, 'X'),
+    (0x12470, 'V'),
+    (0x12475, 'X'),
+    (0x12480, 'V'),
+    (0x12544, 'X'),
+    (0x12F90, 'V'),
+    (0x12FF3, 'X'),
+    (0x13000, 'V'),
+    (0x13430, 'X'),
+    (0x13440, 'V'),
+    (0x13456, 'X'),
+    (0x14400, 'V'),
+    (0x14647, 'X'),
+    (0x16800, 'V'),
+    (0x16A39, 'X'),
+    (0x16A40, 'V'),
+    (0x16A5F, 'X'),
+    (0x16A60, 'V'),
+    (0x16A6A, 'X'),
+    (0x16A6E, 'V'),
+    (0x16ABF, 'X'),
+    (0x16AC0, 'V'),
+    (0x16ACA, 'X'),
+    (0x16AD0, 'V'),
+    (0x16AEE, 'X'),
+    (0x16AF0, 'V'),
+    (0x16AF6, 'X'),
+    (0x16B00, 'V'),
+    (0x16B46, 'X'),
+    (0x16B50, 'V'),
+    (0x16B5A, 'X'),
+    (0x16B5B, 'V'),
+    (0x16B62, 'X'),
+    (0x16B63, 'V'),
+    (0x16B78, 'X'),
+    (0x16B7D, 'V'),
+    (0x16B90, 'X'),
+    (0x16E40, 'M', '𖹠'),
+    (0x16E41, 'M', '𖹡'),
+    (0x16E42, 'M', '𖹢'),
+    (0x16E43, 'M', '𖹣'),
+    (0x16E44, 'M', '𖹤'),
+    (0x16E45, 'M', '𖹥'),
+    (0x16E46, 'M', '𖹦'),
+    (0x16E47, 'M', '𖹧'),
+    (0x16E48, 'M', '𖹨'),
+    (0x16E49, 'M', '𖹩'),
+    (0x16E4A, 'M', '𖹪'),
+    (0x16E4B, 'M', '𖹫'),
+    (0x16E4C, 'M', '𖹬'),
+    (0x16E4D, 'M', '𖹭'),
+    (0x16E4E, 'M', '𖹮'),
+    (0x16E4F, 'M', '𖹯'),
+    (0x16E50, 'M', '𖹰'),
+    (0x16E51, 'M', '𖹱'),
+    (0x16E52, 'M', '𖹲'),
+    (0x16E53, 'M', '𖹳'),
+    (0x16E54, 'M', '𖹴'),
+    (0x16E55, 'M', '𖹵'),
+    (0x16E56, 'M', '𖹶'),
+    (0x16E57, 'M', '𖹷'),
+    (0x16E58, 'M', '𖹸'),
+    (0x16E59, 'M', '𖹹'),
+    (0x16E5A, 'M', '𖹺'),
+    (0x16E5B, 'M', '𖹻'),
+    (0x16E5C, 'M', '𖹼'),
+    (0x16E5D, 'M', '𖹽'),
+    (0x16E5E, 'M', '𖹾'),
+    (0x16E5F, 'M', '𖹿'),
+    (0x16E60, 'V'),
+    (0x16E9B, 'X'),
+    (0x16F00, 'V'),
+    (0x16F4B, 'X'),
+    (0x16F4F, 'V'),
+    (0x16F88, 'X'),
+    (0x16F8F, 'V'),
+    (0x16FA0, 'X'),
+    (0x16FE0, 'V'),
+    (0x16FE5, 'X'),
+    (0x16FF0, 'V'),
+    (0x16FF2, 'X'),
+    (0x17000, 'V'),
+    (0x187F8, 'X'),
+    (0x18800, 'V'),
+    (0x18CD6, 'X'),
+    (0x18D00, 'V'),
+    (0x18D09, 'X'),
+    (0x1AFF0, 'V'),
+    ]
+
+def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1AFF4, 'X'),
+    (0x1AFF5, 'V'),
+    (0x1AFFC, 'X'),
+    (0x1AFFD, 'V'),
+    (0x1AFFF, 'X'),
+    (0x1B000, 'V'),
+    (0x1B123, 'X'),
+    (0x1B132, 'V'),
+    (0x1B133, 'X'),
+    (0x1B150, 'V'),
+    (0x1B153, 'X'),
+    (0x1B155, 'V'),
+    (0x1B156, 'X'),
+    (0x1B164, 'V'),
+    (0x1B168, 'X'),
+    (0x1B170, 'V'),
+    (0x1B2FC, 'X'),
+    (0x1BC00, 'V'),
+    (0x1BC6B, 'X'),
+    (0x1BC70, 'V'),
+    (0x1BC7D, 'X'),
+    (0x1BC80, 'V'),
+    (0x1BC89, 'X'),
+    (0x1BC90, 'V'),
+    (0x1BC9A, 'X'),
+    (0x1BC9C, 'V'),
+    (0x1BCA0, 'I'),
+    (0x1BCA4, 'X'),
+    (0x1CF00, 'V'),
+    (0x1CF2E, 'X'),
+    (0x1CF30, 'V'),
+    (0x1CF47, 'X'),
+    (0x1CF50, 'V'),
+    (0x1CFC4, 'X'),
+    (0x1D000, 'V'),
+    (0x1D0F6, 'X'),
+    (0x1D100, 'V'),
+    (0x1D127, 'X'),
+    (0x1D129, 'V'),
+    (0x1D15E, 'M', '𝅗𝅥'),
+    (0x1D15F, 'M', '𝅘𝅥'),
+    (0x1D160, 'M', '𝅘𝅥𝅮'),
+    (0x1D161, 'M', '𝅘𝅥𝅯'),
+    (0x1D162, 'M', '𝅘𝅥𝅰'),
+    (0x1D163, 'M', '𝅘𝅥𝅱'),
+    (0x1D164, 'M', '𝅘𝅥𝅲'),
+    (0x1D165, 'V'),
+    (0x1D173, 'X'),
+    (0x1D17B, 'V'),
+    (0x1D1BB, 'M', '𝆹𝅥'),
+    (0x1D1BC, 'M', '𝆺𝅥'),
+    (0x1D1BD, 'M', '𝆹𝅥𝅮'),
+    (0x1D1BE, 'M', '𝆺𝅥𝅮'),
+    (0x1D1BF, 'M', '𝆹𝅥𝅯'),
+    (0x1D1C0, 'M', '𝆺𝅥𝅯'),
+    (0x1D1C1, 'V'),
+    (0x1D1EB, 'X'),
+    (0x1D200, 'V'),
+    (0x1D246, 'X'),
+    (0x1D2C0, 'V'),
+    (0x1D2D4, 'X'),
+    (0x1D2E0, 'V'),
+    (0x1D2F4, 'X'),
+    (0x1D300, 'V'),
+    (0x1D357, 'X'),
+    (0x1D360, 'V'),
+    (0x1D379, 'X'),
+    (0x1D400, 'M', 'a'),
+    (0x1D401, 'M', 'b'),
+    (0x1D402, 'M', 'c'),
+    (0x1D403, 'M', 'd'),
+    (0x1D404, 'M', 'e'),
+    (0x1D405, 'M', 'f'),
+    (0x1D406, 'M', 'g'),
+    (0x1D407, 'M', 'h'),
+    (0x1D408, 'M', 'i'),
+    (0x1D409, 'M', 'j'),
+    (0x1D40A, 'M', 'k'),
+    (0x1D40B, 'M', 'l'),
+    (0x1D40C, 'M', 'm'),
+    (0x1D40D, 'M', 'n'),
+    (0x1D40E, 'M', 'o'),
+    (0x1D40F, 'M', 'p'),
+    (0x1D410, 'M', 'q'),
+    (0x1D411, 'M', 'r'),
+    (0x1D412, 'M', 's'),
+    (0x1D413, 'M', 't'),
+    (0x1D414, 'M', 'u'),
+    (0x1D415, 'M', 'v'),
+    (0x1D416, 'M', 'w'),
+    (0x1D417, 'M', 'x'),
+    (0x1D418, 'M', 'y'),
+    (0x1D419, 'M', 'z'),
+    (0x1D41A, 'M', 'a'),
+    (0x1D41B, 'M', 'b'),
+    (0x1D41C, 'M', 'c'),
+    (0x1D41D, 'M', 'd'),
+    (0x1D41E, 'M', 'e'),
+    (0x1D41F, 'M', 'f'),
+    (0x1D420, 'M', 'g'),
+    ]
+
+def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1D421, 'M', 'h'),
+    (0x1D422, 'M', 'i'),
+    (0x1D423, 'M', 'j'),
+    (0x1D424, 'M', 'k'),
+    (0x1D425, 'M', 'l'),
+    (0x1D426, 'M', 'm'),
+    (0x1D427, 'M', 'n'),
+    (0x1D428, 'M', 'o'),
+    (0x1D429, 'M', 'p'),
+    (0x1D42A, 'M', 'q'),
+    (0x1D42B, 'M', 'r'),
+    (0x1D42C, 'M', 's'),
+    (0x1D42D, 'M', 't'),
+    (0x1D42E, 'M', 'u'),
+    (0x1D42F, 'M', 'v'),
+    (0x1D430, 'M', 'w'),
+    (0x1D431, 'M', 'x'),
+    (0x1D432, 'M', 'y'),
+    (0x1D433, 'M', 'z'),
+    (0x1D434, 'M', 'a'),
+    (0x1D435, 'M', 'b'),
+    (0x1D436, 'M', 'c'),
+    (0x1D437, 'M', 'd'),
+    (0x1D438, 'M', 'e'),
+    (0x1D439, 'M', 'f'),
+    (0x1D43A, 'M', 'g'),
+    (0x1D43B, 'M', 'h'),
+    (0x1D43C, 'M', 'i'),
+    (0x1D43D, 'M', 'j'),
+    (0x1D43E, 'M', 'k'),
+    (0x1D43F, 'M', 'l'),
+    (0x1D440, 'M', 'm'),
+    (0x1D441, 'M', 'n'),
+    (0x1D442, 'M', 'o'),
+    (0x1D443, 'M', 'p'),
+    (0x1D444, 'M', 'q'),
+    (0x1D445, 'M', 'r'),
+    (0x1D446, 'M', 's'),
+    (0x1D447, 'M', 't'),
+    (0x1D448, 'M', 'u'),
+    (0x1D449, 'M', 'v'),
+    (0x1D44A, 'M', 'w'),
+    (0x1D44B, 'M', 'x'),
+    (0x1D44C, 'M', 'y'),
+    (0x1D44D, 'M', 'z'),
+    (0x1D44E, 'M', 'a'),
+    (0x1D44F, 'M', 'b'),
+    (0x1D450, 'M', 'c'),
+    (0x1D451, 'M', 'd'),
+    (0x1D452, 'M', 'e'),
+    (0x1D453, 'M', 'f'),
+    (0x1D454, 'M', 'g'),
+    (0x1D455, 'X'),
+    (0x1D456, 'M', 'i'),
+    (0x1D457, 'M', 'j'),
+    (0x1D458, 'M', 'k'),
+    (0x1D459, 'M', 'l'),
+    (0x1D45A, 'M', 'm'),
+    (0x1D45B, 'M', 'n'),
+    (0x1D45C, 'M', 'o'),
+    (0x1D45D, 'M', 'p'),
+    (0x1D45E, 'M', 'q'),
+    (0x1D45F, 'M', 'r'),
+    (0x1D460, 'M', 's'),
+    (0x1D461, 'M', 't'),
+    (0x1D462, 'M', 'u'),
+    (0x1D463, 'M', 'v'),
+    (0x1D464, 'M', 'w'),
+    (0x1D465, 'M', 'x'),
+    (0x1D466, 'M', 'y'),
+    (0x1D467, 'M', 'z'),
+    (0x1D468, 'M', 'a'),
+    (0x1D469, 'M', 'b'),
+    (0x1D46A, 'M', 'c'),
+    (0x1D46B, 'M', 'd'),
+    (0x1D46C, 'M', 'e'),
+    (0x1D46D, 'M', 'f'),
+    (0x1D46E, 'M', 'g'),
+    (0x1D46F, 'M', 'h'),
+    (0x1D470, 'M', 'i'),
+    (0x1D471, 'M', 'j'),
+    (0x1D472, 'M', 'k'),
+    (0x1D473, 'M', 'l'),
+    (0x1D474, 'M', 'm'),
+    (0x1D475, 'M', 'n'),
+    (0x1D476, 'M', 'o'),
+    (0x1D477, 'M', 'p'),
+    (0x1D478, 'M', 'q'),
+    (0x1D479, 'M', 'r'),
+    (0x1D47A, 'M', 's'),
+    (0x1D47B, 'M', 't'),
+    (0x1D47C, 'M', 'u'),
+    (0x1D47D, 'M', 'v'),
+    (0x1D47E, 'M', 'w'),
+    (0x1D47F, 'M', 'x'),
+    (0x1D480, 'M', 'y'),
+    (0x1D481, 'M', 'z'),
+    (0x1D482, 'M', 'a'),
+    (0x1D483, 'M', 'b'),
+    (0x1D484, 'M', 'c'),
+    ]
+
+def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1D485, 'M', 'd'),
+    (0x1D486, 'M', 'e'),
+    (0x1D487, 'M', 'f'),
+    (0x1D488, 'M', 'g'),
+    (0x1D489, 'M', 'h'),
+    (0x1D48A, 'M', 'i'),
+    (0x1D48B, 'M', 'j'),
+    (0x1D48C, 'M', 'k'),
+    (0x1D48D, 'M', 'l'),
+    (0x1D48E, 'M', 'm'),
+    (0x1D48F, 'M', 'n'),
+    (0x1D490, 'M', 'o'),
+    (0x1D491, 'M', 'p'),
+    (0x1D492, 'M', 'q'),
+    (0x1D493, 'M', 'r'),
+    (0x1D494, 'M', 's'),
+    (0x1D495, 'M', 't'),
+    (0x1D496, 'M', 'u'),
+    (0x1D497, 'M', 'v'),
+    (0x1D498, 'M', 'w'),
+    (0x1D499, 'M', 'x'),
+    (0x1D49A, 'M', 'y'),
+    (0x1D49B, 'M', 'z'),
+    (0x1D49C, 'M', 'a'),
+    (0x1D49D, 'X'),
+    (0x1D49E, 'M', 'c'),
+    (0x1D49F, 'M', 'd'),
+    (0x1D4A0, 'X'),
+    (0x1D4A2, 'M', 'g'),
+    (0x1D4A3, 'X'),
+    (0x1D4A5, 'M', 'j'),
+    (0x1D4A6, 'M', 'k'),
+    (0x1D4A7, 'X'),
+    (0x1D4A9, 'M', 'n'),
+    (0x1D4AA, 'M', 'o'),
+    (0x1D4AB, 'M', 'p'),
+    (0x1D4AC, 'M', 'q'),
+    (0x1D4AD, 'X'),
+    (0x1D4AE, 'M', 's'),
+    (0x1D4AF, 'M', 't'),
+    (0x1D4B0, 'M', 'u'),
+    (0x1D4B1, 'M', 'v'),
+    (0x1D4B2, 'M', 'w'),
+    (0x1D4B3, 'M', 'x'),
+    (0x1D4B4, 'M', 'y'),
+    (0x1D4B5, 'M', 'z'),
+    (0x1D4B6, 'M', 'a'),
+    (0x1D4B7, 'M', 'b'),
+    (0x1D4B8, 'M', 'c'),
+    (0x1D4B9, 'M', 'd'),
+    (0x1D4BA, 'X'),
+    (0x1D4BB, 'M', 'f'),
+    (0x1D4BC, 'X'),
+    (0x1D4BD, 'M', 'h'),
+    (0x1D4BE, 'M', 'i'),
+    (0x1D4BF, 'M', 'j'),
+    (0x1D4C0, 'M', 'k'),
+    (0x1D4C1, 'M', 'l'),
+    (0x1D4C2, 'M', 'm'),
+    (0x1D4C3, 'M', 'n'),
+    (0x1D4C4, 'X'),
+    (0x1D4C5, 'M', 'p'),
+    (0x1D4C6, 'M', 'q'),
+    (0x1D4C7, 'M', 'r'),
+    (0x1D4C8, 'M', 's'),
+    (0x1D4C9, 'M', 't'),
+    (0x1D4CA, 'M', 'u'),
+    (0x1D4CB, 'M', 'v'),
+    (0x1D4CC, 'M', 'w'),
+    (0x1D4CD, 'M', 'x'),
+    (0x1D4CE, 'M', 'y'),
+    (0x1D4CF, 'M', 'z'),
+    (0x1D4D0, 'M', 'a'),
+    (0x1D4D1, 'M', 'b'),
+    (0x1D4D2, 'M', 'c'),
+    (0x1D4D3, 'M', 'd'),
+    (0x1D4D4, 'M', 'e'),
+    (0x1D4D5, 'M', 'f'),
+    (0x1D4D6, 'M', 'g'),
+    (0x1D4D7, 'M', 'h'),
+    (0x1D4D8, 'M', 'i'),
+    (0x1D4D9, 'M', 'j'),
+    (0x1D4DA, 'M', 'k'),
+    (0x1D4DB, 'M', 'l'),
+    (0x1D4DC, 'M', 'm'),
+    (0x1D4DD, 'M', 'n'),
+    (0x1D4DE, 'M', 'o'),
+    (0x1D4DF, 'M', 'p'),
+    (0x1D4E0, 'M', 'q'),
+    (0x1D4E1, 'M', 'r'),
+    (0x1D4E2, 'M', 's'),
+    (0x1D4E3, 'M', 't'),
+    (0x1D4E4, 'M', 'u'),
+    (0x1D4E5, 'M', 'v'),
+    (0x1D4E6, 'M', 'w'),
+    (0x1D4E7, 'M', 'x'),
+    (0x1D4E8, 'M', 'y'),
+    (0x1D4E9, 'M', 'z'),
+    (0x1D4EA, 'M', 'a'),
+    (0x1D4EB, 'M', 'b'),
+    ]
+
+def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1D4EC, 'M', 'c'),
+    (0x1D4ED, 'M', 'd'),
+    (0x1D4EE, 'M', 'e'),
+    (0x1D4EF, 'M', 'f'),
+    (0x1D4F0, 'M', 'g'),
+    (0x1D4F1, 'M', 'h'),
+    (0x1D4F2, 'M', 'i'),
+    (0x1D4F3, 'M', 'j'),
+    (0x1D4F4, 'M', 'k'),
+    (0x1D4F5, 'M', 'l'),
+    (0x1D4F6, 'M', 'm'),
+    (0x1D4F7, 'M', 'n'),
+    (0x1D4F8, 'M', 'o'),
+    (0x1D4F9, 'M', 'p'),
+    (0x1D4FA, 'M', 'q'),
+    (0x1D4FB, 'M', 'r'),
+    (0x1D4FC, 'M', 's'),
+    (0x1D4FD, 'M', 't'),
+    (0x1D4FE, 'M', 'u'),
+    (0x1D4FF, 'M', 'v'),
+    (0x1D500, 'M', 'w'),
+    (0x1D501, 'M', 'x'),
+    (0x1D502, 'M', 'y'),
+    (0x1D503, 'M', 'z'),
+    (0x1D504, 'M', 'a'),
+    (0x1D505, 'M', 'b'),
+    (0x1D506, 'X'),
+    (0x1D507, 'M', 'd'),
+    (0x1D508, 'M', 'e'),
+    (0x1D509, 'M', 'f'),
+    (0x1D50A, 'M', 'g'),
+    (0x1D50B, 'X'),
+    (0x1D50D, 'M', 'j'),
+    (0x1D50E, 'M', 'k'),
+    (0x1D50F, 'M', 'l'),
+    (0x1D510, 'M', 'm'),
+    (0x1D511, 'M', 'n'),
+    (0x1D512, 'M', 'o'),
+    (0x1D513, 'M', 'p'),
+    (0x1D514, 'M', 'q'),
+    (0x1D515, 'X'),
+    (0x1D516, 'M', 's'),
+    (0x1D517, 'M', 't'),
+    (0x1D518, 'M', 'u'),
+    (0x1D519, 'M', 'v'),
+    (0x1D51A, 'M', 'w'),
+    (0x1D51B, 'M', 'x'),
+    (0x1D51C, 'M', 'y'),
+    (0x1D51D, 'X'),
+    (0x1D51E, 'M', 'a'),
+    (0x1D51F, 'M', 'b'),
+    (0x1D520, 'M', 'c'),
+    (0x1D521, 'M', 'd'),
+    (0x1D522, 'M', 'e'),
+    (0x1D523, 'M', 'f'),
+    (0x1D524, 'M', 'g'),
+    (0x1D525, 'M', 'h'),
+    (0x1D526, 'M', 'i'),
+    (0x1D527, 'M', 'j'),
+    (0x1D528, 'M', 'k'),
+    (0x1D529, 'M', 'l'),
+    (0x1D52A, 'M', 'm'),
+    (0x1D52B, 'M', 'n'),
+    (0x1D52C, 'M', 'o'),
+    (0x1D52D, 'M', 'p'),
+    (0x1D52E, 'M', 'q'),
+    (0x1D52F, 'M', 'r'),
+    (0x1D530, 'M', 's'),
+    (0x1D531, 'M', 't'),
+    (0x1D532, 'M', 'u'),
+    (0x1D533, 'M', 'v'),
+    (0x1D534, 'M', 'w'),
+    (0x1D535, 'M', 'x'),
+    (0x1D536, 'M', 'y'),
+    (0x1D537, 'M', 'z'),
+    (0x1D538, 'M', 'a'),
+    (0x1D539, 'M', 'b'),
+    (0x1D53A, 'X'),
+    (0x1D53B, 'M', 'd'),
+    (0x1D53C, 'M', 'e'),
+    (0x1D53D, 'M', 'f'),
+    (0x1D53E, 'M', 'g'),
+    (0x1D53F, 'X'),
+    (0x1D540, 'M', 'i'),
+    (0x1D541, 'M', 'j'),
+    (0x1D542, 'M', 'k'),
+    (0x1D543, 'M', 'l'),
+    (0x1D544, 'M', 'm'),
+    (0x1D545, 'X'),
+    (0x1D546, 'M', 'o'),
+    (0x1D547, 'X'),
+    (0x1D54A, 'M', 's'),
+    (0x1D54B, 'M', 't'),
+    (0x1D54C, 'M', 'u'),
+    (0x1D54D, 'M', 'v'),
+    (0x1D54E, 'M', 'w'),
+    (0x1D54F, 'M', 'x'),
+    (0x1D550, 'M', 'y'),
+    (0x1D551, 'X'),
+    (0x1D552, 'M', 'a'),
+    ]
+
+def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1D553, 'M', 'b'),
+    (0x1D554, 'M', 'c'),
+    (0x1D555, 'M', 'd'),
+    (0x1D556, 'M', 'e'),
+    (0x1D557, 'M', 'f'),
+    (0x1D558, 'M', 'g'),
+    (0x1D559, 'M', 'h'),
+    (0x1D55A, 'M', 'i'),
+    (0x1D55B, 'M', 'j'),
+    (0x1D55C, 'M', 'k'),
+    (0x1D55D, 'M', 'l'),
+    (0x1D55E, 'M', 'm'),
+    (0x1D55F, 'M', 'n'),
+    (0x1D560, 'M', 'o'),
+    (0x1D561, 'M', 'p'),
+    (0x1D562, 'M', 'q'),
+    (0x1D563, 'M', 'r'),
+    (0x1D564, 'M', 's'),
+    (0x1D565, 'M', 't'),
+    (0x1D566, 'M', 'u'),
+    (0x1D567, 'M', 'v'),
+    (0x1D568, 'M', 'w'),
+    (0x1D569, 'M', 'x'),
+    (0x1D56A, 'M', 'y'),
+    (0x1D56B, 'M', 'z'),
+    (0x1D56C, 'M', 'a'),
+    (0x1D56D, 'M', 'b'),
+    (0x1D56E, 'M', 'c'),
+    (0x1D56F, 'M', 'd'),
+    (0x1D570, 'M', 'e'),
+    (0x1D571, 'M', 'f'),
+    (0x1D572, 'M', 'g'),
+    (0x1D573, 'M', 'h'),
+    (0x1D574, 'M', 'i'),
+    (0x1D575, 'M', 'j'),
+    (0x1D576, 'M', 'k'),
+    (0x1D577, 'M', 'l'),
+    (0x1D578, 'M', 'm'),
+    (0x1D579, 'M', 'n'),
+    (0x1D57A, 'M', 'o'),
+    (0x1D57B, 'M', 'p'),
+    (0x1D57C, 'M', 'q'),
+    (0x1D57D, 'M', 'r'),
+    (0x1D57E, 'M', 's'),
+    (0x1D57F, 'M', 't'),
+    (0x1D580, 'M', 'u'),
+    (0x1D581, 'M', 'v'),
+    (0x1D582, 'M', 'w'),
+    (0x1D583, 'M', 'x'),
+    (0x1D584, 'M', 'y'),
+    (0x1D585, 'M', 'z'),
+    (0x1D586, 'M', 'a'),
+    (0x1D587, 'M', 'b'),
+    (0x1D588, 'M', 'c'),
+    (0x1D589, 'M', 'd'),
+    (0x1D58A, 'M', 'e'),
+    (0x1D58B, 'M', 'f'),
+    (0x1D58C, 'M', 'g'),
+    (0x1D58D, 'M', 'h'),
+    (0x1D58E, 'M', 'i'),
+    (0x1D58F, 'M', 'j'),
+    (0x1D590, 'M', 'k'),
+    (0x1D591, 'M', 'l'),
+    (0x1D592, 'M', 'm'),
+    (0x1D593, 'M', 'n'),
+    (0x1D594, 'M', 'o'),
+    (0x1D595, 'M', 'p'),
+    (0x1D596, 'M', 'q'),
+    (0x1D597, 'M', 'r'),
+    (0x1D598, 'M', 's'),
+    (0x1D599, 'M', 't'),
+    (0x1D59A, 'M', 'u'),
+    (0x1D59B, 'M', 'v'),
+    (0x1D59C, 'M', 'w'),
+    (0x1D59D, 'M', 'x'),
+    (0x1D59E, 'M', 'y'),
+    (0x1D59F, 'M', 'z'),
+    (0x1D5A0, 'M', 'a'),
+    (0x1D5A1, 'M', 'b'),
+    (0x1D5A2, 'M', 'c'),
+    (0x1D5A3, 'M', 'd'),
+    (0x1D5A4, 'M', 'e'),
+    (0x1D5A5, 'M', 'f'),
+    (0x1D5A6, 'M', 'g'),
+    (0x1D5A7, 'M', 'h'),
+    (0x1D5A8, 'M', 'i'),
+    (0x1D5A9, 'M', 'j'),
+    (0x1D5AA, 'M', 'k'),
+    (0x1D5AB, 'M', 'l'),
+    (0x1D5AC, 'M', 'm'),
+    (0x1D5AD, 'M', 'n'),
+    (0x1D5AE, 'M', 'o'),
+    (0x1D5AF, 'M', 'p'),
+    (0x1D5B0, 'M', 'q'),
+    (0x1D5B1, 'M', 'r'),
+    (0x1D5B2, 'M', 's'),
+    (0x1D5B3, 'M', 't'),
+    (0x1D5B4, 'M', 'u'),
+    (0x1D5B5, 'M', 'v'),
+    (0x1D5B6, 'M', 'w'),
+    ]
+
+def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1D5B7, 'M', 'x'),
+    (0x1D5B8, 'M', 'y'),
+    (0x1D5B9, 'M', 'z'),
+    (0x1D5BA, 'M', 'a'),
+    (0x1D5BB, 'M', 'b'),
+    (0x1D5BC, 'M', 'c'),
+    (0x1D5BD, 'M', 'd'),
+    (0x1D5BE, 'M', 'e'),
+    (0x1D5BF, 'M', 'f'),
+    (0x1D5C0, 'M', 'g'),
+    (0x1D5C1, 'M', 'h'),
+    (0x1D5C2, 'M', 'i'),
+    (0x1D5C3, 'M', 'j'),
+    (0x1D5C4, 'M', 'k'),
+    (0x1D5C5, 'M', 'l'),
+    (0x1D5C6, 'M', 'm'),
+    (0x1D5C7, 'M', 'n'),
+    (0x1D5C8, 'M', 'o'),
+    (0x1D5C9, 'M', 'p'),
+    (0x1D5CA, 'M', 'q'),
+    (0x1D5CB, 'M', 'r'),
+    (0x1D5CC, 'M', 's'),
+    (0x1D5CD, 'M', 't'),
+    (0x1D5CE, 'M', 'u'),
+    (0x1D5CF, 'M', 'v'),
+    (0x1D5D0, 'M', 'w'),
+    (0x1D5D1, 'M', 'x'),
+    (0x1D5D2, 'M', 'y'),
+    (0x1D5D3, 'M', 'z'),
+    (0x1D5D4, 'M', 'a'),
+    (0x1D5D5, 'M', 'b'),
+    (0x1D5D6, 'M', 'c'),
+    (0x1D5D7, 'M', 'd'),
+    (0x1D5D8, 'M', 'e'),
+    (0x1D5D9, 'M', 'f'),
+    (0x1D5DA, 'M', 'g'),
+    (0x1D5DB, 'M', 'h'),
+    (0x1D5DC, 'M', 'i'),
+    (0x1D5DD, 'M', 'j'),
+    (0x1D5DE, 'M', 'k'),
+    (0x1D5DF, 'M', 'l'),
+    (0x1D5E0, 'M', 'm'),
+    (0x1D5E1, 'M', 'n'),
+    (0x1D5E2, 'M', 'o'),
+    (0x1D5E3, 'M', 'p'),
+    (0x1D5E4, 'M', 'q'),
+    (0x1D5E5, 'M', 'r'),
+    (0x1D5E6, 'M', 's'),
+    (0x1D5E7, 'M', 't'),
+    (0x1D5E8, 'M', 'u'),
+    (0x1D5E9, 'M', 'v'),
+    (0x1D5EA, 'M', 'w'),
+    (0x1D5EB, 'M', 'x'),
+    (0x1D5EC, 'M', 'y'),
+    (0x1D5ED, 'M', 'z'),
+    (0x1D5EE, 'M', 'a'),
+    (0x1D5EF, 'M', 'b'),
+    (0x1D5F0, 'M', 'c'),
+    (0x1D5F1, 'M', 'd'),
+    (0x1D5F2, 'M', 'e'),
+    (0x1D5F3, 'M', 'f'),
+    (0x1D5F4, 'M', 'g'),
+    (0x1D5F5, 'M', 'h'),
+    (0x1D5F6, 'M', 'i'),
+    (0x1D5F7, 'M', 'j'),
+    (0x1D5F8, 'M', 'k'),
+    (0x1D5F9, 'M', 'l'),
+    (0x1D5FA, 'M', 'm'),
+    (0x1D5FB, 'M', 'n'),
+    (0x1D5FC, 'M', 'o'),
+    (0x1D5FD, 'M', 'p'),
+    (0x1D5FE, 'M', 'q'),
+    (0x1D5FF, 'M', 'r'),
+    (0x1D600, 'M', 's'),
+    (0x1D601, 'M', 't'),
+    (0x1D602, 'M', 'u'),
+    (0x1D603, 'M', 'v'),
+    (0x1D604, 'M', 'w'),
+    (0x1D605, 'M', 'x'),
+    (0x1D606, 'M', 'y'),
+    (0x1D607, 'M', 'z'),
+    (0x1D608, 'M', 'a'),
+    (0x1D609, 'M', 'b'),
+    (0x1D60A, 'M', 'c'),
+    (0x1D60B, 'M', 'd'),
+    (0x1D60C, 'M', 'e'),
+    (0x1D60D, 'M', 'f'),
+    (0x1D60E, 'M', 'g'),
+    (0x1D60F, 'M', 'h'),
+    (0x1D610, 'M', 'i'),
+    (0x1D611, 'M', 'j'),
+    (0x1D612, 'M', 'k'),
+    (0x1D613, 'M', 'l'),
+    (0x1D614, 'M', 'm'),
+    (0x1D615, 'M', 'n'),
+    (0x1D616, 'M', 'o'),
+    (0x1D617, 'M', 'p'),
+    (0x1D618, 'M', 'q'),
+    (0x1D619, 'M', 'r'),
+    (0x1D61A, 'M', 's'),
+    ]
+
+def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1D61B, 'M', 't'),
+    (0x1D61C, 'M', 'u'),
+    (0x1D61D, 'M', 'v'),
+    (0x1D61E, 'M', 'w'),
+    (0x1D61F, 'M', 'x'),
+    (0x1D620, 'M', 'y'),
+    (0x1D621, 'M', 'z'),
+    (0x1D622, 'M', 'a'),
+    (0x1D623, 'M', 'b'),
+    (0x1D624, 'M', 'c'),
+    (0x1D625, 'M', 'd'),
+    (0x1D626, 'M', 'e'),
+    (0x1D627, 'M', 'f'),
+    (0x1D628, 'M', 'g'),
+    (0x1D629, 'M', 'h'),
+    (0x1D62A, 'M', 'i'),
+    (0x1D62B, 'M', 'j'),
+    (0x1D62C, 'M', 'k'),
+    (0x1D62D, 'M', 'l'),
+    (0x1D62E, 'M', 'm'),
+    (0x1D62F, 'M', 'n'),
+    (0x1D630, 'M', 'o'),
+    (0x1D631, 'M', 'p'),
+    (0x1D632, 'M', 'q'),
+    (0x1D633, 'M', 'r'),
+    (0x1D634, 'M', 's'),
+    (0x1D635, 'M', 't'),
+    (0x1D636, 'M', 'u'),
+    (0x1D637, 'M', 'v'),
+    (0x1D638, 'M', 'w'),
+    (0x1D639, 'M', 'x'),
+    (0x1D63A, 'M', 'y'),
+    (0x1D63B, 'M', 'z'),
+    (0x1D63C, 'M', 'a'),
+    (0x1D63D, 'M', 'b'),
+    (0x1D63E, 'M', 'c'),
+    (0x1D63F, 'M', 'd'),
+    (0x1D640, 'M', 'e'),
+    (0x1D641, 'M', 'f'),
+    (0x1D642, 'M', 'g'),
+    (0x1D643, 'M', 'h'),
+    (0x1D644, 'M', 'i'),
+    (0x1D645, 'M', 'j'),
+    (0x1D646, 'M', 'k'),
+    (0x1D647, 'M', 'l'),
+    (0x1D648, 'M', 'm'),
+    (0x1D649, 'M', 'n'),
+    (0x1D64A, 'M', 'o'),
+    (0x1D64B, 'M', 'p'),
+    (0x1D64C, 'M', 'q'),
+    (0x1D64D, 'M', 'r'),
+    (0x1D64E, 'M', 's'),
+    (0x1D64F, 'M', 't'),
+    (0x1D650, 'M', 'u'),
+    (0x1D651, 'M', 'v'),
+    (0x1D652, 'M', 'w'),
+    (0x1D653, 'M', 'x'),
+    (0x1D654, 'M', 'y'),
+    (0x1D655, 'M', 'z'),
+    (0x1D656, 'M', 'a'),
+    (0x1D657, 'M', 'b'),
+    (0x1D658, 'M', 'c'),
+    (0x1D659, 'M', 'd'),
+    (0x1D65A, 'M', 'e'),
+    (0x1D65B, 'M', 'f'),
+    (0x1D65C, 'M', 'g'),
+    (0x1D65D, 'M', 'h'),
+    (0x1D65E, 'M', 'i'),
+    (0x1D65F, 'M', 'j'),
+    (0x1D660, 'M', 'k'),
+    (0x1D661, 'M', 'l'),
+    (0x1D662, 'M', 'm'),
+    (0x1D663, 'M', 'n'),
+    (0x1D664, 'M', 'o'),
+    (0x1D665, 'M', 'p'),
+    (0x1D666, 'M', 'q'),
+    (0x1D667, 'M', 'r'),
+    (0x1D668, 'M', 's'),
+    (0x1D669, 'M', 't'),
+    (0x1D66A, 'M', 'u'),
+    (0x1D66B, 'M', 'v'),
+    (0x1D66C, 'M', 'w'),
+    (0x1D66D, 'M', 'x'),
+    (0x1D66E, 'M', 'y'),
+    (0x1D66F, 'M', 'z'),
+    (0x1D670, 'M', 'a'),
+    (0x1D671, 'M', 'b'),
+    (0x1D672, 'M', 'c'),
+    (0x1D673, 'M', 'd'),
+    (0x1D674, 'M', 'e'),
+    (0x1D675, 'M', 'f'),
+    (0x1D676, 'M', 'g'),
+    (0x1D677, 'M', 'h'),
+    (0x1D678, 'M', 'i'),
+    (0x1D679, 'M', 'j'),
+    (0x1D67A, 'M', 'k'),
+    (0x1D67B, 'M', 'l'),
+    (0x1D67C, 'M', 'm'),
+    (0x1D67D, 'M', 'n'),
+    (0x1D67E, 'M', 'o'),
+    ]
+
+def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1D67F, 'M', 'p'),
+    (0x1D680, 'M', 'q'),
+    (0x1D681, 'M', 'r'),
+    (0x1D682, 'M', 's'),
+    (0x1D683, 'M', 't'),
+    (0x1D684, 'M', 'u'),
+    (0x1D685, 'M', 'v'),
+    (0x1D686, 'M', 'w'),
+    (0x1D687, 'M', 'x'),
+    (0x1D688, 'M', 'y'),
+    (0x1D689, 'M', 'z'),
+    (0x1D68A, 'M', 'a'),
+    (0x1D68B, 'M', 'b'),
+    (0x1D68C, 'M', 'c'),
+    (0x1D68D, 'M', 'd'),
+    (0x1D68E, 'M', 'e'),
+    (0x1D68F, 'M', 'f'),
+    (0x1D690, 'M', 'g'),
+    (0x1D691, 'M', 'h'),
+    (0x1D692, 'M', 'i'),
+    (0x1D693, 'M', 'j'),
+    (0x1D694, 'M', 'k'),
+    (0x1D695, 'M', 'l'),
+    (0x1D696, 'M', 'm'),
+    (0x1D697, 'M', 'n'),
+    (0x1D698, 'M', 'o'),
+    (0x1D699, 'M', 'p'),
+    (0x1D69A, 'M', 'q'),
+    (0x1D69B, 'M', 'r'),
+    (0x1D69C, 'M', 's'),
+    (0x1D69D, 'M', 't'),
+    (0x1D69E, 'M', 'u'),
+    (0x1D69F, 'M', 'v'),
+    (0x1D6A0, 'M', 'w'),
+    (0x1D6A1, 'M', 'x'),
+    (0x1D6A2, 'M', 'y'),
+    (0x1D6A3, 'M', 'z'),
+    (0x1D6A4, 'M', 'ı'),
+    (0x1D6A5, 'M', 'ȷ'),
+    (0x1D6A6, 'X'),
+    (0x1D6A8, 'M', 'α'),
+    (0x1D6A9, 'M', 'β'),
+    (0x1D6AA, 'M', 'γ'),
+    (0x1D6AB, 'M', 'δ'),
+    (0x1D6AC, 'M', 'ε'),
+    (0x1D6AD, 'M', 'ζ'),
+    (0x1D6AE, 'M', 'η'),
+    (0x1D6AF, 'M', 'θ'),
+    (0x1D6B0, 'M', 'ι'),
+    (0x1D6B1, 'M', 'κ'),
+    (0x1D6B2, 'M', 'λ'),
+    (0x1D6B3, 'M', 'μ'),
+    (0x1D6B4, 'M', 'ν'),
+    (0x1D6B5, 'M', 'ξ'),
+    (0x1D6B6, 'M', 'ο'),
+    (0x1D6B7, 'M', 'π'),
+    (0x1D6B8, 'M', 'ρ'),
+    (0x1D6B9, 'M', 'θ'),
+    (0x1D6BA, 'M', 'σ'),
+    (0x1D6BB, 'M', 'τ'),
+    (0x1D6BC, 'M', 'υ'),
+    (0x1D6BD, 'M', 'φ'),
+    (0x1D6BE, 'M', 'χ'),
+    (0x1D6BF, 'M', 'ψ'),
+    (0x1D6C0, 'M', 'ω'),
+    (0x1D6C1, 'M', '∇'),
+    (0x1D6C2, 'M', 'α'),
+    (0x1D6C3, 'M', 'β'),
+    (0x1D6C4, 'M', 'γ'),
+    (0x1D6C5, 'M', 'δ'),
+    (0x1D6C6, 'M', 'ε'),
+    (0x1D6C7, 'M', 'ζ'),
+    (0x1D6C8, 'M', 'η'),
+    (0x1D6C9, 'M', 'θ'),
+    (0x1D6CA, 'M', 'ι'),
+    (0x1D6CB, 'M', 'κ'),
+    (0x1D6CC, 'M', 'λ'),
+    (0x1D6CD, 'M', 'μ'),
+    (0x1D6CE, 'M', 'ν'),
+    (0x1D6CF, 'M', 'ξ'),
+    (0x1D6D0, 'M', 'ο'),
+    (0x1D6D1, 'M', 'π'),
+    (0x1D6D2, 'M', 'ρ'),
+    (0x1D6D3, 'M', 'σ'),
+    (0x1D6D5, 'M', 'τ'),
+    (0x1D6D6, 'M', 'υ'),
+    (0x1D6D7, 'M', 'φ'),
+    (0x1D6D8, 'M', 'χ'),
+    (0x1D6D9, 'M', 'ψ'),
+    (0x1D6DA, 'M', 'ω'),
+    (0x1D6DB, 'M', '∂'),
+    (0x1D6DC, 'M', 'ε'),
+    (0x1D6DD, 'M', 'θ'),
+    (0x1D6DE, 'M', 'κ'),
+    (0x1D6DF, 'M', 'φ'),
+    (0x1D6E0, 'M', 'ρ'),
+    (0x1D6E1, 'M', 'π'),
+    (0x1D6E2, 'M', 'α'),
+    (0x1D6E3, 'M', 'β'),
+    (0x1D6E4, 'M', 'γ'),
+    ]
+
+def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1D6E5, 'M', 'δ'),
+    (0x1D6E6, 'M', 'ε'),
+    (0x1D6E7, 'M', 'ζ'),
+    (0x1D6E8, 'M', 'η'),
+    (0x1D6E9, 'M', 'θ'),
+    (0x1D6EA, 'M', 'ι'),
+    (0x1D6EB, 'M', 'κ'),
+    (0x1D6EC, 'M', 'λ'),
+    (0x1D6ED, 'M', 'μ'),
+    (0x1D6EE, 'M', 'ν'),
+    (0x1D6EF, 'M', 'ξ'),
+    (0x1D6F0, 'M', 'ο'),
+    (0x1D6F1, 'M', 'π'),
+    (0x1D6F2, 'M', 'ρ'),
+    (0x1D6F3, 'M', 'θ'),
+    (0x1D6F4, 'M', 'σ'),
+    (0x1D6F5, 'M', 'τ'),
+    (0x1D6F6, 'M', 'υ'),
+    (0x1D6F7, 'M', 'φ'),
+    (0x1D6F8, 'M', 'χ'),
+    (0x1D6F9, 'M', 'ψ'),
+    (0x1D6FA, 'M', 'ω'),
+    (0x1D6FB, 'M', '∇'),
+    (0x1D6FC, 'M', 'α'),
+    (0x1D6FD, 'M', 'β'),
+    (0x1D6FE, 'M', 'γ'),
+    (0x1D6FF, 'M', 'δ'),
+    (0x1D700, 'M', 'ε'),
+    (0x1D701, 'M', 'ζ'),
+    (0x1D702, 'M', 'η'),
+    (0x1D703, 'M', 'θ'),
+    (0x1D704, 'M', 'ι'),
+    (0x1D705, 'M', 'κ'),
+    (0x1D706, 'M', 'λ'),
+    (0x1D707, 'M', 'μ'),
+    (0x1D708, 'M', 'ν'),
+    (0x1D709, 'M', 'ξ'),
+    (0x1D70A, 'M', 'ο'),
+    (0x1D70B, 'M', 'π'),
+    (0x1D70C, 'M', 'ρ'),
+    (0x1D70D, 'M', 'σ'),
+    (0x1D70F, 'M', 'τ'),
+    (0x1D710, 'M', 'υ'),
+    (0x1D711, 'M', 'φ'),
+    (0x1D712, 'M', 'χ'),
+    (0x1D713, 'M', 'ψ'),
+    (0x1D714, 'M', 'ω'),
+    (0x1D715, 'M', '∂'),
+    (0x1D716, 'M', 'ε'),
+    (0x1D717, 'M', 'θ'),
+    (0x1D718, 'M', 'κ'),
+    (0x1D719, 'M', 'φ'),
+    (0x1D71A, 'M', 'ρ'),
+    (0x1D71B, 'M', 'π'),
+    (0x1D71C, 'M', 'α'),
+    (0x1D71D, 'M', 'β'),
+    (0x1D71E, 'M', 'γ'),
+    (0x1D71F, 'M', 'δ'),
+    (0x1D720, 'M', 'ε'),
+    (0x1D721, 'M', 'ζ'),
+    (0x1D722, 'M', 'η'),
+    (0x1D723, 'M', 'θ'),
+    (0x1D724, 'M', 'ι'),
+    (0x1D725, 'M', 'κ'),
+    (0x1D726, 'M', 'λ'),
+    (0x1D727, 'M', 'μ'),
+    (0x1D728, 'M', 'ν'),
+    (0x1D729, 'M', 'ξ'),
+    (0x1D72A, 'M', 'ο'),
+    (0x1D72B, 'M', 'π'),
+    (0x1D72C, 'M', 'ρ'),
+    (0x1D72D, 'M', 'θ'),
+    (0x1D72E, 'M', 'σ'),
+    (0x1D72F, 'M', 'τ'),
+    (0x1D730, 'M', 'υ'),
+    (0x1D731, 'M', 'φ'),
+    (0x1D732, 'M', 'χ'),
+    (0x1D733, 'M', 'ψ'),
+    (0x1D734, 'M', 'ω'),
+    (0x1D735, 'M', '∇'),
+    (0x1D736, 'M', 'α'),
+    (0x1D737, 'M', 'β'),
+    (0x1D738, 'M', 'γ'),
+    (0x1D739, 'M', 'δ'),
+    (0x1D73A, 'M', 'ε'),
+    (0x1D73B, 'M', 'ζ'),
+    (0x1D73C, 'M', 'η'),
+    (0x1D73D, 'M', 'θ'),
+    (0x1D73E, 'M', 'ι'),
+    (0x1D73F, 'M', 'κ'),
+    (0x1D740, 'M', 'λ'),
+    (0x1D741, 'M', 'μ'),
+    (0x1D742, 'M', 'ν'),
+    (0x1D743, 'M', 'ξ'),
+    (0x1D744, 'M', 'ο'),
+    (0x1D745, 'M', 'π'),
+    (0x1D746, 'M', 'ρ'),
+    (0x1D747, 'M', 'σ'),
+    (0x1D749, 'M', 'τ'),
+    (0x1D74A, 'M', 'υ'),
+    ]
+
+def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1D74B, 'M', 'φ'),
+    (0x1D74C, 'M', 'χ'),
+    (0x1D74D, 'M', 'ψ'),
+    (0x1D74E, 'M', 'ω'),
+    (0x1D74F, 'M', '∂'),
+    (0x1D750, 'M', 'ε'),
+    (0x1D751, 'M', 'θ'),
+    (0x1D752, 'M', 'κ'),
+    (0x1D753, 'M', 'φ'),
+    (0x1D754, 'M', 'ρ'),
+    (0x1D755, 'M', 'π'),
+    (0x1D756, 'M', 'α'),
+    (0x1D757, 'M', 'β'),
+    (0x1D758, 'M', 'γ'),
+    (0x1D759, 'M', 'δ'),
+    (0x1D75A, 'M', 'ε'),
+    (0x1D75B, 'M', 'ζ'),
+    (0x1D75C, 'M', 'η'),
+    (0x1D75D, 'M', 'θ'),
+    (0x1D75E, 'M', 'ι'),
+    (0x1D75F, 'M', 'κ'),
+    (0x1D760, 'M', 'λ'),
+    (0x1D761, 'M', 'μ'),
+    (0x1D762, 'M', 'ν'),
+    (0x1D763, 'M', 'ξ'),
+    (0x1D764, 'M', 'ο'),
+    (0x1D765, 'M', 'π'),
+    (0x1D766, 'M', 'ρ'),
+    (0x1D767, 'M', 'θ'),
+    (0x1D768, 'M', 'σ'),
+    (0x1D769, 'M', 'τ'),
+    (0x1D76A, 'M', 'υ'),
+    (0x1D76B, 'M', 'φ'),
+    (0x1D76C, 'M', 'χ'),
+    (0x1D76D, 'M', 'ψ'),
+    (0x1D76E, 'M', 'ω'),
+    (0x1D76F, 'M', '∇'),
+    (0x1D770, 'M', 'α'),
+    (0x1D771, 'M', 'β'),
+    (0x1D772, 'M', 'γ'),
+    (0x1D773, 'M', 'δ'),
+    (0x1D774, 'M', 'ε'),
+    (0x1D775, 'M', 'ζ'),
+    (0x1D776, 'M', 'η'),
+    (0x1D777, 'M', 'θ'),
+    (0x1D778, 'M', 'ι'),
+    (0x1D779, 'M', 'κ'),
+    (0x1D77A, 'M', 'λ'),
+    (0x1D77B, 'M', 'μ'),
+    (0x1D77C, 'M', 'ν'),
+    (0x1D77D, 'M', 'ξ'),
+    (0x1D77E, 'M', 'ο'),
+    (0x1D77F, 'M', 'π'),
+    (0x1D780, 'M', 'ρ'),
+    (0x1D781, 'M', 'σ'),
+    (0x1D783, 'M', 'τ'),
+    (0x1D784, 'M', 'υ'),
+    (0x1D785, 'M', 'φ'),
+    (0x1D786, 'M', 'χ'),
+    (0x1D787, 'M', 'ψ'),
+    (0x1D788, 'M', 'ω'),
+    (0x1D789, 'M', '∂'),
+    (0x1D78A, 'M', 'ε'),
+    (0x1D78B, 'M', 'θ'),
+    (0x1D78C, 'M', 'κ'),
+    (0x1D78D, 'M', 'φ'),
+    (0x1D78E, 'M', 'ρ'),
+    (0x1D78F, 'M', 'π'),
+    (0x1D790, 'M', 'α'),
+    (0x1D791, 'M', 'β'),
+    (0x1D792, 'M', 'γ'),
+    (0x1D793, 'M', 'δ'),
+    (0x1D794, 'M', 'ε'),
+    (0x1D795, 'M', 'ζ'),
+    (0x1D796, 'M', 'η'),
+    (0x1D797, 'M', 'θ'),
+    (0x1D798, 'M', 'ι'),
+    (0x1D799, 'M', 'κ'),
+    (0x1D79A, 'M', 'λ'),
+    (0x1D79B, 'M', 'μ'),
+    (0x1D79C, 'M', 'ν'),
+    (0x1D79D, 'M', 'ξ'),
+    (0x1D79E, 'M', 'ο'),
+    (0x1D79F, 'M', 'π'),
+    (0x1D7A0, 'M', 'ρ'),
+    (0x1D7A1, 'M', 'θ'),
+    (0x1D7A2, 'M', 'σ'),
+    (0x1D7A3, 'M', 'τ'),
+    (0x1D7A4, 'M', 'υ'),
+    (0x1D7A5, 'M', 'φ'),
+    (0x1D7A6, 'M', 'χ'),
+    (0x1D7A7, 'M', 'ψ'),
+    (0x1D7A8, 'M', 'ω'),
+    (0x1D7A9, 'M', '∇'),
+    (0x1D7AA, 'M', 'α'),
+    (0x1D7AB, 'M', 'β'),
+    (0x1D7AC, 'M', 'γ'),
+    (0x1D7AD, 'M', 'δ'),
+    (0x1D7AE, 'M', 'ε'),
+    (0x1D7AF, 'M', 'ζ'),
+    ]
+
+def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1D7B0, 'M', 'η'),
+    (0x1D7B1, 'M', 'θ'),
+    (0x1D7B2, 'M', 'ι'),
+    (0x1D7B3, 'M', 'κ'),
+    (0x1D7B4, 'M', 'λ'),
+    (0x1D7B5, 'M', 'μ'),
+    (0x1D7B6, 'M', 'ν'),
+    (0x1D7B7, 'M', 'ξ'),
+    (0x1D7B8, 'M', 'ο'),
+    (0x1D7B9, 'M', 'π'),
+    (0x1D7BA, 'M', 'ρ'),
+    (0x1D7BB, 'M', 'σ'),
+    (0x1D7BD, 'M', 'τ'),
+    (0x1D7BE, 'M', 'υ'),
+    (0x1D7BF, 'M', 'φ'),
+    (0x1D7C0, 'M', 'χ'),
+    (0x1D7C1, 'M', 'ψ'),
+    (0x1D7C2, 'M', 'ω'),
+    (0x1D7C3, 'M', '∂'),
+    (0x1D7C4, 'M', 'ε'),
+    (0x1D7C5, 'M', 'θ'),
+    (0x1D7C6, 'M', 'κ'),
+    (0x1D7C7, 'M', 'φ'),
+    (0x1D7C8, 'M', 'ρ'),
+    (0x1D7C9, 'M', 'π'),
+    (0x1D7CA, 'M', 'ϝ'),
+    (0x1D7CC, 'X'),
+    (0x1D7CE, 'M', '0'),
+    (0x1D7CF, 'M', '1'),
+    (0x1D7D0, 'M', '2'),
+    (0x1D7D1, 'M', '3'),
+    (0x1D7D2, 'M', '4'),
+    (0x1D7D3, 'M', '5'),
+    (0x1D7D4, 'M', '6'),
+    (0x1D7D5, 'M', '7'),
+    (0x1D7D6, 'M', '8'),
+    (0x1D7D7, 'M', '9'),
+    (0x1D7D8, 'M', '0'),
+    (0x1D7D9, 'M', '1'),
+    (0x1D7DA, 'M', '2'),
+    (0x1D7DB, 'M', '3'),
+    (0x1D7DC, 'M', '4'),
+    (0x1D7DD, 'M', '5'),
+    (0x1D7DE, 'M', '6'),
+    (0x1D7DF, 'M', '7'),
+    (0x1D7E0, 'M', '8'),
+    (0x1D7E1, 'M', '9'),
+    (0x1D7E2, 'M', '0'),
+    (0x1D7E3, 'M', '1'),
+    (0x1D7E4, 'M', '2'),
+    (0x1D7E5, 'M', '3'),
+    (0x1D7E6, 'M', '4'),
+    (0x1D7E7, 'M', '5'),
+    (0x1D7E8, 'M', '6'),
+    (0x1D7E9, 'M', '7'),
+    (0x1D7EA, 'M', '8'),
+    (0x1D7EB, 'M', '9'),
+    (0x1D7EC, 'M', '0'),
+    (0x1D7ED, 'M', '1'),
+    (0x1D7EE, 'M', '2'),
+    (0x1D7EF, 'M', '3'),
+    (0x1D7F0, 'M', '4'),
+    (0x1D7F1, 'M', '5'),
+    (0x1D7F2, 'M', '6'),
+    (0x1D7F3, 'M', '7'),
+    (0x1D7F4, 'M', '8'),
+    (0x1D7F5, 'M', '9'),
+    (0x1D7F6, 'M', '0'),
+    (0x1D7F7, 'M', '1'),
+    (0x1D7F8, 'M', '2'),
+    (0x1D7F9, 'M', '3'),
+    (0x1D7FA, 'M', '4'),
+    (0x1D7FB, 'M', '5'),
+    (0x1D7FC, 'M', '6'),
+    (0x1D7FD, 'M', '7'),
+    (0x1D7FE, 'M', '8'),
+    (0x1D7FF, 'M', '9'),
+    (0x1D800, 'V'),
+    (0x1DA8C, 'X'),
+    (0x1DA9B, 'V'),
+    (0x1DAA0, 'X'),
+    (0x1DAA1, 'V'),
+    (0x1DAB0, 'X'),
+    (0x1DF00, 'V'),
+    (0x1DF1F, 'X'),
+    (0x1DF25, 'V'),
+    (0x1DF2B, 'X'),
+    (0x1E000, 'V'),
+    (0x1E007, 'X'),
+    (0x1E008, 'V'),
+    (0x1E019, 'X'),
+    (0x1E01B, 'V'),
+    (0x1E022, 'X'),
+    (0x1E023, 'V'),
+    (0x1E025, 'X'),
+    (0x1E026, 'V'),
+    (0x1E02B, 'X'),
+    (0x1E030, 'M', 'а'),
+    (0x1E031, 'M', 'б'),
+    (0x1E032, 'M', 'в'),
+    ]
+
+def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1E033, 'M', 'г'),
+    (0x1E034, 'M', 'д'),
+    (0x1E035, 'M', 'е'),
+    (0x1E036, 'M', 'ж'),
+    (0x1E037, 'M', 'з'),
+    (0x1E038, 'M', 'и'),
+    (0x1E039, 'M', 'к'),
+    (0x1E03A, 'M', 'л'),
+    (0x1E03B, 'M', 'м'),
+    (0x1E03C, 'M', 'о'),
+    (0x1E03D, 'M', 'п'),
+    (0x1E03E, 'M', 'р'),
+    (0x1E03F, 'M', 'с'),
+    (0x1E040, 'M', 'т'),
+    (0x1E041, 'M', 'у'),
+    (0x1E042, 'M', 'ф'),
+    (0x1E043, 'M', 'х'),
+    (0x1E044, 'M', 'ц'),
+    (0x1E045, 'M', 'ч'),
+    (0x1E046, 'M', 'ш'),
+    (0x1E047, 'M', 'ы'),
+    (0x1E048, 'M', 'э'),
+    (0x1E049, 'M', 'ю'),
+    (0x1E04A, 'M', 'ꚉ'),
+    (0x1E04B, 'M', 'ә'),
+    (0x1E04C, 'M', 'і'),
+    (0x1E04D, 'M', 'ј'),
+    (0x1E04E, 'M', 'ө'),
+    (0x1E04F, 'M', 'ү'),
+    (0x1E050, 'M', 'ӏ'),
+    (0x1E051, 'M', 'а'),
+    (0x1E052, 'M', 'б'),
+    (0x1E053, 'M', 'в'),
+    (0x1E054, 'M', 'г'),
+    (0x1E055, 'M', 'д'),
+    (0x1E056, 'M', 'е'),
+    (0x1E057, 'M', 'ж'),
+    (0x1E058, 'M', 'з'),
+    (0x1E059, 'M', 'и'),
+    (0x1E05A, 'M', 'к'),
+    (0x1E05B, 'M', 'л'),
+    (0x1E05C, 'M', 'о'),
+    (0x1E05D, 'M', 'п'),
+    (0x1E05E, 'M', 'с'),
+    (0x1E05F, 'M', 'у'),
+    (0x1E060, 'M', 'ф'),
+    (0x1E061, 'M', 'х'),
+    (0x1E062, 'M', 'ц'),
+    (0x1E063, 'M', 'ч'),
+    (0x1E064, 'M', 'ш'),
+    (0x1E065, 'M', 'ъ'),
+    (0x1E066, 'M', 'ы'),
+    (0x1E067, 'M', 'ґ'),
+    (0x1E068, 'M', 'і'),
+    (0x1E069, 'M', 'ѕ'),
+    (0x1E06A, 'M', 'џ'),
+    (0x1E06B, 'M', 'ҫ'),
+    (0x1E06C, 'M', 'ꙑ'),
+    (0x1E06D, 'M', 'ұ'),
+    (0x1E06E, 'X'),
+    (0x1E08F, 'V'),
+    (0x1E090, 'X'),
+    (0x1E100, 'V'),
+    (0x1E12D, 'X'),
+    (0x1E130, 'V'),
+    (0x1E13E, 'X'),
+    (0x1E140, 'V'),
+    (0x1E14A, 'X'),
+    (0x1E14E, 'V'),
+    (0x1E150, 'X'),
+    (0x1E290, 'V'),
+    (0x1E2AF, 'X'),
+    (0x1E2C0, 'V'),
+    (0x1E2FA, 'X'),
+    (0x1E2FF, 'V'),
+    (0x1E300, 'X'),
+    (0x1E4D0, 'V'),
+    (0x1E4FA, 'X'),
+    (0x1E7E0, 'V'),
+    (0x1E7E7, 'X'),
+    (0x1E7E8, 'V'),
+    (0x1E7EC, 'X'),
+    (0x1E7ED, 'V'),
+    (0x1E7EF, 'X'),
+    (0x1E7F0, 'V'),
+    (0x1E7FF, 'X'),
+    (0x1E800, 'V'),
+    (0x1E8C5, 'X'),
+    (0x1E8C7, 'V'),
+    (0x1E8D7, 'X'),
+    (0x1E900, 'M', '𞤢'),
+    (0x1E901, 'M', '𞤣'),
+    (0x1E902, 'M', '𞤤'),
+    (0x1E903, 'M', '𞤥'),
+    (0x1E904, 'M', '𞤦'),
+    (0x1E905, 'M', '𞤧'),
+    (0x1E906, 'M', '𞤨'),
+    (0x1E907, 'M', '𞤩'),
+    (0x1E908, 'M', '𞤪'),
+    (0x1E909, 'M', '𞤫'),
+    ]
+
+def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1E90A, 'M', '𞤬'),
+    (0x1E90B, 'M', '𞤭'),
+    (0x1E90C, 'M', '𞤮'),
+    (0x1E90D, 'M', '𞤯'),
+    (0x1E90E, 'M', '𞤰'),
+    (0x1E90F, 'M', '𞤱'),
+    (0x1E910, 'M', '𞤲'),
+    (0x1E911, 'M', '𞤳'),
+    (0x1E912, 'M', '𞤴'),
+    (0x1E913, 'M', '𞤵'),
+    (0x1E914, 'M', '𞤶'),
+    (0x1E915, 'M', '𞤷'),
+    (0x1E916, 'M', '𞤸'),
+    (0x1E917, 'M', '𞤹'),
+    (0x1E918, 'M', '𞤺'),
+    (0x1E919, 'M', '𞤻'),
+    (0x1E91A, 'M', '𞤼'),
+    (0x1E91B, 'M', '𞤽'),
+    (0x1E91C, 'M', '𞤾'),
+    (0x1E91D, 'M', '𞤿'),
+    (0x1E91E, 'M', '𞥀'),
+    (0x1E91F, 'M', '𞥁'),
+    (0x1E920, 'M', '𞥂'),
+    (0x1E921, 'M', '𞥃'),
+    (0x1E922, 'V'),
+    (0x1E94C, 'X'),
+    (0x1E950, 'V'),
+    (0x1E95A, 'X'),
+    (0x1E95E, 'V'),
+    (0x1E960, 'X'),
+    (0x1EC71, 'V'),
+    (0x1ECB5, 'X'),
+    (0x1ED01, 'V'),
+    (0x1ED3E, 'X'),
+    (0x1EE00, 'M', 'ا'),
+    (0x1EE01, 'M', 'ب'),
+    (0x1EE02, 'M', 'ج'),
+    (0x1EE03, 'M', 'د'),
+    (0x1EE04, 'X'),
+    (0x1EE05, 'M', 'و'),
+    (0x1EE06, 'M', 'ز'),
+    (0x1EE07, 'M', 'ح'),
+    (0x1EE08, 'M', 'ط'),
+    (0x1EE09, 'M', 'ي'),
+    (0x1EE0A, 'M', 'ك'),
+    (0x1EE0B, 'M', 'ل'),
+    (0x1EE0C, 'M', 'م'),
+    (0x1EE0D, 'M', 'ن'),
+    (0x1EE0E, 'M', 'س'),
+    (0x1EE0F, 'M', 'ع'),
+    (0x1EE10, 'M', 'ف'),
+    (0x1EE11, 'M', 'ص'),
+    (0x1EE12, 'M', 'ق'),
+    (0x1EE13, 'M', 'ر'),
+    (0x1EE14, 'M', 'ش'),
+    (0x1EE15, 'M', 'ت'),
+    (0x1EE16, 'M', 'ث'),
+    (0x1EE17, 'M', 'خ'),
+    (0x1EE18, 'M', 'ذ'),
+    (0x1EE19, 'M', 'ض'),
+    (0x1EE1A, 'M', 'ظ'),
+    (0x1EE1B, 'M', 'غ'),
+    (0x1EE1C, 'M', 'ٮ'),
+    (0x1EE1D, 'M', 'ں'),
+    (0x1EE1E, 'M', 'ڡ'),
+    (0x1EE1F, 'M', 'ٯ'),
+    (0x1EE20, 'X'),
+    (0x1EE21, 'M', 'ب'),
+    (0x1EE22, 'M', 'ج'),
+    (0x1EE23, 'X'),
+    (0x1EE24, 'M', 'ه'),
+    (0x1EE25, 'X'),
+    (0x1EE27, 'M', 'ح'),
+    (0x1EE28, 'X'),
+    (0x1EE29, 'M', 'ي'),
+    (0x1EE2A, 'M', 'ك'),
+    (0x1EE2B, 'M', 'ل'),
+    (0x1EE2C, 'M', 'م'),
+    (0x1EE2D, 'M', 'ن'),
+    (0x1EE2E, 'M', 'س'),
+    (0x1EE2F, 'M', 'ع'),
+    (0x1EE30, 'M', 'ف'),
+    (0x1EE31, 'M', 'ص'),
+    (0x1EE32, 'M', 'ق'),
+    (0x1EE33, 'X'),
+    (0x1EE34, 'M', 'ش'),
+    (0x1EE35, 'M', 'ت'),
+    (0x1EE36, 'M', 'ث'),
+    (0x1EE37, 'M', 'خ'),
+    (0x1EE38, 'X'),
+    (0x1EE39, 'M', 'ض'),
+    (0x1EE3A, 'X'),
+    (0x1EE3B, 'M', 'غ'),
+    (0x1EE3C, 'X'),
+    (0x1EE42, 'M', 'ج'),
+    (0x1EE43, 'X'),
+    (0x1EE47, 'M', 'ح'),
+    (0x1EE48, 'X'),
+    (0x1EE49, 'M', 'ي'),
+    (0x1EE4A, 'X'),
+    ]
+
+def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1EE4B, 'M', 'ل'),
+    (0x1EE4C, 'X'),
+    (0x1EE4D, 'M', 'ن'),
+    (0x1EE4E, 'M', 'س'),
+    (0x1EE4F, 'M', 'ع'),
+    (0x1EE50, 'X'),
+    (0x1EE51, 'M', 'ص'),
+    (0x1EE52, 'M', 'ق'),
+    (0x1EE53, 'X'),
+    (0x1EE54, 'M', 'ش'),
+    (0x1EE55, 'X'),
+    (0x1EE57, 'M', 'خ'),
+    (0x1EE58, 'X'),
+    (0x1EE59, 'M', 'ض'),
+    (0x1EE5A, 'X'),
+    (0x1EE5B, 'M', 'غ'),
+    (0x1EE5C, 'X'),
+    (0x1EE5D, 'M', 'ں'),
+    (0x1EE5E, 'X'),
+    (0x1EE5F, 'M', 'ٯ'),
+    (0x1EE60, 'X'),
+    (0x1EE61, 'M', 'ب'),
+    (0x1EE62, 'M', 'ج'),
+    (0x1EE63, 'X'),
+    (0x1EE64, 'M', 'ه'),
+    (0x1EE65, 'X'),
+    (0x1EE67, 'M', 'ح'),
+    (0x1EE68, 'M', 'ط'),
+    (0x1EE69, 'M', 'ي'),
+    (0x1EE6A, 'M', 'ك'),
+    (0x1EE6B, 'X'),
+    (0x1EE6C, 'M', 'م'),
+    (0x1EE6D, 'M', 'ن'),
+    (0x1EE6E, 'M', 'س'),
+    (0x1EE6F, 'M', 'ع'),
+    (0x1EE70, 'M', 'ف'),
+    (0x1EE71, 'M', 'ص'),
+    (0x1EE72, 'M', 'ق'),
+    (0x1EE73, 'X'),
+    (0x1EE74, 'M', 'ش'),
+    (0x1EE75, 'M', 'ت'),
+    (0x1EE76, 'M', 'ث'),
+    (0x1EE77, 'M', 'خ'),
+    (0x1EE78, 'X'),
+    (0x1EE79, 'M', 'ض'),
+    (0x1EE7A, 'M', 'ظ'),
+    (0x1EE7B, 'M', 'غ'),
+    (0x1EE7C, 'M', 'ٮ'),
+    (0x1EE7D, 'X'),
+    (0x1EE7E, 'M', 'ڡ'),
+    (0x1EE7F, 'X'),
+    (0x1EE80, 'M', 'ا'),
+    (0x1EE81, 'M', 'ب'),
+    (0x1EE82, 'M', 'ج'),
+    (0x1EE83, 'M', 'د'),
+    (0x1EE84, 'M', 'ه'),
+    (0x1EE85, 'M', 'و'),
+    (0x1EE86, 'M', 'ز'),
+    (0x1EE87, 'M', 'ح'),
+    (0x1EE88, 'M', 'ط'),
+    (0x1EE89, 'M', 'ي'),
+    (0x1EE8A, 'X'),
+    (0x1EE8B, 'M', 'ل'),
+    (0x1EE8C, 'M', 'م'),
+    (0x1EE8D, 'M', 'ن'),
+    (0x1EE8E, 'M', 'س'),
+    (0x1EE8F, 'M', 'ع'),
+    (0x1EE90, 'M', 'ف'),
+    (0x1EE91, 'M', 'ص'),
+    (0x1EE92, 'M', 'ق'),
+    (0x1EE93, 'M', 'ر'),
+    (0x1EE94, 'M', 'ش'),
+    (0x1EE95, 'M', 'ت'),
+    (0x1EE96, 'M', 'ث'),
+    (0x1EE97, 'M', 'خ'),
+    (0x1EE98, 'M', 'ذ'),
+    (0x1EE99, 'M', 'ض'),
+    (0x1EE9A, 'M', 'ظ'),
+    (0x1EE9B, 'M', 'غ'),
+    (0x1EE9C, 'X'),
+    (0x1EEA1, 'M', 'ب'),
+    (0x1EEA2, 'M', 'ج'),
+    (0x1EEA3, 'M', 'د'),
+    (0x1EEA4, 'X'),
+    (0x1EEA5, 'M', 'و'),
+    (0x1EEA6, 'M', 'ز'),
+    (0x1EEA7, 'M', 'ح'),
+    (0x1EEA8, 'M', 'ط'),
+    (0x1EEA9, 'M', 'ي'),
+    (0x1EEAA, 'X'),
+    (0x1EEAB, 'M', 'ل'),
+    (0x1EEAC, 'M', 'م'),
+    (0x1EEAD, 'M', 'ن'),
+    (0x1EEAE, 'M', 'س'),
+    (0x1EEAF, 'M', 'ع'),
+    (0x1EEB0, 'M', 'ف'),
+    (0x1EEB1, 'M', 'ص'),
+    (0x1EEB2, 'M', 'ق'),
+    (0x1EEB3, 'M', 'ر'),
+    (0x1EEB4, 'M', 'ش'),
+    ]
+
+def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1EEB5, 'M', 'ت'),
+    (0x1EEB6, 'M', 'ث'),
+    (0x1EEB7, 'M', 'خ'),
+    (0x1EEB8, 'M', 'ذ'),
+    (0x1EEB9, 'M', 'ض'),
+    (0x1EEBA, 'M', 'ظ'),
+    (0x1EEBB, 'M', 'غ'),
+    (0x1EEBC, 'X'),
+    (0x1EEF0, 'V'),
+    (0x1EEF2, 'X'),
+    (0x1F000, 'V'),
+    (0x1F02C, 'X'),
+    (0x1F030, 'V'),
+    (0x1F094, 'X'),
+    (0x1F0A0, 'V'),
+    (0x1F0AF, 'X'),
+    (0x1F0B1, 'V'),
+    (0x1F0C0, 'X'),
+    (0x1F0C1, 'V'),
+    (0x1F0D0, 'X'),
+    (0x1F0D1, 'V'),
+    (0x1F0F6, 'X'),
+    (0x1F101, '3', '0,'),
+    (0x1F102, '3', '1,'),
+    (0x1F103, '3', '2,'),
+    (0x1F104, '3', '3,'),
+    (0x1F105, '3', '4,'),
+    (0x1F106, '3', '5,'),
+    (0x1F107, '3', '6,'),
+    (0x1F108, '3', '7,'),
+    (0x1F109, '3', '8,'),
+    (0x1F10A, '3', '9,'),
+    (0x1F10B, 'V'),
+    (0x1F110, '3', '(a)'),
+    (0x1F111, '3', '(b)'),
+    (0x1F112, '3', '(c)'),
+    (0x1F113, '3', '(d)'),
+    (0x1F114, '3', '(e)'),
+    (0x1F115, '3', '(f)'),
+    (0x1F116, '3', '(g)'),
+    (0x1F117, '3', '(h)'),
+    (0x1F118, '3', '(i)'),
+    (0x1F119, '3', '(j)'),
+    (0x1F11A, '3', '(k)'),
+    (0x1F11B, '3', '(l)'),
+    (0x1F11C, '3', '(m)'),
+    (0x1F11D, '3', '(n)'),
+    (0x1F11E, '3', '(o)'),
+    (0x1F11F, '3', '(p)'),
+    (0x1F120, '3', '(q)'),
+    (0x1F121, '3', '(r)'),
+    (0x1F122, '3', '(s)'),
+    (0x1F123, '3', '(t)'),
+    (0x1F124, '3', '(u)'),
+    (0x1F125, '3', '(v)'),
+    (0x1F126, '3', '(w)'),
+    (0x1F127, '3', '(x)'),
+    (0x1F128, '3', '(y)'),
+    (0x1F129, '3', '(z)'),
+    (0x1F12A, 'M', '〔s〕'),
+    (0x1F12B, 'M', 'c'),
+    (0x1F12C, 'M', 'r'),
+    (0x1F12D, 'M', 'cd'),
+    (0x1F12E, 'M', 'wz'),
+    (0x1F12F, 'V'),
+    (0x1F130, 'M', 'a'),
+    (0x1F131, 'M', 'b'),
+    (0x1F132, 'M', 'c'),
+    (0x1F133, 'M', 'd'),
+    (0x1F134, 'M', 'e'),
+    (0x1F135, 'M', 'f'),
+    (0x1F136, 'M', 'g'),
+    (0x1F137, 'M', 'h'),
+    (0x1F138, 'M', 'i'),
+    (0x1F139, 'M', 'j'),
+    (0x1F13A, 'M', 'k'),
+    (0x1F13B, 'M', 'l'),
+    (0x1F13C, 'M', 'm'),
+    (0x1F13D, 'M', 'n'),
+    (0x1F13E, 'M', 'o'),
+    (0x1F13F, 'M', 'p'),
+    (0x1F140, 'M', 'q'),
+    (0x1F141, 'M', 'r'),
+    (0x1F142, 'M', 's'),
+    (0x1F143, 'M', 't'),
+    (0x1F144, 'M', 'u'),
+    (0x1F145, 'M', 'v'),
+    (0x1F146, 'M', 'w'),
+    (0x1F147, 'M', 'x'),
+    (0x1F148, 'M', 'y'),
+    (0x1F149, 'M', 'z'),
+    (0x1F14A, 'M', 'hv'),
+    (0x1F14B, 'M', 'mv'),
+    (0x1F14C, 'M', 'sd'),
+    (0x1F14D, 'M', 'ss'),
+    (0x1F14E, 'M', 'ppv'),
+    (0x1F14F, 'M', 'wc'),
+    (0x1F150, 'V'),
+    (0x1F16A, 'M', 'mc'),
+    (0x1F16B, 'M', 'md'),
+    ]
+
+def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1F16C, 'M', 'mr'),
+    (0x1F16D, 'V'),
+    (0x1F190, 'M', 'dj'),
+    (0x1F191, 'V'),
+    (0x1F1AE, 'X'),
+    (0x1F1E6, 'V'),
+    (0x1F200, 'M', 'ほか'),
+    (0x1F201, 'M', 'ココ'),
+    (0x1F202, 'M', 'サ'),
+    (0x1F203, 'X'),
+    (0x1F210, 'M', '手'),
+    (0x1F211, 'M', '字'),
+    (0x1F212, 'M', '双'),
+    (0x1F213, 'M', 'デ'),
+    (0x1F214, 'M', '二'),
+    (0x1F215, 'M', '多'),
+    (0x1F216, 'M', '解'),
+    (0x1F217, 'M', '天'),
+    (0x1F218, 'M', '交'),
+    (0x1F219, 'M', '映'),
+    (0x1F21A, 'M', '無'),
+    (0x1F21B, 'M', '料'),
+    (0x1F21C, 'M', '前'),
+    (0x1F21D, 'M', '後'),
+    (0x1F21E, 'M', '再'),
+    (0x1F21F, 'M', '新'),
+    (0x1F220, 'M', '初'),
+    (0x1F221, 'M', '終'),
+    (0x1F222, 'M', '生'),
+    (0x1F223, 'M', '販'),
+    (0x1F224, 'M', '声'),
+    (0x1F225, 'M', '吹'),
+    (0x1F226, 'M', '演'),
+    (0x1F227, 'M', '投'),
+    (0x1F228, 'M', '捕'),
+    (0x1F229, 'M', '一'),
+    (0x1F22A, 'M', '三'),
+    (0x1F22B, 'M', '遊'),
+    (0x1F22C, 'M', '左'),
+    (0x1F22D, 'M', '中'),
+    (0x1F22E, 'M', '右'),
+    (0x1F22F, 'M', '指'),
+    (0x1F230, 'M', '走'),
+    (0x1F231, 'M', '打'),
+    (0x1F232, 'M', '禁'),
+    (0x1F233, 'M', '空'),
+    (0x1F234, 'M', '合'),
+    (0x1F235, 'M', '満'),
+    (0x1F236, 'M', '有'),
+    (0x1F237, 'M', '月'),
+    (0x1F238, 'M', '申'),
+    (0x1F239, 'M', '割'),
+    (0x1F23A, 'M', '営'),
+    (0x1F23B, 'M', '配'),
+    (0x1F23C, 'X'),
+    (0x1F240, 'M', '〔本〕'),
+    (0x1F241, 'M', '〔三〕'),
+    (0x1F242, 'M', '〔二〕'),
+    (0x1F243, 'M', '〔安〕'),
+    (0x1F244, 'M', '〔点〕'),
+    (0x1F245, 'M', '〔打〕'),
+    (0x1F246, 'M', '〔盗〕'),
+    (0x1F247, 'M', '〔勝〕'),
+    (0x1F248, 'M', '〔敗〕'),
+    (0x1F249, 'X'),
+    (0x1F250, 'M', '得'),
+    (0x1F251, 'M', '可'),
+    (0x1F252, 'X'),
+    (0x1F260, 'V'),
+    (0x1F266, 'X'),
+    (0x1F300, 'V'),
+    (0x1F6D8, 'X'),
+    (0x1F6DC, 'V'),
+    (0x1F6ED, 'X'),
+    (0x1F6F0, 'V'),
+    (0x1F6FD, 'X'),
+    (0x1F700, 'V'),
+    (0x1F777, 'X'),
+    (0x1F77B, 'V'),
+    (0x1F7DA, 'X'),
+    (0x1F7E0, 'V'),
+    (0x1F7EC, 'X'),
+    (0x1F7F0, 'V'),
+    (0x1F7F1, 'X'),
+    (0x1F800, 'V'),
+    (0x1F80C, 'X'),
+    (0x1F810, 'V'),
+    (0x1F848, 'X'),
+    (0x1F850, 'V'),
+    (0x1F85A, 'X'),
+    (0x1F860, 'V'),
+    (0x1F888, 'X'),
+    (0x1F890, 'V'),
+    (0x1F8AE, 'X'),
+    (0x1F8B0, 'V'),
+    (0x1F8B2, 'X'),
+    (0x1F900, 'V'),
+    (0x1FA54, 'X'),
+    (0x1FA60, 'V'),
+    (0x1FA6E, 'X'),
+    ]
+
+def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x1FA70, 'V'),
+    (0x1FA7D, 'X'),
+    (0x1FA80, 'V'),
+    (0x1FA89, 'X'),
+    (0x1FA90, 'V'),
+    (0x1FABE, 'X'),
+    (0x1FABF, 'V'),
+    (0x1FAC6, 'X'),
+    (0x1FACE, 'V'),
+    (0x1FADC, 'X'),
+    (0x1FAE0, 'V'),
+    (0x1FAE9, 'X'),
+    (0x1FAF0, 'V'),
+    (0x1FAF9, 'X'),
+    (0x1FB00, 'V'),
+    (0x1FB93, 'X'),
+    (0x1FB94, 'V'),
+    (0x1FBCB, 'X'),
+    (0x1FBF0, 'M', '0'),
+    (0x1FBF1, 'M', '1'),
+    (0x1FBF2, 'M', '2'),
+    (0x1FBF3, 'M', '3'),
+    (0x1FBF4, 'M', '4'),
+    (0x1FBF5, 'M', '5'),
+    (0x1FBF6, 'M', '6'),
+    (0x1FBF7, 'M', '7'),
+    (0x1FBF8, 'M', '8'),
+    (0x1FBF9, 'M', '9'),
+    (0x1FBFA, 'X'),
+    (0x20000, 'V'),
+    (0x2A6E0, 'X'),
+    (0x2A700, 'V'),
+    (0x2B73A, 'X'),
+    (0x2B740, 'V'),
+    (0x2B81E, 'X'),
+    (0x2B820, 'V'),
+    (0x2CEA2, 'X'),
+    (0x2CEB0, 'V'),
+    (0x2EBE1, 'X'),
+    (0x2F800, 'M', '丽'),
+    (0x2F801, 'M', '丸'),
+    (0x2F802, 'M', '乁'),
+    (0x2F803, 'M', '𠄢'),
+    (0x2F804, 'M', '你'),
+    (0x2F805, 'M', '侮'),
+    (0x2F806, 'M', '侻'),
+    (0x2F807, 'M', '倂'),
+    (0x2F808, 'M', '偺'),
+    (0x2F809, 'M', '備'),
+    (0x2F80A, 'M', '僧'),
+    (0x2F80B, 'M', '像'),
+    (0x2F80C, 'M', '㒞'),
+    (0x2F80D, 'M', '𠘺'),
+    (0x2F80E, 'M', '免'),
+    (0x2F80F, 'M', '兔'),
+    (0x2F810, 'M', '兤'),
+    (0x2F811, 'M', '具'),
+    (0x2F812, 'M', '𠔜'),
+    (0x2F813, 'M', '㒹'),
+    (0x2F814, 'M', '內'),
+    (0x2F815, 'M', '再'),
+    (0x2F816, 'M', '𠕋'),
+    (0x2F817, 'M', '冗'),
+    (0x2F818, 'M', '冤'),
+    (0x2F819, 'M', '仌'),
+    (0x2F81A, 'M', '冬'),
+    (0x2F81B, 'M', '况'),
+    (0x2F81C, 'M', '𩇟'),
+    (0x2F81D, 'M', '凵'),
+    (0x2F81E, 'M', '刃'),
+    (0x2F81F, 'M', '㓟'),
+    (0x2F820, 'M', '刻'),
+    (0x2F821, 'M', '剆'),
+    (0x2F822, 'M', '割'),
+    (0x2F823, 'M', '剷'),
+    (0x2F824, 'M', '㔕'),
+    (0x2F825, 'M', '勇'),
+    (0x2F826, 'M', '勉'),
+    (0x2F827, 'M', '勤'),
+    (0x2F828, 'M', '勺'),
+    (0x2F829, 'M', '包'),
+    (0x2F82A, 'M', '匆'),
+    (0x2F82B, 'M', '北'),
+    (0x2F82C, 'M', '卉'),
+    (0x2F82D, 'M', '卑'),
+    (0x2F82E, 'M', '博'),
+    (0x2F82F, 'M', '即'),
+    (0x2F830, 'M', '卽'),
+    (0x2F831, 'M', '卿'),
+    (0x2F834, 'M', '𠨬'),
+    (0x2F835, 'M', '灰'),
+    (0x2F836, 'M', '及'),
+    (0x2F837, 'M', '叟'),
+    (0x2F838, 'M', '𠭣'),
+    (0x2F839, 'M', '叫'),
+    (0x2F83A, 'M', '叱'),
+    (0x2F83B, 'M', '吆'),
+    (0x2F83C, 'M', '咞'),
+    (0x2F83D, 'M', '吸'),
+    (0x2F83E, 'M', '呈'),
+    ]
+
+def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x2F83F, 'M', '周'),
+    (0x2F840, 'M', '咢'),
+    (0x2F841, 'M', '哶'),
+    (0x2F842, 'M', '唐'),
+    (0x2F843, 'M', '啓'),
+    (0x2F844, 'M', '啣'),
+    (0x2F845, 'M', '善'),
+    (0x2F847, 'M', '喙'),
+    (0x2F848, 'M', '喫'),
+    (0x2F849, 'M', '喳'),
+    (0x2F84A, 'M', '嗂'),
+    (0x2F84B, 'M', '圖'),
+    (0x2F84C, 'M', '嘆'),
+    (0x2F84D, 'M', '圗'),
+    (0x2F84E, 'M', '噑'),
+    (0x2F84F, 'M', '噴'),
+    (0x2F850, 'M', '切'),
+    (0x2F851, 'M', '壮'),
+    (0x2F852, 'M', '城'),
+    (0x2F853, 'M', '埴'),
+    (0x2F854, 'M', '堍'),
+    (0x2F855, 'M', '型'),
+    (0x2F856, 'M', '堲'),
+    (0x2F857, 'M', '報'),
+    (0x2F858, 'M', '墬'),
+    (0x2F859, 'M', '𡓤'),
+    (0x2F85A, 'M', '売'),
+    (0x2F85B, 'M', '壷'),
+    (0x2F85C, 'M', '夆'),
+    (0x2F85D, 'M', '多'),
+    (0x2F85E, 'M', '夢'),
+    (0x2F85F, 'M', '奢'),
+    (0x2F860, 'M', '𡚨'),
+    (0x2F861, 'M', '𡛪'),
+    (0x2F862, 'M', '姬'),
+    (0x2F863, 'M', '娛'),
+    (0x2F864, 'M', '娧'),
+    (0x2F865, 'M', '姘'),
+    (0x2F866, 'M', '婦'),
+    (0x2F867, 'M', '㛮'),
+    (0x2F868, 'X'),
+    (0x2F869, 'M', '嬈'),
+    (0x2F86A, 'M', '嬾'),
+    (0x2F86C, 'M', '𡧈'),
+    (0x2F86D, 'M', '寃'),
+    (0x2F86E, 'M', '寘'),
+    (0x2F86F, 'M', '寧'),
+    (0x2F870, 'M', '寳'),
+    (0x2F871, 'M', '𡬘'),
+    (0x2F872, 'M', '寿'),
+    (0x2F873, 'M', '将'),
+    (0x2F874, 'X'),
+    (0x2F875, 'M', '尢'),
+    (0x2F876, 'M', '㞁'),
+    (0x2F877, 'M', '屠'),
+    (0x2F878, 'M', '屮'),
+    (0x2F879, 'M', '峀'),
+    (0x2F87A, 'M', '岍'),
+    (0x2F87B, 'M', '𡷤'),
+    (0x2F87C, 'M', '嵃'),
+    (0x2F87D, 'M', '𡷦'),
+    (0x2F87E, 'M', '嵮'),
+    (0x2F87F, 'M', '嵫'),
+    (0x2F880, 'M', '嵼'),
+    (0x2F881, 'M', '巡'),
+    (0x2F882, 'M', '巢'),
+    (0x2F883, 'M', '㠯'),
+    (0x2F884, 'M', '巽'),
+    (0x2F885, 'M', '帨'),
+    (0x2F886, 'M', '帽'),
+    (0x2F887, 'M', '幩'),
+    (0x2F888, 'M', '㡢'),
+    (0x2F889, 'M', '𢆃'),
+    (0x2F88A, 'M', '㡼'),
+    (0x2F88B, 'M', '庰'),
+    (0x2F88C, 'M', '庳'),
+    (0x2F88D, 'M', '庶'),
+    (0x2F88E, 'M', '廊'),
+    (0x2F88F, 'M', '𪎒'),
+    (0x2F890, 'M', '廾'),
+    (0x2F891, 'M', '𢌱'),
+    (0x2F893, 'M', '舁'),
+    (0x2F894, 'M', '弢'),
+    (0x2F896, 'M', '㣇'),
+    (0x2F897, 'M', '𣊸'),
+    (0x2F898, 'M', '𦇚'),
+    (0x2F899, 'M', '形'),
+    (0x2F89A, 'M', '彫'),
+    (0x2F89B, 'M', '㣣'),
+    (0x2F89C, 'M', '徚'),
+    (0x2F89D, 'M', '忍'),
+    (0x2F89E, 'M', '志'),
+    (0x2F89F, 'M', '忹'),
+    (0x2F8A0, 'M', '悁'),
+    (0x2F8A1, 'M', '㤺'),
+    (0x2F8A2, 'M', '㤜'),
+    (0x2F8A3, 'M', '悔'),
+    (0x2F8A4, 'M', '𢛔'),
+    (0x2F8A5, 'M', '惇'),
+    (0x2F8A6, 'M', '慈'),
+    ]
+
+def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x2F8A7, 'M', '慌'),
+    (0x2F8A8, 'M', '慎'),
+    (0x2F8A9, 'M', '慌'),
+    (0x2F8AA, 'M', '慺'),
+    (0x2F8AB, 'M', '憎'),
+    (0x2F8AC, 'M', '憲'),
+    (0x2F8AD, 'M', '憤'),
+    (0x2F8AE, 'M', '憯'),
+    (0x2F8AF, 'M', '懞'),
+    (0x2F8B0, 'M', '懲'),
+    (0x2F8B1, 'M', '懶'),
+    (0x2F8B2, 'M', '成'),
+    (0x2F8B3, 'M', '戛'),
+    (0x2F8B4, 'M', '扝'),
+    (0x2F8B5, 'M', '抱'),
+    (0x2F8B6, 'M', '拔'),
+    (0x2F8B7, 'M', '捐'),
+    (0x2F8B8, 'M', '𢬌'),
+    (0x2F8B9, 'M', '挽'),
+    (0x2F8BA, 'M', '拼'),
+    (0x2F8BB, 'M', '捨'),
+    (0x2F8BC, 'M', '掃'),
+    (0x2F8BD, 'M', '揤'),
+    (0x2F8BE, 'M', '𢯱'),
+    (0x2F8BF, 'M', '搢'),
+    (0x2F8C0, 'M', '揅'),
+    (0x2F8C1, 'M', '掩'),
+    (0x2F8C2, 'M', '㨮'),
+    (0x2F8C3, 'M', '摩'),
+    (0x2F8C4, 'M', '摾'),
+    (0x2F8C5, 'M', '撝'),
+    (0x2F8C6, 'M', '摷'),
+    (0x2F8C7, 'M', '㩬'),
+    (0x2F8C8, 'M', '敏'),
+    (0x2F8C9, 'M', '敬'),
+    (0x2F8CA, 'M', '𣀊'),
+    (0x2F8CB, 'M', '旣'),
+    (0x2F8CC, 'M', '書'),
+    (0x2F8CD, 'M', '晉'),
+    (0x2F8CE, 'M', '㬙'),
+    (0x2F8CF, 'M', '暑'),
+    (0x2F8D0, 'M', '㬈'),
+    (0x2F8D1, 'M', '㫤'),
+    (0x2F8D2, 'M', '冒'),
+    (0x2F8D3, 'M', '冕'),
+    (0x2F8D4, 'M', '最'),
+    (0x2F8D5, 'M', '暜'),
+    (0x2F8D6, 'M', '肭'),
+    (0x2F8D7, 'M', '䏙'),
+    (0x2F8D8, 'M', '朗'),
+    (0x2F8D9, 'M', '望'),
+    (0x2F8DA, 'M', '朡'),
+    (0x2F8DB, 'M', '杞'),
+    (0x2F8DC, 'M', '杓'),
+    (0x2F8DD, 'M', '𣏃'),
+    (0x2F8DE, 'M', '㭉'),
+    (0x2F8DF, 'M', '柺'),
+    (0x2F8E0, 'M', '枅'),
+    (0x2F8E1, 'M', '桒'),
+    (0x2F8E2, 'M', '梅'),
+    (0x2F8E3, 'M', '𣑭'),
+    (0x2F8E4, 'M', '梎'),
+    (0x2F8E5, 'M', '栟'),
+    (0x2F8E6, 'M', '椔'),
+    (0x2F8E7, 'M', '㮝'),
+    (0x2F8E8, 'M', '楂'),
+    (0x2F8E9, 'M', '榣'),
+    (0x2F8EA, 'M', '槪'),
+    (0x2F8EB, 'M', '檨'),
+    (0x2F8EC, 'M', '𣚣'),
+    (0x2F8ED, 'M', '櫛'),
+    (0x2F8EE, 'M', '㰘'),
+    (0x2F8EF, 'M', '次'),
+    (0x2F8F0, 'M', '𣢧'),
+    (0x2F8F1, 'M', '歔'),
+    (0x2F8F2, 'M', '㱎'),
+    (0x2F8F3, 'M', '歲'),
+    (0x2F8F4, 'M', '殟'),
+    (0x2F8F5, 'M', '殺'),
+    (0x2F8F6, 'M', '殻'),
+    (0x2F8F7, 'M', '𣪍'),
+    (0x2F8F8, 'M', '𡴋'),
+    (0x2F8F9, 'M', '𣫺'),
+    (0x2F8FA, 'M', '汎'),
+    (0x2F8FB, 'M', '𣲼'),
+    (0x2F8FC, 'M', '沿'),
+    (0x2F8FD, 'M', '泍'),
+    (0x2F8FE, 'M', '汧'),
+    (0x2F8FF, 'M', '洖'),
+    (0x2F900, 'M', '派'),
+    (0x2F901, 'M', '海'),
+    (0x2F902, 'M', '流'),
+    (0x2F903, 'M', '浩'),
+    (0x2F904, 'M', '浸'),
+    (0x2F905, 'M', '涅'),
+    (0x2F906, 'M', '𣴞'),
+    (0x2F907, 'M', '洴'),
+    (0x2F908, 'M', '港'),
+    (0x2F909, 'M', '湮'),
+    (0x2F90A, 'M', '㴳'),
+    ]
+
+def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x2F90B, 'M', '滋'),
+    (0x2F90C, 'M', '滇'),
+    (0x2F90D, 'M', '𣻑'),
+    (0x2F90E, 'M', '淹'),
+    (0x2F90F, 'M', '潮'),
+    (0x2F910, 'M', '𣽞'),
+    (0x2F911, 'M', '𣾎'),
+    (0x2F912, 'M', '濆'),
+    (0x2F913, 'M', '瀹'),
+    (0x2F914, 'M', '瀞'),
+    (0x2F915, 'M', '瀛'),
+    (0x2F916, 'M', '㶖'),
+    (0x2F917, 'M', '灊'),
+    (0x2F918, 'M', '災'),
+    (0x2F919, 'M', '灷'),
+    (0x2F91A, 'M', '炭'),
+    (0x2F91B, 'M', '𠔥'),
+    (0x2F91C, 'M', '煅'),
+    (0x2F91D, 'M', '𤉣'),
+    (0x2F91E, 'M', '熜'),
+    (0x2F91F, 'X'),
+    (0x2F920, 'M', '爨'),
+    (0x2F921, 'M', '爵'),
+    (0x2F922, 'M', '牐'),
+    (0x2F923, 'M', '𤘈'),
+    (0x2F924, 'M', '犀'),
+    (0x2F925, 'M', '犕'),
+    (0x2F926, 'M', '𤜵'),
+    (0x2F927, 'M', '𤠔'),
+    (0x2F928, 'M', '獺'),
+    (0x2F929, 'M', '王'),
+    (0x2F92A, 'M', '㺬'),
+    (0x2F92B, 'M', '玥'),
+    (0x2F92C, 'M', '㺸'),
+    (0x2F92E, 'M', '瑇'),
+    (0x2F92F, 'M', '瑜'),
+    (0x2F930, 'M', '瑱'),
+    (0x2F931, 'M', '璅'),
+    (0x2F932, 'M', '瓊'),
+    (0x2F933, 'M', '㼛'),
+    (0x2F934, 'M', '甤'),
+    (0x2F935, 'M', '𤰶'),
+    (0x2F936, 'M', '甾'),
+    (0x2F937, 'M', '𤲒'),
+    (0x2F938, 'M', '異'),
+    (0x2F939, 'M', '𢆟'),
+    (0x2F93A, 'M', '瘐'),
+    (0x2F93B, 'M', '𤾡'),
+    (0x2F93C, 'M', '𤾸'),
+    (0x2F93D, 'M', '𥁄'),
+    (0x2F93E, 'M', '㿼'),
+    (0x2F93F, 'M', '䀈'),
+    (0x2F940, 'M', '直'),
+    (0x2F941, 'M', '𥃳'),
+    (0x2F942, 'M', '𥃲'),
+    (0x2F943, 'M', '𥄙'),
+    (0x2F944, 'M', '𥄳'),
+    (0x2F945, 'M', '眞'),
+    (0x2F946, 'M', '真'),
+    (0x2F948, 'M', '睊'),
+    (0x2F949, 'M', '䀹'),
+    (0x2F94A, 'M', '瞋'),
+    (0x2F94B, 'M', '䁆'),
+    (0x2F94C, 'M', '䂖'),
+    (0x2F94D, 'M', '𥐝'),
+    (0x2F94E, 'M', '硎'),
+    (0x2F94F, 'M', '碌'),
+    (0x2F950, 'M', '磌'),
+    (0x2F951, 'M', '䃣'),
+    (0x2F952, 'M', '𥘦'),
+    (0x2F953, 'M', '祖'),
+    (0x2F954, 'M', '𥚚'),
+    (0x2F955, 'M', '𥛅'),
+    (0x2F956, 'M', '福'),
+    (0x2F957, 'M', '秫'),
+    (0x2F958, 'M', '䄯'),
+    (0x2F959, 'M', '穀'),
+    (0x2F95A, 'M', '穊'),
+    (0x2F95B, 'M', '穏'),
+    (0x2F95C, 'M', '𥥼'),
+    (0x2F95D, 'M', '𥪧'),
+    (0x2F95F, 'X'),
+    (0x2F960, 'M', '䈂'),
+    (0x2F961, 'M', '𥮫'),
+    (0x2F962, 'M', '篆'),
+    (0x2F963, 'M', '築'),
+    (0x2F964, 'M', '䈧'),
+    (0x2F965, 'M', '𥲀'),
+    (0x2F966, 'M', '糒'),
+    (0x2F967, 'M', '䊠'),
+    (0x2F968, 'M', '糨'),
+    (0x2F969, 'M', '糣'),
+    (0x2F96A, 'M', '紀'),
+    (0x2F96B, 'M', '𥾆'),
+    (0x2F96C, 'M', '絣'),
+    (0x2F96D, 'M', '䌁'),
+    (0x2F96E, 'M', '緇'),
+    (0x2F96F, 'M', '縂'),
+    (0x2F970, 'M', '繅'),
+    (0x2F971, 'M', '䌴'),
+    ]
+
+def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x2F972, 'M', '𦈨'),
+    (0x2F973, 'M', '𦉇'),
+    (0x2F974, 'M', '䍙'),
+    (0x2F975, 'M', '𦋙'),
+    (0x2F976, 'M', '罺'),
+    (0x2F977, 'M', '𦌾'),
+    (0x2F978, 'M', '羕'),
+    (0x2F979, 'M', '翺'),
+    (0x2F97A, 'M', '者'),
+    (0x2F97B, 'M', '𦓚'),
+    (0x2F97C, 'M', '𦔣'),
+    (0x2F97D, 'M', '聠'),
+    (0x2F97E, 'M', '𦖨'),
+    (0x2F97F, 'M', '聰'),
+    (0x2F980, 'M', '𣍟'),
+    (0x2F981, 'M', '䏕'),
+    (0x2F982, 'M', '育'),
+    (0x2F983, 'M', '脃'),
+    (0x2F984, 'M', '䐋'),
+    (0x2F985, 'M', '脾'),
+    (0x2F986, 'M', '媵'),
+    (0x2F987, 'M', '𦞧'),
+    (0x2F988, 'M', '𦞵'),
+    (0x2F989, 'M', '𣎓'),
+    (0x2F98A, 'M', '𣎜'),
+    (0x2F98B, 'M', '舁'),
+    (0x2F98C, 'M', '舄'),
+    (0x2F98D, 'M', '辞'),
+    (0x2F98E, 'M', '䑫'),
+    (0x2F98F, 'M', '芑'),
+    (0x2F990, 'M', '芋'),
+    (0x2F991, 'M', '芝'),
+    (0x2F992, 'M', '劳'),
+    (0x2F993, 'M', '花'),
+    (0x2F994, 'M', '芳'),
+    (0x2F995, 'M', '芽'),
+    (0x2F996, 'M', '苦'),
+    (0x2F997, 'M', '𦬼'),
+    (0x2F998, 'M', '若'),
+    (0x2F999, 'M', '茝'),
+    (0x2F99A, 'M', '荣'),
+    (0x2F99B, 'M', '莭'),
+    (0x2F99C, 'M', '茣'),
+    (0x2F99D, 'M', '莽'),
+    (0x2F99E, 'M', '菧'),
+    (0x2F99F, 'M', '著'),
+    (0x2F9A0, 'M', '荓'),
+    (0x2F9A1, 'M', '菊'),
+    (0x2F9A2, 'M', '菌'),
+    (0x2F9A3, 'M', '菜'),
+    (0x2F9A4, 'M', '𦰶'),
+    (0x2F9A5, 'M', '𦵫'),
+    (0x2F9A6, 'M', '𦳕'),
+    (0x2F9A7, 'M', '䔫'),
+    (0x2F9A8, 'M', '蓱'),
+    (0x2F9A9, 'M', '蓳'),
+    (0x2F9AA, 'M', '蔖'),
+    (0x2F9AB, 'M', '𧏊'),
+    (0x2F9AC, 'M', '蕤'),
+    (0x2F9AD, 'M', '𦼬'),
+    (0x2F9AE, 'M', '䕝'),
+    (0x2F9AF, 'M', '䕡'),
+    (0x2F9B0, 'M', '𦾱'),
+    (0x2F9B1, 'M', '𧃒'),
+    (0x2F9B2, 'M', '䕫'),
+    (0x2F9B3, 'M', '虐'),
+    (0x2F9B4, 'M', '虜'),
+    (0x2F9B5, 'M', '虧'),
+    (0x2F9B6, 'M', '虩'),
+    (0x2F9B7, 'M', '蚩'),
+    (0x2F9B8, 'M', '蚈'),
+    (0x2F9B9, 'M', '蜎'),
+    (0x2F9BA, 'M', '蛢'),
+    (0x2F9BB, 'M', '蝹'),
+    (0x2F9BC, 'M', '蜨'),
+    (0x2F9BD, 'M', '蝫'),
+    (0x2F9BE, 'M', '螆'),
+    (0x2F9BF, 'X'),
+    (0x2F9C0, 'M', '蟡'),
+    (0x2F9C1, 'M', '蠁'),
+    (0x2F9C2, 'M', '䗹'),
+    (0x2F9C3, 'M', '衠'),
+    (0x2F9C4, 'M', '衣'),
+    (0x2F9C5, 'M', '𧙧'),
+    (0x2F9C6, 'M', '裗'),
+    (0x2F9C7, 'M', '裞'),
+    (0x2F9C8, 'M', '䘵'),
+    (0x2F9C9, 'M', '裺'),
+    (0x2F9CA, 'M', '㒻'),
+    (0x2F9CB, 'M', '𧢮'),
+    (0x2F9CC, 'M', '𧥦'),
+    (0x2F9CD, 'M', '䚾'),
+    (0x2F9CE, 'M', '䛇'),
+    (0x2F9CF, 'M', '誠'),
+    (0x2F9D0, 'M', '諭'),
+    (0x2F9D1, 'M', '變'),
+    (0x2F9D2, 'M', '豕'),
+    (0x2F9D3, 'M', '𧲨'),
+    (0x2F9D4, 'M', '貫'),
+    (0x2F9D5, 'M', '賁'),
+    ]
+
+def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x2F9D6, 'M', '贛'),
+    (0x2F9D7, 'M', '起'),
+    (0x2F9D8, 'M', '𧼯'),
+    (0x2F9D9, 'M', '𠠄'),
+    (0x2F9DA, 'M', '跋'),
+    (0x2F9DB, 'M', '趼'),
+    (0x2F9DC, 'M', '跰'),
+    (0x2F9DD, 'M', '𠣞'),
+    (0x2F9DE, 'M', '軔'),
+    (0x2F9DF, 'M', '輸'),
+    (0x2F9E0, 'M', '𨗒'),
+    (0x2F9E1, 'M', '𨗭'),
+    (0x2F9E2, 'M', '邔'),
+    (0x2F9E3, 'M', '郱'),
+    (0x2F9E4, 'M', '鄑'),
+    (0x2F9E5, 'M', '𨜮'),
+    (0x2F9E6, 'M', '鄛'),
+    (0x2F9E7, 'M', '鈸'),
+    (0x2F9E8, 'M', '鋗'),
+    (0x2F9E9, 'M', '鋘'),
+    (0x2F9EA, 'M', '鉼'),
+    (0x2F9EB, 'M', '鏹'),
+    (0x2F9EC, 'M', '鐕'),
+    (0x2F9ED, 'M', '𨯺'),
+    (0x2F9EE, 'M', '開'),
+    (0x2F9EF, 'M', '䦕'),
+    (0x2F9F0, 'M', '閷'),
+    (0x2F9F1, 'M', '𨵷'),
+    (0x2F9F2, 'M', '䧦'),
+    (0x2F9F3, 'M', '雃'),
+    (0x2F9F4, 'M', '嶲'),
+    (0x2F9F5, 'M', '霣'),
+    (0x2F9F6, 'M', '𩅅'),
+    (0x2F9F7, 'M', '𩈚'),
+    (0x2F9F8, 'M', '䩮'),
+    (0x2F9F9, 'M', '䩶'),
+    (0x2F9FA, 'M', '韠'),
+    (0x2F9FB, 'M', '𩐊'),
+    (0x2F9FC, 'M', '䪲'),
+    (0x2F9FD, 'M', '𩒖'),
+    (0x2F9FE, 'M', '頋'),
+    (0x2FA00, 'M', '頩'),
+    (0x2FA01, 'M', '𩖶'),
+    (0x2FA02, 'M', '飢'),
+    (0x2FA03, 'M', '䬳'),
+    (0x2FA04, 'M', '餩'),
+    (0x2FA05, 'M', '馧'),
+    (0x2FA06, 'M', '駂'),
+    (0x2FA07, 'M', '駾'),
+    (0x2FA08, 'M', '䯎'),
+    (0x2FA09, 'M', '𩬰'),
+    (0x2FA0A, 'M', '鬒'),
+    (0x2FA0B, 'M', '鱀'),
+    (0x2FA0C, 'M', '鳽'),
+    (0x2FA0D, 'M', '䳎'),
+    (0x2FA0E, 'M', '䳭'),
+    (0x2FA0F, 'M', '鵧'),
+    (0x2FA10, 'M', '𪃎'),
+    (0x2FA11, 'M', '䳸'),
+    (0x2FA12, 'M', '𪄅'),
+    (0x2FA13, 'M', '𪈎'),
+    (0x2FA14, 'M', '𪊑'),
+    (0x2FA15, 'M', '麻'),
+    (0x2FA16, 'M', '䵖'),
+    (0x2FA17, 'M', '黹'),
+    (0x2FA18, 'M', '黾'),
+    (0x2FA19, 'M', '鼅'),
+    (0x2FA1A, 'M', '鼏'),
+    (0x2FA1B, 'M', '鼖'),
+    (0x2FA1C, 'M', '鼻'),
+    (0x2FA1D, 'M', '𪘀'),
+    (0x2FA1E, 'X'),
+    (0x30000, 'V'),
+    (0x3134B, 'X'),
+    (0x31350, 'V'),
+    (0x323B0, 'X'),
+    (0xE0100, 'I'),
+    (0xE01F0, 'X'),
+    ]
+
+uts46data = tuple(
+    _seg_0()
+    + _seg_1()
+    + _seg_2()
+    + _seg_3()
+    + _seg_4()
+    + _seg_5()
+    + _seg_6()
+    + _seg_7()
+    + _seg_8()
+    + _seg_9()
+    + _seg_10()
+    + _seg_11()
+    + _seg_12()
+    + _seg_13()
+    + _seg_14()
+    + _seg_15()
+    + _seg_16()
+    + _seg_17()
+    + _seg_18()
+    + _seg_19()
+    + _seg_20()
+    + _seg_21()
+    + _seg_22()
+    + _seg_23()
+    + _seg_24()
+    + _seg_25()
+    + _seg_26()
+    + _seg_27()
+    + _seg_28()
+    + _seg_29()
+    + _seg_30()
+    + _seg_31()
+    + _seg_32()
+    + _seg_33()
+    + _seg_34()
+    + _seg_35()
+    + _seg_36()
+    + _seg_37()
+    + _seg_38()
+    + _seg_39()
+    + _seg_40()
+    + _seg_41()
+    + _seg_42()
+    + _seg_43()
+    + _seg_44()
+    + _seg_45()
+    + _seg_46()
+    + _seg_47()
+    + _seg_48()
+    + _seg_49()
+    + _seg_50()
+    + _seg_51()
+    + _seg_52()
+    + _seg_53()
+    + _seg_54()
+    + _seg_55()
+    + _seg_56()
+    + _seg_57()
+    + _seg_58()
+    + _seg_59()
+    + _seg_60()
+    + _seg_61()
+    + _seg_62()
+    + _seg_63()
+    + _seg_64()
+    + _seg_65()
+    + _seg_66()
+    + _seg_67()
+    + _seg_68()
+    + _seg_69()
+    + _seg_70()
+    + _seg_71()
+    + _seg_72()
+    + _seg_73()
+    + _seg_74()
+    + _seg_75()
+    + _seg_76()
+    + _seg_77()
+    + _seg_78()
+    + _seg_79()
+    + _seg_80()
+    + _seg_81()
+)  # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...]
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__init__.py
new file mode 100644
index 0000000..1300b86
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__init__.py
@@ -0,0 +1,57 @@
+# coding: utf-8
+from .exceptions import *
+from .ext import ExtType, Timestamp
+
+import os
+import sys
+
+
+version = (1, 0, 5)
+__version__ = "1.0.5"
+
+
+if os.environ.get("MSGPACK_PUREPYTHON") or sys.version_info[0] == 2:
+    from .fallback import Packer, unpackb, Unpacker
+else:
+    try:
+        from ._cmsgpack import Packer, unpackb, Unpacker
+    except ImportError:
+        from .fallback import Packer, unpackb, Unpacker
+
+
+def pack(o, stream, **kwargs):
+    """
+    Pack object `o` and write it to `stream`
+
+    See :class:`Packer` for options.
+    """
+    packer = Packer(**kwargs)
+    stream.write(packer.pack(o))
+
+
+def packb(o, **kwargs):
+    """
+    Pack object `o` and return packed bytes
+
+    See :class:`Packer` for options.
+    """
+    return Packer(**kwargs).pack(o)
+
+
+def unpack(stream, **kwargs):
+    """
+    Unpack an object from `stream`.
+
+    Raises `ExtraData` when `stream` contains extra bytes.
+    See :class:`Unpacker` for options.
+    """
+    data = stream.read()
+    return unpackb(data, **kwargs)
+
+
+# alias for compatibility to simplejson/marshal/pickle.
+load = unpack
+loads = unpackb
+
+dump = pack
+dumps = packb
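+
+# A minimal usage sketch of the one-shot helpers and aliases defined above;
+# only names from this module are assumed:
+#
+#     payload = packb({"answer": 42})            # -> bytes
+#     assert unpackb(payload) == {"answer": 42}
+#     assert loads(dumps([1, 2, 3])) == [1, 2, 3]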
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/msgpack/exceptions.py b/.venv/lib/python3.12/site-packages/pip/_vendor/msgpack/exceptions.py
new file mode 100644
index 0000000..d6d2615
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/msgpack/exceptions.py
@@ -0,0 +1,48 @@
+class UnpackException(Exception):
+    """Base class for some exceptions raised while unpacking.
+
+    NOTE: unpack may raise exceptions other than subclasses of
+    UnpackException.  If you want to catch all errors, catch
+    Exception instead.
+    """
+
+
+class BufferFull(UnpackException):
+    pass
+
+
+class OutOfData(UnpackException):
+    pass
+
+
+class FormatError(ValueError, UnpackException):
+    """Invalid msgpack format"""
+
+
+class StackError(ValueError, UnpackException):
+    """Too nested"""
+
+
+# Deprecated.  Use ValueError instead
+UnpackValueError = ValueError
+
+
+class ExtraData(UnpackValueError):
+    """ExtraData is raised when there is trailing data.
+
+    This exception is raised only by one-shot (not streaming)
+    unpacking.
+    """
+
+    def __init__(self, unpacked, extra):
+        self.unpacked = unpacked
+        self.extra = extra
+
+    def __str__(self):
+        return "unpack(b) received extra data."
+
+
+# Deprecated.  Use Exception instead to catch all exception during packing.
+PackException = Exception
+PackValueError = ValueError
+PackOverflowError = OverflowError
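+
+# Sketch of how a caller can use ExtraData from one-shot unpacking: the
+# exception carries both the decoded object and the trailing bytes, so the
+# remainder can be inspected or fed to a streaming Unpacker.  (`unpackb` and
+# `buf` stand for the package-level unpacker and the caller's buffer.)
+#
+#     try:
+#         obj = unpackb(buf)
+#     except ExtraData as e:
+#         obj, rest = e.unpacked, e.extra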
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/msgpack/ext.py b/.venv/lib/python3.12/site-packages/pip/_vendor/msgpack/ext.py
new file mode 100644
index 0000000..23e0d6b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/msgpack/ext.py
@@ -0,0 +1,193 @@
+# coding: utf-8
+from collections import namedtuple
+import datetime
+import sys
+import struct
+
+
+PY2 = sys.version_info[0] == 2
+
+if PY2:
+    int_types = (int, long)
+    _utc = None
+else:
+    int_types = int
+    try:
+        _utc = datetime.timezone.utc
+    except AttributeError:
+        _utc = datetime.timezone(datetime.timedelta(0))
+
+
+class ExtType(namedtuple("ExtType", "code data")):
+    """ExtType represents ext type in msgpack."""
+
+    def __new__(cls, code, data):
+        if not isinstance(code, int):
+            raise TypeError("code must be int")
+        if not isinstance(data, bytes):
+            raise TypeError("data must be bytes")
+        if not 0 <= code <= 127:
+            raise ValueError("code must be 0~127")
+        return super(ExtType, cls).__new__(cls, code, data)
+
+
+class Timestamp(object):
+    """Timestamp represents the Timestamp extension type in msgpack.
+
+    When built with Cython, msgpack uses C methods to pack and unpack `Timestamp`. When using pure-Python
+    msgpack, :func:`to_bytes` and :func:`from_bytes` are used to pack and unpack `Timestamp`.
+
+    This class is immutable: do not overwrite seconds or nanoseconds.
+    """
+
+    __slots__ = ["seconds", "nanoseconds"]
+
+    def __init__(self, seconds, nanoseconds=0):
+        """Initialize a Timestamp object.
+
+        :param int seconds:
+            Number of seconds since the UNIX epoch (00:00:00 UTC Jan 1 1970, minus leap seconds).
+            May be negative.
+
+        :param int nanoseconds:
+            Number of nanoseconds to add to `seconds` to get fractional time.
+            Maximum is 999_999_999.  Default is 0.
+
+        Note: Negative times (before the UNIX epoch) are represented as negative seconds + positive ns.
+        """
+        if not isinstance(seconds, int_types):
+            raise TypeError("seconds must be an integer")
+        if not isinstance(nanoseconds, int_types):
+            raise TypeError("nanoseconds must be an integer")
+        if not (0 <= nanoseconds < 10**9):
+            raise ValueError(
+                "nanoseconds must be a non-negative integer less than 999999999."
+            )
+        self.seconds = seconds
+        self.nanoseconds = nanoseconds
+
+    def __repr__(self):
+        """String representation of Timestamp."""
+        return "Timestamp(seconds={0}, nanoseconds={1})".format(
+            self.seconds, self.nanoseconds
+        )
+
+    def __eq__(self, other):
+        """Check for equality with another Timestamp object"""
+        if type(other) is self.__class__:
+            return (
+                self.seconds == other.seconds and self.nanoseconds == other.nanoseconds
+            )
+        return False
+
+    def __ne__(self, other):
+        """not-equals method (see :func:`__eq__()`)"""
+        return not self.__eq__(other)
+
+    def __hash__(self):
+        return hash((self.seconds, self.nanoseconds))
+
+    @staticmethod
+    def from_bytes(b):
+        """Unpack bytes into a `Timestamp` object.
+
+        Used for pure-Python msgpack unpacking.
+
+        :param b: Payload from msgpack ext message with code -1
+        :type b: bytes
+
+        :returns: Timestamp object unpacked from msgpack ext payload
+        :rtype: Timestamp
+        """
+        if len(b) == 4:
+            seconds = struct.unpack("!L", b)[0]
+            nanoseconds = 0
+        elif len(b) == 8:
+            data64 = struct.unpack("!Q", b)[0]
+            seconds = data64 & 0x00000003FFFFFFFF
+            nanoseconds = data64 >> 34
+        elif len(b) == 12:
+            nanoseconds, seconds = struct.unpack("!Iq", b)
+        else:
+            raise ValueError(
+                "Timestamp type can only be created from 32, 64, or 96-bit byte objects"
+            )
+        return Timestamp(seconds, nanoseconds)
+
+    def to_bytes(self):
+        """Pack this Timestamp object into bytes.
+
+        Used for pure-Python msgpack packing.
+
+        :returns data: Payload for EXT message with code -1 (timestamp type)
+        :rtype: bytes
+        """
+        if (self.seconds >> 34) == 0:  # seconds is non-negative and fits in 34 bits
+            data64 = self.nanoseconds << 34 | self.seconds
+            if data64 & 0xFFFFFFFF00000000 == 0:
+                # nanoseconds is zero and seconds < 2**32, so timestamp 32
+                data = struct.pack("!L", data64)
+            else:
+                # timestamp 64
+                data = struct.pack("!Q", data64)
+        else:
+            # timestamp 96
+            data = struct.pack("!Iq", self.nanoseconds, self.seconds)
+        return data
+
+    @staticmethod
+    def from_unix(unix_sec):
+        """Create a Timestamp from posix timestamp in seconds.
+
+        :param unix_sec: Posix timestamp in seconds.
+        :type unix_sec: int or float.
+        """
+        seconds = int(unix_sec // 1)
+        nanoseconds = int((unix_sec % 1) * 10**9)
+        return Timestamp(seconds, nanoseconds)
+
+    def to_unix(self):
+        """Get the timestamp as a floating-point value.
+
+        :returns: posix timestamp
+        :rtype: float
+        """
+        return self.seconds + self.nanoseconds / 1e9
+
+    @staticmethod
+    def from_unix_nano(unix_ns):
+        """Create a Timestamp from posix timestamp in nanoseconds.
+
+        :param int unix_ns: Posix timestamp in nanoseconds.
+        :rtype: Timestamp
+        """
+        return Timestamp(*divmod(unix_ns, 10**9))
+
+    def to_unix_nano(self):
+        """Get the timestamp as a unixtime in nanoseconds.
+
+        :returns: posix timestamp in nanoseconds
+        :rtype: int
+        """
+        return self.seconds * 10**9 + self.nanoseconds
+
+    def to_datetime(self):
+        """Get the timestamp as a UTC datetime.
+
+        Python 2 is not supported.
+
+        :rtype: datetime.datetime
+        """
+        return datetime.datetime.fromtimestamp(0, _utc) + datetime.timedelta(
+            seconds=self.to_unix()
+        )
+
+    @staticmethod
+    def from_datetime(dt):
+        """Create a Timestamp from datetime with tzinfo.
+
+        Python 2 is not supported.
+
+        :rtype: Timestamp
+        """
+        return Timestamp.from_unix(dt.timestamp())
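+
+# Sketch of the wire sizes produced by to_bytes() above: timestamp 32 when
+# nanoseconds == 0 and seconds fits in 32 bits, timestamp 64 while the value
+# still fits in 34 bits of seconds plus 30 bits of nanoseconds, and
+# timestamp 96 otherwise (for example, negative seconds):
+#
+#     assert len(Timestamp(1).to_bytes()) == 4
+#     assert len(Timestamp(1, 500).to_bytes()) == 8
+#     assert len(Timestamp(-1).to_bytes()) == 12
+#     assert Timestamp.from_bytes(Timestamp(-1).to_bytes()) == Timestamp(-1)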
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/msgpack/fallback.py b/.venv/lib/python3.12/site-packages/pip/_vendor/msgpack/fallback.py
new file mode 100644
index 0000000..e8cebc1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/msgpack/fallback.py
@@ -0,0 +1,1010 @@
+"""Fallback pure Python implementation of msgpack"""
+from datetime import datetime as _DateTime
+import sys
+import struct
+
+
+PY2 = sys.version_info[0] == 2
+if PY2:
+    int_types = (int, long)
+
+    def dict_iteritems(d):
+        return d.iteritems()
+
+else:
+    int_types = int
+    unicode = str
+    xrange = range
+
+    def dict_iteritems(d):
+        return d.items()
+
+
+if sys.version_info < (3, 5):
+    # Ugly hack...
+    RecursionError = RuntimeError
+
+    def _is_recursionerror(e):
+        return (
+            len(e.args) == 1
+            and isinstance(e.args[0], str)
+            and e.args[0].startswith("maximum recursion depth exceeded")
+        )
+
+else:
+
+    def _is_recursionerror(e):
+        return True
+
+
+if hasattr(sys, "pypy_version_info"):
+    # io.BytesIO is slow on PyPy; PyPy's own StringBuilder is fastest, so
+    # wrap it in a minimal StringIO-like class below.
+    from __pypy__ import newlist_hint
+
+    try:
+        from __pypy__.builders import BytesBuilder as StringBuilder
+    except ImportError:
+        from __pypy__.builders import StringBuilder
+    USING_STRINGBUILDER = True
+
+    class StringIO(object):
+        def __init__(self, s=b""):
+            if s:
+                self.builder = StringBuilder(len(s))
+                self.builder.append(s)
+            else:
+                self.builder = StringBuilder()
+
+        def write(self, s):
+            if isinstance(s, memoryview):
+                s = s.tobytes()
+            elif isinstance(s, bytearray):
+                s = bytes(s)
+            self.builder.append(s)
+
+        def getvalue(self):
+            return self.builder.build()
+
+else:
+    USING_STRINGBUILDER = False
+    from io import BytesIO as StringIO
+
+    newlist_hint = lambda size: []
+
+
+from .exceptions import BufferFull, OutOfData, ExtraData, FormatError, StackError
+
+from .ext import ExtType, Timestamp
+
+
+EX_SKIP = 0
+EX_CONSTRUCT = 1
+EX_READ_ARRAY_HEADER = 2
+EX_READ_MAP_HEADER = 3
+
+TYPE_IMMEDIATE = 0
+TYPE_ARRAY = 1
+TYPE_MAP = 2
+TYPE_RAW = 3
+TYPE_BIN = 4
+TYPE_EXT = 5
+
+DEFAULT_RECURSE_LIMIT = 511
+
+
+def _check_type_strict(obj, t, type=type, tuple=tuple):
+    if type(t) is tuple:
+        return type(obj) in t
+    else:
+        return type(obj) is t
+
+
+def _get_data_from_buffer(obj):
+    view = memoryview(obj)
+    if view.itemsize != 1:
+        raise ValueError("cannot unpack from multi-byte object")
+    return view
+
+
+def unpackb(packed, **kwargs):
+    """
+    Unpack an object from `packed`.
+
+    Raises ``ExtraData`` when *packed* contains extra bytes.
+    Raises ``ValueError`` when *packed* is incomplete.
+    Raises ``FormatError`` when *packed* is not valid msgpack.
+    Raises ``StackError`` when *packed* contains too deeply nested data.
+    Other exceptions can be raised during unpacking.
+
+    See :class:`Unpacker` for options.
+    """
+    unpacker = Unpacker(None, max_buffer_size=len(packed), **kwargs)
+    unpacker.feed(packed)
+    try:
+        ret = unpacker._unpack()
+    except OutOfData:
+        raise ValueError("Unpack failed: incomplete input")
+    except RecursionError as e:
+        if _is_recursionerror(e):
+            raise StackError
+        raise
+    if unpacker._got_extradata():
+        raise ExtraData(ret, unpacker._get_extradata())
+    return ret
+
+
+if sys.version_info < (2, 7, 6):
+
+    def _unpack_from(f, b, o=0):
+        """Explicit type cast for legacy struct.unpack_from"""
+        return struct.unpack_from(f, bytes(b), o)
+
+else:
+    _unpack_from = struct.unpack_from
+
+_NO_FORMAT_USED = ""
+_MSGPACK_HEADERS = {
+    0xC4: (1, _NO_FORMAT_USED, TYPE_BIN),
+    0xC5: (2, ">H", TYPE_BIN),
+    0xC6: (4, ">I", TYPE_BIN),
+    0xC7: (2, "Bb", TYPE_EXT),
+    0xC8: (3, ">Hb", TYPE_EXT),
+    0xC9: (5, ">Ib", TYPE_EXT),
+    0xCA: (4, ">f"),
+    0xCB: (8, ">d"),
+    0xCC: (1, _NO_FORMAT_USED),
+    0xCD: (2, ">H"),
+    0xCE: (4, ">I"),
+    0xCF: (8, ">Q"),
+    0xD0: (1, "b"),
+    0xD1: (2, ">h"),
+    0xD2: (4, ">i"),
+    0xD3: (8, ">q"),
+    0xD4: (1, "b1s", TYPE_EXT),
+    0xD5: (2, "b2s", TYPE_EXT),
+    0xD6: (4, "b4s", TYPE_EXT),
+    0xD7: (8, "b8s", TYPE_EXT),
+    0xD8: (16, "b16s", TYPE_EXT),
+    0xD9: (1, _NO_FORMAT_USED, TYPE_RAW),
+    0xDA: (2, ">H", TYPE_RAW),
+    0xDB: (4, ">I", TYPE_RAW),
+    0xDC: (2, ">H", TYPE_ARRAY),
+    0xDD: (4, ">I", TYPE_ARRAY),
+    0xDE: (2, ">H", TYPE_MAP),
+    0xDF: (4, ">I", TYPE_MAP),
+}
+
+
+class Unpacker(object):
+    """Streaming unpacker.
+
+    Arguments:
+
+    :param file_like:
+        File-like object having `.read(n)` method.
+        If specified, unpacker reads serialized data from it and :meth:`feed()` is not usable.
+
+    :param int read_size:
+        Used as `file_like.read(read_size)`. (default: `min(16*1024, max_buffer_size)`)
+
+    :param bool use_list:
+        If true, unpack msgpack array to Python list.
+        Otherwise, unpack to Python tuple. (default: True)
+
+    :param bool raw:
+        If true, unpack msgpack raw to Python bytes.
+        Otherwise, unpack to Python str by decoding with UTF-8 encoding (default).
+
+    :param int timestamp:
+        Control how timestamp type is unpacked:
+
+            0 - Timestamp
+            1 - float  (Seconds from the EPOCH)
+            2 - int  (Nanoseconds from the EPOCH)
+            3 - datetime.datetime  (UTC).  Python 2 is not supported.
+
+    :param bool strict_map_key:
+        If true (default), only str or bytes are accepted for map (dict) keys.
+
+    :param callable object_hook:
+        When specified, it should be callable.
+        Unpacker calls it with a dict argument after unpacking msgpack map.
+        (See also simplejson)
+
+    :param callable object_pairs_hook:
+        When specified, it should be callable.
+        Unpacker calls it with a list of key-value pairs after unpacking msgpack map.
+        (See also simplejson)
+
+    :param str unicode_errors:
+        The error handler for decoding unicode. (default: 'strict')
+        This option should be used only when you have msgpack data which
+        contains invalid UTF-8 string.
+
+    :param int max_buffer_size:
+        Limits size of data waiting unpacked.  0 means 2**31-1.
+        The default value is 100*1024*1024 (100MiB).
+        Raises `BufferFull` exception when it is insufficient.
+        You should set this parameter when unpacking data from untrusted source.
+
+    :param int max_str_len:
+        Deprecated, use *max_buffer_size* instead.
+        Limits max length of str. (default: max_buffer_size)
+
+    :param int max_bin_len:
+        Deprecated, use *max_buffer_size* instead.
+        Limits max length of bin. (default: max_buffer_size)
+
+    :param int max_array_len:
+        Limits max length of array.
+        (default: max_buffer_size)
+
+    :param int max_map_len:
+        Limits max length of map.
+        (default: max_buffer_size//2)
+
+    :param int max_ext_len:
+        Deprecated, use *max_buffer_size* instead.
+        Limits max size of ext type.  (default: max_buffer_size)
+
+    Example of streaming deserialization from a file-like object::
+
+        unpacker = Unpacker(file_like)
+        for o in unpacker:
+            process(o)
+
+    Example of streaming deserialization from a socket::
+
+        unpacker = Unpacker()
+        while True:
+            buf = sock.recv(1024**2)
+            if not buf:
+                break
+            unpacker.feed(buf)
+            for o in unpacker:
+                process(o)
+
+    Raises ``ExtraData`` when *packed* contains extra bytes.
+    Raises ``OutOfData`` when *packed* is incomplete.
+    Raises ``FormatError`` when *packed* is not valid msgpack.
+    Raises ``StackError`` when *packed* contains too deeply nested data.
+    Other exceptions can be raised during unpacking.
+    """
+
+    def __init__(
+        self,
+        file_like=None,
+        read_size=0,
+        use_list=True,
+        raw=False,
+        timestamp=0,
+        strict_map_key=True,
+        object_hook=None,
+        object_pairs_hook=None,
+        list_hook=None,
+        unicode_errors=None,
+        max_buffer_size=100 * 1024 * 1024,
+        ext_hook=ExtType,
+        max_str_len=-1,
+        max_bin_len=-1,
+        max_array_len=-1,
+        max_map_len=-1,
+        max_ext_len=-1,
+    ):
+        if unicode_errors is None:
+            unicode_errors = "strict"
+
+        if file_like is None:
+            self._feeding = True
+        else:
+            if not callable(file_like.read):
+                raise TypeError("`file_like.read` must be callable")
+            self.file_like = file_like
+            self._feeding = False
+
+        #: array of bytes fed.
+        self._buffer = bytearray()
+        #: Which position we currently read from.
+        self._buff_i = 0
+
+        # When Unpacker is used as an iterable, between the calls to next(),
+        # the buffer is not "consumed" completely, for efficiency sake.
+        # Instead, it is done sloppily.  To make sure we raise BufferFull at
+        # the correct moments, we have to keep track of how sloppy we were.
+        # Furthermore, when the buffer is incomplete (that is: in the case
+        # we raise an OutOfData) we need to rollback the buffer to the correct
+        # state, which _buf_checkpoint records.
+        self._buf_checkpoint = 0
+
+        if not max_buffer_size:
+            max_buffer_size = 2**31 - 1
+        if max_str_len == -1:
+            max_str_len = max_buffer_size
+        if max_bin_len == -1:
+            max_bin_len = max_buffer_size
+        if max_array_len == -1:
+            max_array_len = max_buffer_size
+        if max_map_len == -1:
+            max_map_len = max_buffer_size // 2
+        if max_ext_len == -1:
+            max_ext_len = max_buffer_size
+
+        self._max_buffer_size = max_buffer_size
+        if read_size > self._max_buffer_size:
+            raise ValueError("read_size must be smaller than max_buffer_size")
+        self._read_size = read_size or min(self._max_buffer_size, 16 * 1024)
+        self._raw = bool(raw)
+        self._strict_map_key = bool(strict_map_key)
+        self._unicode_errors = unicode_errors
+        self._use_list = use_list
+        if not (0 <= timestamp <= 3):
+            raise ValueError("timestamp must be 0..3")
+        self._timestamp = timestamp
+        self._list_hook = list_hook
+        self._object_hook = object_hook
+        self._object_pairs_hook = object_pairs_hook
+        self._ext_hook = ext_hook
+        self._max_str_len = max_str_len
+        self._max_bin_len = max_bin_len
+        self._max_array_len = max_array_len
+        self._max_map_len = max_map_len
+        self._max_ext_len = max_ext_len
+        self._stream_offset = 0
+
+        if list_hook is not None and not callable(list_hook):
+            raise TypeError("`list_hook` is not callable")
+        if object_hook is not None and not callable(object_hook):
+            raise TypeError("`object_hook` is not callable")
+        if object_pairs_hook is not None and not callable(object_pairs_hook):
+            raise TypeError("`object_pairs_hook` is not callable")
+        if object_hook is not None and object_pairs_hook is not None:
+            raise TypeError(
+                "object_pairs_hook and object_hook are mutually " "exclusive"
+            )
+        if not callable(ext_hook):
+            raise TypeError("`ext_hook` is not callable")
+
+    def feed(self, next_bytes):
+        assert self._feeding
+        view = _get_data_from_buffer(next_bytes)
+        if len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size:
+            raise BufferFull
+
+        # Strip buffer before checkpoint before reading file.
+        if self._buf_checkpoint > 0:
+            del self._buffer[: self._buf_checkpoint]
+            self._buff_i -= self._buf_checkpoint
+            self._buf_checkpoint = 0
+
+        # Use extend here: INPLACE_ADD += doesn't reliably typecast memoryview in jython
+        self._buffer.extend(view)
+
+    def _consume(self):
+        """Gets rid of the used parts of the buffer."""
+        self._stream_offset += self._buff_i - self._buf_checkpoint
+        self._buf_checkpoint = self._buff_i
+
+    def _got_extradata(self):
+        return self._buff_i < len(self._buffer)
+
+    def _get_extradata(self):
+        return self._buffer[self._buff_i :]
+
+    def read_bytes(self, n):
+        ret = self._read(n, raise_outofdata=False)
+        self._consume()
+        return ret
+
+    def _read(self, n, raise_outofdata=True):
+        # (int) -> bytearray
+        self._reserve(n, raise_outofdata=raise_outofdata)
+        i = self._buff_i
+        ret = self._buffer[i : i + n]
+        self._buff_i = i + len(ret)
+        return ret
+
+    def _reserve(self, n, raise_outofdata=True):
+        remain_bytes = len(self._buffer) - self._buff_i - n
+
+        # Fast path: buffer has n bytes already
+        if remain_bytes >= 0:
+            return
+
+        if self._feeding:
+            self._buff_i = self._buf_checkpoint
+            raise OutOfData
+
+        # Strip buffer before checkpoint before reading file.
+        if self._buf_checkpoint > 0:
+            del self._buffer[: self._buf_checkpoint]
+            self._buff_i -= self._buf_checkpoint
+            self._buf_checkpoint = 0
+
+        # Read from file
+        remain_bytes = -remain_bytes
+        if remain_bytes + len(self._buffer) > self._max_buffer_size:
+            raise BufferFull
+        while remain_bytes > 0:
+            to_read_bytes = max(self._read_size, remain_bytes)
+            read_data = self.file_like.read(to_read_bytes)
+            if not read_data:
+                break
+            assert isinstance(read_data, bytes)
+            self._buffer += read_data
+            remain_bytes -= len(read_data)
+
+        if len(self._buffer) < n + self._buff_i and raise_outofdata:
+            self._buff_i = 0  # rollback
+            raise OutOfData
+
+    def _read_header(self):
+        typ = TYPE_IMMEDIATE
+        n = 0
+        obj = None
+        self._reserve(1)
+        b = self._buffer[self._buff_i]
+        self._buff_i += 1
+        if b & 0b10000000 == 0:
+            obj = b
+        elif b & 0b11100000 == 0b11100000:
+            obj = -1 - (b ^ 0xFF)
+        elif b & 0b11100000 == 0b10100000:
+            n = b & 0b00011111
+            typ = TYPE_RAW
+            if n > self._max_str_len:
+                raise ValueError("%s exceeds max_str_len(%s)" % (n, self._max_str_len))
+            obj = self._read(n)
+        elif b & 0b11110000 == 0b10010000:
+            n = b & 0b00001111
+            typ = TYPE_ARRAY
+            if n > self._max_array_len:
+                raise ValueError(
+                    "%s exceeds max_array_len(%s)" % (n, self._max_array_len)
+                )
+        elif b & 0b11110000 == 0b10000000:
+            n = b & 0b00001111
+            typ = TYPE_MAP
+            if n > self._max_map_len:
+                raise ValueError("%s exceeds max_map_len(%s)" % (n, self._max_map_len))
+        elif b == 0xC0:
+            obj = None
+        elif b == 0xC2:
+            obj = False
+        elif b == 0xC3:
+            obj = True
+        elif 0xC4 <= b <= 0xC6:
+            size, fmt, typ = _MSGPACK_HEADERS[b]
+            self._reserve(size)
+            if len(fmt) > 0:
+                n = _unpack_from(fmt, self._buffer, self._buff_i)[0]
+            else:
+                n = self._buffer[self._buff_i]
+            self._buff_i += size
+            if n > self._max_bin_len:
+                raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
+            obj = self._read(n)
+        elif 0xC7 <= b <= 0xC9:
+            size, fmt, typ = _MSGPACK_HEADERS[b]
+            self._reserve(size)
+            L, n = _unpack_from(fmt, self._buffer, self._buff_i)
+            self._buff_i += size
+            if L > self._max_ext_len:
+                raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
+            obj = self._read(L)
+        elif 0xCA <= b <= 0xD3:
+            size, fmt = _MSGPACK_HEADERS[b]
+            self._reserve(size)
+            if len(fmt) > 0:
+                obj = _unpack_from(fmt, self._buffer, self._buff_i)[0]
+            else:
+                obj = self._buffer[self._buff_i]
+            self._buff_i += size
+        elif 0xD4 <= b <= 0xD8:
+            size, fmt, typ = _MSGPACK_HEADERS[b]
+            if self._max_ext_len < size:
+                raise ValueError(
+                    "%s exceeds max_ext_len(%s)" % (size, self._max_ext_len)
+                )
+            self._reserve(size + 1)
+            n, obj = _unpack_from(fmt, self._buffer, self._buff_i)
+            self._buff_i += size + 1
+        elif 0xD9 <= b <= 0xDB:
+            size, fmt, typ = _MSGPACK_HEADERS[b]
+            self._reserve(size)
+            if len(fmt) > 0:
+                (n,) = _unpack_from(fmt, self._buffer, self._buff_i)
+            else:
+                n = self._buffer[self._buff_i]
+            self._buff_i += size
+            if n > self._max_str_len:
+                raise ValueError("%s exceeds max_str_len(%s)" % (n, self._max_str_len))
+            obj = self._read(n)
+        elif 0xDC <= b <= 0xDD:
+            size, fmt, typ = _MSGPACK_HEADERS[b]
+            self._reserve(size)
+            (n,) = _unpack_from(fmt, self._buffer, self._buff_i)
+            self._buff_i += size
+            if n > self._max_array_len:
+                raise ValueError(
+                    "%s exceeds max_array_len(%s)" % (n, self._max_array_len)
+                )
+        elif 0xDE <= b <= 0xDF:
+            size, fmt, typ = _MSGPACK_HEADERS[b]
+            self._reserve(size)
+            (n,) = _unpack_from(fmt, self._buffer, self._buff_i)
+            self._buff_i += size
+            if n > self._max_map_len:
+                raise ValueError("%s exceeds max_map_len(%s)" % (n, self._max_map_len))
+        else:
+            raise FormatError("Unknown header: 0x%x" % b)
+        return typ, n, obj
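+
+    # Worked example of the header dispatch above (an illustrative sketch):
+    #
+    #     u = Unpacker(); u.feed(b"\xa5hello");        next(u)  # -> "hello"
+    #     u = Unpacker(); u.feed(b"\x93\x01\x02\x03"); next(u)  # -> [1, 2, 3]
+    #
+    # 0xA5 matches the 0b101xxxxx fixstr branch (length in the low 5 bits);
+    # 0x93 matches the fixarray branch (3 elements); the longer forms
+    # (0xC4-0xDF) take their sizes and types from _MSGPACK_HEADERS instead.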
+
+    def _unpack(self, execute=EX_CONSTRUCT):
+        typ, n, obj = self._read_header()
+
+        if execute == EX_READ_ARRAY_HEADER:
+            if typ != TYPE_ARRAY:
+                raise ValueError("Expected array")
+            return n
+        if execute == EX_READ_MAP_HEADER:
+            if typ != TYPE_MAP:
+                raise ValueError("Expected map")
+            return n
+        # TODO should we eliminate the recursion?
+        if typ == TYPE_ARRAY:
+            if execute == EX_SKIP:
+                for i in xrange(n):
+                    # TODO check whether we need to call `list_hook`
+                    self._unpack(EX_SKIP)
+                return
+            ret = newlist_hint(n)
+            for i in xrange(n):
+                ret.append(self._unpack(EX_CONSTRUCT))
+            if self._list_hook is not None:
+                ret = self._list_hook(ret)
+            # TODO is the interaction between `list_hook` and `use_list` ok?
+            return ret if self._use_list else tuple(ret)
+        if typ == TYPE_MAP:
+            if execute == EX_SKIP:
+                for i in xrange(n):
+                    # TODO check whether we need to call hooks
+                    self._unpack(EX_SKIP)
+                    self._unpack(EX_SKIP)
+                return
+            if self._object_pairs_hook is not None:
+                ret = self._object_pairs_hook(
+                    (self._unpack(EX_CONSTRUCT), self._unpack(EX_CONSTRUCT))
+                    for _ in xrange(n)
+                )
+            else:
+                ret = {}
+                for _ in xrange(n):
+                    key = self._unpack(EX_CONSTRUCT)
+                    if self._strict_map_key and type(key) not in (unicode, bytes):
+                        raise ValueError(
+                            "%s is not allowed for map key" % str(type(key))
+                        )
+                    if not PY2 and type(key) is str:
+                        key = sys.intern(key)
+                    ret[key] = self._unpack(EX_CONSTRUCT)
+                if self._object_hook is not None:
+                    ret = self._object_hook(ret)
+            return ret
+        if execute == EX_SKIP:
+            return
+        if typ == TYPE_RAW:
+            if self._raw:
+                obj = bytes(obj)
+            else:
+                obj = obj.decode("utf_8", self._unicode_errors)
+            return obj
+        if typ == TYPE_BIN:
+            return bytes(obj)
+        if typ == TYPE_EXT:
+            if n == -1:  # timestamp
+                ts = Timestamp.from_bytes(bytes(obj))
+                if self._timestamp == 1:
+                    return ts.to_unix()
+                elif self._timestamp == 2:
+                    return ts.to_unix_nano()
+                elif self._timestamp == 3:
+                    return ts.to_datetime()
+                else:
+                    return ts
+            else:
+                return self._ext_hook(n, bytes(obj))
+        assert typ == TYPE_IMMEDIATE
+        return obj
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        try:
+            ret = self._unpack(EX_CONSTRUCT)
+            self._consume()
+            return ret
+        except OutOfData:
+            self._consume()
+            raise StopIteration
+        except RecursionError:
+            raise StackError
+
+    next = __next__
+
+    def skip(self):
+        self._unpack(EX_SKIP)
+        self._consume()
+
+    def unpack(self):
+        try:
+            ret = self._unpack(EX_CONSTRUCT)
+        except RecursionError:
+            raise StackError
+        self._consume()
+        return ret
+
+    def read_array_header(self):
+        ret = self._unpack(EX_READ_ARRAY_HEADER)
+        self._consume()
+        return ret
+
+    def read_map_header(self):
+        ret = self._unpack(EX_READ_MAP_HEADER)
+        self._consume()
+        return ret
+
+    def tell(self):
+        return self._stream_offset
+
+
+class Packer(object):
+    """
+    MessagePack Packer
+
+    Usage::
+
+        packer = Packer()
+        astream.write(packer.pack(a))
+        astream.write(packer.pack(b))
+
+    Packer's constructor has some keyword arguments:
+
+    :param callable default:
+        Convert user type to builtin type that Packer supports.
+        See also simplejson's document.
+
+    :param bool use_single_float:
+        Use single precision float type for float. (default: False)
+
+    :param bool autoreset:
+        Reset buffer after each pack and return its content as `bytes`. (default: True).
+        If set to false, use `bytes()` to get content and `.reset()` to clear buffer.
+
+    :param bool use_bin_type:
+        Use bin type introduced in msgpack spec 2.0 for bytes.
+        It also enables str8 type for unicode. (default: True)
+
+    :param bool strict_types:
+        If set to true, types will be checked to be exact. Derived classes
+        from serializable types will not be serialized and will be
+        treated as unsupported type and forwarded to default.
+        Additionally tuples will not be serialized as lists.
+        This is useful when trying to implement accurate serialization
+        for python types.
+
+    :param bool datetime:
+        If set to true, datetime with tzinfo is packed into Timestamp type.
+        Note that the tzinfo is stripped in the timestamp.
+        You can get UTC datetime with `timestamp=3` option of the Unpacker.
+        (Python 2 is not supported).
+
+    :param str unicode_errors:
+        The error handler for encoding unicode. (default: 'strict')
+        DO NOT USE THIS!!  This option is kept for very specific usage.
+
+    """
+
+    def __init__(
+        self,
+        default=None,
+        use_single_float=False,
+        autoreset=True,
+        use_bin_type=True,
+        strict_types=False,
+        datetime=False,
+        unicode_errors=None,
+    ):
+        self._strict_types = strict_types
+        self._use_float = use_single_float
+        self._autoreset = autoreset
+        self._use_bin_type = use_bin_type
+        self._buffer = StringIO()
+        if PY2 and datetime:
+            raise ValueError("datetime is not supported in Python 2")
+        self._datetime = bool(datetime)
+        self._unicode_errors = unicode_errors or "strict"
+        if default is not None:
+            if not callable(default):
+                raise TypeError("default must be callable")
+        self._default = default
+
+    def _pack(
+        self,
+        obj,
+        nest_limit=DEFAULT_RECURSE_LIMIT,
+        check=isinstance,
+        check_type_strict=_check_type_strict,
+    ):
+        default_used = False
+        if self._strict_types:
+            check = check_type_strict
+            list_types = list
+        else:
+            list_types = (list, tuple)
+        while True:
+            if nest_limit < 0:
+                raise ValueError("recursion limit exceeded")
+            if obj is None:
+                return self._buffer.write(b"\xc0")
+            if check(obj, bool):
+                if obj:
+                    return self._buffer.write(b"\xc3")
+                return self._buffer.write(b"\xc2")
+            if check(obj, int_types):
+                if 0 <= obj < 0x80:
+                    return self._buffer.write(struct.pack("B", obj))
+                if -0x20 <= obj < 0:
+                    return self._buffer.write(struct.pack("b", obj))
+                if 0x80 <= obj <= 0xFF:
+                    return self._buffer.write(struct.pack("BB", 0xCC, obj))
+                if -0x80 <= obj < 0:
+                    return self._buffer.write(struct.pack(">Bb", 0xD0, obj))
+                if 0xFF < obj <= 0xFFFF:
+                    return self._buffer.write(struct.pack(">BH", 0xCD, obj))
+                if -0x8000 <= obj < -0x80:
+                    return self._buffer.write(struct.pack(">Bh", 0xD1, obj))
+                if 0xFFFF < obj <= 0xFFFFFFFF:
+                    return self._buffer.write(struct.pack(">BI", 0xCE, obj))
+                if -0x80000000 <= obj < -0x8000:
+                    return self._buffer.write(struct.pack(">Bi", 0xD2, obj))
+                if 0xFFFFFFFF < obj <= 0xFFFFFFFFFFFFFFFF:
+                    return self._buffer.write(struct.pack(">BQ", 0xCF, obj))
+                if -0x8000000000000000 <= obj < -0x80000000:
+                    return self._buffer.write(struct.pack(">Bq", 0xD3, obj))
+                if not default_used and self._default is not None:
+                    obj = self._default(obj)
+                    default_used = True
+                    continue
+                raise OverflowError("Integer value out of range")
+            if check(obj, (bytes, bytearray)):
+                n = len(obj)
+                if n >= 2**32:
+                    raise ValueError("%s is too large" % type(obj).__name__)
+                self._pack_bin_header(n)
+                return self._buffer.write(obj)
+            if check(obj, unicode):
+                obj = obj.encode("utf-8", self._unicode_errors)
+                n = len(obj)
+                if n >= 2**32:
+                    raise ValueError("String is too large")
+                self._pack_raw_header(n)
+                return self._buffer.write(obj)
+            if check(obj, memoryview):
+                n = obj.nbytes
+                if n >= 2**32:
+                    raise ValueError("Memoryview is too large")
+                self._pack_bin_header(n)
+                return self._buffer.write(obj)
+            if check(obj, float):
+                if self._use_float:
+                    return self._buffer.write(struct.pack(">Bf", 0xCA, obj))
+                return self._buffer.write(struct.pack(">Bd", 0xCB, obj))
+            if check(obj, (ExtType, Timestamp)):
+                if check(obj, Timestamp):
+                    code = -1
+                    data = obj.to_bytes()
+                else:
+                    code = obj.code
+                    data = obj.data
+                assert isinstance(code, int)
+                assert isinstance(data, bytes)
+                L = len(data)
+                if L == 1:
+                    self._buffer.write(b"\xd4")
+                elif L == 2:
+                    self._buffer.write(b"\xd5")
+                elif L == 4:
+                    self._buffer.write(b"\xd6")
+                elif L == 8:
+                    self._buffer.write(b"\xd7")
+                elif L == 16:
+                    self._buffer.write(b"\xd8")
+                elif L <= 0xFF:
+                    self._buffer.write(struct.pack(">BB", 0xC7, L))
+                elif L <= 0xFFFF:
+                    self._buffer.write(struct.pack(">BH", 0xC8, L))
+                else:
+                    self._buffer.write(struct.pack(">BI", 0xC9, L))
+                self._buffer.write(struct.pack("b", code))
+                self._buffer.write(data)
+                return
+            if check(obj, list_types):
+                n = len(obj)
+                self._pack_array_header(n)
+                for i in xrange(n):
+                    self._pack(obj[i], nest_limit - 1)
+                return
+            if check(obj, dict):
+                return self._pack_map_pairs(
+                    len(obj), dict_iteritems(obj), nest_limit - 1
+                )
+
+            if self._datetime and check(obj, _DateTime) and obj.tzinfo is not None:
+                obj = Timestamp.from_datetime(obj)
+                default_used = 1
+                continue
+
+            if not default_used and self._default is not None:
+                obj = self._default(obj)
+                default_used = 1
+                continue
+
+            if self._datetime and check(obj, _DateTime):
+                raise ValueError("Cannot serialize %r where tzinfo=None" % (obj,))
+
+            raise TypeError("Cannot serialize %r" % (obj,))
+
+    def pack(self, obj):
+        try:
+            self._pack(obj)
+        except:
+            self._buffer = StringIO()  # force reset
+            raise
+        if self._autoreset:
+            ret = self._buffer.getvalue()
+            self._buffer = StringIO()
+            return ret
+
+    def pack_map_pairs(self, pairs):
+        self._pack_map_pairs(len(pairs), pairs)
+        if self._autoreset:
+            ret = self._buffer.getvalue()
+            self._buffer = StringIO()
+            return ret
+
+    def pack_array_header(self, n):
+        if n >= 2**32:
+            raise ValueError
+        self._pack_array_header(n)
+        if self._autoreset:
+            ret = self._buffer.getvalue()
+            self._buffer = StringIO()
+            return ret
+
+    def pack_map_header(self, n):
+        if n >= 2**32:
+            raise ValueError
+        self._pack_map_header(n)
+        if self._autoreset:
+            ret = self._buffer.getvalue()
+            self._buffer = StringIO()
+            return ret
+
+    def pack_ext_type(self, typecode, data):
+        if not isinstance(typecode, int):
+            raise TypeError("typecode must have int type.")
+        if not 0 <= typecode <= 127:
+            raise ValueError("typecode should be 0-127")
+        if not isinstance(data, bytes):
+            raise TypeError("data must have bytes type")
+        L = len(data)
+        if L > 0xFFFFFFFF:
+            raise ValueError("Too large data")
+        if L == 1:
+            self._buffer.write(b"\xd4")
+        elif L == 2:
+            self._buffer.write(b"\xd5")
+        elif L == 4:
+            self._buffer.write(b"\xd6")
+        elif L == 8:
+            self._buffer.write(b"\xd7")
+        elif L == 16:
+            self._buffer.write(b"\xd8")
+        elif L <= 0xFF:
+            self._buffer.write(b"\xc7" + struct.pack("B", L))
+        elif L <= 0xFFFF:
+            self._buffer.write(b"\xc8" + struct.pack(">H", L))
+        else:
+            self._buffer.write(b"\xc9" + struct.pack(">I", L))
+        self._buffer.write(struct.pack("B", typecode))
+        self._buffer.write(data)
+
+    def _pack_array_header(self, n):
+        if n <= 0x0F:
+            return self._buffer.write(struct.pack("B", 0x90 + n))
+        if n <= 0xFFFF:
+            return self._buffer.write(struct.pack(">BH", 0xDC, n))
+        if n <= 0xFFFFFFFF:
+            return self._buffer.write(struct.pack(">BI", 0xDD, n))
+        raise ValueError("Array is too large")
+
+    def _pack_map_header(self, n):
+        if n <= 0x0F:
+            return self._buffer.write(struct.pack("B", 0x80 + n))
+        if n <= 0xFFFF:
+            return self._buffer.write(struct.pack(">BH", 0xDE, n))
+        if n <= 0xFFFFFFFF:
+            return self._buffer.write(struct.pack(">BI", 0xDF, n))
+        raise ValueError("Dict is too large")
+
+    def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT):
+        self._pack_map_header(n)
+        for (k, v) in pairs:
+            self._pack(k, nest_limit - 1)
+            self._pack(v, nest_limit - 1)
+
+    def _pack_raw_header(self, n):
+        if n <= 0x1F:
+            self._buffer.write(struct.pack("B", 0xA0 + n))
+        elif self._use_bin_type and n <= 0xFF:
+            self._buffer.write(struct.pack(">BB", 0xD9, n))
+        elif n <= 0xFFFF:
+            self._buffer.write(struct.pack(">BH", 0xDA, n))
+        elif n <= 0xFFFFFFFF:
+            self._buffer.write(struct.pack(">BI", 0xDB, n))
+        else:
+            raise ValueError("Raw is too large")
+
+    def _pack_bin_header(self, n):
+        if not self._use_bin_type:
+            return self._pack_raw_header(n)
+        elif n <= 0xFF:
+            return self._buffer.write(struct.pack(">BB", 0xC4, n))
+        elif n <= 0xFFFF:
+            return self._buffer.write(struct.pack(">BH", 0xC5, n))
+        elif n <= 0xFFFFFFFF:
+            return self._buffer.write(struct.pack(">BI", 0xC6, n))
+        else:
+            raise ValueError("Bin is too large")
+
+    def bytes(self):
+        """Return internal buffer contents as bytes object"""
+        return self._buffer.getvalue()
+
+    def reset(self):
+        """Reset internal buffer.
+
+        This method is useful only when autoreset=False.
+        """
+        self._buffer = StringIO()
+
+    def getbuffer(self):
+        """Return view of internal buffer."""
+        if USING_STRINGBUILDER or PY2:
+            return memoryview(self.bytes())
+        else:
+            return self._buffer.getbuffer()
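The ext/fixext branch of the packer above selects a MessagePack header purely from the payload length (0xd4-0xd8 for exactly 1/2/4/8/16 bytes, otherwise ext 8/16/32 with an explicit length field). A minimal standalone sketch of that selection; `ext_header` is a hypothetical helper for illustration, not part of the vendored module:

import struct

def ext_header(typecode: int, length: int) -> bytes:
    # Fixext markers 0xd4-0xd8 cover payloads of exactly 1, 2, 4, 8, or 16 bytes;
    # ext 8/16/32 (0xc7/0xc8/0xc9) carry an explicit big-endian length field.
    fixext = {1: b"\xd4", 2: b"\xd5", 4: b"\xd6", 8: b"\xd7", 16: b"\xd8"}
    if length in fixext:
        head = fixext[length]
    elif length <= 0xFF:
        head = b"\xc7" + struct.pack("B", length)
    elif length <= 0xFFFF:
        head = b"\xc8" + struct.pack(">H", length)
    elif length <= 0xFFFFFFFF:
        head = b"\xc9" + struct.pack(">I", length)
    else:
        raise ValueError("Too large data")
    # The single type byte follows the header, then the payload itself.
    return head + struct.pack("B", typecode)

assert ext_header(5, 4) == b"\xd6\x05"      # fixext 4, typecode 5
assert ext_header(2, 3) == b"\xc7\x03\x02"  # ext 8: length 3, typecode 2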
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/__about__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/__about__.py
new file mode 100644
index 0000000..3551bc2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/__about__.py
@@ -0,0 +1,26 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+__all__ = [
+    "__title__",
+    "__summary__",
+    "__uri__",
+    "__version__",
+    "__author__",
+    "__email__",
+    "__license__",
+    "__copyright__",
+]
+
+__title__ = "packaging"
+__summary__ = "Core utilities for Python packages"
+__uri__ = "https://github.com/pypa/packaging"
+
+__version__ = "21.3"
+
+__author__ = "Donald Stufft and individual contributors"
+__email__ = "donald@stufft.io"
+
+__license__ = "BSD-2-Clause or Apache-2.0"
+__copyright__ = "2014-2019 %s" % __author__
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/__init__.py
new file mode 100644
index 0000000..3c50c5d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/__init__.py
@@ -0,0 +1,25 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from .__about__ import (
+    __author__,
+    __copyright__,
+    __email__,
+    __license__,
+    __summary__,
+    __title__,
+    __uri__,
+    __version__,
+)
+
+__all__ = [
+    "__title__",
+    "__summary__",
+    "__uri__",
+    "__version__",
+    "__author__",
+    "__email__",
+    "__license__",
+    "__copyright__",
+]
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/_manylinux.py b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/_manylinux.py
new file mode 100644
index 0000000..4c379aa
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/_manylinux.py
@@ -0,0 +1,301 @@
+import collections
+import functools
+import os
+import re
+import struct
+import sys
+import warnings
+from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple
+
+
+# Python does not provide platform information at sufficient granularity to
+# identify the architecture of the running executable in some cases, so we
+# determine it dynamically by reading the information from the running
+# process. This only applies on Linux, which uses the ELF format.
+class _ELFFileHeader:
+    # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
+    class _InvalidELFFileHeader(ValueError):
+        """
+        An invalid ELF file header was found.
+        """
+
+    ELF_MAGIC_NUMBER = 0x7F454C46
+    ELFCLASS32 = 1
+    ELFCLASS64 = 2
+    ELFDATA2LSB = 1
+    ELFDATA2MSB = 2
+    EM_386 = 3
+    EM_S390 = 22
+    EM_ARM = 40
+    EM_X86_64 = 62
+    EF_ARM_ABIMASK = 0xFF000000
+    EF_ARM_ABI_VER5 = 0x05000000
+    EF_ARM_ABI_FLOAT_HARD = 0x00000400
+
+    def __init__(self, file: IO[bytes]) -> None:
+        def unpack(fmt: str) -> int:
+            try:
+                data = file.read(struct.calcsize(fmt))
+                result: Tuple[int, ...] = struct.unpack(fmt, data)
+            except struct.error:
+                raise _ELFFileHeader._InvalidELFFileHeader()
+            return result[0]
+
+        self.e_ident_magic = unpack(">I")
+        if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
+            raise _ELFFileHeader._InvalidELFFileHeader()
+        self.e_ident_class = unpack("B")
+        if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
+            raise _ELFFileHeader._InvalidELFFileHeader()
+        self.e_ident_data = unpack("B")
+        if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
+            raise _ELFFileHeader._InvalidELFFileHeader()
+        self.e_ident_version = unpack("B")
+        self.e_ident_osabi = unpack("B")
+        self.e_ident_abiversion = unpack("B")
+        self.e_ident_pad = file.read(7)
+        format_h = "H"
+        format_i = "I"
+        format_q = "Q"
+        format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
+        self.e_type = unpack(format_h)
+        self.e_machine = unpack(format_h)
+        self.e_version = unpack(format_i)
+        self.e_entry = unpack(format_p)
+        self.e_phoff = unpack(format_p)
+        self.e_shoff = unpack(format_p)
+        self.e_flags = unpack(format_i)
+        self.e_ehsize = unpack(format_h)
+        self.e_phentsize = unpack(format_h)
+        self.e_phnum = unpack(format_h)
+        self.e_shentsize = unpack(format_h)
+        self.e_shnum = unpack(format_h)
+        self.e_shstrndx = unpack(format_h)
+
+
+def _get_elf_header() -> Optional[_ELFFileHeader]:
+    try:
+        with open(sys.executable, "rb") as f:
+            elf_header = _ELFFileHeader(f)
+    except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
+        return None
+    return elf_header
+
+
+def _is_linux_armhf() -> bool:
+    # hard-float ABI can be detected from the ELF header of the running
+    # process
+    # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
+    elf_header = _get_elf_header()
+    if elf_header is None:
+        return False
+    result = elf_header.e_ident_class == elf_header.ELFCLASS32
+    result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
+    result &= elf_header.e_machine == elf_header.EM_ARM
+    result &= (
+        elf_header.e_flags & elf_header.EF_ARM_ABIMASK
+    ) == elf_header.EF_ARM_ABI_VER5
+    result &= (
+        elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD
+    ) == elf_header.EF_ARM_ABI_FLOAT_HARD
+    return result
+
+
+def _is_linux_i686() -> bool:
+    elf_header = _get_elf_header()
+    if elf_header is None:
+        return False
+    result = elf_header.e_ident_class == elf_header.ELFCLASS32
+    result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
+    result &= elf_header.e_machine == elf_header.EM_386
+    return result
+
+
+def _have_compatible_abi(arch: str) -> bool:
+    if arch == "armv7l":
+        return _is_linux_armhf()
+    if arch == "i686":
+        return _is_linux_i686()
+    return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
+
+
+# If glibc ever changes its major version, we need to know what the last
+# minor version was, so we can build the complete list of all versions.
+# For now, guess what the highest minor version might be, assume it will
+# be 50 for testing. Once this actually happens, update the dictionary
+# with the actual value.
+_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
+
+
+class _GLibCVersion(NamedTuple):
+    major: int
+    minor: int
+
+
+def _glibc_version_string_confstr() -> Optional[str]:
+    """
+    Primary implementation of glibc_version_string using os.confstr.
+    """
+    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
+    # to be broken or missing. This strategy is used in the standard library
+    # platform module.
+    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
+    try:
+        # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17".
+        version_string = os.confstr("CS_GNU_LIBC_VERSION")
+        assert version_string is not None
+        _, version = version_string.split()
+    except (AssertionError, AttributeError, OSError, ValueError):
+        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
+        return None
+    return version
+
+
+def _glibc_version_string_ctypes() -> Optional[str]:
+    """
+    Fallback implementation of glibc_version_string using ctypes.
+    """
+    try:
+        import ctypes
+    except ImportError:
+        return None
+
+    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+    # manpage says, "If filename is NULL, then the returned handle is for the
+    # main program". This way we can let the linker do the work to figure out
+    # which libc our process is actually using.
+    #
+    # We must also handle the special case where the executable is not a
+    # dynamically linked executable. This can occur when using musl libc,
+    # for example. In this situation, dlopen() will error, leading to an
+    # OSError. Interestingly, at least in the case of musl, there is no
+    # errno set on the OSError. The single string argument used to construct
+    # OSError comes from libc itself and is therefore not portable to
+    # hard code here. In any case, failure to call dlopen() means we
+    # can't proceed, so we bail on our attempt.
+    try:
+        process_namespace = ctypes.CDLL(None)
+    except OSError:
+        return None
+
+    try:
+        gnu_get_libc_version = process_namespace.gnu_get_libc_version
+    except AttributeError:
+        # Symbol doesn't exist -> therefore, we are not linked to
+        # glibc.
+        return None
+
+    # Call gnu_get_libc_version, which returns a string like "2.5"
+    gnu_get_libc_version.restype = ctypes.c_char_p
+    version_str: str = gnu_get_libc_version()
+    # py2 / py3 compatibility:
+    if not isinstance(version_str, str):
+        version_str = version_str.decode("ascii")
+
+    return version_str
+
+
+def _glibc_version_string() -> Optional[str]:
+    """Returns glibc version string, or None if not using glibc."""
+    return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
+
+
+def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
+    """Parse glibc version.
+
+    We use a regexp instead of str.split because we want to discard any
+    random junk that might come after the minor version -- this might happen
+    in patched/forked versions of glibc (e.g. Linaro's version of glibc
+    uses version strings like "2.20-2014.11"). See gh-3588.
+    """
+    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
+    if not m:
+        warnings.warn(
+            "Expected glibc version with 2 components major.minor,"
+            " got: %s" % version_str,
+            RuntimeWarning,
+        )
+        return -1, -1
+    return int(m.group("major")), int(m.group("minor"))
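A quick check of the parsing above; the Linaro-style string comes straight from the docstring, and the private helper is imported here purely for illustration:

from pip._vendor.packaging._manylinux import _parse_glibc_version

assert _parse_glibc_version("2.17") == (2, 17)
assert _parse_glibc_version("2.20-2014.11") == (2, 20)  # junk after the minor version is discarded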
+
+
+@functools.lru_cache()
+def _get_glibc_version() -> Tuple[int, int]:
+    version_str = _glibc_version_string()
+    if version_str is None:
+        return (-1, -1)
+    return _parse_glibc_version(version_str)
+
+
+# From PEP 513, PEP 600
+def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
+    sys_glibc = _get_glibc_version()
+    if sys_glibc < version:
+        return False
+    # Check for presence of _manylinux module.
+    try:
+        import _manylinux  # noqa
+    except ImportError:
+        return True
+    if hasattr(_manylinux, "manylinux_compatible"):
+        result = _manylinux.manylinux_compatible(version[0], version[1], arch)
+        if result is not None:
+            return bool(result)
+        return True
+    if version == _GLibCVersion(2, 5):
+        if hasattr(_manylinux, "manylinux1_compatible"):
+            return bool(_manylinux.manylinux1_compatible)
+    if version == _GLibCVersion(2, 12):
+        if hasattr(_manylinux, "manylinux2010_compatible"):
+            return bool(_manylinux.manylinux2010_compatible)
+    if version == _GLibCVersion(2, 17):
+        if hasattr(_manylinux, "manylinux2014_compatible"):
+            return bool(_manylinux.manylinux2014_compatible)
+    return True
+
+
+_LEGACY_MANYLINUX_MAP = {
+    # CentOS 7 w/ glibc 2.17 (PEP 599)
+    (2, 17): "manylinux2014",
+    # CentOS 6 w/ glibc 2.12 (PEP 571)
+    (2, 12): "manylinux2010",
+    # CentOS 5 w/ glibc 2.5 (PEP 513)
+    (2, 5): "manylinux1",
+}
+
+
+def platform_tags(linux: str, arch: str) -> Iterator[str]:
+    if not _have_compatible_abi(arch):
+        return
+    # Oldest glibc to be supported regardless of architecture is (2, 17).
+    too_old_glibc2 = _GLibCVersion(2, 16)
+    if arch in {"x86_64", "i686"}:
+        # On x86/i686 also oldest glibc to be supported is (2, 5).
+        too_old_glibc2 = _GLibCVersion(2, 4)
+    current_glibc = _GLibCVersion(*_get_glibc_version())
+    glibc_max_list = [current_glibc]
+    # We can assume compatibility across glibc major versions.
+    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
+    #
+    # Build a list of maximum glibc versions so that we can
+    # output the canonical list of all glibc from current_glibc
+    # down to too_old_glibc2, including all intermediary versions.
+    for glibc_major in range(current_glibc.major - 1, 1, -1):
+        glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
+        glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
+    for glibc_max in glibc_max_list:
+        if glibc_max.major == too_old_glibc2.major:
+            min_minor = too_old_glibc2.minor
+        else:
+            # For other glibc major versions oldest supported is (x, 0).
+            min_minor = -1
+        for glibc_minor in range(glibc_max.minor, min_minor, -1):
+            glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
+            tag = "manylinux_{}_{}".format(*glibc_version)
+            if _is_compatible(tag, arch, glibc_version):
+                yield linux.replace("linux", tag)
+            # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
+            if glibc_version in _LEGACY_MANYLINUX_MAP:
+                legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
+                if _is_compatible(legacy_tag, arch, glibc_version):
+                    yield linux.replace("linux", legacy_tag)
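Taken together, `platform_tags` walks glibc versions downward from the detected runtime and yields PEP 600 tags interleaved with the legacy aliases. A hedged usage sketch (Linux only; the exact tags depend on the glibc of the interpreter running it, and the private module is imported only for demonstration):

import sysconfig
from pip._vendor.packaging._manylinux import platform_tags

plat = sysconfig.get_platform().replace("-", "_").replace(".", "_")  # e.g. "linux_x86_64"
arch = plat.split("_", 1)[-1]                                        # e.g. "x86_64"
for tag in platform_tags(plat, arch):
    print(tag)  # e.g. manylinux_2_35_x86_64 ... manylinux_2_17_x86_64, manylinux2014_x86_64, ...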
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/_musllinux.py b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/_musllinux.py
new file mode 100644
index 0000000..8ac3059
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/_musllinux.py
@@ -0,0 +1,136 @@
+"""PEP 656 support.
+
+This module implements logic to detect if the currently running Python is
+linked against musl, and what musl version is used.
+"""
+
+import contextlib
+import functools
+import operator
+import os
+import re
+import struct
+import subprocess
+import sys
+from typing import IO, Iterator, NamedTuple, Optional, Tuple
+
+
+def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]:
+    return struct.unpack(fmt, f.read(struct.calcsize(fmt)))
+
+
+def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]:
+    """Detect musl libc location by parsing the Python executable.
+
+    Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
+    ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
+    """
+    f.seek(0)
+    try:
+        ident = _read_unpacked(f, "16B")
+    except struct.error:
+        return None
+    if ident[:4] != tuple(b"\x7fELF"):  # Invalid magic, not ELF.
+        return None
+    f.seek(struct.calcsize("HHI"), 1)  # Skip file type, machine, and version.
+
+    try:
+        # e_fmt: Format for program header.
+        # p_fmt: Format for section header.
+        # p_idx: Indexes to find p_type, p_offset, and p_filesz.
+        e_fmt, p_fmt, p_idx = {
+            1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)),  # 32-bit.
+            2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)),  # 64-bit.
+        }[ident[4]]
+    except KeyError:
+        return None
+    else:
+        p_get = operator.itemgetter(*p_idx)
+
+    # Find the interpreter section and return its content.
+    try:
+        _, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt)
+    except struct.error:
+        return None
+    for i in range(e_phnum + 1):
+        f.seek(e_phoff + e_phentsize * i)
+        try:
+            p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt))
+        except struct.error:
+            return None
+        if p_type != 3:  # Not PT_INTERP.
+            continue
+        f.seek(p_offset)
+        interpreter = os.fsdecode(f.read(p_filesz)).strip("\0")
+        if "musl" not in interpreter:
+            return None
+        return interpreter
+    return None
+
+
+class _MuslVersion(NamedTuple):
+    major: int
+    minor: int
+
+
+def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
+    lines = [n for n in (n.strip() for n in output.splitlines()) if n]
+    if len(lines) < 2 or lines[0][:4] != "musl":
+        return None
+    m = re.match(r"Version (\d+)\.(\d+)", lines[1])
+    if not m:
+        return None
+    return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
+
+
+@functools.lru_cache()
+def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
+    """Detect currently-running musl runtime version.
+
+    This is done by checking the specified executable's dynamic linking
+    information, and invoking the loader to parse its output for a version
+    string. If the loader is musl, the output would be something like::
+
+        musl libc (x86_64)
+        Version 1.2.2
+        Dynamic Program Loader
+    """
+    with contextlib.ExitStack() as stack:
+        try:
+            f = stack.enter_context(open(executable, "rb"))
+        except OSError:
+            return None
+        ld = _parse_ld_musl_from_elf(f)
+    if not ld:
+        return None
+    proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True)
+    return _parse_musl_version(proc.stderr)
+
+
+def platform_tags(arch: str) -> Iterator[str]:
+    """Generate musllinux tags compatible to the current platform.
+
+    :param arch: Should be the part of platform tag after the ``linux_``
+        prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a
+        prerequisite for the current platform to be musllinux-compatible.
+
+    :returns: An iterator of compatible musllinux tags.
+    """
+    sys_musl = _get_musl_version(sys.executable)
+    if sys_musl is None:  # Python not dynamically linked against musl.
+        return
+    for minor in range(sys_musl.minor, -1, -1):
+        yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
+
+
+if __name__ == "__main__":  # pragma: no cover
+    import sysconfig
+
+    plat = sysconfig.get_platform()
+    assert plat.startswith("linux-"), "not linux"
+
+    print("plat:", plat)
+    print("musl:", _get_musl_version(sys.executable))
+    print("tags:", end=" ")
+    for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])):
+        print(t, end="\n      ")
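As an illustration of how `_parse_musl_version` consumes the loader output quoted in the `_get_musl_version` docstring (private names imported here only for demonstration):

from pip._vendor.packaging._musllinux import _MuslVersion, _parse_musl_version

loader_output = """\
musl libc (x86_64)
Version 1.2.2
Dynamic Program Loader
"""
# The version is read from the second non-empty line of the loader's stderr.
assert _parse_musl_version(loader_output) == _MuslVersion(major=1, minor=2)
assert _parse_musl_version("ld-linux: glibc 2.35") is None  # not a musl loader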
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/_structures.py b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/_structures.py
new file mode 100644
index 0000000..90a6465
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/_structures.py
@@ -0,0 +1,61 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+class InfinityType:
+    def __repr__(self) -> str:
+        return "Infinity"
+
+    def __hash__(self) -> int:
+        return hash(repr(self))
+
+    def __lt__(self, other: object) -> bool:
+        return False
+
+    def __le__(self, other: object) -> bool:
+        return False
+
+    def __eq__(self, other: object) -> bool:
+        return isinstance(other, self.__class__)
+
+    def __gt__(self, other: object) -> bool:
+        return True
+
+    def __ge__(self, other: object) -> bool:
+        return True
+
+    def __neg__(self: object) -> "NegativeInfinityType":
+        return NegativeInfinity
+
+
+Infinity = InfinityType()
+
+
+class NegativeInfinityType:
+    def __repr__(self) -> str:
+        return "-Infinity"
+
+    def __hash__(self) -> int:
+        return hash(repr(self))
+
+    def __lt__(self, other: object) -> bool:
+        return True
+
+    def __le__(self, other: object) -> bool:
+        return True
+
+    def __eq__(self, other: object) -> bool:
+        return isinstance(other, self.__class__)
+
+    def __gt__(self, other: object) -> bool:
+        return False
+
+    def __ge__(self, other: object) -> bool:
+        return False
+
+    def __neg__(self: object) -> InfinityType:
+        return Infinity
+
+
+NegativeInfinity = NegativeInfinityType()
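These sentinels give every version component a total order; a small illustration of the behaviour defined above:

from pip._vendor.packaging._structures import Infinity, NegativeInfinity

# Any concrete value sorts between the two sentinels, which is how the version
# machinery pads missing segments during comparisons.
assert NegativeInfinity < 1 < Infinity
assert sorted([1, Infinity, NegativeInfinity]) == [NegativeInfinity, 1, Infinity]
assert -Infinity is NegativeInfinity and -NegativeInfinity is Infinity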
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/markers.py b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/markers.py
new file mode 100644
index 0000000..540e7a4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/markers.py
@@ -0,0 +1,304 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import operator
+import os
+import platform
+import sys
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union
+
+from pip._vendor.pyparsing import (  # noqa: N817
+    Forward,
+    Group,
+    Literal as L,
+    ParseException,
+    ParseResults,
+    QuotedString,
+    ZeroOrMore,
+    stringEnd,
+    stringStart,
+)
+
+from .specifiers import InvalidSpecifier, Specifier
+
+__all__ = [
+    "InvalidMarker",
+    "UndefinedComparison",
+    "UndefinedEnvironmentName",
+    "Marker",
+    "default_environment",
+]
+
+Operator = Callable[[str, str], bool]
+
+
+class InvalidMarker(ValueError):
+    """
+    An invalid marker was found, users should refer to PEP 508.
+    """
+
+
+class UndefinedComparison(ValueError):
+    """
+    An invalid operation was attempted on a value that doesn't support it.
+    """
+
+
+class UndefinedEnvironmentName(ValueError):
+    """
+    A name was attempted to be used that does not exist inside of the
+    environment.
+    """
+
+
+class Node:
+    def __init__(self, value: Any) -> None:
+        self.value = value
+
+    def __str__(self) -> str:
+        return str(self.value)
+
+    def __repr__(self) -> str:
+        return f"<{self.__class__.__name__}('{self}')>"
+
+    def serialize(self) -> str:
+        raise NotImplementedError
+
+
+class Variable(Node):
+    def serialize(self) -> str:
+        return str(self)
+
+
+class Value(Node):
+    def serialize(self) -> str:
+        return f'"{self}"'
+
+
+class Op(Node):
+    def serialize(self) -> str:
+        return str(self)
+
+
+VARIABLE = (
+    L("implementation_version")
+    | L("platform_python_implementation")
+    | L("implementation_name")
+    | L("python_full_version")
+    | L("platform_release")
+    | L("platform_version")
+    | L("platform_machine")
+    | L("platform_system")
+    | L("python_version")
+    | L("sys_platform")
+    | L("os_name")
+    | L("os.name")  # PEP-345
+    | L("sys.platform")  # PEP-345
+    | L("platform.version")  # PEP-345
+    | L("platform.machine")  # PEP-345
+    | L("platform.python_implementation")  # PEP-345
+    | L("python_implementation")  # undocumented setuptools legacy
+    | L("extra")  # PEP-508
+)
+ALIASES = {
+    "os.name": "os_name",
+    "sys.platform": "sys_platform",
+    "platform.version": "platform_version",
+    "platform.machine": "platform_machine",
+    "platform.python_implementation": "platform_python_implementation",
+    "python_implementation": "platform_python_implementation",
+}
+VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
+
+VERSION_CMP = (
+    L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
+)
+
+MARKER_OP = VERSION_CMP | L("not in") | L("in")
+MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))
+
+MARKER_VALUE = QuotedString("'") | QuotedString('"')
+MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))
+
+BOOLOP = L("and") | L("or")
+
+MARKER_VAR = VARIABLE | MARKER_VALUE
+
+MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
+MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))
+
+LPAREN = L("(").suppress()
+RPAREN = L(")").suppress()
+
+MARKER_EXPR = Forward()
+MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
+MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)
+
+MARKER = stringStart + MARKER_EXPR + stringEnd
+
+
+def _coerce_parse_result(results: Union[ParseResults, List[Any]]) -> List[Any]:
+    if isinstance(results, ParseResults):
+        return [_coerce_parse_result(i) for i in results]
+    else:
+        return results
+
+
+def _format_marker(
+    marker: Union[List[str], Tuple[Node, ...], str], first: Optional[bool] = True
+) -> str:
+
+    assert isinstance(marker, (list, tuple, str))
+
+    # Sometimes we have a structure like [[...]] which is a single item list
+    # where the single item is itself its own list. In that case we want to skip
+    # the rest of this function so that we don't get extraneous () on the
+    # outside.
+    if (
+        isinstance(marker, list)
+        and len(marker) == 1
+        and isinstance(marker[0], (list, tuple))
+    ):
+        return _format_marker(marker[0])
+
+    if isinstance(marker, list):
+        inner = (_format_marker(m, first=False) for m in marker)
+        if first:
+            return " ".join(inner)
+        else:
+            return "(" + " ".join(inner) + ")"
+    elif isinstance(marker, tuple):
+        return " ".join([m.serialize() for m in marker])
+    else:
+        return marker
+
+
+_operators: Dict[str, Operator] = {
+    "in": lambda lhs, rhs: lhs in rhs,
+    "not in": lambda lhs, rhs: lhs not in rhs,
+    "<": operator.lt,
+    "<=": operator.le,
+    "==": operator.eq,
+    "!=": operator.ne,
+    ">=": operator.ge,
+    ">": operator.gt,
+}
+
+
+def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
+    try:
+        spec = Specifier("".join([op.serialize(), rhs]))
+    except InvalidSpecifier:
+        pass
+    else:
+        return spec.contains(lhs)
+
+    oper: Optional[Operator] = _operators.get(op.serialize())
+    if oper is None:
+        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
+
+    return oper(lhs, rhs)
+
+
+class Undefined:
+    pass
+
+
+_undefined = Undefined()
+
+
+def _get_env(environment: Dict[str, str], name: str) -> str:
+    value: Union[str, Undefined] = environment.get(name, _undefined)
+
+    if isinstance(value, Undefined):
+        raise UndefinedEnvironmentName(
+            f"{name!r} does not exist in evaluation environment."
+        )
+
+    return value
+
+
+def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool:
+    groups: List[List[bool]] = [[]]
+
+    for marker in markers:
+        assert isinstance(marker, (list, tuple, str))
+
+        if isinstance(marker, list):
+            groups[-1].append(_evaluate_markers(marker, environment))
+        elif isinstance(marker, tuple):
+            lhs, op, rhs = marker
+
+            if isinstance(lhs, Variable):
+                lhs_value = _get_env(environment, lhs.value)
+                rhs_value = rhs.value
+            else:
+                lhs_value = lhs.value
+                rhs_value = _get_env(environment, rhs.value)
+
+            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
+        else:
+            assert marker in ["and", "or"]
+            if marker == "or":
+                groups.append([])
+
+    return any(all(item) for item in groups)
+
+
+def format_full_version(info: "sys._version_info") -> str:
+    version = "{0.major}.{0.minor}.{0.micro}".format(info)
+    kind = info.releaselevel
+    if kind != "final":
+        version += kind[0] + str(info.serial)
+    return version
+
+
+def default_environment() -> Dict[str, str]:
+    iver = format_full_version(sys.implementation.version)
+    implementation_name = sys.implementation.name
+    return {
+        "implementation_name": implementation_name,
+        "implementation_version": iver,
+        "os_name": os.name,
+        "platform_machine": platform.machine(),
+        "platform_release": platform.release(),
+        "platform_system": platform.system(),
+        "platform_version": platform.version(),
+        "python_full_version": platform.python_version(),
+        "platform_python_implementation": platform.python_implementation(),
+        "python_version": ".".join(platform.python_version_tuple()[:2]),
+        "sys_platform": sys.platform,
+    }
+
+
+class Marker:
+    def __init__(self, marker: str) -> None:
+        try:
+            self._markers = _coerce_parse_result(MARKER.parseString(marker))
+        except ParseException as e:
+            raise InvalidMarker(
+                f"Invalid marker: {marker!r}, parse error at "
+                f"{marker[e.loc : e.loc + 8]!r}"
+            )
+
+    def __str__(self) -> str:
+        return _format_marker(self._markers)
+
+    def __repr__(self) -> str:
+        return f""
+
+    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
+        """Evaluate a marker.
+
+        Return the boolean from evaluating the given marker against the
+        environment. environment is an optional argument to override all or
+        part of the determined environment.
+
+        The environment is determined from the current Python process.
+        """
+        current_environment = default_environment()
+        if environment is not None:
+            current_environment.update(environment)
+
+        return _evaluate_markers(self._markers, current_environment)
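`Marker.evaluate` overlays any supplied environment on top of `default_environment()`, so the same marker can be checked against the running interpreter or a simulated one. A brief illustration (the first result depends on the interpreter evaluating it):

from pip._vendor.packaging.markers import Marker

m = Marker('python_version >= "3.8" and sys_platform != "win32"')
print(m.evaluate())  # depends on the current interpreter
print(m.evaluate({"python_version": "3.6", "sys_platform": "linux"}))  # False: first clause fails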
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/requirements.py b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/requirements.py
new file mode 100644
index 0000000..1eab7dd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/requirements.py
@@ -0,0 +1,146 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import re
+import string
+import urllib.parse
+from typing import List, Optional as TOptional, Set
+
+from pip._vendor.pyparsing import (  # noqa
+    Combine,
+    Literal as L,
+    Optional,
+    ParseException,
+    Regex,
+    Word,
+    ZeroOrMore,
+    originalTextFor,
+    stringEnd,
+    stringStart,
+)
+
+from .markers import MARKER_EXPR, Marker
+from .specifiers import LegacySpecifier, Specifier, SpecifierSet
+
+
+class InvalidRequirement(ValueError):
+    """
+    An invalid requirement was found, users should refer to PEP 508.
+    """
+
+
+ALPHANUM = Word(string.ascii_letters + string.digits)
+
+LBRACKET = L("[").suppress()
+RBRACKET = L("]").suppress()
+LPAREN = L("(").suppress()
+RPAREN = L(")").suppress()
+COMMA = L(",").suppress()
+SEMICOLON = L(";").suppress()
+AT = L("@").suppress()
+
+PUNCTUATION = Word("-_.")
+IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
+IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
+
+NAME = IDENTIFIER("name")
+EXTRA = IDENTIFIER
+
+URI = Regex(r"[^ ]+")("url")
+URL = AT + URI
+
+EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
+EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
+
+VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
+VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
+
+VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
+VERSION_MANY = Combine(
+    VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False
+)("_raw_spec")
+_VERSION_SPEC = Optional((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)
+_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "")
+
+VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
+VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
+
+MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
+MARKER_EXPR.setParseAction(
+    lambda s, l, t: Marker(s[t._original_start : t._original_end])
+)
+MARKER_SEPARATOR = SEMICOLON
+MARKER = MARKER_SEPARATOR + MARKER_EXPR
+
+VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
+URL_AND_MARKER = URL + Optional(MARKER)
+
+NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
+
+REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
+# pyparsing isn't thread safe during initialization, so we do it eagerly, see
+# issue #104
+REQUIREMENT.parseString("x[]")
+
+
+class Requirement:
+    """Parse a requirement.
+
+    Parse a given requirement string into its parts, such as name, specifier,
+    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
+    string.
+    """
+
+    # TODO: Can we test whether something is contained within a requirement?
+    #       If so how do we do that? Do we need to test against the _name_ of
+    #       the thing as well as the version? What about the markers?
+    # TODO: Can we normalize the name and extra name?
+
+    def __init__(self, requirement_string: str) -> None:
+        try:
+            req = REQUIREMENT.parseString(requirement_string)
+        except ParseException as e:
+            raise InvalidRequirement(
+                f'Parse error at "{ requirement_string[e.loc : e.loc + 8]!r}": {e.msg}'
+            )
+
+        self.name: str = req.name
+        if req.url:
+            parsed_url = urllib.parse.urlparse(req.url)
+            if parsed_url.scheme == "file":
+                if urllib.parse.urlunparse(parsed_url) != req.url:
+                    raise InvalidRequirement("Invalid URL given")
+            elif not (parsed_url.scheme and parsed_url.netloc) or (
+                not parsed_url.scheme and not parsed_url.netloc
+            ):
+                raise InvalidRequirement(f"Invalid URL: {req.url}")
+            self.url: TOptional[str] = req.url
+        else:
+            self.url = None
+        self.extras: Set[str] = set(req.extras.asList() if req.extras else [])
+        self.specifier: SpecifierSet = SpecifierSet(req.specifier)
+        self.marker: TOptional[Marker] = req.marker if req.marker else None
+
+    def __str__(self) -> str:
+        parts: List[str] = [self.name]
+
+        if self.extras:
+            formatted_extras = ",".join(sorted(self.extras))
+            parts.append(f"[{formatted_extras}]")
+
+        if self.specifier:
+            parts.append(str(self.specifier))
+
+        if self.url:
+            parts.append(f"@ {self.url}")
+            if self.marker:
+                parts.append(" ")
+
+        if self.marker:
+            parts.append(f"; {self.marker}")
+
+        return "".join(parts)
+
+    def __repr__(self) -> str:
+        return f""
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/specifiers.py b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/specifiers.py
new file mode 100644
index 0000000..0e218a6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/specifiers.py
@@ -0,0 +1,802 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import abc
+import functools
+import itertools
+import re
+import warnings
+from typing import (
+    Callable,
+    Dict,
+    Iterable,
+    Iterator,
+    List,
+    Optional,
+    Pattern,
+    Set,
+    Tuple,
+    TypeVar,
+    Union,
+)
+
+from .utils import canonicalize_version
+from .version import LegacyVersion, Version, parse
+
+ParsedVersion = Union[Version, LegacyVersion]
+UnparsedVersion = Union[Version, LegacyVersion, str]
+VersionTypeVar = TypeVar("VersionTypeVar", bound=UnparsedVersion)
+CallableOperator = Callable[[ParsedVersion, str], bool]
+
+
+class InvalidSpecifier(ValueError):
+    """
+    An invalid specifier was found, users should refer to PEP 440.
+    """
+
+
+class BaseSpecifier(metaclass=abc.ABCMeta):
+    @abc.abstractmethod
+    def __str__(self) -> str:
+        """
+        Returns the str representation of this Specifier like object. This
+        should be representative of the Specifier itself.
+        """
+
+    @abc.abstractmethod
+    def __hash__(self) -> int:
+        """
+        Returns a hash value for this Specifier like object.
+        """
+
+    @abc.abstractmethod
+    def __eq__(self, other: object) -> bool:
+        """
+        Returns a boolean representing whether or not the two Specifier like
+        objects are equal.
+        """
+
+    @abc.abstractproperty
+    def prereleases(self) -> Optional[bool]:
+        """
+        Returns whether or not pre-releases as a whole are allowed by this
+        specifier.
+        """
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        """
+        Sets whether or not pre-releases as a whole are allowed by this
+        specifier.
+        """
+
+    @abc.abstractmethod
+    def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
+        """
+        Determines if the given item is contained within this specifier.
+        """
+
+    @abc.abstractmethod
+    def filter(
+        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
+    ) -> Iterable[VersionTypeVar]:
+        """
+        Takes an iterable of items and filters them so that only items which
+        are contained within this specifier are allowed in it.
+        """
+
+
+class _IndividualSpecifier(BaseSpecifier):
+
+    _operators: Dict[str, str] = {}
+    _regex: Pattern[str]
+
+    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
+        match = self._regex.search(spec)
+        if not match:
+            raise InvalidSpecifier(f"Invalid specifier: '{spec}'")
+
+        self._spec: Tuple[str, str] = (
+            match.group("operator").strip(),
+            match.group("version").strip(),
+        )
+
+        # Store whether or not this Specifier should accept prereleases
+        self._prereleases = prereleases
+
+    def __repr__(self) -> str:
+        pre = (
+            f", prereleases={self.prereleases!r}"
+            if self._prereleases is not None
+            else ""
+        )
+
+        return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
+
+    def __str__(self) -> str:
+        return "{}{}".format(*self._spec)
+
+    @property
+    def _canonical_spec(self) -> Tuple[str, str]:
+        return self._spec[0], canonicalize_version(self._spec[1])
+
+    def __hash__(self) -> int:
+        return hash(self._canonical_spec)
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, str):
+            try:
+                other = self.__class__(str(other))
+            except InvalidSpecifier:
+                return NotImplemented
+        elif not isinstance(other, self.__class__):
+            return NotImplemented
+
+        return self._canonical_spec == other._canonical_spec
+
+    def _get_operator(self, op: str) -> CallableOperator:
+        operator_callable: CallableOperator = getattr(
+            self, f"_compare_{self._operators[op]}"
+        )
+        return operator_callable
+
+    def _coerce_version(self, version: UnparsedVersion) -> ParsedVersion:
+        if not isinstance(version, (LegacyVersion, Version)):
+            version = parse(version)
+        return version
+
+    @property
+    def operator(self) -> str:
+        return self._spec[0]
+
+    @property
+    def version(self) -> str:
+        return self._spec[1]
+
+    @property
+    def prereleases(self) -> Optional[bool]:
+        return self._prereleases
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        self._prereleases = value
+
+    def __contains__(self, item: str) -> bool:
+        return self.contains(item)
+
+    def contains(
+        self, item: UnparsedVersion, prereleases: Optional[bool] = None
+    ) -> bool:
+
+        # Determine if prereleases are to be allowed or not.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # Normalize item to a Version or LegacyVersion, this allows us to have
+        # a shortcut for ``"2.0" in Specifier(">=2")``
+        normalized_item = self._coerce_version(item)
+
+        # Determine if we should be supporting prereleases in this specifier
+        # or not; if we do not support prereleases then we can short-circuit
+        # the logic if this version is a prerelease.
+        if normalized_item.is_prerelease and not prereleases:
+            return False
+
+        # Actually do the comparison to determine if this item is contained
+        # within this Specifier or not.
+        operator_callable: CallableOperator = self._get_operator(self.operator)
+        return operator_callable(normalized_item, self.version)
+
+    def filter(
+        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
+    ) -> Iterable[VersionTypeVar]:
+
+        yielded = False
+        found_prereleases = []
+
+        kw = {"prereleases": prereleases if prereleases is not None else True}
+
+        # Attempt to iterate over all the values in the iterable and if any of
+        # them match, yield them.
+        for version in iterable:
+            parsed_version = self._coerce_version(version)
+
+            if self.contains(parsed_version, **kw):
+                # If our version is a prerelease, and we were not set to allow
+                # prereleases, then we'll store it for later in case nothing
+                # else matches this specifier.
+                if parsed_version.is_prerelease and not (
+                    prereleases or self.prereleases
+                ):
+                    found_prereleases.append(version)
+                # Either this is not a prerelease, or we should have been
+                # accepting prereleases from the beginning.
+                else:
+                    yielded = True
+                    yield version
+
+        # Now that we've iterated over everything, determine if we've yielded
+        # any values, and if we have not and we have any prereleases stored up
+        # then we will go ahead and yield the prereleases.
+        if not yielded and found_prereleases:
+            for version in found_prereleases:
+                yield version
+
+
+class LegacySpecifier(_IndividualSpecifier):
+
+    _regex_str = r"""
+        (?P<operator>(==|!=|<=|>=|<|>))
+        \s*
+        (?P<version>
+            [^,;\s)]* # Since this is a "legacy" specifier, and the version
+                      # string can be just about anything, we match everything
+                      # except for whitespace, a semi-colon for marker support,
+                      # a closing paren since versions can be enclosed in
+                      # them, and a comma since it's a version separator.
+        )
+        """
+
+    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+    _operators = {
+        "==": "equal",
+        "!=": "not_equal",
+        "<=": "less_than_equal",
+        ">=": "greater_than_equal",
+        "<": "less_than",
+        ">": "greater_than",
+    }
+
+    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
+        super().__init__(spec, prereleases)
+
+        warnings.warn(
+            "Creating a LegacyVersion has been deprecated and will be "
+            "removed in the next major release",
+            DeprecationWarning,
+        )
+
+    def _coerce_version(self, version: UnparsedVersion) -> LegacyVersion:
+        if not isinstance(version, LegacyVersion):
+            version = LegacyVersion(str(version))
+        return version
+
+    def _compare_equal(self, prospective: LegacyVersion, spec: str) -> bool:
+        return prospective == self._coerce_version(spec)
+
+    def _compare_not_equal(self, prospective: LegacyVersion, spec: str) -> bool:
+        return prospective != self._coerce_version(spec)
+
+    def _compare_less_than_equal(self, prospective: LegacyVersion, spec: str) -> bool:
+        return prospective <= self._coerce_version(spec)
+
+    def _compare_greater_than_equal(
+        self, prospective: LegacyVersion, spec: str
+    ) -> bool:
+        return prospective >= self._coerce_version(spec)
+
+    def _compare_less_than(self, prospective: LegacyVersion, spec: str) -> bool:
+        return prospective < self._coerce_version(spec)
+
+    def _compare_greater_than(self, prospective: LegacyVersion, spec: str) -> bool:
+        return prospective > self._coerce_version(spec)
+
+
+def _require_version_compare(
+    fn: Callable[["Specifier", ParsedVersion, str], bool]
+) -> Callable[["Specifier", ParsedVersion, str], bool]:
+    @functools.wraps(fn)
+    def wrapped(self: "Specifier", prospective: ParsedVersion, spec: str) -> bool:
+        if not isinstance(prospective, Version):
+            return False
+        return fn(self, prospective, spec)
+
+    return wrapped
+
+
+class Specifier(_IndividualSpecifier):
+
+    _regex_str = r"""
+        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
+        (?P<version>
+            (?:
+                # The identity operators allow for an escape hatch that will
+                # do an exact string match of the version you wish to install.
+                # This will not be parsed by PEP 440 and we cannot determine
+                # any semantic meaning from it. This operator is discouraged
+                # but included entirely as an escape hatch.
+                (?<====)  # Only match for the identity operator
+                \s*
+                [^\s]*    # We just match everything, except for whitespace
+                          # since we are only testing for strict identity.
+            )
+            |
+            (?:
+                # The (non)equality operators allow for wild card and local
+                # versions to be specified so we have to define these two
+                # operators separately to enable that.
+                (?<===|!=)            # Only match for equals and not equals
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+                (?:                   # pre release
+                    [-_\.]?
+                    (a|b|c|rc|alpha|beta|pre|preview)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+
+                # You cannot use a wild card and a dev or local version
+                # together so group them with a | and make them optional.
+                (?:
+                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
+                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
+                    |
+                    \.\*  # Wild card syntax of .*
+                )?
+            )
+            |
+            (?:
+                # The compatible operator requires at least two digits in the
+                # release segment.
+                (?<=~=)               # Only match for the compatible operator
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
+                (?:                   # pre release
+                    [-_\.]?
+                    (a|b|c|rc|alpha|beta|pre|preview)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+            |
+            (?:
+                # All other operators only allow a sub set of what the
+                # (non)equality operators do. Specifically they do not allow
+                # local versions to be specified nor do they allow the prefix
+                # matching wild cards.
+                (?<!==|!=|~=)         # We have special cases for these
+                                      # operators so we want to make sure they
+                                      # don't match here.
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+                (?:                   # pre release
+                    [-_\.]?
+                    (a|b|c|rc|alpha|beta|pre|preview)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+        )
+        """
+
+    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+    _operators = {
+        "~=": "compatible",
+        "==": "equal",
+        "!=": "not_equal",
+        "<=": "less_than_equal",
+        ">=": "greater_than_equal",
+        "<": "less_than",
+        ">": "greater_than",
+        "===": "arbitrary",
+    }
+
+    @_require_version_compare
+    def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool:
+
+        # Compatible releases have an equivalent combination of >= and ==. That
+        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
+        # implement this in terms of the other specifiers instead of
+        # implementing it ourselves. The only thing we need to do is construct
+        # the other specifiers.
+
+        # We want everything but the last item in the version, but we want to
+        # ignore suffix segments.
+        prefix = ".".join(
+            list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
+        )
+
+        # Add the prefix notation to the end of our string
+        prefix += ".*"
+
+        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
+            prospective, prefix
+        )
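As the comment above describes, the compatible operator is rewritten into a pair of `>=` and `==prefix.*` checks; a quick illustrative check, with `Specifier` imported from the vendored module:

from pip._vendor.packaging.specifiers import Specifier

# ~=2.2 behaves like ">=2.2, ==2.*"; ~=2.2.3 behaves like ">=2.2.3, ==2.2.*".
assert Specifier("~=2.2").contains("2.9") and not Specifier("~=2.2").contains("3.0")
assert Specifier("~=2.2.3").contains("2.2.10") and not Specifier("~=2.2.3").contains("2.3")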
+
+    @_require_version_compare
+    def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool:
+
+        # We need special logic to handle prefix matching
+        if spec.endswith(".*"):
+            # In the case of prefix matching we want to ignore local segment.
+            prospective = Version(prospective.public)
+            # Split the spec out by dots, and pretend that there is an implicit
+            # dot in between a release segment and a pre-release segment.
+            split_spec = _version_split(spec[:-2])  # Remove the trailing .*
+
+            # Split the prospective version out by dots, and pretend that there
+            # is an implicit dot in between a release segment and a pre-release
+            # segment.
+            split_prospective = _version_split(str(prospective))
+
+            # Shorten the prospective version to be the same length as the spec
+            # so that we can determine if the specifier is a prefix of the
+            # prospective version or not.
+            shortened_prospective = split_prospective[: len(split_spec)]
+
+            # Pad out our two sides with zeros so that they both equal the same
+            # length.
+            padded_spec, padded_prospective = _pad_version(
+                split_spec, shortened_prospective
+            )
+
+            return padded_prospective == padded_spec
+        else:
+            # Convert our spec string into a Version
+            spec_version = Version(spec)
+
+            # If the specifier does not have a local segment, then we want to
+            # act as if the prospective version also does not have a local
+            # segment.
+            if not spec_version.local:
+                prospective = Version(prospective.public)
+
+            return prospective == spec_version
+
+    @_require_version_compare
+    def _compare_not_equal(self, prospective: ParsedVersion, spec: str) -> bool:
+        return not self._compare_equal(prospective, spec)
+
+    @_require_version_compare
+    def _compare_less_than_equal(self, prospective: ParsedVersion, spec: str) -> bool:
+
+        # NB: Local version identifiers are NOT permitted in the version
+        # specifier, so local version labels can be universally removed from
+        # the prospective version.
+        return Version(prospective.public) <= Version(spec)
+
+    @_require_version_compare
+    def _compare_greater_than_equal(
+        self, prospective: ParsedVersion, spec: str
+    ) -> bool:
+
+        # NB: Local version identifiers are NOT permitted in the version
+        # specifier, so local version labels can be universally removed from
+        # the prospective version.
+        return Version(prospective.public) >= Version(spec)
+
+    @_require_version_compare
+    def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool:
+
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec_str)
+
+        # Check to see if the prospective version is less than the spec
+        # version. If it's not we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective < spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a pre-release version, we do not accept pre-release
+        # versions for the version mentioned in the specifier (e.g. <3.1 should
+        # not match 3.1.dev0, but should match 3.0.dev0).
+        if not spec.is_prerelease and prospective.is_prerelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that prospective version is both
+        # less than the spec version *and* it's not a pre-release of the same
+        # version in the spec.
+        return True
+
+    @_require_version_compare
+    def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bool:
+
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec_str)
+
+        # Check to see if the prospective version is greater than the spec
+        # version. If it's not we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective > spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a post-release version, we do not accept
+        # post-release versions for the version mentioned in the specifier
+        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
+        if not spec.is_postrelease and prospective.is_postrelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # Ensure that we do not allow a local version of the version mentioned
+        # in the specifier, which is technically greater than, to match.
+        if prospective.local is not None:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that prospective version is both
+        # greater than the spec version *and* it's not a post-release or local
+        # version of the same version in the spec.
+        return True
+
+    def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
+        return str(prospective).lower() == str(spec).lower()
+
+    @property
+    def prereleases(self) -> bool:
+
+        # If there is an explicit prereleases set for this, then we'll just
+        # blindly use that.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # Look at all of our specifiers and determine if they are inclusive
+        # operators, and, if they are, whether they include an explicit
+        # prerelease.
+        operator, version = self._spec
+        if operator in ["==", ">=", "<=", "~=", "==="]:
+            # The == specifier can include a trailing .*; if it does, we
+            # want to remove it before parsing.
+            if operator == "==" and version.endswith(".*"):
+                version = version[:-2]
+
+            # Parse the version, and if it is a pre-release then this
+            # specifier allows pre-releases.
+            if parse(version).is_prerelease:
+                return True
+
+        return False
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        self._prereleases = value
+
+
+_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
+
+
+def _version_split(version: str) -> List[str]:
+    result: List[str] = []
+    for item in version.split("."):
+        match = _prefix_regex.search(item)
+        if match:
+            result.extend(match.groups())
+        else:
+            result.append(item)
+    return result
+
+
+def _is_not_suffix(segment: str) -> bool:
+    return not any(
+        segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
+    )
+
+
+def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
+    left_split, right_split = [], []
+
+    # Get the release segment of our versions
+    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
+    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
+
+    # Get the rest of our versions
+    left_split.append(left[len(left_split[0]) :])
+    right_split.append(right[len(right_split[0]) :])
+
+    # Insert our padding
+    left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
+    right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
+
+    return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
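+
+# Editorial note (not part of the upstream packaging sources): a worked example
+# of the two helpers above, as used for "== 1.1.*"-style prefix matching. The
+# version strings are made-up illustrations.
+#
+#   _version_split("1.1")      -> ["1", "1"]
+#   _version_split("1.1.2rc1") -> ["1", "1", "2", "rc1"]
+#   _pad_version(["1", "1"], ["1", "1", "2"])
+#                              -> (["1", "1", "0"], ["1", "1", "2"])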
+
+
+class SpecifierSet(BaseSpecifier):
+    def __init__(
+        self, specifiers: str = "", prereleases: Optional[bool] = None
+    ) -> None:
+
+        # Split on , to break each individual specifier into its own item, and
+        # strip each item to remove leading/trailing whitespace.
+        split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+
+        # Parse each individual specifier, attempting first to make it a
+        # Specifier and falling back to a LegacySpecifier.
+        parsed: Set[_IndividualSpecifier] = set()
+        for specifier in split_specifiers:
+            try:
+                parsed.add(Specifier(specifier))
+            except InvalidSpecifier:
+                parsed.add(LegacySpecifier(specifier))
+
+        # Turn our parsed specifiers into a frozen set and save them for later.
+        self._specs = frozenset(parsed)
+
+        # Store our prereleases value so we can use it later to determine if
+        # we accept prereleases or not.
+        self._prereleases = prereleases
+
+    def __repr__(self) -> str:
+        pre = (
+            f", prereleases={self.prereleases!r}"
+            if self._prereleases is not None
+            else ""
+        )
+
+        return f""
+
+    def __str__(self) -> str:
+        return ",".join(sorted(str(s) for s in self._specs))
+
+    def __hash__(self) -> int:
+        return hash(self._specs)
+
+    def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
+        if isinstance(other, str):
+            other = SpecifierSet(other)
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        specifier = SpecifierSet()
+        specifier._specs = frozenset(self._specs | other._specs)
+
+        if self._prereleases is None and other._prereleases is not None:
+            specifier._prereleases = other._prereleases
+        elif self._prereleases is not None and other._prereleases is None:
+            specifier._prereleases = self._prereleases
+        elif self._prereleases == other._prereleases:
+            specifier._prereleases = self._prereleases
+        else:
+            raise ValueError(
+                "Cannot combine SpecifierSets with True and False prerelease "
+                "overrides."
+            )
+
+        return specifier
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, (str, _IndividualSpecifier)):
+            other = SpecifierSet(str(other))
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        return self._specs == other._specs
+
+    def __len__(self) -> int:
+        return len(self._specs)
+
+    def __iter__(self) -> Iterator[_IndividualSpecifier]:
+        return iter(self._specs)
+
+    @property
+    def prereleases(self) -> Optional[bool]:
+
+        # If we have been given an explicit prerelease modifier, then we'll
+        # pass that through here.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # If we don't have any specifiers, and we don't have a forced value,
+        # then we'll just return None since we don't know if this should have
+        # pre-releases or not.
+        if not self._specs:
+            return None
+
+        # Otherwise we'll see if any of the given specifiers accept
+        # prereleases, if any of them do we'll return True, otherwise False.
+        return any(s.prereleases for s in self._specs)
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        self._prereleases = value
+
+    def __contains__(self, item: UnparsedVersion) -> bool:
+        return self.contains(item)
+
+    def contains(
+        self, item: UnparsedVersion, prereleases: Optional[bool] = None
+    ) -> bool:
+
+        # Ensure that our item is a Version or LegacyVersion instance.
+        if not isinstance(item, (LegacyVersion, Version)):
+            item = parse(item)
+
+        # Determine if we're forcing a prerelease or not, if we're not forcing
+        # one for this particular filter call, then we'll use whatever the
+        # SpecifierSet thinks for whether or not we should support prereleases.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # We can determine if we're going to allow pre-releases by looking to
+        # see if any of the underlying items supports them. If none of them do
+        # and this item is a pre-release then we do not allow it and we can
+        # short circuit that here.
+        # Note: This means that 1.0.dev1 would not be contained in something
+        #       like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
+        if not prereleases and item.is_prerelease:
+            return False
+
+        # We simply dispatch to the underlying specs here to make sure that the
+        # given version is contained within all of them.
+        # Note: This use of all() here means that an empty set of specifiers
+        #       will always return True, this is an explicit design decision.
+        return all(s.contains(item, prereleases=prereleases) for s in self._specs)
+
+    def filter(
+        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
+    ) -> Iterable[VersionTypeVar]:
+
+        # Determine if we're forcing a prerelease or not, if we're not forcing
+        # one for this particular filter call, then we'll use whatever the
+        # SpecifierSet thinks for whether or not we should support prereleases.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # If we have any specifiers, then we want to wrap our iterable in the
+        # filter method for each one, this will act as a logical AND amongst
+        # each specifier.
+        if self._specs:
+            for spec in self._specs:
+                iterable = spec.filter(iterable, prereleases=bool(prereleases))
+            return iterable
+        # If we do not have any specifiers, then we need to have a rough filter
+        # which will filter out any pre-releases, unless there are no final
+        # releases, and which will filter out LegacyVersion in general.
+        else:
+            filtered: List[VersionTypeVar] = []
+            found_prereleases: List[VersionTypeVar] = []
+
+            item: UnparsedVersion
+            parsed_version: Union[Version, LegacyVersion]
+
+            for item in iterable:
+                # Ensure that we have some kind of Version class for this item.
+                if not isinstance(item, (LegacyVersion, Version)):
+                    parsed_version = parse(item)
+                else:
+                    parsed_version = item
+
+                # Filter out any item which is parsed as a LegacyVersion
+                if isinstance(parsed_version, LegacyVersion):
+                    continue
+
+                # Store any item which is a pre-release for later unless we've
+                # already found a final version or we are accepting prereleases
+                if parsed_version.is_prerelease and not prereleases:
+                    if not filtered:
+                        found_prereleases.append(item)
+                else:
+                    filtered.append(item)
+
+            # If we've found no items except for pre-releases, then we'll go
+            # ahead and use the pre-releases
+            if not filtered and found_prereleases and prereleases is None:
+                return found_prereleases
+
+            return filtered
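+
+
+# --- Editorial usage sketch (not part of the upstream packaging sources) ---
+# A minimal demonstration of SpecifierSet membership and filtering as defined
+# above. The version strings are made-up examples; the guard keeps this from
+# running on import (execute with `python -m pip._vendor.packaging.specifiers`).
+if __name__ == "__main__":
+    spec_set = SpecifierSet(">=1.0,<2.0")
+    print("1.5" in spec_set)   # True
+    print("2.0" in spec_set)   # False -- the upper bound is exclusive
+    print(list(spec_set.filter(["0.9", "1.0", "1.5rc1", "1.9", "2.0"])))
+    # ['1.0', '1.9'] -- the pre-release 1.5rc1 is dropped unless prereleases=True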
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/tags.py b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/tags.py
new file mode 100644
index 0000000..9a3d25a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/tags.py
@@ -0,0 +1,487 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import logging
+import platform
+import sys
+import sysconfig
+from importlib.machinery import EXTENSION_SUFFIXES
+from typing import (
+    Dict,
+    FrozenSet,
+    Iterable,
+    Iterator,
+    List,
+    Optional,
+    Sequence,
+    Tuple,
+    Union,
+    cast,
+)
+
+from . import _manylinux, _musllinux
+
+logger = logging.getLogger(__name__)
+
+PythonVersion = Sequence[int]
+MacVersion = Tuple[int, int]
+
+INTERPRETER_SHORT_NAMES: Dict[str, str] = {
+    "python": "py",  # Generic.
+    "cpython": "cp",
+    "pypy": "pp",
+    "ironpython": "ip",
+    "jython": "jy",
+}
+
+
+_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32
+
+
+class Tag:
+    """
+    A representation of the tag triple for a wheel.
+
+    Instances are considered immutable and thus are hashable. Equality checking
+    is also supported.
+    """
+
+    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]
+
+    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
+        self._interpreter = interpreter.lower()
+        self._abi = abi.lower()
+        self._platform = platform.lower()
+        # The __hash__ of every single element in a Set[Tag] will be evaluated each time
+        # that a set calls its `.disjoint()` method, which may be called hundreds of
+        # times when scanning a page of links for packages with tags matching that
+        # Set[Tag]. Pre-computing the value here produces significant speedups for
+        # downstream consumers.
+        self._hash = hash((self._interpreter, self._abi, self._platform))
+
+    @property
+    def interpreter(self) -> str:
+        return self._interpreter
+
+    @property
+    def abi(self) -> str:
+        return self._abi
+
+    @property
+    def platform(self) -> str:
+        return self._platform
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Tag):
+            return NotImplemented
+
+        return (
+            (self._hash == other._hash)  # Short-circuit ASAP for perf reasons.
+            and (self._platform == other._platform)
+            and (self._abi == other._abi)
+            and (self._interpreter == other._interpreter)
+        )
+
+    def __hash__(self) -> int:
+        return self._hash
+
+    def __str__(self) -> str:
+        return f"{self._interpreter}-{self._abi}-{self._platform}"
+
+    def __repr__(self) -> str:
+        return f"<{self} @ {id(self)}>"
+
+
+def parse_tag(tag: str) -> FrozenSet[Tag]:
+    """
+    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
+
+    Returning a set is required due to the possibility that the tag is a
+    compressed tag set.
+    """
+    tags = set()
+    interpreters, abis, platforms = tag.split("-")
+    for interpreter in interpreters.split("."):
+        for abi in abis.split("."):
+            for platform_ in platforms.split("."):
+                tags.add(Tag(interpreter, abi, platform_))
+    return frozenset(tags)
+
+
+def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
+    value = sysconfig.get_config_var(name)
+    if value is None and warn:
+        logger.debug(
+            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
+        )
+    return value
+
+
+def _normalize_string(string: str) -> str:
+    return string.replace(".", "_").replace("-", "_")
+
+
+def _abi3_applies(python_version: PythonVersion) -> bool:
+    """
+    Determine if the Python version supports abi3.
+
+    PEP 384 was first implemented in Python 3.2.
+    """
+    return len(python_version) > 1 and tuple(python_version) >= (3, 2)
+
+
+def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
+    py_version = tuple(py_version)  # To allow for version comparison.
+    abis = []
+    version = _version_nodot(py_version[:2])
+    debug = pymalloc = ucs4 = ""
+    with_debug = _get_config_var("Py_DEBUG", warn)
+    has_refcount = hasattr(sys, "gettotalrefcount")
+    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
+    # extension modules is the best option.
+    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
+    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
+    if with_debug or (with_debug is None and (has_refcount or has_ext)):
+        debug = "d"
+    if py_version < (3, 8):
+        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
+        if with_pymalloc or with_pymalloc is None:
+            pymalloc = "m"
+        if py_version < (3, 3):
+            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
+            if unicode_size == 4 or (
+                unicode_size is None and sys.maxunicode == 0x10FFFF
+            ):
+                ucs4 = "u"
+    elif debug:
+        # Debug builds can also load "normal" extension modules.
+        # We can also assume no UCS-4 or pymalloc requirement.
+        abis.append(f"cp{version}")
+    abis.insert(
+        0,
+        "cp{version}{debug}{pymalloc}{ucs4}".format(
+            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
+        ),
+    )
+    return abis
+
+
+def cpython_tags(
+    python_version: Optional[PythonVersion] = None,
+    abis: Optional[Iterable[str]] = None,
+    platforms: Optional[Iterable[str]] = None,
+    *,
+    warn: bool = False,
+) -> Iterator[Tag]:
+    """
+    Yields the tags for a CPython interpreter.
+
+    The tags consist of:
+    - cp<python_version>-<abi>-<platform>
+    - cp<python_version>-abi3-<platform>
+    - cp<python_version>-none-<platform>
+    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.
+
+    If python_version only specifies a major version then user-provided ABIs and
+    the 'none' ABI tag will be used.
+
+    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
+    their normal position and not at the beginning.
+    """
+    if not python_version:
+        python_version = sys.version_info[:2]
+
+    interpreter = f"cp{_version_nodot(python_version[:2])}"
+
+    if abis is None:
+        if len(python_version) > 1:
+            abis = _cpython_abis(python_version, warn)
+        else:
+            abis = []
+    abis = list(abis)
+    # 'abi3' and 'none' are explicitly handled later.
+    for explicit_abi in ("abi3", "none"):
+        try:
+            abis.remove(explicit_abi)
+        except ValueError:
+            pass
+
+    platforms = list(platforms or platform_tags())
+    for abi in abis:
+        for platform_ in platforms:
+            yield Tag(interpreter, abi, platform_)
+    if _abi3_applies(python_version):
+        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
+    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)
+
+    if _abi3_applies(python_version):
+        for minor_version in range(python_version[1] - 1, 1, -1):
+            for platform_ in platforms:
+                interpreter = "cp{version}".format(
+                    version=_version_nodot((python_version[0], minor_version))
+                )
+                yield Tag(interpreter, "abi3", platform_)
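+
+# Editorial note (not part of the upstream packaging sources): on CPython 3.11,
+# the generator above yields roughly cp311-cp311-<platform>, cp311-abi3-<platform>,
+# cp311-none-<platform>, then cp310-abi3-<platform> ... cp32-abi3-<platform>,
+# i.e. version-specific ABI tags first, then abi3 fallbacks for older minors.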
+
+
+def _generic_abi() -> Iterator[str]:
+    abi = sysconfig.get_config_var("SOABI")
+    if abi:
+        yield _normalize_string(abi)
+
+
+def generic_tags(
+    interpreter: Optional[str] = None,
+    abis: Optional[Iterable[str]] = None,
+    platforms: Optional[Iterable[str]] = None,
+    *,
+    warn: bool = False,
+) -> Iterator[Tag]:
+    """
+    Yields the tags for a generic interpreter.
+
+    The tags consist of:
+    - <interpreter>-<abi>-<platform>
+
+    The "none" ABI will be added if it was not explicitly provided.
+    """
+    if not interpreter:
+        interp_name = interpreter_name()
+        interp_version = interpreter_version(warn=warn)
+        interpreter = "".join([interp_name, interp_version])
+    if abis is None:
+        abis = _generic_abi()
+    platforms = list(platforms or platform_tags())
+    abis = list(abis)
+    if "none" not in abis:
+        abis.append("none")
+    for abi in abis:
+        for platform_ in platforms:
+            yield Tag(interpreter, abi, platform_)
+
+
+def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
+    """
+    Yields Python versions in descending order.
+
+    After the latest version, the major-only version will be yielded, and then
+    all previous versions of that major version.
+    """
+    if len(py_version) > 1:
+        yield f"py{_version_nodot(py_version[:2])}"
+    yield f"py{py_version[0]}"
+    if len(py_version) > 1:
+        for minor in range(py_version[1] - 1, -1, -1):
+            yield f"py{_version_nodot((py_version[0], minor))}"
+
+
+def compatible_tags(
+    python_version: Optional[PythonVersion] = None,
+    interpreter: Optional[str] = None,
+    platforms: Optional[Iterable[str]] = None,
+) -> Iterator[Tag]:
+    """
+    Yields the sequence of tags that are compatible with a specific version of Python.
+
+    The tags consist of:
+    - py*-none-<platform>
+    - <interpreter>-none-any  # ... if `interpreter` is provided.
+    - py*-none-any
+    """
+    if not python_version:
+        python_version = sys.version_info[:2]
+    platforms = list(platforms or platform_tags())
+    for version in _py_interpreter_range(python_version):
+        for platform_ in platforms:
+            yield Tag(version, "none", platform_)
+    if interpreter:
+        yield Tag(interpreter, "none", "any")
+    for version in _py_interpreter_range(python_version):
+        yield Tag(version, "none", "any")
+
+
+def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
+    if not is_32bit:
+        return arch
+
+    if arch.startswith("ppc"):
+        return "ppc"
+
+    return "i386"
+
+
+def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
+    formats = [cpu_arch]
+    if cpu_arch == "x86_64":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat64", "fat32"])
+
+    elif cpu_arch == "i386":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat32", "fat"])
+
+    elif cpu_arch == "ppc64":
+        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
+        if version > (10, 5) or version < (10, 4):
+            return []
+        formats.append("fat64")
+
+    elif cpu_arch == "ppc":
+        if version > (10, 6):
+            return []
+        formats.extend(["fat32", "fat"])
+
+    if cpu_arch in {"arm64", "x86_64"}:
+        formats.append("universal2")
+
+    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
+        formats.append("universal")
+
+    return formats
+
+
+def mac_platforms(
+    version: Optional[MacVersion] = None, arch: Optional[str] = None
+) -> Iterator[str]:
+    """
+    Yields the platform tags for a macOS system.
+
+    The `version` parameter is a two-item tuple specifying the macOS version to
+    generate platform tags for. The `arch` parameter is the CPU architecture to
+    generate platform tags for. Both parameters default to the appropriate value
+    for the current system.
+    """
+    version_str, _, cpu_arch = platform.mac_ver()
+    if version is None:
+        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
+    else:
+        version = version
+    if arch is None:
+        arch = _mac_arch(cpu_arch)
+    else:
+        arch = arch
+
+    if (10, 0) <= version and version < (11, 0):
+        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
+        # "minor" version number.  The major version was always 10.
+        for minor_version in range(version[1], -1, -1):
+            compat_version = 10, minor_version
+            binary_formats = _mac_binary_formats(compat_version, arch)
+            for binary_format in binary_formats:
+                yield "macosx_{major}_{minor}_{binary_format}".format(
+                    major=10, minor=minor_version, binary_format=binary_format
+                )
+
+    if version >= (11, 0):
+        # Starting with Mac OS 11, each yearly release bumps the major version
+        # number.   The minor versions are now the midyear updates.
+        for major_version in range(version[0], 10, -1):
+            compat_version = major_version, 0
+            binary_formats = _mac_binary_formats(compat_version, arch)
+            for binary_format in binary_formats:
+                yield "macosx_{major}_{minor}_{binary_format}".format(
+                    major=major_version, minor=0, binary_format=binary_format
+                )
+
+    if version >= (11, 0):
+        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
+        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
+        # releases exist.
+        #
+        # However, the "universal2" binary format can have a
+        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
+        # that version of macOS.
+        if arch == "x86_64":
+            for minor_version in range(16, 3, -1):
+                compat_version = 10, minor_version
+                binary_formats = _mac_binary_formats(compat_version, arch)
+                for binary_format in binary_formats:
+                    yield "macosx_{major}_{minor}_{binary_format}".format(
+                        major=compat_version[0],
+                        minor=compat_version[1],
+                        binary_format=binary_format,
+                    )
+        else:
+            for minor_version in range(16, 3, -1):
+                compat_version = 10, minor_version
+                binary_format = "universal2"
+                yield "macosx_{major}_{minor}_{binary_format}".format(
+                    major=compat_version[0],
+                    minor=compat_version[1],
+                    binary_format=binary_format,
+                )
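+
+# Editorial note (not part of the upstream packaging sources): for macOS 12 on
+# arm64, the generator above yields macosx_12_0_arm64, macosx_12_0_universal2,
+# macosx_11_0_arm64, macosx_11_0_universal2, and then macosx_10_16_universal2
+# down to macosx_10_4_universal2, since native arm64 wheels only exist from
+# macOS 11.0 onwards.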
+
+
+def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
+    linux = _normalize_string(sysconfig.get_platform())
+    if is_32bit:
+        if linux == "linux_x86_64":
+            linux = "linux_i686"
+        elif linux == "linux_aarch64":
+            linux = "linux_armv7l"
+    _, arch = linux.split("_", 1)
+    yield from _manylinux.platform_tags(linux, arch)
+    yield from _musllinux.platform_tags(arch)
+    yield linux
+
+
+def _generic_platforms() -> Iterator[str]:
+    yield _normalize_string(sysconfig.get_platform())
+
+
+def platform_tags() -> Iterator[str]:
+    """
+    Provides the platform tags for this installation.
+    """
+    if platform.system() == "Darwin":
+        return mac_platforms()
+    elif platform.system() == "Linux":
+        return _linux_platforms()
+    else:
+        return _generic_platforms()
+
+
+def interpreter_name() -> str:
+    """
+    Returns the name of the running interpreter.
+    """
+    name = sys.implementation.name
+    return INTERPRETER_SHORT_NAMES.get(name) or name
+
+
+def interpreter_version(*, warn: bool = False) -> str:
+    """
+    Returns the version of the running interpreter.
+    """
+    version = _get_config_var("py_version_nodot", warn=warn)
+    if version:
+        version = str(version)
+    else:
+        version = _version_nodot(sys.version_info[:2])
+    return version
+
+
+def _version_nodot(version: PythonVersion) -> str:
+    return "".join(map(str, version))
+
+
+def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
+    """
+    Returns the sequence of tag triples for the running interpreter.
+
+    The order of the sequence corresponds to priority order for the
+    interpreter, from most to least important.
+    """
+
+    interp_name = interpreter_name()
+    if interp_name == "cp":
+        yield from cpython_tags(warn=warn)
+    else:
+        yield from generic_tags()
+
+    if interp_name == "pp":
+        yield from compatible_tags(interpreter="pp3")
+    else:
+        yield from compatible_tags()
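+
+
+# --- Editorial usage sketch (not part of the upstream packaging sources) ---
+# Shows parse_tag() expanding a compressed tag set and the highest-priority tag
+# reported for the running interpreter; the exact output depends on the system.
+# Guarded so it only runs when the module itself is executed.
+if __name__ == "__main__":
+    for tag in sorted(str(t) for t in parse_tag("py2.py3-none-any")):
+        print(tag)  # py2-none-any, py3-none-any
+    print(next(iter(sys_tags())))  # e.g. cp312-cp312-manylinux_... on CPython 3.12/Linux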
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/utils.py b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/utils.py
new file mode 100644
index 0000000..bab11b8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/utils.py
@@ -0,0 +1,136 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import re
+from typing import FrozenSet, NewType, Tuple, Union, cast
+
+from .tags import Tag, parse_tag
+from .version import InvalidVersion, Version
+
+BuildTag = Union[Tuple[()], Tuple[int, str]]
+NormalizedName = NewType("NormalizedName", str)
+
+
+class InvalidWheelFilename(ValueError):
+    """
+    An invalid wheel filename was found, users should refer to PEP 427.
+    """
+
+
+class InvalidSdistFilename(ValueError):
+    """
+    An invalid sdist filename was found, users should refer to the packaging user guide.
+    """
+
+
+_canonicalize_regex = re.compile(r"[-_.]+")
+# PEP 427: The build number must start with a digit.
+_build_tag_regex = re.compile(r"(\d+)(.*)")
+
+
+def canonicalize_name(name: str) -> NormalizedName:
+    # This is taken from PEP 503.
+    value = _canonicalize_regex.sub("-", name).lower()
+    return cast(NormalizedName, value)
+
+
+def canonicalize_version(version: Union[Version, str]) -> str:
+    """
+    This is very similar to Version.__str__, but has one subtle difference
+    with the way it handles the release segment.
+    """
+    if isinstance(version, str):
+        try:
+            parsed = Version(version)
+        except InvalidVersion:
+            # Legacy versions cannot be normalized
+            return version
+    else:
+        parsed = version
+
+    parts = []
+
+    # Epoch
+    if parsed.epoch != 0:
+        parts.append(f"{parsed.epoch}!")
+
+    # Release segment
+    # NB: This strips trailing '.0's to normalize
+    parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in parsed.release)))
+
+    # Pre-release
+    if parsed.pre is not None:
+        parts.append("".join(str(x) for x in parsed.pre))
+
+    # Post-release
+    if parsed.post is not None:
+        parts.append(f".post{parsed.post}")
+
+    # Development release
+    if parsed.dev is not None:
+        parts.append(f".dev{parsed.dev}")
+
+    # Local version segment
+    if parsed.local is not None:
+        parts.append(f"+{parsed.local}")
+
+    return "".join(parts)
+
+
+def parse_wheel_filename(
+    filename: str,
+) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
+    if not filename.endswith(".whl"):
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (extension must be '.whl'): {filename}"
+        )
+
+    filename = filename[:-4]
+    dashes = filename.count("-")
+    if dashes not in (4, 5):
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (wrong number of parts): {filename}"
+        )
+
+    parts = filename.split("-", dashes - 2)
+    name_part = parts[0]
+    # See PEP 427 for the rules on escaping the project name
+    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
+        raise InvalidWheelFilename(f"Invalid project name: {filename}")
+    name = canonicalize_name(name_part)
+    version = Version(parts[1])
+    if dashes == 5:
+        build_part = parts[2]
+        build_match = _build_tag_regex.match(build_part)
+        if build_match is None:
+            raise InvalidWheelFilename(
+                f"Invalid build number: {build_part} in '{filename}'"
+            )
+        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
+    else:
+        build = ()
+    tags = parse_tag(parts[-1])
+    return (name, version, build, tags)
+
+
+def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
+    if filename.endswith(".tar.gz"):
+        file_stem = filename[: -len(".tar.gz")]
+    elif filename.endswith(".zip"):
+        file_stem = filename[: -len(".zip")]
+    else:
+        raise InvalidSdistFilename(
+            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
+            f" {filename}"
+        )
+
+    # We are requiring a PEP 440 version, which cannot contain dashes,
+    # so we split on the last dash.
+    name_part, sep, version_part = file_stem.rpartition("-")
+    if not sep:
+        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
+
+    name = canonicalize_name(name_part)
+    version = Version(version_part)
+    return (name, version)
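+
+
+# --- Editorial usage sketch (not part of the upstream packaging sources) ---
+# Demonstrates the filename parsers above on made-up wheel and sdist names.
+# Guarded so it only runs when the module itself is executed.
+if __name__ == "__main__":
+    name, version, build, tags = parse_wheel_filename("Sample_Pkg-1.0-py3-none-any.whl")
+    print(name, version, build, sorted(str(t) for t in tags))
+    # sample-pkg 1.0 () ['py3-none-any']
+    print(parse_sdist_filename("Sample_Pkg-1.0.tar.gz"))
+    # ('sample-pkg', <Version('1.0')>)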
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/version.py b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/version.py
new file mode 100644
index 0000000..de9a09a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/packaging/version.py
@@ -0,0 +1,504 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import collections
+import itertools
+import re
+import warnings
+from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union
+
+from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
+
+__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]
+
+InfiniteTypes = Union[InfinityType, NegativeInfinityType]
+PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
+SubLocalType = Union[InfiniteTypes, int, str]
+LocalType = Union[
+    NegativeInfinityType,
+    Tuple[
+        Union[
+            SubLocalType,
+            Tuple[SubLocalType, str],
+            Tuple[NegativeInfinityType, SubLocalType],
+        ],
+        ...,
+    ],
+]
+CmpKey = Tuple[
+    int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
+]
+LegacyCmpKey = Tuple[int, Tuple[str, ...]]
+VersionComparisonMethod = Callable[
+    [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool
+]
+
+_Version = collections.namedtuple(
+    "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
+)
+
+
+def parse(version: str) -> Union["LegacyVersion", "Version"]:
+    """
+    Parse the given version string and return either a :class:`Version` object
+    or a :class:`LegacyVersion` object depending on if the given version is
+    a valid PEP 440 version or a legacy version.
+    """
+    try:
+        return Version(version)
+    except InvalidVersion:
+        return LegacyVersion(version)
+
+
+class InvalidVersion(ValueError):
+    """
+    An invalid version was found, users should refer to PEP 440.
+    """
+
+
+class _BaseVersion:
+    _key: Union[CmpKey, LegacyCmpKey]
+
+    def __hash__(self) -> int:
+        return hash(self._key)
+
+    # Please keep the duplicated `isinstance` check
+    # in the six comparisons hereunder
+    # unless you find a way to avoid adding overhead function calls.
+    def __lt__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key < other._key
+
+    def __le__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key <= other._key
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key == other._key
+
+    def __ge__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key >= other._key
+
+    def __gt__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key > other._key
+
+    def __ne__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key != other._key
+
+
+class LegacyVersion(_BaseVersion):
+    def __init__(self, version: str) -> None:
+        self._version = str(version)
+        self._key = _legacy_cmpkey(self._version)
+
+        warnings.warn(
+            "Creating a LegacyVersion has been deprecated and will be "
+            "removed in the next major release",
+            DeprecationWarning,
+        )
+
+    def __str__(self) -> str:
+        return self._version
+
+    def __repr__(self) -> str:
+        return f""
+
+    @property
+    def public(self) -> str:
+        return self._version
+
+    @property
+    def base_version(self) -> str:
+        return self._version
+
+    @property
+    def epoch(self) -> int:
+        return -1
+
+    @property
+    def release(self) -> None:
+        return None
+
+    @property
+    def pre(self) -> None:
+        return None
+
+    @property
+    def post(self) -> None:
+        return None
+
+    @property
+    def dev(self) -> None:
+        return None
+
+    @property
+    def local(self) -> None:
+        return None
+
+    @property
+    def is_prerelease(self) -> bool:
+        return False
+
+    @property
+    def is_postrelease(self) -> bool:
+        return False
+
+    @property
+    def is_devrelease(self) -> bool:
+        return False
+
+
+_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
+
+_legacy_version_replacement_map = {
+    "pre": "c",
+    "preview": "c",
+    "-": "final-",
+    "rc": "c",
+    "dev": "@",
+}
+
+
+def _parse_version_parts(s: str) -> Iterator[str]:
+    for part in _legacy_version_component_re.split(s):
+        part = _legacy_version_replacement_map.get(part, part)
+
+        if not part or part == ".":
+            continue
+
+        if part[:1] in "0123456789":
+            # pad for numeric comparison
+            yield part.zfill(8)
+        else:
+            yield "*" + part
+
+    # ensure that alpha/beta/candidate are before final
+    yield "*final"
+
+
+def _legacy_cmpkey(version: str) -> LegacyCmpKey:
+
+    # We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch
+    # greater than or equal to 0. This will effectively put the LegacyVersion,
+    # which uses the defacto standard originally implemented by setuptools,
+    # as before all PEP 440 versions.
+    epoch = -1
+
+    # This scheme is taken from pkg_resources.parse_version of setuptools prior
+    # to its adoption of the packaging library.
+    parts: List[str] = []
+    for part in _parse_version_parts(version.lower()):
+        if part.startswith("*"):
+            # remove "-" before a prerelease tag
+            if part < "*final":
+                while parts and parts[-1] == "*final-":
+                    parts.pop()
+
+            # remove trailing zeros from each series of numeric parts
+            while parts and parts[-1] == "00000000":
+                parts.pop()
+
+        parts.append(part)
+
+    return epoch, tuple(parts)
+
+
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+VERSION_PATTERN = r"""
+    v?
+    (?:
+        (?:(?P<epoch>[0-9]+)!)?                           # epoch
+        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
+        (?P<pre>                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
+
+class Version(_BaseVersion):
+
+    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+    def __init__(self, version: str) -> None:
+
+        # Validate the version and parse it into pieces
+        match = self._regex.search(version)
+        if not match:
+            raise InvalidVersion(f"Invalid version: '{version}'")
+
+        # Store the parsed out pieces of the version
+        self._version = _Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
+            post=_parse_letter_version(
+                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+            ),
+            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
+            local=_parse_local_version(match.group("local")),
+        )
+
+        # Generate a key which will be used for sorting
+        self._key = _cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self) -> str:
+        return f""
+
+    def __str__(self) -> str:
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        # Pre-release
+        if self.pre is not None:
+            parts.append("".join(str(x) for x in self.pre))
+
+        # Post-release
+        if self.post is not None:
+            parts.append(f".post{self.post}")
+
+        # Development release
+        if self.dev is not None:
+            parts.append(f".dev{self.dev}")
+
+        # Local version segment
+        if self.local is not None:
+            parts.append(f"+{self.local}")
+
+        return "".join(parts)
+
+    @property
+    def epoch(self) -> int:
+        _epoch: int = self._version.epoch
+        return _epoch
+
+    @property
+    def release(self) -> Tuple[int, ...]:
+        _release: Tuple[int, ...] = self._version.release
+        return _release
+
+    @property
+    def pre(self) -> Optional[Tuple[str, int]]:
+        _pre: Optional[Tuple[str, int]] = self._version.pre
+        return _pre
+
+    @property
+    def post(self) -> Optional[int]:
+        return self._version.post[1] if self._version.post else None
+
+    @property
+    def dev(self) -> Optional[int]:
+        return self._version.dev[1] if self._version.dev else None
+
+    @property
+    def local(self) -> Optional[str]:
+        if self._version.local:
+            return ".".join(str(x) for x in self._version.local)
+        else:
+            return None
+
+    @property
+    def public(self) -> str:
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self) -> str:
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        return "".join(parts)
+
+    @property
+    def is_prerelease(self) -> bool:
+        return self.dev is not None or self.pre is not None
+
+    @property
+    def is_postrelease(self) -> bool:
+        return self.post is not None
+
+    @property
+    def is_devrelease(self) -> bool:
+        return self.dev is not None
+
+    @property
+    def major(self) -> int:
+        return self.release[0] if len(self.release) >= 1 else 0
+
+    @property
+    def minor(self) -> int:
+        return self.release[1] if len(self.release) >= 2 else 0
+
+    @property
+    def micro(self) -> int:
+        return self.release[2] if len(self.release) >= 3 else 0
+
+
+def _parse_letter_version(
+    letter: str, number: Union[str, bytes, SupportsInt]
+) -> Optional[Tuple[str, int]]:
+
+    if letter:
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        if number is None:
+            number = 0
+
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+        elif letter in ["rev", "r"]:
+            letter = "post"
+
+        return letter, int(number)
+    if not letter and number:
+        # We assume if we are given a number, but we are not given a letter
+        # then this is using the implicit post release syntax (e.g. 1.0-1)
+        letter = "post"
+
+        return letter, int(number)
+
+    return None
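+
+# Editorial note (not part of the upstream packaging sources): examples of the
+# normalization performed by the function above.
+#
+#   _parse_letter_version("alpha", "1") -> ("a", 1)
+#   _parse_letter_version("pre", None)  -> ("rc", 0)
+#   _parse_letter_version(None, "1")    -> ("post", 1)   # implicit post, e.g. "1.0-1"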
+
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local: str) -> Optional[LocalType]:
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_separators.split(local)
+        )
+    return None
+
+
+def _cmpkey(
+    epoch: int,
+    release: Tuple[int, ...],
+    pre: Optional[Tuple[str, int]],
+    post: Optional[Tuple[str, int]],
+    dev: Optional[Tuple[str, int]],
+    local: Optional[Tuple[SubLocalType]],
+) -> CmpKey:
+
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll reverse the list, drop all the now
+    # leading zeros until we come to something non-zero, then re-reverse the
+    # rest back into the correct order, make it a tuple, and use that for our
+    # sorting key.
+    _release = tuple(
+        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
+    )
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        _pre: PrePostDevType = NegativeInfinity
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        _pre = Infinity
+    else:
+        _pre = pre
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        _post: PrePostDevType = NegativeInfinity
+
+    else:
+        _post = post
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        _dev: PrePostDevType = Infinity
+
+    else:
+        _dev = dev
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        _local: LocalType = NegativeInfinity
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP440.
+        # - Alpha numeric segments sort before numeric segments
+        # - Alpha numeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        _local = tuple(
+            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+        )
+
+    return epoch, _release, _pre, _post, _dev, _local
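+
+
+# --- Editorial usage sketch (not part of the upstream packaging sources) ---
+# Illustrates the PEP 440 ordering produced by the comparison key above:
+# dev releases sort before pre-releases, which sort before the final release,
+# which sorts before post-releases. Guarded so it only runs when executed.
+if __name__ == "__main__":
+    versions = [Version(v) for v in ("1.0.post1", "1.0", "1.0a1", "1.0.dev1")]
+    print([str(v) for v in sorted(versions)])
+    # ['1.0.dev1', '1.0a1', '1.0', '1.0.post1']
+    print(Version("1.0+local.1").public, Version("1!2.0").epoch)  # 1.0 1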
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__init__.py
new file mode 100644
index 0000000..ad27940
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__init__.py
@@ -0,0 +1,3361 @@
+"""
+Package resource API
+--------------------
+
+A resource is a logical file contained within a package, or a logical
+subdirectory thereof.  The package resource API expects resource names
+to have their path parts separated with ``/``, *not* whatever the local
+path separator is.  Do not use os.path operations to manipulate resource
+names being passed into the API.
+
+The package resource API is designed to work with normal filesystem packages,
+.egg files, and unpacked .egg files.  It can also work in a limited way with
+.zip files and with custom PEP 302 loaders that support the ``get_data()``
+method.
+
+This module is deprecated. Users are directed to :mod:`importlib.resources`,
+:mod:`importlib.metadata` and :pypi:`packaging` instead.
+"""
+
+import sys
+import os
+import io
+import time
+import re
+import types
+import zipfile
+import zipimport
+import warnings
+import stat
+import functools
+import pkgutil
+import operator
+import platform
+import collections
+import plistlib
+import email.parser
+import errno
+import tempfile
+import textwrap
+import inspect
+import ntpath
+import posixpath
+import importlib
+from pkgutil import get_importer
+
+try:
+    import _imp
+except ImportError:
+    # Python 3.2 compatibility
+    import imp as _imp
+
+try:
+    FileExistsError
+except NameError:
+    FileExistsError = OSError
+
+# capture these to bypass sandboxing
+from os import utime
+
+try:
+    from os import mkdir, rename, unlink
+
+    WRITE_SUPPORT = True
+except ImportError:
+    # no write support, probably under GAE
+    WRITE_SUPPORT = False
+
+from os import open as os_open
+from os.path import isdir, split
+
+try:
+    import importlib.machinery as importlib_machinery
+
+    # access attribute to force import under delayed import mechanisms.
+    importlib_machinery.__name__
+except ImportError:
+    importlib_machinery = None
+
+from pip._internal.utils._jaraco_text import (
+    yield_lines,
+    drop_comment,
+    join_continuation,
+)
+
+from pip._vendor import platformdirs
+from pip._vendor import packaging
+
+__import__('pip._vendor.packaging.version')
+__import__('pip._vendor.packaging.specifiers')
+__import__('pip._vendor.packaging.requirements')
+__import__('pip._vendor.packaging.markers')
+__import__('pip._vendor.packaging.utils')
+
+if sys.version_info < (3, 5):
+    raise RuntimeError("Python 3.5 or later is required")
+
+# declare some globals that will be defined later to
+# satisfy the linters.
+require = None
+working_set = None
+add_activation_listener = None
+resources_stream = None
+cleanup_resources = None
+resource_dir = None
+resource_stream = None
+set_extraction_path = None
+resource_isdir = None
+resource_string = None
+iter_entry_points = None
+resource_listdir = None
+resource_filename = None
+resource_exists = None
+_distribution_finders = None
+_namespace_handlers = None
+_namespace_packages = None
+
+
+warnings.warn(
+    "pkg_resources is deprecated as an API. "
+    "See https://setuptools.pypa.io/en/latest/pkg_resources.html",
+    DeprecationWarning,
+    stacklevel=2
+)
+
+
+_PEP440_FALLBACK = re.compile(r"^v?(?P<safe>(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I)
+
+
+class PEP440Warning(RuntimeWarning):
+    """
+    Used when there is an issue with a version or specifier not complying with
+    PEP 440.
+    """
+
+
+parse_version = packaging.version.Version
+
+
+_state_vars = {}
+
+
+def _declare_state(vartype, **kw):
+    globals().update(kw)
+    _state_vars.update(dict.fromkeys(kw, vartype))
+
+
+def __getstate__():
+    state = {}
+    g = globals()
+    for k, v in _state_vars.items():
+        state[k] = g['_sget_' + v](g[k])
+    return state
+
+
+def __setstate__(state):
+    g = globals()
+    for k, v in state.items():
+        g['_sset_' + _state_vars[k]](k, g[k], v)
+    return state
+
+
+def _sget_dict(val):
+    return val.copy()
+
+
+def _sset_dict(key, ob, state):
+    ob.clear()
+    ob.update(state)
+
+
+def _sget_object(val):
+    return val.__getstate__()
+
+
+def _sset_object(key, ob, state):
+    ob.__setstate__(state)
+
+
+_sget_none = _sset_none = lambda *args: None
+
+
+def get_supported_platform():
+    """Return this platform's maximum compatible version.
+
+    distutils.util.get_platform() normally reports the minimum version
+    of macOS that would be required to *use* extensions produced by
+    distutils.  But what we want when checking compatibility is to know the
+    version of macOS that we are *running*.  To allow usage of packages that
+    explicitly require a newer version of macOS, we must also know the
+    current version of the OS.
+
+    If this condition occurs for any other platform with a version in its
+    platform strings, this function should be extended accordingly.
+    """
+    plat = get_build_platform()
+    m = macosVersionString.match(plat)
+    if m is not None and sys.platform == "darwin":
+        try:
+            plat = 'macosx-%s-%s' % ('.'.join(_macos_vers()[:2]), m.group(3))
+        except ValueError:
+            # not macOS
+            pass
+    return plat
+
+
+__all__ = [
+    # Basic resource access and distribution/entry point discovery
+    'require',
+    'run_script',
+    'get_provider',
+    'get_distribution',
+    'load_entry_point',
+    'get_entry_map',
+    'get_entry_info',
+    'iter_entry_points',
+    'resource_string',
+    'resource_stream',
+    'resource_filename',
+    'resource_listdir',
+    'resource_exists',
+    'resource_isdir',
+    # Environmental control
+    'declare_namespace',
+    'working_set',
+    'add_activation_listener',
+    'find_distributions',
+    'set_extraction_path',
+    'cleanup_resources',
+    'get_default_cache',
+    # Primary implementation classes
+    'Environment',
+    'WorkingSet',
+    'ResourceManager',
+    'Distribution',
+    'Requirement',
+    'EntryPoint',
+    # Exceptions
+    'ResolutionError',
+    'VersionConflict',
+    'DistributionNotFound',
+    'UnknownExtra',
+    'ExtractionError',
+    # Warnings
+    'PEP440Warning',
+    # Parsing functions and string utilities
+    'parse_requirements',
+    'parse_version',
+    'safe_name',
+    'safe_version',
+    'get_platform',
+    'compatible_platforms',
+    'yield_lines',
+    'split_sections',
+    'safe_extra',
+    'to_filename',
+    'invalid_marker',
+    'evaluate_marker',
+    # filesystem utilities
+    'ensure_directory',
+    'normalize_path',
+    # Distribution "precedence" constants
+    'EGG_DIST',
+    'BINARY_DIST',
+    'SOURCE_DIST',
+    'CHECKOUT_DIST',
+    'DEVELOP_DIST',
+    # "Provider" interfaces, implementations, and registration/lookup APIs
+    'IMetadataProvider',
+    'IResourceProvider',
+    'FileMetadata',
+    'PathMetadata',
+    'EggMetadata',
+    'EmptyProvider',
+    'empty_provider',
+    'NullProvider',
+    'EggProvider',
+    'DefaultProvider',
+    'ZipProvider',
+    'register_finder',
+    'register_namespace_handler',
+    'register_loader_type',
+    'fixup_namespace_packages',
+    'get_importer',
+    # Warnings
+    'PkgResourcesDeprecationWarning',
+    # Deprecated/backward compatibility only
+    'run_main',
+    'AvailableDistributions',
+]
+
+
+class ResolutionError(Exception):
+    """Abstract base for dependency resolution errors"""
+
+    def __repr__(self):
+        return self.__class__.__name__ + repr(self.args)
+
+
+class VersionConflict(ResolutionError):
+    """
+    An already-installed version conflicts with the requested version.
+
+    Should be initialized with the installed Distribution and the requested
+    Requirement.
+    """
+
+    _template = "{self.dist} is installed but {self.req} is required"
+
+    @property
+    def dist(self):
+        return self.args[0]
+
+    @property
+    def req(self):
+        return self.args[1]
+
+    def report(self):
+        return self._template.format(**locals())
+
+    def with_context(self, required_by):
+        """
+        If required_by is non-empty, return a version of self that is a
+        ContextualVersionConflict.
+        """
+        if not required_by:
+            return self
+        args = self.args + (required_by,)
+        return ContextualVersionConflict(*args)
+
+
+class ContextualVersionConflict(VersionConflict):
+    """
+    A VersionConflict that accepts a third parameter, the set of the
+    requirements that required the installed Distribution.
+    """
+
+    _template = VersionConflict._template + ' by {self.required_by}'
+
+    @property
+    def required_by(self):
+        return self.args[2]
+
+
+class DistributionNotFound(ResolutionError):
+    """A requested distribution was not found"""
+
+    _template = (
+        "The '{self.req}' distribution was not found "
+        "and is required by {self.requirers_str}"
+    )
+
+    @property
+    def req(self):
+        return self.args[0]
+
+    @property
+    def requirers(self):
+        return self.args[1]
+
+    @property
+    def requirers_str(self):
+        if not self.requirers:
+            return 'the application'
+        return ', '.join(self.requirers)
+
+    def report(self):
+        return self._template.format(**locals())
+
+    def __str__(self):
+        return self.report()
+
+
+class UnknownExtra(ResolutionError):
+    """Distribution doesn't have an "extra feature" of the given name"""
+
+
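+# --- Illustrative usage (not part of the original pkg_resources source) ---
+# The ResolutionError subclasses above carry their context in ``self.args``;
+# a minimal sketch, assuming ``dist`` and ``req`` are an installed
+# Distribution and a Requirement it conflicts with:
+#
+#   err = VersionConflict(dist, req).with_context({'some-consumer'})
+#   err.report()  # "<dist> is installed but <req> is required by {'some-consumer'}"
+
+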
+_provider_factories = {}
+
+PY_MAJOR = '{}.{}'.format(*sys.version_info)
+EGG_DIST = 3
+BINARY_DIST = 2
+SOURCE_DIST = 1
+CHECKOUT_DIST = 0
+DEVELOP_DIST = -1
+
+
+def register_loader_type(loader_type, provider_factory):
+    """Register `provider_factory` to make providers for `loader_type`
+
+    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
+    and `provider_factory` is a function that, passed a *module* object,
+    returns an ``IResourceProvider`` for that module.
+    """
+    _provider_factories[loader_type] = provider_factory
+
+
+def get_provider(moduleOrReq):
+    """Return an IResourceProvider for the named module or requirement"""
+    if isinstance(moduleOrReq, Requirement):
+        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
+    try:
+        module = sys.modules[moduleOrReq]
+    except KeyError:
+        __import__(moduleOrReq)
+        module = sys.modules[moduleOrReq]
+    loader = getattr(module, '__loader__', None)
+    return _find_adapter(_provider_factories, loader)(module)
+
+
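+# --- Illustrative usage (not part of the original pkg_resources source) ---
+# A minimal sketch of looking up a provider for an importable package and
+# querying it through the IResourceProvider interface; the package name
+# "email" is only an example:
+#
+#   provider = get_provider('email')
+#   provider.has_resource('mime')     # True if the package ships that resource
+#   provider.resource_isdir('mime')   # directory-style resources
+
+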
+def _macos_vers(_cache=[]):
+    if not _cache:
+        version = platform.mac_ver()[0]
+        # fallback for MacPorts
+        if version == '':
+            plist = '/System/Library/CoreServices/SystemVersion.plist'
+            if os.path.exists(plist):
+                if hasattr(plistlib, 'readPlist'):
+                    plist_content = plistlib.readPlist(plist)
+                    if 'ProductVersion' in plist_content:
+                        version = plist_content['ProductVersion']
+
+        _cache.append(version.split('.'))
+    return _cache[0]
+
+
+def _macos_arch(machine):
+    return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
+
+
+def get_build_platform():
+    """Return this platform's string for platform-specific distributions
+
+    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
+    needs some hacks for Linux and macOS.
+    """
+    from sysconfig import get_platform
+
+    plat = get_platform()
+    if sys.platform == "darwin" and not plat.startswith('macosx-'):
+        try:
+            version = _macos_vers()
+            machine = os.uname()[4].replace(" ", "_")
+            return "macosx-%d.%d-%s" % (
+                int(version[0]),
+                int(version[1]),
+                _macos_arch(machine),
+            )
+        except ValueError:
+            # if someone is running a non-Mac darwin system, this will fall
+            # through to the default implementation
+            pass
+    return plat
+
+
+macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
+darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
+# XXX backward compat
+get_platform = get_build_platform
+
+
+def compatible_platforms(provided, required):
+    """Can code for the `provided` platform run on the `required` platform?
+
+    Returns true if either platform is ``None``, or the platforms are equal.
+
+    XXX Needs compatibility checks for Linux and other unixy OSes.
+    """
+    if provided is None or required is None or provided == required:
+        # easy case
+        return True
+
+    # macOS special cases
+    reqMac = macosVersionString.match(required)
+    if reqMac:
+        provMac = macosVersionString.match(provided)
+
+        # is this a Mac package?
+        if not provMac:
+            # this is backwards compatibility for packages built before
+            # setuptools 0.6. All packages built after this point will
+            # use the new macOS designation.
+            provDarwin = darwinVersionString.match(provided)
+            if provDarwin:
+                dversion = int(provDarwin.group(1))
+                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
+                if (
+                    dversion == 7
+                    and macosversion >= "10.3"
+                    or dversion == 8
+                    and macosversion >= "10.4"
+                ):
+                    return True
+            # egg isn't macOS or legacy darwin
+            return False
+
+        # are they the same major version and machine type?
+        if provMac.group(1) != reqMac.group(1) or provMac.group(3) != reqMac.group(3):
+            return False
+
+        # is the required OS major update >= the provided one?
+        if int(provMac.group(2)) > int(reqMac.group(2)):
+            return False
+
+        return True
+
+    # XXX Linux and other platforms' special cases should go here
+    return False
+
+
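+# --- Illustrative checks (not part of the original pkg_resources source) ---
+# compatible_platforms() treats ``None`` or equal strings as trivially
+# compatible and otherwise applies the macOS-specific matching above:
+#
+#   compatible_platforms(None, 'macosx-10.9-x86_64')                  # True
+#   compatible_platforms('macosx-10.9-x86_64', 'macosx-10.9-x86_64')  # True
+#   compatible_platforms('macosx-11.0-arm64', 'macosx-10.9-x86_64')   # False
+
+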
+def run_script(dist_spec, script_name):
+    """Locate distribution `dist_spec` and run its `script_name` script"""
+    ns = sys._getframe(1).f_globals
+    name = ns['__name__']
+    ns.clear()
+    ns['__name__'] = name
+    require(dist_spec)[0].run_script(script_name, ns)
+
+
+# backward compatibility
+run_main = run_script
+
+
+def get_distribution(dist):
+    """Return a current distribution object for a Requirement or string"""
+    if isinstance(dist, str):
+        dist = Requirement.parse(dist)
+    if isinstance(dist, Requirement):
+        dist = get_provider(dist)
+    if not isinstance(dist, Distribution):
+        raise TypeError("Expected string, Requirement, or Distribution", dist)
+    return dist
+
+
+def load_entry_point(dist, group, name):
+    """Return `name` entry point of `group` for `dist` or raise ImportError"""
+    return get_distribution(dist).load_entry_point(group, name)
+
+
+def get_entry_map(dist, group=None):
+    """Return the entry point map for `group`, or the full entry map"""
+    return get_distribution(dist).get_entry_map(group)
+
+
+def get_entry_info(dist, group, name):
+    """Return the EntryPoint object for `group`+`name`, or ``None``"""
+    return get_distribution(dist).get_entry_info(group, name)
+
+
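+# --- Illustrative usage (not part of the original pkg_resources source) ---
+# A sketch of the module-level convenience helpers; the project and entry
+# point names are assumptions and may not exist in a given environment:
+#
+#   dist = get_distribution('setuptools')      # -> Distribution object
+#   ep = get_entry_info('setuptools', 'console_scripts', 'easy_install')
+#   fn = load_entry_point('setuptools', 'console_scripts', 'easy_install')
+#   # load_entry_point() raises ImportError if the entry point is missing.
+
+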
+class IMetadataProvider:
+    def has_metadata(name):
+        """Does the package's distribution contain the named metadata?"""
+
+    def get_metadata(name):
+        """The named metadata resource as a string"""
+
+    def get_metadata_lines(name):
+        """Yield named metadata resource as list of non-blank non-comment lines
+
+        Leading and trailing whitespace is stripped from each line, and lines
+        with ``#`` as the first non-blank character are omitted."""
+
+    def metadata_isdir(name):
+        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""
+
+    def metadata_listdir(name):
+        """List of metadata names in the directory (like ``os.listdir()``)"""
+
+    def run_script(script_name, namespace):
+        """Execute the named script in the supplied namespace dictionary"""
+
+
+class IResourceProvider(IMetadataProvider):
+    """An object that provides access to package resources"""
+
+    def get_resource_filename(manager, resource_name):
+        """Return a true filesystem path for `resource_name`
+
+        `manager` must be an ``IResourceManager``"""
+
+    def get_resource_stream(manager, resource_name):
+        """Return a readable file-like object for `resource_name`
+
+        `manager` must be an ``IResourceManager``"""
+
+    def get_resource_string(manager, resource_name):
+        """Return a string containing the contents of `resource_name`
+
+        `manager` must be an ``IResourceManager``"""
+
+    def has_resource(resource_name):
+        """Does the package contain the named resource?"""
+
+    def resource_isdir(resource_name):
+        """Is the named resource a directory?  (like ``os.path.isdir()``)"""
+
+    def resource_listdir(resource_name):
+        """List of resource names in the directory (like ``os.listdir()``)"""
+
+
+class WorkingSet:
+    """A collection of active distributions on sys.path (or a similar list)"""
+
+    def __init__(self, entries=None):
+        """Create working set from list of path entries (default=sys.path)"""
+        self.entries = []
+        self.entry_keys = {}
+        self.by_key = {}
+        self.normalized_to_canonical_keys = {}
+        self.callbacks = []
+
+        if entries is None:
+            entries = sys.path
+
+        for entry in entries:
+            self.add_entry(entry)
+
+    @classmethod
+    def _build_master(cls):
+        """
+        Prepare the master working set.
+        """
+        ws = cls()
+        try:
+            from __main__ import __requires__
+        except ImportError:
+            # The main program does not list any requirements
+            return ws
+
+        # ensure the requirements are met
+        try:
+            ws.require(__requires__)
+        except VersionConflict:
+            return cls._build_from_requirements(__requires__)
+
+        return ws
+
+    @classmethod
+    def _build_from_requirements(cls, req_spec):
+        """
+        Build a working set from a requirement spec. Rewrites sys.path.
+        """
+        # try it without defaults already on sys.path
+        # by starting with an empty path
+        ws = cls([])
+        reqs = parse_requirements(req_spec)
+        dists = ws.resolve(reqs, Environment())
+        for dist in dists:
+            ws.add(dist)
+
+        # add any missing entries from sys.path
+        for entry in sys.path:
+            if entry not in ws.entries:
+                ws.add_entry(entry)
+
+        # then copy back to sys.path
+        sys.path[:] = ws.entries
+        return ws
+
+    def add_entry(self, entry):
+        """Add a path item to ``.entries``, finding any distributions on it
+
+        ``find_distributions(entry, True)`` is used to find distributions
+        corresponding to the path entry, and they are added.  `entry` is
+        always appended to ``.entries``, even if it is already present.
+        (This is because ``sys.path`` can contain the same value more than
+        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
+        equal ``sys.path``.)
+        """
+        self.entry_keys.setdefault(entry, [])
+        self.entries.append(entry)
+        for dist in find_distributions(entry, True):
+            self.add(dist, entry, False)
+
+    def __contains__(self, dist):
+        """True if `dist` is the active distribution for its project"""
+        return self.by_key.get(dist.key) == dist
+
+    def find(self, req):
+        """Find a distribution matching requirement `req`
+
+        If there is an active distribution for the requested project, this
+        returns it as long as it meets the version requirement specified by
+        `req`.  But, if there is an active distribution for the project and it
+        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
+        If there is no active distribution for the requested project, ``None``
+        is returned.
+        """
+        dist = self.by_key.get(req.key)
+
+        if dist is None:
+            canonical_key = self.normalized_to_canonical_keys.get(req.key)
+
+            if canonical_key is not None:
+                req.key = canonical_key
+                dist = self.by_key.get(canonical_key)
+
+        if dist is not None and dist not in req:
+            # XXX add more info
+            raise VersionConflict(dist, req)
+        return dist
+
+    def iter_entry_points(self, group, name=None):
+        """Yield entry point objects from `group` matching `name`
+
+        If `name` is None, yields all entry points in `group` from all
+        distributions in the working set, otherwise only ones matching
+        both `group` and `name` are yielded (in distribution order).
+        """
+        return (
+            entry
+            for dist in self
+            for entry in dist.get_entry_map(group).values()
+            if name is None or name == entry.name
+        )
+
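+    # --- Illustrative usage (not part of the original pkg_resources source) ---
+    # iter_entry_points() is a generator over the active distributions; the
+    # group name 'console_scripts' is just a common example:
+    #
+    #   for ep in working_set.iter_entry_points('console_scripts'):
+    #       print(ep.name, '->', ep.module_name)
+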
+    def run_script(self, requires, script_name):
+        """Locate distribution for `requires` and run `script_name` script"""
+        ns = sys._getframe(1).f_globals
+        name = ns['__name__']
+        ns.clear()
+        ns['__name__'] = name
+        self.require(requires)[0].run_script(script_name, ns)
+
+    def __iter__(self):
+        """Yield distributions for non-duplicate projects in the working set
+
+        The yield order is the order in which the items' path entries were
+        added to the working set.
+        """
+        seen = {}
+        for item in self.entries:
+            if item not in self.entry_keys:
+                # workaround a cache issue
+                continue
+
+            for key in self.entry_keys[item]:
+                if key not in seen:
+                    seen[key] = 1
+                    yield self.by_key[key]
+
+    def add(self, dist, entry=None, insert=True, replace=False):
+        """Add `dist` to working set, associated with `entry`
+
+        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
+        On exit from this routine, `entry` is added to the end of the working
+        set's ``.entries`` (if it wasn't already present).
+
+        `dist` is only added to the working set if it's for a project that
+        doesn't already have a distribution in the set, unless `replace=True`.
+        If it's added, any callbacks registered with the ``subscribe()`` method
+        will be called.
+        """
+        if insert:
+            dist.insert_on(self.entries, entry, replace=replace)
+
+        if entry is None:
+            entry = dist.location
+        keys = self.entry_keys.setdefault(entry, [])
+        keys2 = self.entry_keys.setdefault(dist.location, [])
+        if not replace and dist.key in self.by_key:
+            # ignore hidden distros
+            return
+
+        self.by_key[dist.key] = dist
+        normalized_name = packaging.utils.canonicalize_name(dist.key)
+        self.normalized_to_canonical_keys[normalized_name] = dist.key
+        if dist.key not in keys:
+            keys.append(dist.key)
+        if dist.key not in keys2:
+            keys2.append(dist.key)
+        self._added_new(dist)
+
+    def resolve(
+        self,
+        requirements,
+        env=None,
+        installer=None,
+        replace_conflicting=False,
+        extras=None,
+    ):
+        """List all distributions needed to (recursively) meet `requirements`
+
+        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
+        if supplied, should be an ``Environment`` instance.  If
+        not supplied, it defaults to all distributions available within any
+        entry or distribution in the working set.  `installer`, if supplied,
+        will be invoked with each requirement that cannot be met by an
+        already-installed distribution; it should return a ``Distribution`` or
+        ``None``.
+
+        Unless `replace_conflicting=True`, raises a VersionConflict exception
+        if any requirements are found on the path that have the correct name
+        but the wrong version.  Otherwise, if an `installer` is supplied it
+        will be invoked to obtain the correct version of the requirement and
+        activate it.
+
+        `extras` is a list of the extras to be used with these requirements.
+        This is important because extra requirements may look like `my_req;
+        extra = "my_extra"`, which would otherwise be interpreted as a purely
+        optional requirement.  Instead, we want to be able to assert that these
+        requirements are truly required.
+        """
+
+        # set up the stack
+        requirements = list(requirements)[::-1]
+        # set of processed requirements
+        processed = {}
+        # key -> dist
+        best = {}
+        to_activate = []
+
+        req_extras = _ReqExtras()
+
+        # Mapping of requirement to set of distributions that required it;
+        # useful for reporting info about conflicts.
+        required_by = collections.defaultdict(set)
+
+        while requirements:
+            # process dependencies breadth-first
+            req = requirements.pop(0)
+            if req in processed:
+                # Ignore cyclic or redundant dependencies
+                continue
+
+            if not req_extras.markers_pass(req, extras):
+                continue
+
+            dist = self._resolve_dist(
+                req, best, replace_conflicting, env, installer, required_by, to_activate
+            )
+
+            # push the new requirements onto the stack
+            new_requirements = dist.requires(req.extras)[::-1]
+            requirements.extend(new_requirements)
+
+            # Register the new requirements needed by req
+            for new_requirement in new_requirements:
+                required_by[new_requirement].add(req.project_name)
+                req_extras[new_requirement] = req.extras
+
+            processed[req] = True
+
+        # return list of distros to activate
+        return to_activate
+
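+    # --- Illustrative usage (not part of the original pkg_resources source) ---
+    # A minimal sketch of resolve(); the requirement string is an assumption,
+    # and resolution raises DistributionNotFound or VersionConflict on failure:
+    #
+    #   reqs = list(parse_requirements('packaging>=20.0'))
+    #   for dist in working_set.resolve(reqs):
+    #       print(dist.project_name, dist.version)
+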
+    def _resolve_dist(
+        self, req, best, replace_conflicting, env, installer, required_by, to_activate
+    ):
+        dist = best.get(req.key)
+        if dist is None:
+            # Find the best distribution and add it to the map
+            dist = self.by_key.get(req.key)
+            if dist is None or (dist not in req and replace_conflicting):
+                ws = self
+                if env is None:
+                    if dist is None:
+                        env = Environment(self.entries)
+                    else:
+                        # Use an empty environment and workingset to avoid
+                        # any further conflicts with the conflicting
+                        # distribution
+                        env = Environment([])
+                        ws = WorkingSet([])
+                dist = best[req.key] = env.best_match(
+                    req, ws, installer, replace_conflicting=replace_conflicting
+                )
+                if dist is None:
+                    requirers = required_by.get(req, None)
+                    raise DistributionNotFound(req, requirers)
+            to_activate.append(dist)
+        if dist not in req:
+            # Oops, the "best" so far conflicts with a dependency
+            dependent_req = required_by[req]
+            raise VersionConflict(dist, req).with_context(dependent_req)
+        return dist
+
+    def find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True):
+        """Find all activatable distributions in `plugin_env`
+
+        Example usage::
+
+            distributions, errors = working_set.find_plugins(
+                Environment(plugin_dirlist)
+            )
+            # add plugins+libs to sys.path
+            map(working_set.add, distributions)
+            # display errors
+            print('Could not load', errors)
+
+        The `plugin_env` should be an ``Environment`` instance that contains
+        only distributions that are in the project's "plugin directory" or
+        directories. The `full_env`, if supplied, should be an ``Environment``
+        that contains all currently-available distributions. If `full_env` is not
+        supplied, one is created automatically from the ``WorkingSet`` this
+        method is called on, which will typically mean that every directory on
+        ``sys.path`` will be scanned for distributions.
+
+        `installer` is a standard installer callback as used by the
+        ``resolve()`` method. The `fallback` flag indicates whether we should
+        attempt to resolve older versions of a plugin if the newest version
+        cannot be resolved.
+
+        This method returns a 2-tuple: (`distributions`, `error_info`), where
+        `distributions` is a list of the distributions found in `plugin_env`
+        that were loadable, along with any other distributions that are needed
+        to resolve their dependencies.  `error_info` is a dictionary mapping
+        unloadable plugin distributions to an exception instance describing the
+        error that occurred. Usually this will be a ``DistributionNotFound`` or
+        ``VersionConflict`` instance.
+        """
+
+        plugin_projects = list(plugin_env)
+        # scan project names in alphabetic order
+        plugin_projects.sort()
+
+        error_info = {}
+        distributions = {}
+
+        if full_env is None:
+            env = Environment(self.entries)
+            env += plugin_env
+        else:
+            env = full_env + plugin_env
+
+        shadow_set = self.__class__([])
+        # put all our entries in shadow_set
+        list(map(shadow_set.add, self))
+
+        for project_name in plugin_projects:
+            for dist in plugin_env[project_name]:
+                req = [dist.as_requirement()]
+
+                try:
+                    resolvees = shadow_set.resolve(req, env, installer)
+
+                except ResolutionError as v:
+                    # save error info
+                    error_info[dist] = v
+                    if fallback:
+                        # try the next older version of project
+                        continue
+                    else:
+                        # give up on this project, keep going
+                        break
+
+                else:
+                    list(map(shadow_set.add, resolvees))
+                    distributions.update(dict.fromkeys(resolvees))
+
+                    # success, no need to try any more versions of this project
+                    break
+
+        distributions = list(distributions)
+        distributions.sort()
+
+        return distributions, error_info
+
+    def require(self, *requirements):
+        """Ensure that distributions matching `requirements` are activated
+
+        `requirements` must be a string or a (possibly-nested) sequence
+        thereof, specifying the distributions and versions required.  The
+        return value is a sequence of the distributions that needed to be
+        activated to fulfill the requirements; all relevant distributions are
+        included, even if they were already activated in this working set.
+        """
+        needed = self.resolve(parse_requirements(requirements))
+
+        for dist in needed:
+            self.add(dist)
+
+        return needed
+
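+    # --- Illustrative usage (not part of the original pkg_resources source) ---
+    # require() resolves and activates in one step; the requirement string is
+    # an assumption:
+    #
+    #   activated = working_set.require('packaging>=20.0')
+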
+    def subscribe(self, callback, existing=True):
+        """Invoke `callback` for all distributions
+
+        If `existing=True` (default), call on all existing ones, as well.
+        """
+        if callback in self.callbacks:
+            return
+        self.callbacks.append(callback)
+        if not existing:
+            return
+        for dist in self:
+            callback(dist)
+
+    def _added_new(self, dist):
+        for callback in self.callbacks:
+            callback(dist)
+
+    def __getstate__(self):
+        return (
+            self.entries[:],
+            self.entry_keys.copy(),
+            self.by_key.copy(),
+            self.normalized_to_canonical_keys.copy(),
+            self.callbacks[:],
+        )
+
+    def __setstate__(self, e_k_b_n_c):
+        entries, keys, by_key, normalized_to_canonical_keys, callbacks = e_k_b_n_c
+        self.entries = entries[:]
+        self.entry_keys = keys.copy()
+        self.by_key = by_key.copy()
+        self.normalized_to_canonical_keys = normalized_to_canonical_keys.copy()
+        self.callbacks = callbacks[:]
+
+
+class _ReqExtras(dict):
+    """
+    Map each requirement to the extras that demanded it.
+    """
+
+    def markers_pass(self, req, extras=None):
+        """
+        Evaluate markers for req against each extra that
+        demanded it.
+
+        Return False if the req has a marker and fails
+        evaluation. Otherwise, return True.
+        """
+        extra_evals = (
+            req.marker.evaluate({'extra': extra})
+            for extra in self.get(req, ()) + (extras or (None,))
+        )
+        return not req.marker or any(extra_evals)
+
+
+class Environment:
+    """Searchable snapshot of distributions on a search path"""
+
+    def __init__(
+        self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR
+    ):
+        """Snapshot distributions available on a search path
+
+        Any distributions found on `search_path` are added to the environment.
+        `search_path` should be a sequence of ``sys.path`` items.  If not
+        supplied, ``sys.path`` is used.
+
+        `platform` is an optional string specifying the name of the platform
+        that platform-specific distributions must be compatible with.  If
+        unspecified, it defaults to the current platform.  `python` is an
+        optional string naming the desired version of Python (e.g. ``'3.6'``);
+        it defaults to the current version.
+
+        You may explicitly set `platform` (and/or `python`) to ``None`` if you
+        wish to map *all* distributions, not just those compatible with the
+        running platform or Python version.
+        """
+        self._distmap = {}
+        self.platform = platform
+        self.python = python
+        self.scan(search_path)
+
+    def can_add(self, dist):
+        """Is distribution `dist` acceptable for this environment?
+
+        The distribution must match the platform and python version
+        requirements specified when this environment was created, or False
+        is returned.
+        """
+        py_compat = (
+            self.python is None
+            or dist.py_version is None
+            or dist.py_version == self.python
+        )
+        return py_compat and compatible_platforms(dist.platform, self.platform)
+
+    def remove(self, dist):
+        """Remove `dist` from the environment"""
+        self._distmap[dist.key].remove(dist)
+
+    def scan(self, search_path=None):
+        """Scan `search_path` for distributions usable in this environment
+
+        Any distributions found are added to the environment.
+        `search_path` should be a sequence of ``sys.path`` items.  If not
+        supplied, ``sys.path`` is used.  Only distributions conforming to
+        the platform/python version defined at initialization are added.
+        """
+        if search_path is None:
+            search_path = sys.path
+
+        for item in search_path:
+            for dist in find_distributions(item):
+                self.add(dist)
+
+    def __getitem__(self, project_name):
+        """Return a newest-to-oldest list of distributions for `project_name`
+
+        Uses case-insensitive `project_name` comparison, assuming all the
+        project's distributions use their project's name converted to all
+        lowercase as their key.
+
+        """
+        distribution_key = project_name.lower()
+        return self._distmap.get(distribution_key, [])
+
+    def add(self, dist):
+        """Add `dist` if we ``can_add()`` it and it has not already been added"""
+        if self.can_add(dist) and dist.has_version():
+            dists = self._distmap.setdefault(dist.key, [])
+            if dist not in dists:
+                dists.append(dist)
+                dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
+
+    def best_match(self, req, working_set, installer=None, replace_conflicting=False):
+        """Find distribution best matching `req` and usable on `working_set`
+
+        This calls the ``find(req)`` method of the `working_set` to see if a
+        suitable distribution is already active.  (This may raise
+        ``VersionConflict`` if an unsuitable version of the project is already
+        active in the specified `working_set`.)  If a suitable distribution
+        isn't active, this method returns the newest distribution in the
+        environment that meets the ``Requirement`` in `req`.  If no suitable
+        distribution is found, and `installer` is supplied, then the result of
+        calling the environment's ``obtain(req, installer)`` method will be
+        returned.
+        """
+        try:
+            dist = working_set.find(req)
+        except VersionConflict:
+            if not replace_conflicting:
+                raise
+            dist = None
+        if dist is not None:
+            return dist
+        for dist in self[req.key]:
+            if dist in req:
+                return dist
+        # try to download/install
+        return self.obtain(req, installer)
+
+    def obtain(self, requirement, installer=None):
+        """Obtain a distribution matching `requirement` (e.g. via download)
+
+        Obtain a distro that matches requirement (e.g. via download).  In the
+        base ``Environment`` class, this routine just returns
+        ``installer(requirement)``, unless `installer` is None, in which case
+        None is returned instead.  This method is a hook that allows subclasses
+        to attempt other ways of obtaining a distribution before falling back
+        to the `installer` argument."""
+        if installer is not None:
+            return installer(requirement)
+
+    def __iter__(self):
+        """Yield the unique project names of the available distributions"""
+        for key in self._distmap.keys():
+            if self[key]:
+                yield key
+
+    def __iadd__(self, other):
+        """In-place addition of a distribution or environment"""
+        if isinstance(other, Distribution):
+            self.add(other)
+        elif isinstance(other, Environment):
+            for project in other:
+                for dist in other[project]:
+                    self.add(dist)
+        else:
+            raise TypeError("Can't add %r to environment" % (other,))
+        return self
+
+    def __add__(self, other):
+        """Add an environment or distribution to an environment"""
+        new = self.__class__([], platform=None, python=None)
+        for env in self, other:
+            new += env
+        return new
+
+
+# XXX backward compatibility
+AvailableDistributions = Environment
+
+
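+# --- Illustrative usage (not part of the original pkg_resources source) ---
+# A sketch of building an Environment snapshot and querying it; the search
+# path and project name are assumptions:
+#
+#   env = Environment(['/path/to/plugins'])   # Environment() scans sys.path
+#   candidates = env['some-project']          # newest-to-oldest, [] if none
+#   dist = env.best_match(Requirement.parse('some-project>=1.0'), working_set)
+
+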
+class ExtractionError(RuntimeError):
+    """An error occurred extracting a resource
+
+    The following attributes are available from instances of this exception:
+
+    manager
+        The resource manager that raised this exception
+
+    cache_path
+        The base directory for resource extraction
+
+    original_error
+        The exception instance that caused extraction to fail
+    """
+
+
+class ResourceManager:
+    """Manage resource extraction and packages"""
+
+    extraction_path = None
+
+    def __init__(self):
+        self.cached_files = {}
+
+    def resource_exists(self, package_or_requirement, resource_name):
+        """Does the named resource exist?"""
+        return get_provider(package_or_requirement).has_resource(resource_name)
+
+    def resource_isdir(self, package_or_requirement, resource_name):
+        """Is the named resource an existing directory?"""
+        return get_provider(package_or_requirement).resource_isdir(resource_name)
+
+    def resource_filename(self, package_or_requirement, resource_name):
+        """Return a true filesystem path for specified resource"""
+        return get_provider(package_or_requirement).get_resource_filename(
+            self, resource_name
+        )
+
+    def resource_stream(self, package_or_requirement, resource_name):
+        """Return a readable file-like object for specified resource"""
+        return get_provider(package_or_requirement).get_resource_stream(
+            self, resource_name
+        )
+
+    def resource_string(self, package_or_requirement, resource_name):
+        """Return specified resource as a string"""
+        return get_provider(package_or_requirement).get_resource_string(
+            self, resource_name
+        )
+
+    def resource_listdir(self, package_or_requirement, resource_name):
+        """List the contents of the named resource directory"""
+        return get_provider(package_or_requirement).resource_listdir(resource_name)
+
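+    # --- Illustrative usage (not part of the original pkg_resources source) ---
+    # The module-level resource_* helpers are bound methods of a shared
+    # ResourceManager; the package and resource names below are assumptions:
+    #
+    #   data = resource_string('mypkg', 'data/config.json')    # bytes
+    #   path = resource_filename('mypkg', 'data/config.json')  # real filesystem path
+    #   with resource_stream('mypkg', 'data/config.json') as stream:
+    #       ...
+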
+    def extraction_error(self):
+        """Give an error message for problems extracting file(s)"""
+
+        old_exc = sys.exc_info()[1]
+        cache_path = self.extraction_path or get_default_cache()
+
+        tmpl = textwrap.dedent(
+            """
+            Can't extract file(s) to egg cache
+
+            The following error occurred while trying to extract file(s)
+            to the Python egg cache:
+
+              {old_exc}
+
+            The Python egg cache directory is currently set to:
+
+              {cache_path}
+
+            Perhaps your account does not have write access to this directory?
+            You can change the cache directory by setting the PYTHON_EGG_CACHE
+            environment variable to point to an accessible directory.
+            """
+        ).lstrip()
+        err = ExtractionError(tmpl.format(**locals()))
+        err.manager = self
+        err.cache_path = cache_path
+        err.original_error = old_exc
+        raise err
+
+    def get_cache_path(self, archive_name, names=()):
+        """Return absolute location in cache for `archive_name` and `names`
+
+        The parent directory of the resulting path will be created if it does
+        not already exist.  `archive_name` should be the base filename of the
+        enclosing egg (which may not be the name of the enclosing zipfile!),
+        including its ".egg" extension.  `names`, if provided, should be a
+        sequence of path name parts "under" the egg's extraction location.
+
+        This method should only be called by resource providers that need to
+        obtain an extraction location, and only for names they intend to
+        extract, as it tracks the generated names for possible cleanup later.
+        """
+        extract_path = self.extraction_path or get_default_cache()
+        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
+        try:
+            _bypass_ensure_directory(target_path)
+        except Exception:
+            self.extraction_error()
+
+        self._warn_unsafe_extraction_path(extract_path)
+
+        self.cached_files[target_path] = 1
+        return target_path
+
+    @staticmethod
+    def _warn_unsafe_extraction_path(path):
+        """
+        If the default extraction path is overridden and set to an insecure
+        location, such as /tmp, it opens up an opportunity for an attacker to
+        replace an extracted file with an unauthorized payload. Warn the user
+        if a known insecure location is used.
+
+        See Distribute #375 for more details.
+        """
+        if os.name == 'nt' and not path.startswith(os.environ['windir']):
+            # On Windows, permissions are generally restrictive by default
+            #  and temp directories are not writable by other users, so
+            #  bypass the warning.
+            return
+        mode = os.stat(path).st_mode
+        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
+            msg = (
+                "Extraction path is writable by group/others "
+                "and vulnerable to attack when "
+                "used with get_resource_filename ({path}). "
+                "Consider a more secure "
+                "location (set with .set_extraction_path or the "
+                "PYTHON_EGG_CACHE environment variable)."
+            ).format(**locals())
+            warnings.warn(msg, UserWarning)
+
+    def postprocess(self, tempname, filename):
+        """Perform any platform-specific postprocessing of `tempname`
+
+        This is where Mac header rewrites should be done; other platforms don't
+        have anything special they should do.
+
+        Resource providers should call this method ONLY after successfully
+        extracting a compressed resource.  They must NOT call it on resources
+        that are already in the filesystem.
+
+        `tempname` is the current (temporary) name of the file, and `filename`
+        is the name it will be renamed to by the caller after this routine
+        returns.
+        """
+
+        if os.name == 'posix':
+            # Make the resource executable
+            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
+            os.chmod(tempname, mode)
+
+    def set_extraction_path(self, path):
+        """Set the base path where resources will be extracted to, if needed.
+
+        If you do not call this routine before any extractions take place, the
+        path defaults to the return value of ``get_default_cache()``.  (Which
+        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
+        platform-specific fallbacks.  See that routine's documentation for more
+        details.)
+
+        Resources are extracted to subdirectories of this path based upon
+        information given by the ``IResourceProvider``.  You may set this to a
+        temporary directory, but then you must call ``cleanup_resources()`` to
+        delete the extracted files when done.  There is no guarantee that
+        ``cleanup_resources()`` will be able to remove all extracted files.
+
+        (Note: you may not change the extraction path for a given resource
+        manager once resources have been extracted, unless you first call
+        ``cleanup_resources()``.)
+        """
+        if self.cached_files:
+            raise ValueError("Can't change extraction path, files already extracted")
+
+        self.extraction_path = path
+
+    def cleanup_resources(self, force=False):
+        """
+        Delete all extracted resource files and directories, returning a list
+        of the file and directory names that could not be successfully removed.
+        This function does not have any concurrency protection, so it should
+        generally only be called when the extraction path is a temporary
+        directory exclusive to a single process.  This method is not
+        automatically called; you must call it explicitly or register it as an
+        ``atexit`` function if you wish to ensure cleanup of a temporary
+        directory used for extractions.
+        """
+        # XXX
+
+
+def get_default_cache():
+    """
+    Return the ``PYTHON_EGG_CACHE`` environment variable
+    or a platform-relevant user cache dir for an app
+    named "Python-Eggs".
+    """
+    return os.environ.get('PYTHON_EGG_CACHE') or platformdirs.user_cache_dir(
+        appname='Python-Eggs'
+    )
+
+
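+# --- Illustrative behaviour (not part of the original pkg_resources source) ---
+# The extraction cache can be pinned through the environment, for example:
+#
+#   os.environ['PYTHON_EGG_CACHE'] = '/var/cache/my-app/eggs'
+#   get_default_cache()   # -> '/var/cache/my-app/eggs'
+
+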
+def safe_name(name):
+    """Convert an arbitrary string to a standard distribution name
+
+    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
+    """
+    return re.sub('[^A-Za-z0-9.]+', '-', name)
+
+
+def safe_version(version):
+    """
+    Convert an arbitrary string to a standard version string
+    """
+    try:
+        # normalize the version
+        return str(packaging.version.Version(version))
+    except packaging.version.InvalidVersion:
+        version = version.replace(' ', '.')
+        return re.sub('[^A-Za-z0-9.]+', '-', version)
+
+
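+# --- Illustrative results (not part of the original pkg_resources source) ---
+#   safe_name('my.project?name')   # -> 'my.project-name'
+#   safe_version('1.0-beta')       # -> '1.0b0'   (valid PEP 440, normalized)
+#   safe_version('1.0 new')        # -> '1.0.new' (invalid, fallback substitution)
+
+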
+def _forgiving_version(version):
+    """Fallback when ``safe_version`` is not safe enough
+    >>> parse_version(_forgiving_version('0.23ubuntu1'))
+    
+    >>> parse_version(_forgiving_version('0.23-'))
+    
+    >>> parse_version(_forgiving_version('0.-_'))
+    
+    >>> parse_version(_forgiving_version('42.+?1'))
+    
+    >>> parse_version(_forgiving_version('hello world'))
+    
+    """
+    version = version.replace(' ', '.')
+    match = _PEP440_FALLBACK.search(version)
+    if match:
+        safe = match["safe"]
+        rest = version[len(safe):]
+    else:
+        safe = "0"
+        rest = version
+    local = f"sanitized.{_safe_segment(rest)}".strip(".")
+    return f"{safe}.dev0+{local}"
+
+
+def _safe_segment(segment):
+    """Convert an arbitrary string into a safe segment"""
+    segment = re.sub('[^A-Za-z0-9.]+', '-', segment)
+    segment = re.sub('-[^A-Za-z0-9]+', '-', segment)
+    return re.sub(r'\.[^A-Za-z0-9]+', '.', segment).strip(".-")
+
+
+def safe_extra(extra):
+    """Convert an arbitrary string to a standard 'extra' name
+
+    Any runs of non-alphanumeric characters are replaced with a single '_',
+    and the result is always lowercased.
+    """
+    return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower()
+
+
+def to_filename(name):
+    """Convert a project or version name to its filename-escaped form
+
+    Any '-' characters are currently replaced with '_'.
+    """
+    return name.replace('-', '_')
+
+
+def invalid_marker(text):
+    """
+    Validate text as a PEP 508 environment marker; return an exception
+    if invalid or False otherwise.
+    """
+    try:
+        evaluate_marker(text)
+    except SyntaxError as e:
+        e.filename = None
+        e.lineno = None
+        return e
+    return False
+
+
+def evaluate_marker(text, extra=None):
+    """
+    Evaluate a PEP 508 environment marker.
+    Return a boolean indicating the marker result in this environment.
+    Raise SyntaxError if marker is invalid.
+
+    This implementation uses the 'pyparsing' module.
+    """
+    try:
+        marker = packaging.markers.Marker(text)
+        return marker.evaluate()
+    except packaging.markers.InvalidMarker as e:
+        raise SyntaxError(e) from e
+
+
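+# --- Illustrative usage (not part of the original pkg_resources source) ---
+#   evaluate_marker('python_version >= "3.8"')   # -> True on recent interpreters
+#   invalid_marker('python_version >=')          # -> SyntaxError instance
+#   invalid_marker('os_name == "posix"')         # -> False (marker is valid)
+
+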
+class NullProvider:
+    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
+
+    egg_name = None
+    egg_info = None
+    loader = None
+
+    def __init__(self, module):
+        self.loader = getattr(module, '__loader__', None)
+        self.module_path = os.path.dirname(getattr(module, '__file__', ''))
+
+    def get_resource_filename(self, manager, resource_name):
+        return self._fn(self.module_path, resource_name)
+
+    def get_resource_stream(self, manager, resource_name):
+        return io.BytesIO(self.get_resource_string(manager, resource_name))
+
+    def get_resource_string(self, manager, resource_name):
+        return self._get(self._fn(self.module_path, resource_name))
+
+    def has_resource(self, resource_name):
+        return self._has(self._fn(self.module_path, resource_name))
+
+    def _get_metadata_path(self, name):
+        return self._fn(self.egg_info, name)
+
+    def has_metadata(self, name):
+        if not self.egg_info:
+            return self.egg_info
+
+        path = self._get_metadata_path(name)
+        return self._has(path)
+
+    def get_metadata(self, name):
+        if not self.egg_info:
+            return ""
+        path = self._get_metadata_path(name)
+        value = self._get(path)
+        try:
+            return value.decode('utf-8')
+        except UnicodeDecodeError as exc:
+            # Include the path in the error message to simplify
+            # troubleshooting, and without changing the exception type.
+            exc.reason += ' in {} file at path: {}'.format(name, path)
+            raise
+
+    def get_metadata_lines(self, name):
+        return yield_lines(self.get_metadata(name))
+
+    def resource_isdir(self, resource_name):
+        return self._isdir(self._fn(self.module_path, resource_name))
+
+    def metadata_isdir(self, name):
+        return self.egg_info and self._isdir(self._fn(self.egg_info, name))
+
+    def resource_listdir(self, resource_name):
+        return self._listdir(self._fn(self.module_path, resource_name))
+
+    def metadata_listdir(self, name):
+        if self.egg_info:
+            return self._listdir(self._fn(self.egg_info, name))
+        return []
+
+    def run_script(self, script_name, namespace):
+        script = 'scripts/' + script_name
+        if not self.has_metadata(script):
+            raise ResolutionError(
+                "Script {script!r} not found in metadata at {self.egg_info!r}".format(
+                    **locals()
+                ),
+            )
+        script_text = self.get_metadata(script).replace('\r\n', '\n')
+        script_text = script_text.replace('\r', '\n')
+        script_filename = self._fn(self.egg_info, script)
+        namespace['__file__'] = script_filename
+        if os.path.exists(script_filename):
+            with open(script_filename) as fid:
+                source = fid.read()
+            code = compile(source, script_filename, 'exec')
+            exec(code, namespace, namespace)
+        else:
+            from linecache import cache
+
+            cache[script_filename] = (
+                len(script_text),
+                0,
+                script_text.split('\n'),
+                script_filename,
+            )
+            script_code = compile(script_text, script_filename, 'exec')
+            exec(script_code, namespace, namespace)
+
+    def _has(self, path):
+        raise NotImplementedError(
+            "Can't perform this operation for unregistered loader type"
+        )
+
+    def _isdir(self, path):
+        raise NotImplementedError(
+            "Can't perform this operation for unregistered loader type"
+        )
+
+    def _listdir(self, path):
+        raise NotImplementedError(
+            "Can't perform this operation for unregistered loader type"
+        )
+
+    def _fn(self, base, resource_name):
+        self._validate_resource_path(resource_name)
+        if resource_name:
+            return os.path.join(base, *resource_name.split('/'))
+        return base
+
+    @staticmethod
+    def _validate_resource_path(path):
+        """
+        Validate the resource paths according to the docs.
+        https://setuptools.pypa.io/en/latest/pkg_resources.html#basic-resource-access
+
+        >>> warned = getfixture('recwarn')
+        >>> warnings.simplefilter('always')
+        >>> vrp = NullProvider._validate_resource_path
+        >>> vrp('foo/bar.txt')
+        >>> bool(warned)
+        False
+        >>> vrp('../foo/bar.txt')
+        >>> bool(warned)
+        True
+        >>> warned.clear()
+        >>> vrp('/foo/bar.txt')
+        >>> bool(warned)
+        True
+        >>> vrp('foo/../../bar.txt')
+        >>> bool(warned)
+        True
+        >>> warned.clear()
+        >>> vrp('foo/f../bar.txt')
+        >>> bool(warned)
+        False
+
+        Windows path separators are straight-up disallowed.
+        >>> vrp(r'\\foo/bar.txt')
+        Traceback (most recent call last):
+        ...
+        ValueError: Use of .. or absolute path in a resource path \
+is not allowed.
+
+        >>> vrp(r'C:\\foo/bar.txt')
+        Traceback (most recent call last):
+        ...
+        ValueError: Use of .. or absolute path in a resource path \
+is not allowed.
+
+        Blank values are allowed
+
+        >>> vrp('')
+        >>> bool(warned)
+        False
+
+        Non-string values are not.
+
+        >>> vrp(None)
+        Traceback (most recent call last):
+        ...
+        AttributeError: ...
+        """
+        invalid = (
+            os.path.pardir in path.split(posixpath.sep)
+            or posixpath.isabs(path)
+            or ntpath.isabs(path)
+        )
+        if not invalid:
+            return
+
+        msg = "Use of .. or absolute path in a resource path is not allowed."
+
+        # Aggressively disallow Windows absolute paths
+        if ntpath.isabs(path) and not posixpath.isabs(path):
+            raise ValueError(msg)
+
+        # for compatibility, warn; in future
+        # raise ValueError(msg)
+        issue_warning(
+            msg[:-1] + " and will raise exceptions in a future release.",
+            DeprecationWarning,
+        )
+
+    def _get(self, path):
+        if hasattr(self.loader, 'get_data'):
+            return self.loader.get_data(path)
+        raise NotImplementedError(
+            "Can't perform this operation for loaders without 'get_data()'"
+        )
+
+
+register_loader_type(object, NullProvider)
+
+
+def _parents(path):
+    """
+    yield all parents of path including path
+    """
+    last = None
+    while path != last:
+        yield path
+        last = path
+        path, _ = os.path.split(path)
+
+
+class EggProvider(NullProvider):
+    """Provider based on a virtual filesystem"""
+
+    def __init__(self, module):
+        super().__init__(module)
+        self._setup_prefix()
+
+    def _setup_prefix(self):
+        # Assume that metadata may be nested inside a "basket"
+        # of multiple eggs and use module_path instead of .archive.
+        eggs = filter(_is_egg_path, _parents(self.module_path))
+        egg = next(eggs, None)
+        egg and self._set_egg(egg)
+
+    def _set_egg(self, path):
+        self.egg_name = os.path.basename(path)
+        self.egg_info = os.path.join(path, 'EGG-INFO')
+        self.egg_root = path
+
+
+class DefaultProvider(EggProvider):
+    """Provides access to package resources in the filesystem"""
+
+    def _has(self, path):
+        return os.path.exists(path)
+
+    def _isdir(self, path):
+        return os.path.isdir(path)
+
+    def _listdir(self, path):
+        return os.listdir(path)
+
+    def get_resource_stream(self, manager, resource_name):
+        return open(self._fn(self.module_path, resource_name), 'rb')
+
+    def _get(self, path):
+        with open(path, 'rb') as stream:
+            return stream.read()
+
+    @classmethod
+    def _register(cls):
+        loader_names = (
+            'SourceFileLoader',
+            'SourcelessFileLoader',
+        )
+        for name in loader_names:
+            loader_cls = getattr(importlib_machinery, name, type(None))
+            register_loader_type(loader_cls, cls)
+
+
+DefaultProvider._register()
+
+
+class EmptyProvider(NullProvider):
+    """Provider that returns nothing for all requests"""
+
+    module_path = None
+
+    _isdir = _has = lambda self, path: False
+
+    def _get(self, path):
+        return ''
+
+    def _listdir(self, path):
+        return []
+
+    def __init__(self):
+        pass
+
+
+empty_provider = EmptyProvider()
+
+
+class ZipManifests(dict):
+    """
+    zip manifest builder
+    """
+
+    @classmethod
+    def build(cls, path):
+        """
+        Build a dictionary similar to the zipimport directory
+        caches, except instead of tuples, store ZipInfo objects.
+
+        Use a platform-specific path separator (os.sep) for the path keys
+        for compatibility with pypy on Windows.
+        """
+        with zipfile.ZipFile(path) as zfile:
+            items = (
+                (
+                    name.replace('/', os.sep),
+                    zfile.getinfo(name),
+                )
+                for name in zfile.namelist()
+            )
+            return dict(items)
+
+    load = build
+
+
+class MemoizedZipManifests(ZipManifests):
+    """
+    Memoized zipfile manifests.
+    """
+
+    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')
+
+    def load(self, path):
+        """
+        Load a manifest at path or return a suitable manifest already loaded.
+        """
+        path = os.path.normpath(path)
+        mtime = os.stat(path).st_mtime
+
+        if path not in self or self[path].mtime != mtime:
+            manifest = self.build(path)
+            self[path] = self.manifest_mod(manifest, mtime)
+
+        return self[path].manifest
+
+
+class ZipProvider(EggProvider):
+    """Resource support for zips and eggs"""
+
+    eagers = None
+    _zip_manifests = MemoizedZipManifests()
+
+    def __init__(self, module):
+        super().__init__(module)
+        self.zip_pre = self.loader.archive + os.sep
+
+    def _zipinfo_name(self, fspath):
+        # Convert a virtual filename (full path to file) into a zipfile subpath
+        # usable with the zipimport directory cache for our target archive
+        fspath = fspath.rstrip(os.sep)
+        if fspath == self.loader.archive:
+            return ''
+        if fspath.startswith(self.zip_pre):
+            return fspath[len(self.zip_pre) :]
+        raise AssertionError("%s is not a subpath of %s" % (fspath, self.zip_pre))
+
+    def _parts(self, zip_path):
+        # Convert a zipfile subpath into an egg-relative path part list.
+        # pseudo-fs path
+        fspath = self.zip_pre + zip_path
+        if fspath.startswith(self.egg_root + os.sep):
+            return fspath[len(self.egg_root) + 1 :].split(os.sep)
+        raise AssertionError("%s is not a subpath of %s" % (fspath, self.egg_root))
+
+    @property
+    def zipinfo(self):
+        return self._zip_manifests.load(self.loader.archive)
+
+    def get_resource_filename(self, manager, resource_name):
+        if not self.egg_name:
+            raise NotImplementedError(
+                "resource_filename() only supported for .egg, not .zip"
+            )
+        # no need to lock for extraction, since we use temp names
+        zip_path = self._resource_to_zip(resource_name)
+        eagers = self._get_eager_resources()
+        if '/'.join(self._parts(zip_path)) in eagers:
+            for name in eagers:
+                self._extract_resource(manager, self._eager_to_zip(name))
+        return self._extract_resource(manager, zip_path)
+
+    @staticmethod
+    def _get_date_and_size(zip_stat):
+        size = zip_stat.file_size
+        # ymdhms+wday, yday, dst
+        date_time = zip_stat.date_time + (0, 0, -1)
+        # 1980 offset already done
+        timestamp = time.mktime(date_time)
+        return timestamp, size
+
+    # FIXME: 'ZipProvider._extract_resource' is too complex (12)
+    def _extract_resource(self, manager, zip_path):  # noqa: C901
+        if zip_path in self._index():
+            for name in self._index()[zip_path]:
+                last = self._extract_resource(manager, os.path.join(zip_path, name))
+            # return the extracted directory name
+            return os.path.dirname(last)
+
+        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
+
+        if not WRITE_SUPPORT:
+            raise IOError(
+                '"os.rename" and "os.unlink" are not supported ' 'on this platform'
+            )
+        try:
+            real_path = manager.get_cache_path(self.egg_name, self._parts(zip_path))
+
+            if self._is_current(real_path, zip_path):
+                return real_path
+
+            outf, tmpnam = _mkstemp(
+                ".$extract",
+                dir=os.path.dirname(real_path),
+            )
+            os.write(outf, self.loader.get_data(zip_path))
+            os.close(outf)
+            utime(tmpnam, (timestamp, timestamp))
+            manager.postprocess(tmpnam, real_path)
+
+            try:
+                rename(tmpnam, real_path)
+
+            except os.error:
+                if os.path.isfile(real_path):
+                    if self._is_current(real_path, zip_path):
+                        # the file became current since it was checked above,
+                        #  so proceed.
+                        return real_path
+                    # Windows, del old file and retry
+                    elif os.name == 'nt':
+                        unlink(real_path)
+                        rename(tmpnam, real_path)
+                        return real_path
+                raise
+
+        except os.error:
+            # report a user-friendly error
+            manager.extraction_error()
+
+        return real_path
+
+    def _is_current(self, file_path, zip_path):
+        """
+        Return True if the file_path is current for this zip_path
+        """
+        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
+        if not os.path.isfile(file_path):
+            return False
+        stat = os.stat(file_path)
+        if stat.st_size != size or stat.st_mtime != timestamp:
+            return False
+        # check that the contents match
+        zip_contents = self.loader.get_data(zip_path)
+        with open(file_path, 'rb') as f:
+            file_contents = f.read()
+        return zip_contents == file_contents
+
+    def _get_eager_resources(self):
+        if self.eagers is None:
+            eagers = []
+            for name in ('native_libs.txt', 'eager_resources.txt'):
+                if self.has_metadata(name):
+                    eagers.extend(self.get_metadata_lines(name))
+            self.eagers = eagers
+        return self.eagers
+
+    def _index(self):
+        try:
+            return self._dirindex
+        except AttributeError:
+            ind = {}
+            for path in self.zipinfo:
+                parts = path.split(os.sep)
+                while parts:
+                    parent = os.sep.join(parts[:-1])
+                    if parent in ind:
+                        ind[parent].append(parts[-1])
+                        break
+                    else:
+                        ind[parent] = [parts.pop()]
+            self._dirindex = ind
+            return ind
+
+    def _has(self, fspath):
+        zip_path = self._zipinfo_name(fspath)
+        return zip_path in self.zipinfo or zip_path in self._index()
+
+    def _isdir(self, fspath):
+        return self._zipinfo_name(fspath) in self._index()
+
+    def _listdir(self, fspath):
+        return list(self._index().get(self._zipinfo_name(fspath), ()))
+
+    def _eager_to_zip(self, resource_name):
+        return self._zipinfo_name(self._fn(self.egg_root, resource_name))
+
+    def _resource_to_zip(self, resource_name):
+        return self._zipinfo_name(self._fn(self.module_path, resource_name))
+
+
+register_loader_type(zipimport.zipimporter, ZipProvider)
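+
+# Editorial note (not part of pkg_resources): a minimal sketch of how the
+# ZipProvider registered above is typically exercised. ``my_pkg`` and the
+# resource name are hypothetical; ``resource_filename`` is the public API that
+# ends up calling ``ZipProvider._extract_resource`` for zipped eggs, caching
+# the member on disk and returning the cached path.
+#
+#     >>> import pkg_resources
+#     >>> path = pkg_resources.resource_filename('my_pkg', 'data/config.json')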
+
+
+class FileMetadata(EmptyProvider):
+    """Metadata handler for standalone PKG-INFO files
+
+    Usage::
+
+        metadata = FileMetadata("/path/to/PKG-INFO")
+
+    This provider rejects all data and metadata requests except for PKG-INFO,
+    which is treated as existing, and will be the contents of the file at
+    the provided location.
+    """
+
+    def __init__(self, path):
+        self.path = path
+
+    def _get_metadata_path(self, name):
+        return self.path
+
+    def has_metadata(self, name):
+        return name == 'PKG-INFO' and os.path.isfile(self.path)
+
+    def get_metadata(self, name):
+        if name != 'PKG-INFO':
+            raise KeyError("No metadata except PKG-INFO is available")
+
+        with io.open(self.path, encoding='utf-8', errors="replace") as f:
+            metadata = f.read()
+        self._warn_on_replacement(metadata)
+        return metadata
+
+    def _warn_on_replacement(self, metadata):
+        replacement_char = '�'
+        if replacement_char in metadata:
+            tmpl = "{self.path} could not be properly decoded in UTF-8"
+            msg = tmpl.format(**locals())
+            warnings.warn(msg)
+
+    def get_metadata_lines(self, name):
+        return yield_lines(self.get_metadata(name))
+
+
+class PathMetadata(DefaultProvider):
+    """Metadata provider for egg directories
+
+    Usage::
+
+        # Development eggs:
+
+        egg_info = "/path/to/PackageName.egg-info"
+        base_dir = os.path.dirname(egg_info)
+        metadata = PathMetadata(base_dir, egg_info)
+        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
+        dist = Distribution(base_dir, project_name=dist_name, metadata=metadata)
+
+        # Unpacked egg directories:
+
+        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
+        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
+        dist = Distribution.from_filename(egg_path, metadata=metadata)
+    """
+
+    def __init__(self, path, egg_info):
+        self.module_path = path
+        self.egg_info = egg_info
+
+
+class EggMetadata(ZipProvider):
+    """Metadata provider for .egg files"""
+
+    def __init__(self, importer):
+        """Create a metadata provider from a zipimporter"""
+
+        self.zip_pre = importer.archive + os.sep
+        self.loader = importer
+        if importer.prefix:
+            self.module_path = os.path.join(importer.archive, importer.prefix)
+        else:
+            self.module_path = importer.archive
+        self._setup_prefix()
+
+
+_declare_state('dict', _distribution_finders={})
+
+
+def register_finder(importer_type, distribution_finder):
+    """Register `distribution_finder` to find distributions in sys.path items
+
+    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
+    handler), and `distribution_finder` is a callable that, passed a path
+    item and the importer instance, yields ``Distribution`` instances found on
+    that path item.  See ``pkg_resources.find_on_path`` for an example."""
+    _distribution_finders[importer_type] = distribution_finder
+
+
+def find_distributions(path_item, only=False):
+    """Yield distributions accessible via `path_item`"""
+    importer = get_importer(path_item)
+    finder = _find_adapter(_distribution_finders, importer)
+    return finder(importer, path_item, only)
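+
+# Illustrative usage (hypothetical path): enumerate the distributions that the
+# registered finders can see on a single sys.path entry.
+#
+#     >>> for dist in find_distributions('/path/to/site-packages'):
+#     ...     print(dist.project_name, dist.version)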
+
+
+def find_eggs_in_zip(importer, path_item, only=False):
+    """
+    Find eggs in zip files; possibly multiple nested eggs.
+    """
+    if importer.archive.endswith('.whl'):
+        # wheels are not supported with this finder
+        # they don't have PKG-INFO metadata, and won't ever contain eggs
+        return
+    metadata = EggMetadata(importer)
+    if metadata.has_metadata('PKG-INFO'):
+        yield Distribution.from_filename(path_item, metadata=metadata)
+    if only:
+        # don't yield nested distros
+        return
+    for subitem in metadata.resource_listdir(''):
+        if _is_egg_path(subitem):
+            subpath = os.path.join(path_item, subitem)
+            dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
+            for dist in dists:
+                yield dist
+        elif subitem.lower().endswith(('.dist-info', '.egg-info')):
+            subpath = os.path.join(path_item, subitem)
+            submeta = EggMetadata(zipimport.zipimporter(subpath))
+            submeta.egg_info = subpath
+            yield Distribution.from_location(path_item, subitem, submeta)
+
+
+register_finder(zipimport.zipimporter, find_eggs_in_zip)
+
+
+def find_nothing(importer, path_item, only=False):
+    return ()
+
+
+register_finder(object, find_nothing)
+
+
+def find_on_path(importer, path_item, only=False):
+    """Yield distributions accessible on a sys.path directory"""
+    path_item = _normalize_cached(path_item)
+
+    if _is_unpacked_egg(path_item):
+        yield Distribution.from_filename(
+            path_item,
+            metadata=PathMetadata(path_item, os.path.join(path_item, 'EGG-INFO')),
+        )
+        return
+
+    entries = (os.path.join(path_item, child) for child in safe_listdir(path_item))
+
+    # scan for .egg and .egg-info in directory
+    for entry in sorted(entries):
+        fullpath = os.path.join(path_item, entry)
+        factory = dist_factory(path_item, entry, only)
+        for dist in factory(fullpath):
+            yield dist
+
+
+def dist_factory(path_item, entry, only):
+    """Return a dist_factory for the given entry."""
+    lower = entry.lower()
+    is_egg_info = lower.endswith('.egg-info')
+    is_dist_info = lower.endswith('.dist-info') and os.path.isdir(
+        os.path.join(path_item, entry)
+    )
+    is_meta = is_egg_info or is_dist_info
+    return (
+        distributions_from_metadata
+        if is_meta
+        else find_distributions
+        if not only and _is_egg_path(entry)
+        else resolve_egg_link
+        if not only and lower.endswith('.egg-link')
+        else NoDists()
+    )
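+
+# In plain terms, the conditional expression above dispatches as follows
+# (editorial summary, not part of the original source):
+#   *.egg-info / *.dist-info directory -> distributions_from_metadata
+#   *.egg (when only=False)            -> find_distributions (recurse into it)
+#   *.egg-link (when only=False)       -> resolve_egg_link
+#   anything else                      -> NoDists() (yields nothing)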
+
+
+class NoDists:
+    """
+    >>> bool(NoDists())
+    False
+
+    >>> list(NoDists()('anything'))
+    []
+    """
+
+    def __bool__(self):
+        return False
+
+    def __call__(self, fullpath):
+        return iter(())
+
+
+def safe_listdir(path):
+    """
+    Attempt to list contents of path, but suppress some exceptions.
+    """
+    try:
+        return os.listdir(path)
+    except (PermissionError, NotADirectoryError):
+        pass
+    except OSError as e:
+        # Ignore the directory if does not exist, not a directory or
+        # permission denied
+        if e.errno not in (errno.ENOTDIR, errno.EACCES, errno.ENOENT):
+            raise
+    return ()
+
+
+def distributions_from_metadata(path):
+    root = os.path.dirname(path)
+    if os.path.isdir(path):
+        if len(os.listdir(path)) == 0:
+            # empty metadata dir; skip
+            return
+        metadata = PathMetadata(root, path)
+    else:
+        metadata = FileMetadata(path)
+    entry = os.path.basename(path)
+    yield Distribution.from_location(
+        root,
+        entry,
+        metadata,
+        precedence=DEVELOP_DIST,
+    )
+
+
+def non_empty_lines(path):
+    """
+    Yield non-empty lines from file at path
+    """
+    with open(path) as f:
+        for line in f:
+            line = line.strip()
+            if line:
+                yield line
+
+
+def resolve_egg_link(path):
+    """
+    Given a path to an .egg-link, resolve distributions
+    present in the referenced path.
+    """
+    referenced_paths = non_empty_lines(path)
+    resolved_paths = (
+        os.path.join(os.path.dirname(path), ref) for ref in referenced_paths
+    )
+    dist_groups = map(find_distributions, resolved_paths)
+    return next(dist_groups, ())
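+
+# For reference (editorial note): an .egg-link file written by development
+# installs usually contains the project checkout directory on its first
+# non-empty line, e.g. ``/home/user/src/myproject`` (hypothetical path), which
+# is what non_empty_lines()/resolve_egg_link() read back here.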
+
+
+if hasattr(pkgutil, 'ImpImporter'):
+    register_finder(pkgutil.ImpImporter, find_on_path)
+
+register_finder(importlib_machinery.FileFinder, find_on_path)
+
+_declare_state('dict', _namespace_handlers={})
+_declare_state('dict', _namespace_packages={})
+
+
+def register_namespace_handler(importer_type, namespace_handler):
+    """Register `namespace_handler` to declare namespace packages
+
+    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
+    handler), and `namespace_handler` is a callable like this::
+
+        def namespace_handler(importer, path_entry, moduleName, module):
+            # return a path_entry to use for child packages
+
+    Namespace handlers are only called if the importer object has already
+    agreed that it can handle the relevant path item, and they should only
+    return a subpath if the module __path__ does not already contain an
+    equivalent subpath.  For an example namespace handler, see
+    ``pkg_resources.file_ns_handler``.
+    """
+    _namespace_handlers[importer_type] = namespace_handler
+
+
+def _handle_ns(packageName, path_item):
+    """Ensure that named package includes a subpath of path_item (if needed)"""
+
+    importer = get_importer(path_item)
+    if importer is None:
+        return None
+
+    # use find_spec (PEP 451) and fall-back to find_module (PEP 302)
+    try:
+        spec = importer.find_spec(packageName)
+    except AttributeError:
+        # capture warnings due to #1111
+        with warnings.catch_warnings():
+            warnings.simplefilter("ignore")
+            loader = importer.find_module(packageName)
+    else:
+        loader = spec.loader if spec else None
+
+    if loader is None:
+        return None
+    module = sys.modules.get(packageName)
+    if module is None:
+        module = sys.modules[packageName] = types.ModuleType(packageName)
+        module.__path__ = []
+        _set_parent_ns(packageName)
+    elif not hasattr(module, '__path__'):
+        raise TypeError("Not a package:", packageName)
+    handler = _find_adapter(_namespace_handlers, importer)
+    subpath = handler(importer, path_item, packageName, module)
+    if subpath is not None:
+        path = module.__path__
+        path.append(subpath)
+        importlib.import_module(packageName)
+        _rebuild_mod_path(path, packageName, module)
+    return subpath
+
+
+def _rebuild_mod_path(orig_path, package_name, module):
+    """
+    Rebuild module.__path__ ensuring that all entries are ordered
+    corresponding to their sys.path order
+    """
+    sys_path = [_normalize_cached(p) for p in sys.path]
+
+    def safe_sys_path_index(entry):
+        """
+        Workaround for #520 and #513.
+        """
+        try:
+            return sys_path.index(entry)
+        except ValueError:
+            return float('inf')
+
+    def position_in_sys_path(path):
+        """
+        Return the ordinal of the path based on its position in sys.path
+        """
+        path_parts = path.split(os.sep)
+        module_parts = package_name.count('.') + 1
+        parts = path_parts[:-module_parts]
+        return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))
+
+    new_path = sorted(orig_path, key=position_in_sys_path)
+    new_path = [_normalize_cached(p) for p in new_path]
+
+    if isinstance(module.__path__, list):
+        module.__path__[:] = new_path
+    else:
+        module.__path__ = new_path
+
+
+def declare_namespace(packageName):
+    """Declare that package 'packageName' is a namespace package"""
+
+    msg = (
+        f"Deprecated call to `pkg_resources.declare_namespace({packageName!r})`.\n"
+        "Implementing implicit namespace packages (as specified in PEP 420) "
+        "is preferred to `pkg_resources.declare_namespace`. "
+        "See https://setuptools.pypa.io/en/latest/references/"
+        "keywords.html#keyword-namespace-packages"
+    )
+    warnings.warn(msg, DeprecationWarning, stacklevel=2)
+
+    _imp.acquire_lock()
+    try:
+        if packageName in _namespace_packages:
+            return
+
+        path = sys.path
+        parent, _, _ = packageName.rpartition('.')
+
+        if parent:
+            declare_namespace(parent)
+            if parent not in _namespace_packages:
+                __import__(parent)
+            try:
+                path = sys.modules[parent].__path__
+            except AttributeError as e:
+                raise TypeError("Not a package:", parent) from e
+
+        # Track what packages are namespaces, so when new path items are added,
+        # they can be updated
+        _namespace_packages.setdefault(parent or None, []).append(packageName)
+        _namespace_packages.setdefault(packageName, [])
+
+        for path_item in path:
+            # Ensure all the parent's path items are reflected in the child,
+            # if they apply
+            _handle_ns(packageName, path_item)
+
+    finally:
+        _imp.release_lock()
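+
+# Editorial example: the legacy pattern this deprecation targets is a
+# namespace package whose __init__.py contains only::
+#
+#     __import__('pkg_resources').declare_namespace(__name__)
+#
+# PEP 420 implicit namespace packages (no __init__.py at all) replace it.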
+
+
+def fixup_namespace_packages(path_item, parent=None):
+    """Ensure that previously-declared namespace packages include path_item"""
+    _imp.acquire_lock()
+    try:
+        for package in _namespace_packages.get(parent, ()):
+            subpath = _handle_ns(package, path_item)
+            if subpath:
+                fixup_namespace_packages(subpath, package)
+    finally:
+        _imp.release_lock()
+
+
+def file_ns_handler(importer, path_item, packageName, module):
+    """Compute an ns-package subpath for a filesystem or zipfile importer"""
+
+    subpath = os.path.join(path_item, packageName.split('.')[-1])
+    normalized = _normalize_cached(subpath)
+    for item in module.__path__:
+        if _normalize_cached(item) == normalized:
+            break
+    else:
+        # Only return the path if it's not already there
+        return subpath
+
+
+if hasattr(pkgutil, 'ImpImporter'):
+    register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
+
+register_namespace_handler(zipimport.zipimporter, file_ns_handler)
+register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
+
+
+def null_ns_handler(importer, path_item, packageName, module):
+    return None
+
+
+register_namespace_handler(object, null_ns_handler)
+
+
+def normalize_path(filename):
+    """Normalize a file/dir name for comparison purposes"""
+    return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))
+
+
+def _cygwin_patch(filename):  # pragma: nocover
+    """
+    Contrary to POSIX 2008, on Cygwin, getcwd (3) contains
+    symlink components. Using
+    os.path.abspath() works around this limitation. A fix in os.getcwd()
+    would probably be better, in Cygwin even more so, except
+    that this seems to be by design...
+    """
+    return os.path.abspath(filename) if sys.platform == 'cygwin' else filename
+
+
+def _normalize_cached(filename, _cache={}):
+    try:
+        return _cache[filename]
+    except KeyError:
+        _cache[filename] = result = normalize_path(filename)
+        return result
+
+
+def _is_egg_path(path):
+    """
+    Determine if given path appears to be an egg.
+    """
+    return _is_zip_egg(path) or _is_unpacked_egg(path)
+
+
+def _is_zip_egg(path):
+    return (
+        path.lower().endswith('.egg')
+        and os.path.isfile(path)
+        and zipfile.is_zipfile(path)
+    )
+
+
+def _is_unpacked_egg(path):
+    """
+    Determine if given path appears to be an unpacked egg.
+    """
+    return path.lower().endswith('.egg') and os.path.isfile(
+        os.path.join(path, 'EGG-INFO', 'PKG-INFO')
+    )
+
+
+def _set_parent_ns(packageName):
+    parts = packageName.split('.')
+    name = parts.pop()
+    if parts:
+        parent = '.'.join(parts)
+        setattr(sys.modules[parent], name, sys.modules[packageName])
+
+
+MODULE = re.compile(r"\w+(\.\w+)*$").match
+EGG_NAME = re.compile(
+    r"""
+    (?P<name>[^-]+) (
+        -(?P<ver>[^-]+) (
+            -py(?P<pyver>[^-]+) (
+                -(?P<plat>.+)
+            )?
+        )?
+    )?
+    """,
+    re.VERBOSE | re.IGNORECASE,
+).match
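+
+# Editorial example of what EGG_NAME parses (hypothetical filename):
+#   "FooBar-1.2-py3.11-linux-x86_64" ->
+#   name='FooBar', ver='1.2', pyver='3.11', plat='linux-x86_64'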
+
+
+class EntryPoint:
+    """Object representing an advertised importable object"""
+
+    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
+        if not MODULE(module_name):
+            raise ValueError("Invalid module name", module_name)
+        self.name = name
+        self.module_name = module_name
+        self.attrs = tuple(attrs)
+        self.extras = tuple(extras)
+        self.dist = dist
+
+    def __str__(self):
+        s = "%s = %s" % (self.name, self.module_name)
+        if self.attrs:
+            s += ':' + '.'.join(self.attrs)
+        if self.extras:
+            s += ' [%s]' % ','.join(self.extras)
+        return s
+
+    def __repr__(self):
+        return "EntryPoint.parse(%r)" % str(self)
+
+    def load(self, require=True, *args, **kwargs):
+        """
+        Require packages for this EntryPoint, then resolve it.
+        """
+        if not require or args or kwargs:
+            warnings.warn(
+                "Parameters to load are deprecated.  Call .resolve and "
+                ".require separately.",
+                PkgResourcesDeprecationWarning,
+                stacklevel=2,
+            )
+        if require:
+            self.require(*args, **kwargs)
+        return self.resolve()
+
+    def resolve(self):
+        """
+        Resolve the entry point from its module and attrs.
+        """
+        module = __import__(self.module_name, fromlist=['__name__'], level=0)
+        try:
+            return functools.reduce(getattr, self.attrs, module)
+        except AttributeError as exc:
+            raise ImportError(str(exc)) from exc
+
+    def require(self, env=None, installer=None):
+        if self.extras and not self.dist:
+            raise UnknownExtra("Can't require() without a distribution", self)
+
+        # Get the requirements for this entry point with all its extras and
+        # then resolve them. We have to pass `extras` along when resolving so
+        # that the working set knows what extras we want. Otherwise, for
+        # dist-info distributions, the working set will assume that the
+        # requirements for that extra are purely optional and skip over them.
+        reqs = self.dist.requires(self.extras)
+        items = working_set.resolve(reqs, env, installer, extras=self.extras)
+        list(map(working_set.add, items))
+
+    pattern = re.compile(
+        r'\s*'
+        r'(?P<name>.+?)\s*'
+        r'=\s*'
+        r'(?P<module>[\w.]+)\s*'
+        r'(:\s*(?P<attr>[\w.]+))?\s*'
+        r'(?P<extras>\[.*\])?\s*$'
+    )
+
+    @classmethod
+    def parse(cls, src, dist=None):
+        """Parse a single entry point from string `src`
+
+        Entry point syntax follows the form::
+
+            name = some.module:some.attr [extra1, extra2]
+
+        The entry name and module name are required, but the ``:attrs`` and
+        ``[extras]`` parts are optional
+        """
+        m = cls.pattern.match(src)
+        if not m:
+            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
+            raise ValueError(msg, src)
+        res = m.groupdict()
+        extras = cls._parse_extras(res['extras'])
+        attrs = res['attr'].split('.') if res['attr'] else ()
+        return cls(res['name'], res['module'], attrs, extras, dist)
+
+    @classmethod
+    def _parse_extras(cls, extras_spec):
+        if not extras_spec:
+            return ()
+        req = Requirement.parse('x' + extras_spec)
+        if req.specs:
+            raise ValueError()
+        return req.extras
+
+    @classmethod
+    def parse_group(cls, group, lines, dist=None):
+        """Parse an entry point group"""
+        if not MODULE(group):
+            raise ValueError("Invalid group name", group)
+        this = {}
+        for line in yield_lines(lines):
+            ep = cls.parse(line, dist)
+            if ep.name in this:
+                raise ValueError("Duplicate entry point", group, ep.name)
+            this[ep.name] = ep
+        return this
+
+    @classmethod
+    def parse_map(cls, data, dist=None):
+        """Parse a map of entry point groups"""
+        if isinstance(data, dict):
+            data = data.items()
+        else:
+            data = split_sections(data)
+        maps = {}
+        for group, lines in data:
+            if group is None:
+                if not lines:
+                    continue
+                raise ValueError("Entry points must be listed in groups")
+            group = group.strip()
+            if group in maps:
+                raise ValueError("Duplicate group name", group)
+            maps[group] = cls.parse_group(group, lines, dist)
+        return maps
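+
+# Editorial round-trip example (hypothetical entry point specification):
+#
+#     >>> ep = EntryPoint.parse("serve = mypkg.cli:main [extra1]")
+#     >>> ep.name, ep.module_name, ep.attrs, ep.extras
+#     ('serve', 'mypkg.cli', ('main',), ('extra1',))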
+
+
+def _version_from_file(lines):
+    """
+    Given an iterable of lines from a Metadata file, return
+    the value of the Version field, if present, or None otherwise.
+    """
+
+    def is_version_line(line):
+        return line.lower().startswith('version:')
+
+    version_lines = filter(is_version_line, lines)
+    line = next(iter(version_lines), '')
+    _, _, value = line.partition(':')
+    return safe_version(value.strip()) or None
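+
+# e.g. (editorial): _version_from_file(['Name: foo', 'Version: 1.2.3']) -> '1.2.3'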
+
+
+class Distribution:
+    """Wrap an actual or potential sys.path entry w/metadata"""
+
+    PKG_INFO = 'PKG-INFO'
+
+    def __init__(
+        self,
+        location=None,
+        metadata=None,
+        project_name=None,
+        version=None,
+        py_version=PY_MAJOR,
+        platform=None,
+        precedence=EGG_DIST,
+    ):
+        self.project_name = safe_name(project_name or 'Unknown')
+        if version is not None:
+            self._version = safe_version(version)
+        self.py_version = py_version
+        self.platform = platform
+        self.location = location
+        self.precedence = precedence
+        self._provider = metadata or empty_provider
+
+    @classmethod
+    def from_location(cls, location, basename, metadata=None, **kw):
+        project_name, version, py_version, platform = [None] * 4
+        basename, ext = os.path.splitext(basename)
+        if ext.lower() in _distributionImpl:
+            cls = _distributionImpl[ext.lower()]
+
+            match = EGG_NAME(basename)
+            if match:
+                project_name, version, py_version, platform = match.group(
+                    'name', 'ver', 'pyver', 'plat'
+                )
+        return cls(
+            location,
+            metadata,
+            project_name=project_name,
+            version=version,
+            py_version=py_version,
+            platform=platform,
+            **kw,
+        )._reload_version()
+
+    def _reload_version(self):
+        return self
+
+    @property
+    def hashcmp(self):
+        return (
+            self._forgiving_parsed_version,
+            self.precedence,
+            self.key,
+            self.location,
+            self.py_version or '',
+            self.platform or '',
+        )
+
+    def __hash__(self):
+        return hash(self.hashcmp)
+
+    def __lt__(self, other):
+        return self.hashcmp < other.hashcmp
+
+    def __le__(self, other):
+        return self.hashcmp <= other.hashcmp
+
+    def __gt__(self, other):
+        return self.hashcmp > other.hashcmp
+
+    def __ge__(self, other):
+        return self.hashcmp >= other.hashcmp
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            # It's not a Distribution, so they are not equal
+            return False
+        return self.hashcmp == other.hashcmp
+
+    def __ne__(self, other):
+        return not self == other
+
+    # These properties have to be lazy so that we don't have to load any
+    # metadata until/unless it's actually needed.  (i.e., some distributions
+    # may not know their name or version without loading PKG-INFO)
+
+    @property
+    def key(self):
+        try:
+            return self._key
+        except AttributeError:
+            self._key = key = self.project_name.lower()
+            return key
+
+    @property
+    def parsed_version(self):
+        if not hasattr(self, "_parsed_version"):
+            try:
+                self._parsed_version = parse_version(self.version)
+            except packaging.version.InvalidVersion as ex:
+                info = f"(package: {self.project_name})"
+                if hasattr(ex, "add_note"):
+                    ex.add_note(info)  # PEP 678
+                    raise
+                raise packaging.version.InvalidVersion(f"{str(ex)} {info}") from None
+
+        return self._parsed_version
+
+    @property
+    def _forgiving_parsed_version(self):
+        try:
+            return self.parsed_version
+        except packaging.version.InvalidVersion as ex:
+            self._parsed_version = parse_version(_forgiving_version(self.version))
+
+            notes = "\n".join(getattr(ex, "__notes__", []))  # PEP 678
+            msg = f"""!!\n\n
+            *************************************************************************
+            {str(ex)}\n{notes}
+
+            This is a long overdue deprecation.
+            For the time being, `pkg_resources` will use `{self._parsed_version}`
+            as a replacement to avoid breaking existing environments,
+            but no future compatibility is guaranteed.
+
+            If you maintain package {self.project_name}, you should implement
+            the changes needed to make the project comply with PEP 440 immediately.
+            *************************************************************************
+            \n\n!!
+            """
+            warnings.warn(msg, DeprecationWarning)
+
+            return self._parsed_version
+
+    @property
+    def version(self):
+        try:
+            return self._version
+        except AttributeError as e:
+            version = self._get_version()
+            if version is None:
+                path = self._get_metadata_path_for_display(self.PKG_INFO)
+                msg = ("Missing 'Version:' header and/or {} file at path: {}").format(
+                    self.PKG_INFO, path
+                )
+                raise ValueError(msg, self) from e
+
+            return version
+
+    @property
+    def _dep_map(self):
+        """
+        A map of extra to its list of (direct) requirements
+        for this distribution, including the null extra.
+        """
+        try:
+            return self.__dep_map
+        except AttributeError:
+            self.__dep_map = self._filter_extras(self._build_dep_map())
+        return self.__dep_map
+
+    @staticmethod
+    def _filter_extras(dm):
+        """
+        Given a mapping of extras to dependencies, strip off
+        environment markers and filter out any dependencies
+        not matching the markers.
+        """
+        for extra in list(filter(None, dm)):
+            new_extra = extra
+            reqs = dm.pop(extra)
+            new_extra, _, marker = extra.partition(':')
+            fails_marker = marker and (
+                invalid_marker(marker) or not evaluate_marker(marker)
+            )
+            if fails_marker:
+                reqs = []
+            new_extra = safe_extra(new_extra) or None
+
+            dm.setdefault(new_extra, []).extend(reqs)
+        return dm
+
+    def _build_dep_map(self):
+        dm = {}
+        for name in 'requires.txt', 'depends.txt':
+            for extra, reqs in split_sections(self._get_metadata(name)):
+                dm.setdefault(extra, []).extend(parse_requirements(reqs))
+        return dm
+
+    def requires(self, extras=()):
+        """List of Requirements needed for this distro if `extras` are used"""
+        dm = self._dep_map
+        deps = []
+        deps.extend(dm.get(None, ()))
+        for ext in extras:
+            try:
+                deps.extend(dm[safe_extra(ext)])
+            except KeyError as e:
+                raise UnknownExtra(
+                    "%s has no such extra feature %r" % (self, ext)
+                ) from e
+        return deps
+
+    def _get_metadata_path_for_display(self, name):
+        """
+        Return the path to the given metadata file, if available.
+        """
+        try:
+            # We need to access _get_metadata_path() on the provider object
+            # directly rather than through this class's __getattr__()
+            # since _get_metadata_path() is marked private.
+            path = self._provider._get_metadata_path(name)
+
+        # Handle exceptions e.g. in case the distribution's metadata
+        # provider doesn't support _get_metadata_path().
+        except Exception:
+            return '[could not detect]'
+
+        return path
+
+    def _get_metadata(self, name):
+        if self.has_metadata(name):
+            for line in self.get_metadata_lines(name):
+                yield line
+
+    def _get_version(self):
+        lines = self._get_metadata(self.PKG_INFO)
+        version = _version_from_file(lines)
+
+        return version
+
+    def activate(self, path=None, replace=False):
+        """Ensure distribution is importable on `path` (default=sys.path)"""
+        if path is None:
+            path = sys.path
+        self.insert_on(path, replace=replace)
+        if path is sys.path:
+            fixup_namespace_packages(self.location)
+            for pkg in self._get_metadata('namespace_packages.txt'):
+                if pkg in sys.modules:
+                    declare_namespace(pkg)
+
+    def egg_name(self):
+        """Return what this distribution's standard .egg filename should be"""
+        filename = "%s-%s-py%s" % (
+            to_filename(self.project_name),
+            to_filename(self.version),
+            self.py_version or PY_MAJOR,
+        )
+
+        if self.platform:
+            filename += '-' + self.platform
+        return filename
+
+    def __repr__(self):
+        if self.location:
+            return "%s (%s)" % (self, self.location)
+        else:
+            return str(self)
+
+    def __str__(self):
+        try:
+            version = getattr(self, 'version', None)
+        except ValueError:
+            version = None
+        version = version or "[unknown version]"
+        return "%s %s" % (self.project_name, version)
+
+    def __getattr__(self, attr):
+        """Delegate all unrecognized public attributes to .metadata provider"""
+        if attr.startswith('_'):
+            raise AttributeError(attr)
+        return getattr(self._provider, attr)
+
+    def __dir__(self):
+        return list(
+            set(super(Distribution, self).__dir__())
+            | set(attr for attr in self._provider.__dir__() if not attr.startswith('_'))
+        )
+
+    @classmethod
+    def from_filename(cls, filename, metadata=None, **kw):
+        return cls.from_location(
+            _normalize_cached(filename), os.path.basename(filename), metadata, **kw
+        )
+
+    def as_requirement(self):
+        """Return a ``Requirement`` that matches this distribution exactly"""
+        if isinstance(self.parsed_version, packaging.version.Version):
+            spec = "%s==%s" % (self.project_name, self.parsed_version)
+        else:
+            spec = "%s===%s" % (self.project_name, self.parsed_version)
+
+        return Requirement.parse(spec)
+
+    def load_entry_point(self, group, name):
+        """Return the `name` entry point of `group` or raise ImportError"""
+        ep = self.get_entry_info(group, name)
+        if ep is None:
+            raise ImportError("Entry point %r not found" % ((group, name),))
+        return ep.load()
+
+    def get_entry_map(self, group=None):
+        """Return the entry point map for `group`, or the full entry map"""
+        try:
+            ep_map = self._ep_map
+        except AttributeError:
+            ep_map = self._ep_map = EntryPoint.parse_map(
+                self._get_metadata('entry_points.txt'), self
+            )
+        if group is not None:
+            return ep_map.get(group, {})
+        return ep_map
+
+    def get_entry_info(self, group, name):
+        """Return the EntryPoint object for `group`+`name`, or ``None``"""
+        return self.get_entry_map(group).get(name)
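+
+    # Editorial usage sketch (hypothetical project/entry point): combining the
+    # two helpers above to resolve a console script callable.
+    #
+    #     >>> dist = get_distribution('mypkg')
+    #     >>> main = dist.load_entry_point('console_scripts', 'mypkg')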
+
+    # FIXME: 'Distribution.insert_on' is too complex (13)
+    def insert_on(self, path, loc=None, replace=False):  # noqa: C901
+        """Ensure self.location is on path
+
+        If replace=False (default):
+            - If location is already in path anywhere, do nothing.
+            - Else:
+              - If it's an egg and its parent directory is on path,
+                insert just ahead of the parent.
+              - Else: add to the end of path.
+        If replace=True:
+            - If location is already on path anywhere (not eggs)
+              or higher priority than its parent (eggs)
+              do nothing.
+            - Else:
+              - If it's an egg and its parent directory is on path,
+                insert just ahead of the parent,
+                removing any lower-priority entries.
+              - Else: add it to the front of path.
+        """
+
+        loc = loc or self.location
+        if not loc:
+            return
+
+        nloc = _normalize_cached(loc)
+        bdir = os.path.dirname(nloc)
+        npath = [(p and _normalize_cached(p) or p) for p in path]
+
+        for p, item in enumerate(npath):
+            if item == nloc:
+                if replace:
+                    break
+                else:
+                    # don't modify path (even removing duplicates) if
+                    # found and not replace
+                    return
+            elif item == bdir and self.precedence == EGG_DIST:
+                # if it's an .egg, give it precedence over its directory
+                # UNLESS it's already been added to sys.path and replace=False
+                if (not replace) and nloc in npath[p:]:
+                    return
+                if path is sys.path:
+                    self.check_version_conflict()
+                path.insert(p, loc)
+                npath.insert(p, nloc)
+                break
+        else:
+            if path is sys.path:
+                self.check_version_conflict()
+            if replace:
+                path.insert(0, loc)
+            else:
+                path.append(loc)
+            return
+
+        # p is the spot where we found or inserted loc; now remove duplicates
+        while True:
+            try:
+                np = npath.index(nloc, p + 1)
+            except ValueError:
+                break
+            else:
+                del npath[np], path[np]
+                # ha!
+                p = np
+
+        return
+
+    def check_version_conflict(self):
+        if self.key == 'setuptools':
+            # ignore the inevitable setuptools self-conflicts  :(
+            return
+
+        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
+        loc = normalize_path(self.location)
+        for modname in self._get_metadata('top_level.txt'):
+            if (
+                modname not in sys.modules
+                or modname in nsp
+                or modname in _namespace_packages
+            ):
+                continue
+            if modname in ('pkg_resources', 'setuptools', 'site'):
+                continue
+            fn = getattr(sys.modules[modname], '__file__', None)
+            if fn and (
+                normalize_path(fn).startswith(loc) or fn.startswith(self.location)
+            ):
+                continue
+            issue_warning(
+                "Module %s was already imported from %s, but %s is being added"
+                " to sys.path" % (modname, fn, self.location),
+            )
+
+    def has_version(self):
+        try:
+            self.version
+        except ValueError:
+            issue_warning("Unbuilt egg for " + repr(self))
+            return False
+        except SystemError:
+            # TODO: remove this except clause when python/cpython#103632 is fixed.
+            return False
+        return True
+
+    def clone(self, **kw):
+        """Copy this distribution, substituting in any changed keyword args"""
+        names = 'project_name version py_version platform location precedence'
+        for attr in names.split():
+            kw.setdefault(attr, getattr(self, attr, None))
+        kw.setdefault('metadata', self._provider)
+        return self.__class__(**kw)
+
+    @property
+    def extras(self):
+        return [dep for dep in self._dep_map if dep]
+
+
+class EggInfoDistribution(Distribution):
+    def _reload_version(self):
+        """
+        Packages installed by distutils (e.g. numpy or scipy) use an old
+        safe_version, so their version numbers can get mangled when converted
+        to filenames (e.g., 1.11.0.dev0+2329eae to 1.11.0.dev0_2329eae).
+        Such distributions will not be parsed properly downstream by
+        Distribution and safe_version, so take an extra step and try to get
+        the version number from the metadata file itself instead of the
+        filename.
+        """
+        md_version = self._get_version()
+        if md_version:
+            self._version = md_version
+        return self
+
+
+class DistInfoDistribution(Distribution):
+    """
+    Wrap an actual or potential sys.path entry
+    w/metadata, .dist-info style.
+    """
+
+    PKG_INFO = 'METADATA'
+    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")
+
+    @property
+    def _parsed_pkg_info(self):
+        """Parse and cache metadata"""
+        try:
+            return self._pkg_info
+        except AttributeError:
+            metadata = self.get_metadata(self.PKG_INFO)
+            self._pkg_info = email.parser.Parser().parsestr(metadata)
+            return self._pkg_info
+
+    @property
+    def _dep_map(self):
+        try:
+            return self.__dep_map
+        except AttributeError:
+            self.__dep_map = self._compute_dependencies()
+            return self.__dep_map
+
+    def _compute_dependencies(self):
+        """Recompute this distribution's dependencies."""
+        dm = self.__dep_map = {None: []}
+
+        reqs = []
+        # Including any condition expressions
+        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
+            reqs.extend(parse_requirements(req))
+
+        def reqs_for_extra(extra):
+            for req in reqs:
+                if not req.marker or req.marker.evaluate({'extra': extra}):
+                    yield req
+
+        common = types.MappingProxyType(dict.fromkeys(reqs_for_extra(None)))
+        dm[None].extend(common)
+
+        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
+            s_extra = safe_extra(extra.strip())
+            dm[s_extra] = [r for r in reqs_for_extra(extra) if r not in common]
+
+        return dm
+
+
+_distributionImpl = {
+    '.egg': Distribution,
+    '.egg-info': EggInfoDistribution,
+    '.dist-info': DistInfoDistribution,
+}
+
+
+def issue_warning(*args, **kw):
+    level = 1
+    g = globals()
+    try:
+        # find the first stack frame that is *not* code in
+        # the pkg_resources module, to use for the warning
+        while sys._getframe(level).f_globals is g:
+            level += 1
+    except ValueError:
+        pass
+    warnings.warn(stacklevel=level + 1, *args, **kw)
+
+
+def parse_requirements(strs):
+    """
+    Yield ``Requirement`` objects for each specification in `strs`.
+
+    `strs` must be a string, or a (possibly-nested) iterable thereof.
+    """
+    return map(Requirement, join_continuation(map(drop_comment, yield_lines(strs))))
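+
+# Editorial example: `strs` may be a single string or a nested iterable of
+# lines; comments and line continuations are handled before parsing.
+#
+#     >>> reqs = list(parse_requirements(["requests>=2.0", "packaging"]))
+#     >>> [r.project_name for r in reqs]
+#     ['requests', 'packaging']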
+
+
+class RequirementParseError(packaging.requirements.InvalidRequirement):
+    "Compatibility wrapper for InvalidRequirement"
+
+
+class Requirement(packaging.requirements.Requirement):
+    def __init__(self, requirement_string):
+        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
+        super(Requirement, self).__init__(requirement_string)
+        self.unsafe_name = self.name
+        project_name = safe_name(self.name)
+        self.project_name, self.key = project_name, project_name.lower()
+        self.specs = [(spec.operator, spec.version) for spec in self.specifier]
+        self.extras = tuple(map(safe_extra, self.extras))
+        self.hashCmp = (
+            self.key,
+            self.url,
+            self.specifier,
+            frozenset(self.extras),
+            str(self.marker) if self.marker else None,
+        )
+        self.__hash = hash(self.hashCmp)
+
+    def __eq__(self, other):
+        return isinstance(other, Requirement) and self.hashCmp == other.hashCmp
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __contains__(self, item):
+        if isinstance(item, Distribution):
+            if item.key != self.key:
+                return False
+
+            item = item.version
+
+        # Allow prereleases always in order to match the previous behavior of
+        # this method. In the future this should be smarter and follow PEP 440
+        # more accurately.
+        return self.specifier.contains(item, prereleases=True)
+
+    def __hash__(self):
+        return self.__hash
+
+    def __repr__(self):
+        return "Requirement.parse(%r)" % str(self)
+
+    @staticmethod
+    def parse(s):
+        (req,) = parse_requirements(s)
+        return req
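+
+# Editorial example: a version string (or a Distribution) can be tested
+# against a Requirement; prereleases are always allowed, per __contains__.
+#
+#     >>> "2.1" in Requirement.parse("foo>=2.0")
+#     True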
+
+
+def _always_object(classes):
+    """
+    Ensure object appears in the mro even
+    for old-style classes.
+    """
+    if object not in classes:
+        return classes + (object,)
+    return classes
+
+
+def _find_adapter(registry, ob):
+    """Return an adapter factory for `ob` from `registry`"""
+    types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
+    for t in types:
+        if t in registry:
+            return registry[t]
+
+
+def ensure_directory(path):
+    """Ensure that the parent directory of `path` exists"""
+    dirname = os.path.dirname(path)
+    os.makedirs(dirname, exist_ok=True)
+
+
+def _bypass_ensure_directory(path):
+    """Sandbox-bypassing version of ensure_directory()"""
+    if not WRITE_SUPPORT:
+        raise IOError('"os.mkdir" not supported on this platform.')
+    dirname, filename = split(path)
+    if dirname and filename and not isdir(dirname):
+        _bypass_ensure_directory(dirname)
+        try:
+            mkdir(dirname, 0o755)
+        except FileExistsError:
+            pass
+
+
+def split_sections(s):
+    """Split a string or iterable thereof into (section, content) pairs
+
+    Each ``section`` is a stripped version of the section header ("[section]")
+    and each ``content`` is a list of stripped lines excluding blank lines and
+    comment-only lines.  If there are any such lines before the first section
+    header, they're returned in a first ``section`` of ``None``.
+    """
+    section = None
+    content = []
+    for line in yield_lines(s):
+        if line.startswith("["):
+            if line.endswith("]"):
+                if section or content:
+                    yield section, content
+                section = line[1:-1].strip()
+                content = []
+            else:
+                raise ValueError("Invalid section heading", line)
+        else:
+            content.append(line)
+
+    # wrap up last segment
+    yield section, content
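+
+# Editorial example:
+#   list(split_sections(['a', '[extra]', 'b'])) -> [(None, ['a']), ('extra', ['b'])]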
+
+
+def _mkstemp(*args, **kw):
+    old_open = os.open
+    try:
+        # temporarily bypass sandboxing
+        os.open = os_open
+        return tempfile.mkstemp(*args, **kw)
+    finally:
+        # and then put it back
+        os.open = old_open
+
+
+# Silence the PEP440Warning by default, so that end users don't get hit by it
+# randomly just because they use pkg_resources. We want to append the rule
+# because we want earlier uses of filterwarnings to take precedence over this
+# one.
+warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
+
+
+# from jaraco.functools 1.3
+def _call_aside(f, *args, **kwargs):
+    f(*args, **kwargs)
+    return f
+
+
+@_call_aside
+def _initialize(g=globals()):
+    "Set up global resource manager (deliberately not state-saved)"
+    manager = ResourceManager()
+    g['_manager'] = manager
+    g.update(
+        (name, getattr(manager, name))
+        for name in dir(manager)
+        if not name.startswith('_')
+    )
+
+
+class PkgResourcesDeprecationWarning(Warning):
+    """
+    Base class for warning about deprecations in ``pkg_resources``
+
+    This class is not derived from ``DeprecationWarning``, and as such is
+    visible by default.
+    """
+
+
+@_call_aside
+def _initialize_master_working_set():
+    """
+    Prepare the master working set and make the ``require()``
+    API available.
+
+    This function has explicit effects on the global state
+    of pkg_resources. It is intended to be invoked once at
+    the initialization of this module.
+
+    Invocation by other packages is unsupported and done
+    at their own risk.
+    """
+    working_set = WorkingSet._build_master()
+    _declare_state('object', working_set=working_set)
+
+    require = working_set.require
+    iter_entry_points = working_set.iter_entry_points
+    add_activation_listener = working_set.subscribe
+    run_script = working_set.run_script
+    # backward compatibility
+    run_main = run_script
+    # Activate all distributions already on sys.path with replace=False and
+    # ensure that all distributions added to the working set in the future
+    # (e.g. by calling ``require()``) will get activated as well,
+    # with higher priority (replace=True).
+    tuple(dist.activate(replace=False) for dist in working_set)
+    add_activation_listener(
+        lambda dist: dist.activate(replace=True),
+        existing=False,
+    )
+    working_set.entries = []
+    # match order
+    list(map(working_set.add_entry, sys.path))
+    globals().update(locals())
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__init__.py
new file mode 100644
index 0000000..5ebf595
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__init__.py
@@ -0,0 +1,566 @@
+"""
+Utilities for determining application-specific dirs. See
+<https://github.com/platformdirs/platformdirs> for details and usage.
+"""
+from __future__ import annotations
+
+import os
+import sys
+from typing import TYPE_CHECKING
+
+from .api import PlatformDirsABC
+from .version import __version__
+from .version import __version_tuple__ as __version_info__
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    if sys.version_info >= (3, 8):  # pragma: no cover (py38+)
+        from typing import Literal
+    else:  # pragma: no cover (py38+)
+        from pip._vendor.typing_extensions import Literal
+
+
+def _set_platform_dir_class() -> type[PlatformDirsABC]:
+    if sys.platform == "win32":
+        from pip._vendor.platformdirs.windows import Windows as Result
+    elif sys.platform == "darwin":
+        from pip._vendor.platformdirs.macos import MacOS as Result
+    else:
+        from pip._vendor.platformdirs.unix import Unix as Result
+
+    if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system":
+        if os.getenv("SHELL") or os.getenv("PREFIX"):
+            return Result
+
+        from pip._vendor.platformdirs.android import _android_folder
+
+        if _android_folder() is not None:
+            from pip._vendor.platformdirs.android import Android
+
+            return Android  # return to avoid redefinition of result
+
+    return Result
+
+
+PlatformDirs = _set_platform_dir_class()  #: Currently active platform
+AppDirs = PlatformDirs  #: Backwards compatibility with appdirs
+
+
+def user_data_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param roaming: See `roaming `.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: data directory tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_data_dir
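+
+# Editorial usage sketch (hypothetical app/author names): on Linux this
+# typically resolves under ~/.local/share, on macOS under
+# ~/Library/Application Support, and on Windows under the local (or roaming)
+# AppData profile.
+#
+#     >>> user_data_dir("MyApp", "MyCompany", version="1.0")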
+
+
+def site_data_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    multipath: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param multipath: See `multipath`.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: data directory shared by users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        multipath=multipath,
+        ensure_exists=ensure_exists,
+    ).site_data_dir
+
+
+def user_config_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param roaming: See `roaming `.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: config directory tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_config_dir
+
+
+def site_config_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    multipath: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param multipath: See `multipath`.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: config directory shared by the users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        multipath=multipath,
+        ensure_exists=ensure_exists,
+    ).site_config_dir
+
+
+def user_cache_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param opinion: See `opinion`.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: cache directory tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_cache_dir
+
+
+def site_cache_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param opinion: See `opinion `.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: cache directory shared by users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).site_cache_dir
+
+
+def user_state_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param roaming: See `roaming `.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: state directory tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_state_dir
+
+
+def user_log_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param opinion: See `opinion`.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: log directory tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_log_dir
+
+
+def user_documents_dir() -> str:
+    """:returns: documents directory tied to the user"""
+    return PlatformDirs().user_documents_dir
+
+
+def user_downloads_dir() -> str:
+    """:returns: downloads directory tied to the user"""
+    return PlatformDirs().user_downloads_dir
+
+
+def user_pictures_dir() -> str:
+    """:returns: pictures directory tied to the user"""
+    return PlatformDirs().user_pictures_dir
+
+
+def user_videos_dir() -> str:
+    """:returns: videos directory tied to the user"""
+    return PlatformDirs().user_videos_dir
+
+
+def user_music_dir() -> str:
+    """:returns: music directory tied to the user"""
+    return PlatformDirs().user_music_dir
+
+
+def user_runtime_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param opinion: See `opinion `.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: runtime directory tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_runtime_dir
+
+
+def user_data_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param roaming: See `roaming `.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: data path tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_data_path
+
+
+def site_data_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    multipath: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param multipath: See `multipath `.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: data path shared by users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        multipath=multipath,
+        ensure_exists=ensure_exists,
+    ).site_data_path
+
+
+def user_config_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param roaming: See `roaming `.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: config path tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_config_path
+
+
+def site_config_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    multipath: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param multipath: See `multipath `.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: config path shared by the users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        multipath=multipath,
+        ensure_exists=ensure_exists,
+    ).site_config_path
+
+
+def site_cache_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param opinion: See `opinion `.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: cache path shared by users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).site_cache_path
+
+
+def user_cache_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param opinion: See `opinion `.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: cache path tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_cache_path
+
+
+def user_state_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param roaming: See `roaming `.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: state path tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_state_path
+
+
+def user_log_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param opinion: See `opinion `.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: log path tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_log_path
+
+
+def user_documents_path() -> Path:
+    """:returns: documents path tied to the user"""
+    return PlatformDirs().user_documents_path
+
+
+def user_downloads_path() -> Path:
+    """:returns: downloads path tied to the user"""
+    return PlatformDirs().user_downloads_path
+
+
+def user_pictures_path() -> Path:
+    """:returns: pictures path tied to the user"""
+    return PlatformDirs().user_pictures_path
+
+
+def user_videos_path() -> Path:
+    """:returns: videos path tied to the user"""
+    return PlatformDirs().user_videos_path
+
+
+def user_music_path() -> Path:
+    """:returns: music path tied to the user"""
+    return PlatformDirs().user_music_path
+
+
+def user_runtime_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param opinion: See `opinion `.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: runtime path tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_runtime_path
+
+
+__all__ = [
+    "__version__",
+    "__version_info__",
+    "PlatformDirs",
+    "AppDirs",
+    "PlatformDirsABC",
+    "user_data_dir",
+    "user_config_dir",
+    "user_cache_dir",
+    "user_state_dir",
+    "user_log_dir",
+    "user_documents_dir",
+    "user_downloads_dir",
+    "user_pictures_dir",
+    "user_videos_dir",
+    "user_music_dir",
+    "user_runtime_dir",
+    "site_data_dir",
+    "site_config_dir",
+    "site_cache_dir",
+    "user_data_path",
+    "user_config_path",
+    "user_cache_path",
+    "user_state_path",
+    "user_log_path",
+    "user_documents_path",
+    "user_downloads_path",
+    "user_pictures_path",
+    "user_videos_path",
+    "user_music_path",
+    "user_runtime_path",
+    "site_data_path",
+    "site_config_path",
+    "site_cache_path",
+]
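+
+
+# A minimal illustrative sketch (hypothetical app name/author): each module-level
+# helper above builds a throwaway ``PlatformDirs`` instance, so the two calls
+# below print the same directory.
+if __name__ == "__main__":  # pragma: no cover - example only
+    print(user_cache_dir("MyApp", "MyCompany", version="1.0"))
+    print(PlatformDirs("MyApp", "MyCompany", version="1.0").user_cache_dir)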
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__main__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__main__.py
new file mode 100644
index 0000000..6a0d6dd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__main__.py
@@ -0,0 +1,53 @@
+"""Main entry point."""
+from __future__ import annotations
+
+from pip._vendor.platformdirs import PlatformDirs, __version__
+
+PROPS = (
+    "user_data_dir",
+    "user_config_dir",
+    "user_cache_dir",
+    "user_state_dir",
+    "user_log_dir",
+    "user_documents_dir",
+    "user_downloads_dir",
+    "user_pictures_dir",
+    "user_videos_dir",
+    "user_music_dir",
+    "user_runtime_dir",
+    "site_data_dir",
+    "site_config_dir",
+    "site_cache_dir",
+)
+
+
+def main() -> None:
+    """Run main entry point."""
+    app_name = "MyApp"
+    app_author = "MyCompany"
+
+    print(f"-- platformdirs {__version__} --")  # noqa: T201
+
+    print("-- app dirs (with optional 'version')")  # noqa: T201
+    dirs = PlatformDirs(app_name, app_author, version="1.0")
+    for prop in PROPS:
+        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
+
+    print("\n-- app dirs (without optional 'version')")  # noqa: T201
+    dirs = PlatformDirs(app_name, app_author)
+    for prop in PROPS:
+        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
+
+    print("\n-- app dirs (without optional 'appauthor')")  # noqa: T201
+    dirs = PlatformDirs(app_name)
+    for prop in PROPS:
+        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
+
+    print("\n-- app dirs (with disabled 'appauthor')")  # noqa: T201
+    dirs = PlatformDirs(app_name, appauthor=False)
+    for prop in PROPS:
+        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
+
+
+if __name__ == "__main__":
+    main()
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/android.py b/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/android.py
new file mode 100644
index 0000000..76527dd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/android.py
@@ -0,0 +1,210 @@
+"""Android."""
+from __future__ import annotations
+
+import os
+import re
+import sys
+from functools import lru_cache
+from typing import cast
+
+from .api import PlatformDirsABC
+
+
+class Android(PlatformDirsABC):
+    """
+    Follows the guidance `from here `_. Makes use of the
+    `appname `,
+    `version `,
+    `ensure_exists `.
+    """
+
+    @property
+    def user_data_dir(self) -> str:
+        """:return: data directory tied to the user, e.g. ``/data/user///files/``"""
+        return self._append_app_name_and_version(cast(str, _android_folder()), "files")
+
+    @property
+    def site_data_dir(self) -> str:
+        """:return: data directory shared by users, same as `user_data_dir`"""
+        return self.user_data_dir
+
+    @property
+    def user_config_dir(self) -> str:
+        """
+        :return: config directory tied to the user, e.g. \
+        ``/data/user///shared_prefs/``
+        """
+        return self._append_app_name_and_version(cast(str, _android_folder()), "shared_prefs")
+
+    @property
+    def site_config_dir(self) -> str:
+        """:return: config directory shared by the users, same as `user_config_dir`"""
+        return self.user_config_dir
+
+    @property
+    def user_cache_dir(self) -> str:
+        """:return: cache directory tied to the user, e.g. e.g. ``/data/user///cache/``"""
+        return self._append_app_name_and_version(cast(str, _android_folder()), "cache")
+
+    @property
+    def site_cache_dir(self) -> str:
+        """:return: cache directory shared by users, same as `user_cache_dir`"""
+        return self.user_cache_dir
+
+    @property
+    def user_state_dir(self) -> str:
+        """:return: state directory tied to the user, same as `user_data_dir`"""
+        return self.user_data_dir
+
+    @property
+    def user_log_dir(self) -> str:
+        """
+        :return: log directory tied to the user, same as `user_cache_dir` if not opinionated else ``log`` in it,
+          e.g. ``/data/user///cache//log``
+        """
+        path = self.user_cache_dir
+        if self.opinion:
+            path = os.path.join(path, "log")  # noqa: PTH118
+        return path
+
+    @property
+    def user_documents_dir(self) -> str:
+        """:return: documents directory tied to the user e.g. ``/storage/emulated/0/Documents``"""
+        return _android_documents_folder()
+
+    @property
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user e.g. ``/storage/emulated/0/Downloads``"""
+        return _android_downloads_folder()
+
+    @property
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user e.g. ``/storage/emulated/0/Pictures``"""
+        return _android_pictures_folder()
+
+    @property
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user e.g. ``/storage/emulated/0/DCIM/Camera``"""
+        return _android_videos_folder()
+
+    @property
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user e.g. ``/storage/emulated/0/Music``"""
+        return _android_music_folder()
+
+    @property
+    def user_runtime_dir(self) -> str:
+        """
+        :return: runtime directory tied to the user, same as `user_cache_dir` if not opinionated else ``tmp`` in it,
+          e.g. ``/data/user///cache//tmp``
+        """
+        path = self.user_cache_dir
+        if self.opinion:
+            path = os.path.join(path, "tmp")  # noqa: PTH118
+        return path
+
+
+@lru_cache(maxsize=1)
+def _android_folder() -> str | None:
+    """:return: base folder for the Android OS or None if it cannot be found"""
+    try:
+        # First try to get path to android app via pyjnius
+        from jnius import autoclass
+
+        context = autoclass("android.content.Context")
+        result: str | None = context.getFilesDir().getParentFile().getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        # if that fails, look for an Android-style app data folder on sys.path
+        pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files")
+        for path in sys.path:
+            if pattern.match(path):
+                result = path.split("/files")[0]
+                break
+        else:
+            result = None
+    return result
+
+
+@lru_cache(maxsize=1)
+def _android_documents_folder() -> str:
+    """:return: documents folder for the Android OS"""
+    # Get directories with pyjnius
+    try:
+        from jnius import autoclass
+
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        documents_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOCUMENTS).getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        documents_dir = "/storage/emulated/0/Documents"
+
+    return documents_dir
+
+
+@lru_cache(maxsize=1)
+def _android_downloads_folder() -> str:
+    """:return: downloads folder for the Android OS"""
+    # Get directories with pyjnius
+    try:
+        from jnius import autoclass
+
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        downloads_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOWNLOADS).getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        downloads_dir = "/storage/emulated/0/Downloads"
+
+    return downloads_dir
+
+
+@lru_cache(maxsize=1)
+def _android_pictures_folder() -> str:
+    """:return: pictures folder for the Android OS"""
+    # Get directories with pyjnius
+    try:
+        from jnius import autoclass
+
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        pictures_dir: str = context.getExternalFilesDir(environment.DIRECTORY_PICTURES).getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        pictures_dir = "/storage/emulated/0/Pictures"
+
+    return pictures_dir
+
+
+@lru_cache(maxsize=1)
+def _android_videos_folder() -> str:
+    """:return: videos folder for the Android OS"""
+    # Get directories with pyjnius
+    try:
+        from jnius import autoclass
+
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        videos_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DCIM).getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        videos_dir = "/storage/emulated/0/DCIM/Camera"
+
+    return videos_dir
+
+
+@lru_cache(maxsize=1)
+def _android_music_folder() -> str:
+    """:return: music folder for the Android OS"""
+    # Get directories with pyjnius
+    try:
+        from jnius import autoclass
+
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        music_dir: str = context.getExternalFilesDir(environment.DIRECTORY_MUSIC).getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        music_dir = "/storage/emulated/0/Music"
+
+    return music_dir
+
+
+__all__ = [
+    "Android",
+]
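+
+
+# A minimal illustrative sketch (hypothetical package name): the sys.path fallback
+# in ``_android_folder`` recognises app-private paths shaped like the one below.
+if __name__ == "__main__":  # pragma: no cover - example only
+    candidate = "/data/user/0/com.example.myapp/files"
+    if re.compile(r"/data/(data|user/\d+)/(.+)/files").match(candidate):
+        print(candidate.split("/files")[0])  # /data/user/0/com.example.myapp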
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/api.py b/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/api.py
new file mode 100644
index 0000000..d64ebb9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/api.py
@@ -0,0 +1,223 @@
+"""Base API."""
+from __future__ import annotations
+
+import os
+from abc import ABC, abstractmethod
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    import sys
+
+    if sys.version_info >= (3, 8):  # pragma: no cover (py38+)
+        from typing import Literal
+    else:  # pragma: no cover (py38+)
+        from pip._vendor.typing_extensions import Literal
+
+
+class PlatformDirsABC(ABC):
+    """Abstract base class for platform directories."""
+
+    def __init__(  # noqa: PLR0913
+        self,
+        appname: str | None = None,
+        appauthor: str | None | Literal[False] = None,
+        version: str | None = None,
+        roaming: bool = False,  # noqa: FBT001, FBT002
+        multipath: bool = False,  # noqa: FBT001, FBT002
+        opinion: bool = True,  # noqa: FBT001, FBT002
+        ensure_exists: bool = False,  # noqa: FBT001, FBT002
+    ) -> None:
+        """
+        Create a new platform directory.
+
+        :param appname: See `appname`.
+        :param appauthor: See `appauthor`.
+        :param version: See `version`.
+        :param roaming: See `roaming`.
+        :param multipath: See `multipath`.
+        :param opinion: See `opinion`.
+        :param ensure_exists: See `ensure_exists`.
+        """
+        self.appname = appname  #: The name of the application.
+        self.appauthor = appauthor
+        """
+        The name of the app author or distributing body for this application. Typically, it is the owning company name.
+        Defaults to `appname`. You may pass ``False`` to disable it.
+        """
+        self.version = version
+        """
+        An optional version path element to append to the path. You might want to use this if you want multiple versions
+        of your app to be able to run independently. If used, this would typically be ``<major>.<minor>``.
+        """
+        self.roaming = roaming
+        """
+        Whether to use the roaming appdata directory on Windows. That means that for users on a Windows network setup
+        for roaming profiles, this user data will be synced on login (see
+        `here `_).
+        """
+        self.multipath = multipath
+        """
+        An optional parameter only applicable to Unix/Linux which indicates that the entire list of data dirs should be
+        returned. By default, only the first item is returned.
+        """
+        self.opinion = opinion  #: A flag indicating whether to use opinionated values.
+        self.ensure_exists = ensure_exists
+        """
+        Optionally create the directory (and any missing parents) upon access if it does not exist.
+        By default, no directories are created.
+        """
+
+    def _append_app_name_and_version(self, *base: str) -> str:
+        params = list(base[1:])
+        if self.appname:
+            params.append(self.appname)
+            if self.version:
+                params.append(self.version)
+        path = os.path.join(base[0], *params)  # noqa: PTH118
+        self._optionally_create_directory(path)
+        return path
+
+    def _optionally_create_directory(self, path: str) -> None:
+        if self.ensure_exists:
+            Path(path).mkdir(parents=True, exist_ok=True)
+
+    @property
+    @abstractmethod
+    def user_data_dir(self) -> str:
+        """:return: data directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def site_data_dir(self) -> str:
+        """:return: data directory shared by users"""
+
+    @property
+    @abstractmethod
+    def user_config_dir(self) -> str:
+        """:return: config directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def site_config_dir(self) -> str:
+        """:return: config directory shared by the users"""
+
+    @property
+    @abstractmethod
+    def user_cache_dir(self) -> str:
+        """:return: cache directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def site_cache_dir(self) -> str:
+        """:return: cache directory shared by users"""
+
+    @property
+    @abstractmethod
+    def user_state_dir(self) -> str:
+        """:return: state directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_log_dir(self) -> str:
+        """:return: log directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_documents_dir(self) -> str:
+        """:return: documents directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_runtime_dir(self) -> str:
+        """:return: runtime directory tied to the user"""
+
+    @property
+    def user_data_path(self) -> Path:
+        """:return: data path tied to the user"""
+        return Path(self.user_data_dir)
+
+    @property
+    def site_data_path(self) -> Path:
+        """:return: data path shared by users"""
+        return Path(self.site_data_dir)
+
+    @property
+    def user_config_path(self) -> Path:
+        """:return: config path tied to the user"""
+        return Path(self.user_config_dir)
+
+    @property
+    def site_config_path(self) -> Path:
+        """:return: config path shared by the users"""
+        return Path(self.site_config_dir)
+
+    @property
+    def user_cache_path(self) -> Path:
+        """:return: cache path tied to the user"""
+        return Path(self.user_cache_dir)
+
+    @property
+    def site_cache_path(self) -> Path:
+        """:return: cache path shared by users"""
+        return Path(self.site_cache_dir)
+
+    @property
+    def user_state_path(self) -> Path:
+        """:return: state path tied to the user"""
+        return Path(self.user_state_dir)
+
+    @property
+    def user_log_path(self) -> Path:
+        """:return: log path tied to the user"""
+        return Path(self.user_log_dir)
+
+    @property
+    def user_documents_path(self) -> Path:
+        """:return: documents path tied to the user"""
+        return Path(self.user_documents_dir)
+
+    @property
+    def user_downloads_path(self) -> Path:
+        """:return: downloads path tied to the user"""
+        return Path(self.user_downloads_dir)
+
+    @property
+    def user_pictures_path(self) -> Path:
+        """:return: pictures path tied to the user"""
+        return Path(self.user_pictures_dir)
+
+    @property
+    def user_videos_path(self) -> Path:
+        """:return: videos path tied to the user"""
+        return Path(self.user_videos_dir)
+
+    @property
+    def user_music_path(self) -> Path:
+        """:return: music path tied to the user"""
+        return Path(self.user_music_dir)
+
+    @property
+    def user_runtime_path(self) -> Path:
+        """:return: runtime path tied to the user"""
+        return Path(self.user_runtime_dir)
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/macos.py b/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/macos.py
new file mode 100644
index 0000000..a753e2a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/macos.py
@@ -0,0 +1,91 @@
+"""macOS."""
+from __future__ import annotations
+
+import os.path
+
+from .api import PlatformDirsABC
+
+
+class MacOS(PlatformDirsABC):
+    """
+    Platform directories for the macOS operating system. Follows the guidance from `Apple documentation
+    `_.
+    Makes use of the `appname `,
+    `version `,
+    `ensure_exists `.
+    """
+
+    @property
+    def user_data_dir(self) -> str:
+        """:return: data directory tied to the user, e.g. ``~/Library/Application Support/$appname/$version``"""
+        return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support"))  # noqa: PTH111
+
+    @property
+    def site_data_dir(self) -> str:
+        """:return: data directory shared by users, e.g. ``/Library/Application Support/$appname/$version``"""
+        return self._append_app_name_and_version("/Library/Application Support")
+
+    @property
+    def user_config_dir(self) -> str:
+        """:return: config directory tied to the user, same as `user_data_dir`"""
+        return self.user_data_dir
+
+    @property
+    def site_config_dir(self) -> str:
+        """:return: config directory shared by the users, same as `site_data_dir`"""
+        return self.site_data_dir
+
+    @property
+    def user_cache_dir(self) -> str:
+        """:return: cache directory tied to the user, e.g. ``~/Library/Caches/$appname/$version``"""
+        return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches"))  # noqa: PTH111
+
+    @property
+    def site_cache_dir(self) -> str:
+        """:return: cache directory shared by users, e.g. ``/Library/Caches/$appname/$version``"""
+        return self._append_app_name_and_version("/Library/Caches")
+
+    @property
+    def user_state_dir(self) -> str:
+        """:return: state directory tied to the user, same as `user_data_dir`"""
+        return self.user_data_dir
+
+    @property
+    def user_log_dir(self) -> str:
+        """:return: log directory tied to the user, e.g. ``~/Library/Logs/$appname/$version``"""
+        return self._append_app_name_and_version(os.path.expanduser("~/Library/Logs"))  # noqa: PTH111
+
+    @property
+    def user_documents_dir(self) -> str:
+        """:return: documents directory tied to the user, e.g. ``~/Documents``"""
+        return os.path.expanduser("~/Documents")  # noqa: PTH111
+
+    @property
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user, e.g. ``~/Downloads``"""
+        return os.path.expanduser("~/Downloads")  # noqa: PTH111
+
+    @property
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user, e.g. ``~/Pictures``"""
+        return os.path.expanduser("~/Pictures")  # noqa: PTH111
+
+    @property
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user, e.g. ``~/Movies``"""
+        return os.path.expanduser("~/Movies")  # noqa: PTH111
+
+    @property
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user, e.g. ``~/Music``"""
+        return os.path.expanduser("~/Music")  # noqa: PTH111
+
+    @property
+    def user_runtime_dir(self) -> str:
+        """:return: runtime directory tied to the user, e.g. ``~/Library/Caches/TemporaryItems/$appname/$version``"""
+        return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches/TemporaryItems"))  # noqa: PTH111
+
+
+__all__ = [
+    "MacOS",
+]
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/unix.py b/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/unix.py
new file mode 100644
index 0000000..468b0ab
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/unix.py
@@ -0,0 +1,223 @@
+"""Unix."""
+from __future__ import annotations
+
+import os
+import sys
+from configparser import ConfigParser
+from pathlib import Path
+
+from .api import PlatformDirsABC
+
+if sys.platform == "win32":
+
+    def getuid() -> int:
+        msg = "should only be used on Unix"
+        raise RuntimeError(msg)
+
+else:
+    from os import getuid
+
+
+class Unix(PlatformDirsABC):
+    """
+    On Unix/Linux, we follow the
+    `XDG Basedir Spec `_. The spec allows
+    overriding directories with environment variables. The examples shown are the default values, alongside the name of
+    the environment variable that overrides them. Makes use of the
+    `appname `,
+    `version `,
+    `multipath `,
+    `opinion `,
+    `ensure_exists `.
+    """
+
+    @property
+    def user_data_dir(self) -> str:
+        """
+        :return: data directory tied to the user, e.g. ``~/.local/share/$appname/$version`` or
+         ``$XDG_DATA_HOME/$appname/$version``
+        """
+        path = os.environ.get("XDG_DATA_HOME", "")
+        if not path.strip():
+            path = os.path.expanduser("~/.local/share")  # noqa: PTH111
+        return self._append_app_name_and_version(path)
+
+    @property
+    def site_data_dir(self) -> str:
+        """
+        :return: data directories shared by users (if `multipath ` is
+         enabled and ``XDG_DATA_DIRS`` is set to a multi path, the response is also a multi path separated by the OS
+         path separator), e.g. ``/usr/local/share/$appname/$version`` or ``/usr/share/$appname/$version``
+        """
+        # XDG default for $XDG_DATA_DIRS; only first, if multipath is False
+        path = os.environ.get("XDG_DATA_DIRS", "")
+        if not path.strip():
+            path = f"/usr/local/share{os.pathsep}/usr/share"
+        return self._with_multi_path(path)
+
+    def _with_multi_path(self, path: str) -> str:
+        path_list = path.split(os.pathsep)
+        if not self.multipath:
+            path_list = path_list[0:1]
+        path_list = [self._append_app_name_and_version(os.path.expanduser(p)) for p in path_list]  # noqa: PTH111
+        return os.pathsep.join(path_list)
+
+    @property
+    def user_config_dir(self) -> str:
+        """
+        :return: config directory tied to the user, e.g. ``~/.config/$appname/$version`` or
+         ``$XDG_CONFIG_HOME/$appname/$version``
+        """
+        path = os.environ.get("XDG_CONFIG_HOME", "")
+        if not path.strip():
+            path = os.path.expanduser("~/.config")  # noqa: PTH111
+        return self._append_app_name_and_version(path)
+
+    @property
+    def site_config_dir(self) -> str:
+        """
+        :return: config directories shared by users (if `multipath `
+         is enabled and ``XDG_CONFIG_DIRS`` is set to a multi path, the response is also a multi path separated by the OS
+         path separator), e.g. ``/etc/xdg/$appname/$version``
+        """
+        # XDG default for $XDG_CONFIG_DIRS only first, if multipath is False
+        path = os.environ.get("XDG_CONFIG_DIRS", "")
+        if not path.strip():
+            path = "/etc/xdg"
+        return self._with_multi_path(path)
+
+    @property
+    def user_cache_dir(self) -> str:
+        """
+        :return: cache directory tied to the user, e.g. ``~/.cache/$appname/$version`` or
+         ``$XDG_CACHE_HOME/$appname/$version``
+        """
+        path = os.environ.get("XDG_CACHE_HOME", "")
+        if not path.strip():
+            path = os.path.expanduser("~/.cache")  # noqa: PTH111
+        return self._append_app_name_and_version(path)
+
+    @property
+    def site_cache_dir(self) -> str:
+        """:return: cache directory shared by users, e.g. ``/var/tmp/$appname/$version``"""
+        return self._append_app_name_and_version("/var/tmp")  # noqa: S108
+
+    @property
+    def user_state_dir(self) -> str:
+        """
+        :return: state directory tied to the user, e.g. ``~/.local/state/$appname/$version`` or
+         ``$XDG_STATE_HOME/$appname/$version``
+        """
+        path = os.environ.get("XDG_STATE_HOME", "")
+        if not path.strip():
+            path = os.path.expanduser("~/.local/state")  # noqa: PTH111
+        return self._append_app_name_and_version(path)
+
+    @property
+    def user_log_dir(self) -> str:
+        """:return: log directory tied to the user, same as `user_state_dir` if not opinionated else ``log`` in it"""
+        path = self.user_state_dir
+        if self.opinion:
+            path = os.path.join(path, "log")  # noqa: PTH118
+        return path
+
+    @property
+    def user_documents_dir(self) -> str:
+        """:return: documents directory tied to the user, e.g. ``~/Documents``"""
+        return _get_user_media_dir("XDG_DOCUMENTS_DIR", "~/Documents")
+
+    @property
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user, e.g. ``~/Downloads``"""
+        return _get_user_media_dir("XDG_DOWNLOAD_DIR", "~/Downloads")
+
+    @property
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user, e.g. ``~/Pictures``"""
+        return _get_user_media_dir("XDG_PICTURES_DIR", "~/Pictures")
+
+    @property
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user, e.g. ``~/Videos``"""
+        return _get_user_media_dir("XDG_VIDEOS_DIR", "~/Videos")
+
+    @property
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user, e.g. ``~/Music``"""
+        return _get_user_media_dir("XDG_MUSIC_DIR", "~/Music")
+
+    @property
+    def user_runtime_dir(self) -> str:
+        """
+        :return: runtime directory tied to the user, e.g. ``/run/user/$(id -u)/$appname/$version`` or
+         ``$XDG_RUNTIME_DIR/$appname/$version``.
+
+         For FreeBSD/OpenBSD/NetBSD, it would return ``/var/run/user/$(id -u)/$appname/$version`` if
+         it exists, otherwise ``/tmp/runtime-$(id -u)/$appname/$version``, if ``$XDG_RUNTIME_DIR``
+         is not set.
+        """
+        path = os.environ.get("XDG_RUNTIME_DIR", "")
+        if not path.strip():
+            if sys.platform.startswith(("freebsd", "openbsd", "netbsd")):
+                path = f"/var/run/user/{getuid()}"
+                if not Path(path).exists():
+                    path = f"/tmp/runtime-{getuid()}"  # noqa: S108
+            else:
+                path = f"/run/user/{getuid()}"
+        return self._append_app_name_and_version(path)
+
+    @property
+    def site_data_path(self) -> Path:
+        """:return: data path shared by users. Only return first item, even if ``multipath`` is set to ``True``"""
+        return self._first_item_as_path_if_multipath(self.site_data_dir)
+
+    @property
+    def site_config_path(self) -> Path:
+        """:return: config path shared by the users. Only return first item, even if ``multipath`` is set to ``True``"""
+        return self._first_item_as_path_if_multipath(self.site_config_dir)
+
+    @property
+    def site_cache_path(self) -> Path:
+        """:return: cache path shared by users. Only return first item, even if ``multipath`` is set to ``True``"""
+        return self._first_item_as_path_if_multipath(self.site_cache_dir)
+
+    def _first_item_as_path_if_multipath(self, directory: str) -> Path:
+        if self.multipath:
+            # If multipath is True, the first path is returned.
+            directory = directory.split(os.pathsep)[0]
+        return Path(directory)
+
+
+def _get_user_media_dir(env_var: str, fallback_tilde_path: str) -> str:
+    media_dir = _get_user_dirs_folder(env_var)
+    if media_dir is None:
+        media_dir = os.environ.get(env_var, "").strip()
+        if not media_dir:
+            media_dir = os.path.expanduser(fallback_tilde_path)  # noqa: PTH111
+
+    return media_dir
+
+
+def _get_user_dirs_folder(key: str) -> str | None:
+    """Return directory from user-dirs.dirs config file. See https://freedesktop.org/wiki/Software/xdg-user-dirs/."""
+    user_dirs_config_path = Path(Unix().user_config_dir) / "user-dirs.dirs"
+    if user_dirs_config_path.exists():
+        parser = ConfigParser()
+
+        with user_dirs_config_path.open() as stream:
+            # Add fake section header, so ConfigParser doesn't complain
+            parser.read_string(f"[top]\n{stream.read()}")
+
+        if key not in parser["top"]:
+            return None
+
+        path = parser["top"][key].strip('"')
+        # Handle relative home paths
+        return path.replace("$HOME", os.path.expanduser("~"))  # noqa: PTH111
+
+    return None
+
+
+__all__ = [
+    "Unix",
+]
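+
+
+# A minimal illustrative sketch (hypothetical file content): ``_get_user_dirs_folder``
+# makes the sectionless ``user-dirs.dirs`` file digestible for ConfigParser by
+# prepending a fake ``[top]`` header.
+if __name__ == "__main__":  # pragma: no cover - example only
+    sample = 'XDG_DOWNLOAD_DIR="$HOME/Downloads"\n'
+    parser = ConfigParser()
+    parser.read_string(f"[top]\n{sample}")
+    raw = parser["top"]["XDG_DOWNLOAD_DIR"].strip('"')
+    print(raw.replace("$HOME", os.path.expanduser("~")))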
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/version.py b/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/version.py
new file mode 100644
index 0000000..dc8c44c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/version.py
@@ -0,0 +1,4 @@
+# file generated by setuptools_scm
+# don't change, don't track in version control
+__version__ = version = '3.8.1'
+__version_tuple__ = version_tuple = (3, 8, 1)
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/windows.py b/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/windows.py
new file mode 100644
index 0000000..b52c9c6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/windows.py
@@ -0,0 +1,255 @@
+"""Windows."""
+from __future__ import annotations
+
+import ctypes
+import os
+import sys
+from functools import lru_cache
+from typing import TYPE_CHECKING
+
+from .api import PlatformDirsABC
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+
+
+class Windows(PlatformDirsABC):
+    """
+    `MSDN on where to store app data files
+    `_.
+    Makes use of the
+    `appname `,
+    `appauthor `,
+    `version `,
+    `roaming `,
+    `opinion `,
+    `ensure_exists `.
+    """
+
+    @property
+    def user_data_dir(self) -> str:
+        """
+        :return: data directory tied to the user, e.g.
+         ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname`` (not roaming) or
+         ``%USERPROFILE%\\AppData\\Roaming\\$appauthor\\$appname`` (roaming)
+        """
+        const = "CSIDL_APPDATA" if self.roaming else "CSIDL_LOCAL_APPDATA"
+        path = os.path.normpath(get_win_folder(const))
+        return self._append_parts(path)
+
+    def _append_parts(self, path: str, *, opinion_value: str | None = None) -> str:
+        params = []
+        if self.appname:
+            if self.appauthor is not False:
+                author = self.appauthor or self.appname
+                params.append(author)
+            params.append(self.appname)
+            if opinion_value is not None and self.opinion:
+                params.append(opinion_value)
+            if self.version:
+                params.append(self.version)
+        path = os.path.join(path, *params)  # noqa: PTH118
+        self._optionally_create_directory(path)
+        return path
+
+    @property
+    def site_data_dir(self) -> str:
+        """:return: data directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname``"""
+        path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
+        return self._append_parts(path)
+
+    @property
+    def user_config_dir(self) -> str:
+        """:return: config directory tied to the user, same as `user_data_dir`"""
+        return self.user_data_dir
+
+    @property
+    def site_config_dir(self) -> str:
+        """:return: config directory shared by the users, same as `site_data_dir`"""
+        return self.site_data_dir
+
+    @property
+    def user_cache_dir(self) -> str:
+        """
+        :return: cache directory tied to the user (if opinionated with ``Cache`` folder within ``$appname``) e.g.
+         ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname\\Cache\\$version``
+        """
+        path = os.path.normpath(get_win_folder("CSIDL_LOCAL_APPDATA"))
+        return self._append_parts(path, opinion_value="Cache")
+
+    @property
+    def site_cache_dir(self) -> str:
+        """:return: cache directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname\\Cache\\$version``"""
+        path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
+        return self._append_parts(path, opinion_value="Cache")
+
+    @property
+    def user_state_dir(self) -> str:
+        """:return: state directory tied to the user, same as `user_data_dir`"""
+        return self.user_data_dir
+
+    @property
+    def user_log_dir(self) -> str:
+        """:return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``Logs`` in it"""
+        path = self.user_data_dir
+        if self.opinion:
+            path = os.path.join(path, "Logs")  # noqa: PTH118
+            self._optionally_create_directory(path)
+        return path
+
+    @property
+    def user_documents_dir(self) -> str:
+        """:return: documents directory tied to the user e.g. ``%USERPROFILE%\\Documents``"""
+        return os.path.normpath(get_win_folder("CSIDL_PERSONAL"))
+
+    @property
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user e.g. ``%USERPROFILE%\\Downloads``"""
+        return os.path.normpath(get_win_folder("CSIDL_DOWNLOADS"))
+
+    @property
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user e.g. ``%USERPROFILE%\\Pictures``"""
+        return os.path.normpath(get_win_folder("CSIDL_MYPICTURES"))
+
+    @property
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user e.g. ``%USERPROFILE%\\Videos``"""
+        return os.path.normpath(get_win_folder("CSIDL_MYVIDEO"))
+
+    @property
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user e.g. ``%USERPROFILE%\\Music``"""
+        return os.path.normpath(get_win_folder("CSIDL_MYMUSIC"))
+
+    @property
+    def user_runtime_dir(self) -> str:
+        """
+        :return: runtime directory tied to the user, e.g.
+         ``%USERPROFILE%\\AppData\\Local\\Temp\\$appauthor\\$appname``
+        """
+        path = os.path.normpath(os.path.join(get_win_folder("CSIDL_LOCAL_APPDATA"), "Temp"))  # noqa: PTH118
+        return self._append_parts(path)
+
+
+def get_win_folder_from_env_vars(csidl_name: str) -> str:
+    """Get folder from environment variables."""
+    result = get_win_folder_if_csidl_name_not_env_var(csidl_name)
+    if result is not None:
+        return result
+
+    env_var_name = {
+        "CSIDL_APPDATA": "APPDATA",
+        "CSIDL_COMMON_APPDATA": "ALLUSERSPROFILE",
+        "CSIDL_LOCAL_APPDATA": "LOCALAPPDATA",
+    }.get(csidl_name)
+    if env_var_name is None:
+        msg = f"Unknown CSIDL name: {csidl_name}"
+        raise ValueError(msg)
+    result = os.environ.get(env_var_name)
+    if result is None:
+        msg = f"Unset environment variable: {env_var_name}"
+        raise ValueError(msg)
+    return result
+
+
+def get_win_folder_if_csidl_name_not_env_var(csidl_name: str) -> str | None:
+    """Get folder for a CSIDL name that does not exist as an environment variable."""
+    if csidl_name == "CSIDL_PERSONAL":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Documents")  # noqa: PTH118
+
+    if csidl_name == "CSIDL_DOWNLOADS":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Downloads")  # noqa: PTH118
+
+    if csidl_name == "CSIDL_MYPICTURES":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Pictures")  # noqa: PTH118
+
+    if csidl_name == "CSIDL_MYVIDEO":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Videos")  # noqa: PTH118
+
+    if csidl_name == "CSIDL_MYMUSIC":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Music")  # noqa: PTH118
+    return None
+
+
+def get_win_folder_from_registry(csidl_name: str) -> str:
+    """
+    Get folder from the registry.
+
+    This is a fallback technique at best. I'm not sure if using the registry for these guarantees us the correct answer
+    for all CSIDL_* names.
+    """
+    shell_folder_name = {
+        "CSIDL_APPDATA": "AppData",
+        "CSIDL_COMMON_APPDATA": "Common AppData",
+        "CSIDL_LOCAL_APPDATA": "Local AppData",
+        "CSIDL_PERSONAL": "Personal",
+        "CSIDL_DOWNLOADS": "{374DE290-123F-4565-9164-39C4925E467B}",
+        "CSIDL_MYPICTURES": "My Pictures",
+        "CSIDL_MYVIDEO": "My Video",
+        "CSIDL_MYMUSIC": "My Music",
+    }.get(csidl_name)
+    if shell_folder_name is None:
+        msg = f"Unknown CSIDL name: {csidl_name}"
+        raise ValueError(msg)
+    if sys.platform != "win32":  # only needed for mypy type checker to know that this code runs only on Windows
+        raise NotImplementedError
+    import winreg
+
+    key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders")
+    directory, _ = winreg.QueryValueEx(key, shell_folder_name)
+    return str(directory)
+
+
+def get_win_folder_via_ctypes(csidl_name: str) -> str:
+    """Get folder with ctypes."""
+    # There is no 'CSIDL_DOWNLOADS'.
+    # Use 'CSIDL_PROFILE' (40) and append the default folder 'Downloads' instead.
+    # https://learn.microsoft.com/en-us/windows/win32/shell/knownfolderid
+
+    csidl_const = {
+        "CSIDL_APPDATA": 26,
+        "CSIDL_COMMON_APPDATA": 35,
+        "CSIDL_LOCAL_APPDATA": 28,
+        "CSIDL_PERSONAL": 5,
+        "CSIDL_MYPICTURES": 39,
+        "CSIDL_MYVIDEO": 14,
+        "CSIDL_MYMUSIC": 13,
+        "CSIDL_DOWNLOADS": 40,
+    }.get(csidl_name)
+    if csidl_const is None:
+        msg = f"Unknown CSIDL name: {csidl_name}"
+        raise ValueError(msg)
+
+    buf = ctypes.create_unicode_buffer(1024)
+    windll = getattr(ctypes, "windll")  # noqa: B009 # using getattr to avoid false positive with mypy type checker
+    windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
+
+    # Downgrade to short path name if it has highbit chars.
+    if any(ord(c) > 255 for c in buf):  # noqa: PLR2004
+        buf2 = ctypes.create_unicode_buffer(1024)
+        if windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
+            buf = buf2
+
+    if csidl_name == "CSIDL_DOWNLOADS":
+        return os.path.join(buf.value, "Downloads")  # noqa: PTH118
+
+    return buf.value
+
+
+def _pick_get_win_folder() -> Callable[[str], str]:
+    if hasattr(ctypes, "windll"):
+        return get_win_folder_via_ctypes
+    try:
+        import winreg  # noqa: F401
+    except ImportError:
+        return get_win_folder_from_env_vars
+    else:
+        return get_win_folder_from_registry
+
+
+get_win_folder = lru_cache(maxsize=None)(_pick_get_win_folder())
+
+__all__ = [
+    "Windows",
+]
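+
+
+# A minimal illustrative sketch: ``get_win_folder`` is picked once at import time
+# (ctypes on Windows, else winreg, else environment variables) and memoised with
+# ``lru_cache``; printing its ``__name__`` reveals which strategy was chosen.
+if __name__ == "__main__":  # pragma: no cover - example only
+    print(get_win_folder.__name__)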
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/__init__.py
new file mode 100644
index 0000000..39c84aa
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/__init__.py
@@ -0,0 +1,82 @@
+"""
+    Pygments
+    ~~~~~~~~
+
+    Pygments is a syntax highlighting package written in Python.
+
+    It is a generic syntax highlighter for general use in all kinds of software
+    such as forum systems, wikis or other applications that need to prettify
+    source code. Highlights are:
+
+    * a wide range of common languages and markup formats is supported
+    * special attention is paid to details, increasing quality by a fair amount
+    * support for new languages and formats are added easily
+    * a number of output formats, presently HTML, LaTeX, RTF, SVG, all image
+      formats that PIL supports, and ANSI sequences
+    * it is usable as a command-line tool and as a library
+    * ... and it highlights even Brainfuck!
+
+    The `Pygments master branch`_ is installable with ``easy_install Pygments==dev``.
+
+    .. _Pygments master branch:
+       https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev
+
+    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+from io import StringIO, BytesIO
+
+__version__ = '2.15.1'
+__docformat__ = 'restructuredtext'
+
+__all__ = ['lex', 'format', 'highlight']
+
+
+def lex(code, lexer):
+    """
+    Lex `code` with the `lexer` (must be a `Lexer` instance)
+    and return an iterable of tokens. Currently, this only calls
+    `lexer.get_tokens()`.
+    """
+    try:
+        return lexer.get_tokens(code)
+    except TypeError:
+        # Heuristic to catch a common mistake.
+        from pip._vendor.pygments.lexer import RegexLexer
+        if isinstance(lexer, type) and issubclass(lexer, RegexLexer):
+            raise TypeError('lex() argument must be a lexer instance, '
+                            'not a class')
+        raise
+
+
+def format(tokens, formatter, outfile=None):  # pylint: disable=redefined-builtin
+    """
+    Format ``tokens`` (an iterable of tokens) with the formatter ``formatter``
+    (a `Formatter` instance).
+
+    If ``outfile`` is given and a valid file object (an object with a
+    ``write`` method), the result will be written to it, otherwise it
+    is returned as a string.
+    """
+    try:
+        if not outfile:
+            realoutfile = getattr(formatter, 'encoding', None) and BytesIO() or StringIO()
+            formatter.format(tokens, realoutfile)
+            return realoutfile.getvalue()
+        else:
+            formatter.format(tokens, outfile)
+    except TypeError:
+        # Heuristic to catch a common mistake.
+        from pip._vendor.pygments.formatter import Formatter
+        if isinstance(formatter, type) and issubclass(formatter, Formatter):
+            raise TypeError('format() argument must be a formatter instance, '
+                            'not a class')
+        raise
+
+
+def highlight(code, lexer, formatter, outfile=None):
+    """
+    This is the most high-level highlighting function. It combines `lex` and
+    `format` in one function.
+    """
+    return format(lex(code, lexer), formatter, outfile)
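+
+
+# A minimal illustrative sketch: ``highlight`` is just ``format(lex(code, lexer),
+# formatter, outfile)``; with no ``outfile`` the formatted text is returned as a string.
+if __name__ == "__main__":  # pragma: no cover - example only
+    from pip._vendor.pygments.lexers.special import TextLexer
+    from pip._vendor.pygments.formatters.terminal import TerminalFormatter
+    print(highlight("hello world\n", TextLexer(), TerminalFormatter()))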
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/__main__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/__main__.py
new file mode 100644
index 0000000..2f7f8cb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/__main__.py
@@ -0,0 +1,17 @@
+"""
+    pygments.__main__
+    ~~~~~~~~~~~~~~~~~
+
+    Main entry point for ``python -m pygments``.
+
+    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import sys
+from pip._vendor.pygments.cmdline import main
+
+try:
+    sys.exit(main(sys.argv))
+except KeyboardInterrupt:
+    sys.exit(1)
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/cmdline.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/cmdline.py
new file mode 100644
index 0000000..eec1775
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/cmdline.py
@@ -0,0 +1,668 @@
+"""
+    pygments.cmdline
+    ~~~~~~~~~~~~~~~~
+
+    Command line interface.
+
+    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import os
+import sys
+import shutil
+import argparse
+from textwrap import dedent
+
+from pip._vendor.pygments import __version__, highlight
+from pip._vendor.pygments.util import ClassNotFound, OptionError, docstring_headline, \
+    guess_decode, guess_decode_from_terminal, terminal_encoding, \
+    UnclosingTextIOWrapper
+from pip._vendor.pygments.lexers import get_all_lexers, get_lexer_by_name, guess_lexer, \
+    load_lexer_from_file, get_lexer_for_filename, find_lexer_class_for_filename
+from pip._vendor.pygments.lexers.special import TextLexer
+from pip._vendor.pygments.formatters.latex import LatexEmbeddedLexer, LatexFormatter
+from pip._vendor.pygments.formatters import get_all_formatters, get_formatter_by_name, \
+    load_formatter_from_file, get_formatter_for_filename, find_formatter_class
+from pip._vendor.pygments.formatters.terminal import TerminalFormatter
+from pip._vendor.pygments.formatters.terminal256 import Terminal256Formatter, TerminalTrueColorFormatter
+from pip._vendor.pygments.filters import get_all_filters, find_filter_class
+from pip._vendor.pygments.styles import get_all_styles, get_style_by_name
+
+
+def _parse_options(o_strs):
+    opts = {}
+    if not o_strs:
+        return opts
+    for o_str in o_strs:
+        if not o_str.strip():
+            continue
+        o_args = o_str.split(',')
+        for o_arg in o_args:
+            o_arg = o_arg.strip()
+            try:
+                o_key, o_val = o_arg.split('=', 1)
+                o_key = o_key.strip()
+                o_val = o_val.strip()
+            except ValueError:
+                opts[o_arg] = True
+            else:
+                opts[o_key] = o_val
+    return opts
+
+
+def _parse_filters(f_strs):
+    filters = []
+    if not f_strs:
+        return filters
+    for f_str in f_strs:
+        if ':' in f_str:
+            fname, fopts = f_str.split(':', 1)
+            filters.append((fname, _parse_options([fopts])))
+        else:
+            filters.append((f_str, {}))
+    return filters
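+
+
+# A minimal illustrative sketch (hypothetical option strings): the ``-O``/``-F``
+# formats accepted by the two helpers above.
+if __name__ == "__main__":  # pragma: no cover - example only
+    print(_parse_options(["style=monokai,linenos"]))   # {'style': 'monokai', 'linenos': True}
+    print(_parse_filters(["keywordcase:case=upper"]))  # [('keywordcase', {'case': 'upper'})]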
+
+
+def _print_help(what, name):
+    try:
+        if what == 'lexer':
+            cls = get_lexer_by_name(name)
+            print("Help on the %s lexer:" % cls.name)
+            print(dedent(cls.__doc__))
+        elif what == 'formatter':
+            cls = find_formatter_class(name)
+            print("Help on the %s formatter:" % cls.name)
+            print(dedent(cls.__doc__))
+        elif what == 'filter':
+            cls = find_filter_class(name)
+            print("Help on the %s filter:" % name)
+            print(dedent(cls.__doc__))
+        return 0
+    except (AttributeError, ValueError):
+        print("%s not found!" % what, file=sys.stderr)
+        return 1
+
+
+def _print_list(what):
+    if what == 'lexer':
+        print()
+        print("Lexers:")
+        print("~~~~~~~")
+
+        info = []
+        for fullname, names, exts, _ in get_all_lexers():
+            tup = (', '.join(names)+':', fullname,
+                   exts and '(filenames ' + ', '.join(exts) + ')' or '')
+            info.append(tup)
+        info.sort()
+        for i in info:
+            print(('* %s\n    %s %s') % i)
+
+    elif what == 'formatter':
+        print()
+        print("Formatters:")
+        print("~~~~~~~~~~~")
+
+        info = []
+        for cls in get_all_formatters():
+            doc = docstring_headline(cls)
+            tup = (', '.join(cls.aliases) + ':', doc, cls.filenames and
+                   '(filenames ' + ', '.join(cls.filenames) + ')' or '')
+            info.append(tup)
+        info.sort()
+        for i in info:
+            print(('* %s\n    %s %s') % i)
+
+    elif what == 'filter':
+        print()
+        print("Filters:")
+        print("~~~~~~~~")
+
+        for name in get_all_filters():
+            cls = find_filter_class(name)
+            print("* " + name + ':')
+            print("    %s" % docstring_headline(cls))
+
+    elif what == 'style':
+        print()
+        print("Styles:")
+        print("~~~~~~~")
+
+        for name in get_all_styles():
+            cls = get_style_by_name(name)
+            print("* " + name + ':')
+            print("    %s" % docstring_headline(cls))
+
+
+def _print_list_as_json(requested_items):
+    import json
+    result = {}
+    if 'lexer' in requested_items:
+        info = {}
+        for fullname, names, filenames, mimetypes in get_all_lexers():
+            info[fullname] = {
+                'aliases': names,
+                'filenames': filenames,
+                'mimetypes': mimetypes
+            }
+        result['lexers'] = info
+
+    if 'formatter' in requested_items:
+        info = {}
+        for cls in get_all_formatters():
+            doc = docstring_headline(cls)
+            info[cls.name] = {
+                'aliases': cls.aliases,
+                'filenames': cls.filenames,
+                'doc': doc
+            }
+        result['formatters'] = info
+
+    if 'filter' in requested_items:
+        info = {}
+        for name in get_all_filters():
+            cls = find_filter_class(name)
+            info[name] = {
+                'doc': docstring_headline(cls)
+            }
+        result['filters'] = info
+
+    if 'style' in requested_items:
+        info = {}
+        for name in get_all_styles():
+            cls = get_style_by_name(name)
+            info[name] = {
+                'doc': docstring_headline(cls)
+            }
+        result['styles'] = info
+
+    json.dump(result, sys.stdout)
+
+def main_inner(parser, argns):
+    if argns.help:
+        parser.print_help()
+        return 0
+
+    if argns.V:
+        print('Pygments version %s, (c) 2006-2023 by Georg Brandl, Matthäus '
+              'Chajdas and contributors.' % __version__)
+        return 0
+
+    def is_only_option(opt):
+        return not any(v for (k, v) in vars(argns).items() if k != opt)
+
+    # handle ``pygmentize -L``
+    if argns.L is not None:
+        arg_set = set()
+        for k, v in vars(argns).items():
+            if v:
+                arg_set.add(k)
+
+        arg_set.discard('L')
+        arg_set.discard('json')
+
+        if arg_set:
+            parser.print_help(sys.stderr)
+            return 2
+
+        # print version
+        if not argns.json:
+            main(['', '-V'])
+        allowed_types = {'lexer', 'formatter', 'filter', 'style'}
+        largs = [arg.rstrip('s') for arg in argns.L]
+        if any(arg not in allowed_types for arg in largs):
+            parser.print_help(sys.stderr)
+            return 0
+        if not largs:
+            largs = allowed_types
+        if not argns.json:
+            for arg in largs:
+                _print_list(arg)
+        else:
+            _print_list_as_json(largs)
+        return 0
+
+    # handle ``pygmentize -H``
+    if argns.H:
+        if not is_only_option('H'):
+            parser.print_help(sys.stderr)
+            return 2
+        what, name = argns.H
+        if what not in ('lexer', 'formatter', 'filter'):
+            parser.print_help(sys.stderr)
+            return 2
+        return _print_help(what, name)
+
+    # parse -O options
+    parsed_opts = _parse_options(argns.O or [])
+
+    # parse -P options
+    for p_opt in argns.P or []:
+        try:
+            name, value = p_opt.split('=', 1)
+        except ValueError:
+            parsed_opts[p_opt] = True
+        else:
+            parsed_opts[name] = value
+
+    # encodings
+    inencoding = parsed_opts.get('inencoding', parsed_opts.get('encoding'))
+    outencoding = parsed_opts.get('outencoding', parsed_opts.get('encoding'))
+
+    # handle ``pygmentize -N``
+    if argns.N:
+        lexer = find_lexer_class_for_filename(argns.N)
+        if lexer is None:
+            lexer = TextLexer
+
+        print(lexer.aliases[0])
+        return 0
+
+    # handle ``pygmentize -C``
+    if argns.C:
+        inp = sys.stdin.buffer.read()
+        try:
+            lexer = guess_lexer(inp, inencoding=inencoding)
+        except ClassNotFound:
+            lexer = TextLexer
+
+        print(lexer.aliases[0])
+        return 0
+
+    # handle ``pygmentize -S``
+    S_opt = argns.S
+    a_opt = argns.a
+    if S_opt is not None:
+        f_opt = argns.f
+        if not f_opt:
+            parser.print_help(sys.stderr)
+            return 2
+        if argns.l or argns.INPUTFILE:
+            parser.print_help(sys.stderr)
+            return 2
+
+        try:
+            parsed_opts['style'] = S_opt
+            fmter = get_formatter_by_name(f_opt, **parsed_opts)
+        except ClassNotFound as err:
+            print(err, file=sys.stderr)
+            return 1
+
+        print(fmter.get_style_defs(a_opt or ''))
+        return 0
+
+    # if no -S is given, -a is not allowed
+    if argns.a is not None:
+        parser.print_help(sys.stderr)
+        return 2
+
+    # parse -F options
+    F_opts = _parse_filters(argns.F or [])
+
+    # -x: allow custom (eXternal) lexers and formatters
+    allow_custom_lexer_formatter = bool(argns.x)
+
+    # select lexer
+    lexer = None
+
+    # given by name?
+    lexername = argns.l
+    if lexername:
+        # custom lexer, located relative to user's cwd
+        if allow_custom_lexer_formatter and '.py' in lexername:
+            try:
+                filename = None
+                name = None
+                if ':' in lexername:
+                    filename, name = lexername.rsplit(':', 1)
+
+                    if '.py' in name:
+                        # This can happen on Windows: If the lexername is
+                        # C:\lexer.py -- return to normal load path in that case
+                        name = None
+
+                if filename and name:
+                    lexer = load_lexer_from_file(filename, name,
+                                                 **parsed_opts)
+                else:
+                    lexer = load_lexer_from_file(lexername, **parsed_opts)
+            except ClassNotFound as err:
+                print('Error:', err, file=sys.stderr)
+                return 1
+        else:
+            try:
+                lexer = get_lexer_by_name(lexername, **parsed_opts)
+            except (OptionError, ClassNotFound) as err:
+                print('Error:', err, file=sys.stderr)
+                return 1
+
+    # read input code
+    code = None
+
+    if argns.INPUTFILE:
+        if argns.s:
+            print('Error: -s option not usable when input file specified',
+                  file=sys.stderr)
+            return 2
+
+        infn = argns.INPUTFILE
+        try:
+            with open(infn, 'rb') as infp:
+                code = infp.read()
+        except Exception as err:
+            print('Error: cannot read infile:', err, file=sys.stderr)
+            return 1
+        if not inencoding:
+            code, inencoding = guess_decode(code)
+
+        # do we have to guess the lexer?
+        if not lexer:
+            try:
+                lexer = get_lexer_for_filename(infn, code, **parsed_opts)
+            except ClassNotFound as err:
+                if argns.g:
+                    try:
+                        lexer = guess_lexer(code, **parsed_opts)
+                    except ClassNotFound:
+                        lexer = TextLexer(**parsed_opts)
+                else:
+                    print('Error:', err, file=sys.stderr)
+                    return 1
+            except OptionError as err:
+                print('Error:', err, file=sys.stderr)
+                return 1
+
+    elif not argns.s:  # treat stdin as full file (-s support is later)
+        # read code from terminal, always in binary mode since we want to
+        # decode ourselves and be tolerant with it
+        code = sys.stdin.buffer.read()  # use .buffer to get a binary stream
+        if not inencoding:
+            code, inencoding = guess_decode_from_terminal(code, sys.stdin)
+            # else the lexer will do the decoding
+        if not lexer:
+            try:
+                lexer = guess_lexer(code, **parsed_opts)
+            except ClassNotFound:
+                lexer = TextLexer(**parsed_opts)
+
+    else:  # -s option needs a lexer with -l
+        if not lexer:
+            print('Error: when using -s a lexer has to be selected with -l',
+                  file=sys.stderr)
+            return 2
+
+    # process filters
+    for fname, fopts in F_opts:
+        try:
+            lexer.add_filter(fname, **fopts)
+        except ClassNotFound as err:
+            print('Error:', err, file=sys.stderr)
+            return 1
+
+    # select formatter
+    outfn = argns.o
+    fmter = argns.f
+    if fmter:
+        # custom formatter, located relative to user's cwd
+        if allow_custom_lexer_formatter and '.py' in fmter:
+            try:
+                filename = None
+                name = None
+                if ':' in fmter:
+                    # Same logic as above for custom lexer
+                    filename, name = fmter.rsplit(':', 1)
+
+                    if '.py' in name:
+                        name = None
+
+                if filename and name:
+                    fmter = load_formatter_from_file(filename, name,
+                                                     **parsed_opts)
+                else:
+                    fmter = load_formatter_from_file(fmter, **parsed_opts)
+            except ClassNotFound as err:
+                print('Error:', err, file=sys.stderr)
+                return 1
+        else:
+            try:
+                fmter = get_formatter_by_name(fmter, **parsed_opts)
+            except (OptionError, ClassNotFound) as err:
+                print('Error:', err, file=sys.stderr)
+                return 1
+
+    if outfn:
+        if not fmter:
+            try:
+                fmter = get_formatter_for_filename(outfn, **parsed_opts)
+            except (OptionError, ClassNotFound) as err:
+                print('Error:', err, file=sys.stderr)
+                return 1
+        try:
+            outfile = open(outfn, 'wb')
+        except Exception as err:
+            print('Error: cannot open outfile:', err, file=sys.stderr)
+            return 1
+    else:
+        if not fmter:
+            if os.environ.get('COLORTERM', '') in ('truecolor', '24bit'):
+                fmter = TerminalTrueColorFormatter(**parsed_opts)
+            elif '256' in os.environ.get('TERM', ''):
+                fmter = Terminal256Formatter(**parsed_opts)
+            else:
+                fmter = TerminalFormatter(**parsed_opts)
+        outfile = sys.stdout.buffer
+
+    # determine output encoding if not explicitly selected
+    if not outencoding:
+        if outfn:
+            # output file? use lexer encoding for now (can still be None)
+            fmter.encoding = inencoding
+        else:
+            # else use terminal encoding
+            fmter.encoding = terminal_encoding(sys.stdout)
+
+    # provide coloring under Windows, if possible
+    if not outfn and sys.platform in ('win32', 'cygwin') and \
+       fmter.name in ('Terminal', 'Terminal256'):  # pragma: no cover
+        # unfortunately colorama doesn't support binary streams on Py3
+        outfile = UnclosingTextIOWrapper(outfile, encoding=fmter.encoding)
+        fmter.encoding = None
+        try:
+            import pip._vendor.colorama.initialise as colorama_initialise
+        except ImportError:
+            pass
+        else:
+            outfile = colorama_initialise.wrap_stream(
+                outfile, convert=None, strip=None, autoreset=False, wrap=True)
+
+    # When using the LaTeX formatter and the option `escapeinside` is
+    # specified, we need a special lexer which collects escaped text
+    # before running the chosen language lexer.
+    escapeinside = parsed_opts.get('escapeinside', '')
+    if len(escapeinside) == 2 and isinstance(fmter, LatexFormatter):
+        left = escapeinside[0]
+        right = escapeinside[1]
+        lexer = LatexEmbeddedLexer(left, right, lexer)
+
+    # ... and do it!
+    if not argns.s:
+        # process whole input as per normal...
+        try:
+            highlight(code, lexer, fmter, outfile)
+        finally:
+            if outfn:
+                outfile.close()
+        return 0
+    else:
+        # line by line processing of stdin (eg: for 'tail -f')...
+        try:
+            while 1:
+                line = sys.stdin.buffer.readline()
+                if not line:
+                    break
+                if not inencoding:
+                    line = guess_decode_from_terminal(line, sys.stdin)[0]
+                highlight(line, lexer, fmter, outfile)
+                if hasattr(outfile, 'flush'):
+                    outfile.flush()
+            return 0
+        except KeyboardInterrupt:  # pragma: no cover
+            return 0
+        finally:
+            if outfn:
+                outfile.close()
+
+
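# Illustrative sketch, not part of the vendored file: the selection logic above
# (lexer -> formatter -> highlight) corresponds to the public library API. Assumes
# a regular `pygments` install rather than this pip-vendored copy; the input
# filename is hypothetical.
#
#   from pygments import highlight
#   from pygments.lexers import get_lexer_by_name, guess_lexer
#   from pygments.formatters import TerminalFormatter
#
#   code = open('example.py').read()          # hypothetical input file
#   lexer = get_lexer_by_name('python')       # or guess_lexer(code), as with -g
#   print(highlight(code, lexer, TerminalFormatter()))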
+class HelpFormatter(argparse.HelpFormatter):
+    def __init__(self, prog, indent_increment=2, max_help_position=16, width=None):
+        if width is None:
+            try:
+                width = shutil.get_terminal_size().columns - 2
+            except Exception:
+                pass
+        argparse.HelpFormatter.__init__(self, prog, indent_increment,
+                                        max_help_position, width)
+
+
+def main(args=sys.argv):
+    """
+    Main command line entry point.
+    """
+    desc = "Highlight an input file and write the result to an output file."
+    parser = argparse.ArgumentParser(description=desc, add_help=False,
+                                     formatter_class=HelpFormatter)
+
+    operation = parser.add_argument_group('Main operation')
+    lexersel = operation.add_mutually_exclusive_group()
+    lexersel.add_argument(
+        '-l', metavar='LEXER',
+        help='Specify the lexer to use.  (Query names with -L.)  If not '
+        'given and -g is not present, the lexer is guessed from the filename.')
+    lexersel.add_argument(
+        '-g', action='store_true',
+        help='Guess the lexer from the file contents, or pass through '
+        'as plain text if nothing can be guessed.')
+    operation.add_argument(
+        '-F', metavar='FILTER[:options]', action='append',
+        help='Add a filter to the token stream.  (Query names with -L.) '
+        'Filter options are given after a colon if necessary.')
+    operation.add_argument(
+        '-f', metavar='FORMATTER',
+        help='Specify the formatter to use.  (Query names with -L.) '
+        'If not given, the formatter is guessed from the output filename, '
+        'and defaults to the terminal formatter if the output is to the '
+        'terminal or an unknown file extension.')
+    operation.add_argument(
+        '-O', metavar='OPTION=value[,OPTION=value,...]', action='append',
+        help='Give options to the lexer and formatter as a comma-separated '
+        'list of key-value pairs. '
+        'Example: `-O bg=light,python=cool`.')
+    operation.add_argument(
+        '-P', metavar='OPTION=value', action='append',
+        help='Give a single option to the lexer and formatter - with this '
+        'you can pass options whose value contains commas and equal signs. '
+        'Example: `-P "heading=Pygments, the Python highlighter"`.')
+    operation.add_argument(
+        '-o', metavar='OUTPUTFILE',
+        help='Where to write the output.  Defaults to standard output.')
+
+    operation.add_argument(
+        'INPUTFILE', nargs='?',
+        help='Where to read the input.  Defaults to standard input.')
+
+    flags = parser.add_argument_group('Operation flags')
+    flags.add_argument(
+        '-v', action='store_true',
+        help='Print a detailed traceback on unhandled exceptions, which '
+        'is useful for debugging and bug reports.')
+    flags.add_argument(
+        '-s', action='store_true',
+        help='Process lines one at a time until EOF, rather than waiting to '
+        'process the entire file.  This only works for stdin, only for lexers '
+        'with no line-spanning constructs, and is intended for streaming '
+        'input such as you get from `tail -f`. '
+        'Example usage: `tail -f sql.log | pygmentize -s -l sql`.')
+    flags.add_argument(
+        '-x', action='store_true',
+        help='Allow custom lexers and formatters to be loaded from a .py file '
+        'relative to the current working directory. For example, '
+        '`-l ./customlexer.py -x`. By default, this option expects a file '
+        'with a class named CustomLexer or CustomFormatter; you can also '
+        'specify your own class name with a colon (`-l ./lexer.py:MyLexer`). '
+        'Users should be very careful not to use this option with untrusted '
+        'files, because it will import and run them.')
+    flags.add_argument('--json', help='Output as JSON. This can '
+        'be only used in conjunction with -L.',
+        default=False,
+        action='store_true')
+
+    special_modes_group = parser.add_argument_group(
+        'Special modes - do not do any highlighting')
+    special_modes = special_modes_group.add_mutually_exclusive_group()
+    special_modes.add_argument(
+        '-S', metavar='STYLE -f formatter',
+        help='Print style definitions for STYLE for a formatter '
+        'given with -f. The argument given by -a is formatter '
+        'dependent.')
+    special_modes.add_argument(
+        '-L', nargs='*', metavar='WHAT',
+        help='List lexers, formatters, styles or filters -- '
+        'give additional arguments for the thing(s) you want to list '
+        '(e.g. "styles"), or omit them to list everything.')
+    special_modes.add_argument(
+        '-N', metavar='FILENAME',
+        help='Guess and print out a lexer name based solely on the given '
+        'filename. Does not take input or highlight anything. If no specific '
+        'lexer can be determined, "text" is printed.')
+    special_modes.add_argument(
+        '-C', action='store_true',
+        help='Like -N, but print out a lexer name based solely on '
+        'a given content from standard input.')
+    special_modes.add_argument(
+        '-H', action='store', nargs=2, metavar=('NAME', 'TYPE'),
+        help='Print detailed help for the object <name> of type <type>, '
+        'where <type> is one of "lexer", "formatter" or "filter".')
+    special_modes.add_argument(
+        '-V', action='store_true',
+        help='Print the package version.')
+    special_modes.add_argument(
+        '-h', '--help', action='store_true',
+        help='Print this help.')
+    special_modes_group.add_argument(
+        '-a', metavar='ARG',
+        help='Formatter-specific additional argument for the -S (print '
+        'style sheet) mode.')
+
+    argns = parser.parse_args(args[1:])
+
+    try:
+        return main_inner(parser, argns)
+    except BrokenPipeError:
+        # someone closed our stdout, e.g. by quitting a pager.
+        return 0
+    except Exception:
+        if argns.v:
+            print(file=sys.stderr)
+            print('*' * 65, file=sys.stderr)
+            print('An unhandled exception occurred while highlighting.',
+                  file=sys.stderr)
+            print('Please report the whole traceback to the issue tracker at',
+                  file=sys.stderr)
+            print('<https://github.com/pygments/pygments/issues>.',
+                  file=sys.stderr)
+            print('*' * 65, file=sys.stderr)
+            print(file=sys.stderr)
+            raise
+        import traceback
+        info = traceback.format_exception(*sys.exc_info())
+        msg = info[-1].strip()
+        if len(info) >= 3:
+            # extract relevant file and position info
+            msg += '\n   (f%s)' % info[-2].split('\n')[0].strip()[1:]
+        print(file=sys.stderr)
+        print('*** Error while highlighting:', file=sys.stderr)
+        print(msg, file=sys.stderr)
+        print('*** If this is a bug you want to report, please rerun with -v.',
+              file=sys.stderr)
+        return 1
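# Illustrative sketch, not part of the vendored file: main() is the entry point the
# `pygmentize` console script calls, and it can also be driven programmatically.
# Filenames here are hypothetical; argv[0] is ignored, as in parse_args(args[1:]) above.
#
#   import sys
#   from pygments.cmdline import main
#
#   sys.exit(main(['pygmentize', '-l', 'python', '-f', 'html',
#                  '-O', 'full,style=default', '-o', 'example.html', 'example.py']))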
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/console.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/console.py
new file mode 100644
index 0000000..deb4937
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/console.py
@@ -0,0 +1,70 @@
+"""
+    pygments.console
+    ~~~~~~~~~~~~~~~~
+
+    Format colored console output.
+
+    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+esc = "\x1b["
+
+codes = {}
+codes[""] = ""
+codes["reset"] = esc + "39;49;00m"
+
+codes["bold"] = esc + "01m"
+codes["faint"] = esc + "02m"
+codes["standout"] = esc + "03m"
+codes["underline"] = esc + "04m"
+codes["blink"] = esc + "05m"
+codes["overline"] = esc + "06m"
+
+dark_colors = ["black", "red", "green", "yellow", "blue",
+               "magenta", "cyan", "gray"]
+light_colors = ["brightblack", "brightred", "brightgreen", "brightyellow", "brightblue",
+                "brightmagenta", "brightcyan", "white"]
+
+x = 30
+for d, l in zip(dark_colors, light_colors):
+    codes[d] = esc + "%im" % x
+    codes[l] = esc + "%im" % (60 + x)
+    x += 1
+
+del d, l, x
+
+codes["white"] = codes["bold"]
+
+
+def reset_color():
+    return codes["reset"]
+
+
+def colorize(color_key, text):
+    return codes[color_key] + text + codes["reset"]
+
+
+def ansiformat(attr, text):
+    """
+    Format ``text`` with a color and/or some attributes::
+
+        color       normal color
+        *color*     bold color
+        _color_     underlined color
+        +color+     blinking color
+    """
+    result = []
+    if attr[:1] == attr[-1:] == '+':
+        result.append(codes['blink'])
+        attr = attr[1:-1]
+    if attr[:1] == attr[-1:] == '*':
+        result.append(codes['bold'])
+        attr = attr[1:-1]
+    if attr[:1] == attr[-1:] == '_':
+        result.append(codes['underline'])
+        attr = attr[1:-1]
+    result.append(codes[attr])
+    result.append(text)
+    result.append(codes['reset'])
+    return ''.join(result)
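# Illustrative sketch, not part of the vendored file: using the helpers above. Color
# names come from the codes/dark_colors/light_colors tables in this module; assumes a
# regular `pygments` install (here the module lives under pip._vendor.pygments.console).
#
#   from pygments.console import colorize, ansiformat
#
#   print(colorize('green', 'ok'))           # plain green text, reset afterwards
#   print(ansiformat('*red*', 'failed'))     # *...* bold, _..._ underline, +...+ blink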
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/filter.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/filter.py
new file mode 100644
index 0000000..dafa08d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/filter.py
@@ -0,0 +1,71 @@
+"""
+    pygments.filter
+    ~~~~~~~~~~~~~~~
+
+    Module that implements the default filter.
+
+    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+
+def apply_filters(stream, filters, lexer=None):
+    """
+    Use this method to apply an iterable of filters to
+    a stream. If lexer is given it's forwarded to the
+    filter, otherwise the filter receives `None`.
+    """
+    def _apply(filter_, stream):
+        yield from filter_.filter(lexer, stream)
+    for filter_ in filters:
+        stream = _apply(filter_, stream)
+    return stream
+
+
+def simplefilter(f):
+    """
+    Decorator that converts a function into a filter::
+
+        @simplefilter
+        def lowercase(self, lexer, stream, options):
+            for ttype, value in stream:
+                yield ttype, value.lower()
+    """
+    return type(f.__name__, (FunctionFilter,), {
+        '__module__': getattr(f, '__module__'),
+        '__doc__': f.__doc__,
+        'function': f,
+    })
+
+
+class Filter:
+    """
+    Default filter. Subclass this class or use the `simplefilter`
+    decorator to create own filters.
+    """
+
+    def __init__(self, **options):
+        self.options = options
+
+    def filter(self, lexer, stream):
+        raise NotImplementedError()
+
+
+class FunctionFilter(Filter):
+    """
+    Abstract class used by `simplefilter` to create simple
+    function filters on the fly. The `simplefilter` decorator
+    automatically creates subclasses of this class for
+    functions passed to it.
+    """
+    function = None
+
+    def __init__(self, **options):
+        if not hasattr(self, 'function'):
+            raise TypeError('%r used without bound function' %
+                            self.__class__.__name__)
+        Filter.__init__(self, **options)
+
+    def filter(self, lexer, stream):
+        # pylint: disable=not-callable
+        yield from self.function(lexer, stream, self.options)
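# Illustrative sketch, not part of the vendored file: defining a filter with the
# simplefilter decorator and attaching it to a lexer. Assumes a regular `pygments`
# install; the decorated function becomes a FunctionFilter subclass, so it is
# instantiated (optionally with options) before being added.
#
#   from pygments.filter import simplefilter
#   from pygments.lexers import PythonLexer
#
#   @simplefilter
#   def lowercase(self, lexer, stream, options):
#       for ttype, value in stream:
#           yield ttype, value.lower()
#
#   lexer = PythonLexer()
#   lexer.add_filter(lowercase())   # token text is lowercased before formatting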
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/filters/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/filters/__init__.py
new file mode 100644
index 0000000..5aa9ecb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/filters/__init__.py
@@ -0,0 +1,940 @@
+"""
+    pygments.filters
+    ~~~~~~~~~~~~~~~~
+
+    Module containing filter lookup functions and default
+    filters.
+
+    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pip._vendor.pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \
+    string_to_tokentype
+from pip._vendor.pygments.filter import Filter
+from pip._vendor.pygments.util import get_list_opt, get_int_opt, get_bool_opt, \
+    get_choice_opt, ClassNotFound, OptionError
+from pip._vendor.pygments.plugin import find_plugin_filters
+
+
+def find_filter_class(filtername):
+    """Lookup a filter by name. Return None if not found."""
+    if filtername in FILTERS:
+        return FILTERS[filtername]
+    for name, cls in find_plugin_filters():
+        if name == filtername:
+            return cls
+    return None
+
+
+def get_filter_by_name(filtername, **options):
+    """Return an instantiated filter.
+
+    Options are passed to the filter initializer if wanted.
+    Raise a ClassNotFound if not found.
+    """
+    cls = find_filter_class(filtername)
+    if cls:
+        return cls(**options)
+    else:
+        raise ClassNotFound('filter %r not found' % filtername)
+
+
+def get_all_filters():
+    """Return a generator of all filter names."""
+    yield from FILTERS
+    for name, _ in find_plugin_filters():
+        yield name
+
+
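# Illustrative sketch, not part of the vendored file: the lookup helpers above in use.
# Unknown names raise ClassNotFound; keyword arguments are forwarded to the filter's
# __init__. Assumes a regular `pygments` install.
#
#   from pygments.filters import get_all_filters, get_filter_by_name
#
#   print(sorted(get_all_filters()))                      # ['codetagify', 'gobble', ...]
#   upper = get_filter_by_name('keywordcase', case='upper')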
+def _replace_special(ttype, value, regex, specialttype,
+                     replacefunc=lambda x: x):
+    last = 0
+    for match in regex.finditer(value):
+        start, end = match.start(), match.end()
+        if start != last:
+            yield ttype, value[last:start]
+        yield specialttype, replacefunc(value[start:end])
+        last = end
+    if last != len(value):
+        yield ttype, value[last:]
+
+
+class CodeTagFilter(Filter):
+    """Highlight special code tags in comments and docstrings.
+
+    Options accepted:
+
+    `codetags` : list of strings
+       A list of strings that are flagged as code tags.  The default is to
+       highlight ``XXX``, ``TODO``, ``FIXME``, ``BUG`` and ``NOTE``.
+
+    .. versionchanged:: 2.13
+       Now recognizes ``FIXME`` by default.
+    """
+
+    def __init__(self, **options):
+        Filter.__init__(self, **options)
+        tags = get_list_opt(options, 'codetags',
+                            ['XXX', 'TODO', 'FIXME', 'BUG', 'NOTE'])
+        self.tag_re = re.compile(r'\b(%s)\b' % '|'.join([
+            re.escape(tag) for tag in tags if tag
+        ]))
+
+    def filter(self, lexer, stream):
+        regex = self.tag_re
+        for ttype, value in stream:
+            if ttype in String.Doc or \
+               ttype in Comment and \
+               ttype not in Comment.Preproc:
+                yield from _replace_special(ttype, value, regex, Comment.Special)
+            else:
+                yield ttype, value
+
+
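# Illustrative sketch, not part of the vendored file: attaching CodeTagFilter by its
# registered name, roughly what `pygmentize -F codetagify:codetags=HACK` does on the
# command line. Assumes a regular `pygments` install.
#
#   from pygments.lexers import PythonLexer
#
#   lexer = PythonLexer()
#   lexer.add_filter('codetagify', codetags=['HACK', 'TODO', 'FIXME'])
#   # matching words inside comments/docstrings are emitted as Comment.Special tokens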
+class SymbolFilter(Filter):
+    """Convert mathematical symbols such as \\ in Isabelle
+    or \\longrightarrow in LaTeX into Unicode characters.
+
+    This is mostly useful for HTML or console output when you want to
+    approximate the source rendering you'd see in an IDE.
+
+    Options accepted:
+
+    `lang` : string
+       The symbol language. Must be one of ``'isabelle'`` or
+       ``'latex'``.  The default is ``'isabelle'``.
+    """
+
+    latex_symbols = {
+        '\\alpha'                : '\U000003b1',
+        '\\beta'                 : '\U000003b2',
+        '\\gamma'                : '\U000003b3',
+        '\\delta'                : '\U000003b4',
+        '\\varepsilon'           : '\U000003b5',
+        '\\zeta'                 : '\U000003b6',
+        '\\eta'                  : '\U000003b7',
+        '\\vartheta'             : '\U000003b8',
+        '\\iota'                 : '\U000003b9',
+        '\\kappa'                : '\U000003ba',
+        '\\lambda'               : '\U000003bb',
+        '\\mu'                   : '\U000003bc',
+        '\\nu'                   : '\U000003bd',
+        '\\xi'                   : '\U000003be',
+        '\\pi'                   : '\U000003c0',
+        '\\varrho'               : '\U000003c1',
+        '\\sigma'                : '\U000003c3',
+        '\\tau'                  : '\U000003c4',
+        '\\upsilon'              : '\U000003c5',
+        '\\varphi'               : '\U000003c6',
+        '\\chi'                  : '\U000003c7',
+        '\\psi'                  : '\U000003c8',
+        '\\omega'                : '\U000003c9',
+        '\\Gamma'                : '\U00000393',
+        '\\Delta'                : '\U00000394',
+        '\\Theta'                : '\U00000398',
+        '\\Lambda'               : '\U0000039b',
+        '\\Xi'                   : '\U0000039e',
+        '\\Pi'                   : '\U000003a0',
+        '\\Sigma'                : '\U000003a3',
+        '\\Upsilon'              : '\U000003a5',
+        '\\Phi'                  : '\U000003a6',
+        '\\Psi'                  : '\U000003a8',
+        '\\Omega'                : '\U000003a9',
+        '\\leftarrow'            : '\U00002190',
+        '\\longleftarrow'        : '\U000027f5',
+        '\\rightarrow'           : '\U00002192',
+        '\\longrightarrow'       : '\U000027f6',
+        '\\Leftarrow'            : '\U000021d0',
+        '\\Longleftarrow'        : '\U000027f8',
+        '\\Rightarrow'           : '\U000021d2',
+        '\\Longrightarrow'       : '\U000027f9',
+        '\\leftrightarrow'       : '\U00002194',
+        '\\longleftrightarrow'   : '\U000027f7',
+        '\\Leftrightarrow'       : '\U000021d4',
+        '\\Longleftrightarrow'   : '\U000027fa',
+        '\\mapsto'               : '\U000021a6',
+        '\\longmapsto'           : '\U000027fc',
+        '\\relbar'               : '\U00002500',
+        '\\Relbar'               : '\U00002550',
+        '\\hookleftarrow'        : '\U000021a9',
+        '\\hookrightarrow'       : '\U000021aa',
+        '\\leftharpoondown'      : '\U000021bd',
+        '\\rightharpoondown'     : '\U000021c1',
+        '\\leftharpoonup'        : '\U000021bc',
+        '\\rightharpoonup'       : '\U000021c0',
+        '\\rightleftharpoons'    : '\U000021cc',
+        '\\leadsto'              : '\U0000219d',
+        '\\downharpoonleft'      : '\U000021c3',
+        '\\downharpoonright'     : '\U000021c2',
+        '\\upharpoonleft'        : '\U000021bf',
+        '\\upharpoonright'       : '\U000021be',
+        '\\restriction'          : '\U000021be',
+        '\\uparrow'              : '\U00002191',
+        '\\Uparrow'              : '\U000021d1',
+        '\\downarrow'            : '\U00002193',
+        '\\Downarrow'            : '\U000021d3',
+        '\\updownarrow'          : '\U00002195',
+        '\\Updownarrow'          : '\U000021d5',
+        '\\langle'               : '\U000027e8',
+        '\\rangle'               : '\U000027e9',
+        '\\lceil'                : '\U00002308',
+        '\\rceil'                : '\U00002309',
+        '\\lfloor'               : '\U0000230a',
+        '\\rfloor'               : '\U0000230b',
+        '\\flqq'                 : '\U000000ab',
+        '\\frqq'                 : '\U000000bb',
+        '\\bot'                  : '\U000022a5',
+        '\\top'                  : '\U000022a4',
+        '\\wedge'                : '\U00002227',
+        '\\bigwedge'             : '\U000022c0',
+        '\\vee'                  : '\U00002228',
+        '\\bigvee'               : '\U000022c1',
+        '\\forall'               : '\U00002200',
+        '\\exists'               : '\U00002203',
+        '\\nexists'              : '\U00002204',
+        '\\neg'                  : '\U000000ac',
+        '\\Box'                  : '\U000025a1',
+        '\\Diamond'              : '\U000025c7',
+        '\\vdash'                : '\U000022a2',
+        '\\models'               : '\U000022a8',
+        '\\dashv'                : '\U000022a3',
+        '\\surd'                 : '\U0000221a',
+        '\\le'                   : '\U00002264',
+        '\\ge'                   : '\U00002265',
+        '\\ll'                   : '\U0000226a',
+        '\\gg'                   : '\U0000226b',
+        '\\lesssim'              : '\U00002272',
+        '\\gtrsim'               : '\U00002273',
+        '\\lessapprox'           : '\U00002a85',
+        '\\gtrapprox'            : '\U00002a86',
+        '\\in'                   : '\U00002208',
+        '\\notin'                : '\U00002209',
+        '\\subset'               : '\U00002282',
+        '\\supset'               : '\U00002283',
+        '\\subseteq'             : '\U00002286',
+        '\\supseteq'             : '\U00002287',
+        '\\sqsubset'             : '\U0000228f',
+        '\\sqsupset'             : '\U00002290',
+        '\\sqsubseteq'           : '\U00002291',
+        '\\sqsupseteq'           : '\U00002292',
+        '\\cap'                  : '\U00002229',
+        '\\bigcap'               : '\U000022c2',
+        '\\cup'                  : '\U0000222a',
+        '\\bigcup'               : '\U000022c3',
+        '\\sqcup'                : '\U00002294',
+        '\\bigsqcup'             : '\U00002a06',
+        '\\sqcap'                : '\U00002293',
+        '\\Bigsqcap'             : '\U00002a05',
+        '\\setminus'             : '\U00002216',
+        '\\propto'               : '\U0000221d',
+        '\\uplus'                : '\U0000228e',
+        '\\bigplus'              : '\U00002a04',
+        '\\sim'                  : '\U0000223c',
+        '\\doteq'                : '\U00002250',
+        '\\simeq'                : '\U00002243',
+        '\\approx'               : '\U00002248',
+        '\\asymp'                : '\U0000224d',
+        '\\cong'                 : '\U00002245',
+        '\\equiv'                : '\U00002261',
+        '\\Join'                 : '\U000022c8',
+        '\\bowtie'               : '\U00002a1d',
+        '\\prec'                 : '\U0000227a',
+        '\\succ'                 : '\U0000227b',
+        '\\preceq'               : '\U0000227c',
+        '\\succeq'               : '\U0000227d',
+        '\\parallel'             : '\U00002225',
+        '\\mid'                  : '\U000000a6',
+        '\\pm'                   : '\U000000b1',
+        '\\mp'                   : '\U00002213',
+        '\\times'                : '\U000000d7',
+        '\\div'                  : '\U000000f7',
+        '\\cdot'                 : '\U000022c5',
+        '\\star'                 : '\U000022c6',
+        '\\circ'                 : '\U00002218',
+        '\\dagger'               : '\U00002020',
+        '\\ddagger'              : '\U00002021',
+        '\\lhd'                  : '\U000022b2',
+        '\\rhd'                  : '\U000022b3',
+        '\\unlhd'                : '\U000022b4',
+        '\\unrhd'                : '\U000022b5',
+        '\\triangleleft'         : '\U000025c3',
+        '\\triangleright'        : '\U000025b9',
+        '\\triangle'             : '\U000025b3',
+        '\\triangleq'            : '\U0000225c',
+        '\\oplus'                : '\U00002295',
+        '\\bigoplus'             : '\U00002a01',
+        '\\otimes'               : '\U00002297',
+        '\\bigotimes'            : '\U00002a02',
+        '\\odot'                 : '\U00002299',
+        '\\bigodot'              : '\U00002a00',
+        '\\ominus'               : '\U00002296',
+        '\\oslash'               : '\U00002298',
+        '\\dots'                 : '\U00002026',
+        '\\cdots'                : '\U000022ef',
+        '\\sum'                  : '\U00002211',
+        '\\prod'                 : '\U0000220f',
+        '\\coprod'               : '\U00002210',
+        '\\infty'                : '\U0000221e',
+        '\\int'                  : '\U0000222b',
+        '\\oint'                 : '\U0000222e',
+        '\\clubsuit'             : '\U00002663',
+        '\\diamondsuit'          : '\U00002662',
+        '\\heartsuit'            : '\U00002661',
+        '\\spadesuit'            : '\U00002660',
+        '\\aleph'                : '\U00002135',
+        '\\emptyset'             : '\U00002205',
+        '\\nabla'                : '\U00002207',
+        '\\partial'              : '\U00002202',
+        '\\flat'                 : '\U0000266d',
+        '\\natural'              : '\U0000266e',
+        '\\sharp'                : '\U0000266f',
+        '\\angle'                : '\U00002220',
+        '\\copyright'            : '\U000000a9',
+        '\\textregistered'       : '\U000000ae',
+        '\\textonequarter'       : '\U000000bc',
+        '\\textonehalf'          : '\U000000bd',
+        '\\textthreequarters'    : '\U000000be',
+        '\\textordfeminine'      : '\U000000aa',
+        '\\textordmasculine'     : '\U000000ba',
+        '\\euro'                 : '\U000020ac',
+        '\\pounds'               : '\U000000a3',
+        '\\yen'                  : '\U000000a5',
+        '\\textcent'             : '\U000000a2',
+        '\\textcurrency'         : '\U000000a4',
+        '\\textdegree'           : '\U000000b0',
+    }
+
+    isabelle_symbols = {
+        '\\<zero>'                 : '\U0001d7ec',
+        '\\<one>'                  : '\U0001d7ed',
+        '\\<two>'                  : '\U0001d7ee',
+        '\\<three>'                : '\U0001d7ef',
+        '\\<four>'                 : '\U0001d7f0',
+        '\\<five>'                 : '\U0001d7f1',
+        '\\<six>'                  : '\U0001d7f2',
+        '\\<seven>'                : '\U0001d7f3',
+        '\\<eight>'                : '\U0001d7f4',
+        '\\<nine>'                 : '\U0001d7f5',
+        '\\<A>'                    : '\U0001d49c',
+        '\\<B>'                    : '\U0000212c',
+        '\\<C>'                    : '\U0001d49e',
+        '\\<D>'                    : '\U0001d49f',
+        '\\<E>'                    : '\U00002130',
+        '\\<F>'                    : '\U00002131',
+        '\\<G>'                    : '\U0001d4a2',
+        '\\<H>'                    : '\U0000210b',
+        '\\<I>'                    : '\U00002110',
+        '\\<J>'                    : '\U0001d4a5',
+        '\\<K>'                    : '\U0001d4a6',
+        '\\<L>'                    : '\U00002112',
+        '\\<M>'                    : '\U00002133',
+        '\\<N>'                    : '\U0001d4a9',
+        '\\<O>'                    : '\U0001d4aa',
+        '\\

' : '\U0001d5c9', + '\\' : '\U0001d5ca', + '\\' : '\U0001d5cb', + '\\' : '\U0001d5cc', + '\\' : '\U0001d5cd', + '\\' : '\U0001d5ce', + '\\' : '\U0001d5cf', + '\\' : '\U0001d5d0', + '\\' : '\U0001d5d1', + '\\' : '\U0001d5d2', + '\\' : '\U0001d5d3', + '\\' : '\U0001d504', + '\\' : '\U0001d505', + '\\' : '\U0000212d', + '\\

' : '\U0001d507', + '\\' : '\U0001d508', + '\\' : '\U0001d509', + '\\' : '\U0001d50a', + '\\' : '\U0000210c', + '\\' : '\U00002111', + '\\' : '\U0001d50d', + '\\' : '\U0001d50e', + '\\' : '\U0001d50f', + '\\' : '\U0001d510', + '\\' : '\U0001d511', + '\\' : '\U0001d512', + '\\' : '\U0001d513', + '\\' : '\U0001d514', + '\\' : '\U0000211c', + '\\' : '\U0001d516', + '\\' : '\U0001d517', + '\\' : '\U0001d518', + '\\' : '\U0001d519', + '\\' : '\U0001d51a', + '\\' : '\U0001d51b', + '\\' : '\U0001d51c', + '\\' : '\U00002128', + '\\' : '\U0001d51e', + '\\' : '\U0001d51f', + '\\' : '\U0001d520', + '\\
' : '\U0001d521', + '\\' : '\U0001d522', + '\\' : '\U0001d523', + '\\' : '\U0001d524', + '\\' : '\U0001d525', + '\\' : '\U0001d526', + '\\' : '\U0001d527', + '\\' : '\U0001d528', + '\\' : '\U0001d529', + '\\' : '\U0001d52a', + '\\' : '\U0001d52b', + '\\' : '\U0001d52c', + '\\' : '\U0001d52d', + '\\' : '\U0001d52e', + '\\' : '\U0001d52f', + '\\' : '\U0001d530', + '\\' : '\U0001d531', + '\\' : '\U0001d532', + '\\' : '\U0001d533', + '\\' : '\U0001d534', + '\\' : '\U0001d535', + '\\' : '\U0001d536', + '\\' : '\U0001d537', + '\\' : '\U000003b1', + '\\' : '\U000003b2', + '\\' : '\U000003b3', + '\\' : '\U000003b4', + '\\' : '\U000003b5', + '\\' : '\U000003b6', + '\\' : '\U000003b7', + '\\' : '\U000003b8', + '\\' : '\U000003b9', + '\\' : '\U000003ba', + '\\' : '\U000003bb', + '\\' : '\U000003bc', + '\\' : '\U000003bd', + '\\' : '\U000003be', + '\\' : '\U000003c0', + '\\' : '\U000003c1', + '\\' : '\U000003c3', + '\\' : '\U000003c4', + '\\' : '\U000003c5', + '\\' : '\U000003c6', + '\\' : '\U000003c7', + '\\' : '\U000003c8', + '\\' : '\U000003c9', + '\\' : '\U00000393', + '\\' : '\U00000394', + '\\' : '\U00000398', + '\\' : '\U0000039b', + '\\' : '\U0000039e', + '\\' : '\U000003a0', + '\\' : '\U000003a3', + '\\' : '\U000003a5', + '\\' : '\U000003a6', + '\\' : '\U000003a8', + '\\' : '\U000003a9', + '\\' : '\U0001d539', + '\\' : '\U00002102', + '\\' : '\U00002115', + '\\' : '\U0000211a', + '\\' : '\U0000211d', + '\\' : '\U00002124', + '\\' : '\U00002190', + '\\' : '\U000027f5', + '\\' : '\U00002192', + '\\' : '\U000027f6', + '\\' : '\U000021d0', + '\\' : '\U000027f8', + '\\' : '\U000021d2', + '\\' : '\U000027f9', + '\\' : '\U00002194', + '\\' : '\U000027f7', + '\\' : '\U000021d4', + '\\' : '\U000027fa', + '\\' : '\U000021a6', + '\\' : '\U000027fc', + '\\' : '\U00002500', + '\\' : '\U00002550', + '\\' : '\U000021a9', + '\\' : '\U000021aa', + '\\' : '\U000021bd', + '\\' : '\U000021c1', + '\\' : '\U000021bc', + '\\' : '\U000021c0', + '\\' : '\U000021cc', + '\\' : '\U0000219d', + '\\' : '\U000021c3', + '\\' : '\U000021c2', + '\\' : '\U000021bf', + '\\' : '\U000021be', + '\\' : '\U000021be', + '\\' : '\U00002237', + '\\' : '\U00002191', + '\\' : '\U000021d1', + '\\' : '\U00002193', + '\\' : '\U000021d3', + '\\' : '\U00002195', + '\\' : '\U000021d5', + '\\' : '\U000027e8', + '\\' : '\U000027e9', + '\\' : '\U00002308', + '\\' : '\U00002309', + '\\' : '\U0000230a', + '\\' : '\U0000230b', + '\\' : '\U00002987', + '\\' : '\U00002988', + '\\' : '\U000027e6', + '\\' : '\U000027e7', + '\\' : '\U00002983', + '\\' : '\U00002984', + '\\' : '\U000000ab', + '\\' : '\U000000bb', + '\\' : '\U000022a5', + '\\' : '\U000022a4', + '\\' : '\U00002227', + '\\' : '\U000022c0', + '\\' : '\U00002228', + '\\' : '\U000022c1', + '\\' : '\U00002200', + '\\' : '\U00002203', + '\\' : '\U00002204', + '\\' : '\U000000ac', + '\\' : '\U000025a1', + '\\' : '\U000025c7', + '\\' : '\U000022a2', + '\\' : '\U000022a8', + '\\' : '\U000022a9', + '\\' : '\U000022ab', + '\\' : '\U000022a3', + '\\' : '\U0000221a', + '\\' : '\U00002264', + '\\' : '\U00002265', + '\\' : '\U0000226a', + '\\' : '\U0000226b', + '\\' : '\U00002272', + '\\' : '\U00002273', + '\\' : '\U00002a85', + '\\' : '\U00002a86', + '\\' : '\U00002208', + '\\' : '\U00002209', + '\\' : '\U00002282', + '\\' : '\U00002283', + '\\' : '\U00002286', + '\\' : '\U00002287', + '\\' : '\U0000228f', + '\\' : '\U00002290', + '\\' : '\U00002291', + '\\' : '\U00002292', + '\\' : '\U00002229', + '\\' : '\U000022c2', + '\\' : '\U0000222a', + '\\' : '\U000022c3', + '\\' : '\U00002294', + '\\' : 
'\U00002a06', + '\\' : '\U00002293', + '\\' : '\U00002a05', + '\\' : '\U00002216', + '\\' : '\U0000221d', + '\\' : '\U0000228e', + '\\' : '\U00002a04', + '\\' : '\U00002260', + '\\' : '\U0000223c', + '\\' : '\U00002250', + '\\' : '\U00002243', + '\\' : '\U00002248', + '\\' : '\U0000224d', + '\\' : '\U00002245', + '\\' : '\U00002323', + '\\' : '\U00002261', + '\\' : '\U00002322', + '\\' : '\U000022c8', + '\\' : '\U00002a1d', + '\\' : '\U0000227a', + '\\' : '\U0000227b', + '\\' : '\U0000227c', + '\\' : '\U0000227d', + '\\' : '\U00002225', + '\\' : '\U000000a6', + '\\' : '\U000000b1', + '\\' : '\U00002213', + '\\' : '\U000000d7', + '\\
' : '\U000000f7', + '\\' : '\U000022c5', + '\\' : '\U000022c6', + '\\' : '\U00002219', + '\\' : '\U00002218', + '\\' : '\U00002020', + '\\' : '\U00002021', + '\\' : '\U000022b2', + '\\' : '\U000022b3', + '\\' : '\U000022b4', + '\\' : '\U000022b5', + '\\' : '\U000025c3', + '\\' : '\U000025b9', + '\\' : '\U000025b3', + '\\' : '\U0000225c', + '\\' : '\U00002295', + '\\' : '\U00002a01', + '\\' : '\U00002297', + '\\' : '\U00002a02', + '\\' : '\U00002299', + '\\' : '\U00002a00', + '\\' : '\U00002296', + '\\' : '\U00002298', + '\\' : '\U00002026', + '\\' : '\U000022ef', + '\\' : '\U00002211', + '\\' : '\U0000220f', + '\\' : '\U00002210', + '\\' : '\U0000221e', + '\\' : '\U0000222b', + '\\' : '\U0000222e', + '\\' : '\U00002663', + '\\' : '\U00002662', + '\\' : '\U00002661', + '\\' : '\U00002660', + '\\' : '\U00002135', + '\\' : '\U00002205', + '\\' : '\U00002207', + '\\' : '\U00002202', + '\\' : '\U0000266d', + '\\' : '\U0000266e', + '\\' : '\U0000266f', + '\\' : '\U00002220', + '\\' : '\U000000a9', + '\\' : '\U000000ae', + '\\' : '\U000000ad', + '\\' : '\U000000af', + '\\' : '\U000000bc', + '\\' : '\U000000bd', + '\\' : '\U000000be', + '\\' : '\U000000aa', + '\\' : '\U000000ba', + '\\
' : '\U000000a7', + '\\' : '\U000000b6', + '\\' : '\U000000a1', + '\\' : '\U000000bf', + '\\' : '\U000020ac', + '\\' : '\U000000a3', + '\\' : '\U000000a5', + '\\' : '\U000000a2', + '\\' : '\U000000a4', + '\\' : '\U000000b0', + '\\' : '\U00002a3f', + '\\' : '\U00002127', + '\\' : '\U000025ca', + '\\' : '\U00002118', + '\\' : '\U00002240', + '\\' : '\U000022c4', + '\\' : '\U000000b4', + '\\' : '\U00000131', + '\\' : '\U000000a8', + '\\' : '\U000000b8', + '\\' : '\U000002dd', + '\\' : '\U000003f5', + '\\' : '\U000023ce', + '\\' : '\U00002039', + '\\' : '\U0000203a', + '\\' : '\U00002302', + '\\<^sub>' : '\U000021e9', + '\\<^sup>' : '\U000021e7', + '\\<^bold>' : '\U00002759', + '\\<^bsub>' : '\U000021d8', + '\\<^esub>' : '\U000021d9', + '\\<^bsup>' : '\U000021d7', + '\\<^esup>' : '\U000021d6', + } + + lang_map = {'isabelle' : isabelle_symbols, 'latex' : latex_symbols} + + def __init__(self, **options): + Filter.__init__(self, **options) + lang = get_choice_opt(options, 'lang', + ['isabelle', 'latex'], 'isabelle') + self.symbols = self.lang_map[lang] + + def filter(self, lexer, stream): + for ttype, value in stream: + if value in self.symbols: + yield ttype, self.symbols[value] + else: + yield ttype, value + + +class KeywordCaseFilter(Filter): + """Convert keywords to lowercase or uppercase or capitalize them, which + means first letter uppercase, rest lowercase. + + This can be useful e.g. if you highlight Pascal code and want to adapt the + code to your styleguide. + + Options accepted: + + `case` : string + The casing to convert keywords to. Must be one of ``'lower'``, + ``'upper'`` or ``'capitalize'``. The default is ``'lower'``. + """ + + def __init__(self, **options): + Filter.__init__(self, **options) + case = get_choice_opt(options, 'case', + ['lower', 'upper', 'capitalize'], 'lower') + self.convert = getattr(str, case) + + def filter(self, lexer, stream): + for ttype, value in stream: + if ttype in Keyword: + yield ttype, self.convert(value) + else: + yield ttype, value + + +class NameHighlightFilter(Filter): + """Highlight a normal Name (and Name.*) token with a different token type. + + Example:: + + filter = NameHighlightFilter( + names=['foo', 'bar', 'baz'], + tokentype=Name.Function, + ) + + This would highlight the names "foo", "bar" and "baz" + as functions. `Name.Function` is the default token type. + + Options accepted: + + `names` : list of strings + A list of names that should be given the different token type. + There is no default. + `tokentype` : TokenType or string + A token type or a string containing a token type name that is + used for highlighting the strings in `names`. The default is + `Name.Function`. + """ + + def __init__(self, **options): + Filter.__init__(self, **options) + self.names = set(get_list_opt(options, 'names', [])) + tokentype = options.get('tokentype') + if tokentype: + self.tokentype = string_to_tokentype(tokentype) + else: + self.tokentype = Name.Function + + def filter(self, lexer, stream): + for ttype, value in stream: + if ttype in Name and value in self.names: + yield self.tokentype, value + else: + yield ttype, value + + +class ErrorToken(Exception): + pass + + +class RaiseOnErrorTokenFilter(Filter): + """Raise an exception when the lexer generates an error token. + + Options accepted: + + `excclass` : Exception class + The exception class to raise. + The default is `pygments.filters.ErrorToken`. + + .. 
versionadded:: 0.8 + """ + + def __init__(self, **options): + Filter.__init__(self, **options) + self.exception = options.get('excclass', ErrorToken) + try: + # issubclass() will raise TypeError if first argument is not a class + if not issubclass(self.exception, Exception): + raise TypeError + except TypeError: + raise OptionError('excclass option is not an exception class') + + def filter(self, lexer, stream): + for ttype, value in stream: + if ttype is Error: + raise self.exception(value) + yield ttype, value + + +class VisibleWhitespaceFilter(Filter): + """Convert tabs, newlines and/or spaces to visible characters. + + Options accepted: + + `spaces` : string or bool + If this is a one-character string, spaces will be replaces by this string. + If it is another true value, spaces will be replaced by ``·`` (unicode + MIDDLE DOT). If it is a false value, spaces will not be replaced. The + default is ``False``. + `tabs` : string or bool + The same as for `spaces`, but the default replacement character is ``»`` + (unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK). The default value + is ``False``. Note: this will not work if the `tabsize` option for the + lexer is nonzero, as tabs will already have been expanded then. + `tabsize` : int + If tabs are to be replaced by this filter (see the `tabs` option), this + is the total number of characters that a tab should be expanded to. + The default is ``8``. + `newlines` : string or bool + The same as for `spaces`, but the default replacement character is ``¶`` + (unicode PILCROW SIGN). The default value is ``False``. + `wstokentype` : bool + If true, give whitespace the special `Whitespace` token type. This allows + styling the visible whitespace differently (e.g. greyed out), but it can + disrupt background colors. The default is ``True``. + + .. versionadded:: 0.8 + """ + + def __init__(self, **options): + Filter.__init__(self, **options) + for name, default in [('spaces', '·'), + ('tabs', '»'), + ('newlines', '¶')]: + opt = options.get(name, False) + if isinstance(opt, str) and len(opt) == 1: + setattr(self, name, opt) + else: + setattr(self, name, (opt and default or '')) + tabsize = get_int_opt(options, 'tabsize', 8) + if self.tabs: + self.tabs += ' ' * (tabsize - 1) + if self.newlines: + self.newlines += '\n' + self.wstt = get_bool_opt(options, 'wstokentype', True) + + def filter(self, lexer, stream): + if self.wstt: + spaces = self.spaces or ' ' + tabs = self.tabs or '\t' + newlines = self.newlines or '\n' + regex = re.compile(r'\s') + + def replacefunc(wschar): + if wschar == ' ': + return spaces + elif wschar == '\t': + return tabs + elif wschar == '\n': + return newlines + return wschar + + for ttype, value in stream: + yield from _replace_special(ttype, value, regex, Whitespace, + replacefunc) + else: + spaces, tabs, newlines = self.spaces, self.tabs, self.newlines + # simpler processing + for ttype, value in stream: + if spaces: + value = value.replace(' ', spaces) + if tabs: + value = value.replace('\t', tabs) + if newlines: + value = value.replace('\n', newlines) + yield ttype, value + + +class GobbleFilter(Filter): + """Gobbles source code lines (eats initial characters). + + This filter drops the first ``n`` characters off every line of code. This + may be useful when the source code fed to the lexer is indented by a fixed + amount of space that isn't desired in the output. + + Options accepted: + + `n` : int + The number of characters to gobble. + + .. 
versionadded:: 1.2 + """ + def __init__(self, **options): + Filter.__init__(self, **options) + self.n = get_int_opt(options, 'n', 0) + + def gobble(self, value, left): + if left < len(value): + return value[left:], 0 + else: + return '', left - len(value) + + def filter(self, lexer, stream): + n = self.n + left = n # How many characters left to gobble. + for ttype, value in stream: + # Remove ``left`` tokens from first line, ``n`` from all others. + parts = value.split('\n') + (parts[0], left) = self.gobble(parts[0], left) + for i in range(1, len(parts)): + (parts[i], left) = self.gobble(parts[i], n) + value = '\n'.join(parts) + + if value != '': + yield ttype, value + + +class TokenMergeFilter(Filter): + """Merges consecutive tokens with the same token type in the output + stream of a lexer. + + .. versionadded:: 1.2 + """ + def __init__(self, **options): + Filter.__init__(self, **options) + + def filter(self, lexer, stream): + current_type = None + current_value = None + for ttype, value in stream: + if ttype is current_type: + current_value += value + else: + if current_type is not None: + yield current_type, current_value + current_type = ttype + current_value = value + if current_type is not None: + yield current_type, current_value + + +FILTERS = { + 'codetagify': CodeTagFilter, + 'keywordcase': KeywordCaseFilter, + 'highlight': NameHighlightFilter, + 'raiseonerror': RaiseOnErrorTokenFilter, + 'whitespace': VisibleWhitespaceFilter, + 'gobble': GobbleFilter, + 'tokenmerge': TokenMergeFilter, + 'symbols': SymbolFilter, +} diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatter.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatter.py new file mode 100644 index 0000000..3ca4892 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatter.py @@ -0,0 +1,124 @@ +""" + pygments.formatter + ~~~~~~~~~~~~~~~~~~ + + Base formatter class. + + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import codecs + +from pip._vendor.pygments.util import get_bool_opt +from pip._vendor.pygments.styles import get_style_by_name + +__all__ = ['Formatter'] + + +def _lookup_style(style): + if isinstance(style, str): + return get_style_by_name(style) + return style + + +class Formatter: + """ + Converts a token stream to text. + + Formatters should have attributes to help selecting them. These + are similar to the corresponding :class:`~pygments.lexer.Lexer` + attributes. + + .. autoattribute:: name + :no-value: + + .. autoattribute:: aliases + :no-value: + + .. autoattribute:: filenames + :no-value: + + You can pass options as keyword arguments to the constructor. + All formatters accept these basic options: + + ``style`` + The style to use, can be a string or a Style subclass + (default: "default"). Not used by e.g. the + TerminalFormatter. + ``full`` + Tells the formatter to output a "full" document, i.e. + a complete self-contained document. This doesn't have + any effect for some formatters (default: false). + ``title`` + If ``full`` is true, the title that should be used to + caption the document (default: ''). + ``encoding`` + If given, must be an encoding name. This will be used to + convert the Unicode token strings to byte strings in the + output. If it is "" or None, Unicode strings will be written + to the output file, which most file-like objects do not + support (default: None). + ``outencoding`` + Overrides ``encoding`` if given. 
+ + """ + + #: Full name for the formatter, in human-readable form. + name = None + + #: A list of short, unique identifiers that can be used to lookup + #: the formatter from a list, e.g. using :func:`.get_formatter_by_name()`. + aliases = [] + + #: A list of fnmatch patterns that match filenames for which this + #: formatter can produce output. The patterns in this list should be unique + #: among all formatters. + filenames = [] + + #: If True, this formatter outputs Unicode strings when no encoding + #: option is given. + unicodeoutput = True + + def __init__(self, **options): + """ + As with lexers, this constructor takes arbitrary optional arguments, + and if you override it, you should first process your own options, then + call the base class implementation. + """ + self.style = _lookup_style(options.get('style', 'default')) + self.full = get_bool_opt(options, 'full', False) + self.title = options.get('title', '') + self.encoding = options.get('encoding', None) or None + if self.encoding in ('guess', 'chardet'): + # can happen for e.g. pygmentize -O encoding=guess + self.encoding = 'utf-8' + self.encoding = options.get('outencoding') or self.encoding + self.options = options + + def get_style_defs(self, arg=''): + """ + This method must return statements or declarations suitable to define + the current style for subsequent highlighted text (e.g. CSS classes + in the `HTMLFormatter`). + + The optional argument `arg` can be used to modify the generation and + is formatter dependent (it is standardized because it can be given on + the command line). + + This method is called by the ``-S`` :doc:`command-line option `, + the `arg` is then given by the ``-a`` option. + """ + return '' + + def format(self, tokensource, outfile): + """ + This method must format the tokens from the `tokensource` iterable and + write the formatted version to the file object `outfile`. + + Formatter options can control how exactly the tokens are converted. + """ + if self.encoding: + # wrap the outfile in a StreamWriter + outfile = codecs.lookup(self.encoding)[3](outfile) + return self.format_unencoded(tokensource, outfile) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__init__.py new file mode 100644 index 0000000..39db842 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__init__.py @@ -0,0 +1,158 @@ +""" + pygments.formatters + ~~~~~~~~~~~~~~~~~~~ + + Pygments formatters. + + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +import re +import sys +import types +import fnmatch +from os.path import basename + +from pip._vendor.pygments.formatters._mapping import FORMATTERS +from pip._vendor.pygments.plugin import find_plugin_formatters +from pip._vendor.pygments.util import ClassNotFound + +__all__ = ['get_formatter_by_name', 'get_formatter_for_filename', + 'get_all_formatters', 'load_formatter_from_file'] + list(FORMATTERS) + +_formatter_cache = {} # classes by name +_pattern_cache = {} + + +def _fn_matches(fn, glob): + """Return whether the supplied file name fn matches pattern filename.""" + if glob not in _pattern_cache: + pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob)) + return pattern.match(fn) + return _pattern_cache[glob].match(fn) + + +def _load_formatters(module_name): + """Load a formatter (and all others in the module too).""" + mod = __import__(module_name, None, None, ['__all__']) + for formatter_name in mod.__all__: + cls = getattr(mod, formatter_name) + _formatter_cache[cls.name] = cls + + +def get_all_formatters(): + """Return a generator for all formatter classes.""" + # NB: this returns formatter classes, not info like get_all_lexers(). + for info in FORMATTERS.values(): + if info[1] not in _formatter_cache: + _load_formatters(info[0]) + yield _formatter_cache[info[1]] + for _, formatter in find_plugin_formatters(): + yield formatter + + +def find_formatter_class(alias): + """Lookup a formatter by alias. + + Returns None if not found. + """ + for module_name, name, aliases, _, _ in FORMATTERS.values(): + if alias in aliases: + if name not in _formatter_cache: + _load_formatters(module_name) + return _formatter_cache[name] + for _, cls in find_plugin_formatters(): + if alias in cls.aliases: + return cls + + +def get_formatter_by_name(_alias, **options): + """ + Return an instance of a :class:`.Formatter` subclass that has `alias` in its + aliases list. The formatter is given the `options` at its instantiation. + + Will raise :exc:`pygments.util.ClassNotFound` if no formatter with that + alias is found. + """ + cls = find_formatter_class(_alias) + if cls is None: + raise ClassNotFound("no formatter found for name %r" % _alias) + return cls(**options) + + +def load_formatter_from_file(filename, formattername="CustomFormatter", **options): + """ + Return a `Formatter` subclass instance loaded from the provided file, relative + to the current directory. + + The file is expected to contain a Formatter class named ``formattername`` + (by default, CustomFormatter). Users should be very careful with the input, because + this method is equivalent to running ``eval()`` on the input file. The formatter is + given the `options` at its instantiation. + + :exc:`pygments.util.ClassNotFound` is raised if there are any errors loading + the formatter. + + .. 
versionadded:: 2.2 + """ + try: + # This empty dict will contain the namespace for the exec'd file + custom_namespace = {} + with open(filename, 'rb') as f: + exec(f.read(), custom_namespace) + # Retrieve the class `formattername` from that namespace + if formattername not in custom_namespace: + raise ClassNotFound('no valid %s class found in %s' % + (formattername, filename)) + formatter_class = custom_namespace[formattername] + # And finally instantiate it with the options + return formatter_class(**options) + except OSError as err: + raise ClassNotFound('cannot read %s: %s' % (filename, err)) + except ClassNotFound: + raise + except Exception as err: + raise ClassNotFound('error when loading custom formatter: %s' % err) + + +def get_formatter_for_filename(fn, **options): + """ + Return a :class:`.Formatter` subclass instance that has a filename pattern + matching `fn`. The formatter is given the `options` at its instantiation. + + Will raise :exc:`pygments.util.ClassNotFound` if no formatter for that filename + is found. + """ + fn = basename(fn) + for modname, name, _, filenames, _ in FORMATTERS.values(): + for filename in filenames: + if _fn_matches(fn, filename): + if name not in _formatter_cache: + _load_formatters(modname) + return _formatter_cache[name](**options) + for cls in find_plugin_formatters(): + for filename in cls.filenames: + if _fn_matches(fn, filename): + return cls(**options) + raise ClassNotFound("no formatter found for file name %r" % fn) + + +class _automodule(types.ModuleType): + """Automatically import formatters.""" + + def __getattr__(self, name): + info = FORMATTERS.get(name) + if info: + _load_formatters(info[0]) + cls = _formatter_cache[info[1]] + setattr(self, name, cls) + return cls + raise AttributeError(name) + + +oldmod = sys.modules[__name__] +newmod = _automodule(__name__) +newmod.__dict__.update(oldmod.__dict__) +sys.modules[__name__] = newmod +del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/_mapping.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/_mapping.py new file mode 100644 index 0000000..72ca840 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/_mapping.py @@ -0,0 +1,23 @@ +# Automatically generated by scripts/gen_mapfiles.py. +# DO NOT EDIT BY HAND; run `tox -e mapfiles` instead. + +FORMATTERS = { + 'BBCodeFormatter': ('pygments.formatters.bbcode', 'BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'), + 'BmpImageFormatter': ('pygments.formatters.img', 'img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), + 'GifImageFormatter': ('pygments.formatters.img', 'img_gif', ('gif',), ('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), + 'GroffFormatter': ('pygments.formatters.groff', 'groff', ('groff', 'troff', 'roff'), (), 'Format tokens with groff escapes to change their color and font style.'), + 'HtmlFormatter': ('pygments.formatters.html', 'HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ```` tags. By default, the content is enclosed in a ``
<pre>`` tag, itself wrapped in a ``<div>`` tag (but see the `nowrap` option). The ``<div>
``'s CSS class can be set by the `cssclass` option."), + 'IRCFormatter': ('pygments.formatters.irc', 'IRC', ('irc', 'IRC'), (), 'Format tokens with IRC color sequences'), + 'ImageFormatter': ('pygments.formatters.img', 'img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), + 'JpgImageFormatter': ('pygments.formatters.img', 'img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), + 'LatexFormatter': ('pygments.formatters.latex', 'LaTeX', ('latex', 'tex'), ('*.tex',), 'Format tokens as LaTeX code. This needs the `fancyvrb` and `color` standard packages.'), + 'NullFormatter': ('pygments.formatters.other', 'Text only', ('text', 'null'), ('*.txt',), 'Output the text unchanged without any formatting.'), + 'PangoMarkupFormatter': ('pygments.formatters.pangomarkup', 'Pango Markup', ('pango', 'pangomarkup'), (), 'Format tokens as Pango Markup code. It can then be rendered to an SVG.'), + 'RawTokenFormatter': ('pygments.formatters.other', 'Raw tokens', ('raw', 'tokens'), ('*.raw',), 'Format tokens as a raw representation for storing token streams.'), + 'RtfFormatter': ('pygments.formatters.rtf', 'RTF', ('rtf',), ('*.rtf',), 'Format tokens as RTF markup. This formatter automatically outputs full RTF documents with color information and other useful stuff. Perfect for Copy and Paste into Microsoft(R) Word(R) documents.'), + 'SvgFormatter': ('pygments.formatters.svg', 'SVG', ('svg',), ('*.svg',), 'Format tokens as an SVG graphics file. This formatter is still experimental. Each line of code is a ```` element with explicit ``x`` and ``y`` coordinates containing ```` elements with the individual token styles.'), + 'Terminal256Formatter': ('pygments.formatters.terminal256', 'Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'), + 'TerminalFormatter': ('pygments.formatters.terminal', 'Terminal', ('terminal', 'console'), (), 'Format tokens with ANSI color sequences, for output in a text console. Color sequences are terminated at newlines, so that paging the output works correctly.'), + 'TerminalTrueColorFormatter': ('pygments.formatters.terminal256', 'TerminalTrueColor', ('terminal16m', 'console16m', '16m'), (), 'Format tokens with ANSI color sequences, for output in a true-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'), + 'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.'), +} diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/bbcode.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/bbcode.py new file mode 100644 index 0000000..c4db8f4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/bbcode.py @@ -0,0 +1,108 @@ +""" + pygments.formatters.bbcode + ~~~~~~~~~~~~~~~~~~~~~~~~~~ + + BBcode formatter. + + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + + +from pip._vendor.pygments.formatter import Formatter +from pip._vendor.pygments.util import get_bool_opt + +__all__ = ['BBCodeFormatter'] + + +class BBCodeFormatter(Formatter): + """ + Format tokens with BBcodes. These formatting codes are used by many + bulletin boards, so you can highlight your sourcecode with pygments before + posting it there. + + This formatter has no support for background colors and borders, as there + are no common BBcode tags for that. + + Some board systems (e.g. phpBB) don't support colors in their [code] tag, + so you can't use the highlighting together with that tag. + Text in a [code] tag usually is shown with a monospace font (which this + formatter can do with the ``monofont`` option) and no spaces (which you + need for indentation) are removed. + + Additional options accepted: + + `style` + The style to use, can be a string or a Style subclass (default: + ``'default'``). + + `codetag` + If set to true, put the output into ``[code]`` tags (default: + ``false``) + + `monofont` + If set to true, add a tag to show the code with a monospace font + (default: ``false``). + """ + name = 'BBCode' + aliases = ['bbcode', 'bb'] + filenames = [] + + def __init__(self, **options): + Formatter.__init__(self, **options) + self._code = get_bool_opt(options, 'codetag', False) + self._mono = get_bool_opt(options, 'monofont', False) + + self.styles = {} + self._make_styles() + + def _make_styles(self): + for ttype, ndef in self.style: + start = end = '' + if ndef['color']: + start += '[color=#%s]' % ndef['color'] + end = '[/color]' + end + if ndef['bold']: + start += '[b]' + end = '[/b]' + end + if ndef['italic']: + start += '[i]' + end = '[/i]' + end + if ndef['underline']: + start += '[u]' + end = '[/u]' + end + # there are no common BBcodes for background-color and border + + self.styles[ttype] = start, end + + def format_unencoded(self, tokensource, outfile): + if self._code: + outfile.write('[code]') + if self._mono: + outfile.write('[font=monospace]') + + lastval = '' + lasttype = None + + for ttype, value in tokensource: + while ttype not in self.styles: + ttype = ttype.parent + if ttype == lasttype: + lastval += value + else: + if lastval: + start, end = self.styles[lasttype] + outfile.write(''.join((start, lastval, end))) + lastval = value + lasttype = ttype + + if lastval: + start, end = self.styles[lasttype] + outfile.write(''.join((start, lastval, end))) + + if self._mono: + outfile.write('[/font]') + if self._code: + outfile.write('[/code]') + if self._code or self._mono: + outfile.write('\n') diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/groff.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/groff.py new file mode 100644 index 0000000..30a528e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/groff.py @@ -0,0 +1,170 @@ +""" + pygments.formatters.groff + ~~~~~~~~~~~~~~~~~~~~~~~~~ + + Formatter for groff output. + + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import math +from pip._vendor.pygments.formatter import Formatter +from pip._vendor.pygments.util import get_bool_opt, get_int_opt + +__all__ = ['GroffFormatter'] + + +class GroffFormatter(Formatter): + """ + Format tokens with groff escapes to change their color and font style. + + .. versionadded:: 2.11 + + Additional options accepted: + + `style` + The style to use, can be a string or a Style subclass (default: + ``'default'``). 
+ + `monospaced` + If set to true, monospace font will be used (default: ``true``). + + `linenos` + If set to true, print the line numbers (default: ``false``). + + `wrap` + Wrap lines to the specified number of characters. Disabled if set to 0 + (default: ``0``). + """ + + name = 'groff' + aliases = ['groff','troff','roff'] + filenames = [] + + def __init__(self, **options): + Formatter.__init__(self, **options) + + self.monospaced = get_bool_opt(options, 'monospaced', True) + self.linenos = get_bool_opt(options, 'linenos', False) + self._lineno = 0 + self.wrap = get_int_opt(options, 'wrap', 0) + self._linelen = 0 + + self.styles = {} + self._make_styles() + + + def _make_styles(self): + regular = '\\f[CR]' if self.monospaced else '\\f[R]' + bold = '\\f[CB]' if self.monospaced else '\\f[B]' + italic = '\\f[CI]' if self.monospaced else '\\f[I]' + + for ttype, ndef in self.style: + start = end = '' + if ndef['color']: + start += '\\m[%s]' % ndef['color'] + end = '\\m[]' + end + if ndef['bold']: + start += bold + end = regular + end + if ndef['italic']: + start += italic + end = regular + end + if ndef['bgcolor']: + start += '\\M[%s]' % ndef['bgcolor'] + end = '\\M[]' + end + + self.styles[ttype] = start, end + + + def _define_colors(self, outfile): + colors = set() + for _, ndef in self.style: + if ndef['color'] is not None: + colors.add(ndef['color']) + + for color in sorted(colors): + outfile.write('.defcolor ' + color + ' rgb #' + color + '\n') + + + def _write_lineno(self, outfile): + self._lineno += 1 + outfile.write("%s% 4d " % (self._lineno != 1 and '\n' or '', self._lineno)) + + + def _wrap_line(self, line): + length = len(line.rstrip('\n')) + space = ' ' if self.linenos else '' + newline = '' + + if length > self.wrap: + for i in range(0, math.floor(length / self.wrap)): + chunk = line[i*self.wrap:i*self.wrap+self.wrap] + newline += (chunk + '\n' + space) + remainder = length % self.wrap + if remainder > 0: + newline += line[-remainder-1:] + self._linelen = remainder + elif self._linelen + length > self.wrap: + newline = ('\n' + space) + line + self._linelen = length + else: + newline = line + self._linelen += length + + return newline + + + def _escape_chars(self, text): + text = text.replace('\\', '\\[u005C]'). \ + replace('.', '\\[char46]'). \ + replace('\'', '\\[u0027]'). \ + replace('`', '\\[u0060]'). 
\ + replace('~', '\\[u007E]') + copy = text + + for char in copy: + if len(char) != len(char.encode()): + uni = char.encode('unicode_escape') \ + .decode()[1:] \ + .replace('x', 'u00') \ + .upper() + text = text.replace(char, '\\[u' + uni[1:] + ']') + + return text + + + def format_unencoded(self, tokensource, outfile): + self._define_colors(outfile) + + outfile.write('.nf\n\\f[CR]\n') + + if self.linenos: + self._write_lineno(outfile) + + for ttype, value in tokensource: + while ttype not in self.styles: + ttype = ttype.parent + start, end = self.styles[ttype] + + for line in value.splitlines(True): + if self.wrap > 0: + line = self._wrap_line(line) + + if start and end: + text = self._escape_chars(line.rstrip('\n')) + if text != '': + outfile.write(''.join((start, text, end))) + else: + outfile.write(self._escape_chars(line.rstrip('\n'))) + + if line.endswith('\n'): + if self.linenos: + self._write_lineno(outfile) + self._linelen = 0 + else: + outfile.write('\n') + self._linelen = 0 + + outfile.write('\n.fi') diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/html.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/html.py new file mode 100644 index 0000000..931d7c3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/html.py @@ -0,0 +1,989 @@ +""" + pygments.formatters.html + ~~~~~~~~~~~~~~~~~~~~~~~~ + + Formatter for HTML output. + + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import functools +import os +import sys +import os.path +from io import StringIO + +from pip._vendor.pygments.formatter import Formatter +from pip._vendor.pygments.token import Token, Text, STANDARD_TYPES +from pip._vendor.pygments.util import get_bool_opt, get_int_opt, get_list_opt + +try: + import ctags +except ImportError: + ctags = None + +__all__ = ['HtmlFormatter'] + + +_escape_html_table = { + ord('&'): '&', + ord('<'): '<', + ord('>'): '>', + ord('"'): '"', + ord("'"): ''', +} + + +def escape_html(text, table=_escape_html_table): + """Escape &, <, > as well as single and double quotes for HTML.""" + return text.translate(table) + + +def webify(color): + if color.startswith('calc') or color.startswith('var'): + return color + else: + return '#' + color + + +def _get_ttype_class(ttype): + fname = STANDARD_TYPES.get(ttype) + if fname: + return fname + aname = '' + while fname is None: + aname = '-' + ttype[-1] + aname + ttype = ttype.parent + fname = STANDARD_TYPES.get(ttype) + return fname + aname + + +CSSFILE_TEMPLATE = '''\ +/* +generated by Pygments +Copyright 2006-2023 by the Pygments team. +Licensed under the BSD license, see LICENSE for details. +*/ +%(styledefs)s +''' + +DOC_HEADER = '''\ + + + + + %(title)s + + + + +

%(title)s

+ +''' + +DOC_HEADER_EXTERNALCSS = '''\ + + + + + %(title)s + + + + +

%(title)s

+ +''' + +DOC_FOOTER = '''\ + + +''' + + +class HtmlFormatter(Formatter): + r""" + Format tokens as HTML 4 ```` tags. By default, the content is enclosed + in a ``
<pre>`` tag, itself wrapped in a ``<div>`` tag (but see the `nowrap` option).
+    The ``<div>``'s CSS class can be set by the `cssclass` option.
+
+    If the `linenos` option is set to ``"table"``, the ``<pre>`` is
+    additionally wrapped inside a ``<table>`` which has one row and two
+    cells: one containing the line numbers and one containing the code.
+    Example:
+
+    .. sourcecode:: html
+
+        <div class="highlight" >
+        <table><tr>
+          <td class="linenos" title="click to toggle"
+            onclick="with (this.firstChild.style)
+                     { display = (display == '') ? 'none' : '' }">
+            <pre>1
+            2</pre>
+          </td>
+          <td class="code">
+            <pre><span class="Ke">def </span><span class="NaFu">foo</span>(bar):
+              <span class="Ke">pass</span>
+            </pre>
+          </td>
+        </tr></table></div>
+ + (whitespace added to improve clarity). + + A list of lines can be specified using the `hl_lines` option to make these + lines highlighted (as of Pygments 0.11). + + With the `full` option, a complete HTML 4 document is output, including + the style definitions inside a `` + {% else %} + {{ head | safe }} + {% endif %} +{% if not embed %} + + +{% endif %} +{{ body | safe }} +{% for diagram in diagrams %} +
<div class="railroad-group">
+        <h1 class="railroad-heading">{{ diagram.title }}</h1>
+        <div class="railroad-description">{{ diagram.text }}</div>
+        <div class="railroad-svg">
+            {{ diagram.svg }}
+        </div>
+    </div>
+{% endfor %} +{% if not embed %} + + +{% endif %} +""" + +template = Template(jinja2_template_source) + +# Note: ideally this would be a dataclass, but we're supporting Python 3.5+ so we can't do this yet +NamedDiagram = NamedTuple( + "NamedDiagram", + [("name", str), ("diagram", typing.Optional[railroad.DiagramItem]), ("index", int)], +) +""" +A simple structure for associating a name with a railroad diagram +""" + +T = TypeVar("T") + + +class EachItem(railroad.Group): + """ + Custom railroad item to compose a: + - Group containing a + - OneOrMore containing a + - Choice of the elements in the Each + with the group label indicating that all must be matched + """ + + all_label = "[ALL]" + + def __init__(self, *items): + choice_item = railroad.Choice(len(items) - 1, *items) + one_or_more_item = railroad.OneOrMore(item=choice_item) + super().__init__(one_or_more_item, label=self.all_label) + + +class AnnotatedItem(railroad.Group): + """ + Simple subclass of Group that creates an annotation label + """ + + def __init__(self, label: str, item): + super().__init__(item=item, label="[{}]".format(label) if label else label) + + +class EditablePartial(Generic[T]): + """ + Acts like a functools.partial, but can be edited. In other words, it represents a type that hasn't yet been + constructed. + """ + + # We need this here because the railroad constructors actually transform the data, so can't be called until the + # entire tree is assembled + + def __init__(self, func: Callable[..., T], args: list, kwargs: dict): + self.func = func + self.args = args + self.kwargs = kwargs + + @classmethod + def from_call(cls, func: Callable[..., T], *args, **kwargs) -> "EditablePartial[T]": + """ + If you call this function in the same way that you would call the constructor, it will store the arguments + as you expect. For example EditablePartial.from_call(Fraction, 1, 3)() == Fraction(1, 3) + """ + return EditablePartial(func=func, args=list(args), kwargs=kwargs) + + @property + def name(self): + return self.kwargs["name"] + + def __call__(self) -> T: + """ + Evaluate the partial and return the result + """ + args = self.args.copy() + kwargs = self.kwargs.copy() + + # This is a helpful hack to allow you to specify varargs parameters (e.g. *args) as keyword args (e.g. 
+ # args=['list', 'of', 'things']) + arg_spec = inspect.getfullargspec(self.func) + if arg_spec.varargs in self.kwargs: + args += kwargs.pop(arg_spec.varargs) + + return self.func(*args, **kwargs) + + +def railroad_to_html(diagrams: List[NamedDiagram], embed=False, **kwargs) -> str: + """ + Given a list of NamedDiagram, produce a single HTML string that visualises those diagrams + :params kwargs: kwargs to be passed in to the template + """ + data = [] + for diagram in diagrams: + if diagram.diagram is None: + continue + io = StringIO() + try: + css = kwargs.get('css') + diagram.diagram.writeStandalone(io.write, css=css) + except AttributeError: + diagram.diagram.writeSvg(io.write) + title = diagram.name + if diagram.index == 0: + title += " (root)" + data.append({"title": title, "text": "", "svg": io.getvalue()}) + + return template.render(diagrams=data, embed=embed, **kwargs) + + +def resolve_partial(partial: "EditablePartial[T]") -> T: + """ + Recursively resolves a collection of Partials into whatever type they are + """ + if isinstance(partial, EditablePartial): + partial.args = resolve_partial(partial.args) + partial.kwargs = resolve_partial(partial.kwargs) + return partial() + elif isinstance(partial, list): + return [resolve_partial(x) for x in partial] + elif isinstance(partial, dict): + return {key: resolve_partial(x) for key, x in partial.items()} + else: + return partial + + +def to_railroad( + element: pyparsing.ParserElement, + diagram_kwargs: typing.Optional[dict] = None, + vertical: int = 3, + show_results_names: bool = False, + show_groups: bool = False, +) -> List[NamedDiagram]: + """ + Convert a pyparsing element tree into a list of diagrams. This is the recommended entrypoint to diagram + creation if you want to access the Railroad tree before it is converted to HTML + :param element: base element of the parser being diagrammed + :param diagram_kwargs: kwargs to pass to the Diagram() constructor + :param vertical: (optional) - int - limit at which number of alternatives should be + shown vertically instead of horizontally + :param show_results_names - bool to indicate whether results name annotations should be + included in the diagram + :param show_groups - bool to indicate whether groups should be highlighted with an unlabeled + surrounding box + """ + # Convert the whole tree underneath the root + lookup = ConverterState(diagram_kwargs=diagram_kwargs or {}) + _to_diagram_element( + element, + lookup=lookup, + parent=None, + vertical=vertical, + show_results_names=show_results_names, + show_groups=show_groups, + ) + + root_id = id(element) + # Convert the root if it hasn't been already + if root_id in lookup: + if not element.customName: + lookup[root_id].name = "" + lookup[root_id].mark_for_extraction(root_id, lookup, force=True) + + # Now that we're finished, we can convert from intermediate structures into Railroad elements + diags = list(lookup.diagrams.values()) + if len(diags) > 1: + # collapse out duplicate diags with the same name + seen = set() + deduped_diags = [] + for d in diags: + # don't extract SkipTo elements, they are uninformative as subdiagrams + if d.name == "...": + continue + if d.name is not None and d.name not in seen: + seen.add(d.name) + deduped_diags.append(d) + resolved = [resolve_partial(partial) for partial in deduped_diags] + else: + # special case - if just one diagram, always display it, even if + # it has no name + resolved = [resolve_partial(partial) for partial in diags] + return sorted(resolved, key=lambda diag: diag.index) + + 
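# ---------------------------------------------------------------------------
# Editorial usage sketch (not part of the vendored module): the two entry
# points above, to_railroad() and railroad_to_html(), are normally reached
# through pyparsing's ParserElement.create_diagram() helper, but they can be
# called directly as shown below. The grammar, names, and output path are
# illustrative assumptions only, and the unvendored `pyparsing` and
# `railroad-diagrams` packages are assumed to be installed.
#
#   import pyparsing as pp
#   from pyparsing.diagram import to_railroad, railroad_to_html
#
#   number = pp.Word(pp.nums).set_name("number")
#   ident = pp.Word(pp.alphas).set_name("identifier")
#   term = (number | ident).set_name("term")
#   expr = pp.infix_notation(
#       term,
#       [(pp.one_of("* /"), 2, pp.OpAssoc.LEFT),
#        (pp.one_of("+ -"), 2, pp.OpAssoc.LEFT)],
#   ).set_name("expression")
#
#   # Build the intermediate railroad tree, then render it to a single HTML page.
#   diagrams = to_railroad(expr, vertical=3, show_results_names=True)
#   with open("expression_diagram.html", "w", encoding="utf-8") as handle:
#       handle.write(railroad_to_html(diagrams))
# ---------------------------------------------------------------------------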
+def _should_vertical( + specification: int, exprs: Iterable[pyparsing.ParserElement] +) -> bool: + """ + Returns true if we should return a vertical list of elements + """ + if specification is None: + return False + else: + return len(_visible_exprs(exprs)) >= specification + + +class ElementState: + """ + State recorded for an individual pyparsing Element + """ + + # Note: this should be a dataclass, but we have to support Python 3.5 + def __init__( + self, + element: pyparsing.ParserElement, + converted: EditablePartial, + parent: EditablePartial, + number: int, + name: str = None, + parent_index: typing.Optional[int] = None, + ): + #: The pyparsing element that this represents + self.element: pyparsing.ParserElement = element + #: The name of the element + self.name: typing.Optional[str] = name + #: The output Railroad element in an unconverted state + self.converted: EditablePartial = converted + #: The parent Railroad element, which we store so that we can extract this if it's duplicated + self.parent: EditablePartial = parent + #: The order in which we found this element, used for sorting diagrams if this is extracted into a diagram + self.number: int = number + #: The index of this inside its parent + self.parent_index: typing.Optional[int] = parent_index + #: If true, we should extract this out into a subdiagram + self.extract: bool = False + #: If true, all of this element's children have been filled out + self.complete: bool = False + + def mark_for_extraction( + self, el_id: int, state: "ConverterState", name: str = None, force: bool = False + ): + """ + Called when this instance has been seen twice, and thus should eventually be extracted into a sub-diagram + :param el_id: id of the element + :param state: element/diagram state tracker + :param name: name to use for this element's text + :param force: If true, force extraction now, regardless of the state of this. Only useful for extracting the + root element when we know we're finished + """ + self.extract = True + + # Set the name + if not self.name: + if name: + # Allow forcing a custom name + self.name = name + elif self.element.customName: + self.name = self.element.customName + else: + self.name = "" + + # Just because this is marked for extraction doesn't mean we can do it yet. We may have to wait for children + # to be added + # Also, if this is just a string literal etc, don't bother extracting it + if force or (self.complete and _worth_extracting(self.element)): + state.extract_into_diagram(el_id) + + +class ConverterState: + """ + Stores some state that persists between recursions into the element tree + """ + + def __init__(self, diagram_kwargs: typing.Optional[dict] = None): + #: A dictionary mapping ParserElements to state relating to them + self._element_diagram_states: Dict[int, ElementState] = {} + #: A dictionary mapping ParserElement IDs to subdiagrams generated from them + self.diagrams: Dict[int, EditablePartial[NamedDiagram]] = {} + #: The index of the next unnamed element + self.unnamed_index: int = 1 + #: The index of the next element. 
This is used for sorting + self.index: int = 0 + #: Shared kwargs that are used to customize the construction of diagrams + self.diagram_kwargs: dict = diagram_kwargs or {} + self.extracted_diagram_names: Set[str] = set() + + def __setitem__(self, key: int, value: ElementState): + self._element_diagram_states[key] = value + + def __getitem__(self, key: int) -> ElementState: + return self._element_diagram_states[key] + + def __delitem__(self, key: int): + del self._element_diagram_states[key] + + def __contains__(self, key: int): + return key in self._element_diagram_states + + def generate_unnamed(self) -> int: + """ + Generate a number used in the name of an otherwise unnamed diagram + """ + self.unnamed_index += 1 + return self.unnamed_index + + def generate_index(self) -> int: + """ + Generate a number used to index a diagram + """ + self.index += 1 + return self.index + + def extract_into_diagram(self, el_id: int): + """ + Used when we encounter the same token twice in the same tree. When this + happens, we replace all instances of that token with a terminal, and + create a new subdiagram for the token + """ + position = self[el_id] + + # Replace the original definition of this element with a regular block + if position.parent: + ret = EditablePartial.from_call(railroad.NonTerminal, text=position.name) + if "item" in position.parent.kwargs: + position.parent.kwargs["item"] = ret + elif "items" in position.parent.kwargs: + position.parent.kwargs["items"][position.parent_index] = ret + + # If the element we're extracting is a group, skip to its content but keep the title + if position.converted.func == railroad.Group: + content = position.converted.kwargs["item"] + else: + content = position.converted + + self.diagrams[el_id] = EditablePartial.from_call( + NamedDiagram, + name=position.name, + diagram=EditablePartial.from_call( + railroad.Diagram, content, **self.diagram_kwargs + ), + index=position.number, + ) + + del self[el_id] + + +def _worth_extracting(element: pyparsing.ParserElement) -> bool: + """ + Returns true if this element is worth having its own sub-diagram. 
Simply, if any of its children + themselves have children, then its complex enough to extract + """ + children = element.recurse() + return any(child.recurse() for child in children) + + +def _apply_diagram_item_enhancements(fn): + """ + decorator to ensure enhancements to a diagram item (such as results name annotations) + get applied on return from _to_diagram_element (we do this since there are several + returns in _to_diagram_element) + """ + + def _inner( + element: pyparsing.ParserElement, + parent: typing.Optional[EditablePartial], + lookup: ConverterState = None, + vertical: int = None, + index: int = 0, + name_hint: str = None, + show_results_names: bool = False, + show_groups: bool = False, + ) -> typing.Optional[EditablePartial]: + ret = fn( + element, + parent, + lookup, + vertical, + index, + name_hint, + show_results_names, + show_groups, + ) + + # apply annotation for results name, if present + if show_results_names and ret is not None: + element_results_name = element.resultsName + if element_results_name: + # add "*" to indicate if this is a "list all results" name + element_results_name += "" if element.modalResults else "*" + ret = EditablePartial.from_call( + railroad.Group, item=ret, label=element_results_name + ) + + return ret + + return _inner + + +def _visible_exprs(exprs: Iterable[pyparsing.ParserElement]): + non_diagramming_exprs = ( + pyparsing.ParseElementEnhance, + pyparsing.PositionToken, + pyparsing.And._ErrorStop, + ) + return [ + e + for e in exprs + if not (e.customName or e.resultsName or isinstance(e, non_diagramming_exprs)) + ] + + +@_apply_diagram_item_enhancements +def _to_diagram_element( + element: pyparsing.ParserElement, + parent: typing.Optional[EditablePartial], + lookup: ConverterState = None, + vertical: int = None, + index: int = 0, + name_hint: str = None, + show_results_names: bool = False, + show_groups: bool = False, +) -> typing.Optional[EditablePartial]: + """ + Recursively converts a PyParsing Element to a railroad Element + :param lookup: The shared converter state that keeps track of useful things + :param index: The index of this element within the parent + :param parent: The parent of this element in the output tree + :param vertical: Controls at what point we make a list of elements vertical. If this is an integer (the default), + it sets the threshold of the number of items before we go vertical. 
If True, always go vertical, if False, never + do so + :param name_hint: If provided, this will override the generated name + :param show_results_names: bool flag indicating whether to add annotations for results names + :returns: The converted version of the input element, but as a Partial that hasn't yet been constructed + :param show_groups: bool flag indicating whether to show groups using bounding box + """ + exprs = element.recurse() + name = name_hint or element.customName or element.__class__.__name__ + + # Python's id() is used to provide a unique identifier for elements + el_id = id(element) + + element_results_name = element.resultsName + + # Here we basically bypass processing certain wrapper elements if they contribute nothing to the diagram + if not element.customName: + if isinstance( + element, + ( + # pyparsing.TokenConverter, + # pyparsing.Forward, + pyparsing.Located, + ), + ): + # However, if this element has a useful custom name, and its child does not, we can pass it on to the child + if exprs: + if not exprs[0].customName: + propagated_name = name + else: + propagated_name = None + + return _to_diagram_element( + element.expr, + parent=parent, + lookup=lookup, + vertical=vertical, + index=index, + name_hint=propagated_name, + show_results_names=show_results_names, + show_groups=show_groups, + ) + + # If the element isn't worth extracting, we always treat it as the first time we say it + if _worth_extracting(element): + if el_id in lookup: + # If we've seen this element exactly once before, we are only just now finding out that it's a duplicate, + # so we have to extract it into a new diagram. + looked_up = lookup[el_id] + looked_up.mark_for_extraction(el_id, lookup, name=name_hint) + ret = EditablePartial.from_call(railroad.NonTerminal, text=looked_up.name) + return ret + + elif el_id in lookup.diagrams: + # If we have seen the element at least twice before, and have already extracted it into a subdiagram, we + # just put in a marker element that refers to the sub-diagram + ret = EditablePartial.from_call( + railroad.NonTerminal, text=lookup.diagrams[el_id].kwargs["name"] + ) + return ret + + # Recursively convert child elements + # Here we find the most relevant Railroad element for matching pyparsing Element + # We use ``items=[]`` here to hold the place for where the child elements will go once created + if isinstance(element, pyparsing.And): + # detect And's created with ``expr*N`` notation - for these use a OneOrMore with a repeat + # (all will have the same name, and resultsName) + if not exprs: + return None + if len(set((e.name, e.resultsName) for e in exprs)) == 1: + ret = EditablePartial.from_call( + railroad.OneOrMore, item="", repeat=str(len(exprs)) + ) + elif _should_vertical(vertical, exprs): + ret = EditablePartial.from_call(railroad.Stack, items=[]) + else: + ret = EditablePartial.from_call(railroad.Sequence, items=[]) + elif isinstance(element, (pyparsing.Or, pyparsing.MatchFirst)): + if not exprs: + return None + if _should_vertical(vertical, exprs): + ret = EditablePartial.from_call(railroad.Choice, 0, items=[]) + else: + ret = EditablePartial.from_call(railroad.HorizontalChoice, items=[]) + elif isinstance(element, pyparsing.Each): + if not exprs: + return None + ret = EditablePartial.from_call(EachItem, items=[]) + elif isinstance(element, pyparsing.NotAny): + ret = EditablePartial.from_call(AnnotatedItem, label="NOT", item="") + elif isinstance(element, pyparsing.FollowedBy): + ret = EditablePartial.from_call(AnnotatedItem, label="LOOKAHEAD", 
item="") + elif isinstance(element, pyparsing.PrecededBy): + ret = EditablePartial.from_call(AnnotatedItem, label="LOOKBEHIND", item="") + elif isinstance(element, pyparsing.Group): + if show_groups: + ret = EditablePartial.from_call(AnnotatedItem, label="", item="") + else: + ret = EditablePartial.from_call(railroad.Group, label="", item="") + elif isinstance(element, pyparsing.TokenConverter): + label = type(element).__name__.lower() + if label == "tokenconverter": + ret = EditablePartial.from_call(railroad.Sequence, items=[]) + else: + ret = EditablePartial.from_call(AnnotatedItem, label=label, item="") + elif isinstance(element, pyparsing.Opt): + ret = EditablePartial.from_call(railroad.Optional, item="") + elif isinstance(element, pyparsing.OneOrMore): + ret = EditablePartial.from_call(railroad.OneOrMore, item="") + elif isinstance(element, pyparsing.ZeroOrMore): + ret = EditablePartial.from_call(railroad.ZeroOrMore, item="") + elif isinstance(element, pyparsing.Group): + ret = EditablePartial.from_call( + railroad.Group, item=None, label=element_results_name + ) + elif isinstance(element, pyparsing.Empty) and not element.customName: + # Skip unnamed "Empty" elements + ret = None + elif isinstance(element, pyparsing.ParseElementEnhance): + ret = EditablePartial.from_call(railroad.Sequence, items=[]) + elif len(exprs) > 0 and not element_results_name: + ret = EditablePartial.from_call(railroad.Group, item="", label=name) + elif len(exprs) > 0: + ret = EditablePartial.from_call(railroad.Sequence, items=[]) + else: + terminal = EditablePartial.from_call(railroad.Terminal, element.defaultName) + ret = terminal + + if ret is None: + return + + # Indicate this element's position in the tree so we can extract it if necessary + lookup[el_id] = ElementState( + element=element, + converted=ret, + parent=parent, + parent_index=index, + number=lookup.generate_index(), + ) + if element.customName: + lookup[el_id].mark_for_extraction(el_id, lookup, element.customName) + + i = 0 + for expr in exprs: + # Add a placeholder index in case we have to extract the child before we even add it to the parent + if "items" in ret.kwargs: + ret.kwargs["items"].insert(i, None) + + item = _to_diagram_element( + expr, + parent=ret, + lookup=lookup, + vertical=vertical, + index=i, + show_results_names=show_results_names, + show_groups=show_groups, + ) + + # Some elements don't need to be shown in the diagram + if item is not None: + if "item" in ret.kwargs: + ret.kwargs["item"] = item + elif "items" in ret.kwargs: + # If we've already extracted the child, don't touch this index, since it's occupied by a nonterminal + ret.kwargs["items"][i] = item + i += 1 + elif "items" in ret.kwargs: + # If we're supposed to skip this element, remove it from the parent + del ret.kwargs["items"][i] + + # If all this items children are none, skip this item + if ret and ( + ("items" in ret.kwargs and len(ret.kwargs["items"]) == 0) + or ("item" in ret.kwargs and ret.kwargs["item"] is None) + ): + ret = EditablePartial.from_call(railroad.Terminal, name) + + # Mark this element as "complete", ie it has all of its children + if el_id in lookup: + lookup[el_id].complete = True + + if el_id in lookup and lookup[el_id].extract and lookup[el_id].complete: + lookup.extract_into_diagram(el_id) + if ret is not None: + ret = EditablePartial.from_call( + railroad.NonTerminal, text=lookup.diagrams[el_id].kwargs["name"] + ) + + return ret diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pyparsing/exceptions.py 
b/.venv/lib/python3.12/site-packages/pip/_vendor/pyparsing/exceptions.py new file mode 100644 index 0000000..12219f1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pyparsing/exceptions.py @@ -0,0 +1,299 @@ +# exceptions.py + +import re +import sys +import typing + +from .util import ( + col, + line, + lineno, + _collapse_string_to_ranges, + replaced_by_pep8, +) +from .unicode import pyparsing_unicode as ppu + + +class ExceptionWordUnicode(ppu.Latin1, ppu.LatinA, ppu.LatinB, ppu.Greek, ppu.Cyrillic): + pass + + +_extract_alphanums = _collapse_string_to_ranges(ExceptionWordUnicode.alphanums) +_exception_word_extractor = re.compile("([" + _extract_alphanums + "]{1,16})|.") + + +class ParseBaseException(Exception): + """base exception class for all parsing runtime exceptions""" + + loc: int + msg: str + pstr: str + parser_element: typing.Any # "ParserElement" + args: typing.Tuple[str, int, typing.Optional[str]] + + __slots__ = ( + "loc", + "msg", + "pstr", + "parser_element", + "args", + ) + + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__( + self, + pstr: str, + loc: int = 0, + msg: typing.Optional[str] = None, + elem=None, + ): + self.loc = loc + if msg is None: + self.msg = pstr + self.pstr = "" + else: + self.msg = msg + self.pstr = pstr + self.parser_element = elem + self.args = (pstr, loc, msg) + + @staticmethod + def explain_exception(exc, depth=16): + """ + Method to take an exception and translate the Python internal traceback into a list + of the pyparsing expressions that caused the exception to be raised. + + Parameters: + + - exc - exception raised during parsing (need not be a ParseException, in support + of Python exceptions that might be raised in a parse action) + - depth (default=16) - number of levels back in the stack trace to list expression + and function names; if None, the full stack trace names will be listed; if 0, only + the failing input line, marker, and exception string will be shown + + Returns a multi-line string listing the ParserElements and/or function names in the + exception's stack trace. 
+ """ + import inspect + from .core import ParserElement + + if depth is None: + depth = sys.getrecursionlimit() + ret = [] + if isinstance(exc, ParseBaseException): + ret.append(exc.line) + ret.append(" " * (exc.column - 1) + "^") + ret.append(f"{type(exc).__name__}: {exc}") + + if depth > 0: + callers = inspect.getinnerframes(exc.__traceback__, context=depth) + seen = set() + for i, ff in enumerate(callers[-depth:]): + frm = ff[0] + + f_self = frm.f_locals.get("self", None) + if isinstance(f_self, ParserElement): + if not frm.f_code.co_name.startswith( + ("parseImpl", "_parseNoCache") + ): + continue + if id(f_self) in seen: + continue + seen.add(id(f_self)) + + self_type = type(f_self) + ret.append( + f"{self_type.__module__}.{self_type.__name__} - {f_self}" + ) + + elif f_self is not None: + self_type = type(f_self) + ret.append(f"{self_type.__module__}.{self_type.__name__}") + + else: + code = frm.f_code + if code.co_name in ("wrapper", ""): + continue + + ret.append(code.co_name) + + depth -= 1 + if not depth: + break + + return "\n".join(ret) + + @classmethod + def _from_exception(cls, pe): + """ + internal factory method to simplify creating one type of ParseException + from another - avoids having __init__ signature conflicts among subclasses + """ + return cls(pe.pstr, pe.loc, pe.msg, pe.parser_element) + + @property + def line(self) -> str: + """ + Return the line of text where the exception occurred. + """ + return line(self.loc, self.pstr) + + @property + def lineno(self) -> int: + """ + Return the 1-based line number of text where the exception occurred. + """ + return lineno(self.loc, self.pstr) + + @property + def col(self) -> int: + """ + Return the 1-based column on the line of text where the exception occurred. + """ + return col(self.loc, self.pstr) + + @property + def column(self) -> int: + """ + Return the 1-based column on the line of text where the exception occurred. + """ + return col(self.loc, self.pstr) + + # pre-PEP8 compatibility + @property + def parserElement(self): + return self.parser_element + + @parserElement.setter + def parserElement(self, elem): + self.parser_element = elem + + def __str__(self) -> str: + if self.pstr: + if self.loc >= len(self.pstr): + foundstr = ", found end of text" + else: + # pull out next word at error location + found_match = _exception_word_extractor.match(self.pstr, self.loc) + if found_match is not None: + found = found_match.group(0) + else: + found = self.pstr[self.loc : self.loc + 1] + foundstr = (", found %r" % found).replace(r"\\", "\\") + else: + foundstr = "" + return f"{self.msg}{foundstr} (at char {self.loc}), (line:{self.lineno}, col:{self.column})" + + def __repr__(self): + return str(self) + + def mark_input_line( + self, marker_string: typing.Optional[str] = None, *, markerString: str = ">!<" + ) -> str: + """ + Extracts the exception line from the input string, and marks + the location of the exception with a special symbol. + """ + markerString = marker_string if marker_string is not None else markerString + line_str = self.line + line_column = self.column - 1 + if markerString: + line_str = "".join( + (line_str[:line_column], markerString, line_str[line_column:]) + ) + return line_str.strip() + + def explain(self, depth=16) -> str: + """ + Method to translate the Python internal traceback into a list + of the pyparsing expressions that caused the exception to be raised. 
+ + Parameters: + + - depth (default=16) - number of levels back in the stack trace to list expression + and function names; if None, the full stack trace names will be listed; if 0, only + the failing input line, marker, and exception string will be shown + + Returns a multi-line string listing the ParserElements and/or function names in the + exception's stack trace. + + Example:: + + expr = pp.Word(pp.nums) * 3 + try: + expr.parse_string("123 456 A789") + except pp.ParseException as pe: + print(pe.explain(depth=0)) + + prints:: + + 123 456 A789 + ^ + ParseException: Expected W:(0-9), found 'A' (at char 8), (line:1, col:9) + + Note: the diagnostic output will include string representations of the expressions + that failed to parse. These representations will be more helpful if you use `set_name` to + give identifiable names to your expressions. Otherwise they will use the default string + forms, which may be cryptic to read. + + Note: pyparsing's default truncation of exception tracebacks may also truncate the + stack of expressions that are displayed in the ``explain`` output. To get the full listing + of parser expressions, you may have to set ``ParserElement.verbose_stacktrace = True`` + """ + return self.explain_exception(self, depth) + + # fmt: off + @replaced_by_pep8(mark_input_line) + def markInputline(self): ... + # fmt: on + + +class ParseException(ParseBaseException): + """ + Exception thrown when a parse expression doesn't match the input string + + Example:: + + try: + Word(nums).set_name("integer").parse_string("ABC") + except ParseException as pe: + print(pe) + print("column: {}".format(pe.column)) + + prints:: + + Expected integer (at char 0), (line:1, col:1) + column: 1 + + """ + + +class ParseFatalException(ParseBaseException): + """ + User-throwable exception thrown when inconsistent parse content + is found; stops all parsing immediately + """ + + +class ParseSyntaxException(ParseFatalException): + """ + Just like :class:`ParseFatalException`, but thrown internally + when an :class:`ErrorStop` ('-' operator) indicates + that parsing is to stop immediately because an unbacktrackable + syntax error has been found. + """ + + +class RecursiveGrammarException(Exception): + """ + Exception thrown by :class:`ParserElement.validate` if the + grammar could be left-recursive; parser may need to enable + left recursion using :class:`ParserElement.enable_left_recursion` + """ + + def __init__(self, parseElementList): + self.parseElementTrace = parseElementList + + def __str__(self) -> str: + return f"RecursiveGrammarException: {self.parseElementTrace}" diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pyparsing/helpers.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pyparsing/helpers.py new file mode 100644 index 0000000..018f0d6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pyparsing/helpers.py @@ -0,0 +1,1100 @@ +# helpers.py +import html.entities +import re +import sys +import typing + +from . import __diag__ +from .core import * +from .util import ( + _bslash, + _flatten, + _escape_regex_range_chars, + replaced_by_pep8, +) + + +# +# global helpers +# +def counted_array( + expr: ParserElement, + int_expr: typing.Optional[ParserElement] = None, + *, + intExpr: typing.Optional[ParserElement] = None, +) -> ParserElement: + """Helper to define a counted list of expressions. + + This helper defines a pattern of the form:: + + integer expr expr expr... + + where the leading integer tells how many expr expressions follow. 
+ The matched tokens returns the array of expr tokens as a list - the + leading count token is suppressed. + + If ``int_expr`` is specified, it should be a pyparsing expression + that produces an integer value. + + Example:: + + counted_array(Word(alphas)).parse_string('2 ab cd ef') # -> ['ab', 'cd'] + + # in this parser, the leading integer value is given in binary, + # '10' indicating that 2 values are in the array + binary_constant = Word('01').set_parse_action(lambda t: int(t[0], 2)) + counted_array(Word(alphas), int_expr=binary_constant).parse_string('10 ab cd ef') # -> ['ab', 'cd'] + + # if other fields must be parsed after the count but before the + # list items, give the fields results names and they will + # be preserved in the returned ParseResults: + count_with_metadata = integer + Word(alphas)("type") + typed_array = counted_array(Word(alphanums), int_expr=count_with_metadata)("items") + result = typed_array.parse_string("3 bool True True False") + print(result.dump()) + + # prints + # ['True', 'True', 'False'] + # - items: ['True', 'True', 'False'] + # - type: 'bool' + """ + intExpr = intExpr or int_expr + array_expr = Forward() + + def count_field_parse_action(s, l, t): + nonlocal array_expr + n = t[0] + array_expr <<= (expr * n) if n else Empty() + # clear list contents, but keep any named results + del t[:] + + if intExpr is None: + intExpr = Word(nums).set_parse_action(lambda t: int(t[0])) + else: + intExpr = intExpr.copy() + intExpr.set_name("arrayLen") + intExpr.add_parse_action(count_field_parse_action, call_during_try=True) + return (intExpr + array_expr).set_name("(len) " + str(expr) + "...") + + +def match_previous_literal(expr: ParserElement) -> ParserElement: + """Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks for + a 'repeat' of a previous expression. For example:: + + first = Word(nums) + second = match_previous_literal(first) + match_expr = first + ":" + second + + will match ``"1:1"``, but not ``"1:2"``. Because this + matches a previous literal, will also match the leading + ``"1:1"`` in ``"1:10"``. If this is not desired, use + :class:`match_previous_expr`. Do *not* use with packrat parsing + enabled. + """ + rep = Forward() + + def copy_token_to_repeater(s, l, t): + if t: + if len(t) == 1: + rep << t[0] + else: + # flatten t tokens + tflat = _flatten(t.as_list()) + rep << And(Literal(tt) for tt in tflat) + else: + rep << Empty() + + expr.add_parse_action(copy_token_to_repeater, callDuringTry=True) + rep.set_name("(prev) " + str(expr)) + return rep + + +def match_previous_expr(expr: ParserElement) -> ParserElement: + """Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks for + a 'repeat' of a previous expression. For example:: + + first = Word(nums) + second = match_previous_expr(first) + match_expr = first + ":" + second + + will match ``"1:1"``, but not ``"1:2"``. Because this + matches by expressions, will *not* match the leading ``"1:1"`` + in ``"1:10"``; the expressions are evaluated first, and then + compared, so ``"1"`` is compared with ``"10"``. Do *not* use + with packrat parsing enabled. 
+ """ + rep = Forward() + e2 = expr.copy() + rep <<= e2 + + def copy_token_to_repeater(s, l, t): + matchTokens = _flatten(t.as_list()) + + def must_match_these_tokens(s, l, t): + theseTokens = _flatten(t.as_list()) + if theseTokens != matchTokens: + raise ParseException( + s, l, f"Expected {matchTokens}, found{theseTokens}" + ) + + rep.set_parse_action(must_match_these_tokens, callDuringTry=True) + + expr.add_parse_action(copy_token_to_repeater, callDuringTry=True) + rep.set_name("(prev) " + str(expr)) + return rep + + +def one_of( + strs: Union[typing.Iterable[str], str], + caseless: bool = False, + use_regex: bool = True, + as_keyword: bool = False, + *, + useRegex: bool = True, + asKeyword: bool = False, +) -> ParserElement: + """Helper to quickly define a set of alternative :class:`Literal` s, + and makes sure to do longest-first testing when there is a conflict, + regardless of the input order, but returns + a :class:`MatchFirst` for best performance. + + Parameters: + + - ``strs`` - a string of space-delimited literals, or a collection of + string literals + - ``caseless`` - treat all literals as caseless - (default= ``False``) + - ``use_regex`` - as an optimization, will + generate a :class:`Regex` object; otherwise, will generate + a :class:`MatchFirst` object (if ``caseless=True`` or ``as_keyword=True``, or if + creating a :class:`Regex` raises an exception) - (default= ``True``) + - ``as_keyword`` - enforce :class:`Keyword`-style matching on the + generated expressions - (default= ``False``) + - ``asKeyword`` and ``useRegex`` are retained for pre-PEP8 compatibility, + but will be removed in a future release + + Example:: + + comp_oper = one_of("< = > <= >= !=") + var = Word(alphas) + number = Word(nums) + term = var | number + comparison_expr = term + comp_oper + term + print(comparison_expr.search_string("B = 12 AA=23 B<=AA AA>12")) + + prints:: + + [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] + """ + asKeyword = asKeyword or as_keyword + useRegex = useRegex and use_regex + + if ( + isinstance(caseless, str_type) + and __diag__.warn_on_multiple_string_args_to_oneof + ): + warnings.warn( + "More than one string argument passed to one_of, pass" + " choices as a list or space-delimited string", + stacklevel=2, + ) + + if caseless: + isequal = lambda a, b: a.upper() == b.upper() + masks = lambda a, b: b.upper().startswith(a.upper()) + parseElementClass = CaselessKeyword if asKeyword else CaselessLiteral + else: + isequal = lambda a, b: a == b + masks = lambda a, b: b.startswith(a) + parseElementClass = Keyword if asKeyword else Literal + + symbols: List[str] = [] + if isinstance(strs, str_type): + strs = typing.cast(str, strs) + symbols = strs.split() + elif isinstance(strs, Iterable): + symbols = list(strs) + else: + raise TypeError("Invalid argument to one_of, expected string or iterable") + if not symbols: + return NoMatch() + + # reorder given symbols to take care to avoid masking longer choices with shorter ones + # (but only if the given symbols are not just single characters) + if any(len(sym) > 1 for sym in symbols): + i = 0 + while i < len(symbols) - 1: + cur = symbols[i] + for j, other in enumerate(symbols[i + 1 :]): + if isequal(other, cur): + del symbols[i + j + 1] + break + elif masks(cur, other): + del symbols[i + j + 1] + symbols.insert(i, other) + break + else: + i += 1 + + if useRegex: + re_flags: int = re.IGNORECASE if caseless else 0 + + try: + if all(len(sym) == 1 for sym in symbols): + # symbols are just single characters, create 
range regex pattern + patt = f"[{''.join(_escape_regex_range_chars(sym) for sym in symbols)}]" + else: + patt = "|".join(re.escape(sym) for sym in symbols) + + # wrap with \b word break markers if defining as keywords + if asKeyword: + patt = rf"\b(?:{patt})\b" + + ret = Regex(patt, flags=re_flags).set_name(" | ".join(symbols)) + + if caseless: + # add parse action to return symbols as specified, not in random + # casing as found in input string + symbol_map = {sym.lower(): sym for sym in symbols} + ret.add_parse_action(lambda s, l, t: symbol_map[t[0].lower()]) + + return ret + + except re.error: + warnings.warn( + "Exception creating Regex for one_of, building MatchFirst", stacklevel=2 + ) + + # last resort, just use MatchFirst + return MatchFirst(parseElementClass(sym) for sym in symbols).set_name( + " | ".join(symbols) + ) + + +def dict_of(key: ParserElement, value: ParserElement) -> ParserElement: + """Helper to easily and clearly define a dictionary by specifying + the respective patterns for the key and value. Takes care of + defining the :class:`Dict`, :class:`ZeroOrMore`, and + :class:`Group` tokens in the proper order. The key pattern + can include delimiting markers or punctuation, as long as they are + suppressed, thereby leaving the significant key text. The value + pattern can include named results, so that the :class:`Dict` results + can include named token fields. + + Example:: + + text = "shape: SQUARE posn: upper left color: light blue texture: burlap" + attr_expr = (label + Suppress(':') + OneOrMore(data_word, stop_on=label).set_parse_action(' '.join)) + print(attr_expr[1, ...].parse_string(text).dump()) + + attr_label = label + attr_value = Suppress(':') + OneOrMore(data_word, stop_on=label).set_parse_action(' '.join) + + # similar to Dict, but simpler call format + result = dict_of(attr_label, attr_value).parse_string(text) + print(result.dump()) + print(result['shape']) + print(result.shape) # object attribute access works too + print(result.as_dict()) + + prints:: + + [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] + - color: 'light blue' + - posn: 'upper left' + - shape: 'SQUARE' + - texture: 'burlap' + SQUARE + SQUARE + {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} + """ + return Dict(OneOrMore(Group(key + value))) + + +def original_text_for( + expr: ParserElement, as_string: bool = True, *, asString: bool = True +) -> ParserElement: + """Helper to return the original, untokenized text for a given + expression. Useful to restore the parsed fields of an HTML start + tag into the raw tag text itself, or to revert separate tokens with + intervening whitespace back to the original matching input text. By + default, returns a string containing the original parsed text. + + If the optional ``as_string`` argument is passed as + ``False``, then the return value is + a :class:`ParseResults` containing any results names that + were originally matched, and a single token containing the original + matched text from the input string. So if the expression passed to + :class:`original_text_for` contains expressions with defined + results names, you must set ``as_string`` to ``False`` if you + want to preserve those results name values. + + The ``asString`` pre-PEP8 argument is retained for compatibility, + but will be removed in a future release. 
+ + Example:: + + src = "this is test bold text normal text " + for tag in ("b", "i"): + opener, closer = make_html_tags(tag) + patt = original_text_for(opener + ... + closer) + print(patt.search_string(src)[0]) + + prints:: + + [' bold text '] + ['text'] + """ + asString = asString and as_string + + locMarker = Empty().set_parse_action(lambda s, loc, t: loc) + endlocMarker = locMarker.copy() + endlocMarker.callPreparse = False + matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") + if asString: + extractText = lambda s, l, t: s[t._original_start : t._original_end] + else: + + def extractText(s, l, t): + t[:] = [s[t.pop("_original_start") : t.pop("_original_end")]] + + matchExpr.set_parse_action(extractText) + matchExpr.ignoreExprs = expr.ignoreExprs + matchExpr.suppress_warning(Diagnostics.warn_ungrouped_named_tokens_in_collection) + return matchExpr + + +def ungroup(expr: ParserElement) -> ParserElement: + """Helper to undo pyparsing's default grouping of And expressions, + even if all but one are non-empty. + """ + return TokenConverter(expr).add_parse_action(lambda t: t[0]) + + +def locatedExpr(expr: ParserElement) -> ParserElement: + """ + (DEPRECATED - future code should use the :class:`Located` class) + Helper to decorate a returned token with its starting and ending + locations in the input string. + + This helper adds the following results names: + + - ``locn_start`` - location where matched expression begins + - ``locn_end`` - location where matched expression ends + - ``value`` - the actual parsed results + + Be careful if the input text contains ```` characters, you + may want to call :class:`ParserElement.parse_with_tabs` + + Example:: + + wd = Word(alphas) + for match in locatedExpr(wd).search_string("ljsdf123lksdjjf123lkkjj1222"): + print(match) + + prints:: + + [[0, 'ljsdf', 5]] + [[8, 'lksdjjf', 15]] + [[18, 'lkkjj', 23]] + """ + locator = Empty().set_parse_action(lambda ss, ll, tt: ll) + return Group( + locator("locn_start") + + expr("value") + + locator.copy().leaveWhitespace()("locn_end") + ) + + +def nested_expr( + opener: Union[str, ParserElement] = "(", + closer: Union[str, ParserElement] = ")", + content: typing.Optional[ParserElement] = None, + ignore_expr: ParserElement = quoted_string(), + *, + ignoreExpr: ParserElement = quoted_string(), +) -> ParserElement: + """Helper method for defining nested lists enclosed in opening and + closing delimiters (``"("`` and ``")"`` are the default). + + Parameters: + + - ``opener`` - opening character for a nested list + (default= ``"("``); can also be a pyparsing expression + - ``closer`` - closing character for a nested list + (default= ``")"``); can also be a pyparsing expression + - ``content`` - expression for items within the nested lists + (default= ``None``) + - ``ignore_expr`` - expression for ignoring opening and closing delimiters + (default= :class:`quoted_string`) + - ``ignoreExpr`` - this pre-PEP8 argument is retained for compatibility + but will be removed in a future release + + If an expression is not provided for the content argument, the + nested expression will capture all whitespace-delimited content + between delimiters as a list of separate values. + + Use the ``ignore_expr`` argument to define expressions that may + contain opening or closing characters that should not be treated as + opening or closing characters for nesting, such as quoted_string or + a comment expression. Specify multiple expressions using an + :class:`Or` or :class:`MatchFirst`. 
The default is + :class:`quoted_string`, but if no expressions are to be ignored, then + pass ``None`` for this argument. + + Example:: + + data_type = one_of("void int short long char float double") + decl_data_type = Combine(data_type + Opt(Word('*'))) + ident = Word(alphas+'_', alphanums+'_') + number = pyparsing_common.number + arg = Group(decl_data_type + ident) + LPAR, RPAR = map(Suppress, "()") + + code_body = nested_expr('{', '}', ignore_expr=(quoted_string | c_style_comment)) + + c_function = (decl_data_type("type") + + ident("name") + + LPAR + Opt(DelimitedList(arg), [])("args") + RPAR + + code_body("body")) + c_function.ignore(c_style_comment) + + source_code = ''' + int is_odd(int x) { + return (x%2); + } + + int dec_to_hex(char hchar) { + if (hchar >= '0' && hchar <= '9') { + return (ord(hchar)-ord('0')); + } else { + return (10+ord(hchar)-ord('A')); + } + } + ''' + for func in c_function.search_string(source_code): + print("%(name)s (%(type)s) args: %(args)s" % func) + + + prints:: + + is_odd (int) args: [['int', 'x']] + dec_to_hex (int) args: [['char', 'hchar']] + """ + if ignoreExpr != ignore_expr: + ignoreExpr = ignore_expr if ignoreExpr == quoted_string() else ignoreExpr + if opener == closer: + raise ValueError("opening and closing strings cannot be the same") + if content is None: + if isinstance(opener, str_type) and isinstance(closer, str_type): + opener = typing.cast(str, opener) + closer = typing.cast(str, closer) + if len(opener) == 1 and len(closer) == 1: + if ignoreExpr is not None: + content = Combine( + OneOrMore( + ~ignoreExpr + + CharsNotIn( + opener + closer + ParserElement.DEFAULT_WHITE_CHARS, + exact=1, + ) + ) + ).set_parse_action(lambda t: t[0].strip()) + else: + content = empty.copy() + CharsNotIn( + opener + closer + ParserElement.DEFAULT_WHITE_CHARS + ).set_parse_action(lambda t: t[0].strip()) + else: + if ignoreExpr is not None: + content = Combine( + OneOrMore( + ~ignoreExpr + + ~Literal(opener) + + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1) + ) + ).set_parse_action(lambda t: t[0].strip()) + else: + content = Combine( + OneOrMore( + ~Literal(opener) + + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1) + ) + ).set_parse_action(lambda t: t[0].strip()) + else: + raise ValueError( + "opening and closing arguments must be strings if no content expression is given" + ) + ret = Forward() + if ignoreExpr is not None: + ret <<= Group( + Suppress(opener) + ZeroOrMore(ignoreExpr | ret | content) + Suppress(closer) + ) + else: + ret <<= Group(Suppress(opener) + ZeroOrMore(ret | content) + Suppress(closer)) + ret.set_name("nested %s%s expression" % (opener, closer)) + return ret + + +def _makeTags(tagStr, xml, suppress_LT=Suppress("<"), suppress_GT=Suppress(">")): + """Internal helper to construct opening and closing tag expressions, given a tag name""" + if isinstance(tagStr, str_type): + resname = tagStr + tagStr = Keyword(tagStr, caseless=not xml) + else: + resname = tagStr.name + + tagAttrName = Word(alphas, alphanums + "_-:") + if xml: + tagAttrValue = dbl_quoted_string.copy().set_parse_action(remove_quotes) + openTag = ( + suppress_LT + + tagStr("tag") + + Dict(ZeroOrMore(Group(tagAttrName + Suppress("=") + tagAttrValue))) + + Opt("/", default=[False])("empty").set_parse_action( + lambda s, l, t: t[0] == "/" + ) + + suppress_GT + ) + else: + tagAttrValue = quoted_string.copy().set_parse_action(remove_quotes) | Word( + printables, exclude_chars=">" + ) + openTag = ( + suppress_LT + + tagStr("tag") 
+ + Dict( + ZeroOrMore( + Group( + tagAttrName.set_parse_action(lambda t: t[0].lower()) + + Opt(Suppress("=") + tagAttrValue) + ) + ) + ) + + Opt("/", default=[False])("empty").set_parse_action( + lambda s, l, t: t[0] == "/" + ) + + suppress_GT + ) + closeTag = Combine(Literal("", adjacent=False) + + openTag.set_name("<%s>" % resname) + # add start results name in parse action now that ungrouped names are not reported at two levels + openTag.add_parse_action( + lambda t: t.__setitem__( + "start" + "".join(resname.replace(":", " ").title().split()), t.copy() + ) + ) + closeTag = closeTag( + "end" + "".join(resname.replace(":", " ").title().split()) + ).set_name("" % resname) + openTag.tag = resname + closeTag.tag = resname + openTag.tag_body = SkipTo(closeTag()) + return openTag, closeTag + + +def make_html_tags( + tag_str: Union[str, ParserElement] +) -> Tuple[ParserElement, ParserElement]: + """Helper to construct opening and closing tag expressions for HTML, + given a tag name. Matches tags in either upper or lower case, + attributes with namespaces and with quoted or unquoted values. + + Example:: + + text = 'More info at the
pyparsing wiki page' + # make_html_tags returns pyparsing expressions for the opening and + # closing tags as a 2-tuple + a, a_end = make_html_tags("A") + link_expr = a + SkipTo(a_end)("link_text") + a_end + + for link in link_expr.search_string(text): + # attributes in the tag (like "href" shown here) are + # also accessible as named results + print(link.link_text, '->', link.href) + + prints:: + + pyparsing -> https://github.com/pyparsing/pyparsing/wiki + """ + return _makeTags(tag_str, False) + + +def make_xml_tags( + tag_str: Union[str, ParserElement] +) -> Tuple[ParserElement, ParserElement]: + """Helper to construct opening and closing tag expressions for XML, + given a tag name. Matches tags only in the given upper/lower case. + + Example: similar to :class:`make_html_tags` + """ + return _makeTags(tag_str, True) + + +any_open_tag: ParserElement +any_close_tag: ParserElement +any_open_tag, any_close_tag = make_html_tags( + Word(alphas, alphanums + "_:").set_name("any tag") +) + +_htmlEntityMap = {k.rstrip(";"): v for k, v in html.entities.html5.items()} +common_html_entity = Regex("&(?P" + "|".join(_htmlEntityMap) + ");").set_name( + "common HTML entity" +) + + +def replace_html_entity(s, l, t): + """Helper parser action to replace common HTML entities with their special characters""" + return _htmlEntityMap.get(t.entity) + + +class OpAssoc(Enum): + """Enumeration of operator associativity + - used in constructing InfixNotationOperatorSpec for :class:`infix_notation`""" + + LEFT = 1 + RIGHT = 2 + + +InfixNotationOperatorArgType = Union[ + ParserElement, str, Tuple[Union[ParserElement, str], Union[ParserElement, str]] +] +InfixNotationOperatorSpec = Union[ + Tuple[ + InfixNotationOperatorArgType, + int, + OpAssoc, + typing.Optional[ParseAction], + ], + Tuple[ + InfixNotationOperatorArgType, + int, + OpAssoc, + ], +] + + +def infix_notation( + base_expr: ParserElement, + op_list: List[InfixNotationOperatorSpec], + lpar: Union[str, ParserElement] = Suppress("("), + rpar: Union[str, ParserElement] = Suppress(")"), +) -> ParserElement: + """Helper method for constructing grammars of expressions made up of + operators working in a precedence hierarchy. Operators may be unary + or binary, left- or right-associative. Parse actions can also be + attached to operator expressions. The generated parser will also + recognize the use of parentheses to override operator precedences + (see example below). + + Note: if you define a deep operator list, you may see performance + issues when using infix_notation. See + :class:`ParserElement.enable_packrat` for a mechanism to potentially + improve your parser performance. + + Parameters: + + - ``base_expr`` - expression representing the most basic operand to + be used in the expression + - ``op_list`` - list of tuples, one for each operator precedence level + in the expression grammar; each tuple is of the form ``(op_expr, + num_operands, right_left_assoc, (optional)parse_action)``, where: + + - ``op_expr`` is the pyparsing expression for the operator; may also + be a string, which will be converted to a Literal; if ``num_operands`` + is 3, ``op_expr`` is a tuple of two expressions, for the two + operators separating the 3 terms + - ``num_operands`` is the number of terms for this operator (must be 1, + 2, or 3) + - ``right_left_assoc`` is the indicator whether the operator is right + or left associative, using the pyparsing-defined constants + ``OpAssoc.RIGHT`` and ``OpAssoc.LEFT``. 
+ - ``parse_action`` is the parse action to be associated with + expressions matching this operator expression (the parse action + tuple member may be omitted); if the parse action is passed + a tuple or list of functions, this is equivalent to calling + ``set_parse_action(*fn)`` + (:class:`ParserElement.set_parse_action`) + - ``lpar`` - expression for matching left-parentheses; if passed as a + str, then will be parsed as ``Suppress(lpar)``. If lpar is passed as + an expression (such as ``Literal('(')``), then it will be kept in + the parsed results, and grouped with them. (default= ``Suppress('(')``) + - ``rpar`` - expression for matching right-parentheses; if passed as a + str, then will be parsed as ``Suppress(rpar)``. If rpar is passed as + an expression (such as ``Literal(')')``), then it will be kept in + the parsed results, and grouped with them. (default= ``Suppress(')')``) + + Example:: + + # simple example of four-function arithmetic with ints and + # variable names + integer = pyparsing_common.signed_integer + varname = pyparsing_common.identifier + + arith_expr = infix_notation(integer | varname, + [ + ('-', 1, OpAssoc.RIGHT), + (one_of('* /'), 2, OpAssoc.LEFT), + (one_of('+ -'), 2, OpAssoc.LEFT), + ]) + + arith_expr.run_tests(''' + 5+3*6 + (5+3)*6 + -2--11 + ''', full_dump=False) + + prints:: + + 5+3*6 + [[5, '+', [3, '*', 6]]] + + (5+3)*6 + [[[5, '+', 3], '*', 6]] + + (5+x)*y + [[[5, '+', 'x'], '*', 'y']] + + -2--11 + [[['-', 2], '-', ['-', 11]]] + """ + + # captive version of FollowedBy that does not do parse actions or capture results names + class _FB(FollowedBy): + def parseImpl(self, instring, loc, doActions=True): + self.expr.try_parse(instring, loc) + return loc, [] + + _FB.__name__ = "FollowedBy>" + + ret = Forward() + if isinstance(lpar, str): + lpar = Suppress(lpar) + if isinstance(rpar, str): + rpar = Suppress(rpar) + + # if lpar and rpar are not suppressed, wrap in group + if not (isinstance(rpar, Suppress) and isinstance(rpar, Suppress)): + lastExpr = base_expr | Group(lpar + ret + rpar) + else: + lastExpr = base_expr | (lpar + ret + rpar) + + arity: int + rightLeftAssoc: opAssoc + pa: typing.Optional[ParseAction] + opExpr1: ParserElement + opExpr2: ParserElement + for i, operDef in enumerate(op_list): + opExpr, arity, rightLeftAssoc, pa = (operDef + (None,))[:4] # type: ignore[assignment] + if isinstance(opExpr, str_type): + opExpr = ParserElement._literalStringClass(opExpr) + opExpr = typing.cast(ParserElement, opExpr) + if arity == 3: + if not isinstance(opExpr, (tuple, list)) or len(opExpr) != 2: + raise ValueError( + "if numterms=3, opExpr must be a tuple or list of two expressions" + ) + opExpr1, opExpr2 = opExpr + term_name = f"{opExpr1}{opExpr2} term" + else: + term_name = f"{opExpr} term" + + if not 1 <= arity <= 3: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + + if rightLeftAssoc not in (OpAssoc.LEFT, OpAssoc.RIGHT): + raise ValueError("operator must indicate right or left associativity") + + thisExpr: ParserElement = Forward().set_name(term_name) + thisExpr = typing.cast(Forward, thisExpr) + if rightLeftAssoc is OpAssoc.LEFT: + if arity == 1: + matchExpr = _FB(lastExpr + opExpr) + Group(lastExpr + opExpr[1, ...]) + elif arity == 2: + if opExpr is not None: + matchExpr = _FB(lastExpr + opExpr + lastExpr) + Group( + lastExpr + (opExpr + lastExpr)[1, ...] 
+ ) + else: + matchExpr = _FB(lastExpr + lastExpr) + Group(lastExpr[2, ...]) + elif arity == 3: + matchExpr = _FB( + lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr + ) + Group(lastExpr + OneOrMore(opExpr1 + lastExpr + opExpr2 + lastExpr)) + elif rightLeftAssoc is OpAssoc.RIGHT: + if arity == 1: + # try to avoid LR with this extra test + if not isinstance(opExpr, Opt): + opExpr = Opt(opExpr) + matchExpr = _FB(opExpr.expr + thisExpr) + Group(opExpr + thisExpr) + elif arity == 2: + if opExpr is not None: + matchExpr = _FB(lastExpr + opExpr + thisExpr) + Group( + lastExpr + (opExpr + thisExpr)[1, ...] + ) + else: + matchExpr = _FB(lastExpr + thisExpr) + Group( + lastExpr + thisExpr[1, ...] + ) + elif arity == 3: + matchExpr = _FB( + lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr + ) + Group(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + if pa: + if isinstance(pa, (tuple, list)): + matchExpr.set_parse_action(*pa) + else: + matchExpr.set_parse_action(pa) + thisExpr <<= (matchExpr | lastExpr).setName(term_name) + lastExpr = thisExpr + ret <<= lastExpr + return ret + + +def indentedBlock(blockStatementExpr, indentStack, indent=True, backup_stacks=[]): + """ + (DEPRECATED - use :class:`IndentedBlock` class instead) + Helper method for defining space-delimited indentation blocks, + such as those used to define block statements in Python source code. + + Parameters: + + - ``blockStatementExpr`` - expression defining syntax of statement that + is repeated within the indented block + - ``indentStack`` - list created by caller to manage indentation stack + (multiple ``statementWithIndentedBlock`` expressions within a single + grammar should share a common ``indentStack``) + - ``indent`` - boolean indicating whether block must be indented beyond + the current level; set to ``False`` for block of left-most statements + (default= ``True``) + + A valid block must contain at least one ``blockStatement``. + + (Note that indentedBlock uses internal parse actions which make it + incompatible with packrat parsing.) + + Example:: + + data = ''' + def A(z): + A1 + B = 100 + G = A2 + A2 + A3 + B + def BB(a,b,c): + BB1 + def BBA(): + bba1 + bba2 + bba3 + C + D + def spam(x,y): + def eggs(z): + pass + ''' + + + indentStack = [1] + stmt = Forward() + + identifier = Word(alphas, alphanums) + funcDecl = ("def" + identifier + Group("(" + Opt(delimitedList(identifier)) + ")") + ":") + func_body = indentedBlock(stmt, indentStack) + funcDef = Group(funcDecl + func_body) + + rvalue = Forward() + funcCall = Group(identifier + "(" + Opt(delimitedList(rvalue)) + ")") + rvalue << (funcCall | identifier | Word(nums)) + assignment = Group(identifier + "=" + rvalue) + stmt << (funcDef | assignment | identifier) + + module_body = stmt[1, ...] 
+ + parseTree = module_body.parseString(data) + parseTree.pprint() + + prints:: + + [['def', + 'A', + ['(', 'z', ')'], + ':', + [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], + 'B', + ['def', + 'BB', + ['(', 'a', 'b', 'c', ')'], + ':', + [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], + 'C', + 'D', + ['def', + 'spam', + ['(', 'x', 'y', ')'], + ':', + [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] + """ + backup_stacks.append(indentStack[:]) + + def reset_stack(): + indentStack[:] = backup_stacks[-1] + + def checkPeerIndent(s, l, t): + if l >= len(s): + return + curCol = col(l, s) + if curCol != indentStack[-1]: + if curCol > indentStack[-1]: + raise ParseException(s, l, "illegal nesting") + raise ParseException(s, l, "not a peer entry") + + def checkSubIndent(s, l, t): + curCol = col(l, s) + if curCol > indentStack[-1]: + indentStack.append(curCol) + else: + raise ParseException(s, l, "not a subentry") + + def checkUnindent(s, l, t): + if l >= len(s): + return + curCol = col(l, s) + if not (indentStack and curCol in indentStack): + raise ParseException(s, l, "not an unindent") + if curCol < indentStack[-1]: + indentStack.pop() + + NL = OneOrMore(LineEnd().set_whitespace_chars("\t ").suppress()) + INDENT = (Empty() + Empty().set_parse_action(checkSubIndent)).set_name("INDENT") + PEER = Empty().set_parse_action(checkPeerIndent).set_name("") + UNDENT = Empty().set_parse_action(checkUnindent).set_name("UNINDENT") + if indent: + smExpr = Group( + Opt(NL) + + INDENT + + OneOrMore(PEER + Group(blockStatementExpr) + Opt(NL)) + + UNDENT + ) + else: + smExpr = Group( + Opt(NL) + + OneOrMore(PEER + Group(blockStatementExpr) + Opt(NL)) + + Opt(UNDENT) + ) + + # add a parse action to remove backup_stack from list of backups + smExpr.add_parse_action( + lambda: backup_stacks.pop(-1) and None if backup_stacks else None + ) + smExpr.set_fail_action(lambda a, b, c, d: reset_stack()) + blockStatementExpr.ignore(_bslash + LineEnd()) + return smExpr.set_name("indented block") + + +# it's easy to get these comment structures wrong - they're very common, so may as well make them available +c_style_comment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + "*/").set_name( + "C style comment" +) +"Comment of the form ``/* ... */``" + +html_comment = Regex(r"").set_name("HTML comment") +"Comment of the form ````" + +rest_of_line = Regex(r".*").leave_whitespace().set_name("rest of line") +dbl_slash_comment = Regex(r"//(?:\\\n|[^\n])*").set_name("// comment") +"Comment of the form ``// ... (to end of line)``" + +cpp_style_comment = Combine( + Regex(r"/\*(?:[^*]|\*(?!/))*") + "*/" | dbl_slash_comment +).set_name("C++ style comment") +"Comment of either form :class:`c_style_comment` or :class:`dbl_slash_comment`" + +java_style_comment = cpp_style_comment +"Same as :class:`cpp_style_comment`" + +python_style_comment = Regex(r"#.*").set_name("Python style comment") +"Comment of the form ``# ... 
(to end of line)``" + + +# build list of built-in expressions, for future reference if a global default value +# gets updated +_builtin_exprs: List[ParserElement] = [ + v for v in vars().values() if isinstance(v, ParserElement) +] + + +# compatibility function, superseded by DelimitedList class +def delimited_list( + expr: Union[str, ParserElement], + delim: Union[str, ParserElement] = ",", + combine: bool = False, + min: typing.Optional[int] = None, + max: typing.Optional[int] = None, + *, + allow_trailing_delim: bool = False, +) -> ParserElement: + """(DEPRECATED - use :class:`DelimitedList` class)""" + return DelimitedList( + expr, delim, combine, min, max, allow_trailing_delim=allow_trailing_delim + ) + + +# pre-PEP8 compatible names +# fmt: off +opAssoc = OpAssoc +anyOpenTag = any_open_tag +anyCloseTag = any_close_tag +commonHTMLEntity = common_html_entity +cStyleComment = c_style_comment +htmlComment = html_comment +restOfLine = rest_of_line +dblSlashComment = dbl_slash_comment +cppStyleComment = cpp_style_comment +javaStyleComment = java_style_comment +pythonStyleComment = python_style_comment + +@replaced_by_pep8(DelimitedList) +def delimitedList(): ... + +@replaced_by_pep8(DelimitedList) +def delimited_list(): ... + +@replaced_by_pep8(counted_array) +def countedArray(): ... + +@replaced_by_pep8(match_previous_literal) +def matchPreviousLiteral(): ... + +@replaced_by_pep8(match_previous_expr) +def matchPreviousExpr(): ... + +@replaced_by_pep8(one_of) +def oneOf(): ... + +@replaced_by_pep8(dict_of) +def dictOf(): ... + +@replaced_by_pep8(original_text_for) +def originalTextFor(): ... + +@replaced_by_pep8(nested_expr) +def nestedExpr(): ... + +@replaced_by_pep8(make_html_tags) +def makeHTMLTags(): ... + +@replaced_by_pep8(make_xml_tags) +def makeXMLTags(): ... + +@replaced_by_pep8(replace_html_entity) +def replaceHTMLEntity(): ... + +@replaced_by_pep8(infix_notation) +def infixNotation(): ... +# fmt: on diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pyparsing/results.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pyparsing/results.py new file mode 100644 index 0000000..0313049 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pyparsing/results.py @@ -0,0 +1,796 @@ +# results.py +from collections.abc import ( + MutableMapping, + Mapping, + MutableSequence, + Iterator, + Sequence, + Container, +) +import pprint +from typing import Tuple, Any, Dict, Set, List + +str_type: Tuple[type, ...] = (str, bytes) +_generator_type = type((_ for _ in ())) + + +class _ParseResultsWithOffset: + tup: Tuple["ParseResults", int] + __slots__ = ["tup"] + + def __init__(self, p1: "ParseResults", p2: int): + self.tup: Tuple[ParseResults, int] = (p1, p2) + + def __getitem__(self, i): + return self.tup[i] + + def __getstate__(self): + return self.tup + + def __setstate__(self, *args): + self.tup = args[0] + + +class ParseResults: + """Structured parse results, to provide multiple means of access to + the parsed data: + + - as a list (``len(results)``) + - by list index (``results[0], results[1]``, etc.) 
+ - by attribute (``results.`` - see :class:`ParserElement.set_results_name`) + + Example:: + + integer = Word(nums) + date_str = (integer.set_results_name("year") + '/' + + integer.set_results_name("month") + '/' + + integer.set_results_name("day")) + # equivalent form: + # date_str = (integer("year") + '/' + # + integer("month") + '/' + # + integer("day")) + + # parse_string returns a ParseResults object + result = date_str.parse_string("1999/12/31") + + def test(s, fn=repr): + print(f"{s} -> {fn(eval(s))}") + test("list(result)") + test("result[0]") + test("result['month']") + test("result.day") + test("'month' in result") + test("'minutes' in result") + test("result.dump()", str) + + prints:: + + list(result) -> ['1999', '/', '12', '/', '31'] + result[0] -> '1999' + result['month'] -> '12' + result.day -> '31' + 'month' in result -> True + 'minutes' in result -> False + result.dump() -> ['1999', '/', '12', '/', '31'] + - day: '31' + - month: '12' + - year: '1999' + """ + + _null_values: Tuple[Any, ...] = (None, [], ()) + + _name: str + _parent: "ParseResults" + _all_names: Set[str] + _modal: bool + _toklist: List[Any] + _tokdict: Dict[str, Any] + + __slots__ = ( + "_name", + "_parent", + "_all_names", + "_modal", + "_toklist", + "_tokdict", + ) + + class List(list): + """ + Simple wrapper class to distinguish parsed list results that should be preserved + as actual Python lists, instead of being converted to :class:`ParseResults`:: + + LBRACK, RBRACK = map(pp.Suppress, "[]") + element = pp.Forward() + item = ppc.integer + element_list = LBRACK + pp.DelimitedList(element) + RBRACK + + # add parse actions to convert from ParseResults to actual Python collection types + def as_python_list(t): + return pp.ParseResults.List(t.as_list()) + element_list.add_parse_action(as_python_list) + + element <<= item | element_list + + element.run_tests(''' + 100 + [2,3,4] + [[2, 1],3,4] + [(2, 1),3,4] + (2,3,4) + ''', post_parse=lambda s, r: (r[0], type(r[0]))) + + prints:: + + 100 + (100, ) + + [2,3,4] + ([2, 3, 4], ) + + [[2, 1],3,4] + ([[2, 1], 3, 4], ) + + (Used internally by :class:`Group` when `aslist=True`.) 
+ """ + + def __new__(cls, contained=None): + if contained is None: + contained = [] + + if not isinstance(contained, list): + raise TypeError( + f"{cls.__name__} may only be constructed with a list, not {type(contained).__name__}" + ) + + return list.__new__(cls) + + def __new__(cls, toklist=None, name=None, **kwargs): + if isinstance(toklist, ParseResults): + return toklist + self = object.__new__(cls) + self._name = None + self._parent = None + self._all_names = set() + + if toklist is None: + self._toklist = [] + elif isinstance(toklist, (list, _generator_type)): + self._toklist = ( + [toklist[:]] + if isinstance(toklist, ParseResults.List) + else list(toklist) + ) + else: + self._toklist = [toklist] + self._tokdict = dict() + return self + + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__( + self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance + ): + self._tokdict: Dict[str, _ParseResultsWithOffset] + self._modal = modal + if name is not None and name != "": + if isinstance(name, int): + name = str(name) + if not modal: + self._all_names = {name} + self._name = name + if toklist not in self._null_values: + if isinstance(toklist, (str_type, type)): + toklist = [toklist] + if asList: + if isinstance(toklist, ParseResults): + self[name] = _ParseResultsWithOffset( + ParseResults(toklist._toklist), 0 + ) + else: + self[name] = _ParseResultsWithOffset( + ParseResults(toklist[0]), 0 + ) + self[name]._name = name + else: + try: + self[name] = toklist[0] + except (KeyError, TypeError, IndexError): + if toklist is not self: + self[name] = toklist + else: + self._name = name + + def __getitem__(self, i): + if isinstance(i, (int, slice)): + return self._toklist[i] + else: + if i not in self._all_names: + return self._tokdict[i][-1][0] + else: + return ParseResults([v[0] for v in self._tokdict[i]]) + + def __setitem__(self, k, v, isinstance=isinstance): + if isinstance(v, _ParseResultsWithOffset): + self._tokdict[k] = self._tokdict.get(k, list()) + [v] + sub = v[0] + elif isinstance(k, (int, slice)): + self._toklist[k] = v + sub = v + else: + self._tokdict[k] = self._tokdict.get(k, list()) + [ + _ParseResultsWithOffset(v, 0) + ] + sub = v + if isinstance(sub, ParseResults): + sub._parent = self + + def __delitem__(self, i): + if isinstance(i, (int, slice)): + mylen = len(self._toklist) + del self._toklist[i] + + # convert int to slice + if isinstance(i, int): + if i < 0: + i += mylen + i = slice(i, i + 1) + # get removed indices + removed = list(range(*i.indices(mylen))) + removed.reverse() + # fixup indices in token dictionary + for name, occurrences in self._tokdict.items(): + for j in removed: + for k, (value, position) in enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset( + value, position - (position > j) + ) + else: + del self._tokdict[i] + + def __contains__(self, k) -> bool: + return k in self._tokdict + + def __len__(self) -> int: + return len(self._toklist) + + def __bool__(self) -> bool: + return not not (self._toklist or self._tokdict) + + def __iter__(self) -> Iterator: + return iter(self._toklist) + + def __reversed__(self) -> Iterator: + return iter(self._toklist[::-1]) + + def keys(self): + return iter(self._tokdict) + + def values(self): + return (self[k] for k in self.keys()) + + def items(self): + return ((k, self[k]) for k in self.keys()) + + def haskeys(self) -> bool: + """ + Since ``keys()`` returns an iterator, this method is helpful in bypassing + code 
that looks for the existence of any defined results names.""" + return not not self._tokdict + + def pop(self, *args, **kwargs): + """ + Removes and returns item at specified index (default= ``last``). + Supports both ``list`` and ``dict`` semantics for ``pop()``. If + passed no argument or an integer argument, it will use ``list`` + semantics and pop tokens from the list of parsed tokens. If passed + a non-integer argument (most likely a string), it will use ``dict`` + semantics and pop the corresponding value from any defined results + names. A second default return value argument is supported, just as in + ``dict.pop()``. + + Example:: + + numlist = Word(nums)[...] + print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321'] + + def remove_first(tokens): + tokens.pop(0) + numlist.add_parse_action(remove_first) + print(numlist.parse_string("0 123 321")) # -> ['123', '321'] + + label = Word(alphas) + patt = label("LABEL") + Word(nums)[1, ...] + print(patt.parse_string("AAB 123 321").dump()) + + # Use pop() in a parse action to remove named result (note that corresponding value is not + # removed from list form of results) + def remove_LABEL(tokens): + tokens.pop("LABEL") + return tokens + patt.add_parse_action(remove_LABEL) + print(patt.parse_string("AAB 123 321").dump()) + + prints:: + + ['AAB', '123', '321'] + - LABEL: 'AAB' + + ['AAB', '123', '321'] + """ + if not args: + args = [-1] + for k, v in kwargs.items(): + if k == "default": + args = (args[0], v) + else: + raise TypeError(f"pop() got an unexpected keyword argument {k!r}") + if isinstance(args[0], int) or len(args) == 1 or args[0] in self: + index = args[0] + ret = self[index] + del self[index] + return ret + else: + defaultvalue = args[1] + return defaultvalue + + def get(self, key, default_value=None): + """ + Returns named result matching the given key, or if there is no + such name, then returns the given ``default_value`` or ``None`` if no + ``default_value`` is specified. + + Similar to ``dict.get()``. + + Example:: + + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parse_string("1999/12/31") + print(result.get("year")) # -> '1999' + print(result.get("hour", "not specified")) # -> 'not specified' + print(result.get("hour")) # -> None + """ + if key in self: + return self[key] + else: + return default_value + + def insert(self, index, ins_string): + """ + Inserts new element at location index in the list of parsed tokens. + + Similar to ``list.insert()``. + + Example:: + + numlist = Word(nums)[...] + print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321'] + + # use a parse action to insert the parse location in the front of the parsed results + def insert_locn(locn, tokens): + tokens.insert(0, locn) + numlist.add_parse_action(insert_locn) + print(numlist.parse_string("0 123 321")) # -> [0, '0', '123', '321'] + """ + self._toklist.insert(index, ins_string) + # fixup indices in token dictionary + for name, occurrences in self._tokdict.items(): + for k, (value, position) in enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset( + value, position + (position > index) + ) + + def append(self, item): + """ + Add single element to end of ``ParseResults`` list of elements. + + Example:: + + numlist = Word(nums)[...] 
+ print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321'] + + # use a parse action to compute the sum of the parsed integers, and add it to the end + def append_sum(tokens): + tokens.append(sum(map(int, tokens))) + numlist.add_parse_action(append_sum) + print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321', 444] + """ + self._toklist.append(item) + + def extend(self, itemseq): + """ + Add sequence of elements to end of ``ParseResults`` list of elements. + + Example:: + + patt = Word(alphas)[1, ...] + + # use a parse action to append the reverse of the matched strings, to make a palindrome + def make_palindrome(tokens): + tokens.extend(reversed([t[::-1] for t in tokens])) + return ''.join(tokens) + patt.add_parse_action(make_palindrome) + print(patt.parse_string("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl' + """ + if isinstance(itemseq, ParseResults): + self.__iadd__(itemseq) + else: + self._toklist.extend(itemseq) + + def clear(self): + """ + Clear all elements and results names. + """ + del self._toklist[:] + self._tokdict.clear() + + def __getattr__(self, name): + try: + return self[name] + except KeyError: + if name.startswith("__"): + raise AttributeError(name) + return "" + + def __add__(self, other: "ParseResults") -> "ParseResults": + ret = self.copy() + ret += other + return ret + + def __iadd__(self, other: "ParseResults") -> "ParseResults": + if not other: + return self + + if other._tokdict: + offset = len(self._toklist) + addoffset = lambda a: offset if a < 0 else a + offset + otheritems = other._tokdict.items() + otherdictitems = [ + (k, _ParseResultsWithOffset(v[0], addoffset(v[1]))) + for k, vlist in otheritems + for v in vlist + ] + for k, v in otherdictitems: + self[k] = v + if isinstance(v[0], ParseResults): + v[0]._parent = self + + self._toklist += other._toklist + self._all_names |= other._all_names + return self + + def __radd__(self, other) -> "ParseResults": + if isinstance(other, int) and other == 0: + # useful for merging many ParseResults using sum() builtin + return self.copy() + else: + # this may raise a TypeError - so be it + return other + self + + def __repr__(self) -> str: + return f"{type(self).__name__}({self._toklist!r}, {self.as_dict()})" + + def __str__(self) -> str: + return ( + "[" + + ", ".join( + [ + str(i) if isinstance(i, ParseResults) else repr(i) + for i in self._toklist + ] + ) + + "]" + ) + + def _asStringList(self, sep=""): + out = [] + for item in self._toklist: + if out and sep: + out.append(sep) + if isinstance(item, ParseResults): + out += item._asStringList() + else: + out.append(str(item)) + return out + + def as_list(self) -> list: + """ + Returns the parse results as a nested list of matching tokens, all converted to strings. + + Example:: + + patt = Word(alphas)[1, ...] + result = patt.parse_string("sldkj lsdkj sldkj") + # even though the result prints in string-like form, it is actually a pyparsing ParseResults + print(type(result), result) # -> ['sldkj', 'lsdkj', 'sldkj'] + + # Use as_list() to create an actual list + result_list = result.as_list() + print(type(result_list), result_list) # -> ['sldkj', 'lsdkj', 'sldkj'] + """ + return [ + res.as_list() if isinstance(res, ParseResults) else res + for res in self._toklist + ] + + def as_dict(self) -> dict: + """ + Returns the named parse results as a nested dictionary. 
+ + Example:: + + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parse_string('12/31/1999') + print(type(result), repr(result)) # -> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]}) + + result_dict = result.as_dict() + print(type(result_dict), repr(result_dict)) # -> {'day': '1999', 'year': '12', 'month': '31'} + + # even though a ParseResults supports dict-like access, sometime you just need to have a dict + import json + print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable + print(json.dumps(result.as_dict())) # -> {"month": "31", "day": "1999", "year": "12"} + """ + + def to_item(obj): + if isinstance(obj, ParseResults): + return obj.as_dict() if obj.haskeys() else [to_item(v) for v in obj] + else: + return obj + + return dict((k, to_item(v)) for k, v in self.items()) + + def copy(self) -> "ParseResults": + """ + Returns a new shallow copy of a :class:`ParseResults` object. `ParseResults` + items contained within the source are shared with the copy. Use + :class:`ParseResults.deepcopy()` to create a copy with its own separate + content values. + """ + ret = ParseResults(self._toklist) + ret._tokdict = self._tokdict.copy() + ret._parent = self._parent + ret._all_names |= self._all_names + ret._name = self._name + return ret + + def deepcopy(self) -> "ParseResults": + """ + Returns a new deep copy of a :class:`ParseResults` object. + """ + ret = self.copy() + # replace values with copies if they are of known mutable types + for i, obj in enumerate(self._toklist): + if isinstance(obj, ParseResults): + self._toklist[i] = obj.deepcopy() + elif isinstance(obj, (str, bytes)): + pass + elif isinstance(obj, MutableMapping): + self._toklist[i] = dest = type(obj)() + for k, v in obj.items(): + dest[k] = v.deepcopy() if isinstance(v, ParseResults) else v + elif isinstance(obj, Container): + self._toklist[i] = type(obj)( + v.deepcopy() if isinstance(v, ParseResults) else v for v in obj + ) + return ret + + def get_name(self): + r""" + Returns the results name for this token expression. Useful when several + different expressions might match at a particular location. + + Example:: + + integer = Word(nums) + ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d") + house_number_expr = Suppress('#') + Word(nums, alphanums) + user_data = (Group(house_number_expr)("house_number") + | Group(ssn_expr)("ssn") + | Group(integer)("age")) + user_info = user_data[1, ...] + + result = user_info.parse_string("22 111-22-3333 #221B") + for item in result: + print(item.get_name(), ':', item[0]) + + prints:: + + age : 22 + ssn : 111-22-3333 + house_number : 221B + """ + if self._name: + return self._name + elif self._parent: + par: "ParseResults" = self._parent + parent_tokdict_items = par._tokdict.items() + return next( + ( + k + for k, vlist in parent_tokdict_items + for v, loc in vlist + if v is self + ), + None, + ) + elif ( + len(self) == 1 + and len(self._tokdict) == 1 + and next(iter(self._tokdict.values()))[0][1] in (0, -1) + ): + return next(iter(self._tokdict.keys())) + else: + return None + + def dump(self, indent="", full=True, include_list=True, _depth=0) -> str: + """ + Diagnostic method for listing out the contents of + a :class:`ParseResults`. Accepts an optional ``indent`` argument so + that this string can be embedded in a nested display of other data. 
+ + Example:: + + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parse_string('1999/12/31') + print(result.dump()) + + prints:: + + ['1999', '/', '12', '/', '31'] + - day: '31' + - month: '12' + - year: '1999' + """ + out = [] + NL = "\n" + out.append(indent + str(self.as_list()) if include_list else "") + + if full: + if self.haskeys(): + items = sorted((str(k), v) for k, v in self.items()) + for k, v in items: + if out: + out.append(NL) + out.append(f"{indent}{(' ' * _depth)}- {k}: ") + if isinstance(v, ParseResults): + if v: + out.append( + v.dump( + indent=indent, + full=full, + include_list=include_list, + _depth=_depth + 1, + ) + ) + else: + out.append(str(v)) + else: + out.append(repr(v)) + if any(isinstance(vv, ParseResults) for vv in self): + v = self + for i, vv in enumerate(v): + if isinstance(vv, ParseResults): + out.append( + "\n{}{}[{}]:\n{}{}{}".format( + indent, + (" " * (_depth)), + i, + indent, + (" " * (_depth + 1)), + vv.dump( + indent=indent, + full=full, + include_list=include_list, + _depth=_depth + 1, + ), + ) + ) + else: + out.append( + "\n%s%s[%d]:\n%s%s%s" + % ( + indent, + (" " * (_depth)), + i, + indent, + (" " * (_depth + 1)), + str(vv), + ) + ) + + return "".join(out) + + def pprint(self, *args, **kwargs): + """ + Pretty-printer for parsed results as a list, using the + `pprint `_ module. + Accepts additional positional or keyword args as defined for + `pprint.pprint `_ . + + Example:: + + ident = Word(alphas, alphanums) + num = Word(nums) + func = Forward() + term = ident | num | Group('(' + func + ')') + func <<= ident + Group(Optional(DelimitedList(term))) + result = func.parse_string("fna a,b,(fnb c,d,200),100") + result.pprint(width=40) + + prints:: + + ['fna', + ['a', + 'b', + ['(', 'fnb', ['c', 'd', '200'], ')'], + '100']] + """ + pprint.pprint(self.as_list(), *args, **kwargs) + + # add support for pickle protocol + def __getstate__(self): + return ( + self._toklist, + ( + self._tokdict.copy(), + None, + self._all_names, + self._name, + ), + ) + + def __setstate__(self, state): + self._toklist, (self._tokdict, par, inAccumNames, self._name) = state + self._all_names = set(inAccumNames) + self._parent = None + + def __getnewargs__(self): + return self._toklist, self._name + + def __dir__(self): + return dir(type(self)) + list(self.keys()) + + @classmethod + def from_dict(cls, other, name=None) -> "ParseResults": + """ + Helper classmethod to construct a ``ParseResults`` from a ``dict``, preserving the + name-value relations as results names. If an optional ``name`` argument is + given, a nested ``ParseResults`` will be returned. 
+ """ + + def is_iterable(obj): + try: + iter(obj) + except Exception: + return False + # str's are iterable, but in pyparsing, we don't want to iterate over them + else: + return not isinstance(obj, str_type) + + ret = cls([]) + for k, v in other.items(): + if isinstance(v, Mapping): + ret += cls.from_dict(v, name=k) + else: + ret += cls([v], name=k, asList=is_iterable(v)) + if name is not None: + ret = cls([ret], name=name) + return ret + + asList = as_list + """Deprecated - use :class:`as_list`""" + asDict = as_dict + """Deprecated - use :class:`as_dict`""" + getName = get_name + """Deprecated - use :class:`get_name`""" + + +MutableMapping.register(ParseResults) +MutableSequence.register(ParseResults) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pyparsing/testing.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pyparsing/testing.py new file mode 100644 index 0000000..6a254c1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pyparsing/testing.py @@ -0,0 +1,331 @@ +# testing.py + +from contextlib import contextmanager +import typing + +from .core import ( + ParserElement, + ParseException, + Keyword, + __diag__, + __compat__, +) + + +class pyparsing_test: + """ + namespace class for classes useful in writing unit tests + """ + + class reset_pyparsing_context: + """ + Context manager to be used when writing unit tests that modify pyparsing config values: + - packrat parsing + - bounded recursion parsing + - default whitespace characters. + - default keyword characters + - literal string auto-conversion class + - __diag__ settings + + Example:: + + with reset_pyparsing_context(): + # test that literals used to construct a grammar are automatically suppressed + ParserElement.inlineLiteralsUsing(Suppress) + + term = Word(alphas) | Word(nums) + group = Group('(' + term[...] 
+ ')') + + # assert that the '()' characters are not included in the parsed tokens + self.assertParseAndCheckList(group, "(abc 123 def)", ['abc', '123', 'def']) + + # after exiting context manager, literals are converted to Literal expressions again + """ + + def __init__(self): + self._save_context = {} + + def save(self): + self._save_context["default_whitespace"] = ParserElement.DEFAULT_WHITE_CHARS + self._save_context["default_keyword_chars"] = Keyword.DEFAULT_KEYWORD_CHARS + + self._save_context[ + "literal_string_class" + ] = ParserElement._literalStringClass + + self._save_context["verbose_stacktrace"] = ParserElement.verbose_stacktrace + + self._save_context["packrat_enabled"] = ParserElement._packratEnabled + if ParserElement._packratEnabled: + self._save_context[ + "packrat_cache_size" + ] = ParserElement.packrat_cache.size + else: + self._save_context["packrat_cache_size"] = None + self._save_context["packrat_parse"] = ParserElement._parse + self._save_context[ + "recursion_enabled" + ] = ParserElement._left_recursion_enabled + + self._save_context["__diag__"] = { + name: getattr(__diag__, name) for name in __diag__._all_names + } + + self._save_context["__compat__"] = { + "collect_all_And_tokens": __compat__.collect_all_And_tokens + } + + return self + + def restore(self): + # reset pyparsing global state + if ( + ParserElement.DEFAULT_WHITE_CHARS + != self._save_context["default_whitespace"] + ): + ParserElement.set_default_whitespace_chars( + self._save_context["default_whitespace"] + ) + + ParserElement.verbose_stacktrace = self._save_context["verbose_stacktrace"] + + Keyword.DEFAULT_KEYWORD_CHARS = self._save_context["default_keyword_chars"] + ParserElement.inlineLiteralsUsing( + self._save_context["literal_string_class"] + ) + + for name, value in self._save_context["__diag__"].items(): + (__diag__.enable if value else __diag__.disable)(name) + + ParserElement._packratEnabled = False + if self._save_context["packrat_enabled"]: + ParserElement.enable_packrat(self._save_context["packrat_cache_size"]) + else: + ParserElement._parse = self._save_context["packrat_parse"] + ParserElement._left_recursion_enabled = self._save_context[ + "recursion_enabled" + ] + + __compat__.collect_all_And_tokens = self._save_context["__compat__"] + + return self + + def copy(self): + ret = type(self)() + ret._save_context.update(self._save_context) + return ret + + def __enter__(self): + return self.save() + + def __exit__(self, *args): + self.restore() + + class TestParseResultsAsserts: + """ + A mixin class to add parse results assertion methods to normal unittest.TestCase classes. + """ + + def assertParseResultsEquals( + self, result, expected_list=None, expected_dict=None, msg=None + ): + """ + Unit test assertion to compare a :class:`ParseResults` object with an optional ``expected_list``, + and compare any defined results names with an optional ``expected_dict``. + """ + if expected_list is not None: + self.assertEqual(expected_list, result.as_list(), msg=msg) + if expected_dict is not None: + self.assertEqual(expected_dict, result.as_dict(), msg=msg) + + def assertParseAndCheckList( + self, expr, test_string, expected_list, msg=None, verbose=True + ): + """ + Convenience wrapper assert to test a parser element and input string, and assert that + the resulting ``ParseResults.asList()`` is equal to the ``expected_list``. 
+ """ + result = expr.parse_string(test_string, parse_all=True) + if verbose: + print(result.dump()) + else: + print(result.as_list()) + self.assertParseResultsEquals(result, expected_list=expected_list, msg=msg) + + def assertParseAndCheckDict( + self, expr, test_string, expected_dict, msg=None, verbose=True + ): + """ + Convenience wrapper assert to test a parser element and input string, and assert that + the resulting ``ParseResults.asDict()`` is equal to the ``expected_dict``. + """ + result = expr.parse_string(test_string, parseAll=True) + if verbose: + print(result.dump()) + else: + print(result.as_list()) + self.assertParseResultsEquals(result, expected_dict=expected_dict, msg=msg) + + def assertRunTestResults( + self, run_tests_report, expected_parse_results=None, msg=None + ): + """ + Unit test assertion to evaluate output of ``ParserElement.runTests()``. If a list of + list-dict tuples is given as the ``expected_parse_results`` argument, then these are zipped + with the report tuples returned by ``runTests`` and evaluated using ``assertParseResultsEquals``. + Finally, asserts that the overall ``runTests()`` success value is ``True``. + + :param run_tests_report: tuple(bool, [tuple(str, ParseResults or Exception)]) returned from runTests + :param expected_parse_results (optional): [tuple(str, list, dict, Exception)] + """ + run_test_success, run_test_results = run_tests_report + + if expected_parse_results is not None: + merged = [ + (*rpt, expected) + for rpt, expected in zip(run_test_results, expected_parse_results) + ] + for test_string, result, expected in merged: + # expected should be a tuple containing a list and/or a dict or an exception, + # and optional failure message string + # an empty tuple will skip any result validation + fail_msg = next( + (exp for exp in expected if isinstance(exp, str)), None + ) + expected_exception = next( + ( + exp + for exp in expected + if isinstance(exp, type) and issubclass(exp, Exception) + ), + None, + ) + if expected_exception is not None: + with self.assertRaises( + expected_exception=expected_exception, msg=fail_msg or msg + ): + if isinstance(result, Exception): + raise result + else: + expected_list = next( + (exp for exp in expected if isinstance(exp, list)), None + ) + expected_dict = next( + (exp for exp in expected if isinstance(exp, dict)), None + ) + if (expected_list, expected_dict) != (None, None): + self.assertParseResultsEquals( + result, + expected_list=expected_list, + expected_dict=expected_dict, + msg=fail_msg or msg, + ) + else: + # warning here maybe? + print(f"no validation for {test_string!r}") + + # do this last, in case some specific test results can be reported instead + self.assertTrue( + run_test_success, msg=msg if msg is not None else "failed runTests" + ) + + @contextmanager + def assertRaisesParseException(self, exc_type=ParseException, msg=None): + with self.assertRaises(exc_type, msg=msg): + yield + + @staticmethod + def with_line_numbers( + s: str, + start_line: typing.Optional[int] = None, + end_line: typing.Optional[int] = None, + expand_tabs: bool = True, + eol_mark: str = "|", + mark_spaces: typing.Optional[str] = None, + mark_control: typing.Optional[str] = None, + ) -> str: + """ + Helpful method for debugging a parser - prints a string with line and column numbers. + (Line and column numbers are 1-based.) 
+ + :param s: tuple(bool, str - string to be printed with line and column numbers + :param start_line: int - (optional) starting line number in s to print (default=1) + :param end_line: int - (optional) ending line number in s to print (default=len(s)) + :param expand_tabs: bool - (optional) expand tabs to spaces, to match the pyparsing default + :param eol_mark: str - (optional) string to mark the end of lines, helps visualize trailing spaces (default="|") + :param mark_spaces: str - (optional) special character to display in place of spaces + :param mark_control: str - (optional) convert non-printing control characters to a placeholding + character; valid values: + - "unicode" - replaces control chars with Unicode symbols, such as "␍" and "␊" + - any single character string - replace control characters with given string + - None (default) - string is displayed as-is + + :return: str - input string with leading line numbers and column number headers + """ + if expand_tabs: + s = s.expandtabs() + if mark_control is not None: + mark_control = typing.cast(str, mark_control) + if mark_control == "unicode": + transtable_map = { + c: u for c, u in zip(range(0, 33), range(0x2400, 0x2433)) + } + transtable_map[127] = 0x2421 + tbl = str.maketrans(transtable_map) + eol_mark = "" + else: + ord_mark_control = ord(mark_control) + tbl = str.maketrans( + {c: ord_mark_control for c in list(range(0, 32)) + [127]} + ) + s = s.translate(tbl) + if mark_spaces is not None and mark_spaces != " ": + if mark_spaces == "unicode": + tbl = str.maketrans({9: 0x2409, 32: 0x2423}) + s = s.translate(tbl) + else: + s = s.replace(" ", mark_spaces) + if start_line is None: + start_line = 1 + if end_line is None: + end_line = len(s) + end_line = min(end_line, len(s)) + start_line = min(max(1, start_line), end_line) + + if mark_control != "unicode": + s_lines = s.splitlines()[start_line - 1 : end_line] + else: + s_lines = [line + "␊" for line in s.split("␊")[start_line - 1 : end_line]] + if not s_lines: + return "" + + lineno_width = len(str(end_line)) + max_line_len = max(len(line) for line in s_lines) + lead = " " * (lineno_width + 1) + if max_line_len >= 99: + header0 = ( + lead + + "".join( + f"{' ' * 99}{(i + 1) % 100}" + for i in range(max(max_line_len // 100, 1)) + ) + + "\n" + ) + else: + header0 = "" + header1 = ( + header0 + + lead + + "".join(f" {(i + 1) % 10}" for i in range(-(-max_line_len // 10))) + + "\n" + ) + header2 = lead + "1234567890" * (-(-max_line_len // 10)) + "\n" + return ( + header1 + + header2 + + "\n".join( + f"{i:{lineno_width}d}:{line}{eol_mark}" + for i, line in enumerate(s_lines, start=start_line) + ) + + "\n" + ) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pyparsing/unicode.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pyparsing/unicode.py new file mode 100644 index 0000000..ec0b3a4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pyparsing/unicode.py @@ -0,0 +1,361 @@ +# unicode.py + +import sys +from itertools import filterfalse +from typing import List, Tuple, Union + + +class _lazyclassproperty: + def __init__(self, fn): + self.fn = fn + self.__doc__ = fn.__doc__ + self.__name__ = fn.__name__ + + def __get__(self, obj, cls): + if cls is None: + cls = type(obj) + if not hasattr(cls, "_intern") or any( + cls._intern is getattr(superclass, "_intern", []) + for superclass in cls.__mro__[1:] + ): + cls._intern = {} + attrname = self.fn.__name__ + if attrname not in cls._intern: + cls._intern[attrname] = self.fn(cls) + return cls._intern[attrname] + + 
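+# NOTE (editor's annotation, not part of the upstream pyparsing sources):
+# _lazyclassproperty above computes a class-level value on first access and
+# caches it in that class's own ``_intern`` dict, so each subclass in the
+# unicode_set hierarchy below keeps its own cached character data. A minimal,
+# self-contained sketch of the pattern (the name ``_UpperAscii`` is purely
+# illustrative and does not exist in this module):
+#
+#     class _UpperAscii:
+#         _codepoints = (0x41, 0x5A)  # 'A'..'Z'
+#
+#         @_lazyclassproperty
+#         def chars(cls):
+#             # computed once per class, then served from cls._intern
+#             lo, hi = cls._codepoints
+#             return "".join(chr(c) for c in range(lo, hi + 1))
+#
+#     # first access computes and caches; later accesses reuse the cache
+#     assert _UpperAscii.chars.startswith("ABC")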
+UnicodeRangeList = List[Union[Tuple[int, int], Tuple[int]]] + + +class unicode_set: + """ + A set of Unicode characters, for language-specific strings for + ``alphas``, ``nums``, ``alphanums``, and ``printables``. + A unicode_set is defined by a list of ranges in the Unicode character + set, in a class attribute ``_ranges``. Ranges can be specified using + 2-tuples or a 1-tuple, such as:: + + _ranges = [ + (0x0020, 0x007e), + (0x00a0, 0x00ff), + (0x0100,), + ] + + Ranges are left- and right-inclusive. A 1-tuple of (x,) is treated as (x, x). + + A unicode set can also be defined using multiple inheritance of other unicode sets:: + + class CJK(Chinese, Japanese, Korean): + pass + """ + + _ranges: UnicodeRangeList = [] + + @_lazyclassproperty + def _chars_for_ranges(cls): + ret = [] + for cc in cls.__mro__: + if cc is unicode_set: + break + for rr in getattr(cc, "_ranges", ()): + ret.extend(range(rr[0], rr[-1] + 1)) + return [chr(c) for c in sorted(set(ret))] + + @_lazyclassproperty + def printables(cls): + """all non-whitespace characters in this range""" + return "".join(filterfalse(str.isspace, cls._chars_for_ranges)) + + @_lazyclassproperty + def alphas(cls): + """all alphabetic characters in this range""" + return "".join(filter(str.isalpha, cls._chars_for_ranges)) + + @_lazyclassproperty + def nums(cls): + """all numeric digit characters in this range""" + return "".join(filter(str.isdigit, cls._chars_for_ranges)) + + @_lazyclassproperty + def alphanums(cls): + """all alphanumeric characters in this range""" + return cls.alphas + cls.nums + + @_lazyclassproperty + def identchars(cls): + """all characters in this range that are valid identifier characters, plus underscore '_'""" + return "".join( + sorted( + set( + "".join(filter(str.isidentifier, cls._chars_for_ranges)) + + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµº" + + "ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ" + + "_" + ) + ) + ) + + @_lazyclassproperty + def identbodychars(cls): + """ + all characters in this range that are valid identifier body characters, + plus the digits 0-9, and · (Unicode MIDDLE DOT) + """ + return "".join( + sorted( + set( + cls.identchars + + "0123456789·" + + "".join( + [c for c in cls._chars_for_ranges if ("_" + c).isidentifier()] + ) + ) + ) + ) + + @_lazyclassproperty + def identifier(cls): + """ + a pyparsing Word expression for an identifier using this range's definitions for + identchars and identbodychars + """ + from pip._vendor.pyparsing import Word + + return Word(cls.identchars, cls.identbodychars) + + +class pyparsing_unicode(unicode_set): + """ + A namespace class for defining common language unicode_sets. 
+ """ + + # fmt: off + + # define ranges in language character sets + _ranges: UnicodeRangeList = [ + (0x0020, sys.maxunicode), + ] + + class BasicMultilingualPlane(unicode_set): + """Unicode set for the Basic Multilingual Plane""" + _ranges: UnicodeRangeList = [ + (0x0020, 0xFFFF), + ] + + class Latin1(unicode_set): + """Unicode set for Latin-1 Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x0020, 0x007E), + (0x00A0, 0x00FF), + ] + + class LatinA(unicode_set): + """Unicode set for Latin-A Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x0100, 0x017F), + ] + + class LatinB(unicode_set): + """Unicode set for Latin-B Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x0180, 0x024F), + ] + + class Greek(unicode_set): + """Unicode set for Greek Unicode Character Ranges""" + _ranges: UnicodeRangeList = [ + (0x0342, 0x0345), + (0x0370, 0x0377), + (0x037A, 0x037F), + (0x0384, 0x038A), + (0x038C,), + (0x038E, 0x03A1), + (0x03A3, 0x03E1), + (0x03F0, 0x03FF), + (0x1D26, 0x1D2A), + (0x1D5E,), + (0x1D60,), + (0x1D66, 0x1D6A), + (0x1F00, 0x1F15), + (0x1F18, 0x1F1D), + (0x1F20, 0x1F45), + (0x1F48, 0x1F4D), + (0x1F50, 0x1F57), + (0x1F59,), + (0x1F5B,), + (0x1F5D,), + (0x1F5F, 0x1F7D), + (0x1F80, 0x1FB4), + (0x1FB6, 0x1FC4), + (0x1FC6, 0x1FD3), + (0x1FD6, 0x1FDB), + (0x1FDD, 0x1FEF), + (0x1FF2, 0x1FF4), + (0x1FF6, 0x1FFE), + (0x2129,), + (0x2719, 0x271A), + (0xAB65,), + (0x10140, 0x1018D), + (0x101A0,), + (0x1D200, 0x1D245), + (0x1F7A1, 0x1F7A7), + ] + + class Cyrillic(unicode_set): + """Unicode set for Cyrillic Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x0400, 0x052F), + (0x1C80, 0x1C88), + (0x1D2B,), + (0x1D78,), + (0x2DE0, 0x2DFF), + (0xA640, 0xA672), + (0xA674, 0xA69F), + (0xFE2E, 0xFE2F), + ] + + class Chinese(unicode_set): + """Unicode set for Chinese Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x2E80, 0x2E99), + (0x2E9B, 0x2EF3), + (0x31C0, 0x31E3), + (0x3400, 0x4DB5), + (0x4E00, 0x9FEF), + (0xA700, 0xA707), + (0xF900, 0xFA6D), + (0xFA70, 0xFAD9), + (0x16FE2, 0x16FE3), + (0x1F210, 0x1F212), + (0x1F214, 0x1F23B), + (0x1F240, 0x1F248), + (0x20000, 0x2A6D6), + (0x2A700, 0x2B734), + (0x2B740, 0x2B81D), + (0x2B820, 0x2CEA1), + (0x2CEB0, 0x2EBE0), + (0x2F800, 0x2FA1D), + ] + + class Japanese(unicode_set): + """Unicode set for Japanese Unicode Character Range, combining Kanji, Hiragana, and Katakana ranges""" + + class Kanji(unicode_set): + "Unicode set for Kanji Unicode Character Range" + _ranges: UnicodeRangeList = [ + (0x4E00, 0x9FBF), + (0x3000, 0x303F), + ] + + class Hiragana(unicode_set): + """Unicode set for Hiragana Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x3041, 0x3096), + (0x3099, 0x30A0), + (0x30FC,), + (0xFF70,), + (0x1B001,), + (0x1B150, 0x1B152), + (0x1F200,), + ] + + class Katakana(unicode_set): + """Unicode set for Katakana Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x3099, 0x309C), + (0x30A0, 0x30FF), + (0x31F0, 0x31FF), + (0x32D0, 0x32FE), + (0xFF65, 0xFF9F), + (0x1B000,), + (0x1B164, 0x1B167), + (0x1F201, 0x1F202), + (0x1F213,), + ] + + 漢字 = Kanji + カタカナ = Katakana + ひらがな = Hiragana + + _ranges = ( + Kanji._ranges + + Hiragana._ranges + + Katakana._ranges + ) + + class Hangul(unicode_set): + """Unicode set for Hangul (Korean) Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x1100, 0x11FF), + (0x302E, 0x302F), + (0x3131, 0x318E), + (0x3200, 0x321C), + (0x3260, 0x327B), + (0x327E,), + (0xA960, 0xA97C), + (0xAC00, 0xD7A3), + (0xD7B0, 0xD7C6), + (0xD7CB, 
0xD7FB), + (0xFFA0, 0xFFBE), + (0xFFC2, 0xFFC7), + (0xFFCA, 0xFFCF), + (0xFFD2, 0xFFD7), + (0xFFDA, 0xFFDC), + ] + + Korean = Hangul + + class CJK(Chinese, Japanese, Hangul): + """Unicode set for combined Chinese, Japanese, and Korean (CJK) Unicode Character Range""" + + class Thai(unicode_set): + """Unicode set for Thai Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x0E01, 0x0E3A), + (0x0E3F, 0x0E5B) + ] + + class Arabic(unicode_set): + """Unicode set for Arabic Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x0600, 0x061B), + (0x061E, 0x06FF), + (0x0700, 0x077F), + ] + + class Hebrew(unicode_set): + """Unicode set for Hebrew Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x0591, 0x05C7), + (0x05D0, 0x05EA), + (0x05EF, 0x05F4), + (0xFB1D, 0xFB36), + (0xFB38, 0xFB3C), + (0xFB3E,), + (0xFB40, 0xFB41), + (0xFB43, 0xFB44), + (0xFB46, 0xFB4F), + ] + + class Devanagari(unicode_set): + """Unicode set for Devanagari Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x0900, 0x097F), + (0xA8E0, 0xA8FF) + ] + + BMP = BasicMultilingualPlane + + # add language identifiers using language Unicode + العربية = Arabic + 中文 = Chinese + кириллица = Cyrillic + Ελληνικά = Greek + עִברִית = Hebrew + 日本語 = Japanese + 한국어 = Korean + ไทย = Thai + देवनागरी = Devanagari + + # fmt: on diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pyparsing/util.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pyparsing/util.py new file mode 100644 index 0000000..d8d3f41 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pyparsing/util.py @@ -0,0 +1,284 @@ +# util.py +import inspect +import warnings +import types +import collections +import itertools +from functools import lru_cache, wraps +from typing import Callable, List, Union, Iterable, TypeVar, cast + +_bslash = chr(92) +C = TypeVar("C", bound=Callable) + + +class __config_flags: + """Internal class for defining compatibility and debugging flags""" + + _all_names: List[str] = [] + _fixed_names: List[str] = [] + _type_desc = "configuration" + + @classmethod + def _set(cls, dname, value): + if dname in cls._fixed_names: + warnings.warn( + f"{cls.__name__}.{dname} {cls._type_desc} is {str(getattr(cls, dname)).upper()}" + f" and cannot be overridden", + stacklevel=3, + ) + return + if dname in cls._all_names: + setattr(cls, dname, value) + else: + raise ValueError(f"no such {cls._type_desc} {dname!r}") + + enable = classmethod(lambda cls, name: cls._set(name, True)) + disable = classmethod(lambda cls, name: cls._set(name, False)) + + +@lru_cache(maxsize=128) +def col(loc: int, strg: str) -> int: + """ + Returns current column within a string, counting newlines as line separators. + The first column is number 1. + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See + :class:`ParserElement.parse_string` for more + information on parsing strings containing ```` s, and suggested + methods to maintain a consistent view of the parsed string, the parse + location, and line and column positions within the parsed string. + """ + s = strg + return 1 if 0 < loc < len(s) and s[loc - 1] == "\n" else loc - s.rfind("\n", 0, loc) + + +@lru_cache(maxsize=128) +def lineno(loc: int, strg: str) -> int: + """Returns current line number within a string, counting newlines as line separators. + The first line is number 1. + + Note - the default parsing behavior is to expand tabs in the input string + before starting the parsing process. 
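An illustrative check (editorial sketch, values chosen for the example) of how
``col``, ``lineno``, and ``line`` relate for the same location:

    s = "abc\ndef"
    loc = 5                        # points at the 'e'
    assert lineno(loc, s) == 2     # 1-based line number
    assert col(loc, s) == 2        # 1-based column within that line
    assert line(loc, s) == "def"   # full text of the containing line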
See :class:`ParserElement.parse_string` + for more information on parsing strings containing ```` s, and + suggested methods to maintain a consistent view of the parsed string, the + parse location, and line and column positions within the parsed string. + """ + return strg.count("\n", 0, loc) + 1 + + +@lru_cache(maxsize=128) +def line(loc: int, strg: str) -> str: + """ + Returns the line of text containing loc within a string, counting newlines as line separators. + """ + last_cr = strg.rfind("\n", 0, loc) + next_cr = strg.find("\n", loc) + return strg[last_cr + 1 : next_cr] if next_cr >= 0 else strg[last_cr + 1 :] + + +class _UnboundedCache: + def __init__(self): + cache = {} + cache_get = cache.get + self.not_in_cache = not_in_cache = object() + + def get(_, key): + return cache_get(key, not_in_cache) + + def set_(_, key, value): + cache[key] = value + + def clear(_): + cache.clear() + + self.size = None + self.get = types.MethodType(get, self) + self.set = types.MethodType(set_, self) + self.clear = types.MethodType(clear, self) + + +class _FifoCache: + def __init__(self, size): + self.not_in_cache = not_in_cache = object() + cache = {} + keyring = [object()] * size + cache_get = cache.get + cache_pop = cache.pop + keyiter = itertools.cycle(range(size)) + + def get(_, key): + return cache_get(key, not_in_cache) + + def set_(_, key, value): + cache[key] = value + i = next(keyiter) + cache_pop(keyring[i], None) + keyring[i] = key + + def clear(_): + cache.clear() + keyring[:] = [object()] * size + + self.size = size + self.get = types.MethodType(get, self) + self.set = types.MethodType(set_, self) + self.clear = types.MethodType(clear, self) + + +class LRUMemo: + """ + A memoizing mapping that retains `capacity` deleted items + + The memo tracks retained items by their access order; once `capacity` items + are retained, the least recently used item is discarded. 
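A small editorial sketch of those semantics (keys and values are hypothetical):

    memo = LRUMemo(capacity=2)
    memo["a"] = 1           # lives in the active mapping
    del memo["a"]           # retired into the bounded memory
    assert memo["a"] == 1   # still retrievable until evicted by newer deletions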
+ """ + + def __init__(self, capacity): + self._capacity = capacity + self._active = {} + self._memory = collections.OrderedDict() + + def __getitem__(self, key): + try: + return self._active[key] + except KeyError: + self._memory.move_to_end(key) + return self._memory[key] + + def __setitem__(self, key, value): + self._memory.pop(key, None) + self._active[key] = value + + def __delitem__(self, key): + try: + value = self._active.pop(key) + except KeyError: + pass + else: + while len(self._memory) >= self._capacity: + self._memory.popitem(last=False) + self._memory[key] = value + + def clear(self): + self._active.clear() + self._memory.clear() + + +class UnboundedMemo(dict): + """ + A memoizing mapping that retains all deleted items + """ + + def __delitem__(self, key): + pass + + +def _escape_regex_range_chars(s: str) -> str: + # escape these chars: ^-[] + for c in r"\^-[]": + s = s.replace(c, _bslash + c) + s = s.replace("\n", r"\n") + s = s.replace("\t", r"\t") + return str(s) + + +def _collapse_string_to_ranges( + s: Union[str, Iterable[str]], re_escape: bool = True +) -> str: + def is_consecutive(c): + c_int = ord(c) + is_consecutive.prev, prev = c_int, is_consecutive.prev + if c_int - prev > 1: + is_consecutive.value = next(is_consecutive.counter) + return is_consecutive.value + + is_consecutive.prev = 0 # type: ignore [attr-defined] + is_consecutive.counter = itertools.count() # type: ignore [attr-defined] + is_consecutive.value = -1 # type: ignore [attr-defined] + + def escape_re_range_char(c): + return "\\" + c if c in r"\^-][" else c + + def no_escape_re_range_char(c): + return c + + if not re_escape: + escape_re_range_char = no_escape_re_range_char + + ret = [] + s = "".join(sorted(set(s))) + if len(s) > 3: + for _, chars in itertools.groupby(s, key=is_consecutive): + first = last = next(chars) + last = collections.deque( + itertools.chain(iter([last]), chars), maxlen=1 + ).pop() + if first == last: + ret.append(escape_re_range_char(first)) + else: + sep = "" if ord(last) == ord(first) + 1 else "-" + ret.append( + f"{escape_re_range_char(first)}{sep}{escape_re_range_char(last)}" + ) + else: + ret = [escape_re_range_char(c) for c in s] + + return "".join(ret) + + +def _flatten(ll: list) -> list: + ret = [] + for i in ll: + if isinstance(i, list): + ret.extend(_flatten(i)) + else: + ret.append(i) + return ret + + +def _make_synonym_function(compat_name: str, fn: C) -> C: + # In a future version, uncomment the code in the internal _inner() functions + # to begin emitting DeprecationWarnings. + + # Unwrap staticmethod/classmethod + fn = getattr(fn, "__func__", fn) + + # (Presence of 'self' arg in signature is used by explain_exception() methods, so we take + # some extra steps to add it if present in decorated function.) 
+ if "self" == list(inspect.signature(fn).parameters)[0]: + + @wraps(fn) + def _inner(self, *args, **kwargs): + # warnings.warn( + # f"Deprecated - use {fn.__name__}", DeprecationWarning, stacklevel=3 + # ) + return fn(self, *args, **kwargs) + + else: + + @wraps(fn) + def _inner(*args, **kwargs): + # warnings.warn( + # f"Deprecated - use {fn.__name__}", DeprecationWarning, stacklevel=3 + # ) + return fn(*args, **kwargs) + + _inner.__doc__ = f"""Deprecated - use :class:`{fn.__name__}`""" + _inner.__name__ = compat_name + _inner.__annotations__ = fn.__annotations__ + if isinstance(fn, types.FunctionType): + _inner.__kwdefaults__ = fn.__kwdefaults__ + elif isinstance(fn, type) and hasattr(fn, "__init__"): + _inner.__kwdefaults__ = fn.__init__.__kwdefaults__ + else: + _inner.__kwdefaults__ = None + _inner.__qualname__ = fn.__qualname__ + return cast(C, _inner) + + +def replaced_by_pep8(fn: C) -> Callable[[Callable], C]: + """ + Decorator for pre-PEP8 compatibility synonyms, to link them to the new function. + """ + return lambda other: _make_synonym_function(other.__name__, fn) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__init__.py new file mode 100644 index 0000000..ddfcf7f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__init__.py @@ -0,0 +1,23 @@ +"""Wrappers to call pyproject.toml-based build backend hooks. +""" + +from ._impl import ( + BackendInvalid, + BackendUnavailable, + BuildBackendHookCaller, + HookMissing, + UnsupportedOperation, + default_subprocess_runner, + quiet_subprocess_runner, +) + +__version__ = '1.0.0' +__all__ = [ + 'BackendUnavailable', + 'BackendInvalid', + 'HookMissing', + 'UnsupportedOperation', + 'default_subprocess_runner', + 'quiet_subprocess_runner', + 'BuildBackendHookCaller', +] diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_compat.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_compat.py new file mode 100644 index 0000000..95e509c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_compat.py @@ -0,0 +1,8 @@ +__all__ = ("tomllib",) + +import sys + +if sys.version_info >= (3, 11): + import tomllib +else: + from pip._vendor import tomli as tomllib diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_impl.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_impl.py new file mode 100644 index 0000000..37b0e65 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_impl.py @@ -0,0 +1,330 @@ +import json +import os +import sys +import tempfile +from contextlib import contextmanager +from os.path import abspath +from os.path import join as pjoin +from subprocess import STDOUT, check_call, check_output + +from ._in_process import _in_proc_script_path + + +def write_json(obj, path, **kwargs): + with open(path, 'w', encoding='utf-8') as f: + json.dump(obj, f, **kwargs) + + +def read_json(path): + with open(path, encoding='utf-8') as f: + return json.load(f) + + +class BackendUnavailable(Exception): + """Will be raised if the backend cannot be imported in the hook process.""" + def __init__(self, traceback): + self.traceback = traceback + + +class BackendInvalid(Exception): + """Will be raised if the backend is invalid.""" + def __init__(self, backend_name, backend_path, message): + super().__init__(message) + self.backend_name = backend_name + self.backend_path = backend_path + 
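An editorial sketch (not part of the vendored file) of how the ``_compat``
shim above is typically consumed; the pyproject.toml path is hypothetical:

    from pip._vendor.pyproject_hooks._compat import tomllib

    with open("pyproject.toml", "rb") as f:   # tomllib requires binary mode
        build_backend = tomllib.load(f)["build-system"]["build-backend"]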
+ +class HookMissing(Exception): + """Will be raised on missing hooks (if a fallback can't be used).""" + def __init__(self, hook_name): + super().__init__(hook_name) + self.hook_name = hook_name + + +class UnsupportedOperation(Exception): + """May be raised by build_sdist if the backend indicates that it can't.""" + def __init__(self, traceback): + self.traceback = traceback + + +def default_subprocess_runner(cmd, cwd=None, extra_environ=None): + """The default method of calling the wrapper subprocess. + + This uses :func:`subprocess.check_call` under the hood. + """ + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + + check_call(cmd, cwd=cwd, env=env) + + +def quiet_subprocess_runner(cmd, cwd=None, extra_environ=None): + """Call the subprocess while suppressing output. + + This uses :func:`subprocess.check_output` under the hood. + """ + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + + check_output(cmd, cwd=cwd, env=env, stderr=STDOUT) + + +def norm_and_check(source_tree, requested): + """Normalise and check a backend path. + + Ensure that the requested backend path is specified as a relative path, + and resolves to a location under the given source tree. + + Return an absolute version of the requested path. + """ + if os.path.isabs(requested): + raise ValueError("paths must be relative") + + abs_source = os.path.abspath(source_tree) + abs_requested = os.path.normpath(os.path.join(abs_source, requested)) + # We have to use commonprefix for Python 2.7 compatibility. So we + # normalise case to avoid problems because commonprefix is a character + # based comparison :-( + norm_source = os.path.normcase(abs_source) + norm_requested = os.path.normcase(abs_requested) + if os.path.commonprefix([norm_source, norm_requested]) != norm_source: + raise ValueError("paths must be inside source tree") + + return abs_requested + + +class BuildBackendHookCaller: + """A wrapper to call the build backend hooks for a source directory. + """ + + def __init__( + self, + source_dir, + build_backend, + backend_path=None, + runner=None, + python_executable=None, + ): + """ + :param source_dir: The source directory to invoke the build backend for + :param build_backend: The build backend spec + :param backend_path: Additional path entries for the build backend spec + :param runner: The :ref:`subprocess runner ` to use + :param python_executable: + The Python executable used to invoke the build backend + """ + if runner is None: + runner = default_subprocess_runner + + self.source_dir = abspath(source_dir) + self.build_backend = build_backend + if backend_path: + backend_path = [ + norm_and_check(self.source_dir, p) for p in backend_path + ] + self.backend_path = backend_path + self._subprocess_runner = runner + if not python_executable: + python_executable = sys.executable + self.python_executable = python_executable + + @contextmanager + def subprocess_runner(self, runner): + """A context manager for temporarily overriding the default + :ref:`subprocess runner `. + + .. code-block:: python + + hook_caller = BuildBackendHookCaller(...) + with hook_caller.subprocess_runner(quiet_subprocess_runner): + ... 
+ """ + prev = self._subprocess_runner + self._subprocess_runner = runner + try: + yield + finally: + self._subprocess_runner = prev + + def _supported_features(self): + """Return the list of optional features supported by the backend.""" + return self._call_hook('_supported_features', {}) + + def get_requires_for_build_wheel(self, config_settings=None): + """Get additional dependencies required for building a wheel. + + :returns: A list of :pep:`dependency specifiers <508>`. + :rtype: list[str] + + .. admonition:: Fallback + + If the build backend does not defined a hook with this name, an + empty list will be returned. + """ + return self._call_hook('get_requires_for_build_wheel', { + 'config_settings': config_settings + }) + + def prepare_metadata_for_build_wheel( + self, metadata_directory, config_settings=None, + _allow_fallback=True): + """Prepare a ``*.dist-info`` folder with metadata for this project. + + :returns: Name of the newly created subfolder within + ``metadata_directory``, containing the metadata. + :rtype: str + + .. admonition:: Fallback + + If the build backend does not define a hook with this name and + ``_allow_fallback`` is truthy, the backend will be asked to build a + wheel via the ``build_wheel`` hook and the dist-info extracted from + that will be returned. + """ + return self._call_hook('prepare_metadata_for_build_wheel', { + 'metadata_directory': abspath(metadata_directory), + 'config_settings': config_settings, + '_allow_fallback': _allow_fallback, + }) + + def build_wheel( + self, wheel_directory, config_settings=None, + metadata_directory=None): + """Build a wheel from this project. + + :returns: + The name of the newly created wheel within ``wheel_directory``. + + .. admonition:: Interaction with fallback + + If the ``build_wheel`` hook was called in the fallback for + :meth:`prepare_metadata_for_build_wheel`, the build backend would + not be invoked. Instead, the previously built wheel will be copied + to ``wheel_directory`` and the name of that file will be returned. + """ + if metadata_directory is not None: + metadata_directory = abspath(metadata_directory) + return self._call_hook('build_wheel', { + 'wheel_directory': abspath(wheel_directory), + 'config_settings': config_settings, + 'metadata_directory': metadata_directory, + }) + + def get_requires_for_build_editable(self, config_settings=None): + """Get additional dependencies required for building an editable wheel. + + :returns: A list of :pep:`dependency specifiers <508>`. + :rtype: list[str] + + .. admonition:: Fallback + + If the build backend does not defined a hook with this name, an + empty list will be returned. + """ + return self._call_hook('get_requires_for_build_editable', { + 'config_settings': config_settings + }) + + def prepare_metadata_for_build_editable( + self, metadata_directory, config_settings=None, + _allow_fallback=True): + """Prepare a ``*.dist-info`` folder with metadata for this project. + + :returns: Name of the newly created subfolder within + ``metadata_directory``, containing the metadata. + :rtype: str + + .. admonition:: Fallback + + If the build backend does not define a hook with this name and + ``_allow_fallback`` is truthy, the backend will be asked to build a + wheel via the ``build_editable`` hook and the dist-info + extracted from that will be returned. 
+ """ + return self._call_hook('prepare_metadata_for_build_editable', { + 'metadata_directory': abspath(metadata_directory), + 'config_settings': config_settings, + '_allow_fallback': _allow_fallback, + }) + + def build_editable( + self, wheel_directory, config_settings=None, + metadata_directory=None): + """Build an editable wheel from this project. + + :returns: + The name of the newly created wheel within ``wheel_directory``. + + .. admonition:: Interaction with fallback + + If the ``build_editable`` hook was called in the fallback for + :meth:`prepare_metadata_for_build_editable`, the build backend + would not be invoked. Instead, the previously built wheel will be + copied to ``wheel_directory`` and the name of that file will be + returned. + """ + if metadata_directory is not None: + metadata_directory = abspath(metadata_directory) + return self._call_hook('build_editable', { + 'wheel_directory': abspath(wheel_directory), + 'config_settings': config_settings, + 'metadata_directory': metadata_directory, + }) + + def get_requires_for_build_sdist(self, config_settings=None): + """Get additional dependencies required for building an sdist. + + :returns: A list of :pep:`dependency specifiers <508>`. + :rtype: list[str] + """ + return self._call_hook('get_requires_for_build_sdist', { + 'config_settings': config_settings + }) + + def build_sdist(self, sdist_directory, config_settings=None): + """Build an sdist from this project. + + :returns: + The name of the newly created sdist within ``wheel_directory``. + """ + return self._call_hook('build_sdist', { + 'sdist_directory': abspath(sdist_directory), + 'config_settings': config_settings, + }) + + def _call_hook(self, hook_name, kwargs): + extra_environ = {'PEP517_BUILD_BACKEND': self.build_backend} + + if self.backend_path: + backend_path = os.pathsep.join(self.backend_path) + extra_environ['PEP517_BACKEND_PATH'] = backend_path + + with tempfile.TemporaryDirectory() as td: + hook_input = {'kwargs': kwargs} + write_json(hook_input, pjoin(td, 'input.json'), indent=2) + + # Run the hook in a subprocess + with _in_proc_script_path() as script: + python = self.python_executable + self._subprocess_runner( + [python, abspath(str(script)), hook_name, td], + cwd=self.source_dir, + extra_environ=extra_environ + ) + + data = read_json(pjoin(td, 'output.json')) + if data.get('unsupported'): + raise UnsupportedOperation(data.get('traceback', '')) + if data.get('no_backend'): + raise BackendUnavailable(data.get('traceback', '')) + if data.get('backend_invalid'): + raise BackendInvalid( + backend_name=self.build_backend, + backend_path=self.backend_path, + message=data.get('backend_error', '') + ) + if data.get('hook_missing'): + raise HookMissing(data.get('missing_hook_name') or hook_name) + return data['return_val'] diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__init__.py new file mode 100644 index 0000000..917fa06 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__init__.py @@ -0,0 +1,18 @@ +"""This is a subpackage because the directory is on sys.path for _in_process.py + +The subpackage should stay as empty as possible to avoid shadowing modules that +the backend might import. 
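An editorial usage sketch of the ``BuildBackendHookCaller`` defined in
``_impl.py`` above (directory paths are hypothetical):

    from pip._vendor.pyproject_hooks import (
        BuildBackendHookCaller,
        quiet_subprocess_runner,
    )

    caller = BuildBackendHookCaller("/path/to/src", "setuptools.build_meta")
    extra_reqs = caller.get_requires_for_build_wheel()    # e.g. ["wheel"]
    with caller.subprocess_runner(quiet_subprocess_runner):
        wheel_name = caller.build_wheel("/path/to/dist")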
+""" + +import importlib.resources as resources + +try: + resources.files +except AttributeError: + # Python 3.8 compatibility + def _in_proc_script_path(): + return resources.path(__package__, '_in_process.py') +else: + def _in_proc_script_path(): + return resources.as_file( + resources.files(__package__).joinpath('_in_process.py')) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/_in_process.py b/.venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/_in_process.py new file mode 100644 index 0000000..ee511ff --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/_in_process.py @@ -0,0 +1,353 @@ +"""This is invoked in a subprocess to call the build backend hooks. + +It expects: +- Command line args: hook_name, control_dir +- Environment variables: + PEP517_BUILD_BACKEND=entry.point:spec + PEP517_BACKEND_PATH=paths (separated with os.pathsep) +- control_dir/input.json: + - {"kwargs": {...}} + +Results: +- control_dir/output.json + - {"return_val": ...} +""" +import json +import os +import os.path +import re +import shutil +import sys +import traceback +from glob import glob +from importlib import import_module +from os.path import join as pjoin + +# This file is run as a script, and `import wrappers` is not zip-safe, so we +# include write_json() and read_json() from wrappers.py. + + +def write_json(obj, path, **kwargs): + with open(path, 'w', encoding='utf-8') as f: + json.dump(obj, f, **kwargs) + + +def read_json(path): + with open(path, encoding='utf-8') as f: + return json.load(f) + + +class BackendUnavailable(Exception): + """Raised if we cannot import the backend""" + def __init__(self, traceback): + self.traceback = traceback + + +class BackendInvalid(Exception): + """Raised if the backend is invalid""" + def __init__(self, message): + self.message = message + + +class HookMissing(Exception): + """Raised if a hook is missing and we are not executing the fallback""" + def __init__(self, hook_name=None): + super().__init__(hook_name) + self.hook_name = hook_name + + +def contained_in(filename, directory): + """Test if a file is located within the given directory.""" + filename = os.path.normcase(os.path.abspath(filename)) + directory = os.path.normcase(os.path.abspath(directory)) + return os.path.commonprefix([filename, directory]) == directory + + +def _build_backend(): + """Find and load the build backend""" + # Add in-tree backend directories to the front of sys.path. + backend_path = os.environ.get('PEP517_BACKEND_PATH') + if backend_path: + extra_pathitems = backend_path.split(os.pathsep) + sys.path[:0] = extra_pathitems + + ep = os.environ['PEP517_BUILD_BACKEND'] + mod_path, _, obj_path = ep.partition(':') + try: + obj = import_module(mod_path) + except ImportError: + raise BackendUnavailable(traceback.format_exc()) + + if backend_path: + if not any( + contained_in(obj.__file__, path) + for path in extra_pathitems + ): + raise BackendInvalid("Backend was not loaded from backend-path") + + if obj_path: + for path_part in obj_path.split('.'): + obj = getattr(obj, path_part) + return obj + + +def _supported_features(): + """Return the list of options features supported by the backend. + + Returns a list of strings. + The only possible value is 'build_editable'. 
+ """ + backend = _build_backend() + features = [] + if hasattr(backend, "build_editable"): + features.append("build_editable") + return features + + +def get_requires_for_build_wheel(config_settings): + """Invoke the optional get_requires_for_build_wheel hook + + Returns [] if the hook is not defined. + """ + backend = _build_backend() + try: + hook = backend.get_requires_for_build_wheel + except AttributeError: + return [] + else: + return hook(config_settings) + + +def get_requires_for_build_editable(config_settings): + """Invoke the optional get_requires_for_build_editable hook + + Returns [] if the hook is not defined. + """ + backend = _build_backend() + try: + hook = backend.get_requires_for_build_editable + except AttributeError: + return [] + else: + return hook(config_settings) + + +def prepare_metadata_for_build_wheel( + metadata_directory, config_settings, _allow_fallback): + """Invoke optional prepare_metadata_for_build_wheel + + Implements a fallback by building a wheel if the hook isn't defined, + unless _allow_fallback is False in which case HookMissing is raised. + """ + backend = _build_backend() + try: + hook = backend.prepare_metadata_for_build_wheel + except AttributeError: + if not _allow_fallback: + raise HookMissing() + else: + return hook(metadata_directory, config_settings) + # fallback to build_wheel outside the try block to avoid exception chaining + # which can be confusing to users and is not relevant + whl_basename = backend.build_wheel(metadata_directory, config_settings) + return _get_wheel_metadata_from_wheel(whl_basename, metadata_directory, + config_settings) + + +def prepare_metadata_for_build_editable( + metadata_directory, config_settings, _allow_fallback): + """Invoke optional prepare_metadata_for_build_editable + + Implements a fallback by building an editable wheel if the hook isn't + defined, unless _allow_fallback is False in which case HookMissing is + raised. + """ + backend = _build_backend() + try: + hook = backend.prepare_metadata_for_build_editable + except AttributeError: + if not _allow_fallback: + raise HookMissing() + try: + build_hook = backend.build_editable + except AttributeError: + raise HookMissing(hook_name='build_editable') + else: + whl_basename = build_hook(metadata_directory, config_settings) + return _get_wheel_metadata_from_wheel(whl_basename, + metadata_directory, + config_settings) + else: + return hook(metadata_directory, config_settings) + + +WHEEL_BUILT_MARKER = 'PEP517_ALREADY_BUILT_WHEEL' + + +def _dist_info_files(whl_zip): + """Identify the .dist-info folder inside a wheel ZipFile.""" + res = [] + for path in whl_zip.namelist(): + m = re.match(r'[^/\\]+-[^/\\]+\.dist-info/', path) + if m: + res.append(path) + if res: + return res + raise Exception("No .dist-info folder found in wheel") + + +def _get_wheel_metadata_from_wheel( + whl_basename, metadata_directory, config_settings): + """Extract the metadata from a wheel. + + Fallback for when the build backend does not + define the 'get_wheel_metadata' hook. + """ + from zipfile import ZipFile + with open(os.path.join(metadata_directory, WHEEL_BUILT_MARKER), 'wb'): + pass # Touch marker file + + whl_file = os.path.join(metadata_directory, whl_basename) + with ZipFile(whl_file) as zipf: + dist_info = _dist_info_files(zipf) + zipf.extractall(path=metadata_directory, members=dist_info) + return dist_info[0].split('/')[0] + + +def _find_already_built_wheel(metadata_directory): + """Check for a wheel already built during the get_wheel_metadata hook. 
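An editorial check of the ``.dist-info`` pattern used by ``_dist_info_files``
above (file names are illustrative):

    import re

    pat = re.compile(r"[^/\\]+-[^/\\]+\.dist-info/")
    assert pat.match("mypkg-1.0.dist-info/METADATA")
    assert pat.match("mypkg-1.0.dist-info/")
    assert not pat.match("mypkg/data.txt")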
+ """ + if not metadata_directory: + return None + metadata_parent = os.path.dirname(metadata_directory) + if not os.path.isfile(pjoin(metadata_parent, WHEEL_BUILT_MARKER)): + return None + + whl_files = glob(os.path.join(metadata_parent, '*.whl')) + if not whl_files: + print('Found wheel built marker, but no .whl files') + return None + if len(whl_files) > 1: + print('Found multiple .whl files; unspecified behaviour. ' + 'Will call build_wheel.') + return None + + # Exactly one .whl file + return whl_files[0] + + +def build_wheel(wheel_directory, config_settings, metadata_directory=None): + """Invoke the mandatory build_wheel hook. + + If a wheel was already built in the + prepare_metadata_for_build_wheel fallback, this + will copy it rather than rebuilding the wheel. + """ + prebuilt_whl = _find_already_built_wheel(metadata_directory) + if prebuilt_whl: + shutil.copy2(prebuilt_whl, wheel_directory) + return os.path.basename(prebuilt_whl) + + return _build_backend().build_wheel(wheel_directory, config_settings, + metadata_directory) + + +def build_editable(wheel_directory, config_settings, metadata_directory=None): + """Invoke the optional build_editable hook. + + If a wheel was already built in the + prepare_metadata_for_build_editable fallback, this + will copy it rather than rebuilding the wheel. + """ + backend = _build_backend() + try: + hook = backend.build_editable + except AttributeError: + raise HookMissing() + else: + prebuilt_whl = _find_already_built_wheel(metadata_directory) + if prebuilt_whl: + shutil.copy2(prebuilt_whl, wheel_directory) + return os.path.basename(prebuilt_whl) + + return hook(wheel_directory, config_settings, metadata_directory) + + +def get_requires_for_build_sdist(config_settings): + """Invoke the optional get_requires_for_build_wheel hook + + Returns [] if the hook is not defined. 
+ """ + backend = _build_backend() + try: + hook = backend.get_requires_for_build_sdist + except AttributeError: + return [] + else: + return hook(config_settings) + + +class _DummyException(Exception): + """Nothing should ever raise this exception""" + + +class GotUnsupportedOperation(Exception): + """For internal use when backend raises UnsupportedOperation""" + def __init__(self, traceback): + self.traceback = traceback + + +def build_sdist(sdist_directory, config_settings): + """Invoke the mandatory build_sdist hook.""" + backend = _build_backend() + try: + return backend.build_sdist(sdist_directory, config_settings) + except getattr(backend, 'UnsupportedOperation', _DummyException): + raise GotUnsupportedOperation(traceback.format_exc()) + + +HOOK_NAMES = { + 'get_requires_for_build_wheel', + 'prepare_metadata_for_build_wheel', + 'build_wheel', + 'get_requires_for_build_editable', + 'prepare_metadata_for_build_editable', + 'build_editable', + 'get_requires_for_build_sdist', + 'build_sdist', + '_supported_features', +} + + +def main(): + if len(sys.argv) < 3: + sys.exit("Needs args: hook_name, control_dir") + hook_name = sys.argv[1] + control_dir = sys.argv[2] + if hook_name not in HOOK_NAMES: + sys.exit("Unknown hook: %s" % hook_name) + hook = globals()[hook_name] + + hook_input = read_json(pjoin(control_dir, 'input.json')) + + json_out = {'unsupported': False, 'return_val': None} + try: + json_out['return_val'] = hook(**hook_input['kwargs']) + except BackendUnavailable as e: + json_out['no_backend'] = True + json_out['traceback'] = e.traceback + except BackendInvalid as e: + json_out['backend_invalid'] = True + json_out['backend_error'] = e.message + except GotUnsupportedOperation as e: + json_out['unsupported'] = True + json_out['traceback'] = e.traceback + except HookMissing as e: + json_out['hook_missing'] = True + json_out['missing_hook_name'] = e.hook_name or hook_name + + write_json(json_out, pjoin(control_dir, 'output.json'), indent=2) + + +if __name__ == '__main__': + main() diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/requests/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/__init__.py new file mode 100644 index 0000000..10ff67f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/__init__.py @@ -0,0 +1,182 @@ +# __ +# /__) _ _ _ _ _/ _ +# / ( (- (/ (/ (- _) / _) +# / + +""" +Requests HTTP Library +~~~~~~~~~~~~~~~~~~~~~ + +Requests is an HTTP library, written in Python, for human beings. +Basic GET usage: + + >>> import requests + >>> r = requests.get('https://www.python.org') + >>> r.status_code + 200 + >>> b'Python is a programming language' in r.content + True + +... or POST: + + >>> payload = dict(key1='value1', key2='value2') + >>> r = requests.post('https://httpbin.org/post', data=payload) + >>> print(r.text) + { + ... + "form": { + "key1": "value1", + "key2": "value2" + }, + ... + } + +The other HTTP methods are supported - see `requests.api`. Full documentation +is at . + +:copyright: (c) 2017 by Kenneth Reitz. +:license: Apache 2.0, see LICENSE for more details. 
+""" + +import warnings + +from pip._vendor import urllib3 + +from .exceptions import RequestsDependencyWarning + +charset_normalizer_version = None + +try: + from pip._vendor.chardet import __version__ as chardet_version +except ImportError: + chardet_version = None + + +def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version): + urllib3_version = urllib3_version.split(".") + assert urllib3_version != ["dev"] # Verify urllib3 isn't installed from git. + + # Sometimes, urllib3 only reports its version as 16.1. + if len(urllib3_version) == 2: + urllib3_version.append("0") + + # Check urllib3 for compatibility. + major, minor, patch = urllib3_version # noqa: F811 + major, minor, patch = int(major), int(minor), int(patch) + # urllib3 >= 1.21.1 + assert major >= 1 + if major == 1: + assert minor >= 21 + + # Check charset_normalizer for compatibility. + if chardet_version: + major, minor, patch = chardet_version.split(".")[:3] + major, minor, patch = int(major), int(minor), int(patch) + # chardet_version >= 3.0.2, < 6.0.0 + assert (3, 0, 2) <= (major, minor, patch) < (6, 0, 0) + elif charset_normalizer_version: + major, minor, patch = charset_normalizer_version.split(".")[:3] + major, minor, patch = int(major), int(minor), int(patch) + # charset_normalizer >= 2.0.0 < 4.0.0 + assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0) + else: + raise Exception("You need either charset_normalizer or chardet installed") + + +def _check_cryptography(cryptography_version): + # cryptography < 1.3.4 + try: + cryptography_version = list(map(int, cryptography_version.split("."))) + except ValueError: + return + + if cryptography_version < [1, 3, 4]: + warning = "Old version of cryptography ({}) may cause slowdown.".format( + cryptography_version + ) + warnings.warn(warning, RequestsDependencyWarning) + + +# Check imported dependencies for compatibility. +try: + check_compatibility( + urllib3.__version__, chardet_version, charset_normalizer_version + ) +except (AssertionError, ValueError): + warnings.warn( + "urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported " + "version!".format( + urllib3.__version__, chardet_version, charset_normalizer_version + ), + RequestsDependencyWarning, + ) + +# Attempt to enable urllib3's fallback for SNI support +# if the standard library doesn't support SNI or the +# 'ssl' library isn't available. +try: + # Note: This logic prevents upgrading cryptography on Windows, if imported + # as part of pip. + from pip._internal.utils.compat import WINDOWS + if not WINDOWS: + raise ImportError("pip internals: don't import cryptography on Windows") + try: + import ssl + except ImportError: + ssl = None + + if not getattr(ssl, "HAS_SNI", False): + from pip._vendor.urllib3.contrib import pyopenssl + + pyopenssl.inject_into_urllib3() + + # Check cryptography version + from cryptography import __version__ as cryptography_version + + _check_cryptography(cryptography_version) +except ImportError: + pass + +# urllib3's DependencyWarnings should be silenced. +from pip._vendor.urllib3.exceptions import DependencyWarning + +warnings.simplefilter("ignore", DependencyWarning) + +# Set default logging handler to avoid "No handler found" warnings. +import logging +from logging import NullHandler + +from . 
import packages, utils +from .__version__ import ( + __author__, + __author_email__, + __build__, + __cake__, + __copyright__, + __description__, + __license__, + __title__, + __url__, + __version__, +) +from .api import delete, get, head, options, patch, post, put, request +from .exceptions import ( + ConnectionError, + ConnectTimeout, + FileModeWarning, + HTTPError, + JSONDecodeError, + ReadTimeout, + RequestException, + Timeout, + TooManyRedirects, + URLRequired, +) +from .models import PreparedRequest, Request, Response +from .sessions import Session, session +from .status_codes import codes + +logging.getLogger(__name__).addHandler(NullHandler()) + +# FileModeWarnings go off per the default. +warnings.simplefilter("default", FileModeWarning, append=True) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/requests/__version__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/__version__.py new file mode 100644 index 0000000..5063c3f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/__version__.py @@ -0,0 +1,14 @@ +# .-. .-. .-. . . .-. .-. .-. .-. +# |( |- |.| | | |- `-. | `-. +# ' ' `-' `-`.`-' `-' `-' ' `-' + +__title__ = "requests" +__description__ = "Python HTTP for Humans." +__url__ = "https://requests.readthedocs.io" +__version__ = "2.31.0" +__build__ = 0x023100 +__author__ = "Kenneth Reitz" +__author_email__ = "me@kennethreitz.org" +__license__ = "Apache 2.0" +__copyright__ = "Copyright Kenneth Reitz" +__cake__ = "\u2728 \U0001f370 \u2728" diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/requests/_internal_utils.py b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/_internal_utils.py new file mode 100644 index 0000000..f2cf635 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/_internal_utils.py @@ -0,0 +1,50 @@ +""" +requests._internal_utils +~~~~~~~~~~~~~~ + +Provides utility functions that are consumed internally by Requests +which depend on extremely few external helpers (such as compat) +""" +import re + +from .compat import builtin_str + +_VALID_HEADER_NAME_RE_BYTE = re.compile(rb"^[^:\s][^:\r\n]*$") +_VALID_HEADER_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$") +_VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$") +_VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$") + +_HEADER_VALIDATORS_STR = (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR) +_HEADER_VALIDATORS_BYTE = (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE) +HEADER_VALIDATORS = { + bytes: _HEADER_VALIDATORS_BYTE, + str: _HEADER_VALIDATORS_STR, +} + + +def to_native_string(string, encoding="ascii"): + """Given a string object, regardless of type, returns a representation of + that string in the native string type, encoding and decoding where + necessary. This assumes ASCII unless told otherwise. + """ + if isinstance(string, builtin_str): + out = string + else: + out = string.decode(encoding) + + return out + + +def unicode_is_ascii(u_string): + """Determine if unicode string only contains ASCII characters. + + :param str u_string: unicode string to check. Must be unicode + and not Python 2 `str`. 
+ :rtype: bool + """ + assert isinstance(u_string, str) + try: + u_string.encode("ascii") + return True + except UnicodeEncodeError: + return False diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/requests/adapters.py b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/adapters.py new file mode 100644 index 0000000..10c1767 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/adapters.py @@ -0,0 +1,538 @@ +""" +requests.adapters +~~~~~~~~~~~~~~~~~ + +This module contains the transport adapters that Requests uses to define +and maintain connections. +""" + +import os.path +import socket # noqa: F401 + +from pip._vendor.urllib3.exceptions import ClosedPoolError, ConnectTimeoutError +from pip._vendor.urllib3.exceptions import HTTPError as _HTTPError +from pip._vendor.urllib3.exceptions import InvalidHeader as _InvalidHeader +from pip._vendor.urllib3.exceptions import ( + LocationValueError, + MaxRetryError, + NewConnectionError, + ProtocolError, +) +from pip._vendor.urllib3.exceptions import ProxyError as _ProxyError +from pip._vendor.urllib3.exceptions import ReadTimeoutError, ResponseError +from pip._vendor.urllib3.exceptions import SSLError as _SSLError +from pip._vendor.urllib3.poolmanager import PoolManager, proxy_from_url +from pip._vendor.urllib3.util import Timeout as TimeoutSauce +from pip._vendor.urllib3.util import parse_url +from pip._vendor.urllib3.util.retry import Retry + +from .auth import _basic_auth_str +from .compat import basestring, urlparse +from .cookies import extract_cookies_to_jar +from .exceptions import ( + ConnectionError, + ConnectTimeout, + InvalidHeader, + InvalidProxyURL, + InvalidSchema, + InvalidURL, + ProxyError, + ReadTimeout, + RetryError, + SSLError, +) +from .models import Response +from .structures import CaseInsensitiveDict +from .utils import ( + DEFAULT_CA_BUNDLE_PATH, + extract_zipped_paths, + get_auth_from_url, + get_encoding_from_headers, + prepend_scheme_if_needed, + select_proxy, + urldefragauth, +) + +try: + from pip._vendor.urllib3.contrib.socks import SOCKSProxyManager +except ImportError: + + def SOCKSProxyManager(*args, **kwargs): + raise InvalidSchema("Missing dependencies for SOCKS support.") + + +DEFAULT_POOLBLOCK = False +DEFAULT_POOLSIZE = 10 +DEFAULT_RETRIES = 0 +DEFAULT_POOL_TIMEOUT = None + + +class BaseAdapter: + """The Base Transport Adapter""" + + def __init__(self): + super().__init__() + + def send( + self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None + ): + """Sends PreparedRequest object. Returns Response object. + + :param request: The :class:`PreparedRequest ` being sent. + :param stream: (optional) Whether to stream the request content. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) ` tuple. + :type timeout: float or tuple + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use + :param cert: (optional) Any user-provided SSL certificate to be trusted. + :param proxies: (optional) The proxies dictionary to apply to the request. + """ + raise NotImplementedError + + def close(self): + """Cleans up adapter specific items.""" + raise NotImplementedError + + +class HTTPAdapter(BaseAdapter): + """The built-in HTTP Adapter for urllib3. 
+ + Provides a general-case interface for Requests sessions to contact HTTP and + HTTPS urls by implementing the Transport Adapter interface. This class will + usually be created by the :class:`Session ` class under the + covers. + + :param pool_connections: The number of urllib3 connection pools to cache. + :param pool_maxsize: The maximum number of connections to save in the pool. + :param max_retries: The maximum number of retries each connection + should attempt. Note, this applies only to failed DNS lookups, socket + connections and connection timeouts, never to requests where data has + made it to the server. By default, Requests does not retry failed + connections. If you need granular control over the conditions under + which we retry a request, import urllib3's ``Retry`` class and pass + that instead. + :param pool_block: Whether the connection pool should block for connections. + + Usage:: + + >>> import requests + >>> s = requests.Session() + >>> a = requests.adapters.HTTPAdapter(max_retries=3) + >>> s.mount('http://', a) + """ + + __attrs__ = [ + "max_retries", + "config", + "_pool_connections", + "_pool_maxsize", + "_pool_block", + ] + + def __init__( + self, + pool_connections=DEFAULT_POOLSIZE, + pool_maxsize=DEFAULT_POOLSIZE, + max_retries=DEFAULT_RETRIES, + pool_block=DEFAULT_POOLBLOCK, + ): + if max_retries == DEFAULT_RETRIES: + self.max_retries = Retry(0, read=False) + else: + self.max_retries = Retry.from_int(max_retries) + self.config = {} + self.proxy_manager = {} + + super().__init__() + + self._pool_connections = pool_connections + self._pool_maxsize = pool_maxsize + self._pool_block = pool_block + + self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block) + + def __getstate__(self): + return {attr: getattr(self, attr, None) for attr in self.__attrs__} + + def __setstate__(self, state): + # Can't handle by adding 'proxy_manager' to self.__attrs__ because + # self.poolmanager uses a lambda function, which isn't pickleable. + self.proxy_manager = {} + self.config = {} + + for attr, value in state.items(): + setattr(self, attr, value) + + self.init_poolmanager( + self._pool_connections, self._pool_maxsize, block=self._pool_block + ) + + def init_poolmanager( + self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs + ): + """Initializes a urllib3 PoolManager. + + This method should not be called from user code, and is only + exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param connections: The number of urllib3 connection pools to cache. + :param maxsize: The maximum number of connections to save in the pool. + :param block: Block when no free connections are available. + :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager. + """ + # save these values for pickling + self._pool_connections = connections + self._pool_maxsize = maxsize + self._pool_block = block + + self.poolmanager = PoolManager( + num_pools=connections, + maxsize=maxsize, + block=block, + **pool_kwargs, + ) + + def proxy_manager_for(self, proxy, **proxy_kwargs): + """Return urllib3 ProxyManager for the given proxy. + + This method should not be called from user code, and is only + exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param proxy: The proxy to return a urllib3 ProxyManager for. + :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager. 
+ :returns: ProxyManager + :rtype: urllib3.ProxyManager + """ + if proxy in self.proxy_manager: + manager = self.proxy_manager[proxy] + elif proxy.lower().startswith("socks"): + username, password = get_auth_from_url(proxy) + manager = self.proxy_manager[proxy] = SOCKSProxyManager( + proxy, + username=username, + password=password, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs, + ) + else: + proxy_headers = self.proxy_headers(proxy) + manager = self.proxy_manager[proxy] = proxy_from_url( + proxy, + proxy_headers=proxy_headers, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs, + ) + + return manager + + def cert_verify(self, conn, url, verify, cert): + """Verify a SSL certificate. This method should not be called from user + code, and is only exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param conn: The urllib3 connection object associated with the cert. + :param url: The requested URL. + :param verify: Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use + :param cert: The SSL certificate to verify. + """ + if url.lower().startswith("https") and verify: + + cert_loc = None + + # Allow self-specified cert location. + if verify is not True: + cert_loc = verify + + if not cert_loc: + cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH) + + if not cert_loc or not os.path.exists(cert_loc): + raise OSError( + f"Could not find a suitable TLS CA certificate bundle, " + f"invalid path: {cert_loc}" + ) + + conn.cert_reqs = "CERT_REQUIRED" + + if not os.path.isdir(cert_loc): + conn.ca_certs = cert_loc + else: + conn.ca_cert_dir = cert_loc + else: + conn.cert_reqs = "CERT_NONE" + conn.ca_certs = None + conn.ca_cert_dir = None + + if cert: + if not isinstance(cert, basestring): + conn.cert_file = cert[0] + conn.key_file = cert[1] + else: + conn.cert_file = cert + conn.key_file = None + if conn.cert_file and not os.path.exists(conn.cert_file): + raise OSError( + f"Could not find the TLS certificate file, " + f"invalid path: {conn.cert_file}" + ) + if conn.key_file and not os.path.exists(conn.key_file): + raise OSError( + f"Could not find the TLS key file, invalid path: {conn.key_file}" + ) + + def build_response(self, req, resp): + """Builds a :class:`Response ` object from a urllib3 + response. This should not be called from user code, and is only exposed + for use when subclassing the + :class:`HTTPAdapter ` + + :param req: The :class:`PreparedRequest ` used to generate the response. + :param resp: The urllib3 response object. + :rtype: requests.Response + """ + response = Response() + + # Fallback to None if there's no status_code, for whatever reason. + response.status_code = getattr(resp, "status", None) + + # Make headers case-insensitive. + response.headers = CaseInsensitiveDict(getattr(resp, "headers", {})) + + # Set encoding. + response.encoding = get_encoding_from_headers(response.headers) + response.raw = resp + response.reason = response.raw.reason + + if isinstance(req.url, bytes): + response.url = req.url.decode("utf-8") + else: + response.url = req.url + + # Add new cookies from the server. + extract_cookies_to_jar(response.cookies, req, resp) + + # Give the Response some context. 
+ response.request = req + response.connection = self + + return response + + def get_connection(self, url, proxies=None): + """Returns a urllib3 connection for the given URL. This should not be + called from user code, and is only exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param url: The URL to connect to. + :param proxies: (optional) A Requests-style dictionary of proxies used on this request. + :rtype: urllib3.ConnectionPool + """ + proxy = select_proxy(url, proxies) + + if proxy: + proxy = prepend_scheme_if_needed(proxy, "http") + proxy_url = parse_url(proxy) + if not proxy_url.host: + raise InvalidProxyURL( + "Please check proxy URL. It is malformed " + "and could be missing the host." + ) + proxy_manager = self.proxy_manager_for(proxy) + conn = proxy_manager.connection_from_url(url) + else: + # Only scheme should be lower case + parsed = urlparse(url) + url = parsed.geturl() + conn = self.poolmanager.connection_from_url(url) + + return conn + + def close(self): + """Disposes of any internal state. + + Currently, this closes the PoolManager and any active ProxyManager, + which closes any pooled connections. + """ + self.poolmanager.clear() + for proxy in self.proxy_manager.values(): + proxy.clear() + + def request_url(self, request, proxies): + """Obtain the url to use when making the final request. + + If the message is being sent through a HTTP proxy, the full URL has to + be used. Otherwise, we should only use the path portion of the URL. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter `. + + :param request: The :class:`PreparedRequest ` being sent. + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs. + :rtype: str + """ + proxy = select_proxy(request.url, proxies) + scheme = urlparse(request.url).scheme + + is_proxied_http_request = proxy and scheme != "https" + using_socks_proxy = False + if proxy: + proxy_scheme = urlparse(proxy).scheme.lower() + using_socks_proxy = proxy_scheme.startswith("socks") + + url = request.path_url + if is_proxied_http_request and not using_socks_proxy: + url = urldefragauth(request.url) + + return url + + def add_headers(self, request, **kwargs): + """Add any headers needed by the connection. As of v2.0 this does + nothing by default, but is left for overriding by users that subclass + the :class:`HTTPAdapter `. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter `. + + :param request: The :class:`PreparedRequest ` to add headers to. + :param kwargs: The keyword arguments from the call to send(). + """ + pass + + def proxy_headers(self, proxy): + """Returns a dictionary of the headers to add to any request sent + through a proxy. This works with urllib3 magic to ensure that they are + correctly sent to the proxy, rather than in a tunnelled request if + CONNECT is being used. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter `. + + :param proxy: The url of the proxy being used for this request. + :rtype: dict + """ + headers = {} + username, password = get_auth_from_url(proxy) + + if username: + headers["Proxy-Authorization"] = _basic_auth_str(username, password) + + return headers + + def send( + self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None + ): + """Sends PreparedRequest object. Returns Response object. + + :param request: The :class:`PreparedRequest ` being sent. 
+ :param stream: (optional) Whether to stream the request content. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) ` tuple. + :type timeout: float or tuple or urllib3 Timeout object + :param verify: (optional) Either a boolean, in which case it controls whether + we verify the server's TLS certificate, or a string, in which case it + must be a path to a CA bundle to use + :param cert: (optional) Any user-provided SSL certificate to be trusted. + :param proxies: (optional) The proxies dictionary to apply to the request. + :rtype: requests.Response + """ + + try: + conn = self.get_connection(request.url, proxies) + except LocationValueError as e: + raise InvalidURL(e, request=request) + + self.cert_verify(conn, request.url, verify, cert) + url = self.request_url(request, proxies) + self.add_headers( + request, + stream=stream, + timeout=timeout, + verify=verify, + cert=cert, + proxies=proxies, + ) + + chunked = not (request.body is None or "Content-Length" in request.headers) + + if isinstance(timeout, tuple): + try: + connect, read = timeout + timeout = TimeoutSauce(connect=connect, read=read) + except ValueError: + raise ValueError( + f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, " + f"or a single float to set both timeouts to the same value." + ) + elif isinstance(timeout, TimeoutSauce): + pass + else: + timeout = TimeoutSauce(connect=timeout, read=timeout) + + try: + resp = conn.urlopen( + method=request.method, + url=url, + body=request.body, + headers=request.headers, + redirect=False, + assert_same_host=False, + preload_content=False, + decode_content=False, + retries=self.max_retries, + timeout=timeout, + chunked=chunked, + ) + + except (ProtocolError, OSError) as err: + raise ConnectionError(err, request=request) + + except MaxRetryError as e: + if isinstance(e.reason, ConnectTimeoutError): + # TODO: Remove this in 3.0.0: see #2811 + if not isinstance(e.reason, NewConnectionError): + raise ConnectTimeout(e, request=request) + + if isinstance(e.reason, ResponseError): + raise RetryError(e, request=request) + + if isinstance(e.reason, _ProxyError): + raise ProxyError(e, request=request) + + if isinstance(e.reason, _SSLError): + # This branch is for urllib3 v1.22 and later. + raise SSLError(e, request=request) + + raise ConnectionError(e, request=request) + + except ClosedPoolError as e: + raise ConnectionError(e, request=request) + + except _ProxyError as e: + raise ProxyError(e) + + except (_SSLError, _HTTPError) as e: + if isinstance(e, _SSLError): + # This branch is for urllib3 versions earlier than v1.22 + raise SSLError(e, request=request) + elif isinstance(e, ReadTimeoutError): + raise ReadTimeout(e, request=request) + elif isinstance(e, _InvalidHeader): + raise InvalidHeader(e, request=request) + else: + raise + + return self.build_response(request, resp) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/requests/api.py b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/api.py new file mode 100644 index 0000000..cd0b3ee --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/api.py @@ -0,0 +1,157 @@ +""" +requests.api +~~~~~~~~~~~~ + +This module implements the Requests API. + +:copyright: (c) 2012 by Kenneth Reitz. +:license: Apache2, see LICENSE for more details. +""" + +from . import sessions + + +def request(method, url, **kwargs): + """Constructs and sends a :class:`Request `. 
+ + :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary, list of tuples or bytes to send + in the query string for the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. + :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. + ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` + or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string + defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers + to add for the file. + :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) How many seconds to wait for the server to send data + before giving up, as a float, or a :ref:`(connect timeout, read + timeout) ` tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. + :type allow_redirects: bool + :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use. Defaults to ``True``. + :param stream: (optional) if ``False``, the response content will be immediately downloaded. + :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. + :return: :class:`Response ` object + :rtype: requests.Response + + Usage:: + + >>> import requests + >>> req = requests.request('GET', 'https://httpbin.org/get') + >>> req + + """ + + # By using the 'with' statement we are sure the session is closed, thus we + # avoid leaving sockets open which can trigger a ResourceWarning in some + # cases, and look like a memory leak in others. + with sessions.Session() as session: + return session.request(method=method, url=url, **kwargs) + + +def get(url, params=None, **kwargs): + r"""Sends a GET request. + + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary, list of tuples or bytes to send + in the query string for the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("get", url, params=params, **kwargs) + + +def options(url, **kwargs): + r"""Sends an OPTIONS request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("options", url, **kwargs) + + +def head(url, **kwargs): + r"""Sends a HEAD request. + + :param url: URL for the new :class:`Request` object. 
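[Editorial example — not part of the committed file.] The verb helpers below all delegate to request(), which wraps the call in a throwaway Session so the connection is closed afterwards. An illustrative use, assuming the regular requests package:

import requests

resp = requests.get("https://httpbin.org/get", params={"q": "pip"})
same = requests.request("GET", "https://httpbin.org/get", params={"q": "pip"})
print(resp.url)                           # params are appended: .../get?q=pip
print(resp.status_code, same.status_code)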
+ :param \*\*kwargs: Optional arguments that ``request`` takes. If + `allow_redirects` is not provided, it will be set to `False` (as + opposed to the default :meth:`request` behavior). + :return: :class:`Response ` object + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", False) + return request("head", url, **kwargs) + + +def post(url, data=None, json=None, **kwargs): + r"""Sends a POST request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("post", url, data=data, json=json, **kwargs) + + +def put(url, data=None, **kwargs): + r"""Sends a PUT request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("put", url, data=data, **kwargs) + + +def patch(url, data=None, **kwargs): + r"""Sends a PATCH request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("patch", url, data=data, **kwargs) + + +def delete(url, **kwargs): + r"""Sends a DELETE request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("delete", url, **kwargs) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/requests/auth.py b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/auth.py new file mode 100644 index 0000000..9733686 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/auth.py @@ -0,0 +1,315 @@ +""" +requests.auth +~~~~~~~~~~~~~ + +This module contains the authentication handlers for Requests. +""" + +import hashlib +import os +import re +import threading +import time +import warnings +from base64 import b64encode + +from ._internal_utils import to_native_string +from .compat import basestring, str, urlparse +from .cookies import extract_cookies_to_jar +from .utils import parse_dict_header + +CONTENT_TYPE_FORM_URLENCODED = "application/x-www-form-urlencoded" +CONTENT_TYPE_MULTI_PART = "multipart/form-data" + + +def _basic_auth_str(username, password): + """Returns a Basic Auth string.""" + + # "I want us to put a big-ol' comment on top of it that + # says that this behaviour is dumb but we need to preserve + # it because people are relying on it." + # - Lukasa + # + # These are here solely to maintain backwards compatibility + # for things like ints. This will be removed in 3.0.0. 
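[Editorial example — not part of the committed file.] The practical difference between the data= and json= parameters documented above is the body encoding and the Content-Type header that gets set. A sketch, assuming the regular requests package and httpbin.org as a placeholder echo server:

import requests

form = requests.post("https://httpbin.org/post", data={"a": "1"})   # form-encoded body
body = requests.post("https://httpbin.org/post", json={"a": 1})     # JSON-serialised body
print(form.request.headers["Content-Type"])  # application/x-www-form-urlencoded
print(body.request.headers["Content-Type"])  # application/json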
+ if not isinstance(username, basestring): + warnings.warn( + "Non-string usernames will no longer be supported in Requests " + "3.0.0. Please convert the object you've passed in ({!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(username), + category=DeprecationWarning, + ) + username = str(username) + + if not isinstance(password, basestring): + warnings.warn( + "Non-string passwords will no longer be supported in Requests " + "3.0.0. Please convert the object you've passed in ({!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(type(password)), + category=DeprecationWarning, + ) + password = str(password) + # -- End Removal -- + + if isinstance(username, str): + username = username.encode("latin1") + + if isinstance(password, str): + password = password.encode("latin1") + + authstr = "Basic " + to_native_string( + b64encode(b":".join((username, password))).strip() + ) + + return authstr + + +class AuthBase: + """Base class that all auth implementations derive from""" + + def __call__(self, r): + raise NotImplementedError("Auth hooks must be callable.") + + +class HTTPBasicAuth(AuthBase): + """Attaches HTTP Basic Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + + def __eq__(self, other): + return all( + [ + self.username == getattr(other, "username", None), + self.password == getattr(other, "password", None), + ] + ) + + def __ne__(self, other): + return not self == other + + def __call__(self, r): + r.headers["Authorization"] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPProxyAuth(HTTPBasicAuth): + """Attaches HTTP Proxy Authentication to a given Request object.""" + + def __call__(self, r): + r.headers["Proxy-Authorization"] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPDigestAuth(AuthBase): + """Attaches HTTP Digest Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + # Keep state in per-thread local storage + self._thread_local = threading.local() + + def init_per_thread_state(self): + # Ensure state is initialized just once per-thread + if not hasattr(self._thread_local, "init"): + self._thread_local.init = True + self._thread_local.last_nonce = "" + self._thread_local.nonce_count = 0 + self._thread_local.chal = {} + self._thread_local.pos = None + self._thread_local.num_401_calls = None + + def build_digest_header(self, method, url): + """ + :rtype: str + """ + + realm = self._thread_local.chal["realm"] + nonce = self._thread_local.chal["nonce"] + qop = self._thread_local.chal.get("qop") + algorithm = self._thread_local.chal.get("algorithm") + opaque = self._thread_local.chal.get("opaque") + hash_utf8 = None + + if algorithm is None: + _algorithm = "MD5" + else: + _algorithm = algorithm.upper() + # lambdas assume digest modules are imported at the top level + if _algorithm == "MD5" or _algorithm == "MD5-SESS": + + def md5_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.md5(x).hexdigest() + + hash_utf8 = md5_utf8 + elif _algorithm == "SHA": + + def sha_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.sha1(x).hexdigest() + + hash_utf8 = sha_utf8 + elif _algorithm == "SHA-256": + + def sha256_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.sha256(x).hexdigest() + + hash_utf8 = 
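[Editorial example — not part of the committed file.] HTTPBasicAuth attaches an Authorization header up front, while HTTPDigestAuth keeps per-thread challenge state and answers the server's 401 with the header built by build_digest_header() below. A usage sketch, assuming the regular requests package and httpbin endpoints as placeholders:

import requests
from requests.auth import HTTPBasicAuth, HTTPDigestAuth

# A plain ("user", "pass") tuple is shorthand for HTTPBasicAuth.
r1 = requests.get("https://httpbin.org/basic-auth/user/pass",
                  auth=HTTPBasicAuth("user", "pass"))
r2 = requests.get("https://httpbin.org/digest-auth/auth/user/pass",
                  auth=HTTPDigestAuth("user", "pass"))
print(r1.status_code, r2.status_code)   # 200 200 when the credentials match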
sha256_utf8 + elif _algorithm == "SHA-512": + + def sha512_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.sha512(x).hexdigest() + + hash_utf8 = sha512_utf8 + + KD = lambda s, d: hash_utf8(f"{s}:{d}") # noqa:E731 + + if hash_utf8 is None: + return None + + # XXX not implemented yet + entdig = None + p_parsed = urlparse(url) + #: path is request-uri defined in RFC 2616 which should not be empty + path = p_parsed.path or "/" + if p_parsed.query: + path += f"?{p_parsed.query}" + + A1 = f"{self.username}:{realm}:{self.password}" + A2 = f"{method}:{path}" + + HA1 = hash_utf8(A1) + HA2 = hash_utf8(A2) + + if nonce == self._thread_local.last_nonce: + self._thread_local.nonce_count += 1 + else: + self._thread_local.nonce_count = 1 + ncvalue = f"{self._thread_local.nonce_count:08x}" + s = str(self._thread_local.nonce_count).encode("utf-8") + s += nonce.encode("utf-8") + s += time.ctime().encode("utf-8") + s += os.urandom(8) + + cnonce = hashlib.sha1(s).hexdigest()[:16] + if _algorithm == "MD5-SESS": + HA1 = hash_utf8(f"{HA1}:{nonce}:{cnonce}") + + if not qop: + respdig = KD(HA1, f"{nonce}:{HA2}") + elif qop == "auth" or "auth" in qop.split(","): + noncebit = f"{nonce}:{ncvalue}:{cnonce}:auth:{HA2}" + respdig = KD(HA1, noncebit) + else: + # XXX handle auth-int. + return None + + self._thread_local.last_nonce = nonce + + # XXX should the partial digests be encoded too? + base = ( + f'username="{self.username}", realm="{realm}", nonce="{nonce}", ' + f'uri="{path}", response="{respdig}"' + ) + if opaque: + base += f', opaque="{opaque}"' + if algorithm: + base += f', algorithm="{algorithm}"' + if entdig: + base += f', digest="{entdig}"' + if qop: + base += f', qop="auth", nc={ncvalue}, cnonce="{cnonce}"' + + return f"Digest {base}" + + def handle_redirect(self, r, **kwargs): + """Reset num_401_calls counter on redirects.""" + if r.is_redirect: + self._thread_local.num_401_calls = 1 + + def handle_401(self, r, **kwargs): + """ + Takes the given response and tries digest-auth, if needed. + + :rtype: requests.Response + """ + + # If response is not 4xx, do not auth + # See https://github.com/psf/requests/issues/3772 + if not 400 <= r.status_code < 500: + self._thread_local.num_401_calls = 1 + return r + + if self._thread_local.pos is not None: + # Rewind the file position indicator of the body to where + # it was to resend the request. + r.request.body.seek(self._thread_local.pos) + s_auth = r.headers.get("www-authenticate", "") + + if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2: + + self._thread_local.num_401_calls += 1 + pat = re.compile(r"digest ", flags=re.IGNORECASE) + self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1)) + + # Consume content and release the original connection + # to allow our new request to reuse the same one. 
+ r.content + r.close() + prep = r.request.copy() + extract_cookies_to_jar(prep._cookies, r.request, r.raw) + prep.prepare_cookies(prep._cookies) + + prep.headers["Authorization"] = self.build_digest_header( + prep.method, prep.url + ) + _r = r.connection.send(prep, **kwargs) + _r.history.append(r) + _r.request = prep + + return _r + + self._thread_local.num_401_calls = 1 + return r + + def __call__(self, r): + # Initialize per-thread state, if needed + self.init_per_thread_state() + # If we have a saved nonce, skip the 401 + if self._thread_local.last_nonce: + r.headers["Authorization"] = self.build_digest_header(r.method, r.url) + try: + self._thread_local.pos = r.body.tell() + except AttributeError: + # In the case of HTTPDigestAuth being reused and the body of + # the previous request was a file-like object, pos has the + # file position of the previous body. Ensure it's set to + # None. + self._thread_local.pos = None + r.register_hook("response", self.handle_401) + r.register_hook("response", self.handle_redirect) + self._thread_local.num_401_calls = 1 + + return r + + def __eq__(self, other): + return all( + [ + self.username == getattr(other, "username", None), + self.password == getattr(other, "password", None), + ] + ) + + def __ne__(self, other): + return not self == other diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/requests/certs.py b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/certs.py new file mode 100644 index 0000000..38696a1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/certs.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python + +""" +requests.certs +~~~~~~~~~~~~~~ + +This module returns the preferred default CA certificate bundle. There is +only one — the one from the certifi package. + +If you are packaging Requests, e.g., for a Linux distribution or a managed +environment, you can change the definition of where() to return a separately +packaged CA bundle. +""" + +import os + +if "_PIP_STANDALONE_CERT" not in os.environ: + from pip._vendor.certifi import where +else: + def where(): + return os.environ["_PIP_STANDALONE_CERT"] + +if __name__ == "__main__": + print(where()) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/requests/compat.py b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/compat.py new file mode 100644 index 0000000..9ab2bb4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/compat.py @@ -0,0 +1,67 @@ +""" +requests.compat +~~~~~~~~~~~~~~~ + +This module previously handled import compatibility issues +between Python 2 and Python 3. It remains for backwards +compatibility until the next major version. +""" + +from pip._vendor import chardet + +import sys + +# ------- +# Pythons +# ------- + +# Syntax sugar. +_ver = sys.version_info + +#: Python 2.x? +is_py2 = _ver[0] == 2 + +#: Python 3.x? +is_py3 = _ver[0] == 3 + +# Note: We've patched out simplejson support in pip because it prevents +# upgrading simplejson on Windows. +import json +from json import JSONDecodeError + +# Keep OrderedDict for backwards compatibility. 
+from collections import OrderedDict +from collections.abc import Callable, Mapping, MutableMapping +from http import cookiejar as cookielib +from http.cookies import Morsel +from io import StringIO + +# -------------- +# Legacy Imports +# -------------- +from urllib.parse import ( + quote, + quote_plus, + unquote, + unquote_plus, + urldefrag, + urlencode, + urljoin, + urlparse, + urlsplit, + urlunparse, +) +from urllib.request import ( + getproxies, + getproxies_environment, + parse_http_list, + proxy_bypass, + proxy_bypass_environment, +) + +builtin_str = str +str = str +bytes = bytes +basestring = (str, bytes) +numeric_types = (int, float) +integer_types = (int,) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/requests/cookies.py b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/cookies.py new file mode 100644 index 0000000..bf54ab2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/cookies.py @@ -0,0 +1,561 @@ +""" +requests.cookies +~~~~~~~~~~~~~~~~ + +Compatibility code to be able to use `cookielib.CookieJar` with requests. + +requests.utils imports from here, so be careful with imports. +""" + +import calendar +import copy +import time + +from ._internal_utils import to_native_string +from .compat import Morsel, MutableMapping, cookielib, urlparse, urlunparse + +try: + import threading +except ImportError: + import dummy_threading as threading + + +class MockRequest: + """Wraps a `requests.Request` to mimic a `urllib2.Request`. + + The code in `cookielib.CookieJar` expects this interface in order to correctly + manage cookie policies, i.e., determine whether a cookie can be set, given the + domains of the request and the cookie. + + The original request object is read-only. The client is responsible for collecting + the new headers via `get_new_headers()` and interpreting them appropriately. You + probably want `get_cookie_header`, defined below. 
+ """ + + def __init__(self, request): + self._r = request + self._new_headers = {} + self.type = urlparse(self._r.url).scheme + + def get_type(self): + return self.type + + def get_host(self): + return urlparse(self._r.url).netloc + + def get_origin_req_host(self): + return self.get_host() + + def get_full_url(self): + # Only return the response's URL if the user hadn't set the Host + # header + if not self._r.headers.get("Host"): + return self._r.url + # If they did set it, retrieve it and reconstruct the expected domain + host = to_native_string(self._r.headers["Host"], encoding="utf-8") + parsed = urlparse(self._r.url) + # Reconstruct the URL as we expect it + return urlunparse( + [ + parsed.scheme, + host, + parsed.path, + parsed.params, + parsed.query, + parsed.fragment, + ] + ) + + def is_unverifiable(self): + return True + + def has_header(self, name): + return name in self._r.headers or name in self._new_headers + + def get_header(self, name, default=None): + return self._r.headers.get(name, self._new_headers.get(name, default)) + + def add_header(self, key, val): + """cookielib has no legitimate use for this method; add it back if you find one.""" + raise NotImplementedError( + "Cookie headers should be added with add_unredirected_header()" + ) + + def add_unredirected_header(self, name, value): + self._new_headers[name] = value + + def get_new_headers(self): + return self._new_headers + + @property + def unverifiable(self): + return self.is_unverifiable() + + @property + def origin_req_host(self): + return self.get_origin_req_host() + + @property + def host(self): + return self.get_host() + + +class MockResponse: + """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`. + + ...what? Basically, expose the parsed HTTP headers from the server response + the way `cookielib` expects to see them. + """ + + def __init__(self, headers): + """Make a MockResponse for `cookielib` to read. + + :param headers: a httplib.HTTPMessage or analogous carrying the headers + """ + self._headers = headers + + def info(self): + return self._headers + + def getheaders(self, name): + self._headers.getheaders(name) + + +def extract_cookies_to_jar(jar, request, response): + """Extract the cookies from the response into a CookieJar. + + :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar) + :param request: our own requests.Request object + :param response: urllib3.HTTPResponse object + """ + if not (hasattr(response, "_original_response") and response._original_response): + return + # the _original_response field is the wrapped httplib.HTTPResponse object, + req = MockRequest(request) + # pull out the HTTPMessage with the headers and put it in the mock: + res = MockResponse(response._original_response.msg) + jar.extract_cookies(res, req) + + +def get_cookie_header(jar, request): + """ + Produce an appropriate Cookie header string to be sent with `request`, or None. + + :rtype: str + """ + r = MockRequest(request) + jar.add_cookie_header(r) + return r.get_new_headers().get("Cookie") + + +def remove_cookie_by_name(cookiejar, name, domain=None, path=None): + """Unsets a cookie by name, by default over all domains and paths. + + Wraps CookieJar.clear(), is O(n). 
+ """ + clearables = [] + for cookie in cookiejar: + if cookie.name != name: + continue + if domain is not None and domain != cookie.domain: + continue + if path is not None and path != cookie.path: + continue + clearables.append((cookie.domain, cookie.path, cookie.name)) + + for domain, path, name in clearables: + cookiejar.clear(domain, path, name) + + +class CookieConflictError(RuntimeError): + """There are two cookies that meet the criteria specified in the cookie jar. + Use .get and .set and include domain and path args in order to be more specific. + """ + + +class RequestsCookieJar(cookielib.CookieJar, MutableMapping): + """Compatibility class; is a cookielib.CookieJar, but exposes a dict + interface. + + This is the CookieJar we create by default for requests and sessions that + don't specify one, since some clients may expect response.cookies and + session.cookies to support dict operations. + + Requests does not use the dict interface internally; it's just for + compatibility with external client code. All requests code should work + out of the box with externally provided instances of ``CookieJar``, e.g. + ``LWPCookieJar`` and ``FileCookieJar``. + + Unlike a regular CookieJar, this class is pickleable. + + .. warning:: dictionary operations that are normally O(1) may be O(n). + """ + + def get(self, name, default=None, domain=None, path=None): + """Dict-like get() that also supports optional domain and path args in + order to resolve naming collisions from using one cookie jar over + multiple domains. + + .. warning:: operation is O(n), not O(1). + """ + try: + return self._find_no_duplicates(name, domain, path) + except KeyError: + return default + + def set(self, name, value, **kwargs): + """Dict-like set() that also supports optional domain and path args in + order to resolve naming collisions from using one cookie jar over + multiple domains. + """ + # support client code that unsets cookies by assignment of a None value: + if value is None: + remove_cookie_by_name( + self, name, domain=kwargs.get("domain"), path=kwargs.get("path") + ) + return + + if isinstance(value, Morsel): + c = morsel_to_cookie(value) + else: + c = create_cookie(name, value, **kwargs) + self.set_cookie(c) + return c + + def iterkeys(self): + """Dict-like iterkeys() that returns an iterator of names of cookies + from the jar. + + .. seealso:: itervalues() and iteritems(). + """ + for cookie in iter(self): + yield cookie.name + + def keys(self): + """Dict-like keys() that returns a list of names of cookies from the + jar. + + .. seealso:: values() and items(). + """ + return list(self.iterkeys()) + + def itervalues(self): + """Dict-like itervalues() that returns an iterator of values of cookies + from the jar. + + .. seealso:: iterkeys() and iteritems(). + """ + for cookie in iter(self): + yield cookie.value + + def values(self): + """Dict-like values() that returns a list of values of cookies from the + jar. + + .. seealso:: keys() and items(). + """ + return list(self.itervalues()) + + def iteritems(self): + """Dict-like iteritems() that returns an iterator of name-value tuples + from the jar. + + .. seealso:: iterkeys() and itervalues(). + """ + for cookie in iter(self): + yield cookie.name, cookie.value + + def items(self): + """Dict-like items() that returns a list of name-value tuples from the + jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a + vanilla python dict of key value pairs. + + .. seealso:: keys() and values(). 
+ """ + return list(self.iteritems()) + + def list_domains(self): + """Utility method to list all the domains in the jar.""" + domains = [] + for cookie in iter(self): + if cookie.domain not in domains: + domains.append(cookie.domain) + return domains + + def list_paths(self): + """Utility method to list all the paths in the jar.""" + paths = [] + for cookie in iter(self): + if cookie.path not in paths: + paths.append(cookie.path) + return paths + + def multiple_domains(self): + """Returns True if there are multiple domains in the jar. + Returns False otherwise. + + :rtype: bool + """ + domains = [] + for cookie in iter(self): + if cookie.domain is not None and cookie.domain in domains: + return True + domains.append(cookie.domain) + return False # there is only one domain in jar + + def get_dict(self, domain=None, path=None): + """Takes as an argument an optional domain and path and returns a plain + old Python dict of name-value pairs of cookies that meet the + requirements. + + :rtype: dict + """ + dictionary = {} + for cookie in iter(self): + if (domain is None or cookie.domain == domain) and ( + path is None or cookie.path == path + ): + dictionary[cookie.name] = cookie.value + return dictionary + + def __contains__(self, name): + try: + return super().__contains__(name) + except CookieConflictError: + return True + + def __getitem__(self, name): + """Dict-like __getitem__() for compatibility with client code. Throws + exception if there are more than one cookie with name. In that case, + use the more explicit get() method instead. + + .. warning:: operation is O(n), not O(1). + """ + return self._find_no_duplicates(name) + + def __setitem__(self, name, value): + """Dict-like __setitem__ for compatibility with client code. Throws + exception if there is already a cookie of that name in the jar. In that + case, use the more explicit set() method instead. + """ + self.set(name, value) + + def __delitem__(self, name): + """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s + ``remove_cookie_by_name()``. + """ + remove_cookie_by_name(self, name) + + def set_cookie(self, cookie, *args, **kwargs): + if ( + hasattr(cookie.value, "startswith") + and cookie.value.startswith('"') + and cookie.value.endswith('"') + ): + cookie.value = cookie.value.replace('\\"', "") + return super().set_cookie(cookie, *args, **kwargs) + + def update(self, other): + """Updates this jar with cookies from another CookieJar or dict-like""" + if isinstance(other, cookielib.CookieJar): + for cookie in other: + self.set_cookie(copy.copy(cookie)) + else: + super().update(other) + + def _find(self, name, domain=None, path=None): + """Requests uses this method internally to get cookie values. + + If there are conflicting cookies, _find arbitrarily chooses one. + See _find_no_duplicates if you want an exception thrown if there are + conflicting cookies. + + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :return: cookie.value + """ + for cookie in iter(self): + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + return cookie.value + + raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}") + + def _find_no_duplicates(self, name, domain=None, path=None): + """Both ``__get_item__`` and ``get`` call this function: it's never + used elsewhere in Requests. 
+ + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :raises KeyError: if cookie is not found + :raises CookieConflictError: if there are multiple cookies + that match name and optionally domain and path + :return: cookie.value + """ + toReturn = None + for cookie in iter(self): + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + if toReturn is not None: + # if there are multiple cookies that meet passed in criteria + raise CookieConflictError( + f"There are multiple cookies with name, {name!r}" + ) + # we will eventually return this as long as no cookie conflict + toReturn = cookie.value + + if toReturn: + return toReturn + raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}") + + def __getstate__(self): + """Unlike a normal CookieJar, this class is pickleable.""" + state = self.__dict__.copy() + # remove the unpickleable RLock object + state.pop("_cookies_lock") + return state + + def __setstate__(self, state): + """Unlike a normal CookieJar, this class is pickleable.""" + self.__dict__.update(state) + if "_cookies_lock" not in self.__dict__: + self._cookies_lock = threading.RLock() + + def copy(self): + """Return a copy of this RequestsCookieJar.""" + new_cj = RequestsCookieJar() + new_cj.set_policy(self.get_policy()) + new_cj.update(self) + return new_cj + + def get_policy(self): + """Return the CookiePolicy instance used.""" + return self._policy + + +def _copy_cookie_jar(jar): + if jar is None: + return None + + if hasattr(jar, "copy"): + # We're dealing with an instance of RequestsCookieJar + return jar.copy() + # We're dealing with a generic CookieJar instance + new_jar = copy.copy(jar) + new_jar.clear() + for cookie in jar: + new_jar.set_cookie(copy.copy(cookie)) + return new_jar + + +def create_cookie(name, value, **kwargs): + """Make a cookie from underspecified parameters. + + By default, the pair of `name` and `value` will be set for the domain '' + and sent on every request (this is sometimes called a "supercookie"). 
+ """ + result = { + "version": 0, + "name": name, + "value": value, + "port": None, + "domain": "", + "path": "/", + "secure": False, + "expires": None, + "discard": True, + "comment": None, + "comment_url": None, + "rest": {"HttpOnly": None}, + "rfc2109": False, + } + + badargs = set(kwargs) - set(result) + if badargs: + raise TypeError( + f"create_cookie() got unexpected keyword arguments: {list(badargs)}" + ) + + result.update(kwargs) + result["port_specified"] = bool(result["port"]) + result["domain_specified"] = bool(result["domain"]) + result["domain_initial_dot"] = result["domain"].startswith(".") + result["path_specified"] = bool(result["path"]) + + return cookielib.Cookie(**result) + + +def morsel_to_cookie(morsel): + """Convert a Morsel object into a Cookie containing the one k/v pair.""" + + expires = None + if morsel["max-age"]: + try: + expires = int(time.time() + int(morsel["max-age"])) + except ValueError: + raise TypeError(f"max-age: {morsel['max-age']} must be integer") + elif morsel["expires"]: + time_template = "%a, %d-%b-%Y %H:%M:%S GMT" + expires = calendar.timegm(time.strptime(morsel["expires"], time_template)) + return create_cookie( + comment=morsel["comment"], + comment_url=bool(morsel["comment"]), + discard=False, + domain=morsel["domain"], + expires=expires, + name=morsel.key, + path=morsel["path"], + port=None, + rest={"HttpOnly": morsel["httponly"]}, + rfc2109=False, + secure=bool(morsel["secure"]), + value=morsel.value, + version=morsel["version"] or 0, + ) + + +def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True): + """Returns a CookieJar from a key/value dictionary. + + :param cookie_dict: Dict of key/values to insert into CookieJar. + :param cookiejar: (optional) A cookiejar to add the cookies to. + :param overwrite: (optional) If False, will not replace cookies + already in the jar with new ones. + :rtype: CookieJar + """ + if cookiejar is None: + cookiejar = RequestsCookieJar() + + if cookie_dict is not None: + names_from_jar = [cookie.name for cookie in cookiejar] + for name in cookie_dict: + if overwrite or (name not in names_from_jar): + cookiejar.set_cookie(create_cookie(name, cookie_dict[name])) + + return cookiejar + + +def merge_cookies(cookiejar, cookies): + """Add cookies to cookiejar and returns a merged CookieJar. + + :param cookiejar: CookieJar object to add the cookies to. + :param cookies: Dictionary or CookieJar object to be added. + :rtype: CookieJar + """ + if not isinstance(cookiejar, cookielib.CookieJar): + raise ValueError("You can only merge into CookieJar") + + if isinstance(cookies, dict): + cookiejar = cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False) + elif isinstance(cookies, cookielib.CookieJar): + try: + cookiejar.update(cookies) + except AttributeError: + for cookie_in_jar in cookies: + cookiejar.set_cookie(cookie_in_jar) + + return cookiejar diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/requests/exceptions.py b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/exceptions.py new file mode 100644 index 0000000..168d073 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/exceptions.py @@ -0,0 +1,141 @@ +""" +requests.exceptions +~~~~~~~~~~~~~~~~~~~ + +This module contains the set of Requests' exceptions. 
+""" +from pip._vendor.urllib3.exceptions import HTTPError as BaseHTTPError + +from .compat import JSONDecodeError as CompatJSONDecodeError + + +class RequestException(IOError): + """There was an ambiguous exception that occurred while handling your + request. + """ + + def __init__(self, *args, **kwargs): + """Initialize RequestException with `request` and `response` objects.""" + response = kwargs.pop("response", None) + self.response = response + self.request = kwargs.pop("request", None) + if response is not None and not self.request and hasattr(response, "request"): + self.request = self.response.request + super().__init__(*args, **kwargs) + + +class InvalidJSONError(RequestException): + """A JSON error occurred.""" + + +class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError): + """Couldn't decode the text into json""" + + def __init__(self, *args, **kwargs): + """ + Construct the JSONDecodeError instance first with all + args. Then use it's args to construct the IOError so that + the json specific args aren't used as IOError specific args + and the error message from JSONDecodeError is preserved. + """ + CompatJSONDecodeError.__init__(self, *args) + InvalidJSONError.__init__(self, *self.args, **kwargs) + + +class HTTPError(RequestException): + """An HTTP error occurred.""" + + +class ConnectionError(RequestException): + """A Connection error occurred.""" + + +class ProxyError(ConnectionError): + """A proxy error occurred.""" + + +class SSLError(ConnectionError): + """An SSL error occurred.""" + + +class Timeout(RequestException): + """The request timed out. + + Catching this error will catch both + :exc:`~requests.exceptions.ConnectTimeout` and + :exc:`~requests.exceptions.ReadTimeout` errors. + """ + + +class ConnectTimeout(ConnectionError, Timeout): + """The request timed out while trying to connect to the remote server. + + Requests that produced this error are safe to retry. + """ + + +class ReadTimeout(Timeout): + """The server did not send any data in the allotted amount of time.""" + + +class URLRequired(RequestException): + """A valid URL is required to make a request.""" + + +class TooManyRedirects(RequestException): + """Too many redirects.""" + + +class MissingSchema(RequestException, ValueError): + """The URL scheme (e.g. 
http or https) is missing.""" + + +class InvalidSchema(RequestException, ValueError): + """The URL scheme provided is either invalid or unsupported.""" + + +class InvalidURL(RequestException, ValueError): + """The URL provided was somehow invalid.""" + + +class InvalidHeader(RequestException, ValueError): + """The header value provided was somehow invalid.""" + + +class InvalidProxyURL(InvalidURL): + """The proxy URL provided is invalid.""" + + +class ChunkedEncodingError(RequestException): + """The server declared chunked encoding but sent an invalid chunk.""" + + +class ContentDecodingError(RequestException, BaseHTTPError): + """Failed to decode response content.""" + + +class StreamConsumedError(RequestException, TypeError): + """The content for this response was already consumed.""" + + +class RetryError(RequestException): + """Custom retries logic failed""" + + +class UnrewindableBodyError(RequestException): + """Requests encountered an error when trying to rewind a body.""" + + +# Warnings + + +class RequestsWarning(Warning): + """Base warning for Requests.""" + + +class FileModeWarning(RequestsWarning, DeprecationWarning): + """A file was opened in text mode, but Requests determined its binary length.""" + + +class RequestsDependencyWarning(RequestsWarning): + """An imported dependency doesn't match the expected version range.""" diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/requests/help.py b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/help.py new file mode 100644 index 0000000..2d292c2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/help.py @@ -0,0 +1,131 @@ +"""Module containing bug report helper(s).""" + +import json +import platform +import ssl +import sys + +from pip._vendor import idna +from pip._vendor import urllib3 + +from . import __version__ as requests_version + +charset_normalizer = None + +try: + from pip._vendor import chardet +except ImportError: + chardet = None + +try: + from pip._vendor.urllib3.contrib import pyopenssl +except ImportError: + pyopenssl = None + OpenSSL = None + cryptography = None +else: + import cryptography + import OpenSSL + + +def _implementation(): + """Return a dict with the Python implementation and version. + + Provide both the name and the version of the Python implementation + currently running. For example, on CPython 3.10.3 it will return + {'name': 'CPython', 'version': '3.10.3'}. + + This function works best on CPython and PyPy: in particular, it probably + doesn't work for Jython or IronPython. Future investigation should be done + to work out the correct shape of the code for those platforms. 
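[Editorial example — not part of the committed file.] The hierarchy above is designed so that one base class catches everything: Timeout covers both ConnectTimeout and ReadTimeout, and RequestException is the root. A sketch of the usual catch order, assuming the regular requests package and a placeholder endpoint:

import requests
from requests.exceptions import HTTPError, RequestException, Timeout

try:
    resp = requests.get("https://httpbin.org/status/503", timeout=5)
    resp.raise_for_status()              # raises HTTPError for 4xx/5xx codes
except Timeout:
    print("timed out (connect or read)")
except HTTPError as exc:
    print("server answered with", exc.response.status_code)
except RequestException as exc:
    print("request failed:", exc)        # catch-all base class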
+ """ + implementation = platform.python_implementation() + + if implementation == "CPython": + implementation_version = platform.python_version() + elif implementation == "PyPy": + implementation_version = "{}.{}.{}".format( + sys.pypy_version_info.major, + sys.pypy_version_info.minor, + sys.pypy_version_info.micro, + ) + if sys.pypy_version_info.releaselevel != "final": + implementation_version = "".join( + [implementation_version, sys.pypy_version_info.releaselevel] + ) + elif implementation == "Jython": + implementation_version = platform.python_version() # Complete Guess + elif implementation == "IronPython": + implementation_version = platform.python_version() # Complete Guess + else: + implementation_version = "Unknown" + + return {"name": implementation, "version": implementation_version} + + +def info(): + """Generate information for a bug report.""" + try: + platform_info = { + "system": platform.system(), + "release": platform.release(), + } + except OSError: + platform_info = { + "system": "Unknown", + "release": "Unknown", + } + + implementation_info = _implementation() + urllib3_info = {"version": urllib3.__version__} + charset_normalizer_info = {"version": None} + chardet_info = {"version": None} + if charset_normalizer: + charset_normalizer_info = {"version": charset_normalizer.__version__} + if chardet: + chardet_info = {"version": chardet.__version__} + + pyopenssl_info = { + "version": None, + "openssl_version": "", + } + if OpenSSL: + pyopenssl_info = { + "version": OpenSSL.__version__, + "openssl_version": f"{OpenSSL.SSL.OPENSSL_VERSION_NUMBER:x}", + } + cryptography_info = { + "version": getattr(cryptography, "__version__", ""), + } + idna_info = { + "version": getattr(idna, "__version__", ""), + } + + system_ssl = ssl.OPENSSL_VERSION_NUMBER + system_ssl_info = {"version": f"{system_ssl:x}" if system_ssl is not None else ""} + + return { + "platform": platform_info, + "implementation": implementation_info, + "system_ssl": system_ssl_info, + "using_pyopenssl": pyopenssl is not None, + "using_charset_normalizer": chardet is None, + "pyOpenSSL": pyopenssl_info, + "urllib3": urllib3_info, + "chardet": chardet_info, + "charset_normalizer": charset_normalizer_info, + "cryptography": cryptography_info, + "idna": idna_info, + "requests": { + "version": requests_version, + }, + } + + +def main(): + """Pretty-print the bug information as JSON.""" + print(json.dumps(info(), sort_keys=True, indent=2)) + + +if __name__ == "__main__": + main() diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/requests/hooks.py b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/hooks.py new file mode 100644 index 0000000..d181ba2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/hooks.py @@ -0,0 +1,33 @@ +""" +requests.hooks +~~~~~~~~~~~~~~ + +This module provides the capabilities for the Requests hooks system. + +Available hooks: + +``response``: + The response generated from a Request. 
+""" +HOOKS = ["response"] + + +def default_hooks(): + return {event: [] for event in HOOKS} + + +# TODO: response is the only one + + +def dispatch_hook(key, hooks, hook_data, **kwargs): + """Dispatches a hook dictionary on a given piece of data.""" + hooks = hooks or {} + hooks = hooks.get(key) + if hooks: + if hasattr(hooks, "__call__"): + hooks = [hooks] + for hook in hooks: + _hook_data = hook(hook_data, **kwargs) + if _hook_data is not None: + hook_data = _hook_data + return hook_data diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/requests/models.py b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/models.py new file mode 100644 index 0000000..76e6f19 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/models.py @@ -0,0 +1,1034 @@ +""" +requests.models +~~~~~~~~~~~~~~~ + +This module contains the primary objects that power Requests. +""" + +import datetime + +# Import encoding now, to avoid implicit import later. +# Implicit import within threads may cause LookupError when standard library is in a ZIP, +# such as in Embedded Python. See https://github.com/psf/requests/issues/3578. +import encodings.idna # noqa: F401 +from io import UnsupportedOperation + +from pip._vendor.urllib3.exceptions import ( + DecodeError, + LocationParseError, + ProtocolError, + ReadTimeoutError, + SSLError, +) +from pip._vendor.urllib3.fields import RequestField +from pip._vendor.urllib3.filepost import encode_multipart_formdata +from pip._vendor.urllib3.util import parse_url + +from ._internal_utils import to_native_string, unicode_is_ascii +from .auth import HTTPBasicAuth +from .compat import ( + Callable, + JSONDecodeError, + Mapping, + basestring, + builtin_str, + chardet, + cookielib, +) +from .compat import json as complexjson +from .compat import urlencode, urlsplit, urlunparse +from .cookies import _copy_cookie_jar, cookiejar_from_dict, get_cookie_header +from .exceptions import ( + ChunkedEncodingError, + ConnectionError, + ContentDecodingError, + HTTPError, + InvalidJSONError, + InvalidURL, +) +from .exceptions import JSONDecodeError as RequestsJSONDecodeError +from .exceptions import MissingSchema +from .exceptions import SSLError as RequestsSSLError +from .exceptions import StreamConsumedError +from .hooks import default_hooks +from .status_codes import codes +from .structures import CaseInsensitiveDict +from .utils import ( + check_header_validity, + get_auth_from_url, + guess_filename, + guess_json_utf, + iter_slices, + parse_header_links, + requote_uri, + stream_decode_response_unicode, + super_len, + to_key_val_list, +) + +#: The set of HTTP status codes that indicate an automatically +#: processable redirect. +REDIRECT_STATI = ( + codes.moved, # 301 + codes.found, # 302 + codes.other, # 303 + codes.temporary_redirect, # 307 + codes.permanent_redirect, # 308 +) + +DEFAULT_REDIRECT_LIMIT = 30 +CONTENT_CHUNK_SIZE = 10 * 1024 +ITER_CHUNK_SIZE = 512 + + +class RequestEncodingMixin: + @property + def path_url(self): + """Build the path URL to use.""" + + url = [] + + p = urlsplit(self.url) + + path = p.path + if not path: + path = "/" + + url.append(path) + + query = p.query + if query: + url.append("?") + url.append(query) + + return "".join(url) + + @staticmethod + def _encode_params(data): + """Encode parameters in a piece of data. + + Will successfully encode parameters when passed as a dict or a list of + 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary + if parameters are supplied as a dict. 
+ """ + + if isinstance(data, (str, bytes)): + return data + elif hasattr(data, "read"): + return data + elif hasattr(data, "__iter__"): + result = [] + for k, vs in to_key_val_list(data): + if isinstance(vs, basestring) or not hasattr(vs, "__iter__"): + vs = [vs] + for v in vs: + if v is not None: + result.append( + ( + k.encode("utf-8") if isinstance(k, str) else k, + v.encode("utf-8") if isinstance(v, str) else v, + ) + ) + return urlencode(result, doseq=True) + else: + return data + + @staticmethod + def _encode_files(files, data): + """Build the body for a multipart/form-data request. + + Will successfully encode files when passed as a dict or a list of + tuples. Order is retained if data is a list of tuples but arbitrary + if parameters are supplied as a dict. + The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) + or 4-tuples (filename, fileobj, contentype, custom_headers). + """ + if not files: + raise ValueError("Files must be provided.") + elif isinstance(data, basestring): + raise ValueError("Data must not be a string.") + + new_fields = [] + fields = to_key_val_list(data or {}) + files = to_key_val_list(files or {}) + + for field, val in fields: + if isinstance(val, basestring) or not hasattr(val, "__iter__"): + val = [val] + for v in val: + if v is not None: + # Don't call str() on bytestrings: in Py3 it all goes wrong. + if not isinstance(v, bytes): + v = str(v) + + new_fields.append( + ( + field.decode("utf-8") + if isinstance(field, bytes) + else field, + v.encode("utf-8") if isinstance(v, str) else v, + ) + ) + + for (k, v) in files: + # support for explicit filename + ft = None + fh = None + if isinstance(v, (tuple, list)): + if len(v) == 2: + fn, fp = v + elif len(v) == 3: + fn, fp, ft = v + else: + fn, fp, ft, fh = v + else: + fn = guess_filename(v) or k + fp = v + + if isinstance(fp, (str, bytes, bytearray)): + fdata = fp + elif hasattr(fp, "read"): + fdata = fp.read() + elif fp is None: + continue + else: + fdata = fp + + rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) + rf.make_multipart(content_type=ft) + new_fields.append(rf) + + body, content_type = encode_multipart_formdata(new_fields) + + return body, content_type + + +class RequestHooksMixin: + def register_hook(self, event, hook): + """Properly register a hook.""" + + if event not in self.hooks: + raise ValueError(f'Unsupported event specified, with event name "{event}"') + + if isinstance(hook, Callable): + self.hooks[event].append(hook) + elif hasattr(hook, "__iter__"): + self.hooks[event].extend(h for h in hook if isinstance(h, Callable)) + + def deregister_hook(self, event, hook): + """Deregister a previously registered hook. + Returns True if the hook existed, False if not. + """ + + try: + self.hooks[event].remove(hook) + return True + except ValueError: + return False + + +class Request(RequestHooksMixin): + """A user-created :class:`Request ` object. + + Used to prepare a :class:`PreparedRequest `, which is sent to the server. + + :param method: HTTP method to use. + :param url: URL to send. + :param headers: dictionary of headers to send. + :param files: dictionary of {filename: fileobject} files to multipart upload. + :param data: the body to attach to the request. If a dictionary or + list of tuples ``[(key, value)]`` is provided, form-encoding will + take place. + :param json: json for the body to attach to the request (if files or data is not specified). + :param params: URL parameters to append to the URL. 
If a dictionary or + list of tuples ``[(key, value)]`` is provided, form-encoding will + take place. + :param auth: Auth handler or (user, pass) tuple. + :param cookies: dictionary or CookieJar of cookies to attach to this request. + :param hooks: dictionary of callback hooks, for internal usage. + + Usage:: + + >>> import requests + >>> req = requests.Request('GET', 'https://httpbin.org/get') + >>> req.prepare() + + """ + + def __init__( + self, + method=None, + url=None, + headers=None, + files=None, + data=None, + params=None, + auth=None, + cookies=None, + hooks=None, + json=None, + ): + + # Default empty dicts for dict params. + data = [] if data is None else data + files = [] if files is None else files + headers = {} if headers is None else headers + params = {} if params is None else params + hooks = {} if hooks is None else hooks + + self.hooks = default_hooks() + for (k, v) in list(hooks.items()): + self.register_hook(event=k, hook=v) + + self.method = method + self.url = url + self.headers = headers + self.files = files + self.data = data + self.json = json + self.params = params + self.auth = auth + self.cookies = cookies + + def __repr__(self): + return f"" + + def prepare(self): + """Constructs a :class:`PreparedRequest ` for transmission and returns it.""" + p = PreparedRequest() + p.prepare( + method=self.method, + url=self.url, + headers=self.headers, + files=self.files, + data=self.data, + json=self.json, + params=self.params, + auth=self.auth, + cookies=self.cookies, + hooks=self.hooks, + ) + return p + + +class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): + """The fully mutable :class:`PreparedRequest ` object, + containing the exact bytes that will be sent to the server. + + Instances are generated from a :class:`Request ` object, and + should not be instantiated manually; doing so may produce undesirable + effects. + + Usage:: + + >>> import requests + >>> req = requests.Request('GET', 'https://httpbin.org/get') + >>> r = req.prepare() + >>> r + + + >>> s = requests.Session() + >>> s.send(r) + + """ + + def __init__(self): + #: HTTP verb to send to the server. + self.method = None + #: HTTP URL to send the request to. + self.url = None + #: dictionary of HTTP headers. + self.headers = None + # The `CookieJar` used to create the Cookie header will be stored here + # after prepare_cookies is called + self._cookies = None + #: request body to send to the server. + self.body = None + #: dictionary of callback hooks, for internal usage. + self.hooks = default_hooks() + #: integer denoting starting position of a readable file-like body. + self._body_position = None + + def prepare( + self, + method=None, + url=None, + headers=None, + files=None, + data=None, + params=None, + auth=None, + cookies=None, + hooks=None, + json=None, + ): + """Prepares the entire request with the given parameters.""" + + self.prepare_method(method) + self.prepare_url(url, params) + self.prepare_headers(headers) + self.prepare_cookies(cookies) + self.prepare_body(data, files, json) + self.prepare_auth(auth, url) + + # Note that prepare_auth must be last to enable authentication schemes + # such as OAuth to work on a fully prepared request. + + # This MUST go after prepare_auth. 
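[Editorial example — not part of the committed file.] Request is the user-facing container and PreparedRequest holds the exact bytes to send; sessions normally do the conversion, but it can be driven explicitly. A sketch of that flow, assuming the regular requests package; the URL and params are placeholders:

import requests

session = requests.Session()
req = requests.Request("GET", "https://httpbin.org/get", params={"page": 2})

# prepare_request() merges session-level cookies/headers/auth before preparing;
# calling req.prepare() directly would skip that merge.
prepped = session.prepare_request(req)
print(prepped.method, prepped.url)

resp = session.send(prepped, timeout=5)
print(resp.status_code)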
Authenticators could add a hook + self.prepare_hooks(hooks) + + def __repr__(self): + return f"" + + def copy(self): + p = PreparedRequest() + p.method = self.method + p.url = self.url + p.headers = self.headers.copy() if self.headers is not None else None + p._cookies = _copy_cookie_jar(self._cookies) + p.body = self.body + p.hooks = self.hooks + p._body_position = self._body_position + return p + + def prepare_method(self, method): + """Prepares the given HTTP method.""" + self.method = method + if self.method is not None: + self.method = to_native_string(self.method.upper()) + + @staticmethod + def _get_idna_encoded_host(host): + from pip._vendor import idna + + try: + host = idna.encode(host, uts46=True).decode("utf-8") + except idna.IDNAError: + raise UnicodeError + return host + + def prepare_url(self, url, params): + """Prepares the given HTTP URL.""" + #: Accept objects that have string representations. + #: We're unable to blindly call unicode/str functions + #: as this will include the bytestring indicator (b'') + #: on python 3.x. + #: https://github.com/psf/requests/pull/2238 + if isinstance(url, bytes): + url = url.decode("utf8") + else: + url = str(url) + + # Remove leading whitespaces from url + url = url.lstrip() + + # Don't do any URL preparation for non-HTTP schemes like `mailto`, + # `data` etc to work around exceptions from `url_parse`, which + # handles RFC 3986 only. + if ":" in url and not url.lower().startswith("http"): + self.url = url + return + + # Support for unicode domain names and paths. + try: + scheme, auth, host, port, path, query, fragment = parse_url(url) + except LocationParseError as e: + raise InvalidURL(*e.args) + + if not scheme: + raise MissingSchema( + f"Invalid URL {url!r}: No scheme supplied. " + f"Perhaps you meant https://{url}?" + ) + + if not host: + raise InvalidURL(f"Invalid URL {url!r}: No host supplied") + + # In general, we want to try IDNA encoding the hostname if the string contains + # non-ASCII characters. This allows users to automatically get the correct IDNA + # behaviour. For strings containing only ASCII characters, we need to also verify + # it doesn't start with a wildcard (*), before allowing the unencoded hostname. + if not unicode_is_ascii(host): + try: + host = self._get_idna_encoded_host(host) + except UnicodeError: + raise InvalidURL("URL has an invalid label.") + elif host.startswith(("*", ".")): + raise InvalidURL("URL has an invalid label.") + + # Carefully reconstruct the network location + netloc = auth or "" + if netloc: + netloc += "@" + netloc += host + if port: + netloc += f":{port}" + + # Bare domains aren't valid URLs. + if not path: + path = "/" + + if isinstance(params, (str, bytes)): + params = to_native_string(params) + + enc_params = self._encode_params(params) + if enc_params: + if query: + query = f"{query}&{enc_params}" + else: + query = enc_params + + url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) + self.url = url + + def prepare_headers(self, headers): + """Prepares the given HTTP headers.""" + + self.headers = CaseInsensitiveDict() + if headers: + for header in headers.items(): + # Raise exception on invalid header value. + check_header_validity(header) + name, value = header + self.headers[to_native_string(name)] = value + + def prepare_body(self, data, files, json=None): + """Prepares the given HTTP body data.""" + + # Check if file, fo, generator, iterator. + # If not, run through normal process. + + # Nottin' on you. 
+ body = None + content_type = None + + if not data and json is not None: + # urllib3 requires a bytes-like body. Python 2's json.dumps + # provides this natively, but Python 3 gives a Unicode string. + content_type = "application/json" + + try: + body = complexjson.dumps(json, allow_nan=False) + except ValueError as ve: + raise InvalidJSONError(ve, request=self) + + if not isinstance(body, bytes): + body = body.encode("utf-8") + + is_stream = all( + [ + hasattr(data, "__iter__"), + not isinstance(data, (basestring, list, tuple, Mapping)), + ] + ) + + if is_stream: + try: + length = super_len(data) + except (TypeError, AttributeError, UnsupportedOperation): + length = None + + body = data + + if getattr(body, "tell", None) is not None: + # Record the current file position before reading. + # This will allow us to rewind a file in the event + # of a redirect. + try: + self._body_position = body.tell() + except OSError: + # This differentiates from None, allowing us to catch + # a failed `tell()` later when trying to rewind the body + self._body_position = object() + + if files: + raise NotImplementedError( + "Streamed bodies and files are mutually exclusive." + ) + + if length: + self.headers["Content-Length"] = builtin_str(length) + else: + self.headers["Transfer-Encoding"] = "chunked" + else: + # Multi-part file uploads. + if files: + (body, content_type) = self._encode_files(files, data) + else: + if data: + body = self._encode_params(data) + if isinstance(data, basestring) or hasattr(data, "read"): + content_type = None + else: + content_type = "application/x-www-form-urlencoded" + + self.prepare_content_length(body) + + # Add content-type if it wasn't explicitly provided. + if content_type and ("content-type" not in self.headers): + self.headers["Content-Type"] = content_type + + self.body = body + + def prepare_content_length(self, body): + """Prepare Content-Length header based on request method and body""" + if body is not None: + length = super_len(body) + if length: + # If length exists, set it. Otherwise, we fallback + # to Transfer-Encoding: chunked. + self.headers["Content-Length"] = builtin_str(length) + elif ( + self.method not in ("GET", "HEAD") + and self.headers.get("Content-Length") is None + ): + # Set Content-Length to 0 for methods that can have a body + # but don't provide one. (i.e. not GET or HEAD) + self.headers["Content-Length"] = "0" + + def prepare_auth(self, auth, url=""): + """Prepares the given HTTP auth data.""" + + # If no Auth is explicitly provided, extract it from the URL first. + if auth is None: + url_auth = get_auth_from_url(self.url) + auth = url_auth if any(url_auth) else None + + if auth: + if isinstance(auth, tuple) and len(auth) == 2: + # special-case basic HTTP auth + auth = HTTPBasicAuth(*auth) + + # Allow auth to make its changes. + r = auth(self) + + # Update self to reflect the auth changes. + self.__dict__.update(r.__dict__) + + # Recompute Content-Length + self.prepare_content_length(self.body) + + def prepare_cookies(self, cookies): + """Prepares the given HTTP cookie data. + + This function eventually generates a ``Cookie`` header from the + given cookies using cookielib. Due to cookielib's design, the header + will not be regenerated if it already exists, meaning this function + can only be called once for the life of the + :class:`PreparedRequest ` object. Any subsequent calls + to ``prepare_cookies`` will have no actual effect, unless the "Cookie" + header is removed beforehand. 
+ """ + if isinstance(cookies, cookielib.CookieJar): + self._cookies = cookies + else: + self._cookies = cookiejar_from_dict(cookies) + + cookie_header = get_cookie_header(self._cookies, self) + if cookie_header is not None: + self.headers["Cookie"] = cookie_header + + def prepare_hooks(self, hooks): + """Prepares the given hooks.""" + # hooks can be passed as None to the prepare method and to this + # method. To prevent iterating over None, simply use an empty list + # if hooks is False-y + hooks = hooks or [] + for event in hooks: + self.register_hook(event, hooks[event]) + + +class Response: + """The :class:`Response ` object, which contains a + server's response to an HTTP request. + """ + + __attrs__ = [ + "_content", + "status_code", + "headers", + "url", + "history", + "encoding", + "reason", + "cookies", + "elapsed", + "request", + ] + + def __init__(self): + self._content = False + self._content_consumed = False + self._next = None + + #: Integer Code of responded HTTP Status, e.g. 404 or 200. + self.status_code = None + + #: Case-insensitive Dictionary of Response Headers. + #: For example, ``headers['content-encoding']`` will return the + #: value of a ``'Content-Encoding'`` response header. + self.headers = CaseInsensitiveDict() + + #: File-like object representation of response (for advanced usage). + #: Use of ``raw`` requires that ``stream=True`` be set on the request. + #: This requirement does not apply for use internally to Requests. + self.raw = None + + #: Final URL location of Response. + self.url = None + + #: Encoding to decode with when accessing r.text. + self.encoding = None + + #: A list of :class:`Response ` objects from + #: the history of the Request. Any redirect responses will end + #: up here. The list is sorted from the oldest to the most recent request. + self.history = [] + + #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". + self.reason = None + + #: A CookieJar of Cookies the server sent back. + self.cookies = cookiejar_from_dict({}) + + #: The amount of time elapsed between sending the request + #: and the arrival of the response (as a timedelta). + #: This property specifically measures the time taken between sending + #: the first byte of the request and finishing parsing the headers. It + #: is therefore unaffected by consuming the response content or the + #: value of the ``stream`` keyword argument. + self.elapsed = datetime.timedelta(0) + + #: The :class:`PreparedRequest ` object to which this + #: is a response. + self.request = None + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def __getstate__(self): + # Consume everything; accessing the content attribute makes + # sure the content has been fully read. + if not self._content_consumed: + self.content + + return {attr: getattr(self, attr, None) for attr in self.__attrs__} + + def __setstate__(self, state): + for name, value in state.items(): + setattr(self, name, value) + + # pickled objects do not have .raw + setattr(self, "_content_consumed", True) + setattr(self, "raw", None) + + def __repr__(self): + return f"" + + def __bool__(self): + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. 
+ """ + return self.ok + + def __nonzero__(self): + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + return self.ok + + def __iter__(self): + """Allows you to use a response as an iterator.""" + return self.iter_content(128) + + @property + def ok(self): + """Returns True if :attr:`status_code` is less than 400, False if not. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + try: + self.raise_for_status() + except HTTPError: + return False + return True + + @property + def is_redirect(self): + """True if this Response is a well-formed HTTP redirect that could have + been processed automatically (by :meth:`Session.resolve_redirects`). + """ + return "location" in self.headers and self.status_code in REDIRECT_STATI + + @property + def is_permanent_redirect(self): + """True if this Response one of the permanent versions of redirect.""" + return "location" in self.headers and self.status_code in ( + codes.moved_permanently, + codes.permanent_redirect, + ) + + @property + def next(self): + """Returns a PreparedRequest for the next request in a redirect chain, if there is one.""" + return self._next + + @property + def apparent_encoding(self): + """The apparent encoding, provided by the charset_normalizer or chardet libraries.""" + return chardet.detect(self.content)["encoding"] + + def iter_content(self, chunk_size=1, decode_unicode=False): + """Iterates over the response data. When stream=True is set on the + request, this avoids reading the content at once into memory for + large responses. The chunk size is the number of bytes it should + read into memory. This is not necessarily the length of each item + returned as decoding can take place. + + chunk_size must be of type int or None. A value of None will + function differently depending on the value of `stream`. + stream=True will read data as it arrives in whatever size the + chunks are received. If stream=False, data is returned as + a single chunk. + + If decode_unicode is True, content will be decoded using the best + available encoding based on the response. + """ + + def generate(): + # Special case for urllib3. + if hasattr(self.raw, "stream"): + try: + yield from self.raw.stream(chunk_size, decode_content=True) + except ProtocolError as e: + raise ChunkedEncodingError(e) + except DecodeError as e: + raise ContentDecodingError(e) + except ReadTimeoutError as e: + raise ConnectionError(e) + except SSLError as e: + raise RequestsSSLError(e) + else: + # Standard file-like object. + while True: + chunk = self.raw.read(chunk_size) + if not chunk: + break + yield chunk + + self._content_consumed = True + + if self._content_consumed and isinstance(self._content, bool): + raise StreamConsumedError() + elif chunk_size is not None and not isinstance(chunk_size, int): + raise TypeError( + f"chunk_size must be an int, it is instead a {type(chunk_size)}." 
+ ) + # simulate reading small chunks of the content + reused_chunks = iter_slices(self._content, chunk_size) + + stream_chunks = generate() + + chunks = reused_chunks if self._content_consumed else stream_chunks + + if decode_unicode: + chunks = stream_decode_response_unicode(chunks, self) + + return chunks + + def iter_lines( + self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None + ): + """Iterates over the response data, one line at a time. When + stream=True is set on the request, this avoids reading the + content at once into memory for large responses. + + .. note:: This method is not reentrant safe. + """ + + pending = None + + for chunk in self.iter_content( + chunk_size=chunk_size, decode_unicode=decode_unicode + ): + + if pending is not None: + chunk = pending + chunk + + if delimiter: + lines = chunk.split(delimiter) + else: + lines = chunk.splitlines() + + if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: + pending = lines.pop() + else: + pending = None + + yield from lines + + if pending is not None: + yield pending + + @property + def content(self): + """Content of the response, in bytes.""" + + if self._content is False: + # Read the contents. + if self._content_consumed: + raise RuntimeError("The content for this response was already consumed") + + if self.status_code == 0 or self.raw is None: + self._content = None + else: + self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b"" + + self._content_consumed = True + # don't need to release the connection; that's been handled by urllib3 + # since we exhausted the data. + return self._content + + @property + def text(self): + """Content of the response, in unicode. + + If Response.encoding is None, encoding will be guessed using + ``charset_normalizer`` or ``chardet``. + + The encoding of the response content is determined based solely on HTTP + headers, following RFC 2616 to the letter. If you can take advantage of + non-HTTP knowledge to make a better guess at the encoding, you should + set ``r.encoding`` appropriately before accessing this property. + """ + + # Try charset from content-type + content = None + encoding = self.encoding + + if not self.content: + return "" + + # Fallback to auto-detected encoding. + if self.encoding is None: + encoding = self.apparent_encoding + + # Decode unicode from given encoding. + try: + content = str(self.content, encoding, errors="replace") + except (LookupError, TypeError): + # A LookupError is raised if the encoding was not found which could + # indicate a misspelling or similar mistake. + # + # A TypeError can be raised if encoding is None + # + # So we try blindly encoding. + content = str(self.content, errors="replace") + + return content + + def json(self, **kwargs): + r"""Returns the json-encoded content of a response, if any. + + :param \*\*kwargs: Optional arguments that ``json.loads`` takes. + :raises requests.exceptions.JSONDecodeError: If the response body does not + contain valid json. + """ + + if not self.encoding and self.content and len(self.content) > 3: + # No encoding set. JSON RFC 4627 section 3 states we should expect + # UTF-8, -16 or -32. Detect which one to use; If the detection or + # decoding fails, fall back to `self.text` (using charset_normalizer to make + # a best guess). 
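+            # guess_json_utf inspects the byte-order mark and the null-byte
+            # pattern of the first four bytes to pick between UTF-8, UTF-16
+            # and UTF-32 variants.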
+ encoding = guess_json_utf(self.content) + if encoding is not None: + try: + return complexjson.loads(self.content.decode(encoding), **kwargs) + except UnicodeDecodeError: + # Wrong UTF codec detected; usually because it's not UTF-8 + # but some other 8-bit codec. This is an RFC violation, + # and the server didn't bother to tell us what codec *was* + # used. + pass + except JSONDecodeError as e: + raise RequestsJSONDecodeError(e.msg, e.doc, e.pos) + + try: + return complexjson.loads(self.text, **kwargs) + except JSONDecodeError as e: + # Catch JSON-related errors and raise as requests.JSONDecodeError + # This aliases json.JSONDecodeError and simplejson.JSONDecodeError + raise RequestsJSONDecodeError(e.msg, e.doc, e.pos) + + @property + def links(self): + """Returns the parsed header links of the response, if any.""" + + header = self.headers.get("link") + + resolved_links = {} + + if header: + links = parse_header_links(header) + + for link in links: + key = link.get("rel") or link.get("url") + resolved_links[key] = link + + return resolved_links + + def raise_for_status(self): + """Raises :class:`HTTPError`, if one occurred.""" + + http_error_msg = "" + if isinstance(self.reason, bytes): + # We attempt to decode utf-8 first because some servers + # choose to localize their reason strings. If the string + # isn't utf-8, we fall back to iso-8859-1 for all other + # encodings. (See PR #3538) + try: + reason = self.reason.decode("utf-8") + except UnicodeDecodeError: + reason = self.reason.decode("iso-8859-1") + else: + reason = self.reason + + if 400 <= self.status_code < 500: + http_error_msg = ( + f"{self.status_code} Client Error: {reason} for url: {self.url}" + ) + + elif 500 <= self.status_code < 600: + http_error_msg = ( + f"{self.status_code} Server Error: {reason} for url: {self.url}" + ) + + if http_error_msg: + raise HTTPError(http_error_msg, response=self) + + def close(self): + """Releases the connection back to the pool. Once this method has been + called the underlying ``raw`` object must not be accessed again. + + *Note: Should not normally need to be called explicitly.* + """ + if not self._content_consumed: + self.raw.close() + + release_conn = getattr(self.raw, "release_conn", None) + if release_conn is not None: + release_conn() diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/requests/packages.py b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/packages.py new file mode 100644 index 0000000..9582fa7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/packages.py @@ -0,0 +1,16 @@ +import sys + +# This code exists for backwards compatibility reasons. +# I don't like it either. Just look the other way. :) + +for package in ('urllib3', 'idna', 'chardet'): + vendored_package = "pip._vendor." + package + locals()[package] = __import__(vendored_package) + # This traversal is apparently necessary such that the identities are + # preserved (requests.packages.urllib3.* is urllib3.*) + for mod in list(sys.modules): + if mod == vendored_package or mod.startswith(vendored_package + '.'): + unprefixed_mod = mod[len("pip._vendor."):] + sys.modules['pip._vendor.requests.packages.' + unprefixed_mod] = sys.modules[mod] + +# Kinda cool, though, right? 
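The loop above registers alias entries in ``sys.modules``; a minimal doctest-style sketch of the identity it preserves (assuming the vendored ``pip._vendor`` import paths used in this file):

    >>> import sys
    >>> from pip._vendor.requests import packages  # runs the aliasing loop
    >>> sys.modules["pip._vendor.requests.packages.urllib3"] is sys.modules[
    ...     "pip._vendor.urllib3"
    ... ]
    True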
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/requests/sessions.py b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/sessions.py new file mode 100644 index 0000000..dbcf2a7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/sessions.py @@ -0,0 +1,833 @@ +""" +requests.sessions +~~~~~~~~~~~~~~~~~ + +This module provides a Session object to manage and persist settings across +requests (cookies, auth, proxies). +""" +import os +import sys +import time +from collections import OrderedDict +from datetime import timedelta + +from ._internal_utils import to_native_string +from .adapters import HTTPAdapter +from .auth import _basic_auth_str +from .compat import Mapping, cookielib, urljoin, urlparse +from .cookies import ( + RequestsCookieJar, + cookiejar_from_dict, + extract_cookies_to_jar, + merge_cookies, +) +from .exceptions import ( + ChunkedEncodingError, + ContentDecodingError, + InvalidSchema, + TooManyRedirects, +) +from .hooks import default_hooks, dispatch_hook + +# formerly defined here, reexposed here for backward compatibility +from .models import ( # noqa: F401 + DEFAULT_REDIRECT_LIMIT, + REDIRECT_STATI, + PreparedRequest, + Request, +) +from .status_codes import codes +from .structures import CaseInsensitiveDict +from .utils import ( # noqa: F401 + DEFAULT_PORTS, + default_headers, + get_auth_from_url, + get_environ_proxies, + get_netrc_auth, + requote_uri, + resolve_proxies, + rewind_body, + should_bypass_proxies, + to_key_val_list, +) + +# Preferred clock, based on which one is more accurate on a given system. +if sys.platform == "win32": + preferred_clock = time.perf_counter +else: + preferred_clock = time.time + + +def merge_setting(request_setting, session_setting, dict_class=OrderedDict): + """Determines appropriate setting for a given request, taking into account + the explicit setting on that request, and the setting in the session. If a + setting is a dictionary, they will be merged together using `dict_class` + """ + + if session_setting is None: + return request_setting + + if request_setting is None: + return session_setting + + # Bypass if not a dictionary (e.g. verify) + if not ( + isinstance(session_setting, Mapping) and isinstance(request_setting, Mapping) + ): + return request_setting + + merged_setting = dict_class(to_key_val_list(session_setting)) + merged_setting.update(to_key_val_list(request_setting)) + + # Remove keys that are set to None. Extract keys first to avoid altering + # the dictionary during iteration. + none_keys = [k for (k, v) in merged_setting.items() if v is None] + for key in none_keys: + del merged_setting[key] + + return merged_setting + + +def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): + """Properly merges both requests and session hooks. + + This is necessary because when request_hooks == {'response': []}, the + merge breaks Session hooks entirely. + """ + if session_hooks is None or session_hooks.get("response") == []: + return request_hooks + + if request_hooks is None or request_hooks.get("response") == []: + return session_hooks + + return merge_setting(request_hooks, session_hooks, dict_class) + + +class SessionRedirectMixin: + def get_redirect_target(self, resp): + """Receives a Response. Returns a redirect URI or ``None``""" + # Due to the nature of how requests processes redirects this method will + # be called at least once upon the original response and at least twice + # on each subsequent redirect response (if any). 
+ # If a custom mixin is used to handle this logic, it may be advantageous + # to cache the redirect location onto the response object as a private + # attribute. + if resp.is_redirect: + location = resp.headers["location"] + # Currently the underlying http module on py3 decode headers + # in latin1, but empirical evidence suggests that latin1 is very + # rarely used with non-ASCII characters in HTTP headers. + # It is more likely to get UTF8 header rather than latin1. + # This causes incorrect handling of UTF8 encoded location headers. + # To solve this, we re-encode the location in latin1. + location = location.encode("latin1") + return to_native_string(location, "utf8") + return None + + def should_strip_auth(self, old_url, new_url): + """Decide whether Authorization header should be removed when redirecting""" + old_parsed = urlparse(old_url) + new_parsed = urlparse(new_url) + if old_parsed.hostname != new_parsed.hostname: + return True + # Special case: allow http -> https redirect when using the standard + # ports. This isn't specified by RFC 7235, but is kept to avoid + # breaking backwards compatibility with older versions of requests + # that allowed any redirects on the same host. + if ( + old_parsed.scheme == "http" + and old_parsed.port in (80, None) + and new_parsed.scheme == "https" + and new_parsed.port in (443, None) + ): + return False + + # Handle default port usage corresponding to scheme. + changed_port = old_parsed.port != new_parsed.port + changed_scheme = old_parsed.scheme != new_parsed.scheme + default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None) + if ( + not changed_scheme + and old_parsed.port in default_port + and new_parsed.port in default_port + ): + return False + + # Standard case: root URI must match + return changed_port or changed_scheme + + def resolve_redirects( + self, + resp, + req, + stream=False, + timeout=None, + verify=True, + cert=None, + proxies=None, + yield_requests=False, + **adapter_kwargs, + ): + """Receives a Response. Returns a generator of Responses or Requests.""" + + hist = [] # keep track of history + + url = self.get_redirect_target(resp) + previous_fragment = urlparse(req.url).fragment + while url: + prepared_request = req.copy() + + # Update history and keep track of redirects. + # resp.history must ignore the original request in this loop + hist.append(resp) + resp.history = hist[1:] + + try: + resp.content # Consume socket so it can be released + except (ChunkedEncodingError, ContentDecodingError, RuntimeError): + resp.raw.read(decode_content=False) + + if len(resp.history) >= self.max_redirects: + raise TooManyRedirects( + f"Exceeded {self.max_redirects} redirects.", response=resp + ) + + # Release the connection back into the pool. + resp.close() + + # Handle redirection without scheme (see: RFC 1808 Section 4) + if url.startswith("//"): + parsed_rurl = urlparse(resp.url) + url = ":".join([to_native_string(parsed_rurl.scheme), url]) + + # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2) + parsed = urlparse(url) + if parsed.fragment == "" and previous_fragment: + parsed = parsed._replace(fragment=previous_fragment) + elif parsed.fragment: + previous_fragment = parsed.fragment + url = parsed.geturl() + + # Facilitate relative 'location' headers, as allowed by RFC 7231. + # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') + # Compliant with RFC3986, we percent encode the url. 
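+            # urljoin resolves a relative Location against the URL that issued
+            # the redirect, and requote_uri re-applies percent-encoding without
+            # double-encoding sequences that are already escaped.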
+ if not parsed.netloc: + url = urljoin(resp.url, requote_uri(url)) + else: + url = requote_uri(url) + + prepared_request.url = to_native_string(url) + + self.rebuild_method(prepared_request, resp) + + # https://github.com/psf/requests/issues/1084 + if resp.status_code not in ( + codes.temporary_redirect, + codes.permanent_redirect, + ): + # https://github.com/psf/requests/issues/3490 + purged_headers = ("Content-Length", "Content-Type", "Transfer-Encoding") + for header in purged_headers: + prepared_request.headers.pop(header, None) + prepared_request.body = None + + headers = prepared_request.headers + headers.pop("Cookie", None) + + # Extract any cookies sent on the response to the cookiejar + # in the new request. Because we've mutated our copied prepared + # request, use the old one that we haven't yet touched. + extract_cookies_to_jar(prepared_request._cookies, req, resp.raw) + merge_cookies(prepared_request._cookies, self.cookies) + prepared_request.prepare_cookies(prepared_request._cookies) + + # Rebuild auth and proxy information. + proxies = self.rebuild_proxies(prepared_request, proxies) + self.rebuild_auth(prepared_request, resp) + + # A failed tell() sets `_body_position` to `object()`. This non-None + # value ensures `rewindable` will be True, allowing us to raise an + # UnrewindableBodyError, instead of hanging the connection. + rewindable = prepared_request._body_position is not None and ( + "Content-Length" in headers or "Transfer-Encoding" in headers + ) + + # Attempt to rewind consumed file-like object. + if rewindable: + rewind_body(prepared_request) + + # Override the original request. + req = prepared_request + + if yield_requests: + yield req + else: + + resp = self.send( + req, + stream=stream, + timeout=timeout, + verify=verify, + cert=cert, + proxies=proxies, + allow_redirects=False, + **adapter_kwargs, + ) + + extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) + + # extract redirect url, if any, for the next loop + url = self.get_redirect_target(resp) + yield resp + + def rebuild_auth(self, prepared_request, response): + """When being redirected we may want to strip authentication from the + request to avoid leaking credentials. This method intelligently removes + and reapplies authentication where possible to avoid credential loss. + """ + headers = prepared_request.headers + url = prepared_request.url + + if "Authorization" in headers and self.should_strip_auth( + response.request.url, url + ): + # If we get redirected to a new host, we should strip out any + # authentication headers. + del headers["Authorization"] + + # .netrc might have more auth for us on our new host. + new_auth = get_netrc_auth(url) if self.trust_env else None + if new_auth is not None: + prepared_request.prepare_auth(new_auth) + + def rebuild_proxies(self, prepared_request, proxies): + """This method re-evaluates the proxy configuration by considering the + environment variables. If we are redirected to a URL covered by + NO_PROXY, we strip the proxy configuration. Otherwise, we set missing + proxy keys for this URL (in case they were stripped by a previous + redirect). + + This method also replaces the Proxy-Authorization header where + necessary. 
+ + :rtype: dict + """ + headers = prepared_request.headers + scheme = urlparse(prepared_request.url).scheme + new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env) + + if "Proxy-Authorization" in headers: + del headers["Proxy-Authorization"] + + try: + username, password = get_auth_from_url(new_proxies[scheme]) + except KeyError: + username, password = None, None + + # urllib3 handles proxy authorization for us in the standard adapter. + # Avoid appending this to TLS tunneled requests where it may be leaked. + if not scheme.startswith('https') and username and password: + headers["Proxy-Authorization"] = _basic_auth_str(username, password) + + return new_proxies + + def rebuild_method(self, prepared_request, response): + """When being redirected we may want to change the method of the request + based on certain specs or browser behavior. + """ + method = prepared_request.method + + # https://tools.ietf.org/html/rfc7231#section-6.4.4 + if response.status_code == codes.see_other and method != "HEAD": + method = "GET" + + # Do what the browsers do, despite standards... + # First, turn 302s into GETs. + if response.status_code == codes.found and method != "HEAD": + method = "GET" + + # Second, if a POST is responded to with a 301, turn it into a GET. + # This bizarre behaviour is explained in Issue 1704. + if response.status_code == codes.moved and method == "POST": + method = "GET" + + prepared_request.method = method + + +class Session(SessionRedirectMixin): + """A Requests session. + + Provides cookie persistence, connection-pooling, and configuration. + + Basic Usage:: + + >>> import requests + >>> s = requests.Session() + >>> s.get('https://httpbin.org/get') + + + Or as a context manager:: + + >>> with requests.Session() as s: + ... s.get('https://httpbin.org/get') + + """ + + __attrs__ = [ + "headers", + "cookies", + "auth", + "proxies", + "hooks", + "params", + "verify", + "cert", + "adapters", + "stream", + "trust_env", + "max_redirects", + ] + + def __init__(self): + + #: A case-insensitive dictionary of headers to be sent on each + #: :class:`Request ` sent from this + #: :class:`Session `. + self.headers = default_headers() + + #: Default Authentication tuple or object to attach to + #: :class:`Request `. + self.auth = None + + #: Dictionary mapping protocol or protocol and host to the URL of the proxy + #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to + #: be used on each :class:`Request `. + self.proxies = {} + + #: Event-handling hooks. + self.hooks = default_hooks() + + #: Dictionary of querystring data to attach to each + #: :class:`Request `. The dictionary values may be lists for + #: representing multivalued query parameters. + self.params = {} + + #: Stream response content default. + self.stream = False + + #: SSL Verification default. + #: Defaults to `True`, requiring requests to verify the TLS certificate at the + #: remote end. + #: If verify is set to `False`, requests will accept any TLS certificate + #: presented by the server, and will ignore hostname mismatches and/or + #: expired certificates, which will make your application vulnerable to + #: man-in-the-middle (MitM) attacks. + #: Only set this to `False` for testing. + self.verify = True + + #: SSL client certificate default, if String, path to ssl client + #: cert file (.pem). If Tuple, ('cert', 'key') pair. + self.cert = None + + #: Maximum number of redirects allowed. If the request exceeds this + #: limit, a :class:`TooManyRedirects` exception is raised. 
+ #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is + #: 30. + self.max_redirects = DEFAULT_REDIRECT_LIMIT + + #: Trust environment settings for proxy configuration, default + #: authentication and similar. + self.trust_env = True + + #: A CookieJar containing all currently outstanding cookies set on this + #: session. By default it is a + #: :class:`RequestsCookieJar `, but + #: may be any other ``cookielib.CookieJar`` compatible object. + self.cookies = cookiejar_from_dict({}) + + # Default connection adapters. + self.adapters = OrderedDict() + self.mount("https://", HTTPAdapter()) + self.mount("http://", HTTPAdapter()) + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def prepare_request(self, request): + """Constructs a :class:`PreparedRequest ` for + transmission and returns it. The :class:`PreparedRequest` has settings + merged from the :class:`Request ` instance and those of the + :class:`Session`. + + :param request: :class:`Request` instance to prepare with this + session's settings. + :rtype: requests.PreparedRequest + """ + cookies = request.cookies or {} + + # Bootstrap CookieJar. + if not isinstance(cookies, cookielib.CookieJar): + cookies = cookiejar_from_dict(cookies) + + # Merge with session cookies + merged_cookies = merge_cookies( + merge_cookies(RequestsCookieJar(), self.cookies), cookies + ) + + # Set environment's basic authentication if not explicitly set. + auth = request.auth + if self.trust_env and not auth and not self.auth: + auth = get_netrc_auth(request.url) + + p = PreparedRequest() + p.prepare( + method=request.method.upper(), + url=request.url, + files=request.files, + data=request.data, + json=request.json, + headers=merge_setting( + request.headers, self.headers, dict_class=CaseInsensitiveDict + ), + params=merge_setting(request.params, self.params), + auth=merge_setting(auth, self.auth), + cookies=merged_cookies, + hooks=merge_hooks(request.hooks, self.hooks), + ) + return p + + def request( + self, + method, + url, + params=None, + data=None, + headers=None, + cookies=None, + files=None, + auth=None, + timeout=None, + allow_redirects=True, + proxies=None, + hooks=None, + stream=None, + verify=None, + cert=None, + json=None, + ): + """Constructs a :class:`Request `, prepares it and sends it. + Returns :class:`Response ` object. + + :param method: method for the new :class:`Request` object. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary or bytes to be sent in the query + string for the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) json to send in the body of the + :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the + :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the + :class:`Request`. + :param files: (optional) Dictionary of ``'filename': file-like-objects`` + for multipart encoding upload. + :param auth: (optional) Auth tuple or callable to enable + Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) ` tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Set to True by default. 
+ :type allow_redirects: bool + :param proxies: (optional) Dictionary mapping protocol or protocol and + hostname to the URL of the proxy. + :param stream: (optional) whether to immediately download the response + content. Defaults to ``False``. + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use. Defaults to ``True``. When set to + ``False``, requests will accept any TLS certificate presented by + the server, and will ignore hostname mismatches and/or expired + certificates, which will make your application vulnerable to + man-in-the-middle (MitM) attacks. Setting verify to ``False`` + may be useful during local development or testing. + :param cert: (optional) if String, path to ssl client cert file (.pem). + If Tuple, ('cert', 'key') pair. + :rtype: requests.Response + """ + # Create the Request. + req = Request( + method=method.upper(), + url=url, + headers=headers, + files=files, + data=data or {}, + json=json, + params=params or {}, + auth=auth, + cookies=cookies, + hooks=hooks, + ) + prep = self.prepare_request(req) + + proxies = proxies or {} + + settings = self.merge_environment_settings( + prep.url, proxies, stream, verify, cert + ) + + # Send the request. + send_kwargs = { + "timeout": timeout, + "allow_redirects": allow_redirects, + } + send_kwargs.update(settings) + resp = self.send(prep, **send_kwargs) + + return resp + + def get(self, url, **kwargs): + r"""Sends a GET request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", True) + return self.request("GET", url, **kwargs) + + def options(self, url, **kwargs): + r"""Sends a OPTIONS request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", True) + return self.request("OPTIONS", url, **kwargs) + + def head(self, url, **kwargs): + r"""Sends a HEAD request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", False) + return self.request("HEAD", url, **kwargs) + + def post(self, url, data=None, json=None, **kwargs): + r"""Sends a POST request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) json to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request("POST", url, data=data, json=json, **kwargs) + + def put(self, url, data=None, **kwargs): + r"""Sends a PUT request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. 
+ :rtype: requests.Response + """ + + return self.request("PUT", url, data=data, **kwargs) + + def patch(self, url, data=None, **kwargs): + r"""Sends a PATCH request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request("PATCH", url, data=data, **kwargs) + + def delete(self, url, **kwargs): + r"""Sends a DELETE request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request("DELETE", url, **kwargs) + + def send(self, request, **kwargs): + """Send a given PreparedRequest. + + :rtype: requests.Response + """ + # Set defaults that the hooks can utilize to ensure they always have + # the correct parameters to reproduce the previous request. + kwargs.setdefault("stream", self.stream) + kwargs.setdefault("verify", self.verify) + kwargs.setdefault("cert", self.cert) + if "proxies" not in kwargs: + kwargs["proxies"] = resolve_proxies(request, self.proxies, self.trust_env) + + # It's possible that users might accidentally send a Request object. + # Guard against that specific failure case. + if isinstance(request, Request): + raise ValueError("You can only send PreparedRequests.") + + # Set up variables needed for resolve_redirects and dispatching of hooks + allow_redirects = kwargs.pop("allow_redirects", True) + stream = kwargs.get("stream") + hooks = request.hooks + + # Get the appropriate adapter to use + adapter = self.get_adapter(url=request.url) + + # Start time (approximately) of the request + start = preferred_clock() + + # Send the request + r = adapter.send(request, **kwargs) + + # Total elapsed time of the request (approximately) + elapsed = preferred_clock() - start + r.elapsed = timedelta(seconds=elapsed) + + # Response manipulation hooks + r = dispatch_hook("response", hooks, r, **kwargs) + + # Persist cookies + if r.history: + + # If the hooks create history then we want those cookies too + for resp in r.history: + extract_cookies_to_jar(self.cookies, resp.request, resp.raw) + + extract_cookies_to_jar(self.cookies, request, r.raw) + + # Resolve redirects if allowed. + if allow_redirects: + # Redirect resolving generator. + gen = self.resolve_redirects(r, request, **kwargs) + history = [resp for resp in gen] + else: + history = [] + + # Shuffle things around if there's history. + if history: + # Insert the first (original) request at the start + history.insert(0, r) + # Get the last request made + r = history.pop() + r.history = history + + # If redirects aren't being followed, store the response on the Request for Response.next(). + if not allow_redirects: + try: + r._next = next( + self.resolve_redirects(r, request, yield_requests=True, **kwargs) + ) + except StopIteration: + pass + + if not stream: + r.content + + return r + + def merge_environment_settings(self, url, proxies, stream, verify, cert): + """ + Check the environment and merge it with some settings. + + :rtype: dict + """ + # Gather clues from the surrounding environment. + if self.trust_env: + # Set environment's proxies. 
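+            # get_environ_proxies picks up proxy settings from the environment
+            # (e.g. HTTP_PROXY / HTTPS_PROXY / NO_PROXY); setdefault means
+            # proxies passed explicitly for this request always win.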
+ no_proxy = proxies.get("no_proxy") if proxies is not None else None + env_proxies = get_environ_proxies(url, no_proxy=no_proxy) + for (k, v) in env_proxies.items(): + proxies.setdefault(k, v) + + # Look for requests environment configuration + # and be compatible with cURL. + if verify is True or verify is None: + verify = ( + os.environ.get("REQUESTS_CA_BUNDLE") + or os.environ.get("CURL_CA_BUNDLE") + or verify + ) + + # Merge all the kwargs. + proxies = merge_setting(proxies, self.proxies) + stream = merge_setting(stream, self.stream) + verify = merge_setting(verify, self.verify) + cert = merge_setting(cert, self.cert) + + return {"proxies": proxies, "stream": stream, "verify": verify, "cert": cert} + + def get_adapter(self, url): + """ + Returns the appropriate connection adapter for the given URL. + + :rtype: requests.adapters.BaseAdapter + """ + for (prefix, adapter) in self.adapters.items(): + + if url.lower().startswith(prefix.lower()): + return adapter + + # Nothing matches :-/ + raise InvalidSchema(f"No connection adapters were found for {url!r}") + + def close(self): + """Closes all adapters and as such the session""" + for v in self.adapters.values(): + v.close() + + def mount(self, prefix, adapter): + """Registers a connection adapter to a prefix. + + Adapters are sorted in descending order by prefix length. + """ + self.adapters[prefix] = adapter + keys_to_move = [k for k in self.adapters if len(k) < len(prefix)] + + for key in keys_to_move: + self.adapters[key] = self.adapters.pop(key) + + def __getstate__(self): + state = {attr: getattr(self, attr, None) for attr in self.__attrs__} + return state + + def __setstate__(self, state): + for attr, value in state.items(): + setattr(self, attr, value) + + +def session(): + """ + Returns a :class:`Session` for context-management. + + .. deprecated:: 1.0.0 + + This method has been deprecated since version 1.0.0 and is only kept for + backwards compatibility. New code should use :class:`~requests.sessions.Session` + to create a session. This may be removed at a future date. + + :rtype: Session + """ + return Session() diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/requests/status_codes.py b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/status_codes.py new file mode 100644 index 0000000..4bd072b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/status_codes.py @@ -0,0 +1,128 @@ +r""" +The ``codes`` object defines a mapping from common names for HTTP statuses +to their numerical codes, accessible either as attributes or as dictionary +items. + +Example:: + + >>> import requests + >>> requests.codes['temporary_redirect'] + 307 + >>> requests.codes.teapot + 418 + >>> requests.codes['\o/'] + 200 + +Some codes have multiple names, and both upper- and lower-case versions of +the names are allowed. For example, ``codes.ok``, ``codes.OK``, and +``codes.okay`` all correspond to the HTTP status code 200. +""" + +from .structures import LookupDict + +_codes = { + # Informational. 
+ 100: ("continue",), + 101: ("switching_protocols",), + 102: ("processing",), + 103: ("checkpoint",), + 122: ("uri_too_long", "request_uri_too_long"), + 200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", "✓"), + 201: ("created",), + 202: ("accepted",), + 203: ("non_authoritative_info", "non_authoritative_information"), + 204: ("no_content",), + 205: ("reset_content", "reset"), + 206: ("partial_content", "partial"), + 207: ("multi_status", "multiple_status", "multi_stati", "multiple_stati"), + 208: ("already_reported",), + 226: ("im_used",), + # Redirection. + 300: ("multiple_choices",), + 301: ("moved_permanently", "moved", "\\o-"), + 302: ("found",), + 303: ("see_other", "other"), + 304: ("not_modified",), + 305: ("use_proxy",), + 306: ("switch_proxy",), + 307: ("temporary_redirect", "temporary_moved", "temporary"), + 308: ( + "permanent_redirect", + "resume_incomplete", + "resume", + ), # "resume" and "resume_incomplete" to be removed in 3.0 + # Client Error. + 400: ("bad_request", "bad"), + 401: ("unauthorized",), + 402: ("payment_required", "payment"), + 403: ("forbidden",), + 404: ("not_found", "-o-"), + 405: ("method_not_allowed", "not_allowed"), + 406: ("not_acceptable",), + 407: ("proxy_authentication_required", "proxy_auth", "proxy_authentication"), + 408: ("request_timeout", "timeout"), + 409: ("conflict",), + 410: ("gone",), + 411: ("length_required",), + 412: ("precondition_failed", "precondition"), + 413: ("request_entity_too_large",), + 414: ("request_uri_too_large",), + 415: ("unsupported_media_type", "unsupported_media", "media_type"), + 416: ( + "requested_range_not_satisfiable", + "requested_range", + "range_not_satisfiable", + ), + 417: ("expectation_failed",), + 418: ("im_a_teapot", "teapot", "i_am_a_teapot"), + 421: ("misdirected_request",), + 422: ("unprocessable_entity", "unprocessable"), + 423: ("locked",), + 424: ("failed_dependency", "dependency"), + 425: ("unordered_collection", "unordered"), + 426: ("upgrade_required", "upgrade"), + 428: ("precondition_required", "precondition"), + 429: ("too_many_requests", "too_many"), + 431: ("header_fields_too_large", "fields_too_large"), + 444: ("no_response", "none"), + 449: ("retry_with", "retry"), + 450: ("blocked_by_windows_parental_controls", "parental_controls"), + 451: ("unavailable_for_legal_reasons", "legal_reasons"), + 499: ("client_closed_request",), + # Server Error. 
+ 500: ("internal_server_error", "server_error", "/o\\", "✗"), + 501: ("not_implemented",), + 502: ("bad_gateway",), + 503: ("service_unavailable", "unavailable"), + 504: ("gateway_timeout",), + 505: ("http_version_not_supported", "http_version"), + 506: ("variant_also_negotiates",), + 507: ("insufficient_storage",), + 509: ("bandwidth_limit_exceeded", "bandwidth"), + 510: ("not_extended",), + 511: ("network_authentication_required", "network_auth", "network_authentication"), +} + +codes = LookupDict(name="status_codes") + + +def _init(): + for code, titles in _codes.items(): + for title in titles: + setattr(codes, title, code) + if not title.startswith(("\\", "/")): + setattr(codes, title.upper(), code) + + def doc(code): + names = ", ".join(f"``{n}``" for n in _codes[code]) + return "* %d: %s" % (code, names) + + global __doc__ + __doc__ = ( + __doc__ + "\n" + "\n".join(doc(code) for code in sorted(_codes)) + if __doc__ is not None + else None + ) + + +_init() diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/requests/structures.py b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/structures.py new file mode 100644 index 0000000..188e13e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/structures.py @@ -0,0 +1,99 @@ +""" +requests.structures +~~~~~~~~~~~~~~~~~~~ + +Data structures that power Requests. +""" + +from collections import OrderedDict + +from .compat import Mapping, MutableMapping + + +class CaseInsensitiveDict(MutableMapping): + """A case-insensitive ``dict``-like object. + + Implements all methods and operations of + ``MutableMapping`` as well as dict's ``copy``. Also + provides ``lower_items``. + + All keys are expected to be strings. The structure remembers the + case of the last key to be set, and ``iter(instance)``, + ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` + will contain case-sensitive keys. However, querying and contains + testing is case insensitive:: + + cid = CaseInsensitiveDict() + cid['Accept'] = 'application/json' + cid['aCCEPT'] == 'application/json' # True + list(cid) == ['Accept'] # True + + For example, ``headers['content-encoding']`` will return the + value of a ``'Content-Encoding'`` response header, regardless + of how the header name was originally stored. + + If the constructor, ``.update``, or equality comparison + operations are given keys that have equal ``.lower()``s, the + behavior is undefined. + """ + + def __init__(self, data=None, **kwargs): + self._store = OrderedDict() + if data is None: + data = {} + self.update(data, **kwargs) + + def __setitem__(self, key, value): + # Use the lowercased key for lookups, but store the actual + # key alongside the value. 
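+        # e.g. cid["Accept"] = "application/json" stores the tuple
+        # ("Accept", "application/json") under the lowercase key "accept".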
+ self._store[key.lower()] = (key, value) + + def __getitem__(self, key): + return self._store[key.lower()][1] + + def __delitem__(self, key): + del self._store[key.lower()] + + def __iter__(self): + return (casedkey for casedkey, mappedvalue in self._store.values()) + + def __len__(self): + return len(self._store) + + def lower_items(self): + """Like iteritems(), but with all lowercase keys.""" + return ((lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items()) + + def __eq__(self, other): + if isinstance(other, Mapping): + other = CaseInsensitiveDict(other) + else: + return NotImplemented + # Compare insensitively + return dict(self.lower_items()) == dict(other.lower_items()) + + # Copy is required + def copy(self): + return CaseInsensitiveDict(self._store.values()) + + def __repr__(self): + return str(dict(self.items())) + + +class LookupDict(dict): + """Dictionary lookup object.""" + + def __init__(self, name=None): + self.name = name + super().__init__() + + def __repr__(self): + return f"" + + def __getitem__(self, key): + # We allow fall-through here, so values default to None + + return self.__dict__.get(key, None) + + def get(self, key, default=None): + return self.__dict__.get(key, default) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/requests/utils.py b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/utils.py new file mode 100644 index 0000000..6adec33 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/requests/utils.py @@ -0,0 +1,1088 @@ +""" +requests.utils +~~~~~~~~~~~~~~ + +This module provides utility functions that are used within Requests +that are also useful for external consumption. +""" + +import codecs +import contextlib +import io +import os +import re +import socket +import struct +import sys +import tempfile +import warnings +import zipfile +from collections import OrderedDict + +from pip._vendor.urllib3.util import make_headers, parse_url + +from . import certs +from .__version__ import __version__ + +# to_native_string is unused here, but imported here for backwards compatibility +from ._internal_utils import ( # noqa: F401 + _HEADER_VALIDATORS_BYTE, + _HEADER_VALIDATORS_STR, + HEADER_VALIDATORS, + to_native_string, +) +from .compat import ( + Mapping, + basestring, + bytes, + getproxies, + getproxies_environment, + integer_types, +) +from .compat import parse_http_list as _parse_list_header +from .compat import ( + proxy_bypass, + proxy_bypass_environment, + quote, + str, + unquote, + urlparse, + urlunparse, +) +from .cookies import cookiejar_from_dict +from .exceptions import ( + FileModeWarning, + InvalidHeader, + InvalidURL, + UnrewindableBodyError, +) +from .structures import CaseInsensitiveDict + +NETRC_FILES = (".netrc", "_netrc") + +DEFAULT_CA_BUNDLE_PATH = certs.where() + +DEFAULT_PORTS = {"http": 80, "https": 443} + +# Ensure that ', ' is used to preserve previous delimiter behavior. 
+DEFAULT_ACCEPT_ENCODING = ", ".join( + re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"]) +) + + +if sys.platform == "win32": + # provide a proxy_bypass version on Windows without DNS lookups + + def proxy_bypass_registry(host): + try: + import winreg + except ImportError: + return False + + try: + internetSettings = winreg.OpenKey( + winreg.HKEY_CURRENT_USER, + r"Software\Microsoft\Windows\CurrentVersion\Internet Settings", + ) + # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it + proxyEnable = int(winreg.QueryValueEx(internetSettings, "ProxyEnable")[0]) + # ProxyOverride is almost always a string + proxyOverride = winreg.QueryValueEx(internetSettings, "ProxyOverride")[0] + except (OSError, ValueError): + return False + if not proxyEnable or not proxyOverride: + return False + + # make a check value list from the registry entry: replace the + # '' string by the localhost entry and the corresponding + # canonical entry. + proxyOverride = proxyOverride.split(";") + # now check if we match one of the registry values. + for test in proxyOverride: + if test == "": + if "." not in host: + return True + test = test.replace(".", r"\.") # mask dots + test = test.replace("*", r".*") # change glob sequence + test = test.replace("?", r".") # change glob char + if re.match(test, host, re.I): + return True + return False + + def proxy_bypass(host): # noqa + """Return True, if the host should be bypassed. + + Checks proxy settings gathered from the environment, if specified, + or the registry. + """ + if getproxies_environment(): + return proxy_bypass_environment(host) + else: + return proxy_bypass_registry(host) + + +def dict_to_sequence(d): + """Returns an internal sequence dictionary update.""" + + if hasattr(d, "items"): + d = d.items() + + return d + + +def super_len(o): + total_length = None + current_position = 0 + + if hasattr(o, "__len__"): + total_length = len(o) + + elif hasattr(o, "len"): + total_length = o.len + + elif hasattr(o, "fileno"): + try: + fileno = o.fileno() + except (io.UnsupportedOperation, AttributeError): + # AttributeError is a surprising exception, seeing as how we've just checked + # that `hasattr(o, 'fileno')`. It happens for objects obtained via + # `Tarfile.extractfile()`, per issue 5229. + pass + else: + total_length = os.fstat(fileno).st_size + + # Having used fstat to determine the file length, we need to + # confirm that this file was opened up in binary mode. + if "b" not in o.mode: + warnings.warn( + ( + "Requests has determined the content-length for this " + "request using the binary size of the file: however, the " + "file has been opened in text mode (i.e. without the 'b' " + "flag in the mode). This may lead to an incorrect " + "content-length. In Requests 3.0, support will be removed " + "for files in text mode." + ), + FileModeWarning, + ) + + if hasattr(o, "tell"): + try: + current_position = o.tell() + except OSError: + # This can happen in some weird situations, such as when the file + # is actually a special file descriptor like stdin. In this + # instance, we don't know what the length is, so set it to zero and + # let requests chunk it instead. 
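+            # Treating the stream as fully consumed makes the computed
+            # remaining length 0, so callers fall back to chunked transfer.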
+ if total_length is not None: + current_position = total_length + else: + if hasattr(o, "seek") and total_length is None: + # StringIO and BytesIO have seek but no usable fileno + try: + # seek to end of file + o.seek(0, 2) + total_length = o.tell() + + # seek back to current position to support + # partially read file-like objects + o.seek(current_position or 0) + except OSError: + total_length = 0 + + if total_length is None: + total_length = 0 + + return max(0, total_length - current_position) + + +def get_netrc_auth(url, raise_errors=False): + """Returns the Requests tuple auth for a given url from netrc.""" + + netrc_file = os.environ.get("NETRC") + if netrc_file is not None: + netrc_locations = (netrc_file,) + else: + netrc_locations = (f"~/{f}" for f in NETRC_FILES) + + try: + from netrc import NetrcParseError, netrc + + netrc_path = None + + for f in netrc_locations: + try: + loc = os.path.expanduser(f) + except KeyError: + # os.path.expanduser can fail when $HOME is undefined and + # getpwuid fails. See https://bugs.python.org/issue20164 & + # https://github.com/psf/requests/issues/1846 + return + + if os.path.exists(loc): + netrc_path = loc + break + + # Abort early if there isn't one. + if netrc_path is None: + return + + ri = urlparse(url) + host = ri.hostname + + try: + _netrc = netrc(netrc_path).authenticators(host) + if _netrc: + # Return with login / password + login_i = 0 if _netrc[0] else 1 + return (_netrc[login_i], _netrc[2]) + except (NetrcParseError, OSError): + # If there was a parsing error or a permissions issue reading the file, + # we'll just skip netrc auth unless explicitly asked to raise errors. + if raise_errors: + raise + + # App Engine hackiness. + except (ImportError, AttributeError): + pass + + +def guess_filename(obj): + """Tries to guess the filename of the given object.""" + name = getattr(obj, "name", None) + if name and isinstance(name, basestring) and name[0] != "<" and name[-1] != ">": + return os.path.basename(name) + + +def extract_zipped_paths(path): + """Replace nonexistent paths that look like they refer to a member of a zip + archive with the location of an extracted copy of the target, or else + just return the provided path unchanged. 
+ """ + if os.path.exists(path): + # this is already a valid path, no need to do anything further + return path + + # find the first valid part of the provided path and treat that as a zip archive + # assume the rest of the path is the name of a member in the archive + archive, member = os.path.split(path) + while archive and not os.path.exists(archive): + archive, prefix = os.path.split(archive) + if not prefix: + # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split), + # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users + break + member = "/".join([prefix, member]) + + if not zipfile.is_zipfile(archive): + return path + + zip_file = zipfile.ZipFile(archive) + if member not in zip_file.namelist(): + return path + + # we have a valid zip archive and a valid member of that archive + tmp = tempfile.gettempdir() + extracted_path = os.path.join(tmp, member.split("/")[-1]) + if not os.path.exists(extracted_path): + # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition + with atomic_open(extracted_path) as file_handler: + file_handler.write(zip_file.read(member)) + return extracted_path + + +@contextlib.contextmanager +def atomic_open(filename): + """Write a file to the disk in an atomic fashion""" + tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename)) + try: + with os.fdopen(tmp_descriptor, "wb") as tmp_handler: + yield tmp_handler + os.replace(tmp_name, filename) + except BaseException: + os.remove(tmp_name) + raise + + +def from_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. Unless it can not be represented as such, return an + OrderedDict, e.g., + + :: + + >>> from_key_val_list([('key', 'val')]) + OrderedDict([('key', 'val')]) + >>> from_key_val_list('string') + Traceback (most recent call last): + ... + ValueError: cannot encode objects that are not 2-tuples + >>> from_key_val_list({'key': 'val'}) + OrderedDict([('key', 'val')]) + + :rtype: OrderedDict + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError("cannot encode objects that are not 2-tuples") + + return OrderedDict(value) + + +def to_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. If it can be, return a list of tuples, e.g., + + :: + + >>> to_key_val_list([('key', 'val')]) + [('key', 'val')] + >>> to_key_val_list({'key': 'val'}) + [('key', 'val')] + >>> to_key_val_list('string') + Traceback (most recent call last): + ... + ValueError: cannot encode objects that are not 2-tuples + + :rtype: list + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError("cannot encode objects that are not 2-tuples") + + if isinstance(value, Mapping): + value = value.items() + + return list(value) + + +# From mitsuhiko/werkzeug (used with permission). +def parse_list_header(value): + """Parse lists as described by RFC 2068 Section 2. + + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Quotes are removed automatically after parsing. + + It basically works like :func:`parse_set_header` just that items + may appear multiple times and case sensitivity is preserved. 
+ + The return value is a standard :class:`list`: + + >>> parse_list_header('token, "quoted value"') + ['token', 'quoted value'] + + To create a header from the :class:`list` again, use the + :func:`dump_header` function. + + :param value: a string with a list header. + :return: :class:`list` + :rtype: list + """ + result = [] + for item in _parse_list_header(value): + if item[:1] == item[-1:] == '"': + item = unquote_header_value(item[1:-1]) + result.append(item) + return result + + +# From mitsuhiko/werkzeug (used with permission). +def parse_dict_header(value): + """Parse lists of key, value pairs as described by RFC 2068 Section 2 and + convert them into a python dict: + + >>> d = parse_dict_header('foo="is a fish", bar="as well"') + >>> type(d) is dict + True + >>> sorted(d.items()) + [('bar', 'as well'), ('foo', 'is a fish')] + + If there is no value for a key it will be `None`: + + >>> parse_dict_header('key_without_value') + {'key_without_value': None} + + To create a header from the :class:`dict` again, use the + :func:`dump_header` function. + + :param value: a string with a dict header. + :return: :class:`dict` + :rtype: dict + """ + result = {} + for item in _parse_list_header(value): + if "=" not in item: + result[item] = None + continue + name, value = item.split("=", 1) + if value[:1] == value[-1:] == '"': + value = unquote_header_value(value[1:-1]) + result[name] = value + return result + + +# From mitsuhiko/werkzeug (used with permission). +def unquote_header_value(value, is_filename=False): + r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). + This does not use the real unquoting but what browsers are actually + using for quoting. + + :param value: the header value to unquote. + :rtype: str + """ + if value and value[0] == value[-1] == '"': + # this is not the real unquoting, but fixing this so that the + # RFC is met will result in bugs with internet explorer and + # probably some other browsers as well. IE for example is + # uploading files with "C:\foo\bar.txt" as filename + value = value[1:-1] + + # if this is a filename and the starting characters look like + # a UNC path, then just return the value without quotes. Using the + # replace sequence below on a UNC path has the effect of turning + # the leading double slash into a single slash and then + # _fix_ie_filename() doesn't work correctly. See #458. + if not is_filename or value[:2] != "\\\\": + return value.replace("\\\\", "\\").replace('\\"', '"') + return value + + +def dict_from_cookiejar(cj): + """Returns a key/value dictionary from a CookieJar. + + :param cj: CookieJar object to extract cookies from. + :rtype: dict + """ + + cookie_dict = {} + + for cookie in cj: + cookie_dict[cookie.name] = cookie.value + + return cookie_dict + + +def add_dict_to_cookiejar(cj, cookie_dict): + """Returns a CookieJar from a key/value dictionary. + + :param cj: CookieJar to insert cookies into. + :param cookie_dict: Dict of key/values to insert into CookieJar. + :rtype: CookieJar + """ + + return cookiejar_from_dict(cookie_dict, cj) + + +def get_encodings_from_content(content): + """Returns encodings from given content string. + + :param content: bytestring to extract encodings from. + """ + warnings.warn( + ( + "In requests 3.0, get_encodings_from_content will be removed. For " + "more information, please see the discussion on issue #2266. 
(This" + " warning should only appear once.)" + ), + DeprecationWarning, + ) + + charset_re = re.compile(r']', flags=re.I) + pragma_re = re.compile(r']', flags=re.I) + xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]') + + return ( + charset_re.findall(content) + + pragma_re.findall(content) + + xml_re.findall(content) + ) + + +def _parse_content_type_header(header): + """Returns content type and parameters from given header + + :param header: string + :return: tuple containing content type and dictionary of + parameters + """ + + tokens = header.split(";") + content_type, params = tokens[0].strip(), tokens[1:] + params_dict = {} + items_to_strip = "\"' " + + for param in params: + param = param.strip() + if param: + key, value = param, True + index_of_equals = param.find("=") + if index_of_equals != -1: + key = param[:index_of_equals].strip(items_to_strip) + value = param[index_of_equals + 1 :].strip(items_to_strip) + params_dict[key.lower()] = value + return content_type, params_dict + + +def get_encoding_from_headers(headers): + """Returns encodings from given HTTP Header Dict. + + :param headers: dictionary to extract encoding from. + :rtype: str + """ + + content_type = headers.get("content-type") + + if not content_type: + return None + + content_type, params = _parse_content_type_header(content_type) + + if "charset" in params: + return params["charset"].strip("'\"") + + if "text" in content_type: + return "ISO-8859-1" + + if "application/json" in content_type: + # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset + return "utf-8" + + +def stream_decode_response_unicode(iterator, r): + """Stream decodes an iterator.""" + + if r.encoding is None: + yield from iterator + return + + decoder = codecs.getincrementaldecoder(r.encoding)(errors="replace") + for chunk in iterator: + rv = decoder.decode(chunk) + if rv: + yield rv + rv = decoder.decode(b"", final=True) + if rv: + yield rv + + +def iter_slices(string, slice_length): + """Iterate over slices of a string.""" + pos = 0 + if slice_length is None or slice_length <= 0: + slice_length = len(string) + while pos < len(string): + yield string[pos : pos + slice_length] + pos += slice_length + + +def get_unicode_from_response(r): + """Returns the requested content back in unicode. + + :param r: Response object to get unicode content from. + + Tried: + + 1. charset from content-type + 2. fall back and replace all unicode characters + + :rtype: str + """ + warnings.warn( + ( + "In requests 3.0, get_unicode_from_response will be removed. For " + "more information, please see the discussion on issue #2266. (This" + " warning should only appear once.)" + ), + DeprecationWarning, + ) + + tried_encodings = [] + + # Try charset from content-type + encoding = get_encoding_from_headers(r.headers) + + if encoding: + try: + return str(r.content, encoding) + except UnicodeError: + tried_encodings.append(encoding) + + # Fall back: + try: + return str(r.content, encoding, errors="replace") + except TypeError: + return r.content + + +# The unreserved URI characters (RFC 3986) +UNRESERVED_SET = frozenset( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~" +) + + +def unquote_unreserved(uri): + """Un-escape any percent-escape sequences in a URI that are unreserved + characters. This leaves all reserved, illegal and non-ASCII bytes encoded. 
+ + :rtype: str + """ + parts = uri.split("%") + for i in range(1, len(parts)): + h = parts[i][0:2] + if len(h) == 2 and h.isalnum(): + try: + c = chr(int(h, 16)) + except ValueError: + raise InvalidURL(f"Invalid percent-escape sequence: '{h}'") + + if c in UNRESERVED_SET: + parts[i] = c + parts[i][2:] + else: + parts[i] = f"%{parts[i]}" + else: + parts[i] = f"%{parts[i]}" + return "".join(parts) + + +def requote_uri(uri): + """Re-quote the given URI. + + This function passes the given URI through an unquote/quote cycle to + ensure that it is fully and consistently quoted. + + :rtype: str + """ + safe_with_percent = "!#$%&'()*+,/:;=?@[]~" + safe_without_percent = "!#$&'()*+,/:;=?@[]~" + try: + # Unquote only the unreserved characters + # Then quote only illegal characters (do not quote reserved, + # unreserved, or '%') + return quote(unquote_unreserved(uri), safe=safe_with_percent) + except InvalidURL: + # We couldn't unquote the given URI, so let's try quoting it, but + # there may be unquoted '%'s in the URI. We need to make sure they're + # properly quoted so they do not cause issues elsewhere. + return quote(uri, safe=safe_without_percent) + + +def address_in_network(ip, net): + """This function allows you to check if an IP belongs to a network subnet + + Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24 + returns False if ip = 192.168.1.1 and net = 192.168.100.0/24 + + :rtype: bool + """ + ipaddr = struct.unpack("=L", socket.inet_aton(ip))[0] + netaddr, bits = net.split("/") + netmask = struct.unpack("=L", socket.inet_aton(dotted_netmask(int(bits))))[0] + network = struct.unpack("=L", socket.inet_aton(netaddr))[0] & netmask + return (ipaddr & netmask) == (network & netmask) + + +def dotted_netmask(mask): + """Converts mask from /xx format to xxx.xxx.xxx.xxx + + Example: if mask is 24 function returns 255.255.255.0 + + :rtype: str + """ + bits = 0xFFFFFFFF ^ (1 << 32 - mask) - 1 + return socket.inet_ntoa(struct.pack(">I", bits)) + + +def is_ipv4_address(string_ip): + """ + :rtype: bool + """ + try: + socket.inet_aton(string_ip) + except OSError: + return False + return True + + +def is_valid_cidr(string_network): + """ + Very simple check of the cidr format in no_proxy variable. + + :rtype: bool + """ + if string_network.count("/") == 1: + try: + mask = int(string_network.split("/")[1]) + except ValueError: + return False + + if mask < 1 or mask > 32: + return False + + try: + socket.inet_aton(string_network.split("/")[0]) + except OSError: + return False + else: + return False + return True + + +@contextlib.contextmanager +def set_environ(env_name, value): + """Set the environment variable 'env_name' to 'value' + + Save previous value, yield, and then restore the previous value stored in + the environment variable 'env_name'. + + If 'value' is None, do nothing""" + value_changed = value is not None + if value_changed: + old_value = os.environ.get(env_name) + os.environ[env_name] = value + try: + yield + finally: + if value_changed: + if old_value is None: + del os.environ[env_name] + else: + os.environ[env_name] = old_value + + +def should_bypass_proxies(url, no_proxy): + """ + Returns whether we should bypass proxies or not. + + :rtype: bool + """ + # Prioritize lowercase environment variables over uppercase + # to keep a consistent behaviour with other http projects (curl, wget). + def get_proxy(key): + return os.environ.get(key) or os.environ.get(key.upper()) + + # First check whether no_proxy is defined. 
If it is, check that the URL + # we're getting isn't in the no_proxy list. + no_proxy_arg = no_proxy + if no_proxy is None: + no_proxy = get_proxy("no_proxy") + parsed = urlparse(url) + + if parsed.hostname is None: + # URLs don't always have hostnames, e.g. file:/// urls. + return True + + if no_proxy: + # We need to check whether we match here. We need to see if we match + # the end of the hostname, both with and without the port. + no_proxy = (host for host in no_proxy.replace(" ", "").split(",") if host) + + if is_ipv4_address(parsed.hostname): + for proxy_ip in no_proxy: + if is_valid_cidr(proxy_ip): + if address_in_network(parsed.hostname, proxy_ip): + return True + elif parsed.hostname == proxy_ip: + # If no_proxy ip was defined in plain IP notation instead of cidr notation & + # matches the IP of the index + return True + else: + host_with_port = parsed.hostname + if parsed.port: + host_with_port += f":{parsed.port}" + + for host in no_proxy: + if parsed.hostname.endswith(host) or host_with_port.endswith(host): + # The URL does match something in no_proxy, so we don't want + # to apply the proxies on this URL. + return True + + with set_environ("no_proxy", no_proxy_arg): + # parsed.hostname can be `None` in cases such as a file URI. + try: + bypass = proxy_bypass(parsed.hostname) + except (TypeError, socket.gaierror): + bypass = False + + if bypass: + return True + + return False + + +def get_environ_proxies(url, no_proxy=None): + """ + Return a dict of environment proxies. + + :rtype: dict + """ + if should_bypass_proxies(url, no_proxy=no_proxy): + return {} + else: + return getproxies() + + +def select_proxy(url, proxies): + """Select a proxy for the url, if applicable. + + :param url: The url being for the request + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs + """ + proxies = proxies or {} + urlparts = urlparse(url) + if urlparts.hostname is None: + return proxies.get(urlparts.scheme, proxies.get("all")) + + proxy_keys = [ + urlparts.scheme + "://" + urlparts.hostname, + urlparts.scheme, + "all://" + urlparts.hostname, + "all", + ] + proxy = None + for proxy_key in proxy_keys: + if proxy_key in proxies: + proxy = proxies[proxy_key] + break + + return proxy + + +def resolve_proxies(request, proxies, trust_env=True): + """This method takes proxy information from a request and configuration + input to resolve a mapping of target proxies. This will consider settings + such a NO_PROXY to strip proxy configurations. + + :param request: Request or PreparedRequest + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs + :param trust_env: Boolean declaring whether to trust environment configs + + :rtype: dict + """ + proxies = proxies if proxies is not None else {} + url = request.url + scheme = urlparse(url).scheme + no_proxy = proxies.get("no_proxy") + new_proxies = proxies.copy() + + if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy): + environ_proxies = get_environ_proxies(url, no_proxy=no_proxy) + + proxy = environ_proxies.get(scheme, environ_proxies.get("all")) + + if proxy: + new_proxies.setdefault(scheme, proxy) + return new_proxies + + +def default_user_agent(name="python-requests"): + """ + Return a string representing the default user agent. 
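+
+    For example (the version component shown is a placeholder and depends on
+    the installed release)::
+
+        >>> default_user_agent()  # doctest: +SKIP
+        'python-requests/2.x.y'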
+ + :rtype: str + """ + return f"{name}/{__version__}" + + +def default_headers(): + """ + :rtype: requests.structures.CaseInsensitiveDict + """ + return CaseInsensitiveDict( + { + "User-Agent": default_user_agent(), + "Accept-Encoding": DEFAULT_ACCEPT_ENCODING, + "Accept": "*/*", + "Connection": "keep-alive", + } + ) + + +def parse_header_links(value): + """Return a list of parsed link headers proxies. + + i.e. Link: ; rel=front; type="image/jpeg",; rel=back;type="image/jpeg" + + :rtype: list + """ + + links = [] + + replace_chars = " '\"" + + value = value.strip(replace_chars) + if not value: + return links + + for val in re.split(", *<", value): + try: + url, params = val.split(";", 1) + except ValueError: + url, params = val, "" + + link = {"url": url.strip("<> '\"")} + + for param in params.split(";"): + try: + key, value = param.split("=") + except ValueError: + break + + link[key.strip(replace_chars)] = value.strip(replace_chars) + + links.append(link) + + return links + + +# Null bytes; no need to recreate these on each call to guess_json_utf +_null = "\x00".encode("ascii") # encoding to ASCII for Python 3 +_null2 = _null * 2 +_null3 = _null * 3 + + +def guess_json_utf(data): + """ + :rtype: str + """ + # JSON always starts with two ASCII characters, so detection is as + # easy as counting the nulls and from their location and count + # determine the encoding. Also detect a BOM, if present. + sample = data[:4] + if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE): + return "utf-32" # BOM included + if sample[:3] == codecs.BOM_UTF8: + return "utf-8-sig" # BOM included, MS style (discouraged) + if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE): + return "utf-16" # BOM included + nullcount = sample.count(_null) + if nullcount == 0: + return "utf-8" + if nullcount == 2: + if sample[::2] == _null2: # 1st and 3rd are null + return "utf-16-be" + if sample[1::2] == _null2: # 2nd and 4th are null + return "utf-16-le" + # Did not detect 2 valid UTF-16 ascii-range characters + if nullcount == 3: + if sample[:3] == _null3: + return "utf-32-be" + if sample[1:] == _null3: + return "utf-32-le" + # Did not detect a valid UTF-32 ascii-range character + return None + + +def prepend_scheme_if_needed(url, new_scheme): + """Given a URL that may or may not have a scheme, prepend the given scheme. + Does not replace a present scheme with the one provided as an argument. + + :rtype: str + """ + parsed = parse_url(url) + scheme, auth, host, port, path, query, fragment = parsed + + # A defect in urlparse determines that there isn't a netloc present in some + # urls. We previously assumed parsing was overly cautious, and swapped the + # netloc and path. Due to a lack of tests on the original defect, this is + # maintained with parse_url for backwards compatibility. + netloc = parsed.netloc + if not netloc: + netloc, path = path, netloc + + if auth: + # parse_url doesn't provide the netloc with auth + # so we'll add it ourselves. + netloc = "@".join([auth, netloc]) + if scheme is None: + scheme = new_scheme + if path is None: + path = "" + + return urlunparse((scheme, netloc, path, "", query, fragment)) + + +def get_auth_from_url(url): + """Given a url with authentication components, extract them into a tuple of + username,password. 
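+
+    For example (the credentials are illustrative)::
+
+        >>> get_auth_from_url("https://user:s3cret@example.com/repo.git")
+        ('user', 's3cret')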
+ + :rtype: (str,str) + """ + parsed = urlparse(url) + + try: + auth = (unquote(parsed.username), unquote(parsed.password)) + except (AttributeError, TypeError): + auth = ("", "") + + return auth + + +def check_header_validity(header): + """Verifies that header parts don't contain leading whitespace + reserved characters, or return characters. + + :param header: tuple, in the format (name, value). + """ + name, value = header + _validate_header_part(header, name, 0) + _validate_header_part(header, value, 1) + + +def _validate_header_part(header, header_part, header_validator_index): + if isinstance(header_part, str): + validator = _HEADER_VALIDATORS_STR[header_validator_index] + elif isinstance(header_part, bytes): + validator = _HEADER_VALIDATORS_BYTE[header_validator_index] + else: + raise InvalidHeader( + f"Header part ({header_part!r}) from {header} " + f"must be of type str or bytes, not {type(header_part)}" + ) + + if not validator.match(header_part): + header_kind = "name" if header_validator_index == 0 else "value" + raise InvalidHeader( + f"Invalid leading whitespace, reserved character(s), or return" + f"character(s) in header {header_kind}: {header_part!r}" + ) + + +def urldefragauth(url): + """ + Given a url remove the fragment and the authentication part. + + :rtype: str + """ + scheme, netloc, path, params, query, fragment = urlparse(url) + + # see func:`prepend_scheme_if_needed` + if not netloc: + netloc, path = path, netloc + + netloc = netloc.rsplit("@", 1)[-1] + + return urlunparse((scheme, netloc, path, params, query, "")) + + +def rewind_body(prepared_request): + """Move file pointer back to its recorded starting position + so it can be read again on redirect. + """ + body_seek = getattr(prepared_request.body, "seek", None) + if body_seek is not None and isinstance( + prepared_request._body_position, integer_types + ): + try: + body_seek(prepared_request._body_position) + except OSError: + raise UnrewindableBodyError( + "An error occurred when rewinding request body for redirect." 
+ ) + else: + raise UnrewindableBodyError("Unable to rewind request body for redirect.") diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__init__.py new file mode 100644 index 0000000..d92acc7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__init__.py @@ -0,0 +1,26 @@ +__all__ = [ + "__version__", + "AbstractProvider", + "AbstractResolver", + "BaseReporter", + "InconsistentCandidate", + "Resolver", + "RequirementsConflicted", + "ResolutionError", + "ResolutionImpossible", + "ResolutionTooDeep", +] + +__version__ = "1.0.1" + + +from .providers import AbstractProvider, AbstractResolver +from .reporters import BaseReporter +from .resolvers import ( + InconsistentCandidate, + RequirementsConflicted, + ResolutionError, + ResolutionImpossible, + ResolutionTooDeep, + Resolver, +) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/compat/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/compat/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py b/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py new file mode 100644 index 0000000..1becc50 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py @@ -0,0 +1,6 @@ +__all__ = ["Mapping", "Sequence"] + +try: + from collections.abc import Mapping, Sequence +except ImportError: + from collections import Mapping, Sequence diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/providers.py b/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/providers.py new file mode 100644 index 0000000..e99d87e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/providers.py @@ -0,0 +1,133 @@ +class AbstractProvider(object): + """Delegate class to provide the required interface for the resolver.""" + + def identify(self, requirement_or_candidate): + """Given a requirement, return an identifier for it. + + This is used to identify a requirement, e.g. whether two requirements + should have their specifier parts merged. + """ + raise NotImplementedError + + def get_preference( + self, + identifier, + resolutions, + candidates, + information, + backtrack_causes, + ): + """Produce a sort key for given requirement based on preference. + + The preference is defined as "I think this requirement should be + resolved first". The lower the return value is, the more preferred + this group of arguments is. + + :param identifier: An identifier as returned by ``identify()``. This + identifies the dependency matches which should be returned. + :param resolutions: Mapping of candidates currently pinned by the + resolver. Each key is an identifier, and the value is a candidate. + The candidate may conflict with requirements from ``information``. + :param candidates: Mapping of each dependency's possible candidates. + Each value is an iterator of candidates. + :param information: Mapping of requirement information of each package. + Each value is an iterator of *requirement information*. + :param backtrack_causes: Sequence of requirement information that were + the requirements that caused the resolver to most recently backtrack. + + A *requirement information* instance is a named tuple with two members: + + * ``requirement`` specifies a requirement contributing to the current + list of candidates. 
+ * ``parent`` specifies the candidate that provides (depended on) the + requirement, or ``None`` to indicate a root requirement. + + The preference could depend on various issues, including (not + necessarily in this order): + + * Is this package pinned in the current resolution result? + * How relaxed is the requirement? Stricter ones should probably be + worked on first? (I don't know, actually.) + * How many possibilities are there to satisfy this requirement? Those + with few left should likely be worked on first, I guess? + * Are there any known conflicts for this requirement? We should + probably work on those with the most known conflicts. + + A sortable value should be returned (this will be used as the ``key`` + parameter of the built-in sorting function). The smaller the value is, + the more preferred this requirement is (i.e. the sorting function + is called with ``reverse=False``). + """ + raise NotImplementedError + + def find_matches(self, identifier, requirements, incompatibilities): + """Find all possible candidates that satisfy the given constraints. + + :param identifier: An identifier as returned by ``identify()``. This + identifies the dependency matches of which should be returned. + :param requirements: A mapping of requirements that all returned + candidates must satisfy. Each key is an identifier, and the value + an iterator of requirements for that dependency. + :param incompatibilities: A mapping of known incompatibilities of + each dependency. Each key is an identifier, and the value an + iterator of incompatibilities known to the resolver. All + incompatibilities *must* be excluded from the return value. + + This should try to get candidates based on the requirements' types. + For VCS, local, and archive requirements, the one-and-only match is + returned, and for a "named" requirement, the index(es) should be + consulted to find concrete candidates for this requirement. + + The return value should produce candidates ordered by preference; the + most preferred candidate should come first. The return type may be one + of the following: + + * A callable that returns an iterator that yields candidates. + * An collection of candidates. + * An iterable of candidates. This will be consumed immediately into a + list of candidates. + """ + raise NotImplementedError + + def is_satisfied_by(self, requirement, candidate): + """Whether the given requirement can be satisfied by a candidate. + + The candidate is guaranteed to have been generated from the + requirement. + + A boolean should be returned to indicate whether ``candidate`` is a + viable solution to the requirement. + """ + raise NotImplementedError + + def get_dependencies(self, candidate): + """Get dependencies of a candidate. + + This should return a collection of requirements that `candidate` + specifies as its dependencies. + """ + raise NotImplementedError + + +class AbstractResolver(object): + """The thing that performs the actual resolution work.""" + + base_exception = Exception + + def __init__(self, provider, reporter): + self.provider = provider + self.reporter = reporter + + def resolve(self, requirements, **kwargs): + """Take a collection of constraints, spit out the resolution result. + + This returns a representation of the final resolution state, with one + guarenteed attribute ``mapping`` that contains resolved candidates as + values. The keys are their respective identifiers. + + :param requirements: A collection of constraints. 
+ :param kwargs: Additional keyword arguments that subclasses may accept. + + :raises: ``self.base_exception`` or its subclass. + """ + raise NotImplementedError diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/reporters.py b/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/reporters.py new file mode 100644 index 0000000..688b5e1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/reporters.py @@ -0,0 +1,43 @@ +class BaseReporter(object): + """Delegate class to provider progress reporting for the resolver.""" + + def starting(self): + """Called before the resolution actually starts.""" + + def starting_round(self, index): + """Called before each round of resolution starts. + + The index is zero-based. + """ + + def ending_round(self, index, state): + """Called before each round of resolution ends. + + This is NOT called if the resolution ends at this round. Use `ending` + if you want to report finalization. The index is zero-based. + """ + + def ending(self, state): + """Called before the resolution ends successfully.""" + + def adding_requirement(self, requirement, parent): + """Called when adding a new requirement into the resolve criteria. + + :param requirement: The additional requirement to be applied to filter + the available candidaites. + :param parent: The candidate that requires ``requirement`` as a + dependency, or None if ``requirement`` is one of the root + requirements passed in from ``Resolver.resolve()``. + """ + + def resolving_conflicts(self, causes): + """Called when starting to attempt requirement conflict resolution. + + :param causes: The information on the collision that caused the backtracking. + """ + + def rejecting_candidate(self, criterion, candidate): + """Called when rejecting a candidate during backtracking.""" + + def pinning(self, candidate): + """Called when adding a candidate to the potential solution.""" diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers.py b/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers.py new file mode 100644 index 0000000..2c3d0e3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers.py @@ -0,0 +1,547 @@ +import collections +import itertools +import operator + +from .providers import AbstractResolver +from .structs import DirectedGraph, IteratorMapping, build_iter_view + +RequirementInformation = collections.namedtuple( + "RequirementInformation", ["requirement", "parent"] +) + + +class ResolverException(Exception): + """A base class for all exceptions raised by this module. + + Exceptions derived by this class should all be handled in this module. Any + bubbling pass the resolver should be treated as a bug. 
+ """ + + +class RequirementsConflicted(ResolverException): + def __init__(self, criterion): + super(RequirementsConflicted, self).__init__(criterion) + self.criterion = criterion + + def __str__(self): + return "Requirements conflict: {}".format( + ", ".join(repr(r) for r in self.criterion.iter_requirement()), + ) + + +class InconsistentCandidate(ResolverException): + def __init__(self, candidate, criterion): + super(InconsistentCandidate, self).__init__(candidate, criterion) + self.candidate = candidate + self.criterion = criterion + + def __str__(self): + return "Provided candidate {!r} does not satisfy {}".format( + self.candidate, + ", ".join(repr(r) for r in self.criterion.iter_requirement()), + ) + + +class Criterion(object): + """Representation of possible resolution results of a package. + + This holds three attributes: + + * `information` is a collection of `RequirementInformation` pairs. + Each pair is a requirement contributing to this criterion, and the + candidate that provides the requirement. + * `incompatibilities` is a collection of all known not-to-work candidates + to exclude from consideration. + * `candidates` is a collection containing all possible candidates deducted + from the union of contributing requirements and known incompatibilities. + It should never be empty, except when the criterion is an attribute of a + raised `RequirementsConflicted` (in which case it is always empty). + + .. note:: + This class is intended to be externally immutable. **Do not** mutate + any of its attribute containers. + """ + + def __init__(self, candidates, information, incompatibilities): + self.candidates = candidates + self.information = information + self.incompatibilities = incompatibilities + + def __repr__(self): + requirements = ", ".join( + "({!r}, via={!r})".format(req, parent) + for req, parent in self.information + ) + return "Criterion({})".format(requirements) + + def iter_requirement(self): + return (i.requirement for i in self.information) + + def iter_parent(self): + return (i.parent for i in self.information) + + +class ResolutionError(ResolverException): + pass + + +class ResolutionImpossible(ResolutionError): + def __init__(self, causes): + super(ResolutionImpossible, self).__init__(causes) + # causes is a list of RequirementInformation objects + self.causes = causes + + +class ResolutionTooDeep(ResolutionError): + def __init__(self, round_count): + super(ResolutionTooDeep, self).__init__(round_count) + self.round_count = round_count + + +# Resolution state in a round. +State = collections.namedtuple("State", "mapping criteria backtrack_causes") + + +class Resolution(object): + """Stateful resolution object. + + This is designed as a one-off object that holds information to kick start + the resolution process, and holds the results afterwards. + """ + + def __init__(self, provider, reporter): + self._p = provider + self._r = reporter + self._states = [] + + @property + def state(self): + try: + return self._states[-1] + except IndexError: + raise AttributeError("state") + + def _push_new_state(self): + """Push a new state into history. + + This new state will be used to hold resolution results of the next + coming round. 
+ """ + base = self._states[-1] + state = State( + mapping=base.mapping.copy(), + criteria=base.criteria.copy(), + backtrack_causes=base.backtrack_causes[:], + ) + self._states.append(state) + + def _add_to_criteria(self, criteria, requirement, parent): + self._r.adding_requirement(requirement=requirement, parent=parent) + + identifier = self._p.identify(requirement_or_candidate=requirement) + criterion = criteria.get(identifier) + if criterion: + incompatibilities = list(criterion.incompatibilities) + else: + incompatibilities = [] + + matches = self._p.find_matches( + identifier=identifier, + requirements=IteratorMapping( + criteria, + operator.methodcaller("iter_requirement"), + {identifier: [requirement]}, + ), + incompatibilities=IteratorMapping( + criteria, + operator.attrgetter("incompatibilities"), + {identifier: incompatibilities}, + ), + ) + + if criterion: + information = list(criterion.information) + information.append(RequirementInformation(requirement, parent)) + else: + information = [RequirementInformation(requirement, parent)] + + criterion = Criterion( + candidates=build_iter_view(matches), + information=information, + incompatibilities=incompatibilities, + ) + if not criterion.candidates: + raise RequirementsConflicted(criterion) + criteria[identifier] = criterion + + def _remove_information_from_criteria(self, criteria, parents): + """Remove information from parents of criteria. + + Concretely, removes all values from each criterion's ``information`` + field that have one of ``parents`` as provider of the requirement. + + :param criteria: The criteria to update. + :param parents: Identifiers for which to remove information from all criteria. + """ + if not parents: + return + for key, criterion in criteria.items(): + criteria[key] = Criterion( + criterion.candidates, + [ + information + for information in criterion.information + if ( + information.parent is None + or self._p.identify(information.parent) not in parents + ) + ], + criterion.incompatibilities, + ) + + def _get_preference(self, name): + return self._p.get_preference( + identifier=name, + resolutions=self.state.mapping, + candidates=IteratorMapping( + self.state.criteria, + operator.attrgetter("candidates"), + ), + information=IteratorMapping( + self.state.criteria, + operator.attrgetter("information"), + ), + backtrack_causes=self.state.backtrack_causes, + ) + + def _is_current_pin_satisfying(self, name, criterion): + try: + current_pin = self.state.mapping[name] + except KeyError: + return False + return all( + self._p.is_satisfied_by(requirement=r, candidate=current_pin) + for r in criterion.iter_requirement() + ) + + def _get_updated_criteria(self, candidate): + criteria = self.state.criteria.copy() + for requirement in self._p.get_dependencies(candidate=candidate): + self._add_to_criteria(criteria, requirement, parent=candidate) + return criteria + + def _attempt_to_pin_criterion(self, name): + criterion = self.state.criteria[name] + + causes = [] + for candidate in criterion.candidates: + try: + criteria = self._get_updated_criteria(candidate) + except RequirementsConflicted as e: + self._r.rejecting_candidate(e.criterion, candidate) + causes.append(e.criterion) + continue + + # Check the newly-pinned candidate actually works. This should + # always pass under normal circumstances, but in the case of a + # faulty provider, we will raise an error to notify the implementer + # to fix find_matches() and/or is_satisfied_by(). 
+ satisfied = all( + self._p.is_satisfied_by(requirement=r, candidate=candidate) + for r in criterion.iter_requirement() + ) + if not satisfied: + raise InconsistentCandidate(candidate, criterion) + + self._r.pinning(candidate=candidate) + self.state.criteria.update(criteria) + + # Put newly-pinned candidate at the end. This is essential because + # backtracking looks at this mapping to get the last pin. + self.state.mapping.pop(name, None) + self.state.mapping[name] = candidate + + return [] + + # All candidates tried, nothing works. This criterion is a dead + # end, signal for backtracking. + return causes + + def _backjump(self, causes): + """Perform backjumping. + + When we enter here, the stack is like this:: + + [ state Z ] + [ state Y ] + [ state X ] + .... earlier states are irrelevant. + + 1. No pins worked for Z, so it does not have a pin. + 2. We want to reset state Y to unpinned, and pin another candidate. + 3. State X holds what state Y was before the pin, but does not + have the incompatibility information gathered in state Y. + + Each iteration of the loop will: + + 1. Identify Z. The incompatibility is not always caused by the latest + state. For example, given three requirements A, B and C, with + dependencies A1, B1 and C1, where A1 and B1 are incompatible: the + last state might be related to C, so we want to discard the + previous state. + 2. Discard Z. + 3. Discard Y but remember its incompatibility information gathered + previously, and the failure we're dealing with right now. + 4. Push a new state Y' based on X, and apply the incompatibility + information from Y to Y'. + 5a. If this causes Y' to conflict, we need to backtrack again. Make Y' + the new Z and go back to step 2. + 5b. If the incompatibilities apply cleanly, end backtracking. + """ + incompatible_reqs = itertools.chain( + (c.parent for c in causes if c.parent is not None), + (c.requirement for c in causes), + ) + incompatible_deps = {self._p.identify(r) for r in incompatible_reqs} + while len(self._states) >= 3: + # Remove the state that triggered backtracking. + del self._states[-1] + + # Ensure to backtrack to a state that caused the incompatibility + incompatible_state = False + while not incompatible_state: + # Retrieve the last candidate pin and known incompatibilities. + try: + broken_state = self._states.pop() + name, candidate = broken_state.mapping.popitem() + except (IndexError, KeyError): + raise ResolutionImpossible(causes) + current_dependencies = { + self._p.identify(d) + for d in self._p.get_dependencies(candidate) + } + incompatible_state = not current_dependencies.isdisjoint( + incompatible_deps + ) + + incompatibilities_from_broken = [ + (k, list(v.incompatibilities)) + for k, v in broken_state.criteria.items() + ] + + # Also mark the newly known incompatibility. + incompatibilities_from_broken.append((name, [candidate])) + + # Create a new state from the last known-to-work one, and apply + # the previously gathered incompatibility information. 
+ def _patch_criteria(): + for k, incompatibilities in incompatibilities_from_broken: + if not incompatibilities: + continue + try: + criterion = self.state.criteria[k] + except KeyError: + continue + matches = self._p.find_matches( + identifier=k, + requirements=IteratorMapping( + self.state.criteria, + operator.methodcaller("iter_requirement"), + ), + incompatibilities=IteratorMapping( + self.state.criteria, + operator.attrgetter("incompatibilities"), + {k: incompatibilities}, + ), + ) + candidates = build_iter_view(matches) + if not candidates: + return False + incompatibilities.extend(criterion.incompatibilities) + self.state.criteria[k] = Criterion( + candidates=candidates, + information=list(criterion.information), + incompatibilities=incompatibilities, + ) + return True + + self._push_new_state() + success = _patch_criteria() + + # It works! Let's work on this new state. + if success: + return True + + # State does not work after applying known incompatibilities. + # Try the still previous state. + + # No way to backtrack anymore. + return False + + def resolve(self, requirements, max_rounds): + if self._states: + raise RuntimeError("already resolved") + + self._r.starting() + + # Initialize the root state. + self._states = [ + State( + mapping=collections.OrderedDict(), + criteria={}, + backtrack_causes=[], + ) + ] + for r in requirements: + try: + self._add_to_criteria(self.state.criteria, r, parent=None) + except RequirementsConflicted as e: + raise ResolutionImpossible(e.criterion.information) + + # The root state is saved as a sentinel so the first ever pin can have + # something to backtrack to if it fails. The root state is basically + # pinning the virtual "root" package in the graph. + self._push_new_state() + + for round_index in range(max_rounds): + self._r.starting_round(index=round_index) + + unsatisfied_names = [ + key + for key, criterion in self.state.criteria.items() + if not self._is_current_pin_satisfying(key, criterion) + ] + + # All criteria are accounted for. Nothing more to pin, we are done! + if not unsatisfied_names: + self._r.ending(state=self.state) + return self.state + + # keep track of satisfied names to calculate diff after pinning + satisfied_names = set(self.state.criteria.keys()) - set( + unsatisfied_names + ) + + # Choose the most preferred unpinned criterion to try. + name = min(unsatisfied_names, key=self._get_preference) + failure_causes = self._attempt_to_pin_criterion(name) + + if failure_causes: + causes = [i for c in failure_causes for i in c.information] + # Backjump if pinning fails. The backjump process puts us in + # an unpinned state, so we can work on it in the next round. + self._r.resolving_conflicts(causes=causes) + success = self._backjump(causes) + self.state.backtrack_causes[:] = causes + + # Dead ends everywhere. Give up. + if not success: + raise ResolutionImpossible(self.state.backtrack_causes) + else: + # discard as information sources any invalidated names + # (unsatisfied names that were previously satisfied) + newly_unsatisfied_names = { + key + for key, criterion in self.state.criteria.items() + if key in satisfied_names + and not self._is_current_pin_satisfying(key, criterion) + } + self._remove_information_from_criteria( + self.state.criteria, newly_unsatisfied_names + ) + # Pinning was successful. Push a new state to do another pin. 
+ self._push_new_state() + + self._r.ending_round(index=round_index, state=self.state) + + raise ResolutionTooDeep(max_rounds) + + +def _has_route_to_root(criteria, key, all_keys, connected): + if key in connected: + return True + if key not in criteria: + return False + for p in criteria[key].iter_parent(): + try: + pkey = all_keys[id(p)] + except KeyError: + continue + if pkey in connected: + connected.add(key) + return True + if _has_route_to_root(criteria, pkey, all_keys, connected): + connected.add(key) + return True + return False + + +Result = collections.namedtuple("Result", "mapping graph criteria") + + +def _build_result(state): + mapping = state.mapping + all_keys = {id(v): k for k, v in mapping.items()} + all_keys[id(None)] = None + + graph = DirectedGraph() + graph.add(None) # Sentinel as root dependencies' parent. + + connected = {None} + for key, criterion in state.criteria.items(): + if not _has_route_to_root(state.criteria, key, all_keys, connected): + continue + if key not in graph: + graph.add(key) + for p in criterion.iter_parent(): + try: + pkey = all_keys[id(p)] + except KeyError: + continue + if pkey not in graph: + graph.add(pkey) + graph.connect(pkey, key) + + return Result( + mapping={k: v for k, v in mapping.items() if k in connected}, + graph=graph, + criteria=state.criteria, + ) + + +class Resolver(AbstractResolver): + """The thing that performs the actual resolution work.""" + + base_exception = ResolverException + + def resolve(self, requirements, max_rounds=100): + """Take a collection of constraints, spit out the resolution result. + + The return value is a representation to the final resolution result. It + is a tuple subclass with three public members: + + * `mapping`: A dict of resolved candidates. Each key is an identifier + of a requirement (as returned by the provider's `identify` method), + and the value is the resolved candidate. + * `graph`: A `DirectedGraph` instance representing the dependency tree. + The vertices are keys of `mapping`, and each edge represents *why* + a particular package is included. A special vertex `None` is + included to represent parents of user-supplied requirements. + * `criteria`: A dict of "criteria" that hold detailed information on + how edges in the graph are derived. Each key is an identifier of a + requirement, and the value is a `Criterion` instance. + + The following exceptions may be raised if a resolution cannot be found: + + * `ResolutionImpossible`: A resolution cannot be found for the given + combination of requirements. The `causes` attribute of the + exception is a list of (requirement, parent), giving the + requirements that could not be satisfied. + * `ResolutionTooDeep`: The dependency tree is too deeply nested and + the resolver gave up. This is usually caused by a circular + dependency, but you can try to resolve this by increasing the + `max_rounds` argument. 
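+
+        A minimal, self-contained sketch (``EchoProvider`` below is purely
+        illustrative: it treats every requirement as its own candidate with no
+        dependencies, which is just enough to show the calling convention)::
+
+            from pip._vendor.resolvelib import AbstractProvider, BaseReporter, Resolver
+
+            class EchoProvider(AbstractProvider):
+                def identify(self, requirement_or_candidate):
+                    return requirement_or_candidate
+
+                def get_preference(self, identifier, resolutions, candidates,
+                                   information, backtrack_causes):
+                    return 0
+
+                def find_matches(self, identifier, requirements, incompatibilities):
+                    return [identifier]
+
+                def is_satisfied_by(self, requirement, candidate):
+                    return requirement == candidate
+
+                def get_dependencies(self, candidate):
+                    return []
+
+            result = Resolver(EchoProvider(), BaseReporter()).resolve(["spam"])
+            assert result.mapping == {"spam": "spam"}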
+ """ + resolution = Resolution(self.provider, self.reporter) + state = resolution.resolve(requirements, max_rounds=max_rounds) + return _build_result(state) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/structs.py b/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/structs.py new file mode 100644 index 0000000..359a34f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/structs.py @@ -0,0 +1,170 @@ +import itertools + +from .compat import collections_abc + + +class DirectedGraph(object): + """A graph structure with directed edges.""" + + def __init__(self): + self._vertices = set() + self._forwards = {} # -> Set[] + self._backwards = {} # -> Set[] + + def __iter__(self): + return iter(self._vertices) + + def __len__(self): + return len(self._vertices) + + def __contains__(self, key): + return key in self._vertices + + def copy(self): + """Return a shallow copy of this graph.""" + other = DirectedGraph() + other._vertices = set(self._vertices) + other._forwards = {k: set(v) for k, v in self._forwards.items()} + other._backwards = {k: set(v) for k, v in self._backwards.items()} + return other + + def add(self, key): + """Add a new vertex to the graph.""" + if key in self._vertices: + raise ValueError("vertex exists") + self._vertices.add(key) + self._forwards[key] = set() + self._backwards[key] = set() + + def remove(self, key): + """Remove a vertex from the graph, disconnecting all edges from/to it.""" + self._vertices.remove(key) + for f in self._forwards.pop(key): + self._backwards[f].remove(key) + for t in self._backwards.pop(key): + self._forwards[t].remove(key) + + def connected(self, f, t): + return f in self._backwards[t] and t in self._forwards[f] + + def connect(self, f, t): + """Connect two existing vertices. + + Nothing happens if the vertices are already connected. + """ + if t not in self._vertices: + raise KeyError(t) + self._forwards[f].add(t) + self._backwards[t].add(f) + + def iter_edges(self): + for f, children in self._forwards.items(): + for t in children: + yield f, t + + def iter_children(self, key): + return iter(self._forwards[key]) + + def iter_parents(self, key): + return iter(self._backwards[key]) + + +class IteratorMapping(collections_abc.Mapping): + def __init__(self, mapping, accessor, appends=None): + self._mapping = mapping + self._accessor = accessor + self._appends = appends or {} + + def __repr__(self): + return "IteratorMapping({!r}, {!r}, {!r})".format( + self._mapping, + self._accessor, + self._appends, + ) + + def __bool__(self): + return bool(self._mapping or self._appends) + + __nonzero__ = __bool__ # XXX: Python 2. + + def __contains__(self, key): + return key in self._mapping or key in self._appends + + def __getitem__(self, k): + try: + v = self._mapping[k] + except KeyError: + return iter(self._appends[k]) + return itertools.chain(self._accessor(v), self._appends.get(k, ())) + + def __iter__(self): + more = (k for k in self._appends if k not in self._mapping) + return itertools.chain(self._mapping, more) + + def __len__(self): + more = sum(1 for k in self._appends if k not in self._mapping) + return len(self._mapping) + more + + +class _FactoryIterableView(object): + """Wrap an iterator factory returned by `find_matches()`. + + Calling `iter()` on this class would invoke the underlying iterator + factory, making it a "collection with ordering" that can be iterated + through multiple times, but lacks random access methods presented in + built-in Python sequence types. 
+ """ + + def __init__(self, factory): + self._factory = factory + self._iterable = None + + def __repr__(self): + return "{}({})".format(type(self).__name__, list(self)) + + def __bool__(self): + try: + next(iter(self)) + except StopIteration: + return False + return True + + __nonzero__ = __bool__ # XXX: Python 2. + + def __iter__(self): + iterable = ( + self._factory() if self._iterable is None else self._iterable + ) + self._iterable, current = itertools.tee(iterable) + return current + + +class _SequenceIterableView(object): + """Wrap an iterable returned by find_matches(). + + This is essentially just a proxy to the underlying sequence that provides + the same interface as `_FactoryIterableView`. + """ + + def __init__(self, sequence): + self._sequence = sequence + + def __repr__(self): + return "{}({})".format(type(self).__name__, self._sequence) + + def __bool__(self): + return bool(self._sequence) + + __nonzero__ = __bool__ # XXX: Python 2. + + def __iter__(self): + return iter(self._sequence) + + +def build_iter_view(matches): + """Build an iterable view from the value returned by `find_matches()`.""" + if callable(matches): + return _FactoryIterableView(matches) + if not isinstance(matches, collections_abc.Sequence): + matches = list(matches) + return _SequenceIterableView(matches) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/__init__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/__init__.py new file mode 100644 index 0000000..73f58d7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/__init__.py @@ -0,0 +1,177 @@ +"""Rich text and beautiful formatting in the terminal.""" + +import os +from typing import IO, TYPE_CHECKING, Any, Callable, Optional, Union + +from ._extension import load_ipython_extension # noqa: F401 + +__all__ = ["get_console", "reconfigure", "print", "inspect", "print_json"] + +if TYPE_CHECKING: + from .console import Console + +# Global console used by alternative print +_console: Optional["Console"] = None + +try: + _IMPORT_CWD = os.path.abspath(os.getcwd()) +except FileNotFoundError: + # Can happen if the cwd has been deleted + _IMPORT_CWD = "" + + +def get_console() -> "Console": + """Get a global :class:`~rich.console.Console` instance. This function is used when Rich requires a Console, + and hasn't been explicitly given one. + + Returns: + Console: A console instance. + """ + global _console + if _console is None: + from .console import Console + + _console = Console() + + return _console + + +def reconfigure(*args: Any, **kwargs: Any) -> None: + """Reconfigures the global console by replacing it with another. + + Args: + *args (Any): Positional arguments for the replacement :class:`~rich.console.Console`. + **kwargs (Any): Keyword arguments for the replacement :class:`~rich.console.Console`. + """ + from pip._vendor.rich.console import Console + + new_console = Console(*args, **kwargs) + _console = get_console() + _console.__dict__ = new_console.__dict__ + + +def print( + *objects: Any, + sep: str = " ", + end: str = "\n", + file: Optional[IO[str]] = None, + flush: bool = False, +) -> None: + r"""Print object(s) supplied via positional arguments. + This function has an identical signature to the built-in print. + For more advanced features, see the :class:`~rich.console.Console` class. + + Args: + sep (str, optional): Separator between printed objects. Defaults to " ". + end (str, optional): Character to write at end of output. Defaults to "\\n". 
+ file (IO[str], optional): File to write to, or None for stdout. Defaults to None. + flush (bool, optional): Has no effect as Rich always flushes output. Defaults to False. + + """ + from .console import Console + + write_console = get_console() if file is None else Console(file=file) + return write_console.print(*objects, sep=sep, end=end) + + +def print_json( + json: Optional[str] = None, + *, + data: Any = None, + indent: Union[None, int, str] = 2, + highlight: bool = True, + skip_keys: bool = False, + ensure_ascii: bool = False, + check_circular: bool = True, + allow_nan: bool = True, + default: Optional[Callable[[Any], Any]] = None, + sort_keys: bool = False, +) -> None: + """Pretty prints JSON. Output will be valid JSON. + + Args: + json (str): A string containing JSON. + data (Any): If json is not supplied, then encode this data. + indent (int, optional): Number of spaces to indent. Defaults to 2. + highlight (bool, optional): Enable highlighting of output: Defaults to True. + skip_keys (bool, optional): Skip keys not of a basic type. Defaults to False. + ensure_ascii (bool, optional): Escape all non-ascii characters. Defaults to False. + check_circular (bool, optional): Check for circular references. Defaults to True. + allow_nan (bool, optional): Allow NaN and Infinity values. Defaults to True. + default (Callable, optional): A callable that converts values that can not be encoded + in to something that can be JSON encoded. Defaults to None. + sort_keys (bool, optional): Sort dictionary keys. Defaults to False. + """ + + get_console().print_json( + json, + data=data, + indent=indent, + highlight=highlight, + skip_keys=skip_keys, + ensure_ascii=ensure_ascii, + check_circular=check_circular, + allow_nan=allow_nan, + default=default, + sort_keys=sort_keys, + ) + + +def inspect( + obj: Any, + *, + console: Optional["Console"] = None, + title: Optional[str] = None, + help: bool = False, + methods: bool = False, + docs: bool = True, + private: bool = False, + dunder: bool = False, + sort: bool = True, + all: bool = False, + value: bool = True, +) -> None: + """Inspect any Python object. + + * inspect() to see summarized info. + * inspect(, methods=True) to see methods. + * inspect(, help=True) to see full (non-abbreviated) help. + * inspect(, private=True) to see private attributes (single underscore). + * inspect(, dunder=True) to see attributes beginning with double underscore. + * inspect(, all=True) to see all attributes. + + Args: + obj (Any): An object to inspect. + title (str, optional): Title to display over inspect result, or None use type. Defaults to None. + help (bool, optional): Show full help text rather than just first paragraph. Defaults to False. + methods (bool, optional): Enable inspection of callables. Defaults to False. + docs (bool, optional): Also render doc strings. Defaults to True. + private (bool, optional): Show private attributes (beginning with underscore). Defaults to False. + dunder (bool, optional): Show attributes starting with double underscore. Defaults to False. + sort (bool, optional): Sort attributes alphabetically. Defaults to True. + all (bool, optional): Show all attributes. Defaults to False. + value (bool, optional): Pretty print value. Defaults to True. 
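+
+    For example (an illustrative call; output is rendered to the global console)::
+
+        from pip._vendor.rich import inspect
+        inspect(str, methods=True)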
+ """ + _console = console or get_console() + from pip._vendor.rich._inspect import Inspect + + # Special case for inspect(inspect) + is_inspect = obj is inspect + + _inspect = Inspect( + obj, + title=title, + help=is_inspect or help, + methods=is_inspect or methods, + docs=is_inspect or docs, + private=private, + dunder=dunder, + sort=sort, + all=all, + value=value, + ) + _console.print(_inspect) + + +if __name__ == "__main__": # pragma: no cover + print("Hello, **World**") diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/__main__.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/__main__.py new file mode 100644 index 0000000..270629f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/__main__.py @@ -0,0 +1,274 @@ +import colorsys +import io +from time import process_time + +from pip._vendor.rich import box +from pip._vendor.rich.color import Color +from pip._vendor.rich.console import Console, ConsoleOptions, Group, RenderableType, RenderResult +from pip._vendor.rich.markdown import Markdown +from pip._vendor.rich.measure import Measurement +from pip._vendor.rich.pretty import Pretty +from pip._vendor.rich.segment import Segment +from pip._vendor.rich.style import Style +from pip._vendor.rich.syntax import Syntax +from pip._vendor.rich.table import Table +from pip._vendor.rich.text import Text + + +class ColorBox: + def __rich_console__( + self, console: Console, options: ConsoleOptions + ) -> RenderResult: + for y in range(0, 5): + for x in range(options.max_width): + h = x / options.max_width + l = 0.1 + ((y / 5) * 0.7) + r1, g1, b1 = colorsys.hls_to_rgb(h, l, 1.0) + r2, g2, b2 = colorsys.hls_to_rgb(h, l + 0.7 / 10, 1.0) + bgcolor = Color.from_rgb(r1 * 255, g1 * 255, b1 * 255) + color = Color.from_rgb(r2 * 255, g2 * 255, b2 * 255) + yield Segment("▄", Style(color=color, bgcolor=bgcolor)) + yield Segment.line() + + def __rich_measure__( + self, console: "Console", options: ConsoleOptions + ) -> Measurement: + return Measurement(1, options.max_width) + + +def make_test_card() -> Table: + """Get a renderable that demonstrates a number of features.""" + table = Table.grid(padding=1, pad_edge=True) + table.title = "Rich features" + table.add_column("Feature", no_wrap=True, justify="center", style="bold red") + table.add_column("Demonstration") + + color_table = Table( + box=None, + expand=False, + show_header=False, + show_edge=False, + pad_edge=False, + ) + color_table.add_row( + ( + "✓ [bold green]4-bit color[/]\n" + "✓ [bold blue]8-bit color[/]\n" + "✓ [bold magenta]Truecolor (16.7 million)[/]\n" + "✓ [bold yellow]Dumb terminals[/]\n" + "✓ [bold cyan]Automatic color conversion" + ), + ColorBox(), + ) + + table.add_row("Colors", color_table) + + table.add_row( + "Styles", + "All ansi styles: [bold]bold[/], [dim]dim[/], [italic]italic[/italic], [underline]underline[/], [strike]strikethrough[/], [reverse]reverse[/], and even [blink]blink[/].", + ) + + lorem = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Quisque in metus sed sapien ultricies pretium a at justo. Maecenas luctus velit et auctor maximus." + lorem_table = Table.grid(padding=1, collapse_padding=True) + lorem_table.pad_edge = False + lorem_table.add_row( + Text(lorem, justify="left", style="green"), + Text(lorem, justify="center", style="yellow"), + Text(lorem, justify="right", style="blue"), + Text(lorem, justify="full", style="red"), + ) + table.add_row( + "Text", + Group( + Text.from_markup( + """Word wrap text. 
Justify [green]left[/], [yellow]center[/], [blue]right[/] or [red]full[/].\n""" + ), + lorem_table, + ), + ) + + def comparison(renderable1: RenderableType, renderable2: RenderableType) -> Table: + table = Table(show_header=False, pad_edge=False, box=None, expand=True) + table.add_column("1", ratio=1) + table.add_column("2", ratio=1) + table.add_row(renderable1, renderable2) + return table + + table.add_row( + "Asian\nlanguage\nsupport", + ":flag_for_china: 该库支持中文,日文和韩文文本!\n:flag_for_japan: ライブラリは中国語、日本語、韓国語のテキストをサポートしています\n:flag_for_south_korea: 이 라이브러리는 중국어, 일본어 및 한국어 텍스트를 지원합니다", + ) + + markup_example = ( + "[bold magenta]Rich[/] supports a simple [i]bbcode[/i]-like [b]markup[/b] for [yellow]color[/], [underline]style[/], and emoji! " + ":+1: :apple: :ant: :bear: :baguette_bread: :bus: " + ) + table.add_row("Markup", markup_example) + + example_table = Table( + show_edge=False, + show_header=True, + expand=False, + row_styles=["none", "dim"], + box=box.SIMPLE, + ) + example_table.add_column("[green]Date", style="green", no_wrap=True) + example_table.add_column("[blue]Title", style="blue") + example_table.add_column( + "[cyan]Production Budget", + style="cyan", + justify="right", + no_wrap=True, + ) + example_table.add_column( + "[magenta]Box Office", + style="magenta", + justify="right", + no_wrap=True, + ) + example_table.add_row( + "Dec 20, 2019", + "Star Wars: The Rise of Skywalker", + "$275,000,000", + "$375,126,118", + ) + example_table.add_row( + "May 25, 2018", + "[b]Solo[/]: A Star Wars Story", + "$275,000,000", + "$393,151,347", + ) + example_table.add_row( + "Dec 15, 2017", + "Star Wars Ep. VIII: The Last Jedi", + "$262,000,000", + "[bold]$1,332,539,889[/bold]", + ) + example_table.add_row( + "May 19, 1999", + "Star Wars Ep. [b]I[/b]: [i]The phantom Menace", + "$115,000,000", + "$1,027,044,677", + ) + + table.add_row("Tables", example_table) + + code = '''\ +def iter_last(values: Iterable[T]) -> Iterable[Tuple[bool, T]]: + """Iterate and generate a tuple with a flag for last value.""" + iter_values = iter(values) + try: + previous_value = next(iter_values) + except StopIteration: + return + for value in iter_values: + yield False, previous_value + previous_value = value + yield True, previous_value''' + + pretty_data = { + "foo": [ + 3.1427, + ( + "Paul Atreides", + "Vladimir Harkonnen", + "Thufir Hawat", + ), + ], + "atomic": (False, True, None), + } + table.add_row( + "Syntax\nhighlighting\n&\npretty\nprinting", + comparison( + Syntax(code, "python3", line_numbers=True, indent_guides=True), + Pretty(pretty_data, indent_guides=True), + ), + ) + + markdown_example = """\ +# Markdown + +Supports much of the *markdown* __syntax__! + +- Headers +- Basic formatting: **bold**, *italic*, `code` +- Block quotes +- Lists, and more... 
+ """ + table.add_row( + "Markdown", comparison("[cyan]" + markdown_example, Markdown(markdown_example)) + ) + + table.add_row( + "+more!", + """Progress bars, columns, styled logging handler, tracebacks, etc...""", + ) + return table + + +if __name__ == "__main__": # pragma: no cover + + console = Console( + file=io.StringIO(), + force_terminal=True, + ) + test_card = make_test_card() + + # Print once to warm cache + start = process_time() + console.print(test_card) + pre_cache_taken = round((process_time() - start) * 1000.0, 1) + + console.file = io.StringIO() + + start = process_time() + console.print(test_card) + taken = round((process_time() - start) * 1000.0, 1) + + c = Console(record=True) + c.print(test_card) + + print(f"rendered in {pre_cache_taken}ms (cold cache)") + print(f"rendered in {taken}ms (warm cache)") + + from pip._vendor.rich.panel import Panel + + console = Console() + + sponsor_message = Table.grid(padding=1) + sponsor_message.add_column(style="green", justify="right") + sponsor_message.add_column(no_wrap=True) + + sponsor_message.add_row( + "Textualize", + "[u blue link=https://github.com/textualize]https://github.com/textualize", + ) + sponsor_message.add_row( + "Twitter", + "[u blue link=https://twitter.com/willmcgugan]https://twitter.com/willmcgugan", + ) + + intro_message = Text.from_markup( + """\ +We hope you enjoy using Rich! + +Rich is maintained with [red]:heart:[/] by [link=https://www.textualize.io]Textualize.io[/] + +- Will McGugan""" + ) + + message = Table.grid(padding=2) + message.add_column() + message.add_column(no_wrap=True) + message.add_row(intro_message, sponsor_message) + + console.print( + Panel.fit( + message, + box=box.ROUNDED, + padding=(1, 2), + title="[b red]Thanks for trying out Rich!", + border_style="bright_blue", + ), + justify="center", + ) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_cell_widths.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_cell_widths.py new file mode 100644 index 0000000..36286df --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_cell_widths.py @@ -0,0 +1,451 @@ +# Auto generated by make_terminal_widths.py + +CELL_WIDTHS = [ + (0, 0, 0), + (1, 31, -1), + (127, 159, -1), + (768, 879, 0), + (1155, 1161, 0), + (1425, 1469, 0), + (1471, 1471, 0), + (1473, 1474, 0), + (1476, 1477, 0), + (1479, 1479, 0), + (1552, 1562, 0), + (1611, 1631, 0), + (1648, 1648, 0), + (1750, 1756, 0), + (1759, 1764, 0), + (1767, 1768, 0), + (1770, 1773, 0), + (1809, 1809, 0), + (1840, 1866, 0), + (1958, 1968, 0), + (2027, 2035, 0), + (2045, 2045, 0), + (2070, 2073, 0), + (2075, 2083, 0), + (2085, 2087, 0), + (2089, 2093, 0), + (2137, 2139, 0), + (2259, 2273, 0), + (2275, 2306, 0), + (2362, 2362, 0), + (2364, 2364, 0), + (2369, 2376, 0), + (2381, 2381, 0), + (2385, 2391, 0), + (2402, 2403, 0), + (2433, 2433, 0), + (2492, 2492, 0), + (2497, 2500, 0), + (2509, 2509, 0), + (2530, 2531, 0), + (2558, 2558, 0), + (2561, 2562, 0), + (2620, 2620, 0), + (2625, 2626, 0), + (2631, 2632, 0), + (2635, 2637, 0), + (2641, 2641, 0), + (2672, 2673, 0), + (2677, 2677, 0), + (2689, 2690, 0), + (2748, 2748, 0), + (2753, 2757, 0), + (2759, 2760, 0), + (2765, 2765, 0), + (2786, 2787, 0), + (2810, 2815, 0), + (2817, 2817, 0), + (2876, 2876, 0), + (2879, 2879, 0), + (2881, 2884, 0), + (2893, 2893, 0), + (2901, 2902, 0), + (2914, 2915, 0), + (2946, 2946, 0), + (3008, 3008, 0), + (3021, 3021, 0), + (3072, 3072, 0), + (3076, 3076, 0), + (3134, 3136, 0), + (3142, 3144, 0), + (3146, 3149, 0), + (3157, 3158, 0), + 
(3170, 3171, 0), + (3201, 3201, 0), + (3260, 3260, 0), + (3263, 3263, 0), + (3270, 3270, 0), + (3276, 3277, 0), + (3298, 3299, 0), + (3328, 3329, 0), + (3387, 3388, 0), + (3393, 3396, 0), + (3405, 3405, 0), + (3426, 3427, 0), + (3457, 3457, 0), + (3530, 3530, 0), + (3538, 3540, 0), + (3542, 3542, 0), + (3633, 3633, 0), + (3636, 3642, 0), + (3655, 3662, 0), + (3761, 3761, 0), + (3764, 3772, 0), + (3784, 3789, 0), + (3864, 3865, 0), + (3893, 3893, 0), + (3895, 3895, 0), + (3897, 3897, 0), + (3953, 3966, 0), + (3968, 3972, 0), + (3974, 3975, 0), + (3981, 3991, 0), + (3993, 4028, 0), + (4038, 4038, 0), + (4141, 4144, 0), + (4146, 4151, 0), + (4153, 4154, 0), + (4157, 4158, 0), + (4184, 4185, 0), + (4190, 4192, 0), + (4209, 4212, 0), + (4226, 4226, 0), + (4229, 4230, 0), + (4237, 4237, 0), + (4253, 4253, 0), + (4352, 4447, 2), + (4957, 4959, 0), + (5906, 5908, 0), + (5938, 5940, 0), + (5970, 5971, 0), + (6002, 6003, 0), + (6068, 6069, 0), + (6071, 6077, 0), + (6086, 6086, 0), + (6089, 6099, 0), + (6109, 6109, 0), + (6155, 6157, 0), + (6277, 6278, 0), + (6313, 6313, 0), + (6432, 6434, 0), + (6439, 6440, 0), + (6450, 6450, 0), + (6457, 6459, 0), + (6679, 6680, 0), + (6683, 6683, 0), + (6742, 6742, 0), + (6744, 6750, 0), + (6752, 6752, 0), + (6754, 6754, 0), + (6757, 6764, 0), + (6771, 6780, 0), + (6783, 6783, 0), + (6832, 6848, 0), + (6912, 6915, 0), + (6964, 6964, 0), + (6966, 6970, 0), + (6972, 6972, 0), + (6978, 6978, 0), + (7019, 7027, 0), + (7040, 7041, 0), + (7074, 7077, 0), + (7080, 7081, 0), + (7083, 7085, 0), + (7142, 7142, 0), + (7144, 7145, 0), + (7149, 7149, 0), + (7151, 7153, 0), + (7212, 7219, 0), + (7222, 7223, 0), + (7376, 7378, 0), + (7380, 7392, 0), + (7394, 7400, 0), + (7405, 7405, 0), + (7412, 7412, 0), + (7416, 7417, 0), + (7616, 7673, 0), + (7675, 7679, 0), + (8203, 8207, 0), + (8232, 8238, 0), + (8288, 8291, 0), + (8400, 8432, 0), + (8986, 8987, 2), + (9001, 9002, 2), + (9193, 9196, 2), + (9200, 9200, 2), + (9203, 9203, 2), + (9725, 9726, 2), + (9748, 9749, 2), + (9800, 9811, 2), + (9855, 9855, 2), + (9875, 9875, 2), + (9889, 9889, 2), + (9898, 9899, 2), + (9917, 9918, 2), + (9924, 9925, 2), + (9934, 9934, 2), + (9940, 9940, 2), + (9962, 9962, 2), + (9970, 9971, 2), + (9973, 9973, 2), + (9978, 9978, 2), + (9981, 9981, 2), + (9989, 9989, 2), + (9994, 9995, 2), + (10024, 10024, 2), + (10060, 10060, 2), + (10062, 10062, 2), + (10067, 10069, 2), + (10071, 10071, 2), + (10133, 10135, 2), + (10160, 10160, 2), + (10175, 10175, 2), + (11035, 11036, 2), + (11088, 11088, 2), + (11093, 11093, 2), + (11503, 11505, 0), + (11647, 11647, 0), + (11744, 11775, 0), + (11904, 11929, 2), + (11931, 12019, 2), + (12032, 12245, 2), + (12272, 12283, 2), + (12288, 12329, 2), + (12330, 12333, 0), + (12334, 12350, 2), + (12353, 12438, 2), + (12441, 12442, 0), + (12443, 12543, 2), + (12549, 12591, 2), + (12593, 12686, 2), + (12688, 12771, 2), + (12784, 12830, 2), + (12832, 12871, 2), + (12880, 19903, 2), + (19968, 42124, 2), + (42128, 42182, 2), + (42607, 42610, 0), + (42612, 42621, 0), + (42654, 42655, 0), + (42736, 42737, 0), + (43010, 43010, 0), + (43014, 43014, 0), + (43019, 43019, 0), + (43045, 43046, 0), + (43052, 43052, 0), + (43204, 43205, 0), + (43232, 43249, 0), + (43263, 43263, 0), + (43302, 43309, 0), + (43335, 43345, 0), + (43360, 43388, 2), + (43392, 43394, 0), + (43443, 43443, 0), + (43446, 43449, 0), + (43452, 43453, 0), + (43493, 43493, 0), + (43561, 43566, 0), + (43569, 43570, 0), + (43573, 43574, 0), + (43587, 43587, 0), + (43596, 43596, 0), + (43644, 43644, 0), + (43696, 43696, 0), 
+ (43698, 43700, 0), + (43703, 43704, 0), + (43710, 43711, 0), + (43713, 43713, 0), + (43756, 43757, 0), + (43766, 43766, 0), + (44005, 44005, 0), + (44008, 44008, 0), + (44013, 44013, 0), + (44032, 55203, 2), + (63744, 64255, 2), + (64286, 64286, 0), + (65024, 65039, 0), + (65040, 65049, 2), + (65056, 65071, 0), + (65072, 65106, 2), + (65108, 65126, 2), + (65128, 65131, 2), + (65281, 65376, 2), + (65504, 65510, 2), + (66045, 66045, 0), + (66272, 66272, 0), + (66422, 66426, 0), + (68097, 68099, 0), + (68101, 68102, 0), + (68108, 68111, 0), + (68152, 68154, 0), + (68159, 68159, 0), + (68325, 68326, 0), + (68900, 68903, 0), + (69291, 69292, 0), + (69446, 69456, 0), + (69633, 69633, 0), + (69688, 69702, 0), + (69759, 69761, 0), + (69811, 69814, 0), + (69817, 69818, 0), + (69888, 69890, 0), + (69927, 69931, 0), + (69933, 69940, 0), + (70003, 70003, 0), + (70016, 70017, 0), + (70070, 70078, 0), + (70089, 70092, 0), + (70095, 70095, 0), + (70191, 70193, 0), + (70196, 70196, 0), + (70198, 70199, 0), + (70206, 70206, 0), + (70367, 70367, 0), + (70371, 70378, 0), + (70400, 70401, 0), + (70459, 70460, 0), + (70464, 70464, 0), + (70502, 70508, 0), + (70512, 70516, 0), + (70712, 70719, 0), + (70722, 70724, 0), + (70726, 70726, 0), + (70750, 70750, 0), + (70835, 70840, 0), + (70842, 70842, 0), + (70847, 70848, 0), + (70850, 70851, 0), + (71090, 71093, 0), + (71100, 71101, 0), + (71103, 71104, 0), + (71132, 71133, 0), + (71219, 71226, 0), + (71229, 71229, 0), + (71231, 71232, 0), + (71339, 71339, 0), + (71341, 71341, 0), + (71344, 71349, 0), + (71351, 71351, 0), + (71453, 71455, 0), + (71458, 71461, 0), + (71463, 71467, 0), + (71727, 71735, 0), + (71737, 71738, 0), + (71995, 71996, 0), + (71998, 71998, 0), + (72003, 72003, 0), + (72148, 72151, 0), + (72154, 72155, 0), + (72160, 72160, 0), + (72193, 72202, 0), + (72243, 72248, 0), + (72251, 72254, 0), + (72263, 72263, 0), + (72273, 72278, 0), + (72281, 72283, 0), + (72330, 72342, 0), + (72344, 72345, 0), + (72752, 72758, 0), + (72760, 72765, 0), + (72767, 72767, 0), + (72850, 72871, 0), + (72874, 72880, 0), + (72882, 72883, 0), + (72885, 72886, 0), + (73009, 73014, 0), + (73018, 73018, 0), + (73020, 73021, 0), + (73023, 73029, 0), + (73031, 73031, 0), + (73104, 73105, 0), + (73109, 73109, 0), + (73111, 73111, 0), + (73459, 73460, 0), + (92912, 92916, 0), + (92976, 92982, 0), + (94031, 94031, 0), + (94095, 94098, 0), + (94176, 94179, 2), + (94180, 94180, 0), + (94192, 94193, 2), + (94208, 100343, 2), + (100352, 101589, 2), + (101632, 101640, 2), + (110592, 110878, 2), + (110928, 110930, 2), + (110948, 110951, 2), + (110960, 111355, 2), + (113821, 113822, 0), + (119143, 119145, 0), + (119163, 119170, 0), + (119173, 119179, 0), + (119210, 119213, 0), + (119362, 119364, 0), + (121344, 121398, 0), + (121403, 121452, 0), + (121461, 121461, 0), + (121476, 121476, 0), + (121499, 121503, 0), + (121505, 121519, 0), + (122880, 122886, 0), + (122888, 122904, 0), + (122907, 122913, 0), + (122915, 122916, 0), + (122918, 122922, 0), + (123184, 123190, 0), + (123628, 123631, 0), + (125136, 125142, 0), + (125252, 125258, 0), + (126980, 126980, 2), + (127183, 127183, 2), + (127374, 127374, 2), + (127377, 127386, 2), + (127488, 127490, 2), + (127504, 127547, 2), + (127552, 127560, 2), + (127568, 127569, 2), + (127584, 127589, 2), + (127744, 127776, 2), + (127789, 127797, 2), + (127799, 127868, 2), + (127870, 127891, 2), + (127904, 127946, 2), + (127951, 127955, 2), + (127968, 127984, 2), + (127988, 127988, 2), + (127992, 128062, 2), + (128064, 128064, 2), + (128066, 
128252, 2), + (128255, 128317, 2), + (128331, 128334, 2), + (128336, 128359, 2), + (128378, 128378, 2), + (128405, 128406, 2), + (128420, 128420, 2), + (128507, 128591, 2), + (128640, 128709, 2), + (128716, 128716, 2), + (128720, 128722, 2), + (128725, 128727, 2), + (128747, 128748, 2), + (128756, 128764, 2), + (128992, 129003, 2), + (129292, 129338, 2), + (129340, 129349, 2), + (129351, 129400, 2), + (129402, 129483, 2), + (129485, 129535, 2), + (129648, 129652, 2), + (129656, 129658, 2), + (129664, 129670, 2), + (129680, 129704, 2), + (129712, 129718, 2), + (129728, 129730, 2), + (129744, 129750, 2), + (131072, 196605, 2), + (196608, 262141, 2), + (917760, 917999, 0), +] diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_emoji_codes.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_emoji_codes.py new file mode 100644 index 0000000..1f2877b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_emoji_codes.py @@ -0,0 +1,3610 @@ +EMOJI = { + "1st_place_medal": "🥇", + "2nd_place_medal": "🥈", + "3rd_place_medal": "🥉", + "ab_button_(blood_type)": "🆎", + "atm_sign": "🏧", + "a_button_(blood_type)": "🅰", + "afghanistan": "🇦🇫", + "albania": "🇦🇱", + "algeria": "🇩🇿", + "american_samoa": "🇦🇸", + "andorra": "🇦🇩", + "angola": "🇦🇴", + "anguilla": "🇦🇮", + "antarctica": "🇦🇶", + "antigua_&_barbuda": "🇦🇬", + "aquarius": "♒", + "argentina": "🇦🇷", + "aries": "♈", + "armenia": "🇦🇲", + "aruba": "🇦🇼", + "ascension_island": "🇦🇨", + "australia": "🇦🇺", + "austria": "🇦🇹", + "azerbaijan": "🇦🇿", + "back_arrow": "🔙", + "b_button_(blood_type)": "🅱", + "bahamas": "🇧🇸", + "bahrain": "🇧🇭", + "bangladesh": "🇧🇩", + "barbados": "🇧🇧", + "belarus": "🇧🇾", + "belgium": "🇧🇪", + "belize": "🇧🇿", + "benin": "🇧🇯", + "bermuda": "🇧🇲", + "bhutan": "🇧🇹", + "bolivia": "🇧🇴", + "bosnia_&_herzegovina": "🇧🇦", + "botswana": "🇧🇼", + "bouvet_island": "🇧🇻", + "brazil": "🇧🇷", + "british_indian_ocean_territory": "🇮🇴", + "british_virgin_islands": "🇻🇬", + "brunei": "🇧🇳", + "bulgaria": "🇧🇬", + "burkina_faso": "🇧🇫", + "burundi": "🇧🇮", + "cl_button": "🆑", + "cool_button": "🆒", + "cambodia": "🇰🇭", + "cameroon": "🇨🇲", + "canada": "🇨🇦", + "canary_islands": "🇮🇨", + "cancer": "♋", + "cape_verde": "🇨🇻", + "capricorn": "♑", + "caribbean_netherlands": "🇧🇶", + "cayman_islands": "🇰🇾", + "central_african_republic": "🇨🇫", + "ceuta_&_melilla": "🇪🇦", + "chad": "🇹🇩", + "chile": "🇨🇱", + "china": "🇨🇳", + "christmas_island": "🇨🇽", + "christmas_tree": "🎄", + "clipperton_island": "🇨🇵", + "cocos_(keeling)_islands": "🇨🇨", + "colombia": "🇨🇴", + "comoros": "🇰🇲", + "congo_-_brazzaville": "🇨🇬", + "congo_-_kinshasa": "🇨🇩", + "cook_islands": "🇨🇰", + "costa_rica": "🇨🇷", + "croatia": "🇭🇷", + "cuba": "🇨🇺", + "curaçao": "🇨🇼", + "cyprus": "🇨🇾", + "czechia": "🇨🇿", + "côte_d’ivoire": "🇨🇮", + "denmark": "🇩🇰", + "diego_garcia": "🇩🇬", + "djibouti": "🇩🇯", + "dominica": "🇩🇲", + "dominican_republic": "🇩🇴", + "end_arrow": "🔚", + "ecuador": "🇪🇨", + "egypt": "🇪🇬", + "el_salvador": "🇸🇻", + "england": "🏴\U000e0067\U000e0062\U000e0065\U000e006e\U000e0067\U000e007f", + "equatorial_guinea": "🇬🇶", + "eritrea": "🇪🇷", + "estonia": "🇪🇪", + "ethiopia": "🇪🇹", + "european_union": "🇪🇺", + "free_button": "🆓", + "falkland_islands": "🇫🇰", + "faroe_islands": "🇫🇴", + "fiji": "🇫🇯", + "finland": "🇫🇮", + "france": "🇫🇷", + "french_guiana": "🇬🇫", + "french_polynesia": "🇵🇫", + "french_southern_territories": "🇹🇫", + "gabon": "🇬🇦", + "gambia": "🇬🇲", + "gemini": "♊", + "georgia": "🇬🇪", + "germany": "🇩🇪", + "ghana": "🇬🇭", + "gibraltar": "🇬🇮", + "greece": "🇬🇷", + "greenland": "🇬🇱", + 
"grenada": "🇬🇩", + "guadeloupe": "🇬🇵", + "guam": "🇬🇺", + "guatemala": "🇬🇹", + "guernsey": "🇬🇬", + "guinea": "🇬🇳", + "guinea-bissau": "🇬🇼", + "guyana": "🇬🇾", + "haiti": "🇭🇹", + "heard_&_mcdonald_islands": "🇭🇲", + "honduras": "🇭🇳", + "hong_kong_sar_china": "🇭🇰", + "hungary": "🇭🇺", + "id_button": "🆔", + "iceland": "🇮🇸", + "india": "🇮🇳", + "indonesia": "🇮🇩", + "iran": "🇮🇷", + "iraq": "🇮🇶", + "ireland": "🇮🇪", + "isle_of_man": "🇮🇲", + "israel": "🇮🇱", + "italy": "🇮🇹", + "jamaica": "🇯🇲", + "japan": "🗾", + "japanese_acceptable_button": "🉑", + "japanese_application_button": "🈸", + "japanese_bargain_button": "🉐", + "japanese_castle": "🏯", + "japanese_congratulations_button": "㊗", + "japanese_discount_button": "🈹", + "japanese_dolls": "🎎", + "japanese_free_of_charge_button": "🈚", + "japanese_here_button": "🈁", + "japanese_monthly_amount_button": "🈷", + "japanese_no_vacancy_button": "🈵", + "japanese_not_free_of_charge_button": "🈶", + "japanese_open_for_business_button": "🈺", + "japanese_passing_grade_button": "🈴", + "japanese_post_office": "🏣", + "japanese_prohibited_button": "🈲", + "japanese_reserved_button": "🈯", + "japanese_secret_button": "㊙", + "japanese_service_charge_button": "🈂", + "japanese_symbol_for_beginner": "🔰", + "japanese_vacancy_button": "🈳", + "jersey": "🇯🇪", + "jordan": "🇯🇴", + "kazakhstan": "🇰🇿", + "kenya": "🇰🇪", + "kiribati": "🇰🇮", + "kosovo": "🇽🇰", + "kuwait": "🇰🇼", + "kyrgyzstan": "🇰🇬", + "laos": "🇱🇦", + "latvia": "🇱🇻", + "lebanon": "🇱🇧", + "leo": "♌", + "lesotho": "🇱🇸", + "liberia": "🇱🇷", + "libra": "♎", + "libya": "🇱🇾", + "liechtenstein": "🇱🇮", + "lithuania": "🇱🇹", + "luxembourg": "🇱🇺", + "macau_sar_china": "🇲🇴", + "macedonia": "🇲🇰", + "madagascar": "🇲🇬", + "malawi": "🇲🇼", + "malaysia": "🇲🇾", + "maldives": "🇲🇻", + "mali": "🇲🇱", + "malta": "🇲🇹", + "marshall_islands": "🇲🇭", + "martinique": "🇲🇶", + "mauritania": "🇲🇷", + "mauritius": "🇲🇺", + "mayotte": "🇾🇹", + "mexico": "🇲🇽", + "micronesia": "🇫🇲", + "moldova": "🇲🇩", + "monaco": "🇲🇨", + "mongolia": "🇲🇳", + "montenegro": "🇲🇪", + "montserrat": "🇲🇸", + "morocco": "🇲🇦", + "mozambique": "🇲🇿", + "mrs._claus": "🤶", + "mrs._claus_dark_skin_tone": "🤶🏿", + "mrs._claus_light_skin_tone": "🤶🏻", + "mrs._claus_medium-dark_skin_tone": "🤶🏾", + "mrs._claus_medium-light_skin_tone": "🤶🏼", + "mrs._claus_medium_skin_tone": "🤶🏽", + "myanmar_(burma)": "🇲🇲", + "new_button": "🆕", + "ng_button": "🆖", + "namibia": "🇳🇦", + "nauru": "🇳🇷", + "nepal": "🇳🇵", + "netherlands": "🇳🇱", + "new_caledonia": "🇳🇨", + "new_zealand": "🇳🇿", + "nicaragua": "🇳🇮", + "niger": "🇳🇪", + "nigeria": "🇳🇬", + "niue": "🇳🇺", + "norfolk_island": "🇳🇫", + "north_korea": "🇰🇵", + "northern_mariana_islands": "🇲🇵", + "norway": "🇳🇴", + "ok_button": "🆗", + "ok_hand": "👌", + "ok_hand_dark_skin_tone": "👌🏿", + "ok_hand_light_skin_tone": "👌🏻", + "ok_hand_medium-dark_skin_tone": "👌🏾", + "ok_hand_medium-light_skin_tone": "👌🏼", + "ok_hand_medium_skin_tone": "👌🏽", + "on!_arrow": "🔛", + "o_button_(blood_type)": "🅾", + "oman": "🇴🇲", + "ophiuchus": "⛎", + "p_button": "🅿", + "pakistan": "🇵🇰", + "palau": "🇵🇼", + "palestinian_territories": "🇵🇸", + "panama": "🇵🇦", + "papua_new_guinea": "🇵🇬", + "paraguay": "🇵🇾", + "peru": "🇵🇪", + "philippines": "🇵🇭", + "pisces": "♓", + "pitcairn_islands": "🇵🇳", + "poland": "🇵🇱", + "portugal": "🇵🇹", + "puerto_rico": "🇵🇷", + "qatar": "🇶🇦", + "romania": "🇷🇴", + "russia": "🇷🇺", + "rwanda": "🇷🇼", + "réunion": "🇷🇪", + "soon_arrow": "🔜", + "sos_button": "🆘", + "sagittarius": "♐", + "samoa": "🇼🇸", + "san_marino": "🇸🇲", + "santa_claus": "🎅", + "santa_claus_dark_skin_tone": "🎅🏿", + 
"santa_claus_light_skin_tone": "🎅🏻", + "santa_claus_medium-dark_skin_tone": "🎅🏾", + "santa_claus_medium-light_skin_tone": "🎅🏼", + "santa_claus_medium_skin_tone": "🎅🏽", + "saudi_arabia": "🇸🇦", + "scorpio": "♏", + "scotland": "🏴\U000e0067\U000e0062\U000e0073\U000e0063\U000e0074\U000e007f", + "senegal": "🇸🇳", + "serbia": "🇷🇸", + "seychelles": "🇸🇨", + "sierra_leone": "🇸🇱", + "singapore": "🇸🇬", + "sint_maarten": "🇸🇽", + "slovakia": "🇸🇰", + "slovenia": "🇸🇮", + "solomon_islands": "🇸🇧", + "somalia": "🇸🇴", + "south_africa": "🇿🇦", + "south_georgia_&_south_sandwich_islands": "🇬🇸", + "south_korea": "🇰🇷", + "south_sudan": "🇸🇸", + "spain": "🇪🇸", + "sri_lanka": "🇱🇰", + "st._barthélemy": "🇧🇱", + "st._helena": "🇸🇭", + "st._kitts_&_nevis": "🇰🇳", + "st._lucia": "🇱🇨", + "st._martin": "🇲🇫", + "st._pierre_&_miquelon": "🇵🇲", + "st._vincent_&_grenadines": "🇻🇨", + "statue_of_liberty": "🗽", + "sudan": "🇸🇩", + "suriname": "🇸🇷", + "svalbard_&_jan_mayen": "🇸🇯", + "swaziland": "🇸🇿", + "sweden": "🇸🇪", + "switzerland": "🇨🇭", + "syria": "🇸🇾", + "são_tomé_&_príncipe": "🇸🇹", + "t-rex": "🦖", + "top_arrow": "🔝", + "taiwan": "🇹🇼", + "tajikistan": "🇹🇯", + "tanzania": "🇹🇿", + "taurus": "♉", + "thailand": "🇹🇭", + "timor-leste": "🇹🇱", + "togo": "🇹🇬", + "tokelau": "🇹🇰", + "tokyo_tower": "🗼", + "tonga": "🇹🇴", + "trinidad_&_tobago": "🇹🇹", + "tristan_da_cunha": "🇹🇦", + "tunisia": "🇹🇳", + "turkey": "🦃", + "turkmenistan": "🇹🇲", + "turks_&_caicos_islands": "🇹🇨", + "tuvalu": "🇹🇻", + "u.s._outlying_islands": "🇺🇲", + "u.s._virgin_islands": "🇻🇮", + "up!_button": "🆙", + "uganda": "🇺🇬", + "ukraine": "🇺🇦", + "united_arab_emirates": "🇦🇪", + "united_kingdom": "🇬🇧", + "united_nations": "🇺🇳", + "united_states": "🇺🇸", + "uruguay": "🇺🇾", + "uzbekistan": "🇺🇿", + "vs_button": "🆚", + "vanuatu": "🇻🇺", + "vatican_city": "🇻🇦", + "venezuela": "🇻🇪", + "vietnam": "🇻🇳", + "virgo": "♍", + "wales": "🏴\U000e0067\U000e0062\U000e0077\U000e006c\U000e0073\U000e007f", + "wallis_&_futuna": "🇼🇫", + "western_sahara": "🇪🇭", + "yemen": "🇾🇪", + "zambia": "🇿🇲", + "zimbabwe": "🇿🇼", + "abacus": "🧮", + "adhesive_bandage": "🩹", + "admission_tickets": "🎟", + "adult": "🧑", + "adult_dark_skin_tone": "🧑🏿", + "adult_light_skin_tone": "🧑🏻", + "adult_medium-dark_skin_tone": "🧑🏾", + "adult_medium-light_skin_tone": "🧑🏼", + "adult_medium_skin_tone": "🧑🏽", + "aerial_tramway": "🚡", + "airplane": "✈", + "airplane_arrival": "🛬", + "airplane_departure": "🛫", + "alarm_clock": "⏰", + "alembic": "⚗", + "alien": "👽", + "alien_monster": "👾", + "ambulance": "🚑", + "american_football": "🏈", + "amphora": "🏺", + "anchor": "⚓", + "anger_symbol": "💢", + "angry_face": "😠", + "angry_face_with_horns": "👿", + "anguished_face": "😧", + "ant": "🐜", + "antenna_bars": "📶", + "anxious_face_with_sweat": "😰", + "articulated_lorry": "🚛", + "artist_palette": "🎨", + "astonished_face": "😲", + "atom_symbol": "⚛", + "auto_rickshaw": "🛺", + "automobile": "🚗", + "avocado": "🥑", + "axe": "🪓", + "baby": "👶", + "baby_angel": "👼", + "baby_angel_dark_skin_tone": "👼🏿", + "baby_angel_light_skin_tone": "👼🏻", + "baby_angel_medium-dark_skin_tone": "👼🏾", + "baby_angel_medium-light_skin_tone": "👼🏼", + "baby_angel_medium_skin_tone": "👼🏽", + "baby_bottle": "🍼", + "baby_chick": "🐤", + "baby_dark_skin_tone": "👶🏿", + "baby_light_skin_tone": "👶🏻", + "baby_medium-dark_skin_tone": "👶🏾", + "baby_medium-light_skin_tone": "👶🏼", + "baby_medium_skin_tone": "👶🏽", + "baby_symbol": "🚼", + "backhand_index_pointing_down": "👇", + "backhand_index_pointing_down_dark_skin_tone": "👇🏿", + "backhand_index_pointing_down_light_skin_tone": "👇🏻", + 
"backhand_index_pointing_down_medium-dark_skin_tone": "👇🏾", + "backhand_index_pointing_down_medium-light_skin_tone": "👇🏼", + "backhand_index_pointing_down_medium_skin_tone": "👇🏽", + "backhand_index_pointing_left": "👈", + "backhand_index_pointing_left_dark_skin_tone": "👈🏿", + "backhand_index_pointing_left_light_skin_tone": "👈🏻", + "backhand_index_pointing_left_medium-dark_skin_tone": "👈🏾", + "backhand_index_pointing_left_medium-light_skin_tone": "👈🏼", + "backhand_index_pointing_left_medium_skin_tone": "👈🏽", + "backhand_index_pointing_right": "👉", + "backhand_index_pointing_right_dark_skin_tone": "👉🏿", + "backhand_index_pointing_right_light_skin_tone": "👉🏻", + "backhand_index_pointing_right_medium-dark_skin_tone": "👉🏾", + "backhand_index_pointing_right_medium-light_skin_tone": "👉🏼", + "backhand_index_pointing_right_medium_skin_tone": "👉🏽", + "backhand_index_pointing_up": "👆", + "backhand_index_pointing_up_dark_skin_tone": "👆🏿", + "backhand_index_pointing_up_light_skin_tone": "👆🏻", + "backhand_index_pointing_up_medium-dark_skin_tone": "👆🏾", + "backhand_index_pointing_up_medium-light_skin_tone": "👆🏼", + "backhand_index_pointing_up_medium_skin_tone": "👆🏽", + "bacon": "🥓", + "badger": "🦡", + "badminton": "🏸", + "bagel": "🥯", + "baggage_claim": "🛄", + "baguette_bread": "🥖", + "balance_scale": "⚖", + "bald": "🦲", + "bald_man": "👨\u200d🦲", + "bald_woman": "👩\u200d🦲", + "ballet_shoes": "🩰", + "balloon": "🎈", + "ballot_box_with_ballot": "🗳", + "ballot_box_with_check": "☑", + "banana": "🍌", + "banjo": "🪕", + "bank": "🏦", + "bar_chart": "📊", + "barber_pole": "💈", + "baseball": "⚾", + "basket": "🧺", + "basketball": "🏀", + "bat": "🦇", + "bathtub": "🛁", + "battery": "🔋", + "beach_with_umbrella": "🏖", + "beaming_face_with_smiling_eyes": "😁", + "bear_face": "🐻", + "bearded_person": "🧔", + "bearded_person_dark_skin_tone": "🧔🏿", + "bearded_person_light_skin_tone": "🧔🏻", + "bearded_person_medium-dark_skin_tone": "🧔🏾", + "bearded_person_medium-light_skin_tone": "🧔🏼", + "bearded_person_medium_skin_tone": "🧔🏽", + "beating_heart": "💓", + "bed": "🛏", + "beer_mug": "🍺", + "bell": "🔔", + "bell_with_slash": "🔕", + "bellhop_bell": "🛎", + "bento_box": "🍱", + "beverage_box": "🧃", + "bicycle": "🚲", + "bikini": "👙", + "billed_cap": "🧢", + "biohazard": "☣", + "bird": "🐦", + "birthday_cake": "🎂", + "black_circle": "⚫", + "black_flag": "🏴", + "black_heart": "🖤", + "black_large_square": "⬛", + "black_medium-small_square": "◾", + "black_medium_square": "◼", + "black_nib": "✒", + "black_small_square": "▪", + "black_square_button": "🔲", + "blond-haired_man": "👱\u200d♂️", + "blond-haired_man_dark_skin_tone": "👱🏿\u200d♂️", + "blond-haired_man_light_skin_tone": "👱🏻\u200d♂️", + "blond-haired_man_medium-dark_skin_tone": "👱🏾\u200d♂️", + "blond-haired_man_medium-light_skin_tone": "👱🏼\u200d♂️", + "blond-haired_man_medium_skin_tone": "👱🏽\u200d♂️", + "blond-haired_person": "👱", + "blond-haired_person_dark_skin_tone": "👱🏿", + "blond-haired_person_light_skin_tone": "👱🏻", + "blond-haired_person_medium-dark_skin_tone": "👱🏾", + "blond-haired_person_medium-light_skin_tone": "👱🏼", + "blond-haired_person_medium_skin_tone": "👱🏽", + "blond-haired_woman": "👱\u200d♀️", + "blond-haired_woman_dark_skin_tone": "👱🏿\u200d♀️", + "blond-haired_woman_light_skin_tone": "👱🏻\u200d♀️", + "blond-haired_woman_medium-dark_skin_tone": "👱🏾\u200d♀️", + "blond-haired_woman_medium-light_skin_tone": "👱🏼\u200d♀️", + "blond-haired_woman_medium_skin_tone": "👱🏽\u200d♀️", + "blossom": "🌼", + "blowfish": "🐡", + "blue_book": "📘", + "blue_circle": "🔵", + "blue_heart": "💙", + 
"blue_square": "🟦", + "boar": "🐗", + "bomb": "💣", + "bone": "🦴", + "bookmark": "🔖", + "bookmark_tabs": "📑", + "books": "📚", + "bottle_with_popping_cork": "🍾", + "bouquet": "💐", + "bow_and_arrow": "🏹", + "bowl_with_spoon": "🥣", + "bowling": "🎳", + "boxing_glove": "🥊", + "boy": "👦", + "boy_dark_skin_tone": "👦🏿", + "boy_light_skin_tone": "👦🏻", + "boy_medium-dark_skin_tone": "👦🏾", + "boy_medium-light_skin_tone": "👦🏼", + "boy_medium_skin_tone": "👦🏽", + "brain": "🧠", + "bread": "🍞", + "breast-feeding": "🤱", + "breast-feeding_dark_skin_tone": "🤱🏿", + "breast-feeding_light_skin_tone": "🤱🏻", + "breast-feeding_medium-dark_skin_tone": "🤱🏾", + "breast-feeding_medium-light_skin_tone": "🤱🏼", + "breast-feeding_medium_skin_tone": "🤱🏽", + "brick": "🧱", + "bride_with_veil": "👰", + "bride_with_veil_dark_skin_tone": "👰🏿", + "bride_with_veil_light_skin_tone": "👰🏻", + "bride_with_veil_medium-dark_skin_tone": "👰🏾", + "bride_with_veil_medium-light_skin_tone": "👰🏼", + "bride_with_veil_medium_skin_tone": "👰🏽", + "bridge_at_night": "🌉", + "briefcase": "💼", + "briefs": "🩲", + "bright_button": "🔆", + "broccoli": "🥦", + "broken_heart": "💔", + "broom": "🧹", + "brown_circle": "🟤", + "brown_heart": "🤎", + "brown_square": "🟫", + "bug": "🐛", + "building_construction": "🏗", + "bullet_train": "🚅", + "burrito": "🌯", + "bus": "🚌", + "bus_stop": "🚏", + "bust_in_silhouette": "👤", + "busts_in_silhouette": "👥", + "butter": "🧈", + "butterfly": "🦋", + "cactus": "🌵", + "calendar": "📆", + "call_me_hand": "🤙", + "call_me_hand_dark_skin_tone": "🤙🏿", + "call_me_hand_light_skin_tone": "🤙🏻", + "call_me_hand_medium-dark_skin_tone": "🤙🏾", + "call_me_hand_medium-light_skin_tone": "🤙🏼", + "call_me_hand_medium_skin_tone": "🤙🏽", + "camel": "🐫", + "camera": "📷", + "camera_with_flash": "📸", + "camping": "🏕", + "candle": "🕯", + "candy": "🍬", + "canned_food": "🥫", + "canoe": "🛶", + "card_file_box": "🗃", + "card_index": "📇", + "card_index_dividers": "🗂", + "carousel_horse": "🎠", + "carp_streamer": "🎏", + "carrot": "🥕", + "castle": "🏰", + "cat": "🐱", + "cat_face": "🐱", + "cat_face_with_tears_of_joy": "😹", + "cat_face_with_wry_smile": "😼", + "chains": "⛓", + "chair": "🪑", + "chart_decreasing": "📉", + "chart_increasing": "📈", + "chart_increasing_with_yen": "💹", + "cheese_wedge": "🧀", + "chequered_flag": "🏁", + "cherries": "🍒", + "cherry_blossom": "🌸", + "chess_pawn": "♟", + "chestnut": "🌰", + "chicken": "🐔", + "child": "🧒", + "child_dark_skin_tone": "🧒🏿", + "child_light_skin_tone": "🧒🏻", + "child_medium-dark_skin_tone": "🧒🏾", + "child_medium-light_skin_tone": "🧒🏼", + "child_medium_skin_tone": "🧒🏽", + "children_crossing": "🚸", + "chipmunk": "🐿", + "chocolate_bar": "🍫", + "chopsticks": "🥢", + "church": "⛪", + "cigarette": "🚬", + "cinema": "🎦", + "circled_m": "Ⓜ", + "circus_tent": "🎪", + "cityscape": "🏙", + "cityscape_at_dusk": "🌆", + "clamp": "🗜", + "clapper_board": "🎬", + "clapping_hands": "👏", + "clapping_hands_dark_skin_tone": "👏🏿", + "clapping_hands_light_skin_tone": "👏🏻", + "clapping_hands_medium-dark_skin_tone": "👏🏾", + "clapping_hands_medium-light_skin_tone": "👏🏼", + "clapping_hands_medium_skin_tone": "👏🏽", + "classical_building": "🏛", + "clinking_beer_mugs": "🍻", + "clinking_glasses": "🥂", + "clipboard": "📋", + "clockwise_vertical_arrows": "🔃", + "closed_book": "📕", + "closed_mailbox_with_lowered_flag": "📪", + "closed_mailbox_with_raised_flag": "📫", + "closed_umbrella": "🌂", + "cloud": "☁", + "cloud_with_lightning": "🌩", + "cloud_with_lightning_and_rain": "⛈", + "cloud_with_rain": "🌧", + "cloud_with_snow": "🌨", + "clown_face": "🤡", + "club_suit": 
"♣", + "clutch_bag": "👝", + "coat": "🧥", + "cocktail_glass": "🍸", + "coconut": "🥥", + "coffin": "⚰", + "cold_face": "🥶", + "collision": "💥", + "comet": "☄", + "compass": "🧭", + "computer_disk": "💽", + "computer_mouse": "🖱", + "confetti_ball": "🎊", + "confounded_face": "😖", + "confused_face": "😕", + "construction": "🚧", + "construction_worker": "👷", + "construction_worker_dark_skin_tone": "👷🏿", + "construction_worker_light_skin_tone": "👷🏻", + "construction_worker_medium-dark_skin_tone": "👷🏾", + "construction_worker_medium-light_skin_tone": "👷🏼", + "construction_worker_medium_skin_tone": "👷🏽", + "control_knobs": "🎛", + "convenience_store": "🏪", + "cooked_rice": "🍚", + "cookie": "🍪", + "cooking": "🍳", + "copyright": "©", + "couch_and_lamp": "🛋", + "counterclockwise_arrows_button": "🔄", + "couple_with_heart": "💑", + "couple_with_heart_man_man": "👨\u200d❤️\u200d👨", + "couple_with_heart_woman_man": "👩\u200d❤️\u200d👨", + "couple_with_heart_woman_woman": "👩\u200d❤️\u200d👩", + "cow": "🐮", + "cow_face": "🐮", + "cowboy_hat_face": "🤠", + "crab": "🦀", + "crayon": "🖍", + "credit_card": "💳", + "crescent_moon": "🌙", + "cricket": "🦗", + "cricket_game": "🏏", + "crocodile": "🐊", + "croissant": "🥐", + "cross_mark": "❌", + "cross_mark_button": "❎", + "crossed_fingers": "🤞", + "crossed_fingers_dark_skin_tone": "🤞🏿", + "crossed_fingers_light_skin_tone": "🤞🏻", + "crossed_fingers_medium-dark_skin_tone": "🤞🏾", + "crossed_fingers_medium-light_skin_tone": "🤞🏼", + "crossed_fingers_medium_skin_tone": "🤞🏽", + "crossed_flags": "🎌", + "crossed_swords": "⚔", + "crown": "👑", + "crying_cat_face": "😿", + "crying_face": "😢", + "crystal_ball": "🔮", + "cucumber": "🥒", + "cupcake": "🧁", + "cup_with_straw": "🥤", + "curling_stone": "🥌", + "curly_hair": "🦱", + "curly-haired_man": "👨\u200d🦱", + "curly-haired_woman": "👩\u200d🦱", + "curly_loop": "➰", + "currency_exchange": "💱", + "curry_rice": "🍛", + "custard": "🍮", + "customs": "🛃", + "cut_of_meat": "🥩", + "cyclone": "🌀", + "dagger": "🗡", + "dango": "🍡", + "dashing_away": "💨", + "deaf_person": "🧏", + "deciduous_tree": "🌳", + "deer": "🦌", + "delivery_truck": "🚚", + "department_store": "🏬", + "derelict_house": "🏚", + "desert": "🏜", + "desert_island": "🏝", + "desktop_computer": "🖥", + "detective": "🕵", + "detective_dark_skin_tone": "🕵🏿", + "detective_light_skin_tone": "🕵🏻", + "detective_medium-dark_skin_tone": "🕵🏾", + "detective_medium-light_skin_tone": "🕵🏼", + "detective_medium_skin_tone": "🕵🏽", + "diamond_suit": "♦", + "diamond_with_a_dot": "💠", + "dim_button": "🔅", + "direct_hit": "🎯", + "disappointed_face": "😞", + "diving_mask": "🤿", + "diya_lamp": "🪔", + "dizzy": "💫", + "dizzy_face": "😵", + "dna": "🧬", + "dog": "🐶", + "dog_face": "🐶", + "dollar_banknote": "💵", + "dolphin": "🐬", + "door": "🚪", + "dotted_six-pointed_star": "🔯", + "double_curly_loop": "➿", + "double_exclamation_mark": "‼", + "doughnut": "🍩", + "dove": "🕊", + "down-left_arrow": "↙", + "down-right_arrow": "↘", + "down_arrow": "⬇", + "downcast_face_with_sweat": "😓", + "downwards_button": "🔽", + "dragon": "🐉", + "dragon_face": "🐲", + "dress": "👗", + "drooling_face": "🤤", + "drop_of_blood": "🩸", + "droplet": "💧", + "drum": "🥁", + "duck": "🦆", + "dumpling": "🥟", + "dvd": "📀", + "e-mail": "📧", + "eagle": "🦅", + "ear": "👂", + "ear_dark_skin_tone": "👂🏿", + "ear_light_skin_tone": "👂🏻", + "ear_medium-dark_skin_tone": "👂🏾", + "ear_medium-light_skin_tone": "👂🏼", + "ear_medium_skin_tone": "👂🏽", + "ear_of_corn": "🌽", + "ear_with_hearing_aid": "🦻", + "egg": "🍳", + "eggplant": "🍆", + "eight-pointed_star": "✴", + 
"eight-spoked_asterisk": "✳", + "eight-thirty": "🕣", + "eight_o’clock": "🕗", + "eject_button": "⏏", + "electric_plug": "🔌", + "elephant": "🐘", + "eleven-thirty": "🕦", + "eleven_o’clock": "🕚", + "elf": "🧝", + "elf_dark_skin_tone": "🧝🏿", + "elf_light_skin_tone": "🧝🏻", + "elf_medium-dark_skin_tone": "🧝🏾", + "elf_medium-light_skin_tone": "🧝🏼", + "elf_medium_skin_tone": "🧝🏽", + "envelope": "✉", + "envelope_with_arrow": "📩", + "euro_banknote": "💶", + "evergreen_tree": "🌲", + "ewe": "🐑", + "exclamation_mark": "❗", + "exclamation_question_mark": "⁉", + "exploding_head": "🤯", + "expressionless_face": "😑", + "eye": "👁", + "eye_in_speech_bubble": "👁️\u200d🗨️", + "eyes": "👀", + "face_blowing_a_kiss": "😘", + "face_savoring_food": "😋", + "face_screaming_in_fear": "😱", + "face_vomiting": "🤮", + "face_with_hand_over_mouth": "🤭", + "face_with_head-bandage": "🤕", + "face_with_medical_mask": "😷", + "face_with_monocle": "🧐", + "face_with_open_mouth": "😮", + "face_with_raised_eyebrow": "🤨", + "face_with_rolling_eyes": "🙄", + "face_with_steam_from_nose": "😤", + "face_with_symbols_on_mouth": "🤬", + "face_with_tears_of_joy": "😂", + "face_with_thermometer": "🤒", + "face_with_tongue": "😛", + "face_without_mouth": "😶", + "factory": "🏭", + "fairy": "🧚", + "fairy_dark_skin_tone": "🧚🏿", + "fairy_light_skin_tone": "🧚🏻", + "fairy_medium-dark_skin_tone": "🧚🏾", + "fairy_medium-light_skin_tone": "🧚🏼", + "fairy_medium_skin_tone": "🧚🏽", + "falafel": "🧆", + "fallen_leaf": "🍂", + "family": "👪", + "family_man_boy": "👨\u200d👦", + "family_man_boy_boy": "👨\u200d👦\u200d👦", + "family_man_girl": "👨\u200d👧", + "family_man_girl_boy": "👨\u200d👧\u200d👦", + "family_man_girl_girl": "👨\u200d👧\u200d👧", + "family_man_man_boy": "👨\u200d👨\u200d👦", + "family_man_man_boy_boy": "👨\u200d👨\u200d👦\u200d👦", + "family_man_man_girl": "👨\u200d👨\u200d👧", + "family_man_man_girl_boy": "👨\u200d👨\u200d👧\u200d👦", + "family_man_man_girl_girl": "👨\u200d👨\u200d👧\u200d👧", + "family_man_woman_boy": "👨\u200d👩\u200d👦", + "family_man_woman_boy_boy": "👨\u200d👩\u200d👦\u200d👦", + "family_man_woman_girl": "👨\u200d👩\u200d👧", + "family_man_woman_girl_boy": "👨\u200d👩\u200d👧\u200d👦", + "family_man_woman_girl_girl": "👨\u200d👩\u200d👧\u200d👧", + "family_woman_boy": "👩\u200d👦", + "family_woman_boy_boy": "👩\u200d👦\u200d👦", + "family_woman_girl": "👩\u200d👧", + "family_woman_girl_boy": "👩\u200d👧\u200d👦", + "family_woman_girl_girl": "👩\u200d👧\u200d👧", + "family_woman_woman_boy": "👩\u200d👩\u200d👦", + "family_woman_woman_boy_boy": "👩\u200d👩\u200d👦\u200d👦", + "family_woman_woman_girl": "👩\u200d👩\u200d👧", + "family_woman_woman_girl_boy": "👩\u200d👩\u200d👧\u200d👦", + "family_woman_woman_girl_girl": "👩\u200d👩\u200d👧\u200d👧", + "fast-forward_button": "⏩", + "fast_down_button": "⏬", + "fast_reverse_button": "⏪", + "fast_up_button": "⏫", + "fax_machine": "📠", + "fearful_face": "😨", + "female_sign": "♀", + "ferris_wheel": "🎡", + "ferry": "⛴", + "field_hockey": "🏑", + "file_cabinet": "🗄", + "file_folder": "📁", + "film_frames": "🎞", + "film_projector": "📽", + "fire": "🔥", + "fire_extinguisher": "🧯", + "firecracker": "🧨", + "fire_engine": "🚒", + "fireworks": "🎆", + "first_quarter_moon": "🌓", + "first_quarter_moon_face": "🌛", + "fish": "🐟", + "fish_cake_with_swirl": "🍥", + "fishing_pole": "🎣", + "five-thirty": "🕠", + "five_o’clock": "🕔", + "flag_in_hole": "⛳", + "flamingo": "🦩", + "flashlight": "🔦", + "flat_shoe": "🥿", + "fleur-de-lis": "⚜", + "flexed_biceps": "💪", + "flexed_biceps_dark_skin_tone": "💪🏿", + "flexed_biceps_light_skin_tone": "💪🏻", + "flexed_biceps_medium-dark_skin_tone": "💪🏾", + 
"flexed_biceps_medium-light_skin_tone": "💪🏼", + "flexed_biceps_medium_skin_tone": "💪🏽", + "floppy_disk": "💾", + "flower_playing_cards": "🎴", + "flushed_face": "😳", + "flying_disc": "🥏", + "flying_saucer": "🛸", + "fog": "🌫", + "foggy": "🌁", + "folded_hands": "🙏", + "folded_hands_dark_skin_tone": "🙏🏿", + "folded_hands_light_skin_tone": "🙏🏻", + "folded_hands_medium-dark_skin_tone": "🙏🏾", + "folded_hands_medium-light_skin_tone": "🙏🏼", + "folded_hands_medium_skin_tone": "🙏🏽", + "foot": "🦶", + "footprints": "👣", + "fork_and_knife": "🍴", + "fork_and_knife_with_plate": "🍽", + "fortune_cookie": "🥠", + "fountain": "⛲", + "fountain_pen": "🖋", + "four-thirty": "🕟", + "four_leaf_clover": "🍀", + "four_o’clock": "🕓", + "fox_face": "🦊", + "framed_picture": "🖼", + "french_fries": "🍟", + "fried_shrimp": "🍤", + "frog_face": "🐸", + "front-facing_baby_chick": "🐥", + "frowning_face": "☹", + "frowning_face_with_open_mouth": "😦", + "fuel_pump": "⛽", + "full_moon": "🌕", + "full_moon_face": "🌝", + "funeral_urn": "⚱", + "game_die": "🎲", + "garlic": "🧄", + "gear": "⚙", + "gem_stone": "💎", + "genie": "🧞", + "ghost": "👻", + "giraffe": "🦒", + "girl": "👧", + "girl_dark_skin_tone": "👧🏿", + "girl_light_skin_tone": "👧🏻", + "girl_medium-dark_skin_tone": "👧🏾", + "girl_medium-light_skin_tone": "👧🏼", + "girl_medium_skin_tone": "👧🏽", + "glass_of_milk": "🥛", + "glasses": "👓", + "globe_showing_americas": "🌎", + "globe_showing_asia-australia": "🌏", + "globe_showing_europe-africa": "🌍", + "globe_with_meridians": "🌐", + "gloves": "🧤", + "glowing_star": "🌟", + "goal_net": "🥅", + "goat": "🐐", + "goblin": "👺", + "goggles": "🥽", + "gorilla": "🦍", + "graduation_cap": "🎓", + "grapes": "🍇", + "green_apple": "🍏", + "green_book": "📗", + "green_circle": "🟢", + "green_heart": "💚", + "green_salad": "🥗", + "green_square": "🟩", + "grimacing_face": "😬", + "grinning_cat_face": "😺", + "grinning_cat_face_with_smiling_eyes": "😸", + "grinning_face": "😀", + "grinning_face_with_big_eyes": "😃", + "grinning_face_with_smiling_eyes": "😄", + "grinning_face_with_sweat": "😅", + "grinning_squinting_face": "😆", + "growing_heart": "💗", + "guard": "💂", + "guard_dark_skin_tone": "💂🏿", + "guard_light_skin_tone": "💂🏻", + "guard_medium-dark_skin_tone": "💂🏾", + "guard_medium-light_skin_tone": "💂🏼", + "guard_medium_skin_tone": "💂🏽", + "guide_dog": "🦮", + "guitar": "🎸", + "hamburger": "🍔", + "hammer": "🔨", + "hammer_and_pick": "⚒", + "hammer_and_wrench": "🛠", + "hamster_face": "🐹", + "hand_with_fingers_splayed": "🖐", + "hand_with_fingers_splayed_dark_skin_tone": "🖐🏿", + "hand_with_fingers_splayed_light_skin_tone": "🖐🏻", + "hand_with_fingers_splayed_medium-dark_skin_tone": "🖐🏾", + "hand_with_fingers_splayed_medium-light_skin_tone": "🖐🏼", + "hand_with_fingers_splayed_medium_skin_tone": "🖐🏽", + "handbag": "👜", + "handshake": "🤝", + "hatching_chick": "🐣", + "headphone": "🎧", + "hear-no-evil_monkey": "🙉", + "heart_decoration": "💟", + "heart_suit": "♥", + "heart_with_arrow": "💘", + "heart_with_ribbon": "💝", + "heavy_check_mark": "✔", + "heavy_division_sign": "➗", + "heavy_dollar_sign": "💲", + "heavy_heart_exclamation": "❣", + "heavy_large_circle": "⭕", + "heavy_minus_sign": "➖", + "heavy_multiplication_x": "✖", + "heavy_plus_sign": "➕", + "hedgehog": "🦔", + "helicopter": "🚁", + "herb": "🌿", + "hibiscus": "🌺", + "high-heeled_shoe": "👠", + "high-speed_train": "🚄", + "high_voltage": "⚡", + "hiking_boot": "🥾", + "hindu_temple": "🛕", + "hippopotamus": "🦛", + "hole": "🕳", + "honey_pot": "🍯", + "honeybee": "🐝", + "horizontal_traffic_light": "🚥", + "horse": "🐴", + "horse_face": "🐴", + 
"horse_racing": "🏇", + "horse_racing_dark_skin_tone": "🏇🏿", + "horse_racing_light_skin_tone": "🏇🏻", + "horse_racing_medium-dark_skin_tone": "🏇🏾", + "horse_racing_medium-light_skin_tone": "🏇🏼", + "horse_racing_medium_skin_tone": "🏇🏽", + "hospital": "🏥", + "hot_beverage": "☕", + "hot_dog": "🌭", + "hot_face": "🥵", + "hot_pepper": "🌶", + "hot_springs": "♨", + "hotel": "🏨", + "hourglass_done": "⌛", + "hourglass_not_done": "⏳", + "house": "🏠", + "house_with_garden": "🏡", + "houses": "🏘", + "hugging_face": "🤗", + "hundred_points": "💯", + "hushed_face": "😯", + "ice": "🧊", + "ice_cream": "🍨", + "ice_hockey": "🏒", + "ice_skate": "⛸", + "inbox_tray": "📥", + "incoming_envelope": "📨", + "index_pointing_up": "☝", + "index_pointing_up_dark_skin_tone": "☝🏿", + "index_pointing_up_light_skin_tone": "☝🏻", + "index_pointing_up_medium-dark_skin_tone": "☝🏾", + "index_pointing_up_medium-light_skin_tone": "☝🏼", + "index_pointing_up_medium_skin_tone": "☝🏽", + "infinity": "♾", + "information": "ℹ", + "input_latin_letters": "🔤", + "input_latin_lowercase": "🔡", + "input_latin_uppercase": "🔠", + "input_numbers": "🔢", + "input_symbols": "🔣", + "jack-o-lantern": "🎃", + "jeans": "👖", + "jigsaw": "🧩", + "joker": "🃏", + "joystick": "🕹", + "kaaba": "🕋", + "kangaroo": "🦘", + "key": "🔑", + "keyboard": "⌨", + "keycap_#": "#️⃣", + "keycap_*": "*️⃣", + "keycap_0": "0️⃣", + "keycap_1": "1️⃣", + "keycap_10": "🔟", + "keycap_2": "2️⃣", + "keycap_3": "3️⃣", + "keycap_4": "4️⃣", + "keycap_5": "5️⃣", + "keycap_6": "6️⃣", + "keycap_7": "7️⃣", + "keycap_8": "8️⃣", + "keycap_9": "9️⃣", + "kick_scooter": "🛴", + "kimono": "👘", + "kiss": "💋", + "kiss_man_man": "👨\u200d❤️\u200d💋\u200d👨", + "kiss_mark": "💋", + "kiss_woman_man": "👩\u200d❤️\u200d💋\u200d👨", + "kiss_woman_woman": "👩\u200d❤️\u200d💋\u200d👩", + "kissing_cat_face": "😽", + "kissing_face": "😗", + "kissing_face_with_closed_eyes": "😚", + "kissing_face_with_smiling_eyes": "😙", + "kitchen_knife": "🔪", + "kite": "🪁", + "kiwi_fruit": "🥝", + "koala": "🐨", + "lab_coat": "🥼", + "label": "🏷", + "lacrosse": "🥍", + "lady_beetle": "🐞", + "laptop_computer": "💻", + "large_blue_diamond": "🔷", + "large_orange_diamond": "🔶", + "last_quarter_moon": "🌗", + "last_quarter_moon_face": "🌜", + "last_track_button": "⏮", + "latin_cross": "✝", + "leaf_fluttering_in_wind": "🍃", + "leafy_green": "🥬", + "ledger": "📒", + "left-facing_fist": "🤛", + "left-facing_fist_dark_skin_tone": "🤛🏿", + "left-facing_fist_light_skin_tone": "🤛🏻", + "left-facing_fist_medium-dark_skin_tone": "🤛🏾", + "left-facing_fist_medium-light_skin_tone": "🤛🏼", + "left-facing_fist_medium_skin_tone": "🤛🏽", + "left-right_arrow": "↔", + "left_arrow": "⬅", + "left_arrow_curving_right": "↪", + "left_luggage": "🛅", + "left_speech_bubble": "🗨", + "leg": "🦵", + "lemon": "🍋", + "leopard": "🐆", + "level_slider": "🎚", + "light_bulb": "💡", + "light_rail": "🚈", + "link": "🔗", + "linked_paperclips": "🖇", + "lion_face": "🦁", + "lipstick": "💄", + "litter_in_bin_sign": "🚮", + "lizard": "🦎", + "llama": "🦙", + "lobster": "🦞", + "locked": "🔒", + "locked_with_key": "🔐", + "locked_with_pen": "🔏", + "locomotive": "🚂", + "lollipop": "🍭", + "lotion_bottle": "🧴", + "loudly_crying_face": "😭", + "loudspeaker": "📢", + "love-you_gesture": "🤟", + "love-you_gesture_dark_skin_tone": "🤟🏿", + "love-you_gesture_light_skin_tone": "🤟🏻", + "love-you_gesture_medium-dark_skin_tone": "🤟🏾", + "love-you_gesture_medium-light_skin_tone": "🤟🏼", + "love-you_gesture_medium_skin_tone": "🤟🏽", + "love_hotel": "🏩", + "love_letter": "💌", + "luggage": "🧳", + "lying_face": "🤥", + "mage": "🧙", + 
"mage_dark_skin_tone": "🧙🏿", + "mage_light_skin_tone": "🧙🏻", + "mage_medium-dark_skin_tone": "🧙🏾", + "mage_medium-light_skin_tone": "🧙🏼", + "mage_medium_skin_tone": "🧙🏽", + "magnet": "🧲", + "magnifying_glass_tilted_left": "🔍", + "magnifying_glass_tilted_right": "🔎", + "mahjong_red_dragon": "🀄", + "male_sign": "♂", + "man": "👨", + "man_and_woman_holding_hands": "👫", + "man_artist": "👨\u200d🎨", + "man_artist_dark_skin_tone": "👨🏿\u200d🎨", + "man_artist_light_skin_tone": "👨🏻\u200d🎨", + "man_artist_medium-dark_skin_tone": "👨🏾\u200d🎨", + "man_artist_medium-light_skin_tone": "👨🏼\u200d🎨", + "man_artist_medium_skin_tone": "👨🏽\u200d🎨", + "man_astronaut": "👨\u200d🚀", + "man_astronaut_dark_skin_tone": "👨🏿\u200d🚀", + "man_astronaut_light_skin_tone": "👨🏻\u200d🚀", + "man_astronaut_medium-dark_skin_tone": "👨🏾\u200d🚀", + "man_astronaut_medium-light_skin_tone": "👨🏼\u200d🚀", + "man_astronaut_medium_skin_tone": "👨🏽\u200d🚀", + "man_biking": "🚴\u200d♂️", + "man_biking_dark_skin_tone": "🚴🏿\u200d♂️", + "man_biking_light_skin_tone": "🚴🏻\u200d♂️", + "man_biking_medium-dark_skin_tone": "🚴🏾\u200d♂️", + "man_biking_medium-light_skin_tone": "🚴🏼\u200d♂️", + "man_biking_medium_skin_tone": "🚴🏽\u200d♂️", + "man_bouncing_ball": "⛹️\u200d♂️", + "man_bouncing_ball_dark_skin_tone": "⛹🏿\u200d♂️", + "man_bouncing_ball_light_skin_tone": "⛹🏻\u200d♂️", + "man_bouncing_ball_medium-dark_skin_tone": "⛹🏾\u200d♂️", + "man_bouncing_ball_medium-light_skin_tone": "⛹🏼\u200d♂️", + "man_bouncing_ball_medium_skin_tone": "⛹🏽\u200d♂️", + "man_bowing": "🙇\u200d♂️", + "man_bowing_dark_skin_tone": "🙇🏿\u200d♂️", + "man_bowing_light_skin_tone": "🙇🏻\u200d♂️", + "man_bowing_medium-dark_skin_tone": "🙇🏾\u200d♂️", + "man_bowing_medium-light_skin_tone": "🙇🏼\u200d♂️", + "man_bowing_medium_skin_tone": "🙇🏽\u200d♂️", + "man_cartwheeling": "🤸\u200d♂️", + "man_cartwheeling_dark_skin_tone": "🤸🏿\u200d♂️", + "man_cartwheeling_light_skin_tone": "🤸🏻\u200d♂️", + "man_cartwheeling_medium-dark_skin_tone": "🤸🏾\u200d♂️", + "man_cartwheeling_medium-light_skin_tone": "🤸🏼\u200d♂️", + "man_cartwheeling_medium_skin_tone": "🤸🏽\u200d♂️", + "man_climbing": "🧗\u200d♂️", + "man_climbing_dark_skin_tone": "🧗🏿\u200d♂️", + "man_climbing_light_skin_tone": "🧗🏻\u200d♂️", + "man_climbing_medium-dark_skin_tone": "🧗🏾\u200d♂️", + "man_climbing_medium-light_skin_tone": "🧗🏼\u200d♂️", + "man_climbing_medium_skin_tone": "🧗🏽\u200d♂️", + "man_construction_worker": "👷\u200d♂️", + "man_construction_worker_dark_skin_tone": "👷🏿\u200d♂️", + "man_construction_worker_light_skin_tone": "👷🏻\u200d♂️", + "man_construction_worker_medium-dark_skin_tone": "👷🏾\u200d♂️", + "man_construction_worker_medium-light_skin_tone": "👷🏼\u200d♂️", + "man_construction_worker_medium_skin_tone": "👷🏽\u200d♂️", + "man_cook": "👨\u200d🍳", + "man_cook_dark_skin_tone": "👨🏿\u200d🍳", + "man_cook_light_skin_tone": "👨🏻\u200d🍳", + "man_cook_medium-dark_skin_tone": "👨🏾\u200d🍳", + "man_cook_medium-light_skin_tone": "👨🏼\u200d🍳", + "man_cook_medium_skin_tone": "👨🏽\u200d🍳", + "man_dancing": "🕺", + "man_dancing_dark_skin_tone": "🕺🏿", + "man_dancing_light_skin_tone": "🕺🏻", + "man_dancing_medium-dark_skin_tone": "🕺🏾", + "man_dancing_medium-light_skin_tone": "🕺🏼", + "man_dancing_medium_skin_tone": "🕺🏽", + "man_dark_skin_tone": "👨🏿", + "man_detective": "🕵️\u200d♂️", + "man_detective_dark_skin_tone": "🕵🏿\u200d♂️", + "man_detective_light_skin_tone": "🕵🏻\u200d♂️", + "man_detective_medium-dark_skin_tone": "🕵🏾\u200d♂️", + "man_detective_medium-light_skin_tone": "🕵🏼\u200d♂️", + "man_detective_medium_skin_tone": "🕵🏽\u200d♂️", + "man_elf": "🧝\u200d♂️", + 
"man_elf_dark_skin_tone": "🧝🏿\u200d♂️", + "man_elf_light_skin_tone": "🧝🏻\u200d♂️", + "man_elf_medium-dark_skin_tone": "🧝🏾\u200d♂️", + "man_elf_medium-light_skin_tone": "🧝🏼\u200d♂️", + "man_elf_medium_skin_tone": "🧝🏽\u200d♂️", + "man_facepalming": "🤦\u200d♂️", + "man_facepalming_dark_skin_tone": "🤦🏿\u200d♂️", + "man_facepalming_light_skin_tone": "🤦🏻\u200d♂️", + "man_facepalming_medium-dark_skin_tone": "🤦🏾\u200d♂️", + "man_facepalming_medium-light_skin_tone": "🤦🏼\u200d♂️", + "man_facepalming_medium_skin_tone": "🤦🏽\u200d♂️", + "man_factory_worker": "👨\u200d🏭", + "man_factory_worker_dark_skin_tone": "👨🏿\u200d🏭", + "man_factory_worker_light_skin_tone": "👨🏻\u200d🏭", + "man_factory_worker_medium-dark_skin_tone": "👨🏾\u200d🏭", + "man_factory_worker_medium-light_skin_tone": "👨🏼\u200d🏭", + "man_factory_worker_medium_skin_tone": "👨🏽\u200d🏭", + "man_fairy": "🧚\u200d♂️", + "man_fairy_dark_skin_tone": "🧚🏿\u200d♂️", + "man_fairy_light_skin_tone": "🧚🏻\u200d♂️", + "man_fairy_medium-dark_skin_tone": "🧚🏾\u200d♂️", + "man_fairy_medium-light_skin_tone": "🧚🏼\u200d♂️", + "man_fairy_medium_skin_tone": "🧚🏽\u200d♂️", + "man_farmer": "👨\u200d🌾", + "man_farmer_dark_skin_tone": "👨🏿\u200d🌾", + "man_farmer_light_skin_tone": "👨🏻\u200d🌾", + "man_farmer_medium-dark_skin_tone": "👨🏾\u200d🌾", + "man_farmer_medium-light_skin_tone": "👨🏼\u200d🌾", + "man_farmer_medium_skin_tone": "👨🏽\u200d🌾", + "man_firefighter": "👨\u200d🚒", + "man_firefighter_dark_skin_tone": "👨🏿\u200d🚒", + "man_firefighter_light_skin_tone": "👨🏻\u200d🚒", + "man_firefighter_medium-dark_skin_tone": "👨🏾\u200d🚒", + "man_firefighter_medium-light_skin_tone": "👨🏼\u200d🚒", + "man_firefighter_medium_skin_tone": "👨🏽\u200d🚒", + "man_frowning": "🙍\u200d♂️", + "man_frowning_dark_skin_tone": "🙍🏿\u200d♂️", + "man_frowning_light_skin_tone": "🙍🏻\u200d♂️", + "man_frowning_medium-dark_skin_tone": "🙍🏾\u200d♂️", + "man_frowning_medium-light_skin_tone": "🙍🏼\u200d♂️", + "man_frowning_medium_skin_tone": "🙍🏽\u200d♂️", + "man_genie": "🧞\u200d♂️", + "man_gesturing_no": "🙅\u200d♂️", + "man_gesturing_no_dark_skin_tone": "🙅🏿\u200d♂️", + "man_gesturing_no_light_skin_tone": "🙅🏻\u200d♂️", + "man_gesturing_no_medium-dark_skin_tone": "🙅🏾\u200d♂️", + "man_gesturing_no_medium-light_skin_tone": "🙅🏼\u200d♂️", + "man_gesturing_no_medium_skin_tone": "🙅🏽\u200d♂️", + "man_gesturing_ok": "🙆\u200d♂️", + "man_gesturing_ok_dark_skin_tone": "🙆🏿\u200d♂️", + "man_gesturing_ok_light_skin_tone": "🙆🏻\u200d♂️", + "man_gesturing_ok_medium-dark_skin_tone": "🙆🏾\u200d♂️", + "man_gesturing_ok_medium-light_skin_tone": "🙆🏼\u200d♂️", + "man_gesturing_ok_medium_skin_tone": "🙆🏽\u200d♂️", + "man_getting_haircut": "💇\u200d♂️", + "man_getting_haircut_dark_skin_tone": "💇🏿\u200d♂️", + "man_getting_haircut_light_skin_tone": "💇🏻\u200d♂️", + "man_getting_haircut_medium-dark_skin_tone": "💇🏾\u200d♂️", + "man_getting_haircut_medium-light_skin_tone": "💇🏼\u200d♂️", + "man_getting_haircut_medium_skin_tone": "💇🏽\u200d♂️", + "man_getting_massage": "💆\u200d♂️", + "man_getting_massage_dark_skin_tone": "💆🏿\u200d♂️", + "man_getting_massage_light_skin_tone": "💆🏻\u200d♂️", + "man_getting_massage_medium-dark_skin_tone": "💆🏾\u200d♂️", + "man_getting_massage_medium-light_skin_tone": "💆🏼\u200d♂️", + "man_getting_massage_medium_skin_tone": "💆🏽\u200d♂️", + "man_golfing": "🏌️\u200d♂️", + "man_golfing_dark_skin_tone": "🏌🏿\u200d♂️", + "man_golfing_light_skin_tone": "🏌🏻\u200d♂️", + "man_golfing_medium-dark_skin_tone": "🏌🏾\u200d♂️", + "man_golfing_medium-light_skin_tone": "🏌🏼\u200d♂️", + "man_golfing_medium_skin_tone": "🏌🏽\u200d♂️", + "man_guard": 
"💂\u200d♂️", + "man_guard_dark_skin_tone": "💂🏿\u200d♂️", + "man_guard_light_skin_tone": "💂🏻\u200d♂️", + "man_guard_medium-dark_skin_tone": "💂🏾\u200d♂️", + "man_guard_medium-light_skin_tone": "💂🏼\u200d♂️", + "man_guard_medium_skin_tone": "💂🏽\u200d♂️", + "man_health_worker": "👨\u200d⚕️", + "man_health_worker_dark_skin_tone": "👨🏿\u200d⚕️", + "man_health_worker_light_skin_tone": "👨🏻\u200d⚕️", + "man_health_worker_medium-dark_skin_tone": "👨🏾\u200d⚕️", + "man_health_worker_medium-light_skin_tone": "👨🏼\u200d⚕️", + "man_health_worker_medium_skin_tone": "👨🏽\u200d⚕️", + "man_in_lotus_position": "🧘\u200d♂️", + "man_in_lotus_position_dark_skin_tone": "🧘🏿\u200d♂️", + "man_in_lotus_position_light_skin_tone": "🧘🏻\u200d♂️", + "man_in_lotus_position_medium-dark_skin_tone": "🧘🏾\u200d♂️", + "man_in_lotus_position_medium-light_skin_tone": "🧘🏼\u200d♂️", + "man_in_lotus_position_medium_skin_tone": "🧘🏽\u200d♂️", + "man_in_manual_wheelchair": "👨\u200d🦽", + "man_in_motorized_wheelchair": "👨\u200d🦼", + "man_in_steamy_room": "🧖\u200d♂️", + "man_in_steamy_room_dark_skin_tone": "🧖🏿\u200d♂️", + "man_in_steamy_room_light_skin_tone": "🧖🏻\u200d♂️", + "man_in_steamy_room_medium-dark_skin_tone": "🧖🏾\u200d♂️", + "man_in_steamy_room_medium-light_skin_tone": "🧖🏼\u200d♂️", + "man_in_steamy_room_medium_skin_tone": "🧖🏽\u200d♂️", + "man_in_suit_levitating": "🕴", + "man_in_suit_levitating_dark_skin_tone": "🕴🏿", + "man_in_suit_levitating_light_skin_tone": "🕴🏻", + "man_in_suit_levitating_medium-dark_skin_tone": "🕴🏾", + "man_in_suit_levitating_medium-light_skin_tone": "🕴🏼", + "man_in_suit_levitating_medium_skin_tone": "🕴🏽", + "man_in_tuxedo": "🤵", + "man_in_tuxedo_dark_skin_tone": "🤵🏿", + "man_in_tuxedo_light_skin_tone": "🤵🏻", + "man_in_tuxedo_medium-dark_skin_tone": "🤵🏾", + "man_in_tuxedo_medium-light_skin_tone": "🤵🏼", + "man_in_tuxedo_medium_skin_tone": "🤵🏽", + "man_judge": "👨\u200d⚖️", + "man_judge_dark_skin_tone": "👨🏿\u200d⚖️", + "man_judge_light_skin_tone": "👨🏻\u200d⚖️", + "man_judge_medium-dark_skin_tone": "👨🏾\u200d⚖️", + "man_judge_medium-light_skin_tone": "👨🏼\u200d⚖️", + "man_judge_medium_skin_tone": "👨🏽\u200d⚖️", + "man_juggling": "🤹\u200d♂️", + "man_juggling_dark_skin_tone": "🤹🏿\u200d♂️", + "man_juggling_light_skin_tone": "🤹🏻\u200d♂️", + "man_juggling_medium-dark_skin_tone": "🤹🏾\u200d♂️", + "man_juggling_medium-light_skin_tone": "🤹🏼\u200d♂️", + "man_juggling_medium_skin_tone": "🤹🏽\u200d♂️", + "man_lifting_weights": "🏋️\u200d♂️", + "man_lifting_weights_dark_skin_tone": "🏋🏿\u200d♂️", + "man_lifting_weights_light_skin_tone": "🏋🏻\u200d♂️", + "man_lifting_weights_medium-dark_skin_tone": "🏋🏾\u200d♂️", + "man_lifting_weights_medium-light_skin_tone": "🏋🏼\u200d♂️", + "man_lifting_weights_medium_skin_tone": "🏋🏽\u200d♂️", + "man_light_skin_tone": "👨🏻", + "man_mage": "🧙\u200d♂️", + "man_mage_dark_skin_tone": "🧙🏿\u200d♂️", + "man_mage_light_skin_tone": "🧙🏻\u200d♂️", + "man_mage_medium-dark_skin_tone": "🧙🏾\u200d♂️", + "man_mage_medium-light_skin_tone": "🧙🏼\u200d♂️", + "man_mage_medium_skin_tone": "🧙🏽\u200d♂️", + "man_mechanic": "👨\u200d🔧", + "man_mechanic_dark_skin_tone": "👨🏿\u200d🔧", + "man_mechanic_light_skin_tone": "👨🏻\u200d🔧", + "man_mechanic_medium-dark_skin_tone": "👨🏾\u200d🔧", + "man_mechanic_medium-light_skin_tone": "👨🏼\u200d🔧", + "man_mechanic_medium_skin_tone": "👨🏽\u200d🔧", + "man_medium-dark_skin_tone": "👨🏾", + "man_medium-light_skin_tone": "👨🏼", + "man_medium_skin_tone": "👨🏽", + "man_mountain_biking": "🚵\u200d♂️", + "man_mountain_biking_dark_skin_tone": "🚵🏿\u200d♂️", + "man_mountain_biking_light_skin_tone": "🚵🏻\u200d♂️", + 
"man_mountain_biking_medium-dark_skin_tone": "🚵🏾\u200d♂️", + "man_mountain_biking_medium-light_skin_tone": "🚵🏼\u200d♂️", + "man_mountain_biking_medium_skin_tone": "🚵🏽\u200d♂️", + "man_office_worker": "👨\u200d💼", + "man_office_worker_dark_skin_tone": "👨🏿\u200d💼", + "man_office_worker_light_skin_tone": "👨🏻\u200d💼", + "man_office_worker_medium-dark_skin_tone": "👨🏾\u200d💼", + "man_office_worker_medium-light_skin_tone": "👨🏼\u200d💼", + "man_office_worker_medium_skin_tone": "👨🏽\u200d💼", + "man_pilot": "👨\u200d✈️", + "man_pilot_dark_skin_tone": "👨🏿\u200d✈️", + "man_pilot_light_skin_tone": "👨🏻\u200d✈️", + "man_pilot_medium-dark_skin_tone": "👨🏾\u200d✈️", + "man_pilot_medium-light_skin_tone": "👨🏼\u200d✈️", + "man_pilot_medium_skin_tone": "👨🏽\u200d✈️", + "man_playing_handball": "🤾\u200d♂️", + "man_playing_handball_dark_skin_tone": "🤾🏿\u200d♂️", + "man_playing_handball_light_skin_tone": "🤾🏻\u200d♂️", + "man_playing_handball_medium-dark_skin_tone": "🤾🏾\u200d♂️", + "man_playing_handball_medium-light_skin_tone": "🤾🏼\u200d♂️", + "man_playing_handball_medium_skin_tone": "🤾🏽\u200d♂️", + "man_playing_water_polo": "🤽\u200d♂️", + "man_playing_water_polo_dark_skin_tone": "🤽🏿\u200d♂️", + "man_playing_water_polo_light_skin_tone": "🤽🏻\u200d♂️", + "man_playing_water_polo_medium-dark_skin_tone": "🤽🏾\u200d♂️", + "man_playing_water_polo_medium-light_skin_tone": "🤽🏼\u200d♂️", + "man_playing_water_polo_medium_skin_tone": "🤽🏽\u200d♂️", + "man_police_officer": "👮\u200d♂️", + "man_police_officer_dark_skin_tone": "👮🏿\u200d♂️", + "man_police_officer_light_skin_tone": "👮🏻\u200d♂️", + "man_police_officer_medium-dark_skin_tone": "👮🏾\u200d♂️", + "man_police_officer_medium-light_skin_tone": "👮🏼\u200d♂️", + "man_police_officer_medium_skin_tone": "👮🏽\u200d♂️", + "man_pouting": "🙎\u200d♂️", + "man_pouting_dark_skin_tone": "🙎🏿\u200d♂️", + "man_pouting_light_skin_tone": "🙎🏻\u200d♂️", + "man_pouting_medium-dark_skin_tone": "🙎🏾\u200d♂️", + "man_pouting_medium-light_skin_tone": "🙎🏼\u200d♂️", + "man_pouting_medium_skin_tone": "🙎🏽\u200d♂️", + "man_raising_hand": "🙋\u200d♂️", + "man_raising_hand_dark_skin_tone": "🙋🏿\u200d♂️", + "man_raising_hand_light_skin_tone": "🙋🏻\u200d♂️", + "man_raising_hand_medium-dark_skin_tone": "🙋🏾\u200d♂️", + "man_raising_hand_medium-light_skin_tone": "🙋🏼\u200d♂️", + "man_raising_hand_medium_skin_tone": "🙋🏽\u200d♂️", + "man_rowing_boat": "🚣\u200d♂️", + "man_rowing_boat_dark_skin_tone": "🚣🏿\u200d♂️", + "man_rowing_boat_light_skin_tone": "🚣🏻\u200d♂️", + "man_rowing_boat_medium-dark_skin_tone": "🚣🏾\u200d♂️", + "man_rowing_boat_medium-light_skin_tone": "🚣🏼\u200d♂️", + "man_rowing_boat_medium_skin_tone": "🚣🏽\u200d♂️", + "man_running": "🏃\u200d♂️", + "man_running_dark_skin_tone": "🏃🏿\u200d♂️", + "man_running_light_skin_tone": "🏃🏻\u200d♂️", + "man_running_medium-dark_skin_tone": "🏃🏾\u200d♂️", + "man_running_medium-light_skin_tone": "🏃🏼\u200d♂️", + "man_running_medium_skin_tone": "🏃🏽\u200d♂️", + "man_scientist": "👨\u200d🔬", + "man_scientist_dark_skin_tone": "👨🏿\u200d🔬", + "man_scientist_light_skin_tone": "👨🏻\u200d🔬", + "man_scientist_medium-dark_skin_tone": "👨🏾\u200d🔬", + "man_scientist_medium-light_skin_tone": "👨🏼\u200d🔬", + "man_scientist_medium_skin_tone": "👨🏽\u200d🔬", + "man_shrugging": "🤷\u200d♂️", + "man_shrugging_dark_skin_tone": "🤷🏿\u200d♂️", + "man_shrugging_light_skin_tone": "🤷🏻\u200d♂️", + "man_shrugging_medium-dark_skin_tone": "🤷🏾\u200d♂️", + "man_shrugging_medium-light_skin_tone": "🤷🏼\u200d♂️", + "man_shrugging_medium_skin_tone": "🤷🏽\u200d♂️", + "man_singer": "👨\u200d🎤", + "man_singer_dark_skin_tone": 
"👨🏿\u200d🎤", + "man_singer_light_skin_tone": "👨🏻\u200d🎤", + "man_singer_medium-dark_skin_tone": "👨🏾\u200d🎤", + "man_singer_medium-light_skin_tone": "👨🏼\u200d🎤", + "man_singer_medium_skin_tone": "👨🏽\u200d🎤", + "man_student": "👨\u200d🎓", + "man_student_dark_skin_tone": "👨🏿\u200d🎓", + "man_student_light_skin_tone": "👨🏻\u200d🎓", + "man_student_medium-dark_skin_tone": "👨🏾\u200d🎓", + "man_student_medium-light_skin_tone": "👨🏼\u200d🎓", + "man_student_medium_skin_tone": "👨🏽\u200d🎓", + "man_surfing": "🏄\u200d♂️", + "man_surfing_dark_skin_tone": "🏄🏿\u200d♂️", + "man_surfing_light_skin_tone": "🏄🏻\u200d♂️", + "man_surfing_medium-dark_skin_tone": "🏄🏾\u200d♂️", + "man_surfing_medium-light_skin_tone": "🏄🏼\u200d♂️", + "man_surfing_medium_skin_tone": "🏄🏽\u200d♂️", + "man_swimming": "🏊\u200d♂️", + "man_swimming_dark_skin_tone": "🏊🏿\u200d♂️", + "man_swimming_light_skin_tone": "🏊🏻\u200d♂️", + "man_swimming_medium-dark_skin_tone": "🏊🏾\u200d♂️", + "man_swimming_medium-light_skin_tone": "🏊🏼\u200d♂️", + "man_swimming_medium_skin_tone": "🏊🏽\u200d♂️", + "man_teacher": "👨\u200d🏫", + "man_teacher_dark_skin_tone": "👨🏿\u200d🏫", + "man_teacher_light_skin_tone": "👨🏻\u200d🏫", + "man_teacher_medium-dark_skin_tone": "👨🏾\u200d🏫", + "man_teacher_medium-light_skin_tone": "👨🏼\u200d🏫", + "man_teacher_medium_skin_tone": "👨🏽\u200d🏫", + "man_technologist": "👨\u200d💻", + "man_technologist_dark_skin_tone": "👨🏿\u200d💻", + "man_technologist_light_skin_tone": "👨🏻\u200d💻", + "man_technologist_medium-dark_skin_tone": "👨🏾\u200d💻", + "man_technologist_medium-light_skin_tone": "👨🏼\u200d💻", + "man_technologist_medium_skin_tone": "👨🏽\u200d💻", + "man_tipping_hand": "💁\u200d♂️", + "man_tipping_hand_dark_skin_tone": "💁🏿\u200d♂️", + "man_tipping_hand_light_skin_tone": "💁🏻\u200d♂️", + "man_tipping_hand_medium-dark_skin_tone": "💁🏾\u200d♂️", + "man_tipping_hand_medium-light_skin_tone": "💁🏼\u200d♂️", + "man_tipping_hand_medium_skin_tone": "💁🏽\u200d♂️", + "man_vampire": "🧛\u200d♂️", + "man_vampire_dark_skin_tone": "🧛🏿\u200d♂️", + "man_vampire_light_skin_tone": "🧛🏻\u200d♂️", + "man_vampire_medium-dark_skin_tone": "🧛🏾\u200d♂️", + "man_vampire_medium-light_skin_tone": "🧛🏼\u200d♂️", + "man_vampire_medium_skin_tone": "🧛🏽\u200d♂️", + "man_walking": "🚶\u200d♂️", + "man_walking_dark_skin_tone": "🚶🏿\u200d♂️", + "man_walking_light_skin_tone": "🚶🏻\u200d♂️", + "man_walking_medium-dark_skin_tone": "🚶🏾\u200d♂️", + "man_walking_medium-light_skin_tone": "🚶🏼\u200d♂️", + "man_walking_medium_skin_tone": "🚶🏽\u200d♂️", + "man_wearing_turban": "👳\u200d♂️", + "man_wearing_turban_dark_skin_tone": "👳🏿\u200d♂️", + "man_wearing_turban_light_skin_tone": "👳🏻\u200d♂️", + "man_wearing_turban_medium-dark_skin_tone": "👳🏾\u200d♂️", + "man_wearing_turban_medium-light_skin_tone": "👳🏼\u200d♂️", + "man_wearing_turban_medium_skin_tone": "👳🏽\u200d♂️", + "man_with_probing_cane": "👨\u200d🦯", + "man_with_chinese_cap": "👲", + "man_with_chinese_cap_dark_skin_tone": "👲🏿", + "man_with_chinese_cap_light_skin_tone": "👲🏻", + "man_with_chinese_cap_medium-dark_skin_tone": "👲🏾", + "man_with_chinese_cap_medium-light_skin_tone": "👲🏼", + "man_with_chinese_cap_medium_skin_tone": "👲🏽", + "man_zombie": "🧟\u200d♂️", + "mango": "🥭", + "mantelpiece_clock": "🕰", + "manual_wheelchair": "🦽", + "man’s_shoe": "👞", + "map_of_japan": "🗾", + "maple_leaf": "🍁", + "martial_arts_uniform": "🥋", + "mate": "🧉", + "meat_on_bone": "🍖", + "mechanical_arm": "🦾", + "mechanical_leg": "🦿", + "medical_symbol": "⚕", + "megaphone": "📣", + "melon": "🍈", + "memo": "📝", + "men_with_bunny_ears": "👯\u200d♂️", + "men_wrestling": "🤼\u200d♂️", + 
"menorah": "🕎", + "men’s_room": "🚹", + "mermaid": "🧜\u200d♀️", + "mermaid_dark_skin_tone": "🧜🏿\u200d♀️", + "mermaid_light_skin_tone": "🧜🏻\u200d♀️", + "mermaid_medium-dark_skin_tone": "🧜🏾\u200d♀️", + "mermaid_medium-light_skin_tone": "🧜🏼\u200d♀️", + "mermaid_medium_skin_tone": "🧜🏽\u200d♀️", + "merman": "🧜\u200d♂️", + "merman_dark_skin_tone": "🧜🏿\u200d♂️", + "merman_light_skin_tone": "🧜🏻\u200d♂️", + "merman_medium-dark_skin_tone": "🧜🏾\u200d♂️", + "merman_medium-light_skin_tone": "🧜🏼\u200d♂️", + "merman_medium_skin_tone": "🧜🏽\u200d♂️", + "merperson": "🧜", + "merperson_dark_skin_tone": "🧜🏿", + "merperson_light_skin_tone": "🧜🏻", + "merperson_medium-dark_skin_tone": "🧜🏾", + "merperson_medium-light_skin_tone": "🧜🏼", + "merperson_medium_skin_tone": "🧜🏽", + "metro": "🚇", + "microbe": "🦠", + "microphone": "🎤", + "microscope": "🔬", + "middle_finger": "🖕", + "middle_finger_dark_skin_tone": "🖕🏿", + "middle_finger_light_skin_tone": "🖕🏻", + "middle_finger_medium-dark_skin_tone": "🖕🏾", + "middle_finger_medium-light_skin_tone": "🖕🏼", + "middle_finger_medium_skin_tone": "🖕🏽", + "military_medal": "🎖", + "milky_way": "🌌", + "minibus": "🚐", + "moai": "🗿", + "mobile_phone": "📱", + "mobile_phone_off": "📴", + "mobile_phone_with_arrow": "📲", + "money-mouth_face": "🤑", + "money_bag": "💰", + "money_with_wings": "💸", + "monkey": "🐒", + "monkey_face": "🐵", + "monorail": "🚝", + "moon_cake": "🥮", + "moon_viewing_ceremony": "🎑", + "mosque": "🕌", + "mosquito": "🦟", + "motor_boat": "🛥", + "motor_scooter": "🛵", + "motorcycle": "🏍", + "motorized_wheelchair": "🦼", + "motorway": "🛣", + "mount_fuji": "🗻", + "mountain": "⛰", + "mountain_cableway": "🚠", + "mountain_railway": "🚞", + "mouse": "🐭", + "mouse_face": "🐭", + "mouth": "👄", + "movie_camera": "🎥", + "mushroom": "🍄", + "musical_keyboard": "🎹", + "musical_note": "🎵", + "musical_notes": "🎶", + "musical_score": "🎼", + "muted_speaker": "🔇", + "nail_polish": "💅", + "nail_polish_dark_skin_tone": "💅🏿", + "nail_polish_light_skin_tone": "💅🏻", + "nail_polish_medium-dark_skin_tone": "💅🏾", + "nail_polish_medium-light_skin_tone": "💅🏼", + "nail_polish_medium_skin_tone": "💅🏽", + "name_badge": "📛", + "national_park": "🏞", + "nauseated_face": "🤢", + "nazar_amulet": "🧿", + "necktie": "👔", + "nerd_face": "🤓", + "neutral_face": "😐", + "new_moon": "🌑", + "new_moon_face": "🌚", + "newspaper": "📰", + "next_track_button": "⏭", + "night_with_stars": "🌃", + "nine-thirty": "🕤", + "nine_o’clock": "🕘", + "no_bicycles": "🚳", + "no_entry": "⛔", + "no_littering": "🚯", + "no_mobile_phones": "📵", + "no_one_under_eighteen": "🔞", + "no_pedestrians": "🚷", + "no_smoking": "🚭", + "non-potable_water": "🚱", + "nose": "👃", + "nose_dark_skin_tone": "👃🏿", + "nose_light_skin_tone": "👃🏻", + "nose_medium-dark_skin_tone": "👃🏾", + "nose_medium-light_skin_tone": "👃🏼", + "nose_medium_skin_tone": "👃🏽", + "notebook": "📓", + "notebook_with_decorative_cover": "📔", + "nut_and_bolt": "🔩", + "octopus": "🐙", + "oden": "🍢", + "office_building": "🏢", + "ogre": "👹", + "oil_drum": "🛢", + "old_key": "🗝", + "old_man": "👴", + "old_man_dark_skin_tone": "👴🏿", + "old_man_light_skin_tone": "👴🏻", + "old_man_medium-dark_skin_tone": "👴🏾", + "old_man_medium-light_skin_tone": "👴🏼", + "old_man_medium_skin_tone": "👴🏽", + "old_woman": "👵", + "old_woman_dark_skin_tone": "👵🏿", + "old_woman_light_skin_tone": "👵🏻", + "old_woman_medium-dark_skin_tone": "👵🏾", + "old_woman_medium-light_skin_tone": "👵🏼", + "old_woman_medium_skin_tone": "👵🏽", + "older_adult": "🧓", + "older_adult_dark_skin_tone": "🧓🏿", + "older_adult_light_skin_tone": "🧓🏻", + 
"older_adult_medium-dark_skin_tone": "🧓🏾", + "older_adult_medium-light_skin_tone": "🧓🏼", + "older_adult_medium_skin_tone": "🧓🏽", + "om": "🕉", + "oncoming_automobile": "🚘", + "oncoming_bus": "🚍", + "oncoming_fist": "👊", + "oncoming_fist_dark_skin_tone": "👊🏿", + "oncoming_fist_light_skin_tone": "👊🏻", + "oncoming_fist_medium-dark_skin_tone": "👊🏾", + "oncoming_fist_medium-light_skin_tone": "👊🏼", + "oncoming_fist_medium_skin_tone": "👊🏽", + "oncoming_police_car": "🚔", + "oncoming_taxi": "🚖", + "one-piece_swimsuit": "🩱", + "one-thirty": "🕜", + "one_o’clock": "🕐", + "onion": "🧅", + "open_book": "📖", + "open_file_folder": "📂", + "open_hands": "👐", + "open_hands_dark_skin_tone": "👐🏿", + "open_hands_light_skin_tone": "👐🏻", + "open_hands_medium-dark_skin_tone": "👐🏾", + "open_hands_medium-light_skin_tone": "👐🏼", + "open_hands_medium_skin_tone": "👐🏽", + "open_mailbox_with_lowered_flag": "📭", + "open_mailbox_with_raised_flag": "📬", + "optical_disk": "💿", + "orange_book": "📙", + "orange_circle": "🟠", + "orange_heart": "🧡", + "orange_square": "🟧", + "orangutan": "🦧", + "orthodox_cross": "☦", + "otter": "🦦", + "outbox_tray": "📤", + "owl": "🦉", + "ox": "🐂", + "oyster": "🦪", + "package": "📦", + "page_facing_up": "📄", + "page_with_curl": "📃", + "pager": "📟", + "paintbrush": "🖌", + "palm_tree": "🌴", + "palms_up_together": "🤲", + "palms_up_together_dark_skin_tone": "🤲🏿", + "palms_up_together_light_skin_tone": "🤲🏻", + "palms_up_together_medium-dark_skin_tone": "🤲🏾", + "palms_up_together_medium-light_skin_tone": "🤲🏼", + "palms_up_together_medium_skin_tone": "🤲🏽", + "pancakes": "🥞", + "panda_face": "🐼", + "paperclip": "📎", + "parrot": "🦜", + "part_alternation_mark": "〽", + "party_popper": "🎉", + "partying_face": "🥳", + "passenger_ship": "🛳", + "passport_control": "🛂", + "pause_button": "⏸", + "paw_prints": "🐾", + "peace_symbol": "☮", + "peach": "🍑", + "peacock": "🦚", + "peanuts": "🥜", + "pear": "🍐", + "pen": "🖊", + "pencil": "📝", + "penguin": "🐧", + "pensive_face": "😔", + "people_holding_hands": "🧑\u200d🤝\u200d🧑", + "people_with_bunny_ears": "👯", + "people_wrestling": "🤼", + "performing_arts": "🎭", + "persevering_face": "😣", + "person_biking": "🚴", + "person_biking_dark_skin_tone": "🚴🏿", + "person_biking_light_skin_tone": "🚴🏻", + "person_biking_medium-dark_skin_tone": "🚴🏾", + "person_biking_medium-light_skin_tone": "🚴🏼", + "person_biking_medium_skin_tone": "🚴🏽", + "person_bouncing_ball": "⛹", + "person_bouncing_ball_dark_skin_tone": "⛹🏿", + "person_bouncing_ball_light_skin_tone": "⛹🏻", + "person_bouncing_ball_medium-dark_skin_tone": "⛹🏾", + "person_bouncing_ball_medium-light_skin_tone": "⛹🏼", + "person_bouncing_ball_medium_skin_tone": "⛹🏽", + "person_bowing": "🙇", + "person_bowing_dark_skin_tone": "🙇🏿", + "person_bowing_light_skin_tone": "🙇🏻", + "person_bowing_medium-dark_skin_tone": "🙇🏾", + "person_bowing_medium-light_skin_tone": "🙇🏼", + "person_bowing_medium_skin_tone": "🙇🏽", + "person_cartwheeling": "🤸", + "person_cartwheeling_dark_skin_tone": "🤸🏿", + "person_cartwheeling_light_skin_tone": "🤸🏻", + "person_cartwheeling_medium-dark_skin_tone": "🤸🏾", + "person_cartwheeling_medium-light_skin_tone": "🤸🏼", + "person_cartwheeling_medium_skin_tone": "🤸🏽", + "person_climbing": "🧗", + "person_climbing_dark_skin_tone": "🧗🏿", + "person_climbing_light_skin_tone": "🧗🏻", + "person_climbing_medium-dark_skin_tone": "🧗🏾", + "person_climbing_medium-light_skin_tone": "🧗🏼", + "person_climbing_medium_skin_tone": "🧗🏽", + "person_facepalming": "🤦", + "person_facepalming_dark_skin_tone": "🤦🏿", + "person_facepalming_light_skin_tone": 
"🤦🏻", + "person_facepalming_medium-dark_skin_tone": "🤦🏾", + "person_facepalming_medium-light_skin_tone": "🤦🏼", + "person_facepalming_medium_skin_tone": "🤦🏽", + "person_fencing": "🤺", + "person_frowning": "🙍", + "person_frowning_dark_skin_tone": "🙍🏿", + "person_frowning_light_skin_tone": "🙍🏻", + "person_frowning_medium-dark_skin_tone": "🙍🏾", + "person_frowning_medium-light_skin_tone": "🙍🏼", + "person_frowning_medium_skin_tone": "🙍🏽", + "person_gesturing_no": "🙅", + "person_gesturing_no_dark_skin_tone": "🙅🏿", + "person_gesturing_no_light_skin_tone": "🙅🏻", + "person_gesturing_no_medium-dark_skin_tone": "🙅🏾", + "person_gesturing_no_medium-light_skin_tone": "🙅🏼", + "person_gesturing_no_medium_skin_tone": "🙅🏽", + "person_gesturing_ok": "🙆", + "person_gesturing_ok_dark_skin_tone": "🙆🏿", + "person_gesturing_ok_light_skin_tone": "🙆🏻", + "person_gesturing_ok_medium-dark_skin_tone": "🙆🏾", + "person_gesturing_ok_medium-light_skin_tone": "🙆🏼", + "person_gesturing_ok_medium_skin_tone": "🙆🏽", + "person_getting_haircut": "💇", + "person_getting_haircut_dark_skin_tone": "💇🏿", + "person_getting_haircut_light_skin_tone": "💇🏻", + "person_getting_haircut_medium-dark_skin_tone": "💇🏾", + "person_getting_haircut_medium-light_skin_tone": "💇🏼", + "person_getting_haircut_medium_skin_tone": "💇🏽", + "person_getting_massage": "💆", + "person_getting_massage_dark_skin_tone": "💆🏿", + "person_getting_massage_light_skin_tone": "💆🏻", + "person_getting_massage_medium-dark_skin_tone": "💆🏾", + "person_getting_massage_medium-light_skin_tone": "💆🏼", + "person_getting_massage_medium_skin_tone": "💆🏽", + "person_golfing": "🏌", + "person_golfing_dark_skin_tone": "🏌🏿", + "person_golfing_light_skin_tone": "🏌🏻", + "person_golfing_medium-dark_skin_tone": "🏌🏾", + "person_golfing_medium-light_skin_tone": "🏌🏼", + "person_golfing_medium_skin_tone": "🏌🏽", + "person_in_bed": "🛌", + "person_in_bed_dark_skin_tone": "🛌🏿", + "person_in_bed_light_skin_tone": "🛌🏻", + "person_in_bed_medium-dark_skin_tone": "🛌🏾", + "person_in_bed_medium-light_skin_tone": "🛌🏼", + "person_in_bed_medium_skin_tone": "🛌🏽", + "person_in_lotus_position": "🧘", + "person_in_lotus_position_dark_skin_tone": "🧘🏿", + "person_in_lotus_position_light_skin_tone": "🧘🏻", + "person_in_lotus_position_medium-dark_skin_tone": "🧘🏾", + "person_in_lotus_position_medium-light_skin_tone": "🧘🏼", + "person_in_lotus_position_medium_skin_tone": "🧘🏽", + "person_in_steamy_room": "🧖", + "person_in_steamy_room_dark_skin_tone": "🧖🏿", + "person_in_steamy_room_light_skin_tone": "🧖🏻", + "person_in_steamy_room_medium-dark_skin_tone": "🧖🏾", + "person_in_steamy_room_medium-light_skin_tone": "🧖🏼", + "person_in_steamy_room_medium_skin_tone": "🧖🏽", + "person_juggling": "🤹", + "person_juggling_dark_skin_tone": "🤹🏿", + "person_juggling_light_skin_tone": "🤹🏻", + "person_juggling_medium-dark_skin_tone": "🤹🏾", + "person_juggling_medium-light_skin_tone": "🤹🏼", + "person_juggling_medium_skin_tone": "🤹🏽", + "person_kneeling": "🧎", + "person_lifting_weights": "🏋", + "person_lifting_weights_dark_skin_tone": "🏋🏿", + "person_lifting_weights_light_skin_tone": "🏋🏻", + "person_lifting_weights_medium-dark_skin_tone": "🏋🏾", + "person_lifting_weights_medium-light_skin_tone": "🏋🏼", + "person_lifting_weights_medium_skin_tone": "🏋🏽", + "person_mountain_biking": "🚵", + "person_mountain_biking_dark_skin_tone": "🚵🏿", + "person_mountain_biking_light_skin_tone": "🚵🏻", + "person_mountain_biking_medium-dark_skin_tone": "🚵🏾", + "person_mountain_biking_medium-light_skin_tone": "🚵🏼", + "person_mountain_biking_medium_skin_tone": "🚵🏽", + 
"person_playing_handball": "🤾", + "person_playing_handball_dark_skin_tone": "🤾🏿", + "person_playing_handball_light_skin_tone": "🤾🏻", + "person_playing_handball_medium-dark_skin_tone": "🤾🏾", + "person_playing_handball_medium-light_skin_tone": "🤾🏼", + "person_playing_handball_medium_skin_tone": "🤾🏽", + "person_playing_water_polo": "🤽", + "person_playing_water_polo_dark_skin_tone": "🤽🏿", + "person_playing_water_polo_light_skin_tone": "🤽🏻", + "person_playing_water_polo_medium-dark_skin_tone": "🤽🏾", + "person_playing_water_polo_medium-light_skin_tone": "🤽🏼", + "person_playing_water_polo_medium_skin_tone": "🤽🏽", + "person_pouting": "🙎", + "person_pouting_dark_skin_tone": "🙎🏿", + "person_pouting_light_skin_tone": "🙎🏻", + "person_pouting_medium-dark_skin_tone": "🙎🏾", + "person_pouting_medium-light_skin_tone": "🙎🏼", + "person_pouting_medium_skin_tone": "🙎🏽", + "person_raising_hand": "🙋", + "person_raising_hand_dark_skin_tone": "🙋🏿", + "person_raising_hand_light_skin_tone": "🙋🏻", + "person_raising_hand_medium-dark_skin_tone": "🙋🏾", + "person_raising_hand_medium-light_skin_tone": "🙋🏼", + "person_raising_hand_medium_skin_tone": "🙋🏽", + "person_rowing_boat": "🚣", + "person_rowing_boat_dark_skin_tone": "🚣🏿", + "person_rowing_boat_light_skin_tone": "🚣🏻", + "person_rowing_boat_medium-dark_skin_tone": "🚣🏾", + "person_rowing_boat_medium-light_skin_tone": "🚣🏼", + "person_rowing_boat_medium_skin_tone": "🚣🏽", + "person_running": "🏃", + "person_running_dark_skin_tone": "🏃🏿", + "person_running_light_skin_tone": "🏃🏻", + "person_running_medium-dark_skin_tone": "🏃🏾", + "person_running_medium-light_skin_tone": "🏃🏼", + "person_running_medium_skin_tone": "🏃🏽", + "person_shrugging": "🤷", + "person_shrugging_dark_skin_tone": "🤷🏿", + "person_shrugging_light_skin_tone": "🤷🏻", + "person_shrugging_medium-dark_skin_tone": "🤷🏾", + "person_shrugging_medium-light_skin_tone": "🤷🏼", + "person_shrugging_medium_skin_tone": "🤷🏽", + "person_standing": "🧍", + "person_surfing": "🏄", + "person_surfing_dark_skin_tone": "🏄🏿", + "person_surfing_light_skin_tone": "🏄🏻", + "person_surfing_medium-dark_skin_tone": "🏄🏾", + "person_surfing_medium-light_skin_tone": "🏄🏼", + "person_surfing_medium_skin_tone": "🏄🏽", + "person_swimming": "🏊", + "person_swimming_dark_skin_tone": "🏊🏿", + "person_swimming_light_skin_tone": "🏊🏻", + "person_swimming_medium-dark_skin_tone": "🏊🏾", + "person_swimming_medium-light_skin_tone": "🏊🏼", + "person_swimming_medium_skin_tone": "🏊🏽", + "person_taking_bath": "🛀", + "person_taking_bath_dark_skin_tone": "🛀🏿", + "person_taking_bath_light_skin_tone": "🛀🏻", + "person_taking_bath_medium-dark_skin_tone": "🛀🏾", + "person_taking_bath_medium-light_skin_tone": "🛀🏼", + "person_taking_bath_medium_skin_tone": "🛀🏽", + "person_tipping_hand": "💁", + "person_tipping_hand_dark_skin_tone": "💁🏿", + "person_tipping_hand_light_skin_tone": "💁🏻", + "person_tipping_hand_medium-dark_skin_tone": "💁🏾", + "person_tipping_hand_medium-light_skin_tone": "💁🏼", + "person_tipping_hand_medium_skin_tone": "💁🏽", + "person_walking": "🚶", + "person_walking_dark_skin_tone": "🚶🏿", + "person_walking_light_skin_tone": "🚶🏻", + "person_walking_medium-dark_skin_tone": "🚶🏾", + "person_walking_medium-light_skin_tone": "🚶🏼", + "person_walking_medium_skin_tone": "🚶🏽", + "person_wearing_turban": "👳", + "person_wearing_turban_dark_skin_tone": "👳🏿", + "person_wearing_turban_light_skin_tone": "👳🏻", + "person_wearing_turban_medium-dark_skin_tone": "👳🏾", + "person_wearing_turban_medium-light_skin_tone": "👳🏼", + "person_wearing_turban_medium_skin_tone": "👳🏽", + "petri_dish": 
"🧫", + "pick": "⛏", + "pie": "🥧", + "pig": "🐷", + "pig_face": "🐷", + "pig_nose": "🐽", + "pile_of_poo": "💩", + "pill": "💊", + "pinching_hand": "🤏", + "pine_decoration": "🎍", + "pineapple": "🍍", + "ping_pong": "🏓", + "pirate_flag": "🏴\u200d☠️", + "pistol": "🔫", + "pizza": "🍕", + "place_of_worship": "🛐", + "play_button": "▶", + "play_or_pause_button": "⏯", + "pleading_face": "🥺", + "police_car": "🚓", + "police_car_light": "🚨", + "police_officer": "👮", + "police_officer_dark_skin_tone": "👮🏿", + "police_officer_light_skin_tone": "👮🏻", + "police_officer_medium-dark_skin_tone": "👮🏾", + "police_officer_medium-light_skin_tone": "👮🏼", + "police_officer_medium_skin_tone": "👮🏽", + "poodle": "🐩", + "pool_8_ball": "🎱", + "popcorn": "🍿", + "post_office": "🏣", + "postal_horn": "📯", + "postbox": "📮", + "pot_of_food": "🍲", + "potable_water": "🚰", + "potato": "🥔", + "poultry_leg": "🍗", + "pound_banknote": "💷", + "pouting_cat_face": "😾", + "pouting_face": "😡", + "prayer_beads": "📿", + "pregnant_woman": "🤰", + "pregnant_woman_dark_skin_tone": "🤰🏿", + "pregnant_woman_light_skin_tone": "🤰🏻", + "pregnant_woman_medium-dark_skin_tone": "🤰🏾", + "pregnant_woman_medium-light_skin_tone": "🤰🏼", + "pregnant_woman_medium_skin_tone": "🤰🏽", + "pretzel": "🥨", + "probing_cane": "🦯", + "prince": "🤴", + "prince_dark_skin_tone": "🤴🏿", + "prince_light_skin_tone": "🤴🏻", + "prince_medium-dark_skin_tone": "🤴🏾", + "prince_medium-light_skin_tone": "🤴🏼", + "prince_medium_skin_tone": "🤴🏽", + "princess": "👸", + "princess_dark_skin_tone": "👸🏿", + "princess_light_skin_tone": "👸🏻", + "princess_medium-dark_skin_tone": "👸🏾", + "princess_medium-light_skin_tone": "👸🏼", + "princess_medium_skin_tone": "👸🏽", + "printer": "🖨", + "prohibited": "🚫", + "purple_circle": "🟣", + "purple_heart": "💜", + "purple_square": "🟪", + "purse": "👛", + "pushpin": "📌", + "question_mark": "❓", + "rabbit": "🐰", + "rabbit_face": "🐰", + "raccoon": "🦝", + "racing_car": "🏎", + "radio": "📻", + "radio_button": "🔘", + "radioactive": "☢", + "railway_car": "🚃", + "railway_track": "🛤", + "rainbow": "🌈", + "rainbow_flag": "🏳️\u200d🌈", + "raised_back_of_hand": "🤚", + "raised_back_of_hand_dark_skin_tone": "🤚🏿", + "raised_back_of_hand_light_skin_tone": "🤚🏻", + "raised_back_of_hand_medium-dark_skin_tone": "🤚🏾", + "raised_back_of_hand_medium-light_skin_tone": "🤚🏼", + "raised_back_of_hand_medium_skin_tone": "🤚🏽", + "raised_fist": "✊", + "raised_fist_dark_skin_tone": "✊🏿", + "raised_fist_light_skin_tone": "✊🏻", + "raised_fist_medium-dark_skin_tone": "✊🏾", + "raised_fist_medium-light_skin_tone": "✊🏼", + "raised_fist_medium_skin_tone": "✊🏽", + "raised_hand": "✋", + "raised_hand_dark_skin_tone": "✋🏿", + "raised_hand_light_skin_tone": "✋🏻", + "raised_hand_medium-dark_skin_tone": "✋🏾", + "raised_hand_medium-light_skin_tone": "✋🏼", + "raised_hand_medium_skin_tone": "✋🏽", + "raising_hands": "🙌", + "raising_hands_dark_skin_tone": "🙌🏿", + "raising_hands_light_skin_tone": "🙌🏻", + "raising_hands_medium-dark_skin_tone": "🙌🏾", + "raising_hands_medium-light_skin_tone": "🙌🏼", + "raising_hands_medium_skin_tone": "🙌🏽", + "ram": "🐏", + "rat": "🐀", + "razor": "🪒", + "ringed_planet": "🪐", + "receipt": "🧾", + "record_button": "⏺", + "recycling_symbol": "♻", + "red_apple": "🍎", + "red_circle": "🔴", + "red_envelope": "🧧", + "red_hair": "🦰", + "red-haired_man": "👨\u200d🦰", + "red-haired_woman": "👩\u200d🦰", + "red_heart": "❤", + "red_paper_lantern": "🏮", + "red_square": "🟥", + "red_triangle_pointed_down": "🔻", + "red_triangle_pointed_up": "🔺", + "registered": "®", + "relieved_face": "😌", + "reminder_ribbon": 
"🎗", + "repeat_button": "🔁", + "repeat_single_button": "🔂", + "rescue_worker’s_helmet": "⛑", + "restroom": "🚻", + "reverse_button": "◀", + "revolving_hearts": "💞", + "rhinoceros": "🦏", + "ribbon": "🎀", + "rice_ball": "🍙", + "rice_cracker": "🍘", + "right-facing_fist": "🤜", + "right-facing_fist_dark_skin_tone": "🤜🏿", + "right-facing_fist_light_skin_tone": "🤜🏻", + "right-facing_fist_medium-dark_skin_tone": "🤜🏾", + "right-facing_fist_medium-light_skin_tone": "🤜🏼", + "right-facing_fist_medium_skin_tone": "🤜🏽", + "right_anger_bubble": "🗯", + "right_arrow": "➡", + "right_arrow_curving_down": "⤵", + "right_arrow_curving_left": "↩", + "right_arrow_curving_up": "⤴", + "ring": "💍", + "roasted_sweet_potato": "🍠", + "robot_face": "🤖", + "rocket": "🚀", + "roll_of_paper": "🧻", + "rolled-up_newspaper": "🗞", + "roller_coaster": "🎢", + "rolling_on_the_floor_laughing": "🤣", + "rooster": "🐓", + "rose": "🌹", + "rosette": "🏵", + "round_pushpin": "📍", + "rugby_football": "🏉", + "running_shirt": "🎽", + "running_shoe": "👟", + "sad_but_relieved_face": "😥", + "safety_pin": "🧷", + "safety_vest": "🦺", + "salt": "🧂", + "sailboat": "⛵", + "sake": "🍶", + "sandwich": "🥪", + "sari": "🥻", + "satellite": "📡", + "satellite_antenna": "📡", + "sauropod": "🦕", + "saxophone": "🎷", + "scarf": "🧣", + "school": "🏫", + "school_backpack": "🎒", + "scissors": "✂", + "scorpion": "🦂", + "scroll": "📜", + "seat": "💺", + "see-no-evil_monkey": "🙈", + "seedling": "🌱", + "selfie": "🤳", + "selfie_dark_skin_tone": "🤳🏿", + "selfie_light_skin_tone": "🤳🏻", + "selfie_medium-dark_skin_tone": "🤳🏾", + "selfie_medium-light_skin_tone": "🤳🏼", + "selfie_medium_skin_tone": "🤳🏽", + "service_dog": "🐕\u200d🦺", + "seven-thirty": "🕢", + "seven_o’clock": "🕖", + "shallow_pan_of_food": "🥘", + "shamrock": "☘", + "shark": "🦈", + "shaved_ice": "🍧", + "sheaf_of_rice": "🌾", + "shield": "🛡", + "shinto_shrine": "⛩", + "ship": "🚢", + "shooting_star": "🌠", + "shopping_bags": "🛍", + "shopping_cart": "🛒", + "shortcake": "🍰", + "shorts": "🩳", + "shower": "🚿", + "shrimp": "🦐", + "shuffle_tracks_button": "🔀", + "shushing_face": "🤫", + "sign_of_the_horns": "🤘", + "sign_of_the_horns_dark_skin_tone": "🤘🏿", + "sign_of_the_horns_light_skin_tone": "🤘🏻", + "sign_of_the_horns_medium-dark_skin_tone": "🤘🏾", + "sign_of_the_horns_medium-light_skin_tone": "🤘🏼", + "sign_of_the_horns_medium_skin_tone": "🤘🏽", + "six-thirty": "🕡", + "six_o’clock": "🕕", + "skateboard": "🛹", + "skier": "⛷", + "skis": "🎿", + "skull": "💀", + "skull_and_crossbones": "☠", + "skunk": "🦨", + "sled": "🛷", + "sleeping_face": "😴", + "sleepy_face": "😪", + "slightly_frowning_face": "🙁", + "slightly_smiling_face": "🙂", + "slot_machine": "🎰", + "sloth": "🦥", + "small_airplane": "🛩", + "small_blue_diamond": "🔹", + "small_orange_diamond": "🔸", + "smiling_cat_face_with_heart-eyes": "😻", + "smiling_face": "☺", + "smiling_face_with_halo": "😇", + "smiling_face_with_3_hearts": "🥰", + "smiling_face_with_heart-eyes": "😍", + "smiling_face_with_horns": "😈", + "smiling_face_with_smiling_eyes": "😊", + "smiling_face_with_sunglasses": "😎", + "smirking_face": "😏", + "snail": "🐌", + "snake": "🐍", + "sneezing_face": "🤧", + "snow-capped_mountain": "🏔", + "snowboarder": "🏂", + "snowboarder_dark_skin_tone": "🏂🏿", + "snowboarder_light_skin_tone": "🏂🏻", + "snowboarder_medium-dark_skin_tone": "🏂🏾", + "snowboarder_medium-light_skin_tone": "🏂🏼", + "snowboarder_medium_skin_tone": "🏂🏽", + "snowflake": "❄", + "snowman": "☃", + "snowman_without_snow": "⛄", + "soap": "🧼", + "soccer_ball": "⚽", + "socks": "🧦", + "softball": "🥎", + "soft_ice_cream": "🍦", + 
"spade_suit": "♠", + "spaghetti": "🍝", + "sparkle": "❇", + "sparkler": "🎇", + "sparkles": "✨", + "sparkling_heart": "💖", + "speak-no-evil_monkey": "🙊", + "speaker_high_volume": "🔊", + "speaker_low_volume": "🔈", + "speaker_medium_volume": "🔉", + "speaking_head": "🗣", + "speech_balloon": "💬", + "speedboat": "🚤", + "spider": "🕷", + "spider_web": "🕸", + "spiral_calendar": "🗓", + "spiral_notepad": "🗒", + "spiral_shell": "🐚", + "spoon": "🥄", + "sponge": "🧽", + "sport_utility_vehicle": "🚙", + "sports_medal": "🏅", + "spouting_whale": "🐳", + "squid": "🦑", + "squinting_face_with_tongue": "😝", + "stadium": "🏟", + "star-struck": "🤩", + "star_and_crescent": "☪", + "star_of_david": "✡", + "station": "🚉", + "steaming_bowl": "🍜", + "stethoscope": "🩺", + "stop_button": "⏹", + "stop_sign": "🛑", + "stopwatch": "⏱", + "straight_ruler": "📏", + "strawberry": "🍓", + "studio_microphone": "🎙", + "stuffed_flatbread": "🥙", + "sun": "☀", + "sun_behind_cloud": "⛅", + "sun_behind_large_cloud": "🌥", + "sun_behind_rain_cloud": "🌦", + "sun_behind_small_cloud": "🌤", + "sun_with_face": "🌞", + "sunflower": "🌻", + "sunglasses": "😎", + "sunrise": "🌅", + "sunrise_over_mountains": "🌄", + "sunset": "🌇", + "superhero": "🦸", + "supervillain": "🦹", + "sushi": "🍣", + "suspension_railway": "🚟", + "swan": "🦢", + "sweat_droplets": "💦", + "synagogue": "🕍", + "syringe": "💉", + "t-shirt": "👕", + "taco": "🌮", + "takeout_box": "🥡", + "tanabata_tree": "🎋", + "tangerine": "🍊", + "taxi": "🚕", + "teacup_without_handle": "🍵", + "tear-off_calendar": "📆", + "teddy_bear": "🧸", + "telephone": "☎", + "telephone_receiver": "📞", + "telescope": "🔭", + "television": "📺", + "ten-thirty": "🕥", + "ten_o’clock": "🕙", + "tennis": "🎾", + "tent": "⛺", + "test_tube": "🧪", + "thermometer": "🌡", + "thinking_face": "🤔", + "thought_balloon": "💭", + "thread": "🧵", + "three-thirty": "🕞", + "three_o’clock": "🕒", + "thumbs_down": "👎", + "thumbs_down_dark_skin_tone": "👎🏿", + "thumbs_down_light_skin_tone": "👎🏻", + "thumbs_down_medium-dark_skin_tone": "👎🏾", + "thumbs_down_medium-light_skin_tone": "👎🏼", + "thumbs_down_medium_skin_tone": "👎🏽", + "thumbs_up": "👍", + "thumbs_up_dark_skin_tone": "👍🏿", + "thumbs_up_light_skin_tone": "👍🏻", + "thumbs_up_medium-dark_skin_tone": "👍🏾", + "thumbs_up_medium-light_skin_tone": "👍🏼", + "thumbs_up_medium_skin_tone": "👍🏽", + "ticket": "🎫", + "tiger": "🐯", + "tiger_face": "🐯", + "timer_clock": "⏲", + "tired_face": "😫", + "toolbox": "🧰", + "toilet": "🚽", + "tomato": "🍅", + "tongue": "👅", + "tooth": "🦷", + "top_hat": "🎩", + "tornado": "🌪", + "trackball": "🖲", + "tractor": "🚜", + "trade_mark": "™", + "train": "🚋", + "tram": "🚊", + "tram_car": "🚋", + "triangular_flag": "🚩", + "triangular_ruler": "📐", + "trident_emblem": "🔱", + "trolleybus": "🚎", + "trophy": "🏆", + "tropical_drink": "🍹", + "tropical_fish": "🐠", + "trumpet": "🎺", + "tulip": "🌷", + "tumbler_glass": "🥃", + "turtle": "🐢", + "twelve-thirty": "🕧", + "twelve_o’clock": "🕛", + "two-hump_camel": "🐫", + "two-thirty": "🕝", + "two_hearts": "💕", + "two_men_holding_hands": "👬", + "two_o’clock": "🕑", + "two_women_holding_hands": "👭", + "umbrella": "☂", + "umbrella_on_ground": "⛱", + "umbrella_with_rain_drops": "☔", + "unamused_face": "😒", + "unicorn_face": "🦄", + "unlocked": "🔓", + "up-down_arrow": "↕", + "up-left_arrow": "↖", + "up-right_arrow": "↗", + "up_arrow": "⬆", + "upside-down_face": "🙃", + "upwards_button": "🔼", + "vampire": "🧛", + "vampire_dark_skin_tone": "🧛🏿", + "vampire_light_skin_tone": "🧛🏻", + "vampire_medium-dark_skin_tone": "🧛🏾", + "vampire_medium-light_skin_tone": "🧛🏼", + 
"vampire_medium_skin_tone": "🧛🏽", + "vertical_traffic_light": "🚦", + "vibration_mode": "📳", + "victory_hand": "✌", + "victory_hand_dark_skin_tone": "✌🏿", + "victory_hand_light_skin_tone": "✌🏻", + "victory_hand_medium-dark_skin_tone": "✌🏾", + "victory_hand_medium-light_skin_tone": "✌🏼", + "victory_hand_medium_skin_tone": "✌🏽", + "video_camera": "📹", + "video_game": "🎮", + "videocassette": "📼", + "violin": "🎻", + "volcano": "🌋", + "volleyball": "🏐", + "vulcan_salute": "🖖", + "vulcan_salute_dark_skin_tone": "🖖🏿", + "vulcan_salute_light_skin_tone": "🖖🏻", + "vulcan_salute_medium-dark_skin_tone": "🖖🏾", + "vulcan_salute_medium-light_skin_tone": "🖖🏼", + "vulcan_salute_medium_skin_tone": "🖖🏽", + "waffle": "🧇", + "waning_crescent_moon": "🌘", + "waning_gibbous_moon": "🌖", + "warning": "⚠", + "wastebasket": "🗑", + "watch": "⌚", + "water_buffalo": "🐃", + "water_closet": "🚾", + "water_wave": "🌊", + "watermelon": "🍉", + "waving_hand": "👋", + "waving_hand_dark_skin_tone": "👋🏿", + "waving_hand_light_skin_tone": "👋🏻", + "waving_hand_medium-dark_skin_tone": "👋🏾", + "waving_hand_medium-light_skin_tone": "👋🏼", + "waving_hand_medium_skin_tone": "👋🏽", + "wavy_dash": "〰", + "waxing_crescent_moon": "🌒", + "waxing_gibbous_moon": "🌔", + "weary_cat_face": "🙀", + "weary_face": "😩", + "wedding": "💒", + "whale": "🐳", + "wheel_of_dharma": "☸", + "wheelchair_symbol": "♿", + "white_circle": "⚪", + "white_exclamation_mark": "❕", + "white_flag": "🏳", + "white_flower": "💮", + "white_hair": "🦳", + "white-haired_man": "👨\u200d🦳", + "white-haired_woman": "👩\u200d🦳", + "white_heart": "🤍", + "white_heavy_check_mark": "✅", + "white_large_square": "⬜", + "white_medium-small_square": "◽", + "white_medium_square": "◻", + "white_medium_star": "⭐", + "white_question_mark": "❔", + "white_small_square": "▫", + "white_square_button": "🔳", + "wilted_flower": "🥀", + "wind_chime": "🎐", + "wind_face": "🌬", + "wine_glass": "🍷", + "winking_face": "😉", + "winking_face_with_tongue": "😜", + "wolf_face": "🐺", + "woman": "👩", + "woman_artist": "👩\u200d🎨", + "woman_artist_dark_skin_tone": "👩🏿\u200d🎨", + "woman_artist_light_skin_tone": "👩🏻\u200d🎨", + "woman_artist_medium-dark_skin_tone": "👩🏾\u200d🎨", + "woman_artist_medium-light_skin_tone": "👩🏼\u200d🎨", + "woman_artist_medium_skin_tone": "👩🏽\u200d🎨", + "woman_astronaut": "👩\u200d🚀", + "woman_astronaut_dark_skin_tone": "👩🏿\u200d🚀", + "woman_astronaut_light_skin_tone": "👩🏻\u200d🚀", + "woman_astronaut_medium-dark_skin_tone": "👩🏾\u200d🚀", + "woman_astronaut_medium-light_skin_tone": "👩🏼\u200d🚀", + "woman_astronaut_medium_skin_tone": "👩🏽\u200d🚀", + "woman_biking": "🚴\u200d♀️", + "woman_biking_dark_skin_tone": "🚴🏿\u200d♀️", + "woman_biking_light_skin_tone": "🚴🏻\u200d♀️", + "woman_biking_medium-dark_skin_tone": "🚴🏾\u200d♀️", + "woman_biking_medium-light_skin_tone": "🚴🏼\u200d♀️", + "woman_biking_medium_skin_tone": "🚴🏽\u200d♀️", + "woman_bouncing_ball": "⛹️\u200d♀️", + "woman_bouncing_ball_dark_skin_tone": "⛹🏿\u200d♀️", + "woman_bouncing_ball_light_skin_tone": "⛹🏻\u200d♀️", + "woman_bouncing_ball_medium-dark_skin_tone": "⛹🏾\u200d♀️", + "woman_bouncing_ball_medium-light_skin_tone": "⛹🏼\u200d♀️", + "woman_bouncing_ball_medium_skin_tone": "⛹🏽\u200d♀️", + "woman_bowing": "🙇\u200d♀️", + "woman_bowing_dark_skin_tone": "🙇🏿\u200d♀️", + "woman_bowing_light_skin_tone": "🙇🏻\u200d♀️", + "woman_bowing_medium-dark_skin_tone": "🙇🏾\u200d♀️", + "woman_bowing_medium-light_skin_tone": "🙇🏼\u200d♀️", + "woman_bowing_medium_skin_tone": "🙇🏽\u200d♀️", + "woman_cartwheeling": "🤸\u200d♀️", + "woman_cartwheeling_dark_skin_tone": 
"🤸🏿\u200d♀️", + "woman_cartwheeling_light_skin_tone": "🤸🏻\u200d♀️", + "woman_cartwheeling_medium-dark_skin_tone": "🤸🏾\u200d♀️", + "woman_cartwheeling_medium-light_skin_tone": "🤸🏼\u200d♀️", + "woman_cartwheeling_medium_skin_tone": "🤸🏽\u200d♀️", + "woman_climbing": "🧗\u200d♀️", + "woman_climbing_dark_skin_tone": "🧗🏿\u200d♀️", + "woman_climbing_light_skin_tone": "🧗🏻\u200d♀️", + "woman_climbing_medium-dark_skin_tone": "🧗🏾\u200d♀️", + "woman_climbing_medium-light_skin_tone": "🧗🏼\u200d♀️", + "woman_climbing_medium_skin_tone": "🧗🏽\u200d♀️", + "woman_construction_worker": "👷\u200d♀️", + "woman_construction_worker_dark_skin_tone": "👷🏿\u200d♀️", + "woman_construction_worker_light_skin_tone": "👷🏻\u200d♀️", + "woman_construction_worker_medium-dark_skin_tone": "👷🏾\u200d♀️", + "woman_construction_worker_medium-light_skin_tone": "👷🏼\u200d♀️", + "woman_construction_worker_medium_skin_tone": "👷🏽\u200d♀️", + "woman_cook": "👩\u200d🍳", + "woman_cook_dark_skin_tone": "👩🏿\u200d🍳", + "woman_cook_light_skin_tone": "👩🏻\u200d🍳", + "woman_cook_medium-dark_skin_tone": "👩🏾\u200d🍳", + "woman_cook_medium-light_skin_tone": "👩🏼\u200d🍳", + "woman_cook_medium_skin_tone": "👩🏽\u200d🍳", + "woman_dancing": "💃", + "woman_dancing_dark_skin_tone": "💃🏿", + "woman_dancing_light_skin_tone": "💃🏻", + "woman_dancing_medium-dark_skin_tone": "💃🏾", + "woman_dancing_medium-light_skin_tone": "💃🏼", + "woman_dancing_medium_skin_tone": "💃🏽", + "woman_dark_skin_tone": "👩🏿", + "woman_detective": "🕵️\u200d♀️", + "woman_detective_dark_skin_tone": "🕵🏿\u200d♀️", + "woman_detective_light_skin_tone": "🕵🏻\u200d♀️", + "woman_detective_medium-dark_skin_tone": "🕵🏾\u200d♀️", + "woman_detective_medium-light_skin_tone": "🕵🏼\u200d♀️", + "woman_detective_medium_skin_tone": "🕵🏽\u200d♀️", + "woman_elf": "🧝\u200d♀️", + "woman_elf_dark_skin_tone": "🧝🏿\u200d♀️", + "woman_elf_light_skin_tone": "🧝🏻\u200d♀️", + "woman_elf_medium-dark_skin_tone": "🧝🏾\u200d♀️", + "woman_elf_medium-light_skin_tone": "🧝🏼\u200d♀️", + "woman_elf_medium_skin_tone": "🧝🏽\u200d♀️", + "woman_facepalming": "🤦\u200d♀️", + "woman_facepalming_dark_skin_tone": "🤦🏿\u200d♀️", + "woman_facepalming_light_skin_tone": "🤦🏻\u200d♀️", + "woman_facepalming_medium-dark_skin_tone": "🤦🏾\u200d♀️", + "woman_facepalming_medium-light_skin_tone": "🤦🏼\u200d♀️", + "woman_facepalming_medium_skin_tone": "🤦🏽\u200d♀️", + "woman_factory_worker": "👩\u200d🏭", + "woman_factory_worker_dark_skin_tone": "👩🏿\u200d🏭", + "woman_factory_worker_light_skin_tone": "👩🏻\u200d🏭", + "woman_factory_worker_medium-dark_skin_tone": "👩🏾\u200d🏭", + "woman_factory_worker_medium-light_skin_tone": "👩🏼\u200d🏭", + "woman_factory_worker_medium_skin_tone": "👩🏽\u200d🏭", + "woman_fairy": "🧚\u200d♀️", + "woman_fairy_dark_skin_tone": "🧚🏿\u200d♀️", + "woman_fairy_light_skin_tone": "🧚🏻\u200d♀️", + "woman_fairy_medium-dark_skin_tone": "🧚🏾\u200d♀️", + "woman_fairy_medium-light_skin_tone": "🧚🏼\u200d♀️", + "woman_fairy_medium_skin_tone": "🧚🏽\u200d♀️", + "woman_farmer": "👩\u200d🌾", + "woman_farmer_dark_skin_tone": "👩🏿\u200d🌾", + "woman_farmer_light_skin_tone": "👩🏻\u200d🌾", + "woman_farmer_medium-dark_skin_tone": "👩🏾\u200d🌾", + "woman_farmer_medium-light_skin_tone": "👩🏼\u200d🌾", + "woman_farmer_medium_skin_tone": "👩🏽\u200d🌾", + "woman_firefighter": "👩\u200d🚒", + "woman_firefighter_dark_skin_tone": "👩🏿\u200d🚒", + "woman_firefighter_light_skin_tone": "👩🏻\u200d🚒", + "woman_firefighter_medium-dark_skin_tone": "👩🏾\u200d🚒", + "woman_firefighter_medium-light_skin_tone": "👩🏼\u200d🚒", + "woman_firefighter_medium_skin_tone": "👩🏽\u200d🚒", + "woman_frowning": "🙍\u200d♀️", + 
"woman_frowning_dark_skin_tone": "🙍🏿\u200d♀️", + "woman_frowning_light_skin_tone": "🙍🏻\u200d♀️", + "woman_frowning_medium-dark_skin_tone": "🙍🏾\u200d♀️", + "woman_frowning_medium-light_skin_tone": "🙍🏼\u200d♀️", + "woman_frowning_medium_skin_tone": "🙍🏽\u200d♀️", + "woman_genie": "🧞\u200d♀️", + "woman_gesturing_no": "🙅\u200d♀️", + "woman_gesturing_no_dark_skin_tone": "🙅🏿\u200d♀️", + "woman_gesturing_no_light_skin_tone": "🙅🏻\u200d♀️", + "woman_gesturing_no_medium-dark_skin_tone": "🙅🏾\u200d♀️", + "woman_gesturing_no_medium-light_skin_tone": "🙅🏼\u200d♀️", + "woman_gesturing_no_medium_skin_tone": "🙅🏽\u200d♀️", + "woman_gesturing_ok": "🙆\u200d♀️", + "woman_gesturing_ok_dark_skin_tone": "🙆🏿\u200d♀️", + "woman_gesturing_ok_light_skin_tone": "🙆🏻\u200d♀️", + "woman_gesturing_ok_medium-dark_skin_tone": "🙆🏾\u200d♀️", + "woman_gesturing_ok_medium-light_skin_tone": "🙆🏼\u200d♀️", + "woman_gesturing_ok_medium_skin_tone": "🙆🏽\u200d♀️", + "woman_getting_haircut": "💇\u200d♀️", + "woman_getting_haircut_dark_skin_tone": "💇🏿\u200d♀️", + "woman_getting_haircut_light_skin_tone": "💇🏻\u200d♀️", + "woman_getting_haircut_medium-dark_skin_tone": "💇🏾\u200d♀️", + "woman_getting_haircut_medium-light_skin_tone": "💇🏼\u200d♀️", + "woman_getting_haircut_medium_skin_tone": "💇🏽\u200d♀️", + "woman_getting_massage": "💆\u200d♀️", + "woman_getting_massage_dark_skin_tone": "💆🏿\u200d♀️", + "woman_getting_massage_light_skin_tone": "💆🏻\u200d♀️", + "woman_getting_massage_medium-dark_skin_tone": "💆🏾\u200d♀️", + "woman_getting_massage_medium-light_skin_tone": "💆🏼\u200d♀️", + "woman_getting_massage_medium_skin_tone": "💆🏽\u200d♀️", + "woman_golfing": "🏌️\u200d♀️", + "woman_golfing_dark_skin_tone": "🏌🏿\u200d♀️", + "woman_golfing_light_skin_tone": "🏌🏻\u200d♀️", + "woman_golfing_medium-dark_skin_tone": "🏌🏾\u200d♀️", + "woman_golfing_medium-light_skin_tone": "🏌🏼\u200d♀️", + "woman_golfing_medium_skin_tone": "🏌🏽\u200d♀️", + "woman_guard": "💂\u200d♀️", + "woman_guard_dark_skin_tone": "💂🏿\u200d♀️", + "woman_guard_light_skin_tone": "💂🏻\u200d♀️", + "woman_guard_medium-dark_skin_tone": "💂🏾\u200d♀️", + "woman_guard_medium-light_skin_tone": "💂🏼\u200d♀️", + "woman_guard_medium_skin_tone": "💂🏽\u200d♀️", + "woman_health_worker": "👩\u200d⚕️", + "woman_health_worker_dark_skin_tone": "👩🏿\u200d⚕️", + "woman_health_worker_light_skin_tone": "👩🏻\u200d⚕️", + "woman_health_worker_medium-dark_skin_tone": "👩🏾\u200d⚕️", + "woman_health_worker_medium-light_skin_tone": "👩🏼\u200d⚕️", + "woman_health_worker_medium_skin_tone": "👩🏽\u200d⚕️", + "woman_in_lotus_position": "🧘\u200d♀️", + "woman_in_lotus_position_dark_skin_tone": "🧘🏿\u200d♀️", + "woman_in_lotus_position_light_skin_tone": "🧘🏻\u200d♀️", + "woman_in_lotus_position_medium-dark_skin_tone": "🧘🏾\u200d♀️", + "woman_in_lotus_position_medium-light_skin_tone": "🧘🏼\u200d♀️", + "woman_in_lotus_position_medium_skin_tone": "🧘🏽\u200d♀️", + "woman_in_manual_wheelchair": "👩\u200d🦽", + "woman_in_motorized_wheelchair": "👩\u200d🦼", + "woman_in_steamy_room": "🧖\u200d♀️", + "woman_in_steamy_room_dark_skin_tone": "🧖🏿\u200d♀️", + "woman_in_steamy_room_light_skin_tone": "🧖🏻\u200d♀️", + "woman_in_steamy_room_medium-dark_skin_tone": "🧖🏾\u200d♀️", + "woman_in_steamy_room_medium-light_skin_tone": "🧖🏼\u200d♀️", + "woman_in_steamy_room_medium_skin_tone": "🧖🏽\u200d♀️", + "woman_judge": "👩\u200d⚖️", + "woman_judge_dark_skin_tone": "👩🏿\u200d⚖️", + "woman_judge_light_skin_tone": "👩🏻\u200d⚖️", + "woman_judge_medium-dark_skin_tone": "👩🏾\u200d⚖️", + "woman_judge_medium-light_skin_tone": "👩🏼\u200d⚖️", + "woman_judge_medium_skin_tone": "👩🏽\u200d⚖️", + 
"woman_juggling": "🤹\u200d♀️", + "woman_juggling_dark_skin_tone": "🤹🏿\u200d♀️", + "woman_juggling_light_skin_tone": "🤹🏻\u200d♀️", + "woman_juggling_medium-dark_skin_tone": "🤹🏾\u200d♀️", + "woman_juggling_medium-light_skin_tone": "🤹🏼\u200d♀️", + "woman_juggling_medium_skin_tone": "🤹🏽\u200d♀️", + "woman_lifting_weights": "🏋️\u200d♀️", + "woman_lifting_weights_dark_skin_tone": "🏋🏿\u200d♀️", + "woman_lifting_weights_light_skin_tone": "🏋🏻\u200d♀️", + "woman_lifting_weights_medium-dark_skin_tone": "🏋🏾\u200d♀️", + "woman_lifting_weights_medium-light_skin_tone": "🏋🏼\u200d♀️", + "woman_lifting_weights_medium_skin_tone": "🏋🏽\u200d♀️", + "woman_light_skin_tone": "👩🏻", + "woman_mage": "🧙\u200d♀️", + "woman_mage_dark_skin_tone": "🧙🏿\u200d♀️", + "woman_mage_light_skin_tone": "🧙🏻\u200d♀️", + "woman_mage_medium-dark_skin_tone": "🧙🏾\u200d♀️", + "woman_mage_medium-light_skin_tone": "🧙🏼\u200d♀️", + "woman_mage_medium_skin_tone": "🧙🏽\u200d♀️", + "woman_mechanic": "👩\u200d🔧", + "woman_mechanic_dark_skin_tone": "👩🏿\u200d🔧", + "woman_mechanic_light_skin_tone": "👩🏻\u200d🔧", + "woman_mechanic_medium-dark_skin_tone": "👩🏾\u200d🔧", + "woman_mechanic_medium-light_skin_tone": "👩🏼\u200d🔧", + "woman_mechanic_medium_skin_tone": "👩🏽\u200d🔧", + "woman_medium-dark_skin_tone": "👩🏾", + "woman_medium-light_skin_tone": "👩🏼", + "woman_medium_skin_tone": "👩🏽", + "woman_mountain_biking": "🚵\u200d♀️", + "woman_mountain_biking_dark_skin_tone": "🚵🏿\u200d♀️", + "woman_mountain_biking_light_skin_tone": "🚵🏻\u200d♀️", + "woman_mountain_biking_medium-dark_skin_tone": "🚵🏾\u200d♀️", + "woman_mountain_biking_medium-light_skin_tone": "🚵🏼\u200d♀️", + "woman_mountain_biking_medium_skin_tone": "🚵🏽\u200d♀️", + "woman_office_worker": "👩\u200d💼", + "woman_office_worker_dark_skin_tone": "👩🏿\u200d💼", + "woman_office_worker_light_skin_tone": "👩🏻\u200d💼", + "woman_office_worker_medium-dark_skin_tone": "👩🏾\u200d💼", + "woman_office_worker_medium-light_skin_tone": "👩🏼\u200d💼", + "woman_office_worker_medium_skin_tone": "👩🏽\u200d💼", + "woman_pilot": "👩\u200d✈️", + "woman_pilot_dark_skin_tone": "👩🏿\u200d✈️", + "woman_pilot_light_skin_tone": "👩🏻\u200d✈️", + "woman_pilot_medium-dark_skin_tone": "👩🏾\u200d✈️", + "woman_pilot_medium-light_skin_tone": "👩🏼\u200d✈️", + "woman_pilot_medium_skin_tone": "👩🏽\u200d✈️", + "woman_playing_handball": "🤾\u200d♀️", + "woman_playing_handball_dark_skin_tone": "🤾🏿\u200d♀️", + "woman_playing_handball_light_skin_tone": "🤾🏻\u200d♀️", + "woman_playing_handball_medium-dark_skin_tone": "🤾🏾\u200d♀️", + "woman_playing_handball_medium-light_skin_tone": "🤾🏼\u200d♀️", + "woman_playing_handball_medium_skin_tone": "🤾🏽\u200d♀️", + "woman_playing_water_polo": "🤽\u200d♀️", + "woman_playing_water_polo_dark_skin_tone": "🤽🏿\u200d♀️", + "woman_playing_water_polo_light_skin_tone": "🤽🏻\u200d♀️", + "woman_playing_water_polo_medium-dark_skin_tone": "🤽🏾\u200d♀️", + "woman_playing_water_polo_medium-light_skin_tone": "🤽🏼\u200d♀️", + "woman_playing_water_polo_medium_skin_tone": "🤽🏽\u200d♀️", + "woman_police_officer": "👮\u200d♀️", + "woman_police_officer_dark_skin_tone": "👮🏿\u200d♀️", + "woman_police_officer_light_skin_tone": "👮🏻\u200d♀️", + "woman_police_officer_medium-dark_skin_tone": "👮🏾\u200d♀️", + "woman_police_officer_medium-light_skin_tone": "👮🏼\u200d♀️", + "woman_police_officer_medium_skin_tone": "👮🏽\u200d♀️", + "woman_pouting": "🙎\u200d♀️", + "woman_pouting_dark_skin_tone": "🙎🏿\u200d♀️", + "woman_pouting_light_skin_tone": "🙎🏻\u200d♀️", + "woman_pouting_medium-dark_skin_tone": "🙎🏾\u200d♀️", + "woman_pouting_medium-light_skin_tone": "🙎🏼\u200d♀️", + 
"woman_pouting_medium_skin_tone": "🙎🏽\u200d♀️", + "woman_raising_hand": "🙋\u200d♀️", + "woman_raising_hand_dark_skin_tone": "🙋🏿\u200d♀️", + "woman_raising_hand_light_skin_tone": "🙋🏻\u200d♀️", + "woman_raising_hand_medium-dark_skin_tone": "🙋🏾\u200d♀️", + "woman_raising_hand_medium-light_skin_tone": "🙋🏼\u200d♀️", + "woman_raising_hand_medium_skin_tone": "🙋🏽\u200d♀️", + "woman_rowing_boat": "🚣\u200d♀️", + "woman_rowing_boat_dark_skin_tone": "🚣🏿\u200d♀️", + "woman_rowing_boat_light_skin_tone": "🚣🏻\u200d♀️", + "woman_rowing_boat_medium-dark_skin_tone": "🚣🏾\u200d♀️", + "woman_rowing_boat_medium-light_skin_tone": "🚣🏼\u200d♀️", + "woman_rowing_boat_medium_skin_tone": "🚣🏽\u200d♀️", + "woman_running": "🏃\u200d♀️", + "woman_running_dark_skin_tone": "🏃🏿\u200d♀️", + "woman_running_light_skin_tone": "🏃🏻\u200d♀️", + "woman_running_medium-dark_skin_tone": "🏃🏾\u200d♀️", + "woman_running_medium-light_skin_tone": "🏃🏼\u200d♀️", + "woman_running_medium_skin_tone": "🏃🏽\u200d♀️", + "woman_scientist": "👩\u200d🔬", + "woman_scientist_dark_skin_tone": "👩🏿\u200d🔬", + "woman_scientist_light_skin_tone": "👩🏻\u200d🔬", + "woman_scientist_medium-dark_skin_tone": "👩🏾\u200d🔬", + "woman_scientist_medium-light_skin_tone": "👩🏼\u200d🔬", + "woman_scientist_medium_skin_tone": "👩🏽\u200d🔬", + "woman_shrugging": "🤷\u200d♀️", + "woman_shrugging_dark_skin_tone": "🤷🏿\u200d♀️", + "woman_shrugging_light_skin_tone": "🤷🏻\u200d♀️", + "woman_shrugging_medium-dark_skin_tone": "🤷🏾\u200d♀️", + "woman_shrugging_medium-light_skin_tone": "🤷🏼\u200d♀️", + "woman_shrugging_medium_skin_tone": "🤷🏽\u200d♀️", + "woman_singer": "👩\u200d🎤", + "woman_singer_dark_skin_tone": "👩🏿\u200d🎤", + "woman_singer_light_skin_tone": "👩🏻\u200d🎤", + "woman_singer_medium-dark_skin_tone": "👩🏾\u200d🎤", + "woman_singer_medium-light_skin_tone": "👩🏼\u200d🎤", + "woman_singer_medium_skin_tone": "👩🏽\u200d🎤", + "woman_student": "👩\u200d🎓", + "woman_student_dark_skin_tone": "👩🏿\u200d🎓", + "woman_student_light_skin_tone": "👩🏻\u200d🎓", + "woman_student_medium-dark_skin_tone": "👩🏾\u200d🎓", + "woman_student_medium-light_skin_tone": "👩🏼\u200d🎓", + "woman_student_medium_skin_tone": "👩🏽\u200d🎓", + "woman_surfing": "🏄\u200d♀️", + "woman_surfing_dark_skin_tone": "🏄🏿\u200d♀️", + "woman_surfing_light_skin_tone": "🏄🏻\u200d♀️", + "woman_surfing_medium-dark_skin_tone": "🏄🏾\u200d♀️", + "woman_surfing_medium-light_skin_tone": "🏄🏼\u200d♀️", + "woman_surfing_medium_skin_tone": "🏄🏽\u200d♀️", + "woman_swimming": "🏊\u200d♀️", + "woman_swimming_dark_skin_tone": "🏊🏿\u200d♀️", + "woman_swimming_light_skin_tone": "🏊🏻\u200d♀️", + "woman_swimming_medium-dark_skin_tone": "🏊🏾\u200d♀️", + "woman_swimming_medium-light_skin_tone": "🏊🏼\u200d♀️", + "woman_swimming_medium_skin_tone": "🏊🏽\u200d♀️", + "woman_teacher": "👩\u200d🏫", + "woman_teacher_dark_skin_tone": "👩🏿\u200d🏫", + "woman_teacher_light_skin_tone": "👩🏻\u200d🏫", + "woman_teacher_medium-dark_skin_tone": "👩🏾\u200d🏫", + "woman_teacher_medium-light_skin_tone": "👩🏼\u200d🏫", + "woman_teacher_medium_skin_tone": "👩🏽\u200d🏫", + "woman_technologist": "👩\u200d💻", + "woman_technologist_dark_skin_tone": "👩🏿\u200d💻", + "woman_technologist_light_skin_tone": "👩🏻\u200d💻", + "woman_technologist_medium-dark_skin_tone": "👩🏾\u200d💻", + "woman_technologist_medium-light_skin_tone": "👩🏼\u200d💻", + "woman_technologist_medium_skin_tone": "👩🏽\u200d💻", + "woman_tipping_hand": "💁\u200d♀️", + "woman_tipping_hand_dark_skin_tone": "💁🏿\u200d♀️", + "woman_tipping_hand_light_skin_tone": "💁🏻\u200d♀️", + "woman_tipping_hand_medium-dark_skin_tone": "💁🏾\u200d♀️", + 
"woman_tipping_hand_medium-light_skin_tone": "💁🏼\u200d♀️", + "woman_tipping_hand_medium_skin_tone": "💁🏽\u200d♀️", + "woman_vampire": "🧛\u200d♀️", + "woman_vampire_dark_skin_tone": "🧛🏿\u200d♀️", + "woman_vampire_light_skin_tone": "🧛🏻\u200d♀️", + "woman_vampire_medium-dark_skin_tone": "🧛🏾\u200d♀️", + "woman_vampire_medium-light_skin_tone": "🧛🏼\u200d♀️", + "woman_vampire_medium_skin_tone": "🧛🏽\u200d♀️", + "woman_walking": "🚶\u200d♀️", + "woman_walking_dark_skin_tone": "🚶🏿\u200d♀️", + "woman_walking_light_skin_tone": "🚶🏻\u200d♀️", + "woman_walking_medium-dark_skin_tone": "🚶🏾\u200d♀️", + "woman_walking_medium-light_skin_tone": "🚶🏼\u200d♀️", + "woman_walking_medium_skin_tone": "🚶🏽\u200d♀️", + "woman_wearing_turban": "👳\u200d♀️", + "woman_wearing_turban_dark_skin_tone": "👳🏿\u200d♀️", + "woman_wearing_turban_light_skin_tone": "👳🏻\u200d♀️", + "woman_wearing_turban_medium-dark_skin_tone": "👳🏾\u200d♀️", + "woman_wearing_turban_medium-light_skin_tone": "👳🏼\u200d♀️", + "woman_wearing_turban_medium_skin_tone": "👳🏽\u200d♀️", + "woman_with_headscarf": "🧕", + "woman_with_headscarf_dark_skin_tone": "🧕🏿", + "woman_with_headscarf_light_skin_tone": "🧕🏻", + "woman_with_headscarf_medium-dark_skin_tone": "🧕🏾", + "woman_with_headscarf_medium-light_skin_tone": "🧕🏼", + "woman_with_headscarf_medium_skin_tone": "🧕🏽", + "woman_with_probing_cane": "👩\u200d🦯", + "woman_zombie": "🧟\u200d♀️", + "woman’s_boot": "👢", + "woman’s_clothes": "👚", + "woman’s_hat": "👒", + "woman’s_sandal": "👡", + "women_with_bunny_ears": "👯\u200d♀️", + "women_wrestling": "🤼\u200d♀️", + "women’s_room": "🚺", + "woozy_face": "🥴", + "world_map": "🗺", + "worried_face": "😟", + "wrapped_gift": "🎁", + "wrench": "🔧", + "writing_hand": "✍", + "writing_hand_dark_skin_tone": "✍🏿", + "writing_hand_light_skin_tone": "✍🏻", + "writing_hand_medium-dark_skin_tone": "✍🏾", + "writing_hand_medium-light_skin_tone": "✍🏼", + "writing_hand_medium_skin_tone": "✍🏽", + "yarn": "🧶", + "yawning_face": "🥱", + "yellow_circle": "🟡", + "yellow_heart": "💛", + "yellow_square": "🟨", + "yen_banknote": "💴", + "yo-yo": "🪀", + "yin_yang": "☯", + "zany_face": "🤪", + "zebra": "🦓", + "zipper-mouth_face": "🤐", + "zombie": "🧟", + "zzz": "💤", + "åland_islands": "🇦🇽", + "keycap_asterisk": "*⃣", + "keycap_digit_eight": "8⃣", + "keycap_digit_five": "5⃣", + "keycap_digit_four": "4⃣", + "keycap_digit_nine": "9⃣", + "keycap_digit_one": "1⃣", + "keycap_digit_seven": "7⃣", + "keycap_digit_six": "6⃣", + "keycap_digit_three": "3⃣", + "keycap_digit_two": "2⃣", + "keycap_digit_zero": "0⃣", + "keycap_number_sign": "#⃣", + "light_skin_tone": "🏻", + "medium_light_skin_tone": "🏼", + "medium_skin_tone": "🏽", + "medium_dark_skin_tone": "🏾", + "dark_skin_tone": "🏿", + "regional_indicator_symbol_letter_a": "🇦", + "regional_indicator_symbol_letter_b": "🇧", + "regional_indicator_symbol_letter_c": "🇨", + "regional_indicator_symbol_letter_d": "🇩", + "regional_indicator_symbol_letter_e": "🇪", + "regional_indicator_symbol_letter_f": "🇫", + "regional_indicator_symbol_letter_g": "🇬", + "regional_indicator_symbol_letter_h": "🇭", + "regional_indicator_symbol_letter_i": "🇮", + "regional_indicator_symbol_letter_j": "🇯", + "regional_indicator_symbol_letter_k": "🇰", + "regional_indicator_symbol_letter_l": "🇱", + "regional_indicator_symbol_letter_m": "🇲", + "regional_indicator_symbol_letter_n": "🇳", + "regional_indicator_symbol_letter_o": "🇴", + "regional_indicator_symbol_letter_p": "🇵", + "regional_indicator_symbol_letter_q": "🇶", + "regional_indicator_symbol_letter_r": "🇷", + "regional_indicator_symbol_letter_s": "🇸", + 
"regional_indicator_symbol_letter_t": "🇹", + "regional_indicator_symbol_letter_u": "🇺", + "regional_indicator_symbol_letter_v": "🇻", + "regional_indicator_symbol_letter_w": "🇼", + "regional_indicator_symbol_letter_x": "🇽", + "regional_indicator_symbol_letter_y": "🇾", + "regional_indicator_symbol_letter_z": "🇿", + "airplane_arriving": "🛬", + "space_invader": "👾", + "football": "🏈", + "anger": "💢", + "angry": "😠", + "anguished": "😧", + "signal_strength": "📶", + "arrows_counterclockwise": "🔄", + "arrow_heading_down": "⤵", + "arrow_heading_up": "⤴", + "art": "🎨", + "astonished": "😲", + "athletic_shoe": "👟", + "atm": "🏧", + "car": "🚗", + "red_car": "🚗", + "angel": "👼", + "back": "🔙", + "badminton_racquet_and_shuttlecock": "🏸", + "dollar": "💵", + "euro": "💶", + "pound": "💷", + "yen": "💴", + "barber": "💈", + "bath": "🛀", + "bear": "🐻", + "heartbeat": "💓", + "beer": "🍺", + "no_bell": "🔕", + "bento": "🍱", + "bike": "🚲", + "bicyclist": "🚴", + "8ball": "🎱", + "biohazard_sign": "☣", + "birthday": "🎂", + "black_circle_for_record": "⏺", + "clubs": "♣", + "diamonds": "♦", + "arrow_double_down": "⏬", + "hearts": "♥", + "rewind": "⏪", + "black_left__pointing_double_triangle_with_vertical_bar": "⏮", + "arrow_backward": "◀", + "black_medium_small_square": "◾", + "question": "❓", + "fast_forward": "⏩", + "black_right__pointing_double_triangle_with_vertical_bar": "⏭", + "arrow_forward": "▶", + "black_right__pointing_triangle_with_double_vertical_bar": "⏯", + "arrow_right": "➡", + "spades": "♠", + "black_square_for_stop": "⏹", + "sunny": "☀", + "phone": "☎", + "recycle": "♻", + "arrow_double_up": "⏫", + "busstop": "🚏", + "date": "📅", + "flags": "🎏", + "cat2": "🐈", + "joy_cat": "😹", + "smirk_cat": "😼", + "chart_with_downwards_trend": "📉", + "chart_with_upwards_trend": "📈", + "chart": "💹", + "mega": "📣", + "checkered_flag": "🏁", + "accept": "🉑", + "ideograph_advantage": "🉐", + "congratulations": "㊗", + "secret": "㊙", + "m": "Ⓜ", + "city_sunset": "🌆", + "clapper": "🎬", + "clap": "👏", + "beers": "🍻", + "clock830": "🕣", + "clock8": "🕗", + "clock1130": "🕦", + "clock11": "🕚", + "clock530": "🕠", + "clock5": "🕔", + "clock430": "🕟", + "clock4": "🕓", + "clock930": "🕤", + "clock9": "🕘", + "clock130": "🕜", + "clock1": "🕐", + "clock730": "🕢", + "clock7": "🕖", + "clock630": "🕡", + "clock6": "🕕", + "clock1030": "🕥", + "clock10": "🕙", + "clock330": "🕞", + "clock3": "🕒", + "clock1230": "🕧", + "clock12": "🕛", + "clock230": "🕝", + "clock2": "🕑", + "arrows_clockwise": "🔃", + "repeat": "🔁", + "repeat_one": "🔂", + "closed_lock_with_key": "🔐", + "mailbox_closed": "📪", + "mailbox": "📫", + "cloud_with_tornado": "🌪", + "cocktail": "🍸", + "boom": "💥", + "compression": "🗜", + "confounded": "😖", + "confused": "😕", + "rice": "🍚", + "cow2": "🐄", + "cricket_bat_and_ball": "🏏", + "x": "❌", + "cry": "😢", + "curry": "🍛", + "dagger_knife": "🗡", + "dancer": "💃", + "dark_sunglasses": "🕶", + "dash": "💨", + "truck": "🚚", + "derelict_house_building": "🏚", + "diamond_shape_with_a_dot_inside": "💠", + "dart": "🎯", + "disappointed_relieved": "😥", + "disappointed": "😞", + "do_not_litter": "🚯", + "dog2": "🐕", + "flipper": "🐬", + "loop": "➿", + "bangbang": "‼", + "double_vertical_bar": "⏸", + "dove_of_peace": "🕊", + "small_red_triangle_down": "🔻", + "arrow_down_small": "🔽", + "arrow_down": "⬇", + "dromedary_camel": "🐪", + "e__mail": "📧", + "corn": "🌽", + "ear_of_rice": "🌾", + "earth_americas": "🌎", + "earth_asia": "🌏", + "earth_africa": "🌍", + "eight_pointed_black_star": "✴", + "eight_spoked_asterisk": "✳", + "eject_symbol": "⏏", + "bulb": "💡", + 
"emoji_modifier_fitzpatrick_type__1__2": "🏻", + "emoji_modifier_fitzpatrick_type__3": "🏼", + "emoji_modifier_fitzpatrick_type__4": "🏽", + "emoji_modifier_fitzpatrick_type__5": "🏾", + "emoji_modifier_fitzpatrick_type__6": "🏿", + "end": "🔚", + "email": "✉", + "european_castle": "🏰", + "european_post_office": "🏤", + "interrobang": "⁉", + "expressionless": "😑", + "eyeglasses": "👓", + "massage": "💆", + "yum": "😋", + "scream": "😱", + "kissing_heart": "😘", + "sweat": "😓", + "face_with_head__bandage": "🤕", + "triumph": "😤", + "mask": "😷", + "no_good": "🙅", + "ok_woman": "🙆", + "open_mouth": "😮", + "cold_sweat": "😰", + "stuck_out_tongue": "😛", + "stuck_out_tongue_closed_eyes": "😝", + "stuck_out_tongue_winking_eye": "😜", + "joy": "😂", + "no_mouth": "😶", + "santa": "🎅", + "fax": "📠", + "fearful": "😨", + "field_hockey_stick_and_ball": "🏑", + "first_quarter_moon_with_face": "🌛", + "fish_cake": "🍥", + "fishing_pole_and_fish": "🎣", + "facepunch": "👊", + "punch": "👊", + "flag_for_afghanistan": "🇦🇫", + "flag_for_albania": "🇦🇱", + "flag_for_algeria": "🇩🇿", + "flag_for_american_samoa": "🇦🇸", + "flag_for_andorra": "🇦🇩", + "flag_for_angola": "🇦🇴", + "flag_for_anguilla": "🇦🇮", + "flag_for_antarctica": "🇦🇶", + "flag_for_antigua_&_barbuda": "🇦🇬", + "flag_for_argentina": "🇦🇷", + "flag_for_armenia": "🇦🇲", + "flag_for_aruba": "🇦🇼", + "flag_for_ascension_island": "🇦🇨", + "flag_for_australia": "🇦🇺", + "flag_for_austria": "🇦🇹", + "flag_for_azerbaijan": "🇦🇿", + "flag_for_bahamas": "🇧🇸", + "flag_for_bahrain": "🇧🇭", + "flag_for_bangladesh": "🇧🇩", + "flag_for_barbados": "🇧🇧", + "flag_for_belarus": "🇧🇾", + "flag_for_belgium": "🇧🇪", + "flag_for_belize": "🇧🇿", + "flag_for_benin": "🇧🇯", + "flag_for_bermuda": "🇧🇲", + "flag_for_bhutan": "🇧🇹", + "flag_for_bolivia": "🇧🇴", + "flag_for_bosnia_&_herzegovina": "🇧🇦", + "flag_for_botswana": "🇧🇼", + "flag_for_bouvet_island": "🇧🇻", + "flag_for_brazil": "🇧🇷", + "flag_for_british_indian_ocean_territory": "🇮🇴", + "flag_for_british_virgin_islands": "🇻🇬", + "flag_for_brunei": "🇧🇳", + "flag_for_bulgaria": "🇧🇬", + "flag_for_burkina_faso": "🇧🇫", + "flag_for_burundi": "🇧🇮", + "flag_for_cambodia": "🇰🇭", + "flag_for_cameroon": "🇨🇲", + "flag_for_canada": "🇨🇦", + "flag_for_canary_islands": "🇮🇨", + "flag_for_cape_verde": "🇨🇻", + "flag_for_caribbean_netherlands": "🇧🇶", + "flag_for_cayman_islands": "🇰🇾", + "flag_for_central_african_republic": "🇨🇫", + "flag_for_ceuta_&_melilla": "🇪🇦", + "flag_for_chad": "🇹🇩", + "flag_for_chile": "🇨🇱", + "flag_for_china": "🇨🇳", + "flag_for_christmas_island": "🇨🇽", + "flag_for_clipperton_island": "🇨🇵", + "flag_for_cocos__islands": "🇨🇨", + "flag_for_colombia": "🇨🇴", + "flag_for_comoros": "🇰🇲", + "flag_for_congo____brazzaville": "🇨🇬", + "flag_for_congo____kinshasa": "🇨🇩", + "flag_for_cook_islands": "🇨🇰", + "flag_for_costa_rica": "🇨🇷", + "flag_for_croatia": "🇭🇷", + "flag_for_cuba": "🇨🇺", + "flag_for_curaçao": "🇨🇼", + "flag_for_cyprus": "🇨🇾", + "flag_for_czech_republic": "🇨🇿", + "flag_for_côte_d’ivoire": "🇨🇮", + "flag_for_denmark": "🇩🇰", + "flag_for_diego_garcia": "🇩🇬", + "flag_for_djibouti": "🇩🇯", + "flag_for_dominica": "🇩🇲", + "flag_for_dominican_republic": "🇩🇴", + "flag_for_ecuador": "🇪🇨", + "flag_for_egypt": "🇪🇬", + "flag_for_el_salvador": "🇸🇻", + "flag_for_equatorial_guinea": "🇬🇶", + "flag_for_eritrea": "🇪🇷", + "flag_for_estonia": "🇪🇪", + "flag_for_ethiopia": "🇪🇹", + "flag_for_european_union": "🇪🇺", + "flag_for_falkland_islands": "🇫🇰", + "flag_for_faroe_islands": "🇫🇴", + "flag_for_fiji": "🇫🇯", + "flag_for_finland": "🇫🇮", + "flag_for_france": "🇫🇷", + 
"flag_for_french_guiana": "🇬🇫", + "flag_for_french_polynesia": "🇵🇫", + "flag_for_french_southern_territories": "🇹🇫", + "flag_for_gabon": "🇬🇦", + "flag_for_gambia": "🇬🇲", + "flag_for_georgia": "🇬🇪", + "flag_for_germany": "🇩🇪", + "flag_for_ghana": "🇬🇭", + "flag_for_gibraltar": "🇬🇮", + "flag_for_greece": "🇬🇷", + "flag_for_greenland": "🇬🇱", + "flag_for_grenada": "🇬🇩", + "flag_for_guadeloupe": "🇬🇵", + "flag_for_guam": "🇬🇺", + "flag_for_guatemala": "🇬🇹", + "flag_for_guernsey": "🇬🇬", + "flag_for_guinea": "🇬🇳", + "flag_for_guinea__bissau": "🇬🇼", + "flag_for_guyana": "🇬🇾", + "flag_for_haiti": "🇭🇹", + "flag_for_heard_&_mcdonald_islands": "🇭🇲", + "flag_for_honduras": "🇭🇳", + "flag_for_hong_kong": "🇭🇰", + "flag_for_hungary": "🇭🇺", + "flag_for_iceland": "🇮🇸", + "flag_for_india": "🇮🇳", + "flag_for_indonesia": "🇮🇩", + "flag_for_iran": "🇮🇷", + "flag_for_iraq": "🇮🇶", + "flag_for_ireland": "🇮🇪", + "flag_for_isle_of_man": "🇮🇲", + "flag_for_israel": "🇮🇱", + "flag_for_italy": "🇮🇹", + "flag_for_jamaica": "🇯🇲", + "flag_for_japan": "🇯🇵", + "flag_for_jersey": "🇯🇪", + "flag_for_jordan": "🇯🇴", + "flag_for_kazakhstan": "🇰🇿", + "flag_for_kenya": "🇰🇪", + "flag_for_kiribati": "🇰🇮", + "flag_for_kosovo": "🇽🇰", + "flag_for_kuwait": "🇰🇼", + "flag_for_kyrgyzstan": "🇰🇬", + "flag_for_laos": "🇱🇦", + "flag_for_latvia": "🇱🇻", + "flag_for_lebanon": "🇱🇧", + "flag_for_lesotho": "🇱🇸", + "flag_for_liberia": "🇱🇷", + "flag_for_libya": "🇱🇾", + "flag_for_liechtenstein": "🇱🇮", + "flag_for_lithuania": "🇱🇹", + "flag_for_luxembourg": "🇱🇺", + "flag_for_macau": "🇲🇴", + "flag_for_macedonia": "🇲🇰", + "flag_for_madagascar": "🇲🇬", + "flag_for_malawi": "🇲🇼", + "flag_for_malaysia": "🇲🇾", + "flag_for_maldives": "🇲🇻", + "flag_for_mali": "🇲🇱", + "flag_for_malta": "🇲🇹", + "flag_for_marshall_islands": "🇲🇭", + "flag_for_martinique": "🇲🇶", + "flag_for_mauritania": "🇲🇷", + "flag_for_mauritius": "🇲🇺", + "flag_for_mayotte": "🇾🇹", + "flag_for_mexico": "🇲🇽", + "flag_for_micronesia": "🇫🇲", + "flag_for_moldova": "🇲🇩", + "flag_for_monaco": "🇲🇨", + "flag_for_mongolia": "🇲🇳", + "flag_for_montenegro": "🇲🇪", + "flag_for_montserrat": "🇲🇸", + "flag_for_morocco": "🇲🇦", + "flag_for_mozambique": "🇲🇿", + "flag_for_myanmar": "🇲🇲", + "flag_for_namibia": "🇳🇦", + "flag_for_nauru": "🇳🇷", + "flag_for_nepal": "🇳🇵", + "flag_for_netherlands": "🇳🇱", + "flag_for_new_caledonia": "🇳🇨", + "flag_for_new_zealand": "🇳🇿", + "flag_for_nicaragua": "🇳🇮", + "flag_for_niger": "🇳🇪", + "flag_for_nigeria": "🇳🇬", + "flag_for_niue": "🇳🇺", + "flag_for_norfolk_island": "🇳🇫", + "flag_for_north_korea": "🇰🇵", + "flag_for_northern_mariana_islands": "🇲🇵", + "flag_for_norway": "🇳🇴", + "flag_for_oman": "🇴🇲", + "flag_for_pakistan": "🇵🇰", + "flag_for_palau": "🇵🇼", + "flag_for_palestinian_territories": "🇵🇸", + "flag_for_panama": "🇵🇦", + "flag_for_papua_new_guinea": "🇵🇬", + "flag_for_paraguay": "🇵🇾", + "flag_for_peru": "🇵🇪", + "flag_for_philippines": "🇵🇭", + "flag_for_pitcairn_islands": "🇵🇳", + "flag_for_poland": "🇵🇱", + "flag_for_portugal": "🇵🇹", + "flag_for_puerto_rico": "🇵🇷", + "flag_for_qatar": "🇶🇦", + "flag_for_romania": "🇷🇴", + "flag_for_russia": "🇷🇺", + "flag_for_rwanda": "🇷🇼", + "flag_for_réunion": "🇷🇪", + "flag_for_samoa": "🇼🇸", + "flag_for_san_marino": "🇸🇲", + "flag_for_saudi_arabia": "🇸🇦", + "flag_for_senegal": "🇸🇳", + "flag_for_serbia": "🇷🇸", + "flag_for_seychelles": "🇸🇨", + "flag_for_sierra_leone": "🇸🇱", + "flag_for_singapore": "🇸🇬", + "flag_for_sint_maarten": "🇸🇽", + "flag_for_slovakia": "🇸🇰", + "flag_for_slovenia": "🇸🇮", + "flag_for_solomon_islands": "🇸🇧", + "flag_for_somalia": "🇸🇴", + 
"flag_for_south_africa": "🇿🇦", + "flag_for_south_georgia_&_south_sandwich_islands": "🇬🇸", + "flag_for_south_korea": "🇰🇷", + "flag_for_south_sudan": "🇸🇸", + "flag_for_spain": "🇪🇸", + "flag_for_sri_lanka": "🇱🇰", + "flag_for_st._barthélemy": "🇧🇱", + "flag_for_st._helena": "🇸🇭", + "flag_for_st._kitts_&_nevis": "🇰🇳", + "flag_for_st._lucia": "🇱🇨", + "flag_for_st._martin": "🇲🇫", + "flag_for_st._pierre_&_miquelon": "🇵🇲", + "flag_for_st._vincent_&_grenadines": "🇻🇨", + "flag_for_sudan": "🇸🇩", + "flag_for_suriname": "🇸🇷", + "flag_for_svalbard_&_jan_mayen": "🇸🇯", + "flag_for_swaziland": "🇸🇿", + "flag_for_sweden": "🇸🇪", + "flag_for_switzerland": "🇨🇭", + "flag_for_syria": "🇸🇾", + "flag_for_são_tomé_&_príncipe": "🇸🇹", + "flag_for_taiwan": "🇹🇼", + "flag_for_tajikistan": "🇹🇯", + "flag_for_tanzania": "🇹🇿", + "flag_for_thailand": "🇹🇭", + "flag_for_timor__leste": "🇹🇱", + "flag_for_togo": "🇹🇬", + "flag_for_tokelau": "🇹🇰", + "flag_for_tonga": "🇹🇴", + "flag_for_trinidad_&_tobago": "🇹🇹", + "flag_for_tristan_da_cunha": "🇹🇦", + "flag_for_tunisia": "🇹🇳", + "flag_for_turkey": "🇹🇷", + "flag_for_turkmenistan": "🇹🇲", + "flag_for_turks_&_caicos_islands": "🇹🇨", + "flag_for_tuvalu": "🇹🇻", + "flag_for_u.s._outlying_islands": "🇺🇲", + "flag_for_u.s._virgin_islands": "🇻🇮", + "flag_for_uganda": "🇺🇬", + "flag_for_ukraine": "🇺🇦", + "flag_for_united_arab_emirates": "🇦🇪", + "flag_for_united_kingdom": "🇬🇧", + "flag_for_united_states": "🇺🇸", + "flag_for_uruguay": "🇺🇾", + "flag_for_uzbekistan": "🇺🇿", + "flag_for_vanuatu": "🇻🇺", + "flag_for_vatican_city": "🇻🇦", + "flag_for_venezuela": "🇻🇪", + "flag_for_vietnam": "🇻🇳", + "flag_for_wallis_&_futuna": "🇼🇫", + "flag_for_western_sahara": "🇪🇭", + "flag_for_yemen": "🇾🇪", + "flag_for_zambia": "🇿🇲", + "flag_for_zimbabwe": "🇿🇼", + "flag_for_åland_islands": "🇦🇽", + "golf": "⛳", + "fleur__de__lis": "⚜", + "muscle": "💪", + "flushed": "😳", + "frame_with_picture": "🖼", + "fries": "🍟", + "frog": "🐸", + "hatched_chick": "🐥", + "frowning": "😦", + "fuelpump": "⛽", + "full_moon_with_face": "🌝", + "gem": "💎", + "star2": "🌟", + "golfer": "🏌", + "mortar_board": "🎓", + "grimacing": "😬", + "smile_cat": "😸", + "grinning": "😀", + "grin": "😁", + "heartpulse": "💗", + "guardsman": "💂", + "haircut": "💇", + "hamster": "🐹", + "raising_hand": "🙋", + "headphones": "🎧", + "hear_no_evil": "🙉", + "cupid": "💘", + "gift_heart": "💝", + "heart": "❤", + "exclamation": "❗", + "heavy_exclamation_mark": "❗", + "heavy_heart_exclamation_mark_ornament": "❣", + "o": "⭕", + "helm_symbol": "⎈", + "helmet_with_white_cross": "⛑", + "high_heel": "👠", + "bullettrain_side": "🚄", + "bullettrain_front": "🚅", + "high_brightness": "🔆", + "zap": "⚡", + "hocho": "🔪", + "knife": "🔪", + "bee": "🐝", + "traffic_light": "🚥", + "racehorse": "🐎", + "coffee": "☕", + "hotsprings": "♨", + "hourglass": "⌛", + "hourglass_flowing_sand": "⏳", + "house_buildings": "🏘", + "100": "💯", + "hushed": "😯", + "ice_hockey_stick_and_puck": "🏒", + "imp": "👿", + "information_desk_person": "💁", + "information_source": "ℹ", + "capital_abcd": "🔠", + "abc": "🔤", + "abcd": "🔡", + "1234": "🔢", + "symbols": "🔣", + "izakaya_lantern": "🏮", + "lantern": "🏮", + "jack_o_lantern": "🎃", + "dolls": "🎎", + "japanese_goblin": "👺", + "japanese_ogre": "👹", + "beginner": "🔰", + "zero": "0️⃣", + "one": "1️⃣", + "ten": "🔟", + "two": "2️⃣", + "three": "3️⃣", + "four": "4️⃣", + "five": "5️⃣", + "six": "6️⃣", + "seven": "7️⃣", + "eight": "8️⃣", + "nine": "9️⃣", + "couplekiss": "💏", + "kissing_cat": "😽", + "kissing": "😗", + "kissing_closed_eyes": "😚", + "kissing_smiling_eyes": "😙", + "beetle": "🐞", + 
"large_blue_circle": "🔵", + "last_quarter_moon_with_face": "🌜", + "leaves": "🍃", + "mag": "🔍", + "left_right_arrow": "↔", + "leftwards_arrow_with_hook": "↩", + "arrow_left": "⬅", + "lock": "🔒", + "lock_with_ink_pen": "🔏", + "sob": "😭", + "low_brightness": "🔅", + "lower_left_ballpoint_pen": "🖊", + "lower_left_crayon": "🖍", + "lower_left_fountain_pen": "🖋", + "lower_left_paintbrush": "🖌", + "mahjong": "🀄", + "couple": "👫", + "man_in_business_suit_levitating": "🕴", + "man_with_gua_pi_mao": "👲", + "man_with_turban": "👳", + "mans_shoe": "👞", + "shoe": "👞", + "menorah_with_nine_branches": "🕎", + "mens": "🚹", + "minidisc": "💽", + "iphone": "📱", + "calling": "📲", + "money__mouth_face": "🤑", + "moneybag": "💰", + "rice_scene": "🎑", + "mountain_bicyclist": "🚵", + "mouse2": "🐁", + "lips": "👄", + "moyai": "🗿", + "notes": "🎶", + "nail_care": "💅", + "ab": "🆎", + "negative_squared_cross_mark": "❎", + "a": "🅰", + "b": "🅱", + "o2": "🅾", + "parking": "🅿", + "new_moon_with_face": "🌚", + "no_entry_sign": "🚫", + "underage": "🔞", + "non__potable_water": "🚱", + "arrow_upper_right": "↗", + "arrow_upper_left": "↖", + "office": "🏢", + "older_man": "👴", + "older_woman": "👵", + "om_symbol": "🕉", + "on": "🔛", + "book": "📖", + "unlock": "🔓", + "mailbox_with_no_mail": "📭", + "mailbox_with_mail": "📬", + "cd": "💿", + "tada": "🎉", + "feet": "🐾", + "walking": "🚶", + "pencil2": "✏", + "pensive": "😔", + "persevere": "😣", + "bow": "🙇", + "raised_hands": "🙌", + "person_with_ball": "⛹", + "person_with_blond_hair": "👱", + "pray": "🙏", + "person_with_pouting_face": "🙎", + "computer": "💻", + "pig2": "🐖", + "hankey": "💩", + "poop": "💩", + "shit": "💩", + "bamboo": "🎍", + "gun": "🔫", + "black_joker": "🃏", + "rotating_light": "🚨", + "cop": "👮", + "stew": "🍲", + "pouch": "👝", + "pouting_cat": "😾", + "rage": "😡", + "put_litter_in_its_place": "🚮", + "rabbit2": "🐇", + "racing_motorcycle": "🏍", + "radioactive_sign": "☢", + "fist": "✊", + "hand": "✋", + "raised_hand_with_fingers_splayed": "🖐", + "raised_hand_with_part_between_middle_and_ring_fingers": "🖖", + "blue_car": "🚙", + "apple": "🍎", + "relieved": "😌", + "reversed_hand_with_middle_finger_extended": "🖕", + "mag_right": "🔎", + "arrow_right_hook": "↪", + "sweet_potato": "🍠", + "robot": "🤖", + "rolled__up_newspaper": "🗞", + "rowboat": "🚣", + "runner": "🏃", + "running": "🏃", + "running_shirt_with_sash": "🎽", + "boat": "⛵", + "scales": "⚖", + "school_satchel": "🎒", + "scorpius": "♏", + "see_no_evil": "🙈", + "sheep": "🐑", + "stars": "🌠", + "cake": "🍰", + "six_pointed_star": "🔯", + "ski": "🎿", + "sleeping_accommodation": "🛌", + "sleeping": "😴", + "sleepy": "😪", + "sleuth_or_spy": "🕵", + "heart_eyes_cat": "😻", + "smiley_cat": "😺", + "innocent": "😇", + "heart_eyes": "😍", + "smiling_imp": "😈", + "smiley": "😃", + "sweat_smile": "😅", + "smile": "😄", + "laughing": "😆", + "satisfied": "😆", + "blush": "😊", + "smirk": "😏", + "smoking": "🚬", + "snow_capped_mountain": "🏔", + "soccer": "⚽", + "icecream": "🍦", + "soon": "🔜", + "arrow_lower_right": "↘", + "arrow_lower_left": "↙", + "speak_no_evil": "🙊", + "speaker": "🔈", + "mute": "🔇", + "sound": "🔉", + "loud_sound": "🔊", + "speaking_head_in_silhouette": "🗣", + "spiral_calendar_pad": "🗓", + "spiral_note_pad": "🗒", + "shell": "🐚", + "sweat_drops": "💦", + "u5272": "🈹", + "u5408": "🈴", + "u55b6": "🈺", + "u6307": "🈯", + "u6708": "🈷", + "u6709": "🈶", + "u6e80": "🈵", + "u7121": "🈚", + "u7533": "🈸", + "u7981": "🈲", + "u7a7a": "🈳", + "cl": "🆑", + "cool": "🆒", + "free": "🆓", + "id": "🆔", + "koko": "🈁", + "sa": "🈂", + "new": "🆕", + "ng": "🆖", + "ok": "🆗", + "sos": 
"🆘", + "up": "🆙", + "vs": "🆚", + "steam_locomotive": "🚂", + "ramen": "🍜", + "partly_sunny": "⛅", + "city_sunrise": "🌇", + "surfer": "🏄", + "swimmer": "🏊", + "shirt": "👕", + "tshirt": "👕", + "table_tennis_paddle_and_ball": "🏓", + "tea": "🍵", + "tv": "📺", + "three_button_mouse": "🖱", + "+1": "👍", + "thumbsup": "👍", + "__1": "👎", + "-1": "👎", + "thumbsdown": "👎", + "thunder_cloud_and_rain": "⛈", + "tiger2": "🐅", + "tophat": "🎩", + "top": "🔝", + "tm": "™", + "train2": "🚆", + "triangular_flag_on_post": "🚩", + "trident": "🔱", + "twisted_rightwards_arrows": "🔀", + "unamused": "😒", + "small_red_triangle": "🔺", + "arrow_up_small": "🔼", + "arrow_up_down": "↕", + "upside__down_face": "🙃", + "arrow_up": "⬆", + "v": "✌", + "vhs": "📼", + "wc": "🚾", + "ocean": "🌊", + "waving_black_flag": "🏴", + "wave": "👋", + "waving_white_flag": "🏳", + "moon": "🌔", + "scream_cat": "🙀", + "weary": "😩", + "weight_lifter": "🏋", + "whale2": "🐋", + "wheelchair": "♿", + "point_down": "👇", + "grey_exclamation": "❕", + "white_frowning_face": "☹", + "white_check_mark": "✅", + "point_left": "👈", + "white_medium_small_square": "◽", + "star": "⭐", + "grey_question": "❔", + "point_right": "👉", + "relaxed": "☺", + "white_sun_behind_cloud": "🌥", + "white_sun_behind_cloud_with_rain": "🌦", + "white_sun_with_small_cloud": "🌤", + "point_up_2": "👆", + "point_up": "☝", + "wind_blowing_face": "🌬", + "wink": "😉", + "wolf": "🐺", + "dancers": "👯", + "boot": "👢", + "womans_clothes": "👚", + "womans_hat": "👒", + "sandal": "👡", + "womens": "🚺", + "worried": "😟", + "gift": "🎁", + "zipper__mouth_face": "🤐", + "regional_indicator_a": "🇦", + "regional_indicator_b": "🇧", + "regional_indicator_c": "🇨", + "regional_indicator_d": "🇩", + "regional_indicator_e": "🇪", + "regional_indicator_f": "🇫", + "regional_indicator_g": "🇬", + "regional_indicator_h": "🇭", + "regional_indicator_i": "🇮", + "regional_indicator_j": "🇯", + "regional_indicator_k": "🇰", + "regional_indicator_l": "🇱", + "regional_indicator_m": "🇲", + "regional_indicator_n": "🇳", + "regional_indicator_o": "🇴", + "regional_indicator_p": "🇵", + "regional_indicator_q": "🇶", + "regional_indicator_r": "🇷", + "regional_indicator_s": "🇸", + "regional_indicator_t": "🇹", + "regional_indicator_u": "🇺", + "regional_indicator_v": "🇻", + "regional_indicator_w": "🇼", + "regional_indicator_x": "🇽", + "regional_indicator_y": "🇾", + "regional_indicator_z": "🇿", +} diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_emoji_replace.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_emoji_replace.py new file mode 100644 index 0000000..bb2cafa --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_emoji_replace.py @@ -0,0 +1,32 @@ +from typing import Callable, Match, Optional +import re + +from ._emoji_codes import EMOJI + + +_ReStringMatch = Match[str] # regex match object +_ReSubCallable = Callable[[_ReStringMatch], str] # Callable invoked by re.sub +_EmojiSubMethod = Callable[[_ReSubCallable, str], str] # Sub method of a compiled re + + +def _emoji_replace( + text: str, + default_variant: Optional[str] = None, + _emoji_sub: _EmojiSubMethod = re.compile(r"(:(\S*?)(?:(?:\-)(emoji|text))?:)").sub, +) -> str: + """Replace emoji code in text.""" + get_emoji = EMOJI.__getitem__ + variants = {"text": "\uFE0E", "emoji": "\uFE0F"} + get_variant = variants.get + default_variant_code = variants.get(default_variant, "") if default_variant else "" + + def do_replace(match: Match[str]) -> str: + emoji_code, emoji_name, variant = match.groups() + try: + return get_emoji(emoji_name.lower()) + 
get_variant( + variant, default_variant_code + ) + except KeyError: + return emoji_code + + return _emoji_sub(do_replace, text) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_export_format.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_export_format.py new file mode 100644 index 0000000..094d2dc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_export_format.py @@ -0,0 +1,76 @@ +CONSOLE_HTML_FORMAT = """\ + + + + + + + +
{code}
+ + +""" + +CONSOLE_SVG_FORMAT = """\ + + + + + + + + + {lines} + + + {chrome} + + {backgrounds} + + {matrix} + + + +""" + +_SVG_FONT_FAMILY = "Rich Fira Code" +_SVG_CLASSES_PREFIX = "rich-svg" diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_extension.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_extension.py new file mode 100644 index 0000000..cbd6da9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_extension.py @@ -0,0 +1,10 @@ +from typing import Any + + +def load_ipython_extension(ip: Any) -> None: # pragma: no cover + # prevent circular import + from pip._vendor.rich.pretty import install + from pip._vendor.rich.traceback import install as tr_install + + install() + tr_install() diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_fileno.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_fileno.py new file mode 100644 index 0000000..b17ee65 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_fileno.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from typing import IO, Callable + + +def get_fileno(file_like: IO[str]) -> int | None: + """Get fileno() from a file, accounting for poorly implemented file-like objects. + + Args: + file_like (IO): A file-like object. + + Returns: + int | None: The result of fileno if available, or None if operation failed. + """ + fileno: Callable[[], int] | None = getattr(file_like, "fileno", None) + if fileno is not None: + try: + return fileno() + except Exception: + # `fileno` is documented as potentially raising a OSError + # Alas, from the issues, there are so many poorly implemented file-like objects, + # that `fileno()` can raise just about anything. + return None + return None diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_inspect.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_inspect.py new file mode 100644 index 0000000..30446ce --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_inspect.py @@ -0,0 +1,270 @@ +from __future__ import absolute_import + +import inspect +from inspect import cleandoc, getdoc, getfile, isclass, ismodule, signature +from typing import Any, Collection, Iterable, Optional, Tuple, Type, Union + +from .console import Group, RenderableType +from .control import escape_control_codes +from .highlighter import ReprHighlighter +from .jupyter import JupyterMixin +from .panel import Panel +from .pretty import Pretty +from .table import Table +from .text import Text, TextType + + +def _first_paragraph(doc: str) -> str: + """Get the first paragraph from a docstring.""" + paragraph, _, _ = doc.partition("\n\n") + return paragraph + + +class Inspect(JupyterMixin): + """A renderable to inspect any Python Object. + + Args: + obj (Any): An object to inspect. + title (str, optional): Title to display over inspect result, or None use type. Defaults to None. + help (bool, optional): Show full help text rather than just first paragraph. Defaults to False. + methods (bool, optional): Enable inspection of callables. Defaults to False. + docs (bool, optional): Also render doc strings. Defaults to True. + private (bool, optional): Show private attributes (beginning with underscore). Defaults to False. + dunder (bool, optional): Show attributes starting with double underscore. Defaults to False. + sort (bool, optional): Sort attributes alphabetically. Defaults to True. + all (bool, optional): Show all attributes. Defaults to False. + value (bool, optional): Pretty print value of object. 
Defaults to True. + """ + + def __init__( + self, + obj: Any, + *, + title: Optional[TextType] = None, + help: bool = False, + methods: bool = False, + docs: bool = True, + private: bool = False, + dunder: bool = False, + sort: bool = True, + all: bool = True, + value: bool = True, + ) -> None: + self.highlighter = ReprHighlighter() + self.obj = obj + self.title = title or self._make_title(obj) + if all: + methods = private = dunder = True + self.help = help + self.methods = methods + self.docs = docs or help + self.private = private or dunder + self.dunder = dunder + self.sort = sort + self.value = value + + def _make_title(self, obj: Any) -> Text: + """Make a default title.""" + title_str = ( + str(obj) + if (isclass(obj) or callable(obj) or ismodule(obj)) + else str(type(obj)) + ) + title_text = self.highlighter(title_str) + return title_text + + def __rich__(self) -> Panel: + return Panel.fit( + Group(*self._render()), + title=self.title, + border_style="scope.border", + padding=(0, 1), + ) + + def _get_signature(self, name: str, obj: Any) -> Optional[Text]: + """Get a signature for a callable.""" + try: + _signature = str(signature(obj)) + ":" + except ValueError: + _signature = "(...)" + except TypeError: + return None + + source_filename: Optional[str] = None + try: + source_filename = getfile(obj) + except (OSError, TypeError): + # OSError is raised if obj has no source file, e.g. when defined in REPL. + pass + + callable_name = Text(name, style="inspect.callable") + if source_filename: + callable_name.stylize(f"link file://{source_filename}") + signature_text = self.highlighter(_signature) + + qualname = name or getattr(obj, "__qualname__", name) + + # If obj is a module, there may be classes (which are callable) to display + if inspect.isclass(obj): + prefix = "class" + elif inspect.iscoroutinefunction(obj): + prefix = "async def" + else: + prefix = "def" + + qual_signature = Text.assemble( + (f"{prefix} ", f"inspect.{prefix.replace(' ', '_')}"), + (qualname, "inspect.callable"), + signature_text, + ) + + return qual_signature + + def _render(self) -> Iterable[RenderableType]: + """Render object.""" + + def sort_items(item: Tuple[str, Any]) -> Tuple[bool, str]: + key, (_error, value) = item + return (callable(value), key.strip("_").lower()) + + def safe_getattr(attr_name: str) -> Tuple[Any, Any]: + """Get attribute or any exception.""" + try: + return (None, getattr(obj, attr_name)) + except Exception as error: + return (error, None) + + obj = self.obj + keys = dir(obj) + total_items = len(keys) + if not self.dunder: + keys = [key for key in keys if not key.startswith("__")] + if not self.private: + keys = [key for key in keys if not key.startswith("_")] + not_shown_count = total_items - len(keys) + items = [(key, safe_getattr(key)) for key in keys] + if self.sort: + items.sort(key=sort_items) + + items_table = Table.grid(padding=(0, 1), expand=False) + items_table.add_column(justify="right") + add_row = items_table.add_row + highlighter = self.highlighter + + if callable(obj): + signature = self._get_signature("", obj) + if signature is not None: + yield signature + yield "" + + if self.docs: + _doc = self._get_formatted_doc(obj) + if _doc is not None: + doc_text = Text(_doc, style="inspect.help") + doc_text = highlighter(doc_text) + yield doc_text + yield "" + + if self.value and not (isclass(obj) or callable(obj) or ismodule(obj)): + yield Panel( + Pretty(obj, indent_guides=True, max_length=10, max_string=60), + border_style="inspect.value.border", + ) + yield "" + + for key, 
(error, value) in items: + key_text = Text.assemble( + ( + key, + "inspect.attr.dunder" if key.startswith("__") else "inspect.attr", + ), + (" =", "inspect.equals"), + ) + if error is not None: + warning = key_text.copy() + warning.stylize("inspect.error") + add_row(warning, highlighter(repr(error))) + continue + + if callable(value): + if not self.methods: + continue + + _signature_text = self._get_signature(key, value) + if _signature_text is None: + add_row(key_text, Pretty(value, highlighter=highlighter)) + else: + if self.docs: + docs = self._get_formatted_doc(value) + if docs is not None: + _signature_text.append("\n" if "\n" in docs else " ") + doc = highlighter(docs) + doc.stylize("inspect.doc") + _signature_text.append(doc) + + add_row(key_text, _signature_text) + else: + add_row(key_text, Pretty(value, highlighter=highlighter)) + if items_table.row_count: + yield items_table + elif not_shown_count: + yield Text.from_markup( + f"[b cyan]{not_shown_count}[/][i] attribute(s) not shown.[/i] " + f"Run [b][magenta]inspect[/]([not b]inspect[/])[/b] for options." + ) + + def _get_formatted_doc(self, object_: Any) -> Optional[str]: + """ + Extract the docstring of an object, process it and returns it. + The processing consists in cleaning up the doctring's indentation, + taking only its 1st paragraph if `self.help` is not True, + and escape its control codes. + + Args: + object_ (Any): the object to get the docstring from. + + Returns: + Optional[str]: the processed docstring, or None if no docstring was found. + """ + docs = getdoc(object_) + if docs is None: + return None + docs = cleandoc(docs).strip() + if not self.help: + docs = _first_paragraph(docs) + return escape_control_codes(docs) + + +def get_object_types_mro(obj: Union[object, Type[Any]]) -> Tuple[type, ...]: + """Returns the MRO of an object's class, or of the object itself if it's a class.""" + if not hasattr(obj, "__mro__"): + # N.B. we cannot use `if type(obj) is type` here because it doesn't work with + # some types of classes, such as the ones that use abc.ABCMeta. + obj = type(obj) + return getattr(obj, "__mro__", ()) + + +def get_object_types_mro_as_strings(obj: object) -> Collection[str]: + """ + Returns the MRO of an object's class as full qualified names, or of the object itself if it's a class. + + Examples: + `object_types_mro_as_strings(JSONDecoder)` will return `['json.decoder.JSONDecoder', 'builtins.object']` + """ + return [ + f'{getattr(type_, "__module__", "")}.{getattr(type_, "__qualname__", "")}' + for type_ in get_object_types_mro(obj) + ] + + +def is_object_one_of_types( + obj: object, fully_qualified_types_names: Collection[str] +) -> bool: + """ + Returns `True` if the given object's class (or the object itself, if it's a class) has one of the + fully qualified names in its MRO. 
+ """ + for type_name in get_object_types_mro_as_strings(obj): + if type_name in fully_qualified_types_names: + return True + return False diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_log_render.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_log_render.py new file mode 100644 index 0000000..fc16c84 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_log_render.py @@ -0,0 +1,94 @@ +from datetime import datetime +from typing import Iterable, List, Optional, TYPE_CHECKING, Union, Callable + + +from .text import Text, TextType + +if TYPE_CHECKING: + from .console import Console, ConsoleRenderable, RenderableType + from .table import Table + +FormatTimeCallable = Callable[[datetime], Text] + + +class LogRender: + def __init__( + self, + show_time: bool = True, + show_level: bool = False, + show_path: bool = True, + time_format: Union[str, FormatTimeCallable] = "[%x %X]", + omit_repeated_times: bool = True, + level_width: Optional[int] = 8, + ) -> None: + self.show_time = show_time + self.show_level = show_level + self.show_path = show_path + self.time_format = time_format + self.omit_repeated_times = omit_repeated_times + self.level_width = level_width + self._last_time: Optional[Text] = None + + def __call__( + self, + console: "Console", + renderables: Iterable["ConsoleRenderable"], + log_time: Optional[datetime] = None, + time_format: Optional[Union[str, FormatTimeCallable]] = None, + level: TextType = "", + path: Optional[str] = None, + line_no: Optional[int] = None, + link_path: Optional[str] = None, + ) -> "Table": + from .containers import Renderables + from .table import Table + + output = Table.grid(padding=(0, 1)) + output.expand = True + if self.show_time: + output.add_column(style="log.time") + if self.show_level: + output.add_column(style="log.level", width=self.level_width) + output.add_column(ratio=1, style="log.message", overflow="fold") + if self.show_path and path: + output.add_column(style="log.path") + row: List["RenderableType"] = [] + if self.show_time: + log_time = log_time or console.get_datetime() + time_format = time_format or self.time_format + if callable(time_format): + log_time_display = time_format(log_time) + else: + log_time_display = Text(log_time.strftime(time_format)) + if log_time_display == self._last_time and self.omit_repeated_times: + row.append(Text(" " * len(log_time_display))) + else: + row.append(log_time_display) + self._last_time = log_time_display + if self.show_level: + row.append(level) + + row.append(Renderables(renderables)) + if self.show_path and path: + path_text = Text() + path_text.append( + path, style=f"link file://{link_path}" if link_path else "" + ) + if line_no: + path_text.append(":") + path_text.append( + f"{line_no}", + style=f"link file://{link_path}#{line_no}" if link_path else "", + ) + row.append(path_text) + + output.add_row(*row) + return output + + +if __name__ == "__main__": # pragma: no cover + from pip._vendor.rich.console import Console + + c = Console() + c.print("[on blue]Hello", justify="right") + c.log("[on blue]hello", justify="right") diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_loop.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_loop.py new file mode 100644 index 0000000..01c6caf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_loop.py @@ -0,0 +1,43 @@ +from typing import Iterable, Tuple, TypeVar + +T = TypeVar("T") + + +def loop_first(values: Iterable[T]) -> Iterable[Tuple[bool, T]]: + """Iterate and 
generate a tuple with a flag for first value.""" + iter_values = iter(values) + try: + value = next(iter_values) + except StopIteration: + return + yield True, value + for value in iter_values: + yield False, value + + +def loop_last(values: Iterable[T]) -> Iterable[Tuple[bool, T]]: + """Iterate and generate a tuple with a flag for last value.""" + iter_values = iter(values) + try: + previous_value = next(iter_values) + except StopIteration: + return + for value in iter_values: + yield False, previous_value + previous_value = value + yield True, previous_value + + +def loop_first_last(values: Iterable[T]) -> Iterable[Tuple[bool, bool, T]]: + """Iterate and generate a tuple with a flag for first and last value.""" + iter_values = iter(values) + try: + previous_value = next(iter_values) + except StopIteration: + return + first = True + for value in iter_values: + yield first, False, previous_value + first = False + previous_value = value + yield first, True, previous_value diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_null_file.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_null_file.py new file mode 100644 index 0000000..b659673 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_null_file.py @@ -0,0 +1,69 @@ +from types import TracebackType +from typing import IO, Iterable, Iterator, List, Optional, Type + + +class NullFile(IO[str]): + def close(self) -> None: + pass + + def isatty(self) -> bool: + return False + + def read(self, __n: int = 1) -> str: + return "" + + def readable(self) -> bool: + return False + + def readline(self, __limit: int = 1) -> str: + return "" + + def readlines(self, __hint: int = 1) -> List[str]: + return [] + + def seek(self, __offset: int, __whence: int = 1) -> int: + return 0 + + def seekable(self) -> bool: + return False + + def tell(self) -> int: + return 0 + + def truncate(self, __size: Optional[int] = 1) -> int: + return 0 + + def writable(self) -> bool: + return False + + def writelines(self, __lines: Iterable[str]) -> None: + pass + + def __next__(self) -> str: + return "" + + def __iter__(self) -> Iterator[str]: + return iter([""]) + + def __enter__(self) -> IO[str]: + pass + + def __exit__( + self, + __t: Optional[Type[BaseException]], + __value: Optional[BaseException], + __traceback: Optional[TracebackType], + ) -> None: + pass + + def write(self, text: str) -> int: + return 0 + + def flush(self) -> None: + pass + + def fileno(self) -> int: + return -1 + + +NULL_FILE = NullFile() diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_palettes.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_palettes.py new file mode 100644 index 0000000..3c748d3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_palettes.py @@ -0,0 +1,309 @@ +from .palette import Palette + + +# Taken from https://en.wikipedia.org/wiki/ANSI_escape_code (Windows 10 column) +WINDOWS_PALETTE = Palette( + [ + (12, 12, 12), + (197, 15, 31), + (19, 161, 14), + (193, 156, 0), + (0, 55, 218), + (136, 23, 152), + (58, 150, 221), + (204, 204, 204), + (118, 118, 118), + (231, 72, 86), + (22, 198, 12), + (249, 241, 165), + (59, 120, 255), + (180, 0, 158), + (97, 214, 214), + (242, 242, 242), + ] +) + +# # The standard ansi colors (including bright variants) +STANDARD_PALETTE = Palette( + [ + (0, 0, 0), + (170, 0, 0), + (0, 170, 0), + (170, 85, 0), + (0, 0, 170), + (170, 0, 170), + (0, 170, 170), + (170, 170, 170), + (85, 85, 85), + (255, 85, 85), + (85, 255, 85), + (255, 255, 85), + (85, 85, 255), + 
(255, 85, 255), + (85, 255, 255), + (255, 255, 255), + ] +) + + +# The 256 color palette +EIGHT_BIT_PALETTE = Palette( + [ + (0, 0, 0), + (128, 0, 0), + (0, 128, 0), + (128, 128, 0), + (0, 0, 128), + (128, 0, 128), + (0, 128, 128), + (192, 192, 192), + (128, 128, 128), + (255, 0, 0), + (0, 255, 0), + (255, 255, 0), + (0, 0, 255), + (255, 0, 255), + (0, 255, 255), + (255, 255, 255), + (0, 0, 0), + (0, 0, 95), + (0, 0, 135), + (0, 0, 175), + (0, 0, 215), + (0, 0, 255), + (0, 95, 0), + (0, 95, 95), + (0, 95, 135), + (0, 95, 175), + (0, 95, 215), + (0, 95, 255), + (0, 135, 0), + (0, 135, 95), + (0, 135, 135), + (0, 135, 175), + (0, 135, 215), + (0, 135, 255), + (0, 175, 0), + (0, 175, 95), + (0, 175, 135), + (0, 175, 175), + (0, 175, 215), + (0, 175, 255), + (0, 215, 0), + (0, 215, 95), + (0, 215, 135), + (0, 215, 175), + (0, 215, 215), + (0, 215, 255), + (0, 255, 0), + (0, 255, 95), + (0, 255, 135), + (0, 255, 175), + (0, 255, 215), + (0, 255, 255), + (95, 0, 0), + (95, 0, 95), + (95, 0, 135), + (95, 0, 175), + (95, 0, 215), + (95, 0, 255), + (95, 95, 0), + (95, 95, 95), + (95, 95, 135), + (95, 95, 175), + (95, 95, 215), + (95, 95, 255), + (95, 135, 0), + (95, 135, 95), + (95, 135, 135), + (95, 135, 175), + (95, 135, 215), + (95, 135, 255), + (95, 175, 0), + (95, 175, 95), + (95, 175, 135), + (95, 175, 175), + (95, 175, 215), + (95, 175, 255), + (95, 215, 0), + (95, 215, 95), + (95, 215, 135), + (95, 215, 175), + (95, 215, 215), + (95, 215, 255), + (95, 255, 0), + (95, 255, 95), + (95, 255, 135), + (95, 255, 175), + (95, 255, 215), + (95, 255, 255), + (135, 0, 0), + (135, 0, 95), + (135, 0, 135), + (135, 0, 175), + (135, 0, 215), + (135, 0, 255), + (135, 95, 0), + (135, 95, 95), + (135, 95, 135), + (135, 95, 175), + (135, 95, 215), + (135, 95, 255), + (135, 135, 0), + (135, 135, 95), + (135, 135, 135), + (135, 135, 175), + (135, 135, 215), + (135, 135, 255), + (135, 175, 0), + (135, 175, 95), + (135, 175, 135), + (135, 175, 175), + (135, 175, 215), + (135, 175, 255), + (135, 215, 0), + (135, 215, 95), + (135, 215, 135), + (135, 215, 175), + (135, 215, 215), + (135, 215, 255), + (135, 255, 0), + (135, 255, 95), + (135, 255, 135), + (135, 255, 175), + (135, 255, 215), + (135, 255, 255), + (175, 0, 0), + (175, 0, 95), + (175, 0, 135), + (175, 0, 175), + (175, 0, 215), + (175, 0, 255), + (175, 95, 0), + (175, 95, 95), + (175, 95, 135), + (175, 95, 175), + (175, 95, 215), + (175, 95, 255), + (175, 135, 0), + (175, 135, 95), + (175, 135, 135), + (175, 135, 175), + (175, 135, 215), + (175, 135, 255), + (175, 175, 0), + (175, 175, 95), + (175, 175, 135), + (175, 175, 175), + (175, 175, 215), + (175, 175, 255), + (175, 215, 0), + (175, 215, 95), + (175, 215, 135), + (175, 215, 175), + (175, 215, 215), + (175, 215, 255), + (175, 255, 0), + (175, 255, 95), + (175, 255, 135), + (175, 255, 175), + (175, 255, 215), + (175, 255, 255), + (215, 0, 0), + (215, 0, 95), + (215, 0, 135), + (215, 0, 175), + (215, 0, 215), + (215, 0, 255), + (215, 95, 0), + (215, 95, 95), + (215, 95, 135), + (215, 95, 175), + (215, 95, 215), + (215, 95, 255), + (215, 135, 0), + (215, 135, 95), + (215, 135, 135), + (215, 135, 175), + (215, 135, 215), + (215, 135, 255), + (215, 175, 0), + (215, 175, 95), + (215, 175, 135), + (215, 175, 175), + (215, 175, 215), + (215, 175, 255), + (215, 215, 0), + (215, 215, 95), + (215, 215, 135), + (215, 215, 175), + (215, 215, 215), + (215, 215, 255), + (215, 255, 0), + (215, 255, 95), + (215, 255, 135), + (215, 255, 175), + (215, 255, 215), + (215, 255, 255), + (255, 0, 0), + (255, 0, 95), + 
(255, 0, 135), + (255, 0, 175), + (255, 0, 215), + (255, 0, 255), + (255, 95, 0), + (255, 95, 95), + (255, 95, 135), + (255, 95, 175), + (255, 95, 215), + (255, 95, 255), + (255, 135, 0), + (255, 135, 95), + (255, 135, 135), + (255, 135, 175), + (255, 135, 215), + (255, 135, 255), + (255, 175, 0), + (255, 175, 95), + (255, 175, 135), + (255, 175, 175), + (255, 175, 215), + (255, 175, 255), + (255, 215, 0), + (255, 215, 95), + (255, 215, 135), + (255, 215, 175), + (255, 215, 215), + (255, 215, 255), + (255, 255, 0), + (255, 255, 95), + (255, 255, 135), + (255, 255, 175), + (255, 255, 215), + (255, 255, 255), + (8, 8, 8), + (18, 18, 18), + (28, 28, 28), + (38, 38, 38), + (48, 48, 48), + (58, 58, 58), + (68, 68, 68), + (78, 78, 78), + (88, 88, 88), + (98, 98, 98), + (108, 108, 108), + (118, 118, 118), + (128, 128, 128), + (138, 138, 138), + (148, 148, 148), + (158, 158, 158), + (168, 168, 168), + (178, 178, 178), + (188, 188, 188), + (198, 198, 198), + (208, 208, 208), + (218, 218, 218), + (228, 228, 228), + (238, 238, 238), + ] +) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_pick.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_pick.py new file mode 100644 index 0000000..4f6d8b2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_pick.py @@ -0,0 +1,17 @@ +from typing import Optional + + +def pick_bool(*values: Optional[bool]) -> bool: + """Pick the first non-none bool or return the last value. + + Args: + *values (bool): Any number of boolean or None values. + + Returns: + bool: First non-none boolean. + """ + assert values, "1 or more values required" + for value in values: + if value is not None: + return value + return bool(value) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_ratio.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_ratio.py new file mode 100644 index 0000000..e8a3a67 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_ratio.py @@ -0,0 +1,160 @@ +import sys +from fractions import Fraction +from math import ceil +from typing import cast, List, Optional, Sequence + +if sys.version_info >= (3, 8): + from typing import Protocol +else: + from pip._vendor.typing_extensions import Protocol # pragma: no cover + + +class Edge(Protocol): + """Any object that defines an edge (such as Layout).""" + + size: Optional[int] = None + ratio: int = 1 + minimum_size: int = 1 + + +def ratio_resolve(total: int, edges: Sequence[Edge]) -> List[int]: + """Divide total space to satisfy size, ratio, and minimum_size, constraints. + + The returned list of integers should add up to total in most cases, unless it is + impossible to satisfy all the constraints. For instance, if there are two edges + with a minimum size of 20 each and `total` is 30 then the returned list will be + greater than total. In practice, this would mean that a Layout object would + clip the rows that would overflow the screen height. + + Args: + total (int): Total number of characters. + edges (List[Edge]): Edges within total space. + + Returns: + List[int]: Number of characters for each edge. 
+ """ + # Size of edge or None for yet to be determined + sizes = [(edge.size or None) for edge in edges] + + _Fraction = Fraction + + # While any edges haven't been calculated + while None in sizes: + # Get flexible edges and index to map these back on to sizes list + flexible_edges = [ + (index, edge) + for index, (size, edge) in enumerate(zip(sizes, edges)) + if size is None + ] + # Remaining space in total + remaining = total - sum(size or 0 for size in sizes) + if remaining <= 0: + # No room for flexible edges + return [ + ((edge.minimum_size or 1) if size is None else size) + for size, edge in zip(sizes, edges) + ] + # Calculate number of characters in a ratio portion + portion = _Fraction( + remaining, sum((edge.ratio or 1) for _, edge in flexible_edges) + ) + + # If any edges will be less than their minimum, replace size with the minimum + for index, edge in flexible_edges: + if portion * edge.ratio <= edge.minimum_size: + sizes[index] = edge.minimum_size + # New fixed size will invalidate calculations, so we need to repeat the process + break + else: + # Distribute flexible space and compensate for rounding error + # Since edge sizes can only be integers we need to add the remainder + # to the following line + remainder = _Fraction(0) + for index, edge in flexible_edges: + size, remainder = divmod(portion * edge.ratio + remainder, 1) + sizes[index] = size + break + # Sizes now contains integers only + return cast(List[int], sizes) + + +def ratio_reduce( + total: int, ratios: List[int], maximums: List[int], values: List[int] +) -> List[int]: + """Divide an integer total in to parts based on ratios. + + Args: + total (int): The total to divide. + ratios (List[int]): A list of integer ratios. + maximums (List[int]): List of maximums values for each slot. + values (List[int]): List of values + + Returns: + List[int]: A list of integers guaranteed to sum to total. + """ + ratios = [ratio if _max else 0 for ratio, _max in zip(ratios, maximums)] + total_ratio = sum(ratios) + if not total_ratio: + return values[:] + total_remaining = total + result: List[int] = [] + append = result.append + for ratio, maximum, value in zip(ratios, maximums, values): + if ratio and total_ratio > 0: + distributed = min(maximum, round(ratio * total_remaining / total_ratio)) + append(value - distributed) + total_remaining -= distributed + total_ratio -= ratio + else: + append(value) + return result + + +def ratio_distribute( + total: int, ratios: List[int], minimums: Optional[List[int]] = None +) -> List[int]: + """Distribute an integer total in to parts based on ratios. + + Args: + total (int): The total to divide. + ratios (List[int]): A list of integer ratios. + minimums (List[int]): List of minimum values for each slot. + + Returns: + List[int]: A list of integers guaranteed to sum to total. 
+ """ + if minimums: + ratios = [ratio if _min else 0 for ratio, _min in zip(ratios, minimums)] + total_ratio = sum(ratios) + assert total_ratio > 0, "Sum of ratios must be > 0" + + total_remaining = total + distributed_total: List[int] = [] + append = distributed_total.append + if minimums is None: + _minimums = [0] * len(ratios) + else: + _minimums = minimums + for ratio, minimum in zip(ratios, _minimums): + if total_ratio > 0: + distributed = max(minimum, ceil(ratio * total_remaining / total_ratio)) + else: + distributed = total_remaining + append(distributed) + total_ratio -= ratio + total_remaining -= distributed + return distributed_total + + +if __name__ == "__main__": + from dataclasses import dataclass + + @dataclass + class E: + + size: Optional[int] = None + ratio: int = 1 + minimum_size: int = 1 + + resolved = ratio_resolve(110, [E(None, 1, 1), E(None, 1, 1), E(None, 1, 1)]) + print(sum(resolved)) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_spinners.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_spinners.py new file mode 100644 index 0000000..d0bb1fe --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_spinners.py @@ -0,0 +1,482 @@ +""" +Spinners are from: +* cli-spinners: + MIT License + Copyright (c) Sindre Sorhus (sindresorhus.com) + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights to + use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of + the Software, and to permit persons to whom the Software is furnished to do so, + subject to the following conditions: + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, + INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR + PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE + FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + IN THE SOFTWARE. 
+""" + +SPINNERS = { + "dots": { + "interval": 80, + "frames": "⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏", + }, + "dots2": {"interval": 80, "frames": "⣾⣽⣻⢿⡿⣟⣯⣷"}, + "dots3": { + "interval": 80, + "frames": "⠋⠙⠚⠞⠖⠦⠴⠲⠳⠓", + }, + "dots4": { + "interval": 80, + "frames": "⠄⠆⠇⠋⠙⠸⠰⠠⠰⠸⠙⠋⠇⠆", + }, + "dots5": { + "interval": 80, + "frames": "⠋⠙⠚⠒⠂⠂⠒⠲⠴⠦⠖⠒⠐⠐⠒⠓⠋", + }, + "dots6": { + "interval": 80, + "frames": "⠁⠉⠙⠚⠒⠂⠂⠒⠲⠴⠤⠄⠄⠤⠴⠲⠒⠂⠂⠒⠚⠙⠉⠁", + }, + "dots7": { + "interval": 80, + "frames": "⠈⠉⠋⠓⠒⠐⠐⠒⠖⠦⠤⠠⠠⠤⠦⠖⠒⠐⠐⠒⠓⠋⠉⠈", + }, + "dots8": { + "interval": 80, + "frames": "⠁⠁⠉⠙⠚⠒⠂⠂⠒⠲⠴⠤⠄⠄⠤⠠⠠⠤⠦⠖⠒⠐⠐⠒⠓⠋⠉⠈⠈", + }, + "dots9": {"interval": 80, "frames": "⢹⢺⢼⣸⣇⡧⡗⡏"}, + "dots10": {"interval": 80, "frames": "⢄⢂⢁⡁⡈⡐⡠"}, + "dots11": {"interval": 100, "frames": "⠁⠂⠄⡀⢀⠠⠐⠈"}, + "dots12": { + "interval": 80, + "frames": [ + "⢀⠀", + "⡀⠀", + "⠄⠀", + "⢂⠀", + "⡂⠀", + "⠅⠀", + "⢃⠀", + "⡃⠀", + "⠍⠀", + "⢋⠀", + "⡋⠀", + "⠍⠁", + "⢋⠁", + "⡋⠁", + "⠍⠉", + "⠋⠉", + "⠋⠉", + "⠉⠙", + "⠉⠙", + "⠉⠩", + "⠈⢙", + "⠈⡙", + "⢈⠩", + "⡀⢙", + "⠄⡙", + "⢂⠩", + "⡂⢘", + "⠅⡘", + "⢃⠨", + "⡃⢐", + "⠍⡐", + "⢋⠠", + "⡋⢀", + "⠍⡁", + "⢋⠁", + "⡋⠁", + "⠍⠉", + "⠋⠉", + "⠋⠉", + "⠉⠙", + "⠉⠙", + "⠉⠩", + "⠈⢙", + "⠈⡙", + "⠈⠩", + "⠀⢙", + "⠀⡙", + "⠀⠩", + "⠀⢘", + "⠀⡘", + "⠀⠨", + "⠀⢐", + "⠀⡐", + "⠀⠠", + "⠀⢀", + "⠀⡀", + ], + }, + "dots8Bit": { + "interval": 80, + "frames": "⠀⠁⠂⠃⠄⠅⠆⠇⡀⡁⡂⡃⡄⡅⡆⡇⠈⠉⠊⠋⠌⠍⠎⠏⡈⡉⡊⡋⡌⡍⡎⡏⠐⠑⠒⠓⠔⠕⠖⠗⡐⡑⡒⡓⡔⡕⡖⡗⠘⠙⠚⠛⠜⠝⠞⠟⡘⡙" + "⡚⡛⡜⡝⡞⡟⠠⠡⠢⠣⠤⠥⠦⠧⡠⡡⡢⡣⡤⡥⡦⡧⠨⠩⠪⠫⠬⠭⠮⠯⡨⡩⡪⡫⡬⡭⡮⡯⠰⠱⠲⠳⠴⠵⠶⠷⡰⡱⡲⡳⡴⡵⡶⡷⠸⠹⠺⠻" + "⠼⠽⠾⠿⡸⡹⡺⡻⡼⡽⡾⡿⢀⢁⢂⢃⢄⢅⢆⢇⣀⣁⣂⣃⣄⣅⣆⣇⢈⢉⢊⢋⢌⢍⢎⢏⣈⣉⣊⣋⣌⣍⣎⣏⢐⢑⢒⢓⢔⢕⢖⢗⣐⣑⣒⣓⣔⣕" + "⣖⣗⢘⢙⢚⢛⢜⢝⢞⢟⣘⣙⣚⣛⣜⣝⣞⣟⢠⢡⢢⢣⢤⢥⢦⢧⣠⣡⣢⣣⣤⣥⣦⣧⢨⢩⢪⢫⢬⢭⢮⢯⣨⣩⣪⣫⣬⣭⣮⣯⢰⢱⢲⢳⢴⢵⢶⢷" + "⣰⣱⣲⣳⣴⣵⣶⣷⢸⢹⢺⢻⢼⢽⢾⢿⣸⣹⣺⣻⣼⣽⣾⣿", + }, + "line": {"interval": 130, "frames": ["-", "\\", "|", "/"]}, + "line2": {"interval": 100, "frames": "⠂-–—–-"}, + "pipe": {"interval": 100, "frames": "┤┘┴└├┌┬┐"}, + "simpleDots": {"interval": 400, "frames": [". ", ".. ", "...", " "]}, + "simpleDotsScrolling": { + "interval": 200, + "frames": [". ", ".. 
", "...", " ..", " .", " "], + }, + "star": {"interval": 70, "frames": "✶✸✹✺✹✷"}, + "star2": {"interval": 80, "frames": "+x*"}, + "flip": { + "interval": 70, + "frames": "___-``'´-___", + }, + "hamburger": {"interval": 100, "frames": "☱☲☴"}, + "growVertical": { + "interval": 120, + "frames": "▁▃▄▅▆▇▆▅▄▃", + }, + "growHorizontal": { + "interval": 120, + "frames": "▏▎▍▌▋▊▉▊▋▌▍▎", + }, + "balloon": {"interval": 140, "frames": " .oO@* "}, + "balloon2": {"interval": 120, "frames": ".oO°Oo."}, + "noise": {"interval": 100, "frames": "▓▒░"}, + "bounce": {"interval": 120, "frames": "⠁⠂⠄⠂"}, + "boxBounce": {"interval": 120, "frames": "▖▘▝▗"}, + "boxBounce2": {"interval": 100, "frames": "▌▀▐▄"}, + "triangle": {"interval": 50, "frames": "◢◣◤◥"}, + "arc": {"interval": 100, "frames": "◜◠◝◞◡◟"}, + "circle": {"interval": 120, "frames": "◡⊙◠"}, + "squareCorners": {"interval": 180, "frames": "◰◳◲◱"}, + "circleQuarters": {"interval": 120, "frames": "◴◷◶◵"}, + "circleHalves": {"interval": 50, "frames": "◐◓◑◒"}, + "squish": {"interval": 100, "frames": "╫╪"}, + "toggle": {"interval": 250, "frames": "⊶⊷"}, + "toggle2": {"interval": 80, "frames": "▫▪"}, + "toggle3": {"interval": 120, "frames": "□■"}, + "toggle4": {"interval": 100, "frames": "■□▪▫"}, + "toggle5": {"interval": 100, "frames": "▮▯"}, + "toggle6": {"interval": 300, "frames": "ဝ၀"}, + "toggle7": {"interval": 80, "frames": "⦾⦿"}, + "toggle8": {"interval": 100, "frames": "◍◌"}, + "toggle9": {"interval": 100, "frames": "◉◎"}, + "toggle10": {"interval": 100, "frames": "㊂㊀㊁"}, + "toggle11": {"interval": 50, "frames": "⧇⧆"}, + "toggle12": {"interval": 120, "frames": "☗☖"}, + "toggle13": {"interval": 80, "frames": "=*-"}, + "arrow": {"interval": 100, "frames": "←↖↑↗→↘↓↙"}, + "arrow2": { + "interval": 80, + "frames": ["⬆️ ", "↗️ ", "➡️ ", "↘️ ", "⬇️ ", "↙️ ", "⬅️ ", "↖️ "], + }, + "arrow3": { + "interval": 120, + "frames": ["▹▹▹▹▹", "▸▹▹▹▹", "▹▸▹▹▹", "▹▹▸▹▹", "▹▹▹▸▹", "▹▹▹▹▸"], + }, + "bouncingBar": { + "interval": 80, + "frames": [ + "[ ]", + "[= ]", + "[== ]", + "[=== ]", + "[ ===]", + "[ ==]", + "[ =]", + "[ ]", + "[ =]", + "[ ==]", + "[ ===]", + "[====]", + "[=== ]", + "[== ]", + "[= ]", + ], + }, + "bouncingBall": { + "interval": 80, + "frames": [ + "( ● )", + "( ● )", + "( ● )", + "( ● )", + "( ●)", + "( ● )", + "( ● )", + "( ● )", + "( ● )", + "(● )", + ], + }, + "smiley": {"interval": 200, "frames": ["😄 ", "😝 "]}, + "monkey": {"interval": 300, "frames": ["🙈 ", "🙈 ", "🙉 ", "🙊 "]}, + "hearts": {"interval": 100, "frames": ["💛 ", "💙 ", "💜 ", "💚 ", "❤️ "]}, + "clock": { + "interval": 100, + "frames": [ + "🕛 ", + "🕐 ", + "🕑 ", + "🕒 ", + "🕓 ", + "🕔 ", + "🕕 ", + "🕖 ", + "🕗 ", + "🕘 ", + "🕙 ", + "🕚 ", + ], + }, + "earth": {"interval": 180, "frames": ["🌍 ", "🌎 ", "🌏 "]}, + "material": { + "interval": 17, + "frames": [ + "█▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "██▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "███▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "████▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "██████▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "██████▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "███████▁▁▁▁▁▁▁▁▁▁▁▁▁", + "████████▁▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "██████████▁▁▁▁▁▁▁▁▁▁", + "███████████▁▁▁▁▁▁▁▁▁", + "█████████████▁▁▁▁▁▁▁", + "██████████████▁▁▁▁▁▁", + "██████████████▁▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁▁██████████████▁▁▁▁", + "▁▁▁██████████████▁▁▁", + "▁▁▁▁█████████████▁▁▁", + "▁▁▁▁██████████████▁▁", + "▁▁▁▁██████████████▁▁", + "▁▁▁▁▁██████████████▁", + "▁▁▁▁▁██████████████▁", + "▁▁▁▁▁██████████████▁", + "▁▁▁▁▁▁██████████████", + "▁▁▁▁▁▁██████████████", + "▁▁▁▁▁▁▁█████████████", + 
"▁▁▁▁▁▁▁█████████████", + "▁▁▁▁▁▁▁▁████████████", + "▁▁▁▁▁▁▁▁████████████", + "▁▁▁▁▁▁▁▁▁███████████", + "▁▁▁▁▁▁▁▁▁███████████", + "▁▁▁▁▁▁▁▁▁▁██████████", + "▁▁▁▁▁▁▁▁▁▁██████████", + "▁▁▁▁▁▁▁▁▁▁▁▁████████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁███████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁██████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█████", + "█▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", + "██▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "██▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "███▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "████▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", + "█████▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "█████▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "██████▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "████████▁▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "███████████▁▁▁▁▁▁▁▁▁", + "████████████▁▁▁▁▁▁▁▁", + "████████████▁▁▁▁▁▁▁▁", + "██████████████▁▁▁▁▁▁", + "██████████████▁▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁▁▁█████████████▁▁▁▁", + "▁▁▁▁▁████████████▁▁▁", + "▁▁▁▁▁████████████▁▁▁", + "▁▁▁▁▁▁███████████▁▁▁", + "▁▁▁▁▁▁▁▁█████████▁▁▁", + "▁▁▁▁▁▁▁▁█████████▁▁▁", + "▁▁▁▁▁▁▁▁▁█████████▁▁", + "▁▁▁▁▁▁▁▁▁█████████▁▁", + "▁▁▁▁▁▁▁▁▁▁█████████▁", + "▁▁▁▁▁▁▁▁▁▁▁████████▁", + "▁▁▁▁▁▁▁▁▁▁▁████████▁", + "▁▁▁▁▁▁▁▁▁▁▁▁███████▁", + "▁▁▁▁▁▁▁▁▁▁▁▁███████▁", + "▁▁▁▁▁▁▁▁▁▁▁▁▁███████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁███████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + ], + }, + "moon": { + "interval": 80, + "frames": ["🌑 ", "🌒 ", "🌓 ", "🌔 ", "🌕 ", "🌖 ", "🌗 ", "🌘 "], + }, + "runner": {"interval": 140, "frames": ["🚶 ", "🏃 "]}, + "pong": { + "interval": 80, + "frames": [ + "▐⠂ ▌", + "▐⠈ ▌", + "▐ ⠂ ▌", + "▐ ⠠ ▌", + "▐ ⡀ ▌", + "▐ ⠠ ▌", + "▐ ⠂ ▌", + "▐ ⠈ ▌", + "▐ ⠂ ▌", + "▐ ⠠ ▌", + "▐ ⡀ ▌", + "▐ ⠠ ▌", + "▐ ⠂ ▌", + "▐ ⠈ ▌", + "▐ ⠂▌", + "▐ ⠠▌", + "▐ ⡀▌", + "▐ ⠠ ▌", + "▐ ⠂ ▌", + "▐ ⠈ ▌", + "▐ ⠂ ▌", + "▐ ⠠ ▌", + "▐ ⡀ ▌", + "▐ ⠠ ▌", + "▐ ⠂ ▌", + "▐ ⠈ ▌", + "▐ ⠂ ▌", + "▐ ⠠ ▌", + "▐ ⡀ ▌", + "▐⠠ ▌", + ], + }, + "shark": { + "interval": 120, + "frames": [ + "▐|\\____________▌", + "▐_|\\___________▌", + "▐__|\\__________▌", + "▐___|\\_________▌", + "▐____|\\________▌", + "▐_____|\\_______▌", + "▐______|\\______▌", + "▐_______|\\_____▌", + "▐________|\\____▌", + "▐_________|\\___▌", + "▐__________|\\__▌", + "▐___________|\\_▌", + "▐____________|\\▌", + "▐____________/|▌", + "▐___________/|_▌", + "▐__________/|__▌", + "▐_________/|___▌", + "▐________/|____▌", + "▐_______/|_____▌", + "▐______/|______▌", + "▐_____/|_______▌", + "▐____/|________▌", + "▐___/|_________▌", + "▐__/|__________▌", + "▐_/|___________▌", + "▐/|____________▌", + ], + }, + "dqpb": {"interval": 100, "frames": "dqpb"}, + "weather": { + "interval": 100, + "frames": [ + "☀️ ", + "☀️ ", + "☀️ ", + "🌤 ", + "⛅️ ", + "🌥 ", + "☁️ ", + "🌧 ", + "🌨 ", + "🌧 ", + "🌨 ", + "🌧 ", + "🌨 ", + "⛈ ", + "🌨 ", + "🌧 ", + "🌨 ", + "☁️ ", + "🌥 ", + "⛅️ ", + "🌤 ", + "☀️ ", + "☀️ ", + ], + }, + "christmas": {"interval": 400, "frames": "🌲🎄"}, + "grenade": { + "interval": 80, + "frames": [ + "، ", + "′ ", + " ´ ", + " ‾ ", + " ⸌", + " ⸊", + " |", + " ⁎", + " ⁕", + " ෴ ", + " ⁓", + " ", + " ", + " ", + ], + }, + "point": {"interval": 125, "frames": ["∙∙∙", "●∙∙", "∙●∙", "∙∙●", "∙∙∙"]}, + "layer": {"interval": 150, "frames": "-=≡"}, + "betaWave": { + "interval": 80, + "frames": [ + "ρββββββ", + "βρβββββ", + "ββρββββ", + 
"βββρβββ", + "ββββρββ", + "βββββρβ", + "ββββββρ", + ], + }, + "aesthetic": { + "interval": 80, + "frames": [ + "▰▱▱▱▱▱▱", + "▰▰▱▱▱▱▱", + "▰▰▰▱▱▱▱", + "▰▰▰▰▱▱▱", + "▰▰▰▰▰▱▱", + "▰▰▰▰▰▰▱", + "▰▰▰▰▰▰▰", + "▰▱▱▱▱▱▱", + ], + }, +} diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_stack.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_stack.py new file mode 100644 index 0000000..194564e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_stack.py @@ -0,0 +1,16 @@ +from typing import List, TypeVar + +T = TypeVar("T") + + +class Stack(List[T]): + """A small shim over builtin list.""" + + @property + def top(self) -> T: + """Get top of stack.""" + return self[-1] + + def push(self, item: T) -> None: + """Push an item on to the stack (append in stack nomenclature).""" + self.append(item) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_timer.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_timer.py new file mode 100644 index 0000000..a2ca6be --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_timer.py @@ -0,0 +1,19 @@ +""" +Timer context manager, only used in debug. + +""" + +from time import time + +import contextlib +from typing import Generator + + +@contextlib.contextmanager +def timer(subject: str = "time") -> Generator[None, None, None]: + """print the elapsed time. (only used in debugging)""" + start = time() + yield + elapsed = time() - start + elapsed_ms = elapsed * 1000 + print(f"{subject} elapsed {elapsed_ms:.1f}ms") diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_win32_console.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_win32_console.py new file mode 100644 index 0000000..81b1082 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_win32_console.py @@ -0,0 +1,662 @@ +"""Light wrapper around the Win32 Console API - this module should only be imported on Windows + +The API that this module wraps is documented at https://docs.microsoft.com/en-us/windows/console/console-functions +""" +import ctypes +import sys +from typing import Any + +windll: Any = None +if sys.platform == "win32": + windll = ctypes.LibraryLoader(ctypes.WinDLL) +else: + raise ImportError(f"{__name__} can only be imported on Windows") + +import time +from ctypes import Structure, byref, wintypes +from typing import IO, NamedTuple, Type, cast + +from pip._vendor.rich.color import ColorSystem +from pip._vendor.rich.style import Style + +STDOUT = -11 +ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4 + +COORD = wintypes._COORD + + +class LegacyWindowsError(Exception): + pass + + +class WindowsCoordinates(NamedTuple): + """Coordinates in the Windows Console API are (y, x), not (x, y). + This class is intended to prevent that confusion. + Rows and columns are indexed from 0. + This class can be used in place of wintypes._COORD in arguments and argtypes. + """ + + row: int + col: int + + @classmethod + def from_param(cls, value: "WindowsCoordinates") -> COORD: + """Converts a WindowsCoordinates into a wintypes _COORD structure. + This classmethod is internally called by ctypes to perform the conversion. + + Args: + value (WindowsCoordinates): The input coordinates to convert. + + Returns: + wintypes._COORD: The converted coordinates struct. 
+ """ + return COORD(value.col, value.row) + + +class CONSOLE_SCREEN_BUFFER_INFO(Structure): + _fields_ = [ + ("dwSize", COORD), + ("dwCursorPosition", COORD), + ("wAttributes", wintypes.WORD), + ("srWindow", wintypes.SMALL_RECT), + ("dwMaximumWindowSize", COORD), + ] + + +class CONSOLE_CURSOR_INFO(ctypes.Structure): + _fields_ = [("dwSize", wintypes.DWORD), ("bVisible", wintypes.BOOL)] + + +_GetStdHandle = windll.kernel32.GetStdHandle +_GetStdHandle.argtypes = [ + wintypes.DWORD, +] +_GetStdHandle.restype = wintypes.HANDLE + + +def GetStdHandle(handle: int = STDOUT) -> wintypes.HANDLE: + """Retrieves a handle to the specified standard device (standard input, standard output, or standard error). + + Args: + handle (int): Integer identifier for the handle. Defaults to -11 (stdout). + + Returns: + wintypes.HANDLE: The handle + """ + return cast(wintypes.HANDLE, _GetStdHandle(handle)) + + +_GetConsoleMode = windll.kernel32.GetConsoleMode +_GetConsoleMode.argtypes = [wintypes.HANDLE, wintypes.LPDWORD] +_GetConsoleMode.restype = wintypes.BOOL + + +def GetConsoleMode(std_handle: wintypes.HANDLE) -> int: + """Retrieves the current input mode of a console's input buffer + or the current output mode of a console screen buffer. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + + Raises: + LegacyWindowsError: If any error occurs while calling the Windows console API. + + Returns: + int: Value representing the current console mode as documented at + https://docs.microsoft.com/en-us/windows/console/getconsolemode#parameters + """ + + console_mode = wintypes.DWORD() + success = bool(_GetConsoleMode(std_handle, console_mode)) + if not success: + raise LegacyWindowsError("Unable to get legacy Windows Console Mode") + return console_mode.value + + +_FillConsoleOutputCharacterW = windll.kernel32.FillConsoleOutputCharacterW +_FillConsoleOutputCharacterW.argtypes = [ + wintypes.HANDLE, + ctypes.c_char, + wintypes.DWORD, + cast(Type[COORD], WindowsCoordinates), + ctypes.POINTER(wintypes.DWORD), +] +_FillConsoleOutputCharacterW.restype = wintypes.BOOL + + +def FillConsoleOutputCharacter( + std_handle: wintypes.HANDLE, + char: str, + length: int, + start: WindowsCoordinates, +) -> int: + """Writes a character to the console screen buffer a specified number of times, beginning at the specified coordinates. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + char (str): The character to write. Must be a string of length 1. + length (int): The number of times to write the character. + start (WindowsCoordinates): The coordinates to start writing at. + + Returns: + int: The number of characters written. 
+ """ + character = ctypes.c_char(char.encode()) + num_characters = wintypes.DWORD(length) + num_written = wintypes.DWORD(0) + _FillConsoleOutputCharacterW( + std_handle, + character, + num_characters, + start, + byref(num_written), + ) + return num_written.value + + +_FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute +_FillConsoleOutputAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, + wintypes.DWORD, + cast(Type[COORD], WindowsCoordinates), + ctypes.POINTER(wintypes.DWORD), +] +_FillConsoleOutputAttribute.restype = wintypes.BOOL + + +def FillConsoleOutputAttribute( + std_handle: wintypes.HANDLE, + attributes: int, + length: int, + start: WindowsCoordinates, +) -> int: + """Sets the character attributes for a specified number of character cells, + beginning at the specified coordinates in a screen buffer. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + attributes (int): Integer value representing the foreground and background colours of the cells. + length (int): The number of cells to set the output attribute of. + start (WindowsCoordinates): The coordinates of the first cell whose attributes are to be set. + + Returns: + int: The number of cells whose attributes were actually set. + """ + num_cells = wintypes.DWORD(length) + style_attrs = wintypes.WORD(attributes) + num_written = wintypes.DWORD(0) + _FillConsoleOutputAttribute( + std_handle, style_attrs, num_cells, start, byref(num_written) + ) + return num_written.value + + +_SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute +_SetConsoleTextAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, +] +_SetConsoleTextAttribute.restype = wintypes.BOOL + + +def SetConsoleTextAttribute( + std_handle: wintypes.HANDLE, attributes: wintypes.WORD +) -> bool: + """Set the colour attributes for all text written after this function is called. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + attributes (int): Integer value representing the foreground and background colours. + + + Returns: + bool: True if the attribute was set successfully, otherwise False. + """ + return bool(_SetConsoleTextAttribute(std_handle, attributes)) + + +_GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo +_GetConsoleScreenBufferInfo.argtypes = [ + wintypes.HANDLE, + ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO), +] +_GetConsoleScreenBufferInfo.restype = wintypes.BOOL + + +def GetConsoleScreenBufferInfo( + std_handle: wintypes.HANDLE, +) -> CONSOLE_SCREEN_BUFFER_INFO: + """Retrieves information about the specified console screen buffer. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. 
+ + Returns: + CONSOLE_SCREEN_BUFFER_INFO: A CONSOLE_SCREEN_BUFFER_INFO ctype struct contain information about + screen size, cursor position, colour attributes, and more.""" + console_screen_buffer_info = CONSOLE_SCREEN_BUFFER_INFO() + _GetConsoleScreenBufferInfo(std_handle, byref(console_screen_buffer_info)) + return console_screen_buffer_info + + +_SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition +_SetConsoleCursorPosition.argtypes = [ + wintypes.HANDLE, + cast(Type[COORD], WindowsCoordinates), +] +_SetConsoleCursorPosition.restype = wintypes.BOOL + + +def SetConsoleCursorPosition( + std_handle: wintypes.HANDLE, coords: WindowsCoordinates +) -> bool: + """Set the position of the cursor in the console screen + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + coords (WindowsCoordinates): The coordinates to move the cursor to. + + Returns: + bool: True if the function succeeds, otherwise False. + """ + return bool(_SetConsoleCursorPosition(std_handle, coords)) + + +_GetConsoleCursorInfo = windll.kernel32.GetConsoleCursorInfo +_GetConsoleCursorInfo.argtypes = [ + wintypes.HANDLE, + ctypes.POINTER(CONSOLE_CURSOR_INFO), +] +_GetConsoleCursorInfo.restype = wintypes.BOOL + + +def GetConsoleCursorInfo( + std_handle: wintypes.HANDLE, cursor_info: CONSOLE_CURSOR_INFO +) -> bool: + """Get the cursor info - used to get cursor visibility and width + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + cursor_info (CONSOLE_CURSOR_INFO): CONSOLE_CURSOR_INFO ctype struct that receives information + about the console's cursor. + + Returns: + bool: True if the function succeeds, otherwise False. + """ + return bool(_GetConsoleCursorInfo(std_handle, byref(cursor_info))) + + +_SetConsoleCursorInfo = windll.kernel32.SetConsoleCursorInfo +_SetConsoleCursorInfo.argtypes = [ + wintypes.HANDLE, + ctypes.POINTER(CONSOLE_CURSOR_INFO), +] +_SetConsoleCursorInfo.restype = wintypes.BOOL + + +def SetConsoleCursorInfo( + std_handle: wintypes.HANDLE, cursor_info: CONSOLE_CURSOR_INFO +) -> bool: + """Set the cursor info - used for adjusting cursor visibility and width + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + cursor_info (CONSOLE_CURSOR_INFO): CONSOLE_CURSOR_INFO ctype struct containing the new cursor info. + + Returns: + bool: True if the function succeeds, otherwise False. + """ + return bool(_SetConsoleCursorInfo(std_handle, byref(cursor_info))) + + +_SetConsoleTitle = windll.kernel32.SetConsoleTitleW +_SetConsoleTitle.argtypes = [wintypes.LPCWSTR] +_SetConsoleTitle.restype = wintypes.BOOL + + +def SetConsoleTitle(title: str) -> bool: + """Sets the title of the current console window + + Args: + title (str): The new title of the console window. + + Returns: + bool: True if the function succeeds, otherwise False. + """ + return bool(_SetConsoleTitle(title)) + + +class LegacyWindowsTerm: + """This class allows interaction with the legacy Windows Console API. It should only be used in the context + of environments where virtual terminal processing is not available. However, if it is used in a Windows environment, + the entire API should work. + + Args: + file (IO[str]): The file which the Windows Console API HANDLE is retrieved from, defaults to sys.stdout. 
+ """ + + BRIGHT_BIT = 8 + + # Indices are ANSI color numbers, values are the corresponding Windows Console API color numbers + ANSI_TO_WINDOWS = [ + 0, # black The Windows colours are defined in wincon.h as follows: + 4, # red define FOREGROUND_BLUE 0x0001 -- 0000 0001 + 2, # green define FOREGROUND_GREEN 0x0002 -- 0000 0010 + 6, # yellow define FOREGROUND_RED 0x0004 -- 0000 0100 + 1, # blue define FOREGROUND_INTENSITY 0x0008 -- 0000 1000 + 5, # magenta define BACKGROUND_BLUE 0x0010 -- 0001 0000 + 3, # cyan define BACKGROUND_GREEN 0x0020 -- 0010 0000 + 7, # white define BACKGROUND_RED 0x0040 -- 0100 0000 + 8, # bright black (grey) define BACKGROUND_INTENSITY 0x0080 -- 1000 0000 + 12, # bright red + 10, # bright green + 14, # bright yellow + 9, # bright blue + 13, # bright magenta + 11, # bright cyan + 15, # bright white + ] + + def __init__(self, file: "IO[str]") -> None: + handle = GetStdHandle(STDOUT) + self._handle = handle + default_text = GetConsoleScreenBufferInfo(handle).wAttributes + self._default_text = default_text + + self._default_fore = default_text & 7 + self._default_back = (default_text >> 4) & 7 + self._default_attrs = self._default_fore | (self._default_back << 4) + + self._file = file + self.write = file.write + self.flush = file.flush + + @property + def cursor_position(self) -> WindowsCoordinates: + """Returns the current position of the cursor (0-based) + + Returns: + WindowsCoordinates: The current cursor position. + """ + coord: COORD = GetConsoleScreenBufferInfo(self._handle).dwCursorPosition + return WindowsCoordinates(row=cast(int, coord.Y), col=cast(int, coord.X)) + + @property + def screen_size(self) -> WindowsCoordinates: + """Returns the current size of the console screen buffer, in character columns and rows + + Returns: + WindowsCoordinates: The width and height of the screen as WindowsCoordinates. + """ + screen_size: COORD = GetConsoleScreenBufferInfo(self._handle).dwSize + return WindowsCoordinates( + row=cast(int, screen_size.Y), col=cast(int, screen_size.X) + ) + + def write_text(self, text: str) -> None: + """Write text directly to the terminal without any modification of styles + + Args: + text (str): The text to write to the console + """ + self.write(text) + self.flush() + + def write_styled(self, text: str, style: Style) -> None: + """Write styled text to the terminal. + + Args: + text (str): The text to write + style (Style): The style of the text + """ + color = style.color + bgcolor = style.bgcolor + if style.reverse: + color, bgcolor = bgcolor, color + + if color: + fore = color.downgrade(ColorSystem.WINDOWS).number + fore = fore if fore is not None else 7 # Default to ANSI 7: White + if style.bold: + fore = fore | self.BRIGHT_BIT + if style.dim: + fore = fore & ~self.BRIGHT_BIT + fore = self.ANSI_TO_WINDOWS[fore] + else: + fore = self._default_fore + + if bgcolor: + back = bgcolor.downgrade(ColorSystem.WINDOWS).number + back = back if back is not None else 0 # Default to ANSI 0: Black + back = self.ANSI_TO_WINDOWS[back] + else: + back = self._default_back + + assert fore is not None + assert back is not None + + SetConsoleTextAttribute( + self._handle, attributes=ctypes.c_ushort(fore | (back << 4)) + ) + self.write_text(text) + SetConsoleTextAttribute(self._handle, attributes=self._default_text) + + def move_cursor_to(self, new_position: WindowsCoordinates) -> None: + """Set the position of the cursor + + Args: + new_position (WindowsCoordinates): The WindowsCoordinates representing the new position of the cursor. 
+ """ + if new_position.col < 0 or new_position.row < 0: + return + SetConsoleCursorPosition(self._handle, coords=new_position) + + def erase_line(self) -> None: + """Erase all content on the line the cursor is currently located at""" + screen_size = self.screen_size + cursor_position = self.cursor_position + cells_to_erase = screen_size.col + start_coordinates = WindowsCoordinates(row=cursor_position.row, col=0) + FillConsoleOutputCharacter( + self._handle, " ", length=cells_to_erase, start=start_coordinates + ) + FillConsoleOutputAttribute( + self._handle, + self._default_attrs, + length=cells_to_erase, + start=start_coordinates, + ) + + def erase_end_of_line(self) -> None: + """Erase all content from the cursor position to the end of that line""" + cursor_position = self.cursor_position + cells_to_erase = self.screen_size.col - cursor_position.col + FillConsoleOutputCharacter( + self._handle, " ", length=cells_to_erase, start=cursor_position + ) + FillConsoleOutputAttribute( + self._handle, + self._default_attrs, + length=cells_to_erase, + start=cursor_position, + ) + + def erase_start_of_line(self) -> None: + """Erase all content from the cursor position to the start of that line""" + row, col = self.cursor_position + start = WindowsCoordinates(row, 0) + FillConsoleOutputCharacter(self._handle, " ", length=col, start=start) + FillConsoleOutputAttribute( + self._handle, self._default_attrs, length=col, start=start + ) + + def move_cursor_up(self) -> None: + """Move the cursor up a single cell""" + cursor_position = self.cursor_position + SetConsoleCursorPosition( + self._handle, + coords=WindowsCoordinates( + row=cursor_position.row - 1, col=cursor_position.col + ), + ) + + def move_cursor_down(self) -> None: + """Move the cursor down a single cell""" + cursor_position = self.cursor_position + SetConsoleCursorPosition( + self._handle, + coords=WindowsCoordinates( + row=cursor_position.row + 1, + col=cursor_position.col, + ), + ) + + def move_cursor_forward(self) -> None: + """Move the cursor forward a single cell. Wrap to the next line if required.""" + row, col = self.cursor_position + if col == self.screen_size.col - 1: + row += 1 + col = 0 + else: + col += 1 + SetConsoleCursorPosition( + self._handle, coords=WindowsCoordinates(row=row, col=col) + ) + + def move_cursor_to_column(self, column: int) -> None: + """Move cursor to the column specified by the zero-based column index, staying on the same row + + Args: + column (int): The zero-based column index to move the cursor to. + """ + row, _ = self.cursor_position + SetConsoleCursorPosition(self._handle, coords=WindowsCoordinates(row, column)) + + def move_cursor_backward(self) -> None: + """Move the cursor backward a single cell. 
Wrap to the previous line if required.""" + row, col = self.cursor_position + if col == 0: + row -= 1 + col = self.screen_size.col - 1 + else: + col -= 1 + SetConsoleCursorPosition( + self._handle, coords=WindowsCoordinates(row=row, col=col) + ) + + def hide_cursor(self) -> None: + """Hide the cursor""" + current_cursor_size = self._get_cursor_size() + invisible_cursor = CONSOLE_CURSOR_INFO(dwSize=current_cursor_size, bVisible=0) + SetConsoleCursorInfo(self._handle, cursor_info=invisible_cursor) + + def show_cursor(self) -> None: + """Show the cursor""" + current_cursor_size = self._get_cursor_size() + visible_cursor = CONSOLE_CURSOR_INFO(dwSize=current_cursor_size, bVisible=1) + SetConsoleCursorInfo(self._handle, cursor_info=visible_cursor) + + def set_title(self, title: str) -> None: + """Set the title of the terminal window + + Args: + title (str): The new title of the console window + """ + assert len(title) < 255, "Console title must be less than 255 characters" + SetConsoleTitle(title) + + def _get_cursor_size(self) -> int: + """Get the percentage of the character cell that is filled by the cursor""" + cursor_info = CONSOLE_CURSOR_INFO() + GetConsoleCursorInfo(self._handle, cursor_info=cursor_info) + return int(cursor_info.dwSize) + + +if __name__ == "__main__": + handle = GetStdHandle() + + from pip._vendor.rich.console import Console + + console = Console() + + term = LegacyWindowsTerm(sys.stdout) + term.set_title("Win32 Console Examples") + + style = Style(color="black", bgcolor="red") + + heading = Style.parse("black on green") + + # Check colour output + console.rule("Checking colour output") + console.print("[on red]on red!") + console.print("[blue]blue!") + console.print("[yellow]yellow!") + console.print("[bold yellow]bold yellow!") + console.print("[bright_yellow]bright_yellow!") + console.print("[dim bright_yellow]dim bright_yellow!") + console.print("[italic cyan]italic cyan!") + console.print("[bold white on blue]bold white on blue!") + console.print("[reverse bold white on blue]reverse bold white on blue!") + console.print("[bold black on cyan]bold black on cyan!") + console.print("[black on green]black on green!") + console.print("[blue on green]blue on green!") + console.print("[white on black]white on black!") + console.print("[black on white]black on white!") + console.print("[#1BB152 on #DA812D]#1BB152 on #DA812D!") + + # Check cursor movement + console.rule("Checking cursor movement") + console.print() + term.move_cursor_backward() + term.move_cursor_backward() + term.write_text("went back and wrapped to prev line") + time.sleep(1) + term.move_cursor_up() + term.write_text("we go up") + time.sleep(1) + term.move_cursor_down() + term.write_text("and down") + time.sleep(1) + term.move_cursor_up() + term.move_cursor_backward() + term.move_cursor_backward() + term.write_text("we went up and back 2") + time.sleep(1) + term.move_cursor_down() + term.move_cursor_backward() + term.move_cursor_backward() + term.write_text("we went down and back 2") + time.sleep(1) + + # Check erasing of lines + term.hide_cursor() + console.print() + console.rule("Checking line erasing") + console.print("\n...Deleting to the start of the line...") + term.write_text("The red arrow shows the cursor location, and direction of erase") + time.sleep(1) + term.move_cursor_to_column(16) + term.write_styled("<", Style.parse("black on red")) + term.move_cursor_backward() + time.sleep(1) + term.erase_start_of_line() + time.sleep(1) + + console.print("\n\n...And to the end of the line...") + 
term.write_text("The red arrow shows the cursor location, and direction of erase") + time.sleep(1) + + term.move_cursor_to_column(16) + term.write_styled(">", Style.parse("black on red")) + time.sleep(1) + term.erase_end_of_line() + time.sleep(1) + + console.print("\n\n...Now the whole line will be erased...") + term.write_styled("I'm going to disappear!", style=Style.parse("black on cyan")) + time.sleep(1) + term.erase_line() + + term.show_cursor() + print("\n") diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_windows.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_windows.py new file mode 100644 index 0000000..10fc0d7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_windows.py @@ -0,0 +1,72 @@ +import sys +from dataclasses import dataclass + + +@dataclass +class WindowsConsoleFeatures: + """Windows features available.""" + + vt: bool = False + """The console supports VT codes.""" + truecolor: bool = False + """The console supports truecolor.""" + + +try: + import ctypes + from ctypes import LibraryLoader + + if sys.platform == "win32": + windll = LibraryLoader(ctypes.WinDLL) + else: + windll = None + raise ImportError("Not windows") + + from pip._vendor.rich._win32_console import ( + ENABLE_VIRTUAL_TERMINAL_PROCESSING, + GetConsoleMode, + GetStdHandle, + LegacyWindowsError, + ) + +except (AttributeError, ImportError, ValueError): + + # Fallback if we can't load the Windows DLL + def get_windows_console_features() -> WindowsConsoleFeatures: + features = WindowsConsoleFeatures() + return features + +else: + + def get_windows_console_features() -> WindowsConsoleFeatures: + """Get windows console features. + + Returns: + WindowsConsoleFeatures: An instance of WindowsConsoleFeatures. + """ + handle = GetStdHandle() + try: + console_mode = GetConsoleMode(handle) + success = True + except LegacyWindowsError: + console_mode = 0 + success = False + vt = bool(success and console_mode & ENABLE_VIRTUAL_TERMINAL_PROCESSING) + truecolor = False + if vt: + win_version = sys.getwindowsversion() + truecolor = win_version.major > 10 or ( + win_version.major == 10 and win_version.build >= 15063 + ) + features = WindowsConsoleFeatures(vt=vt, truecolor=truecolor) + return features + + +if __name__ == "__main__": + import platform + + features = get_windows_console_features() + from pip._vendor.rich import print + + print(f'platform="{platform.system()}"') + print(repr(features)) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_windows_renderer.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_windows_renderer.py new file mode 100644 index 0000000..5ece056 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_windows_renderer.py @@ -0,0 +1,56 @@ +from typing import Iterable, Sequence, Tuple, cast + +from pip._vendor.rich._win32_console import LegacyWindowsTerm, WindowsCoordinates +from pip._vendor.rich.segment import ControlCode, ControlType, Segment + + +def legacy_windows_render(buffer: Iterable[Segment], term: LegacyWindowsTerm) -> None: + """Makes appropriate Windows Console API calls based on the segments in the buffer. + + Args: + buffer (Iterable[Segment]): Iterable of Segments to convert to Win32 API calls. + term (LegacyWindowsTerm): Used to call the Windows Console API. 
+ """ + for text, style, control in buffer: + if not control: + if style: + term.write_styled(text, style) + else: + term.write_text(text) + else: + control_codes: Sequence[ControlCode] = control + for control_code in control_codes: + control_type = control_code[0] + if control_type == ControlType.CURSOR_MOVE_TO: + _, x, y = cast(Tuple[ControlType, int, int], control_code) + term.move_cursor_to(WindowsCoordinates(row=y - 1, col=x - 1)) + elif control_type == ControlType.CARRIAGE_RETURN: + term.write_text("\r") + elif control_type == ControlType.HOME: + term.move_cursor_to(WindowsCoordinates(0, 0)) + elif control_type == ControlType.CURSOR_UP: + term.move_cursor_up() + elif control_type == ControlType.CURSOR_DOWN: + term.move_cursor_down() + elif control_type == ControlType.CURSOR_FORWARD: + term.move_cursor_forward() + elif control_type == ControlType.CURSOR_BACKWARD: + term.move_cursor_backward() + elif control_type == ControlType.CURSOR_MOVE_TO_COLUMN: + _, column = cast(Tuple[ControlType, int], control_code) + term.move_cursor_to_column(column - 1) + elif control_type == ControlType.HIDE_CURSOR: + term.hide_cursor() + elif control_type == ControlType.SHOW_CURSOR: + term.show_cursor() + elif control_type == ControlType.ERASE_IN_LINE: + _, mode = cast(Tuple[ControlType, int], control_code) + if mode == 0: + term.erase_end_of_line() + elif mode == 1: + term.erase_start_of_line() + elif mode == 2: + term.erase_line() + elif control_type == ControlType.SET_WINDOW_TITLE: + _, title = cast(Tuple[ControlType, str], control_code) + term.set_title(title) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_wrap.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_wrap.py new file mode 100644 index 0000000..c45f193 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/_wrap.py @@ -0,0 +1,56 @@ +import re +from typing import Iterable, List, Tuple + +from ._loop import loop_last +from .cells import cell_len, chop_cells + +re_word = re.compile(r"\s*\S+\s*") + + +def words(text: str) -> Iterable[Tuple[int, int, str]]: + position = 0 + word_match = re_word.match(text, position) + while word_match is not None: + start, end = word_match.span() + word = word_match.group(0) + yield start, end, word + word_match = re_word.match(text, end) + + +def divide_line(text: str, width: int, fold: bool = True) -> List[int]: + divides: List[int] = [] + append = divides.append + line_position = 0 + _cell_len = cell_len + for start, _end, word in words(text): + word_length = _cell_len(word.rstrip()) + if line_position + word_length > width: + if word_length > width: + if fold: + chopped_words = chop_cells(word, max_size=width, position=0) + for last, line in loop_last(chopped_words): + if start: + append(start) + + if last: + line_position = _cell_len(line) + else: + start += len(line) + else: + if start: + append(start) + line_position = _cell_len(word) + elif line_position and start: + append(start) + line_position = _cell_len(word) + else: + line_position += _cell_len(word) + return divides + + +if __name__ == "__main__": # pragma: no cover + from .console import Console + + console = Console(width=10) + console.print("12345 abcdefghijklmnopqrstuvwyxzABCDEFGHIJKLMNOPQRSTUVWXYZ 12345") + print(chop_cells("abcdefghijklmnopqrstuvwxyz", 10, position=2)) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/abc.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/abc.py new file mode 100644 index 0000000..e6e498e --- /dev/null +++ 
b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/abc.py @@ -0,0 +1,33 @@ +from abc import ABC + + +class RichRenderable(ABC): + """An abstract base class for Rich renderables. + + Note that there is no need to extend this class, the intended use is to check if an + object supports the Rich renderable protocol. For example:: + + if isinstance(my_object, RichRenderable): + console.print(my_object) + + """ + + @classmethod + def __subclasshook__(cls, other: type) -> bool: + """Check if this class supports the rich render protocol.""" + return hasattr(other, "__rich_console__") or hasattr(other, "__rich__") + + +if __name__ == "__main__": # pragma: no cover + from pip._vendor.rich.text import Text + + t = Text() + print(isinstance(Text, RichRenderable)) + print(isinstance(t, RichRenderable)) + + class Foo: + pass + + f = Foo() + print(isinstance(f, RichRenderable)) + print(isinstance("", RichRenderable)) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/align.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/align.py new file mode 100644 index 0000000..c310b66 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/align.py @@ -0,0 +1,311 @@ +import sys +from itertools import chain +from typing import TYPE_CHECKING, Iterable, Optional + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from pip._vendor.typing_extensions import Literal # pragma: no cover + +from .constrain import Constrain +from .jupyter import JupyterMixin +from .measure import Measurement +from .segment import Segment +from .style import StyleType + +if TYPE_CHECKING: + from .console import Console, ConsoleOptions, RenderableType, RenderResult + +AlignMethod = Literal["left", "center", "right"] +VerticalAlignMethod = Literal["top", "middle", "bottom"] + + +class Align(JupyterMixin): + """Align a renderable by adding spaces if necessary. + + Args: + renderable (RenderableType): A console renderable. + align (AlignMethod): One of "left", "center", or "right"" + style (StyleType, optional): An optional style to apply to the background. + vertical (Optional[VerticalAlginMethod], optional): Optional vertical align, one of "top", "middle", or "bottom". Defaults to None. + pad (bool, optional): Pad the right with spaces. Defaults to True. + width (int, optional): Restrict contents to given width, or None to use default width. Defaults to None. + height (int, optional): Set height of align renderable, or None to fit to contents. Defaults to None. + + Raises: + ValueError: if ``align`` is not one of the expected values. 
+ """ + + def __init__( + self, + renderable: "RenderableType", + align: AlignMethod = "left", + style: Optional[StyleType] = None, + *, + vertical: Optional[VerticalAlignMethod] = None, + pad: bool = True, + width: Optional[int] = None, + height: Optional[int] = None, + ) -> None: + if align not in ("left", "center", "right"): + raise ValueError( + f'invalid value for align, expected "left", "center", or "right" (not {align!r})' + ) + if vertical is not None and vertical not in ("top", "middle", "bottom"): + raise ValueError( + f'invalid value for vertical, expected "top", "middle", or "bottom" (not {vertical!r})' + ) + self.renderable = renderable + self.align = align + self.style = style + self.vertical = vertical + self.pad = pad + self.width = width + self.height = height + + def __repr__(self) -> str: + return f"Align({self.renderable!r}, {self.align!r})" + + @classmethod + def left( + cls, + renderable: "RenderableType", + style: Optional[StyleType] = None, + *, + vertical: Optional[VerticalAlignMethod] = None, + pad: bool = True, + width: Optional[int] = None, + height: Optional[int] = None, + ) -> "Align": + """Align a renderable to the left.""" + return cls( + renderable, + "left", + style=style, + vertical=vertical, + pad=pad, + width=width, + height=height, + ) + + @classmethod + def center( + cls, + renderable: "RenderableType", + style: Optional[StyleType] = None, + *, + vertical: Optional[VerticalAlignMethod] = None, + pad: bool = True, + width: Optional[int] = None, + height: Optional[int] = None, + ) -> "Align": + """Align a renderable to the center.""" + return cls( + renderable, + "center", + style=style, + vertical=vertical, + pad=pad, + width=width, + height=height, + ) + + @classmethod + def right( + cls, + renderable: "RenderableType", + style: Optional[StyleType] = None, + *, + vertical: Optional[VerticalAlignMethod] = None, + pad: bool = True, + width: Optional[int] = None, + height: Optional[int] = None, + ) -> "Align": + """Align a renderable to the right.""" + return cls( + renderable, + "right", + style=style, + vertical=vertical, + pad=pad, + width=width, + height=height, + ) + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + align = self.align + width = console.measure(self.renderable, options=options).maximum + rendered = console.render( + Constrain( + self.renderable, width if self.width is None else min(width, self.width) + ), + options.update(height=None), + ) + lines = list(Segment.split_lines(rendered)) + width, height = Segment.get_shape(lines) + lines = Segment.set_shape(lines, width, height) + new_line = Segment.line() + excess_space = options.max_width - width + style = console.get_style(self.style) if self.style is not None else None + + def generate_segments() -> Iterable[Segment]: + if excess_space <= 0: + # Exact fit + for line in lines: + yield from line + yield new_line + + elif align == "left": + # Pad on the right + pad = Segment(" " * excess_space, style) if self.pad else None + for line in lines: + yield from line + if pad: + yield pad + yield new_line + + elif align == "center": + # Pad left and right + left = excess_space // 2 + pad = Segment(" " * left, style) + pad_right = ( + Segment(" " * (excess_space - left), style) if self.pad else None + ) + for line in lines: + if left: + yield pad + yield from line + if pad_right: + yield pad_right + yield new_line + + elif align == "right": + # Padding on left + pad = Segment(" " * excess_space, style) + for line in lines: + yield pad + 
yield from line + yield new_line + + blank_line = ( + Segment(f"{' ' * (self.width or options.max_width)}\n", style) + if self.pad + else Segment("\n") + ) + + def blank_lines(count: int) -> Iterable[Segment]: + if count > 0: + for _ in range(count): + yield blank_line + + vertical_height = self.height or options.height + iter_segments: Iterable[Segment] + if self.vertical and vertical_height is not None: + if self.vertical == "top": + bottom_space = vertical_height - height + iter_segments = chain(generate_segments(), blank_lines(bottom_space)) + elif self.vertical == "middle": + top_space = (vertical_height - height) // 2 + bottom_space = vertical_height - top_space - height + iter_segments = chain( + blank_lines(top_space), + generate_segments(), + blank_lines(bottom_space), + ) + else: # self.vertical == "bottom": + top_space = vertical_height - height + iter_segments = chain(blank_lines(top_space), generate_segments()) + else: + iter_segments = generate_segments() + if self.style: + style = console.get_style(self.style) + iter_segments = Segment.apply_style(iter_segments, style) + yield from iter_segments + + def __rich_measure__( + self, console: "Console", options: "ConsoleOptions" + ) -> Measurement: + measurement = Measurement.get(console, options, self.renderable) + return measurement + + +class VerticalCenter(JupyterMixin): + """Vertically aligns a renderable. + + Warn: + This class is deprecated and may be removed in a future version. Use Align class with + `vertical="middle"`. + + Args: + renderable (RenderableType): A renderable object. + """ + + def __init__( + self, + renderable: "RenderableType", + style: Optional[StyleType] = None, + ) -> None: + self.renderable = renderable + self.style = style + + def __repr__(self) -> str: + return f"VerticalCenter({self.renderable!r})" + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + style = console.get_style(self.style) if self.style is not None else None + lines = console.render_lines( + self.renderable, options.update(height=None), pad=False + ) + width, _height = Segment.get_shape(lines) + new_line = Segment.line() + height = options.height or options.size.height + top_space = (height - len(lines)) // 2 + bottom_space = height - top_space - len(lines) + blank_line = Segment(f"{' ' * width}", style) + + def blank_lines(count: int) -> Iterable[Segment]: + for _ in range(count): + yield blank_line + yield new_line + + if top_space > 0: + yield from blank_lines(top_space) + for line in lines: + yield from line + yield new_line + if bottom_space > 0: + yield from blank_lines(bottom_space) + + def __rich_measure__( + self, console: "Console", options: "ConsoleOptions" + ) -> Measurement: + measurement = Measurement.get(console, options, self.renderable) + return measurement + + +if __name__ == "__main__": # pragma: no cover + from pip._vendor.rich.console import Console, Group + from pip._vendor.rich.highlighter import ReprHighlighter + from pip._vendor.rich.panel import Panel + + highlighter = ReprHighlighter() + console = Console() + + panel = Panel( + Group( + Align.left(highlighter("align='left'")), + Align.center(highlighter("align='center'")), + Align.right(highlighter("align='right'")), + ), + width=60, + style="on dark_blue", + title="Align", + ) + + console.print( + Align.center(panel, vertical="middle", style="on red", height=console.height) + ) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/ansi.py 
b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/ansi.py new file mode 100644 index 0000000..66365e6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/ansi.py @@ -0,0 +1,240 @@ +import re +import sys +from contextlib import suppress +from typing import Iterable, NamedTuple, Optional + +from .color import Color +from .style import Style +from .text import Text + +re_ansi = re.compile( + r""" +(?:\x1b\](.*?)\x1b\\)| +(?:\x1b([(@-Z\\-_]|\[[0-?]*[ -/]*[@-~])) +""", + re.VERBOSE, +) + + +class _AnsiToken(NamedTuple): + """Result of ansi tokenized string.""" + + plain: str = "" + sgr: Optional[str] = "" + osc: Optional[str] = "" + + +def _ansi_tokenize(ansi_text: str) -> Iterable[_AnsiToken]: + """Tokenize a string in to plain text and ANSI codes. + + Args: + ansi_text (str): A String containing ANSI codes. + + Yields: + AnsiToken: A named tuple of (plain, sgr, osc) + """ + + position = 0 + sgr: Optional[str] + osc: Optional[str] + for match in re_ansi.finditer(ansi_text): + start, end = match.span(0) + osc, sgr = match.groups() + if start > position: + yield _AnsiToken(ansi_text[position:start]) + if sgr: + if sgr == "(": + position = end + 1 + continue + if sgr.endswith("m"): + yield _AnsiToken("", sgr[1:-1], osc) + else: + yield _AnsiToken("", sgr, osc) + position = end + if position < len(ansi_text): + yield _AnsiToken(ansi_text[position:]) + + +SGR_STYLE_MAP = { + 1: "bold", + 2: "dim", + 3: "italic", + 4: "underline", + 5: "blink", + 6: "blink2", + 7: "reverse", + 8: "conceal", + 9: "strike", + 21: "underline2", + 22: "not dim not bold", + 23: "not italic", + 24: "not underline", + 25: "not blink", + 26: "not blink2", + 27: "not reverse", + 28: "not conceal", + 29: "not strike", + 30: "color(0)", + 31: "color(1)", + 32: "color(2)", + 33: "color(3)", + 34: "color(4)", + 35: "color(5)", + 36: "color(6)", + 37: "color(7)", + 39: "default", + 40: "on color(0)", + 41: "on color(1)", + 42: "on color(2)", + 43: "on color(3)", + 44: "on color(4)", + 45: "on color(5)", + 46: "on color(6)", + 47: "on color(7)", + 49: "on default", + 51: "frame", + 52: "encircle", + 53: "overline", + 54: "not frame not encircle", + 55: "not overline", + 90: "color(8)", + 91: "color(9)", + 92: "color(10)", + 93: "color(11)", + 94: "color(12)", + 95: "color(13)", + 96: "color(14)", + 97: "color(15)", + 100: "on color(8)", + 101: "on color(9)", + 102: "on color(10)", + 103: "on color(11)", + 104: "on color(12)", + 105: "on color(13)", + 106: "on color(14)", + 107: "on color(15)", +} + + +class AnsiDecoder: + """Translate ANSI code in to styled Text.""" + + def __init__(self) -> None: + self.style = Style.null() + + def decode(self, terminal_text: str) -> Iterable[Text]: + """Decode ANSI codes in an iterable of lines. + + Args: + lines (Iterable[str]): An iterable of lines of terminal output. + + Yields: + Text: Marked up Text. + """ + for line in terminal_text.splitlines(): + yield self.decode_line(line) + + def decode_line(self, line: str) -> Text: + """Decode a line containing ansi codes. + + Args: + line (str): A line of terminal output. + + Returns: + Text: A Text instance marked up according to ansi codes. 
+ """ + from_ansi = Color.from_ansi + from_rgb = Color.from_rgb + _Style = Style + text = Text() + append = text.append + line = line.rsplit("\r", 1)[-1] + for plain_text, sgr, osc in _ansi_tokenize(line): + if plain_text: + append(plain_text, self.style or None) + elif osc is not None: + if osc.startswith("8;"): + _params, semicolon, link = osc[2:].partition(";") + if semicolon: + self.style = self.style.update_link(link or None) + elif sgr is not None: + # Translate in to semi-colon separated codes + # Ignore invalid codes, because we want to be lenient + codes = [ + min(255, int(_code) if _code else 0) + for _code in sgr.split(";") + if _code.isdigit() or _code == "" + ] + iter_codes = iter(codes) + for code in iter_codes: + if code == 0: + # reset + self.style = _Style.null() + elif code in SGR_STYLE_MAP: + # styles + self.style += _Style.parse(SGR_STYLE_MAP[code]) + elif code == 38: + #  Foreground + with suppress(StopIteration): + color_type = next(iter_codes) + if color_type == 5: + self.style += _Style.from_color( + from_ansi(next(iter_codes)) + ) + elif color_type == 2: + self.style += _Style.from_color( + from_rgb( + next(iter_codes), + next(iter_codes), + next(iter_codes), + ) + ) + elif code == 48: + # Background + with suppress(StopIteration): + color_type = next(iter_codes) + if color_type == 5: + self.style += _Style.from_color( + None, from_ansi(next(iter_codes)) + ) + elif color_type == 2: + self.style += _Style.from_color( + None, + from_rgb( + next(iter_codes), + next(iter_codes), + next(iter_codes), + ), + ) + + return text + + +if sys.platform != "win32" and __name__ == "__main__": # pragma: no cover + import io + import os + import pty + import sys + + decoder = AnsiDecoder() + + stdout = io.BytesIO() + + def read(fd: int) -> bytes: + data = os.read(fd, 1024) + stdout.write(data) + return data + + pty.spawn(sys.argv[1:], read) + + from .console import Console + + console = Console(record=True) + + stdout_result = stdout.getvalue().decode("utf-8") + print(stdout_result) + + for line in decoder.decode(stdout_result): + console.print(line) + + console.save_html("stdout.html") diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/bar.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/bar.py new file mode 100644 index 0000000..ed86a55 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/bar.py @@ -0,0 +1,94 @@ +from typing import Optional, Union + +from .color import Color +from .console import Console, ConsoleOptions, RenderResult +from .jupyter import JupyterMixin +from .measure import Measurement +from .segment import Segment +from .style import Style + +# There are left-aligned characters for 1/8 to 7/8, but +# the right-aligned characters exist only for 1/8 and 4/8. +BEGIN_BLOCK_ELEMENTS = ["█", "█", "█", "▐", "▐", "▐", "▕", "▕"] +END_BLOCK_ELEMENTS = [" ", "▏", "▎", "▍", "▌", "▋", "▊", "▉"] +FULL_BLOCK = "█" + + +class Bar(JupyterMixin): + """Renders a solid block bar. + + Args: + size (float): Value for the end of the bar. + begin (float): Begin point (between 0 and size, inclusive). + end (float): End point (between 0 and size, inclusive). + width (int, optional): Width of the bar, or ``None`` for maximum width. Defaults to None. + color (Union[Color, str], optional): Color of the bar. Defaults to "default". + bgcolor (Union[Color, str], optional): Color of bar background. Defaults to "default". 
+ """ + + def __init__( + self, + size: float, + begin: float, + end: float, + *, + width: Optional[int] = None, + color: Union[Color, str] = "default", + bgcolor: Union[Color, str] = "default", + ): + self.size = size + self.begin = max(begin, 0) + self.end = min(end, size) + self.width = width + self.style = Style(color=color, bgcolor=bgcolor) + + def __repr__(self) -> str: + return f"Bar({self.size}, {self.begin}, {self.end})" + + def __rich_console__( + self, console: Console, options: ConsoleOptions + ) -> RenderResult: + + width = min( + self.width if self.width is not None else options.max_width, + options.max_width, + ) + + if self.begin >= self.end: + yield Segment(" " * width, self.style) + yield Segment.line() + return + + prefix_complete_eights = int(width * 8 * self.begin / self.size) + prefix_bar_count = prefix_complete_eights // 8 + prefix_eights_count = prefix_complete_eights % 8 + + body_complete_eights = int(width * 8 * self.end / self.size) + body_bar_count = body_complete_eights // 8 + body_eights_count = body_complete_eights % 8 + + # When start and end fall into the same cell, we ideally should render + # a symbol that's "center-aligned", but there is no good symbol in Unicode. + # In this case, we fall back to right-aligned block symbol for simplicity. + + prefix = " " * prefix_bar_count + if prefix_eights_count: + prefix += BEGIN_BLOCK_ELEMENTS[prefix_eights_count] + + body = FULL_BLOCK * body_bar_count + if body_eights_count: + body += END_BLOCK_ELEMENTS[body_eights_count] + + suffix = " " * (width - len(body)) + + yield Segment(prefix + body[len(prefix) :] + suffix, self.style) + yield Segment.line() + + def __rich_measure__( + self, console: Console, options: ConsoleOptions + ) -> Measurement: + return ( + Measurement(self.width, self.width) + if self.width is not None + else Measurement(4, options.max_width) + ) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/box.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/box.py new file mode 100644 index 0000000..97d2a94 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/box.py @@ -0,0 +1,517 @@ +import sys +from typing import TYPE_CHECKING, Iterable, List + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from pip._vendor.typing_extensions import Literal # pragma: no cover + + +from ._loop import loop_last + +if TYPE_CHECKING: + from pip._vendor.rich.console import ConsoleOptions + + +class Box: + """Defines characters to render boxes. + + ┌─┬┐ top + │ ││ head + ├─┼┤ head_row + │ ││ mid + ├─┼┤ row + ├─┼┤ foot_row + │ ││ foot + └─┴┘ bottom + + Args: + box (str): Characters making up box. + ascii (bool, optional): True if this box uses ascii characters only. Default is False. 
+ """ + + def __init__(self, box: str, *, ascii: bool = False) -> None: + self._box = box + self.ascii = ascii + line1, line2, line3, line4, line5, line6, line7, line8 = box.splitlines() + # top + self.top_left, self.top, self.top_divider, self.top_right = iter(line1) + # head + self.head_left, _, self.head_vertical, self.head_right = iter(line2) + # head_row + ( + self.head_row_left, + self.head_row_horizontal, + self.head_row_cross, + self.head_row_right, + ) = iter(line3) + + # mid + self.mid_left, _, self.mid_vertical, self.mid_right = iter(line4) + # row + self.row_left, self.row_horizontal, self.row_cross, self.row_right = iter(line5) + # foot_row + ( + self.foot_row_left, + self.foot_row_horizontal, + self.foot_row_cross, + self.foot_row_right, + ) = iter(line6) + # foot + self.foot_left, _, self.foot_vertical, self.foot_right = iter(line7) + # bottom + self.bottom_left, self.bottom, self.bottom_divider, self.bottom_right = iter( + line8 + ) + + def __repr__(self) -> str: + return "Box(...)" + + def __str__(self) -> str: + return self._box + + def substitute(self, options: "ConsoleOptions", safe: bool = True) -> "Box": + """Substitute this box for another if it won't render due to platform issues. + + Args: + options (ConsoleOptions): Console options used in rendering. + safe (bool, optional): Substitute this for another Box if there are known problems + displaying on the platform (currently only relevant on Windows). Default is True. + + Returns: + Box: A different Box or the same Box. + """ + box = self + if options.legacy_windows and safe: + box = LEGACY_WINDOWS_SUBSTITUTIONS.get(box, box) + if options.ascii_only and not box.ascii: + box = ASCII + return box + + def get_plain_headed_box(self) -> "Box": + """If this box uses special characters for the borders of the header, then + return the equivalent box that does not. + + Returns: + Box: The most similar Box that doesn't use header-specific box characters. + If the current Box already satisfies this criterion, then it's returned. + """ + return PLAIN_HEADED_SUBSTITUTIONS.get(self, self) + + def get_top(self, widths: Iterable[int]) -> str: + """Get the top of a simple box. + + Args: + widths (List[int]): Widths of columns. + + Returns: + str: A string of box characters. + """ + + parts: List[str] = [] + append = parts.append + append(self.top_left) + for last, width in loop_last(widths): + append(self.top * width) + if not last: + append(self.top_divider) + append(self.top_right) + return "".join(parts) + + def get_row( + self, + widths: Iterable[int], + level: Literal["head", "row", "foot", "mid"] = "row", + edge: bool = True, + ) -> str: + """Get the top of a simple box. + + Args: + width (List[int]): Widths of columns. + + Returns: + str: A string of box characters. 
+ """ + if level == "head": + left = self.head_row_left + horizontal = self.head_row_horizontal + cross = self.head_row_cross + right = self.head_row_right + elif level == "row": + left = self.row_left + horizontal = self.row_horizontal + cross = self.row_cross + right = self.row_right + elif level == "mid": + left = self.mid_left + horizontal = " " + cross = self.mid_vertical + right = self.mid_right + elif level == "foot": + left = self.foot_row_left + horizontal = self.foot_row_horizontal + cross = self.foot_row_cross + right = self.foot_row_right + else: + raise ValueError("level must be 'head', 'row' or 'foot'") + + parts: List[str] = [] + append = parts.append + if edge: + append(left) + for last, width in loop_last(widths): + append(horizontal * width) + if not last: + append(cross) + if edge: + append(right) + return "".join(parts) + + def get_bottom(self, widths: Iterable[int]) -> str: + """Get the bottom of a simple box. + + Args: + widths (List[int]): Widths of columns. + + Returns: + str: A string of box characters. + """ + + parts: List[str] = [] + append = parts.append + append(self.bottom_left) + for last, width in loop_last(widths): + append(self.bottom * width) + if not last: + append(self.bottom_divider) + append(self.bottom_right) + return "".join(parts) + + +ASCII: Box = Box( + """\ ++--+ +| || +|-+| +| || +|-+| +|-+| +| || ++--+ +""", + ascii=True, +) + +ASCII2: Box = Box( + """\ ++-++ +| || ++-++ +| || ++-++ ++-++ +| || ++-++ +""", + ascii=True, +) + +ASCII_DOUBLE_HEAD: Box = Box( + """\ ++-++ +| || ++=++ +| || ++-++ ++-++ +| || ++-++ +""", + ascii=True, +) + +SQUARE: Box = Box( + """\ +┌─┬┐ +│ ││ +├─┼┤ +│ ││ +├─┼┤ +├─┼┤ +│ ││ +└─┴┘ +""" +) + +SQUARE_DOUBLE_HEAD: Box = Box( + """\ +┌─┬┐ +│ ││ +╞═╪╡ +│ ││ +├─┼┤ +├─┼┤ +│ ││ +└─┴┘ +""" +) + +MINIMAL: Box = Box( + """\ + ╷ + │ +╶─┼╴ + │ +╶─┼╴ +╶─┼╴ + │ + ╵ +""" +) + + +MINIMAL_HEAVY_HEAD: Box = Box( + """\ + ╷ + │ +╺━┿╸ + │ +╶─┼╴ +╶─┼╴ + │ + ╵ +""" +) + +MINIMAL_DOUBLE_HEAD: Box = Box( + """\ + ╷ + │ + ═╪ + │ + ─┼ + ─┼ + │ + ╵ +""" +) + + +SIMPLE: Box = Box( + """\ + + + ── + + + ── + + +""" +) + +SIMPLE_HEAD: Box = Box( + """\ + + + ── + + + + + +""" +) + + +SIMPLE_HEAVY: Box = Box( + """\ + + + ━━ + + + ━━ + + +""" +) + + +HORIZONTALS: Box = Box( + """\ + ── + + ── + + ── + ── + + ── +""" +) + +ROUNDED: Box = Box( + """\ +╭─┬╮ +│ ││ +├─┼┤ +│ ││ +├─┼┤ +├─┼┤ +│ ││ +╰─┴╯ +""" +) + +HEAVY: Box = Box( + """\ +┏━┳┓ +┃ ┃┃ +┣━╋┫ +┃ ┃┃ +┣━╋┫ +┣━╋┫ +┃ ┃┃ +┗━┻┛ +""" +) + +HEAVY_EDGE: Box = Box( + """\ +┏━┯┓ +┃ │┃ +┠─┼┨ +┃ │┃ +┠─┼┨ +┠─┼┨ +┃ │┃ +┗━┷┛ +""" +) + +HEAVY_HEAD: Box = Box( + """\ +┏━┳┓ +┃ ┃┃ +┡━╇┩ +│ ││ +├─┼┤ +├─┼┤ +│ ││ +└─┴┘ +""" +) + +DOUBLE: Box = Box( + """\ +╔═╦╗ +║ ║║ +╠═╬╣ +║ ║║ +╠═╬╣ +╠═╬╣ +║ ║║ +╚═╩╝ +""" +) + +DOUBLE_EDGE: Box = Box( + """\ +╔═╤╗ +║ │║ +╟─┼╢ +║ │║ +╟─┼╢ +╟─┼╢ +║ │║ +╚═╧╝ +""" +) + +MARKDOWN: Box = Box( + """\ + +| || +|-|| +| || +|-|| +|-|| +| || + +""", + ascii=True, +) + +# Map Boxes that don't render with raster fonts on to equivalent that do +LEGACY_WINDOWS_SUBSTITUTIONS = { + ROUNDED: SQUARE, + MINIMAL_HEAVY_HEAD: MINIMAL, + SIMPLE_HEAVY: SIMPLE, + HEAVY: SQUARE, + HEAVY_EDGE: SQUARE, + HEAVY_HEAD: SQUARE, +} + +# Map headed boxes to their headerless equivalents +PLAIN_HEADED_SUBSTITUTIONS = { + HEAVY_HEAD: SQUARE, + SQUARE_DOUBLE_HEAD: SQUARE, + MINIMAL_DOUBLE_HEAD: MINIMAL, + MINIMAL_HEAVY_HEAD: MINIMAL, + ASCII_DOUBLE_HEAD: ASCII2, +} + + +if __name__ == "__main__": # pragma: no cover + + from pip._vendor.rich.columns import Columns + from pip._vendor.rich.panel import Panel + + from . 
import box as box + from .console import Console + from .table import Table + from .text import Text + + console = Console(record=True) + + BOXES = [ + "ASCII", + "ASCII2", + "ASCII_DOUBLE_HEAD", + "SQUARE", + "SQUARE_DOUBLE_HEAD", + "MINIMAL", + "MINIMAL_HEAVY_HEAD", + "MINIMAL_DOUBLE_HEAD", + "SIMPLE", + "SIMPLE_HEAD", + "SIMPLE_HEAVY", + "HORIZONTALS", + "ROUNDED", + "HEAVY", + "HEAVY_EDGE", + "HEAVY_HEAD", + "DOUBLE", + "DOUBLE_EDGE", + "MARKDOWN", + ] + + console.print(Panel("[bold green]Box Constants", style="green"), justify="center") + console.print() + + columns = Columns(expand=True, padding=2) + for box_name in sorted(BOXES): + table = Table( + show_footer=True, style="dim", border_style="not dim", expand=True + ) + table.add_column("Header 1", "Footer 1") + table.add_column("Header 2", "Footer 2") + table.add_row("Cell", "Cell") + table.add_row("Cell", "Cell") + table.box = getattr(box, box_name) + table.title = Text(f"box.{box_name}", style="magenta") + columns.add_renderable(table) + console.print(columns) + + # console.save_svg("box.svg") diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/cells.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/cells.py new file mode 100644 index 0000000..9354f9e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/cells.py @@ -0,0 +1,154 @@ +import re +from functools import lru_cache +from typing import Callable, List + +from ._cell_widths import CELL_WIDTHS + +# Regex to match sequence of the most common character ranges +_is_single_cell_widths = re.compile("^[\u0020-\u006f\u00a0\u02ff\u0370-\u0482]*$").match + + +@lru_cache(4096) +def cached_cell_len(text: str) -> int: + """Get the number of cells required to display text. + + This method always caches, which may use up a lot of memory. It is recommended to use + `cell_len` over this method. + + Args: + text (str): Text to display. + + Returns: + int: Get the number of cells required to display text. + """ + _get_size = get_character_cell_size + total_size = sum(_get_size(character) for character in text) + return total_size + + +def cell_len(text: str, _cell_len: Callable[[str], int] = cached_cell_len) -> int: + """Get the number of cells required to display text. + + Args: + text (str): Text to display. + + Returns: + int: Get the number of cells required to display text. + """ + if len(text) < 512: + return _cell_len(text) + _get_size = get_character_cell_size + total_size = sum(_get_size(character) for character in text) + return total_size + + +@lru_cache(maxsize=4096) +def get_character_cell_size(character: str) -> int: + """Get the cell size of a character. + + Args: + character (str): A single character. + + Returns: + int: Number of cells (0, 1 or 2) occupied by that character. + """ + return _get_codepoint_cell_size(ord(character)) + + +@lru_cache(maxsize=4096) +def _get_codepoint_cell_size(codepoint: int) -> int: + """Get the cell size of a character. + + Args: + codepoint (int): Codepoint of a character. + + Returns: + int: Number of cells (0, 1 or 2) occupied by that character. 
+ """ + + _table = CELL_WIDTHS + lower_bound = 0 + upper_bound = len(_table) - 1 + index = (lower_bound + upper_bound) // 2 + while True: + start, end, width = _table[index] + if codepoint < start: + upper_bound = index - 1 + elif codepoint > end: + lower_bound = index + 1 + else: + return 0 if width == -1 else width + if upper_bound < lower_bound: + break + index = (lower_bound + upper_bound) // 2 + return 1 + + +def set_cell_size(text: str, total: int) -> str: + """Set the length of a string to fit within given number of cells.""" + + if _is_single_cell_widths(text): + size = len(text) + if size < total: + return text + " " * (total - size) + return text[:total] + + if total <= 0: + return "" + cell_size = cell_len(text) + if cell_size == total: + return text + if cell_size < total: + return text + " " * (total - cell_size) + + start = 0 + end = len(text) + + # Binary search until we find the right size + while True: + pos = (start + end) // 2 + before = text[: pos + 1] + before_len = cell_len(before) + if before_len == total + 1 and cell_len(before[-1]) == 2: + return before[:-1] + " " + if before_len == total: + return before + if before_len > total: + end = pos + else: + start = pos + + +# TODO: This is inefficient +# TODO: This might not work with CWJ type characters +def chop_cells(text: str, max_size: int, position: int = 0) -> List[str]: + """Break text in to equal (cell) length strings, returning the characters in reverse + order""" + _get_character_cell_size = get_character_cell_size + characters = [ + (character, _get_character_cell_size(character)) for character in text + ] + total_size = position + lines: List[List[str]] = [[]] + append = lines[-1].append + + for character, size in reversed(characters): + if total_size + size > max_size: + lines.append([character]) + append = lines[-1].append + total_size = size + else: + total_size += size + append(character) + + return ["".join(line) for line in lines] + + +if __name__ == "__main__": # pragma: no cover + + print(get_character_cell_size("😽")) + for line in chop_cells("""这是对亚洲语言支持的测试。面对模棱两可的想法,拒绝猜测的诱惑。""", 8): + print(line) + for n in range(80, 1, -1): + print(set_cell_size("""这是对亚洲语言支持的测试。面对模棱两可的想法,拒绝猜测的诱惑。""", n) + "|") + print("x" * n) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/color.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/color.py new file mode 100644 index 0000000..dfe4559 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/color.py @@ -0,0 +1,622 @@ +import platform +import re +from colorsys import rgb_to_hls +from enum import IntEnum +from functools import lru_cache +from typing import TYPE_CHECKING, NamedTuple, Optional, Tuple + +from ._palettes import EIGHT_BIT_PALETTE, STANDARD_PALETTE, WINDOWS_PALETTE +from .color_triplet import ColorTriplet +from .repr import Result, rich_repr +from .terminal_theme import DEFAULT_TERMINAL_THEME + +if TYPE_CHECKING: # pragma: no cover + from .terminal_theme import TerminalTheme + from .text import Text + + +WINDOWS = platform.system() == "Windows" + + +class ColorSystem(IntEnum): + """One of the 3 color system supported by terminals.""" + + STANDARD = 1 + EIGHT_BIT = 2 + TRUECOLOR = 3 + WINDOWS = 4 + + def __repr__(self) -> str: + return f"ColorSystem.{self.name}" + + def __str__(self) -> str: + return repr(self) + + +class ColorType(IntEnum): + """Type of color stored in Color class.""" + + DEFAULT = 0 + STANDARD = 1 + EIGHT_BIT = 2 + TRUECOLOR = 3 + WINDOWS = 4 + + def __repr__(self) -> str: + return 
f"ColorType.{self.name}" + + +ANSI_COLOR_NAMES = { + "black": 0, + "red": 1, + "green": 2, + "yellow": 3, + "blue": 4, + "magenta": 5, + "cyan": 6, + "white": 7, + "bright_black": 8, + "bright_red": 9, + "bright_green": 10, + "bright_yellow": 11, + "bright_blue": 12, + "bright_magenta": 13, + "bright_cyan": 14, + "bright_white": 15, + "grey0": 16, + "gray0": 16, + "navy_blue": 17, + "dark_blue": 18, + "blue3": 20, + "blue1": 21, + "dark_green": 22, + "deep_sky_blue4": 25, + "dodger_blue3": 26, + "dodger_blue2": 27, + "green4": 28, + "spring_green4": 29, + "turquoise4": 30, + "deep_sky_blue3": 32, + "dodger_blue1": 33, + "green3": 40, + "spring_green3": 41, + "dark_cyan": 36, + "light_sea_green": 37, + "deep_sky_blue2": 38, + "deep_sky_blue1": 39, + "spring_green2": 47, + "cyan3": 43, + "dark_turquoise": 44, + "turquoise2": 45, + "green1": 46, + "spring_green1": 48, + "medium_spring_green": 49, + "cyan2": 50, + "cyan1": 51, + "dark_red": 88, + "deep_pink4": 125, + "purple4": 55, + "purple3": 56, + "blue_violet": 57, + "orange4": 94, + "grey37": 59, + "gray37": 59, + "medium_purple4": 60, + "slate_blue3": 62, + "royal_blue1": 63, + "chartreuse4": 64, + "dark_sea_green4": 71, + "pale_turquoise4": 66, + "steel_blue": 67, + "steel_blue3": 68, + "cornflower_blue": 69, + "chartreuse3": 76, + "cadet_blue": 73, + "sky_blue3": 74, + "steel_blue1": 81, + "pale_green3": 114, + "sea_green3": 78, + "aquamarine3": 79, + "medium_turquoise": 80, + "chartreuse2": 112, + "sea_green2": 83, + "sea_green1": 85, + "aquamarine1": 122, + "dark_slate_gray2": 87, + "dark_magenta": 91, + "dark_violet": 128, + "purple": 129, + "light_pink4": 95, + "plum4": 96, + "medium_purple3": 98, + "slate_blue1": 99, + "yellow4": 106, + "wheat4": 101, + "grey53": 102, + "gray53": 102, + "light_slate_grey": 103, + "light_slate_gray": 103, + "medium_purple": 104, + "light_slate_blue": 105, + "dark_olive_green3": 149, + "dark_sea_green": 108, + "light_sky_blue3": 110, + "sky_blue2": 111, + "dark_sea_green3": 150, + "dark_slate_gray3": 116, + "sky_blue1": 117, + "chartreuse1": 118, + "light_green": 120, + "pale_green1": 156, + "dark_slate_gray1": 123, + "red3": 160, + "medium_violet_red": 126, + "magenta3": 164, + "dark_orange3": 166, + "indian_red": 167, + "hot_pink3": 168, + "medium_orchid3": 133, + "medium_orchid": 134, + "medium_purple2": 140, + "dark_goldenrod": 136, + "light_salmon3": 173, + "rosy_brown": 138, + "grey63": 139, + "gray63": 139, + "medium_purple1": 141, + "gold3": 178, + "dark_khaki": 143, + "navajo_white3": 144, + "grey69": 145, + "gray69": 145, + "light_steel_blue3": 146, + "light_steel_blue": 147, + "yellow3": 184, + "dark_sea_green2": 157, + "light_cyan3": 152, + "light_sky_blue1": 153, + "green_yellow": 154, + "dark_olive_green2": 155, + "dark_sea_green1": 193, + "pale_turquoise1": 159, + "deep_pink3": 162, + "magenta2": 200, + "hot_pink2": 169, + "orchid": 170, + "medium_orchid1": 207, + "orange3": 172, + "light_pink3": 174, + "pink3": 175, + "plum3": 176, + "violet": 177, + "light_goldenrod3": 179, + "tan": 180, + "misty_rose3": 181, + "thistle3": 182, + "plum2": 183, + "khaki3": 185, + "light_goldenrod2": 222, + "light_yellow3": 187, + "grey84": 188, + "gray84": 188, + "light_steel_blue1": 189, + "yellow2": 190, + "dark_olive_green1": 192, + "honeydew2": 194, + "light_cyan1": 195, + "red1": 196, + "deep_pink2": 197, + "deep_pink1": 199, + "magenta1": 201, + "orange_red1": 202, + "indian_red1": 204, + "hot_pink": 206, + "dark_orange": 208, + "salmon1": 209, + "light_coral": 210, + "pale_violet_red1": 211, + 
"orchid2": 212, + "orchid1": 213, + "orange1": 214, + "sandy_brown": 215, + "light_salmon1": 216, + "light_pink1": 217, + "pink1": 218, + "plum1": 219, + "gold1": 220, + "navajo_white1": 223, + "misty_rose1": 224, + "thistle1": 225, + "yellow1": 226, + "light_goldenrod1": 227, + "khaki1": 228, + "wheat1": 229, + "cornsilk1": 230, + "grey100": 231, + "gray100": 231, + "grey3": 232, + "gray3": 232, + "grey7": 233, + "gray7": 233, + "grey11": 234, + "gray11": 234, + "grey15": 235, + "gray15": 235, + "grey19": 236, + "gray19": 236, + "grey23": 237, + "gray23": 237, + "grey27": 238, + "gray27": 238, + "grey30": 239, + "gray30": 239, + "grey35": 240, + "gray35": 240, + "grey39": 241, + "gray39": 241, + "grey42": 242, + "gray42": 242, + "grey46": 243, + "gray46": 243, + "grey50": 244, + "gray50": 244, + "grey54": 245, + "gray54": 245, + "grey58": 246, + "gray58": 246, + "grey62": 247, + "gray62": 247, + "grey66": 248, + "gray66": 248, + "grey70": 249, + "gray70": 249, + "grey74": 250, + "gray74": 250, + "grey78": 251, + "gray78": 251, + "grey82": 252, + "gray82": 252, + "grey85": 253, + "gray85": 253, + "grey89": 254, + "gray89": 254, + "grey93": 255, + "gray93": 255, +} + + +class ColorParseError(Exception): + """The color could not be parsed.""" + + +RE_COLOR = re.compile( + r"""^ +\#([0-9a-f]{6})$| +color\(([0-9]{1,3})\)$| +rgb\(([\d\s,]+)\)$ +""", + re.VERBOSE, +) + + +@rich_repr +class Color(NamedTuple): + """Terminal color definition.""" + + name: str + """The name of the color (typically the input to Color.parse).""" + type: ColorType + """The type of the color.""" + number: Optional[int] = None + """The color number, if a standard color, or None.""" + triplet: Optional[ColorTriplet] = None + """A triplet of color components, if an RGB color.""" + + def __rich__(self) -> "Text": + """Displays the actual color if Rich printed.""" + from .style import Style + from .text import Text + + return Text.assemble( + f"", + ) + + def __rich_repr__(self) -> Result: + yield self.name + yield self.type + yield "number", self.number, None + yield "triplet", self.triplet, None + + @property + def system(self) -> ColorSystem: + """Get the native color system for this color.""" + if self.type == ColorType.DEFAULT: + return ColorSystem.STANDARD + return ColorSystem(int(self.type)) + + @property + def is_system_defined(self) -> bool: + """Check if the color is ultimately defined by the system.""" + return self.system not in (ColorSystem.EIGHT_BIT, ColorSystem.TRUECOLOR) + + @property + def is_default(self) -> bool: + """Check if the color is a default color.""" + return self.type == ColorType.DEFAULT + + def get_truecolor( + self, theme: Optional["TerminalTheme"] = None, foreground: bool = True + ) -> ColorTriplet: + """Get an equivalent color triplet for this color. + + Args: + theme (TerminalTheme, optional): Optional terminal theme, or None to use default. Defaults to None. + foreground (bool, optional): True for a foreground color, or False for background. Defaults to True. + + Returns: + ColorTriplet: A color triplet containing RGB components. 
+ """ + + if theme is None: + theme = DEFAULT_TERMINAL_THEME + if self.type == ColorType.TRUECOLOR: + assert self.triplet is not None + return self.triplet + elif self.type == ColorType.EIGHT_BIT: + assert self.number is not None + return EIGHT_BIT_PALETTE[self.number] + elif self.type == ColorType.STANDARD: + assert self.number is not None + return theme.ansi_colors[self.number] + elif self.type == ColorType.WINDOWS: + assert self.number is not None + return WINDOWS_PALETTE[self.number] + else: # self.type == ColorType.DEFAULT: + assert self.number is None + return theme.foreground_color if foreground else theme.background_color + + @classmethod + def from_ansi(cls, number: int) -> "Color": + """Create a Color number from it's 8-bit ansi number. + + Args: + number (int): A number between 0-255 inclusive. + + Returns: + Color: A new Color instance. + """ + return cls( + name=f"color({number})", + type=(ColorType.STANDARD if number < 16 else ColorType.EIGHT_BIT), + number=number, + ) + + @classmethod + def from_triplet(cls, triplet: "ColorTriplet") -> "Color": + """Create a truecolor RGB color from a triplet of values. + + Args: + triplet (ColorTriplet): A color triplet containing red, green and blue components. + + Returns: + Color: A new color object. + """ + return cls(name=triplet.hex, type=ColorType.TRUECOLOR, triplet=triplet) + + @classmethod + def from_rgb(cls, red: float, green: float, blue: float) -> "Color": + """Create a truecolor from three color components in the range(0->255). + + Args: + red (float): Red component in range 0-255. + green (float): Green component in range 0-255. + blue (float): Blue component in range 0-255. + + Returns: + Color: A new color object. + """ + return cls.from_triplet(ColorTriplet(int(red), int(green), int(blue))) + + @classmethod + def default(cls) -> "Color": + """Get a Color instance representing the default color. + + Returns: + Color: Default color. 
+ """ + return cls(name="default", type=ColorType.DEFAULT) + + @classmethod + @lru_cache(maxsize=1024) + def parse(cls, color: str) -> "Color": + """Parse a color definition.""" + original_color = color + color = color.lower().strip() + + if color == "default": + return cls(color, type=ColorType.DEFAULT) + + color_number = ANSI_COLOR_NAMES.get(color) + if color_number is not None: + return cls( + color, + type=(ColorType.STANDARD if color_number < 16 else ColorType.EIGHT_BIT), + number=color_number, + ) + + color_match = RE_COLOR.match(color) + if color_match is None: + raise ColorParseError(f"{original_color!r} is not a valid color") + + color_24, color_8, color_rgb = color_match.groups() + if color_24: + triplet = ColorTriplet( + int(color_24[0:2], 16), int(color_24[2:4], 16), int(color_24[4:6], 16) + ) + return cls(color, ColorType.TRUECOLOR, triplet=triplet) + + elif color_8: + number = int(color_8) + if number > 255: + raise ColorParseError(f"color number must be <= 255 in {color!r}") + return cls( + color, + type=(ColorType.STANDARD if number < 16 else ColorType.EIGHT_BIT), + number=number, + ) + + else: # color_rgb: + components = color_rgb.split(",") + if len(components) != 3: + raise ColorParseError( + f"expected three components in {original_color!r}" + ) + red, green, blue = components + triplet = ColorTriplet(int(red), int(green), int(blue)) + if not all(component <= 255 for component in triplet): + raise ColorParseError( + f"color components must be <= 255 in {original_color!r}" + ) + return cls(color, ColorType.TRUECOLOR, triplet=triplet) + + @lru_cache(maxsize=1024) + def get_ansi_codes(self, foreground: bool = True) -> Tuple[str, ...]: + """Get the ANSI escape codes for this color.""" + _type = self.type + if _type == ColorType.DEFAULT: + return ("39" if foreground else "49",) + + elif _type == ColorType.WINDOWS: + number = self.number + assert number is not None + fore, back = (30, 40) if number < 8 else (82, 92) + return (str(fore + number if foreground else back + number),) + + elif _type == ColorType.STANDARD: + number = self.number + assert number is not None + fore, back = (30, 40) if number < 8 else (82, 92) + return (str(fore + number if foreground else back + number),) + + elif _type == ColorType.EIGHT_BIT: + assert self.number is not None + return ("38" if foreground else "48", "5", str(self.number)) + + else: # self.standard == ColorStandard.TRUECOLOR: + assert self.triplet is not None + red, green, blue = self.triplet + return ("38" if foreground else "48", "2", str(red), str(green), str(blue)) + + @lru_cache(maxsize=1024) + def downgrade(self, system: ColorSystem) -> "Color": + """Downgrade a color system to a system with fewer colors.""" + + if self.type in (ColorType.DEFAULT, system): + return self + # Convert to 8-bit color from truecolor color + if system == ColorSystem.EIGHT_BIT and self.system == ColorSystem.TRUECOLOR: + assert self.triplet is not None + _h, l, s = rgb_to_hls(*self.triplet.normalized) + # If saturation is under 15% assume it is grayscale + if s < 0.15: + gray = round(l * 25.0) + if gray == 0: + color_number = 16 + elif gray == 25: + color_number = 231 + else: + color_number = 231 + gray + return Color(self.name, ColorType.EIGHT_BIT, number=color_number) + + red, green, blue = self.triplet + six_red = red / 95 if red < 95 else 1 + (red - 95) / 40 + six_green = green / 95 if green < 95 else 1 + (green - 95) / 40 + six_blue = blue / 95 if blue < 95 else 1 + (blue - 95) / 40 + + color_number = ( + 16 + 36 * round(six_red) + 6 * 
round(six_green) + round(six_blue) + ) + return Color(self.name, ColorType.EIGHT_BIT, number=color_number) + + # Convert to standard from truecolor or 8-bit + elif system == ColorSystem.STANDARD: + if self.system == ColorSystem.TRUECOLOR: + assert self.triplet is not None + triplet = self.triplet + else: # self.system == ColorSystem.EIGHT_BIT + assert self.number is not None + triplet = ColorTriplet(*EIGHT_BIT_PALETTE[self.number]) + + color_number = STANDARD_PALETTE.match(triplet) + return Color(self.name, ColorType.STANDARD, number=color_number) + + elif system == ColorSystem.WINDOWS: + if self.system == ColorSystem.TRUECOLOR: + assert self.triplet is not None + triplet = self.triplet + else: # self.system == ColorSystem.EIGHT_BIT + assert self.number is not None + if self.number < 16: + return Color(self.name, ColorType.WINDOWS, number=self.number) + triplet = ColorTriplet(*EIGHT_BIT_PALETTE[self.number]) + + color_number = WINDOWS_PALETTE.match(triplet) + return Color(self.name, ColorType.WINDOWS, number=color_number) + + return self + + +def parse_rgb_hex(hex_color: str) -> ColorTriplet: + """Parse six hex characters in to RGB triplet.""" + assert len(hex_color) == 6, "must be 6 characters" + color = ColorTriplet( + int(hex_color[0:2], 16), int(hex_color[2:4], 16), int(hex_color[4:6], 16) + ) + return color + + +def blend_rgb( + color1: ColorTriplet, color2: ColorTriplet, cross_fade: float = 0.5 +) -> ColorTriplet: + """Blend one RGB color in to another.""" + r1, g1, b1 = color1 + r2, g2, b2 = color2 + new_color = ColorTriplet( + int(r1 + (r2 - r1) * cross_fade), + int(g1 + (g2 - g1) * cross_fade), + int(b1 + (b2 - b1) * cross_fade), + ) + return new_color + + +if __name__ == "__main__": # pragma: no cover + + from .console import Console + from .table import Table + from .text import Text + + console = Console() + + table = Table(show_footer=False, show_edge=True) + table.add_column("Color", width=10, overflow="ellipsis") + table.add_column("Number", justify="right", style="yellow") + table.add_column("Name", style="green") + table.add_column("Hex", style="blue") + table.add_column("RGB", style="magenta") + + colors = sorted((v, k) for k, v in ANSI_COLOR_NAMES.items()) + for color_number, name in colors: + if "grey" in name: + continue + color_cell = Text(" " * 10, style=f"on {name}") + if color_number < 16: + table.add_row(color_cell, f"{color_number}", Text(f'"{name}"')) + else: + color = EIGHT_BIT_PALETTE[color_number] # type: ignore[has-type] + table.add_row( + color_cell, str(color_number), Text(f'"{name}"'), color.hex, color.rgb + ) + + console.print(table) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/color_triplet.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/color_triplet.py new file mode 100644 index 0000000..02cab32 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/color_triplet.py @@ -0,0 +1,38 @@ +from typing import NamedTuple, Tuple + + +class ColorTriplet(NamedTuple): + """The red, green, and blue components of a color.""" + + red: int + """Red component in 0 to 255 range.""" + green: int + """Green component in 0 to 255 range.""" + blue: int + """Blue component in 0 to 255 range.""" + + @property + def hex(self) -> str: + """get the color triplet in CSS style.""" + red, green, blue = self + return f"#{red:02x}{green:02x}{blue:02x}" + + @property + def rgb(self) -> str: + """The color in RGB format. + + Returns: + str: An rgb color, e.g. ``"rgb(100,23,255)"``. 
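As an aside, a minimal sketch of how the color parsing, downgrading, and blending code above behaves. This is illustrative only and not part of the vendored file; it imports the vendored copies via pip._vendor, which is not a public pip API.

    from pip._vendor.rich.color import Color, ColorSystem, blend_rgb
    from pip._vendor.rich.color_triplet import ColorTriplet

    # Parse a hex definition into a truecolor Color, then downgrade it
    # to the nearest entry in the 256-color palette.
    orange = Color.parse("#ff8800")
    print(orange.get_ansi_codes())            # ('38', '2', '255', '136', '0')
    eight_bit = orange.downgrade(ColorSystem.EIGHT_BIT)
    print(eight_bit.type, eight_bit.number)

    # blend_rgb interpolates two triplets; cross_fade=0.5 is the midpoint.
    mid = blend_rgb(ColorTriplet(255, 0, 0), ColorTriplet(0, 0, 255))
    print(mid.hex)                            # "#7f007f"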
+ """ + red, green, blue = self + return f"rgb({red},{green},{blue})" + + @property + def normalized(self) -> Tuple[float, float, float]: + """Convert components into floats between 0 and 1. + + Returns: + Tuple[float, float, float]: A tuple of three normalized colour components. + """ + red, green, blue = self + return red / 255.0, green / 255.0, blue / 255.0 diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/columns.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/columns.py new file mode 100644 index 0000000..669a3a7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/columns.py @@ -0,0 +1,187 @@ +from collections import defaultdict +from itertools import chain +from operator import itemgetter +from typing import Dict, Iterable, List, Optional, Tuple + +from .align import Align, AlignMethod +from .console import Console, ConsoleOptions, RenderableType, RenderResult +from .constrain import Constrain +from .measure import Measurement +from .padding import Padding, PaddingDimensions +from .table import Table +from .text import TextType +from .jupyter import JupyterMixin + + +class Columns(JupyterMixin): + """Display renderables in neat columns. + + Args: + renderables (Iterable[RenderableType]): Any number of Rich renderables (including str). + width (int, optional): The desired width of the columns, or None to auto detect. Defaults to None. + padding (PaddingDimensions, optional): Optional padding around cells. Defaults to (0, 1). + expand (bool, optional): Expand columns to full width. Defaults to False. + equal (bool, optional): Arrange in to equal sized columns. Defaults to False. + column_first (bool, optional): Align items from top to bottom (rather than left to right). Defaults to False. + right_to_left (bool, optional): Start column from right hand side. Defaults to False. + align (str, optional): Align value ("left", "right", or "center") or None for default. Defaults to None. + title (TextType, optional): Optional title for Columns. + """ + + def __init__( + self, + renderables: Optional[Iterable[RenderableType]] = None, + padding: PaddingDimensions = (0, 1), + *, + width: Optional[int] = None, + expand: bool = False, + equal: bool = False, + column_first: bool = False, + right_to_left: bool = False, + align: Optional[AlignMethod] = None, + title: Optional[TextType] = None, + ) -> None: + self.renderables = list(renderables or []) + self.width = width + self.padding = padding + self.expand = expand + self.equal = equal + self.column_first = column_first + self.right_to_left = right_to_left + self.align: Optional[AlignMethod] = align + self.title = title + + def add_renderable(self, renderable: RenderableType) -> None: + """Add a renderable to the columns. + + Args: + renderable (RenderableType): Any renderable object. 
+ """ + self.renderables.append(renderable) + + def __rich_console__( + self, console: Console, options: ConsoleOptions + ) -> RenderResult: + render_str = console.render_str + renderables = [ + render_str(renderable) if isinstance(renderable, str) else renderable + for renderable in self.renderables + ] + if not renderables: + return + _top, right, _bottom, left = Padding.unpack(self.padding) + width_padding = max(left, right) + max_width = options.max_width + widths: Dict[int, int] = defaultdict(int) + column_count = len(renderables) + + get_measurement = Measurement.get + renderable_widths = [ + get_measurement(console, options, renderable).maximum + for renderable in renderables + ] + if self.equal: + renderable_widths = [max(renderable_widths)] * len(renderable_widths) + + def iter_renderables( + column_count: int, + ) -> Iterable[Tuple[int, Optional[RenderableType]]]: + item_count = len(renderables) + if self.column_first: + width_renderables = list(zip(renderable_widths, renderables)) + + column_lengths: List[int] = [item_count // column_count] * column_count + for col_no in range(item_count % column_count): + column_lengths[col_no] += 1 + + row_count = (item_count + column_count - 1) // column_count + cells = [[-1] * column_count for _ in range(row_count)] + row = col = 0 + for index in range(item_count): + cells[row][col] = index + column_lengths[col] -= 1 + if column_lengths[col]: + row += 1 + else: + col += 1 + row = 0 + for index in chain.from_iterable(cells): + if index == -1: + break + yield width_renderables[index] + else: + yield from zip(renderable_widths, renderables) + # Pad odd elements with spaces + if item_count % column_count: + for _ in range(column_count - (item_count % column_count)): + yield 0, None + + table = Table.grid(padding=self.padding, collapse_padding=True, pad_edge=False) + table.expand = self.expand + table.title = self.title + + if self.width is not None: + column_count = (max_width) // (self.width + width_padding) + for _ in range(column_count): + table.add_column(width=self.width) + else: + while column_count > 1: + widths.clear() + column_no = 0 + for renderable_width, _ in iter_renderables(column_count): + widths[column_no] = max(widths[column_no], renderable_width) + total_width = sum(widths.values()) + width_padding * ( + len(widths) - 1 + ) + if total_width > max_width: + column_count = len(widths) - 1 + break + else: + column_no = (column_no + 1) % column_count + else: + break + + get_renderable = itemgetter(1) + _renderables = [ + get_renderable(_renderable) + for _renderable in iter_renderables(column_count) + ] + if self.equal: + _renderables = [ + None + if renderable is None + else Constrain(renderable, renderable_widths[0]) + for renderable in _renderables + ] + if self.align: + align = self.align + _Align = Align + _renderables = [ + None if renderable is None else _Align(renderable, align) + for renderable in _renderables + ] + + right_to_left = self.right_to_left + add_row = table.add_row + for start in range(0, len(_renderables), column_count): + row = _renderables[start : start + column_count] + if right_to_left: + row = row[::-1] + add_row(*row) + yield table + + +if __name__ == "__main__": # pragma: no cover + import os + + console = Console() + + files = [f"{i} {s}" for i, s in enumerate(sorted(os.listdir()))] + columns = Columns(files, padding=(0, 1), expand=False, equal=False) + console.print(columns) + console.rule() + columns.column_first = True + console.print(columns) + columns.right_to_left = True + console.rule() + 
console.print(columns) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/console.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/console.py new file mode 100644 index 0000000..e559cbb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/console.py @@ -0,0 +1,2633 @@ +import inspect +import os +import platform +import sys +import threading +import zlib +from abc import ABC, abstractmethod +from dataclasses import dataclass, field +from datetime import datetime +from functools import wraps +from getpass import getpass +from html import escape +from inspect import isclass +from itertools import islice +from math import ceil +from time import monotonic +from types import FrameType, ModuleType, TracebackType +from typing import ( + IO, + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterable, + List, + Mapping, + NamedTuple, + Optional, + TextIO, + Tuple, + Type, + Union, + cast, +) + +from pip._vendor.rich._null_file import NULL_FILE + +if sys.version_info >= (3, 8): + from typing import Literal, Protocol, runtime_checkable +else: + from pip._vendor.typing_extensions import ( + Literal, + Protocol, + runtime_checkable, + ) # pragma: no cover + +from . import errors, themes +from ._emoji_replace import _emoji_replace +from ._export_format import CONSOLE_HTML_FORMAT, CONSOLE_SVG_FORMAT +from ._fileno import get_fileno +from ._log_render import FormatTimeCallable, LogRender +from .align import Align, AlignMethod +from .color import ColorSystem, blend_rgb +from .control import Control +from .emoji import EmojiVariant +from .highlighter import NullHighlighter, ReprHighlighter +from .markup import render as render_markup +from .measure import Measurement, measure_renderables +from .pager import Pager, SystemPager +from .pretty import Pretty, is_expandable +from .protocol import rich_cast +from .region import Region +from .scope import render_scope +from .screen import Screen +from .segment import Segment +from .style import Style, StyleType +from .styled import Styled +from .terminal_theme import DEFAULT_TERMINAL_THEME, SVG_EXPORT_THEME, TerminalTheme +from .text import Text, TextType +from .theme import Theme, ThemeStack + +if TYPE_CHECKING: + from ._windows import WindowsConsoleFeatures + from .live import Live + from .status import Status + +JUPYTER_DEFAULT_COLUMNS = 115 +JUPYTER_DEFAULT_LINES = 100 +WINDOWS = platform.system() == "Windows" + +HighlighterType = Callable[[Union[str, "Text"]], "Text"] +JustifyMethod = Literal["default", "left", "center", "right", "full"] +OverflowMethod = Literal["fold", "crop", "ellipsis", "ignore"] + + +class NoChange: + pass + + +NO_CHANGE = NoChange() + +try: + _STDIN_FILENO = sys.__stdin__.fileno() +except Exception: + _STDIN_FILENO = 0 +try: + _STDOUT_FILENO = sys.__stdout__.fileno() +except Exception: + _STDOUT_FILENO = 1 +try: + _STDERR_FILENO = sys.__stderr__.fileno() +except Exception: + _STDERR_FILENO = 2 + +_STD_STREAMS = (_STDIN_FILENO, _STDOUT_FILENO, _STDERR_FILENO) +_STD_STREAMS_OUTPUT = (_STDOUT_FILENO, _STDERR_FILENO) + + +_TERM_COLORS = { + "kitty": ColorSystem.EIGHT_BIT, + "256color": ColorSystem.EIGHT_BIT, + "16color": ColorSystem.STANDARD, +} + + +class ConsoleDimensions(NamedTuple): + """Size of the terminal.""" + + width: int + """The width of the console in 'cells'.""" + height: int + """The height of the console in lines.""" + + +@dataclass +class ConsoleOptions: + """Options for __rich_console__ method.""" + + size: ConsoleDimensions + """Size of console.""" + legacy_windows: bool + 
"""legacy_windows: flag for legacy windows.""" + min_width: int + """Minimum width of renderable.""" + max_width: int + """Maximum width of renderable.""" + is_terminal: bool + """True if the target is a terminal, otherwise False.""" + encoding: str + """Encoding of terminal.""" + max_height: int + """Height of container (starts as terminal)""" + justify: Optional[JustifyMethod] = None + """Justify value override for renderable.""" + overflow: Optional[OverflowMethod] = None + """Overflow value override for renderable.""" + no_wrap: Optional[bool] = False + """Disable wrapping for text.""" + highlight: Optional[bool] = None + """Highlight override for render_str.""" + markup: Optional[bool] = None + """Enable markup when rendering strings.""" + height: Optional[int] = None + + @property + def ascii_only(self) -> bool: + """Check if renderables should use ascii only.""" + return not self.encoding.startswith("utf") + + def copy(self) -> "ConsoleOptions": + """Return a copy of the options. + + Returns: + ConsoleOptions: a copy of self. + """ + options: ConsoleOptions = ConsoleOptions.__new__(ConsoleOptions) + options.__dict__ = self.__dict__.copy() + return options + + def update( + self, + *, + width: Union[int, NoChange] = NO_CHANGE, + min_width: Union[int, NoChange] = NO_CHANGE, + max_width: Union[int, NoChange] = NO_CHANGE, + justify: Union[Optional[JustifyMethod], NoChange] = NO_CHANGE, + overflow: Union[Optional[OverflowMethod], NoChange] = NO_CHANGE, + no_wrap: Union[Optional[bool], NoChange] = NO_CHANGE, + highlight: Union[Optional[bool], NoChange] = NO_CHANGE, + markup: Union[Optional[bool], NoChange] = NO_CHANGE, + height: Union[Optional[int], NoChange] = NO_CHANGE, + ) -> "ConsoleOptions": + """Update values, return a copy.""" + options = self.copy() + if not isinstance(width, NoChange): + options.min_width = options.max_width = max(0, width) + if not isinstance(min_width, NoChange): + options.min_width = min_width + if not isinstance(max_width, NoChange): + options.max_width = max_width + if not isinstance(justify, NoChange): + options.justify = justify + if not isinstance(overflow, NoChange): + options.overflow = overflow + if not isinstance(no_wrap, NoChange): + options.no_wrap = no_wrap + if not isinstance(highlight, NoChange): + options.highlight = highlight + if not isinstance(markup, NoChange): + options.markup = markup + if not isinstance(height, NoChange): + if height is not None: + options.max_height = height + options.height = None if height is None else max(0, height) + return options + + def update_width(self, width: int) -> "ConsoleOptions": + """Update just the width, return a copy. + + Args: + width (int): New width (sets both min_width and max_width) + + Returns: + ~ConsoleOptions: New console options instance. + """ + options = self.copy() + options.min_width = options.max_width = max(0, width) + return options + + def update_height(self, height: int) -> "ConsoleOptions": + """Update the height, and return a copy. + + Args: + height (int): New height + + Returns: + ~ConsoleOptions: New Console options instance. + """ + options = self.copy() + options.max_height = options.height = height + return options + + def reset_height(self) -> "ConsoleOptions": + """Return a copy of the options with height set to ``None``. + + Returns: + ~ConsoleOptions: New console options instance. 
+ """ + options = self.copy() + options.height = None + return options + + def update_dimensions(self, width: int, height: int) -> "ConsoleOptions": + """Update the width and height, and return a copy. + + Args: + width (int): New width (sets both min_width and max_width). + height (int): New height. + + Returns: + ~ConsoleOptions: New console options instance. + """ + options = self.copy() + options.min_width = options.max_width = max(0, width) + options.height = options.max_height = height + return options + + +@runtime_checkable +class RichCast(Protocol): + """An object that may be 'cast' to a console renderable.""" + + def __rich__( + self, + ) -> Union["ConsoleRenderable", "RichCast", str]: # pragma: no cover + ... + + +@runtime_checkable +class ConsoleRenderable(Protocol): + """An object that supports the console protocol.""" + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": # pragma: no cover + ... + + +# A type that may be rendered by Console. +RenderableType = Union[ConsoleRenderable, RichCast, str] + +# The result of calling a __rich_console__ method. +RenderResult = Iterable[Union[RenderableType, Segment]] + +_null_highlighter = NullHighlighter() + + +class CaptureError(Exception): + """An error in the Capture context manager.""" + + +class NewLine: + """A renderable to generate new line(s)""" + + def __init__(self, count: int = 1) -> None: + self.count = count + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> Iterable[Segment]: + yield Segment("\n" * self.count) + + +class ScreenUpdate: + """Render a list of lines at a given offset.""" + + def __init__(self, lines: List[List[Segment]], x: int, y: int) -> None: + self._lines = lines + self.x = x + self.y = y + + def __rich_console__( + self, console: "Console", options: ConsoleOptions + ) -> RenderResult: + x = self.x + move_to = Control.move_to + for offset, line in enumerate(self._lines, self.y): + yield move_to(x, offset) + yield from line + + +class Capture: + """Context manager to capture the result of printing to the console. + See :meth:`~rich.console.Console.capture` for how to use. + + Args: + console (Console): A console instance to capture output. + """ + + def __init__(self, console: "Console") -> None: + self._console = console + self._result: Optional[str] = None + + def __enter__(self) -> "Capture": + self._console.begin_capture() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self._result = self._console.end_capture() + + def get(self) -> str: + """Get the result of the capture.""" + if self._result is None: + raise CaptureError( + "Capture result is not available until context manager exits." + ) + return self._result + + +class ThemeContext: + """A context manager to use a temporary theme. See :meth:`~rich.console.Console.use_theme` for usage.""" + + def __init__(self, console: "Console", theme: Theme, inherit: bool = True) -> None: + self.console = console + self.theme = theme + self.inherit = inherit + + def __enter__(self) -> "ThemeContext": + self.console.push_theme(self.theme) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.console.pop_theme() + + +class PagerContext: + """A context manager that 'pages' content. 
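For reference, the ConsoleRenderable protocol defined above only requires a __rich_console__ method. A small illustrative sketch follows, assuming the Segment and Style constructors from the vendored modules imported earlier in this file; it is not part of the vendored source.

    from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult
    from pip._vendor.rich.segment import Segment
    from pip._vendor.rich.style import Style

    class TwoTone:
        # Any object with a __rich_console__ method satisfies the
        # ConsoleRenderable protocol and can be passed to Console.print().
        def __rich_console__(
            self, console: Console, options: ConsoleOptions
        ) -> RenderResult:
            yield Segment("hello ", Style(color="magenta"))
            yield Segment("world\n", Style(color="cyan"))

    Console().print(TwoTone())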
See :meth:`~rich.console.Console.pager` for usage.""" + + def __init__( + self, + console: "Console", + pager: Optional[Pager] = None, + styles: bool = False, + links: bool = False, + ) -> None: + self._console = console + self.pager = SystemPager() if pager is None else pager + self.styles = styles + self.links = links + + def __enter__(self) -> "PagerContext": + self._console._enter_buffer() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + if exc_type is None: + with self._console._lock: + buffer: List[Segment] = self._console._buffer[:] + del self._console._buffer[:] + segments: Iterable[Segment] = buffer + if not self.styles: + segments = Segment.strip_styles(segments) + elif not self.links: + segments = Segment.strip_links(segments) + content = self._console._render_buffer(segments) + self.pager.show(content) + self._console._exit_buffer() + + +class ScreenContext: + """A context manager that enables an alternative screen. See :meth:`~rich.console.Console.screen` for usage.""" + + def __init__( + self, console: "Console", hide_cursor: bool, style: StyleType = "" + ) -> None: + self.console = console + self.hide_cursor = hide_cursor + self.screen = Screen(style=style) + self._changed = False + + def update( + self, *renderables: RenderableType, style: Optional[StyleType] = None + ) -> None: + """Update the screen. + + Args: + renderable (RenderableType, optional): Optional renderable to replace current renderable, + or None for no change. Defaults to None. + style: (Style, optional): Replacement style, or None for no change. Defaults to None. + """ + if renderables: + self.screen.renderable = ( + Group(*renderables) if len(renderables) > 1 else renderables[0] + ) + if style is not None: + self.screen.style = style + self.console.print(self.screen, end="") + + def __enter__(self) -> "ScreenContext": + self._changed = self.console.set_alt_screen(True) + if self._changed and self.hide_cursor: + self.console.show_cursor(False) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + if self._changed: + self.console.set_alt_screen(False) + if self.hide_cursor: + self.console.show_cursor(True) + + +class Group: + """Takes a group of renderables and returns a renderable object that renders the group. + + Args: + renderables (Iterable[RenderableType]): An iterable of renderable objects. + fit (bool, optional): Fit dimension of group to contents, or fill available space. Defaults to True. + """ + + def __init__(self, *renderables: "RenderableType", fit: bool = True) -> None: + self._renderables = renderables + self.fit = fit + self._render: Optional[List[RenderableType]] = None + + @property + def renderables(self) -> List["RenderableType"]: + if self._render is None: + self._render = list(self._renderables) + return self._render + + def __rich_measure__( + self, console: "Console", options: "ConsoleOptions" + ) -> "Measurement": + if self.fit: + return measure_renderables(console, options, self.renderables) + else: + return Measurement(options.max_width, options.max_width) + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> RenderResult: + yield from self.renderables + + +def group(fit: bool = True) -> Callable[..., Callable[..., Group]]: + """A decorator that turns an iterable of renderables in to a group. 
+ + Args: + fit (bool, optional): Fit dimension of group to contents, or fill available space. Defaults to True. + """ + + def decorator( + method: Callable[..., Iterable[RenderableType]] + ) -> Callable[..., Group]: + """Convert a method that returns an iterable of renderables in to a Group.""" + + @wraps(method) + def _replace(*args: Any, **kwargs: Any) -> Group: + renderables = method(*args, **kwargs) + return Group(*renderables, fit=fit) + + return _replace + + return decorator + + +def _is_jupyter() -> bool: # pragma: no cover + """Check if we're running in a Jupyter notebook.""" + try: + get_ipython # type: ignore[name-defined] + except NameError: + return False + ipython = get_ipython() # type: ignore[name-defined] + shell = ipython.__class__.__name__ + if ( + "google.colab" in str(ipython.__class__) + or os.getenv("DATABRICKS_RUNTIME_VERSION") + or shell == "ZMQInteractiveShell" + ): + return True # Jupyter notebook or qtconsole + elif shell == "TerminalInteractiveShell": + return False # Terminal running IPython + else: + return False # Other type (?) + + +COLOR_SYSTEMS = { + "standard": ColorSystem.STANDARD, + "256": ColorSystem.EIGHT_BIT, + "truecolor": ColorSystem.TRUECOLOR, + "windows": ColorSystem.WINDOWS, +} + +_COLOR_SYSTEMS_NAMES = {system: name for name, system in COLOR_SYSTEMS.items()} + + +@dataclass +class ConsoleThreadLocals(threading.local): + """Thread local values for Console context.""" + + theme_stack: ThemeStack + buffer: List[Segment] = field(default_factory=list) + buffer_index: int = 0 + + +class RenderHook(ABC): + """Provides hooks in to the render process.""" + + @abstractmethod + def process_renderables( + self, renderables: List[ConsoleRenderable] + ) -> List[ConsoleRenderable]: + """Called with a list of objects to render. + + This method can return a new list of renderables, or modify and return the same list. + + Args: + renderables (List[ConsoleRenderable]): A number of renderable objects. + + Returns: + List[ConsoleRenderable]: A replacement list of renderables. + """ + + +_windows_console_features: Optional["WindowsConsoleFeatures"] = None + + +def get_windows_console_features() -> "WindowsConsoleFeatures": # pragma: no cover + global _windows_console_features + if _windows_console_features is not None: + return _windows_console_features + from ._windows import get_windows_console_features + + _windows_console_features = get_windows_console_features() + return _windows_console_features + + +def detect_legacy_windows() -> bool: + """Detect legacy Windows.""" + return WINDOWS and not get_windows_console_features().vt + + +class Console: + """A high level console interface. + + Args: + color_system (str, optional): The color system supported by your terminal, + either ``"standard"``, ``"256"`` or ``"truecolor"``. Leave as ``"auto"`` to autodetect. + force_terminal (Optional[bool], optional): Enable/disable terminal control codes, or None to auto-detect terminal. Defaults to None. + force_jupyter (Optional[bool], optional): Enable/disable Jupyter rendering, or None to auto-detect Jupyter. Defaults to None. + force_interactive (Optional[bool], optional): Enable/disable interactive mode, or None to auto detect. Defaults to None. + soft_wrap (Optional[bool], optional): Set soft wrap default on print method. Defaults to False. + theme (Theme, optional): An optional style theme object, or ``None`` for default theme. + stderr (bool, optional): Use stderr rather than stdout if ``file`` is not specified. Defaults to False. 
+        file (IO, optional): A file object where the console should write to. Defaults to stdout.
+        quiet (bool, optional): Boolean to suppress all output. Defaults to False.
+        width (int, optional): The width of the terminal. Leave as default to auto-detect width.
+        height (int, optional): The height of the terminal. Leave as default to auto-detect height.
+        style (StyleType, optional): Style to apply to all output, or None for no style. Defaults to None.
+        no_color (Optional[bool], optional): Enable no color mode, or None to auto detect. Defaults to None.
+        tab_size (int, optional): Number of spaces used to replace a tab character. Defaults to 8.
+        record (bool, optional): Boolean to enable recording of terminal output,
+            required to call :meth:`export_html`, :meth:`export_svg`, and :meth:`export_text`. Defaults to False.
+        markup (bool, optional): Boolean to enable :ref:`console_markup`. Defaults to True.
+        emoji (bool, optional): Enable emoji code. Defaults to True.
+        emoji_variant (str, optional): Optional emoji variant, either "text" or "emoji". Defaults to None.
+        highlight (bool, optional): Enable automatic highlighting. Defaults to True.
+        log_time (bool, optional): Boolean to enable logging of time by :meth:`log` methods. Defaults to True.
+        log_path (bool, optional): Boolean to enable the logging of the caller by :meth:`log`. Defaults to True.
+        log_time_format (Union[str, FormatTimeCallable], optional): If ``log_time`` is enabled, either string for strftime or callable that formats the time. Defaults to "[%X]".
+        highlighter (HighlighterType, optional): Default highlighter.
+        legacy_windows (bool, optional): Enable legacy Windows mode, or ``None`` to auto detect. Defaults to ``None``.
+        safe_box (bool, optional): Restrict box options that don't render on legacy Windows.
+        get_datetime (Callable[[], datetime], optional): Callable that gets the current time as a datetime.datetime object (used by Console.log),
+            or None for datetime.now.
+        get_time (Callable[[], time], optional): Callable that gets the current time in seconds, default uses time.monotonic.
+ """ + + _environ: Mapping[str, str] = os.environ + + def __init__( + self, + *, + color_system: Optional[ + Literal["auto", "standard", "256", "truecolor", "windows"] + ] = "auto", + force_terminal: Optional[bool] = None, + force_jupyter: Optional[bool] = None, + force_interactive: Optional[bool] = None, + soft_wrap: bool = False, + theme: Optional[Theme] = None, + stderr: bool = False, + file: Optional[IO[str]] = None, + quiet: bool = False, + width: Optional[int] = None, + height: Optional[int] = None, + style: Optional[StyleType] = None, + no_color: Optional[bool] = None, + tab_size: int = 8, + record: bool = False, + markup: bool = True, + emoji: bool = True, + emoji_variant: Optional[EmojiVariant] = None, + highlight: bool = True, + log_time: bool = True, + log_path: bool = True, + log_time_format: Union[str, FormatTimeCallable] = "[%X]", + highlighter: Optional["HighlighterType"] = ReprHighlighter(), + legacy_windows: Optional[bool] = None, + safe_box: bool = True, + get_datetime: Optional[Callable[[], datetime]] = None, + get_time: Optional[Callable[[], float]] = None, + _environ: Optional[Mapping[str, str]] = None, + ): + # Copy of os.environ allows us to replace it for testing + if _environ is not None: + self._environ = _environ + + self.is_jupyter = _is_jupyter() if force_jupyter is None else force_jupyter + if self.is_jupyter: + if width is None: + jupyter_columns = self._environ.get("JUPYTER_COLUMNS") + if jupyter_columns is not None and jupyter_columns.isdigit(): + width = int(jupyter_columns) + else: + width = JUPYTER_DEFAULT_COLUMNS + if height is None: + jupyter_lines = self._environ.get("JUPYTER_LINES") + if jupyter_lines is not None and jupyter_lines.isdigit(): + height = int(jupyter_lines) + else: + height = JUPYTER_DEFAULT_LINES + + self.tab_size = tab_size + self.record = record + self._markup = markup + self._emoji = emoji + self._emoji_variant: Optional[EmojiVariant] = emoji_variant + self._highlight = highlight + self.legacy_windows: bool = ( + (detect_legacy_windows() and not self.is_jupyter) + if legacy_windows is None + else legacy_windows + ) + + if width is None: + columns = self._environ.get("COLUMNS") + if columns is not None and columns.isdigit(): + width = int(columns) - self.legacy_windows + if height is None: + lines = self._environ.get("LINES") + if lines is not None and lines.isdigit(): + height = int(lines) + + self.soft_wrap = soft_wrap + self._width = width + self._height = height + + self._color_system: Optional[ColorSystem] + + self._force_terminal = None + if force_terminal is not None: + self._force_terminal = force_terminal + + self._file = file + self.quiet = quiet + self.stderr = stderr + + if color_system is None: + self._color_system = None + elif color_system == "auto": + self._color_system = self._detect_color_system() + else: + self._color_system = COLOR_SYSTEMS[color_system] + + self._lock = threading.RLock() + self._log_render = LogRender( + show_time=log_time, + show_path=log_path, + time_format=log_time_format, + ) + self.highlighter: HighlighterType = highlighter or _null_highlighter + self.safe_box = safe_box + self.get_datetime = get_datetime or datetime.now + self.get_time = get_time or monotonic + self.style = style + self.no_color = ( + no_color if no_color is not None else "NO_COLOR" in self._environ + ) + self.is_interactive = ( + (self.is_terminal and not self.is_dumb_terminal) + if force_interactive is None + else force_interactive + ) + + self._record_buffer_lock = threading.RLock() + self._thread_locals = 
ConsoleThreadLocals( + theme_stack=ThemeStack(themes.DEFAULT if theme is None else theme) + ) + self._record_buffer: List[Segment] = [] + self._render_hooks: List[RenderHook] = [] + self._live: Optional["Live"] = None + self._is_alt_screen = False + + def __repr__(self) -> str: + return f"" + + @property + def file(self) -> IO[str]: + """Get the file object to write to.""" + file = self._file or (sys.stderr if self.stderr else sys.stdout) + file = getattr(file, "rich_proxied_file", file) + if file is None: + file = NULL_FILE + return file + + @file.setter + def file(self, new_file: IO[str]) -> None: + """Set a new file object.""" + self._file = new_file + + @property + def _buffer(self) -> List[Segment]: + """Get a thread local buffer.""" + return self._thread_locals.buffer + + @property + def _buffer_index(self) -> int: + """Get a thread local buffer.""" + return self._thread_locals.buffer_index + + @_buffer_index.setter + def _buffer_index(self, value: int) -> None: + self._thread_locals.buffer_index = value + + @property + def _theme_stack(self) -> ThemeStack: + """Get the thread local theme stack.""" + return self._thread_locals.theme_stack + + def _detect_color_system(self) -> Optional[ColorSystem]: + """Detect color system from env vars.""" + if self.is_jupyter: + return ColorSystem.TRUECOLOR + if not self.is_terminal or self.is_dumb_terminal: + return None + if WINDOWS: # pragma: no cover + if self.legacy_windows: # pragma: no cover + return ColorSystem.WINDOWS + windows_console_features = get_windows_console_features() + return ( + ColorSystem.TRUECOLOR + if windows_console_features.truecolor + else ColorSystem.EIGHT_BIT + ) + else: + color_term = self._environ.get("COLORTERM", "").strip().lower() + if color_term in ("truecolor", "24bit"): + return ColorSystem.TRUECOLOR + term = self._environ.get("TERM", "").strip().lower() + _term_name, _hyphen, colors = term.rpartition("-") + color_system = _TERM_COLORS.get(colors, ColorSystem.STANDARD) + return color_system + + def _enter_buffer(self) -> None: + """Enter in to a buffer context, and buffer all output.""" + self._buffer_index += 1 + + def _exit_buffer(self) -> None: + """Leave buffer context, and render content if required.""" + self._buffer_index -= 1 + self._check_buffer() + + def set_live(self, live: "Live") -> None: + """Set Live instance. Used by Live context manager. + + Args: + live (Live): Live instance using this Console. + + Raises: + errors.LiveError: If this Console has a Live context currently active. + """ + with self._lock: + if self._live is not None: + raise errors.LiveError("Only one live display may be active at once") + self._live = live + + def clear_live(self) -> None: + """Clear the Live instance.""" + with self._lock: + self._live = None + + def push_render_hook(self, hook: RenderHook) -> None: + """Add a new render hook to the stack. + + Args: + hook (RenderHook): Render hook instance. + """ + with self._lock: + self._render_hooks.append(hook) + + def pop_render_hook(self) -> None: + """Pop the last renderhook from the stack.""" + with self._lock: + self._render_hooks.pop() + + def __enter__(self) -> "Console": + """Own context manager to enter buffer context.""" + self._enter_buffer() + return self + + def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None: + """Exit buffer context.""" + self._exit_buffer() + + def begin_capture(self) -> None: + """Begin capturing console output. 
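A brief illustrative sketch of constructing a Console with detection pinned, since the properties above otherwise fall back to environment probing (COLUMNS/LINES, COLORTERM, TERM, Jupyter detection). Not part of the vendored file; the pip._vendor import path is used only for illustration.

    from pip._vendor.rich.console import Console

    # Fix the size and color system instead of auto-detecting them.
    console = Console(width=100, height=40, force_terminal=True, color_system="truecolor")
    print(console.size)           # ConsoleDimensions(width=100, height=40) on a modern terminal
    print(console.color_system)   # "truecolor"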
Call :meth:`end_capture` to exit capture mode and return output.""" + self._enter_buffer() + + def end_capture(self) -> str: + """End capture mode and return captured string. + + Returns: + str: Console output. + """ + render_result = self._render_buffer(self._buffer) + del self._buffer[:] + self._exit_buffer() + return render_result + + def push_theme(self, theme: Theme, *, inherit: bool = True) -> None: + """Push a new theme on to the top of the stack, replacing the styles from the previous theme. + Generally speaking, you should call :meth:`~rich.console.Console.use_theme` to get a context manager, rather + than calling this method directly. + + Args: + theme (Theme): A theme instance. + inherit (bool, optional): Inherit existing styles. Defaults to True. + """ + self._theme_stack.push_theme(theme, inherit=inherit) + + def pop_theme(self) -> None: + """Remove theme from top of stack, restoring previous theme.""" + self._theme_stack.pop_theme() + + def use_theme(self, theme: Theme, *, inherit: bool = True) -> ThemeContext: + """Use a different theme for the duration of the context manager. + + Args: + theme (Theme): Theme instance to user. + inherit (bool, optional): Inherit existing console styles. Defaults to True. + + Returns: + ThemeContext: [description] + """ + return ThemeContext(self, theme, inherit) + + @property + def color_system(self) -> Optional[str]: + """Get color system string. + + Returns: + Optional[str]: "standard", "256" or "truecolor". + """ + + if self._color_system is not None: + return _COLOR_SYSTEMS_NAMES[self._color_system] + else: + return None + + @property + def encoding(self) -> str: + """Get the encoding of the console file, e.g. ``"utf-8"``. + + Returns: + str: A standard encoding string. + """ + return (getattr(self.file, "encoding", "utf-8") or "utf-8").lower() + + @property + def is_terminal(self) -> bool: + """Check if the console is writing to a terminal. + + Returns: + bool: True if the console writing to a device capable of + understanding terminal codes, otherwise False. + """ + if self._force_terminal is not None: + return self._force_terminal + + if hasattr(sys.stdin, "__module__") and sys.stdin.__module__.startswith( + "idlelib" + ): + # Return False for Idle which claims to be a tty but can't handle ansi codes + return False + + if self.is_jupyter: + # return False for Jupyter, which may have FORCE_COLOR set + return False + + # If FORCE_COLOR env var has any value at all, we assume a terminal. + force_color = self._environ.get("FORCE_COLOR") + if force_color is not None: + self._force_terminal = True + return True + + isatty: Optional[Callable[[], bool]] = getattr(self.file, "isatty", None) + try: + return False if isatty is None else isatty() + except ValueError: + # in some situation (at the end of a pytest run for example) isatty() can raise + # ValueError: I/O operation on closed file + # return False because we aren't in a terminal anymore + return False + + @property + def is_dumb_terminal(self) -> bool: + """Detect dumb terminal. + + Returns: + bool: True if writing to a dumb terminal, otherwise False. 
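An illustrative sketch of the use_theme() context manager defined above; Theme comes from the vendored rich.theme module imported earlier in this file. This snippet is an aside, not part of the vendored source.

    from pip._vendor.rich.console import Console
    from pip._vendor.rich.theme import Theme

    console = Console()
    # Styles pushed by use_theme() apply only inside the with-block.
    with console.use_theme(Theme({"info": "dim cyan", "danger": "bold red"})):
        console.print("connected", style="info")
        console.print("disk almost full", style="danger")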
+ + """ + _term = self._environ.get("TERM", "") + is_dumb = _term.lower() in ("dumb", "unknown") + return self.is_terminal and is_dumb + + @property + def options(self) -> ConsoleOptions: + """Get default console options.""" + return ConsoleOptions( + max_height=self.size.height, + size=self.size, + legacy_windows=self.legacy_windows, + min_width=1, + max_width=self.width, + encoding=self.encoding, + is_terminal=self.is_terminal, + ) + + @property + def size(self) -> ConsoleDimensions: + """Get the size of the console. + + Returns: + ConsoleDimensions: A named tuple containing the dimensions. + """ + + if self._width is not None and self._height is not None: + return ConsoleDimensions(self._width - self.legacy_windows, self._height) + + if self.is_dumb_terminal: + return ConsoleDimensions(80, 25) + + width: Optional[int] = None + height: Optional[int] = None + + if WINDOWS: # pragma: no cover + try: + width, height = os.get_terminal_size() + except (AttributeError, ValueError, OSError): # Probably not a terminal + pass + else: + for file_descriptor in _STD_STREAMS: + try: + width, height = os.get_terminal_size(file_descriptor) + except (AttributeError, ValueError, OSError): + pass + else: + break + + columns = self._environ.get("COLUMNS") + if columns is not None and columns.isdigit(): + width = int(columns) + lines = self._environ.get("LINES") + if lines is not None and lines.isdigit(): + height = int(lines) + + # get_terminal_size can report 0, 0 if run from pseudo-terminal + width = width or 80 + height = height or 25 + return ConsoleDimensions( + width - self.legacy_windows if self._width is None else self._width, + height if self._height is None else self._height, + ) + + @size.setter + def size(self, new_size: Tuple[int, int]) -> None: + """Set a new size for the terminal. + + Args: + new_size (Tuple[int, int]): New width and height. + """ + width, height = new_size + self._width = width + self._height = height + + @property + def width(self) -> int: + """Get the width of the console. + + Returns: + int: The width (in characters) of the console. + """ + return self.size.width + + @width.setter + def width(self, width: int) -> None: + """Set width. + + Args: + width (int): New width. + """ + self._width = width + + @property + def height(self) -> int: + """Get the height of the console. + + Returns: + int: The height (in lines) of the console. + """ + return self.size.height + + @height.setter + def height(self, height: int) -> None: + """Set height. + + Args: + height (int): new height. + """ + self._height = height + + def bell(self) -> None: + """Play a 'bell' sound (if supported by the terminal).""" + self.control(Control.bell()) + + def capture(self) -> Capture: + """A context manager to *capture* the result of print() or log() in a string, + rather than writing it to the console. + + Example: + >>> from rich.console import Console + >>> console = Console() + >>> with console.capture() as capture: + ... console.print("[bold magenta]Hello World[/]") + >>> print(capture.get()) + + Returns: + Capture: Context manager with disables writing to the terminal. + """ + capture = Capture(self) + return capture + + def pager( + self, pager: Optional[Pager] = None, styles: bool = False, links: bool = False + ) -> PagerContext: + """A context manager to display anything printed within a "pager". The pager application + is defined by the system and will typically support at least pressing a key to scroll. 
+ + Args: + pager (Pager, optional): A pager object, or None to use :class:`~rich.pager.SystemPager`. Defaults to None. + styles (bool, optional): Show styles in pager. Defaults to False. + links (bool, optional): Show links in pager. Defaults to False. + + Example: + >>> from rich.console import Console + >>> from rich.__main__ import make_test_card + >>> console = Console() + >>> with console.pager(): + console.print(make_test_card()) + + Returns: + PagerContext: A context manager. + """ + return PagerContext(self, pager=pager, styles=styles, links=links) + + def line(self, count: int = 1) -> None: + """Write new line(s). + + Args: + count (int, optional): Number of new lines. Defaults to 1. + """ + + assert count >= 0, "count must be >= 0" + self.print(NewLine(count)) + + def clear(self, home: bool = True) -> None: + """Clear the screen. + + Args: + home (bool, optional): Also move the cursor to 'home' position. Defaults to True. + """ + if home: + self.control(Control.clear(), Control.home()) + else: + self.control(Control.clear()) + + def status( + self, + status: RenderableType, + *, + spinner: str = "dots", + spinner_style: StyleType = "status.spinner", + speed: float = 1.0, + refresh_per_second: float = 12.5, + ) -> "Status": + """Display a status and spinner. + + Args: + status (RenderableType): A status renderable (str or Text typically). + spinner (str, optional): Name of spinner animation (see python -m rich.spinner). Defaults to "dots". + spinner_style (StyleType, optional): Style of spinner. Defaults to "status.spinner". + speed (float, optional): Speed factor for spinner animation. Defaults to 1.0. + refresh_per_second (float, optional): Number of refreshes per second. Defaults to 12.5. + + Returns: + Status: A Status object that may be used as a context manager. + """ + from .status import Status + + status_renderable = Status( + status, + console=self, + spinner=spinner, + spinner_style=spinner_style, + speed=speed, + refresh_per_second=refresh_per_second, + ) + return status_renderable + + def show_cursor(self, show: bool = True) -> bool: + """Show or hide the cursor. + + Args: + show (bool, optional): Set visibility of the cursor. + """ + if self.is_terminal: + self.control(Control.show_cursor(show)) + return True + return False + + def set_alt_screen(self, enable: bool = True) -> bool: + """Enables alternative screen mode. + + Note, if you enable this mode, you should ensure that is disabled before + the application exits. See :meth:`~rich.Console.screen` for a context manager + that handles this for you. + + Args: + enable (bool, optional): Enable (True) or disable (False) alternate screen. Defaults to True. + + Returns: + bool: True if the control codes were written. + + """ + changed = False + if self.is_terminal and not self.legacy_windows: + self.control(Control.alt_screen(enable)) + changed = True + self._is_alt_screen = enable + return changed + + @property + def is_alt_screen(self) -> bool: + """Check if the alt screen was enabled. + + Returns: + bool: True if the alt screen was enabled, otherwise False. + """ + return self._is_alt_screen + + def set_window_title(self, title: str) -> bool: + """Set the title of the console terminal window. + + Warning: There is no means within Rich of "resetting" the window title to its + previous value, meaning the title you set will persist even after your application + exits. + + ``fish`` shell resets the window title before and after each command by default, + negating this issue. 
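A short illustrative sketch of the status() helper shown above used as a context manager, assuming the vendored copy also ships rich.status and rich.spinner as upstream rich does. Not part of the vendored file.

    import time
    from pip._vendor.rich.console import Console

    console = Console()
    # Status is a context manager: the spinner starts on enter and stops on exit.
    with console.status("Resolving dependencies...", spinner="dots"):
        time.sleep(2)   # stand-in for real work
    console.print("done")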
Windows Terminal and command prompt will also reset the title for you. + Most other shells and terminals, however, do not do this. + + Some terminals may require configuration changes before you can set the title. + Some terminals may not support setting the title at all. + + Other software (including the terminal itself, the shell, custom prompts, plugins, etc.) + may also set the terminal window title. This could result in whatever value you write + using this method being overwritten. + + Args: + title (str): The new title of the terminal window. + + Returns: + bool: True if the control code to change the terminal title was + written, otherwise False. Note that a return value of True + does not guarantee that the window title has actually changed, + since the feature may be unsupported/disabled in some terminals. + """ + if self.is_terminal: + self.control(Control.title(title)) + return True + return False + + def screen( + self, hide_cursor: bool = True, style: Optional[StyleType] = None + ) -> "ScreenContext": + """Context manager to enable and disable 'alternative screen' mode. + + Args: + hide_cursor (bool, optional): Also hide the cursor. Defaults to False. + style (Style, optional): Optional style for screen. Defaults to None. + + Returns: + ~ScreenContext: Context which enables alternate screen on enter, and disables it on exit. + """ + return ScreenContext(self, hide_cursor=hide_cursor, style=style or "") + + def measure( + self, renderable: RenderableType, *, options: Optional[ConsoleOptions] = None + ) -> Measurement: + """Measure a renderable. Returns a :class:`~rich.measure.Measurement` object which contains + information regarding the number of characters required to print the renderable. + + Args: + renderable (RenderableType): Any renderable or string. + options (Optional[ConsoleOptions], optional): Options to use when measuring, or None + to use default options. Defaults to None. + + Returns: + Measurement: A measurement of the renderable. + """ + measurement = Measurement.get(self, options or self.options, renderable) + return measurement + + def render( + self, renderable: RenderableType, options: Optional[ConsoleOptions] = None + ) -> Iterable[Segment]: + """Render an object in to an iterable of `Segment` instances. + + This method contains the logic for rendering objects with the console protocol. + You are unlikely to need to use it directly, unless you are extending the library. + + Args: + renderable (RenderableType): An object supporting the console protocol, or + an object that may be converted to a string. + options (ConsoleOptions, optional): An options object, or None to use self.options. Defaults to None. + + Returns: + Iterable[Segment]: An iterable of segments that may be rendered. + """ + + _options = options or self.options + if _options.max_width < 1: + # No space to render anything. This prevents potential recursion errors. 
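An illustrative sketch of measure(), defined above, which reports how many cells a renderable needs. Added as an aside, not part of the vendored source.

    from pip._vendor.rich.console import Console

    console = Console(width=80)
    measurement = console.measure("Hello, [bold]World[/bold]!")
    # minimum is the narrowest width the text can wrap into;
    # maximum is its width when not wrapped at all.
    print(measurement.minimum, measurement.maximum)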
+ return + render_iterable: RenderResult + + renderable = rich_cast(renderable) + if hasattr(renderable, "__rich_console__") and not isclass(renderable): + render_iterable = renderable.__rich_console__(self, _options) # type: ignore[union-attr] + elif isinstance(renderable, str): + text_renderable = self.render_str( + renderable, highlight=_options.highlight, markup=_options.markup + ) + render_iterable = text_renderable.__rich_console__(self, _options) + else: + raise errors.NotRenderableError( + f"Unable to render {renderable!r}; " + "A str, Segment or object with __rich_console__ method is required" + ) + + try: + iter_render = iter(render_iterable) + except TypeError: + raise errors.NotRenderableError( + f"object {render_iterable!r} is not renderable" + ) + _Segment = Segment + _options = _options.reset_height() + for render_output in iter_render: + if isinstance(render_output, _Segment): + yield render_output + else: + yield from self.render(render_output, _options) + + def render_lines( + self, + renderable: RenderableType, + options: Optional[ConsoleOptions] = None, + *, + style: Optional[Style] = None, + pad: bool = True, + new_lines: bool = False, + ) -> List[List[Segment]]: + """Render objects in to a list of lines. + + The output of render_lines is useful when further formatting of rendered console text + is required, such as the Panel class which draws a border around any renderable object. + + Args: + renderable (RenderableType): Any object renderable in the console. + options (Optional[ConsoleOptions], optional): Console options, or None to use self.options. Default to ``None``. + style (Style, optional): Optional style to apply to renderables. Defaults to ``None``. + pad (bool, optional): Pad lines shorter than render width. Defaults to ``True``. + new_lines (bool, optional): Include "\n" characters at end of lines. + + Returns: + List[List[Segment]]: A list of lines, where a line is a list of Segment objects. + """ + with self._lock: + render_options = options or self.options + _rendered = self.render(renderable, render_options) + if style: + _rendered = Segment.apply_style(_rendered, style) + + render_height = render_options.height + if render_height is not None: + render_height = max(0, render_height) + + lines = list( + islice( + Segment.split_and_crop_lines( + _rendered, + render_options.max_width, + include_new_lines=new_lines, + pad=pad, + style=style, + ), + None, + render_height, + ) + ) + if render_options.height is not None: + extra_lines = render_options.height - len(lines) + if extra_lines > 0: + pad_line = [ + [Segment(" " * render_options.max_width, style), Segment("\n")] + if new_lines + else [Segment(" " * render_options.max_width, style)] + ] + lines.extend(pad_line * extra_lines) + + return lines + + def render_str( + self, + text: str, + *, + style: Union[str, Style] = "", + justify: Optional[JustifyMethod] = None, + overflow: Optional[OverflowMethod] = None, + emoji: Optional[bool] = None, + markup: Optional[bool] = None, + highlight: Optional[bool] = None, + highlighter: Optional[HighlighterType] = None, + ) -> "Text": + """Convert a string to a Text instance. This is called automatically if + you print or log a string. + + Args: + text (str): Text to render. + style (Union[str, Style], optional): Style to apply to rendered text. + justify (str, optional): Justify method: "default", "left", "center", "full", or "right". Defaults to ``None``. + overflow (str, optional): Overflow method: "crop", "fold", or "ellipsis". Defaults to ``None``. 
+ emoji (Optional[bool], optional): Enable emoji, or ``None`` to use Console default. + markup (Optional[bool], optional): Enable markup, or ``None`` to use Console default. + highlight (Optional[bool], optional): Enable highlighting, or ``None`` to use Console default. + highlighter (HighlighterType, optional): Optional highlighter to apply. + Returns: + ConsoleRenderable: Renderable object. + + """ + emoji_enabled = emoji or (emoji is None and self._emoji) + markup_enabled = markup or (markup is None and self._markup) + highlight_enabled = highlight or (highlight is None and self._highlight) + + if markup_enabled: + rich_text = render_markup( + text, + style=style, + emoji=emoji_enabled, + emoji_variant=self._emoji_variant, + ) + rich_text.justify = justify + rich_text.overflow = overflow + else: + rich_text = Text( + _emoji_replace(text, default_variant=self._emoji_variant) + if emoji_enabled + else text, + justify=justify, + overflow=overflow, + style=style, + ) + + _highlighter = (highlighter or self.highlighter) if highlight_enabled else None + if _highlighter is not None: + highlight_text = _highlighter(str(rich_text)) + highlight_text.copy_styles(rich_text) + return highlight_text + + return rich_text + + def get_style( + self, name: Union[str, Style], *, default: Optional[Union[Style, str]] = None + ) -> Style: + """Get a Style instance by its theme name or parse a definition. + + Args: + name (str): The name of a style or a style definition. + + Returns: + Style: A Style object. + + Raises: + MissingStyle: If no style could be parsed from name. + + """ + if isinstance(name, Style): + return name + + try: + style = self._theme_stack.get(name) + if style is None: + style = Style.parse(name) + return style.copy() if style.link else style + except errors.StyleSyntaxError as error: + if default is not None: + return self.get_style(default) + raise errors.MissingStyle( + f"Failed to get style {name!r}; {error}" + ) from None + + def _collect_renderables( + self, + objects: Iterable[Any], + sep: str, + end: str, + *, + justify: Optional[JustifyMethod] = None, + emoji: Optional[bool] = None, + markup: Optional[bool] = None, + highlight: Optional[bool] = None, + ) -> List[ConsoleRenderable]: + """Combine a number of renderables and text into one renderable. + + Args: + objects (Iterable[Any]): Anything that Rich can render. + sep (str): String to write between print data. + end (str): String to write at end of print data. + justify (str, optional): One of "left", "right", "center", or "full". Defaults to ``None``. + emoji (Optional[bool], optional): Enable emoji code, or ``None`` to use console default. + markup (Optional[bool], optional): Enable markup, or ``None`` to use console default. + highlight (Optional[bool], optional): Enable automatic highlighting, or ``None`` to use console default. + + Returns: + List[ConsoleRenderable]: A list of things to render. 
+ """ + renderables: List[ConsoleRenderable] = [] + _append = renderables.append + text: List[Text] = [] + append_text = text.append + + append = _append + if justify in ("left", "center", "right"): + + def align_append(renderable: RenderableType) -> None: + _append(Align(renderable, cast(AlignMethod, justify))) + + append = align_append + + _highlighter: HighlighterType = _null_highlighter + if highlight or (highlight is None and self._highlight): + _highlighter = self.highlighter + + def check_text() -> None: + if text: + sep_text = Text(sep, justify=justify, end=end) + append(sep_text.join(text)) + text.clear() + + for renderable in objects: + renderable = rich_cast(renderable) + if isinstance(renderable, str): + append_text( + self.render_str( + renderable, emoji=emoji, markup=markup, highlighter=_highlighter + ) + ) + elif isinstance(renderable, Text): + append_text(renderable) + elif isinstance(renderable, ConsoleRenderable): + check_text() + append(renderable) + elif is_expandable(renderable): + check_text() + append(Pretty(renderable, highlighter=_highlighter)) + else: + append_text(_highlighter(str(renderable))) + + check_text() + + if self.style is not None: + style = self.get_style(self.style) + renderables = [Styled(renderable, style) for renderable in renderables] + + return renderables + + def rule( + self, + title: TextType = "", + *, + characters: str = "─", + style: Union[str, Style] = "rule.line", + align: AlignMethod = "center", + ) -> None: + """Draw a line with optional centered title. + + Args: + title (str, optional): Text to render over the rule. Defaults to "". + characters (str, optional): Character(s) to form the line. Defaults to "─". + style (str, optional): Style of line. Defaults to "rule.line". + align (str, optional): How to align the title, one of "left", "center", or "right". Defaults to "center". + """ + from .rule import Rule + + rule = Rule(title=title, characters=characters, style=style, align=align) + self.print(rule) + + def control(self, *control: Control) -> None: + """Insert non-printing control codes. + + Args: + control_codes (str): Control codes, such as those that may move the cursor. + """ + if not self.is_dumb_terminal: + with self: + self._buffer.extend(_control.segment for _control in control) + + def out( + self, + *objects: Any, + sep: str = " ", + end: str = "\n", + style: Optional[Union[str, Style]] = None, + highlight: Optional[bool] = None, + ) -> None: + """Output to the terminal. This is a low-level way of writing to the terminal which unlike + :meth:`~rich.console.Console.print` won't pretty print, wrap text, or apply markup, but will + optionally apply highlighting and a basic style. + + Args: + sep (str, optional): String to write between print data. Defaults to " ". + end (str, optional): String to write at end of print data. Defaults to "\\\\n". + style (Union[str, Style], optional): A style to apply to output. Defaults to None. + highlight (Optional[bool], optional): Enable automatic highlighting, or ``None`` to use + console default. Defaults to ``None``. 
+ """ + raw_output: str = sep.join(str(_object) for _object in objects) + self.print( + raw_output, + style=style, + highlight=highlight, + emoji=False, + markup=False, + no_wrap=True, + overflow="ignore", + crop=False, + end=end, + ) + + def print( + self, + *objects: Any, + sep: str = " ", + end: str = "\n", + style: Optional[Union[str, Style]] = None, + justify: Optional[JustifyMethod] = None, + overflow: Optional[OverflowMethod] = None, + no_wrap: Optional[bool] = None, + emoji: Optional[bool] = None, + markup: Optional[bool] = None, + highlight: Optional[bool] = None, + width: Optional[int] = None, + height: Optional[int] = None, + crop: bool = True, + soft_wrap: Optional[bool] = None, + new_line_start: bool = False, + ) -> None: + """Print to the console. + + Args: + objects (positional args): Objects to log to the terminal. + sep (str, optional): String to write between print data. Defaults to " ". + end (str, optional): String to write at end of print data. Defaults to "\\\\n". + style (Union[str, Style], optional): A style to apply to output. Defaults to None. + justify (str, optional): Justify method: "default", "left", "right", "center", or "full". Defaults to ``None``. + overflow (str, optional): Overflow method: "ignore", "crop", "fold", or "ellipsis". Defaults to None. + no_wrap (Optional[bool], optional): Disable word wrapping. Defaults to None. + emoji (Optional[bool], optional): Enable emoji code, or ``None`` to use console default. Defaults to ``None``. + markup (Optional[bool], optional): Enable markup, or ``None`` to use console default. Defaults to ``None``. + highlight (Optional[bool], optional): Enable automatic highlighting, or ``None`` to use console default. Defaults to ``None``. + width (Optional[int], optional): Width of output, or ``None`` to auto-detect. Defaults to ``None``. + crop (Optional[bool], optional): Crop output to width of terminal. Defaults to True. + soft_wrap (bool, optional): Enable soft wrap mode which disables word wrapping and cropping of text or ``None`` for + Console default. Defaults to ``None``. + new_line_start (bool, False): Insert a new line at the start if the output contains more than one line. Defaults to ``False``. 
+ """ + if not objects: + objects = (NewLine(),) + + if soft_wrap is None: + soft_wrap = self.soft_wrap + if soft_wrap: + if no_wrap is None: + no_wrap = True + if overflow is None: + overflow = "ignore" + crop = False + render_hooks = self._render_hooks[:] + with self: + renderables = self._collect_renderables( + objects, + sep, + end, + justify=justify, + emoji=emoji, + markup=markup, + highlight=highlight, + ) + for hook in render_hooks: + renderables = hook.process_renderables(renderables) + render_options = self.options.update( + justify=justify, + overflow=overflow, + width=min(width, self.width) if width is not None else NO_CHANGE, + height=height, + no_wrap=no_wrap, + markup=markup, + highlight=highlight, + ) + + new_segments: List[Segment] = [] + extend = new_segments.extend + render = self.render + if style is None: + for renderable in renderables: + extend(render(renderable, render_options)) + else: + for renderable in renderables: + extend( + Segment.apply_style( + render(renderable, render_options), self.get_style(style) + ) + ) + if new_line_start: + if ( + len("".join(segment.text for segment in new_segments).splitlines()) + > 1 + ): + new_segments.insert(0, Segment.line()) + if crop: + buffer_extend = self._buffer.extend + for line in Segment.split_and_crop_lines( + new_segments, self.width, pad=False + ): + buffer_extend(line) + else: + self._buffer.extend(new_segments) + + def print_json( + self, + json: Optional[str] = None, + *, + data: Any = None, + indent: Union[None, int, str] = 2, + highlight: bool = True, + skip_keys: bool = False, + ensure_ascii: bool = False, + check_circular: bool = True, + allow_nan: bool = True, + default: Optional[Callable[[Any], Any]] = None, + sort_keys: bool = False, + ) -> None: + """Pretty prints JSON. Output will be valid JSON. + + Args: + json (Optional[str]): A string containing JSON. + data (Any): If json is not supplied, then encode this data. + indent (Union[None, int, str], optional): Number of spaces to indent. Defaults to 2. + highlight (bool, optional): Enable highlighting of output: Defaults to True. + skip_keys (bool, optional): Skip keys not of a basic type. Defaults to False. + ensure_ascii (bool, optional): Escape all non-ascii characters. Defaults to False. + check_circular (bool, optional): Check for circular references. Defaults to True. + allow_nan (bool, optional): Allow NaN and Infinity values. Defaults to True. + default (Callable, optional): A callable that converts values that can not be encoded + in to something that can be JSON encoded. Defaults to None. + sort_keys (bool, optional): Sort dictionary keys. Defaults to False. + """ + from pip._vendor.rich.json import JSON + + if json is None: + json_renderable = JSON.from_data( + data, + indent=indent, + highlight=highlight, + skip_keys=skip_keys, + ensure_ascii=ensure_ascii, + check_circular=check_circular, + allow_nan=allow_nan, + default=default, + sort_keys=sort_keys, + ) + else: + if not isinstance(json, str): + raise TypeError( + f"json must be str. Did you mean print_json(data={json!r}) ?" 
+ ) + json_renderable = JSON( + json, + indent=indent, + highlight=highlight, + skip_keys=skip_keys, + ensure_ascii=ensure_ascii, + check_circular=check_circular, + allow_nan=allow_nan, + default=default, + sort_keys=sort_keys, + ) + self.print(json_renderable, soft_wrap=True) + + def update_screen( + self, + renderable: RenderableType, + *, + region: Optional[Region] = None, + options: Optional[ConsoleOptions] = None, + ) -> None: + """Update the screen at a given offset. + + Args: + renderable (RenderableType): A Rich renderable. + region (Region, optional): Region of screen to update, or None for entire screen. Defaults to None. + x (int, optional): x offset. Defaults to 0. + y (int, optional): y offset. Defaults to 0. + + Raises: + errors.NoAltScreen: If the Console isn't in alt screen mode. + + """ + if not self.is_alt_screen: + raise errors.NoAltScreen("Alt screen must be enabled to call update_screen") + render_options = options or self.options + if region is None: + x = y = 0 + render_options = render_options.update_dimensions( + render_options.max_width, render_options.height or self.height + ) + else: + x, y, width, height = region + render_options = render_options.update_dimensions(width, height) + + lines = self.render_lines(renderable, options=render_options) + self.update_screen_lines(lines, x, y) + + def update_screen_lines( + self, lines: List[List[Segment]], x: int = 0, y: int = 0 + ) -> None: + """Update lines of the screen at a given offset. + + Args: + lines (List[List[Segment]]): Rendered lines (as produced by :meth:`~rich.Console.render_lines`). + x (int, optional): x offset (column no). Defaults to 0. + y (int, optional): y offset (column no). Defaults to 0. + + Raises: + errors.NoAltScreen: If the Console isn't in alt screen mode. + """ + if not self.is_alt_screen: + raise errors.NoAltScreen("Alt screen must be enabled to call update_screen") + screen_update = ScreenUpdate(lines, x, y) + segments = self.render(screen_update) + self._buffer.extend(segments) + self._check_buffer() + + def print_exception( + self, + *, + width: Optional[int] = 100, + extra_lines: int = 3, + theme: Optional[str] = None, + word_wrap: bool = False, + show_locals: bool = False, + suppress: Iterable[Union[str, ModuleType]] = (), + max_frames: int = 100, + ) -> None: + """Prints a rich render of the last exception and traceback. + + Args: + width (Optional[int], optional): Number of characters used to render code. Defaults to 100. + extra_lines (int, optional): Additional lines of code to render. Defaults to 3. + theme (str, optional): Override pygments theme used in traceback + word_wrap (bool, optional): Enable word wrapping of long lines. Defaults to False. + show_locals (bool, optional): Enable display of local variables. Defaults to False. + suppress (Iterable[Union[str, ModuleType]]): Optional sequence of modules or paths to exclude from traceback. + max_frames (int): Maximum number of frames to show in a traceback, 0 for no maximum. Defaults to 100. + """ + from .traceback import Traceback + + traceback = Traceback( + width=width, + extra_lines=extra_lines, + theme=theme, + word_wrap=word_wrap, + show_locals=show_locals, + suppress=suppress, + max_frames=max_frames, + ) + self.print(traceback) + + @staticmethod + def _caller_frame_info( + offset: int, + currentframe: Callable[[], Optional[FrameType]] = inspect.currentframe, + ) -> Tuple[str, int, Dict[str, Any]]: + """Get caller frame information. + + Args: + offset (int): the caller offset within the current frame stack. 
+ currentframe (Callable[[], Optional[FrameType]], optional): the callable to use to + retrieve the current frame. Defaults to ``inspect.currentframe``. + + Returns: + Tuple[str, int, Dict[str, Any]]: A tuple containing the filename, the line number and + the dictionary of local variables associated with the caller frame. + + Raises: + RuntimeError: If the stack offset is invalid. + """ + # Ignore the frame of this local helper + offset += 1 + + frame = currentframe() + if frame is not None: + # Use the faster currentframe where implemented + while offset and frame is not None: + frame = frame.f_back + offset -= 1 + assert frame is not None + return frame.f_code.co_filename, frame.f_lineno, frame.f_locals + else: + # Fallback to the slower stack + frame_info = inspect.stack()[offset] + return frame_info.filename, frame_info.lineno, frame_info.frame.f_locals + + def log( + self, + *objects: Any, + sep: str = " ", + end: str = "\n", + style: Optional[Union[str, Style]] = None, + justify: Optional[JustifyMethod] = None, + emoji: Optional[bool] = None, + markup: Optional[bool] = None, + highlight: Optional[bool] = None, + log_locals: bool = False, + _stack_offset: int = 1, + ) -> None: + """Log rich content to the terminal. + + Args: + objects (positional args): Objects to log to the terminal. + sep (str, optional): String to write between print data. Defaults to " ". + end (str, optional): String to write at end of print data. Defaults to "\\\\n". + style (Union[str, Style], optional): A style to apply to output. Defaults to None. + justify (str, optional): One of "left", "right", "center", or "full". Defaults to ``None``. + overflow (str, optional): Overflow method: "crop", "fold", or "ellipsis". Defaults to None. + emoji (Optional[bool], optional): Enable emoji code, or ``None`` to use console default. Defaults to None. + markup (Optional[bool], optional): Enable markup, or ``None`` to use console default. Defaults to None. + highlight (Optional[bool], optional): Enable automatic highlighting, or ``None`` to use console default. Defaults to None. + log_locals (bool, optional): Boolean to enable logging of locals where ``log()`` + was called. Defaults to False. + _stack_offset (int, optional): Offset of caller from end of call stack. Defaults to 1. 
+ """ + if not objects: + objects = (NewLine(),) + + render_hooks = self._render_hooks[:] + + with self: + renderables = self._collect_renderables( + objects, + sep, + end, + justify=justify, + emoji=emoji, + markup=markup, + highlight=highlight, + ) + if style is not None: + renderables = [Styled(renderable, style) for renderable in renderables] + + filename, line_no, locals = self._caller_frame_info(_stack_offset) + link_path = None if filename.startswith("<") else os.path.abspath(filename) + path = filename.rpartition(os.sep)[-1] + if log_locals: + locals_map = { + key: value + for key, value in locals.items() + if not key.startswith("__") + } + renderables.append(render_scope(locals_map, title="[i]locals")) + + renderables = [ + self._log_render( + self, + renderables, + log_time=self.get_datetime(), + path=path, + line_no=line_no, + link_path=link_path, + ) + ] + for hook in render_hooks: + renderables = hook.process_renderables(renderables) + new_segments: List[Segment] = [] + extend = new_segments.extend + render = self.render + render_options = self.options + for renderable in renderables: + extend(render(renderable, render_options)) + buffer_extend = self._buffer.extend + for line in Segment.split_and_crop_lines( + new_segments, self.width, pad=False + ): + buffer_extend(line) + + def _check_buffer(self) -> None: + """Check if the buffer may be rendered. Render it if it can (e.g. Console.quiet is False) + Rendering is supported on Windows, Unix and Jupyter environments. For + legacy Windows consoles, the win32 API is called directly. + This method will also record what it renders if recording is enabled via Console.record. + """ + if self.quiet: + del self._buffer[:] + return + with self._lock: + if self.record: + with self._record_buffer_lock: + self._record_buffer.extend(self._buffer[:]) + + if self._buffer_index == 0: + if self.is_jupyter: # pragma: no cover + from .jupyter import display + + display(self._buffer, self._render_buffer(self._buffer[:])) + del self._buffer[:] + else: + if WINDOWS: + use_legacy_windows_render = False + if self.legacy_windows: + fileno = get_fileno(self.file) + if fileno is not None: + use_legacy_windows_render = ( + fileno in _STD_STREAMS_OUTPUT + ) + + if use_legacy_windows_render: + from pip._vendor.rich._win32_console import LegacyWindowsTerm + from pip._vendor.rich._windows_renderer import legacy_windows_render + + buffer = self._buffer[:] + if self.no_color and self._color_system: + buffer = list(Segment.remove_color(buffer)) + + legacy_windows_render(buffer, LegacyWindowsTerm(self.file)) + else: + # Either a non-std stream on legacy Windows, or modern Windows. 
+ text = self._render_buffer(self._buffer[:]) + # https://bugs.python.org/issue37871 + # https://github.com/python/cpython/issues/82052 + # We need to avoid writing more than 32Kb in a single write, due to the above bug + write = self.file.write + # Worse case scenario, every character is 4 bytes of utf-8 + MAX_WRITE = 32 * 1024 // 4 + try: + if len(text) <= MAX_WRITE: + write(text) + else: + batch: List[str] = [] + batch_append = batch.append + size = 0 + for line in text.splitlines(True): + if size + len(line) > MAX_WRITE and batch: + write("".join(batch)) + batch.clear() + size = 0 + batch_append(line) + size += len(line) + if batch: + write("".join(batch)) + batch.clear() + except UnicodeEncodeError as error: + error.reason = f"{error.reason}\n*** You may need to add PYTHONIOENCODING=utf-8 to your environment ***" + raise + else: + text = self._render_buffer(self._buffer[:]) + try: + self.file.write(text) + except UnicodeEncodeError as error: + error.reason = f"{error.reason}\n*** You may need to add PYTHONIOENCODING=utf-8 to your environment ***" + raise + + self.file.flush() + del self._buffer[:] + + def _render_buffer(self, buffer: Iterable[Segment]) -> str: + """Render buffered output, and clear buffer.""" + output: List[str] = [] + append = output.append + color_system = self._color_system + legacy_windows = self.legacy_windows + not_terminal = not self.is_terminal + if self.no_color and color_system: + buffer = Segment.remove_color(buffer) + for text, style, control in buffer: + if style: + append( + style.render( + text, + color_system=color_system, + legacy_windows=legacy_windows, + ) + ) + elif not (not_terminal and control): + append(text) + + rendered = "".join(output) + return rendered + + def input( + self, + prompt: TextType = "", + *, + markup: bool = True, + emoji: bool = True, + password: bool = False, + stream: Optional[TextIO] = None, + ) -> str: + """Displays a prompt and waits for input from the user. The prompt may contain color / style. + + It works in the same way as Python's builtin :func:`input` function and provides elaborate line editing and history features if Python's builtin :mod:`readline` module is previously loaded. + + Args: + prompt (Union[str, Text]): Text to render in the prompt. + markup (bool, optional): Enable console markup (requires a str prompt). Defaults to True. + emoji (bool, optional): Enable emoji (requires a str prompt). Defaults to True. + password: (bool, optional): Hide typed text. Defaults to False. + stream: (TextIO, optional): Optional file to read input from (rather than stdin). Defaults to None. + + Returns: + str: Text read from stdin. + """ + if prompt: + self.print(prompt, markup=markup, emoji=emoji, end="") + if password: + result = getpass("", stream=stream) + else: + if stream: + result = stream.readline() + else: + result = input() + return result + + def export_text(self, *, clear: bool = True, styles: bool = False) -> str: + """Generate text from console contents (requires record=True argument in constructor). + + Args: + clear (bool, optional): Clear record buffer after exporting. Defaults to ``True``. + styles (bool, optional): If ``True``, ansi escape codes will be included. ``False`` for plain text. + Defaults to ``False``. + + Returns: + str: String containing console contents. 
+ + """ + assert ( + self.record + ), "To export console contents set record=True in the constructor or instance" + + with self._record_buffer_lock: + if styles: + text = "".join( + (style.render(text) if style else text) + for text, style, _ in self._record_buffer + ) + else: + text = "".join( + segment.text + for segment in self._record_buffer + if not segment.control + ) + if clear: + del self._record_buffer[:] + return text + + def save_text(self, path: str, *, clear: bool = True, styles: bool = False) -> None: + """Generate text from console and save to a given location (requires record=True argument in constructor). + + Args: + path (str): Path to write text files. + clear (bool, optional): Clear record buffer after exporting. Defaults to ``True``. + styles (bool, optional): If ``True``, ansi style codes will be included. ``False`` for plain text. + Defaults to ``False``. + + """ + text = self.export_text(clear=clear, styles=styles) + with open(path, "wt", encoding="utf-8") as write_file: + write_file.write(text) + + def export_html( + self, + *, + theme: Optional[TerminalTheme] = None, + clear: bool = True, + code_format: Optional[str] = None, + inline_styles: bool = False, + ) -> str: + """Generate HTML from console contents (requires record=True argument in constructor). + + Args: + theme (TerminalTheme, optional): TerminalTheme object containing console colors. + clear (bool, optional): Clear record buffer after exporting. Defaults to ``True``. + code_format (str, optional): Format string to render HTML. In addition to '{foreground}', + '{background}', and '{code}', should contain '{stylesheet}' if inline_styles is ``False``. + inline_styles (bool, optional): If ``True`` styles will be inlined in to spans, which makes files + larger but easier to cut and paste markup. If ``False``, styles will be embedded in a style tag. + Defaults to False. + + Returns: + str: String containing console contents as HTML. 
+ """ + assert ( + self.record + ), "To export console contents set record=True in the constructor or instance" + fragments: List[str] = [] + append = fragments.append + _theme = theme or DEFAULT_TERMINAL_THEME + stylesheet = "" + + render_code_format = CONSOLE_HTML_FORMAT if code_format is None else code_format + + with self._record_buffer_lock: + if inline_styles: + for text, style, _ in Segment.filter_control( + Segment.simplify(self._record_buffer) + ): + text = escape(text) + if style: + rule = style.get_html_style(_theme) + if style.link: + text = f'{text}' + text = f'{text}' if rule else text + append(text) + else: + styles: Dict[str, int] = {} + for text, style, _ in Segment.filter_control( + Segment.simplify(self._record_buffer) + ): + text = escape(text) + if style: + rule = style.get_html_style(_theme) + style_number = styles.setdefault(rule, len(styles) + 1) + if style.link: + text = f'{text}' + else: + text = f'{text}' + append(text) + stylesheet_rules: List[str] = [] + stylesheet_append = stylesheet_rules.append + for style_rule, style_number in styles.items(): + if style_rule: + stylesheet_append(f".r{style_number} {{{style_rule}}}") + stylesheet = "\n".join(stylesheet_rules) + + rendered_code = render_code_format.format( + code="".join(fragments), + stylesheet=stylesheet, + foreground=_theme.foreground_color.hex, + background=_theme.background_color.hex, + ) + if clear: + del self._record_buffer[:] + return rendered_code + + def save_html( + self, + path: str, + *, + theme: Optional[TerminalTheme] = None, + clear: bool = True, + code_format: str = CONSOLE_HTML_FORMAT, + inline_styles: bool = False, + ) -> None: + """Generate HTML from console contents and write to a file (requires record=True argument in constructor). + + Args: + path (str): Path to write html file. + theme (TerminalTheme, optional): TerminalTheme object containing console colors. + clear (bool, optional): Clear record buffer after exporting. Defaults to ``True``. + code_format (str, optional): Format string to render HTML. In addition to '{foreground}', + '{background}', and '{code}', should contain '{stylesheet}' if inline_styles is ``False``. + inline_styles (bool, optional): If ``True`` styles will be inlined in to spans, which makes files + larger but easier to cut and paste markup. If ``False``, styles will be embedded in a style tag. + Defaults to False. + + """ + html = self.export_html( + theme=theme, + clear=clear, + code_format=code_format, + inline_styles=inline_styles, + ) + with open(path, "wt", encoding="utf-8") as write_file: + write_file.write(html) + + def export_svg( + self, + *, + title: str = "Rich", + theme: Optional[TerminalTheme] = None, + clear: bool = True, + code_format: str = CONSOLE_SVG_FORMAT, + font_aspect_ratio: float = 0.61, + unique_id: Optional[str] = None, + ) -> str: + """ + Generate an SVG from the console contents (requires record=True in Console constructor). + + Args: + title (str, optional): The title of the tab in the output image + theme (TerminalTheme, optional): The ``TerminalTheme`` object to use to style the terminal + clear (bool, optional): Clear record buffer after exporting. Defaults to ``True`` + code_format (str, optional): Format string used to generate the SVG. Rich will inject a number of variables + into the string in order to form the final SVG output. The default template used and the variables + injected by Rich can be found by inspecting the ``console.CONSOLE_SVG_FORMAT`` variable. 
+ font_aspect_ratio (float, optional): The width to height ratio of the font used in the ``code_format`` + string. Defaults to 0.61, which is the width to height ratio of Fira Code (the default font). + If you aren't specifying a different font inside ``code_format``, you probably don't need this. + unique_id (str, optional): unique id that is used as the prefix for various elements (CSS styles, node + ids). If not set, this defaults to a computed value based on the recorded content. + """ + + from pip._vendor.rich.cells import cell_len + + style_cache: Dict[Style, str] = {} + + def get_svg_style(style: Style) -> str: + """Convert a Style to CSS rules for SVG.""" + if style in style_cache: + return style_cache[style] + css_rules = [] + color = ( + _theme.foreground_color + if (style.color is None or style.color.is_default) + else style.color.get_truecolor(_theme) + ) + bgcolor = ( + _theme.background_color + if (style.bgcolor is None or style.bgcolor.is_default) + else style.bgcolor.get_truecolor(_theme) + ) + if style.reverse: + color, bgcolor = bgcolor, color + if style.dim: + color = blend_rgb(color, bgcolor, 0.4) + css_rules.append(f"fill: {color.hex}") + if style.bold: + css_rules.append("font-weight: bold") + if style.italic: + css_rules.append("font-style: italic;") + if style.underline: + css_rules.append("text-decoration: underline;") + if style.strike: + css_rules.append("text-decoration: line-through;") + + css = ";".join(css_rules) + style_cache[style] = css + return css + + _theme = theme or SVG_EXPORT_THEME + + width = self.width + char_height = 20 + char_width = char_height * font_aspect_ratio + line_height = char_height * 1.22 + + margin_top = 1 + margin_right = 1 + margin_bottom = 1 + margin_left = 1 + + padding_top = 40 + padding_right = 8 + padding_bottom = 8 + padding_left = 8 + + padding_width = padding_left + padding_right + padding_height = padding_top + padding_bottom + margin_width = margin_left + margin_right + margin_height = margin_top + margin_bottom + + text_backgrounds: List[str] = [] + text_group: List[str] = [] + classes: Dict[str, int] = {} + style_no = 1 + + def escape_text(text: str) -> str: + """HTML escape text and replace spaces with nbsp.""" + return escape(text).replace(" ", " ") + + def make_tag( + name: str, content: Optional[str] = None, **attribs: object + ) -> str: + """Make a tag from name, content, and attributes.""" + + def stringify(value: object) -> str: + if isinstance(value, (float)): + return format(value, "g") + return str(value) + + tag_attribs = " ".join( + f'{k.lstrip("_").replace("_", "-")}="{stringify(v)}"' + for k, v in attribs.items() + ) + return ( + f"<{name} {tag_attribs}>{content}" + if content + else f"<{name} {tag_attribs}/>" + ) + + with self._record_buffer_lock: + segments = list(Segment.filter_control(self._record_buffer)) + if clear: + self._record_buffer.clear() + + if unique_id is None: + unique_id = "terminal-" + str( + zlib.adler32( + ("".join(repr(segment) for segment in segments)).encode( + "utf-8", + "ignore", + ) + + title.encode("utf-8", "ignore") + ) + ) + y = 0 + for y, line in enumerate(Segment.split_and_crop_lines(segments, length=width)): + x = 0 + for text, style, _control in line: + style = style or Style() + rules = get_svg_style(style) + if rules not in classes: + classes[rules] = style_no + style_no += 1 + class_name = f"r{classes[rules]}" + + if style.reverse: + has_background = True + background = ( + _theme.foreground_color.hex + if style.color is None + else style.color.get_truecolor(_theme).hex 
+ ) + else: + bgcolor = style.bgcolor + has_background = bgcolor is not None and not bgcolor.is_default + background = ( + _theme.background_color.hex + if style.bgcolor is None + else style.bgcolor.get_truecolor(_theme).hex + ) + + text_length = cell_len(text) + if has_background: + text_backgrounds.append( + make_tag( + "rect", + fill=background, + x=x * char_width, + y=y * line_height + 1.5, + width=char_width * text_length, + height=line_height + 0.25, + shape_rendering="crispEdges", + ) + ) + + if text != " " * len(text): + text_group.append( + make_tag( + "text", + escape_text(text), + _class=f"{unique_id}-{class_name}", + x=x * char_width, + y=y * line_height + char_height, + textLength=char_width * len(text), + clip_path=f"url(#{unique_id}-line-{y})", + ) + ) + x += cell_len(text) + + line_offsets = [line_no * line_height + 1.5 for line_no in range(y)] + lines = "\n".join( + f""" + {make_tag("rect", x=0, y=offset, width=char_width * width, height=line_height + 0.25)} + """ + for line_no, offset in enumerate(line_offsets) + ) + + styles = "\n".join( + f".{unique_id}-r{rule_no} {{ {css} }}" for css, rule_no in classes.items() + ) + backgrounds = "".join(text_backgrounds) + matrix = "".join(text_group) + + terminal_width = ceil(width * char_width + padding_width) + terminal_height = (y + 1) * line_height + padding_height + chrome = make_tag( + "rect", + fill=_theme.background_color.hex, + stroke="rgba(255,255,255,0.35)", + stroke_width="1", + x=margin_left, + y=margin_top, + width=terminal_width, + height=terminal_height, + rx=8, + ) + + title_color = _theme.foreground_color.hex + if title: + chrome += make_tag( + "text", + escape_text(title), + _class=f"{unique_id}-title", + fill=title_color, + text_anchor="middle", + x=terminal_width // 2, + y=margin_top + char_height + 6, + ) + chrome += f""" + + + + + + """ + + svg = code_format.format( + unique_id=unique_id, + char_width=char_width, + char_height=char_height, + line_height=line_height, + terminal_width=char_width * width - 1, + terminal_height=(y + 1) * line_height - 1, + width=terminal_width + margin_width, + height=terminal_height + margin_height, + terminal_x=margin_left + padding_left, + terminal_y=margin_top + padding_top, + styles=styles, + chrome=chrome, + backgrounds=backgrounds, + matrix=matrix, + lines=lines, + ) + return svg + + def save_svg( + self, + path: str, + *, + title: str = "Rich", + theme: Optional[TerminalTheme] = None, + clear: bool = True, + code_format: str = CONSOLE_SVG_FORMAT, + font_aspect_ratio: float = 0.61, + unique_id: Optional[str] = None, + ) -> None: + """Generate an SVG file from the console contents (requires record=True in Console constructor). + + Args: + path (str): The path to write the SVG to. + title (str, optional): The title of the tab in the output image + theme (TerminalTheme, optional): The ``TerminalTheme`` object to use to style the terminal + clear (bool, optional): Clear record buffer after exporting. Defaults to ``True`` + code_format (str, optional): Format string used to generate the SVG. Rich will inject a number of variables + into the string in order to form the final SVG output. The default template used and the variables + injected by Rich can be found by inspecting the ``console.CONSOLE_SVG_FORMAT`` variable. + font_aspect_ratio (float, optional): The width to height ratio of the font used in the ``code_format`` + string. Defaults to 0.61, which is the width to height ratio of Fira Code (the default font). 
+ If you aren't specifying a different font inside ``code_format``, you probably don't need this. + unique_id (str, optional): unique id that is used as the prefix for various elements (CSS styles, node + ids). If not set, this defaults to a computed value based on the recorded content. + """ + svg = self.export_svg( + title=title, + theme=theme, + clear=clear, + code_format=code_format, + font_aspect_ratio=font_aspect_ratio, + unique_id=unique_id, + ) + with open(path, "wt", encoding="utf-8") as write_file: + write_file.write(svg) + + +def _svg_hash(svg_main_code: str) -> str: + """Returns a unique hash for the given SVG main code. + + Args: + svg_main_code (str): The content we're going to inject in the SVG envelope. + + Returns: + str: a hash of the given content + """ + return str(zlib.adler32(svg_main_code.encode())) + + +if __name__ == "__main__": # pragma: no cover + console = Console(record=True) + + console.log( + "JSONRPC [i]request[/i]", + 5, + 1.3, + True, + False, + None, + { + "jsonrpc": "2.0", + "method": "subtract", + "params": {"minuend": 42, "subtrahend": 23}, + "id": 3, + }, + ) + + console.log("Hello, World!", "{'a': 1}", repr(console)) + + console.print( + { + "name": None, + "empty": [], + "quiz": { + "sport": { + "answered": True, + "q1": { + "question": "Which one is correct team name in NBA?", + "options": [ + "New York Bulls", + "Los Angeles Kings", + "Golden State Warriors", + "Huston Rocket", + ], + "answer": "Huston Rocket", + }, + }, + "maths": { + "answered": False, + "q1": { + "question": "5 + 7 = ?", + "options": [10, 11, 12, 13], + "answer": 12, + }, + "q2": { + "question": "12 - 8 = ?", + "options": [1, 2, 3, 4], + "answer": 4, + }, + }, + }, + } + ) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/constrain.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/constrain.py new file mode 100644 index 0000000..65fdf56 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/constrain.py @@ -0,0 +1,37 @@ +from typing import Optional, TYPE_CHECKING + +from .jupyter import JupyterMixin +from .measure import Measurement + +if TYPE_CHECKING: + from .console import Console, ConsoleOptions, RenderableType, RenderResult + + +class Constrain(JupyterMixin): + """Constrain the width of a renderable to a given number of characters. + + Args: + renderable (RenderableType): A renderable object. + width (int, optional): The maximum width (in characters) to render. Defaults to 80. 
+ """ + + def __init__(self, renderable: "RenderableType", width: Optional[int] = 80) -> None: + self.renderable = renderable + self.width = width + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + if self.width is None: + yield self.renderable + else: + child_options = options.update_width(min(self.width, options.max_width)) + yield from console.render(self.renderable, child_options) + + def __rich_measure__( + self, console: "Console", options: "ConsoleOptions" + ) -> "Measurement": + if self.width is not None: + options = options.update_width(self.width) + measurement = Measurement.get(console, options, self.renderable) + return measurement diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/containers.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/containers.py new file mode 100644 index 0000000..e29cf36 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/containers.py @@ -0,0 +1,167 @@ +from itertools import zip_longest +from typing import ( + Iterator, + Iterable, + List, + Optional, + Union, + overload, + TypeVar, + TYPE_CHECKING, +) + +if TYPE_CHECKING: + from .console import ( + Console, + ConsoleOptions, + JustifyMethod, + OverflowMethod, + RenderResult, + RenderableType, + ) + from .text import Text + +from .cells import cell_len +from .measure import Measurement + +T = TypeVar("T") + + +class Renderables: + """A list subclass which renders its contents to the console.""" + + def __init__( + self, renderables: Optional[Iterable["RenderableType"]] = None + ) -> None: + self._renderables: List["RenderableType"] = ( + list(renderables) if renderables is not None else [] + ) + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + """Console render method to insert line-breaks.""" + yield from self._renderables + + def __rich_measure__( + self, console: "Console", options: "ConsoleOptions" + ) -> "Measurement": + dimensions = [ + Measurement.get(console, options, renderable) + for renderable in self._renderables + ] + if not dimensions: + return Measurement(1, 1) + _min = max(dimension.minimum for dimension in dimensions) + _max = max(dimension.maximum for dimension in dimensions) + return Measurement(_min, _max) + + def append(self, renderable: "RenderableType") -> None: + self._renderables.append(renderable) + + def __iter__(self) -> Iterable["RenderableType"]: + return iter(self._renderables) + + +class Lines: + """A list subclass which can render to the console.""" + + def __init__(self, lines: Iterable["Text"] = ()) -> None: + self._lines: List["Text"] = list(lines) + + def __repr__(self) -> str: + return f"Lines({self._lines!r})" + + def __iter__(self) -> Iterator["Text"]: + return iter(self._lines) + + @overload + def __getitem__(self, index: int) -> "Text": + ... + + @overload + def __getitem__(self, index: slice) -> List["Text"]: + ... 
+ + def __getitem__(self, index: Union[slice, int]) -> Union["Text", List["Text"]]: + return self._lines[index] + + def __setitem__(self, index: int, value: "Text") -> "Lines": + self._lines[index] = value + return self + + def __len__(self) -> int: + return self._lines.__len__() + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + """Console render method to insert line-breaks.""" + yield from self._lines + + def append(self, line: "Text") -> None: + self._lines.append(line) + + def extend(self, lines: Iterable["Text"]) -> None: + self._lines.extend(lines) + + def pop(self, index: int = -1) -> "Text": + return self._lines.pop(index) + + def justify( + self, + console: "Console", + width: int, + justify: "JustifyMethod" = "left", + overflow: "OverflowMethod" = "fold", + ) -> None: + """Justify and overflow text to a given width. + + Args: + console (Console): Console instance. + width (int): Number of characters per line. + justify (str, optional): Default justify method for text: "left", "center", "full" or "right". Defaults to "left". + overflow (str, optional): Default overflow for text: "crop", "fold", or "ellipsis". Defaults to "fold". + + """ + from .text import Text + + if justify == "left": + for line in self._lines: + line.truncate(width, overflow=overflow, pad=True) + elif justify == "center": + for line in self._lines: + line.rstrip() + line.truncate(width, overflow=overflow) + line.pad_left((width - cell_len(line.plain)) // 2) + line.pad_right(width - cell_len(line.plain)) + elif justify == "right": + for line in self._lines: + line.rstrip() + line.truncate(width, overflow=overflow) + line.pad_left(width - cell_len(line.plain)) + elif justify == "full": + for line_index, line in enumerate(self._lines): + if line_index == len(self._lines) - 1: + break + words = line.split(" ") + words_size = sum(cell_len(word.plain) for word in words) + num_spaces = len(words) - 1 + spaces = [1 for _ in range(num_spaces)] + index = 0 + if spaces: + while words_size + num_spaces < width: + spaces[len(spaces) - index - 1] += 1 + num_spaces += 1 + index = (index + 1) % len(spaces) + tokens: List[Text] = [] + for index, (word, next_word) in enumerate( + zip_longest(words, words[1:]) + ): + tokens.append(word) + if index < len(spaces): + style = word.get_style_at_offset(console, -1) + next_style = next_word.get_style_at_offset(console, 0) + space_style = style if style == next_style else line.style + tokens.append(Text(" " * spaces[index], style=space_style)) + self[line_index] = Text("").join(tokens) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/control.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/control.py new file mode 100644 index 0000000..88fcb92 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/control.py @@ -0,0 +1,225 @@ +import sys +import time +from typing import TYPE_CHECKING, Callable, Dict, Iterable, List, Union + +if sys.version_info >= (3, 8): + from typing import Final +else: + from pip._vendor.typing_extensions import Final # pragma: no cover + +from .segment import ControlCode, ControlType, Segment + +if TYPE_CHECKING: + from .console import Console, ConsoleOptions, RenderResult + +STRIP_CONTROL_CODES: Final = [ + 7, # Bell + 8, # Backspace + 11, # Vertical tab + 12, # Form feed + 13, # Carriage return +] +_CONTROL_STRIP_TRANSLATE: Final = { + _codepoint: None for _codepoint in STRIP_CONTROL_CODES +} + +CONTROL_ESCAPE: Final = { + 7: "\\a", + 8: "\\b", + 11: "\\v", + 12: "\\f", 
+ 13: "\\r", +} + +CONTROL_CODES_FORMAT: Dict[int, Callable[..., str]] = { + ControlType.BELL: lambda: "\x07", + ControlType.CARRIAGE_RETURN: lambda: "\r", + ControlType.HOME: lambda: "\x1b[H", + ControlType.CLEAR: lambda: "\x1b[2J", + ControlType.ENABLE_ALT_SCREEN: lambda: "\x1b[?1049h", + ControlType.DISABLE_ALT_SCREEN: lambda: "\x1b[?1049l", + ControlType.SHOW_CURSOR: lambda: "\x1b[?25h", + ControlType.HIDE_CURSOR: lambda: "\x1b[?25l", + ControlType.CURSOR_UP: lambda param: f"\x1b[{param}A", + ControlType.CURSOR_DOWN: lambda param: f"\x1b[{param}B", + ControlType.CURSOR_FORWARD: lambda param: f"\x1b[{param}C", + ControlType.CURSOR_BACKWARD: lambda param: f"\x1b[{param}D", + ControlType.CURSOR_MOVE_TO_COLUMN: lambda param: f"\x1b[{param+1}G", + ControlType.ERASE_IN_LINE: lambda param: f"\x1b[{param}K", + ControlType.CURSOR_MOVE_TO: lambda x, y: f"\x1b[{y+1};{x+1}H", + ControlType.SET_WINDOW_TITLE: lambda title: f"\x1b]0;{title}\x07", +} + + +class Control: + """A renderable that inserts a control code (non printable but may move cursor). + + Args: + *codes (str): Positional arguments are either a :class:`~rich.segment.ControlType` enum or a + tuple of ControlType and an integer parameter + """ + + __slots__ = ["segment"] + + def __init__(self, *codes: Union[ControlType, ControlCode]) -> None: + control_codes: List[ControlCode] = [ + (code,) if isinstance(code, ControlType) else code for code in codes + ] + _format_map = CONTROL_CODES_FORMAT + rendered_codes = "".join( + _format_map[code](*parameters) for code, *parameters in control_codes + ) + self.segment = Segment(rendered_codes, None, control_codes) + + @classmethod + def bell(cls) -> "Control": + """Ring the 'bell'.""" + return cls(ControlType.BELL) + + @classmethod + def home(cls) -> "Control": + """Move cursor to 'home' position.""" + return cls(ControlType.HOME) + + @classmethod + def move(cls, x: int = 0, y: int = 0) -> "Control": + """Move cursor relative to current position. + + Args: + x (int): X offset. + y (int): Y offset. + + Returns: + ~Control: Control object. + + """ + + def get_codes() -> Iterable[ControlCode]: + control = ControlType + if x: + yield ( + control.CURSOR_FORWARD if x > 0 else control.CURSOR_BACKWARD, + abs(x), + ) + if y: + yield ( + control.CURSOR_DOWN if y > 0 else control.CURSOR_UP, + abs(y), + ) + + control = cls(*get_codes()) + return control + + @classmethod + def move_to_column(cls, x: int, y: int = 0) -> "Control": + """Move to the given column, optionally add offset to row. + + Returns: + x (int): absolute x (column) + y (int): optional y offset (row) + + Returns: + ~Control: Control object. + """ + + return ( + cls( + (ControlType.CURSOR_MOVE_TO_COLUMN, x), + ( + ControlType.CURSOR_DOWN if y > 0 else ControlType.CURSOR_UP, + abs(y), + ), + ) + if y + else cls((ControlType.CURSOR_MOVE_TO_COLUMN, x)) + ) + + @classmethod + def move_to(cls, x: int, y: int) -> "Control": + """Move cursor to absolute position. + + Args: + x (int): x offset (column) + y (int): y offset (row) + + Returns: + ~Control: Control object. 
+ """ + return cls((ControlType.CURSOR_MOVE_TO, x, y)) + + @classmethod + def clear(cls) -> "Control": + """Clear the screen.""" + return cls(ControlType.CLEAR) + + @classmethod + def show_cursor(cls, show: bool) -> "Control": + """Show or hide the cursor.""" + return cls(ControlType.SHOW_CURSOR if show else ControlType.HIDE_CURSOR) + + @classmethod + def alt_screen(cls, enable: bool) -> "Control": + """Enable or disable alt screen.""" + if enable: + return cls(ControlType.ENABLE_ALT_SCREEN, ControlType.HOME) + else: + return cls(ControlType.DISABLE_ALT_SCREEN) + + @classmethod + def title(cls, title: str) -> "Control": + """Set the terminal window title + + Args: + title (str): The new terminal window title + """ + return cls((ControlType.SET_WINDOW_TITLE, title)) + + def __str__(self) -> str: + return self.segment.text + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + if self.segment.text: + yield self.segment + + +def strip_control_codes( + text: str, _translate_table: Dict[int, None] = _CONTROL_STRIP_TRANSLATE +) -> str: + """Remove control codes from text. + + Args: + text (str): A string possibly contain control codes. + + Returns: + str: String with control codes removed. + """ + return text.translate(_translate_table) + + +def escape_control_codes( + text: str, + _translate_table: Dict[int, str] = CONTROL_ESCAPE, +) -> str: + """Replace control codes with their "escaped" equivalent in the given text. + (e.g. "\b" becomes "\\b") + + Args: + text (str): A string possibly containing control codes. + + Returns: + str: String with control codes replaced with their escaped version. + """ + return text.translate(_translate_table) + + +if __name__ == "__main__": # pragma: no cover + from pip._vendor.rich.console import Console + + console = Console() + console.print("Look at the title of your terminal window ^") + # console.print(Control((ControlType.SET_WINDOW_TITLE, "Hello, world!"))) + for i in range(10): + console.set_window_title("🚀 Loading" + "." 
* i) + time.sleep(0.5) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/default_styles.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/default_styles.py new file mode 100644 index 0000000..dca3719 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/default_styles.py @@ -0,0 +1,190 @@ +from typing import Dict + +from .style import Style + +DEFAULT_STYLES: Dict[str, Style] = { + "none": Style.null(), + "reset": Style( + color="default", + bgcolor="default", + dim=False, + bold=False, + italic=False, + underline=False, + blink=False, + blink2=False, + reverse=False, + conceal=False, + strike=False, + ), + "dim": Style(dim=True), + "bright": Style(dim=False), + "bold": Style(bold=True), + "strong": Style(bold=True), + "code": Style(reverse=True, bold=True), + "italic": Style(italic=True), + "emphasize": Style(italic=True), + "underline": Style(underline=True), + "blink": Style(blink=True), + "blink2": Style(blink2=True), + "reverse": Style(reverse=True), + "strike": Style(strike=True), + "black": Style(color="black"), + "red": Style(color="red"), + "green": Style(color="green"), + "yellow": Style(color="yellow"), + "magenta": Style(color="magenta"), + "cyan": Style(color="cyan"), + "white": Style(color="white"), + "inspect.attr": Style(color="yellow", italic=True), + "inspect.attr.dunder": Style(color="yellow", italic=True, dim=True), + "inspect.callable": Style(bold=True, color="red"), + "inspect.async_def": Style(italic=True, color="bright_cyan"), + "inspect.def": Style(italic=True, color="bright_cyan"), + "inspect.class": Style(italic=True, color="bright_cyan"), + "inspect.error": Style(bold=True, color="red"), + "inspect.equals": Style(), + "inspect.help": Style(color="cyan"), + "inspect.doc": Style(dim=True), + "inspect.value.border": Style(color="green"), + "live.ellipsis": Style(bold=True, color="red"), + "layout.tree.row": Style(dim=False, color="red"), + "layout.tree.column": Style(dim=False, color="blue"), + "logging.keyword": Style(bold=True, color="yellow"), + "logging.level.notset": Style(dim=True), + "logging.level.debug": Style(color="green"), + "logging.level.info": Style(color="blue"), + "logging.level.warning": Style(color="red"), + "logging.level.error": Style(color="red", bold=True), + "logging.level.critical": Style(color="red", bold=True, reverse=True), + "log.level": Style.null(), + "log.time": Style(color="cyan", dim=True), + "log.message": Style.null(), + "log.path": Style(dim=True), + "repr.ellipsis": Style(color="yellow"), + "repr.indent": Style(color="green", dim=True), + "repr.error": Style(color="red", bold=True), + "repr.str": Style(color="green", italic=False, bold=False), + "repr.brace": Style(bold=True), + "repr.comma": Style(bold=True), + "repr.ipv4": Style(bold=True, color="bright_green"), + "repr.ipv6": Style(bold=True, color="bright_green"), + "repr.eui48": Style(bold=True, color="bright_green"), + "repr.eui64": Style(bold=True, color="bright_green"), + "repr.tag_start": Style(bold=True), + "repr.tag_name": Style(color="bright_magenta", bold=True), + "repr.tag_contents": Style(color="default"), + "repr.tag_end": Style(bold=True), + "repr.attrib_name": Style(color="yellow", italic=False), + "repr.attrib_equal": Style(bold=True), + "repr.attrib_value": Style(color="magenta", italic=False), + "repr.number": Style(color="cyan", bold=True, italic=False), + "repr.number_complex": Style(color="cyan", bold=True, italic=False), # same + "repr.bool_true": Style(color="bright_green", italic=True), + "repr.bool_false": 
Style(color="bright_red", italic=True), + "repr.none": Style(color="magenta", italic=True), + "repr.url": Style(underline=True, color="bright_blue", italic=False, bold=False), + "repr.uuid": Style(color="bright_yellow", bold=False), + "repr.call": Style(color="magenta", bold=True), + "repr.path": Style(color="magenta"), + "repr.filename": Style(color="bright_magenta"), + "rule.line": Style(color="bright_green"), + "rule.text": Style.null(), + "json.brace": Style(bold=True), + "json.bool_true": Style(color="bright_green", italic=True), + "json.bool_false": Style(color="bright_red", italic=True), + "json.null": Style(color="magenta", italic=True), + "json.number": Style(color="cyan", bold=True, italic=False), + "json.str": Style(color="green", italic=False, bold=False), + "json.key": Style(color="blue", bold=True), + "prompt": Style.null(), + "prompt.choices": Style(color="magenta", bold=True), + "prompt.default": Style(color="cyan", bold=True), + "prompt.invalid": Style(color="red"), + "prompt.invalid.choice": Style(color="red"), + "pretty": Style.null(), + "scope.border": Style(color="blue"), + "scope.key": Style(color="yellow", italic=True), + "scope.key.special": Style(color="yellow", italic=True, dim=True), + "scope.equals": Style(color="red"), + "table.header": Style(bold=True), + "table.footer": Style(bold=True), + "table.cell": Style.null(), + "table.title": Style(italic=True), + "table.caption": Style(italic=True, dim=True), + "traceback.error": Style(color="red", italic=True), + "traceback.border.syntax_error": Style(color="bright_red"), + "traceback.border": Style(color="red"), + "traceback.text": Style.null(), + "traceback.title": Style(color="red", bold=True), + "traceback.exc_type": Style(color="bright_red", bold=True), + "traceback.exc_value": Style.null(), + "traceback.offset": Style(color="bright_red", bold=True), + "bar.back": Style(color="grey23"), + "bar.complete": Style(color="rgb(249,38,114)"), + "bar.finished": Style(color="rgb(114,156,31)"), + "bar.pulse": Style(color="rgb(249,38,114)"), + "progress.description": Style.null(), + "progress.filesize": Style(color="green"), + "progress.filesize.total": Style(color="green"), + "progress.download": Style(color="green"), + "progress.elapsed": Style(color="yellow"), + "progress.percentage": Style(color="magenta"), + "progress.remaining": Style(color="cyan"), + "progress.data.speed": Style(color="red"), + "progress.spinner": Style(color="green"), + "status.spinner": Style(color="green"), + "tree": Style(), + "tree.line": Style(), + "markdown.paragraph": Style(), + "markdown.text": Style(), + "markdown.em": Style(italic=True), + "markdown.emph": Style(italic=True), # For commonmark backwards compatibility + "markdown.strong": Style(bold=True), + "markdown.code": Style(bold=True, color="cyan", bgcolor="black"), + "markdown.code_block": Style(color="cyan", bgcolor="black"), + "markdown.block_quote": Style(color="magenta"), + "markdown.list": Style(color="cyan"), + "markdown.item": Style(), + "markdown.item.bullet": Style(color="yellow", bold=True), + "markdown.item.number": Style(color="yellow", bold=True), + "markdown.hr": Style(color="yellow"), + "markdown.h1.border": Style(), + "markdown.h1": Style(bold=True), + "markdown.h2": Style(bold=True, underline=True), + "markdown.h3": Style(bold=True), + "markdown.h4": Style(bold=True, dim=True), + "markdown.h5": Style(underline=True), + "markdown.h6": Style(italic=True), + "markdown.h7": Style(italic=True, dim=True), + "markdown.link": Style(color="bright_blue"), + 
"markdown.link_url": Style(color="blue", underline=True), + "markdown.s": Style(strike=True), + "iso8601.date": Style(color="blue"), + "iso8601.time": Style(color="magenta"), + "iso8601.timezone": Style(color="yellow"), +} + + +if __name__ == "__main__": # pragma: no cover + import argparse + import io + + from pip._vendor.rich.console import Console + from pip._vendor.rich.table import Table + from pip._vendor.rich.text import Text + + parser = argparse.ArgumentParser() + parser.add_argument("--html", action="store_true", help="Export as HTML table") + args = parser.parse_args() + html: bool = args.html + console = Console(record=True, width=70, file=io.StringIO()) if html else Console() + + table = Table("Name", "Styling") + + for style_name, style in DEFAULT_STYLES.items(): + table.add_row(Text(style_name, style=style), str(style)) + + console.print(table) + if html: + print(console.export_html(inline_styles=True)) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/diagnose.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/diagnose.py new file mode 100644 index 0000000..ad36183 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/diagnose.py @@ -0,0 +1,37 @@ +import os +import platform + +from pip._vendor.rich import inspect +from pip._vendor.rich.console import Console, get_windows_console_features +from pip._vendor.rich.panel import Panel +from pip._vendor.rich.pretty import Pretty + + +def report() -> None: # pragma: no cover + """Print a report to the terminal with debugging information""" + console = Console() + inspect(console) + features = get_windows_console_features() + inspect(features) + + env_names = ( + "TERM", + "COLORTERM", + "CLICOLOR", + "NO_COLOR", + "TERM_PROGRAM", + "COLUMNS", + "LINES", + "JUPYTER_COLUMNS", + "JUPYTER_LINES", + "JPY_PARENT_PID", + "VSCODE_VERBOSE_LOGGING", + ) + env = {name: os.getenv(name) for name in env_names} + console.print(Panel.fit((Pretty(env)), title="[b]Environment Variables")) + + console.print(f'platform="{platform.system()}"') + + +if __name__ == "__main__": # pragma: no cover + report() diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/emoji.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/emoji.py new file mode 100644 index 0000000..791f046 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/emoji.py @@ -0,0 +1,96 @@ +import sys +from typing import TYPE_CHECKING, Optional, Union + +from .jupyter import JupyterMixin +from .segment import Segment +from .style import Style +from ._emoji_codes import EMOJI +from ._emoji_replace import _emoji_replace + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from pip._vendor.typing_extensions import Literal # pragma: no cover + + +if TYPE_CHECKING: + from .console import Console, ConsoleOptions, RenderResult + + +EmojiVariant = Literal["emoji", "text"] + + +class NoEmoji(Exception): + """No emoji by that name.""" + + +class Emoji(JupyterMixin): + __slots__ = ["name", "style", "_char", "variant"] + + VARIANTS = {"text": "\uFE0E", "emoji": "\uFE0F"} + + def __init__( + self, + name: str, + style: Union[str, Style] = "none", + variant: Optional[EmojiVariant] = None, + ) -> None: + """A single emoji character. + + Args: + name (str): Name of emoji. + style (Union[str, Style], optional): Optional style. Defaults to None. + + Raises: + NoEmoji: If the emoji doesn't exist. 
+ """ + self.name = name + self.style = style + self.variant = variant + try: + self._char = EMOJI[name] + except KeyError: + raise NoEmoji(f"No emoji called {name!r}") + if variant is not None: + self._char += self.VARIANTS.get(variant, "") + + @classmethod + def replace(cls, text: str) -> str: + """Replace emoji markup with corresponding unicode characters. + + Args: + text (str): A string with emojis codes, e.g. "Hello :smiley:!" + + Returns: + str: A string with emoji codes replaces with actual emoji. + """ + return _emoji_replace(text) + + def __repr__(self) -> str: + return f"" + + def __str__(self) -> str: + return self._char + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + yield Segment(self._char, console.get_style(self.style)) + + +if __name__ == "__main__": # pragma: no cover + import sys + + from pip._vendor.rich.columns import Columns + from pip._vendor.rich.console import Console + + console = Console(record=True) + + columns = Columns( + (f":{name}: {name}" for name in sorted(EMOJI.keys()) if "\u200D" not in name), + column_first=True, + ) + + console.print(columns) + if len(sys.argv) > 1: + console.save_html(sys.argv[1]) diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/errors.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/errors.py new file mode 100644 index 0000000..0bcbe53 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/errors.py @@ -0,0 +1,34 @@ +class ConsoleError(Exception): + """An error in console operation.""" + + +class StyleError(Exception): + """An error in styles.""" + + +class StyleSyntaxError(ConsoleError): + """Style was badly formatted.""" + + +class MissingStyle(StyleError): + """No such style.""" + + +class StyleStackError(ConsoleError): + """Style stack is invalid.""" + + +class NotRenderableError(ConsoleError): + """Object is not renderable.""" + + +class MarkupError(ConsoleError): + """Markup was badly formatted.""" + + +class LiveError(ConsoleError): + """Error related to Live display.""" + + +class NoAltScreen(ConsoleError): + """Alt screen mode was required.""" diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/file_proxy.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/file_proxy.py new file mode 100644 index 0000000..4b0b0da --- /dev/null +++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/file_proxy.py @@ -0,0 +1,57 @@ +import io +from typing import IO, TYPE_CHECKING, Any, List + +from .ansi import AnsiDecoder +from .text import Text + +if TYPE_CHECKING: + from .console import Console + + +class FileProxy(io.TextIOBase): + """Wraps a file (e.g. 
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/file_proxy.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/file_proxy.py
new file mode 100644
index 0000000..4b0b0da
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/file_proxy.py
@@ -0,0 +1,57 @@
+import io
+from typing import IO, TYPE_CHECKING, Any, List
+
+from .ansi import AnsiDecoder
+from .text import Text
+
+if TYPE_CHECKING:
+    from .console import Console
+
+
+class FileProxy(io.TextIOBase):
+    """Wraps a file (e.g. sys.stdout) and redirects writes to a console."""
+
+    def __init__(self, console: "Console", file: IO[str]) -> None:
+        self.__console = console
+        self.__file = file
+        self.__buffer: List[str] = []
+        self.__ansi_decoder = AnsiDecoder()
+
+    @property
+    def rich_proxied_file(self) -> IO[str]:
+        """Get proxied file."""
+        return self.__file
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self.__file, name)
+
+    def write(self, text: str) -> int:
+        if not isinstance(text, str):
+            raise TypeError(f"write() argument must be str, not {type(text).__name__}")
+        buffer = self.__buffer
+        lines: List[str] = []
+        while text:
+            line, new_line, text = text.partition("\n")
+            if new_line:
+                lines.append("".join(buffer) + line)
+                buffer.clear()
+            else:
+                buffer.append(line)
+                break
+        if lines:
+            console = self.__console
+            with console:
+                output = Text("\n").join(
+                    self.__ansi_decoder.decode_line(line) for line in lines
+                )
+                console.print(output)
+        return len(text)
+
+    def flush(self) -> None:
+        output = "".join(self.__buffer)
+        if output:
+            self.__console.print(output)
+        del self.__buffer[:]
+
+    def fileno(self) -> int:
+        return self.__file.fileno()
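A short sketch of how the FileProxy above behaves: complete lines are decoded and re-emitted through the Console, while a trailing partial line stays buffered until flush(). rich normally wires this proxy up internally; constructing it directly here is only to show the buffering, and the import paths again assume the vendored tree from this diff.

import sys

from pip._vendor.rich.console import Console
from pip._vendor.rich.file_proxy import FileProxy

console = Console()
proxy = FileProxy(console, sys.stdout)

proxy.write("first line\nsecond line\n")   # complete lines are printed via the Console
proxy.write("partial")                     # no newline yet, so this stays buffered
proxy.flush()                              # emits whatever remains in the buffer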
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/filesize.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/filesize.py
new file mode 100644
index 0000000..99f118e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/filesize.py
@@ -0,0 +1,89 @@
+# coding: utf-8
+"""Functions for reporting filesizes. Borrowed from https://github.com/PyFilesystem/pyfilesystem2
+
+The functions declared in this module should cover the different
+use cases needed to generate a string representation of a file size
+using several different units. Since there are many standards regarding
+file size units, three different functions have been implemented.
+
+See Also:
+    * `Wikipedia: Binary prefix <https://en.wikipedia.org/wiki/Binary_prefix>`_
+
+"""
+
+__all__ = ["decimal"]
+
+from typing import Iterable, List, Optional, Tuple
+
+
+def _to_str(
+    size: int,
+    suffixes: Iterable[str],
+    base: int,
+    *,
+    precision: Optional[int] = 1,
+    separator: Optional[str] = " ",
+) -> str:
+    if size == 1:
+        return "1 byte"
+    elif size < base:
+        return "{:,} bytes".format(size)
+
+    for i, suffix in enumerate(suffixes, 2):  # noqa: B007
+        unit = base**i
+        if size < unit:
+            break
+    return "{:,.{precision}f}{separator}{}".format(
+        (base * size / unit),
+        suffix,
+        precision=precision,
+        separator=separator,
+    )
+
+
+def pick_unit_and_suffix(size: int, suffixes: List[str], base: int) -> Tuple[int, str]:
+    """Pick a suffix and base for the given size."""
+    for i, suffix in enumerate(suffixes):
+        unit = base**i
+        if size < unit * base:
+            break
+    return unit, suffix
+
+
+def decimal(
+    size: int,
+    *,
+    precision: Optional[int] = 1,
+    separator: Optional[str] = " ",
+) -> str:
+    """Convert a filesize in to a string (powers of 1000, SI prefixes).
+
+    In this convention, ``1000 B = 1 kB``.
+
+    This is typically the format used to advertise the storage
+    capacity of USB flash drives and the like (*256 MB* meaning
+    actually a storage capacity of more than *256 000 000 B*),
+    or used by **Mac OS X** since v10.6 to report file sizes.
+
+    Arguments:
+        int (size): A file size.
+        int (precision): The number of decimal places to include (default = 1).
+        str (separator): The string to separate the value from the units (default = " ").
+
+    Returns:
+        `str`: A string containing a abbreviated file size and units.
+
+    Example:
+        >>> filesize.decimal(30000)
+        '30.0 kB'
+        >>> filesize.decimal(30000, precision=2, separator="")
+        '30.00kB'
+
+    """
+    return _to_str(
+        size,
+        ("kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"),
+        1000,
+        precision=precision,
+        separator=separator,
+    )
diff --git a/.venv/lib/python3.12/site-packages/pip/_vendor/rich/highlighter.py b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/highlighter.py
new file mode 100644
index 0000000..c264679
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pip/_vendor/rich/highlighter.py
@@ -0,0 +1,232 @@
+import re
+from abc import ABC, abstractmethod
+from typing import List, Union
+
+from .text import Span, Text
+
+
+def _combine_regex(*regexes: str) -> str:
+    """Combine a number of regexes in to a single regex.
+
+    Returns:
+        str: New regex with all regexes ORed together.
+    """
+    return "|".join(regexes)
+
+
+class Highlighter(ABC):
+    """Abstract base class for highlighters."""
+
+    def __call__(self, text: Union[str, Text]) -> Text:
+        """Highlight a str or Text instance.
+
+        Args:
+            text (Union[str, ~Text]): Text to highlight.
+
+        Raises:
+            TypeError: If not called with text or str.
+
+        Returns:
+            Text: A test instance with highlighting applied.
+        """
+        if isinstance(text, str):
+            highlight_text = Text(text)
+        elif isinstance(text, Text):
+            highlight_text = text.copy()
+        else:
+            raise TypeError(f"str or Text instance required, not {text!r}")
+        self.highlight(highlight_text)
+        return highlight_text
+
+    @abstractmethod
+    def highlight(self, text: Text) -> None:
+        """Apply highlighting in place to text.
+
+        Args:
+            text (~Text): A text object highlight.
+        """
+
+
+class NullHighlighter(Highlighter):
+    """A highlighter object that doesn't highlight.
+
+    May be used to disable highlighting entirely.
+
+    """
+
+    def highlight(self, text: Text) -> None:
+        """Nothing to do"""
+
+
+class RegexHighlighter(Highlighter):
+    """Applies highlighting from a list of regular expressions."""
+
+    highlights: List[str] = []
+    base_style: str = ""
+
+    def highlight(self, text: Text) -> None:
+        """Highlight :class:`rich.text.Text` using regular expressions.
+
+        Args:
+            text (~Text): Text to highlighted.
+
+        """
+
+        highlight_regex = text.highlight_regex
+        for re_highlight in self.highlights:
+            highlight_regex(re_highlight, style_prefix=self.base_style)
+
+
+class ReprHighlighter(RegexHighlighter):
+    """Highlights the text typically produced from ``__repr__`` methods."""
+
+    base_style = "repr."
+    highlights = [
+        r"(?P<tag_start><)(?P<tag_name>[-\w.:|]*)(?P<tag_contents>[\w\W]*)(?P<tag_end>>)",
+        r'(?P<attrib_name>[\w_]{1,50})=(?P<attrib_value>"?[\w_]+"?)?',
+        r"(?P<brace>[][{}()])",
+        _combine_regex(
+            r"(?P<ipv4>[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})",
+            r"(?P<ipv6>([A-Fa-f0-9]{1,4}::?){1,7}[A-Fa-f0-9]{1,4})",
+            r"(?P<eui64>(?:[0-9A-Fa-f]{1,2}-){7}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{1,2}:){7}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{4}\.){3}[0-9A-Fa-f]{4})",
+            r"(?P<eui48>(?:[0-9A-Fa-f]{1,2}-){5}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{1,2}:){5}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{4}\.){2}[0-9A-Fa-f]{4})",
+            r"(?P<uuid>[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12})",
+            r"(?P<call>[\w.]*?)\(",
+            r"\b(?P<bool_true>True)\b|\b(?P<bool_false>False)\b|\b(?P<none>None)\b",
+            r"(?P<ellipsis>\.\.\.)",
+            r"(?P<number_complex>(?<!\w)(?:\-?[0-9]+\.?[0-9]*(?:e[-+]?\d+?)?)(?:[-+](?:[0-9]+\.?[0-9]*(?:e[-+]?\d+)?))?j)",
+            r"(?P<number>(?<!\w)\-?[0-9]+\.?[0-9]*(e[-+]?\d+?)?\b|0x[0-9a-fA-F]*)",
+            r"(?P<path>\B(/[-\w._+]+)*\/)(?P<filename>[-\w._+]*)?",
+            r"(?<![\\\w])(?P<str>b?'''.*?(?<!\\)'''|b?'.*?(?<!\\)'|b?\"\"\".*?(?<!\\)\"\"\"|b?\".*?(?<!\\)\")",
+            r"(?P<url>(file|https|http|ws|wss)://[-0-9a-zA-Z$_+!`(),.?/;:&=%#]*)",
+        ),
+    ]
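To make the RegexHighlighter/ReprHighlighter machinery above concrete, a small hand-applied sketch follows. The sample string is arbitrary, span names such as repr.tag_name or repr.number map to the DEFAULT_STYLES table earlier in this diff, and the vendored import paths are assumed as before.

from pip._vendor.rich.console import Console
from pip._vendor.rich.highlighter import ReprHighlighter

highlighter = ReprHighlighter()
text = highlighter("<Response status=200 url='https://example.com' elapsed=0.32>")

console = Console()
console.print(text)       # prints with repr.* styles applied
print(text.spans[:5])     # underlying Span(start, end, style) entries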
+
+
+class JSONHighlighter(RegexHighlighter):
+    """Highlights JSON"""
+
+    # Captures the start and end of JSON strings, handling escaped quotes
+    JSON_STR = r"(?<![\\\w])(?P<str>b?\".*?(?<!\\)\")"
+    JSON_WHITESPACE = {" ", "\n", "\r", "\t"}
+
+    base_style = "json."
+    highlights = [
+        _combine_regex(
+            r"(?P<brace>[\{\[\(\)\]\}])",
+            r"\b(?P<bool_true>true)\b|\b(?P<bool_false>false)\b|\b(?P<null>null)\b",
+            r"(?P<number>(?<!\w)\-?[0-9]+\.?[0-9]*(e[\-\+]?\d+?)?\b|0x[0-9a-fA-F]*)",
+            JSON_STR,
+        ),
+        JSON_STR,
+    ]
+
+    def highlight(self, text: Text) -> None:
+        super().highlight(text)
+
+        # Additional work to handle highlighting JSON keys
+        plain = text.plain
+        append = text.spans.append
+        whitespace = self.JSON_WHITESPACE
+        for match in re.finditer(self.JSON_STR, plain):
+            start, end = match.span()
+            cursor = end
+            while cursor < len(plain):
+                char = plain[cursor]
+                cursor += 1
+                if char == ":":
+                    append(Span(start, end, "json.key"))
+                elif char in whitespace:
+                    continue
+                break
+
+
+class ISO8601Highlighter(RegexHighlighter):
+    """Highlights the ISO8601 date time strings.
+    Regex reference: https://www.oreilly.com/library/view/regular-expressions-cookbook/9781449327453/ch04s07.html
+    """
+
+    base_style = "iso8601."
+    highlights = [
+        #
+        # Dates
+        #
+        # Calendar month (e.g. 2008-08). The hyphen is required
+        r"^(?P<year>[0-9]{4})-(?P<month>1[0-2]|0[1-9])$",
+        # Calendar date w/o hyphens (e.g. 20080830)
+        r"^(?P<date>(?P<year>[0-9]{4})(?P<month>1[0-2]|0[1-9])(?P<day>3[01]|0[1-9]|[12][0-9]))$",
+        # Ordinal date (e.g. 2008-243). The hyphen is optional
+        r"^(?P<date>(?P<year>[0-9]{4})-?(?P<day>36[0-6]|3[0-5][0-9]|[12][0-9]{2}|0[1-9][0-9]|00[1-9]))$",
+        #
+        # Weeks
+        #
+        # Week of the year (e.g., 2008-W35). The hyphen is optional
+        r"^(?P<date>(?P<year>[0-9]{4})-?W(?P<week>5[0-3]|[1-4][0-9]|0[1-9]))$",
+        # Week date (e.g., 2008-W35-6). The hyphens are optional
+        r"^(?P<date>(?P<year>[0-9]{4})-?W(?P<week>5[0-3]|[1-4][0-9]|0[1-9])-?(?P<day>[1-7]))$",
+        #
+        # Times
+        #
+        # Hours and minutes (e.g., 17:21). The colon is optional
+        r"^(?P<time>(?P<hour>2[0-3]|[01][0-9]):?(?P<minute>[0-5][0-9]))$",

' : '\U0001d4ab', + '\\' : '\U0001d4ac', + '\\' : '\U0000211b', + '\\' : '\U0001d4ae', + '\\' : '\U0001d4af', + '\\' : '\U0001d4b0', + '\\' : '\U0001d4b1', + '\\' : '\U0001d4b2', + '\\' : '\U0001d4b3', + '\\' : '\U0001d4b4', + '\\' : '\U0001d4b5', + '\\' : '\U0001d5ba', + '\\' : '\U0001d5bb', + '\\' : '\U0001d5bc', + '\\' : '\U0001d5bd', + '\\' : '\U0001d5be', + '\\' : '\U0001d5bf', + '\\' : '\U0001d5c0', + '\\' : '\U0001d5c1', + '\\' : '\U0001d5c2', + '\\' : '\U0001d5c3', + '\\' : '\U0001d5c4', + '\\' : '\U0001d5c5', + '\\' : '\U0001d5c6', + '\\' : '\U0001d5c7', + '\\' : '\U0001d5c8', + '\\